[Tar archive: var/home/core/zuul-output/ (owner core:core) containing logs/kubelet.log.gz — gzip-compressed binary kubelet log data; contents not recoverable as text.]
+6/[ArRJsBSB[}S1fZWOƴA2Ū59D3rxƔDrȽIpm6z( dyT&ey,p`r';M+'Zx @$.4ʹޘ8ہAAP_nmU8 S#58a[ټWSTSP"Q&NeH<9N:R6vH x&~5 fpV1wz|7H Ϳ tGe +Gm9L˞hP_@] kk4_yEEpїW^>^y*=r lD2Gw(*aؘ8; mWq=|wׁVsu p>p(k.I45> 6.{Q]'!;n8t=~N7 ͖Ӣ:y-0 r'e8en6eMC=7.BȦ|e>%.&"h3Ty'UDb&56N#9ûPjI7ᤧ R9s̓w&qpQBdy̰x9Ɛ2RKI,z%#W;FnmYF-IRE>hځӨKQܰޘ8۝]>$cFWSПewf8u }Iaj}laj0ue3Y1-^}&8^%EcC@@<*{*C+ʀ`RZdϔx><8ߠQU(yJAPDNNp)d`:jHc }x5+b6I'K0.I97^'g$`qД8etJK7k..oq󀮖n=MgrlVXr+q [F<rIC6{AKJLl51ͭ aipg':g^ e2IFiפUJp(Zc@tr|.k1F̱Rhd2( M :fMX}U1qvԳZ: 5Saj6MxaCI=AALI$0IG NY#JREQ[!0c[hRn?1;g^Vʘ9Ud V{t:&YVGAi#R ZA0lOz\>^w~r+9i HֹI I<Ҙ%PX;eds"]IReT}x!DA aE^2^;")]ϲ[jCͤH 2Znl ¸6Y5dm&y`,Ԭ"=S$!2dMfd(]E^H4PJJ gBO?/)UI.e]H?+:ΛꃽT[~2"#~ʴB~R9΅E39*z ~%4.kb;{ǧ@G} 9sK@9b3.8fFKe8va򗟎#2WɳV3HZU9-&P شR\ōhܺᬾu;?++(Fcon6?]Lh>ԧt'ӧεH]&Vt~ sQ%A+dt9<;&ZY\ǂ)fֵA >ljv0:K}#F>uMbE`B#τr|=.uzO7ň_ eִ$]-]kFoFf[Rژz>Q,xwi:yݫhS9"W*V\꺾r:,WgTGJÊ?Bf:)fN|veR*!~7 Q]:?||ًwc鋡<mj*d:qQpU I_+n0h I[)M Z*+fAɷ9[b@b"DN`Ymu!^y"F t+ oVe}A*&%eILЙ Hmd:.3lC6&Ύ;rڃ~ڹOۉ]wb!!Vft1.)A:u S*XZ[DF|Q*Fe͹MKY ; %DG9"6 ralCCB-9/4&v ^͐Ξ}lXSygL5Of|JY_c:5'S W謭[ɺkP`U2ֱ܇X=zjbcdƉ# I.% IHKRtrJŠX"Dm\‡D؋sZ-2L;O|^l1@VG,^LHj{5&~6viJTċbk|q#WXn-:z&t|4tGCLK~#d$ӨQ4r *)\[H1w#mG`Zd2Zn.bW.ٷr/9{m_ݎ}Mw\Uog/5No3)26ާN@ u>\^gDʎvǝik+H߽{0ns{}&pey|r;ZLK>(Y4{6?ímA[ ຿ٶHyVy!2gc;]@pXf a'֦^ G/ XJG?'oMG?~U!SR'O8~">S8-PXE-f$+6^zVKpԀt%A٧$:<* ]dˎzA w]w_-j0v~v~TEuƕNrysMNi~L zñ=:~3~ep3bޕlٿB,Sb xIЍ/LF:e;~AEQRĘAd<.@{m(~X\Vg+Iu>Hbo[t=\ROGEd(rbrX+Sv)$d6q[`C'VɪۺLVt.<= yWE0r: k3>/M&RJ`RZ!e@ݘ^NfOo>cv0Q p0X1$d y< (Hh(&2XD;$TA!yx ͢OLϏ/ 0yϐǟM7Eq.w?{" sL7^:mG(Ey4i% $r;{baCF i,$ZčN[d=%#=DS$0+` \VbE*L<)RRarR"!0ŜcH4 {.߭=?-ͧ.=Aw' -1PK%~Lݲ}/L'&xRE*qTRG<ܥ`gFKo!/@,hOwV !]A{CQwIoG IVCSu0mi54f~u׮+|!Œަ&\mU,Y?lu+-f]WO݉q}ĸӝA,-Q(T|+cO`I q)E 9T'NbؤʼnЛv5-}JKb n05G_y E ޕFQ*#.hB mDJB ď="O~};GDh0Kjxz83wX ms A޹?3;;+_Ї*SOǯ&J?N_c3\5Sݦ; kdR;$HgqC@$P7~ AQ7dZ}^K~]ؽz?G}}0vҔw-|MSb,M<6_cB޵xsu俭bKm&0<_bܣ-ۈTX]!ԗcy?LywĘRhĽ'Ъ jUf쇟F#ϼYj:MJڂO ֢FK-à_mOL+~=&nT졹|Es}aN?ȱlӶ|匃/<ђÞ<53$rg<G+! (`e$:!z`ӎ =+1)H\79UQ,eSd[zBzix\az6ޒnyg7[ދNOwNM,u6P6M6Y.{갟-E)LO=w1ӧgCz4~k = uYOIi5riJ*¡zdth\1ޟ8=aJL Ύ|di2Kk2X;N3cP)R"{eexe#QHY;xƌL@ZFL&Z<YgE){ډGӋӗF‚ Dc(Q"`j=C0p0UfeHjPM;} CvY$PNH;roDb 5O50I)frYj|/Zjq3kF M'MD=[쾇OXaLN,;-}l͓ %ww9++# (cr JD\r0E0eȠ*p%Z2|G  BE0`̙@R1Ǚ1 MP\).B6=Km4ynwe 7@i"YI~8uB¤L>0a& p@ 1I$% $g"opXE#fHʞQk %6`V0!#a:0- 9rqƶ/FǾY`! / "7 , ZuH=N`#88f!dhю /9AB8 .#ȁR10QVMQffD3bψz \D(224Jzocx;bF$Ѝg^ $^2XobCIr4i$53fflpgċ]m>uf%̋ŞN?,rd0K=3ZC] a'eћ$kҦbuϋO{skt  (WF>؉c=(czX?jĀ[?Sc9J&OWM|Єh}g oțwpGS8pZM.`TU0?OVg\}vhyZۡcS 2( Koi\Jn};8jBJ.$1thi(7Cۢ7 -pFjWp:0T 7\x7|M#w7D;w(!gQ=##xAOOl)R:6kղX\Vg`:xyRzbC,; vLWedtZwCWTh芯AWM#o*f3trJhwJ(J! U;CW -ENW J + y[UKyW*}ju{J(k:DWU;wtP^|tű@u0~\tbJh wJ()%_Ղb,'ŧ⦥O;gt|T]0h6f5ԭa) K 8=B:Sb(<:/1nC.#`B-TZvЉh۝ф}rAiд$.) WɮhʄtJu |[Jew\BWwe"J0]0&ݑ\ʺBW -NW )~OW]+b׋ChKvw`Iw}İ]]C)LkЕjӮD1D;DW 0UBվUB)DOW/EU̺CW . ]%\Jt,tE)Zu(UHU$P[$FBݙN #\U~ᠨN7A*~qiLI5,C3 dK!s$^B`Xǡfy[} 56ͧ?^>j0кE5 w2 nV-D(fIOO[NB (g̭M"NӢʥ8xRwުiN.6g[u&)q-ߢ:1W^y/?0ǣB_qT+9 sNØ!& Ue$"_jY"M}{lK-,'l먃W85kX;F ZZQqzMeY+6ˌ%P0vY4a#AZ{1[,} *4 x"ډ/lU۽I(E-W }#`;\C'aTL\Z/@yX7 |ve]?FGhIЄ}n{^XYL2oƼ$ɬfzj@-DuԒ,MΕ>XC:Ś:D,HG BiY JgS8Ud6•Rh.Q^.{e%@Q^Zo%^Wp$bf8 ܄FPppd=q\ĈySqD$1j<[֑aY%;X+\$JGF}\pt ]`=JCq6̩k/*6sqg/f0gB]1w$bRsN1Hp)\Bh `C)ӭ靋AiL@jPAF,B0G" H} 1CDbJ{8_]@e}/c 0ݵ xX+ "##%%}#ԗ$YJfW2+*勬8E4ɐCsx/עyx!M(i;m1;K;SW\zvfW֡_J˷3[&kmZ)A`!aš"a$3[E], 4!½ BA'`U=!IJbL"efƉhU\*)BĞ\S`0g㩈,bjb2fﵲlZ9 펅Mb  A3qEiBs}6>GzL_6:z־["> 'sQw\km2-8"e RTYYҖ $BdǴ*Sp#e \or0gܮbw_(#ڶ,nqc1CðAuQg}XT} 9ō-rz,ыaX[F'?|@st5Su͏ r0 eЕ|II}g7--SoBvL3.H˟D)YX[#pTtm"sh-Ƌ3~ wxRngbzCw۟~^~'R?.?o~/)_se+'glOy-:v&"FӆSf@n'n@?|x6._^ m~I+|ryg? [WIRȞXy؏kwY p \k[T>ӈc}gӜճI+K})#"a?

q?/[x8v4y^ܳLܘӽW,nhpI7F],ys]Zg4wY$^tv4:CLcq;XWJM¢3xDk~8a?gN bcp+g9{yY ^N862@eFX]%P|$xSHЙGvbl/F9^Pg B4c-3+@* \+ bxRJfd. iBN&kzUk-~;]n7[ޛIO)_>g}M[h GU_pYZ#x*3>|J݅YB2]KA(Q`S3D ` "͔:uNK1i)SVVO%V4E.O@LddDAHA1'+2C5.%lJx=٢E 0gH] j)ט6==GPb~ᙖ HD@R*H]PJ.ǡV\3jSC%#|dϥ$ɖS&P(Lb#J[.ٜj3q*nR)0՛x^m޶r/JaoVe&kxuʃ/zzVRR>;E @cBʢ$ߑd x1S E||ɹ$^B.Z*€\"( PŰ1) sqfXL3nn/ KTmy]mm:b7 \oj@5J#g9lǘkTjɚNieD$olR k\ZA÷:{))*lja3,L̾o'B,D2"Їֵ[sEp1ǂʹc,&ԞZ :E&vV}bFPP-t2$NT8ז<` VJ^4j|LGđ1٩&s{~eQq,L?n1" 'DܛͯduFm=(h8/..`ϩ2bB0#aߘ|ިRq&ɾȞ2^UBsEb?>fZr[\q1L8.P&01 @)!c2^z/c3& 6Nx8w?c,xoaV^yp7x =(`#G~t8<ntݪ] AjfX)رخHaZvA_ ?0 AύJQ(H[CEE]r5L6*3D?eyRYT*"zl E(*9d, $L"gU(+٘7爗Zӥ[-c?VWWU*]w kX|b_,{ZjtÎf5y7?unN~2-g_旗{[=m#t`7skkq{bޫCu6ؖn?X/͓J=7$<*g o>BgE!c,tA]m;R$|fW5''t?r'}| ~DNNt)ILI"bB Eiʓ,m4YZB$#Ԇf)hP\JPv.cJNcroYy- %sD{$1%& |F>GZzp߾_/OT/bcǑ^,SI1\L)4hb%Ii>zf1Ydgl:Tl^*& Ig[xd-.cgtΥ(=SyE-C`jU Q%\"-c5H;ޡ>Ec)=6>j%"DDm9Z= BTQb>L&^?~dŐHV```m2" rj)#LTl&?kܥtH 'Of`! )ekQWDf~]i Z^_@3U;wo6p|0>:U[fcRHԾ*ņL`jY,Y9Y[z]yoG*o̕%H'3䁜MdDج2kK(CLrk#MZEJkeս,2hE?=·[J/2ww h֗a65{t6nRK=:<:qXJ1.:a)ɛ;cGȐ#JH{F|?x ~kYM { F(%G-Ʊ`} ֜ q"xPkٺB^Q#)TYɜRafX%ϝV2.8XAPVdXۮǪ6$a_v.֍2ƂwN ),S }QZ(fXi4N$pC]iV FUH K#VAXƙFDQd M䰙B[H.;>I $bQzmRGB0eNHxنi]TsJc^}7ΚdƝ&Fbl\k/{fMa/uct*M*.5vY V~ꦗ}}vbij{Vj5\p=h3{ D;.)U=@H)D01;RLݞ)pf)0y}mV$pUtb -&Ta-# y뻣2px|8( |||f`>V9R-j~^)zEfSNԊrBtP칍ZA_Zb=8ar>I>hؠa2uV5O&FZ~)/|s>=8XfkD91gSjn^풑(?'!><>O _1zH5Κ!2EpT'4i g], ^ zm+FyE 5:>ߍ&.=(fIl=ڵBˇ您|,ُ矾oEUAmv0TÓ~!K0Ԑ 2?%9MJ!]^k*SV3" ɣ߾y}?0QG;8zelU{$Iy+8ق+][tuMۤk ߠ_rC]kܙ/E" ˑf|P@E|~Mu:1*X#-ʯ(IVyJR`#SX5qBD&RKcBc: TD;%龭 3nFL)Km'1!u3 oynG.1HziuZ9yT3󨲻*0_:3dBvQzuZr1a7ӓR,[JJ+upVӬk O?OF E N(W\i5Z =im@ED\:ڀmT!RPہ7s ^A T!6מ\W`NQ h+:4KJ9f|+[Z9?QJc1F1sɭ`)$ŤS,)l﫰1 #a )e\ T61g BuVcR;ĬW逰 OΦZ:.ɁϻzF(c;S|aGzUm@q柼&<MrJ͙T:I4@cNFk>f&}YuZFu0BEzǵ]zGO6j Si~{~8 bs%b-ņ,2AGǓ0̦. }(lnR ½Mz7 OǧDרvts?]Qsvf-ծ & ,sZWpi]nb@a&i[ {piu=d i3  ]Ƣꧼ▱?-MS y,![nu '1beP|Vh*c=yΐ4CUp;oB!x3`MLj9+j$Ø`̫ eDrnREi}}K2|c-C v>BK:] %(|-u:0\2A3I8RZ)@$TҜs%SU0&]@axԨ lz?ab9,Vըk #zl1iZ94Cȴntko퍧zx1 -X'L<ؐ)1 I ƧDpC-%0li>K-} bCRiнxҼ}o-_TRja?Uz}(+ѧvЍ Um&d:WhURg `t2ƥ郏k[f2"*RèVYϛy:fkꙺVǃI| x*Bt9:iqJÃ&8.scp R%:-Xv,{mٻ&[&Ij(o"; hZ8W_WqUۯ 96l偬.R3[T~*~y9z- I6 ghsEA (Nekсmlz#OSIAabHTby@:zQ9h(&2D;$TAvu%7-7DŽl} zq<>vX'qn;+ιKl0Q܀z鴑^YqVB4rY(IH5w  ULe$"nw")YANhN4ERɵ*v>lzIiOd+(ɻj*k_NZ\:"<{Ds>X|ضI!3gf7;^&/R܅>{]xcZ.f8~?$geT~ GD@Y',_lrwΘ̋1g~ ?}~ϯD&ԯ1ů|aYbύ&a;ۿf0>18;0S}n,^"Ιu8N\ zAl8j B| /zIME*zǓ{n2hp&W2l!rl@}SfE䚠2EPwb@>duOIpl}uQwlF_ o@O1>" `:5/]3ȱ3_M~wc𯙘ɎIUeWk(U|OfgHg 0/ji^$؋r?fV7< 6~[?1E&(T|MG3|ĭy]s˒ŸhYpj޳%&2R6U\3C"wSayT!z+2VFZ;KL+y9Y ._}\c1 {$eZ㸑2[`G" ,nrg|mevWyf,QR;nYMVW]kb&E͈:HK6d% .GOk*nNWftvٿݺ Y,BG'śO{Eѿ%uJ cCz݌ X)w:Z=fkݵںϱ힭\u.g llZ_Q>lwU7Ϸ7Z[A5WwJXl^R|yƗWC^նgyUx>.fŋa&7v:$$2~Y[D޵)d ZP2hBg Ta(VB,NT*gWǘz'('\drcN<$U9{,R,CM{WG9f߫{$_ vjOfοm.8ip",EG &J%Y8'p A0 ,M@WX_g#OJ2DTh@hrD.5fTz7pemNO'ave_ܸn\؍2l_v%V%xw C@F\ kxa_J ZXD+1YEp(-RwcX-Izmb-C͜Kv墥bC%g0T 6 0ٲq4 +֚95c;[.lՅvЅ_.RTmx{<-u8M?4?EZĀ%ѢQ`1RkUdV @ TU}шkD4-w 9@*g AP+X`>BH_ny N  cc2yRgU9&!kjr)1V:T6ֈٯ?'zqIꓯ 2}w |FA/gd䑿m5͙ %TC x78euJ Yh(>" ԨM>dxTeR04%Ǻ@&^(X3 flYWXz-RrIsגV$Fwsy){zzaLuCL\1Z_ea=C#W+'> sOI:"lLڰh"w tt>A .!Il#ƌ^En s`MARz$)$$0CBG6G 693sX^pɓ_P`Q,M :#l" +!IUt k,rXB(^Qd!zWEcr듌%nIE`KLHȻOlDFO,q:`Dᄵd̂dB 2# iWeVz܂Н^]Vu ܁/;;M9o@*48ˍG¸z<"o}![>i]NG?Y "3h!JPS.^ĆHQ"yeD&ЉM)8։jAZIUWumwּՇ󅁠 @cNd!2pJ6H*mb,Y'0YRMC6»$(z~z~zN+7طy)|h@=ٸO/{:lbZTzV_vc"cjbE?Ǡ|.TύjP   (2,lv9I`bGhlm64},1 z_@DQz\**Fo. f7nk>YHH>=A[gmL2}X9r.Xfr1b0P3,hyCAObUMbUM/ Zz*osYQ(;O[z11B!R}ȶ~ xvO|$nj=COÇnuE佤ͧuP M>ߩjםs=^ݰa](wM]޷mo7#_~~MEJ{΁Χ{Aڲ7봻ҡ=Oۃ_]/*9Cn6)l^0k-.ە_:*`wJY;@+V6?x*ՏaRG?N]>`,~3z(~8jzZj.:8*'т墣 ͔YF+N:q?;8d_y\Ԣ A:H}|. 
]z-wUjWk[; kmw\'Wu+%1Z ~S`QU6c6.SIh Zߛs@77֡}WM|9-LW-rޣLY+ ?W`:*"S2(ŒNCLɹ_$٪O9;f`RTSIgSd..9y6ĬBs.M,4*j2Q(T#D/MI@YIp[-g9P:Gbfсl>F}AzmPp>% BMꐂq\T S:|At_+ c. "e2; _stY9cQ`k<`L,J' 8KNNL5XU;3\3>*H6.^U>IRrfF3lAyDƵ oto7c !C/u Y02YY|OGْ&b;O^)Oðxlq6㐇Jk,kt.`Mlw2n٦_ vD8$ &/lbF4i4(E 1z?o&)gECPW!џNFNαWuX}Qoj~ӥr1ïदiNFG$N~ڟU\9qGqݵW/9ӳioFd}k?I@:Bj.;ZK|Nh`x~Dg~Y\U2r[6@GwkWK^:{[9&Jv6 ( }trt:%mo:-trrh$zRxq6?JSnƿh0 ph3e{>>hʌxݣȿv|]fxĠrKytRN߽_Wk{I4L'?,og4y BI s&0wtk{wNsY~f4vWb=ŧBϯ<=?]pZUgZeWhOD|> T$hs2LR;Ld D"1{N$n]_V$ؖʮSHN,c*$f! % \FCAN/j9p_67=Jy^ݶޟ',krc]SP~ŘU"i};wudvudi\`@Dl2M헢=P9AKaZmϻ@I %CׅA4)[VR%. *r0s6F1@ш-j@#r??9iΎͶՒ)û7\)qAR y(9_R4]j@~Ap&Arꎆ8IVCЉ7~_ɳ>魔L\d׮uULӲ&b,HYJ;+~46J ձc"aƱpGK m갛n:Ez%9蘄"54C*C-EHR.dV1e^ 2!hf) _ X)+Ŗ^+8(U7 Isձ?\Wn^Q+_<)֣ŦVWwwP=x[(cGĎ%mIx}!/@u9^ D |cu-8RCۚ6̨4MFyW:[ɋek|,Qf*<ۗ]tPsZn}xSF\rF w(^r񇣞?Wp|׷xc[xFѶZׅͭ鼭a[ 40-_3p㣄εu'7L&\(\*dcaDˢϔ tPՙ =;]nnMO=3Vs7o#q^#=f+?}ٰ] j2l|3qOT]J,}a߱;S@jr˴2"1[Oa`NYvݠynP~('O<~p%^)EMs˥h)gr,s;GpZ8VEV5 I =Ľ^G}4=z>xʾ})fD&\FJ;jIɁju%^(|r8,xq8ez2nϮ=>*ާ_/iׅh'i=m meg}1| ɽCD ~vW+4W8hppf?N'㠴p[:#PRjdar$|LE Ț8F޼Xk-Gu[noh6F$hgh Gk{lWuxO/s9WAkanԦ1rh_xͳiojr>zWg߹?{dwY{F;v/dgs2+tԹЪHX1gIͿ=gmg_BH##px*1Kgm:ȋaz@Vї![kq{7M|Oy󉚿;>Aŏ?ݔ|m:"{8p0a]F?S`[O=STyBp dI \5k{pլTvW_ \y"f=Еઙ{u+jz᪳곁+Ի=ANgW73nfy`1x3kͬL m^vA z:pUcf՗W+ U3 fWZU\}pիO?ZWZɏjW_"\ U3ՓfWO`JK^ZRknk]Z`Y*ףc &}Mqo(}qA7}rfO8a UeqQ.RpvUv-E FV؋ [jst^Ir"$PYh+QƅJY=[D!ݶ}̝mw]$cEnHc+ [٤j]xLa`g\A|ѠBBGyPr  22!U4qTT E:o,9Ѱ8{  Ata , vSx[, +x:cܠLt#+QXwFR8-P9%.+$*bv_nn}q2$א*b-V a,{إt!zŠ:"J-t@jTP P֙ Q{pQD]Pu B4 da5 *$#nH{3(g4`b- F21=:jˠ1GȌ "eѳd|NiuKkLs#S=5\c:W gFAw]LT LGh)I1?\,^Frr5W)^tBe_j`wPPH$jx  {`93# Jf-th}/j@2\bi%CسM7ksfC1IЪXU;]< ڲK1Myy ADACd.]bɾ0 FiQ~kHh!ީE8ØUsxkꅉߌd'FBphl0QF6x <'*g_}Og(k6b-6aŎ-17~{.teAl =?&{hCq@>=m!Jmz$xH ""H ""H ""H ""H ""H ""H ""H ""H ""H ""H ""H "/ $@1@+'^ۣ9 50I"# a6'H ""H ""H ""H ""H ""H ""H ""H ""H ""H ""H ""HKAn1@+s>HqmL pD=FHyc5@D D$@D D$@D D$@D D$@D D$@D D$@D D$@D D$@D D$@D D$@D D$@D=^Hs ~4$Xk@`D=F0L ""H ""H ""H ""H ""H ""H ""H ""H ""H ""H ""H " t3^h+ z=TS6'w 7߀N"~&@ ;#  ф.OnS[ڐK7@jf2})7md|m4iG¨x`ƃ'Z09_=2N拢e@Kpxb [Sufo;&F5;nu"h=xF+ C#ɕHxJ0W95\|re9@ ٿ0NN&烿qb<_\_דQ*-ȎJʹWN*hQE&xhUSjΞ/Em; \p) J +3͜Gs.تciU~*]aR\E>Z7! )bL..2WE!(Xl/x9]w73|E^]j \e\7\߆34kpyƢXߏG7j6]Lg?ow["ame>*~6+ "ٳw*.ܖ· AjoOtvRˢ#p:`ي\ nS%CO(^V画[櫵%v_{vݦ75r?KtN̾paTEjZ'kȳfenIgQsJ..xOӏLLҳUzHy4)hpmF0r.[HGYW4㖐Zv` ԉAw=H5x{=w&@sN!d +jI akH/ z}o~5N~KgZl鳷?pgԝ#ao =K}8vKl~+/}C1S]Da2CX 0 T(d2ޮXp=~cb섷/x8;-\n_ίVC:$\jm.{qA?4zVs0}XB9Al4mqmK#8=4\x0\~O>d ,~' !M\̋Y sx?ڱɍj;->Pbe ݃\5Gbvnc^]Pri?mɧHt8-tC-%՛{jacK: FCAjxB_wm夭 }%YKo6b0HdJjO4k՚66Иƺ:H* y"5ZZ$x8Z\zΞ.r;2:9$.~]~^LSQwr?_kWߴ|3r_4 u쒇ia7x9}jZ' c)hScT s V1Ÿk?'/Ng?|1g'?^_?8'oɋd>Iɫ/ؖjӞ]ϡbݡ Dڤ͑  P([3u0 kc1F73ٖ$2}k'X}4Eͧ%fAÚf{I]=Mx;*fЅ멺;,BҵƳ=׼^tΧN 6]6 Ƣ&MNwTPWjF\ u8K`= v|2ᄚĝN-k? wYoxhmo_ʹȒPE{D)di-SCMU$=nC,3n N31L,s}yC=-gA>͋^]^<=<][[ZoYY"9Οߓ!p"̶d1 J7%̥ՂH7"zS}OS޽kSS Nbߒ2]R޳_xZu~fkDj\;-tWk8 {?ogWj>~W=^p/mOiŬ1g#x&FmVbT==Ӭz"S(648YSMbp ~4gNs~a-Ă ^ ,WWe>}ܣ2f~eWM {kBCZxTZxe=-Ӝ7e>|4^M&0K Wle2:%2 _5NH/V(8UUA5``2F!ʦ4 ӳg[!cW|QT/oɻGmߴ''nr?!Ƕy;u% q&Q>ܕvՠ|ŃCmF[VC)TɥҚk޻Kdzv+ddoyN.a4*Dx1^[VtE[r{ӇC˖Zv9 h3W0BVx1h6i[ : '!IJfUh*n0D$X{\"F k,o\7sOGB7 w'pmǞO'ԥ0'G<ztt>L.V[nvlx>n]o9W|Y0mMK n`Qtt%d'b$VVH jRy!$e3&1\\1P)(/8ހY3%mZUWgPFkm6v|kӐ>@>g[m =7<q>.y?Xq8Je07 /ϗ#/_+eӥl8ډ`ă~H\r,rk#/?ىZ#R;uF9\ 2$QVZ A,9%rfx֠Q^0]tvхCںZ{$t2Zl lX~ܯE6L+Ou{\ MRGLNn )e .GqQ5e/Gu^Lڎ{Gtx#n] /GZG w\ ٥gǗ6"'\@ZH^yeV ϥ:G@SG:9$mV(;OSo!(e\kPg] |.֏50, dՓ]xW='r#iMB]uv{&n|@doMM%?$UtuD+mv4oTul_ixoypto^xr3? 
޽Ȅo,巼8yR e"B6֜v/IYyB*־}>ɑZsLOdJtjJOKU}%bQJ{[N ^ 2&b?n\I7{=R^(̙ȕO:s̠h@̹4Gae]2d/ qWz8@mD%ʹCH+Bdx h0><9+uy{c8;,k7TtQ7|tmT.ytx*8a[ǫW^5]a2d̾ʚHa) YW3嘖"d>ȣ9r yG ZyO?g9d~M0%ݼrn1@pN%J g&uз ~Qyo_ b'c$c9EPd@#Z}$\H{"3FI C 푌0ܳoC[ԙ{CKrje vJ"m^Re:u>]:8~JI Ĭkҟ^dH EăEY"[5 p44*%+F/=Iڹ"ڌ“mZaD.Qcǁ[#v`EkhL |%P ~ԛlq;]}?lH #Hq2s,s̒~ 1ØKV;mEx'mdW_V@T(\|[ |]ʨv74a Nf !U.T}Udd O`1dI4d{BBR[]Qh} e*X5 }8/zn~fR@! ~8pIb®2m$Z;,~C$؛سS1iwD?.I>)5.wq9;8FJ.,^/h<;S&:H"rP3M!u\MW4nx\F'DžppQ|OC0pt7 Z$Z@;4jڛ6-ڦif'E5z|g^BX1:L <_g~[.L 8rʀΨ^ +N_{Z\pFRϴ^}&|߼|!c IJ1++TQxo#*JeRK'Ȕ**U\J&Xwl%}wGե]srihE j1M N+SdF)3Va&`6hJqxm8! *Qc&썕\ol[jիZ'q3c%vnNbCUɘP6;de %h0%}W%N2SV9Wbt.ΕnH@vۚNNItE`$^B) 02mQ&i$.+ ʜ"7(Ra(e/- 'F%@2&FSeh1%ׅ~g9k-lb4-~c}>LJ* R[CAR1 4If^;+\(!F8! [:hS<,[O'mmʜGiZ𙏮zPfx 6+ZAxqI􈩭w aC[:S4y*c$-"H˄ %xaYk灐+,{O<`oSuFh kYV1iTi/Vx*j9&1%b4Mpw֞Eh+_JO_k,o·̞YZWHK<!DŇ+ :Xm dO#< [j_5tQ0_DT#p,%vkj=QJ?<9N,cCFmF)zW2[I^C[ۣU@HK8L䀀 g*ALQLr(IӖv]>Bd^z+75  zl_iGHX^o4*H߽fn~| B"nE_,p5ݘ>y/E1Q1s6`-"6ڥj77i=RN4i8EYuRDWWNrTٜt.n<\Ƞ"e,Q`uYv=`-%ipkkcp" TB霹VY&qIItVavM{0kZ?hP[ߚσrh ~t_d;|?{WƑ O8O/r ddVB IqeJ&EjJ܆_hyӋI^m7t4&%ܺRɸ.VVtVZ)ԩs6CWC&hh{YcekT˶,;HIcSi'.ј.2dCQ2yjuxgn_|=﮳*uu3~r1\'p^8ṆJyoCy91K?'] *:n}{Ƽ+wcMO[F~}{D篷->w5Zkeftž׻NW\#)J:"Er׻,Ou݋{;?Y}v3 85 ťT;@! ÐI HƆDDU`wlg7Hӵw#4*c=s=޶tQξ1,93ɱBGn5 y@$+A!Lg(˘ϖ)_|5*̿=4'tx:\௧$XJ$dn vXiWywrm5 5aorpBxEKŻ }b>{61:;Ё2~4E =w6YpU<5} _˳Uz}D{UyW$52\#s52\#s I}}lȨ>Vj[c}lHH˗~Vj[c}l>>I>Vj[c}l>Vj[rWό5 {DN@uٸF,).;vYdVAӤniR,u 'lzB3] -H\g-Tf=L R=J*MBRv uɛxV#\0E9wtkP'%(Wa=ozVJPgtڥ(MbG^qddBf"AeYL[6KE+Zs! }TgMV( ; -X&sXa5qv CwW<)*Q])0bzbz BIC AL7qҿɅM)CБ]4MIմ`SؼKLE]i kLN1ccB5f7In~zԩ9W_j$pilD/y@|Ɋ H"$<%Y+eyx˃78Rmv|p[#4bBju"?Y2YR ɵ*9{b=̖4Cҋ9!yxTWr[3n̐E"n20|RԶ)8  ]g% ZEHi~$}KmBN|Vrv\xt%7x6z=\gl$a C%kduG'>ha>>n\o8V„rG Wko5+k8mKVW˫>HrܑU3%?FzKޖ)bVT?]~7zNÌq|NRWYf'xAgԫ/Y/]yя9Gn ⟸/mf!F#&1Nl:5}b9'DGdaD'Bzߌ~ݗ+$LNƫϸi8q:>|qT H$z_?~3?迃^7۹{SɟF?[N?/x|eX6 !j +}"΃ SL6 hܕ$Ao^ |~r:[_p Tx5;g/g}m6-vrl ۉ EeEe'nhSq\+?cP>ÌǟeGm5iFHor> aaW6ܸ~zߒ;MÿEiX7 ̜0PT$(iwu#NM;y=Jf'7̻,E]=I8;KYc1 A 8':/ eEJ.ےi#TnbtH" \%{yrA3ỷ'Le[M[貏78G{OF0ǂe Pe-C`a4'HqMdU 蟉%G Pg'S HFb9M)ZT6j6T|5iR0{jz/KՕY]&۾fW2,WXI'JVCmYzlN)ʮƹ%z8mSʊHU\Iajf M^JTmq^0~5ywsXyB雒%=%,5dz% Z9K˚❌B1V(2u: C!{! 
&cR) "a|s> Qy3j7%nF1kWӎcڦy`oa0DEˀ&h HdF"o!&BFUfQ64ulNsE !WĢA'd`:z@)ĀH5*j=_eYx,~mehGlj~yRg0E [Oˠ"JgCUf էQjwtAq7F$X.UeS1Ep8$qMPVG&n5ҫcuVӒ}]/޼rT֬ƬN*xAz8X?rkXz@1}GSK;\؎eW?G>eGLoײ(QikWf*\t7LGɘR.&);Ŭ4C"h z>h]fS]ku(ypflw[6.(^Ww}ۃ#fKkq.FL6c@&TQh ⥓^h!GLD.!TC7{GGAOL&@(Qg)̵.Pp[w$nJs VLctYw3嘖"f;r\D"~Rÿdw$Co鏉d[˓ʡĪUV H%1~|pJ f>0+|H&>*!q#_32Iʱj"oy|^Rv֫ؼ_֒HrBliMK-\ch[\)W.}|e-ei*nHCM+)7ܺLМI{2HpUmRiD.KRYzkJFARj4 a$Ns,^&-YY 2H2!Fm5[ǢK])r%,LB`nt[muvN3Ȃ.w9NR 8 c Y'nCz{Un漗 "$&c>G%Y 6Z1*DNdh6vD66C(9NJܓÇ6ᒄn mC[%xC0(hVz8pKyhw1X!Y+hQ^QuuO6wh9]O7XC~GZ;v 7sce7=yzǷo5W[Z[%]$+gs㮺[B&Ksţۆfn.y}<)Uê,ޕ'W4RWAy9~9&Rhu0BBHW:TzeX":*ZFn޾EFnKL DL骰gXBsʆ)t"KlTflKP9i6P(Y180@r)+IQtئl67;;_(~kl}VZ&%}qjj~ګMw\v1JL6-U3 r|zJ_$L[RfI)5ƗJ-XEl7"M6i3ҲY4d4U$YNQx `:*i-؏H7{h2Z ASU$2d}2@ $,KAӜ 74]18[m}u}u_o]55݄o_77ŝWd0LSTaX=ӘE*!=_p.,%1-fj$C' ^LMHxvMXe c٭9k2T9JsrHv\3Δ%X(@`&, fIZ[U,K#&%mwU,gR*Eb&ĬZ*|ݖLIQp-16.J n.8i431aQl"Ф|Z=Eu̔IY₁gځ f),-^jL N'L=6 W+9` HƹN0:yo9iB%NƖMͰ06Ѹ'AEǣ/ gu2r{NnjuS_)QT.[ٴlXxTsqHwko|)g˗V*0,z-.>Ӻ`~%5Ԅr`".~1O4i毨P:*Ao骂pT]o$_ۗ_~{L9~Wox 70/`l` 5W?дliho47bM*d7׫RV;_8bK٬TPXO5V6q:+f$D*$$>&&1 j H8!N)m"rbFS{Ҿk7KFhy/8q `2ʅò2@>$)(̬һH`.sDG.yu9iUock+Q|e-zUtKy%1DIq9GCןaM{+a?*XD-*tU OFC0߀$[:U)83h^ZT(A&6ł_d_еO~SG4/Ttf6_V>|-Tg1ngMR``gy^DLTs.yvOJÒ"knUN*'-ŕqkOL2*^z\,Feq*V6%1&Q1Z,R\`pT*KDM$*@m k Ζw5xPd Բ9ݮ:[cnB,CZŃL*X[ZPM<-?ePH3$+J Vg?DMo*YhZG]$+3f&*JkdP܁P@%F=Mr|Ow;:ɛ<:+_}jllSE76fOnm{/`]w7Z췜nhO9PZݮrfr/{ߩ}H}һK K.4K(E;r9UY|TZ- -}jTO`=7c$0{ytM\{]~}U3gqU)Ω/ l򥐄Mkf_4{wPg]m.kBҶ4xSЫǬ+[.EquR> R$C o֝oQ]o-%M3o%tDtpeg1Vٶ4gHWH]!`m;CWט%9ҕ0Z!UvDt(%ҕFԯ~s\;b8zXDAG~4hGR~$C秀7V}~+.f:I܋E?> ЌO,:0yލ(]:.܍0iZ GeVJ#9i˓QXLU-LVxJE,w-mH{LK|? rIf' 6nX,ip]bn˒Ւ[:V2plȪXELLcGvP[W;͇=5:~e(Txp( )q;.\3LP(,*PĵhS@T݁?V1_O2VGur´Xy,ዷg4G;~:D)bWsQ$_p0wƱW^T:;b`[c%rUklD>y[,:>E[Q!;R@;R|n:eZZ qk]Cnݓ 3hh;\/ͤ&*?)^I| 2TВۃչ/7Vh^tE\}\.͈2ϥ:f[̥-WVVV=|[(.ٝJhf&t:3gn҆y=;*Dym54q6wkii2J7Iy9wٔ8><ric HM\nT&֍h^_G+aMm5,N] flPM.&8tE 㗘QgrQ%bIXi$-N:řYUWC%8 S1t9>QG_INQffN'N/ ԑ 뼿u Gta}kVQٚrCj$&eL""^=8tN@<V9(6  H;Ju>c8GjsJIzy$Je.սBlKһOk1S*۱c헪z~Ngt%d`P2r-\3HS2֮CLL>6o/i묐<nS> X O.'WœKԪO=AONRj\%uKMU"WP3HTS+Em:Cg*Q{jN3\=Jwf6e%j϶^"WPopzETSQ{c~GB؋ZHRT> {>ա c[W8%D.EmD-ǧWJAp0l\U"ړZO'v \QƤ-+ XaJJR}pd WOL?~XWZ}P=+tU"]7W@.FYjObvp$gzp1T twAk {[p&őϏ]. مt2oG2C존@T'M8iRA7KYOiƐBd#x Hy8 Ѳr:{V3 wfk4<5ؘV`Jܷ$lئEggؑb.tRE&PO=JܑPe+]ؾSnVƽo=mfZ%[3K<ױc$Q|& #)'WgJr>S}&n~"g?U[*Q+O WOgRiZW\WZNTڕgzpCJsp{6Ϫ$*8wW&Iq^cyV#Ó6(pvW W^"h޿_I翡Y~x)7+tY`qꢃۿfI|^1*r: JrOEn>qg};'f.VU UxIϟ,>%`c5ʢ(c@sf;D(Rg&x">Z ?^V5}&~.B.tn6 N͊>4J>#  iH(ʇDhaQdꭻ~1\2ÏK/}W*P{Mٸ1SZk"'hao:$K7#m$ %@IY s6job pif!7vuQJ.HZ鄒1&墍V@i6( PRxYf8r61>29I(_w^#SeJsY 0oJjҼv즥h̓G;ςb>ïez/z'g_(]De0d~0+䙋4z& էW2p`󩲷4)}XVuF\[ـ2DKGQͬsJE1#N JgmLsWa$ߟԋ0"!.<¬ RBL;PX뉉QczfP4TJF IKD$2 RjDs$rŐ*"ֳȩg^Ԍ,^ ]i%L R9^s LOwgDYq$!!:J*Ф \}/8@RH+S hD(. \H"DF6ML-7T*oq1_)`M`u1]zRU &|{0ܬbN`ɓ AA5DLJm09WF(ϫ+,F 7Ȭ};_`P!8.v`Ve%حx^Naٓb| ec p׸"sN9 s1 [y9܊ж*FNESS: h\Oc'pVzf c" ˭頣Riw^0lP=Jc,shGcrvC=E,M {v0\"ٲHiIO.@Unco|6BZ97kmgezk:G#3,K:ciTa$x]M;|0Mo"R-*7g@EdZ"Z2ڃ'Ԍ0Y0jJ~+G_}F"#2 uwp={:wM@a|HeòO`?l`1@(ϔbzSm\h}G @ ?hy(`F#<cUs""!#CdYV^0!S&aJ;f2eA@0YUGgAn+|⭧@_<PNkqIB?dl[MMu FUEb$jrGVAXP˙FDQd M0BH*Ak~$D#L' yÔq8Y$ ӄ"ǼڼG8w;8yB,fq7du61.5;?:Kx1ڃ]^լu)ba=a@?(fw\N~:N*5Y} q'7?c_EC<(9 #?0Қ,sb]H 2`)" ~RL]f9e7 u|W  R؀`9$UH[sr5}\&)$(d~X|(ױ/3y&}םA$/or{I(#XjE9!7κV)hrUZגpu ⤸!4y+wF?1D[ [ [PZF`(Y|<^zvr0snm[]ur[lubV: >?y,tƱ`zFj)mȿ_6Du|%7;Tc4^ݲߥ{*@R`T/_x˫_^||s|SUw+ $ 8ywh74j*Vid^]v^okq3K_%ĬđAg߶B=|^5#SYp@0XeX3W,DdRJOasZ*g[/6NpajcD:yH0ei8yQ a,$=^t9d; {=dg!:kN:̪sΣTK."O9֞/䓫)6#hD љReT{y 0\6aٳwMؗ{QjF 8RCs\9rBXP`P"`癱[01J{&PC:aˆ^.0I$\7u&R0(#"b1h#@n؆lp' rzܽtp$nfP¦UߥY/29owYoj Y,O[&}`W`fl9pUi3KV*М_P ⴌ~5yVYEВ/pDT܁˥ FH"%! 
5}ޤ1rvkF;x<wկY-~"w(+Ξ#y`iܸg9e|5~6ɴ`*1MѰ2r&I&X>S2,6Ý?-*4m\/>hh'Ǔ\ ʍ5J1Dt&3\Wd>߬u3 85 ťT;@!ÐI HƆDDUb< yhOKE$L ;|YL(Bt2֜6 }!K JjTI)%ҏl-&9YG[W&sQ:8ZtG E:9vؑT{cG4I 4ׁ@'͇EODϬRFo#9Dӑ3z3Zsnw;m2|M-ƽIbNX3q8R(}emH~@}nR\:vUd{7[nҬO-G앴vzP ]Wf+AzoS'(H߽Y7ӱa`x5X_ZVƛ;Q?*Nxw{K'W_Fu-_ ma[ ;vL뉘1-RsZ!CtT2ě9t.ّ7Un0D}&S^hub]w:V@wRC !^b(O*!t\,Sqʜ:Ukݕ/fwz!N ӟ"fCwn[v;DTOs˞]mJHJu&IF1+/!"hp }( .W΄y*kc .`VOR_#x'9ٍؔ,5Ή tg|[4‹tAczcz{گ{,UJ;JDkDKWH- B(ҴPtrɛ7aE8C"=/lzP8=w <~!y 'm ԲճvXj5|%/d1-%eiTѹU:tv:L;[^gp:w3b nnzvMo~4;]?`>P:}gr1{D}4b=\/UapZ,WIc#uSGӲl.=`fdyɛx^# \Ⱦsn"iזNupU *jЌN(MdFdzőr %K<ʆ [6KE[Bjͅ:X:|%7YebwA1ڲ5\ DlwB7K$WqSG/9xi~nx$S.,'Z8%[mNE}N}P\`w[2FsJMD} !+D3-:V\w%gq]tHzRks%$u?c;??"$pZk<uH`$}ӂ'";)Byjբ'?LCp~bxZϝ $Br8fъiPABT qi}5qwJFY/A ֨"E-Ӹtahd%Tċ.{ :ь;Nt'k1fɽc&; 0Ij.EK1ga.jP{O=8-'iJZ,ׯ&A7SbM;]p_}ŠmSruMl D-zk DW.ap}8Ũ mqtKІn} M?Id_m>ڬ!> %jt1餍*Ot>۹*(yz/e-;pMX{Hf*_ 뺾SwJl*a7X0ҫfSכ+[J_zMєt'aPɍ%N zӲ)4e:BofrW=ӷ7ɿp>FzMb !z4krNLtPc>=<~Eg\s,<쟽vfl$ߓ;V G[߲{/-i G8,^_?? 6Lqފ~IsQxCcTt)& `dY4nZPwG)?&㣟+~ZH^ 'Cu6+z&u  ="r`r`oOF10Co\sϱo6O 㿯uxs8nxܤ=gN]w˂_Qypv|E>'&+slګ :;)r(AzG<1Ur_^P !_?xYB@ :1X6 7e.K7UҤl $KczU@H fZs]dc^Lztvޖuyc3Γް?˘~iSYb i =-Z4nؓ ,{F Btd!rK3u_NR?X RVEm HD{!0ɜ2"%m^H1: \& \%gyrA3ỷ'Le[9L)Trήξ4'79,c,lAbdqH|RDZUQQ13x$}ּ|Jp@IIZ::EkʊZ9GAGe=%>60Ls> Qy3j՜`b jWCAmӡv`70"ˀ&h H*idiTX UVD";eY :Y "+$ZzxBF`ɣ'HEBLY'dT 2Vkveg='~ ]=Jh8>'NQVY[!ޱ#ҡ FM R7(ل4h.q1U=} A;"D37bUH6|fefΥEO7GK!]Y/ّGzxZPB-NGbqJ}eׂm6՜%nqwINm}xyf`p6^fL`nv9> _L3[% _9L( \ƌ9 D} Y73嘖"fC&yԾ#yHD~8;N$CoM 'CqL< dC%1kJX!ƒ!OOAqJ+ Kg9!:ԝ9eH 1 ^wVk=Ta3yI\y JKN4;$k"秷.?ekX{v{-.k)K=s! B61Z&݂[_4IO 0AW)l@B$QS2:*GƒMB%V<'REj͹gN] ii 2H61jg@d<)\M(efu`b7&=|AB@3u u^t.N3Ȃ.w9NR?85(1'!d/0!zU;U+:;$n漗 "&&cD>#Hi 6Z1*DN dttlP</Z׾bߵ﷎mgP|"w WRw+6?5ִVylhr (E#N0n쮓q<ÌbL IIvjJ[d L>UuԹ^oJߍiow>e'-uGMAsmaJ*8!E |a)s !4ׁLt53#pWp[.v[j[ݮx <ܴem,Q*.(㤊PxI  Z?fTP*BV70Q)4޹zkScynJd*#yF;]v͑pp|6f_Mz&i-`(ؙkP}ΪSWՀ?#gL 2Ny U<s[,fwB2ޒ6 RB81hN1,5aZi‹QcpFwYM 鉻l҃ƞaxGN-:Rl;s,c3vemMрɃ/q:c9M3]4htFΊNCZ2WUhlł$8˵VzcυN*%m6m>7CMoR./DE8fIz ts @>m+UB3>9?qa3Xtt'v(Ҽwe[M⽗&N^\ya{Fswڲvy$a`]s]:m?l#a9iGL:Hҥo>g_0xt5h -TiYYlyy#x]@Q?Ɵ,~voFШ?ųIa?B|߾v?ꙭhZz|O Sh%2E8 ï$yp@e;+ݎ58 ?Kw7s^=WwD .>]?; ﬐l^po&ݕ0gUzɛ٥g,r{vowdF=@d&>()x啰9Oϣ*o^'sog_\\mr~BECWX Zvttut%TGDWXh誄kb+DIDi;:AZ+:Z˥%YM9-C/U-YU5!'yƿ/W/h:{CiȔRz{}Z3|&Ǧx=` H@YIsaL=hk^>Ҧl {~~8Ln\`.DJB8NWF/V^5?7) ~WFp7(<"|_]mhGL߾?]!ft] ]1í3P ]!\fb+D+hDi;:AFjBG_AS"խWWҨNDWX "\b+DZBvtut%%8@k縈z߭ j[I[Q=ϳhXvcnj4@*nVRA%pƙIsy*JLsin~YSRn@ϽQnu.,2$"h tߘ 2vBS::V[#+K."#J џ"]k "#`==JvBzJXjFزc׮ GZ[e˜AQDGWOzJ0,"h r ]!aBvtutň6&豫zpXz(ULt .5t(JP]`y4tpm+DYb;:[b#+|Ѩ+D[evr0t%yu'6-/)+){(`* ۺFz1%v\B#%4[Ba-uLV{VbMwPǞժeʺb<)lwRrL4O3i ъ{rRv)zr6l!^7؛\C+e QmJzGW+È*NJm+DtGW'HW xA+I,thUe7%+cs„BRcWuJBxM-+YdGWOzʔ%&"BBGCWWX k z(1ciL1K.c"6] ]qNAޗ+T,thi;]!J::A 3UBWVضԤ+ɥ<"V8V4J *m{ϓ4'x9" 5 }I9 ݢ6'A*l# "#\Nb V^7( )ɌnQFl;ݧ஧?hp 8Rһ;KuxvԃBd.Iow-~wO`P8J `_~}%4{߂&2rvqycxߟOTea<@[S3\@_\*ϙҐ`jo_Β"FqzVOJɊ%yY+{eo˿ }.8?_ '@L9O1G?_Tz[ԚjivΚ5d7!ini }JM:huK=tR*ELiM4tp&"hi'!J)ҕ\`i]=n4kUmJ] +cիs"dcU='ԃkj}KJڲ T R]=V C.ձe+DeGW'HWL"UDt#RWX w8Һz(iGWHW\RIDt,жlI]sNkfECWgC{=T+)lݺ*%Sz\StTu-oԃk !ZDu,},fÿ+}_RV`L*iLB3PAs//w*e |jb){g| oGF?Zd/B٠G)vs\?q;9ޘ*|=-N'S?T'i|6[$lS/Ksv`o)q?..Qo2;ڽqqÀ3`a^،8ÜW:'l2߯`Y18J$>\Uz#I kr1zYƍ{c fyЂ\nWxpQʜjX|L%Gu2+?g-~UL.!-_/7S_YX&WWCՅK=}PV H&/ͦt] eE Ze]'C6|۟d`6b_$ %(-L{b*$z[Hr:uQ O{V ~ z &WM&jx|KVe^͕0Mn9NR(n;Y3 2=F{kud8޴:U﯌v:e2[K}l;5+|(O/aa͡{!-JqJm(? *RJ-4jM*ym1B۔§>=E.-rf׃2T\~d0>'aL91oG:KwX~V7?? GjWbNlvTc,=ٸzn͠=Uu㶼'k+e}07& ^]ҤRoJG&w:ҲԐSZO vuR] +zLbZY/IZgR+e]%c MKZy;Ռֺoq_'<1:[f8,&OE͋*v;w,glPfVFn៥od6 aHh]L,\TS f6`*SyT(Klu O'u | 6W1Kl=dq^sJ{ε/c6Z"!c2hP2Mrs,2p Ti!  
NMCS[.*(e@'e 6ْyҫ[c>/n»Y_aYm|0CfR/m2C53!Ռe|A-X XG_&Wd/2#yj^PO= ,9\:%3m$:w2xY 9p#n x׸s:Wu_AcQєEv\ykx9)xoi's/eTt1J10W>xM4nG7";{ܑoMҕvHJBT~ +hz&u83 rREÕ"Azd7OZtZHƖR K"i|tb DN[AxN-lI{aK`X!2e͂'9tQ1hN1,5aZi‹cyy2/y@e&+R6QJ1q$Zy-C;6abmv|pt:z`N^ /)T(@W%f  wքJ啾&nՓ2ݚ䪂4b4] jE30|(:rS6O1 %9 DN16bB'i3 ׆z+Z7yIgm( hyROo<-Q(GKy!]~gF.3pJNe"mt W Mm(QCNjvdSڨl|~NIW?l }ٺ +QFk78#k_Dru÷suDuou]]$J-mq0ϛjo٨"]z=z56F0-$D|FT 7*]a&'OƧ㒯>o7C;^9jW#u7qOO+^MhIalH4krNLtPcFR#W.(d|{~7wK8*_tLω$zo?^|$_ߣYOTgOzx< |} ^6#h%%VFA䍏>6HL lDFfEn" =}EpԽd6=u]/H8z39O_NƳqO_ i[e~]yȆ2~ۛ<"|uw*sX#[o2X ?$ʹG.ƟnqZ~K±nLsA4߰0SR]@Qg67lMyst&Ϛt<ٰ@ݨ}Տmd4&&݌eǟM)}.7haw7lbީnY2 5<g_7T2=1Y񤜃H WAdCtvr(&&iyTqO{9侾ȡJgB9|^c}gfE R]X'f˥E/ȹ˒, J\-r <W tRw5޸f;>S:=ok|*vzaIo}xO2(cZ/|ӋlWj10ɜ񀤓e"%mU^H71:  )W)ҽk^>% " HHZ:ZbeAFvA7 '<%L.8>싕ZORue+u*age[Qv5ta˯o +=ɂ㳵Sn=`kgj-邵C?XR>nJ*bwrcI$9IE:Ԗ12+]is)K.z0MٓTdH k2TmdFvdTj/ ^QTmvQ45n4yw~4N> ''%J+Gle4h$=Hrdrb.%k :#b 2u:-C1BL) lJCء("'ɶcg<*or̢u"g;b,-}Aj6jZ(Lt`6*fd[ M ROHPYAdF(JSMSWiB&͐+DK7OȢXIq$/dT 2V#g;F"ʢc_싈2"Dܘ/<9"' 娬"f!Uf=#OQjw 8cmZIAiP* O)!֔b$KZ(+* 2"V#g;"~xpq싋2.\VE9T֬tcVN' #zx6*˭8"ٌ.{sS;~?<<mI'z{Vo8A06q !TV*qn Ng0T{I!k I(fe1 rQ*О@a |G{v)Eitl#f[eqqL>r >NHe2Ȕ#,^:u˔ 0qMK5x5wK۝-M `< 9mM[Ungko &M2@V1REgdΔcZ fIs8ޑsgz"lWFgc(|kyR9_ӟ QI̥)B8%6֍,gYiC2xAyg9!:i99eHNLWW#g哋R^crbWRIB1YK6.5#۽oqNX^w7NơZR9TT! .)Ҟ{rUf JMI,Ie) GRj4 C Y}U-UARz$)ȸ">qM-CϥlJFI/3ԁ ERH٤/q(}, taXȂ"w9D-'l))C"Nzʽefzy듌Ezy/EI>^d=`2njQ8 dbȓabcjY; WFZ:m78wCVz俕mc(>A+ѓ&Йmҝ>{#/]F ذhMs\G~uW^c,^hYV\LΦs=h^ybҶe^>nD8=dpy24x^}iTbl?f$rh\0d': X+Ng)51Akڭ:#/=S)IT@̪6"3S\DcA &L+MZ\.8_7JX_z}MŻoYUPڀh$@1'2,K0h7H,m2z*};T3HcZďO ZlzTR}? j`EI7f٠dL.mȃmYnyՓzݣz݋"kɹ$V(;4iz $V2W"bښ]Ϻo]i7-g3_>yK#7d<0ץ[NEog7[zCk.1t:oOc?gz UZm6/U uuۼUWIyysL{}714*kkDX )Cm" uMyH?n{n/H01 D37Ч"R"%4`YwM `R:|y";MH}SC5mڜHǬ*1ZdFwqH\`i"Y`,2Nf$e OaYҨ%'=]umu-!YuR>{#1^XR0ǖN0*[VWxc/ED} ~dgHqƹ\LFx6 |Cs*bcr$ȘXwhV:?I83O+ww\]!D P8+'Y `R%I|VW!Y98ݔĽ;^]~xyM5ck2AZkGCR0TI,Mfʝ"lgeҲ`g|Ȧ)m 5^A$e2zdC>"8ዯ#A[0Q=Vd*&!Y;k\5Ú2oYgxv{G8YnHm6y&NT ӶlYHkr (ZYΐ!r&>0I5qe4|tR^X (%"j S$|mOՒy\JH=jWhŦ2%zj`ZBѹB YJ:U1Ut*EFd2Gt! !zf> |6UۜE9gtMX9J:BMZ"K"MGO8/!tV^2|g/ ;yI0U{~yOMُwhڌ4[6n4ŀߢ]+@]򥷹V/يGKz}{uǗzw"bBlDEh4%IO>'Tm D@d/HJ]yMV^[`C%5zhBrTfS1$1M Y`.I6Lg&_ȶ<2ja;*}խmym;RKG7kp"/wgt6FZD`'0G1aUHi_.#~R{ \Q[EL!FKNEr^Aqs(RQgw%qd*B7-q]Ι&e+Ӫ@Er6`C疱Y Nx1j ?=[sNpS{.&i)ҥn/)r{ղ {a!VJwE4mFђ.$\"c!)erMb&Yһ$AZ\]ZeX6cyzbY>Rdih^E$m0.f /"tr w bGH/O>h<}yf/.*b~Ĝ`GO'P7?paB}VO>yJut9^Ȭ@ZbI0 U`[ Tm'wok!z~[誡~骡WIWT-Xx[誡5OW ]}1tn9|b2NWw#~\Zo ,rw+NFXEtу誡UtPju,UCUC펮BҖ--xUC P]0Ua"j}npm ]5m:]5v ~te KkW6]>֣}]A>~*|DQ'́Bw)oyYFN>BVw[[/]Owzqf8Ǔ?V; e~2t׏" q󹦋#rgrJzРBq] t--ZM7{3==ū P-.T ՓLPo?|B+xNOvRs`㔲zslohh?pDA;#\/&ihYl:]Yg]}tȭ+&\m>]5~GW_]}[Z*4)m,Ҡ|>=:j=&_9N漷$#Ig0+tˇ$QK|^8uz>p*YA֒K[uT+M:IL !Mn2kD̋aFP+*CR_ݩhN̽lEݼ7]xQ\u)qU=SGrT 5ʏU}ay.;CeZ&s Ss\>SuD_"€AY dDJ @B ,})cN /TFJ] 57Ѻ ܤ颻 r:y.Ր,jLQiR GYUSYIț")!I\n!K7A@1U)+!1IcltLёDI5u LG},. G%Y:݂6Luas0BҐ&!B%c*0ߦ7@pe-ZetȵWYٚ$0JpM<[H!= .@# O$M9?= \`'KFVJR>c9Fc PAiUNXɪR(#s 3f[T[. ^ƠL 4}1p\ס-rތ vׂ>I+v14.LCR [ ^i]I hn]tR$lȃ`\ #+ &D: H{(!xu(+׭FT"WѸ!lzS \mpZmjV|G' | s-2|m+ު.E^h70&.x ЗUt Q@R@"d2{<:B&oCUQXӎey@5!\ j-lchdEr[Hb94A/:P ᅝ ݡ٢RXjf'Ug=kuC[2!s6,ѳdǀVm,2aF+vRc :AYJ>9FXH#J5%i\4 t-c|5c E)o:k Pٗ9pGP k APX?˹=vdС /jW iqc:`%p<{`vNAk6WŪ˥7`ebJI-bJ8$CdN]XrI'.H56\~@HTX0B= R˥^m!:]ōƶL*&:ldxlj0gon|'RW:ڈ]t{1/fݯig}^l4cSՌ( <ǒjht~$PCi9%0 d943$%( DI JQ@$%( DI JQ@$%( DI JQ@$%( DI JQ@$%( DI JQ@$%( DI JQp@hQcJv\\h@@? 
@\ў@$%( DI JQ@$%( DI JQ@$%( DI JQ@$%( DI JQ@$%( DI JQ@$%( DICN pS]aIgI$PCi) tI )Ҕ$%( DI JQ@$%( DI JQ@$%( DI JQ@$%( DI JQ@$%( DI JQ@$%( DI JnD1%d%Ǔj a0JP]$%( DI JQ@$%( DI JQ@$%( DI JQ@$%( DI JQ@$%( DI JQ@$%( DI JQ耒@;.0ڊߎ^~hw*%rrCL1Xh ׏&}p9 CC.zgrDtRzn,tjtգDWHWQU,hU {Ad]}3t5s/NWO zt$zcدJWOC~ѕ{]9v=wV3"/CtZ6j J+kkgFDW UɱUCtDWHWs 5W 6rUC "]>"`h1 {uPJC+5k~_ZHm~;Zk!ܵ(j%q̗D'o|% ?ͦC3):g1s5u UHmv_^ e}$~O z9ɯQ 5? 蘏G (zF|zZz[JtT&}.a޽ڥafmX> &I?:|;TǧEzBc~+֍o,ɉcZ=xyw?89?ߖryr\0`z㟿{kQ*]v*1E}WE[dDT/9k*n6hPTNX!LݳNJ;|ȵk 5'ݩg!glnNjb[hɷǏ5F55컓D3gGDW=+ȏ\BW NW=JOtut_5+]5~Ή:ō袱R%ULo;\n)JB.ѿ,GҢz|W_DPh6ZI݌w.ޑY$^|fʕ=#BӅA0_jśT/^)D)|ߡO t}k6-[n)kwyIozaWW0z ˓1??\li9Cm8^rҫi{*eڪ_'pz?%o;moÀ <]b;t{X}ͭ_OO_Vo>,-gVI?,o-6nj3Mz|USzY1)lf۞Kl1jJmڶzn{rgEFU.ӵrl{3nYY==PWokzCLR^V>,(.mٺO~}ٸ(WE`o\ertˏ%M%>:iٿ_N&(~7o'L' `cKV.JM/薯RKn䍃ǀQhmo\azzyVf%~Z}I/U3gZ69]bvyt Y}|Zۜuiklw oQ%ݿ4(\}vGs;7gm1 _ta|^l0쟸mT.@I{3h]*ZN _` Ӯ%Py?5/}V o(|~|P7V|]p]7,: 2_';?ed`S85fb~|}Gr#3IH:?p*RzpÚ"R[!B!W^x53ïq`\U\rA-ܡI9+[*<*'v2u;lxwVgI!dLL(.ޥ"$4 ܇Zp"+Xr"ecCv"Ġ®tT3_Xφ,sR:En08 GnYϲgҨ)W`X+IkkYx6܍nɑK#64:zV+GBZqV5%_L'-Rv:aeTlSlLɖ(-f(u(L9.Ӆi&'e4 ~*}dņN:'kInT֗Y8+F_sۻ|NX89%LfjP8w'-J ̍dRR<]"cҜ-?bt䨸9lF7o6Q}4ndn~ݵίJ{eؠb6BZd3Ԗ3j S8F*[mo.ܑ5#kA\ POI.9msP@BU@\ε $543afN ƅUs WZ..2޲Bpf@'vyw>p]o7We~~*^wh^q 3[v%'m*Krlʖ5Fޥݙq|[l-(Ji&rUٻ$FSu Pms2t6Yv([Mv jdv* 0+6/Vt՞&Wrq¾Y!yuDJǮ -H z },3,P;ah)+) A(9Ke_ sg{m8/=aM>jcg'8YM "2,^)g袉.yBY-G%Z_ z CjQOBԶFЖZ095-%DaEvؒu݆"qH0*>uvEld'qu\ث-뒯.Cl)1ǚEBmQ3=Lvqvqo~zIǾC=܁ bܺ$8 wt^yFpzHޏ:cvKnQbr{||Q;y̔H0A(WUZ<@'\/Hgg9f-0Q6G }p%=B*źudʤ`BQ0 d U!j̩-=;&P5h' HƨX] ɀ/.U:r¯N uJt~ >&H% *]}_yB;y5k_wwW )=Zur*RIkO[He(VDߐu1IR*$T_p=U>;/l:6y<;_\j.']pJV[a 佇 B/u&,!ǖZ VM&XGET&dd&)LiODr*:.gEj*v"TpETKғRdn7kbX핾 z|uwso !-6w3^]3"9;xjVFͩ\Q{b-zT K TK !"UJ`2<} Ng,s<$ IZ@#HTsK8p1#6V]X:MWyv&=э&hp|d^E1皒p>x@i؞g ^aě>BN SΟe0`O:̮bqH>W  %low0wJ~ 8*&T*m* œr *oT)Sxi~xv1.r*ZYL8=ۓ& / @ w;=>¿VEBywk="f?I |r'^ΚҖcwԟo?~ E8s:Z^O«h~~zv&b~jV-f(xw9 Ɠkl͟klƫŭc%*ٹ9ɪߖ_`-m=Du0N|ayYVLRF}jᦳ#z'ó7bĨQV^-[%4^÷W ]<P",!i/sU>X755>#3{+c_oJ~Y<9:^~5G䳟O|uV3A;0>a~&C{*o?aߪ/.9,_N?o^0냯yl"XG+sI0zb϶*fРH!]~ݴ;_xa7*^fxzꇯ{:W4=ڬ!F"!씉اc lm"AeݔIJoejuv\ᎇ0һU7 #JK>ݝ.lvnT9pCSR/y@c%=ΤM=t~!&"z Z.>[v.e6]WG'Jh]-qNWRQhypٰ\m(q-9"~h!dlmcU.=I+T/we6bRdUŤ8eļ)mK NlfmHT d&ȯ9=ۗd!\q۪'|(pOShGF Xa,w=A3Cldmc)xJv3%d7@֔R>hEVѐ Fp~,~ 6ekpѺN6>O>n7=OzlcsJ0;=ˢ`d ;Xr!۽+w}\͢e͝_ƺNv?VUs#qlTݹ xy}$&]H:r!($=+J~ C*V1DwD㞓w$=I{A eAcɑՄJ4XkFIm$o!Uɷ\u.TU (r但s]'Fߜ-\e7ڸor{p'|05lWe~%#hWyc/.jfm 災<#G--X+pXcw}y2yG :g 6UQ>P͟GWY-s2 :Kn$g `wbi*hJ@56Bqjŕ(-(7Up!U;nَ;m>.",1GkJtHʝEnftyvk7_búm;[%{k?Jt 2R"8GU[tL,?e"=SW]QHuQޚ%%k=JCTXjTuupn)4e4v"s)>zXDK6Tc"Y ̶ r ҄CPե&>:qF#SͻU8 dR ̾ _^-cu퓎n,ْAFy${9 E!"]2푎[Ǿ"u ^HB׮C[mpbдHˁbYy+/Gw{%]48v(!hAiO>eWnNj>@ BY)Ά*6( J!@%mS`AgRAc(2q;yc e7;7 &9b+BWX[E RE)CS>;}{DȎw kBeW<\^/܈b/ۉ<riyض$w|;!-C KbTjjCЬ&>>XcE 05BMSZ\6:8a6)ֆ-вvF{m%*Dt`B]4M)Q>&,F݆]BϠR[xcrφ&Á?7Oy$^?kHCF2pc/ͻuA!YZ! m@eASV)Ȟf$ Z*HcuHZԪ0l:x=kz?s\ޘb4wMOjC~{~8VFMP-~’TQ%atWڐmxE4e<:A~ j%5&@֙}&9eBpBfQ*F{~v g=X|V#!}#bRi.p %(4(02Jrb2vpCfCr25qŕ^{h5b6)~VtG҉}+Qʤ]9bân+)q]k0 kIsiI^Dsdg5EʨmC\ C.dڷVRVm4PZYLkY2 D1ѰON%ND1PȪ|]g݆sK=[=R3f˚B_]}o7* 'kŀq9iS2.*z#|y4`ea&/B6IM|աk|A.W.(3e=cX#6dSDbh.mp4xyz:N[l@,rH 0*,%W`g[;-.N8\ ПT"fWP"Ebڎ&Zhp('XjE9!">t&ZbC8a0q 9,ax:3RߠlOYY F0bZtG6QUַ:VW#y($ SI s`oCl6:gJ)*-LQQ-wQG`z R.)S81 *ӯd0Fe?_7_bxMWpoeuW $%y݇o:DӇ e,U+IP;NpG4jڛ6MۤiUg'A~{]lsP!|NJF/_u>@m|?Z&#~>R`#S k㊅L !X)ƄƆ1u@Sk >so6LKnT a 6ӈt`R7(uỊv*l] 2ϭL3F_I6';_GY SfU2a;ƒ*l'}4 i cICf $b'vŤ/ʨT#<`F# E. 
8gP)y7rxwQRq0."[vtP0((eZb\ :OrpTo"Ғ!-I0e_9հûjmr,tDu},>.hY?m`:QӷU3G=g%uyEP{gw(怜qs7fU Nݴ:Y`qũh 㘽z$Lz$OPWVQWRF+6WWeV]m3`N3H]%5urDE]%jz]]%*j+TW$=RW`F]%r9u*Q)y^aH]%8@.A{ &jjJTѪW%pWW=c]%jnJT*٪W8MH]%h Jbtu$3*t.br9[L95a|t|nAU|9a$',C#DEɛl8E=f&gc=bC5k<+잍L*0A36ˇ eK1/~2]s]7LaP ;ة3s 2O`Ѡ).s-c qSǰ/>K(icU/>_OIt(]L1Yɛ(&]tg&Q+}P+R3Ehmw1Rڙ}ڙ]%$k/yӕݜQsR 3|If\$oTc1bp09sH[ ΉuX\ GO W'%*l^''R%ڟcD.VzJJu)3 {\Qˮp^$ŰffSn/1U\qߐ/0D3,Hn)eYђtߛt!4wKgؗ];+0b߆&u] ?ygz7𓁿I{h#J?g0ԥĞQ)e!45hɥc, (QsxRl-a0Լsz?hm6no~-fuu];36xN'fP9X.70lB"*gYDx#NS=Q+Qwb|5w(w73j=wVfnړW5<;ywSGP:AS XrR[W[J|6r9ݫH$g=!O7i^9!S^&%7ih 5("e3ӫ,p *!({}- B/U ~v߹gF|7v(Lʌ홌 Wa<2C`Be$RA A9$SNpgVFX"hb rRZ㥐FR3XGm< qW 5vQluKk;3뱛қ|}0?{ ; ̿NuG"0D;fg8b(ABqn:Iqr(^o>tE[՟* `ñ|l8@qP8(ǘ>LxUsy݃םߣ?nb f >"d3GSi$`*Ml͜ 79+"s3m5bOh+ZuJ4JyDY~KHxkgNV9zLG zg՘'^J&Z\OKD`mlCX&|Kxx[wSA3R`7]h:kk]|A뷣qu&p>Pd޼ոseVGx9M)kI E(+ UZ/Ȁ.&os(]?뫇*pYT%s(Ouj zg|As%p0XSz Y7A?n;;+@ބ27R=wk0+֜vZtSlB|s:_Y}r8ƂfAd'!y[뛿o(B慹bnədR!ƸɍYDȭ<$ ʵN9Nh.F8!1b52`f<RPƠv!bLEJ+Sy4:=Vʩ l V@AĖP\!% 3Bswwvk#a'}o D^era?w^7V}H<Z33lE0W^}!{UcAH|Rag Ռ5#1b!SSbXm>WfiWfhd隌vC%VhسhEQ; Q ڝE !Ie #52(`8-RLp. yfͺ_{>l0Rc F*0,CL`& kf,x K;X8pOtԩ6U-yV 4HR |sc-aHK0G5 4*/j <9+ nBőp.uXERAK"Rk@!2E4N{,!|-,-,x%}6r6@'7ͺWuh͊M8Ua@zwN.FW+|#rM^biCQ\ys}&wHsU n nM n 9 [*X0Bdf4Zcx N@)w"BM @>rDiw`[i y_5uVLˢ[} ΣQKZ.ckmXe/H~0Y%nH"xM IvdRD+#$25Ӝ~LWթeN1&$DGID+"&B3haپ)mA'm( E^kt,Z(B4wSycH/+.^XZQBI਑;"S"C0Zci^5t2B7ew & X`U,"*$ H.I4&6@H]A:OC7F0hDc$tiEfHt¸@3d/ dHG9 ".&gU Lzyuw^Q/Efl|3z?aZ^Y9SB} ?2YToSa7f'.AhO=hWA^?bsE} c2k>L.AG /F 흑c8<Nd tl5DJzP ^ *J좹#[?5iM;cE)hb;XJT%bWJ z pͥ2kdLOU)rK]˓kɄwm 8AiaҧtQKkgDS}euIs`ifs>i?>ǖ.-ć8; AȆxI~mMBtS5ls5nfyA0U G0bl':U|c\]묓jWJZ'y]f2+8}+6 TdOh1>>ٷzhk?WDc9?,~80~ݟYhӳ4yyK QSqh4>GT*?]E蟝7_&~ѿ{oQf˯M0wnOwjdwhgUT-۪U͍ئjfߢ^Ez\/jaiTa91 TV_P&Td7Vč|$A'#@ʨ6i$ⴥQScP:!@$s|^l)?wqyҡq[ oTNo(y΃ dRP|"J"w)#:rSi%3ou\غ{u:ra5>}0Ϡtq]";Oc¢ Ky%v]8A$-t ec1n!GudɩR8fKc<) E܂6&t#2G@;$bQ@CyGOINIЖYǩD!`yUw!$B&t85׹qT*g8P @mC֜-}wG>>?/F|%n acXI]f)!sG6nxj̃)1!B"CT`uRG4] &5YBGĀ].>EMоٯs!Jt?6l02,Zd=DH @U1D BDuZ: ArR 29XU [ 1 SS!bk͹"fKa.JK?5=;$}_ QQX;i&IYsԁ̗<ӘDJ xIR ;mMrKD5~<= lEŋ IXDjQ&8>I<`S*#sI ctGq,8Q|¦&, f-*$R8qrZsn1+&*ޕ (ЖKM F+)tKwI9M`YT16)Oe7z =,vQjAk$*|ɩNEDfvL9řN)}VkP+LIJ{4LWh8 C)(DՄ!u3IRg9nuűSi͉',#`L8R1BwMZϸD&8UUzu6˳Mr^6[jQK@UZﭓЦjB沵y>,g>xo1mWxU_E^s?U0{QYu3='a:/+^6aGgc-뼩~iP<.; &-B?LGlce0'{%MR8Jb*3#K10!t.'l"II2:)'f:N7sg{\A&EnU0P7@Ƣ_E{ЂEL16ᅧ7~3|Srs:^~oOX0:ʤx(O`XL\QGX6r}M~7? 
Gud.WO5?]rԷ1+yvY?q,|~-{d%Cr.ƶs 1ZCZ(f("+/5-B -;u) 9ZKo$A0tI'җ$eNj QzLQD@NJx4$hbZϸ'f'ݠw 6 シ~rW]3ln AGzRrJ[zaF8;~IPja#u!B9N6NY6 Ь\R)9K% )DfDnQk[oo5bVmï%t̼df(ՙ!s!PP*Ā$jDY;2'NfDsE+Bf`fj~~`` vx1`g EgBXs\Lhb)"$5]9t>&gdl߃稫nP(>oC#;*k$zM^ڪR-HJ{ Sw/_( FRvHuƔBYz0JHa$hNY%DZv͹]VHOkvFJ?|A }8O \¿B"yq##43_0Ĥ̆XB)b4  ]$Ly㼎KྒྷgsǠ}\Lrt>JHM1Z:XgcB$ЈX-De@!ZGԢ2Jo9A_9x6:fʼ!!<\$DpR̾&)H3[Ťb 4$I +&\5N1Nm@Xg<,y"]z#g\/# +DۘP*HQ\m%6:w`lŷJ/&$q IYw`z7M_Zd1Ɔ5MƓLs!/嬎FI aIJFbF) qv3OObz6N<5yddjbc&mr mQbM2p"P;ut0ŚS2+b[p˧wߒ(Op SkS"BreχyVY>y7.,xF~9 X|StÈ% 79HFr]9r{Lq+jR4&brxa9yKpqX=$ \}l[y}󾠚~C/ҫF`J>aL]ӚGoDQ"!JÆtNwe,f86 I ߿z~_d~ ~$o&/zaۧYd/0_P78;q8pSME/]-ܖZ`C@GTA?/lAoϟ^IZW*zx4ѠכRmr=7] 7m\A"K5|sKfE\B/%Gv!kF!P]eZ2i\K.#B*gUp,"IJG?GYY|r]mS#G+|MCA\سXQ 8/_@H4=8<RUYOefge|p.\f~e7je]*X4u}SnM95mvt"Q[V,K <.+;<~f'2E8;_|(f`jj>Ά e\c3Ϧe0w %5M;iv9?U9'y]\g*tU#?|͸:Y߭~=jO짲6kfSj+)ʚDd1"En`$y[6Ӎ{35z SV.{ajVƒ Όh_6q5!4B>?\i+oa tUQhO`6Wa_Ht|c{P'x/EnWɗ`/)=S~jY>ͦ#垴V;N/ TqaVuGnmo}[E%&(o|s@GOֲӮ|c@\QnWvG=Y6\3C"wSayT!z+2VF22D=$Vވ T_^gK-pviec &@r) 'H*@ ajDc)w4Uc*|@Ly@z^@ONXnM6vcyX^o-ꅝ' 퓄ݤ@R^Q %k?!rr fRU>"|!O[}F" MBP^8@l}>s_`Թ ָCq'2Xl(h-`#YZҚ ֎ y6t-^EBu*2DHR -;Kc&zz-#chⲉhYm O~?H]ʃvS˳aF‚ Dc(Q"`j=C HzX&3dHj0M>P\QQ8H$*Ű[)rm'3l 5K30OS*wXɮV*oJ5Et1:7QI:lu~ 2(dt-}VީF7C_KVY 6X:e 1,'ɠD|,VA.Q1B}B))&@=)QeP /@2sf"TǙ=]5.lM2vB1paQz2iܜmrwUDjdvܿpf:!aQC Rd8S`rr%vt[ь3*cSE#aHƞQk %6`V0`!#a:0- 9Zf 'HHP@q{Ҙ58ai7QˆǮ([fD90c! ZD(224Jzoc-ݢF4g:S/Y )6$ؚ4(&LR#1s:2#qF< 0xhǻlMJvE2/^|лERRgF` !줌`?zRbmCr9-6-7xμiK:‡;B| {PTuЫ#5x02 ApCE?jFzfn\@ygi/+ڨ%zJs$5"YA5Ra/PE5e3]_JٝZ%q-Ezv/]-K#i*? u˝-dkߋb'kPnCVR& 1A}@gTQ͍Qm`\:vF6x=\;.A/K'[2) 6&,n&ŏڞ]}΋2_cι`1XOb cgmI 9Zr;Af`ج1lv)=ۮ[RA*y[x>[cR;ĬW逰  xt7r4b/| KcP\yKlb H`qQ[ÑLKes#{sL(QmGs+:2,"EqGkZ )$Oy b/W&ik<ھXv!~?"XWJ}cP1HdћKu4>T( ̛ Ueژʿa3q{UýᓿeGf8`9σ+OYhyU6ަ7(juQ"DKY RZgy ε~F7^/ }6ںZ7mn[y1U|=,'\qWrCSFُGK7JV|T_j:h&U`r>-*A٧+w,U=,f:`?i̮fMmxy'N=G"NޜՖpVh1oCWۡĨ[t+<ծSG`zCW }VUB@WUqo誄BW ]Rс!]Q]%?Z"NW %HW,2R=XwU*U](@W8U#c0]%uJEzt%QW/Vf)G퐡Z2thz2>nA:0e8!?/1US9ɩsLr3F4 RHV7湗|PT{V?qtcR\N>HRf`b.4GOݽ'P+]^ꅋ)(ߕ!A%^6~k%}w~Oױ; a'FoHp1ސv+PR1 Po\2'lj8Zw^ %yOU';i'1qvnEKXK'Na k/6E>_A" [!aJe@$ҜgI}nw9W\iz +mqsz Jq Tf?UBK;OW %<_]'N=Gj;1]m7оt& "[jשǂSN{DW F+ ]%uJBztEGtKJp ]%Zv%CbwHWT(Ձנ[֘\c0tU]CbW8I]1_#D[t -1P|tŹD+AP N9#hAV-[p_:8:lZ& j>k\[Ƣc\'8{΃els H^|waV=C|*sY̙C:<*2+zȢMz#|%|gH$LrJ͙"GP6;˜|^zf7j&)UAKp-{d`Axo,ҾXr -%Pv-~^Œ)է( ]%DUB@W$Ì[NՑ!F`_8 T,쥎x¹dKR$p-B^iQ(oO0?OMü7O1Ջ.?uJ蜢o<)Ŭ j.di1yf>E`fU0#miqkz+0P6ֲә<#$Q/˓?/xn&Ysۿn6t]殺VR(nZ,?gӐMfL-{M1x4Nyx%߲bY2I\V vxD 5WfgWa ,':PF;)Le 2iyOM |OMK]C%, $WUUU=_4la:lV6vf.<_'1kР4- Ls&S~qbo_㔧=9{a&PV9= ̸CdF\oG_g'+Q oa tUQe7i17Wao>67O2'nm4 cj [yncgZ8Nt-' Xy) ,M y XR+#K^</bjnж<|(]UbkNǩ1۟ V@d6j(C]Mؾx]v!OǽKhzzFBH^Oڹ%l2۾-Ѵ}J6x8k_xNܘ@ OaA47rhA<7Z}>nrWlg׋/<86a8np-ZU2ŁΨM:Ny=Em. 
=-bgN]/4iwnrLܿey|rEV's &YmGzDt &![3^|PM ߁7pW\苍 yM̢o"ğgOx #gM*1cY֚ 2o ,Uk )2`Sp*FH`Wo'ucliL0y`M1яFkuz28.bi<.V3,}]eWug۶iVYjjw9q`r7t4r8?^ >+6_ÛH+[ZvW: P>3fwd37W_WEk`FWϋ^j|'QGg-MV3s]!Aeb`4DXctdܐy>AL9?aF/|\' %,e(lG2E%0z9y,H, RٔxbUkib @*Eq䂠gr&s9x'S%xOʀtLkPgho<>iN[" "f>䨭0bЎt%`$$Rժ\?@@D/%"HIJZ \1ʊZm:?L;Ae?O8}8ZMNNe2K=1:0v2eJao+Zlm[SǠۥh~{Idj1Fs $,ؠcIW9i AZT rNE4E-ޜ}ḏ$Q{t=,G0ˆyp&ʚ(&:FZBFYTr7FІd@bgB qHmĤ X Uզs"_#=@v]Əj]eh{Ž}KbC)o;^Ftx4&Lpc Xz8cr|ovl~jIǹCw=|v )jխb=(`#7~$ᥢfy 4ɸqtGɘR&$)`F6Nj g!*ޘ>e 6^3ќ]ܣ(Ui@wY,ߗAϺ5w3>v=*'<r7(ҳߡ.Q!$hMFL@rBkAHI+0>LD.NEoS 9nI޹8|L>@S0d(;]]&컩]]{]]fkܹ׮.]=֮LJ[\Cӊ@1~lZR4DhfQq(Ch@рdqFA^U9j]g˳\pb}Ч*Uf+\{ Kk9 B9d,W %l)BV'7 6lEU5[틭jclvp-#w)1@kҘ Ty Q9@<9n+&m` 97^iSJUqױѾIwNK:2ZeS1 LC482Rn!3 ]DeId :R*XV*luLKE,B0-6LUa=Y摅YDqu9em㵷ǡS$s%!AL؋y[9QhjB| 3r~n#$x@_Ǿ* N71t LQ'M# x{;4ZK4=Zoo}, O5"l4 o9Nnsi|aNY=kgYs(E0J&4T:,GF M>P J9<AU.ߟS|n,pz u$Nm^\̮.&Ѷ4 `U Jf޺b)_;xo7w{yݸF9e-t2åd/bו^U?M._6:[ >Yyb5&YsqsΔU/drA07` $' |" ^2Úa=InΜ#H9 Q ʜ1x|tii1s.Qd)(M.q[NR6pƈ.rK3(rJޅT Ykäe}Q:DO0`%[UW%ghfכyBXeȘ}#j2g`"dƾȣgd3y{g9$Cg[]*fd_Ε ~z+T $fڂ0BXEV,䁳Ȍ4%qҸYNlt;H 1 ^wV5Fwm ˸a %q5_o #]iqK|ۇ-o>nmf%e),$!gqy476Wf)eUu"$\$zj uDT 99jQ[m:Gn~ AzM9#MAƁ䀸A8: QCǥ6H84H'3 <6i  ݅PpK+cC6ȹ 9>҄š}v*qSfy^uѴ9Xͬs2dG4pI D' Y5I!C4M1<7k߱oEBwv ݁oZbYuWO 5<4x&H !_T&NV~z:ju1HI"VŕvAόe/ÔQg,:x0(KuU7Ĩ fxxqIb@s O\`4]M0"8|#?jU |w8?|]EmU22 #jDtƜ,e Z8͘$YTUwIwtƳ(k%>Bۂo{~FFZ˄;1]F9 ]jlr79lVF ̃ήp\@-#;8W((:0!}EؤY*h z+k!_{VѢm)-RA!3)s BbdK c2&ޖZ/Oy(QTӶ@4XN0ʫ)?ѓ"h-*F䞫)~1+:Pە(U2x, I&%HQfH1 RN{-!  Dv!(0nDR8$ gE<2{EH'm(D,I2I{> ˒#/HS =lNmHJJ!eۅRnSϤf /u5_ 9ZYJIQn'~T{ӕ@zn',*.0W'zqV_){a .G Gi ~m)(&8㒉[vh)m&. fՌ4*G'ECZ"3NWsMt߆%8k 9׿_&>axCw#> n}gDcaߢ ]]/5gB_FE< ɥL 0KowN\_0\eŵFv.?ͮ/ ̶LW<~[v92OuL~$[v$9#FFl Ii'};XbY'F17پr稂uyFuiVůK)+Lc$ٸ|0;v*ŪuJw0N('7O.-'bq wm4%1j_y1q ߛQӦR*w+ߥV]|,bo1^j}Pn$|Z"Vk^l[m$Mga#Ea0JDFAQ+l҈ӖMBYꄐF")/ ᝍԋ srM\^{wh0&'`vO(xX{5z -|Bˊ `%OݞN-{:88߲L:Cm}:p}VȩkM.m<*\zq>Ậvv㶃.'9s#p̑*ύgqC٧ȣ :0T.1 xEw0EohDA{ƭN eT><^7'XkˬTg8zl0 nw$B&085`׹AqT*HM @YCm4*ƣ/$$Gާ. mՖ}%'MeY t7 j70 d ?J!ѿȐ(U[#_.Y<-|Z"&Z䲩HƳ3DkƤWL('PP&rm P PFM&|4uK<,Cek3f=wEVb5IzE9U8쌓ZIGj%̣x4${ Iy8w#nб"&a'RGdYzS:erE`.)Cw+ݑf)K(td65aI0LgIAL21jT9q768YA$n Ar/ G lB[NLn 3 c6im!P\+^:$J$H |AABfv L9řN)Iڤ;;0*6ns) W::`Jq@"hP:c1"IJ,(GݭCrìO<7`YW2AJŤ 5i ]$IxoZXĂNx=t4 rvzܔW[i૲`h)Oh&[圵AERBKMlAMG k 1zȦ<:>.^GH GEop%6BO ɎM+0*"-qPBDry gUn]H9xv)x&N0lZa[pNܫfŬ7.{Šjg8io{ ':9M=yvP ّ:e1z"`,BMZ0/Px' !2Wb\75!DK -|n6/c)E?_ 'oPv1 ܇%!*0ˮ? \ <{sR_Ӷy7}0]w;%xyu{zkkvwnrQ߲|%kIMPK@'NiHiiE)-a?UVkS~޿FB$/?ՇTL~ 7Wۡ,}J~YMRhky. SN.#3jۍj쑎n`F FJKgF5g`:iT*DS&AB(KC3js_{e@1|Պgj>3rTD Mșj\L.$ib)"|R$zo"őg0?Oy39uC^J]YjvKԶ64]ѣ.!([KaaNF!Rn1DE4./Ȩ9enR֍,'Pu"3)5^x\Hk|d4 tpR̰ #0vjv ~8 g[>!y-䃓cv[h#B(iFd ״e#rHDXۈlF$`VMpcZ໥JZ2͢+]6mz5tpl ]!T=+fEtl ]!\uhl:]!J#:Cᇴn1p k ]!Z+NWҐ5tp%i ]!Zxu(uGWHWRjs<ť-7=l38;Y -wA%g;,g"#Xcol4Lo P}h32,:#c.0Zˮ޼Jإb%ewnַ8/:t[A\fA]36 %W?f $!W$P(i>2+pϷ9k6Txdg޽~g&/ +GJ /#sN9XZXXktխ1saM ҒN7nPcZDW>A 2B6`\T# q@a'`@'_LeJYٕ4 AAq6]EOߣ*b23/jׯU>/}ZZBgb1P'k$'k-`7Mo;Άd rptp'"cئH_ͅ InѝJOe/:sڏRSW-Wђʻ:<;q=Exm`G*/-j@ز`%>;{H8U_|gxxav:v6Nޝ=Fvc5f#~p:Ϊ/7ʧNV!`՞ U[5OBVuZmEtU{vJ-thMwv&uM{m;%&+DIyGWφDŦ>Ztt\ccp=lGz(y5JttiCe [DW؊YBT6%jl]`xk JBJ4f]!] P-+ZG>E [*ᚶ7rߎ6l]Ȅf:eimʳuk[Il)l),[F0c}>R*0"ir.גHw.rs$~V6K]+oE#۠%EY.8 RdL8U#ۥ͟M'W %M$pVօc*gB'Ej@@kڞurZhy% }-۴Nm{!-thi%]!]k &+I[ tBSWHW^D5NS6%Pr;z6t%+6<,wo=vׁ+ 1]H]fѕ\dGW6=eq"B5-th NW@V BZDWXU-thn:]!J;Ci[DWذ++H[ JtBJutt%f+dz롵+@)IGWHWk"d [=K#-nOWutt%ͨ8&@Z:VRڼdzI,";Ndz;վE ݀pnn@t{PQ7hA-+CWȶ(ttteM H[Eh (%]!]YmӞV(B5 Zt ]MKvB|=|tZIwCWkl1֠+զMO%5F5tUµm+D+e Q*"B;?ܧ=]!]ErZ%]!]qk+Td}+eɒ{?<;Jg:i %%_Lw!? 
~'+O43uL1xG!v׃w tcx򺻥M߇ɹ|˺8O(=0I)S+ [ KrKm\{xQB{/5F`K|1c8@ZX4@920eTz/+F(Y,9ZA:1q%8MgW~LLY+q<_vH0#lL}./gt^뮇^fm^{>fuJ]pa";n_V-jɼ&Cd'o8Gga4)B\[#M>ƬOaNG |MH#nt,f Ѳ/#J:-ZLhBZQs!%6j`҃Z3w~T%!L⹮}fI]UZ/ZmE\Tm+^?C=û$W|F ޑpdŪ|,cztq[iez+UIēݭ"RU V1+XuSl++dx&^ѝgq6,Uդrpf\nS(Y2ҫ\UZ?{ʍCfyx& &9d^c+%Z3b_e*}z2ql5.UUE҃I:{Cf3kM\n{Ė ۮWr9"EJ}戫>/uծh O'Y̙>`u tpf=j뚾P4Kkj7o|3|h_;2iU7(2peͺy1 ffBmӖsךZs٧YJ'8Uhv,@M{ R7! ۦ_ltkԂpnd2mk}1ǵ76^mT_~Z3D}R* 9Uu ?vLatpb٧L0o;sM:'V 8&0bD#6~{¸I^QPD"XWU&@T"J0ǩTi`h綏;R:0GUF!\{!5 13wvTIE|?SH E+B(N%I"{^_; נi?;_huVI]W䇴ߴ|[oyС ~SMo 8G{.PRM'6$}|<h˩~#Ș?Na_\%E 8-/ gWBFp:rY kxMQlg!}sNU?,tk컿-6sl!!y@:zQ޹6;ʤ+CBP]9ܿ>֖;ٔ|n[v WpqA@^F耒BXi&R/5eDDN #(H8H 1K=??V-ָWڔT p j9)mm囫cP4_jEHD%$Ԕe*읙]-Y챣]femfֶk$|MImx>U ¦}m4ߏGfͧ6_qaihPxk0MEy0r~ڕ ߝCeC6Dp$ιsM*m N%Q8JuA3͢D!6`mvFY,IǝzJ(G{ ͉HjA*` \V`E*La.pQv8h 'q<rlREIt`f} 8& ?:->N?+Q/ЄQU/_II1U|]PVYgVZI:_pSհWebiJ*8 Fgt'5.%l@h+q9xy$q^q&Pcfp}3FC4&DꗥvRˊ6dPB/i@|@c-_χY{d l^jɸ~s& eϯiF)I)ϗ-5g#MpqZڰ)^& :^o#DҔ00ƅ]N\Ub1,WqskFP^ 7_I9:6Kd.^_ ҄:pi/+JE+ijar1Oaꗟqmr$m[v>NӘ֟V$m\d4to|R|IÌ=xƌo/QeGM5Uhg7fNtWm_xo57ydo,?08pbprd>4U1.8αo2^@7]ORӦ4o#݀-5gޡٸj㍴eAu$q-Դދ A6=MDUɐ-/j UO杼NY4ikwNPSϷo  Z GYZ@qA9O&N%ޫ(S<.+CB !x@ޥ1cP=豉hj4O{DdVxgzV>!@#OVk{aF‚ Dc(Q"`j=CHzլ<J <* s~]`DʩR ˽)YP[PÇiefZ 'HH8F=r`TiLf۰ްYcr?a^ lKa쫓Mu\ۘa AC!fH ^ wE%K>YU^-J:{?^u.G /]`~xC:> V \|rkox/:Ѿ>{F߾V7^8 q+ AyZhR[^z!`TvʏAJ b9^ {AaNy(LV Bs:Np~: ђ?>ŕ&:FgZ:.#Uˀyo̦cB)7j<[֑aY-;X+\$JH!+>e#g XA7pAL&іea}Vx^q,ut>V>zTLʤG %7 (E f-@~ZeW9 ;gcY~ 2#FA{&#v#w!Сe`uPT+!h^EzBoߍC:ߤphi{;mX=UQ}W>r_8G,5GS\ `*M՜޳{NԳ%V WS. JTc4=#Ie909gw/-ٰkeOu|z⟛.T4fu u<%[Io+N7&)BoMg@b_.Zuww|tιvmzl~N0L>}2gUkE%mgJ{H_K;R՗#h7"YXoo jDzIJ^mLJ8hy XꧪZVo؊VajvT};L]0`9U  GDn0s2f_gEcSm]Vu ,SR7؇y4GvAѷ s6d A4ٗDmOo!lP[ XXEMYvgiJ I{q-i8@{"\IqgglQbR2>Ꚃ$Jj~DzGA&Η[[%mVR "pM|!l7f"Y'GmtRhzD.K `]-KR* rPaHK]'}쌜-7R? 쑤 @|@A8: QCǥ694H'3 <wy3lR8.s kWƆmsA+LN#d|VS{Uj{'4:'@HL|DCKYO$qқ`eZ21I3c)x>kY~ #to*[A;܁R1p΂N`>4V2xYiؠp|q#/^ѕ~"P x>_F'R4b 9a7j}Xj9vR_sa䢡L3IV*39krieǭ'໙c.4BY_"ŀэto]um~ DbX^{˔I 7a:^_4ɉdz;5[ix8i15Pjc.Pr轢go.˸ÊxA!?So!Gk֗?~ ]dR CMXր^A _kN-J}.ǁcV}X=q$1JEv"C% r S&7)O{K8^!P6nW)D]@҉3ɮ2Bf4)s BbdK c2$&wїmiGLh(Yi[a l,'0ȫ)ѓ `qТG K G;b V@sdXO% GMp 9$H lt:^:J;IؿڷhP*0f$Ynl( ¸آƱ &i>+ZL\摱 RtWiR*Pc h.FWd?aq>*WMf~pZBmU!n)>IٜCn(&8㒉ÏRrdp2%x~gݛk5#$IА94ȌŵjܒtE6Kpp|c}jSG%ГeIUnouMn+gEƹ_GX>}1j6!F{GX|PDg|[|`͞?d=1LJW*L8DCտ> -]ĩl88O$Oh6^2|SԆ[XӷGwl#ݤ}6-砤E q.Ee3dqC Ajdo#=a\FzG# mzr=RYk K(gJ}4\eKs,.0 G[)MpVfQ0}=-~9AV# Jz@9!Rb8,r#t{fC2EĀdInɮY8 tiAMLG"g:Ɛm4 gRV匈ϻtǶs!\1b}ZPV dC?Wce3UB#ȟ_dY֜kɚ~I6v+Mt!%<_dXt*r@ 1F$S,|Yֺ"n ^1R#or.}՗|z'ٮ-˧,A%, Xa5w6LJXc-}n,*# XԠq:3Jd:uBYVOAi#R ZA8=Nz$ڻa%)j Zذ]~iާ ĥYC7'X-KYjt|,P=)=WO"$lPaUMhAz梾i۹piveG% '?p7n̟۸ٸ۸%T[5`ޯ-+?_l{DHK dtsdj/mڪ#(WKP%a5r5Dj26]!mzM呬$,Fbhm&\d1q%HH23WeJ:g̩Z`9y/uNXVݐyXN:m ܯW uh C'zZZսSmt9fIY k.Z8uQfT`Dm5J59eg}~gzβk,۽h>;1)}QTd ƑTׂ🏑 m`a`dgyA&C\MkuZnxy&/ANXNC)3Bx>e<S%ƭ'SzUH[mHȷ[ufYgm: nj{*t~GǪد-T-lNO`F`S,u #/ڄ#K?ysi(tg<v"|p_H?&]^ew"O?Fy`n{wR_䢡L3IVFΚ5z ?1vpܚ6xB/4c@hGNBgڊmt /廹-״ 6a:^_4]8o`>QUMwU@uꧪ]/ںU&Wѳ7c$pPv"aEX]. R@30 M.޼P+.J.b،Wk筭Z_ B#l^Kz/٣gݹg;;DN֥ Rr6dr; gъɊڡ]0'u4tu?~{Еzk.G6+ZMvjQy[]p4QW./F] IjtNҕ1F^]Ԃjehmu(HWKzv5zBWbpWЕu̕^OcݟOlCyݻƍͦn7fКx5{ʬg_^N_Zћ{ZZخalxEĄ]dMC&W;u >&Bb*a`Y/6{пzi{l=&o_1|_SWє6[N?+xc|W{T5)Γ x'Ca1kVެqnݴ6Js _nr"Q0*3Pڎ" E7$`i1tRjt)gHW>x]<`u5b vw N ]=C !z$hb*DDZ]2siYk^pXΚ׀DW@u(>R^ϮXS?''ޯ}?DϮسp BWw>Hoг^nbQ:^=)b0]Ts޻ CEzK>?'lo\:(Ѹt>=;:~w&qTkW]0EYRٿ:GT>N|:7K!l?88k>>̘k7笤jmn8$ykV#;Bkb&rTT1/@$W(oOebf5=??}k~Ly`>z3h9΁~rl8P1dCoSՕv93*7BҮd? 
ask=k36О7HNZ;~:CkptQ*jz^嘬q]:)m#)`Þf蛜2Ќ1{.ԩRU)d}ʹRj^UC?cTgkKZ}4nN" 9Z(\ #j ͨwmzթM6;MLݻTSԻf2w;{mdmMsMsjJ1'ZjC,:ɌaDN#1-.oL.{PtpQ-^rJ# ̑~Չ&&fRJ=@{9Ye *CQ"sP8'4ޅK@cVYj"}bfg AC# 4b@~N޶TڧMVT4k^ ,s.̱f>O͹xwޜǬ*ګ;;ֽ<RIIbU71HG)]p[c[1ɇ%jѻ)F$Z%9OH }MoF|sU.RH5jC_2"$1F!jT'b>iͬyRLq5R}ȤrM{XMɀ1!KutAڳZNH!i׆R XJ2P4@ZE36Xvse[+AEE` y9VsM+`2~0P8jeWGWHm1lt XBs+]S`17uV6 ]˘ Ͳѱ;u< *InM Qx2pGR7fa=g5.PT%`}5 ɷVzRcs'[Fp%Df0VjVdL1LhI6\ɋ% v V̼޴X sze]R uWh%W e2a̷6Q9 ePDdBE-!NukAx`1g#n nQ!6Ks ` 7đPI!0,gJ@T`-~7ud&v/ōbUڛk((Sѝ*E4GRFy֞"%d~GBYǿjA!N9 m+Wa1j{VQR@}JuzF3yA1$$3'FjeR"ZJ(CsMȲع!+ajE}B\FФi!:38o-aPN -^̈KUUO:G$'7**WeH;L'Q"_ {0Ks7`o\Jwq:ˋZRtj6G mCka&a-A7 /Ml#TYSL!Jrd@h2q)bx蘊'8$;Zg:#.c) >@E&rZed^*e(ᐉ[CLyOC }$kɘukx$ۑ oM`QY,TG'P@-"V1jLø0VA;> "m鶽R/]@,M%C.:T˔1#Vi,!zRn6b:EjC$*KtR:Mu $$hLEQ{X5K q[O -ChgSRC;V8赈 )-f) v #4l5^5pg^ kJ (|uժàe6M̀| =pe.6čtzGzړk4JrlCɵ8@1pQi8 5fSMpUy\  X%dǬ. #p5- 4h4$NO I@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N(`b@@; N,#N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'q}ޠ uk?W/^՛/vU9zx*aI%6v9%eZ%t,ƥ`\lil+S:,\Kք]3+C>Z]p$Z ] F-Z6NW5BWϐxlAt5g;@BWϐ엣ܰuvН (tʩ详ӵ匿{G\]_P^ ˆٽ/VB6 -?ݮtКx5{ʬg_^N_Zћ{ZyiOWɹz#=~iڄ4|a3Ʋ!&|X_!@Η6JVGx/V/V0G讋ocelfm\oݛjXpߩau?9<:=/?]6Vޮ :>HVhW ؒ6J_u k^Nzٮm3U7u\T\gXWgʾDž}=E2\M>;9>菊^6D2bnMZcStj_HRF[o;SrcVC޵q$B)98c?$,N]yIb}Z)!98$M$,Ns0|%gSt*qZ49#K.Xr\NŒKԊ'*%,ВIzN|U"WJWW){A]}J}M[O8Kqo8m:} 7ߴG :,0軐q’>#/@qi_cn5ߎ#HoS2*mC}[;rOoˢIk72hgŻrrP07@YG/~T `BRI /RQhPÝ_Pm]Wrq=a@ۖE|ko7IVeyde{@/E_/}oKmIZ1њ+o{m^]䒸ٸ^^\PhJ|uFOAz\%? MKLe8. ͬ^_B' N副u .赻P0.wp"7Wak-߽foޕ Dvl5qa#}׃;3oƾg.,G~6bܿ'Y^W⯎{ٷ=hsn퀴^iz"1F]dy B]WG:eXI\(ODe p jg>LLj94YGXp4kZ9 YN3$ϰ>6J/y{L׋eBNӔPz1+?ҕM[!DSzJ $-4 mI{G½|أ=n4h"ur]^%?Y@ox|T~3:w`rg˨y)Z'Ġ8@' A"Nj&4#뙖H2 Ft k9&r6ȹuVGG8#aREbD h̝a>0ņB2qhGNUQe_L VP`t>|!I~X"+:**`Φc"MD-;J94j+:*UV>R[.mL8D;_(]VH Fu:-J _mO6~+{m&OT69A~_6nc"HG5 ;F{G4jPAm\W}(;RkKk|9nwb8sWyp8k16(n N%Q8JuA3͢D!A# ܮB`(%qYO rzBs)G ""؂%7fR`rMҖV~QL?&#vߦ-4C\- έ3+vJO8wGo{`ID{Jh=(I1QQ0xF7YoKo~/7^`Q<ՓNރ>iF: wbpWo~t'\C毩g԰o4)觲ZҴN&]mR|)(Q/5{<FLEI!o+2fm]/Y!d zzF'uĜ\̵sӲͰQBipuƀ[~X'V ؼ,5K@_Մ5Vnrۻ%5&zxһDvkvzhJGDUXԴMJ~ջ녴/W71le97!rfh|7^^ FƽF撫KK.?uw5Y}H]AL|׉Hyy~y^WAy'F37cڍ:vQeSevx7ݧ7;]v2[A ~MaD3\*{n jcPS aIgݪNoƎLWLG5X:Aը)-2>~y~Ht0}_ ䷇+~43UjHPO k 3Y}@v1Ţ?y;Pw$f7 wEYl6}wVV{9t7m ['&ɕŽg;W{nca1 U&o cNXy'Sc'"r1?ͱ~3["lX!;㩰<^ M@+#jaG-YՉ//>(K /`d<2 (9%IzCZ+XE5X>nzTl|r3[ǖ.,Ou}ݾoX',;.{o[}ڂ}x\jzX:e1,'ɠD}/9#>%*GkQ,b'8>jP& 3AI֌ٯal0g Š J(J6_$dl.oq>Un>Pdz7W/\c3N )$QH-E3&g0MNBPqK0q&VV42XdOP&; ^Fґ0хT+V"gƶwLSٸT`wVX֗p s[0VAkC( Ef)ƃ'"ȅ4*XȀ 1+ ar$lyc}904&>F~}X(OE#fc5̬3ghl&1(:&$Q`ed< 0YiN#zocx#UV!@$`(L $^2:[X҄Ij$fX2kl׈wWڂ^%EY/A/zqw'ȑ ~-.h2M`K ɩfdNbOO.8}ӇO$E5^x8s !9'g~8.>+DMTg &?1n c$-A"M4ˆk#OYKl rBf`k#,EZ&.И C0iD6pkmXf~wcu`o Np!SZRUçlQ"ő8#9g8}ztUuuϛ&62CjN4yuօ]sNX#J gDNĭv|\|I"7s2P*gKZO(q`c(8 db'D/c-qV=kY;`][mgPܓ;JfE[OO MidU*{Y)|e8jtoqw>$Qσ6=k~S)ɬNV͵`// TYdXH<,W}f23"'IzۚL:Ϫ en7|܉~e([(pQ 'SrID]? Ø8M&5ߌδ69Hg 5WoyMZ8jmoɾqB9h=5jE/sunBu1# zf>UcڪOfՠ=cDcF*o Mb&bI(l%E3FfQ\H+Z[,@QU(= yE`WXc,M+F_1ڤ!=0{.Y{N /eGdͼ3Si@ 9梭xTs@"lӞKKfՂ!-U-Uխ8Y`KRi%L8QVd*ZƵy!vмYv}6 :n:!i:^jLՊW罝IOx6z`p  \<1EI$#M8Q8tPsw˶hlN82ѓeÏë:'Vy'׵Nh5q8Gz$Fe@q̏`4=JzCXkߵ)^ sƙcRT9mNX KB:fB .TIM7uHTΤpFx%w2s 됍6xKkrُ+xinם\c=+2 j,/ y 9GY69 'V+s#"bb\ՠ΀-x4qtD J<Eb:X*5}ޤ18Kz3(eF)^:'Zg.zWvmivEoYɲN9,g9OeNUL2/Ph^hh';}Et'ouW1ď~1 0GïHtXv7>vC+y62 QdW(52$"V!G\j9p&'Sw3 ?ܫ-o2bȾFvni˭w5}ɋ< -t.57nٯ/ӏoƒOfW~=>^ಾ߹_L{ ^`JAA_o-Ӫ p6 T]yH:3?^WC=1@vݾj;KW۠Ֆ([FW]AOWzN. !"Vtj;tLtut%+:)tgJ"ʴV=] ]Ie9]`"jv"tutʕ`CtE ]\mBWֲU]"]i%twng\VBP ni[D_Km nͦr5;z2xQm,NeOLTB >3քog]qQq>厄04/m|βWi"z&"ENfB=1 b_,"{N`QT-d7=+v6}JKƱ+"Z{L> //ى$CO?|!B W$eHcY: }8- R=UКoy({. 
6+k:Ehm;]Joy"]97+!2v"tutq%BuLJLp9 ]ZNWRʞ ])+v o1%}m Ֆhd nreߣ tt5 =7Ҩ.3tEp ]Zh=]!JLOWHWhXK"tl`OWOCW( +leg:"`NWR3ҕ2SCt][+tEhje\=]= ]i# ׮N:(n⵷귂#qq8yEUӹR9*Qisbgy܎?Mq<-40VRQUW[Vm/| Dܢ3X]bwE.T{*sU*TA"D%nQ֮&Fy{Y 2$y :,U`s By < ? ߠ `*)eSO5C_RJVf:d!`@pygm-9B)Eo%gK' ]\}옔P> Ze]`՝m=yWК+g @+g-{)"Btl=]J!XBtNH:5]!|>t7z~gl=S޳j;2C6coAW=wY!Bdg9–hh;]ʶ t$t%ϲ%`"Ft5m+B ҕt.Xղ+tEh;WtE(J9]`m;CWwɪCk]P:ҕ6R,8zʼn'i& _wo~k'] ՃV'MZXe  7 \`+zvP. H&CtEMw2תuw"J2HWUe%'26ѮWк; eOWIWe]2 p ]+B[o ʕá+g\{74W~HIN6@$/' }+)$,*bږlXı6f5ۼFoW(X+5*m/WMώ(W w6+ɞ3{ TcڬX6=#u7y)P+i W(א\pjd]T QP y/j5:Pemqu8pP5 }]5SKuq*quP'D+Tk: JK{\"׵$0>bzDrM!ܵI%a'լbBtz%%4UehdR3$-КYw fF-dϥw {:e"9B\"9TB;HNSIW(3rE6OYRWjJsDRd+M.BVtWqeY[j2\Z,T)e/WMϏsW}'M5LSg#ko+MO\g+e6j9Բ UrqŘVf+8W(wSJu\ʮ-q*L k3g++x.BRuWas`VP lpj%]Jr*EN ~ש+H6BTtWS+%]O?bG?2YbQ;5to?)E'U ~RM R;܃ʪ!Ɔe~*onR,YfyJ2 !QŹ/(Iu6x6ȣ3i_b{s09hߪKkSӓ7U K~(`(J$(jɛ{w4/yczOMlfE>!(Wf՚OJG؇j.);[ @.'&\Zj+TTĕP';J P|v!rcYNB2IfCBbp%"; (\煘=-l p%z\=`p W(\pj/(* nW(lprW֒{WW+. W(زlpr5! Ru\U*{q%V4+,ʕ<\Z%+Tٵ'=^WRXsTe+˲jGvWRW+k[dQ+#A0\ΕM]­[5-Vf gʘK[me֤}+gW#I&r٤Ś~+2{*ɨEr QM$r_5)E01b2 L<\!rT}!Hi_u/ŕlprY6Bzt3OI?D\Y)%|+kBt> U9_^p:Hd^L.˪{Bh]'p%JznSE,7 k *6\ZKJeCSP0 {}M7SٵW+W$#\`˳+T UW+rȌp7 jr5G=WRIR{zgȑOfjH=rKp;#AGLY`DK(2@9 P߀jMPig^7(M믴>P=U&WrQ0<˶rzU֛ܚ- Wޱ0geOw:Sx~_91˺,?UJtrq/[3[nNr|f,ϣf~|{hdܿ;p],obv/b6__sWlhS`?mPGZCx'P7OJK0M`A,`5z7zA%h}z7h# -fƳO1p|>:>LN/o)ylq6Nul4T^f+W:.0B AIoNgdod1:/p%P}*Ba@N& Uy &0oa*L"a Dlw8Lf"nMn4ȝoRmзyǶu/UZO/a:g$TCJy>]3)(> K~.#J (j/y?Xp6L>\1yNꔣF)1Hl )l\h K=Q&hHsL7^0-d1qj@Kc"Yj֜I蜜);[P)"1,9AoAkrCY'vw I/ǻdOl;ç޾[.@)/q68C>qM)`,,0)=)8CA$nIJ/RO;GN NQZcc$njW;M%ơIF8;SOVȃw2^3͓hA5pzjscTRhM ҅2 . ,L-/7}Yo;S6t)j; xl_j؁W Sg\||4,<%b uމtzeyzߵ~U"mK✓Or$p~]L q6p 'ܤ­E8ՒfBb% U"V#h4 n疒o93vf/t~C9ej*U15ة_F IR3ʯ$NXijWwʺ1!&w1Y|X%=}N9]\'̭e(BC)ѳ!ZOw;_Lg`C|<-g3pW~2ޠ W-Ė V[vuKF߯M㮝_̎ek/S.η:)x#&"Cwy(&<ٰ77^nidU۲\/t8 oQ ΜKH vDU0}_}Y kZF2 po>_bwSz6̪ᶖ(6VyԸ2ΰs}LV?Tv_ PWX/gwXR6ƻjfXκ+] Vr S02jI]ehxJ?~v(V92k8/W4] ~fIq9 7hUq$?r2(yWgAa43N]?puྺ`uƶJ-Gmc4ϝYݹ nϲPeła ♈ ǒ.pUdb F"8- *y~iy̗Cx>bk2?ss2@!I)oSj;b!*$ &6kzh>}VsE/нvkqkbҔtm4yyس`Qk$3EpdEsb%ZLҘOWUWQ21CLb *xBD V'S.rg%&1ղۚ'.|Xu]Fdq*F qM'IH^qpKl ]Վ?UL8>d8.)c( $qɍT2Z[yƏ*4y]+Qjkv(U>Յbv]z;JVVgZ㸑2E)8d&=5 >ΒHNWyaf<-HVU,V'YCxַ[г%k~\dnՃh%&AH.eER %RR@>#m`uXr.1Z9:U%\<@ 02 RTk،͚Vi qƮЎIEulgqs_˸>l?Wdd&gI}"plrRivV+LTdmj 4Tc/d%tJ6e3E'$*"k95vK&Pv3ֶ1,l} B*2ҏ7@5 $lfَml1CH` &3d YGs1Y$6 ChYF2ʢ*ơhfFG8j[+/CbvN6 ,B%*T豀)MV]0yRc!m/ˈQy'`D¶1܁E3*U!RbKZR-X#6#gFRG֋e|͸dWE7Q/^ (F <{NAȄX~́%S/eh| ~ԋ 8V1}wӇ6\_ȍ'q a}Apc#E?*}E?.fUj=j!u1k݁@ymmGRdkSأ?>V#ޡ(͢4Iy[<+9^Sr|U(zE+ý\` hk(]lX(PVUT&1fioBD:Hds)܂o$OA컅%#qkK6@;0dh+l{ZnݼX]M u,T[ˈM_dO+ J̿LRg?g"y6@cNd!d lDTt=g1VV-՜6֌DxmH{Ț ?ydjezs#pGov;PUQ_S] Qu c=bhŪj|03F/3 (2,lv9I`ZUY #mT},1 z_@DQz\**F.Q3r6ZhHiDŽ2ktY_j ?J|8{}ؚS׏̉p ;Y<eP1PF@bÙbU r{*osYQ(;O59+#@B!d>hd[O޵:G̗gvy< }. 
M/vn`[IM{'e ߻?Tw{eYu7aQo6|q˖/}y;?N|E.MO1f69kMI8 Tեu&Vl;cdx;]UjC䷣n\0 fīWgU\%?7P^t%w (7|1ȚGxEdT$I栤V: 6"cۈhuR#Gr2^J"C8'ӌ-*P(/)_㻼ty>tt߾/onn_t5BGZIFuJQXI{)9WZף6z ͯFG9g`RTSIgSxd..9y6ĬBs.M,4*(T#D/MI@YIp],gRzc@bfnb3mʯz$ڠL1|Jɋ!PX3<2D7@KBQosq1)V"$ܞͤ EesA1PJgG4IsޕB !',DՄ!׬/r3rѻFqP5㉝]d߽^.Cx+j ϭi\3>*H.UދIRrfF3 g(m<"L)u5k/l(׌E&|8 %d–\#f+ lgK"'_ϔ §aPoؓ J ޢZf3:Gfgww:+X"JQ4MNxp/ tt#V AnvN޾D'g|)c2INOW"=_k^..uJw(eώf{ie׆VkO wүON\:6ʘ_OWwQ2ٻŃ ĠrEq}7[/sdr2=/o5GtXӓ'07tvsY^f:gbŧ'G=iW%nu9uVUze2R;6>OOSm|FiT/d:SzcZ!aZ)Y_O~ -C̩=$~xBVo{߯y7߽z#{?_yrF$;˝0|_]V]sˮmv-G-~;_:ν qu=z|owj[hl$9_DdF;RQs2PCQ:` xFUB6C;exMV aa5Q~桒jHZPND$,dC.N3A6㉯$uŠW5eª1lv4#U"s:oדgP^Ygg?aTxznwgV5nxoFYr7*Rc:c)B3eIZ~B*fq.1@M-%zMnŧWp;ߎ'y5-[jo0ruB]`Q{2o`' |;п9;?=͕w0yyBc;|r݌Hּ7an|bklEjzX ?KuQBzӼH}Q EiNZ5f.vsH8ff˧SZ>%LJ{aEᬳ$SIzr AKjgZ㺑Tv\62͸,WxJ Ӕ,{"X$Eփ\ 9xyמpՏpPA\-i-u /i}'pKji5`&lygsϡӜ?f Gh-/o{sk|~#J]Og?sܵUlϤVgcdUHݍj'\bT-fYsE\)DYIڢ+ISQZx3-fWMH^fswߪ^I>q8?cWžg ,_57?M1J6oR k8ȶʴ,le‡lXfu֣l1'EE>k+ԲQlcIH^|+.\` dne2檥 1In1qvNV řlW:qYǙ9!mIPzSjY;Q.iZU+}lp 5,IxTc8IDgBĚ%;2 [m1wHo,Ûԇϐ4=7ۣIYEms83#VGNգMHNAoy=8!BfmkIC#վMR(G)hzBfe[.磳wO' 75.izw/miQS{-ѵ17;jqp|n.Q1?"Զ>qm4ݛ"S]78瓉o[T$l]ӍS3޸VN+ʿOw5u_;Zv)חIi`~3] 9i}@k.#~[ogP]*I_K1q]V^8Tחx5--Ak䢶b]x*cfsRrwrmU1j|oەJ /7;Yu֮u#kRO"|E;wWY>!w˭fhoV?L|LzjEW]=G 1z.p] ] Jgt YyAt5{pb@e (wЕE9 ~AtCEWFBWsMdx(u*ʙﱸlF`CWb h{rć^]r;GIϛCFl`ax1>'5p9]Y3yzGnna7Gsn|t-1][>͝dzWo;9jա,o]OQ҇T:j9Z퟽ꝽqS"NHkT : ;J\d#T]Tw9,͚s 4!bIXoر )ʥZGJ҃>^ tɱM:27q+ZJUhCk5D(Ѭ[%kAgDiĸC'+S)Cf?v$1Zyh j*zwև Rlr-Ht$5czIEKwA!э#½ E'$Ei{)j'@# ,Q~Չ[6f6r{ dVIXah0r*0Aj. r6ǹq$㞽+F[j( wE呤[:}8N]eAS!.N{@1#3X2,foPP$}?oNBUiJj=8G9PJJ6F5{\* }uN>,\G[$B$JJ]I((|!46Z: \j() Al})X$fk]󵱋 j4U&"ȓ.[U')7Rdz6X sS`r"Sk ],¨ўPKQ# I2](P N|0`%dg{eؠA[BZ ywԡmӊCxkìaP$g_GSb X4yT@8քFnuc/y ,q#YgeдlhN,58@$|/ۦ E@H'O5.(հ֞QZ uvTZZ]zR9v{"pOjmWFRs$F G!}Mh(pu 6'ø9G%*J}3 dLo  $X v^Eo[PB]:B@LUP!ՠ໴\Q?1l aNA04'X !,Lh4v!ߥ{Wʨso͙t,r #]ty|uyBS 9#+tmPFKFrt@6@^s` QL(wKBܳ@R@"0eP_;vgJ q[bH V -ɘ+u@<DCWH!-vmݬ%˽eo;By2SaAȚ=B܌1v0BgVI3J/1A2?AjD7H *Wy*T3u cF8PGf1ݠmDJqPk!Z_gZ{6_!6R wH0 YEn=AKԊZjY#שsu`S5M,DJ YU浆Tj[`xEXYo +FI/uI'9*0tAcua`-4#kl0}3l85L - n,AZ Yl&ALV[6:SkwG(UnJR#dkQ Z8E] C1e0* KC{oWB@xFojR—zs*Zh z!rz TA*Qzi 'O xm@z;ja0MOہB"1\ CQfk }; ]^y(VbbaPaQ1RQi0񕪭zׁT'X` \Տ!JV*((GϢ%9Q6HYA-ڮ k9 u Jr\Df-/1dZUҸeYX=gw ա |o:h upT k 6P v {m`!0#@J-th/=Bw!|?-ASqCwX#1>DPq]恋Y/EQ-onâZ#wW9{!})Nw'k[F(*8ʭǘM*x5HA _Nd!kkjl%7|P3lO:/f3.)?|!69gkd1 4ͺa0 t53dE&HrUq]!Ix5SEoi5P5sPo>P0>pұ#\ Њޯ(%Cpeך Wd"pE+2\ Wd"pE+2\ Wd"pE+2\ Wd"pE+2\ Wd"pE+2\ Wd"pE+2\ zf)M:#\ :{ WhrF+I+2\ Wd"pE+2\ Wd"pE+2\ Wd"pE+2\ Wd"pE+2\ Wd"pE+2\ Wd"pU WPJ+ zy)7\JI4\ Wd"pE+2\ Wd"pE+2\ Wd"pE+2\ Wd"pE+2\ Wd"pE+2\ Wd"pE+2\k1 KphAop(&V Wd"pE+2\ Wd"pE+2\ Wd"pE+2\ Wd"pE+2\ Wd"pE+2\ Wd"pE+2\puw V^Lub]ҷp1Z{CVaz)$ 7U:5ߐ Q:Nc]+zXBt,B_/x<]!Jň~cg]̇#,wɚbfOM"CE_p *GVl2 >?Ğ+1a5;[c!S1AaU18'|>ՏKQO.(|:BRR&m \]Գ y> /.ej kٌm o'R2W7/B C%*Y->J-r]ʺ g瑫:WyK(mt*e4mu!`5J5ϐ{U^9' =ھXY3rPiS|^gAaflX(VӉ\遖&?1/{ųl:(ԉ=:".c~ @S[-&vsm$tN)ErXrL$pK%CR$w< Kgz ]ZzOW1ҕS>!`B1t(-xt<%J2듡+&d>ZXA鉮0tY)-tj'RzŽpaj7 eƮt剮Z\o\Btŕe\%CWWT *wB]!]  .l2tpNxjW$:BYB$CWC+Dҕ²Ʈ<BFDWGHWZkXBte:cWWT jwB+^TcO,P8W_N`:OmbI%*/bV^GJGQ>ӝi}4M@k pKn2!wh}ccFeQ7Xs+ly:27ΰ(>5뢟BP`?㆝1VTvSY&ǸTneԪw'7o_WDd&:}Dt~6!ػdzW{ |ӆk˜6B̎&Rs/&Q<{s ` IXͳ 99;ecE4G%obq$N Hi~uE'-{*Xskoտ,~Ҭ888cG*u֦lr@Birq&>>ק6&,";,)eKvcXim_6kmm_MmNvvp)5sFVd=t(i'j(gRf'CWd Њ?CWݯ5;expu`䁽u;UՎ(MuݟkR8UBtb!}•>BeϦS]= ] ˼ ѕ ]!\iR+D\ QvJDWCW 'YBtIթtՠDWGHW Ŀt :'3vh;]!ʎ_x u vK]vL֩NuW^HXMFdv٭RTȉlA[zI[NIJǻSٌ\qf<b%Ni)QDr׊T"9Dz? 
r҅4*[/V} oq-e352͏O]Ƙ!'%|:fe6'0?ȘqD_W6:št?}Z,qpFDJ(<>xe|n_PgI{:]M ?M 2u2X"&z4WԅVoŤ'm =l`Ѱ3 LJC0$TP[hMNT@?gDBE L209?,y>uC^?b'B=&chB}ЙHI!5I]̐)1-?":)$݅NI_WD)7#NktbN>͈Z[g*tz^뼫.u3գ2n:փ :z{#yJ t7|ZL`\'fOA TnfD!)sKc{t*BhE!d^z & bx/:6n_82j ;Eͫ{sxs2̄ 1`0#ʚŭfꙹ탚(.E^/QO(uBC($,e\ v?Q'xNj|Ahƺٷju20 SO0a C8ӎhc!p \(uv?7H-@s6 ðn8c2hVJɱ )$°_ǡO.z{ <{ 9yjs+Qnj(JH`Z/c@,1N?4JgR6|$%  oe3״3wCpw>2-Q3SiPf傢f _ o0:`x)i%wPVwsyMūp ͟/{}eXV&߁A$3%+Cff8[7t4qBjM(޷(e`TQ! c|kZq.7a !]'[#NnA!M9̽3EaVCK6g&ϑ(^#3ƍjWI)Ғ\Vfw5; 㜷ks^ZB_-`q< 93xaUstݐoTiom∆A_4:7Vǭ1RIk0*'ȐSJ[]sNd AOnoP(poxp$wx7wgz2B9"А\ 5~)JW$1OlGKok }vԬj7/QJB T(hRqˑJ[=v9k: ^s e/(~ Q LraG9H)k Yn^vr_~7_,&rҾ-H o1[!3?d+~zD ]2 :6<*+鑅yͅ@T\(J(/#W0w{5E0nWTuWjtW*Σ$|C1r*(ltL= .@oe0%lU0:fC^K= Nhiu^69f2]O"Yx3iBlvTKv&]Ѭ~v03N\|̔mḛs0bmvmP<_W=&512.| 1L5= ~Xn/Ѻިp] L )yF4V>f!)Gtz #$x 3T8f0DWtd^e_r9m;6v,&%&mRlvU`86[|o#S"}o~GtVl\ł1rZOF9>L>U S]# Jan!]Ƹ!gc\RRh!0tݥT0j))sCJ^MZ)]BJm#Xn0.ͷY* HwW^vqz2]7ņ;޴?4fw5SO)QK#X+Kîa@>~6թϦQKؠ FUeKi?띕YNa^ <{B66E16m#;mnN?zyUDXOŃ:?,^\h~D1a^ۍk '"r-vy^+NO8b`c2ȍʈ!8@ cgޕL&U&B}{%1G.QWǹGj]q3FT?CJF0뙑Nݭ*d?&v^ υsJN8}|lSkfW֮ 7=z4k.z4-bs* g/&NGsSTkyjX1Ǐ o ۼd~h95m\5GL%NYrʺJ64iV{}8%mn1䄠%mhVr*YagIi\P௑HO6v4m[lmI֣8>jsW(TsdԆJ^z7Z:4W>g| c6c1 ~tS>Gpyz  `I!͑\!7)GC7)#H ѝ,% @>IaV1rvB")@gMj>S"$nf̸$H FK!~ަui  ar'.,^RT,vt|ewёN9Uꩢ@-қp[=~ҫ /L9Cw]CP*(G\˛ܫۛЍoBNjRwGpwG0F_7lrrBrZՑ h}G*HKFTʒPLM\?o>Mꇼ ka)TУ @` BT#0\6Bh,dR3da޸ Rr! ^k`qTrjL\jnrA86X2 A8?XGrk@Fj-FMTNRo.mTNHԾÝAP2{!M9L` (5VIn5dYL#iQnq㕽PRv cQJB* AH+mHS> !k);㼬D<R5d 1诺z$fO)*;4p MAAP2p Y:1{sy ^iLs5@-K= 8ؼ`̃GT{3DYaK L*`'<(10$L+G1N)Md3-3")UeJ#&'@"7͙Zt7x>oyōi{NˣIqq4 _fBk uqsL<7zp:;S33v2Q+?\s/eyKhiud~S PT;~Wk;㳤Q{dJ$uR躉_2Nï9q~k.b؍8rAtfǹ⛓L뾕;#} :04Q ])J;0sA +J„SW#8I{mwZZvj:UPùx=׊lis;{>Gn 8iJ ! aZYUR/T aMdʕ']u=ڱhCn%St"ubrwˋ!ˋQˋ%ˋNbVwvttb&b&b&b&bbNszŜ./XS)塔B`O|lje %PNDHIzGa٠I.bbwtpX=G! |(ˋAˋaʋI\t+ _/gӏ. eeFZxIL+ 1I|&XQV{py!`KK`&dԑe Fa1kK񵇺HmY .[# ޒK1r`  V ,` 0E%^ o)ÀyN4"K=:,>{4j ɴ;E6JOAۣ|כ~ x29[Q뿉/]e#{ˉ^D^Cg1 f3w]*C-7M,wf6 yر%(ϖiɂ?Iֆg ?{;~PoHsBPzCB C N〲4otͭYUWt&=* ha&e {N/&x~_\M.+Z?UwmB376wh.:,66AG3zl)ԚКdAW|CfF?_ah v͈KT^)B[vY5^훚eo#4e@gIJS#)|]FJ\V ߔ޿; De͘bnzr]>kwmI @$B ptd?~~NC/'wWyOTnqN nV퇽jm1m1Vam/ @ޅM7iEq7Lvvj }^04nYyS+pK7y3kԶ1s+9@ K!v7)@S9_@rQӀ*fT s[1dz_h>e+?} iku;zsPו旳^[B2&!ިK?jzPj.0fZi5ܭJN.r͒;%F-ӐlpYB0W ]9TmJ/Cw{ev DSo4[RS&J;lڣHZG(!Ma2M] bZO.ӲdlI1|j4!ZCv4thKJDI]^ٮ*ZmQ{,F'"^܍z9|nP*w_)f¸9x"vp?,Lj.ԯ 9pM)FE;n4Q{vq2ٴwKл!I:Eлaދ4F?+E;'L ޾[z7i r*S}wN;n4Q{vw2nz64U4I b[*MT'xݝO2jk-eCֆ&{wDž[*MT'xݝcR,{6nmhȁhNQg {7."2MИx΋FyM[ r*SJMn4Q{vx.\ƾiwwkCC\EtJVѨ6k\!k{xF1gV-˵#Kèl>/۪%u zl%h䳕ptЮuQ;sx^@Z`,gZѽ33|UK`4QZ|$i8c]3Ls@ \2V9ǜsmZGTv/̉9cnȺO9299ǜs̭Z_ .s\!s9ܪ%(c=9ܪ%$:c>%s KdYp-r9[nfm9f19cnwcFs1i սS1sEeY s9ܮ%^Yjs9[Urpms9ܦ%(n]S9ǜsZӺ{9f8!"Ǭ$19cnP1ks9ܮ%hIr̚js9ܪ%0$cւs]Ku.LCc9-  Fs1i )ڹ3c9,7)~ĕF5 0Aѿodz[&8^0`mZ[`iM,B*`fHQb'@EAe\sT" e4b˭R*+HYʜ8K J'`:*SzyhƷ k?˜ĩW?N/aVEXOe|U atƺ]egpDo-t?53-7ɿRu7<, "Ic7p^14M* ^^D*A(B(Dvo r~$"//>|>&o@b &oq0[0,HQVZ퉖mf'k39[EMpwk Xd<:R,Pzj? x~fT{FI-%6l5Ҧ@XxY K+&GeL{Q@y}I `*,1 (aw!dƗ(g'j)h !e eqIJ@))G!UpD#dʘ3@Ҝ!Aapr8o-@4ESwR {`Y5[*V,UABcRc"# K 舧I0xBpHFx5ac0< %(&Rj! !G܃(8OFB7O;2Q3C r=a;zCal?C7჉^5eycFx>;eql6~q| !P/]Ę eofǽߨ?0ʉi w') z[x1?WӻiW}U#Ed_Y=@xeA3D0?/f%rrA2m,TCY#vZmϐ#;TB$V4@Qc^/ ka@b݇zBpr1k#"JN %q`nZ8B4B'rFAXطKS;OqSX: сcŁa>-^3s(||~if~h?&~Y62],ssXՊ_T͏o.s"q,|ޯ}aA^x_fj|E=AOplח^Ly~y CEK3-n_!|?n_N9;b3X{>E zk.`|JW#.kH\编֚{`T(luʔR0 BGV٣QK}!|=I Lo ֻ M? 
cx zׯvxׯt | Uɓ3_`WCx_+O`f3PdmY*C[Y1xkzһLS\h%ўL ߾{??_]7`LAB0j`TL} !8 <'(S<ϥ]=zئ~({nqMG\9i^4ϼp4mq_IGYMJ0e3jdZƄ䘥ԎJZqCZxH mG@B`IpOOӌ *'/N >x%0vK,K#5AS#=_wzh8'LI z; .t1ټfџ7bYMȈ+pꍆL5?9 *w^jGoxP 6PQ6/>]ؔaKQ+t_T]|gP;cȚ0`:'vtu5.LoOoQ^ |b2|?EɹߪB[d3oXRFK|䛷ǹ`/_[(lQa K\0t <GZV|Vv5Q6M~!m\%KxpI3Y\4 p9n\ZYK5B>#XOfY{!EjQZRk>׺+缠nW\4nu^5Eи7T,tf]%XjmGyܒQZ^^)FQҍ$pVH(~WHVIy^>PT XT:K,3&"(EK%`R oQjy?I$%#(UD Ĕ.qH8"pPhO0ëX0MkBA _|xq{(/;0 O*:?{؍Awl.=yĖ=ܝ-|ѹXv+ȌYUd2R91_#>zJJ[J%Rk aqAyeX^8A o0B8n Pg7u"̙U9Fc"zD O{MDo'}6}ӻr_z!˔Dm71꽸 gWvf?>,qU@3*SX:.f뮶#[hь`"qJF 9].{ 8j@XTȒѨE:VpWsh'fjvf~|L*JI9{nb H ,`'hK'Ew Vb"Ac9*BB3jo8\-Q:ta ͡Ok[sXu7@DDpLǀyYq喠DГ7qS$M.*h 'S?(N* 1 5z.Gh3_ _L1S#k \ >2@:~ '&@&0PKU</ҚqjLV_W GjWIj5a=[N RNp^˻…vqq;OYA1lZPd<} Cm%!e{=?\qv3:L ݾr6% =]]spY(Ŝ}$5[ɀ~D&(yFh铵s;rsN/tUh]eir?_?ۋ+"㎻ZsLc9o? v¿^c<;[sFK.EW3=7r< vX  ťuګn2G?~:]8;uNc$׋eF_v ƘAg+W?#:E +@pVt\jެ/qڶsNcF0H(m.yڛ'Nj7sp5}Je-?]GFa ̪U+iz?-@J@W^sCѵ'Z)iNZ͍2y[VLkz,=˨IԔS%hQK9h\(6URfK%%UC֓mnS]Y25x*2ɛʲ4-l6T"B*QTqOc٧%l p`[vI$@I.ZVhWmV6;4♥hy*J^RhQʝO xR]snY<Σ4lܦCzlQHb>Z4wtRuaCJ̓ga@@\! Lzm#{e,  Ymx~+?r K[֢{JB`J4r]n 5 I#mqV.ίoE}@)8)>$BD&#DK%EMքKNYER?<_h)NG htҀq&Eی~CP#nqy[z\wu[y y&oWgt~n S u@lU.-}dG/Q{V_,!9R6Po/CqSi5CYg# رخt hfX ٘},\xj1Y]`0PMmmfS!2p*pt"D9z;zDZbcYޜj[yaE7JZPoD7I[R',z[a<_swkь.w QslDjs3݅,jlCOrO@_nt<ہ<땬,?Z׎]sBTۈ=>̅==:.ôc';R3fNN<G׫⺰h ~jECʖPb@ A)4}Cf|~]V_ћ C;? Uo(YT}Nai ߹%>x&̽\η*5u!jQMaL 2PǴ)6_ ,!a0pIȘ7l⼕ݒ^:A9KgR_:CjG{Vz/1|1"4c^u=,ګZs<\}U~pҖ>^4M5P95%\):-NVk2Tqlͅ*Cȑ޿륯' *ncAvSԞ 0-Ԇ pcwhi=-F.0rK@. NYrrΒsSJse=5MAj"DIQc0Ni@];꤁];[I%#x9#cN"@KKII\vtm|4Ҕ'#PPxvNZm5@sոh`l&P8En+騠mj3ApZyW,'mKm?tE6Hd*kGnc3앖?nvˈ&(e>]sk[\f. Wןpx;TLftdDoHDCWg{gU?+FI:V+pyq6Mn6/ѐ^H8{rOӔ9q%RpQ~O >uF a8-,:6VY>eSl˼#tP3$Bn9Ri|H?ב]SQ{{CD$"lz*zeͥu ޛgS!ʠ WiǢ9ǧns#yi7%\kB93@xmr|: _Jށ0-YyuSBrc"]eiI87,z͋-}ߔvFEhoTX\2iW7/1lHbw%.ɕ 6QEcQc9LJG*tgI{JhocѾk  |oh__)rdL( HʟV^B^~pxI1-A8@yDXcW B*/t)&hҽς]OBiĻz~Dg i-^1OoP:F.`RsR4R0jM*J ru&zjP4*Wn%`4skC8N[t]G-R Jɿ٭ }^"=pA&ҀH.s [XAlġ@K 35A4G:rRZ'9/-7R|CAΘo%yHS2',}t=e ,/q حʋڇL Q T9+w2Fvm W5vy1Dven[Ѽ/02mvtpDO~, {yBPth[9k6[>+\0Cv̕ZwgaB*.:Lgp^!>:jD䴌mUꗣrX;1d&>#IB/yD^e`wƆ=^@ȫZSIiܶER*I1KUţF%"2;iE6=gBⲥCȅl06}sTgֈtfcs4sstq"ZoR&>orD5k9яS$Ϊzm =}$('ã`/,G<V֐KxQ?..߼H`Y ?7zu( $ ZT(/Y9$gVP/[[*@L٘K1_m*%Uސ{2-gxɐRa?z\ `kzpcoּ׽vbNm7rB w }]z [[9rC"mkZImkm>w阕Kl!gW4jڛc!R:j+nk0"2-BZYo i~)V;Ռ ؚnO|K8 %92*[IxlkIFx:~+75zܼ) RC>^_YX{ #8z3R{ep1 3?e9Օ9OX(Cl$te+3tBF$[") S ?l(oDTdYK ĕ2BIJʒ]ӧuU;3W-9T۶+=ƣÛztUt;8ZFeg"u~{1I;WUd3IgNj_1e3mo_Ώ[ywqV +;_{uO/%t|yY͹|jc|r^'YOMVJo]gV0ZWsgVId$Im/-ĵ{zWdy d27ci$ЩB z+f͛4QnQG@ BbxF= ZBx2(yҥ&U'+yϤkYas?F7atCZM7QO7}w(lv {_=I!\u3'\tŸiH/vnZKBMlj֪u03Hm>h_@Vrbm3F4U=p_ymwU~猔fyJWA6X>5nRw^N:3J}e:{b{Щf{ *e+(E8)L ^ Ad\bmek șV6 #Sm;,n.g{E4>~4mz ~5L9שpY߂2LblT=I ~l>5&꧷b}Mg&aɠŮ[dsCZ`дLwv2V:g? 
!u: ailVg!ߚ,>!#cBHy"a Q^"3 A`.x[@$\C֘3gοoG΢a4(7VNG=Q ":Y)gA30{0$ȣhm7=XYH%p1&8' Ş8jhÎ0=؃Y{[K#Tz8,xw}7{dTbRx6AMmH>7[QU ]U ]CgB04g%p XRXi7Ud$\ާ{V]'-n=_,n>|[|סp` zdZp70U+1 wGZao/UWI\_%q}UuN؆aPj'\zWz/)$rkjāuﮓvѕWYh]Amh+AP漘Ɂ<^62OZ"2'#(.DX>XVpiHIL$HV^6BYƼ-j |1H5Zp7Ţۋށ5BVCeA?_[UB#tmkWIuӣ8"wrlLD*Éy{/D'U#Coy}jܿ@E!g?]BksuPi[KU%0R ..cΡ4 f2yQIz>MMEI,|CZBjQRZi)ףI.aFdV+X%Yi4 fYVˆܛy6I_ cZJ60վ .ƈ bIa9h+Ml  I'[ CMڋp m)" ڏ_,L0z3i+I4vN9?itߕDQ^ެÂ!d(ׄRv=)E92c @v;7%o:A8|5ѬX z6׼z2yd{0챫D5z= )zzhQJH N+Cůϫ+d ~&޸84\fl(_r.Z-fAJdt1p倽7!<8a(p=9 [ Ʃd=YWZ]2"\OR2XUѲ0<,-+#/3dVZKΈ_zrZ ,ȧbp oZd\a5c^0%TJSlIp=I,cݔI4q4d@mJ D,,phnbK5J匽R=jWpk߷yTc>72Gf@`(PdR1:)2fNA l$pio&SIJ}e"e'/4!eW є]G84a{2Kzmu1'KU5_=FÞlTi=<ɠ^j8?(i*fpC0i9-s̹L=`60T]sish7noP;H68ǁ롪mmiA 9&uҔ?pNœ0Lc7!ϖ̜ɪ[J:%jFVj=h;XǷk=wZFNů>{C j?KWMe#}4<1qb*޽}νd'(q17[q*ALSF_\?z2j\|BhFrѼLĘJ#8pFZ^Wkčoq_GٟOc'/fq_S%eq}V7/~1s=1"CfFFHoɂ&r[aduCKV7AO, :G8H!E'aPV*$Wz wSBXmt+`'Z@[{ٳl0}}6 W  \,sk}- RaLf(c#,HP?sрA9Yd ֊ҡs1[)30AA&Z{gC=NH0:,l.RG\6fnY2-[,Xևl\KsQ^iKMCk43pN#ȦmdJg*>[K`cZmN`W)1D>@_6/(J,\*K%ҭ)+A>[1y1Z'WZ ,U{exɥ nMm N͙5#ZBj3IxY 9p 2PFpkz\^TvDGur.DUkRJ. ǭ.,eQiji:vDPA;m-lq-Sl ԂJ8TV˒Q驡9;IpN@2nYқQz=H<(MOD4"QAjm)ijfn/J]3p4l{~ _[<կ-+#B.llʶ39)K5Tbl8.[t^{"1I%^KcEEm06]3#S{/qND/,Ƃ 3,VJWh b..:KGdTM49\ǧo ѺI]***N~z}WBE<!(aSKY∌z K'BX$ F?|WyyX%yur]e@R=X͆\C5CMÇu@pK3s|dvŒSG}QǴX)2'h{0_T^@O H;ȎFFZzu7nP MQ ƌ^Y٥׾2xbIĊvm^y2hKEDŽ%GkWDBDȍt⇹rVrax.ķ$q?:48#@OaN~6 3$ M3N3*di놦tP{`FւCl٤8J`IlVHmh¡W}ӌ)*@GvmQUé1QBBzN1,TK0hga.Cigh7 iPUg3ʹC)1 S֩-N@kl=/E0zEaf  #̼ZA] ]SL 2$Bg(5` )=J 54*Rn8Rܘ܉ȣxx3t Y$h;?(_]c~Vjq@c``tP~Ii2QhCk;  &a l>`9EBE>Wh`L6*kmFEbmw Þx6fd_`YRd3SlvKnlu,( ,u#cD.,HqYIp F܂M2{vEp (˕p!kX rIBN@\YxB2Nf՚>SܺM(W~Ȯ]@QCV6/u 6VJ@uTR' k)rs81$DhI֪lTcM!7,̖KQ.+be`frxqeр`̯)M,8 ;qG_cʇ[iܿח\Og8XdWv[XօSZHN8SI<>h s_F,Qr1̝P؛"۫O h޶Nj/d̸He  xy/.2]VKaWRu:=>V5Ͳt}7QkXfWv9Q joKY󾚲=kRqݛ %]w|;joc7e:Ԡvw:֯Hԯ _)\ :$ȕ7TҷJ1h4 J)t$0!Jϭa!SJsif"uK?]?#Mme6 >XҏNd́E!B+-9<*xcr#B,/x$RI C!V-rI Kk# A)#{ kᴋ=^BΙP1բ E`7[|k,2F36_)R+ziI^r3)`"P0:%5#7Fck+ cc+ Ҵr.Oʎ}9ȳ#i$JSD<: T1)i*37Bz"7Fy Ԙg }7WI=G1xg~0WGdsyw㛏-eG;oPԨֱUg޻ xJRXBC[vqڪ{W9;3w&F+oòP'fs:?4p3%\|Qdzn7b#fQи~ W%@8u<,Yy7z7'FL.~[>8*ݻ\ף+Kc /.2o%{B{~Ck/ ?|1/^; `:Htgk𠄒B~nu'Z(~X-''Ɍ=\ c_]}Ŕ ١2;^%1N]Q.,f<( l1HT&Ƽ"+2Ь}]a!eJmֱy:`-sՍ@^i?j~olmuUϢRzԛ&蠛G}/Ɓot!qst9g[KR预Khpzh"]9e#PMz*;݆n/;;|3bvxs=&N.)EM@8]6Rpr6#;,l>w͘>\BRyٴ򦅐6!zW|xǓ7'w9Syp=/,.GL XPqMC#)\ߌQ+͜C5;BrY<=A\R5?ǭ) +#\Zeo5 >@s BTNw LYyq1߆ +,EFqVS/Vc:LfMy-s&d1Zt߮s׿JsN&IMoAhqJJF~{?\2tKHqg`T(͔QYN)1jI]AUȥ/ȮMЈ\4D{F .$'X? LH3(8<3?y iDz_2ngy\ٵ$2 9!:sFR xBFE|Egb[dXR ʹ|?~Xi[x%K8F#jW3jlOL5BQ'=GS[qsk9&ps5#D K`.u-.S7˂ZÄr+T|Fj6201ʄԢ(x̐`GoĮ (M'!eRz\Qlr Ɇ5+sbHLx|]Pb]X8{Fxq"}P,Ȁp 9V|l|I>ZZ\n3[2WC$=%Gu0E T9k*2B9g?.@5TU JV"WOhȶGM 9. OQϧiJ>Shff[irB F 2lPLr)JC G#` B h_;Uc ur_VƊ@ b$0cm!F ״]|j(#(/)lkI/jNZKAXiˬ47[*t7R 4CZĒAXmYF 0$ EO$7FG%c4^CZ R8js4=H&Fh+(E[7&A[c24'WvQ)L u#3;@X'YHPRj(&D槍rbG7ϕ45bp SɶnLC1*ܰ?dCܻvmB,C(Y(k3f ܄N!Ӿ7߂|i6$y S?m bȅͬ6yF%W . 
[^na$IBY N.?~XM[ eVk=8j0:SouWܫYeaoW }Y69|!i>vgCQ+u?`!ؤFv';Cs} ^ſ"V/?>͊,CWO 3,:6ai33患];&2]V*i>w&]'td@un0#* Pd"rȴ \${dC*xaJJP$62_ey332(4N^у1o08lAыhF/^Tmt5<( I)rc<r8wṕ$caR gg|ޯ3h;} 0EB9圃eLi|w6XRDyA#);WZ vr\GPB:Qz(SZ|5 gó` XM f5PRBR\joMNJXѴ$w0;nvm}ah47_P`PNSV .wy\bbFU pjk&bĻ> C1bT4hQ |sj"SPI@VRʶnLLfE,lu,Jr WۿC"0"?jWp؋K>vg]kHVhهSz j2NϡR˄4WX\1Wsbv8ͨ.qW IlVCb<)4<2F)+Y}v<0cx[ҵ^n 5ƙ?O^iWAIs"xJk+Ӥ>tA(oa,պxwڗ<0lұheŎokHs~jLhj⩏d 8y5,:'6CIp(A?Ci#WEEaY7nTCr-/i[X8@/"?~Q]63/a'OQ2 :Qk,VlʕGjx eԦқ-o"z߶iکn~ nG]j"kF7}]m`YA:jSIJ?翣q,W6 Ne8wtY'lHyAΪ䞤 h }X:}VaN5DLm t/ 衈V{uD%_dAhĕܯM7s9;>Ho;jV$&&#gZhEGtLt/w1%^p -,6tͅJx^hCTlE},iʟ DKx'^Xwc;cIqAȽkpc"r,QIimjRwS@m$5B˟lbM0b5xx'bL$ _|89bY?46{sKUBnofMXNzKUԞ-К<"+lnȩkyU⎋N@rbƟ~]xEP8!lAKAMZoo9ā%eOk@kWrL?pL֒rK^š3.({/]g6YۯU{]U{z=+pA$ /ǽ /tu ?D1j5~cDNYQ:p'=r OUZ4.֠a)i6F1JnLkۦ{܆WdAڭ):?,7喯#]&2mx|-̉14 .r~}"ʃ%Ht;o Z)ν$f* W'hzt ?pޟ,>"s;A+.~6=?1mؤ֦zCm@7^oၞ>'T# zW 0ru7kwqr-ZxVq٘VcOC -c 鄹܄ͼAhNiZ6J"E`bh`* 4 iP8h Z- y Rs3TRmEטƨ53<B1-7aVe 'Jʾ5#/RHɹڤ֨L-՘N4 P#cPA՘~:k3%e eϻB&mOɊ(x:_, :Gnmy?.rqwkzQeL)m| p~^67Nm"Dc&t. XA4ʇ뻬-%F`-:nx66Ԧ=[w!5bվMH~!nw¹Ňe  jg tQWҵ@{r Ia}wh~NQuDj8*F`ˀ.\N!k@cd.L @R&?g݋@p饱dA0(CQTOA8{fm{91 RP̳wO^:i%rzeBa@3GsE?J6Fy9n)i|x":508@QA_^eaݴ`R†aw$^8~i Z@ϲ@haƀf hst.#<L?8ɉ5"#'ҸTqMdkz;zkC$zQrUj@'DDAZGsAh<@0%;X;8aح*ƎG'؟\5TD:3@%|7rGD:!~eł.}cYb`0eQ@Xu5cr:&'<6ZD]C\PH-%`Mc(!\3}#]8[tJ$-t=Gp /PhP+} B2Ȫ^<(KD66*X}Š^(8 dHGDPA<v9!#ywj8V3V+1 YHYl pZ ZSbEv2 >uRJoT;.Tf!AP EL(4vM^0*Vo۫v,c h)P^fm;);B q@3p.VOM)ZS}D4/5$TSs-%7ƵN~;9Y-m%7t|?L㳺6FWXP@*J r3H4P4 qG5=>ŀp"" i44F%8(1~cMn<0\]B$YsG$?bj=OjAD( xkI:sV;Gv&wD{>38@oW}$6Y/ s5OL F{zh׿ϫMK ӐOCdv\2dU*TL-cg~꞉z<Lx-UG)=Ajc?[?oҫ?T/zƙ˺w{aɴXq#H<83g΂K|29~%mC @B!Ҷ^941FMa :߭IɌX41qi }^yRr cȥ:|WQfD:*xi-(ч:ët=fpߚŇUi''^ A|8(ʃs/r#C-bH i& i)}/]+P|_W(A|5v*ڤQT(ҔLы⏨f6@x@^$nn XwgoY/RKyF(_WjؗD;Wfqeh%ZiųKK: x1KszOpV!ZwpGyZ^p*xi:ƚ1z; >N#~_YEP(Qrt".2g^0@vFq3}fsc=Mě#3tٖ)&hٶZI"OȎ1ww]=&p뚡HGnbxkG1rGʹcM,v݊m,K@v75je_/ʿJZ­M"p?ޅw7Snܒ@uQ Э 9o?_X8q fyD/lLN# @69=ZehxX5+{ ­V)Qj=__~yٔT^녻Op`jvJc"I4( @@Q1ƛ3i>38dԬR[{ \|NFŸ=DQZ0&J) }^ .S3ASxvJi;E VY)hEu8OKORMwKt>$+E*WW4E%Wc%T(`ֲ$IO3J,Z9hzAE<9\.AdR3Qq9 7\Kqq%#>׆cM8MkMk;wj!UnOM%;#ժ3GE,́ǩmd{6ٔfgL͏yaԥ`{o``y.hR섲S}Rk] 6W-Yׅ3𻺰U6qј͊HAQ a_Σu&E6JSJW 6Xxβoݥ'{lXHu:#Ͼ7 B2Ԋ^#/G!y$٫TǫHQ%2 rh֬1'oO0O_[hklt=du{C=pՌ[%}QjGt/o/<M!xLT17jkX*~316YMҮ41jnHX)8TɟA~fb 7*Ί$s:2]Tn<8Ļ >6 y(aWvrQ,¨ P_=^Y+E&;bqCOL1|"5+#TZ>i:ƚ1"9[7UPOvi!-xHV}k%+sk$mȈCXHy=8|[tOxR*M|]_΄#&'I96ol·}i݉%Ή(; N.kWW-~,W]MQ" ۇU-9xh2 enٻ޶fWS/qc5>E{Ȗ#N\#R-J/1I\^%> ^xu [oV꽥^͒ȇ)Y^yKSPkgO"}DW|^`"*EMi22=)-V병&eLxq\6;S 1s t>WS\3W%:+_|}ybr쵰[Z"OT_ r Ű`Ű5] [_5x*"k %7ôdSīy0KL0,]^@2=Ƭ >C]]W.&X<*2&nPui]1 a qgӖwך+|afلut\<,N4F:se"m./ӜX*i!+R"Qښ(-i\75&,xУtz3D_5Zqɺ<#ײU衈VuGNW/S)[uPg=Uv %Lt# V:;hP!MZj?1qT Y.&̎/%KWUJ~DiS&|9z \K7qtg`OJөBGA·.u&H0wt/=g*RHD-R0~:2QseyI_%;rǏelҀ&DQ+Q|#?;mʭu*4u4'­?dpuF) #vU3u 9Q@1K ւ ;uP1$,QQ!Emz[4{yطb7ZI UvkodF'VJAno4@е2[0S /b)?̑; 2G ʐLG.ꨡ2[P"5ɱx*rhUOG]*ZA8U_+lkcmo?M$@>>e?lCgݍw;k[6&ϐwggcws{}wk?omto{g ÝϽ6ul0a{ztQϷo[[F?zpCzN niLZ#38Kw? 
<_"qqk1oz{ƴyi醙㋈͵M/@㷏zg$A+0@IUս?5yl>呔&n|;Пi(yZ[pl'UU<ܕ ڳ')?ίa~59XscS'9j}p^sb-f4yyy^Z; uw7魇9K!`?lcN29I^ =$y2~ mu}kƟ]ֳU۽ZV7wz!?y-X >]=IV~ތ0$CH;ʗo+ߘcc=׫ٳ5}u_o>tg=§Ƕ=:}GjK'Gk?"/vڧv.o cW^1t7Ǝ7ׯq~;4#x~엀x Dkku׾N{~ xrt4Z@Ӻƕx<ԥ[֞eafyܗ 2ȓ(p('ӋB _E6Q?_Tfe)f|iLLĩQ:+UXlPrf)Qa$"\TɠIXn/)?M` ,7'X^Z4$DNMxˋIo.sᄲecA(3 |4]KXD"*[*VJ듚pNκINJ]IK;)uig6u>5>hULHYG!QԪ$ZJYXGzpXE]WJ^:+5E?٭D,z,yU)1G eT7QFM`p497D!/H9"ɆH63 < YH"C$RjAS 2_"Q~nBTQX|In'C!jd:2 a&w<y8RϒaiMJ:EI#T!繵x C8z_ W-Zx!0y:Gh!0blGK8BSOnu)3ZaE H RQH ()xBFIǘ-V\uT-RZx֡=>0fFrj0i-F逃1 :*D:K$Vx0#FO`7 ޜMc2, di0%Ly$lčānWpKZ Ea!C64"5sS:ecԑc)\66ƹc\;ܷչZ* mw^h~N;"oEw;njĈ~醞 jۦmk;KO|l-i oF#(ux u]2ޚ^%f.|P[ Ԓ+jJ~-l<$!1RJ֡ON{ 뇼NXd}n%E}\!b~S],'Wb˥_Q3.:=2X]aFb6V1%Pъ[tVlm `#;L,*i3żw,8"M S7E 5=U[` Z2 1#pTP]kRM:[nys}|"W"3Zn 'sw4o!XHmn>x58b"! $r^ILZp$=Bpv]DW1;] VcL缂b_*nj}aC# 1fBZ0b]sFThUW#aI<;mΓ^HV32)u{"yu NpI^;:kcE`]:9 )Csi荡m"ʕ4 I8:# {Z :_G@ۃgJ՜-_՜z mZUd^ ௔+ hV-D4tYBʱbmOA}]{gc.jp?{CE&3u];H 1R3P>#L@G B*dHT !al %E#*Q@@:Z9uA'r홻 `QشJ[\QJ3~̤0&8*KrGЋKct v F@DqEgBxЬtVITzpi .?{WͣJʷ= p]itvcg_qd5nvKk$Q !HvBtA[z[Fy>܏\f4 Ǒ_ɇC̈;ɘ;4YI2{BNhSqdnKm4o]p0C0u! ҏ#ZLB<(o4oRfqf2KiSfzGJA}sn6Lϗoۯ/2׎Jys+<ۿam8x{O{kKF1s>z w;w|c^ mB{5cln \p/^ĩo/\1ܙYfNcqS-kMc*12lg/zĻZPOCͰ|n!:OCVQG>#n-Dgsxr벣r'Q|uWbx**߶/_ ?,6ƽ~|vyۓ_j )h{rqN]7M]NQJ9Dgש mXp렐 Bχ/F5ǞU IuRj|6WDKORޚ~[68mf!^K\ܴZP x5 CMmm½ H'A;£@8JԨ)q`6!LG蚨qF<Cs̽&j ѺيJPzōS RΗ4kaĸ˚XÉ5A/k|'J1}W9Q=5|SS Տ% 4eңR@aN2fC'U 1D_+ءC4׷/{z)r;Q,654̊AlSiO!GbUpIc&bYaLx8"nc6F08GLiS%@/{oz?#P`dC+H22#N*Z5gq읚ff dwdR c[ڙ .a?JϷo]f#~wa:.m?^z;t%? H@3߽LwTz6/_g{׿bCy'BS7!h0FG]Dһx_[ζЬ~ rc>GۧMoRLydž&'vo' L '.eV4$~:I#! &41$5 ܇*' H bqNNloք㙐oEiD]{FdU6QċC*b$`ML;蔔 l)|H 6!\COD@ >!fԚZ;hvnȥk4(ߦ5_A'@`4YI-ۉ&0Y `FSbrwcĚf`"+@YI"S{dc) CoLsG1eu$Mِ*HH+rALDR)DwY Skց a"l%7',I WDcҫ0\ޔ XՄ!Rs6a䣊&'IB!dΉxĚ+Զ'=oEX IĚ{\.1p]]v[fACUڌ`$M@ 4aI=b@ ϼ=^S8Ӎ@V䲴iLLq+߃I̩! >LܘMy1mP1<& @ԎO$2)d13uL.IF$1܃HKGd6ެ@bI)iDwnTQ=ׄ] ͈Ě5o;C SbOҸL)= :\ˇ%y6(٫ְeC ġ qI\3(x;f4FXJjml)og:aKb)Sdq l"%M$4޺hһѪ`CTplx+?vjM0^^zI"v+Lè̑S;Șzv6c.萇1{ǁ$C.%hV"ތ)hJ>```WAFDCfS܅̫(|"4t_kݘ RB2K.qw:70? .P@cm=a))4&LhHTt nFt[E^Nii:ωjp;L5˗'gR &̥Z l"LlsiWV&"Zf@+uj`єMvbMlGy9leT yӈ4¼1jpa"/@ncm Xu1k] .yjj]# ?rvjE9#) )FVoy暱S22@ЂI* LaM2L F'x qH;OwZӫ-G 4He՟9B!nE-imah@;$uѶzI a("lm(0Vtou@ZMƭZh hFoBٮXf3.iLn46mBǰ]gOBe#28s![4EEBoS߿}nxyif_pY 1kGIs\<8$Sv=HC1,f|!C&r?1j3Xzp= *Acoǡ_E8ln`&{1:r! :c7S"YZvoZoo5V]l9KNyjj'87PVCm׷m]c]6 y(3Iť W6lҴMos^"umqϻÐxݞKmu]t<6- |][o8+F,K< { lЅxƱ3әb!e'm9&-Qb;#+ůYQ\ d_V5A!"ʷj|.<f ))iJ-3LsĈNbLQNDPC3vrz-3s0)^vz7'_F޴1krjsYޣQnKǫ4_F ,?̗Ohb:uD#> ]NfZ[|cJ`'Xl SdYIn&[IT~Tw(FSیHo3O❣.qM/ߵ@nhl^L. 
I, +m\AnvfɅFFH22$JhAjP2/ SĠc8Swfֱ@ecX֙: hXO4Jw1?Qߩ䴜7⊅J9*Jǖf,IƋ5}81a (1*CL4U*NqN c1S9qlt$\e`.{$i 4h [\ƑTv)!q(͂$2B-,sKzd&xZUSgL TѱٚRvbY$YJƂ(L3A;+Q;/ܠp̦G~n-m3TqY>Zjsw$E^8nf\&[, 96 ^Il#,ڎgX$Р`gXn)~pL\U_m}&bG>TO(-MΘ:]j^Fҿ#l$`1 LKm8='mN) (6~[|?ʗ;(j~1*8:Jq\ȗ5Ź WudXnmfT7gM}7F†Yi9Mݮs O1ҁUp~iEڎ.{cj'錷:׷7IHĖfl[٬"Ac41v< P9]J.-\  ( Ew ;O%(Y8njx|$@vqЙ$G?pqWMUJ  uXk=yXo'7Ò֙*= f制9%/ڌu(e.wqL^HHg J`sO,&&&&e+R&bZ) A-%8':˘1δҚce됒PęWEx}u_gтmԎ{8]vSSm'̱;chŚE _6xV _?|㦍_Tfm 1BcZAPs\ Kw&@Qgd;eé}q| Yk]v'a}؏"" eT֫-QVl{..mq^j,"+?t{ENKo=|7>2w3Y r X]S֡o >ӛ,~Kb^j^==<|_T  Wut?ݺWQ?kQ.=>c3W~k=YPj/D #-QV9VNkq"qL f~AE+h$|Z77tnODj/)Q{tWz{jnKdp,~l('@w(t J#}.K&D\#?gY;;9BHwUCO5/RNm~z74Z[MJ%Fkʠ^~ 1)kH0S` WPe+pj?# :'{2cdJHfVHrf4Rjy1&Cr2w82jv̺[vĉi (];pa`0٨lUؿز%Gmo;G; ͗xy3&7W)q}T DYoN8 !OqoJڢfH0f_K XV%kjDt*wѤv.PQLnbMnbMnbMnR625;璃Uq+שI Z 9 ̳$Mi*d#rjQl &JCsn,xúBͮݣN8bg{4Bv)(P(1sLbp22f:G0׆a4G U yDtҚzN ;Q˷gJrNYʄPĘ"uƔ0ȑpbKBtMӊ>+ĮO+zVtD9MY\g /rS\*9I0Y1ᒗ<ؙ3zm] dpMeߎ6]eeedLcX443n bF4 k21sS&fcA$e't}{ L-*#@$Bt> _nj m+(}Q+-=+Rm8D~b3\=3u`즦eѷBS$b0M%ʬ#2z9Mu %b 4OlS }weMSƈҜD @GIcH#4I?,|7 N* 9 8'gg2۵DOS7P-Rq]?.> #ŵ͌-)P1^f6& 'TX>|ppo**eNA;O~ yYlƳZyDM#W 7@h3@{z )`¼풭"y"),Gtj"#eKC]͹ 3`hPh?1QQ`2hm1Ā"([?8\܀a@ C1 P=A@UJ\r{`hJQ#JR=Q.O`AʎT&_ϑ-D׫#J5T(t#`hպ|s<36VQ#V5\!s$v @"\=N<.9#?얄m,;]N,(iUW 2}X6]%ӹ΢43gĜOrjZ}%;&2\E2aLGLHrX?9COz&u %-d#'Pv_8ڼ5ySvo w hbS'9||a-'x=[E6f]5\_Lڬ(O \-o/8RM10gyު!zyM{fO2gؘ8.uv!'ueÖ4 *XlLD aD mŽU 59Þ Vn"8M~@ ^N7°F@0C{×v:bzfY8M:XnxHɹ)ߺ߆?l~/Qz;}-nx~ %~wQx56;q(ݫ(CVB(zo 5 %Y_U H5z{!G|Ua|ڠ+hQr5KrF$U.=\qqߐEY*rF*i$7ABsv-z]^zZTpArw#'::!=#>\W'jıCa4m!DzGןwTv!%7tè ABbΥjCMbu{y G_K78`]ɻHC%H!Ky \hq3yAh(C`%:C_+Ka_oDH7WG1dawӡ"c=g<3Uٻ /׵CPAp)!ި)sKf=CڟU]<7X!0џlcf} 1^#E?ڿ~zߣQ ؗ1W3ҸO2G`x>'vb;o`"ESr&8Le4R.%weI|;#6n{g7V57HJKR(R"l)_DFDf+4F=u~ɳX2u{{5S݉Eqb'w >*Ћh6)M>ʠ&{za&PON6c(3̉2Ü(3̉2jLU> GZ#$iKc&z-b TXmEk D-mj%m|.L7%.%jhbCPQBUШQP$̛L ]%-mj%m%TDEeelT+CyءV%,tg.OVIȘyw+2;o@ë6? DV}--BCsLX @z a01 :2^=6hw %֚ yҕf9ZǀWDɽk{(0x3P<G8\KӫwBGI C1 l,Jk%k/o0UZOIO6ގ'3|]}ZJ96׽˳73@`4waǓofQ3Ii60gxwuZN h䀢|Ϝc٭QfAԣٱ `Dn< t/(2r$Zs(M#c}=H#4-u"8&T]LEz }ʶ2qcFtWZ[o]Lw?/]\K+y&$b*LGy&[\Ѱ˦+WN_׽[:]T:,ɹG0w{:CY, J>H'8v;I*MȕDW uul Bk'bA4}/Ka2.4cusY._'i8 v;rNW s`徫l=F|Y3s·4[l3U<@3-^lOն5'/|O>{>JjA;NR)- mkDMHdTM l\>m)]v&BK%nyZDKIyJf9R,{KXgJGW 5ur,@\&^^_˭}[+N nq;ϣ4)9=KF FzIaHvtqw{wY[ϓ F |bi/MMoe)+Ig皶,Sgm@M}NƇrz+??1֔9-Z`ɪBXf G `Q57U*dz'ψ1(g52zp?d!t$ HQ1^'ȣa { U $k V19Bobm-n<+&@M>ԋon:unMpC'㮑I8H j% *˃yq zAܮzbPX2PDA$!VN*4G"wѤ>NLWvX .gQ<7cI"GdWd5SCj lzwܓed.1!'ړ T6T[)tDWg]^zkwTE7"W_&U#辸뵊[PӠEkV(^UsmAؗ oɇ-jiL;WwQocT}:;lǘt?ѭ2h!S[&TV`=29[:tǦVg=G zWVn{@T%^o= 7edvPLfS+9SFxe !1!k'?)ﹳ>0; vt]H 炀zd

)sU1SbT3LU̞2ɽ #^ Nm>9B8y7K|[:g=msSJj!W4ӡ0!Qϔn8QfuY"K~VAq3F@kƂKΆ8) .fZYgYNBe#&adC7yն|dQs4YMmR:s(XDf8`h:TQШR]|{,!XAp Gt@އt~NK+ޅXF3RX`$Fv+/iAs 2z)21 m"T)/ ?b{8ś@qy\QPNSGrƍJAQ wݚ&K9*i\3,E[. hoE/wZP@izԊ& !`E& DIJi C* &RD#Z(ۑv>zړnWͲKWK]qӧgyx} ^P01hOAGpR[^PLMZxU2!g9O_4KKB5_9C<͠%ԇ s@%3K̗<4Fq1c9@)RklP&kݣ'kS: 2dIq\#xq>+%\(Wqљ!DhmRtќlBs2ƷJ@گUŲGe׌G HɄCDu*`CURz͠\*{_۽:;?/K* 퐢-RlK$Jlo~  >Pn#>RDG"eb\wufeI^| h$R%T*p{ <ĵDž4XM>шΩqFkAk0g$yt.O%U2jJvߖBڤxHڦI*\'SkyK\4+4>|j)ק6RȂQܢe̙l{A- gee㣱uJo^lc?oV 6p :r1\&8YOtWymQ")Vqfl*d̸uĶ^auYZcwmz9`{%_NƻX {f&Ĉv$9$_dmu˦\[TwcUHc휜:萟'PEMZ4ļ-o.Jw;v(9;Qy}\}rZCX B*ƾ㜢e1E\,)4#W("9d݈5xX BT'x9LA9 лА\EtW=ef~?I-ߛ"ӜiNHk É$H*j$6KHt0#)$HYZ!î |u7ׄ<j񐫄Vy@/!,,%-„'e+VBS %@hm{H޽ܴ6k^=r?qC", +&ԸaY#,VqD>:J8Ok_rۍ~ uC8:$&|1:1-ar|41D|.~{3BKiX) 'ӿQyI%'$ry\ oY:ջr\T# !VɈ﬘:蔍(܊JFfńkFAPj,mS>'8hN|F{E)ZA QP I9ђ9"e#KeR#Z; -A[H0Z@Ydv<04~HۍmLé/tzNLbFj$"ƹ 0A`Ȕ Zn-(Պs RsGAػrZsLm -vNYh%aZ%RE\ڮعĶ^v`"*c%utR^ X[,dwBZJbKAb'Jh~u Pr 5U\1 $=\›~.⫓^H!F੠OxAľ6< -w2-|*:]8˛{tc$TC\WiaIb_OK`sgšy% jx"* l6|ׁ^ `q"E-o ʹAt0,oǗ[gLpw9DxYtTIH7%5]=8O,BptR'SN H[cT@0$p/$6}VN۵.>bH;80OOUoV7/7F?c9aIlYTbHݍ/1 ȆRw`כz!_Nad0W[6<~磾cC n4 *,0۩-"Rయ'H6^Yf{Llñcyp\ 2̍I ՇDntiO͜z[(dfQE,81I<㩦$>yj6I¹Hߴ׻BsiI~p8shwӬ!9fN"ditF521^d$}ݫ+u=6H絭kY)K6Qw6] \ƪ觍ŮAN>βb0oOv252A]c &eP)ECO?3<Ƹx+R׮Iӏc7)>H=a>(^b%bҁcE{acZ<ИbDކ Ob#sރR˃ z5O]9.F-W 6/R!蹿1Ck"}fH q؊K7C#H&5l]C XÔ!knT|_4y,ʘNWޅG-$@/Bf Ak0e/c͉w!yF汐g\ ORӫVnNC:K?t:cMx`F?!\fs5,K$CHTn{')=N8]$u`xnXa:t( TJ%^=K^ J7~JRmj" \D\[ z ηJ}"E8*5r%~Q2d;;d E2rckBX!aX aHcTI䐟Fm5zbHX%kA:]QI 2+dˬs쟜 , b y#VHXHr< OԸᑀRsM?8Nڵ.>b̐%@9'z'(ܐdQf ha* rM^/ڶޓml+qpsz''OvC(q)3jn6 UQ:i ;jW|Ď(?rr o׺ P-ObMq쿼 8{Ww ^/}xw+>{A*/~SeDĨb/Q 3[VXp %JN(4DHfo(ǓLpZW蚀d:77 EUQrQC(Q>r;pWz[Z??7v;9smn!ю*[=ba +J- [Z(o pA)Gz1#u|qk$8dipYPjPmӔpnk~?+ϗfh#f, lOLq ٔ<#@HJ8cZhSiXǎ-J ŀ*&0IԇX)QR[> rL2;_oW?y8!*~P0|cQ1~_y'E '] S'rZČE3ڨV erGveykC m4du#8U@g=M#2?0i1G)} "F%>qŒ3-%/%,!P ^9(9;2νǘ C]uq?P`wZVa$4%$A-L: _rjg Ѻ#R3m;e=jiHG蘲`B& !MRE$x"uuHP* ?4LXhrD `YQ՚.  2@KZpkjo͆&Lhk2+"rv/r4Fa?]OzA_mV Ʉ+sh }ӊj-=0K5S˶ s7A2ֻJTn `[ "Nb!\ s_iP)Z,ȩu|X O(w|b#[ÿ "Z,eZ+ \͑8õ"JP,%O.QՑ ̳Lܬ߶럛OAt›?c%l[ ߴ"81jԣbljnǡd\s+%b q^B);rZc▃#*4NA ?QkŏVCŞ*2__f 0 Ktz`}ݑ蛫ۻ倢Gb]֛0_W7?\,[/lsݮ7{v_cu}| Rb_ۍMC8=cD(_>Y%cٳ9gńψID=ė [9>kEnnz'j~X`kn%GiND7DhN*:[rFAľ6cThż[|@BC>r))޷:^ji4'#{_V> 愔@,K!5FsҤ{>/ݎ<T<{>g-@0c2Ia/"?=uب\n&Hְ* ĽǾUi1j"mZ@ 5F )4#W(c;ӥ ;\ D@[cڇ"H,q+mӖRBVL%8bb8g&~Is$)4#W(¸z-tm?b[5oe}{Ytz1(BѶ B+AW//-k}W-W˝WckyTxZ۬L޾:u>[Q~zG ;% Ko/ynzZŮLv!GPG~uDȞʲ)(,؆0V(uLuܳ9LqBn)+̴NաfZhU)c'SSefL?R2g>P+B7S+0DtffI9QӸ;1s&Ֆg 3E`$ElK~/Y}m<)˶S+k4队mvZ" tp͵So!_v?ل6lb ЩBW/ʈl(O/0xzfCzuǯC׋4#<E4{w}NQOj%ͨS﵇)ؽz搔@:5a{2}=^.c!9s7QF;-q^|1VP )& 098a{}/4%ƚ"'~@`b9 AxmzL|As+-F!|c"aѱW5*wأQA 81I{L9hvv }'YIHb1-@p K1WM\MQmD$|g ab{6'ruxϡi(ʉ(|i!|ة2kt}*wn;x13v&)|XK‡Bx!Wi'aR9Ei PA( ]^(ʬ5@‡@xWSzYOH,8UP*l "4)PXqU%i! e*UT@Tf9J]>#|KSJpofwg?"0n'`ހ’ 2T2RVr,`LWH I݊)Ī>ׂ76o10>@nkuKz#c^ZROs/?q*Ԅ'rB4qZg(_pk9>‘? Hj 6΍@" B5s<l %n*_g/-2NYX?v}nfY0RoAd"ѩvp^◠XWnQ6zFO{!eEx> DaO&{h 0%%H$fe,)܋$8L%b2Phj9祕1HKJ}=5h _,!%]fǼk~/|&eS}#puDAľwseyhAC[ MMIpp1up`bȩ~q쎱& ƚc؀vF:)&H<=:׳ KVੑ:N8A%d0mx.I-4LG3KJ[( >Ix,Ng"ȳbr\N*쬘V;gC2)$\$?RF-B9?O2 ?|j,jsnɾYcE0}wŔ" i*'l ֆVWl/N5\ qR[) і "[vYGvC,ͲUHue|)%'& JHLB mE' "ZvZhz!xkޱ^u!E LL,,1 # ,k$?LCLe V\V7 Ft];q'n4v7iMd12E@L[L0B5ˇ t ^ ; ivIIG3do[5;}Z2/u ^?OYrFG\raӓguYUWPC_ @E}2iyM9 < 2Om??mx D紭̵B7?i1z?5r]PԶoyR8XfgĖWh-4ދ\m3=Wrm:ypW8ԺwKIBAgDVc _R^j; ^;X_7 1XTᲲ:\Wh<'J6Aήׂ6OGxއ"<<ۋѝ))ɞu} r9~uzRU-A{l МLO;>v cؽU Ƶj{qu|LbT$20bLP3Bo0ogGr<Ǘ'jY#)B חFmO/$w3iz3,nwO\  |7e!* IL>h1)7  d%Y<@{!t1ɝk=Քrm]VOw~8SH;_HcˑԩeH B0:? 
BXdqgH!v c*wz[𗾟kqSƝh%0X:5@:nx)¯;5DO0LM ӎÑqq(2Xx@x-'рl<"#^gF%OO0hV*七e ̸Fu G4g vrqLypYGw7jmG<GڭP,)7+cER挔lPnM"#LzcQ0I\de;!d RJg|(!p*G } ep00RVKÜPHLe ̊ !'EeGqe-HYaB!P!CC&GNωOȷN_)j\Z:9'췛nwo..;5]c]&:hoC*lyeUR@Bam1Z`4JT͙ׯ2 p͵ id;Rˎt/>{f* qOaf7U<DaX~yY aRΰڔh%Z ā!ݻ(JX% Veﭔ5tdN=pbzM_qya|F(Ԑ+&% e+N*)v:Pr)h()kj\&ryB4:~U]`sC'dלPM _<yiA &R+*Th\:K=*A oT>@&o~og7R?O̝Jn댜ȍ4 ]1ܴLESVT\9-%@YDݎ o㡋h{+8G1>^C^F}=i#}g.ϫ\|Yz[Ms?l>_,6[)6ݛT:| /|zaVGnZOf9uڸp}~~ mN0=5ijG"e}7cXlJB'xQjc~i,+7(ӟNb#zX BL']ۈop0-hwKa!_Fٔ=[F3הtcEߣ9"Bb1ssUO99Q@鬩PvLmDN2=Fٷc{9wtN^Y輗Br),2Q|ݚэ#zX BL']ۘ[[-|&zdS(O[H1hȎ]S!WMn)ap.RA1JX|Z[#gCJU ʥF}qrYUX|^,1 +OJ yMmy wD"…ɂ{E*j+#1$MqSs K8. Ë Qs;r~\1@ U5HJlz/q{:}y3+,@KR(K+RɒXF/^~>][O ͯnz}4CMy3+~ ,i{R0.D]")u?(l.ABpy|" 9\B(t+C!wqvn6 ?~`J tX>}xl̍v=Oa^m_}EOEK$|H m“@Fl% i73j |~X~)v-b{a 0 0S,q֚b;kvᲽ{ڐ,?<;TƻkWP Y8ge Jl~;wK/ݻwSPɩ#ש(*b2Uj$T"J44mDKDzTL>WGWahRXA6Oײy1=桁H[ w @Ikt3?GktW KZa2*X==,p& [1eW i7VTbc>hy2byn;f !(K& l%R1Κ3'0bJζg&>þKؽ?:P{>_?er}<ǂjJ.sTA ƅT҇hY Ȃ 8^?Y=?7K3Y<ߚ?zuw-B^I&ٝofnN\l>:_G.}^vr G PQ*Fue@Sm+"඲P+fVbUʈE$sȊ>k 0FӸ]kQ_@D5%5(K2kf_ZBXX Pi, #V{aX^[ʝoÖnCZhEq/0 (Mjb7~RФG !fw/2VvHk0ytƸ%e<rqn_uBn1sy H `e]k`2~ؓ 0 &ɼ5%%%{ݒoȪI#.|J}[H(fӪU)"~jEJ)&J[S$i ;[{~qO"Mբ5E&`m)k%4irڴqRiDltuDƑt.qv`׊aJ)Ni3)*iz :͜up%xD!X*o/Ja9J"_.>'W_&Mզ%5l\"8X,]-G/'b{TcU$bTڤ3-W]Μ8ʦ,X,a {L& 0BRF+Kg e[s%ʸ_B%gM=\cHgHIwI#mSNR Zs!=EHE)vH0/ "8TSY z됸_JZ*%N)5 ,Dƛ6}Eé =iABUۃˎmxBN\  tJ7s58(8 ar6LW8.l½jSzdI@{$?j!i !]WL8uٕY\9V Y=t6`:HHN*'u*W /HvEƨr-G|'~M<wyy/g*z©e!o`qdf_﮴MÇh6FdfEw3`jAۖCk.ԉ t5AZ8A'stzpTHHMF1l"*j$kwQg5wf0:4&fxVF51oo;zy<, ]oSz)at*qM,mq:Ǽ O?Ei|O~QX$2-µ$3<6`%%5̛48Xl1Zx\JZc*@hF[ʡM6J嗢# V2:[۝.A` VN!,e9sS+'wY`NA`ƭV4B"e((eYh mC sF PshDlhAR4, Vg։Tֺeu"s[Ǯ' )[)0 {"SFʣvpLH"c\;WJn9O`o1xs^-*zrxr4־(QVFoy V[Pkb&M߭@l~ +%?VҳV.*$!G.92%[rvV떋AźMŠ eC[jDVBB\DdN͘@9 Ϳ&&lx~{'0g֋EA-9K9.%$#u㺈dMT =R:$)z%&%ei ds!]JւqP^ZB _ĶctY~MtO)m_*!!G.Y2t6yǺb떋AźMg9n]:u˯MMn%$E4GnsIp.~O?_T%ڋKN O3Zv!bYӈi  .=rs F`S2tZc:c֚Wf5qX!d rjTV;\YDU#)0Dq0XDR-s nOЃęx#!XI'HS[xaFcEKu]IQO{Ri/] !mR(XDlEv 3tä@6.goJb(pV`5Eq:0|hK8S4 X@59=%LQP7&pyAl, 1.\isF 8Q 7džݢ=}#DeraL `(L}| V5EyO˧9"s ad$. 
kt:f)Kj@p3ӌYa'j"OxBux 9r͒)_Z7ZLhr1H1Xv)"OxiلZr"%STc4Ud b":ctnfOE mݲ Mn%$E4Kuci.I>ct*I#}\BK HȑhLqZIڮukw2'21:DNv:ydznل&#,]QnN]tYd}lBS[ 9r͒)Fٶn a]E얋AźMmZݲ Mn%$E4KX|֍*r1H:әdY-Z+43r"%SXUk`^4?sN+ɲ#Ox[6+r"%S}/FصnZ:y0HKUjlLBӳy%E4C0[5bЎuD[.);F6]5Xi-TVBB\DdJ+ w'UX\ R%3;]h,e[@B\DdoۈȻM* @*sNgUOx+lBL HȑhL)Y}ǂF>M1MօnKȑhL3bg]&q[.);F6a)km6֭)p rϹj,sd\ E" {+ؓVXbuDzysy((uԋwSfaϜX߫3|q}ߞ(Cpm"oK)hg:0g7@,:s9{c;9>bJg+;0%ן^ ?Zon~ZOh m?tROa1wQsY}v#Ha!CW1=}e6lS6HjS]NӦwT &¬[#QP!Z(8J5&z *5w湆P* q _؉ixLE(7R[fa{PH?z/r)+5UU)|sXEXe8 1ܫUX (l_|B8sxDS(&=ƄrZmD%ǎ8SAkX*כBc|s)XĽ/σ[WGbê;IN|/zխwQWSD`oG1x ];E7M*nZf=>D%AXՄ W | %2ZQKY\ ~˓ΑI\n)x~~wx xɫ輿G TWDe򾡊 QQ5~4kjaQ+x,&Z:i#Z6m6) ç)℩RBa23\x{|3@gf*4-ʗ33,3рt$أ޴Ez腰b[>\^,*@}nciVXuD&Im6VSq.\_?\_ WJ5C.[ ˋ&Q{NyA snm -Bt.Ixjp}~)ÇsX?+ίndªZ L+In kQ"@k6 us{5ADּBy*TLvc9 g~[:onob5wWO=_O+v:eξv=wgxl-<jQ>8{b{;"UNSDMµ9Ƹ#Fؗ˃hxbӷ=|&tG&S6 k9Rf9 =sxLwWHV\A,,v;Q)QFxӣӘh<̎ ̬֬շ O' %I)w0> Uk/ -,1pjR!ƫ)tu|nJ^nMFb eȃ∲lL&=]B`8CBIIF;3%HCbB )|Ą6铂7( w2&uG%NARgwm~93_Cs`w`6'dgȒ"sbmnݤ,$cꮪ,V#X8E+B#YY \1jbjxӿϪjM`f(DHE#{45ƴeCQ-XMB2 F[ћcA5àz]~ AF zF9DBJc5kg50MH%A1R+׆9o` d~sH ݴ`gGIlwt,%(Bj&)FKz[cNȁwMq4Há".%ֱ :\9VPjПR nk&Tj|O2;?[OY~q}1$|>X1!rV!h~*G# 쮮88'z+K# N LUFkVh3_|71MueW>'F-K:AM S6FzdoA j T98\HTRG1* hO8qEZvy3Š!aO] $xmm+SC"c?X@pRgkuWb`cV5Z!Ɓ(ݍ8E b~ʋqU)j'ތ*EDrVlle@$A1^=( G4czMgKoX+=^&*rAZ_́a@(*$E*Zct?cʄR*é&%I8sRL M)c0(0iaE1e,^C@ wyusq5h8p.+(&-#h֦î箒.n,ʤK H$b;.AmTDhh12!6{"LlR9-`pVR 9_lu(eLCGT<2JDҕ+,0Kj?* d&VjˬDՓQ^ZIڅSwaֆnjxΈ^|{@T镉f) J L!$cCxsP"iY\ךa@+|Ss`rmڞʱ wu.(6^_`0,hjYWDLnWBG`+F4h$[ IJj9 M Pk5q2nc:ELjخn1>qVV$ 2k 1F+Cр""Sy!DoONjFDf+ N-FĊr%J=rB?S06-+ U+#]F, 1GX2UJd%g,F( ih4(˨wI(ɼ$eiADϓ !Xi8@$,2QS QZj$SXOQ\$0) Df5BW>6!(vC (i 1݆p`W2+ WHnJBIQ%8@Y*ccEq`p{#-D9M)OhcBc?*޲vw8L|ܚWO.y5jL? qkA,$8nˮ+lmEXpֻ+` @[#@'h4~<ߵH/ 9n8FXf# 6nMtfG6EѡbC0$4-Pp˴ c`Rqx9+BP#lGMQ(f(1J,m%#-?2qmTgH fo*KǁXTF|D0ڂ=[KZ9[卻Uɺrs{%;+ (X-AgLtcsZ"˿UI7[ z8'\%uY﫢P.g@ =[3K* W;Ɵ9 425iYʂ&cl4Z:lI1@(!x9[D,!ѸAQF$EP5A| a¥;Gr ]9'J$˘O׾{e0-#isͫKnԼN.^Lz5R Yn:0\ƜOI Mǩ mR[ǖqH]Cle @') r4G$:N*e蟵WKvFelA*ebm;ɽF^85/g KɔtYJ&*r}.ؾ$YU`-A` b}uCbAIC4Toxhxcj3m5-&k=^?&իߜ&Eg}Y컋ҿ}o෫f6nY kn Ph@D7z ;[5u򃶟˴ gż~^NrrSxmE:Fxڜ ]}ңƖR;rD\fp&)n$u xG&wLCiTϋvmGy7o\L~GٹPP+3 *`_~wjeCǞ~å%F1<̵ǽn<| Q4"0 L9PŗB'LKI#7HH(4~J9=TjD]W7LcRIcs%χI=m~`n\'+Ɇs'fB$F\8k`bi̱lb5_/ [RB8)o$j!l\S>⠈҇2ƀ|tGjҼ|hwls[T>Ak%$h4?E #4%XiX~WQOlBep`ֺay?=ז1#+@}$ԊVH;AbKxu41Щ:!a8@Qcr+wps͓1)ENt, 2~\G ^>0Ƥ!mMٿ!s8(a ~X Oi~h̑2uB!\4xi%#^8P<]'3#N J%},NQY^{}!E~h(Tޡ/ XQDˣ."K2Nx~v3$`pK`JqILLKO0}b~8-Η[4O:#ą0JO&B`QH- A)`? #sIֺ#<4</o&9 7 *#ŏo/t-_߿-n}]yWo.:UXbr!ۺ֪a $PK8j)!FKJv &7z# QDm!sˆ[Eᕒdfk.gˌ!b_zc_3~ydj%Z}gQ<r7I/.?g.Jb0DִO1(Иʏ5 V `6VmFQ ڞ~9횭:uxSRգƿf#qΞw-@YocMmx~66JGFl B4N>x1"ǦO_OxDɈ_&c}p\LWYMDezP]t^q:1.pDsHsg*J>]`C͋s9B|N0XzΎ{!#\WbT편`4u95{ROy$C"p^ @yPQi'3g\]TzIC[gPPqHr]|$'y~lGB\:_mG327MޅN7pK3fW_m7Ѝ!t7k(MjhqSȈcҫ5XSa Rk#n,bϠ[j:8zB!xI5q۷Y='x闩NU#r]JQEpvRqWP1Ѹr9ewGk7YxM?o!x~hz, <f Ts0eZH -aX)4sA-Uń3(tw//>>~|8>PU0 eeB LCB/b% | nQi! S[q0fPC#k(1Va"%ZG{umC֣YR0˪H$l\V'; JL]T/񕄣Sծ%']~t:(B{=$g޵ɡdFacfO@Mw $b߆@*Lit?6|"dsb`N'@o0.Q3 C%2N9pYsEO28pTd`XS8dD`f8a3',4pZGԣ`P\]֩L~98 Fs98ad.]IDT)tSǸ:PA9[[s.{WvTAB?][o[9+vgeEN`Y`vxMqܖӝ<ߢdǺX<Αd+c]:_BVew4 'L_ОIJEY8)91; R朏f-ټ~(8vj 'TrbLdfd$OxΟK S0@i)O3݈yx}%T"]#d?RL@?~<{y}}#0(o(xCB0eӰGDW4q#l$om P1Nm/ڄ.UL[8%<;|V@8{9Ht>=58 ju&T駍'G+@J4ר)B9 G.JNI|0d:9ABPh-X! CW֡O6N=^>[F(HH, "AgpC/.ѵ`v(Yໝ⪔kma|sZy_F°rΚ` 7g98&ѯ8wl!p'-8SBm5: \r` RP!W5 e1DLN pEfbQ%Q"w>8͵5d lW7?*pQ}1Z 16% ޣ4pΞ$gYofS?TOvn]rf6(+ p/rvU!ڿ,^ǾjFiuC4' K_{hZWCZ+9"(s3vZn·+Pd~<◅Q9sٜ mȜ~GC@ˮO4t`IПw.Hf*_n>wd߹v|X0? 
hJP?1fĥ5J],f~%U[rAjdm?>fM&0j 7DĻB7.??]'!*ܣ Qa}K"L.猷>R*pdra"Ud/2:&p-s,#MvB)vX]1c|H G|;{Μ$|P ZYO>`&kd 7/&/x`]IUg&ף>i9n@F3ۣ^X&O1wiL>bMrSht$6 %5h52\$Eq6_h:8n Sn8/Wӯ>jARZRLK1-.ŴRt2貱% > Ғ "nLr̀I쒬uZ'oe:7 Wqgwi-ç)$"WWg|G׋bY!쾛]{ˮbu\CYZ%gKڃ {/ߋxPUժ?E`rH y P0E-2 1Z$ѥ֪z'ҝ z `< R ,e* fe+NHfESpڲ׺n[C`M 2MQ'ؠ5\u2*1>8^XrTZUTM~K4q=ϰ(Q]!3Vԋgc'zµٶµ>PJu^k$Z2>BDԨY4Gr3m ϞfSNtimĠaZ޻^~V,+{G5U=+$wd7y!ϝ?cn;bFU'PT\J'Õ}WU2I2%;2*vo-*xlo(* cݱ~8|:U>ifB8 7EcUCy:Z Ke[cJuvTU=oΗBXvG`>g?Yhnqǩ+Iח\IcՁteO!m:>K    v%[C/sx4RgLa4Ĥ4+q"zY>j։*|Z'kؗ4E|(xl-Uj+@KH:VEkrj,WMMZ272EtEeRDK4W%%I!BuFO=Nn3D-s(M1+Ic>X-O`s7X#zR4zZRdhotgsQȈll|NHbۅt?УMq13,lug]`f1_-v`0LrR/%dܛ-IvʑhGvG0`.^ھ@U +jn'r!jhMfo$I(%MnA/ZghxdCu[FVk/4b!CU-e;rIX~|B<+B\v>T3 U]IZgP{!ٙ"VwڐHfŒ8ؠ)i˽mꣳ'_3=ZڋEl{>T(Uŧ-s>Jd~ a) Rzr)XL>Mq].j.3Y͔<a#&1]] 6~Uqroﻍj烕FgkՃ#\(u3=ArzݎkҧD֯ q)WqL?˟AzA0G>_1 cE7g_kϼ+-GkWVN P/ϸ}!JŠ>l!FBģT e8{'VB*Ѓ{C <~.Ĝs 4.=y llcz\3N<@DR`NaS]Sf4I0 NI8I D'6+5ܩzXŤ]õ9{pm8sDlgdkUc/fz.2g֗7vٛb ͖[}erWx/K]ǰͦde~➱,9xg{¡mW񊬬¡Y+6r\Zt;|'eʹLKʹL)7N I*n[% /ǒyRAKrL9JI:2` 60WwiH/^$)ҖNgA`S:ΦkJ~Z~Lw/Ͼ{Zξ{=~'Tq2LHgY^f.3.[t8!Q <,P'zDmzq!N%\?^?[#FŴlTLFŴlTL]y5O+8X2k z%Gc2hbZ['j_,όR{8:9҇eH0-!L!AWU@ЄIHC* `ti@ljoI[' |yz掖(x{ۨGC'=֋WQA*=5=@GEo :'f%/ޥW?|=@acm&ZUح7pՌ5a($1\kDjl``^VcMRj*p$"ibH'`^@*S40^D d~OQ2·dUaku6$VU`Z[C&XN)5[5Z(7h/3&Y-P^E-T.$O‡@ƨqEJqmp]#b]j*BWK`L:b%D sfB,-dIuV=YrxքO ]Ƥ6)2xێIvok+KN>];oG94$kW%XnH6,UiXsbR/9E}7(̈8F)TlW~y-HTZYJzTr0OV/w[W̵$_]YɈj%#RR8E$ @0ʋL_>nXs^O3±RgMRh6d/U>kB"ۈCg֝^^(1AD*)&.r;$rM\frXNfh;5wx7r<}X*; ,5yY{Ő 7ueSdQLA݆ɁΤݖ_0/o?$j!UDewBOHE%>f^o%sԒL,IO~gZܸ"̗|? C..=qnliV8vߏlZ[n< -䯊UŪbLfӂ3X+{A`e~/KnP*׿ jɝn/a-R<+rܓCOEU*IkfpF4L`, JBuD>~fUwo8X`Itԍ: ׻,v& %{Bňݝx<__ 2QPB6t߷?q\O WBBijf4ag@QaɅCP[1$,_輍#HmSȴ毇>LvgQ5 =¬M=EK0:.΢HSI]} 9JT5IpT>po»(P-:ڻL''A{ Q WG _GOΰHb{9X;p'.eWskP_WmAA],ǫ'tEC%􌳷1pSS"r,q.FqI=_*," uxRe4s pxW4swk#Fk?T鄸$aY!F8)(zsnwLnoze%D_Gw*ŝMSaR+]a+/%7 'B*~ZToHFr~ŀ4]p >>k#qn1=|YfLTUx;vywGGV| ݣ2Z6q]6'srvW pc5$´9!q9Ou&5tINԷR6SͅSx<\#Qy(O.KҘAE_@g}M^u=W߷W_bsNItJ3ȎI(nuyG yiyxOsJ.mA\?Ѿ\CN mQb<:vol `ּG1, pP@F%Tb"1?Qpd:8{~@^7¸):M(@Ge6eF2>t0(dcO{U_o0}7~t򹵾AX2ﶾaKʙ7{sfQٺ:mgn>}8+Nk]12$o QJ.PkG>^or[ȼ2CiI&d2#cth;J5\jȒGiVB{A.p{;{p)cFY9sf~#d{.N |t<<]ԒM%D9K_㛛l8?B[^__' y*B_Ww5?,&_A/ a;l_3&wKǿWw@0; dP"^[-8 J*sr( 'w^?_.OCnTJ79Rz9x7WـlGϋf`Ѽ1Oa- ha02ɃA xչrBTs3(y-rN2:yI=BL׿jG4䢽DvPLܯ_R)LaG ߿uc?r\㉐N(.itp8|:emPF=Ӂ5xhl$ey$'KǞ]Jj,  AmF!rAHiU#Y<ݺJ;<}$?6)xu wxST"]zFmLphzl.>5΢e2%hЊ' ?_GG/fq}虝~~y:q pCy4ztN883r u8O|(29x%?'n-v1۟BE8aDnt>OA_{{{_@s:o靝l6z?)YİNrوGVJ.uk:IlD<uf)n1~)S SrJ"ȣ^k|`ɫ J$, e4?m'hYYfON) ޾+w tr?p|ōrINs>OLSDM/!yoU&M`xΤ[W~qI2Š)<ϙSIʳ9jߙ"B!іЃΟt侄zɝɷũ֒,k % 6TdDy&jAs|!#etJ͘Q3 ~k24"Ñ1('`5{QxuЂA¬4PN/*D¿ngVf8 u>ʗ@5CC\U00]TzCPL7GOu"!ʵD:iq(Z2F0L"n!['9ϩs;WT2\;)&p{8HG16f;WrS]J275vD48q5S g=ya_Iu"RZ ; ᏧpF.+1E $o+0/%jISwdIYy x֔({X ֽIhU #BL7 l=uwK.D I>7F%Μr5 ՠbME@*#:t( r ؑ.0C#4jJ^].P1V0ս쯬w_K NCp$sy=*nvB8j8zv6NTpSVD"3U.;A9I(%P Sl⇢͆RW8Aey R+z@QsYi0d/Tōp_⒟)|z*d|vջ^b~aoЛ׫oJ̊W|ѽQ?ٯ 75on4͗q^oaV7e_7wj6h<}d>L|(ֱ|ponEzPn^|Rw#b9@/cx8a:fs9OX\EZNjo)Y:^TvRu AL q%z"r;^%9 aKrY ȲL:J3uNq xgK:Հ-I>2F!,\l3fzbHRWW}_)xubF]u)*SV,UDY6U@sPV)JQr\;< awxXmcT+JCmĝ07oH)ֈ9[9:PRwG0<7uSs&F !R2w"%,sXsG\H1Le\ (zR5Pi!%-Tn*KL4tGoKo{L,6ObQsb\B0skha,@~$8IyVߵ!' 1\.d$ʅƂamSj(Jr16;5HD"ҐT`Q  FoH g'gb~ 1i9 T ?^Jf8$q-!I@&X{ %3RC8'DJ*tj[.UHٱI[(8_}:7d)CDX?;~K:*4('"l醇)͂X㝑:c=gNo:;%.a%aQvdU<,Eb9QH̏bQQ4:Rpj a!='Ury,<^"0W PIgB -1L;EdvdBbbF\sƀ'eFevOF)O a!!Yx;!\m{wד!\m-CX/v3{{ Fg>C\)\v;t?+TdovI$58ǜV&(? 
.P;8{6;L%)Pe{!i?R/a.庯,aB qX˹4WJlND1 RR4a2RHÜ \G级:̌(F\v+>݈JP/+< 2yd0aY&oLGQ*?PptfSHbH(&Ggs7f+RH|DToǜcR%k eFSpHJ6:a T^֎,4l!{)U0TL4I) j랸S@8q[XXy(_.:E|B9p'PBJ^·#)ղi /%A4IWEE2h Es:̏Fr̞SeΈZBJ|J.Vt4I%Aœ!ptuƠsKQ=dG[ɦ~Ϳ @=gl_* 2Ev"8U Kj'sBja9P)B$L#=BMS5A ̴~r淯ÄDž⅒ )F{1";IW=]Vtapl bc w+p3m  8v8v8v8vXrMc 1)tD0sP H I/,$r ErgPDK():lk^:9~5W^c l)aF$j(I+4FeBHs:m) &ٜ*p*tO**AhCqЧ:lWdT8Lr6uQN# x,ÎKCZ4nj".1a@:-km#G%B/"@29b;gws+`enY[-IlUXE%V)MUfPI#?g([w4 /XcN7d~^FʋMULucw[ j"2'$5 nL D39Az eې52]GDwG uZo;rdG,;YjY(񪬺əh=­5n2K3xOI==a {s e'h`\Ho4S Ni)S>l+ oEۚ88+˦V?!\x]*~",>_+F;6U÷{AmGXx7qUuv:c\b4\Ւ>O 'a~bOW\#?!Q9#-x*% yJigxWߘQ?%h0bezd`TIwYR_F{u;9B'Qc [8{;=r}3ZAft"zrq'umjO|҇R0|+wn (ڜh{qTH{k\][=S~lUxUnFOd ԋROjތ|M)8T{`K:xP)ݭ7吩W'4l洼.Yn{.ħ후 1trabZ.#;O",lLL$owG=|"cj QAAB&j%?rf< )hC"YO?RF̩m8e ,ᐗ%٭&m2$cYhXJ<&} 0D!2&Q̄i/j7 ,i8YtEVr'3!)y:e/ J)j]HS(GX܁1:y=s\9c"peh$¨ȻeZ^xN4mumXԳNu͎c&;|1``s3Vsپk,nnؤ]sQ9$uz<+.ֿϥncs  Wsz< "Sˇ&'QN 2_nYRHvU]j;nZv!by._K`L,K*lg'̺;3}vK4ģV9I x 1(fJCC3GwBېKaiZ%o_ۙH$U K,7Hy&Yk;I+"&)^LNEڱ睡ir`E5ŊEC2ВMf dI .m2Ōt)6Zh0r2ig'WF ڪv9R3 U@4]LVg-y !HK.]"oE">@2,m Z$$ې\Z ɳv^cI@7J'ILȀ˘ⴞ]̚4NV?ǭo)Ŋ2Fݏߊϗ)_}7_hnݟH?K_׬ۇd~I?՟yn>]-}D ;h+fPu_إ.Dc$p}_[w_)W:~Z~t˟c@<"JA9}4>& *HK]" QxR/#QOjЀ5|| 匞ܧ@߇d4uP1[5tf-nzqN\FeHq2>ez_xdzR6-vi]oxlhOϿ}Z㩱NJx^^H3&ʶx^K-|jgs՞mxY;P1vx4o"WtU,__kynX0)"AU*wStI)4`ƨx%3 9ͅ[#EBF5ɦ)L͘>&DIÕ'{1 }t)/herB>*ԒpdJZg=zT Lڑ@Fl) Cͦ:(!H0lKꌫ"eACeCkV|$$J {v=T0ki"d\=\7r%FX4͛ɕ_ޯ]$j=E5߳뗯~qׯ>\WW*_UϸWw?y-L(D$L<};rmuH,iZ>])9" ;#=]9bkUAmvy ‰x6$5vݑcݑj仈%y/.K#qqGOd<;TF3uK厡3r S/JeAyJ=*= )n=֯v?4g7fiqQ,]̞JLR;/ >{&ͳkN;9Ir.FH|׿u]oV25doȞW7=6+;Ɇ5< PZv2T0"JiG4~H>mռ ;nlY(k8uCjdkͭ5% N6iĘJG1rx8l klQ3offlljz(Bi0eOIu$,zF4`VүmF^HbIc;@x(GPm$Q诐2,tc~| k萜 Ug6s >?m{1nsEJ*\sDp!~q;΀5/ iUҚQPwqCjˍ9^|4jukB dy'R'9i ? :Mܩs֙;UelQX 2xz0 xSy3zrf&c$^&XGl=!ᕐrV {B& ={߄`ܜfv=9˔#cTSG{]H/5W=KmM.%&ãLnA..F- rfIƶRb: U i$[A'kU>Fr,&`L_>ivk /*\j HI+vB{|lPǹпQo_Va\xߎi,Q3@N2Z_g5z[k_jpuC=8LP}\1sNLh3}3Iq&0h=Ol)& CE6E ݾo</wn#pgxG)8ZxJ.a'^wri8eŻS>]v ˃v7Ewx-!['M[?`1l*v&4TC-zXK򴩰tKӦBްsyy_!(ӝUuK8p~n +WH"-i127\Q]1޼w[k>78%Մas;\(*hxOvIc֝zqT镠m_rF< jcE&zݼ߈n2=0IAѴ5Cvk '`iwmjLzL|z7$bO)ͭ7Qk_o-4N^4iR)&cԏ\Y%pHi1(e=nC[CZ"F@1! I㸣y:簤!2(+H}ZnP؉\ࡑP7@鈔q'8plƾgʚ nv7הqYKTMH:QH GIw&Zt䣟3-FW^ sڢ*O<(%sڲL_bT 9c,, < r!_ E.]VCY*> ;k >:\\0p8Նi,y8m7U @v~ qŽ U+?hxU_>H|e Tw@SoMǁH@Lk<9Ҧ3 O9!H ߆ʼnEʌ̘o4dFZJ(sBmZ}c.[.g*rNq!8yRәsD1dOS@i&iGu>iQ)b`ڇ"x0(}W6;H5ac݄1MPu}́E]#sGDq.o J9xO՞JR$KsY? ft}5?Y/GFS`_M䦦)c\%кXJ]vPLˑ[YPYl: A`1L)ߙ+] Ju7eZe߶oᲣL`ǺlIP1 UG/M|ŧi1_Ub@ƱŘ&xZSDR(u 2(1 d`ZW] M0*2rZWL]|s[g5azYǚ<!|1 Ѿ({}0zH?,(咚–a+%S%ij1SB8DMn\Jћp3BP[mXV2ci!($Y.ss<B-Fy.gqOgsWN ~x_dYEEtt(]qR91Ǵ,BKV!j(5=~|X ;? q1pmE^_f26&JzUlۜ8?x3Ki }@S\P[l+ {e^Wʗy]2jWUQkce!u9&<'(B+JƤud˒2/RQyE؏5X-/뗸R;~9_9_U5\ʂ9`8Ap,K̴ W0aMN,f6߱߱*pK~"EB,^:P3ݸsE)yNWP gśGZP,vLJMKŬ׹?YŰ@uQ5ԉf"XF_B.o"5r;*TR0%&+K\}VޢrT4. hVF!1p+EP+A[Ðr2QFJ* x6vpCۜ$¬Х Lh8+@Q1qKr5M(su˨M?BnIblDi'd_aH"?8k0*pR :ePi- )^O8L<@ p 2 )8.傂O&#& 3B !@ XjM, J8qRԷ?aX9Ա) .w/ʖ{B]! 
eݨmY82nIkl,V*7lb5Qۆ*͵%xƀ A,&KE%{0qBږh>j7V+5qJS`閬Zt<dVa:[QQeL*u-*Ua\ޔU-"$} (w4j%({VHĮ|fZ*.#p%YZ!{ d6fS)D pt;+B)K<ņ J8iKXׂYRA -kK*r!&J`բ_kWi$/Eޟ~vvQz>,,GL?= ^ )%Z{G޽}|UM?x~G7 VY(lBx<\y忘-@όǕ 0~Mev/nJ.}-"H[GG6|)cL6 tNҗIN4[nDS~ߌ_P:g&RqՁ P&e5m~1;&|l5淪bKf 1=v˟]>}{e(- aOwfՖ[r9d+x?~M b?<R!{qt`^K>qQ D DKVZ{d[\L#wt>}>EE4szX"s+d#2Į9j~Õ=4"D ";'NMIa/w2k@DIޙǹǰ\5%86p|(F'Л5jƬ, MoPP91lZ!lQbDy(F!!CјbD0.MC!,=LMTݝ 4>J&=@3y~rrbCvgaÆP7+kƒ zxJڡ"=(8n aحABiw40 Nل vw!a3Xla8N j{_խ]N7@[ l`f-( j}Iݦ!T98 iJea 4\*sCW☵ Wyy'gnE^PHz"eD QÑ%Xꕠp1&\ g.'stP="ުp<}B*甇A4R9\ nlJ -$U=WEX*yzҤ \Zm`Q-6wVVfFS@ȥ $)Z&6sF$'ؙUڑ >=E?|Ɨrzw{qW}{կ{u7V6++}𳕹#4My fIgk+bHV",1TD7IT5~$` $Ā23NlE-X4˄DСHW\1;Z#ǟMuuW'h?AJn"γ :d&b~4Np4q]wv#Y6(#aơlՏIuDJU;shuG@/T7FumbY:ϙ]Tń6Aj7N,E;+3f)rhH֥zkZv7*a[D\uw`Ǐ 5TEƍwi#aT,X} 8JthR$K6G0`}RKGHDATPvxZz;#,M^--E{:ESZwu1ŜepbuS/U+J{k=(`Q7xXT "4ʨ ;m3tl)U֑x1W'C$J{T]h_]s! ߓgڡQ I0ܕ.#1*Nlκj5֜)M&ݡ53ϯ-dvR7=e܆'_ϞobCf>aRhU彧`'sJOBE+tU*|&{ D`#a?MNlVPf7*{ùN$.#:K:Ԃ /k |v x,"Ͼ6s;^~TI <" )Y$r""2^v !v>X: +=.w3Z4g|Ѓ6(a; qbr% $lx95(H$*Pp`oS4&HaRpuٯi Df~(;xG㳖ԗ<1P@P .k;;19E<\]C8pҍ)n]rG lHA }NߔtPlbYdvI}{=]QW8H9OlZH#Jk:iG+ڞ/Yy"LX݄U]E ^F,gKQ?C^@X!w, yL:J@痢9^1Zwig  d-O7OQn/뵃ڒp?n,+ƿ]hm dqPj"1_EDg^n OUU`n/_Ƥ%>.c;4kwޅ7gښslIenI"QpccNcuOn 4vLHaL!wpCڠQ7>qiFesusіyyWrO_д8R ;fƌjiƏPlk GXyF&Y׭ fa4dfMnp)rNOBMYW~:[4ioƘdAtl$k֌.7MXT7mkekP{B8޲$7s_QKF6cD[ ׺)Filq_ZrS5p36>Mec R7Ҏc뎨;M*sg0RDfx>F1scd9s}YaҭdN B~\bϥ^D1U掍AeJ*2e_]cܮ D!_ HJ@ (:Hֻ#H,xDko }"U`UY^VL6;M@B*ˤ, R// ^mv|X./nɲ c8í<:mi*0&]iT]  i,g7#{ Z(.Ѵ8$>SEۻSM&rr]amFZĩAYO98Y$=4h=ɇpkj61m3N;\;2cK 41ewׄ,9M2-ˑJ_~ֿ|K--jqJ+9Qm1l&\W/R y\ nr3Vy~e8I6EAb)K8KfR7㙰<;ǻ;bb22"1=KHkލbz7>-kp Ϳ^VJtCE<".՘og;YYHEts@*p2{Q~/E(~ˊ1AJJs e19s*Up$O6q{JZg(d ( {(ccPRaCr{aȩWAJ&̑ss,Br^O2 y,#e5YGb`kwG` ^y(;u\6,ν5@}%?>={o}|{>ҨVϗÝKoj lh6~pF {FO;W쯓2;70VӷGĸoP^ɳ}K8VT a8`"lMf˙'Ɛ#R'HXi-(h비,h9CRލډ316"@B:!A^pd]|Ġꗳmhw2t$PfiΘ!䠻DY:K1A`~C'E 9BE rZc.Y8ٝGu >*_]d.>OYvxB<`k :Z#p>d}Qѳ]AN#XNb)$fA ^.kV$eXuLSTlp$1Ԫ,d0,N2rF "^Ԗryc̳D5^%Y ,5p}):`c'!( }W\+π!8>elkLT6X$Ԯ0b6Xl[>p-Zp(W%ێ6ca0u x9(S^`ь7M:7Ұ7X4^# )jzv&:5޴Rۛ6(ۗsF>n%+!hOueaXÍhO`['`4`uqۍxuY17%9'LHŠG['a&! 
zJlgZۧ׬_ԪAfIe&SO$|~d/l|/f68g6^ҲM+;ilg7xlycQsZ蠝FQ`(t((&kSzDv>ɷ,Nvq{Z_ H՝Hk-5z%ٍZb.Vdg=#rb&j!nfa5Y,C 2r˭9iZj kh,jօJ JZmbFּ6lmJR7A="k$F7\#q'!\Y{] R-mR-DtIgyswӊW.)C30B rZr˱˚FWZcyW_A@;) Gź$An8Jy$8Ff Gd( 97 7 ,ڠ7*Q+R$؁p̤LsR;GdM;d~ۭp~7_juKYRP.:;v.(ߔK Q- sYirGJa͛q82}$8z8a\eVw<4%03I3F9OStHDȵRŖvfT`X>gV1g4dgEJmhאy'0|1a[@n.ieQaW~jQ(%жO]_A xTT9>Ai7w!$VASB4c!5Фs.؏DӄS/ .be~c&P}(NifE{Y}eo>ϓF&-RS?{{i 1KܶJnE.2L1cfmPtM,sE_IM+XD5۲bPV& }wsE_l0SsT.A+8Vdctlj49aweIjffmCȳE7EIJUEYE`-"#tSeZn_}#ۃR.VA1ip~|y6C13tN,}8H rfW6F Ya-.|z b:|H+eOn^r`z8YOڝ]r؈ϿV=5aQCX#/YWFI]UB!x0muUh$8Tں*8eѵ/[G,[xmܘW)B7e% ( a "h}r`wdy߼wN0Jva@X_:7.76Mn}'n%g$I 5o^݊:.Y3eV xãв)(Vc츔Yu{]H왌qV>-l\x1-l`e"Qrd5쥮?%B#e܍ϗ;]/_TXoC9wZYƲ-ȩ\m2^?g {x0i>YMPl#%RJ.E̤NA:ݫh>˚hża E͋g3$He$%BT@'sduǼKIEC M &8cGFi+_4c^P$7ܕS%W1} A!-۲J12FyKڴ%n5 - \/X%jeeSKb{h袔 &?;7:H[vVnm5x5fQv?cӭh?K%\[|un fv Zvx?2C-w'4mHdj%01lۦ%֘wMfFWD 6FP|/[c4Ba'ݗ&NxϷ?}{{7?:_t7Mn),wעU)AbBJS@ǞP\\f|$>NHTO@Fj51x^=t9C{ \ @9"mr[Z&?aT9 /OrH#ȩҀR%)Cd Ued8ξlj(*-N;2S.vq@@*ũ;0Cin^"E0X H47WWiq dY%Hûv&wS.Aͧ:V3`5$< IO?,7;k؝DT)ኧD3Jo` eAck<Ǡ pRs)0^y"* pLY6Ⱥ:DR^cGb\MQd%* #Z8z*z` SIPMKjpE) h IïBlm)DR3l0,/k 5k報O'wgBQtB)Y~YÅF'7]ee$3jnlq M<)#of`,Mu ퟤ:lWYUVX d3j#0P( ]b ex^iAU,9/z9?f!0hazZѵZEqbLPv @Ra-(uϦ- a O{M#W,MHf6L7.duP@\lvfa7ig dg {yobC^{āI-j?HiM 馷/zwȺbFoX;/yUNKa&;\/S>+2pdj%ずĦBkB9FklegE#FDZRR ܆fĀ}@@C2B^$%jy1tJ,: MͫK0X'pũ+dѠ'cܖ1^'x*K'%,Ʈ1*w@Ad<=g0szCs'lVeZT9"8 1fT BVo3]6y ż:z#(5R28PCooB) ^g7+"%|+E[xez;MzvXcD/;gi%E(<MiQĘ첓8ޑ#䁔ig+!D0̆K`8 tLIȑm_QX+!|Gr,(%R 9MRYİq }]Ή{y+RwU]"M% ͵s`<Q<1]~D{]&9mP q L')-@zEx#ƨ{74h$L"i0$D]KH7WNqxS&*+ް_@RڐEUm~uy/%Utڳ}{g= IV mz*I/daY_0eZ f2NLW-\6?,[eY롟kO.c2=O9 2%|2w K&wۻ'74o`V<کx̭[\=dLvNb,m{O!vvS-rDFi"I)#RATh'SI*HP]6@6ђoTj"z۽/ӆd1gvqϚpcDņXr[v`A Ң}쿆hAjt 7Begx5S& tX/Q^ѠrG;^M9Sfٛ!̼ϯklSu杮McbQx&wa+c&kWp~ mpIk?mM5Aė,NL父Z״bChg蝦ZD-EFة݈oJ.`,v%%N0 mΟ4m۟=vm/NHKn4O@|5u0XϗfI d*)QD[:DPAf^x4 /Ξe:PHKSj( u bV@K]ZGOJK\y,vZ*^P" ^! vh.;^ՐJ_:QD3|S9x—":8&0',x^ֈYldLP!)VY,+/)FA~+1?9s9dfj[j)$!E7yN@*͝*hW(D 8 d5C(X6 Y{j@4!]՗Y"4h` RF]T#)qp"8%2R50ç>(FI3qf#ΑN \D1(vd7p-! yj1)jwIEJP+CcC'[lO?֬(%}g=|!] XJq ޓ1)䳝W0zzFu ~÷KJVB0'|ea/"|G7_n O~ ~ J+rX[sSOK.QI`df'͋KNqB/+xL4)d- 9t4IEF֨DQLvջdadl9>lk:W1!Pbj\8W&UsUSy,F{q M.^D4| bdxU%2g<}QǺ/ϟ_![Iݲg}K]e.o=ܾ 9C[<~,.n/3V3a+JA?Oɗ5@F A4mogwŪ=aRBx*4`(K +H~.b˾ݘ:LS:`P3;o:ivӽMTae#:2kl9mV>DU-_= ZćM BEƭGiE=Uq|oUhR{φreX%m>BKp6 Ba)QAg"'BQuis!YpôOws 1 q?HQPF95Bue(%R`RYfDWO"nYܙw_hh߭^.A.wwN`J\o=}2n&eHŢjVBG ;/9 ds k!-DkHW3I0IB5Lbu3 ߭},j`_;pYxG!/pQ[)<0|qARKʼ(s(!x]EOBr2C+2e=aݙ(9wDbl bwۻ͋d;䉉+)8%lwL!ƞaz[0ӏ&D|? nI+0{@8}2Y/_F\`߆ftO OSJdԯwҷ9։\:mEFC]=|9[/!&IB j'e۟xGTfTbMcp-+Fshù.=cZtR }X:Lv2e!/]jj#Utj NDphYv[$ks* +p=Zw^3̈hwOuѡET{Nh1s,h20WDPB2)aN 0*43-6MgZηJ/y9}L c6dH}e)I{|,KZMz[|}V(VmBOnS;WXY:zzK'W#nJkFz֣tҙlt,;PVqͳtp?Z姍^Jd] 8_ksgּ}FB[Y8t ~kl6$o;v>C.td2GL锰Cb&] .㙚QL)|lhb->g)(uv[aIBfU}-Awxs 5@fMf4둙PhT1 u:#_0Hae cVjo AIhjS@F#s:-*_ *A$jQNl[+]ma:/zĬdsT0^ FG.] hGw^T@rsٻteTdZ3C؛Ǚ5B̂ TF7ۚж8.j` e.=9Bcd#2(20Xi:am-xg76RRl tؐﮒЛWKXjg; ӯ~f6lE,Ї?O>(=0[4,o1Ԍ_DDp4r> }t6_~4N FşBY)gdfΒssOַtX O"Ts)|3sw9G4Jbq]c*hyyx`][܄T?Vr|V$2ۀYqXt5`%=5> 3T]:'4("$mKF%ΚT3F{Hch^DQs`*-H!&TF7R3 .ҼAB#G""ook3Z[PQG&wxAB\ *Gr]<ȕ A!/exy^^Z05Z݃6nvl~FKsn?9$e]&zғRKaoe~)z9QUD.u?m~,fvT h_-]̄G\*^xkj(V;1E v_/ڵ:)R]5d}iFhi \ ۷9/얁M藿wϲ3p'D+6Xi*&>;/wky b-"$ qQـ6ӳ2J7v@(z0P,0I,,U5w*QWUW2.vCx]F20tG$t.|X|b*fSYaޮјQ/zKڎ}7aQ9\r-6c#{#S9@|k]z|DYw}/&No?MxOyWcݢ B޿??qaqs| k= V؛8+r`p΢MxJ$L ƥǠݚbPucts:u_h]vYO>*2m/#^ӾJQ iTp7|,ڈ37vm`q⨑}®O5_)vrCXqƨ>'w) ( pjВ-)hRS]&K\}i* |ߢ'l(>Nh. 
S'x ) }pXIPT^0B'qa%Aӻ4}yk'T44 }pPIД^8) iʁ]F7QZ0$k7 5$"^pi &4+ѲԚ΂گ}c] ~;G7oA(}i_tpɑYE^-Bl9IE1ڥػ{t-c@/Γdbr;nE;=-Athl,_4W5?JFȣ5˪7 @m`xWkH?=}a͔aP3yk:!CM{#A is1=fzIrCXHƟ|yJ3{s[zbb-LCנּs(7hgJϗm򜜓\hŤo-VؾE 2Y 2̿q<y墕 2E>it4qp또f?)!Kyut,],-W@wSh$W%gC/$5C-i@ڄ* *WkSr]9I] }kU@ߖ0qe|oNF(WP~P:+l?"[m~ۯ.A@!e*@z3)g.\Ih +ߐH˂D?W(?y/]9 ^y(`DDrjAVL3N *:@K- PJAůߗghQQŝ-K.> X3)T2]&E* Mגv-3ࣻ*IШ5IPL^bJ4I2$y ̢x(GU\孱 뛔'jfR|& T 3N@JA!):BBkeӅBD파+t"(X0 a%(ϢW )T$  (:Q#"4h1Y딕 H28'sz$\HgQk F]ۢjoi9~ RvȎ},͊c;ZKYLNL5VR ED,ZN;Qm j rStϽBȂNYd|X#-S7ab&kC>܌l?]& rao_=@9?|^BhW_W??+0/94Ծ{ylw =; md:[Rm~JπO;#;}3of O17X9o]26H!]X(Bn)X3$5F+בqrdW%bEj, A` U0gU" 6fWñbQJ)a`dJ=KT% @)R5e J&}b>R >muI xLANEpydM,?[Qi:b4rD$ JWFo g^f`.LA]:rk" ɀ,// yAD@ΦUk:r3Hue*P2@Laj`g6( .J#ڋS!!5qDb]VywB'uM!7QW螜u(YpXTblPY$q^FN猣DE#L޿yhy 's2{Yq*j n,)pJa%񣝽<{N^/rË:+X{x.E"=ZilZZ֛ޤ&E7)&uT%ECbяI% *: Z1(G+pS F'.>[1mw!c$D奐9|r4PQG4:&3'< 'q!bJ&(1D!{2|{!(Ɖ9&VE;F #h#XN\Gxzd_GPrs {m* m :o èlsmsͤYj@r9&j.('!%k'k5Cgo}E'5(r$F' ?hWw("ArOQ;*Edh7\*Ԑ <,nKì;o2l Vp$ rz,r2XV؄a!d`m!E- \.Gh5CNR%ڡ?-dzzv G2Rt|PdZ;z&KA)_ [M!zd1.J C ,gw%A] !QD$Sڱ@-;FI[,#v2 ΩAFM+Uj]u_'kL4@Bz8A2"F<֎xj VHw|4t'$$K|Oޯ6oj:`?[ԹMx^yۧj\ }d /wK"^8Že?7Rz5hT5lR -8fc@wZ`?Կ(5wkU ލ&\G)wV}5ٶU1bhI 5ncka{qu2 ptG g5fBgh:|7GoŻUz`oW;Q+6dl%Yѝzsa)oˣ5}uN t-Bc܆xvO=tNBVe KVڕhwN-ڭ\$'wL-À]!.^Qf<9>xW-hgɣǤCWo8A(\itͷU&# & +P[ m|wv6]޼z'Uq wmUIup~v`jp]%t5Uql7"wFDk`'띉Hٔ Ikd7(<A(JBfDni{4}ai_UD0]j| >hOSJQ!aUO F-A;*hE <[!@L;ir[40c>qoŢ PI?lfa,xjZT2hdn1’6{YS^: mO͇4 w5-{hy"]m&+Rɚ&^ҝy^epMt:|3m*XifL.EmH3 ]\ʔC4~Ρegٶ,@_AܼC헓=E^PoOhq2k֍~ Dux#5%]q>sB;o9hϠ:pC½Κ\lm=۴OAj!cm-Uְ')0~EE-Lhˊ9Z60T{&w`{QVJ,uGr>'jѦ颤.+6MlJ՟ 4Qi B͵ uGn"Fw{K@Sp%v[<eާNЛ^#L'W"Җs<raFcʎB%N 1(-kABKDc=I8k$}9i}t(e#Pd_HuКRꃌfTGS.$%Z_W?wv'Ձd7ugL>ۗ\zu/Rpqhܧꂯ%9JG@WJz` ML=YY9'|ҷ~)kJ}z,if3ˇʿC<d0gX|StebBͶRy:3uf2s.W@}tS$OD JNU!(mt0w,]%oz^ωpA\M&7ޕ6nd"`+@?uw w&Ar'AV[n˱夃 }(L-*"mزi:ux?}"hB{iA(9YܸC-n&3 Ff` gs2/*"gf{jrjh^g/v~Ytg:nb ñ0$%ejN^9hZśU!)ww{ ra +fIYLǜcX2Ee7_W,Lr¤w+JvI,* m]6PҔR(@[?Th @BZ!$zX7~{8Mбf NSp>5tp^T&Ɩ9tcjgi.ok@ ^(ݖp3z 9})nb*X~鴎=j!K[Op⩇zl\d3-^! XuS~s5&:^'s.TJa&e;T[g P১vVsXbsdAΆjֵ0!LH 8m@Q~ViE<i캀{3VG/K֢5L Kw_ߪ;ytxi.88| 842F!(),|)qD0Rتrp! a Oc<]oA Ǝ!ZC bR􍨡ͮ) Ee08ز5/6 Wa0 QS‡؍nwN`K.B1ʄT*|]4sfMm^UB`(%I/N[bK"pE R0d%ZCq>77_8Bf˳WӥĴu倁ׂADE=X09ڃ%Ng$Rz Jn{BB3LKpsS1y2Z$0g;hlTQ%m8/9]D8#:ƒs{e=R;<ާؼ,[&=w+Lӟ_6EVU{u[\4䕫hNRdZ7X[.):iW-4Ѻ5!\EstJ@4 7.۫5BsJ~˵&.3גnoC=kM(t2k 6r NecaYt:}>NՅ\Ӗo.S3}zeuRpA۩ H8n5ʺ)M5d6_a;`LW*+JA=>mȔr΅쎆zy=IoQ޷M¾9 x]6>91C]ΉIuIs;h%|wZ&1RABm;$qN``I{\%,B}M_ &{+XgXQc~d}+*VP[[A m(7Et*!npgĕc»=f,h0t/{.gmbR͒37~ }iH$* pbZnx=A^`!I/O_ B0=GgXERt+r[P YE# Ҙa9.cuC.w%X'<=G[ @o"qP$+%ݹnʽ5Fk&]#QG` Dr !,ޥD/%8ǤSEņ )YZp12v[24^=SdT9s 0#Rh<ݹ7 ˗._7#~|='O/,;$feh/.ΡsAs$:Y^^| ~7CC{PQpA!D-2 M]*(xܽdhS*$&% X& xwaR:^v 霤In5 b@2z$s!0+LOPLǑ8APXAQNrL%<ՓWzah{x Ki+.3ə@!0h@B:z{[kr@q2|D Էz5cLm.Ctw,#A@,{ːaTA! fZP+ee`MZs,p|+cjϦ##܄.b*@t%hjrn3V2ZT;aR]g}í~E뻫K0qNW2`)F^ q lPm1a"@bi0Ì1f9 p xaPD[h^U&f#I;DsFB96,?[ʘ"ĺ:j6]t0lHձ..$(y;'$ 6Z.+8·sD"y y۹.3T?ugI#%cdC MzRhH.A P'W֐i-m{!R5Ie QXsSMJV` ):ްɍ!4ItUZPNMLPl axֳ5KhFK!dB's(%U]E(s+4,~p؁3ҷWXRZKD([o %5FeWi{{VLbQMD8lLTZb 6IۀX` J/D>sفq^P?/t'}Ɂw% rf E@>*D,cTB¥҅RLʃc㋟4*H'lnn[A)+kv@jFpFyhglM+ < ^'Аe"0F@GdK 3Ph)G:8 :*}7p"-~~V̰/XC j ` ӃJR^߲Ϙb05èjZ0^"MHkH~R&Rә=q1W4okE|jM:WiKq3wu؋s3-haYR]!j;_LۖJO׳k&˻}=?ۧpeBͲ̡1scXZ!Ji1H:.k0` WpP\*)sq<f< H'eNf/5"J1('mpuXw(]قw .R.0erE㞛J9#$*̺hTCP"y-8~~: [')k)S`dfm" O6{6.Kʢ>~r-qv}), )3\,:e0FD9A-)3P{4#%5N/U(<2]XV3oOѹ`G{k@ WiN-cȔ@%ik 0EvI"~$ƍq9a|wT oڮ.Yf}szգ/$\KX! 
!)NsUFmRs,CbxxGg>k<JSaX$HuF9PhIt <@JARiY~ V*&r>DB.ΩO2~N1qqdٍ2ả@@֋2&0نo[1E<:TFJ}@n]hD!u(RD>4ѵA)u&t:5>tQ ,oLâbvj/vZ,&mvVDJ5kj%lcQT3`.v9 Y;1_oU .*^]^R=aW-V{)JknGuC 9Ad-Dvlu/ =53${.ͩi҃8-6O}ߩS,p8_DFK 2`F%|9ڏ{+ľw+4 Ŏ!Iw(%DH"@O=gn8`Ma6skZ]Q-D_d?tӠ%Wgcw>zL0ܚ#knc`oVz=Fl-3H*b"A9`.* R\Rv֔R 0F؅@9t!ʽA> G ,å֘+vZєL /ZYGrnB2"s(*Q}: 2yI1ʨ@(;hCvJNXJE\J(}` JrD ΰApsN# "#0` @},ޔ:s!!j!ZI A Q}d 9u`X9D88z2j(`^@X0.F0YRSPCa-?q(tg( x4APDŽT%-2m0c8~?ԆIi%D3f/..2&bJ'/䇏Fz p2׿_mx$U7rQ{|vu+bd3:d5m!޺&%LboG,+ hFXi%0kb|a{U0򅋯>WqKc69lA]3DhAUl5t @L\?q詎~ x.rvgӃs[:aTsD&euoA!ň1>RڦJQ,lI:pzЛ[k>;^1ae99.`'&vu2ry2*@0ZΫh8{%wGUqc`(S(i ыHV>j.+!21BՁ,DxI[Ȍ=Of- N!5AH?dNo.|g(ijiY\t09O6ޝnݲFr 1yw7;nU;'b ~:7L.KӀަp+p8S(gfrXy蝲>{XQ,x=Eר\FYv!p-)5oۺڙ[)9S6 pn呹֭ Nefq9DZmʦ_RJ08i(so5\df9r5rr7Yyg}e缡!˾%B o0SJ[R5VĂ,t2&$?_d\= ڬ2?,/+D:gp(BzXbӔ؟1o.<\NoL6l%$ =\~j<>ZǤ&- 6BE4j7{f"!hpH^< ʵb)C-Զb/`D;8f5( H}|ٔdBec g'rIm VrkJm &\q=e=;swU Y&)EKPA=8$)1RnoFANÇ˳1 X11t`pLuю-Jb$!!ɪ#BP3i2SZa[yK:({>:/؝Y/HTC˺l(֋oTc!os8X1cA'0`yt1̇B (p%Z5~\cȋo:"u{0V]nKz#K1 .FH#'"H.4E:א-F#ZR rTm֫oVΔiݺА?n)1nhG^)V׿:7;|\RJxjmߘ񆝉%0(9x5ahP;gо`\:1;pKl!<@ DZȓ+傭\b+H4M#cP9Nm>kZhL8GJW.J6jtj~BZ限.QS9~ xFjwĠP2vQ9O)sU#I%G=d\|r7Nu`+lN2ICTK # 0rX(-b>H܀-tfYRM//w["Z3w{\. ?41ofoִL9z2qY,<竅ip{<*`>ށ,SA_"Lvy{{cSy[7_ Vt- [ anDT|h0QmqUTS[pt-U|ß۝l6}L t9¯y *.xS]Ś/2oHF[+,,JI*nL`O|_iY-8ŋa "$d3[Eߧ>?#&o@jQspW)ϕxA˿ nVt4~qÄ|OWNn[Օ?=Pt "N6wAzLCY;[K?u8P;&]EcO>AT :ZZʖA:IM-!{8GHhZP ؑO|zPZLm\{NoE)2a+ze [t=6jSfx' qz*G):9^~?-7 }f)4rL<DxQElZ*)`OH򠅕[ !Ya4 Ϝ .Hw:*Ny\Sc- ) ,}q C#{H]z =LȅټZʾh>ymW$6UC}{oPhu_5=MN =FufnYY%ZeE)QTBh)OqjYsHr>SW6X&a"WE%b8rVk>9-E_O45et8r^Ѯ*JF-EFlt-=ꎝ{_9%fΠ ϕBm~qhiִd˱H"29PL@ꉡ1A;&=1q:8Wn%/&N"b8½9_}*$].v h%;fNac&yy '3Ԋ!E<؃ɨ醌TBEq:.lŀIu;N|t<33,GI3A'EfZHP}r <y>Uo39=9/a ݶ *f/h{njƤ7t7 mrt7F 9%=UE(1G!z֣֛,h^=^S gkN~fmSP nncB<'krM9Ę}_\^?=9[_fzO0*)}3zC8[0p_JW]]mj'*Cy)SmNU [Q+ߨd(r+:0d`R2LLb5q4 +BE)b2,\təc{&/(;gs U[-IDs) 9hS=NeqN nϮSn|CHM?X: !y1G sjoW !:Єq{D*.k9Cݯ:Xxn:6M_)"4kz*[|uA{~oI z~YE~/Bilq#0G·h8N!b@_bizS !s9@cqc7O}צQq1[ͩMPjNA9Yd{Br=Fh%|,hwz⮡sd3eQ"_0|j?)»ㅜ"X4pk~\M;5/3΄+u*XR_Rd=tK'GT N^oNǦNX>=1>}*pUIAaAa˧gq)&p |=|xlg>Os{rX2a$8G5F[U<ۢȰ4D %)HM$qD)ӂE5gOV`A H!"fƨǮI^ в@ 1UQ3Uu< FYtN1+S ۍgkm+7/r&'@>,E_Z|&n%'-ߡdGdI#~H:f8gDH\b9d5e>9Lb{L|-Q4 O)s:qr8eLl(4y#9哜rjVZz#ר" {mKm :o #R%&rq:p׬M~ W4;B m V6;CfGl!S ЗH(FB5; < \j;v=sv9+@NPMNU γCse[&ƐM @C` 3T{튣>11t~cC^I/K9{D5CKs+&͕*+:բjqB#s}e?*ErE?׭r72ci&Rk7C]aɣB8J5X8>77oo}&jQy9ǼhQ=njIF@x7c(zxx=a|wNC^D3k(}LT[AŒs}dsZ6(e1NPt$!/\DdJѝc}zڭѩvۢ<P__+n]H *vKd=l@?+%1&X4\!o$D+tJ{9j L)kӢDъ1+xF0_[@.$p@4j I^ !Q x #"D:xl! XNpcg}_XH}a7f*sG5A~;U9p`Za@4”`- BZ&Z 4iDjJ;9Lf[A]Q;Fw{~i#vڷ[=Rӷ y"Z%Sj,j7.hZ JDv6m{VjTiꉖj.$䅋hLvvC~'lJ 61Zn[}͂Ľnչ[EJyzA編j1(1hmH i}kꉖj.$䅋2%We] pF5w]k(j8ouE"pM銦s$_ {i?ή;} <7AakН=Yw-YݓQ yz/f}!x|"^NBV4ܶXq\džF/Оf%m=rcGHeN'IsY$SLBS,wj&]wɮU 4*tvgSO&OGd? 
KXvv z̨ktQFүn~krK# Ny7Lb3I[},3Xk֝/`WP~i w=9P9 -b#3ͨ/?a+5FΣs4͑1r}#b| \-'՜t7 P H})TuW\bxs~7W&tǝ-W .hG1Ci^٢)MsX~cBƕ1b1e!Exy^B33$/ږ(d’vQ{0(IC_ߴMhs` *qG N/f)a sxto6ya7}Z~S[sOAlCx͔q`me5Mm ~!N~^^O."L;FEVu-I%@ǀl4:`M6u:yb$mWJ7R%Baiʡ7ޞGo^]\qƣKeε|zsꋽyu~^ *ggUDu7v 1y|'o Bӡmi1NmE:mC$zHyfNڔ4LQBN-tjkx*%AqW 9"5yG0A2 ""-5OhXZt%vI^\Ji`$'+ UJ7n(!bw3-!|WlK 'gk GCvzk7lbӆ">#MBQT #"­26<@:0\`dӚco1҅}9o&=>`Y>bU5~B~b2΃Ϋcp6i Vp8"^0mVX:鳱0BIw2@>ꩆ)V2`dL'jN6JXԺ4j WͲD DmxI5Z#9zCsȵ$[bɑ뇙pJyB57:@3D#RzǗIDGrT '\-AN[;.Q,%2 qc>?\N}ng&t7$p߳_>fSOx8r+ab/=fT$ ~tv3Ϸg}&BLyśSfu`Jdz˳53Hvϲ硷O¥:>Ϸ],%mh劰mԕs| -Uя ,0{ͨ0_Wy-;H#-BQ .Oh*N[k% YTnl>ϟ˽">0K2"iږ a{oC>e 95jw*$d嚂4P d;HRF轍-X%MFX#Z}NBhaz@"\LbFw+[DhRQe"&$靋F+U9\9¢h3%lrf7™Q$=pȩG2cH~,1c-VY2dVٟFWQO's~PJJ9X8rGͧ-܊tފ1^CAfꃢk7ws 6**<gbhy,*&2=eB aZ98IA//&-I,%;R]cUvĝ7bs\*ͬWZ1LRe׋ͬމH:[1xFk5гpPh#hC˱T5M6Yl> in6B`..|q5< ||VjNwy}\~چfA4!tWtOAFZLTr/E(Q1iɉ#Bab#>)cӺU[ɠm68韬:jd=Qm=2k Z11XS48X2P9;4Ɛaȁ8{)ZIzqolQ}/_UbMoLe[UԖ+F^>'W9Eo:b<줝?iv/rpqMQ>Ъ;xӼmO5wo4LK&#Lk "p˜:q:m5y4~=|:: Ճo.)]b)-Fd6kՌҔI!5s4߷;N}}"FH z- 20hksZ;c(ځLwF]M0[aP9-|kGbD$Ezb]\%H`1! OJkatF*4 #6 Ah, -kp(%O sdYz RO-mzU:~zVSnL? ;㰓z hڵ1(.y&hsf6x."„X!C&l,ݿ¿ʥv"h[=HzWp(':zZ>WHRG5M$0f]HZaZPɅ`@˼+HxzxAr%gn< ӝ LE.I}"5s^E >.HtQׁUN[r 4~= )ޝ}p~C!P>#p|'4,Z .+LB4Ph1$ו;޿UuU*TU;H5s8RCS ơ^ 3yc O: Xi:ZE[fJ`YkGm06Q'bUrXK@ՊcQ3MmsOTTBYۑXi=@x{mXv^\'XV }# Ÿ'AKMɩ9)r' W5q&DBD'~i'+L3}!jx< ݦ)tjH'+W+!s衚[!i|$T#]\viv O5VчJ"3L 04\D:҅ pQ&|X6H-2ɓ%@vyA/82a w! JԓdvmOTeetA1qI>)bKiaJt2K$k)?PL,}yU ,Ū&u'%Z|1ɿG>òtO>OM@eԌS{Sy"FFO3=fB1|ω#ycty9J޶MMKQmfxUd"ɔcHKCch?_#Dž#x J<Z:Ny>5Cϓ鈭gs5٬IzҢ:'`úx_K|Q"oM4C*FGo~|wyj@ 0x.%Pl Ӓا^m #4Ez03VyƖp f0pE#Uy\DDHKNE $8k%hFD41$ٙ^ _I*Rˑ4CS$i")˘Жd2-)eV9|M 2m wE6 ΋9 î I|𹴉KÌ@`G5Y\<韍q$cA=܅w\ *NN.>}fTO@*a:;: ]4H9R+ ,c"dB)9"֒$E,g9I~j5 VI R"JKTʕ,"P¿:a$ b4E~w;FȂsLqD\EUSXbӄSm# K(ɨQ`D1VIeDˤ@U)O OˌY4'TXΰ@d<\S9,Yx)2Nhr9f9GN(``%0K.H@Gr̋8BfN@b `JsXY j.-%tg09!x4`0 -W;͇O]J#bMvnuz=>~|̗dONR߿!o^?`zī]~3ZЏ{h#@iŢEWc0Aw^!sYw !]T Vi;l2qYgǕ>ϻ|r@6Au1~'TN0yǐ!Dt3?s7S;''k>hywZЧ$ݍh/gql0DLa:dͫWtj%U&≒E4NiP5 qE9I3RDŁl17VXnqb)U 'rS#J'Z Uy&2 K#aKd74upMfwͻCK#]I Oo饋Jg/ 45,/us)ot=iKAyơ9#v ]Iu7[NI:CG r胜!ѕ<6)zf,5$ ;Imn'қ9ik^>̒"gbAx8䴻lgZ2l`39.Ѱ Wz$O%2bOs뵈r\&g5 ;(㺉oiLᡸͨ1EJtLI[SؓU*.=zE\,CڑЋ@7 p"g5V37*vbD`E0qxt1.lEKA/Pk͇@]b_{ݯlj#RhQ^ --RTOy P@uGP?;HSYg~i՘~O.XWZl? 
?TmtSгA^:K.¢rf’_(9w"ײGO ;HΓ>х1=j} q!i幞 :䕪8^nWgNFJ+')QZbp?↓cpTtb7//ntܬ_}S-9.ެ\xXǃXr6.J+eVɂ[w>[._^ߦDUU߇9~wS4]`y _z&7_._l?^YG~mQ>[^e ~%Gίa₩YUA A-#wh^-+et)ÇclQJDjE֘Tf%y&sc4%3a$1>iY)pYxE9 P.mhDbLFOIp辨9i 7=;ܚ 0]ݘ|v׷݁^ܛެ2ɗ㽖c61qR?VXM^(bj M̴T;-6ZzO]bʯq2˾ yA}:0NAv*2#rLs#`bW*AcE@TRml4/mTba㱎])V I2jVi\Gn%AzU.tYCG(&QfZM/z.#ӳG9ڶNս)EyfR9CwcFW߹O:,F[x|w^V([N𰏧ȩ 6?]+$ I QMCT؉[Wv<(dp2, U؉IZv|ؒ!G);hD&c"ՋnP(Rg5#Qxɗ?B >.:ń}*qꨰ˱[s{}8jM;RD.{ޝKuEڽ!_oHng7T`Ɖh)ʤn_Jϑ\7\I8El,2=EdlsSHNx.T;S1?۝ >C+{#&(lœ5ʑu1{`R Xt[`缣 ~hJBsȝ'$.{ņ *y(B!3˜96C9HLd+nOqJ\ *h:H+D,LJ-2Y KIT*uFe Ĥ`0fPc"D(NƒT m8hQޑ$0U,1Đ\ Nt^ds*M@qeB!|{9X']P\qk~48cS#4'cȐs:a'w] O$2)0[RjV,mRJUZs4Ir 2MD4&tn) ӺUeNApC6ȈV/iT>rs|l}/g[ȝ=#"Z=8Z5qV1Ly*-nǧk_<=آI90qԪۿ= X><>խ1C5z0ꜥsE`~cfc*U+:׀GL&UR)TnH,(-Q)W2TLp*_0 +cjrs1(^ E:uXߍb(kN| s\s 9{"8&u/~a-H.&5vc}Xiei N66=Gc~[0+kmHE˞_C0l;KnzȖ$e[MV[(`X]?XXq_" g⑅D)jENADLjb)6_" B"U7j_g~{:˂rV"(vM=BS&ܫ9L'BD7$\z]wh\++tkgw?t$hN'^K!y{AM ĉOlwo]u$}(3gӛ4uTrw)_3HSp yJK# FH.휌kP^C(;irrER#-9$)VVr.#(,,1*I#^ xAm)$HK 'O0^'󻝍oNg |ŲJQ싪k҄ rĸGiB $h3iZDgmאWĦE{bknE /:l\ )<"z9x'DHi)۴nQBDE)$ݽ^4mUNr%/C> }!*x!rd+p"M%LRԵZb(8X]ѩw?^n(@'#n@Wm<^ Q ؤNP[棫Z18 )kUڸ:>|L !Q\2KDismsyGVcEr`)AZgT*sD%iDY3j8R]nZySǧۧcǥ槃@gg.s_ЙSߧ]ؑygY*#w\Y6b|rnBo΋ٯ;d=RʩY2.di*Je.ܺ.{Xf)ȧs*_ԞU3tpm(]kҐW1:>n&oY7N,A$ O\(>ǻ=>C۫Nܴ^ m(/NhS)NwK\onJx~a8ɨ*/͊2_ =š:z=Tx+f!{3'yֵz=J$+G$O= S{do<:w> ÉL8E;;YΙ$c}+Ye^ĝ ON " .矓d}`JT=_=;B`Ik|sC.m6U(~<3QAB>Ishq|NIE#B-}զRHW!*( S99#$S,ް)VrW]Ʌq߳$HP;f[A{ 8%& "ZE cUn MFSaodPaŴ,$$%UO৮<>9XF҇5{aZL}1筤K !ډڿW8 4fP$V4w!Bwa ܲZw}B'pnk̏ZZj_q4W%f0;R$У.rI1?}\DgNwmqhنҁ3}u,Ǵ<8шw3cƘ%^~c5 [/"4-D^w4(THv39O R}v4l2\ruWy\u;#;Kd F3lR)M2ΨV,hir*9OyJkY PC!7?oD{{7"Clϩ_VtOvJnl~Nc%ޑ[ >ެ4cX#*B7sER3gtow. ?Pj?F)^)㬰tԵiG Z1K4]J|,S!$FlVRF ,>RCYIXJ 8QUj!2'\to^hUn&ysRG&ᖣ_LJRƾ>EOQ*3ܑJZjQ6kHgFH[&bCV"29%a5N\Xa˄jd%B9*4&U X1wHPfXѡGy<+Q^9(5h@g c ZDb4=B)u8eVH͂_ԱzQ>~"H)rMƄM7ty 4BCr M83P,$%nFSk:IlV@sbOur&DmϞ!8 AƐܶ ѠrW= S.b)n jAneDtSE^@co6~/3{gpgU/! ܰ\vwgn>yvӲ,?L8 iѴ0;EV~u,s|D^2ҚjvʲY2%XuKk"ݲ[_s޲$f#4=FU^ UMIΞ֡Gbv5'{j2$s3&U\ UH4!$6LM­#\toYIU#+#*%4*BI;kPMgInrޝ9uOǖ_#FG& nVGsGI*29c ,HB5, r0$e(9#’.`IFY#eG$=~)bb&OQ ^) HEQPX~) p,k߂1fR+e严d+繑^nTx ,0zl :1q-,pV;QHz ҇#1\M )qwA>q08CTkNĈcM0Iզ@ p{@f$i\svp!R)+G`<-Q 9şf ׉˄ qk֧cƕ|8* rȻUBpI>H6o&3c-\p32JXR.҄J>Z,X|_ȵ oKJ(Gr(WbvGԪ$q_G0;NAaj{&%>mCI8sF<'\w]!IuJ>s \"p^6#t&_)+).I=R~IV S Qںg&-X'@TrI՞X 6!O̪v:Lp]jU6}E[(dS1MÆ[a24u98ƢT,72O 2𔥆%aby'H X0!;*HT[=po6pFgH)M"07GJlqi%Q(:EwD^s8]v[2>j*(]P<.VwCdjrA 'Dߏ,WV4jk`Xmd-}MR(V:mqz q*ݢ(C/WG_J%E16QxZ0^O ɣ"V}sG }4ũ>,Z>WqfҌyGVn)[$iGT(XXJcF5iqpW-<[[A-A#$cQ3.!+Ԃq˭QKu2.ݜ>K]Q s8'.f]澘\ևߧ6P3>O̲;~ #?o6YInifISu_7f ~9s?Ξ]qү]wt?U|8-(V߯Z=Eg,gJy &r-%IMU4JNs%Nv'`} }wt8H8@@'NU'qB+ vzqQqB'Ժ98"֟'qB+"O/N3>NxqG=} =~qͩl:X2I5 6,IcT$#*r. 
߳9M53ꍃ3ަ^BInFSዿ?ޞxZ8'(ӲhˬCBkWupw͏㕍nw9w4ۓzޟ|wRy|ט}aM}ַKjGίu_ 6h{37G%?c JK wsZ Ⱦ="9svկݸ n*9z5}Z?d) -Wz S &{wp/}~4%$)c'Nꉳ7}pfKu>v?'àІ3}z Q~o/n_IOZu?ݾcr_è~{׿h>t<|ˈnzHN>o`w_z7k_q=XZӊ~#@gYI? vnHI%!/;fwwpއ8J S>I9MʠӐ-l&y,otN6zF5xd}r@A/PA,¡Cܻ(q169Q*pՌgu6&6I¾>U8_L\2UsN2mF BP Qt0u>9JUj*_TvO!>}ӿ@vYA^!0LɧsXbR{=N=Oju{]^׿˺R-mˇu蟪Raf{^T?vP?C{S{U5oAa%7 jCzR"{5QmlHy;wR ]$NurX@"5ێC;6=CbdW|3>{Q81 uÐc,* ĸJ%9gF·7 I(5ph5%<&w%pJe'5Ch%HȤšrJ^FFs7Z%/,1uX 0*M;zl}JM ()׍wJ:v++\ڙ{7ؚAX:jz6W4E=`SK:} 暈Tt~H#+0 ,PB|:" kxTtT+ o_.<RmԡaTǵ42uzg~c~V M4Mc޾n^oCx#j~&AvJ"mXiC(fBb;].s65#К0x~ bQ;t.GVHy(k C^N?X 't^b/ic!2t4Wf8M!|xLuk+ `.{s85{q6`o!F+0xj6?a1 vdTSyP+3m%[365|"[ݎ^d۱ ^362-˺@ΉZ^N5h }zSzYS^0~aZ* o:X 8~.WoP& z`6fyeTפ햻W߆{QJy;KWGXTH ~<7E;]"I)0 dpAq K VĜR `#Pr!JH\D ( S1%@{(riKhg[}HsPT=]o~ě彜w;@2x z. { .goE n dfit{GAA{=Az̬t%/SA*v/{g3mx/ ); QA@?*.cK"_p5&̬^,.]EnwU2Fe$ ;SKNvK%U eiOXxډ9SQ\k#Mh b!#FDT)"Sdk!S!,Jbq1"$>/ &#qҍ&6s [ZeE#1(CA-a< uݯgt]cK5qbӴ>;R} PL{D1xCߡ|t~Hq dSAISݔo~^0U*a伿L2Bo/{oQhbeW bL<4H0vfXJp3JLhxLWX0$RXIJܽ0>"F32P0BOp$v0>E*E7)VLtE& M錕-Fzax-8ʴrgk7$z[cT+/ţ*8g!1:|U6r{0ߑdU.Z=1F>B>g5 7J 4>mWFZ0T )AEZ`»K2yRT{ j-笫ҘpP@ TPDw'r!r sҺurA a`\J퀾]F\Y?[R%m2䂉 N]S"j©6NOSI@ܙe;d>Wd#4p8]nQ %f>4%X(`8QTVO`}mFxY9nTO`vFH=bl\0~Ha(bjQr"s7ˇK|+ T Dن ·ޘ -_$=µV!Q葅n[EG_Uz}]p#ѽ5LE%yh+^BҞT,b1Ad$%/+||S#UϨ`*k{{l rR6Cx8|%(b04(r?Gt&TGP8XڔZ*X99ψB7{K=#J ޳ʃ7]6L˺F/ɚ_Xy 5>(U v+d\e9ǮHJ L(6"K?VCI&ېiO0bc*L!W?2x-ke ^Мꬹ{_,~-6vr!UuSk}tFPP=t 6!V͆& 9Eaa+ݨCJ._Y|kP+E Q"+EYL- qPPx xŞBh/{vh&X{Ziẹ =P{*U*nm7R]n>7. -]9/#Z\H2Y¨ S@Pc*'eBR%% EUVJ+R'$91 O"*IsI)ra7>=fxw_42lL/ )D.X B8J:T1C u$q3$= HiJ`[j#q!-ĪD&F*#qBcT TɶDAԀ^d樨tQQi0PbK+NCZ?6LQX> ejq!e8NbL g92 CEq, QQlR &('ܩ@ aDXXA,V9-B~v7`"Ӎ@ ak,[SY(@|hB!7 b@P–,{o6gՑE8tRGa,!4.F"Ǣ/@<;O@uXE8A ؋I0e0EXʄr`.pO7|$^>ƼqAp|%0 2 SŴHm샹`֓*AJH4m!ӰeJ+@HaSRGI /\ -dF!aUT5SUvO-ك~(ԝ㐶6~D ֔6t@0IRF2K vQV;:#܀^T+jMLeGc`P1 F< :&$0 w~4k]7.ლr]PjAIl{ E^n!nNVY֩tZEt $a C?!\qBz?uʂtzk:_]M5abqU-w5Fp龦Z 0O56j#\gmpI*ObGIBkI\a* :ou̱Ո*gK$hkcZCHFɃy'ڳѴ--S&?iΟLUZ '*wGF1arm4]?7om|'fט75}=Xxts㇯^43J*,v+b$BXĵāRlARyP$ MZ M˳OM|_5Hd의(]q/B=}_A2_s_€v>\]'wmqoRQ9<1@3Ϥ g1 ,]QU,", VkF-sg;>]$I?X317磓=LF{p?pO|[o$CphM9RvẺH?{ 4/IO$=O"I&6 ɯIK${Kߓщ-~{% oK:mɇ~[9=?1vz t;2/ƏWi=0{ t YuFI!! . 
#Lɉ&[vp=Sh<6x%2=w"㔓B69?*Lgx~b.Ȏ;xx3"y}pˣb}G|߳zLx>M^>{}˽$3(iſQ˟^wN+W-9>X9~)fZj5-#9^O~;CN|_˩P%b.o-K[m%O8 |쾂vUCikz3N=:?6"']fø;e>;.qY\@eٸN2؋W֑>I}5[8gOdA>]4.啾|ז+]%z:bxRv8pw?Ow;o@o j{¿?ĝ\X۹hPwۢQn];anһώrW7rN.#?BL{X Bcm_m,:M7;?O睁?}x9nF c|k:OYk J/?瓚4?z݋o-vgY}tnƃX "zrRsoX׮m罱s Uv97y, W1ޫ}x؃׭q;gYt=f/'ߥ}bex݇s+.7uv3sfrmsq>Y߯ټt>wNkbvᏀ!7/MËȟڍVM{CX^h/a???tYNϝ< i8]a=;gWG# N0`?]|>>e\B*47WӈUP"n̯!+>_`t~Va7dys]>Gz\)} 8sgQ*3(#yjg)dJJn'yf[b7oNzd-|%HT٣P ?+fu`Lє8Uo|۩r%96UkNo쨕 gX=Wb3*1"z-&05Bj}'Y~"sJgx3_0/t!%lF4Z@,7pǘ0[\֛WJU^P*P !8n鸍j!.v帹kisIh^%tc lpq6jiodI)θ`_VV{F ҘWdy++)Zc$Vj\\I1˕PDhK%[|d.% <ҹXk19 G!`RB $aRa)]QSDJqF.Im)wr.dèl6il7i&p;:Agm*Fi}/z%&};a&Ni:NC۬)&Y^z2z6H]gJMkJ6Iul4 r"UBvieb+s]U^PBZHsCv;NJ|ݛς7WӹЈ.w9h;QJ6޾H+~{ƥn26^yY%l}bZW+;V+5Zv=OjYR12 VS7t?OXceâ9(RaQ bn1f6Eh?݆y=/1-Zwvj{q6ZSϯ+uT>5PrU!>tV>Zׇ iGWK!D^f#^"MGϺy/$4'ט"n&g^voR6IZN$޽K{#hxK7iݏI)M[_}S2 9R廪-d'\[%8xMwHu8uDEw"*5QFn`[%gǺm"\wܶԘ7.$6;詑$&DƤ$F%:rńژ(<4fѫ_O[k8JܞT?!jzz+\Fcgk*jU?` ?BPO^NsP`BxAMy*ܸ*5hNŪ=J#D֭s0?T!`M'~\V] (jL37pm+}Awg]o`Y?G ssC+ : F2&$c=o'UdwKE'/p,F9: D)ϘZea䫼aWUO>`ÌaI{۸+lf'm27Lv<_xDBR׼ZdSj҉@>^^ipD^3<99P"k5FɪVҐmuRd#OS0Y<jnxp4,:2-j($1hWyBpgd ۛ-zj=3JK5|['bfk^YU )dux*Ohx.>6[B!=@.ԭuи|Nn&{@c Y`&n> eAkE[Nh%ZFߘp"a7#- ~,%L7EEȠr{!u2hьsLU f9{ `5hCJn/*U&9gh`*@eiQf<Ed^@tk"Z;WF*=8g%"Zl:+1w5Kʯf&`sWQ{q~횦gU޲> tE @'c~\-*7 Q$5 +]|$wM\3WwR˗}F$oUų|PE)BzXrR_{/&C?y*̗L y6 #GٝXO拰v<WkOLh (g3Ei1$,)?1YIC oZg'FʟZyT-~/S?̧ `vDDp|F+;z~~]Ki%UD ̀A_MEǂ w7ݓN⩉ΕdnBkLF$mx V^ څDDs"TՐOkFamR%:mt|ԢUs|'tGo]`("F'<<:fځYVCi4*rXiĺUpzBMsyw($ǞL)nsF%`헱a2|ë_{ԅ>0ss.ٳw~ _fs`3ٓ'vw5__&v_o_!Дg7~gaM8|{K~5]L꫘1Z.YƸhl^k sPCJ9"9x:(+WAw5?qzc61"c8p47{pSc`>mIb^eMHR7+!0QI3\G⍴Ea*7 r*nd=3*˱crŮ*70 BPs\wt<~ y\a*MM۹s@4}ܚ97K.[ߘ(Q5~L?e㴆 H`J%e2[^~3j *BF Uhv2z)4S9:PBĸHRkf.U֣H%q]BRe\8PuaZ$ⅎwǑ0KQ;;$ęlP,2eLi,RA(ʄ퀖9*Q8ݬjCQB(au zD*GBTBx2\>F-jyYlu!PMB4րeZhhqKAɝEzdmj%Ú'/EPJh!AȂ,Lܪ ʩW60f=~r"J\*B5ԷXjCr*M(?zkXӯxb ;̸>|rUueq]׋etqb>ȧi#G(Z$7^=x4⌵; dgq8 n)TʥNnnf~wE5HNFM̜/^ٲ+ ƃ%sDU[m !ZcMH5in܁ȇ!uNR 9/uԴ.URӦB˕VE]Ϧ/O>`}ލMRPm@ \nkI}^}+ԋ`z#_qY+qf o yf#}J HXdHޛp/ u`ȼ2iǎ{`%z"VHwa<_~<o_h{<x|ٿo? p*_>RJjT]toS 4D# ^y#%O gFZ(Ǻ,-*7pATt2[/,C {So)EZ#=!c!x2GyǪܤ-U9޼椕a|t*bQ [ɉH1;4c[s~18c㠳H "*sncU(3mP=ѰmK!x3GcTɐC1h,x 34foyb3sj vX;)3x@ %s^:+ RN PvA co>]&TP4 ԖӏhY)Obtyxv4d) ?$, ,>brQŁ3G.jfx'M h ]K,1Qr幈 ~$)>`5ݔjc EϜi7{cuE'!&"VԞYF8d{6Tl؃2."MJn\yNW,RA!$#JoU n!k9TEpQY,iSvFS[TF}-`* R M la?Se>2!6J)j-J1{< KH7%UiCW=E &IBMPŊɡ5;jm>]"^o,:puhUfd;M3Ҙ,m_2 R3V.딵 _9lk+*#D#"T=\FذP1|+cFL#^.?o ~r(朐ͧ@s]1Zi+v)"a+5$H0uk@YS %ZxUhҕs5؊n&?u!iy1ӔP7!Z]RWw |Mb%ـ~IT)ey묷,kFkKZMN\H0RԹ Zoй(a Z;p3:m)PBP8- `'a 0JX.1ZDGr@g" C3Yu{:!=`RFjtl` 7i"V#P@I1p{ dsq:DjFP8j]ɶ \*r Yk9fSw*g.JS"X[鸢`k!P2tt!`h%HsmdxY;WOS)%V{'OoK1 4hQ5l9HJrD~HBœ2Xp+y;.o{Bg"[. 
e;1d2?qūLƿ}z_lu0D RSQLd[ EmiV4+DA!$ȄӟiGuӺFQ]A IZ W[30s2Z}TF:l)7ZdLx.-qc۠޳# N 3Enxyk{e@!NNT\bI\d}.y=7fkK`\,7.ԚKVDa0~W|m$@䢹{sLS5-)RF6r+isW,VRIsQg2L&6J`?٨%#JhJ9HB`y:ӭ*55HF\e(&85>zc5AiGI7:H9h 1HYӺFoupF-V֪Qkؼ(AMkP%"[&+Md:EUCp<^NT6pcr˖2X_n>/Nz,9;4ύzO9 >*>߼{Y(~P.> !>A 2(C mY4yr}^4%a ިd7mej!SF~'ޢ:{QJK=崁-I<(*ӹBgH{f)YtF&kT&r\Xk(KX0$Q!r1i7]c;6'|n>=OQ߹mR8M,D"lڤ(R^BF<{2hyR *61φ$iЯ!*`033ha>~o~"Ζ۬B~ghN1k1wHL;0jE)P5+j^ +)VtE %f5Ȳ:+{JYnS%.L4_\"&lYODs@SGBQ zR}AcB(Q܈*DppL* i1j2LU76Y>k 3RG^tED%tY ܞdẌ́ o2@i" $0W4J-^OT5}H.z~1Ƽ'="sƕ  '6=&PDVa DG0EAAJu‰B ܯxvӤ€JE]wlw:5`w r-VaHCunf5.EW(Eހ)s0P%"%0L%.^(jsJ~3nZ(ћ^*/5",'yspD̭:B;^uk}0`Sӿʊy92d%vR/FjHɴ8 p0QNȀ|z[!C S>痡aj7% ŰBxrӝ}'ܙE2@)0P@u+J$&*rLE&4d-'as VTXPVGWaAsu|l[]iL9#4*8HάR5!$\kE,bfkV-LQF$J(*aKۀ@4<`h r<ɩǎ[1a!-Mp3)Hq0̵R<EpӀVYK!Na ;ģWȂ(NF%; "2yJ,W09e&LD Q_ǼyzΘ"-sc#GRYׯ3fi,k/d'Cq]ۓp+i~Ou[U͛㼽>[A'=<1r[w+enfwT{F]'[rGtK2lx% /W9<\ ]^vHe:PйeQ#M?ʲՇ3jSy!q^7 gs=;4PI?5r rͬ[ͬV7 ,0hpq32jf5J,~ETץT4Uԁ+kXZg*&Ç}í}/0{S sQ@k3j[2]Z%|^?_)nj'ڻq5S rk#"%DiyFKfxNܜ" 4,#8(8M| osUPD W[ (J?Qjƥ# s/|Qė0 bvꌌYv ǮMt `a?M| s[*<ѠtbrK 3BRKӠASEr6U8r fUƩ*scg_{{C sIF8ޥGj%̃{ZTKfUI43sFb4,+`RT c0} |M|SõtX i uv_aw`ѻְ'%xyzc5Zec;uq9!'[(.<h7TOװJc_m!%Ёɷ8|sJPeOG_N󒒯 -^ -֎VSQc[ۉ{ !X#Ɯ$H[y=* -IGm m zVa[xqpoq{w8Ʒ(Ċ<ŷ(c[ \nي>u!^"?ѡ[,1kg |~~f-GHXK,: vqС9!&-Zʅ]CkLGZ=h{f%a֡u@;$bKXwPzTb]o@&t1'c[gt lw&~k0o϶mzK6с(zvʁdX;#vH3+Ρ{{6<u Ucq ?+RGѳf'P*TfP.E@yFϐsaHpΉu5D5n]Qv|p4@E^PtE]ܣV &+Ko4#X[j8ڥ?ysH7BŲyFǑR2ޟwRq;rM|{ףY1lx}\\L!9,ɗk}cT7X)x3}t-q+rSOWbʚ_Czq[;}Y;UJmImouUޝ~I 2:suvX^ͦ啟 ʫ-^,_𣃴 jBmCmH`^uAS!X_7-Q+WH^ TT^Xer섬8VNw{i^]nOZw xFwx`|^ r5wƗⵦ'e ? xEhZ28oDtXzk_b LM&D\:z,=WOH, ey^[ ZO&GN6&QDvL}4uD;ij{a$p0`@(>ڬ|v훥yjȯ7lF1ɪgdX)+ 3 |F$d?|6KpͰBv_:X^)#+؏byմ7߹Oy!v{2;)'ٞޕW 4%&" ] x/˹pʉ4qGSIzUFTPwp^-M[\=lFy[G>H&k"pth<zI[DGڐFR@oZ訕(ZCrҦb8Ԥct]nDxZ7u̱{hTgHɵu^&=.Gf;NFC6߼y֋rkͯ[z(~&wk:FI@,+o|'<ם}`eb$q!Vlqxw+#u8 ;qdžAKӤB+LVj%\9<{zTrg'?'㭰U1^%1me)n+7_V!+p{5=kP56aRtfSQЈ/ L 3[Ï AuZ8~#F^; rk 䟊G#w:BaMX/ےp#s*Q2p)PBVEC2O%<6KʑT!l2hC;~h/f83 ASz#}Ũy[|l}rWLb1"-Q[OC?jQ"!Ggf{doOk0x Iڣ.ۺ=sMGEDШmI XViw_kIcq!,%&q4^lH<?Clj=thBB,`>< 5ѩz&قB# !Z]4^5{ruORtW2.6?JabJLx5坒*RWR74g%Â\8H- ;5jcj#lbsccg$ɥ:g2x"CPC:lxGH,'~[6g1bfƃܭM~_wbej{Wwo>|| aB#;ׯ '1r1fɻA5#d'ewrASfM%EyH+{&ql5i'ÕƶtED0i͎I}/&.&j8LZch @[n*v/Gxh.F׉8%hHՅ@ 剡yAȝTBW1{Y3RqN5+)ɡ1Ar_.35̕w)toЅ$]`g$Þh= ~#$\pe)4RjBQhBSt;z xs2dp(Eq"߈$Bj* >~fRBzQr>+ds"$i{o_![RP!S%"LJv`jD4S X/$LW6< 1҄q%S"{婩9!j.V7VA%n:[/6'fD>*!ٹf*f @݃<d܄yQ1"6BZE]$'4%ʋ sQ1xVZBl<$0 :&톶~8O:ĀY"1;&'^GEa*xq*WPw[wњ+D.t$e)%zL}^ybހwn|&R 2K$h$|L[sHMB(GkhRf2 4b) .l N=y< 1Ƥpk U͓HMH"cڅt;qxAJ0Q+tFC Jmo|LfL$Dd-ZlYo8 ai:KsKsJ+M>Lk9v u$qL^ȦCvLώ7g_ q\TL I@i9: x4 )Mu(0:܉ѡm{TLzhFyszzdL`4MBgkhŏba2'#:& eƃPp*jקvq?]L5Җ5M 9),g2 YB U+!r_Z_%EH!cDrU\Og~ҾdөU1Ιy3_^lɊl~5Kõwbn\n]UfOƴi ;YF ,C + @80M!>bV2g a}`QM$0 M pEs%gv\T% H V g(Ihfn)=t~̪/~o&W|xwI~!_\7IXe}]&tHqEBK)Pd., A(1Q́q 1,E pM+XBQ\@,sW\)뷷__ßuޅCE߻O5gs%6 r;>6A"+p`TB]-H.Q%X%H/د9 +8B֖TR zHe`樓z*}P?9)$@Fh4QЁ~%W$N^UpzI(HYpj8 "}߂.ϥ[KI.I$cӻb斧.OWWıfNvO{/^y&EvX"jf9ͤ0`HUS8Ρ րH i9¬,8]a`>IǨ|=Փ TZUX]a`C#N{,1hVTGML=dBY& ׄ':LJyp(簵_s\}~tg"ފ BR0y cP.؏ghH!fq;33hoX7lv_v~!?6+JlV<%=/=fN&EGҴ%䓏3"" JAUU$9}9$ϼcY!zl׋AOe_a$Oe_Aϕ}xku_Z9IQ3Z![-вH+EY+|CÁfv!i(fv!6S>$FBb\%"I>IWQYYTaBJq'J ćx{8kAj2_n̝"].V@fӇ$sPuqW5+槫r ÷NA]weaSU^ږޙߣH '=7'q)&'lcnZQ;fL(J%2U!rV+U\'jW~~yW湈8]Tb#sNN.rNO5Ggf(L*i @QEqZ:[tN aZ` iWz:ɉxjH"髰N?_38Q3bl H`02\QhRIs1aHu@>Azh=DH&&*'mt-x!{'Is{N=sOƃv8󿬳;~!;STt}K&LtZF-58 S^m8׻}!-7$S`ӵ?DDO`xAd?|6gaDaΧgq/,} /uu͌έx9K?|}Log*9f:삐'SߡQHIᙫPTmb1\Kj6Y *lEQɒ[-} "bIؙ S+0l/|}U4fxgGg<晹]YGr+ nyX ֮/^Aȓpf3\QX;{k.K Y_DFFdƱZ_i@99_߅EYV?t۲Ց)BNJzEHhpuO.UjS*976+ZnvRhz1֧GCJ9,g:eFqǒ2HB-o>ݛ?i%ᄃmQT}2.>J;YB'm?E ]rX>ހ0?ށXsO7Whdc̍DR "ZEa^x1|%s@^/4BpcN>h&vJ8AB*:wI<壭@n $:|:a@+#y=MïMZwC; 
d^ïwD@cߐ vfsܹh-^~Gw,rK!NO.i-,ѷx;Em =:}8rQ6qS]ZO?^tx-2^zFe*|{ jwN;zFHoׯW|}<4cɥmt"|[@"Eiyz%ʔMڪ:,|))ۧGɘ?ٮ+@F@Ed:j7һijN]rQ;GnGJu!s\挏((fQzSi R1*_+euIeZzllu4ː)Ag+~r<8r8`bg.y$i]:A@+AIXAEQ0lWJ.e%2 JQTKĪ G+6Ђh9kL'߰S~ %T$ -{-po2i BKA`\jTiNczV}F,to ^b>{±°E&$= s>+dLf6Bf9Ϡ3":c0'D x8ȕn8'TI=U츔1Z"̜1 pS.(8zSt#\FuHM<9 < ֎Es6+TR>F%O'ksAtLkz d˴s_0f0ȽesR[sZ-hH!8~ )#b5%=@B ΤJc"+\j%Y45U?v୳Z<@9 JXpYU&AY/9O=j7J?j(͕RM&H4(iO/mҨ BɈZM^vB`KoSeiǁT Fb&YItt|Z #"aZ0B]8b/|4̲"MwhґD<* rմDLEt_SNń<@[6B,Z>*y BBr"-R9woͭ^_~[.v_i'O·JI26q29Aټ>.#L1'&|.ޯNSLc'Ȱʆ \2gZĴ2JM-='YʲɁh+Q7\X&cvwaZ0BB! :*CFz&仌0-ljmBwa+z:#L1y`vjm*dM&&*bB X9wasz;c#(pK4/_|z5t9,I.дX-AȯLOLiԓH:>\,|]HhmSLLSL^X4O>oO(`tu0K7eQ;x}]FbR9HcQ5̓1Њ6K*yl`ٙwaɓA1c۩.c"ސE3@ǤR4YKtO|\Fc/g1:W.'rlQ QD9U>F2L3Lu:8w051J&*E8PY)yrB#GjHc|NGҨ8tqҨ w=4%.*bL|raՊzNDҲv.=E ZsP %ZjE3ܗFn.ZUs{5hJ?wnbr5'y1Zw{|, [Hv aQ#;!}01z>;ȇc/e |lE#-*vSTV]$QYuZw-[dţIYҩKPֳMM8;a|80G?Xn@KWdlX4k29/7u,^\7_F^4#bVt>>ls|'ޱhk[39;KpE ^8&?vGַ护ʼnK`M}kAz{:Ytbz5p?Q|t퓔6.|{I<4;v>IEl~cI\ }rܥc@}:CGD%X(uX2Qgsӳ38дz V-?e>Ų.MO!Ԍ&@kn =0ӛ_cU{2Lv.̐)+hhԏVeJt-ٵ_~#iB,ư9Z7UYa^ kŠ>&i=3 k+Lkme7'rE}P0u yr}GGh,uqg.ypr5l j v$H 5^6&5T.~PVš̓(P5KW-f dfҙ/L*Sv^>yBϫ p=w pH;ùkBΕo/~C*OUɼsјMM<4w.Q*&mGT9mu^jNFuus]]iqgg5 _i rZmRttoa!=<w:S1kh /C{C|ؖ5VC,E"7^|xMpQ\GFVr9i]MV7!0̎X6vћ{ʎDT,8W~E~z(&+-kAn (7pFV?|? cߢZs&z^7ۙw,Wp.b KqiUPh劫5x |%V-EZY?"eq zaY=H+Yq -Uue>ܛ`'X2eUAaEb[֯>Qo%A!%/m']$rii +OMZ Ȉb(l(]] 9c'1K&;&Tg̣q6Tw6犌8UnnfZ2vsEӳ38(#We{ߧ.|<:FuQ0Iigʬ|;.|ܬW#g?\icFCB ,.̼?ji3il`|<߃gh~QS=/Lxܝy :O7M)\'(W*ƜOpGʨ9ӒEQphh"$0{I u#Ϙ& :oKG1[{R "BANO|{̄Ke\LTz/8Z*F],mJU8P̓8>\V#IpWSBB,0*|*.N8k[UpL!iUQEYFUG 1U)U~D^SvNْ%7ʦ  ;Ec2S*\|Ѭ1#f/ ;)/2>E6CNE2#bcR),XJ&R'K2ӎZ}蜕f@?|w6(S`DMfzWa'$1߷?x^Y嫛{R\A*vt`elM2YZPHY5 !\δhrrX"GmW_ (TOs;Zrh,w !I17&D}+\f L #M0A E]4?. ´%oV!{S LZ~?5־k7"&" tr2̈h_u6Zi9pp0L_i2HQm1Zh#J*^buSMv+slҥ;)dHYY jЕQZm9&1Ψʍ ,ZT$(:t9*餢2#ln [z{@5o?lN$?݃{tۏuQS3ֱ m~f}E(OV ʶh rE?MW7?B_mPi5 ޟ))ut7F,Q>NܝߟLjnX]gZq_qIp"v":6|ծC*ʪTf%JJRI@ r=#Qݟn~hwU:eo{M*%`j1(Qw n\ge[5m UtNY`7j ffC@LͯW!C!Ik8!id5orHuC`*FǏG*f~ li [Z$p j@"y5j&}}Hk`3"=B[b^Щf,;V 2:lVpߢP2cBç;)OwTH]r;B$D濛| 3Uq{zqY'cnUVI9KNy./ B#JꐖسXҥŹ0j/k/Ղ `@jƗ;M_/a3r%%l4MhۮVɑ7'ؖqlql rي6sP>Jlg{B|:"^#S4*c7g`-0L>uA.bvnEwHfG3ܥ4f~V&oƝYLBUq{z1(z4ߑ|2 o#݊!r<5C%Q!B;[.$ 8k:҂3}ĔRN" e(~=c@[D~L|fsָ|?-#+}uKy{y_-K/N5zbZ+cd)X<^иtƦr@/a1.'f^P.%gh~kZ8a$$<%#L|:i/:YKFg^kZND]xR`kpn28F&LEĕ `9*_ )|4$OC٣70AJgJ z5j;|5p$,<0A;PdcLX>AhP$2{S>iap,IΧo82p*<8ħD/J-:7 bFcU22H@.+eRtzY F@DyE{x N۰c<-+H$ڢJt4dƜ#*ɄG6rVQ1~06Q~ףt>ca-%\px>ǭis>F)l8BCs)kW[5XZ JTC9@nՌZ)4;W*vVэ9usJmT *Vl.iaPuԽcc| C2lAǸv1א\E0b݊0 o~|P lnn|O+Hb{Wݟxv?0^]OfPLou~=*aSD§Yjڭl`NW7z /ݎ*kq48o/4X u~o-Y Pt|tJ@$C+Ea+H"Y:ʔ_x=?p6y#&/*<[T0©!̃VSC<|̛6ГN+do <9M̓P/^ ꟣`1N_D!UȐj c-^^n\.͉w<i),{R8rAGኀ~2Jw<1T`Nm~ۇ6߰^_n-=);EȘ_\QfVeTWURLHG,I{vq# \,ȥ?L"K(0ْ]H42TQYΖ3cp:3 65M23zؙYK]5"gbp=` a[G(Eo߻KmDTϷ}_^f{yׄMm5L,/CU,A;V T|!k9^^3[JzKy`>6aqW.޿Tb XZr$^_㰌0<@BK Nq,8͊֫9DC}Rcs`8}VfDBEvR28ù ٖ)2fl&ei)9Cu4N*ix1ky֌==mwD]{ë?HgNj[9~S])A߯HZ"}{w7o>oέ}bwKg:_GO_6>4$w\ćnço}.Iaѝ}⹷`?]\@מABmPm,y^\ɘsD-_DœRsHKR9^ZOJL9Fs7x2/Q?R'ݾUf=Qh?,>ΖXp ga ge^ka|]ܗvbmv]/J]@.i_yoY(( BL!%LdB#l N:pk*rs@l 'Cͽ719f" `œe:s5ԅUΓ:$**;€3PrY|G\ :xƓ5آ3]X#X)7R]x֦hpԢgS F8Q* +ޤ|#]^U<ٷPs;BDU >'0'z6 NoJ|8w=򰳲Ͽ\k~Bǹ]l)쥿5 khMJ䋐m!3{Jޥh5pS@ʴrg*3W?_?%.__Xv'?rFdU !]pS N%;s4BONxCj.oWɀɰ}A> hPaE @wpO)EL#.cܧj!:ㄱ,}H5a*dmLnH!\ȾQ19d?vUҀN98i,H~_*&PFDs6P/ 5pegUEϷW1ϻm5w=&8dz'd'NK!\..o(|% ]6DS#)ϒOj-F(is.7ְРgK "n#EC)}u$4]O3֔o+Af$#DXIg*BU$%>So(4hwg+iYg>2pe{]ʿDyY>Z7>2A'#1i=ɮ.4t3{m]bCmOnaT[ -,Q8gq"3o5;'Ԏ:Yзf٦|"G"ҭ { Q䍂EH7xv!iӑМg v"t,`C܈) [ne(y.` .~>2P˷h.(Zҡ$/P*C ?L`{T(=xE9MwTsmɼ>3 T0,잘2De#0t!5QCqO G?¢fk-ǀeH'.86pL%L&pAHҵA[X.eX"XZe2<.cC RA1צe# ŌTvU1tfm0TGSѺ`W֧wV1Vϥ3'TtpJt:]zM @SQ󼓦44hr鳌$è+ JfoQ 
fsB$ԧl\V8`l/>k\ͦSBWK/#G0s* {a)i&10VWBw9TTj>HV 'KUF]sfr#4g*8aTWsP#eӄ`Xň$gjXSΡ]\,OI_0ySΣ "ODPjHA`vSԙ,bC8R:>HJ@Y|Z ; O@X"eBAAnm *ޱꏔc'8|*%f)I`4: dnI1;}W Fh|~3dz:?-竅 aA=2`Y']'FB^ WdsAs>ZG|Yh&jƣzF+&77b1mzr(+D2 an :kv a=EYӢ+ŻJ_*J`%pk"d,SDhT<ί糍C0ŚY~U@`#r|OVdןgV>8Wax1_'Sĵ9֜l`,wg„|כ{.As{g:ookoru fsY eݘ !Rzu֣xU/UQ^ỸGVz|$/^8gıVqA{PK~7͐Е}.64[Xr{kؙ4v~-F.@kHH%skVՔt-/!LUJn%t 蛩73f4_\>8rj A@m2(nMYD$ǚKMipԯONߛtn> Jy1U _^{?+o{Y|{v֠:On<<6v'gl+F%"ͥƌ qo6qB3?b *\lcrM)fDk˔\AQ. sk' A1B3\"fE0!)r?x>=r Tߔo˯RְD~~BSX_&pࣟ&0Js} U7~ɨ,WdnwàX }aX !}d45dyDc>5H|kf3xWPlv1>I}2`J{Cnͻ !ǘ=pfz*UxĮgN7? ӻmc5[z8)@ŘWWMZ%+-ļkvݛ9`7cC P G0;T`wI#xbK\!*wwYSj9F,b}sm%hmV;&/N,#lEtDf0"cLex,TΜ#& )e0#MhA,@kc LZ(+ކLmBoܧn@O'0{C?Ϯ?P%Z]­KIk.6G|3q Q\!yU ?}Ow~1B*iU.[j '8VjOavwRB+7tgl~DΙ]_#MH z #[5,khUjV2SMQlOHe.\'VyYu&,Ghb>5)kN?dYumj5P*逄*οT2TOB* >;k.G)_4|G<&pdG[2E_כ"eη7?BjH` [cMds&i{ޑ =h$R Jby抌\fiQ(- C kd+i:IiR!*c״8g C?NT ZD>~8Q%rh0%sA7xqO#n%NbD|\ů<UH+]dć%:P #4P^^v# bHbuρaφ y_nzFѺr{N5t'~;.x"x۾.&, # qVs)]$BXD5ZrLps̿>ofx5:_,k{6j!YZ.׭XKnsT^J&h?p&Y*zӃsquf{HMÑ1LT)bX]8˩ٜ`˱w"hN`d3HUjcK5+ցYLP%<#ST$Wݧvyd__vFbXDsD>rY?DÊדٍV]ojBk=5T?[ RhތS;x_Zy~_D&tNo4aw^ qF䃂iٝ9(0zv%j11vCC,~x_v&W4rIڠfGkƃZSפ35"eE+~_\r}uVkrrF [,riq#5rP-r~tV5kOJEn"{ %b}Q;zBv팙M\IݫYM IK!6 b7_5Ϣ0=FK!21}S~dqjD`cIPFgP4cqD敷١HSVGulw2ʚ+Ew>1/}w\EHlb92x!KIDK! /[$_n8+SSt_mwQ?%T2Z&m%PD>W}ME"k9ccHu:i{C^城зHDP )m$)0I )_- ?m?,;ouNdBjE;aeB\sN<%[H-sέB1vM3'czd$ü&Er5%VPL;/` amy'Fv#`kǝ&1V¼;ࡔZ[0h wdF!- |pq<52#l6jr)83y% KE@_Pswi(2 1hHk\1z}FA-od zoxS;E 7I |OM<9HDoBg;vav8ZÀRӌ8`?oc6W,o5T% /f}<\{!ZX_|WV:<7&RT'FWviDŽ;õv cFY8'4|N ͸rQ=u9g?*?CpsR ^ )Tw4V"ϓ?Yk珄$.NUyi/ &ky&MtA|p 4&|MѶi:6MGۦգm.'RFfYڣDYtZQ2jPygǚ5?Wݺj3i;$5_N8kߣVIXφV9'ʤ_f̷M zF?a,NSsʟTV˟UagAԼVp uY*L^X)UJU-(x0^Ϣ|xy8ARׇdi t<}us!w륹@o{='H D1J77.I8+ɷ6.i*OSq~jqj\4 A28mFW]GQH0RQbi g*۸qi;̜С|fm0`?OӪ4OӪ*_l!cK&ɏ(e0gਲ਼!:yEGuk.EGy``-(Jw;-s)exlwn2^iNLۦ60HCrrԭPʐ~Pr|ۈtsmko[ qZNOD & Ӵ8M Ӵ8.(V4{F3>ZitE8bsPϵ bmY DHi}YЭ#qTCBsyhy0XK"ike__Wb廅unu]l A"=v 肻m5KŻ'7kH=Rކh[NYDQ"Z;(Esَ`5tlĹҜV;,n#/Ђj ;QگS`H-|>6ds*${u;jjU!z|!xUhAP$pC?VD$4cݒy^!*7Ό- zcwseR;|1ǻcw__§#4as}|+MįcNxpS,yjߝ؇E zolfv&9S)A߰=ǁ9Ε=@6P_42Eֻ_tn|o_2Ͼ6:4{^0$g JԋHx89yA?ՈJ&_O<оn|~@57 =ҽ?Hڌp!`3e}Z-[@ޅuyEi }PL}'^;5~JQ*3?i7@FD4pIJsg hYxC.b{kEЪ8q:.91|g.^dqnː+؄W 2S;2svфp_K8$xr l6 ߆5þf]Dc[%(t=Ծbfgkv7:)~&_ZxA~vDi܋Lu*Ʊ_4AA,)}I*CQ; V=1QӉi!!2FB]CqN,1Էqv 8982g'?=NTaM_51zx@[ȧ!N9 myZ ݿژjb:/aIԀfa]O (.T4*34jzJ]knWzrYބNzȚb--ȗ^Y!˰eZ<@pň|c< Z0^U,2hos6s<ڜ:!)r]vZ)h&TY2f^ mB5Oڹi~ɢY ڞdD&_쪼>\dm@&/72)=kW[lqR>*¤DWԥrBG'a=߭sNVס2z2' kPFwGL 螺1׳z:-nv3[|BK /7; %HRҙs ӑ1RXαhcyY2Wڒi@Rc(j0FS(Q&2l"`o!F_LXZV"yYm G2 |d~LCKpww8)'S;©-Ii}ajʊ"Z&ʑ@K4jkRF H#?>T=G32ZV"!Q!2MW S_֫+OOC騧t LĬD߿ ~>Qc=Ylo?@eOr?8[,W}s3$dn{0KN:#.&7vl>[^4Xh6%0y109n鴬f%㘅Va/nT>mSUֹzμ5>!%ITڠ~. /1P{C.A3@i|\ʠBWMQ0H RFЭDBJB  =7\=N~U5ժG$qtz?^?GU*bj;jr;ٺ V_ubџhT`Ԡ,PL \&>^(7\cM44`Н Q (C&&@XT]k"FP1dZ4^CY|vPT_/:Pz|iN`E#섧[#1M"bH u"(P@)E D09ZŹNQ/i*O 0Gc*.ijNKO}TD:JV l_5}I3HZLHp,~f8i$H-tT18n29[F*^3M@D&`r*qlL+/dM}#"z  `Lꌢ i͍GtkLMZ65;W}\?yݦz9wvWiEz=[O}󼼹_/һ0KՓȭK, 2[ҫy797{5!\ʎEdm0xDJ*Lt owПٍdB* n>Z9Ek80S]`3?omnA֠z7۟干ec`߿Z@3bƺ%G988YrP {qAL}0 khpo_< }~ ^U2upOa:`iN0k1`- 7 sX / 7c}2cޛ Ji-aG&4e{0=43!1q[0I;{ kQ}B߅ϳ2yza6=]]wWχ} o 6's74F1cH߀:"b׋ݕwOdgiL+ՍػmdWT~9Sky؍TgRe\ce4#sT(Y%JE$&XFFwmW]wvW`AUvHS*%$4dQQ ^CJ"0)BhiϪUC:.c9;76ƍM8R2$ ca&¼3UH H$BcDApO4$BL$A,P@b{GJ0 JiW!'ͽr[{3Lgq jr[@ VlR2c&jdE/ɂux1z.J.ehYqXt{kd4 kj/z/?*DmVfVEz^S˧> FNQE͂D@JQzJ :a'N(={<T@wγh΍U γN :>:)E~=IW GgN!L)@1KpJ&ۂumTkx#u$kZ{[eʆt hr+ AE׊/(jM8&Gewovg%+CAt B|+,sgVǽߺeW0_. (\{}o80n\w+]T)8ҭeE0?X. 
?޼vk~A]fhC_*,ofYo)"we]?G+H<4J1'a( )1D8B4\ոuvܓ`hh(DXFXiDp``!gXDuFfƠcj\{41E7u8V1:L(A|RMP()4UۭQbb1QX\,{DnXR !C+[cK\Ab Y)  ק3"BV֢ͤqJ+x:1P(8d4Q(Nx{9c*7yU~EZ9^~gpl2Jxx'z>Wq}dcA҃vh0y/|ϵ\}A>Vǩ~ YD}^+aY"XINO܉yZ, ^o?ʦqbvGXdsA]P>߃T{:0=8[ȎƃS">D#ؾ?U%E)dh3q#qp ^ }$Zo臚TRCl*g;_v?nKG Cvls! u CVyR挻 >w cP^.l7(Xp&0NM΢$Q3ZN fN&Y~_{_h{ǎLͭcη&i4s0EV,ؙ!zoGӕLdS2uКYlW5P23;]!9wsGyM.`aK|ٷ;}k|py6|n5jE2,6 9VeٯVk4D!$H &tH t&3%֮əl{OYIHS7ץ{2 &}R)sG(t#G-QCg%:0Q ۊ"'MrY]ĥpt.NYT!ORFUE0P,|ע餐Dv1oYr1WvA]^'`ho٢r9TJE"law 85EA(͞C) Km@qyqT ( 53e9^)$2i>]ht$2[;}iMYqhsRbK H;88b2XNَY}9)=ME5|$ d5,t'%PR$J S# $>NT6sJt8`p (ȦPytlϨSӒ  )XišpB-sc})&˫.Į؀"hy0 Td)HmF^oP]#!K; hw JAqrw:]!$:B(EuJrD@5xhlX')%wཊ՚>:C #۲/;آEWK 6&3sEU!.Sӷ]!ҵӅj[P]N'f;BXݷ[F'NHhP,Yh>7\uKLG"י4 ڷ>/>D R7}nƱ``PFa=as ؕb=t2`@-}UJYTlvߨeQRյ͛,xF7Ɵqd)s"2ZAZCRߗU AGryY*+v$m -|#iXn ֕Ӫi.T.Z.B$Eo\TC!RrlXQޣ1an/x q6;w~ɻrtqToI^N;߻_[ޥ^^&o@w84^9"l6 f*g~ 'ƛV5Ơ j9IO?o~}ԯ [vP;l< >ojM`~~>wpNo0VCKi &ϊqQDS*!4q?b 37; _1t9Yl}AՁ^ l`d}'#܉iF3٘ y4ʃb mt*A""oKLh3 >s~o^-%p9cw[^y+Đo{^o?ؼjh}L\3zg;=.VP: CF B4R*"f4ũ<BHSXǒ ŢXYU>:r&ɟAP3|sUעsnrKx*UrH jcycu{~"k͆okp|ג8/8UӛϓV-Û.F|.\:=pd9zd鎹^};X|Qpdmw 5A-cl?ٸlІP5)WYGu|d7ȚfW?s*2ELzdRc2zԜ,'ë,À~N6k!}ݰI}VDZ}pR3BoR;,xq hh\(%NArw{'VCׂ8.9hmDJZmxXu4GNjݽGrفZyd0s< ֏bʌ[\?nY";"rS=.Lf ZVg %H0VgGiaP@sK m/_0#q H8) %RJb̶gxJV [9n=5΅HH#jժ#R J,BJBBj`qa!DQDjc(\դJs3sFqEA뫩6b+k0͟fγwW .d_IRvopY$TS07 W$A AYaXtj|+(n4+wOwp'׻0hMw&'X>=xK(`Ɗ?ykQ9%\ecs0&_GPG̖56F|O-gߏ&ٽwi21[h3X d[2)1 YYw>s襊 +E+`(q*Vbxsm 5; h m<Ϳ}, C]Vmw}K轥x bu{lԍu)n|>ccTN߈_*)RQ_F ;<#B|~sSFvñΗ/Bj?a@ltl<!љ ¯O869Hڴ~{PL/3n}H"\' v^@rԆ[O*VzqmՉR[O`kh3CQL~\c&aCVR "4Z>ekjJAh@~%q_mOy4h}@jGZ!jFtQ cY uumR@P 5{%!ޣwFA*sޓ? F/FV*K3~S +F-E>#QMQK-F$c VzE EQŨ/F=J SBR ^qfːK%jžiTtVj)t7ӧQyO=}8y0#=g2Rmn+*qɆ/3 IhosḰbّEژG-XT"\JpKnO) P'Ӿ L a>76~3 z,p]gu9/6BJq$i4K4"DbQC%t~m+ya%k?'!'vqNFȜԝ t  u;z|y,qp-y->+Hݮ|t­(W Q6ThdZN4':'H1H'ЊrXNc`ݶc!>~Bvb2-JR}U_^jDz+*y ;Q'+11ˢρpW[?~ɱȢwAf,m}ѓlz'Ygz'l`o 1K_c;urCi "-(>V 7I WRz*,:pF.eB݂Y%v컱'q0}n9|p`FkǦ|}u)^Grk?z9l_V4O sgT$Jb'wUBuY9B  Qvf;;n\3UOh˾?lWh]Qy gQW?)띿̏Bu]Yo$7+~1 Ljy<<=xQ٭iԣm{ҕu̬,uـR#,T,T,TP=__5p *UVRRBA+ɑ]L%lTqZSR~Ϋy [ۇj|Q\"o^:ݢLk?]g).FCfv1 y|o~ȵt/rM[pƵ]D$jJ^QQFc`*N=m'^Pa(W JNr1N߾۵۬1.?D{q3bMHbB=4cpqjX]!~oFZ (m+mU++&ȂZL3ץfs!0s*`~Tj"?ʛu&ۖ^~FDcR.RS,7>^\WvbIw2F-6?0#^yus?=gZiq4@$Q7J7^,`%ػ:%c[VnQiT s 䨽iVPi@l`ML\qЀva9b BeUNۅjJ-f6"$OJ;5Pcu;i.tJSv kTth)'ב "2D~J,we*r5T nW ^kZ{T*GVeYջ:EE2] $gv 2[nTH6%2sfhVcDAyFʙoy6> K3ޭ_/K?&EHD7bcO$*Vq(q7?ޖqogUs#CxJs_C` 2B^(I ) |Co*K>e%#qpg <1pEn7:Eh ׃A-}#`2t!]̏0%wW_.7{}"VW!l9lNjoA_fo<^UT4JQy藨^znhjRnӣSq4^*R1MTrãr^ОIhNl#|8[J1%Q[>=JF'cW)dt==1|= =6ٕ9ix3pgW/]Z)}G_XXTuHPe!*"p!1Q;{apgЃXr7$zgA n0p 3א ܩkjWDrˤ$1\M) ZGS&A*@rd1GC27MxJyR6;sjV8TRՔRxQ|Lp"S8fj X~h f8W~rA Tn /e:p=EwL:zZYZf v'j5ba/L²+$,zx$]JvBÉ/ο1Wzt<(ZP?o+S*NdGa) W348hUz=w+ R &[ ֵdq+y{/ A=WW8=P@aAgvfLHnʶ@a?|WZoNTE)t-$. wW8INߒMk@3h)rf9dm\+[TƝڗ}AP޷pp^Ù CLÛ0m)QcvK pJ- 1.(l72GZW}9wQN(THEuїz?ĥG[W~.* G@J% +) >譂1{<:& ɋ@P@9艕}p MՠVq꘡\+"O1;6OccKhKPK!DϒAZW&(@=vV10T!H>q~D+omxq_BN)0E1pC̖tc./TM^".;U&#:植Q3IR׏ñ0xn C6F@:xݧRTQlseSAlQ8MځLoȞ~M N U-r%ZKֲZ9.%|`XW#=+ *XWנ1jQ o g5 +"$)u6I3FNH#>( ~jM>f,נJI[ k("E#>:V& F~ Șu)b<y4Lv>Sa ܈)K8e,:IW}ny}b2Em"wkQyT;PT; .8Ax&+Ka;$>n HY[P=Ø3y>3y>c$!hsɝI‚ BQAb4Zѭ&Øʁc*ߟ90: @cG?]~lnݛ- ҝ(3t>g]}e}Vksa׷{l)lNjo_f/<^5$$Cqӣs4^*RT J}u}F*iqhhq//U0b9;$%ڈ, trGQF4<%/Ik(3hiC0'(p`7{ 2;A4Y . mf- *(}**Pt. 
^.KjxCFop 5VrE(Khd,/YR`0kpOjxeYF [Es 8DJZG航I))w.W%N%V@ht?leX$—.#B 3"lp"QFC2',/=<#<&c 7w z8h  kknFԞ=d4Cn::}ٔ WutqDɛd+4HJCC INJ$puoh4zH%%JIKg $XaU^:ȫ!(kg5Ӭ @*锌.A-~)TS#TD_%z;2M=* ~*z?ͪ4k2fEW-jꪈ x!A|F}p#?z!ÃD*ʁI^KT4404$GƂb0A'4N$T"p i0xt s svN$ZTn'!'-y Ȣ%~87IhIЂxFX\ @bzhCtq\z(/;4y 䊰#,άlq@+|J\)XV..+߹ywnEkL[=dwo]4pق_K|-^ tcth%rnQ~r +D5G 47uXa%d;Y$9qk}>I?hIQS.J>apuNi]E8i,xr>;kS2Y#}n >jj֨Zf{LQωGBC09^uT[qu5zt>Sf]HO Fh;/}=m!8 ȈZwdB] j8Jlطw0;Co8jZ~Pp>d}CB"I5)=(8pBbbPpv`V^-(DP.w~N*A=T_M] q꣭kB2㹳 gԖ{(!x_X0>&22 X[XX]%⚈D \,Аͽ@%k. hCpu9@c>7fU)^!IDϩ3AǤF>#㼢IK itP,RdUIa6ݬo^ě%<0ɦ2 <}2SHjA)Z@) ,HP tr'2hCTDBjI krH JZ>>7@섡vF;7ׅ! ۤf@m^H;WSWC"Nw Mi|\Ǔ ӧȱ& 9fVR?q3FB_ Qjj ]*/'xy{e!QxKQo)IE &}:K:Qf`Xoi.ӇsA_$Mr?"^F^eGO¥[".w/[ [%ao^ 8kX,*3_d/> = &$&i~Y'ޞ/?<τ?Ϝ͟-K?so8>OenKNA2ӓ53 [luiZн!ȖiS~u.~}5Fz5_)%=UExq9pn T_/ x]cyم[QAYgqjXA ,?\ċղUH\޾Q.9:8)o6IiE"NjYtRZX$t,xCZ~dZ:(A-R* ήFguPZإ{]]e.SJ&ʮn gP20;^1>􊢬VAF0HM-°%G袳e TM}9v,{u_5UG5Akj5mGbҎK7v$|sg*}xWd G:~"ʕ ۧۜb]՜g:{,4IW+? g>}P֔4[?f&@ɼGR̸hm7x=O|9~仛ZVO16MSp=ZCz 퀫f6CYNi9fb:\Pī-xӻxɺU"k}dalT- X|˓xs=[=:%?k$v]ʶ7U^o<տGm?H5O]N\]mh$qٻxS{ gt&=sY/ .x=&=?r)g[(G֠z~Jq'd,Bu/v,Llq>۩CBbB>lY\?dqCS\OS`IQ!1z)mS9oc(ijLϻ%+^_L\_exC?}ыI/]L8ܢj|77wNn~k~d_H>dT1QtkT?;eǕYMZnXVXP>Ҍʧ/|8?_^^WL+aY0ȱ±=\,U 55c>IiTLz{ApK w 1q$S<˔ͧ0o$z-Z1?i0*򀷈E[qlLRLR3"ٹ.]eY@LO7#!Pkfteg#U<"fOvўS1Ւ lA.Ⱦʞ A'(#[b*FWy!H8L)@*h -%GP]d# J.ۓ<?M__BHbqe }Qi@%@-e弨¹nƈr!Q.(ӲJhMȶ*tz7/P~ (-g*1):R|Kb6y0^S J3 Y*ئWPx3^ZE>ÄVC5 ]ˇ9=czB$Lx@9A ΂@cjV᪴5@* ڌe mu@KŷbzIB{3f&$pco 0R͠=`6 Re HLl1fFPHNfSt[*QDnd{OP9b4ewbڎthҼV]_~ܙEjicנӴA=EUL+T|"ϵ_3︓A}TPy;Ef6)MCNa{j;ӫ~#t,GWE3$#&k9rTqY 등:{0KZ.I}IK*`Ga81(+,^Vt0lYmyX]*>ʣ|W*q#DZ6*T q^e{p&^^S+.5ԅ M,'1=8\h'ynoqX3CsGgf4{T('\cZjm+/[Ȗ9(s{Q}\,ܭRJB=\{!|u9 K:9pCQ9;(897:$&0# #o4ߙ{zDPa7!g0%%j^e7cN2>`gVEȶ=Jx-oo痿۴"x!|8t'\_{EK4C;pT,w0lޯ1ОD #cRHl5ƌή"U<+wB{99 =S׃τFR q}^qCLQ,xXW";{@J5P~BP&P"c.4X2:h$'83; uP6/<"A߈|?8BuOh2w3By?}u7+:_}#Y/g]}Г Soc1);O5)R##gCFXh[wuzx= PKe2 C{oBR C:Ah[A0x*&cGv ^{B5W)&^wi+ ޠX~'[6>N_f~v׷W~)K)ʽ{6_?rck:7@D7Tjx_:<WZ}v |a})->y4V _9kJ`{TR=j\ RNu^3=T yK4cMiQQWk.5ƌ*ͥ =&rr*" ` 7xO^1/fjl_ *"nK-nWg!wtw^"ŷoXBcEȇ@Y6„4Xď3'+!A9lINT:4qqh[>'V' B57C,h#|p$'DkښWe^%y>| w7<-3?N>|_3ӝd@[ ܸgb} kp&{VDZ_{ˠan"zM!_b"q5ASM:zB+,otOYAč _ƍ A Kߩ߈])38es^(:8=] ?Pmwf;-#- t$TqlD(^OLK2<*FE}bO@him'wMt_t3rkG_*ݬb!#)?TbfRS [ԴFS˷\OG37gwx5v͟mk~ry%ҽiRR-k9Vd=-X3cK/>^\D{S`V3'#/ͥS #`t{b# TsN! 
['!D}X}S5e6Xɭil̔@|,^#l[0E- ‘.ՈCF9RnRQJ' U&Ra- NtD!G*p s+Fl$RLY-\҃KK&S;WF$aHN|P+JĕBM^r1fbp۸h2 `/y3 DGUS}܎7N )^ޔWB)# TQDSʘ~xʖڰaT^V3\?A/FoA(uH-1bBC ͈IZ892sN'@n0 jSN@I"s_خu#3Z"\AI T*XB˕Fʺ]*:O;&ܣͿ}$eY1 ИC0yIʲBt'Om8ñl9RҀZ+\>\-o>4X5AvlLy1-ሠ~]o5P20D2|a6UMn2 ªӬيdL#yNJ,:]ѹ9S/uL HZ0qjm %]BξptVŭ yô|Q'Liܟ%޻:U5kҬF5%j{7&RN< hmfz RT,OgR0ms}JHPif]цzSPmci-JTޗ 5'¬5p`h -*F@a(w-\Ff T6 j(ޙεS][`>:ofˆk1*X˄dNx}F-3,atϫ5YϛO1kN1]${S5"pcTP~GDcǰBd*uWi'38٤fHܛ/-)Q}B[{~6| >QNEo*lTaa\vNNX274m&G :Q.A9W{kZVNdp%Guj.$*_G9 dѐu 7]MckL) -N,J0%7>\; ݩ $6Jvx=ebҞ#$džYY3pY(5HnpFs}5\PZ*iS٠H;\(M-RS+ șaZzuH5>CHSJU=r Dh[:DY>rKӉebwz|e:w7w$gmn_KYyvl7կO0ts>$^K_ x:x|BOƳ߹n-s{fX# <O/]饔If)!گ jPTS*Y*$xxunXB2%BpE qŠS  &h BxU`ntWlZ1*!\ @ĔV!)S0a&08V@ `aVSDyjlDžrVme{`1e!R)Њ1g5朂Sg U qV됯 ʑ6dA@UĢe] M6_ p u53%w#bBh Q(p{-)jrSBEKf (+BR EZ 1H}LCذ\c F8x]D *41H('(ūd@HJUzxtA!&0JZ[qR0 ̈ma#')T\6  D*3֔6#9I:w:xv YG"E| P_{j j$gLR)+ Yb1mTMF!Fhi-n>y~aJIuF *!L Ѡ8-*xa Hz0C4BYve* }2 jlޭPk.k[5O_ ޅ tE yDU@]l$OuzxM u)RZD1ru$.%`ǂc8C#fVwCJ(xP ®.8`3_CJ5AocHQ_~Fpv+x0$aBv8_7>=XhoӫDBKP"UWWT,fv F8nx)E !stD\dH kA(nD'AHNU..Tʁ )Y"@J _\5SQD ќEtppQe7u U u0!]r汮9x[x|`iH,3~P, r(G&nhYŌ=m\(*\pV }!C)ZaqB(; V!rS%* UMդҌƽKvy(g ȁ䪣%5o^S_+q| #$ŌOA!D^~bοƋ0/fB0a[s!p]Gb`1rb^KII~Gׯ_bĆX Xx'io/ű+̃dKd=pu.'v9k۱Cδ{pf@ӞӞe2L=:rWe *_Foؤe]1F׶&ZuGGI +:GGB)>}]}kn!XPݪI^i9w:>j0ELOwdtʴl9|s=pTJZaU6<4¥Q~(m/m ^cfI~/k )7vGcBwy$=*IhWumPZ-r /hm5!Иi`u=ւUH:j,6mQmwqT=栱JYTg”{x3}݃}ӝ3s3kPU.u2GOwa7=G!Ðj>~Mw/RjU*Xy)(yixy;fB$CM%ҍȝny"Gp>Gwޜj|6R𽮗]7|Z"o=DI?Qjm@I8Di;4鋧;gf2wh9T{=՚H0oޮu˷6cMB!*Ѓ|DwU5h}0_凲g39V*q4s8Z#Tx:rI#J23#U!}|5YR֮IȞ]49$9c1߃+cq85[@A[>9A%޵6n$"_0Afpp 6H6y ֲHg4%YeRl/lJ`fLU*=,\X;T㜏:2!n1c펔cʱu@6P8lI%ђl>Ss6)ՇB[[} EZ?='A>`d_v QmUy^T#n:\!C; r6e%dl+gK=_qC|U}Y+Gk/ٸ #z0"x>C=F55Q7&'`_> vQ5d>9%wٳ :#-oBke$YJWalcUWr3|&OAVW*j1@``pؾ-I:ss(4hEwj($:|gQE !.r>-xFk–M -0,Aq;NXġq8D åf 6]4K%ZSkע!j 9MZ=-.mF;);UļsMf˄OeĬgE&1R+K3YNAYLL+i’ ,%reFٻ^VU 4K{JJ!!EH3T@1)ba0 | `s<YJh$> Qun alaΰ" @BOaJ.lqʊq !:)FCIBkg'6P4r}yoTtgϗA($u Ab5.I4E@bt޺:W[+dYlqwfͲ_|7rQ^eQ u`^UrP*Y)k?lӤTMUp,ͭ&{PseSDV,[sZV  dAC=`Ի\AAD7T}q`Hƨ}bwKI@ y{ov49$GzQʯ?}>1Aߊ1[>կ|VzС<%vKh"(\42#|:{ C8uUJS:[ӵ<o eZ. AÚ_`. R`C =o}Wr*dOa\N icQ.i Po XAւϽzy mω9q6wb.1,yht#Ws[.q;yAw^ZƩCx'nAF ttP4"k6#آo$ xtuI`wn$]eEd |Dgyn]%xO NYEB"2LWC<ΜXao&(l 8EJ Qe -ا؉GP[E6˓-b}G*_sl- ce_OmDn{b9-Fa E 7@%=qQs/qA &{! =v ǵ!-8H5 ?| A]U17,DB@Ǣ s\6ugSB@׮w͞Zq^ ?GY+vQ\c:F;'z\=w#I{I^eze$؛;Mdp_2ɉ?XYM@8n؍,t.pWw_'}%0'yB$R4kT,UAu7^6+6QtntI Ih浗 Hm x)f6Թ~m"E(HRd.)J@ reK 0ȓ{_Ap5 t^ ?2d!vZo Fa}pYܯ?ZP|c\`6!{"#<; ';Eu'8}ΨYD dHkx8FA-*h t~}q6CBW 0'pmOo/*pʈpܳO, "(ѝJ!JPݱ7yZΒreY'_WjM_nI!xFNXY%)<`",9~:j9!pV Vk @Rfi$$( I:`:T`z`ynK硞i Ƹ)jS2Gt5EkmBk|cӁDƥV T&Fk]Jڎ R{{ ZkQǽ Bk޴|&4f agE}CZ{!z2<CIs6b^NZ;o4ǂvNc魅$yqi.8B@p(Pȭ|vssmf-7Z 9 @`*f)uL+pjq"z]2{ըFWc;&V&a"UKi|sI>[nV@})MeB 'de"圓L=|}IvۊuV o<#"9@V$&1!EV$yQu2[ d7Su}N+rJ@ߧ\'m^23)p-'8)bTh!x#-gy{jGb4^"AW+ RK#6%UY$/K:M "!,lfIZ(9Ij}N.鼿]rV HG<}罾sU7. 6Y0Jo/p!>>[*oqKm yk+76e"MBs4\>PP[鵰yws! RThl_ZCł _19#AAWTxb FJw#,G` ;:Q`@F"T8!f`; ϨG*?*^fY92"ߪP}P{XѸ}z_7ÀD]~¾`5]1бz5x5 !! q宭T v.S$r/k†@0LiWE\8r^C⠎z.3{u_^z{hp7N aRYHo2L/P28M!.)b 71 X<PK3Df'$:՛W[u>ѹ[Ò!GPp)uH<\q7ojqE$c丳۩^sጓ(.g!% DJW݊HAOU]qG2Cԣl=(zuŭ`$앮.DRIC4?=O糬}"gV=Z 2lZf\@&BD',SQd[47oPZt pD=GCbJ|rl%Z}K&w_R 4\ lQo\lт"eBw8ЖmBOyDW2qy_,TWK5.wn==LbQ$?S8!?*4]UuR97y^|2UbUk{Oc,X%3Qn0Xd:`h(Ę˅EG\r'Y-{ӯ7Ip,OBf 䗘 0✡HZ/if0=s:7_فz oX  &Nc7.Dfm(VWAÈDn30<aڕ+8 ; nwq=n܏H~#~\,/߿C~@(sD'l8S\qF &}&"0A4`t"9yp!vʋS.$z9}a^F; _eH.Ue?z_laG_uca0i|7[lE&7,-?' 0-f+rO{GYSbQgbw d8!d3vo8 ʞ"w6_`cLr8{;f$=xB.ЊϽCaIHβA0{e~$ToLTP`z:tޛKOm1 *@R7;Ŏk#Sdoݦze[NӶ_7fa8^ΏͤnOzj/ÒK`V@[wS{@aaT/qgNɣylj*! 
f:#'|ߎ4]Q~kNfD"^S険v4_ux58:vKN%^=|ocȻ$T/M:PW*rP@QZx>@PRd#2 ;<7}UxOk卫L{ĮFKQj_lXC7} e9`4 , SC>-3S3DI&2Sʐ%OfJZZ`Ĉzp!Y B߰ǿjx8ݔ}& -Lv>*pݪ BOI_f (T 0HO$I% 1.TWr*TWH;9Ńs_yM8>n !0!gښ6_ae7;UavR]$)U_mu E9S4 Q R  !E)1rk/ h#86 D"! c=UӊuN: jr9X=mv$_JTfϹ(fvt6GSsY73n^<*&I*IKaL{8&, c/ gP0ة22PJxxiǤ3(E!?*hH\bOZJLrDg8DMa/B?9!P*Uy$G 9›X8 L@j-y.]$Y@vX;Y5KUY ZZ0n\axEb@6 B H iQ30Gڈ"9x!`uPzP+ke "9Er4u9C <#sn&w؟s%!PD ReHَ]].jLk2ysu.y_/s!a6x;zov~D=DcXS yڀf9LgίN@X_7W?ͮxwsv|Wgc\Bϛ ذ :䩶H<\P,f UB-KaN9,) E$뢀_5`gA)%#HT2^j,n')t(dRWԿX2f@\9.׈) LD(!DƢVh`EKiRctmdYaY.>Uȡz~L=}(UQJ+@:5:n0KV P̕l4ѨHիT[@顂hJ #K1rAEp`h t*"V8jZ $3b[.aidVQL(ڕ̮s隄Z&Zڀ|Q KN L3FXk[P9G JJlNڭG2CHccL\qT9 `$XK'ךR> sTC( 4*V5n:aT !%bE)o6I @ (ޢ$ <3EdILN>lz; J:0]QpQ +lJVhV,N$ oQ+$Z Z\`qAA[!XsnQ3U's'9kO {G,-8^hy3!˩2P 0DRJQo (x1XCTӓH:8E,S <`TqKM5ExbV0(bEՕxP1r[ Tpu)(H#09Pj %:L=Q8z 2$՝m:DEDD#~ μ,dZu:*$#Ͷ6EM۸ @Dm0Bc t"džRRyMj]e&CO޺rD's 2F  F8a -A0Eh4/s$H A?YOt2~< JbUteHzL>@&0ʹ[yV~]UlZ\uΆU|Wnva[V.1~P!!f*ctRJ!U)*m `3!B#z>vP:M#aSMKT5jZ #N%`o8A%'$ RlcC! $}a퉓TN*`J9\TLҵ@+&kD-]C"RDg%1j0U/WYᩭwGb7Tx̶rWQ9UͤҔޢi LG WBRe m:TUƈ9GVK_1Q1fEz`˚Za}Q"`HruQ`+胂cT?^T!?3{}EsӞ*@ ՜﯂hr"9pչSNޝ|^T_' @t ZIk%TiTκGC>T)\^L..}iUwU__-8\ᠮ;el*=KP5Qn-q7)>[|kIviv7&:UB)*DAZb5 9ְk2 גaIذݓ+4.)\$x7=;ԮnB59}$*Dwga3`C"yx;(& %x7( sDM[<-^֭a"hP62+m͡i}mͦnQ)NkNEb-g;*]{  պ`FEhOUڍOՂO L3Bp{v8j]b~ʅcGTXM.ΘD/A*0#ث҅9" 'u-vU_VM!BkӦHm֜ wk0k<5Y'濽~yz4wusZ0Z05Ym,}tyKv]iἺߑu"x>?x}=pc !kL]'RPGRt_u?_ξA9k=9f+$^Q"t7mAWųnVT@p*paeW7Z}ҦER2XϮaC/c[ϳ^aqFƅ>(MH_Sْ-nOſeC߃x`v)dI'>\;9i_:cU_W$isT+O:N Fz?>D!N}Jqde^i%??D{U-ι{7PBدh'dQvJ2:GL@Vyh=?9vڍڱnVh=HceG&؄%coz_?YZ33#%:>R48A]h%] ]}dhS@-BP5`^4H鴦c¶KXG`%iI @x XzG1!P$b(5wt$ Ur%)߇OWW_4DLg6ycsVᱲ O2j+eUUb~g,9 (֔,/;|4|T'0F8$/w+"pD~KmCoft"&]ܗF)vex/v~#Z߮]ml'f_ٹ ]i6& |WgƅtSTg?t-M>|QHEs9T8XEyf;]JNKnH$Y San1@X9>y@ ]Vx8gV\̧`τ+l>m[ "/4UNr$tW}qO0cȧ|"4_ ` -h|Ś9 yF ]!'t| $7(Q2(F:.nBzFQ 2ְ!C5F3j(I{,\%]kYJw#Vl vy.^3ǘ2Vd|~[V8oa,.cDGg=F%|"s8O1+g"RTcǬ3(.kw 0W^>8BGPwY=9O[tEÙ묽}"9v59-58v:krGID󹝛w's}vnnE!)(yMoʸ[̱XൽcWsʩze5z׼=hKj펢ۧ)Z޹h6/cj_Ҵ]kJP4E8i>o@HL(J&)N3$E-ER\⦨mI̜ F֚ŧfy LΊbM ʁ6#I.QM==ۉֶ֮lQ; P{.6m@sDeo'zn5/- uݪƵGnv9 $cr9zpn'wf-? G{8 U39 p,O8[pK~ W1I -nӋ&ݻ NW8FN/ߜikӮoOԼ D Rr)\aIc '({)Q/3-hܢqo&фPhS~ 2GgNōD< Bn0xSk`ۉB)PW)OH<)PW)Oh<\R.0Uʅ}}*BJ UJ*ߩZU)s,9`Uds޵@T<)! RIqhRP W U sJ o+,tENe3.oxcHhۻ}e7Xw/7N^_Ff_/׬|2p7fld1b[_{߮fBn7snmK{oR1d4gwL8#k qLcIő~ |q:]a$Ӵ/:}Xц}ڳ!ߡIWq׳k|D0)2cz=t9þͿϗ$)bs1=E? pL* gT%c"HrdbD"1^! a;$a©۝CٯK_M{>ݾ8uE#U#/u!w|Xԯ>IQ{!'Vjh5ޗuY}VA%5 LldH0& 꽇?r=r8 bTVg+VJ~B;ES|J9-<@  E !HF1R9Ks+dEl@ BaсU(ɄQK%e$F - Xj X[R`}C>W,lL2ӘWovv^ga},0yp+ -vTKMQ8qq%CWcTJda ׶(O]X!dpNE5cښ)k*Cq0I Uo9N[(xF1 &P9YD[۩j*0uUmt򰃟*k IMqz?^6\"g;K֖ͻwi.$a,^EzrcWwԡAt76m̝יִ7=d]b8o/N]LÒ7f~J| ~lt:6I`R%-I϶"Pa *5Q*$Jѳq~W QTUj'b ,TX%?y\T  8 _a/i\(g[pyeNu4@4L"h(r;fXeguX()Y2sdBet}pǣ=x2k&\6z05ahy.N'=gh;^ `=:T/>~3n˙01|=[qҘ|#05*`G6~ ˺G0_Onozq0PuRߋhXf' I`JK7B2#1OϚ8H5"j?+wMy\wuҵtJy;mы4=j(%5sk[sX?@cݝ2j`tG {EE=f!"稹Ύfs(fUd2B4sA6gH,(_⎔ey^)rC%}#fUŚ;ʹ4O\U"{ddmE=UDv(X%si>yNL&r;NXJKe.n ;4 ^~ ED[ M39K̈́R|2/^fS&kC^[^%2(+E#BQ!^$wӍ*pU78T`4-%,Έ$rE)IM vl+nMv|wP{/1FXbeHghpn5mZdxpvJ&DǰɂM`숑woz`LdXL*Ddža#09)eu$sM|b] S ߋN[uy H*+. ǐp`9o@X2]Q;Q܎@ 8Tvdݖ%n<Q雡G C5j.m.}|Q{;^Q1dw6zLbMFfAj;^X|ob^6)Z;!Jm.iloޥj,%j]xTG] |^0:}ꎰK"7%8@)M%oui`$m1i]~0S+R !yhv*MAҁ@`B:H/<9au]^5->"˗Z꺖_Z^8ߍw!J_襙 aȯFzo3O=Rhޞ{3GQ=:rΫ3)$FR7s"OŧL[bD{`>k@#P ( 6cN! 
ED1\S,>@)9]ݡPк ]̭\H޺)\׸.'@djs%QF%:l33:^Ql7HIn ʧOW|n2)Ŏwzh֞/PXO:S1IJ;j >vH席(q0_v^}ZW{Ᵹ,Z ɶ s#=߸ٵt# cЩkҕ Y~yxa%i9d$"6[GuQȲgMv0)hb"$*`gS2r+i3tM"@d;M"6&(U(u,+qZSv¥EwwBD^:IҲI"IU]*pD+GF= GFv$[DL$jj4eJ p7enY5Չxy!Xq }'zQ\ifӍLWG$' ulf fF~v`KޅF x4:p~L ~&j[<7pZe$Xʂ.q/)B5\v;t"v:qRb֒$ to'r&=ĚH}{/!Qξ ^!5j4N%|K)mq]gt-ݚT;؃`*i"sT8u7%yOPF1*?LqDS֨FM(3Ji$nI'Z$J\fVUL ,@lmGÉ]%!̝5Ғ7tYcK?|r~8Эe?魃ưUitaA(n7q BT-uNE!4h.֞tOkX`?_]?P0^Όf_^jd ܏f<}6r$@I￿{p٣d놣̥~{D02r6I"82Ny8hDQg/w>2 b?yJRrs뵅X[$1*։ة'N[s>Usy=;zP=GO5X׃ &|5~Q@1_:K}0gdTVB|\^A>q;8,DE7[IKd"El (1m%8QY:VprMv6C;:h5z $R1 #SὕV(dj$FI-K 2Ǐ'R`S~ 3e1 x&@v^r&;?<~C"&P>uI.=~|2'/~>Oъ^z~'2hy?W-xXU{7J7S~odoHF5rwC ki4 vzLGCV_F1a9G.'Pb}y;bI0;n6ٵ{&_[5` W,[2Ƙa^weWDMx|@\%l3lj`2=b($O+bp+ dQ `sHʼnN(j$ YĬ b^gVĢ/*E__ḩy471ڂ7/Ǐp!8p!8H_`8(6TČS Dl8V8ʹF`U0W0fqf9.Br\_U-dxcBattH;U\eU\e.WIN,F kq6e EV?d#8y:س>Ei.-[j =- -#Ud]ւ hUHbbb9h*0 ^s/Zs/|vL%Ǜ2lW$OQNC3OJ)Hh:r B;ǿ kF=@!jH,\~Q>m  t.|(UOw]o| k3E U_V`{iއCnӹ2/Ot{R{O9%SkA=!L(43pnr\+ǣ2kvOqY.;ޥ_B6*Gu'GilQQ07jGggp gs0A&S3߷/: nuhp/⢰ ߐyC૽O[iG[mWq 44%w4  }i."3GM0R3+a F &'"QkQpTJvVVd fy`6΅>h9Hkp4 Uo2|Wv}]%W, O-KO&6Ɋk0cj=lj Ћ+pJ@Ec) h;?/ହ3 !}{U'/,]AL&󻄞XT[.hz`K>@A[= J]:IF -/,}>2N6b6սA a wF1_FѤZu( LT| -[%S,pjg3"Y~)Äs)m $q-U ‘k̟,h9&|%FEJ^Ȝ! .8u=,L[ ip(܄>G:[h}Q>A0M ysL;P&^#`QAn*o_8yrv ^9giSMt-= )M\G&g{v8TW33ȉ[8>:UYP\X4Wo;p{)Uh-ka0K×uSLǖK[VBh諺1A>(E*ڧFw$IsJHH J0i͕~[uVzJ)O%r*+Ob6 Ɍ@:(MqGiМX ?p6Xw_3S_%1B2K(B`mwHf ,q0ѓ T-R2P1$8d*ZƲ)lڠUh$ba%x$/W`LV$@c@$YNh_&̔B7b$cD`#A$˝)sLWAgSU _a]IL.~qKTp#zs#u2:`4w*}QΥ%i^N*ke)f- F( RR5 ϥN[~̦Fzkg9R$JçNgB)bkڗ 3{/g;99kz#PDZ`&=g+&$eHrKt$!maœk kηZ{T{VTI#:+8yDթBh!A{ g.Ib=T[)Ҽ됑&v$q2ޯUqGoKƒA&3&`CV4zƔwl {%,Rm@TCzk'u7krBoWj<_#hc%WXoHRוSR;RQ"8mZ}r-bf)d$zd0WVѦEP469/uH%bq#!.X3R;F<I "wT]\Dem&4H RQY9ߐ a)ie%+Z,yoB"rT˱`#+Ev"7/ _{]CY]!;YmR"/JT./Qs0pp $:rE0+KqI1"d9RGc ":IF}#7,G Dt.FJ“1p"4 v1.|1;My_ålSLdb_st Pq9Ly=q=/q—V)J41^+^.YWe Z(r"Ipi=6f{ie0ˀ\sr 6㗊\TVw `^lV{ =TZߞg 33>Y"VhrE:`;^lT3ŬS3H׷S˯[N뙌'Ji[1Vn-It1#E#ƔfqËu!Ƙ@q1f֯yLΣ|Kt,APJ_?@si'>_ў~q論*"AtM1<=8v5_8KȍvK^Wa3_6pa&$>qyu6~Gi\^IeG*e;lW539]gBTP9$a Y r-,I,']?Bي`+7ʐ̍2Қ.l>,(ZM;w"hn\ f1yN4KFNSaTVVa [ր(U6wm G`oSbJ1>RA ZZ6R5VF ƀ4j6Bl Vl,Vl,1Bi/w u6_0)K)^d;0%g!]lQbX.|+_5B$ 69R^hÔ^2rK \}L?=Gyv,+ \-Zd]po]H%Ѓ?A^Z]yp{|X$w}ʒ\{t8q%#^[Eơr #7 l˥k<(a R,T8Z;հi)AIU22|8$Mcq4׵"pk=5869bw@m9"-x#/0<`^ivr+.VGr>vm7ov>M#,חY//~xuk_bwWcK92,= 2bhMAej[)1:sP8{)~zr"gT˨, sX/m:l=YF_|}>uke&X%چLIQť$kaȎՒė@"d,4cs/mm:ɯ(mnKU(C+2)3GX" S#"I~YK*c<5zrx3qaB7B9+L2Z2]\eo~؟1$>i U(L51D4/̆< ^("W;τ% qs׹">??.y >~_CC 6q'~歁bͮn|1MfD*->,B~4??ƌ7S}短ꏅT o`tr* B6Bb"umL9k !eߝxFGNɺ+V P*#GVA2cꇯ {fr3`r5:S6;( oc`2&Vk䜀 E˵>4#m"+sAՃP`bpRG "@^4.g+=g1[c4KŮ>nUޝSBKWZ]оF/4/]j4qEkuS-au!=<Np{-v7w~vHRYxcR=Qj៪>w)0p$8?%H{^PlT1,]ژRO+ OXclV^2tfJPa\CW*/\nk0&.,x9)ɹ !/#+_+qZIrugHaȬBUl ]WW_,nnIQ%kzʚʚBe[ՐN nj1sco+WqI=3\~l# 6Xh´<} S7ܹ0!r[w%mˣBdž Fyf9k9Un^V缠x=^'^On0s[')W׿l`fy@o s`j199{t4DA:%O"Ywj嬚8V%d'-\> I(l ˪Ky,+4*TBW k]ڢQ$]Jg{ ;Zzj%;>eXaܝ Ngmv5ZDr">u4vgdѿEAR\Xťe o+%UVP#-7cD63*W('bU:9ɕx?,_HjO040ST0yc(!i$!pQ*"z] Fm #pXL`J#ܘ_,_<![ X"(/~ _#X'Ѱo-;^ #I~6ae %'0Ѭ&/5+k}?]w @#|)"pua/PjUdڏ;A dk)yXeo'7-0w^QRR&RϤVjrK>/#o IuLCތHS6241s(&R8"Z'L:exX!\:oZ7cR`Bhf)`=r}K{z,+ BNd),tj` 'K.xfnHeiA)EpqjќRy0t%[8ʪJqf!F(q& qF";%eJN^NfjpTJ .6-k5[NxZMBQu$j+r<+AXOKi ֜*Nx1&#+ZOD}7 6[/bxn\8#ٻ=JS2iy%G|.cB8~Vd`Qar˺Z4,֒pٕ׳كoBfyW2Nܧ=J~SK|x'ϲiXCdeZR܂ a\jݓ| b惢-,"m7mǘȤF;zZY²7ƍ:9,kV+|ׄ !MI@UBtX:VR Viկ)':;%>CVoON%cל*zQDJ n<ޅAbEq+baܔ!O' z$-ߡ 7}yڢ+QYL}!Wʵ.qIo0{1.'Gn2c9)%"U ؿa B'KptUR'IЫ58v2#dl0e@{WepLcλ7McԢ?I/;Y**i} VݬṴ"h.9d9%qP3 \64E0g VHK啇U1opSBn~֘lH.z]pXk 0u.MԖۀ sG\x-KE( z9 ϩ(5-BeT.ceeWT({\4¤D!U NSAw4 NqQ$5m{<\pA::Uu^!X] ^9i'=%>=YDOZ8t܏j醿l$]C>"kF^.qKM2&g(U 0g17Z ƹIp8]!u*lwFgv.yؓD]/-Tv[WyC^X!0knaX$aqKUUFurPQG'ofS}w'zI! 
ߑ J/t n#Tљ9EaPxo[Øf/|viG͊%>1fj'ͱQxzۭڿ 5p)%1"/0q^To_4Ⲳ\GAKbq!0FA( 1p(r);2g*%>}A6 )ZdQ]4Cd[5:ahƢ&"#DdGR&6^KCc`Za]uVA^ !AHK*R0 s14%Q:xJ,z/NuXW_ w*<_aEg?mgB*I(Z";^WDaUg0$i讀r?Еz -TWԒߗc"+%VT¸SbksE4SxK%D1FX1.[r^ϋc}3ʟe  Xv/&,z>ՖNU)AVp 4bXX{]氝uj1P6 L#b ؂dB P0#Ļ@h$8I©p)"0-퇒BGI`E05Qc,D4Ah+ 9RziO֊2k l$jnru0U(Yft+rA3ʞªwyёM {tcvQ`l/mcO6_qp㦓s1Ex ~"2c@ " > ν6Ph)~t#}X2aS !)A!hЙ# 677R2J0.UcleucGN䶹Et^dB>_2%~۩KNp]U9jL=tZ_D|,I-mf~nz)Vi-zCj֕هǻDq@?| S=9Fyckk^`ۻoGuiofvh{[/tl,1ܡ,+ono ˫ŗ-lQ߯̿_2Ky 鰻Yc:ݞ\ڷvUFOYpHo[>Wk7džw좣,o u뵪l{]]\ұF:1MrKE`|\*|APF#pCU(`v Pji k[rh*jv9Qcq9g(t#uĭ)&)˜c?n)ܼLBVkY cď;[2v`]x JV#5af&}go~+bʒi_/(I,F*,'c֙Ql{ˏN]*[PTD1l'/fꍸVb }2:AcN)Bm"-AZ 7F^`icڠϝeQ98)3>%;SY8^s ^1aLeNc zEA䏖~=~CwϤfཔw }[jBԋ XyenXnc=rҖ@}w(0OwR^h&xVk_%o3O^uF2v-:&,Φq5#Ύׇ xG <&>Jp/ZShI]jpZ)ӑ>y!_ne?.r)r\JR@E j,jRRekEc&~M҈rv9 p J;DӪSӀ`術GiaV)AEQFHڵh@x7wrzrxRK+R <iUsȞ=d.j懶"DWhi SQU(**a@;5뺾l3⁩ Sm9i(TVspDx !VSHY1h[=tFIPgSfnC)LY82!ъ8P: VJڋAiT`:YS=ezbgweK_k7^Sl.JcIe _S/^O(1/5^k[DIMj/l=(hY_6P{fofvtT1 *RxuX/y:n{}칢B 倠=u* bтJp.:IRpі׳SmaZTnR^ڜJ5"KK8 )}`oο{"53ګ z < [b(80?&f l0S~x(|q :D>,S4zXsdexpv78sXqUr$i!9[9yu}UZ6*ܨm?.켾*%h\+1FQ)((0g*b-iEK(kjthpFHFJ鞄8qEPuMGůqdj+5Ե[u-_+]cle #%U~k hLtum|^pɑdt!J{"TVJQ/k&o%*Befm)鍙  r%'}j }QF֖rwU Ǜi)Dŷ }#˴|K.pV) G 9|K|#1C6\Dή^GO\ /3,eZ zX("Cǘ4J2# e%5ִra5uHj 4|[hT]NRiJUa=QNv2bu:3@''RXse_T (뉞s9=Q͇(rEXm FgDZ|.FGR4ʐ369ns;Kƒ+s]')RͼH*H)* KTxQJ떨?A[#9Q#{D#{oqûD\h$.cpgBj>H:\@HKh1?Mz-N!$ QPnqNU345J]֠(qǓl%1g_M.5 >aOȱp 臒3d%P=Q, Xl.C־<Tj0u@u(GVGn4X霏96tIL}lP8ή\?:a[&YB_v2+1my]Ls>N757L!YgAڕV0VR@QiUV5WH,0h MqLU{w4zЛ½u_:GfB @[ wvvM6Α uȾG'iΑʱ0 Gш$>@#+`7lwB.U V' {42yz?n<8.k3pL - G>$^8Fƹ=4SA8,RFvA֩{ 3Z5k4D'PU4F+'ʁIXajΝ{xbcn!y%w$\P$Z3B^-ן_eIk]ԯotvo۹э$IBZkA7?([1SUo9*#THp]?O#1ִn6mH#&3j2%[Dj2A=d3% )]猊_btQ.";p!z,:" uU Ra\qC ٤ (L;ʸZ[p K<< ǣ8霂 #rƑD_xID#+fI@q, egykSBjP.I)4J "ѥ-AaٺryM5d Jc_U~?}OB_{o)Ds8sEm+xII8EIdTe)aqb/ɧbbrҒ΁@UK#tehe\ے@#MUIUdsȉr#P,npfrDJZm8q.j` S|nIQ&N:e(e L "3J-v$Zq]JkB8* HS M mKXXd씃Kmac]mH-ws8EӘOU,̷7_ŝ}p7 ߜnuXbZw;XƂS6!0J֮m,$z*7wv:xyW}0a/7qk&!Ht>q/~r[xzܣK;kr1td!_fTXMA#g\ bL'llva9ua!_ؔrͦ>"Ӈw {ke3QJo㏿sw_yg=||7 PY):0Ζs-)r:zy$3wK{僀dZ@R<͛MB3A&9|FWqKVp٫W8J]'??,z~XH=xГ@?ڊh+G[GZ`ϿL6!'Xc*jDͬQ7CS 'L #jveSRܨwrcZrVJ!Ǭc7j1̌>>[cvJxҎ\]\*p5AJ:lj{Vnp->`xH3r[(Pcbz#& "A+`Ę# bIAǻN ' 'ˌ2s \rb3` eELKP3T!R]VTz0)PtϲUԊǦO[Ɂ1Rj#5e֠KEBQ3j! VbSgnKpx";=a(뚣HK[G$N`T+NA<~ZnDg G9djĀu{.51os1m#xj);sBpͲcXWٍE|N&Qm$͛ 9BpͲyX=M =-m#ev;.bnݺ/D) JGFJ-/[PG$- #;!po N*y|"zZ2 o^ژJZc<*DߌJN tDZZUڔVYU c7`uU>0A3^]ts5G=?͏S.q@]5An51\,yC*:9X\X1Peh;fh##P[$ fHԗ%qp;ԗMw-!MѪk#h0b*NYF[jzl v$ qMpN{OWv1:zꔢQ.sϷopCG7+ YζLbр?vf0'3 g.HL<߷حI^c'Z_ p5w4Gjп pHN 7ɴa-^3g ޘltLD_SsQAjӍH  gF=;u$ooT˜.qkrX#e$8I# :х PB#)ڕB%Xr![kJz-ؔ{av~%!); i43YKP0 x~lG%R.d#2xS#J=E,iM )E)F$E%',`xdƈdSw k+`${B,ITSEtC!G! ({UPJ؃L ``{~~>TKxqԟ]w~{cPhxI[fMɽTb?EZ{W&>1:t8i/aF#imfX ߿bO^l%h"l7@blX%,h?n E{v>;.9 9b ۠m0f=e4u`W1Xڞ|vg%,a l%'b l%ݗ%F.ڋ)f"wDҟ afL׵,aVRτ1hJ5{K/`ܳeX3[^xfs|m_q[plCCC{h8+඼y$s{bqͲ,@=#)'9?i^^1!uD"~G2OSiZFƦ6 VUS5BIfi qRo.OW-͇"cOUqb)I6% t^RՄQa7`EP %UZ{QV/_G}_R¼[AflO}zz>CV|^1II19X0dy`}iHZʋ kBbz$.#X|/CmHPɇH[Pci hF& 76?Gr/3@}̳.WKcF`5k44,VɁ?'pU!zl@lj4Sy+io7♡h$(0j$HwQ3yMm~_Wk߈\&qwN /'S+K?W~,dXOTʊP&2Vd8-Hക= ̰$u=y o|_G^ƽ+OϦK}yoakrƾEޜ#Lr`WhNm:U2Oίg׳g7_Keo:zSc2ŋw7?};5N2 s۸{wQzaVy|>s7$HUc);死)QSRE .')K|Cl19jy j b}Ss .ɘe}*i59-(y Ns`K,TT'b-&=A/Wf̩\ϵۭX ~󺌅vI^k8gǿpoAV?e$,;c֚3z BN3w8Թ͇~lITR~{rdXja ^MN7y=K MQKW=b 7쌟Uy<=~:8σZ?eJ:C7B#m7{Wru9E-{(!!(8pAu PS#؃jmұBFE0^KJbօh]v%fSJd)3ݡ\z]!t!dI-"tL@JIݩZֱR,Ȥ T8):Ea#$"/IlQd9uNRrKBiXR4JD[hl*~6plRPTb[BlEr=86a(z"0%0N)CQX;[e[&v1Q.D_)o( &Zi҃jXغQ-" Nā\`HÁBJYvxe^7ESm2G=6lacqcK1FꪹUu4J#P;Թ\lHj5:*1H3cJ>;z11 i1z* սJ1Nɓƫ\.f"H!C5qz'(ĂpY~%G*fAT8 rlBMq]v&QC@؏pR-. 
X8Ya*,2 -T#vLBC-c=% 9 ฽Ea ț(:J%bӝjYY&}*((qp(48EƱ^O=|bJLqF C/y]b5W/O 9pvSmH(9dA5>8_/1"ITK.*ٳTC?a,l={>pYvd4{dP[>84B 䈁 ;&V-V!pdmF|j<9]j>u't gǤ\K٩[ʻ_E)/A ^;^Շlvcd/ct^o8by~?1rXncbUlXj^J\_>ϥLװ4THh-Eӧ|Y#y̓_LLߠ'c6n܉)w{נ[+G'l.9:lފ/+>cXch-Vw6f<+93?X5 A VEԑJ7tFYFԜ9rAۯğH['vZR>>JZnn *fT>o#g BT0k$B שȘ@H^zx 846Y&9 3]w%8S[W%}Av5i|wVi'Qϓϗ>  jwu<ׯVbS)XZִhCADJ2 0SP(^-a· z_ 9!6IZmH)( DDY 1kBO5\25҂%H g}pDFLZ!-^$BufCkVV[BSiF׹v?-^^RM6O-Cٻk#?+9jS9zX"iu#=ٓԜ(d֯ͽr:;ow֨;mn(YOJHZd֨CcehROLK1G(7?Ⱥ$"l̹!oUzr޻6}-j}9tp9ʽˢDY qcH܍Koͭyal0a̫Ֆ=nk0'fjт3+ՃdQ2ȷ.z| 8N~Pc=osCOR#gc:ivyZ49t NCqzJrj7m(ڦ-b/fZig|= srEbҒ{ ӳ [1fI#g̋CdLŅX50ƈ =xiuɮt >=h>tȌ='\iQt=Z4oGkw^Av )-_ ٛoɇmI.l~6la3 e~mpOavz9BV1 2cM=D];3[a6HEC3w?+4 FX)q`J#Ⱥ+3JoHd>[=0RZ9t3C}% [ 1A9Y.bq|EhZ,bcֹ$Ѹ]qKNG8Rjs,~h{='@kCCX1ӫʧ+>c!w/ <D =Wu ޻G2&}0]}z2(O87N,[ArUŽ׍6[j%a/0$G?f_gØ᚝g~qpldG?=GM>\||x}Ŋ_d?k~g/n9;ƭ;y%߱qn6E,7Wrٻ6$W, ^,vvcD0)1>$%/YCn[R2"##XY\/K>-\ZbHNS/4)C3?*\<VD7ZOFqt0SV˻z^a GԭNPF{M. _Tis)R ٕ.uTSاSKPۯo_]׶vZvr50VOlґǪDSJѨ[w>~j_nnlsFد=|KsUIof&vgrO4Pp)ڝN NϮXͷN NXj4q-5"dCCsy4N_#Vx2f^'+/U>-WdE_)G^b_ⳛ+'\ǀۯJ5;(zܬ11VZMN|7-5Xr~ JE}K!=8wͳoV/rc+ s\XͤϳogWO2x]m>J݁\,sv= |>-7^!*O$DيH*E orjntT<\Wwo//$6KUB=F'k)}SjL\$#syC~~?'% ^ Mkqg2kLZ(!t(#y/)Fj9(zaS:pEh t8yUB$Fo>zꮓ}FZW6P` Xk+b*82ܸOB.jOgO&f&ką]BcFTes3S"z#7q̐ORս*/K.\lяiE{%ZE7&YxhM۟4zxS6;I'҆0ZS4~|GGOqs8珔"ɻ(@A?WD !-[G1(_.]ƹXgO$\mUbVY/1Peѫ u$? q.QojAi?aSh!,cYb1ͫ25MFqS sQm#|1=&)|Ɖ6ϿqlcS:ۇw],8߳>xw8~=>C9O!gO.}OtK׀ӯgnxqm9j?9G0+;L{f#ˏqRru!_p׊uSV<֭&SFv@o9bo֭|uBCr]3vȿ)-;p^O 3ᠡ(b3&JY "l%pxɬn3\4en{T]Λ̧W2{%1ҡ΍ʻ5wY=-z{LO=ۛF481+$WpJ7L4! x?\6m@ZICSiMCi͛vvy?!V~tY/*yW[]?y"am6cH s+t(БU 9$ ađx2YJ7ݻ`$%k]KR& }fϫ9x>އ"5 [Q]vj>qi4PvXpagS:L':{C0 l-_J:0N+Ɯ*]9n]e@H Q럮l2+8MZؚ`{JB.k7"M**e+1\*oQ)#%mM!ۜmJy Xo46*ƞca<({}=.R?ЎJDj+dWRN=KC ^(uJ cF:q- a$p97miOZn$U:D 4(EL!;M6!xV 3?K]z[V愤G~//k}/vTQ\lZJGnd afk z4e{ ؟4d-=F>{zbZZk[3ծ!=ٶPcu:ځN4?γfh.Q{ cT!Ll:N=sv0m +~19ؽZs{6].)YBsчJ +ULjLFz;]?٢&m4E"MI&JӐZ2*>%#L3Hu4چ73Rn|7RX=9~ZPĎnhSWqagw7H?qQɑAD9[ӽd.v@w'G]&=w2bh $0N,蘥:Ls9TVٮ&ll|O+gOsOl_ݎ0O!#DHB}D>SDEE$I RIip*趟̉ÅX稹k4~6jG8΋S/>]3[Tyڑ kW+Jꦴ>[9ALHdo*0_"K|pXD8j\p2/5h DZ]4p$e&gG.Tb8-}5wy8 TY,klM>aסYK2RL~JN:t"S2oޢ+PЗ?_%AmS?a0a*tJ(%Pg`2>2XkO5T,j4 ,m6Bsٻ isAdGO0;H#=;u Đ; TJN83V¡$9,Pyj ixi;k`ryLa&MhysT u',Zs?*T(VXR}Ȩz%8A%:9$)!<|0zo2܃QQJ 2 )rYSr'@;+h)Y.\TFm;}W _E*vEdI΢5DQEfdHNKRrTĊ59dŒƵ0\d= JC1 -::1D3q8yzA~BhpC#PF3"S1)r_ PNld,B`AV` * F N4AkHK{ul&{M-])ְ/aN=91< b%=n>0;?=cS`~ۇw]ѩ|}%}~={c7˯_t} { pUM~]_0̙qZ_"XM~?scǓtq4糀kd}82 _>=7tY^~h9Lk_jKYbwmATdn͋7w4p[W[OW͵7}{% VkSЌn[Ͻe A,48sh;aZRA0RPIAjcJBF˸!搿bFBMj5* bb"IR1"hS44]mo*miC8nQr KG,$ٹwP%J"%W@fgwfwgxsl1,(ayNcXZܒi]_jr_P,9v 1l5 P0Ebn$ R csC$ 9P+C g ,u80INIlBID mQ8,2F`9*H0!Qΐ|^BOp¯rcQUbBVT)еJޡvTI_Ep_ZT x7D;׎ZI++V*sr]p.rIaa^-oY0E9ªT[9ʯ9o-Gqv @J{ RJ&{9IMQRY.YiPur:eW_xvY#_nw|%w˭w#/@%Witӹ-ϾSP3lFxk4戀G7EieۘC"Xā Զ oUW>5|Ni.yǠ!ǧh:1jwѮ8~QƤijYˋs mUr/&@ 5˹[XeN!诣=(~feVؠ%$?K5/A1fw_}50}̲H<({_'/ ZEҜa>0R/{hУ;L ޥw^^|Mկ"Wq;ge)eŭ{'ܢ2.uhU,q xSlM@3z !!WNK76}|6Ӏ>O>Ï7I`brX\P N/NA`W@)l?`//?+VS6Sk"u׫YNrf1v~uj\=vWK>0d)>ڜٽoCsU\MֺW܍6IB+Cp0H̗DcsJPnHgA19N-U٣lw0g`+ʥ\tPT'$-}0h9uֺ!_܍c?FZ>wFN;zW73F^h ^*!_aږ=Tc]cX.N}iRmΖ|U&KX,rxK%үq'Fp&s>vk0aJkk$Z."&OFFLZ$ƱǘHMd"bcƀcF0{*S-TWtLf'9WQ ӌ=iJM=sK.t>1F_<K 8Ȅ?utI81qōR$2Ң#DF&8N3Tƈ#]GX$M*>̟mlk VFv-pL'sE*k^kk #f0!`klD PC OZ6HVɠ!Je~p[pik>c!HIB} τs\gonۣkSgt0*U8MY70`'{w/jM?ob%.>t\f>d9SIçr9=2 Uo36ڸmN.Em3va&5[Zzh, vk>\LPDPt~ g-z_@DJ D,gXɥ3m`D! 
|5嬀Ik(IuRPeX+5AseLJsΪ|dŁ(s࿙@8|?SyӇSt>GI2f 3]龄=>E?>㫏d1ͣt?=(z޼D\X>)ޗa\:x_b|MB*;"QPq?00gA"0`6d0*8LLeB( Jr.Nchz&4L"G$}O\  "e/F=$'ųK`L%b % 7\m.tp5Y\"PGJB>BG B-쿞\Kf/,<#G[t A^oji wfr}1Dx~~gF'<53i) A' Vn,Q{ bVΰetu?8D@%.Q] ۢiɨ]'mjOKZlݺ$uj\vSm;$V[-)%j9Z ($5뚞t)Bl9y[S%CwnJ`SI:RlGKq 纛 a& y 98g Iӎ=Ŧ}[ӈF$B5ؒ1BΜ皕.tez_N;]J"DSGp疠,+'?f-{?ީNLzw Bſ.7cwJ!-q+/x ϙYS\ .К-M3'0%C6L 4D\8{y{T7,3;TqvVwyER 54αbq ~W/+Ħiz;-}1fMLU9QC<H(ceG>{TʵYj;]^c|@t{ 1 B%0K h%3&I4A;^ wyFr析{f#$(#$ B۷/V~+څпR}I%K27}VFkӗSc&7|~}p>Ksܥ6Oݙ$\I'%0"2*b"q6 1 FFs<ޠ.@{2{%G/z3J=-G 1PhI<뷙pg{ 0<_xF1C(%V|F7)ڳo77nnK Ϟj"ߍGߎS[b-u1Ч۱;HoRE߿nJkkO+0Dyy– `26b)a6Ea"k1U6acp0t&„~n6_MY8O;G\ðFjgKGƚK4V[,0lTD B`q?[s} Z2Vns{Xm<v$@8 O>n<稢+\o kF W䘆_&arE$Z{}`Q[:/{$a2wۼVJ#cd<#D!?g(D*B"$ԆȡT;6ZȽAN7?zkr̓bX tnxG"UvFH"dXhh58Lb ~WN팷t^|w$LPWn|0`JvY:|{aX?]n[a؜P#^Xdpj}؈7{ [?Q{`EjoR2@}тj/:v M"t.&a\q;׌ T@juܸ돮ӌ tLW>M t-.MKϋ(}mޙD)ȡDf/d2EbG|rVǎ-R le܄޲W(+RD{ k@KI') .ԚM pߥ[гǧt7!;g|qe,y\ >YW2 Ƨz?O@'/z>gs$ rl5G_i$Q9i bkYZ̧WNj@T- HR @K"!Snmvխ O?:]Q?B /f~iѦz9vXM{&9u w(S'g7^)I9U4쵰UM$v⩠kGktb 'S5O}5mq$숣N .t6g\$銁+K jJ54_ap~73QiFYz3$+ISy>է2z..і9N+W Ģe/'?J4eJ.!cJXJjNBHID0Q뫧y Եi&Sy;X%YKY囹>O-l@a[IpI iѡ6\h:mBsHcݝe`{ s -a\<[E5s;\!C.|HS]--k)YpS&ĩqYݫOJP[Li։=w#]uM ->L[]?|8sMfp Zjpv-z78W^epYvAźvF[kr3}<w:csF٥22DbpQQY;BIMǑR ٻFndW{6yb ;$02$y2sSl%nv2cf dU MP1xX->pwgZ`vj\4P2xmzDJ\ygFSuh886jED TP;JJӔYKM>7+,p|{7uU||n\?8rM ̑4cY:fFd$6XR,;x|uv/b=.c0Lk|bM'/cRCah^ LB xx1x՘]i)&asJB 6H!+hEks`NUaB)Zԩs9d.Ť|!qܲrj$䍋hL dӡuՊ[(ED't2֭'u[؂֭ y"$SRv>z`0םnZ(廵"Xٜ|jVq )xкil-":FbDu[M[E4Hy(y6y 9*4IҷA@lxRMx+0~CW&PdR%Jq'T ̻Л88 CP&H'0w8JM`R0N!N*5#83J8a*Y ܯx1 CP&п8A "8a /Jq'T D0N!N4AKʽMwo~2crB8~̭4! :Nj=y݋,6(m+o%? a[kN[DŜB82FBF5C͢ʗn)[CpQD12ك*R}|ŪjK "{i8AIW M?\\z1ҫ'=%̽h; B>fs$щOo}FvCh M|1v8<1|^q^q?~q"1-eiWĚ# טsk524%J)fχѵ;ݟwgC2nI81j}_`ykOOlα@HE k_37Μ8szS_%JISңJb@$ )ђ~=I HЁD8tEX h ,Ez&v3r K;yKZ)X6h*6Q F42՜Fs\GZgcàCJ+@]%L1pi@ꍡ0bF0$2\TG2q IddR!j% h@"b(aE`b hzBz,Y`E<)/,'!0bR&-6:0A%緧62 (Ó =Ctn,{|q=B/79v\O K\|~z<; a?=@W H$IH$QG"R|ANˑBxA5c9l *+Gs)a4. #ahJ?FWgN R ЮUG_Uşt97|21S\Dc.d Xj)H0T6{CKBDCDEEVBR);I6[a|dA$,H"߼WIRbU21&QĤ( aL"Vr r |BIC!%&4Ж"2LcW )EEpdS "AYs(8Ib90QJX[#I.l,ÐAnݣq[|ᨐuI?#xvLJ6r;/wc`:~EI\MlW@^x&s)zdi#l*Yߕ~8 %J9%+o7;zLCP uB1yqqN!ڔ(D4Qs8/EUH}ݞ>NϱsMc as'HBKp-/e Q,ttkNs8;Ep}` n:]Cq~%X,W( mAU"՗Θ$l^a~Vq^6x?a:CpA9کLgf ^*苹ν].;^z\ڠmV>!ng'մ6hۺJEµ܁;D]\QQ}n"b'>bʑ(BS߁'kg_0$~xgt5xu W{>8}8_}~dg_VovbdzvQZڂ=ݻ4`1 ~G~ A:*'-$uQ>ʨ7Φcjs (nd< *rT3utn?j:}O6$4J;ſQ<8shh ͻM ϴMsKHId_F67aq0&9kA`H֊;vg ّ&|u^lS@ij6{%f GOG%#S6)zT0%aBVq~ȚJl\L Q~WUKS#¡gNPAQwK(D >v:T#| X`auCG}]Ч0-~xj264S|d6K]BW=9wٻ-''`e|,]v2I{bk(/6r9<KŹ xQ Kt41 HDŽlYD_|.PreB6.<ۙEmZkyn-.xZRe Q#ː-'tj&;]!.*?,W;J系,rq>g;p{;AT&~:B껕 ނӘT M[(^Dwwos: q%3Q]-דFVz̕H:?īZ.9?7βx8MFoSxc~ޯ 3c(wt/Of#yXv& x/*n8+eW{EwS/+p:|}v%=馋G0[VT$!o\DdJ'S_MA! uk/A<[[E[E4H]طn1L`B1(":cn-RiW01^uBB޸ɔ~hݴζEA uk1<1~쮠uBB޸)(S)DsϽO/N)ʿ3VKO뱍-38/w#J#l-ն CٖCQ$'ކ)Ȅ Ihs8J!V-N!lR>i(~C'`RJգUbJ_C) 9ÁB BQ>X1N_ZUj5!=P3Uԃs ?xN2-^6|ꥪ'C<8;HsK|DvC]_grAXv58VcG~pEղ%fXlmdX s0#m}ePy0DIkIBp%> $cJ kknVݳ#~qNN*M*\G$?D ùRrn4cV_e]3tA-=6塟2$ZqCm&5EJ\[Wt}[IeO/)*V4Ѫ3fYŽ ꋿk/tc+B+}Ԟ*ވxMTVO'Jklg5"ii6ptD"/g=݊H9 *d9Y,Q>*x67'NԻ=ӾlEaX(wD"0rUSr*r WAO;#Y58)qBі]MV 1<Sh1@{8KvnL1=:mZ4G={I#=9f5z"r ܡC.2$5<Ӛ k# H>ϭbݕX2jkZ:|[tH3zfy3c3M͘MEG)}m^bJ+SW6p\cʯM4Znka<+d7_\+$J58~iڋOЫN\Oy[pS:O鿙LaIH;2D)kDKNSMyʦSLeY ʏTBD ,S.V{&L_ Wp)/vvB*R)5YFÈn5!w}WK H.{ 5 |2.ˋEk,1dBuN5adgN ʸ@2&l[&a=NZA_T})aޅ ;S$ڋR LT/WvߘERHF?z2 =Dt5+?˴N"EdCaJaE9݉C'RctkG7PzC QDOŨ=24 @g<` ˘(CX&91K{J*$e4tbOYS_05Щ|N>,&IAռuXd2S5;;tD)hxIL#/AUMTBRۙcK|5ג珰`a>5Ӂ1eb;(Cb9B"Fh;(ǧzWHR1-s>vp X0&`阄cՈ6P>,)}RB)a0SˠME < 0) RQ^ 1%XlA( <"Y Φ(R&e!]3$9(=Gj p0PQۼbMT n,}RR0Spx"5s/6 NJHBFklUAZ0-qHqS&î-A5f!1# "LֹFYQEA.wn?  
zib?MSGDY~N6p SO޿YU17_:x`1 oۯߜ|ZqK ~Bo߹4|);w|X*"W@[˼oϮ.5 K8glYO?zcՠ:,IXpGQ7";pǹ|kg~_;ݚvۙLV?L![pX-h1"㜄h@w jZ6] 4 OfkB҈&~Ğ hdWG@93`gyYP=hͰw器WgP(M.uꁁĻ f&k).T"Òj[ĐUՅObV}]`B)1teIS6Mk&CRh_)jݳm3GKNa|lFVz5"4*r;b\IeddD#"^#"գs K3;ge*8؜"Iqq9C *| npqY|j:Z{?1h1}`{˹(L@TPp 3>ou2XSm/pJ-DA 8P2 }se'0wWu%[ܽ Tla`s4`9b`olϮo7닳z٧78UR7\,v7'Fޭ?> +6 2ο^jk(JOB96ѣ8)c3Ȍ#2fΔ(S 䨤۳'cr;bKgΆ`-X OTHC)4YYs~ޏ~'()P0 ě 8ITy.s鸳1dv#ΑJ|g(%㒧(6"=(ֿx-QGR:f F:Zjv%kWej@^ÛNM> s]yZ:4oPpqoN!ᢾ f?w{I!l_YkQ}B~]uxLLt'7h=o{[p6L?f/77d;(BE_#\Ss `bL F-c6b+uiwIժ d<(t(v`-/+4< PHA]zi-,qB6Z%iݤe57m߱~eVx#N-wɖ~ ,Ra\{NL+Xi öbP)0 I343ro ШhFcfRi2ڭR~2VLDzu2g<ڱ'E&V%R|]Pަg©[gmN+!UI$YH).3^17&z/Ex㻶$M(_h1K4묘EbB-ެ[MA!6nneIB<0:pTS4+5iy} _zs'q~!ha2ň>Xjq5Fn ϨM_i!5lCl&$䅋hLq9S g=jP |D'u[!C.X[xvkBB^TaHr%LS_S>mTJKcvT31ɨ5`{J>j7PhB]5yy"&3%(e¯EX!s;({hT%F*ES°S~c_~cxhV m֏ ĀX v.3N4'҃$--)}./ uSעq2e$n@WNgջwiBB^Ta73' 0v GtBQGx <-X[pHOքp )v Bd';@2&6>EtNx !N2Fr*v%v+XnD:)Qb.,^ͻ5=0m9\T!!cEULFRQ`X"8컠iϢ6qpaT ы %$#4(k* b$<1 [Vy#s̿WŜg5d{! ]G<4޽\YlWA{ F2V =L$.pU?;pc7~_̓WGr|j쐗WH_o䱗C#// nO3/7!kU0iB`N&A(x^CY= njr|ArqrƎq:?*aQq+Lwz fU="=$.V.ƏRQZv/+L1+'Ivzk;"'XC*S/dDk$ QV!`0O80A~RR% ! [(|cqgsGЕN3`"$`'01JJI5I3 0~S4hs1 "H7:o(2RZY o"!s,xh\̍3Jpܬ?ߏO.ŷ?b|{sXFR,NRF4fLZe1R DdakM&2a4`SN:J%c4Kch:{9g\!f^fm>lũ∘6FkRpY, 5 RiLe26UpX9 d"tYL, m-4v,&aM%R")#9 G =OqEi/ > D] jMאY ɲJc( (22 I52@$.)w^`zO*O$4B!ذDk.fHP@t )iz>9\?YGۏߑwW_>_7x{oOGKX;gf{{rYrzs(own `|X,zG`mRH.۳/o _j0+-?[). ԩH8`Q*yy4q2eLvtHZWp(SHW;G9Tp9UKEpA ʎ`ƼG 2pvY7G<6Ip A:6586&U$INqh%fvX>"v:(‰0YJNř23ܘ$1<3M}JY*k)vG$FTJĤ<Ց1u}2ԂϓWDKeX.8^ }Gܥ9<5 BOhqxjg֚g?Pr7F龜j hBW4ݪ7 7*0>js)ȘH~Zr_nGy?~J*3ҵRCzu*_Z* ZMt[ߺo aQBӀ?cCDԙuկ^N}At rJ-[iNZD.CĖ3= l=] CrְBw!ʳ3Cp9R= Wrho^/qZUzӻ}EI# ֢;XsN[W'H$ۓLPF2.d*VеĞ&o%ʯ\q[T樦uKo|I=G:ms4ueɥ3c\\#0 #l&l%@H_&5c+94ih 1xz4F1PS}:0)JcNK&io#Ǡ,Ba` * 1h2l  HSڐXkQw/|^`xwj# zpO+*'wXQyܮxa .HUBngs6Tvnq@  F&Pps{kv:zkz+sXY> a{sqg؜Hvi&-n7vK` ~x}*vNuG3֔zne+1`|B)AzіO[6t]`;hZr&e9#8Nl;'ڮ \ů y"$S hꡰ&dJD䙑41IB E넔a0 ^Ԣsӌ ŢƚM 쭫X9wnȺb~ƗrQmjvrj/^턷X|> KT"_}VjBCAobκ^RM*pDmK+;/V~D˟Muඔ78W]3rM;Zk$iB3j%t<gx5 !/\Dd`}?nn)v GtBQGO%fiOքp Bb¦67hla:N5RMlB+AQtkG'zܡjbmg_4!rf'z 1^:{{?~sjnЇJjtJm2Jc#L+a $*8a|.vq>ڡ -cXql6(ry?gmpsѣEt㯇lNd6kH#," ƆƒFc._QcC] ^(Y ^<SŃ`;#uzZ咑oC:\ a=gɁpcZyUx*U12}J3aUʃoNꛜ'iKOZ5B wJtN&0gh9~ۃrv0˶t/cփiYHK&Z]Hz-LS0NB*@ϰ?wPn -%gƊxO`aJ7-S$B4Р7 sFqXPBNu' iL\s&򷅉yRS=__",ũ ?Hzibo|wÁOk O1$Jq5:5uw9&-"]w/ q^l*BPk.{Ɣ0=dV_.r}2Fqw6eQ Sۇ_?Ý5k< ^=w.g[,{|mً^e_\! ͡&ļeWwr2sJegʚ8_A;;V݇"0!юWkyN 6@y4@Aꮾ VUߗ)Z5\w"CY*5 7e\3i͖@>< :$d*Ofr`f3޺X>,o\d~P+Spdh %TΦ B&5Zj'8q䏋GL -KGƱ= 8>a?)¿r[/cnemlz[(eGziE^9&X 崦#(GzÍ4BI@p8K_?_O.beQ֭~@;_ .>[6ͥ @*z. 
>JŷyinJn{[6zcS8{gXcb&*ׄq Sqnֳ?s@~QqJ>E(TLo◾o~u6a'Sa_RےG߯»a9.ӸKqjI8Ov#ttѭm AQ;%R#0ǖР=,V;g$`cgwbH`]Xz&X as+d[;1"Y*tz&`rL&}0!Zch!",hԋ&XiPX3,g[ ƣ&@gp|Qpe0y' x_h*?=A47Hk52i[#F%(AΔ(' ͥq(K_cƙ|z{Sa;/qv^l4P[DTvZk;#S:-4yi;h_r8sCZR e9WOpǑ#L4 O`uƦ:(D}(zW7^-?3M?CMVҧ6Z6gG]a.KD'/uD@4xgnѭj7 Na"@Vj#A{“qEc,@rl5Оo-^(숗S<> Ҳu> :TpNK4$Dr,!kBQ>N|H)$9S(0:gT LEǯf$% 9X^\1No>q泑r{ٯ?5Z(&`F# 9(u8mQ0xPZ)(7M<"t\`NZ.w l)@u1Ap=+']<Bd]l.6B[tqj0b#@:1$pZ>9z6=X^摖nuus0xxM5tMm<&; {G@qQبW K F3iwqzs(Z!%ɾq ǵ5z\Z~jE$^nS`dSV9BjNںel}uI ڤI.1)8-4v٣!C6L $H9'k,_샷{8zO^ehݟ׆#P8I,hJΕ%B $:JQ-(_>0c!Z2(=ӸU4Iܞ,Ɖ.bvS|\,lNCY8pAb9M$,Xst߇v.ݡ-~Vu 5䒏$% $1bИXpݛ2Jl2[xRaUo6 \:΃Q%`^ f H4LuBJ:Cq b&GNe+Uy9w AzaR: XxNq:%e=0387Ď7 fn3FX"m@Ǎ`=v 93^Ztwb!,zP|VW.޻7>66&#DP)˒0Un\nXo[q2mͣJV8z4Klͣ`KjCvN9JCr#@d!Za``.v>b[JP kAS9K [k2?驹`HiͣUɩƫ{'p0J5Ng@Q[ѨxZW`0+dYo5+S4*7:K̃U\zJXjl61ayPZШ7" 7Oo S y8R;n+N7K_7[' @g5RQVP1[T-GuwݘϗYzSvtlPgzbWnr(\|h9ӎ~.>t|y7L] VXlI&Oxs6|:?_]Oa5W;Wjfz3Lonq-ƂdۓB׈ h`lx.T`*D,KljՍz%iIiB8,** aNn2aB!bow.8A"GK"LoQz3AYsA} 'Z̠ HA}MT}k(4bJHOuLh.5&mp0tdY&TЪa7^WyIpY]8Sjv[WY@#iWFy,xL{l.h  l`i(B# Cؑ𦪳(yֺsHlelk$il3  K<R5caɄ/h LPqT~{XaAA(.b,<_eA^[R Gwo vs q>nqV@w?3âAH,mş&%$f [cPHN%DDXQ^;.)8-63ٷpԯ<@kɜ\nY͕SJogwI5=BIWS HP`S!d0WTZ?U~rWg7|,VguU` Ea:}6X ;csrl0X.}uNsF 1rPYiM9&CuFU4ϥbR6j]ǁ$B+XZMQ Bcdq B(|Kn`E>)@T:C*$bA[(!x2X@l/K@*e?٪i>~1U?Ճ3G Q\X/"62~z5O?\ ߥ@Em\Je& 3SWEC(rf 5]xP$Yw(6)H+d8(Kf 4nҘTbfQ3XeMh.XRCѵ::Cbr>/>acCr#(fd,J:t`~ZXQyɞŐ6?ad@L0zlaA6GrzO3ɛQ U?/p?긍_٫*1Y7: QK{AٹGcO Bp7퇥`u ʥ oXDb@x◥>bIٝYզ)M'{fy6ͺ 6Wvud-֭$/g+gl۽7ŏOlDN<|pѸ7 n鏇C3g[,fOϕǪPlIB^b6MbxCjL D'p[q|ks;dւp͒)$Gb51h\ RD'uۀ[ʁR8k=Tֆp͑) Oj7Gr1HgnCHziڭ y"XDԩԼ"6(!JСTUBͦs|QgW+_XQ,"T@ )Hc બpe`TK8Z=9D28돌 I qGՒLvy?9z[Q xM2CS{ >g7C6C)z !t9HR>FѮzHs> r0@xtҤ+dgtvG[qۣr4 5)xklm%o', zN {%^)+yە",kfZ) Ӱ RkknFsrvIҸJ[Jݵ+/I0f-E)$eg+44Hùph)I954n4J[CjC-*?z5yN`22̘Cν2@ uh>G@4{i^bO$p+Ry*aШ8h&4l:Dƚ Tɾ #l [ehxi| L>6>r!3+2i2 6`$Պg:B)e}"FI"kk|#9 -Utr<9ta>Ϭ6)E;F#j"həBb,6\fRjE+$XʔMBCNu*v_k9n2;.⇥#/aΥFwHNq.aȩ09ߨQN|FV-&nG\}|X,V{uzji.2e8Wj5HS* Q҇h1;4cJ 9沠/' +wI-6̕#d+QU&ׁe^8[h 6f &-<49*11SۣaRCD5EgCs9ʲpǁ vV(&DX#Hc$(3ω;ΛY(v vf &Ժ`qGE=^\;Z< xʅQD:6rXWWbJ>P=v%#1#G^COi j.aPQCZ7RdFCm2L_[VGDaH/{ӏ:;q~ʇeՋ?;1Rp;n1R(n=ݠtWx >OS>"T{HWKxMw=ᵏr[ݪ#ev>b+a!&_}n>H/ۛE-_:%K _h.L{kWo Mn[~e',b'7dpIw]41p錫`v:9|yUYtc~=E9DВd6oOw.xJPiVE/1k{L,?9/aI7-ZO0ѣ#:f!PFO >i P2cFq&͇](et=̇.̒o&}Ɯ(D=#\"^_'$א0j7ɵM;\)I̵Β]ۃz[s~iXmpmXDZJTSY2Et"BUf2, (ư׹G;F ƠV/i q8}7y+=91GA?ip؀Hk&H%p't* w 2 0sj[DR0m& ( PuO_a,uJ4G]ʅGyUC-ձ+y;o櫻x{\_C71^F:# (U>7 kk3zu{m$xˋ Y 4PG Z]0K]7LkgG Y89]!0F31ɍw5Asp R~ WM+:T3nƺ+lůjb_DJ:~_3"e. mIk*7TngdG\S 9"g=Qxܲ9L9 A6kn Y&wN Ťϝri ʔUtS4.TqR۟@Rzo=yH5pcXJX̗=Wy8v$a6;;]Yܧ} y>nPvW7?^-K1Ôϗa&.>%qB*̋ /c#OKI1 DP{Dccq3g F?JH~FeVRzaQb|)~H3:2c&K簇n51H﨤g ONbj$䙋hR{kC/|++s9-hx^VŽmLBIh9Lӛ0QÍSaՂ"S=RUܟ.p)tg?IꚐg.dJ_'?nJnu1H諸/[ELst.T睥ּB^kirj nqs$&vYg:z_$/WYn^jʅPjL˩|vmX)u44hHXҔ H͈-圵bWEMd8jϨ6W]#rm4zWIfk \ͮ&-e?a߯!Bș`Yɇp0# Ũhp;n˰>Tq5ej' htǚ$1bGTIqb!N?ah@TB64AdCDrȂzP~.N' V[.@V|9whpB+v =M9rrv-a.|{pաAZf#v 9(o_'ŮTo?i߶*w_{4@K|x~(>T֘DntӟG\" ɏ1P tEotQE tg;YdR`כyetխ&m{:;/z^_g`J-JŠ% Jc/)`6^Cy8Q ͕>\B4GwmEl s^,u^ȘzlL޽\ͮ?SY=8M+ÿOg't2NW>7Dz]gh)|'/ Ӝv}})@J'B4 KF]Oy_AV #٣dݏ i9. [yuEԑQG^uj7_ -0&^Imn1C̈G 9bόr;WWW71_}}ѫxu2,h{*x18`R'T <2FmF3Lyp`ma^GM*! 
:%ۻU ZݦWnU6FzjV<5ElOQoqOgHfT9 A ΣI3oD`*5B3y d1Z)j*NEϦk1(qm S3v=N=r)I}wO,j1(1'v$ɍ 6x'C_3|5'2#+zyqX K : 0L ^aq@p5Aφejy!ِ:L1_\L02cFɜ}7h;ӎq((; #CAjXFJ-j7R[F//1_8FhɤYO8S88aGC ¿GǸ{5[%\WR>lrܾ;+3eJyl`jb='4y'" 8o{KMtw'.ϸL⊟seP_;;dH')3WQRf)4oGP.!= c>4j 6\b fV1kc/ׁr>H^>F,G{mw7^}K $ -EK]cL}/ɘ1b/؋.ɢj&TȢKu:y;5=w}oil +aT |jD [N,ly:)`u+ ,ag7?_wT2B](02+reBc|ݾf|=V[l^)Ք'otνjZjkwqW UCUbhx!抺 Cj *0nՀggh }Zl K c̨6mY,YͿE=#NYvs(ю6&E$\0לŠ|z%qS3:*пq?|r]TzGkȿdu@eHvӃ zwb3 eb~9+s_UBcSx]#ʚI|~ыiC)ԬtvӨ1ZȽk{̐ᓸ5[H̤<>[Fh#\n0b:?Nh^e64ZJUCpn^.GwQ׎W>Dz7U7UՍ܌7UP5{`]JC9>gcNd5I;m@mQ7,cE5cT\)*qp_M,`-!)Әrm=ەRM0 LT&̖=g2 A.]BQrSlK;y+W.E0uHIL(Ow{Q G @;njR( <-{^(Y=/fMns?4H-en<6pZĿ;ӽJ/g*qsUFb>燅vl 40Vf4?dEXG/+-?`Y^>nraJxP^>jM9oݧm'3 #d {s A{ݧ[;"C1c˔"?綕:0C.Ɛ%]9_DV'`]\UXb\lE C М/G٩P:b)I5m?Y_<y5k{xݎpnYjh'Cacϰ1Cb3tl gHd6e?eq7osÛobKwl3 >Nǫ5udhXmpK!+eGU^0!@ 9zg }t(OQIAX2!zٖh|8CI;}RggDRH2J.*.7nPB(STr;v?ͿgaOnlGG?(+mp,7 k5FG2&%@'.`LM=Q(=8Va% 8>,\ڏa@WtF+43i.cIʆhȭB].Ps0jU= 3^k3#Fs!uJƴgTjq\=B;u(.HXf9?~= T;frlP ƌ.t(UqC<Έ~~(OcS4^Xcݎ(v;0gǰCGA2(dZy)}^YXt/FOXC$v2B8Qb,9!ȏV4#,rh5Lq!Iyaz!ZF=WzZkQ[bfI._ ?:@{bX'(XFEEq"7B)bLA^ULj|;yOb;<埙bj E`d?P$u 練ݸIaׯ*,uTG%!Lk/zM6DArB2Lqt{?84G%8Jas`t1WqϞh@ $NqgD|N[izw;B!UFT(ᕴ˷ُ#ԁrJ\Z8!V !%bgxT/nuև`(͟WȌ0c( kk0U2t#J\cCT6*iFղ"J.'wdmH2dss'U=J3ڴnMfk0vfK1azL !x;]1A)kB iֽej.jHRIC9RC# I3ŃhrC&]nFiqS^Dz}4Rzrq^ +j]=cK1V1p=߾o|ݻKMݽT?moVFt|5zzÙeFc gjNrflݪl{&Pfʄ>#(@Qg6|oO)/wo7!^MSZﯮ7nnwֽy?n uwcx^fc6|;o^}pCn/=}޸?M=р`IWӿ_3D/a:CoO۫/ߗgEJSg;>3wRGOgэF\ b\'8q.tthltC~.Sm-,•sǺsYA\A'W\Q.&qRҗҔ0ղwo¸n'c1F*ZVEP9}~ JNq\lBgN_W"H<Z*ϨD@0mj4V+C18Ʀ+d:8QiLRT*gsFFO*%7_gH%ىӆ$12$Zjh`$9m:&KɖsYa2c=5@ºc`|ȊbɩI hK]Zl"'tߊGsUrWD↦$!4Mdn2`Hd)Mf/7Ėfc*RtˎiF[=km2Ӕ![&d9_S!>:4P-7^B)z"gDb9 C&.4G9C~.Sr~׌0g1>~$(dq^y)=+ ŨZ-Yv1YsFddK9E4'b_JYGlҨ̚f4dFЁg$E#(&FQ4D#H2.)T$L$O7+J&l'YlrVMwVz?G.6S6$7y&\-b4Uۜd=4[sM&{0[ts-,**~h@ԧ_ּ;Lj*mTT%ל ds3Mkw#x`_eyR+\r 4LHFWeΙʨxv!LFgu(V]O 9g.z4edi6WF;tXqۓpSOAZBb2K]!Σy%hԗ6Ԏ0^Ky '"Khz{636*qֳYc%N=UN9=ޏ]ļ=sEeAT;`2uvVDRP*0ʳcr2*0\ I={23<њm-j,LKӹET<Jg4f[ԜYWǖl s&+*LgT%$ByK=6ťBY2G`4:<^{8f*0dS rˆʏMtlpƦ(Tp' >qͰ9\3.Y Bp*DTe[˶+kB-FjK^6˹{)N "WTdSp`zvU}}q}H9!*rT(5;Q>865GNu& 0f(<Gt\ ;~#5 ֧]7Th@Hŏ6̘[Z3dÊ5zCCfKoQ `T݆u5F9҇3uhѫh\UXj:< 55"jqUٮ]]_m+KYVm]]smX~2"_/ߥTįd]~`b 18D0J"(Ha B{ǬH F+:N,Śwx3/=',Ò}pDPTHbFaTFZXZHH :)⣷k8ך^āFSy3Ip*9k<'@8ȼ )NB` {Q9BA QQA1 mG(mSF#aI9L:1gڲTzHʾ߼/oO[󵼗g3pxjlOo{njՓm}t ԂS*t~zYP)0Y"kϘ*seVwE\]/"΢9Hr=;qm}X-~^{Z;>,.zk+ී|nRu~16'^uX}O߿i݃#e4{6G5MbMItւ|ӿ7w{]ú t7ڳQ_V=Q.7WGCr-)޻K릐>VAꔎ1ĺ&nŌZhNn nGT~ɠ|I'ZNѩ14%b|hZ 7|YpxXkccAIа.N204x/aXRH#lK0U2T&cYet٦6NeOCuzSBzWug0sz捀L{7Kr UD(=~u=wFVAꔎ1ĺ R"̺3kjhWE:F:8)6;5Bqgf" `֊T)8..X@LWc~An+B1e$绾;{AT=ǃW`)&t"$N՚B\UG5Ń˸T5GΎ>Sj !x2h<Dʠ 㟒eҳv!4=ZE1$UE3*\U.@LӫW}PZ%n䦒tR-ğ60E$8z$%=h)C(HFrŀ!D]֥̒w x/hy$LC%xu 97 I$,MPD#ԻB_2ObW5hأ5)m#k%5h"xܒI'QDIָ!Dh#s /+'J?R9G+-4(KBI&Uqh/|Z. xc y1L_hFMx||QCCr-)Əcb`i>tNCۄ2}Wu+f4׺А\EGچaOGcפ*g`n˽]Yﴡu_$BkYDw B{ I[>g'O{vOayw(m8Y\߭n@~;yZ,_b`*VR$:M-@;:mJ4;`{ќU *sǷ&rߨ<nߔHD=|:4;U5sHn ]v7W}A; / 457G+Sϫ^I2󋋤ϖր{Ü4fRF̍4c֒N:B5ڨ>wy:Mv^3Ll1l)Sp4R0Ncf\{[cЊ ?y>hO™3 ڮ/AHwP)+좱ƁHw /AI80P G$uڐH >g(齥H EB 0KKt2BTcބp)(c)",ooޭ?tQsʌ-ns{zqnO>MXpz?I Q{ sV}P~J:V|N5 o>tV# G\!d a+3/u!:I.8f?9=Ta^>D^sz<.Ϫj0OKOyy`.vvԠN c 3.i&4dp(N{~g4CTS 4. 2xiGl:%*jURSf'gut2 ԃoc!h! ֒20X&.D d(Og,d .uЦ3Ԁ2h S=~ڵSfTXHMbQRch]b H$JM~)aGl5 OnSVb=;9`|i/jyiﻇ+sq+2o$T6jjx\=-NٱXG0i 38xmi4uK NݬDT*XP@'K\@d3H@j>2K 1= 4vIL5b^X;FwhO'B J{W}nCUgcƚj>Y*K{!Oq̐CS$kl>|J; hEo2/ظ5xQޑ9iZYd&d}͔f &ӥI"dIܴ o%V!Ւ2 G$HI7@i,c;e>KTzy7Mc yv7L_h"HN|8] UtN1Q-P[vv%jSQJ^ϩ6fx;͢SQ<[MQQzI7[Cp:ol9"cCģӖ:v\}Podz6x;ͷi}z]wnmȘ~0f!)mm'6"?! 
)@,Ѧl'ßG8V}qeF1V:l5ɜ@hV=b4l C&6Q-:Bhd$C%ҏ:ϫ>\^gz#9_i̓ ly+6`{ iK9Mw7uBݑ<}YUթa Ů8nE6V§ g&磓V˻?O~ymR*sw.W^\ޯWvq{yi`0_9T6 %V&*HBeoT4&S-xaޭE:l\r{U׊dT|ԜRP^qL6QQ\魨 դ9b%ΓJ5D\pHH 4>a\ !48G`uHy+(%]\7LjY}P@6=i#`v}S8 "!NK*RFl5<3ôv2kCPWԕu񩠮S_zbF4WzR@](-Ωz@vu}o`&LFʂpt(0CihCDEaHcWic Γtt0:^ \8:}L U콍=7Kx&zIugPp=_1A1I K5jIÂ]PGA Fv%ʍzcNT A: ُm 9c $hHLjTE]D{.Ck8HQj@ Z hL6Z*(Zb| ye§zY޿71-^liAkx*>< ?=?ɜb[]rm-W$w9F̙!zEsE@JgX4Lk|EFfCj6l%3I X!iT ji oRuP* V{g @8ok_=7GA'%.(yuAi 9djkZeB5c /]{/}{U#͇6i}xz3O޿y SRSz\9sjeB|i]HkFl[nD["ܦJ;1ziԎQHd FI*.ýF=GH]'){|t\¥%C.C4㥣vD)T(ܘv:_C~ɖS]i6,vctmcq{Q74i VaĆ=Km^ 嘌4(ȀBCl=:z70L4>Kw&H#@H\kzSDp>~VwYygvګ~Oɟʺ?}l/2p@7KS:e*$xXȿkB3*\RTIjzl?lz#ov7nn<þ?#5kgA҈4[1|Pn@wWQ]pn3Є9}^L"6[44.?og^=.=uϏ[21 Gg38%;{ )è!-I|ҝ,'w roY7ܣsn[yi9QZ$A2.L"C&ά)>V*ْ*'~Ԏf{@9dl-VG)k Rĕi I *JjFO;9:E^Oxo/$ehVxv7[>oN.ÅTUOkVfl^>?.eZT(DDC](aĜ7EluʖRsǥQyϑBrI|ðQAkcȎXcdհFJGN)L\btȅf;""IIFQV(%QZXW"p/5\=:|ܯ;rmx>ʺt|6obÇ׶3{jE3v]%LxWzSMpu;_Y T7J} m݊kАwE:H-`J1Qgn~N*9u+huCC޹EC&hJ1Qgn gNU֭x֭ y*ZSW"q`4&ULK1QgnK{INkАwE:q1އ-I[u+ GuJƺ/n?q*[4׺!\Etu1#кFsbu UVggNY4̴ y*ZSU9o`%_G4o)YDhvlin͎wAC޹锤gZ7 VA>u;[h┎Rn*NJ1Rgng.O[@[UHT ubWVygB8eh<ҁf!\Et+Z7E* k˶NW8mTt٧ =h;WvBs)!ab&RNi(`th,&=Ü` j6$#DET0DETFgМoä7W8̙d EsB5ڨ?ݜ4ᙾHA"(vqXwfN|snjӬz~Zι6~Ɇȏ9Es/mٻ7ndW?mC->,bi"iba)j<3miTFiVG"U(pX3 {C++߾_|Hn4O0h Q%׽ `\t;_kd`d'J:0OrMoz4Sd ßc3?E"'zPh!Ra xtJh۹YӉ9pa6T%Qt'veGvDAYpbjįޣUN{&ٽ{Kpf^Ɍb3X ~y&峧uHj9\Ƹ:S81Bę In6JU\ajs 5t)X $ !Y$W=F k&!-d\^$ȝ a&IuWĢJ%8wZayony-GVZW8qB| |o_B޹뫚x܋֖3zw4|8;kCWemItI{EkηmfySw{xe|nnnF 1f,rl~穛ؓI>>1}FrO]#8.|xGzOh=-+c'aʝ s#Tc y'pO rUPNӫ H9fN Ň|Qbj  sĝN! SCRüc@=>FXKe;'u8Ssy*^d)4ȜU.|L;9Dt דd9*^+ӕĖd`)՞76w\}⡸{_/;?0^KaD0nC>!Ef])1Ũ h7˯P2gB=O%/?Eb2\ sÁ'>YLPEe: SgTP)Y:D&*"́GN5֋.C^:BF90x]H^ (~)NJ2ZB>n- RG=/dUyᢰBy.&@hKPvjM,`A19+P&<+[س5f_|ͩS OhEj70QW*1Stc[C0ɴ[1c[EsJٛ 9hR:cgF:v+n5f_|ͩSoП>n_NBpdm#A妦wV!)zÀ]rrs*O˳fܼI.rCq~K~cJd~TbKGڭ+j}o3<,彺IjFdno>{AuvnVK4 X' Q~9Þr=nW_!cj~xr=q-޼F6or|w=<Fz].`y`ϷWŖ cY'MRQՑ1$6 ΃Tisϸ)Pm\ wi猠,D!AڀQաȧ1 6X<SGRS%{yWII8 >yZ&E 㶁<x.뭒1U[%].`y`ëM r\׸{qp\猲 rcF`y ZeGB)j?C1(4埻Oѽq0\HJ 3~)*>Ǧ*Ǧ>ſ>2zaOOgo ؠذ_f*}&v, spr8W?/_Ozk!<ݻ,='zv+4n_]3'Bc&\҆uDiYwq[H`;ε3,Vp"&vA jb!_RDz+Gs051R ";o#RR a(j_-Bꫩhw|UQB ]P C>mzcXBi b*`un4wQA2h8Z&/}?{0Ai(;-lg<}EmUFYaLcisxhM59XZ۸?8:@UW P[c0V8̶jQ;1L iжTC ?p)0Cvvo֛f٥US4f(Q5;O4<jΈXnHf '_1 (&1 y5M F&1˘83S>RR)@7(O dd@%]%}pJ:Gr5)LQ?D]DZ: %O8Cԙ;=w__w[-yM!$nIԊX`pL`*a:I#qfcQA()R0^"NHB4<>tWBDJ&`3HWy2Q0)s,A/hQQ1䙌h~]Ĕ21 ت֛$Z]kgAB+8iY8S9I7W80̩mNbbKz{.^\;-peg~Mf*D@ H x9 1B"Y*v KsQ8$Q?K!7 491NXU(FfJw)CT5VH E˲9wnxUHxPY"|$!ш+u2\\'w!LuK$1lȆ0?q]\S~1@i &Zkl߭BIX/nAH;4& Ja8r ˑ'4ȍTx%6YZ~kd/)(@?w5VC;1!P(QRPh6T)L(nԁM7cfSSAٖ>* (m_ۊ֛2(}:`|.YU ƹg5j,jgM,8xi*-G 9@bi $A~@q/>tS0Vͻm%oh tPX޼7P#8`u@UȺuW'er|Q`"ynUʠо $h*KH}sRP,wUޠT=mӠT^@rnG˰Qtǟ {yuf-:Q^uk[y9CUlZUVJ'C۫|êBY2-GUΤfVI+)ھ0aiETH<,v:X]EMVRgq'$pKNUgqmx*ܹ!Y5Y޺a>vM+*.:a(@]Sb :At `93Z=gf}l췀-4߬[~vѭ7*O*>_|R\֙nfZiΣM 6($7WoM _?.zڟyXKߋ)"X]u# vϸ=8"9J??[۔%>ܬ>__l3c&(z{ŠA\**V{l1Ƃ:֦[WW2BNkMeyj Q>Bo.L˄>nuBl[I` bq:dX?֕PcE(t{Qpa(&THidՄp9Xͯſ;'gPνͺ mHpdO=hZ 0ל>2VΒs.>|?%vFw;Y:3J{O* Dr@CpE!vXH$0q0C1BUX5onUT}F!y6R2_tfa8 ~_I>k`kq؊k-f"Dg-hfd_@)Ry1!>E+ͩ%E)Vٺ8^j$ep41LXc~.Xc"c <2nqʯ08q^UbtJgYN9) ^L^3§s@"vD0 ڿ?-+W2Ny%l, #[4++VF[s`/*iaxK.bK8igtrXA(+^fm~/3.(+hbHodH~DD{Ddbz26I;YVKZr۰EᐭxDlvKcK~-)!R@7 !VTS-hA#)_KS 34p:t(ȼ*X5kLvn7~I{Z1Y[&G{TIQ hAeAj Nu<ũN3@ۮ|%Yhޟ*)8& Ƥ2K9c ) a @N?A4 8B𒐈J?=xy^Izx8wˎA]dȰ5 8̧j~^~!64}ys.KȚ:7`*4| 7[o[IЧ=qUzLg_0gXB$Y XZ7=~{ 1xʒzLʌ{_],әoVe/O>wѬlf]Gz*^>De|j<{+>7;1xͼvrBLAw$LRH+7o1w5x9xzE l(g}m+n-N1LCs `R<7@g9JSg %TLlu2~"D]y&]:so7ܖUUq5zK IsVwX0e Llz,IIRɸ4Ƕyd#;N;o 5~2rxBVZ=jM=Ĩ/8ªHa/03d*ci[fCBqm:&<ޙ+(JNiB_06HA]z=ژgDz<9EITniXbpR ! Y=&G<-3*.as[:_{3!:y Qڅϱ7S@/OD1km"Nw)!5Ѻ#[FBA84Jx}U7ؓHW?MSOm (/;ZLBexZ뻝1? 
k~DhN bG)Z5DarZKaRTRY8Yu);=*7;=;<"`JvzTXƞ6Y7(0`J+ooe5SZ?zYnSHKf%B!*&LtX)3O5 hdG0- ;!T֘5Up%c&sTIlJĻ6vH^d* ,jqr%lv6V^Q7@[zϜ<_ ҚK*N>|_E|hs/GPQ0Jv*F'F4`l)CLFxRX &ʹ4ZZ=9 J6o/ `"AP+'ctBt!b$Js%HU,Ȉ_8SzleF~3%I~e)5nԨsIq뭿?SiZa(%5Ό놩X{j~seF,)w$y'iLMd4qCDϓjf6k-@vTyX9R!h򵏎D/Ah68A-U%q\ KE؏iG43Z e#{ax"rWx1N a^~ t M5GpLj5{%*CCM}Hh;)k>سw$nӛqskFwcuJ}H-H鯩Yo+ J{Y`SLhS+jE ul >$q0 7p@_o}Dcv8 vFbzI6s-¼1 # ֊ wO_U vOIA`Z-DZǕjV~Oc)!h{--ڮZ2S(@5yr% ږ&ĩ\!C8B $1`B$3ZɍBBH뀅:#oյ |fQDݜ!AZ%Ű/"hO 9ivc'=~??Dیz1oם!-Or3$Kʮuh{kYTBGW{~,:L8laglό9մˋdPй dlȶc/exv->JB84.:^1X /08 :X{O GS}N.0쑱@HǺ`]hCA[Y57Q#1anS^M%N5ņ@ݗp BRv#0 Sw|ٹ]AspfS YG/y]%~mҔ3vօ0tj<rfVv9)}~3%‹}&j~_zu"뿹oΤ@$X;?i94 ;M # V8>! :w(R0 !? :Xr|V s26n LC1\#cW&iǣv{g}{g}{704Q4~*\S1Bbg@jRI 7ͻn6؍&I惪ds?>=yݝl4ȟ9X࡟G(#2"F/|ABeQҰE J{J-8f=qJrb¿.4/%8Y;<_B_> S?Lכib_U&VR.YkU~Vl7*F&Kd2igDB!"8ԩ=XWe:{V׷?_Z<>XNo=(nvI{i?| jZ)b|iNbQeБ5AQ=ED4Fa0.E}qP͞("Sh iOE 2ebq޵'A 0ǚ86;1턤VT0Ǚ  7Ì8_ OPyecP; V65) AŬSb)p3 =n ( u~3/UBEP C)&LӔ"`)r H% ~ 9^/!PJ) 4XhjPjH9ks"XPYDMw?nF_K4ߤIH[K>H{A}N>+mľӊpf86@1G;P|Ok+=}H[Pu4aiπnVTU\*"v[hSqCi33xkIʢ$q4\*/5/߷/BF3jJzh9Vܚ¡aP P[H‚dH2@bCJTU"cn/0'|kA   PQ 2T y.Zfj"EG3P _mgk=<ʝ7ъgSFX+dUEa9s 4͛9 GeD0js}L9_ѢErS1/Kq&X|i[]U4n*aym8}^L{g~%USEO..]j[{!uM2pZ$xC}Ip^:HkLBk9nhJA:]ͫ9ˆxA>7Ze2qY"2,:0&/UjDI"$>; IkC0xn$(%;$6DWn)ECŃv~J7_qE:wyr G!y(mt=oیe;)h!sShy 0tw|5M/Ԃ*[>=3C;\hΎm$\ɴv?rBu%I@>Y[1Kb.Q)U("B O.M2kE%$ѢWDP(%o6W\H/1A%!^Wh#&n=]\Rc ֡x,k=s̥3^\|v)ɨMpi21,>.<*N)K(%,0‚0ſϿrL(nvn?LҵXMNfT@~BMl>?,9'+[a̲ړousJRҎ(JNCcUNq&M4u  v j"DYNzwWcr+o5 N*)]L?^mkuԅ7^ҮW!W0$ EUk9QI;>X aL*S66NGH!ΉȓCt>nӯydr>;If$r>s_[D4c軟֏]<=% ܿB$*?'#t}&텟/-~:^,2-¿sȫ?<RJ%oOn eO9 M.VNAWL*.$jw@!SH p6+|.=Jt]z ȭd-;OWx;7xlhiִHͅ>^CyxS"⸈8diƈ8aHC )^ @4&0D!#Hp;CHqUTċ}/6;s=2 M& %HS?97aי5QK#RYwT+n"V:g,> x{គ>^ӛy~A؜!Gn<`<^\y;9/ιZOdO|[:t|'<\(fn<.Ԍ)+E215 j%L1cﶫP/ !^JhS+Y3H]i , DN'4m9}~le-aAV{?4k֌ Ӛ!@R)!%)tiRRALk=)Gc &SCu0d`6ݏHV =R:p8)Q M!ʄ'Z q"4TpE~9Y+ 8DMb D'$F&d"RTt&:kR1Am3[;,{VcAp@amcx cLw.n\8+Qn$mcqnB^s|=,SbQy ?!䄫 (!<8>A@g$-ܛo__->N7-I znTWWfNt1\ T8^=>^A(@_JD_'T KUTb ,PzxOT4i Q,#eJ e&]!BR3BsQV5+C!~JT=^4 T(j.E4 Ye6{ 2d ?6?B,iNϋ,yK6)-TQ ]vJإzJcٻϷ?ſf}s8+}v]M()2[n VZoQQn1HzROD-^?"]hbZJnPf˥XLDMjLcj&z !ޫ9mΥv1)hfA l=H($tx+f>A(췟J MNz0Ǖ81ĥI/N1pWR{ 4T-ijPTZTLcF'\S-3I?3W{ªnWw YVHEs(g.-*

#Gk._t%Q,kxL6˫UCЦ^ZRA8R dUPǯZ/K/uUJk9UŖICLSEQjca$.$K xp]U݉jU՝b,SBec8 E鍖,rB,WPQ4$3b*TV$"YJs:3Nbrl &I\{-i4r4%,MbdJ 8w6"MDcKp)!87F R{ͽ*%aJ|>7T, [?P$Ϸ!Jq)UoQ )IŻt%r_͍U&Pxv.\VvQ^0Jo}O:~Mf |a/ ` kG@ҍ@[s'eFX'jyR]ꑜڙ0ɔ3C 2mKJ5pʺ[/L8<ݭ9Y_A:-ױHL%5]wI=帽ʕjwE?ͭC)[7H>N9!o<h#rkCbTmPg(͎Wb˃|?K9gDd'Ff*G34"с9E;+qQsJQWeR9 QwPsbB띓?@s 2cxriȯ F'Cc= |HL]T&W0[u\]x7+AwP9?T8B #MT[t͕[|jhFWRl>ExZ>z:Yr>=#k|3yrA'wAA- )'c~*֏I=eW,vYR@"YsWk\ov7=kB&bw>t>u;-= tgדDD/]ҪYN ?3~gD,d_AH*j G !H=͋ݮ5-̌A+`-B̹?4xkg2$oS}~Q(MΏK/R䠵mx5ʣCm)(-"\+y 7=WsOwKs31[wv{cW&+t{~8ǟ.|OQCO(~$-Z삷k21<_zc=:]fn+꘲ܻY/KR* P % ˒(ᩌ\&N+9׽]!3#Zj e4Opʉ#Y[x %q58z*^0tڰ+G!W2 0Ɋ^6*z<ϵi:N tmgIvG>|c:&m]n\{̀NT3&"%hGĉ)(e~˷`B057m>t(/UZFG2*?݋᪮ R!-EwAURnǰAlfPPc'h]:VuH)~TBEQџH왣a@4~"7nX8}0L68p^?MuQ16p6bY`߬Pԏ^l6+"l{m̷뵩FP~&mR +dk/[Rν)NM}"wR92 gR%RSp"9p*G8hCάF YHLniE~Kӛ)zqWM<-~0[^M~Nj+߽sr%;Ǎ[_js_' ȅ8܁Pd- aI.,X*ukƈۭRsȳ~ f="x~/#6V3SS5w7/(%c9љRe8,T;bQm{e6OUBݟKJG^Fl˄?.O ䷠æRjy>p_d[oTq>;] :I۲Co-;n`GWwT cLԚpbKlsQbJ5|zѯCE (-I ] >T}pPF 6s0jV{UD7CK1iػS3^gf {&`;3N&?p1<}_gzsvQx%~܍>H_`cAZB`h ?D 4&by5rbYK-18b #,Q6OC_~NFoX\m}|ͳwSy.e͞VQ/~~Dp|%o?>{|נ']xl"O؈WdUaj`#BgE|ç7 ?_ί(jZrE"jgm)5x24)}nFE̯O3>ZQ9jyku 2xM?4XྈiShӝvC7Fs fh=~*LܤL.qF_hBfۤ|D"N5TWL!ARʂ~*!-?e_],#-$8=ffi΅EBR {#?9Oc9QiA|<<>ѽ8tj Ф. ";GX.[Ԧ=g޴Zђf1̊`"b& * .Is:h7(4'pdɧ&Cha"@[!a}5 ~z\6kߣ^bu.8h!;YuLlMWo⨨q/sZL} M%mYMu`>|_Y8hpr&A$.); $K .ؒbu45K A K&b|ɯ]Z}NG9YCӓǺxFRՄ>$N|);d;uTt7 ݗd(.aLF;e៭MD(v8m@T| 6o' ^EHO !ϏOFweu*Y 6GXhyx}\ FkEѦ S$Oq%O iZ0\6yb#r!=Vk 6'RS}?o}3 9<ی 2͐$epKreW#fc̐Av!ms' u685>Z 돜,iz GP;_AK)((|R#J\X㔵lK:j%*tH&Dŝs$ OdOR:j i)m_hЈLIᤝB5A_I(GPZ{E,B6y_޶pʻkhM,Nx׷ߩ&'}"'A6J9S?ES?EOΧ,pW2C0bln$lkHXQ^d0FFؼ$g@Qg{33ܘ~ޅs76>5GYmCo 6V۰nչH8KDnl!rPF BZ %-#ZLrG@D ؁;vh #-]@L8B2V:5>QPeDqN 7($HvJK 3NWrtw7ɗoV:̙H;ڹ$TfݘMZhcK <%I>eE.K3a*Lipב` [BcvaXVBFi$(Md6ǥ[V)ؕE___;x7\챘=8ݯ/~g0a]˳W_ѫ+j6+yGx_I<UIQ$hFZ1UOA!ɩz$"ϱXqx :&ɋ l.p0;Ժv>/!p v#sXhܳ 1@+A݃B)A{^G+NVkfǭAoj2mah݀` ]# Ή~m5(GZJ 'AXb|<ծ-n8(V?1"RuŲ70 z⮢@ 5 ޮ@RHjpF{Bp8e;m$f>ݹ>\?m6uvdHJ2h+!K47T>bRZȍee3*IjS\`QjI%n nEwM.FA+4;ԿG mk N20wKxQJEXKr3FM1*9/0i=KcGwGU7_ITZbz~w=cOi=٣\e_FmI!DZZ \:Tr"}<R`V.EU`{5WU˵Z=/vk #Y־' p էP=Y*PZ W lyQҵD՗(T(X{J5Tz"R>d~i %jӇl/iBrw8]}Z>FpQ '3Bf6dE!MգFQH(>\?'!t03 )-Nd'1ʀqT;P\TĚBɻ&i}Ys$L$"̐(z[ Llf m3BP)$Ep0dJ'6aBAQYîPr/6eY F!Lx5glO*a3]lfO/tRƔ9vDp"Om*`AH&{xz1/9EXbfu&vj#1e *㪰c%r ]`KY as7n5CعI$0<|Ow(ED c<ragd{{mS/BfFAwzل# !L1碈0)ߣަOwΊV֛֓φ]{5WkF~=[-(:Vgk$gʌXBY5bud ~N}–pep4DRꌹ ɵ|>vBpY; ͚2f=:2J%zw]`R+œ۔6O|juri cA }ݰF3HmHG)Q?k/yH =mh½;2]N{a{!|Eܷ4G'Zm;I+M5'6 [t%fVvC4m @tz1OՄWj@Jb~kmȵZN69C19C1@z.cZPjǹac,&yu +L8% SVibz>T4@_ZWUZ؁.a<?7}n Xj䳫 |ë"zЖoN_#x*57dJϨyJ.ԅkyjUubfB۔3}gp#[~n9UR0lی%s`[/` JNW\]((^i7q2~ιTz3 #/DFIO )( i)?FX10Rk 09koêYi`]醠E. 
1\:\* _HDUQ,4w% 0e)\!.*EAq LZA edS3V<3"[ÙE4.9 k~0%ZikW<rt^:`3d-[744zUNaș|jHY)]{4|D{=yk'ێӀL[؍ZϞӳ=Tׇ_ta "[;߮n` Tu{}0Of(~y3@Su>40\77J2D!{IOTQȻ>Jj7Dwo5 EEfAvw \2Ef XۡXۡcm.DaY0T,GjYevbJ0cβ9c kJEqMu"КM_JNRnb62\KZcٻHrW~1l$# A/x}2nc%Mϱ`JաbVUnX/A2'Zh!_ejyfi\RlyNx[Vcx⼆370tZѡ{F(wao]T:@]kI$49 Q{\"Jp%Lg *'džXTmAqebaӫm)&2I4EդU^TMzդ])`e6SIGy(#$D4Id?'(ᴥD %RSTR4iMidW6VKJ*ybĻv+M6+摷hLRuQfqV#: H{:;@0x–9hi Aو<=Iq5eA֘EZVNN!El-vrm7'VH371 71G A$JfcK3lŮ`B-0oy1C ty_o-ϫO>fANgkB`DtjoU6ZLa%5Kڧj}} =oQ>Q#39uri\{zKzxTn |mݍ1JU v:-J6j_ɞ|B5V^ e+sFe["mVm:;6anj8&>lW#WOZ"X5Aj4 J7`_Pf9U矓"f&TTN y8)foRL&Y 8DJ pޙ TcCP W.@kO*84d;B2xGa4Shi,Ɣ#Ȫ`ΨA Q'}PQnj& " 7:)-' XP]Ҳ.RHЪV rdj,|8iSAf3,6$mfkg蜷);e &ktd9e:p!f QH DlkLbcU& 1Lv)-%\WZ"^"ɖ#{7 DI)`Gd;jGajy E_'Bfkm'Ĭ!zjDP jotSuX =꯻c~ JZt}^[*ѣ#pPbqS7{  @;\ K?_ IQ-xixF7iz N/턐9 sh:r_먵RM9R-J`,u/1c 9vnr0_k% <y+ _M:e;YD8o)51 t@7i@;}Ve~+45Nnv'pv>K+ZVR4vp/ ڛmY4YVr6ލl8G*7sns4Qm8Nwg1?G ZRdCCB #d"8jRpY&wY{L BatnKQeGpvŔok @zL*Vdw۴ӆiE;'[jFKoPfPKtHI8J5V?XSH)ZZj/IS7H>:}-!ҨNٟӭ>z~5nq8%V"|!*Ti>SE,2X; ybL\B 5rP ѷIE0uC`}PP!ƞSF*N nq({VC/IgN!IgRϞ1Zk5ICІ SXQzTS3DQ3yc!9"ʼ':oM)B @=e[4IeQ)')ސiK 'f@ ȘYxtyWdc)2$vS 2 vdw$#tVNG OZ票c6UBʬgQ!DTy mbO+HxK+tH&vwmlrZC8KEţ3cK9`nN-(Nc 9E1iN[+<<`IkRA孭 =)$M[ɒ (uUI֛I!sӯ)^rVƐےJ#^q.=UN@K 7<#Z.}(nQEw#}ox7ۚ^a0flh7~e݆5%l\տn7O&_=ZlKo|ח>{A薁?ܭz!}e/wΛXGBvuJ+m*3KN !7Zل"{z(C:HҘ,StjuGJ dA .1ճ!+ j!u:E3r \zbҒc xTuvԃI:2:qu,vшBx\rfUD;vP"љ-v|?܍[4M$.W9f_ݲnnbۖw|u#YWr7__tyxP^or 4g0~BWe75N7K~O_>.>~}X cwHQ~Ux@B׌!ӯٗCNx\RL^=R`jo9ղÿ[Cl_W~k_īmg#sզ@u.2NX[;=]I.xwWiWSza/+r!,* G߾y_ o_FmywO3+st#t#q}}˥|­+.<>?j7lޗWRo.c~q\Xof??,ԋP/B P/?-2@r09} @I :[GKPCAtJOKN|#k^gczv~6Eߜ<8CfuJ}^W.yv>w^4+Z CBMCG(𦤡HEF,͹ ݓ4q^{H>AI]¹lւJ􂀂1ȚsїFF,JjEƔBY@[4vyWbf>sJ*ǣRS˸4;8>N{%h6{iz;O!͞_"mQQ0VVePJ+)vRv>t4N5Κ΅C-I{_w|||T<6zyDžrXJPYWna١\ԄJД"$b,"yOdKe``e؝eO E]}@5bj(12J uF`MkBXCdYyGAPPD1`)Cmved|К{soMe=j5MƊsֺZ)VUro㦇:5dDUQHjź[TMH &6 dE4`Lsb[YĜxY)|T l%kfʩ( )CȬ&']`Yf/؛&l ̩~䫼&Gmqxa_.F :nw{ M=0c(3[ˣƶ4 ag,?Fo;N/4@4*ދe2sYi/1qH;' Guh+7 07 eQp<;3ȓWǟY8N o6}-ۤH=&sl&EaW rN 巛 jP\ Y U]>3!,`{48=(ԌAe;WRvHRee>}Օ\ * Q>)D'y(D\'"ez#1ժI]ӑl]sYෘ\(b]B_JVdBd1C؜M*)>DH^NJwٌ?JH Zŀ ۦ S\PAQ19RQT/]V(dvR)Ź!aI^E$RƸ@eJ!Lt9;z^u2@tEE&6A6袐.HM薍.f?m(ܟ.åךg4!P,wmmzY,vbX '؈ݗ,nk]栗s ELW}u*,* !Bt Xʩ1[4S/kBd[FR#JB:ӎSTK]XZV`1OpSB- QDldsq +32 Ys} FsDhgU=Jr[Wudq~Ū,:ݹ|o_FC J$9&;=G2bFl "p _x0)J|IQXag8SB`)ILdvciҹnkkKt\D Fe, \ Yb^I, heC!'Re rS\QBOY++ʦ.8thaB,VH츽DD@DGo`hB(* %#J`4Cdw.E;G.%Cs< ?ldj(5Hdg䶗J{Ȧ8.efB$eSȐE=~: gL?N[6 } ɽ<1?Ӆ,F>3MlAd6,,c DcW0,6>T^f7؛wQ .PӼ00Q`J;PK=9!06R",7*r͂ԗx=1S( dW"OqMv@vԜrs 0TN-e땇4i^D ^фBn^rnHBq͒)onj7[.bD')ڭG0\h[eݚo\D)M& 5l-VXF>Í׎vUk9U*#8? OX6n}}v9 )\S8_1oJ"'advV<4aT+^;5ݨVbwJ_VzRU@ҎJl. ]ӫ<ʵ1H\W·شcI]Sѭ%ypʰҸVC5%>50$Hu_`*j?ʔ}c`>uh' cY;-*x]^׀:epR.] Jq“kD{=%qe) ulJw(eЦ[# Ӯ)cư`bDX5QيV]!Dz6/~Frm܃X e,=x\;0b>M>J(y{- vHJGw(J|+EQ7f fGE؜]8X ˆ4O`fA`痳P>)bX Ѷ{~sCB U߻MRP6%maw ?_R%TQ 2В$-p[+7R更D mjlqq.^!؈iauؗDS"Qb>Xc9HV&g=Tou۶OO5ccP/x0tA<3㭴ӽXG1r:;B/Yޝr~JYe`;.cgk8g45k}_?N ʠI0. on(=tlŪM<`HSmi8ϪSj58<)7D*A]-uwS"b,I b)QF`#RHaI\gDe:Stp\W2OvDE_]5ש;ǿj~Q3|rgEߍ N f UhbiPP%/+$2 :nv U/,ҒsUj^?VJ+ʤ}*J< xF Uh8]Lϱљf }W"9 %P"9 %j `t\- ע±Bk VTB#Tj4D{Sѿ=o@__lGåFz1Z9fM /ywn}4Gܼ5~\"A:Ư^HjA*wIlMBdk"[jd9umʂ m8 jp Ɩ3k +'aK[aXP,1BR:0*^%grak1 :&]$TuTyg2btqyǥ[WsDst0~(Ks?0k1ZxVwwO= ~$ݓ]oTbGIx{bUj*.EibtewCEʷ:J"4 ?.p>,޹7F^$h+#/ßx0=s<6Q Y ,!6{A@##ctK-VBYqzl-?$(' B+C3aCh 5 g<* aD1*,mp̄-5c # )4I# 2Pw  F36X8, 8 DJ䋔2h C!K$,96DWyԘ~k;^P{T(STPxgf37CMc&ac2)EI{mf+5hG>r/?d=/IX[٫ߟT-^}@ ~V\~gc C_?q36ۋ9`sQ?vT\8<mm` <GkZ(Kq?)BTRf/SˢE)(4ss>WLw0`A=;Cdyu vM9Bp>/yuj{I{" y};w7Osdu帇z:t|zu N>Zímt0Vt=X~썅rֽ)GeD"Jۖ&TChos޾+ `{ T; )0BD _zC9.A%FFUk1ۦ|3EqtPt7ԙECMijXpg,I\&9,׻57v?2ne,jKL$e#i>d9j? 
Oupљm;!7Oa4Cdt|2ҙYkD)o Y[JrmАej*KK$ WԨN$A(A׉ aܥQB{SUͯ zxbMp/O-¸QBALͣpҊ1B(B s˧;?/qL?}MQdwFttfe(Fʜш. ھ^j"_kxsh2G  !V7 *Kd,;*KTg V,Ѽ*NS -2'4Q^NĵN724ϜNZeN ׵/50hz"Jz7oҧ1PTRk]Iy3B9_hz/+OŌwGa,Uyɦ.8ޡ\KJ"a"UmMVXH*Ml䃱-H"D`Fa+ܦT<6$?<ܱML#)cDHJd!}@FϮAjӽ<9ԁܬNK{HdAu@GnA-_n$5p$qTH&AC|<8$|L;*d߼&rLލ*v1z`BLKbl`_fYrZ#讪kU]յ|zUƭZ.D"!jh@nYS(.9,T <6@{x#hH)BsJ̹1GWktc DXvKkmW2 F?3s~h:b1y)'ʹl[E0|uO.r^P@X[2Mϯ.AUN* J"DeB)[ YښJLz%h \4Gp%&ԭ;yUꚆ-Tr 8=u&B @m%p+A["Q#"jLAFȶB "oD9ۂH HUE{ Q5 @ Nj->NnaHm2,!.![1 +qFEٙ<臁0 $]תT}`FJu%PJʆ `P=-]o? q5\/r~WoQe>ӏ8$ή/'׮Hd>y4z5|r>Oϳl2SS\dy&2&JwktW{Apח2,(=m$tnJ6gR6-[M"5y̐vRSf#.21rt"ѿZUxjRޱwI/8E&9+=V`ma#/wo6юBS{qPM4\tQT:[eh{h +s:?77Ys՗̏?{ħF0}lM&'1,nc͌A95i4@|YRgoZоocDnzqJSaĦ=kտvv Uzƺ+2*f DB(nUʾgn䥝P4rAhrD U&́½P|GR̖y웽h+&Ħv [1 zcZ|G,MK+0]c!Dz]5콘tq:BAGVhL53JFw>ٴ$ $Ѱ4S7>_T;b1(עUi)̓ӏa:co+"8['b4LIeǽȥi[i $3mq\Mj{ YnL[]`Dy9C5l9T@m`8G8y)sYʗh3}xǹnbXHyfZ-A6f'xKL90%~ 7tcZ',r'{8R)~g ,:jHuzO1D L h_L#ix^FAji*4Vne,i|4=#x,{h[ܱE8Cx^;*vf %"l$oqXAXRϣSFnYN L}A'9SL`w\azR\X}tӤp+S:pj FUƘIA^h}O')S/(BV(a?[fb@kF;mSꊉ&Pi4#h0 sxIywdFoiah*8PT'JO #!* sg& OCUPDs ­#TUZSqOjqR%9,k!)(訣8z0e_H] z)ix5BiD~Wׂ\sG]^^=ݦÒL`X^ ЩGtU Wk{@%0Њ71Q.. 1Q7:S<.Zl':n6WZѡT%)g;S:(mzCظ$+== $%lŅG`c qߘƄ]31ӸZf@.L2IH똶B]C{"LL\g^fFNsJa#Ai奵\C$^Bm:oŋ);"b$v,R|Vc$-'K:J*DA(;hs4byd-n=s<)3i0>ޝ(jvw|0 l5͘&tpW4LVϏ5&Ɗ**<Ǎe*+{l0eG;9a|0͎u@a-S"JYLS#D.SJ0ZZT]WMWsNW<,,cT_D|si>#}(˷LMbS9Aۼ7JųJųJųJųOu9p,\9 \/G7Q XeNZ4sx>.8xKva6C劸Ǚ^Op[4ٳI|DMW> |S(ti2|kuF/7\Qgeuϟk*3'o.r+b,kf˷eCPQkYI)!Cw,|EI7k&2OOl?E$]6sPj|@l7wXv7^+? MUUMӥULs&7ӫU̔[n dqpٟÈCcva4XYm\&A;qid& @fTVՂ,Kxժ|wW+HYxQ80@xSw4 zcJ*g4_j=Td(&_Y>!H⒫::%Ls lݰY==~ {p|-4A%`:(Z>R@RHK{-tWqǚjEV4n/7`9uK!l\E0Y&d"B̜ , 4UP' ui+WdFA8R`϶qhgW jvBV!i:RkP/IRޖ ) %%)A-5nE^Pnv^hq~"0n˛Q@qϷ7J.:i5.!N`|_#} &:ˌ>T[LZ3-">L`2#d|Dt!y ,19` >xoF8Ľ¤ `!QAZ4hk y)/3NSG\SM µVtsq'-e.3AD͈jtIP":ft()8J c  @)!tqw7gE{+l> "ԟm}FYXOOҽZ7|Ǽc Pj>>v9vxw/K|&3Bq w'89bzuK?B.)2~J=lswu/\'r\Ғwd@)MN@?%?uD'Z4P#%@u4NKҥt)+.]ʗ.% )j4qVn(g\0 B=Q󠥎(aМ3NSj SA)q 1,_ ֙5ĐwN)P:JpT+hi=f:erRhqƒ3_cMAȱ^7+|7+|ro C`w*Д%(5:Dt$Źf:u!˛!شj]g* "%…4!FE@5 7 \s{EA%#W-8B6TyǏ8Xfu4d{㚇* lB+2#&վڜT(>J'ݡϙC4m`\N? /zSJP8j4V )_ܱpJס=TA)eӊX"AhBCQ2ŕFWo!NN +S̒iIF"b% $eب V5n18aV>*@U+[9`Hdąy1nUJ"Zs%<'ZCwhT/P>8Pd&1iyb4i nqq륹pn\1nE JVu>B%8ګ i%Q5k'yFtwV|`6jL(qcg"꞉ C͒itS GLY`BwW+#8A 櫮t ɔFzGqT 2,§4ž7osT㺇.dU@pii .J~Q e~ X:~ソ޿K+<=J4$|ο;IN3Z5\sv8~"1B̫\y;EB ,uY)<  ^1jEFjqA@\{`{@90LI>Q-{>>6l]}?ϋa ,,,A8T{cn w%tⓔZq!jipui=ZAi~Z*A܊Y<ɝ +?NRJEu+>&![{iBh "Pmin {I DuMTZqFgx` 7|7qLʾOPQjPP #}(Tj%<}M/>,Wu"Rx"=|` 7)t͋&g5Ǘ"HګDMR*eW <^v}Xh?dףyXh@744`6rHrNJ?X[#YgvIbRSʺeimwna4_n{(_, Q5mC;+ +{[mRgQ ?3f Jdmpҡ~DoO ;{j}(;ܤ/_&%3@ʹt޿tj!io?ٌ K-SzJĽ?{WƑ /&>(OLg0-:oV7Aը$ЕefUn=]¿~t+ )m,zi.*9̫T}\n.ʳ&{ޤ4%HM3Ct_LO'4a˯,T(ЏtIo$QY#;V>A9g'Q4Zv|q*v+NqfK)|`tt[&8CT&g%FZ&aO4b&6M3 /0R'Pj%#]MZf̈́ ^A)zxeI\ !Z1ٵ;#"KB-?}ǏtCBv>zj::;%w8dx78A\W  i%4b YQ)8ׄ\-#2 ĎKzwejfOx~\5a%~O8=QZ04W>PZΩ@B޸zɯh偋>uks%b1/GVq).OXm:3>h =s1Ʒ^K:ϾMGXY-^OqH Ca|>1 RXP{X]]OvmheWѷg$bI uFq4 4MM5{ͺYh<,ZU%u~09Iv?9]-uy";;Z6-åg(6(G\-/.>+}9|1E^vyNl̻8+|V''ucAy/*s 838jK(3-GUv]mܸ}}zR#w6;qniU5n( f;U! rTXt8uH#>)E_j+>Xϙ jyAÐ  5A|`&XvqtsAյwѲY)s\kd39ʁUw_UFmֵJ! 
kđz3Er?xF16SPk93EFcdkc>/I7nWSj8߇e1D*ouK[)wp+(?_R26d prf sGn r UBE[{Mݖ+Y9}$\ZKOVVcU9$Ҟ6160T Bߛ|[eS\qzB"$فJGbC#%{k(DTܾp?׹5so;xu]aNARZ3זt0U1K[ <85ۂ$ &`_@whK}NY|1"@[8yk'oM8 uLAc)IB)I Bg !m$i*@:a&X xH&znG37kG>"ۘplo9|F#`8S`H@ AA Qg`SxcvPg\Qrbc)"ciTB("Cθ[)5@1ɋ`篐 @Qv+ؖL(Mpo` bVaF8n'u9{5ޔ_|>Ƙ5/M@_M(uTL̂pX2&[vSx֜510`dB"RL5*րaeaT$16DqNps3z{'K+pp7 t/Su5*PfXlw>wef:oJwD02˥2VYPĔȈq&#qJs"?4^QrIOS[q.{r\V{2]gq+݌(Ee$QI%%Ayin~"49Kb [X%'qtevZ&jIbh sK!>)͎3b}߫9#dCfНDF\ҍ .)J'J^N׃OBMLMY"8Df))5ZkXq&+XU`ZoeD+{WuȮ4_+|q·8f ;գHkD=B?V|).g/]hߠGf!;uRصؗT8dx,Hβ$°) ̨ me1 | gXb^k5|MBtө^oelpt&L_boMjDqxkKAa?S ddZ@W>ڭtnfBR[V9}koߚ۷m;B1j,+)Q72"M&$)ATD<-D@;'}}z2lyOo8d6 |&]f>-TZ5U*[|3ӱYyM>X?o62-Ȭ=iZk^C*g>sNnFi.QSןòGğM^dx0Ll}KK,qƿ8 &WEF9 > &5`cؾع>>ܚ5NoYh -ly:{znx~ y,ze" BjC/cF7|1<"?ADDk:/ >alxx?=Im`bD?=_p$/J "p0 {O] a4n|&bJ)9 (de}r[KGN9w\tgnN0LS|čJ^SgsPkV +2-\~yF9H)F궩Ʋj1olU ؚNixx4 :*ã#ͦ#EXiR*JFL'Y~%qǚDaPpi:C#5҉C܎g'xt5d/_WoB*| bG ;&;+%SM[?q쌵K'a.Bɤ-3K`Ъ=oPtx;񆌞&&z%O}c/Ϲjyu }UH0RVFH93Bd- ƣLF"X%ݧ,Ϲ}}4*Vl/:"=6%-v5-Kgl3БB , F, 8f<`xH (SZSV#&vfAUk ZJwaO)LPwФ7v78Ȧznz lEŅءZ4hIO9lS}]yy H$ $}>4ts|(lƬ~EV+e=qVQwұ83muM^ P7!%mt-0d \lOmha+ף>uQv<0: 20}z.oǰO(}ignkFΌIkE]&c"K2 ~=y4g ?VFTlAC/ 3{8j)Y:gL4ht{.*k}B9gvˀ ś{ţ0H&kqHEny8r6:Sc%i iSF#6$Ey&ΘCv_uuUuuhG̨m_~>Sh!nQA#ў`iS)aĭO9%K}v@$C S-#RL-"R`RA;Ϧ3b6q_eB c+1 Ni@cXm6fyܪl%ьկP1TnjIΒȳ]o.̰u*29.CeP`ˤ=Dkh C/z__oFoF"W̫;OL>hŞR(,u(8WhɠH [r)w, jqK#"( 1R v5gީޏnk;s F1SRg*Dy !ݓ RԆ3|)|͐Cz޲ۈ$1@a'\}W-6_f׻V[Jj~K+=t*mW쮽rڅObP];<ގ/P_cG.*XZBSVc۪.w%U;.?EysD^Bɪ+Ltn4hYQ=CTNJW[ %h7{a]|]p Jxt{GͯEf0ZͮI-^jX[T6Ys9U=!{[}L-?8CLr֖ %Bꪩ\P\Q7APq{I {7O7S#*=1E캭eih6@GI Opz >4[\KqFW|sG/Ri[3?-(4Q+ U {֊ɘ53Vpd:8P:{Ul4۱F4^P 4~܅p| O2tR9 &߽ꁴ hI/F)|?^_ic3*\.21I5^ 4pbj)Azb)gPL5iCDKQ_k++Z5 88ÕK)TKV9$:&c~ΨQ*\0ȝ*@@ |RP@E 땏 Q"GƔT $PSy."!fd#R2P0R#%㢴03Un.A@.uZym?^_/_<.>s8g_tz"yviS@-8uNP:~Έ$bɘϤ>cԺL`2MB(R:C(AA,T D>; H=&58I5-4Kx(=4DjKxPB@;Dqk+'է :PV1J쵣h:ӵ#`Js[ԥO7Na4̯tcOok7zzOU*b34_Yl=ټB D#E9]k7g71VţkU):yΞ7ϭ*+ht8p*C/o~:8bݞͱrVí0F]ffcL F7#ߟ8)R?N6Ipg\$C!&iAE!*]ݘ˶NcDDFb) uQYA'_,Pv"a>6$8ϴ Sr*xyV sLZBpY4OZ-.T{sGt 9>v/NƷ}l/W?ݟaM65eTըws2vwC4eꦐM'#1 (a9}1]S\pFUT\jܣKP%"KMVmťHz "aX `XeRQh哩4l!7VCd;_$gGEnKbegfX9#[9͆hpw<Ȳ:l-EϞo|dim5^4;pLX_`Qq5@5Ǔ?$9],?G7 &.qY$f1]OZm`PJ֓-YDƧgCo1t |8ӫ?@~S@mP۪EXHA.af^L;a< л{ZέT8fLI}3(ctOO'/0PbO aBљ7zVoip0 Y(i28FyjΤ>23%/{qo[J0b1|g:ado_^*V!=u(_) ^k&"N`#֚"LM{f2e4  IB0k:.`B#8(@ ! Q\vYBĠ?C),G5Wa &"8:cBR3$;Kb)tApd(L͍Lcj04Zar+sd + $ (/IhTAH,RԀfK6a2pfV9Ed%"`%a/fD"s}}9͍N+ @2%xtwn܄Swvc&$I~y٭/C]弄_m~ J|pna;$o߅YLQ.홁V\x l'+Jsndɼ0x]l[yz]"n[^^D0YtK0X_DϕQs`IOsZ2BnGB(LW9(<ɉ6?b(PG 0y`c eϿ>!EO_d10vP>c`8`vjΔ櫍kVEű@ ,Wg![@[#iDb0 {.s0 %wB@Sq R#e !epOfnّ x3I% h)]߷VpP'iCF ߞ ERA,:ic" =$ٔYmXBD8 @I!2 Du&Y/#E3t8w/?mk068"Z5H:bT%הnB-#,=pTռвKpzX>H(`j3.*‘@P^BA$is䊰%7T(?:4Hgf պe9Q6Cd>U(㑣]=Z }H`I&mr 54MyLz|Fx@O4~Uhj E}`ucH Q#<@ZE-RTf "$H=KJ" Z{#!GKV獒Ӕ(!:jY:"6 QN\k54N0DG1iCr 7e#CÐ3 &:11j[ PG_ prHJ8fT>Z.n!`~ E#(mv>kH#tnFX'޳ X!k/=.Kz_j1G/}( 5C$Gu%16Yεe x(;Tϵy# hVi~z$fX3Qj6bэac5,łs^ɹ(pjFuGkL}"Hnjp9Tw ~W!!Gk2 ksvv4Z<2́nܹb GqX/\Ik(95?mIq<׃3"Px FH@-l@8ہρo98y[fV%yMHqXƺA1B(҇SUOmvZ-(@#ps)jpA "%L |.19~՞˛]2x A-E ,pK  :PU!Y,WY<\K-{3nDY:yqGxN1v,;J)Uhj#ˀvS ;cq?) qv?׫?B\z_º17.`ib)?ǖzu\˷\8]`M<.v1G[$5~Ŵ]]M**\[!_9)Aˬm5X8;kjx=V-৒ @AGR٬}fP. 
ẗ`R/!L.-NW ?J"JjR47Tt{JZWsUf&QIm@l*#1DQL3Ϩ"@Y$I1D ,c Bn{$]3N9*oѲiR]č9sTSAdwE;ɈANn-=@PƩc,ABIl]$|jaDD"F p¥"XF*DHH嶅hZdFmSS94wl:Hj*7qT" )I2JrsI]$OUS;1O@i~97OBF(Hy⚊pu=9I'02~/7(j*=(uP v 4Q硼B%PSD"EC[8boF'r@;)~&MSh%lm7QF1Qtl#*8 q)W|g Ff'B>Z_c7 :: "2=ucҐ6xGyrqv3, |BGSdYF * "Z gD*;xoebDgNH>P^<ޮ""'ol ]So$?~SM3gF.yi}w.F$B7E8\x Q b4: ?_ɨsP5߹Ԁ +{q^'EѦcDK=D,G @[K|1ۺ19'J@A{훙Boff3ƽH]ͱ4%\~U%w3–xœafoǿV*n[u;1|8L7Ux- 8bQW ^ʵZ[p.b@#ojg[C3\w28-ٸ "sʎJ!˟]<r8.E0E}o_ShsAb pu>ߜ.'?nod=_vvʠG@H3S򱩥Pi NC5zt"O9r>r` I1;Wd/zϽߠ!=&N0_=L9 G%9.&H! ~9hOa=_eW]Nhj1M t | e6%5 (&b$9TL~wSǿ L$!H =E9ȿ8){a`Ծ'.U.̦]MΖm57kt -_]~d)r~IB^yOe/ :Ge"V:n6g7 +/ȧbWxO%<MIƌT͂tw=]wJaC+{/S '3( FGa8̀hBoq :-QKbi*UI}8}L)2)էxRb |5`MNX>p,79z6G{i!=݋wwR^M3[vr>fh)6HǷfQZxv}o߼}slʕ@r[YuCPnJ瓗}O^#sq38.&h]itp_ f lLiA%.M=hg_zv0yLS&'ݻ.NPɡ/~ߜ㓝۫e<ŰMM3c`fFLAT5$LCdr># BjQ -n<^ѯ1\i'49&2?^/WE.y*6+<y>Чu+_l}K@R {yix 9IWh~UuR~9s^!&s kcx[M.zB} @(zHgV4ihQ('Ujį6:D~J[hvAL\8FLjeN=ܨu".3054Xe $Kċ?k`;L+ CT휐}!~TT DZTBʡʢ@"AƼa\)D$kw}X.3MU%%Dʢp2mx`sQ/1NҊٮ҇,lӊkrZd6ŵy-cuHȇ6E~i 0ZOInD [&z\m3,1!}OyF#h+BT6Ky~tl߳}3{m"=V(Ij%vh3*$O]d?9Zĥz %%mkz~{/9IBPYcD n'I\zA=6).Ϋ/J5y1VӽՆ! " C4Th挱CXPGeg?&qW Z$6Ӣ.11X|KҲ8vD"P"]BP8vIP X6+:; b 3MQae`%EC}b ̻'wa\<`M#rA(&+37T6Y!YO7,ޛDS"U($ҵ1VҺMk|+PE`$<҆ ui>!֯`Qy~A$Adq 3oɦܸߚ@8ۥ\H;Alsk>w{ $vPHT2:u> (!&P6!O۫9/W/۫{fd:^fC*SlD<aJ܊2 C@~CzR%"YT|FߞUAXB¯@"ur'(X gSPQ ^s&"`bl;c!ekLM2SjUۚ Nu+@<HBz :C #r0u(a!v@a(bp]FrdxLo 0Tngd5FD(_N<(Yn{:͚>s Ie.nQ/S$gPCIÒSq-H Rg򄤑lK+&>T -7mw'lbEX[kB8> 5wۗիnќWoO/VcN=>GbCosf?>^ h2-o_FO_o14 05}3zMgdw/OOH>ڢj`3To(Bא`q ~}<3 ?"b$]ĿwHd4:xC%8^LѾTQ^ݼH,[7˱L`I:P:d~鴲ׄv&=jp y];!s!V8n^x20Pш?T+3ص,qqu.sQy-(@\=VuQ.Y*;ܟlX :ScHB F .JT m.qcyG5d"C.(l!]AHIV͍JN( bǩPHkQ(.,4P)&Q% Nj1.Jv\+b;A4 [BNeT [ a!S ,EEpl y).rMqTۄHЂM)H)d"`c]LBHA ? WPUaB9Q[E zȹ*#"e HU@=D[k$A;7;d 0ǵ־$9GNeiAL23"0r0 .IxM`aKͷsE#u2+@(p0N| qPP,WTqq;)\2H`%vɓ@a ˑ#`ÜpHq9Zq 0 ;c  08E-T:~b^I1@9i@wJ[#q TAc! <)R^ R\dTJ|#-5?*Gfvk_&& >̛6غ[PZ6'1mW?_%dPU]=PǢmz =@H w'_#@Abi=8=/t%4pjoEbo5OԩĘ)84^ݼH,sO Y}:J QA`s傋݈^^Xx=R! pK+єW-UQH.p^: 1ggP<sÅ,ZY!.E4^NO%/ ~F>Dž%!3yԵ̠Q\az$p"㌢6 ?^P|-ϯm/I :+6HL+ Q"DŽ8.zy?K#;a/9߂"d`W@)8;fI0$yuPJ-8&Dب#AGpWFhzێX̓n.q RTE{0y!Xf5#N͞&@2iIŨq'Ge 9Rb1N8VӅ@?uIDS$hP?h(K@>iҖet4 $ʞo V!bFBz WHGQ4"V3t̔1V!]ppS\)%$P}X*_&E 62D$ o[ &܇輷8$G kR`rC~}64Wba_C4=K U2bMhp"hE]z;cޜN7\Z_̮oe܄q?cڼ̉C̞fza~v }3kC!0L*A5;[Kt@SLՈ^" 5(pB6PD%I3JfeMu"Uت5Ӑv}B2ߦcm2`CFfW;%#RDΑDh9Ϡp[0"=PgtqiҪ 0=vMrK䔀 fe9Ӊjɳ3cL+T+x9oWJ̓ Yah9}4EsM?]_/8ܠ.έ|Ngdߧ4ﴛ*%gbg_>W8WFUS U/ ߨj77Z l&..uplyp=Zew9#-+$)7ΛFŋٗ&M]UpBxic{vϰB" hU[+?\/ۑ[/x[Swܿc~H=Z玅;(3b{{tFNeI{g]Hf2D ֠<@ߝ!e?}w:8%Dj;GCK0de}|hgTAw ^*0=`Y c+&uҠ[jzmUl~uD䀥Lu܋6giDK뤷ǻq tY~zJtrD=TY3YU3k&"SyS} Ʊ12|9}ЦŶ(0#Gx9HL#F#ɞ0hЦdO]29oFg/FDj%$ %$hUVXn׬$]H Ri5Q+wPj(U2qu!*E %FhpBOu.ûoR"x)ѯ1j-e4N9ʜXB-xi:>@q _;:v"PǢ"Q"ר$0E$e$H 8'V $(ՖzJ+E)Xx+($31R-*VVt:jS )(ep, UDma/QX"vU`rψ"}R):H"$CkV,cjh" ɣV!6.TP{jN$dH\J;s{5uV d5TF%S JPcQ[8_I$ όSLj"+T3XDkrG!iTrX%LHb1gZ 4gaA+TrGE%)(#ݥD@ "E@FDd6XQxנ =HkwiRze#b^o1][ƽR`Z%Mݢ+j1w3+} E`uAD25(=] ) p\H\YZOJ|bkH Jr7ք !~܅`% Cj`L N^߮Q[AQgv.Qɯ]$gKQ8̧Z쫱%Eρ4U:eO"M8VA>u;cnmW*Bse KnǮ.%wRم&R=wƽ7ecTna?EO8yʀ)d)Whe:ă`Xa(#ceplYo߼ҹ{k.ovvDZ .}nܹ8) |*0g~2]ċXF'v:={waPi N Z[ UୖFe$w482A5XO\~_8yЧdjrlj $j-. 
dZռOTLhj͖_/հ[/G"q ʕܩtr+u"-765?ywzzrX/[/~HV{<2g?ze$cٚ9 hV_+!N|z|0ώ-̴w<~ͷN+6=V4ڈ/!7f#Nҗ<:ǯ|;[JZ՚bW]"k5Z+@!($jc~LZIA%7' boP RQ a\÷Pep@]$]yuOH"$-?Er&0clw]C֓Gg&jVB]כQ/A39?{L9= =zg;j#gv?YlS.8Xy]r*󪗨~V|Ug2~Z6uKʊ{4Q;Ŕ4㓖DTxcHH VQ*eb-)xÓaͭ(hRsKQQA (nj<_$S %}%&/;x/f'{͗˫GfaoՂ(TRX-{ױE ד8(Gç\\zS M{ ?Q:LgKqp3/'~J!sn.KsDǛK>6e._r,%(#/F̿w miδt+ DflLP+ ڍdK`(=U9k jne$]`` M`\H8֎oo;pc}\nє2%)E)3e k~4"w#Iy$8Rv톸$A< [0g, <e캹<}iKѫɴҚaB,rS^Ħ[/v<۫RC>rc {LaE?su'%I."] \v0 hFPUY +t`ofh>"|8)@\_٬v,|=._= cX- ]~xSFLcP*xWP)"-YRfcUg.Z˙h-^vR);&]vR)т1&Q)@Mp:Ϝ[A`rj%#Qi}[/}:vR#leU~ .U\e{4캻[nhH^kr8uqk^1@Yuʻwt}8飤SZF?iPuwh}Vae/*P:{ZqC Ч#Ew0ڝ3F=8J{qZ&ITp0tݭ`{Y6Udk7 .qviϛG_C6Hc$ƝJ)C9k$vXUMAJMd4ix"X-"'I189X2 .Q H 7b V2nJ{,߫0TIb5I pmFzF|Cy饵E@hs*ho\O?*-2+0 zt @BNV(9) n4]K eӍ23RCk! ꁷ??vE楁9yJ5m$:bZ J>K#-5ʬL/ly hNvOu Yn?ѺbP:bݎLa8V(nА?V`l˙%pfDZ]M\i C6\9/4{!3=EV}\Bz+rtQajԈTstx2!b4dGW$=1t['EH9:) xeq[&IbSM ތ1GԈ0p\'CJ霐c aK1҄ˠsvkBsc$d fȝPFz!|hN{tqysE讂swh4BDJ 2*/Q8JƸ|`U!]?ܑr&&VV0LY[Jk*Df$بw0tYo eV$Ez|MW+6A^B>D W) bPeoxV#ha6j#T2<1K_޴É}}p/dYzuv3Nh,lFCϝ 'd-m%%B.krU *[&Xc$n ̥|/ߞ哸, ?kodbךˋ<2-?'C6$kC6$ ̛kyt4\E0Z5F+q ^gOA@Cyx~,o᏷'smCoOf7i=r_֍o-ez9FʻlEM?IzZՠZ]A^=C[A0&x`ۇ)M?Wʅd EpmJZhQr?ak-z`{:Zk 4cI$hwc^& KQ[} ^,0MI@ŋhsĖH*~0H5Z!InFz]&"8Hk$#5+#xel]$CZ- aTgR[XNr>O|,Uer/x{rswq:9{$^4z>;}ݽofMofmn=xICn w=*_>/y M*+p!^; ?-p1Wkur 95&K[ԙlV=Ra!٧r/40 \&( j9$H|diFs:KmfWl8]e4m3AQf4ݡ@No4>Y݇Ӄ,WLvA)rr|B_eFz S|u^rI _b"zw7_TOk8b1 :urpGݘu@u^={$>Nlar'Ïm Pϟ{t+q~ ]3UWN~SZ`fw)&NN?m0~f?V"jzyr\*8SF!n}Kp\R3嚷He)cBYσƮQ+9'zB+nOikWzmІ}ɍ~Lu 8aB9̯ZZ{ؒ=.>Sխl G˴3%9xфȰOSumV<)5]ˣ~ Yv]N Lps@gYЩ0h+tr9+MKe՚6z<'% t3׈0>69 !PAI2lZ WN9J(CEPӄjwrS\<;u;T\eD 1sjB-㯤򕳩ތN_E"[ȟEjhI+PYe" YrDAumcAam?*9}NXmg=.\"[87BɁ"G9;diÂi!VV0 QyB2JpJB2fVhDrt<jDفTC%ToYx{ GԾw V~-"{bM"B+Y`4柕b<\'pq`B{>st:Lpv5fEO]צh@%Fy|@(Iڻ*jxz-)8?xud{92kz>ls{MH,am?*ZwuqFh%BBs 5{~/l?e*ډd,X4C*@]`4;TtIM %܇8ElH%OvSǷxXcv(tcFuM};JY)'g55(o|Tа$F62nƖkOߵ~1+hI!Dr,}6UVePe1ilVXHRJtƩĠP PP>VѠw#s@c\FNuՐ45γ\p]Qӣ=Qa-9|Q+&P h}&)%倧K R:Qq/͖;foO_Sq,9QTp!kcF L%-C'|"B9!Z);B,yh< K}am*Byа$$ 2dg2 8$sdJȝu ~6vuZ^X kVlLQ"!MZ٥,ҜImJрY5-NmKL*K}khWfmWxWIRhX  7I ~|<;z|~s#!%b RIr,qQЯɊL=9gyqǶ ض-%@@1֙}M,QEKS弎Q:r>uYQdFTY;jS "0kG?J1r 5:hˆhƷᖖm6G#2f2]Cz {m`]*9|PGM0\R~{,i -@Zb hETKB%&>k%Z+)&,Պ,=i^G eꂏ15Jk.`Vt4xk/·E崲3`e]u`fAa2\Wz#R1\Xbe|iZ:Y4,u AUf^ nZ>?Ym&Ī `d5UO(xU[mkdr 鉱R&m0Bs)(cڇWv\\Xg>_{H a[C%7p&C]=цh M 0`eE}&G :XEʼn:IJJ'V Pn A; B|zV籆ɂ%8OKt*\\_WqR NCksH 3çoNO&?ߝ2@OΠM˫S|7/FiI^ՇUk12{>eݨD, B#!`8m%lwa e͸=\@j+bEjD!ZW8Ӄ0j@k;saF#SGn2+%˛+wly VÐh^3H4g<zGQGftCYU@n T^(+мC+MO_E4뛈>[ i1*/Q8JX- ےoa[x;lӹ5mrݟ]7K1# L5B]̄қwi1 ז9\[pצ_ >-%nd+xߤیGoB­,eQX)LtiO 36 J6*QoTN\VwcJŤoF %NJpW `2VLMbYݍ]ȜZbFO.FZeFE_0ӥ'׬K]ZjRZW<:8+04>>. wu_3; k3::}sp=+8oguElڵ+^ Cw?q+"}„A mDCu~{0hؚRUJ7Z{j|t!U1exPSv^Vhgv$SW}SXIe)ɇ; 5s㨿s}dbNBE:x0XQOwwvjDT7o߼1+Lt)EJQ$-fYS1Y&&"e!?@ h; ;-HPiʁ'+یHlJobt]uV:;Y>ʌ05zʹztm'Q&#Ceu5f }֣k78>[MPuˬ,@!Hʊ%4DT)zNފ&RGEΔS mE /kV)9"-F2@?rI.p\"Z8Z B`"lⶰQ]o@M N? KYp!-H J`i1d5JFD6񴊎hkq*FCO:զ D#[n9HDEhP]kдHkG1X\gJ{_e޼/a^ d:Hg`n!F;mjXVtrHsy] neȁ]gVϰMJn_W ), 0 7~sOjwx]l֏ e/1]H-R]?gӮL9g+陝2#sIͮuu>6ו7ⷞ7 #͵/t ڦ &]5m]LjaVCSQ5PSvPETQ됢``P@ ^ھL2yC+"w^Z O}-9m$dk:=];Pk錨Z?\>O{6w?ٟ\cƵv?0z "tg~|f?6oW['4 AčLwBBX]B^LZ1+3iu=n1Q@b{ n.]Q~sV~P z\(3Owkfr>2n0 ~ַWE6s>tӳͫX|[6Q~ZS ޙyqV#lMЩTFQSm!C6*&3US`hKhM`nލI"nĘNjup Yy-ޭ M4Ŧn"KC1˨5wnS`hsK|s, 7$ "ƺnaZ[*1ZF6 &ݒڰ/DBCwPܪ3//8iis?C}0sSaǀts1A|<\Ga5 %Y!@Yp[Oh˛cZFh! 
,ZF8URL6R j~\A9n]&ঔq#y,8lȔzʢ=73N@\#ry FRsaiǯn(ˉ!e iL0WpVe@p0B{`:B%84Ql넰RU-p  5u =Q=A)qLzxT>:4'(ӽOP{t<8 O:s8yVV)n)Rd\EFp3XK8JG!.^\XC ̊Ccda #|_P*ԟӽHU)Sȁ- KͰFnmb1 Ijf4h RA+OUD$bBVf_?h!oj?4h30)=/WdqYjms'"Dҟ\Ddm&s"il(tLh#&s p(vGs7u:\?DO=K_g +^FwԂ&[{Uy" @&N_"&W{G19Hp w$tW+H xH)ӿpbYwszB.&!%a <6buP׽u0@SAu3mP$94VSj( Ug[5֥|IluFem5eT)^J -J `˨<8*b8ܷFDuvз:C1Y0Խؐ\CkC)rR+9)]+QݪE'.qE׏od_E7j3:E /giŽS}w>թf!rw" PDˠ ڦ k tiF қ5_{"#[8P7Nn۸xCMCs#||Bck-أQ uFV!bԿտ_ ;٬HOxgdrh30*r+Igo1)N˱>"k+$1J7IG|He{x2Ƭw|]Lǖ8bm$h/+嚕7WFScՃbV[N-?OO?,?]}g]1o8]x (rfOƴlǁ8-XRGZx$,G_i,X?+VtH!Kt`ώ},q >N'@ˍQue9Z @  [<I1XX}36=u+S(ɵaOCqfٵjwWʳG7[mlAlnI*_z|4jsm`҆՚} /vTpe )v}2gP1Z+A92N뤆P_9^"GU񌙊ja-VN.Qa[8QPS(qN,ֹ,$ȥ5TίB#F&) ԡ!O7%VNG<ϗ˹a Uz{a>Iq_mv_/zwLv'=&mN[=;VBxF@1^&ĭ.}ne^v<;;7>V§{zfҞ -HA8n]C sU]8HJ?ڄ#L3Ndp8n0t G txBihOq bzp%!=;gz70BPn<7yC9FH,q4!ƾR %. V\PB*: !G*jE ',[uRJ8T(Q`ctRǨ0DJň)!FB$2 0b .H y wSM2%8(z b]}S2v:֜[fXs 06 䨅r * (gHs#VCSs^75tE6y Al^K7~G@{[V k5m~S D}Ps`2Y  IEj9!H!ɝ{MӮM!Blӈ'R{Aa3y4A^z5M.`AdPdu'4F߆{l}0wϧ;]][ O}T)BUQ O?'l3)?V{2Pˋ[׍Gb9;trMYOb/YÚTtkrkQY}L9'*ۊ-YnI6\ z˻!ܺһ%be:en8Nν[ZwkBpmlSPzL D^,T*X.3 ʹV9r]7!8wsb0\G1G?z::A}]4VuFwK(|L1pj"S?AQ)FQ#Q(RzWQ<1䕰1 O$M;29~Z8I8;h̑dqt 1p0RaqEMgMA6aIdw7ksAdgq+ Bݷ$,Z9N"l ͡{v/US냥ӻO<$r859 PW%Mؘ~X9 Qfq#P㸺~u /F$,z증pʁ" 9!EeQd:QƑ!,5c8`]Z#n KKNϏtLai ٮ'8#0wk9JL9 O;} 2 (79>}Wk(1b% lDGQ(%Oj.۽cDJٟK~sNݢK-z$ =j&46UsRk=l Ag8F3HhGۗЪAczuXMԪFS? h ]4j0/HRӤ4"!uJsiE/?J/'>t7fzÝ)f7\g20G%_?+JD[Yyʷ|4鿦T,'e9ff~=[ɥOrA,0{j?;!ago&_! c)Wq_5kyB^8f/e^2̺wk潮LGnbv:zeύy19Noǣ;uy%bRl7[+ q)&RU 3d).YD`:VvMLX$& :Yg*Ahec9g޶&7U ^|30)*f%'+YW֑v.}TڥJ Vop63Hs ."\RkiDsm8:ǀ0/@q n&Ϭғ|{ޗDΪi{n-$*c$d#*̆7+ onK" VQ]JrK^ yST&k7%BkʔkR[ġu(̰Fl IV4Z͌Ϩ.Ճ Қ6b׭) CJ5ɈR0ceXiH_Ua&%:sHaFYi?Zy. \Y ʵlk-)/WƏhr%b%e(bXLp*d pH9 q -)e!&otR}>µfOqL/_*qS&j]]ɐ|+m&:'QoΈރ|(mzQTR(M4޵q$B圜}VFg_=}yV"r OGH3Pc9ӗ.]]]E=y&q&9 =#9C[RoI/xNQ =|FN}w4 c0 t0X0!XIǰ\zƑlRcToˊ>QNVyM\x>&g2;'׆m a0ϣ0Ϯ~~seK?߲d:f,a7H Vr* wuۀ{?f,Vh0~նvA ffES-X(`cTaLBH!h!a' fe,4.a ^nԠ=ˋ_>=;Յwwo3CX r!/L7``o~Fo·xu:w:/m}L#A;}`V3 efn#;YqC1ў>Qd Ϸx'(1`JKĖЂʢ.ڻʍ.֦d\ 0TS-\QuFPDCH)PQ @&́w@8a ]۹X5&uR1R#:eFbۮ,TLY4h):ySqJzQ7+JEU7,.onVD+jC^8SL"Nwߐm Wu9uWua;íW7rXZ\R.Yq4]p|k3\rZZc'QXD (}3Lo>?G^Y>x4UZXT+angd]Ȍ#_jd¬V^IW{e:@/V?I _clʼdp@2嵿^D7x1> `B%N$B;$S$, ٠HBisl!R)¡ٝDOa IJU%7wIbR[H]k%\ciÀ!FRl~TnPX\)PBRPhTP* }Jg6D_dL1V@CCXcrI\2CV)nn&4^d&IĜUNwpE֬VUCk*у€e \IMnnE|gE$_ crl f_lmQOpBP'9;yӊ!j~RE9dvoW xJ @_J?VH; <\'G!G Y>ff,Y*=H՘eDZ4mO@rfUhK}rÖ^zb~TӘNoǭX(v -!GW*&[[@3rD C,\Ҵ3U~QN'WȳfE833ma=|E\xh<:dJuk7hwW$mV}&DuGzO%l?υ.OFS7*,xNu]-l򬨅 QN[?hyw?7WT;x^d.-=+ڻǟ 8m:\iM7Q3APLuU̸Ζebm`4Z̸,kY//ۇ%Y ~c}Cҷdr1F$b '5R; U 5"Xm5WK DK8wѨ5y9rOš%tUm}OjQ@Q^U="'#Dc?x:>U{@Tы<%Oi_OЯ \ONi(}u݇QOvۜPadWB YWh wD[L[.ûWxu߆!k{|*]n7\E!M+\ |AMɧ=DZ5NӞv I8a4(tm$95Cޞ-xtwﲞ`ȋ\ ̚ɤ 4cAj^˅ujZNhbRr˥tD>B*y˃'H@JR=EU,]G"sNruu4^9J@ժ7}(`S&ގ)C)1 +*MǴr895>Ԏ1csh6 CoEd?j h|tp)T3[|Y+rfp`S5++"Cy>'Tg*̚Y CrFy^qHE+m%sl;$`Lܖ )7t Lx,ql. DAF`BNޜ3dS\  @=y&q&9L`a$J s]MA 1jǮPr[ dMWlT'l' 6d[ic`KrIR9-gwe6(xd乧dܦJ%Pad??c%E8Fc(:Њ(1Xn@" ͽCQrH{4R!rI:ւJ\vj>BBꈯ'8]2heBb0 YQjsA81NQ.t@L3,QDv 6,dT/+;Lrʹn?g4Fpw1Z!HQ0h3 J1c,7dIB= QiJOnr6#>tiz)9*%-Qϱ<\B%8ӯD}QknJ4;u-ȏT -ɧ|Q:o+㠅2W_)r%מ\lZ,]Tx'\((2fsÁNχLN?Dqvr{<Io6,1킉ce>#zwGh6M_ޏEnyޗ꼧5h-H[M! _n'4-gJq+R pM)&zῡ-?vKŠ QG'Ttݒ'ZU5!/ExJޟlj7^ =vKŠ QG h*Jnݚ΢Ix} %'5) pytw@}N~B);>?Amp% V4> '?AI{~Gp% sO~OhR0g5='f%A z|~9 '?aI 'F%Aq|~ńМ$NPGWtK ) jwZkTژ%Hʤ6cOjG?to0syYϋOhPU};39]5a>滿@e,P _ /[0Ɍ e1bzEpj:zz8M0מWχ_Q` qbԎ^! 59*`ٯ%w"O3*Y!b$R(YcLکfxXN(\Y, WV-%?,rFPrXx'P@iј,X'2rKbV_͓z^%@0w;In'wF+ZVHsa- Il r`ܸu)| ,WRwy>*?,˧vC"-?kCR`~x{h_2N0VSV3烿!3˛!,W|IWW\[.߹2h;%p?ߎ|නE`!doK|)A! 
x̻ḭQ "Wt7'qk"?ȬE͕7^Lo>?GFZ>8piAƏ؂Mf5u(d .Fax?_}1WWղWq=|UW˾&ҴW*s䫴1SɃrà\& |n3{ڌ(V|UZ5y]q:*h ksj`9kNhИ9>@-$KO!1%s+6'q`,@rF28SϬSCNR?'d&. iKppY6ڙT<> s%2n!R+e?P =65Xn ۅW#BE=HpҨHFpme`'1tʀub뗸)@Y: Y!8`VJMS9"g\9Սt#[l(o?.>m X"aQL8, Pƍ )d~sj%: ^=3S;r͂̄ øi<3q@ )iyƓ/+1[;L3g3h2}˺Z&K)u~ 蔢DOTX h 94x ׹b`zϹH͜`%  j9,,[ 8t) 4zk뺉)s5`VlGwi0QJY)k0Z|֕~,uDtt!ޒ*MZö^JndpnD, <śو1¢Í rܑi iErh;c<^yx鶉,^q~5WXRM&$t>yJem.;i e ] %8 Vh;$D'_x }{o<CbRI?&]X뭢d2 5gO*Ξ#Ο``4Z ٻ޶r$W vrbY Aezэ\yxNYJzҋ#9u1 %UX,!̔ *+8(F [%Ym7V0H))ɢZ81&x)GOq5X-&࣫$c4 *:Q;ފD cm_6rjX!f4`C\\Eʌ:^|40ᗿϣ->\G1J[ԸOߪR;Ȯ#]ԕj'I Uݫe.<"IS?{]/nެ_>oYa{7 \#^ wNs-솮Mٮfm+OԽ$2fǃD9ZMs< | {)سgi3/gIdI',ZȞZ4T2WX 9L,Hʎ[&4Q*7BۡhYI8ĥ;5jٵ1(g22s> fFI̍t[Lf >DI@ZU f6ݡ_EíYfQ ͇T )+Ce)eRʇcwcU7֋ewJ4ꁨ k*75jRlwgCI\}wgǖZI/$E$oG*o6$Qmƹ"SWu7bLqvTkp1N1b ӅR\eȇIiPu9_C T u>!'krLBC}|nIV<vxɄMOlβPzpGH`{?[=(v֥>_6nВyDhIng΅``z/+xw *BU1Q0/t%IcMzn|*Mpz4@A)Кr܌gk^Y 6vZ6(lׁ Ѫ`b,ՠ{["YhDh7#g(qST =-檬AAò/x<2aE[n3t3aPҮ䶹B!j 뙻Xqנ7lxܥDse1/;c53` J:_Y5yI\= I ?¿u]'R #1-pmVJbĎIO(G6L8> ul}KIpˆMzi?MeꚀPUO[3Pj D]u >b/]%Ϛam":&ˁiN6y-#|MYRg&鲿 5 ~/HCNp[k2ܧIU{$ߧ Z@P@lVm=1ymU* fĉdRzr2kaRg0UH!BITBX[3&+ QQ^r/D@'0zÙ,wP\\aPS{;Sf :QkMcQP:^U@.EܕX|}qs?8+Ԍkߝn\-ߔ;6)Ooo(q 77{YX|#ə tZNfZz@)Dhk#g#V u:x JYdX\cYJBzƹx\B(BLgb')EڛGm̗<51k,M%K4X 2]BT_'G?9jά;Cu& a Hs\uN1`s Y ݟsʊ'eg<]mpa,1K[NcěrSl=ENcyT;d=vJ&5П}(S>-4tXCR"`WtgWtRd J k2e2یX&"   c9Y2Y\d[:!Tzv,| =8(bzX=ڀ:c*==L|1A4 QR0JOfZ6ӳe-O5n ~wp.H6$/?+ w!~w-3BΫ퇳Y[_=~ru ])㘑DY۳wby9fWnт29 ijh뗛8V]EiRE32"))[fN;Ud`GPi=2$Q'AfxQZ(%Edl˾ FMrok#^z ٰ/driG6 +Η!V([|+O9 BEzmGx^]f [cu؇{;gyTѾ WdX8]VҎVX!yoTI7'{>Y~ $c Ǽ=r!25GGTs/es ŁQD*LH4 X%쓢$%{&sL $q(, $)5g+ߊ{;{X܄({ YӉޯP`4_n>j\9[~+X~-Bɏ`jaVtoS6Zj 9;tf /3LE( # րA (6]#xNF,>t\V>U`,'J p,J9m@o)EPPzC6OD4Bɏc _Jm`cwqFT`H۷1$L+8_~џ?t5A{<Ţ`-i3SK:FU2S&9s[XGX z鶇GC4u +)j̋M{J ! ٌvye$e2Ao6$mu~8U΢4Xynfn]4Õ_bLjb50GY m~_H%/7ѻ>Tx9CdOo߼9g]PӘ{r+U^`4ЈW PP$R~^/5Sx [ *| xcN` l`bP I1ny 5Ϋ,Lmy]ʀ  r4xaY:۳ Plj}F>Z_T`N\A$*$Eyv+"XJ!h~R;|gNi5gYq%Gח* LQ*sǒ2,:`rKwnIG}v ^h\L͐_!4p,)昞o;l~ OMېR*mhw!BLB1jG6DZ䦒k"2Ć@0ݭ9[0J Eq<^rp*z12mLjDzBV 0r=]vlJaVi5_0°D""9G6G (f9d&<EIX6k`L@@%V6;g/{Q'43,7}7FxzNm//n[}y<_lުxċf/0~o gk 5l vfw.+Ƃe m3_~ yt('m[fT˯vxʘw4k̚t-i4,eɥ+DAa**TgJocmU=HZV,&]$>U0 (sǩµb:ӆ?lد_ɵbub9xa9_dޠ 8 ;0 pxg٬Xhлy!hk|Z=xmjN-U lHK)1|!(UK(&sgb\D) IId E4faMI$$&6*M/ OjrF3 Ϥ\K&$wanR'*O4߄41䄈NL)v&z<{pˊ=ӞޣXK B񎵰_ߞ8P Q́,0QcXU/9QS CF@hxRR۽z1عԚrcJD嶟 ja!@SxFDiz~M!19Z!k'WgK-0:R U+< %%aFW'"J9vjY@3Up7y=[wƜvNń />"_Yoq[3~gU~OzVϋ[K~9 Fuyn D3{=^!wvɖ*jlS*PEi [>"S{kA̬R;{fm}k~x}`mo,mc#}ٝYo?,#-Rdיͦbvs3t}TT8Xdm,W6֓彩}qnoDCl ;4lc(wRi獟($UKpȘ<5ut(|6E|!{Q%|lhuF^"Q%|EsTiBq )*S|5W "s4%Oc4&4vI^0]߬"VM*QqH'FEJ($ Z < '42,z飺2Ht ;,KZ$'tRz F=3uƠF_?|㥗6Ʀ é 8->ʊ 7~IG:'hISBVGЦN~t 87d P10 .zv4ڰSS2=SryI)Ô9v )B#q=7tQEsO]~9X r_~jD$Ғ AtrBEa3@@5B;*Ro̼HN>o .z0C7X.FS|54J#2338Sd[|uTHRY|]*a>Nmq1ª!%QU9k05k=:'qZNViULMD=(RSA[hM!pnJz8"0xpl 5Jr J˻ew`ހ70 Im&8[V~tȡ> U犨4YpNqI(1%ܛr~+ۙ௪! 
A<ك $siB1mmU3BSN( (iBq )>9ECnNhu[SmݚoDCl #4!rM4·)G&2 Ә4TKR3+\-\uX@,#FEO(Oa%1m*lj`2x e2&HbDRʶdn w4S#`TLu, UEd(ز(JT pyb9sW?.ZECuT@ȠwJ[u`Gx( ] ­ ϡj/[EbJ?_xxF K"K]r)8F,ET/ݣ0z.kp Gk&)U\y .)pGjn<B6~lffx=B+.FD_FؗyjvF lrκ~kkoޮ]CE0F4cRa$4˄HH2#DS "bCZcws<:ς?ۀVpOL!-<8y5XY<&ģ6v{p9.?ܫ9bT>`?I9@mEnkP.`w*A[vlXI.qֺ=^Ap]z[Sh{,ˇE+nzH~ZUDzkUOnxH;sۦ-HŇ εom2DD}x=GTh{U7rl-s?* FokhH!Ieg⛫;W`MQҨҬzW7V2ւ@;VCku @ڐ{cg.qds+x,z M${$z=lƴZE(oB!L}~E=~~\!y ۏ~B3=\zzdEo`|z}x(vOqTz]߹WF-T?=,pL6&!Ed훿%e%+.6ZocMqj8lG"L\MM hn:EH&J*?&`ڝ&nZY<֪T}7&+0-/^e{6de\/_̓Wۦ_|:m<?M=scYJ1~ShO`疕3O ¡)Oh'%Ɨ'MŔ'LyB=As5<2MyB=Ɨ'PNє'LyB= T 1 ShOpwĎ&O?Ae9p֣=u&52@E:]֥[s;?~OZ?/nm?˩/%/-֋^D`8ks zu9}k w;4!RB-ZhaaqE)Y,nHP&16B6{kiV -ΰ/c59tXN\ҋWa Z1ڡԏE$Rv?fhO%BB ]51yW,n+[oZ)XũLb1R[] ֔a, ' 9}[r<[хLwSJ()9 ݡR$}_KmJ) u%վIQ3i.> MpPR~K̻T^D;sF^aPL_@&ͅAju]zs<#*&LfxobUj0?>_}^S|,SLnKS`mAWJwd/?zWKFxSdO̓+b3OZMP<.B!J WNՅf2q (Sss#xH7;sBD% w犿 q%ՉLmĶ~mAȺn; z ̸zfYs]~Tmj XI(E $>JIG>tz{= ae»CsƸ MXYN k\\UX\JlP]C\jtjwkЖDaixxBi':V!ǒ=J3b: Y(t RVaA,<-rFjoycWxy:/mnF/[u[hn/feS*ں%D'RҐ"% 13FMbKO7F| MGYh^DMyi3CBhDbsg0tJ:1|6!@@jㄹчPWC @!9FOfDH룡Tycg0*\;F-`MS V{ LˣwD7MuڀfJ9\G(Q1 d&{QM$^sj28k7p.T$hZa ͸nk*tG,.lMd4:%bi[@ 8e sG *J|.ݯg2l YZ+y /CJ1w߰"A߱w~Mia~)`>}NsBT?*o+|ÛWS:or΅Y|a~j߾\^3hK,qߞ}zg&5?*}B8/f>~W;TS*8PQxj'0Bctiw8x*x 7ůg.52Վ ՠz2XD+UO馭@J[w}ԎX֔:UxV،D%@59F1s-*aU HFZVq"QA &ֻ9`9?Fu0="C׺-/@CϨ9|_eeAvq0b)+hKLՑrh_5jG)%h?SU$)EvVInJDEjl59|г}LY!By$4 F &p~ljqXѮ#Q ٷwI6-/WW忿KS2kDu4O܅6bb'Le<2ye-79(n8paMBo8F%f*C6qD%4*)z:{oG5vQq˔OǰyGr[% ln9|"ygsܾ^ :7^bR@/r"?:7O>  +NE!HZ4ҿ*XDo앳4)%F:_Ib[~T!xK,$ÕAZTbQki dp19Xب[lڶPЖ`l{`hzKvp Z9='\+z%_+4Qz BJ\Zj̠q>SI8i󤉥q 3W01oY!< g0"phR'XbU2.vٻOЗ ,`5_Qpq% 9ayh [ʴ⢣SBLos>9l#˅ٽGUIaበ d'@2+$$mt+WX[-x4bLTL]M'Y*fq)L>j%<$3cLF6fT!Mհ~iͧm'55M2A2M;iltzIlå5'ڇ(F1:3,3A"v Ⱥ(<뢯U9E,*J ;6q7t,UƘDzƎ#ty. d:Z.GJ\`~ T45LN&*冦R}tP"ZOR>J'S *K߮JZe? ꄭlz6SDZR1"xD&OS=bkE ܀c C+t%eb%(b:b:;p!דZs{tǺc:q CL瘼&,G#?t]5( Zq@P#Bw= 6{Df!"ʉű!]\?J9(@Y@9)"=8e xK!-#F:x.8 Q Z=1rAu.^XMQH %uㅳ$5pJ@@S0F(K_.!b MH&qBr4{&2h z (BU&CFד#>6xL :Hhr'6ipR(1i#Q  T)V$1x*J#Ѣ%um*uJ1m!>NҦS0ѱoCNP@iX">w%D%s3#Х3/>)8ۚۦ tokn+Gl/ Ks;d;ĭ!K{4]Vn}F' NeWPWɗS[v.VTC C֪!ay8ȇ:ou8}H>0f+ߚY_tqa4t=}rZ7zIȂУƭ W7 oU֪dS ոw10w^|RsjvR p+L$p#Ѧy:cxd"\aiy2n $j>اPVQ}3먢:6hɩ׸ t*!𑤃Qs#R]2yv 5V;*zWk ^JzSY  A]pT:3п Ot&moJ|QW%oiE]=0I#y2O>}^:e/_9e@U).ukL$[I"bxP+ExcʐkC j>\q-VA\E jF},ln[أ_ӣ3p;}s\]):z~-Jۍ&_0X$mP wvq3s!cs4 vJO;%lrEo N+߂c)C.Pw/4 ۩u##A vm}Л9%eH8( TS!`\Q}yÈq6Wvg:=<1oW v|ztn'8 հ17n<jZ7&s1G3O( ՛2$8R+b"GN&c(jJ}l ``ԇ4?W 7D2O0.|[cfG\BN ![)r}McS'*( J9mrURGK j w1J89ʜ0]D~:Ouncך_ǁEu}{I&޾gcX|&}'n84:& o^]v/W\-m>p˅_:ON7-y/jǸ/du_Mdeoٳ$ROjв}r}h9~k:-v2{)|Z*Uwf~Orq0e_V%kԕ-POVT!(+2$eH!U7225rWD- VMmq)Eܧbڕq*S|EST-UFz>ݽTVNH؍%E%"ӈ4{co-(oBܗGW6̷ 7}~חb~!_CNy;ki‘VC[][+#K\?2{6Iõ>l]1ՄcR g.52·V"o5jZ{]4!IFM 5rEBt]qYC#RT!VRG>5p}u2UK+tɊ*jsL܀s L3#a6N:k%ooꬍHacyPJ6U:̉a Fh}{In1ę<&>pz |(ZO8VjLt-RRȊ-dǃn/TzM+!r w]%"-IQޥ*$ҩ*UE u*EcCUuB4 2?O7 r [b l_v6q2Z帶1`Vl,Pt _6(O߮2,Ϋ܉`*9.@?rq}4-Oއ٣kn{L{J<.#/'. ~bw-j ЊT?(}Bo~9RO&ZVX)g(%&TN4AZCcEr>Bb;I)f U}Y࿿8S2kͰs_=6~ֈe#vY6bgm#֖HGN0œmZR>3'`DbJ>j[kmXfp`́8 Z\PE:oG)ΰ!+6ElTU]Q^lATC4Bisri]6:(eFm\vR 7hJRGt^˳;Ws{{3f(}>a/=w]Olw-ӑ\J*8x D6 kB8HA0T *`ˌ14*OiZT(H,WD"jmՊq%I<I)tYO(xD x"SFV`#Tã@tgŠh"h5e13 8 %b;3! 
PF RAjHrw.,qo)4[zTDE4,Cˉ1RDNޠ%dAsQ؃oiY Zntr'|%Jƣ9- Q%Fp(( nN F#r[E6Gyē#Q0RAL FR vHtS)bc܍xRC3InLŹ7ZmQ#"ʀ Cǩ3hHQ T˷|SB~QFE]b^sq‹0ks'2p"(;2A(dK :Fkhf6Qb8AH=pj|F\)"brԉS-WFTLx}*X6zeA:N1KN4Z)_@@ $\Q"1=A!\D&e(8v˷Zj |'*o+Uqlܻsj1k2⛝\ bj5{xvѾ/Yd\5:9-=sL1B%We6Nh L BX%:pBKp fBק GG=ĵFkit]`ar_S%1l &&ht #A"O xG\vչ+9&b g Jjt:RtIc\%Е ̉)MS T _{7Z@?{tCaÝ+8)yA5DjT7Ƹy\z2wFt,$Wr1__c+4>~KVs{go'jg![h~G>g` ?o7wvEݦ߹Y@l4Ue>`!Qs.ݻ1;yU%?4Ǖj w5+hҭ,vFŦJb_Hz0b*ъi4jsӪIi6hTHeͤsͧQju;]Gr?ՙdC6$/6*+)ڸZE|)vDGAtC DsX㴊ɲO/vy䖾J626W"K ZnID?gϳt|ₒsz. h-/+?I?p~yV8?jȧ'?q@Cr6"5C~^u{>,=B%4&.GͣgGƱ!a #^4<8wWu(0͕Wf8)Z:ձ߁t4Cҥvo Uf֑KSQm¢T[d?KT`ÓW!b rr9c,F` &j&RP '\s&Ku \=VDT#' Yb4] @ZfY;-Y '.$ Rs,9 *W G1G-qT䵳%j0/TDߖ"ii?pRϛ36 OBX I3yf.%A\)0Y{gj:ic=j:O#oR?rP~fts.tBЉ&©**1 5w1`s :֖Q;[x Kʎ[m.v 'fr"uȇ8Ov(f&>o9GNW,DacG}M1BL0aD!;PH(^kE҂ HmЭ-UZ)Sk$'B9'BR oQDvyK5jb\v0 #uh٥Ѱٷ,[/^ ҴdW-PbHȄyƣtϵtL_4kVSc`$UOQǍxC2Tvǯ/qh}gӓ"LNd ] ,k}½8!uѳ :ZygL pȡ5lf2c9.pF}`l짮R=6KD1Gjw Bn(wkRccwdSƀW˻rA؞ 5[C}5{XM^Sh]6;ٙzM nةl5?f|+6ZYݰL2EReo&ܭpv,%2$Q<E k\c̏׈(jtƆ=Te9m6J-~!YQ r9ݔia_аujs2<nV7hT.vm.M>n&9{a~N..p.+oT5b쳂+&Ď*Ko2úś7@9d&׊V@Nn@Sd,/%TI$|&*! \FBQA riP b4"yMq!J#F=XY&9P"#JWdz zN5 (SRBhjͨ`:¤ m^3Ae衷wqaBy(S*Y Q )¡@{%7/U xZPȤG ;eDk3#e:AX+![\nѽ0GimiNʧ?Ot*A.>|wYm[G/>zcS}B GԂ׷g4N8g)W{ֿ3ӿ/ 6_f Uo/dsmi0Z@>4QhܜTV{sх =ړkDs?#:ʹr$ @* !I\Q*,I|or饕K ,r\\CatڃrYbXjsଦII@{'OPun4MQp#YL,9 K"QxkTJc@9?Ƙvgk='Nڐ@]Z;X7%wW5CpR\E<@ rk#)XS,8X>74kSm7 [+Mӄxub>81ZcP#Bi$ȉFY\y5> U % "ܵE @dpU`=e ͷ6,@Or'r;٥(s[)iN(Jb g Jj?Et?7J@@ =$`~zLKc0!m6Qo/-l2AOD.{H/_^w ɏgV+kQ31]? & {*yD]Mja[GE9n|R1۷V $SE=fhfbO5[k.G,6K+-6=/5|Xe<2n=Zn-Gtέ- HqϱNS&c@Љ? p;Ze?|?xW[r@wwQN՞{X~># RԓG4{j>ކg­wɴ6o-Lõ-qCL8)o}}Psۤe+Ga Խ^ݷ2][o#7+_fټ^ 4 a'hHVLg%K\$,Mn]JUwxx;S$ c.۳qTrW}r3 6E61.JlFpR*e :DH) (-el^x,=Vtl4Cj@;U ,y+6.ѓ'$jm09 :B]bS.ʧG-T n7.x7ͪՖ$Qz\/5Wyi&*j;#/;CwKz!b{$ хtiIw=_67 tfw!y-TiY1cABfo49=J#BfP_sv9śf͎l.TJۻ9GaQ $SV(Y01:5W[o%[~ F\ApEg(vZiQoУ7uA0D'(vY14~wb}8ퟌrKj?^KZ Tr 1 y'_k`dfٝ /ng|غB}bNnnNNΟo:r<4ITݥuOiА7Y:%BXbA`[otܭ y*SM[=֍ҳhr1QgXYmں-nmhW: d\OM W7[(J_'s/l@_dݝ2bˈjw?Gdd=YMV>Ӈ_nǏnvn_vM"wx˝5QG ]/xEG6&+#s<XAWO+r jQ@,iTi-}WCWNBNĈ;|qr]_gG稜ggg7o< GK/r㮯Կ1 B^/JDf)3=ň"GҚ 6M_ѕʝtS3RhK-1BKQɬ˷1b fcDS1zMyd$uߚLjoFi!dirWQ).U[ jPJ?/% Ƈ(N> O՞&|dS@ JKd 8Ydi )>*KYZ7#Du:Hilq_)SR˫{'qmLa4tWk\RrzɕjT)9D[&d(DiϥRl=ƤR\>u dUk~Dz]aal^`B P%AD%}_/CN-E9 IS$J8'Hq=e^IDdBwBK@E1V)3N bĴPA(VVG.e/|YP>޼1 4p0޼Qzg u,pMa5C {qP0B!} M3Ǒ5*qW?!fu}I1#u$g-Q?z=o-:rlFgsNIauǢ谺սn1 K`T1 kM)aQzXk [\q:.Z쓻n )al2Al (XK ȌEe@a.:G1$ p"ECieLeGxZW^\?*U FG\H$VZ"(†)N pJWiW^Jg H|w U{X*<>f]~s7=.X|뛇r^ɷa?χ!oAaTʲ!gSH-L1sZ5Vb^ vT7[MKMxKf1 ccvaȷ_ LQp&-X3M96 Y", l-eX$taDr@ 4\zVEY8-)RAɘ[,H8k׺vpqMvK B_nI$w.1sR{@-%ª"s"mhW,blږuϡnDN3p3)Ѻ64ں!o\EbvfS@ko'W9hm-,ׅI< xqnKhv@ym ~T >k'w=_}ȿd1- E%#W~xogvHG4E4%sN2ަ`kV^,F_O翙/L̄t|m$2 9je E)/UZ}iЏ` z[:,L*t Z" B]w{s0#<(GCkt ?ܲǜYq}#5\Y6g@K_o&'=8 BĂўF_ lj>-+ez1{Sʀ*wqph2fɢk?۶Ѥ y qZlEeȎ6Ѐ85x2:f WOP1 8BS% 1wkW/kAi& y!e/IU ܛN~;-̮,3cзA>}X׷v92E b4*'.ea!7F"J fLr?C{>T?'(q>|d~a/+/gf?+Ο㹔X1wؗKD6^b` A1b17&lT$'vFbJ)a)10[UkY-y+l(7nX[6(9R3k|T<C"2qc@q5QYaa%B(r ^ I/ 6^7$h"} J7d5%@m"ꉽ_΁u_V?|*\77Zx_ڇQ6!/ws5.wwRF˕1ilaDV 䮓 J E}J(t}BSb]åUNkT0@a&.QPWX>c]!& I'27 D@8xۄ0'H\OzmO[${45S'i>ݺ_|ԩ.:yIO>ʹq.镢2<[kSyggٙqz_ꂢJw59Zj<{,>_ ,@+)~tŲH[ݽ{nq`qOK!Y(ywKʻ,;_K S}xifaS]1Ekpr7㏷[g)K%bK&$.B',@ P̈́FAJ̵؝BCN 0lw;}KM@}5|cQ ! 
a=߂WrZp 6^))zMba1ª) [0[ Pk[~p?؃(  0 ܛ)\RXI$6¼)C2 DL, Gkb┄*DOUo퐃 :\kוlcuKl@N!Җ?WҘ~fiI}sSsNh ~Oք= /vmh5 LyJdvgzY7|M8cnn۰o"}vwf|57'NއSGOjcz^Y>q*f7Op糳yަ@ޏ~mN9)TpVK}wbH*WH8{HzMW72kC}FBEdSS"H:]-#+-FÀl簸Mov\`Ҟ^'Rz߯{HIMr(EIXhWU6]ǟ-lm ?7_^\Kϵ31%#|0_KTlĄ'_N0)ѐ-(gwRYODiDs\sbvo&yo家–Kw"= Ϙ4M/>SwOiHå/i˫Wy*0ܴ%(jVͷIu] L};$zImBN }7$Amvd{ͻd{ _ٺ ꚅGLP,w%pwq3s!sA($/zz3P욋F0Z)qnRlOxX01'\ ײ\KR[uh;(8_ioK:LFi 1 YTJ!!:cD BSun uǟHu뾀j}'0[UoE vI%UC^.akІ 9M5`hrY $(D. \~E &ة*[TdmZ7X*c]>) fo4RbsA377n^T|1 ,49<ٹGO>%Rw׃֨Om(-V.v$!\D d nqqe2]eTO?u$*I!O2!IVZ`Q(F@kd~@baD$OWZ}^>JA0k. +'kMQҞA夅tK~ݧ+wT<ܽݏ? />__`Y!=VtTXZ5ڏ!D[gsK$_qgwi`RYgkEDx8yM;}MyGO,}Jļ+~1wMqo2at p] b9]AtWrS#s=Иtf /wz蹽H~!frONbF&]jŔ@Z6K/S&Oxp RA18<8ϦDNإK&=^.gQ.$䕋hL%Sn#  e s)"`@T$"%FaŴ6D#dC?9]n"or[Pv78p<٬-b~[tr̎g sIThp|zgN6e5겶}7v.`?d ɁpDh+bzC Tʭ\OKuŐƻKbvvsw{^=YV-ߞ}QwglueU]2ݼ|s/sgJES_ѡlEo31GW'%q>ڣJ:: 3//POݻG1/eu=XGS]!ݣ jD;2at.GW)x8Kɼ T$dz763zy4s84q/S(AVu|*hO&Dp:F]E2jt]V 0A"(s$y8%c5[;tvVIB< qsP`'(f\gIf&FG3vQXZ0$)BX81Yl0#BeNQ_!w-݂86NWKvٖQfŕWDR^I qV Uhv3 {$FM3ZA18,s&4'zw´ f!}I,Ao(?v)离Oi8pnN}^~͚sAY2C{` <.rGW瘈of\^=6p24:X6܊N!y|b@0Sbݹ g!/ Mm'.}[ ͼ:zͬt0~W8o_AWL^ 1q`v`:v<( !cͨ""G0|=C;/,TLah!P a.h2d\n،J/C7IN1" HÂ%%t`Jx^ V0Ч-E' ƠW$7,iqHGtL߁F{pn(pS6^[k"3"B~S_Kb1ݟjr >Zj,tQ9a US`̓1CVB -`IMHIH ^b[k.lA.~бO꙽_z-pq ?pK =>$`^]PE\\稫p @JS~?$llwŊ?_^^\~{U 7\Ն;7+~GhoG,lӏ=z ;0dL~U sCMpl)uW$P5kYv"Q vߌ)G&6s(!'nJa]+üP:̚Re0ry/[6JF*Æ8>Dj=[O >qx[1 YEۉmdI|1:Xlf$ 3#`3(A4@J!WH*R) tSh*bT,8WPWS 935>iMVj:@ v,<8y>!n`;_\(YƎg Cx8y0ňIՓl}Roh2iQpfB dȁI;pWJac+K8Iei4hfpMl3!”3m%nJXw攰.$䕋hL1>CMHv[)9S6măSn/ݺW.%2qmxJbXҁccN {B`j\IeI[N|ۡ:#t:cnS;|<":Q{J"d\ʙ4^ G Xw1Gl!Kzn`vl@{n'}7Pg~hHQ 3&o)rD$wB&} +%aU1.j$jy㏕$;^~*O";?Ev~Jٹ JzH(X0aaQ#tZ ݇5 ,c/|~{3uu-yV7Qd#eV!Ź ^6bFH\s:{Mic\soJJ+AvZq wbEI[pT(na 79%uC7*:HRtsc9hNZc7P5kǕ `_]7D ՍY0wU7>\ТM1PbBDq@l~,hUHn 1-f cOҲtQ֖[0PɬۓT!,y`Iɼv$#I`$?3gO1 U(Qj?T$'ut <6r+eO΅З;rds5_5V~ 6\:TtA{Gccpf7ۓr࿜/o gN].㙽P_Q_/A9ŴyLeu+}c-' eݏ==,<sy oCsxVd%@9NoՇ5yEOn~XfΜ , ^jBZi_/_ `yKSB.؎tNH_3 Pl[+ D8<,?>ݲgGZ4>/Jq"? bAqz5[Z3Іq1./KsMpǙ*y@d~UFVuwߕ!>>ZIem{ԛ۱ЖD#D$ƙDS} ]yV(v{"NC~0$xpq7AG7aPIb`Z8, A;D) $eX_ؽ rl(7 aMYܕĄrN% pPi He% +%Kra瑢NqLJs0gbM#±S z9@S$l)O2 K)AT*5<:1$g|,4aA"|b݁rئa eHpv2PbKEAwΈNBt ^c<cM@{JsP;|+ xf3kKc-T"+rhեHGdp A{'A>cBPiҝ n|(RN9' (DLQ:@B sJUPfδ^T{[ %%l!E1ꜥS h [{bmP*1` @GaRa!سq<De ҚP!X&',lzk2r۞4R .AbO^daT4Kn|FZ|ɘ|bG'_XK˹EJ)vaMw~3eCBK"% NBBJ|x)NO|Vzvs oOqKOy*\MGR)ٓN֓IYxS.x!XB`Y'@𱧗dL{s0]2>hX́[ ɨTVZYFfQw_n wAt!vloAg )$vo=7* ʺoAm\:l0'l;de5).sF<.=~6p(CsƎp,3$K@bFdm6 F%y?ZJR >(Tu–7 }f/Vp>[{laѹ%RuGUþ%EؚMQ_6@jKMtBzzw]]'G&Jzp-ip E2VnZZ0YurB[+GMUhm8G{[u>(}Bwެ!tcw~z-[5/וiE56fa\}}S/t|}Ƭ~cĝu b:wW_SyEcjkXg-[7wo&Qu Pˮ  >I -X4$&{&&W* D!K7:C:CV}y[gGw-ƭ[ߘ[[( ׍ugf2FڋFɵVV>,{~.&=R8>Wkߜkk$?Ct{Av</`g lQvњH$[:z%IA!)FׇdPa7upjTtF[ _@1rY,| Swׇ-+nYC K  L{s1ۣ1LDf G2k~Rp:dpLO[92cpnTC[FgPpNDġ S`EeF5%'A쇡3pvSgO2r<r":H- [ndP jDg3h4S7im 9q"SNk(rC10OM ncHȉ xz]NjԈgnv66:R!!'.Y/m/(I 3GHԒSz5IvH['NYJfX$b3}u>)a]mQ׃:_ŮfC1I5 tΨl}e:ɁbR 앱ȧN ;_\062vxuzFBgS-G\6b۵=jo)A!m>^JRZND=@7YV#T#ѤЛ&ɍB ]:jpT.ҋ) ˝^Ŏ]*F[PɄ ]Z:+}{ֱaŏ95 U|(ƕ&߂*x 7#'zQn%·UoO|?4`w~V++L:-{#},q@}ٗ0N5ɀ? 
@ j27r"рHw'mil@gs=®?g~T##|6Cvc+4zlhρpw:k0>{/"!>F*¨ #VO6j' c '*':@T(y:p]J5+l即fm]j{% ֊ U:g]PfV̾X퓏>k0Z{kS@HE^I2I)g"d)6fQECVNf ׁA9(~wNt>:,X$0\0RDڷcaEqآKd U"GVkǫ1 s$-"mZ'/ +1]?~/7^k6 ;_dyrWyG-WJG!j;|˟n?|Ong ?:2˶cHןOoj%rÎ:m*hnKn_r+d+E\sտ/?=Е]bͫkFfs6xl+ѐ(Vq'b+~x=Üi z\py~y|FaЀWbց}0խiB^ʰymXt[:ǷM65RIĒ)YM(liT "9D-Udӈu4z1"JFrWCf5EO^]_aV>hCx&l Sj \}lO6겴ě^+ a}0X8_w톽yAmFkWm]?'񜪐hp4Q5_(k+&6:uZb2d9ueMAQ`"Ӂ=DRbЈٙH !ka8 2,*&;]1pZ;I9;h 5^;;G4zf9^ 3i!\=|֬<ͻatG/K\U !9MÑMhD׀Y8R&N}^iy rH1O1 +sE "%`sOl - DzW|}-[`xo=R;b"{{^J6Xr4 G'IJLkYmKjŸ_P,m3fB;Z;|3D3xly_p%#h(e5/Oht:9<<хivɽM*!h9B׎7>1=[DFU`/Bỉ&Rҏu˧RilIDs߶/\R'Ѩ R F(t2[a.9V'i%)8o)]epRd `6a ̻/_ԏ!r;u`zm+H2mvZi徾.ڂcr&0Nd!)S臧b#[]s nR{4}Sf̴JC*e0и npzh;ZU:k3_m9ftv빢ZrߠҎxKmKYŏ兲&+>± ,ǩڂ ՟ t+6~۾f_dQl)c g(RE_Œ&zZnK䱠N4dB-l JVbs@zK…P"&xyCwtiMjnnI#x3Xw 1޴B@)D%6Հl xzPxpN''jD.1%3Հt;8sd妹a5u. y<"QP s`γl[~M0dQe-rAK{H'[Τ&RӤ=:UE PTɁ|@l-31h4M{Ք(3Y3P!>6)3 7x8dkYn e2Sf B6 PH$Y2; 9)Җ2X=-軫7w`ċO/2`;4|" V Wh/p8߆u3ts[@7?+hlw{Q,lXO3o)e+=^`ղ}ODKLa?ha;@o#O0S]8q8 jFٛ@z<6؉oۛZ^ym"Nk[XcyMqdTFiF7m;*"†g<+MDG#NIXX8E25g?VĜ8@+8aQ--pi vHxz nIP(DIϚ(IډFំ7W*_Q&tR*^1)~I=??{WF ˊ̌'{>DjHbFTxUU ;Zi%d- hݏmhI2Qٍi8%ȃ"S~qOpxcVpDK\wkW_-̕!7bɣXy6u?7x_m?įm*keaG BTΎԽ|O+T =:q#5= #X3h3=#-l%+k[Z TZwSrbҬbrdW5֒MS֐ak _ ݵF62PH8ލ,q(cxj:eZ^^9zsx35/'Ƃx0vk[%mŪoh8:si?2m~ͮZiƟ:Z;79\\[zK+g bҗܳKsճ|3q؜ttT,jk++h+|L\)ޡm5͏2l/N)zQ{q(qP̡ۧWa|5S(^ ߬8į¡'Su{O`Q\`g7)3}켿>]ㇻn>-(ݔ͈7%{ xܦeo"&ܹ/oMOҠXlN*L(I>i ʘJAJ0% C"QӘ`9֤rnm_N?[8ڹpb(8#Mz6cO KJ'I?8"7I s2Ɂ%(݈UcAоP'.H=\'MeoR4:+P8s<-*K4g7)G}qSm:HX]f DOGKC"9d'ZmޞO7@Rw;m+ڜ69_Z v!/t-w̑TF0. 2Q@Ck!iɂ|^u @(~~[Y]͍I]vWآq8eۻٗjo~0՛|f7{6ӱJ/uw͠]z| o6gZ' Qe'qʐ#ߧ3wv3%~ ^SF]r#dWR@Rgg@wS-FY27O dE8r/HmxmH=ImHߦd;yK|$7y_*N%0㞄⵿ޥ`7߆/Wvp0X 0'[yyc#@bYtËvlJ@sb|Z8KOcSQ锣9fd9 tpu̱3u# T2,5@2D9  # o%!%cF܀%ҟtt3I8m,QHтAX;Z?u=Iw76' t>&ޕ^dd 2:](1$DMp3gVu/f1$c&k93,$yu.Hh:]B]Ϥ>3!uDni]{&'Ps[;mr kn<mCRZA"11 Z d"ZZ;S.roQٜ:$ýStgG';{ySV2XRW#:(YAvzRJ(gVMCbr.J3w4i4’ &c3$j%K ;Fvn|R[]{Q*AaَɓTOD!"0f - [Rk9bOZ 9ڈԢT5>Ef &PkMl+/nwQpN9gD1LҗD1A*60&h)bL+]^GiZe|T-!G=Bѳ9l_ {1Ҭܪz+#^« =2Mt>U{[VT*s4/֌ H6_uH \ZuNtO̸䤽Y{i$ō2֜Am#lag'SNi) SG אгT2W 5΋"9rA/MEw&`oN΋yofH֛82@9/^;ec..7F_OVX#tΙARnp#M$5% ^fD V"ut1:cB*wٓ+Rs2A߹LmWhЁB# eƨʋLY; d2Bm/ʞmuz܍{ܵHI$wt eeCK'tЍzmGһu>KFþc$cҞMǦ5)VRj`/ɰ.[u!nڊR2|;zGo~|nYmn{GG1ח+Ju#5 eeO"Cތik%R 譳$J5u u`hkPָ+d5dQJ M2 i4&i=mlJ q}hQD sz 3kmHٳX}.~,''@rֈn ֱ$*$ O5)Q#!`K$5n]]-pr,O2( ZG)x>ۉH7k@IfFyrk]fL~yO۝А&@D;5FSH\8>޻>GKw}0YY2R3E[J3h ˷7fm%+wZ거j^gŅ6]Fh[3F+vPln3@5TѫiE6\PY1z m~<3l 3LW8 ]D_t7otQQheЦjʤ53L+.hƉOdA}ר.:ΘdJ2φkUR=>q7G?.:Ϲb 9+\)D2F/w2Y"9^Z%iBF;jEM7զF1ɠ¾2M9TZ6{ u N L=fo:BK;Z6G-O -7oaO[UGÏ/;ZA&{@ק}zF!jP뛳K{s:)%sO^Q2{oF#i\X!u@2p8f5x9uHvN3p&zۈNx|rZ,aHm4 3vV/z#~ j;v6/ч*~Hf#݌F.q GR,=Ӂv]35"um_Gth}y;]Dre~ʌ#17n鋷SnFpZNQYHtx7&4^7~]kБ;GCb^Q4&v dmhfzgfd1uOQe| *bNkMr{uz;ъ0P "}\'p[~㛋ˣLM.Lo7Kn0w:f1>HR5*A ,آR]WTݔ-Ҭۨ,Қ_<}ιdڤSu%()VD "%z W+qW˲^nvoy@(NcΦFX,;(5}¿e1RC>.:,yRվc6`;,iMވ+;=+=bV%yU!g`U!?7nFh"  /&pU&652 Q:J']4)2x1Q %\Z֙jr[Qveo[) EEw+F/F>TFr7y1 {?m'׺.d8|w:smg(:78~?'wy :Ng!O|v?5*(w2ruK,]8c3V7b]E[V|qKδ/~x#fpF;bbf=d[k+VEtCNQd֊ΰR+J7c RvsâMP^kb_&3|;aFaz<+~jr;H$ۓ,X-x'?_b³^hgʔ2J)5]ڈ"\Is&HdG'f{w%~7(Ӻ/$-u؄$p֐E*aS@-? WkhvL aK 6L?c!LuwohTP0S2RU\LZVKpN`o6 {2#)@y8ǷkE{z14Ol?jgȁx˶J!ko5y|^2'3C6a] шWd<9*-sT0lvn}Rئ(G4/^;Q5璸),B;j\E㳭fW疒Vp ]ʛRa\Y~6gԿl-)ɪ]RJ:raOMDV~s`VRJ;'jkqg< #oPce6NA&?__y L1ɋ/>ߢObBOV2MQ3<T3>J2+!;S2 Mt!0Ϙ5y06ڔ7 %N 8(y# 0qY6T,\;ab?_}75/W(;ϧ p8xAǨ/}fr2WdĊ.>qsjpѐ&5k!EM((B)5Ks塿x4S0hϸsxgAp/ njd4I~ShD7sz[~[`@D ncߵ@iG ~RO&a/J+w+!x>vFCw"goܭ~&CK';мRJP*TSI&pֻ[mY;{j~t2{oT::vд. 
0$`( a Q2@ew:0 YH^ 6h]]2G.}o U-GYfG PmD/`9v^-ZE"cW ^ Aox3ֿdQ,yU>w+BnV/Gl bVr=!^Jn]}M_IgqI\\8'IȚ`dћ;ws{%wnSH"^(@e}/ǵzp -8 m6oz|pI|v#yV>w&[ga.]4E%4ޓFOy&cV.dm%DV'QN [MfDKcoQj޵q#E0w&p660'i$'!Fh[nx0lͨWdXu/rCq,f8sh㎇J#TTTb8*=TB˫WTBe'+*KJDaobgJ12ahj,ʼ`ו<t7)4Ƹ⊤B<Uk .ĩ[ʃ́hCjAAbwhu"n]4_/)Ӆ!!Ԕڧ[*ɅaH6Y0⍈v~{RuGx\:цǔN5oX_e(J?;Ţ'_\;椷Ao #Ź\?g/[bBitkzlϾm9\.YP /kU /^)VwicH~ԑx1 }XҦ~x6/ڕeeH}6Jt~݁DB)xmy]{mJx{~|*O#*\x 8% c6Z8EXk4{O[/,Z/friDz>_ϢFJY)7WU龑BE؏LV_0 o ˫iDWQFUh;ɍ:)')EAlćXmw E;| W?[/"Z#L7kh7тlwK32tJш2(Pi[ˠ(xr|HS ;M5ǫ+[}k[e3XR0rՅ׉c~3|\|N.L7q!{{87W~v^<5a- ǍJBΰUM"xn$|%Oc]{8# @ޅKqpD< iuk4Nk$h\ 6f$;'XÄd=1)r|ԯD@1^uX 9aw X-}R LrmOg"HJnS塐pU@H-6e|OUYD5b3hV,[cFi]Xq&=$ˢ*$xbYpPRBB@U9Z򆃑 O0Uaf;eΚ")yǓEtR͠9OS"!A/ұ(=e$ C;^֌b0Cf:%j$"=GE?sM _&5aYtLUnz)fWqӢ 쐵KGa .^H< &3I=F\8i*>%H9V,I:y E`_ UV3t F2vmB0|T(nHS pW@RUBq%^jr';1軖fՄeݞs7j}r,R?ru3BSL)cv=`'u-Y-N| e%QK}7Ac#AePZyϓ0 ܀kߵ%vUB`XkϋZEx:̫F¥$V]͇_fBnw, eu͗<񞅣st8x߿#ᄒ,`I/W\/oޞ%fg&'B}uvr1~;_pS5'YзU=+ϯ;ڶ1&=|'8B)Ǖ&MC8>|(FqFm++@" mxS2Ýv^Ʒ;ȿ;jURnuR\5 ;ߨU(*w8,xvYy7;(%NW_[If;pD2ªT3e_"uC,ܮQiQ9х̓ SK +8{ލAsKzRarzEׯ(=&u tP hɽhaP#q p0Zi<4IbR$h" i@{e+,r(Iβդү_(˄3s"x8$ /"QeI<\ =ɯfF1bl-ɨ(-jSeDy\AAL@ڮPbt\d_+Z-PF"tR%9?Q,;z z"#O?qk Jc ސQÄeX7|pP 1DŽ;kK$)-Z Èc WBS0#`\sNJtC<{v$7Bc\g/ y)_ e wSS$ц8hѢ1 e:B qq ō $Hi+¤̈́HSbss,AD?̞_[VȽ:d?TXïiBj\{4א;ȓk.r%hiKäi◭)pcmDZ,!4pPʃ2!o Q`k3 qXSoq32}mF1^˩:b@$s4:P!H>5G)egt`tO=vI!Z~jp$JY0%t^[TEH,pqzĄ DOIE>1!2p/l86zV z”Z Jaj&et'IYzM)[a)[]g>DS2܍]VpERIPP")a)a4踓M]n06 tC!`XgCjKLJY6) Ayǥ6L G=0h#Ie]j$ Õ|FBb31 $;99sp~BfEozN}$dwoj }i _ %;2tc_D2K#x(bq!Dy.AH+$1Q.a ˑmjjdH]^}_ OX Fbu!e,{;F 9Agc͹;Ysg˰~A-v:w* E+}@ǸӼk*׼Eyǘ!"-vl8F |v:Ԯ(.1⑑Eʚ+ 9*^^VJPElnqĩM}vNXIq;E.q]RBѶn ;ʹqV9`%%'.\KhyCQ1%5p:5\GW.DN%zpiw3mbZ>^OI5--)ޓ΋H^1y1X[dْ]4:Wpqd!D|uIzج^Hb(\UB`0p(WD@i {#`!DYe"D4TE:ww2 %BE\$S7| Q< ;y<+y e-ochJ$=Rp!ͼ$FʊVJ#oyR]R[8зoo~");@CCjc"ЁˁʁKr jg2 Z/93?9t4 tOiklL+ltsD¥*~}NAw q V5`h$ xUClНO#<ٻ6+EZ^  .qQMhUSG'PUw{3F=F\(r2ggWw$/q'۽$^r(|3"vbJ3fІNZJ=O"db@LekS@ZEsesiȚI+i;*ɟCpYaۇݏ}W rR]:|U>*p*.d7eI\DAAF!Tkw+ 6 π*i2ў3C}Yœ/8 "b?> 2#"!R~O{J!+!L]K-rq<'3J7R]F~p }?qHK&p ȁB D,G\M:?ծ6dG-xWxSk=E\ꀼ\ [.0h;|hv#OJG|.^P:<[ݟu}PL\w̱Zl ͎qs[VGO/xjq*بUmp4m{jtfulܨ5Vq>RH5TG&MvѬvM(i|me|¸m罶1HOEak'  "&9*4<^$2~lШʀ1ŵp1 DI\QCDžN(QIZb9ZVzv_Ré /vW5UjJsܕ⤲ z{m_r{r3\=CvœlU<2Ь$BU`R"畦߶oNϿĝ߿]k~=x5E4&Xa@<Hiu Qg^ *mereY{8HaE5a!ۜM[*p֒ h1\FxY ً`N3`yBmGe@)mW敓Q^LܢBA25񜳖PNP5 3$|!ȫ(;\2v Nw$XY.o[= ݥnWo|k11ݔ'ZS@n,X(=jQwk~wȕ2QBcfXP],#L*EIrzY.<ƺ^22jծF{akwvۃp2ISn;z1Sp]05BL)LCjSJ:u DOFB )'qZPyqʠ\o8nQmW4z:k ,D5h2ƽ4 3 T2䘪,d8Y&A3+e"y>j)0DAHX5(c ƸKYeDEX ҂ތ]{NRi)lA@~eU/+=%~,B'hX>0wi»:a7'ǏWI1g?Cػɧx=r1VjuMJo7b 5b,s;RTBF*ZB=D#뾙~҂KO(;THs+iRz#=0LVZ҄CO,dr瓷r%MΝJ>|ƍ˗Ş..Zb`vG9N^ w!Fjo'[$v11}΍HrC@MTU{i4gyqO=T\ܜq} 9ٟPH9rmS߃Yn FZbPuZv۟W%Phmr])^!ntn[ K5*~L%2f~O7KlM5X3~* rmtgM3_l趞P yC(h `=Wr}=F6h!?â}8p%%̨ݏr`2"fl_ixq+NNJz-u 䍱Jh(814s%\6p 8(Հvf/޺Q)|qBl|s7+z2jL]+%22f{={ uxz s!p[}4U[֖fۖl*vi ꨊn[ rh6K4l1{>)3f^9).jؓOS$%> [:7G=?R+:o5t k7&b[SjXBG$A||؍6&7>:yϢ Մ xM̚p=Ķ8cr>X7+H^f.NWacM-,y7i<ۈ~$1`<LDМ/ĐAF+NOo ;d)*ok|\R3rAb^Z#(x.hI22tZr蹪-%ȽKm/X s4^^bw)1BR,8ӧsoMfooB|CZAP40?BA`A$p%'8aAׂko-FMÄ赃M(lŵ lS2gR!JP^JQ3>̣@ܨAEC;* CDC!Ff8M-MIse)REnM2z#kX.IrJJ7+TD8 o19Xhm VM*q9&^X3>8 f:,~rvH!Pޑox泟 N7w-jzkGQ}Vl{}hgo\k f/uaӁ)6c)DN뼫)t[JhkEsl0uvۏ q,!D Z=R`bbC%Ezњk几^QmQ䅝"%%ŃHQ}Q x`RT_jDb f({juV[+r1Z IiFV8jMh8%^I#.$hus@9=T4.W@4hIQ,S/))'##0b\יwAp J'ݿݸH1i^ѷxBXጬÔdIʘr6;Ivd{Ԧ 1.kgV׵58bu .3p)8NF7i`w-XtcJ(d0_}6ӳ={Y :ݻed-؏X &,41`إttmǦ^W njgNL*ǪF19&#&u5.#up(xJ<i؀ϮFAI8H9g͑:)G^Sc$zaۃV2FiI'̡:8NsU%g2kܫP֩]&w z8?`V[urm׷@݂S|$=qp=t.Tt~`8h^̢PP5z#}+N۟aF. LVp{EJ\s|V>;x˻GZ&R+5{u7?GUy-c]͜Lˣ?3U}([&Iiѹ6F"nKy,?;{oyJzÐUyŹ|l DlӉJ{\'~'H]x,s&JG]Buw $9Ld|Qa@P ľ:Ņ+~ߺE!S(ás4uV$??Zc+$tp+m=pH<65c3/=ֈV$ps<ڭ5*JHXH yoY)E@uPjz 6݆. 
40Ғӥ@ޟ%Fnfj/.LŭՎIdⷸf( H'`9P n]15#SQql"2DL w ]Ё([0`$>h[ ۆs9SD[rKı2g *MJZ$A#)S3 ,YAN4P3"rE $ȠA;Juxprbb`MQ̐ c@~~(#z)B6:":QAcR)R(@V aQt5@0 AHKYc*{밂cF.樺z+T2= *Wv  o6Φ,9XCHb,FHt֒dV}-)l,  ԂJnWGQR8x:ELKW,_j&C^2ϴᗓH aXТ> H9lq0Y4ha$#jXKi9 KuATJ!(hp8`l\zzX32DRpvq`&sO"P0':E (xE|*ରq ~As7cbix#VĪZU(.zę-lQzԋH3c,pE}*a"#LUr"5Qex-0(&EJ8*n*n0$!H:X]@@;h+߶ SEwwUtiB$#xCnry-ot|(]0z807~xgXD+% sY Kq*\-Cҩy"D|dfOƢ^ABVɾHMZ0}bbZaBrP.}) Ц\/#-L,B9`呔Š 10 Lœ(aQ{n2KKEDZ["٢&"B,=B8#AП *HfQcE2fbHPޤ0tsGg|atr%Ǯ?'g'.Lź|vY^->_oߛz=_ʶnFkЫ$boYOrüHweuRa`,'/TgWY=C; ۱b;K]TR|C7US}ciذK3\$c ;D.y{؞ʠ)ffWPOmU⚍Ml,Pt^oxc~xiM&l=La^NXҌNK_L)0~=|&cS"/e'Kwk&w~ D?ynzYLNvW#Vr,R#:-͐|f)J7Tv[.uN;hB'NK]knm̐|>uS!t:w~q*ɻ3box;۽>y XxS^?"NoK\dDE .@Qa L1"o.edy"# Hr֖hFZm&G19;mgbiMdm>-rQ}'#^O2*#&;Wz9d4â)JC%{ k>ue-~ hRbJ/ҧSK-Z* h0-vo.4lv3;2ŷ^lUp~I)OSms:Wڜ l6~LwA芀g5x[ݧq6 ҎX8nfH ݺ?*w.wcFѮ9nSnenRmd/3Y[6[U*R|stc qs쁁|31I5Q,.*!?lhG_mq1 n|ݕA #aT^?n"R|<¬%9oC_!s ęd3:][oG+_NWz,`qĈ}9}P$CRv阮ːspc+ⰧKWuWW1Y"~բx@8gZra.K]qJ EY Fn/U7&ʛRÖwtكQn<yŻ <e4xYfURod]3W3xyO{*bGeF[;x4; {Q{J/Sf{J=5(\$Dlʇ^2xDէUy>l+6?:7~ge/KN̋Fɧ9drI;t~Nw꫼U]a JWd@F5jE[yֽ 4Y(-#X؞.*e{v vut긙*rT!g+?~޺gu3/DwWlmr&蔒Lݮnl_^: -l< \XG2^I#ֹ%YH~_H%3 (FIЌK2dT RjͫwΧZ\~pnֲӽpٯX7w?< w$ woQX~K̡gH{i0W!:9}f?=B,3 F٧?hn %A[ ۽?Qk 8zD{_+7 naW$pd5len}wl|?H,rV_*Й9NOE!e-e H,^FV@W3w-ХjE65"߽$Rۥ㔬0kA {eW_wOW.-Mu.79h߾CEq6܎oh ,P,(sxDeE9fG+olY%ۊD~HN=ϟB&q}γg?jUiڬ:Ќ6.^;ln8;dYNOD@`꭯b/J {^ë=+Y\YLƬw/6Nbmse5g FiaeNCn~ʱy3僠=>._٫eyslQCyXX!nñ }Ʃe ;Yr?W{E64?=~  yzKVß)S}>GKg 4'/^r  I.IoNwU#EC9k7܏')Ց5rFQyr TyDzY`D/IJ޳4W%z9vٜR8YRZ\O٤nB,:1jE HO Py}XenF4xO#uMimDڲh}R t U 2\5)(6ZZ%RwTot<ת@_ѿD8-YQ*46!ċ, mI=ٽh )3^ /44 3bCܐg8 8gats0/n.]^6"9mKNeFG,eZN×\K!l0-:v,e}6v<#+$:]nOQ)lyy1KNu)g+ m.6Ⅾv>'s.I+5{EK]c67;ÒEdn4Z~5L-2ð̐$"3>ϓml;M(jr7Z eۅ7f/'jF7l%g2sMKEx͈_vÏoqlRud$t1eS ]Ƹa%k"oOTXƼ6DxmY)NG qBsE s՞҂<݃LZX)$uLY WZ&&cD[ T+kX1RPnѳF9*oe[$hD5D? {kD|u;YCZo/`[ةL˩;/Go}x9-"m5|%AMn0b< sNy#peR_{ڰӝGJMʼn^@k@#%"i T(8Ah)~rWIz]J(j%"&"TN[)&Ah &?N2/ A@('*WN&R/Npl.~^D}6/:GϮw.ƹ**Ąb@PZǽ$hJ6x%I.>(gysBY( ̨JCOAVDP\$q2I{ۓb`7}TJ4l2BVwO$/Wla$"6SkkSu ^h޸$'`"ZbQR㏠:RH Yqkkƽ=gY!e$$Ȧ6; ANdeҟ$9jMHtQtl:%x2HL@,yzzU>րYL.)mL2.FeJEC2a1"FSs`c4)z2|gȍC"1ew;_5m;jQlgp1q+BъE͒$3UxHKihTz)C-͊x"ɈOJ3Y^)3բs̴3Mw`&T$HLςŝݏĪ .ķ)?pXk zJGyXf gC&Ks%q߄8.az> rjaT7rOgϩZclKH,꜕*79Q @NJGWPIY~^z÷;^!`Kz \Ѓ&5+]K6o}iw.@f/'Q[Ƃs)xǹv'RCLl]q:](.kM"-i?SXm %: +-&?M\kݛGj`B*uZ~^8X紪[(M=s*&!A[ *)![DQ &:ėP)w#~_ݘ-.hif7,~04\dߥ/=g,*}N֢3J7 SSQz $팠 f!Fa/i]~A]:DxFInz(k) GJMwR}I@mw_fw\I8tȩMj* [(;qz;""ڸ&=bۚu Q$HUEjy@Mߚ˯ q눀g/vX- ?HPOGW-x]$8[W7L0لC?HM,÷FhG!%~ݤ~Qfo%]}wǏw_F}?X0'wAٻ6dW=~ݳFq "uxO5)3CJ"`5j4鎭ڎDޕA j=y}zwG<7h| f:KKo?"yS^2̓//s#~ѯdBq~:HLqɠ†4D`{/~LALAjP`jB{9 18zF ^bsJ:ϘCG<%. H{h?dUdܗ[Ҝ#Sw'PwJ tGXt]+䲵GHa#Qۆ#h "["olӝ+x%*M3JUdhtSr}7HftWp#[>>FNu C9GO[}<?PՑקݜ:ćdy󹶦S( '4wx i*eP9iwwEHÇ3NG0] hs]H~0/ѧLLh>&-憶:tFng4{ͷlA)22#[DmD"e_6e5ITOtG5i@&AlS _N[Ϝ i|4wUܢ)˫ݡC(Z]3=ghu:U帧 n{m{z@hx@y@yaV5T7?jq|q]Un:9 <^ftcޥUN=NB%˧7K/IKrDUE~حJɿ]0Jn,dz-vmi,J~k VLjt.K7,?&E]HΝK_סϙVwݼ8e0>^WJ?ʺ@{TI_/o7d$ 'ӕ|B mA"Ϳ毳ޝgLB&~MeGSsʎ{0->$GݝOѼͦ{=駤+wv ^:X:ɸ'\bmt^΋!Od:AYv,{$eJPMZZ\Qۦ^PBj-[ƱCjUAA M-3, RXT? w*X< FlQA{@Bas7e^*e,pJd3p`H` NR_XF)XT{sT` "`G4)vD–Za{]ΣF&p4N*s3ACȬn_=Y޶g )T3ʾ^lE<$#"n Ýa3}8PLE1X*PОDy'ܫT mh(i LK$Ga&r#:8DZc"1168 c`Q.#uu4ֽFt%!`'': =䃊WCs /Ch綞'/CMW'?㟿f3֤r}jg?{wkY p%H\8h 7 : d֢;y2.e1ezK*\].| `,L&ô9L>2ҕ%9TT躧bޯ!r`DjxVIE_C<2gE%U{*zO>-+ V⎊xd Mg@e •+F:\-CD!*\CuyopSeؚNzqp9Ml`nOg >8O.#!~vq+'2z˄TCdl+Cpa<-6cr#υL&_3:QJ=k#< +?LS\A%)l)D= Z2Qz_lh R N[[oKmJvGޥ*j%^τ-UwlqBOknx&!=-zVHҡv0}˶̚(UFτ;$e`R$yύn ]O&״\+tĮ "Whs30>gzf}痃Z F\ ͷnKZk-ًB_u͹DɔΥfz_jNvdhASa[0A5s Zyߊh 6 zCbZ/ˌ(b^=SBʼmﺬxq5^,ٸAHD9mk! 
#F`TJ0 E.z*pP k0+me?U e}O~zWL 0 i@(Lox|i%Bu٥OY]VYr u:(L:nHq\ξ{-@)=(laZLnHB^Ȕ뱊MZt@햋Amcv2B#^ݲZW5!!/\DsdJ=c 3z :a2/$x;&7_,Ń=جZ4)q91A^,H\jU~h;gD(zluҬG7``K;]XؠHyrE%J@''tHO'fS'`#(kހ:xuVQ?yR_Rk;9ĠE`:HRB6e>* 40Ғx״f9ҊWJ4]JǁWSwt36eq1kjMjx}wg:Pw0Xb1cBoAח \a..3`H|.uD']iS-{u[E4C`˅:DS0T?kǥФa,L)˔-hE]^Qؕ!QShc"Pfe`uYq {U/ [-.s껛kc Ԣ8)՛Lj4Dz5wKxj1*0Bn6ZR˷-a:v3~7:\ȗN៳i}df>xz:4ovy`?,?0ij{f0'3+VDO F?9KTeS"Xی髍\ys"1K$P0pͮ.7 3~c*L|(SO炳{Tlubd% %YŽVt\h]vRqNL L2Lc>jMTFeVe`/F!tSXo>J6(KLN +p0BdE0] ܦ^&t{7Mg0]*1^+)ŭ)||0'· 3_Mk x, FG;Dw+EO\.\5wLl~ê`ŠHyEs{:_1=Hjy|Z?8Fer|߆<6l['B6,*Qhb%ņ6n! σmrDNZ(u#mU1f8T5v*>FeZw5ZҼ#nu /,|{xh9>O /O(~:#Aki1>)C_mrY?ue7w,S1Ru[2ZẌ8J; pCzǁ"V CV NV7@q!"8 W[.ҽI{oa`6IrU{[:\Vv];/.D.y猑TRU P)/?}?Uȍ m<@}x,|OoH h}y]yC m!wUle~~;%JB-a*7UmE !5:A -U[XGuaX\ww"Mr$pFJd.ȴ' ڀC,TVϜ_>FpHA(R:FE4+H"H$6BEL(arg3RF" t$ )P!vk[9bZ"-5Ak`535ev*qk*38ږ+֒?[*܍WKd`j3q0-x/ 9jGHHϭ1ji1B(N|=Q2:B0EL!Qd(/R 0Ȱ7+\+*ˀ]&O]aeK߯8ƭQ=dbA~MY BNQ6v&6> 12s!frk Ĭg+=Ǿȉ3FNؙ6>J}$؜M>[1?.L<5O*z~[WEZ,VB8udyv.,!_^moU"_WW7#.s* N OvhP v.DIư0-V}-m^?߽׋,#FyU?\_7C Vyy ;;eUyw ,]~6lT#뎳ݙ3;^42Ðj"cBE9HÙqd#*kմ5؝j}SHXөJ $C+I~]H$91'+ffƜꖓ]a Yt怭0Kn@ϯWLGDwE_cJv6λl0D9Fc}q/< t$%s( MN"x6oËӓ(ڼ@ֽGPuB=yB̧g pNDXAWyz w ADqjZK&^ "b*=I_(SV.7oKSF1r47Td\q^W[ 3k\; jKhXbJ̎,h}.3,eC,ja#v~ Մ5a/UZ,{ {ZX5>cl&:!3 GqH"[gT!KF$G=ٛW#IcjN<'?d3d)%9{|%"M-\nv;s&FxD\V?4SpH8_DCCrm)Rn 6j 'c6ӛcFwL2GTA^yv+Z=-3TunA"TTgH S:Q꾝\R6A!x+HԋJQΖfHQ$ΥBGa )|,MXښ奈um{Xo_oѩ$ d[\Rh] ÁvȤ4‰)VY6Hl n+J_~Fљl=Ƥ Nk69ʬ_t=QpC/w-֟$AU߽`\ǟ_>_OcFF{ظ]9j꿚mѡ0 !2ݘjHKFS0z˜:q׿4/6i%5<{ȴQ ЂaRfܝ-wgL>.Y4uip]xVZy"}05q{po (*-~+"Y;M]72_+%n^.$բ^.n~:w"2$$^' cVZmVmbͽZ~XMd7Bb`Ck*1| }6aFÜhcSWyĕZegs:Yr0î 6HMCCC &4d)e?},}BcO)'tLӣ6:=H0ԆUSu6GR Z!Mԃ H9G]=8P/Yr3ok_XTv,NqYVtPd ;~AF!R̠$ԣeCQҔ&%f%xy$NF2g_)ȇ?i2aLԨh^F (R5RKq,{ ^%AsG q-Eʙ&sl܋ω67.s H!fYzɞv^4.6P^&kNI :@qݯŽjIz¤w 6y,-yP| :$g~;. Vbl\핉J0Oʼnj9jTͬCmA$y#ՓB~Ub"+m*љbx#%Ԣ 6Xit iT́%bCeIm)cmk*;5ǁAAVM$w^;:3cboٗj1Zs9("J*ƒ 65x$FB5ETs9-c.OzGo7y6N2KVX_\}q;fj'Yl:\,g Ŧ뉎ƙ"$HDػYDqL2[L=.y_6 {o6"{|Aito3GLg>df2L{&wjluw:z{vx-a2 n߭*wLjԐ\]{:Ӌh.s9FejKc=ɽV]J{- [{&v_Qr|n~9 xCs:?;n>6J u郋t^%*Ww`Jƒ2,tʊJj]6oPmowfЏ9R`Ah/~1J<[t}1Ro?bkwge<C״ /MvUᨧ޳*kgӳ1@ g:Po}jVPᄜķ+fyfJXS}Z$кY}|DmZ;5ӫVi9H:2!@ǾT.yYsz!FKm\O*JurEƜ8 ɚ j6|A$Vv=i;goob꽶7î6Gq#p.FU?t|luj5W-ZE;2@ tPdTPHr&p>+,tþ"dNKh+;IJV=[ctࢦLQ| C*N>qZr&#E!lcS2Ōtdm=a {&B9>}:pd+m*؃cIXNtʚ^"m;TQz aժbWI%],(@{g${0BF);Q|`YG !Wx䰿%J` p&2'c Ɖ$D̆8\I!uN*gdBtR~v jH!wZ jk-JwUox!J_Odd_q`PG]%h3-NPdZtxZ״D.t]-Oqrd"9ś~PFc!Ĺ̿O ?H_% tbǩy;SʈYz-&JJ+:K_cE1.Ԓ\==d)u3S*ؖf_8c{0\Olb O ߖYdH,n ƶ1 1C%)Gsa9jLxs9L=*E]5qa:/vK|,>ϲz09.?Fɕׅ6g%mN^wp$y1[+xsc&( ( 0޺L\7>KbG3w />m)x~[P"0G <*ՅFR V Qiڍ!+fg'O2Pzw6X'B03zX/c@59+KJjYxGSw~+($H\EWI[ӳ(SWIz0;3͊s6x?%`ݐ:ZGƕq׹U@&": ~KѸrZaכ"F/ifyLScNRb  H"1#hR?p E1np;n`VP*j\[ՒRiѠgKMVhqӶ½;/SBY6͉uGx3cuE7em7cQ`iuϷ5 IrWhnNJ _eFe||<kv71ނKI7X 2o.ʂ+Y?H]Zoހt"HZpJߒwR2RMA7 Xh^pu9ےT"chEz( U 8KĜ0|E)WNֺe~,߅rEUC i2zNn?ǵ-z7?3%SD29bIʲI.Ss0_֎Wqջsϙ-/>KOŋOǂSBkQ(dc%% m\Y0bc_^|m,^JZ^|PpGCj qYR'Ḫ~Q FW(gv)rqR"ԶZ?L@l?}KbE0›:78TQq k ;N{SPe߽!b SQr@]M5)qhq$$QpSa<0& Þ,Lmssax#c KLARܘ8yŢtN9wO^}|+!Kt$s}t}gs{E"eʩ8z9H+,:Cz/;At"%y3)cT O{<r;zMI}^ yZ^M$qчʺS9}7s&[[٥t"]vzĶE P,D̚^R,Q Tz|t! 
w JWې7^mAªz%c!|HͧM=&"ƉzWa=xY8 ?O[YPM ]b.G|x}ŗwO׷֬=9tR~2zwm%.0H l{zlIzƒԆQTKONxkTZh@ٟLj ^i5PVJz^C*I;py_!W (ėwJJ1 n~nNjwMF';bElج:0= R$GRYt%]|X4e }x 7e[/V+a:β19k|P|pz }/џ{3 Gb-ȎN $?틉^:N/;mf^;K'Lk[YQŒ#w >ޏV}F1\~򠾾-~qOީc(!0)^Q\0VaJ* 0m`:6"./ORDA̜e˫|9k\Є7z @=OU-lm/_W"884͜, 0|'Pg$z~?rM—ڢ_K*?Mo6zNTs2*hX?p| ܹyyl׃,k${w͠g1If|vfN&lL*[=u) Ld޲A-=^O龟X+<ڻ?0eyHŀzSngg%!Vq2$\ƪTwOI ׼c+@%N$//Q' iA<[ ټӀ| Y_'0;$֣iI:fiK\xIיx;ɼ[ЪFǴ2)Q?B)QHS[YO>ESʵzDj_~riyxiJCڔ S#ٙR$RaBYКHJq=f UtVDHJƽy0׫$,E@XLkYw>o1F-`_q9uaPSRrW\U,W~˕_U˕F=XHI1ڔ-DqJV"aVJAIz>/hn!T>&DO"lځ란 W\;GPy/ѭ~y:q?0#,a)rMS΢#|P!Oghx w].Wj=qW}PsD#/{Ko!N1`={;C6|atv4+_ ?ڴgh`[8y!!IXOs7Lh'fӸ=Z]MU꼝رd6+c?k$9^ i- 8JnX.Sֳ0AiDT.I0Z!`Rqb9BR.5#Fa>GPP6m>¯ZVmhȡ=ج)Lt搤bPDb@.'hd9:lKF;)7(mCTy9 XWbrԉ%&\2*hɱbR(F9a HE) 7y@@k!7ʀ abX[qe"߲`T~Uho%)=Rˆ)@6iYP8"^)̓ Wt!7j[ZUDZ;%*znn;&|mFS^sxb^62 7N -WH eY|u@L*ߍXXGY*%%G:X% %($P*X+ 0_,q.ZH| 8Jk Tppă6KFF"Ox{DJ 5[ijrM/3߃P) ʫZ5xꧻm$:bq^` E*Fk/ 9\mQe9f \rrf(/t`r.5h+E\?TVf*O y4qu>𧿞oJcS։7h'iu7ڀ\1Qs-N>Q# sJ(h}|ebk5͖QҊR_:+"Ѕ7X?\݇M# `K|/(>C|ao[ GnZP3i@Ӛ&?_?_|a3¨MrZp,wӰ?7oҚo#9jjK#g!Ω+8@X>џ1M{gb^q6oR#bpr3{]^y?N҂rvR?{ܸJ/{r6qf&Le2\ 8%ǒ? JQ6SX(u펣Rט#G++s AdpސF/}@C CU}^,}lV-Բ6&Z^\{aԎ){|(-PUQ\ng7ToBgB\7nH+{ן Kx;sz!he0U|LРps7'SY`eו wfylMa m)&O~=?1[ͬ'[U(kWnUls"K ujʡ)BN8j(倏fvAB+=u5X+7*6E ~Hh3G`  R*2˹NFTK R`N H(LߡxCD)9n 6ĢŹ=l(! pwf@#jzoœwM]ސ0˩,yn.Ώ~Ow$A* F>(yaI~7>Gr᩷wp dX1;|3u?ܾQ\S?.wndT.;9V?Wfw84}EO$ Kdz3v>$CuI&a(ѢNr˗QOSE絞+8޹8LĞ앖|3NU'jb[f:ȶT_X9(C'SQoҒFm/qԼĴΌrRx~Gc'IQ0oZ\T1!ؽq-w@[7}:zRHO`UQ{OP|Ά7k.CIUp$*w)@?Sì,"7v7> >69M="-U($k;1 P![9i~&Rx@IM:~G24j*_47z{-_֔7VvbU3.k~ˤ&t K&-͐ũcp>v JP^2.kYl(j$*뷽J)AlKs6u8X \U4>Iܡ Zľ @1V3}.Kj )8 OX#O ^bҰAk\Կr 2T qƸX\:Rt}@vɎ@I?gdd}X_,*|F 2~4. Y/ I\2~6'O>-MwO*83*: |' S1)FiƨH2L3e-MJ%ƈwO=r/_w4k^x5D]J1Clm0ڿ͋N?׮6a,*WL8/޻0zw~u1yw]9/1$ ~\,*KbX_#'1#K`6w]4};{cRٜ'/,n %4OIs^c^Q,2/v9u/vEx.)ݕw/uPwm$#{>mqjKl7n"#l4vuU^`aLSmŮխˍ6Ң3<^rBHvhYrVTF~)՚6\ i.Jx:D*=@'QwD_ωڞ+^ irӴ_( s 9ѫM\e(9WR 0%-~'fg?~R":Dޕ< ~zkԍ¯7jҦ)਌F#~F >Z^PfsU {W1[gkGkYnIXh*T&Nb0(z<%[+mr'f+]] ư-G F*A1 7iy(\Pv[IQ8;x쨓H.hz =ƭ;VaQDv;] xB g?zsvh{]O@|0G~vb26 '`.lh?yt=}ï|{mn*h?nUma;U֕]Թ@^R*R[.v*rU.@}buh\.=}޺ܶ'uomDMW gtJ[>em)XHactǘяG^!~o᭟+ Q (?nŪc,Q4tu4:mRk{Yo҇/iwKL?fmxb':FgX`q+WJ"pdωӚLwtiG*^٫#{Uܑ- c#Fɴ$)f*%d6%*j |DnepIG>hՇWI t  GѮpxhS3Z43aˀ,dLxLhW Õ1I| JQ8U*%⊋[#8CXuR xSȭ^E/ ==,]SWSK" V.iP4 (yo8J% jIN=M=Y8iu)hөޢ{qL 0DRRU=&O=Sz/X"df]晆c@T ܤ-fºh`עUcbPWTA Ґ
Jjk2r*LfDtB!8a`ဝS0`epkK30+<8p΅0ΒףMdr*4N *zpTC UBBa i#D8coaT֜kR1.JO4RTk#p*ЩL `$q2ih݂]XvU;dx7m$JL J`Lrʷ0jRs`\); rkM5?ׁHt-eUͅ0x0>WX+y_]MO:L >Axuц+R$$3(aBIgr3Ia9cu28?!c6F2IaIm2덖&ZBh /ᬕF^Rt8 s Uι+ >=h߆VDij3^tKÊsu+v8f#.C :je6N[?Ł|\jw?iN&2}tuęDt+WR{UPHW<J[*5O"i6FuIA'%Un ڹ`Ph%(pL%ik֍q8JȥX{U,uZTG'?:GN+̺X$/CqX3Q1yA*| HgIliCMVZ?_?} 垫~a^K(B[j~ ɡ0NM!0 @^zS F/!1SH&i, RJR ɣr'Zڐst?'{ۍ4^mYF">w$Yc+! ߒ"PA!ԾS MZmwxkF1{Y:K0C NAC0kh#{Xd߂bkЮacuذ-gSBXFsVa )V"+% -)MRGTf:r%_>=&iSfcœ?6w}b,5[J[zݒxgb*g&%YԁSsSql΄5s`z͈/&/>Ve-uEb ,8#bsD-b5v[4t}'z0E5M>Czȸ!Bv/@`!#;0nxޖd'ӛ}:T3't/G6&i= l!+ twC&E5fՀM=9Svza7rދ}}U^}AKtxC{?i;sv>Ow1=Ow>߻:tdli "Z3FI)<4VCl{[wܮnLsX^aT HƼ"b:Bds0\Th/p!(/ք6ESڻ,ݾlܞ1{BNΛQi X9qߗpon: {~rI&K! lrrbAڡbMHRB宅Ѳm!U)/>^LkAqڹڪ1KnlĦ1B;Y J2A)ya e1SK`4wF-ÛP"ij 2[{pLN*" miR^d9y3BY iLv !3I~` {x[~TO;O&8RgT$/rn:׼jcwQRf *k!nYaAAr?@2Qewg)/E,J"\k 6phg{d3Q%&b>"zwL.w,k9b}!YZNh5!?6cPG=#YOٲ_՗|`*S^8YXO{Z0})/G^2R{}|2nْUE4#f4v=ZV N7X=:,SYSm;W:e%LS~q畡߿׾ɵ ?Z韋=OwMM4+%hU h-{*3Ե80cw2ͻY@r |E[Z?}_*O rqBKk^?VtWH[T"%RJHǦ͞>6{%!oI~.'6:^n./92F o``&v 4BD 6c`Բ3%{Zw8|?k9 >Ը8}m D)s-1y8~q0 ()fp 麁mQiۜy)*zt!-y(Pi|k'_l~|pN8!)@a2DB-\D(6*^&LnzqFj%j^/^*k&xZlA0dY,d〕Jj~Q{_0k4rd^@t:ˌl(B$$PV^klGZn!It#8Mu' hxͽWp/^Z`6a`NQ#2@n <1[RW-w~+͎x':n-&xj5rMw"F`gCF aɜhRFcZzŲyz\%SN>AqD$"du9<:|=WX-)q< 9)T d42p2 ɓ 6FNl w#ɨqHV>FG $^Fr*b󨳋שaF%V3!kua=b"s>*h#S"RX  L Jc67PGMr!f[l`_IB'(r̐ צhײ 5PI50A@m K9tUZk( ךNki8%"RW'#lSbkWmJ[m46RB:4x-E3r5ֻ(a"]RX@;yA 5;_j%i+LiI0L5~iDZGb&V9+(ٳYU+wAbKh/kA{c]Ai$cVEdj y6w)k{\TkZzR2mٰcR.zNbLD/2i ^*6P͛k& !5F'E !MRe6tl49V T1ID%C T6#̛xLmP d 0l%m}U&U-X;0cO! dyO;ʲZݐΪ Ntru\,Z";#Fq$`!<e,SJJĘEu1jIujV;=ke@* d}J a2jڳsTa.ҁT)6^,_9mة8}m &/df0U趔4,^Dͧ Z-FBp~L}|I1LX9E2m/3 lxF\emb7u;˾?^]ί~^\y{(%Hw~}z)^wna^+%|v1k&P3$PL8JvU}o|Fn_F1r<>5~OA"RixsWCDW\{舢}x. UqP~]^Oקf;(d}j,\VeWx#wP a%>l= ips =ҝ 'n2u)(޷h҈{g8e^Io@^o"\$\%$d0…^oR.Ӗ|v&K]&kgfyCa Pz|fgL6?=&F5qI%DJkwC $\>j:@6P=_c=nɹ)(+/P~b+Zu5D UIgnlO54VSo\mƑ^MNQ#m%)&D0f>u Zh4@'ޯQ-i^-( Zutp{~OG׿^]x%~)u/#Ӌq,yFvQn7_JJB,Hkv >v_X`fj~ЌOS?!%-o@3kvI""\G~}oGy#Pl,075uI4o\Գʽs!Q MGx]=zH.:fCy-ֲty{'~O,8k9LE*Vh(2 v rID1H6(q\Ygz6^0WDՓg'y` 5d!C! [Ef%~W0_PZ++1F>dafl3圛9A$Pr>n|\GbqҪ'|GXqoFWjqؙ;c9^\J{ݕMUdjS9,UNw~8}LI|\t7 o?#}}<_쉁 -ۂ8b]妪e0PFaLH 仃Y͢ úhlHAݙN>N2AG财ۃ0W[Xc3;~9ʁV$ cyW3'{6S,:ڂT::ͼN'RQZڅ4k0nxވt]3aWgs|o~X>-O.CMmq=W4ή?6&>r \C3*UЁfn9gz㺱_)e0s-?3`znCj-KB-=!oJ6ow]I`G%#ɳ@)T$W82nqL)t|~SBfg TRiU$g(pRTJ ?~(H?Ej$1f0p0 3{1c,:t5ݨvJ؝3LEq]W9jLH8XV1DٝQ툱eX˖L^lk4j݄0_']E>%b=c2WXWJ#d<b_)ԐΉ Չ`"W5oc2$WsfV#߬$WjG>iޟ؁6)dym FGŷNÓsvn]9kei,(6wxo+:׎I9?4\sC 11Ը9RLL5ں)<c\mww8¬B_'GZ(U12*ĥY 6{I[b@]71)Tdث))HGĜ/J0]-mɤ :"|uԖ ҕ;O~)iC%錘l1,G0lP&G{5eI'LF]Aעc0F^7m=Zם}[[?)d,=]I#tr1Es{cau6=z+wnV|E3T;Mƃx'$x7)/1XDt:SSj8%Ԑ?x[G'vK4ɾtOa.p-nܞ`C@A6aVA2o3F#B)K*5ʇCʰe?y%RA'nVHS { Rey4>Ƙ!g$O;A~Fj(0?*iaT!meu:ֵ=ټdەdٸh/- ni[V.i)-]Ғ rIj)w5 imȋNjl WBKSp Q>Χc>ҽ(܆Qe_^'gR\N>mC!–!X#ht0AIkz t'l b;[w<0"BCZYl"c 94E@of鍊ŀ#q% v//>ƨ!h|/+mgfs%a{P;RKRj[%I%uO#!VIsg tv0!@ y~(@BRCGE'@obew =}p+t qtem{5/(ҽsĮe'7R|x]tv3M&T e)@l:Z#Bv!7g" 32sġ[bDUDM 4ERoɀL)eB qN Z;o%!: _YHb [S6 Eſ_]D0Y8.I^]o s?K)t֧3AVU; o8/`% {  C QcZ WVawx$~S 'w8?pD ەA^rh"ƼTYaSq *W'Ac#k+lszo` io,B,>ɘ\*D_lq3_:/~>/7"}yy p{ße8]ndn';E3sC /fQ'4C#mc 6ȵk"818q$(^b , 6ތ9’;b)‡M FPH25w|r:FYytJOb993nt Cm`ð?p] zCFyjկןB uN~EPvFWŪownlJIJ ,noC}}?ۓO z(L $?sr0n5 {vw吒ng$Xap9^gR sp &nbBpVPbTІxo{)IÐ/>[~Tجo@4~CLG]t:R~6c>=/ 5]ο iI2Z^!-SXhBB/ -6hy,PBCFd dDIfעt3AѿmwOL~k>.e0UE3`Rh͵q+|`k~7Ua_~U\W!w{JR{M4PҠ𯆌-'A%@ fV+EtJbx$ d.#N[Y(?mk)gB>_yi9 @-FfL=E5QETtQ%TP)(@gJ4nVc; 3Z~gj2AR1k _}e z鹱fQCHg+Yr73sޚ'* L_ֿ]ERMoI5HuWbtYDrQ~V}M>2~^cX fgb# ebyp  pO.K@)K-KNnJ׆Q*k %zgb5$sR>EPN:萅{oC_4cz)mX!FpUL }4aQZۀ@o"ZH. 
+YL(}Gгe:7m 2 !>Dq~3CKAOgCaRN ]UcKZ ϡ5-ӃJ&1!!P.p9*OrԮ.no_cp:ӻpnb/1`l!|-ACBVYEXɠ'=ًw˴[oܲY_7ڬꃿ?8yloڻɯןa } 5~G^@Ɂh]ƮbcWfch1|l58)ٻ6n%WX|HWWrmq%q^ W%km2")kF )ʢe2_htc 3 |D|*%ip'ZngOhb|JF`3JZ8U&-<`9+I#(9O*e!tbv"ǾZ\ :x|[G&a`=kEСtmXts Kԯ!xg ŁDQzk4>v4nQnWX~iãu$zW>zm)ջG;#,]=樗oх3nq'KrhdlFQOkx ?<(th:_$+b8AOIӁ> RW;*/o-GPKd#7>@ @-!2Q &C6y9G>ѨR'3{nN\=o$&PN&i!Q-mTT%9ͥ(s$88\GF_N6 gZ g {&f(&2GL!Dp^%eeT뛬=z0s]v<=rQ+kijVoPݍgm a>M\:M {|qLWnSP^We޿m]ޔ?(myЫ|0hx:_i+9̕ 7(odR:۫\9gEq弡v)Z)Jad8kyBVAꔶGQe:kV<Юm y-89KV'ntNi}eD޴[@jm0SS)O3n9AhB :Q^m2}.kҁvn[` h=SOnt(l׾dD)Jym y-┒q:vu [)]SFG_߾[@jm0SSZ)^ >VAꔶGQL}vn` h#b<{i7:Tnt(l׾dDIg)Z)Pq/A<3-Š uJ(u&j}vn` hF>MIunt(l׾dD PL݊Um!/E3k7hxBVAꔶG=e[@jm0Sќ\B{ս\v;~y5:wydCU\c9WdEsf'Tb쨤R`[2|Xt.ZbH&1Kf9~I0N9A`u&yp$;`5'`ޗᧅ~.x]rvxmBpAޖJ0*c"sR%f lŇx!~3ſFSLi9<6szp@ 0/~z[7A}9a8gwl7(? s_ck4}Ÿӂurk.l2wK}Nϛ{y6l:pB擠ϢZ%% +Z&+:5`S%Uζ:y"|poͤtC-&=]_iT'Gv(C 4A9+)Bu@BE-)P cdzͧ4_SP% V.Ize:v^$!d^a T)E՞ S/R6Ar^K!̥$QU!x2W :2 Et4(b}Ж)G_1p9f X4 3Yvf2R} ^ ʢX.IX%A|39f" E&cI`c]T&YT2e݈]hXj/U^H]mGmӔD%P5 Ń٦I*o+?Xh,.Nt]n*,oEHe$ηnsCF Oe0hZ\g&.Ŭ{NHB B*hfO~vvds-qq͟/otg\qǪߜ O&ͯ~20^&W=vQՎxY6X6_w)nƗyV%@i 9rå{>yRe+A4L9i%o\dO|q+}0 &9]SCSBpnN1r AEāvdJast= 8I7pvn.K69* bY>͙^^&0].^ Wx*?MmZjgCJEK˵#B'[ ͹v _6<]g83O/gr]~LYRaq\ 8% P$=bߢGLGQ<(p徍a6}ށa^4?Sx=6(K{5dq<]E}I-#:c*C"`ll:/Boz+byW,ԆDJ Hs8șcZ߂!H0Y T|U/<4 1ga QB%16IZf VP!63DCబpOz Dt_\&aw߿noo(.eU zͥzO $N y 4$˜Y@B‹ T % B M%zӄ(p=vT@V)9SLpQ5!`B/HB=g2E((Sӆ= )¨%vƓBfU*ټ+3LF,=d2ERqt.Ũv:~AJ\ *4.F vY[F1hkhD! 0JWx!p" \о)=)eqYI$=|2!9ZPjm;[G=ucmcUYn,7vnd>!9g1m*LvIĽEjikM5m"j"{!՝+~X"\ [evso.EbM0xq1|rK `84=qdf CFiQt;>}nK [a>pz(#)[cloK![0+nlqX鵦\ٳ Oю>Mo&5 ](}ήl:Ɵsф|)V,t {y̞ _r獗 SHX V$3f-VO|=QΩz?흿B1K\LAB̻v=m<AG [6ꍢ[%j[2bW2:G$@nGD#W-ʉ=gdՒqr)4[q%'gڂle;9$ۓ欵yp۴GM۽ג*wPWB9:S(`A # nM8j Asi-IԦR0bC&f\˭|Jl.)Up T^l2\Š4=j|%%1G)dR y礚LgOaiq"҂|MAPIQ$B%"H"ѹ ά'&:E+Al vM7.r;`,f|=͜J/MptS6ܬrIv y'T"Ύ;U(3(T`ITGDi Ţnr=zChl+*N'йvRߟޚ)!d(%~]\\yuzJ ꄞ0&_Z`/k_LDy r*-1<gEH7ٙ|7К7+j  wzTPK^9 5٫ǜuu֪# Mھ^,hK63z(n&qBiP'"6g1V.HSy%p~|$scy#s6Zn-1t 15!zQ0'N:YŮzT5X?C} y#kCT ޵q#EЧõ\|a/ao~Y`RFx߯3Z{ӲUXU,V.x. 9)(B X! uGeA DQz Qb]wellyyPI6z<ֵ%Grs!O|\ErqV#UXr㒨$D4Izc^GzAhEH/OyFGP{ᢣ:X.wqz` ?)gY\bԾ]⿯[J_|y,"?me {6!9|2/ݦt&n5B~k-Wn/eطYսߤfLl~AMF(3Lt7r_(YaNJ dZ] ff;$l$%ppN+$Fu L!䣰<IpWC#;њ"z{j AwրӮƅ2Jx#l"mj0kb./R4l804"$Yp50!뀐Q YH h9OB&w w)WĥRM $hؖFWO<+f-T'=4({hDH@MІdIHB⟭[dG1ks\4O gLBo2pˬ`9ȡQg#)" bgJ|lJj#82PR;TPtO#Eh5b j@p4'x((5ȭ~DUUNJy=2 C4;NhVTwچ[3Evv((ڑ*Ӣ4W4Nn)B֨%=KL@\D^d^Srg~<B9b?kjX'\wOV. 
]7QIv,T3~g9 .jVg1iSe6dz$ZjX?]}>۔xܷC<ڛrCr)-f/K7BbP:cn'dإ|rd /ocYձ~_Aio޼&1!1~ɐ~QrǑ+B0f{U$?깆-DT(=$wx8y83N?vK'&:=]|-G1T7\沛4ȎhzCߪu &g?{݇|a /mlKtreˀa3z L6:lTxmPA(=y%^iJ|6G*s 0΢ J΄[dIMAFa'6-J.A}ECRmc¦ZG>+5S=} ϖy){kz: -hE;g/{P*9P U#QRr z6mp߈7uw?%mom_bv<Sd #◸D15w(of $<` ) M@W{wm}!{Iz!nRPbHŋ ` yDZIRƽXT\ㄻeqZz}k/4[}վȞYp/,!.*srF$\Y#'bu"0ǎsӷAK^b|ru^9/s̈́5CXlh`TFi${sZ_e~h9IuIAăw !@ a 1,PP]v:۸j_+pm~vq: uógL]vʽZ*?cW,/,{Kzd[U.,yKz_|Q)62↠*SԠ|> [T"9D2' hɨ|4X2#\}&h'&5^s͹Q}>#ՀЭG0(;Q"ƫxנl ': 煂ٙEWj)2 KЍTBH$ 7;`D&|KY4h&tR7IĽ /6 u-Z~>ia0*|TF qچ鬷KyPI'uGԯt F4TI@ #ׇO8tX%Ҁr퀘:&3BĢrkD2Tz3NK.z%$#P e'{op Nv!bsՊ: JemzU]HMDkYq[ rT 1dFj~JzARScN[FTVr4RBQL 2Gy[Dd FRɁv37@?ͨ@A8ͅkO`x!0cCY;ašZ JʹԎѧЄ4y &8To3VCr)硴nRI9VA ԎGMHӟ7idҭzmYh&u< Ί栱 LdDl}9]x FT0gbTq8m\W(>^]Y%6`z臵e.}+Kz 72?V"4h0Y& mI `{t ,ki=*Hw!}rΘbFm#{܁V]v;oev5c7bY\*6;5L-yEpcΐ3>{7HN3Vq=cyb}\mrQ]ɏCP~Xqr/jI4j=;brj===$%too֓Sq4z3ՃT2zN~|r}xV B6sv76ZNO)LM?cTƱA':.F,k`ի Y`ruTDהzp&1GA=}|]|np7wm> }y ṽ].]_# +(Ge$::m'DɤĀ$cG.`yhX$.ƤBPR7/S5 6R:'A1Ov i5M˄E.Y* 4-$'p)咁H 82L<GCMjΌ6F1Z2mYZŔ ASfb `Uj$"+ vȂ.h)>#$2$yH#/ &@)ʵ9np Q攲<ٮ.Ry@6WhU $'^y=F_\ pf֊[hPQ>ECX'?L/yc%-J`(PTh4"!F{aF":pt֢ :ޕ=6v`qLs^E͂%bٻm$voU=Gwv7_rxZ˒"_cf4% 9-'eD ᎄ XTSD-e XB(Goճ)ڨ9fY`vf/_3TNG[*B֍\^#JdV8Y h9?Nwř :|ۀϗ9D&I.1r]\yPB_ NoB66Cb)"Y#퇾Ik{ A*((B 1ƽ i?%40F݆A Liw̍(߿5PyqW7tLOqZMW}4ϭ ްRh;nvyE(V[^?Jh=v[ς>j kYE3wy" "Gc"(j(SGcNm[G'1R 9eUPmLS S35Uئ4A F'*c?Ɨʟ.c)a!`ZZ %>^IKA:ʤJD&_BWr -Jhv8bb*wɕNH=6FM%yȆ>yf?\owk tVD7\/zq=Aջx@5 uAޤgWJ)XjV٢~wYtzSooºVJWO7ZA9&tHCΐ*IxֳZDYl`TS>]n y%iZNO)9aT*VJ]^K\N m%+ڰBoẃ+>v\KQ?Ri~:zh['aye;K$fyEۢS^o.pR]}컦]> RrE#3it텗F;|:) 1XaDYb=Ni>o縿"::K>:Ճ=*}ѫ iN$myt>atq Xȴ38(&}o zֻŜn &"O>~?G([Sqm6@ʼnijB[+mTލ}|98O>MiN. =O>EƾjF g{8PN ě$3RPKP9Cp(7yAe iAL: 2be VZjkhIG V JAR[&D&K)G$NDRb)~3K ..h2]mQpOɌb( xgeF FR~>lNdk˔C&:"jN0-}-EPRzj[5y"`(G^UfjgFK| IեdMRDӆkr<3YV!$$[)Y @ԃDS);W:q=pW΢y $kGfݼP(<4iXPq;ژu5ɌSlumsGj%qш8nju$Cp nyQ9g6h&Јiz-h h⢢ske V*CSm#AF0DV&G.c99|7}`N e7]$$8#mu sŒziRV tJҦހrvM z")㜑Qۭ`y8,IFFyLP`6##@=yu: wJiU^[wk ^=jƿx%.pF!ҶyPy)xvʚ=Ƣ9Jj[# E\e+^"lޕ:=eWy(0Br'Aj;ZElO59Y3f;0 q'"VJ&RjqOSw5c&f"~9ÜT(vu.3 EyUusݨJƨMcLNEf2IqE[d$8p~}?;Q~<7xwv).>w-T#q o_Fј܈3vc92E.A"]Dׯa"zpk?s Ζ:wwsŶ冷֊]N÷>PbNpƚ]/ZG$ŏ3%]^ҼdJvB0G aj;UkϹЕ_tXVVp+Yz xZmЈ9h F;v'D $q_g:tf1_G|L23jdž87(ңD)N.RKICʹ\8T~o1공Q+ԮѺI/\fqm}G^د7fZkQ;R*VV2Si]J fvƎwg4\uzg6(س>t!W't; $n|eq-o?nC$-abbTP=0G~%>R!1VWjie+&IPCO; gp zʝ5*uV=T+4z xF h˜_Rͽ"hёyd11oAJD*|eN%0Y*/҃.~U#(A{Ȭ#A6T[]%_B2Alƻ;Ýa.PyG(#e!A$wVUv*arbVzMyOF})[XAU%a[[j  X~@ewՓX*#Ynhcʐ+AWJ0Qs4":5W``].;U.2:T堶BcٙMٙ}MٙčwQv[*:[BA9شFn>8S7 ͐Ru Woc( FGeZ1*Җ]TPgSĚ{Una0jC x- 1fzB?)WR>yix-&K5tCmM$JuVRew ]0'}>g? 
;{P4^1aZv!{w!Oo|v?(]u7߅К(_vOj]ሜ.&:*: څv!D uPf~bB>;នōA P9Ѷro%EKBѤhX?юJ [@g+B4b#D'nQv@E3K{ `2oU.v^jYI5B1 `kKi8&FI'[;^sI{HR–a`Gr2,6=c37+FrZFet$9*9qΖ$ad#>)tFr@{jFA(y0Qa 8FSht z\j-K޷|F0ʷbR36[393:ğat;`>,} ~߶罨}kv_8w@QǪ'g.z)go?:?{!R"s.(u (`B3E Xx.O.7YDQq{2j6P'2CVDHx0jΔ5"#5N52c]$(  s[+%,gZln:ˣI\F.Q9$ROf6YU7r!IȸA $XOF}|?6 1Vj=8NrsF9쎲r=uT0{4Y=ut) Kv8Qcw MB%#pD.o`DLF=d) QBs=~|qu/|}h|aHG.q=Xܰ?n/\g6oթ>r}@` TRYX" '4QpEA)^# fxHNF[$QKX b@vI%Qêb-j{'/L<ȄҬ'&^V$44@Tf3`.`~j8 c_V6 ]enٍ>9Yĺ.={}9X\~^}_hଜ>%4o^}1WsjʷNXjv9-9Ax4?ZdrBZZ/W`BkO # /ڸQ&)L'[A$P$km)bˑ^ D "Te_$gZ?mS3w1t.q,0!!A"ej.dNFX%'V)nq@!x=Hle3&&ddDR>YiX-K"BKWN\ mLr„!>ZoI*4/b['&qs#8L={0-Lz8k\wcxQ"%u}7gd+tWQe$¼ֿ0ΗgXUTD HɑEY.^?|\Qz~֥YnCgW[3EկZoUg5u<ٍ/TʦF:sW|S#{1RjU"cK.}6D#AO*njKYS@9UnHNVIu};Y.Bl&y^[|-^~y┐ۿ'(zj O3hǜ]3"T+f?j!)7GW\z\-6?_}Nu}"Ɯ`A>=;{W 9F@F^XR\% J-<ը;˫5Տ \.JV+':JW"xA3O0a ?嶺}W/_[$IAYTh8-\W^@C.iN_Ҙ!m<*37bd)NiAKD@!-Z!w{:୯ed(K Bo=4GA0Shb9(sfn :d/w1L-ӺIy9}]YXmmnVn>fx}S[Nq!͔P$f)mw޾яtu]\//߃zTaT9UVRW^-lg+H;X?9H+h~~~w+dy@,|\ K!* f/0T x@g v-cY5+ڽVu^gmLaS mM)zʁ`{I`?dNiEoH^|J߫KwI*Ev'Qfk|&-WQ_U!/<ɛŻj<?\/bK֔gbʹrUN70Mrnh*GN8UXRi$aT;Ym1pUT9:±Bq@)8yuu5|e' I~kgYO#[ߍhoQvVv}3wZeͣYӇX\#1L!9- oHk|`R_]Sү3XLqwkGfe68c ({:GM=|> igcdtiJui#]m?{WF C/pГv<+I>_ߗ3k():`?Ƴ`H?EɛE{ RzJQ͊eu$E4Fik"%AU!j-XQ3*vyL-@D{uVgנ馮Pw&PaC\pޡCX: Xƈ0[mw!c9 \7UV x6D miEnE7DhyAv]ftX$rU]$r]-{dk]ƏuEc9e!M 9Wwdt_A3,8オМ.[na~h3GasJtr2g$4ŪnճkOw*82i !%j0'jP'=$5HџuKE감xӉ،nmBbXTyKAnXn)xO5CvXJ xkH]ȝ#g Űvijs+ 7т!CN^SToi@nZ[ TœO0gybCXă. sd4QMHgvT ;1hI?DkPI Kv2 'k%jdoN4:|<|{ EfNxqt\5dOOlT?-a6ǜdY%Ւ3#\gc 2gZB@ԞÔ9}MOa_Oɛd:~V&ك3quh5i(PuS'UpX"0yP=eY1V͖V;_KyAr~љ 1:G+-3J&a,M@(-->tŗ}l_Rh \t) fj K劦cYi)42U}uM*:SEwݎZUw֯ԊgfoK|>8comSӬw_7A<MDKQЩ=`,vȲY˩ϢF)VKsf-Ƣ\ %)X#X*T4@1юUY ^h$c`,cB0av"LaSXZY!3p'glD&46ِJ Q|>b?onֺ,`̍5zU@`R I@AD\"n؏d7 OH_}y3ٺ(X5(k!NA;irBt*A_c>(BU'NeBXm.]:fK`R:KULfr/{5J9T 4 ̒4ѓ },Wu!A&t,F1*1a LqaH.h,Gk1G`nIvEf?Pn LH f 9#k@1*^P^=? ߊqQPfB=}a뗟_O*QwGn6꬚}N-|/ ,E7_#Ί=7f6aІsd{7y4'T3{t{=~ɣ򤜨ㆇ2D( 9Ta#ZSlXfT1 沑i?%`¹YPNHëBy/ΫrwV,t`:M?9pkuo_ON1ӛqzxiN><؈)b&MPTcQkSwD)&{֌$bAg,7݅t8U>y P?cknf Db@I㞙ӐE+*t] $'t]ѻoʉ$۲  )n9er$4j1rC !dXFeĐڇ¢T i0$ETB,9I ׹rr:aȚln2=Ȍm Z׋.F bɖw%shPCOeIp%~cWX BZƾNW$Vl+s|w`W&$E4J*61R(`##c:7+6T2rNfFcd8JywZ"kkC,e^hGna#%D`k,/pAxH1+rxFR`oA )6Nk#b%:=[qyr$'݄!T lUV ;CidhCx´4NK8Ʌ8)0S>VILCpjrAyw^iLoh3d w0xK9_syFh6\~_X6\KM-ؓ)Ccp=:Qx(I)j+Jfq(0f.0RX 8(ucl(1g2; i5s]!l;rF;(iKzRu) VfR7M? 
=pNBfq7:>|.o6!!.Q2sAk7Mb1ڙ|G{R؁'4 !.2C9bjN?:F~}Z cF + "M;-A}~ v.v{u ')EZit,wNpK`6^hu暂}RW~57}-w<|"X%z=Ymċ (ɔ|\_~||V<|AW%qL~dB\>bt~!tV|U1x%P&6nJVwnϿiF`jѯfn|~ rwˋNoqaZ^+KHeRidiITbi`̯1 "rl 혫ADD "ۭA"42Ζ<}) XDxq@f+wErd YE%RrVsq姲`Pʉ@ !Eq+[rHSN[COTSN^*f~o?;3}ds|43>;[ϰ 4.诓.vtyp=2ē7濌2=X[fOʩ vη 4YHU[]V/[ͭ&졭|uLLK|DQ*9aeEލh*ۧ,xѻK3W& IRCgvX .\VQx( C"A]zވYc@[1pIkR:bzYM }b$c-ešSb .rq( ˲<1X"1ikAY+_ئr% Nm?фA@6rFC-2Ģg_Z+h&ዛ5Hy.Y1ĉ%:Yp4&wAXa[>^fLOo PR8є,T*3sz{׳YsF{lU,?{ǭ@/9-΢=9bqa`b+%e$9 bHHawO%'ua*E(hqRHJ:il4<:aAij 9E]3T]F5jKF1Ojt]`FLgR%8y{)-(A3; 'Hy'M+Wuʭ:'1;g6Զv,MЊԯe9t|rPZe6,rNy]R =t驝kdST)eqURF| rjFM{mȉKRA-)8I&zD5RgcΈƁBE6eh \ $kF1F`o7su僳X|RNUJQQ[x{䫝'WN.<sՎ+tǓU@VHqNOw>: =w>8˂}fޠ55Z|N tQ8qi i'5JLS:<;p?D6Rj~Y=5q){z~yᰙ?,T?e~[I  !TKK&gkb =pAyI^ Qf$ta~S5_g dDp,.y\gɺ{ Y"}s~]^lň[ ̈́ "BZ9Vm?Y ͂t̔Cϯ1%9엩E_&У쐵~FY)-QK GcƂݩv"6K T2=`H VOHl7 j0_UCdz]޷+u-_CK"x= B˭y[yB+zu96Ǧ;@ɞ~]6 dWnu#S'#?Zp,ި27ޢw\&JXHKH7d@zr|#'[؍Ѡnr#>܈)7cʍ΍Xgh(qgg4?&L_]/dM0։i q:S$"JrA8BDkTUD*D2Ќ"Yi׃A #J-V/ 3 D5妶YC_pMhYMydOe5崰-Cq*H%0q]J`g袭TvZ̘Lr)VeyvprslP%5ư Ak nB 5pofVK,#ԁF냚 85 u4LAgHTȣًn@4l SbjpRIL@Fy8Ӽ 1H {^j:24p<0@ǥ V8R丧odE+0o UhʌTp,`D D bf 0~B-#Di'p10MAD2 .ؾv"J>wwCN&(^zp@q+puHTj vs^t|k(,̏p/uߎRF.gKuBA2{QOU,8O Mf <+C}3'Ԍ3f_~jmv-J 4f3hĘXӣ[E3_|8~.o=2`dムnMgDJ߱qiai|rOoa=>\Sir1ѓp-)EPsveFW_޼5_a;體xcELjYdI=:yrizTS8~I6cwP2LUxө*H;:0[g >m|N*LOI;8 :iK*3%m=ޙmUڃmMѕ'aj=o9a{cWcn 0޺ nnhZ, &y#i`8=>)t6SsHVƉbp{< Vp$>q@DCw9n2PyP<*cƒbQ/G)MSA15nR^X#VNGFJ$=FUyj&A$hU߽tMC@k2t8(;KvBwfr]497Ucxtz ~O@xQA@;^q4K$B^|ƤO)9S.lzv+3g>,䅛hMsdZ|#Ar&nsh@5uMq$@Jl8#Pt{X:J[ =ϕ")RY87hޕJ(*T~X{ Щtp3W]t$,dVdpSHdev)3xq;GI3WISJ8^'SBR+3b+2w& jplɳ(_ "8¬PFztӢ釚$FieqЂ(ַ[nO{&=lA5{'f] /ďVc*9Br-_2//*4T~&x;hht~:Ϸ~(\ *pK ^2T[qg9mE L59g|x͕u +dcϴ6xOm9ﻧlz8w!F_4*c'vtjqѕ^:(I Yx, i5OBO}0.4{yZpϖ5}0󰍺':a]Q"hN]7;z<ؔw{5_}nNY*6r~q*Aܛe49pv~}U@+ţUfPA*\1sc`G-Mn` TrPV:%UTABhFhDjHjN rKfZŖ5d.ܺ0לuB!q2f)J~j2oW)›hq] &4"\nD3#RK|񔗬XZpr>?-(r(1*^[$iןp6[," {`Xqr',!N8N2v"J>iC..okNaN6jMvF} `XImz]w A#mN7ibb9Wh.JI+ u$h\4QX:b8ү~U ͐N:Jf!f͚3Xj\yqz}9 zx/^!Gs{}p1pRN3\go/.ԱM! P$^Gqr,@&$qn Jt(Zഡ^LunMjݪRGR r8JхvG9S\cG%,h.-aMȦLyz<ɕ{J11wtNa2w1[MtwRTL,ol吮4Io7o}w?#M3|&\r/)7piSj6ɪlpmpS.WgX1Ɓ~q˭Fzv SAYkd1q'YB`yU$Tԯ*P/;\K]!)DhDZEdӜz:sǚh=>g'okkVP*?8:Wb c޼\I鿧gZwg֢l4ݍ,znd @&3#wR:pc(nF;4F'NӠ˒bA >h4"jrnARrp\4Js0;K/`f'$ /1B j.Otv`] 68Dtز^l&l56Iy}@ цQ0?`9!%4Vr/9E%A'ꈦKK?R]8!qϾLˀ6MM)Q8hS8IeE _rWowFtN &N*:GQ~cW^i;ҟ^fW׶㑂 ֙VCeX]}sՊL^{mkI~[zY~bտ(m3]h4f G `hpJLpӓc{}DfR -H歵RnЭ&J*h!p R**`.Zv;3TDXCgUdM&XdMiK pVc>i 8 FUւePJ]_! 
|lDw}9'va*ʮl.8!# _% ]=i xSJTO; zK9Gxk R!L> D`K: D(K#> G7F*2"*KN=Su:}kquv,*o%E?KIɈ#&^@,R^VQe 4Fq aPyv)b:Pyr=:ۤA٢Jn`(, ʲ):ALf؅6 Vr%bk,x/{C*W7{8RJz8xo#P{4 kl㒀u90m\u͏ A[Z%"5ޔu/]w"Swꂂ"Gꖱ5s|=H&G>!IYgTXZOEiFK CR*v`˪8 H@f;)=!̆sNhG)-қѫ nCEC$Z&JDh?t$<MӁZ3TȓOh cPAtzjAG'yX$dssI`̝aHS4ܖ!ݕh#;\ R4~˭6JOv6ߋMu; ʧh9Etasx`Raʾ/9\CDa쉰~\Row_{9^l91<9qZ{8!wIEuTig;Hfe-h+ ٠`$6X7dg(̨ŬʂhYWO<෫OMm[zKIs#7B 83E9zt5KKU(B {cu#esi@޺pho3פPvP*O+ێx>(m.C[[ rP[OLOϢ-35wBeT"nW]oqۅ) 0դpcNӐ%|6dSߕ~ R?4Ŵ8'tuPƩ+ 6ȓ8 #j/M# tɷ6OO1- zaR{ D7 ǽ3tʛ֖ьw&9҆&)*mUϝ&|߀QhQ6˟& DGSWuߌKp_+\|_3N)1r}kGE<|p&9MUuz 4\ /3 /6QnvkI_f43S3 ey=e1U)dGw4G;U]fLV w6( X2\v&v 1^}[TPh0$?$(IJĉa*$dsf9$t lcn4qèLnIFtsoƓ%jT3m p;F8B%@>hCzpTL<>i7*8bS>h(H-聛4]>֤) M: ^wIX yc)ْs Ƙ-#1 uKiR]X:RѺXVTI?vchy HW^Gl]e 䩚6p"/6aIԚ\Y偰\u6<LiRDB'NLfMw؟RV*B7,E#h~Q0$0-Ee\~t9R%dt pJ\G`=fZPrNCP[ʻ퍽l?Uh[>2\~xrs12ܬW@nHTɆ:\o.$mğ}Ï_^\W},Jj61KIBM_^s襟-hP 4ֱԬ,<]4HŔ(`nnח񲬣,30о8t1-l>Dr&EryXŌV,Gj~#SecBt0L(SS \-<*BR0{$Z y,Jd(q"a AB#ns e[I 6*c$m P،)xʇ-D= riݤ<=W.7CJ풾zd6N:_s$6{{B^{X]OU- &./ǥSXK}mZٱ)o8Ծ6}*2yKÛqm6i%qfbo-c[]zjHNI"p@,-)5"AN7Z rm9<'8,c7Z|cUvAt͈`Hp (5PK]G?;FpD>5ZseҔw˺fm.V gۄ z5J i(D0\aГT_è^tu{VyhgVJF{tYR4%8&5ARƈ[@^ zTp2eAgc+`E4(N@`ȝ}ƈaBA:2 4s U+smۂ  YIv~jUR@yp^E,2R6rU ,x\L+¬QBNG% LLl\MdW\-(|Kn.]:AnTLB5@lr̾駬In^d@وU_mo_2Ҝ/tu8|DoN}tW83z FYrF벙lm5LmIy?X[.1CW..P5ʩzPNOȱ(cRa/)60tmI (mUzۓ`r aIۏV"*-zUG"n {>M6iT>Vgԡ?{oϑE|Q1a$ZB:?ߞ,l1tUxd9 f ̺gz9_!23;C@-/X ?l7<[wKZrcodu bVWC7,jG>J6FD c@BZ#%hr[]kk]Z)N6:!2eAZh1:Ês'4#:&̂qJ!qఫjx229p\BuDU|XXuHbpNyh4(w,CIKMמ@|I3y! fLf~uR ymn`%rTJ4jC:M:->l ԺuތoQF%H6JU-ׄا!ҕvC"?i* Ul/tKI_%3ɬ(_Lud2/Ieh 5z^8.QN]Or-Q]l1F-lbM|^^L<5B0z;4hG>L?yyw뻛|3hCqV~N:\%W|^4[RK]1鏧 q+]pnP4LLcY,䕛h+t 礪$Cnm11ݎxRۻn]X+76Wx:uy|Gf ((e' .rUD:Xk 4`y1:JtxN{voA$qC^GC&[j:e6xaLtZy]ri~h.v4 9}&Иg}oW;AT(їL0s_A׼~l=a{^,nk/#APƲs+`+NŮ,hPv|މApňD̼cY%oz'Vm>F.//;?} tW8 n0a8>v ?<} ~Q~Jm7rPc^{-]X()Z[s Qp'FIi"!:r))H]o7xa.(&)٦x*nBJ* D(𬊣BVJJy«O楤)9x~ YWpJ}42GE=@|g4$bh(-ZY$2b@F/:F-@PXTḳJsX"\@rW!/Ub򸨯r׳ :M}g`Qi?ηe5t{[<*/"W<=vJ ׀_'X?=Q \TK=9O__d:[e5XL =S*T^裙ξ]bО1f1D#sԱ큟<liOH4CEؾn mjZ;iwj%Zv\*,/Ra9-*yM0R:+RΑ1uk*F&8B^a(`tc-䡨]aѿŚ;LV1TyαM6RCaJ)pǜ >Je-kRL!ƅ8@-Ԣ=~GbRrW2j E\^0)Z/Ru?nQ^و(^GtaԟM^Gr`=!t;1ǚqV2σ?WSS0Iazd8;ޱYAr|^f'֏9A{I9^ ᧢L pf*cI脤@}4ƥ0 Ru~Fծ R ! `CN*}3S˼ $\ڴ L$"K ϕLnNX5B$cs Ra@jVg?O~ y #E,*ӌyO>W/Mzweײ`kI[.>&#;}Lo1Mq?)Lj-|R[?)ȯ6. 6Ί~W(ȂF# f.P$䨤:ؿ[r~7/H|V _B51Yq$B}7S\\ K4*l$O?޺ y`ݟN~:'~rт^aPu qEKc<3-^GrZ#҆cG?Xbq[TfTu]Ebk%{ˢ&_،)Q?sf̜4dUya`DaK7^)L@4$Ao܀BO3v赵 X&zl0U4{=lK*7H%uܕ˽| ?z9ddZ r,k%sϵqK0-%uڈadWȻs{6"5:ԕZKd$#l@!0PaGS0 85 {u `-A|#gJLj2Z`WցVs3Z56ENBdXdU6^jXRsZfTXeQ6"LPFN;# ةh.*pZC-TI8̝Xae;[| ue=lh.5l( Pcdrc%%cJ0 `zLy >)lXH&sN TM3M0g77cH'bB_ 9ZGLTiUqLU] a;☪0;yI՘HB)ުTH@TOVv>amk˻0IC>~O'tGl0|1h%X?{$$=ADM_ wqڼA` 7|6M*LZO9!G]LX0 ̝x~o6SXT{Pg!c[PH,ټ]4h㛼 u ,_ $jH)o Kt8 =e!}KsSbtŞRCW+kK4`O9"bOB؁a{!1{JyCƪɑQb5+&Jnɺ @ ؐNN` JWpXiKhN`#Fh Jv5[rRqKZ 4*$1C ! #4\`, #/ՆԂl[}&W@}}G?Jwe@T5t-8ߌھc.yej߇i'B!Ybz5-%7$,u}ڧ?|+фaVQ$# y&¦خdl7ӭBS&i'Ն"jl25w=xFZ2CD+ݚ]fpärlst. X$RD6dVWwvnìMRɏ#v~uWתLln:Pf =&u:u@g<ITs/`a d-ʓb;V7Ag%/KE(BlMO}iOs3s/ACYPFۂjfiԥN.^rT>8^Tb;C0:W7'F!x*G $:&:;Fj`ǧ{II5)T L0mڅ,ӊ l}0Ma'xƕnnD_iaO+9nj{7\+sad:|^xǓi,H/@dIY`43iـ$3!޲"JjI]AvEVq"M+$ɹ%IѨdAGvuGB é zRJ|NtjXA!-\3UT}JAW5jC9;^Jg[0"d JÖsCQMPP @t,ZV{u4ZC'lilu@$r&/&!Z7w/A-iFnŐۗ VV;qZK1z)+N$Ց2/`tFGh0rfUXDad[UBݭJJ@ͰNjtC'. 
!)-9mQ') tZ!;)ۅrmlSHpIUOT NlG2p) %ڐTUR;Ȉ"TY>za]6:(9|' F̤&)+!?GTOD /;Q( -0 (T"`L8-7 +9/wɨ "rXnmF4'n%$^ePwTp^ BV %z&>z˫}%%;<dj $>CTN;e٧03iH_bIP ~^ v cߏ]XJd^yq-U_1DrHy [5I?4ӘNc:-}!QKuLE1 <$Ր5LB0'p5)gԟK \?/GWꤼY9}hq[Je&_/on {E~>؛>=~/DiQ!؁F JbJţ=^P0=X71mf!C 1% VDZ^!CX]~imJP/O/i4G9de4@d_5qB-Xr{VI*;kv1רFw+pl+s8E?WF#A RiPb)۴0W<&^Q`󜷃 kC2!cª<7LAYgRQǕ8bު`xD#&$saHgz:_4*T/Os1ĸIf^Jd١}I-ˑ$e#}ꮪ^+dRN;p̜ڦz2If%FK2wazKjG;^C59*br7W:JML0[+='rJAR:YL҅X%$t2y \PG2kcɩa ew{D1J7Y 5ZBQ+w3< 'ŰJ/ fM5 i.γ_ܜ%: X3 J,$FkmD4Bihj}kri~z_.'O>'q=}oW+n放?O Z 'Jǯ?>f4'\~&Yo4ٯ|Nfq7^]]^{uNc-Կsn?ޅɥx?n,."H}هWd/hpWG?7yv,*T`z 5 C^oJ d!/%}&u3dgC|SK߲n(wm4ҩUY-nC|vSdb. Ո)IiYwq-X(Loy8P 9آew۟jPtv'PћKf ?( flANb; m]o|)z\;pw vQwAS@z*V}NWa_^/]HJ|U;A'xPL#4  184L']39z0E%+s1$4]8'H)GIO[$Ǩ~m"4u  ̂5pBK(=Xq|-.7'b9`+b4 8J>'7LsB]`\;:%xY®Om40/٥)*)SgȜkO1inv1%h*^BS6R ]/\ΗСycv7)6OyJO:CH I*H`EJJ3C ޟe۠j#}1[zΓ909哅R WbOOW.1Z]^_dM?of碏DNtgLaƭV&p$4=R;c^Us-Map1$K`KE I;"A5>^oȽ=<[ejEVZ/D-MyMv㻼! {4rZ *5 dZH>nN&Xj3Ójr +.>9́zP$t᛹˻xd~_/nruKo{˺kW098tb o};[f)q'a[rP(/\#Ť}x:KYfcAeXYlP# .ZkyA3vRDFtΆk-?R{81ڦQ;[) рO}D0"*rsŜ#J1!Y/1x}ԩJуV$tJi$MH Qx%`lQUS=mߒah!8vx8{'3*r} wȻwK2YsA!Cnd[ḣQ#4[cGuڕ܉As+a%h xI1, " /TWd!p%UE?ӃZyaeԶPj@}:@SSno7~q=|83 ̊YFb%Dƥc#'$L{1~myUO0vTI{1-2u ED:3۠,!]^}$vU{o#"wq"?C\FW; 5E4Y8 DJU3`39 xnq|sk`ݷV`]dD dJ;W*U Xv|. `W}z+ l;JkM]\\VawKʝ{KQWUuv\UZK&&R-`5NUlPSl@9eu #98yzZ3*JBYdEbՀ(kԱ@y h K'΄чV <8H&[ȌNn-&~iB^Ql0#^-Vt]|_=-y%9˻;Io3~pĒA&9tg%+'knu;d~ o?)"{лn9KEp=wL{}(}z`kW=xzOJ2=k5l /,-`[?h>by {~v͝V2[(NFY庄8V瀽VکIJ;ңF7}% V[rzoh)iqCp~a \HX&3K܄g$ C=4F/4@kKZ/#a6<"=K;wڢ%!sy8K2dUPE%Z˨DiR乔^ڥ[GPAĩL㷒<ߞ?J 9'{VX4qd3)$%-40B)/c ,%4j J&d9 REesb9qmqxRr%G&%"2?Z{< TpMr\{t#R fDw_´QsG}nJk$#3UKLLу|Cc;?ߒeJp6/Yv]U뛵5F-Zz]y"d-W~.ҟw 3]. y1oZ xAz0Z}m]-§O¯SX<@>ṽI I…z4&E4s؝PRHu!|o Ap3lD~ jl9F9[CZ \Eu;~PPMwe Kìkjes ZA̧խ3CT>1;}|>)/Èy |7cZr5-tz_!qNIAo5Ҽ*kO>a_t]ut๊b|:DըOOb<Tը6P;?9Di}[ON?DzF"uvبN2LBàμ*|̭o3{P˧֖4=]#>8VLO0* ϒ30~TL;*dܓQ{l^A'nHasfGz(ZZaP1<.'CxJ݋Q6p5UT0>0|blFښ T$F.' 
R H,C-h0m ;' 2qvv%\1LQwgΣdR9vvgIe>C.}+(PL#\kG>J}ۅJhh{>#WLe~noʯ0)ǥ:u=".CG t4J`헁W|$O u,' i ֠{9 >8{/bhT О pkƬ넜vv!ϕ@10ٽKZh/<;;#])Ѣ;sl;ob*Gyo=]/V5kP6AQ+͈[Fp^U"qȐL.exL=`h8V; /(Kd/(zCmvnM)|VOOb ESQp4"MMh,"Wqd@`Kk4*8I B̅6Gz{\D (kN}ZdXX~7sBcQVdp|!X#(9h~酡:ཅV=QBvR<85(F1PhS+q*ńdFe-{wZq\?%%9A&SIUٔ4 ㌜u\`t[MKQG%ѷ DʓޕHq"JXG[ut ^۵Y-׷"hnH^+An D2.$3S_ M}j#KK J ®3B`S(`QKוeeγѧ%֫1|Y/ټ_ad AIpv`։[.濟$-e}S$H.~_HUswW_8l bYƭr{Z_Ƭoc%yЫظ `J/FRUŊ8W޷LJ9r5,7vIf{7,s ZfoDVڳR)%*We+[ҐgY:g]&u EurQǺ umݲ;jАgY:%WM6Z\ RT'uېZ> ںew4պ!\EtJ%:]%NK Jp D KRG11."qrvSq`sϗvrXkJ _;/"HqgBC11*)[>),ۥ=lW[5-t}?3wrX|;|ƈ)#j|p-[j#DZJz.VG;Sj\CfF=F67{'Z"rk0lLcaEk_ FrdlPBO)N&Zqzpߚш\z:2\1ƌeP6@&(D1U|%|uJ*b yHSxV1N@aR1BPF#/,)8jWW9-J9<&/(8A^[ׂ#q8QŔZB  +1đ%@(T>gzp]OgC?1d|>p!%T[MfU}L{w;C4J,0i#VPX ۍU.>]!2)B j$!P+m0OdB*I ީ"`r(E9 A6V 2kJ9SAIJ+j3T+l:$fZYPpRW-1%=BY{Fj+r̔ɂL,9UVĕpW{CxaU0\/F:0<D 8 iVm&=kUQT[0ASfZT[uU}kw_ E(Lيd9S NA5I30}JˬAat@BM}  XS,b` 6WJ H OW3YSo~nf!KuW.l*I%y1>(9L֡>ݷN`q0gEaYqbR0K1Yč-iY+[x5ג#kvTzսfq-"n`Mh+`#B\)A3zV:OEA!ؾPn[ꒅsF6πU(97:BiJ:r,KѣgiD>58g_W+"xgc:n߮jp.ww>s#VR[Ϗx6J\GX/O"b6&>>v3_ruW=DO.O1n˻FЭjT(D}0á'WgR7̀׷M 7g)St0 ClD1IWƄ򕚶=ant1#]Nuy-1CYwѷWk@KBxE_mAz;B5VĜڹ7luXo`?[[ʻYw<.j1Zè@"9ګQ$g- Cedמ#.EB~t~aKr~/e;ێ=G✎ݽ^* P.")ER(HzD3QC%;e\_u$'#ƜQ) ៯w)&&ZOOz0ͱCw]'|k|N1i_'ԝ%wиr`+ˤSP_)ԁIDoOVON`:P&ԡ˷?tbZW5nhb]Fi\?;")2//?dfM|>F&Ёox>ia u#|n&˯U^qof҂2'ןoяGӛˇͱKRТBŀT~bT`T ݎa(+FRzCn6.g~B>7%>W'&q5ofPɯݓ>gp.w/hGI[Z}r*o~,f[l=hRS,hY!ۤ)K5jxej,9m )9wB 7u;{r[B~$sbBU4KF!hwd| -}G6)ā֣ i'iDzD0fq[?Qߟ0cR+$XEq(42["Ł>'_)/J⇨Gt(IG(xDt; pF;Փ~w(r46{"+CDlbt]]+zPEKLC`=/Emt_^Ib;`D*$9|v^Γ20/fl-C"L/;51v 1/ǘ;3-FU%CbvrYK[^אRrH7jVQ[ʫпN~F3Zfᾬm'^z&_>F/wjAP-d 3gpEy Yp[p7WSUy s䐑:/a<O!Zh ^8*$RlpMa+ǫxjyj#$E&{ [lqHi}k%l:/oL@~d)ֽЇ{uc@RS+M$!.P"R Nš:1.ӔH-W2pR+q;[-?K$"|/zruseuNy\f7ssrqnO֯>$Ol\a0-w>-+cOi!Jb05ʥ߷:;>qJUmTNϵKyz<"=qXg@ΐA]c1"d,*!U[8~r (?sY^S]?\8OBﴇ|1'/n?@ɪ[x2QMI)|#F$靺#lq=5e[ϐj٥j< ( [y}aL,*&q&=d𼤞ycPrV``m,YоPh갍I71P"PAfí\S8 Ƥ'N1:.T`(,BLH r(0c(P"7/q>`0|$ej!,"Zk$0spc"hs<>gYu}j]VTY%fgh|NI Co7w?>z|kX{ş||z? Bʟ0lg@r;Q*ـ$ >=Kaf; pFHD=e4} 3!8Og0]K.? I 1aarD(.2ALv|x5,jY/,<^+ RxJB`b6FZ DSpDSee]X}yc5栭T ΗbR"F,.c裣 dr-2X #J DZ豒ռb$՝gS/w!ޔ#0:Jna:&Z( s*D%k> s) -_N; {wJ Ϋc0xٻFncWXzqrH~٪}py+ĥ`0$&~#rFq]b_7Fƕ3HFߞ}P u۳9Q$+*'09Fj4y$[14SN5K2w36WG.7Qhs8~ Lr9 tŠc1gQ2\1CBtcMrVi}{C|Q=. $;lt9uYS `V-#4=dj1Tf@f ҄QQ3qᴵN VpbxutW@W(9z)Eb=qXnE.o1Jw?d3W兗 /^./rY)&"Y3$ E˥dFL"cVd$"˰̍R_K}t@aY 3g21 $# d)0BZŬBQ$#ZZpTf]Ac8S͔_{GB{H2@?Eb+N(C&~XMxxmc. {wKh58~\۠49yuiBQ.ؘQYcrr4?BW2oCyۂHya{׃Sq jURIˆW_H]O`qr~!g/~ǧp!lWw:ń20xl֔JN4!Db.uE#bߢh<5@mEV^rP)0/-|JGm.{C_Rz{akGX]t jRYa~tr]~F>[j[k& `D()׾@6OZp_,f7{Dt'3]҆I拯1bv{@#"iaWuPJ@)3< ϐ}-$1m҉`&fDx`.`O/ppuZy * [%Y΄+c1Grz`8G(c UHXuL $\NJHX# %h\p5׻CuJp%"6! 
Vp㰑D̗aN!#ɩy#y) `cF$H83Spz{5.pm҂jCE4hI$X"}@&&8 wW;x+WoPЕ1܄pAkqZ㮻nvb ڲ!;TʙA AGONL$uuI K'"M];O晫ڇ6;Pօz؂noF7ʖ[x;Ժfy+ X2[BP2 ڂ##ZU"Q[ӏ:3_`٤X9 l[4崰ޞowT7 ~r`1t%$ 0:N+%P&=G#IڥDx~\-IFFB ~vdTT'We=jPI} uk9?cʼrRN,h1)`?p9H˝]8ގvBز𻽽pZJg籠|nmlXS'ߺ#0 S|@EWww5V,`Эmdx-=CՄK,r.I2E-CnN;b[֣il-fvkBB^&T P-CJ[*!)$қXj&$䅋L@wz%B-[ R"^n|PF#T6bԙҺAr{}8K178U>yC.'rFmW`_L(H![ 8l| hMw=Dsy PRys]B\JOH ۩6IpAw-AJF]o<2)J$0+u~sQ&G:rb[YcDQϿtnM]86n-fk#rEϝ QȺU*H/xu1,X H>Rn lNs*$;R^%PT2PˏLK]p<`FrK"AJnm<݅m$䅋hLn u;nXtr-I}Gvњ%KLN/#ݚ.I2%-C lD 6wDn=".[ݒ[n H =R2&\rdépaRBFKӁi'b:{[S;wx>1:{Dh-.9 9CƬ XgҎ$S Auo"( N54@89ՈY &e Δdθ\U1!ѽPFa?məWgCrLHҸCc?H\7q4Y맷忍߇<&,RʹG;],qAp5XVxn&IHH5p+~k xu*jJ|X:-acsit aU wH *U̒,&'J&GNN8rRO51FY#IkopO5,A\M ȡ([[Bsk@B^/S ~v#Eͻw]j:mܘ˵aA?K"'O wJ]R]ZVp>nb1^e4 B}7\.#0ECLI![4ituaTt> abK L^Ҥӥ[zMXDWKJ糨`LW;VWB}%~ Sb0J#,)~Oib X?B9ʎ)Ig?MВy<+p En]n;t`j3g7*/o}l6䃳sX+'m_jϾ>L/~ ko WR2xj3}ӂK::@5a27g7m~;|\G\c> H ec!k1ET׼cP`)9!u$:fRŲlQjE0='ͯr;fV kKU2o4.9z[K-Ŵyr *K>jaUɱ%Ͻ'= )&{^ͅh`;^0A0soK1Y5)bBTΗˏw be |3 xϓ]ue a 2ո,QQ_p8 S]>hB$Ջv~ryנLe9N 3rE4!JˌrguFmNQNԔmq D䚞$ _|sj6B(QqeRrʄ [8p`Ŝ FyYΚc詫[_ͮ6'RdTT1¹HSXMцKŌrNrLs-B3=R)b]dc} ś,Ofi6ם{yohe ,fS~g@+Xe om o\x8eg3v SX[}Hi2!ǎ[=,gf)SdXK<χooF>|x1hs76͢?mRҗWnN- {bV\¥wp.A0Xh2j&_I*(GB P_oǵq#!P}AnY7$J[TH\ݥM^FImÖ`S'*mu6a,/dj/EL^q:yc# 3K&# Y+Hf\ÏlՌ0Ez9&#Vߪ]9%.|:|߲Vk<=מjYX?lGy H)֤KFX N;x1Fx\ ) b+@wYJ=~ *{jhffnc<9־W=pp8O[n70xsCyU @HoNt05YAV b(wYtIYVQ'Gl*zִY0iP^24#]]YQF%XԹɨN_-PHhzn%-܉;zr{:/DGf׶T2%dR7jQHsN[Cڷ&[۞a\wj9bpwOl *ATܽVq܂=J]"<$5R}8;Y^v2Zv^1lgM"\1qV=|zv!cQ{oZIJZ,0O.܉YY8a = ݫ0DzMz;b5M+ge8#Zn9˶/~I6!(mz^<=HBpĿCX-+휃4tBME7uiŁ;^tm!iea۽=k^pwmMf75m81=\ V#MȴC1Ū)=x\!0o_s`G3i{?2'\x+XgA"pmcqW;Uq59ޏjvB%n0U41դY'k ̭1[R>yX:Ap+s?к$PR\>PJg6+H7 %#h9k“G$H" LzL:yI1}]Yo$7+B?.Լ`{ w 6 %FUc0}UuI&3saВS 2`g3,c =#elE֐VC.lA4Pfmq$&xN}*VzxS@'H8rbE+ޢRd'1-RhRf"R|0I'Ѕ#`p7~. MU/=Y_.[g%Pܟ"rwUsqO0@Zb.>YcE8M %.\ PF1Re>:Ż6]6={( ėi)Rn%RKhj -{= l# 8$-]l)pIFƸ҄mׄo˻}1D k(4F( 7R!g!" ђ` u+O<爐+,8Jj #NVA-!֪#(E/fR"C$^(+ΐc:J-THg&K͉@QJ yhR1$Z]j ziؔ'|9Z"4 1ўkc0GXX;E+qR4W KD]SߧU> ߧ^b!ڐi&· aު=О: jøtEYM st0z6<9~-iE f^R,PTR0psJ;AW`JDĵd.UX@B/1eCsT%⥌ (`Kt B-r29y-k $VAa:eeB!X0H@Ir?oI> ߂5zT哶WIk8x'‚s鉞K.iѓtI<4 xDB^o87q3l2)EC Z\#Rخ){a!ў¡I^ Rd] J,IkS1oo=}\ A1fVl2|DW5yŃ9 jf{4S&X/h^ۼT\"+/<:*L^-  ӳƷǍ"+۽X[L('D`mz,{flV ?;۷*2`ꊝ:{UIvgV)T/r^q9,k08C{CzƟϠ\+"`9' u„m; ']JIm g<A-8 {f' SVilۮzqdF3̝ܥ~o~o]<f ifՇo*Y4&+Lׇ}Ө.d pW5C[6o;_<<eO itu|q)guaAZ-{mss>x濮7G_2!"j5pgt 3b~Pt~{zyȈXKD&?~`َߝGݓb+/S o?5v]7g滾іp@3 !3JVv> E-9W/~_?1lM$a+ 2BCRD?N۝3+Y~ZIXOj6fNJ?#a 3S N^#ׯo:t6@ i<0h6&DR WFfq xged;Tr#{k~nK6!ýBr .w ?PPF l"G ATPw{(>@m ӄ}KDa+"69Qm+6c1Uo!< ^0cb^ZJv hcjFL/l+c4,z(&dDݹk% P7ƈ4!=_\ѵϽ;>Wh66ifIWkcݎ-zO}:[OXT=OmQ]-0D(g^[:-P[62=Üo:^qtO+t=5oC!BQQx$hrcקĈ0!+.Z}QS/62|B8fkh;w&ortn/b>WϦʰQg_< we@p#.pv-zyvyp-|`ޏ}$>_!\r92.=˓Dw>zUݷQUyTUp&{ϗ;g8K >AǤ|xebZSCjCO(߮$HP<) G<JZoc:o$ 0%=a HcFK8Q&꾇?ڜ}ɱ$!}`Q&6k&$53B5ìsK1 륷m ?$/[JTi#)(),p_UKzUbHPPE팍^Ef L[GC "V<llw8%RH)iYW̫IWUȱUF#as*Jc[T󪪱8*G5/:/6O׷GŨ}AlїQ"gV^`ۼ8yT+@KCE#2tCӇZ3XLjrL!*`姙Tcq4F@ç g<hC2F3dzx<+oƏK.5˨WOM8/M6 tǭTNrz2O\7ɞ S۱~;Mߒ@;-eĔF@-e 7Xxf_|Sĭr1HuH N #^5nٽJ]zH͢ck3NH֪׮wu'z0i;cS:7cWjw;%ԄJ[+1H7pSòU{gj^`ϼ1JyQ& Q($UWVKy -jhϵ17 !)NW3p0+J5+ B2!adfl"ky\; x;[1QlT17nQܨ_ U4G`tAfpuQ}W\ RT'mV|SZouhVBCp){ώfH͂[QCc㤎IIH/5:qM*bzeToECldulX6}q!DKb9@Tf%<]}8*F-0QthG~IhyxNݫ7VUZw \?6if 2l-zaCȇ9h#ulװX)FU7[]ݯ~H$-6(SLH~Hgh9iVł̓nRTt.KT/6$e`@lnb\ys!>CJ`T%Ҙ!@k @C3(׵.83^`h h:j> 2ܯQ2DE+^`,$W[kLQfDtRx#f Sr-ake 3B=H) s`( VM 8NP1@bzx/ q߿2l~s@37ͬ\q?>o֧ߪMB7aq߽~7M"<4g0GofܬȒaٷMf`&KpyUy)̚!3%Qa7/hO}z;X>Kq0'6oɉ.j9sbKhY:YX @1S9eJe"l E <_9~Vhs&U+-ѵK%T8nA!Ā1Qx$t#V'YIpO7w.?grM7 z>l.SEt%0z 2sұRQD2@RCbLT,Q.S#R/j/Eepj8%d Uz0*T$`2L}^Qd~+o T@H>TK.IOFHH7 ]WTE8 W. 
X7n-z~cDTۄ8s3 Bv6h➉:nhj*ԧ̱,)Ar#;Z:HK˝o2a0UZhpa%KBYM sD8AƐϭ.]j;'h$aZ!-F`C!*@eNieݜ%  QP(W,d+XfZ =SZ1/!!lkc#Ц솶waxp 47pPwytap: = .X͓VCz8JYtpDf 1 ~E|EԎl|O/3`ϥr8G@iq9L'Hr+=.ĭ٠p y/IV#eLr@+n76MҶ#zv$nT9eQSyldWq]qFbPhh_' >l$K%Mp)€LCk޸0Ef\.pwP%YߘrE<ᦇ5sq>=ҢǿoaL?[N DxiF{;);AI_ۢ8{d  *P6lc,PU:`7+Nq1ȿu%~Eg~~WQ9J }RaX́r asbJo) ;gzȑ_rv#w ).Μ3\v^f Mv"b{}0緟bK[7[ZV[&UXEV}${#MhUb&4U4E(טX7fuKŠꤶQǺnH3`bo-yU[r*S7ˍ?AiR`v8InRj9@nfOě+,34یNd&n'Ѕ({i@;g&paD2b/L&ϽO"/h IkWm% j [swex vVS{ x%Z} ( *H'IzJDSYsQ)!(!*g>p %"JZYU.pnN4Fx+-5pIiAmI︙ΠCS(n"S h4_Kw܆鮔ƞy] '}^-(Vg\S6t9qp "׌cXw((ge \TP2"2! t.` :ʢfZJPY?RJҏw6t,޶_vtܒ-^3kK?bJC%=M6HKflq=8㻢?F}LRC4lPHdž :d&4wL;sV_]|w5}}Q  m|h9)m8 WxCG75=@V)PN̠/$Z-&(ox<8кBnAj5dʓb*؀DIm}E蔣1xNb r\sI5U[Mrܩ83iX gALnqkCsx (RJIx\gx;7C\glb [/7qz,R sӗvTػOop Ǐ~c`?3 S37g||w0d| ?o;{z|wnf0}x}N)r1rnfdgo+t㚗jʙnèrӵNIWNl>$ 8p-:U `gI/bAhP;θBO=k;+A(3P,bhYfA 5\Vh B9a`qlRˉ=cb$Z㗈$#?NVO$ mn^0Rƭ(=zz JMY;_=p_h|( =/cy[|} Ptģ17hbQ{#Dg.=/#|VҾyWJNkzӅ%IeJCQ^ʬl*L,ƮDҖ+ЭT͑](`NOKf|RssssTrY*,![('@m,׹[+HW,5ƗTThsN@ A }ABج*y(јMŨGP_b<(]9R g > )WR-H@a6%KyךIa-VS9ZhD HqeweUĴ6 eսS 熥͈nV)rFB6VeKijxiԂRDJ2ڲqAVt%UTN!t>!l>omV G*š)$LHš<Ҏ@cҚf ZV;b(dsNb!RB'OΟ؃Zy<~'t yT3St%}WTWjO}0э"Ⰹ'< |L ʤur.jMh<)ɯTS(8+#k_󵞓]H]34"AmƇ+)!ߎ[eլf5E(i-̨_ԟ k̶˿ʼn9W A 17C#بOq:W9 Ԙ\qaq\4cʣUiǤ̔wsOAG yfIZ Z3E{mtN)࿙b-_H@)yHa*Y\ R5~I'7#48aَg ;&;F(-XcT$Gv sֽ A5cֆERa/zF1o#Q9+IqGUP`3,(/t,.4[j2e~"ԆRY 7NS61=^ v&4Yt)дvb ;*2?ɧV9e%Ӏ|xScyj:%vt1u,` ʃ2uFgUyf#zb?iѓj2@wavp&k2i!qq&/[;M`3m_st!/}yq,#K1>' ɘ9{Ϙӄa;g0AB'0hdL:.8FѯT9._ȮQlq,Ls$Rw N\YiH]~Tz[Bp<3x)OVhZ)0RY]~hľE .RKysm).e=;߉WIk SQ8ЀquѠaFիo4(HUC(Exm5zqK1*5RS"GCo:y.=j#:˵V#7)V9k^1CowQxjZaq\51Ҧ[Vez?^<(j3ڦ ͨn 8wiZC$]3䇹#(SY+}0wS[\wD=}mۀ ;!ػ7^VK A]is߆l*BF3<+n/NѬ^_/7;k|~a|=Ն!}9~<)Qϖij(._t Ki]BM8{E p}7^f%]sQFYyt\Ӈ=OJ4["W/}KSemʃ{J-b OH R/b aI/]<|+}~Bf>|p?yeb2vXU /3bcLPdێ,n6>Lonqb.ކ,s糟1JҺ {U<%z'~?߭@Qhqա7~"&&"&*jfmJz&z d>M -vI慪Dk@lV&@^M\DYb )Ld.|nz}ٹ׌ ;:Ycw5zw 0h/)ݻ_0iRa%/==9oWͧksOsl\])i{U!qł9.+vbmL{t_Y %r/ {6qMDmY1(Ǵ{'ݗjO>Jր5!'I:UBުu+WкbPEuRۨcɠLܛuK2*Z&4U4IJyL|7:@_t+(Я [gSFSAMB!rI-(ir f%жn 3Z2Ύ rAVjuq}N='R r1*ءN:@JID_Ս`u(prJ%,/x.= ┃mL>u6D#zȆvTI[I1&^3dPRIpB;\ZXȊ!W(F4‰Qhs ZdWg鹒˫8x~Ar\zN ufaޫ\HMkr  h%gp]V,r_uSϔվ4_I7T/ ?{WƑOw_  \e?0fgl%)'bU4|geə穮Ih6 c;}0۲,fK)S<\L7*NŬ [/)k*y. F ]٪0!B*Q売TW7P!k17KlLuYFR.Fbch⒞.@H/UJU+Xi$EqRRsZaЊ']C{b|r3L08B1 p\JZ0V4NeJS#H _XW`ij(¸%+ۈR,u2De&9X܆HWd ( ZsrOuiqPV,Q6<]i##Z)c,Ǵi0s")*@k uN8@X$W~Z{>-y޺NݓFB `DAs{ջfЎy 49]%0:EsLs[anX d n|:&9qޛa8"3x1(7:s4~_/.~yy;orO|o] }3'vap-}W;" ,G^_~YPU?Cz,k pt+&ye7;Urk{әLx/x$j%^xwcʐtd)JƷ.5̂1oMy]X~1-"& (C-9l'1'1eԙ*n;ˠm|i]/.hoUm*}afo1|d~kd\¿YZwܻX2;gէ~?RzL?c<$%\5{ Z$WYVq~]oЇW{Ƒu֒k0AnvvJ#VÇ*N M[x`|i@ÇvDr$zwluo֠o$'jwD6U 88d_GL)9%R;:9~vJQűDCA1oO3Jx3Z~_9v 5D3/w&+Egrw @~ǤQW2OFˌT+~8 zi4S!`td◶5IоA%F`>yw)?i[BNsm tmNcE% )`W+ j' *}o$R2WvJFNGDQEYsA=sSߘZ,dA7}Igd=hL "GI 2jVwIa2OY[O7Gj =K*Jp~-G9)1Ҥ=ŭǭ+=N {~NFWILSu9s|^`v`D"cix/aT$uU0z CE&Hn[bݘF%/Zָ楨bH(NLҊ5YbҪ>ҷ+դDRqDȀ_*-\EQ URb` BheKʤ Mnb CT˫@T+ADiHx%J.5F ʯA9Cѭ ø:=t1l%3C#1 -DSd"Ş{l\2z`!Rom "DQ쯡3 DEMh+X q օ6#=<r"i;x"qV#ZM<\DdtB#r:<%qQL7nMV' C}@e#,aɟm(JũH|,3GCx~_߽h yH1Scjt /L~2$=TWwԻϞ2ʴ2s./pW2,ai\wI"XLicU:nޔ3Zqev(\G?>eN'OI}O. 
I:/-q}/ ъmhFJ$jk)z!JFR(@oHɔfgpCRZNžETSD%CVcI,r.4zPHbk  yTY};;JF`U\jv"!d\Nxَv 7y crjkfgK1ub;a|oUlq{ ?vZL1_TJf0Hs[ڢHԬxcy2|+GhQU͸%ILz6Bx\ߡ';',O>]>um l/33ۏ'Β8'K}=sH~H+"ѳN?L8T1uFRD8 3*!q0c>$D %[tRL4Us`2s+kִppr''Ń&:QRkHk,UG,MQ"JIVJ9y |M98q2'CTdC($?$TJ)ƍ͞j-.T/~۟t8 bI8oDĦlBgvӠđi8|X àSDT+pE]W\ۏ#]?~bB5"ے*5$3duTs bJjߠ(F5K~7X77oÕ࿂`4aR5FFmIlickYaHSVU,UFjkVc)t3x&87e ײl]s۸k=`Ԃgsc9BZG{!NϦU1}hQG:~mT/`Lt*&L(ԳE']&N1a 4Dk!׷V⮼B BK>t%S/bqm1SPrWmP^#'qLr'XUTjj(¸%]XFTBUe@bp *`H,hd4D2dKPVWV)KmUAR*Mg(A>r&@;&%)HM;pK`t.tXExi]K*4.M]!dXؓʖ\Օ  4nY:TONS![''cH=4]ﶇqzFk{0hoo./j20v>P7oCl}.xa빁ۇk{1]8Ķ_ͻBB߱;p{[ϗ3_LH5d蹝G*#W՞mp?q\9>nhˎְ-ÝD-OE|%~q$䕋hL1zjZn\ ik~*}na=\YiyҾy; )7ZbdGQfQb=H 7V<=Fbg Oa ]'>%}v=l;`D}wE4H@$8<|ԄV.N$磴X:sL !t(= pzH 4KOx0+/VS(2f')8xQh`"*Jˍ ,/@~/V.>W#Hv4*9#[[}opH CCy=頁S < Qna`BFY*Fd;oO G\3=X1<qG``~!)`jz3xe4#|]^܃pu_VҐB7FLY]T4aQI~Lf ԣ[G27 ;wNEhǑ|i#SR7颡,X%BYd U !2U4ߤ穽R1GM2㽯{QdSaKkbgjLȓG^.?u' ē*Lʼn=ָyF9F''\IŽ|[13VDИִ.i>lm O=E16qJ4h=^_şZkb'E `>)/˜X8ESr}8x-&pl=D1V4V}IJi دݔnteb=OG'/O@|4/1}ŔZ'B .E¡bߧ=>̞ [;OΕCqK<ܼJsj' 2N|~9뱵k ]UWj y[;deȓ}h>O0yͼW*`9SkNSOK>:qPT )('#/A+:w'Cd/w~4<[h߬%IM5ւLJ;V/B!/:&-o2{`SA 93*Ql}΃neպ2j XPch61$䕋hLitr].%SHO|iTrAG. `|M8nd;&mo ;XSzRp|ap|3bJRrrL ,SxFG89PsԄ#VK^ꨲTW7#!q"\bcgz8_!r6g3v_ aa)xc>B_m&CRA&)i(Ґ!" 2ꯪ]]PZ-ATfcĴ=wL#Sb薁?,gK?-5|PfzN:F նGll! L۪eQGzSpΘXY;Qe<bZ *~7<.d @O9]|՛N-&.O jHg9cv>׊P#C!;1@A~}{\/y͹X&PrKtZo&B1YMo/sve.ŗ7,"Q 1"'1ߒ .xEeY1kcgva:ߐ?t.1!|\t+K "gϒ_ݓOa^EHՌ2[0\ILI٦*_|˜o~/)G -3BZb*7=ʝF[GC;Wg匳|M)Ri[Ɉ`#B xC6b!wASrI[*IJ ``8C&A* GFSʾw @,r^F3,&}O;ZdM#t@Ydu M zVf,XGe hk|^ hF˄. 2È< [35Q릹BQO&t9|r)UTͤGMg5l'vCoGNڶPu/ m:CVH!v՟9ڤ P2DxIh*k:Hfm+x)ՙ!m*uPVphT8T\9pe8t{ rޭ0E.IS.[#:2eJbU塂T ]U;Q{KM5;S+*mHȑh b{jaR6}]ʮ$jus*NF=?WŘs"sК`h<-P|3K?\G?cRɰαxwlz` ~y߿̾X1ם# YaTgYwΨ{b@$H˘47+tt}I&/ Kk{m'SM@Նq/QT}^hp5Ry=~UdLYJexn:wzk\h!i/ Yo>\k*tqzQ9Ԡġiog 9j-V5c0d]r޹ *.wFE&h~V&={,j; WrElJApE+IL幱:- a>քT!)~IW*qd;)|-2eăJlXJO=5mk&i vSv0&칇US٥[q\#Sֶ묭|c3[ wBY /B_Q cM ~/wxcuO]^0tONU o@yB'A7~ﻬ5=Jݻn A**ϭ\*x{_c6oQ#O8HNw!AJy ArGΚ TbI? vȟ7TP #% 笢G|#QE U=ĩ8|f J$v=Z myE{-1Μ zє_-dezKfjZ~,zVoCL[^ x+RR\}6=>:Hw>gCtIN:pD1US>4oG.tŷ_{ޠ7 "^SGlq8B -) DIk(U /Pogxk"D}I)(ed7JxF@aEi߽[oq0Myav>do{_.tǬ9_%w Twڈ90{GӒ _@`!ldapFhO=ۇYT[Bc'AI'bZ0E1.:+MBN*"'[@_!VYwmьyųnu7W"RfLC{#Պ!P}Ym?~|Y;sZ&eD& yqJC,fBBWt[Q|iy9mkŧ:Qkfj{$:gqBx,Flc<Rg/3okʠDpUS1bPXRz Ab ըõgqF}D`^t㓽EԱ"0dbAPSÆFYpFD UVRSG !FqFi( Dbx"DB3bOD!!*YT,U@w&2B1_'!/0,ub*$cX=[L5ܐ-zd@##+Q:C筂Ĝ"ssJo_) S!6W\oACRj 0%RƆvbd%CG]IpׅZX9~` .= feBó!BHğ^NĊ($3Ms&(`q`-I5 RYcCQԿ!!js5[R[3Cin`3vOGZvʠ"6 Gmp N}Aˆ(-CLݎ֝B Chv}tWAq`k hPjl.q)aL[3"Rֺ'!]BSKV/D 3BἎ/%hcM)4Az#Tӷ" ۟{I!jD*hܭzj}s5;_|sP8Q| ,!oo"h,Fst1vOtُsIɏԣʫQ+3xfөt%FHGgm 6YZyd)Y#eSUI^JQtKTA"PJ3?l$xވxqҝwI2ZK%ܛvWCV!Y[r*&+Dw7wnN1>ӭ/J:-LD/L)zSG'W-SMXU@ͱ*cي& ~$>?RY.[ƻ;:D L e@BLIஇ & ՋPP)Qn?T"Ғ'q?HL%AS5MV#{;n2I0oyƐ!Wl_lߤX |YCgx7͟7;xU |Y Rgi[SWS6كɞ1:lLN)8{K:d&m6&)?9y*[&zI1N(ǃfFo~{ًfC<4ف%GGʒєkx,rSt$nEgGʢL2\:<8T^;?sI $,^J^ lFRZOשZM}Vva4 3&hyrc[l0F!C3Eɰ(<(;ONhJnOo?3(건ne˷*Wc3kXicư&<%2V[}^Fl?꼑XpT R`Y0(R~*,{U5XL3/%{8xN1#Q # xF9FbFA2zXCfџ(tcC:EA㞏et9uРȌp~sܕNeҞft:{Fq*<Sx#9jHN|խƅS#ۧQFdEkhM*o)uIY) N y=VMK"tE&QQ| ~.EF/ lU!z8.Ƙ0CśGsw1G_e}rųs&}T]ŗ3^&ɉ>LE! (BP b Pzp]s Y(g]ZCqm#G@H02qcec%B"=JZ*BQ(O c̔ 5#sDipV[(6F$CN○"53SRi*Gy`XSrH*8jrPMIǽK6gR'B`X3B@nwr֕ ,8 4.`BAm+ B'U֣`>IXQtB@.%L-$E,NEWΨ-0qGNID[`kiy(r{mue399lgŃ"/K"r"g03\ H Q+V)q3u۪L0T.8 mr;)8Ս0,-qܼ)CJ!D(WVz5lq[fu~'j Ҷ;6P Rk#E 3W3RҎjԚ$Or<\r$" یn-A%߮g۞/! 
90}K5\;"XS)MSʌ_( e@d-I;(W{YIp`e–`J[r̀ LI}dwx$^k sp#G704Cw 0dݪ~XV654/9MHt4zKژQ~cr_>|6SsY6FKto&>tO^TXe84^m^|#{Qyq>kj.>ͮ.נR1M"dӑ([P!+g*St5dTJɴ@FZAnVXQk|(D*>"ǨboUc>LNHTk1a%SSa08ɜ!d3 2$JZ10"Ǟ mYnX_<1{nG769ٶW_Ι )^yS[#AႯ0\{!ô{ 8fQYdBWdzbʈT>}X1b(lN2=)djU~wW[L+/#N}A8 >Ppwx̄#$wtiTݧm_%i-81x |osZl!ժsԴLdWT4DXfwk%NO>1 QmAen; EcRI<մf=zX6$iT]іmZ>8|cb_g^j'|Ј|1F苽Pf$Pn[ZNꜦ{k-$=q?#֕9ϥFΜ0~C2ҽM̾  8}3"'oD ]YZ\HᴘzSѫOA zH+2o54߽_,ٸ}cDpTC5㢳0Sc2-hiK4^hijh9\8!}S2A{|'JnנH.% 9%TH:%GH` a$tPJ D @) Fk5_!5#+nڡ5_%nTwW7ܼ X֣A];E5#ju C-ޘ>ThR c%7On)=9( SYW\sh`EP d=>vmJRQBAoucֱ{s}og|7ڜeg5c&c&\1)˝j6-j{;f vtswesy Gqx|]LQ.Rpxv$,sivM>QvB V~--PE!nr\yB `s͉TePxقb[eK b܃d!ˀ+K (8j1m@Q8 JP?P>۫֡L1Sti$8Τi1r9ƨ^瑤YE'2t=,f fF!ŭ'+kX+,uA`^qJPe!.G dAuKMhj  f ^j=q伇'>0AX͝%*ԸJ6.937]HJ2~:p$=|˞K9D6>/ 04>/ֈ1E>8Tqܼ)lN pIҔi,q"rYyY>]r{\6Ͳ3c'pc?vJ#6=KúCF[bw28=73 } A;} A,7& Սܘ!b G^O'I oyƠ!תCiQ5Qe4@l݉p+SUey!,*+Dz7wnNO[`t닒@61Sv磿Oϟ]}jZ~egwn&pe[1ҘZ=l31PUΗev`LG7vi0˾WcAlz1;A'][{_O޵\ˉVE5Cm[)w뇱%*2e4y?+VINYgyg A=ķ뻍+|ChE6 eVݿopnD"x6 ɞQCG݁#HJKKD0>$-yUM\ڻCϊ#-y;K{eOo͸ Ïjr#Ty|Hյ̏)E.fS{ ]ޙ1W{P`)(8cŒcmrI}sVՏϒ}km+GE̬b^d<4b3AK ^8!4߷(uѹV$MS"YEpz߼=?`בPuȭ)J? rfXRǘ &p6YqҀ԰_sԺyk_'E߃_PaX–f짗H+:DIgSdg9L ).F+}vU1Wl}_{A\t wTXXTZq ڌsh9SR?.d堲Ae-lŮ< ;D۲I ia$ITE.$B /pv|:HwUj Ȋpޜl(/lqEhoxp!W/x蠗<+j%8箵H:ixu(i^g?˒o™FL(.qjH:m)\Ӵw ?!QܨHJ[o%!Q:ۊ̘i#vNl &:d׵LEw N5~9wҺ$JR!Y 6JNcHHx *ELqjn9dAÖ+ y.&YLET1EJ([jM$"5Iy5M)A:X +ZDNv>$@4K%6p3/p\&3DR]\2s)6MT^sF oЛ"D0D$3UN4/8c: BCJbT{cq9mb DiOڂI9.uѻK]1 -7(%d]J }>%[ IɆ=bBJ4Fjdi #('X\@CXPM+RK6]|bO~ߍhqa"N}}dlr7n7^>4/f_>L?Gg1Fw%zn[MclIQ͢y^>?)&RI>#rO=,4[֨>\{檔MqZr~y% y*H*jînF!nM1(Q3/1pָ[UN=m7J1CO?:km@ ) 茀 5u3>Ok!6-_$UǏo0}ikL`ؖj{.JD) Dbx" _,DnpxJ$bf>$)tC%`{|n~CJE*58,]~l]P|gm/Ǽċ|GE=a?c.?_ӃJ6CQQ\K? F0rr*]2a.]LUԾ}YCRŞ9I$F8=E_CAw  ڈJ~<)*qured4_'QɯU>kxcth{]9,+

{eZ~ 8B`Gxcc:l\+Ԡq*0>*۽b?Ti"1Gq eIWݧ?}AF3.p' EoHtn2>$"'!IYijVkH5]w^6nWB-FJ-czZ#+Z4#: i#lJPq*YF n2&>xMa"Ɯ[DYPp%T򜊚5_V wcOeb 2vW[_oi ëY6K`H+=ƅq-v 3]- ibz ;40nctc J- !5Ty۪!?RPw:Vfb=^+)چvn6d(` I! uKMvOMfi-5M{7sȰƙ#Ѯ#bX>SFӐ ͫ͟͢T!5M{gr }Sh]wM.؁ q~b@T7ף\ mǗ~';!898z 19DxR60#Svٍa;C ^'td55bK%^)w1WYd]|m.y;~}h8 +ܢ;;f'W|X w7E> ~ry3v4HS.!$% k*M(0`,DK$AtVtͨގvʰy?I_"JiNU D +14/E^ۋť1&zIi/B8o !-i4p*U!/j6|˜ clVQӟ?.4caEϽm^عdjBW˅OfGg>!ng7@Dm bю*.2uR |!)+SD0љpϓC' |1Eh`%~ho~9XP$4+ $Dn}HQZI3k"\P)}́c m}F~eu]VْE㾡MDk5NI5(].˔,-![Lԏ+JmZf ұ/ϚpQ`_IQbu{۪ͥWL*RϻIߤ svasZZR.oE}1"R]姀jx/I"zo˄/[>Zc)!*@eGiu\uOHڙ((| ٔ89'ElK_r )cՖEDR_/xSZ- .pwU4Y5'G2zg/zO <tHB8W#_d<m(eNT}i8P Ift*1C ىzt!h ߾\߆3siS9x!g]v=B+ԸN |LMHO#z5e3V( ޵s~jHw?D alޗ–K\FM{W)i[E–Mc7Ѡz| c0kU(ZRHKY뭈͵^S\rG̶5 iKA biʴP۩ia_ \٠np"ꭁLjR#kka 3%d>ZiɘQ"x&H..sCCxH'/}n$7nrtQ"ѕ<*G$ ]%!`鿚ƆCwCiMxAT!MW,TA+T$|̂JV{N 3+Sj{Mꍀ%酱C1$a($=V8C ("hܱR"<\05_5Nq/>==" =ChS;}ɕJf +V) -,dbĿfPzp Nn@&P1(Wig;'rXQޚϤ(1鞉Q]tӃFnWB%IA#N 00Uk8; arzr(H 3R}51Z hu M1oDzo4f3 ~}fHSҏ>=w'ُz_db`L|'F?(]N &>_]]^~w?ٟ!ogw//ヤf֏> N3@-PO}}'T*z.ټ}iC*>dB7Tr2K&(W-Q%/Eg,*\%xxcK,O&gq)J3rR-uԲ YfbÅ$Q.5Wݴw{ne0_{~XbHօ0Ě`)JɅ]>[OG1bf=ϓXܻ{nv p On A$DzoBb6vfQ1Bh"w1-i5bc@AfkydL ƀ-iS#A 0_lGxkGw#jF*p Mk.$qӠH ޶ py^_.zjk۸D4O_ʕݘfՌ[ھΔfȜDطLn,^GMa ?ٴ$s'8/&6QWq~rXQYk,O9͂"(=|1Ԩ \(}b#l<|]1Vh;#$+Dz?S6eÛdozwUH}k'k-3.z`^{n0rhwq[ޑZ9lVK!<]QWD(m$3F,7;M V[Y qD^vڟUtKCYQbȥ>9əlq }/Y4h|2^IIhх`aj%%~niaE~W!_~N wwwu9Ia'>0ekJ짳HpyeqZJ-K Q8H}Uʈa9B<,n5lΨz{JZJ1e*U.TyR:XH VdY˹vJBFeLva$P"#GAeϪ cE=5,зbH4M >Y)K]5ܣ]fKo?Ml(t?Ǵ_)ƲQ,ra;!wG ;ns`w|P~0\=+Y.bW!{%dʟt/>]ͿB}\Q/VZUVgd̂blSv-g>"?s.$(\EνV$ ~*7xsd{?M<dž΍⟿O hGgph3>(f܊GNǾ{oEG)kcߍǷcf] ,Jp*C~5~YKP$+/$*ƻW&ju{:VE)E0*gX'} !rƴf[|+rL`S4lFM6roRYNoN3dVϊ !ͫqI4LdCNJ $U1:RYp$@b]Y"qzbUd ПIsw4x=ѽd:,K/!#) rBg"Y-.z2gn3=#۲B?" F/Z37"Ϧ7q(˻"fwu]|4n tV.eǤ.Я%q6!7L ]N]} *>yuGiz{K E~S>yƊ?!wBtbwwwɻ;;xhiL>ۻ{kv|BqXqb g*W!yf}53 $Y$8'A <G)[I䖓#qOg_>4FjFIfzTZ1I~dDIܨD\S\lU϶T c< ; ,Bx([ f}}I~1hOQW}v EuDbp>yfc$P\ b+ñ*j@غv_ 4 ^j"*h6 1 xBXHG' dlS@G6z@n:'kR$i *XI i?PWyR2Vڻ: Z -]Hi= ''sѾȲdX9El\U%W<(Kzdl#~i zH1* <$AۍSdhQk+SD#H-1y#ƒ NkH.3Mv5'\˔.{.#%bc&1T7z֓Bꃄ7.du{_ߴ'ό?IO1j,^&"Sz_Njaѯi뛯ʷd"_t}reA_4-J +2 :Xv."Cpn,g9JxMoZ&u#]rϒLˠvOr,ډ >,&\niЊu:c+v´~zp9tCNEhZq]T^3+@;a"-Rg#UaiUXkZ __j~}W4د )ܩzqCU{GNlQCE[ԃM2ɥ O+¬ 6[ljtMR2diڜ|)YV(?=jaKU-UXP-ʙ]rGȪՐ_}s`& "55q9l# vl1%4 cb$[sLEء`)piG$ V9FK1V'k,X6p+Féq+d'\x|7c{Wx*3@s $D{p$9# Ct0Z $!GTBG肉\G J'T&$|VFD ){kP/=Z!J2Ĝ]`!jdFFyH)!i1h/Qہ910-!BC: ppkEyZ_M߁|Nj\>$w=[^ rF<1uYOui QmS!ږMI{VyWGU4)f[):@Xtl⁌4bAaH(Z/Lb Nk .;1OǜQ cJ9+^A) 'w:^h?`&(|GPeu;3Ap3?+H糰ёr-s20젉:gAY@^W51՟cEB:X)+2V^Pbfyš"sQhLs2!qĢb`ɘB s_}9Tc9>uJv,nY<-T]dLcg; 聎|6*C;oLiI@Vץ09RI(TL9|eZotefy-OZ|ʧ s2 q 3!&`&kWiћ@+0OB@.2im0 Skҫ$R;E!xp6R$ u 8>:PǻKkw:Ec1?4Ylpx*,+mE7n x^Lp $2/AӶPeYUKK$gg)Q,ځ7f/7ʣG(ie:dh!eŜG:ƓJQFŤ\k q8Ү7>̺ƅC6PR$hd@Ӯqj6gK&G!b[Wb1RvS) L'Sk>( oUgUVlϏc|F VG=zᳬiZI5AMEGG0O*g*Yev1jelA ɢS "u$R&97D_EV b-A]lF[wFZhbW-)d}m[\;#vWH6¥E} *"} Z'o]wsӊ{ǒ2>7q&kw)v@U5Q,׮啨Y:n[k2mf};0n4K;ޑSՎNJpTTVqv|9Ky΂^,Үɠ,UVMhp;~ !"oOhe߸Û۔F @*,nɋ˭3!@Bseƨ\V*oAޠ)5W\h/J4H!Pϫdr$H(JE5xm~eV(" OU2Z%rFF]t (JjwLA{UV m j7_8rt0ʶTzzdsP !{vV'AwvL14Z kH|K1Y49g!AIgJEKË]"u{1P xѕ :F%Rt䴫2 vYi-Й`Џo Lht}=#B~ѓ]8g}iZ m}{wlY0b^m=شmiΙF$F df4%S [5C&ْU%ˆq^FreU7(I4s=Z{06&邦iJM2[ȱe`3 Ʈ8Q)o_x7HpT9 ?;4ւ QeISjsI)o,gxQ bNRA%.bbI\ $it@UrXeNƓ S\I/wXu~2z.=Cw'qlZtusq9֍bԩOntop)cR5WuFOˁ8h6x.ä\Gׇ7sYhG&DlPAV~Ҽqr9&sT-Cм:n E7RmdڢdrV5IA'Bx6N[YzTyp/$ A'HӔ@P:Zx?ѓG|h*壿;^ߞpф2%'~:??OeS >zp}A̩4dpO+3J*v?<xG|Ze`K7M|z(+n>VSx]T°oPl}2+؎ ئ4tAfj8o>86ʔ$ŗ4|S ΁Ý?ZTDH,5rАuAjUBnR9[%7qG7׃GР%Fha߆fTfZLÏÛALw,9n_rx0Jw(8w^{s-UL^uʫ2Hؼq$>&T:i;{ | "px2܉PsTpurv7:óۋQEȨǟ0އMRX5<ᕹ S.$P'te;Gs"Bt;: $P9Kq rB-\d[ZxAv)pw%t *aN5FhQA|jA0U;>VdB+䚠*2I$'BM :2$8kPefHO>Kͣ `m9߉a$Z;Jn^Ul(j Vމ 
EIYOFA B$AN}`f)\n< ('%8Y±Sʈ:TB2+&x5/%2$sm=&v >ֿ9X' {ejPM9f%D&$2ږ`и- 9a\IͶF.@i\ 9ɼ, 9ю⁵ JDm"Gs01'əIkXmJ+R-8BzMe($(HΐƂr8|e': (!`XJRFU`:]kZ1`P \E5)wXY)֭N1*HJe:,Ւ)J)*o`uq|6TXa];mŦ~X6ݠ\rԮaMX!Cu%U)"qe2Yiپu7f{p2=+%gў`ӆZT%7yWX =D˜η7ag(vOXB5#߼eآ$]&>߰)TSSO.p,BRHTs<ƥRu9*سzT1Jg{T,wT]2H=}K[6Fh`֒QCkϦ-0EĚ3 #d@-F+3yչ"wjB00erG0Vtnc^Hj#~9lTSvUG\"N`(UpJr H JٞR_gMPif2qbAyX!$v>`H.K!J>{5s`|JG%>養%G+D+ݐb}:%,SJ৽Tfoe( ;J n6>~tf]Fk%np x`Lk;~~6Lc9, '&YH쎀U;ak3m)-]ػt]U+>u,cx+E4[ :Tl-a-(!a<m(p8!%da΃S #Ęp!:4D vR)T >"{;r2ǐnNêg SҟԆ-^OG~zt_}kWe9LP8";c)[y!C\@θc1׹c9csYWݚE|֏X43/zFv83hi|0E8>p O˶CJb{.-3x@,[{P޽wQVWټ8n?nH+v2oVk:xjx9P [+MBjsYmՆCFaQהY.^F>u=y,mM{u%Gר4ѸwX&[f ʣ{KlCu8;5p{;pK$e愣.p{p/w.l};<ƩR=JE~م(vy]X`ҽ}B$҂v휣9Ĩ˻{Tw󜹠U8pE5av|ߋvsu0aBh?"*6&9J]\ nmu^>.nϖ*gKgKB!DCvpyos;Wl(ZĸY 7Nv9#shl ]XBGY ]$$3|t#˘2')m\~u-T<ŮPӦk?wE%֮i}Q\`k-y6V KjӜNϴWPs;NV"SA jĄմ~6Rb c%Zs:pPT(gxhR'3GK . o1RK'"k5ą!@eމz$MFqvZFPdº2nmX-gZ2 23NN߷OFJS+-Q _SJ-Ag^)@$Av|0i[n`r^ D SBU% g\u)? š1xCbcY :1&lф+ y.'5Tk͢+r]XqP `Mxk&?hM`>N<7L : La|)Pd(3j_G;n&ڦ|">p ӧ2ڡ<]t/zP} 9,i(U&Y"3/M.A鍖,W(g9\0%]μiW{gruSsR9mR ,`*蘸/ݏA?0IxM~fK-9{BQ E}s&|?> 8:+ͫ*==l*ݏoՠY OX52,h&o5̰bUr@& bZ.b1 r8)3aBx%VJxno~6 _ͦce}ipKXH9`B 4 #s/ؾ7ӘT&EǢ9,xY0'"\"5`Ay$Dg2+MEPM0+$NT{s%(d6h#ƕhшUzZ_4kDk#&>IէQi zѺ#SH $M/GJ&]R-& .ddƞʹ4? Dm$˟łgG0mڨө `eH Q.$`\ft!ٌp*:I0x"XnU.O~:xo-ˌ8ڵ N<Ÿ!NcyfS19Y-;J*ֽ"-Rw|ֻ>6O#Z{ISPg2S2Jhrgʚ[_Qe9ΝTNeyN@<֍-{$,( 6uڢ@Foh !9er!Ls@j"RM4TRX#Q ,Q_Y9Ih1 B&AC ɄpBBj.z#/nƥT'+b@*{| Bn_.E1%8'Ϧzot %-?'t ʥ0gq]J%x.&t8 Rh(luZ~v8yTf}Q ^d,:ca SXs.;8S>wQ9!7aTmcHb3TyIEfu lw{2/sp0V_s8HN˕! v˴xa|LM6Ҟ1_ 3B=c۸3׋BHl쓡c>OLP 6@-tTvBPU2(/xYZ:a7 ˷\3wɻpY_q KS#gtq3ՠ׈g2˼FfBBT_S,_v?C= Q՛ŧa5H??KF7f>Tb\).d@07 "O,$ьj5Rixayyٔ~㇛^LJ&G:pzc,jmD.+Fw MD(Cag*7b"BN 2^Ɵ^0OKOƨ$ ;Y${ {*g#.a>Ns`bl7q+4 )L\0Ru-g䯳Q\.|+sf+NC?f}Nlo,B -Wj{vA~wmX}nt]f,&@s4B<YSӏvtz3HOg> 2sZgٻ[,^xHSd {CPrXgj4Ua/Yhf1e"y{wCu/KK1,i?gg4d,Jt)I@ϜīLs ,|KE(btA6X?@m 9t('HEI@@dX=cV=k3&dVF"gDܐ{Daݷ?5Pt˷ JS ,Ro %XJYS(%mwCФ}-) Vz(vH&A'EѱfҨd۶F; d+Vl,ttZ XBQ&{-ܩEIf;.(\k6k!>nLBi-Aj]8ny ,ۉB& zw}LeSj:p7&l&ݩzR*+AFrVL"rWMi@)5R0މ.e~ "Fk;-ЄF&~i_YjRʼn,p(8 O_+2fY9K5; r,a;4q@Q #ٯ>ݍiN(N~feB|=i}.E)bB.]/D Ӣsw#;%Gci6wyd(z ^yEj' 8UҮ\eL%<-3)J8'חrs>3Ƕ/u^oUߨ0ЊTuѵ?# >ӄKNZwv Flk: uh "H< ;!n6G ֢IRhzϬdV[!D M$'@ɦN'} OpYo*?hWk&Ufs7)B| scFIK/j C87xjIJ9`!l$e蚭D T͔mJIoK̴Nih 7+$?Xty%# k?mQ3CFڭ;s ʛƫhnFMdQV\ȏV_QRX)}L=]Ӻ'0`GGZ|Iqqua$OdZ2cx$?k%p$Zwh%nvIH N;k(:b^yKxzp':S63bъw @XOp U+6QiJ_kpK 5] -FRra%ІI$gZɶ":AW7vdg1u{seX. ?|sQ̋ vnyԸn䋏~܆{]W",;dƊ77 S9+.Ǝ'߹Q맇[`EC#87E.AWX9EB)#5Y! ^+Dގ TWIEc!ZбjPt+Bdje愃"QkYA\ (ڕ<1E^ꄒ_fb hw/XMU5 hea'B²\2"V Wus* JTiBIY]K?! Zh-IQyj0 ƍSRgC6֨5Zn}xB{m}8D2 yFh¦ܦ*$ф^#dkPQQЂHWBڜ0b^**Hކq6qZʁ|dr#JSbREZ[A1P(Z1JpLغU7 e BR5O\.~=?AkMb:ʽpo1CYnY,٭ZTj H*&10Q-zQFx&hg g 塹jm1R SV 39*й!YFR(g Rrf>Rz_霓I(]"q4!pc'$g~s;P!?x~ AR,1>LM B=a & bVe6݇5Bыj΄#X3MKDP uDnfBR&ULI oד: [&2*ͬ 5&ύud9:gDYiFwË@?8gdeiˌHk!Mh$,)EN5qs9Mm84_'֢Dghf"JZBph؏-wEq= eDO,E:(*C{VJ/b5՚J?o]МI<]ۑGyv躌wY"N|^|ar7:'g% n+N&a9ux]-'r\ 7\ \m7IK^R0۹쯥=o9 |{Ud'!?t7q2 XYG/aN5aq>K\;ּ㍰˔a۹MHgwYpt#ι 6be/yјp?.-WoC\hpQ4l>]4<@nCa7g^>9V/=ؚmNGI$q4 Ku/0Iً0-Cikn!Lwx+R2h G,%%G˷6:.f,%%GvvH;9LrGL8_$;C1.ߘ+Cߒ7JՂWn ܑE%j {ȖLPAEbqI|C8L\%mȊAX^ڈ&>p +$&Bl0Bl6xv ?|$Vل7 M's^{{3Ҕ[̞ʪ[; @dW ]>'W8E s+L3{DBZ)MΥCd8GFHTkc4$tƤy bf? {ϥhν)7A #? 
dlaxi>-c"x4ՠd%4_emCfHXXr]Bg^qFz^][o6+Fn]b13(Z4_ۛ:q83-ߗ[%:+ofX!_}}+ݺɮb8DO~zu~z^uyd%J }'@@YLcԚl޿D# -sp <ͽTl]N2;!x'2_iݵ!SӸCd%64]#%!Ѱ)Kcp[ p^HݾLB7dĶcɶ[ց\҆Wlo3LaV8&жF[Nio~7@ UUdl['r0řa&&F$Jkjb Rۘt ' ^MS`Ĕ:' 9:hg{S/^W~qĜ̅!)mu WHc}9`NdÂ3\$@hRcLIA2FW8 gdMscK9il懹> _Q+5evpN)ePkNg;L ?5%|g`FJ':2X*nvs*»iYr<Ps4B c >8Iˉq$M# k3JC^$7;B5G1xZNrR4U,ILf!ȑHLbDJ6na)_$YCVF4u_0V ?'J!$Wp aJ;C*h'B 4n9M:"t^@@PɅ-?>LqFٖ@m|Jp |D ,twL+BQ/Cސ{YeXזsA}H5JEHvGC(‹@yWsp!P%erv\&4BZ|/V]5fjg?90+CCCళqpߜO-~⦟mS='і<rF3/|Y\ly~mq5(tYPߧ{הve}nܞ׳_5 m Eo @.:2m,'§ qȌQi׌Rsb;b^!.\PX/I,D,Jkx( ǡrU.S>T`Zg -akIMsxkv~w ?X8KՉ#D+rBdfZm?M"Ówv ߧ)]_&Fu=]MSl/^қ&zQ7k]7!=r;֯1-O44I;SŘc0م&污E'1VH!-()ց@uBw4Y+/x)(<&.o()%Q"{LYOD qc^"/3 Ns33U>dxM-\=81}wvY-.fbU_).Veo<ÌR4|◷$qX3.Ɩկ?&9-6 ']e_;, >iqxP.AKaO{Ro:X{ і>YC#!H #N =9q\KZs1sOHKb{\˃Z?P.;oֆ/@Xݏ[316 qBA$!&EMT;UЃb/`8%pȐܵ`|]?w|32v5!o`w6$9@P 7 .u$Rch1-G!s #c4`s\4KEBu}2>ٍ$$/so@KmGdT ߸"R&PḢbF䰁kBj3HvǸ{m#c4[FU&@%ۑ(d]Qkbk%CҌSq} l7XL>d$UN"WW3cˤT%H0"qH7CY,X0Ͱn1h%x&HB)c&Td3nK]*CRY"J1U'2IP@aOK@c   GJ5DG01ƈ6J4eh+f;L mNN3p8+,p8 II=775լ09,"R~jFġiD Ŋk8tE*QXͭn:AFI}(V2!SVHXc# gBv"^z [4;j"`8cw8b$ҬTFqKHHps(&D8cK%1  sPԝY5M}I  ᭁaV8!"1nW P2jO8F>j>vՐN۽ߏZe (a52E+ (}ԋCu1cJ8 t6g_Z P7aF3~yѤꞇTADwtx'Z<ݷlSs3 1d{ Ėt D_7puv z,nZ`&ۭDϥ:;_UoAd14#k3]:,6O>66E1@8qw+WiVMee R{ +_eSU:P?}>J* !,a8OJ8{'me(ƈu]w)[٦~%~o`ؠ`Wwkkϫ^chu \Mbl^n?]{bܚnmgd-}Mh.>O㹞8/p]يf3,K>JCJvxyLmAh,zJǸr ƻh> hݚbuQź xO)D7ָ֭ƻh>% $`LV2 *]8vH2_۫AAs6}V>G\X.b12d`7s a7qn]_=vc~tIc"0>F`.WkiƤ¸@1JhXtAn (}/6Bnj4djCՆcOt6 { b~%Q-f@+ k zxd-O#Z?T 91ϒLRδ2';b`sVY2~f?S8C;INQ88 i;v)B ~F%uv6Be@[ :"s0j]f$?SF׿,gON`q5ShnN,aTzNbG›OMg1V ZW}6?>* iZLZy+Ciuvw!n6;~o/_TwxO@ RJ;Ƣigx::Pbj}Tksv[+$nxi}pO$?]']}D*U F7ʁDE0&}tS[fۯ}7~(ja ?~1R$dڪ?|w+,Q.~xSn|@7Z:F f7Ni+F%@?ic WeoCLӕ]N/>8{Z b7ͨ'KEE/ "r~`"Zh>*j:*Ƞi=_|s))bCEޱ ܸ)Nz"'+'\F YId;JU6nv@`f>ߘ|tO>IBEgx~e@K#Sc"˒ӄ\*Pu˰߀oeV6CܞcƏZx]L>b>s_2; TfH[K#":a}V9s45wG1%j[dsEqT;1{R쏗Ar~%l>GQ@ "@~$ESCDDP렇B\T1JHGʮ!\GRI㧍F~o@vpj w+ nX5E 8UqR yGd$856oxQ|.F[cjst:em@ ;} WvC<498Ma :B5:EmT!Jufw sxadMIB&~9޸xfBҶ4nR"OHi> bZj$0&ɮMbL8K 7ߧA BX#QWhGXD&Mrb?sêTy!rwdՂ{Nʶah}31(CRXb3b!b8MT{F,g6RWKJ! ),APeB*$3Hc`!q# w "k}ycn ֋N?5 $aH>!إt{pS0J fU p1 "D&hY-roEZ@4@)\$AC155+|;;op)p{k&!AuxTjݺwejݎ w7+]g t2z?'}zϾoD Q㻳jq1/ s*IƇj|ZhD$ץbk#T$F TD bظA1 :, )꾳-qʋ B*)v4\GWKejG bPW–`l ?Ľ.Bfg-hWTz$gcub `@A$E x pСJ=99|l2Ү\POȆԠc8mrʡ|BŸ|^H~`h;B'2\ Ιޮ:p1 WDJvM V"`3ABbaז۞?1f=+ֽQ3*a߫i1mb4%aC$>)F3oTb#p> &6b,teLQ"4²D,5E`'8JUy WFX A(܅\ ޢF'\$N @(JZDS dvDh׾ppcݭ3+DzdceFQ%8MQ!`,B x?#f/_~xAP8jf:Z]ӝ@u-Ў9椿B́~бGu$B-b `Jc |Q(91!IFPBID*E'tc&8[Fem`OgZR:o=CN4)vJuI)$VDP+iZ|$HB|SÊ9NnC1)>{(Pi]huu?|wZ@G:ezP)8N⚐iɐHrT2 E9zPTJs\iL&Pw{ؙ)c:[ GfNXs"<ǩEˊv #4^P->o:/K'\l;f E((F!1aZ(E(n5jauFx,2 +̹FXWЊk<>ϵATm :Ž{VCcH6Ycؙ^i)E'lXEĿuU܆J.u;kAv47@ %_[珗h Ƥ֮Ovnfݶ{s;rz\B:FR3H8#^ &4fZs,e*B8MzqVjpJ,bGQ3Q `+\YX$\+#`Ph 7 "\h*{02t&Mɜ|vR f;Oxڻ1qUr'wf>ۙgtq'SY缿dfm& c&k%lJL_ܳy5DvBBEj/zWŀ*҂޵ߴB7mAi:J$ $ROO("@@\] -;()6t"-u0s;y(θqTƥg>J؅?X/2zřfJ=w'3cX`h9B4;]m#QPZ)F5>zzO.rYہ.\|,Sdh%ƄvŠv;(Tnwӊjj68 gQ/*|$ \(FemJbB>dI1'\$A!ʏ"rώ e~ 䡊UΫS9 xL 5QȢXF<&\Cmd^`,.Su +(#5m88"Gx"L-r2#ɹ6a_Žf8 YݿsӍ`_xîqbѸZ}i(U|Txf_Z$9qQG JRCP7r7tkѤ8X6srZ Z܏JwiI$d AHԤC=>M4HN"1t&!C|,F-j4r6O:a}L>L""z/`x26ϊ"/9v'Wh}AI0X Q3oO 'Hvz !VtX_\0Ebt;_NQ>(E;fK! 0I%r~s]`IJpMl k>&M.=rs}0az[/&2XFQԨ G) w]K_xC>c}lkZ Stwom`v*R/hTvncӠC=?h5 &mKǎk\wJ֝(y9b{R`xǐ!W;nO;¸Vm;H^Ԓ$܃Κ0[BRk\=UtW_o'7iyŊG?=_*m[A)L&0d<}, ʾYugN،rɦ28$1"! 
&TPpkژ^Vr>0ߎƵLD\@dFJ0AW7T"| o}[X `+uޙ YQPߜ}s(Rbr*'Τqڐx26cB+YS(FA 9m j} 'Kk%ev4/пo’|~ekH9 2HnƎSͲNIv).%ڥ^SRW#7 %JD-3X*P"L26H#(FR3(˭~n`=+n[y.oLZ]flLrIs #»S.w_E1;ir !4\u~i['_k\믋EiӮ&*RbA G`7Jb1ecTȦ\jK姳UרG;ITiq<ĨT!l#$$B)Ҡgʚ8_Ai<>Yv83RX_V:I@ 5   n$ ʣ*+ 7:0v%&p%rWSc̛K>3sSڏ"d?wo!~K~d7i1Ѧ!ZrZV߽i fŻx}W0PM7Hxz&@<"흼֝?kLj1IN?e3 4Vo| ^,h 8HZiZ*У$HkW@a~у9UZfQx*,<k5­>/q\eu3.-4y H_%|[ &bI=s ƌR8 h9 $qY1D -Rc^gPu[񲅨rq๠51?W:C͂$rAK'Xx\m (J! l ;#x 与&]J-|XP>r#`k:TKք b^>Z{3f؂M;0BX:7tR~-VDXs]oh $% |?){׃[Б{!+JŊVi=6H6lN Bx,(mL?<' e(=$& ^|W N1gN5< V:8C9F9[9rþ@r#iE70o5`u/92{gNS6%vx Y) *CSRSg~*6whxڔ{V(T 9;^jYT&kd;ۓ bA* ID.SMʙu.H./H\hG)&@wy56ݹb_3m9s8>iŎcU䊤=|{Eg˖Py˻Ը$O?z,y&BH3ʮ|txhzqLyX oekʚr\_3ELivMH~ڭ.UDnhO%ڝv=Ъڭ y"ZKou,i7JչbPEtvUi)7mV?vkBB^֒)uvLvAѩ&mk$\mV{U[EtkRT\W-z[Ѱ}R nq]! J2 i'ZyOݴ@b'6 )tytY.oސR9,9 +ݸ9*%yJkWG*"O"pJg{l:ٌI$V'+LRB<ڒ"嬽R:M{z`'89/ȜBJ,b2f,$;Td[xLnλg_߳gӮҨϪp6[c-7+3}c¼Y2:2J"dB@qs%%{"!=ki1(eV  ZK9D0 aD  噥 TѝWJVA.8pf)c d*xH19 gy)bu<:;wW}iyJ|(ӧ;nlO?}xhἠ DnqQȿwt3h~kH{dʈ~op}ޅoqtzi! X|?UVۓ6J`ݢʍp\Ѩ`Dm! XRC?%Viꢴg IUĦ5}q;-ZT]T|㎞O%Mri#N㎫'MWbMaRZ;2"#by 1 R@4[W#&Cવ@\ [<)uҕOa/qU*F) s2(B;,qZR؂;ĝLQЪ` mR8 l%nhɥ}s!oo?w2 2&ׁWah]>óد2֚m)d؎VZh@)ޔ+R=8 O^_hj*N~$X&S eZFQ%;lD[s/W֟w"/#ĝTqh [R8>:jgK66$W^>Sd❧#2ww}[ݻĝ5}fa ŭ$Z4q<;̼~w۳s*-p[':>W0'hqXl;fH*Z͠9뀗zG93'w3(-5;!NF <L#3:Ll0Bkd 6aX+I1a5\I {.iNs1NQ&Rrܡ Z˹kR壠bRGqZ2'xRVAFk~-Vrߋ^om\4"9VL.8RR>wprmɩsS:('G[bۻ`68=5 H\Fz*T(m[9dtitͱv`ˉL=NچY_S8Xqs65YeY7'76+=L1郚OcW:}nt}lTPdJ%o@Z@z32 ^yFMޛSpw9م X3>X#7p{J.?WSY֦2Qk!mq|_7})3{x'REiy%f˪{xtB ڢrqRYypf=sǹ2=!^%i+r$W}ոk8` ϝiycE3\uҲEsᣙG06&[D!]s۶Z7~zj2f4@(=PWDı%vj$8$3rr;gzRvWBM^=,QG7awഉ E6&EZCu \|?\b8(/`\FFq8 5?3 )Nc[mgzX'CDzywt/QJi|~ǗAr4|;3#3`0a,@;1'[7~]' I< ?J' gYL 4٬Ug#@/o7K;C]Z=c?t[kQKWjL9)7؝߀LS[1\ U%J֞D<"UYΘDI E5 {RF(22PdX)-c,BN !˨آTaN09#z>J]EN_,*o&(pPϣ,rGFy4%uPJ׍}oB;ժ% x"pQW4 {ȽGSǰ(K,L8rGWjCX *`#RnV|pn$`PDJnEEA"5YG_(2kT-f` L=d%^X$h;ce t:zIͨK=dxiu "C #)|0!ty㭰uVlx_z~ykgGIP WU/Q%;UR:B!;zQ! 4*L *qh ZNGJïaFuӓՄ \2(--ާ0}jR3)5t WBW-v5>鎿/r6\r/OZ:lj:$z%x[zX|~BKYPC]pZFx]gOwOO:;UN1~0h( Bܞ :1xL͇ʍp\()_\ j-K }!eV;"NO}2/͟knVE㻞sd#3nLt< ښڲFӦgߥ(˔EJ@RI &.vn4=+淃. b#9لew~۳yo)!jM(vYsNeg=d}N{rt4|qeƻع䢔?/27j Ie l-m2#I!J,75$鵑H"` ؜{&$\H>XPM5_bH}[ ZyW?^ռ~_?Y"?_CS+QeǤ^m*Nڃa`e=;n1z0[~^>/bx! u12}O~sgf8Sy}*ǂQX!#L`ťNZͦ;5#LPyȋe!3D!쾱iR(RyʘSNAa`i41ZǀHh$wECX5,WpP!|E8W Ν"7QdÝ7a:]|ˁ7y91g_n\,_}ٜJw?yM9=hYQVHMNo)坮C: B]sxz4MZE "TaR"^&qRQEl(lޜ5$u' NElkj5[Ԃ@ vJ' 4|"/Q͔LG3sd{0eV窻Q* UTەH5k8k]qz5k;?::FY4"I(wZp,{(DpBbdHIg,cS)r6%܂<%*I,B,u.$TXXkA5fV֪Mw?^aOG!?X8  _4d_5!Æ!-)jvg 7ki G+8=HV5a? WA5wf>OAWϐs˞(@Z$̯X=NϩwSKe[& C f1:pIv|5`3#w;O$o;z-κ2Z)Fo>hKڞtd0x+zL {OeSeeU] z55*TTka{%[z/>QncjM3j3&-7?\zs6=|"D$k壗< [0hp* sBYp;⭇^XµBΎy]<=㬞9l<tH$@oSW8z ɶ ihZ8}΅?h;)8?"+ kK5 lb1: lxT_ l`ӄq)q^j7p!-щ}GvLG0}-zڭ y"%S|Ϝ_/=W<#EYY K)(2 Z,N0%ǞBl = 9n4"nĎS|L;F#/n5ݡF v]k,(i9mW./^NBLk >-V1Qpr&&N@ΔmL?%4Ep$-ڗFWx؟jXgҽUZ )SP"նU;/TGe G9BY݈>q,[}kdoP=#ᇃF)eLF5X &w;t6y*dgg0 =iBB޸Fɔuki7zb1wntSn[EtR${5#pg46Qkx`Lh:j-**P\$竟>v Umc^^OK8!YvX&0ZySeA|w2`ç 5 gx KχC"<8xNX!~}BrIO~nfޏ}M kQ5 S[ ]&Q85]_ -jVC+hLU(^I=()^DtU qGau81g%xfbXscy_kB$}J云Sh VL/vB>mf@1o^uC _b% {hgFbiQ[)棶 u= +8ےSp*7?9/0&c|ROjJ:*eoBoa?5,G^ БcդJ* TNתl.߭ jh򼫙i0E_Ts)R}(YAup31djdA Z8fSCr;.?p0kGž"-:)>Q*O *P&,2= ي`- yN% #E 7J[.Wi.+gxelcLH3^ $}.Sj:JU ެ.iAٖŐTJg MQ/ʁQq@?ikyߪMu {www #Vm?ĝ7k_ūNqs)Wfsp{L8YDjviW`@%f,!D4~dg"ঢ়^SQr<[YKwd'vuruM3%W_dI*AM/@6ʞfûT8#pjJlI•f)V\xB qL'H)VTZ'@,֧ŬS2A^t;,ZԪ4AԱYe6{(cֳ%N(TcI\TJЊH\襺88]a߼/~<Z Ͻgs׉ fˬ3CPGI%ZS4ќ;G$AXhؔHN5Q9,l9DT!diu{o5t))0VE*99b)m#Ej 'KiaUqITޗ7jK)/tYlFvaiu+>?+t4n}=tq#J5xp=f4 u&Am(F:v`+PY-Ug`3 Mh 9,@&J%L:]ENJMX@ %a lc{'ʤ넱L{L#qj{܄b IDY),m _Ny@TxIJd$K1ELW_˧ fG|} l/g1DxXZo t6_2b >-/̝Ϳ[ZПyXx 3h˘kӃBs q6>XPAs@QԲ`{\)6O^P?:p gJ][lV8=@/݇ks4+c[ӼnnB! 
vksetIp= v*+qo@k<c0i e?{coA GnƷ0Km { FQjbl@mQC*[T^"ړ齓ʃޖTG1mOERM w;z 9yH!p>śß&8牷9^%Ɓߜ~_Ǜ8 |?rG<ݞfw/2"ioX~(GLUJga%tl_?^wq_) n~UwK~J:SCR^R䒻9X_<5BFk d9;6W605qspv ᖯN9L !*:Zm`rnzJ堚hNSophv4!=Mw=I%7M$&?Ԝ?*q"Km.A累keuh#FҌJSǼFDk->N,@4Jd$iŧƹ5YF]Gݽj0aTT# km T x#,J2`)fUB4K]ab`%Tg$IHicF) רFZJJL%y% KѾ #mLkOKs}f XGI28ZBi m&`jtX*vW=tDQߩoY 4t;_ssZpl 7^>Ǎ黵2z/W#ŝ˵??Y<`yA­5i+<*~аLKf|u>j1-@mp}@q`0z4:{HVkþmC//*RXsڋWEBjU)QVשc޻ٷ` (h||_傇n.x8ѥ]*hֺ7B=Ͱh+{ D66FXТ<ѥi\yY/Î78]$͓]Ft}u]w/\'<[ R׻-N0=I(ҴCSER+#՘9p ݸ3gךLx!m[˳"%+acF'F/+fΤg> sZρ ^De7袜0nGm]˳7T7 vE=hseTV-8M𜛳ܥ~J֠bALnvOf6A͛a~MǎzYD Gpό/>s4BPZ#Kv1mkz=1ݰ;ojWkR]lLSvv-k3Zӫa5Tw"zo-w^4@=F`MIY;S9.?@$"%~1\ʍ'B/hNZb:*c#ћ"YYzϬȵu@)Q%L"9$>$e})1Ɇ_GF|ll'f-+c 71Ekn$VF7Fi@GDmed'uր:Jͪ`$(LJVxݷGkWG|0\PC5d>7Z|:3[!놹',,&t0^ʌ\``x㸱-fߝ7jYˬ`#&71VÂW޺mAE|pU=[|'|:9Zd斖$LxnEMN'Rk=I :OD هtª=D '*8[㧣աDqc]Wċaα^NNÅ9< glhVĴSBU `o')Sy4>.|5 BPd+)T1' ڱ{GmΩxIMJSVY=nQJcl"I%-|$ .sRֻTJX L9j}y Lu${RToϿ3^TĭnotA W(RuC)\ jq@;CP\..-FI*OގJIjH/Ր9)^ǝV1Ah k 6YZe(Y  %\=HDT5 |4|>E|l;CPxRTUCbҔRD>" xajNuhF 8Y<_hpz?]_~ZɐS&1I+RcH`H"e>IPV7€a,jlC 9 O uCZOffFL%wҳ)q-l ?ۏx[$_ӔZd//+lH;zu Wן?Taa @էs.]ˏ>}2fYӴ"0u>_Z+zӫ$E5~`>wu5U~5__{]2~'~|ͻ~W(龑(,oyև 9.o](o!=vM2c|F#;5h6qrbʼEXuڌwl o-^5܉gWO44Fo ByC<ߍnJ;[3K Xt(RPg)>79-Z{arȣM 3XgZ#Ͽi(O7 '%/uxu9|_͗W96|W;3]_݃:(%\tf6!9/;*ӣ]"6ȉ. nn)Kv 4ҸUFX2.o!W^Ep_1Dk/ 9m8ŀưZM`j6)5WZZc dI?h78It͢MT8P8ԗ+-$n3DRh?')֊\ Oƌ m'Z+v7վ{A˵@=cSe=7Fo93ͺgy*dUXX`F}qh&$[k@{&-|69芋 I64Gc,EhJ:(Yp:ȑ(ʀsְ2z!Α4v soX3&F(D&ᜐyV5}O;#DY1JNeɜxYSOG\.< hڢdB!D1W|Z>]$>cqNX񴬈?86=\Õ퇳N I"BGW{pvtӆmMZ fq|ЎsW>(`ƾfpEnTE%`:¾L,'U͠Y'_HZDrC@@a$j\DC`2$τT * BcNL$&Ju%c ]rNuI@Qd]qaJ,%' kHfIqXR㲊Xʙmq}P\Y9ɌF=xF.mIr'mo^L{ґlj9;B=WFr5u-zV?u8V?yOO~'Q>kwt|Jx;o9;|UfOh1(r~Sj |eCɌHYSX tpnֆp8;e{aN޿~1,ͽv޵q"EK9R|DZ)8m߻=۱F-{_qm:FGpQi9_5 c&P"2Wo= J; xi* 4YTމl7JB" SA.}*͍@Smo:@qW>uAㅮ[co ρiB[8 5[DbzVȇNU񩣤f/Xl MJ:8zVz"IIhDD}VG{8xJ7jh_6@WJ˥hNL;mj NYmҎDN7o^̖E•V|érxɧ)^ڻ9.$8]%LdM "k?WQCU8:hI\t)'durL4"-/73oG^Ғ>~ j6r! sPAA K[oFF]25dS2j5y$ZI ,7ꉍSdYkS\o YF.{#XH\)v0N3K% <52˘--C#UDBK)v#X/(jB/M_.:.[$q$ïES;OΑ>=77\e-eyXl8~z/⋲:Z\]⡘BB\[g5&|o.8ILaBH;] ɤ#Bکoړ.woܳf*lxwK%" m;ijIzNRWbڏ^K[!xŗC2JK 6( ֝qWw_AFSj%jV".#ĨTecy6Y侪e"*;QjR_ҽ9k7|ņMIV,OOIEy1zTEI;up´&$rL|(XE@)DP #>jzJ+)q!ܲy&eh7ꅱc8SB^3OOOOy58|vT({Uh:_渌 uD{WY UUV)D۠Ys,aڱ"3^ a/!ҔPjb>9ڴ^cr 6jB )dW7jJKBjcP>߄-yTg"JHTBVAIʵ؜QSjnm+nlG{fqS#V aPDiM) F^Ҟl8eg yרY C*6ں\gHT3,ƨ_;6h+nRn4H0,L`% [d&ڤ|WEz)۱T\{)IVhIN iu,Ϋw 8}?sFkj).mq(z/³T 4N%:&{]!M~bޱi83p~ȍ" Jm(WREwƳ%+P`߶I3' _4!4P#dv V*˲ 3IC@؂ G HKN)7:'Lduߔ-gce7|=zw3?͉\?d5*uDV zćLte#ju:/ 5\ɌS4Ih|;4Ik1*VȈNZ: TݫXOaҽUT99}DtesieD٥!TCWwy!壜'26" ONU&%UqY%bEDl!d}ԣ->MWױ8"X_{`2;keRN#TVP{*,!tR-!NE!Yt7!h9#'nˣt ;8<vn= l )b >#m+y`<]兘~VR|7ʂ?[ߎoVc+׏\_p  ofwhVێ"[Z})|OVv}Eߴ((|}_үNjk?箈t`}ӧŪw >*+Ҳ+Hf%%M|ࣔVT^ᣟ nw{Vp{:b! aamoOl`V>K1KPJ%%Hk~cᝋ+];>$ zbQd4wr mg;#A: YA֙'; щ%fX!mpKtQ໬+P~nRc|J'W}3Ѵ!L +_?#9Wl*⏈eh6ۦ~瘑zG᳖՞]TjQ%a)'#N`$C$Hlae$QJbPVVZMtcqf>*+/ed xl&TU!j] \@%Q0fcJti Wwő;'2 1qY8 Z( B 6c5Zcfniգ{u_R~.ڻL"X%D"b" &U|L2$5R#9K-r %1,.W{4CJ;u,9kJfc-'$#yxnㅍPVHCQoyJ7G ])jdʕଖFRvQZ _ɹiʻW׳8_Wlg#\ۜcXf]> 0dW5R{&>jTV ҈6s&VMI s'keusY䣈/s HX-0"rmЪrqD=z &V^l쵳z!S̈́ĺbn,WSk#Dlm~wmMj!B=X8|/m-RqoڲǍ[ɿ"SH͛4Ⱦq/k#ӣg5b5c%s,ΰ|h!;@EvGk %bEL#q̌:HB& /p  kɚ>Ev aQ.y8H.TC!.j]H"9m{S`$`;04'lК`cD\hs!)0ѷ  f=}1z͘VT rJ P`өeZ'AhM;׆"f V YH ToVuH_O4ǀֈXCdk!?}*E2㈈L_Bav8aX1 2>ncN~t?d4(dR"'X9f8TBLRV/ _A}q݊ V Q(4PD-1eqm6_#hAH薡jB:@&(8#*Db0V0CpO8T`.d-TP@+*@ӝ" 7 4Sco?<ZTPz klQIbgЫ{-;֮XLϩoQ "7Y0&;D)H_SW Ԙտ@mm$3sUc~!P2)7 \u v\ !oW ښp*cwP,ʍ=d! 
w}G!1 (Kd@ًA3(4k]P_Uqݭ<8F-] 8ԓ2ۉ^S1Q(09[='/POXKT7muO C_&tGޣR  mB߆&t T:7 zE7bI3yzK3 nwBS24`_>/ÒP@l^Mߢ xVnK E(2 崮O̍w5h(Vz4 |3XQ{.hs;y#$FJz:b0D Z؏Y„(ڽo(f6FESt?ڻ5XQ(8 ɋVݏݛf/[s_VD 0كw2Ds= ,B[ARFGŵ{60Ew(h0mBQ=>\t/ ~m@Q Hݏ>R#E!oc b*?v ~m@QS)CRn[:Wc= L^Lhćbz(^U2{N?~99N*KS*Li1* F*2DeTk1K(p(O[ծ%,|ISNb7f,fˏfͧtR/e2kΪVw&}Oϻ'+CcT#<&ӯei$)Z-d]0vjÚYnfe]:|%!"""eµU$d<Έ4I?>9Ȣ=T@;<3i΋A>=jj~=Qt|8P8m>ꟴݽX(9NPlNr B;\vw9GCNNQ="El~NQ"_SH2涳SDuy]\"ɲS`XWГxBLz8E+HbH85|k)L p::V'\[ޯO*N1@COBO)fVy^tjGbpḱHޠNb%޽D3p,v"v3sumj]WVZ18Wi|Y6y"Ɣb?tל9? ?~-+H=Y$y@16HUdUUyn#%AG:`1^:R ijIfe hsAx( ׷߇zߊ$GkC%l̋Π!EHQRhK`)BdT\ Ifd>Nuof;۵ն_TT)wUuy9a|1y!d^iJfO_L4y/4 hmI,ģ~pO>'}&RSPO{5E_GwUrL/70wL#V^M;Bo0EEжs%l?~co'ȣRoZ$K펣O1dn ó7Ͳ@9ןt[- }.zxQ6'?Cl6˅>TPC(B 77i?$ A A$ Wϓ0h]HauWja 4tkH4+yrͭb>GFsͷ <ԏ0Y%ׁw %!=I5,5DxtwmU~ц1[wjHؐ$cN&M^eT|_'B eH1vv (aNRCy؏ 4kb(F?!;/Z/*(8m7^~+wTf,Ο܏|Gk7t7 .?ey|`jS A A8Nho рܒ, /5[enI `vK@|kv|~Ga! 5iE_b F]&t%vI0Eҏ Vҋ(ԍ{ ݗv'nwĺ;W6=5x$-b@/qÁ3WbA 둛?l8S)P4ޭ"r8yԀ -Cʋq]MT!qߊ58C*U߉4̪՟4* زyhd F@jlsV>e,H P|ӛ؀%0ʶwW?VM՗]IDrbVelbߏ`qn}t!B8Vfِ|7?}O>)gn_ўqi4繌hE,*Pn=rcj>Q f$/S%Zs7c IsQQ$QLRO'ŗYJ˝q+XV*ǹz_΅CuD/w ~G=g?L_?F;%J)S1m*ÛJqVw2c,Y4VHSY"m05X}oK29SeTp3h5i2"%(i*+HzgKΌvXlX B4dCq"MEDMKM(h&JB8dKĀZCaxȹ]s0X-nkE&:h|&xˉKf}VQKx>|Orƀ5>?n XNˏj4c`*ƫџwof'̏wgӓcW_G}|罭S=v?-4ඹmH飋?fU PGTV@}{f"d/tv~yAz$qUr]۰ˋ p}V]~1ЧrD繊D4l# Zt:٦ĕ$&z2IYWqd] ;9`MZ^/;+Q5+Tu۳LLmenm蕆ս+gy@1[vO4:0wAVþ7[C>}rv^3ʠ6j0_Ӽ^לq(~Mzx%߯ZfA~MZ b |_vb@&]5ê7 z.F{QWkKGe4Nb%}^1ġV`žZvm(7/Wۃ.%o~~k\u.ZދGr$T;08MӖv?zVd>s(@}/Z5PF%g"%huưC"{WyřzRJ~:?i6Ho,yN$@_mMj9gn9"|$f ͂3C47rGf1LuYF* +-QP@hspn^JJ%$t# c(;>x_ʬRe^Ik[鱜By_@ I`B2J.L 7qM[09[Y(+!07hˀ7 ?c쮝![2M-2Ets:3+5re 1')j%=F>PnSMYKr*Y+Nfp Έg#+XLr-l,K+"M`I1/&`PW D:Ó Q8y܉Fra1_)lRWO*5I*UTIdV"dm#?c&uȚNӮx*ǠHYi?`>yMx p,=쥖>3ժjwAA[ʺ_o)z ƾb`w5W b$AN UYU WiM%l^k ;M]@#l?B}|Z3 ȫquWI.CyEN˷Ѹ4,bM*O},lfmfR[ETj'%fa&Da Y0WT.)Ze{wĭuMi…K#sTYw-8֪ʘU&+"X] l$j4GӮh|NpF"aIWhE(p㳠V7&&8!3KFC²֔$,T+՘ doϠ!˱1LLp_PqpCQ 壥S88&8!&-a19Fw:7Ü)-ۂ-CGݖ9LBIRszn=t5~N&26E?L.ty./40fN2t0Ζ9ͫ@SG"WQю Vv͘1FT l xIӟPJ1<{ Zy:p3mDPO7=zJϘDAFn#6q pÓq|SC Ϙ+- K=n[{?:o,YNRUþQ|Z}|pL{V cP=e=00&@K@I99ђ$=XOgrcQ?j>Vxr=9]rX{jùFK^38e9.c`| P=8( $6rZ~Ѩ,l5:tlazrnRSvwO՘՘7qH*J\ZZۣj]Z̾׿}6,S1A%83SK y;ȧh7G N:@ޢT [O[Æc_oobu}؇lRų2p5}_j18K ӛrudT6rAL`qNj@KiZ6+ 聍GKu XyXEsDE%g'-bw fdSw*Vf[|y?rϷQw|ǜYvoq??:8_1c̥>3P41_]a~)H8!J ~p}e)Oms|8KάISm$ vz3h#OHyJt]dS- e'y>]<($Y%};I}"}/)GȮѣiLCQ=Zv|m׎&C՚81>c$Jŧlr:q`Fa3aA1v뀱!p;qC-ݏ~ I?IݺIGwgtAAf݀..Z& zn8rpZ]mӥjɗxPug !2j;nXJ(\?w5ήͺ$^1d{p΅yB}.[Pk,AQlhsgH$L$a"4o]8Lsu~WS?rntH/ѻ߿uH' ߜ75;,~n c9͟M6MkyYhH^HAU/:;˽:|؁/<߽oQ5чnA{%F)]\~Jny-)'g#Ұɾ<)"5ھ|~yzuըzSEXd _>$h77gp[5AtfjzڴX&tH*֖uvfvDo[ш@|z+cV*$ю~ !l2u# 1&VO=6>(~H3ҤJOXx6K10tյ2kq=^Is Ma _pRte4-a[n[3\bo;JhyXKՋNu_+*4|wV㌘3^ːѦh&)Nژ0xgI Czv(3EZl1 KˢX '{Qj)!@ptIe. Py: 7hin2kجzSX"JOoϫs|}yqtTj;^3z梎`(ŵ:%4.i rI[e2x1+X݇"G;ZϷW9Esi`FcPZ9f.Hb[$Z+g[Ey_rR,1d˪En>_ U+ܷvW!Ҩ^A^`@0yfz=ڟXNR@޹TWͧ} :dV++{:a1e$1|T8(#Q@j*s G4OآpxᔅZ/k#޻!_> 99  ׼ 3:FrخՏƓ䉪 %avE\TJ66Vڢ6.iUH{V\TB$LXIìXdRY'P 1H`mM)Grp48,D#c) zHwٸ%w6~qQ>[ݐ>rT%cE,(]$fZ;ϗ^x@4&)ou VIIΘg4Kc dFyݾ%u;22̘}^ snp& U;q#SM5Kf SAC"O*25(T:(eԨM: 2 @$ ҅"&;Q]*:^7+vF篵J{\QR>L99㝒L$6>p䚀 xr2d! 
_&?ZZ^Z3ƺ9Ij- vYi^&9ZH0H7%Ɍʐ^A&=Y*(Pb6Ewl`ymkG, >H]\t5zty>˂օ.9G "JXwH1_/`>,,{ ~(cˎ${&?R^,lu[[.Cɪ̕dpYwV)d!ۃ(Q^vF͍K|5g=_h_}ƪ*wGF P(֩PF{ 乖mfݴLC7?vF8ϙ}寋UH.IȳpVr, *3j1%|3乹›!^7C;L4Q}T dc,u4%AڒPvSd֞`r)ٔ?}]ƪ)O9r֒$&" ,R/VTGߪy @,%3SN >^BFi%Z)RU(Bm`n)JCA&%mNX 5Lڌˊd0ʚTQUVܑZRK7`[ Y0k-t4x%;+Ɣ̵fyQ.JM2lή55_d?ܔƝF//f?9H I(Ϡ*[N2#99p]ܻ̮n&Wt򄫧w_<ڮg pUgpknR7qKmK˷mfj9x#+$eUirG-)!ۃyB+E"4lA2r4YRo> #22nm#\BL/𸱍΃6Ֆ%"Q|1* _O;V9M-Ɍ;&No ^zIdJdj}ƪS21o%3ѝ/vRs*ġ;oHrFnRB"Fh3$$ɖݥr!';HkL=<Ԧ&c5M,Uac4䥯n:/sgd͢ԬΎ& wToj#(̽Jm!#5!E~VR#g ),_OΖ\GˬԜ1Q (Rp3/on ~\%J Լ=}[|`G;Wm9 A[ !<}0`ec>7)^%Rܥpѹ7+EIY*5y`\ M*eoie0,8#hD u΁8w[')ղ/y%Q({M̼ W$sO|`2,0-'_YǕ/; {b]OtAypB1v۸"D>O'DÅss F{ήd=ͩgvH`]նЮ6zܑ1=IYɰRdQ mŋxؑ*M "VH*PqP!]JCZp֐@[" #Tl8)ձԌ+*jmcOTӦ:fxsw4mZa ]ߍٲze-./>?|;Eg>\oޢ8c;ݿ󻇛XԚ._/ML+6^~+/W|ѣ Lz EHU7[zL~.&j@蟫ٳJ~׹.5(W#͐3NĺU;ukM[nD-njb̐3)N q=w&zI"n@jJ53[Bj(;Mʃy|#5<{6"y# j$QxQ"Sڡ< ,vb*i+LP^YӥM3iٌ-̰(ʊ< WnDc"q Y@RkDk"B5Y[VixkAph~pv x;6CqQ3a6D#vR{fߵ^(Ma=*CB lUe"JbKvXw<Tʮ!ȼQg$N\?w/OVc*{u=u|ݛŧ-bCOB7%+SӳW|pe8\ν~tN#8AX g_.ɶAZZLI\U3?pv 9uXq"~8sӿ8L)ЃN"GLhj>dlL N9Sy "U1'SG|\̌hxd(ϫP,a[3wZRS~_~A.EdO~Y|fOܺKV-z/__VyL?\l!/L'O߹{R>L^]$'ኰ8p&Y~{(P;IfFFzR\(9FwZQM! }J-fyj"aƔRe%+2tn]~ ~+ِA$LePpYLFʕVpyzNj8 ya{ v z6ɟ6扬T$7 @JS]8 g{_ ktB^i[t-Ylxch8Apl&w5Δ8rh ֛ۨ6+w_+hx33}/$'90kNvdbH;^̙, *^=ȕ>QAT9c?L!lځ(lU&NiϛoA0r( x9R=R>z>㽟fbǝd÷7O_*"t&(]FzSV>6 ; zL }oVk*kk\I™?xPjy{%{m{v04"Rl1RoqN843Ԭn%`a:_v{\l2 dBJ+(_vR`F,YYJ%;H$MIk5nOGZF{z;Tu\ٷb4McS^ ɾ!8uO'Lڧ(*7MhtgDK@&#9&rFFZUʉ?K=YőJc$}Uψ7"% ;UGHɒFFHS(T^̦ eP<^e@Be{_ʷ y~S9Fw ښsj!s1d@(=}AB : xi{=6]s"ZV oCUPMlĎDn1[PH4/u#st_ΉB@x|dXissybQM@`Bn6FAҾ)+]c')y&ȿ]Ģfd;I$Mm4os̪xŗqt]qts.PV$C(ˬ*Z(?E?\񦛻]9 ܙ6(QY*yAe)XN+*I/ ,`ml@d|`%9%HY4LՕd rhWXb%AXY̑q{H:x֮=>EuUJ@H1(WR!+`u3x:Ca3|,[Jiį\u! gkC7x1!j<΋z@!m b"o/#gBrYp UIUN~j{'Kzv-Ёޝ:ь7;7XGco1O?nYG3;XC}~_ϞM`5ll^6^w=>Np}9p͑ KkxՇHO ^ob g֌Ȇ~KM"SQmG}oFHο 1F.X8$% l g/$ Υҵ! 62?8qX{=FuC2U~Z Rej9oƛUXB8\~{!;Ձ>46Y;4e=јJFvb 8JQV& ]Ӱ'K)ZfWt{e퐧Oےg,sW(|Dq{9J{ë^DzKdxž5-=}z#j_idp_M9/2P/O`/X~L"PLOHxw|B*,>D5 QdAV}7=zo.Vj _'^>t + 3,S:c';mKyK_%%8Rx$HqcwHB0zqk"-ʞhQuZxtr 5琫 հx-IȾ4Mi߼MyvO".2Oo{yNߝUBiR%۽OQC?jۻD` /ߵw^gL>kz$,T-,oG9Mjt{yGnVG*oY䜍+vSP8o`d(^TUP T(kHP2)5F!E$?^8D 4@yT\_~D&0DT+u#hu#s( c-2%C"qɘoztLB!CNA\6V)W 2l8F&j E|sz2e Wsuu뛫a:` )jx+}\`W車wxUc{<^M-H'<5Di0 jg'L%'u1i0r"bNZ{JJF$B ڜvҪG;9[9.^ЀPK;9It^ɰLK,%By0׺A25b1g]7W'O[;*ҎNX Kk%;T*-%̶ EW4c$ai<"!w'a'fv*\Vj2 ^HRʕ 4-|Ĭ6@!&^rWί#|C >=nsw/-ۡ͡o 巳o?43 lUx9뻳.k Fxk?"lKhcYLL9beu7u1tuY-2y!bIL9cР*Z^UC)STMULhP1ݡͬg4MugF|,Qc Q̉" ocџfh-֡LOwL5t'[t8 j03xYβe@)/[捫tlK&Ul!EJVq3NW[s{` omo2tAe;32[irWoZAC33Z3EN׀y;W:.?Λ2 ޟ%\emەSɎ63#V6y;F}"2xoo!k-y}/K0yk4O^< |7ۭ8n\S;Yn>|amE O|kE. @ ؛":SXM/3Y|{;YvANxv@ygPҴg+ŅTxz}zagCDF6*<QY 91o{񦲪6  t54¤{>Y0u/ԊVK? lZ/$!EWpRyް\dOc,k#/>>%|!K%|r!aؔ&GE&lP`PAAa=ze؋} S4-)VQ@R%7-Gc, yT*Ű̆*HβI@e&oӋ΍\ kiYl=%lrr0tڍl mT n& %0~󦥶k Nv \4R`J, $݆`8C4$; `|{dT)*Wݓ}:k dH]  9xx[X54Sm{CFth?%s5M|/׌qАoz(OiCDZc6Y;Ac)i@}|wv?޾7m_.k^.ޚ1sJO6w!51aܝ<@`6FT:;%(>z:im_7r1_>f^xi߼9Ñ .YN,O8ᖃ褧Vp-:MNO~cT ,\5'tqr)<qs`e|-yz;3X &[4[z$\=(zmaq{n|o.ZRxո/E?E[a#SStQӽrqIEkh D2eIZ; ptg] ~&s+q;)&7nY&gc fҤɞ"K9`Rp6kn&;ޟn^ 6u*M},Q7U4Mު|:uԀ9[i MEDKNHpL~doA* [7,(b+%:c{ r5―WZzcTIyLkZ.;JnChd=%-YhzD7YOT2hLU,~2P*2;XsFpސUMм@{)օtt7@_uX ĎNtMrf;Ul{ PvfrXXZI9Gv KlN!2]-,')^Yd/CRت\\n av~)Y&! 
xlc6̡i6eY)l=ld*T(ZEXzJ8.ytC#_LwAE|9T̞M=([)$s@ʱ0dּ 8 4[LL~|KϪ8sSpwln 9IF},[*-]0Ԃ4 `e5/#Gj ﰞwZ;w ^DUdvLhݚwa\ݩvџw~z;R~ZYK9k)?gݔc2TxSERt C^Uѳͻ5R6?>f:Рc桃[,IkЯȔGI%ѥbSHB+̮Ci^;31IdT!-]τc_1EeXW"UtS$eֶ]γWdfOWA/FUrl&;l5X 9"# vFk<f!Ag+Pu@$g~a5Xl32N CS>u鈄mz)M٨E ccaEfAdxTt4*j+ 5t]NVbI\cŸXYv wgsu>l=qNѵ7yt mNjղY2dGl4 *RZ$6YI.( +363X!dz5>VMjV)^ 3+>]FK5d; ώ 9{lrHYWTNTڜ#F!{1rփM6;ymBr?{Ǎ={,~leqd$6r9SZ-#X7\ɛvA"UY»?({h"@o |: } PzA&Κkㆇ8T/0L^]n܋"U(EGcO4Khz{ʜ+*iyHM%ֱ @(X{ڻC`f 6?^!LG>> ɡ,%ɀV֒@ Tm5fnm)AڶY6^.Sm2Ipbc/#R0)~Tqg1)6\ r9cpltg-j4m ljͻL'_tpi)jϸ?Eڞw'_AhmMoǘ>j~]VP}]hʟ;ѼޱtUwptUNA]|1~\)2-gʊ &yøXqƛB('!:F5KYUGUs 3;g%2&v+e ;լJ6aTE(E̴Qjt˙u\\ަbH5_4nM&ҭQOJu =RhbIFO:=·*ѓ ~d)3!M]9i}l3sǣ\=^@Qg@LW~:Ch9#׿B_W^.ޅE͖.e{)הvݿE}sgUK{nV3i Un;?gF1ЭÓs0^5 (}dݯ % BRXugv޹˧imqi{lA*jOG^Iabj$2ZmGeVد:{ZM%}TJ M; mm :?,ce/Q6lcaNf>j6=fZ5ˆ4fzݻ ޽{Hk2&%ǨĖ|mJl91jYyS"] >oo6^{t !"aVBne%-oi" ~))3o=I[ 8uyVK<ո}pgkm 0b(~>E aODPnœU6Ǯ]T ]3B1oL͋~}r']1DBWeoP")-x꺔缘R@ҍ^8/E7%EF>(& zd[b؍AD>EAK:tCzzPp:Ĩ =(&p*}ُWR[bҽc8I=B/[gWE0;@gZK3%FANb4+˙g͉D=Z J׾K=iƉnh ]zϜE)lij 1 *GQn Fa$hB@j|DKd*E*0}4(sQ4#)0Ԍ zLˊ#KS#E[Zn;4wV7'1zɃIi(3 9_*Z0RYߗWE<.ޞU5+M$tLF*I!H%5!9$ V D{z\?j!gю[zLLVG lc{REO8" @oC+P. @8B$6A?I3}^zwstzN=UիX@<̞؄uC8gţGFfHDUHK0i#R,DL3i:N+T4*\T~u&k&"rn,N bm*؞iϮjNgF& \AP5U}* B1Af;>8P7D#mpŜQ tJP c}^6͉E2 HFWL5)k A?*8-!uX"E/"EVO-A*DcPPԒ(5UOkҺ6|~lrbd'O)oDPoֻ$6rXII 'iQ |H5JRc3_I'_tkqhDjfFM^KNL[rEh((AAW rH9YvNuk1(Ʌ}Gt3M[C֟{!9V[!ҭd|@VA Ծt;e45Č&ݪZ*ϜEx[u0oY3pQ6/C@3M빫fk_+^ ə`w6 ҂ ږ|&n=wJƇcVElFۨО}}Hn~zçf@ܹVSLސ?El*v܇eL')0}}f>w(PwLr3bNf3w#n)ݫw(dDLZ+nWOCQՇ=Ź=.1#:d&Ӑ.4PKY ޺bp4o,*0a=d/#M2ŦpnRAIP]M-.\8I}kIh Oq&&oI7$[%ESNp-jY>t]htC>sݛd#KᏠsu3ʰAWo?jv8pfb.ѻŪgs;@ehޜExCT Jj)mrFHkHlhԂ0IG_^<;u/~Ӏ_~\KNq}CIh;},YDt]F8[r:9ew9:אY;Xظ8 |7 |]n.xAkX>:ژD$FGU,&oVJ9 94y=ϯ"::~+#]WG A#|2p!>OCↁjwK-}E{J  )ƒ،꡵NP0C慓/MN˩ܡW@޽MځZ(ѹtl؎fBdG!b%EK4'6}FɉSk(W( מyjei'4v} b[QYV\_7XrKD@% {hm5(NVK Z+ZEc ߊYeR uI2x( H@704F%%4)4$Sۈ@+2 "W̭$Q!zh6NjPH_岬|.Nvzf$tASĉ#rw| dF t%zBBҴE`hVpҊM)`PdI6%8 u`"e: "uxY %6U(R/'"U.08R]XqS0璘C*{Xx6 pȂ"wIܬ؈\Ta)%Xbb=ᆜ)!5jM ra%ӊ D"'sz~ئO#R>hU>oހlr\Jnjo3 o2pֈjBU9|lenBW }.@?ߖ 'r؀K={dis 3M~Q_J=raJ-j]G^^S;_?Dl~Z(mOa SruT+RA#$$( N($uȡ8 zCQcTHkcv4O3^Np)^qŌ!Q V 3S^}F29PbZU>&\f-L9 ԈHʡB 3bkW /`\@#! ) 8(,(F4ić8Bd >w eC%C &eA\"T0q4I:fȡB m.hcEĤq2X2XFlL4!ѕ$eAtG-V~1`vgØ!2_@_`*0sڨU\`_^-"4(BK"gymYa&SNq9)N]U Q%){Z4.x!MS!5bS7Y V± n_~9޴B< Y[O  " ('a#Q>G/̲@+fQ2IŚA"Ou+cN[dZI`nA9*dC$lQ wJ *ڱ"]Ϛׁ53C8D^oԴRa"W #BHyj&Kj( BW\MIQÂ9`)% sR*`YR;}#{z &+|o  A B2 \݉U7!kAD+*6%{<E]qpH!b"$4ѠESZ{SqiJežGp"D{#8iTyPfcpD.8$,dŶX`$T W8YAJV1\DJgM%icMSb ͐$mŮiPw7S\*p+lg4Km4&dUToWHIs $AWvbeF`LG0,VD.J NK2&6/);nϴWSVd( ItmsCFRcFdgP^N!"H4mKktP-*h, ;)I%5 #(.@) '{.hG4,I#58\WV@.31 9I(K-NUo]>@J^| ٫V(jT5BKy NJBx:ŇS_|MWqʴRpi.x1BıJ' NJjp t!Ҡw):s)=k4뷬$җz"\ޤ67ww,o1(7b|5_>޼.]G ^&լG &7zYVWi`~3y*;0}w:sƌuyC(QV 9 Y T:&\@D/KRؔ`jU `kSrLpč@B@^Rv( t֧Qqv@Pg>Oy:9ej׍ 豥ʴOd*La iҀ6+HP{ ^9iP<Us#{ Iv5Go7pc9kQKۛ>?Pfo)3yҤE 6%p_FM*]8K7:ZǛB?T'(~jf_7bLm lZ*D+$~D: 黏'\Zt]PK&5 > g`呹u㡿@C'CS^]+;x1I$&j4}GY"HȭyV[t"H: |9}،f7[". 
n8xwIq/I.;Y=)AE-C-$lzgG/^p\};PpG~*Q'|I{le 6D55J/~>!ga+om}e)]S%b5yN(=0B@ރ)G MRNbuDPb4v4B^ҙ5Dk@oz!y}A_UdD(S@ D_{h,T2@P-Q v:^1^PuRmu.!ު7ê@i2(W&b &LfsRG"V*|0/lfհowI,!;ZN.z;$Q(W*)KDdo 26ChOFVd\8\Fϧ¦?'˛nqeX ͙̎_|k_4Хam,ee7o}҄.G𧂹cHM3.reH(GRaP_/xrbܫpW?!gw÷?C~p/3x0»\8Zw',O~Ђ~a9 nu5/iκs喁~o{_0ʂWP~^Z~lz`&'<-ۺ 4?<`SKDzw;xME5O_F{`qG &m'{/#w.읋; eeEng`WwͶ6Zzt}Y5隌C].Gu6+f%8B]jD4뚣yչs @UjB)]XtIooʯ.bݕAxr0/ .d}2S+ !mC ޅ콙>fslߠ0gQ6$=]w .w;|99~p#dDCd Zw*7g7w{![X9v'3|Q^j] q5q>SFը;# r C&8!ZԲsFT:nsxK9z3tRRSW:oYJ-UUyxox*笪ůMg'9m-}8v~Qc"Q#{۶n/9 1Cؖd;I3CBݬ!((ĊLάo5)ږ6Qe&8RiN0q}8"Tl,FẀ O>7_UީA:ڹ]+ >7CCVN0uWOf'2 :]DhVOqg`BT0(zc%HnPXѭkYBYg_T])d)h{!ܿ@JRsLfJij߮XHR䠖fhV B w#l[Syt<-_4]nE jP+.*54TL28e 'xj{3Ï4Lcs<~I!IAC9օrjb[}xsdlM2>tItUV,=ZKr Wm^= kY^[o e^R_p ),;$@->2pIhSo!۴v nup g *M[ N$c)-1YτLQ Bwɏ^ә=+hu&]}ž@[}(d|c凇 GcF4e$ G!iTpD/K&l7%a{`UlnGJ JfþHu°Jf?\%1;q`I[D?N>fh$ sb3gwߛFgym)@Tp0?p%X* vFxzuXn$P@44 [uap.I'wRAP'D)4$-\BHB By\W"i;b  θ,?w5n@Gre7܏wdx C?䔃j5Z " `+ CH#@e F2։Ƥq YJrcBš' fv \KW]2Z;O{p|bV9p+_jEN,^?SA7NSˣK|+2f؎VXIDFH9̌d ,"|Hf˱Δ A-^ V߅c߅pYwkFf""'r3Ԛ]>k&k&=Yik(5 kF{? HY"|"S'*Оݜw%+g?O90J)iAbKԜĀ4@)&Dݾ6 O;nH1T)V&nb'N׊i?ǐP8V.1qmtQZ%H )d/BX&HF)Z) R*bj͗H~-|%*h?X,En#vud3/zI~8JoC1kED!#2 ȗF]K{3Y K&T"!QGfR V FDI7VYhNaqZ*^_mY."Ѷ$՛оYxӻL&3F%+wtudưWg wievV]Mk m@wʺHiĹ" WC-!D7ʺes=f(2h1H F΅(loPqۭ}u *.psPqb.7P;=jT\!J]B@J%mySYwgcevd~MydD O:%*Ժ'rA5uyFix;pľ_BWZs0>vw&bR):{bqr9m/`|qIce"MMD1pAn.5s1D16)aAh LM%0cJƔʓ LH/I0-ϱ]ҭp3V嫭 [!sc2/,dBSD_nedr}p0jeIqhDY~ &brPXPҬR&xUŒ6`-OYmzyjX -iCF8c:w}n4hnq"yj4 5$ "$Ri"vd J3I,ܔ/tyH L6e?#yJ|.|'x|8"mb <;VJ@R%ZQ Dɴt &PBґjI/[߀P-/fXG^lq !A&dիжt;.n$(X 8d*Ry\!ՑXKZ #KIZO x{kf屧 %/I!:/)QJ4:H1hB 6B#U\Mnk93[#ۓ5f5ym%ƼJ'wy(XB#Ka$X "11N]㍚|V桤v6r}ó+K -maީ.dR}_+Q `vJ;&)<8d&qz]udž!Ǩqp8ӧ8dX0ڕq/ے!0\3eK|+uO:ݎՠw`;_ 0}y. "~VC/3"8l%GL]&5.BgUv/l._+ fH. j鳇k\fּԍѵ'Fr'vc‰J҈1#qF ~P=+V>o{XçR m޽2Dࡨ N L{/QYx*H&7 81bGv|*hζh#TpxADВt!<:ݚ.tuǓOf2 0pNfjWjw^ FY9cއSKv#:sP`Pn\t2 wj7ۏӍڧVוY& A-:HLD6J RB 5_D #҅2k Ȯ><]e6yzYm.(zAT{h1ܙ꺗x`4W{Kc!؁@ybJ"i׉_^Aw{P_ȮYqM eBi;N 6"d} & i;SVB$iHH8iwyH/A-ban_#]|+YPjp=, cx 0e]!2!.7 n[O< V#G+}`!.?M r +rD J=}ޘCOak޼B%)2+D}Pǻ hz,$ߪ;D3-!~H&̓K~7`nb?Zm:}0U%?[_WekUl?{&r7IsHq"51$\jtLҔS\96c~˷x~ 73|0b?C"7~Ӈ-@{?y'6Ȓs*Tb0O/~ -/Մ`&08-08YH[[ns{~!{;Ivs"'01GxgӋsg M1>f0{lٹ{Ρ3K7W>Pg2xgZx>=Jd37Fv2}SY͹.lsncFމba 1k ƣ#{+Wܥ5@:ܪmwq ôŸNBQcvȻQ ]w[IUcئfc};co"!}PN>xXIDn3>X3}nϩa6w;a}XY b9~_6h{7vt~~Jhڣ=2-)pa]I Ey%*`ځ!]zQ̍$"<.{Z)m v]HE;IXiַ 릏a =l{8*[R,xU:h'Q[Tˆvn`h-ڧPgRx`,N <ں"(K2 ۣ P);IR?rS}d;X7?yS_N?:G ewKiJaBIMfP}+Q Ue$&0i#I8ΛJ?XM__pTǛbƛWmGw7GnAH}Yk# M/uu1M>uU>I4BG$lj'F,6Hbh,W^}{ۏ=P\uRGttg9Lmi}sF}QKHTȠrLݗ,7[.W.@3Rնb+sGiW8x88?a{8M"ϒtkgwsĊ 7Dd@1*ktU-IJI5prWK^.NNlz?@[X $^&:f}7Y'igi{{~0sЁ?|axxRN`@Sٙӑ!XwCM@bN0SjdmD(['5B >5Ѵ#ӥC9zSxa_o>?iWnJ?T@rµaI!5Oiu8Xqѕ0PMz㘢ObN’$/X88Mwq1ulm%,&)g:"ad'>hl X,|Y3~`C4gS;$OkАgZLo};}?ZxOg>Նxo>r.ӄ J\ kћdݍŃ+T'39E.3gQ狶+u^;_yCQLZGXd&[b*I{S")C-`@;бp| dE Mm~U3y%]TbK"( mCXlDuVӈ(k&BIHAZG1586J zMOwx+ 8pG8:RG^X0%؝3*R5yB0N%}PG~uf! C6@5 X* Nt 4!N@KFԉ;ul04,e tJ0KР.Fq$4B*4i [C !J I2KWi{4CЩ HXf"܎dfg`{ܸ9y.+Y]OΦ~hW?*>O|yRxB \H )ˆ.`k'A 52aCNaL"8 $/P mE8hĽ&D\\x7 bxρ ]xI0&v68S&.fIxri *AonJ[I\dlǶO;4;s;cjD(G *yVVƊۈGFXrwÒb)0q]Jcj㇏mͩD2+*XT6ݾb5Ť>y]E{׫lޗݿU;2  ˃Ww༄*TƁG."IsHa bͰǬ U%1k&ףO%k9ic.+gJ-X8Eoμ×Ϳh{7w~j 1ڲ%v}uҲ3MsV{m}ɷU}PQ3Zm37=„ܐa)RH8KnvrI&D,J w0 h _x߼7I6]w_9gW^*DT?[A@*A vpE8IwOE19(<}V޵3F.TM [qW6ʲLbل#!1 Ĥ1N!cIH""YlR8 ACXs!I=:Ú9٣Zy2~̔zMa[0h$k )2PBT&a$ $)!< pHTCM6  'REj+9{);cH݀n.mMvY?zCqοD2ޥW~U禣/N{nF 2p7 >wn(EFJK B2kyF%y,k$5'*.ԘXQ@ ÀZ0qh!ƙpFsikX ؎h Z6 1a.$ f#-ڸ{cu[pMPVΪ;>o1,/dOE TJAX&UW RS)1'%8̢삳[zS;I/7iԖܰ~\z;yXѦ!EDZ^嵠LtOE%F*Ѕ{y4W.Lg|v4GSt} HD&y=0_=? 
NìKnuB:zU<彝eLh4B!zX"Tu2=KE`&A qB>07&>[Vľd&![ˇC/b|``y1PAo nt2=Sh/- !NOKԈgqt3?l]g%|ߎ!!$n6Sp<` @0?3Y z*<IҪ3*@5;xZB]N(soM>gmabMDNCIb=nR3W7HSK:w嚘,Sn]owsκnVg]1%$Jeq1f#-Fz-o(h'a|O|V0;Sϒ k ˣS"Lp_!F}"0N\1G pX"oJSJhaTŘz 3 8 {o,.㯠.x!*>`Zc,ldu 2wUhׁ5dmß ̝ )kJl3JV7dg:VPFQ6O% Ѻk,4峥Y|M-uZdN+9a f1G .P6jYRR$aO%"_N.f~R(3Zd5ȿ.C6f+3ZĖ%*nKHg#~8F ۻ4X#!RFZ7gr+[`;4^`3L"L8EBJ1/̪ʔG u{7 gJnbԕJSZJtR[$w։"C-S(nQX&QHk- tjmZ\;Ihmm ֙jp5d9ls5et—.n<:}NTy d8>drUq6uQ=JF4UzG63T7PU$&:GFk~0Q\(N|Kkɯ(kF~+sF8niTQ :q)דy9u#Q"h%b9R`g@؂a鎰\nPE߉«2Bt.=߱7ˆRBqHV( bi@̚\p)1L5z9ƂsqGb<8$LPs9"rw+ [.b)PT7Y^ާpC_ %q΃EN+pNC7B{ H$iz~q+Q5bBQ1(֝\mPP[bX#ƭV ʼnF#21`M`*'IʷYVmPctfːٝu+ ' .(i"FN*XhIwʠdMJW$pP"1A, +€ xu!e>@8pt\ XhB8BSS) b4_Au@vJ)FE)T]xh9sW̜22BDJ@!B$0'O B-S!\/#,ḭQFB%h +D ^A4a\KT1fs-S2 1bzz$ßx2B7a!m4 oj H6IboHcf-F5/( ߩډQmT}fC>Yǧw̆зl_a&[?/w\1)`u`Ϭ\D(Wo}LnoQM3b5nJ>+k,\F>B?{& Sܣ1O]qY`ƛvI/WTMaֽib{)Os#%VXjM`U)9 ac f4;Ԅtд~޼)]M&.@?a޶ VѺK4fc\b^ƽ!o͏o×8:#vs &>dQ1^2m79Fʆ\]"?j7|EG~9moG|dT=;qT(#AH|%"9 5e4T/=9&4qZn~bc#.{BxVG:&OaFEuw~#aUKk/Wt?na3"`U~j?]׼P˴ӺeiIGH)XviEtXcH/B{5b/<"^^нD,R❟=?i\GX#2'0Z>g1: Ă'%K1-}r"BHd,NKAגH2觭 KI)eyUDIsqR0^FU?/<,^ށ|iUe.A)G3&4 A&CZagWMOFYٷ1|iOA57;`y/(RRˏތ>=SXP4RXI?mQ]-22݃RÀ:W[?g|̾e'߭S*R.R\9}AH,/bXg?,Juq|& ֤ȭns1 @t'l>D!0|(y 0>d b0hU`09zJd 6c qgyW~:{kWR_tu6`mZk1*ͫN?J.UKZXW-ᥤmUZؕFfL:/)< A!?Dl ch[ ׍a* u g(P]/o|B=[kkں3lV.9{w"VZ>X0cZCZ%e-ۨԣ쨺Su %aQبS\ݩ[)q'eۆ`C"os{'}[-2Mv,YxsW}R))e,Tٌ)2-(slD*mOY?䚊UzQwA#*nHٝQ..gkjCFvxHp/i՝?|w70Ɗ^O롡X5{=h-HK`%ux`vM;Ear w{CkWY<bwXN#?$gOwoWG31? *봤*r'+N{ްUDX+'wJEDlX854L VNZ*E(L>UC*+F$C"om7p'wNݺ}3'EpȄΖL1wK4'zKp8Rڙl|S fA91H WΞvlȥHBR=͋ F4qn"a^ŭ/Jinc pG YFŀ,H0# ʍBrxvAjkt-ut()ꈨ#my8$kEpB?Z+g& ȩZo=(L=DKW5j1~g 4 yG.'9?<Y &cseH뙡_c&yUl=I+'<)) >8f'2ż:T̵}ynǷ)Ŵѿf9EoD嬴]KhNQn)fVO1q*gUT=uAuPqKEk܆|*Sif R4q[%yS12@SQg9acVqiCCqmSTixB!!c2!x4>gȴQ\nN[,x6pA10Ox5r`{}Nq8ԨCpe:UCIo <233sfi4kKtvMR}Gc[^~ =(Lug%=QS"Zf`egbb+8Oė^W3&V^5FduF #:&c1D#o3PmAVEg!ˉF8MJ)ar Z:$nbQc^|oԐ RuWzu{{G{(my+%"͔Ogw7uڽMwem{+a)VEZ z}dĴ+UHKHr ~9--sBO.;-BUB:}R? .fxnN(ܟDa~(/h2-],{vagi'~Z6C[>c'on~c8!TX~}MqpShH1;P&-ȓ6kQx-Z$֌HI*Z\qpS1Ht|kXVEkL9K7+$#yaEY:qQ$ k nѩuQcu+Ïbt}DP9ߤp$LRXߖn'g'˫ ӸEKׇFYILzQFcA\4&(#zW}R,Eƪ9$"F?T GD'e xH iwzH ՗.+`@DPy?R (nNoH4EO;rHFD6ܠY,'E+7x8{[kV||S,QJ2d!XN448 mnUr& %a&8YtƳ`y!}5l?Ͽ8y L b OKTH7,72r/~{:U'ɝ<+ S sYNgƳ⅄?Ơۛóg.>ι4%ϣo?ixcH PD=@Ge(`|/!1ȋ^ CP-bm*]kS?L%B9LKG#Ҍ.J!e/LnT_9xܧ{o, 4.<{ǃv-,`忌ܯf<+cR=EK}w~ߚUoeljg#?5XgZ5~nuL.e~DfeJ6nkJ[y"{x.!-=Fz uq6\ں6ж9aK{==uiQ)hY2x4yc]غQжI<ĵxUEP!eĺ ҍbBieL:l6 N!9冞e%kЗof*N o|w@l5N'_dƳDk"UvLgce_,'>MS(xziVqFqsߓP/^QجQa/~(;oMa ,xN0'35Pw)g â>Ehro?&aw~p6߻Oas8?𳽛]N,PlN_{ia3NWC=RBj?G.]PE7秕|r LXĨNgn] bYʌZ:4䍫h%5,,d*֘aܓʦzeS̷raqW c}I]w}T`~f7^jK^MgÐdEQΣPUwG5ŭUDΒ(<א ݷ(DeQ[H۱-Hō璝B9HW"-xw!JT+R:l(5.huqJ\Ė;oP>^AY#X\XC*Us٪\.k]Ǟ==PII{5= #(jDy)zLK#X6mLå r -ptB54hS]7T9;.S26*1ʪ>aLͭ6 y*ZIXCMbVթ"֭pS)gYڑ֭ y*ZIڷ\Q}|uCܓW@55> =J(ާL-]^ tDdj,',q^[4]k'q֥"k j{jbBi$ڧ l)D&1u׫B]HDT]S4ʽb-OH)T8$븆sF%.`pc?M.:|56O|<{ZR R!Y dNP"8Kt6>t=q;Ӫڸ",pzJh +ؔ7Q#EF4>48IsHIKk A)s'n}dsگEW 6o̔(Eu vNscbJҳ,/fohY}`ՆżzwU'f|HF"4/xkC1? owGt4Ζۇx<:;O03@4Lg?-~n mD ="a&7g?PI1Bc ~}GoJCvTACB7+p)Olk;Êx5CЖzABmw>x_ޘ[s TXO̟9~S PD7&:;q3*Ƚ7 c'SV݇ϟ~p7VA(QTgGǪVb^U3qu1wyꐫC5$A.a(n _qh`"91)ð6!]:1Br-`Z5k rȗ6. 
Q"؄p?З=1E`[޽jN[\ ۛ,h sŮ~ ^jY,\ߒT%-/՞P%=G`m^I;ud(WNŪ@Pz8_|fY`Q+q`7K@2ސ6~ M) sx⑱,VQk!AECPbf07އF ^L,o8'?|XUiQ3k냵8T?X]+Ì#KhyndA]\_F7?X1hݶ y)dQZ[5O9Z `gVoL' \ny$Q$ol*jH O @1b$U|HEk$%o?E0$Z0,1 oyF!nj4yhH «ѐ>JPf~GKl9({XGfL5h+ʆL0^?{Wܶ>{>R/8^TeRisM '٭R(@HJ,q{z7$n]FI܆=19pAVF m.y*bϷbcތ\vU;e(=P6_->ew/YEsa7RCt[o}u[nBƱ 3$:ӂ[4o_;LAƟ0Qrm {H3^`t^0Q)1I~_ 0Մ|L,n7iׂyJ{Nk43xTWO{ӡ }QgaBDPvO%EYoo|l#blڔk~5`"N;;k/ˎ Hͺ+xQqdexa='szf嵮$kBqW i@dȉbXrw-HB<7R^`lĤuπW Qvˆ/ުj&sr9 =Kd5YjE`˭kJǹp5HupR";=K[ .oci:a;vG6WwC!byAXf& ; CHs"Q11+pߦsޠ,Gx곤HR%^\4ĨF4R+b,+hfh< VhjEqaphMl#f(ٸ~l'9ELX(X!i6dh-3TsE&yo9 bN( bNT8Lx 0A|^Vws~B/Go;I#"&!*LApA`HLs)U hR!У+r#EAo7/ Ql\at`h4cX.0Fzl56_^KváӽVIFm-8u"xGZYw**ABrJup)"8qaGGռRPu%'b°W5?O$,oP}*O:v3%IpuWuc"7էۅY~J~XVH|8_4M2ۗ|sѥ _lq] ǧi\Fm|/BQ*=&U](eQ. _?은z6yiJ&KIkzfŠ ~19! 19㨃S̢sf^]p3SBJדxIUo1,ufv^Nbe1M'a^֟Yܺ"0y34K?z'7U 8# pU)~?ٹ?/r߱ d V@&WM! [Jf;R3S;jc) :34+Ԑn ;CHwc 0 'Rɡo[/pqQU6n,"`{)[pTe v$WH??Vs.) GA`%fRӘGU3hϑŧSLG3DC+RTiN2,N[Zj+.M3FaR۲Q|ƕv^9R=l$.wf6@RQכkMHH2vL:|YNOv$5hY+u~sYM_'=Ҩ,2&=eo 5ǖP$Ƅ=wM=0g$AHcC&D-6c@.8j ra-l|&erTq,\ @wFp=̐߀Z;oX( 0|C k޳7prC$Z ycT3  oIőؙ S6I#y_Ą4iG *d `y5DAQls(֊eI??)`IF`+CPR#+M@:2jD{TJPxzp`hW#v^#|Xà݁O,$A 5:]ly :8@ُ HELu)/ڭ?Rvk&4 j9 `O ΠNu2(Ay]5(OќkB* =OJW0VY&/r2eY>_rzsЏ3sBC4>hЈ*9 b l8 ZRu4 ;=)G3 c7 U+*szW?{6+)S4M 9MR n6Ko @9ڬ#1L)QF̈́# !m<1"5_FHJ#f|3]1C0Q_PWihﭠ0jn6;M6#l4aS wSlЃ{$r98-O[C+vHy)oN\dA r/pxrw1G+6Y8cl31̰|!c4@Z坓pDN*B+=2GyneA5_<8IœIh5td:i܊ J8䲰 Gdbn9B~19 2qQK L KJ9\ ΣG&hK$y,Rt->KADe?!3^Mt\m2*| BߕV33Îߙoi}sh^$˛?)L!0߫ɯf>o;=T)쁣0 0^?a`^օ贫mO}شfrjl^}H%c~4OEozu!0 c: 01KEɨNu/k#C02yg9TJalV \(Z䴦5r$Fv*BXhHla 8H<6P#L= )Ti+"QN%RpFC$4Y!{(!_R{H hAyhUbS_дchLUZ-UvŠ侣v<ŌÇj.$,)V/>JP=Ӥt˞'t8Jg \"E낄٘ b+r\  Pq(cťB)S&рʀ)|)BsNGbG8Jc,Hۀ{ PҬ.Lg/Y64Ք>]z 2X:U"X`Gtpq*!({#d`dX(c@H(00a@Q$Q< iU4Ia>U (T`Rk{0j/\XO\je v;w?WT2hƕ 5@*6 FsfLWi(h &Jm,݉zi.$맰\V|{^ݮ/ULqS5|\z7v}|^"g$fx$&)DURg:|H&&E;*Z4yFE-@D%(~6YX6&ydtU GB:ڮg6~0ee泙ٮތbn1gQ͐>C.Q%k7_/%kr}1)V1o3>ZC룍i_//>l̬|m^a>"LjTNwuZ^0',c5 J?c!uz11d`~9bn$J:R]C8/תʃW^Vf#FNUVQ[Qr?4'*QBB'ؤѲ&W.LmӐH*QO1tﱲC{5|Gx'%SRtc V5X@ FYFXnW,ʘ9rZ" ^Del&3ո/ `RF``Z aal"8C5IY\d*)4p)hDJ,DNÄj$H+a6Ck|$?ɗ\Y-!T{HXe):_|2E)]J,ARmY^0<&>{Nj<}8r$`kGIfjHYn(3J|F ї_Ffa﷜U!|W"gTn 86uto<ŠKNy~BBwN&D$8p Z oM9n/\pEB4^ ~"q1^VOjS%DXx}$JMW'^Y_[cNł95I?BaS $D6t^hg C1l#'ۢ8!ylnFy-zc6Rf®T"%S?I1p>,ORJ(-SF9Tk%DMJm'  N*nBMFBH|#\{ȕm~Z+C:ĝHx*E>LK-3`=\N4)aVI T>\txT0MbQujn:z٭'_[$9!^ ƚ5AݻrJ jDݔ* ˥|5PZehz?-1\~kݪf"}J[ {6L'H$+FGoz :G9 SeHQy*J@GaʼnIuҷ!@vi(8 ċ%jB;Bt.^Oʔ# N:#LtJrqxpzj+zKO`JvಂugOi{99L)ǔt DQ먡^m虜JkFwOΔ`G䛱.Uσ.sfyxtmJ>Wo܎I}.i$/{I!Qt (pkXOc臈wYM$f(9nW`"h&=h{?)oBZbc1ڎ $ &5:{/ٷ(F] 62z*Lc:_trj*X8bǨjMI~mꐐ.Q2żT󬰻awuawAĎQewkmIļ=||Yݭ y"#SVS>1 -!;F턶>4W ꐐ.12=I -!;F>)n/6,\Vp)B;wۢ"/^>VtGM_Ŵ{{@8{/ ~WJ^ ܥbNs% kz;X _o'Իg'a;Nw%JNN:W Nu%`NvBo'Ի(?C;s'vB+Axz;WtպdjjY SOG~y|E-#/$`@N"tJ p{zdeaB | ^Y׹@ZP\5:w/&v:QŅqgjՍ$)ޟH?[|4 ]*Sakbg ͝jY"YT *WW;8_8= @6qj[aۅiƼKN4_k_;umŴEC-@( [mѸ(鼰$O?8dvn]\?t']Lnm#L\ mֳS1k7,Y@ƝˆLV88vÇrsE O8I3TP {<𱜆H4=YփE7Msn2BA3FY.Z7^ <Ǵ91¯}x\ ƻSV@˅>9M9Z}{`Jx:6x~uwKԋ(f)K .4%fooh= >D9E-9 Tsp'kwpBe݂&mVsJhq, ~<ɞ@%{HDk$~]mt Aox}&Uj[lUɳ&yu* B^uG[*|fNO'6,K,O_Daà8=6uZ7v/>F *޽IA:5Zi^]kr٘zSm:[V ygDZz2 *ز_}d7ËG")bN|MS+1Ts̛z:O)ý0ثS{~p,΀qB@\HIr}]H+ {1JkFz;}ZY{cjn( w6/vr I%Hƚ@ќoXFEfۀCЇ<n > _Wx2np `s7ven5[pStoP[F941uXVS,Yn^i@'GO[H[ ZGj8oAh!,59bY‘@Ȝ$e 2cH^$E~S7@Z \\ ;]o:0 R53zYc̭vI%YDcS7MfJvg,Ԯ@PyxLǣ`_>")Gfve &Bv؟2~gz鏔PHSWQ !;s2 icnjP yg]z. /͛)GЉ~S:'|Cқma#a>z*֞?ϲSAA^3"{E ˲:i[Kkv0>_9-骪1 ,g6I 0I-ApjeO'5ҋ." 
c&/ 5O E&Fф,MIys,CP%ɇn!o8bZ< xwjn2࿗q΍j.ZB~s^A3spYmK/MRn;|oSnͽv.7]*IrbWeΰ$MM,,7ٵ .Šj&Sy (LJ_n>WEkkRW[/z;,7 mz(%ٵ sf&~}wP4L+ f^\v4G(3;Z_G9r%QZH0 ,C2DFKAF$\)ռ*g?Z,-SH\g$Fc0@sk\ ,Į@ ֖:Bp.vk¹Gk?|,]ŵKvi~ i*cH(Igz8_!JuГ 61g%6rl,TdTfÑfus\6+dpa,;T@ fqQV>m_J}3 V83lKE UϜbgv"DlWP6.fWci#yZ"U_}&CZSґ q,Bp`]&F4HLi1`2(*?vW^%OG]G"o쬷2xΣ!̑"qB|?b/,ERAIqȽ Y@Tj! c{ y1͘!b`q GP eq ڣx**RY,qN $u}=%Aq͆rwDFh)N\}p0w~ Ju(E߬x&HIK)iDR,;pF\ u~8??a'iy2*0~8V%phNB(M:D`4Qi4FO&Lj#/-&QJ:rǭu'[G1'(`4qincjilE-QE)_\,0eT@UvbI_F =ЈLb g:xp JrA~Hru*O~| ˛h'W>DtyS{MFclb}Hiݼ1D!^bdU>tq~y;>ǧOư< \^Fk=F`<GkAq) TWٱW /+8T Bq1(ǣHzpedRuhPȏv0<#UTLx*MPt!sF0)ʰ. XcS$0sXF=%G3T;`k17K#(,d%2rOS~> ` zQNtc&-cVڀ8(nk'@ A3"~Y\N;y33-hbR]V,93'`-`j;P$@ƒ!䉘IuKWy'7(Gk޹|䓒6RY|W|ϟ,k1daR]h>H b+-qM] %Ɲ!D;&7~\ ]S!0b׀=W.,ȀϷ0[TUv'8L~utazyrL*'-[qmd FlEx׌[kxw"+K$L.b#=< =o$Hm TKZţsWч)TRҧt2+|Xfy 쯮}񾙃 _'.|0/o-Qx@YlDUsa%jHڠvXѻnJ*"#?L)*.r4>}KZ=P#C=0o4,SSuv%O]g? Ȕ:xVcE1Hx%xHf}!'8%8,Q;ycet1\^ bТT0y*M4rZa(PXYD$އ Q [/*H!daZ凲e Ƚ LZOJ1ǶRqL߮N,%#3!BhS)h@"ZIa"=0 ͼVx}C]_orJ.fY9h0JwAfٍ[zJ^=!`]N=؋"P&o1H |)R;=Oza^||1 ]?BK~̍]cۀUlâDҍqOr"BXuYVb!kZY󿟻J]QjMu-UTJat@iL_.}ֵ\ .RΧ-pq.< /N|l!Ihg˼Q*x{@adh#XI)qJIS3zNЧ۴wY+>8[f6x;}rvN춎X uQ}֝ff4'=5$٨٦˂VJ |=اg=֜1S*fYDhsВ[+N埤3K3"*M"]HC SP2G(CJUNMPO58x(*G`Vk^9=zqzߝx{%=AU!*`DYxGqy$OnY)0HJpTP0n *H`/}4BRCS '6Fxp2|N[744j}jE f=&aVmKyAUmtPU%P{rTe^>OZӣ(ў+*ujϤXiBNJǁPJ;ưWz|p$8ӑJmW"sX+1L1Xa}ce K$($؍Q) 3ÍQ2"4HHC tKZ.$iccF=kZ/XArDž1v9 hCU.WZ=-Fygœrn-G&!8l,nS$DZ vc# cRq+qe1 ED"NT F04JmhRr}YVDm@L{q9ZaڹwX"Ίz3sQB ? 0p`k"5۱TTI-@ȆfD NJFA0ew]FW(VwxηKfyۙښaـ["):v YW$ʂ /oi ן i]ҹMn+MSdogROY ׼cȐÆ3'W`:_daj}H+QgHýok[8l>s_y'_Yf^r~5`!}*},ɑONdu7tL82|j }ݽ9^}2T2,革fK[i;$Qul k=, wJ6'i{N6ހ-%Cbk{m}5|0$7U<;}BJ{HFÔr) {H -Ax<abH']\s_%z=#B[ٝbBqkL9w .e9uJt޼8 `R ۛKsVqW3;Ng5ݻ^hF)(53;q檘Y"lon.??@S96,*ׂ-U L/tmHcEt56Wcm=uDyjZۖiQKM[=bD>Zzd #=+*{Wn1ںQ+7PcSE%V  ]7 lh#nt[Jؚ/Y iȼY?gmϭ Z_:~FP^?& m`t g"8csęb݂ :_ | 7nY޺Zȅú壭3?];n4}w Ѣu'1opY8,+VnL·! .x%ZZП ZFq*Z.e;cO}=v(tA\kSp<]zFT3L BCS[)YKK[Vƚ(ZUH[)BNE D-iH>Dхt~Nx's594s)T3/ ig4R X9l%q H&h|\]gH+km8$i|0(p@ZULWViFs((x=$2hv R$[A2ᔴ q0(A ΉrP* hJ; x=.&>%'#>xp%u3 HZ/:h^s(Z& Yw{7/ |?%8HNƏ7%8ryj́AH(ףe^6f//մ`Ӈ bC쪎x͂;~^ |"pl#zhr6[!_SB~ܲnu f䶑bN7fݲ;k!_=~L kG.BO#)rJ\9 }h<",pyPsuB>ߡiN H,<̒&{b42SFo+6B'ʠHtfd+c@+F4;!cQ̇t 1ׯTc(1d"GCZrz9~Fs̞hmH9Qrh&E7LMIZjqޕz5 Ր L&EJƒTD3Lhԓ{؟N{ʣL?{N_ɵ'\Ln=GX_F ;VD Ƿ.ej@JNy5Ah1oIeS.eY9 *Xe-3Rpƅ)(4#b+1I|eKh2A[cbS:G,yRPC*RFNKèD$SlцKg `"(Syc+pb=rlLY!<%+*9,rTRakK>zBF-hr -lއqh3_+SybHo[XABI/:&L!s*PuD̕5fny^RNC5ꗛR $ 9bÒFJ!-ԠS+SBm|fPmu Z\ bNn)묠Zfwڑ֭ѣWX)[y}̸d`!ITjǺпff~7<^0%_Q:ϧ7`?dORϋ8U_8U_57 ےTɊ[c `7hì&,2Vbտzཚ`~qeÐ{ﺌ]"p 9WeQLJ_/X!*- [XS?,j^ĴqsQjx\f*a}uS~' :*ѽy⣙\\ۋya=z1o;BlJev>.Pcް44Bo+/t䯆˜?h09X(Jte8sGmxvn/OͥǛjeN Ep+ϯ'@AVLW ʝtoVol8ӣqu,r&EJ٫IUJ3(.!F/d0~X/S\F4sc(xww4'BQۣۗsjWĽK9 %[ÆG~N5#SK'Q;!6zp *bſm~up$U.uBklyQ 箘4`ԧr8JTwǧ"c"mk]~)qKѽјHW &fݭǷ f MSq0m]o2ӡՖy=!e R3 =m ray46($Pܑ16]zGSDB`H3"dܖGs ˸ޜ2}uH+]LR: CHzu%Iݲ>QhއZa]MP!!DE=ݻ.wV2L֤ll\q * eW[aO]YPs6Y]Z?uSZ.Ay k84'dіV8\*wtOPt5n2]b";Qm)G`lZA.%ȌjܒS=\ bfrH!A: :79Ѭ1Ոuٲn 6u f䶑bNNu-jXukc|C4cLi@#+3ƅn?xÞ+$5ApzѠϐ[FV#7fR EH5j0XvqN -БLc  wX`uwFrC@&uXuoD|eЀ'Qޟ-1rle 1M_VϹ:(WR۬+&LnzIkz:Љ匶e*, U n4+Rz~1l+aGL\i1(UZE CGWqci+&(Ӎyag#+Jg†kOC-( liC ZHI F{,qt++TrX*A~a;ĕQ `R.7ִQǧC[>/V,]}fl|Ɍbõ(68' E,,7`B) E*A7v}Y͔F7ID9ekws6tJBCpd> N|4\ I$a3FI;E/C[KB""hJJ'=%ז,bdz;lrIfcݐ<2sũjh\6a+\"C#~1jb޸nu4ãeXF{~~6 ؈Bxbc*S޷k"4J$ ם%CV2<ڒHӖ i^hKimIN~s2>hK=Ӗ$xi7x=]MYEm\MӐB?V}hVӽ+p-$h59 +I숕$EFA(V>.+BJI[p8dTHVCRa6LC&Ey\0N)ptP3;82ǝb"d< z;GtAX_(pOGjkuLl^ p=reK0׳C.Sxɬ|7TP IJ~__~{R.r3-LWA+Ny}v8\j/OϊIHF?s Vh>A޵q#"圽<`='F$4o @8.OQhZVVb$cC*b+~~Mt~~`ȸ{|RjvښǛTJЩ"CøRy5@#<,NE!5[2䀨%@dolL! 
`?3M)X@ ,xg> {6ZaD+ڰo(xς RmI7T47\6R>^=o$= b^=?BB^zԍUxP2>v:~mr<ձʡ>&+$z@ 6f`9T]LKɃ;FT]'BIxoT]mH7.Y2%h7v EtrhNihn[hLI6Ts-"L;z~.aJuuoH{v-uH0N7tV9n̍Űlc/oƓ[آwWM̀HqiWW6"،GȦVQL xs LĪ.JB5DB)/3O6+6 0*P!uVε+r\hBvJMe j7XA-.( ѠILȤq,?AbiҢ5|>653D0j[K0q[ab1| s/"g"^ 7vYP8cH(sb 0NBbH1q'jwWeSՠD#@KAwXLpq=(/ELTaT m}?̧ pƍZT%qe, &^Z0Ea# D nnWX/2r<#;o`fѠnjܕ#Xf%_bA+z sYC@@n@6fS \>eKlQye]lsq)c Fol>E6VBnj h\S "l\2-T1mGikesw /J0 p/J-wlu9='2&#  9s ôJZ60OhA=`,&V& ykFvdh$AWA$+mnB?zӹX!]q7&䂥ӍSx,e n0O e5-8!#kEWE&+-L04Iu-->ʕ!=_ƈ_ ϝ]8brT,*Ƽ,ec% k%`@aDO0OLNƄg/vQhg-6Rmʟ/UoQ f,4Nqi#;?%" 1Xiky .!Xz ۊ1.˃6Ed`%(E`cd^7;$J@-KQecqYɆZ(p]I}o9`nw~>*gw˧ aG% v 2"Q% w ovɀVHTY d4vT¥Z`GZݭT q> Ō &$N͐9P?~r/OlYz(jgOĚH= K҃aM\lq.'7̾A2Fy7ڣ3j:8sNnn(}lV>1`)ý{d2voݣJ#<|JR0 DK F:%Jݾr<INw#و)<_qt칪$ftGꝟQu1liyWkk7l] nU ŰOjs{ X.aˡ3 B!2.EKB/pedNP;`8;YGD=-DKɶSGK2$ ۍêeឦp }c4,%CPoG$CbVM-κmcBSvv HNwc~)&-܌()CZ)8Tfoeh L@0N8Q"Ev܅hi]gL jX8&J~I\M nJs՗(ӕݡ2]`~ɀz8QNކK…Pa_ 5iҼ Vjʅt͕"$&,Le w`v+?o,$ͅ3k(+ )!ޗT% @a `LKTڠ]ԗVBgX̵q*.?,ZbfAr`sʦJYR?ӲH@vό jk\@DH͔⒖ SA`΁T\w$`Zvl{(jIx7Y :x'0I7E"V YKߛN DWr[?wY+Z`EC{t+4eEa=X=cCL+9{X0TQPhIx\0I!1n2 7v*Oé K#doQ|=&Yp̤T*F@5Qr8jwѤ^4s7; 5ؼ3WkT%U )ѫ1BcF?WA0܎&&K)}݋R02}YS}Mc9=R\}y< J0P/ Xm.P$},@up74By37B~:&xkMwaFe-//wU?z+[|;ƒ8s7)?nf\F'EZZ\{,-5awܜ)`]^qdORzXO\UsnGBq͑)ZIq&+F=q;" x YY0Fqw1DZ( jV nhbZ}X\&B?^4KĀr:}؉|yT2d4鯯|;#]LI 㼘ҧr:"^#>EqF^Դ-&/f0~BOj &ןKqshrRsBk]&GIM2%mTx/;{$•$&CnL!h\ RD'w&턷Jp{nMnmH7.Y2Uͧ3H] b s1H1hF({3Q!!߸fT|؎v-v;aRfO4U!!߸-Ss@ҰDբv_ө&aU ۭ$,`:zeO'v{9Wٷ&/̗h8%.iAIZ t^M}kk_ױuP"(A!5ե'[M"J)-VAhII^dd8w _/K%XF63ؖ2_E?h`JuBz!PKʐFJib<(6pI$KbSX$ R#w,h-Ljƕ nJ@r?0 3ȥ^z9z"rS׿|(ZE]}C&WI#.{r»eS\ )9vͪU7"nVEuJ#=F{c)Cbx9zbOs/woOrA6x{B ᫱/6M?G[upn歖1߭^K;Eɧ|n)=M'N_QQtTa0;ϳ94* \20,' c;z>-)IbM#t`{F!Y+ .XB ɌT&8  (N"ϔ%кքaJUs`vpq"budKM ;wZ f(SB!u%3ICnu`Q Ĉ,#&\ KQ;1Ou~~ O~bޝ܎vNr떔їe,iQz ԧG{fQKWZQYPa5砢D9O7^u '#fe+JvoNi/RAK(TTs(b T+, 0R1 dSly\@ *P'XfTw8s(qb G_ ;l=d0`,YEl[փ)E @{/Q5T*w g - 3 К yFXM4v})9X" C%Ni xs)ig< =et* CNi`.5tQyjݟ$Q%v[f@zb|շd(^1棘QW>rhɵd/kZ'oZ C`dR80@kM꙱9p(BҬ%T<~ !A哠IwdfXrHV{5OɊ7]\# sW >xM`4:vܙ7q廔F;0zJfg@F -[{BnW5ǔI#G,ssc4L<#s)$ &BP7fLչ,ԄZc2Er(T]qso-~Yn JjƜ kD8Y4*$tc9%~ UW;vcFʄow8tv@c÷Аo\EtJг[B0A6_e;g1N,FS(c;UA`8xiАo\E D_p:yrwa>B(4?&5*W+qk׽A0^MG~d*Ί #?#?ޅG|*<;AQ T0*2 hSdHy2LaC`w03鹱`䙐bȧa$ B$9.Qj[RR  ossp=O~p?٭k)2u2˫Cpdh=dy"MH<=4b(r jqFUv P23=(FR AōCE(n nsW1N r??ĩr%"W;)A_E%!&.gTr੅!vZ$G3WerI#[1ASL\)IAm.'p(@PVD\}0_w^ Pjfr,C(%BAF鲉*#AUغZ3@jrK!0Fk&r?U 3񺙹^0(К+G}@5cwNDdžȐ a{?\ _q׍{ zNYN;?ES.aLNbL /# Z@ Xln xFG|̇P xʝ$n@:YX9̅!%2&.dBqHA/xA@'fkDtWyn[(zgj:ƳHZT,>Jqx7R39cX|9E^4WK/-v nݼ]~LGGw$ڸABg7}w$LguSY})z.E{w& ~Itg9‚ /)?t㮇„HAq[5}w)wݭ89NK(>8!W A8x{&pӟb(2Y[\"_Ne wn XZz((p8}^K)/6'/ ,r2;( xMZ靯S}.[X`;7|I8>Oؒ&l(6tEsjy@-tk30c J*50˩̈*8)J:Ρ2II}%xpgwISG?6 OͤbmPh3ﳓ?x7׬8ཙW8ygݝy-[#`ڛU"E+$wQ&ntہ L9|w{mw4MzqR+SbQW7rA*51-!<j^B.ap.sTF 5Bp:|UFuB᧌3F\]Np>IeHh=>k2VTur5'KA4ZYl=h箻`Kfݭ[gq]R/qu-K)[v\Gi2 aʛI j>k["Rq,m"AmS ri?M'q[)qqZnpܦҞ;o- 1$(} ӗCD Hbr4i4zaI[Znkm\7KWVk^6 Np}Syt.xWa{]AKA7h9/'&%2rNud fFdHP2C$P0& LYr7_`ACP!3k@T+%upԜ^ny8tS.e̚6^]DqUuHv+N 'I^G~X)hdrUn6u?:ntwҰ]h]wN2v= ;(nO`ƬglHJ,UDzMAݓf HwK]Sj,|2!,휜Eci a `(η#'G{'IgxhSonIBY:$HŸy'~pzY'oju uKjg[R3F_l8pV{%hP_)V u]$ICmQɣ^ݹ]{<[^nOy^+d n73mo["~@Wt.+=E.}nVvF1ky7\?VNϩg*5i7I:*N"xۺ~ZT bT'2֭}cdY'֭ U4I鿍ٲnѭZT bT'2֭7lSZn"[hNaOXHA΀[WuCCqSagȎ3Yof2_M#fsF!͹;Y65snЃkJ/'8( 1p9FXʰ_LHz9 C͚-^NC 9j"b7ꃙ;e#{AbC1Ą׌sp!D݀_6G@/j e,%IZlB!uqp?(1VD^*8#'/6AH"]tx \=)Y{32ʻ==ƓEÞ/P2Q>ɫ0{uTbW$)5,fbHWVaEJ1gX!QQqL5H+)h`EPYw84` k)xgܐCH@dsŸΤPX#j!ʘZα e0ii4-x$rC2 R8d"3PL+e{# 1_$'"ʰ@Sh.VՏw3ـ4dOw]]]]]Sz#1`T&ԘH-<&HP>Sm bU;[(t눨bG"$}<]]_ؚчqa\h,6 b)PRBRG:^W~JKǨ[xq@g21VQg ӎG'ids<,|CQkSV#)5~1.Qjͥ+ɼ / 8A` HI3K.$bOV=K۠=2rAueA"c0¤VX+nq-3ZƨMRV~Uk@ⓑ U~"Bmefvxdl-dԙdcDֺUX # 
<~>;rDسƣ#*g*ovZ$F0sE)+29V|F(,i+9u"ws$Bg>;<ΒrZ^_Yݞ,dzaGq(ًD)-N޻S=.6Yq_[tfj~8nuo~q?=8wU^Yrq5bpέ>fԣig;μ0tҸ 5B 7+{5ppmA08蜘IW|؀Z i-8e5[#3CD`͛<JcoS7& n^wA'=]ٽS8{I38utk8XŘq1EZjT$ak b#s)MlW#%{$b7Nĩ{TK'hZ %i`^rO)!cZ]𦽽[ӵ֕ dx Ԋ~6PIw'l-F\CR wBHKrT<-;}Xܿ d?b&BP>S~{tTJ=bI,tEjs>E !nAݟnoa@% 8I$|_}|;邸~8 Qq\ﭣ>C ASlj{zWR\_o s2G ^U*'Ԓ QZig8`;AABrJ5a TK9'7 Oa&6ςCP_nB-IQR_&-穮%b_f)<M?]O}~(USO Xo˼3ֻlb4Si٣`ؒovN]˄Mޗ1ƨ?nPJOdA3DJYT(xb˚vhWYe,z\J%W&~<77TS.(ǥE xT`q"$؟[ѲNJflH&P_nB =nۻ/ץ?ǫeUvd^H;;w9E ."haE V3Xc/vd7L}9d?pΕQW1u(]_TKڕ_L YNjo~r0V&VWjuUقV:߄A.stzn3*I1e/ڃ/jgwߪܠƊ%gI5Z*_圶j9b<8cZgOmr_O;.=zw6=oY\%WO"BG(uT[h#~ °T÷p|Ee>ܿx*,`y  ɭTr)-k=ν,uxU6뀝L-!lR*L-²ض1g'^׈59')HEC1-ru&NbY6=pV]W}:FPd_$H?CRKĝ*襝Eb= .Gs O5qRȴw!6ZHņS=I!Kı2AGp&mXcD*m*eQc#&i4*HLP! % A8"u$r<ˉW. XeדumI)[$$=W#.0ԗ[PKY#k4SJ阤BXڎ-|(X{/䙕VC^ϑĔꅞZu$תbmXb"<6]$g >pXPcY8G0Vd8?| F'sjr4Y(U}6eٷ|xưBǎ,ᬫ:)q50vPu^dP7hJ ӹakOD~q$fW9~T.`܂UWr;7OԞ5mtr(왓x坳 }& -jfPp6 YX]5^?#zNIx91#1gx!B~;o5͊&8]\!0gF}-3^¨b5R*@iQyoH7\2PP "(3T4D+%Rz묈cEq? $0d镍P UhM"a^Ñp J6Hj"q;)b1h^RqhYd^yxe&)L+OE-eyB`XsL&p96XguDbz( O/&fs)>vyϿ*|5F{D;t_Q~6 TalA" ,`ijіScNL .0d?K8E+u͢Kb&* R%/w T9c0" FbQ ٞ/2i1'I9LmoBi| "f( JPYcY>lNUWa=7ZbT5© zBPTĀ\ɀj͔Qx"-\rPS*D!i Mfi rAf (H0Q S &ȠPty`RB+h<.!jcI)0V2j`͆-uek8F@Gi `U3)VK %&ԙ~UFgxo]< fv4hNȿxD?җ=9YwM$~'r دyy4F*wfF #X檱Q wM‡l qOׯ^6xX4&:(e M"OC%](U#Rk$+%p$>F!iT妄ۛ񘥜uP=f) ޗZPIװ٨|:Du'WvEy7KQI#g6K6 MWKɧ;z>ӛ~0]8DF<[gk羷z$흳"Pt^W!IuDMo 9jM[\ 2h5>q̥Z\쮐E LkJQg.7^ KTK\>^-\Kh{zܻIWjz3߻_Z<>LDm%d4+|?Zɖ]cU\u߅iXCT '뼞|$&Kuxpg{a3fCUX9;HB~"J(:sz[JSj4kn޵[H\ցELarvӕ:a'nmi#:mרNyPZ[LօELq(:*3ÞDg"SBg5i43Ԗ%4 A{4+ 3',2W` yByJHQZ)#7)#dCJ3;fQOp 62 %W: HU?|WW 5$9R{yÊ]?#5xs |dd,״`ʇJl:L=6!yk<& g,u71vjFc粩Mg&YܞcD, i Ɣ*w  L<+,Y( {$j~BQN+,.Yٳ)B8`>YFX"ZR Z/}hJcQ> UObUpgm$X0bVG.Q֟wǩz Dϊe2BB;-p+/ Gl8=|7By9ke 8jhݦEdF.o攄@pf4#pkl$w$ے J y>s^FЬ` .A(dM-#kH4eeE= aAX-uڷQ΅iXJ!ÊWsSB/,[3>4N,HEVcViKebU1"$4%Շ2HB,X'!x5dȏ_ɬ֖d%{{{Mi"TqHb&BD.ZD}b!E&]L:T k;u~t:wA݃3N.zGxq,3ǟFI$s_U ^yhl b%TdcHUZ(!ɘkSM^?4 ~FZߢVӫѣQElq$R%@ $!T&I L C_.-sJ.y U5)hqībr7,nq]El/֌jMzGJKpQ+!G%84L$RM_p,s-tP zsڪ)ȹZuV`02si.bTbc&!Um BC=7So~ɗ R\ͦ?_0 R[e&p&w uͰ5øGդEH^эE%}m%E0cQxWqȰN,EZ>c=p\`[v%OWٚnLDbH&2Eci:άR#%)Ů<Kbo[E~\E~xE@;skX-"U"hm0Tl! HgRI+(eOkD\SecR2&*'T;x9(l9a 6B11+J%jpQ E$dzzvy@^>NF&<셅4$414Xk眉WHZgLh'>ƒSV%0B^?kOE=J W>VzPN^6O#@vWP(%RVlhb\H'P!5DdDbPcX\;m2c{ꯪRHbZZ:i9N0odqE|R $n-ث0H?ΊP򎲗*J˨p'kM.xiJ*Zɂk%H=NR%BdHECm>Jk,Š,ߐʅt0 刟XBB؜3ǟ/n@ )]RfoXa~bq$Xܑ`^7{ɏӂSѡŌYb8t#$%.fF$&Z.0m 괲DXka}:V1TuP,Oͭj_Vjn-ɜvTZMpf_ ܖ2:j%xpTTӐt$Y䋥^ !`! 
aoYJǙqErw~RN)xCO{-Ͼ-"(B UE&X**T%뫲!y8}(y!}TI*aYk4H*Eb6 Lsq$|Kw3ݶ^2^o_=scV;*q/;q/Sft6k2A߀p4g:b|YHIHа{ĭ (&*X%3QLF`3'li iς0A7^|'O{xtR04+F_?N|Չ6׳,Bqotӳ_]>wMuq3 6F < px(ᛀXH3 ~ v0*g>3#?߿y}|sxrfO_ӎzYz?7χλ M\gVo.oxNm[Χ~<ﶚqޔþA3;dW6i9.8vx?8| R~c#0hm"D6]a|;>\ャ坃?'uW_/awG/ׇ<:> eħS=ϳV'֭NUF_fro/~}5+VY,d忑ۗ?<~(8||u5G7J'Ŧ^xV:5pIeA=)g.c~1b7O[vo(95 -CSPYoV h3}{Y*=GfɆ}/{gTN%t_G&' }zd2Ƒls,{ܞ:YGz;RF0u2P|g{z(N﴿ˇ:߀>75?o_SGݻ/5[gƏ5t[^V^?JMײo]n6޺zz;w.ϲ>_O>Ձٺa蜧-jzFîw>V?E?u^_4ϛ4~͘}s'sm^.BUtCF/̧au5 F=8ۚk&o M^OS[@$o]<n2K[˺EgH?C3v#Wz7aV_7nD64_ƭӱә^kewasym [7i.^&;Tw~~U`F>R?]ZZMv'wv˫^}ݼ5/-b!xg#L]ę1:D)N'xH/:_̀(7F|-xg7 }([$jBXݳOw+)ˢ[]=D#QZ%D,dz"k:?} nA)V}h$o~vێ @>5SR̹H@ "n_5^5F,k7J)HIׅ@]a \t+D봥:˖@*?Yr~^Qi{ZEXeW(AxD@P6E)\*B1}0]QOl)zeE:jVFWɉM7ֹMunSs&gėz0DVtjS4Sx:li Ou܄sfN{z iO#̧5f>}2̧:|`si`L?1O/.DSI"Q '* F,5X(v(+ UHQb1uN;M*$sʐI(2 3,pHyPd.Yju̺|fg^K[\V(U{hkm}*i*2lkOm=>6='*ZuLg;gɴε-nO_{p̷Zŭ_θ͝ǚs'ڛdnεGw]:GqmƉtWD2gCʥ 5f"I"ɰE.y;gW(#1-HʧKaa<,_\<&7XA^&#@[OodH'P#nqJmMbX|[EzJigzZ>F+ehLQ1NJb%NZ#zvGJi >}5 I %sylcDXkayIm=<浖+SQvv_”t]t*Ɂ0^3Gũx9T)ngزg8ѯ_np7zژ_zrCo 4濶B|{K 05r9͌ro&܅QJFX7d`R+qC$иDN`Rd~W"@f_7Ff8et5-> o@@SJ% ; ηA@$al,A2o7vŶyc)RИ7͏lA,5MYI}-2.U%%% :]Ըͪf)p Vff"R󽜹\ ۠ Wɓ̱H D#1 ̝EsK%bcMJ*0 Ɖ% ȶ&6AᙾOsLIˆ M$b-J4WR;bKQg,aG9Z [lXy@b~ E#vj/~B"p*0hֱ*F_}aAJ4ѓ9@KIwbX`\cD0@ ul#H)$xPpM!1H3"#*vT9#A HL0bd'҆pPDx5톃^( iv BX-Cc&4" F$t/ em(m:=#eqQB9a?d#4pc戍%Si.< ];6Q phj΀A$z$YYn6lid܇6~oKκV,%y(1/ϐ6fxI")^CQ.yk#%Ԉ0&0 LvH5B.aլS=킯{:QJAuxsCH %8ֆ%1ĉ`(f)X%Rv!>xL̕7X,=O8i@ |J< ;ZИurú]~Ŧ4]AǝMER &0K J _Ȅ[0x6 :|X+8֙'qA6'x8xB]y(-ihk8/ :؃1aJS IL4ɵ,c,1!58Sk6db̜NEQ´pbs2 ­ÇnM^ڰ)yJ3J;% ČRb@$tӯipSn!$F'>ES_?S9S`)mi $"=SWZZfu܀WGT nSVb7` Onhsн̛tN- KA Z(Q /1c|҆"-_+{7sMZ`rC9Qw0WF`|P!pM%ӆ9e;\Yȣq½' l9PsaD Y#D9A9GK5x3iMLtcElO 0DH=Yq(ąHc F) H 6}c")( Q`R8'H 8MM{4;?J׉w`^lƈ25*c-X(Io45X(Jsh`cĘtg%,̭撨) `6\2V#FumԵ?ƃ%:dLiW4fp̽nQdA!vKڟʑn_&F}y¤8Xs F} %3Bsch)sVjx RrA\ L[pةDӚp"! 5:9pC\ 㹲)5C#E\l Z`l[xgo{ % Y? cˬR\O?gԟȦ?~IZPR e ޻y*0U<4ZҟrPuCk"Crs kѵFBQ5Z9n^X "Y'kwlJ_%F&3IJƉ`bjJS\Ŋ)uJ42|(gՂ]1j\Sk-nt1Bcfz;D1Y""L`pC Up۶`7wJ9RaFbP0"miq,V;U_'\0ޮU>+ԚZNJK!Qi7Vi=9#)I*S} Jgcˆ(t,[3.λ7WYOEʧr7"u?*$oc@,#b% *`&yxq nnH( KX8;8͌Pf0+׷$ Uϒ3jg2312#m {BA"Eڀ W5ΘH|<嵐rEhIӉ/h'5hَ5ԙXjnyG76Ϻym?I>g&1gzLE%˻厏¼N=.[?_n (e2@^v;FZCz™Hxm+"86 dȾl%oZW7;*`NpbUe܇:b)}l3 J}l5:5+a[ kZoaXʷ`F<~vsa|} u^| \<[\Jyr[&ħ<_ eħ͐ r XM]?v-OXETyLZ/4j<.H L)8Sʍ25m ]ZpB6E{~/{Լ{y[@!{t=&;hjMZiYI*lnH.Z.}gm٪)O]47fǒb@v> isZ>ڜٰ`(wкa2lnE}ƕ/{o+ءLju)PHHܕ8kOJܼ4Y|khi[>TNqA>y:De $_T+UjˣNxh79fȻE7{Egde6M^) w%ýnV {dp/]e6ēL jte8R߹;X*tPpqNX0i ymheuHrWk?<<ȗrl}_r/aؼ@֠5,BeJpiBDyb̍YBUls1OV>U/ D^o7mt)LVl^d茭[Uafɡ !0tC$\$vܽT)$XI8p6'Rnr0w\, %V(TJKeE2%m}@)ugϓ̀y_<-1xz:M};$4^DV&DRoԻѹ2 %pU4OyTovu[FЋzyx; .--n9!:7yT{p0:~ CPIk]'{}6Bw03X Pӣۨ^s:)5F vf('?<~x }~:|>>I$Ț'zR>U4e]%mqܞX/~|sS/3 =ITIW\GGћ/~ytIt:꺬x=dK1Rkau2>^,!ܗzz ev"aCB.jH\R?BwJzѹm?}Y ٔVZ m(DP'>嚼vEw*-Pל/e] oHc>~CHplTxinFv;+ur-.g })C7X|2וo~{K;qPvހC2\}9l]@)g]%S/?/{ߖO E7[ꍝt>/bL?rPvR ><ή'!5YZM*,}󮐁hvΗ ;! fȾ7J7ճެwGj7˟7L0(/ B?em$T{<53ոf=S I/)j+gy_K~ ?ְn7?}̱eV.zs. ;}1~9 ~>W^tTOgzT3}9x/vG߅Wq7FM\˽6 >L nWMTx}z>Y?~ټ}^bpxTo>Lv-,>_],NװvS<tJK_48P/p*tf;/sbqXO v S/L"VI0>Y%V>_Muy#9haLrkV+&?ɔz[EAS^G~/M tQkMIb&R"ï(MMn/z^48c)k8^T.6tGq4k?Fa2 BU@JY^\) !(l`A(PtੌDKVxY0j3êPtg|VC|vv6'ʡnw轶p $QOyc81ߪpx"{WȠ5~ ?C"-wh!p>k'!Pkջ dc1>5~sma;?FB~co1s]4&C$)wSQJcƵ"6o1\;fs'ŭ +zxI.Z_CFQyb<1uEI=ox{qGB_.ARUwU? 
Ý [A?eɐl%~ջjif1@PlOWu_}vusN@lQeZQIyJ1u(8zP4CVxzC'3>a){ո±Xr ߉OJuMM+_}1ڟ4hM?m3ᎉ{hdȮ`=~) #f%&L,83&(NEȠ1Yc}ZpYa S}(O Xke J>0S[KZsQ *jU9Ar5HriO?d)&{BSmQ%#v6p䇬0y~l, pZ^>go/.V YM|pV< {߻^.ۮ}*WY|^7?_龪uxB* iB* iB* iB* iB* iBz,| b:OaKh!@Eу*h}֐)tVf de3u CҶbEQQm&4Gi0 ^ۉ62UmDil?HZ4CVx O^'GQYV8QrR0Z~~Qrw;d?8E<$:^1;!UFEpIVQ#dP{@HQn\telb&҆h;ݵԯ$O]~LXe%U% \IljE buo pr jZEosAE"T]0L'ВBìD/RX~ <1 lroM#X?`)gARE8άޢ+Db!7Ax)N-Sw=;w7*Q&G;HseMLxq;_wיߍ۰Es˓ްc{JA$7. *]2-{bl?/.z^!4Heij713y=%ߠ<ʘ{ET!Zio7Bz=1}XV/ߜ^5 jPX)E ZLfJu.ET[i 60zDb}Zəec ҵT+7q֍k $tΓI|e>3^W=֨Ш!nt tq XPmb(Ԓ8l+4h%5t:zZ.OdeZ|aH^HݝHNYaFZ50Ŭ v64bfZVc? v:$k$ƒ=a5N KflE|)X66Y)/rZjj("H!c:]}[byv/K8TgeYNeWI[4_Kb'qf/O83(33&F83>v,Dp߂x|њJ(S"e6-F5 7¦v>“~9b`E jCEa"o蛻[uwn}ϯf yz̠͒ٷc* /Y~߹wwn{޹wwo{q[x^ܹwwʖ\gI7zqyz{Q.ف%U/QaiQ{$=ܹE+z-XTLOYF.kQ +XQ^ƌT_/6y UłC>>x^w۞e'BHl( 0#߁9/ߜwFI"o%Vtw7o_H6'w?\3f`4w٢u:2`]Y똴w('Q3p$ִq*YCy6')pѩ$I^dٴ)޷",_iNd [3B@ijrHEހs&[fq#ty=?ջ21/,\ R]cS;wq by |Jw?^јlqԄo7ǩo$_#?VUq9BtM"_JJ̱ *j6+q^ %j jt _pRW|cʯDJ>kYv aV> ʻC @{TěWЛ& ^8( l!VX%s+;*E +vl՟˞394B I ކ,DSɱn)|7gTFaTUST .* -;r4lεi8ϭщZq !2R#Q@u}Nh}Uw/V\<#6lՏl9!ݫ9_$bǦM3 SF< L=NM8ƀ Y/.3b5v =9Fr(kRԠ(k4aT3M2g_Ү߯JIZ ђ͵XI4^.z! /Fnfy>LȆt"֣~,rShCr)q,ube_֦jbJbÌ.yB[gqvo~yxLlr^]7?Q,#'Xј>]) *5_r>6eք%Ѕ RDrpBt5h5?!852}B;ӂ"lqb.zILX|jrʠ-3th5D`-,umHQ*50xx YE"9RްJ}x:͊-MNƗV]M bECh]j"aNɀ/,v^WJW}1Ьc%8KݧbQMecZ"h]1i¶ǩfR7׏s(37QH[f𿯲igfQ*&liPVoNٻoYڇ+.4_j~OS^j 썒ewXRX3렂ڿޟ{6 p~ =p4Do4 I[D} esF9nHYT*\S]&4{Gw=YO?Y۰8(3-5FNi_jMc,]\fp&qw%!T49<Æ84,T}4sjU<`Jϓz|Kվp@6mj~6??;JP f,OLf{#3w )<:vO*L.?^ڡFxed{Zw3S4w`QÇÇ|z|w84Qmw{fgw8mxv2 u[*2螦c+6{!}yoȧ'= {xOBzL^ je?"m"n7.չ k_͠wo㩈+Ls(YW P߯3o.*Ы[L#ohK5LbwE)fJe-޹=VAVP#jOr.Y]k5@N.ѹS AxQ 4\Kп>!؉qmgУm8.,s>?!B9R؈=Vfk/> %9Qd>~cq<}oCG9m,r#x {%/di19ؾzfufWUvg3)A #1;%Nf).$/xwip D:{ώRw9ZBRjͷOD.9]g7rbzFukHA 2{Hrr\|4(kl:7 d&wDƜI;捋)(il\g]B"L&HCȀ[nw;5))ϔgJ3/0rL9~s\]#L#S֟KSe;e2>}3e2܇1RU2ϾJKZC2k#zT#omBM^ɋ>yEM؍wO~ɏ>f?f~!{t5tO+ݕ~`KVf[o{_6'wz;}g}П+̓CmG}`o0y'QW&5dOjR?S&/0[HrTkUoOtR=cLN&+ VƂ@xV9 8`獁12/rN fx.ڑ K2.ApyZ;oR-z,)b`dJ oCLnsNurhhIB EA# 9lf%q%S!gAh-֣INd;Թ HY'Y ]`ZF3U$VxqD='FjBywMx! -xa5F\jC$8!(:JTyR?Mx!Q u0ͭ /Fq3JS[*@z<;M)^\D4*1?r[Zၘ(jQCbkQ$D .7 ~T6PbQ#{X$)fOO^YrD ¥)6 2WNR̴BŦEȁX-2䄾X 2"Wl!"WlF!/b 9\^ +sŶ~rrO%.2垄= e(JeXeȉ RGrr+ ǵc2vnEF,y|l7J$($oǵ#mF >I_HqɁzpK77`hI\,g5Ppj;p kt^ZRfKlNM[p{=x-*N0BBgM[%GrpΐWH蹳twy3s\Ax{eK%)%pNp&:Tٔ88.Q Rv0z܍P4jZ04> ~bZF-~z@SȁyM,-@S:pz׷4B*pWyb2J+j,8iȪn!K[A>K$ A竫D5N 6XѢ[]_t.D= 1hv<[/[Ɋ?=!}V_~_(~g}-YZ~}aW%e6||?Hi ǐWRm/l۝''{F}rLXK 1F#iigG\p͖׍hL)u U, B{jYB1WN/۪QI4rdE,"}]Rt`Ml~vI s]ePM,2-h 8&~^`aG95>"IB 0'"eCL9 ECbGv I4SL)&B2+,TUOQPSTgWX,֏w~-+BpɱEgYNnʛbC̴Em 5z=Bm5B]Poa#C  _l5ŇC3.6j=&ŽZ<[]Nj.h44Y ~b$ A+{`I+CprM ]|?r&'[xع!hܛOJ,` Mݱ0ORhM8hut4+dA6 QOP,IW`6<(2+e.dc2ʻR&E ::9!y)c=QQ|C|Zv)ȭ{ G)dMp|7|"&JCY lJyO* Uso<=~ !.BIBr4V:c$؜9j-K*du{H]'ĐZXw$.er:EԌo1pPIE1A:FUaϡtD)#>^dEN_t{#+d U`"nh*T@ hIߪCh_T`L~0aBF/սsyI;iU'>/Gt'\iq^#rz7x9-\UnE):%idzb> 8a:OV)WM]] 77bh͈~7{#WV[Mo} 1[$̜M|6Oķ6>\P8. 
F!Bŕ֕ WL,%fW6lD<#<0qv<R*6PpsK'{ń0Z q˨Eo Đ^%-BP D" " :&k nhl]!$3*9E85~NtB0:VD(qe\>M <!'FB݊ձF:j=Lp 6}1h.=^ v'NFe?P;eKj, Hi Rd7](4%qofeѣ8XЎS&TaG%E9pL{V>d6Kxx<]6輑cF j EjlJxХֺ+[XjU뿴Ӱw?C3nZ"Mn?3QJI=VI Z s';MBv:TZmMٚ cs̫#o-}W R=2,al8˜d|F؛wاƜXΡ`A#%MUι@C3UͷYw2םfĹI@]NnXlfiŶ5 R^g{r%yo^x7J|ns)as;d׼xG ¢$u`Meh85vUGIgşS]>|%xS{?|a5oM>?\]L]͇rbt̒]{3T osNlA,,%-t oĭm|P ~sv̾p.V5x>?g1HeUEĹ3)WRpjhc~}צ_Jm*\(';N[}q:^eZʎV﨡yhteHK`ih 6F A~}7Ϝjk͵njdGe)Dv{ԨU;flUr4WэYS+#{Cnr-*\?ZclS/17Tld:mK\mݸ[n6m~ʹq^Fgk[b6=l)bdF:բomgkkqE::Plå8nj*>;zFW3.jF[n;naFS7N9D)̷(ݹ&WpjDf[^Q0An\=@Oֹc}_gڍM=PAI3Q?LU ^Gaub9,fR`t I4S\ôn8n> {-1W[dcpqݙ#Kh4 n``'OŽǖ5%`~H{tvSoTWt8ja)5cm(eq0Ɣ]V6oF,3Bd#bayv5o%^Vq3y \J$ 8^+QULs6ԐnWi-KO=̏fCYY: s+ޯ{l8y'k{g+5?e-^VhW?bD8qT!&|3-nߥ訙쐯42⍒q <=>xJ[>l+:wKJ+EqIJjX%v| 0Wfy-K/mc^3V7; gѲxDgN;:KYGl(-{DorH^9ZYV^7<..vx[vmW\w WR>69Wi6ur̝}b+WɫX93^ݔU$%HwΚQ^:w^RtCxIV>B`q&]yW+/`Ճ Ҋ!mHCy#N3CG<7rHB0(BDG1ǴYc4#zP0Ҫ_: pqqVd톓8#(~ )jn4^n76fllҧmhllGux>_3"'J0t-xf4 )0A`EH-iAg<`L$ZDHl*@+<` ,3$BAIǕI,"9 ^2AP X(\{yf 6Mb&ke-/F12iR\G3 b:!2鈲B$hM)LTՊ3nȄ+ݾ~5#紺KEh V%|(|S+%Y媌Ϯ{z' ȗK%. ZMKńN;+μTpn:jo)I b_* ٦FR1E`R1Qd!Kc4zd>8wDHdV*Rǹ(V$8jHP!ȥ*>3xڌ("N"/QclF0GgD+i="lHӞљX3EB9JDp¤ѵjm\qSò,X2(asӖ1(C 2lbEp!)uX[*fފ!A6>sHKʳL0E\ ufyCD C1Rd HyH-ȝA3`` 61,(@s̐Equ B*8 =V I Gm6#?.Ä6ՃeAV]kFD:r kA!5YP( D NQ!R$„N(H:tF(f? P`\Y20ThޒXɭ@@2L-NL ޸xUD17 R֔CҭHop&lh20(c[^jb Re]z-R%#F~y18B+R@~Tqxk37ȼ5[\2@x0€fp]azG,xCi9 '@  <zrAW~ & 3188)X,.UGrL]aΠ,0LI+ 7A\MPPEAC_PV0Šp=b ) ̽NL&J[kLBMCjMd5Ѭx?ϡIeNHQ͆_فF"f}~ :E7l@ ,ôqeifW tD!pxۍR] =|8|f!Kb2J10(XY㤧XI Y=A{!F'*YVRdktBT=4||Y-FEAkH3y2g4"a`VL)ӒϐV=otIx"*P4Z3 b( y@ZxtfFW|+% YGA<1CR0d`ź r~2cETȥ#DhX-Uj)nj@ǙEx"K6dưA;^{ڛ37RmHEmEZNx~qbF `i{qn!g0̱Sp JR.9<τ܍X"}؃G?iRޠ)a@"Q# PX)Kach^@WN j$G$J)Z}(f߳K!x}6?1G@Ʉm?O@ ԏ +g|$&"SQ- h-o%EPApLUVPJYXnwILU8jR~Rz'Z4U=S<+N*0=}+9asBV-A>[U2-+a1Q$aXNvsưTrkglAA'k8[Z0ZcQ$?=鍊niS*ւ! BFQYnY`ȑ(nIr:Di]2VZlZ$a< ᇸz\C<|Ѧ#6 FVRL%#m}8Džo]~;qCm 磞Qo1e1˳r]m8LY?f8Bup}ĩ>X+ 0) H=0ں J@Hhk4K=-=xߞ2Iݺvr^Qm׮h;Cΰ;*ba 4b;шj=1 ZI Xj2eda.`XƯLy68JROŅуCq'^;vD*w$>VnMDz>+lAGi%(s69ƛɠmaʟ.| ~YZ@*Kң$IGID1I$"XEC2AB4s{[>lol}ܬxQn b9j*a7KQ+i/y{1anV>9҃|uLv曝ZbFq&&<5͏縶XAbxrқ:`g,s܃ {3 1T"U7#?/n1@o/_$w?O1뇨d<ד";>k<5rċ&]v/$O3iIZoD)vv7{/z Ss?}|sr^~]krp_\47#9^O30#4(fZ 3]! 
抢g1)9ߘBl&؄ MƳ)zustlD'4{߻?x0\S7w>BT}:qeĉ7UI [ٳX$/l} ˉuhd*7RZa0OZd} ^ E_6~}sd;o"<5d㍓k' '?;XkE ݍoMw"-F'o;wy' gƷW%u';vM%:o\vltůSu x?[E;4|1}}po; bD-#ōXO@UXryR^tjH7_+JTOWk#Sw{G/??ZJ|.4B~o[F+\ ӊm4cQ~]ہhu귻꽯kC,/F.~!SHBHocy*G"nLeV3+Ob?1"' ^/Vz劎~KyX%å>TFg*eҊT;BeFxisK!,WqǮeb▽?qggNZdzz>zv=߳`ыŽ^(k:t'i_gv0g>؛}xZ=\۽&sbP}t>:].iVD֘jIR&N5c!eD[=I{-3):qFʬ%yWwbO1z(wkݪP4ӯwfD9<]qxm\Zj # eK [mI,k$X)^<Ǹ30 {68Vٸu1%nIpw!0.X~uA!pES6w /)sfOJ'-'좛*HB "&[-\W1r7[9,m9^\/E49Ld>huw nPLNBQG[IM9u)$n5RI$mrtJ\d$՘L(Ȳo5Rk򽢧4۫PcjEJb5L&PM~CގjO}NbIB 9j]\UsaGfMSZȶg5M 8(MR@NJ sLi(3F,s:0OF٩7E6){feBNϦ./Gy2r1M/m|\f.`^@|cƜ?[qRN.;{g#)oW+@IRA$v>~%Z)AdHo>ۃI{oTQOYEQ|Fe'dt=;|չo{QӝšH6>B39&#P!*s: er,mmBJ $Ƅt6nR͘fZY"Mi J0c-=PO3c&4I g6=+@4@Q!uZ \k+09F5Hεk{{э/{6r,O;fLc頏24" eEG=U)m;.GXyQV J-7K؁-tIz ƍ ^7A@ {Shw\¼&-WPYjdjkѷoY44g]4vI(FU*3Yg*Oh3z20]HBFHqduYFA 0[FˆkwL' Th+%D?-K^0GSWF) 1,-QBS O98t:z-0iذ%FCN>I_$9w] O{dqkQ4:7L-Dk8hG(#+U0d(JHSrBYD Z9˄ftB }~|y]fycշQl sO;=RygFD[.XY6 7ioɎ$k/C7h@$YK75sɴ8݄˳I5؊$#A+~n:<`5=0Γ C8DCEC67cn"}`gL5zCE۔̖R+f$%'/Qo߭Gv0/[@vO_1HZ?w|ytt<"3EռHMihJ %tbX5Ӳd9Y=#-V+q QJ+'˿\D9~1f(*3/t!_8S 1rioV-EHRN;ngL*3t+4Wu!_8X%~c)]%PZpoEՔ.aSY!EG 1r2*pNiHlZDSMqm=<Ye9cH4 S+CqkQ nwpo_,k}% &$W|v=96G~҈_R*&)EG4UmWmͫS/˜^n] v t <4yHLH121FYd1CEh En)C\#@U2济3fx1g.הt-BQO3C'k4i[YALDe왣.g.7Lu-vO3C9cúfh'HhKWTH$KcoYEx|1yO3CݵUhLP2&tSCQrrfxӹ ^1 HW!J ,z}^rS w-q`j4sh1B?| ]5S:r&JubM%M넯iqa|WxbOǵ }3ZMrSz0|swiƷ4Li0UXG:tURF!Fo+ި]Ab?}]q6Sr_,Jo8wQ7|νUVҢ9.Ey>50څ 1Rusm94bxU iuC@q֗~(6s}̂=LECܪp)՘Γ`.k{D;{֫8eu |W͉/m :۱]hß.k-}Ӆ JyP  p я-)}[ ]8M!EڗAJA򈢚s1m 7Y, iZHEIG+y1XM?VdfedJ|^ b?dA[}3-}D,MShsj3{ (\䬞ސctuq#2Q&i*> C2OlFw=s6oxI,n4ܙdFQ~sY~O/U0DZ98H0S#4B"Tx/?E>sA)#q]jj]@Az=:{@yR͋u>ʳ;.9Ql4d!.(^4nn2 U9PoB(ޟ1pƵZ|DZ; ІFW QiPH9{@yBf.Pa]"mU+0o{/YFa,e0K_ǩeݜd^u]2g%ePzF.KR5:Q'tGsu[\5H{: ~d[!Ul[Re >D["Įɣntr޲%[a)>\t_ov=оFp= MzR}gQwΦh=zp۾P7JYD2AZ |k)*n#tKBBnS?8ף3ڗզTU/0!ڔ}Olʽgؔ{kdSABqcxYr5GUO'j/!.ý-!_o?Oaϊ$C_u,9}m}ד$Ư_֕zD<<*W4ZJR'ROkBVD]F O=.MpJ~C^2(L g1I]t*8ƖF=ha4)=-pŧts9hVۑ^h1y*rz?tI;A3zEb#gМ^ˑ3LUXxOϓ'w/Sh9 AszE ]Jʼn9#J 6jsIGR[ήd]L'M]T-0@qF^%<82UZfH`hu8,"K`g&ҎSc巀+\ ՚R l=g3̞|*GTQ}Ԭk!yj<)1e#lXRo XkN"ItUkKٍY>l1QK;IT[mTBc1qj-.5Dpn{l'!z A<'yEXUX5 QU1`f > +7&pJ 4-eUkҗkOvԠ+D.&X.i8%h*aLA  hf^߇Tue~5IxZVwa9e|\^$% _i`ou>; e5m_ vGkuyXY,t?Q :gc@8 21'?U$P`k{vZl nd=rO75X}M:rb@;e+2h Ph{C7 `MD: ۭ_2sAsBS.JW͍ݤCu%?L}6֒d^)r,ꎹhrc,gp靕cC305 ASB6F~nwŴa_!h1nlڔ'A 0D yLLm85LqǑ1T@hRFXn=Qb[ve_sb#l,P=fj o${ܔ/U@h!iʶ0_*Y# Ldt۩Ӵyx| y\ v7[-_ֿ<鷔]G\Hmtr .~.m:H4rT:!\FS%0S`~bM/gAyJǟGUAAdNUdG.h]eZ~bG(`IJ)k*4*"H'_Iq!P5osȻ0WI"a'pe+= .XĕFA3bc)a/îu] эĒ~K=ܐsWcpL."cy ŲU47,WIFNTn|mLӝ4)Qu7z׵?9,cguJ;G ˋ\h5 ەWc& F?_bOS~枝-qoVT_Cp-)MhԠL]ұڒv\m;ڶOֽ3JygT)'wsFsg܈}F{Fu!_8S~S:6hxox@`UΓ+V8;_q‚pœ~9Ax+,hT|l I>D&q +)=w{؜vѮnh2ajoJcaE??&{+D+mPV tٓ9s57&>|vɲtJr 9|v⃲tcx6i*SK qU3+Fj|#L6^;@ Zc~]dŮ PA G (إdIDCȧEC@SQ^ \57GPuj}~ƹ =*F9iNΓ\qK[z >E['!L,5$թԐ4fZ„pU^EVIȉ(p}(P^a DZB=xDt"7Яԇu|I%<_R'/@Ѫ@i 拓3t`X:ݧm"xE"_92>nA; CiA1(ֻ Jms\^|"3MQ,l9#j(!V]KlE2)pyDkmFoES)hph4Sݓ{Yk{rx 7@pȍOc 'V(: t(R%mKz+u%B{#t& Pn2 )脊;x삒f9!;/ezݹU>;¼G6/n! j? 
]<;@wH!kUYDUUcF++!HVOW:Tacr4֘B=O.SBhy=WDi¶)!}IHK-+.~ ;Jjr^Eb&)AnR"JQcHO /%&RUĹ-)H}[<1ESOk(H$GV)]؎l3_QjJzE&;)hj0ݺ@V.c-8}ݴ&$Wzz?x6̧Wnj͝{jy_}7+y[Fk9sɤMӠI%,cQN1]SG q$af?Fc$ ,²Rq੬YQi#fG~ PP)ɣ!&4{6@倅|&D,)1e1-8x W)'+Gos[$eG Odȵ@#PC5ذԀ=_q ڇ<\PQy4Gt `c\7f9 9!zٶ}(eF lt;uG3bNqǧ1VER:k:ޘ28>|I8a_asg^ bB,U2!{vLPEDJdi|~Y}[Ɖ޺Z=KhS7[c` M-U4k3{bkƌ()V&#awK 6}{g=v5锊u1OqIqaA/7ao8j!15微5cՎ+i=cSVn{>+ݖg}) $w!!_s54jY4[l=3\;`Bu*g.nHO6 8UOJ;fs ;wg־3 N) ܴJIy+ػbJشV!( .G S46?M-ˆRbAR!/iCGֺ6f*:bM hRIo ´AF֪Y|1F3IW jFNmzF4_f!p4叨4"{,'Qm8rZ T  Ҡt,1DF]Չ?qcӂDY DM Vl $`{\Ռ EvI6K3mDNP.r# !ofS|V93~~O7p݆mA;wup JJ1)-M;Nu`3ζ~s~̘y"nrMU?o|*6,7vF !,S4RFAľt.i')t^htCY4p';t1k9VW,~)_Zx_򌎫8lxL^FBA(^ _ϜB ħ{)4a^ A8P"ʔd(>o#t!}*<@ ~YPq[7hpa|zrz74F&[kxSZ\E&j}Lc Lp0g,Z IvG搝:Y^8"N:A0YtѠ| 6u %uz I?fG}>2'Ҳ4V/sA@xGsJ1~i(qN""+E<ܨvoFg'iw֧Wv$9ݱS%gh9ПLJoMC!QFp9i7SvXm(¢U>uu8{X<~Xk;p֞rArfb_X/noZUZ|H5vJ%P1Z .,> c,s2 P մtűE̞_|X@T#i0LKnD%f^S'xI{>;Ň ,^t\|XsyZґo|BM %nOŇ~j"uvA^KmԴUpƈAHb'JQDëV͟4@?i?_ 5n <ٜwYݎ8wŽ.jG%ń_O4=D7%D .&D2`lkVqfn;K۾ԀH`mgzZ!Kfښ8n_aeJFj hWe_6T?mj1)[99LVu8i|@tR-&cp dvnnrX |Ն7&pz 5iZ1)15iWy*w3ILhmM9$䅋hL rj7c6Z1wnGg-ڭZ6pmqMZcBGVPԖtA"JBΊRP vGZ}1)jo/TJ9@N$?CEhNz"&%) 3% IcuQ)Eӗ8IС5(6Y[9𣎸bP\Z1f;V*N6Z!!/\Dd q:PMXTNeqNz}+ EIFf(m*A!Us"I;*{R .1kcPAڭO#'0_^2ߥ8}=o c6X)x퓷VyeaSt=_^ח]K^U߹/;=C}|njGcf#4)\,!QfojŚ]18zpץ=`bZ=؜W5݁1G7 Yߖ? ]t)wfՑ܁7g'́̋|1yIpڂvY@KT>kJ6%;vnvJ0}$4 rL8֛ҊA 1^<=DA5Z!!/\DdʩC6z\#UwsEb  EIPMLHHޱXT4E*ycuEΓچn$vrsTyH%)&!XBG(h!l-H6I[fAl7;GIZ1Yq0Oz;I3;EEuv` ڭi}v}VA[3m y"'SR+A&>^EHП޳_l^)pwG?:yXx_ﯾW:4K1E 4+rwOao>8͟׿^w+݂Ia? dj/!&!NY38J N9BI6xfoh Bʬpb R@`#"v.{pL)3ߝތFONS/{i~Csȕ & gYDCJ&EV()o5"g&:y%ǀEl ;lye19+}bL-  Ea X&cV$~VTKHF"h%DWBR^gDB6kUHp9}V|tK(u_OuE~FI~)5 =~/eina]HeѸ e %WvyZW#4n/[mP#RYUߕYV=JߕQ+%#!,q:15w]iB%)jO hmJk׾@',<0TA[ O-P:a9P>4/`b#9Q١Ot1j|zt9Ry I^?t㺺?ӫi՟lGaC*!DH:iX؝dSVT%-Fr&CmKȓ4_B\6L#gx!\ʦ.EзȀ}7YR񮈒#X/ +b.rZPWx$>y*VeLxvОa+Bugp%Љz#b1BZPeu2Rd%ȨbOfM(zcpҀ-pfA3K ms34[Ӝ[*61)!S()z-ڣ %$`B}ldv<ƆYkԨ7Gyau08@Ճꘫ{]Yi!ƨŚYxΆX9gNq O9o]ًz28}T9iJA"bN01I2Ė~ޮ5'Z[W`1l.n.}HKܖcw[~o.:?vUL={e}woi\׌JW>yǖ5f{yvgAYUSG;Ge/N GNH4Zϣ>~e/&Q=v˽4[U=y֒\iM4K&8Fz~ӍDWץOcD?d5j<|xG(!mIeয়βO/^[:GBm!EjMC)R #GWgsX e`k3O z[ *tM_bufJ 7Zccpb SAFrIh/JQɡx>hi[k?&gfXWVi++VgtJ7܍; ܰn()oy1>v,r}2}WB ꆚ&;n%̻:P#IOwA!0LE7\3&+Yu" nD z?fQp9^,bbQCآRd\E1!sdJТ(@$8NvǢx@^lRb[Zey>[ۆi;a2>^чr*e>??!_<<+?zσ>2ڭY3qqMu2:E' CE`10n_-?_^_'n$TԌȣL?-V"I~`=悉өt=NNm)PCIRDޡ-H(b)V"B|PSY[DZr 0}ۻ7즱u\i=9A0k]cko׌y_U=qBeL753bn^OcQֹ-(ϬκW_FlKC{nuO`m!87׊nIAcO4&PɻER(ưP)4Zp)LoQ32C`,)1p1,t$9֭qXЪ%Z|%cluQ<ޝx蒘 Ld(upُ уnó.Gg{9TsˡY*Zo!&rB.Y)9W`o T $xVYfFeRNlޫQ&A #YFd#nj@(%*BK1p{ӰvGˤ)<;}V C$&kmgfoֆ [h܊AMwi|DI4X-r^y y":YXCo¹K8}.^z>^|o<;U0*2!tѣ-y :>0@~_?wg_{^߾xmtA.x):<%65JRO_3 ēƅ!X@6#m.NѲ45#AؓW0 G$K˾V( RJ))]a5|]'aYē':^u\BH\&f9'hv}) Dvy^u:59a4sO_3 ;@L| NrՐUVW$h ׌0 JIP,lu܇E}&4[Ovؤ$,Z9%e$EV!G3)/URShUxP9E2UUrX*AqR/P[2FY-WAǻ,l$:d6T1Bot +RBlpٱ[gˣ P,s@ܿPܳ'XԄ ڟ=dN&(5:lZt2*VM&'`G.Ң!e$=$1 ~^'gޕq#bR} {Oǥd{dUuw֑ULIyEf" 㸋_٫8N{dsdLRu#0#$p Ʃa +fVL23$ ٵ1{-ϐ\:~pewsso+u'5?1^(3;Jݽ hrFNAzhA=|[]q=^v|Sp>hc%Z̜=0FӐSB&8:|gqݙިU&޹ڴ "aI ޔR,ohY=%î[YWR۶:UwwgQHd[ƖY@\ 4*sdF0d`<d Q)pvẈaS:?5\ϟf3$p2xߨ:FD%ת:٘2H,dCH(ʧ4^r3+(ɞwB!MWSiK836ΚCAZWi,셁5F{1>֧ѹ^PyD(UmCLR -)G@4FSp^4jLܾc_W$w͖S%w?("{QiK=3\uV}/ 5sc{D/ R vYf72g?1oj\/R]%P2ޔB!rߢPytE!ùQ-(u|YAWM|2V::5^}{.D]5=.}BJ8n֡<5Nziw9ȡI%iЗq'--%k̗1 ,h͗q20-$|Ϻ/.xͰ%|9E B{~bc`!z{$$5lhZ,)qZ&YY|6dE:(z;ca@6^0y6'2m TQY+y2._&0B—zdm(勸g1N [@Z:|XR{(zl!Ja ^7P7*JcLF hAq:EiMt@$$HPQtr(MѢ4 M6& ϚCiv4T3RLGe=h61͖a2([U#Di,J.c!Zr鯘"@Qf'7XQ8Rۓ]B!2.SR!ْbnoݛwku~Rq3ۏ&(zI?=DmX\8 M V@!~ޑHrc}~tʆfŜ;j^Џu4sکp;阓L7K!R m1tJAY"zl>КXc^(%A-2&HkeזC0NXop݅V Ly+nWtZ* kfel<ќ : g1_XN%bhRb[Τ *M8Ą͙8G@p󆇇R䀇Z{q.x,Kq{"9+ zq@H %#P1%.3 ,?,ȣ;dA0 U9/3ĐᡀH:䮃H81i— UX 0VC esŰ,40B m\ȮqlpOfJZ fG^WA4 
GND$H@b^!0DLJ 1.& Ϙ|P,4CN %s$Tj⁲PaҞCMzhr|iP\zf=Y0򀞍y}~^l?M6G Id?͘'h7AbΨN_̔fwzaiOF!Mi!8Ϥuh[FeX?>>tO$p{ -\ Y>եŧa1JZL!WggQ[(K h<'-'SL?/_rυ7j0WZ/'^-38"MH +:ɵ|)5޷ 2N*UtjT.fTX@ ;EAP;LQD^Ť8RҘ7)RM.-R Nje4(L՗,.lϑ.%#]Jٴs^fjZ$hrkw1%b7{ئ5m?"h\|?qDhriC#%Hf۴s=hiaC\ԝU isbU $^ev 8#⤘S!(2|Ovٵ@b6Ʃ"iw @8E dZ)yaOѤƩMc y=N'StzOѴ,2{&5Nn P Z=ok'A=B.I9 Ƽԓ jQڢV Wv l8)jApTE7E &p`ٖ2iTKpA)b挀l9V߂@1>[GUKjIp mE14;Tu( B|2( L.mf8'C}Tu# N&@Ό U1ɴ4N͛p%KǨ J_y>ha 3!R%Fn?lY oj[n6 FA(shw$ige24Y挂i .eLa( }iCj"w.$$zȲx.s'L󯺴/7^$.4&m>n޹\{ݽPoRO)="ӚF CBJ½'旞[z=~e#U%=0FːR:;33?u.JMp];uiN85\o7\z0,MI|}ǽD% B錂3Hsm$LG'- ,A$C.#>G LAQ $\U/ <ꋩ#OO_F162Ph(;ʣTCNsSVkHOLۣ|lY7Υ/ {Tk G@5V`8xOVR(*=c05,;f-x6K6{M9#g6ǧ))= )H[<a#J>>:6tjf#r\6r3񝓜 l'"1IO6oljiOlj0I0_"Ҳ#G5eQ.Ku[SK Aߦѕ^n~jU0z/>MCNov㎿Ņ ضv2=2޳:qr~42=؍DV' U'9W VA4NT'$4gvh2)^A$5y,1[&rHȡq=3ak^;lUÞ gj5l,BmuPs@\,)j.t 2NL K&T cP e8^J 9X ,S"4y=qKastGp 6Ai-dG[lEю!!FB4NM5$<Ӵ: ̡: %i^v2HvՉw<ڔ#鳵fXPؼ* y?mR1yچp}ƭ7΍WxrWs.nGƺ Ǖ1ۻu6}q1v3 &vnfp<23Mx+Nm/.?K5ߓ"  .܅~qώ{rA ,"y|*D/тm.)3w1msH$IfD閃CYt:Oq]dH<%Q5(d>_,Q?EQpY11A- k-Z dzW4}yO&6E1#͞j(xɽ?۽4UU=^< )(r5><<Rh5"#ԠN3:1BCn瓣]wzvi͏B-bm$A<0FːcDA.:3̽~ս%|u24ODֶ,۪^-'pq ,7%5ԤC/Q/=>/ X ( 1wK8s, lZ7w.5:H^DRLmMxq: ^ZT'YOQKϭQ%9h׺:O>E@QӪb{2U%A@v'إn.ӛ1֟!WQX9K)՛n9G>+kF}DU/kKP"/7g/c<nig[\8栲cٻ6r$y] ?؇d],07r3IvV,i$9<~ն㴣RQMҋuWկX*VEJ AJ8Mɍ *'F$iz nL ili^jhBK0sRIӖr(W Z$o ZjP]94A`S'9!i-D+Twwz3JK 1;mY,$J64ȵl]hm\ZKyu+oS qG>1'omKL\H[%DZTP+ SBJ-wޞh{T%o9oZcckXC16FeAU`>-5;'T>PoU mcy+ɚe2iTр 65J؍bs{ Ii Ѹyτhhi0љV2]-fTî%5j@ $ =nZ{`)2Iȝ1H&B ]Q!ZT/By7wvdh#[+89oJy.7u/-{- ibV -K:r`; R2"`LYou2A4@UCi,nEi4'9mYi@P};02)g )t}Y`Z=Śd4 ӰZ31a9n`O l | "ާhJm >؀!2|NDev9Fٰna'\w yH@?Bo$u)q`h5 U0G,D#O&y'}9ݍ3IzT"| 5֭\Eic[74c[ 9sEsWX*L2u |^gŪ:Lty74L3hș(Jo׺I[r ƨXrUbӝkrhș(Jrw׺:V̴X6`گ>)g4USBUc4Rww+a*ty7,n4USJV9kݤ+1}>֭\7XwC3:ֺА3WQN9U}u$2` ߱u+5 ^ѭ-tJ;w+Q!߱S;])eА3WQNZ2Ă]fnXw+70"h-t?dǺY*Ȼae0: Ntj^4;2hșw:G.ڴep.>'.>d#oU?1?\m 9M l%%Rd/t%ye`=t/|ۚ£( $Rt%IA`_ \`nެ>vW`DE7:]j5+^_~{7هEB !{7fj߂CIMA׍  ]#fhwcNr(@$H@&fa.sBRK̏14@>G;,< i>g}&v'BH!T XL2Z5q,D2Hڮ.6ȷ<YDQ6(El0XXje2bZ튁4T&fD+%a&,Mw  < s53"VIqG/2rX\qhpހ ڤ۾ꅏa21 ktđj""9Dfy0m$Fh`%Igض^*֊8lIY9[4 M)^@R6oJ&c$IHVKӞ:%OOg4{@hJr4v <)pX[[nF$a\q"vAѽFMWi!KݱaHÔ!:T4퇹_Dݮ" M͔T2hIgy-DǜFbnb5l^Q5&0 ^s~YZï`WYh- G384+ˌʰhc^Ym3Fj ${%i#SVm֣mf='gb7ڱF:,QD"YSӀ/g4S,7@<&~Oi2 %Oi2Lo x:Z8(,Qp HV|U7_,ߗ-B˖$ T=D,9xM/v\dȻ R&e`̙b.qNM6 1eRLJ IN Ш$c-`ʃ%WZq'v͵A; KQ@L,DmEr ;#+iMvv- +M Cp)WK[ MN5 kYٳ k]F5.2N?ڛ]\^G_[.ʫFp0KtRv A(఑؃ɈAzj['"N-3u-7G- F Wkp9k:[9r\e-)Ri#ׁn4@d 55жpPށ-s{>%;`JUE#[#{05<-i<5o h%?]yo6,4WԘXeSsTA| 4FL@5ڵ %HbTrAiLOW T~͋'WephS"C/77HX["?jB ᆡ 3Au Fy F]q3/(Ø,a# VM/o&Z&j:#̐K4Ly Dh0eJb# =Ӡ͎0t_t\E7 NU gYvxHS0 ڶcT&X: D1C_jF6_U(^z=[̶8@+Qs[>&[N+idJъ #{ihV*+2gT b,}}`62`eLgv)ۥh֮̉Tre3Rvh=G2wdԦ?CFڜ6Fz|WA˨vmbLj%.Egȹ'1( ۼ/JfԲF)bFiZ(3IPr.QT`1*o]T\`F*r&Tȑ0TT{GGr:]Llќ>ZלW†6:9 %@kTu)9|ӫ.7>PbͩW5Ojՠqa΁nɍxhPOYRl7ѡ.:drDM-u{MMHfjoՕ2Ƹj98Fr\Dk\)8^B{W gI_%Ñwpe|ڛl~REl^uGnWM_o?v1-ZWe߲ ޾#x6mw_#]PN}BS'[.jV7s߭~boNa ]/ar߼{HZY~%ղw&w$CZo7Y (u8j\.ه՞W PE=h7-٠\I'io屉t\id?VL*sY^yP|-6h7w _ῗ/|jY„Joo)W `; ~=,; o+3rޮsοtO>a}s,fܤnKOxZt~!q~GMR@ v[&3vrNy%(Yd{tOj'\͗º ||( :w{»ϟ!g ow^iqO |ZR8>D`NO2% _.6ra<g^"O*'vX?eu=ZPjW;7\Tѣ|sl`,#S| /e(s8 i&&}b;ńS3hTN{tx#[kc[9!R젟nVRZƏ%V[/WHҿ݊#R0 q,tgIYk׽/ ystOR0,yg󗲄'tGazn'ѢKeu'/K^0[%W9zɞ@AY ?Of!=E>e#cjϾ^l=H㿾 ջ]8ϒ؞a6<$w4/Aճwʏ`sx)*{7fҏK]/ӚGؗcSGs/:OIX~p\WĜG◃=1ТwˑtLӜ@O9HXkܒyA4 "mǎkOƼf*$:F4ҙ;=L?pRO5@%U-z&==LܔƁ|1'ЍT,{rOKwBCӃU0: N% M *+O6/oGy(iB3F=1`b*%ig'됑3N$\ˉz.pq3KwM0աP]Y&:$Q\V1pKQ)=/oI#g|4KWGuP=;%0 UE Y:BYC9nBY^Mn R^C9Z[Yy5s)nJz3wds<JQQC!hcau9Saˆcv[]Rvw6NȰ6㿉/T#To2T#VJ+{ّj~?"\)n3ֳ C2lR|a?;Ƿ z_qyy$yM }5."E#Gg4 :E:sȽÙ>ٴ<H2_ky da|&{ $G_{ Yd}d ,09̗;:lbd;nrܭVR_$uIER!]36vH*l 7 yg:k6ew/0awl82gvP L_`];GSL}vMFN\Ϻ|:='H-r&t1[䨭/FG\{.L 
4V>g.̃|0q1Z CcZ% 9INUCqُXv;k>qѩL"I>G]#ZP'-vd(9&-3IUgUrSɐdk \J'gmv"b7n3@QޣD2Bd&`)'w|MDE&8VGp+7A/>-↙0WTP[Roz_<# l+vCƻkv|I}ܬgzDCւI+CJ P>y|Gn6u| >bOLYJo?g<%gǟ{HhDM,ށ^H{J6[Uds^yK]v&ڊroۃzFأmB 1ֽw_anSI떶ؙO5Sb%SvskRxw'^L\-UR6 b$p%'KHLPHyo*C$bGs)ÑJGbFF[/UUy:3&'(эbGf#;fK4m%_~xu0fœ/.IZ BퟯEʷB/_Rq{5R~@-M㦍M6wۛAN' Ō윀G@&T(eJc޴7՛6+2Kdqd-;]L%r֠ʄ[X|,ѦbcIDRp},|lgN llG w;zÏB7 n{!B Et ΁Q R㲋#=H*3"\]t/?|*_^-:|(悒媕U,Su^@`ld m;.NPX:j J3@ 9:@b̚BUl1 K;ȓZbc"5!!i1AH )o㕧R(R$Lq&jTs!xXjE ěU)(FC,+b?& -eNYTeAFըƔKBNVTRc26S\Q>\s fu L"%ψVC0%ddOLZLP, Au^@\ ( $bdIY,*KEes(T kSf|P)#vƘyBCmFBL~MmR9Z:hy>ÂbȦ䶘[ vm08)ɣE Qt@#:kjYLS&"U0hFUaT&E24FLYZ%t򺼩-zm-XZ.U׎ ^T\VrB]塒R))NOe 暫J#F$mWڋh)vHZ XWM *GJ @*ӗ#,ը0f]\poj-CȃwTJPǰ:ՠY^GY 5^:1VE[JVQ|L&D?j1Qm$",Yr%Dn)U$;/69ULA5yV\ABo#^Ԃ0[1* Wyo1G UT^)>El';ɒ58AIl(@Nx>Gk46Ĉ&KlfHm3Y!AX5};]hw!,Њ^F-)/Y9$&)9q& NJH/ MҬN8 ϫzdX Q&0 Ib"_i$"uQl1UkgjC$kl(NIps}+x-6TC)Sb? b-daz+Fb5 l#HxV~#O!!%T:7Uxҭ1 i]L[VcH$!>~ 5YoП9Bqg\|~ٮɫ(=zݓNиvr.s yjCŕ9Ug=V/Lw Ĭ'ňA Z71OQX'r]XVhʟ>4҉Uv@}>pY"%%v#ˮמL.S,rB!{ՆqzxHʐðB}eZYPcP'>^$3_|>8, v>k}v"^a nM6nif=}!\"{ ዟ_O':.v }j}{Y|+mkwg߃͉uFܭ7o{o\(=hcgϡ~!9T  Ly3΃]}:@Zy'+C$-HΊ JtO:0ZTAkzhjd:ުM7x4iqvG49]Nr򁊎FjJ`<UKZzh{Q#޼~\/![8_4v1яϿ7#ӻ%<팟Y_vfڛC!17lf\8rYYy!{h'*x{ƽ⊮hC bj ])9אmFE/{\;|-ȗKrp`s;e%dn~$-Kn5CŪbb"& MhRn{B>kJpO%R)#ۥg.oLk#l:A#3RĄTB1~5\MCE+7]s I*AҾi Opy%"x#bP Ցv*0zi)_o'(\3 WOp۹fYBC>-J )hpԎ[f36(3ic(uZs|*k|)ae+Y= O7^^*) XiQtܗf|;{+ZRtR^Y /)K%V(SROIP! U ުVښ~H2ky֌{"C`T;RMa@]@70Q+6tu7MKRz_#Eut5ЃLPM*lǹ_/W[!qgJZѢNҹbG6G&G7+DQ7ͽӛ 5e0#;q|SK!E- \s.N^)b]m'@5D#@ Z>cZɳ82V9Y/BRe!9aȜ+FW><̓+xS=҈?}EFJ/("+-E"Z!D؉bOۄrک_ke Nx|-}>]YSoOg!͚z9jf68ϸГu\r͸d?iw>!A6/{f"Wx'ePuh<ҲR|,!$ 9ARę@Y') C*u .Se AU8L(^*˵xL*WJ=Z.((.ӴFE|6~p+&3\wMO}HUG^sLr vKSA?~T`Rdj].`)_6+ G cRX#;H=PB3 4B ))zBWT߯-i^ Ym# j_WKm4&"SVvݷ kNR&LY*΁eir *wu묱ܗ)T2hkHu;bOo3=Z.Ӯ2Z.Ӯ岺kLp/͘AGKquI0K  m35mԴ80)]lGNd"yhbw_TJ{(%F‚jBqq ff1QӸ| Ө%)_;ARtkM}!K14U6@ZDjFjU|qLZ.aDo u4d2q]h_4ڙZ!v=^n6GRvgS4 #DrnFTSG&J Xϸ3*D˕q(U8+Dh6rFe;g9KeGBTA 0_*"Kziq Z ǘռȣ7Yڜ=~XQ;뭀@%=i`Q2 _k}&Ӽxɣit˯6Ufu_~gDLj]-`N &u~`O74i]q_0o \voQg G%P6oAng/tBFb.mIY;~ ㉚_[B[3$]j&WLnqTBVx-piK8&I彏7/m%&kZ"H{qu_hg'>"VA{8Gj9QO5)Pmx6rUqy\QD6I :Ǫmlk>|c3?IH@s@U+}ayREYYx&%vrd]7T"Jh9N=ˢg+IQ9olґEў6 a{7 0ÓꪂxqX-%Θo>E {:;<8"A'OVd;a2]|sėzT_wΤ'iór>>8<emC?z#('Jv}ʸ? rXIOr:Vc-ww[֞ +@Do< 8zJ#笃Vg~qaK>şEwpY.hosia"Ki>*pc]5o-+-^ބ)5I8H縬L|~iJ+Z?bbK$-;?^iޘ0Nj-8ECͮ(Zڕ'LB"7..C,Oϵ79ky\:]p~Ql#MLn*b5*2f]㺠7(+]X |oEt IVvP ":U _g J},'񟹛\҇4/n+hܻ&B[FG4W\tm$x!"XBaEɜ&ʬIGۣjU w4<^gՂ eu*|kANWY4iH{$Jí,N6qVP#+;dϊFԇ\`&@s>t}_#w>wR<|Tg1Irƥ0~^ |toig|C1F>Ʒt(SWaɀfQN~ZfL)!Nn? 
bdq*~^\M}Ədߍx75.&-cÝO߾?WFROk@ӛ $(+H I=͏PJ݌butvbRzAYl!4-Pa Xiq)yGe{|c=A l(=#./(_H!-EI,R|6ot\ ~)/ C愷`^:o4ڛ񿖲=6 RFml441`ͪQ ?J.SGý E!tNQZ69kSYB$,x[K=HN!xBHwv@+x# bU=0fttn1Nh*VG,lQJs:Yۇ}RQJlfu*;T ;B$x V{rPbpm8/1}VsF0aO-Ye5;oF5ÎjMՈ5ku40 tͨ+ƭJƿ2&X/e` rQ\էQ*5\v=^Fm,Oࡱyׄ8B@iwF Q$~B{:H>Hƽkҹ-w~㡾a99+RI&%hh*L4aCYJ0hdG"ǤD)FT+e(KK=D[n3,0!$F6BN+ ׵<ƻ k52~e AR_Z,ArQgz_1XKE.d1$5$߷ZmKbwU$VUXdW}E&РuZ+2nw {8)]$3-[\4|7Ꮛ;װ/*4M>.ZĹ˫O;ohksuf}A|ӧ7x@CtyMsן钕tpz/?_hh)N+w˛O߹]b×ັ5H!Cnc#5ŵ>] },#k A0X6$Ϙe.D>$ѮM>7 VqlRPY>nm:ijSw='Q֨{PQY9uBQ F]TWYx겼R]#诳Tܟe'4z\-۴\!YIk0 .*JHsZd> bFO3v9ϳd 1^[\+%[Sbz%kpC"mxa+ o6ݱ1 kW귻 LIFs[E,{>Ђ%fn4~V!G!{Q!9^ͳ&G3k#L'6^>ㅏjiEnMdv Qp< &vЪU 3 ɜ|Ӧ6'U;9x%cK^е0}0ۧ1v>VY*D1_>1i.Ư97ąOėal7/$/Sco2Ί7}PUڇ]h7F_DtVv4gh9J 0hsc᭓RzDhuf?AԿjF2+'&3Mōvtbh/,F̤r7$ilRZY=h-Gy/@WV[wJ*& rN\s'/ѹgeVţB^pӟ wZQ;REOZJ(op++npzteF_ :iO]\=]?5j}9n$VދoPxHp$QAǟL; t !38j~ُgw01nbH>h+j%U'١vKzsigHvB ۼVÕSׄrnO+RJgt ?\1㝯dG?_B ƌw<(>Y\=2Әr"vϾvףΫnGHL!$挃 d$ud$ ӑ8⬥lIY,j#|Uy}sb'hwQzN#LNP 6^tUE@| ~EQސm1.Ӄm^icYZ*u҂i+ֱlQb}#jwS^ZVMSHө\7EƍSEvtjS7‰+H'Z9S3bf?}۴ WSmAqh̕jaaߙ C"p yCq+Ej|Z5Q<a| \<ܹiAoא}+E M$~Yw sH~]|dB*ɿ}A|[oH裟.Y3]Ät./;N'a|0> cs.׌m )"DKgdH!7}qMipe i 9h/spYF!$A$?(@ rT9:4@ Pi(7E *AKCb56X$g3 a:k6"\sS*=V Ѯ?ZS Mw>disHsw?/,\WS z@v:XP:<>:1X3:@}ޱ΂yl"PJia[]) [M~:w*DѠqNe4O:C\ObFy\`^bHJs 04fۻ@>5)fkXƳ V Oö6 7KHqt=oQ=V(|zT?l3ޠj֜Fۢ݇?t[tu} P>2ơ|\_Bo13q1Y+H,ǕRS79i{IT~\iuRM}!}i_;'m- uBCH~E(7S9q.Vry:@ `H11!# 0ȜeMS2O<8BFǛ\)GS!X|wpsUr 'w?1' UNvçw_ݻKnw]+û_Y(<~rs'{Y6/7Cj.27"ZBle2Bp &T%,x.CRB`E*|x9}ȳyyX5U@)XSQsk.ǚHTuAyb;or4Crc&j9_(v@7bv7&8ځr+5~ pl08E%g#9=5nwwDMe%oSťDaDtI3v=YT\;* 2.ZDAOI𩎧ÁTbZ*>:8PY=p%Ȉ  c9dQccU:z8^N{@cȾ =̓b](>L)꾒^E u1kM_-A+FZZw+ݷR]gaP1ӍΔ Q,bkqYMCkwwIv6Q"] .pSPmT^Ű9Uj:crpfez8_!،2 8 6ٳ/Y}uD\~IJ"gH,q8եjp46xlF:+"8KB0lAK5p8yAWX(o[9m3PP|p@F$Iu2Tz0ΉT$9+҅gv[A(=òi/<(œqڃk򚽰zAKneqF( 4BjV8I1OF |6ErH1kN%^qKҐ';A``^7ֹ_ԛeݬt,Bn[?+F^Fhkj4Z %& 9&R'Q7V1xxPM+/Es#,v hcy?8ƝFqd1&D-5q:N_l{'Y!P3F yC4ۙ۴%K->Z< e5Ug2 :f&E@o|pRϳ۝G= ϮZxvEzveuhƋ@uKZ׌N{ 9u$Z-.Ƨ ڨ쬎tJ);#FӺuyG(p[?kT<(e Z'Is|I9ZC7! {Fluw, ~Ūf843* GD[J\Rd5 cqUjIفD:,PY+f4c[GȖ6,1Q50 80=Ae8;Ҫ!3Ә_|7{50/Z'9' =Ixs>&&{=r$[?aZ`lP/^.Կ~=?ds}g<zoWhNF4m3m|53X-s^ytWB)1J%\,\<#:DOIK .w+}RCX$;:SSGM\C{pn%5'3Jލ8#. )Z&Hqqu16Aqu~({S©=b3 `BnXֶ?ʗ+rF7X=|ZhdžgMUI) Cw[WhA> RUkXk0(޽(Rؿ~QnRŎ@J9r|8dkU#eaM!mFqQY8ZZBb#≄!'*[Uoڷ䑄J̔h3P[[xH4"i-uOѦݽ [ ԿQIƐN;\i3#Rrbqx,EdxΘ8 [%+Wr frr~ 걈9a+p{m[{|~Ƭ5R3[i $E\ǛqC@ބOAzPǠo0,mѮHY"R?Xl+(T]@F,n[](s23{P2E1I_ҕ %oc#kƱ\rkZ-T>8s7uxN@v䂁jU?f<1 .bhmr ϼs<s کF54DAdoNl֟q7;O*5}>ڪWZyQi3P{#_ j)i~06hw Y(C=UlIӍLz*"Zj>2j;0GXl(%% J;tTboS~F/'ePj$u7B*шDb Uȥ~5@xƲûMHWbx31Sc(N)<ϒ[F C9w:®(;'DͩiQ!OX6ȜHF%/\@2V[6'FPjRW*W[EC~f_qh濘oᱸejNVY4ؠV7HS}|?)ً׹Ve=V!h"<t*$g%er6Eϴ2":J-^tRh 9  ĊK0Jx#xQSR 4h$:ADFp0FZKCJx ¼yjIUkҠXF'6j|%(^TOTRѥh:TZۥ U륤ngzQ49-&%mӖrTPƐDJ|CL8LE0 cmL5uL ".tWq9<݌(T)LS.-;xxZPЄ:.'UJ.U/rXoA$O 9 5mW MfoWHom,+aq$m¯@Mf}y}[|)}fjBv. 3_@}rs;]Wo_Oi33!O;vB~z?_]yֆ`$W=|{90rQej۬ooz㺛r; iB W9.x>L"h&T8)GU-3.d*}&njlg]Rl#t}%_xu9@a|%Ny?v8=]L!] 6JXwjT.U5@3%z 䪝!1Ԉ>>ڛ'g˲7f~~NY\bfr;|}u5+8|{ 0l2W{xn034 t\\(zJENҝ{>vٙto&–Yf#Ж|]66C4*'MG>Lh30! 
oWn+,}U r[+y{þrExN긳Q{a]|;i$,KZ&aJmi9(o!{c=u}567T~?\#·렐@=5 7$8=ΎpP/.T_=~ǥ(;WLգm1>E1bܤrZĎ>jE袩ś.F}xq@/@5m F|c͍wwRǝnvzk_ԨQ|\MG~l ˨oXxp'>,XâYRbr~\=8qQtw<\0ztse2Ƨ(.2:ɓ!QGG+ǰ )o yFY~ygate0,\v_Ts2w){|dY&WK}Ź]˩]Џ7R8T1+{!WlPF]$iY Gouc'Yl^i!i?cJ#>wu3 L_[{_."kR{?n*5ʩ]+9c)P+6'3^#ޛuP*9Ctofq׉=ƌ9gz8_!r-׾C{r#NfnJ怤 ӤH5%Rf/&Ȧ>uS^%UĪWeYse?'j,2ikϹ%sql:QnqU*psAtnMFU:F +ϸ4 f\ 1GSV}.x7  Tl^]euR_sn=_2?3﯏m%X >Mh gcs;c]Oo5utvN'ML,=eTc`oѪ [suo_Tin%ͻZ`!j/-lb㣍P\qZ%Ven+=}= 0C k9Ł_̿,NIGs\^&ژ%gu ͂ 8^]qX {Y w~C]@D򕓻i?;g?}fczgїz5ߙ"{w51gٚ@Hgt'~8Cªg|ODe>L'*NA+]+ u+75 ^W;\:^¥ MH9.h\R ^|Wysu(6B _UW꼆0,׶o$ʝaӚaoy(g;Pzi)r3oJ/]FP“2[zmm]RzU2J-aZݪڢF/P qz3:z  52{ ;R~|@}vm*2\?fd7Wonwcަ ˺)phu&QiJ-@r%蕤~X[NkέD43BNG_>4X~N.g L1x ?2J,Ȏ̯/uD jmgd et$}~`~.EYu?oj OAh4 P;Kg-KnzJ 4 KOl&i* #^*w#ޱvlRl/\#=8 DMnJq;iGpQQP?tDvhAE/t E*#3beJG:L4W’#0ӹ"|X"Ra(>l!ǯPD)|ػ{;r,Pn ܰ8'YXQr׼8>淳aS|(<# A0F2oM)pwr5ٻ﷫7~cba>0K|0K|~z;;8L~jg cbV1J>jKUstVS)sjЁO56"e): AշkIoEM} <">*\J"tCry-o gX G!J:R Ar ߾dO&bM! Tϣϓ̆5M4MR0OFML`6" }䐊yMQ*3,%7eH'vWZ}e7;nC/V;߃M} Rho`\nofNJDy<љh2IxMBq]IE= NEތD|5JyǙ290z4Qn箧p>A|n-IG8V<7Pnk;JѽH~j :94O+1۶i`(ˢͫ{FQh?[(?.jp]Lx.'2s8Y|^c|l -L:L'`EF{q^FeQ(: յK 8D]Ͼu֫agڲ?ůw'-,?rb}b,R1B($>'y VZ]uMUm{,k䯆__Ʒ`T 4 >l%m}Z %b&.9# R\IkMSZ?D+ƗYP}SU4ILKQ%X$Ms܊O=1<GEdV.7qkܗ"`&g^$H* ʺQŬIx@&xa}TiqS5m+ ,Mv^QхC Qf?#X #nq"NjBd;Jmk{{鶵Ph*_rVLPκqp`cvja/\2OIF"3q<xaaȚ#bU1||a ]N1t)8Cň~B@Yh ߀73Wi|NϜڜ~0C"5{SZ]id+H]-X0݊B]dٶo?b A=S-:%.yH)m!ZySۛj l^|JI;/FUIs1}t)]-e-ko&ś o4\"uQ a\ vkjXQbahOE*N.5d2ޟ|N…w$c5 =%!d'B_ZU=g*EO8(d>Fg+"c* Jf @Bp~y=s㰡\КuhV9B[jG`FX .6dq1 ̪}K(mp,AOaɅ1e-aψ"K+  $K5`15XZn ?)b@gFyMBJ2 sӾe9Xb^q-q[RϾ+S%bD o0l(U0_x|AP18TԸB0!V.١5Oj'&B.!QQTkD)ixCCDׅEO;)uM G"\lKK۹1)Vj;m)xm.^BlǬFř^:\R\ⴅB*et,4c[b Di=8}ęl@<aLDm<]}AX-=Y(J+Xp\}W,qXUާۇ"yO:mSŔ(p-uaYsKr1ӕh٭$jM5bBKH3qRl @5z;ysWk$Z0 R!R BlD^CϞ53;r)OJ`$B 6YAPBx'5jm#/Oi3}PjDPI& kZ1s9$:,gB֯%+ڡT`$5<^("P'ZpdQ" *$8Jrn'#ppKWn0(x;~.p]#Ҷjਭoa=Չ{iNWl]=:(""*3(T} cOW;~QR{y5Y;ּv`d xsMҌx>|,&-ìeu a n6M))qrehc8R;o(Db V^` 3GOKE~;04Mon-W/z;\ 1`+|(X,9X|~->Fi- 깒"z+4zv~+9 ~NaI9:)Y'0I~̇T; `1I""I=c ' r#M9WE_ٗ-ʚx[#|Lkhο2#?zY0{2ML(HͅX GL#'Bإ փCago@ eJtٗ?)-I[ KeOk̊iosV`-TM.Ŷtg(c 4#0d}Gie[iGv\J‚(J>J2)o_{bL9#GhICL1 @;4hRFXfe\(WK4U`1{`׊UjĦ`Еeʲf)V2%y=<`xPwFEH+e-ᖧL̔9p^7` 18 <:"e8H5i}6c]b;ijh;# Reo-'84&E˸@9s%D%!E%DHkLdS"Ǻݣ4$KGI0pTIN+.A>xhv/A`kYe(#N2 #C i af[a&{-*CO|Px&& eedK٘o^".RRIpXT;(bŅh] B8=ɲf}㑼:i֔{a@J3CC󫅙4Dz-2A /QHB9g s'/I5)lL`4vV !)CZsYg9'Ӟhp4 (,5^4,Mq8)K¤C`zQJqN$s˞[lTTy?zz@Ѵ:Z|w ݋,@9=Z 7IXX™K0(Pc&v4\fR?{㶱0/9}1cx44cnq$Q%6l #SbW}UUU,qJ&dk1wҁڭ GT#;7o~0zj2L vG]0}sTF%0p:𬸾(z1|ʅjʴm.RSX}m2Q.E)HT"U"vr2'睉.{W"lʆù­A&֟cY`F{N^Mq6;Z) ,+v[/̪AID"]%-1g3!41<~M(jZ /ǢՆѴ$!U+Ic$%1=$IF8Lt&V  ~4Ʈ$KI"P;J5=Р$?nQ #*P1/,ar_kx}nms@ rj-c7t>'r䍵@47nsNOiCshro]xjfoڊEkN3[#;ᴏFTFZ;t+kGe]Zv>ʩDOkOb=4ȟE6ŊۣXzP9?UhAn3!$Effm/yKek⼐^\CA}l%gͯ -Їh|W>[MFh ]%\گ]h׿pv36)n =.,{g,3?-7zd^ލo&GHy}c8~O#i8WӭXc4ipvwxY=7B3{N hD늵yu'h1zL5jO:EK_rQɲ!]k<NalY;ӭkY2w6I u%-5MbhuV/>SFhI_jdmjo !}Bh?&|G[ǭ` mC]z*)رߍzƥ jVk^d,#t_47㘉ZvQv8.\|}rD_M}h֥cWҡ;靖xej9&2sgtB[6$^Ir+hljZUhԉʷkQ{Bah?r 7ffJmM'vo,7&~x|N۬9`m6Yz֟l hr @>n}7>Wڃn'fA7/&pE.u]VD&B@kC(ܵB}1QZs En^t# KzSH#K;+sN1q&~ ,\璻@YtB\2CF7+ & <>-o+t >wj햭\9^5+pZv{%mdߏxǙY|Ʒݨfw+:6DL y;XT;,>mQZ| w+~d=qnɮg&z(rE'KNp9;.=UE?2cS Iz|jr :OtMUakLyjf>MHȅL|uZzvh\y`#:bݺ0jܛRYZ&$EI$?Z5j}~ՄӫKÞ0 eh( /LAbW4#1dʹ/!F!C^BR?$Aԛ ++r 4zf ,Mc.RA@5HРj ׍*cE zqܸ܋Dc.֐B#ѷp@#͍l]r^ʞ 7EK6y4Ԓ')C6ZdbƋ' ?]uR.5 w@H9 DrUj0<9ނ|"UŀapuZK~HrDZ+uDaH>$ $BMA2sAP&֋ˎVa>3GUc:Շs+|(X\Hsa(m!T8G?.@ O_Nѹ=>s7{6g8@Z rӊ"2:f*L~xa g(&c:$j(Ewgd|dHL`ɍHeaɽ S b%PwQI"|g9f6a.$ 0HҔK%0 荧NXޱg1jJ+1VQ9-p荥-noWN ۘŸa î\D(blPxGśy]h׿pv36)>[̈́,{g,3??]~Lv_ѻ$W?/r| Ǐ`8OA^h 6x]saDi~'Dk\}:z12BG<B5wh=ۺ-[k˙=ݺ/ˉིX<:kh[)ɴ6ak2e|W, 
[ԻvA/\$M_sI=bN'i]*geW>92`9s<};DrAg.tFۼ2W!O'zDˉߔ7)mWjMB6hNFݼh>}ՕVe*VּS:U Jy||rd*q(~տ6ՉW?燯̖QÔ0mIL$&ɋ!CQab0I>+am&Z1xs` 8ۡ\8( $AL+&7ir R ܉ Vކ٢bӀݿ}t71_g|wƀ+u.\d8<-wM/k!ߴO&w/*tFPPX"J(`,}ǓJ`IaDbIRL42"#ba*ixWxz\̦"9?ܙz#TJZQ p7*C)& VDuJ(D4P0d b d«t΅8pÜHIb'" Ri$$ҧr@;4H @$n(B4 aau^0!aƑN9;I$ҏwD`E7w܊meZH-g r('v;,69K"rJ%D)Fa(X'$8(JABa"֛Y Fg) ]X)e!PjbI,hnh$<{#e!uZsRSAQjcabq$ƊH$@J#%e܌2 Bru1_hDbS}GXW그2 *077UNtjz\#|$Uo,ra}8 o? iu,KÒx3M]rEnzu^` 3L}WNْћw+wsꎆPr*C \štM3@pSRj?'Xn}el_dہwt\uHk|7_`M\D?=ݻF̸2dէpg7/.[;r,`a QoFV Ik"24h6(OȣDE42FvAгy=zM\%$}>!YBxL;o.T ƻsqKVtX=wjf[gZkdزCOx8qH(T=xu:=vD< $W՘!8a1յq3WdxHE $%!wA@ɡ":j!B[>\Y{#%sYe_& ,3O!c! @Ԩ楠 儈$f2S {jZ. [ sY&EoRCPSB Bn)a0<蚀"=d Ae@|&g8+8mnZvʐ`@Rm少vITv+zgTTJ .F} ъr6=8.b'RaxNqqCsJ۰%JhzMd?.w\i0? >(wrA{We<$)>.qJ?y]fjh!#Mloؾ^6IM6=Xysǒ ?Dt `پ@`໨{F* p+zyѫ. X\uկp!KedShYc&i vta)9f_ۼ{@L $Zc1`j haρw `X]m>gS 9ud(4tSE)ۂzrMYo2N#^ϭlӅ;Ah/ދnO out$U0ai HY|WRJd~9s UD޼ѝ ~3V6ƣLJtegC#nD"J+mL˵/0 ؈B3"3Ej͖mðE:[թpҪ1 +/8L)Z`&W7f߳o]I>ofw|~V Jَ-B(8cWi!RV; `T'ym;迭O KX1HAT%ӹ3w(_*ufn> '~υB0&)tEC"ݡuQCk(!))V81;:r$‰BXZSw.VM8*$6#\`wދ\J@I.5(#/@*ثS۰7o$b:@b8c XAvҳITAI~Qr@9rNaY$H$lۙBHwV{:>V7k^B 'LS\@oBb9S @пׂ2 $M`A!ǕFJj4:(H*q_n8 ;RejfA1ԲFA I âUZaB(˾Pʹ?)=:H m'1J.%H_ sU.$<*adpa(0)E6 |v,Wb$6eo:f%xXp1^F`b" \J ?)\1ebP44I-vGl@ `]@@I(`.f^"`R371@—]%'LS08|)JgME z9DD.DQ{:~z}4,a:d'x_@\~O5' #8Nߌ'?ՓM%hOHAa 2 4m%$`=a }=Nf\iMiwޭB|:)[:zњB JCRkvZKժ@]g6U¨|fj=Wo]Rၯlu' by^nƷշ=0fRZ0dqEHk1[.YciۣJG/8ܪNSx+% ]I\$Vu۶]6Rh~_/Dg4V<{9*Q剳"/LXNBj78ӌ6QZ)VijqT.LzrsTjj5 $jQ݄eYT"uHe].e]u,|ڻ VrςL 1ja2 샴A TnBnGYur;{ B8wT"7IpҰ-Zg/hݰ IP 4  E6ی$v<6m֖14lJJ7D ^0[}[BӦHf$l.J|bq$L^OmH5iH.h|>caKBbh R#DELդu4jHfZG5U:_QIjܢpzT.D<7}_ }rPfUZ41^zpҋPxq t0/ZrJm3e-Жm1}cgP[6a0ݪgTzQG@a[jEAPOvMs`P!#qNL=8yxbVz{COEfV3>ppFQ[FZO4N@-jLgO7NhL2P5~IYHvvtQ* T߽]\ڒyt}[m7t~lZNČ J3kWB|L$bBo1sUs ˞"\0Xx7`Y"e!)Rݙa7I޹-rpq݂K鎟ã99͝4D#x;'gǺύg [6}w ]SL(.:ߧ&;~zh‰AneH,Gs+8½ʻsɣzhw7$#ſ^GH6!2@,,wTJ<AHl1PjP`weX""HrJ@A5*P,>PL/X KI21XGa: +gcˆV00MDLF:A /z ˈHaMma]@j܇ᅵpBwݲ- ^ ^U(n@s3} I"xݗo{ٱLɣe5A|cn^A?|"~gނytl*Lѧݒ1NzD|Zh d0PB4p=7$9KqïxEqr%B.-NE}a@0/& $>芩v{83)a>} ckxDk,3缱1~ôewY"OKp|S,.Ƶt!(Ly8D,cLJ cw}5\64|9Fޕ6r$BewvZyD^`txƴy5<[٢nIRHTF:XʌȈ w^CR-u[͓u2 .| z\gL$an=t{,|lvHh=qc4_)LUŘBrч){Jf}z;xӶy{kXSfUW4&gbZcE!Z_t~O#Jb-y,ekˇv#˲QB2Kktg_ۯY %מj5drզU> dbcۅt>d|yI+M[H"E </Ld*Hr&( _nڲ㔚0T}фdg"]e5O Ȫ) +xj[̏ ?ɏMAR[|j'-6uȗWӘ>积50;ęGşW(᧰W[h'#֥]{|?ܿ~]UgH;tv"?ڢ8pqs_|'뼚ō_UO%@Q.;ǧ n~ԯ?~TY,{Ѯ%Ux47 ֹ;^[.8G^w7z '9ҧ |Vs6@  G+$ZΝ]KƦʸUW[R M^REQSk 8eMyᨴ'PΥdK8o-_e'oKB;t5~R%2 )2E'BF~}cwnpݡw:UoQI#CmKL&k >mT DI9s '7{IT+0[j%ega:.3Q1ޞ8mk5Jr_pQb߂ۑ3J>i#ʨ8| C Z1ϊ|4m֤6sԪu?Q 8K/EfAk;BYLФ5&+vd0-?EQ[_j kV琷TEPz?lw4+ʗt6{oaњ7̈b fŹfM{0 b 1/,Q)4U݃AR65ۃσ}|YA|ǡ>9gp]|B-S"$O+JM/s؍#Æ/lcTċ믩os?F+7ึ 8E`ipɸ$B{ELsi!IpF֠Z xzzn- F;*Onင8n[k·?S c+xx2Yg~h'c.:Oޟّ3W C~go2\M9s>WVm}GVa m*S,nWòEp %H ួdf'@1CԖө.{cj*9> za˖<ߎeW@Fe M@y]Y Tʾ NE(NNPR{ c Q$l0WD$)5BdԡZ(U~w Ͳ "`"3@<4E! 
ZP R/T_jTt#B3VFmri'f eάG1f0\-ozpF?,z ) (\2!J|ί.glZRx~5qGỹΖ=קOMAq AN/LB\ g2:3f~r;| jYu4RF;ǒBy0A3$r2%ː-ҿ ?:Dl |N tC?߆?~?ҳR.d ̯Y7Jq[] N9vn;yZmVEZ&$䅋hD}xغl\uAѩ;.p)Rq\֭VnMH :2}jͺ N"2Aѩ;.pySޝuU[EL vy59&5A'r|9[|}MROd[_5 ȓ?P0F8OqB'4 (S\̨~߯Nj8`\㷪2\\LtLno~lr;bhoFJnN.iU}(Ӈ2e Ʌ2B20}(Ӈ2Mjd{O%Ҕ·-/[|o<Ȳ%)7hAeJwJ%#j՛7TJR` 6JICEL5|uhPZ׸]e'; 8]}d!!7c{w}-/&._mPltx6,Ѻ8J).A5׼ɂ&mʓK(M}9M(R򩛍Al8OYĦ1W-'AG1Rp4"hfS3.+ 7 :g;wk;'ڗ4.*>F}ē}{x~yaUki+14I/(/TJ2i1.W=oZ@|.:Mo[NqO|9l^-@H Aqs 2ĆMU!Ԋ՞f֧}TFZɎZFH ዁{!ڨV3Pu,R^B<vEy޵D4-˽n`ކSbLv; O9[  j7o/1^¾H.Txo.:\rǪAϥ 1M蠭cjEc٣ORaM4FLDy2\T1CSRZAD6Pq\bpkQbp:%Pݔ52Zi h<,Z8¶I$._/hl6\\ EuS@ڑgn[#9SE (aH5 @Vd,!J8f(@hp%8Q[c 3q1Ց(_ĵy˒|eƈ֐'Lk 0F ֏q:kD?UaER*稵pԐ@ 4ZHRi*eP1̺+r`]R7KS#"skR!q1"EF c4I$8JKFru"Sz{sѯ [s]uY==K}oWl̻o/STz[~揾}퇛!#L_[J߿h:Xx<~1Oi7 |7c;=ͮd?oL&ڲE_^rݯWܥW;WVA5H&k8T]N+&wr%[cTk7e=]p$W+Aa_@{Ji'AachŮ"s&ջ]UgzYE=~J4? ?=aYN-bD6n9,%^#,ALh?)g1^J%x&.zGXৣIZ=$ $" "1lb@z$~Y+nsm>Ƴ8|>WSw!@Ώ=<ͯAuD2c }Ⱥy3 pmH=L"Fk5 ,F8nI7)Yg[-BQm)de\l7.5a; =B" _ /gAf ٵy=6oL7W t򕓄*ՅcTf/B9NWx|ί.gljK]j#)S',q"r;i˽9; kqZWbpD0fa g-$./QOt9Ѱ` IJb,ɜ =H~#ʇ:M`2%HH+kn8c"9WI~QWcf5@q47$9FW֗YYyUЁ(pĢ`YCʾmH1&\K3b[rAc {,`> `4B\B xɍF咒UJI^Ϗq'$L3ג `4-E0f"'As*[x*漺KLG#򹱏+%vZN/vV&^(- Za6iPERm D9c<1,L{b3Ģ5ȓɴH朅PymU=q.G[Mv|Ό݃Wcݢ:.ׂ-)ꄊ} >=DZD9{c0&ǻ!`*/X1,k Wx{_&}:ȸ|/F3GP`?f'pTrh};g1i5Ѧ9$v\N?9 sI'el\ec;g>aKW! ]2iXq0m;xDnxֹr{N_y~,G3.u&V* BUQ&* ne:FVg/4ZDL+M*NҜRܣv4vofUKzl-A *[t<WP)E `%=y{VXv _BxkyS3;?5Ke{vDXpb[z=YB0UrKRuTyQtI)6QJzp2jY7s4jрĺSutkpĪn[$hk *Ӈ,,YVNTVQ, ﮔ{Sgt Kb b®$FA<Rnn7rq2j@tC" ssë&e`c3+ KЙ}MUS{ԠZf]ceױb>1Đ$$AKRjT_.3I aRߚuK`MJ,cǾ!eq#OQ~cL՞<M0߮~?κ39~hHcw6 ?V^&`BÎ6poA=C=b16p̅۰lt1qm,d϶wO&=\?L)sdnêN\>F|ӗ2&ʘS14sְmT!ɚ #rzFmj1#s$̨=QEñPji Og_D9;"I[AJ9Ջ f"ue #dzaEʊj~:0Jρ idڴ1 ;_*_/i`u̦qmQסW#4[ a]\NLUt0[L~\ 翺Zt[4t]?zu hׄ1)k`id\ddRI֓.\!: &GTc!f7WG2Oߥ!˱_E`+0~/WjO;fclc$1(>/z,<ݻh& mrL;<ŨֶWZVҳh:޲r9! y"ZKsTSjcnhRvjgj$䍋h-ܒl{>X U>{ \˞J)\SD)"*KO?~[F 8ҝϧ!Mn!}+: 7S0 ~Q5t _v*nwd!mPq?,Λ/ \^8|Л GN6T/aNbuD0Kvi$]%UNi1tE$KePdϑ6NrGi" T[rgQ IAc3"bVvw.W9d-ĩ1^TLi($%D.os&UgX1}7H 7:VY0{_dL3A qH!^<爜/hI9]g%/_vp;.v^m{ǩЎzbᩂ<ZZB~NK&j+B絋? 6r/Y`t1_S3yAZZ!LrtpGY,t_s_p8y[jRR%/TyT` !"ΰKXxUY)+.iLf^b] 7v.nZay8 )VFgi6P0o g=H;ޮEy Tg E @)V^~Uz]Kޛ\ ,ԗJ/nMq_0'K;0_ҫDź#Du 1E5X%+fEg^P*h/Ex[gBЇrAy??@I׻sEx_jBВ*Ըv Irj[8$R`ٓ )!:u{bĕ<;3s,H ϛպrrxqYfЇ,NjVwιIA0Μ|A+FD,7mjMI`rI%H!v@Syc;lŻר)o*r=2n1&ɐw@7imN(fQ0NaE:p甲ܤ^Fc#1E g'D>}|*<}я<<3!l㇫A2k|^ǣ|B_|h2t -rsv~{pG L L1f/}KUAHW(Zft2ʺ$\}z/s1J[]Qp.TY/CҲk ' [24` rzCy{&\&Pd /_;ezx(qPrxu% !/-Ў^;x \5g#:l~X7Z`lf?lfA) A‚1/QRG 6caU15&.x+,'A4p,ħ+*C/YINS]<`o&ް"RJ[0(}} {MWk7f0J:pጦD5U`uYH7s_;]l2,`Lg(߰-AP6Ds9 8fG(7ݲ߰ Ȟjw7[pDCsXxnWRAV\k,DRQWna:R$#Τϱ\|DJYlBF B]#̔'.}쓫d1Pl3ՀuCV*h%ϧGGtM{I\v- $  O{m+o^y^~àI)"PtDfr1(JDOV̳@q>_RG~i6nuRӃ|Ies\ %ZsX˄TuBQEV:h[%['5 ƙe@`{P$CQ!*PY5ִ++cNy&{C(R쾿D_sT9'Lep<|)/LG/:klw9!]gs3lHo*ҹo$낄kl9Yo7Z$[hugwMKpIxHai2dһ$ (jziڎfCz8}6D0'UT$`$6LخAF *#VS,8OX9M{"@F֜$,G2>`A-E&QB.*\nFᲅ;ƻH<~;o "CΣ4;F;O@2tsIet?w8 p;dٻ7n%W]5 n^ ;yIGkG%5Rh [Tz̴fbH!BZ+ $IH괻)%kݒ-e"!Yg:Ƨ)츃 ?0AM&&x"/p_IH±>Ha1QBao1CJK! 
ڬ(XC !+(ƸsEg(bß@ܹ3Fv>0pDt$gLcN!:rZ}GDTa}$!~^BW|Ʈ@T伕J-P*)0`q>tr7j `Js|$ػ?Tm*aB4@:r'.ǩDZs4*01H2iY긘գ>`)}CQN!`@Bۡ Dewzb\ k%]2Vtw1rV,-crNƖƓnS\8v&*y']nN\IR J9MRkB\&ʙ KL) zPk :sPBk8A Ec !$ RN/e~e_kT NEA>t!v}aE g${*ƕ\>[Rব!ͦ1]Kl$[ʋlpC}zĉ{׃qP@ܕ{OM1 ݩ ͹fAW=5[ǗbgOdġMNlRFa]Eģ>TP ۄrWxQ9+>ɢ¯m8k?zz_N=pO'V՟$;9rxuYop+8&ФPqHRPKTJ1( ),H&)JO?w9WUt:rñ]= n{sn8Ώd"L)!:)đ:GpIx], |1ˡU^x:jmٰg6g5eח~@].ւ8p{ /#<}9Jp jok{0L-%T#guaPR$u`wV>/]\4K`go ?u{XxZ$fT>÷h#qhFi#ц(Y -)cD!LF"ԯ0R D"a,rh!YGK\1iTѡK~dgMb 8y2VO$۸iI$j&kdSǭїSM;oަb^PI)m]\lKwwsr bb:хS9a~xN5o=^ z>W۞*?*v/މ%GAЕYfz|3kq^E&6םU{8k<74d(v^ber" y"%SRt^'j7Q[,!GvuR[Tv;cֺ?EVg( J[:_&wI!Tq5tXzZ ByoP'#+AnƯtP˲hAd="p."M o/@%xYKܠmceZZ#FsYK>ufd}޺pu| *4=u=Un8QT b]sGmǺSm t['jwJa!;6Ihl||PxrV3(s C&vL[QW^b!һ@Qknҟe0iO>ۛt{11" _?:1`4S׿DG][F <%ӵo)m!. yղnKbj{|7GJ8N_ 25"v#_ʩ3x30gI:6^2Te /xn?e8!֬'sx3g%D$Y4Xt9zľp^iǙ6Xs4/ [\icYH?JDUHR!Je ɇr%Z J3J&`qBNR)O,S;*X꿋Yf)7k#3d ZdAB"%l0g4Ů=%9!.=dĽ0׺WgjA 8hff4g@ 2`/D` sg +#{:'͜WƩ~tG}JEeP)Hho^뫧z8pxsx,HdΖdq]yK Xi1nslhSW:s7_]gF^3hl68GPrJ .#-qiu{QFie;cڪX~;תʑ>~+sS#kCX0:X;El[b 1N?bp\r̆V/0sϗoh,|=??5)ƩuL{qYz^OE,!=wq&KwMfkdlmƾ>&RpdE"qEltT8,bbY^[{ݾˏ`U~ʟ,&ȐM"In&f},޼~ӿ0ѿN^:50[4#o5wUo?%mS_mgE/_y^?ju9-|Gw@{~HKG~lpW`~úow26Ǐj8k'Rd>]|Hg襾{=Ne!u4,uq'f}[x5t,2fvz;})xۏ(R܌g߬@>D᪫Ն ߿Xmj*7]{{>Y;\ثb7i.Ox ě޿n_QaA^{GѻtJ74LyI&o#/(Zr0zUL?%W͗ak~㒅>77IEqԈk;5[ڀc*q=u"envHxDӮІy݆Ezfy][o#7+_,6YqS9ߥO>`@~p`X*Zߟߚ0e:;XB9TUy3}=h6t3>EU^F;.bN#ޒ[.uC=zp P(A kֳ c=vUEŠE֯E9 R7~z5YO o|YwM;>w"3Y"xۂ=x۵J ] ;W?tӎO%ӧ &ϒ2-Y"EP{%bֺXMI"2_V@irksQ!+W^sQxB `Q`e \i M҉Yfs3BJU 9H\$/@( D0@6{kq\SS\X^0 >T>A7tKAq| ؘqs+)tR#1Q)}Ք;u`fƘ&5j.Pt'r{<(?aSyU 1FŚվ&'Mx9VJ0w#gF*L#z0U~6W( q67Ff1ׅ͚b0 2a^zqQzE/MP lxWHGHJY-~-N58}RQf6(kC vmdnW!*cJL oC1XjZ# ϗn=&9'6zFP8sl *t5M$(&e](e/ˆe#h{䘹ZWQ/[H'nsxDѮ(XXr@x\@ ]>\I&WO@$U^9$;$z]/J)?̼DYtCJ@5 O=&$N%z&*SoʜN]KC-0HFLoԣJ[$ 7ggr Q@g"7Z` W!9&dp &7#ʦsW>Z `S='0)q. sD_ TzY7cvpxA@*;j6%ax.c E`zc0.BA:O6zyOɄ5Ppv[!s!*&@|~b04D2E)[5H& INCV~l.4_B]⯑1$v= )ŒwD{~ =76[ẠD \6^U=)85\ 3;% 󥘏p&6Vs҂/;?yIRzFprwBlyl8L2S@llz:Die| hL3" v'Z,mq-}͢Q)$[iA.cLl}FųeH'(J.k^\ba{G O#ӫJf=W2 y8aэ0J2VqKa/kNZ-;(Ƅ.LA;ǚb[;4qV0 =:ϵNffNj8qk0ȏ;۵ے!)ݪ0mJe|Ow%D0+cB¶2*X?ʵJK (qŔ1.$ʅ)L!Z=kՂ`o6>p 莅EW"y*$TsTB#A [rZOLSPH-(aDW }Onh )5hTk'(入Pv32EL$cY3R#โhJ:Nȭ湥'$aSSpAP9- EN&/I$ -$HE QMRRhb3,ɝŨ$IN_ ڽ&Wȸ1 5jȅۡfay)'ni-@!SR eEȞ(q'K֐t+]7{g/ Uj,пV21 h h rRv•;3ƹ2XK r[/Ii1dB~x}ShU6לT[%~X .{ b_B+ݭցa55CƙWWpR cH}b+U7nJc;bUqޢN޲OrB2fn*d 9@{~V's_1fv!L|6=/s_~d;cyN+O="ZȔz÷lDqI\me]n:B@Z̞T[G3Pк!|̩j&"ҭ -FvFqhk-'(ݚ!|Fͩʵz[l5{M^Gs3~(OᗳH YsDt#&@D"\bµB x)9#ܸM%zW "K ֲ]E-Di/׃o~BT0qv**K9df(s}x5㥁 t$ P&LE h. >;]b89TO8VzYhPC@9ɈBϋu}2ReSgB!wpw} UNQ=DvX*}R{|yFѐ!-Vh貄WۑQKW0f7"CX\IGx[8>!ӫ7k,h-&uWY#z,it.ka':#,+HJCLh`f8@5\EJq,Нq~cMFW/.RT .kqK!&z6\ꘈP?!h;ʃ\)pN ~ ;s]ժɹ،$v&}xr#{ڸR b NROP"<:OE;G8 d2{vr=ޮ]GZT4Sʺ=6]U!$OzGLjL J^ÆEj[@.ZC%GoUMK@v8nS $}t0Mo8|ٚp՛|f4,,~:yl;fdGM ;47#Y=5ĕot#'g^L:):I/68I||0(T hӏYzm[W惗7͇YC(92V3 4GXLEn9Z@t -CR $pM y_~aeAlC7e@7qzQ? VOBP `Bvdmexk!E!0hYH I\bKshņJ B"Ǯ݆A H8<LI&GgҪLC*;qe#.2e&¬˧o8v-6FŒJgRqI-Fxm~îpT9O08>?m ~W[OAY8I=A0qb}QԑO{GdTbXc̊Bh3XI@9A #8"JMK5I4P8_,r)N1)z߾lc- j4NXh{SISsGE-CYuKLi+cѿ:x SIwzy .cDX AԜVTMVSɥZs[rDDRXN%r'2",w2acs-YUuu(E˸f^b][oǒ+_fs"@v Eiyq]o7q=3j-{[WWx 1UE; NVXER醍֗m^* N,^P!Y_ $2M-+aU!ڹ1>|n\aΰso>]9^"Ӎ2^zq '~ͫpEyZ͈ ,̀y+1*pRw=G6Fe\I5&¼8vOSM/ys YN^S"xJs'i\]$al,uo('sNj:F5Aʍ{:OHYYQܦԋ yZ)Ѵ~Ӎ1?sK\ aȒU{P`M^>iSF1iSFbx!HF$P$ A!K$xDN,%q ֔ڛ/?%ʮJ)0UH8?Yb(Yb娘gYoK%G22)~,$\ A#US_ߩ/?a $oW0RK猪_ PԻUVyLhTpRK8e!p0%F\gV2Gɤr[s \k8&eRajӈ2!-0k&T챦 dЎXe<=Uޡ$bj9$ Xvǯ9bD+^n#*aV gDo" &A"clg9 c$.Gn'e_C޼_x`* <7>AXǟ_A65I-'pkqAC*)~gb2wnר_N|ྐྵEb=onk%*p @oPNaxzR&YRXUH",`v(st0Gxt0GA1F5gHG[{pN\Fp,›@ 5B(0DHOꋒ"=//?nG`M1& ;l0f 2ɵtx%E6LXk {g? 
lLFLe C͇0Y@NeALsHF\fvV>)YMl%_urt&cBc%XZgnSD5J_nXRaהD&{?._%X)Z G1XXŲX6|^ 'K2_0I=*)TsbT03N4x`MhN!)z"%2BpFAL!+5|@t7Eq2)|`d:8f#FA>5Q cE +|fHRBV$"t" ؝e.ͤ5>o)X04:|,>ï~M ~uAei<%PUs L~VQ1*FC4SwIEw|^fz.Vjo<_}oɫKY⺫xpRC,(ꠁX\S= ;9v?IJr kmHX Bc,3AqtPX=:FM>F*.虬2ń,E+r:THD gJŪ'UȞmZ*k#R TުA՘PD3 _\UR TSQP N:I2J'NP\%R TvuOPIkREI@k֚9C+.BN o'?잴H:5'^5 9'$'[ B"b&-}zu {4?U L<>|lti\6)zr4EWOn(1!=/Mӭ>E5[6fd8hǢr<ojF破zY\ie WBU"AHauIxU֦x{5E^ܟ$Q]~U뮹_B_NB&vu2Jk=˪uԍn%,hC1 WF9$yu -*< +o^ZE h%Lz3R+V8/J]S>͌ =x͸RuZxcRi#"_;żn6^}]rպ˺(eE<4ppNJu"E"\ $8e`>:;,<IoK[PG&$sZBWV9 Չ;cqwL\,hj}eWەc(il&̌'Ye}_>,iv ,76^6"(?*ѕ;QU}!6,NB.3hN߄W@}, mzj#U5\Hɂ)w 'Q|Ŕ=pl0`8 ! fvqWo6X~oll(ZcWu: \W>~`OӛtĂv ſ WV[ȚyX//P`{H,eD$XATi)$Z^nM =Ad;(Q*Fk\% C` 9(TRt{.RrJDUQZBںB1hM;bjT5.yT)-M}+)ZW6uȳ?}1R],M[&S~뽊9Ln: ~碗noz6f&Ͽ] /,'=;WW(sY4،ewvN\ębm%5q*)G%N+X2(gn+aLZ@(L`6P a. -x}x xa^7\ Hk%oTX^;nBI\'6}m%Ղq>0&^<$Go/bn@m^|7X>[AK26"FdW9[WkKh=W$>|BD4wnITSפY|_ \_nb/苺bCIG r%6l$ll,h|2yC@`K@ HeHqj%; ,dXdAt ؤ)c:F#Xލb}-f&&@1P{ka~*Or"&# _1I>O"joț !^Ť% 3ƒi-z~l L!O3uaCf m"[١ʜdO!wqAI7`ꁂJ`xWcOn?r 3 5Н*)e0K(89Y̔;v9z-T c00V`%  l9g Ψr Ⅱp ,\$w41@ sxu}rq__9ѐ`9CL&|9"뛷'ԻO+}Xq~ըYǨ AR֩P Te5T:@H^ ҝWOO(n'8 xy$SDZ-^ i4ôy}Le]nd <:̡m I6f DL4KƤaH8_Y#r %- g _޲gSYoЩ"ʒ(! ǜmqd:!DILr{ dW%Hn+Mʤkn#,%|^ғ㤶6ٙLi\h4ZRr&I d7nRqٴD\=j[*v/֯ ,Am2U,wZ*%U|KOVk>hZS%Y~ڭݔ:i6q&sT $RFHR@RhH*PnT.0L5jP]cCѕ6oB |rڪ-  z1SN&ȝ+YdyZZcjZ%Wj5 **#$qk9 xk|۪~!VNrT1Sq7tN EX߽6~Թ͝*f i+j.؍B!tJgi]HsFFxkTZՓeAZdH"]|mMm{ aMT|= RE]yhfp9sv:}ˢ n9.^|%a*yrq YE}M9SZI%Ny&JhBN[hRPrEUTRŌU1BOBfq؇eq&82ሕ F t}^Ma!~69[L}.yǘ!T$7{\cT2Qu\[Umu%:Re^Vr*lQ8R/p\KU@J#3JPoFSszKXC_C2? d_)r̒I$ :_btӽX\3(1Jϵָ?"Jk\BIR ‚:N&LJ,gk9l$Wʴ#@Jgk GӾW ytz<ob"ylt:fWu)P(]i; GkՆRW'Ro`8݉[aBq#h~ߥĉ3!)N.݊CGy lo1ǜ+-޷fn#&m*ϻwt; #ܙQ" t$ۤQmSi c^P%,|ё@J}u %HG5p.ثcwl zL ՌRԀQJi>(jagO1Ҙiq{u;8-(yw Bht0@N ! F*<&[]3]j6m]I&ԙ341VIJSὶ33ZXR5w4 2a䴦CMj-*0~w}%_1a!(2Ck"6h(\dZoSbd4V7^F5&HMS'ͼN w&WcL |(VoF&k:i,V*i\*X_WV }.rK-Q1Ъ1f(ڐ!۳O$b,=K#j$%-WQ [6$Gq4$#3^GK&@ ޽yZւF}U\ܰJΫ-FZ )$Z҂ |+z݉,k-}t_e_OɂF\R][q4`T9['G[lWKN4hBZu|`jtM!!EҢR!'(hݎYG0:c͟Jqn)y 萳6Rrlj Ӡw,2Gy:Y5FL0}zIgr6 GWXyl/G(ֹNJTaõ|CLio=$ذiq͆J(FfF+މz{luO")UXѡhjStCtбt3_C.?Fs ՏD j$VQ)Co*u*IߥNLu;y~}RE<'փr4DϱЌkGX*i &&˜_ G-Yݤ&%yfnaC?aړl0[Fuv=͞?>4v6 5Τ;; p^w)Ĥ6uԥN@eFrgM&ݙs՘V4;B$UQR - TJ9. $5�&B)E~8^5lØC)1: *hASm% nڪT+s .BeaH! R͛`LRK!]*l).ݸt)e{^ z'.s%RVy",ѻDSz2J"ᚁe:%R !$B;ޠ!5h]C8u?t 68)&9&)GFoz/g^7PR1m>u. o][kv9.Yswq\FY^69(d,V.8<±?_rY~>?q[N}a῾0i/Wgh-]?4}ǛPxϣ$#Dc/+7l\^6uAHzV_sڱwnMlQ=WptkA4}Gmz4uv=5nhֆsm`S K&u;jaMUzrQ ~jbkaދ3De[-l? 
"e -ҷWgUW^*-hkW<{q\0J7UdhBF u%*tgT4"'=hPq4!'3u* (Gi (@P0_Gp ˫ސ7voBH-p"}y&2ͦ,STLaTy1eLKQdۖswbU5I[}=$6=;Kry0EUc")&'ͱ~ʋ0VU(^&@>URK0gTRD k^ѬG:qG#+ e+j4W}y]ƭb͈dr i.Sg-cQ-Thi$tOҜZmdWJܗxYZ8O}@H:.a8$ ߹߫ZX#v (m!Z*%^T+b{>*:Dw,u tㆃn}̏npzE3淪`pìfċ="S:&J dzhJӂzU\B`[᠕GŸW!#fZ_bDZ~}r ,`9P/3y24?S@\,e %IV\?=I_Vc\B?[z-/RCǟا߿J.k̹j,#G?Ͼ+췧+FBvR5&$k~|=|)εFΌy1Z1)%~&vuJ ;Rj)BNg5#J IhuY*${ODB ך [eq4lYyGBG쑠 2s4z$8RWH V?_7kB*UKB$N*Y Z8ʮe[i_fK~wS~ z1Q\!n>LunnǒWq[N)KWF&\ LܷVſ>|}Lv߾هt}l&dznOOd Sѵ1t7/M2{y2O8cLa/?+7|ڀ Is1*ae[nk张W eMyfnd-*rV!ʫY.|o2҅- s6uwXbys1uSl g=Yf1m>F79{{ on ;D͛\r3 Sajf ɞO~U]djvБ\h\ϊ1T'9vtn>`8 ;Zmڠ$]5QǴv\Qf"_9 `ޕFr#` ~ۋx~م@23V7X#:,*!Z`0Ȉ/}rF`DUyoS FYt1GpHkA^Զ~b rm"!g7˯7?w\|QGA٥6ǩo7qY;h޳V 4fDHW%hhx$/kpX)+>qGZ&?Z⒝s0$y7aD<U59l7^a#2!_u7K u%U)v% =/VK!Ľ<Ļ?}CF]!#>2bb$C%8_N6 ԰G4[];dW 5 pSi=SwЖUƩ (:uBMG] ?qKGp\&8Ro)Q2XG  )Se) *R&襣XY3ݺv#6KU/'+CN4SAPA@A&< ߓ^J-C{!z7 [+ͺeV<] %-'AX@T ƠqrI_??'w:("tn#jS9lp[y[m׈C ^> U,\ 2n#vaF{_Ё]{QGK"b`YV tQH]j^ܧ<͟Ϟ(WdȚe;8| nMwaSuNΑ`aߑbGZ^ePk؜^yh]7Zz zY<] .6 W4NMj_9c!lNgO&-&$t"EamMSLFJIT&z Z3b+l@؄@ CC4r}AMD ѡ XSEQ;И56 QTFa@ W5|:j=6,V񭺒nТ0k~tD`YC0䟾Ě[ϝ54h15rʛuNgWt>k_jЇsD噾;I9bZ*;6[wME\ևUݿP=݈.-)'/#k.ثOWO-zAF(۝nhA@Y (Q h4ehO]ؽSOQlEC x)gk|zt~ ]4kψIewo}Cz7߾khPeVydldi:u%&?m? w[ߕo0SOASb㥶q㜬F3@f7l43m4A[ѹdK˻ tƔ~81]?t:&= NUΔސ:^jn?b?~kj8>q$@J1%V1~{q/7]{NmgcJ*7s' 9q-)gnNYxR rL]tIZ֩jڻn}XȉhMV\7ЏR~zR rL]tI֩n!ڻez>,MȦgnF9xR rL]tVdhVޟ6ӻa!'nG)EJ3L߾DDX^?ED~N }+l;;TAӫb/o^h0m%IsRMI kh@;բ#J]Ez !:']_ g xX}<+W̛;Fl O+"eZ5PV%;#J}Dôii ޫ\06 j*F|QzOFv{Þl+LQ?7mnD?FvocM81"19Jh;XF uY.9f2"ЇVJ(ȠTS+Ef*E^ đأstXnHvN-C1L@8 iI-6vR%0d"R:%T?; Q܅H5ռ*GJQEZIQA#yj&Ej(\h5lY1f" -7ұbDoIz/D뼩\H Cv$v4:X&)9Jvsl$N(l>OrlD48xdH~ZkȦǏ!oGxݐ *V\2j;ngB:yדMpVYz,L\rWԞ~@ka~n< hh7ْSku)Xܕ"4hxi5\$%(h_v@.' E$:tX[/4FcwbLbb^!T]`K0dij2PkX"U0PUbyWK9@2sě0gAϣtWVj9RCs XV{Ηܨfhme."Ohkk6i~slY($s5kq Rz7*AA (H@4hAA ?NaǗ:Ɉ Զ[K8J^^R$06R3\!YFյkRNĥXY66V"m3<33q:ZLY %@ff'\j ~Q2Y33ϱBHGQf>Ô%8!ֈ48 #2u'*M 0rn 8VTXyl rJ=`؃hlFGki>8kևN[dѿLXB!BR8պi9H=5KVo?l?H-j$jПt+'./0$ho &RU*JW킉6wQ mHJ4:'jPut*N Z Ufs{iKV(|Oϕh#!)Db 5ǠVT9މ#V.an﫧V I*f.3o<3u!֍5F:/Y Χ/R '򇝩|еjJ_TtPC:ijD?7|R o~}2/i𧿺w t"zImt}&Pժ>]UcMxJE"(z>P6j@k4U^W?1 7 p]7D)&L.kl۷{Y ɻVEs'}^o.` }9/_]br[Ezj/ڿ_7?·{|plk6F~՟AHDOd&670~(9ŁEAYnQ,pV̙TYVЌBKWUt|._/Cl`}E UrQT5hr6TLKm`!;.}QQ]IY'Ynɕw kkR.zHMK)HQhWVkr~uђKpQBYV-^*ԕ) + xn7@\]jv/ZoJ=b#GApTL$^jIgS2A,}wVcNFYR8ZDKI(dtI$p*+&o+ŮM\n OьK2jlheZ1@%B04Qæ{"@bivy[))=򎅃 Tu7i6M($O\:fp ܷ=3P&U"VMS9fW+Ħ y.-'j ^ t][o9+^{ve~1E `2 nvwF [HrvS%jfd+d,~U,Yŏ LL()c,cdIf3j7 )*`]%e)3Bu`]l߬皲\aMWI=z[/"6Bj4.%/err20 3uf2ˆ6xD!UgRT7Gˌ2.ƣ uj~`H+vp 愇mNI%+<Ć+DzBS7' Bn3nZ즉ChqEfݔYP`.9ĜAsﱬ"kSɨCQz s]z\wA{B gy} [NTu nq~+~g?jYQptQ4>5:r 4iVFn>OgG8 Q5T=ML}Ut ,9nPiT i Y"sZ,hX*6,n)&nLJ6޵@ێZGIZp<g<[h*2cKOO(P ݇bY0&-vENB5pV@ X}ԃiJ(Bu7x=?:Ffnw;   @ A#kA5WBHt5~trJ-rbǛXsE@{y\[|}z7nPozH.(5dBI+:b8K!8O@Ǯ/5~*Hɹ`sӝ+xR0T[zODŷqFSn+XG/';n.+Y_Y-ÏSx;QrjٔFuDO8rz}/:hC5S44$K5e#PDNۈ`甴-Iɮ*]̢ _"=E Hq?WU$NI2(BK7G)Ɩjr7]%r;rGI4o3MFhPO㋧U]º6 m.˓9 ,Yb8FZa1uc1otjyRs[HZ/5:"Ao .8t %/ ^6hT,/`~?/O {<+ - nSWIřJD4BOeb>V-~ *#]'F|7&atOK%Ps7[ Zvx㰟= "Dgd< oq(.̖:q9F 3FtQsa\Z Rh4kz^;EmkB(K0+xlf^ m rJ1&`5v;JM)W.#*h!%eCd$ar8G*HCw|@(w|Ɲ^OUX / ͒A"ڳYL}ݼrȘjOn*kR1`%qRNBj8DјY"sF닗̟;z]#a5VK޹xXIC5i%ƚ0<,m5Rw$A*PK*PS}gkS=o6`&&[F V{Y].ZiA0뇎)ޓ9Cu7EwMvǵ=yFmV-$ : 1zcӯ76zcӯ7nuDӌ'Es)E"dBcD3b2Sh><Ҙ $R"#!p f^hM|{rne}tk4 ns]cwۉm<܏_8aTK8ϭx]Tpn =>4I3%L2FXG##*F#a-()e)g>}s95O"5jg*'`*]U])) d8eeqL,IŌXFRFa(Q(K25,dt2,}A!i{4/e'8w>8}*JcpXS(m$R 8M$D)3`BqB$,ȈXdZY3xSxR).[$/#~/IV!MlLQ{)@CZ<[Q3BzȔ^:|8ufΆ˸꩚.!!yB'.IM"1Q) R ߗJM7ڨ,rbC(G UL_I?Oe]GK8zEI[b;Eouԡ q EX 9H9/PDʕm1#7mVE/B:*OGRM+VVЖYN>C3۝Br׷bB$m#20'MC6 O[R// [I2'_kop˫D !]698}&7R(W]j%W% S[4UkFDLpa 2N&Br3Eq3g-r Ę^cS.;#Ƽ*6 9dxLp<<)eQWql>y= a qqw 7ht ,) !F 
Sd[UT(1 kQv)B(Ǭ6Tϫ :VQc~ɪV{ϛt8?#0%Ow̍JJqS.s#bLU%2It<*>L,[3548RB0Psch=>DQo+HkoҽmVJNҧl%+:X(21yd3|08 5Đ3f )R8ԘHdb1Gq"i0T|4mDgH!vX:as\T#H!x"QA fw-BjyEɔq ?QYHdIl##ɍ:#X FE3dqsx+, f%0[UR=XzZ,8Yz2]|Y9%~B5?Xc9?azEw QĽo~~r@no1O#.wnl;5b?fL6P̦ͷGwc3=+Dq~#Le/XO7!-h(e\ ߣg 8|W @FKp35aH:(6Dr8"*g]۾n'~QIe P @ `S,p"e@E8/臼Շ)PaAvA~ ܾ]Bo,g:" !xh""L $"nXARsI@oaiBB!"u Ylbg2deA+Dc9R!EZ`X/`Mud2K9i0 jGmȒafX9š"SJuƹ`ڢ ,C _}G1>o8tEk)Go4Zf"u_NOW_'sg4)^XJJr†Ws5/CDڽv킮t2 |zZuI Z`Bu_.@zPBs.zTQ4 b J5cҵӻtxs-ҵ jF5J׺Sd[kUT(L TXtK5\-{XP=`Bnd]|=Xc~%|3&}z% GaEwjuʍ0! `4^L$}z)lF%?FЂ. 4MHD $Q5J"r7 a$q 1%XIYh̽$@j(&ijVL گEkI@gt:C&KUHH')qb ᘤJ$&2U&bDh]bǓP";(.F(EpQ(:FH42ۈH(⌤qXX+ѯ&,&0"#80GUf0}EZp>1ʑ%R*#RD"aX``w[,Vf@8J{|BpjIAdAL$%DWSe/LHr{1 s@w|^[r!^`?"J% =!.I[^ ^H䋗6U?il,ݒNɥ(i >C4A69(EƛϽx+kiBC,:o>TResC,Pͻ׏ 3뜢^:oFR`FjDAwC@TWZ55 k"4L Kj"TZo5qJFɎGUzO:%9spp06+Fn+P~sJL!P.+D;=\Y}cE._9߀2=nk)( *PYYNJC" 0/Cڻ>;HB'b@⫫Y>{]mkJQʹFvv ݤ\vϮXg|1Gb~7v`R.|k*n^_f%-r%'B6.4 ڦ0Co< _*&XBYDA!dQ=C\j@Nby&s_fٍum|c;y9^ܛVƗWlZI4o3M -KN/SﮊNml.˓9-r՟N-Q"2Y^rV[]&>Xӎ^֓ma?̼U0f,䍛hM9^+x7[(>F֡Nשs@-`wkB޸~/{i?M}%94'4:k>*Cz&9 U=ScQ%N.: ?铛7nmJJD^iflOb}&W]s㸑WT۪Hƣ*ݤRwLen/ApFW#<[W߯Ae(5v6kK4uhcvBwWO*iBiV0rJx_i.l*y,1kh^{nE @䵹EG*k%/Dnte]8A‘+9q|bJNxB}wD\b)hC`ViDouDɞ`~/(M߷5=O=TjWCZ, O~}}J;QcLw-hFDöKz53\~7:%hRt wuB(= )Sd1SpqcLۂ,Wӆ1RQļ>-$61{bѼ >'`$3lc$Q- UzI)e/@l;u!;@O^Ÿ^Ÿz16op󻩶lf3cBdnrc"0dqZ+ [ɬg|*Q@ac+ca;Ϣa` hO'+@*j/ & +]L^]8b)zuqL%{owQj|A缬\du~cƯl06uj &-Nn ȩN)dz B͜(2<'S&|Hha ~1>'r[,XOc9+NRֆ|p?Ѐ9C9 0r]xe4x:()!$ǥ1*&DF(Ԃ"Adt>MM?P)G? F+1Y1? ްmd~ČH6;5ƈd9_}: 1F wbj٥71f̵车Ju妐u5)dpS!r*Xoxu+h˯Q/f ES DxD|zp٭'&m8 Zm6c][q#V +]/ +ڰPzL,l0l"d` F/ǚpP@zzEGy^}T= Jkz=eyކ砆7Ne{}\̳Зw#r3v5xm1oHVD9{SinBfq<'yGY, !SE};ܞ>n0 [HƲLKFUQ"L34O zĭz^LP z"ހQS<ĥ"+y8(!c 'elFDh"zR5z^)D+3 ^UDS-dk7FHa qpeOHIzBWJTG+CxY[vŦk Ooo_$*8 =a e7XjŞK%üBثvn%4zEw.AR6f>T( p#(n I:!JAKh堭7o}º;kj}Ew=A~ZR^tj1´jao SK5~Y R8OVrSi,KNwK(+aO" #43b!RYvnb\RK}Kys)uA# A鱀_x+ОL.Prcl Ǧ cUCl%S8qКD;1Qh xs  \x HP 4JvƵN@1/{{{{]elua}ad!&Sez| 8DKQe2 LtԁZ'~d+JRk%"5FT> B  2[e`dH9CK>3+Dnmq'=gRW X$xlF50Nn?:|oT3b8H/"hA 2S\DkbT)~p9X 4Z9Qxe$x% uBs\š w̰z#Q2)yL^AFfD!&\Τ&iIL*u/JƒHف@;E4K.VIn,Ŗꜣ0%FT}㾪o҃3JvuWM8\漮@C׉<`F+sfdghk ϜNKfW;{R6d=ɯ"8!6\gFƥ%1 ^S@h^!Ô-Q+Hơe$T=^QHR[@UQDɢAZtg^H Ea@;}A=_ά68+9$PoT#&(à x]o45Z9 _;#H2*&gnܛo{+xxqц`48yb<5ks\_qZ&(֦$&ž6;H{˘T4NLA;?E+L T&@&* 1#ą7tլ벛EWQjc& "Wׁ#bEid_LѤ 7#eƖǬYW #n">TRZD0}\mX =QTk*; ]15i4_gS< Ͻ0\S3N:o`.nCj"43N1tܺT -d@&%u؈N7)E̅mN9T I&cDQx)+xY!U}EaBxV:o>v?(MZ܆+Qf.\BOCAT-b^XRpدRc؅&mҊpcn?=)e\T+ںEn`Z2cTJA#/*VqFP/fA/~Lv^wVwiky.N#qGZj|qw'ΚEO43J 3ՐҖr-x(g3 }*gy{*ʀ<\Fkcj6hLЊ S{x/#-tm7ᇻ_o:#z?{fR 7@G5q794&ꐸd?#>v< WLX@lAް!?q"bj6@u͐^ ~s`0v.kW[`=[TCG- gC*BWGam͚3'/w- k?nd1{s(pI ݖHpČT#ymdQ[!s)Nѓm?,w3ggR&+qF陋 +G{}K&Yom-Z (C [HƲLKFUQE g25~NR@L3ȥ4W4‚U!3i gaR,|rL:p?U^Ew s0M:YjHE'0P%}+8LOt ND0AxmA|zؽnSt\&a0 ,+gPAy n+)elFDhw hT-LɁZH.ߡPp w'W sB~by)[8n9n)z@%+# &TPP, 6Vl=Pe ^F[sY ʔhWH  d9u-L>4JA.{('!99TA{rI6tgʝM/l@zJw?q<-'j߸Pdtu?Q<5P!z XZd$8rF2)p-'6O3jB%RKW ,#wXfl TޝI o7ߵ'r\?ޠrY0F.2uzWYƑ8|ivHUnp51Hfn&#X*y%ucQ|f{/[XMk_~uI {W&*DulPKzPPv-=4}M6[ oQt"a*|w;~Ne_/N>ե=[w}rtQչujKK) 8jhDrزh|@^zBϳB FP կKrJ<a5ncE;jU!#)=0==N{Z :u8JQ'lS 6ZBR:iK"vN0,=\Cw8wFf!(y lFldR8In0dF8Xڝ{J^9ng}-@z_ҪΦ:`h_boWTQ?%oj >c>H7#77'˵tWل +X^ssw|"pm89G?~w{nSXqBȓ]~(nk4yM DbDQX E?]H5FR$vJі2mz t*b$l$b-5'-iؾhDh#}#V`n%dШûQ ”/\rYqYy/e?x`mXՀ* ]׏e%Vqo5Ka2^Cb\ӆ>sڨ= qZ#ҜI35)劍DfTP1Ԑl3K74v1f!1(ogLj!' ˾/a5&^>\g\i!RM^_J0ČlKmCDñ'[sg'=Y=Zi4\>WOdtOG9"ristE%ט8G%T ^pϳ=2D75نjUpMa C2 -lyJDŽNs`)$ pC2pNKI䘸pȹb!pTgrT"$W'e*vtub1fϡ+_x;^QpඪWL!2Os+ n:W nz:CC?]Mtod z8&$&o4 NQ B j-#IoYψgû2p_i9w`;ia+ dyWF1,M6G2k61rB捓jF:z%4 q&`+l"BI͋y]@µx햷72n|&]H+jG\1KÙ7H㘂pX 15:vI)]SXJ,B Q. 
Q#mXXKq48 )OH.A V;hliVo"eWUvd~V@҂Fo?&] [ʃſN,V̩N7ӎjOiP7JXQt7aQb9ɓ\RHe3puPU^ouy:?5[J4Z-ޕćD=²U@#'̆q2b Z(+s; 7LTɽ1>8RBJr]I%#YZB*\XLVzs&@PA:.p@=)e=|CXЅ|"Z$SJ~}| ڭ)v;=r`ڭxڭ EtRJvrpM*}%SPa}Ì9X~é <,a ǖ! <ʘQK9xf 3j!P]ZtrcYO$V :XRyfMPx*7]xa%/􄝦zYWM 25Y>X8%RW3,Fz+"Sc8.HS#;,%i?\+ Aٷvc: OUdbLS=zEi.RUé"geքY8dxs!Ms8xf,DZ)Ȁ] pOSnX?[> (AGgC;1O`n_5#8)ҝ 3)˄ W[t \Ҟ5(_yqt_[Wj|Ckx_` Ws 4t, -TJu;%Tn.`v>(hN1s?'KI ä пO~|Y}Ot.x~s4&Y4}t!w͟LL2K3efʬ>S-`rK x"4JX2̣yo"C|k sm߭s?MJ;:#bqIX^~ Q3V۰7F,8/R;KK֥X`F<'Zxg$L uQMGЊTrC :"A7>/Xp2-ťIqi{K1Wq)ųٰDtoI㹻o?Vd$mtk/N>ե=[w}d&ٺcYǃ f{i9-4 gF.nαn~:Lc6Ga0E07\*r! #g&z-i08E53rշOc uE=ـ "G`)Es[c"- FhY :j+UEq٤ch, JF {G  (MJ)E5 j@*ĭ)qlH턌 &/#JfHõNQ/-hjSE䅸iznJp娖#%wLy̖sdm`Φ^kb߭Nک=xB7pA*(F?|7P}%KADL*GooNƝ/k>twu66؃PL:~߹2b(߹YCkx>=d˓O߃N5V+Aj.4 ,[L*T۳ !:I 5@T?HԞ8LbrZlp֨J!yo r8x≦ ߝL'Bod,RU??d4SXR=I1aSa2FU451H-BD"@8,')Ib,)QsvY`]eb%6 ]tsoؠeQ;D'^Dw8 b=;Sm~w=\|ߟqڽ^'&EbDzvan~12/q777_Wfǘ!'͇^FQCS{dshpn '^owpha$mp+<.kd3yw:fO'?]}^0ZK,V8Pz "JgmbsfǕks撰ŵyRw ^C*;7ד딸X6w!a,?[ t1Xo=)}3{h%_Z^Vxmє˾9e۟xtQG{ HFdL{mdGfaJ,ͼ{|L{|hn wm ܛv#r^C ` z9@"83 %r0?v$G@2$b%aV$,aTFŤb@(*AAXyS1R;mxV6jQ=""Ln5pǍ؀%Asrc1W3SlĨ!'XD#I_ns8]j ,bo8m__j+KE1߯zH#q(L ɄA`[p晧LD`(@8c*(`(j !H &0 P3Tg-^QӋںv_}rqֽp2f[k)KZNWXW׫EZ*b@ W"ᨚP8CP:&qy|BR TwiRmsPB,ZDĴ AS&C-ƛddnCYdIr@q$}#kh dǾUoBw{+)P!6cG71ˤ"6I2&]h%g)IYhujjQmnl.̤!941I$DOurxӒu/PɰL<㴄"&$}ŝ$Ĕ]>{ 35: Yp1,zdBA pd}Qt?Ԗ0B@I0-|@ qE) M.tIJ돚s9؈íӜɣ{2fH=Ks;GMd,Ks]ck2fƅӔ6|{Xc֜uua<+>E7 ~?G&h!\ #AǕ"`* |VmPyIǒ'[–s" ]!2AOѭ655„} N43` ¸L[pB53Ǖrcա'ɬpO.D38 ]S͐a_3$՜Q#y˗BEJ?xe"Jf=sK?}{@K6x&٘/vgPR!.o);w#%r_mUɠUFT+/U@ߒċLe 2z@"§$;ݘ3z^O,v Zґ[>}sS@fbml (+Ȓ)%7٦ߩT1c4sW3lO9#Jh?Υ@ն`%;cع'1F57S+>T6G suKRt/\nj @sG-M82iؔZrWڠ{E`$Ɔ{x¥ ;zBH2Pd_ACsa*AiOu%p<9QYH(I]9jrSr3”ĉj(@͍=>g(㨮A#, }L.ab !xaBy t{DNmE7Ʃ6&r(:΁D#ZEYOA tHR{Na4!bM0{ij5w<F)U1&MFd# YC1Phlq8J= (ܯ˗Yl=V8 X'͌yZFOւNG%ZNZ^>@[N4EgZ(ƅυ)gG;L"FybP!= (jè\g2% AmFU1S+DjNi%20)(s]$w('*)+5 ܡP-zk`1h;hD;ɡ#dӘ!b 4:y.*bAqt8{OxFϸڍPR D.`rY,g$F)$o!J鞨uVPiѡcj9wZ E`6gAnrhvB}}>+N{S4J< b1E9"Xo[gkUBP@*IhL$GIHLT0Ѩ=EG$8tX)} -j i mB0;en$9$Š)8L&-l |ʖA+tXTғkK?&] ލn)'[x'kP?vZ RŐ%9-atY+37e2xSIr1 -:^ %d6ϛFLҠDtj)w&-r./U1X}هqYtlֺC7/ׯ^,N\;RB9W$)nfx r@?yn(V{q 耼;؝wΛ.θaֺ&Na; Z̈8R%9{g_㟯a_y 뫫n!+*7n:I)I߭?OjvlW 5ܼoC|M(P{ X |_Kz n,m{hN 0n8`UYA"{{b 4!4U{nhsG~mK(QD:og.xt!rB4vxL)\ٱFCJ2JP!R @eё$y@"h -j9Ά%6#WcXk6y ȑZY6Aqs~Б -)oJiwէU{o k[ޕhZv3+!TG#{V69Jn5`w2ӊ=8fsԹgZ12 Q[4ĉ(7}BiԆpdb>Iꝼ)  d{:? 
GowPy8K!nofiyiڞ/GާBgqs6gskx_^tHWy*b\vfm{A3 7=X 9|yجW%My8nJv%ȇﻹ9f=U)&4E:%V۱n\^кrFuJźS2ۺnCh\E tJۓY8yf|rNJiS[oW:Sroі]?V1OY@,?mجl}p؛ŇOC:X$zM?m̅ȯ_G2.xh߁_L*Z] B'P.߸ /o |خxW:W\k7m.qx"w.Vqye//n~%%=̆)3ӟo,4jFh>I%yn\rf'yy}_jTr]Yu.iy3t҅.0ʥ$ؽ]bz|&$XQʠG67=Wdt3=/ۈa3NF @Cov#bA-ϔ=BNhQᦻAFuܹ3{IDDl&܈?;57}i>{-\a=K:eJz#Jq9W?~n>-rT*`'u9gl`8;j`iBt C)kjY0C{v6 d=gXř[[u[][ȢHUik}>Jn͈.\q~Tswo~'1@-jk(_kUuŮMmCȬ {~sxsL(\w;s>l[ga8c$APhtdNDk)s.PT MHr>*_{5{U6+}Ы٦L'QrFq E(~xo UM\MVCwMmWZqA9ލPU"@,˃T:HЖrj děDy+ն"$S4k& -!F8M!L[pB򤌶zn K=Vu=xAjr" V.@㏌S&y(b+zb{}}[Z\2u^5j.,6!]QjҡDQS,,bZɅV' $IHW׫EZjaI6#j"f)L0JB0C@㑍ü ;v~ &(J=XVqWZ&YTNكBc3~Y3]*RTxٻn$W?bHt dO30xִ-{%9cYm}t(>)~Ud]H~\]j+ Hf8Nncfn~s\0a<;Z1ilE!̾H{I@'=]GݩҤV> {:R{t.]VVw9kug fHQ6:AxQMV"(hT]ϰΰM=FK?M D?z?2̣-Tu~ N:+&Wӯ\%?aVݓud4Ѻy]yf=(X`ǝ6DrA]֮πOIn $4m9)SʎIVClP+6ljx$0%,Edh3x0/WJ͸CC.EZE:,UAWH2Cщq (^"zr#r@yY% I`3hp3fR2: +(;m(zQyM^hμCfVfO0?_T͈Ԩ/NVؖp+T<51Zw٢8tC9@p(QrN,p/c%{V@UfO/3Ljvhh/XcUsWϿF~^ݱԱfaׂs厉ub8A.ӆ @U勓*0x/ǽԅٙ<O4ry XZjtUi5}n'~f zkZ|G+O8PuBlfNv}6LyfYjMr6#-!GZX~CҶx;^~"kq.s6=sնE^0:0ϝj}B<_9ll L-HQ=CA]؈l|:3<=T=}jͿ?]yKΫXtňIwԿx<;\'% ''MI #d] :WZc*2{l,]: t.mSPEԜ6Ɓim`^*͖#B*RJi"(Re+-M e"T|BG 3 $dh(~19a (~0SD4X Kڗջ&Mo;nh58IņM CTf^*_pfewr:JWG-ބtf1~6|龏]*O^#-;;W-9S Q4t;T hf/7v\Gz!hlb+su=t;:tk^)q-VU]n&[9p^[Le5ݥ4,&@1A'^4&&e-b.D0J2)X)CJEi=Dedq\vcFw=O|ܕ>ECIfd5E$SI3ZQ)X$_86;* o PK>$ڃ)EoTc3 (SlxPUIW|ZkRJ5LkOKX[j4QJ()Xt hóXk2PV%$a{3иe7'R;0HMP5%]jTT[1vF}]^]}ی&r<<{` .&ޯg0q}ޮ"~_"I RpiˎŋiXOXkk(tvN{\QRѣ+7fȽ(ƪh_U1 +T1N=<};$GPzuni;JܬU8U1RcD MViK -S(3%Dt0H"+8Am>HacKH$oqIuiĽX;ViۛUc[2Zı !{,67.,KX4K*} _Uv$u^S#P Փ?~g{_tZQy. wPl&+QMw=~q庲sBxdֈؠAEqI?'rN?-(Uʄ P+h_7)_HTd㚙O8u,gɓl-Oڇ7d9$.yckƴ6KOqTB2\|j r \E=čwG<^ o[R]~||RE8>qo?ud~ccm?cOn|k?q;S576򙭮\]0n4<,gw$ ax3}s}Hzyu"֓8뙑q>Af4')NwS RzOw->,/ҾB YLg!OxBLX$YD#@ eƺm̅<gTsRϜ󉑄=NoH鹗P=~Bwεc- IU^ȂܡS#T[I &D n(E2 \0EzNx$v8PxF$aQTM:(h'1fa@󜪄Q͹%vh.*o cYbepEK&jGA VUH1b$neӻwR⑺F),k7 rhPCi~ - z=6b .(!WʜsZ 0:OY$*) DwKbs/Gv>g>CR":2;QCIY:ǑBK m8WT o_ydRCح6fk+~(RA$T9![L9%58Ћ9`UP<|Nbr< S`ǜHDYY#a |W,u$چqTqۻK{;@)׸4W?\o<1 3j4Ss|yj=?ُFO͓#퇅6qVvzozuz!Qz ,oƿ3E6ZAuqpn!,TY/A"V J1m2)qRYJ$K@L:|$Ҹ o^\z yTҫ$T`A8X;m|HEqs;EKm>W ΁>sG)9xՈU6cc -]ZufCLjɴw77Ч:)h;ދ]f|5jbnvlhȪ`<M9p-./ƍ[IZ'Zp_[;黃V-7Z Kj˧Kd{]iVӕ iG6K8.Z6-vd~()03y4c.}r/ R$i0OZ}hT7zͯD%kknŗr/ƞTmͤrٗTݰ((9ΤiR  @B~Hl߹|G8!-PhV\pm$1H%& 8\t;i; 2>V/GͯC!(%:iU5h̵"e?mֈ>C!4?Ke>DKȸD,g2˱uCDy︖FI"{P3D>3 X;6r$.F<;~}ZOe([;mI2E0,r2"L rg(גi9=:i?5?&eW5xmŘOFT)Г6&MS˰~DM4J昬(کh;md3cw{^pܣK$yqWa5މξEӖߥL0cR}Ò#kK:"a?<}7O=Sֺl߮7d: dڃ6ysx)MlqUe }Ri,Q.ok^'VnZg~nfAC$0eg ȫ@!ϡܼ3"0ɟja{x⫫ɇAn?+mogڳet Lֆegprip7%,)Fm*gI)EI:\"UJT+\Bx:;tN޽vj{nRW{۞w%4T3ݻD].3C+Anx'%tw T w SjL[1ôh]h(,}d,ЂhMTTw9.뉅3B8_nje(w Gd"g!l.psn0PYqv.{h!0ruPVT mjpv![dKZX#9)WKhay<'N(sEbPpbhq {l`a1.a\[,ŢDZvn+zhNI UD[!hJ:uPf:HΖsIb}0.KG#=qlJA FLHϘ' B:QRB:[0 + /tGOzlm Etl9'"Ǯsmu)Nw}[^ˈlR>6d)gsf<)ΌX \qf*`p7q}Zv>,s❸lƺ1H Aʞ18W-JԟM$STyMZ0UpOswb~( V-hZhOmʲ>1ߍ>]F(gU]js;M1Uh6͞2nq"LMkK+H4 GgƼb~ļ8ʼnbR \ɵi_XW#v+b~B Zk{.ջ.A :CulW$rlT=Hv^z#Y e !W^Z'1*qV^kX$l،֮z"H%?V0!f$%?G#jdOxjD(3ۭ&H63/)"Ö,xhfJ)I@.U5qD_%ŴK(E JsvRqrn ^f (cr(XL Ч%vuF{OPVlM9c0sB34GV?JxbP!DBOph9 1aJ5|)EZ){ϱSٱ4 CW4B ୵uEl VM$XlT5yΝֱ7ם+34l׷ole6->ߙEbvçw7MǛQR0H#U& guDl^?]BJC_ZspG$PegF֬s;6S[Cч&)ln=ffb07!ҐY6;AlT-Qgބ[)w<>bĘK.?*D1yՏܿm^=O?Bұzw>b[t-ϭb.j|_M~8>5׏pe+9q>2BF4ܽ(qHJ6YJwz}Vzj+tt OKR6uχ!v=j,D3̫0-v Ҋ EM04L1xǍCa GJk* L#"5# QU]#3*d̆RS<էn-+[BYy*UJ)%UosZ|w_`޸c۾c)VjE&sLd9:q.Z_O߬7̓o͕[ +,ɉv#hnB#! 
\1ws`̡]=]'1U/oFSq| 'ŎP]կOTRKT z)Ǚ]fkꃍg{>_cHLfZԅ3􂪜) lXJj <IE^`JB rF%~=|^ p8܁F[< ]~Vv߭!lbvҽR m A`y A2]pD !f13P()$#3 ô9rŅs@4BBSJ\Zh}f4&U6.wؘ,"Wqʱ11 y{2q^6 ~0J \P<;1ia0ˢ=J*b"wvDEv(j6V~Np|еVIN:A,3.Bz zk B 9rkaJm -5r*gyq[+AK}fZጧ Mg9L3xչk 9/=ln5ynsn5ǹמrA=ᜄ\APMiٙX(ml`}ez$Q$qdxP;u ($0g0v sxID})|d ^b7,FvM֤VnBӞt5 t4g-&4a@$E:C(^+*$!`` tB`Tx%-+%b 5qyNPQ8jfG@;?mal?1vґKE2cCaL &S0i!60ɜBJI`scSE0nCFh'rёo?p,)' C6L45 Ha20Xta JpX`ԇq!$wh8Q, *@ H%&0yi@P&aI<__c!|ۻ/b;DlD̞FDd&0aeI&rFᶔ`b¬_Rr9#L|H: *a!r KjpN惤5xmg`tj;( CʄQvq{'\:Ywط!B"n0!QW)o~-?h6B/hHLK!`lJVF,z!yq+A&{?n_?UCf(ZiS+$Ϣ\/)MGx7*v3![[ bT3neA [{HBC\E,i/q>%ohа7#/1Z}q5lYQ;ܰ5.R4l u211{{<ܠtmVv렝w3Ƭ@`=JPSUBkp-ԈWlW_S|[53HQGtԙjLբ+ D7o1_H9[Der5s2'iwirO|l6w;N@vR=D~n{F>f!}t_٨WvqP}2Pd3m%.\t0YĨx GKkRfBʅFc=ԯ˯{sds@.O'ӊMa]jCd-y-?1U ݒ%7_wVFݒ( ]]$ѸNO8V@ q{>v"} eY]ӻ1H@Vg0C+rv=u cɹ{%!{-hz.0t+G*։|h[2dwpWⓠ[OiPz>숐u(U{4<ys"3g!YO`8.|2sr{w7nN]'fxΩBЌz\hsI ݮ& VFЪ6m][%mK?zrd].!TG91ŧ,z?/h< M sCI*ngb mFƇA(!ID $zIMdƢPed` b&;BBI9<͈ 9h>Yi#vyҜ !A4nUKk+K=t Hti0i snȖcRd)ēsthZua)`/Lc=Y-*Bfcek }P%FѪJF#&\ _*b~|Z3Z|֮?*Ksu&fǏջL׼hf7++hQ#ؼYt>6AI;$`z[MG49Sxc 6ElUvP5. Kʢtq^Wպ.y%b}$HeuJo[Iʃ+ۃMT#) qgʗURWyDY#B&KW*3ArJ $~&{|.wyC%>"~γB?gf@֤Cuɳ@=܎IDկ#Ew}ԨwQGN]8H;6!Y Htܕ5&}djJOE)<{j-n:کzh S3}}V +YBZU hL:W ۥd\S.1h=.m z 6V򒵱õb3\h!%K>i[kGxIX ԕjɴ8w̺KZʼn vDClSܩy> .jf!fN["]` #VT=Laݨ\Jھ|hA(M`)Lacnxa^yk|(տWiԾL.tWj= (*tnLA9d!퐡^@FJ3Q?d+D q>X !|/<-x5]M6,Cޅ-= T}<f7sb4txHwlzi(eON*5N(맂NJ{.w~_D+@cPI{osQ8os+9 (TQ'- 䖴%+Fy4z?BUB˝vqsmDNj?_LOY'Ƌ{k2u-үV/.-T@ӈʼn %{ 4/ڶ^_k jvfJ}&xRҋ"aIޑV`k{'b&`X, Rly)Nn2.C1k~ljbEqUBx | ЩB ͐43cʤ4 b1+{ƶҀL;kf?O*"yɓFVD:d!᳟%9nȡn)ʴYnϫ_ڽ gێ;!ۀSo78Q#2+M1BPUp"Ff: [7p:dIQ8|pLڂr:<06f5$`'b%MAARrǚrVi2_VWn(ŏi5fԻmo3Qʈs( X1MLDDb+^&Q"oF-5j]p]LzS7I]MOiŬ\NY6 > 7Fk՛zWco)$su44škpiM+Cks 75{v,Xk`Q8@@nP"LowUT:nhm܎Ad?ϋ%}6i0g{8ϲOO{Zs1P?Ldy:KxڹcQtz-  ׌#gbm0g Pʝ77b5UF)TQqPb*h̄ 21$<4cVJ.UJ ML +yfH39τ #/1bOReE0l| d5;9)&VEY;&Dnxˍ>uK:a?ZH{JO7FcqIF".p e,bVӂZUdAqMr>fj%gtA7S2r}O>q*KYM(AΤh&7a>%|/uaQ_%QGLPpr6o(ځ1}8@u y闣8N}R5|[(*ބs׏4;4}@d|tG3F++Bl7OeJ$SvC֍+ ohH~L:~IOQ_qyRF+"Hq`#J XMU#!^T_}N6 m"j棲pK<@LG(! jԩ2kb @k7cFԋ bfSӈ!7M KsaF@!c#`$$B(M""`յSOc41(aT9^bүlZ$<3EẻpF1J#Ы B`,ZɣT)Y*CUJ٨➬bk&< US܊(3ݨL2&6K~1'1 iE v f*:8(.D4QIS-`b(`* ,h$Xq?N|iM p vOJFYLU*ox mg1GCF,1c x0HF`.{=*y?10u/簋|(7+0W6^9 o6Ki[r{Ua"[ E;؂:|Wa2 gBo?LblSX6XDTrtAp6WWOf~37h,{ +,u\|k]S律r,]>,Fq>͓/0vӰ4|~}5wsf?'pw\+>ݝh\OO口ެd5c{ga'by1RMc{tݤ3ySt9!lʑи Њ{._8R`GŊ96)Zqv[WE/-([Fx1F #l8r0rb`0i`\s]q'lۯn-fsgA<uzz=!O,켡<M80j`6j&z l^W bKuѯ(bVuY7eu^(Q#D3;CM8(NbZbXzn :+q(OM*h?NTKT$C}R{^iѰeg)Z|LW_"+{3?"Dw+.&O>qѢo6E0NaG|z3yf]#b//k:J<;]pK307.d ;J!]> L $ $3B KjG $2WGޔri(kAHuư.+( $ӨYDESUYpek 戸 ]c T"rc6BpwReH!`@p4F,4j&ߨڣZWיlZj]mQ'P D*7yt|ys)s{?3SaF[ͻ%-yҁ#'w۰eINɭ؏~ûba2 C2BF d81geIiFQ y0$.;sfHFg0")E3,)Z/;%YgmV-zSgq]`,u?N[?Go0ȱ?WVUy{fֲrzXnMjqY66 |m\eu Yg ,]3YrC&F wF*5ޯ/;8hhWO =23<,a%QUeuⷓ[&`(QԊNOM.ۢ䡄`˚l )GEnFޕq"NiÀ?N2bwA|¨b,Q[KMfW5V#A"[d{VG9Qr^(*1r DI%_ʄu]V-p~e-Ѩj>ڻ6Y!}D"2Amh}Sٰ@N" ő2dnbOy.Z gomHA_ t5㘜5&+8e5M&'8GBKVfnЉ!=$C$! 9r/?O0K ouΡ UEgJqhT?B1R-П7]T*|b>Nl<;O X4?Z_Ed{%b+'hUoy.i*(. s\( GyY8[%/,GrJ꽅W)r_,,cx?O,3\.qѼȿa8;_?ҧFe AP3C16fX)%z( ˔4Gn5PƳVK`gYm =#>ϛG|:,]bJ(Yx+!KoxCN;AXNTgU3ϔwքhaz!7i;8C&Y ^z ~ϸOvg_] "CJPodȟΨ?#,r1!(QIgc䙰c! ;_0;̉ $h<\Ao2hZ#X8HǫtEiWN 8K7sP!f0BIٯE9.8>lWNk*6S\&u ta/&) nM_" i@r:̽-8ݍiz)^x|1䵧=^Woy pW##?^Dv$sE{I(nk%|Ԙz'low\y`m@?g& }=rSr*'jQ 85@Q Ƶv*_ťV\p#,CX y}[iPe(=ߖ׸cY/R)tEY'1m6qQ:jaXZŅKbȘo/Jize3nm3|:;{̃jͼAB@kK8b? 
\Qn)';i4q&uq0?ؑ;K>6I_+ ˈ-hјq (KM1)£OQv0_,lB~\9٢a:A\"Qу TʆP`Ua!:Zsxweʳ U]1kIFEre9&l t4\_})H 1"+-[$~u{oAHs`Q}S*FzȄ)-9,=QoJsнjp*)&y9gl#A?[GѩaTA}ŅmPU )1-oǠ}Mn+=JckdOch dk>aO|?xTXَ}v.+8CK"~E UC% }ԀZg`<:"FVKc[#歁6X#TJzi(JD`D34:Ђ)W?y }1xu*z6}(eIQaf+:5 E}8^}KIjU"u***ˣb/VbugݕߤZ@ FbI#CR쥨V(2 n;Jຩ8u3@ue=O1gkxUJ~:%Ld^ _SY46UmŦ4]pè0%w̨$P"Ψ4;Ui!YIŦ{jQH3@%+ @BP1jrIZm'S9 \gzi,@߇63;h򾆟fh⺵ӳ {HQJ6 FJGUo+Ci;ϼ7=nE7b K$RQ'"ɁLIM VJ-aez{|AșLȗa4,0^`0(f:G, C-9*)EJRδIEe.^l C:(K-rh,Do&@0I U^XDRgw , j.s@^qX0!%xyJ,*y eݥ<Ň( ("gw TJo׶PYj7M8qUng+!>2Q-zK { )=umDrKk'->q J9=܇AsD19pNS/MOB#9ˀ @C/d!ǥ6:l֤6YNG2h}fՈO&%:lhΡ )a6 BAPtƍSSA3&Ͼ;{4^5q7EAWcj%& '^OyWr2`1u[_uVT -UGtv&Zy0pl,Q#\R&?ޭt Pl0~lb/5RrL lyqG 2 īq←>,&]&d,h,Т4oL*7]yilI5$NkBhh,!P aV3!8 Z#CFC3:<8<Ʒ. PHMNQ*RʉV<~PpCׯ?=\/}(<~,{,O^z🧣 .2?|{Ɇ@'*=Cн dA!^ѕ ȣ^+^G\Gg1?x%`,z`C [gqCPDk+.VSeީ(?~pRڲ@x&L!vm 5iﺒ+@Vi1[=}ߋRͼߣ9 `]I4ԩ<9BEkO샗ɵ).iO~G0ڻ./zj[딷ÅmKxkaO EwvY?oyZ/3_̹wj:1tǪI͞_zqR׷m|SEeVһBPƀEEZ/@ۗo܂dwڛ˳dHƥ R9y? Q}HCU1g}{b}NH3![y4RIaJj\w,J@t+k@cRxW@9n'"qGnG1hc yն;Q ȧ-w ~˭xUbƦ(E Vzx?9S/Ʋ>g(0ǐn}0.tǻ"MhFv^!j:/GYC\J ? rim!X1HzPOr!|I@Lp{|U bnQ xk(;o</(Go>`5棍ʡ̢_lF;~s~5wC4{>7yk"߆baޢxv6DfEYp;E!0Ǡ!礿;zm(e0ǐ` Q~1d1&0s]<(}Oy^8tw4,/\lG)ч ix s QƔg ïҶWԏ? "#O|wC(wNٮsiQa)ك]dacP' `Mo&!1<9{CdbQJ'㌅)\@oTE8?+mt^H]RSO_uܘIMj I^81xfTH=VB! &V v x)\=1# b>IZF G4O'b@DE w%Ǎ#Ia@O0`w<Qﴺl3H^]dUʬ*ʞ0d_yDbYAпWWYq,xb>kU[hR$e9:XY4$%W-''ou$+/'_ !^9$:ѱ%gv>ژ(uipw[IV_wJt&Ğq!h%n8z0)ڧ%Զ}瞧۴;?KtY);IZ"_/ I.1|Ni8%CMMoV3W҈,Lo R>?[ zbDöck/7)k@(K6i^ y帳َ!߮ʁ3_.5woO΅;Raj?=cr܆~-6\H&1c7~׷{V|ź'h?ozw5)AߝFLu(:5bIx$}8ƭ@oR}[cɖ"Tard\}A;-}o5OF RCݨ~G<Bjؼ@(Ӂcf11۝- ܋A;Ju!b]xIJ^ qg+Y)w$'!J8k?v"W; i yVҧfEUkzZ19">Gtƌi8bydxdc٦Jmv:0:@Y@q`*.6IqWe'{ 5߈HIW#s[ JTZ,Sʯ wuq$$U3z`U{qI轣$hPaZ=|Rsr^Y:x:(nnQ]HZN;F2+-O)ui5cxsyx> ž5%OIin[䑂l@j^lP(㺔}|H!+Q/mHV|{ҸdSQ`t헜I0IM'[i?Ȅon+$g:9< ;m=>Mwޚb ӉљzIh29cO"ʉ,|p䢰IS=8rO8ADAR]j8=@(':(}MQjtXmnn=1C#ɦ]z2fܤvmj &::'v>`LTpE%Q]"ӭcSbtFnxtezi`8v 3nǦ[׷׏`:^JAO(+Ma`y9VĽv>W 96E5%Ƃu e@ Q#rH0|[!.W K?Xjm+jtӈtUտ~x}m~}mq|}Y5q6_rpč_79M̨1N#F$Uq ~K#믝kF2q$G.}YY7s[<-oLϋGsݧ-a}bb`˻u&Y܆ېW0#U nnV'Ɂl1&6`H*a`\< &Y-qN2N0s.;o15A!_>uTI Cګ&;x!cYCglemC515(ojLjǥȱC>y**;1a j񿯮jyO 1 8n|nб "{Ř0LFgᒔ_62R5ϩ `G{XX,v۔_6Mk*˾ OWwMz${Ƹh5Zi_+M77;Ig !z!uɓޯK~X]<@T>R|sݷoq)mF`CE \hg}E֙Q06^Ϭkʝ\RF#\*)W䨐\-:I`UZ8݅&qyTr}^Bhfy1`iH`c5F,<͵|f|vx'5,y>dE]]:=ݿ1p>]>^ +2acZs_Ai؊Ҡp;*ZĹ1ߐ7_51uj:џ?遡jj>x e@^X.=V<@OD2jeo5|mFZ@[Mm=݄VFkIATlD$IWU,5u(L(PMHTi9\`Q0N*̂TpCq5Efûr;$u2C.|_2@ä=MzZ68s[M'ϛ1u.\r DM΁n=v4ρĈ S4/{#4!/54]gk? bjdBDxåI3LMpofl{ 42ҬH|h;e1zJ ԜcvZp=2BBHH/Xp2drOԜ+!m{i`)y 8SҞf`UĖ%dϠ'N  5*+é (1 %$2 &XR RLdJ\Dd(8T9tZ  rn%C0JZQ_ s֊<)=W)ޥ=c|>E-u\1.0V9|@[୰ >1NԂ-Q06o 朵K:%~Ɂ olgѯF΁.7`AмJ3 9sl$$puYG㖔c9iy+ {|uM9T ? Z=lnjKH0Tל3Ip"8CY׎:+wlYgm% ?ٛk\y! 2Q=6pb9N۠m]u9? "o5f+zܻd^ NR ~-cy2ߴf z`7kuc׬C|z\wQ{J0`%^]=ܻ3\ܟk <>:f-/Aovռx ^a,(Qt @S!Crw3{V'+u3cFrׄ_ӿ{YD~ܳ1*w3)U# qȟExJ.S ȤWTVJ^EŤlx%lpyYsxhX.&`SImH10eFZBvj, 7)B櫇],s Ezr8Gִ%ylT_C,SӪ|vT֬9)a3L0($: 6_rc="-c"TIJL{SNj|o"9fyP$R rib쬥ʐP3ՄJd !XLi.΋cz;g1n$DL+ӼV4Y4Y6_rPT!QjJ)XRN`] Kv1Fp.*&ԈɚJ56<)ᶈpX67<q r: uRkS3⛽*k!<j? 
{_CMd=p)SfL&vc(n]]`iEIMME?vq)TZ Rya_m6ھ Nf+B /^5rTLP:Pp|)=XD+)/31RhdVKk"r0xU"1qHRn R#r"ځ\„rJzraq_\"?1Ix iӅrK#l&1U\ȯ%mWEZKW7Q]S({\VѬ}D\-tH+J80|Ziܨ1=^\GCZoBNArM&:7ccBm)LAƃgʚ#_ˮ[3ẼVCG8be;,RLԁfsU/nTB9K$<t.9J8[&JO3Ɩ @\Ia8ac=~ibGSᔝm \G  _(e+9IȈ3H!2woe諱}25b6Z} Ѐ9}[raz *deS-ASZpM'R6/!i ZOcLDddX|tt[2!4|2)ըp X1ۦ#IfQ SQ(:Υ:w L&KNϞK>wμ4UK,>Tb֍Bu󕁋u0Ycdznun!4UK2Rq/]WżB~%sRAVBT zui4fHJZIR:ߚY(pG񮁌} V(Cxòs1~ke⋉aS PDr}&԰PAq^ck1*/JU8+I69G5 m9, \}w9NkL]D]O!4UG0u% W.>m}V"`3u󞨫u !'^:v;>|0=)Bp#_1 ,Fl{zd'EȺWDuu4t+G/ xaNMˇ]~ @U^Is/W>k͆gf]tJ`jí]j>[gq5o?V-RM(pHܪvnΫθ3hB!qڙ0xA5 F[n@Lv-Z|0D&F iO$2jq;B[Gs{Ä5^9FBVn],*/@Șo *j/I=u/N0Q Pd `9&Ʉ^P=}7 Z iՖgo({}L3&@4,@nU;zv_ܩ9d|3-~4bd@mc~K#3dM 8€{c0'E5f{⒄ră&x!cyHI!_N8NR$$X!a]*>noO?(_m<0%-x<ñqk[dޓp{"d" L|Deڰt7_u8H0Հ{RDLŒP$&$Ovd@]2g@=Ol=Ei!cw =1J8\{UVņ ?Rxf)1#X)? pʑ]WYߨr]$̾3ezpO7 |o~Xfï: SE{)0}=SyuO6`̺ܧxœH H*=ыGu: (ANԙG5zW85t \T}ۄUkܸD][ 9q)0u y`!1[K`T#99$G-ĂƩk~Ddk :Rw[|EOf(m^my6\XvR)fvf>E/@ۇ׬uȄJ!KچïpNxG"B᩸.{8씥hr86Nd%gB3:9I)]IlO}R%{;Yb\ø0NYu/vAa FЉWG}J%,vQ UEɀ @Lr^ZB(4 Ipv![`F{C{5޲J p 2@p{J}%}90P!! 2_ zx%A_''PN GeS }NWd̍qj |eA6 X}e <9_)U|i g_pHlOܿ;9a`pA" !9$O ,1Uл9f m(.%!e!ԓe9&5 !QC jo;'bwD:8 f j27V~QAD;$O*ª4TH߮860/۷);S`t.҇o價$(Y$1)TqQ0XfQz %w)MTݧN($+(=-?2zG Gtb!OĕߵhW8²I 6alZ\f@{R4#_B#tʲb\pX7hYj'S)i=j #<碖~@9m]1_~(>o휹th10ZjQfSZuu_Ɂt;>yhޜP6oǁz8,֡Lc5S dC^a^vCW'#N :| ;T0zz MtIXΡr m9]\ b4\ F}1(vfni pף]m^C/7΢<矞9Co>G3/.>.Tvظ2^85E??;Ҋ7iHS & z<٣>x [>$ !x]Y6CDٖ|Z?ٗϟčĹZn~*)}er}GCG*B=QC 9q)YB]8j'n2pQwn4gݼ'jBhȉN+ĩwy k["F@$R*|%L T$kh-ǽ 4V's,6/s"U璿4,ھ|9iYW_gNegӭ h 17Ov5"CR1V33C@(8 ĉj ]Ey>xvDq(D~VCZ VWU` Wu)xU!r-L4-]U)# Xu:H/}@aiui>eĂS-9]1vy\+Er n.s+`J!'r ʱ9l .hAªg*JɤHi,{iI +ftzKO1^K<3DbKLZk⭍+њ hϮ$Qஷss1D6!f|ڛ`l٘ Bt/3A)LT`}02c)]Wp." RH?=VAc+zeu^\H LeuT0Ρ_;6|† }x0zm%@΋uFr3Y*õ9%@?NmA<BPk3ƶQDm&CeC유@Ѧ%?{ymj5w\ s2srt,6] C@ވ;7J%w"}N]Sv =y ujl ӛA4r#Jx;x:dp>sOxxpB ^7lzs{pv>5L5 [kAŁG 8=эQBq-{_?,q!î:LLwp<4ՎN(txZŁJT!i$z}_t'lƇ U?^Hq:qtܨR 90Q0@|@*(G|szzA4ϣhJA5\j+f5IEmК̰io * kc+$,mve$ejfe7 ulĝ֚x7 3ϋ-EPgwj$1bbv^}7J6bZ=R $r"#hgg#)x Ii5Dyw djE'j@4Z9rG"no˜lʘzKX`,?d> ɼWuـ@|@9jisYTwyH&Ĭc(k _9|eƋ𡫆5<8#cb\8L'_Fޢ;uիs-W*rλhapܺ , ~6pmӣ 0V H3 HJtO'y0S $½=g䌐TWO,HE;;!Q86ώ䡴nJ- !-gZ2fu>K>ol)зzԏooK5+-@5U)HAjȕUc zO%BK*!* cV?{WF E!@Kc4лmݳ!OJDmOFY*M&Ȉ̈`5`Aғ(G'7"8(htއ<{ ML{͙nHc&Be?Oo|s?F]~o\By(w|` :˿3ݙ~66A??]/D`DLPXjb(L(PI:9MWf& o!JG(ǖ`QX# iSB8kWx p;/˾4i9i4zF9g $#7K&N|66pe2 ] 1S g -'H|X1$VHfRK) 8jh\&HѥV͑sĿAW <$0c P [ XPcPTCa$RiU3TEӽw43E~Zc#R"Fg ƞ8`ւԩ޶IY{>ͤ#{7ow^^Qņ1SN%Yj9A{= ڸ)b:vd&ě'4GU2q!.$"nMu] H8a}CE9"`KHL)B㖖mVHmVؓswr~)a`J,ըUBjVWPnE"[ 9<[ \!78Wͽ?`)D 'd;<5wa 8 /@zE4Qrtw:M8>~-Ԡ7 e}X#^ɁS8`֩ކAURr6;QIo*G &fg>OD˹G-e_bFnnm0(1Uw3mZh`U:Qj$S[@iIKNVtW7$6?o5nu*V}/MiDErR:d?9,ҖȦݬ*Y缐*u̙!,k'&˫q-"6 Қvx 1JOh^/>L`ʋng .<~qwkևeFm] Ltcy{c ~_^jj5$˨Kw"uGG`<9G$T.u4bf7>TߕgSxA7O = Qh)Ņ[rA讌JRW!w{ R\:DƘT3pTR:އL2%N6FcY0& FB(P*]vȴ*'J9>{Ԇ3tНBU|os" A˸oտU c9шӒ_^-:l{6e|jp6αa{q;mb'TלbsLtج3`ǚO&g.DS(L$  V>Z&>i֌f8S4܏/1:-yB<~nI84)l}I"L')uaw *ۛ M~̂ _>aC2jư<3/7^Sᅱ,"TXQ#Q9]R6IাFYNyV40"˳B%)$Ʀ@\s~BE "M mø:`gw_V|=U. j|< c4 w#C(jmSt~8yyu>ILew[Vv=ɵ2~x+oxg\eDRm0qSVJq2/k_D(;[@#M XDV8_@6kS?OFhDͿRB(!ºl/[qs]f5ޔ~9g$'JjQ-J6գ59juV%6_7`!JVxEg+)TO]/X.6r^0..Vs&Z[uOC.<pp94 (ч+08X? BQF>P=]B 8$J($>Eߑq >C!&C1t5)Iw֦p] ZJt;\ٯX;C2e>'E6 4h܁BO{=$Zfwvm`ڝυ9.ڥ;*ri+6|E ! 
C 95sX_T{4ud*}j4j0"ywYxišBN/m*$0x(:!ɑ-*lZtB#X7Wgw<9\W\WP&ګh ?nj'kAcY[W\H)d #E鄓SN˪C4؝d$#EKqQX/o^Hw,E.g)4\Ew&itІpk]kbi-Y5^sV|s} 9X;CFqcux,]>vl'D`K,0QEҘִڝH4kRJbᨹb9;.L^BxZĸ6/K HRX$=!rVtgQ8jvN18)"fmƆV$/2 GTѮ(wdŽ0 𗕾[胃[DE]O·UcC(myM':Μظ "3V[~'eop%WݹҼ _G.Z3~F&Yjv3"ǒoJ;sf}y|_ե*3lkT2Fv >%rTiO7=v<ݘ炩 9&wۻp1>3F^/0?y"h/R"h/5?Schf~Ԃo҅䋾J zwB"Q(!'T5LPaDE5O_ӪM?t7s j g%QFZD$` kB>0D+kvx u$R.B\"CAj#5O ^F(5hEZ$*1p#r(JԐAu4>ګ&;xe0yv%nAgJ gsXЮ?k@BGO֍B$Rm(c ۃXPyCt 9/-J ;t +C -Lj1\X$f]{O:f {)ȢQ1ز $`j KM,.MaZɧgp>穾yN糱,^'/3=y"vgmS^xAVԕ!0956RDZ^Ԃۃ-`+KFŊ!}9=xFky5xj9tbaŘ0<X6/8B/>+ya`H]9L=8-d4F<{hcdiYjoJc&Us_o&`b̫D%]g{n -ݍ?2r:)'~W z{m֢h#샑0w#X`Bof<8hǻ;h&X7do )u)HcT3q~tcM\ 6^ɯc y#d,칣ڑ킓CsP~X$laA :RϘI9=x50B(]JxZh#˙_m\ q_M$pp%"J?[clL$ *PPGkUJ2CeZhʘwΨSU|BRK ,B"@bD.&h-Q 3(m0 5Q2R ~j9)M.oDS]hH8a@7(5'a*&Ypl"gg/llk nf_h{k2aonzUgg{rP=Y UgTI%puPΏѷK0=g -R$RKM-';UvC^19ANc &!XORYd][o8+_vgѪR@zҋۅt#H$NUW/)hKr( TI|Cs<"] 8aRLtiZ O,4i'$apSξÔPlKg2P4sK`HP(B Ahi0< -^Pv_G},?>6>C,wglz?>b4r xgc>Yc߲JFר 8ׅUz#Ұ7F"O7~'2_o{Q#&`QSXf,B2`nD L5}^QC$)~*iwy/0啓mGv:Ϲ)n'Rp+g^|(/G"PS8Tih"-Pb/!`0}p0 }1"=/yp2s}<ۯ!; ;j-$)`\W @)Mo놮sZWD7\Vk e˿V)_V>r:ݚtaXA Fu |Yf f苓fbAyz/F1> h<Ξ41k 9L.BCX-E> b:Ȯ!~z9zJ,+ށQIDͯY7]En| ,q~C nrbM4*FhՂiD0[D +aa Bnv km aP%?|~.n)vLvntE$oa[H6o/كi}2'8 Qg` -:$xꐘ!a>8 aoIp]! s!0s2O FiMC+>/ZbS5.9=0ѽˆ9Gv{p핞#Epf~2y0 =À !ܗJk>#༂`8 * 3%!*Sb Kv76|r?OP3:O[mJfSA4葔NKZcMV'QR"J8S՟œ%RP|1zC ^⓫yJp@U.ir G1`p3@2. \4R% R6x /VM[z{^U[j#rIȟ㺅+X?y]X6޼GeC_'7ux5 ^|bOwwZ0+?N?L&o+~zzxp֍͓ S܀K?7DMʹ (sAI7xSja-->U6 WD90clX'eGB5xW$r35Ev`npi'#,ɥΣa|317hc2gAܸTqUPgYC&C~َ8;frg#LQmG"N;OLo,lT֖6 ºCִHxG⸬ ؋1Q|E&6V "8N9#y)uQ/r2_<#ZL"4&7u\<*й`D֊;E UB"pr3yss8HbΤs~gÂPHbzxM#93bD)fb|j wDmEI!d]Ĭ"rά,'TH$q##Ÿd8GRĊCQi1>d<1O"Pi12qZT&Ho'Ek/ A ZPG+wF#Y@h:0wR֑̈́׷ׯ#}B\썳=(S^NJ v0o.dG_s!{@ nƦˎޠ _|/9;jRi#kQ C䴒ʵ5o<7"#:J* r@jI5<`nm}̭Xi9c /g76ɘslF٘T\E}RPHl+=C1ͩ F{%<LdgӐ#]+X")3 $g[z[z85F1vT۫jn]u֫1-]VCA$Th7TLRL)5Xq\cˑ/b!L)H,yA_F-6k'S`dۀ La$ Vh N VHaip.9d@E}`dȳ B;eMdʝɈe&u3`L-Ǫ( ng}Ȃ@pdvZ1L3 XTn)Vz7 S730}hvƍ!dyD5X0̇DoEb$tDYZq%fp4D?ˬ+9|D2`~ogK5[Z$kז%LP klBFye[q#<45Lu'FM9 5S%pT/FB SUls:Ij`aő{4Zv"h+wOBDh#{h9[.vy;#;]gH;]9&^;w>M%ڗpk,u᾽7hR>hZVkݭ_ ]0ԧ5}Ist}4'o?q6'?WϲVӺJSz:M\˾S`@/IVߖ݂KK2}+\2l:\a7MƵni}weeUY>%]^f k M&FYU׆e(kcy#m1kN㤉ElIq]! ɾ.>L^4?2Xpĥo`ycTyDl/^zx-f+Y,\=˹6?k^J5O+aMK Dƙ@"pfLJZ?t2`BG"à,̔<#L9|2u 8X.4mc|>q/<1!|^_!HС$S%~#w&`. cu3 WJc\瀟_'7[)|GW= qqZ;x'8#ð`o3BDP*7%a6[7c^)eC9O^^a2hk yx7-`wqʥ!`@)NM!b9\a49(, GZsf LJ gI$>ͩF"a(pSE8B\;,J ?gp|6~.H MĠ tXpL$@aI.4Zhj5uuo;q>N8(Xԭe ?3fh֟ٹw<>V+{e1uRmfvyͽevcQ'^ E1`r=<"yuRV3FxL)(¤)' u>I4V-@hq戻2R#mA p$4UzegÌZ^|>޹O_8w !~5L8Ġo[*ltfsf l3uHڙVaXn:gM*鶕PA1 } IRi"IMBV`x4դ;U8+]:Z/4aԊԁu%Ftq1ϤX>Q٧,+cnxva#?B6 >U qowi,UDISmjHT"`E1#E"7Gc0DH[m CɹiMYHk3jKkʄr[1&L|yt٠E vejϯox{-"{W㬦dk.!2vL[9ֵW70Zuv龬p}hKY߅%͏Y~?GW \} eeBY٧PV^V/o3H)J [B.4oPowR)RPaK!ҟ#NJ?(P kޟXm3Nx(Cprf;l^Z]_>|+KAAHr\FP/s-1 $h:USLN.JP)_=Ȩz@Fؒ;Ƭ!5豀EKWJngJqzt%$6` WEi,cHfO(E<"1鳵w?f7+}x" vqy큇S _^<Mt``s|ŕr~8OiV:#}+Xya+Lmn"*4FDEf\'h8dtCǖAѦ`0Kpѐ'WJ8qЎpSD)\TDYwUG? 
a83IE< F _jC SF`N@2Ja '5>l}{кN1Nm;_Vr㠆!wu!;s˫W`́F`0wn?i?laR2.nPns y@ɛ滽 iHH?9H+ JuS0J3&>9]S GuYfrrS|3߬wSGb$Q{F\Jt=1J@ ;]=YUQ6R UL%J`9I>c+kl9W3&{|UoRdB]E0U/gʧPddjl.[f΋fnSu(I?};gĨ\R8TZ-܈ά?zʛ)9G <fB*s| +riG̉*DFtEHQh22.u!i~xK1LJ.<Rox.3P0jeiY\(WX-cBҋ+bJE#ð #BhL dMcDײި#2'u%Q=NmkTJ@TމDW3H<3 -G헧Ir%[]h#h%Tp@q SDi)1Bg 9:ffXNHJJ;9%IZ*qiJ#&_㥖Ԙj&vB3ةψ8ݫ:L V/ ta!ODlJ=ƈcnN3b), nzwB&ٔ8S=8śZ܋)19&t&(ƍnZq+O,M[)ڦj ׽ƞ Bnlֺ7"")B'A.o9f@,>ֺUNؾ`'ⶂǨ;Wf1:wҨbbS,7.4P[ȴAuꐺ:5Fo&sUm ryө^sbc&>(a˜ KEt2%?ؿty쇇]vY1Wc_/[ko:n+{3\-&?kdۻUg a_%uʼS}^2u+KR 7PJ "Djj[/ɒ2 u$]8%Ő7(<3E*#N}e( _Z&郱SF.`j$Qup Z(+S ɣ958'J2wicL QU, ,$ZPZ.z(L0G1>Rݨv] Y`~=9sXvh7߉}_E wP=_(i}_ t3Ǘ*W'-8;e餸8t|?_\|w1y>oٻ&m,WTew*#lTvfc拧\]iwՓxRwC)IQi9.mQ9 pPNWmAf<,>SqL*L4Б4dҺPxaCy0, CŏϚOM*~ISNCpz5V0,Zz Ĵ R )Ig Zx N!-c FRUw4[ᆱll߇mSƃ[Sm:ZEճa5oS⧿}%a_7ͷ]D׭og,౟`@D=g.5?YOxyH1O{,ztK"Gh״GQRɦV(cr7<䅻hO14^SnN3D9~H{}ևpSREKh3>7޺yx+\(p8z(#F S&d: p .=OLCC^v))eٳ@nǹv,hڧk_g3`W4MuVC#u{;z8ʤg:ml] 5+ZnBh {^g0]1}wl%bdIV8 HiUP+RS|8Nyn>FR2 =;ƌ4t1:Xuԣhk״FW;t0 10Br{!9ltd Q}uMO<Kó<$~|1u(y/Y8#4m:"BH;!"#‰xa$=l18ݱ")xNL9/K'NI&KȸrBԴO_PzcEs.Ȉ \Hp;gK9h.F"VFЌߐ7>~~]p_⛅> ;򻵞NwnPpsl XwS!^t*CC<6ic$|f?aKbƏ?4Lqӱ!ѭ,ĜEq=/>TGjKgZ5O!jBL2= V)rkXC) |tf+'' 9L~<\ޕ (K9!u2F%JGsSc(^Q"9?5 L1 !|qTrq ce ,1#6 rV͢z}X;2nO~pO'{B֕~2'Zʝ?^|?CIH[琶 pbJ΀ d!E==k;&͟)5Xs 02#M&A3w燏ΖBo3ҭD&\݄I0u[XxƷ.X~zzAhv돺z:(8gsPFaAA v8("WED"qm0a{I&WHRı߻NiKl\NkrN}&FNakVJp5DRi]z* QGb(>sHh̻[fٻ;[e}`#ԞD*kW5dsfVU,rYntr1Hqgn[yF2CGMѭy.S[Nwh[.)6x8S*Fnyѭy.ݧuk-bk_])kY19Sϕdh&b)*)=XԭrA" 5*'x20%!#tC7}hE5,~ :|ymS{CCP#6iYH'!1}jQ~$FL"E ,b5h zomeD1pM9!}?Ow7bCEvJ"(Q؍`~[rWuNfoq{a+`9B9 fDT=A|A.4s 7L yHy}5ybtWq639?\Z?ӎh]rk-IL:Bs~9pЕVB`~FVmǹƜ͟>>}}wxz;TXB$TqFi򚖶 3oxpв)?B{~F_^ބ{ն Y^[\b[ CWVG}~xΡA>i]h"~DG:2XͥvG.9.B G<<|tcMwu+U1Fgz8_!]R}C`;bw8SF&@Q2/zH#f8AbÞꯪJR;AʨQAAc"I C^OQ@/@dJϴʼ"DjpaGF#'A3Om M fsunr۸3A T "qzpR!T`NmVJ/QK/'Urvrq~ 45St%%#FՈd/RpIߎ_2_׆_^NQ$n/vgy\8pQw8'7z;/;klr"f^qB!#@88+#ǂ|nhP9QrIo8G茋72AE[DL;F XqnH*%lP9䍤-@媔k#EI*"8?_:(otN$.0BkE5+FjHOHU.B#*Ћ8>Ւo3|f{M6AXox{7__j(5{jŊdd|\*~۝&Oy׺]| n&(=aDr>޿L|Ӻ#i;1"J4 Nq4}%$5G ' @ˋUPMAԐT/p3N4 ~:41tGhd(5BL owmPޒ7x2DnM2ilj(NgÈJty$#][+5㍐NgpM W y G {Igc?%^$^;@ BY=/YNce &^#. ag$S2!,pk Պ֭ U#;7ΎS<jqKnFm9pd-%㑔먂C#q^['IZ6.|,}+Ԑef/j%\q]WHxZLTsC{AW`!d"Dt68Ah+ҴdD/PT#шk]h[of՛6FsOPzqWe&K?ܠ]e5ɷ8"Ls|S~?µC"]8 uĝ> FQ}Ay #%0k*S?G+_iYy5|:#V }#Rȃ,VaK{!q۠lD)GT;=sҚF!Pqߘ:D9s9ecuW W6)Gr2)Г@ QQ,P#3J tpleNqm7M&PztDEؘ9Oؚ$t8=Qn9 n- ^TT4cQVyOJ~VZr`5;-;lZjiNhV#ڲ=Ib*i .kgZRYQyѯ^|\pě%;Fb̖ekdKZ+\=hị Nf|jw;Y⓵UL[l\nNW5,%?hS'Nî qU:f:T,*ko#~GuD@?Ժ':׺W@I 9^-q 6wΦ͋cjMsH 6vd;g~2c*rTm՚{h^};OvwHxEJƵG8½wŽTtHbN0w힉4tBɩ`b]m9Ӷ/[.k;H9H YK1 c.3F!d]:!kX47 ~,UH+A\9\ Ր@2J bIZ`8S(V/]G'8Zݝ >X=9!Pnu'c"UEL#]ԁcySQN}|HG38]*& Į[[:e+KGhkvwn.S%>\LF|iTKv̈́TkVhE "g}V$]ڜZIziЌ`GD)ɵ@f{:UZagDX4`pv]ux@OPR "OBJ΋U P: g{5 M;oVÈ: ?zՅ\ޡ]/4#Jna'? '>Z޲P ^%vo":ً K'v/[SsعaktM/V1}(tIf\l-^:}wH#+ OŊMZlPlN. $QMh>B*)TS]ٮf.`?gܗx*u7f4B\ fS2F!=,'aX4?jc6?W+'Yu߱xu-mԟ!):K!Ju_ 'Dq Zʜ\y9ŴzޕY{+[$l{+e2x'2O 1pg7,˸6"*DŽ &sDzQ+M '7*Ô L( &BEg}RJp&*ɔVjkB{JȈ ©:S܌GqNX%Ec @h=p3y--YLivʫQ pZzcZ1Pl5`Ƌ@I&\0c !DM[jRiIhjr9[AXq" E:hnQɢ.{XVBNSJ9NQ2#gBqp:N[ɂ!f~;.NYh{At3#ꨐ84{~W96W6$7 " _[QK/'U r\.bE705"Z)LxqqH ?-1U"QڈpA>#1/vE8Pyq6ȧ&~F\*~,L# u~2DdB.ՆElEM@CԦu@2Pd7ts -2DFTbg:9;sc^P0ˍmHxv@!tI՘.֛?M$*ņiӂnn@C aB)%ɹԘʃuk;[|F C)tun4DqByh'3"Fzu2P^hU,-W-ۉ1ubz_3\zN.MY՘7 ΤU!OנA;QύTjO GCGgBwlr#Z|2{2_}GAvǼ(7a[ {Jv>Y֛˯72oB|hkmq݋`^vfN8g %Ț"%nYn=%K@u"YGYTQA"RsVqͼ6.n\bD};'wb &b74[QٮMk~b></o}rft߾$XXsJ3KGI\`pɏo-?Ky[T<칎W ]yQ`B,Njv!Q .t'wQYjfѼucɝ H  / gҒ'O{i/'w/N.[n~.6P$\m6nNEt%o$H<<*[)bEfUN0mMD8hxI˳3g1ۛ !) zfd].^o-Dt19>l+ko㿟Jb=w1hShDHg!Iq\VP 1 ejtH[." 5CQi. 
xıfk=N~5z Tn\h']3b{o{j=es ƝP0zʹ\GjQ947{Y{ZpMg?c%F!`>g wóCs0|opq=+bUsB60wK,o?y=NnZ\\./'q4=o *VrǪz0EMvN>xVC@VcY6Vq B*w\/@2P>ü)#8;[.Qu>g+T9Lɣ"&oNݰFin|K8E/T;k%`i_Ww4-~S~%cy⮮F.} GD4ٗ&WVp)Ej,]ޤuYS(UWX"MiHǕ?,߸s{ƑW-,K'5)wz W_JuAmLy.ַ765C+!Ɲ](=VE4=\{\ˁeV(&%’^H9+`T\J6BiX|' ~cE˭Le㎲ُA=s w#t+^32N;Fcȹ[XQw/?1kv7ßםd XX6R0Α7̏a&F` UKB)'>6.FNs[9ɩb%8WJphhD֢f%R WPrh Q#HۿEK![E0!E mm 6OZ0BFےŏ`(}x `ea~#ペdFB1e\$AFwȱ }yI5Uf* aId&ȷn!]J*`@ [I'qw҉-c6KuGYX)whZM BrPc7nP$v ✳JEBN Nv#6f=1׏jq{ȋQJP0>ƻ`0%X4sʓsCd4I<(Lp<`hʼnNhRSݦH2MBنd-XQU肓.~?`.ɼެ- "c7dMd($R$h^XRH)mGP ~Ǭɪ=55-wqt, 能9;V0^v./o\)Sa=jKtJ>b:Vou 7eɰjnx*Y&c47sIjli 2Ћ2dQL.J`VDN{ɂPiEu{?_qBdRp:`!eI9z ɂ%RYXFJ+p4WF~]ǪߧS ,nax`mcϽXqz9[))DO<3wzeXa/3Qp!Jƌѕʖ^Rrc*$i!஡kp2 5mQQs0xmEc2Z2;d};p8U!mɤ9TZ/8"bBmgBɈ:CsKDm]I`Y.Y˭:51̠FvԞ@ cP ƽOd1gHP@L R98&ḡ!I̅:TnEJ/GVa kdL'jIKx3# 5OF$Ґ XC{ pqKD<",2-QdEru0+ni !&٠i>=MM wp*xCȜqA#YbH\٥Uz.ǣɍ!C RR"|t&J}H;ΠN*d 6 iP5" 33G,h'3P$MtJ87 4">["sҦ'1|6#:!zK:ELV6b& zR.%*[r?%&G9$0%Q;It#1x$I$VmIe',&c&%8B;'I"q7D}`;?T>e <7`cFÚ>* 7^LĚD0[ك̒lHH6Pe̴щ)c0VRd TIJᐴ:V l%S:$TDpՐ? },GO%|EmذrwRWm>,}[k QK:~>ժrn tCD1 OB#m68|l^`o?;}__FCٸ<Qw_+-ꚏcίF߾?ޜO2ѷ__s͈$7//N:eXM@R0c&##&YԐ1FԵ. ] 'yO5Qabs{sqmg)}4qs0IJD@'|1ϬitnT{EYH(sXq47TUh^+B|uosn ˠ11}%l]Ԩ+/W{6ce8pٙ9%D%ТH/eXsAh"bAU%b9zzoL(vQ]ǔS` ~,I3XAveּGwz-nT0{*3:,p_hܼ$yXWІf1`^q@ăr9`*F֙FFIp[C 啢gAQT{s4RE B/$FL,CE$1>0@bc13"_, 4@a2pIՑfH,Oh`"O \=,!yc\z SsO/ ^?U p¿@ۗLKQRu~>;ׅ@κ,0K(^dC]"5&J6LtRȖ۲saM |!$(hɝ 㹗Ai¹IWc'cIRi"6M,NHi ޖ]ʟ.J+:qxE-Bj ,Y6[ʳsr Zs|Cǧ4oW`.&MUӎ.K͙ &Ð@nGXPT[ d΋~"XʨXH I1j Oc.1Հj¡ESM_ KxO9ZNx RI)5qI@;3`^ȳTJ6AmZd"SK{DKSiCpFSR{@lw0 xhѝъޅ6CTg2 ݞfmd¶1S2փ# nPV_(ΐ^bg_yWz;}JXYrAsK㖃 a!͉k.#GZwGh1`anVF18%iw7Zrjۥ}WuH %* DhZj\G f -N3loÜ)s=潿qyY2;peU$pI֤YL _ KUp3?_l;ͅ`-@&fTf:s!8OFb/A؂W ¶'I<#񨼛xt *GK-xWyŲ^Pe6 plh=/yf}qrb͚ƥk鶝%sn;R$b9>bʬx>iyfP:K*]RTE\ƥE7nUqm"lsɌV-̨x{9{JfbDX1\#>vO^ \h-P5Tl"Bajx Rq vExN m1)ސEeE3 6i &C[W1ƪ QAzKL4EmPvB@fb&δGZ&1ڹ=bN)c G |H$%RR/TJbҔO [IwSPg`R(!̻԰4 %(H)D|Po8Xm)6p-0w[b)JJ v> _˟Prɫq̮d!,/ 3ǕWe SAi/S`n23$|fB[)(|e]=+XQpʬ&m(m,.ME6EF`b1)b3QϻEeҞK),J1$p/arrlbblu̬(EJ'yg΍ԈJ5@nQ(uRh%AHR0^Z)%ڀ @"+2Shuˏ?}en.GS% ĉKՄcDyȐ}Brhl兆=C%MN.U`u3o{o.aBz:lׁLf5bh+c](LXq+bH@%Ӏ:Fz4ZJ DOKx]D~AbyP,sA8;:ً"޺0٧yꞫ^b3su_-1CG0,_L6!5wX/2//(EQ."@TjHG?Fq3 @-/Q5+۳Wgu72Ii}}}8aV]/%6\tQ^GV8ُ,#j@p#h>gQݨz7*`M/@ճdWð,6PYT{(:y?zw|^?e+D0@a?,9j9x}m28dvN m]ҍ=:y|}} +:z{rߛӳਞ;jg/ZY_*7'tR7ޞyUg_MrU&{xU2(q9>it6.=Stvf|);JH ZZgˀ=cP_h^A-5(okw`V^D0 L{:v[~)919W Q;T6K]6P*CYJ A1f6C[?)߲ʹf8/x-.?,6݋)T Qli1;Z_G^;{"kh;j^A+oչʅC_ag.^?ͥuװ?h{]!i|9(/w=h;5O9:7Nt>Mf51Jӽ=oz5'T|igOݧ^[e;q=  dˆ_0N3…o?Ka5]g]w0Hl28\#hGw%޴ OVKx/+o;xi6+'hggӚDO}js;kkaY& :^{7ƨx{~s+E|!}Gd, P`s,˟ggJI74V1 [G-N ?D/-lcqF|lD)OpEHZQho'N{[zˬY};wowmyP;5#hpYl*:LwZ܇wSԸsոk ȼS>EX&w\qeq4;MnF QTJP{LJLcK1ĈU3r9+2qu\KP-Lj "xg/~T8 j%C^5-U@C>ihAC^/%bȆ>`D c\Of'Km>,bdY=0`9&r%ȲTEd֊ȄTT (*$5Ī_'zYf)he+PlF gv5 VC֏-6iJ * l.mrChOOX{*O LP%r*noc -" D$юhˆr`HjC!OP2 -X/-\o܃RhSitkB ̤b救\`TiϸR&iNRarQ-ߒ9\/KS `4#NJ&z[RO$Vv@iUyY+KH(c>Iyl(q>^a\i2dl>rqh(OBq :j^XI8 2 qh5C&'|j7fY`l2YSuw1 xΤp;\o( ewY,%a$@ Hg~^F/LSF7W[Kwagʱz}8G{WV߀1OА.#R MQ.(g߿{u=-8*̹^~icHQ8 )tDj۩Xkg!KDe NPf K! 
/VYǞIn@;Lc) 1A@*Wtm"ɸ y~iVHrOcxPAtuij5v 0[2Y#IJR*$5hL/ '&T2QpNk+iI'u0/R+ҵZ&$GŒۉ+oJR].iƴaLx1LNwL]60<*^;#.[fqK+AKkērC•W.Vi47Vidma4E)=(.Di!J{q Qڅ(lۜ$㈷pġxs G2,07B&'1ϓ R1D|Q@zR7R+MiۨD?2b&j.3&+KG㖈ɟCdn}V]0nts~(YpD"3 GnóKD'$9ܧBbB7~phB 1;,9͹{LGZ(SE UBK",OlCXOAAapKߵ 㦈Nk kTbUWGg!ѴGyI⹹* PW.j5OEhT9}?}PX..;dr;Am 3fljQ4@ O&,xl86׎ZE9p (wY8uKb872YDp{Q^C\|.xS'ħ>vXF1q#SK/Z/(_wmu~Jmo_اyˣЗ*Yn =_P$YЀCg|SS%󹳣iťV,^SVCxdYBqL:]}s:S[x5X Q+5i1ZШ>/)+"-&N#(k }4KOXafs/5\طBWc {ϒҳͪK!z5V62pDVCxiPWe#-(#xvh*R)A  ;_@7eg9k@Y!PuD+1+/bPw Aoo OPPPjLɮZ},z,z,=k7'9޼=l%[E[֋?n/ҭҭ[Zx*?o=] d˿=}XXXNU?<ZL/p­ J+ҊKn{+[קA и<BOZbu-tsP!Ja}~xVɺbh@"Q\вB Z^|~̔^mN Np[1q{ѰVv52w48}H]㋽/³Gh\}bfl dvzIx+,v8ª1K',|RuDIru4}Ҳb앏8SVAxOAӲp : 9fGl>wE\3y]TF1Ꮿƒ '7mq)pU5R>/)+!w7ߺA9]Y)G/杶FO=SVAU6w%}+}M } jOXa#ꮹK+q-\ny[y HԪZ}S|zf(o9h7gEd`!~_ۖ"_=QQ>ϴ NjuEqU2 /~i70YgW0jj9 29i Y=(ުa+d6٭wl%:U2I$IX׭k4F!&ph!W&~53ۋW/=bKqd Ö)25= R];w)#7WpF Qu-vCpXn_3vHG`#&ɯ|,Wƞ.L^C%z>z]-u5i'k8/wZsI_lo>N9>#dbȍƶ:;$'vՔz=9o QjmT3s2+=5=#CKD=ܫ2ȍv܀0ydVTCwVz?gS~=ǗWSJ_x|<>TqCr=tӌpkՏqҠB !}@uNW&HSŸΔÃͻgwƜw l*3rMKrM! SJIFIαMc7#c=9)|=&Nwms}qMcuYy7e/g7}aUOn`Ջ8{]i}ۈnJvD1 䝶TKb{Ϥn}&I#oL\w񇏾wpaIW_|_x tp~rq0QSgմLQp8t#2s[p"-:C>`ޚ&=Ӂ#{,5­PGl#و^Ԙ$%w<8oΨ/^~)cn֜!OScN ij%ik]zW}7J{ y[i@neꄺKD4q{%x`*2z)*>N%2r| 6-垸헯Q{ 8l|^[DvB }hu>lXLkKMosמha`(#ୌrRCgk".:[Wm=4j؞Z!aB~uyD^ԣb -Y.M,D~ơOuk25~;DSp7zP3:B&Kp'5*5cHlQSD.ٳq>'4)cBik憨Fó1^kou{:4CwGL][xk᏷t=(6. ׫/o(峷v\~~I*r{(xjE%J >c ze!a{ϐr$iey5ρj9swazvɽ>fW|˿N_wx?Yg_ z3aKUܢ;c-;:A=C(XCSA7td-t)Alƹ^Z a,z|`bJ|Mw&r/Jt(dh w*t PδVjSȖBCqTĝ)[IG5.C3{ӂJse}h3!Q9a-a@ȼrZ^}iA(N"w >n"|шoYAUjCz_p㊭G(lyDȈ>銏Z1'd}8GJ^;A*t=UjE`aZJ&`:t.[/AjO#CꝌs`z}/0 Jʓ@#G1D|AP@˄#D56%R},5A:-"4Ӏ3@y k0G.bG# 883s(apȇ!HHƜ;B sOd}9A\X A!`ꑿgeP4q7^}Af3S.8B4$ZZ,rE4s"%RA`1nzkrkgv^zӞ$Ď؛S-9J,r3mzۜnC.=\6ې`ۜFoCPCй6<\ۚoCnlݜoCN.r6!}}m>Y؆ܡ&!Bl=؆\|smȥJlCndr4Zo5\z51)|+峋 }jeyoR}f52U}:~ WG0)~|}74$[ݢI>x+se'~՛ZjYb P[kMUE^ꕠpG՜v$A=&{z_8bMy~gIS͘nPLyp)is}i v\SF«ӓ>-T{f )`!!UNP5V})S3=-9'u4mpd}j'PuDm:I`ZU|Y*L꨸vHQ}]"@.yj{&0BYnmm]לT7ڧV uLDk^|t=8(vZ {d7@TL:(qB>eUAΈpVAkP2Vg(f>\9_g|I͘n(rc55Y|95(sp{8<;([)? p]rFKptq̯B'""!%UOڅTfцX6NOS&+FERk8̐6-7"K i4mz* V0 vv`{]a0`j|o BSYGUJ݃*V %ž+u^Յv| fG}VOJƔ!<̈́phg7ϖ6# 08$#場V7so->CRZ笵oo:]Y篞^B迾3^7V˯DOzzՀNh\o;$T+)!!q2"̂揟?oV 2?Ҍ.=_B3__uiׯ޶彾9__u=WW#׵`^g[`ϔ޺OU:6"i3k5v.5=rFdb{FDwkQl9Jrȥj4b^!HŊ! ǻѠzA@Jl\,D Ӆ[B^2=ͨFI*?(` pmcDDCpA$BXAC8 \VWL0]?]?X﯌[B" i@Vq|5ޘ^/Q/ml\zL䃏!$HM8QK GKJb@SH _b65S(^(0W|Y1 6>lb&\)cBApkl@uLM)Y1XrLv8#pP+h %ԜA pEU/,Uꜚ-TB2uϖQ@P Q(B{ Ġ{j+BpAܮd:;xD8fX5ݖ؃e^E›yzx%1a_7/RU+ք{b ڎsX&4% -"A_,)6$Xq 0r0yjX_^Iez_P׏9t)/}|v_\f~ٸM_Xǥuwn99^l}W{];[xmkzwcd΢~6_{=ģ*&/^fMOW/_|pv_x,Si} ջj9TzCNnUjm1ykmzI]OLrAR9$uy%0{:jNSam {ܢM]yFOGs8*ݒ:Y,o|pt~x \47#9Ȯmrj/TF ˚ ڠZ64mړڗ,Ttju\g:ځDzY^=O$Kߴȋ|[h4Cu=cӹBLeͺ=N풘z6/ONoһtG;JRCn߿>xs䳜A9[Ը Yvzv'ky{3ꝳ^7*#!6ʩ5N(9|&%u[ lF:f?}s˘#xلC'K%_%5?lwL_|yUʣ<'z} jSfc7v+֙G}[|;"ǽa{a5'&[P9 fsNi^ =o5OAS^۴kٟ70A;Nt&JSҌ,NVbwrjy]{ޫts!ۯY1Q* gtxH|dc DvYP ̋%)r)ų1<GfK?%v{t3颖vZfi] B>@/..Խa2[B m}eF7?ʞg0]r6o]Xad(kZ#+%bCo_ o#3#E=2FΟB_a58j.D5Qm|5w}!1W &>Â2R'DLkbNJFD˅ηn#GhYvn(&Gizn0N;>"Xjvۈ#`nc3ckc>0e6r[q64ܘmvFn[W:u׌m^%c"qRmvFoH PaFomv3X^,_Qg]+.V3W,^|-bmv}8gŐ65cň'ۊ?W6zU)2#z[,crUŜb,T<=PQB8zz8ܓxQe$LM@>Ѭ-s+5CQs?KP GUWϺ,lj7{NZBLm.<1sQFws6f-BT@UFLAMmfL|remYeP)|ꮗv4reyn`"? 
+%E#$)󨗯{ ۹pbyfF~hV|?«-iQU9 Y rdbs3Ҭ2 |WlJR-*.8_7 rLeo%biUji%d8V@"ZV^@06~ S"+ !Pa2> twj U%%V$ u!WBe:"i)ylFF!dA k w&mNDWBrFsF`91 h B bC&@w33 @aDbu beug*&Vkm`RGҀ_f SV}E[SOl;}3(Vz}xtx~88Hu"\_43ʰQ\;㺦1z\ 4ߓ;ϷMx_ӛŦZxpCA@~t~,b (|D!K-7C<yn;!6B0HA5i(CT kB{ " bOf:T,%&>hzQDH SH"!rx!,uUn-]tmt rzjf>Wm|ٚXD Et1f^yn2>*kxVI 7}n^|b`!m›_pa+5PmRpCBEeCz|쪭nDQ9xcAN(L ”KCl0ẏ̪׻\]ҏ=[y=.9'\qk;gmk A}xCTfs|{WIsf Tj;^IjF))q-RL##:m>@iEt8ljtE4:*Ipӳ0w;mVy'}d#x`$`K( $8Su$Fc!$$a`cY)`6nD 쥝Cc۵qqOWp*8)[T;mQVT׋8~k) ->6Oti4fӽN RcX_s}TD1ՓGF]-VQhe{+ +VC;*C0,2BV+#[l'}&gpbrRQH0|P8c4'گ!|d`3FftmiL B0 KC-iiD@[ rYBѡ Zp&A5Z^h}$/ﰎK+ K'jO :RTgï fbq4: ȺZúum!XZsn_W?(tN,LBp>6M~0xuH 6ʛw W iP3Ӿ a 15ҟ1]> 5_䘝AWAOagX"?C)$ğ1e(\AhƴK*Xf@1 X*l\a@Deng2]__Mӹmw J[9~Hű O}2 ].MY{9ʻd-PL')R=cMm!Vw( 6Jl2%'+f4LHvaY]R*N d Z&a3[_nCGwW,}WqE A):)o&clQx"CpҲp+t(/3sfZJabQa^mvesK*K;8%8ψq)`rBle `vnFY;e0'+ˌ2cp?e8@!njW¹\G(Cj(Jxuru~+\[Yf!2K5.+Wi,K>7V7BKc4o]9{N:0Gg-Ѽu:s4րQE34QU} !XU9%s:}(Y\X ݄;_%ߍ[2l(v2J2}~ҝfҼ0B^`Z]Aƶ;/;(]FݽQ~&\WD<-9^n_.@NB`OWwKy1P( 8o_*A9]ߵviW B"ԕG; ]ANJOԯ8ҳˆ5jO$K;s+QXnR3g~k={3SE&yV%f"vJ)s` fS' {DKV. /_⤙6ZDCA0XK(HP5vu_ &Alcdf?e0V2bcB".kYzgʚܸ_ C "ఴޝXe)lLoG,6DUnREC"/Y B{;"% $dzA6~6jߵyu]:CAW7rX^ԃgV6B*P=g͌l>jR2)R/S:Gt']z)(&>ϧMFLZ)cS& >cPQ>%6eGv^^ZBG%\$rԎcHLp <[ޔRosRA6\nքYwZ'!L^Pűc)LU[_ `0pAP[ŮvH,)9Rc'UQħ?L^`$^Q JYwʟS%`mr䎬^ ]-cm \&IKRF礂V dH"h)9j9ҀohlAx[1F\6 (p8%.J%S!KљF7Cahw tz,xYs{e{򧻝0N:!7WfG˔rI342St111" UTD^ĢEbeI [2O*kZy\8\Pf<>e)45FlOw$07.z ZLoQvZ:uȲV4(ت^lUo ،[ΆT0KtRwŹ`9u @L jTF5toTV"H=6Tޏ^cF4*b"Gd['Z6Ȭ 7LNQ`SfQ9S 3)j@r #O;5({ ooN 'հY1zz߿zշ?1JC#d| '{JՃG֖COpFYߖ_dKy$POB,4ުۺta}S|3(p"xk+G+l:tw$>4t>z޻7iaR>/~IqoS7BP|BRYrx1%t#As+@Bkab aȻ\kX1TtZx"֛Dd@2q3N>)x*pQkQҡ 0-' ,i(u@4d#Tg@5 FY7߽ێZ+6 8}`VtLRoM$Q,u{p+r_*NoN͂_k~W9lr7q!@<\n?g4Fjޡe^HtIbFΖ+)Rn,ɠ$QDݹBH&L㥦ޞCa֓雅>qM[)=+ΰR<͊u¥^^BBԾ^>P|K ~p^߯ӧ۫ZnWՋ2ڏkY}ٯle(b/ֺ??ޣZ,c}avC@ua%|(/{KCrS{ +ݠ8tŠ Ծvɀ7CKʩ[hO5Ǿ>kt2q;l:;Ye|$KKՌ ( <zݝOi=^ӝVZ OHLV;sod qpV.c;tݱ/e(cd޳[q؛ڛi<z 'J/׌V S?*ݞ 3en{" d0l6bT$1)R"ۜ\l4ؤZKva}[vKg =d`dOo%8fUjSpˈ Cߎ`4qu$]1Tt~˳TN !9S%o9.%#6PɌ[UY ;~"V dJ܏zF0{g؅pWonͨ8gs!D UYrlwT4*40'UҔ_*kH|6Dj3u1{|E,P>Z{LR>~6ý+1`�ӑUu s@Btʴ[M5S7+.f&$%˹\ JEwQ'qP5Uy 0s^iR'q;P^GF}Q0ukeGMe% /r|ww5iJZe5uo)dn.Mw0ri$"ho^}pifZo^>V krm]2}P߇a. ,}srYxn5nr^\mrǻ:vuzAγ؃˯=hQ5ӈƎ?\Jv^nu&:yNs,tkllGeO ({Oŋ/~fXghnQSC҆RֿC+=`((B?JBXGģ*; z?1j퀢 ߁QSb08gH-Di97-I[_-JqY^JrgwswVu>,U[*Gcn]1~]:yq4IƩ8%5J '&w`@3TpY6f,sX7/{Sc*dnZݽ{9E*PB]dٮy7w]`l.B]{Y=Ir%WtvvwjUm8o0G7{>W~^ϻ|%coⓖHRK$LJoJ-ga^7rM&LJ\7x&}}9e2` BhB8@|z!^ޯ$^P yT(dZx\Up"2B+$ Cn@TdPa>J@+( - L *s(4]Fx0LuPj=)"D&qԑ9xe7T|4|F.DfJT-K%.#B<#yTzBijǮN9$\>rU芭(5$&Tc6·w2$Td :xxZ:vȍ÷)T!m8+g*4V~z+kЬƖM $.fp Μ#Lsx!wId4R,ERNА fASjPSs/R H޵q$B$@E:Hv>FFf,Q2Iَo?ۈHMp||ȞX,H .2kZ~1{ƼsdzPYlcDkkFF@/Q@xy0QU2But0jH6;oh ]ݍ<ddA,}Q"I"Dwj W] i'L׫˭q]jwWUVc5Ǵ6b.Rwb#57u6b~bD Y4Xm]#OmTB>(ݟkU}yv,YbUob5n5+?^ՎcកY\:ĐPYJvM %担q7{n~{v"FOOLR=3krN9oNM ^b jRQc]QC~!LYca 5-1-LJyv?yNY26Iv; 9Z(=8m[qIV+?=Pgq6PvhHh;F[t"Eĥ@w:l`ɣ־w$cf|}fkG0*80F[37tc? %}A,G1e>T ۈZ%OQ I=׌JBڢ#4>.'ʈo{ c䗙 T-_)JqaI>))Zh֒vgXIڣ?Һ\}錴^ g*0*ޱ&S)iwA+"䓏ח;ͤ6XQ -K^(MHPU)} Ok 9A>\C,e(b&+R/Jq)} Ok 9gl'JI "ostnzrl$?Px`w~إe;&5 P:SL”Ix$ԈIxvq0 S޲&aRAh;#>^M>#^]FgXSzvLEޗaRvNd.ZgF%Rp?!wͤ[K}_3&s^}Ep8^uWZ(OxSHZ(p_U4hR{<߁lͅ*WuSj+jk]IsLWi&1{;wzMq&L̃RC{3uqǸ18"QCwq;ƻW))qOQ=85v{gi,rF'\]RZ+Vd[[;=ZHJd j'{em{3+$kiwfXMjEjj[\曆Gn1 /ʬZifOÛ7x"0;O5{?."Sj ed4ƼHx/'ffspXAA+Sz1yʣc9䀮ിΗ$1dȥ쎡uD&N!$X ٍ𱾇0n+S< 욱:nxǐ!׺ئ3'?`&Hn{7AYطoupP(+CJӑ3YbKeNB,߱<i3!'N& %, m;d@qw*۔z`t,$*j W@0ɆO$BܬO iκrusى(;dQg% fkobbJ$m=bxLI6]Qy[kZJvDy`H O#Nb4!4Y7&vt=dnp+l~sݑ}1Dvҕ$x]PjDGW4^OشXt>Njt%aꇮ!g5h!_\gQTz*Y6y7rkQSz[3jR&Fz Ǻ&ѩ$SikI+]Eȑ?|[/hQ'(sqj0RN J[腡ijGt)Ԇ!+|Ͽ1s h*TMYXޅ|Z|Ѣۋ?uVnUe]8|;{|_9 B>KT92"bEfZ #^˹+=p5BWPC3$Uc 9T}`P=9ua(1L5dD^:VnvIR| c1x ^+)! 
YQt@H"4RS.8 ޑpO͑$/٠ޕ/BլtM QY$ zFwWW6ߍh|m._?6K I&Ӌe:; ބ+/?Yo# {t*5+_: Ra3X,!n4{3Wlc{yV Lܸ^]xxz^7ӋV6,AE]=vDZ|ȋPcH^l}K~:sa%jJݾ8qOMܲFd6(֣ULk_|U$Z 9ylY¡}?qϺ8ɐ&/"l4~63՟B՟3Wf6rA>s[$ P-E1f^HtNROhwf_$yL(((Qꭕ$, C%xPlo 8P_zĄ8L w^z ʩϝ*1l׼4dP!lθL`>c]Fw8?LT/.@q$iqus! Z(gbv}OU kk Q!a@p%`Wxk}9 N2U4kc5 k6@Xd!Tt|/hRk"5Zk q'uy e`7R+I T)Fڻ'UxShoS>C{wwP VghsB6iPH~"@Jy;S&U}H㛁ߕRvQifU"dɌ@4\s)87SAJ% $u݇}NB:] yJHZ-Eooa16${|e#1NB^ntcAR{6Fj,jQ9,nm=fձ7˳;3}z5OxfO?Kbcʯb|)ruǤZG]@j$tUG i?hů~pzR$a .)K{Ś:6nt:wZg^t -0F1:DK^w!B!9m[sb$j $:i&ީ-[á%GVJ xXgZ-P}u܀R 6$f{G{)B*4n]@הۺ6' |c-dVݕ|t˯21Ogn:7=v]W]➳V\龏__Iuj,(? W䏟 CqX Â4,怢RP%p$Pƌ,v(f A @A1j@T=Fis4#zhS-};t4D0\앏j!ܫ ƌG=w *elYwKeѷJ/h#ZoCp)j>Oyrt#F-}!ҭGr"7=T|,S5M ]8c\*cׇcЇ|J ,51^0.@6BYʷ~7M>9OS^Y9O>0ُFxhćM i1ܳ rJx:?ȉTğ(0ʱ`U r"E8ʏ֡N ;> (Tm fKI4ixg) wilj:]mug#f1sdwͩg>_O3Q't=>~[y5hDp+{tEBsh磍V{ ? `lSK."ؖNyzܫxi%k־ڀٻ9n$Wz6-އ=~X0Snid/v~FVud+y$5!jV& 2$#h:m#T.j,Kp8Ҵҭ\K8rh0RLA|!Q& 2G41VA Q\aBȎ? OȹPYiDB:c]^p;D|>}{)'En{ŔB=Y",Gli%T%\ k<3oWAUⶣdB$5rxA]Q;ι\tkv-huf;YcjOX7&+Z\ RN;αnl@f-[TVb·h֘Tvo}>L/.,l2 )mB%=k9B0GńJ LP%ёQcAK3JAPA Z4Ɗ_q_QB%ݯO{ED|ЩYt|%F|f)A_V0[.)C'X]%)o-hu+1B~CtZB-Eml0A(E,x<2囓ص9k#Kr- d$ :40l`d2GA iͳfv~$=&iHϼƘ S`KV !d#lӚgb;M-r&$1Dݣ^qՇ sCΒP>|v(";ɏI2ƻ1=^E ;<p{q=aڥFGȗt# wt ytIT JK7Qѳcgɸpydn!3<$h<.1wuIߤIb= +j\h;G`L#j!aq>>Ux60bn{4ۓ|9R0HqHCr:?gSK?LK#p\g$8%\HzGpqm=wvGI\ )O{Mj6ȡzԳ9%t|A){R`R I7(&ZW<0*`BO-UX9pUElUu$*=ϗ~(c:4&e96ϳHzMɘzN51a/m-};'=bςGUg jBϵ=् 7RTx$i#vmc'Vѥu0ƽfOb(Վw=#eNë5m<ލOWN*mU=.N;2?q4Y!<~թ%A*F*?2g}0tPeJ";HzAT'R/ʳ#;d̑ʳsIw^_<:n3m^c7U᾽hTB3{<7^n[7wawc{)V4^^>ctyl@RmdM#W|YwЃ ឮi۝OM;Ϩ~U޹|GePȗjp,2XԵKÒue-듇*䪯nTH,Ze_"ZG؝I[ZDIi8*EY>I(eb,zKNx>KiE MIqK#~vD36F;zV/ow~y銎Ă$w7NGXvB4~t0ϟ`~nʇ ׋۫˫o7w?_ | c^7/뇒RICVwwPفMLky7}QG I)O{TRm˔wr L:h|HOX.?-yT%䏘>t&U1Q+Ree8I[UQj%ujx>s jBq>VSUiZa"8RicML-UkbRq}plJ0U׼Sa"b /$MKG|ѰLU(+zҴKM`aN*RSt0ݝ]9<]Ϟ>h~?I99SMnrt\FxµF!c"`MtHzY'6w Y%Ym q{B'^M9{;\EYTa/bf`&Fbm`VI<~SP`Ņc[b`:uJ 0@%HjFka#m\@a8ՁIoV3y9+8ϟ>Pgy#)ԔZ/Hx[rS~)qj}|*Ym.OZ 䓬$b ˠ h*ׂܣ* .3P-BX l9YFUL^+{1EU<4]lOXML8'-'/]ad:psi;[Iecp\{Ll!DKJ8N8-(#Zc`/|+KT(ECR3_gڤI6yw{!4G§pu?H7o3DQxZQ+x~Wa&qK;gp?92"wOu ]x uҼvDhzcс ]BhbzZ zRݬ.7W\GZkKD+6y#J7 Nb$ /֍fNkP3ju_0#&&HD2W6H0>B߿^o>n2zWo2?՟ .+W?IosA/G? ]ֱ1!1c5ZVԊ@g#w!Jw+vaKS9[vJl|QǰUبD":4hFIxp""֜ 2aЂm'[bmIW&l 2$l X,tt x#a`;`!ilbNRrz.IN(!"\PG 4F|QD^K#,ҋ< a4yQɍys &bx|_ҹY[V9W@&F}q4j-qR )*"7%+]O!}c,ryDD0/2:080}HF0$F ~O>$19^ɳzuD\ :D9!vRF2* 'Ah}@y%352x G;# PM]wI,zR!8Jp+ E1,zۑ04 (LX%9 ݑCMj摈<5a(LZo`RjQ)瑊+9&x@^_D.|*oāt` $tW 0#5Rk' vY P%%IPLK8l u&a4e$e+[դVg@5'g- !(:L}CR rvХct9nkdz_7!O1<54䅫hNV5߹]MV [)9S:F6/ 2k[LVCC^TP'f#q%X:P#\[ FEKUR/EHZ(e]՜J7Nw\Ge )13"%T̠uF -a]WvH=uz e7gؓ?nrz2d5>u˰V.ј Շ[;=:PhXAQ:8\[lE1F(Qz㜤͉1G.XHĐp$opH-~z8cIR#U蹡9SQy*f,њ3]HH;J ZiLJ9cΙa*䋐`ԾTxBq$|!8T)C4+)9HPRE􈥄j,αғuwTVHޔy 1JR8FD-XIPKTAC^@b+uӄºb:ctnzE ˅[1֭p-)3dqfT3Fj<9F#m0%Ab6窒Ge4D*idPͅrX?]IXa%) ge%pLxBKd$54䅫hN1fojWޚкb:ctnS w3kjh W"}sGFbf=GGZJ"2xqCD  (g[My\;Ĺ0)ED߲4̡`TQ(Iφ!t k^*HwLX9Q\g!G{tng|Qgg}VlNoˬ3MEPp{xip9OKF)T>1ƌ!<ta(N@c\=zԷyCeMנ0Ug #2鴃uY 'CYSqmqKk22RVE'q3\#5h hvq2Ə]EWv6#w>=p;)ꛎ3nuc A0E&$J#P Iv*rI oZN Jtxow;'ropl}+<컧h7bff\=:=$Kَhb`MQhtTJFRG(iӜo6.ֿ_b ^-6oV˰^W˻t.&"FZ*EE|zWƕ~+G70 n{U˚|Ȏ[Pޛ})B΋$@o5Cl{6 )&8sa7( վ(}6$POq2(&U?>8GV*n /({ vdJnn喨n>CZ\JwN|)/av"0|JKVIR:Z $8^2D9 A%R &] #V V%=kVv 4nV/hS25O%iMCLT?Y:qj%P]ׁx N8 jK J#=0OU"ޔ4jVGٳrX؛jEBU=P*e0j!Ɇ-mKaI3,5-I7z)D1He%l<&dvF)-U1-(1YзV0ͲlY6  |&5 1*W8F Ӷac:c y*ZSdKu|ZR rTt.m:K &YbFs[ y*ZSCsTzzcE`gIj8 a=l`P: 4j1ՌYɅd9!ct򓧛? 
)~rk\AC^{5M>n]qyBVAꔎźMK/Zhu!/\E ŘGDJf2]N/2~\'W=0F\|\ț[3|{v?uDI/pi$7I ֖5H6DdІ_$k~}~na~4tUͷmaXr| 8SOQCL<̊Ս mz:}ނ737u-8C0%./p) ]g`hs 7J"۸H.:AiK)LNR ZɤBܞ_kwW|m웇Ot@rӚh1 7Xy#=W"QmA9_,0RE$7 w<^uoz&?~aO&G9n udB#%Oo H 6tڙ)!f+j^d]ń깥n(sC9n< ng˅L gW5Yc,̂4o%ev_{ d1f [-o}3ƔVa.1>iie?+T'TIEri>^&%;7(%3=̄#ZsP̳A!olh*K+3 9sz+3/bxø01HgYos>|NWHQD3C ,ԖF1YO((h:|F5e}j0ϨT}CCh,H!F:3gPyXUjfGt 6`d 2GA<|N5V{mgTm@:g$C楨818"PZ'?G>csz<82zOi0ƠTH/r>B7;s'!_=>5竭@ּ%p{jMʫkyë0-VG1f 9-r]]`ņ8.{7gݯ q~2ȜAoGY n }3Z :_~(| <ܓq^ݣg yyXS•kw,_8slQH3uP,9ͧOf #N=UP`_z@n76I7_廿vN˷M]0dD" bOx36@b f0aShk뿿~a󦟥ekn!H(^^zWj.[ԳMGaw^J[U|즽(S]۫&{vQUPuw s9Fby|2D4~zSH;)J$=Z]qH:돛X878z@9bjuס, g {4 T/^Zݬ6gA* U/r?~&\ ,YvŞEW^7Qg7۷y|1^(zqy57O_6v)o|d j;.(!aa!g&1CS(;F- !#R3Oq-6)$ `4X{^"QzuIp=G*Q 1 $'}":)5!Ǥ%JW(Mu K] _|THL쿴'oe=S2b QGN}<՝N@q.ݩPO_Q n G Dtl8 9V̥"!P+RqpT0HXXZ$ qS;v0y·Ym9ZH QeXlhrVʐ#֫X^8)Z3ݧZ+xGGN'M=@  QJ$ PN[,!dRF&%SOa*eG_:n,0O݆X;8h?K`(#Y+<0@"! lHkpZTb|qkM(dt/qs&>yƿfrQ3}_~*޽~߇yL}BxbzѼW1YMu~y8uoNjd~[)r{L?D=v1hgM3U/gKsfI˵ c!ؔH)qA(DڮRV, ێ}6ו&U:ٛn-~fL2іqP{0f:Fbd/wQq{oڍYG5eriA[f 4~H FcnG8\zy:|=VczIQY\Ysm2ꖰ`Im ǟ: yK{$O M(! A7=<88f:0gd+s_mrqٯwkOɌr fWpK>uq |Wj,S1f_-0(6C0?;i@P0*4.L͘El&"|o^a`ehT)) R+ܳZ((km77Go(~sBv0S0%C>t{l)o`;cOyC!Xj|~gnI$;߯܋}yCQ.DBG B$<SJs.ť*D #ǴzDSDT2(c|ѯXl%RŸ?R sYjHy?^{\м,elP12iLSHL EL *:yy'd UR RJ~ȩJ!DCFU׾ĪhMQ:y0m݌uBVA~#ǻrŬՈ[ۻ4ջ a!^*T ."dK a BKM15E@ e!j'\K͑ӗI]+ u9;NiO5thMcb|F 9Zᘄ2QBy3f I;@. LO > ]Wl,9!]V0 vCRD__v(\vi$C$a@A;Pmv~f]()?F1 |Hk_7qv_ ?RQtl:VM٫siW=vyA{U=%mFQO-%c-j"Rz\"I5ndO^Yx;Wqmގ;2U@b]O[W)ԗ_DZ @$u} LHcv~>=㚪r=‘X站0O&uWK\*Jc,VRH.ԀeE-G*☣JJ8DZK,A5ˎ\~NoB]pvkK"w$8So&fw<a15N֛h:wih寫%f UhSxH[UbU K՞al/Օ0VjsT ^%W:N & w̋)լc&)Ҩ [qx&/UecWHJ #T*lU*91 |߇ S1B0D^m'=FK? WsM&hJZx%78%7yn4Wr,0 ZH5bK5nLfGܯ7Esw9B ѨzGS4K m\ozLML|(f8x=Xk*LH=buj7 8IylxFYhcR,!}f~wju{r; 5 ].`9,ǫcr T=E7erOD6NpȄ;2RRq9lv-ٴ2 v ]6=m_!CaïwSTf8CݰדN2-`\JStjv<] vO fRB U:}@wstr7%6 3<xWsPtTbH/HDž]\J,2T0vVfB8 %z2*Yh!$ G^.ZDټAvk(9|C@IZC .-C piE1EUeIe|C9C&.,/G-Q36::Yh'. ,K拰? vwNVR+oq񕔁̐!Xj]edG{gl 9S/9hD* 6`k˝y ߫ZZ{y[ naLS\`IVSf1 p`HZ+~!էͻ9*f F>5( 05)$6,@BhB)H`J|v6:yg9ї <[q(V2nJ(8I`bJ1s+U}K66eI|ܴ+]cYW-,HziB# E$*kubsUl!?#}(8b*Q]ӤC'!plŤdTIIfD$ǵysIk IF 42sT(=8["tsD{"|-F2Ge'3Ij[F5F+9*J"lD Rs4[г$Aıl80o"^ FQ\j"oADceMkvcLf'Iht_O^X]ϋPdbfoQ&zy5o?C#k܄@Z/[0v,v2Bڃ!xGIk=rK ~ޠ?ۭ Xָ-oSէq\M~⻟_M6 Xa5mzzBёB1 u][wiWWŎVG _zV}]/'mu̯) ~nZ٠wvU}l_w>hon[/o98n2Ect/AV!UQ)m)_ՊV$p2Fx۱ڮ8i$WŪ$Y!IX:PxlѪؾk??Qѭ#QhHHs"$RgTiP۝P/߄ڟ,l,/w_A΂5iE6%y0}62Y.ݩ\ )SBD' ֤?~tgpqMyuϟV~q{ۄ7!<͙^a.yH,QBӗюaL?G N(gnw[ݛ {1j\֟ ypfg`KD#W*WXC2Ӱ}{vXgawy؏ȼP}oZpfB$](vRM8 xt:L,_Ȫ2W`!YPQ+,)ղ¦P!sl0?ζ/`Lj4|q/YsD2Xz{j_42N`Jg\ "L+3pmQ'`~K=`v$F)uax ϾX_,?,}4_|vrkǭy'n5X<1tlvϾ^Z̯(x.,g†~3W%$|-/‡O.1"\7JN4[SlUnӃv2wRI;dC'Bx}3)N E2,Ze5#Op`vIOA0S"n̒L'2(\ ["6wr:'EmGAhb#XȧLqtw/HPH`B4)ׯ(䄫(ͻ>|Lii=^X"P \iὩ9n-' we"l<0BNy$$Bkild#\܅!pz?2{jp*>S:iiBcoӒZ9=xC*X)u]ꠂ.ZӔֻ JF VNDɁ1>?p` U_x hC[U H]5|rUgƸX_1fZ 8.#tJdF P3I5]U*p娀Ȓ:L+cT*(Z>Ү0֕4p)E-='^Ow.==8d?{m K/9N2UlT*N햝%v0DD)$>v4DsHWGFwhtP tD` !%;鵐 3VN}4}*#8BN_ȂH\^Qot&kVkϮx4؃3uduW@;`^v4exgz/υRJgvvFTEz1!&^MeL=&AdF:UA#͂Mi(jLs!N9մ+ѣA^ϴo```2eXT 2%s=ǯ0~1~p+ZJ||>8#xcõŞ5ж] .$[ _l[ M(#d Iɶ$u2|I}r5i['!I',lF:Pi(Ie'ϸ8Y_6CkX>@B1dOB\ZCldh4AU&e%*1R A\9G \:ơ>Prk1, N]a%-r4ƒvS Na#5FRgXbD. 
!ն(7#0]qS8z4D01pSEVRPDŌ6֔V&WA8#+܄:wgf3UO˳69(bQ~O6plMG_>{ 1ZϫeO+TSG+/iE%~[y\i5ct5g!N`Lg tw7\8sg?/yXnжd =R ??3 ʤ";_-(-*;U0q bՊd`2 +lEWo'puX&U͓Qhv n ,x7׌MT^8(fF:3Ș.x3N(#:ff`5/NFyixHHb3D2\Y*%bo8%e-%FsP [ LgG>ǻYS<χϘ$V،c/l;vיc^|doRxp`M־'=QTpm J7 %[XwkPHP$q}A pywj2U q00}]ꇇbʗtnarRebIAy n\W\C}U:u߹VUZӴxV PVOvnr6J$7Z˻f [)k-6}-5QyB_,p ~p w1LX8\!V,W3~m1}3hEQ7l"c9нH;y "9W0s=Q\3jc.O)D7%[醧qu1{; )3b]1LBe%ap޿ kt(ܺX.ų_o$ixǻAO1"rn&()(=DX $|/IJB'VOR^sO P86Ќc HˇNRCBiX0f>!"=ͥ_G1iَw<1{*F&H>x}Lԃ<(n^P: ?~3cqp,F$*nAff7exo#iB\ 3!kg:WeFpu)) Ю y 4-j7>sNs̹dgVb$B6++F6)yoJħft1j(J:DI, w}D[-Vr"~BJ "Wc0ǹq0%q%hӂd7c B4/xНnkj+ՆKN3ډZ { "a*\ST$@]R P)B HRXlEBS e#VH"gV9C +@M q䏋BwHv?7dkN$ڛ EZ$}5>1x+8|-%Ƨ̿#d& ) NңI0~ZbUD&1ľ lGR9%55KIj)q\ :J ,s:3+%eyNKiLSE0,&IMi,`ǗSa rpjoX[fF3m9&jĖNk I,(.-!8Ƥ&'-ֻ! ٜ$zr9?vjIan4/^Wlp猿j{rx[}mA16z/ο(?_ܫHTmݵ&/jxq&3®G6)xH}s*0g;N'eAJ#\†ɥt\j#4B km$n*ѶrB;VM޶StWʗbVVzm[ X37N| Զr!9 *uTQL*>{tt3](o-e% 0z2ܱePn*?L~ O9%d([ 7DSM`ᮼsvqLq@wd懺U_BB>>ۙE{5j7($BCXVsYfNfuXIJf \Q9Y ?4wӈxD aa3N,׎g ,)usÄd;|Uu1۴vXja؀XWHVF~PulnǗ⤰Nr_wc'$>HO&h7[^y7waO|gtjdEvp)-d` pSo-1g(Kga I2}ǹe! JYNцjbEZV,7V;xs̐fRr͸ %YiLelz 6˨v9O(UO7MOUT}u\>,JWmz1g|\ӻu/fϖ-A1g?gw+u|dQ+--p,Xxaa6%Xk% nU+ =@|wZnw[BƋ90μ? JFj2 fS4fWS5T$`߈[˿uY11mj.y׍PI5FM&@W~ٱ*΍L:0Vڣ! =Pn}d/k`gj8cQ7V+2iR~u;1:Z(a5~tz $JTwCjwt. P$ | lx.T:{N6BVIdn6W6XۨJ5^2ΗS7 8D,SHe@(;]bn8ׯ+`s 5Fhr4-Fr ѶԆҚܪ\?USpR :u[Ӏ9 5z+G5F0zI\ 7Н8 hjn4_tiCIՙao484BMkmۿ3I)]Wy -L=| $ VGo'$5-vhUO,Čyv ݺ6Yw,g=-Aˍ:Zn_ӋM'D=`}M."xY`IBb i;vT֝qY${&] )2LRu\:br1{K#Ze&o>đo ]H+LHD sY$pP)CKIM:YEl@Y,Bx)[a4VbU;:Wj482!4M"[nthfK%sE B܂F,pG9Ƃ,E)D%(6uwFɛ~l@3ӬB(Bv2H@7 1 :) ̉lk;$B2D2i 1ðO4 T;ͣ1 MQQ$%Q CGoA|zհN,cԦc ѴǤڳ89%΋;Z X> [<1TZTcCW6]AШ$ہP |F!>xa,)`dpB`sQ+,N9䡦eAP,{#26g #x{¡ a/*׉{&[wYx7KB P5#N{5srɮ&8`o${4~Q{,f]eRN4BiF,$R ,9ͱ*IaJ]X8Ǝ3)(7r5X$ؐ#Κ}=h)%udT!U )Ñ"RRC `C-`]ѝ$[WHtK‡ M䑪:xڕB>Akm>@ƂuVv?,gZ$^-qZ) Z+jk~y+h$QڎjSX u[ʿӝv2KChDӝՙZJfzNLgGBJ:/IdD$<CNocg46jϮLNRDxͶ+dJ>L8F3ú/¸ j˄M` K}h;_&V=9+&oG͑ѽ<<fgq3Xҷ9)!_(3w[_38X}|zY%:LAO/˳gH5kE,hiD љR9qY*vF֑?{WJ_nK/"diaH$5Iv~=V)QRy $"X$-52gC@Bϵ)Ӫ}U)JeKi\G> R?]7;}1 OuZL?6k9)DфtWwg;ޢ{+ vМ m۔ AtqKIeZ$:z׮ᇞ }ֿjAJ&CÁǞu<6b\%1M:8v"S0﫞T5>m>KiHɾ "J@'5*u 4TӺ c U+toeWLXBSe<'HH@_[ݦ?9Kup/2u}?Z=UC?&ӈ=#<0b#^]eX-h!7" Nda<'M RpP Ogn*B,֖OU x.q\՚q*~mWɍoԈI5"l6%$E"CeY>6(4R1Ē+EKyDJs5P2֝5FGyJ#E@5r.:3wJB)m .dQsKUK;hn?àI[X SMq ;*>q"B(*es֥DF@Z=ܞk]i93rLjpOxKD׿dYUw^OWa2#4p3-׃Ug4gY׬ܬu>tpO*`OPgW R9%@V/}㾗*ʻvۃ)ң92ZV 6QвS3nj3ԫ Zӫ"( ^7қRRFْY%A^*̔9Qkp%9Ĝ[xEv UYjV/u'kԌ‡Nz0٨\1W"X$#;˵,9 y^tZkJ)˩LP6N0E*;2%y 8g_RϘ7 sZs7LX SeS%y>nR.)ϭԎjFv|!| pr dt}Aw~RId-%)+AvU)zvᗮX8K dEpr9x.ٲz[6@r]?,<&׏)}Go Mr_.oD;[-~{q6;XMwV@Sd!:(3y@ ,k|:kW%1?&=``낍kaiLT.ƥx\^ş?GA 'ѯZE> P?imFHO0N Fd\ A\;fTΐbN^ mj+8@r6AbF +lw1R#- ύ@fL XAAD0J=#}נ(C^j4 R#Q0^TXa΅R:#PbN%Zb]}KtyE~)%2㩧Gu\{{T8S7-u}aRz KYJhOq[CވBzBm "A!)n{Qu3)yj$5ScVfe0PsF"Eηl"VJ&EooRT'oTgGcԽ 8ء􊜱 B7 фkR˰f+E=K\u#B`D9~Iю,AP_оԙ^GubdT~r|~bp䞛'N;Uk^Y-ztvu&XhDvNtь&P9@3BZC_7 hpH:Zif]˂GdRBS0.%@F,nnhI#l߬$mQԬ_TkV+SRۮcͽIY-a2bH7AB)>\xnYᕳpI= YФ~y#5AQ=Rc#9PR:Bi\Օ Y'T9%~1=au7)~NsF o؀q̻Ph]@_3,Sr%G(;ZuAxQ 糀E΄w5Q@ 1V1gDN+:6OӞڑ@xš 0eRwMHU;p9,;çahORSPi3/{ڹ }_]r%a؟ay}냃tST5+Y+y 'sg$!T^$Y0%xk6>xjCISo=52(}3,NSUCYAPʾ%X0B)pN0bZU i@Ri樲`qJ8RJm%3 nov^ȡ' לkk9Z1CG6 浣<Rq>>'L|ˆѼ3>}RPDfv|VޗJSu)ccsܜ YjzXtwpzYgMe4cd_/5? dyS%K ]8 h]p Onӣ9_%'؉~Y9jl\?}M͋D_r[~PzЇNvnN'FWv'5Z7W'<#]',\GHd4tB<\ҊI`sQ }I9[z8i:YiFjΑ) AcZt;6(1% YPD!U8!sRV:B 3kY#0M"Tc9zll%8ilIsjXZNeI2*rS:i L"R AmeesjlCeI j*'W^[8mΚǨUP:XjD RBqEQrԉ Tr^ ^=q{x:Wjj+5ƫn'0nzf /~hpp_MbnܣXQ?ֻ!x+j|~Uy;ջb>V}lX%q8>p?*/zXJz>=ӝ49Rp? 
yݏgG w~pu졨RPVSb19Ehcy$c11{(ޭ7|qZT߭ty-z--ש\ ,gwUכֿ$S1)plՄ @8{΋L-BqjⴢCIp5YuyZx\h*ubQ9 ӎ3%"-S,/IY5%/91rSacR6e$\Uf(Y <VdL*7DhRnJ Tj=qrQn}+.ߧL!(77ANw{s%(Ȥ-6TBrTBo^9BeߵvJ1jnRLS)6Qe[gr+vmu0"v|"@^Iۀc{Op3w?Eǜ1[ޯob4L#ԫ"R0D⬞^r9XcCqV*dZKA{&RpL趠^ \ԫxץ4CA^#_u26Fj %7{䶉)akTRhR1-{H/.l~\rz #aUZ>a5w.[2e1ZP_//Ywaք\"AG#O )B32!sg}0ɢwfZ8J98vgKIL ژȲ+`u19B~"߇o Ed%д4%O%rC:t:An֔,;_d Ǘ-bsi=\SR-pF^.%~JɈpy a.BޛsBXbse+RKg,qjW,uBsmș"'DF{SJ6nS2' %8B^"lh .UQ/F PD 8FiҒ"$ (뵩˥ AJcQP~т2ޕFr#"`ռaa{awc_v!$nytY*i<*RLXRLUɈ/ E~tűV 8V$Rai[Q`+2`=ނsRŲĂ:`O bɊ+9gJz} s$ڄNf wQ&+׍Ss']1M7-kpW͏R$NNj(dlMv1*WLVЧF} K֧WFeɊC4Yq#/**lxE<KV\c"b \HJْAlْpXԄS.Ri1,sJTQ& Hhi@՛ `ʪ Wm5Yi.^ QΓ&)Z%{GK7J^<c.@U<~w~\Frݼcq쵕,i=)I1q- ƣ9G#+5O;&s"*&EEK+伥/]* -@45E4¼)ccDii7aFYENC\(,=;~M;?gLa]/1 -Qv@£ܻ^Z1x vwhiCO1[!ovCo%YMhS@Kf_ ft> ">QUTc. q<$"9 !9Mꄄ ٨&2MmNߧb`^UwZ4Hi»N'f4m_*9##|D) 1cl'aDՍr'#'Q)nM#YwWTtY~##;1gc`KW9ՐІoէWJX9Í3~.y*w'w]j7W1W_(U+ªѧLG-@R:"ZѰF)E@JyuGg{wH< 4cx4gKY /B~tƃ%~\KI!Tpn˳ ߿mv HZ쉫q۞'EQٛ үp_ dC݁-C݁1Q1DT3cL1r..l HM(Ri$DbUrHӀTr(S9Jp=r4sͩի;'wB=GC8X8ʄ 9Z>1f\Է8hh: 7:4DRptڅ[*y3IGH5+7 6t>Ir\IJɊʕ$]JK}$ɁыH+I20!3V '`U{wx}[浻ܛJq7 a0oF ,jx<sHUPY?Yf2#PbD~<% !aQoM(θv;cpt==[y =3˂ӗnsW6d0r6pu]oFV=wb sIXyOmwVۿm`ig Pޜ]`mpsUF:3𳷯nk\*c>_ 2|[̌jX0__%*i_0߰QRRm3:uJU.o<ߝV4j( pd^m>Wb"dumkB=bx5K]MW1z;A 8|5ϳǔZH(;ZMt!¬ԛ42Ħ^[zRLyN j{5ωq06.rwm􆬲R"5w%Bm80_:_wqz*U~A'i{i'6Om _8i7N)pڟ`ۂ X_&0WNkf' )Oi}ZDk'/+~9jɽ9E_N=YO 9\;trΓwL̍~`'SKuſ,7cU󫪪4RtAFwh! (DA48̩  auk=q!XGLjlJ,RlA &L-J [ (a6Mmh4zxWS72I$h`5AoxüwD[q];ԻTi%y÷MWOv0jʝ(HV5w잳;/?Ww DWUm? NjcdDX|`Kk50U=` x>zdō@%୳`9KZ׉GĹ^gTiԉ`%4h(5JJUM>%=0N)Iu]nѾF u^]t۷ /wA;GIz~6֔oRsO!2Z5yfѾ|YdV}#5<&XkJΪ+G*uV 2ԑU[:d sV-aIM6Ω=AK}Iuz/V +ac%?fx{{ {@DdSζ J5&Xh>_GȦ wS R *E+01_Udvn.NL4Ӷ+Wc÷ou|'E+K߅J~9i`Ag;Y.GkZb=3a M^ޞs:?9\#=\?[ngKIqYYC 98A|=wnۛE&C`ǍmhӲѰ5m#Rmֺf;)Ub޿]U`3mA]?w^%`e9SRcaE'S&= ?qTu 3 hb*iUS\Xn AGG0rΚkAw4FA9<(Ql {Ǵ[vhw0YyeF`eFc+vqk i9H =9].Cϴ4{hv?~[ʟ'fM#`s79>*5'Y1|Sy\o2[IxJQ.zF}6 h%x/m2 d\-MB(#@|[FZCQ ]3R Ky|VZaEJKe$e׮>neߗ*݉z4T2?5<}~}/"zaF??|~b$~:_b`j~gaLϟ.o> ^W&‚_]D. zX׳>K;xBB)"P3|s DPȘƭ aB?'tw>SK`./#xܽ5)P1?G+$O2- L2 B ԔgdRUsQG5B(XUN6y^/*/}wP+S(L5tKļsT^ 1^N !\-jd7B4ԊP1nnnh ๽y}P%b(U 04a(QĢfM6 ׍b7=Ϣ"6 [.tLiB j)KS^KQ28)bo4&|̰)B,7c%NW3h>~@'i,=ᯣV֗ՊNو…|pӷGsw3z.l(mORHTeN P歷Mؘd+cZIH3T;;&<0UK_TT5坝^JuŗVn4RwNj|HF0e@ :9pNM޺Z$P+AtF`VFXUKtЩbH5:hȪ5/0Ew 0kA!Uc8)a>[8,3!8MKmd+k*$$ "{e'*̴vNI1G.%gN1$LA)T#,WFqr3DwJ7P,+ww=dظ;Xi->F&1KFT+E%wǕ1&[8A*B-\TǮ p(ʤj:_.oB7j/zh䅁-.*HD؈+mBW!N XREcɆwoD {Yd ˝qxr חEO}Z-fmA4RaËN8-LD\\|ҋ=x cc6=ve(1YDU>uul"gd)Ӣ$I#eQ-7 @! @|*Ř;y>{{nBcu+NPR6 ?"]es"kn* >RS_l\&\&\&\6]͑9P,$WJJ0hir- @/ ,;EE/RP͜WUBԬr&Vh\UZVSW%B@uۛ z z}2 ;hC5Tn}Q04KDZ"ivא%@Xl/=$ÖFMc5?Np:UCCO;ZgtM;xJ#:hRx;pF)8/{L4y1=o \8?9 gu-vunmd.EQq`ݦT6|\BH mŽd_aũ]U7>]έleR+·UKAQ"_T7"YZf_~|\n(2bBTKPWw8 {0F{N'3 8̠ K]Lw%]5R2'Ee%je$1ڲ/}MhLP‰U[E]¹EE%K_㣅a^}hrB~NmR.>L vì$_9ߟ??&~x擯V~"M[bWC (08 Ivs@ Fr7_1͌>K 8!x":F;D[B$U:uE[̜8DZ;[t jCJ}>ƥl#:;ْ`D[.š]9;4(EfK[ nbV-uWxw>bb (Roe#%be63{dUß?Sl4iE#+._,9ZA`8}c5޾0V,E }*Fy_>FUOu1J,Ӽ"^ ̕W^ha!#*HP`la"޽xO1Lor\{.n;bJ1|-u2.浰@HU[r]S-P b7>+ӯLT\ŌH8%dȡ7$0#f}X`\]C wy.gMh<1wǼ>Ƨp&GK~- =2`*.ln?0agzaVʌ_gs!{.h Y9ZT“s"{k9 ˾;2y9(gQFEW)QbU{dXrCt)={ANx#Jw̔PGH{yGY_wo@Z4  O?Հ&[Iuah#тU(y4~tylz#LDbL6Yy vA@`^z:5 8Zr3P-y6`0s$<ĠtMfVTj&5U\̳V Mqqw~{o66EnʓG kDjŧ界O1LdKL_L1}bӛ%チ#1Ԇ֓q ^9GL;RʠvJW+KƗd|;{qbna !N8-LD J(@焄Rzrނ (SɀTԓ i49t>w^cD]İ".޸Cr]b)]Nu/3"V9Ӆpi,.lp88 `5w Q%#Ecj_]hi/cfC2 c}_[fs53H[ES{ZPly b/'(nd%`& qXHыZ01j{ V1JaB]N !\-jdA cd$V P$ⵚgEfcE*pAO2~mġ$heeU+GMGupu+1-bz}6wXH曗 ldfF=mmGVw7_$%d" vKuAF#L"Du?i-.d5Pk HXNKOຬ<{ݶq#!TUdjꏱ$CX`؁bU ۅbvhm`1 Q0YƲ1}^.8aݤ{&D2JE}(&9iMAX$R; JR|>q&Hç8h:sA]%Y_%<j uQ( pGQ%U\Bt;"IϾUfx_RM AF /5YU˪>_k0>};숎 y~_W͂jE,ӆE\ϰ\ Bߊs9rL@6N2`w^_5H+=! 
s9Nzн ;ErʛKEIK2~ >wX"r5ARڄ\:ت\k Ѷ9\kܦ͵L$yw4g"&ȃ}KK\e1qF+TF}a#[;69y?# V(>쬗0~*# $ +NUЭ> 3 %µ[@)sN!DBM О$Kv~ÈQ0)\d$&))fq!@NCh8N@m:=AUF&4)2aKb%Y C^ډ {gC۠H-g<ٲ{&mw3*RĵT$WFTrt],(Q"?8I-]LcwrP"PHY Jc*%qRySRe1q1qtz7z裺CPej FM .̑}~ )(vFMqK!xGyB fGj7u{X\0 qǁ1vA1e|2mjw>FVuPwC)?ݒ..z}utm_gez#C "p>AlI7&u?oyh5OOY[c=IWޖP la.0*vus6/s|?_ۼZ{u\s zYV)#Czic#H儅Gi "["=}9ڢG-\В[nbw5t@Ta)Yboo0>sQܯ@%+zrO% iQ`sT:8uf^ Gaҩ-\ vsfvμ)DyI-0)@*+VAʌ4WEFu^W̐<}ȞZ}HG:OPv k-u;xp?O_Tp5y!=bk\eU LWOVG3Y eVP{H^`( cWV"R11Z3]H-A3#9̶rn&Rzgʿn[Lm):^Uc>}[7zOd605{zkNa=enA93PW#A&5wM~åw1;BxC+Z{f$T&yj`S6,Bc*"nZnD1$[tA. K+q8vxbvWQop.OU#I3q/R2 x0%$}Y2:}$LH"}у7ϼ=cwKs{cgp ~.I5w:Nc}:nttL18D%0)5`XQ';Շ,dWhc bQ*%IQǃ4=aGcψO$p !.A$+2zGx.uI5Ne`>USau{Vbc}3}CY|ݮjXp+{kK2OwVϷoë_r7V )'s@z6Gz?~ɬj!9|chiQ  |ux#pxHH)68v`~Q,1; dVVfH,׏- 0F+oB$ؐ6=(u(*A|7,6i@UO_%Gf(v#ʮϼ]A][8ݚnS;g.a=_8[N?eյ '> ʃ:ᄰpN(*:c* Jr8n4-@Nk#)xgZ Č(/7Nd08!C5-!:1 SHh] HBZi.8`0EJug Ǿ!)FI/??23?ș&wh4-^?繶5㿙? &/q-X-o@$uɬ2+1.L * *Ǩҿ\Vs;ǻGC@\4~lKۣ6o[UO,?;%W0 =}O hy/,5EܿdL(TE%UP9rZ +"G>U.!~3|0w>._}2=țӳ& 㺸a?凇o' R;|Gߟ0&0ߖ7&͝~1GxNL6?__ޮ3D0 x|{vOL c bg~3cUz"}XH +$f[~.]8בzPYpDQDfv? s?(˰T\Z5]x\R al\:Qc,)DL ;UY$ dxn &Wɏˁ@FgK؝v佔$NUHOc#IK4H@<{Fyk%,-hy[:|yct*2Bx0Nk}@0!qQxBو)| 1 dzhcc7;Mi[v!Z߲zVt6WZG{jXy< zFLJz#[6' % ¾?ha\2ƎSƮj;i4W #:׆Ni]2^(%t]0E=5 Ep T B5hX!ÇzFn %&z)P0/IΡ mմe694rxIr8ɤp Њ: %}ɹ DN5$eхӫ`N# %^kR->߯oo\KffÆ6A'C*T:n/QO2mT[1iS11ݻ6Idt5E+PB~nu&hd1"=QB$*(a` -gCꜭW.HYzbLjz3/ ^K4?e2GVl~&袡'w'BMZ/[S{IZ8VZ-g w{ po%d8RbFۀ8:$`dUpq2ӠԆ!w`B W4 $$U=@JZ[XH(4[/gq:k˸dun|N0qawmq%7~YC,Ū2{ؼdaգD %G>Ӄxj[Xa {:ȳ9]n5~CCԼȐEz:/}l˷ Qx.< *Z7EzcK Q.{B+{n -914hQQYdbX P:?>=q#KkԓԨ'TCΕ14!1Rg+Q 9'.10!9F6e?~;k&55xPA!f#mRKRZkv/'RO{|;C$;7 !Qtėݏ񦧄 dd7X occ(f7b||UhZ_w[_}{od7ˣ[_Vi ;[uAOjxєm#0昌`7_h|D:&mkK$h}'^$6˯;ީlN1NrW3)>m*Ml[qjy`mqO;J]؏¯4[#ۼ?lh-ЎbaG̜l,=V$m\zYc =$+ hpanc&;tM8lx&ws+ٺB~h7+Thɬ>=N&y v<c1I=󉃫utߥ6nSQ3Ҩ כ o5R.tU7 \{)8 ;Rx hb`ӜZ9Ei0쩱)T-z:ťipWMCNyD]Uε6 em0R;-MmSd9_?!Bo#>ϊr!QA k~clAuF"Uȴmn|vcɩb˦:Sd]P\&`wS߳Qs zFpSAbD,-ѵpn4&Jbwܜ5,d%*٫@ 3JYci 70 j1G2{n̘KLg"dm445\|Kg‘^>fƒ"v(`|3@ɋ3&t;ojÿθk%N9~z(Hg|! -u9VWxF?1I` x z⼆8qnc+yMGMB>WTvk}6(_5]#V2"'o+N<</V_<nIAb2l(s$X4aC*j5jbnY}i`g7zU`7ݰ]>zcFU VV#&g)CUL 2Xϥd <7s<9)"(4YIki &TD969B#ɎFc*: u dJ] @)cX5M72RREPm{_yTY_2\Z9;aJB.vc:ZP-&\R՘s6IIŜN-DNWU>A*.k0SK"#)Ħ~%j^ـJT{]_T<ͧ{7cpoLHZ=F#1;G]<ۙewaΏ]Y$Ń#ohdh۷WǥWG}yl2hdFх 3RZ 7ҁ(lgEBƦuh}Z#,'+gS*cHj(ty{z}!dȔ4Ojjk茝ws]{q:Ҍ`h&fmJ*T欣45^dLW},`i%fcTz~V f,,*A3 |0~P=b,EIr4D@X# 鯫'gk 64XoaMmLOk1Q8Gmޝ &,'d?f={K%w(|kPԌZ+ wK-T[Ld5G)F50 V9(1ڬm)Lo&[~7c:\ú)cWY,7Ȼϋeg/o^/[L/#EMW4:AtG|դ|` Kb]Mg9xQ_u/Zi^ʂ(2,i>D㞨gkHQ,ƒAEoXlJ1;ѽ)T IMɶ#_դ.9S4xd(z5[)4Z@u 3=eQףt@ͣ@kV (QLѱEz>[ZKD2.G zh[ 芢'|Pb檂ޓɒ5/ך<<8&ɪ#*ǂV}!jVO2^ cՅx>V׍YM&6$^rVktgE 5Z jq]*QEhd1"O!3=u1/Q 7^LwHE)UUMDtC 0d.X2RyMtV6A QL3sNjMw5g?~ag+/=9~|~y߾y?GUyS'˦ۻP?!yӥ7M־8rAt[s(0Og% C0kb*w ttŁD\dpl&QM0ϟǪʬ-*gE{Eңiz8K.5~E Kx fo<⻫tY$?|Y$y;v P&OY7YP#v\}{5XO!\Z* Ȑu2U!7^)t#"7bSXq B9[BwIt` ;3~OG`[(6U19 Jb U,m960tGo&gpC~yfJ?j'*|x 0K}؄).~N=Xs8.];Qf;Nz!<^T7/Fu߆Zsw4̷D')msyBGfF'Ƚ <^xOKaaa 9Rcm;!=Cǁ=d],x垜L`bBܠg7CeOp׳pF=Z>,kkwH} FQ$ CzPoh~Po?] 
ui ȼlWި]Ng"׻U W׵a_?u_z=_O/ޯLcK+DK50J?u~ ٫ޱcݏ,ǥ/zqvۆt>_n{\P(_>HKYUp7fR)?j9EGM펦j89 1y/5cJ3s$WG––sV3߈QCtbuMR﹡$՗6h !E6%]K6r+ؗ*_tS;h$͌FZ; VWϰ^n^v'If~DHUI~f?RPDΐc6_~~խunhP&8v.[xԻ?ҁep#F5U&{3Fd1j:kJLفMj (jqWWǻs11jqx-GulcɽW*JAv䇀Kgөm(7(^Uitz}(g8 *\ήJQęY.@O/.Zmw5T_h)XuMy6ӨëvΎ"q`5rVPLJv hWg Nt9/W)~fni|S$ra\vЏoe_焳ze(DRj9DϮ>hBhЏC}uNNoBo~}eq)Ϛ&~/OZoʔ4.A>cY]CThue]NP%+گr/;g݀=cu&9egQ.4O1 ]p<7ɨeMKVcob߽0ѿ|v\+պ188T01PҝKz t;y_wo>/& [9B[3Dv[E?&Gid!Q\k=.БSL3߇,&p=au%~X= ңMI Y:'uuUo?Q(E;`|oOALQ6h=”gaj?'pPS /L2+A?~Dg14J7`%bb LXؠ+焼C Z[Ru)Ev9J`*}#8쀷x:*ߌ,#rB{6պծ,~W5_XwkNih{!g9`xOiA5UKv#i[f𠁟y]#-9kw~ލwuѬԐ~[.c![Jh[.E:w*&g}KNB 0eZӭ:^#%/,ENx;p "rt)Ԩrim AUi 5Ǽbh"xh5,\WV~\,a4P}q toYSV ^3dڹWg|CrÐ9|,ɦ׎jhikU7PjBi#1fs`%l|zoy;-~?Mgt0Lgt0_U u!e1-6vk۾*9Z諶WO}i|9(A3"3LDٵ<5P*dorwY'詋CV!.N.ց.{x+;Wnвf*J;Un{XUk{}M{O+2zNqِyIb&U-G65MUB5(+5_>YhAF)C56Lv.fVLk zH9P9j!):R(9׬' $ u>؃r,_4 u ׊< Ɛ1B8:C Ia_I p=@n ȚeZ[ ǥ񉟔+q49etD\B@++[$ʺk!}>vd\~u﵇Ű:KCVQcug|}ze`|S3VCnf/blq@ ZnM-PAWW ݂"_g,~k[F l} JTDh;Nm ;?Lw 10@1 ZNcsF]oĴK}"]NeK>.G~blIEQQK/; }V`F֓V`ȏCu?#ncr¿t7lnЏoІrCvz-RvVjyY=M ezV?xwyDz=hM876vu Xx"l5F 켱(kf(֥WtsV"3h6?>v;?ǟL PeURl/T1 @OQ/+4i3œF4`PmD%ߋ͘sI|d_<^"<&iYfŷn;g}Fd1Q]ӻ}w~/mF)\W\:y)J3_p=_XscW_sB]]E횆;zM\=jB [){K5pŽa{R"B;{B:<[^ d,bU2s^YLAY&b)BS5( h݈nZ[hG vVȲ;ߪ&Q;W? a < a0&H4WMhہh$q̮$UHSkմkn;Aclթ/J:gcV3]2!3DG_4`qoSoդ{۶zںn@rԳP遷 z&t}.UMtUQ>2n^mJ%zUUs8QQޓ|;嵿7HVY2plb?}+v)vUk(RR. t#Q.`06ffυg;Kcل2Vo!lIM/j]~=}; ,\gyV4A?3r]ÁüGԨ%ð)k C6LO\! g ~wCWwa.Y%/-颜5{FgUw[?sJɣ{7ӆph?>cI'z8oe/߿?֨ZA/vKy/}:6wY!ï w K📳eY讋Ԃ(7D+J[N_l$9Gr"9L'⬳[Vv/]Aɾg8+_5ٺ$O29x_ځgOO^J:eG4dFn?R&,&x7ڑA2n"Itd&RҹIB\wJZld90jnLa).!bJ-dIz J!uף1OSA7ޱAiCx b1ܜ{&dmTXpEL[|wl8&ӖQsEbWfn4>U+ Js8ۡ+:{N^9p&ѳ]?dnHv!UBUUUc2=WM۴ΐ5 2S-2kНiv|iJo®[V*e+]jkX1m%mӓs)6.EJu"ij0h4`XI<١LgXPi@k@H;~ j[;_W)/P[%Qc{ ~El9GL`zA\ *h$̛8k͆DSa{G$7,5{I-lUUݐR*hun.[NԝZ#XM`㯏a%q7<ǡDAPi!F8?Xq8dS'rrXM@w``+NYAtN)ޔJREpւodݜ}6,pK2N7"KݓH8{~@C&Pb` p^Gv4S]<}s':= dͽl<:8n d{ZUܷV剅E@@@!L}OcP;)jvQ+ޱp[UX۰iA3 (͂V03m ߷ ;6ЈgmYx-hXd)e-.*97f6#!#cmSc"LJ̔-.&i`) qn}TS6 S:ʖnC? 108Vmg ZZӓiZUKou[]LFik0VխR_nn <?$GR=|qpooǺ" !C[{z wշmc=3joЬQB}f%>W+pcu-X#31!Y`3Z%+Jh,%`G4[~~jd=bXVջמ9yX@'G{5U0=IIVQ.mbٓ_Q_*Oan&Yr# [ƯS\ s͔{W jVfy~_,HU>:ʄ##ǡĪE Tu~Ƴ)W是ZV!Jֲr7ݗe+cYyVN1CBw5푁zW19(qq3M8uqĘ?;mR8^)歜Wv^:x4;;8]&ů~+!J5Y֥7n p3Ʒxǭ16I38UU0 P`ՆsE#Aلգ|M|hK&o`q`p[*.YQ6ɮQ&ϼ | <KLg!M3S*6;jNU}!޻kJS]9Zs(I ;6=~ixO v.˲Kq =Հs t,T,{˽XASq6D+346Ei6!jԾ ێj0(Q7ssGiO0DC?#L#nl+m+2-X.zcXYmr @ڪԀ"3O`iꋒh/?zFYP?L\BruRy,Ѭj[;@5udkkֺm"hx9-SGå~Lʬx?-2`g$M8i26tP[@6eWGAUalG)\<_:#ؤ_+5J;+l`pvB' Rɹ45ff ( GUJf,c7us llL+ZD,;o @ߓrtzSJ}l+-A?>~FJiCORZ}qi?l9 o8pUjeF{eX  V-fuIr6^r^r{@F]r-8l]qzrZ̀pC6rk{=xF|].\huU/r_ pGe4=XIBhWw`0YM`8M0b82΄8U ޵6#bU9=  е9$;_Ev,DNFw"KYL&Y'A0r1?Q`ةwGa^GR.*k3,LR2V"S,ω,UX]}uy٠(J-snj3>,s*rc!-c'RH5E_WȤT<5hr LMqTX1r%O\=y)mnibiZ ?M(\tsaQJ4>A#Sw) %ሢD+"/%m6*/ZȼP¼@CivLT:WΙP7JH@ )˂PY*,e#+ tY4ryNeC~')_¢yD%,_bb"R\+;3}k ʶٹ{"KX ]co!̑ )گEwMaER1SsAJXt#Z_.aRArɱ"N7ÞvT{Y|K,wZ?U4Zib)IJԍ fVN7N粕O9n˃y]V6lQe+`l vÇp Te+w|ADD1k1jYl,@tYޓ1EN{uuUErGMoWaX&/7nGW.#T :y~ߕGVj @/7?F?yAfklj/mV`Mf6Kk/_Y$gR׊Ȃ親2Ve0ȹdJRy3 u3j~oA3K1L`BP5O3jn=bu5[Tch8|nt.ҥ iʊF iabbPPM^ rZS$\5rTiE0zMM}m9V {FP|G(¹vv2qfrĿs60i3ak( Ei µsiO)1w4JBQRロ|I͘)oM}:G96q9Oƪe&|a:f- ":`kuwI2ɄH wZGB`O'C%IpR&O "8)%$ $!AT;I`*Ol%IrZI )G!$"nfY%WYv)Iuʚ4#Yt>am'è]F h.H+H[G>?4+RB{D):t{Qoh9`SMS{( DrP'?⃣0㎷n>v $>2%D]O3:!vAt̝G(sZNe:NfV=@pq< |ъQ>'D1ҁ4NP}&:3z+yRN,_c㤔cK}ƯM/J,L; ^k2?{O 쭿Q9*GI?GW*i*! 
fW)2ձ,4]O:c.2jz@PP[;c ?QETKPTG 6cUTkr8QCL[\P!T^]cjzv~VMcDʘj#lqc?wSǓu"]Oob)nV"|z{ypb6a`ak͘geb۽ZPQmSȀF_2]~o ,/G_?j]wLLڂh^R Z1g&BVO{H&8_!| Tig`Y=X~?az1kon їO{zXhu~SgrQ\חţ5ijf6k+sRژpa)c/?:&77.šNPd;a_n-+O|~QOB!p ٕB+ys wG`t?Un&UdXYs`RxXڜtXBVlU'xj>h1=zrBtO2Eo9s"jq8!Izznb7(Gzx䴲#H=Pf,÷3B Nq ؝xy $Sj>-6}3Dn5OO>p6h&u.7SXIʠRl'HعS ($)ѤaSMIKQ )1HJG9E_=+WK Ҭ)khl,%iEY%4ȋrl&c h>#EMcZ Շ/!m6dN42wӒ s?3?5ާ|  Z0WDlwJP&\KMa>ND@wvwQzl =S V8]J%PT2Y]36Sy]mp& I jr?S=`PORT:Srpϊ^HHz9 -TBrz'pm`N4(8 y޾}8b5@>t M7ipBɜAKgzSKR 4J6z4N]RqzӍ.[^CP(zn>I-NtސM 9$]yP21$~>ȡlZƦpXLzs$,1sze1b)]oaiS `cfBJK!OKΟoČ@p/v3h<=2?$aȡ&q7RmG$)9d;-C dPyƠf’My>*o hZ8K b<~F1:A 6Gy}8m' 'kx)[5S鱓uC .O{'d ~h`W?^m@f8  @4^m@ILN$O>L%˼6 ԑ8/ qf$n]3ik\LAB"ܛ>\91Ӧ8+th^%;M$S`O%YE 1C=6TEX@VA}2Ir <9i{@5eG@V+Z]Nϫ4 ń9< HHuIժ? bf.3FL-&Uz4?ϜC,VWE'@<o/pN\=5B)a4d-e3\P\J$XB(*N2uJ U4gSX;Xm!Rv)e, o‰$oY8gDҮ,~UI W5ب9)~b΀'f^4Rޛ2)%ΠJiڇf] Dz{f'ͼ hG3x(djfs%n9eːwVE*q,2"wg>ni0A"8rl$"9H Mr')"ͧ|$< ${%@,$=3x 2G')gȱȱ."v $>k3xNwW5AT`eM ~|p5‡?{'^mhvhep)9'9=IǀYʤ`>[P<i2xc/tȳ xZĂ-Ɔt*OWLU󓠺Yt}R:+NBjHAbfQw*eq 34 $A>Fz9^j- JKVνT{w~~)m֢PCMLt: ^Vwznq%|CBťψ54.d^20 1[¹tZwv|cgd;=^^rhB!;vKiG!8hNޝ`V򙮁fTET3J*xBVpMyu1h+{ ';1AbM=T#^8ށە2Y?.N{/}4̵1`>W 4{@E0ͻ}4h>$;icTRu"0+QNa?@)< o{Nӫ'hPI}^x G</D8R&9\ke\ZCri m(md3Z1Y̵`@QLj#(T!b*()uXJgE'{ďݥ,@J']̛,4&=׳ۇ˧+3e]Vw??pVzuOj_Vl5퍙{_0 T m/= E&Mku5/6C*;kRP eF\d,Gm ZJMR;? xϗOO|XJG*T%jR\R^%$ ƜHRJ"Jl%T{D \9V~sM&wQ[q6xP(턮(Hb/Jp:S KqNm5칟d`L gc-J5`΄Ȩ?s}75 MvMmnO$A(Νk|8*u+YQj /+!־~}W|b|v G2 +JʲZܷx m4nP4AM':*r):B5BSes75 6o}ZKýi\绒mF:1#Y'kJ-뵬jɍ|t}nz"5zƒPcM;X_o}iyAOCeh}i lrw(D{@ Zt2]kJ팵\t%lZ=~~_-:pEIWjt9)U$TST|@}/ޠ(\LІO#fjA03dr4=F~zNjF(Ѷ[N$(uy~w.QS.-:J'zw<5eь7DHɆ8`둜KO dHg|NXTP;ϴbPKd {+۩7 y* F]mݪ+#чLo_?~h'~Θ03%F9fF>iifY/]厯6_PPRyֻ;B$>M;K\x-.q!PV k+Zj[N% a]5.TDmUFc%m8@@4Ȯ:WƭV.fؾh-iOnTʽ%KlQ W2h5iyTٗC%FA;Tt2v]uydC_M5W#cs!RQ!V ɛX5m`dBƮx'42ptH{D\'M͞>We,R3jFk$_j] y_2bQ70P67((͝\IQZ$O2N4O7=a0&V'0.}]#ZqCB#,(]yK-h|Z``p~aJ *Vʓ8XW IbӍ#N;ò:WԁPI>uM* ǚkT-үHuϐ֨cZ**R\ wY ]\9nr's% CQwHōQ&IsWs|DIX5b?|5#UNbWo\z/\9s¤b7JUUA3Qɜ.5JjE{QTV/Y-Vtؓ9WeTY||+kɗ|L3`QhjV\xZӂذCg(sדQ.6Bh1%5,rQ0Õܰac,fcvfFW&:mn<1ǀpM븲nE !mVl[.O*1 nhEţi6;GܱOo4jO=>aC |\WzQQA:?sM*/TWMLÕo3 ^vS r'=aJ?/9x-SWl8M$ T-Cj`(Fڲ%[]䠨s˧*Wrio[ҜDZG-Z 9nS-ReL8M'&Z7*tB00ڳ]< !ȿ"jya`cV֡dlyU{c:ѵ}? i=wtMѩA~h l;o:w+tti+C U46NO.FKґ߰(7wX$QO-1^ek#yWʖF8 L˴8{6gE EEz"UK1%(mS8zka\JZm: ψ]lzh5^׏?=goO _hpV)7v $ 3RnK*bD)sJl rX<,DZc];4]+dɄCTp k>OIW 0syZцCJ$Z3->~u,DAֽ^u>VfD_fgJ@j#( yJW=QnBV R%$6skSeǤ Wm1ʇgګJϣ5zA-]ϯ-a97-u=",\=+F*T>G4$+>#Ņ},Ix&ҊuJ(CrVпx/*)^&X9I5$SthsNtL >Jp!?gR8AUSDWHL ajHP zF?Zqwt֭2^Ҩ]/ߛ@߿cRnmT;On=ˀN(HXNujriVU.p*sķ ]*5ՂH1)C5q*;W$hdS͏V"E voL!n~T";uV,m(;v>Mqιxe;tl5.סc2}QO-{oQ_ !n>}s=7).@H’̛TY#$aAQߔFzA٤DZc([U=f(0"})P_+\ml[XJ:@u!6jͫ$ka] QJ JewneVqr+lc|b"!icI܆e%$m] ad9QFe?nǼ brfɝ?9rR\Lm& k6As!) 26$HHJ?s JY\WS_M~5ElW*f(j3krں5ƩamL(;!/ݕA_nPiGlWsڔEb&\UZ Ult7l(<WWepw@|{TZe%68;`CJ%7"E`\Ub+)l%lIcDsJ>~2M/4ݾyů?})÷?ߗZ-8|.wh@ns{o>A_7~p7>=ps/? _047_w7ݟm >Ci_ߦ&Vݗq~N6J;9捷Ow:Ն_zp;CT܉߻hac{,Zá߻bF)ňwI3 %KƋ>;)frR?^Z-V0)@L\rjfƨ s8MUu"\.By5X}]N@`ӷu;鞛B:L z2qprt] H% |Ÿx:XȜx36gsLw(uH#!ERa`w+N\̳AuWCJFxu. #x!3֠@GJdDHZg4H_2B Rđ-O"n CNZehJi%#T l)%#/ĝdLB\RtkAӰm_2B ʞ֠=A~ˏ˹7X\}.35x]a;Rȷ *%懐Yki@m1Whtw8pߺDy 7YdzE|ҧ(JzM_C5c;bU}:bqN׻t畅gHs,`7}&?e7vhir4LCi)İ;~|dxr߲-x'4Vs]٧NJjj4͐,}cu oSilg nh1LRh{ SMSy',R XMXLEˬ޾,DUr$.pο/ ƹ`݁Vy^ YxS2(P8vdr@ ʸ*1[q2+0LvlX$LU5@FV^8ף`Awt(M h gd2q21j6dީr?QHF+u -{8$ cT@.]|7M!7CAޒߡ۝\.kL"p4 E,%(cN*~<Yףy!'?h]x⨋lӶ[=~7B Ib&d5&Y-, Yǜ*7p۸h[z9 ʷlۋ2)(Kܯ,Ax17[ԽՕ'IKEbړT^lَ Wξ̌gKRY jUs ˪HBPB0N ;4@9A"5%d玱PqkGKtA3Ji`B頂LD ^ilNr&QU2qKeQmWʂ%]J~"/u`%85+] vɤBk0,F=>ȔQʔJؘr0.J7/{sslQ:qKN% O(acύмv!wYKfV"VY j͸4I"A<҄M;'h j+'K-iLnMI> diS:! 
"/Eօ#T"%,0F)28 s勃ՎZ ^5KJj[{$d\ Sruj|5+k=uFĹ?pə ދɗPТ6xpW4ppFE&$PB-d5~\<\-ZD"E&ffUO/ x#j\`u (I͕ r*tR,xÙhs#E(}5,,rl,k>B ,QlB< 5 ܨ"g!Xii#Y0Bo# &)r҂/zcNj<- FPYK^|c7a='Jb tR, xEj$@BlKa ,t l7kYKFNF d)e#kE,z@ҵ-x7;`N@I#!a o_0B?o܁ b2vRcm3|UYkuHAz0WB,C~N{Zےj6x&XL\ppZ}|5 fj/ukI F0lQ"hB{[2B֎ )WL8!m5`xd[p:*qBAZ6cIN2Nos g} 0ӣp#+nFe'RCtoL]g%ϗ]Ն]-ug㴼=ۙ`&ضfx% T8ߡ;Y,=F Atp&:㦘{# hTpi@8'|Om8-(Ae{ xNHߟ ¼T>\qݗE$ÿG^D5v,+|[ kTs@Ŏ?!V]EC4>c5cwD)|QW%,묦ީ2 ߨ &V+x0Kc2FhqB)] N=mm "<PZ*JFض Ս斝㖨ø3\Cu 5@ Ô%a8`o?uw7l&spBc&`zmor9.0CL-]+썱g"˛7ifa :q?E;;:3Ա.q~凋ג@|-\\-{/ǣL_xt<]8b'd'Qi֯ZC\޳(^1o_;@Qun~zp=LSJ}˕JʹlI7[|fp5_nqWDeܙ9%;22"7"x۵vVOƵF0Wyf>36Ɋl:Gi[%"u` މvqҪu;Q`z!RJ-x>+$Oޕl&A~8Vu4dʣWƴ 3XrWF0Z}u4Bݙs|RcRc|Uya[n,xH)mO3SՉqXS Izet:!g8OJ.Q`C!R`(J~ʅgD Kuim.ChLH֍nk F mn|~r4<{ড়gL" ?$kgza?ޮGX}wuOaX,fʂZ6|`|o%f\Ռ.igJeC"J'WN:v+c&uѺuAt}.m^ Ta1u ԺА?>[]/(˄!m4N.Y}BzLLjM:*1?{Fnr١~J_q%[mʅbLS IyI~/ f#y4Πht7%֚ k<9ZKr"D1Ns9_Lj8ʉܯӓ;*:l'ő1ֺ?$Gv2B`Z)3%AThb0ےZKZ o($$ ^N!\#c0et[b2{(PJo|-E^hwRSYRj4NOFaj\B4,'Ryv zߢKwy.֓e=j LsA b[t]iuw0ᔀ(GxHFpއzng4 jl겈O>ud3C4/Zxn4K"<Pv+)f蠲߿=Zv/?z> G uٿW}B~8 _^ykwX뇇t2xs?Ewo;SXim%~nͻf3`""=|"$/_P"_R~ <c %#|aa] ۩U3/?^lA7dزmt]/j~=W_E]ߝC<ǒ7L|\9cpc1P)Jods?ň?{^T9˜̟wf_+m+=s;qbU nf_wBj< 7$~aywo7ГR\Z5]|ƃmQ3 tdq|am>wUJVϛGUs,{'dcn/S BYZ-ڑ|z?#F h2sǥak#$"i0[Ť޴{POTYltVfbfyFxyJw5Ϋ,LΎ+żqW+Z"m%)6Bp״T_6XNjQ\'EޜL^q1u+Ţ,'~|tu:K:ni_gIG%] ,D9s˩&Hlwp$6 3!fIGbC# qJĥ_'1":?J>j5l'CVgfLbp$OHa9A .1ZI99Ю^Gp1\Ƚ*BvG"G#AY 'tZh;_./g]2np{2?*0'~|Ko`Õ=|3 SbZ~/_/2P2G/d6Ya.A|Z_ǔ+J{2 D婮 :PqV1o}6O ?pcH8~oK}ݕ3*vu}uhwv ?ѿ( M. u[B/9$P: C.,1VEtjF\;Vsx:0] "I?@IHB,VѩTٗ:T-&mq?|,>ٯ :cS 3 ~& $g6{3d_;G`FP- ")(`aP"uYvPcFvօ"/d!07Ӌ餈soF+44~?" #`8 Y3.rs-l!a{?kl e]]25 7HēD0)-C\/qKϠf'K `cxրl/dK^+~V%a.??kX"0%30~ ra,=a|;iCr /5G҉ǐKȌz$0 KB&>Ӧh7aj]'{FtJNj_7':/lj!La"|C}"瓻B!R:!q\1PڒA bC0tYJ]jѥBC0cb@ 77U!+7/z3Z@F~y3Jz[V-on~2䴥Sy2}$5F4eTL"YM $jJX*5bm Kh `[ %qG4 _9F`ULjrǂbއu*RK^K {%HCP"w ~A@BA{CN'XKRC#,RP<R|Qv i =Z/BW"4#c>fr ;//Ш-@WQG't5.V+4\ NC?ohRc}BpY 7r\_ p-^!Pb[.uQzdʋiC(+RۖzTyVq *&+CV`Wwwa\QڏΡT:Z QJ?3EP0/;nm>c=q]$BǝiӘfN\4/6*ci@.no<_ mG"G &r-?cB fvwO.J3ΐ`mPL{E0[6wGYOg"ҷ㩥0[ ql.?F ]霘X,4sp2/TTk]9P-0/>kg) ᗯo4bDAJ'P҃JIcgRn RVWOaA;'Z&vHb{<LqJݬ$~4`CC99=xsfYg<X9 dA4J8G ALR‽^ pdBC䏼ct$'݆U<FzRo_d=}-:M0_aH-4hV ⠵a/\gq0φ1RqNa GPA~$J0jOIM1s#$Rp\: + 01Mq" 44@1\7ys# 2PPK N)qA&&سgq'L,å WSi1]NIm A-)ePII^‚^!RP$@Ht$9qFvǔs,*Ƚmݫf) J݃!bB"kRv>9v*dуVVۛVyGiEmTn.7e=u}YS_Q˞t5l~ROS6}W˛q9/֞Zl9" Z!*4sdʛo3.^91NdtN=EŝDNg%O&3-k W>)BtH`w:'<>!ETіh"mﱬOzDYZWg:Ʉ3{Wv1Z"yIkUB" ?R?ځ虾yU VvDTSVO.w&E|:I~z;Gx2<a#n6"J}dYpQY# b26ˣ"H"p- }MGt3(DӪ| RSZ&*K')R:Kb#@&8\NIINIt&jwzŔ^󆸭YL"tJN'r-LeT0y:3<Ȃri`iܻFcHWˆHlGN4a~QꓢXuӧ6rlܥ5)OGReJoߒU2pL&$K %mR>e&,ذOFpۘtpz;CtEqQ.vveMl81"gfԭ~DZX R0eX:Q>U맴ȡQգx䆥&ik;Ha#cfll/OQ$fŴJyk)ٖEm6NI%O#uy%i!α xbW1J;ct ⿔EBS!K=bȀWZZ,!PX6lgz ;P^ͭ xl &P xPnR9!RK-LA@0톷a]jq a-;؄Ui䙼Ϥ( 2E}\|=x*ǙU]yc۔"^Q(eRíaDFvd鸸10680lTXCFkKgag&qތyo9b9\Wx^$1Z*GAgiO\)cEj.ioP:?) 孕M' /[z);i`tk>06 8aec>?dK*4'aIp*Bs4$kÍN=֟]{~3ptx|{S}ҝ 7۟2yf;R$#V6,*s|,* W~* }TAJBɈ%D*'uI%e#fi8U<%9V+I7+6D}A'C rå7>yLv&,ut DGߛJ݁#bD9b1Pgj͸sKJ F0()7!N3R+-1 t` ĥڧ?*?oKYϺ|5U.-|6SB_J6$3#Z ȱnsNimں5hukc|C1 F;<ɮH$1B0]Ioli;>w2fܾ"S.*w~!`ge\/-SzKzxW\/E󞺛QE2zޙn9HR$-jzOtxm%dhT!h{wVH޵Wy[i:Â"}-GIn_gHK,^*.Ы`j2 5Xm:ƽ[LіӲ^NiaWնJ).]rn/ϐP ]pnE: s#%a \wvcrĻ^V嫋mt0$ x-+T98/jX߇ܛZm6сeG4XE?{qT}p?MbEk XsPV]jRnb9^"Ɩ\C)3kg(*hlܜxeih$._(7Ȃv]ne5! }9 AJʍZi8r˒I xKbL#J0G3RB@pRj2M9\!(4w8 ?,Z`e K<`x<)D#4c4SiA0fD0WV]>ȕ:WLJw1}gx7H/HYq7#˕Ǜh%ʧϯk̪&Ǐ5Ր z/(7}N& OqQ tYggkVw.<&[T  xNO^%d 0!pQ&G4C"6p-BpۚUFW"#acf~dF  yA;X='.&jȮ0 fi7rJ"mXP ZZQ M3RFy"᪗W]uyq%g6x0FxϹŎ |;iMAcE_zKqӛ U7aӛ mŷHpo"oSP? 
RqEw{ #OV c *<=jյ&Xp<\h%NU8S#s!aiVvct>F Rv DJz?/Rr6zAWҲEfc3lvSkjT8AҎgꚁK1ؖTl[v}E= @)vqgC'!|xbr>Taq cNa'{_MÙcp4|RҤYZ.NrzUӨiYqrFӔWv^Lg+4M3ʨ^N@IUv/_} NfW?@&aYE~5 U>:[>|+_1-177GCBxt/Zr3&iMۣ^*]ق_pqUr )elå o4piR(.Eo,[VeMΈ0Ψ,b؃y u eؒU\Q+k +5lTR:oK]:ŌI>ev"ϷnOGnm\X7BnCBwgs]xC$6i* jWZzPѩ-/z/T zyf ,+ #pB00x[<+؝ij\}U K)_-Kƃqs$D>~BXMi*5) *QXSVE)dDT4zC( *XMӶUv0AedfVZ'Hj8lǗh\ K'0yA'!;_lzуz5'^hг-z!kULJOHE6X[APG'aVHn$=@}c0q"'<:6=݄l*ي<$~㼷{"qçďfq䦫p]aPU%KɎ I{sJa7p%: ;J6ɣjϕ8cIl!^dhQbf"By_%iYzƨd-]pl9NYd Pb[0kW!6Z+ K$#r#`'^klg7/DPq!sW1}d\͐TuƼh(o=T~m _ kY_> –tou7?W۴Fd]}|_5\AЦ 鿬3n:s_VlIYWv T98!mYlЇWP؂Wtj}޹a{^uw;߽"C mח|-ſ||wMϏ۝;"e+j۵H[4ySWdXk- v )}Ν9d=j{e7m]m'x%] _[چtRkx#E]:B8xQj4ݵ;A y%pailWݣ/ ѾtI0lMj^O]T񒸛&7惑Uv׷H3Ɇ &j;7 gԸeA4Gvެc@3Կ3`z=3:?#yC4-o7Op,-o;hy):2le9q5 shya7,-/k<Z A(QS15- N7yOTNdB1{ 4YzbWk Yt9I/J 3@p?.>U[ADzs 6PN/nICzI:~v] À4w?A!ת3$gIJٹ6MC-錸)cw(;p`'_g{O*qDb4m P6M۴D,%pVh<5s neaVJqJLEGVq:Cdxb'%s0h=fځXIAcCA ch!ёzMOzwb:9\uݘEEYnSq䣥}kn bҶ|U!igpiư!Z6m0,0-\ӪqTXծZI%Dnln6cG%m}0S:.wd}5bTɐ;%0f2B)qxIʭ5krPo|XgΛf?k!5}SeMn1k2Qo{qukNkr:x'`M&a =kgMNЀp:dL$堙tiTMR$j #ڈy4:*k2[ḗ}xyErc4{՗ރN}[y3CR&C[xrif2&J^24 aDg 3h9CLv;g3LCdN=w8L3&I3VϢ&;dMag̐+ dWh'󥮚)rhu122kwqSD} =}8D!y-DogLZn v٬3`D;vٙ/0N[ tB̑lP@‘-b BT-Я78WY"+FTg5uEbQ,HR1eS pQH-l/U@`ypF5hZtP-d*D*TUK'װp4XStm0(&i2ky/&#lOrT<ɶ.2LN]7gT ']\#l=Qdq|!?5]8D.O'wǹ Q.w {LO۰9I8gӈv'̧{i i W )9::ʉ,pF'I/=td,E` ਤlr8Ax[sO@?J6U[Đ6<8Ou-q,DhqGJƌw_i *F)"I\c6v28GvBn|"IRwm{Ba4p]i*ؔdE+BWJKF=Fwߋ Ka;jΎk{tr|Oٶ(O|c<ӾǸXV WggºX\(LļXKl{mn瓾#@{.CY,؎X Kޒ,j͵~J;Slᦳn:[BT `ZW8kj0mQ=5؀,\RH m]jk{O~(TGd)µeq2'nJB"AVBd7$%_KbeC6`4p/jD,*#Њ)YAM!`B My*W'O1N5a'OuB:73Om~&5#:GֻSa!2޹S dэ6po=> Ax~m{ȅ[ߣʢ v7)6OʒTw݄ȊNO+֕OwȊf9^;73s?"͑4dn'ca2b.3T^IJM,>}hs-Q˶Xx"RrGX K)wr<cDFPA,uUSL&urYHdm=53?IMRqdK#0A]F 9U/VI *nyGB"$]seW#vQ'LA8~3D.bDJ;yTX)ByI+;1AřbsGDέ0vm|Y7t\U#~27u𿟪7&} ->l+ yrJT>Yf4ŀ_ԿJσ#la;V"-V "LZ uHKVA8}s!_a~t37̿>²`4ۺ5%UEmrT_.5pvĮer?7?"0_[ꧏptKX+chF:>ײc) ڠ|ju皟K5ڦm+ZA n5:JUQUZ+ތZ{rیMw50K:s ݇~z_ߗ7|4lޑH]n,7A/iL7~q`Fc^<rlm]o2KKN&dIn9+'H8睍RU EP*ڔREYF':B$.HUm[@NJ.VJ^Ƃd%~P*cbCm(@9ņ&QBR>ºP)7fh}'\HNK F N f(bVi|Bj\0R!z=>KJC~9ywɃ.P2ڼzc ρc_ t0G9U۪ܿp+"€۾90KV]8_~.gIp O0Y /%L6VQe2:ߡ,la+ϜpAN8ŬMg*PHGm]|SK:GuEy,9U$Ž=ur[ʍ* _ffzYխMR=*n{3w,PTo%p1, NxŷzXY9Yj"xrBRzVRMbL]PLHz S-(uxQ,ʧqM,`@͡*gb53UztjD0W^6=}ۓi ML RY鵑CsSL +OL`O(M?_DcOM p"3`,?EC_A!H? DoMѰ }Bd<8)v#AdLcw&.WILQGp[dI*|c~zrC0M+-Oq`EYy9 Ӣ.s9Vp9H&[rLOk\!W<g!c6itJ"d͋bMіӭ(>]xN }PXb  |=̖=mey'ueU0aCe"RN=XfTPfh3!ӥl!M,,Ptl=FUϞ⨷t_;\pmõ1% A\[+)V!ΡRFU$EED|?2$kooڌ6%a Kg\O Z.jׅ?eSժMRUX6oGv~ȃ1JBa{={8kt3D >LQJ[=w4zTHpjL7V:y#J(bK'٘ ܻl,%Q6'\}lUd+ec\w<&nQ,)NN8W\=xF҆Z}i6DAeBfi49ǝ|D@C 89z_(+m9CW28R%)C3ܰRO\r&Jq(]5A ?|E|zQsGgx>=Q2NCjPr x<91$dZytr<7#d} :}Qrp&}U(Tq$>fژ#Ya><ߎFGݢ`'%ꇷv}'1>ҖF !t*9Ǜc=d Ư'=|{"9voNR2WwxfǞ 3 ?6OxN*[{+DQES̿y+'ox1*3Q}%rO?IWz{$wDh{QsrG'eO'NDQ09B<-zrRN6^Yk6_˹XܙꭼkUmq7ziՑŃ<@#хCd.vHd01ӎ!^`j~՞6GFa}g(!D^*6_e׷N0h0A!Z->!'ni`o@w9KNIl2O` щz"\?g[ï=]r.o/Ktyu)ϳǛo7dW7]ӵy j2 BOv^:>ͦt>y~{4i7:l|Bd>+Vf\{VƇ"|<|v2>ZQ@`k$T1N7Lȧ 8`L)h|)-RJE)MAYip%]S[Ny' s4Eb4SKv?CZ'H">'"3zwiy k&M_/2C-IQB@ D&P9^@"hRZ~Kdc"zSlЫb}hv;jNn|. 
SXnBRɪQmKIVDMY.JE+Z-n %bQUe:}$c%(RR1&2,tPd̔Pma[ `&#!uiZMFMBQ3)Ƅnj2ͽ^tU\=8@I0td'azkD0n"ZR`M JZլZjN/u/Zum\_ϧ+=T?_?]5 WKu"lF.oT]h͏?,3@~{Omkt`hۮ<Ճ}u׽yZ_l~ m MfRr7UTPՔe$9#ym ٷ:-<{`)hYM*q=[DNf>-D1=;_\좿 P\)92tyUG=-0¼Y.حCsqse=BX{QIw\ ߼}hO4y]iĻo_ n+Zv$!]Xm) + .^|Qk_\=˫Ey2GJVF\}6|\= 4A_ywo/ Ų+ ӧ^ac~$I0,D.}+s~_mg$XQ9%vӥ!vpS9(W.^L%9$£n'9:X: Ri4i(%&;w62m!SOz=P=&2xvQ&duQ4hUj4n*L$<6ΰXQ^Lq%gPlVTfТb٩/zjӬe &8/_@<Θ5$U{W۟ZoLBOQ&I6)EdDJ h=PH]W N1#LjܤSlbb;&$WC =rk|.cQ#O.ʘ~hЧ8KDӧȰ+9~\%;3-3 X쀘9C2)`*28d );@ SO+q2TPvScin}{fnp9}ZMg <5%Ӓ0- 9;j#l@Oma|ipmõצ;\æ4/(*VJKYMi[SET 5PטdE M-k0\~V|eÜr@=Qp`3(HqU˚BDuCd XGRuCrlB~$gƵ%#uyp p6]]< C*X !eսOw.sÞ %,uQ' syc=3A5wăY*S?B"Yu-/ $y&N)[z9# +"Xt!kSS?-al+GGjՑ'G͑%No=,].!zrswZ2FR 8gBg3ö`fsAT)J/r 6MM0xۣe 0I3@*/w0Qf00sҁX 'tG2;̺#>lF)djeR,%ɸ)" C< ƒgY(9rT|qCSyPE^,)N2z*OT_d ~>č')Q}ypפgQ#d- G/ K/9{-Zn)đ˸ՙdzWz#sx8Wѻga) )xw(6]'4FzqezǝJ2B!atJ9Xm\HA %d#dU,W0';8f)lD\گG9A:dg:IҖM 8G_c~khIgMY)(AS_9FY $Zuplj HeѠxl|~3*|c}vq#7F+ >x H$ͱVZb+1tRa5e9)zBi"ќ)c c3VÚҳ+B”y$"4!96?SڝTȧ>挎,r3?C=8\ T!XZZck!9KG6tkO]׭ׄ N"QRT5h8$Sڲ(59ȱ}?><恐xEL◦]6/!ĺ??YY7?}"cwh}LeL5 we-Ua]s"0oE*͡fMHZYR%-P7lDž6@UuSU j.edݻڷ,5@gVJNvJ:vf(JP !2SAּr((I{U Ø5oA(_HvGu(E:ozsޗ VJ]bS"ٿCm_jV|Gn.%xUxx9Qj<k kuT&Uc;n^@#Iw10үgrڈ>+h+E;Z cSYWK+s`૵/fKKg҂&+`ՑqaڽXv$ %r<,X3Ets<"oe:V:`,De[fFL)N{岮3A}6bT_zIC,C(:`z/qv[,hn sCzz_f\cXPH*cM>#C/doGpkչ HpUb5j960J$n &E|gN~UG'yYbo7IA/; L)ã [S}K,/i5*5 w4^kr1="k6ZowP8C<_,\,yR)_y 1(wg, g-B2մC)5k@gc86K6lb;kqIQXxi E풢u% !O?\~;N;7M;I)V#'}pKU:'&qEB\U1)"Uu>+zz4vW{_]Ͼ{YkX7㥭n?i+)f%+GkA'pemVK#1? o7>!crt'S6Eb c ("Ձ6Dt <7QG~PW>9&o&AZ{UZθ BP-r{gfN0\[D?[֊S)PiG:@]lpF=pe%H (3)D8R.K9 8%֜r+dUeed*alQJܕNsd71 tl[ᗎfo&n@7nN_ֹG~ytM޴"nG[^Usr($GኊHY;̀;  *$dhJ7ZV8P(GqB).{1$mF]|/fD*0smlO:QL1i>nsс, `jh=Ҩ)V `za[xɅocƢxP,Ѫl'NjN媇,[< | >'Ԝ[;҃2Ys4%XҺћKl?g9kP b54G11gUl{OҴl Qt݄~!f@n%ЕxvM"fHFI ffal׿F J%NШ׹j=ȴ8.7=KbNY}fߝ!f`9Rp,1+,9ӓX^>wk%@s%ʌL戁F;-{L#<"gm\ t=Jr6i#c+:mGAdž~$a<=:Vw҃QbAr,Ic`fs$VS O16Za/VNlCwـw9G{6 V~\@ ~g 5!GV+L*gs:x t I?G/j0̪H>\_/Yc0Q̓mj\,;;#+2ՓjP#eT<9Y=Yb/Q^os5SR,94ū3H5Kz)x([ "@2|u@r!WիӖUt^s֦T.wt0P(y p5-؈Мx)3[r^ +Z!+4\a@M\"v}{J6G)]8z dQ%$̱G1&$*HPׇY<y4o LtY:By)`8w+U4 O~%f)6O6 в/ʓPFC߮6O߅F.ߝ\om!p3L{{{xO) e]j3&iݒ,~r;2$0[Rw~1ft%öa SVB3іBjXiy%>(K^p1V{R窃ak8zqL1K}疢}ͳ8nw*=J4M)Tѕ' |]6n邉R9D -MJ2tipSf ცPH_ J^[}=%UwⵚG KGD1ɏkw.ǿ⺥NGvE!N=tc6iW{7+u 'm40+Y9'pemVVOgXv˧kr/on]E\SÅ;yKsy8f..fߍ+L]bʹ`V\6؝Sz/Pmӊqu sUB`%65ߕ{,7(鍠SJ:F6uND14Z G5sq}q|6|Lb)jv/uieI:4obi1c[f?\a-{靷Yqմu IiPϤTꙤQ7̝ 39h!Ӣ3(<$] >b{̃x覼} f *=3's%3wQà)3Y9,)DKZ^S2קvJ w43Jb9٠Ic?:4 ;aYTز YzYY~["wYMp:-O:C߾ZJm>R)īHGAZf=8{)wXH&` /qm-#,roLe+UoKù,䙛hk4_6.،((9c"c3Z.r LsyZF) ݖ`zj=?s${Ld2C̓9Sj<{ȟlP.lP>H_zĖ0kn+A,QɄ9 M06 MîY8Ys8o1G˻'$z02x ul !qUif.b>u*~RFYg7P6v\ s_n4 ;}ZRWf!vp1eq.,Q $sCZlPեsݸZ^|Z Q԰S/HpzHWtGʤ i/]LyZL`]a{bƹu*OCO]#2:FKRiKǕ8YRHs#fLbF/lnRu56tv鱰bNγ.-ZdyW}vu᷏u-0ïc18b̃' Kg^iV+0FP#p% rh32HIcO|6=RɧU< 4g:H;]!H?DJ@h=fnʗi[Xs@yFH.EڎX/a@fxfQ%D%9T5e^R~iegk4xQ!]&3PYb*hZ6CU0#/Q B I-[OS=W_"I=t Z #vQ'ӢgF]"Q5Ҍ7.e7P@fzl*M=cXm5tKsL旇(vqדhilD#? x%;ԋhÅw /ToV͟G?6mi7*;^UxS22L?{Wȑ `wbeg{vŀul]Ke{YKd,h@X<"PZT E)MDRYqff=~vk5dm_~VVtC'çj[5lPŎOavY)lnq=t@31>|p.Ż߅6ݧt)o?v*qbOT䈙 (rڬ"ѶyrH\*tocad<RpimQKsJ^uQVj ZhոGO O!8Hz1mWxzAY_?>͑1&E9x.ơ u-_`o3@2̚!( ߿bo;aT{B~ßok|u3!j!͞g\Hnoo}Y}9GD-͹0b5/t B P i2KGIjۨ)ӺOLm[e6uFsNdyC(:TnEb/}=-/B!$`/I+vqz9kfJ5!+S*Yy4(Ur,ԍu@c  ֜9SiZZ*̛0ȐҠGs*RFRʁMv(RJ-j2'5 . 
[binary data: gzip-compressed log content, not representable as text]
Ht))meL, R@,%U2IAyfb9mCq%ЇKGН!(^+ ER(OB:Q69F䬵"db+=h%,snRs$+qpd>_' j+jĕkxUNfAN9Q$7=VQzgc1UUx{#nW-՜7Y:'uլz6c( j*c5cfwXSQS9s(Tcɨrj/~ǚH+[=bz+Jnq8d58D5$wnt0{QҢCLzor֝m,g5x+0V5r-i_&ܝm lS®1ky7cֈ|-Ya䝙5.Xc; Y&?5+#oˑ9sP;/G fHQVE &:92EdE `&^))dq &)?6H(0m4Ӎmg X|K\DoVKV}؏s›˓_%[kl-H0t޺w!(]'GVj:0Q|* euwR2L{\LK0ItI: -*Rl TKԞYɏmҳApag͵`MJ d@x(kD)(aRR2X5rRpM \It2I"dmJ g.%9tb*Wh(},4F͏3LAp{r@t{3"B]^Im"oy`P 2c70,ǽfA^ܜK߇| n-YڼJޙEL9Cf@MW[jKcE@uoHU)f6I!)dvr"ᄒsErD[y :`H>4c׀XfQ-a{&HfOt;quϤf=3Ar!ؚ.W4?WOoh+0*QIvZ; 8XR{YT+ӚgĚzոݎw\i͚5ZZsɅۻ֜Nj~z5ݍ֬zVkKksT!,{0Zx &eNgu0ѣΑIIBJL8r&4"@9'aMt xk5EFW4ɵ|2Ĝv>IȽv->)ri zȭom.K 7cmun}yv9 OZ9߇| wȻcΚwJv$ YD2d,(D.Cy+j ĶYRZb$!˲ ;yR]2ZMYbhL9 †\fERFk lPӭuL}~O"uoB;7Ȣ3D1YKK:€Cn%z TԮIA &!/=2}O) B} .%B` P=qxQF*X`ؗUDyǔMa6h2*yWTri:(1GAz0>9'X#y濋YÁQXBvD"] oQRYYvlnDT1zP9}'> JBf]]VXDj1( P+7q N>¾,ʺ(Ye :WQa Ig$SγVl!uӠfW$@e=IHBRj\Caα$7q7"[Nd`˫Ϛ  8 9 TIJDjKb*iP2;'j>Qt6MX#ze$Y?זʛWzr?]>? 8D4UVʑ>|]y5޺c'ZߔSWaMvxup De_?>ܷ_SS[֪NJ-P"OXJ]zn^Jmy|ҧ(T±tq2ͷ6_7XǛ@6Oه6׆W;]2ŢqCeH dWp}R=?&{}E1=_\y$\-Lo MCx08ƃ@dc3b`G^8%Fe]  o+H5ba`Vaä|J) dy#}8qB$N,Y-FŊ$|؉caOL%w*UKd+~ Ԭa0> %XҟisJ;I`Yyn*'MiZ-,02;)#جk$R[ݫ&;-YcYn7dX*6% %4`:"gT0hB1⳩ij`~:jwd w "s]p/?uVk Q9eLoKTk0 [XC3zTO QsX7.^Fj7{(bPb,.+rjjX2͆/Ьn7] t9@6Ҹ B C9͉ġՓda9ԩ vʌp9TbF<I)9v?x4<܌g8XA׌ơn%3bs1#18-:{)_B֕R HX1(;]yo.Uo?*j֭OCB2^vI\ vi|cyՑW ts~zNx+jGAE?|73^(* ꭭y?)=<Ș^=?kw1H=rΥw @)x3^=QrУU=hz 8(,2ᶲc{)}xv@C̪€~/8v _ ịtGꑐUt V6q\޸6zc\x7FHHOq.k_ߤup(@oh^e>zl6O}u'S~ƿjxwʉ_o> ng/Ŝ_\ Q1@bt$ CV[CARvDU Qbu;+䰹rūR2洡B/ϟj9—|IXp͸ɳ&@zy25 S/#%= Ҩ5WWf#hPJ$RNQ|&O5C Ve6P: iD۷JMfsGzGY\j( :m`3o d'Gۭ5F|`EZBz[/H7¾]2Z+ BS@16+a ɷ/bJ?b~dFTwa%"f҇W"+¼* 8ŷW?[!b'gHӁ?sߟ?;*o/.7jwn鎜c>}?[cA2;q[,'[#+I gGZVosLg<;/]w,BK{fJSJh5h[L?1'%#iHe'͠އbL'bPӛuF|3ڡk01<هb,YF GrNԠ),S) "e$DM0lJ$8<#H [fjtWE֙I0hn wmm%G~ w?M<@Ld7/0*;#[K>#HMrVn*4عꛭߘ(sf'x>aj]h hR60w}k|'xA~v&oM~z qJ|j㼈EnnS|M~8**Ta Iw9-zZj:zR 3}I x˿jS1l4Wy2$ҵ~^ -VXFSfF/{I"* ӹ,3A9NF=:1ƃU.sP@ 6Ͱ5Ȟ`&0Ț|G:RXJœq%t7ڝ"Pqj``8QV,A:fhUYLLih ZlGBZ@9@}T&8VA. F%Es!Dm .:G,<Eg_ϡziοT|&\ҹo!Zä!K:pB7 ln,@C|Ϗ-vQQSSۅXRߩ٤Q[a=r 25722EqZ$3ͼ eLzk%&[llq  ;>NE8?/5zmհĊ5dCpi-<5QA9 cn=a&:m4W a\ۛV6%`h5-DS8~޳pf$4_ ;{oDBYb(߹5-f M7rn f7"zgf0GJn>'aɥF}ݖ][P1kHEiơ216 ۋu?x;7 |ˏ\dןz,.c9xFxx7OBzDiO]kD3m2GrCU8ç.)!T؟$-~sYk۹_,LQL&˻V3l?*(ZHͲa]?6W\m#T'h *{lBCyr' Qg̣AFvw^(iyZCfE ?QP8̺KǸlT|h&fJ1Ʃcg7^*22[ +'\n^P= _'Zt,?DJ%-xւmѻD 2Mj(34ۘ`4a}LFKS,(=a1Y(eMهC=[̦؋=a1OT~^?YR&"Cl`P^U؅KUW'^~|O< IJqx"hR7_&w}LW 區5j?i򓂿iX+We!u(MVԙ $3A dƣM l"&b@)yvSPX+P+Cj Lw,h1-G71NxKh mQTOTGӣ-b2yE=Rh( v}` ;4u)7_(O"zq%=cř&d,hg)g eTbg59瘅&lxeFJ:||¤I:S卟`Z+Ep8 Z,C^vߨŹ 2?sH}\W̔j>(ݢjk?G4.f`Iqa,^jXot_WGufS޴& 1fz׵SqJscB,8dzP8;#rrM72D8d3; %Il(ԌtFvJ˙kKƍ9o܇<ooTygPOsl` GŒܪbJIFR\AÒ6D 2c:4"5;ΜUtb p)PiYJ_E#,.pA<0F ߢL L=-]7oP,k@r}նd(~wT9 ZF(@۵({y .·y đ(>fM $Jhy0ahߕ?tMޭ7UXhQۅS7_^|R BHu˧9eƌT^|$#ו 4u%"=&뇬~Ȋ뇬~+ġ$M:*m1%%N>qt:aSy Ŷjv H jS\*1'cQ!"ܚdZX _rXYAzի(3Lg,cSxE5⤩gY*p8YbL4)RxGmDr ԣ+1K<$YOdT㒆 ] \5uZ"OcnhԒz Nzm^+dM+{(ŔR8K.(dLo=j|z3i, ~xfx|(3i$y"k&paT 7A$d%ZP?]oGW~!C!`o/A|~8KY[I3CR&UWW K6*}eVDAUH*ʍD?5=jiz!%m)PJ6om WdmM- ^:pSJҧ.!3#'+Y.P~xi9ۉHZ`(M{qGoo0:vP?n//]}џfAYZ5w3Ge;:[3ZNjr.5)NmiCL8Um!::*vHIJUԴmNNM{N5NJGVkOJV6+Ӎjku( :FT`N/ GsLL˜hDcMag01rN[9\\JLB/~Ii qGH;DPPi(7'oϽZEs zR ^tZ5`[$Ί0omtTzPS`GWXQ)jt v_V:7ݯ֘)@rRYo6MZYhDSHSf\P 6* -㆗rPB(C8mSPUL(Fy@MR(=*TuQRMoՁb1VuVlS6*&9K%S,qR~D#<ʖ?f8(|5\h0ajp;тw>,ĠX3=ZȮF"# d' $^و -'f^_Ӑ?G47k#f=,hSaaLmqk (-(W6h݀7kTʙø? 
)@|Y=joxdWð7,V%Unx{@-Q!M1B;4L}`K_m|u!dM xY'3,T:"3r} )ePjoy2տ/PS )x 8ez ũ{yf&xp+D:ܭh֔nҊt͉YG2oalE8Ai5A#$sur=͝iak0F[>2n`oob 6鸵{wYdkv2"Kce_V>c1v9x}_\t &7_C N$t%DpUtuyx<%8T+х?~Ӄ_ŲrM<\UN2!XN"VgawXj?E}>l}] F( ̾~>g;йAckߐ W.>ۖKu [ӭU)gKBa͠#jajvQ Fu],7i ÿ|ģ lËJ.d?xG S]nuWW z5igsx@+G2eri@x}eS*\i0{-z-B _IR[%l☜gґBNW6RQUtw=ϚCӕL֩JEme' ғJ`]n-OU,q)Y(g͕Ƅ1A\k 9y0J*ݛA x/E;V/ MpWrkHo"/ȵZRN !&!kwAYhQa% )K{a>,9hc"qDn^ zs4y*e|͂v^07Q&حxv K~0 a  _MC# y>z殕~VLt*H }EdvWF81jv nfTkR߇/dtNV6ϻa&U(Wq\ʼnrU( /?y ,8Q ŭE,]Y$R&jjM(N8-z'\ & OLQ6mWN$A'lQ2I-uv& c=gDϞ@qz6 FdQ"zӂ.no hKTKשo" {v{ؠz\Rˬ !c؈,W ed%42ƲC 2 cP<(+PlBb1{%ՁѴzỮبQ|]d6_K4:f.O|K4(&:c/*^EŽ{*x7Eij3Q!$ idpt9Z@m{?"7o a\/cJ@>`N2E FӼA ƵqFBpya4XJXR"҄` xapj(㱶xsΘ@p;7q_!Rm6*)9L4͑ODm`Fh aD"..-QGLc&Z{]FLLT6UXR݉ГM/e0fċ8*.9\er#O롤fzGxzbve)|LmF4 ovSVuK*\V0M{Q6Z?f47sqnemM6_^\6UJ,8~E7$uǑrښtSlP?i-m %B 7GWtm7CS ͮ-h=Zru-IK ?;bi'f LZEeiaN' m-8?o;~uesy,޵ Z2߿,-⃶cMf6?eZٲ|)Vsը*O|Z ֿr mk?w!p_Bw0٣lPkƈ<`ALnW>/xjpj5(8uYǷh!/ztc˛:ٚT պhE4!mc8Aqmen\DSxjGu?O_s˰(c^ lHY˙l5(e̖5Տ$ aӉ7ӛ)Q%"@Y,3wYRH1?m .)!k>J*"zG<&t{do46'^|̧7b1E\63/`s_&vy.Jp:0RLin(%Vb8<֟[Hx-I$@mp;&\y)68"rd<c`]?g^Df.c8KKvۖPهS߱muVl6. |Yu3mN;Ir2YUk%?y]>IEP2TD(t^q&e 8u^i)?ɻny^a#;xDA?کtit(b{t>\VMH.Oa-hB^ }ss"&Zi@wf<,ϳQM8SeA)RG |b\ޥo$! "nx 9!ar:2FR))6OA x2_zirXy2y8+L诼e/132\4MɓӇU}{U}}h BPeprsy<6Z<=ra &i;P>4m_aٹ;-Hk}٪YCgfkuW1cL;ƉneD|JcJ+1#0uW3; ,jH .q!P,k74P(E:n=1aV9-(, $BS6,D3Dhǰg٢XAXQKʪ2SF9L|Ŧfxd\76#\fx7oK+`aN@PXfx10e#p"Ve5+dJfz}iJ4BᕊR»!E)/Byby 08)Ň'cLIe E#[X`e@r3BRlS!j41/kݿW]F8s[I*f)KWG$%x‚=8Bpx =ā$^`P0̺ -L+B 6˄H9E=RrֹR~%3Dk%YEj+z%$x|͝ٵ;3Uy@땞%vQJu+T@_JfV(gv9B/bP"aaˋccl_<+U-c$ۍD)T$,AM۰Ȉ8T6&sB"Z>g䳻ňn`oqk+AEΖ\ 53B=] VQϜݭH ~!gwg7̤*%R5b 5PJtiFkw|ݼ,I`$<%:J $*'xBr/mx$2_6zx[{ hFW6LU4xqvӣp3}lQwmVVίo8NQsMV۠I-d_>MEjFe8 M{?hGgvפ>a}rQ n߸k::c 9_ޒ@?}Zr4W+$Σ?JC/,] = Xƅg§Q:.mnȄGRs7yOנ7h2L I qpRF G>MؕuazB˛EGwWu0XP< X@'žHpo '#kn09!R=:ڱ?JjvTގǪɈ]{h1_z<~7*U _jBb $fZ>S-&h%7,*DHц*S$_uxsP6F$xgRE@0C|7GyuRU \4;y4:`a!m.s(/}$tC BJu?Ў4l(ÓK G*cƠuŖw<|ϒk7^AI9rhqJ~Cl@?SMZ uo'of2t|ǪK۴xwկ0]e5zwJ.U؞WPU zv󊕋$ цLeV3>%D#m1*^%ω?qv ΍<\ʍV`BZQ|reވ }vu^y'Oz~מ7=|Q#f9b#g Br"jsaɇh$.frdAs&nAl *&RF' c苣)A2Үy"b q)g -ȟ_ϢM6>IIj2zcN|ofo4e]]Ydo!9 U+S@dr!6uIvJe,bKݚeAb%!&Aͩ:hͩ:;PROa}NlFkuA\M!TsEn 1 1BSj-z揞*CUL|,qQMVgq2N7Z=-nxZh^9TuDX*8|<bign:Ƞ> ]d}oo!yt$:ۙz_]`悫}S%u`Q;)UZ䐅A}ڄZFMRE\>Õ(Ktކ:yC`.9@it{,-PE4aX9a7)9Kۣ}ªJ^ȏxe1dGF{MQ;w1c+.Mxd]~{lyǫd(8zbh/y|ij bKoɆYǃtg;f+fy4A[oIѮ>H"i+ ^3Nȿc3ӳ_Ӊ7].柫û8N~Z&Lj=N;!Q^E^]:ۤﰄ.XUz Žy72M(>\Jɷ(qi/GT:M_Ru[2$n",n 7w, Y-A NxQ~M}D+kˈ:J ^JׄOߒ,2SUd$lz" W~p:!<8 :jº-Uّt:YHf~˅RPI@FoZ$Z_i'KJNpU Q$ MTo.u2~.jew'=Ue_}_VS:v6?|, W2u\LQJj %Fh-h]W"%QȔA]G#3&JAбhPan֬z+ao=T..7 A-p2'9E:^dk`HY*`ŃHUs m)Q,|:>q9;ZUJ-82xxwxb;a#keh2zy,rP#e,P% G;&IiGwǽ78 |{REХ(iI;@-=iM*pL\Vr$W@@M8jeXK=S4 $[ 4U2C ulwn z8HZ e^H!NZRgJ vUu Xh53(;FFKd֝ :EK2}GљQ{qU p8 wrQMvn߸k::c9u;ޙu.'R19OP+Vrx'̠0SfV١ hf?FzBK3FF6g94Nh0^ojUǂ-8K1|~$wEQt;.`VJ#yv/Sp0_~._]8:,5q(T~C'3y{k EYGtш $ B|}NbW0=F'W{c Q-B)UףOVx;ZtV F9m&&T+#CBْ$a pxA(,L /֖~؋덜ȿ[PbD~C,}O)|lVx_.m*<ݑXhPG%;<dQ(cJ]x܆3.7 '%}shȌ:IU 3{ȔpRsݼAϒ%A?K~VzZyRSnRF}VF*y2%-F+U \V%2ymuj^{9j\=&'O_rbN-eΆTJfS !!e*Ǡ:OKd\8TI_TKW OwqSv'%F6~L)07Na %!䗟>A=RP;OiQ7!m_ofn.o>[+*\K*˚FRN>qӢRI֔SC)2˄q~3xjpn+*fᣙ0m 娢34s1ΤֽؒR;K )IM`/xE#ax&Z<(%Tyjwx}豑V5',gbgboՇ=-}c%Pw:Y+(p pѫW:"]Qj;^P6tq|x 8E;ϐD%Aʱ>(E^|FaAE> ## mhK})!|9X= 4z-YRX}myQ"*Sr)bJDVJ)7=>f)ݣ*x,4k"Mبxt (UQ 0% <;܋7q5w/gޝx{) vn7ov[O^UV\d/כ"5lmwh_BB9K\Ϸ݋wwwJXq_awkw?~y"igHY~|˟^k㮞ZT"#?&Rnٓ6WHOQ.璘_OZVۧ5icfX3dƳ (ڌDҶdHtŒnͫ x@%Y\z03RA9@~tKv/ > /2b5V_ t^U)*o:N"KE7$?ܿ}[0 N(KsdPu{EZuЛG5JGfF (D%?K* I3^{y6_W-jA5z׫ևD /W@ʱp2>j˨vfE+f]KHo@c'|2 Oe*쏤ÝwYz<_Cu֗.ו6k{1 ;'m{]Yj I\G w\EyCUz"MEC403&@ZE{Fyt!៤Dnt_'[Qm|9kt룦Z S}tWm4!0yl4* P 
4mI{Oӧώ^/^!ZcC*D{M(!G(.QqsnB}KxrLzV}Oͪ_}z" Gw%˄ElH&;{=hܔY?JePJrZ>6:F>z!}a|pn?7&`g'{x;?D )jHQE.Qn;/Zd6u7C۠jȷ `KUFx}wPݹ'opuO~/t2S rI*'@]E잒iCr ] V&4Ak YE#11ݠ\4DtG߱6íY<n5%avV:*ìԜrmh:>4G@ZQǵ#I7:h'($ 0QoB]w RqWvӌwV56D pg߫dgTR/ؐ4TTG#DZөVVV&Mh~_1LbaΆ54=-VesZŧۛ?'o~E ?v;^n_׼ZU?vj'3I 3Ч/k{fu? cufnFAʛGjDF&W(Ztg D_mWv^Ic~*}΃O^PvzPg?bN'-bE*zUhUK~ʮX==21TDFt=ӓ{d Q(΅/d EaI9ÝcT2`T>|M$z pwWh0xNHg&h'>.R01!PPf2JTsw:s$g P2tcXG5wuF`aU؇=o軯3oϧR@/|^JLIl_ަQ}Y }MƓ9'nOI?%DC.ݻ7?,7K)ڹ] +k h雒_3TͅyRO y/9TW0Ajj p{/_<@.o<Ȱ.2[+)íBA_K$f+Vaf0KeZ|)I+:2>ՃnhW7.U?äFʻX/`M2y\؊ e6bGAX4S4bgzEEEE:m֚F6 'i섳 4FmGtvv퇪ڰk?~9 hsJ~ex^Z)d82CJ:LؘD ӊ%=( Yֵ!^[TZlH5AD# /A@$yhX$ΝպB ѯ;*jvƝ5$<)Zi}6 Ш-eO4 ϊ92FˢkXwK9nh%l{rV0tGU!\ځC7"> Y& ${(QLE7Q?r>r _WըPiM5nU+ [VP0cC0..-/8/)l,@g\He`d14o0;}3ɰH!bT1f:51嬆:ifY@k;R*cGy3sA鄵e_+aF RB wOR?B^= ӵ5.5.5.5.ָ13N x h8*p0Q5B6ε3C*?T73'2#6cLy+Dᛲ`*"4"IXT^4;4>xҩ%VUlCc4QD- *c d/zLD?{ ?,r?- 'e4?y;(M)(Q&_kx%=/x՗' /ޚS_7?|۹o99VXbMs 2s@@9Nya-xn{f_Y E,\[Z{,z243\!kT90uyAZOsf嬞~-rrY1PNFZi+3bzXm1Z!oEW 6 a+DtRvo%xIxxDq%4_NP: U_!r=g?()?b3]j ^-z"Zk`hY j.=8ؤ220pxIIAJ+儅AMtf!pt."޶쭛=Rz7 ԄM]-jgnlo\W<XƑ֮~yB=9ꄧ!JHlZf#vRYXW;^}g=v- 瓕d%0A~%0WOZSh3lO퇪j?~ rh>}$~T4 <}K6 `sl] QCsЉjQhyԆ"*%6U]G-Ш|ù6JJ6^&?޿sv@;hԌG4-̔ʖ+1yNzW浣n4Mw̜U Yfu|l;Љ1tX,y~IkϘToXY :TTbۏ Hq724{%e[B^kaWEP8i{Ovh(u?Ch]O<_4#\?Bd TY>"""oNoyE[kI,;t1tC5) 4q̚m1)oHw0@D[R3$ RZ@R&N&@L*vjK6ʲ{ZWY&8ڊ*Qkq+ M/z,O!?2zLIhqDߑ/"ePEkDN9SdCkz$-P9f}_4ӓ+r?M3)ӨΧ9?f~2ΠQD0!>|E᩽(3Ei93kɵłN)PqPSN_=Yi&2>񀟃d{v=6)?EJÌ WsCJW8y Hߣs`={OƑ_!e7@`,7X&HvfL iqLCQAjRӘgϲN*~=Vy!\~P'D;-/jk hCt CI+ 2لLd3C biħ&rƏ 뎧\lzP%'= Ix`!ii29Ww;KB*RtlڅiĚnmwoN[ZU?gGUV_dC#P:..'FVu&DÂcU_@fFD,RG,1 ,bey`VB 9h8\v"g1c`o>.+51VQ hܽ1*z=9CK%ԕ.$de]=N1Yq9 zMkcoK8Pߖ7"x"]1B/JI ,G6^^)DWi{貗}V(ls.I\~&qMg  e] HHdy|dBbK1

X&3{Py73K]Hډs91'NU.y=\[4,bp@} /v9^koSm_X&?|$\_AHpSATV vCKRL+ pijU?E^ 7LIeX3);u)9eas&(|q9o=mw^4*t$u ǚn{wm$A+<րsm?k»%JMA0B1ZA>SIEYuBcN1;xsS`Ox*"^ &,`V4S%> rGR><Ϣt'#{$‹/V΢ǺO:[owX ΢цg88G"iDMZ_VQKUWHT=4؀ wim-UF\,稫Ⰻ9Q[sA^N+$HdaiւKfgYMTp&1ZT@Tuw1ri":rS.KL4NPTևott¿"sD퓊+K@6<+yEc U_h Ⱦ"D-hyluȣH+iŇ~ ۬ҹ@y@ !1xg>'|GˆWZ"u _,|zp*u_vOH":"A< Qu+"R3✦ 3`)\3착 D`OR[$I* meF1Jt`,Z 3 h@l?[&*%B2TH VლUh% Lpp;#YWbpjMRI8\]̉/`PYqi1%Z9S0!6pבJ.icYĠG<aI.5˜`iL#Q{UTD-'p#g > iFk< J!b2NC ŃK-Ֆ5 .&dKDpԍAȀ "F HY."A Pȯ4AqR) F+, F"LRtylCJJI7|}Ӕ)\hEr>ζ)ӬCFbjV %膆*멁) z 8E`f+3""dzv:Hl3J]ǀ~oU71J6cƝeIkX'3BHUe-z&kYo޵r%֣ J DTcز.bq3Yy6= Nо%\T rV 6lDSk6.rR$As\0$q h ݱ-%7:.aXf P r0yr j~?vSj (RtQd]hurDRFHAZ0E%uݠQsƒ[]J~11@YED O;ZPkb qx+#4b ^yc9N,BA斠f%=URU̬Ou/ҙ"IĻ/serHZ}y hL3Cdx}uр`9k>2ϓ۫Ç܇/w޿&u>?JݩU;; k ϒ"Y.dmPצόqr2`ikU5s; fO z&* jm.oNfN+#Zer}yR.^K'ݼh8h O2N균{ky&}#@Dv Gw}0UKEe}3CyIץK[%"h[Z}`/*џ>{aJpX"J5 (VbZsS*?l= {f0z[m0s+_d ,ٚ;f}ʓy7N4*#J=Tf'Z,[]^f;:$1V~cڞEٙiA)0J- zנD;V¨F3"6AD4J'";~[Za}iW,L~뫋w۬;-E ! |bگ<ã@xijq1>5/J**f1'x@"Soyv+G&t`8 NQMg>@k'>wIRpenc7|z#'Y'a |=>_w;Yȧ1&wDyz4.\p?XTji) Kk7nIjzsJ7޾O3@ۻ&H&t OstVEPJOdA3DJYT(r5ؕڕ]j4BTu,U`JP65g|KI(zm/Jb\JR;At,^Kx[^rANS[8fPmX+u>f.si}/~=Oܰ05ga+ $~&_xryIUH J̋ (3)//I\?*р9Bl-)-q%߷Wv}Xo<~O}b群$JHWT _\a݀:s!(kI]rajZnq&P/yRB2&tt1lKҷ"&xt>gQ#fiIb#8!!'37>U;̉ܺܺ0 l4STS4u|a׋qДl۸X2|y@m}ijTv9&uAy6lj؅x?=,AC$:S 'B4޲d"B6GDNQc4WrdE):ʔnHnL{?ޖN싽A6&<޾_}2^}Tgf^ Ɉ'Qj wm=nJ4e, =gg0yAC"g;/)mnRd9ALjbY,Va7UcRx^byΤq̚G8 rfVj%b^jo>pfr?*[K_xJb J>Ѷ7 'go~u*l>~q&lLcPlz7Z0z -ը3Qp?A@1D;zVtuHpmwR")1-YųA0Yy1CYS1 Dץ*cpM`&: dH8\vWN"I'!2sB_a 3`< (%9 $sF LZ0BOĻFAP&X1}6sgS`~KYSp/PTiK3yf(s)sKf5%BTTP%ժCQ& sT<\,ɱd>D:8 Z2c+\\hJ fK\: [%MAb. ,/0^2wxK D*f5FÚ䦦}P"ʠHA ?mW>gӭz>cϟ WxuPzb%*XuKak߿wo_8[z3W5c7/wRLWkҗN!<\_Oo?)gTIKML;#%狟VZ=<"X e=2 [akp{NBpG'I `kCb⬯qZ$Gvso}F |U=~>T(l [Ar/ծ/nvY@ !5 a ѐ9 wpb_Eu1TJP 6qs;Qlz7oq0Ά=~vzxD!kw 3y6Ш*Dt97t؏9 ද-V d(묐4ό"%XhCap_B~%XhQ&3U2oʂ(jxAG@,eQ''MR}9 ݭ/Hwc7WWQiƫC|.>:+a:kM[Kspf?L׬v9lݹ`sy9_a֠c-ǝ]z٪g2lE!HZ1Vj#Z ")XrkP3nC\;^GVJLTF`C^]Ts~XGk,&;X ȄBkSC" wS (/J%6L \ g@Is{_7J wm֪' `sR<ʂ{6s\*R0NýtUs1 F^/0Y_w_m9!a]z4~Ucz'zSeN7A*!E"gWӨ4\2+c ZӘJ֙1Kzpϩd(%߇,I0(48T 0|&3JrQRn`4'm $3_r_ Q̭uroբߏBUP1Յ]/ 1j{1Bx]e5Yxa1[/5kC*X4:Ci.$=x7ơW_Ö󠇛ϫvqA @E[Jw#S}}__Y>=85Fc faoS{fp/qw͙T|dj|IQj!m2{[Gb P{Gdpzܯ:t~?βO 0@c ĄM O2sQ`(as0L"kTIT(V B ʩq*sfI)p8TH ™e,)cu((8jTVCM*%eΈiG Ĉ©:D@kU TJNy` 7mT1a{b BZh<h; .[PjAs`%uGϑ[.RA.[`Oa#Nӑ TwcUBDy:閁QDdtڰlQZ!O { 𹔡Pa?eT2N}RDyxTjc"k8p8ܜZ>W$fJ֢=W7>${z3z7z۩9]L:sm.nݮm~S}m=^.1uˇTWYqYUi(ndp_I˂fەkxl͒̊;u+ 8B53%!€H̩kIYLHA-l9PԴJ$jc9uHcVD'4+ЬQ\C\N 1b1Rd$YAofP,ԸBp=5Fxb dPPX%jà УZ+Z [_CQZyCjp<܍eĬFCOc(x/ b㫏$l萐ur%>RHHm@#$WD9| ]'WT tYWXn."W^b$5cB:>tUY Ts!E X!* N3kA\HL|K;y\R,ܻpQTV8y) kKBai"-xDVZ6kf0 ȉPTZ$ i5-3fh?w Ϩ;+`:)QS6Rn jNG'AHw=fJ>X>䁆BB^j:y_ƠR1whcY&Lo,[EtGdLI]lMwg㩘b/AQް7߾F5B }}@u^T"BU-GSkaݤl 8\̙$ԝo Ս^.uxoۛڍtn%Z=v4 ] BvIAdU8aXW=n<x(y=HTP~,3#QA"l+hPo'(` jf?kVΙQ1T!Df<vH#{ o|ǮwufS k)7 )1dGj4Gr4AP|]Ita!Z +d`o#0EViY-^ w}vWr2o}j >"';;))$LYdڦ#z-6aot "6U]]~pyy+]g~utR}"ڛ Tj$w[~}$7hE !}ZV}UP咤Mi՟;ݯ} ~TfIX1-q|~vv!.&R ?>7p+5"` ^,h_k.lCH<<#B'zE¼yRkɐjl7}ӟ qD}pD^E?a _1N2Fwi~~jO ^. 
-H .l!1@:E<'@ p 16esb|rvq7sz4s/RtX;N]aSA9K /rvǓA$g38;C08{#XQ'VnP$Б)dPۄ8;c'2gr,ȿ,׺^a:6bǶ~NsOcZ4gQ2^;cDWx.~?6zm0919 m9c}Ab ȿ@?~l)յOcIH8IgJɑ$2ur=8G /ׯn7.И:1‡z ^"ؔP6(#L61hMÎ$Ex݇&K ,Djq}8L`~mOםsW՗z_J}T&멶?f--W~_է=eByAY+׊H VBZ^R[r\j&~<+^o.Ubd/zu*lU2G~j_Z`{-l+eB\z\EY<θ&Fh%X&)@tA@\U抂P69)e0 B" 0d!rrhT:'L."4XQp,r=0p\\Q%1LY ; 'Z-fe0J A,5cNup΍‰$ٻ6r$6ؕ#q$6d2r0$D|<3WlredI6bͤ>XaHDR!RRf;F3ZjxxFsͥQ1[3О"oM!I3Cuz]& 1Vy YNc2mp ^H@,D=m~w{د!BIwYQ;vjeݭ: ?Q`_ i\a$߿%oVz`Y zዯ~^-<#<._>+i\mn6+ B?33?6Mscf绛x>x ADO/&aN(n~=#RF@SDJ9>nqFvV!lT?"6`#ptq-4__L0w7 cXlG:oVv>Fa x)unD cZڔBfpD1 ("d5-DJ8*&ST"$P 5V,9:'qUr{"=ڴk|W(S3G鼮ٖrnBq;Ms' U,z:?\)f0iVclp9/rp4K,}_#Q!m6Ov&eG4(lm7Ï9];R 4Dra{Y}2`Hz& `L㔇՜IjT1.XR*NabZR|4]+ENT+DIuF 89 XZ3m5.; 9ȹaBS#s&! Tp]k0"IfE#cǭĹ G&N Y0 840\hH{SO\xs]f{7>{1Jq0Jܞi-h[d|fDftk0o۪5 piCJDBK 9śś7hxw;?zf6Fk\a2˯7O?MݯfvO&p2U2[VJ$?9G4QYvjJ("wNwA!h9>9 g[ѿf¿nF7Mǃ)OgOnfMCw)zws,>?{<-YG[.]pڥp֝N ٝ;Uʼn8S?X'^]Uφ$;߈Cst`%N!N2SQewuMr.;۝';5!!߹&ɔf?;nAhT wT;27ZS'm{7 !߹֖)MCY'~ PQmz8]q佰7>t, AUpM[H@Dt/߸T^:LJ#ypeaz ngބ v4_~.̓0V 8;,g|Ao޽/\%4 y y(D %hRqA&6[6XMgɯs[gT&~f 1?l6 |X]>Vȇ& ^)3,ܹ[}>5:9]qF+{;:n`e w%U%Z`F;sOn57Hյp, X QYM/Rm.Bkc$X*r g l:}I4q¡L"͹dVc[&3zTL䞆5溸mmGR*[S+Rj!SOKJUC^) ,m:ib D=/B9;•!ɺLU9ZD.=DIkEY.F<:cLӡ'+ZxCZw")\*G1; 0]5.nXt$}xlLdN1!mu]~juA[’y$byPnP5=U6ݓe Da S\eo`V-I9Y@>T PcJu!ZGj57H䄆;Rϣ"/RfIkc?|>;&-Spza;,o;OFfiu$+q9A.un'h7<gs"dV238bW1<jLs#̀ TƊx,TO=:r|]IRg[nĴ9Π)FGBv 0PaZqV :Y5 K3zV yńAek2G}}=.tti+r6?ߒŃWS25,\mvcRx0oc@T9RfLfT2'ͨ*(=Q!z;UOV (QJ:bjs" FbF&K `'5& gcy[k%A^ #f cΤY謌gS`#mƋ E(-!~;L|EU29Ϳb^ʉف]Ե` υJy_Sb)+c]Omaq»k;9:AR3Ļ};9>QV˸@ѪEnGQ>ָ2tJ+%Uݷw91A>XJQIs4R`Dv뭏\BΨRک_ÃR-nT. YNipGMȪq"&xwMk&d;q2iBV\)tVTDӍGU!9"6AWҙǷLd. HLhBR5 CPT|>%B,i@NCNa( NIw9G婚yϴn7TG-R1闕*-3性t-mlք|"$S(M/q{nĈN;hiDj[ggFj&$;$÷8"hǛA褾vӋu Ej&$;2Ł^/nk}3ޗ0)9"fZH3vt5hQnI3V4 ^'J%ۏҹZH^8],9V"NɾjF (ȷ^$̿lr}U9IٌJ}uNL"&JETFJK* #4(BT\4Q*> F_O>`>fR| ,]'>6$֌3X3$F2$7V)׏^XK#R`TF)ʈx*Һ#W,S^+~] ?V)l {ly&09\silnA8fq( 3=[cK]aj 1Ćkg,3gMr19ͤV)Kd!$*C%Z>RNǡ+b)_2HU"9ZCR_]#H}f- N˄3trgXfE Z[6w8xQjt%ԧZ?Eõ+.%]3‰ڕ&Lkzq7jUDk̸lC~_+؜./aq;c@EU#zRJj6+b/$%m㊱UR AuBkbǿX*6zWV)a*~cR+1X2v 6n-_m5 K ?ޭmM(!غ>^ϽdnkN] $I0 ST$%{]=!!} j4 ;#r9pPS{ki|rҘjF1[_6(9FX9H#S~ 2|;wg"hD%U2K 9mFtN3qX˸EFlT2ùtPJƑV GN\\csn[8Jkb`$8\klWW]cTNNGdr:ADf1@'k\JJj"kfI^N]aB0%c$L/kg\xZc1 q*Fe 9RS 0N[0=lk3΁ ZL=lN!du&;u]N}We뗄nx}Oݥ|Rvnnי}?z 8^HPmH~K5FWmV!euaT^==z JVT(y?nT?~Kԩ.qԘoOиwf-y7]q0Tg'2*@%|B0ywp"e(9fx}\O[~/8-Y,WCۚdsu %eE(u +M>t#bմ[|e1nV6n= !A~qnY} ܨYZߺɊ2#FHk35T)V@QQ<`n>R'3 fcأY٪capS diP]q>nќBFGqT`iiMfCpUj(aߌ=or{#^JxY {pU νmJ$8vġruuΑw@9XHõkn=ZCLǷk4#VB.xU5rKOeARill6133JÅ5Wa/dt؄I0T{ǭS4,"YiqUNsd4/WSr)`N֡(B\1 V1DPBc ϱ*Ub1 H{yA}v7 7UBrRr(,Re@ b,ByvT'[SOt mT٧C!4_2O*uaVIMJ}2 k>]ѥ|`/؄ דWH6!|`}08 oUlRtS*<ݲ0]F$#;[ʮ /zk/|BV,[^E4*;?6~B'_uJxߢ%%E˵u#!)"$Ewz2o S 2vϔA jjo 17$Q"ԃc9 W!!TYNU.0Bй`.(F:#p;"'N#A"8=b8AB8tВ=OPY5Gg3+&g '=%I!={6M%Ơ  `@4y+~7V2P~JPVk|.QlQ(r*6:|QbIlPZZN)=or ?!S5+dqrXj/^n|OT׼?]i[l1ӏ ~j pq. 
U|@菧YeggF<~>[ ]1|2/N ;_r@X*^[?ϡ,ةz[֛9BL` 1Tb'Ň<0󄺄&00TBLIK%~Ba})Tk㒪cJVgŊ\T}s1HIG 5Vܐc֞Y7np08`L;UrE<b Q!U(у38]ksuoi 6PKFFLg a  ae= O/mGQP >Q0׵748oBޤC[Jlp"&";|'nx-q J -QG 0Ku6l]-lؘ#>y @pZ"Yܽ{553bo.[0Nǟ?fl];eqd̠c;rIrZ\.V| /Yh/v}xoQC)maA"j̉eqFkR~ ,]`qϥ ,jp 쀂z5b!hlײogy{WM2>֕c^Ϸ w8ZrgDttO>ڕ+Fyj?:x~o Fw~ ~ݼ9VUg1|x$pﺴc`|:aaRv D$zuΞ6g ?W[[|\bf0(0uCpʋj2@tpl< O\0ݭ8{ޚƻjo!)v9RB ad^McvQ7Woo&F=(=MWe[]N~)sw{^^^LDTR60i7ҩ|P ;hbV u#×!T'I)Ұ VJz"*  OTd?%@'EG} 4_IW܉H{i sD!v1L* /9 'Iu߄Te "j W*wJncRb!4nk1)Tfl )e,04.9fbb[pKs"TziL"VU4B-KbQ XB(sFrˁSo B󭜠g 9Pw&04gT"p/k5X*A(9>VrpX?hR|J)烕O#Kw!!ԙp$r -I}GvFkDZݒ'ݺ\Ddl\5ږwS`}ɼ-ffqյɅ0o \g]'"FgFc94PXіg<Nrp0|l]:,H`X-Tj`WB, !( ZY踴Dv̀ 9tG#7 '@7SWЀPbpgb!#fE*bHKeJl!u0ϝ G\qB s D-}Ȥ]- r6RSqd2A.;LN:l6X:jE#ODI2"ȍ(?3Rk4t` NP'_UwS7ڳQlҿ+d(NzsYI~ 8zLR7.WU8}Ø=V̩Q9W; 9l<>6NǨ!)>XkǏh)pm5rL2A#.$LЈQ6sKDWMx) e執WQ|}V~>Uy#",F:t(<̺n%P92VAn`3tV\tzz,r)OnK 1*;:yrHȤN .KR;_LOJ1k2 Hù{K *.kiq#X:zG?7HRa0d4Od󳬆WlEm.g paO$gXg77?oQ]{nnBe22e+ ṂC&r&03@03>EOO/ B~O7Fn /~va8nV[w]Yc)J$/Ǜʌ[v3nxʥ 4؅z.cd=wf>\w;t++Y@5 9 Ym2'PCrˣݫM.{${rc3ؿY[q(:q?RP0L . ,o[g2X%H3mvԘoǿbrMؓV[׻*n0g'2'x!\"M^É!Q}\O[~/8!˯/'oֆG/ng|OiHn.69T94W 4+\&W7O]L#^ڃl(.Tnȩݙ H;yQ{?R=I%F\ueΠyx{2:`=zp.'PQr al{ưg[I34:h}SUJgO8p 9gj?W?>H+(XwSWmkhVg!BCBe uI{8go1c5'b`tשWCvlw)#Gn\G}w ) }i-i87'Ps ^^>flma^clŬW(-qb1em?gH.)+dκĬ3.$`.i4:#_i6XXF ;cV4 0$c!9wF8$(G10eY.DvJUQ?AũfZNi蒣RsP&jcuy :] yBP<= U3nsUjlP /\̩ZQ؉# pXp&s*#YтΊ9(Pf?g:"ܳ;ΏAf#"!Ӥ^:+-=g5oy޺i lhZڷ4=[TvpMjCo=t4Q6q`ݶ*)N[/H.izhsջFa<.\",T,(ςMJ` vWA6 l¢7n#!yJ"fm̤lOO8ɰ)ݦemV+ؑlI"nu܌g Knۄofܛ/۲/C_z>ؗM\̻=kn{5vUwߜU`xޒ}$lޏ~{7XQkٸqeޟRse}k+revh['M:|6k4dѻ<[lZhC @dQh)habY)1y]Tlш1z Zh ܐu2m:X31 :f*̐&%kat%І_.00kvs"]M#[g+_=Wt{G1hTv=0-k {<^xN=sybP)m}Gu]pZYmXV붆*:SFe֭j dP5Xke~XًS⵷V[72k@O[\sZ7mlPf(u |[uBq To [s׺K9x9+k7v}]+ͷTA~5[BazljSWoo /ggLw[&)+Aǐ*JHK,&XrHu:dtdGf-h_`6ư%] f67|=H]tcï/#(i-xѢ<V (lA@4¯s=OS #G1hɯ'xCMDeg%[[hSkhOC:5AuY"v~Om#356y&RRO[F*\gs߹(G'񦄓?û_|zSC׫ݔMvSU&iդHʜj_/ ^5|xe^>{Hx^ېfK͓xb5) uY'!M8M'=Q\yWKG%s<8$$ن s͛Ӧ~*_/6,=7U cV{K IvHd}\gR?cVb-Ai :3+Bܝ}'?Zƭij6<r>MC' R^Vc_ k\] TK0M%8S\E٪˒A(W[(%c6JV"Z̈X}.VJ>SX*NO]n$=BU,(\Ժ*X6=5$[f5&4 **(> (Wه-n']˜QFzHae~Mm/j 3BXae焕]3nw1ނ%`~za=lk^5GŘNWtkzoے{rw8wE3E!8} `JnC۝fz?|hVz?}qѳ_6Sw kf5#|G5Ŝe냀7a[xm kls8AjcEEF2 4fd{lkNRRpu0 be 5 d֔]{t+cOƏV116" >Gg>&MQ Gq$;CQ>1kJAAZAteAWrS.kA.:yl֌Šd=P7 !dMgu,[w@PkhO:_]mص-5*qC;$ګMsפWf=O@ObbF'ohJ~\CC~r)T5=3nriƾq~ܶ<"'+Rs+ ߖ˺$|5K\+tqqbD 9de[pNxn_'|iSz}pKtR[ 횾WBH+r&(+ƅ/=ӱU[I"vNvsZߑtNf2QjZqhQu%ԕvV<98FٯɥMG(Lo4s6'\V3GblsJPҒ,J0 IYEEWd5Z 4EerZʉ![H \O܃لoĘDŽ$[V*{ $!XjBS)+΢MwnDo-mɾ"w,$;k{8Ym\iƴ!ӄOMRgqd=#@7m+2巗hs~~C~-<}o7 noOcvϊzv"-H[I< 6߲mޱrZ¾k cisfٽ8nENgr#W.HgF}玝h\?o -xEFY3ߍR֔-R2nǪ[cW%tNu&~2?9ӛN.9xTa?AtPmCwImйz-"ֺj ѠOE>gsZc˶`q 鞇U~AWP'j߹OϫְS;|N@wy#?\c쩠 T9`<SS.zJQ&adpe*#A  :ho_=:B(- pѼosx54'Wji^>ko luL%"Ã٬ [Rb>.*}8h֒9pJe*:S4nQG1 A{h/ŀV47#|UqZ"8|D+^$׊픑HLp0AybfgO%,ObWP+ ja0Fjͬm Q-zM<.Um! W] )\ ,;d YM>'TJdPL-&WqaNY򩯡zQ*PIlsd ^)2&OpVsҧv')% eUDg4:ȒC|V5v:9hA95\Pס3qT#kQmmc7;HtNSΝ z 4%n^Ag 0upHA(y5#t7ف׼E-]C"w6{KHH2;yWʂ4XY ]LʋP뎰}:l<+q( /.}L>8͋3/^JݙG[Tjyzwwum3/[1HP~n|BWfr.5Řű'㶸ZWaN>S\.[+Of+8 ^/{[fȞn[42Oؒ}agWls <ؖu7Ñؒ8ja/h@k0mZ^QRT-RT\jT<%eBiB:g%#ENDD,$FoJ4IeVrox}'Jh2EF!xmNVtnshG )昺Qq ?efs[dDR7bĬSgV.D4j2&4^s?^ ^7 C7>IedϬ?~H :gOjӻ!ѿ'5=\Zk4؟~wzR ?~\ZR-> E\*/)DxDw=@Be@h62Z! u9Xئ*%w`p80\jA_NOe<78FdN.=%-=%q&sSL֨'3?_ZzH燅\.>\p[\-y2jTĵ%EȀ21Kw0:J3OBFC>oQYѸxD炏<ȯWldImg>4L5&HeX3o깛WTODV}32-;3 z`2?ΰl6;(Vb h;Jh!SA&UD~EI豰p?4 aV_8wo~\1 بs1e<ʮ(^t`tΒEC6z̓u]"}԰D hGkOAg%kEPalc(V ӝA|.h o GYFJr , 6lo*3bs1˞_ł^(Y h?C Y@嵀yxh а4/-B`.Gc,J\H%Q=D, sɞ{zQ746JV'.l% φtRFe[ Ef )j2J٩e֤Kg @$ZKYKȐRcy\9H:Mـ..Z/.RB)4rA.%Q"C:F- L>aRAfg 5[n5j. 
FX4_R/!h[C&u%i`Y4ymG*c2("X 5EY L X* E2 /tB@?ްͬ!*}P0r rb"\I6AR(Σ=֏f[iŏnxv~}vgƞ~rdA$%lb&GB` d&L'L"(lC儰#M=1HU# ;[f !3.'K91Ś1)d6ȵ_MCmZ¯F^dv!uG0OsT1_F#`L-=3_[c+cX0pť-HU'#72!9JE)$AD[)1 3vQO$v6(Z*Vñq_%wsC* yɹ#7#mMy+F P+u>\g[bIA69/GW aZŠD$ 4͗#K-mL+]cPJ< þF F 1XL(+Ӆ陝lDnI&tHXg|' 懏F 'o@IƕY. b5y|Xu>#pwc\)c -:y4u277E4/'ܴ{[]u˓==[.goC!݀o,vsK~|XyRLt^\f8vQXHWo_NdśmZ{}5-694R.IT^|G8-d;0o(ٵi+/.뾀ę} x[}I_?^SֶèLYBWF-QɁR=攵H)P}ږ@} )kUxᵤmMW%A;h5Zi,媖4 ZsaNn3`±,7#5-nρt}_?lcbX#x."I 2A+yn0*\'|\]ht&'(Kz$Dmm T4F*Ǖzz9@"D$KJo+^&Rk HܹWN^Cs;B XIzR HQx K"kO҈>QPT/3y˭"3dxEcf,me`Ia4ډ&h G$ج㫬:ۮ^ Ę?c2 Q'XuN.P[pMCR@Ey !۪lf?vu:SY=S-]ۈ5(bE e 9f``\GUj >ay*ȯM=웻}SD(.7L$6M=&Tv,h滖?!.в` 6 RLm yRԘ#h 5_( #cqIk{D$<#wZM㮁Ԋ2cYKA4XC#A.͜N(A! |  'D/7fx JXWМ^{{aZ<| 0@rp<Ԅhch, A93& ɰ7XR)WZ hyĤ 855gj`ZbFƀav c ybi؆@G6@'Jkm(B!M\̼uZhh[Ukmh[:,U҈RJ7X#5P #z?f,FJ)Ɉ@$ Gi\1byU,_s\٨_c0HaxfV"2GtӐ h'x0wvIfq׺2qU_{Gq"Jpglٍk %J&[M@Ň]@z6>DRm)L_}1 ɋ,)Q|>d7app63?~/fbz/EnP_v-zfRo>1Qntdzc_5Xt;"q`K{?v{.U_m#WDo7A.3RzI[>&&IxR%S)FdV<o !֓h5288x tRGd v6T B4GxG .wEU1*QhtQHDR´Uݥ&0wݲD.Q0FDA5I!\PZ ae(4[w:I$[ʈ9SwIQBDXO%6Qр uԺul{R'񙣼҉sL #BR#J;RR6H3mߪ;t?7ʿJt L ٵ5u\?K^OQ5iE9kdQ2@Fzߴ8/LWOa2'NS%EDb@d:rNQS'~Rg&٨mj 5TH!ieH6jŸĢ$5I~ZILZH6jSulӜ8jH6R2SyJ,s}lTIq)HGH6*S5)hWf\aUD]=UPs0&r{.r~RGn2Z/Yϋ99XA4+BFWx $`a+?Zs+D bb6].:hbf_`4Xy!i\vk_*+ *B1i"X[, &WGkMULiawU  y9eV:)c!e+Fd_Ni+vșН!*س_?>s|D81$Ar4KB62ϲc<~xmJzxOjM+QF)y(% Rj9~QJEJHc(}L)yRP˽'t+PQJK%(U(-Fc?,+=i3 3^\C˻eHDmshz._ ?Exb- &J "Rk9,rƊ{Wb}ʷx^|je/[>cK)şS.Mӥ\;:(-_Q)(0smHEW;' %c ۟'HH~=q X +exꍉKdAK^[k~ wW;XX8Jў=pv& U%kz %Z.ѦP2azn9M,^9A@BbIcTT3$1s3#A+$&+%r&P{ w~_w scSgwr| jxkCi0F8Fsq"V+ :sS;q#bYYȇr9YL. A2Wr`(Ȍoԇh} 6$b bX0;) )QD&p>b;R~ 5朰;dgyu>60'w_3:*IWaڴ~痫NV?)?\sJq 97< ϔL&^B_GiS ::"@_ݯKfo1D{sUbۿ]U*I CT'L~ol܏G0p%^*5A{ӵ^N&VRl[{ɍr6`XS}xڼ[0 ۀ˄^Ң4j[yg-5.o,R9"9O2; Ll+בBb9?/'/>|iEU@1N=X>Ue%L)ӯq w.!//*>A!}vU`݈69V *9Ζ\󛨗DW(}T͕wSЃ׌v^Ǥ9IZK>/C髓ƓŕoK%O&g$טZ-="[-58꩝T֝w-_mbr]CoFQ M+~>4h7fV6zXuk0͞=,&U]VkfS+=ݠdehjo#/TN/=_܋x̣7V٫G!1@h:-Cg-!L}Ug^z>`Zf`"qolFk!sqfVvd9C 5M t#6^Ӏ^d=VZm&RuZPC]xeƛzK-%c;Vm|raw5#zIS>Ѵ4Yݫ\,8^+j<z[a1r,0~.'u_˱~nMmjs1dzGAw$O JiH=_'xxg{FsƯUS_g[`Bh[,a^Wӿrrj>:on]Eŕ=!h#L1^tÄ6D6ȁNwԉnO5zlSftBq63M ފ[Cd Qkv, ssn{@7ѝ1%Tljc]^|cS}]O ̔h϶/TXR aadz*9<$WZn hfy՛ή>ߧ~~@̓PU=Yztݛ`u+NÕVǮG9U-G 7ZW&.7G }DT BRC8޲u*`O cM4ɦ{Vm&jz4 L'qmT[jYwK7nI6#mN»%bd:n#fλ%׹z,䍛MaMB(j,a uǸ xdgZֺO«@GȭX}@Sϙ9H+v1\ > 徜~y܌ǻsD"zVjhouv3{4w'*\?\Qޗ˃wo<-k2'xdXߑ?rQc뀋Uu>HTzEhP ɨY.Z)(TJ!ԢjG%PJ"H1- $HؽAzv)CB'x~?8ڧG^A)[d̖j^LP3% w}'$96 E^s*d,Ai'rjH Q1ZnuaN|Q%k(1Mѓ|~H)Z#JVGQd2gt}'ՂnRYmRc߷\G`֖N! $W'w̅-ʊE̅ 0w?y3QdenPwn'<ܕ^YZ߬9of]z ; 9A;#DD#MgqtGBzw~O}n ~vx&cvMƩm鹠~OkuNǽQ-!'Qύ񺀙Į|?_"vv nbu,>u ʼ^͵ӏ˽SM~S`~|2+oW4i^Ms.R B,2dƔs4(!f$g(V%/ Rp'?4?U* \.]z?ظ OяIu֬*;9uq:73I%-Y\gLܟ1`2(%h&^A+3j-:GAh_';'iv)1S#C.(1:uNI0+-FTH[\ !H$LLATS VBZJ! _uq^$ aƲ@J*@pFd&0'9R, Uf͝LVkv0jor8P!se}Vc gSٗ`JwYq7T fy9|Zh%(ȱJ `R1H+< \ ɍ2PP(E7M ǤeKBJ"J/eTYmŞɂZ$ s6AJfT3rKxka..bEF[d ui . 
J4T-,-gږ)\؅R)0qPmc7-i7SUƊjez:mlu"mRa5T)o)slamc酐nr2> amc.% yI4IDgmR@Fi_ǒJ.#p]NLˏL:;ƬJuI L( 5#``x7M.@)<_(8FLbb3`eawa7= |3խsƣ4$x;EzFʨj> *7 _ 9[s8 nϢdعqG1X` /Aq\E8SuAkSۋ Jg;^  M@nM.錐D-^CQ1R[ѐva*4YG4-{'ʵ5%; ֜{/㏗*$mm99\O~^nZǦbҜ31c^5ǦGc7nI6%W-jR11wx/[@օqM)6sۻI:-1QۈYMcdjح y&nSx ȂH( XʶCm1lEX!\ V1RœX%N-Q`U#MrbŽ8[ / [VAVX- T WX OatTXQRͿªSaU$l2^-P-Xs7eS/w_?{V"oe;;5?W>A4w)C} ,j"A.>M" $tA4e;t*{LwkH zv͛l{Q+ݯ RjեqlOG0Ѩh詪ea2qQ 5/| '#drTs9dz x2مqM)ԓ_ⵓ[*!6b%.ҙE[M4ɦ|Ĺ$DUY*!6f䀱[z@օqݰ)hSlS:> zw@ T:u^C }`H)< v9L2x'oaq d} {P\ȄTjw~~~smKܔUss{s.ݨ{49_|6{)lJLf4|O W9|WémhXxi}wkSN |Sv~sKVijndW:])6d=;S$V}*2Vjjqㇻk'ta0uIX7/uYcS~=KJĠBPS`}Xb!=ډ2[%h ErRJjr6gEkRp?slF:?fFo^8vqշ}j[>٩Q]4ƃ"霙Cӛ$F"Hp;ߕ8SH92?=l yÉd.=?9:ݝl*X\2O+m~篙>cgyx;?˞}l"꠱We߆US JԚɥ:GΒ%[:tZV^ŃˍQ O6RQP9 +VZZ'+=j+媁!eK\p/+dY"3Bi)5XOXhiᨁ 1jX~&X587QKinܢ$(]9} S3S}ңR¬UR6 +e,JVR^y% jV}i5jje'+=F+fV>3樥"dGlٻq$+F=6`04O3,m k]b})R*/r롺,K%N$UyY/=-MSjW^T4+wn[)oRY;X9Z)PR8)ዕBKda(Ҭu91;o+՘f;ˈiJm^KJյHsQl` 级iJm/d,Ri Ӭ*fJQ vfa?gR6eC?f_KsW_nܭ9-MSRhҒg{WYPzڅ?Ur1p\YfIñ;’L E%筪1H2p%E&W!܊Y[\0n5$ Ւs4;r^I {_!,_0k"I9Gqފ)42."o%Z^>.|>|n>SzU+hc'kI?a4#~g IkKji]ըBשuԫ2hZ4z5 ݃/Jhn11=2=WlNgKkv@IcyUpZ٠0@‰_imJ$1I/01IAFhal¤}do{!|y}Uy#܄7!4 O "غyyV"RanaF [x[/^|hkiqL0b(Pהoδ=G}`5e.C  VɚXWs ,c-~T(_wo܇oZ<, }Zi4d6ihӼ-8Z`op&% ЍtxsrVMӹ1hm(s_}@_}6g6:n>#4fa5.u7,4t}ǩ!5$J ,}Ӣ)o8wb6K88-MSjR)O6Z4+8QjyK)TCڥls.ag'݇QEz$4ղ)4?~8L "Z/Eӆ2k 7;{(n*|&zg@ۊ2seNR[lpg cag# ֢N3qiyxM{Bvf#|[(Lz ŪơD6` zAnBf^SШQIm?m)Rdd,,(й*iY_h\(t(wZ6B+ΉsmIDѬECa)Ryg1X& B"KL i`s i47M+Dy-H\$SE2[Ye-("Fi :;oE|;B+9MDѮ15{\lZ\֊̵H:/[ځ\2-1@2/ʹC\GƙRB(HEadPH813\sj>LM+QhJMZ%-I'(PHf d$*rDuZ>5Q>Q*,cFI+u nW=ReLD+Ԟ-eGA4=C ALO(qQLt4HQ˖XF18Q\^xORz8Z|!kS;&)mzT2l/p<˟^倮˗XS̆7QA*uX9d(;{~EzU4 7>MM۳,䝛h/b\|n[_ RL7xrRh'n%z!,䝛h/mO-5n[_ RL7x9WFMrOnCX;7]oKF:S} Lb=JZ_'/P]ЫQY&\|L(Ӿ᧮dQrcMjK(n/ \,C{?m8G f-;m_>Q6|c+]]w#nZ %T?-w^ny_[x|l\̇)TS_C.J..f@VB ^k:"}t!sNGowO {ր!:N>EƬ'CW]K%Z5JJjsϒm9O>=O,+3!x} $8,DR^4ST**ëe $*r(4pծM(ES?_rLRёo|+|I:Zs*2Vʥq g}(rXc:LĈz )-uBR߳B] ]'6<:/ -j} H1ZDm^Ke 6)Ŭw/ ʃG6cV;!wPdzcr$1͆\y3gڤv R{ 7! gi@9«@)W8Nh2UMPyFօ=m"vM,Ry)P X <⤤Co*,4ydS`9Yi0*adb5S=NF} Nmp35Y[?=h %Q@͝ A%r "*"kmeaFX!AEJ~Q UےrE)7YXV"1BVZZP{u*2rHI24{k,F1`| C/eYFJn*qM5IaUL]Sk3&LAbnxm,H!G88%ɋRLL^PczBZ1y`^ޡ8/dD31Nc *9=D hJMF 96_^>[M> wD2K"zx@hQpĐiꙌ.ቈ![j@9h|bv Ѐӱcv\ۧ'7b ŽJJҜPOv8 ;f;iS_E 9i9*r>bȮh6XF9EK)`oFHLG^  O[`Ȃ9 m%brQEm#ch0ݥ nROA }Koa/2:@ CCX;7^6U`XjnAF6cuZ=w-ѻ a!DTR3}fy[_ RL7x9Wq6wD6sm$di[ϧ!Sp yK\(ޘBJdk/҅6R.?WP!io֨n7 S54M4xtX >8+"]HA("\b\KL6tR>0Wq갛z׷;b-χJouGg0n.K8vnu6.@[V%ӽq8vA+:^]ov@oѽB:0RؠQ[yLI{v.bSpNdH0\xjnD(,tДydN!TX&)r.\ ]_oN|Ǻϵ}zܬ>uCxX!OHn-)*wtC2Q7щp@% gWiJ7N)}q@-ȋQ!JQΠ!km>zP*=6OB41Drcy5ʢ?";wyywuJ%ek'磇!@Zw'w }(ߕl2XoNlQ2&Jq`uٷlw1ِ`EBgU.Z2VGW^0Z{J=,`>b[H/RHБoV4We´Ed\Hq0YN 24 AE#  ̼I֬ݷNÔ}<(2I[Ne!A(S\3 e(,[ZZ`wߺ%(}C)'m Bl,29 msS۱"ڐ @Az I`NK☼RxakY 1̝1T-1QyS`=:X ZH>:X{ uxxu0T?WB\WsZIꃍdgLʵDA1S W%oqJ5uTWz QC* Ũh?{ƎeNv fw)Oّ߷(v[nw˖'_݋,a9=N4Oj/Lpվ e-yE+Kro~Z e_HCHkZ{㣺z{TD]Ld_ imQ6Qgm,(Xifq J.8zhuyHQ o.r큲=VVD%LDnR&oFn9cڄ2&K)7ML*Mry IE2\?v=&: /γD]P6h(uY H!,8rk^ ,.O_ _R @A7h Fo׶O=yy+#A /FW̤6$s)8ANɷLmH`h v.QΘU$3dh](@Q{ȷ ~Wk<t풚F^:X<^\4֗TG%Ig޺m3Fz0hAp_WUKnr1 a.0"S1RXR\ck['wvMv-ºm%pn@*,(l^tU"];pv\}*2][sq6dZ] lN7ևiENŰD:M6>;ga1AqLqʹ#GtPxRL6:v\Il5C55$4"s)Z:|"$yrt1Q!}OZoRzl./?&hr@,LG9Q+/eV= ;c_kV.![daebd̊KP"~TݱX ,UQ$>_-fcbfrzє\0QD̒@3mf.&u9dH+k,n>}:],Nf7@/7`<ђGOqz$ͷ:"RJxet{0(OrHtr F0PKD, ~8ϒ(Q L$2Np;Օ![ )bpͧeã,-E"yK8MobgzC?,ra duafvbf) o NVY-'l ]cP b:.S.}>p>vLk )|}>дF ^yd.0ace$y}PAw;C7=;r;q/ 7}%$Hg48o _v7wX^|9ŗB$T^jqoGr9eKґEfO~]:D`-hꇵSa -u):Ik-!v=`r;R;VHc_t1jDW=Axxޫ2EA+!`Y_rqRM Q3vixfhCL i:tZS d|t8! 
WJ$p%)nc" Ct(ց;eH1袤|,˹(VR#LlY4q'Kj4w yE;FNQPfwKF#DGIɷ!{Cu{2VWē9ZԨdM "m6'JvG #chR~C;(v$;Azd( vs$54 r%ȴ ErɆPthQ nmGY)K=ޑd+إn:s+' hLjHxPƜ,(!}# E_[N_2QYU`$m"r惊$Ai Q;]]˴Q5-U+sk (E1>2]<2FRHtnb*Jn?v`>**K%t| pxa~Ŏ#WFV-&֣tGKdٺezȋZ$UCdx^~/xĞw }uܣ:Px7x./Ѫunb7 1 2k?s(W缾7˭oN7I</;rZ/쟷 rG|SX ~cGc:Miz̦)ypvW߬ݛ|5?W5V5ɶ xr;zNVVH@<(alk?#هM۬"Lgű(+ "( O؃k v,RǢ}‘;arY/:w|ZkL}A_yӗ7xq+]yu/y@c+^~ơqW_Mxx1Π$;(x$]ֆ[;ECݻj.I!Z}k#) W!GUt+&{g : ć@m;B{'}k}055A~3l\6U1zjе\w>zV(BFvMtS)GeBu׹fW[?㜜ly7_M/۳ڸr1MqV]sZiK((>^3>V=nˎdӚ?l{^`;4TǪ߇6=t=!9cZ; xPG4& QaP!dWEno𰚿5EWҷo|g!-Bq)2<p򸞄gAi(rN}IsW ؽotuPڽ>Q(z)tzr*ӭxsj膣`.C|f b$zvh`(X- x?(w`!5]vxBxU q`Ni|2խ"'xH*Jr_l47v{!}ݩn#fl2?>i`=}IrJP=m߼IV|Y;<+&jɚ=]=㣿fnoW?]~-$[csq4 UF娓VC6|ٹ9Hg4olL)_kgv*ƛGz"kO7`<{S3 #$BEz!ri)S#Aݧ{' [˄{Cbi{' []zr,OCiEYk!˗^X3_iVw}{1E;ODl[W |9kĽˈU Ge~h"T׶)eut~^XC?߫@ns<2MW-k,:)J!PkV66lCq5 :x:ˋկ($Z,ƪVj{gffMU|V0)nX~:]1.7 6*+6F\Dɔq{[7sߗnCiEtu{KnGf݆WuncH.dJ8[74jPtc^2plmm E2]˺Ѭ HԒ$+l6}xny}q_Ώ'ٳ}i_}uw&wov]t} ڋ;Y+_/ڰ]Bka68svMxLhtFk6p?jƠ#h86Q:li`If5sT,q d'n-p5Ph{Sߪif竣 b lN7և8N+Ŵ*a!V9$ull8|gqcxuxcN1~ ~}0PYo(-O,Ua1AqLPvd$2]$,^Fuz(Ӹ`p\'K䄝@B$R2.`1pҎӴ{C~|> Ib] 0V' p/1L+|OyE+Kv5 /vo`e>Kf]룘MMԅʴɶ(^(@36Phb$nDod`jv?Dezj"EI@7Mc m$I^v{AՑu)cbv?̈́*SvO*@PE; 2:Jr ,(JT2Z2Tf\]5ۛD$iԍ%\pcB2@ vg9I/C`!{~7T\N-`8%vxfORQ)%i X^.~$%!A~ gMy7*a5D ¿S얷o&:iN-,hy[܏S}rB%Bj73y trmBk 12H*j/8Z4T"'ʿ+8@Ke|g26AدQi`moB1qk9(`$84F1 c]+I\8ѓ0Kr6oP$Ċ4v%:1\;")-V 0姦wX&[ F3}npok,[ڕnb*?Ͳ/0s sX?H4ӔNvj̱>+pMU)˲"e|W_r0\sgn(dB '<&tYY IfT;rLjrз&\%w9ɬɑ;$8Wt(v֜&VeI;V7-d_1}L,zSZMoG"Z}P(D:e2HDQQ4rUˀ@xcݧ33y/RvQ}]9ShAQ/P/tuvғR\7H zUR-ٖ2셖N Zht7uҰҰSI@r|SҰ){/ӂjj@ZzjZ +>+>ǴZ +~Aq>㟶2(@ZTTsζ]?7TUZ BVs-Aƣzl1;ܻ"d(1j,s<'d7u$yn/-!a8ۜ"s{}9~HV]݊[{[?$ F&E# utHitWi(Aab,z˲]e(U_O88_PPy`?K)ךQT6k ]"?cRjjq_p?}ʙ#a.[n3.] v../*w Qʔi;n …nGɧ?s>L`g o~x[7:Jgw;__?Fn0/;j?+L?;|Rr4@M"HQd-&q?YRxUh $plCBl÷}r}!Wo99Ix[ G(%5XnD6g;yXlۄr]PQJGlnbkϋv~~0{p<Vl6۸lΖPpռorIJ.9%d-Sh `oj|(Sp0C/9Y . \fip##U g>fT@uo7}g+#jF8.`?|mb93䣧ȁśtAbEeRG"?6ZkS.r B)$ܐL,7ya$\#F #ldKeK|UjҪ,Ԧklk8{ݧSm߮{|G"}r0`C;S,;H֞ ݗA/Sݽ%P3f!U)߰6_p$Q~(?=+ph[a[}sV_ ɶ1c6LV6?9TZETQ ߹/V&cK/ڱvWww;vozc?}?) PђttPap-X   Fѷz{ `jэB^.0n^p"tfOWNS4w1J8'+o>(EX/֮F0-Lz/ )( Rm:dT˩5J_DŽ| uJ(fs 2E蝚0PuD9Vf ($*S>=f4 Cl$NjכL?A4X F$@@ 6BD dL-X^ma,%]͵Pz̉y{{?4 ʸ'a>,bZem3pWwg ?*geZ>[&Hjv%m3Jz^CPIܻv8@(%D@A"ܞ"U_CArPq~ grkl5S ,ӜhYPn!L5ޔ,ý(_m:qp?,Uxߧ.-UȚ%82e0I֜Vʏū}F\,y6RU7 `_,2MqF0M1:9F=xYBvD7Ъ.Z $( "X:d,K O &R1h堣ΨHIXf4O ,ݡCD@yNGEY!(@9A Bfh }˙1fGNq)Gna`}Ĺ$Qd2͵FW7Χ/YQB\OCKG 8Pp"_ caV zj~q. I{W~ ْlx<|3 ӃWUX! c|fxrym

L&n,@q%%0&(n:^ sA8m-pm>0. XnרTK'tDT>TdeqG~XXƋ«0{u0hi#>I6{bSʉX"Mq;- |ɩEe5+ϊdmN&s=6G\9T65|p[;p=\ڤgV<=%wWћŒ˷"ҭjx bSYî|Ljåkv~h+ySL:xھ5ER2HZ zx[ӫw%*=r|R8RuzN=+dzQLgr WNh`a܏_ޣo/48yBw6:?go(@ghݟj ٳGvSo=GZQE5zjYvGb}$ wc`q6Sn^ԭOZkL뻇)3nG7Ÿ"ee x;;+>r+lޟYܶG<]2)zjSTtv4ѫs+P-i W(dͺe/[ T'v [tlSukAC^Fq[7E[ BT'M1O Uw{@ֆpT`|'C +3OUѷeu]5a\xBHi /BԄX.Wĺ&m4]9M!#li"n|ME0.T+>Lc(wx P/t5謥P0!+}iiT&M&UH3C0uKvte(3D!D66[C7Р,jt܏T;sZ*T_שDг"wsP/CT IR´(o[ -Euj+k)0-ƲZ:վ+YKOZKYϊZZWjdSRqV|Ǜ>WQ} kDՂm{R@ĩtp;XX 1r_1k%uRdBks W0r$õ|3W=3iъf/{.:fa$R4tPGA1 ٟuHslH2^c@0=`UG79BKծ/'@9h3J#D dMA:]k*<@(G|x0!pg;s"ع9T@7pv^vME(=}o*B~iM@ 8dCr{fG;1#)A+*dgM?Lu݋{Ѱ"V!cѤmôz% %А1:qu Dubĺq3ܤO|YukCC^Fq[7QѺ Dubĺ!13EhА5":ELPh=6nwac uh!ps%(C>YS/՛^׉H6 :RCu:fa\R͵RrPab,L]%"1@ix{5͍˚[דy|WqH@-lṀ9N@Hf0SJ+DGn]%1 &@DB2bv? j;w 6n2, =LclO:?@}MrW}#/kۍ$G"mlyZ0 bg0%adԝdJ* 03NeO I7K2כIιqT.}uRNħD-rNm5*#瞤xZٹC9(V))B@JņjBUc\r/.\OЍ[xʞoWhYb'V_n.j Ҁ578.i*h?Qb ȑ0Z1hKFW#;H յklls|bV:\TzFQJv"&.j ?^Ԓ pȯ N#]++yNñVׅQMdl-5v¶ -p PwvCraf  т(+& =DH۵ ],28Mk盧vGjM m_ |ץa D-oB;r9PaR9z DZ|<8a#j&$â[9S~NgF6F 9=+U`kaQUNw?ȆeeÒPg@2h'OΖ$hkܷO{9K* Z u;Z ^atJTi%!^7,zT,x"ݰl ugKyLQӉo/[۫'0Jʨhŷ&FGxZX hO'wHȯJHНOP=>/H{ɳ+>ϛߛQpsi`x5W>|o~}x|Gzo;7r i?5÷Q5P7]}nW?Yv媿=˦yOoC1;TjSRt[ t+kKXOkʽ]{_*3n<υ̅^:'.&| ȞZ]?@xs]GDC Ax Zs ߲sB$=AMR [Y>n^=  TхP~dψ4UhhkͭbRZ?A}lgWYކuP jrrJ>*q G69`޹CbSZ|)Jʠ{zP($__25@ w0PQ"MpR6PVP.HQuK 4RNTm[g*=RA1V ˲-0* Iڒ[,|ۘi4J Xj=LLẵ.@~yGgkTwnN[RX9;),=qSBSrP<=)s&O#3WZN(~'ź92d3Ζ`5LǶtuI6 6D JH3{m쨮TV+gjYQ)ʶ^i[mJ:o`%y˧K:Dl=!Ң%أsHvƕ`ThAˑ)_bj)oYhGW'D88IGkp̓xJu΂] v öe+ چ[0mjr ҕ5i`Vrl.Ͱ؏Rv/-eGYb(WU[VܔE{l1dReL!E i]U"n۶ifQ:`DlR6-xʁuUuUeظCYX&qY#F\ GQnQD N#^.QXKMNV|j*K´u6e-c|qTe;L8%AqAvڄ @ 1oS'AdG5B߻k(-@x؟ HhM֚g ܃7%1Y%ztْ%;d˖챶d{#Ж쏝BA`ӄჷOh:Áȩ5 Q"njռbȱ5҄'H?dD4#ڏ:z],[I.}Nv2C$+k Xp xÁQ:i G.at$Zk{Y<Z5Vi/SN~,!'?CS-{%L'H˧#ta# l~V£vt&9' _W='ĜiE׳qJt. znw5wѮuW8<wmN.~c6χ_Cע6lS)2w!LJ}CD8/bx?|;]hKo?$B ̫V>Q%=mr靚 v|׻Q[lĹs,q,%XJSQ_F͆ҳfQq,5A3 ލvOt^Xz~,un:\Xv t{QM䅥g('uwR%q,D'ҳfi)\{,==F-D7KIű6RRc&j7K űP(r`驨wv.ssfypXB-=nԊu,S6[ElcRTq,8Eҳfi_.Rz,M 9Ku$K wY:y]SZTֶj[;e@s–i:5;8qt9(^:' 9>Ca-δ%j4E$ʜ.la`)r_n'2׎HuJ tVgĕmӂ+*(sة4P0˲P`c" cE6THe c˯U.* VaSBJֵe JsMRU nWMk^`(( >a#i"4我X#1H"lѮY0@P=cj_Ag$"{hs~#8L~C7вsQ䈮Q٩ =oO!dqGq֞ZdCF->M(rBЖd:+=O={)>ٽq$ȵ 3)KQa+K12Vǰh/f9v4? v8nVMt㺩GΨ'2MWd:6^=y1nRFYf9V3cw,Bg,Ob?a~1n1b"#A),L?<Kq5~Aeŀ4_=]h?jm 7Nuu>]hpC(MՓ~/1?>+ȗv~~1 9*痷ѿX?>}>}>royy-d>u¶߄B(FCquQf{ov?g!3' p8o9,k#t`nvZ);t Ͻ?z7juRXKFU+bN*RJua9)HgYKXx,; }3`8?_Xz,:$ -,qSǨEi{aY83;'Kc&j1sfDΞq ==Ee,Ec)*`)rK7Q_Xz,6z/t׻QËy8 .]Kc&j2?g5. ׭:?ZG_o?n։ҷi'Ds.ӹfF 4{ZN$&AGh¤o7向XrJ[çOh9<RdIFD,=kڜɕvpjr-zm*i"y 3v4P<&V}HnYg`,#4AmgF:3慸ۼW$d8=ҿ'eٚlCoKߐ֍uH qVtt9rl6n WAY10Z vuZcf, ?n.ZUB,*GmTTYhҪB+TUKULk?c崢{*?C9BgR[Pme@sE][WM%c(zDWMQ)JcƲ.zED4'lk6(ƽٵfJ8ǥm"%ȁYe- #%8.=c 9wǐIpƚ=|nNG}7ݨɚ 5{7/K&DN4+pjohjwoTa*rlh]ko$r+,(_(İYC`IlJÉorfVylk{w:X|P"9vOj~lJ+: ʙbΞcDLI%AgVi(aGz L2d8 6PtgH%,-E5é,*a˧gqw3:Q cT*_p,şaEPy˟\>ej AAx*am',$nSEc/;A!Z? qv} jysV7OO_ae0dVsI8YR|޾<Җ>^ӾՒW-S@8L>\nZE9QAJAohN88t8͉OD]ȧ88)DR R\9mLT[ydbtC޹ oG7VAS7[Td@zax;w Jӥd`W ~vaO0J'F'|IʇZ}jI~q')C1E&Z8X3Nb? h4"+=B@#Vj^)!b0C絠2ܚh^#vqUB$ Xb?>7EJXr"oNbo) =1Hd +cC"ֿ"b\(s^֟mdל5.~MY>=8хL&Q;Wtv! 7W?& 9c -hXٍ`V:?*^5YQH۟^6o66r0'>NX"k> #`6!h:.?)M|Fj3.Q*n x'OaWh&'>t/=؀@%''[-sB9C $E&I1A(k4aI>|Y|y^ &MxnN9ae><>\^. 
ΚE٧{wvaS~V5q}<_ 7aRYt_ݸ?~e*sٹ\5Y ~;+|(KRlƺt/Aq(]f- \觿`npXuDӽ^ T+F K8`lzbeQ*=(p"\ D᤹ۻj'=mIOzxN˧d<}4I?Y<=8yHa+CZ=?mpg IBC#9gN,BoLl<7x?l1[Ql>zЉ c`( ٗzA `:=:C 0XW&me/i5oNn&@DRSPVdJ ^d![%ǒ˭VơĈX~#Gcu!p%[c|څs-)Hݰ98bt+ uJF}N}G;ѭ y.ZSM~ƹ#b:ȉnc"k'F.<䝻hk ƎӶ%(aO2O<~tN*-XIi8㕷2ҩ&Қ001kkNjPs`Z72bါEǂb_]T<$"bξ]yU߮s1v5k7Ow>P$K/0xetQuT|wgO,gqZyCnvgm7AҼnRy_Oc+?gHof|߻el̜AM zEW m':|@V'E"#> dJ894Rm'\F1 i[/s#i:M?xI%JQ&wPNf@2dηj"m,,Y’-ǸGkUqV9K6 h'ҽN{ D<Ρ=ՂK]ĩ09HP(rp\0Gxx/"a cjRRm K8A),p%#LrZW=/$lTI$ǖ\D6ڻ|Y݌ND6U5X+P^$ $r$Qع`ʠcuh:NT<čz`sP묢|ݹ"*dPCL /՜0M%a4hne_|Lؾ48(^@tO"Wƚ<+wڷZ2`trR)j. n7r 57>DN 5ρ 5w!EK|F$[))S6+j [y6ZbtC޹ⓟqnE7h#FR R\9mU(6ܭCbtC޹(!D"i[χD6RްSdCª9S1b c#T̼b@Qq{r^$r$O9 a$H4 *rM;/Ӑ(/l~Lrٜ!gb8$Wqu.@)?H<]xa5뻻eg/xnw~`{)F2>fdUag3yw=#nGqWR?|Ah$~;,oN[%\Di3RUVsRE;ZR<#"QX,eT$ʸZGi;so$P/7[)y)`[0ݘ^ 楱ՊqRAӼT8s>P/Zi?f/$iďs2/=V+Z?y1z)MR }e޻yl]ݕ-lS]Nj /BMAÂ٤H9)%FC.ElRpTR0yɣXpХ$g*$Lku~ƂfQyk8g33O]pJL;1A|Fɪ׿ҵ'r#+LeG%=ܧI"g7(jOG}u1XO"D{BSsiӸ *h߂ eZ+ը mX>XS۞Ʈ9iJp&jp `MA2)s"Q`e,HHb FhIlfņɝ̈́@%χDP9kXa] pz#uXáђjF*$BFRF"C1ȚUC.* rW MmX_3 34xp"D# BI>jV)-x:}>g2m><`wFLcttT"j.s?Zh2DYVǨtI^U \wm;rW>.żEFzF]-Z#&Zkdu"+3}XQ*vE8y?2R2 jB.kp&a=lϡzSPgaDbx`7܎PCo=)3V<;ؕCa9Y@)ˣW uf;ꪏʎӿ=p؝2wiF[>,7l8v^;,|l˸~k4ᇊakTϠ8L/? E9P }. 庿2f`F>p蚾l8~M9ZdZ}`چafdf3}Y\Iʺe#S^n=={O1}`lFuT"TmaϭmFBлpW6J_0^$C<8joZHE>7~a]!;DH&{ O},)"X)'`?J/P^ I@gVu;:/E}7qWRN*X괌]Xz,#c)N\v,#ci5: u_3KN.uZ?{RM2[UT]Ԩ5Xz,͞p;aQKe/Q_7Ke'͞UT6{EhoR w'QKe]hm,;%>ײy ,aԄFXz,E!K13`Uզ">o\(VKzLcQ.`o{c܄8|J.&řE҈iɃ/S+I雱͟1%d Klʶ&xgTҶMµK1c@^~MʛFs;j}![Vuh;DP H^#}[° Vm+#Jy]LJzDy. 䠝߮R}"mf`L&?=&3^9˛ ?}Z.c[FW*RDFrrfV~ 9 å*TDI8uCd}ҫL5C-enM}Fϝ6@T,M~}m%tOi+%g }+Hs&m\T6M=:6s,PyG*Sچo:sSݘ۲ V4̓ ξ4e]sЌܤZOڄ5nBsz\ӀJ M Mکlh"{LC(:hלSuV搲*! i0T{ŕIzɈ&2[D:4"6>={O1MTFI5:Dy8^\/9 Fj/~ڃC i;PG \7:s+Zl-}yn-mV"!MZk"Ӽd\Jюs@X GUi _7/$GQ Ӓ@U\r0PG4C^8ES8WߓW7 D Dxro\nt@F[)I{ yM]Wo:nK"E7[uK"nc0SSF)zdK٪%K/;+l'Wk{6DؔGk[V-U4~hDAzgP4FueHLuYȡ+B hUtʱfCM uT:ϡA 'nz = b2!,=AY%َaczd X]#^5 nZ!zȓ'w]T;w.f+xjw<#T#A0C'T٩|:1'L WTڊP|Yz"2Hr CG%* Aˋcֱd8D3y"}yn"3ت{=CkvG "1*-ɩO*%yK%JZ|2.^ӑ9C^8ES8ŊW??~__?ܽyCT#dz=rz?z,?#|oCwݼo6@ݻkS|9v?tt-F߽ N.OU.ds- r+u#Dcw^!KXs#t3k6$[$ݗ$je: Ђ𯺭ov[[r|v; 񿆻kwfN݌aK<<;94Cy kJÑL;^BI3!dN),?1 G#9#6M'ݔL|%ru((蒴i*yM4P٭@YD> D+o}~|~rGRmsA/)zǂҝ=sN)މS2 'Rf5_i38޵d"攡;۟ ohma"1xY@ZՅfq`NPqԆ}6ԪUb9zG%oxZqJ1Z^SםG , /{&QμctLjצܪZT>1ч>%,*i&[ S ]L%'XX+Vrb+kۀ'(3[Q6$:H0,j4M!WWyi Tuut@_S 1YX^`? %vOm%g^]%zB%-Bk1N*ҕX5%j9zE!^}# aLu?s4We kku25`e/-85T k]B9 ZTQKnWi$mt5U5RaU<;*MU 򮪸סQ:=6Fa?iPY{e96~Jʰwk2^YG2*h {W|w6kO2_rP;eN F WtVeF ʝpPډE( ~Nv" ȰO/#"^@KwyK7dֻekwK})0#l2 ,r~jy^;+bo8rprvYOS==< {d^B=12 0Ќ2KX}T>(2QEgŹ"(>_'a˟n9#JT.>X0.4rv͊y͊8tϦsmؖg2~AF3pH ۝ɩ1# S&VcⵎH}x87!H ְ/0jV )T $:\vٳIyRc0S4SVSW[*ꤾ#-8 9n~MX` hV?i:;ؗ5ۦ.(N Mي/7[1Xz![pS/icm 1w vZM8N (Z<%TbS.M8)9-L8=dUKWwx]gwک?lv}#j7bMi9Cu㴢zn*ƥfv `a`<.Vw秎l-&/kg6\o\EՆJo,zMUWpޑV[Nj1,QA|}mڴR}D}qs/SiY7w4_5sӃJl\7YeEB y𼼺]rp!.ӊ7lifWTwW߸ܟ}~k8e\T/qc{o?vݷn)g< ~ثfqܿ˿~|=ߏuӠ5-I}Ahh%!l koiqQMdKJMKdH'.yΕo%WK8F%UVJ*hn >;va=|u 4PzHm:Qz( k CR_oK:Ra(EPtU)lY#)p6%ޖԷo(,dNc*nh1Hݱ >)joT% =]Woij< w~QW}М[3<(Y=x9ެQ4P*g:[|Bmrho5N-8-z9yiDs"( {qaVQ}V% ֖8{kz3,P[3uc3i]b,|6@kB;|6).EbNpi;J$ 2ր R Imnl[()e3q*Jt6A;NV TS.5TVU*@sKWJm,f[`ECIox=ɖ֩oWuJy.gnFn׏̞oU+j3}KOԄ(X%#!yDr YVjEn32e!t,1B+&9;QZ8jrrcT@ ӧr;P`r}Un:sڡiߦgéj,1/( (Ɲ O*l|~w#3[oC^XϫB+TJx] -e.Tom|9/WP1'0/9^8\[pg -ۧg_D/\,Pi׵:pC-ܹ!p^*ޞ(A5P @쫯\!k ﹫"su2@V$B0.'p;R wQ.x hJB9w+tT '|B4kHÁjY+ RӍo R#c5FX[#VBo*J5wz[j. zȃֹWDgl>;.y%r f_"/q?b ~sWK\*QMok?J_>߼Oկ\ ?Z=^E7NG|~ӕ!7Ltlg.k\e:Bh{LIBPN~ޑ Gn:N3bۈqdyޭ |p&aJɇǻMnju41&>#*vo#7[%؄n CtS@0d e k>{{ϸAAjG<|H8F>4#gژ( ]K-4'g4JY`"uDM#EjM'R0"TGb3!v>&2J% ׹$fR_H(Qz(RuJAZ&5Go1GZKrMcToﴃWWnf1:aTTqb_"_pV/;~(@v"4ΎVα? 
var/home/core/zuul-output/logs/kubelet.log0000644000000000000000024450041315133756733017715 0ustar rootroot
Jan 20 16:41:45 crc systemd[1]: Starting Kubernetes Kubelet... Jan 20 16:41:45 crc restorecon[4557]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Jan 20
16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 
20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 
20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 20 16:41:45 crc restorecon[4557]: 
/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Jan 20 16:41:45 crc restorecon[4557]: 
/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 20 16:41:45 crc restorecon[4557]: 
/var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c4,c17 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c84,c419 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 20 16:41:45 crc restorecon[4557]: 
/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 20 16:41:45 crc restorecon[4557]: 
/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: 
/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 20 
16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 20 16:41:45 crc restorecon[4557]: 
/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 20 16:41:45 crc restorecon[4557]: 
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 20 16:41:45 crc 
restorecon[4557]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 20 16:41:45 crc restorecon[4557]: 
/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 20 16:41:45 crc 
restorecon[4557]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 
Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 20 16:41:45 crc restorecon[4557]: 
/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 
crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 
Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 20 
16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c0,c25 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 20 16:41:45 crc restorecon[4557]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 20 16:41:45 crc restorecon[4557]: 
/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981 Jan 20 16:41:45 crc restorecon[4557]: 
/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 20 16:41:45 crc restorecon[4557]: 
/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c336,c787 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c336,c787 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 20 16:41:45 crc restorecon[4557]: 
/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 
16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 20 16:41:45 crc restorecon[4557]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 20 16:41:45 crc restorecon[4557]: 
/var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 20 16:41:45 crc restorecon[4557]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 20 16:41:45 crc restorecon[4557]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:45 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 
20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized 
by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 
Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 
20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Jan 20 16:41:46 crc restorecon[4557]: 
/var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 20 
16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 20 16:41:46 crc restorecon[4557]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 20 16:41:46 crc restorecon[4557]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 20 16:41:46 crc restorecon[4557]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Jan 20 16:41:46 crc kubenswrapper[4558]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Jan 20 16:41:46 crc kubenswrapper[4558]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Jan 20 16:41:46 crc kubenswrapper[4558]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. 
See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Jan 20 16:41:46 crc kubenswrapper[4558]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Jan 20 16:41:46 crc kubenswrapper[4558]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI. Jan 20 16:41:46 crc kubenswrapper[4558]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.437615 4558 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime" Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.439777 4558 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.439793 4558 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.439798 4558 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.439802 4558 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.439807 4558 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
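The deprecated-flag warnings above are emitted by kubenswrapper at start-up for options that should instead be carried in the kubelet configuration file named by --config (the FLAG dump later in this log shows --config="/etc/kubernetes/kubelet.conf"). A minimal sketch for triaging such a capture, assuming the log has been saved locally as kubelet.log and using a hypothetical helper name, is:

    # deprecated_flags.py - hypothetical helper, not part of the captured job output.
    # Lists the kubelet flags that the log reports as deprecated.
    import re

    def deprecated_flags(log_text: str) -> list[str]:
        # kubenswrapper prints lines such as:
        #   Flag --system-reserved has been deprecated, This parameter should be set via the config file ...
        return sorted(set(re.findall(r"Flag (--[\w-]+) has been deprecated", log_text)))

    if __name__ == "__main__":
        with open("kubelet.log", encoding="utf-8", errors="replace") as f:
            for flag in deprecated_flags(f.read()):
                print(flag)

Run against this section it would print --container-runtime-endpoint, --minimum-container-ttl-duration, --pod-infra-container-image, --register-with-taints, --system-reserved and --volume-plugin-dir.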
Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.439812 4558 feature_gate.go:330] unrecognized feature gate: SignatureStores Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.439816 4558 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.439820 4558 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.439823 4558 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.439827 4558 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.439830 4558 feature_gate.go:330] unrecognized feature gate: GatewayAPI Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.439833 4558 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.439840 4558 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.439843 4558 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.439847 4558 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.439850 4558 feature_gate.go:330] unrecognized feature gate: OVNObservability Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.439853 4558 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.439857 4558 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.439860 4558 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.439863 4558 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.439866 4558 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.439870 4558 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.439873 4558 feature_gate.go:330] unrecognized feature gate: NewOLM Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.439876 4558 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.439880 4558 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.439883 4558 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.439886 4558 feature_gate.go:330] unrecognized feature gate: InsightsConfig Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.439890 4558 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.439893 4558 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.439896 4558 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.439900 4558 feature_gate.go:330] unrecognized feature gate: 
AdminNetworkPolicy Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.439904 4558 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.439907 4558 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.439911 4558 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.439914 4558 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.439918 4558 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.439922 4558 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.439925 4558 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.439929 4558 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.439932 4558 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.439936 4558 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.439940 4558 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.439943 4558 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.439947 4558 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.439955 4558 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.439959 4558 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.439962 4558 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.439965 4558 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.439969 4558 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
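The long run of "unrecognized feature gate" warnings covers gate names (for example GatewayAPI, NewOLM, MachineConfigNodes) that appear to be OpenShift-level feature gates handed down to the kubelet, which simply does not have them in its own registry; gates it does know are instead reported with "Setting GA/deprecated feature gate ..." messages (CloudDualStackNodeIPs, DisableKubeletCloudCredentialProviders, KMSv1 above). A small sketch that summarizes both kinds of message from a saved copy of the log, again assuming a local kubelet.log and hypothetical helper names, could look like:

    # gate_summary.py - hypothetical helper, not part of the captured job output.
    # Counts unrecognized feature gates and lists the gates the kubelet set explicitly.
    import re
    from collections import Counter

    def gate_summary(log_text: str) -> tuple[Counter, list[str]]:
        # "\s+" tolerates the line wrapping introduced when the log was archived.
        unrecognized = Counter(re.findall(r"unrecognized feature gate:\s+(\w+)", log_text))
        explicit = re.findall(r"Setting (?:GA|deprecated) feature gate (\w+=\w+)", log_text)
        return unrecognized, explicit

    if __name__ == "__main__":
        with open("kubelet.log", encoding="utf-8", errors="replace") as f:
            unknown, explicit = gate_summary(f.read())
        print(len(unknown), "distinct unrecognized gates")
        print("explicitly set gates:", explicit)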
Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.439973 4558 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.439976 4558 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.439979 4558 feature_gate.go:330] unrecognized feature gate: PinnedImages Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.439983 4558 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.439986 4558 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.439990 4558 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.439993 4558 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.439997 4558 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.440001 4558 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.440005 4558 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.440008 4558 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.440011 4558 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.440014 4558 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.440017 4558 feature_gate.go:330] unrecognized feature gate: PlatformOperators Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.440020 4558 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.440024 4558 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.440027 4558 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.440030 4558 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.440033 4558 feature_gate.go:330] unrecognized feature gate: Example Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.440036 4558 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.440039 4558 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.440042 4558 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440110 4558 flags.go:64] FLAG: --address="0.0.0.0" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440119 4558 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440125 4558 flags.go:64] FLAG: --anonymous-auth="true" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440130 4558 flags.go:64] FLAG: --application-metrics-count-limit="100" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440134 4558 flags.go:64] 
FLAG: --authentication-token-webhook="false" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440138 4558 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440145 4558 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440150 4558 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440154 4558 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440175 4558 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440179 4558 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440183 4558 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440188 4558 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440191 4558 flags.go:64] FLAG: --cgroup-root="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440195 4558 flags.go:64] FLAG: --cgroups-per-qos="true" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440199 4558 flags.go:64] FLAG: --client-ca-file="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440203 4558 flags.go:64] FLAG: --cloud-config="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440206 4558 flags.go:64] FLAG: --cloud-provider="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440210 4558 flags.go:64] FLAG: --cluster-dns="[]" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440215 4558 flags.go:64] FLAG: --cluster-domain="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440218 4558 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440222 4558 flags.go:64] FLAG: --config-dir="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440226 4558 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440230 4558 flags.go:64] FLAG: --container-log-max-files="5" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440234 4558 flags.go:64] FLAG: --container-log-max-size="10Mi" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440238 4558 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440241 4558 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440245 4558 flags.go:64] FLAG: --containerd-namespace="k8s.io" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440249 4558 flags.go:64] FLAG: --contention-profiling="false" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440253 4558 flags.go:64] FLAG: --cpu-cfs-quota="true" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440256 4558 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440260 4558 flags.go:64] FLAG: --cpu-manager-policy="none" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440264 4558 flags.go:64] FLAG: --cpu-manager-policy-options="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440268 4558 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Jan 20 
16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440272 4558 flags.go:64] FLAG: --enable-controller-attach-detach="true" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440276 4558 flags.go:64] FLAG: --enable-debugging-handlers="true" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440279 4558 flags.go:64] FLAG: --enable-load-reader="false" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440283 4558 flags.go:64] FLAG: --enable-server="true" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440287 4558 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440292 4558 flags.go:64] FLAG: --event-burst="100" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440296 4558 flags.go:64] FLAG: --event-qps="50" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440300 4558 flags.go:64] FLAG: --event-storage-age-limit="default=0" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440304 4558 flags.go:64] FLAG: --event-storage-event-limit="default=0" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440307 4558 flags.go:64] FLAG: --eviction-hard="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440312 4558 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440315 4558 flags.go:64] FLAG: --eviction-minimum-reclaim="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440320 4558 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440324 4558 flags.go:64] FLAG: --eviction-soft="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440328 4558 flags.go:64] FLAG: --eviction-soft-grace-period="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440332 4558 flags.go:64] FLAG: --exit-on-lock-contention="false" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440336 4558 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440340 4558 flags.go:64] FLAG: --experimental-mounter-path="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440343 4558 flags.go:64] FLAG: --fail-cgroupv1="false" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440347 4558 flags.go:64] FLAG: --fail-swap-on="true" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440351 4558 flags.go:64] FLAG: --feature-gates="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440355 4558 flags.go:64] FLAG: --file-check-frequency="20s" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440359 4558 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440363 4558 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440367 4558 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440371 4558 flags.go:64] FLAG: --healthz-port="10248" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440375 4558 flags.go:64] FLAG: --help="false" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440379 4558 flags.go:64] FLAG: --hostname-override="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440383 4558 flags.go:64] FLAG: --housekeeping-interval="10s" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440387 4558 flags.go:64] FLAG: --http-check-frequency="20s" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 
16:41:46.440391 4558 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440394 4558 flags.go:64] FLAG: --image-credential-provider-config="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440398 4558 flags.go:64] FLAG: --image-gc-high-threshold="85" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440402 4558 flags.go:64] FLAG: --image-gc-low-threshold="80" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440406 4558 flags.go:64] FLAG: --image-service-endpoint="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440409 4558 flags.go:64] FLAG: --kernel-memcg-notification="false" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440413 4558 flags.go:64] FLAG: --kube-api-burst="100" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440417 4558 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440421 4558 flags.go:64] FLAG: --kube-api-qps="50" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440426 4558 flags.go:64] FLAG: --kube-reserved="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440429 4558 flags.go:64] FLAG: --kube-reserved-cgroup="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440433 4558 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440437 4558 flags.go:64] FLAG: --kubelet-cgroups="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440441 4558 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440446 4558 flags.go:64] FLAG: --lock-file="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440450 4558 flags.go:64] FLAG: --log-cadvisor-usage="false" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440454 4558 flags.go:64] FLAG: --log-flush-frequency="5s" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440458 4558 flags.go:64] FLAG: --log-json-info-buffer-size="0" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440464 4558 flags.go:64] FLAG: --log-json-split-stream="false" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440467 4558 flags.go:64] FLAG: --log-text-info-buffer-size="0" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440471 4558 flags.go:64] FLAG: --log-text-split-stream="false" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440475 4558 flags.go:64] FLAG: --logging-format="text" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440478 4558 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440482 4558 flags.go:64] FLAG: --make-iptables-util-chains="true" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440486 4558 flags.go:64] FLAG: --manifest-url="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440490 4558 flags.go:64] FLAG: --manifest-url-header="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440494 4558 flags.go:64] FLAG: --max-housekeeping-interval="15s" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440498 4558 flags.go:64] FLAG: --max-open-files="1000000" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440503 4558 flags.go:64] FLAG: --max-pods="110" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440506 4558 flags.go:64] FLAG: --maximum-dead-containers="-1" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440510 
4558 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440514 4558 flags.go:64] FLAG: --memory-manager-policy="None" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440517 4558 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440521 4558 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440525 4558 flags.go:64] FLAG: --node-ip="192.168.126.11" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440528 4558 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440548 4558 flags.go:64] FLAG: --node-status-max-images="50" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440552 4558 flags.go:64] FLAG: --node-status-update-frequency="10s" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440556 4558 flags.go:64] FLAG: --oom-score-adj="-999" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440560 4558 flags.go:64] FLAG: --pod-cidr="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440564 4558 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440570 4558 flags.go:64] FLAG: --pod-manifest-path="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440574 4558 flags.go:64] FLAG: --pod-max-pids="-1" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440578 4558 flags.go:64] FLAG: --pods-per-core="0" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440582 4558 flags.go:64] FLAG: --port="10250" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440586 4558 flags.go:64] FLAG: --protect-kernel-defaults="false" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440590 4558 flags.go:64] FLAG: --provider-id="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440595 4558 flags.go:64] FLAG: --qos-reserved="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440599 4558 flags.go:64] FLAG: --read-only-port="10255" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440603 4558 flags.go:64] FLAG: --register-node="true" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440607 4558 flags.go:64] FLAG: --register-schedulable="true" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440611 4558 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440618 4558 flags.go:64] FLAG: --registry-burst="10" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440622 4558 flags.go:64] FLAG: --registry-qps="5" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440626 4558 flags.go:64] FLAG: --reserved-cpus="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440629 4558 flags.go:64] FLAG: --reserved-memory="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440634 4558 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440638 4558 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440642 4558 flags.go:64] FLAG: --rotate-certificates="false" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440646 4558 
flags.go:64] FLAG: --rotate-server-certificates="false" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440650 4558 flags.go:64] FLAG: --runonce="false" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440654 4558 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440658 4558 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440662 4558 flags.go:64] FLAG: --seccomp-default="false" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440666 4558 flags.go:64] FLAG: --serialize-image-pulls="true" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440669 4558 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440673 4558 flags.go:64] FLAG: --storage-driver-db="cadvisor" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440677 4558 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440681 4558 flags.go:64] FLAG: --storage-driver-password="root" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440685 4558 flags.go:64] FLAG: --storage-driver-secure="false" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440689 4558 flags.go:64] FLAG: --storage-driver-table="stats" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440693 4558 flags.go:64] FLAG: --storage-driver-user="root" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440697 4558 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440701 4558 flags.go:64] FLAG: --sync-frequency="1m0s" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440705 4558 flags.go:64] FLAG: --system-cgroups="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440709 4558 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440715 4558 flags.go:64] FLAG: --system-reserved-cgroup="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440719 4558 flags.go:64] FLAG: --tls-cert-file="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440722 4558 flags.go:64] FLAG: --tls-cipher-suites="[]" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440727 4558 flags.go:64] FLAG: --tls-min-version="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440731 4558 flags.go:64] FLAG: --tls-private-key-file="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440736 4558 flags.go:64] FLAG: --topology-manager-policy="none" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440739 4558 flags.go:64] FLAG: --topology-manager-policy-options="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440743 4558 flags.go:64] FLAG: --topology-manager-scope="container" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440746 4558 flags.go:64] FLAG: --v="2" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440751 4558 flags.go:64] FLAG: --version="false" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440756 4558 flags.go:64] FLAG: --vmodule="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440760 4558 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.440764 4558 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.440846 
4558 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.440850 4558 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.440853 4558 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.440857 4558 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.440860 4558 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.440864 4558 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.440867 4558 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.440871 4558 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.440874 4558 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.440877 4558 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.440880 4558 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.440884 4558 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.440887 4558 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.440890 4558 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.440893 4558 feature_gate.go:330] unrecognized feature gate: NewOLM Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.440896 4558 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.440900 4558 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.440904 4558 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.440908 4558 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.440912 4558 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.440915 4558 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.440918 4558 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.440922 4558 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.440925 4558 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.440929 4558 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.440933 4558 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.440936 4558 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.440940 4558 feature_gate.go:330] unrecognized feature gate: PlatformOperators Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.440943 4558 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.440947 4558 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.440950 4558 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.440953 4558 feature_gate.go:330] unrecognized feature gate: SignatureStores Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.440956 4558 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.440959 4558 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.440963 4558 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.440966 4558 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.440969 4558 feature_gate.go:330] unrecognized feature gate: GatewayAPI Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.440972 4558 feature_gate.go:330] unrecognized feature gate: InsightsConfig Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.440975 4558 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.440978 4558 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.440982 4558 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.440985 4558 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.440988 4558 feature_gate.go:330] unrecognized feature gate: 
NodeDisruptionPolicy Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.440992 4558 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.440995 4558 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.440998 4558 feature_gate.go:330] unrecognized feature gate: PinnedImages Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.441001 4558 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.441004 4558 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.441008 4558 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.441012 4558 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.441015 4558 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.441018 4558 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.441022 4558 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.441025 4558 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.441029 4558 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.441032 4558 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.441035 4558 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.441038 4558 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.441041 4558 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.441048 4558 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.441051 4558 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.441055 4558 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.441059 4558 feature_gate.go:330] unrecognized feature gate: OVNObservability Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.441062 4558 feature_gate.go:330] unrecognized feature gate: Example Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.441066 4558 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.441070 4558 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.441074 4558 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.441077 4558 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.441081 4558 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.441085 4558 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.441088 4558 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.441099 4558 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.446871 4558 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.446889 4558 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.446951 4558 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.446956 4558 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.446960 4558 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.446963 4558 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.446966 4558 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.446971 4558 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
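[note] The long run of "unrecognized feature gate" warnings above is expected on this node: the rendered kubelet configuration carries the cluster-wide OpenShift feature set, which includes gate names the upstream kubelet does not know, and only the map printed by feature_gate.go:386 actually takes effect. As a rough, illustrative sketch (not the actual file on this node), the same effective gates could be expressed in the KubeletConfiguration passed via --config=/etc/kubernetes/kubelet.conf using the kubelet.config.k8s.io/v1beta1 featureGates field:

    apiVersion: kubelet.config.k8s.io/v1beta1
    kind: KubeletConfiguration
    featureGates:
      # gates the kubelet reports as enabled in its summary line
      CloudDualStackNodeIPs: true
      DisableKubeletCloudCredentialProviders: true
      KMSv1: true
      ValidatingAdmissionPolicy: true
      # gates the kubelet reports as explicitly disabled
      DynamicResourceAllocation: false
      EventedPLEG: false
      NodeSwap: false
      UserNamespacesSupport: false

The "Setting GA feature gate ... It will be removed in a future release" and "Setting deprecated feature gate KMSv1=true" lines are warnings, not errors; they only note that gates which have already graduated or been deprecated are still being pinned explicitly.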
Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.446975 4558 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.446978 4558 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.446982 4558 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.446985 4558 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.446989 4558 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.446992 4558 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.446995 4558 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.446999 4558 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447002 4558 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447006 4558 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447010 4558 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447013 4558 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447017 4558 feature_gate.go:330] unrecognized feature gate: SignatureStores Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447021 4558 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447024 4558 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447027 4558 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447031 4558 feature_gate.go:330] unrecognized feature gate: PlatformOperators Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447034 4558 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447037 4558 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447040 4558 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447044 4558 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447047 4558 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447050 4558 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447053 4558 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447056 4558 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447059 4558 feature_gate.go:330] unrecognized 
feature gate: VolumeGroupSnapshot Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447063 4558 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447066 4558 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447069 4558 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447073 4558 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447078 4558 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447081 4558 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447085 4558 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447088 4558 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447092 4558 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447095 4558 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447098 4558 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447101 4558 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447106 4558 feature_gate.go:330] unrecognized feature gate: PinnedImages Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447110 4558 feature_gate.go:330] unrecognized feature gate: Example Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447114 4558 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447118 4558 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447122 4558 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447125 4558 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447128 4558 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447131 4558 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447134 4558 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447137 4558 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447140 4558 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447143 4558 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447147 4558 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Jan 20 16:41:46 
crc kubenswrapper[4558]: W0120 16:41:46.447150 4558 feature_gate.go:330] unrecognized feature gate: InsightsConfig Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447153 4558 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447156 4558 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447159 4558 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447178 4558 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447182 4558 feature_gate.go:330] unrecognized feature gate: OVNObservability Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447185 4558 feature_gate.go:330] unrecognized feature gate: NewOLM Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447189 4558 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447192 4558 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447195 4558 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447200 4558 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447204 4558 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447208 4558 feature_gate.go:330] unrecognized feature gate: GatewayAPI Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447212 4558 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.447218 4558 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447333 4558 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447339 4558 feature_gate.go:330] unrecognized feature gate: GatewayAPI Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447343 4558 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447346 4558 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447349 4558 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447353 4558 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447357 4558 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447360 4558 feature_gate.go:330] 
unrecognized feature gate: NetworkDiagnosticsConfig Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447363 4558 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447366 4558 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447370 4558 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447373 4558 feature_gate.go:330] unrecognized feature gate: PinnedImages Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447377 4558 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447381 4558 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447385 4558 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447389 4558 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447392 4558 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447395 4558 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447399 4558 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447402 4558 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447405 4558 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447409 4558 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447412 4558 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447415 4558 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447419 4558 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447422 4558 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447425 4558 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447428 4558 feature_gate.go:330] unrecognized feature gate: Example Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447431 4558 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447434 4558 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447438 4558 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447441 4558 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447444 4558 feature_gate.go:330] unrecognized feature gate: 
NutanixMultiSubnets Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447447 4558 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447450 4558 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447453 4558 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447457 4558 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447460 4558 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447464 4558 feature_gate.go:330] unrecognized feature gate: PlatformOperators Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447467 4558 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447470 4558 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447473 4558 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447477 4558 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447480 4558 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447483 4558 feature_gate.go:330] unrecognized feature gate: NewOLM Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447486 4558 feature_gate.go:330] unrecognized feature gate: OVNObservability Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447489 4558 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447492 4558 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447497 4558 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447500 4558 feature_gate.go:330] unrecognized feature gate: InsightsConfig Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447504 4558 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447507 4558 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447511 4558 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447514 4558 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447517 4558 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447521 4558 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447525 4558 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447528 4558 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447532 4558 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447536 4558 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447559 4558 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447563 4558 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447566 4558 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447569 4558 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447573 4558 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447577 4558 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447580 4558 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447583 4558 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447587 4558 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447591 4558 feature_gate.go:330] unrecognized feature gate: SignatureStores Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.447594 4558 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.447599 4558 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.447684 4558 server.go:940] "Client rotation is on, will bootstrap in background" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.450332 4558 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.450391 4558 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem". Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.451186 4558 server.go:997] "Starting client certificate rotation" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.451208 4558 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.451355 4558 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-11-25 01:15:37.962653755 +0000 UTC Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.451439 4558 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.462426 4558 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Jan 20 16:41:46 crc kubenswrapper[4558]: E0120 16:41:46.464029 4558 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 192.168.25.8:6443: connect: connection refused" logger="UnhandledError" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.465995 4558 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.477274 4558 log.go:25] "Validated CRI v1 runtime API" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.494340 4558 log.go:25] "Validated CRI v1 image API" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.495630 4558 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.498647 4558 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2026-01-20-16-37-35-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3] Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.498669 4558 fs.go:134] Filesystem 
partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:49 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/containers/storage/overlay-containers/75d81934760b26101869fbd8e4b5954c62b019c1cc3e5a0c9f82ed8de46b3b22/userdata/shm:{mountpoint:/var/lib/containers/storage/overlay-containers/75d81934760b26101869fbd8e4b5954c62b019c1cc3e5a0c9f82ed8de46b3b22/userdata/shm major:0 minor:42 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:41 fsType:tmpfs blockSize:0} overlay_0-43:{mountpoint:/var/lib/containers/storage/overlay/94b752e0a51c0134b00ddef6dc7a933a9d7c1d9bdc88a18dae4192a0d557d623/merged major:0 minor:43 fsType:overlay blockSize:0}] Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.510823 4558 manager.go:217] Machine: {Timestamp:2026-01-20 16:41:46.509272198 +0000 UTC m=+0.269610185 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2445406 MemoryCapacity:33654116352 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:1def282c-e24e-4bc0-9a1b-d7cc30756d3d BootID:bf027402-7a62-478b-8585-220c98495823 Filesystems:[{Device:/run/user/1000 DeviceMajor:0 DeviceMinor:49 Capacity:3365408768 Type:vfs Inodes:821633 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:41 Capacity:1073741824 Type:vfs Inodes:4108168 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827060224 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:overlay_0-43 DeviceMajor:0 DeviceMinor:43 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827056128 Type:vfs Inodes:4108168 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/var/lib/containers/storage/overlay-containers/75d81934760b26101869fbd8e4b5954c62b019c1cc3e5a0c9f82ed8de46b3b22/userdata/shm DeviceMajor:0 DeviceMinor:42 Capacity:65536000 Type:vfs Inodes:4108168 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:3f:55:36 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:enp3s0 MacAddress:fa:16:3e:3f:55:36 Speed:-1 Mtu:1500} {Name:enp7s0 MacAddress:fa:16:3e:d2:73:8a Speed:-1 Mtu:1440} {Name:enp7s0.20 MacAddress:52:54:00:d0:9f:bd Speed:-1 Mtu:1436} {Name:enp7s0.21 MacAddress:52:54:00:9f:21:a1 Speed:-1 Mtu:1436} {Name:enp7s0.22 MacAddress:52:54:00:ac:00:29 Speed:-1 Mtu:1436} {Name:eth10 MacAddress:82:e8:39:39:80:05 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:82:97:78:d9:ec:1b Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654116352 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 
Size:65536 Type:Data Level:1} {Id:0 Size:65536 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:65536 Type:Data Level:1} {Id:1 Size:65536 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:65536 Type:Data Level:1} {Id:10 Size:65536 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:65536 Type:Data Level:1} {Id:11 Size:65536 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:65536 Type:Data Level:1} {Id:2 Size:65536 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:65536 Type:Data Level:1} {Id:3 Size:65536 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:65536 Type:Data Level:1} {Id:4 Size:65536 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:65536 Type:Data Level:1} {Id:5 Size:65536 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:65536 Type:Data Level:1} {Id:6 Size:65536 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:65536 Type:Data Level:1} {Id:7 Size:65536 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:65536 Type:Data Level:1} {Id:8 Size:65536 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:65536 Type:Data Level:1} {Id:9 Size:65536 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None} Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.510969 4558 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available. 
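[note] As a back-of-the-envelope check against the machine info above and the reservations in the container manager config that follows: MemoryCapacity is 33654116352 bytes (about 32095 Mi), KubeReserved is empty, SystemReserved sets memory=350Mi, and the hard eviction threshold for memory.available is 100Mi, so node allocatable memory should land near

    allocatable ~ capacity - kubeReserved - systemReserved - evictionHard(memory.available)
                ~ 32095 Mi - 0 - 350 Mi - 100 Mi ~ 31645 Mi

The exact figure the kubelet publishes depends on how the raw byte value is rounded.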
Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.511050 4558 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:} Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.511558 4558 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.511696 4558 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[] Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.511723 4558 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2} Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.511872 4558 topology_manager.go:138] "Creating topology manager with none policy" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.511880 4558 container_manager_linux.go:303] "Creating device plugin manager" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.512177 4558 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.512200 4558 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.512282 4558 state_mem.go:36] "Initialized new in-memory state store" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.512346 4558 server.go:1245] "Using root directory" path="/var/lib/kubelet" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.514137 4558 kubelet.go:418] "Attempting to sync node with API server" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.514225 4558 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests" 
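[note] The NodeConfig dump above is the merged result of the command-line flags listed earlier and the file supplied through --config=/etc/kubernetes/kubelet.conf; for example, ProtectKernelDefaults is true here even though the --protect-kernel-defaults flag defaulted to false, and certificate rotation is reported as enabled elsewhere in the log although --rotate-certificates was false. A minimal, illustrative kubelet.config.k8s.io/v1beta1 fragment that would yield these values could look like the sketch below; the field names are real KubeletConfiguration fields, but the contents of the actual rendered file on this node are not reproduced in this log:

    apiVersion: kubelet.config.k8s.io/v1beta1
    kind: KubeletConfiguration
    cgroupsPerQOS: true
    protectKernelDefaults: true
    staticPodPath: /etc/kubernetes/manifests
    podPidsLimit: 4096
    cpuManagerPolicy: none
    memoryManagerPolicy: None
    topologyManagerPolicy: none
    topologyManagerScope: container
    systemReserved:
      cpu: 200m
      memory: 350Mi
      ephemeral-storage: 350Mi
    evictionHard:                  # these thresholds match the kubelet defaults
      memory.available: 100Mi
      nodefs.available: 10%
      nodefs.inodesFree: 5%
      imagefs.available: 15%
      imagefs.inodesFree: 5%
    rotateCertificates: true       # implied by the "Client rotation is on" message
    serverTLSBootstrap: true       # implied by the kubelet-serving rotation messages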
Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.514257 4558 file.go:69] "Watching path" path="/etc/kubernetes/manifests" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.514266 4558 kubelet.go:324] "Adding apiserver pod source" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.514275 4558 apiserver.go:42] "Waiting for node sync before watching apiserver pods" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.517055 4558 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.517685 4558 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem". Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.518013 4558 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 192.168.25.8:6443: connect: connection refused Jan 20 16:41:46 crc kubenswrapper[4558]: E0120 16:41:46.518107 4558 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 192.168.25.8:6443: connect: connection refused" logger="UnhandledError" Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.518123 4558 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 192.168.25.8:6443: connect: connection refused Jan 20 16:41:46 crc kubenswrapper[4558]: E0120 16:41:46.518243 4558 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 192.168.25.8:6443: connect: connection refused" logger="UnhandledError" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.519490 4558 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.521271 4558 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.521292 4558 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.521299 4558 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.521305 4558 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.521315 4558 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.521321 4558 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.521327 4558 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.521338 4558 plugins.go:603] "Loaded volume plugin" 
pluginName="kubernetes.io/downward-api" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.521345 4558 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.521351 4558 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.521360 4558 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.521365 4558 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.522275 4558 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.522603 4558 server.go:1280] "Started kubelet" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.523202 4558 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10 Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.523216 4558 server.go:163] "Starting to listen" address="0.0.0.0" port=10250 Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.523664 4558 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.523682 4558 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 192.168.25.8:6443: connect: connection refused Jan 20 16:41:46 crc systemd[1]: Started Kubernetes Kubelet. Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.523996 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.524017 4558 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.524045 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-25 03:08:52.009494154 +0000 UTC Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.524177 4558 volume_manager.go:287] "The desired_state_of_world populator starts" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.524188 4558 volume_manager.go:289] "Starting Kubelet Volume Manager" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.524232 4558 desired_state_of_world_populator.go:146] "Desired state populator starts to run" Jan 20 16:41:46 crc kubenswrapper[4558]: E0120 16:41:46.524238 4558 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.524676 4558 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 192.168.25.8:6443: connect: connection refused Jan 20 16:41:46 crc kubenswrapper[4558]: E0120 16:41:46.524730 4558 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 192.168.25.8:6443: connect: connection refused" logger="UnhandledError" Jan 20 16:41:46 crc 
kubenswrapper[4558]: I0120 16:41:46.525378 4558 server.go:460] "Adding debug handlers to kubelet server" Jan 20 16:41:46 crc kubenswrapper[4558]: E0120 16:41:46.525415 4558 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.8:6443: connect: connection refused" interval="200ms" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.525383 4558 factory.go:55] Registering systemd factory Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.525477 4558 factory.go:221] Registration of the systemd container factory successfully Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.525784 4558 factory.go:153] Registering CRI-O factory Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.525800 4558 factory.go:221] Registration of the crio container factory successfully Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.525855 4558 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.525884 4558 factory.go:103] Registering Raw factory Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.525897 4558 manager.go:1196] Started watching for new ooms in manager Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.526622 4558 manager.go:319] Starting recovery of all containers Jan 20 16:41:46 crc kubenswrapper[4558]: E0120 16:41:46.526627 4558 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 192.168.25.8:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.188c7e035ac45a7a default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-20 16:41:46.522581626 +0000 UTC m=+0.282919594,LastTimestamp:2026-01-20 16:41:46.522581626 +0000 UTC m=+0.282919594,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.533494 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.533560 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.533577 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.533588 4558 reconstruct.go:130] "Volume is marked as uncertain 
and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.533619 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.533628 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.533636 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.533649 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.533660 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.533671 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.533680 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.533692 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.533703 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.533721 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.533731 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.533743 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.533752 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.533761 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.533771 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.533781 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.533793 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.533802 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.533811 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.533821 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.533829 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.533839 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.533852 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.533865 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.533875 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.533888 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.533895 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.533906 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.533915 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.533924 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.536934 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.536949 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.536959 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" 
volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.536968 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.536977 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.536988 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.536997 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.537005 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.537014 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.537023 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.537032 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.537041 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.537049 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.537057 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" 
volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.537080 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.537089 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.537097 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.537106 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.537118 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.537128 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.537138 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.537147 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.537157 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.537177 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.537187 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" 
volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.537194 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.537203 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.537212 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.537221 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.537232 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.537241 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.537249 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.537257 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.537266 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.537275 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.537284 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" 
volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.537293 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.537301 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.537309 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.537317 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.537325 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.537334 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.537342 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.537350 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.537359 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.537367 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.537375 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" 
volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.537384 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.537394 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.537402 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.537411 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.537425 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.537439 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.537468 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.537479 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.537487 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.537501 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.537509 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" 
volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.537517 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.537526 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.537535 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.537554 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.537562 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.537571 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.537579 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.537587 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.537597 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.537609 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.537617 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" 
volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.537625 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.537638 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540086 4558 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540114 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540129 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540150 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540182 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540192 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540201 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540211 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540219 4558 reconstruct.go:130] "Volume is marked as uncertain 
and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540228 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540237 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540252 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540260 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540269 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540278 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540287 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540319 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540330 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540339 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540359 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540370 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540379 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540388 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540396 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540405 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540414 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540424 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540438 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540448 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540457 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540466 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" 
volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540475 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540484 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540492 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540501 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540511 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540521 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540531 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540563 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540573 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540581 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540590 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540599 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540607 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540615 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540625 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540634 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540642 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540651 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540661 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540669 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540678 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540688 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540696 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540704 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540712 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540721 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540761 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540773 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540782 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540790 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540798 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540806 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540815 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540822 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540829 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540838 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540846 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540858 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540867 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540874 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540883 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540891 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540899 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540908 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540917 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540925 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540933 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540942 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540950 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540959 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540967 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540974 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540982 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540991 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.540998 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" 
volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.541008 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.541016 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.541024 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.541031 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.541040 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.541049 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.541057 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.541066 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.541074 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.541082 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.541091 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" 
volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.541101 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.541109 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.541116 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.541128 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.541139 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.541148 4558 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.541156 4558 reconstruct.go:97] "Volume reconstruction finished" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.541177 4558 reconciler.go:26] "Reconciler: start to sync state" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.544479 4558 manager.go:324] Recovery completed Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.552998 4558 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.555501 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.555588 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.555600 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.556931 4558 cpu_manager.go:225] "Starting CPU manager" policy="none" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.556971 4558 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.556988 4558 state_mem.go:36] "Initialized new in-memory state store" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.561493 4558 
policy_none.go:49] "None policy: Start" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.562024 4558 memory_manager.go:170] "Starting memorymanager" policy="None" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.562043 4558 state_mem.go:35] "Initializing new in-memory state store" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.563099 4558 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.564521 4558 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv6" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.564635 4558 status_manager.go:217] "Starting to sync pod status with apiserver" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.564705 4558 kubelet.go:2335] "Starting kubelet main sync loop" Jan 20 16:41:46 crc kubenswrapper[4558]: E0120 16:41:46.564792 4558 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Jan 20 16:41:46 crc kubenswrapper[4558]: W0120 16:41:46.565065 4558 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 192.168.25.8:6443: connect: connection refused Jan 20 16:41:46 crc kubenswrapper[4558]: E0120 16:41:46.565121 4558 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 192.168.25.8:6443: connect: connection refused" logger="UnhandledError" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.604897 4558 manager.go:334] "Starting Device Plugin manager" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.604944 4558 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.604955 4558 server.go:79] "Starting device plugin registration server" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.605298 4558 eviction_manager.go:189] "Eviction manager: starting control loop" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.605312 4558 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.605449 4558 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.605585 4558 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.605593 4558 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Jan 20 16:41:46 crc kubenswrapper[4558]: E0120 16:41:46.612385 4558 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.665683 4558 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc","openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc"] Jan 20 
16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.665783 4558 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.666653 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.666688 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.666698 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.666848 4558 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.666998 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.667028 4558 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.667868 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.667893 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.667902 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.667974 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.668002 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.668011 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.668108 4558 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.668236 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.668284 4558 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.668893 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.668914 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.668922 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.668923 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.668936 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.668944 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.669032 4558 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.669142 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.669182 4558 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.669841 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.669855 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.669866 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.669876 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.669869 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.669943 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.670027 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd/etcd-crc" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.670052 4558 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.670102 4558 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.670591 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.670610 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.670618 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.670923 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.670944 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.670951 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.671354 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.671403 4558 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.673102 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.673132 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.673142 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.705377 4558 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.706132 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.706160 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.706183 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.706202 4558 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 20 16:41:46 crc kubenswrapper[4558]: E0120 16:41:46.706607 4558 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 192.168.25.8:6443: connect: connection refused" node="crc" Jan 20 16:41:46 crc kubenswrapper[4558]: E0120 16:41:46.725831 4558 controller.go:145] "Failed to ensure lease exists, will retry" err="Get 
\"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.8:6443: connect: connection refused" interval="400ms" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.742927 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.742965 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.742983 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.742997 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.743012 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.743053 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.743081 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.743112 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.743144 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: 
\"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.743171 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.743197 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.743230 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.743277 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.743310 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.743326 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.844156 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.844220 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.844241 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 20 16:41:46 crc 
kubenswrapper[4558]: I0120 16:41:46.844256 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.844273 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.844288 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.844301 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.844316 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.844332 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.844345 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.844358 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.844373 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.844367 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: 
\"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.844403 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.844382 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.844426 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.844449 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.844474 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.844386 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.844416 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.844452 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.844457 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.844516 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: 
\"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.844467 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.844474 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.844556 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.844356 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.844436 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.844588 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.844725 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.907346 4558 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.909159 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.909201 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.909210 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.909228 4558 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 20 16:41:46 crc kubenswrapper[4558]: E0120 16:41:46.909601 4558 kubelet_node_status.go:99] 
"Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 192.168.25.8:6443: connect: connection refused" node="crc" Jan 20 16:41:46 crc kubenswrapper[4558]: I0120 16:41:46.992209 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 20 16:41:47 crc kubenswrapper[4558]: I0120 16:41:47.004963 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 20 16:41:47 crc kubenswrapper[4558]: I0120 16:41:47.010336 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 20 16:41:47 crc kubenswrapper[4558]: W0120 16:41:47.012640 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-23b16cf4daeb6b2d7c26b62af8f33cb8bcc7507a722a76fe2a9df73dc319e52b WatchSource:0}: Error finding container 23b16cf4daeb6b2d7c26b62af8f33cb8bcc7507a722a76fe2a9df73dc319e52b: Status 404 returned error can't find the container with id 23b16cf4daeb6b2d7c26b62af8f33cb8bcc7507a722a76fe2a9df73dc319e52b Jan 20 16:41:47 crc kubenswrapper[4558]: W0120 16:41:47.020577 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-19adc0f331032412bf818487f541256229831745ac46a89c0cc9603a9b3316ab WatchSource:0}: Error finding container 19adc0f331032412bf818487f541256229831745ac46a89c0cc9603a9b3316ab: Status 404 returned error can't find the container with id 19adc0f331032412bf818487f541256229831745ac46a89c0cc9603a9b3316ab Jan 20 16:41:47 crc kubenswrapper[4558]: W0120 16:41:47.021264 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-b7e4946f99059717fa939f1f8245fbccb8a3b2bf5aeb1abac55bcb9c0e2a7d7a WatchSource:0}: Error finding container b7e4946f99059717fa939f1f8245fbccb8a3b2bf5aeb1abac55bcb9c0e2a7d7a: Status 404 returned error can't find the container with id b7e4946f99059717fa939f1f8245fbccb8a3b2bf5aeb1abac55bcb9c0e2a7d7a Jan 20 16:41:47 crc kubenswrapper[4558]: I0120 16:41:47.028003 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Jan 20 16:41:47 crc kubenswrapper[4558]: I0120 16:41:47.032659 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 20 16:41:47 crc kubenswrapper[4558]: W0120 16:41:47.040990 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-d4e7d9fd74183ebf1d24e1c8b5b5f36633a22a73a6e832973306eb95088b1905 WatchSource:0}: Error finding container d4e7d9fd74183ebf1d24e1c8b5b5f36633a22a73a6e832973306eb95088b1905: Status 404 returned error can't find the container with id d4e7d9fd74183ebf1d24e1c8b5b5f36633a22a73a6e832973306eb95088b1905 Jan 20 16:41:47 crc kubenswrapper[4558]: W0120 16:41:47.042573 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-ce68a083580a9c296df40020c4ece760be2e2998b6e01629b7dd111e19a4f9fe WatchSource:0}: Error finding container ce68a083580a9c296df40020c4ece760be2e2998b6e01629b7dd111e19a4f9fe: Status 404 returned error can't find the container with id ce68a083580a9c296df40020c4ece760be2e2998b6e01629b7dd111e19a4f9fe Jan 20 16:41:47 crc kubenswrapper[4558]: E0120 16:41:47.126983 4558 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.8:6443: connect: connection refused" interval="800ms" Jan 20 16:41:47 crc kubenswrapper[4558]: I0120 16:41:47.310313 4558 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 20 16:41:47 crc kubenswrapper[4558]: I0120 16:41:47.311764 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:41:47 crc kubenswrapper[4558]: I0120 16:41:47.311801 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:41:47 crc kubenswrapper[4558]: I0120 16:41:47.311810 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:41:47 crc kubenswrapper[4558]: I0120 16:41:47.311831 4558 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 20 16:41:47 crc kubenswrapper[4558]: E0120 16:41:47.312202 4558 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 192.168.25.8:6443: connect: connection refused" node="crc" Jan 20 16:41:47 crc kubenswrapper[4558]: W0120 16:41:47.505225 4558 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 192.168.25.8:6443: connect: connection refused Jan 20 16:41:47 crc kubenswrapper[4558]: E0120 16:41:47.505785 4558 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 192.168.25.8:6443: connect: connection refused" logger="UnhandledError" Jan 20 16:41:47 crc kubenswrapper[4558]: W0120 16:41:47.509152 4558 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": 
dial tcp 192.168.25.8:6443: connect: connection refused Jan 20 16:41:47 crc kubenswrapper[4558]: E0120 16:41:47.509227 4558 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 192.168.25.8:6443: connect: connection refused" logger="UnhandledError" Jan 20 16:41:47 crc kubenswrapper[4558]: I0120 16:41:47.524929 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-28 22:49:16.55010971 +0000 UTC Jan 20 16:41:47 crc kubenswrapper[4558]: I0120 16:41:47.525351 4558 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 192.168.25.8:6443: connect: connection refused Jan 20 16:41:47 crc kubenswrapper[4558]: I0120 16:41:47.569629 4558 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="6e4df1c3b2a1aef9adb3da64f89a1b8c0904fd22882609dbafe2fbc5718b8016" exitCode=0 Jan 20 16:41:47 crc kubenswrapper[4558]: I0120 16:41:47.569707 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"6e4df1c3b2a1aef9adb3da64f89a1b8c0904fd22882609dbafe2fbc5718b8016"} Jan 20 16:41:47 crc kubenswrapper[4558]: I0120 16:41:47.569822 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"19adc0f331032412bf818487f541256229831745ac46a89c0cc9603a9b3316ab"} Jan 20 16:41:47 crc kubenswrapper[4558]: I0120 16:41:47.569956 4558 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 20 16:41:47 crc kubenswrapper[4558]: I0120 16:41:47.571214 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:41:47 crc kubenswrapper[4558]: I0120 16:41:47.571243 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:41:47 crc kubenswrapper[4558]: I0120 16:41:47.571222 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"168e8f533aaa108e4160f9ae4af6bb7944edba7a596178ffffdc5325073fc3c8"} Jan 20 16:41:47 crc kubenswrapper[4558]: I0120 16:41:47.571253 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:41:47 crc kubenswrapper[4558]: I0120 16:41:47.571315 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"23b16cf4daeb6b2d7c26b62af8f33cb8bcc7507a722a76fe2a9df73dc319e52b"} Jan 20 16:41:47 crc kubenswrapper[4558]: I0120 16:41:47.573302 4558 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f" exitCode=0 Jan 20 16:41:47 crc kubenswrapper[4558]: I0120 
16:41:47.573341 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f"} Jan 20 16:41:47 crc kubenswrapper[4558]: I0120 16:41:47.573367 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"ce68a083580a9c296df40020c4ece760be2e2998b6e01629b7dd111e19a4f9fe"} Jan 20 16:41:47 crc kubenswrapper[4558]: I0120 16:41:47.573453 4558 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 20 16:41:47 crc kubenswrapper[4558]: I0120 16:41:47.574348 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:41:47 crc kubenswrapper[4558]: I0120 16:41:47.574383 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:41:47 crc kubenswrapper[4558]: I0120 16:41:47.574394 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:41:47 crc kubenswrapper[4558]: I0120 16:41:47.575621 4558 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75" exitCode=0 Jan 20 16:41:47 crc kubenswrapper[4558]: I0120 16:41:47.575648 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75"} Jan 20 16:41:47 crc kubenswrapper[4558]: I0120 16:41:47.575675 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"d4e7d9fd74183ebf1d24e1c8b5b5f36633a22a73a6e832973306eb95088b1905"} Jan 20 16:41:47 crc kubenswrapper[4558]: I0120 16:41:47.575778 4558 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 20 16:41:47 crc kubenswrapper[4558]: I0120 16:41:47.575792 4558 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 20 16:41:47 crc kubenswrapper[4558]: I0120 16:41:47.576619 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:41:47 crc kubenswrapper[4558]: I0120 16:41:47.576638 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:41:47 crc kubenswrapper[4558]: I0120 16:41:47.576655 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:41:47 crc kubenswrapper[4558]: I0120 16:41:47.576666 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:41:47 crc kubenswrapper[4558]: I0120 16:41:47.576655 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:41:47 crc kubenswrapper[4558]: I0120 16:41:47.576720 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:41:47 crc kubenswrapper[4558]: I0120 16:41:47.577070 4558 generic.go:334] 
"Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="ac2f374f27360645aa18b003d1b2415cd498c17c73812950904109dd4b77cbd5" exitCode=0 Jan 20 16:41:47 crc kubenswrapper[4558]: I0120 16:41:47.577103 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"ac2f374f27360645aa18b003d1b2415cd498c17c73812950904109dd4b77cbd5"} Jan 20 16:41:47 crc kubenswrapper[4558]: I0120 16:41:47.577142 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"b7e4946f99059717fa939f1f8245fbccb8a3b2bf5aeb1abac55bcb9c0e2a7d7a"} Jan 20 16:41:47 crc kubenswrapper[4558]: I0120 16:41:47.577242 4558 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 20 16:41:47 crc kubenswrapper[4558]: I0120 16:41:47.577918 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:41:47 crc kubenswrapper[4558]: I0120 16:41:47.577940 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:41:47 crc kubenswrapper[4558]: I0120 16:41:47.577951 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:41:47 crc kubenswrapper[4558]: W0120 16:41:47.763646 4558 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 192.168.25.8:6443: connect: connection refused Jan 20 16:41:47 crc kubenswrapper[4558]: E0120 16:41:47.763717 4558 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 192.168.25.8:6443: connect: connection refused" logger="UnhandledError" Jan 20 16:41:47 crc kubenswrapper[4558]: E0120 16:41:47.927594 4558 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.8:6443: connect: connection refused" interval="1.6s" Jan 20 16:41:48 crc kubenswrapper[4558]: W0120 16:41:48.111195 4558 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 192.168.25.8:6443: connect: connection refused Jan 20 16:41:48 crc kubenswrapper[4558]: E0120 16:41:48.111269 4558 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 192.168.25.8:6443: connect: connection refused" logger="UnhandledError" Jan 20 16:41:48 crc kubenswrapper[4558]: I0120 16:41:48.112390 4558 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 20 16:41:48 crc kubenswrapper[4558]: I0120 16:41:48.113404 4558 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:41:48 crc kubenswrapper[4558]: I0120 16:41:48.113444 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:41:48 crc kubenswrapper[4558]: I0120 16:41:48.113453 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:41:48 crc kubenswrapper[4558]: I0120 16:41:48.113480 4558 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 20 16:41:48 crc kubenswrapper[4558]: E0120 16:41:48.113925 4558 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 192.168.25.8:6443: connect: connection refused" node="crc" Jan 20 16:41:48 crc kubenswrapper[4558]: I0120 16:41:48.525188 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-15 09:06:56.598038991 +0000 UTC Jan 20 16:41:48 crc kubenswrapper[4558]: I0120 16:41:48.580415 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"a426b97002a49938b18009db49867dc9a15102707b42ec46bc64093c0ed3c2c5"} Jan 20 16:41:48 crc kubenswrapper[4558]: I0120 16:41:48.580451 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"bb2e7753f75b10088eea9204511ae78e4be9a2b5be8c17ed04c3c972b13f8190"} Jan 20 16:41:48 crc kubenswrapper[4558]: I0120 16:41:48.580462 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"0d16b973c217656d6a8ceaf684e9dec82daeb583209943f6b2979be47ab03fda"} Jan 20 16:41:48 crc kubenswrapper[4558]: I0120 16:41:48.580529 4558 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 20 16:41:48 crc kubenswrapper[4558]: I0120 16:41:48.581189 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:41:48 crc kubenswrapper[4558]: I0120 16:41:48.581211 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:41:48 crc kubenswrapper[4558]: I0120 16:41:48.581219 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:41:48 crc kubenswrapper[4558]: I0120 16:41:48.582835 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"c87f08993c2b47c63ba9652c7dafe0fdbecdcb03a7b7080664ab1f1bd0e1d602"} Jan 20 16:41:48 crc kubenswrapper[4558]: I0120 16:41:48.582857 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"1e0141aef1babb57501016cb09787591b5e6d9136b8d7dd4ba64393f0be5a027"} Jan 20 16:41:48 crc kubenswrapper[4558]: I0120 16:41:48.582866 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"9030e970e96b6909d771d4fa0c3b4f493b4a825cbc43ef7e684272284329c7c6"} Jan 20 16:41:48 crc kubenswrapper[4558]: I0120 16:41:48.582917 4558 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 20 16:41:48 crc kubenswrapper[4558]: I0120 16:41:48.583434 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:41:48 crc kubenswrapper[4558]: I0120 16:41:48.583452 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:41:48 crc kubenswrapper[4558]: I0120 16:41:48.583460 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:41:48 crc kubenswrapper[4558]: I0120 16:41:48.585322 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"f2e22f1d4e87dd06a67f49aedad280da48ac1c4ba2e438a4925cf8bd5330c4d8"} Jan 20 16:41:48 crc kubenswrapper[4558]: I0120 16:41:48.585347 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"e4850bde1fb538e21fc91949dc584ef0a791d718844691a44559e0513bb36203"} Jan 20 16:41:48 crc kubenswrapper[4558]: I0120 16:41:48.585357 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"3a8122c3b57ac4f0026df149fe622450d07d8b0d517ad018decd7b21d8d9c04a"} Jan 20 16:41:48 crc kubenswrapper[4558]: I0120 16:41:48.585366 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"7444e8fea1853bae45dbd3d52d07e3f7b94314cd29fe8c99891e515a892e569e"} Jan 20 16:41:48 crc kubenswrapper[4558]: I0120 16:41:48.585373 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"4ea68e442bb2143fbaa5e2c9fe335c6b023a31c2eeaa78d60de45c6ef40c2894"} Jan 20 16:41:48 crc kubenswrapper[4558]: I0120 16:41:48.585426 4558 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 20 16:41:48 crc kubenswrapper[4558]: I0120 16:41:48.585953 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:41:48 crc kubenswrapper[4558]: I0120 16:41:48.585971 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:41:48 crc kubenswrapper[4558]: I0120 16:41:48.585979 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:41:48 crc kubenswrapper[4558]: I0120 16:41:48.587093 4558 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15eddb9054021" exitCode=0 Jan 20 16:41:48 crc kubenswrapper[4558]: I0120 16:41:48.587129 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15eddb9054021"} Jan 20 16:41:48 crc kubenswrapper[4558]: I0120 16:41:48.587219 4558 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 20 16:41:48 crc kubenswrapper[4558]: I0120 16:41:48.587811 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:41:48 crc kubenswrapper[4558]: I0120 16:41:48.587833 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:41:48 crc kubenswrapper[4558]: I0120 16:41:48.587841 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:41:48 crc kubenswrapper[4558]: I0120 16:41:48.590251 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"3ba1f84f7ea0577350c7025aeb96fb1b329297f4d6767727f9a4e6659d3027cb"} Jan 20 16:41:48 crc kubenswrapper[4558]: I0120 16:41:48.590301 4558 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 20 16:41:48 crc kubenswrapper[4558]: I0120 16:41:48.595092 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:41:48 crc kubenswrapper[4558]: I0120 16:41:48.595121 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:41:48 crc kubenswrapper[4558]: I0120 16:41:48.595129 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:41:48 crc kubenswrapper[4558]: I0120 16:41:48.620591 4558 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Jan 20 16:41:48 crc kubenswrapper[4558]: I0120 16:41:48.752605 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 20 16:41:48 crc kubenswrapper[4558]: I0120 16:41:48.757158 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 20 16:41:48 crc kubenswrapper[4558]: I0120 16:41:48.762555 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 20 16:41:49 crc kubenswrapper[4558]: I0120 16:41:49.525629 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-09 12:03:32.449472266 +0000 UTC Jan 20 16:41:49 crc kubenswrapper[4558]: I0120 16:41:49.593065 4558 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3" exitCode=0 Jan 20 16:41:49 crc kubenswrapper[4558]: I0120 16:41:49.593153 4558 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 20 16:41:49 crc kubenswrapper[4558]: I0120 16:41:49.593529 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" 
event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3"} Jan 20 16:41:49 crc kubenswrapper[4558]: I0120 16:41:49.593624 4558 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 20 16:41:49 crc kubenswrapper[4558]: I0120 16:41:49.593885 4558 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 20 16:41:49 crc kubenswrapper[4558]: I0120 16:41:49.594403 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:41:49 crc kubenswrapper[4558]: I0120 16:41:49.594423 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:41:49 crc kubenswrapper[4558]: I0120 16:41:49.594431 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:41:49 crc kubenswrapper[4558]: I0120 16:41:49.594512 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:41:49 crc kubenswrapper[4558]: I0120 16:41:49.594527 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:41:49 crc kubenswrapper[4558]: I0120 16:41:49.594536 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:41:49 crc kubenswrapper[4558]: I0120 16:41:49.594865 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:41:49 crc kubenswrapper[4558]: I0120 16:41:49.594896 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:41:49 crc kubenswrapper[4558]: I0120 16:41:49.594905 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:41:49 crc kubenswrapper[4558]: I0120 16:41:49.714403 4558 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 20 16:41:49 crc kubenswrapper[4558]: I0120 16:41:49.715352 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:41:49 crc kubenswrapper[4558]: I0120 16:41:49.715384 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:41:49 crc kubenswrapper[4558]: I0120 16:41:49.715394 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:41:49 crc kubenswrapper[4558]: I0120 16:41:49.715416 4558 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 20 16:41:49 crc kubenswrapper[4558]: I0120 16:41:49.974867 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 20 16:41:50 crc kubenswrapper[4558]: I0120 16:41:50.526710 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-05 15:45:21.669182476 +0000 UTC Jan 20 16:41:50 crc kubenswrapper[4558]: I0120 16:41:50.599406 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" 
event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"d434793571cbba089cc34029d951bee9063aca58728b791f2d1af2d68229f778"} Jan 20 16:41:50 crc kubenswrapper[4558]: I0120 16:41:50.599427 4558 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 20 16:41:50 crc kubenswrapper[4558]: I0120 16:41:50.599450 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"c889e2b0e14244ef49ec057e3d5fd91687e335a983fdc99a6816abd2af643ddf"} Jan 20 16:41:50 crc kubenswrapper[4558]: I0120 16:41:50.599464 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"f7e18f59b5e6b6c0be141b7efb3ccb1df4ff6e3c394bcc88fb1142c8df433493"} Jan 20 16:41:50 crc kubenswrapper[4558]: I0120 16:41:50.599468 4558 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 20 16:41:50 crc kubenswrapper[4558]: I0120 16:41:50.599472 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"48592e72d91cf527296f06e52fec1c16c1da21323c0644659945af109a74ebb5"} Jan 20 16:41:50 crc kubenswrapper[4558]: I0120 16:41:50.599480 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"89f7ad577df11c509855a90ac8791da6de3f0d2857a1ca56726a6327f40455bf"} Jan 20 16:41:50 crc kubenswrapper[4558]: I0120 16:41:50.599588 4558 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 20 16:41:50 crc kubenswrapper[4558]: I0120 16:41:50.600234 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:41:50 crc kubenswrapper[4558]: I0120 16:41:50.600263 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:41:50 crc kubenswrapper[4558]: I0120 16:41:50.600272 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:41:50 crc kubenswrapper[4558]: I0120 16:41:50.600808 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:41:50 crc kubenswrapper[4558]: I0120 16:41:50.600829 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:41:50 crc kubenswrapper[4558]: I0120 16:41:50.600836 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:41:51 crc kubenswrapper[4558]: I0120 16:41:51.527858 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-29 14:52:33.577682861 +0000 UTC Jan 20 16:41:51 crc kubenswrapper[4558]: I0120 16:41:51.600641 4558 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 20 16:41:51 crc kubenswrapper[4558]: I0120 16:41:51.600683 4558 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 20 16:41:51 crc kubenswrapper[4558]: I0120 16:41:51.601317 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 20 16:41:51 crc kubenswrapper[4558]: I0120 16:41:51.601340 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:41:51 crc kubenswrapper[4558]: I0120 16:41:51.601349 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:41:51 crc kubenswrapper[4558]: I0120 16:41:51.799875 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 20 16:41:51 crc kubenswrapper[4558]: I0120 16:41:51.799980 4558 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 20 16:41:51 crc kubenswrapper[4558]: I0120 16:41:51.800009 4558 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 20 16:41:51 crc kubenswrapper[4558]: I0120 16:41:51.800866 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:41:51 crc kubenswrapper[4558]: I0120 16:41:51.800902 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:41:51 crc kubenswrapper[4558]: I0120 16:41:51.800911 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:41:52 crc kubenswrapper[4558]: I0120 16:41:52.066429 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 20 16:41:52 crc kubenswrapper[4558]: I0120 16:41:52.528400 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-02 19:04:38.011211458 +0000 UTC Jan 20 16:41:52 crc kubenswrapper[4558]: I0120 16:41:52.602192 4558 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 20 16:41:52 crc kubenswrapper[4558]: I0120 16:41:52.602836 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:41:52 crc kubenswrapper[4558]: I0120 16:41:52.602869 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:41:52 crc kubenswrapper[4558]: I0120 16:41:52.602877 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:41:52 crc kubenswrapper[4558]: I0120 16:41:52.975514 4558 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Jan 20 16:41:52 crc kubenswrapper[4558]: I0120 16:41:52.975600 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Jan 20 16:41:53 crc kubenswrapper[4558]: I0120 16:41:53.132748 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 20 
16:41:53 crc kubenswrapper[4558]: I0120 16:41:53.459487 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 20 16:41:53 crc kubenswrapper[4558]: I0120 16:41:53.459643 4558 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 20 16:41:53 crc kubenswrapper[4558]: I0120 16:41:53.460480 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:41:53 crc kubenswrapper[4558]: I0120 16:41:53.460507 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:41:53 crc kubenswrapper[4558]: I0120 16:41:53.460515 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:41:53 crc kubenswrapper[4558]: I0120 16:41:53.528644 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-16 17:51:48.405265805 +0000 UTC Jan 20 16:41:53 crc kubenswrapper[4558]: I0120 16:41:53.606355 4558 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 20 16:41:53 crc kubenswrapper[4558]: I0120 16:41:53.607252 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:41:53 crc kubenswrapper[4558]: I0120 16:41:53.607284 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:41:53 crc kubenswrapper[4558]: I0120 16:41:53.607293 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:41:54 crc kubenswrapper[4558]: I0120 16:41:54.140760 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 20 16:41:54 crc kubenswrapper[4558]: I0120 16:41:54.140906 4558 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 20 16:41:54 crc kubenswrapper[4558]: I0120 16:41:54.141826 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:41:54 crc kubenswrapper[4558]: I0120 16:41:54.141866 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:41:54 crc kubenswrapper[4558]: I0120 16:41:54.141874 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:41:54 crc kubenswrapper[4558]: I0120 16:41:54.526745 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Jan 20 16:41:54 crc kubenswrapper[4558]: I0120 16:41:54.526867 4558 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 20 16:41:54 crc kubenswrapper[4558]: I0120 16:41:54.527768 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:41:54 crc kubenswrapper[4558]: I0120 16:41:54.527795 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:41:54 crc kubenswrapper[4558]: I0120 16:41:54.527804 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:41:54 crc 
kubenswrapper[4558]: I0120 16:41:54.528868 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-29 22:05:06.601361555 +0000 UTC Jan 20 16:41:55 crc kubenswrapper[4558]: I0120 16:41:55.502063 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Jan 20 16:41:55 crc kubenswrapper[4558]: I0120 16:41:55.502225 4558 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 20 16:41:55 crc kubenswrapper[4558]: I0120 16:41:55.503261 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:41:55 crc kubenswrapper[4558]: I0120 16:41:55.503299 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:41:55 crc kubenswrapper[4558]: I0120 16:41:55.503310 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:41:55 crc kubenswrapper[4558]: I0120 16:41:55.529471 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-10 05:53:37.383609531 +0000 UTC Jan 20 16:41:56 crc kubenswrapper[4558]: I0120 16:41:56.530608 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-09 20:55:59.187395665 +0000 UTC Jan 20 16:41:56 crc kubenswrapper[4558]: E0120 16:41:56.613337 4558 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Jan 20 16:41:57 crc kubenswrapper[4558]: I0120 16:41:57.531281 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-26 05:44:10.213062272 +0000 UTC Jan 20 16:41:58 crc kubenswrapper[4558]: I0120 16:41:58.526115 4558 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Jan 20 16:41:58 crc kubenswrapper[4558]: I0120 16:41:58.536210 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-13 21:32:55.179987676 +0000 UTC Jan 20 16:41:58 crc kubenswrapper[4558]: E0120 16:41:58.622336 4558 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": net/http: TLS handshake timeout" logger="UnhandledError" Jan 20 16:41:58 crc kubenswrapper[4558]: I0120 16:41:58.964738 4558 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Jan 20 16:41:58 crc kubenswrapper[4558]: I0120 16:41:58.964844 4558 prober.go:107] "Probe failed" probeType="Startup" 
pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Jan 20 16:41:58 crc kubenswrapper[4558]: I0120 16:41:58.970186 4558 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Jan 20 16:41:58 crc kubenswrapper[4558]: I0120 16:41:58.970441 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Jan 20 16:41:59 crc kubenswrapper[4558]: I0120 16:41:59.537071 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-23 21:37:18.142708913 +0000 UTC Jan 20 16:42:00 crc kubenswrapper[4558]: I0120 16:42:00.537738 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-22 22:22:23.833843135 +0000 UTC Jan 20 16:42:01 crc kubenswrapper[4558]: I0120 16:42:01.537829 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-26 12:30:13.578620523 +0000 UTC Jan 20 16:42:02 crc kubenswrapper[4558]: I0120 16:42:02.538821 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-09 12:49:00.819755657 +0000 UTC Jan 20 16:42:02 crc kubenswrapper[4558]: I0120 16:42:02.629701 4558 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Jan 20 16:42:02 crc kubenswrapper[4558]: I0120 16:42:02.640638 4558 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146 Jan 20 16:42:02 crc kubenswrapper[4558]: I0120 16:42:02.654083 4558 csr.go:261] certificate signing request csr-bmmnx is approved, waiting to be issued Jan 20 16:42:02 crc kubenswrapper[4558]: I0120 16:42:02.659773 4558 csr.go:257] certificate signing request csr-bmmnx is issued Jan 20 16:42:02 crc kubenswrapper[4558]: I0120 16:42:02.976146 4558 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Jan 20 16:42:02 crc kubenswrapper[4558]: I0120 16:42:02.976214 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Jan 20 16:42:03 crc kubenswrapper[4558]: I0120 16:42:03.135448 4558 kubelet.go:2542] "SyncLoop 
(probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 20 16:42:03 crc kubenswrapper[4558]: I0120 16:42:03.135668 4558 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 20 16:42:03 crc kubenswrapper[4558]: I0120 16:42:03.136547 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:03 crc kubenswrapper[4558]: I0120 16:42:03.136609 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:03 crc kubenswrapper[4558]: I0120 16:42:03.136620 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:03 crc kubenswrapper[4558]: I0120 16:42:03.138936 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 20 16:42:03 crc kubenswrapper[4558]: I0120 16:42:03.467229 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 20 16:42:03 crc kubenswrapper[4558]: I0120 16:42:03.467349 4558 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 20 16:42:03 crc kubenswrapper[4558]: I0120 16:42:03.468146 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:03 crc kubenswrapper[4558]: I0120 16:42:03.468196 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:03 crc kubenswrapper[4558]: I0120 16:42:03.468206 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:03 crc kubenswrapper[4558]: I0120 16:42:03.538937 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-03 15:59:43.81981252 +0000 UTC Jan 20 16:42:03 crc kubenswrapper[4558]: I0120 16:42:03.623137 4558 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 20 16:42:03 crc kubenswrapper[4558]: I0120 16:42:03.623187 4558 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 20 16:42:03 crc kubenswrapper[4558]: I0120 16:42:03.623798 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:03 crc kubenswrapper[4558]: I0120 16:42:03.623825 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:03 crc kubenswrapper[4558]: I0120 16:42:03.623835 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:03 crc kubenswrapper[4558]: I0120 16:42:03.661296 4558 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2027-01-20 16:37:02 +0000 UTC, rotation deadline is 2026-12-09 23:47:22.212044369 +0000 UTC Jan 20 16:42:03 crc kubenswrapper[4558]: I0120 16:42:03.661345 4558 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 7759h5m18.550701684s for next certificate rotation Jan 20 16:42:03 crc kubenswrapper[4558]: E0120 16:42:03.966892 4558 controller.go:145] "Failed to ensure lease exists, will retry" err="Get 
\"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="3.2s" Jan 20 16:42:03 crc kubenswrapper[4558]: I0120 16:42:03.968860 4558 trace.go:236] Trace[1774606925]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (20-Jan-2026 16:41:49.662) (total time: 14306ms): Jan 20 16:42:03 crc kubenswrapper[4558]: Trace[1774606925]: ---"Objects listed" error: 14306ms (16:42:03.968) Jan 20 16:42:03 crc kubenswrapper[4558]: Trace[1774606925]: [14.306333722s] [14.306333722s] END Jan 20 16:42:03 crc kubenswrapper[4558]: I0120 16:42:03.968890 4558 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Jan 20 16:42:03 crc kubenswrapper[4558]: I0120 16:42:03.968860 4558 trace.go:236] Trace[672459274]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (20-Jan-2026 16:41:50.397) (total time: 13571ms): Jan 20 16:42:03 crc kubenswrapper[4558]: Trace[672459274]: ---"Objects listed" error: 13571ms (16:42:03.968) Jan 20 16:42:03 crc kubenswrapper[4558]: Trace[672459274]: [13.571081196s] [13.571081196s] END Jan 20 16:42:03 crc kubenswrapper[4558]: I0120 16:42:03.968931 4558 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Jan 20 16:42:03 crc kubenswrapper[4558]: E0120 16:42:03.969689 4558 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Jan 20 16:42:03 crc kubenswrapper[4558]: I0120 16:42:03.969770 4558 trace.go:236] Trace[1571805611]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (20-Jan-2026 16:41:50.349) (total time: 13619ms): Jan 20 16:42:03 crc kubenswrapper[4558]: Trace[1571805611]: ---"Objects listed" error: 13619ms (16:42:03.969) Jan 20 16:42:03 crc kubenswrapper[4558]: Trace[1571805611]: [13.619707639s] [13.619707639s] END Jan 20 16:42:03 crc kubenswrapper[4558]: I0120 16:42:03.969799 4558 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Jan 20 16:42:03 crc kubenswrapper[4558]: I0120 16:42:03.969909 4558 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Jan 20 16:42:03 crc kubenswrapper[4558]: I0120 16:42:03.970080 4558 trace.go:236] Trace[231280183]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (20-Jan-2026 16:41:50.705) (total time: 13264ms): Jan 20 16:42:03 crc kubenswrapper[4558]: Trace[231280183]: ---"Objects listed" error: 13264ms (16:42:03.969) Jan 20 16:42:03 crc kubenswrapper[4558]: Trace[231280183]: [13.264695203s] [13.264695203s] END Jan 20 16:42:03 crc kubenswrapper[4558]: I0120 16:42:03.970101 4558 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.216361 4558 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:59196->192.168.126.11:17697: read: connection reset by peer" start-of-body= Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.216426 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" 
containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:59196->192.168.126.11:17697: read: connection reset by peer" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.216376 4558 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Liveness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:59206->192.168.126.11:17697: read: connection reset by peer" start-of-body= Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.216514 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:59206->192.168.126.11:17697: read: connection reset by peer" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.216698 4558 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.216719 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.525937 4558 apiserver.go:52] "Watching apiserver" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.528247 4558 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.528512 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-dns/node-resolver-d96bp","openshift-machine-config-operator/machine-config-daemon-2vr4r","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h"] Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.528827 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.528848 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 20 16:42:04 crc kubenswrapper[4558]: E0120 16:42:04.528907 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.528915 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 20 16:42:04 crc kubenswrapper[4558]: E0120 16:42:04.529481 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.529319 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.529289 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.529353 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:42:04 crc kubenswrapper[4558]: E0120 16:42:04.529654 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.529338 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.529367 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-d96bp" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.530911 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.531062 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.531902 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.531956 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.532057 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.532197 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.532205 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.532274 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.532344 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.532761 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.532782 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.532908 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.532937 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.533029 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.533239 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.534274 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.534919 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.539430 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-13 02:51:22.691937785 +0000 UTC Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.541465 4558 status_manager.go:875] 
"Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.547634 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.549852 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.562561 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.564323 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.572586 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-nv2xw"] Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.573149 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-f5t7h"] Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.573280 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.573825 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-jsqvf"] Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.574025 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-jsqvf" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.574060 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-f5t7h" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.577432 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.577834 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.577940 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.577964 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.578066 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.578202 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.578368 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.578490 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.578627 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.578825 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.578936 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.579032 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.579123 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.584371 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.589093 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with 
unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.602283 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.612396 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.621150 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.625076 4558 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.626549 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.627897 4558 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="f2e22f1d4e87dd06a67f49aedad280da48ac1c4ba2e438a4925cf8bd5330c4d8" exitCode=255 Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.627930 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"f2e22f1d4e87dd06a67f49aedad280da48ac1c4ba2e438a4925cf8bd5330c4d8"} Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.630541 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"68337d27-3fa6-4a29-88b0-82e60c3739eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.637805 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d96bp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8d2c8b-fb8e-4bff-b8d2-69570e5d9e57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4v6jx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d96bp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.645492 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.656665 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.663388 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jsqvf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bedf08c7-1f93-4931-a7f3-e729e2a137af\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xxtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jsqvf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.668386 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.668865 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"68337d27-3fa6-4a29-88b0-82e60c3739eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.673279 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Jan 20 16:42:04 crc 
kubenswrapper[4558]: I0120 16:42:04.673310 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.673332 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.673349 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.673370 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.673385 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.673402 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.673418 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.673433 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.673447 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.673462 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: 
\"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.673476 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.673492 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.673507 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.673522 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.673536 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.673551 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.673567 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.673603 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.673619 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.673637 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" 
(UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.673650 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.673717 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.673735 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.673750 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.673766 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.673780 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.673798 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.673812 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.673828 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.673842 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: 
\"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.673857 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.673871 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.673886 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.673903 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.673917 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.673933 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.673963 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.673979 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.673994 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.674011 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.674025 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.674039 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.674062 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.674079 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.674144 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.674176 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.674195 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.674211 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.674227 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.674241 4558 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.674256 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.674274 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.674289 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.674304 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.674320 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.674335 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.674351 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.674366 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.674380 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.674395 4558 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.674409 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.674424 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.674438 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.674454 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.674468 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.674483 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.674497 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.674517 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.674531 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.674546 4558 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.674561 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.674595 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.674612 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.674626 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.674641 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.674656 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.674671 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.674686 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.674701 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 20 16:42:04 crc 
kubenswrapper[4558]: I0120 16:42:04.674715 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.674730 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.674745 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.674760 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.674777 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.674793 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.674810 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.674824 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.674842 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.674858 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod 
\"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.674873 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.674888 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.674903 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.674918 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.674934 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.674948 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.674964 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.674979 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.674996 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.675010 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: 
\"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.675026 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.675041 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.675057 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.675074 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.675089 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.675104 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.675120 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.675135 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.675150 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.675182 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.675198 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.675212 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.675228 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.675244 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.675258 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.675273 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.675290 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.675305 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.675321 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.675335 4558 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.675350 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.675364 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.675381 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.675399 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.675416 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.675432 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.675446 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.675462 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.675477 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 
16:42:04.675491 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.675508 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.675525 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.675543 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.675558 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.675572 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.675601 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.675625 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.675642 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.675657 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.675672 4558 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.675687 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.675703 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.675720 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.675737 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.675752 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.675768 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.675783 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.675799 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.675818 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" 
(UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.675834 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.675850 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.675865 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.675882 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.675898 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.675915 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.675930 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.675947 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.675964 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.675980 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: 
\"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.675997 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.676012 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.676028 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.676044 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.676062 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.676081 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.676097 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.676114 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.676131 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.676147 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.676179 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.676195 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.676212 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.676229 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.676246 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.676262 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.676278 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.676273 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.676294 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.676287 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.676311 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.676333 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.676350 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.676370 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.676386 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.676402 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.676419 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.676436 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") 
pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.676455 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.676472 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.676489 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.676507 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.676550 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.676567 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.676597 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.676615 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.676632 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.676649 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: 
\"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.676704 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.676732 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.676754 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/5c8d2c8b-fb8e-4bff-b8d2-69570e5d9e57-hosts-file\") pod \"node-resolver-d96bp\" (UID: \"5c8d2c8b-fb8e-4bff-b8d2-69570e5d9e57\") " pod="openshift-dns/node-resolver-d96bp" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.676772 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/254129cd-82fc-4162-b671-2434bc9e2972-ovnkube-config\") pod \"ovnkube-node-nv2xw\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.676788 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/bedf08c7-1f93-4931-a7f3-e729e2a137af-host-var-lib-kubelet\") pod \"multus-jsqvf\" (UID: \"bedf08c7-1f93-4931-a7f3-e729e2a137af\") " pod="openshift-multus/multus-jsqvf" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.676808 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.676825 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/254129cd-82fc-4162-b671-2434bc9e2972-ovnkube-script-lib\") pod \"ovnkube-node-nv2xw\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.676843 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/bedf08c7-1f93-4931-a7f3-e729e2a137af-cnibin\") pod \"multus-jsqvf\" (UID: \"bedf08c7-1f93-4931-a7f3-e729e2a137af\") " pod="openshift-multus/multus-jsqvf" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.676858 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/bedf08c7-1f93-4931-a7f3-e729e2a137af-os-release\") pod \"multus-jsqvf\" (UID: \"bedf08c7-1f93-4931-a7f3-e729e2a137af\") " pod="openshift-multus/multus-jsqvf" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.676876 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-run-ovn\") pod \"ovnkube-node-nv2xw\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.676892 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-host-run-ovn-kubernetes\") pod \"ovnkube-node-nv2xw\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.676907 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.676925 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4b478\" (UniqueName: \"kubernetes.io/projected/68337d27-3fa6-4a29-88b0-82e60c3739eb-kube-api-access-4b478\") pod \"machine-config-daemon-2vr4r\" (UID: \"68337d27-3fa6-4a29-88b0-82e60c3739eb\") " pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.676941 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4-os-release\") pod \"multus-additional-cni-plugins-f5t7h\" (UID: \"0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4\") " pod="openshift-multus/multus-additional-cni-plugins-f5t7h" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.677130 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.677150 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/68337d27-3fa6-4a29-88b0-82e60c3739eb-rootfs\") pod \"machine-config-daemon-2vr4r\" (UID: \"68337d27-3fa6-4a29-88b0-82e60c3739eb\") " pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.677184 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-nv2xw\" (UID: 
\"254129cd-82fc-4162-b671-2434bc9e2972\") " pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.677202 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4-tuning-conf-dir\") pod \"multus-additional-cni-plugins-f5t7h\" (UID: \"0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4\") " pod="openshift-multus/multus-additional-cni-plugins-f5t7h" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.677224 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.677239 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-host-run-netns\") pod \"ovnkube-node-nv2xw\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.677257 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/bedf08c7-1f93-4931-a7f3-e729e2a137af-host-run-multus-certs\") pod \"multus-jsqvf\" (UID: \"bedf08c7-1f93-4931-a7f3-e729e2a137af\") " pod="openshift-multus/multus-jsqvf" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.677272 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/bedf08c7-1f93-4931-a7f3-e729e2a137af-etc-kubernetes\") pod \"multus-jsqvf\" (UID: \"bedf08c7-1f93-4931-a7f3-e729e2a137af\") " pod="openshift-multus/multus-jsqvf" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.677290 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4v6jx\" (UniqueName: \"kubernetes.io/projected/5c8d2c8b-fb8e-4bff-b8d2-69570e5d9e57-kube-api-access-4v6jx\") pod \"node-resolver-d96bp\" (UID: \"5c8d2c8b-fb8e-4bff-b8d2-69570e5d9e57\") " pod="openshift-dns/node-resolver-d96bp" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.677305 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-host-cni-bin\") pod \"ovnkube-node-nv2xw\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.677320 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4-system-cni-dir\") pod \"multus-additional-cni-plugins-f5t7h\" (UID: \"0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4\") " pod="openshift-multus/multus-additional-cni-plugins-f5t7h" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.677339 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.677358 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.677374 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/bedf08c7-1f93-4931-a7f3-e729e2a137af-multus-daemon-config\") pod \"multus-jsqvf\" (UID: \"bedf08c7-1f93-4931-a7f3-e729e2a137af\") " pod="openshift-multus/multus-jsqvf" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.677390 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-var-lib-openvswitch\") pod \"ovnkube-node-nv2xw\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.677404 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/bedf08c7-1f93-4931-a7f3-e729e2a137af-multus-conf-dir\") pod \"multus-jsqvf\" (UID: \"bedf08c7-1f93-4931-a7f3-e729e2a137af\") " pod="openshift-multus/multus-jsqvf" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.677419 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-etc-openvswitch\") pod \"ovnkube-node-nv2xw\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.677435 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/254129cd-82fc-4162-b671-2434bc9e2972-ovn-node-metrics-cert\") pod \"ovnkube-node-nv2xw\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.677451 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4-cnibin\") pod \"multus-additional-cni-plugins-f5t7h\" (UID: \"0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4\") " pod="openshift-multus/multus-additional-cni-plugins-f5t7h" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.677468 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nrmwm\" (UniqueName: \"kubernetes.io/projected/0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4-kube-api-access-nrmwm\") pod \"multus-additional-cni-plugins-f5t7h\" (UID: \"0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4\") " 
pod="openshift-multus/multus-additional-cni-plugins-f5t7h" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.677484 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-run-systemd\") pod \"ovnkube-node-nv2xw\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.677500 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-run-openvswitch\") pod \"ovnkube-node-nv2xw\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.677518 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.677536 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.677551 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/68337d27-3fa6-4a29-88b0-82e60c3739eb-proxy-tls\") pod \"machine-config-daemon-2vr4r\" (UID: \"68337d27-3fa6-4a29-88b0-82e60c3739eb\") " pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.677567 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/68337d27-3fa6-4a29-88b0-82e60c3739eb-mcd-auth-proxy-config\") pod \"machine-config-daemon-2vr4r\" (UID: \"68337d27-3fa6-4a29-88b0-82e60c3739eb\") " pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.677597 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-host-kubelet\") pod \"ovnkube-node-nv2xw\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.677611 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/254129cd-82fc-4162-b671-2434bc9e2972-env-overrides\") pod \"ovnkube-node-nv2xw\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.677629 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: 
\"kubernetes.io/host-path/bedf08c7-1f93-4931-a7f3-e729e2a137af-host-var-lib-cni-bin\") pod \"multus-jsqvf\" (UID: \"bedf08c7-1f93-4931-a7f3-e729e2a137af\") " pod="openshift-multus/multus-jsqvf" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.677645 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-host-cni-netd\") pod \"ovnkube-node-nv2xw\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.677660 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-f5t7h\" (UID: \"0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4\") " pod="openshift-multus/multus-additional-cni-plugins-f5t7h" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.677676 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/bedf08c7-1f93-4931-a7f3-e729e2a137af-multus-cni-dir\") pod \"multus-jsqvf\" (UID: \"bedf08c7-1f93-4931-a7f3-e729e2a137af\") " pod="openshift-multus/multus-jsqvf" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.677692 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/bedf08c7-1f93-4931-a7f3-e729e2a137af-cni-binary-copy\") pod \"multus-jsqvf\" (UID: \"bedf08c7-1f93-4931-a7f3-e729e2a137af\") " pod="openshift-multus/multus-jsqvf" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.677710 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.677728 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-log-socket\") pod \"ovnkube-node-nv2xw\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.677746 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4-cni-binary-copy\") pod \"multus-additional-cni-plugins-f5t7h\" (UID: \"0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4\") " pod="openshift-multus/multus-additional-cni-plugins-f5t7h" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.677763 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.677812 4558 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/bedf08c7-1f93-4931-a7f3-e729e2a137af-multus-socket-dir-parent\") pod \"multus-jsqvf\" (UID: \"bedf08c7-1f93-4931-a7f3-e729e2a137af\") " pod="openshift-multus/multus-jsqvf" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.677833 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.677848 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-systemd-units\") pod \"ovnkube-node-nv2xw\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.677864 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/bedf08c7-1f93-4931-a7f3-e729e2a137af-hostroot\") pod \"multus-jsqvf\" (UID: \"bedf08c7-1f93-4931-a7f3-e729e2a137af\") " pod="openshift-multus/multus-jsqvf" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.677880 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-host-slash\") pod \"ovnkube-node-nv2xw\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.677897 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/bedf08c7-1f93-4931-a7f3-e729e2a137af-host-run-netns\") pod \"multus-jsqvf\" (UID: \"bedf08c7-1f93-4931-a7f3-e729e2a137af\") " pod="openshift-multus/multus-jsqvf" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.677913 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rgzcc\" (UniqueName: \"kubernetes.io/projected/254129cd-82fc-4162-b671-2434bc9e2972-kube-api-access-rgzcc\") pod \"ovnkube-node-nv2xw\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.677929 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/bedf08c7-1f93-4931-a7f3-e729e2a137af-system-cni-dir\") pod \"multus-jsqvf\" (UID: \"bedf08c7-1f93-4931-a7f3-e729e2a137af\") " pod="openshift-multus/multus-jsqvf" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.677946 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.677962 4558 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-node-log\") pod \"ovnkube-node-nv2xw\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.677980 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xxtbc\" (UniqueName: \"kubernetes.io/projected/bedf08c7-1f93-4931-a7f3-e729e2a137af-kube-api-access-xxtbc\") pod \"multus-jsqvf\" (UID: \"bedf08c7-1f93-4931-a7f3-e729e2a137af\") " pod="openshift-multus/multus-jsqvf" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.677994 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/bedf08c7-1f93-4931-a7f3-e729e2a137af-host-run-k8s-cni-cncf-io\") pod \"multus-jsqvf\" (UID: \"bedf08c7-1f93-4931-a7f3-e729e2a137af\") " pod="openshift-multus/multus-jsqvf" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.678010 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/bedf08c7-1f93-4931-a7f3-e729e2a137af-host-var-lib-cni-multus\") pod \"multus-jsqvf\" (UID: \"bedf08c7-1f93-4931-a7f3-e729e2a137af\") " pod="openshift-multus/multus-jsqvf" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.678042 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.678054 4558 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.679264 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.680282 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.681088 4558 scope.go:117] "RemoveContainer" containerID="f2e22f1d4e87dd06a67f49aedad280da48ac1c4ba2e438a4925cf8bd5330c4d8" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.686044 4558 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. 
Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.687741 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.688191 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.688952 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.676271 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.676372 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.676487 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.676541 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.676556 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.690279 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.690326 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.690600 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.690921 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.691192 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.691272 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.691363 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.691528 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.691571 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.691815 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.691887 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.691664 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.676703 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.676734 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.676894 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). 
InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.676903 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.677008 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.677040 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.677054 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.677276 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.677328 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.677335 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.677352 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.677429 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.677456 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.677461 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.677496 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.677570 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.677632 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.677645 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.677653 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.677670 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.677817 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.677830 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.677842 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.677873 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.677876 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.677939 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.678483 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.678770 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.678912 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.678982 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.679111 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.679192 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.679212 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). 
InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.679314 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.679422 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.679489 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.679502 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.679617 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.679568 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.679709 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.679882 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.679928 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.679961 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.680089 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.680111 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.680158 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.680200 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.680378 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.680550 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.680622 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.680631 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.680652 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.692383 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.680826 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.680857 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.680943 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.680953 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.692440 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.680986 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.681103 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: E0120 16:42:04.681232 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-20 16:42:05.181214574 +0000 UTC m=+18.941552541 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.692575 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.681275 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.681291 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.681330 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.681345 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.681417 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.681522 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.681537 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.681838 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.681893 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.681932 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). 
InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.682187 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.682250 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.682272 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.682522 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.682627 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.682672 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.682832 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.682929 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.683012 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.683115 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.683153 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.683240 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.683270 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.683157 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.683269 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.683855 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.684095 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.684309 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.684426 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.684669 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.684667 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.684681 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.684545 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.684906 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.685007 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.685247 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.685251 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.685290 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.685313 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.685398 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: E0120 16:42:04.685459 4558 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.685501 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.685955 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.686259 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.686302 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.686316 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.693825 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.686441 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.687066 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.687075 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.693848 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.687288 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: E0120 16:42:04.687615 4558 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.687611 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.687627 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.687867 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.687996 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.688189 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.688567 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.689534 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.689661 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.689957 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.676672 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: E0120 16:42:04.692991 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-20 16:42:05.192929623 +0000 UTC m=+18.953267589 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.693028 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.693256 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.693343 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.693444 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.693486 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.693627 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.693743 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: E0120 16:42:04.694030 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-20 16:42:05.194010105 +0000 UTC m=+18.954348072 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.694470 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.694706 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.695516 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.695599 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: E0120 16:42:04.696148 4558 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 20 16:42:04 crc kubenswrapper[4558]: E0120 16:42:04.696198 4558 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 20 16:42:04 crc kubenswrapper[4558]: E0120 16:42:04.696210 4558 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 20 16:42:04 crc kubenswrapper[4558]: E0120 16:42:04.696283 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-20 16:42:05.196263893 +0000 UTC m=+18.956601859 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.696551 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.697828 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.697847 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.698181 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.698203 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.697528 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.698281 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.698907 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.699198 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 20 16:42:04 crc kubenswrapper[4558]: E0120 16:42:04.700033 4558 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 20 16:42:04 crc kubenswrapper[4558]: E0120 16:42:04.700058 4558 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 20 16:42:04 crc kubenswrapper[4558]: E0120 16:42:04.700071 4558 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 20 16:42:04 crc kubenswrapper[4558]: E0120 16:42:04.700105 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-20 16:42:05.200092902 +0000 UTC m=+18.960430870 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.701386 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.704392 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.704530 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.704712 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.704728 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.704789 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.704976 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.705006 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.705116 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.705234 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.705254 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.705652 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.705313 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.705321 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.705686 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.705414 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). 
InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.705435 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.705443 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.705641 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.705971 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.706595 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.706866 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.706944 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.706966 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.706984 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.707007 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.707174 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.707201 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.707365 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.707364 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.707377 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.707412 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.707460 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.707642 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.707813 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.708241 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.710530 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.710698 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d96bp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8d2c8b-fb8e-4bff-b8d2-69570e5d9e57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4v6jx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d96bp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.722068 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"254129cd-82fc-4162-b671-2434bc9e2972\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: 
[kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732574532
65a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod 
\"openshift-ovn-kubernetes\"/\"ovnkube-node-nv2xw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.724420 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.728646 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.729271 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f5t7h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f5t7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.734696 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.739708 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.779547 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-host-slash\") pod \"ovnkube-node-nv2xw\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.779601 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/bedf08c7-1f93-4931-a7f3-e729e2a137af-host-run-netns\") pod \"multus-jsqvf\" (UID: \"bedf08c7-1f93-4931-a7f3-e729e2a137af\") " pod="openshift-multus/multus-jsqvf" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.779618 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/bedf08c7-1f93-4931-a7f3-e729e2a137af-hostroot\") pod \"multus-jsqvf\" (UID: \"bedf08c7-1f93-4931-a7f3-e729e2a137af\") " pod="openshift-multus/multus-jsqvf" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.779633 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.779647 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-node-log\") pod \"ovnkube-node-nv2xw\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.779662 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rgzcc\" (UniqueName: \"kubernetes.io/projected/254129cd-82fc-4162-b671-2434bc9e2972-kube-api-access-rgzcc\") pod \"ovnkube-node-nv2xw\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.779681 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/bedf08c7-1f93-4931-a7f3-e729e2a137af-system-cni-dir\") pod \"multus-jsqvf\" (UID: \"bedf08c7-1f93-4931-a7f3-e729e2a137af\") " pod="openshift-multus/multus-jsqvf" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.779695 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/bedf08c7-1f93-4931-a7f3-e729e2a137af-host-run-k8s-cni-cncf-io\") pod \"multus-jsqvf\" (UID: \"bedf08c7-1f93-4931-a7f3-e729e2a137af\") " pod="openshift-multus/multus-jsqvf" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 
16:42:04.779711 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/bedf08c7-1f93-4931-a7f3-e729e2a137af-host-var-lib-cni-multus\") pod \"multus-jsqvf\" (UID: \"bedf08c7-1f93-4931-a7f3-e729e2a137af\") " pod="openshift-multus/multus-jsqvf" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.779724 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xxtbc\" (UniqueName: \"kubernetes.io/projected/bedf08c7-1f93-4931-a7f3-e729e2a137af-kube-api-access-xxtbc\") pod \"multus-jsqvf\" (UID: \"bedf08c7-1f93-4931-a7f3-e729e2a137af\") " pod="openshift-multus/multus-jsqvf" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.779738 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/254129cd-82fc-4162-b671-2434bc9e2972-ovnkube-config\") pod \"ovnkube-node-nv2xw\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.779752 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/bedf08c7-1f93-4931-a7f3-e729e2a137af-host-var-lib-kubelet\") pod \"multus-jsqvf\" (UID: \"bedf08c7-1f93-4931-a7f3-e729e2a137af\") " pod="openshift-multus/multus-jsqvf" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.779768 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/5c8d2c8b-fb8e-4bff-b8d2-69570e5d9e57-hosts-file\") pod \"node-resolver-d96bp\" (UID: \"5c8d2c8b-fb8e-4bff-b8d2-69570e5d9e57\") " pod="openshift-dns/node-resolver-d96bp" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.779805 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-run-ovn\") pod \"ovnkube-node-nv2xw\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.779820 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/254129cd-82fc-4162-b671-2434bc9e2972-ovnkube-script-lib\") pod \"ovnkube-node-nv2xw\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.779833 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/bedf08c7-1f93-4931-a7f3-e729e2a137af-cnibin\") pod \"multus-jsqvf\" (UID: \"bedf08c7-1f93-4931-a7f3-e729e2a137af\") " pod="openshift-multus/multus-jsqvf" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.779846 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/bedf08c7-1f93-4931-a7f3-e729e2a137af-os-release\") pod \"multus-jsqvf\" (UID: \"bedf08c7-1f93-4931-a7f3-e729e2a137af\") " pod="openshift-multus/multus-jsqvf" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.779859 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: 
\"kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-host-run-ovn-kubernetes\") pod \"ovnkube-node-nv2xw\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.779875 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.779890 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4b478\" (UniqueName: \"kubernetes.io/projected/68337d27-3fa6-4a29-88b0-82e60c3739eb-kube-api-access-4b478\") pod \"machine-config-daemon-2vr4r\" (UID: \"68337d27-3fa6-4a29-88b0-82e60c3739eb\") " pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.779904 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4-os-release\") pod \"multus-additional-cni-plugins-f5t7h\" (UID: \"0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4\") " pod="openshift-multus/multus-additional-cni-plugins-f5t7h" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.779919 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/68337d27-3fa6-4a29-88b0-82e60c3739eb-rootfs\") pod \"machine-config-daemon-2vr4r\" (UID: \"68337d27-3fa6-4a29-88b0-82e60c3739eb\") " pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.779934 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-nv2xw\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.779949 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4-tuning-conf-dir\") pod \"multus-additional-cni-plugins-f5t7h\" (UID: \"0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4\") " pod="openshift-multus/multus-additional-cni-plugins-f5t7h" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.779962 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/bedf08c7-1f93-4931-a7f3-e729e2a137af-etc-kubernetes\") pod \"multus-jsqvf\" (UID: \"bedf08c7-1f93-4931-a7f3-e729e2a137af\") " pod="openshift-multus/multus-jsqvf" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.779986 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-host-run-netns\") pod \"ovnkube-node-nv2xw\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780002 4558 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/bedf08c7-1f93-4931-a7f3-e729e2a137af-host-run-multus-certs\") pod \"multus-jsqvf\" (UID: \"bedf08c7-1f93-4931-a7f3-e729e2a137af\") " pod="openshift-multus/multus-jsqvf" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780025 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4v6jx\" (UniqueName: \"kubernetes.io/projected/5c8d2c8b-fb8e-4bff-b8d2-69570e5d9e57-kube-api-access-4v6jx\") pod \"node-resolver-d96bp\" (UID: \"5c8d2c8b-fb8e-4bff-b8d2-69570e5d9e57\") " pod="openshift-dns/node-resolver-d96bp" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780040 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-host-cni-bin\") pod \"ovnkube-node-nv2xw\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780054 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4-system-cni-dir\") pod \"multus-additional-cni-plugins-f5t7h\" (UID: \"0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4\") " pod="openshift-multus/multus-additional-cni-plugins-f5t7h" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780070 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-var-lib-openvswitch\") pod \"ovnkube-node-nv2xw\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780084 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/bedf08c7-1f93-4931-a7f3-e729e2a137af-multus-conf-dir\") pod \"multus-jsqvf\" (UID: \"bedf08c7-1f93-4931-a7f3-e729e2a137af\") " pod="openshift-multus/multus-jsqvf" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780097 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/bedf08c7-1f93-4931-a7f3-e729e2a137af-multus-daemon-config\") pod \"multus-jsqvf\" (UID: \"bedf08c7-1f93-4931-a7f3-e729e2a137af\") " pod="openshift-multus/multus-jsqvf" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780111 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nrmwm\" (UniqueName: \"kubernetes.io/projected/0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4-kube-api-access-nrmwm\") pod \"multus-additional-cni-plugins-f5t7h\" (UID: \"0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4\") " pod="openshift-multus/multus-additional-cni-plugins-f5t7h" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780126 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-run-systemd\") pod \"ovnkube-node-nv2xw\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780140 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-etc-openvswitch\") pod \"ovnkube-node-nv2xw\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780154 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/254129cd-82fc-4162-b671-2434bc9e2972-ovn-node-metrics-cert\") pod \"ovnkube-node-nv2xw\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780183 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4-cnibin\") pod \"multus-additional-cni-plugins-f5t7h\" (UID: \"0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4\") " pod="openshift-multus/multus-additional-cni-plugins-f5t7h" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780203 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-run-openvswitch\") pod \"ovnkube-node-nv2xw\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780230 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/68337d27-3fa6-4a29-88b0-82e60c3739eb-mcd-auth-proxy-config\") pod \"machine-config-daemon-2vr4r\" (UID: \"68337d27-3fa6-4a29-88b0-82e60c3739eb\") " pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780244 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-host-kubelet\") pod \"ovnkube-node-nv2xw\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780257 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/254129cd-82fc-4162-b671-2434bc9e2972-env-overrides\") pod \"ovnkube-node-nv2xw\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780273 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/68337d27-3fa6-4a29-88b0-82e60c3739eb-proxy-tls\") pod \"machine-config-daemon-2vr4r\" (UID: \"68337d27-3fa6-4a29-88b0-82e60c3739eb\") " pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780288 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/bedf08c7-1f93-4931-a7f3-e729e2a137af-host-var-lib-cni-bin\") pod \"multus-jsqvf\" (UID: \"bedf08c7-1f93-4931-a7f3-e729e2a137af\") " pod="openshift-multus/multus-jsqvf" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780302 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: 
\"kubernetes.io/configmap/bedf08c7-1f93-4931-a7f3-e729e2a137af-cni-binary-copy\") pod \"multus-jsqvf\" (UID: \"bedf08c7-1f93-4931-a7f3-e729e2a137af\") " pod="openshift-multus/multus-jsqvf" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780318 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-log-socket\") pod \"ovnkube-node-nv2xw\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780333 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-host-cni-netd\") pod \"ovnkube-node-nv2xw\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780347 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-f5t7h\" (UID: \"0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4\") " pod="openshift-multus/multus-additional-cni-plugins-f5t7h" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780360 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/bedf08c7-1f93-4931-a7f3-e729e2a137af-multus-cni-dir\") pod \"multus-jsqvf\" (UID: \"bedf08c7-1f93-4931-a7f3-e729e2a137af\") " pod="openshift-multus/multus-jsqvf" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780379 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-systemd-units\") pod \"ovnkube-node-nv2xw\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780396 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4-cni-binary-copy\") pod \"multus-additional-cni-plugins-f5t7h\" (UID: \"0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4\") " pod="openshift-multus/multus-additional-cni-plugins-f5t7h" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780413 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/bedf08c7-1f93-4931-a7f3-e729e2a137af-multus-socket-dir-parent\") pod \"multus-jsqvf\" (UID: \"bedf08c7-1f93-4931-a7f3-e729e2a137af\") " pod="openshift-multus/multus-jsqvf" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780493 4558 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780505 4558 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780514 4558 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780523 4558 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780531 4558 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780540 4558 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780548 4558 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780557 4558 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780567 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780575 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780599 4558 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780607 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780615 4558 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780622 4558 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780631 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780640 4558 
reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780647 4558 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780655 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780663 4558 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780671 4558 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780679 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780687 4558 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780696 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780703 4558 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780711 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780720 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780729 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780736 4558 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780744 4558 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780752 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780759 4558 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780769 4558 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780777 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780786 4558 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780793 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780801 4558 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780809 4558 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780818 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780828 4558 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780835 4558 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780843 4558 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780851 4558 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780858 4558 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780866 4558 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780873 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780881 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780889 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780897 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780904 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780912 4558 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780919 4558 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780927 4558 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780936 4558 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780945 4558 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780953 
4558 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780962 4558 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780971 4558 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780980 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780989 4558 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.780997 4558 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781004 4558 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781012 4558 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781019 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781027 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781035 4558 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781044 4558 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781051 4558 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781058 4558 reconciler_common.go:293] 
"Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781065 4558 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781074 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781081 4558 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781089 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781097 4558 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781105 4558 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781113 4558 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781120 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781128 4558 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781137 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781145 4558 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781153 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781177 4558 
reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781185 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781193 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781201 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781209 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781216 4558 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781223 4558 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781231 4558 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781239 4558 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781246 4558 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781255 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781262 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781270 4558 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781277 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: 
\"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781286 4558 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781294 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781304 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781312 4558 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781321 4558 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781328 4558 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781336 4558 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781344 4558 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781351 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781360 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781368 4558 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781377 4558 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781385 4558 reconciler_common.go:293] "Volume 
detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781392 4558 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781400 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781409 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781418 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781426 4558 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781433 4558 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781441 4558 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781450 4558 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781458 4558 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781465 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781473 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781481 4558 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781490 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: 
\"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781499 4558 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781506 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781516 4558 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781514 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/5c8d2c8b-fb8e-4bff-b8d2-69570e5d9e57-hosts-file\") pod \"node-resolver-d96bp\" (UID: \"5c8d2c8b-fb8e-4bff-b8d2-69570e5d9e57\") " pod="openshift-dns/node-resolver-d96bp" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781504 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/bedf08c7-1f93-4931-a7f3-e729e2a137af-host-run-multus-certs\") pod \"multus-jsqvf\" (UID: \"bedf08c7-1f93-4931-a7f3-e729e2a137af\") " pod="openshift-multus/multus-jsqvf" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781570 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781624 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-host-slash\") pod \"ovnkube-node-nv2xw\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781523 4558 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781653 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-run-ovn\") pod \"ovnkube-node-nv2xw\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781667 4558 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781683 4558 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node 
\"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781696 4558 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781709 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781721 4558 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781732 4558 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781745 4558 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781756 4558 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781766 4558 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781776 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781786 4558 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781797 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781807 4558 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781819 4558 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781831 4558 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: 
\"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781841 4558 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781851 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781863 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781874 4558 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781884 4558 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781895 4558 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781899 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-host-cni-bin\") pod \"ovnkube-node-nv2xw\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781924 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/bedf08c7-1f93-4931-a7f3-e729e2a137af-host-run-netns\") pod \"multus-jsqvf\" (UID: \"bedf08c7-1f93-4931-a7f3-e729e2a137af\") " pod="openshift-multus/multus-jsqvf" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781467 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-run-openvswitch\") pod \"ovnkube-node-nv2xw\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781905 4558 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781956 4558 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781959 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: 
\"kubernetes.io/host-path/bedf08c7-1f93-4931-a7f3-e729e2a137af-hostroot\") pod \"multus-jsqvf\" (UID: \"bedf08c7-1f93-4931-a7f3-e729e2a137af\") " pod="openshift-multus/multus-jsqvf" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781967 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781977 4558 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781985 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.781987 4558 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.782007 4558 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.782019 4558 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.782025 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4-system-cni-dir\") pod \"multus-additional-cni-plugins-f5t7h\" (UID: \"0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4\") " pod="openshift-multus/multus-additional-cni-plugins-f5t7h" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.782029 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.782047 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-node-log\") pod \"ovnkube-node-nv2xw\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.782048 4558 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.782065 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-var-lib-openvswitch\") pod \"ovnkube-node-nv2xw\" (UID: 
\"254129cd-82fc-4162-b671-2434bc9e2972\") " pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.782068 4558 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.782101 4558 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.782111 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.782120 4558 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.782128 4558 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.782136 4558 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.782145 4558 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.782157 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.782193 4558 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.782202 4558 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.782210 4558 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.782218 4558 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.782228 4558 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: 
\"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.782236 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.782264 4558 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.782289 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/bedf08c7-1f93-4931-a7f3-e729e2a137af-system-cni-dir\") pod \"multus-jsqvf\" (UID: \"bedf08c7-1f93-4931-a7f3-e729e2a137af\") " pod="openshift-multus/multus-jsqvf" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.782308 4558 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.782316 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-etc-openvswitch\") pod \"ovnkube-node-nv2xw\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.782330 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/bedf08c7-1f93-4931-a7f3-e729e2a137af-host-run-k8s-cni-cncf-io\") pod \"multus-jsqvf\" (UID: \"bedf08c7-1f93-4931-a7f3-e729e2a137af\") " pod="openshift-multus/multus-jsqvf" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.782351 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/bedf08c7-1f93-4931-a7f3-e729e2a137af-host-var-lib-cni-multus\") pod \"multus-jsqvf\" (UID: \"bedf08c7-1f93-4931-a7f3-e729e2a137af\") " pod="openshift-multus/multus-jsqvf" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.782384 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/254129cd-82fc-4162-b671-2434bc9e2972-ovnkube-script-lib\") pod \"ovnkube-node-nv2xw\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.782472 4558 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.782493 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-log-socket\") pod \"ovnkube-node-nv2xw\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.782504 4558 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/bedf08c7-1f93-4931-a7f3-e729e2a137af-cnibin\") pod \"multus-jsqvf\" (UID: \"bedf08c7-1f93-4931-a7f3-e729e2a137af\") " pod="openshift-multus/multus-jsqvf" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.782517 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-host-kubelet\") pod \"ovnkube-node-nv2xw\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.782625 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/bedf08c7-1f93-4931-a7f3-e729e2a137af-os-release\") pod \"multus-jsqvf\" (UID: \"bedf08c7-1f93-4931-a7f3-e729e2a137af\") " pod="openshift-multus/multus-jsqvf" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.782763 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/bedf08c7-1f93-4931-a7f3-e729e2a137af-host-var-lib-cni-bin\") pod \"multus-jsqvf\" (UID: \"bedf08c7-1f93-4931-a7f3-e729e2a137af\") " pod="openshift-multus/multus-jsqvf" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.782644 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-host-run-ovn-kubernetes\") pod \"ovnkube-node-nv2xw\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.783640 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-host-run-netns\") pod \"ovnkube-node-nv2xw\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.783665 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/bedf08c7-1f93-4931-a7f3-e729e2a137af-multus-conf-dir\") pod \"multus-jsqvf\" (UID: \"bedf08c7-1f93-4931-a7f3-e729e2a137af\") " pod="openshift-multus/multus-jsqvf" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.783689 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-nv2xw\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.783765 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/254129cd-82fc-4162-b671-2434bc9e2972-env-overrides\") pod \"ovnkube-node-nv2xw\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.784152 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4-tuning-conf-dir\") pod \"multus-additional-cni-plugins-f5t7h\" (UID: 
\"0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4\") " pod="openshift-multus/multus-additional-cni-plugins-f5t7h" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.784243 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/bedf08c7-1f93-4931-a7f3-e729e2a137af-cni-binary-copy\") pod \"multus-jsqvf\" (UID: \"bedf08c7-1f93-4931-a7f3-e729e2a137af\") " pod="openshift-multus/multus-jsqvf" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.784272 4558 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.784275 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-host-cni-netd\") pod \"ovnkube-node-nv2xw\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.784295 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/bedf08c7-1f93-4931-a7f3-e729e2a137af-etc-kubernetes\") pod \"multus-jsqvf\" (UID: \"bedf08c7-1f93-4931-a7f3-e729e2a137af\") " pod="openshift-multus/multus-jsqvf" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.784379 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/bedf08c7-1f93-4931-a7f3-e729e2a137af-multus-cni-dir\") pod \"multus-jsqvf\" (UID: \"bedf08c7-1f93-4931-a7f3-e729e2a137af\") " pod="openshift-multus/multus-jsqvf" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.784397 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4-cnibin\") pod \"multus-additional-cni-plugins-f5t7h\" (UID: \"0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4\") " pod="openshift-multus/multus-additional-cni-plugins-f5t7h" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.784724 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-f5t7h\" (UID: \"0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4\") " pod="openshift-multus/multus-additional-cni-plugins-f5t7h" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.784751 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/68337d27-3fa6-4a29-88b0-82e60c3739eb-rootfs\") pod \"machine-config-daemon-2vr4r\" (UID: \"68337d27-3fa6-4a29-88b0-82e60c3739eb\") " pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.784769 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/254129cd-82fc-4162-b671-2434bc9e2972-ovnkube-config\") pod \"ovnkube-node-nv2xw\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.784796 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: 
\"kubernetes.io/host-path/bedf08c7-1f93-4931-a7f3-e729e2a137af-multus-socket-dir-parent\") pod \"multus-jsqvf\" (UID: \"bedf08c7-1f93-4931-a7f3-e729e2a137af\") " pod="openshift-multus/multus-jsqvf" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.784812 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-systemd-units\") pod \"ovnkube-node-nv2xw\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.784835 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4-os-release\") pod \"multus-additional-cni-plugins-f5t7h\" (UID: \"0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4\") " pod="openshift-multus/multus-additional-cni-plugins-f5t7h" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.785207 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/bedf08c7-1f93-4931-a7f3-e729e2a137af-host-var-lib-kubelet\") pod \"multus-jsqvf\" (UID: \"bedf08c7-1f93-4931-a7f3-e729e2a137af\") " pod="openshift-multus/multus-jsqvf" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.785246 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/bedf08c7-1f93-4931-a7f3-e729e2a137af-multus-daemon-config\") pod \"multus-jsqvf\" (UID: \"bedf08c7-1f93-4931-a7f3-e729e2a137af\") " pod="openshift-multus/multus-jsqvf" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.785265 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-run-systemd\") pod \"ovnkube-node-nv2xw\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.785278 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.789371 4558 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.789385 4558 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.789397 4558 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.789406 4558 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.789416 4558 reconciler_common.go:293] "Volume 
detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.789425 4558 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.789432 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.789441 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.789450 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.789460 4558 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.789469 4558 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.789477 4558 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.789487 4558 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.789495 4558 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.789503 4558 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.789511 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.789520 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 
16:42:04.789529 4558 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.789538 4558 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.787726 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4-cni-binary-copy\") pod \"multus-additional-cni-plugins-f5t7h\" (UID: \"0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4\") " pod="openshift-multus/multus-additional-cni-plugins-f5t7h" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.785425 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/68337d27-3fa6-4a29-88b0-82e60c3739eb-mcd-auth-proxy-config\") pod \"machine-config-daemon-2vr4r\" (UID: \"68337d27-3fa6-4a29-88b0-82e60c3739eb\") " pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.787030 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/68337d27-3fa6-4a29-88b0-82e60c3739eb-proxy-tls\") pod \"machine-config-daemon-2vr4r\" (UID: \"68337d27-3fa6-4a29-88b0-82e60c3739eb\") " pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.787359 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/254129cd-82fc-4162-b671-2434bc9e2972-ovn-node-metrics-cert\") pod \"ovnkube-node-nv2xw\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.795337 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xxtbc\" (UniqueName: \"kubernetes.io/projected/bedf08c7-1f93-4931-a7f3-e729e2a137af-kube-api-access-xxtbc\") pod \"multus-jsqvf\" (UID: \"bedf08c7-1f93-4931-a7f3-e729e2a137af\") " pod="openshift-multus/multus-jsqvf" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.796915 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4v6jx\" (UniqueName: \"kubernetes.io/projected/5c8d2c8b-fb8e-4bff-b8d2-69570e5d9e57-kube-api-access-4v6jx\") pod \"node-resolver-d96bp\" (UID: \"5c8d2c8b-fb8e-4bff-b8d2-69570e5d9e57\") " pod="openshift-dns/node-resolver-d96bp" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.797542 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rgzcc\" (UniqueName: \"kubernetes.io/projected/254129cd-82fc-4162-b671-2434bc9e2972-kube-api-access-rgzcc\") pod \"ovnkube-node-nv2xw\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.807240 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4b478\" (UniqueName: \"kubernetes.io/projected/68337d27-3fa6-4a29-88b0-82e60c3739eb-kube-api-access-4b478\") pod \"machine-config-daemon-2vr4r\" (UID: 
\"68337d27-3fa6-4a29-88b0-82e60c3739eb\") " pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.807792 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nrmwm\" (UniqueName: \"kubernetes.io/projected/0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4-kube-api-access-nrmwm\") pod \"multus-additional-cni-plugins-f5t7h\" (UID: \"0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4\") " pod="openshift-multus/multus-additional-cni-plugins-f5t7h" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.839647 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.845864 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" Jan 20 16:42:04 crc kubenswrapper[4558]: W0120 16:42:04.847140 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37a5e44f_9a88_4405_be8a_b645485e7312.slice/crio-bef61d31e1bf98fed51ac00edfb7f088605a8af7e0801e1028e29930d6d782c9 WatchSource:0}: Error finding container bef61d31e1bf98fed51ac00edfb7f088605a8af7e0801e1028e29930d6d782c9: Status 404 returned error can't find the container with id bef61d31e1bf98fed51ac00edfb7f088605a8af7e0801e1028e29930d6d782c9 Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.850786 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 20 16:42:04 crc kubenswrapper[4558]: W0120 16:42:04.853603 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod68337d27_3fa6_4a29_88b0_82e60c3739eb.slice/crio-018da4592784da5b6b8293e5579775808cbd3d3399533f46b46e921a1768b60f WatchSource:0}: Error finding container 018da4592784da5b6b8293e5579775808cbd3d3399533f46b46e921a1768b60f: Status 404 returned error can't find the container with id 018da4592784da5b6b8293e5579775808cbd3d3399533f46b46e921a1768b60f Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.855637 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-d96bp" Jan 20 16:42:04 crc kubenswrapper[4558]: W0120 16:42:04.861183 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd75a4c96_2883_4a0b_bab2_0fab2b6c0b49.slice/crio-9586c805339ad582856773e9889f4ce5027cd1ea7d1192590e96331b05c5e42a WatchSource:0}: Error finding container 9586c805339ad582856773e9889f4ce5027cd1ea7d1192590e96331b05c5e42a: Status 404 returned error can't find the container with id 9586c805339ad582856773e9889f4ce5027cd1ea7d1192590e96331b05c5e42a Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.861555 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 20 16:42:04 crc kubenswrapper[4558]: W0120 16:42:04.878056 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podef543e1b_8068_4ea3_b32a_61027b32e95d.slice/crio-f4283834d26d0f9db0c512fdbcd656c9d4553a9ecc416e480099508b6960c78a WatchSource:0}: Error finding container f4283834d26d0f9db0c512fdbcd656c9d4553a9ecc416e480099508b6960c78a: Status 404 returned error can't find the container with id f4283834d26d0f9db0c512fdbcd656c9d4553a9ecc416e480099508b6960c78a Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.886938 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.891909 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-jsqvf" Jan 20 16:42:04 crc kubenswrapper[4558]: I0120 16:42:04.899335 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-f5t7h" Jan 20 16:42:04 crc kubenswrapper[4558]: W0120 16:42:04.911092 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod254129cd_82fc_4162_b671_2434bc9e2972.slice/crio-93769c5fb7661e2ef34ef9538594b4c32ba73d941226786897ed3ff239b9b8f5 WatchSource:0}: Error finding container 93769c5fb7661e2ef34ef9538594b4c32ba73d941226786897ed3ff239b9b8f5: Status 404 returned error can't find the container with id 93769c5fb7661e2ef34ef9538594b4c32ba73d941226786897ed3ff239b9b8f5 Jan 20 16:42:04 crc kubenswrapper[4558]: W0120 16:42:04.927568 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbedf08c7_1f93_4931_a7f3_e729e2a137af.slice/crio-193a1cbe43e3adecd280e46e24f61ebac9e4557732cd058116adf6c2c323f8e3 WatchSource:0}: Error finding container 193a1cbe43e3adecd280e46e24f61ebac9e4557732cd058116adf6c2c323f8e3: Status 404 returned error can't find the container with id 193a1cbe43e3adecd280e46e24f61ebac9e4557732cd058116adf6c2c323f8e3 Jan 20 16:42:04 crc kubenswrapper[4558]: W0120 16:42:04.927986 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0f90cb04_2e7a_4ee8_83fc_d6c0ee1702a4.slice/crio-404f9362b7db6a1ff07c9b4f9698c95f790a1178361bd2ffb814d9f9752d8585 WatchSource:0}: Error finding container 404f9362b7db6a1ff07c9b4f9698c95f790a1178361bd2ffb814d9f9752d8585: Status 404 returned error can't find the container with id 404f9362b7db6a1ff07c9b4f9698c95f790a1178361bd2ffb814d9f9752d8585 Jan 20 16:42:05 crc kubenswrapper[4558]: I0120 16:42:05.192135 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 20 16:42:05 crc kubenswrapper[4558]: E0120 16:42:05.192304 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2026-01-20 16:42:06.192277954 +0000 UTC m=+19.952615921 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:42:05 crc kubenswrapper[4558]: I0120 16:42:05.293544 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:42:05 crc kubenswrapper[4558]: I0120 16:42:05.293603 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 20 16:42:05 crc kubenswrapper[4558]: I0120 16:42:05.293636 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 20 16:42:05 crc kubenswrapper[4558]: E0120 16:42:05.293645 4558 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 20 16:42:05 crc kubenswrapper[4558]: I0120 16:42:05.293655 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:42:05 crc kubenswrapper[4558]: E0120 16:42:05.293698 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-20 16:42:06.293683053 +0000 UTC m=+20.054021020 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 20 16:42:05 crc kubenswrapper[4558]: E0120 16:42:05.293724 4558 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 20 16:42:05 crc kubenswrapper[4558]: E0120 16:42:05.293740 4558 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 20 16:42:05 crc kubenswrapper[4558]: E0120 16:42:05.293757 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-20 16:42:06.293749408 +0000 UTC m=+20.054087375 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 20 16:42:05 crc kubenswrapper[4558]: E0120 16:42:05.293760 4558 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 20 16:42:05 crc kubenswrapper[4558]: E0120 16:42:05.293773 4558 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 20 16:42:05 crc kubenswrapper[4558]: E0120 16:42:05.293801 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-20 16:42:06.29379263 +0000 UTC m=+20.054130597 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 20 16:42:05 crc kubenswrapper[4558]: E0120 16:42:05.293909 4558 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 20 16:42:05 crc kubenswrapper[4558]: E0120 16:42:05.293967 4558 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 20 16:42:05 crc kubenswrapper[4558]: E0120 16:42:05.293982 4558 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 20 16:42:05 crc kubenswrapper[4558]: E0120 16:42:05.294062 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-20 16:42:06.294042048 +0000 UTC m=+20.054380015 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 20 16:42:05 crc kubenswrapper[4558]: I0120 16:42:05.540399 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-10 08:52:55.932326255 +0000 UTC Jan 20 16:42:05 crc kubenswrapper[4558]: I0120 16:42:05.631976 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"e190c24c1d8a3bc0241334598ce23829ada9afee282d49a0c82cd67ad82bcd02"} Jan 20 16:42:05 crc kubenswrapper[4558]: I0120 16:42:05.632031 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"bef61d31e1bf98fed51ac00edfb7f088605a8af7e0801e1028e29930d6d782c9"} Jan 20 16:42:05 crc kubenswrapper[4558]: I0120 16:42:05.633788 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-jsqvf" event={"ID":"bedf08c7-1f93-4931-a7f3-e729e2a137af","Type":"ContainerStarted","Data":"9e32b76495d88d356373f9389e163a2f3a72e202bb454c7015594c1f5a41b511"} Jan 20 16:42:05 crc kubenswrapper[4558]: I0120 16:42:05.633816 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-jsqvf" 
event={"ID":"bedf08c7-1f93-4931-a7f3-e729e2a137af","Type":"ContainerStarted","Data":"193a1cbe43e3adecd280e46e24f61ebac9e4557732cd058116adf6c2c323f8e3"} Jan 20 16:42:05 crc kubenswrapper[4558]: I0120 16:42:05.635270 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-d96bp" event={"ID":"5c8d2c8b-fb8e-4bff-b8d2-69570e5d9e57","Type":"ContainerStarted","Data":"fe858db22acc96b062f1bc7941e05c823f30d1f9ba50bd497c57f38d57e04293"} Jan 20 16:42:05 crc kubenswrapper[4558]: I0120 16:42:05.635309 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-d96bp" event={"ID":"5c8d2c8b-fb8e-4bff-b8d2-69570e5d9e57","Type":"ContainerStarted","Data":"2c44ba2baa5a79356e24e9f6fa8d65050bd67f69c551978cc10a5757d81efa32"} Jan 20 16:42:05 crc kubenswrapper[4558]: I0120 16:42:05.636763 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"f904dd3e4b43e51569b6532f00fa2f9edf912c1f1969ee2ea00c99d1b7e35a41"} Jan 20 16:42:05 crc kubenswrapper[4558]: I0120 16:42:05.636790 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"789c877f4a456b3ce64bdbe6fefd50f1ef3f92bf9e26e6296005cf366e0ce265"} Jan 20 16:42:05 crc kubenswrapper[4558]: I0120 16:42:05.636801 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"f4283834d26d0f9db0c512fdbcd656c9d4553a9ecc416e480099508b6960c78a"} Jan 20 16:42:05 crc kubenswrapper[4558]: I0120 16:42:05.638044 4558 generic.go:334] "Generic (PLEG): container finished" podID="0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4" containerID="fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab" exitCode=0 Jan 20 16:42:05 crc kubenswrapper[4558]: I0120 16:42:05.638070 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-f5t7h" event={"ID":"0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4","Type":"ContainerDied","Data":"fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab"} Jan 20 16:42:05 crc kubenswrapper[4558]: I0120 16:42:05.638099 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-f5t7h" event={"ID":"0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4","Type":"ContainerStarted","Data":"404f9362b7db6a1ff07c9b4f9698c95f790a1178361bd2ffb814d9f9752d8585"} Jan 20 16:42:05 crc kubenswrapper[4558]: I0120 16:42:05.641835 4558 generic.go:334] "Generic (PLEG): container finished" podID="254129cd-82fc-4162-b671-2434bc9e2972" containerID="aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e" exitCode=0 Jan 20 16:42:05 crc kubenswrapper[4558]: I0120 16:42:05.642440 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" event={"ID":"254129cd-82fc-4162-b671-2434bc9e2972","Type":"ContainerDied","Data":"aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e"} Jan 20 16:42:05 crc kubenswrapper[4558]: I0120 16:42:05.642485 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" 
event={"ID":"254129cd-82fc-4162-b671-2434bc9e2972","Type":"ContainerStarted","Data":"93769c5fb7661e2ef34ef9538594b4c32ba73d941226786897ed3ff239b9b8f5"} Jan 20 16:42:05 crc kubenswrapper[4558]: I0120 16:42:05.644635 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:05Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:05 crc kubenswrapper[4558]: I0120 16:42:05.645705 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerStarted","Data":"de7f7c23993a57d30f08686e1cd4abb5b17e108ac12dd0c7929a31574c69eeb1"} Jan 20 16:42:05 crc kubenswrapper[4558]: I0120 16:42:05.645731 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerStarted","Data":"b18a59ecb802507e0d5988eacc915fd7e0d6954518de80f61162c97f680fd8c1"} Jan 20 16:42:05 crc kubenswrapper[4558]: I0120 16:42:05.645741 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerStarted","Data":"018da4592784da5b6b8293e5579775808cbd3d3399533f46b46e921a1768b60f"} Jan 20 16:42:05 crc kubenswrapper[4558]: I0120 16:42:05.647903 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Jan 20 16:42:05 crc kubenswrapper[4558]: I0120 16:42:05.649222 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"b6438ffea42f98178c4c2c21ae01725a8b54d6a6f790f47d64faf5ecd9f1c00f"} Jan 20 16:42:05 crc kubenswrapper[4558]: I0120 16:42:05.649787 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 20 16:42:05 crc kubenswrapper[4558]: I0120 16:42:05.650641 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"9586c805339ad582856773e9889f4ce5027cd1ea7d1192590e96331b05c5e42a"} Jan 20 16:42:05 crc kubenswrapper[4558]: I0120 16:42:05.654596 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:05Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:05 crc kubenswrapper[4558]: I0120 16:42:05.665395 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jsqvf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bedf08c7-1f93-4931-a7f3-e729e2a137af\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xxtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jsqvf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:05Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:05 crc kubenswrapper[4558]: I0120 16:42:05.679321 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"425eab90-fb70-4498-a98b-b95742033890\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48592e72d91cf527296f06e52fec1c16c1da21323c0644659945af109a74ebb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7e18f59b5e6b6c0be141b7efb3ccb1df4ff6e3c394bcc88fb1142c8df433493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c889e2b0e14244ef49ec057e3d5fd91687e335a983fdc99a6816abd2af643ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d434793571cbba089cc34029d951bee9063aca5
8728b791f2d1af2d68229f778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89f7ad577df11c509855a90ac8791da6de3f0d2857a1ca56726a6327f40455bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15eddb9054021\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15eddb9054021\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:05Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:05 crc kubenswrapper[4558]: I0120 16:42:05.689708 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30e1ff9d-8dcd-4754-9a7a-c09598fb83db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ea68e442bb2143fbaa5e2c9fe335c6b023a31c2eeaa78d60de45c6ef40c2894\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a8122c3b57ac4f0026df149fe622450d07d8b0d517ad018decd7b21d8d9c04a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7444e8fea1853bae45dbd3d52d07e3f7b94314cd29fe8c99891e515a892e569e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f2e22f1d4e87dd06a67f49aedad280da48ac1c4ba2e438a4925cf8bd5330c4d8\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2e22f1d4e87dd06a67f49aedad280da48ac1c4ba2e438a4925cf8bd5330c4d8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-20
T16:42:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0120 16:41:58.859734 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0120 16:41:58.861094 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179835226/tls.crt::/tmp/serving-cert-2179835226/tls.key\\\\\\\"\\\\nI0120 16:42:04.205247 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0120 16:42:04.207291 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0120 16:42:04.207308 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0120 16:42:04.207329 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0120 16:42:04.207333 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0120 16:42:04.210615 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0120 16:42:04.210632 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0120 16:42:04.210641 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210648 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0120 16:42:04.210655 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0120 16:42:04.210658 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0120 16:42:04.210660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0120 16:42:04.211964 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4850bde1fb538e21fc91949dc584ef0a791d718844691a44559e0513bb36203\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:05Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:05 crc kubenswrapper[4558]: I0120 16:42:05.717323 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"68337d27-3fa6-4a29-88b0-82e60c3739eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:05Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:05 crc kubenswrapper[4558]: I0120 16:42:05.734125 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e190c24c1d8a3bc0241334598ce23829ada9afee282d49a0c82cd67ad82bcd02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:05Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:05 crc kubenswrapper[4558]: I0120 16:42:05.747845 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:05Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:05 crc kubenswrapper[4558]: I0120 16:42:05.757207 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:05Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:05 crc kubenswrapper[4558]: I0120 16:42:05.765730 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:05Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:05 crc kubenswrapper[4558]: I0120 16:42:05.772314 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d96bp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8d2c8b-fb8e-4bff-b8d2-69570e5d9e57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4v6jx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d96bp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:05Z is 
after 2025-08-24T17:21:41Z" Jan 20 16:42:05 crc kubenswrapper[4558]: I0120 16:42:05.787322 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"254129cd-82fc-4162-b671-2434bc9e2972\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\
\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath
\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}
],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-nv2xw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:05Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:05 crc kubenswrapper[4558]: I0120 16:42:05.797350 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f5t7h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f5t7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:05Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:05 crc kubenswrapper[4558]: I0120 16:42:05.805760 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f904dd3e4b43e51569b6532f00fa2f9edf912c1f1969ee2ea00c99d1b7e35a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c877f4a456b3ce64bdbe6fefd50f1ef3f92bf9e26e6296005cf366e0ce265\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:05Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:05 crc kubenswrapper[4558]: I0120 16:42:05.813370 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:05Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:05 crc kubenswrapper[4558]: I0120 16:42:05.822999 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jsqvf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bedf08c7-1f93-4931-a7f3-e729e2a137af\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e32b76495d88d356373f9389e163a2f3a72e202bb454c7015594c1f5a41b511\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xxtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jsqvf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:05Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:05 crc kubenswrapper[4558]: I0120 16:42:05.838773 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"425eab90-fb70-4498-a98b-b95742033890\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48592e72d91cf527296f06e52fec1c16c1da21323c0644659945af109a74ebb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7e18f59b5e6b6c0be141b7efb3ccb1df4ff6e3c394bcc88fb1142c8df433493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c889e2b0e14244ef49ec057e3d5fd91687e335a983fdc99a6816abd2af643ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d434793571cbba089cc34029d951bee9063aca58728b791f2d1af2d68229f778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89f7ad577df11c509855a90ac8791da6de3f0d2857a1ca56726a6327f40455bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15eddb9054021\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15eddb9054021\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:05Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:05 crc kubenswrapper[4558]: I0120 16:42:05.851384 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30e1ff9d-8dcd-4754-9a7a-c09598fb83db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ea68e442bb2143fbaa5e2c9fe335c6b023a31c2eeaa78d60de45c6ef40c2894\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a8122c3b57ac4f0026df149fe622450d07d8b0d517ad018decd7b21d8d9c04a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7444e8fea1853bae45dbd3d52d07e3f7b94314cd29fe8c99891e515a892e569e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6438ffea42f98178c4c2c21ae01725a8b54d6a6f790f47d64faf5ecd9f1c00f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2e22f1d4e87dd06a67f49aedad280da48ac1c4ba2e438a4925cf8bd5330c4d8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0120 16:41:58.859734 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0120 16:41:58.861094 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179835226/tls.crt::/tmp/serving-cert-2179835226/tls.key\\\\\\\"\\\\nI0120 16:42:04.205247 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0120 16:42:04.207291 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0120 16:42:04.207308 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0120 16:42:04.207329 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0120 16:42:04.207333 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0120 16:42:04.210615 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0120 16:42:04.210632 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0120 16:42:04.210641 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210648 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0120 16:42:04.210655 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0120 16:42:04.210658 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0120 16:42:04.210660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0120 16:42:04.211964 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4850bde1fb538e21fc91949dc584ef0a791d718844691a44559e0513bb36203\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:05Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:05 crc kubenswrapper[4558]: I0120 16:42:05.862249 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"68337d27-3fa6-4a29-88b0-82e60c3739eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de7f7c23993a57d30f08686e1cd4abb5b17e108ac12dd0c7929a31574c69eeb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18a59ecb802507e0d5988eacc915fd7e0d6954518de80f61162c97f680fd8c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:05Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:05 crc kubenswrapper[4558]: I0120 16:42:05.874727 4558 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e190c24c1d8a3bc0241334598ce23829ada9afee282d49a0c82cd67ad82bcd02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:05Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:05 crc kubenswrapper[4558]: I0120 16:42:05.889557 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:05Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:05 crc kubenswrapper[4558]: I0120 16:42:05.901029 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:05Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:05 crc kubenswrapper[4558]: I0120 16:42:05.912123 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:05Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:05 crc kubenswrapper[4558]: I0120 16:42:05.921416 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d96bp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8d2c8b-fb8e-4bff-b8d2-69570e5d9e57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe858db22acc96b062f1bc7941e05c823f30d1f9ba50bd497c57f38d57e04293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4v6jx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d96bp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-20T16:42:05Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:05 crc kubenswrapper[4558]: I0120 16:42:05.943448 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"254129cd-82fc-4162-b671-2434bc9e2972\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\
"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\"
,\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013bb70c1
18f35c3a16eb4d136df7b0c48db9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-nv2xw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:05Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:05 crc kubenswrapper[4558]: I0120 16:42:05.955470 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f5t7h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f5t7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:05Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:06 crc 
kubenswrapper[4558]: I0120 16:42:06.202422 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 20 16:42:06 crc kubenswrapper[4558]: E0120 16:42:06.202627 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-20 16:42:08.202611057 +0000 UTC m=+21.962949024 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.303656 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.303700 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.303732 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.303753 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:42:06 crc kubenswrapper[4558]: E0120 16:42:06.303842 4558 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 20 16:42:06 crc kubenswrapper[4558]: E0120 16:42:06.303887 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. 
No retries permitted until 2026-01-20 16:42:08.303873067 +0000 UTC m=+22.064211034 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 20 16:42:06 crc kubenswrapper[4558]: E0120 16:42:06.303901 4558 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 20 16:42:06 crc kubenswrapper[4558]: E0120 16:42:06.303931 4558 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 20 16:42:06 crc kubenswrapper[4558]: E0120 16:42:06.303943 4558 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 20 16:42:06 crc kubenswrapper[4558]: E0120 16:42:06.303994 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-20 16:42:08.303977794 +0000 UTC m=+22.064315761 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 20 16:42:06 crc kubenswrapper[4558]: E0120 16:42:06.304071 4558 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 20 16:42:06 crc kubenswrapper[4558]: E0120 16:42:06.304081 4558 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 20 16:42:06 crc kubenswrapper[4558]: E0120 16:42:06.304088 4558 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 20 16:42:06 crc kubenswrapper[4558]: E0120 16:42:06.304114 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-20 16:42:08.304106225 +0000 UTC m=+22.064444193 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 20 16:42:06 crc kubenswrapper[4558]: E0120 16:42:06.304299 4558 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 20 16:42:06 crc kubenswrapper[4558]: E0120 16:42:06.304425 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-20 16:42:08.30440686 +0000 UTC m=+22.064744828 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.452193 4558 transport.go:147] "Certificate rotation detected, shutting down client connections to start using new credentials" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.540610 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-05 09:52:12.669140076 +0000 UTC Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.565038 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.565078 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 20 16:42:06 crc kubenswrapper[4558]: E0120 16:42:06.565406 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 20 16:42:06 crc kubenswrapper[4558]: E0120 16:42:06.565406 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.565145 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:42:06 crc kubenswrapper[4558]: E0120 16:42:06.565476 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.568131 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.568759 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.569374 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.569920 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.570476 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.571131 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.571681 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.572353 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.572928 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.573396 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.573857 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.574452 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" 
path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.574920 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.575427 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.575897 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.576267 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:06Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.576366 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.576855 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.578760 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.579435 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.580021 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.580509 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.581046 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.581475 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.582921 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.583549 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.584645 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod 
volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.585350 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.585803 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.586574 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d96bp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8d2c8b-fb8e-4bff-b8d2-69570e5d9e57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe858db22acc96b062f1bc7941e05c823f30d1f9ba50bd497c57f38d57e04293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4v6jx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d96bp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:06Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.586769 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Jan 20 16:42:06 
crc kubenswrapper[4558]: I0120 16:42:06.587240 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.588052 4558 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.588186 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.589756 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.590639 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.591041 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.592517 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.593109 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.594493 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.595206 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.596448 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.597094 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.598207 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.599353 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Jan 20 16:42:06 crc 
kubenswrapper[4558]: I0120 16:42:06.600102 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.600701 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.601561 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"254129cd-82fc-4162-b671-2434bc9e2972\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-nv2xw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:06Z 
is after 2025-08-24T17:21:41Z" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.601698 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.602614 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.603282 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.603780 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.604799 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.605247 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.606099 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.606635 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.607062 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.614523 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f5t7h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f5t7h\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:06Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.624670 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f904dd3e4b43e51569b6532f00fa2f9edf912c1f1969ee2ea00c99d1b7e35a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c877f4a456b3ce64bdbe6fefd50f1ef3f92bf9e26e6296005cf366e0ce265\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:06Z is after 
2025-08-24T17:21:41Z" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.633449 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:06Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.643668 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jsqvf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bedf08c7-1f93-4931-a7f3-e729e2a137af\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e32b76495d88d356373f9389e163a2f3a72e202bb454c7015594c1f5a41b511\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xxtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jsqvf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:06Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.654505 4558 generic.go:334] "Generic (PLEG): container finished" podID="0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4" containerID="e77617c69434ce20c448b525c7e86f1ece18d51ce6e14167b4ca7e99de8e5709" exitCode=0 Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.654589 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-f5t7h" event={"ID":"0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4","Type":"ContainerDied","Data":"e77617c69434ce20c448b525c7e86f1ece18d51ce6e14167b4ca7e99de8e5709"} Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.657778 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" event={"ID":"254129cd-82fc-4162-b671-2434bc9e2972","Type":"ContainerStarted","Data":"4264ef8ee49a8b172fb05ac5c9b0bd32350b5d9ae95293079dfeaf44f021b455"} Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.657808 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" event={"ID":"254129cd-82fc-4162-b671-2434bc9e2972","Type":"ContainerStarted","Data":"0889cf371f8b06cc2d255c15fda97db1d5000d6f6cff29d2fdcb8667cede8a99"} Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.657820 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" event={"ID":"254129cd-82fc-4162-b671-2434bc9e2972","Type":"ContainerStarted","Data":"cd8b93e5559d95511a1285a9aec9a7d72df6c330bcd1e1daf3e93f5494f70eeb"} Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.657830 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" event={"ID":"254129cd-82fc-4162-b671-2434bc9e2972","Type":"ContainerStarted","Data":"1a6a184067bde115ef5e09fdd90e7dccfe89a9dc347af450998b91c068e7e803"} Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.657838 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" event={"ID":"254129cd-82fc-4162-b671-2434bc9e2972","Type":"ContainerStarted","Data":"e4265d3281e954ee5989ee008da28c6cb9fa29478e5fdf23325521d455304da9"} Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.657846 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" event={"ID":"254129cd-82fc-4162-b671-2434bc9e2972","Type":"ContainerStarted","Data":"186a1d2caad8865abf7f565405c4a4c077038ccb67ba77927cc2392ec075a811"} Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.659812 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"425eab90-fb70-4498-a98b-b95742033890\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48592e72d91cf527296f06e52fec1c16c1da21323c0644659945af109a74ebb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7e18f59b5e6b6c0be141b7efb3ccb1df4ff6e3c394bcc88fb1142c8df433493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c889e2b0e14244ef49ec057e3d5fd91687e335a983fdc99a6816abd2af643ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d434793571cbba089cc34029d951bee9063aca5
8728b791f2d1af2d68229f778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89f7ad577df11c509855a90ac8791da6de3f0d2857a1ca56726a6327f40455bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15eddb9054021\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15eddb9054021\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:06Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.671339 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30e1ff9d-8dcd-4754-9a7a-c09598fb83db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ea68e442bb2143fbaa5e2c9fe335c6b023a31c2eeaa78d60de45c6ef40c2894\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a8122c3b57ac4f0026df149fe622450d07d8b0d517ad018decd7b21d8d9c04a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7444e8fea1853bae45dbd3d52d07e3f7b94314cd29fe8c99891e515a892e569e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6438ffea42f98178c4c2c21ae01725a8b54d6a6f790f47d64faf5ecd9f1c00f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2e22f1d4e87dd06a67f49aedad280da48ac1c4ba2e438a4925cf8bd5330c4d8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0120 16:41:58.859734 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0120 16:41:58.861094 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179835226/tls.crt::/tmp/serving-cert-2179835226/tls.key\\\\\\\"\\\\nI0120 16:42:04.205247 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0120 16:42:04.207291 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0120 16:42:04.207308 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0120 16:42:04.207329 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0120 16:42:04.207333 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0120 16:42:04.210615 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0120 16:42:04.210632 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0120 16:42:04.210641 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210648 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0120 16:42:04.210655 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0120 16:42:04.210658 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0120 16:42:04.210660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0120 16:42:04.211964 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4850bde1fb538e21fc91949dc584ef0a791d718844691a44559e0513bb36203\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:06Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.680052 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"68337d27-3fa6-4a29-88b0-82e60c3739eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de7f7c23993a57d30f08686e1cd4abb5b17e108ac12dd0c7929a31574c69eeb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18a59ecb802507e0d5988eacc915fd7e0d6954518de80f61162c97f680fd8c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:06Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.688878 4558 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e190c24c1d8a3bc0241334598ce23829ada9afee282d49a0c82cd67ad82bcd02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:06Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.698393 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:06Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.707377 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:06Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.716544 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e190c24c1d8a3bc0241334598ce23829ada9afee282d49a0c82cd67ad82bcd02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:06Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.726546 4558 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:06Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.730629 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-47477"] Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.730924 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-47477" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.732491 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.732660 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.733960 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.733969 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.737372 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:06Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.746219 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:06Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.754009 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d96bp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8d2c8b-fb8e-4bff-b8d2-69570e5d9e57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe858db22acc96b062f1bc7941e05c823f30d1f9ba50bd497c57f38d57e04293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4v6jx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d96bp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-20T16:42:06Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.769776 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"254129cd-82fc-4162-b671-2434bc9e2972\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\
"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\"
,\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013bb70c1
18f35c3a16eb4d136df7b0c48db9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-nv2xw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:06Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.782760 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f5t7h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e77617c69434ce20c448b525c7e86f1ece18d51ce6e14167b4ca7e99de8e5709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e77617c69434ce20c448b525c7e86f1ece18d51ce6e14167b4ca7e99de8e5709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-
20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f5t7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:06Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.794800 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f904dd3e4b43e51569b6532f00fa2f9edf912c1f1969ee2ea00c99d1b7e35a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c877f4a456b3ce64bdbe6fefd50f1ef3f92bf9e26e6296005cf366e0ce265\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: 
failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:06Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.804145 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:06Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.809754 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e026445a-aa73-4a0e-ba35-e2e07de1278c-host\") pod \"node-ca-47477\" (UID: \"e026445a-aa73-4a0e-ba35-e2e07de1278c\") " pod="openshift-image-registry/node-ca-47477" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.809803 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xcnwl\" (UniqueName: \"kubernetes.io/projected/e026445a-aa73-4a0e-ba35-e2e07de1278c-kube-api-access-xcnwl\") pod \"node-ca-47477\" (UID: \"e026445a-aa73-4a0e-ba35-e2e07de1278c\") " pod="openshift-image-registry/node-ca-47477" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.809832 4558 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/e026445a-aa73-4a0e-ba35-e2e07de1278c-serviceca\") pod \"node-ca-47477\" (UID: \"e026445a-aa73-4a0e-ba35-e2e07de1278c\") " pod="openshift-image-registry/node-ca-47477" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.814191 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jsqvf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bedf08c7-1f93-4931-a7f3-e729e2a137af\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e32b76495d88d356373f9389e163a2f3a72e202bb454c7015594c1f5a41b511\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\
":\\\"kube-api-access-xxtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jsqvf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:06Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.828377 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"425eab90-fb70-4498-a98b-b95742033890\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48592e72d91cf527296f06e52fec1c16c1da21323c0644659945af109a74ebb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7e18f59b5e6b6c0be141b7efb3ccb1df4ff6e3c394bcc88fb1142c8df433493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mou
ntPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c889e2b0e14244ef49ec057e3d5fd91687e335a983fdc99a6816abd2af643ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d434793571cbba089cc34029d951bee9063aca58728b791f2d1af2d68229f778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89f7ad577df11c509855a90ac8791da6de3f0d2857a1ca56726a6327f40455bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-d
ir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15eddb9054021\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15eddb9054021\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:06Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.837803 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30e1ff9d-8dcd-4754-9a7a-c09598fb83db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ea68e442bb2143fbaa5e2c9fe335c6b023a31c2eeaa78d60de45c6ef40c2894\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a8122c3b57ac4f0026df149fe622450d07d8b0d517ad018decd7b21d8d9c04a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7444e8fea1853bae45dbd3d52d07e3f7b94314cd29fe8c99891e515a892e569e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6438ffea42f98178c4c2c21ae01725a8b54d6a6f790f47d64faf5ecd9f1c00f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2e22f1d4e87dd06a67f49aedad280da48ac1c4ba2e438a4925cf8bd5330c4d8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0120 16:41:58.859734 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0120 16:41:58.861094 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179835226/tls.crt::/tmp/serving-cert-2179835226/tls.key\\\\\\\"\\\\nI0120 16:42:04.205247 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0120 16:42:04.207291 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0120 16:42:04.207308 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0120 16:42:04.207329 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0120 16:42:04.207333 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0120 16:42:04.210615 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0120 16:42:04.210632 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0120 16:42:04.210641 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210648 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0120 16:42:04.210655 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0120 16:42:04.210658 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0120 16:42:04.210660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0120 16:42:04.211964 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4850bde1fb538e21fc91949dc584ef0a791d718844691a44559e0513bb36203\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:06Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.845750 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"68337d27-3fa6-4a29-88b0-82e60c3739eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de7f7c23993a57d30f08686e1cd4abb5b17e108ac12dd0c7929a31574c69eeb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18a59ecb802507e0d5988eacc915fd7e0d6954518de80f61162c97f680fd8c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:06Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.854445 4558 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"68337d27-3fa6-4a29-88b0-82e60c3739eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de7f7c23993a57d30f08686e1cd4abb5b17e108ac12dd0c7929a31574c69eeb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18a59ecb802507e0d5988eacc915fd7e0d6954518de80f61162c97f680fd8c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:06Z is after 
2025-08-24T17:21:41Z" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.867988 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"425eab90-fb70-4498-a98b-b95742033890\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48592e72d91cf527296f06e52fec1c16c1da21323c0644659945af109a74ebb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7e18f59b5e6b6c0be141b7efb3ccb1df4ff6e3c394bcc88fb1142c8df433493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c889e2b0e14244ef49ec057e3d5fd91687e335a983fdc99a6816abd2af643ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/
etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d434793571cbba089cc34029d951bee9063aca58728b791f2d1af2d68229f778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89f7ad577df11c509855a90ac8791da6de3f0d2857a1ca56726a6327f40455bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15eddb9054021\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15eddb9054021\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026
-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:06Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.877100 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30e1ff9d-8dcd-4754-9a7a-c09598fb83db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ea68e442bb2143fbaa5e2c9fe335c6b023a31c2eeaa78d60de45c6ef40c2894\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a8122c3b57ac4f0026df149fe622450d07d8b0d517ad018decd7b21d8d9c04a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7444e8fea1853bae45dbd3d52d07e3f7b94314cd29fe8c99891e515a892e569e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6438ffea42f98178c4c2c21ae01725a8b54d6a6f790f47d64faf5ecd9f1c00f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2e22f1d4e87dd06a67f49aedad280da48ac1c4ba2e438a4925cf8bd5330c4d8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0120 16:41:58.859734 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0120 16:41:58.861094 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179835226/tls.crt::/tmp/serving-cert-2179835226/tls.key\\\\\\\"\\\\nI0120 16:42:04.205247 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0120 16:42:04.207291 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0120 16:42:04.207308 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0120 16:42:04.207329 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0120 16:42:04.207333 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0120 16:42:04.210615 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0120 16:42:04.210632 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0120 16:42:04.210641 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210648 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0120 16:42:04.210655 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0120 16:42:04.210658 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0120 16:42:04.210660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0120 16:42:04.211964 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4850bde1fb538e21fc91949dc584ef0a791d718844691a44559e0513bb36203\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:06Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.890958 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:06Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.911079 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e026445a-aa73-4a0e-ba35-e2e07de1278c-host\") pod \"node-ca-47477\" (UID: \"e026445a-aa73-4a0e-ba35-e2e07de1278c\") " pod="openshift-image-registry/node-ca-47477" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.911118 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xcnwl\" (UniqueName: \"kubernetes.io/projected/e026445a-aa73-4a0e-ba35-e2e07de1278c-kube-api-access-xcnwl\") pod \"node-ca-47477\" (UID: \"e026445a-aa73-4a0e-ba35-e2e07de1278c\") " pod="openshift-image-registry/node-ca-47477" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.911139 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/e026445a-aa73-4a0e-ba35-e2e07de1278c-serviceca\") pod \"node-ca-47477\" (UID: \"e026445a-aa73-4a0e-ba35-e2e07de1278c\") " pod="openshift-image-registry/node-ca-47477" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.911215 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e026445a-aa73-4a0e-ba35-e2e07de1278c-host\") pod \"node-ca-47477\" (UID: \"e026445a-aa73-4a0e-ba35-e2e07de1278c\") " pod="openshift-image-registry/node-ca-47477" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.912040 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/e026445a-aa73-4a0e-ba35-e2e07de1278c-serviceca\") pod \"node-ca-47477\" (UID: \"e026445a-aa73-4a0e-ba35-e2e07de1278c\") " 
pod="openshift-image-registry/node-ca-47477" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.924670 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e190c24c1d8a3bc0241334598ce23829ada9afee282d49a0c82cd67ad82bcd02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:06Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.925699 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xcnwl\" (UniqueName: \"kubernetes.io/projected/e026445a-aa73-4a0e-ba35-e2e07de1278c-kube-api-access-xcnwl\") pod \"node-ca-47477\" (UID: \"e026445a-aa73-4a0e-ba35-e2e07de1278c\") " pod="openshift-image-registry/node-ca-47477" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.933926 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:06Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.944033 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f5t7h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e77617c69434ce20c448b525c7e86f1ece18d51ce6e14167b4ca7e99de8e5709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e77617c69434ce20c448b525c7e86f1ece18d51ce6e14167b4ca7e99de8e5709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-
20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f5t7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:06Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.952252 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:06Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.958994 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d96bp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8d2c8b-fb8e-4bff-b8d2-69570e5d9e57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe858db22acc96b062f1bc7941e05c823f30d1f9ba50bd497c57f38d57e04293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4v6jx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d96bp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-20T16:42:06Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.970899 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"254129cd-82fc-4162-b671-2434bc9e2972\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\
"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\"
,\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013bb70c1
18f35c3a16eb4d136df7b0c48db9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-nv2xw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:06Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.980056 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jsqvf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bedf08c7-1f93-4931-a7f3-e729e2a137af\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e32b76495d88d356373f9389e163a2f3a72e202bb454c7015594c1f5a41b511\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/r
un/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xxtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jsqvf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:06Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.987797 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-47477" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e026445a-aa73-4a0e-ba35-e2e07de1278c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xcnwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-47477\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:06Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:06 crc kubenswrapper[4558]: I0120 16:42:06.995931 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f904dd3e4b43e51569b6532f00fa2f9edf912c1f1969ee2ea00c99d1b7e35a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c877f4a456b3ce64bdbe6fefd50f1ef3f92bf9e26e6296005cf366e0ce265\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc
32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:06Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.004849 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:07Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.042494 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-47477" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.169934 4558 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.171438 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.171470 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.171479 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.171562 4558 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.177202 4558 kubelet_node_status.go:115] "Node was previously registered" node="crc" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.177379 4558 kubelet_node_status.go:79] "Successfully registered node" node="crc" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.178489 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.178526 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.178550 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.178566 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.178591 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:07Z","lastTransitionTime":"2026-01-20T16:42:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:07 crc kubenswrapper[4558]: E0120 16:42:07.191596 4558 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"bf027402-7a62-478b-8585-220c98495823\\\",\\\"systemUUID\\\":\\\"1def282c-e24e-4bc0-9a1b-d7cc30756d3d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:07Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.194400 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.194430 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.194438 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.194450 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.194457 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:07Z","lastTransitionTime":"2026-01-20T16:42:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:07 crc kubenswrapper[4558]: E0120 16:42:07.202981 4558 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"bf027402-7a62-478b-8585-220c98495823\\\",\\\"systemUUID\\\":\\\"1def282c-e24e-4bc0-9a1b-d7cc30756d3d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:07Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.206188 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.206214 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.206222 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.206233 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.206241 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:07Z","lastTransitionTime":"2026-01-20T16:42:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:07 crc kubenswrapper[4558]: E0120 16:42:07.214277 4558 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"bf027402-7a62-478b-8585-220c98495823\\\",\\\"systemUUID\\\":\\\"1def282c-e24e-4bc0-9a1b-d7cc30756d3d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:07Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.216861 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.216897 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.216905 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.216920 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.216928 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:07Z","lastTransitionTime":"2026-01-20T16:42:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:07 crc kubenswrapper[4558]: E0120 16:42:07.228353 4558 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"bf027402-7a62-478b-8585-220c98495823\\\",\\\"systemUUID\\\":\\\"1def282c-e24e-4bc0-9a1b-d7cc30756d3d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:07Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.231499 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.231530 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.231539 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.231551 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.231559 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:07Z","lastTransitionTime":"2026-01-20T16:42:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:07 crc kubenswrapper[4558]: E0120 16:42:07.240095 4558 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"bf027402-7a62-478b-8585-220c98495823\\\",\\\"systemUUID\\\":\\\"1def282c-e24e-4bc0-9a1b-d7cc30756d3d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:07Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:07 crc kubenswrapper[4558]: E0120 16:42:07.240249 4558 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.241329 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.241358 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.241368 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.241380 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.241389 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:07Z","lastTransitionTime":"2026-01-20T16:42:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.342922 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.342955 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.342964 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.342976 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.342984 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:07Z","lastTransitionTime":"2026-01-20T16:42:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.444860 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.444891 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.444901 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.444913 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.444922 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:07Z","lastTransitionTime":"2026-01-20T16:42:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.541713 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-13 00:05:51.09804416 +0000 UTC Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.547406 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.547434 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.547442 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.547456 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.547465 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:07Z","lastTransitionTime":"2026-01-20T16:42:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.649460 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.649492 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.649502 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.649514 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.649523 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:07Z","lastTransitionTime":"2026-01-20T16:42:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.661124 4558 generic.go:334] "Generic (PLEG): container finished" podID="0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4" containerID="453c67434281e0261ec6799b30a548a01efd72d7df96632409e745b2e6a94546" exitCode=0 Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.661187 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-f5t7h" event={"ID":"0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4","Type":"ContainerDied","Data":"453c67434281e0261ec6799b30a548a01efd72d7df96632409e745b2e6a94546"} Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.662946 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"3e45cc56934a7b90e9d47f1c6d06d8dc140d57db4a216469e8ae112f398663e7"} Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.664529 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-47477" event={"ID":"e026445a-aa73-4a0e-ba35-e2e07de1278c","Type":"ContainerStarted","Data":"ec2ee3ff161cba7e14ad26763081c9f092f0dfe969cef113631c27e0db53d157"} Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.664573 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-47477" event={"ID":"e026445a-aa73-4a0e-ba35-e2e07de1278c","Type":"ContainerStarted","Data":"0757a1a33e40278fa2ef6eb3374042759274c58643e23db6bb76bf1f1e73976c"} Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.675025 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:07Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.684821 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jsqvf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bedf08c7-1f93-4931-a7f3-e729e2a137af\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e32b76495d88d356373f9389e163a2f3a72e202bb454c7015594c1f5a41b511\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/v
ar/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xxtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jsqvf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:07Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.692013 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-47477" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e026445a-aa73-4a0e-ba35-e2e07de1278c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xcnwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-47477\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:07Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.701903 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f904dd3e4b43e51569b6532f00fa2f9edf912c1f1969ee2ea00c99d1b7e35a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c877f4a456b3ce64bdbe6fefd50f1ef3f92bf9e26e6296005cf366e0ce265\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc
32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:07Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.711307 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30e1ff9d-8dcd-4754-9a7a-c09598fb83db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ea68e442bb2143fbaa5e2c9fe335c6b023a31c2eeaa78d60de45c6ef40c2894\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a8122c3b57ac4f0026df149fe622450d07d8b0d517ad018decd7b21d8d9c04a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7444e8fea1853bae45dbd3d52d07e3f7b94314cd29fe8c99891e515a892e569e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6438ffea42f98178c4c2c21ae01725a8b54d6a6f790f47d64faf5ecd9f1c00f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2e22f1d4e87dd06a67f49aedad280da48ac1c4ba2e438a4925cf8bd5330c4d8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0120 16:41:58.859734 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0120 16:41:58.861094 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179835226/tls.crt::/tmp/serving-cert-2179835226/tls.key\\\\\\\"\\\\nI0120 16:42:04.205247 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0120 16:42:04.207291 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0120 16:42:04.207308 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0120 16:42:04.207329 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0120 16:42:04.207333 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0120 16:42:04.210615 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0120 16:42:04.210632 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0120 16:42:04.210641 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210648 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0120 16:42:04.210655 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0120 16:42:04.210658 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0120 16:42:04.210660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0120 16:42:04.211964 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4850bde1fb538e21fc91949dc584ef0a791d718844691a44559e0513bb36203\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:07Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.718895 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"68337d27-3fa6-4a29-88b0-82e60c3739eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de7f7c23993a57d30f08686e1cd4abb5b17e108ac12dd0c7929a31574c69eeb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18a59ecb802507e0d5988eacc915fd7e0d6954518de80f61162c97f680fd8c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:07Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.731389 4558 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"425eab90-fb70-4498-a98b-b95742033890\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48592e72d91cf527296f06e52fec1c16c1da21323c0644659945af109a74ebb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7e18f59b5e6b6c0be141b7efb3ccb1df4ff6e3c394bcc88fb1142c8df433493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c889e2b0e14244ef49ec057e3d5fd91687e335a983fdc99a6816abd2af643ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-di
r\\\"}]},{\\\"containerID\\\":\\\"cri-o://d434793571cbba089cc34029d951bee9063aca58728b791f2d1af2d68229f778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89f7ad577df11c509855a90ac8791da6de3f0d2857a1ca56726a6327f40455bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15eddb9054021\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15eddb9054021\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}}},{\\\"containerID\\\"
:\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:07Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.740711 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:07Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.752313 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:07Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.754093 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.754114 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.754121 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.754134 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.754142 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:07Z","lastTransitionTime":"2026-01-20T16:42:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.766152 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e190c24c1d8a3bc0241334598ce23829ada9afee282d49a0c82cd67ad82bcd02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:07Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.798600 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"254129cd-82fc-4162-b671-2434bc9e2972\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04
Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-nv2xw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:07Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.817130 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f5t7h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e77617c69434ce20c448b525c7e86f1ece18d51ce6e14167b4ca7e99de8e5709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e77617c69434ce20c448b525c7e86f1ece18d51ce6e14167b4ca7e99de8e5709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://453c67434281e0261ec6799b30a548a01efd72d7df96632409e745b2e6a94546\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://453c67434281e0261ec6799b30a548a01efd72d7df96632409e745b2e6a94546\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f5t7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:07Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.826546 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:07Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.833728 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d96bp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8d2c8b-fb8e-4bff-b8d2-69570e5d9e57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe858db22acc96b062f1bc7941e05c823f30d1f9ba50bd497c57f38d57e04293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4v6jx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d96bp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-20T16:42:07Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.847337 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"425eab90-fb70-4498-a98b-b95742033890\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48592e72d91cf527296f06e52fec1c16c1da21323c0644659945af109a74ebb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7e18f59b5e6b6c0be141b7efb3ccb1df4ff6e3c394bcc88fb1142c8df433493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c889e2b0e14244ef49ec057e3d5fd91687e335a983fdc99a6816abd2af643ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\
\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d434793571cbba089cc34029d951bee9063aca58728b791f2d1af2d68229f778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89f7ad577df11c509855a90ac8791da6de3f0d2857a1ca56726a6327f40455bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15eddb9054021\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15eddb9054021\\\",\\\"exitCode\\\"
:0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:07Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.856321 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.856354 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.856364 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.856377 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.856386 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:07Z","lastTransitionTime":"2026-01-20T16:42:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.857105 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30e1ff9d-8dcd-4754-9a7a-c09598fb83db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ea68e442bb2143fbaa5e2c9fe335c6b023a31c2eeaa78d60de45c6ef40c2894\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a8122c3b57ac4f0026df149fe622450d07d8b0d517ad018decd7b21d8d9c04a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7444e8fea1853bae45dbd3d52d07e3f7b94314cd29fe8c99891e515a892e569e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6438ffea42f98178c4c2c21ae01725a8b54d6a6f790f47d64faf5ecd9f1c00f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2e22f1d4e87dd06a67f49aedad280da48ac1c4ba2e438a4925cf8bd5330c4d8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0120 16:41:58.859734 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0120 16:41:58.861094 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179835226/tls.crt::/tmp/serving-cert-2179835226/tls.key\\\\\\\"\\\\nI0120 16:42:04.205247 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0120 16:42:04.207291 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0120 16:42:04.207308 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0120 16:42:04.207329 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0120 16:42:04.207333 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0120 16:42:04.210615 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0120 16:42:04.210632 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0120 16:42:04.210641 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210648 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0120 16:42:04.210655 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0120 16:42:04.210658 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0120 16:42:04.210660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0120 16:42:04.211964 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4850bde1fb538e21fc91949dc584ef0a791d718844691a44559e0513bb36203\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:07Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.864975 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"68337d27-3fa6-4a29-88b0-82e60c3739eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de7f7c23993a57d30f08686e1cd4abb5b17e108ac12dd0c7929a31574c69eeb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18a59ecb802507e0d5988eacc915fd7e0d6954518de80f61162c97f680fd8c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:07Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.874567 4558 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e190c24c1d8a3bc0241334598ce23829ada9afee282d49a0c82cd67ad82bcd02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:07Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.885970 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:07Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.894543 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:07Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.901503 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d96bp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8d2c8b-fb8e-4bff-b8d2-69570e5d9e57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe858db22acc96b062f1bc7941e05c823f30d1f9ba50bd497c57f38d57e04293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4v6jx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d96bp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:07Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.918057 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"254129cd-82fc-4162-b671-2434bc9e2972\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af
0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"n
ame\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIP
s\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-nv2xw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:07Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.956095 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f5t7h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e77617c69434ce20c448b525c7e86f1ece18d51ce6e14167b4ca7e99de8e5709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e77617c69434ce20c448b525c7e86f1ece18d51ce6e14167b4ca7e99de8e5709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://453c67434281e0261ec6799b30a548a01efd72d7df96632409e745b2e6a94546\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://453c67434281e0261ec6799b30a548a01efd72d7df96632409e745b2e6a94546\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f5t7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:07Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.958299 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.958327 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.958337 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.958350 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.958358 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:07Z","lastTransitionTime":"2026-01-20T16:42:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:07 crc kubenswrapper[4558]: I0120 16:42:07.994473 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:07Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.034693 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f904dd3e4b43e51569b6532f00fa2f9edf912c1f1969ee2ea00c99d1b7e35a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c877f4a456b3ce64bdbe6fefd50f1ef3f92bf9e26e6296005cf366e0ce265\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:08Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.060008 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.060044 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.060054 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.060067 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.060075 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:08Z","lastTransitionTime":"2026-01-20T16:42:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.073524 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e45cc56934a7b90e9d47f1c6d06d8dc140d57db4a216469e8ae112f398663e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:08Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.115921 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jsqvf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bedf08c7-1f93-4931-a7f3-e729e2a137af\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e32b76495d88d356373f9389e163a2f3a72e202bb454c7015594c1f5a41b511\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xxtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jsqvf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:08Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.151870 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-47477" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e026445a-aa73-4a0e-ba35-e2e07de1278c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec2ee3ff161cba7e14ad26763081c9f092f0dfe969cef113631c27e0db53d157\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xcnwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-47477\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:08Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.162152 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.162205 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.162214 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:08 crc 
kubenswrapper[4558]: I0120 16:42:08.162225 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.162235 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:08Z","lastTransitionTime":"2026-01-20T16:42:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.223763 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 20 16:42:08 crc kubenswrapper[4558]: E0120 16:42:08.223918 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-20 16:42:12.22389232 +0000 UTC m=+25.984230297 (durationBeforeRetry 4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.264203 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.264234 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.264242 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.264256 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.264264 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:08Z","lastTransitionTime":"2026-01-20T16:42:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.324324 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.324364 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.324390 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.324414 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 20 16:42:08 crc kubenswrapper[4558]: E0120 16:42:08.324497 4558 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 20 16:42:08 crc kubenswrapper[4558]: E0120 16:42:08.324516 4558 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 20 16:42:08 crc kubenswrapper[4558]: E0120 16:42:08.324523 4558 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 20 16:42:08 crc kubenswrapper[4558]: E0120 16:42:08.324544 4558 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 20 16:42:08 crc kubenswrapper[4558]: E0120 16:42:08.324554 4558 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 20 16:42:08 crc kubenswrapper[4558]: E0120 16:42:08.324520 4558 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 20 16:42:08 crc kubenswrapper[4558]: E0120 16:42:08.324605 4558 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object 
"openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 20 16:42:08 crc kubenswrapper[4558]: E0120 16:42:08.324556 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-20 16:42:12.324539504 +0000 UTC m=+26.084877471 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 20 16:42:08 crc kubenswrapper[4558]: E0120 16:42:08.324612 4558 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 20 16:42:08 crc kubenswrapper[4558]: E0120 16:42:08.324651 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-20 16:42:12.324632479 +0000 UTC m=+26.084970446 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 20 16:42:08 crc kubenswrapper[4558]: E0120 16:42:08.324666 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-20 16:42:12.324660331 +0000 UTC m=+26.084998299 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 20 16:42:08 crc kubenswrapper[4558]: E0120 16:42:08.324679 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-20 16:42:12.324673566 +0000 UTC m=+26.085011533 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.366001 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.366032 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.366041 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.366053 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.366061 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:08Z","lastTransitionTime":"2026-01-20T16:42:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.468144 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.468199 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.468210 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.468224 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.468233 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:08Z","lastTransitionTime":"2026-01-20T16:42:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.542646 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-17 20:54:05.620012901 +0000 UTC Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.565235 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.565280 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.565339 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 20 16:42:08 crc kubenswrapper[4558]: E0120 16:42:08.565348 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 20 16:42:08 crc kubenswrapper[4558]: E0120 16:42:08.565421 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 20 16:42:08 crc kubenswrapper[4558]: E0120 16:42:08.565456 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.569595 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.569620 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.569628 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.569638 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.569645 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:08Z","lastTransitionTime":"2026-01-20T16:42:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.671113 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" event={"ID":"254129cd-82fc-4162-b671-2434bc9e2972","Type":"ContainerStarted","Data":"f41d488b0c24e594a2f5cad36b5f8efdb7d867fae0e18a74fe3c396a203d162f"} Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.672125 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.672146 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.672154 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.672197 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.672208 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:08Z","lastTransitionTime":"2026-01-20T16:42:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.672974 4558 generic.go:334] "Generic (PLEG): container finished" podID="0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4" containerID="06eab4daa6c8b6a564cf1bb1403b7b3b259a234ee09f23b4f4bc0628c2d58e69" exitCode=0 Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.673028 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-f5t7h" event={"ID":"0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4","Type":"ContainerDied","Data":"06eab4daa6c8b6a564cf1bb1403b7b3b259a234ee09f23b4f4bc0628c2d58e69"} Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.682698 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the 
pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:08Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.690611 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d96bp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8d2c8b-fb8e-4bff-b8d2-69570e5d9e57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe858db22acc96b062f1bc7941e05c823f30d1f9ba50bd497c57f38d57e04293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4v6jx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d96bp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current 
time 2026-01-20T16:42:08Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.707605 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"254129cd-82fc-4162-b671-2434bc9e2972\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":
0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzc
c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013b
b70c118f35c3a16eb4d136df7b0c48db9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-nv2xw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:08Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.719663 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f5t7h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e77617c69434ce20c448b525c7e86f1ece18d51ce6e14167b4ca7e99de8e5709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e77617c69434ce20c448b525c7e86f1ece18d51ce6e14167b4ca7e99de8e5709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://453c67434281e0261ec6799b30a548a01efd72d7df96632409e745b2e6a94546\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://453c67434281e0261ec6799b30a548a01efd72d7df96632409e745b2e6a94546\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06eab4daa6c8b6a564cf1bb1403b7b3b259a234ee09f23b4f4bc0628c2d58e69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06eab4daa6c8b6a564cf1bb1403b7b3b259a234ee09f23b4f4bc0628c2d58e69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f5t7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:08Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.728984 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f904dd3e4b43e51569b6532f00fa2f9edf912c1f1969ee2ea00c99d1b7e35a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c877f4a456b3ce64bdbe6fefd50f1ef3f92bf9e26e6296005cf366e0ce265\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhoo
k\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:08Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.738613 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e45cc56934a7b90e9d47f1c6d06d8dc140d57db4a216469e8ae112f398663e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:08Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.748192 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jsqvf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bedf08c7-1f93-4931-a7f3-e729e2a137af\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e32b76495d88d356373f9389e163a2f3a72e202bb454c7015594c1f5a41b511\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xxtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jsqvf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:08Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.755811 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-47477" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e026445a-aa73-4a0e-ba35-e2e07de1278c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec2ee3ff161cba7e14ad26763081c9f092f0dfe969cef113631c27e0db53d157\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xcnwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-47477\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:08Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.769661 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"425eab90-fb70-4498-a98b-b95742033890\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48592e72d91cf527296f06e52fec1c16c1da21323c0644659945af109a74ebb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7e18f59b5e6b6c0be141b7efb3ccb1df4ff6e3c394bcc88fb1142c8df433493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c889e2b0e14244ef49ec057e3d5fd91687e335a983fdc99a6816abd2af643ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d434793571cbba089cc34029d951bee9063aca5
8728b791f2d1af2d68229f778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89f7ad577df11c509855a90ac8791da6de3f0d2857a1ca56726a6327f40455bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15eddb9054021\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15eddb9054021\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:08Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.774349 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.774466 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.774482 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.774500 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.774515 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:08Z","lastTransitionTime":"2026-01-20T16:42:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.779309 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30e1ff9d-8dcd-4754-9a7a-c09598fb83db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ea68e442bb2143fbaa5e2c9fe335c6b023a31c2eeaa78d60de45c6ef40c2894\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a8122c3b57ac4f0026df149fe622450d07d8b0d517ad018decd7b21d8d9c04a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7444e8fea1853bae45dbd3d52d07e3f7b94314cd29fe8c99891e515a892e569e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6438ffea42f98178c4c2c21ae01725a8b54d6a6f790f47d64faf5ecd9f1c00f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2e22f1d4e87dd06a67f49aedad280da48ac1c4ba2e438a4925cf8bd5330c4d8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0120 16:41:58.859734 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0120 16:41:58.861094 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179835226/tls.crt::/tmp/serving-cert-2179835226/tls.key\\\\\\\"\\\\nI0120 16:42:04.205247 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0120 16:42:04.207291 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0120 16:42:04.207308 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0120 16:42:04.207329 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0120 16:42:04.207333 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0120 16:42:04.210615 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0120 16:42:04.210632 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0120 16:42:04.210641 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210648 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0120 16:42:04.210655 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0120 16:42:04.210658 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0120 16:42:04.210660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0120 16:42:04.211964 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4850bde1fb538e21fc91949dc584ef0a791d718844691a44559e0513bb36203\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:08Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.788966 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"68337d27-3fa6-4a29-88b0-82e60c3739eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de7f7c23993a57d30f08686e1cd4abb5b17e108ac12dd0c7929a31574c69eeb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18a59ecb802507e0d5988eacc915fd7e0d6954518de80f61162c97f680fd8c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:08Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.798687 4558 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e190c24c1d8a3bc0241334598ce23829ada9afee282d49a0c82cd67ad82bcd02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:08Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.808925 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:08Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.817878 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:08Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.876755 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.876788 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.876797 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.876810 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.876819 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:08Z","lastTransitionTime":"2026-01-20T16:42:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.978854 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.978890 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.978898 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.978910 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:08 crc kubenswrapper[4558]: I0120 16:42:08.978919 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:08Z","lastTransitionTime":"2026-01-20T16:42:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:09 crc kubenswrapper[4558]: I0120 16:42:09.081528 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:09 crc kubenswrapper[4558]: I0120 16:42:09.081744 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:09 crc kubenswrapper[4558]: I0120 16:42:09.081754 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:09 crc kubenswrapper[4558]: I0120 16:42:09.081767 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:09 crc kubenswrapper[4558]: I0120 16:42:09.081776 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:09Z","lastTransitionTime":"2026-01-20T16:42:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:09 crc kubenswrapper[4558]: I0120 16:42:09.184190 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:09 crc kubenswrapper[4558]: I0120 16:42:09.184239 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:09 crc kubenswrapper[4558]: I0120 16:42:09.184249 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:09 crc kubenswrapper[4558]: I0120 16:42:09.184266 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:09 crc kubenswrapper[4558]: I0120 16:42:09.184278 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:09Z","lastTransitionTime":"2026-01-20T16:42:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:09 crc kubenswrapper[4558]: I0120 16:42:09.286396 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:09 crc kubenswrapper[4558]: I0120 16:42:09.286435 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:09 crc kubenswrapper[4558]: I0120 16:42:09.286444 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:09 crc kubenswrapper[4558]: I0120 16:42:09.286458 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:09 crc kubenswrapper[4558]: I0120 16:42:09.286466 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:09Z","lastTransitionTime":"2026-01-20T16:42:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:09 crc kubenswrapper[4558]: I0120 16:42:09.388286 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:09 crc kubenswrapper[4558]: I0120 16:42:09.388326 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:09 crc kubenswrapper[4558]: I0120 16:42:09.388334 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:09 crc kubenswrapper[4558]: I0120 16:42:09.388349 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:09 crc kubenswrapper[4558]: I0120 16:42:09.388358 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:09Z","lastTransitionTime":"2026-01-20T16:42:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:09 crc kubenswrapper[4558]: I0120 16:42:09.490680 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:09 crc kubenswrapper[4558]: I0120 16:42:09.490723 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:09 crc kubenswrapper[4558]: I0120 16:42:09.490737 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:09 crc kubenswrapper[4558]: I0120 16:42:09.490751 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:09 crc kubenswrapper[4558]: I0120 16:42:09.490761 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:09Z","lastTransitionTime":"2026-01-20T16:42:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:09 crc kubenswrapper[4558]: I0120 16:42:09.543323 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-10 23:07:39.721314584 +0000 UTC Jan 20 16:42:09 crc kubenswrapper[4558]: I0120 16:42:09.593606 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:09 crc kubenswrapper[4558]: I0120 16:42:09.593748 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:09 crc kubenswrapper[4558]: I0120 16:42:09.593821 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:09 crc kubenswrapper[4558]: I0120 16:42:09.594010 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:09 crc kubenswrapper[4558]: I0120 16:42:09.594094 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:09Z","lastTransitionTime":"2026-01-20T16:42:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:09 crc kubenswrapper[4558]: I0120 16:42:09.677429 4558 generic.go:334] "Generic (PLEG): container finished" podID="0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4" containerID="1291ac09fc6ae2a79fdeaa0fc47272f61b97f45f67ed3f7c90bdfdbfe3519ff2" exitCode=0 Jan 20 16:42:09 crc kubenswrapper[4558]: I0120 16:42:09.677472 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-f5t7h" event={"ID":"0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4","Type":"ContainerDied","Data":"1291ac09fc6ae2a79fdeaa0fc47272f61b97f45f67ed3f7c90bdfdbfe3519ff2"} Jan 20 16:42:09 crc kubenswrapper[4558]: I0120 16:42:09.689009 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:09Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:09 crc kubenswrapper[4558]: I0120 16:42:09.695937 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:09 crc kubenswrapper[4558]: I0120 16:42:09.695962 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:09 crc kubenswrapper[4558]: I0120 16:42:09.695973 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:09 crc kubenswrapper[4558]: I0120 16:42:09.695985 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:09 crc kubenswrapper[4558]: I0120 16:42:09.695992 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:09Z","lastTransitionTime":"2026-01-20T16:42:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:09 crc kubenswrapper[4558]: I0120 16:42:09.697912 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d96bp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8d2c8b-fb8e-4bff-b8d2-69570e5d9e57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe858db22acc96b062f1bc7941e05c823f30d1f9ba50bd497c57f38d57e04293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4v6jx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d96bp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:09Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:09 crc kubenswrapper[4558]: I0120 16:42:09.712503 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"254129cd-82fc-4162-b671-2434bc9e2972\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-nv2xw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:09Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:09 crc kubenswrapper[4558]: I0120 16:42:09.723116 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f5t7h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e77617c69434ce20c448b525c7e86f1ece18d51ce6e14167b4ca7e99de8e5709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e77617c69434ce20c448b525c7e86f1ece18d51ce6e14167b4ca7e99de8e5709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://453c67434281e0261ec6799b30a548a01efd72d7df96632409e745b2e6a94546\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://453c67434281e0261ec6799b30a548a01efd72d7df96632409e745b2e6a94546\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06eab4daa6c8b6a564cf1bb1403b7b3b259a234ee09f23b4f4bc0628c2d58e69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06eab4daa6c8b6a564cf1bb1403b7b3b259a234ee09f23b4f4bc0628c2d58e69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1291ac09fc6ae2a79fdeaa0fc47272f61b97f45f67ed3f7c90bdfdbfe3519ff2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1291ac09fc6ae2a79fdeaa0fc47272f61b97f45f67ed3f7c90bdfdbfe3519ff2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f5t7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:09Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:09 crc kubenswrapper[4558]: I0120 16:42:09.732070 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f904dd3e4b43e51569b6532f00fa2f9edf912c1f1969ee2ea00c99d1b7e35a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c877f4a456b3ce64bdbe6fefd50f1ef3f92bf9e26e6296005cf366e0ce265\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:09Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:09 crc kubenswrapper[4558]: I0120 16:42:09.741151 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e45cc56934a7b90e9d47f1c6d06d8dc140d57db4a216469e8ae112f398663e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:09Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:09 crc kubenswrapper[4558]: I0120 16:42:09.750384 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jsqvf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bedf08c7-1f93-4931-a7f3-e729e2a137af\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e32b76495d88d356373f9389e163a2f3a72e202bb454c7015594c1f5a41b511\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xxtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jsqvf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:09Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:09 crc kubenswrapper[4558]: I0120 16:42:09.757919 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-47477" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e026445a-aa73-4a0e-ba35-e2e07de1278c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec2ee3ff161cba7e14ad26763081c9f092f0dfe969cef113631c27e0db53d157\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xcnwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-47477\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:09Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:09 crc kubenswrapper[4558]: I0120 16:42:09.771191 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"425eab90-fb70-4498-a98b-b95742033890\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48592e72d91cf527296f06e52fec1c16c1da21323c0644659945af109a74ebb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7e18f59b5e6b6c0be141b7efb3ccb1df4ff6e3c394bcc88fb1142c8df433493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c889e2b0e14244ef49ec057e3d5fd91687e335a983fdc99a6816abd2af643ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d434793571cbba089cc34029d951bee9063aca5
8728b791f2d1af2d68229f778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89f7ad577df11c509855a90ac8791da6de3f0d2857a1ca56726a6327f40455bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15eddb9054021\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15eddb9054021\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:09Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:09 crc kubenswrapper[4558]: I0120 16:42:09.780503 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30e1ff9d-8dcd-4754-9a7a-c09598fb83db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ea68e442bb2143fbaa5e2c9fe335c6b023a31c2eeaa78d60de45c6ef40c2894\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a8122c3b57ac4f0026df149fe622450d07d8b0d517ad018decd7b21d8d9c04a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7444e8fea1853bae45dbd3d52d07e3f7b94314cd29fe8c99891e515a892e569e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6438ffea42f98178c4c2c21ae01725a8b54d6a6f790f47d64faf5ecd9f1c00f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2e22f1d4e87dd06a67f49aedad280da48ac1c4ba2e438a4925cf8bd5330c4d8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0120 16:41:58.859734 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0120 16:41:58.861094 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179835226/tls.crt::/tmp/serving-cert-2179835226/tls.key\\\\\\\"\\\\nI0120 16:42:04.205247 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0120 16:42:04.207291 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0120 16:42:04.207308 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0120 16:42:04.207329 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0120 16:42:04.207333 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0120 16:42:04.210615 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0120 16:42:04.210632 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0120 16:42:04.210641 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210648 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0120 16:42:04.210655 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0120 16:42:04.210658 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0120 16:42:04.210660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0120 16:42:04.211964 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4850bde1fb538e21fc91949dc584ef0a791d718844691a44559e0513bb36203\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:09Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:09 crc kubenswrapper[4558]: I0120 16:42:09.788021 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"68337d27-3fa6-4a29-88b0-82e60c3739eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de7f7c23993a57d30f08686e1cd4abb5b17e108ac12dd0c7929a31574c69eeb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18a59ecb802507e0d5988eacc915fd7e0d6954518de80f61162c97f680fd8c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:09Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:09 crc kubenswrapper[4558]: I0120 16:42:09.796908 4558 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e190c24c1d8a3bc0241334598ce23829ada9afee282d49a0c82cd67ad82bcd02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:09Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:09 crc kubenswrapper[4558]: I0120 16:42:09.798612 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:09 crc kubenswrapper[4558]: I0120 16:42:09.798650 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:09 crc kubenswrapper[4558]: I0120 16:42:09.798661 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:09 crc kubenswrapper[4558]: I0120 16:42:09.798677 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:09 crc kubenswrapper[4558]: I0120 16:42:09.798685 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:09Z","lastTransitionTime":"2026-01-20T16:42:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:09 crc kubenswrapper[4558]: I0120 16:42:09.806035 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:09Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:09 crc kubenswrapper[4558]: I0120 16:42:09.814523 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:09Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:09 crc kubenswrapper[4558]: I0120 16:42:09.900539 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:09 crc kubenswrapper[4558]: I0120 16:42:09.900572 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:09 crc kubenswrapper[4558]: I0120 16:42:09.900597 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:09 crc kubenswrapper[4558]: I0120 16:42:09.900611 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:09 crc kubenswrapper[4558]: I0120 16:42:09.900620 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:09Z","lastTransitionTime":"2026-01-20T16:42:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:09 crc kubenswrapper[4558]: I0120 16:42:09.978675 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 20 16:42:09 crc kubenswrapper[4558]: I0120 16:42:09.984381 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 20 16:42:09 crc kubenswrapper[4558]: I0120 16:42:09.986195 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Jan 20 16:42:09 crc kubenswrapper[4558]: I0120 16:42:09.991794 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f904dd3e4b43e51569b6532f00fa2f9edf912c1f1969ee2ea00c99d1b7e35a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c877f4a456b3ce64bdbe6fefd50f1ef3f92bf9e26e6296005cf366e0ce265\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:09Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:09 crc kubenswrapper[4558]: I0120 16:42:09.999640 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e45cc56934a7b90e9d47f1c6d06d8dc140d57db4a216469e8ae112f398663e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:09Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.002246 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.002286 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.002296 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.002310 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.002319 4558 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:10Z","lastTransitionTime":"2026-01-20T16:42:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.011097 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jsqvf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bedf08c7-1f93-4931-a7f3-e729e2a137af\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e32b76495d88d356373f9389e163a2f3a72e202bb454c7015594c1f5a41b511\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube
rnetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xxtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jsqvf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:10Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.020331 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-47477" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e026445a-aa73-4a0e-ba35-e2e07de1278c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec2ee3ff161cba7e14ad26763081c9f092f0dfe969cef113631c27e0db53d157\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xcnwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-47477\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:10Z is after 
2025-08-24T17:21:41Z" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.033460 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"425eab90-fb70-4498-a98b-b95742033890\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48592e72d91cf527296f06e52fec1c16c1da21323c0644659945af109a74ebb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7e18f59b5e6b6c0be141b7efb3ccb1df4ff6e3c394bcc88fb1142c8df433493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c889e2b0e14244ef49ec057e3d5fd91687e335a983fdc99a6816abd2af643ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/
etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d434793571cbba089cc34029d951bee9063aca58728b791f2d1af2d68229f778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89f7ad577df11c509855a90ac8791da6de3f0d2857a1ca56726a6327f40455bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15eddb9054021\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15eddb9054021\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026
-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:10Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.043792 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30e1ff9d-8dcd-4754-9a7a-c09598fb83db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ea68e442bb2143fbaa5e2c9fe335c6b023a31c2eeaa78d60de45c6ef40c2894\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a8122c3b57ac4f0026df149fe622450d07d8b0d517ad018decd7b21d8d9c04a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7444e8fea1853bae45dbd3d52d07e3f7b94314cd29fe8c99891e515a892e569e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6438ffea42f98178c4c2c21ae01725a8b54d6a6f790f47d64faf5ecd9f1c00f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2e22f1d4e87dd06a67f49aedad280da48ac1c4ba2e438a4925cf8bd5330c4d8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0120 16:41:58.859734 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0120 16:41:58.861094 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179835226/tls.crt::/tmp/serving-cert-2179835226/tls.key\\\\\\\"\\\\nI0120 16:42:04.205247 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0120 16:42:04.207291 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0120 16:42:04.207308 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0120 16:42:04.207329 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0120 16:42:04.207333 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0120 16:42:04.210615 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0120 16:42:04.210632 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0120 16:42:04.210641 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210648 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0120 16:42:04.210655 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0120 16:42:04.210658 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0120 16:42:04.210660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0120 16:42:04.211964 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4850bde1fb538e21fc91949dc584ef0a791d718844691a44559e0513bb36203\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:10Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.051781 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"68337d27-3fa6-4a29-88b0-82e60c3739eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de7f7c23993a57d30f08686e1cd4abb5b17e108ac12dd0c7929a31574c69eeb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18a59ecb802507e0d5988eacc915fd7e0d6954518de80f61162c97f680fd8c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:10Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.061000 4558 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e190c24c1d8a3bc0241334598ce23829ada9afee282d49a0c82cd67ad82bcd02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:10Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.069810 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:10Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.077602 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:10Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.086026 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:10Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.093226 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d96bp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8d2c8b-fb8e-4bff-b8d2-69570e5d9e57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe858db22acc96b062f1bc7941e05c823f30d1f9ba50bd497c57f38d57e04293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4v6jx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d96bp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-20T16:42:10Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.104205 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.104224 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.104233 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.104249 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.104257 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:10Z","lastTransitionTime":"2026-01-20T16:42:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.105839 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"254129cd-82fc-4162-b671-2434bc9e2972\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-nv2xw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:10Z 
is after 2025-08-24T17:21:41Z" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.116237 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f5t7h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\"
,\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e77617c69434ce20c448b525c7e86f1ece18d51ce6e14167b4ca7e99de8e5709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e77617c69434ce20c448b525c7e86f1ece18d51ce6e14167b4ca7e99de8e5709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://453c67434281e0261ec6799b30a548a01efd72d7df96632409e745b2e6a94546\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://453c67434281e0261ec6799b30a548a01efd72d7df96632409e745b2e6a94546\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06eab4daa6c8b6a564cf1bb1403b7b3b259a234ee09f23b4f4bc0628c2d58e69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06eab4daa6c8b6a564cf1bb1403b7b3b259a234ee09f23b4f4bc0628c2d58e69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"reason\\\":\\\"
Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1291ac09fc6ae2a79fdeaa0fc47272f61b97f45f67ed3f7c90bdfdbfe3519ff2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1291ac09fc6ae2a79fdeaa0fc47272f61b97f45f67ed3f7c90bdfdbfe3519ff2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f5t7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:10Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.124723 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e45cc56934a7b90e9d47f1c6d06d8dc140d57db4a216469e8ae112f398663e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:10Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.133212 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jsqvf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bedf08c7-1f93-4931-a7f3-e729e2a137af\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e32b76495d88d356373f9389e163a2f3a72e202bb454c7015594c1f5a41b511\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xxtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jsqvf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:10Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.140648 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-47477" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e026445a-aa73-4a0e-ba35-e2e07de1278c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec2ee3ff161cba7e14ad26763081c9f092f0dfe969cef113631c27e0db53d157\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xcnwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-47477\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:10Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.148726 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a13bfefa-f525-4ecd-89f5-f10480532bf6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9030e970e96b6909d771d4fa0c3b4f493b4a825cbc43ef7e684272284329c7c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://168e8f533aaa108e4160f9ae4af6bb7944edba7a596178ffffdc5325073fc3c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e0141aef1babb57501016cb09787591b5e6d9136b8d7dd4ba64393f0be5a027\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c87f08993c2b47c63ba9652c7dafe0fdbecdcb03a7b7080664ab1f1bd0e1d602\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:10Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.157849 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f904dd3e4b43e51569b6532f00fa2f9edf912c1f1969ee2ea00c99d1b7e35a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c877f4a456b3ce64bdbe6fefd50f1ef3f92bf9e26e6296005cf366e0ce265\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:10Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.167244 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30e1ff9d-8dcd-4754-9a7a-c09598fb83db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ea68e442bb2143fbaa5e2c9fe335c6b023a31c2eeaa78d60de45c6ef40c2894\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a8122c3b57ac4f0026df149fe622450d07d8b0d517ad018decd7b21d8d9c04a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7444e8fea1853bae45dbd3d52d07e3f7b94314cd29fe8c99891e515a892e569e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6438ffea42f98178c4c2c21ae01725a8b54d6a6f790f47d64faf5ecd9f1c00f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2e22f1d4e87dd06a67f49aedad280da48ac1c4ba2e438a4925cf8bd5330c4d8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0120 16:41:58.859734 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0120 16:41:58.861094 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179835226/tls.crt::/tmp/serving-cert-2179835226/tls.key\\\\\\\"\\\\nI0120 16:42:04.205247 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0120 16:42:04.207291 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0120 16:42:04.207308 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0120 16:42:04.207329 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0120 16:42:04.207333 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0120 16:42:04.210615 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0120 16:42:04.210632 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0120 16:42:04.210641 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210648 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0120 16:42:04.210655 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0120 16:42:04.210658 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0120 16:42:04.210660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0120 16:42:04.211964 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4850bde1fb538e21fc91949dc584ef0a791d718844691a44559e0513bb36203\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:10Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.174375 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"68337d27-3fa6-4a29-88b0-82e60c3739eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de7f7c23993a57d30f08686e1cd4abb5b17e108ac12dd0c7929a31574c69eeb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18a59ecb802507e0d5988eacc915fd7e0d6954518de80f61162c97f680fd8c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:10Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.188211 4558 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"425eab90-fb70-4498-a98b-b95742033890\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48592e72d91cf527296f06e52fec1c16c1da21323c0644659945af109a74ebb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7e18f59b5e6b6c0be141b7efb3ccb1df4ff6e3c394bcc88fb1142c8df433493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c889e2b0e14244ef49ec057e3d5fd91687e335a983fdc99a6816abd2af643ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-di
r\\\"}]},{\\\"containerID\\\":\\\"cri-o://d434793571cbba089cc34029d951bee9063aca58728b791f2d1af2d68229f778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89f7ad577df11c509855a90ac8791da6de3f0d2857a1ca56726a6327f40455bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15eddb9054021\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15eddb9054021\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}}},{\\\"containerID\\\"
:\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:10Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.206050 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.206091 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.206100 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.206114 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.206126 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:10Z","lastTransitionTime":"2026-01-20T16:42:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.213604 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:10Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.253455 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:10Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.293548 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e190c24c1d8a3bc0241334598ce23829ada9afee282d49a0c82cd67ad82bcd02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:10Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.308157 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.308214 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.308223 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.308237 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.308247 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:10Z","lastTransitionTime":"2026-01-20T16:42:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.337536 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"254129cd-82fc-4162-b671-2434bc9e2972\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-nv2xw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:10Z 
is after 2025-08-24T17:21:41Z" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.374219 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f5t7h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\"
,\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e77617c69434ce20c448b525c7e86f1ece18d51ce6e14167b4ca7e99de8e5709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e77617c69434ce20c448b525c7e86f1ece18d51ce6e14167b4ca7e99de8e5709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://453c67434281e0261ec6799b30a548a01efd72d7df96632409e745b2e6a94546\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://453c67434281e0261ec6799b30a548a01efd72d7df96632409e745b2e6a94546\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06eab4daa6c8b6a564cf1bb1403b7b3b259a234ee09f23b4f4bc0628c2d58e69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06eab4daa6c8b6a564cf1bb1403b7b3b259a234ee09f23b4f4bc0628c2d58e69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"reason\\\":\\\"
Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1291ac09fc6ae2a79fdeaa0fc47272f61b97f45f67ed3f7c90bdfdbfe3519ff2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1291ac09fc6ae2a79fdeaa0fc47272f61b97f45f67ed3f7c90bdfdbfe3519ff2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f5t7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:10Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.410812 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.411050 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.411129 4558 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.411224 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.411298 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:10Z","lastTransitionTime":"2026-01-20T16:42:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.414547 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:10Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.452326 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d96bp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8d2c8b-fb8e-4bff-b8d2-69570e5d9e57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe858db22acc96b062f1bc7941e05c823f30d1f9ba50bd497c57f38d57e04293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4v6jx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d96bp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-20T16:42:10Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.513494 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.513523 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.513531 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.513543 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.513553 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:10Z","lastTransitionTime":"2026-01-20T16:42:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.543890 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-13 05:45:00.145400183 +0000 UTC Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.565289 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.565328 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.565301 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 20 16:42:10 crc kubenswrapper[4558]: E0120 16:42:10.565388 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 20 16:42:10 crc kubenswrapper[4558]: E0120 16:42:10.565468 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 20 16:42:10 crc kubenswrapper[4558]: E0120 16:42:10.565790 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.615339 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.615367 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.615376 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.615389 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.615397 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:10Z","lastTransitionTime":"2026-01-20T16:42:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.684196 4558 generic.go:334] "Generic (PLEG): container finished" podID="0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4" containerID="fe484514f50a14e485d4a97ea3ce9401c77f3f4dae4717719be7ed6c4d7b1a59" exitCode=0 Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.684246 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-f5t7h" event={"ID":"0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4","Type":"ContainerDied","Data":"fe484514f50a14e485d4a97ea3ce9401c77f3f4dae4717719be7ed6c4d7b1a59"} Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.687036 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" event={"ID":"254129cd-82fc-4162-b671-2434bc9e2972","Type":"ContainerStarted","Data":"279a9cfedc0ee702bc7f170c8f36335470a947b4e68889c542867964215263ab"} Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.687292 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.698438 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"425eab90-fb70-4498-a98b-b95742033890\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48592e72d91cf527296f06e52fec1c16c1da21323c0644659945af109a74ebb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7e18f59b5e6b6c0be141b7efb3ccb1df4ff6e3c394bcc88fb1142c8df433493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c889e2b0e14244ef49ec057e3d5fd91687e335a983fdc99a6816abd2af643ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d434793571cbba089cc34029d951bee9063aca5
8728b791f2d1af2d68229f778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89f7ad577df11c509855a90ac8791da6de3f0d2857a1ca56726a6327f40455bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15eddb9054021\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15eddb9054021\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:10Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.703931 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.708857 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30e1ff9d-8dcd-4754-9a7a-c09598fb83db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ea68e442bb2143fbaa5e2c9fe335c6b023a31c2eeaa78d60de45c6ef40c2894\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a8122c3b57ac4f0026df149fe622450d07d8b0d517ad018decd7b21d8d9c04a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7444e8fea1853bae45dbd3d52d07e3f7b94314cd29fe8c99891e515a892e569e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6438ffea42f98178c4c2c21ae01725a8b54d6a6f790f47d64faf5ecd9f1c00f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2e22f1d4e87dd06a67f49aedad280da48ac1c4ba2e438a4925cf8bd5330c4d8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0120 16:41:58.859734 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0120 16:41:58.861094 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179835226/tls.crt::/tmp/serving-cert-2179835226/tls.key\\\\\\\"\\\\nI0120 16:42:04.205247 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0120 16:42:04.207291 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0120 16:42:04.207308 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0120 16:42:04.207329 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0120 16:42:04.207333 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0120 16:42:04.210615 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0120 16:42:04.210632 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0120 16:42:04.210641 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210648 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0120 16:42:04.210655 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0120 16:42:04.210658 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0120 16:42:04.210660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0120 16:42:04.211964 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4850bde1fb538e21fc91949dc584ef0a791d718844691a44559e0513bb36203\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:10Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.717142 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.717198 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.717208 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.717222 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.717230 4558 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:10Z","lastTransitionTime":"2026-01-20T16:42:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.717468 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"68337d27-3fa6-4a29-88b0-82e60c3739eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de7f7c23993a57d30f08686e1cd4abb5b17e108ac12dd0c7929a31574c69eeb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18a59ecb802507e0d5988eacc915fd7e0d6954518de80f61162c97f680fd8c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\
",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:10Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.725429 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e190c24c1d8a3bc0241334598ce23829ada9afee282d49a0c82cd67ad82bcd02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:10Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.733810 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers 
with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:10Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.741555 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:10Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.750602 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:10Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.772957 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d96bp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8d2c8b-fb8e-4bff-b8d2-69570e5d9e57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe858db22acc96b062f1bc7941e05c823f30d1f9ba50bd497c57f38d57e04293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4v6jx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d96bp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-20T16:42:10Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.817178 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"254129cd-82fc-4162-b671-2434bc9e2972\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\
"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\"
,\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013bb70c1
18f35c3a16eb4d136df7b0c48db9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-nv2xw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:10Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.818680 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.818712 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.818723 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.818736 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.818745 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:10Z","lastTransitionTime":"2026-01-20T16:42:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.855272 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f5t7h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e77617c69434ce20c4
48b525c7e86f1ece18d51ce6e14167b4ca7e99de8e5709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e77617c69434ce20c448b525c7e86f1ece18d51ce6e14167b4ca7e99de8e5709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://453c67434281e0261ec6799b30a548a01efd72d7df96632409e745b2e6a94546\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://453c67434281e0261ec6799b30a548a01efd72d7df96632409e745b2e6a94546\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06eab4daa6c8b6a564cf1bb1403b7b3b259a234ee09f23b4f4bc0628c2d58e69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06eab4daa6c8b6a564cf1bb1403b7b3b259a234ee09f23b4f4bc0628c2d58e69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\
\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1291ac09fc6ae2a79fdeaa0fc47272f61b97f45f67ed3f7c90bdfdbfe3519ff2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1291ac09fc6ae2a79fdeaa0fc47272f61b97f45f67ed3f7c90bdfdbfe3519ff2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe484514f50a14e485d4a97ea3ce9401c77f3f4dae4717719be7ed6c4d7b1a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe484514f50a14e485d4a97ea3ce9401c77f3f4dae4717719be7ed6c4d7b1a59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f5t7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:10Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.893024 4558 status_manager.go:875] "Failed to update status for 
pod" pod="openshift-image-registry/node-ca-47477" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e026445a-aa73-4a0e-ba35-e2e07de1278c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec2ee3ff161cba7e14ad26763081c9f092f0dfe969cef113631c27e0db53d157\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xcnwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-47477\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:10Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.920951 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.920987 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.920996 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.921009 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.921018 4558 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:10Z","lastTransitionTime":"2026-01-20T16:42:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.935419 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a13bfefa-f525-4ecd-89f5-f10480532bf6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9030e970e96b6909d771d4fa0c3b4f493b4a825cbc43ef7e684272284329c7c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://168e8f533aaa108e4160f9ae4af6bb7944edba7a596178ffffdc5325073fc3c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e0141aef1babb57501016cb09787591b5e6d9136b8d7dd4ba64393f0be5a027\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c87f08993c2b47c63ba9652c7dafe0fdbecdcb03a7b7080664ab1f1bd0e1d602\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:10Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:10 crc kubenswrapper[4558]: I0120 16:42:10.973919 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f904dd3e4b43e51569b6532f00fa2f9edf912c1f1969ee2ea00c99d1b7e35a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c877f4a456b3ce64bdbe6fefd50f1ef3f92bf9e26e6296005cf366e0ce265\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:10Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.013808 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e45cc56934a7b90e9d47f1c6d06d8dc140d57db4a216469e8ae112f398663e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:11Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.023553 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.023611 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.023621 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.023634 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.023643 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:11Z","lastTransitionTime":"2026-01-20T16:42:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.055602 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jsqvf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bedf08c7-1f93-4931-a7f3-e729e2a137af\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e32b76495d88d356373f9389e163a2f3a72e202bb454c7015594c1f5a41b511\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xxtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jsqvf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:11Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.093431 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e190c24c1d8a3bc0241334598ce23829ada9afee282d49a0c82cd67ad82bcd02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:11Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.125632 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.125670 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.125679 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.125692 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.125701 4558 
setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:11Z","lastTransitionTime":"2026-01-20T16:42:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.132644 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:11Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.177572 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:11Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.214143 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d96bp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8d2c8b-fb8e-4bff-b8d2-69570e5d9e57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe858db22acc96b062f1bc7941e05c823f30d1f9ba50bd497c57f38d57e04293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4v6jx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d96bp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:11Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.227827 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.227865 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.227874 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.227888 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.227896 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:11Z","lastTransitionTime":"2026-01-20T16:42:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.257741 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"254129cd-82fc-4162-b671-2434bc9e2972\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a6a184067bde115ef5e09fdd90e7dccfe89a9dc347af450998b91c068e7e803\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd8b93e5559d95511a1285a9aec9a7d72df6c330bcd1e1daf3e93f5494f70eeb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4264ef8ee49a8b172fb05ac5c9b0bd32350b5d9ae95293079dfeaf44f021b455\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0889cf371f8b06cc2d255c15fda97db1d5000d6f6cff29d2fdcb8667cede8a99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4265d3281e954ee5989ee008da28c6cb9fa29478e5fdf23325521d455304da9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://186a1d2caad8865abf7f565405c4a4c077038ccb67ba77927cc2392ec075a811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://279a9cfedc0ee702bc7f170c8f36335470a947b4
e68889c542867964215263ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f41d488b0c24e594a2f5cad36b5f8efdb7d867fae0e18a74fe3c396a203d162f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-nv2xw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:11Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.295207 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f5t7h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e77617c69434ce20c448b525c7e86f1ece18d51ce6e14167b4ca7e99de8e5709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e77617c69434ce20c448b525c7e86f1ece18d51ce6e14167b4ca7e99de8e5709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://453c67434281e0261ec6799b30a548a01efd72d7df96632409e745b2e6a94546\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://453c67434281e0261ec6799b30a548a01efd72d7df96632409e745b2e6a94546\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06eab4daa6c8b6a564cf1bb1403b7b3b259a234ee09f23b4f4bc0628c2d58e69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06eab4daa6c8b6a564cf1bb1403b7b3b259a234ee09f23b4f4bc0628c2d58e69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1291ac09fc6ae2a79fdeaa0fc47272f61b97f45f67ed3f7c90bdfdbfe3519ff2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1291ac09fc6ae2a79fdeaa0fc47272f61b97f45f67ed3f7c90bdfdbfe3519ff2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe484514f50a14e485d4a97ea3ce9401c77f3f4dae4717719be7ed6c4d7b1a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe484514f50a14e485d4a97ea3ce9401c77f3f4dae4717719be7ed6c4d7b1a59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f5t7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:11Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.329395 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.329440 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.329450 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.329469 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.329477 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:11Z","lastTransitionTime":"2026-01-20T16:42:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.337333 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:11Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.374814 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f904dd3e4b43e51569b6532f00fa2f9edf912c1f1969ee2ea00c99d1b7e35a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c877f4a456b3ce64bdbe6fefd50f1ef3f92bf9e26e6296005cf366e0ce265\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:11Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.414675 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e45cc56934a7b90e9d47f1c6d06d8dc140d57db4a216469e8ae112f398663e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:11Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.431919 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.431954 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.431962 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.431976 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.431984 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:11Z","lastTransitionTime":"2026-01-20T16:42:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.453388 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jsqvf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bedf08c7-1f93-4931-a7f3-e729e2a137af\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e32b76495d88d356373f9389e163a2f3a72e202bb454c7015594c1f5a41b511\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xxtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jsqvf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:11Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.493854 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-47477" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e026445a-aa73-4a0e-ba35-e2e07de1278c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec2ee3ff161cba7e14ad26763081c9f092f0dfe969cef113631c27e0db53d157\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xcnwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-47477\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:11Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.533929 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a13bfefa-f525-4ecd-89f5-f10480532bf6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9030e970e96b6909d771d4fa0c3b4f493b4a825cbc43ef7e684272284329c7c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://168e8f533aaa108e4160f9ae4af6bb7944edba7a596178ffffdc5325073fc3c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e0141aef1babb57501016cb09787591b5e6d9136b8d7dd4ba64393f0be5a027\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c87f08993c2b47c63ba9652c7dafe0fdbecdcb03a7b7080664ab1f1bd0e1d602\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:11Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.534283 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.534314 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.534323 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.534336 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.534344 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:11Z","lastTransitionTime":"2026-01-20T16:42:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.544604 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-06 18:00:23.270246335 +0000 UTC Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.577972 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"425eab90-fb70-4498-a98b-b95742033890\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48592e72d91cf527296f06e52fec1c16c1da21323c0644659945af109a74ebb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7e18f59b5e6b6c0be141b7efb3ccb1df4ff6e3c394bcc88fb1142c8df433493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c889e2b0e14244ef49ec057e3d5fd91687e335a983fdc99a6816abd2af643ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30
a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d434793571cbba089cc34029d951bee9063aca58728b791f2d1af2d68229f778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89f7ad577df11c509855a90ac8791da6de3f0d2857a1ca56726a6327f40455bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15eddb9054021\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure
-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15eddb9054021\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:11Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.614084 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30e1ff9d-8dcd-4754-9a7a-c09598fb83db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ea68e442bb2143fbaa5e2c9fe335c6b023a31c2eeaa78d60de45c6ef40c2894\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a8122c3b57ac4f0026df149fe622450d07d8b0d517ad018decd7b21d8d9c04a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7444e8fea1853bae45dbd3d52d07e3f7b94314cd29fe8c99891e515a892e569e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6438ffea42f98178c4c2c21ae01725a8b54d6a6f790f47d64faf5ecd9f1c00f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2e22f1d4e87dd06a67f49aedad280da48ac1c4ba2e438a4925cf8bd5330c4d8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0120 16:41:58.859734 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0120 16:41:58.861094 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179835226/tls.crt::/tmp/serving-cert-2179835226/tls.key\\\\\\\"\\\\nI0120 16:42:04.205247 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0120 16:42:04.207291 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0120 16:42:04.207308 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0120 16:42:04.207329 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0120 16:42:04.207333 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0120 16:42:04.210615 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0120 16:42:04.210632 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0120 16:42:04.210641 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210648 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0120 16:42:04.210655 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0120 16:42:04.210658 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0120 16:42:04.210660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0120 16:42:04.211964 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4850bde1fb538e21fc91949dc584ef0a791d718844691a44559e0513bb36203\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:11Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.636318 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.636358 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.636366 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.636380 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.636389 4558 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:11Z","lastTransitionTime":"2026-01-20T16:42:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.654139 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"68337d27-3fa6-4a29-88b0-82e60c3739eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de7f7c23993a57d30f08686e1cd4abb5b17e108ac12dd0c7929a31574c69eeb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18a59ecb802507e0d5988eacc915fd7e0d6954518de80f61162c97f680fd8c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\
",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:11Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.691727 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-f5t7h" event={"ID":"0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4","Type":"ContainerStarted","Data":"21a318f03d0df490cb7bae11819c52eca011d163081aeecff1b2dd8f72caabbc"} Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.691800 4558 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.692385 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.706099 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a13bfefa-f525-4ecd-89f5-f10480532bf6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9030e970e96b6909d771d4fa0c3b4f493b4a825cbc43ef7e684272284329c7c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://168e8f533aaa108e4160f9ae4af6bb7944edba7a596178ffffdc5325073fc3c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restart
Count\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e0141aef1babb57501016cb09787591b5e6d9136b8d7dd4ba64393f0be5a027\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c87f08993c2b47c63ba9652c7dafe0fdbecdcb03a7b7080664ab1f1bd0e1d602\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:11Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.709473 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.734778 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f904dd3e4b43e51569b6532f00fa2f9edf912c1f1969ee2ea00c99d1b7e35a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c877f4a456b3ce64bdbe6fefd50f1ef3f92bf9e26e6296005cf366e0ce265\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:11Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.738075 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.738107 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.738115 4558 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.738127 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.738138 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:11Z","lastTransitionTime":"2026-01-20T16:42:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.773602 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e45cc56934a7b90e9d47f1c6d06d8dc140d57db4a216469e8ae112f398663e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:11Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.815657 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jsqvf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bedf08c7-1f93-4931-a7f3-e729e2a137af\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e32b76495d88d356373f9389e163a2f3a72e202bb454c7015594c1f5a41b511\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xxtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jsqvf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:11Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.839648 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.839847 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.839920 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.839991 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.840055 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:11Z","lastTransitionTime":"2026-01-20T16:42:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.854176 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-47477" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e026445a-aa73-4a0e-ba35-e2e07de1278c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec2ee3ff161cba7e14ad26763081c9f092f0dfe969cef113631c27e0db53d157\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xcnwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"h
ostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-47477\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:11Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.901284 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"425eab90-fb70-4498-a98b-b95742033890\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48592e72d91cf527296f06e52fec1c16c1da21323c0644659945af109a74ebb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7e18f59b5e6b6c0be141b7efb3ccb1df4ff6e3c394bcc88fb1142c8df433493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o
://c889e2b0e14244ef49ec057e3d5fd91687e335a983fdc99a6816abd2af643ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d434793571cbba089cc34029d951bee9063aca58728b791f2d1af2d68229f778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89f7ad577df11c509855a90ac8791da6de3f0d2857a1ca56726a6327f40455bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15e
ddb9054021\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15eddb9054021\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:11Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.935037 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30e1ff9d-8dcd-4754-9a7a-c09598fb83db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ea68e442bb2143fbaa5e2c9fe335c6b023a31c2eeaa78d60de45c6ef40c2894\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a8122c3b57ac4f0026df149fe622450d07d8b0d517ad018decd7b21d8d9c04a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7444e8fea1853bae45dbd3d52d07e3f7b94314cd29fe8c99891e515a892e569e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6438ffea42f98178c4c2c21ae01725a8b54d6a6f790f47d64faf5ecd9f1c00f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2e22f1d4e87dd06a67f49aedad280da48ac1c4ba2e438a4925cf8bd5330c4d8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0120 16:41:58.859734 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0120 16:41:58.861094 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179835226/tls.crt::/tmp/serving-cert-2179835226/tls.key\\\\\\\"\\\\nI0120 16:42:04.205247 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0120 16:42:04.207291 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0120 16:42:04.207308 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0120 16:42:04.207329 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0120 16:42:04.207333 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0120 16:42:04.210615 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0120 16:42:04.210632 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0120 16:42:04.210641 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210648 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0120 16:42:04.210655 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0120 16:42:04.210658 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0120 16:42:04.210660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0120 16:42:04.211964 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4850bde1fb538e21fc91949dc584ef0a791d718844691a44559e0513bb36203\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:11Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.942722 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.942756 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.942765 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.942780 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.942788 4558 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:11Z","lastTransitionTime":"2026-01-20T16:42:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:11 crc kubenswrapper[4558]: I0120 16:42:11.973706 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"68337d27-3fa6-4a29-88b0-82e60c3739eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de7f7c23993a57d30f08686e1cd4abb5b17e108ac12dd0c7929a31574c69eeb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18a59ecb802507e0d5988eacc915fd7e0d6954518de80f61162c97f680fd8c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\
",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:11Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.015244 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e190c24c1d8a3bc0241334598ce23829ada9afee282d49a0c82cd67ad82bcd02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:12Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.044772 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.044808 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.044818 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.044832 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:12 crc kubenswrapper[4558]: 
I0120 16:42:12.044842 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:12Z","lastTransitionTime":"2026-01-20T16:42:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.056367 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:12Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.093451 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:12Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.133932 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:12Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.147733 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.147768 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.147777 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.147791 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.147799 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:12Z","lastTransitionTime":"2026-01-20T16:42:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.181442 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d96bp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8d2c8b-fb8e-4bff-b8d2-69570e5d9e57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe858db22acc96b062f1bc7941e05c823f30d1f9ba50bd497c57f38d57e04293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4v6jx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d96bp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:12Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.219079 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"254129cd-82fc-4162-b671-2434bc9e2972\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a6a184067bde115ef5e09fdd90e7dccfe89a9dc347af450998b91c068e7e803\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd8b93e5559d95511a1285a9aec9a7d72df6c330bcd1e1daf3e93f5494f70eeb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4264ef8ee49a8b172fb05ac5c9b0bd32350b5d9ae95293079dfeaf44f021b455\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"im
ageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0889cf371f8b06cc2d255c15fda97db1d5000d6f6cff29d2fdcb8667cede8a99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4265d3281e954ee5989ee008da28c6cb9fa29478e5fdf23325521d455304da9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://186a1d2caad8865abf7f565405c4a4c077038ccb67ba77927cc2392ec075a811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\
\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://279a9cfedc0ee702bc7f170c8f36335470a947b4e68889c542867964215263ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnl
y\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f41d488b0c24e594a2f5cad36b5f8efdb7d867fae0e18a74fe3c396a203d162f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-nv2xw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:12Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.250102 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.250132 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.250141 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.250152 4558 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeNotReady" Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.250177 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:12Z","lastTransitionTime":"2026-01-20T16:42:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.257530 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f5t7h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a318f03d0df490cb7bae11819c52eca011d163081aeecff1b2dd8f72caabbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-cop
y\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e77617c69434ce20c448b525c7e86f1ece18d51ce6e14167b4ca7e99de8e5709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e77617c69434ce20c448b525c7e86f1ece18d51ce6e14167b4ca7e99de8e5709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://453c67434281e0261ec6799b30a548a01efd72d7df96632409e745b2e6a94546\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://453c67434281e0261ec6799b30a548a01efd72d7df96632409e745b2e6a94546\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06eab4daa6c8b6a564cf1bb1403b7b3b259a234ee09f23b4f4bc0628c2d58e69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"
lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06eab4daa6c8b6a564cf1bb1403b7b3b259a234ee09f23b4f4bc0628c2d58e69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1291ac09fc6ae2a79fdeaa0fc47272f61b97f45f67ed3f7c90bdfdbfe3519ff2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1291ac09fc6ae2a79fdeaa0fc47272f61b97f45f67ed3f7c90bdfdbfe3519ff2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe484514f50a14e485d4a97ea3ce9401c77f3f4dae4717719be7ed6c4d7b1a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe484514f50a14e485d4a97ea3ce9401c77f3f4dae4717719be7ed6c4d7b1a59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-additional-cni-plugins-f5t7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:12Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.261767 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 20 16:42:12 crc kubenswrapper[4558]: E0120 16:42:12.261926 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-20 16:42:20.261906984 +0000 UTC m=+34.022244951 (durationBeforeRetry 8s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.295286 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f5t7h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a318f03d0df490cb7bae11819c52eca011d163081aeecff1b2dd8f72caabbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e77617c69434ce20c448b525c7e86f1ece18d51ce6e14167b4ca7e99de8e5709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e77617c69434ce20c448b525c7e86f1ece18d51ce6e14167b4ca7e99de8e5709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://453c67434281e0261ec6799b30a548a01efd72d7df96632409e745b2e6a94546\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://453c67434281e0261ec6799b30a548a01efd72d7df96632409e745b2e6a94546\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06eab4daa6c8b6a564cf1bb1403b7b3b259a234ee09f23b4f4bc0628c2d58e69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06eab4daa6c8b6a564cf1bb1403b7b3b259a234ee09f23b4f4bc0628c2d58e69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1291ac09fc6ae2a79fdeaa0fc47272f61b97f45f67ed3f7c90bdfdbfe3519ff2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1291ac09fc6ae2a79fdeaa0fc47272f61b97f45f67ed3f7c90bdfdbfe3519ff2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe484514f50a14e485d4a97ea3ce9401c77f3f4dae4717719be7ed6c4d7b1a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe484514f50a14e485d4a97ea3ce9401c77f3f4dae4717719be7ed6c4d7b1a59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f5t7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:12Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.334373 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:12Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.351818 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.351850 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.351860 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.351873 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.351882 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:12Z","lastTransitionTime":"2026-01-20T16:42:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.363215 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.363245 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.363267 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:42:12 crc kubenswrapper[4558]: E0120 16:42:12.363348 4558 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 20 16:42:12 crc kubenswrapper[4558]: E0120 16:42:12.363358 4558 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 20 16:42:12 crc kubenswrapper[4558]: E0120 16:42:12.363375 4558 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 20 16:42:12 crc kubenswrapper[4558]: E0120 16:42:12.363386 4558 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 20 16:42:12 crc kubenswrapper[4558]: E0120 16:42:12.363440 4558 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.363622 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 20 16:42:12 crc kubenswrapper[4558]: E0120 16:42:12.363714 4558 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 20 16:42:12 crc kubenswrapper[4558]: E0120 16:42:12.363729 4558 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object 
"openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 20 16:42:12 crc kubenswrapper[4558]: E0120 16:42:12.363736 4558 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 20 16:42:12 crc kubenswrapper[4558]: E0120 16:42:12.363761 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-20 16:42:20.363647984 +0000 UTC m=+34.123985951 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 20 16:42:12 crc kubenswrapper[4558]: E0120 16:42:12.363775 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-20 16:42:20.363768431 +0000 UTC m=+34.124106399 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 20 16:42:12 crc kubenswrapper[4558]: E0120 16:42:12.363785 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-20 16:42:20.363780173 +0000 UTC m=+34.124118141 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 20 16:42:12 crc kubenswrapper[4558]: E0120 16:42:12.363795 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-20 16:42:20.363789401 +0000 UTC m=+34.124127368 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.372649 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d96bp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8d2c8b-fb8e-4bff-b8d2-69570e5d9e57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe858db22acc96b062f1bc7941e05c823f30d1f9ba50bd497c57f38d57e04293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4v6jx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d96bp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:12Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.417239 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"254129cd-82fc-4162-b671-2434bc9e2972\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a6a184067bde115ef5e09fdd90e7dccfe89a9dc347af450998b91c068e7e803\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd8b93e5559d95511a1285a9aec9a7d72df6c330bcd1e1daf3e93f5494f70eeb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4264ef8ee49a8b172fb05ac5c9b0bd32350b5d9ae95293079dfeaf44f021b455\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0889cf371f8b06cc2d255c15fda97db1d5000d6f6cff29d2fdcb8667cede8a99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4265d3281e954ee5989ee008da28c6cb9fa29478e5fdf23325521d455304da9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://186a1d2caad8865abf7f565405c4a4c077038ccb67ba77927cc2392ec075a811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://279a9cfedc0ee702bc7f170c8f36335470a947b4e68889c542867964215263ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f41d488b0c24e594a2f5cad36b5f8efdb7d867fae0e18a74fe3c396a203d162f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-nv2xw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:12Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.454303 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jsqvf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bedf08c7-1f93-4931-a7f3-e729e2a137af\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e32b76495d88d356373f9389e163a2f3a72e202bb454c7015594c1f5a41b511\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xxtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jsqvf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:12Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.454410 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.454435 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.454443 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.454454 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.454465 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:12Z","lastTransitionTime":"2026-01-20T16:42:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.493090 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-47477" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e026445a-aa73-4a0e-ba35-e2e07de1278c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec2ee3ff161cba7e14ad26763081c9f092f0dfe969cef113631c27e0db53d157\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xcnwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"h
ostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-47477\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:12Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.532804 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a13bfefa-f525-4ecd-89f5-f10480532bf6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9030e970e96b6909d771d4fa0c3b4f493b4a825cbc43ef7e684272284329c7c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://168e8f533aaa108e4160f9ae4af6bb7944edba7a596178ffffdc5325073fc3c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e0141aef1babb57501016cb09787591b5e6d9136b8d7dd4ba64393f0be5a027\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256
:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c87f08993c2b47c63ba9652c7dafe0fdbecdcb03a7b7080664ab1f1bd0e1d602\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:12Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.544742 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-17 18:15:14.678268888 +0000 UTC Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.556675 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.556706 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.556714 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.556728 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.556737 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:12Z","lastTransitionTime":"2026-01-20T16:42:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI 
configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.564883 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.564899 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 20 16:42:12 crc kubenswrapper[4558]: E0120 16:42:12.565159 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 20 16:42:12 crc kubenswrapper[4558]: E0120 16:42:12.565081 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.564911 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:42:12 crc kubenswrapper[4558]: E0120 16:42:12.565270 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.573692 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f904dd3e4b43e51569b6532f00fa2f9edf912c1f1969ee2ea00c99d1b7e35a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c877f4a456b3ce64bdbe6fefd50f1ef3f92bf9e26e6296005cf366e0ce265\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:12Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.612887 4558 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e45cc56934a7b90e9d47f1c6d06d8dc140d57db4a216469e8ae112f398663e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:12Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.652569 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"68337d27-3fa6-4a29-88b0-82e60c3739eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de7f7c23993a57d30f08686e1cd4abb5b17e108ac12dd0c7929a31574c69eeb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18a59ecb802507e0d5988eacc915fd7e0d6954518de80f61162c97f680fd8c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:12Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.658813 4558 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.658856 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.658865 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.658878 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.658887 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:12Z","lastTransitionTime":"2026-01-20T16:42:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.695396 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-nv2xw_254129cd-82fc-4162-b671-2434bc9e2972/ovnkube-controller/0.log" Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.697298 4558 generic.go:334] "Generic (PLEG): container finished" podID="254129cd-82fc-4162-b671-2434bc9e2972" containerID="279a9cfedc0ee702bc7f170c8f36335470a947b4e68889c542867964215263ab" exitCode=1 Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.697326 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" event={"ID":"254129cd-82fc-4162-b671-2434bc9e2972","Type":"ContainerDied","Data":"279a9cfedc0ee702bc7f170c8f36335470a947b4e68889c542867964215263ab"} Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.697798 4558 scope.go:117] "RemoveContainer" containerID="279a9cfedc0ee702bc7f170c8f36335470a947b4e68889c542867964215263ab" Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.698806 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"425eab90-fb70-4498-a98b-b95742033890\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48592e72d91cf527296f06e52fec1c16c1da21323c0644659945af109a74ebb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7e18f59b5e6b6c0be141b7efb3ccb1df4ff6e3c394bcc88fb1142c8df433493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c889e2b0e14244ef49ec057e3d5fd91687e335a983fdc99a6816abd2af643ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d434793571cbba089cc34029d951bee9063aca5
8728b791f2d1af2d68229f778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89f7ad577df11c509855a90ac8791da6de3f0d2857a1ca56726a6327f40455bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15eddb9054021\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15eddb9054021\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:12Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.735111 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30e1ff9d-8dcd-4754-9a7a-c09598fb83db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ea68e442bb2143fbaa5e2c9fe335c6b023a31c2eeaa78d60de45c6ef40c2894\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a8122c3b57ac4f0026df149fe622450d07d8b0d517ad018decd7b21d8d9c04a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7444e8fea1853bae45dbd3d52d07e3f7b94314cd29fe8c99891e515a892e569e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6438ffea42f98178c4c2c21ae01725a8b54d6a6f790f47d64faf5ecd9f1c00f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2e22f1d4e87dd06a67f49aedad280da48ac1c4ba2e438a4925cf8bd5330c4d8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0120 16:41:58.859734 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0120 16:41:58.861094 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179835226/tls.crt::/tmp/serving-cert-2179835226/tls.key\\\\\\\"\\\\nI0120 16:42:04.205247 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0120 16:42:04.207291 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0120 16:42:04.207308 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0120 16:42:04.207329 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0120 16:42:04.207333 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0120 16:42:04.210615 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0120 16:42:04.210632 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0120 16:42:04.210641 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210648 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0120 16:42:04.210655 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0120 16:42:04.210658 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0120 16:42:04.210660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0120 16:42:04.211964 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4850bde1fb538e21fc91949dc584ef0a791d718844691a44559e0513bb36203\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:12Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.760354 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.760385 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.760394 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.760407 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.760416 4558 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:12Z","lastTransitionTime":"2026-01-20T16:42:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.774473 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:12Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.815198 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e190c24c1d8a3bc0241334598ce23829ada9afee282d49a0c82cd67ad82bcd02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:12Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.853038 4558 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:12Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.862287 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.862354 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.862366 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.862379 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.862387 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:12Z","lastTransitionTime":"2026-01-20T16:42:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.895297 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:12Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.940425 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:12Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.964401 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.964435 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.964445 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.964459 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.964468 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:12Z","lastTransitionTime":"2026-01-20T16:42:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:12 crc kubenswrapper[4558]: I0120 16:42:12.974707 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e190c24c1d8a3bc0241334598ce23829ada9afee282d49a0c82cd67ad82bcd02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:12Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.017951 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"254129cd-82fc-4162-b671-2434bc9e2972\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a6a184067bde115ef5e09fdd90e7dccfe89a9dc347af450998b91c068e7e803\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd8b93e5559d95511a1285a9aec9a7d72df6c330bcd1e1daf3e93f5494f70eeb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4264ef8ee49a8b172fb05ac5c9b0bd32350b5d9ae95293079dfeaf44f021b455\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0889cf371f8b06cc2d255c15fda97db1d5000d6f6cff29d2fdcb8667cede8a99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4265d3281e954ee5989ee008da28c6cb9fa29478e5fdf23325521d455304da9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://186a1d2caad8865abf7f565405c4a4c077038ccb67ba77927cc2392ec075a811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://279a9cfedc0ee702bc7f170c8f36335470a947b4e68889c542867964215263ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://279a9cfedc0ee702bc7f170c8f36335470a947b4e68889c542867964215263ab\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-20T16:42:12Z\\\",\\\"message\\\":\\\" 5842 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0120 16:42:12.346255 5842 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0120 16:42:12.346261 5842 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0120 16:42:12.346272 5842 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0120 16:42:12.346279 5842 handler.go:208] Removed *v1.Node event handler 2\\\\nI0120 16:42:12.346284 5842 handler.go:208] Removed *v1.Node event handler 7\\\\nI0120 16:42:12.346290 5842 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0120 16:42:12.346315 5842 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI0120 16:42:12.346513 5842 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0120 16:42:12.346548 5842 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0120 16:42:12.346724 5842 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from 
sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f41d488b0c24e594a2f5cad36b5f8efdb7d867fae0e18a74fe3c396a203d162f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\\\",\\\"image\\\":\\\"quay.
io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-nv2xw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:13Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.055317 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f5t7h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a318f03d0df490cb7bae11819c52eca011d163081aeecff1b2dd8f72caabbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\
\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e77617c69434ce20c448b525c7e86f1ece18d51ce6e14167b4ca7e99de8e5709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e77617c69434ce20c448b525c7e86f1ece18d51ce6e14167b4ca7e99de8e5709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://453c67434281e0261ec6799b30a548a01efd72d7df96632409e745b2e6a94546\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://453c67434281e0261ec6799b30a548a01efd72d7df96632409e745b2e6a94546\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"reason\\\":\\\"Comple
ted\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06eab4daa6c8b6a564cf1bb1403b7b3b259a234ee09f23b4f4bc0628c2d58e69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06eab4daa6c8b6a564cf1bb1403b7b3b259a234ee09f23b4f4bc0628c2d58e69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1291ac09fc6ae2a79fdeaa0fc47272f61b97f45f67ed3f7c90bdfdbfe3519ff2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1291ac09fc6ae2a79fdeaa0fc47272f61b97f45f67ed3f7c90bdfdbfe3519ff2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe484514f50a14e485d4a97ea3ce9401c77f3f4dae4717719be7ed6c4d7b1a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"las
tState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe484514f50a14e485d4a97ea3ce9401c77f3f4dae4717719be7ed6c4d7b1a59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f5t7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:13Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.066752 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.066791 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.066801 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.066816 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.066825 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:13Z","lastTransitionTime":"2026-01-20T16:42:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.094530 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:13Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.132528 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d96bp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8d2c8b-fb8e-4bff-b8d2-69570e5d9e57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe858db22acc96b062f1bc7941e05c823f30d1f9ba50bd497c57f38d57e04293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4v6jx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d96bp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:13Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.168481 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.168519 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.168528 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.168543 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.168551 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:13Z","lastTransitionTime":"2026-01-20T16:42:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.174182 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e45cc56934a7b90e9d47f1c6d06d8dc140d57db4a216469e8ae112f398663e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:13Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.214462 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jsqvf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bedf08c7-1f93-4931-a7f3-e729e2a137af\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e32b76495d88d356373f9389e163a2f3a72e202bb454c7015594c1f5a41b511\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xxtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jsqvf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:13Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.252500 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-47477" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e026445a-aa73-4a0e-ba35-e2e07de1278c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec2ee3ff161cba7e14ad26763081c9f092f0dfe969cef113631c27e0db53d157\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xcnwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-47477\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:13Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.270071 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.270105 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.270115 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:13 crc 
kubenswrapper[4558]: I0120 16:42:13.270129 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.270138 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:13Z","lastTransitionTime":"2026-01-20T16:42:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.293902 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a13bfefa-f525-4ecd-89f5-f10480532bf6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9030e970e96b6909d771d4fa0c3b4f493b4a825cbc43ef7e684272284329c7c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://168e8f533aaa108e4160f9ae4af6bb7944edba7a596178ffffdc5325073fc3c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e0141aef1babb57501016cb09787591b5e6d9136b8d7dd4ba64393f0be5a027\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c87f08993c2b47c63ba9652c7dafe0fdbecdcb03a7b7080664ab1f1bd0e1d602\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:13Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.334474 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f904dd3e4b43e51569b6532f00fa2f9edf912c1f1969ee2ea00c99d1b7e35a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c877f4a456b3ce64bdbe6fefd50f1ef3f92bf9e26e6296005cf366e0ce265\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:13Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.372189 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.372236 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.372248 4558 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.372280 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.372290 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:13Z","lastTransitionTime":"2026-01-20T16:42:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.374445 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30e1ff9d-8dcd-4754-9a7a-c09598fb83db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ea68e442bb2143fbaa5e2c9fe335c6b023a31c2eeaa78d60de45c6ef40c2894\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a8122c3b57ac4f0026df149fe622450d07d8b0d517ad018decd7b21d8d9c04a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\
":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7444e8fea1853bae45dbd3d52d07e3f7b94314cd29fe8c99891e515a892e569e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6438ffea42f98178c4c2c21ae01725a8b54d6a6f790f47d64faf5ecd9f1c00f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2e22f1d4e87dd06a67f49aedad280da48ac1c4ba2e438a4925cf8bd5330c4d8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0120 16:41:58.859734 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0120 16:41:58.861094 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179835226/tls.crt::/tmp/serving-cert-2179835226/tls.key\\\\\\\"\\\\nI0120 16:42:04.205247 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0120 16:42:04.207291 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0120 16:42:04.207308 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0120 16:42:04.207329 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0120 16:42:04.207333 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0120 16:42:04.210615 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0120 16:42:04.210632 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0120 16:42:04.210641 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210648 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0120 16:42:04.210655 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0120 16:42:04.210658 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' 
detected.\\\\nW0120 16:42:04.210660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0120 16:42:04.211964 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4850bde1fb538e21fc91949dc584ef0a791d718844691a44559e0513bb36203\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:13Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.413351 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"68337d27-3fa6-4a29-88b0-82e60c3739eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de7f7c23993a57d30f08686e1cd4abb5b17e108ac12dd0c7929a31574c69eeb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18a59ecb802507e0d5988eacc915fd7e0d6954518de80f61162c97f680fd8c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:13Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.458549 4558 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"425eab90-fb70-4498-a98b-b95742033890\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48592e72d91cf527296f06e52fec1c16c1da21323c0644659945af109a74ebb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7e18f59b5e6b6c0be141b7efb3ccb1df4ff6e3c394bcc88fb1142c8df433493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c889e2b0e14244ef49ec057e3d5fd91687e335a983fdc99a6816abd2af643ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-di
r\\\"}]},{\\\"containerID\\\":\\\"cri-o://d434793571cbba089cc34029d951bee9063aca58728b791f2d1af2d68229f778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89f7ad577df11c509855a90ac8791da6de3f0d2857a1ca56726a6327f40455bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15eddb9054021\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15eddb9054021\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}}},{\\\"containerID\\\"
:\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:13Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.474070 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.474108 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.474117 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.474132 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.474140 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:13Z","lastTransitionTime":"2026-01-20T16:42:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.545784 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-09 01:30:49.976542393 +0000 UTC Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.576378 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.576413 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.576421 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.576433 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.576442 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:13Z","lastTransitionTime":"2026-01-20T16:42:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.678568 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.678613 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.678623 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.678636 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.678644 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:13Z","lastTransitionTime":"2026-01-20T16:42:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.700478 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-nv2xw_254129cd-82fc-4162-b671-2434bc9e2972/ovnkube-controller/1.log" Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.700880 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-nv2xw_254129cd-82fc-4162-b671-2434bc9e2972/ovnkube-controller/0.log" Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.702567 4558 generic.go:334] "Generic (PLEG): container finished" podID="254129cd-82fc-4162-b671-2434bc9e2972" containerID="41b06bf319806f8cb4eb26d154351300538b7998dbff8ab608b53a397a84aeb1" exitCode=1 Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.702620 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" event={"ID":"254129cd-82fc-4162-b671-2434bc9e2972","Type":"ContainerDied","Data":"41b06bf319806f8cb4eb26d154351300538b7998dbff8ab608b53a397a84aeb1"} Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.702696 4558 scope.go:117] "RemoveContainer" containerID="279a9cfedc0ee702bc7f170c8f36335470a947b4e68889c542867964215263ab" Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.703089 4558 scope.go:117] "RemoveContainer" containerID="41b06bf319806f8cb4eb26d154351300538b7998dbff8ab608b53a397a84aeb1" Jan 20 16:42:13 crc kubenswrapper[4558]: E0120 16:42:13.703235 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-nv2xw_openshift-ovn-kubernetes(254129cd-82fc-4162-b671-2434bc9e2972)\"" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" podUID="254129cd-82fc-4162-b671-2434bc9e2972" Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.717298 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"425eab90-fb70-4498-a98b-b95742033890\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48592e72d91cf527296f06e52fec1c16c1da21323c0644659945af109a74ebb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7e18f59b5e6b6c0be141b7efb3ccb1df4ff6e3c394bcc88fb1142c8df433493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c889e2b0e14244ef49ec057e3d5fd91687e335a983fdc99a6816abd2af643ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d434793571cbba089cc34029d951bee9063aca5
8728b791f2d1af2d68229f778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89f7ad577df11c509855a90ac8791da6de3f0d2857a1ca56726a6327f40455bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15eddb9054021\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15eddb9054021\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:13Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.728987 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30e1ff9d-8dcd-4754-9a7a-c09598fb83db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ea68e442bb2143fbaa5e2c9fe335c6b023a31c2eeaa78d60de45c6ef40c2894\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a8122c3b57ac4f0026df149fe622450d07d8b0d517ad018decd7b21d8d9c04a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7444e8fea1853bae45dbd3d52d07e3f7b94314cd29fe8c99891e515a892e569e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6438ffea42f98178c4c2c21ae01725a8b54d6a6f790f47d64faf5ecd9f1c00f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2e22f1d4e87dd06a67f49aedad280da48ac1c4ba2e438a4925cf8bd5330c4d8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0120 16:41:58.859734 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0120 16:41:58.861094 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179835226/tls.crt::/tmp/serving-cert-2179835226/tls.key\\\\\\\"\\\\nI0120 16:42:04.205247 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0120 16:42:04.207291 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0120 16:42:04.207308 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0120 16:42:04.207329 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0120 16:42:04.207333 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0120 16:42:04.210615 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0120 16:42:04.210632 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0120 16:42:04.210641 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210648 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0120 16:42:04.210655 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0120 16:42:04.210658 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0120 16:42:04.210660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0120 16:42:04.211964 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4850bde1fb538e21fc91949dc584ef0a791d718844691a44559e0513bb36203\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:13Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.736854 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"68337d27-3fa6-4a29-88b0-82e60c3739eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de7f7c23993a57d30f08686e1cd4abb5b17e108ac12dd0c7929a31574c69eeb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18a59ecb802507e0d5988eacc915fd7e0d6954518de80f61162c97f680fd8c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:13Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.746623 4558 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e190c24c1d8a3bc0241334598ce23829ada9afee282d49a0c82cd67ad82bcd02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:13Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.755953 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:13Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.764477 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:13Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.772424 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d96bp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8d2c8b-fb8e-4bff-b8d2-69570e5d9e57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe858db22acc96b062f1bc7941e05c823f30d1f9ba50bd497c57f38d57e04293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4v6jx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d96bp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:13Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.781131 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.781174 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.781184 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.781197 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.781206 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:13Z","lastTransitionTime":"2026-01-20T16:42:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.785736 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"254129cd-82fc-4162-b671-2434bc9e2972\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a6a184067bde115ef5e09fdd90e7dccfe89a9dc347af450998b91c068e7e803\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd8b93e5559d95511a1285a9aec9a7d72df6c330bcd1e1daf3e93f5494f70eeb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4264ef8ee49a8b172fb05ac5c9b0bd32350b5d9ae95293079dfeaf44f021b455\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0889cf371f8b06cc2d255c15fda97db1d5000d6f6cff29d2fdcb8667cede8a99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4265d3281e954ee5989ee008da28c6cb9fa29478e5fdf23325521d455304da9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://186a1d2caad8865abf7f565405c4a4c077038ccb67ba77927cc2392ec075a811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://41b06bf319806f8cb4eb26d154351300538b7998
dbff8ab608b53a397a84aeb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://279a9cfedc0ee702bc7f170c8f36335470a947b4e68889c542867964215263ab\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-20T16:42:12Z\\\",\\\"message\\\":\\\" 5842 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0120 16:42:12.346255 5842 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0120 16:42:12.346261 5842 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0120 16:42:12.346272 5842 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0120 16:42:12.346279 5842 handler.go:208] Removed *v1.Node event handler 2\\\\nI0120 16:42:12.346284 5842 handler.go:208] Removed *v1.Node event handler 7\\\\nI0120 16:42:12.346290 5842 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0120 16:42:12.346315 5842 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI0120 16:42:12.346513 5842 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0120 16:42:12.346548 5842 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0120 16:42:12.346724 5842 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:10Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://41b06bf319806f8cb4eb26d154351300538b7998dbff8ab608b53a397a84aeb1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-20T16:42:13Z\\\",\\\"message\\\":\\\"rt network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:13Z is after 2025-08-24T17:21:41Z]\\\\nI0120 16:42:13.263941 5970 services_controller.go:434] Service openshift-marketplace/redhat-marketplace retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{redhat-marketplace openshift-marketplace cf6d00ec-cc2c-43f6-815c-40ffd0563e71 5558 0 2025-02-23 05:23:25 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[olm.managed:true olm.service-spec-hash:aUeLNNcZzVZO2rcaZ5Kc8V3jffO0Ss4T6qX6V5] map[] 
[{operators.coreos.com/v1alpha1 CatalogSource redhat-marketplace fcb55c30-a739-4bc1-9c9c-7634e05a3dbd 0xc00757e68d 0xc00757e68e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:grpc,Protocol:TCP,P\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f41d488b0c24e594a2f5cad36b5f8efdb7d867fae0e18a74fe3c396a203d162f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContaine
rStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-nv2xw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:13Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.814936 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f5t7h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a318f03d0df490cb7bae11819c52eca011d163081aeecff1b2dd8f72caabbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e77617c69434ce20c448b525c7e86f1ece18d51ce6e14167b4ca7e99de8e5709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e77617c69434ce20c448b525c7e86f1ece18d51ce6e14167b4ca7e99de8e5709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://453c67434281e0261ec6799b30a548a01efd72d7df96632409e745b2e6a94546\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://453c67434281e0261ec6799b30a548a01efd72d7df96632409e745b2e6a94546\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06eab4daa6c8b6a564cf1bb1403b7b3b259a234ee09f23b4f4bc0628c2d58e69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06eab4daa6c8b6a564cf1bb1403b7b3b259a234ee09f23b4f4bc0628c2d58e69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1291ac09fc6ae2a79fdeaa0fc47272f61b97f45f67ed3f7c90bdfdbfe3519ff2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1291ac09fc6ae2a79fdeaa0fc47272f61b97f45f67ed3f7c90bdfdbfe3519ff2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe484514f50a14e485d4a97ea3ce9401c77f3f4dae4717719be7ed6c4d7b1a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe484514f50a14e485d4a97ea3ce9401c77f3f4dae4717719be7ed6c4d7b1a59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f5t7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:13Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.853252 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:13Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.883152 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.883201 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.883211 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.883223 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.883231 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:13Z","lastTransitionTime":"2026-01-20T16:42:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.893535 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f904dd3e4b43e51569b6532f00fa2f9edf912c1f1969ee2ea00c99d1b7e35a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c877f4a456b3ce64bdbe6fefd50f1ef3f92bf9e26e6296005cf366e0ce265\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:13Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.933108 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e45cc56934a7b90e9d47f1c6d06d8dc140d57db4a216469e8ae112f398663e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:13Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.973595 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jsqvf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bedf08c7-1f93-4931-a7f3-e729e2a137af\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e32b76495d88d356373f9389e163a2f3a72e202bb454c7015594c1f5a41b511\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xxtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jsqvf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:13Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.985302 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.985341 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.985350 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.985366 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:13 crc kubenswrapper[4558]: I0120 16:42:13.985375 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:13Z","lastTransitionTime":"2026-01-20T16:42:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:14 crc kubenswrapper[4558]: I0120 16:42:14.013011 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-47477" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e026445a-aa73-4a0e-ba35-e2e07de1278c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec2ee3ff161cba7e14ad26763081c9f092f0dfe969cef113631c27e0db53d157\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xcnwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"h
ostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-47477\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:14Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:14 crc kubenswrapper[4558]: I0120 16:42:14.053960 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a13bfefa-f525-4ecd-89f5-f10480532bf6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9030e970e96b6909d771d4fa0c3b4f493b4a825cbc43ef7e684272284329c7c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://168e8f533aaa108e4160f9ae4af6bb7944edba7a596178ffffdc5325073fc3c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e0141aef1babb57501016cb09787591b5e6d9136b8d7dd4ba64393f0be5a027\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256
:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c87f08993c2b47c63ba9652c7dafe0fdbecdcb03a7b7080664ab1f1bd0e1d602\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:14Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:14 crc kubenswrapper[4558]: I0120 16:42:14.087556 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:14 crc kubenswrapper[4558]: I0120 16:42:14.087608 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:14 crc kubenswrapper[4558]: I0120 16:42:14.087616 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:14 crc kubenswrapper[4558]: I0120 16:42:14.087630 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:14 crc kubenswrapper[4558]: I0120 16:42:14.087638 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:14Z","lastTransitionTime":"2026-01-20T16:42:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:14 crc kubenswrapper[4558]: I0120 16:42:14.189773 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:14 crc kubenswrapper[4558]: I0120 16:42:14.189823 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:14 crc kubenswrapper[4558]: I0120 16:42:14.189833 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:14 crc kubenswrapper[4558]: I0120 16:42:14.189846 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:14 crc kubenswrapper[4558]: I0120 16:42:14.189854 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:14Z","lastTransitionTime":"2026-01-20T16:42:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:14 crc kubenswrapper[4558]: I0120 16:42:14.292130 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:14 crc kubenswrapper[4558]: I0120 16:42:14.292191 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:14 crc kubenswrapper[4558]: I0120 16:42:14.292203 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:14 crc kubenswrapper[4558]: I0120 16:42:14.292217 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:14 crc kubenswrapper[4558]: I0120 16:42:14.292226 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:14Z","lastTransitionTime":"2026-01-20T16:42:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:14 crc kubenswrapper[4558]: I0120 16:42:14.394016 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:14 crc kubenswrapper[4558]: I0120 16:42:14.394049 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:14 crc kubenswrapper[4558]: I0120 16:42:14.394057 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:14 crc kubenswrapper[4558]: I0120 16:42:14.394072 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:14 crc kubenswrapper[4558]: I0120 16:42:14.394080 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:14Z","lastTransitionTime":"2026-01-20T16:42:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:14 crc kubenswrapper[4558]: I0120 16:42:14.496428 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:14 crc kubenswrapper[4558]: I0120 16:42:14.496652 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:14 crc kubenswrapper[4558]: I0120 16:42:14.496662 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:14 crc kubenswrapper[4558]: I0120 16:42:14.496677 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:14 crc kubenswrapper[4558]: I0120 16:42:14.496687 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:14Z","lastTransitionTime":"2026-01-20T16:42:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:14 crc kubenswrapper[4558]: I0120 16:42:14.546717 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-26 19:44:02.215914044 +0000 UTC Jan 20 16:42:14 crc kubenswrapper[4558]: I0120 16:42:14.564975 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:42:14 crc kubenswrapper[4558]: I0120 16:42:14.565017 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 20 16:42:14 crc kubenswrapper[4558]: I0120 16:42:14.565025 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 20 16:42:14 crc kubenswrapper[4558]: E0120 16:42:14.565112 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 20 16:42:14 crc kubenswrapper[4558]: E0120 16:42:14.565199 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 20 16:42:14 crc kubenswrapper[4558]: E0120 16:42:14.565275 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 20 16:42:14 crc kubenswrapper[4558]: I0120 16:42:14.598347 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:14 crc kubenswrapper[4558]: I0120 16:42:14.598376 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:14 crc kubenswrapper[4558]: I0120 16:42:14.598384 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:14 crc kubenswrapper[4558]: I0120 16:42:14.598399 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:14 crc kubenswrapper[4558]: I0120 16:42:14.598409 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:14Z","lastTransitionTime":"2026-01-20T16:42:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:14 crc kubenswrapper[4558]: I0120 16:42:14.700427 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:14 crc kubenswrapper[4558]: I0120 16:42:14.700465 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:14 crc kubenswrapper[4558]: I0120 16:42:14.700473 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:14 crc kubenswrapper[4558]: I0120 16:42:14.700487 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:14 crc kubenswrapper[4558]: I0120 16:42:14.700496 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:14Z","lastTransitionTime":"2026-01-20T16:42:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:14 crc kubenswrapper[4558]: I0120 16:42:14.706565 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-nv2xw_254129cd-82fc-4162-b671-2434bc9e2972/ovnkube-controller/1.log" Jan 20 16:42:14 crc kubenswrapper[4558]: I0120 16:42:14.708901 4558 scope.go:117] "RemoveContainer" containerID="41b06bf319806f8cb4eb26d154351300538b7998dbff8ab608b53a397a84aeb1" Jan 20 16:42:14 crc kubenswrapper[4558]: E0120 16:42:14.709045 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-nv2xw_openshift-ovn-kubernetes(254129cd-82fc-4162-b671-2434bc9e2972)\"" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" podUID="254129cd-82fc-4162-b671-2434bc9e2972" Jan 20 16:42:14 crc kubenswrapper[4558]: I0120 16:42:14.717674 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a13bfefa-f525-4ecd-89f5-f10480532bf6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9030e970e96b6909d771d4fa0c3b4f493b4a825cbc43ef7e684272284329c7c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://168e8f533aaa108e4160f9ae4af6bb7944edba7a596178ffffdc5325073fc3c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":
\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e0141aef1babb57501016cb09787591b5e6d9136b8d7dd4ba64393f0be5a027\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c87f08993c2b47c63ba9652c7dafe0fdbecdcb03a7b7080664ab1f1bd0e1d602\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:14Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:14 crc kubenswrapper[4558]: I0120 16:42:14.726186 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f904dd3e4b43e51569b6532f00fa2f9edf912c1f1969ee2ea00c99d1b7e35a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c877f4a456b3ce64bdbe6fefd50f1ef3f92bf9e26e6296005cf366e0ce265\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:14Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:14 crc kubenswrapper[4558]: I0120 16:42:14.733490 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e45cc56934a7b90e9d47f1c6d06d8dc140d57db4a216469e8ae112f398663e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:14Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:14 crc kubenswrapper[4558]: I0120 16:42:14.742233 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jsqvf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bedf08c7-1f93-4931-a7f3-e729e2a137af\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e32b76495d88d356373f9389e163a2f3a72e202bb454c7015594c1f5a41b511\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xxtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jsqvf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:14Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:14 crc kubenswrapper[4558]: I0120 16:42:14.749077 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-47477" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e026445a-aa73-4a0e-ba35-e2e07de1278c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec2ee3ff161cba7e14ad26763081c9f092f0dfe969cef113631c27e0db53d157\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xcnwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-47477\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:14Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:14 crc kubenswrapper[4558]: I0120 16:42:14.761748 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"425eab90-fb70-4498-a98b-b95742033890\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48592e72d91cf527296f06e52fec1c16c1da21323c0644659945af109a74ebb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7e18f59b5e6b6c0be141b7efb3ccb1df4ff6e3c394bcc88fb1142c8df433493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c889e2b0e14244ef49ec057e3d5fd91687e335a983fdc99a6816abd2af643ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d434793571cbba089cc34029d951bee9063aca5
8728b791f2d1af2d68229f778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89f7ad577df11c509855a90ac8791da6de3f0d2857a1ca56726a6327f40455bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15eddb9054021\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15eddb9054021\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:14Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:14 crc kubenswrapper[4558]: I0120 16:42:14.771236 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30e1ff9d-8dcd-4754-9a7a-c09598fb83db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ea68e442bb2143fbaa5e2c9fe335c6b023a31c2eeaa78d60de45c6ef40c2894\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a8122c3b57ac4f0026df149fe622450d07d8b0d517ad018decd7b21d8d9c04a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7444e8fea1853bae45dbd3d52d07e3f7b94314cd29fe8c99891e515a892e569e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6438ffea42f98178c4c2c21ae01725a8b54d6a6f790f47d64faf5ecd9f1c00f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2e22f1d4e87dd06a67f49aedad280da48ac1c4ba2e438a4925cf8bd5330c4d8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0120 16:41:58.859734 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0120 16:41:58.861094 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179835226/tls.crt::/tmp/serving-cert-2179835226/tls.key\\\\\\\"\\\\nI0120 16:42:04.205247 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0120 16:42:04.207291 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0120 16:42:04.207308 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0120 16:42:04.207329 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0120 16:42:04.207333 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0120 16:42:04.210615 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0120 16:42:04.210632 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0120 16:42:04.210641 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210648 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0120 16:42:04.210655 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0120 16:42:04.210658 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0120 16:42:04.210660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0120 16:42:04.211964 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4850bde1fb538e21fc91949dc584ef0a791d718844691a44559e0513bb36203\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:14Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:14 crc kubenswrapper[4558]: I0120 16:42:14.778832 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"68337d27-3fa6-4a29-88b0-82e60c3739eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de7f7c23993a57d30f08686e1cd4abb5b17e108ac12dd0c7929a31574c69eeb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18a59ecb802507e0d5988eacc915fd7e0d6954518de80f61162c97f680fd8c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:14Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:14 crc kubenswrapper[4558]: I0120 16:42:14.787440 4558 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e190c24c1d8a3bc0241334598ce23829ada9afee282d49a0c82cd67ad82bcd02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:14Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:14 crc kubenswrapper[4558]: I0120 16:42:14.797143 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:14Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:14 crc kubenswrapper[4558]: I0120 16:42:14.802522 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:14 crc kubenswrapper[4558]: I0120 16:42:14.802555 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:14 crc kubenswrapper[4558]: I0120 16:42:14.802566 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:14 crc kubenswrapper[4558]: I0120 16:42:14.802599 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:14 crc kubenswrapper[4558]: I0120 16:42:14.802608 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:14Z","lastTransitionTime":"2026-01-20T16:42:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:14 crc kubenswrapper[4558]: I0120 16:42:14.805156 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:14Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:14 crc kubenswrapper[4558]: I0120 16:42:14.813994 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:14Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:14 crc kubenswrapper[4558]: I0120 16:42:14.821197 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d96bp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8d2c8b-fb8e-4bff-b8d2-69570e5d9e57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe858db22acc96b062f1bc7941e05c823f30d1f9ba50bd497c57f38d57e04293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4v6jx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\
\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d96bp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:14Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:14 crc kubenswrapper[4558]: I0120 16:42:14.832453 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"254129cd-82fc-4162-b671-2434bc9e2972\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a6a184067bde115ef5e09fdd90e7dccfe89a9dc347af450998b91c068e7e803\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd8b93e5559d95511a1285a9aec9a7d72df6c330bcd1e1daf3e93f5494f70eeb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026
-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4264ef8ee49a8b172fb05ac5c9b0bd32350b5d9ae95293079dfeaf44f021b455\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0889cf371f8b06cc2d255c15fda97db1d5000d6f6cff29d2fdcb8667cede8a99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4265d3281e954ee5989ee008da28c6cb9fa29478e5fdf23325521d455304da9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/o
vn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://186a1d2caad8865abf7f565405c4a4c077038ccb67ba77927cc2392ec075a811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://41b06bf319806f8cb4eb26d154351300538b7998dbff8ab608b53a397a84aeb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://41b06bf319806f8cb4eb26d154351300538b7998dbff8ab608b53a397a84aeb1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-20T16:42:13Z\\\",\\\"message\\\":\\\"rt network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:13Z is after 2025-08-24T17:21:41Z]\\\\nI0120 16:42:13.263941 5970 services_controller.go:434] Service openshift-marketplace/redhat-marketplace retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{redhat-marketplace openshift-marketplace cf6d00ec-cc2c-43f6-815c-40ffd0563e71 5558 0 2025-02-23 05:23:25 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[olm.managed:true olm.service-spec-hash:aUeLNNcZzVZO2rcaZ5Kc8V3jffO0Ss4T6qX6V5] map[] 
[{operators.coreos.com/v1alpha1 CatalogSource redhat-marketplace fcb55c30-a739-4bc1-9c9c-7634e05a3dbd 0xc00757e68d 0xc00757e68e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:grpc,Protocol:TCP,P\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:12Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-nv2xw_openshift-ovn-kubernetes(254129cd-82fc-4162-b671-2434bc9e2972)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f41d488b0c24e594a2f5cad36b5f8efdb7d867fae0e18a74fe3c396a203d162f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPa
th\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-nv2xw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:14Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:14 crc kubenswrapper[4558]: I0120 16:42:14.841339 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f5t7h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a318f03d0df490cb7bae11819c52eca011d163081aeecff1b2dd8f72caabbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e77617c69434ce20c448b525c7e86f1ece18d51ce6e14167b4ca7e99de8e5709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e77617c69434ce20c448b525c7e86f1ece18d51ce6e14167b4ca7e99de8e5709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://453c67434281e0261ec6799b30a548a01efd72d7df96632409e745b2e6a94546\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://453c67434281e0261ec6799b30a548a01efd72d7df96632409e745b2e6a94546\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06eab4daa6c8b6a564cf1bb1403b7b3b259a234ee09f23b4f4bc0628c2d58e69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06eab4daa6c8b6a564cf1bb1403b7b3b259a234ee09f23b4f4bc0628c2d58e69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1291ac09fc6ae2a79fdeaa0fc47272f61b97f45f67ed3f7c90bdfdbfe3519ff2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1291ac09fc6ae2a79fdeaa0fc47272f61b97f45f67ed3f7c90bdfdbfe3519ff2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe484514f50a14e485d4a97ea3ce9401c77f3f4dae4717719be7ed6c4d7b1a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe484514f50a14e485d4a97ea3ce9401c77f3f4dae4717719be7ed6c4d7b1a59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f5t7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:14Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:14 crc kubenswrapper[4558]: I0120 16:42:14.904401 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:14 crc kubenswrapper[4558]: I0120 16:42:14.904439 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:14 crc 
kubenswrapper[4558]: I0120 16:42:14.904449 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:14 crc kubenswrapper[4558]: I0120 16:42:14.904462 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:14 crc kubenswrapper[4558]: I0120 16:42:14.904471 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:14Z","lastTransitionTime":"2026-01-20T16:42:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:15 crc kubenswrapper[4558]: I0120 16:42:15.006599 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:15 crc kubenswrapper[4558]: I0120 16:42:15.006634 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:15 crc kubenswrapper[4558]: I0120 16:42:15.006642 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:15 crc kubenswrapper[4558]: I0120 16:42:15.006671 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:15 crc kubenswrapper[4558]: I0120 16:42:15.006680 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:15Z","lastTransitionTime":"2026-01-20T16:42:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:15 crc kubenswrapper[4558]: I0120 16:42:15.108713 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:15 crc kubenswrapper[4558]: I0120 16:42:15.108748 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:15 crc kubenswrapper[4558]: I0120 16:42:15.108756 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:15 crc kubenswrapper[4558]: I0120 16:42:15.108769 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:15 crc kubenswrapper[4558]: I0120 16:42:15.108777 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:15Z","lastTransitionTime":"2026-01-20T16:42:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:15 crc kubenswrapper[4558]: I0120 16:42:15.210371 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:15 crc kubenswrapper[4558]: I0120 16:42:15.210407 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:15 crc kubenswrapper[4558]: I0120 16:42:15.210416 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:15 crc kubenswrapper[4558]: I0120 16:42:15.210430 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:15 crc kubenswrapper[4558]: I0120 16:42:15.210439 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:15Z","lastTransitionTime":"2026-01-20T16:42:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:15 crc kubenswrapper[4558]: I0120 16:42:15.312256 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:15 crc kubenswrapper[4558]: I0120 16:42:15.312325 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:15 crc kubenswrapper[4558]: I0120 16:42:15.312335 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:15 crc kubenswrapper[4558]: I0120 16:42:15.312348 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:15 crc kubenswrapper[4558]: I0120 16:42:15.312358 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:15Z","lastTransitionTime":"2026-01-20T16:42:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:15 crc kubenswrapper[4558]: I0120 16:42:15.414472 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:15 crc kubenswrapper[4558]: I0120 16:42:15.414505 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:15 crc kubenswrapper[4558]: I0120 16:42:15.414512 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:15 crc kubenswrapper[4558]: I0120 16:42:15.414526 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:15 crc kubenswrapper[4558]: I0120 16:42:15.414537 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:15Z","lastTransitionTime":"2026-01-20T16:42:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:15 crc kubenswrapper[4558]: I0120 16:42:15.516064 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:15 crc kubenswrapper[4558]: I0120 16:42:15.516101 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:15 crc kubenswrapper[4558]: I0120 16:42:15.516110 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:15 crc kubenswrapper[4558]: I0120 16:42:15.516123 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:15 crc kubenswrapper[4558]: I0120 16:42:15.516133 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:15Z","lastTransitionTime":"2026-01-20T16:42:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:15 crc kubenswrapper[4558]: I0120 16:42:15.547754 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-24 03:16:07.710475445 +0000 UTC Jan 20 16:42:15 crc kubenswrapper[4558]: I0120 16:42:15.617897 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:15 crc kubenswrapper[4558]: I0120 16:42:15.617934 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:15 crc kubenswrapper[4558]: I0120 16:42:15.617942 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:15 crc kubenswrapper[4558]: I0120 16:42:15.617955 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:15 crc kubenswrapper[4558]: I0120 16:42:15.617964 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:15Z","lastTransitionTime":"2026-01-20T16:42:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:15 crc kubenswrapper[4558]: I0120 16:42:15.720032 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:15 crc kubenswrapper[4558]: I0120 16:42:15.720076 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:15 crc kubenswrapper[4558]: I0120 16:42:15.720087 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:15 crc kubenswrapper[4558]: I0120 16:42:15.720103 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:15 crc kubenswrapper[4558]: I0120 16:42:15.720114 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:15Z","lastTransitionTime":"2026-01-20T16:42:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:15 crc kubenswrapper[4558]: I0120 16:42:15.822035 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:15 crc kubenswrapper[4558]: I0120 16:42:15.822075 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:15 crc kubenswrapper[4558]: I0120 16:42:15.822088 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:15 crc kubenswrapper[4558]: I0120 16:42:15.822101 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:15 crc kubenswrapper[4558]: I0120 16:42:15.822109 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:15Z","lastTransitionTime":"2026-01-20T16:42:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:15 crc kubenswrapper[4558]: I0120 16:42:15.924092 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:15 crc kubenswrapper[4558]: I0120 16:42:15.924126 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:15 crc kubenswrapper[4558]: I0120 16:42:15.924134 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:15 crc kubenswrapper[4558]: I0120 16:42:15.924147 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:15 crc kubenswrapper[4558]: I0120 16:42:15.924156 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:15Z","lastTransitionTime":"2026-01-20T16:42:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.026687 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.026731 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.026742 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.026757 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.026769 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:16Z","lastTransitionTime":"2026-01-20T16:42:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.128403 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.128437 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.128446 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.128458 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.128466 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:16Z","lastTransitionTime":"2026-01-20T16:42:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.230711 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.230747 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.230756 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.230771 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.230780 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:16Z","lastTransitionTime":"2026-01-20T16:42:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.332998 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.333038 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.333049 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.333062 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.333071 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:16Z","lastTransitionTime":"2026-01-20T16:42:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.410689 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfwnl"] Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.411099 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfwnl" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.412352 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.413506 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.421438 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:16Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.428704 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d96bp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8d2c8b-fb8e-4bff-b8d2-69570e5d9e57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe858db22acc96b062f1bc7941e05c823f30d1f9ba50bd497c57f38d57e04293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4v6jx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d96bp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-20T16:42:16Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.434890 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.434917 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.434927 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.434941 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.434950 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:16Z","lastTransitionTime":"2026-01-20T16:42:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.442725 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"254129cd-82fc-4162-b671-2434bc9e2972\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a6a184067bde115ef5e09fdd90e7dccfe89a9dc347af450998b91c068e7e803\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd8b93e5559d95511a1285a9aec9a7d72df6c330bcd1e1daf3e93f5494f70eeb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4264ef8ee49a8b172fb05ac5c9b0bd32350b5d9ae95293079dfeaf44f021b455\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0889cf371f8b06cc2d255c15fda97db1d5000d6f6cff29d2fdcb8667cede8a99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4265d3281e954ee5989ee008da28c6cb9fa29478e5fdf23325521d455304da9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://186a1d2caad8865abf7f565405c4a4c077038ccb67ba77927cc2392ec075a811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://41b06bf319806f8cb4eb26d154351300538b7998
dbff8ab608b53a397a84aeb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://41b06bf319806f8cb4eb26d154351300538b7998dbff8ab608b53a397a84aeb1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-20T16:42:13Z\\\",\\\"message\\\":\\\"rt network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:13Z is after 2025-08-24T17:21:41Z]\\\\nI0120 16:42:13.263941 5970 services_controller.go:434] Service openshift-marketplace/redhat-marketplace retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{redhat-marketplace openshift-marketplace cf6d00ec-cc2c-43f6-815c-40ffd0563e71 5558 0 2025-02-23 05:23:25 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[olm.managed:true olm.service-spec-hash:aUeLNNcZzVZO2rcaZ5Kc8V3jffO0Ss4T6qX6V5] map[] [{operators.coreos.com/v1alpha1 CatalogSource redhat-marketplace fcb55c30-a739-4bc1-9c9c-7634e05a3dbd 0xc00757e68d 0xc00757e68e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:grpc,Protocol:TCP,P\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:12Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-nv2xw_openshift-ovn-kubernetes(254129cd-82fc-4162-b671-2434bc9e2972)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f41d488b0c24e594a2f5cad36b5f8efdb7d867fae0e18a74fe3c396a203d162f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-nv2xw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:16Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.452995 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f5t7h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a318f03d0df490cb7bae11819c52eca011d163081aeecff1b2dd8f72caabbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"
}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e77617c69434ce20c448b525c7e86f1ece18d51ce6e14167b4ca7e99de8e5709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e77617c69434ce20c448b525c7e86f1ece18d51ce6e14167b4ca7e99de8e5709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://453c67434281e0261ec6799b30a548a01efd72d7df96632409e745b2e6a94546\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://453c67434281e0261ec6799b30a548a01efd72d7df96632409e745b2e6a94546\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt
\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06eab4daa6c8b6a564cf1bb1403b7b3b259a234ee09f23b4f4bc0628c2d58e69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06eab4daa6c8b6a564cf1bb1403b7b3b259a234ee09f23b4f4bc0628c2d58e69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1291ac09fc6ae2a79fdeaa0fc47272f61b97f45f67ed3f7c90bdfdbfe3519ff2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1291ac09fc6ae2a79fdeaa0fc47272f61b97f45f67ed3f7c90bdfdbfe3519ff2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe484514f50a14e485d4a97ea3ce9401c77f3f4dae4717719be7ed6c4d7b1a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"nam
e\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe484514f50a14e485d4a97ea3ce9401c77f3f4dae4717719be7ed6c4d7b1a59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f5t7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:16Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.462467 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a13bfefa-f525-4ecd-89f5-f10480532bf6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9030e970e96b6909d771d4fa0c3b4f493b4a825cbc43ef7e684272284329c7c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://168e8f533aaa108e4160f9ae4af6bb7944edba7a596178ffffdc5325073fc3c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee122
0d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e0141aef1babb57501016cb09787591b5e6d9136b8d7dd4ba64393f0be5a027\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c87f08993c2b47c63ba9652c7dafe0fdbecdcb03a7b7080664ab1f1bd0e1d602\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:16Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.471131 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f904dd3e4b43e51569b6532f00fa2f9edf912c1f1969ee2ea00c99d1b7e35a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c877f4a456b3ce64bdbe6fefd50f1ef3f92bf9e26e6296005cf366e0ce265\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:16Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.479803 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e45cc56934a7b90e9d47f1c6d06d8dc140d57db4a216469e8ae112f398663e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:16Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.489088 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jsqvf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bedf08c7-1f93-4931-a7f3-e729e2a137af\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e32b76495d88d356373f9389e163a2f3a72e202bb454c7015594c1f5a41b511\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xxtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jsqvf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:16Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.495885 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-47477" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e026445a-aa73-4a0e-ba35-e2e07de1278c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec2ee3ff161cba7e14ad26763081c9f092f0dfe969cef113631c27e0db53d157\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xcnwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-47477\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:16Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.508232 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"425eab90-fb70-4498-a98b-b95742033890\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48592e72d91cf527296f06e52fec1c16c1da21323c0644659945af109a74ebb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7e18f59b5e6b6c0be141b7efb3ccb1df4ff6e3c394bcc88fb1142c8df433493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c889e2b0e14244ef49ec057e3d5fd91687e335a983fdc99a6816abd2af643ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d434793571cbba089cc34029d951bee9063aca5
8728b791f2d1af2d68229f778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89f7ad577df11c509855a90ac8791da6de3f0d2857a1ca56726a6327f40455bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15eddb9054021\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15eddb9054021\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:16Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.517411 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30e1ff9d-8dcd-4754-9a7a-c09598fb83db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ea68e442bb2143fbaa5e2c9fe335c6b023a31c2eeaa78d60de45c6ef40c2894\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a8122c3b57ac4f0026df149fe622450d07d8b0d517ad018decd7b21d8d9c04a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7444e8fea1853bae45dbd3d52d07e3f7b94314cd29fe8c99891e515a892e569e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6438ffea42f98178c4c2c21ae01725a8b54d6a6f790f47d64faf5ecd9f1c00f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2e22f1d4e87dd06a67f49aedad280da48ac1c4ba2e438a4925cf8bd5330c4d8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0120 16:41:58.859734 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0120 16:41:58.861094 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179835226/tls.crt::/tmp/serving-cert-2179835226/tls.key\\\\\\\"\\\\nI0120 16:42:04.205247 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0120 16:42:04.207291 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0120 16:42:04.207308 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0120 16:42:04.207329 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0120 16:42:04.207333 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0120 16:42:04.210615 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0120 16:42:04.210632 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0120 16:42:04.210641 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210648 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0120 16:42:04.210655 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0120 16:42:04.210658 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0120 16:42:04.210660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0120 16:42:04.211964 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4850bde1fb538e21fc91949dc584ef0a791d718844691a44559e0513bb36203\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:16Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.524591 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"68337d27-3fa6-4a29-88b0-82e60c3739eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de7f7c23993a57d30f08686e1cd4abb5b17e108ac12dd0c7929a31574c69eeb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18a59ecb802507e0d5988eacc915fd7e0d6954518de80f61162c97f680fd8c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:16Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.533506 4558 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e190c24c1d8a3bc0241334598ce23829ada9afee282d49a0c82cd67ad82bcd02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:16Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.536596 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.536629 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.536639 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.536653 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.536661 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:16Z","lastTransitionTime":"2026-01-20T16:42:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.542344 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:16Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.547827 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-13 19:43:02.390995062 +0000 UTC Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.550090 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:16Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.556979 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfwnl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8fe5472-7373-4e5d-87fc-a9ecaf26a5cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9nvl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9nvl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rfwnl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:16Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.565180 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.565193 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 20 16:42:16 crc kubenswrapper[4558]: E0120 16:42:16.565273 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.565329 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:42:16 crc kubenswrapper[4558]: E0120 16:42:16.565386 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 20 16:42:16 crc kubenswrapper[4558]: E0120 16:42:16.565469 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.572859 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:16Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.579465 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d96bp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8d2c8b-fb8e-4bff-b8d2-69570e5d9e57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe858db22acc96b062f1bc7941e05c823f30d1f9ba50bd497c57f38d57e04293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4v6jx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d96bp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-20T16:42:16Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.592519 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"254129cd-82fc-4162-b671-2434bc9e2972\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a6a184067bde115ef5e09fdd90e7dccfe89a9dc347af450998b91c068e7e803\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd8b93e5559d95511a1285a9aec9a7d72df6c330bcd1e1daf3e93f5494f70eeb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\
"containerID\\\":\\\"cri-o://4264ef8ee49a8b172fb05ac5c9b0bd32350b5d9ae95293079dfeaf44f021b455\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0889cf371f8b06cc2d255c15fda97db1d5000d6f6cff29d2fdcb8667cede8a99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4265d3281e954ee5989ee008da28c6cb9fa29478e5fdf23325521d455304da9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://186a1d2caad8865abf7f565405c4a4c077038ccb67ba77927cc2392ec075a811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://41b06bf319806f8cb4eb26d154351300538b7998dbff8ab608b53a397a84aeb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://41b06bf319806f8cb4eb26d154351300538b7998dbff8ab608b53a397a84aeb1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-20T16:42:13Z\\\",\\\"message\\\":\\\"rt network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:13Z is after 2025-08-24T17:21:41Z]\\\\nI0120 16:42:13.263941 5970 services_controller.go:434] Service openshift-marketplace/redhat-marketplace retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{redhat-marketplace openshift-marketplace cf6d00ec-cc2c-43f6-815c-40ffd0563e71 5558 0 2025-02-23 05:23:25 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[olm.managed:true olm.service-spec-hash:aUeLNNcZzVZO2rcaZ5Kc8V3jffO0Ss4T6qX6V5] map[] [{operators.coreos.com/v1alpha1 CatalogSource redhat-marketplace fcb55c30-a739-4bc1-9c9c-7634e05a3dbd 0xc00757e68d 0xc00757e68e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:grpc,Protocol:TCP,P\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:12Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed 
container=ovnkube-controller pod=ovnkube-node-nv2xw_openshift-ovn-kubernetes(254129cd-82fc-4162-b671-2434bc9e2972)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f41d488b0c24e594a2f5cad36b5f8efdb7d867fae0e18a74fe3c396a203d162f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\\\",\\\"image\\\":\
\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-nv2xw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:16Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.601782 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/e8fe5472-7373-4e5d-87fc-a9ecaf26a5cd-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-rfwnl\" (UID: \"e8fe5472-7373-4e5d-87fc-a9ecaf26a5cd\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfwnl" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.601829 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e8fe5472-7373-4e5d-87fc-a9ecaf26a5cd-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-rfwnl\" (UID: \"e8fe5472-7373-4e5d-87fc-a9ecaf26a5cd\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfwnl" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.601868 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9nvl2\" (UniqueName: \"kubernetes.io/projected/e8fe5472-7373-4e5d-87fc-a9ecaf26a5cd-kube-api-access-9nvl2\") pod \"ovnkube-control-plane-749d76644c-rfwnl\" (UID: \"e8fe5472-7373-4e5d-87fc-a9ecaf26a5cd\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfwnl" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.601895 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e8fe5472-7373-4e5d-87fc-a9ecaf26a5cd-env-overrides\") pod \"ovnkube-control-plane-749d76644c-rfwnl\" (UID: \"e8fe5472-7373-4e5d-87fc-a9ecaf26a5cd\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfwnl" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.602067 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f5t7h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a318f03d0df490cb7bae11819c52eca011d163081aeecff1b2dd8f72caabbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e77617c69434ce20c448b525c7e86f1ece18d51ce6e14167b4ca7e99de8e5709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e77617c69434ce20c448b525c7e86f1ece18d51ce6e14167b4ca7e99de8e5709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://453c67434281e0261ec6799b30a548a01efd72d7df96632409e745b2e6a94546\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://453c67434281e0261ec6799b30a548a01efd72d7df96632409e745b2e6a94546\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06eab4daa6c8b6a564cf1bb1403b7b3b259a234ee09f23b4f4bc0628c2d58e69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06eab4daa6c8b6a564cf1bb1403b7b3b259a234ee09f23b4f4bc0628c2d58e69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1291ac09fc6ae2a79fdeaa0fc47272f61b97f45f67ed3f7c90bdfdbfe3519ff2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1291ac09fc6ae2a79fdeaa0fc47272f61b97f45f67ed3f7c90bdfdbfe3519ff2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe484514f50a14e485d4a97ea3ce9401c77f3f4dae4717719be7ed6c4d7b1a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe484514f50a14e485d4a97ea3ce9401c77f3f4dae4717719be7ed6c4d7b1a59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f5t7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:16Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.610284 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a13bfefa-f525-4ecd-89f5-f10480532bf6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9030e970e96b6909d771d4fa0c3b4f493b4a825cbc43ef7e684272284329c7c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://168e8f533aaa108e4160f9ae4af6bb7944edba7a596178ffffdc5325073fc3c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e0141aef1babb57501016cb09787591b5e6d9136b8d7dd4ba64393f0be5a027\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c87f08993c2b47c63ba9652c7dafe0fdbecdcb03a7b7080664ab1f1bd0e1d602\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:16Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.620960 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f904dd3e4b43e51569b6532f00fa2f9edf912c1f1969ee2ea00c99d1b7e35a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c877f4a456b3ce64bdbe6fefd50f1ef3f92bf9e26e6296005cf366e0ce265\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:16Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.628062 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e45cc56934a7b90e9d47f1c6d06d8dc140d57db4a216469e8ae112f398663e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:16Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.636202 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jsqvf" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bedf08c7-1f93-4931-a7f3-e729e2a137af\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e32b76495d88d356373f9389e163a2f3a72e202bb454c7015594c1f5a41b511\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xxtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jsqvf\": Internal error occurred: failed 
calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:16Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.638668 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.638699 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.638707 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.638722 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.638731 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:16Z","lastTransitionTime":"2026-01-20T16:42:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.645061 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-47477" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e026445a-aa73-4a0e-ba35-e2e07de1278c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec2ee3ff161cba7e14ad26763081c9f092f0dfe969cef113631c27e0db53d157\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xcnwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabl
ed\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-47477\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:16Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.659149 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"425eab90-fb70-4498-a98b-b95742033890\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48592e72d91cf527296f06e52fec1c16c1da21323c0644659945af109a74ebb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7e18f59b5e6b6c0be141b7efb3ccb1df4ff6e3c394bcc88fb1142c8df433493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"container
ID\\\":\\\"cri-o://c889e2b0e14244ef49ec057e3d5fd91687e335a983fdc99a6816abd2af643ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d434793571cbba089cc34029d951bee9063aca58728b791f2d1af2d68229f778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89f7ad577df11c509855a90ac8791da6de3f0d2857a1ca56726a6327f40455bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4
eb1ba4c53c1ed15eddb9054021\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15eddb9054021\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:16Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.669376 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30e1ff9d-8dcd-4754-9a7a-c09598fb83db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ea68e442bb2143fbaa5e2c9fe335c6b023a31c2eeaa78d60de45c6ef40c2894\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a8122c3b57ac4f0026df149fe622450d07d8b0d517ad018decd7b21d8d9c04a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7444e8fea1853bae45dbd3d52d07e3f7b94314cd29fe8c99891e515a892e569e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6438ffea42f98178c4c2c21ae01725a8b54d6a6f790f47d64faf5ecd9f1c00f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2e22f1d4e87dd06a67f49aedad280da48ac1c4ba2e438a4925cf8bd5330c4d8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0120 16:41:58.859734 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0120 16:41:58.861094 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179835226/tls.crt::/tmp/serving-cert-2179835226/tls.key\\\\\\\"\\\\nI0120 16:42:04.205247 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0120 16:42:04.207291 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0120 16:42:04.207308 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0120 16:42:04.207329 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0120 16:42:04.207333 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0120 16:42:04.210615 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0120 16:42:04.210632 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0120 16:42:04.210641 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210648 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0120 16:42:04.210655 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0120 16:42:04.210658 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0120 16:42:04.210660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0120 16:42:04.211964 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4850bde1fb538e21fc91949dc584ef0a791d718844691a44559e0513bb36203\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:16Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.677079 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"68337d27-3fa6-4a29-88b0-82e60c3739eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de7f7c23993a57d30f08686e1cd4abb5b17e108ac12dd0c7929a31574c69eeb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18a59ecb802507e0d5988eacc915fd7e0d6954518de80f61162c97f680fd8c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:16Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.688668 4558 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e190c24c1d8a3bc0241334598ce23829ada9afee282d49a0c82cd67ad82bcd02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:16Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.696435 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:16Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.702552 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9nvl2\" (UniqueName: \"kubernetes.io/projected/e8fe5472-7373-4e5d-87fc-a9ecaf26a5cd-kube-api-access-9nvl2\") pod \"ovnkube-control-plane-749d76644c-rfwnl\" (UID: \"e8fe5472-7373-4e5d-87fc-a9ecaf26a5cd\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfwnl" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.702604 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e8fe5472-7373-4e5d-87fc-a9ecaf26a5cd-env-overrides\") pod \"ovnkube-control-plane-749d76644c-rfwnl\" (UID: \"e8fe5472-7373-4e5d-87fc-a9ecaf26a5cd\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfwnl" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.702653 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/e8fe5472-7373-4e5d-87fc-a9ecaf26a5cd-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-rfwnl\" (UID: \"e8fe5472-7373-4e5d-87fc-a9ecaf26a5cd\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfwnl" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.702703 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e8fe5472-7373-4e5d-87fc-a9ecaf26a5cd-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-rfwnl\" (UID: \"e8fe5472-7373-4e5d-87fc-a9ecaf26a5cd\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfwnl" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.703372 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/e8fe5472-7373-4e5d-87fc-a9ecaf26a5cd-env-overrides\") pod \"ovnkube-control-plane-749d76644c-rfwnl\" (UID: \"e8fe5472-7373-4e5d-87fc-a9ecaf26a5cd\") " 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfwnl" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.703446 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/e8fe5472-7373-4e5d-87fc-a9ecaf26a5cd-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-rfwnl\" (UID: \"e8fe5472-7373-4e5d-87fc-a9ecaf26a5cd\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfwnl" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.703783 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:16Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.706884 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/e8fe5472-7373-4e5d-87fc-a9ecaf26a5cd-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-rfwnl\" (UID: \"e8fe5472-7373-4e5d-87fc-a9ecaf26a5cd\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfwnl" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.711993 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfwnl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8fe5472-7373-4e5d-87fc-a9ecaf26a5cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9nvl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9nvl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rfwnl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:16Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.714471 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9nvl2\" (UniqueName: \"kubernetes.io/projected/e8fe5472-7373-4e5d-87fc-a9ecaf26a5cd-kube-api-access-9nvl2\") pod \"ovnkube-control-plane-749d76644c-rfwnl\" (UID: \"e8fe5472-7373-4e5d-87fc-a9ecaf26a5cd\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfwnl" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.720339 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfwnl" Jan 20 16:42:16 crc kubenswrapper[4558]: W0120 16:42:16.729376 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode8fe5472_7373_4e5d_87fc_a9ecaf26a5cd.slice/crio-a23076fec9ee28933ae51102113190180672715c2302b13e47b83484c393e71c WatchSource:0}: Error finding container a23076fec9ee28933ae51102113190180672715c2302b13e47b83484c393e71c: Status 404 returned error can't find the container with id a23076fec9ee28933ae51102113190180672715c2302b13e47b83484c393e71c Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.740675 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.740728 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.740766 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.740779 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.740788 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:16Z","lastTransitionTime":"2026-01-20T16:42:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.842962 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.842994 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.843002 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.843017 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.843026 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:16Z","lastTransitionTime":"2026-01-20T16:42:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.944929 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.944966 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.944975 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.944990 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:16 crc kubenswrapper[4558]: I0120 16:42:16.944999 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:16Z","lastTransitionTime":"2026-01-20T16:42:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.047102 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.047146 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.047156 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.047187 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.047198 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:17Z","lastTransitionTime":"2026-01-20T16:42:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.148787 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.148826 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.148835 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.148849 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.148858 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:17Z","lastTransitionTime":"2026-01-20T16:42:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.250614 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.250646 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.250654 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.250667 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.250676 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:17Z","lastTransitionTime":"2026-01-20T16:42:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.352266 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.352299 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.352307 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.352318 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.352326 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:17Z","lastTransitionTime":"2026-01-20T16:42:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.406838 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.406882 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.406892 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.406908 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.406921 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:17Z","lastTransitionTime":"2026-01-20T16:42:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:17 crc kubenswrapper[4558]: E0120 16:42:17.416420 4558 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"bf027402-7a62-478b-8585-220c98495823\\\",\\\"systemUUID\\\":\\\"1def282c-e24e-4bc0-9a1b-d7cc30756d3d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:17Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.418991 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.419027 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.419036 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.419049 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.419057 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:17Z","lastTransitionTime":"2026-01-20T16:42:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:17 crc kubenswrapper[4558]: E0120 16:42:17.427366 4558 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"bf027402-7a62-478b-8585-220c98495823\\\",\\\"systemUUID\\\":\\\"1def282c-e24e-4bc0-9a1b-d7cc30756d3d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:17Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.429363 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.429483 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.429577 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.429658 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.429733 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:17Z","lastTransitionTime":"2026-01-20T16:42:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:17 crc kubenswrapper[4558]: E0120 16:42:17.438191 4558 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"bf027402-7a62-478b-8585-220c98495823\\\",\\\"systemUUID\\\":\\\"1def282c-e24e-4bc0-9a1b-d7cc30756d3d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:17Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.441264 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.441405 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.441495 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.441569 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.441648 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:17Z","lastTransitionTime":"2026-01-20T16:42:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:17 crc kubenswrapper[4558]: E0120 16:42:17.449675 4558 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"bf027402-7a62-478b-8585-220c98495823\\\",\\\"systemUUID\\\":\\\"1def282c-e24e-4bc0-9a1b-d7cc30756d3d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:17Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.451815 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.451853 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.451861 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.451875 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.451885 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:17Z","lastTransitionTime":"2026-01-20T16:42:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:17 crc kubenswrapper[4558]: E0120 16:42:17.460281 4558 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"bf027402-7a62-478b-8585-220c98495823\\\",\\\"systemUUID\\\":\\\"1def282c-e24e-4bc0-9a1b-d7cc30756d3d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:17Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:17 crc kubenswrapper[4558]: E0120 16:42:17.460519 4558 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.461399 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.461485 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.461546 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.461622 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.461697 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:17Z","lastTransitionTime":"2026-01-20T16:42:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.548291 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-22 10:12:10.939667863 +0000 UTC Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.564209 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.564250 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.564258 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.564272 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.564281 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:17Z","lastTransitionTime":"2026-01-20T16:42:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.666187 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.666435 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.666515 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.666601 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.666658 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:17Z","lastTransitionTime":"2026-01-20T16:42:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.717126 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfwnl" event={"ID":"e8fe5472-7373-4e5d-87fc-a9ecaf26a5cd","Type":"ContainerStarted","Data":"a14e9281ff19e324499722aefadf1e1f2fd005089c153918ad4d8a7f5bdfa853"} Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.717326 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfwnl" event={"ID":"e8fe5472-7373-4e5d-87fc-a9ecaf26a5cd","Type":"ContainerStarted","Data":"38cd58ecc67d337df2c29b79e5460ea23d4acc1e66de8b08ea70d7ab7b30691d"} Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.717391 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfwnl" event={"ID":"e8fe5472-7373-4e5d-87fc-a9ecaf26a5cd","Type":"ContainerStarted","Data":"a23076fec9ee28933ae51102113190180672715c2302b13e47b83484c393e71c"} Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.730974 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"254129cd-82fc-4162-b671-2434bc9e2972\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a6a184067bde115ef5e09fdd90e7dccfe89a9dc347af450998b91c068e7e803\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd8b93e5559d95511a1285a9aec9a7d72df6c330bcd1e1daf3e93f5494f70eeb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4264ef8ee49a8b172fb05ac5c9b0bd32350b5d9ae95293079dfeaf44f021b455\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0889cf371f8b06cc2d255c15fda97db1d5000d6f6cff29d2fdcb8667cede8a99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4265d3281e954ee5989ee008da28c6cb9fa29478e5fdf23325521d455304da9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://186a1d2caad8865abf7f565405c4a4c077038ccb67ba77927cc2392ec075a811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://41b06bf319806f8cb4eb26d154351300538b7998
dbff8ab608b53a397a84aeb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://41b06bf319806f8cb4eb26d154351300538b7998dbff8ab608b53a397a84aeb1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-20T16:42:13Z\\\",\\\"message\\\":\\\"rt network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:13Z is after 2025-08-24T17:21:41Z]\\\\nI0120 16:42:13.263941 5970 services_controller.go:434] Service openshift-marketplace/redhat-marketplace retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{redhat-marketplace openshift-marketplace cf6d00ec-cc2c-43f6-815c-40ffd0563e71 5558 0 2025-02-23 05:23:25 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[olm.managed:true olm.service-spec-hash:aUeLNNcZzVZO2rcaZ5Kc8V3jffO0Ss4T6qX6V5] map[] [{operators.coreos.com/v1alpha1 CatalogSource redhat-marketplace fcb55c30-a739-4bc1-9c9c-7634e05a3dbd 0xc00757e68d 0xc00757e68e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:grpc,Protocol:TCP,P\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:12Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-nv2xw_openshift-ovn-kubernetes(254129cd-82fc-4162-b671-2434bc9e2972)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f41d488b0c24e594a2f5cad36b5f8efdb7d867fae0e18a74fe3c396a203d162f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-nv2xw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:17Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.740766 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f5t7h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a318f03d0df490cb7bae11819c52eca011d163081aeecff1b2dd8f72caabbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"
}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e77617c69434ce20c448b525c7e86f1ece18d51ce6e14167b4ca7e99de8e5709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e77617c69434ce20c448b525c7e86f1ece18d51ce6e14167b4ca7e99de8e5709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://453c67434281e0261ec6799b30a548a01efd72d7df96632409e745b2e6a94546\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://453c67434281e0261ec6799b30a548a01efd72d7df96632409e745b2e6a94546\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt
\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06eab4daa6c8b6a564cf1bb1403b7b3b259a234ee09f23b4f4bc0628c2d58e69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06eab4daa6c8b6a564cf1bb1403b7b3b259a234ee09f23b4f4bc0628c2d58e69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1291ac09fc6ae2a79fdeaa0fc47272f61b97f45f67ed3f7c90bdfdbfe3519ff2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1291ac09fc6ae2a79fdeaa0fc47272f61b97f45f67ed3f7c90bdfdbfe3519ff2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe484514f50a14e485d4a97ea3ce9401c77f3f4dae4717719be7ed6c4d7b1a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"nam
e\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe484514f50a14e485d4a97ea3ce9401c77f3f4dae4717719be7ed6c4d7b1a59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f5t7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:17Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.748826 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:17Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.755151 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d96bp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8d2c8b-fb8e-4bff-b8d2-69570e5d9e57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe858db22acc96b062f1bc7941e05c823f30d1f9ba50bd497c57f38d57e04293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4v6jx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d96bp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-20T16:42:17Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.762750 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e45cc56934a7b90e9d47f1c6d06d8dc140d57db4a216469e8ae112f398663e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:17Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.768375 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.768502 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.768563 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.768637 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.768689 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:17Z","lastTransitionTime":"2026-01-20T16:42:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.771423 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jsqvf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bedf08c7-1f93-4931-a7f3-e729e2a137af\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e32b76495d88d356373f9389e163a2f3a72e202bb454c7015594c1f5a41b511\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xxtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jsqvf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:17Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.780021 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-47477" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e026445a-aa73-4a0e-ba35-e2e07de1278c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec2ee3ff161cba7e14ad26763081c9f092f0dfe969cef113631c27e0db53d157\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xcnwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-47477\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:17Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.789827 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a13bfefa-f525-4ecd-89f5-f10480532bf6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9030e970e96b6909d771d4fa0c3b4f493b4a825cbc43ef7e684272284329c7c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://168e8f533aaa108e4160f9ae4af6bb7944edba7a596178ffffdc5325073fc3c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e0141aef1babb57501016cb09787591b5e6d9136b8d7dd4ba64393f0be5a027\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c87f08993c2b47c63ba9652c7dafe0fdbecdcb03a7b7080664ab1f1bd0e1d602\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:17Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.799568 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f904dd3e4b43e51569b6532f00fa2f9edf912c1f1969ee2ea00c99d1b7e35a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c877f4a456b3ce64bdbe6fefd50f1ef3f92bf9e26e6296005cf366e0ce265\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:17Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.809364 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30e1ff9d-8dcd-4754-9a7a-c09598fb83db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ea68e442bb2143fbaa5e2c9fe335c6b023a31c2eeaa78d60de45c6ef40c2894\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a8122c3b57ac4f0026df149fe622450d07d8b0d517ad018decd7b21d8d9c04a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7444e8fea1853bae45dbd3d52d07e3f7b94314cd29fe8c99891e515a892e569e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6438ffea42f98178c4c2c21ae01725a8b54d6a6f790f47d64faf5ecd9f1c00f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2e22f1d4e87dd06a67f49aedad280da48ac1c4ba2e438a4925cf8bd5330c4d8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0120 16:41:58.859734 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0120 16:41:58.861094 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179835226/tls.crt::/tmp/serving-cert-2179835226/tls.key\\\\\\\"\\\\nI0120 16:42:04.205247 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0120 16:42:04.207291 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0120 16:42:04.207308 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0120 16:42:04.207329 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0120 16:42:04.207333 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0120 16:42:04.210615 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0120 16:42:04.210632 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0120 16:42:04.210641 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210648 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0120 16:42:04.210655 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0120 16:42:04.210658 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0120 16:42:04.210660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0120 16:42:04.211964 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4850bde1fb538e21fc91949dc584ef0a791d718844691a44559e0513bb36203\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:17Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.817229 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"68337d27-3fa6-4a29-88b0-82e60c3739eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de7f7c23993a57d30f08686e1cd4abb5b17e108ac12dd0c7929a31574c69eeb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18a59ecb802507e0d5988eacc915fd7e0d6954518de80f61162c97f680fd8c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:17Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.819654 4558 kubelet.go:2421] "SyncLoop ADD" 
source="api" pods=["openshift-multus/network-metrics-daemon-9wrq6"] Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.820037 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9wrq6" Jan 20 16:42:17 crc kubenswrapper[4558]: E0120 16:42:17.820093 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9wrq6" podUID="30032328-bd33-4073-9366-e10bc5e2aa77" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.830854 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"425eab90-fb70-4498-a98b-b95742033890\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48592e72d91cf527296f06e52fec1c16c1da21323c0644659945af109a74ebb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7e18f59b5e6b6c0be141b7efb3ccb1df4ff6e3c394bcc88fb1142c8df433493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\
\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c889e2b0e14244ef49ec057e3d5fd91687e335a983fdc99a6816abd2af643ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d434793571cbba089cc34029d951bee9063aca58728b791f2d1af2d68229f778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89f7ad577df11c509855a90ac8791da6de3f0d2857a1ca56726a6327f40455bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]
},{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15eddb9054021\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15eddb9054021\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:17Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.839924 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:17Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.848255 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:17Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.856188 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfwnl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8fe5472-7373-4e5d-87fc-a9ecaf26a5cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38cd58ecc67d337df2c29b79e5460ea23d4acc1e66de8b08ea70d7ab7b30691d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9nvl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a14e9281ff19e324499722aefadf1e1f2fd005089c153918ad4d8a7f5bdfa853\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2
099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9nvl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rfwnl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:17Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.865559 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e190c24c1d8a3bc0241334598ce23829ada9afee282d49a0c82cd67ad82bcd02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current 
time 2026-01-20T16:42:17Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.870269 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.870298 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.870306 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.870320 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.870328 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:17Z","lastTransitionTime":"2026-01-20T16:42:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.874317 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e190c24c1d8a3bc0241334598ce23829ada9afee282d49a0c82cd67ad82bcd02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:17Z is after 
2025-08-24T17:21:41Z" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.883299 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:17Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.891142 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:17Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.898332 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfwnl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8fe5472-7373-4e5d-87fc-a9ecaf26a5cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38cd58ecc67d337df2c29b79e5460ea23d4acc1e66de8b08ea70d7ab7b30691d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9nvl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a14e9281ff19e324499722aefadf1e1f2fd005089c153918ad4d8a7f5bdfa853\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9nvl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rfwnl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:17Z is after 2025-08-24T17:21:41Z" Jan 20 
16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.906764 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:17Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.914384 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d96bp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8d2c8b-fb8e-4bff-b8d2-69570e5d9e57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe858db22acc96b062f1bc7941e05c823f30d1f9ba50bd497c57f38d57e04293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4v6jx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d96bp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:17Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.928296 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"254129cd-82fc-4162-b671-2434bc9e2972\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a6a184067bde115ef5e09fdd90e7dccfe89a9dc347af450998b91c068e7e803\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd8b93e5559d95511a1285a9aec9a7d72df6c330bcd1e1daf3e93f5494f70eeb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4264ef8ee49a8b172fb05ac5c9b0bd32350b5d9ae95293079dfeaf44f021b455\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0889cf371f8b06cc2d255c15fda97db1d5000d6f6cff29d2fdcb8667cede8a99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4265d3281e954ee5989ee008da28c6cb9fa29478e5fdf23325521d455304da9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://186a1d2caad8865abf7f565405c4a4c077038ccb67ba77927cc2392ec075a811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://41b06bf319806f8cb4eb26d154351300538b7998dbff8ab608b53a397a84aeb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://41b06bf319806f8cb4eb26d154351300538b7998dbff8ab608b53a397a84aeb1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-20T16:42:13Z\\\",\\\"message\\\":\\\"rt network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:13Z is after 2025-08-24T17:21:41Z]\\\\nI0120 16:42:13.263941 5970 services_controller.go:434] Service openshift-marketplace/redhat-marketplace retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{redhat-marketplace openshift-marketplace cf6d00ec-cc2c-43f6-815c-40ffd0563e71 5558 0 2025-02-23 05:23:25 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[olm.managed:true olm.service-spec-hash:aUeLNNcZzVZO2rcaZ5Kc8V3jffO0Ss4T6qX6V5] map[] [{operators.coreos.com/v1alpha1 CatalogSource redhat-marketplace fcb55c30-a739-4bc1-9c9c-7634e05a3dbd 0xc00757e68d 0xc00757e68e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:grpc,Protocol:TCP,P\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:12Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-nv2xw_openshift-ovn-kubernetes(254129cd-82fc-4162-b671-2434bc9e2972)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f41d488b0c24e594a2f5cad36b5f8efdb7d867fae0e18a74fe3c396a203d162f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-nv2xw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:17Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.938653 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f5t7h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a318f03d0df490cb7bae11819c52eca011d163081aeecff1b2dd8f72caabbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"
}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e77617c69434ce20c448b525c7e86f1ece18d51ce6e14167b4ca7e99de8e5709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e77617c69434ce20c448b525c7e86f1ece18d51ce6e14167b4ca7e99de8e5709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://453c67434281e0261ec6799b30a548a01efd72d7df96632409e745b2e6a94546\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://453c67434281e0261ec6799b30a548a01efd72d7df96632409e745b2e6a94546\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt
\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06eab4daa6c8b6a564cf1bb1403b7b3b259a234ee09f23b4f4bc0628c2d58e69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06eab4daa6c8b6a564cf1bb1403b7b3b259a234ee09f23b4f4bc0628c2d58e69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1291ac09fc6ae2a79fdeaa0fc47272f61b97f45f67ed3f7c90bdfdbfe3519ff2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1291ac09fc6ae2a79fdeaa0fc47272f61b97f45f67ed3f7c90bdfdbfe3519ff2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe484514f50a14e485d4a97ea3ce9401c77f3f4dae4717719be7ed6c4d7b1a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"nam
e\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe484514f50a14e485d4a97ea3ce9401c77f3f4dae4717719be7ed6c4d7b1a59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f5t7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:17Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.945765 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9wrq6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30032328-bd33-4073-9366-e10bc5e2aa77\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6tz4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6tz4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:17Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9wrq6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:17Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.954210 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a13bfefa-f525-4ecd-89f5-f10480532bf6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9030e970e96b6909d771d4fa0c3b4f493b4a825cbc43ef7e684272284329c7c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://168e8f533aaa108e4160f9ae4af6bb7944edba7a596178ffffdc5325073fc3c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e0141aef1babb57501016cb09787591b5e6d9136b8d7dd4ba64393f0be5a027\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c87f08993c2b47c63ba9652c7dafe0fdbecdcb03a7b7080664ab1f1bd0e1d602\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:17Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.962542 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f904dd3e4b43e51569b6532f00fa2f9edf912c1f1969ee2ea00c99d1b7e35a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c877f4a456b3ce64bdbe6fefd50f1ef3f92bf9e26e6296005cf366e0ce265\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:17Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.969870 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e45cc56934a7b90e9d47f1c6d06d8dc140d57db4a216469e8ae112f398663e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:17Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.972080 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.972113 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.972122 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.972135 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.972143 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:17Z","lastTransitionTime":"2026-01-20T16:42:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.978936 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jsqvf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bedf08c7-1f93-4931-a7f3-e729e2a137af\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e32b76495d88d356373f9389e163a2f3a72e202bb454c7015594c1f5a41b511\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xxtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jsqvf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:17Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:17 crc kubenswrapper[4558]: I0120 16:42:17.985505 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-47477" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e026445a-aa73-4a0e-ba35-e2e07de1278c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec2ee3ff161cba7e14ad26763081c9f092f0dfe969cef113631c27e0db53d157\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xcnwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\
":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-47477\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:17Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:18 crc kubenswrapper[4558]: I0120 16:42:18.001303 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"425eab90-fb70-4498-a98b-b95742033890\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48592e72d91cf527296f06e52fec1c16c1da21323c0644659945af109a74ebb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7e18f59b5e6b6c0be141b7efb3ccb1df4ff6e3c394bcc88fb1142c8df433493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\
\\"containerID\\\":\\\"cri-o://c889e2b0e14244ef49ec057e3d5fd91687e335a983fdc99a6816abd2af643ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d434793571cbba089cc34029d951bee9063aca58728b791f2d1af2d68229f778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89f7ad577df11c509855a90ac8791da6de3f0d2857a1ca56726a6327f40455bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4
cc8ca3285bf4eb1ba4c53c1ed15eddb9054021\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15eddb9054021\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:17Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:18 crc kubenswrapper[4558]: I0120 16:42:18.011112 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30e1ff9d-8dcd-4754-9a7a-c09598fb83db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ea68e442bb2143fbaa5e2c9fe335c6b023a31c2eeaa78d60de45c6ef40c2894\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a8122c3b57ac4f0026df149fe622450d07d8b0d517ad018decd7b21d8d9c04a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7444e8fea1853bae45dbd3d52d07e3f7b94314cd29fe8c99891e515a892e569e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6438ffea42f98178c4c2c21ae01725a8b54d6a6f790f47d64faf5ecd9f1c00f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2e22f1d4e87dd06a67f49aedad280da48ac1c4ba2e438a4925cf8bd5330c4d8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0120 16:41:58.859734 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0120 16:41:58.861094 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179835226/tls.crt::/tmp/serving-cert-2179835226/tls.key\\\\\\\"\\\\nI0120 16:42:04.205247 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0120 16:42:04.207291 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0120 16:42:04.207308 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0120 16:42:04.207329 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0120 16:42:04.207333 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0120 16:42:04.210615 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0120 16:42:04.210632 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0120 16:42:04.210641 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210648 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0120 16:42:04.210655 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0120 16:42:04.210658 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0120 16:42:04.210660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0120 16:42:04.211964 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4850bde1fb538e21fc91949dc584ef0a791d718844691a44559e0513bb36203\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:18Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:18 crc kubenswrapper[4558]: I0120 16:42:18.014632 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v6tz4\" (UniqueName: \"kubernetes.io/projected/30032328-bd33-4073-9366-e10bc5e2aa77-kube-api-access-v6tz4\") pod \"network-metrics-daemon-9wrq6\" (UID: \"30032328-bd33-4073-9366-e10bc5e2aa77\") " pod="openshift-multus/network-metrics-daemon-9wrq6" Jan 20 16:42:18 crc kubenswrapper[4558]: I0120 16:42:18.014695 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/30032328-bd33-4073-9366-e10bc5e2aa77-metrics-certs\") pod \"network-metrics-daemon-9wrq6\" (UID: \"30032328-bd33-4073-9366-e10bc5e2aa77\") " pod="openshift-multus/network-metrics-daemon-9wrq6" Jan 20 16:42:18 
crc kubenswrapper[4558]: I0120 16:42:18.021085 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"68337d27-3fa6-4a29-88b0-82e60c3739eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de7f7c23993a57d30f08686e1cd4abb5b17e108ac12dd0c7929a31574c69eeb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18a59ecb802507e0d5988eacc915fd7e0d6954518de80f61162c97f680fd8c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: 
certificate has expired or is not yet valid: current time 2026-01-20T16:42:18Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:18 crc kubenswrapper[4558]: I0120 16:42:18.078437 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:18 crc kubenswrapper[4558]: I0120 16:42:18.078663 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:18 crc kubenswrapper[4558]: I0120 16:42:18.078674 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:18 crc kubenswrapper[4558]: I0120 16:42:18.078688 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:18 crc kubenswrapper[4558]: I0120 16:42:18.078697 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:18Z","lastTransitionTime":"2026-01-20T16:42:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:18 crc kubenswrapper[4558]: I0120 16:42:18.116057 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v6tz4\" (UniqueName: \"kubernetes.io/projected/30032328-bd33-4073-9366-e10bc5e2aa77-kube-api-access-v6tz4\") pod \"network-metrics-daemon-9wrq6\" (UID: \"30032328-bd33-4073-9366-e10bc5e2aa77\") " pod="openshift-multus/network-metrics-daemon-9wrq6" Jan 20 16:42:18 crc kubenswrapper[4558]: I0120 16:42:18.116116 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/30032328-bd33-4073-9366-e10bc5e2aa77-metrics-certs\") pod \"network-metrics-daemon-9wrq6\" (UID: \"30032328-bd33-4073-9366-e10bc5e2aa77\") " pod="openshift-multus/network-metrics-daemon-9wrq6" Jan 20 16:42:18 crc kubenswrapper[4558]: E0120 16:42:18.116234 4558 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 20 16:42:18 crc kubenswrapper[4558]: E0120 16:42:18.116299 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/30032328-bd33-4073-9366-e10bc5e2aa77-metrics-certs podName:30032328-bd33-4073-9366-e10bc5e2aa77 nodeName:}" failed. No retries permitted until 2026-01-20 16:42:18.616280269 +0000 UTC m=+32.376618236 (durationBeforeRetry 500ms). 
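The status-patch failures above all fail the same way: the pod.network-node-identity.openshift.io webhook's serving certificate expired at 2025-08-24T17:21:41Z, so every TLS handshake the kubelet attempts against https://127.0.0.1:9743 is rejected. A minimal Go sketch of the underlying x509 validity check, assuming a PEM-encoded certificate at a hypothetical path (/tmp/webhook-tls.crt is illustrative, not the path the webhook actually serves from):

package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"os"
	"time"
)

func main() {
	// Hypothetical path for illustration; the webhook's real serving cert
	// lives wherever its webhook-cert volume is mounted.
	data, err := os.ReadFile("/tmp/webhook-tls.crt")
	if err != nil {
		fmt.Println("read:", err)
		return
	}
	block, _ := pem.Decode(data)
	if block == nil {
		fmt.Println("no PEM block found")
		return
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		fmt.Println("parse:", err)
		return
	}
	now := time.Now().UTC()
	switch {
	case now.Before(cert.NotBefore):
		fmt.Printf("certificate is not yet valid: current time %s is before %s\n",
			now.Format(time.RFC3339), cert.NotBefore.UTC().Format(time.RFC3339))
	case now.After(cert.NotAfter):
		fmt.Printf("certificate has expired: current time %s is after %s\n",
			now.Format(time.RFC3339), cert.NotAfter.UTC().Format(time.RFC3339))
	default:
		fmt.Printf("certificate is valid until %s\n", cert.NotAfter.UTC().Format(time.RFC3339))
	}
}

Run against the webhook's serving certificate, this reproduces a comparison analogous to the "current time ... is after ..." messages in the entries above; the kubelet's own wording comes from Go's crypto/x509 verification error.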
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/30032328-bd33-4073-9366-e10bc5e2aa77-metrics-certs") pod "network-metrics-daemon-9wrq6" (UID: "30032328-bd33-4073-9366-e10bc5e2aa77") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 20 16:42:18 crc kubenswrapper[4558]: I0120 16:42:18.129676 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v6tz4\" (UniqueName: \"kubernetes.io/projected/30032328-bd33-4073-9366-e10bc5e2aa77-kube-api-access-v6tz4\") pod \"network-metrics-daemon-9wrq6\" (UID: \"30032328-bd33-4073-9366-e10bc5e2aa77\") " pod="openshift-multus/network-metrics-daemon-9wrq6" Jan 20 16:42:18 crc kubenswrapper[4558]: I0120 16:42:18.181035 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:18 crc kubenswrapper[4558]: I0120 16:42:18.181077 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:18 crc kubenswrapper[4558]: I0120 16:42:18.181086 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:18 crc kubenswrapper[4558]: I0120 16:42:18.181100 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:18 crc kubenswrapper[4558]: I0120 16:42:18.181108 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:18Z","lastTransitionTime":"2026-01-20T16:42:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:18 crc kubenswrapper[4558]: I0120 16:42:18.282998 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:18 crc kubenswrapper[4558]: I0120 16:42:18.283043 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:18 crc kubenswrapper[4558]: I0120 16:42:18.283054 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:18 crc kubenswrapper[4558]: I0120 16:42:18.283069 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:18 crc kubenswrapper[4558]: I0120 16:42:18.283080 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:18Z","lastTransitionTime":"2026-01-20T16:42:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:18 crc kubenswrapper[4558]: I0120 16:42:18.385843 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:18 crc kubenswrapper[4558]: I0120 16:42:18.385879 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:18 crc kubenswrapper[4558]: I0120 16:42:18.385888 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:18 crc kubenswrapper[4558]: I0120 16:42:18.385901 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:18 crc kubenswrapper[4558]: I0120 16:42:18.385910 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:18Z","lastTransitionTime":"2026-01-20T16:42:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:18 crc kubenswrapper[4558]: I0120 16:42:18.487682 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:18 crc kubenswrapper[4558]: I0120 16:42:18.487720 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:18 crc kubenswrapper[4558]: I0120 16:42:18.487729 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:18 crc kubenswrapper[4558]: I0120 16:42:18.487744 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:18 crc kubenswrapper[4558]: I0120 16:42:18.487752 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:18Z","lastTransitionTime":"2026-01-20T16:42:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:18 crc kubenswrapper[4558]: I0120 16:42:18.548672 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-10 13:52:28.124026076 +0000 UTC Jan 20 16:42:18 crc kubenswrapper[4558]: I0120 16:42:18.565270 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:42:18 crc kubenswrapper[4558]: I0120 16:42:18.565325 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 20 16:42:18 crc kubenswrapper[4558]: I0120 16:42:18.565276 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 20 16:42:18 crc kubenswrapper[4558]: E0120 16:42:18.565443 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 20 16:42:18 crc kubenswrapper[4558]: E0120 16:42:18.565390 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 20 16:42:18 crc kubenswrapper[4558]: E0120 16:42:18.565554 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 20 16:42:18 crc kubenswrapper[4558]: I0120 16:42:18.588978 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:18 crc kubenswrapper[4558]: I0120 16:42:18.589005 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:18 crc kubenswrapper[4558]: I0120 16:42:18.589013 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:18 crc kubenswrapper[4558]: I0120 16:42:18.589025 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:18 crc kubenswrapper[4558]: I0120 16:42:18.589033 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:18Z","lastTransitionTime":"2026-01-20T16:42:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:18 crc kubenswrapper[4558]: I0120 16:42:18.621459 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/30032328-bd33-4073-9366-e10bc5e2aa77-metrics-certs\") pod \"network-metrics-daemon-9wrq6\" (UID: \"30032328-bd33-4073-9366-e10bc5e2aa77\") " pod="openshift-multus/network-metrics-daemon-9wrq6" Jan 20 16:42:18 crc kubenswrapper[4558]: E0120 16:42:18.621606 4558 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 20 16:42:18 crc kubenswrapper[4558]: E0120 16:42:18.621650 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/30032328-bd33-4073-9366-e10bc5e2aa77-metrics-certs podName:30032328-bd33-4073-9366-e10bc5e2aa77 nodeName:}" failed. No retries permitted until 2026-01-20 16:42:19.621636737 +0000 UTC m=+33.381974705 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/30032328-bd33-4073-9366-e10bc5e2aa77-metrics-certs") pod "network-metrics-daemon-9wrq6" (UID: "30032328-bd33-4073-9366-e10bc5e2aa77") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 20 16:42:18 crc kubenswrapper[4558]: I0120 16:42:18.691112 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:18 crc kubenswrapper[4558]: I0120 16:42:18.691141 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:18 crc kubenswrapper[4558]: I0120 16:42:18.691184 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:18 crc kubenswrapper[4558]: I0120 16:42:18.691197 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:18 crc kubenswrapper[4558]: I0120 16:42:18.691206 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:18Z","lastTransitionTime":"2026-01-20T16:42:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
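The recurring KubeletNotReady condition ("no CNI configuration file in /etc/kubernetes/cni/net.d/") clears once the network plugin writes a configuration file into that directory. A rough Go sketch of such a readiness probe, assuming the conventional CNI extensions (.conf, .conflist, .json); this mirrors the condition conceptually and is not the kubelet's or CRI-O's actual code:

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// cniConfigPresent reports whether confDir contains at least one CNI
// configuration file. The extensions checked here (.conf, .conflist, .json)
// are the conventional ones; treat them as an assumption of this sketch.
func cniConfigPresent(confDir string) (bool, error) {
	entries, err := os.ReadDir(confDir)
	if err != nil {
		return false, err
	}
	for _, e := range entries {
		if e.IsDir() {
			continue
		}
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			return true, nil
		}
	}
	return false, nil
}

func main() {
	ok, err := cniConfigPresent("/etc/kubernetes/cni/net.d")
	switch {
	case err != nil:
		fmt.Println("error:", err)
	case !ok:
		fmt.Println("no CNI configuration file found; network plugin not ready")
	default:
		fmt.Println("CNI configuration present; network can become ready")
	}
}

Until that directory is populated (here, by ovn-kubernetes/multus coming up), the node keeps reporting Ready=False with reason KubeletNotReady, which is exactly what the repeated setters.go:603 entries show.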
Has your network provider started?"} Jan 20 16:42:18 crc kubenswrapper[4558]: I0120 16:42:18.793055 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:18 crc kubenswrapper[4558]: I0120 16:42:18.793109 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:18 crc kubenswrapper[4558]: I0120 16:42:18.793119 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:18 crc kubenswrapper[4558]: I0120 16:42:18.793133 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:18 crc kubenswrapper[4558]: I0120 16:42:18.793142 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:18Z","lastTransitionTime":"2026-01-20T16:42:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:18 crc kubenswrapper[4558]: I0120 16:42:18.895453 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:18 crc kubenswrapper[4558]: I0120 16:42:18.895484 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:18 crc kubenswrapper[4558]: I0120 16:42:18.895492 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:18 crc kubenswrapper[4558]: I0120 16:42:18.895504 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:18 crc kubenswrapper[4558]: I0120 16:42:18.895512 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:18Z","lastTransitionTime":"2026-01-20T16:42:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:18 crc kubenswrapper[4558]: I0120 16:42:18.997512 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:18 crc kubenswrapper[4558]: I0120 16:42:18.997545 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:18 crc kubenswrapper[4558]: I0120 16:42:18.997554 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:18 crc kubenswrapper[4558]: I0120 16:42:18.997573 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:18 crc kubenswrapper[4558]: I0120 16:42:18.997596 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:18Z","lastTransitionTime":"2026-01-20T16:42:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:19 crc kubenswrapper[4558]: I0120 16:42:19.099706 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:19 crc kubenswrapper[4558]: I0120 16:42:19.099748 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:19 crc kubenswrapper[4558]: I0120 16:42:19.099757 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:19 crc kubenswrapper[4558]: I0120 16:42:19.099771 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:19 crc kubenswrapper[4558]: I0120 16:42:19.099780 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:19Z","lastTransitionTime":"2026-01-20T16:42:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:19 crc kubenswrapper[4558]: I0120 16:42:19.201901 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:19 crc kubenswrapper[4558]: I0120 16:42:19.201935 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:19 crc kubenswrapper[4558]: I0120 16:42:19.201944 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:19 crc kubenswrapper[4558]: I0120 16:42:19.201957 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:19 crc kubenswrapper[4558]: I0120 16:42:19.201967 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:19Z","lastTransitionTime":"2026-01-20T16:42:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:19 crc kubenswrapper[4558]: I0120 16:42:19.303880 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:19 crc kubenswrapper[4558]: I0120 16:42:19.303912 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:19 crc kubenswrapper[4558]: I0120 16:42:19.303921 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:19 crc kubenswrapper[4558]: I0120 16:42:19.303937 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:19 crc kubenswrapper[4558]: I0120 16:42:19.303946 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:19Z","lastTransitionTime":"2026-01-20T16:42:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:19 crc kubenswrapper[4558]: I0120 16:42:19.405745 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:19 crc kubenswrapper[4558]: I0120 16:42:19.405791 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:19 crc kubenswrapper[4558]: I0120 16:42:19.405809 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:19 crc kubenswrapper[4558]: I0120 16:42:19.405823 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:19 crc kubenswrapper[4558]: I0120 16:42:19.405832 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:19Z","lastTransitionTime":"2026-01-20T16:42:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:19 crc kubenswrapper[4558]: I0120 16:42:19.507728 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:19 crc kubenswrapper[4558]: I0120 16:42:19.507772 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:19 crc kubenswrapper[4558]: I0120 16:42:19.507786 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:19 crc kubenswrapper[4558]: I0120 16:42:19.507802 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:19 crc kubenswrapper[4558]: I0120 16:42:19.507813 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:19Z","lastTransitionTime":"2026-01-20T16:42:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:19 crc kubenswrapper[4558]: I0120 16:42:19.549588 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-04 14:12:33.148945655 +0000 UTC Jan 20 16:42:19 crc kubenswrapper[4558]: I0120 16:42:19.565496 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9wrq6" Jan 20 16:42:19 crc kubenswrapper[4558]: E0120 16:42:19.565606 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9wrq6" podUID="30032328-bd33-4073-9366-e10bc5e2aa77" Jan 20 16:42:19 crc kubenswrapper[4558]: I0120 16:42:19.609578 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:19 crc kubenswrapper[4558]: I0120 16:42:19.609613 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:19 crc kubenswrapper[4558]: I0120 16:42:19.609623 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:19 crc kubenswrapper[4558]: I0120 16:42:19.609634 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:19 crc kubenswrapper[4558]: I0120 16:42:19.609643 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:19Z","lastTransitionTime":"2026-01-20T16:42:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:19 crc kubenswrapper[4558]: I0120 16:42:19.629217 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/30032328-bd33-4073-9366-e10bc5e2aa77-metrics-certs\") pod \"network-metrics-daemon-9wrq6\" (UID: \"30032328-bd33-4073-9366-e10bc5e2aa77\") " pod="openshift-multus/network-metrics-daemon-9wrq6" Jan 20 16:42:19 crc kubenswrapper[4558]: E0120 16:42:19.629329 4558 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 20 16:42:19 crc kubenswrapper[4558]: E0120 16:42:19.629379 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/30032328-bd33-4073-9366-e10bc5e2aa77-metrics-certs podName:30032328-bd33-4073-9366-e10bc5e2aa77 nodeName:}" failed. No retries permitted until 2026-01-20 16:42:21.629365915 +0000 UTC m=+35.389703892 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/30032328-bd33-4073-9366-e10bc5e2aa77-metrics-certs") pod "network-metrics-daemon-9wrq6" (UID: "30032328-bd33-4073-9366-e10bc5e2aa77") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 20 16:42:19 crc kubenswrapper[4558]: I0120 16:42:19.711147 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:19 crc kubenswrapper[4558]: I0120 16:42:19.711179 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:19 crc kubenswrapper[4558]: I0120 16:42:19.711188 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:19 crc kubenswrapper[4558]: I0120 16:42:19.711199 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:19 crc kubenswrapper[4558]: I0120 16:42:19.711207 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:19Z","lastTransitionTime":"2026-01-20T16:42:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:19 crc kubenswrapper[4558]: I0120 16:42:19.813479 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:19 crc kubenswrapper[4558]: I0120 16:42:19.813522 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:19 crc kubenswrapper[4558]: I0120 16:42:19.813531 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:19 crc kubenswrapper[4558]: I0120 16:42:19.813546 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:19 crc kubenswrapper[4558]: I0120 16:42:19.813555 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:19Z","lastTransitionTime":"2026-01-20T16:42:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:19 crc kubenswrapper[4558]: I0120 16:42:19.915200 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:19 crc kubenswrapper[4558]: I0120 16:42:19.915234 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:19 crc kubenswrapper[4558]: I0120 16:42:19.915243 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:19 crc kubenswrapper[4558]: I0120 16:42:19.915256 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:19 crc kubenswrapper[4558]: I0120 16:42:19.915267 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:19Z","lastTransitionTime":"2026-01-20T16:42:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:20 crc kubenswrapper[4558]: I0120 16:42:20.017518 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:20 crc kubenswrapper[4558]: I0120 16:42:20.017558 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:20 crc kubenswrapper[4558]: I0120 16:42:20.017568 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:20 crc kubenswrapper[4558]: I0120 16:42:20.017593 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:20 crc kubenswrapper[4558]: I0120 16:42:20.017604 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:20Z","lastTransitionTime":"2026-01-20T16:42:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:20 crc kubenswrapper[4558]: I0120 16:42:20.119737 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:20 crc kubenswrapper[4558]: I0120 16:42:20.119777 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:20 crc kubenswrapper[4558]: I0120 16:42:20.119785 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:20 crc kubenswrapper[4558]: I0120 16:42:20.119801 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:20 crc kubenswrapper[4558]: I0120 16:42:20.119809 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:20Z","lastTransitionTime":"2026-01-20T16:42:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:20 crc kubenswrapper[4558]: I0120 16:42:20.221542 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:20 crc kubenswrapper[4558]: I0120 16:42:20.221592 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:20 crc kubenswrapper[4558]: I0120 16:42:20.221602 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:20 crc kubenswrapper[4558]: I0120 16:42:20.221846 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:20 crc kubenswrapper[4558]: I0120 16:42:20.221869 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:20Z","lastTransitionTime":"2026-01-20T16:42:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:20 crc kubenswrapper[4558]: I0120 16:42:20.324136 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:20 crc kubenswrapper[4558]: I0120 16:42:20.324192 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:20 crc kubenswrapper[4558]: I0120 16:42:20.324203 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:20 crc kubenswrapper[4558]: I0120 16:42:20.324232 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:20 crc kubenswrapper[4558]: I0120 16:42:20.324241 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:20Z","lastTransitionTime":"2026-01-20T16:42:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:20 crc kubenswrapper[4558]: I0120 16:42:20.335441 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 20 16:42:20 crc kubenswrapper[4558]: E0120 16:42:20.335626 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-20 16:42:36.335603246 +0000 UTC m=+50.095941212 (durationBeforeRetry 16s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:42:20 crc kubenswrapper[4558]: I0120 16:42:20.426051 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:20 crc kubenswrapper[4558]: I0120 16:42:20.426075 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:20 crc kubenswrapper[4558]: I0120 16:42:20.426083 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:20 crc kubenswrapper[4558]: I0120 16:42:20.426092 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:20 crc kubenswrapper[4558]: I0120 16:42:20.426099 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:20Z","lastTransitionTime":"2026-01-20T16:42:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:20 crc kubenswrapper[4558]: I0120 16:42:20.436664 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:42:20 crc kubenswrapper[4558]: I0120 16:42:20.436696 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:42:20 crc kubenswrapper[4558]: I0120 16:42:20.436719 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 20 16:42:20 crc kubenswrapper[4558]: I0120 16:42:20.436752 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 20 16:42:20 crc kubenswrapper[4558]: E0120 16:42:20.436847 4558 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object 
"openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 20 16:42:20 crc kubenswrapper[4558]: E0120 16:42:20.436866 4558 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 20 16:42:20 crc kubenswrapper[4558]: E0120 16:42:20.436874 4558 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 20 16:42:20 crc kubenswrapper[4558]: E0120 16:42:20.436905 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-20 16:42:36.436895584 +0000 UTC m=+50.197233551 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 20 16:42:20 crc kubenswrapper[4558]: E0120 16:42:20.436917 4558 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 20 16:42:20 crc kubenswrapper[4558]: E0120 16:42:20.436951 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-20 16:42:36.436942202 +0000 UTC m=+50.197280168 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 20 16:42:20 crc kubenswrapper[4558]: E0120 16:42:20.437049 4558 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 20 16:42:20 crc kubenswrapper[4558]: E0120 16:42:20.437090 4558 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 20 16:42:20 crc kubenswrapper[4558]: E0120 16:42:20.437108 4558 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 20 16:42:20 crc kubenswrapper[4558]: E0120 16:42:20.437066 4558 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 20 16:42:20 crc kubenswrapper[4558]: E0120 16:42:20.437197 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-20 16:42:36.43714872 +0000 UTC m=+50.197486697 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 20 16:42:20 crc kubenswrapper[4558]: E0120 16:42:20.437218 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-20 16:42:36.437209063 +0000 UTC m=+50.197547029 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 20 16:42:20 crc kubenswrapper[4558]: I0120 16:42:20.527924 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:20 crc kubenswrapper[4558]: I0120 16:42:20.527955 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:20 crc kubenswrapper[4558]: I0120 16:42:20.527963 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:20 crc kubenswrapper[4558]: I0120 16:42:20.527975 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:20 crc kubenswrapper[4558]: I0120 16:42:20.527984 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:20Z","lastTransitionTime":"2026-01-20T16:42:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:20 crc kubenswrapper[4558]: I0120 16:42:20.550383 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-22 16:37:11.069549266 +0000 UTC Jan 20 16:42:20 crc kubenswrapper[4558]: I0120 16:42:20.565648 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:42:20 crc kubenswrapper[4558]: I0120 16:42:20.565685 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 20 16:42:20 crc kubenswrapper[4558]: I0120 16:42:20.565842 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 20 16:42:20 crc kubenswrapper[4558]: E0120 16:42:20.565992 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 20 16:42:20 crc kubenswrapper[4558]: E0120 16:42:20.566114 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 20 16:42:20 crc kubenswrapper[4558]: E0120 16:42:20.566314 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 20 16:42:20 crc kubenswrapper[4558]: I0120 16:42:20.630797 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:20 crc kubenswrapper[4558]: I0120 16:42:20.630860 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:20 crc kubenswrapper[4558]: I0120 16:42:20.630869 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:20 crc kubenswrapper[4558]: I0120 16:42:20.630883 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:20 crc kubenswrapper[4558]: I0120 16:42:20.630893 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:20Z","lastTransitionTime":"2026-01-20T16:42:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:20 crc kubenswrapper[4558]: I0120 16:42:20.733060 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:20 crc kubenswrapper[4558]: I0120 16:42:20.733086 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:20 crc kubenswrapper[4558]: I0120 16:42:20.733094 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:20 crc kubenswrapper[4558]: I0120 16:42:20.733106 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:20 crc kubenswrapper[4558]: I0120 16:42:20.733114 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:20Z","lastTransitionTime":"2026-01-20T16:42:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:20 crc kubenswrapper[4558]: I0120 16:42:20.834731 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:20 crc kubenswrapper[4558]: I0120 16:42:20.834772 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:20 crc kubenswrapper[4558]: I0120 16:42:20.834779 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:20 crc kubenswrapper[4558]: I0120 16:42:20.834793 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:20 crc kubenswrapper[4558]: I0120 16:42:20.834801 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:20Z","lastTransitionTime":"2026-01-20T16:42:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:20 crc kubenswrapper[4558]: I0120 16:42:20.936532 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:20 crc kubenswrapper[4558]: I0120 16:42:20.936705 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:20 crc kubenswrapper[4558]: I0120 16:42:20.936771 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:20 crc kubenswrapper[4558]: I0120 16:42:20.936855 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:20 crc kubenswrapper[4558]: I0120 16:42:20.936910 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:20Z","lastTransitionTime":"2026-01-20T16:42:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:21 crc kubenswrapper[4558]: I0120 16:42:21.039304 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:21 crc kubenswrapper[4558]: I0120 16:42:21.039435 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:21 crc kubenswrapper[4558]: I0120 16:42:21.039493 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:21 crc kubenswrapper[4558]: I0120 16:42:21.039556 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:21 crc kubenswrapper[4558]: I0120 16:42:21.039628 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:21Z","lastTransitionTime":"2026-01-20T16:42:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:21 crc kubenswrapper[4558]: I0120 16:42:21.141155 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:21 crc kubenswrapper[4558]: I0120 16:42:21.141217 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:21 crc kubenswrapper[4558]: I0120 16:42:21.141230 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:21 crc kubenswrapper[4558]: I0120 16:42:21.141251 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:21 crc kubenswrapper[4558]: I0120 16:42:21.141262 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:21Z","lastTransitionTime":"2026-01-20T16:42:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:21 crc kubenswrapper[4558]: I0120 16:42:21.242877 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:21 crc kubenswrapper[4558]: I0120 16:42:21.242919 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:21 crc kubenswrapper[4558]: I0120 16:42:21.242931 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:21 crc kubenswrapper[4558]: I0120 16:42:21.242945 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:21 crc kubenswrapper[4558]: I0120 16:42:21.242955 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:21Z","lastTransitionTime":"2026-01-20T16:42:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:21 crc kubenswrapper[4558]: I0120 16:42:21.344773 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:21 crc kubenswrapper[4558]: I0120 16:42:21.344818 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:21 crc kubenswrapper[4558]: I0120 16:42:21.344826 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:21 crc kubenswrapper[4558]: I0120 16:42:21.344839 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:21 crc kubenswrapper[4558]: I0120 16:42:21.344847 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:21Z","lastTransitionTime":"2026-01-20T16:42:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:21 crc kubenswrapper[4558]: I0120 16:42:21.446845 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:21 crc kubenswrapper[4558]: I0120 16:42:21.446872 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:21 crc kubenswrapper[4558]: I0120 16:42:21.446881 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:21 crc kubenswrapper[4558]: I0120 16:42:21.446895 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:21 crc kubenswrapper[4558]: I0120 16:42:21.446903 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:21Z","lastTransitionTime":"2026-01-20T16:42:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:21 crc kubenswrapper[4558]: I0120 16:42:21.549066 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:21 crc kubenswrapper[4558]: I0120 16:42:21.549093 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:21 crc kubenswrapper[4558]: I0120 16:42:21.549103 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:21 crc kubenswrapper[4558]: I0120 16:42:21.549114 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:21 crc kubenswrapper[4558]: I0120 16:42:21.549123 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:21Z","lastTransitionTime":"2026-01-20T16:42:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:21 crc kubenswrapper[4558]: I0120 16:42:21.551261 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-05 13:16:31.020459345 +0000 UTC Jan 20 16:42:21 crc kubenswrapper[4558]: I0120 16:42:21.565476 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9wrq6" Jan 20 16:42:21 crc kubenswrapper[4558]: E0120 16:42:21.565630 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9wrq6" podUID="30032328-bd33-4073-9366-e10bc5e2aa77" Jan 20 16:42:21 crc kubenswrapper[4558]: I0120 16:42:21.645264 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/30032328-bd33-4073-9366-e10bc5e2aa77-metrics-certs\") pod \"network-metrics-daemon-9wrq6\" (UID: \"30032328-bd33-4073-9366-e10bc5e2aa77\") " pod="openshift-multus/network-metrics-daemon-9wrq6" Jan 20 16:42:21 crc kubenswrapper[4558]: E0120 16:42:21.645436 4558 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 20 16:42:21 crc kubenswrapper[4558]: E0120 16:42:21.645503 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/30032328-bd33-4073-9366-e10bc5e2aa77-metrics-certs podName:30032328-bd33-4073-9366-e10bc5e2aa77 nodeName:}" failed. No retries permitted until 2026-01-20 16:42:25.645486145 +0000 UTC m=+39.405824112 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/30032328-bd33-4073-9366-e10bc5e2aa77-metrics-certs") pod "network-metrics-daemon-9wrq6" (UID: "30032328-bd33-4073-9366-e10bc5e2aa77") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 20 16:42:21 crc kubenswrapper[4558]: I0120 16:42:21.651070 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:21 crc kubenswrapper[4558]: I0120 16:42:21.651100 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:21 crc kubenswrapper[4558]: I0120 16:42:21.651109 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:21 crc kubenswrapper[4558]: I0120 16:42:21.651122 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:21 crc kubenswrapper[4558]: I0120 16:42:21.651130 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:21Z","lastTransitionTime":"2026-01-20T16:42:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:21 crc kubenswrapper[4558]: I0120 16:42:21.753017 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:21 crc kubenswrapper[4558]: I0120 16:42:21.753049 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:21 crc kubenswrapper[4558]: I0120 16:42:21.753058 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:21 crc kubenswrapper[4558]: I0120 16:42:21.753072 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:21 crc kubenswrapper[4558]: I0120 16:42:21.753080 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:21Z","lastTransitionTime":"2026-01-20T16:42:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:21 crc kubenswrapper[4558]: I0120 16:42:21.854783 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:21 crc kubenswrapper[4558]: I0120 16:42:21.854854 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:21 crc kubenswrapper[4558]: I0120 16:42:21.854864 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:21 crc kubenswrapper[4558]: I0120 16:42:21.854878 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:21 crc kubenswrapper[4558]: I0120 16:42:21.854901 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:21Z","lastTransitionTime":"2026-01-20T16:42:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:21 crc kubenswrapper[4558]: I0120 16:42:21.956979 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:21 crc kubenswrapper[4558]: I0120 16:42:21.957013 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:21 crc kubenswrapper[4558]: I0120 16:42:21.957022 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:21 crc kubenswrapper[4558]: I0120 16:42:21.957036 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:21 crc kubenswrapper[4558]: I0120 16:42:21.957046 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:21Z","lastTransitionTime":"2026-01-20T16:42:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:22 crc kubenswrapper[4558]: I0120 16:42:22.059089 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:22 crc kubenswrapper[4558]: I0120 16:42:22.059156 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:22 crc kubenswrapper[4558]: I0120 16:42:22.059181 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:22 crc kubenswrapper[4558]: I0120 16:42:22.059202 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:22 crc kubenswrapper[4558]: I0120 16:42:22.059210 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:22Z","lastTransitionTime":"2026-01-20T16:42:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:22 crc kubenswrapper[4558]: I0120 16:42:22.070912 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 20 16:42:22 crc kubenswrapper[4558]: I0120 16:42:22.084982 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"425eab90-fb70-4498-a98b-b95742033890\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48592e72d91cf527296f06e52fec1c16c1da21323c0644659945af109a74ebb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7e18f59b5e6b6c0be141b7efb3ccb1df4ff6e3c394bcc88fb1142c8df433493\\\",\\\"image\\\":\\\"quay
.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c889e2b0e14244ef49ec057e3d5fd91687e335a983fdc99a6816abd2af643ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d434793571cbba089cc34029d951bee9063aca58728b791f2d1af2d68229f778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89f7ad577df11c509855a90ac8791da6de3f0d2857a1ca56726a6327f40455bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441e
cd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15eddb9054021\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15eddb9054021\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:22Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:22 crc kubenswrapper[4558]: I0120 16:42:22.098257 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"30e1ff9d-8dcd-4754-9a7a-c09598fb83db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ea68e442bb2143fbaa5e2c9fe335c6b023a31c2eeaa78d60de45c6ef40c2894\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a8122c3b57ac4f0026df149fe622450d07d8b0d517ad018decd7b21d8d9c04a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7444e8fea1853bae45dbd3d52d07e3f7b94314cd29fe8c99891e515a892e569e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6438ffea42f98178c4c2c21ae01725a8b54d6a6f790f47d64faf5ecd9f1c00f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2e22f1d4e87dd06a67f49aedad280da48ac1c4ba2e438a4925cf8bd5330c4d8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0120 16:41:58.859734 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0120 16:41:58.861094 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179835226/tls.crt::/tmp/serving-cert-2179835226/tls.key\\\\\\\"\\\\nI0120 16:42:04.205247 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0120 16:42:04.207291 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0120 16:42:04.207308 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0120 16:42:04.207329 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0120 16:42:04.207333 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0120 16:42:04.210615 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0120 16:42:04.210632 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0120 16:42:04.210641 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210648 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0120 16:42:04.210655 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0120 16:42:04.210658 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0120 16:42:04.210660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0120 16:42:04.211964 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4850bde1fb538e21fc91949dc584ef0a791d718844691a44559e0513bb36203\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:22Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:22 crc kubenswrapper[4558]: I0120 16:42:22.107097 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"68337d27-3fa6-4a29-88b0-82e60c3739eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de7f7c23993a57d30f08686e1cd4abb5b17e108ac12dd0c7929a31574c69eeb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18a59ecb802507e0d5988eacc915fd7e0d6954518de80f61162c97f680fd8c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:22Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:22 crc kubenswrapper[4558]: I0120 16:42:22.115791 4558 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfwnl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8fe5472-7373-4e5d-87fc-a9ecaf26a5cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38cd58ecc67d337df2c29b79e5460ea23d4acc1e66de8b08ea70d7ab7b30691d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9nvl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a14e9281ff19e324499722aefadf1e1f2fd005089c153918ad4d8a7f5bdfa853\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9nvl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rfwnl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:22Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:22 crc kubenswrapper[4558]: I0120 16:42:22.125153 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e190c24c1d8a3bc0241334598ce23829ada9afee282d49a0c82cd67ad82bcd02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:22Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:22 crc kubenswrapper[4558]: I0120 16:42:22.134127 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:22Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:22 crc kubenswrapper[4558]: I0120 16:42:22.143518 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:22Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:22 crc kubenswrapper[4558]: I0120 16:42:22.151401 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9wrq6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30032328-bd33-4073-9366-e10bc5e2aa77\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6tz4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6tz4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:17Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9wrq6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:22Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:22 crc kubenswrapper[4558]: I0120 16:42:22.159566 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:22Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:22 crc kubenswrapper[4558]: I0120 16:42:22.160834 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:22 crc kubenswrapper[4558]: I0120 16:42:22.160857 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:22 crc kubenswrapper[4558]: I0120 16:42:22.160865 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:22 crc kubenswrapper[4558]: I0120 16:42:22.160877 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:22 crc kubenswrapper[4558]: I0120 16:42:22.160893 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:22Z","lastTransitionTime":"2026-01-20T16:42:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:22 crc kubenswrapper[4558]: I0120 16:42:22.166502 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d96bp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8d2c8b-fb8e-4bff-b8d2-69570e5d9e57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe858db22acc96b062f1bc7941e05c823f30d1f9ba50bd497c57f38d57e04293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4v6jx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d96bp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:22Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:22 crc kubenswrapper[4558]: I0120 16:42:22.179152 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"254129cd-82fc-4162-b671-2434bc9e2972\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a6a184067bde115ef5e09fdd90e7dccfe89a9dc347af450998b91c068e7e803\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd8b93e5559d95511a1285a9aec9a7d72df6c330bcd1e1daf3e93f5494f70eeb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4264ef8ee49a8b172fb05ac5c9b0bd32350b5d9ae95293079dfeaf44f021b455\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0889cf371f8b06cc2d255c15fda97db1d5000d6f6cff29d2fdcb8667cede8a99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4265d3281e954ee5989ee008da28c6cb9fa29478e5fdf23325521d455304da9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://186a1d2caad8865abf7f565405c4a4c077038ccb67ba77927cc2392ec075a811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://41b06bf319806f8cb4eb26d154351300538b7998dbff8ab608b53a397a84aeb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://41b06bf319806f8cb4eb26d154351300538b7998dbff8ab608b53a397a84aeb1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-20T16:42:13Z\\\",\\\"message\\\":\\\"rt network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:13Z is after 2025-08-24T17:21:41Z]\\\\nI0120 16:42:13.263941 5970 services_controller.go:434] Service openshift-marketplace/redhat-marketplace retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{redhat-marketplace openshift-marketplace cf6d00ec-cc2c-43f6-815c-40ffd0563e71 5558 0 2025-02-23 05:23:25 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[olm.managed:true olm.service-spec-hash:aUeLNNcZzVZO2rcaZ5Kc8V3jffO0Ss4T6qX6V5] map[] [{operators.coreos.com/v1alpha1 CatalogSource redhat-marketplace fcb55c30-a739-4bc1-9c9c-7634e05a3dbd 0xc00757e68d 0xc00757e68e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:grpc,Protocol:TCP,P\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:12Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-nv2xw_openshift-ovn-kubernetes(254129cd-82fc-4162-b671-2434bc9e2972)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f41d488b0c24e594a2f5cad36b5f8efdb7d867fae0e18a74fe3c396a203d162f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-nv2xw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:22Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:22 crc kubenswrapper[4558]: I0120 16:42:22.188684 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f5t7h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a318f03d0df490cb7bae11819c52eca011d163081aeecff1b2dd8f72caabbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"
}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e77617c69434ce20c448b525c7e86f1ece18d51ce6e14167b4ca7e99de8e5709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e77617c69434ce20c448b525c7e86f1ece18d51ce6e14167b4ca7e99de8e5709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://453c67434281e0261ec6799b30a548a01efd72d7df96632409e745b2e6a94546\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://453c67434281e0261ec6799b30a548a01efd72d7df96632409e745b2e6a94546\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt
\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06eab4daa6c8b6a564cf1bb1403b7b3b259a234ee09f23b4f4bc0628c2d58e69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06eab4daa6c8b6a564cf1bb1403b7b3b259a234ee09f23b4f4bc0628c2d58e69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1291ac09fc6ae2a79fdeaa0fc47272f61b97f45f67ed3f7c90bdfdbfe3519ff2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1291ac09fc6ae2a79fdeaa0fc47272f61b97f45f67ed3f7c90bdfdbfe3519ff2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe484514f50a14e485d4a97ea3ce9401c77f3f4dae4717719be7ed6c4d7b1a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"nam
e\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe484514f50a14e485d4a97ea3ce9401c77f3f4dae4717719be7ed6c4d7b1a59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f5t7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:22Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:22 crc kubenswrapper[4558]: I0120 16:42:22.195825 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-47477" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e026445a-aa73-4a0e-ba35-e2e07de1278c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec2ee3ff161cba7e14ad26763081c9f092f0dfe969cef113631c27e0db53d157\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xcnwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\
\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-47477\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:22Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:22 crc kubenswrapper[4558]: I0120 16:42:22.205008 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a13bfefa-f525-4ecd-89f5-f10480532bf6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9030e970e96b6909d771d4fa0c3b4f493b4a825cbc43ef7e684272284329c7c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://168e8f533aaa108e4160f9ae4af6bb7944edba7a596178ffffdc5325073fc3c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e0141aef1babb57501016cb09787591b5e6d9136b8d7dd4ba64393f0be5a027\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-
operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c87f08993c2b47c63ba9652c7dafe0fdbecdcb03a7b7080664ab1f1bd0e1d602\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:22Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:22 crc kubenswrapper[4558]: I0120 16:42:22.214338 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f904dd3e4b43e51569b6532f00fa2f9edf912c1f1969ee2ea00c99d1b7e35a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c877f4a456b3ce64bdbe6fefd50f1ef3f92bf9e26e6296005cf366e0ce265\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:22Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:22 crc kubenswrapper[4558]: I0120 16:42:22.221465 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e45cc56934a7b90e9d47f1c6d06d8dc140d57db4a216469e8ae112f398663e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:22Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:22 crc kubenswrapper[4558]: I0120 16:42:22.229638 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jsqvf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bedf08c7-1f93-4931-a7f3-e729e2a137af\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e32b76495d88d356373f9389e163a2f3a72e202bb454c7015594c1f5a41b511\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xxtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jsqvf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:22Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:22 crc kubenswrapper[4558]: I0120 16:42:22.263391 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:22 crc kubenswrapper[4558]: I0120 16:42:22.263437 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:22 crc kubenswrapper[4558]: I0120 16:42:22.263446 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:22 crc kubenswrapper[4558]: I0120 16:42:22.263461 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:22 crc kubenswrapper[4558]: I0120 16:42:22.263473 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:22Z","lastTransitionTime":"2026-01-20T16:42:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:22 crc kubenswrapper[4558]: I0120 16:42:22.365825 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:22 crc kubenswrapper[4558]: I0120 16:42:22.365872 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:22 crc kubenswrapper[4558]: I0120 16:42:22.365881 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:22 crc kubenswrapper[4558]: I0120 16:42:22.365894 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:22 crc kubenswrapper[4558]: I0120 16:42:22.365903 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:22Z","lastTransitionTime":"2026-01-20T16:42:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:22 crc kubenswrapper[4558]: I0120 16:42:22.468317 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:22 crc kubenswrapper[4558]: I0120 16:42:22.468350 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:22 crc kubenswrapper[4558]: I0120 16:42:22.468358 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:22 crc kubenswrapper[4558]: I0120 16:42:22.468370 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:22 crc kubenswrapper[4558]: I0120 16:42:22.468379 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:22Z","lastTransitionTime":"2026-01-20T16:42:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:22 crc kubenswrapper[4558]: I0120 16:42:22.551952 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-08 07:58:05.689667789 +0000 UTC Jan 20 16:42:22 crc kubenswrapper[4558]: I0120 16:42:22.565417 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:42:22 crc kubenswrapper[4558]: I0120 16:42:22.565449 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 20 16:42:22 crc kubenswrapper[4558]: E0120 16:42:22.565526 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 20 16:42:22 crc kubenswrapper[4558]: I0120 16:42:22.565561 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 20 16:42:22 crc kubenswrapper[4558]: E0120 16:42:22.565684 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 20 16:42:22 crc kubenswrapper[4558]: E0120 16:42:22.565798 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 20 16:42:22 crc kubenswrapper[4558]: I0120 16:42:22.569516 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:22 crc kubenswrapper[4558]: I0120 16:42:22.569542 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:22 crc kubenswrapper[4558]: I0120 16:42:22.569551 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:22 crc kubenswrapper[4558]: I0120 16:42:22.569562 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:22 crc kubenswrapper[4558]: I0120 16:42:22.569570 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:22Z","lastTransitionTime":"2026-01-20T16:42:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:22 crc kubenswrapper[4558]: I0120 16:42:22.671341 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:22 crc kubenswrapper[4558]: I0120 16:42:22.671370 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:22 crc kubenswrapper[4558]: I0120 16:42:22.671383 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:22 crc kubenswrapper[4558]: I0120 16:42:22.671393 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:22 crc kubenswrapper[4558]: I0120 16:42:22.671401 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:22Z","lastTransitionTime":"2026-01-20T16:42:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:22 crc kubenswrapper[4558]: I0120 16:42:22.773314 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:22 crc kubenswrapper[4558]: I0120 16:42:22.773348 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:22 crc kubenswrapper[4558]: I0120 16:42:22.773356 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:22 crc kubenswrapper[4558]: I0120 16:42:22.773370 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:22 crc kubenswrapper[4558]: I0120 16:42:22.773380 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:22Z","lastTransitionTime":"2026-01-20T16:42:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:22 crc kubenswrapper[4558]: I0120 16:42:22.875600 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:22 crc kubenswrapper[4558]: I0120 16:42:22.875656 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:22 crc kubenswrapper[4558]: I0120 16:42:22.875664 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:22 crc kubenswrapper[4558]: I0120 16:42:22.875682 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:22 crc kubenswrapper[4558]: I0120 16:42:22.875690 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:22Z","lastTransitionTime":"2026-01-20T16:42:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:22 crc kubenswrapper[4558]: I0120 16:42:22.977300 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:22 crc kubenswrapper[4558]: I0120 16:42:22.977349 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:22 crc kubenswrapper[4558]: I0120 16:42:22.977358 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:22 crc kubenswrapper[4558]: I0120 16:42:22.977372 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:22 crc kubenswrapper[4558]: I0120 16:42:22.977380 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:22Z","lastTransitionTime":"2026-01-20T16:42:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:23 crc kubenswrapper[4558]: I0120 16:42:23.079649 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:23 crc kubenswrapper[4558]: I0120 16:42:23.079691 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:23 crc kubenswrapper[4558]: I0120 16:42:23.079699 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:23 crc kubenswrapper[4558]: I0120 16:42:23.079714 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:23 crc kubenswrapper[4558]: I0120 16:42:23.079723 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:23Z","lastTransitionTime":"2026-01-20T16:42:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:23 crc kubenswrapper[4558]: I0120 16:42:23.181783 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:23 crc kubenswrapper[4558]: I0120 16:42:23.181834 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:23 crc kubenswrapper[4558]: I0120 16:42:23.181843 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:23 crc kubenswrapper[4558]: I0120 16:42:23.181856 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:23 crc kubenswrapper[4558]: I0120 16:42:23.181865 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:23Z","lastTransitionTime":"2026-01-20T16:42:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:23 crc kubenswrapper[4558]: I0120 16:42:23.283985 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:23 crc kubenswrapper[4558]: I0120 16:42:23.284018 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:23 crc kubenswrapper[4558]: I0120 16:42:23.284028 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:23 crc kubenswrapper[4558]: I0120 16:42:23.284041 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:23 crc kubenswrapper[4558]: I0120 16:42:23.284049 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:23Z","lastTransitionTime":"2026-01-20T16:42:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:23 crc kubenswrapper[4558]: I0120 16:42:23.385703 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:23 crc kubenswrapper[4558]: I0120 16:42:23.385750 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:23 crc kubenswrapper[4558]: I0120 16:42:23.385760 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:23 crc kubenswrapper[4558]: I0120 16:42:23.385776 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:23 crc kubenswrapper[4558]: I0120 16:42:23.385786 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:23Z","lastTransitionTime":"2026-01-20T16:42:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:23 crc kubenswrapper[4558]: I0120 16:42:23.491713 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:23 crc kubenswrapper[4558]: I0120 16:42:23.491744 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:23 crc kubenswrapper[4558]: I0120 16:42:23.491752 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:23 crc kubenswrapper[4558]: I0120 16:42:23.491764 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:23 crc kubenswrapper[4558]: I0120 16:42:23.491771 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:23Z","lastTransitionTime":"2026-01-20T16:42:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:23 crc kubenswrapper[4558]: I0120 16:42:23.552365 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-04 20:26:17.590386835 +0000 UTC Jan 20 16:42:23 crc kubenswrapper[4558]: I0120 16:42:23.565960 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9wrq6" Jan 20 16:42:23 crc kubenswrapper[4558]: E0120 16:42:23.566079 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9wrq6" podUID="30032328-bd33-4073-9366-e10bc5e2aa77" Jan 20 16:42:23 crc kubenswrapper[4558]: I0120 16:42:23.594271 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:23 crc kubenswrapper[4558]: I0120 16:42:23.594303 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:23 crc kubenswrapper[4558]: I0120 16:42:23.594311 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:23 crc kubenswrapper[4558]: I0120 16:42:23.594325 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:23 crc kubenswrapper[4558]: I0120 16:42:23.594333 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:23Z","lastTransitionTime":"2026-01-20T16:42:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:23 crc kubenswrapper[4558]: I0120 16:42:23.695780 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:23 crc kubenswrapper[4558]: I0120 16:42:23.695809 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:23 crc kubenswrapper[4558]: I0120 16:42:23.695817 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:23 crc kubenswrapper[4558]: I0120 16:42:23.695829 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:23 crc kubenswrapper[4558]: I0120 16:42:23.695838 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:23Z","lastTransitionTime":"2026-01-20T16:42:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:23 crc kubenswrapper[4558]: I0120 16:42:23.797987 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:23 crc kubenswrapper[4558]: I0120 16:42:23.798026 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:23 crc kubenswrapper[4558]: I0120 16:42:23.798034 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:23 crc kubenswrapper[4558]: I0120 16:42:23.798046 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:23 crc kubenswrapper[4558]: I0120 16:42:23.798055 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:23Z","lastTransitionTime":"2026-01-20T16:42:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:23 crc kubenswrapper[4558]: I0120 16:42:23.899713 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:23 crc kubenswrapper[4558]: I0120 16:42:23.899738 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:23 crc kubenswrapper[4558]: I0120 16:42:23.899746 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:23 crc kubenswrapper[4558]: I0120 16:42:23.899757 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:23 crc kubenswrapper[4558]: I0120 16:42:23.899766 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:23Z","lastTransitionTime":"2026-01-20T16:42:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:24 crc kubenswrapper[4558]: I0120 16:42:24.001896 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:24 crc kubenswrapper[4558]: I0120 16:42:24.001921 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:24 crc kubenswrapper[4558]: I0120 16:42:24.001929 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:24 crc kubenswrapper[4558]: I0120 16:42:24.001940 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:24 crc kubenswrapper[4558]: I0120 16:42:24.001948 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:24Z","lastTransitionTime":"2026-01-20T16:42:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:24 crc kubenswrapper[4558]: I0120 16:42:24.104482 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:24 crc kubenswrapper[4558]: I0120 16:42:24.104527 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:24 crc kubenswrapper[4558]: I0120 16:42:24.104536 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:24 crc kubenswrapper[4558]: I0120 16:42:24.104548 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:24 crc kubenswrapper[4558]: I0120 16:42:24.104559 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:24Z","lastTransitionTime":"2026-01-20T16:42:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:24 crc kubenswrapper[4558]: I0120 16:42:24.206792 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:24 crc kubenswrapper[4558]: I0120 16:42:24.206826 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:24 crc kubenswrapper[4558]: I0120 16:42:24.206834 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:24 crc kubenswrapper[4558]: I0120 16:42:24.206846 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:24 crc kubenswrapper[4558]: I0120 16:42:24.206854 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:24Z","lastTransitionTime":"2026-01-20T16:42:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:24 crc kubenswrapper[4558]: I0120 16:42:24.308762 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:24 crc kubenswrapper[4558]: I0120 16:42:24.308806 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:24 crc kubenswrapper[4558]: I0120 16:42:24.308818 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:24 crc kubenswrapper[4558]: I0120 16:42:24.308832 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:24 crc kubenswrapper[4558]: I0120 16:42:24.308843 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:24Z","lastTransitionTime":"2026-01-20T16:42:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:24 crc kubenswrapper[4558]: I0120 16:42:24.411796 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:24 crc kubenswrapper[4558]: I0120 16:42:24.411833 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:24 crc kubenswrapper[4558]: I0120 16:42:24.411841 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:24 crc kubenswrapper[4558]: I0120 16:42:24.411853 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:24 crc kubenswrapper[4558]: I0120 16:42:24.411862 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:24Z","lastTransitionTime":"2026-01-20T16:42:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:24 crc kubenswrapper[4558]: I0120 16:42:24.513909 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:24 crc kubenswrapper[4558]: I0120 16:42:24.513949 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:24 crc kubenswrapper[4558]: I0120 16:42:24.513958 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:24 crc kubenswrapper[4558]: I0120 16:42:24.513972 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:24 crc kubenswrapper[4558]: I0120 16:42:24.513980 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:24Z","lastTransitionTime":"2026-01-20T16:42:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:24 crc kubenswrapper[4558]: I0120 16:42:24.553649 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-18 13:52:14.538001502 +0000 UTC Jan 20 16:42:24 crc kubenswrapper[4558]: I0120 16:42:24.565930 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 20 16:42:24 crc kubenswrapper[4558]: I0120 16:42:24.565987 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:42:24 crc kubenswrapper[4558]: E0120 16:42:24.566029 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 20 16:42:24 crc kubenswrapper[4558]: I0120 16:42:24.566086 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 20 16:42:24 crc kubenswrapper[4558]: E0120 16:42:24.566088 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 20 16:42:24 crc kubenswrapper[4558]: E0120 16:42:24.566124 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 20 16:42:24 crc kubenswrapper[4558]: I0120 16:42:24.616089 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:24 crc kubenswrapper[4558]: I0120 16:42:24.616119 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:24 crc kubenswrapper[4558]: I0120 16:42:24.616129 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:24 crc kubenswrapper[4558]: I0120 16:42:24.616142 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:24 crc kubenswrapper[4558]: I0120 16:42:24.616150 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:24Z","lastTransitionTime":"2026-01-20T16:42:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:24 crc kubenswrapper[4558]: I0120 16:42:24.718300 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:24 crc kubenswrapper[4558]: I0120 16:42:24.718334 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:24 crc kubenswrapper[4558]: I0120 16:42:24.718343 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:24 crc kubenswrapper[4558]: I0120 16:42:24.718355 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:24 crc kubenswrapper[4558]: I0120 16:42:24.718366 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:24Z","lastTransitionTime":"2026-01-20T16:42:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:24 crc kubenswrapper[4558]: I0120 16:42:24.820509 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:24 crc kubenswrapper[4558]: I0120 16:42:24.820540 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:24 crc kubenswrapper[4558]: I0120 16:42:24.820548 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:24 crc kubenswrapper[4558]: I0120 16:42:24.820559 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:24 crc kubenswrapper[4558]: I0120 16:42:24.820567 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:24Z","lastTransitionTime":"2026-01-20T16:42:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:24 crc kubenswrapper[4558]: I0120 16:42:24.922509 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:24 crc kubenswrapper[4558]: I0120 16:42:24.922539 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:24 crc kubenswrapper[4558]: I0120 16:42:24.922550 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:24 crc kubenswrapper[4558]: I0120 16:42:24.922562 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:24 crc kubenswrapper[4558]: I0120 16:42:24.922569 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:24Z","lastTransitionTime":"2026-01-20T16:42:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:25 crc kubenswrapper[4558]: I0120 16:42:25.024955 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:25 crc kubenswrapper[4558]: I0120 16:42:25.024991 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:25 crc kubenswrapper[4558]: I0120 16:42:25.024999 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:25 crc kubenswrapper[4558]: I0120 16:42:25.025011 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:25 crc kubenswrapper[4558]: I0120 16:42:25.025019 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:25Z","lastTransitionTime":"2026-01-20T16:42:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:25 crc kubenswrapper[4558]: I0120 16:42:25.126908 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:25 crc kubenswrapper[4558]: I0120 16:42:25.126947 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:25 crc kubenswrapper[4558]: I0120 16:42:25.126963 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:25 crc kubenswrapper[4558]: I0120 16:42:25.126976 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:25 crc kubenswrapper[4558]: I0120 16:42:25.126985 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:25Z","lastTransitionTime":"2026-01-20T16:42:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:25 crc kubenswrapper[4558]: I0120 16:42:25.228915 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:25 crc kubenswrapper[4558]: I0120 16:42:25.229122 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:25 crc kubenswrapper[4558]: I0120 16:42:25.229224 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:25 crc kubenswrapper[4558]: I0120 16:42:25.229306 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:25 crc kubenswrapper[4558]: I0120 16:42:25.229378 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:25Z","lastTransitionTime":"2026-01-20T16:42:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:25 crc kubenswrapper[4558]: I0120 16:42:25.331564 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:25 crc kubenswrapper[4558]: I0120 16:42:25.331617 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:25 crc kubenswrapper[4558]: I0120 16:42:25.331626 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:25 crc kubenswrapper[4558]: I0120 16:42:25.331638 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:25 crc kubenswrapper[4558]: I0120 16:42:25.331646 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:25Z","lastTransitionTime":"2026-01-20T16:42:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:25 crc kubenswrapper[4558]: I0120 16:42:25.433438 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:25 crc kubenswrapper[4558]: I0120 16:42:25.433573 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:25 crc kubenswrapper[4558]: I0120 16:42:25.433631 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:25 crc kubenswrapper[4558]: I0120 16:42:25.433649 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:25 crc kubenswrapper[4558]: I0120 16:42:25.433670 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:25Z","lastTransitionTime":"2026-01-20T16:42:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:25 crc kubenswrapper[4558]: I0120 16:42:25.535693 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:25 crc kubenswrapper[4558]: I0120 16:42:25.535733 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:25 crc kubenswrapper[4558]: I0120 16:42:25.535744 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:25 crc kubenswrapper[4558]: I0120 16:42:25.535760 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:25 crc kubenswrapper[4558]: I0120 16:42:25.535770 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:25Z","lastTransitionTime":"2026-01-20T16:42:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:25 crc kubenswrapper[4558]: I0120 16:42:25.554159 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-11 12:43:25.078044864 +0000 UTC Jan 20 16:42:25 crc kubenswrapper[4558]: I0120 16:42:25.565411 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9wrq6" Jan 20 16:42:25 crc kubenswrapper[4558]: E0120 16:42:25.565500 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9wrq6" podUID="30032328-bd33-4073-9366-e10bc5e2aa77" Jan 20 16:42:25 crc kubenswrapper[4558]: I0120 16:42:25.637897 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:25 crc kubenswrapper[4558]: I0120 16:42:25.637922 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:25 crc kubenswrapper[4558]: I0120 16:42:25.637930 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:25 crc kubenswrapper[4558]: I0120 16:42:25.637940 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:25 crc kubenswrapper[4558]: I0120 16:42:25.637948 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:25Z","lastTransitionTime":"2026-01-20T16:42:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:25 crc kubenswrapper[4558]: I0120 16:42:25.676424 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/30032328-bd33-4073-9366-e10bc5e2aa77-metrics-certs\") pod \"network-metrics-daemon-9wrq6\" (UID: \"30032328-bd33-4073-9366-e10bc5e2aa77\") " pod="openshift-multus/network-metrics-daemon-9wrq6" Jan 20 16:42:25 crc kubenswrapper[4558]: E0120 16:42:25.676569 4558 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 20 16:42:25 crc kubenswrapper[4558]: E0120 16:42:25.676632 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/30032328-bd33-4073-9366-e10bc5e2aa77-metrics-certs podName:30032328-bd33-4073-9366-e10bc5e2aa77 nodeName:}" failed. No retries permitted until 2026-01-20 16:42:33.676619402 +0000 UTC m=+47.436957369 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/30032328-bd33-4073-9366-e10bc5e2aa77-metrics-certs") pod "network-metrics-daemon-9wrq6" (UID: "30032328-bd33-4073-9366-e10bc5e2aa77") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 20 16:42:25 crc kubenswrapper[4558]: I0120 16:42:25.739614 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:25 crc kubenswrapper[4558]: I0120 16:42:25.739640 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:25 crc kubenswrapper[4558]: I0120 16:42:25.739647 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:25 crc kubenswrapper[4558]: I0120 16:42:25.739657 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:25 crc kubenswrapper[4558]: I0120 16:42:25.739665 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:25Z","lastTransitionTime":"2026-01-20T16:42:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:25 crc kubenswrapper[4558]: I0120 16:42:25.841543 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:25 crc kubenswrapper[4558]: I0120 16:42:25.841578 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:25 crc kubenswrapper[4558]: I0120 16:42:25.841604 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:25 crc kubenswrapper[4558]: I0120 16:42:25.841616 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:25 crc kubenswrapper[4558]: I0120 16:42:25.841625 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:25Z","lastTransitionTime":"2026-01-20T16:42:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:25 crc kubenswrapper[4558]: I0120 16:42:25.943409 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:25 crc kubenswrapper[4558]: I0120 16:42:25.943440 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:25 crc kubenswrapper[4558]: I0120 16:42:25.943449 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:25 crc kubenswrapper[4558]: I0120 16:42:25.943460 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:25 crc kubenswrapper[4558]: I0120 16:42:25.943468 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:25Z","lastTransitionTime":"2026-01-20T16:42:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:26 crc kubenswrapper[4558]: I0120 16:42:26.045854 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:26 crc kubenswrapper[4558]: I0120 16:42:26.045886 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:26 crc kubenswrapper[4558]: I0120 16:42:26.045894 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:26 crc kubenswrapper[4558]: I0120 16:42:26.045908 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:26 crc kubenswrapper[4558]: I0120 16:42:26.045917 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:26Z","lastTransitionTime":"2026-01-20T16:42:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:26 crc kubenswrapper[4558]: I0120 16:42:26.147978 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:26 crc kubenswrapper[4558]: I0120 16:42:26.148009 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:26 crc kubenswrapper[4558]: I0120 16:42:26.148017 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:26 crc kubenswrapper[4558]: I0120 16:42:26.148028 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:26 crc kubenswrapper[4558]: I0120 16:42:26.148035 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:26Z","lastTransitionTime":"2026-01-20T16:42:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:26 crc kubenswrapper[4558]: I0120 16:42:26.250056 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:26 crc kubenswrapper[4558]: I0120 16:42:26.250086 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:26 crc kubenswrapper[4558]: I0120 16:42:26.250095 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:26 crc kubenswrapper[4558]: I0120 16:42:26.250106 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:26 crc kubenswrapper[4558]: I0120 16:42:26.250115 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:26Z","lastTransitionTime":"2026-01-20T16:42:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:26 crc kubenswrapper[4558]: I0120 16:42:26.351630 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:26 crc kubenswrapper[4558]: I0120 16:42:26.351776 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:26 crc kubenswrapper[4558]: I0120 16:42:26.351842 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:26 crc kubenswrapper[4558]: I0120 16:42:26.351901 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:26 crc kubenswrapper[4558]: I0120 16:42:26.351959 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:26Z","lastTransitionTime":"2026-01-20T16:42:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:26 crc kubenswrapper[4558]: I0120 16:42:26.453197 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:26 crc kubenswrapper[4558]: I0120 16:42:26.453232 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:26 crc kubenswrapper[4558]: I0120 16:42:26.453241 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:26 crc kubenswrapper[4558]: I0120 16:42:26.453255 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:26 crc kubenswrapper[4558]: I0120 16:42:26.453264 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:26Z","lastTransitionTime":"2026-01-20T16:42:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:26 crc kubenswrapper[4558]: I0120 16:42:26.554266 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-22 02:25:34.16799112 +0000 UTC Jan 20 16:42:26 crc kubenswrapper[4558]: I0120 16:42:26.555347 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:26 crc kubenswrapper[4558]: I0120 16:42:26.555378 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:26 crc kubenswrapper[4558]: I0120 16:42:26.555387 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:26 crc kubenswrapper[4558]: I0120 16:42:26.555398 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:26 crc kubenswrapper[4558]: I0120 16:42:26.555406 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:26Z","lastTransitionTime":"2026-01-20T16:42:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:26 crc kubenswrapper[4558]: I0120 16:42:26.565723 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 20 16:42:26 crc kubenswrapper[4558]: I0120 16:42:26.565758 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 20 16:42:26 crc kubenswrapper[4558]: E0120 16:42:26.565797 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 20 16:42:26 crc kubenswrapper[4558]: I0120 16:42:26.565840 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:42:26 crc kubenswrapper[4558]: E0120 16:42:26.565928 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 20 16:42:26 crc kubenswrapper[4558]: E0120 16:42:26.566023 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 20 16:42:26 crc kubenswrapper[4558]: I0120 16:42:26.575715 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e190c24c1d8a3bc0241334598ce23829ada9afee282d49a0c82cd67ad82bcd02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:26Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:26 crc kubenswrapper[4558]: I0120 16:42:26.583288 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:26Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:26 crc kubenswrapper[4558]: I0120 16:42:26.591808 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:26Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:26 crc kubenswrapper[4558]: I0120 16:42:26.598926 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfwnl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8fe5472-7373-4e5d-87fc-a9ecaf26a5cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38cd58ecc67d337df2c29b79e5460ea23d4acc1e66de8b08ea70d7ab7b30691d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9nvl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a14e9281ff19e324499722aefadf1e1f2fd005089c153918ad4d8a7f5bdfa853\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2
099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9nvl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rfwnl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:26Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:26 crc kubenswrapper[4558]: I0120 16:42:26.606713 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:26Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:26 crc kubenswrapper[4558]: I0120 16:42:26.615003 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d96bp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8d2c8b-fb8e-4bff-b8d2-69570e5d9e57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe858db22acc96b062f1bc7941e05c823f30d1f9ba50bd497c57f38d57e04293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4v6jx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d96bp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-20T16:42:26Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:26 crc kubenswrapper[4558]: I0120 16:42:26.627677 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"254129cd-82fc-4162-b671-2434bc9e2972\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a6a184067bde115ef5e09fdd90e7dccfe89a9dc347af450998b91c068e7e803\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd8b93e5559d95511a1285a9aec9a7d72df6c330bcd1e1daf3e93f5494f70eeb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\
"containerID\\\":\\\"cri-o://4264ef8ee49a8b172fb05ac5c9b0bd32350b5d9ae95293079dfeaf44f021b455\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0889cf371f8b06cc2d255c15fda97db1d5000d6f6cff29d2fdcb8667cede8a99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4265d3281e954ee5989ee008da28c6cb9fa29478e5fdf23325521d455304da9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://186a1d2caad8865abf7f565405c4a4c077038ccb67ba77927cc2392ec075a811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://41b06bf319806f8cb4eb26d154351300538b7998dbff8ab608b53a397a84aeb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://41b06bf319806f8cb4eb26d154351300538b7998dbff8ab608b53a397a84aeb1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-20T16:42:13Z\\\",\\\"message\\\":\\\"rt network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:13Z is after 2025-08-24T17:21:41Z]\\\\nI0120 16:42:13.263941 5970 services_controller.go:434] Service openshift-marketplace/redhat-marketplace retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{redhat-marketplace openshift-marketplace cf6d00ec-cc2c-43f6-815c-40ffd0563e71 5558 0 2025-02-23 05:23:25 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[olm.managed:true olm.service-spec-hash:aUeLNNcZzVZO2rcaZ5Kc8V3jffO0Ss4T6qX6V5] map[] [{operators.coreos.com/v1alpha1 CatalogSource redhat-marketplace fcb55c30-a739-4bc1-9c9c-7634e05a3dbd 0xc00757e68d 0xc00757e68e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:grpc,Protocol:TCP,P\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:12Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed 
container=ovnkube-controller pod=ovnkube-node-nv2xw_openshift-ovn-kubernetes(254129cd-82fc-4162-b671-2434bc9e2972)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f41d488b0c24e594a2f5cad36b5f8efdb7d867fae0e18a74fe3c396a203d162f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\\\",\\\"image\\\":\
\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-nv2xw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:26Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:26 crc kubenswrapper[4558]: I0120 16:42:26.636929 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f5t7h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a318f03d0df490cb7bae11819c52eca011d163081aeecff1b2dd8f72caabbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"
ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e77617c69434ce20c448b525c7e86f1ece18d51ce6e14167b4ca7e99de8e5709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e77617c69434ce20c448b525c7e86f1ece18d51ce6e14167b4ca7e99de8e5709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://453c67434281e0261ec6799b30a548a01efd72d7df96632409e745b2e6a94546\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://453c67434281e0261ec6799b30a548a01efd72d7df96632409e745b2e6a94546\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"reason\\\":\\
\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06eab4daa6c8b6a564cf1bb1403b7b3b259a234ee09f23b4f4bc0628c2d58e69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06eab4daa6c8b6a564cf1bb1403b7b3b259a234ee09f23b4f4bc0628c2d58e69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1291ac09fc6ae2a79fdeaa0fc47272f61b97f45f67ed3f7c90bdfdbfe3519ff2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1291ac09fc6ae2a79fdeaa0fc47272f61b97f45f67ed3f7c90bdfdbfe3519ff2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe484514f50a14e485d4a97ea3ce9401c77f3f4dae4717719be7ed6c4d7b1a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"
,\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe484514f50a14e485d4a97ea3ce9401c77f3f4dae4717719be7ed6c4d7b1a59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f5t7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:26Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:26 crc kubenswrapper[4558]: I0120 16:42:26.643474 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9wrq6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30032328-bd33-4073-9366-e10bc5e2aa77\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6tz4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6tz4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:17Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9wrq6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:26Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:26 crc kubenswrapper[4558]: I0120 16:42:26.651242 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a13bfefa-f525-4ecd-89f5-f10480532bf6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9030e970e96b6909d771d4fa0c3b4f493b4a825cbc43ef7e684272284329c7c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://168e8f533aaa108e4160f9ae4af6bb7944edba7a596178ffffdc5325073fc3c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e0141aef1babb57501016cb09787591b5e6d9136b8d7dd4ba64393f0be5a027\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c87f08993c2b47c63ba9652c7dafe0fdbecdcb03a7b7080664ab1f1bd0e1d602\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:26Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:26 crc kubenswrapper[4558]: I0120 16:42:26.656549 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:26 crc kubenswrapper[4558]: I0120 16:42:26.656601 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:26 crc kubenswrapper[4558]: I0120 16:42:26.656611 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:26 crc kubenswrapper[4558]: I0120 16:42:26.656621 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:26 crc kubenswrapper[4558]: I0120 16:42:26.656630 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:26Z","lastTransitionTime":"2026-01-20T16:42:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:26 crc kubenswrapper[4558]: I0120 16:42:26.659806 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f904dd3e4b43e51569b6532f00fa2f9edf912c1f1969ee2ea00c99d1b7e35a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c877f4a456b3ce64bdbe6fefd50f1ef3f92bf9e26e6296005cf366e0ce265\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:26Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:26 crc kubenswrapper[4558]: I0120 16:42:26.666867 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e45cc56934a7b90e9d47f1c6d06d8dc140d57db4a216469e8ae112f398663e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:26Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:26 crc kubenswrapper[4558]: I0120 16:42:26.674750 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jsqvf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bedf08c7-1f93-4931-a7f3-e729e2a137af\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e32b76495d88d356373f9389e163a2f3a72e202bb454c7015594c1f5a41b511\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xxtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jsqvf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:26Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:26 crc kubenswrapper[4558]: I0120 16:42:26.681005 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-47477" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e026445a-aa73-4a0e-ba35-e2e07de1278c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec2ee3ff161cba7e14ad26763081c9f092f0dfe969cef113631c27e0db53d157\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xcnwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-47477\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:26Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:26 crc kubenswrapper[4558]: I0120 16:42:26.695792 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"425eab90-fb70-4498-a98b-b95742033890\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48592e72d91cf527296f06e52fec1c16c1da21323c0644659945af109a74ebb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7e18f59b5e6b6c0be141b7efb3ccb1df4ff6e3c394bcc88fb1142c8df433493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c889e2b0e14244ef49ec057e3d5fd91687e335a983fdc99a6816abd2af643ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d434793571cbba089cc34029d951bee9063aca5
8728b791f2d1af2d68229f778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89f7ad577df11c509855a90ac8791da6de3f0d2857a1ca56726a6327f40455bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15eddb9054021\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15eddb9054021\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:26Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:26 crc kubenswrapper[4558]: I0120 16:42:26.705803 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30e1ff9d-8dcd-4754-9a7a-c09598fb83db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ea68e442bb2143fbaa5e2c9fe335c6b023a31c2eeaa78d60de45c6ef40c2894\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a8122c3b57ac4f0026df149fe622450
d07d8b0d517ad018decd7b21d8d9c04a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7444e8fea1853bae45dbd3d52d07e3f7b94314cd29fe8c99891e515a892e569e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6438ffea42f98178c4c2c21ae01725a8b54d6a6f790f47d64faf5ecd9f1c00f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2e22f1d4e87dd06a67f49aedad280da48ac1c4ba2e438a4925cf8bd5330c4d8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0120 16:41:58.859734 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0120 16:41:58.861094 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179835226/tls.crt::/tmp/serving-cert-2179835226/tls.key\\\\\\\"\\\\nI0120 16:42:04.205247 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0120 16:42:04.207291 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0120 16:42:04.207308 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0120 16:42:04.207329 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0120 16:42:04.207333 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0120 16:42:04.210615 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0120 16:42:04.210632 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0120 16:42:04.210641 1 
secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210648 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0120 16:42:04.210655 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0120 16:42:04.210658 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0120 16:42:04.210660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0120 16:42:04.211964 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4850bde1fb538e21fc91949dc584ef0a791d718844691a44559e0513bb36203\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:26Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:26 crc kubenswrapper[4558]: I0120 16:42:26.712921 4558 status_manager.go:875] 
"Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"68337d27-3fa6-4a29-88b0-82e60c3739eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de7f7c23993a57d30f08686e1cd4abb5b17e108ac12dd0c7929a31574c69eeb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18a59ecb802507e0d5988eacc915fd7e0d6954518de80f61162c97f680fd8c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:26Z is 
after 2025-08-24T17:21:41Z" Jan 20 16:42:26 crc kubenswrapper[4558]: I0120 16:42:26.758538 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:26 crc kubenswrapper[4558]: I0120 16:42:26.758574 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:26 crc kubenswrapper[4558]: I0120 16:42:26.758602 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:26 crc kubenswrapper[4558]: I0120 16:42:26.758617 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:26 crc kubenswrapper[4558]: I0120 16:42:26.758627 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:26Z","lastTransitionTime":"2026-01-20T16:42:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:26 crc kubenswrapper[4558]: I0120 16:42:26.860279 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:26 crc kubenswrapper[4558]: I0120 16:42:26.860301 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:26 crc kubenswrapper[4558]: I0120 16:42:26.860309 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:26 crc kubenswrapper[4558]: I0120 16:42:26.860319 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:26 crc kubenswrapper[4558]: I0120 16:42:26.860327 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:26Z","lastTransitionTime":"2026-01-20T16:42:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:26 crc kubenswrapper[4558]: I0120 16:42:26.961710 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:26 crc kubenswrapper[4558]: I0120 16:42:26.961739 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:26 crc kubenswrapper[4558]: I0120 16:42:26.961748 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:26 crc kubenswrapper[4558]: I0120 16:42:26.961759 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:26 crc kubenswrapper[4558]: I0120 16:42:26.961768 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:26Z","lastTransitionTime":"2026-01-20T16:42:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.062845 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.062876 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.062883 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.062896 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.062904 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:27Z","lastTransitionTime":"2026-01-20T16:42:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.165067 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.165101 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.165110 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.165124 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.165135 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:27Z","lastTransitionTime":"2026-01-20T16:42:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.266678 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.266710 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.266718 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.266729 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.266736 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:27Z","lastTransitionTime":"2026-01-20T16:42:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.368649 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.368676 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.368686 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.368696 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.368704 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:27Z","lastTransitionTime":"2026-01-20T16:42:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.470412 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.470439 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.470447 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.470457 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.470465 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:27Z","lastTransitionTime":"2026-01-20T16:42:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.554767 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-13 03:13:53.001861634 +0000 UTC Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.565061 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9wrq6" Jan 20 16:42:27 crc kubenswrapper[4558]: E0120 16:42:27.565322 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9wrq6" podUID="30032328-bd33-4073-9366-e10bc5e2aa77" Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.572263 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.572293 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.572301 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.572312 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.572322 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:27Z","lastTransitionTime":"2026-01-20T16:42:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.674533 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.674835 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.674907 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.674980 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.675038 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:27Z","lastTransitionTime":"2026-01-20T16:42:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.689513 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.689544 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.689552 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.689570 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.689579 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:27Z","lastTransitionTime":"2026-01-20T16:42:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:27 crc kubenswrapper[4558]: E0120 16:42:27.698702 4558 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:27Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:27Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"bf027402-7a62-478b-8585-220c98495823\\\",\\\"systemUUID\\\":\\\"1def282c-e24e-4bc0-9a1b-d7cc30756d3d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:27Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.701206 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.701241 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.701249 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.701263 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.701271 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:27Z","lastTransitionTime":"2026-01-20T16:42:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:27 crc kubenswrapper[4558]: E0120 16:42:27.710340 4558 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:27Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:27Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"bf027402-7a62-478b-8585-220c98495823\\\",\\\"systemUUID\\\":\\\"1def282c-e24e-4bc0-9a1b-d7cc30756d3d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:27Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.712764 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.712793 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.712803 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.712813 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.712821 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:27Z","lastTransitionTime":"2026-01-20T16:42:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:27 crc kubenswrapper[4558]: E0120 16:42:27.721402 4558 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:27Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:27Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"bf027402-7a62-478b-8585-220c98495823\\\",\\\"systemUUID\\\":\\\"1def282c-e24e-4bc0-9a1b-d7cc30756d3d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:27Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.723658 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.723680 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.723690 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.723700 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.723707 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:27Z","lastTransitionTime":"2026-01-20T16:42:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:27 crc kubenswrapper[4558]: E0120 16:42:27.732545 4558 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:27Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:27Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"bf027402-7a62-478b-8585-220c98495823\\\",\\\"systemUUID\\\":\\\"1def282c-e24e-4bc0-9a1b-d7cc30756d3d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:27Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.734529 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.734553 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.734561 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.734572 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.734580 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:27Z","lastTransitionTime":"2026-01-20T16:42:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:27 crc kubenswrapper[4558]: E0120 16:42:27.742705 4558 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:27Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:27Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:27Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:27Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"bf027402-7a62-478b-8585-220c98495823\\\",\\\"systemUUID\\\":\\\"1def282c-e24e-4bc0-9a1b-d7cc30756d3d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:27Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:27 crc kubenswrapper[4558]: E0120 16:42:27.742830 4558 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.776724 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.776750 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.776759 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.776769 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.776776 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:27Z","lastTransitionTime":"2026-01-20T16:42:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.878293 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.878330 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.878338 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.878351 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.878359 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:27Z","lastTransitionTime":"2026-01-20T16:42:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.980311 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.980332 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.980340 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.980350 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:27 crc kubenswrapper[4558]: I0120 16:42:27.980358 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:27Z","lastTransitionTime":"2026-01-20T16:42:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:28 crc kubenswrapper[4558]: I0120 16:42:28.083455 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:28 crc kubenswrapper[4558]: I0120 16:42:28.083486 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:28 crc kubenswrapper[4558]: I0120 16:42:28.083495 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:28 crc kubenswrapper[4558]: I0120 16:42:28.083507 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:28 crc kubenswrapper[4558]: I0120 16:42:28.083515 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:28Z","lastTransitionTime":"2026-01-20T16:42:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:28 crc kubenswrapper[4558]: I0120 16:42:28.187466 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:28 crc kubenswrapper[4558]: I0120 16:42:28.187700 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:28 crc kubenswrapper[4558]: I0120 16:42:28.187770 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:28 crc kubenswrapper[4558]: I0120 16:42:28.187907 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:28 crc kubenswrapper[4558]: I0120 16:42:28.187973 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:28Z","lastTransitionTime":"2026-01-20T16:42:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:28 crc kubenswrapper[4558]: I0120 16:42:28.289736 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:28 crc kubenswrapper[4558]: I0120 16:42:28.289774 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:28 crc kubenswrapper[4558]: I0120 16:42:28.289783 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:28 crc kubenswrapper[4558]: I0120 16:42:28.289795 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:28 crc kubenswrapper[4558]: I0120 16:42:28.289803 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:28Z","lastTransitionTime":"2026-01-20T16:42:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:28 crc kubenswrapper[4558]: I0120 16:42:28.391483 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:28 crc kubenswrapper[4558]: I0120 16:42:28.391517 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:28 crc kubenswrapper[4558]: I0120 16:42:28.391525 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:28 crc kubenswrapper[4558]: I0120 16:42:28.391537 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:28 crc kubenswrapper[4558]: I0120 16:42:28.391545 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:28Z","lastTransitionTime":"2026-01-20T16:42:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:28 crc kubenswrapper[4558]: I0120 16:42:28.494261 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:28 crc kubenswrapper[4558]: I0120 16:42:28.494301 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:28 crc kubenswrapper[4558]: I0120 16:42:28.494310 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:28 crc kubenswrapper[4558]: I0120 16:42:28.494325 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:28 crc kubenswrapper[4558]: I0120 16:42:28.494335 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:28Z","lastTransitionTime":"2026-01-20T16:42:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:28 crc kubenswrapper[4558]: I0120 16:42:28.555139 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-06 15:28:36.780736238 +0000 UTC Jan 20 16:42:28 crc kubenswrapper[4558]: I0120 16:42:28.565599 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:42:28 crc kubenswrapper[4558]: I0120 16:42:28.565632 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 20 16:42:28 crc kubenswrapper[4558]: E0120 16:42:28.565722 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 20 16:42:28 crc kubenswrapper[4558]: I0120 16:42:28.565767 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 20 16:42:28 crc kubenswrapper[4558]: E0120 16:42:28.565827 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 20 16:42:28 crc kubenswrapper[4558]: E0120 16:42:28.565943 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 20 16:42:28 crc kubenswrapper[4558]: I0120 16:42:28.596534 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:28 crc kubenswrapper[4558]: I0120 16:42:28.596562 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:28 crc kubenswrapper[4558]: I0120 16:42:28.596571 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:28 crc kubenswrapper[4558]: I0120 16:42:28.596596 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:28 crc kubenswrapper[4558]: I0120 16:42:28.596605 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:28Z","lastTransitionTime":"2026-01-20T16:42:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:28 crc kubenswrapper[4558]: I0120 16:42:28.698687 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:28 crc kubenswrapper[4558]: I0120 16:42:28.698719 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:28 crc kubenswrapper[4558]: I0120 16:42:28.698728 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:28 crc kubenswrapper[4558]: I0120 16:42:28.698741 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:28 crc kubenswrapper[4558]: I0120 16:42:28.698749 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:28Z","lastTransitionTime":"2026-01-20T16:42:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:28 crc kubenswrapper[4558]: I0120 16:42:28.800938 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:28 crc kubenswrapper[4558]: I0120 16:42:28.800990 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:28 crc kubenswrapper[4558]: I0120 16:42:28.800999 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:28 crc kubenswrapper[4558]: I0120 16:42:28.801016 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:28 crc kubenswrapper[4558]: I0120 16:42:28.801025 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:28Z","lastTransitionTime":"2026-01-20T16:42:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:28 crc kubenswrapper[4558]: I0120 16:42:28.903219 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:28 crc kubenswrapper[4558]: I0120 16:42:28.903249 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:28 crc kubenswrapper[4558]: I0120 16:42:28.903257 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:28 crc kubenswrapper[4558]: I0120 16:42:28.903269 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:28 crc kubenswrapper[4558]: I0120 16:42:28.903277 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:28Z","lastTransitionTime":"2026-01-20T16:42:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:29 crc kubenswrapper[4558]: I0120 16:42:29.004503 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:29 crc kubenswrapper[4558]: I0120 16:42:29.004525 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:29 crc kubenswrapper[4558]: I0120 16:42:29.004533 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:29 crc kubenswrapper[4558]: I0120 16:42:29.004544 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:29 crc kubenswrapper[4558]: I0120 16:42:29.004551 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:29Z","lastTransitionTime":"2026-01-20T16:42:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:29 crc kubenswrapper[4558]: I0120 16:42:29.106706 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:29 crc kubenswrapper[4558]: I0120 16:42:29.106746 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:29 crc kubenswrapper[4558]: I0120 16:42:29.106754 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:29 crc kubenswrapper[4558]: I0120 16:42:29.106767 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:29 crc kubenswrapper[4558]: I0120 16:42:29.106775 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:29Z","lastTransitionTime":"2026-01-20T16:42:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:29 crc kubenswrapper[4558]: I0120 16:42:29.208362 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:29 crc kubenswrapper[4558]: I0120 16:42:29.208384 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:29 crc kubenswrapper[4558]: I0120 16:42:29.208391 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:29 crc kubenswrapper[4558]: I0120 16:42:29.208401 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:29 crc kubenswrapper[4558]: I0120 16:42:29.208408 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:29Z","lastTransitionTime":"2026-01-20T16:42:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:29 crc kubenswrapper[4558]: I0120 16:42:29.310570 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:29 crc kubenswrapper[4558]: I0120 16:42:29.310632 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:29 crc kubenswrapper[4558]: I0120 16:42:29.310641 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:29 crc kubenswrapper[4558]: I0120 16:42:29.310655 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:29 crc kubenswrapper[4558]: I0120 16:42:29.310664 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:29Z","lastTransitionTime":"2026-01-20T16:42:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:29 crc kubenswrapper[4558]: I0120 16:42:29.412512 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:29 crc kubenswrapper[4558]: I0120 16:42:29.412552 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:29 crc kubenswrapper[4558]: I0120 16:42:29.412561 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:29 crc kubenswrapper[4558]: I0120 16:42:29.412573 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:29 crc kubenswrapper[4558]: I0120 16:42:29.412593 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:29Z","lastTransitionTime":"2026-01-20T16:42:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:29 crc kubenswrapper[4558]: I0120 16:42:29.514734 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:29 crc kubenswrapper[4558]: I0120 16:42:29.514770 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:29 crc kubenswrapper[4558]: I0120 16:42:29.514778 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:29 crc kubenswrapper[4558]: I0120 16:42:29.514790 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:29 crc kubenswrapper[4558]: I0120 16:42:29.514799 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:29Z","lastTransitionTime":"2026-01-20T16:42:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:29 crc kubenswrapper[4558]: I0120 16:42:29.555428 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-20 10:56:06.658903902 +0000 UTC Jan 20 16:42:29 crc kubenswrapper[4558]: I0120 16:42:29.565755 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9wrq6" Jan 20 16:42:29 crc kubenswrapper[4558]: E0120 16:42:29.565857 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9wrq6" podUID="30032328-bd33-4073-9366-e10bc5e2aa77" Jan 20 16:42:29 crc kubenswrapper[4558]: I0120 16:42:29.566409 4558 scope.go:117] "RemoveContainer" containerID="41b06bf319806f8cb4eb26d154351300538b7998dbff8ab608b53a397a84aeb1" Jan 20 16:42:29 crc kubenswrapper[4558]: I0120 16:42:29.616629 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:29 crc kubenswrapper[4558]: I0120 16:42:29.616676 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:29 crc kubenswrapper[4558]: I0120 16:42:29.616685 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:29 crc kubenswrapper[4558]: I0120 16:42:29.616697 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:29 crc kubenswrapper[4558]: I0120 16:42:29.616704 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:29Z","lastTransitionTime":"2026-01-20T16:42:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:29 crc kubenswrapper[4558]: I0120 16:42:29.719064 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:29 crc kubenswrapper[4558]: I0120 16:42:29.719131 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:29 crc kubenswrapper[4558]: I0120 16:42:29.719142 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:29 crc kubenswrapper[4558]: I0120 16:42:29.719153 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:29 crc kubenswrapper[4558]: I0120 16:42:29.719179 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:29Z","lastTransitionTime":"2026-01-20T16:42:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:29 crc kubenswrapper[4558]: I0120 16:42:29.745799 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-nv2xw_254129cd-82fc-4162-b671-2434bc9e2972/ovnkube-controller/1.log" Jan 20 16:42:29 crc kubenswrapper[4558]: I0120 16:42:29.749136 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" event={"ID":"254129cd-82fc-4162-b671-2434bc9e2972","Type":"ContainerStarted","Data":"3d28c2f9293ff09b5e7d4ca4df1633cb114f685ad9a9dc8697e9efe3e9603259"} Jan 20 16:42:29 crc kubenswrapper[4558]: I0120 16:42:29.749256 4558 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 20 16:42:29 crc kubenswrapper[4558]: I0120 16:42:29.761360 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:29Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:29 crc kubenswrapper[4558]: I0120 16:42:29.770317 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfwnl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8fe5472-7373-4e5d-87fc-a9ecaf26a5cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38cd58ecc67d337df2c29b79e5460ea23d4acc1e66de8b08ea70d7ab7b30691d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9nvl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a14e9281ff19e324499722aefadf1e1f2fd005089c153918ad4d8a7f5bdfa853\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2
099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9nvl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rfwnl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:29Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:29 crc kubenswrapper[4558]: I0120 16:42:29.780087 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e190c24c1d8a3bc0241334598ce23829ada9afee282d49a0c82cd67ad82bcd02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current 
time 2026-01-20T16:42:29Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:29 crc kubenswrapper[4558]: I0120 16:42:29.789454 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:29Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:29 crc kubenswrapper[4558]: I0120 16:42:29.803755 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f5t7h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a318f03d0df490cb7bae11819c52eca011d163081aeecff1b2dd8f72caabbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e77617c69434ce20c448b525c7e86f1ece18d51ce6e14167b4ca7e99de8e5709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e77617c69434ce20c448b525c7e86f1ece18d51ce6e14167b4ca7e99de8e5709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://453c67434281e0261ec6799b30a548a01efd72d7df96632409e745b2e6a94546\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://453c67434281e0261ec6799b30a548a01efd72d7df96632409e745b2e6a94546\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06eab4daa6c8b6a564cf1bb1403b7b3b259a234ee09f23b4f4bc0628c2d58e69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06eab4daa6c8b6a564cf1bb1403b7b3b259a234ee09f23b4f4bc0628c2d58e69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1291ac09fc6ae2a79fdeaa0fc47272f61b97f45f67ed3f7c90bdfdbfe3519ff2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1291ac09fc6ae2a79fdeaa0fc47272f61b97f45f67ed3f7c90bdfdbfe3519ff2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe484514f50a14e485d4a97ea3ce9401c77f3f4dae4717719be7ed6c4d7b1a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe484514f50a14e485d4a97ea3ce9401c77f3f4dae4717719be7ed6c4d7b1a59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f5t7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:29Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:29 crc kubenswrapper[4558]: I0120 16:42:29.816222 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9wrq6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"30032328-bd33-4073-9366-e10bc5e2aa77\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6tz4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6tz4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:17Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9wrq6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:29Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:29 crc kubenswrapper[4558]: I0120 16:42:29.820797 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:29 crc kubenswrapper[4558]: I0120 16:42:29.820831 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:29 crc kubenswrapper[4558]: I0120 16:42:29.820839 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Jan 20 16:42:29 crc kubenswrapper[4558]: I0120 16:42:29.820852 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:29 crc kubenswrapper[4558]: I0120 16:42:29.820861 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:29Z","lastTransitionTime":"2026-01-20T16:42:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:29 crc kubenswrapper[4558]: I0120 16:42:29.827327 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:29Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:29 crc kubenswrapper[4558]: I0120 16:42:29.834906 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d96bp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8d2c8b-fb8e-4bff-b8d2-69570e5d9e57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe858db22acc96b062f1bc7941e05c823f30d1f9ba50bd497c57f38d57e04293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4v6jx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d96bp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-20T16:42:29Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:29 crc kubenswrapper[4558]: I0120 16:42:29.848193 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"254129cd-82fc-4162-b671-2434bc9e2972\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a6a184067bde115ef5e09fdd90e7dccfe89a9dc347af450998b91c068e7e803\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd8b93e5559d95511a1285a9aec9a7d72df6c330bcd1e1daf3e93f5494f70eeb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\
"containerID\\\":\\\"cri-o://4264ef8ee49a8b172fb05ac5c9b0bd32350b5d9ae95293079dfeaf44f021b455\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0889cf371f8b06cc2d255c15fda97db1d5000d6f6cff29d2fdcb8667cede8a99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4265d3281e954ee5989ee008da28c6cb9fa29478e5fdf23325521d455304da9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://186a1d2caad8865abf7f565405c4a4c077038ccb67ba77927cc2392ec075a811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d28c2f9293ff09b5e7d4ca4df1633cb114f685ad9a9dc8697e9efe3e9603259\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://41b06bf319806f8cb4eb26d154351300538b7998dbff8ab608b53a397a84aeb1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-20T16:42:13Z\\\",\\\"message\\\":\\\"rt network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:13Z is after 2025-08-24T17:21:41Z]\\\\nI0120 16:42:13.263941 5970 services_controller.go:434] Service openshift-marketplace/redhat-marketplace retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{redhat-marketplace openshift-marketplace cf6d00ec-cc2c-43f6-815c-40ffd0563e71 5558 0 2025-02-23 05:23:25 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[olm.managed:true olm.service-spec-hash:aUeLNNcZzVZO2rcaZ5Kc8V3jffO0Ss4T6qX6V5] map[] [{operators.coreos.com/v1alpha1 CatalogSource redhat-marketplace fcb55c30-a739-4bc1-9c9c-7634e05a3dbd 0xc00757e68d 0xc00757e68e}] [] 
[]},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:grpc,Protocol:TCP,P\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:12Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f41d488b0c24e594a2f5cad36b5f8efdb7d867fae0e18a74fe3c396a203d162f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{
\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-nv2xw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:29Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:29 crc kubenswrapper[4558]: I0120 16:42:29.857227 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jsqvf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bedf08c7-1f93-4931-a7f3-e729e2a137af\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e32b76495d88d356373f9389e163a2f3a72e202bb454c7015594c1f5a41b511\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xxtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jsqvf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:29Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:29 crc kubenswrapper[4558]: I0120 16:42:29.864370 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-47477" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e026445a-aa73-4a0e-ba35-e2e07de1278c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec2ee3ff161cba7e14ad26763081c9f092f0dfe969cef113631c27e0db53d157\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xcnwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-47477\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:29Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:29 crc kubenswrapper[4558]: I0120 16:42:29.872300 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a13bfefa-f525-4ecd-89f5-f10480532bf6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9030e970e96b6909d771d4fa0c3b4f493b4a825cbc43ef7e684272284329c7c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://168e8f533aaa108e4160f9ae4af6bb7944edba7a596178ffffdc5325073fc3c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e0141aef1babb57501016cb09787591b5e6d9136b8d7dd4ba64393f0be5a027\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c87f08993c2b47c63ba9652c7dafe0fdbecdcb03a7b7080664ab1f1bd0e1d602\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:29Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:29 crc kubenswrapper[4558]: I0120 16:42:29.880630 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f904dd3e4b43e51569b6532f00fa2f9edf912c1f1969ee2ea00c99d1b7e35a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c877f4a456b3ce64bdbe6fefd50f1ef3f92bf9e26e6296005cf366e0ce265\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:29Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:29 crc kubenswrapper[4558]: I0120 16:42:29.887911 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e45cc56934a7b90e9d47f1c6d06d8dc140d57db4a216469e8ae112f398663e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:29Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:29 crc kubenswrapper[4558]: I0120 16:42:29.895078 4558 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"68337d27-3fa6-4a29-88b0-82e60c3739eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de7f7c23993a57d30f08686e1cd4abb5b17e108ac12dd0c7929a31574c69eeb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18a59ecb802507e0d5988eacc915fd7e0d6954518de80f61162c97f680fd8c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:29Z is after 2025-08-24T17:21:41Z" Jan 20 
16:42:29 crc kubenswrapper[4558]: I0120 16:42:29.908113 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"425eab90-fb70-4498-a98b-b95742033890\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48592e72d91cf527296f06e52fec1c16c1da21323c0644659945af109a74ebb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7e18f59b5e6b6c0be141b7efb3ccb1df4ff6e3c394bcc88fb1142c8df433493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c889e2b0e14244ef49ec057e3d5fd91687e335a983fdc99a6816abd2af643ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"lo
g-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d434793571cbba089cc34029d951bee9063aca58728b791f2d1af2d68229f778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89f7ad577df11c509855a90ac8791da6de3f0d2857a1ca56726a6327f40455bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15eddb9054021\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15eddb9054021\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reas
on\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:29Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:29 crc kubenswrapper[4558]: I0120 16:42:29.917335 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"30e1ff9d-8dcd-4754-9a7a-c09598fb83db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ea68e442bb2143fbaa5e2c9fe335c6b023a31c2eeaa78d60de45c6ef40c2894\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a8122c3b57ac4f0026df149fe622450d07d8b0d517ad018decd7b21d8d9c04a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7444e8fea1853bae45dbd3d52d07e3f7b94314cd29fe8c99891e515a892e569e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6438ffea42f98178c4c2c21ae01725a8b54d6a6f790f47d64faf5ecd9f1c00f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2e22f1d4e87dd06a67f49aedad280da48ac1c4ba2e438a4925cf8bd5330c4d8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0120 16:41:58.859734 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0120 16:41:58.861094 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179835226/tls.crt::/tmp/serving-cert-2179835226/tls.key\\\\\\\"\\\\nI0120 16:42:04.205247 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0120 16:42:04.207291 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0120 16:42:04.207308 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0120 16:42:04.207329 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0120 16:42:04.207333 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0120 16:42:04.210615 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0120 16:42:04.210632 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0120 16:42:04.210641 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210648 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0120 16:42:04.210655 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0120 16:42:04.210658 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0120 16:42:04.210660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0120 16:42:04.211964 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4850bde1fb538e21fc91949dc584ef0a791d718844691a44559e0513bb36203\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:29Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:29 crc kubenswrapper[4558]: I0120 16:42:29.923012 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:29 crc kubenswrapper[4558]: I0120 16:42:29.923038 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:29 crc kubenswrapper[4558]: I0120 16:42:29.923046 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:29 crc kubenswrapper[4558]: I0120 16:42:29.923057 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:29 crc kubenswrapper[4558]: I0120 16:42:29.923064 4558 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:29Z","lastTransitionTime":"2026-01-20T16:42:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.024514 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.024549 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.024557 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.024570 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.024580 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:30Z","lastTransitionTime":"2026-01-20T16:42:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.126461 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.126494 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.126502 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.126514 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.126528 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:30Z","lastTransitionTime":"2026-01-20T16:42:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.228408 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.228445 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.228454 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.228467 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.228475 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:30Z","lastTransitionTime":"2026-01-20T16:42:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.330155 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.330199 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.330207 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.330218 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.330226 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:30Z","lastTransitionTime":"2026-01-20T16:42:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.432113 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.432145 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.432154 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.432179 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.432187 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:30Z","lastTransitionTime":"2026-01-20T16:42:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.534532 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.534594 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.534605 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.534616 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.534625 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:30Z","lastTransitionTime":"2026-01-20T16:42:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.555906 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-15 20:48:18.899250591 +0000 UTC Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.565200 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.565235 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.565197 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 20 16:42:30 crc kubenswrapper[4558]: E0120 16:42:30.565296 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 20 16:42:30 crc kubenswrapper[4558]: E0120 16:42:30.565384 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 20 16:42:30 crc kubenswrapper[4558]: E0120 16:42:30.565442 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.635866 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.635901 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.635909 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.635920 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.635928 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:30Z","lastTransitionTime":"2026-01-20T16:42:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.737677 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.737709 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.737718 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.737728 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.737737 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:30Z","lastTransitionTime":"2026-01-20T16:42:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.753319 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-nv2xw_254129cd-82fc-4162-b671-2434bc9e2972/ovnkube-controller/2.log" Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.753924 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-nv2xw_254129cd-82fc-4162-b671-2434bc9e2972/ovnkube-controller/1.log" Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.756151 4558 generic.go:334] "Generic (PLEG): container finished" podID="254129cd-82fc-4162-b671-2434bc9e2972" containerID="3d28c2f9293ff09b5e7d4ca4df1633cb114f685ad9a9dc8697e9efe3e9603259" exitCode=1 Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.756218 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" event={"ID":"254129cd-82fc-4162-b671-2434bc9e2972","Type":"ContainerDied","Data":"3d28c2f9293ff09b5e7d4ca4df1633cb114f685ad9a9dc8697e9efe3e9603259"} Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.756262 4558 scope.go:117] "RemoveContainer" containerID="41b06bf319806f8cb4eb26d154351300538b7998dbff8ab608b53a397a84aeb1" Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.756816 4558 scope.go:117] "RemoveContainer" containerID="3d28c2f9293ff09b5e7d4ca4df1633cb114f685ad9a9dc8697e9efe3e9603259" Jan 20 16:42:30 crc kubenswrapper[4558]: E0120 16:42:30.756987 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-nv2xw_openshift-ovn-kubernetes(254129cd-82fc-4162-b671-2434bc9e2972)\"" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" podUID="254129cd-82fc-4162-b671-2434bc9e2972" Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.766676 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e45cc56934a7b90e9d47f1c6d06d8dc140d57db4a216469e8ae112f398663e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:30Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.776088 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jsqvf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bedf08c7-1f93-4931-a7f3-e729e2a137af\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e32b76495d88d356373f9389e163a2f3a72e202bb454c7015594c1f5a41b511\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xxtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jsqvf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:30Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.783008 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-47477" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e026445a-aa73-4a0e-ba35-e2e07de1278c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec2ee3ff161cba7e14ad26763081c9f092f0dfe969cef113631c27e0db53d157\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xcnwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-47477\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:30Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.791622 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a13bfefa-f525-4ecd-89f5-f10480532bf6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9030e970e96b6909d771d4fa0c3b4f493b4a825cbc43ef7e684272284329c7c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://168e8f533aaa108e4160f9ae4af6bb7944edba7a596178ffffdc5325073fc3c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e0141aef1babb57501016cb09787591b5e6d9136b8d7dd4ba64393f0be5a027\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c87f08993c2b47c63ba9652c7dafe0fdbecdcb03a7b7080664ab1f1bd0e1d602\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:30Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.799784 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f904dd3e4b43e51569b6532f00fa2f9edf912c1f1969ee2ea00c99d1b7e35a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c877f4a456b3ce64bdbe6fefd50f1ef3f92bf9e26e6296005cf366e0ce265\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:30Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.808517 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30e1ff9d-8dcd-4754-9a7a-c09598fb83db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ea68e442bb2143fbaa5e2c9fe335c6b023a31c2eeaa78d60de45c6ef40c2894\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a8122c3b57ac4f0026df149fe622450d07d8b0d517ad018decd7b21d8d9c04a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-con
troller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7444e8fea1853bae45dbd3d52d07e3f7b94314cd29fe8c99891e515a892e569e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6438ffea42f98178c4c2c21ae01725a8b54d6a6f790f47d64faf5ecd9f1c00f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2e22f1d4e87dd06a67f49aedad280da48ac1c4ba2e438a4925cf8bd5330c4d8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0120 16:41:58.859734 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0120 16:41:58.861094 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179835226/tls.crt::/tmp/serving-cert-2179835226/tls.key\\\\\\\"\\\\nI0120 16:42:04.205247 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0120 16:42:04.207291 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0120 16:42:04.207308 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0120 16:42:04.207329 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0120 16:42:04.207333 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0120 16:42:04.210615 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0120 16:42:04.210632 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0120 16:42:04.210641 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210648 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0120 16:42:04.210655 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0120 16:42:04.210658 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0120 16:42:04.210660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0120 16:42:04.211964 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4850bde1fb538e21fc91949dc584ef0a791d718844691a44559e0513bb36203\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:30Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.817094 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"68337d27-3fa6-4a29-88b0-82e60c3739eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de7f7c23993a57d30f08686e1cd4abb5b17e108ac12dd0c7929a31574c69eeb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18a59ecb802507e0d5988eacc915fd7e0d6954518de80f61162c97f680fd8c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:30Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.830089 4558 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"425eab90-fb70-4498-a98b-b95742033890\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48592e72d91cf527296f06e52fec1c16c1da21323c0644659945af109a74ebb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7e18f59b5e6b6c0be141b7efb3ccb1df4ff6e3c394bcc88fb1142c8df433493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c889e2b0e14244ef49ec057e3d5fd91687e335a983fdc99a6816abd2af643ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-di
r\\\"}]},{\\\"containerID\\\":\\\"cri-o://d434793571cbba089cc34029d951bee9063aca58728b791f2d1af2d68229f778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89f7ad577df11c509855a90ac8791da6de3f0d2857a1ca56726a6327f40455bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15eddb9054021\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15eddb9054021\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}}},{\\\"containerID\\\"
:\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:30Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.838993 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.839022 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.839030 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.839042 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.839051 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:30Z","lastTransitionTime":"2026-01-20T16:42:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.839344 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:30Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.847794 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:30Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.855311 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfwnl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8fe5472-7373-4e5d-87fc-a9ecaf26a5cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38cd58ecc67d337df2c29b79e5460ea23d4acc1e66de8b08ea70d7ab7b30691d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9nvl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a14e9281ff19e324499722aefadf1e1f2fd005089c153918ad4d8a7f5bdfa853\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9nvl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rfwnl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:30Z is after 2025-08-24T17:21:41Z" Jan 20 
16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.863657 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e190c24c1d8a3bc0241334598ce23829ada9afee282d49a0c82cd67ad82bcd02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:30Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.876728 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"254129cd-82fc-4162-b671-2434bc9e2972\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with 
unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a6a184067bde115ef5e09fdd90e7dccfe89a9dc347af450998b91c068e7e803\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd8b93e5559d95511a1285a9aec9a7d72df6c330bcd1e1daf3e93f5494f70eeb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4264ef8ee49a8b172fb05ac5c9b0bd32350b5d9ae95293079dfeaf44f021b455\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0889cf371f8b06cc2d255c15fda97db1d5000d6f6cff29d2fdcb8667cede8a99\\\",\\\"image\\\":\\\"quay.io/openshift-rel
ease-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4265d3281e954ee5989ee008da28c6cb9fa29478e5fdf23325521d455304da9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://186a1d2caad8865abf7f565405c4a4c077038ccb67ba77927cc2392ec075a811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d28c2f9293ff09b5e7d4ca4
df1633cb114f685ad9a9dc8697e9efe3e9603259\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://41b06bf319806f8cb4eb26d154351300538b7998dbff8ab608b53a397a84aeb1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-20T16:42:13Z\\\",\\\"message\\\":\\\"rt network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:13Z is after 2025-08-24T17:21:41Z]\\\\nI0120 16:42:13.263941 5970 services_controller.go:434] Service openshift-marketplace/redhat-marketplace retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{redhat-marketplace openshift-marketplace cf6d00ec-cc2c-43f6-815c-40ffd0563e71 5558 0 2025-02-23 05:23:25 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[olm.managed:true olm.service-spec-hash:aUeLNNcZzVZO2rcaZ5Kc8V3jffO0Ss4T6qX6V5] map[] [{operators.coreos.com/v1alpha1 CatalogSource redhat-marketplace fcb55c30-a739-4bc1-9c9c-7634e05a3dbd 0xc00757e68d 0xc00757e68e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Name:grpc,Protocol:TCP,P\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:12Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d28c2f9293ff09b5e7d4ca4df1633cb114f685ad9a9dc8697e9efe3e9603259\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-20T16:42:30Z\\\",\\\"message\\\":\\\"6215 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0120 16:42:30.148863 6215 obj_retry.go:365] Adding new object: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI0120 16:42:30.148994 6215 ovn.go:134] Ensuring zone local for Pod openshift-kube-apiserver/kube-apiserver-crc in node crc\\\\nI0120 16:42:30.149000 6215 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-apiserver/kube-apiserver-crc after 0 failed attempt(s)\\\\nI0120 16:42:30.149004 6215 default_network_controller.go:776] Recording success event on pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI0120 16:42:30.148969 6215 services_controller.go:445] Built service openshift-kube-storage-version-migrator-operator/metrics LB template configs for network=default: []services.lbConfig(nil)\\\\nI0120 16:42:30.149028 6215 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-operator/iptables-alerter-4ln5h\\\\nI0120 16:42:30.149009 6215 base_network_controller_pods.go:916] Annotation values: ip=[10.217.0.3/23] ; mac=0a:58:0a:d9:00:03 ; gw=[10.217.0.1]\\\\nF0120 16:42:30.149066 6215 ovnkube.go:137] failed to run 
ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller ini\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f41d488b0c24e594a2f5cad36b5f8efdb7d867fae0e18a74fe3c396a203d162f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\
\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-nv2xw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:30Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.886847 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f5t7h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a318f03d0df490cb7bae11819c52eca011d163081aeecff1b2dd8f72caabbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e77617c69434ce20c448b525c7e86f1ece18d51ce6e14167b4ca7e99de8e5709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e77617c69434ce20c448b525c7e86f1ece18d51ce6e14167b4ca7e99de8e5709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://453c67434281e0261ec6799b30a548a01efd72d7df96632409e745b2e6a94546\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://453c67434281e0261ec6799b30a548a01efd72d7df96632409e745b2e6a94546\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06eab4daa6c8b6a564cf1bb1403b7b3b259a234ee09f23b4f4bc0628c2d58e69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06eab4daa6c8b6a564cf1bb1403b7b3b259a234ee09f23b4f4bc0628c2d58e69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1291ac09fc6ae2a79fdeaa0fc47272f61b97f45f67ed3f7c90bdfdbfe3519ff2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1291ac09fc6ae2a79fdeaa0fc47272f61b97f45f67ed3f7c90bdfdbfe3519ff2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe484514f50a14e485d4a97ea3ce9401c77f3f4dae4717719be7ed6c4d7b1a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe484514f50a14e485d4a97ea3ce9401c77f3f4dae4717719be7ed6c4d7b1a59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f5t7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:30Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.894280 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9wrq6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"30032328-bd33-4073-9366-e10bc5e2aa77\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6tz4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6tz4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:17Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9wrq6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:30Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.902438 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:30Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.909732 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d96bp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8d2c8b-fb8e-4bff-b8d2-69570e5d9e57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe858db22acc96b062f1bc7941e05c823f30d1f9ba50bd497c57f38d57e04293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4v6jx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d96bp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:30Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.941437 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.941478 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.941490 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.941504 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:30 crc kubenswrapper[4558]: I0120 16:42:30.941514 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:30Z","lastTransitionTime":"2026-01-20T16:42:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:31 crc kubenswrapper[4558]: I0120 16:42:31.043052 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:31 crc kubenswrapper[4558]: I0120 16:42:31.043080 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:31 crc kubenswrapper[4558]: I0120 16:42:31.043088 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:31 crc kubenswrapper[4558]: I0120 16:42:31.043101 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:31 crc kubenswrapper[4558]: I0120 16:42:31.043110 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:31Z","lastTransitionTime":"2026-01-20T16:42:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:31 crc kubenswrapper[4558]: I0120 16:42:31.145031 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:31 crc kubenswrapper[4558]: I0120 16:42:31.145064 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:31 crc kubenswrapper[4558]: I0120 16:42:31.145073 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:31 crc kubenswrapper[4558]: I0120 16:42:31.145086 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:31 crc kubenswrapper[4558]: I0120 16:42:31.145095 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:31Z","lastTransitionTime":"2026-01-20T16:42:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:31 crc kubenswrapper[4558]: I0120 16:42:31.246913 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:31 crc kubenswrapper[4558]: I0120 16:42:31.246951 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:31 crc kubenswrapper[4558]: I0120 16:42:31.246959 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:31 crc kubenswrapper[4558]: I0120 16:42:31.246975 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:31 crc kubenswrapper[4558]: I0120 16:42:31.246983 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:31Z","lastTransitionTime":"2026-01-20T16:42:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:31 crc kubenswrapper[4558]: I0120 16:42:31.349263 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:31 crc kubenswrapper[4558]: I0120 16:42:31.349294 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:31 crc kubenswrapper[4558]: I0120 16:42:31.349302 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:31 crc kubenswrapper[4558]: I0120 16:42:31.349327 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:31 crc kubenswrapper[4558]: I0120 16:42:31.349335 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:31Z","lastTransitionTime":"2026-01-20T16:42:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:31 crc kubenswrapper[4558]: I0120 16:42:31.451408 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:31 crc kubenswrapper[4558]: I0120 16:42:31.451460 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:31 crc kubenswrapper[4558]: I0120 16:42:31.451468 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:31 crc kubenswrapper[4558]: I0120 16:42:31.451481 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:31 crc kubenswrapper[4558]: I0120 16:42:31.451488 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:31Z","lastTransitionTime":"2026-01-20T16:42:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:31 crc kubenswrapper[4558]: I0120 16:42:31.553325 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:31 crc kubenswrapper[4558]: I0120 16:42:31.553351 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:31 crc kubenswrapper[4558]: I0120 16:42:31.553363 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:31 crc kubenswrapper[4558]: I0120 16:42:31.553374 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:31 crc kubenswrapper[4558]: I0120 16:42:31.553382 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:31Z","lastTransitionTime":"2026-01-20T16:42:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:31 crc kubenswrapper[4558]: I0120 16:42:31.556567 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-19 18:32:10.176739818 +0000 UTC Jan 20 16:42:31 crc kubenswrapper[4558]: I0120 16:42:31.565787 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9wrq6" Jan 20 16:42:31 crc kubenswrapper[4558]: E0120 16:42:31.565874 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9wrq6" podUID="30032328-bd33-4073-9366-e10bc5e2aa77" Jan 20 16:42:31 crc kubenswrapper[4558]: I0120 16:42:31.655375 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:31 crc kubenswrapper[4558]: I0120 16:42:31.655407 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:31 crc kubenswrapper[4558]: I0120 16:42:31.655415 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:31 crc kubenswrapper[4558]: I0120 16:42:31.655427 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:31 crc kubenswrapper[4558]: I0120 16:42:31.655436 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:31Z","lastTransitionTime":"2026-01-20T16:42:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:31 crc kubenswrapper[4558]: I0120 16:42:31.757485 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:31 crc kubenswrapper[4558]: I0120 16:42:31.757524 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:31 crc kubenswrapper[4558]: I0120 16:42:31.757532 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:31 crc kubenswrapper[4558]: I0120 16:42:31.757545 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:31 crc kubenswrapper[4558]: I0120 16:42:31.757553 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:31Z","lastTransitionTime":"2026-01-20T16:42:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:31 crc kubenswrapper[4558]: I0120 16:42:31.759360 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-nv2xw_254129cd-82fc-4162-b671-2434bc9e2972/ovnkube-controller/2.log" Jan 20 16:42:31 crc kubenswrapper[4558]: I0120 16:42:31.858940 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:31 crc kubenswrapper[4558]: I0120 16:42:31.858975 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:31 crc kubenswrapper[4558]: I0120 16:42:31.858982 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:31 crc kubenswrapper[4558]: I0120 16:42:31.858994 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:31 crc kubenswrapper[4558]: I0120 16:42:31.859003 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:31Z","lastTransitionTime":"2026-01-20T16:42:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:31 crc kubenswrapper[4558]: I0120 16:42:31.961379 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:31 crc kubenswrapper[4558]: I0120 16:42:31.961411 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:31 crc kubenswrapper[4558]: I0120 16:42:31.961421 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:31 crc kubenswrapper[4558]: I0120 16:42:31.961434 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:31 crc kubenswrapper[4558]: I0120 16:42:31.961442 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:31Z","lastTransitionTime":"2026-01-20T16:42:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:32 crc kubenswrapper[4558]: I0120 16:42:32.063460 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:32 crc kubenswrapper[4558]: I0120 16:42:32.063493 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:32 crc kubenswrapper[4558]: I0120 16:42:32.063501 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:32 crc kubenswrapper[4558]: I0120 16:42:32.063513 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:32 crc kubenswrapper[4558]: I0120 16:42:32.063521 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:32Z","lastTransitionTime":"2026-01-20T16:42:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:32 crc kubenswrapper[4558]: I0120 16:42:32.165353 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:32 crc kubenswrapper[4558]: I0120 16:42:32.165390 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:32 crc kubenswrapper[4558]: I0120 16:42:32.165400 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:32 crc kubenswrapper[4558]: I0120 16:42:32.165414 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:32 crc kubenswrapper[4558]: I0120 16:42:32.165422 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:32Z","lastTransitionTime":"2026-01-20T16:42:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:32 crc kubenswrapper[4558]: I0120 16:42:32.267346 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:32 crc kubenswrapper[4558]: I0120 16:42:32.267377 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:32 crc kubenswrapper[4558]: I0120 16:42:32.267385 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:32 crc kubenswrapper[4558]: I0120 16:42:32.267397 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:32 crc kubenswrapper[4558]: I0120 16:42:32.267406 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:32Z","lastTransitionTime":"2026-01-20T16:42:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:32 crc kubenswrapper[4558]: I0120 16:42:32.368885 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:32 crc kubenswrapper[4558]: I0120 16:42:32.368914 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:32 crc kubenswrapper[4558]: I0120 16:42:32.368921 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:32 crc kubenswrapper[4558]: I0120 16:42:32.368931 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:32 crc kubenswrapper[4558]: I0120 16:42:32.368939 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:32Z","lastTransitionTime":"2026-01-20T16:42:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:32 crc kubenswrapper[4558]: I0120 16:42:32.470994 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:32 crc kubenswrapper[4558]: I0120 16:42:32.471031 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:32 crc kubenswrapper[4558]: I0120 16:42:32.471040 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:32 crc kubenswrapper[4558]: I0120 16:42:32.471054 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:32 crc kubenswrapper[4558]: I0120 16:42:32.471062 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:32Z","lastTransitionTime":"2026-01-20T16:42:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:32 crc kubenswrapper[4558]: I0120 16:42:32.557032 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-13 15:05:41.629654713 +0000 UTC Jan 20 16:42:32 crc kubenswrapper[4558]: I0120 16:42:32.565297 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:42:32 crc kubenswrapper[4558]: I0120 16:42:32.565339 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 20 16:42:32 crc kubenswrapper[4558]: E0120 16:42:32.565400 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 20 16:42:32 crc kubenswrapper[4558]: I0120 16:42:32.565309 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 20 16:42:32 crc kubenswrapper[4558]: E0120 16:42:32.565527 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 20 16:42:32 crc kubenswrapper[4558]: E0120 16:42:32.565574 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 20 16:42:32 crc kubenswrapper[4558]: I0120 16:42:32.572597 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:32 crc kubenswrapper[4558]: I0120 16:42:32.572625 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:32 crc kubenswrapper[4558]: I0120 16:42:32.572633 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:32 crc kubenswrapper[4558]: I0120 16:42:32.572644 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:32 crc kubenswrapper[4558]: I0120 16:42:32.572653 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:32Z","lastTransitionTime":"2026-01-20T16:42:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:32 crc kubenswrapper[4558]: I0120 16:42:32.674883 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:32 crc kubenswrapper[4558]: I0120 16:42:32.674919 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:32 crc kubenswrapper[4558]: I0120 16:42:32.674929 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:32 crc kubenswrapper[4558]: I0120 16:42:32.674944 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:32 crc kubenswrapper[4558]: I0120 16:42:32.674954 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:32Z","lastTransitionTime":"2026-01-20T16:42:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:32 crc kubenswrapper[4558]: I0120 16:42:32.741449 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" Jan 20 16:42:32 crc kubenswrapper[4558]: I0120 16:42:32.742056 4558 scope.go:117] "RemoveContainer" containerID="3d28c2f9293ff09b5e7d4ca4df1633cb114f685ad9a9dc8697e9efe3e9603259" Jan 20 16:42:32 crc kubenswrapper[4558]: E0120 16:42:32.742212 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-nv2xw_openshift-ovn-kubernetes(254129cd-82fc-4162-b671-2434bc9e2972)\"" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" podUID="254129cd-82fc-4162-b671-2434bc9e2972" Jan 20 16:42:32 crc kubenswrapper[4558]: I0120 16:42:32.754837 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"254129cd-82fc-4162-b671-2434bc9e2972\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a6a184067bde115ef5e09fdd90e7dccfe89a9dc347af450998b91c068e7e803\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd8b93e5559d95511a1285a9aec9a7d72df6c330bcd1e1daf3e93f5494f70eeb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4264ef8ee49a8b172fb05ac5c9b0bd32350b5d9ae95293079dfeaf44f021b455\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0889cf371f8b06cc2d255c15fda97db1d5000d6f6cff29d2fdcb8667cede8a99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4265d3281e954ee5989ee008da28c6cb9fa29478e5fdf23325521d455304da9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://186a1d2caad8865abf7f565405c4a4c077038ccb67ba77927cc2392ec075a811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d28c2f9293ff09b5e7d4ca4df1633cb114f685a
d9a9dc8697e9efe3e9603259\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d28c2f9293ff09b5e7d4ca4df1633cb114f685ad9a9dc8697e9efe3e9603259\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-20T16:42:30Z\\\",\\\"message\\\":\\\"6215 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0120 16:42:30.148863 6215 obj_retry.go:365] Adding new object: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI0120 16:42:30.148994 6215 ovn.go:134] Ensuring zone local for Pod openshift-kube-apiserver/kube-apiserver-crc in node crc\\\\nI0120 16:42:30.149000 6215 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-apiserver/kube-apiserver-crc after 0 failed attempt(s)\\\\nI0120 16:42:30.149004 6215 default_network_controller.go:776] Recording success event on pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI0120 16:42:30.148969 6215 services_controller.go:445] Built service openshift-kube-storage-version-migrator-operator/metrics LB template configs for network=default: []services.lbConfig(nil)\\\\nI0120 16:42:30.149028 6215 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-operator/iptables-alerter-4ln5h\\\\nI0120 16:42:30.149009 6215 base_network_controller_pods.go:916] Annotation values: ip=[10.217.0.3/23] ; mac=0a:58:0a:d9:00:03 ; gw=[10.217.0.1]\\\\nF0120 16:42:30.149066 6215 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller ini\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:29Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-nv2xw_openshift-ovn-kubernetes(254129cd-82fc-4162-b671-2434bc9e2972)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f41d488b0c24e594a2f5cad36b5f8efdb7d867fae0e18a74fe3c396a203d162f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-nv2xw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:32Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:32 crc kubenswrapper[4558]: I0120 16:42:32.764076 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f5t7h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a318f03d0df490cb7bae11819c52eca011d163081aeecff1b2dd8f72caabbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"
}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e77617c69434ce20c448b525c7e86f1ece18d51ce6e14167b4ca7e99de8e5709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e77617c69434ce20c448b525c7e86f1ece18d51ce6e14167b4ca7e99de8e5709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://453c67434281e0261ec6799b30a548a01efd72d7df96632409e745b2e6a94546\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://453c67434281e0261ec6799b30a548a01efd72d7df96632409e745b2e6a94546\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt
\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06eab4daa6c8b6a564cf1bb1403b7b3b259a234ee09f23b4f4bc0628c2d58e69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06eab4daa6c8b6a564cf1bb1403b7b3b259a234ee09f23b4f4bc0628c2d58e69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1291ac09fc6ae2a79fdeaa0fc47272f61b97f45f67ed3f7c90bdfdbfe3519ff2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1291ac09fc6ae2a79fdeaa0fc47272f61b97f45f67ed3f7c90bdfdbfe3519ff2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe484514f50a14e485d4a97ea3ce9401c77f3f4dae4717719be7ed6c4d7b1a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"nam
e\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe484514f50a14e485d4a97ea3ce9401c77f3f4dae4717719be7ed6c4d7b1a59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f5t7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:32Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:32 crc kubenswrapper[4558]: I0120 16:42:32.771914 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9wrq6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30032328-bd33-4073-9366-e10bc5e2aa77\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6tz4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6tz4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:17Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9wrq6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:32Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:32 crc kubenswrapper[4558]: I0120 16:42:32.776630 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:32 crc kubenswrapper[4558]: I0120 16:42:32.776658 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:32 crc kubenswrapper[4558]: I0120 16:42:32.776668 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:32 crc kubenswrapper[4558]: I0120 16:42:32.776679 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:32 crc kubenswrapper[4558]: I0120 16:42:32.776688 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:32Z","lastTransitionTime":"2026-01-20T16:42:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:32 crc kubenswrapper[4558]: I0120 16:42:32.780338 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:32Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:32 crc kubenswrapper[4558]: I0120 16:42:32.787378 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d96bp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8d2c8b-fb8e-4bff-b8d2-69570e5d9e57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe858db22acc96b062f1bc7941e05c823f30d1f9ba50bd497c57f38d57e04293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4v6jx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d96bp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:32Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:32 crc kubenswrapper[4558]: I0120 16:42:32.795440 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e45cc56934a7b90e9d47f1c6d06d8dc140d57db4a216469e8ae112f398663e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:32Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:32 crc kubenswrapper[4558]: I0120 16:42:32.804018 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jsqvf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bedf08c7-1f93-4931-a7f3-e729e2a137af\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e32b76495d88d356373f9389e163a2f3a72e202bb454c7015594c1f5a41b511\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xxtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jsqvf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:32Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:32 crc kubenswrapper[4558]: I0120 16:42:32.810564 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-47477" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e026445a-aa73-4a0e-ba35-e2e07de1278c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec2ee3ff161cba7e14ad26763081c9f092f0dfe969cef113631c27e0db53d157\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xcnwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-47477\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:32Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:32 crc kubenswrapper[4558]: I0120 16:42:32.818699 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a13bfefa-f525-4ecd-89f5-f10480532bf6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9030e970e96b6909d771d4fa0c3b4f493b4a825cbc43ef7e684272284329c7c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://168e8f533aaa108e4160f9ae4af6bb7944edba7a596178ffffdc5325073fc3c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e0141aef1babb57501016cb09787591b5e6d9136b8d7dd4ba64393f0be5a027\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c87f08993c2b47c63ba9652c7dafe0fdbecdcb03a7b7080664ab1f1bd0e1d602\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:32Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:32 crc kubenswrapper[4558]: I0120 16:42:32.826881 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f904dd3e4b43e51569b6532f00fa2f9edf912c1f1969ee2ea00c99d1b7e35a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c877f4a456b3ce64bdbe6fefd50f1ef3f92bf9e26e6296005cf366e0ce265\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:32Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:32 crc kubenswrapper[4558]: I0120 16:42:32.838661 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30e1ff9d-8dcd-4754-9a7a-c09598fb83db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ea68e442bb2143fbaa5e2c9fe335c6b023a31c2eeaa78d60de45c6ef40c2894\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a8122c3b57ac4f0026df149fe622450d07d8b0d517ad018decd7b21d8d9c04a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-con
troller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7444e8fea1853bae45dbd3d52d07e3f7b94314cd29fe8c99891e515a892e569e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6438ffea42f98178c4c2c21ae01725a8b54d6a6f790f47d64faf5ecd9f1c00f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2e22f1d4e87dd06a67f49aedad280da48ac1c4ba2e438a4925cf8bd5330c4d8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0120 16:41:58.859734 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0120 16:41:58.861094 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179835226/tls.crt::/tmp/serving-cert-2179835226/tls.key\\\\\\\"\\\\nI0120 16:42:04.205247 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0120 16:42:04.207291 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0120 16:42:04.207308 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0120 16:42:04.207329 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0120 16:42:04.207333 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0120 16:42:04.210615 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0120 16:42:04.210632 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0120 16:42:04.210641 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210648 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0120 16:42:04.210655 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0120 16:42:04.210658 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0120 16:42:04.210660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0120 16:42:04.211964 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4850bde1fb538e21fc91949dc584ef0a791d718844691a44559e0513bb36203\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:32Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:32 crc kubenswrapper[4558]: I0120 16:42:32.847540 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"68337d27-3fa6-4a29-88b0-82e60c3739eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de7f7c23993a57d30f08686e1cd4abb5b17e108ac12dd0c7929a31574c69eeb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18a59ecb802507e0d5988eacc915fd7e0d6954518de80f61162c97f680fd8c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:32Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:32 crc kubenswrapper[4558]: I0120 16:42:32.859891 4558 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"425eab90-fb70-4498-a98b-b95742033890\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48592e72d91cf527296f06e52fec1c16c1da21323c0644659945af109a74ebb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7e18f59b5e6b6c0be141b7efb3ccb1df4ff6e3c394bcc88fb1142c8df433493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c889e2b0e14244ef49ec057e3d5fd91687e335a983fdc99a6816abd2af643ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-di
r\\\"}]},{\\\"containerID\\\":\\\"cri-o://d434793571cbba089cc34029d951bee9063aca58728b791f2d1af2d68229f778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89f7ad577df11c509855a90ac8791da6de3f0d2857a1ca56726a6327f40455bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15eddb9054021\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15eddb9054021\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}}},{\\\"containerID\\\"
:\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:32Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:32 crc kubenswrapper[4558]: I0120 16:42:32.868151 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:32Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:32 crc kubenswrapper[4558]: I0120 16:42:32.876021 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:32Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:32 crc kubenswrapper[4558]: I0120 16:42:32.878279 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:32 crc kubenswrapper[4558]: I0120 16:42:32.878311 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:32 crc kubenswrapper[4558]: I0120 16:42:32.878319 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:32 crc kubenswrapper[4558]: I0120 16:42:32.878332 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:32 crc kubenswrapper[4558]: I0120 16:42:32.878342 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:32Z","lastTransitionTime":"2026-01-20T16:42:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:32 crc kubenswrapper[4558]: I0120 16:42:32.884970 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfwnl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8fe5472-7373-4e5d-87fc-a9ecaf26a5cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38cd58ecc67d337df2c29b79e5460ea23d4acc1e66de8b08ea70d7ab7b30691d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9nvl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a14e9281ff19e324499722aefadf1e1f2fd005089c153918ad4d8a7f5bdfa853\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9nvl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rfwnl\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:32Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:32 crc kubenswrapper[4558]: I0120 16:42:32.893847 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e190c24c1d8a3bc0241334598ce23829ada9afee282d49a0c82cd67ad82bcd02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:32Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:32 crc kubenswrapper[4558]: I0120 16:42:32.979974 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:32 crc kubenswrapper[4558]: I0120 16:42:32.980010 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:32 crc kubenswrapper[4558]: I0120 16:42:32.980018 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:32 crc kubenswrapper[4558]: I0120 16:42:32.980031 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:32 crc kubenswrapper[4558]: I0120 16:42:32.980040 4558 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:32Z","lastTransitionTime":"2026-01-20T16:42:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:33 crc kubenswrapper[4558]: I0120 16:42:33.081789 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:33 crc kubenswrapper[4558]: I0120 16:42:33.081834 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:33 crc kubenswrapper[4558]: I0120 16:42:33.081843 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:33 crc kubenswrapper[4558]: I0120 16:42:33.081857 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:33 crc kubenswrapper[4558]: I0120 16:42:33.081868 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:33Z","lastTransitionTime":"2026-01-20T16:42:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:33 crc kubenswrapper[4558]: I0120 16:42:33.183907 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:33 crc kubenswrapper[4558]: I0120 16:42:33.183937 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:33 crc kubenswrapper[4558]: I0120 16:42:33.183945 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:33 crc kubenswrapper[4558]: I0120 16:42:33.183957 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:33 crc kubenswrapper[4558]: I0120 16:42:33.183965 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:33Z","lastTransitionTime":"2026-01-20T16:42:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:33 crc kubenswrapper[4558]: I0120 16:42:33.286015 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:33 crc kubenswrapper[4558]: I0120 16:42:33.286055 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:33 crc kubenswrapper[4558]: I0120 16:42:33.286063 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:33 crc kubenswrapper[4558]: I0120 16:42:33.286075 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:33 crc kubenswrapper[4558]: I0120 16:42:33.286084 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:33Z","lastTransitionTime":"2026-01-20T16:42:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:33 crc kubenswrapper[4558]: I0120 16:42:33.387740 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:33 crc kubenswrapper[4558]: I0120 16:42:33.387774 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:33 crc kubenswrapper[4558]: I0120 16:42:33.387782 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:33 crc kubenswrapper[4558]: I0120 16:42:33.387794 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:33 crc kubenswrapper[4558]: I0120 16:42:33.387804 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:33Z","lastTransitionTime":"2026-01-20T16:42:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:33 crc kubenswrapper[4558]: I0120 16:42:33.489903 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:33 crc kubenswrapper[4558]: I0120 16:42:33.489941 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:33 crc kubenswrapper[4558]: I0120 16:42:33.489950 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:33 crc kubenswrapper[4558]: I0120 16:42:33.489965 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:33 crc kubenswrapper[4558]: I0120 16:42:33.489974 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:33Z","lastTransitionTime":"2026-01-20T16:42:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:33 crc kubenswrapper[4558]: I0120 16:42:33.557718 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-17 02:07:34.653804992 +0000 UTC Jan 20 16:42:33 crc kubenswrapper[4558]: I0120 16:42:33.564904 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9wrq6" Jan 20 16:42:33 crc kubenswrapper[4558]: E0120 16:42:33.565009 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9wrq6" podUID="30032328-bd33-4073-9366-e10bc5e2aa77" Jan 20 16:42:33 crc kubenswrapper[4558]: I0120 16:42:33.591620 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:33 crc kubenswrapper[4558]: I0120 16:42:33.591662 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:33 crc kubenswrapper[4558]: I0120 16:42:33.591672 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:33 crc kubenswrapper[4558]: I0120 16:42:33.591682 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:33 crc kubenswrapper[4558]: I0120 16:42:33.591690 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:33Z","lastTransitionTime":"2026-01-20T16:42:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:33 crc kubenswrapper[4558]: I0120 16:42:33.693088 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:33 crc kubenswrapper[4558]: I0120 16:42:33.693119 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:33 crc kubenswrapper[4558]: I0120 16:42:33.693127 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:33 crc kubenswrapper[4558]: I0120 16:42:33.693139 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:33 crc kubenswrapper[4558]: I0120 16:42:33.693147 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:33Z","lastTransitionTime":"2026-01-20T16:42:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:33 crc kubenswrapper[4558]: I0120 16:42:33.742680 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/30032328-bd33-4073-9366-e10bc5e2aa77-metrics-certs\") pod \"network-metrics-daemon-9wrq6\" (UID: \"30032328-bd33-4073-9366-e10bc5e2aa77\") " pod="openshift-multus/network-metrics-daemon-9wrq6" Jan 20 16:42:33 crc kubenswrapper[4558]: E0120 16:42:33.742795 4558 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 20 16:42:33 crc kubenswrapper[4558]: E0120 16:42:33.742838 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/30032328-bd33-4073-9366-e10bc5e2aa77-metrics-certs podName:30032328-bd33-4073-9366-e10bc5e2aa77 nodeName:}" failed. No retries permitted until 2026-01-20 16:42:49.742826853 +0000 UTC m=+63.503164820 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/30032328-bd33-4073-9366-e10bc5e2aa77-metrics-certs") pod "network-metrics-daemon-9wrq6" (UID: "30032328-bd33-4073-9366-e10bc5e2aa77") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 20 16:42:33 crc kubenswrapper[4558]: I0120 16:42:33.795018 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:33 crc kubenswrapper[4558]: I0120 16:42:33.795065 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:33 crc kubenswrapper[4558]: I0120 16:42:33.795076 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:33 crc kubenswrapper[4558]: I0120 16:42:33.795087 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:33 crc kubenswrapper[4558]: I0120 16:42:33.795096 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:33Z","lastTransitionTime":"2026-01-20T16:42:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:33 crc kubenswrapper[4558]: I0120 16:42:33.896822 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:33 crc kubenswrapper[4558]: I0120 16:42:33.896861 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:33 crc kubenswrapper[4558]: I0120 16:42:33.896869 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:33 crc kubenswrapper[4558]: I0120 16:42:33.896885 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:33 crc kubenswrapper[4558]: I0120 16:42:33.896894 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:33Z","lastTransitionTime":"2026-01-20T16:42:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:33 crc kubenswrapper[4558]: I0120 16:42:33.998618 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:33 crc kubenswrapper[4558]: I0120 16:42:33.998682 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:33 crc kubenswrapper[4558]: I0120 16:42:33.998693 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:33 crc kubenswrapper[4558]: I0120 16:42:33.998705 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:33 crc kubenswrapper[4558]: I0120 16:42:33.998713 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:33Z","lastTransitionTime":"2026-01-20T16:42:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:34 crc kubenswrapper[4558]: I0120 16:42:34.100870 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:34 crc kubenswrapper[4558]: I0120 16:42:34.100907 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:34 crc kubenswrapper[4558]: I0120 16:42:34.100916 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:34 crc kubenswrapper[4558]: I0120 16:42:34.100927 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:34 crc kubenswrapper[4558]: I0120 16:42:34.100935 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:34Z","lastTransitionTime":"2026-01-20T16:42:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:34 crc kubenswrapper[4558]: I0120 16:42:34.202463 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:34 crc kubenswrapper[4558]: I0120 16:42:34.202494 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:34 crc kubenswrapper[4558]: I0120 16:42:34.202504 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:34 crc kubenswrapper[4558]: I0120 16:42:34.202515 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:34 crc kubenswrapper[4558]: I0120 16:42:34.202525 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:34Z","lastTransitionTime":"2026-01-20T16:42:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:34 crc kubenswrapper[4558]: I0120 16:42:34.304395 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:34 crc kubenswrapper[4558]: I0120 16:42:34.304423 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:34 crc kubenswrapper[4558]: I0120 16:42:34.304453 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:34 crc kubenswrapper[4558]: I0120 16:42:34.304464 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:34 crc kubenswrapper[4558]: I0120 16:42:34.304472 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:34Z","lastTransitionTime":"2026-01-20T16:42:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:34 crc kubenswrapper[4558]: I0120 16:42:34.406746 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:34 crc kubenswrapper[4558]: I0120 16:42:34.406782 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:34 crc kubenswrapper[4558]: I0120 16:42:34.406792 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:34 crc kubenswrapper[4558]: I0120 16:42:34.406806 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:34 crc kubenswrapper[4558]: I0120 16:42:34.406815 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:34Z","lastTransitionTime":"2026-01-20T16:42:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:34 crc kubenswrapper[4558]: I0120 16:42:34.509055 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:34 crc kubenswrapper[4558]: I0120 16:42:34.509090 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:34 crc kubenswrapper[4558]: I0120 16:42:34.509098 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:34 crc kubenswrapper[4558]: I0120 16:42:34.509113 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:34 crc kubenswrapper[4558]: I0120 16:42:34.509121 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:34Z","lastTransitionTime":"2026-01-20T16:42:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:34 crc kubenswrapper[4558]: I0120 16:42:34.558191 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-01 03:07:03.292227786 +0000 UTC Jan 20 16:42:34 crc kubenswrapper[4558]: I0120 16:42:34.565674 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:42:34 crc kubenswrapper[4558]: I0120 16:42:34.565713 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 20 16:42:34 crc kubenswrapper[4558]: I0120 16:42:34.565714 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 20 16:42:34 crc kubenswrapper[4558]: E0120 16:42:34.565785 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 20 16:42:34 crc kubenswrapper[4558]: E0120 16:42:34.565896 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 20 16:42:34 crc kubenswrapper[4558]: E0120 16:42:34.565963 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 20 16:42:34 crc kubenswrapper[4558]: I0120 16:42:34.611674 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:34 crc kubenswrapper[4558]: I0120 16:42:34.611713 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:34 crc kubenswrapper[4558]: I0120 16:42:34.611722 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:34 crc kubenswrapper[4558]: I0120 16:42:34.611735 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:34 crc kubenswrapper[4558]: I0120 16:42:34.611744 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:34Z","lastTransitionTime":"2026-01-20T16:42:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:34 crc kubenswrapper[4558]: I0120 16:42:34.713756 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:34 crc kubenswrapper[4558]: I0120 16:42:34.713792 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:34 crc kubenswrapper[4558]: I0120 16:42:34.713801 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:34 crc kubenswrapper[4558]: I0120 16:42:34.713814 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:34 crc kubenswrapper[4558]: I0120 16:42:34.713823 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:34Z","lastTransitionTime":"2026-01-20T16:42:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:34 crc kubenswrapper[4558]: I0120 16:42:34.819124 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:34 crc kubenswrapper[4558]: I0120 16:42:34.819216 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:34 crc kubenswrapper[4558]: I0120 16:42:34.819230 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:34 crc kubenswrapper[4558]: I0120 16:42:34.819249 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:34 crc kubenswrapper[4558]: I0120 16:42:34.819264 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:34Z","lastTransitionTime":"2026-01-20T16:42:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:34 crc kubenswrapper[4558]: I0120 16:42:34.922220 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:34 crc kubenswrapper[4558]: I0120 16:42:34.922273 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:34 crc kubenswrapper[4558]: I0120 16:42:34.922283 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:34 crc kubenswrapper[4558]: I0120 16:42:34.922297 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:34 crc kubenswrapper[4558]: I0120 16:42:34.922307 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:34Z","lastTransitionTime":"2026-01-20T16:42:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:35 crc kubenswrapper[4558]: I0120 16:42:35.024948 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:35 crc kubenswrapper[4558]: I0120 16:42:35.024991 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:35 crc kubenswrapper[4558]: I0120 16:42:35.025003 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:35 crc kubenswrapper[4558]: I0120 16:42:35.025019 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:35 crc kubenswrapper[4558]: I0120 16:42:35.025029 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:35Z","lastTransitionTime":"2026-01-20T16:42:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:35 crc kubenswrapper[4558]: I0120 16:42:35.128239 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:35 crc kubenswrapper[4558]: I0120 16:42:35.128479 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:35 crc kubenswrapper[4558]: I0120 16:42:35.128489 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:35 crc kubenswrapper[4558]: I0120 16:42:35.128502 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:35 crc kubenswrapper[4558]: I0120 16:42:35.128512 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:35Z","lastTransitionTime":"2026-01-20T16:42:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:35 crc kubenswrapper[4558]: I0120 16:42:35.230400 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:35 crc kubenswrapper[4558]: I0120 16:42:35.230453 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:35 crc kubenswrapper[4558]: I0120 16:42:35.230464 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:35 crc kubenswrapper[4558]: I0120 16:42:35.230477 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:35 crc kubenswrapper[4558]: I0120 16:42:35.230487 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:35Z","lastTransitionTime":"2026-01-20T16:42:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:35 crc kubenswrapper[4558]: I0120 16:42:35.332777 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:35 crc kubenswrapper[4558]: I0120 16:42:35.332829 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:35 crc kubenswrapper[4558]: I0120 16:42:35.332840 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:35 crc kubenswrapper[4558]: I0120 16:42:35.332856 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:35 crc kubenswrapper[4558]: I0120 16:42:35.332866 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:35Z","lastTransitionTime":"2026-01-20T16:42:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:35 crc kubenswrapper[4558]: I0120 16:42:35.435432 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:35 crc kubenswrapper[4558]: I0120 16:42:35.435472 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:35 crc kubenswrapper[4558]: I0120 16:42:35.435482 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:35 crc kubenswrapper[4558]: I0120 16:42:35.435496 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:35 crc kubenswrapper[4558]: I0120 16:42:35.435506 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:35Z","lastTransitionTime":"2026-01-20T16:42:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:35 crc kubenswrapper[4558]: I0120 16:42:35.537045 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:35 crc kubenswrapper[4558]: I0120 16:42:35.537095 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:35 crc kubenswrapper[4558]: I0120 16:42:35.537103 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:35 crc kubenswrapper[4558]: I0120 16:42:35.537118 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:35 crc kubenswrapper[4558]: I0120 16:42:35.537126 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:35Z","lastTransitionTime":"2026-01-20T16:42:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:35 crc kubenswrapper[4558]: I0120 16:42:35.558557 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-01 14:04:20.531455962 +0000 UTC Jan 20 16:42:35 crc kubenswrapper[4558]: I0120 16:42:35.565911 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9wrq6" Jan 20 16:42:35 crc kubenswrapper[4558]: E0120 16:42:35.566016 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9wrq6" podUID="30032328-bd33-4073-9366-e10bc5e2aa77" Jan 20 16:42:35 crc kubenswrapper[4558]: I0120 16:42:35.638607 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:35 crc kubenswrapper[4558]: I0120 16:42:35.638647 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:35 crc kubenswrapper[4558]: I0120 16:42:35.638657 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:35 crc kubenswrapper[4558]: I0120 16:42:35.638670 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:35 crc kubenswrapper[4558]: I0120 16:42:35.638679 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:35Z","lastTransitionTime":"2026-01-20T16:42:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:35 crc kubenswrapper[4558]: I0120 16:42:35.741243 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:35 crc kubenswrapper[4558]: I0120 16:42:35.741277 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:35 crc kubenswrapper[4558]: I0120 16:42:35.741285 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:35 crc kubenswrapper[4558]: I0120 16:42:35.741299 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:35 crc kubenswrapper[4558]: I0120 16:42:35.741309 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:35Z","lastTransitionTime":"2026-01-20T16:42:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:35 crc kubenswrapper[4558]: I0120 16:42:35.843397 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:35 crc kubenswrapper[4558]: I0120 16:42:35.843461 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:35 crc kubenswrapper[4558]: I0120 16:42:35.843474 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:35 crc kubenswrapper[4558]: I0120 16:42:35.843496 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:35 crc kubenswrapper[4558]: I0120 16:42:35.843511 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:35Z","lastTransitionTime":"2026-01-20T16:42:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:35 crc kubenswrapper[4558]: I0120 16:42:35.946212 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:35 crc kubenswrapper[4558]: I0120 16:42:35.946263 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:35 crc kubenswrapper[4558]: I0120 16:42:35.946275 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:35 crc kubenswrapper[4558]: I0120 16:42:35.946293 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:35 crc kubenswrapper[4558]: I0120 16:42:35.946304 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:35Z","lastTransitionTime":"2026-01-20T16:42:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.049148 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.049223 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.049235 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.049251 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.049259 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:36Z","lastTransitionTime":"2026-01-20T16:42:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.151885 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.151930 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.151939 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.151955 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.151965 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:36Z","lastTransitionTime":"2026-01-20T16:42:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.254351 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.254391 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.254400 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.254414 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.254424 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:36Z","lastTransitionTime":"2026-01-20T16:42:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.356275 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.356315 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.356322 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.356336 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.356347 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:36Z","lastTransitionTime":"2026-01-20T16:42:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.369070 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 20 16:42:36 crc kubenswrapper[4558]: E0120 16:42:36.369236 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-20 16:43:08.369222317 +0000 UTC m=+82.129560284 (durationBeforeRetry 32s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.458467 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.458501 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.458509 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.458521 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.458532 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:36Z","lastTransitionTime":"2026-01-20T16:42:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.470080 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.470112 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.470145 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 20 16:42:36 crc kubenswrapper[4558]: E0120 16:42:36.470148 4558 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 20 16:42:36 crc kubenswrapper[4558]: E0120 16:42:36.470230 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-20 16:43:08.470216764 +0000 UTC m=+82.230554741 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 20 16:42:36 crc kubenswrapper[4558]: E0120 16:42:36.470237 4558 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 20 16:42:36 crc kubenswrapper[4558]: E0120 16:42:36.470250 4558 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 20 16:42:36 crc kubenswrapper[4558]: E0120 16:42:36.470264 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-20 16:43:08.470256278 +0000 UTC m=+82.230594235 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 20 16:42:36 crc kubenswrapper[4558]: E0120 16:42:36.470266 4558 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 20 16:42:36 crc kubenswrapper[4558]: E0120 16:42:36.470277 4558 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.470179 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:42:36 crc kubenswrapper[4558]: E0120 16:42:36.470304 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-20 16:43:08.470296253 +0000 UTC m=+82.230634220 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 20 16:42:36 crc kubenswrapper[4558]: E0120 16:42:36.470353 4558 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 20 16:42:36 crc kubenswrapper[4558]: E0120 16:42:36.470575 4558 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 20 16:42:36 crc kubenswrapper[4558]: E0120 16:42:36.470664 4558 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 20 16:42:36 crc kubenswrapper[4558]: E0120 16:42:36.470810 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-20 16:43:08.470777178 +0000 UTC m=+82.231115145 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.559209 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-21 06:57:57.610429034 +0000 UTC Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.561202 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.561267 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.561282 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.561306 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.561322 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:36Z","lastTransitionTime":"2026-01-20T16:42:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.565461 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.565480 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.565522 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 20 16:42:36 crc kubenswrapper[4558]: E0120 16:42:36.565637 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 20 16:42:36 crc kubenswrapper[4558]: E0120 16:42:36.565734 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 20 16:42:36 crc kubenswrapper[4558]: E0120 16:42:36.565895 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.585219 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"254129cd-82fc-4162-b671-2434bc9e2972\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a6a184067bde115ef5e09fdd90e7dccfe89a9dc347af450998b91c068e7e803\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd8b93e5559d95511a1285a9aec9a7d72df6c330bcd1e1daf3e93f5494f70eeb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4264ef8ee49a8b172fb05ac5c9b0bd32350b5d9ae95293079dfeaf44f021b455\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0889cf371f8b06cc2d255c15fda97db1d5000d6f6cff29d2fdcb8667cede8a99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4265d3281e954ee5989ee008da28c6cb9fa29478e5fdf23325521d455304da9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://186a1d2caad8865abf7f565405c4a4c077038ccb67ba77927cc2392ec075a811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d28c2f9293ff09b5e7d4ca4df1633cb114f685a
d9a9dc8697e9efe3e9603259\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d28c2f9293ff09b5e7d4ca4df1633cb114f685ad9a9dc8697e9efe3e9603259\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-20T16:42:30Z\\\",\\\"message\\\":\\\"6215 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0120 16:42:30.148863 6215 obj_retry.go:365] Adding new object: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI0120 16:42:30.148994 6215 ovn.go:134] Ensuring zone local for Pod openshift-kube-apiserver/kube-apiserver-crc in node crc\\\\nI0120 16:42:30.149000 6215 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-apiserver/kube-apiserver-crc after 0 failed attempt(s)\\\\nI0120 16:42:30.149004 6215 default_network_controller.go:776] Recording success event on pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI0120 16:42:30.148969 6215 services_controller.go:445] Built service openshift-kube-storage-version-migrator-operator/metrics LB template configs for network=default: []services.lbConfig(nil)\\\\nI0120 16:42:30.149028 6215 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-operator/iptables-alerter-4ln5h\\\\nI0120 16:42:30.149009 6215 base_network_controller_pods.go:916] Annotation values: ip=[10.217.0.3/23] ; mac=0a:58:0a:d9:00:03 ; gw=[10.217.0.1]\\\\nF0120 16:42:30.149066 6215 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller ini\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:29Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-nv2xw_openshift-ovn-kubernetes(254129cd-82fc-4162-b671-2434bc9e2972)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f41d488b0c24e594a2f5cad36b5f8efdb7d867fae0e18a74fe3c396a203d162f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-nv2xw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:36Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.596994 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f5t7h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a318f03d0df490cb7bae11819c52eca011d163081aeecff1b2dd8f72caabbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"
}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e77617c69434ce20c448b525c7e86f1ece18d51ce6e14167b4ca7e99de8e5709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e77617c69434ce20c448b525c7e86f1ece18d51ce6e14167b4ca7e99de8e5709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://453c67434281e0261ec6799b30a548a01efd72d7df96632409e745b2e6a94546\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://453c67434281e0261ec6799b30a548a01efd72d7df96632409e745b2e6a94546\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt
\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06eab4daa6c8b6a564cf1bb1403b7b3b259a234ee09f23b4f4bc0628c2d58e69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06eab4daa6c8b6a564cf1bb1403b7b3b259a234ee09f23b4f4bc0628c2d58e69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1291ac09fc6ae2a79fdeaa0fc47272f61b97f45f67ed3f7c90bdfdbfe3519ff2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1291ac09fc6ae2a79fdeaa0fc47272f61b97f45f67ed3f7c90bdfdbfe3519ff2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe484514f50a14e485d4a97ea3ce9401c77f3f4dae4717719be7ed6c4d7b1a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"nam
e\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe484514f50a14e485d4a97ea3ce9401c77f3f4dae4717719be7ed6c4d7b1a59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f5t7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:36Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.604950 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9wrq6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30032328-bd33-4073-9366-e10bc5e2aa77\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6tz4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6tz4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:17Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9wrq6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:36Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.614069 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:36Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.621448 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d96bp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8d2c8b-fb8e-4bff-b8d2-69570e5d9e57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe858db22acc96b062f1bc7941e05c823f30d1f9ba50bd497c57f38d57e04293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4v6jx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d96bp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-20T16:42:36Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.634795 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e45cc56934a7b90e9d47f1c6d06d8dc140d57db4a216469e8ae112f398663e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:36Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.644521 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jsqvf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bedf08c7-1f93-4931-a7f3-e729e2a137af\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e32b76495d88d356373f9389e163a2f3a72e202bb454c7015594c1f5a41b511\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xxtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jsqvf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:36Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.652726 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-47477" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e026445a-aa73-4a0e-ba35-e2e07de1278c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec2ee3ff161cba7e14ad26763081c9f092f0dfe969cef113631c27e0db53d157\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xcnwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-47477\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:36Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.661448 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a13bfefa-f525-4ecd-89f5-f10480532bf6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9030e970e96b6909d771d4fa0c3b4f493b4a825cbc43ef7e684272284329c7c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://168e8f533aaa108e4160f9ae4af6bb7944edba7a596178ffffdc5325073fc3c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e0141aef1babb57501016cb09787591b5e6d9136b8d7dd4ba64393f0be5a027\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c87f08993c2b47c63ba9652c7dafe0fdbecdcb03a7b7080664ab1f1bd0e1d602\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:36Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.663503 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.663534 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.663543 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.663558 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.663568 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:36Z","lastTransitionTime":"2026-01-20T16:42:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.670116 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f904dd3e4b43e51569b6532f00fa2f9edf912c1f1969ee2ea00c99d1b7e35a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c877f4a456b3ce64bdbe6fefd50f1ef3f92bf9e26e6296005cf366e0ce265\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:36Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.679111 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"30e1ff9d-8dcd-4754-9a7a-c09598fb83db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ea68e442bb2143fbaa5e2c9fe335c6b023a31c2eeaa78d60de45c6ef40c2894\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a8122c3b57ac4f0026df149fe622450d07d8b0d517ad018decd7b21d8d9c04a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7444e8fea1853bae45dbd3d52d07e3f7b94314cd29fe8c99891e515a892e569e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6438ffea42f98178c4c2c21ae01725a8b54d6a6f790f47d64faf5ecd9f1c00f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2e22f1d4e87dd06a67f49aedad280da48ac1c4ba2e438a4925cf8bd5330c4d8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0120 16:41:58.859734 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0120 16:41:58.861094 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179835226/tls.crt::/tmp/serving-cert-2179835226/tls.key\\\\\\\"\\\\nI0120 16:42:04.205247 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0120 16:42:04.207291 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0120 16:42:04.207308 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0120 16:42:04.207329 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0120 16:42:04.207333 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0120 16:42:04.210615 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0120 16:42:04.210632 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0120 16:42:04.210641 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210648 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0120 16:42:04.210655 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0120 16:42:04.210658 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0120 16:42:04.210660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0120 16:42:04.211964 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4850bde1fb538e21fc91949dc584ef0a791d718844691a44559e0513bb36203\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:36Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.687012 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"68337d27-3fa6-4a29-88b0-82e60c3739eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de7f7c23993a57d30f08686e1cd4abb5b17e108ac12dd0c7929a31574c69eeb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18a59ecb802507e0d5988eacc915fd7e0d6954518de80f61162c97f680fd8c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:36Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.726794 4558 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"425eab90-fb70-4498-a98b-b95742033890\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48592e72d91cf527296f06e52fec1c16c1da21323c0644659945af109a74ebb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7e18f59b5e6b6c0be141b7efb3ccb1df4ff6e3c394bcc88fb1142c8df433493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c889e2b0e14244ef49ec057e3d5fd91687e335a983fdc99a6816abd2af643ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-di
r\\\"}]},{\\\"containerID\\\":\\\"cri-o://d434793571cbba089cc34029d951bee9063aca58728b791f2d1af2d68229f778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89f7ad577df11c509855a90ac8791da6de3f0d2857a1ca56726a6327f40455bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15eddb9054021\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15eddb9054021\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}}},{\\\"containerID\\\"
:\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:36Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.742682 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:36Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.754813 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:36Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.763852 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfwnl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8fe5472-7373-4e5d-87fc-a9ecaf26a5cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38cd58ecc67d337df2c29b79e5460ea23d4acc1e66de8b08ea70d7ab7b30691d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9nvl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a14e9281ff19e324499722aefadf1e1f2fd005089c153918ad4d8a7f5bdfa853\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2
099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9nvl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rfwnl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:36Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.765310 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.765423 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.765509 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.765570 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.765652 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:36Z","lastTransitionTime":"2026-01-20T16:42:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.776033 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e190c24c1d8a3bc0241334598ce23829ada9afee282d49a0c82cd67ad82bcd02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:36Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.867956 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.867992 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.868002 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.868015 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.868024 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:36Z","lastTransitionTime":"2026-01-20T16:42:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.969819 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.970021 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.970092 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.970182 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:36 crc kubenswrapper[4558]: I0120 16:42:36.970240 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:36Z","lastTransitionTime":"2026-01-20T16:42:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:37 crc kubenswrapper[4558]: I0120 16:42:37.072315 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:37 crc kubenswrapper[4558]: I0120 16:42:37.072344 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:37 crc kubenswrapper[4558]: I0120 16:42:37.072352 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:37 crc kubenswrapper[4558]: I0120 16:42:37.072364 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:37 crc kubenswrapper[4558]: I0120 16:42:37.072372 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:37Z","lastTransitionTime":"2026-01-20T16:42:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:37 crc kubenswrapper[4558]: I0120 16:42:37.174688 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:37 crc kubenswrapper[4558]: I0120 16:42:37.174943 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:37 crc kubenswrapper[4558]: I0120 16:42:37.175049 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:37 crc kubenswrapper[4558]: I0120 16:42:37.175125 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:37 crc kubenswrapper[4558]: I0120 16:42:37.175199 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:37Z","lastTransitionTime":"2026-01-20T16:42:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:37 crc kubenswrapper[4558]: I0120 16:42:37.277896 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:37 crc kubenswrapper[4558]: I0120 16:42:37.277934 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:37 crc kubenswrapper[4558]: I0120 16:42:37.277942 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:37 crc kubenswrapper[4558]: I0120 16:42:37.277956 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:37 crc kubenswrapper[4558]: I0120 16:42:37.277966 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:37Z","lastTransitionTime":"2026-01-20T16:42:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:37 crc kubenswrapper[4558]: I0120 16:42:37.379994 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:37 crc kubenswrapper[4558]: I0120 16:42:37.380034 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:37 crc kubenswrapper[4558]: I0120 16:42:37.380045 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:37 crc kubenswrapper[4558]: I0120 16:42:37.380059 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:37 crc kubenswrapper[4558]: I0120 16:42:37.380069 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:37Z","lastTransitionTime":"2026-01-20T16:42:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:37 crc kubenswrapper[4558]: I0120 16:42:37.481655 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:37 crc kubenswrapper[4558]: I0120 16:42:37.481694 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:37 crc kubenswrapper[4558]: I0120 16:42:37.481701 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:37 crc kubenswrapper[4558]: I0120 16:42:37.481715 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:37 crc kubenswrapper[4558]: I0120 16:42:37.481723 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:37Z","lastTransitionTime":"2026-01-20T16:42:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:37 crc kubenswrapper[4558]: I0120 16:42:37.560236 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-16 17:23:58.224222482 +0000 UTC Jan 20 16:42:37 crc kubenswrapper[4558]: I0120 16:42:37.565509 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9wrq6" Jan 20 16:42:37 crc kubenswrapper[4558]: E0120 16:42:37.565630 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9wrq6" podUID="30032328-bd33-4073-9366-e10bc5e2aa77" Jan 20 16:42:37 crc kubenswrapper[4558]: I0120 16:42:37.583267 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:37 crc kubenswrapper[4558]: I0120 16:42:37.583293 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:37 crc kubenswrapper[4558]: I0120 16:42:37.583304 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:37 crc kubenswrapper[4558]: I0120 16:42:37.583316 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:37 crc kubenswrapper[4558]: I0120 16:42:37.583324 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:37Z","lastTransitionTime":"2026-01-20T16:42:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:37 crc kubenswrapper[4558]: I0120 16:42:37.685507 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:37 crc kubenswrapper[4558]: I0120 16:42:37.685712 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:37 crc kubenswrapper[4558]: I0120 16:42:37.685820 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:37 crc kubenswrapper[4558]: I0120 16:42:37.685892 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:37 crc kubenswrapper[4558]: I0120 16:42:37.685960 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:37Z","lastTransitionTime":"2026-01-20T16:42:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:37 crc kubenswrapper[4558]: I0120 16:42:37.787601 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:37 crc kubenswrapper[4558]: I0120 16:42:37.787633 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:37 crc kubenswrapper[4558]: I0120 16:42:37.787641 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:37 crc kubenswrapper[4558]: I0120 16:42:37.787654 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:37 crc kubenswrapper[4558]: I0120 16:42:37.787661 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:37Z","lastTransitionTime":"2026-01-20T16:42:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:37 crc kubenswrapper[4558]: I0120 16:42:37.889761 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:37 crc kubenswrapper[4558]: I0120 16:42:37.889950 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:37 crc kubenswrapper[4558]: I0120 16:42:37.890042 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:37 crc kubenswrapper[4558]: I0120 16:42:37.890110 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:37 crc kubenswrapper[4558]: I0120 16:42:37.890196 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:37Z","lastTransitionTime":"2026-01-20T16:42:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:37 crc kubenswrapper[4558]: I0120 16:42:37.992655 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:37 crc kubenswrapper[4558]: I0120 16:42:37.992684 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:37 crc kubenswrapper[4558]: I0120 16:42:37.992693 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:37 crc kubenswrapper[4558]: I0120 16:42:37.992703 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:37 crc kubenswrapper[4558]: I0120 16:42:37.992711 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:37Z","lastTransitionTime":"2026-01-20T16:42:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.070087 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.070122 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.070129 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.070142 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.070150 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:38Z","lastTransitionTime":"2026-01-20T16:42:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:38 crc kubenswrapper[4558]: E0120 16:42:38.079245 4558 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"bf027402-7a62-478b-8585-220c98495823\\\",\\\"systemUUID\\\":\\\"1def282c-e24e-4bc0-9a1b-d7cc30756d3d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:38Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.081757 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.081786 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.081796 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.081807 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.081815 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:38Z","lastTransitionTime":"2026-01-20T16:42:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:38 crc kubenswrapper[4558]: E0120 16:42:38.090519 4558 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"bf027402-7a62-478b-8585-220c98495823\\\",\\\"systemUUID\\\":\\\"1def282c-e24e-4bc0-9a1b-d7cc30756d3d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:38Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.092906 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.092937 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.092946 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.092958 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.092966 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:38Z","lastTransitionTime":"2026-01-20T16:42:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:38 crc kubenswrapper[4558]: E0120 16:42:38.101607 4558 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"bf027402-7a62-478b-8585-220c98495823\\\",\\\"systemUUID\\\":\\\"1def282c-e24e-4bc0-9a1b-d7cc30756d3d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:38Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.103979 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.104003 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.104012 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.104024 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.104032 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:38Z","lastTransitionTime":"2026-01-20T16:42:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:38 crc kubenswrapper[4558]: E0120 16:42:38.112532 4558 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"bf027402-7a62-478b-8585-220c98495823\\\",\\\"systemUUID\\\":\\\"1def282c-e24e-4bc0-9a1b-d7cc30756d3d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:38Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.114729 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.114769 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.114778 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.114791 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.114798 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:38Z","lastTransitionTime":"2026-01-20T16:42:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:38 crc kubenswrapper[4558]: E0120 16:42:38.122928 4558 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:38Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"bf027402-7a62-478b-8585-220c98495823\\\",\\\"systemUUID\\\":\\\"1def282c-e24e-4bc0-9a1b-d7cc30756d3d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:38Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:38 crc kubenswrapper[4558]: E0120 16:42:38.123044 4558 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.123910 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.123935 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.123945 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.123957 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.123964 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:38Z","lastTransitionTime":"2026-01-20T16:42:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.225974 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.226018 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.226027 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.226038 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.226046 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:38Z","lastTransitionTime":"2026-01-20T16:42:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.327882 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.327922 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.327930 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.327943 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.327953 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:38Z","lastTransitionTime":"2026-01-20T16:42:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.429548 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.429573 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.429595 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.429607 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.429615 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:38Z","lastTransitionTime":"2026-01-20T16:42:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.531535 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.531572 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.531580 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.531621 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.531629 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:38Z","lastTransitionTime":"2026-01-20T16:42:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.560970 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-01 12:02:49.492665104 +0000 UTC Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.565232 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.565286 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 20 16:42:38 crc kubenswrapper[4558]: E0120 16:42:38.565351 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.565256 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 20 16:42:38 crc kubenswrapper[4558]: E0120 16:42:38.565431 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 20 16:42:38 crc kubenswrapper[4558]: E0120 16:42:38.565475 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.632919 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.632944 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.632953 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.632963 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.632971 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:38Z","lastTransitionTime":"2026-01-20T16:42:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.734732 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.734774 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.734783 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.734793 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.734800 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:38Z","lastTransitionTime":"2026-01-20T16:42:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.766496 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.774299 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.775872 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfwnl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8fe5472-7373-4e5d-87fc-a9ecaf26a5cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38cd58ecc67d337df2c29b79e5460ea23d4acc1e66de8b08ea70d7ab7b30691d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9nvl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a14e9281ff19e324499722aefadf1e1f2fd005089c153918ad4d8a7f5bdfa853\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9nvl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rfwnl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:38Z is after 2025-08-24T17:21:41Z" Jan 20 
16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.784976 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e190c24c1d8a3bc0241334598ce23829ada9afee282d49a0c82cd67ad82bcd02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:38Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.793611 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:38Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.801930 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:38Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.809355 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9wrq6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30032328-bd33-4073-9366-e10bc5e2aa77\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6tz4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6tz4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:17Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9wrq6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:38Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.818186 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:38Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.825060 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d96bp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8d2c8b-fb8e-4bff-b8d2-69570e5d9e57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe858db22acc96b062f1bc7941e05c823f30d1f9ba50bd497c57f38d57e04293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4v6jx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d96bp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-20T16:42:38Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.835996 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.836025 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.836035 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.836048 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.836073 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:38Z","lastTransitionTime":"2026-01-20T16:42:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.838406 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"254129cd-82fc-4162-b671-2434bc9e2972\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a6a184067bde115ef5e09fdd90e7dccfe89a9dc347af450998b91c068e7e803\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd8b93e5559d95511a1285a9aec9a7d72df6c330bcd1e1daf3e93f5494f70eeb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4264ef8ee49a8b172fb05ac5c9b0bd32350b5d9ae95293079dfeaf44f021b455\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0889cf371f8b06cc2d255c15fda97db1d5000d6f6cff29d2fdcb8667cede8a99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4265d3281e954ee5989ee008da28c6cb9fa29478e5fdf23325521d455304da9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://186a1d2caad8865abf7f565405c4a4c077038ccb67ba77927cc2392ec075a811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d28c2f9293ff09b5e7d4ca4df1633cb114f685a
d9a9dc8697e9efe3e9603259\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d28c2f9293ff09b5e7d4ca4df1633cb114f685ad9a9dc8697e9efe3e9603259\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-20T16:42:30Z\\\",\\\"message\\\":\\\"6215 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0120 16:42:30.148863 6215 obj_retry.go:365] Adding new object: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI0120 16:42:30.148994 6215 ovn.go:134] Ensuring zone local for Pod openshift-kube-apiserver/kube-apiserver-crc in node crc\\\\nI0120 16:42:30.149000 6215 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-apiserver/kube-apiserver-crc after 0 failed attempt(s)\\\\nI0120 16:42:30.149004 6215 default_network_controller.go:776] Recording success event on pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI0120 16:42:30.148969 6215 services_controller.go:445] Built service openshift-kube-storage-version-migrator-operator/metrics LB template configs for network=default: []services.lbConfig(nil)\\\\nI0120 16:42:30.149028 6215 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-operator/iptables-alerter-4ln5h\\\\nI0120 16:42:30.149009 6215 base_network_controller_pods.go:916] Annotation values: ip=[10.217.0.3/23] ; mac=0a:58:0a:d9:00:03 ; gw=[10.217.0.1]\\\\nF0120 16:42:30.149066 6215 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller ini\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:29Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-nv2xw_openshift-ovn-kubernetes(254129cd-82fc-4162-b671-2434bc9e2972)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f41d488b0c24e594a2f5cad36b5f8efdb7d867fae0e18a74fe3c396a203d162f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-nv2xw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:38Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.847971 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f5t7h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a318f03d0df490cb7bae11819c52eca011d163081aeecff1b2dd8f72caabbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"
}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e77617c69434ce20c448b525c7e86f1ece18d51ce6e14167b4ca7e99de8e5709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e77617c69434ce20c448b525c7e86f1ece18d51ce6e14167b4ca7e99de8e5709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://453c67434281e0261ec6799b30a548a01efd72d7df96632409e745b2e6a94546\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://453c67434281e0261ec6799b30a548a01efd72d7df96632409e745b2e6a94546\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt
\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06eab4daa6c8b6a564cf1bb1403b7b3b259a234ee09f23b4f4bc0628c2d58e69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06eab4daa6c8b6a564cf1bb1403b7b3b259a234ee09f23b4f4bc0628c2d58e69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1291ac09fc6ae2a79fdeaa0fc47272f61b97f45f67ed3f7c90bdfdbfe3519ff2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1291ac09fc6ae2a79fdeaa0fc47272f61b97f45f67ed3f7c90bdfdbfe3519ff2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe484514f50a14e485d4a97ea3ce9401c77f3f4dae4717719be7ed6c4d7b1a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"nam
e\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe484514f50a14e485d4a97ea3ce9401c77f3f4dae4717719be7ed6c4d7b1a59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f5t7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:38Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.855265 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-47477" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e026445a-aa73-4a0e-ba35-e2e07de1278c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec2ee3ff161cba7e14ad26763081c9f092f0dfe969cef113631c27e0db53d157\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xcnwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\
\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-47477\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:38Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.863330 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a13bfefa-f525-4ecd-89f5-f10480532bf6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9030e970e96b6909d771d4fa0c3b4f493b4a825cbc43ef7e684272284329c7c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://168e8f533aaa108e4160f9ae4af6bb7944edba7a596178ffffdc5325073fc3c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e0141aef1babb57501016cb09787591b5e6d9136b8d7dd4ba64393f0be5a027\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-
operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c87f08993c2b47c63ba9652c7dafe0fdbecdcb03a7b7080664ab1f1bd0e1d602\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:38Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.873072 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f904dd3e4b43e51569b6532f00fa2f9edf912c1f1969ee2ea00c99d1b7e35a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c877f4a456b3ce64bdbe6fefd50f1ef3f92bf9e26e6296005cf366e0ce265\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:38Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.882484 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e45cc56934a7b90e9d47f1c6d06d8dc140d57db4a216469e8ae112f398663e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:38Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.891065 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jsqvf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bedf08c7-1f93-4931-a7f3-e729e2a137af\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e32b76495d88d356373f9389e163a2f3a72e202bb454c7015594c1f5a41b511\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xxtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jsqvf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:38Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.904259 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"425eab90-fb70-4498-a98b-b95742033890\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48592e72d91cf527296f06e52fec1c16c1da21323c0644659945af109a74ebb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7e18f59b5e6b6c0be141b7efb3ccb1df4ff6e3c394bcc88fb1142c8df433493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c889e2b0e14244ef49ec057e3d5fd91687e335a983fdc99a6816abd2af643ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d434793571cbba089cc34029d951bee9063aca58728b791f2d1af2d68229f778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89f7ad577df11c509855a90ac8791da6de3f0d2857a1ca56726a6327f40455bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15eddb9054021\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15eddb9054021\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:38Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.913057 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"30e1ff9d-8dcd-4754-9a7a-c09598fb83db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ea68e442bb2143fbaa5e2c9fe335c6b023a31c2eeaa78d60de45c6ef40c2894\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a8122c3b57ac4f0026df149fe622450d07d8b0d517ad018decd7b21d8d9c04a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7444e8fea1853bae45dbd3d52d07e3f7b94314cd29fe8c99891e515a892e569e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6438ffea42f98178c4c2c21ae01725a8b54d6a6f790f47d64faf5ecd9f1c00f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2e22f1d4e87dd06a67f49aedad280da48ac1c4ba2e438a4925cf8bd5330c4d8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0120 16:41:58.859734 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0120 16:41:58.861094 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179835226/tls.crt::/tmp/serving-cert-2179835226/tls.key\\\\\\\"\\\\nI0120 16:42:04.205247 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0120 16:42:04.207291 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0120 16:42:04.207308 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0120 16:42:04.207329 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0120 16:42:04.207333 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0120 16:42:04.210615 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0120 16:42:04.210632 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0120 16:42:04.210641 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210648 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0120 16:42:04.210655 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0120 16:42:04.210658 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0120 16:42:04.210660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0120 16:42:04.211964 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4850bde1fb538e21fc91949dc584ef0a791d718844691a44559e0513bb36203\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:38Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.921115 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"68337d27-3fa6-4a29-88b0-82e60c3739eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de7f7c23993a57d30f08686e1cd4abb5b17e108ac12dd0c7929a31574c69eeb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18a59ecb802507e0d5988eacc915fd7e0d6954518de80f61162c97f680fd8c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:38Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.938480 4558 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.938509 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.938517 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.938530 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:38 crc kubenswrapper[4558]: I0120 16:42:38.938539 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:38Z","lastTransitionTime":"2026-01-20T16:42:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:39 crc kubenswrapper[4558]: I0120 16:42:39.040952 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:39 crc kubenswrapper[4558]: I0120 16:42:39.040989 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:39 crc kubenswrapper[4558]: I0120 16:42:39.040998 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:39 crc kubenswrapper[4558]: I0120 16:42:39.041011 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:39 crc kubenswrapper[4558]: I0120 16:42:39.041019 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:39Z","lastTransitionTime":"2026-01-20T16:42:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:39 crc kubenswrapper[4558]: I0120 16:42:39.142741 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:39 crc kubenswrapper[4558]: I0120 16:42:39.142772 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:39 crc kubenswrapper[4558]: I0120 16:42:39.142780 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:39 crc kubenswrapper[4558]: I0120 16:42:39.142791 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:39 crc kubenswrapper[4558]: I0120 16:42:39.142802 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:39Z","lastTransitionTime":"2026-01-20T16:42:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:39 crc kubenswrapper[4558]: I0120 16:42:39.244993 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:39 crc kubenswrapper[4558]: I0120 16:42:39.245025 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:39 crc kubenswrapper[4558]: I0120 16:42:39.245034 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:39 crc kubenswrapper[4558]: I0120 16:42:39.245046 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:39 crc kubenswrapper[4558]: I0120 16:42:39.245054 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:39Z","lastTransitionTime":"2026-01-20T16:42:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:39 crc kubenswrapper[4558]: I0120 16:42:39.347113 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:39 crc kubenswrapper[4558]: I0120 16:42:39.347155 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:39 crc kubenswrapper[4558]: I0120 16:42:39.347190 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:39 crc kubenswrapper[4558]: I0120 16:42:39.347202 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:39 crc kubenswrapper[4558]: I0120 16:42:39.347210 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:39Z","lastTransitionTime":"2026-01-20T16:42:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:39 crc kubenswrapper[4558]: I0120 16:42:39.449208 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:39 crc kubenswrapper[4558]: I0120 16:42:39.449248 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:39 crc kubenswrapper[4558]: I0120 16:42:39.449257 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:39 crc kubenswrapper[4558]: I0120 16:42:39.449270 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:39 crc kubenswrapper[4558]: I0120 16:42:39.449279 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:39Z","lastTransitionTime":"2026-01-20T16:42:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:39 crc kubenswrapper[4558]: I0120 16:42:39.551391 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:39 crc kubenswrapper[4558]: I0120 16:42:39.551421 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:39 crc kubenswrapper[4558]: I0120 16:42:39.551428 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:39 crc kubenswrapper[4558]: I0120 16:42:39.551439 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:39 crc kubenswrapper[4558]: I0120 16:42:39.551449 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:39Z","lastTransitionTime":"2026-01-20T16:42:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:39 crc kubenswrapper[4558]: I0120 16:42:39.561785 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-11 21:10:48.22689501 +0000 UTC Jan 20 16:42:39 crc kubenswrapper[4558]: I0120 16:42:39.565015 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9wrq6" Jan 20 16:42:39 crc kubenswrapper[4558]: E0120 16:42:39.565102 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9wrq6" podUID="30032328-bd33-4073-9366-e10bc5e2aa77" Jan 20 16:42:39 crc kubenswrapper[4558]: I0120 16:42:39.653607 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:39 crc kubenswrapper[4558]: I0120 16:42:39.653639 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:39 crc kubenswrapper[4558]: I0120 16:42:39.653649 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:39 crc kubenswrapper[4558]: I0120 16:42:39.653661 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:39 crc kubenswrapper[4558]: I0120 16:42:39.653669 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:39Z","lastTransitionTime":"2026-01-20T16:42:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:39 crc kubenswrapper[4558]: I0120 16:42:39.755823 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:39 crc kubenswrapper[4558]: I0120 16:42:39.755856 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:39 crc kubenswrapper[4558]: I0120 16:42:39.755864 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:39 crc kubenswrapper[4558]: I0120 16:42:39.755877 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:39 crc kubenswrapper[4558]: I0120 16:42:39.755884 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:39Z","lastTransitionTime":"2026-01-20T16:42:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:39 crc kubenswrapper[4558]: I0120 16:42:39.858016 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:39 crc kubenswrapper[4558]: I0120 16:42:39.858048 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:39 crc kubenswrapper[4558]: I0120 16:42:39.858057 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:39 crc kubenswrapper[4558]: I0120 16:42:39.858068 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:39 crc kubenswrapper[4558]: I0120 16:42:39.858076 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:39Z","lastTransitionTime":"2026-01-20T16:42:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:39 crc kubenswrapper[4558]: I0120 16:42:39.960156 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:39 crc kubenswrapper[4558]: I0120 16:42:39.960201 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:39 crc kubenswrapper[4558]: I0120 16:42:39.960211 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:39 crc kubenswrapper[4558]: I0120 16:42:39.960224 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:39 crc kubenswrapper[4558]: I0120 16:42:39.960233 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:39Z","lastTransitionTime":"2026-01-20T16:42:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:40 crc kubenswrapper[4558]: I0120 16:42:40.061823 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:40 crc kubenswrapper[4558]: I0120 16:42:40.061855 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:40 crc kubenswrapper[4558]: I0120 16:42:40.061865 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:40 crc kubenswrapper[4558]: I0120 16:42:40.061877 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:40 crc kubenswrapper[4558]: I0120 16:42:40.061885 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:40Z","lastTransitionTime":"2026-01-20T16:42:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:40 crc kubenswrapper[4558]: I0120 16:42:40.163643 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:40 crc kubenswrapper[4558]: I0120 16:42:40.163676 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:40 crc kubenswrapper[4558]: I0120 16:42:40.163684 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:40 crc kubenswrapper[4558]: I0120 16:42:40.163697 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:40 crc kubenswrapper[4558]: I0120 16:42:40.163706 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:40Z","lastTransitionTime":"2026-01-20T16:42:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:40 crc kubenswrapper[4558]: I0120 16:42:40.265500 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:40 crc kubenswrapper[4558]: I0120 16:42:40.265537 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:40 crc kubenswrapper[4558]: I0120 16:42:40.265546 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:40 crc kubenswrapper[4558]: I0120 16:42:40.265559 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:40 crc kubenswrapper[4558]: I0120 16:42:40.265569 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:40Z","lastTransitionTime":"2026-01-20T16:42:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:40 crc kubenswrapper[4558]: I0120 16:42:40.367439 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:40 crc kubenswrapper[4558]: I0120 16:42:40.367473 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:40 crc kubenswrapper[4558]: I0120 16:42:40.367483 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:40 crc kubenswrapper[4558]: I0120 16:42:40.367496 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:40 crc kubenswrapper[4558]: I0120 16:42:40.367504 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:40Z","lastTransitionTime":"2026-01-20T16:42:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:40 crc kubenswrapper[4558]: I0120 16:42:40.469674 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:40 crc kubenswrapper[4558]: I0120 16:42:40.469701 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:40 crc kubenswrapper[4558]: I0120 16:42:40.469710 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:40 crc kubenswrapper[4558]: I0120 16:42:40.469724 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:40 crc kubenswrapper[4558]: I0120 16:42:40.469733 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:40Z","lastTransitionTime":"2026-01-20T16:42:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:40 crc kubenswrapper[4558]: I0120 16:42:40.562662 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-09 01:30:54.146417627 +0000 UTC Jan 20 16:42:40 crc kubenswrapper[4558]: I0120 16:42:40.564900 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:42:40 crc kubenswrapper[4558]: I0120 16:42:40.564928 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 20 16:42:40 crc kubenswrapper[4558]: I0120 16:42:40.564949 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 20 16:42:40 crc kubenswrapper[4558]: E0120 16:42:40.565012 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 20 16:42:40 crc kubenswrapper[4558]: E0120 16:42:40.565058 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 20 16:42:40 crc kubenswrapper[4558]: E0120 16:42:40.565109 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 20 16:42:40 crc kubenswrapper[4558]: I0120 16:42:40.571176 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:40 crc kubenswrapper[4558]: I0120 16:42:40.571205 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:40 crc kubenswrapper[4558]: I0120 16:42:40.571214 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:40 crc kubenswrapper[4558]: I0120 16:42:40.571224 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:40 crc kubenswrapper[4558]: I0120 16:42:40.571232 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:40Z","lastTransitionTime":"2026-01-20T16:42:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:40 crc kubenswrapper[4558]: I0120 16:42:40.672784 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:40 crc kubenswrapper[4558]: I0120 16:42:40.672820 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:40 crc kubenswrapper[4558]: I0120 16:42:40.672830 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:40 crc kubenswrapper[4558]: I0120 16:42:40.672843 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:40 crc kubenswrapper[4558]: I0120 16:42:40.672856 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:40Z","lastTransitionTime":"2026-01-20T16:42:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:40 crc kubenswrapper[4558]: I0120 16:42:40.774615 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:40 crc kubenswrapper[4558]: I0120 16:42:40.774643 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:40 crc kubenswrapper[4558]: I0120 16:42:40.774651 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:40 crc kubenswrapper[4558]: I0120 16:42:40.774663 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:40 crc kubenswrapper[4558]: I0120 16:42:40.774671 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:40Z","lastTransitionTime":"2026-01-20T16:42:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:40 crc kubenswrapper[4558]: I0120 16:42:40.876994 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:40 crc kubenswrapper[4558]: I0120 16:42:40.877014 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:40 crc kubenswrapper[4558]: I0120 16:42:40.877022 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:40 crc kubenswrapper[4558]: I0120 16:42:40.877033 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:40 crc kubenswrapper[4558]: I0120 16:42:40.877041 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:40Z","lastTransitionTime":"2026-01-20T16:42:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:40 crc kubenswrapper[4558]: I0120 16:42:40.978922 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:40 crc kubenswrapper[4558]: I0120 16:42:40.978956 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:40 crc kubenswrapper[4558]: I0120 16:42:40.978964 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:40 crc kubenswrapper[4558]: I0120 16:42:40.978977 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:40 crc kubenswrapper[4558]: I0120 16:42:40.978989 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:40Z","lastTransitionTime":"2026-01-20T16:42:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:41 crc kubenswrapper[4558]: I0120 16:42:41.081006 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:41 crc kubenswrapper[4558]: I0120 16:42:41.081038 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:41 crc kubenswrapper[4558]: I0120 16:42:41.081050 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:41 crc kubenswrapper[4558]: I0120 16:42:41.081063 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:41 crc kubenswrapper[4558]: I0120 16:42:41.081071 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:41Z","lastTransitionTime":"2026-01-20T16:42:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:41 crc kubenswrapper[4558]: I0120 16:42:41.182872 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:41 crc kubenswrapper[4558]: I0120 16:42:41.182903 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:41 crc kubenswrapper[4558]: I0120 16:42:41.182911 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:41 crc kubenswrapper[4558]: I0120 16:42:41.182922 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:41 crc kubenswrapper[4558]: I0120 16:42:41.182932 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:41Z","lastTransitionTime":"2026-01-20T16:42:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:41 crc kubenswrapper[4558]: I0120 16:42:41.284513 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:41 crc kubenswrapper[4558]: I0120 16:42:41.284553 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:41 crc kubenswrapper[4558]: I0120 16:42:41.284562 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:41 crc kubenswrapper[4558]: I0120 16:42:41.284576 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:41 crc kubenswrapper[4558]: I0120 16:42:41.284599 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:41Z","lastTransitionTime":"2026-01-20T16:42:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:41 crc kubenswrapper[4558]: I0120 16:42:41.386716 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:41 crc kubenswrapper[4558]: I0120 16:42:41.386745 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:41 crc kubenswrapper[4558]: I0120 16:42:41.386753 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:41 crc kubenswrapper[4558]: I0120 16:42:41.386765 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:41 crc kubenswrapper[4558]: I0120 16:42:41.386773 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:41Z","lastTransitionTime":"2026-01-20T16:42:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:41 crc kubenswrapper[4558]: I0120 16:42:41.488420 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:41 crc kubenswrapper[4558]: I0120 16:42:41.488457 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:41 crc kubenswrapper[4558]: I0120 16:42:41.488465 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:41 crc kubenswrapper[4558]: I0120 16:42:41.488478 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:41 crc kubenswrapper[4558]: I0120 16:42:41.488486 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:41Z","lastTransitionTime":"2026-01-20T16:42:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:41 crc kubenswrapper[4558]: I0120 16:42:41.563460 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-25 12:25:07.502819214 +0000 UTC Jan 20 16:42:41 crc kubenswrapper[4558]: I0120 16:42:41.565838 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9wrq6" Jan 20 16:42:41 crc kubenswrapper[4558]: E0120 16:42:41.565946 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9wrq6" podUID="30032328-bd33-4073-9366-e10bc5e2aa77" Jan 20 16:42:41 crc kubenswrapper[4558]: I0120 16:42:41.590867 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:41 crc kubenswrapper[4558]: I0120 16:42:41.590897 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:41 crc kubenswrapper[4558]: I0120 16:42:41.590906 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:41 crc kubenswrapper[4558]: I0120 16:42:41.590919 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:41 crc kubenswrapper[4558]: I0120 16:42:41.590928 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:41Z","lastTransitionTime":"2026-01-20T16:42:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:41 crc kubenswrapper[4558]: I0120 16:42:41.693029 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:41 crc kubenswrapper[4558]: I0120 16:42:41.693065 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:41 crc kubenswrapper[4558]: I0120 16:42:41.693074 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:41 crc kubenswrapper[4558]: I0120 16:42:41.693086 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:41 crc kubenswrapper[4558]: I0120 16:42:41.693093 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:41Z","lastTransitionTime":"2026-01-20T16:42:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:41 crc kubenswrapper[4558]: I0120 16:42:41.794636 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:41 crc kubenswrapper[4558]: I0120 16:42:41.794667 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:41 crc kubenswrapper[4558]: I0120 16:42:41.794675 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:41 crc kubenswrapper[4558]: I0120 16:42:41.794685 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:41 crc kubenswrapper[4558]: I0120 16:42:41.794693 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:41Z","lastTransitionTime":"2026-01-20T16:42:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:41 crc kubenswrapper[4558]: I0120 16:42:41.896707 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:41 crc kubenswrapper[4558]: I0120 16:42:41.896738 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:41 crc kubenswrapper[4558]: I0120 16:42:41.896747 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:41 crc kubenswrapper[4558]: I0120 16:42:41.896759 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:41 crc kubenswrapper[4558]: I0120 16:42:41.896767 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:41Z","lastTransitionTime":"2026-01-20T16:42:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:41 crc kubenswrapper[4558]: I0120 16:42:41.998738 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:41 crc kubenswrapper[4558]: I0120 16:42:41.998768 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:41 crc kubenswrapper[4558]: I0120 16:42:41.998777 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:41 crc kubenswrapper[4558]: I0120 16:42:41.998788 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:41 crc kubenswrapper[4558]: I0120 16:42:41.998797 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:41Z","lastTransitionTime":"2026-01-20T16:42:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:42 crc kubenswrapper[4558]: I0120 16:42:42.101198 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:42 crc kubenswrapper[4558]: I0120 16:42:42.101441 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:42 crc kubenswrapper[4558]: I0120 16:42:42.101503 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:42 crc kubenswrapper[4558]: I0120 16:42:42.101575 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:42 crc kubenswrapper[4558]: I0120 16:42:42.101652 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:42Z","lastTransitionTime":"2026-01-20T16:42:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:42 crc kubenswrapper[4558]: I0120 16:42:42.204546 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:42 crc kubenswrapper[4558]: I0120 16:42:42.204572 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:42 crc kubenswrapper[4558]: I0120 16:42:42.204580 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:42 crc kubenswrapper[4558]: I0120 16:42:42.204752 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:42 crc kubenswrapper[4558]: I0120 16:42:42.204763 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:42Z","lastTransitionTime":"2026-01-20T16:42:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:42 crc kubenswrapper[4558]: I0120 16:42:42.306519 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:42 crc kubenswrapper[4558]: I0120 16:42:42.306549 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:42 crc kubenswrapper[4558]: I0120 16:42:42.306557 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:42 crc kubenswrapper[4558]: I0120 16:42:42.306571 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:42 crc kubenswrapper[4558]: I0120 16:42:42.306580 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:42Z","lastTransitionTime":"2026-01-20T16:42:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:42 crc kubenswrapper[4558]: I0120 16:42:42.408369 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:42 crc kubenswrapper[4558]: I0120 16:42:42.408398 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:42 crc kubenswrapper[4558]: I0120 16:42:42.408406 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:42 crc kubenswrapper[4558]: I0120 16:42:42.408417 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:42 crc kubenswrapper[4558]: I0120 16:42:42.408427 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:42Z","lastTransitionTime":"2026-01-20T16:42:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:42 crc kubenswrapper[4558]: I0120 16:42:42.510829 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:42 crc kubenswrapper[4558]: I0120 16:42:42.510862 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:42 crc kubenswrapper[4558]: I0120 16:42:42.510871 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:42 crc kubenswrapper[4558]: I0120 16:42:42.510882 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:42 crc kubenswrapper[4558]: I0120 16:42:42.510891 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:42Z","lastTransitionTime":"2026-01-20T16:42:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:42 crc kubenswrapper[4558]: I0120 16:42:42.563783 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-07 23:27:17.042937149 +0000 UTC Jan 20 16:42:42 crc kubenswrapper[4558]: I0120 16:42:42.565020 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:42:42 crc kubenswrapper[4558]: I0120 16:42:42.565044 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 20 16:42:42 crc kubenswrapper[4558]: E0120 16:42:42.565117 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 20 16:42:42 crc kubenswrapper[4558]: I0120 16:42:42.565030 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 20 16:42:42 crc kubenswrapper[4558]: E0120 16:42:42.565192 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 20 16:42:42 crc kubenswrapper[4558]: E0120 16:42:42.565257 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 20 16:42:42 crc kubenswrapper[4558]: I0120 16:42:42.612630 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:42 crc kubenswrapper[4558]: I0120 16:42:42.612661 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:42 crc kubenswrapper[4558]: I0120 16:42:42.612669 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:42 crc kubenswrapper[4558]: I0120 16:42:42.612680 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:42 crc kubenswrapper[4558]: I0120 16:42:42.612689 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:42Z","lastTransitionTime":"2026-01-20T16:42:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:42 crc kubenswrapper[4558]: I0120 16:42:42.715005 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:42 crc kubenswrapper[4558]: I0120 16:42:42.715032 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:42 crc kubenswrapper[4558]: I0120 16:42:42.715041 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:42 crc kubenswrapper[4558]: I0120 16:42:42.715052 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:42 crc kubenswrapper[4558]: I0120 16:42:42.715060 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:42Z","lastTransitionTime":"2026-01-20T16:42:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:42 crc kubenswrapper[4558]: I0120 16:42:42.817118 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:42 crc kubenswrapper[4558]: I0120 16:42:42.817154 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:42 crc kubenswrapper[4558]: I0120 16:42:42.817178 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:42 crc kubenswrapper[4558]: I0120 16:42:42.817194 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:42 crc kubenswrapper[4558]: I0120 16:42:42.817202 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:42Z","lastTransitionTime":"2026-01-20T16:42:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:42 crc kubenswrapper[4558]: I0120 16:42:42.918998 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:42 crc kubenswrapper[4558]: I0120 16:42:42.919026 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:42 crc kubenswrapper[4558]: I0120 16:42:42.919035 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:42 crc kubenswrapper[4558]: I0120 16:42:42.919047 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:42 crc kubenswrapper[4558]: I0120 16:42:42.919055 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:42Z","lastTransitionTime":"2026-01-20T16:42:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:43 crc kubenswrapper[4558]: I0120 16:42:43.020520 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:43 crc kubenswrapper[4558]: I0120 16:42:43.020558 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:43 crc kubenswrapper[4558]: I0120 16:42:43.020567 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:43 crc kubenswrapper[4558]: I0120 16:42:43.020581 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:43 crc kubenswrapper[4558]: I0120 16:42:43.020602 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:43Z","lastTransitionTime":"2026-01-20T16:42:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:43 crc kubenswrapper[4558]: I0120 16:42:43.122537 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:43 crc kubenswrapper[4558]: I0120 16:42:43.122575 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:43 crc kubenswrapper[4558]: I0120 16:42:43.122606 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:43 crc kubenswrapper[4558]: I0120 16:42:43.122621 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:43 crc kubenswrapper[4558]: I0120 16:42:43.122630 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:43Z","lastTransitionTime":"2026-01-20T16:42:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:43 crc kubenswrapper[4558]: I0120 16:42:43.224595 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:43 crc kubenswrapper[4558]: I0120 16:42:43.224631 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:43 crc kubenswrapper[4558]: I0120 16:42:43.224641 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:43 crc kubenswrapper[4558]: I0120 16:42:43.224653 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:43 crc kubenswrapper[4558]: I0120 16:42:43.224663 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:43Z","lastTransitionTime":"2026-01-20T16:42:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:43 crc kubenswrapper[4558]: I0120 16:42:43.327499 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:43 crc kubenswrapper[4558]: I0120 16:42:43.327533 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:43 crc kubenswrapper[4558]: I0120 16:42:43.327543 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:43 crc kubenswrapper[4558]: I0120 16:42:43.327556 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:43 crc kubenswrapper[4558]: I0120 16:42:43.327564 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:43Z","lastTransitionTime":"2026-01-20T16:42:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:43 crc kubenswrapper[4558]: I0120 16:42:43.429448 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:43 crc kubenswrapper[4558]: I0120 16:42:43.429484 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:43 crc kubenswrapper[4558]: I0120 16:42:43.429493 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:43 crc kubenswrapper[4558]: I0120 16:42:43.429505 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:43 crc kubenswrapper[4558]: I0120 16:42:43.429515 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:43Z","lastTransitionTime":"2026-01-20T16:42:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:43 crc kubenswrapper[4558]: I0120 16:42:43.531545 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:43 crc kubenswrapper[4558]: I0120 16:42:43.531580 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:43 crc kubenswrapper[4558]: I0120 16:42:43.531603 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:43 crc kubenswrapper[4558]: I0120 16:42:43.531614 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:43 crc kubenswrapper[4558]: I0120 16:42:43.531622 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:43Z","lastTransitionTime":"2026-01-20T16:42:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:43 crc kubenswrapper[4558]: I0120 16:42:43.564233 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-07 09:51:23.812909686 +0000 UTC Jan 20 16:42:43 crc kubenswrapper[4558]: I0120 16:42:43.565490 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9wrq6" Jan 20 16:42:43 crc kubenswrapper[4558]: E0120 16:42:43.565596 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9wrq6" podUID="30032328-bd33-4073-9366-e10bc5e2aa77" Jan 20 16:42:43 crc kubenswrapper[4558]: I0120 16:42:43.566233 4558 scope.go:117] "RemoveContainer" containerID="3d28c2f9293ff09b5e7d4ca4df1633cb114f685ad9a9dc8697e9efe3e9603259" Jan 20 16:42:43 crc kubenswrapper[4558]: E0120 16:42:43.566420 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-nv2xw_openshift-ovn-kubernetes(254129cd-82fc-4162-b671-2434bc9e2972)\"" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" podUID="254129cd-82fc-4162-b671-2434bc9e2972" Jan 20 16:42:43 crc kubenswrapper[4558]: I0120 16:42:43.633120 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:43 crc kubenswrapper[4558]: I0120 16:42:43.633148 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:43 crc kubenswrapper[4558]: I0120 16:42:43.633156 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:43 crc kubenswrapper[4558]: I0120 16:42:43.633186 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:43 crc kubenswrapper[4558]: I0120 16:42:43.633195 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:43Z","lastTransitionTime":"2026-01-20T16:42:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:43 crc kubenswrapper[4558]: I0120 16:42:43.735267 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:43 crc kubenswrapper[4558]: I0120 16:42:43.735299 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:43 crc kubenswrapper[4558]: I0120 16:42:43.735308 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:43 crc kubenswrapper[4558]: I0120 16:42:43.735318 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:43 crc kubenswrapper[4558]: I0120 16:42:43.735326 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:43Z","lastTransitionTime":"2026-01-20T16:42:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:43 crc kubenswrapper[4558]: I0120 16:42:43.837004 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:43 crc kubenswrapper[4558]: I0120 16:42:43.837041 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:43 crc kubenswrapper[4558]: I0120 16:42:43.837050 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:43 crc kubenswrapper[4558]: I0120 16:42:43.837063 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:43 crc kubenswrapper[4558]: I0120 16:42:43.837072 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:43Z","lastTransitionTime":"2026-01-20T16:42:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:43 crc kubenswrapper[4558]: I0120 16:42:43.938924 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:43 crc kubenswrapper[4558]: I0120 16:42:43.938953 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:43 crc kubenswrapper[4558]: I0120 16:42:43.938961 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:43 crc kubenswrapper[4558]: I0120 16:42:43.938973 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:43 crc kubenswrapper[4558]: I0120 16:42:43.938981 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:43Z","lastTransitionTime":"2026-01-20T16:42:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:44 crc kubenswrapper[4558]: I0120 16:42:44.040522 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:44 crc kubenswrapper[4558]: I0120 16:42:44.040557 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:44 crc kubenswrapper[4558]: I0120 16:42:44.040565 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:44 crc kubenswrapper[4558]: I0120 16:42:44.040577 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:44 crc kubenswrapper[4558]: I0120 16:42:44.040603 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:44Z","lastTransitionTime":"2026-01-20T16:42:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:44 crc kubenswrapper[4558]: I0120 16:42:44.142133 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:44 crc kubenswrapper[4558]: I0120 16:42:44.142181 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:44 crc kubenswrapper[4558]: I0120 16:42:44.142191 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:44 crc kubenswrapper[4558]: I0120 16:42:44.142202 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:44 crc kubenswrapper[4558]: I0120 16:42:44.142209 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:44Z","lastTransitionTime":"2026-01-20T16:42:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:44 crc kubenswrapper[4558]: I0120 16:42:44.243865 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:44 crc kubenswrapper[4558]: I0120 16:42:44.243889 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:44 crc kubenswrapper[4558]: I0120 16:42:44.243897 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:44 crc kubenswrapper[4558]: I0120 16:42:44.243907 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:44 crc kubenswrapper[4558]: I0120 16:42:44.243914 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:44Z","lastTransitionTime":"2026-01-20T16:42:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:44 crc kubenswrapper[4558]: I0120 16:42:44.345665 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:44 crc kubenswrapper[4558]: I0120 16:42:44.345692 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:44 crc kubenswrapper[4558]: I0120 16:42:44.345700 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:44 crc kubenswrapper[4558]: I0120 16:42:44.345711 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:44 crc kubenswrapper[4558]: I0120 16:42:44.345719 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:44Z","lastTransitionTime":"2026-01-20T16:42:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:44 crc kubenswrapper[4558]: I0120 16:42:44.447612 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:44 crc kubenswrapper[4558]: I0120 16:42:44.447642 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:44 crc kubenswrapper[4558]: I0120 16:42:44.447653 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:44 crc kubenswrapper[4558]: I0120 16:42:44.447665 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:44 crc kubenswrapper[4558]: I0120 16:42:44.447672 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:44Z","lastTransitionTime":"2026-01-20T16:42:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:44 crc kubenswrapper[4558]: I0120 16:42:44.549872 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:44 crc kubenswrapper[4558]: I0120 16:42:44.549916 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:44 crc kubenswrapper[4558]: I0120 16:42:44.549925 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:44 crc kubenswrapper[4558]: I0120 16:42:44.549939 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:44 crc kubenswrapper[4558]: I0120 16:42:44.549946 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:44Z","lastTransitionTime":"2026-01-20T16:42:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:44 crc kubenswrapper[4558]: I0120 16:42:44.565307 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-09 16:23:40.048360867 +0000 UTC Jan 20 16:42:44 crc kubenswrapper[4558]: I0120 16:42:44.565383 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:42:44 crc kubenswrapper[4558]: I0120 16:42:44.565415 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 20 16:42:44 crc kubenswrapper[4558]: E0120 16:42:44.565884 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 20 16:42:44 crc kubenswrapper[4558]: E0120 16:42:44.566014 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 20 16:42:44 crc kubenswrapper[4558]: I0120 16:42:44.565524 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 20 16:42:44 crc kubenswrapper[4558]: E0120 16:42:44.566087 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 20 16:42:44 crc kubenswrapper[4558]: I0120 16:42:44.652184 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:44 crc kubenswrapper[4558]: I0120 16:42:44.652214 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:44 crc kubenswrapper[4558]: I0120 16:42:44.652222 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:44 crc kubenswrapper[4558]: I0120 16:42:44.652236 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:44 crc kubenswrapper[4558]: I0120 16:42:44.652245 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:44Z","lastTransitionTime":"2026-01-20T16:42:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:44 crc kubenswrapper[4558]: I0120 16:42:44.754208 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:44 crc kubenswrapper[4558]: I0120 16:42:44.754236 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:44 crc kubenswrapper[4558]: I0120 16:42:44.754244 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:44 crc kubenswrapper[4558]: I0120 16:42:44.754256 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:44 crc kubenswrapper[4558]: I0120 16:42:44.754264 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:44Z","lastTransitionTime":"2026-01-20T16:42:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:44 crc kubenswrapper[4558]: I0120 16:42:44.855941 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:44 crc kubenswrapper[4558]: I0120 16:42:44.856015 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:44 crc kubenswrapper[4558]: I0120 16:42:44.856024 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:44 crc kubenswrapper[4558]: I0120 16:42:44.856053 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:44 crc kubenswrapper[4558]: I0120 16:42:44.856063 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:44Z","lastTransitionTime":"2026-01-20T16:42:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:44 crc kubenswrapper[4558]: I0120 16:42:44.957753 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:44 crc kubenswrapper[4558]: I0120 16:42:44.957786 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:44 crc kubenswrapper[4558]: I0120 16:42:44.957793 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:44 crc kubenswrapper[4558]: I0120 16:42:44.957805 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:44 crc kubenswrapper[4558]: I0120 16:42:44.957814 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:44Z","lastTransitionTime":"2026-01-20T16:42:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:45 crc kubenswrapper[4558]: I0120 16:42:45.059472 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:45 crc kubenswrapper[4558]: I0120 16:42:45.059527 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:45 crc kubenswrapper[4558]: I0120 16:42:45.059536 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:45 crc kubenswrapper[4558]: I0120 16:42:45.059549 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:45 crc kubenswrapper[4558]: I0120 16:42:45.059558 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:45Z","lastTransitionTime":"2026-01-20T16:42:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:45 crc kubenswrapper[4558]: I0120 16:42:45.160938 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:45 crc kubenswrapper[4558]: I0120 16:42:45.160970 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:45 crc kubenswrapper[4558]: I0120 16:42:45.160979 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:45 crc kubenswrapper[4558]: I0120 16:42:45.160990 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:45 crc kubenswrapper[4558]: I0120 16:42:45.160998 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:45Z","lastTransitionTime":"2026-01-20T16:42:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:45 crc kubenswrapper[4558]: I0120 16:42:45.262901 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:45 crc kubenswrapper[4558]: I0120 16:42:45.262935 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:45 crc kubenswrapper[4558]: I0120 16:42:45.262944 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:45 crc kubenswrapper[4558]: I0120 16:42:45.262956 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:45 crc kubenswrapper[4558]: I0120 16:42:45.262963 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:45Z","lastTransitionTime":"2026-01-20T16:42:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:45 crc kubenswrapper[4558]: I0120 16:42:45.364360 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:45 crc kubenswrapper[4558]: I0120 16:42:45.364393 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:45 crc kubenswrapper[4558]: I0120 16:42:45.364401 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:45 crc kubenswrapper[4558]: I0120 16:42:45.364411 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:45 crc kubenswrapper[4558]: I0120 16:42:45.364419 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:45Z","lastTransitionTime":"2026-01-20T16:42:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:45 crc kubenswrapper[4558]: I0120 16:42:45.466385 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:45 crc kubenswrapper[4558]: I0120 16:42:45.466638 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:45 crc kubenswrapper[4558]: I0120 16:42:45.466732 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:45 crc kubenswrapper[4558]: I0120 16:42:45.466836 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:45 crc kubenswrapper[4558]: I0120 16:42:45.466901 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:45Z","lastTransitionTime":"2026-01-20T16:42:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:45 crc kubenswrapper[4558]: I0120 16:42:45.565067 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9wrq6" Jan 20 16:42:45 crc kubenswrapper[4558]: E0120 16:42:45.565207 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9wrq6" podUID="30032328-bd33-4073-9366-e10bc5e2aa77" Jan 20 16:42:45 crc kubenswrapper[4558]: I0120 16:42:45.566187 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-24 19:41:27.917442054 +0000 UTC Jan 20 16:42:45 crc kubenswrapper[4558]: I0120 16:42:45.568646 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:45 crc kubenswrapper[4558]: I0120 16:42:45.568753 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:45 crc kubenswrapper[4558]: I0120 16:42:45.568815 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:45 crc kubenswrapper[4558]: I0120 16:42:45.568881 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:45 crc kubenswrapper[4558]: I0120 16:42:45.568958 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:45Z","lastTransitionTime":"2026-01-20T16:42:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:45 crc kubenswrapper[4558]: I0120 16:42:45.671719 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:45 crc kubenswrapper[4558]: I0120 16:42:45.671757 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:45 crc kubenswrapper[4558]: I0120 16:42:45.671766 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:45 crc kubenswrapper[4558]: I0120 16:42:45.671778 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:45 crc kubenswrapper[4558]: I0120 16:42:45.671788 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:45Z","lastTransitionTime":"2026-01-20T16:42:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:45 crc kubenswrapper[4558]: I0120 16:42:45.772947 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:45 crc kubenswrapper[4558]: I0120 16:42:45.772978 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:45 crc kubenswrapper[4558]: I0120 16:42:45.772987 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:45 crc kubenswrapper[4558]: I0120 16:42:45.772998 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:45 crc kubenswrapper[4558]: I0120 16:42:45.773005 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:45Z","lastTransitionTime":"2026-01-20T16:42:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:45 crc kubenswrapper[4558]: I0120 16:42:45.875211 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:45 crc kubenswrapper[4558]: I0120 16:42:45.875457 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:45 crc kubenswrapper[4558]: I0120 16:42:45.875544 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:45 crc kubenswrapper[4558]: I0120 16:42:45.875650 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:45 crc kubenswrapper[4558]: I0120 16:42:45.875735 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:45Z","lastTransitionTime":"2026-01-20T16:42:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:45 crc kubenswrapper[4558]: I0120 16:42:45.977906 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:45 crc kubenswrapper[4558]: I0120 16:42:45.977946 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:45 crc kubenswrapper[4558]: I0120 16:42:45.977957 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:45 crc kubenswrapper[4558]: I0120 16:42:45.977974 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:45 crc kubenswrapper[4558]: I0120 16:42:45.977985 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:45Z","lastTransitionTime":"2026-01-20T16:42:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:46 crc kubenswrapper[4558]: I0120 16:42:46.080070 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:46 crc kubenswrapper[4558]: I0120 16:42:46.080103 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:46 crc kubenswrapper[4558]: I0120 16:42:46.080113 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:46 crc kubenswrapper[4558]: I0120 16:42:46.080125 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:46 crc kubenswrapper[4558]: I0120 16:42:46.080133 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:46Z","lastTransitionTime":"2026-01-20T16:42:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:46 crc kubenswrapper[4558]: I0120 16:42:46.182138 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:46 crc kubenswrapper[4558]: I0120 16:42:46.182223 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:46 crc kubenswrapper[4558]: I0120 16:42:46.182232 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:46 crc kubenswrapper[4558]: I0120 16:42:46.182245 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:46 crc kubenswrapper[4558]: I0120 16:42:46.182256 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:46Z","lastTransitionTime":"2026-01-20T16:42:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:46 crc kubenswrapper[4558]: I0120 16:42:46.283822 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:46 crc kubenswrapper[4558]: I0120 16:42:46.283852 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:46 crc kubenswrapper[4558]: I0120 16:42:46.283860 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:46 crc kubenswrapper[4558]: I0120 16:42:46.283871 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:46 crc kubenswrapper[4558]: I0120 16:42:46.283880 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:46Z","lastTransitionTime":"2026-01-20T16:42:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:46 crc kubenswrapper[4558]: I0120 16:42:46.387902 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:46 crc kubenswrapper[4558]: I0120 16:42:46.387933 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:46 crc kubenswrapper[4558]: I0120 16:42:46.387943 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:46 crc kubenswrapper[4558]: I0120 16:42:46.387955 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:46 crc kubenswrapper[4558]: I0120 16:42:46.387962 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:46Z","lastTransitionTime":"2026-01-20T16:42:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:46 crc kubenswrapper[4558]: I0120 16:42:46.490009 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:46 crc kubenswrapper[4558]: I0120 16:42:46.490230 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:46 crc kubenswrapper[4558]: I0120 16:42:46.490304 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:46 crc kubenswrapper[4558]: I0120 16:42:46.490362 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:46 crc kubenswrapper[4558]: I0120 16:42:46.490385 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:46Z","lastTransitionTime":"2026-01-20T16:42:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:46 crc kubenswrapper[4558]: I0120 16:42:46.564891 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:42:46 crc kubenswrapper[4558]: I0120 16:42:46.564916 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 20 16:42:46 crc kubenswrapper[4558]: E0120 16:42:46.564989 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 20 16:42:46 crc kubenswrapper[4558]: E0120 16:42:46.565094 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 20 16:42:46 crc kubenswrapper[4558]: I0120 16:42:46.565113 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 20 16:42:46 crc kubenswrapper[4558]: E0120 16:42:46.565201 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 20 16:42:46 crc kubenswrapper[4558]: I0120 16:42:46.566393 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-23 17:13:14.282211801 +0000 UTC Jan 20 16:42:46 crc kubenswrapper[4558]: I0120 16:42:46.575752 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:46Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:46 crc kubenswrapper[4558]: I0120 16:42:46.584612 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d96bp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8d2c8b-fb8e-4bff-b8d2-69570e5d9e57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe858db22acc96b062f1bc7941e05c823f30d1f9ba50bd497c57f38d57e04293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4v6jx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d96bp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-20T16:42:46Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:46 crc kubenswrapper[4558]: I0120 16:42:46.592401 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:46 crc kubenswrapper[4558]: I0120 16:42:46.592431 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:46 crc kubenswrapper[4558]: I0120 16:42:46.592439 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:46 crc kubenswrapper[4558]: I0120 16:42:46.592451 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:46 crc kubenswrapper[4558]: I0120 16:42:46.592459 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:46Z","lastTransitionTime":"2026-01-20T16:42:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:46 crc kubenswrapper[4558]: I0120 16:42:46.599676 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"254129cd-82fc-4162-b671-2434bc9e2972\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a6a184067bde115ef5e09fdd90e7dccfe89a9dc347af450998b91c068e7e803\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd8b93e5559d95511a1285a9aec9a7d72df6c330bcd1e1daf3e93f5494f70eeb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4264ef8ee49a8b172fb05ac5c9b0bd32350b5d9ae95293079dfeaf44f021b455\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0889cf371f8b06cc2d255c15fda97db1d5000d6f6cff29d2fdcb8667cede8a99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4265d3281e954ee5989ee008da28c6cb9fa29478e5fdf23325521d455304da9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://186a1d2caad8865abf7f565405c4a4c077038ccb67ba77927cc2392ec075a811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d28c2f9293ff09b5e7d4ca4df1633cb114f685a
d9a9dc8697e9efe3e9603259\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d28c2f9293ff09b5e7d4ca4df1633cb114f685ad9a9dc8697e9efe3e9603259\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-20T16:42:30Z\\\",\\\"message\\\":\\\"6215 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0120 16:42:30.148863 6215 obj_retry.go:365] Adding new object: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI0120 16:42:30.148994 6215 ovn.go:134] Ensuring zone local for Pod openshift-kube-apiserver/kube-apiserver-crc in node crc\\\\nI0120 16:42:30.149000 6215 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-apiserver/kube-apiserver-crc after 0 failed attempt(s)\\\\nI0120 16:42:30.149004 6215 default_network_controller.go:776] Recording success event on pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI0120 16:42:30.148969 6215 services_controller.go:445] Built service openshift-kube-storage-version-migrator-operator/metrics LB template configs for network=default: []services.lbConfig(nil)\\\\nI0120 16:42:30.149028 6215 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-operator/iptables-alerter-4ln5h\\\\nI0120 16:42:30.149009 6215 base_network_controller_pods.go:916] Annotation values: ip=[10.217.0.3/23] ; mac=0a:58:0a:d9:00:03 ; gw=[10.217.0.1]\\\\nF0120 16:42:30.149066 6215 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller ini\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:29Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-nv2xw_openshift-ovn-kubernetes(254129cd-82fc-4162-b671-2434bc9e2972)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f41d488b0c24e594a2f5cad36b5f8efdb7d867fae0e18a74fe3c396a203d162f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-nv2xw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:46Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:46 crc kubenswrapper[4558]: I0120 16:42:46.610978 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f5t7h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a318f03d0df490cb7bae11819c52eca011d163081aeecff1b2dd8f72caabbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"
}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e77617c69434ce20c448b525c7e86f1ece18d51ce6e14167b4ca7e99de8e5709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e77617c69434ce20c448b525c7e86f1ece18d51ce6e14167b4ca7e99de8e5709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://453c67434281e0261ec6799b30a548a01efd72d7df96632409e745b2e6a94546\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://453c67434281e0261ec6799b30a548a01efd72d7df96632409e745b2e6a94546\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt
\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06eab4daa6c8b6a564cf1bb1403b7b3b259a234ee09f23b4f4bc0628c2d58e69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06eab4daa6c8b6a564cf1bb1403b7b3b259a234ee09f23b4f4bc0628c2d58e69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1291ac09fc6ae2a79fdeaa0fc47272f61b97f45f67ed3f7c90bdfdbfe3519ff2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1291ac09fc6ae2a79fdeaa0fc47272f61b97f45f67ed3f7c90bdfdbfe3519ff2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe484514f50a14e485d4a97ea3ce9401c77f3f4dae4717719be7ed6c4d7b1a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"nam
e\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe484514f50a14e485d4a97ea3ce9401c77f3f4dae4717719be7ed6c4d7b1a59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f5t7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:46Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:46 crc kubenswrapper[4558]: I0120 16:42:46.625913 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9wrq6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30032328-bd33-4073-9366-e10bc5e2aa77\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6tz4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6tz4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:17Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9wrq6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:46Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:46 crc kubenswrapper[4558]: I0120 16:42:46.636063 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a13bfefa-f525-4ecd-89f5-f10480532bf6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9030e970e96b6909d771d4fa0c3b4f493b4a825cbc43ef7e684272284329c7c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://168e8f533aaa108e4160f9ae4af6bb7944edba7a596178ffffdc5325073fc3c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e0141aef1babb57501016cb09787591b5e6d9136b8d7dd4ba64393f0be5a027\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c87f08993c2b47c63ba9652c7dafe0fdbecdcb03a7b7080664ab1f1bd0e1d602\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:46Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:46 crc kubenswrapper[4558]: I0120 16:42:46.645085 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f904dd3e4b43e51569b6532f00fa2f9edf912c1f1969ee2ea00c99d1b7e35a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c877f4a456b3ce64bdbe6fefd50f1ef3f92bf9e26e6296005cf366e0ce265\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:46Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:46 crc kubenswrapper[4558]: I0120 16:42:46.653857 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e45cc56934a7b90e9d47f1c6d06d8dc140d57db4a216469e8ae112f398663e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:46Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:46 crc kubenswrapper[4558]: I0120 16:42:46.662808 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jsqvf" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bedf08c7-1f93-4931-a7f3-e729e2a137af\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e32b76495d88d356373f9389e163a2f3a72e202bb454c7015594c1f5a41b511\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xxtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jsqvf\": Internal error occurred: failed 
calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:46Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:46 crc kubenswrapper[4558]: I0120 16:42:46.669547 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-47477" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e026445a-aa73-4a0e-ba35-e2e07de1278c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec2ee3ff161cba7e14ad26763081c9f092f0dfe969cef113631c27e0db53d157\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xcnwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-47477\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:46Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:46 crc kubenswrapper[4558]: I0120 16:42:46.682486 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"425eab90-fb70-4498-a98b-b95742033890\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48592e72d91cf527296f06e52fec1c16c1da21323c0644659945af109a74ebb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7e18f59b5e6b6c0be141b7efb3ccb1df4ff6e3c394bcc88fb1142c8df433493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c889e2b0e14244ef49ec057e3d5fd91687e335a983fdc99a6816abd2af643ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d434793571cbba089cc34029d951bee9063aca5
8728b791f2d1af2d68229f778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89f7ad577df11c509855a90ac8791da6de3f0d2857a1ca56726a6327f40455bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15eddb9054021\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15eddb9054021\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:46Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:46 crc kubenswrapper[4558]: I0120 16:42:46.691456 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30e1ff9d-8dcd-4754-9a7a-c09598fb83db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ea68e442bb2143fbaa5e2c9fe335c6b023a31c2eeaa78d60de45c6ef40c2894\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a8122c3b57ac4f0026df149fe622450
d07d8b0d517ad018decd7b21d8d9c04a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7444e8fea1853bae45dbd3d52d07e3f7b94314cd29fe8c99891e515a892e569e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6438ffea42f98178c4c2c21ae01725a8b54d6a6f790f47d64faf5ecd9f1c00f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2e22f1d4e87dd06a67f49aedad280da48ac1c4ba2e438a4925cf8bd5330c4d8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0120 16:41:58.859734 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0120 16:41:58.861094 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179835226/tls.crt::/tmp/serving-cert-2179835226/tls.key\\\\\\\"\\\\nI0120 16:42:04.205247 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0120 16:42:04.207291 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0120 16:42:04.207308 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0120 16:42:04.207329 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0120 16:42:04.207333 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0120 16:42:04.210615 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0120 16:42:04.210632 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0120 16:42:04.210641 1 
secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210648 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0120 16:42:04.210655 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0120 16:42:04.210658 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0120 16:42:04.210660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0120 16:42:04.211964 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4850bde1fb538e21fc91949dc584ef0a791d718844691a44559e0513bb36203\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:46Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:46 crc kubenswrapper[4558]: I0120 16:42:46.693919 4558 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:46 crc kubenswrapper[4558]: I0120 16:42:46.693946 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:46 crc kubenswrapper[4558]: I0120 16:42:46.693954 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:46 crc kubenswrapper[4558]: I0120 16:42:46.693967 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:46 crc kubenswrapper[4558]: I0120 16:42:46.693975 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:46Z","lastTransitionTime":"2026-01-20T16:42:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:46 crc kubenswrapper[4558]: I0120 16:42:46.699128 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"68337d27-3fa6-4a29-88b0-82e60c3739eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de7f7c23993a57d30f08686e1cd4abb5b17e108ac12dd0c7929a31574c69eeb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18a59ecb802507e0d5988eacc915fd7e0d6954518de80f61162c97f680fd8c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-de
v/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:46Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:46 crc kubenswrapper[4558]: I0120 16:42:46.707236 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60aafae7-2e6e-4d21-9ce6-8ff5382ef642\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d16b973c217656d6a8ceaf684e9dec82daeb583209943f6b2979be47ab03fda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb2e7753f75b10088eea9204511ae78e4be9a2b5be8c17ed04c3c972b13f8190\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\
"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a426b97002a49938b18009db49867dc9a15102707b42ec46bc64093c0ed3c2c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e4df1c3b2a1aef9adb3da64f89a1b8c0904fd22882609dbafe2fbc5718b8016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e4df1c3b2a1aef9adb3da64f89a1b8c0904fd22882609dbafe2fbc5718b8016\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:46Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:46 crc kubenswrapper[4558]: I0120 16:42:46.719339 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e190c24c1d8a3bc0241334598ce23829ada9afee282d49a0c82cd67ad82bcd02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:46Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:46 crc kubenswrapper[4558]: I0120 16:42:46.728519 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:46Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:46 crc kubenswrapper[4558]: I0120 16:42:46.737013 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:46Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:46 crc kubenswrapper[4558]: I0120 16:42:46.745149 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfwnl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8fe5472-7373-4e5d-87fc-a9ecaf26a5cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38cd58ecc67d337df2c29b79e5460ea23d4acc1e66de8b08ea70d7ab7b30691d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9nvl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a14e9281ff19e324499722aefadf1e1f2fd005089c153918ad4d8a7f5bdfa853\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2
099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9nvl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rfwnl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:46Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:46 crc kubenswrapper[4558]: I0120 16:42:46.796059 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:46 crc kubenswrapper[4558]: I0120 16:42:46.796092 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:46 crc kubenswrapper[4558]: I0120 16:42:46.796100 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:46 crc kubenswrapper[4558]: I0120 16:42:46.796111 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:46 crc kubenswrapper[4558]: I0120 16:42:46.796121 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:46Z","lastTransitionTime":"2026-01-20T16:42:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:46 crc kubenswrapper[4558]: I0120 16:42:46.897846 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:46 crc kubenswrapper[4558]: I0120 16:42:46.897948 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:46 crc kubenswrapper[4558]: I0120 16:42:46.897957 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:46 crc kubenswrapper[4558]: I0120 16:42:46.897970 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:46 crc kubenswrapper[4558]: I0120 16:42:46.897978 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:46Z","lastTransitionTime":"2026-01-20T16:42:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:47 crc kubenswrapper[4558]: I0120 16:42:47.000239 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:47 crc kubenswrapper[4558]: I0120 16:42:47.000289 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:47 crc kubenswrapper[4558]: I0120 16:42:47.000299 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:47 crc kubenswrapper[4558]: I0120 16:42:47.000312 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:47 crc kubenswrapper[4558]: I0120 16:42:47.000320 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:47Z","lastTransitionTime":"2026-01-20T16:42:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:47 crc kubenswrapper[4558]: I0120 16:42:47.102415 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:47 crc kubenswrapper[4558]: I0120 16:42:47.102443 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:47 crc kubenswrapper[4558]: I0120 16:42:47.102451 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:47 crc kubenswrapper[4558]: I0120 16:42:47.102462 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:47 crc kubenswrapper[4558]: I0120 16:42:47.102471 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:47Z","lastTransitionTime":"2026-01-20T16:42:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:47 crc kubenswrapper[4558]: I0120 16:42:47.204683 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:47 crc kubenswrapper[4558]: I0120 16:42:47.204717 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:47 crc kubenswrapper[4558]: I0120 16:42:47.204725 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:47 crc kubenswrapper[4558]: I0120 16:42:47.204736 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:47 crc kubenswrapper[4558]: I0120 16:42:47.204743 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:47Z","lastTransitionTime":"2026-01-20T16:42:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:47 crc kubenswrapper[4558]: I0120 16:42:47.306869 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:47 crc kubenswrapper[4558]: I0120 16:42:47.306904 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:47 crc kubenswrapper[4558]: I0120 16:42:47.306913 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:47 crc kubenswrapper[4558]: I0120 16:42:47.306925 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:47 crc kubenswrapper[4558]: I0120 16:42:47.306934 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:47Z","lastTransitionTime":"2026-01-20T16:42:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:47 crc kubenswrapper[4558]: I0120 16:42:47.409794 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:47 crc kubenswrapper[4558]: I0120 16:42:47.409827 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:47 crc kubenswrapper[4558]: I0120 16:42:47.409834 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:47 crc kubenswrapper[4558]: I0120 16:42:47.409846 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:47 crc kubenswrapper[4558]: I0120 16:42:47.409855 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:47Z","lastTransitionTime":"2026-01-20T16:42:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:47 crc kubenswrapper[4558]: I0120 16:42:47.512059 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:47 crc kubenswrapper[4558]: I0120 16:42:47.512091 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:47 crc kubenswrapper[4558]: I0120 16:42:47.512099 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:47 crc kubenswrapper[4558]: I0120 16:42:47.512110 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:47 crc kubenswrapper[4558]: I0120 16:42:47.512120 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:47Z","lastTransitionTime":"2026-01-20T16:42:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:47 crc kubenswrapper[4558]: I0120 16:42:47.565904 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9wrq6" Jan 20 16:42:47 crc kubenswrapper[4558]: E0120 16:42:47.566012 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9wrq6" podUID="30032328-bd33-4073-9366-e10bc5e2aa77" Jan 20 16:42:47 crc kubenswrapper[4558]: I0120 16:42:47.566910 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-20 08:50:42.567544322 +0000 UTC Jan 20 16:42:47 crc kubenswrapper[4558]: I0120 16:42:47.614009 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:47 crc kubenswrapper[4558]: I0120 16:42:47.614056 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:47 crc kubenswrapper[4558]: I0120 16:42:47.614066 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:47 crc kubenswrapper[4558]: I0120 16:42:47.614078 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:47 crc kubenswrapper[4558]: I0120 16:42:47.614086 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:47Z","lastTransitionTime":"2026-01-20T16:42:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:47 crc kubenswrapper[4558]: I0120 16:42:47.715654 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:47 crc kubenswrapper[4558]: I0120 16:42:47.715691 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:47 crc kubenswrapper[4558]: I0120 16:42:47.715716 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:47 crc kubenswrapper[4558]: I0120 16:42:47.715731 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:47 crc kubenswrapper[4558]: I0120 16:42:47.715738 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:47Z","lastTransitionTime":"2026-01-20T16:42:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:47 crc kubenswrapper[4558]: I0120 16:42:47.817300 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:47 crc kubenswrapper[4558]: I0120 16:42:47.817342 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:47 crc kubenswrapper[4558]: I0120 16:42:47.817351 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:47 crc kubenswrapper[4558]: I0120 16:42:47.817364 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:47 crc kubenswrapper[4558]: I0120 16:42:47.817372 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:47Z","lastTransitionTime":"2026-01-20T16:42:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:47 crc kubenswrapper[4558]: I0120 16:42:47.919336 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:47 crc kubenswrapper[4558]: I0120 16:42:47.919370 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:47 crc kubenswrapper[4558]: I0120 16:42:47.919384 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:47 crc kubenswrapper[4558]: I0120 16:42:47.919397 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:47 crc kubenswrapper[4558]: I0120 16:42:47.919406 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:47Z","lastTransitionTime":"2026-01-20T16:42:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.021213 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.021249 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.021257 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.021270 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.021279 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:48Z","lastTransitionTime":"2026-01-20T16:42:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.122690 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.122725 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.122733 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.122745 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.122753 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:48Z","lastTransitionTime":"2026-01-20T16:42:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.224822 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.224866 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.224877 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.224894 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.224904 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:48Z","lastTransitionTime":"2026-01-20T16:42:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.327476 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.327733 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.327808 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.327878 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.327943 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:48Z","lastTransitionTime":"2026-01-20T16:42:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.350904 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.350957 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.350965 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.350976 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.350984 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:48Z","lastTransitionTime":"2026-01-20T16:42:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:48 crc kubenswrapper[4558]: E0120 16:42:48.359629 4558 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"bf027402-7a62-478b-8585-220c98495823\\\",\\\"systemUUID\\\":\\\"1def282c-e24e-4bc0-9a1b-d7cc30756d3d\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:48Z is after 
2025-08-24T17:21:41Z" Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.361863 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.361906 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.361915 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.361925 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.361933 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:48Z","lastTransitionTime":"2026-01-20T16:42:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:48 crc kubenswrapper[4558]: E0120 16:42:48.370155 4558 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"bf027402-7a62-478b-8585-220c98495823\\\",\\\"systemUUID\\\":\\\"1def282c-e24e-4bc0-9a1b-d7cc30756d3d\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:48Z is after 
2025-08-24T17:21:41Z" Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.378302 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.378365 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.378375 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.378389 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.378398 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:48Z","lastTransitionTime":"2026-01-20T16:42:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:48 crc kubenswrapper[4558]: E0120 16:42:48.386543 4558 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"bf027402-7a62-478b-8585-220c98495823\\\",\\\"systemUUID\\\":\\\"1def282c-e24e-4bc0-9a1b-d7cc30756d3d\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:48Z is after 
2025-08-24T17:21:41Z" Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.389849 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.389886 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.389897 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.389927 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.389938 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:48Z","lastTransitionTime":"2026-01-20T16:42:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:48 crc kubenswrapper[4558]: E0120 16:42:48.398119 4558 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"bf027402-7a62-478b-8585-220c98495823\\\",\\\"systemUUID\\\":\\\"1def282c-e24e-4bc0-9a1b-d7cc30756d3d\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:48Z is after 
2025-08-24T17:21:41Z" Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.400706 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.400773 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.400787 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.400813 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.400825 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:48Z","lastTransitionTime":"2026-01-20T16:42:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:48 crc kubenswrapper[4558]: E0120 16:42:48.409729 4558 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:48Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"bf027402-7a62-478b-8585-220c98495823\\\",\\\"systemUUID\\\":\\\"1def282c-e24e-4bc0-9a1b-d7cc30756d3d\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:48Z is after 
2025-08-24T17:21:41Z" Jan 20 16:42:48 crc kubenswrapper[4558]: E0120 16:42:48.409852 4558 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.430375 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.430432 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.430445 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.430467 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.430481 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:48Z","lastTransitionTime":"2026-01-20T16:42:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.534180 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.534211 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.534220 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.534232 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.534240 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:48Z","lastTransitionTime":"2026-01-20T16:42:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.566199 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.566209 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.566264 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:42:48 crc kubenswrapper[4558]: E0120 16:42:48.566289 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 20 16:42:48 crc kubenswrapper[4558]: E0120 16:42:48.566357 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 20 16:42:48 crc kubenswrapper[4558]: E0120 16:42:48.566405 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.567713 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-18 09:24:58.989777157 +0000 UTC Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.636280 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.636315 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.636328 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.636342 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.636352 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:48Z","lastTransitionTime":"2026-01-20T16:42:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.738729 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.738782 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.738791 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.738802 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.738810 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:48Z","lastTransitionTime":"2026-01-20T16:42:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.840025 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.840055 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.840064 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.840076 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.840085 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:48Z","lastTransitionTime":"2026-01-20T16:42:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.942915 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.943079 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.943180 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.943200 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:48 crc kubenswrapper[4558]: I0120 16:42:48.943209 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:48Z","lastTransitionTime":"2026-01-20T16:42:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:49 crc kubenswrapper[4558]: I0120 16:42:49.045287 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:49 crc kubenswrapper[4558]: I0120 16:42:49.045321 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:49 crc kubenswrapper[4558]: I0120 16:42:49.045329 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:49 crc kubenswrapper[4558]: I0120 16:42:49.045342 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:49 crc kubenswrapper[4558]: I0120 16:42:49.045352 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:49Z","lastTransitionTime":"2026-01-20T16:42:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:49 crc kubenswrapper[4558]: I0120 16:42:49.146968 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:49 crc kubenswrapper[4558]: I0120 16:42:49.147014 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:49 crc kubenswrapper[4558]: I0120 16:42:49.147027 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:49 crc kubenswrapper[4558]: I0120 16:42:49.147041 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:49 crc kubenswrapper[4558]: I0120 16:42:49.147051 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:49Z","lastTransitionTime":"2026-01-20T16:42:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:49 crc kubenswrapper[4558]: I0120 16:42:49.248972 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:49 crc kubenswrapper[4558]: I0120 16:42:49.249007 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:49 crc kubenswrapper[4558]: I0120 16:42:49.249015 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:49 crc kubenswrapper[4558]: I0120 16:42:49.249028 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:49 crc kubenswrapper[4558]: I0120 16:42:49.249037 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:49Z","lastTransitionTime":"2026-01-20T16:42:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:49 crc kubenswrapper[4558]: I0120 16:42:49.350708 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:49 crc kubenswrapper[4558]: I0120 16:42:49.350756 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:49 crc kubenswrapper[4558]: I0120 16:42:49.350764 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:49 crc kubenswrapper[4558]: I0120 16:42:49.350778 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:49 crc kubenswrapper[4558]: I0120 16:42:49.350788 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:49Z","lastTransitionTime":"2026-01-20T16:42:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:49 crc kubenswrapper[4558]: I0120 16:42:49.452707 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:49 crc kubenswrapper[4558]: I0120 16:42:49.452743 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:49 crc kubenswrapper[4558]: I0120 16:42:49.452751 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:49 crc kubenswrapper[4558]: I0120 16:42:49.452765 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:49 crc kubenswrapper[4558]: I0120 16:42:49.452775 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:49Z","lastTransitionTime":"2026-01-20T16:42:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:49 crc kubenswrapper[4558]: I0120 16:42:49.555100 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:49 crc kubenswrapper[4558]: I0120 16:42:49.555134 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:49 crc kubenswrapper[4558]: I0120 16:42:49.555142 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:49 crc kubenswrapper[4558]: I0120 16:42:49.555154 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:49 crc kubenswrapper[4558]: I0120 16:42:49.555187 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:49Z","lastTransitionTime":"2026-01-20T16:42:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:49 crc kubenswrapper[4558]: I0120 16:42:49.565474 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9wrq6" Jan 20 16:42:49 crc kubenswrapper[4558]: E0120 16:42:49.565572 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9wrq6" podUID="30032328-bd33-4073-9366-e10bc5e2aa77" Jan 20 16:42:49 crc kubenswrapper[4558]: I0120 16:42:49.568582 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-16 00:32:17.915323182 +0000 UTC Jan 20 16:42:49 crc kubenswrapper[4558]: I0120 16:42:49.657613 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:49 crc kubenswrapper[4558]: I0120 16:42:49.657818 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:49 crc kubenswrapper[4558]: I0120 16:42:49.657887 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:49 crc kubenswrapper[4558]: I0120 16:42:49.657955 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:49 crc kubenswrapper[4558]: I0120 16:42:49.658008 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:49Z","lastTransitionTime":"2026-01-20T16:42:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:49 crc kubenswrapper[4558]: I0120 16:42:49.760452 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:49 crc kubenswrapper[4558]: I0120 16:42:49.760713 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:49 crc kubenswrapper[4558]: I0120 16:42:49.760787 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:49 crc kubenswrapper[4558]: I0120 16:42:49.760860 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:49 crc kubenswrapper[4558]: I0120 16:42:49.760928 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:49Z","lastTransitionTime":"2026-01-20T16:42:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:49 crc kubenswrapper[4558]: I0120 16:42:49.787920 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/30032328-bd33-4073-9366-e10bc5e2aa77-metrics-certs\") pod \"network-metrics-daemon-9wrq6\" (UID: \"30032328-bd33-4073-9366-e10bc5e2aa77\") " pod="openshift-multus/network-metrics-daemon-9wrq6" Jan 20 16:42:49 crc kubenswrapper[4558]: E0120 16:42:49.788048 4558 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 20 16:42:49 crc kubenswrapper[4558]: E0120 16:42:49.788094 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/30032328-bd33-4073-9366-e10bc5e2aa77-metrics-certs podName:30032328-bd33-4073-9366-e10bc5e2aa77 nodeName:}" failed. No retries permitted until 2026-01-20 16:43:21.788081999 +0000 UTC m=+95.548419957 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/30032328-bd33-4073-9366-e10bc5e2aa77-metrics-certs") pod "network-metrics-daemon-9wrq6" (UID: "30032328-bd33-4073-9366-e10bc5e2aa77") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 20 16:42:49 crc kubenswrapper[4558]: I0120 16:42:49.862682 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:49 crc kubenswrapper[4558]: I0120 16:42:49.862712 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:49 crc kubenswrapper[4558]: I0120 16:42:49.862720 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:49 crc kubenswrapper[4558]: I0120 16:42:49.862732 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:49 crc kubenswrapper[4558]: I0120 16:42:49.862740 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:49Z","lastTransitionTime":"2026-01-20T16:42:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:49 crc kubenswrapper[4558]: I0120 16:42:49.964867 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:49 crc kubenswrapper[4558]: I0120 16:42:49.965059 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:49 crc kubenswrapper[4558]: I0120 16:42:49.965130 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:49 crc kubenswrapper[4558]: I0120 16:42:49.965213 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:49 crc kubenswrapper[4558]: I0120 16:42:49.965298 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:49Z","lastTransitionTime":"2026-01-20T16:42:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:50 crc kubenswrapper[4558]: I0120 16:42:50.066992 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:50 crc kubenswrapper[4558]: I0120 16:42:50.067031 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:50 crc kubenswrapper[4558]: I0120 16:42:50.067040 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:50 crc kubenswrapper[4558]: I0120 16:42:50.067053 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:50 crc kubenswrapper[4558]: I0120 16:42:50.067062 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:50Z","lastTransitionTime":"2026-01-20T16:42:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:50 crc kubenswrapper[4558]: I0120 16:42:50.168694 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:50 crc kubenswrapper[4558]: I0120 16:42:50.168729 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:50 crc kubenswrapper[4558]: I0120 16:42:50.168739 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:50 crc kubenswrapper[4558]: I0120 16:42:50.168751 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:50 crc kubenswrapper[4558]: I0120 16:42:50.168775 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:50Z","lastTransitionTime":"2026-01-20T16:42:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:50 crc kubenswrapper[4558]: I0120 16:42:50.270995 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:50 crc kubenswrapper[4558]: I0120 16:42:50.271028 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:50 crc kubenswrapper[4558]: I0120 16:42:50.271036 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:50 crc kubenswrapper[4558]: I0120 16:42:50.271048 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:50 crc kubenswrapper[4558]: I0120 16:42:50.271056 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:50Z","lastTransitionTime":"2026-01-20T16:42:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:50 crc kubenswrapper[4558]: I0120 16:42:50.372830 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:50 crc kubenswrapper[4558]: I0120 16:42:50.372866 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:50 crc kubenswrapper[4558]: I0120 16:42:50.372875 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:50 crc kubenswrapper[4558]: I0120 16:42:50.372890 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:50 crc kubenswrapper[4558]: I0120 16:42:50.372899 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:50Z","lastTransitionTime":"2026-01-20T16:42:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:50 crc kubenswrapper[4558]: I0120 16:42:50.475138 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:50 crc kubenswrapper[4558]: I0120 16:42:50.475195 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:50 crc kubenswrapper[4558]: I0120 16:42:50.475210 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:50 crc kubenswrapper[4558]: I0120 16:42:50.475225 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:50 crc kubenswrapper[4558]: I0120 16:42:50.475234 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:50Z","lastTransitionTime":"2026-01-20T16:42:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:50 crc kubenswrapper[4558]: I0120 16:42:50.565530 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 20 16:42:50 crc kubenswrapper[4558]: I0120 16:42:50.565605 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 20 16:42:50 crc kubenswrapper[4558]: I0120 16:42:50.565655 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:42:50 crc kubenswrapper[4558]: E0120 16:42:50.565743 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 20 16:42:50 crc kubenswrapper[4558]: E0120 16:42:50.565844 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 20 16:42:50 crc kubenswrapper[4558]: E0120 16:42:50.565937 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 20 16:42:50 crc kubenswrapper[4558]: I0120 16:42:50.568991 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-17 04:16:41.141069144 +0000 UTC Jan 20 16:42:50 crc kubenswrapper[4558]: I0120 16:42:50.584533 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:50 crc kubenswrapper[4558]: I0120 16:42:50.584564 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:50 crc kubenswrapper[4558]: I0120 16:42:50.584572 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:50 crc kubenswrapper[4558]: I0120 16:42:50.584583 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:50 crc kubenswrapper[4558]: I0120 16:42:50.584609 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:50Z","lastTransitionTime":"2026-01-20T16:42:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:50 crc kubenswrapper[4558]: I0120 16:42:50.686306 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:50 crc kubenswrapper[4558]: I0120 16:42:50.686341 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:50 crc kubenswrapper[4558]: I0120 16:42:50.686349 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:50 crc kubenswrapper[4558]: I0120 16:42:50.686361 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:50 crc kubenswrapper[4558]: I0120 16:42:50.686370 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:50Z","lastTransitionTime":"2026-01-20T16:42:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:50 crc kubenswrapper[4558]: I0120 16:42:50.788656 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:50 crc kubenswrapper[4558]: I0120 16:42:50.788685 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:50 crc kubenswrapper[4558]: I0120 16:42:50.788694 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:50 crc kubenswrapper[4558]: I0120 16:42:50.788705 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:50 crc kubenswrapper[4558]: I0120 16:42:50.788713 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:50Z","lastTransitionTime":"2026-01-20T16:42:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:50 crc kubenswrapper[4558]: I0120 16:42:50.890125 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:50 crc kubenswrapper[4558]: I0120 16:42:50.890158 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:50 crc kubenswrapper[4558]: I0120 16:42:50.890183 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:50 crc kubenswrapper[4558]: I0120 16:42:50.890196 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:50 crc kubenswrapper[4558]: I0120 16:42:50.890204 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:50Z","lastTransitionTime":"2026-01-20T16:42:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:50 crc kubenswrapper[4558]: I0120 16:42:50.991729 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:50 crc kubenswrapper[4558]: I0120 16:42:50.991756 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:50 crc kubenswrapper[4558]: I0120 16:42:50.991764 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:50 crc kubenswrapper[4558]: I0120 16:42:50.991776 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:50 crc kubenswrapper[4558]: I0120 16:42:50.991783 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:50Z","lastTransitionTime":"2026-01-20T16:42:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:51 crc kubenswrapper[4558]: I0120 16:42:51.093980 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:51 crc kubenswrapper[4558]: I0120 16:42:51.094012 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:51 crc kubenswrapper[4558]: I0120 16:42:51.094020 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:51 crc kubenswrapper[4558]: I0120 16:42:51.094032 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:51 crc kubenswrapper[4558]: I0120 16:42:51.094040 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:51Z","lastTransitionTime":"2026-01-20T16:42:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:51 crc kubenswrapper[4558]: I0120 16:42:51.195814 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:51 crc kubenswrapper[4558]: I0120 16:42:51.195843 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:51 crc kubenswrapper[4558]: I0120 16:42:51.195851 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:51 crc kubenswrapper[4558]: I0120 16:42:51.195861 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:51 crc kubenswrapper[4558]: I0120 16:42:51.195869 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:51Z","lastTransitionTime":"2026-01-20T16:42:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:51 crc kubenswrapper[4558]: I0120 16:42:51.297747 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:51 crc kubenswrapper[4558]: I0120 16:42:51.297835 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:51 crc kubenswrapper[4558]: I0120 16:42:51.297847 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:51 crc kubenswrapper[4558]: I0120 16:42:51.297871 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:51 crc kubenswrapper[4558]: I0120 16:42:51.297886 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:51Z","lastTransitionTime":"2026-01-20T16:42:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:51 crc kubenswrapper[4558]: I0120 16:42:51.400930 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:51 crc kubenswrapper[4558]: I0120 16:42:51.400966 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:51 crc kubenswrapper[4558]: I0120 16:42:51.400975 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:51 crc kubenswrapper[4558]: I0120 16:42:51.400986 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:51 crc kubenswrapper[4558]: I0120 16:42:51.400995 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:51Z","lastTransitionTime":"2026-01-20T16:42:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:51 crc kubenswrapper[4558]: I0120 16:42:51.505139 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:51 crc kubenswrapper[4558]: I0120 16:42:51.505221 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:51 crc kubenswrapper[4558]: I0120 16:42:51.505238 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:51 crc kubenswrapper[4558]: I0120 16:42:51.505261 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:51 crc kubenswrapper[4558]: I0120 16:42:51.505273 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:51Z","lastTransitionTime":"2026-01-20T16:42:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:51 crc kubenswrapper[4558]: I0120 16:42:51.565125 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9wrq6" Jan 20 16:42:51 crc kubenswrapper[4558]: E0120 16:42:51.565402 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9wrq6" podUID="30032328-bd33-4073-9366-e10bc5e2aa77" Jan 20 16:42:51 crc kubenswrapper[4558]: I0120 16:42:51.569302 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-29 20:29:54.31154199 +0000 UTC Jan 20 16:42:51 crc kubenswrapper[4558]: I0120 16:42:51.608003 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:51 crc kubenswrapper[4558]: I0120 16:42:51.608326 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:51 crc kubenswrapper[4558]: I0120 16:42:51.608406 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:51 crc kubenswrapper[4558]: I0120 16:42:51.608484 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:51 crc kubenswrapper[4558]: I0120 16:42:51.608550 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:51Z","lastTransitionTime":"2026-01-20T16:42:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:51 crc kubenswrapper[4558]: I0120 16:42:51.710321 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:51 crc kubenswrapper[4558]: I0120 16:42:51.710356 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:51 crc kubenswrapper[4558]: I0120 16:42:51.710364 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:51 crc kubenswrapper[4558]: I0120 16:42:51.710375 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:51 crc kubenswrapper[4558]: I0120 16:42:51.710383 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:51Z","lastTransitionTime":"2026-01-20T16:42:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:51 crc kubenswrapper[4558]: I0120 16:42:51.808925 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-jsqvf_bedf08c7-1f93-4931-a7f3-e729e2a137af/kube-multus/0.log" Jan 20 16:42:51 crc kubenswrapper[4558]: I0120 16:42:51.808965 4558 generic.go:334] "Generic (PLEG): container finished" podID="bedf08c7-1f93-4931-a7f3-e729e2a137af" containerID="9e32b76495d88d356373f9389e163a2f3a72e202bb454c7015594c1f5a41b511" exitCode=1 Jan 20 16:42:51 crc kubenswrapper[4558]: I0120 16:42:51.808988 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-jsqvf" event={"ID":"bedf08c7-1f93-4931-a7f3-e729e2a137af","Type":"ContainerDied","Data":"9e32b76495d88d356373f9389e163a2f3a72e202bb454c7015594c1f5a41b511"} Jan 20 16:42:51 crc kubenswrapper[4558]: I0120 16:42:51.809260 4558 scope.go:117] "RemoveContainer" containerID="9e32b76495d88d356373f9389e163a2f3a72e202bb454c7015594c1f5a41b511" Jan 20 16:42:51 crc kubenswrapper[4558]: I0120 16:42:51.811505 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:51 crc kubenswrapper[4558]: I0120 16:42:51.811547 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:51 crc kubenswrapper[4558]: I0120 16:42:51.811556 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:51 crc kubenswrapper[4558]: I0120 16:42:51.811596 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:51 crc kubenswrapper[4558]: I0120 16:42:51.811607 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:51Z","lastTransitionTime":"2026-01-20T16:42:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:51 crc kubenswrapper[4558]: I0120 16:42:51.818561 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:51Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:51 crc kubenswrapper[4558]: I0120 16:42:51.825223 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d96bp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8d2c8b-fb8e-4bff-b8d2-69570e5d9e57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe858db22acc96b062f1bc7941e05c823f30d1f9ba50bd497c57f38d57e04293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4v6jx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d96bp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:51Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:51 crc kubenswrapper[4558]: I0120 16:42:51.839902 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"254129cd-82fc-4162-b671-2434bc9e2972\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a6a184067bde115ef5e09fdd90e7dccfe89a9dc347af450998b91c068e7e803\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd8b93e5559d95511a1285a9aec9a7d72df6c330bcd1e1daf3e93f5494f70eeb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4264ef8ee49a8b172fb05ac5c9b0bd32350b5d9ae95293079dfeaf44f021b455\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0889cf371f8b06cc2d255c15fda97db1d5000d6f6cff29d2fdcb8667cede8a99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4265d3281e954ee5989ee008da28c6cb9fa29478e5fdf23325521d455304da9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://186a1d2caad8865abf7f565405c4a4c077038ccb67ba77927cc2392ec075a811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d28c2f9293ff09b5e7d4ca4df1633cb114f685ad9a9dc8697e9efe3e9603259\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d28c2f9293ff09b5e7d4ca4df1633cb114f685ad9a9dc8697e9efe3e9603259\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-20T16:42:30Z\\\",\\\"message\\\":\\\"6215 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0120 16:42:30.148863 6215 obj_retry.go:365] Adding new object: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI0120 16:42:30.148994 6215 ovn.go:134] Ensuring zone local for Pod openshift-kube-apiserver/kube-apiserver-crc in node crc\\\\nI0120 16:42:30.149000 6215 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-apiserver/kube-apiserver-crc after 0 failed attempt(s)\\\\nI0120 16:42:30.149004 6215 default_network_controller.go:776] Recording success event on pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI0120 16:42:30.148969 6215 services_controller.go:445] Built service openshift-kube-storage-version-migrator-operator/metrics LB template configs for network=default: []services.lbConfig(nil)\\\\nI0120 16:42:30.149028 6215 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-operator/iptables-alerter-4ln5h\\\\nI0120 16:42:30.149009 6215 base_network_controller_pods.go:916] Annotation values: ip=[10.217.0.3/23] ; mac=0a:58:0a:d9:00:03 ; gw=[10.217.0.1]\\\\nF0120 16:42:30.149066 6215 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller ini\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:29Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-nv2xw_openshift-ovn-kubernetes(254129cd-82fc-4162-b671-2434bc9e2972)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f41d488b0c24e594a2f5cad36b5f8efdb7d867fae0e18a74fe3c396a203d162f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-nv2xw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:51Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:51 crc kubenswrapper[4558]: I0120 16:42:51.851372 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f5t7h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a318f03d0df490cb7bae11819c52eca011d163081aeecff1b2dd8f72caabbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"
}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e77617c69434ce20c448b525c7e86f1ece18d51ce6e14167b4ca7e99de8e5709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e77617c69434ce20c448b525c7e86f1ece18d51ce6e14167b4ca7e99de8e5709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://453c67434281e0261ec6799b30a548a01efd72d7df96632409e745b2e6a94546\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://453c67434281e0261ec6799b30a548a01efd72d7df96632409e745b2e6a94546\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt
\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06eab4daa6c8b6a564cf1bb1403b7b3b259a234ee09f23b4f4bc0628c2d58e69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06eab4daa6c8b6a564cf1bb1403b7b3b259a234ee09f23b4f4bc0628c2d58e69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1291ac09fc6ae2a79fdeaa0fc47272f61b97f45f67ed3f7c90bdfdbfe3519ff2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1291ac09fc6ae2a79fdeaa0fc47272f61b97f45f67ed3f7c90bdfdbfe3519ff2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe484514f50a14e485d4a97ea3ce9401c77f3f4dae4717719be7ed6c4d7b1a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"nam
e\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe484514f50a14e485d4a97ea3ce9401c77f3f4dae4717719be7ed6c4d7b1a59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f5t7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:51Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:51 crc kubenswrapper[4558]: I0120 16:42:51.863255 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9wrq6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30032328-bd33-4073-9366-e10bc5e2aa77\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6tz4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6tz4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:17Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9wrq6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:51Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:51 crc kubenswrapper[4558]: I0120 16:42:51.872320 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a13bfefa-f525-4ecd-89f5-f10480532bf6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9030e970e96b6909d771d4fa0c3b4f493b4a825cbc43ef7e684272284329c7c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://168e8f533aaa108e4160f9ae4af6bb7944edba7a596178ffffdc5325073fc3c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e0141aef1babb57501016cb09787591b5e6d9136b8d7dd4ba64393f0be5a027\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c87f08993c2b47c63ba9652c7dafe0fdbecdcb03a7b7080664ab1f1bd0e1d602\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:51Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:51 crc kubenswrapper[4558]: I0120 16:42:51.881488 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f904dd3e4b43e51569b6532f00fa2f9edf912c1f1969ee2ea00c99d1b7e35a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c877f4a456b3ce64bdbe6fefd50f1ef3f92bf9e26e6296005cf366e0ce265\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:51Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:51 crc kubenswrapper[4558]: I0120 16:42:51.890364 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e45cc56934a7b90e9d47f1c6d06d8dc140d57db4a216469e8ae112f398663e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:51Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:51 crc kubenswrapper[4558]: I0120 16:42:51.899445 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jsqvf" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bedf08c7-1f93-4931-a7f3-e729e2a137af\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e32b76495d88d356373f9389e163a2f3a72e202bb454c7015594c1f5a41b511\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e32b76495d88d356373f9389e163a2f3a72e202bb454c7015594c1f5a41b511\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-20T16:42:50Z\\\",\\\"message\\\":\\\"2026-01-20T16:42:05+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_149f95a3-037e-4113-9baf-ba51c18b4cf2\\\\n2026-01-20T16:42:05+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_149f95a3-037e-4113-9baf-ba51c18b4cf2 to /host/opt/cni/bin/\\\\n2026-01-20T16:42:05Z [verbose] multus-daemon started\\\\n2026-01-20T16:42:05Z [verbose] Readiness Indicator file check\\\\n2026-01-20T16:42:50Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xxtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jsqvf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:51Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:51 crc kubenswrapper[4558]: I0120 16:42:51.906990 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-47477" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e026445a-aa73-4a0e-ba35-e2e07de1278c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec2ee3ff161cba7e14ad26763081c9f092f0dfe969cef113631c27e0db53d157\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xcnwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-47477\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:51Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:51 crc kubenswrapper[4558]: I0120 16:42:51.912926 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:51 crc kubenswrapper[4558]: I0120 16:42:51.912953 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:51 crc kubenswrapper[4558]: I0120 16:42:51.912962 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:51 crc kubenswrapper[4558]: I0120 16:42:51.912976 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:51 crc kubenswrapper[4558]: I0120 16:42:51.912984 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:51Z","lastTransitionTime":"2026-01-20T16:42:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:51 crc kubenswrapper[4558]: I0120 16:42:51.924757 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"425eab90-fb70-4498-a98b-b95742033890\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48592e72d91cf527296f06e52fec1c16c1da21323c0644659945af109a74ebb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7e18f59b5e6b6c0be141b7efb3ccb1df4ff6e3c394bcc88fb1142c8df433493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c889e2b0e14244ef49ec057e3d5fd91687e335a983fdc99a6816abd2af643ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":
0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d434793571cbba089cc34029d951bee9063aca58728b791f2d1af2d68229f778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89f7ad577df11c509855a90ac8791da6de3f0d2857a1ca56726a6327f40455bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15eddb9054021\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termi
nated\\\":{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15eddb9054021\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:51Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:51 crc kubenswrapper[4558]: I0120 16:42:51.935964 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"30e1ff9d-8dcd-4754-9a7a-c09598fb83db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ea68e442bb2143fbaa5e2c9fe335c6b023a31c2eeaa78d60de45c6ef40c2894\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a8122c3b57ac4f0026df149fe622450d07d8b0d517ad018decd7b21d8d9c04a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7444e8fea1853bae45dbd3d52d07e3f7b94314cd29fe8c99891e515a892e569e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6438ffea42f98178c4c2c21ae01725a8b54d6a6f790f47d64faf5ecd9f1c00f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2e22f1d4e87dd06a67f49aedad280da48ac1c4ba2e438a4925cf8bd5330c4d8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0120 16:41:58.859734 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0120 16:41:58.861094 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179835226/tls.crt::/tmp/serving-cert-2179835226/tls.key\\\\\\\"\\\\nI0120 16:42:04.205247 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0120 16:42:04.207291 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0120 16:42:04.207308 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0120 16:42:04.207329 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0120 16:42:04.207333 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0120 16:42:04.210615 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0120 16:42:04.210632 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0120 16:42:04.210641 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210648 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0120 16:42:04.210655 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0120 16:42:04.210658 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0120 16:42:04.210660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0120 16:42:04.211964 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4850bde1fb538e21fc91949dc584ef0a791d718844691a44559e0513bb36203\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:51Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:51 crc kubenswrapper[4558]: I0120 16:42:51.944950 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"68337d27-3fa6-4a29-88b0-82e60c3739eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de7f7c23993a57d30f08686e1cd4abb5b17e108ac12dd0c7929a31574c69eeb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18a59ecb802507e0d5988eacc915fd7e0d6954518de80f61162c97f680fd8c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:51Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:51 crc kubenswrapper[4558]: I0120 16:42:51.954375 4558 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60aafae7-2e6e-4d21-9ce6-8ff5382ef642\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d16b973c217656d6a8ceaf684e9dec82daeb583209943f6b2979be47ab03fda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb2e7753f75b10088eea9204511ae78e4be9a2b5be8c17ed04c3c972b13f8190\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a426b97002a49938b18009db49867dc9a15102707b42ec46bc64093c0ed3c2c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":
[{\\\"containerID\\\":\\\"cri-o://6e4df1c3b2a1aef9adb3da64f89a1b8c0904fd22882609dbafe2fbc5718b8016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e4df1c3b2a1aef9adb3da64f89a1b8c0904fd22882609dbafe2fbc5718b8016\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:51Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:51 crc kubenswrapper[4558]: I0120 16:42:51.964251 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e190c24c1d8a3bc0241334598ce23829ada9afee282d49a0c82cd67ad82bcd02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:51Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:51 crc kubenswrapper[4558]: I0120 16:42:51.974937 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:51Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:51 crc kubenswrapper[4558]: I0120 16:42:51.984447 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:51Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:51 crc kubenswrapper[4558]: I0120 16:42:51.992955 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfwnl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8fe5472-7373-4e5d-87fc-a9ecaf26a5cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38cd58ecc67d337df2c29b79e5460ea23d4acc1e66de8b08ea70d7ab7b30691d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9nvl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a14e9281ff19e324499722aefadf1e1f2fd005089c153918ad4d8a7f5bdfa853\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9nvl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rfwnl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:51Z is after 2025-08-24T17:21:41Z" Jan 20 
16:42:52 crc kubenswrapper[4558]: I0120 16:42:52.014572 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:52 crc kubenswrapper[4558]: I0120 16:42:52.014621 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:52 crc kubenswrapper[4558]: I0120 16:42:52.014632 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:52 crc kubenswrapper[4558]: I0120 16:42:52.014645 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:52 crc kubenswrapper[4558]: I0120 16:42:52.014654 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:52Z","lastTransitionTime":"2026-01-20T16:42:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:52 crc kubenswrapper[4558]: I0120 16:42:52.116870 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:52 crc kubenswrapper[4558]: I0120 16:42:52.116912 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:52 crc kubenswrapper[4558]: I0120 16:42:52.116931 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:52 crc kubenswrapper[4558]: I0120 16:42:52.116945 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:52 crc kubenswrapper[4558]: I0120 16:42:52.116954 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:52Z","lastTransitionTime":"2026-01-20T16:42:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:52 crc kubenswrapper[4558]: I0120 16:42:52.219646 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:52 crc kubenswrapper[4558]: I0120 16:42:52.219698 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:52 crc kubenswrapper[4558]: I0120 16:42:52.219726 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:52 crc kubenswrapper[4558]: I0120 16:42:52.219745 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:52 crc kubenswrapper[4558]: I0120 16:42:52.219755 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:52Z","lastTransitionTime":"2026-01-20T16:42:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:52 crc kubenswrapper[4558]: I0120 16:42:52.321860 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:52 crc kubenswrapper[4558]: I0120 16:42:52.321916 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:52 crc kubenswrapper[4558]: I0120 16:42:52.321925 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:52 crc kubenswrapper[4558]: I0120 16:42:52.321945 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:52 crc kubenswrapper[4558]: I0120 16:42:52.321956 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:52Z","lastTransitionTime":"2026-01-20T16:42:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:52 crc kubenswrapper[4558]: I0120 16:42:52.424016 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:52 crc kubenswrapper[4558]: I0120 16:42:52.424062 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:52 crc kubenswrapper[4558]: I0120 16:42:52.424072 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:52 crc kubenswrapper[4558]: I0120 16:42:52.424089 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:52 crc kubenswrapper[4558]: I0120 16:42:52.424098 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:52Z","lastTransitionTime":"2026-01-20T16:42:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:52 crc kubenswrapper[4558]: I0120 16:42:52.526254 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:52 crc kubenswrapper[4558]: I0120 16:42:52.526302 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:52 crc kubenswrapper[4558]: I0120 16:42:52.526311 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:52 crc kubenswrapper[4558]: I0120 16:42:52.526329 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:52 crc kubenswrapper[4558]: I0120 16:42:52.526339 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:52Z","lastTransitionTime":"2026-01-20T16:42:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:52 crc kubenswrapper[4558]: I0120 16:42:52.565755 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:42:52 crc kubenswrapper[4558]: I0120 16:42:52.565788 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 20 16:42:52 crc kubenswrapper[4558]: I0120 16:42:52.565921 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 20 16:42:52 crc kubenswrapper[4558]: E0120 16:42:52.566050 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 20 16:42:52 crc kubenswrapper[4558]: E0120 16:42:52.566132 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 20 16:42:52 crc kubenswrapper[4558]: E0120 16:42:52.566347 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 20 16:42:52 crc kubenswrapper[4558]: I0120 16:42:52.569529 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-07 05:34:06.420562517 +0000 UTC Jan 20 16:42:52 crc kubenswrapper[4558]: I0120 16:42:52.628710 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:52 crc kubenswrapper[4558]: I0120 16:42:52.628774 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:52 crc kubenswrapper[4558]: I0120 16:42:52.628786 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:52 crc kubenswrapper[4558]: I0120 16:42:52.628801 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:52 crc kubenswrapper[4558]: I0120 16:42:52.628811 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:52Z","lastTransitionTime":"2026-01-20T16:42:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:52 crc kubenswrapper[4558]: I0120 16:42:52.731335 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:52 crc kubenswrapper[4558]: I0120 16:42:52.731403 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:52 crc kubenswrapper[4558]: I0120 16:42:52.731417 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:52 crc kubenswrapper[4558]: I0120 16:42:52.731444 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:52 crc kubenswrapper[4558]: I0120 16:42:52.731458 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:52Z","lastTransitionTime":"2026-01-20T16:42:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:52 crc kubenswrapper[4558]: I0120 16:42:52.814029 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-jsqvf_bedf08c7-1f93-4931-a7f3-e729e2a137af/kube-multus/0.log" Jan 20 16:42:52 crc kubenswrapper[4558]: I0120 16:42:52.814120 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-jsqvf" event={"ID":"bedf08c7-1f93-4931-a7f3-e729e2a137af","Type":"ContainerStarted","Data":"2297634e20683413a5eb643517580c9486303dc9616a7588a4ac571f3d0e53b5"} Jan 20 16:42:52 crc kubenswrapper[4558]: I0120 16:42:52.831637 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"425eab90-fb70-4498-a98b-b95742033890\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48592e72d91cf527296f06e52fec1c16c1da21323c0644659945af109a74ebb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7e18f59b5e6b6c0be141b7efb3ccb1df4ff6e3c394bcc88fb1142c8df433493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c889e2b0e14244ef49ec057e3d5fd91687e335a983fdc99a6816abd2af643ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d434793571cbba089cc34029d951bee9063aca5
8728b791f2d1af2d68229f778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89f7ad577df11c509855a90ac8791da6de3f0d2857a1ca56726a6327f40455bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15eddb9054021\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15eddb9054021\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:52Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:52 crc kubenswrapper[4558]: I0120 16:42:52.834239 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:52 crc kubenswrapper[4558]: I0120 16:42:52.834283 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:52 crc kubenswrapper[4558]: I0120 16:42:52.834294 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:52 crc kubenswrapper[4558]: I0120 16:42:52.834310 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:52 crc kubenswrapper[4558]: I0120 16:42:52.834319 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:52Z","lastTransitionTime":"2026-01-20T16:42:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:52 crc kubenswrapper[4558]: I0120 16:42:52.843419 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30e1ff9d-8dcd-4754-9a7a-c09598fb83db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ea68e442bb2143fbaa5e2c9fe335c6b023a31c2eeaa78d60de45c6ef40c2894\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a8122c3b57ac4f0026df149fe622450d07d8b0d517ad018decd7b21d8d9c04a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7444e8fea1853bae45dbd3d52d07e3f7b94314cd29fe8c99891e515a892e569e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6438ffea42f98178c4c2c21ae01725a8b54d6a6f790f47d64faf5ecd9f1c00f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2e22f1d4e87dd06a67f49aedad280da48ac1c4ba2e438a4925cf8bd5330c4d8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0120 16:41:58.859734 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0120 16:41:58.861094 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179835226/tls.crt::/tmp/serving-cert-2179835226/tls.key\\\\\\\"\\\\nI0120 16:42:04.205247 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0120 16:42:04.207291 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0120 16:42:04.207308 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0120 16:42:04.207329 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0120 16:42:04.207333 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0120 16:42:04.210615 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0120 16:42:04.210632 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0120 16:42:04.210641 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210648 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0120 16:42:04.210655 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0120 16:42:04.210658 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0120 16:42:04.210660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0120 16:42:04.211964 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4850bde1fb538e21fc91949dc584ef0a791d718844691a44559e0513bb36203\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:52Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:52 crc kubenswrapper[4558]: I0120 16:42:52.853665 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"68337d27-3fa6-4a29-88b0-82e60c3739eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de7f7c23993a57d30f08686e1cd4abb5b17e108ac12dd0c7929a31574c69eeb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18a59ecb802507e0d5988eacc915fd7e0d6954518de80f61162c97f680fd8c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:52Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:52 crc kubenswrapper[4558]: I0120 16:42:52.862765 4558 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60aafae7-2e6e-4d21-9ce6-8ff5382ef642\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d16b973c217656d6a8ceaf684e9dec82daeb583209943f6b2979be47ab03fda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb2e7753f75b10088eea9204511ae78e4be9a2b5be8c17ed04c3c972b13f8190\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a426b97002a49938b18009db49867dc9a15102707b42ec46bc64093c0ed3c2c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":
[{\\\"containerID\\\":\\\"cri-o://6e4df1c3b2a1aef9adb3da64f89a1b8c0904fd22882609dbafe2fbc5718b8016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e4df1c3b2a1aef9adb3da64f89a1b8c0904fd22882609dbafe2fbc5718b8016\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:52Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:52 crc kubenswrapper[4558]: I0120 16:42:52.873673 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e190c24c1d8a3bc0241334598ce23829ada9afee282d49a0c82cd67ad82bcd02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:52Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:52 crc kubenswrapper[4558]: I0120 16:42:52.883457 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:52Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:52 crc kubenswrapper[4558]: I0120 16:42:52.894335 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:52Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:52 crc kubenswrapper[4558]: I0120 16:42:52.907127 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfwnl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8fe5472-7373-4e5d-87fc-a9ecaf26a5cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38cd58ecc67d337df2c29b79e5460ea23d4acc1e66de8b08ea70d7ab7b30691d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9nvl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a14e9281ff19e324499722aefadf1e1f2fd005089c153918ad4d8a7f5bdfa853\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9nvl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rfwnl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:52Z is after 2025-08-24T17:21:41Z" Jan 20 
16:42:52 crc kubenswrapper[4558]: I0120 16:42:52.918746 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:52Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:52 crc kubenswrapper[4558]: I0120 16:42:52.929572 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d96bp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8d2c8b-fb8e-4bff-b8d2-69570e5d9e57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe858db22acc96b062f1bc7941e05c823f30d1f9ba50bd497c57f38d57e04293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4v6jx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d96bp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:52Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:52 crc kubenswrapper[4558]: I0120 16:42:52.936725 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:52 crc kubenswrapper[4558]: I0120 16:42:52.936757 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:52 crc kubenswrapper[4558]: I0120 16:42:52.936766 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:52 crc kubenswrapper[4558]: I0120 16:42:52.936780 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:52 crc kubenswrapper[4558]: I0120 16:42:52.936788 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:52Z","lastTransitionTime":"2026-01-20T16:42:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:52 crc kubenswrapper[4558]: I0120 16:42:52.945288 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"254129cd-82fc-4162-b671-2434bc9e2972\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a6a184067bde115ef5e09fdd90e7dccfe89a9dc347af450998b91c068e7e803\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd8b93e5559d95511a1285a9aec9a7d72df6c330bcd1e1daf3e93f5494f70eeb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4264ef8ee49a8b172fb05ac5c9b0bd32350b5d9ae95293079dfeaf44f021b455\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0889cf371f8b06cc2d255c15fda97db1d5000d6f6cff29d2fdcb8667cede8a99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4265d3281e954ee5989ee008da28c6cb9fa29478e5fdf23325521d455304da9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://186a1d2caad8865abf7f565405c4a4c077038ccb67ba77927cc2392ec075a811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732574532
65a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d28c2f9293ff09b5e7d4ca4df1633cb114f685ad9a9dc8697e9efe3e9603259\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d28c2f9293ff09b5e7d4ca4df1633cb114f685ad9a9dc8697e9efe3e9603259\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-20T16:42:30Z\\\",\\\"message\\\":\\\"6215 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0120 16:42:30.148863 6215 obj_retry.go:365] Adding new object: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI0120 16:42:30.148994 6215 ovn.go:134] Ensuring zone local for Pod openshift-kube-apiserver/kube-apiserver-crc in node crc\\\\nI0120 16:42:30.149000 6215 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-apiserver/kube-apiserver-crc after 0 failed attempt(s)\\\\nI0120 16:42:30.149004 6215 default_network_controller.go:776] Recording success event on pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI0120 16:42:30.148969 6215 services_controller.go:445] Built service openshift-kube-storage-version-migrator-operator/metrics LB template configs for network=default: []services.lbConfig(nil)\\\\nI0120 16:42:30.149028 6215 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-operator/iptables-alerter-4ln5h\\\\nI0120 16:42:30.149009 6215 base_network_controller_pods.go:916] Annotation values: ip=[10.217.0.3/23] ; mac=0a:58:0a:d9:00:03 ; gw=[10.217.0.1]\\\\nF0120 16:42:30.149066 6215 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller ini\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:29Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed 
container=ovnkube-controller pod=ovnkube-node-nv2xw_openshift-ovn-kubernetes(254129cd-82fc-4162-b671-2434bc9e2972)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f41d488b0c24e594a2f5cad36b5f8efdb7d867fae0e18a74fe3c396a203d162f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\\\",\\\"image\\\":\
\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-nv2xw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:52Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:52 crc kubenswrapper[4558]: I0120 16:42:52.956850 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f5t7h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a318f03d0df490cb7bae11819c52eca011d163081aeecff1b2dd8f72caabbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"
ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e77617c69434ce20c448b525c7e86f1ece18d51ce6e14167b4ca7e99de8e5709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e77617c69434ce20c448b525c7e86f1ece18d51ce6e14167b4ca7e99de8e5709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://453c67434281e0261ec6799b30a548a01efd72d7df96632409e745b2e6a94546\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://453c67434281e0261ec6799b30a548a01efd72d7df96632409e745b2e6a94546\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"reason\\\":\\
\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06eab4daa6c8b6a564cf1bb1403b7b3b259a234ee09f23b4f4bc0628c2d58e69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06eab4daa6c8b6a564cf1bb1403b7b3b259a234ee09f23b4f4bc0628c2d58e69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1291ac09fc6ae2a79fdeaa0fc47272f61b97f45f67ed3f7c90bdfdbfe3519ff2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1291ac09fc6ae2a79fdeaa0fc47272f61b97f45f67ed3f7c90bdfdbfe3519ff2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe484514f50a14e485d4a97ea3ce9401c77f3f4dae4717719be7ed6c4d7b1a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"
,\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe484514f50a14e485d4a97ea3ce9401c77f3f4dae4717719be7ed6c4d7b1a59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f5t7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:52Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:52 crc kubenswrapper[4558]: I0120 16:42:52.965278 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9wrq6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30032328-bd33-4073-9366-e10bc5e2aa77\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6tz4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6tz4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:17Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9wrq6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:52Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:52 crc kubenswrapper[4558]: I0120 16:42:52.974411 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a13bfefa-f525-4ecd-89f5-f10480532bf6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9030e970e96b6909d771d4fa0c3b4f493b4a825cbc43ef7e684272284329c7c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://168e8f533aaa108e4160f9ae4af6bb7944edba7a596178ffffdc5325073fc3c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e0141aef1babb57501016cb09787591b5e6d9136b8d7dd4ba64393f0be5a027\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c87f08993c2b47c63ba9652c7dafe0fdbecdcb03a7b7080664ab1f1bd0e1d602\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:52Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:52 crc kubenswrapper[4558]: I0120 16:42:52.984253 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f904dd3e4b43e51569b6532f00fa2f9edf912c1f1969ee2ea00c99d1b7e35a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c877f4a456b3ce64bdbe6fefd50f1ef3f92bf9e26e6296005cf366e0ce265\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:52Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:52 crc kubenswrapper[4558]: I0120 16:42:52.993530 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e45cc56934a7b90e9d47f1c6d06d8dc140d57db4a216469e8ae112f398663e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:52Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:53 crc kubenswrapper[4558]: I0120 16:42:53.003538 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jsqvf" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bedf08c7-1f93-4931-a7f3-e729e2a137af\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2297634e20683413a5eb643517580c9486303dc9616a7588a4ac571f3d0e53b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e32b76495d88d356373f9389e163a2f3a72e202bb454c7015594c1f5a41b511\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-20T16:42:50Z\\\",\\\"message\\\":\\\"2026-01-20T16:42:05+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_149f95a3-037e-4113-9baf-ba51c18b4cf2\\\\n2026-01-20T16:42:05+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_149f95a3-037e-4113-9baf-ba51c18b4cf2 to /host/opt/cni/bin/\\\\n2026-01-20T16:42:05Z [verbose] multus-daemon started\\\\n2026-01-20T16:42:05Z [verbose] Readiness Indicator file check\\\\n2026-01-20T16:42:50Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xxtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jsqvf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:53Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:53 crc kubenswrapper[4558]: I0120 16:42:53.011151 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-47477" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e026445a-aa73-4a0e-ba35-e2e07de1278c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec2ee3ff161cba7e14ad26763081c9f092f0dfe969cef113631c27e0db53d157\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xcnwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-47477\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:53Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:53 crc kubenswrapper[4558]: I0120 16:42:53.038961 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:53 crc kubenswrapper[4558]: I0120 16:42:53.038994 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:53 crc kubenswrapper[4558]: I0120 16:42:53.039003 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:53 crc kubenswrapper[4558]: I0120 16:42:53.039016 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:53 crc kubenswrapper[4558]: I0120 16:42:53.039025 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:53Z","lastTransitionTime":"2026-01-20T16:42:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:53 crc kubenswrapper[4558]: I0120 16:42:53.141229 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:53 crc kubenswrapper[4558]: I0120 16:42:53.141260 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:53 crc kubenswrapper[4558]: I0120 16:42:53.141268 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:53 crc kubenswrapper[4558]: I0120 16:42:53.141281 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:53 crc kubenswrapper[4558]: I0120 16:42:53.141291 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:53Z","lastTransitionTime":"2026-01-20T16:42:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:53 crc kubenswrapper[4558]: I0120 16:42:53.242986 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:53 crc kubenswrapper[4558]: I0120 16:42:53.243050 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:53 crc kubenswrapper[4558]: I0120 16:42:53.243059 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:53 crc kubenswrapper[4558]: I0120 16:42:53.243075 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:53 crc kubenswrapper[4558]: I0120 16:42:53.243083 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:53Z","lastTransitionTime":"2026-01-20T16:42:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:53 crc kubenswrapper[4558]: I0120 16:42:53.345060 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:53 crc kubenswrapper[4558]: I0120 16:42:53.345095 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:53 crc kubenswrapper[4558]: I0120 16:42:53.345106 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:53 crc kubenswrapper[4558]: I0120 16:42:53.345120 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:53 crc kubenswrapper[4558]: I0120 16:42:53.345128 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:53Z","lastTransitionTime":"2026-01-20T16:42:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:53 crc kubenswrapper[4558]: I0120 16:42:53.446852 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:53 crc kubenswrapper[4558]: I0120 16:42:53.446892 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:53 crc kubenswrapper[4558]: I0120 16:42:53.446900 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:53 crc kubenswrapper[4558]: I0120 16:42:53.446913 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:53 crc kubenswrapper[4558]: I0120 16:42:53.446925 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:53Z","lastTransitionTime":"2026-01-20T16:42:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:53 crc kubenswrapper[4558]: I0120 16:42:53.549111 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:53 crc kubenswrapper[4558]: I0120 16:42:53.549149 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:53 crc kubenswrapper[4558]: I0120 16:42:53.549157 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:53 crc kubenswrapper[4558]: I0120 16:42:53.549199 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:53 crc kubenswrapper[4558]: I0120 16:42:53.549209 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:53Z","lastTransitionTime":"2026-01-20T16:42:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:53 crc kubenswrapper[4558]: I0120 16:42:53.565567 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9wrq6" Jan 20 16:42:53 crc kubenswrapper[4558]: E0120 16:42:53.565831 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9wrq6" podUID="30032328-bd33-4073-9366-e10bc5e2aa77" Jan 20 16:42:53 crc kubenswrapper[4558]: I0120 16:42:53.570046 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-11 08:00:17.022963858 +0000 UTC Jan 20 16:42:53 crc kubenswrapper[4558]: I0120 16:42:53.574323 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Jan 20 16:42:53 crc kubenswrapper[4558]: I0120 16:42:53.651315 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:53 crc kubenswrapper[4558]: I0120 16:42:53.651347 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:53 crc kubenswrapper[4558]: I0120 16:42:53.651355 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:53 crc kubenswrapper[4558]: I0120 16:42:53.651367 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:53 crc kubenswrapper[4558]: I0120 16:42:53.651374 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:53Z","lastTransitionTime":"2026-01-20T16:42:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:53 crc kubenswrapper[4558]: I0120 16:42:53.753237 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:53 crc kubenswrapper[4558]: I0120 16:42:53.753288 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:53 crc kubenswrapper[4558]: I0120 16:42:53.753297 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:53 crc kubenswrapper[4558]: I0120 16:42:53.753309 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:53 crc kubenswrapper[4558]: I0120 16:42:53.753318 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:53Z","lastTransitionTime":"2026-01-20T16:42:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:53 crc kubenswrapper[4558]: I0120 16:42:53.855249 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:53 crc kubenswrapper[4558]: I0120 16:42:53.855446 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:53 crc kubenswrapper[4558]: I0120 16:42:53.855510 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:53 crc kubenswrapper[4558]: I0120 16:42:53.855567 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:53 crc kubenswrapper[4558]: I0120 16:42:53.855648 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:53Z","lastTransitionTime":"2026-01-20T16:42:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:53 crc kubenswrapper[4558]: I0120 16:42:53.958216 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:53 crc kubenswrapper[4558]: I0120 16:42:53.958252 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:53 crc kubenswrapper[4558]: I0120 16:42:53.958261 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:53 crc kubenswrapper[4558]: I0120 16:42:53.958275 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:53 crc kubenswrapper[4558]: I0120 16:42:53.958283 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:53Z","lastTransitionTime":"2026-01-20T16:42:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:54 crc kubenswrapper[4558]: I0120 16:42:54.060260 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:54 crc kubenswrapper[4558]: I0120 16:42:54.060512 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:54 crc kubenswrapper[4558]: I0120 16:42:54.060617 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:54 crc kubenswrapper[4558]: I0120 16:42:54.060695 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:54 crc kubenswrapper[4558]: I0120 16:42:54.060755 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:54Z","lastTransitionTime":"2026-01-20T16:42:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:54 crc kubenswrapper[4558]: I0120 16:42:54.162521 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:54 crc kubenswrapper[4558]: I0120 16:42:54.162556 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:54 crc kubenswrapper[4558]: I0120 16:42:54.162565 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:54 crc kubenswrapper[4558]: I0120 16:42:54.162579 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:54 crc kubenswrapper[4558]: I0120 16:42:54.162605 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:54Z","lastTransitionTime":"2026-01-20T16:42:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:54 crc kubenswrapper[4558]: I0120 16:42:54.264637 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:54 crc kubenswrapper[4558]: I0120 16:42:54.264678 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:54 crc kubenswrapper[4558]: I0120 16:42:54.264688 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:54 crc kubenswrapper[4558]: I0120 16:42:54.264704 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:54 crc kubenswrapper[4558]: I0120 16:42:54.264713 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:54Z","lastTransitionTime":"2026-01-20T16:42:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:54 crc kubenswrapper[4558]: I0120 16:42:54.366825 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:54 crc kubenswrapper[4558]: I0120 16:42:54.366875 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:54 crc kubenswrapper[4558]: I0120 16:42:54.366885 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:54 crc kubenswrapper[4558]: I0120 16:42:54.366898 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:54 crc kubenswrapper[4558]: I0120 16:42:54.366906 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:54Z","lastTransitionTime":"2026-01-20T16:42:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:54 crc kubenswrapper[4558]: I0120 16:42:54.468809 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:54 crc kubenswrapper[4558]: I0120 16:42:54.468841 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:54 crc kubenswrapper[4558]: I0120 16:42:54.468849 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:54 crc kubenswrapper[4558]: I0120 16:42:54.468979 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:54 crc kubenswrapper[4558]: I0120 16:42:54.468989 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:54Z","lastTransitionTime":"2026-01-20T16:42:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:54 crc kubenswrapper[4558]: I0120 16:42:54.565309 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:42:54 crc kubenswrapper[4558]: I0120 16:42:54.565410 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 20 16:42:54 crc kubenswrapper[4558]: I0120 16:42:54.565345 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 20 16:42:54 crc kubenswrapper[4558]: E0120 16:42:54.565505 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 20 16:42:54 crc kubenswrapper[4558]: E0120 16:42:54.565618 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 20 16:42:54 crc kubenswrapper[4558]: E0120 16:42:54.565738 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 20 16:42:54 crc kubenswrapper[4558]: I0120 16:42:54.570097 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-13 13:23:02.240047738 +0000 UTC Jan 20 16:42:54 crc kubenswrapper[4558]: I0120 16:42:54.571228 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:54 crc kubenswrapper[4558]: I0120 16:42:54.571251 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:54 crc kubenswrapper[4558]: I0120 16:42:54.571259 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:54 crc kubenswrapper[4558]: I0120 16:42:54.571270 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:54 crc kubenswrapper[4558]: I0120 16:42:54.571278 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:54Z","lastTransitionTime":"2026-01-20T16:42:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:54 crc kubenswrapper[4558]: I0120 16:42:54.672789 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:54 crc kubenswrapper[4558]: I0120 16:42:54.672826 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:54 crc kubenswrapper[4558]: I0120 16:42:54.672834 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:54 crc kubenswrapper[4558]: I0120 16:42:54.672847 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:54 crc kubenswrapper[4558]: I0120 16:42:54.672855 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:54Z","lastTransitionTime":"2026-01-20T16:42:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:54 crc kubenswrapper[4558]: I0120 16:42:54.774583 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:54 crc kubenswrapper[4558]: I0120 16:42:54.774869 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:54 crc kubenswrapper[4558]: I0120 16:42:54.774971 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:54 crc kubenswrapper[4558]: I0120 16:42:54.775045 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:54 crc kubenswrapper[4558]: I0120 16:42:54.775115 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:54Z","lastTransitionTime":"2026-01-20T16:42:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:54 crc kubenswrapper[4558]: I0120 16:42:54.877450 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:54 crc kubenswrapper[4558]: I0120 16:42:54.877780 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:54 crc kubenswrapper[4558]: I0120 16:42:54.877921 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:54 crc kubenswrapper[4558]: I0120 16:42:54.877998 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:54 crc kubenswrapper[4558]: I0120 16:42:54.878056 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:54Z","lastTransitionTime":"2026-01-20T16:42:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:54 crc kubenswrapper[4558]: I0120 16:42:54.979582 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:54 crc kubenswrapper[4558]: I0120 16:42:54.979662 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:54 crc kubenswrapper[4558]: I0120 16:42:54.979671 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:54 crc kubenswrapper[4558]: I0120 16:42:54.979684 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:54 crc kubenswrapper[4558]: I0120 16:42:54.979692 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:54Z","lastTransitionTime":"2026-01-20T16:42:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:55 crc kubenswrapper[4558]: I0120 16:42:55.081549 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:55 crc kubenswrapper[4558]: I0120 16:42:55.081601 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:55 crc kubenswrapper[4558]: I0120 16:42:55.081610 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:55 crc kubenswrapper[4558]: I0120 16:42:55.081623 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:55 crc kubenswrapper[4558]: I0120 16:42:55.081633 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:55Z","lastTransitionTime":"2026-01-20T16:42:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:55 crc kubenswrapper[4558]: I0120 16:42:55.183238 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:55 crc kubenswrapper[4558]: I0120 16:42:55.183290 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:55 crc kubenswrapper[4558]: I0120 16:42:55.183298 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:55 crc kubenswrapper[4558]: I0120 16:42:55.183310 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:55 crc kubenswrapper[4558]: I0120 16:42:55.183319 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:55Z","lastTransitionTime":"2026-01-20T16:42:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:55 crc kubenswrapper[4558]: I0120 16:42:55.285133 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:55 crc kubenswrapper[4558]: I0120 16:42:55.285180 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:55 crc kubenswrapper[4558]: I0120 16:42:55.285191 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:55 crc kubenswrapper[4558]: I0120 16:42:55.285204 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:55 crc kubenswrapper[4558]: I0120 16:42:55.285211 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:55Z","lastTransitionTime":"2026-01-20T16:42:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:55 crc kubenswrapper[4558]: I0120 16:42:55.387184 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:55 crc kubenswrapper[4558]: I0120 16:42:55.387407 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:55 crc kubenswrapper[4558]: I0120 16:42:55.387477 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:55 crc kubenswrapper[4558]: I0120 16:42:55.387544 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:55 crc kubenswrapper[4558]: I0120 16:42:55.387623 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:55Z","lastTransitionTime":"2026-01-20T16:42:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:55 crc kubenswrapper[4558]: I0120 16:42:55.489624 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:55 crc kubenswrapper[4558]: I0120 16:42:55.489661 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:55 crc kubenswrapper[4558]: I0120 16:42:55.489669 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:55 crc kubenswrapper[4558]: I0120 16:42:55.489682 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:55 crc kubenswrapper[4558]: I0120 16:42:55.489690 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:55Z","lastTransitionTime":"2026-01-20T16:42:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:55 crc kubenswrapper[4558]: I0120 16:42:55.565846 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9wrq6" Jan 20 16:42:55 crc kubenswrapper[4558]: E0120 16:42:55.565961 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9wrq6" podUID="30032328-bd33-4073-9366-e10bc5e2aa77" Jan 20 16:42:55 crc kubenswrapper[4558]: I0120 16:42:55.570997 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-16 22:03:27.174605054 +0000 UTC Jan 20 16:42:55 crc kubenswrapper[4558]: I0120 16:42:55.591930 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:55 crc kubenswrapper[4558]: I0120 16:42:55.591981 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:55 crc kubenswrapper[4558]: I0120 16:42:55.591991 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:55 crc kubenswrapper[4558]: I0120 16:42:55.592002 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:55 crc kubenswrapper[4558]: I0120 16:42:55.592012 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:55Z","lastTransitionTime":"2026-01-20T16:42:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:55 crc kubenswrapper[4558]: I0120 16:42:55.694534 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:55 crc kubenswrapper[4558]: I0120 16:42:55.694563 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:55 crc kubenswrapper[4558]: I0120 16:42:55.694686 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:55 crc kubenswrapper[4558]: I0120 16:42:55.694700 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:55 crc kubenswrapper[4558]: I0120 16:42:55.694709 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:55Z","lastTransitionTime":"2026-01-20T16:42:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:55 crc kubenswrapper[4558]: I0120 16:42:55.796977 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:55 crc kubenswrapper[4558]: I0120 16:42:55.797011 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:55 crc kubenswrapper[4558]: I0120 16:42:55.797020 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:55 crc kubenswrapper[4558]: I0120 16:42:55.797032 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:55 crc kubenswrapper[4558]: I0120 16:42:55.797041 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:55Z","lastTransitionTime":"2026-01-20T16:42:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:55 crc kubenswrapper[4558]: I0120 16:42:55.899349 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:55 crc kubenswrapper[4558]: I0120 16:42:55.899396 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:55 crc kubenswrapper[4558]: I0120 16:42:55.899405 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:55 crc kubenswrapper[4558]: I0120 16:42:55.899418 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:55 crc kubenswrapper[4558]: I0120 16:42:55.899427 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:55Z","lastTransitionTime":"2026-01-20T16:42:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:56 crc kubenswrapper[4558]: I0120 16:42:56.001885 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:56 crc kubenswrapper[4558]: I0120 16:42:56.001919 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:56 crc kubenswrapper[4558]: I0120 16:42:56.001931 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:56 crc kubenswrapper[4558]: I0120 16:42:56.001944 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:56 crc kubenswrapper[4558]: I0120 16:42:56.001953 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:56Z","lastTransitionTime":"2026-01-20T16:42:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:56 crc kubenswrapper[4558]: I0120 16:42:56.103897 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:56 crc kubenswrapper[4558]: I0120 16:42:56.103934 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:56 crc kubenswrapper[4558]: I0120 16:42:56.103942 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:56 crc kubenswrapper[4558]: I0120 16:42:56.103956 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:56 crc kubenswrapper[4558]: I0120 16:42:56.103964 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:56Z","lastTransitionTime":"2026-01-20T16:42:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:56 crc kubenswrapper[4558]: I0120 16:42:56.205538 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:56 crc kubenswrapper[4558]: I0120 16:42:56.205567 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:56 crc kubenswrapper[4558]: I0120 16:42:56.205577 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:56 crc kubenswrapper[4558]: I0120 16:42:56.205603 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:56 crc kubenswrapper[4558]: I0120 16:42:56.205611 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:56Z","lastTransitionTime":"2026-01-20T16:42:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:56 crc kubenswrapper[4558]: I0120 16:42:56.307050 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:56 crc kubenswrapper[4558]: I0120 16:42:56.307079 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:56 crc kubenswrapper[4558]: I0120 16:42:56.307087 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:56 crc kubenswrapper[4558]: I0120 16:42:56.307097 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:56 crc kubenswrapper[4558]: I0120 16:42:56.307104 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:56Z","lastTransitionTime":"2026-01-20T16:42:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:56 crc kubenswrapper[4558]: I0120 16:42:56.409190 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:56 crc kubenswrapper[4558]: I0120 16:42:56.409217 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:56 crc kubenswrapper[4558]: I0120 16:42:56.409225 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:56 crc kubenswrapper[4558]: I0120 16:42:56.409248 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:56 crc kubenswrapper[4558]: I0120 16:42:56.409256 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:56Z","lastTransitionTime":"2026-01-20T16:42:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:56 crc kubenswrapper[4558]: I0120 16:42:56.511733 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:56 crc kubenswrapper[4558]: I0120 16:42:56.511773 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:56 crc kubenswrapper[4558]: I0120 16:42:56.511783 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:56 crc kubenswrapper[4558]: I0120 16:42:56.511797 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:56 crc kubenswrapper[4558]: I0120 16:42:56.511806 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:56Z","lastTransitionTime":"2026-01-20T16:42:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:56 crc kubenswrapper[4558]: I0120 16:42:56.564938 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:42:56 crc kubenswrapper[4558]: I0120 16:42:56.564965 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 20 16:42:56 crc kubenswrapper[4558]: E0120 16:42:56.565050 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 20 16:42:56 crc kubenswrapper[4558]: I0120 16:42:56.565120 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 20 16:42:56 crc kubenswrapper[4558]: E0120 16:42:56.565253 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 20 16:42:56 crc kubenswrapper[4558]: E0120 16:42:56.565283 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 20 16:42:56 crc kubenswrapper[4558]: I0120 16:42:56.571932 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-23 15:29:45.846912374 +0000 UTC Jan 20 16:42:56 crc kubenswrapper[4558]: I0120 16:42:56.576852 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30e1ff9d-8dcd-4754-9a7a-c09598fb83db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ea68e442bb2143fbaa5e2c9fe335c6b023a31c2eeaa78d60de45c6ef40c2894\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a8122c3b57ac4f0026df149fe622450d07d8b0d517ad018decd7b21d8d9c04a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/
crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7444e8fea1853bae45dbd3d52d07e3f7b94314cd29fe8c99891e515a892e569e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6438ffea42f98178c4c2c21ae01725a8b54d6a6f790f47d64faf5ecd9f1c00f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2e22f1d4e87dd06a67f49aedad280da48ac1c4ba2e438a4925cf8bd5330c4d8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0120 16:41:58.859734 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0120 16:41:58.861094 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179835226/tls.crt::/tmp/serving-cert-2179835226/tls.key\\\\\\\"\\\\nI0120 16:42:04.205247 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0120 16:42:04.207291 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0120 16:42:04.207308 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0120 16:42:04.207329 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0120 16:42:04.207333 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0120 16:42:04.210615 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0120 16:42:04.210632 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0120 16:42:04.210641 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210648 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 
16:42:04.210652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0120 16:42:04.210655 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0120 16:42:04.210658 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0120 16:42:04.210660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0120 16:42:04.211964 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4850bde1fb538e21fc91949dc584ef0a791d718844691a44559e0513bb36203\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:56Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:56 crc kubenswrapper[4558]: I0120 16:42:56.585775 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"68337d27-3fa6-4a29-88b0-82e60c3739eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de7f7c23993a57d30f08686e1cd4abb5b17e108ac12dd0c7929a31574c69eeb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18a59ecb802507e0d5988eacc915fd7e0d6954518de80f61162c97f680fd8c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:56Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:56 crc kubenswrapper[4558]: I0120 16:42:56.595385 4558 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f10d97b3-5765-454b-b306-1ef544be2c86\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ba1f84f7ea0577350c7025aeb96fb1b329297f4d6767727f9a4e6659d3027cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2f374f27360645aa18b003d1b2415cd498c17c73812950904109dd4b77cbd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ac2f374f27360645aa18b003d1b2415cd498c17c73812950904109dd4b77cbd5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:56Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:56 crc kubenswrapper[4558]: I0120 16:42:56.611109 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"425eab90-fb70-4498-a98b-b95742033890\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48592e72d91cf527296f06e52fec1c16c1da21323c0644659945af109a74ebb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7e18f59b5e6b6c0be141b7efb3ccb1df4ff6e3c394bcc88fb1142c8df433493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c889e2b0e14244ef49ec057e3d5fd91687e335a983fdc99a6816abd2af643ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d434793571cbba089cc34029d951bee9063aca5
8728b791f2d1af2d68229f778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89f7ad577df11c509855a90ac8791da6de3f0d2857a1ca56726a6327f40455bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15eddb9054021\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15eddb9054021\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:56Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:56 crc kubenswrapper[4558]: I0120 16:42:56.613358 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:56 crc kubenswrapper[4558]: I0120 16:42:56.613379 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:56 crc kubenswrapper[4558]: I0120 16:42:56.613387 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:56 crc kubenswrapper[4558]: I0120 16:42:56.613401 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:56 crc kubenswrapper[4558]: I0120 16:42:56.613410 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:56Z","lastTransitionTime":"2026-01-20T16:42:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:56 crc kubenswrapper[4558]: I0120 16:42:56.624093 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:56Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:56 crc kubenswrapper[4558]: I0120 16:42:56.633267 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:56Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:56 crc kubenswrapper[4558]: I0120 16:42:56.641400 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfwnl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8fe5472-7373-4e5d-87fc-a9ecaf26a5cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38cd58ecc67d337df2c29b79e5460ea23d4acc1e66de8b08ea70d7ab7b30691d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9nvl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a14e9281ff19e324499722aefadf1e1f2fd005089c153918ad4d8a7f5bdfa853\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9nvl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rfwnl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:56Z is after 2025-08-24T17:21:41Z" Jan 20 
16:42:56 crc kubenswrapper[4558]: I0120 16:42:56.649708 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60aafae7-2e6e-4d21-9ce6-8ff5382ef642\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d16b973c217656d6a8ceaf684e9dec82daeb583209943f6b2979be47ab03fda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb2e7753f75b10088eea9204511ae78e4be9a2b5be8c17ed04c3c972b13f8190\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a426b97002a49938b18009db49867dc9a15102707b42ec46bc64093c0ed3c2c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.
126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e4df1c3b2a1aef9adb3da64f89a1b8c0904fd22882609dbafe2fbc5718b8016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e4df1c3b2a1aef9adb3da64f89a1b8c0904fd22882609dbafe2fbc5718b8016\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:56Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:56 crc kubenswrapper[4558]: I0120 16:42:56.658946 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e190c24c1d8a3bc0241334598ce23829ada9afee282d49a0c82cd67ad82bcd02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:56Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:56 crc kubenswrapper[4558]: I0120 16:42:56.671770 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"254129cd-82fc-4162-b671-2434bc9e2972\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a6a184067bde115ef5e09fdd90e7dccfe89a9dc347af450998b91c068e7e803\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd8b93e5559d95511a1285a9aec9a7d72df6c330bcd1e1daf3e93f5494f70eeb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4264ef8ee49a8b172fb05ac5c9b0bd32350b5d9ae95293079dfeaf44f021b455\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0889cf371f8b06cc2d255c15fda97db1d5000d6f6cff29d2fdcb8667cede8a99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4265d3281e954ee5989ee008da28c6cb9fa29478e5fdf23325521d455304da9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://186a1
d2caad8865abf7f565405c4a4c077038ccb67ba77927cc2392ec075a811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d28c2f9293ff09b5e7d4ca4df1633cb114f685ad9a9dc8697e9efe3e9603259\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d28c2f9293ff09b5e7d4ca4df1633cb114f685ad9a9dc8697e9efe3e9603259\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-20T16:42:30Z\\\",\\\"message\\\":\\\"6215 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0120 16:42:30.148863 6215 obj_retry.go:365] Adding new object: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI0120 16:42:30.148994 6215 ovn.go:134] Ensuring zone local for Pod openshift-kube-apiserver/kube-apiserver-crc in node crc\\\\nI0120 16:42:30.149000 6215 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-apiserver/kube-apiserver-crc after 0 failed attempt(s)\\\\nI0120 16:42:30.149004 6215 default_network_controller.go:776] Recording success event on pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI0120 16:42:30.148969 6215 services_controller.go:445] Built service openshift-kube-storage-version-migrator-operator/metrics LB template configs for network=default: []services.lbConfig(nil)\\\\nI0120 16:42:30.149028 6215 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-operator/iptables-alerter-4ln5h\\\\nI0120 16:42:30.149009 6215 base_network_controller_pods.go:916] Annotation values: ip=[10.217.0.3/23] ; mac=0a:58:0a:d9:00:03 ; gw=[10.217.0.1]\\\\nF0120 16:42:30.149066 6215 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller 
ini\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:29Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-nv2xw_openshift-ovn-kubernetes(254129cd-82fc-4162-b671-2434bc9e2972)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f41d488b0c24e594a2f5cad36b5f8efdb7d867fae0e18a74fe3c396a203d162f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursive
ReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-nv2xw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:56Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:56 crc kubenswrapper[4558]: I0120 16:42:56.682230 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f5t7h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a318f03d0df490cb7bae11819c52eca011d163081aeecff1b2dd8f72caabbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e77617c69434ce20c448b525c7e86f1ece18d51ce6e14167b4ca7e99de8e5709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e77617c69434ce20c448b525c7e86f1ece18d51ce6e14167b4ca7e99de8e5709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://453c67434281e0261ec6799b30a548a01efd72d7df96632409e745b2e6a94546\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://453c67434281e0261ec6799b30a548a01efd72d7df96632409e745b2e6a94546\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06eab4daa6c8b6a564cf1bb1403b7b3b259a234ee09f23b4f4bc0628c2d58e69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06eab4daa6c8b6a564cf1bb1403b7b3b259a234ee09f23b4f4bc0628c2d58e69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1291ac09fc6ae2a79fdeaa0fc47272f61b97f45f67ed3f7c90bdfdbfe3519ff2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1291ac09fc6ae2a79fdeaa0fc47272f61b97f45f67ed3f7c90bdfdbfe3519ff2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe484514f50a14e485d4a97ea3ce9401c77f3f4dae4717719be7ed6c4d7b1a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe484514f50a14e485d4a97ea3ce9401c77f3f4dae4717719be7ed6c4d7b1a59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f5t7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:56Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:56 crc kubenswrapper[4558]: I0120 16:42:56.690015 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9wrq6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"30032328-bd33-4073-9366-e10bc5e2aa77\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6tz4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6tz4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:17Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9wrq6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:56Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:56 crc kubenswrapper[4558]: I0120 16:42:56.699467 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:56Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:56 crc kubenswrapper[4558]: I0120 16:42:56.707010 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d96bp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8d2c8b-fb8e-4bff-b8d2-69570e5d9e57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe858db22acc96b062f1bc7941e05c823f30d1f9ba50bd497c57f38d57e04293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4v6jx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d96bp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:56Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:56 crc kubenswrapper[4558]: I0120 16:42:56.714482 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e45cc56934a7b90e9d47f1c6d06d8dc140d57db4a216469e8ae112f398663e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:56Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:56 crc kubenswrapper[4558]: I0120 16:42:56.715607 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:56 crc kubenswrapper[4558]: I0120 16:42:56.715627 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:56 crc kubenswrapper[4558]: I0120 16:42:56.715634 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:56 crc kubenswrapper[4558]: I0120 16:42:56.715646 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:56 crc kubenswrapper[4558]: I0120 16:42:56.715653 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:56Z","lastTransitionTime":"2026-01-20T16:42:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:56 crc kubenswrapper[4558]: I0120 16:42:56.722835 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jsqvf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bedf08c7-1f93-4931-a7f3-e729e2a137af\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2297634e20683413a5eb643517580c9486303dc9616a7588a4ac571f3d0e53b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e32b76495d88d356373f9389e163a2f3a72e202bb454c7015594c1f5a41b511\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-20T16:42:50Z\\\",\\\"message\\\":\\\"2026-01-20T16:42:05+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_149f95a3-037e-4113-9baf-ba51c18b4cf2\\\\n2026-01-20T16:42:05+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_149f95a3-037e-4113-9baf-ba51c18b4cf2 to /host/opt/cni/bin/\\\\n2026-01-20T16:42:05Z [verbose] multus-daemon started\\\\n2026-01-20T16:42:05Z [verbose] Readiness Indicator file check\\\\n2026-01-20T16:42:50Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xxtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jsqvf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:56Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:56 crc kubenswrapper[4558]: I0120 16:42:56.729508 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-47477" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e026445a-aa73-4a0e-ba35-e2e07de1278c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec2ee3ff161cba7e14ad26763081c9f092f0dfe969cef113631c27e0db53d157\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xcnwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-47477\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:56Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:56 crc kubenswrapper[4558]: I0120 16:42:56.737433 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a13bfefa-f525-4ecd-89f5-f10480532bf6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9030e970e96b6909d771d4fa0c3b4f493b4a825cbc43ef7e684272284329c7c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://168e8f533aaa108e4160f9ae4af6bb7944edba7a596178ffffdc5325073fc3c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e0141aef1babb57501016cb09787591b5e6d9136b8d7dd4ba64393f0be5a027\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c87f08993c2b47c63ba9652c7dafe0fdbecdcb03a7b7080664ab1f1bd0e1d602\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:56Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:56 crc kubenswrapper[4558]: I0120 16:42:56.745281 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f904dd3e4b43e51569b6532f00fa2f9edf912c1f1969ee2ea00c99d1b7e35a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c877f4a456b3ce64bdbe6fefd50f1ef3f92bf9e26e6296005cf366e0ce265\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:56Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:56 crc kubenswrapper[4558]: I0120 16:42:56.817068 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:56 crc kubenswrapper[4558]: I0120 16:42:56.817098 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:56 crc kubenswrapper[4558]: I0120 16:42:56.817107 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:56 crc kubenswrapper[4558]: I0120 16:42:56.817119 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:56 crc kubenswrapper[4558]: I0120 16:42:56.817127 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:56Z","lastTransitionTime":"2026-01-20T16:42:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:56 crc kubenswrapper[4558]: I0120 16:42:56.918904 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:56 crc kubenswrapper[4558]: I0120 16:42:56.919129 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:56 crc kubenswrapper[4558]: I0120 16:42:56.919240 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:56 crc kubenswrapper[4558]: I0120 16:42:56.919310 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:56 crc kubenswrapper[4558]: I0120 16:42:56.919368 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:56Z","lastTransitionTime":"2026-01-20T16:42:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:57 crc kubenswrapper[4558]: I0120 16:42:57.020733 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:57 crc kubenswrapper[4558]: I0120 16:42:57.020774 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:57 crc kubenswrapper[4558]: I0120 16:42:57.020783 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:57 crc kubenswrapper[4558]: I0120 16:42:57.020795 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:57 crc kubenswrapper[4558]: I0120 16:42:57.020803 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:57Z","lastTransitionTime":"2026-01-20T16:42:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:57 crc kubenswrapper[4558]: I0120 16:42:57.123118 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:57 crc kubenswrapper[4558]: I0120 16:42:57.123156 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:57 crc kubenswrapper[4558]: I0120 16:42:57.123183 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:57 crc kubenswrapper[4558]: I0120 16:42:57.123196 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:57 crc kubenswrapper[4558]: I0120 16:42:57.123204 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:57Z","lastTransitionTime":"2026-01-20T16:42:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:57 crc kubenswrapper[4558]: I0120 16:42:57.225041 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:57 crc kubenswrapper[4558]: I0120 16:42:57.225082 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:57 crc kubenswrapper[4558]: I0120 16:42:57.225091 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:57 crc kubenswrapper[4558]: I0120 16:42:57.225104 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:57 crc kubenswrapper[4558]: I0120 16:42:57.225111 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:57Z","lastTransitionTime":"2026-01-20T16:42:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:57 crc kubenswrapper[4558]: I0120 16:42:57.326917 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:57 crc kubenswrapper[4558]: I0120 16:42:57.327105 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:57 crc kubenswrapper[4558]: I0120 16:42:57.327193 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:57 crc kubenswrapper[4558]: I0120 16:42:57.327274 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:57 crc kubenswrapper[4558]: I0120 16:42:57.327337 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:57Z","lastTransitionTime":"2026-01-20T16:42:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:57 crc kubenswrapper[4558]: I0120 16:42:57.429819 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:57 crc kubenswrapper[4558]: I0120 16:42:57.429852 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:57 crc kubenswrapper[4558]: I0120 16:42:57.429860 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:57 crc kubenswrapper[4558]: I0120 16:42:57.429875 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:57 crc kubenswrapper[4558]: I0120 16:42:57.429883 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:57Z","lastTransitionTime":"2026-01-20T16:42:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:57 crc kubenswrapper[4558]: I0120 16:42:57.532144 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:57 crc kubenswrapper[4558]: I0120 16:42:57.532201 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:57 crc kubenswrapper[4558]: I0120 16:42:57.532210 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:57 crc kubenswrapper[4558]: I0120 16:42:57.532248 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:57 crc kubenswrapper[4558]: I0120 16:42:57.532256 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:57Z","lastTransitionTime":"2026-01-20T16:42:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:57 crc kubenswrapper[4558]: I0120 16:42:57.565609 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9wrq6" Jan 20 16:42:57 crc kubenswrapper[4558]: E0120 16:42:57.565735 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9wrq6" podUID="30032328-bd33-4073-9366-e10bc5e2aa77" Jan 20 16:42:57 crc kubenswrapper[4558]: I0120 16:42:57.566158 4558 scope.go:117] "RemoveContainer" containerID="3d28c2f9293ff09b5e7d4ca4df1633cb114f685ad9a9dc8697e9efe3e9603259" Jan 20 16:42:57 crc kubenswrapper[4558]: I0120 16:42:57.572149 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-27 08:36:22.34358152 +0000 UTC Jan 20 16:42:57 crc kubenswrapper[4558]: I0120 16:42:57.634678 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:57 crc kubenswrapper[4558]: I0120 16:42:57.634709 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:57 crc kubenswrapper[4558]: I0120 16:42:57.634718 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:57 crc kubenswrapper[4558]: I0120 16:42:57.634729 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:57 crc kubenswrapper[4558]: I0120 16:42:57.634737 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:57Z","lastTransitionTime":"2026-01-20T16:42:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:57 crc kubenswrapper[4558]: I0120 16:42:57.736930 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:57 crc kubenswrapper[4558]: I0120 16:42:57.736961 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:57 crc kubenswrapper[4558]: I0120 16:42:57.736969 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:57 crc kubenswrapper[4558]: I0120 16:42:57.736982 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:57 crc kubenswrapper[4558]: I0120 16:42:57.736991 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:57Z","lastTransitionTime":"2026-01-20T16:42:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:57 crc kubenswrapper[4558]: I0120 16:42:57.828729 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-nv2xw_254129cd-82fc-4162-b671-2434bc9e2972/ovnkube-controller/2.log" Jan 20 16:42:57 crc kubenswrapper[4558]: I0120 16:42:57.830637 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" event={"ID":"254129cd-82fc-4162-b671-2434bc9e2972","Type":"ContainerStarted","Data":"0e9f4ed928d20c9db64a689d7d14df9b81870bb0523cf5098e09d2d85aad9fca"} Jan 20 16:42:57 crc kubenswrapper[4558]: I0120 16:42:57.831363 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" Jan 20 16:42:57 crc kubenswrapper[4558]: I0120 16:42:57.838307 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:57 crc kubenswrapper[4558]: I0120 16:42:57.838327 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:57 crc kubenswrapper[4558]: I0120 16:42:57.838335 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:57 crc kubenswrapper[4558]: I0120 16:42:57.838346 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:57 crc kubenswrapper[4558]: I0120 16:42:57.838354 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:57Z","lastTransitionTime":"2026-01-20T16:42:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:57 crc kubenswrapper[4558]: I0120 16:42:57.845942 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9wrq6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30032328-bd33-4073-9366-e10bc5e2aa77\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6tz4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6tz4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:17Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9wrq6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:57Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:57 crc kubenswrapper[4558]: I0120 16:42:57.858688 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:57Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:57 crc kubenswrapper[4558]: I0120 16:42:57.866798 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d96bp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8d2c8b-fb8e-4bff-b8d2-69570e5d9e57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe858db22acc96b062f1bc7941e05c823f30d1f9ba50bd497c57f38d57e04293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4v6jx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d96bp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:57Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:57 crc kubenswrapper[4558]: I0120 16:42:57.880456 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"254129cd-82fc-4162-b671-2434bc9e2972\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a6a184067bde115ef5e09fdd90e7dccfe89a9dc347af450998b91c068e7e803\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd8b93e5559d95511a1285a9aec9a7d72df6c330bcd1e1daf3e93f5494f70eeb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4264ef8ee49a8b172fb05ac5c9b0bd32350b5d9ae95293079dfeaf44f021b455\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0889cf371f8b06cc2d255c15fda97db1d5000d6f6cff29d2fdcb8667cede8a99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4265d3281e954ee5989ee008da28c6cb9fa29478e5fdf23325521d455304da9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://186a1d2caad8865abf7f565405c4a4c077038ccb67ba77927cc2392ec075a811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e9f4ed928d20c9db64a689d7d14df9b81870bb0523cf5098e09d2d85aad9fca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d28c2f9293ff09b5e7d4ca4df1633cb114f685ad9a9dc8697e9efe3e9603259\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-20T16:42:30Z\\\",\\\"message\\\":\\\"6215 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0120 16:42:30.148863 6215 obj_retry.go:365] Adding new object: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI0120 16:42:30.148994 6215 ovn.go:134] Ensuring zone local for Pod openshift-kube-apiserver/kube-apiserver-crc in node crc\\\\nI0120 16:42:30.149000 6215 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-apiserver/kube-apiserver-crc after 0 failed attempt(s)\\\\nI0120 16:42:30.149004 6215 default_network_controller.go:776] Recording success event on pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI0120 16:42:30.148969 6215 services_controller.go:445] Built service openshift-kube-storage-version-migrator-operator/metrics LB template configs for network=default: []services.lbConfig(nil)\\\\nI0120 16:42:30.149028 6215 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-operator/iptables-alerter-4ln5h\\\\nI0120 16:42:30.149009 6215 base_network_controller_pods.go:916] Annotation values: ip=[10.217.0.3/23] ; mac=0a:58:0a:d9:00:03 ; gw=[10.217.0.1]\\\\nF0120 16:42:30.149066 6215 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller 
ini\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:29Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f41d488b0c24e594a2f5cad36b5f8efdb7d867fae0e18a74fe3c396a203d162f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"co
ntainerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-nv2xw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:57Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:57 crc kubenswrapper[4558]: I0120 16:42:57.890843 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f5t7h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a318f03d0df490cb7bae11819c52eca011d163081aeecff1b2dd8f72caabbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e77617c69434ce20c448b525c7e86f1ece18d51ce6e14167b4ca7e99de8e5709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e77617c69434ce20c448b525c7e86f1ece18d51ce6e14167b4ca7e99de8e5709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://453c67434281e0261ec6799b30a548a01efd72d7df96632409e745b2e6a94546\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://453c67434281e0261ec6799b30a548a01efd72d7df96632409e745b2e6a94546\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06eab4daa6c8b6a564cf1bb1403b7b3b259a234ee09f23b4f4bc0628c2d58e69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06eab4daa6c8b6a564cf1bb1403b7b3b259a234ee09f23b4f4bc0628c2d58e69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1291ac09fc6ae2a79fdeaa0fc47272f61b97f45f67ed3f7c90bdfdbfe3519ff2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1291ac09fc6ae2a79fdeaa0fc47272f61b97f45f67ed3f7c90bdfdbfe3519ff2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe484514f50a14e485d4a97ea3ce9401c77f3f4dae4717719be7ed6c4d7b1a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe484514f50a14e485d4a97ea3ce9401c77f3f4dae4717719be7ed6c4d7b1a59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f5t7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:57Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:57 crc kubenswrapper[4558]: I0120 16:42:57.898156 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-47477" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e026445a-aa73-4a0e-ba35-e2e07de1278c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec2ee3ff161cba7e14ad26763081c9f092f0dfe969cef113631c27e0db53d157\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xcnwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-47477\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:57Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:57 crc kubenswrapper[4558]: I0120 16:42:57.906835 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a13bfefa-f525-4ecd-89f5-f10480532bf6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9030e970e96b6909d771d4fa0c3b4f493b4a825cbc43ef7e684272284329c7c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://168e8f533aaa108e4160f9ae4af6bb7944edba7a596178ffffdc5325073fc3c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e0141aef1babb57501016cb09787591b5e6d9136b8d7dd4ba64393f0be5a027\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c87f08993c2b47c63ba9652c7dafe0fdbecdcb03a7b7080664ab1f1bd0e1d602\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:57Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:57 crc kubenswrapper[4558]: I0120 16:42:57.916042 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f904dd3e4b43e51569b6532f00fa2f9edf912c1f1969ee2ea00c99d1b7e35a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c877f4a456b3ce64bdbe6fefd50f1ef3f92bf9e26e6296005cf366e0ce265\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:57Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:57 crc kubenswrapper[4558]: I0120 16:42:57.928133 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e45cc56934a7b90e9d47f1c6d06d8dc140d57db4a216469e8ae112f398663e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:57Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:57 crc kubenswrapper[4558]: I0120 16:42:57.936984 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jsqvf" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bedf08c7-1f93-4931-a7f3-e729e2a137af\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2297634e20683413a5eb643517580c9486303dc9616a7588a4ac571f3d0e53b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e32b76495d88d356373f9389e163a2f3a72e202bb454c7015594c1f5a41b511\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-20T16:42:50Z\\\",\\\"message\\\":\\\"2026-01-20T16:42:05+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_149f95a3-037e-4113-9baf-ba51c18b4cf2\\\\n2026-01-20T16:42:05+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_149f95a3-037e-4113-9baf-ba51c18b4cf2 to /host/opt/cni/bin/\\\\n2026-01-20T16:42:05Z [verbose] multus-daemon started\\\\n2026-01-20T16:42:05Z [verbose] Readiness Indicator file check\\\\n2026-01-20T16:42:50Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xxtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jsqvf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:57Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:57 crc kubenswrapper[4558]: I0120 16:42:57.939725 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:57 crc kubenswrapper[4558]: I0120 16:42:57.939748 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:57 crc kubenswrapper[4558]: I0120 16:42:57.939758 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:57 crc kubenswrapper[4558]: I0120 16:42:57.939771 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:57 crc kubenswrapper[4558]: I0120 16:42:57.939779 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:57Z","lastTransitionTime":"2026-01-20T16:42:57Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:57 crc kubenswrapper[4558]: I0120 16:42:57.944744 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f10d97b3-5765-454b-b306-1ef544be2c86\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ba1f84f7ea0577350c7025aeb96fb1b329297f4d6767727f9a4e6659d3027cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2f374f27360645aa18b003d1b2415cd498c17c73812950904109dd4b77cbd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ac2f374f27360645aa18b003d1b2415cd498c17c73812950904109dd4b77cbd5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2026-01-20T16:42:57Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:57 crc kubenswrapper[4558]: I0120 16:42:57.957187 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"425eab90-fb70-4498-a98b-b95742033890\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48592e72d91cf527296f06e52fec1c16c1da21323c0644659945af109a74ebb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7e18f59b5e6b6c0be141b7efb3ccb1df4ff6e3c394bcc88fb1142c8df433493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c889e2b0e14244ef49ec057e3d5fd91687e335a983fdc99a6816abd2af643ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-2
0T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d434793571cbba089cc34029d951bee9063aca58728b791f2d1af2d68229f778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89f7ad577df11c509855a90ac8791da6de3f0d2857a1ca56726a6327f40455bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15eddb9054021\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba
4c53c1ed15eddb9054021\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:57Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:57 crc kubenswrapper[4558]: I0120 16:42:57.967571 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"30e1ff9d-8dcd-4754-9a7a-c09598fb83db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ea68e442bb2143fbaa5e2c9fe335c6b023a31c2eeaa78d60de45c6ef40c2894\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a8122c3b57ac4f0026df149fe622450d07d8b0d517ad018decd7b21d8d9c04a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7444e8fea1853bae45dbd3d52d07e3f7b94314cd29fe8c99891e515a892e569e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6438ffea42f98178c4c2c21ae01725a8b54d6a6f790f47d64faf5ecd9f1c00f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2e22f1d4e87dd06a67f49aedad280da48ac1c4ba2e438a4925cf8bd5330c4d8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0120 16:41:58.859734 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0120 16:41:58.861094 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179835226/tls.crt::/tmp/serving-cert-2179835226/tls.key\\\\\\\"\\\\nI0120 16:42:04.205247 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0120 16:42:04.207291 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0120 16:42:04.207308 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0120 16:42:04.207329 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0120 16:42:04.207333 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0120 16:42:04.210615 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0120 16:42:04.210632 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0120 16:42:04.210641 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210648 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0120 16:42:04.210655 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0120 16:42:04.210658 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0120 16:42:04.210660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0120 16:42:04.211964 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4850bde1fb538e21fc91949dc584ef0a791d718844691a44559e0513bb36203\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:57Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:57 crc kubenswrapper[4558]: I0120 16:42:57.975632 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"68337d27-3fa6-4a29-88b0-82e60c3739eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de7f7c23993a57d30f08686e1cd4abb5b17e108ac12dd0c7929a31574c69eeb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18a59ecb802507e0d5988eacc915fd7e0d6954518de80f61162c97f680fd8c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:57Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:57 crc kubenswrapper[4558]: I0120 16:42:57.982599 4558 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfwnl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8fe5472-7373-4e5d-87fc-a9ecaf26a5cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38cd58ecc67d337df2c29b79e5460ea23d4acc1e66de8b08ea70d7ab7b30691d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9nvl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a14e9281ff19e324499722aefadf1e1f2fd005089c153918ad4d8a7f5bdfa853\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9nvl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rfwnl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:57Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:57 crc kubenswrapper[4558]: I0120 16:42:57.990349 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60aafae7-2e6e-4d21-9ce6-8ff5382ef642\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d16b973c217656d6a8ceaf684e9dec82daeb583209943f6b2979be47ab03fda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb2e7753f75b10088eea9204511ae78e4be9a2b5be8c17ed04c3c972b13f8190\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a426b97002a49938b18009db49867dc9a15102707b42ec46bc64093c0ed3c2c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"
},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e4df1c3b2a1aef9adb3da64f89a1b8c0904fd22882609dbafe2fbc5718b8016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e4df1c3b2a1aef9adb3da64f89a1b8c0904fd22882609dbafe2fbc5718b8016\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:57Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.000812 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e190c24c1d8a3bc0241334598ce23829ada9afee282d49a0c82cd67ad82bcd02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:57Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.011567 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:58Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.020601 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:58Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.042120 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.042158 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.042179 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.042195 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.042205 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:58Z","lastTransitionTime":"2026-01-20T16:42:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.143868 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.143903 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.143911 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.143924 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.143933 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:58Z","lastTransitionTime":"2026-01-20T16:42:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.245869 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.245910 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.245920 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.245933 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.245944 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:58Z","lastTransitionTime":"2026-01-20T16:42:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.347728 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.347778 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.347786 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.347799 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.347808 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:58Z","lastTransitionTime":"2026-01-20T16:42:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.449969 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.450008 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.450021 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.450036 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.450045 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:58Z","lastTransitionTime":"2026-01-20T16:42:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.552103 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.552139 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.552147 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.552176 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.552185 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:58Z","lastTransitionTime":"2026-01-20T16:42:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.565367 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.565398 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 20 16:42:58 crc kubenswrapper[4558]: E0120 16:42:58.565455 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.565467 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 20 16:42:58 crc kubenswrapper[4558]: E0120 16:42:58.565536 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 20 16:42:58 crc kubenswrapper[4558]: E0120 16:42:58.565607 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.572560 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-10 04:07:18.740433727 +0000 UTC Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.654507 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.654549 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.654558 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.654571 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.654579 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:58Z","lastTransitionTime":"2026-01-20T16:42:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.756878 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.756926 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.756935 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.756948 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.756956 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:58Z","lastTransitionTime":"2026-01-20T16:42:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.794869 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.794902 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.794912 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.794924 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.794932 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:58Z","lastTransitionTime":"2026-01-20T16:42:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:58 crc kubenswrapper[4558]: E0120 16:42:58.804198 4558 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"bf027402-7a62-478b-8585-220c98495823\\\",\\\"systemUUID\\\":\\\"1def282c-e24e-4bc0-9a1b-d7cc30756d3d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:58Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.807240 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.807279 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.807290 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.807302 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.807310 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:58Z","lastTransitionTime":"2026-01-20T16:42:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:58 crc kubenswrapper[4558]: E0120 16:42:58.816123 4558 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"bf027402-7a62-478b-8585-220c98495823\\\",\\\"systemUUID\\\":\\\"1def282c-e24e-4bc0-9a1b-d7cc30756d3d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:58Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.818652 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.818677 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.818684 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.818696 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.818705 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:58Z","lastTransitionTime":"2026-01-20T16:42:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:58 crc kubenswrapper[4558]: E0120 16:42:58.827087 4558 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"bf027402-7a62-478b-8585-220c98495823\\\",\\\"systemUUID\\\":\\\"1def282c-e24e-4bc0-9a1b-d7cc30756d3d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:58Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.829483 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.829521 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.829530 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.829544 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.829552 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:58Z","lastTransitionTime":"2026-01-20T16:42:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.833839 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-nv2xw_254129cd-82fc-4162-b671-2434bc9e2972/ovnkube-controller/3.log" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.834369 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-nv2xw_254129cd-82fc-4162-b671-2434bc9e2972/ovnkube-controller/2.log" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.836865 4558 generic.go:334] "Generic (PLEG): container finished" podID="254129cd-82fc-4162-b671-2434bc9e2972" containerID="0e9f4ed928d20c9db64a689d7d14df9b81870bb0523cf5098e09d2d85aad9fca" exitCode=1 Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.836896 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" event={"ID":"254129cd-82fc-4162-b671-2434bc9e2972","Type":"ContainerDied","Data":"0e9f4ed928d20c9db64a689d7d14df9b81870bb0523cf5098e09d2d85aad9fca"} Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.836923 4558 scope.go:117] "RemoveContainer" containerID="3d28c2f9293ff09b5e7d4ca4df1633cb114f685ad9a9dc8697e9efe3e9603259" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.837655 4558 scope.go:117] "RemoveContainer" containerID="0e9f4ed928d20c9db64a689d7d14df9b81870bb0523cf5098e09d2d85aad9fca" Jan 20 16:42:58 crc kubenswrapper[4558]: E0120 16:42:58.837840 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-nv2xw_openshift-ovn-kubernetes(254129cd-82fc-4162-b671-2434bc9e2972)\"" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" podUID="254129cd-82fc-4162-b671-2434bc9e2972" Jan 20 16:42:58 crc kubenswrapper[4558]: E0120 16:42:58.843064 4558 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\
"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":45063
7738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"bf027402-7a62-478b-8585-220c98495823\\\",\\\"systemUUID\\\":\\\"1def282c-e24e-4bc0-9a1b-d7cc30756d3d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:58Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.845648 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.845679 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.845688 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.845704 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.845712 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:58Z","lastTransitionTime":"2026-01-20T16:42:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.846897 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e45cc56934a7b90e9d47f1c6d06d8dc140d57db4a216469e8ae112f398663e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:58Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:58 crc kubenswrapper[4558]: E0120 16:42:58.856415 4558 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:58Z\\\",\\\"message\\\":\\\"kubelet has no disk 
pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:42:58Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeByt
es\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"bf027402-7a62-478b-8585-220c98495823\\\",\\\"systemUUID\\\":\\\"1
def282c-e24e-4bc0-9a1b-d7cc30756d3d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:58Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:58 crc kubenswrapper[4558]: E0120 16:42:58.856544 4558 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.858440 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.858463 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.858472 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.858483 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.858493 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:58Z","lastTransitionTime":"2026-01-20T16:42:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.858498 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jsqvf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bedf08c7-1f93-4931-a7f3-e729e2a137af\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2297634e20683413a5eb643517580c9486303dc9616a7588a4ac571f3d0e53b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e32b76495d88d356373f9389e163a2f3a72e202bb454c7015594c1f5a41b511\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-20T16:42:50Z\\\",\\\"message\\\":\\\"2026-01-20T16:42:05+00:00 [cnibincopy] 
Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_149f95a3-037e-4113-9baf-ba51c18b4cf2\\\\n2026-01-20T16:42:05+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_149f95a3-037e-4113-9baf-ba51c18b4cf2 to /host/opt/cni/bin/\\\\n2026-01-20T16:42:05Z [verbose] multus-daemon started\\\\n2026-01-20T16:42:05Z [verbose] Readiness Indicator file check\\\\n2026-01-20T16:42:50Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xxtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jsqvf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:58Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.865948 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-47477" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e026445a-aa73-4a0e-ba35-e2e07de1278c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec2ee3ff161cba7e14ad26763081c9f092f0dfe969cef113631c27e0db53d157\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xcnwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-47477\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:58Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.875730 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a13bfefa-f525-4ecd-89f5-f10480532bf6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9030e970e96b6909d771d4fa0c3b4f493b4a825cbc43ef7e684272284329c7c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://168e8f533aaa108e4160f9ae4af6bb7944edba7a596178ffffdc5325073fc3c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e0141aef1babb57501016cb09787591b5e6d9136b8d7dd4ba64393f0be5a027\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c87f08993c2b47c63ba9652c7dafe0fdbecdcb03a7b7080664ab1f1bd0e1d602\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:58Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.885156 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f904dd3e4b43e51569b6532f00fa2f9edf912c1f1969ee2ea00c99d1b7e35a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c877f4a456b3ce64bdbe6fefd50f1ef3f92bf9e26e6296005cf366e0ce265\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:58Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.895558 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30e1ff9d-8dcd-4754-9a7a-c09598fb83db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ea68e442bb2143fbaa5e2c9fe335c6b023a31c2eeaa78d60de45c6ef40c2894\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a8122c3b57ac4f0026df149fe622450d07d8b0d517ad018decd7b21d8d9c04a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-con
troller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7444e8fea1853bae45dbd3d52d07e3f7b94314cd29fe8c99891e515a892e569e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6438ffea42f98178c4c2c21ae01725a8b54d6a6f790f47d64faf5ecd9f1c00f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2e22f1d4e87dd06a67f49aedad280da48ac1c4ba2e438a4925cf8bd5330c4d8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0120 16:41:58.859734 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0120 16:41:58.861094 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179835226/tls.crt::/tmp/serving-cert-2179835226/tls.key\\\\\\\"\\\\nI0120 16:42:04.205247 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0120 16:42:04.207291 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0120 16:42:04.207308 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0120 16:42:04.207329 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0120 16:42:04.207333 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0120 16:42:04.210615 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0120 16:42:04.210632 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0120 16:42:04.210641 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210648 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0120 16:42:04.210655 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0120 16:42:04.210658 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0120 16:42:04.210660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0120 16:42:04.211964 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4850bde1fb538e21fc91949dc584ef0a791d718844691a44559e0513bb36203\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:58Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.904253 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"68337d27-3fa6-4a29-88b0-82e60c3739eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de7f7c23993a57d30f08686e1cd4abb5b17e108ac12dd0c7929a31574c69eeb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18a59ecb802507e0d5988eacc915fd7e0d6954518de80f61162c97f680fd8c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:58Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.911417 4558 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f10d97b3-5765-454b-b306-1ef544be2c86\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ba1f84f7ea0577350c7025aeb96fb1b329297f4d6767727f9a4e6659d3027cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2f374f27360645aa18b003d1b2415cd498c17c73812950904109dd4b77cbd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ac2f374f27360645aa18b003d1b2415cd498c17c73812950904109dd4b77cbd5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:58Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.924911 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"425eab90-fb70-4498-a98b-b95742033890\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48592e72d91cf527296f06e52fec1c16c1da21323c0644659945af109a74ebb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7e18f59b5e6b6c0be141b7efb3ccb1df4ff6e3c394bcc88fb1142c8df433493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c889e2b0e14244ef49ec057e3d5fd91687e335a983fdc99a6816abd2af643ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d434793571cbba089cc34029d951bee9063aca5
8728b791f2d1af2d68229f778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89f7ad577df11c509855a90ac8791da6de3f0d2857a1ca56726a6327f40455bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15eddb9054021\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15eddb9054021\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:58Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.934192 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:58Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.942210 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:58Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.949905 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfwnl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8fe5472-7373-4e5d-87fc-a9ecaf26a5cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38cd58ecc67d337df2c29b79e5460ea23d4acc1e66de8b08ea70d7ab7b30691d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9nvl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a14e9281ff19e324499722aefadf1e1f2fd005089c153918ad4d8a7f5bdfa853\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2
099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9nvl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rfwnl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:58Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.957420 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60aafae7-2e6e-4d21-9ce6-8ff5382ef642\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d16b973c217656d6a8ceaf684e9dec82daeb583209943f6b2979be47ab03fda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb2e7753f75b10088eea9204511ae78e4be9a2b5be8c17ed04c3c972b13f8190\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5
c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a426b97002a49938b18009db49867dc9a15102707b42ec46bc64093c0ed3c2c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e4df1c3b2a1aef9adb3da64f89a1b8c0904fd22882609dbafe2fbc5718b8016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e4df1c3b2a1aef9adb3da64f89a1b8c0904fd22882609dbafe2fbc5718b8016\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:58Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.959926 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.959959 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.959985 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.960000 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.960012 4558 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:58Z","lastTransitionTime":"2026-01-20T16:42:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.967575 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e190c24c1d8a3bc0241334598ce23829ada9afee282d49a0c82cd67ad82bcd02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:58Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.981041 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"254129cd-82fc-4162-b671-2434bc9e2972\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a6a184067bde115ef5e09fdd90e7dccfe89a9dc347af450998b91c068e7e803\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd8b93e5559d95511a1285a9aec9a7d72df6c330bcd1e1daf3e93f5494f70eeb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4264ef8ee49a8b172fb05ac5c9b0bd32350b5d9ae95293079dfeaf44f021b455\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0889cf371f8b06cc2d255c15fda97db1d5000d6f6cff29d2fdcb8667cede8a99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4265d3281e954ee5989ee008da28c6cb9fa29478e5fdf23325521d455304da9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://186a1d2caad8865abf7f565405c4a4c077038ccb67ba77927cc2392ec075a811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e9f4ed928d20c9db64a689d7d14df9b81870bb0523cf5098e09d2d85aad9fca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3d28c2f9293ff09b5e7d4ca4df1633cb114f685ad9a9dc8697e9efe3e9603259\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-20T16:42:30Z\\\",\\\"message\\\":\\\"6215 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0120 16:42:30.148863 6215 obj_retry.go:365] Adding new object: *v1.Pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI0120 16:42:30.148994 6215 ovn.go:134] Ensuring zone local for Pod openshift-kube-apiserver/kube-apiserver-crc in node crc\\\\nI0120 16:42:30.149000 6215 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-apiserver/kube-apiserver-crc after 0 failed attempt(s)\\\\nI0120 16:42:30.149004 6215 default_network_controller.go:776] Recording success event on pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI0120 16:42:30.148969 6215 services_controller.go:445] Built service openshift-kube-storage-version-migrator-operator/metrics LB template configs for network=default: []services.lbConfig(nil)\\\\nI0120 16:42:30.149028 6215 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-operator/iptables-alerter-4ln5h\\\\nI0120 16:42:30.149009 6215 base_network_controller_pods.go:916] Annotation values: ip=[10.217.0.3/23] ; mac=0a:58:0a:d9:00:03 ; gw=[10.217.0.1]\\\\nF0120 16:42:30.149066 6215 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller ini\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:29Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e9f4ed928d20c9db64a689d7d14df9b81870bb0523cf5098e09d2d85aad9fca\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-20T16:42:58Z\\\",\\\"message\\\":\\\"ess{},},Conditions:[]Condition{},},}\\\\nI0120 16:42:58.184623 6635 lb_config.go:1031] Cluster endpoints for openshift-controller-manager/controller-manager for 
network=default are: map[]\\\\nF0120 16:42:58.184632 6635 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:58Z is after 2025-08-24T17:21:41Z]\\\\nI0120 16:42:58.184637 6635 services_controller.go:443] Built service openshift-controller-manager/controller-manager LB cluster-wide configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.5.149\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:443, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f41d488b0c24e594a2f5cad36b5f8efdb7d867fae0e18a74fe3c396a203d162f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919
d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-nv2xw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:58Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.991214 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f5t7h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a318f03d0df490cb7bae11819c52eca011d163081aeecff1b2dd8f72caabbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e77617c69434ce20c448b525c7e86f1ece18d51ce6e14167b4ca7e99de8e5709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e77617c69434ce20c448b525c7e86f1ece18d51ce6e14167b4ca7e99de8e5709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://453c67434281e0261ec6799b30a548a01efd72d7df96632409e745b2e6a94546\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://453c67434281e0261ec6799b30a548a01efd72d7df96632409e745b2e6a94546\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06eab4daa6c8b6a564cf1bb1403b7b3b259a234ee09f23b4f4bc0628c2d58e69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06eab4daa6c8b6a564cf1bb1403b7b3b259a234ee09f23b4f4bc0628c2d58e69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1291ac09fc6ae2a79fdeaa0fc47272f61b97f45f67ed3f7c90bdfdbfe3519ff2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1291ac09fc6ae2a79fdeaa0fc47272f61b97f45f67ed3f7c90bdfdbfe3519ff2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe484514f50a14e485d4a97ea3ce9401c77f3f4dae4717719be7ed6c4d7b1a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe484514f50a14e485d4a97ea3ce9401c77f3f4dae4717719be7ed6c4d7b1a59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f5t7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:58Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:58 crc kubenswrapper[4558]: I0120 16:42:58.998703 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9wrq6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"30032328-bd33-4073-9366-e10bc5e2aa77\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6tz4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6tz4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:17Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9wrq6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:58Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:59 crc kubenswrapper[4558]: I0120 16:42:59.007387 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:59Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:59 crc kubenswrapper[4558]: I0120 16:42:59.014100 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d96bp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8d2c8b-fb8e-4bff-b8d2-69570e5d9e57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe858db22acc96b062f1bc7941e05c823f30d1f9ba50bd497c57f38d57e04293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4v6jx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d96bp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:59Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:59 crc kubenswrapper[4558]: I0120 16:42:59.061689 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:59 crc kubenswrapper[4558]: I0120 16:42:59.061728 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:59 crc kubenswrapper[4558]: I0120 16:42:59.061737 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:59 crc kubenswrapper[4558]: I0120 16:42:59.061750 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:59 crc kubenswrapper[4558]: I0120 16:42:59.061760 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:59Z","lastTransitionTime":"2026-01-20T16:42:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:59 crc kubenswrapper[4558]: I0120 16:42:59.164022 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:59 crc kubenswrapper[4558]: I0120 16:42:59.164061 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:59 crc kubenswrapper[4558]: I0120 16:42:59.164069 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:59 crc kubenswrapper[4558]: I0120 16:42:59.164081 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:59 crc kubenswrapper[4558]: I0120 16:42:59.164089 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:59Z","lastTransitionTime":"2026-01-20T16:42:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:59 crc kubenswrapper[4558]: I0120 16:42:59.266010 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:59 crc kubenswrapper[4558]: I0120 16:42:59.266055 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:59 crc kubenswrapper[4558]: I0120 16:42:59.266064 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:59 crc kubenswrapper[4558]: I0120 16:42:59.266080 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:59 crc kubenswrapper[4558]: I0120 16:42:59.266088 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:59Z","lastTransitionTime":"2026-01-20T16:42:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:59 crc kubenswrapper[4558]: I0120 16:42:59.367835 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:59 crc kubenswrapper[4558]: I0120 16:42:59.367863 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:59 crc kubenswrapper[4558]: I0120 16:42:59.367876 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:59 crc kubenswrapper[4558]: I0120 16:42:59.367888 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:59 crc kubenswrapper[4558]: I0120 16:42:59.367897 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:59Z","lastTransitionTime":"2026-01-20T16:42:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:59 crc kubenswrapper[4558]: I0120 16:42:59.469872 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:59 crc kubenswrapper[4558]: I0120 16:42:59.469903 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:59 crc kubenswrapper[4558]: I0120 16:42:59.469913 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:59 crc kubenswrapper[4558]: I0120 16:42:59.469925 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:59 crc kubenswrapper[4558]: I0120 16:42:59.469933 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:59Z","lastTransitionTime":"2026-01-20T16:42:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:59 crc kubenswrapper[4558]: I0120 16:42:59.565682 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9wrq6" Jan 20 16:42:59 crc kubenswrapper[4558]: E0120 16:42:59.565795 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9wrq6" podUID="30032328-bd33-4073-9366-e10bc5e2aa77" Jan 20 16:42:59 crc kubenswrapper[4558]: I0120 16:42:59.572289 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:59 crc kubenswrapper[4558]: I0120 16:42:59.572393 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:59 crc kubenswrapper[4558]: I0120 16:42:59.572478 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:59 crc kubenswrapper[4558]: I0120 16:42:59.572727 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:59 crc kubenswrapper[4558]: I0120 16:42:59.573221 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:59Z","lastTransitionTime":"2026-01-20T16:42:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:59 crc kubenswrapper[4558]: I0120 16:42:59.572681 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-17 05:21:04.668077022 +0000 UTC Jan 20 16:42:59 crc kubenswrapper[4558]: I0120 16:42:59.675045 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:59 crc kubenswrapper[4558]: I0120 16:42:59.675188 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:59 crc kubenswrapper[4558]: I0120 16:42:59.675256 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:59 crc kubenswrapper[4558]: I0120 16:42:59.675310 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:59 crc kubenswrapper[4558]: I0120 16:42:59.675389 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:59Z","lastTransitionTime":"2026-01-20T16:42:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:42:59 crc kubenswrapper[4558]: I0120 16:42:59.776839 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:59 crc kubenswrapper[4558]: I0120 16:42:59.777022 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:59 crc kubenswrapper[4558]: I0120 16:42:59.777106 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:59 crc kubenswrapper[4558]: I0120 16:42:59.777187 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:59 crc kubenswrapper[4558]: I0120 16:42:59.777256 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:59Z","lastTransitionTime":"2026-01-20T16:42:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:59 crc kubenswrapper[4558]: I0120 16:42:59.840372 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-nv2xw_254129cd-82fc-4162-b671-2434bc9e2972/ovnkube-controller/3.log" Jan 20 16:42:59 crc kubenswrapper[4558]: I0120 16:42:59.842852 4558 scope.go:117] "RemoveContainer" containerID="0e9f4ed928d20c9db64a689d7d14df9b81870bb0523cf5098e09d2d85aad9fca" Jan 20 16:42:59 crc kubenswrapper[4558]: E0120 16:42:59.843052 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-nv2xw_openshift-ovn-kubernetes(254129cd-82fc-4162-b671-2434bc9e2972)\"" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" podUID="254129cd-82fc-4162-b671-2434bc9e2972" Jan 20 16:42:59 crc kubenswrapper[4558]: I0120 16:42:59.851523 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfwnl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8fe5472-7373-4e5d-87fc-a9ecaf26a5cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38cd58ecc67d337df2c29b79e5460ea23d4acc1e66de8b08ea70d7ab7b30691d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9nvl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a14e9281ff19e324499722aefadf1e1f2fd005089c153918ad4d8a7f5bdfa853\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9nvl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rfwnl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:59Z is after 2025-08-24T17:21:41Z" Jan 20 
16:42:59 crc kubenswrapper[4558]: I0120 16:42:59.859213 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60aafae7-2e6e-4d21-9ce6-8ff5382ef642\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d16b973c217656d6a8ceaf684e9dec82daeb583209943f6b2979be47ab03fda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb2e7753f75b10088eea9204511ae78e4be9a2b5be8c17ed04c3c972b13f8190\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a426b97002a49938b18009db49867dc9a15102707b42ec46bc64093c0ed3c2c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.
126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e4df1c3b2a1aef9adb3da64f89a1b8c0904fd22882609dbafe2fbc5718b8016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e4df1c3b2a1aef9adb3da64f89a1b8c0904fd22882609dbafe2fbc5718b8016\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:59Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:59 crc kubenswrapper[4558]: I0120 16:42:59.867748 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e190c24c1d8a3bc0241334598ce23829ada9afee282d49a0c82cd67ad82bcd02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:59Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:59 crc kubenswrapper[4558]: I0120 16:42:59.876777 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:59Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:59 crc kubenswrapper[4558]: I0120 16:42:59.878952 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:59 crc kubenswrapper[4558]: I0120 16:42:59.879050 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:59 crc kubenswrapper[4558]: I0120 16:42:59.879138 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:59 crc kubenswrapper[4558]: I0120 16:42:59.879241 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:59 crc kubenswrapper[4558]: I0120 16:42:59.879313 4558 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:59Z","lastTransitionTime":"2026-01-20T16:42:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:59 crc kubenswrapper[4558]: I0120 16:42:59.885014 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:59Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:59 crc kubenswrapper[4558]: I0120 16:42:59.892044 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9wrq6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30032328-bd33-4073-9366-e10bc5e2aa77\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6tz4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6tz4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:17Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9wrq6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:59Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:59 crc kubenswrapper[4558]: I0120 16:42:59.900395 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:59Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:59 crc kubenswrapper[4558]: I0120 16:42:59.907961 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d96bp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8d2c8b-fb8e-4bff-b8d2-69570e5d9e57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe858db22acc96b062f1bc7941e05c823f30d1f9ba50bd497c57f38d57e04293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4v6jx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d96bp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-20T16:42:59Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:59 crc kubenswrapper[4558]: I0120 16:42:59.920985 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"254129cd-82fc-4162-b671-2434bc9e2972\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a6a184067bde115ef5e09fdd90e7dccfe89a9dc347af450998b91c068e7e803\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd8b93e5559d95511a1285a9aec9a7d72df6c330bcd1e1daf3e93f5494f70eeb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\
"containerID\\\":\\\"cri-o://4264ef8ee49a8b172fb05ac5c9b0bd32350b5d9ae95293079dfeaf44f021b455\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0889cf371f8b06cc2d255c15fda97db1d5000d6f6cff29d2fdcb8667cede8a99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4265d3281e954ee5989ee008da28c6cb9fa29478e5fdf23325521d455304da9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://186a1d2caad8865abf7f565405c4a4c077038ccb67ba77927cc2392ec075a811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e9f4ed928d20c9db64a689d7d14df9b81870bb0523cf5098e09d2d85aad9fca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e9f4ed928d20c9db64a689d7d14df9b81870bb0523cf5098e09d2d85aad9fca\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-20T16:42:58Z\\\",\\\"message\\\":\\\"ess{},},Conditions:[]Condition{},},}\\\\nI0120 16:42:58.184623 6635 lb_config.go:1031] Cluster endpoints for openshift-controller-manager/controller-manager for network=default are: map[]\\\\nF0120 16:42:58.184632 6635 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:58Z is after 2025-08-24T17:21:41Z]\\\\nI0120 16:42:58.184637 6635 services_controller.go:443] Built service openshift-controller-manager/controller-manager LB cluster-wide configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.5.149\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:443, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:57Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed 
container=ovnkube-controller pod=ovnkube-node-nv2xw_openshift-ovn-kubernetes(254129cd-82fc-4162-b671-2434bc9e2972)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f41d488b0c24e594a2f5cad36b5f8efdb7d867fae0e18a74fe3c396a203d162f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\\\",\\\"image\\\":\
\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-nv2xw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:59Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:59 crc kubenswrapper[4558]: I0120 16:42:59.932386 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f5t7h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a318f03d0df490cb7bae11819c52eca011d163081aeecff1b2dd8f72caabbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"
ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e77617c69434ce20c448b525c7e86f1ece18d51ce6e14167b4ca7e99de8e5709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e77617c69434ce20c448b525c7e86f1ece18d51ce6e14167b4ca7e99de8e5709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://453c67434281e0261ec6799b30a548a01efd72d7df96632409e745b2e6a94546\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://453c67434281e0261ec6799b30a548a01efd72d7df96632409e745b2e6a94546\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"reason\\\":\\
\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06eab4daa6c8b6a564cf1bb1403b7b3b259a234ee09f23b4f4bc0628c2d58e69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06eab4daa6c8b6a564cf1bb1403b7b3b259a234ee09f23b4f4bc0628c2d58e69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1291ac09fc6ae2a79fdeaa0fc47272f61b97f45f67ed3f7c90bdfdbfe3519ff2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1291ac09fc6ae2a79fdeaa0fc47272f61b97f45f67ed3f7c90bdfdbfe3519ff2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe484514f50a14e485d4a97ea3ce9401c77f3f4dae4717719be7ed6c4d7b1a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"
,\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe484514f50a14e485d4a97ea3ce9401c77f3f4dae4717719be7ed6c4d7b1a59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f5t7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:59Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:59 crc kubenswrapper[4558]: I0120 16:42:59.939887 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-47477" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e026445a-aa73-4a0e-ba35-e2e07de1278c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec2ee3ff161cba7e14ad26763081c9f092f0dfe969cef113631c27e0db53d157\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xcnwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"p
odIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-47477\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:59Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:59 crc kubenswrapper[4558]: I0120 16:42:59.949148 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a13bfefa-f525-4ecd-89f5-f10480532bf6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9030e970e96b6909d771d4fa0c3b4f493b4a825cbc43ef7e684272284329c7c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://168e8f533aaa108e4160f9ae4af6bb7944edba7a596178ffffdc5325073fc3c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e0141aef1babb57501016cb09787591b5e6d9136b8d7dd4ba64393f0be5a027\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-clu
ster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c87f08993c2b47c63ba9652c7dafe0fdbecdcb03a7b7080664ab1f1bd0e1d602\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:59Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:59 crc kubenswrapper[4558]: I0120 16:42:59.957818 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f904dd3e4b43e51569b6532f00fa2f9edf912c1f1969ee2ea00c99d1b7e35a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c877f4a456b3ce64bdbe6fefd50f1ef3f92bf9e26e6296005cf366e0ce265\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:59Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:59 crc kubenswrapper[4558]: I0120 16:42:59.965893 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e45cc56934a7b90e9d47f1c6d06d8dc140d57db4a216469e8ae112f398663e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:59Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:59 crc kubenswrapper[4558]: I0120 16:42:59.974998 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jsqvf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bedf08c7-1f93-4931-a7f3-e729e2a137af\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2297634e20683413a5eb643517580c9486303dc9616a7588a4ac571f3d0e53b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e32b76495d88d356373f9389e163a2f3a72e202bb454c7015594c1f5a41b511\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-20T16:42:50Z\\\",\\\"message\\\":\\\"2026-01-20T16:42:05+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_149f95a3-037e-4113-9baf-ba51c18b4cf2\\\\n2026-01-20T16:42:05+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_149f95a3-037e-4113-9baf-ba51c18b4cf2 to /host/opt/cni/bin/\\\\n2026-01-20T16:42:05Z [verbose] multus-daemon started\\\\n2026-01-20T16:42:05Z [verbose] Readiness Indicator file check\\\\n2026-01-20T16:42:50Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xxtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jsqvf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:59Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:59 crc kubenswrapper[4558]: I0120 16:42:59.980895 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:42:59 crc kubenswrapper[4558]: I0120 16:42:59.980931 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:42:59 crc kubenswrapper[4558]: I0120 16:42:59.980941 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:42:59 crc kubenswrapper[4558]: I0120 16:42:59.980954 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:42:59 crc kubenswrapper[4558]: I0120 16:42:59.980976 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:42:59Z","lastTransitionTime":"2026-01-20T16:42:59Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:42:59 crc kubenswrapper[4558]: I0120 16:42:59.982574 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f10d97b3-5765-454b-b306-1ef544be2c86\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ba1f84f7ea0577350c7025aeb96fb1b329297f4d6767727f9a4e6659d3027cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2f374f27360645aa18b003d1b2415cd498c17c73812950904109dd4b77cbd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ac2f374f27360645aa18b003d1b2415cd498c17c73812950904109dd4b77cbd5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2026-01-20T16:42:59Z is after 2025-08-24T17:21:41Z" Jan 20 16:42:59 crc kubenswrapper[4558]: I0120 16:42:59.996037 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"425eab90-fb70-4498-a98b-b95742033890\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48592e72d91cf527296f06e52fec1c16c1da21323c0644659945af109a74ebb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7e18f59b5e6b6c0be141b7efb3ccb1df4ff6e3c394bcc88fb1142c8df433493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c889e2b0e14244ef49ec057e3d5fd91687e335a983fdc99a6816abd2af643ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-2
0T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d434793571cbba089cc34029d951bee9063aca58728b791f2d1af2d68229f778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89f7ad577df11c509855a90ac8791da6de3f0d2857a1ca56726a6327f40455bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15eddb9054021\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba
4c53c1ed15eddb9054021\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:59Z is after 2025-08-24T17:21:41Z" Jan 20 16:43:00 crc kubenswrapper[4558]: I0120 16:43:00.005158 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"30e1ff9d-8dcd-4754-9a7a-c09598fb83db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ea68e442bb2143fbaa5e2c9fe335c6b023a31c2eeaa78d60de45c6ef40c2894\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a8122c3b57ac4f0026df149fe622450d07d8b0d517ad018decd7b21d8d9c04a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7444e8fea1853bae45dbd3d52d07e3f7b94314cd29fe8c99891e515a892e569e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6438ffea42f98178c4c2c21ae01725a8b54d6a6f790f47d64faf5ecd9f1c00f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2e22f1d4e87dd06a67f49aedad280da48ac1c4ba2e438a4925cf8bd5330c4d8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0120 16:41:58.859734 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0120 16:41:58.861094 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179835226/tls.crt::/tmp/serving-cert-2179835226/tls.key\\\\\\\"\\\\nI0120 16:42:04.205247 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0120 16:42:04.207291 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0120 16:42:04.207308 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0120 16:42:04.207329 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0120 16:42:04.207333 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0120 16:42:04.210615 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0120 16:42:04.210632 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0120 16:42:04.210641 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210648 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0120 16:42:04.210655 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0120 16:42:04.210658 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0120 16:42:04.210660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0120 16:42:04.211964 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4850bde1fb538e21fc91949dc584ef0a791d718844691a44559e0513bb36203\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:43:00Z is after 2025-08-24T17:21:41Z" Jan 20 16:43:00 crc kubenswrapper[4558]: I0120 16:43:00.013561 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"68337d27-3fa6-4a29-88b0-82e60c3739eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de7f7c23993a57d30f08686e1cd4abb5b17e108ac12dd0c7929a31574c69eeb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18a59ecb802507e0d5988eacc915fd7e0d6954518de80f61162c97f680fd8c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:43:00Z is after 2025-08-24T17:21:41Z" Jan 20 16:43:00 crc kubenswrapper[4558]: I0120 16:43:00.083196 4558 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:00 crc kubenswrapper[4558]: I0120 16:43:00.083231 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:00 crc kubenswrapper[4558]: I0120 16:43:00.083240 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:00 crc kubenswrapper[4558]: I0120 16:43:00.083252 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:00 crc kubenswrapper[4558]: I0120 16:43:00.083261 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:00Z","lastTransitionTime":"2026-01-20T16:43:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:00 crc kubenswrapper[4558]: I0120 16:43:00.185235 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:00 crc kubenswrapper[4558]: I0120 16:43:00.185290 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:00 crc kubenswrapper[4558]: I0120 16:43:00.185300 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:00 crc kubenswrapper[4558]: I0120 16:43:00.185313 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:00 crc kubenswrapper[4558]: I0120 16:43:00.185323 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:00Z","lastTransitionTime":"2026-01-20T16:43:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:00 crc kubenswrapper[4558]: I0120 16:43:00.286748 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:00 crc kubenswrapper[4558]: I0120 16:43:00.286781 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:00 crc kubenswrapper[4558]: I0120 16:43:00.286791 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:00 crc kubenswrapper[4558]: I0120 16:43:00.286804 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:00 crc kubenswrapper[4558]: I0120 16:43:00.286812 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:00Z","lastTransitionTime":"2026-01-20T16:43:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:00 crc kubenswrapper[4558]: I0120 16:43:00.388713 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:00 crc kubenswrapper[4558]: I0120 16:43:00.388745 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:00 crc kubenswrapper[4558]: I0120 16:43:00.388754 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:00 crc kubenswrapper[4558]: I0120 16:43:00.388771 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:00 crc kubenswrapper[4558]: I0120 16:43:00.388779 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:00Z","lastTransitionTime":"2026-01-20T16:43:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:00 crc kubenswrapper[4558]: I0120 16:43:00.491114 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:00 crc kubenswrapper[4558]: I0120 16:43:00.491148 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:00 crc kubenswrapper[4558]: I0120 16:43:00.491157 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:00 crc kubenswrapper[4558]: I0120 16:43:00.491183 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:00 crc kubenswrapper[4558]: I0120 16:43:00.491192 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:00Z","lastTransitionTime":"2026-01-20T16:43:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:00 crc kubenswrapper[4558]: I0120 16:43:00.565819 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:43:00 crc kubenswrapper[4558]: I0120 16:43:00.565889 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 20 16:43:00 crc kubenswrapper[4558]: E0120 16:43:00.565925 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 20 16:43:00 crc kubenswrapper[4558]: I0120 16:43:00.565954 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 20 16:43:00 crc kubenswrapper[4558]: E0120 16:43:00.566017 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 20 16:43:00 crc kubenswrapper[4558]: E0120 16:43:00.566072 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 20 16:43:00 crc kubenswrapper[4558]: I0120 16:43:00.574069 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-14 17:00:02.656633425 +0000 UTC Jan 20 16:43:00 crc kubenswrapper[4558]: I0120 16:43:00.592525 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:00 crc kubenswrapper[4558]: I0120 16:43:00.592555 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:00 crc kubenswrapper[4558]: I0120 16:43:00.592563 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:00 crc kubenswrapper[4558]: I0120 16:43:00.592575 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:00 crc kubenswrapper[4558]: I0120 16:43:00.592584 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:00Z","lastTransitionTime":"2026-01-20T16:43:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:00 crc kubenswrapper[4558]: I0120 16:43:00.694899 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:00 crc kubenswrapper[4558]: I0120 16:43:00.694933 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:00 crc kubenswrapper[4558]: I0120 16:43:00.694942 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:00 crc kubenswrapper[4558]: I0120 16:43:00.694955 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:00 crc kubenswrapper[4558]: I0120 16:43:00.694963 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:00Z","lastTransitionTime":"2026-01-20T16:43:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:00 crc kubenswrapper[4558]: I0120 16:43:00.797219 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:00 crc kubenswrapper[4558]: I0120 16:43:00.797245 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:00 crc kubenswrapper[4558]: I0120 16:43:00.797253 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:00 crc kubenswrapper[4558]: I0120 16:43:00.797266 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:00 crc kubenswrapper[4558]: I0120 16:43:00.797273 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:00Z","lastTransitionTime":"2026-01-20T16:43:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:00 crc kubenswrapper[4558]: I0120 16:43:00.899089 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:00 crc kubenswrapper[4558]: I0120 16:43:00.899291 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:00 crc kubenswrapper[4558]: I0120 16:43:00.899383 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:00 crc kubenswrapper[4558]: I0120 16:43:00.899459 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:00 crc kubenswrapper[4558]: I0120 16:43:00.899521 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:00Z","lastTransitionTime":"2026-01-20T16:43:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:01 crc kubenswrapper[4558]: I0120 16:43:01.001117 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:01 crc kubenswrapper[4558]: I0120 16:43:01.001155 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:01 crc kubenswrapper[4558]: I0120 16:43:01.001185 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:01 crc kubenswrapper[4558]: I0120 16:43:01.001200 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:01 crc kubenswrapper[4558]: I0120 16:43:01.001211 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:01Z","lastTransitionTime":"2026-01-20T16:43:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:01 crc kubenswrapper[4558]: I0120 16:43:01.103304 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:01 crc kubenswrapper[4558]: I0120 16:43:01.103335 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:01 crc kubenswrapper[4558]: I0120 16:43:01.103344 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:01 crc kubenswrapper[4558]: I0120 16:43:01.103355 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:01 crc kubenswrapper[4558]: I0120 16:43:01.103363 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:01Z","lastTransitionTime":"2026-01-20T16:43:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:01 crc kubenswrapper[4558]: I0120 16:43:01.204668 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:01 crc kubenswrapper[4558]: I0120 16:43:01.204709 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:01 crc kubenswrapper[4558]: I0120 16:43:01.204718 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:01 crc kubenswrapper[4558]: I0120 16:43:01.204730 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:01 crc kubenswrapper[4558]: I0120 16:43:01.204738 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:01Z","lastTransitionTime":"2026-01-20T16:43:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:01 crc kubenswrapper[4558]: I0120 16:43:01.306706 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:01 crc kubenswrapper[4558]: I0120 16:43:01.306753 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:01 crc kubenswrapper[4558]: I0120 16:43:01.306762 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:01 crc kubenswrapper[4558]: I0120 16:43:01.306776 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:01 crc kubenswrapper[4558]: I0120 16:43:01.306785 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:01Z","lastTransitionTime":"2026-01-20T16:43:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:01 crc kubenswrapper[4558]: I0120 16:43:01.408490 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:01 crc kubenswrapper[4558]: I0120 16:43:01.408523 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:01 crc kubenswrapper[4558]: I0120 16:43:01.408531 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:01 crc kubenswrapper[4558]: I0120 16:43:01.408543 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:01 crc kubenswrapper[4558]: I0120 16:43:01.408552 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:01Z","lastTransitionTime":"2026-01-20T16:43:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:01 crc kubenswrapper[4558]: I0120 16:43:01.510724 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:01 crc kubenswrapper[4558]: I0120 16:43:01.510755 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:01 crc kubenswrapper[4558]: I0120 16:43:01.510764 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:01 crc kubenswrapper[4558]: I0120 16:43:01.510776 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:01 crc kubenswrapper[4558]: I0120 16:43:01.510784 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:01Z","lastTransitionTime":"2026-01-20T16:43:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:01 crc kubenswrapper[4558]: I0120 16:43:01.565483 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9wrq6" Jan 20 16:43:01 crc kubenswrapper[4558]: E0120 16:43:01.565582 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9wrq6" podUID="30032328-bd33-4073-9366-e10bc5e2aa77" Jan 20 16:43:01 crc kubenswrapper[4558]: I0120 16:43:01.574784 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-18 11:55:24.118941321 +0000 UTC Jan 20 16:43:01 crc kubenswrapper[4558]: I0120 16:43:01.612473 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:01 crc kubenswrapper[4558]: I0120 16:43:01.612498 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:01 crc kubenswrapper[4558]: I0120 16:43:01.612506 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:01 crc kubenswrapper[4558]: I0120 16:43:01.612517 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:01 crc kubenswrapper[4558]: I0120 16:43:01.612526 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:01Z","lastTransitionTime":"2026-01-20T16:43:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:01 crc kubenswrapper[4558]: I0120 16:43:01.714198 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:01 crc kubenswrapper[4558]: I0120 16:43:01.714229 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:01 crc kubenswrapper[4558]: I0120 16:43:01.714239 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:01 crc kubenswrapper[4558]: I0120 16:43:01.714252 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:01 crc kubenswrapper[4558]: I0120 16:43:01.714261 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:01Z","lastTransitionTime":"2026-01-20T16:43:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:01 crc kubenswrapper[4558]: I0120 16:43:01.816410 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:01 crc kubenswrapper[4558]: I0120 16:43:01.816447 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:01 crc kubenswrapper[4558]: I0120 16:43:01.816455 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:01 crc kubenswrapper[4558]: I0120 16:43:01.816467 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:01 crc kubenswrapper[4558]: I0120 16:43:01.816476 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:01Z","lastTransitionTime":"2026-01-20T16:43:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:01 crc kubenswrapper[4558]: I0120 16:43:01.918322 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:01 crc kubenswrapper[4558]: I0120 16:43:01.918386 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:01 crc kubenswrapper[4558]: I0120 16:43:01.918395 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:01 crc kubenswrapper[4558]: I0120 16:43:01.918409 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:01 crc kubenswrapper[4558]: I0120 16:43:01.918418 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:01Z","lastTransitionTime":"2026-01-20T16:43:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:02 crc kubenswrapper[4558]: I0120 16:43:02.020802 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:02 crc kubenswrapper[4558]: I0120 16:43:02.020837 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:02 crc kubenswrapper[4558]: I0120 16:43:02.020845 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:02 crc kubenswrapper[4558]: I0120 16:43:02.020857 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:02 crc kubenswrapper[4558]: I0120 16:43:02.020865 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:02Z","lastTransitionTime":"2026-01-20T16:43:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:02 crc kubenswrapper[4558]: I0120 16:43:02.122997 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:02 crc kubenswrapper[4558]: I0120 16:43:02.123038 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:02 crc kubenswrapper[4558]: I0120 16:43:02.123051 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:02 crc kubenswrapper[4558]: I0120 16:43:02.123065 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:02 crc kubenswrapper[4558]: I0120 16:43:02.123074 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:02Z","lastTransitionTime":"2026-01-20T16:43:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:02 crc kubenswrapper[4558]: I0120 16:43:02.225246 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:02 crc kubenswrapper[4558]: I0120 16:43:02.225280 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:02 crc kubenswrapper[4558]: I0120 16:43:02.225289 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:02 crc kubenswrapper[4558]: I0120 16:43:02.225301 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:02 crc kubenswrapper[4558]: I0120 16:43:02.225310 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:02Z","lastTransitionTime":"2026-01-20T16:43:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:02 crc kubenswrapper[4558]: I0120 16:43:02.327474 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:02 crc kubenswrapper[4558]: I0120 16:43:02.327509 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:02 crc kubenswrapper[4558]: I0120 16:43:02.327518 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:02 crc kubenswrapper[4558]: I0120 16:43:02.327528 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:02 crc kubenswrapper[4558]: I0120 16:43:02.327537 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:02Z","lastTransitionTime":"2026-01-20T16:43:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:02 crc kubenswrapper[4558]: I0120 16:43:02.429432 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:02 crc kubenswrapper[4558]: I0120 16:43:02.429470 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:02 crc kubenswrapper[4558]: I0120 16:43:02.429479 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:02 crc kubenswrapper[4558]: I0120 16:43:02.429491 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:02 crc kubenswrapper[4558]: I0120 16:43:02.429502 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:02Z","lastTransitionTime":"2026-01-20T16:43:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:02 crc kubenswrapper[4558]: I0120 16:43:02.531663 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:02 crc kubenswrapper[4558]: I0120 16:43:02.531702 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:02 crc kubenswrapper[4558]: I0120 16:43:02.531710 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:02 crc kubenswrapper[4558]: I0120 16:43:02.531725 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:02 crc kubenswrapper[4558]: I0120 16:43:02.531734 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:02Z","lastTransitionTime":"2026-01-20T16:43:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:02 crc kubenswrapper[4558]: I0120 16:43:02.565054 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:43:02 crc kubenswrapper[4558]: I0120 16:43:02.565100 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 20 16:43:02 crc kubenswrapper[4558]: I0120 16:43:02.565065 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 20 16:43:02 crc kubenswrapper[4558]: E0120 16:43:02.565193 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 20 16:43:02 crc kubenswrapper[4558]: E0120 16:43:02.565249 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 20 16:43:02 crc kubenswrapper[4558]: E0120 16:43:02.565313 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 20 16:43:02 crc kubenswrapper[4558]: I0120 16:43:02.575892 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-02 06:12:31.91772579 +0000 UTC Jan 20 16:43:02 crc kubenswrapper[4558]: I0120 16:43:02.633754 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:02 crc kubenswrapper[4558]: I0120 16:43:02.633788 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:02 crc kubenswrapper[4558]: I0120 16:43:02.633796 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:02 crc kubenswrapper[4558]: I0120 16:43:02.633808 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:02 crc kubenswrapper[4558]: I0120 16:43:02.633816 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:02Z","lastTransitionTime":"2026-01-20T16:43:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:02 crc kubenswrapper[4558]: I0120 16:43:02.735775 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:02 crc kubenswrapper[4558]: I0120 16:43:02.735808 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:02 crc kubenswrapper[4558]: I0120 16:43:02.735816 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:02 crc kubenswrapper[4558]: I0120 16:43:02.735831 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:02 crc kubenswrapper[4558]: I0120 16:43:02.735838 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:02Z","lastTransitionTime":"2026-01-20T16:43:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:02 crc kubenswrapper[4558]: I0120 16:43:02.837703 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:02 crc kubenswrapper[4558]: I0120 16:43:02.837737 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:02 crc kubenswrapper[4558]: I0120 16:43:02.837745 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:02 crc kubenswrapper[4558]: I0120 16:43:02.837755 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:02 crc kubenswrapper[4558]: I0120 16:43:02.837763 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:02Z","lastTransitionTime":"2026-01-20T16:43:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:02 crc kubenswrapper[4558]: I0120 16:43:02.939640 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:02 crc kubenswrapper[4558]: I0120 16:43:02.939696 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:02 crc kubenswrapper[4558]: I0120 16:43:02.939704 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:02 crc kubenswrapper[4558]: I0120 16:43:02.939716 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:02 crc kubenswrapper[4558]: I0120 16:43:02.939724 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:02Z","lastTransitionTime":"2026-01-20T16:43:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:03 crc kubenswrapper[4558]: I0120 16:43:03.041980 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:03 crc kubenswrapper[4558]: I0120 16:43:03.042037 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:03 crc kubenswrapper[4558]: I0120 16:43:03.042051 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:03 crc kubenswrapper[4558]: I0120 16:43:03.042066 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:03 crc kubenswrapper[4558]: I0120 16:43:03.042075 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:03Z","lastTransitionTime":"2026-01-20T16:43:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:03 crc kubenswrapper[4558]: I0120 16:43:03.144005 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:03 crc kubenswrapper[4558]: I0120 16:43:03.144064 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:03 crc kubenswrapper[4558]: I0120 16:43:03.144074 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:03 crc kubenswrapper[4558]: I0120 16:43:03.144087 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:03 crc kubenswrapper[4558]: I0120 16:43:03.144112 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:03Z","lastTransitionTime":"2026-01-20T16:43:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:03 crc kubenswrapper[4558]: I0120 16:43:03.246284 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:03 crc kubenswrapper[4558]: I0120 16:43:03.246337 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:03 crc kubenswrapper[4558]: I0120 16:43:03.246349 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:03 crc kubenswrapper[4558]: I0120 16:43:03.246366 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:03 crc kubenswrapper[4558]: I0120 16:43:03.246377 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:03Z","lastTransitionTime":"2026-01-20T16:43:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:03 crc kubenswrapper[4558]: I0120 16:43:03.347978 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:03 crc kubenswrapper[4558]: I0120 16:43:03.348036 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:03 crc kubenswrapper[4558]: I0120 16:43:03.348045 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:03 crc kubenswrapper[4558]: I0120 16:43:03.348057 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:03 crc kubenswrapper[4558]: I0120 16:43:03.348065 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:03Z","lastTransitionTime":"2026-01-20T16:43:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:03 crc kubenswrapper[4558]: I0120 16:43:03.450215 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:03 crc kubenswrapper[4558]: I0120 16:43:03.450260 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:03 crc kubenswrapper[4558]: I0120 16:43:03.450271 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:03 crc kubenswrapper[4558]: I0120 16:43:03.450285 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:03 crc kubenswrapper[4558]: I0120 16:43:03.450296 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:03Z","lastTransitionTime":"2026-01-20T16:43:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:03 crc kubenswrapper[4558]: I0120 16:43:03.552428 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:03 crc kubenswrapper[4558]: I0120 16:43:03.552470 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:03 crc kubenswrapper[4558]: I0120 16:43:03.552478 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:03 crc kubenswrapper[4558]: I0120 16:43:03.552492 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:03 crc kubenswrapper[4558]: I0120 16:43:03.552502 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:03Z","lastTransitionTime":"2026-01-20T16:43:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:03 crc kubenswrapper[4558]: I0120 16:43:03.565267 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9wrq6" Jan 20 16:43:03 crc kubenswrapper[4558]: E0120 16:43:03.565386 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9wrq6" podUID="30032328-bd33-4073-9366-e10bc5e2aa77" Jan 20 16:43:03 crc kubenswrapper[4558]: I0120 16:43:03.576862 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-18 11:35:35.972329304 +0000 UTC Jan 20 16:43:03 crc kubenswrapper[4558]: I0120 16:43:03.655350 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:03 crc kubenswrapper[4558]: I0120 16:43:03.655384 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:03 crc kubenswrapper[4558]: I0120 16:43:03.655393 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:03 crc kubenswrapper[4558]: I0120 16:43:03.655404 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:03 crc kubenswrapper[4558]: I0120 16:43:03.655413 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:03Z","lastTransitionTime":"2026-01-20T16:43:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:03 crc kubenswrapper[4558]: I0120 16:43:03.757127 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:03 crc kubenswrapper[4558]: I0120 16:43:03.757178 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:03 crc kubenswrapper[4558]: I0120 16:43:03.757188 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:03 crc kubenswrapper[4558]: I0120 16:43:03.757201 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:03 crc kubenswrapper[4558]: I0120 16:43:03.757209 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:03Z","lastTransitionTime":"2026-01-20T16:43:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:03 crc kubenswrapper[4558]: I0120 16:43:03.859341 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:03 crc kubenswrapper[4558]: I0120 16:43:03.859393 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:03 crc kubenswrapper[4558]: I0120 16:43:03.859402 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:03 crc kubenswrapper[4558]: I0120 16:43:03.859414 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:03 crc kubenswrapper[4558]: I0120 16:43:03.859423 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:03Z","lastTransitionTime":"2026-01-20T16:43:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:03 crc kubenswrapper[4558]: I0120 16:43:03.961128 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:03 crc kubenswrapper[4558]: I0120 16:43:03.961181 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:03 crc kubenswrapper[4558]: I0120 16:43:03.961190 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:03 crc kubenswrapper[4558]: I0120 16:43:03.961202 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:03 crc kubenswrapper[4558]: I0120 16:43:03.961211 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:03Z","lastTransitionTime":"2026-01-20T16:43:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:04 crc kubenswrapper[4558]: I0120 16:43:04.063284 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:04 crc kubenswrapper[4558]: I0120 16:43:04.063515 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:04 crc kubenswrapper[4558]: I0120 16:43:04.063573 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:04 crc kubenswrapper[4558]: I0120 16:43:04.063651 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:04 crc kubenswrapper[4558]: I0120 16:43:04.063711 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:04Z","lastTransitionTime":"2026-01-20T16:43:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:04 crc kubenswrapper[4558]: I0120 16:43:04.165106 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:04 crc kubenswrapper[4558]: I0120 16:43:04.165144 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:04 crc kubenswrapper[4558]: I0120 16:43:04.165151 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:04 crc kubenswrapper[4558]: I0120 16:43:04.165183 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:04 crc kubenswrapper[4558]: I0120 16:43:04.165192 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:04Z","lastTransitionTime":"2026-01-20T16:43:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:04 crc kubenswrapper[4558]: I0120 16:43:04.267203 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:04 crc kubenswrapper[4558]: I0120 16:43:04.267241 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:04 crc kubenswrapper[4558]: I0120 16:43:04.267250 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:04 crc kubenswrapper[4558]: I0120 16:43:04.267264 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:04 crc kubenswrapper[4558]: I0120 16:43:04.267273 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:04Z","lastTransitionTime":"2026-01-20T16:43:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:04 crc kubenswrapper[4558]: I0120 16:43:04.368799 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:04 crc kubenswrapper[4558]: I0120 16:43:04.368838 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:04 crc kubenswrapper[4558]: I0120 16:43:04.368864 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:04 crc kubenswrapper[4558]: I0120 16:43:04.368878 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:04 crc kubenswrapper[4558]: I0120 16:43:04.368887 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:04Z","lastTransitionTime":"2026-01-20T16:43:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:04 crc kubenswrapper[4558]: I0120 16:43:04.470722 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:04 crc kubenswrapper[4558]: I0120 16:43:04.470759 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:04 crc kubenswrapper[4558]: I0120 16:43:04.470768 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:04 crc kubenswrapper[4558]: I0120 16:43:04.470781 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:04 crc kubenswrapper[4558]: I0120 16:43:04.470789 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:04Z","lastTransitionTime":"2026-01-20T16:43:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:04 crc kubenswrapper[4558]: I0120 16:43:04.565474 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:43:04 crc kubenswrapper[4558]: I0120 16:43:04.565522 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 20 16:43:04 crc kubenswrapper[4558]: I0120 16:43:04.565571 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 20 16:43:04 crc kubenswrapper[4558]: E0120 16:43:04.565578 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 20 16:43:04 crc kubenswrapper[4558]: E0120 16:43:04.565638 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 20 16:43:04 crc kubenswrapper[4558]: E0120 16:43:04.565690 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 20 16:43:04 crc kubenswrapper[4558]: I0120 16:43:04.572414 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:04 crc kubenswrapper[4558]: I0120 16:43:04.572457 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:04 crc kubenswrapper[4558]: I0120 16:43:04.572466 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:04 crc kubenswrapper[4558]: I0120 16:43:04.572477 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:04 crc kubenswrapper[4558]: I0120 16:43:04.572485 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:04Z","lastTransitionTime":"2026-01-20T16:43:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:04 crc kubenswrapper[4558]: I0120 16:43:04.577553 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-31 04:27:43.102334021 +0000 UTC Jan 20 16:43:04 crc kubenswrapper[4558]: I0120 16:43:04.674731 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:04 crc kubenswrapper[4558]: I0120 16:43:04.674761 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:04 crc kubenswrapper[4558]: I0120 16:43:04.674769 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:04 crc kubenswrapper[4558]: I0120 16:43:04.674779 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:04 crc kubenswrapper[4558]: I0120 16:43:04.674788 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:04Z","lastTransitionTime":"2026-01-20T16:43:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:04 crc kubenswrapper[4558]: I0120 16:43:04.776734 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:04 crc kubenswrapper[4558]: I0120 16:43:04.776798 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:04 crc kubenswrapper[4558]: I0120 16:43:04.776808 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:04 crc kubenswrapper[4558]: I0120 16:43:04.776821 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:04 crc kubenswrapper[4558]: I0120 16:43:04.776829 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:04Z","lastTransitionTime":"2026-01-20T16:43:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:04 crc kubenswrapper[4558]: I0120 16:43:04.878086 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:04 crc kubenswrapper[4558]: I0120 16:43:04.878120 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:04 crc kubenswrapper[4558]: I0120 16:43:04.878127 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:04 crc kubenswrapper[4558]: I0120 16:43:04.878139 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:04 crc kubenswrapper[4558]: I0120 16:43:04.878150 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:04Z","lastTransitionTime":"2026-01-20T16:43:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:04 crc kubenswrapper[4558]: I0120 16:43:04.979706 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:04 crc kubenswrapper[4558]: I0120 16:43:04.979748 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:04 crc kubenswrapper[4558]: I0120 16:43:04.979758 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:04 crc kubenswrapper[4558]: I0120 16:43:04.979789 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:04 crc kubenswrapper[4558]: I0120 16:43:04.979800 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:04Z","lastTransitionTime":"2026-01-20T16:43:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:05 crc kubenswrapper[4558]: I0120 16:43:05.081715 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:05 crc kubenswrapper[4558]: I0120 16:43:05.081766 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:05 crc kubenswrapper[4558]: I0120 16:43:05.081776 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:05 crc kubenswrapper[4558]: I0120 16:43:05.081788 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:05 crc kubenswrapper[4558]: I0120 16:43:05.081796 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:05Z","lastTransitionTime":"2026-01-20T16:43:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:05 crc kubenswrapper[4558]: I0120 16:43:05.183796 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:05 crc kubenswrapper[4558]: I0120 16:43:05.183833 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:05 crc kubenswrapper[4558]: I0120 16:43:05.183840 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:05 crc kubenswrapper[4558]: I0120 16:43:05.183852 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:05 crc kubenswrapper[4558]: I0120 16:43:05.183863 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:05Z","lastTransitionTime":"2026-01-20T16:43:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:05 crc kubenswrapper[4558]: I0120 16:43:05.285501 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:05 crc kubenswrapper[4558]: I0120 16:43:05.285540 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:05 crc kubenswrapper[4558]: I0120 16:43:05.285549 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:05 crc kubenswrapper[4558]: I0120 16:43:05.285563 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:05 crc kubenswrapper[4558]: I0120 16:43:05.285572 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:05Z","lastTransitionTime":"2026-01-20T16:43:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:05 crc kubenswrapper[4558]: I0120 16:43:05.387912 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:05 crc kubenswrapper[4558]: I0120 16:43:05.387949 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:05 crc kubenswrapper[4558]: I0120 16:43:05.387957 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:05 crc kubenswrapper[4558]: I0120 16:43:05.387971 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:05 crc kubenswrapper[4558]: I0120 16:43:05.387979 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:05Z","lastTransitionTime":"2026-01-20T16:43:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:05 crc kubenswrapper[4558]: I0120 16:43:05.489416 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:05 crc kubenswrapper[4558]: I0120 16:43:05.489447 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:05 crc kubenswrapper[4558]: I0120 16:43:05.489456 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:05 crc kubenswrapper[4558]: I0120 16:43:05.489468 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:05 crc kubenswrapper[4558]: I0120 16:43:05.489477 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:05Z","lastTransitionTime":"2026-01-20T16:43:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:05 crc kubenswrapper[4558]: I0120 16:43:05.565095 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9wrq6" Jan 20 16:43:05 crc kubenswrapper[4558]: E0120 16:43:05.565247 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9wrq6" podUID="30032328-bd33-4073-9366-e10bc5e2aa77" Jan 20 16:43:05 crc kubenswrapper[4558]: I0120 16:43:05.578678 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-06 02:07:47.102897732 +0000 UTC Jan 20 16:43:05 crc kubenswrapper[4558]: I0120 16:43:05.591120 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:05 crc kubenswrapper[4558]: I0120 16:43:05.591156 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:05 crc kubenswrapper[4558]: I0120 16:43:05.591183 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:05 crc kubenswrapper[4558]: I0120 16:43:05.591197 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:05 crc kubenswrapper[4558]: I0120 16:43:05.591206 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:05Z","lastTransitionTime":"2026-01-20T16:43:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:05 crc kubenswrapper[4558]: I0120 16:43:05.693147 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:05 crc kubenswrapper[4558]: I0120 16:43:05.693197 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:05 crc kubenswrapper[4558]: I0120 16:43:05.693208 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:05 crc kubenswrapper[4558]: I0120 16:43:05.693224 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:05 crc kubenswrapper[4558]: I0120 16:43:05.693232 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:05Z","lastTransitionTime":"2026-01-20T16:43:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:05 crc kubenswrapper[4558]: I0120 16:43:05.794874 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:05 crc kubenswrapper[4558]: I0120 16:43:05.795060 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:05 crc kubenswrapper[4558]: I0120 16:43:05.795125 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:05 crc kubenswrapper[4558]: I0120 16:43:05.795231 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:05 crc kubenswrapper[4558]: I0120 16:43:05.795289 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:05Z","lastTransitionTime":"2026-01-20T16:43:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:05 crc kubenswrapper[4558]: I0120 16:43:05.896972 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:05 crc kubenswrapper[4558]: I0120 16:43:05.896998 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:05 crc kubenswrapper[4558]: I0120 16:43:05.897005 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:05 crc kubenswrapper[4558]: I0120 16:43:05.897016 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:05 crc kubenswrapper[4558]: I0120 16:43:05.897024 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:05Z","lastTransitionTime":"2026-01-20T16:43:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:05 crc kubenswrapper[4558]: I0120 16:43:05.998987 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:05 crc kubenswrapper[4558]: I0120 16:43:05.999020 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:05 crc kubenswrapper[4558]: I0120 16:43:05.999028 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:05 crc kubenswrapper[4558]: I0120 16:43:05.999040 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:05 crc kubenswrapper[4558]: I0120 16:43:05.999051 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:05Z","lastTransitionTime":"2026-01-20T16:43:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:06 crc kubenswrapper[4558]: I0120 16:43:06.101182 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:06 crc kubenswrapper[4558]: I0120 16:43:06.101216 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:06 crc kubenswrapper[4558]: I0120 16:43:06.101225 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:06 crc kubenswrapper[4558]: I0120 16:43:06.101238 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:06 crc kubenswrapper[4558]: I0120 16:43:06.101246 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:06Z","lastTransitionTime":"2026-01-20T16:43:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:06 crc kubenswrapper[4558]: I0120 16:43:06.203214 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:06 crc kubenswrapper[4558]: I0120 16:43:06.203269 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:06 crc kubenswrapper[4558]: I0120 16:43:06.203280 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:06 crc kubenswrapper[4558]: I0120 16:43:06.203293 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:06 crc kubenswrapper[4558]: I0120 16:43:06.203301 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:06Z","lastTransitionTime":"2026-01-20T16:43:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:06 crc kubenswrapper[4558]: I0120 16:43:06.305380 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:06 crc kubenswrapper[4558]: I0120 16:43:06.305429 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:06 crc kubenswrapper[4558]: I0120 16:43:06.305439 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:06 crc kubenswrapper[4558]: I0120 16:43:06.305451 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:06 crc kubenswrapper[4558]: I0120 16:43:06.305460 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:06Z","lastTransitionTime":"2026-01-20T16:43:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:06 crc kubenswrapper[4558]: I0120 16:43:06.407379 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:06 crc kubenswrapper[4558]: I0120 16:43:06.407419 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:06 crc kubenswrapper[4558]: I0120 16:43:06.407426 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:06 crc kubenswrapper[4558]: I0120 16:43:06.407437 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:06 crc kubenswrapper[4558]: I0120 16:43:06.407446 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:06Z","lastTransitionTime":"2026-01-20T16:43:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:06 crc kubenswrapper[4558]: I0120 16:43:06.510114 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:06 crc kubenswrapper[4558]: I0120 16:43:06.510181 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:06 crc kubenswrapper[4558]: I0120 16:43:06.510192 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:06 crc kubenswrapper[4558]: I0120 16:43:06.510210 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:06 crc kubenswrapper[4558]: I0120 16:43:06.510220 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:06Z","lastTransitionTime":"2026-01-20T16:43:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:06 crc kubenswrapper[4558]: I0120 16:43:06.565820 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:43:06 crc kubenswrapper[4558]: E0120 16:43:06.565935 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 20 16:43:06 crc kubenswrapper[4558]: I0120 16:43:06.566018 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 20 16:43:06 crc kubenswrapper[4558]: I0120 16:43:06.566090 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 20 16:43:06 crc kubenswrapper[4558]: E0120 16:43:06.566117 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 20 16:43:06 crc kubenswrapper[4558]: E0120 16:43:06.566158 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 20 16:43:06 crc kubenswrapper[4558]: I0120 16:43:06.576138 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a13bfefa-f525-4ecd-89f5-f10480532bf6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9030e970e96b6909d771d4fa0c3b4f493b4a825cbc43ef7e684272284329c7c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://168e8f533aaa108e4160f9ae4af6bb7944edba7a596178ffffdc5325073fc3c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}}
,\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e0141aef1babb57501016cb09787591b5e6d9136b8d7dd4ba64393f0be5a027\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c87f08993c2b47c63ba9652c7dafe0fdbecdcb03a7b7080664ab1f1bd0e1d602\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:43:06Z is after 2025-08-24T17:21:41Z" Jan 20 16:43:06 crc kubenswrapper[4558]: I0120 16:43:06.579208 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-14 22:15:24.93027975 +0000 UTC Jan 20 16:43:06 crc kubenswrapper[4558]: I0120 16:43:06.584517 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f904dd3e4b43e51569b6532f00fa2f9edf912c1f1969ee2ea00c99d1b7e35a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c877f4a456b3ce64bdbe6fefd50f1ef3f92bf9e26e6296005cf366e0ce265\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:43:06Z is after 2025-08-24T17:21:41Z" Jan 20 16:43:06 crc kubenswrapper[4558]: I0120 16:43:06.591910 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e45cc56934a7b90e9d47f1c6d06d8dc140d57db4a216469e8ae112f398663e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:43:06Z is after 2025-08-24T17:21:41Z" Jan 20 16:43:06 crc kubenswrapper[4558]: I0120 16:43:06.600241 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jsqvf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bedf08c7-1f93-4931-a7f3-e729e2a137af\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2297634e20683413a5eb643517580c9486303dc9616a7588a4ac571f3d0e53b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e32b76495d88d356373f9389e163a2f3a72e202bb454c7015594c1f5a41b511\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-20T16:42:50Z\\\",\\\"message\\\":\\\"2026-01-20T16:42:05+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_149f95a3-037e-4113-9baf-ba51c18b4cf2\\\\n2026-01-20T16:42:05+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_149f95a3-037e-4113-9baf-ba51c18b4cf2 to /host/opt/cni/bin/\\\\n2026-01-20T16:42:05Z [verbose] multus-daemon started\\\\n2026-01-20T16:42:05Z [verbose] Readiness Indicator file check\\\\n2026-01-20T16:42:50Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xxtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jsqvf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:43:06Z is after 2025-08-24T17:21:41Z" Jan 20 16:43:06 crc kubenswrapper[4558]: I0120 16:43:06.606785 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-47477" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e026445a-aa73-4a0e-ba35-e2e07de1278c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec2ee3ff161cba7e14ad26763081c9f092f0dfe969cef113631c27e0db53d157\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xcnwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-47477\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:43:06Z is after 2025-08-24T17:21:41Z" Jan 20 16:43:06 crc kubenswrapper[4558]: I0120 16:43:06.611530 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:06 crc kubenswrapper[4558]: I0120 16:43:06.611556 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:06 crc kubenswrapper[4558]: I0120 16:43:06.611565 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:06 crc kubenswrapper[4558]: I0120 16:43:06.611578 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:06 crc kubenswrapper[4558]: I0120 16:43:06.611586 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:06Z","lastTransitionTime":"2026-01-20T16:43:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:06 crc kubenswrapper[4558]: I0120 16:43:06.614401 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f10d97b3-5765-454b-b306-1ef544be2c86\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ba1f84f7ea0577350c7025aeb96fb1b329297f4d6767727f9a4e6659d3027cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2f374f27360645aa18b003d1b2415cd498c17c73812950904109dd4b77cbd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ac2f374f27360645aa18b003d1b2415cd498c17c73812950904109dd4b77cbd5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet 
valid: current time 2026-01-20T16:43:06Z is after 2025-08-24T17:21:41Z" Jan 20 16:43:06 crc kubenswrapper[4558]: I0120 16:43:06.631539 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"425eab90-fb70-4498-a98b-b95742033890\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48592e72d91cf527296f06e52fec1c16c1da21323c0644659945af109a74ebb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7e18f59b5e6b6c0be141b7efb3ccb1df4ff6e3c394bcc88fb1142c8df433493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c889e2b0e14244ef49ec057e3d5fd91687e335a983fdc99a6816abd2af643ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"
volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d434793571cbba089cc34029d951bee9063aca58728b791f2d1af2d68229f778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89f7ad577df11c509855a90ac8791da6de3f0d2857a1ca56726a6327f40455bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15eddb9054021\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15eddb9054021\
\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:43:06Z is after 2025-08-24T17:21:41Z" Jan 20 16:43:06 crc kubenswrapper[4558]: I0120 16:43:06.641252 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"30e1ff9d-8dcd-4754-9a7a-c09598fb83db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ea68e442bb2143fbaa5e2c9fe335c6b023a31c2eeaa78d60de45c6ef40c2894\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a8122c3b57ac4f0026df149fe622450d07d8b0d517ad018decd7b21d8d9c04a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7444e8fea1853bae45dbd3d52d07e3f7b94314cd29fe8c99891e515a892e569e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6438ffea42f98178c4c2c21ae01725a8b54d6a6f790f47d64faf5ecd9f1c00f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2e22f1d4e87dd06a67f49aedad280da48ac1c4ba2e438a4925cf8bd5330c4d8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0120 16:41:58.859734 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0120 16:41:58.861094 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179835226/tls.crt::/tmp/serving-cert-2179835226/tls.key\\\\\\\"\\\\nI0120 16:42:04.205247 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0120 16:42:04.207291 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0120 16:42:04.207308 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0120 16:42:04.207329 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0120 16:42:04.207333 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0120 16:42:04.210615 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0120 16:42:04.210632 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0120 16:42:04.210641 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210648 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0120 16:42:04.210655 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0120 16:42:04.210658 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0120 16:42:04.210660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0120 16:42:04.211964 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4850bde1fb538e21fc91949dc584ef0a791d718844691a44559e0513bb36203\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:43:06Z is after 2025-08-24T17:21:41Z" Jan 20 16:43:06 crc kubenswrapper[4558]: I0120 16:43:06.649291 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"68337d27-3fa6-4a29-88b0-82e60c3739eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de7f7c23993a57d30f08686e1cd4abb5b17e108ac12dd0c7929a31574c69eeb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18a59ecb802507e0d5988eacc915fd7e0d6954518de80f61162c97f680fd8c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:43:06Z is after 2025-08-24T17:21:41Z" Jan 20 16:43:06 crc kubenswrapper[4558]: I0120 16:43:06.657184 4558 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60aafae7-2e6e-4d21-9ce6-8ff5382ef642\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d16b973c217656d6a8ceaf684e9dec82daeb583209943f6b2979be47ab03fda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb2e7753f75b10088eea9204511ae78e4be9a2b5be8c17ed04c3c972b13f8190\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a426b97002a49938b18009db49867dc9a15102707b42ec46bc64093c0ed3c2c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":
[{\\\"containerID\\\":\\\"cri-o://6e4df1c3b2a1aef9adb3da64f89a1b8c0904fd22882609dbafe2fbc5718b8016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e4df1c3b2a1aef9adb3da64f89a1b8c0904fd22882609dbafe2fbc5718b8016\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:43:06Z is after 2025-08-24T17:21:41Z" Jan 20 16:43:06 crc kubenswrapper[4558]: I0120 16:43:06.665708 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e190c24c1d8a3bc0241334598ce23829ada9afee282d49a0c82cd67ad82bcd02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:43:06Z is after 2025-08-24T17:21:41Z" Jan 20 16:43:06 crc kubenswrapper[4558]: I0120 16:43:06.673568 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:43:06Z is after 2025-08-24T17:21:41Z" Jan 20 16:43:06 crc kubenswrapper[4558]: I0120 16:43:06.681518 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:43:06Z is after 2025-08-24T17:21:41Z" Jan 20 16:43:06 crc kubenswrapper[4558]: I0120 16:43:06.688605 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfwnl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8fe5472-7373-4e5d-87fc-a9ecaf26a5cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38cd58ecc67d337df2c29b79e5460ea23d4acc1e66de8b08ea70d7ab7b30691d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9nvl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a14e9281ff19e324499722aefadf1e1f2fd005089c153918ad4d8a7f5bdfa853\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9nvl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rfwnl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:43:06Z is after 2025-08-24T17:21:41Z" Jan 20 
16:43:06 crc kubenswrapper[4558]: I0120 16:43:06.696458 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:43:06Z is after 2025-08-24T17:21:41Z" Jan 20 16:43:06 crc kubenswrapper[4558]: I0120 16:43:06.703061 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d96bp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8d2c8b-fb8e-4bff-b8d2-69570e5d9e57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe858db22acc96b062f1bc7941e05c823f30d1f9ba50bd497c57f38d57e04293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4v6jx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d96bp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:43:06Z is after 2025-08-24T17:21:41Z" Jan 20 16:43:06 crc kubenswrapper[4558]: I0120 16:43:06.713826 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:06 crc kubenswrapper[4558]: I0120 16:43:06.713854 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:06 crc kubenswrapper[4558]: I0120 16:43:06.713862 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:06 crc kubenswrapper[4558]: I0120 16:43:06.713875 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:06 crc kubenswrapper[4558]: I0120 16:43:06.713883 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:06Z","lastTransitionTime":"2026-01-20T16:43:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:06 crc kubenswrapper[4558]: I0120 16:43:06.715573 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"254129cd-82fc-4162-b671-2434bc9e2972\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a6a184067bde115ef5e09fdd90e7dccfe89a9dc347af450998b91c068e7e803\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd8b93e5559d95511a1285a9aec9a7d72df6c330bcd1e1daf3e93f5494f70eeb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4264ef8ee49a8b172fb05ac5c9b0bd32350b5d9ae95293079dfeaf44f021b455\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0889cf371f8b06cc2d255c15fda97db1d5000d6f6cff29d2fdcb8667cede8a99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4265d3281e954ee5989ee008da28c6cb9fa29478e5fdf23325521d455304da9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://186a1d2caad8865abf7f565405c4a4c077038ccb67ba77927cc2392ec075a811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732574532
65a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e9f4ed928d20c9db64a689d7d14df9b81870bb0523cf5098e09d2d85aad9fca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e9f4ed928d20c9db64a689d7d14df9b81870bb0523cf5098e09d2d85aad9fca\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-20T16:42:58Z\\\",\\\"message\\\":\\\"ess{},},Conditions:[]Condition{},},}\\\\nI0120 16:42:58.184623 6635 lb_config.go:1031] Cluster endpoints for openshift-controller-manager/controller-manager for network=default are: map[]\\\\nF0120 16:42:58.184632 6635 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:58Z is after 2025-08-24T17:21:41Z]\\\\nI0120 16:42:58.184637 6635 services_controller.go:443] Built service openshift-controller-manager/controller-manager LB cluster-wide configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.5.149\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:443, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:57Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 
40s restarting failed container=ovnkube-controller pod=ovnkube-node-nv2xw_openshift-ovn-kubernetes(254129cd-82fc-4162-b671-2434bc9e2972)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f41d488b0c24e594a2f5cad36b5f8efdb7d867fae0e18a74fe3c396a203d162f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db
9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-nv2xw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:43:06Z is after 2025-08-24T17:21:41Z" Jan 20 16:43:06 crc kubenswrapper[4558]: I0120 16:43:06.725458 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f5t7h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a318f03d0df490cb7bae11819c52eca011d163081aeecff1b2dd8f72caabbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",
\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e77617c69434ce20c448b525c7e86f1ece18d51ce6e14167b4ca7e99de8e5709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e77617c69434ce20c448b525c7e86f1ece18d51ce6e14167b4ca7e99de8e5709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://453c67434281e0261ec6799b30a548a01efd72d7df96632409e745b2e6a94546\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://453c67434281e0261ec6799b30a548a01efd72d7df96632409e745b2e6a94546\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:06Z
\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06eab4daa6c8b6a564cf1bb1403b7b3b259a234ee09f23b4f4bc0628c2d58e69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06eab4daa6c8b6a564cf1bb1403b7b3b259a234ee09f23b4f4bc0628c2d58e69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1291ac09fc6ae2a79fdeaa0fc47272f61b97f45f67ed3f7c90bdfdbfe3519ff2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1291ac09fc6ae2a79fdeaa0fc47272f61b97f45f67ed3f7c90bdfdbfe3519ff2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe484514f50a14e485d4a97ea3ce9401c77f3f4dae4717719be7ed6c4d7b1a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8
ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe484514f50a14e485d4a97ea3ce9401c77f3f4dae4717719be7ed6c4d7b1a59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f5t7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:43:06Z is after 2025-08-24T17:21:41Z" Jan 20 16:43:06 crc kubenswrapper[4558]: I0120 16:43:06.732747 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9wrq6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30032328-bd33-4073-9366-e10bc5e2aa77\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6tz4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6tz4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:17Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9wrq6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:43:06Z is after 2025-08-24T17:21:41Z" Jan 20 16:43:06 crc kubenswrapper[4558]: I0120 16:43:06.815768 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:06 crc kubenswrapper[4558]: I0120 16:43:06.815802 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:06 crc kubenswrapper[4558]: I0120 16:43:06.815812 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:06 crc kubenswrapper[4558]: I0120 16:43:06.815824 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:06 crc kubenswrapper[4558]: I0120 16:43:06.815833 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:06Z","lastTransitionTime":"2026-01-20T16:43:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:06 crc kubenswrapper[4558]: I0120 16:43:06.917890 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:06 crc kubenswrapper[4558]: I0120 16:43:06.918145 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:06 crc kubenswrapper[4558]: I0120 16:43:06.918198 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:06 crc kubenswrapper[4558]: I0120 16:43:06.918212 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:06 crc kubenswrapper[4558]: I0120 16:43:06.918220 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:06Z","lastTransitionTime":"2026-01-20T16:43:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:07 crc kubenswrapper[4558]: I0120 16:43:07.020197 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:07 crc kubenswrapper[4558]: I0120 16:43:07.020224 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:07 crc kubenswrapper[4558]: I0120 16:43:07.020233 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:07 crc kubenswrapper[4558]: I0120 16:43:07.020244 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:07 crc kubenswrapper[4558]: I0120 16:43:07.020253 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:07Z","lastTransitionTime":"2026-01-20T16:43:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:07 crc kubenswrapper[4558]: I0120 16:43:07.123224 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:07 crc kubenswrapper[4558]: I0120 16:43:07.123288 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:07 crc kubenswrapper[4558]: I0120 16:43:07.123297 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:07 crc kubenswrapper[4558]: I0120 16:43:07.123310 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:07 crc kubenswrapper[4558]: I0120 16:43:07.123321 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:07Z","lastTransitionTime":"2026-01-20T16:43:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:07 crc kubenswrapper[4558]: I0120 16:43:07.225334 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:07 crc kubenswrapper[4558]: I0120 16:43:07.225387 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:07 crc kubenswrapper[4558]: I0120 16:43:07.225395 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:07 crc kubenswrapper[4558]: I0120 16:43:07.225406 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:07 crc kubenswrapper[4558]: I0120 16:43:07.225414 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:07Z","lastTransitionTime":"2026-01-20T16:43:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:07 crc kubenswrapper[4558]: I0120 16:43:07.327367 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:07 crc kubenswrapper[4558]: I0120 16:43:07.327402 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:07 crc kubenswrapper[4558]: I0120 16:43:07.327410 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:07 crc kubenswrapper[4558]: I0120 16:43:07.327424 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:07 crc kubenswrapper[4558]: I0120 16:43:07.327432 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:07Z","lastTransitionTime":"2026-01-20T16:43:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:07 crc kubenswrapper[4558]: I0120 16:43:07.429394 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:07 crc kubenswrapper[4558]: I0120 16:43:07.429432 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:07 crc kubenswrapper[4558]: I0120 16:43:07.429441 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:07 crc kubenswrapper[4558]: I0120 16:43:07.429454 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:07 crc kubenswrapper[4558]: I0120 16:43:07.429462 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:07Z","lastTransitionTime":"2026-01-20T16:43:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:07 crc kubenswrapper[4558]: I0120 16:43:07.531681 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:07 crc kubenswrapper[4558]: I0120 16:43:07.531719 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:07 crc kubenswrapper[4558]: I0120 16:43:07.531729 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:07 crc kubenswrapper[4558]: I0120 16:43:07.531741 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:07 crc kubenswrapper[4558]: I0120 16:43:07.531752 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:07Z","lastTransitionTime":"2026-01-20T16:43:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:07 crc kubenswrapper[4558]: I0120 16:43:07.565137 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9wrq6" Jan 20 16:43:07 crc kubenswrapper[4558]: E0120 16:43:07.565256 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9wrq6" podUID="30032328-bd33-4073-9366-e10bc5e2aa77" Jan 20 16:43:07 crc kubenswrapper[4558]: I0120 16:43:07.579611 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-24 03:31:24.313093036 +0000 UTC Jan 20 16:43:07 crc kubenswrapper[4558]: I0120 16:43:07.633236 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:07 crc kubenswrapper[4558]: I0120 16:43:07.633264 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:07 crc kubenswrapper[4558]: I0120 16:43:07.633272 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:07 crc kubenswrapper[4558]: I0120 16:43:07.633282 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:07 crc kubenswrapper[4558]: I0120 16:43:07.633291 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:07Z","lastTransitionTime":"2026-01-20T16:43:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:07 crc kubenswrapper[4558]: I0120 16:43:07.734879 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:07 crc kubenswrapper[4558]: I0120 16:43:07.734925 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:07 crc kubenswrapper[4558]: I0120 16:43:07.734935 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:07 crc kubenswrapper[4558]: I0120 16:43:07.734951 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:07 crc kubenswrapper[4558]: I0120 16:43:07.734961 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:07Z","lastTransitionTime":"2026-01-20T16:43:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:07 crc kubenswrapper[4558]: I0120 16:43:07.837087 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:07 crc kubenswrapper[4558]: I0120 16:43:07.837123 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:07 crc kubenswrapper[4558]: I0120 16:43:07.837131 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:07 crc kubenswrapper[4558]: I0120 16:43:07.837143 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:07 crc kubenswrapper[4558]: I0120 16:43:07.837151 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:07Z","lastTransitionTime":"2026-01-20T16:43:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:07 crc kubenswrapper[4558]: I0120 16:43:07.939403 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:07 crc kubenswrapper[4558]: I0120 16:43:07.939448 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:07 crc kubenswrapper[4558]: I0120 16:43:07.939459 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:07 crc kubenswrapper[4558]: I0120 16:43:07.939474 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:07 crc kubenswrapper[4558]: I0120 16:43:07.939484 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:07Z","lastTransitionTime":"2026-01-20T16:43:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:08 crc kubenswrapper[4558]: I0120 16:43:08.041865 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:08 crc kubenswrapper[4558]: I0120 16:43:08.041900 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:08 crc kubenswrapper[4558]: I0120 16:43:08.041909 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:08 crc kubenswrapper[4558]: I0120 16:43:08.041921 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:08 crc kubenswrapper[4558]: I0120 16:43:08.041930 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:08Z","lastTransitionTime":"2026-01-20T16:43:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:08 crc kubenswrapper[4558]: I0120 16:43:08.143898 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:08 crc kubenswrapper[4558]: I0120 16:43:08.143936 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:08 crc kubenswrapper[4558]: I0120 16:43:08.143944 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:08 crc kubenswrapper[4558]: I0120 16:43:08.143956 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:08 crc kubenswrapper[4558]: I0120 16:43:08.143964 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:08Z","lastTransitionTime":"2026-01-20T16:43:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:08 crc kubenswrapper[4558]: I0120 16:43:08.246318 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:08 crc kubenswrapper[4558]: I0120 16:43:08.246355 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:08 crc kubenswrapper[4558]: I0120 16:43:08.246365 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:08 crc kubenswrapper[4558]: I0120 16:43:08.246381 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:08 crc kubenswrapper[4558]: I0120 16:43:08.246390 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:08Z","lastTransitionTime":"2026-01-20T16:43:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:08 crc kubenswrapper[4558]: I0120 16:43:08.348799 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:08 crc kubenswrapper[4558]: I0120 16:43:08.348839 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:08 crc kubenswrapper[4558]: I0120 16:43:08.348850 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:08 crc kubenswrapper[4558]: I0120 16:43:08.348862 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:08 crc kubenswrapper[4558]: I0120 16:43:08.348870 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:08Z","lastTransitionTime":"2026-01-20T16:43:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:08 crc kubenswrapper[4558]: I0120 16:43:08.451579 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:08 crc kubenswrapper[4558]: I0120 16:43:08.451633 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:08 crc kubenswrapper[4558]: I0120 16:43:08.451646 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:08 crc kubenswrapper[4558]: I0120 16:43:08.451661 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:08 crc kubenswrapper[4558]: I0120 16:43:08.451670 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:08Z","lastTransitionTime":"2026-01-20T16:43:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:08 crc kubenswrapper[4558]: I0120 16:43:08.460091 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 20 16:43:08 crc kubenswrapper[4558]: E0120 16:43:08.460206 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-20 16:44:12.460187771 +0000 UTC m=+146.220525738 (durationBeforeRetry 1m4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:43:08 crc kubenswrapper[4558]: I0120 16:43:08.553612 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:08 crc kubenswrapper[4558]: I0120 16:43:08.553647 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:08 crc kubenswrapper[4558]: I0120 16:43:08.553656 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:08 crc kubenswrapper[4558]: I0120 16:43:08.553668 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:08 crc kubenswrapper[4558]: I0120 16:43:08.553677 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:08Z","lastTransitionTime":"2026-01-20T16:43:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:08 crc kubenswrapper[4558]: I0120 16:43:08.561466 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 20 16:43:08 crc kubenswrapper[4558]: I0120 16:43:08.561499 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:43:08 crc kubenswrapper[4558]: I0120 16:43:08.561518 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:43:08 crc kubenswrapper[4558]: I0120 16:43:08.561539 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 20 16:43:08 crc kubenswrapper[4558]: E0120 16:43:08.561647 4558 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object 
"openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 20 16:43:08 crc kubenswrapper[4558]: E0120 16:43:08.561667 4558 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 20 16:43:08 crc kubenswrapper[4558]: E0120 16:43:08.561676 4558 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 20 16:43:08 crc kubenswrapper[4558]: E0120 16:43:08.561713 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-20 16:44:12.561701445 +0000 UTC m=+146.322039413 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 20 16:43:08 crc kubenswrapper[4558]: E0120 16:43:08.561678 4558 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 20 16:43:08 crc kubenswrapper[4558]: E0120 16:43:08.561784 4558 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 20 16:43:08 crc kubenswrapper[4558]: E0120 16:43:08.561809 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-20 16:44:12.561796714 +0000 UTC m=+146.322134681 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 20 16:43:08 crc kubenswrapper[4558]: E0120 16:43:08.561651 4558 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 20 16:43:08 crc kubenswrapper[4558]: E0120 16:43:08.561941 4558 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 20 16:43:08 crc kubenswrapper[4558]: E0120 16:43:08.561944 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-20 16:44:12.561921379 +0000 UTC m=+146.322259356 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 20 16:43:08 crc kubenswrapper[4558]: E0120 16:43:08.561953 4558 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 20 16:43:08 crc kubenswrapper[4558]: E0120 16:43:08.561991 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-20 16:44:12.561984407 +0000 UTC m=+146.322322375 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 20 16:43:08 crc kubenswrapper[4558]: I0120 16:43:08.565265 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:43:08 crc kubenswrapper[4558]: I0120 16:43:08.565354 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 20 16:43:08 crc kubenswrapper[4558]: E0120 16:43:08.565444 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 20 16:43:08 crc kubenswrapper[4558]: I0120 16:43:08.565503 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 20 16:43:08 crc kubenswrapper[4558]: E0120 16:43:08.565613 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 20 16:43:08 crc kubenswrapper[4558]: E0120 16:43:08.565726 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
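The "No retries permitted until ... (durationBeforeRetry 1m4s)" entries show the volume manager backing off after repeated mount and unmount failures; 1m4s is consistent with a backoff that starts at 0.5s and doubles on every failure (0.5s * 2**7 = 64s). A sketch of that pattern, with illustrative constants that are assumptions rather than values taken from the kubelet source:

    def duration_before_retry(failures, base=0.5, factor=2.0, cap=120.0):
        # Exponential backoff with an upper bound, as an illustration only.
        return min(base * factor ** failures, cap)

    for n in range(9):
        print(n, f"{duration_before_retry(n):.1f}s")
    # failure count 7 -> 64.0s, i.e. the 1m4s seen in the log.
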
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 20 16:43:08 crc kubenswrapper[4558]: I0120 16:43:08.579748 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-31 01:59:34.385033619 +0000 UTC Jan 20 16:43:08 crc kubenswrapper[4558]: I0120 16:43:08.655566 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:08 crc kubenswrapper[4558]: I0120 16:43:08.655606 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:08 crc kubenswrapper[4558]: I0120 16:43:08.655616 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:08 crc kubenswrapper[4558]: I0120 16:43:08.655646 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:08 crc kubenswrapper[4558]: I0120 16:43:08.655655 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:08Z","lastTransitionTime":"2026-01-20T16:43:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:08 crc kubenswrapper[4558]: I0120 16:43:08.758120 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:08 crc kubenswrapper[4558]: I0120 16:43:08.758188 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:08 crc kubenswrapper[4558]: I0120 16:43:08.758197 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:08 crc kubenswrapper[4558]: I0120 16:43:08.758209 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:08 crc kubenswrapper[4558]: I0120 16:43:08.758219 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:08Z","lastTransitionTime":"2026-01-20T16:43:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:08 crc kubenswrapper[4558]: I0120 16:43:08.860359 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:08 crc kubenswrapper[4558]: I0120 16:43:08.860388 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:08 crc kubenswrapper[4558]: I0120 16:43:08.860397 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:08 crc kubenswrapper[4558]: I0120 16:43:08.860410 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:08 crc kubenswrapper[4558]: I0120 16:43:08.860436 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:08Z","lastTransitionTime":"2026-01-20T16:43:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:08 crc kubenswrapper[4558]: I0120 16:43:08.962299 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:08 crc kubenswrapper[4558]: I0120 16:43:08.962363 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:08 crc kubenswrapper[4558]: I0120 16:43:08.962372 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:08 crc kubenswrapper[4558]: I0120 16:43:08.962384 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:08 crc kubenswrapper[4558]: I0120 16:43:08.962393 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:08Z","lastTransitionTime":"2026-01-20T16:43:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.003919 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.003954 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.003962 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.003973 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.003981 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:09Z","lastTransitionTime":"2026-01-20T16:43:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:09 crc kubenswrapper[4558]: E0120 16:43:09.013020 4558 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:43:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:43:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:43:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:43:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:43:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:43:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:43:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:43:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"bf027402-7a62-478b-8585-220c98495823\\\",\\\"systemUUID\\\":\\\"1def282c-e24e-4bc0-9a1b-d7cc30756d3d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:43:09Z is after 2025-08-24T17:21:41Z" Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.015516 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.015550 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.015559 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.015572 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.015580 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:09Z","lastTransitionTime":"2026-01-20T16:43:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:09 crc kubenswrapper[4558]: E0120 16:43:09.024098 4558 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:43:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:43:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:43:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:43:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:43:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:43:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:43:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:43:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"bf027402-7a62-478b-8585-220c98495823\\\",\\\"systemUUID\\\":\\\"1def282c-e24e-4bc0-9a1b-d7cc30756d3d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:43:09Z is after 2025-08-24T17:21:41Z" Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.026682 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.026713 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.026724 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.026734 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.026742 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:09Z","lastTransitionTime":"2026-01-20T16:43:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:09 crc kubenswrapper[4558]: E0120 16:43:09.035047 4558 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:43:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:43:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:43:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:43:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:43:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:43:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:43:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:43:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"bf027402-7a62-478b-8585-220c98495823\\\",\\\"systemUUID\\\":\\\"1def282c-e24e-4bc0-9a1b-d7cc30756d3d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:43:09Z is after 2025-08-24T17:21:41Z" Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.037419 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.037446 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.037456 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.037469 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.037477 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:09Z","lastTransitionTime":"2026-01-20T16:43:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:09 crc kubenswrapper[4558]: E0120 16:43:09.045454 4558 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:43:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:43:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:43:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:43:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:43:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:43:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:43:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:43:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"bf027402-7a62-478b-8585-220c98495823\\\",\\\"systemUUID\\\":\\\"1def282c-e24e-4bc0-9a1b-d7cc30756d3d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:43:09Z is after 2025-08-24T17:21:41Z" Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.047994 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.048020 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.048027 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.048036 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.048044 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:09Z","lastTransitionTime":"2026-01-20T16:43:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:09 crc kubenswrapper[4558]: E0120 16:43:09.056138 4558 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:43:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:43:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:43:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:43:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:43:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:43:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:43:09Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:43:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"bf027402-7a62-478b-8585-220c98495823\\\",\\\"systemUUID\\\":\\\"1def282c-e24e-4bc0-9a1b-d7cc30756d3d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:43:09Z is after 2025-08-24T17:21:41Z" Jan 20 16:43:09 crc kubenswrapper[4558]: E0120 16:43:09.056305 4558 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.070564 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.070624 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.070638 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.070649 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.070659 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:09Z","lastTransitionTime":"2026-01-20T16:43:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.172557 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.172645 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.172658 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.172680 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.172695 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:09Z","lastTransitionTime":"2026-01-20T16:43:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.274498 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.274539 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.274548 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.274562 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.274571 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:09Z","lastTransitionTime":"2026-01-20T16:43:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.377292 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.377327 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.377336 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.377367 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.377377 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:09Z","lastTransitionTime":"2026-01-20T16:43:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.479098 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.479145 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.479153 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.479189 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.479199 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:09Z","lastTransitionTime":"2026-01-20T16:43:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.565487 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9wrq6" Jan 20 16:43:09 crc kubenswrapper[4558]: E0120 16:43:09.565642 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9wrq6" podUID="30032328-bd33-4073-9366-e10bc5e2aa77" Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.579923 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-02 08:32:40.631924463 +0000 UTC Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.581095 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.581125 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.581134 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.581147 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.581156 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:09Z","lastTransitionTime":"2026-01-20T16:43:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.682808 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.682835 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.682843 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.682856 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.682864 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:09Z","lastTransitionTime":"2026-01-20T16:43:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.784855 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.784900 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.784908 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.784923 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.784930 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:09Z","lastTransitionTime":"2026-01-20T16:43:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.886986 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.887034 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.887042 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.887055 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.887063 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:09Z","lastTransitionTime":"2026-01-20T16:43:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.988708 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.988742 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.988750 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.988762 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:09 crc kubenswrapper[4558]: I0120 16:43:09.988770 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:09Z","lastTransitionTime":"2026-01-20T16:43:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:10 crc kubenswrapper[4558]: I0120 16:43:10.090601 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:10 crc kubenswrapper[4558]: I0120 16:43:10.090650 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:10 crc kubenswrapper[4558]: I0120 16:43:10.090659 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:10 crc kubenswrapper[4558]: I0120 16:43:10.090671 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:10 crc kubenswrapper[4558]: I0120 16:43:10.090680 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:10Z","lastTransitionTime":"2026-01-20T16:43:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:10 crc kubenswrapper[4558]: I0120 16:43:10.192367 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:10 crc kubenswrapper[4558]: I0120 16:43:10.192402 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:10 crc kubenswrapper[4558]: I0120 16:43:10.192410 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:10 crc kubenswrapper[4558]: I0120 16:43:10.192425 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:10 crc kubenswrapper[4558]: I0120 16:43:10.192433 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:10Z","lastTransitionTime":"2026-01-20T16:43:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:10 crc kubenswrapper[4558]: I0120 16:43:10.294088 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:10 crc kubenswrapper[4558]: I0120 16:43:10.294122 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:10 crc kubenswrapper[4558]: I0120 16:43:10.294131 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:10 crc kubenswrapper[4558]: I0120 16:43:10.294143 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:10 crc kubenswrapper[4558]: I0120 16:43:10.294150 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:10Z","lastTransitionTime":"2026-01-20T16:43:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:10 crc kubenswrapper[4558]: I0120 16:43:10.395940 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:10 crc kubenswrapper[4558]: I0120 16:43:10.395972 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:10 crc kubenswrapper[4558]: I0120 16:43:10.395982 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:10 crc kubenswrapper[4558]: I0120 16:43:10.395994 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:10 crc kubenswrapper[4558]: I0120 16:43:10.396003 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:10Z","lastTransitionTime":"2026-01-20T16:43:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:10 crc kubenswrapper[4558]: I0120 16:43:10.498276 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:10 crc kubenswrapper[4558]: I0120 16:43:10.498309 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:10 crc kubenswrapper[4558]: I0120 16:43:10.498319 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:10 crc kubenswrapper[4558]: I0120 16:43:10.498334 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:10 crc kubenswrapper[4558]: I0120 16:43:10.498342 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:10Z","lastTransitionTime":"2026-01-20T16:43:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:10 crc kubenswrapper[4558]: I0120 16:43:10.565983 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:43:10 crc kubenswrapper[4558]: I0120 16:43:10.566034 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 20 16:43:10 crc kubenswrapper[4558]: I0120 16:43:10.565997 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 20 16:43:10 crc kubenswrapper[4558]: E0120 16:43:10.566103 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 20 16:43:10 crc kubenswrapper[4558]: E0120 16:43:10.566222 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 20 16:43:10 crc kubenswrapper[4558]: E0120 16:43:10.566295 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 20 16:43:10 crc kubenswrapper[4558]: I0120 16:43:10.580940 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-25 04:28:17.283091209 +0000 UTC Jan 20 16:43:10 crc kubenswrapper[4558]: I0120 16:43:10.600135 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:10 crc kubenswrapper[4558]: I0120 16:43:10.600186 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:10 crc kubenswrapper[4558]: I0120 16:43:10.600197 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:10 crc kubenswrapper[4558]: I0120 16:43:10.600210 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:10 crc kubenswrapper[4558]: I0120 16:43:10.600222 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:10Z","lastTransitionTime":"2026-01-20T16:43:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:10 crc kubenswrapper[4558]: I0120 16:43:10.702467 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:10 crc kubenswrapper[4558]: I0120 16:43:10.702496 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:10 crc kubenswrapper[4558]: I0120 16:43:10.702521 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:10 crc kubenswrapper[4558]: I0120 16:43:10.702534 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:10 crc kubenswrapper[4558]: I0120 16:43:10.702542 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:10Z","lastTransitionTime":"2026-01-20T16:43:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:10 crc kubenswrapper[4558]: I0120 16:43:10.804637 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:10 crc kubenswrapper[4558]: I0120 16:43:10.804672 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:10 crc kubenswrapper[4558]: I0120 16:43:10.804682 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:10 crc kubenswrapper[4558]: I0120 16:43:10.804694 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:10 crc kubenswrapper[4558]: I0120 16:43:10.804704 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:10Z","lastTransitionTime":"2026-01-20T16:43:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:10 crc kubenswrapper[4558]: I0120 16:43:10.906858 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:10 crc kubenswrapper[4558]: I0120 16:43:10.906890 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:10 crc kubenswrapper[4558]: I0120 16:43:10.906898 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:10 crc kubenswrapper[4558]: I0120 16:43:10.906911 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:10 crc kubenswrapper[4558]: I0120 16:43:10.906919 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:10Z","lastTransitionTime":"2026-01-20T16:43:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:11 crc kubenswrapper[4558]: I0120 16:43:11.009157 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:11 crc kubenswrapper[4558]: I0120 16:43:11.009488 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:11 crc kubenswrapper[4558]: I0120 16:43:11.009498 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:11 crc kubenswrapper[4558]: I0120 16:43:11.009515 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:11 crc kubenswrapper[4558]: I0120 16:43:11.009525 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:11Z","lastTransitionTime":"2026-01-20T16:43:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:11 crc kubenswrapper[4558]: I0120 16:43:11.110856 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:11 crc kubenswrapper[4558]: I0120 16:43:11.110900 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:11 crc kubenswrapper[4558]: I0120 16:43:11.110909 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:11 crc kubenswrapper[4558]: I0120 16:43:11.110923 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:11 crc kubenswrapper[4558]: I0120 16:43:11.110931 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:11Z","lastTransitionTime":"2026-01-20T16:43:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:11 crc kubenswrapper[4558]: I0120 16:43:11.212568 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:11 crc kubenswrapper[4558]: I0120 16:43:11.212623 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:11 crc kubenswrapper[4558]: I0120 16:43:11.212641 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:11 crc kubenswrapper[4558]: I0120 16:43:11.212657 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:11 crc kubenswrapper[4558]: I0120 16:43:11.212667 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:11Z","lastTransitionTime":"2026-01-20T16:43:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:11 crc kubenswrapper[4558]: I0120 16:43:11.314406 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:11 crc kubenswrapper[4558]: I0120 16:43:11.314435 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:11 crc kubenswrapper[4558]: I0120 16:43:11.314451 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:11 crc kubenswrapper[4558]: I0120 16:43:11.314463 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:11 crc kubenswrapper[4558]: I0120 16:43:11.314471 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:11Z","lastTransitionTime":"2026-01-20T16:43:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:11 crc kubenswrapper[4558]: I0120 16:43:11.416452 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:11 crc kubenswrapper[4558]: I0120 16:43:11.416501 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:11 crc kubenswrapper[4558]: I0120 16:43:11.416509 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:11 crc kubenswrapper[4558]: I0120 16:43:11.416521 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:11 crc kubenswrapper[4558]: I0120 16:43:11.416530 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:11Z","lastTransitionTime":"2026-01-20T16:43:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:11 crc kubenswrapper[4558]: I0120 16:43:11.518767 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:11 crc kubenswrapper[4558]: I0120 16:43:11.518805 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:11 crc kubenswrapper[4558]: I0120 16:43:11.518814 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:11 crc kubenswrapper[4558]: I0120 16:43:11.518825 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:11 crc kubenswrapper[4558]: I0120 16:43:11.518833 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:11Z","lastTransitionTime":"2026-01-20T16:43:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:11 crc kubenswrapper[4558]: I0120 16:43:11.565194 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9wrq6" Jan 20 16:43:11 crc kubenswrapper[4558]: E0120 16:43:11.565391 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9wrq6" podUID="30032328-bd33-4073-9366-e10bc5e2aa77" Jan 20 16:43:11 crc kubenswrapper[4558]: I0120 16:43:11.581836 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-01 21:37:22.731190071 +0000 UTC Jan 20 16:43:11 crc kubenswrapper[4558]: I0120 16:43:11.621091 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:11 crc kubenswrapper[4558]: I0120 16:43:11.621145 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:11 crc kubenswrapper[4558]: I0120 16:43:11.621183 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:11 crc kubenswrapper[4558]: I0120 16:43:11.621205 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:11 crc kubenswrapper[4558]: I0120 16:43:11.621222 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:11Z","lastTransitionTime":"2026-01-20T16:43:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:11 crc kubenswrapper[4558]: I0120 16:43:11.724009 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:11 crc kubenswrapper[4558]: I0120 16:43:11.724262 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:11 crc kubenswrapper[4558]: I0120 16:43:11.724342 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:11 crc kubenswrapper[4558]: I0120 16:43:11.724413 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:11 crc kubenswrapper[4558]: I0120 16:43:11.724464 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:11Z","lastTransitionTime":"2026-01-20T16:43:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:11 crc kubenswrapper[4558]: I0120 16:43:11.826124 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:11 crc kubenswrapper[4558]: I0120 16:43:11.826191 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:11 crc kubenswrapper[4558]: I0120 16:43:11.826202 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:11 crc kubenswrapper[4558]: I0120 16:43:11.826217 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:11 crc kubenswrapper[4558]: I0120 16:43:11.826226 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:11Z","lastTransitionTime":"2026-01-20T16:43:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:11 crc kubenswrapper[4558]: I0120 16:43:11.928503 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:11 crc kubenswrapper[4558]: I0120 16:43:11.928579 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:11 crc kubenswrapper[4558]: I0120 16:43:11.928608 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:11 crc kubenswrapper[4558]: I0120 16:43:11.928633 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:11 crc kubenswrapper[4558]: I0120 16:43:11.928659 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:11Z","lastTransitionTime":"2026-01-20T16:43:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:12 crc kubenswrapper[4558]: I0120 16:43:12.030918 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:12 crc kubenswrapper[4558]: I0120 16:43:12.030952 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:12 crc kubenswrapper[4558]: I0120 16:43:12.030960 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:12 crc kubenswrapper[4558]: I0120 16:43:12.030971 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:12 crc kubenswrapper[4558]: I0120 16:43:12.030979 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:12Z","lastTransitionTime":"2026-01-20T16:43:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:12 crc kubenswrapper[4558]: I0120 16:43:12.133908 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:12 crc kubenswrapper[4558]: I0120 16:43:12.133991 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:12 crc kubenswrapper[4558]: I0120 16:43:12.134008 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:12 crc kubenswrapper[4558]: I0120 16:43:12.134056 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:12 crc kubenswrapper[4558]: I0120 16:43:12.134069 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:12Z","lastTransitionTime":"2026-01-20T16:43:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:12 crc kubenswrapper[4558]: I0120 16:43:12.236883 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:12 crc kubenswrapper[4558]: I0120 16:43:12.236942 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:12 crc kubenswrapper[4558]: I0120 16:43:12.236954 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:12 crc kubenswrapper[4558]: I0120 16:43:12.236972 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:12 crc kubenswrapper[4558]: I0120 16:43:12.236984 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:12Z","lastTransitionTime":"2026-01-20T16:43:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:12 crc kubenswrapper[4558]: I0120 16:43:12.339608 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:12 crc kubenswrapper[4558]: I0120 16:43:12.339645 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:12 crc kubenswrapper[4558]: I0120 16:43:12.339656 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:12 crc kubenswrapper[4558]: I0120 16:43:12.339670 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:12 crc kubenswrapper[4558]: I0120 16:43:12.339697 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:12Z","lastTransitionTime":"2026-01-20T16:43:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:12 crc kubenswrapper[4558]: I0120 16:43:12.441509 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:12 crc kubenswrapper[4558]: I0120 16:43:12.441537 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:12 crc kubenswrapper[4558]: I0120 16:43:12.441544 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:12 crc kubenswrapper[4558]: I0120 16:43:12.441556 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:12 crc kubenswrapper[4558]: I0120 16:43:12.441565 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:12Z","lastTransitionTime":"2026-01-20T16:43:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:12 crc kubenswrapper[4558]: I0120 16:43:12.543771 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:12 crc kubenswrapper[4558]: I0120 16:43:12.543826 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:12 crc kubenswrapper[4558]: I0120 16:43:12.543836 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:12 crc kubenswrapper[4558]: I0120 16:43:12.543850 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:12 crc kubenswrapper[4558]: I0120 16:43:12.543858 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:12Z","lastTransitionTime":"2026-01-20T16:43:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:12 crc kubenswrapper[4558]: I0120 16:43:12.565059 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:43:12 crc kubenswrapper[4558]: I0120 16:43:12.565067 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 20 16:43:12 crc kubenswrapper[4558]: E0120 16:43:12.565193 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 20 16:43:12 crc kubenswrapper[4558]: E0120 16:43:12.565271 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 20 16:43:12 crc kubenswrapper[4558]: I0120 16:43:12.565067 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 20 16:43:12 crc kubenswrapper[4558]: E0120 16:43:12.565336 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 20 16:43:12 crc kubenswrapper[4558]: I0120 16:43:12.582361 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-26 13:52:41.528153948 +0000 UTC Jan 20 16:43:12 crc kubenswrapper[4558]: I0120 16:43:12.645865 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:12 crc kubenswrapper[4558]: I0120 16:43:12.645907 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:12 crc kubenswrapper[4558]: I0120 16:43:12.645916 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:12 crc kubenswrapper[4558]: I0120 16:43:12.645927 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:12 crc kubenswrapper[4558]: I0120 16:43:12.645934 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:12Z","lastTransitionTime":"2026-01-20T16:43:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:12 crc kubenswrapper[4558]: I0120 16:43:12.748240 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:12 crc kubenswrapper[4558]: I0120 16:43:12.748283 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:12 crc kubenswrapper[4558]: I0120 16:43:12.748291 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:12 crc kubenswrapper[4558]: I0120 16:43:12.748305 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:12 crc kubenswrapper[4558]: I0120 16:43:12.748313 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:12Z","lastTransitionTime":"2026-01-20T16:43:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:12 crc kubenswrapper[4558]: I0120 16:43:12.850112 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:12 crc kubenswrapper[4558]: I0120 16:43:12.850150 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:12 crc kubenswrapper[4558]: I0120 16:43:12.850158 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:12 crc kubenswrapper[4558]: I0120 16:43:12.850189 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:12 crc kubenswrapper[4558]: I0120 16:43:12.850198 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:12Z","lastTransitionTime":"2026-01-20T16:43:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:12 crc kubenswrapper[4558]: I0120 16:43:12.952306 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:12 crc kubenswrapper[4558]: I0120 16:43:12.952341 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:12 crc kubenswrapper[4558]: I0120 16:43:12.952349 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:12 crc kubenswrapper[4558]: I0120 16:43:12.952362 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:12 crc kubenswrapper[4558]: I0120 16:43:12.952369 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:12Z","lastTransitionTime":"2026-01-20T16:43:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:13 crc kubenswrapper[4558]: I0120 16:43:13.053654 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:13 crc kubenswrapper[4558]: I0120 16:43:13.053687 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:13 crc kubenswrapper[4558]: I0120 16:43:13.053696 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:13 crc kubenswrapper[4558]: I0120 16:43:13.053707 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:13 crc kubenswrapper[4558]: I0120 16:43:13.053716 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:13Z","lastTransitionTime":"2026-01-20T16:43:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:13 crc kubenswrapper[4558]: I0120 16:43:13.156099 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:13 crc kubenswrapper[4558]: I0120 16:43:13.156142 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:13 crc kubenswrapper[4558]: I0120 16:43:13.156149 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:13 crc kubenswrapper[4558]: I0120 16:43:13.156186 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:13 crc kubenswrapper[4558]: I0120 16:43:13.156195 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:13Z","lastTransitionTime":"2026-01-20T16:43:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:13 crc kubenswrapper[4558]: I0120 16:43:13.257909 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:13 crc kubenswrapper[4558]: I0120 16:43:13.257946 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:13 crc kubenswrapper[4558]: I0120 16:43:13.257954 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:13 crc kubenswrapper[4558]: I0120 16:43:13.257967 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:13 crc kubenswrapper[4558]: I0120 16:43:13.257975 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:13Z","lastTransitionTime":"2026-01-20T16:43:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:13 crc kubenswrapper[4558]: I0120 16:43:13.359983 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:13 crc kubenswrapper[4558]: I0120 16:43:13.360019 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:13 crc kubenswrapper[4558]: I0120 16:43:13.360027 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:13 crc kubenswrapper[4558]: I0120 16:43:13.360041 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:13 crc kubenswrapper[4558]: I0120 16:43:13.360050 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:13Z","lastTransitionTime":"2026-01-20T16:43:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:13 crc kubenswrapper[4558]: I0120 16:43:13.461322 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:13 crc kubenswrapper[4558]: I0120 16:43:13.461365 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:13 crc kubenswrapper[4558]: I0120 16:43:13.461376 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:13 crc kubenswrapper[4558]: I0120 16:43:13.461390 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:13 crc kubenswrapper[4558]: I0120 16:43:13.461398 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:13Z","lastTransitionTime":"2026-01-20T16:43:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:13 crc kubenswrapper[4558]: I0120 16:43:13.563428 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:13 crc kubenswrapper[4558]: I0120 16:43:13.563465 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:13 crc kubenswrapper[4558]: I0120 16:43:13.563474 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:13 crc kubenswrapper[4558]: I0120 16:43:13.563486 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:13 crc kubenswrapper[4558]: I0120 16:43:13.563494 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:13Z","lastTransitionTime":"2026-01-20T16:43:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:13 crc kubenswrapper[4558]: I0120 16:43:13.565701 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9wrq6" Jan 20 16:43:13 crc kubenswrapper[4558]: E0120 16:43:13.565794 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9wrq6" podUID="30032328-bd33-4073-9366-e10bc5e2aa77" Jan 20 16:43:13 crc kubenswrapper[4558]: I0120 16:43:13.583016 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-23 13:51:22.929893488 +0000 UTC Jan 20 16:43:13 crc kubenswrapper[4558]: I0120 16:43:13.665287 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:13 crc kubenswrapper[4558]: I0120 16:43:13.665317 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:13 crc kubenswrapper[4558]: I0120 16:43:13.665328 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:13 crc kubenswrapper[4558]: I0120 16:43:13.665358 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:13 crc kubenswrapper[4558]: I0120 16:43:13.665367 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:13Z","lastTransitionTime":"2026-01-20T16:43:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:13 crc kubenswrapper[4558]: I0120 16:43:13.766720 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:13 crc kubenswrapper[4558]: I0120 16:43:13.766746 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:13 crc kubenswrapper[4558]: I0120 16:43:13.766754 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:13 crc kubenswrapper[4558]: I0120 16:43:13.766765 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:13 crc kubenswrapper[4558]: I0120 16:43:13.766772 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:13Z","lastTransitionTime":"2026-01-20T16:43:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:13 crc kubenswrapper[4558]: I0120 16:43:13.868687 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:13 crc kubenswrapper[4558]: I0120 16:43:13.868726 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:13 crc kubenswrapper[4558]: I0120 16:43:13.868737 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:13 crc kubenswrapper[4558]: I0120 16:43:13.868749 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:13 crc kubenswrapper[4558]: I0120 16:43:13.868758 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:13Z","lastTransitionTime":"2026-01-20T16:43:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:13 crc kubenswrapper[4558]: I0120 16:43:13.971344 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:13 crc kubenswrapper[4558]: I0120 16:43:13.971374 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:13 crc kubenswrapper[4558]: I0120 16:43:13.971399 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:13 crc kubenswrapper[4558]: I0120 16:43:13.971413 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:13 crc kubenswrapper[4558]: I0120 16:43:13.971423 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:13Z","lastTransitionTime":"2026-01-20T16:43:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:14 crc kubenswrapper[4558]: I0120 16:43:14.073143 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:14 crc kubenswrapper[4558]: I0120 16:43:14.073182 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:14 crc kubenswrapper[4558]: I0120 16:43:14.073191 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:14 crc kubenswrapper[4558]: I0120 16:43:14.073202 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:14 crc kubenswrapper[4558]: I0120 16:43:14.073209 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:14Z","lastTransitionTime":"2026-01-20T16:43:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:14 crc kubenswrapper[4558]: I0120 16:43:14.175535 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:14 crc kubenswrapper[4558]: I0120 16:43:14.175565 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:14 crc kubenswrapper[4558]: I0120 16:43:14.175599 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:14 crc kubenswrapper[4558]: I0120 16:43:14.175621 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:14 crc kubenswrapper[4558]: I0120 16:43:14.175631 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:14Z","lastTransitionTime":"2026-01-20T16:43:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:14 crc kubenswrapper[4558]: I0120 16:43:14.277269 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:14 crc kubenswrapper[4558]: I0120 16:43:14.277301 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:14 crc kubenswrapper[4558]: I0120 16:43:14.277312 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:14 crc kubenswrapper[4558]: I0120 16:43:14.277345 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:14 crc kubenswrapper[4558]: I0120 16:43:14.277353 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:14Z","lastTransitionTime":"2026-01-20T16:43:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:14 crc kubenswrapper[4558]: I0120 16:43:14.379315 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:14 crc kubenswrapper[4558]: I0120 16:43:14.379370 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:14 crc kubenswrapper[4558]: I0120 16:43:14.379381 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:14 crc kubenswrapper[4558]: I0120 16:43:14.379393 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:14 crc kubenswrapper[4558]: I0120 16:43:14.379401 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:14Z","lastTransitionTime":"2026-01-20T16:43:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:14 crc kubenswrapper[4558]: I0120 16:43:14.481268 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:14 crc kubenswrapper[4558]: I0120 16:43:14.481302 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:14 crc kubenswrapper[4558]: I0120 16:43:14.481311 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:14 crc kubenswrapper[4558]: I0120 16:43:14.481323 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:14 crc kubenswrapper[4558]: I0120 16:43:14.481331 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:14Z","lastTransitionTime":"2026-01-20T16:43:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:14 crc kubenswrapper[4558]: I0120 16:43:14.565376 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 20 16:43:14 crc kubenswrapper[4558]: I0120 16:43:14.565400 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 20 16:43:14 crc kubenswrapper[4558]: E0120 16:43:14.565492 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 20 16:43:14 crc kubenswrapper[4558]: E0120 16:43:14.565646 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 20 16:43:14 crc kubenswrapper[4558]: I0120 16:43:14.565660 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:43:14 crc kubenswrapper[4558]: E0120 16:43:14.565975 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 20 16:43:14 crc kubenswrapper[4558]: I0120 16:43:14.566184 4558 scope.go:117] "RemoveContainer" containerID="0e9f4ed928d20c9db64a689d7d14df9b81870bb0523cf5098e09d2d85aad9fca" Jan 20 16:43:14 crc kubenswrapper[4558]: E0120 16:43:14.566312 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-nv2xw_openshift-ovn-kubernetes(254129cd-82fc-4162-b671-2434bc9e2972)\"" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" podUID="254129cd-82fc-4162-b671-2434bc9e2972" Jan 20 16:43:14 crc kubenswrapper[4558]: I0120 16:43:14.582879 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:14 crc kubenswrapper[4558]: I0120 16:43:14.582911 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:14 crc kubenswrapper[4558]: I0120 16:43:14.582919 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:14 crc kubenswrapper[4558]: I0120 16:43:14.582930 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:14 crc kubenswrapper[4558]: I0120 16:43:14.582939 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:14Z","lastTransitionTime":"2026-01-20T16:43:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:14 crc kubenswrapper[4558]: I0120 16:43:14.583061 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-29 03:05:40.101782479 +0000 UTC Jan 20 16:43:14 crc kubenswrapper[4558]: I0120 16:43:14.684680 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:14 crc kubenswrapper[4558]: I0120 16:43:14.684706 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:14 crc kubenswrapper[4558]: I0120 16:43:14.684714 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:14 crc kubenswrapper[4558]: I0120 16:43:14.684726 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:14 crc kubenswrapper[4558]: I0120 16:43:14.684734 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:14Z","lastTransitionTime":"2026-01-20T16:43:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:14 crc kubenswrapper[4558]: I0120 16:43:14.786511 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:14 crc kubenswrapper[4558]: I0120 16:43:14.786696 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:14 crc kubenswrapper[4558]: I0120 16:43:14.786761 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:14 crc kubenswrapper[4558]: I0120 16:43:14.786842 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:14 crc kubenswrapper[4558]: I0120 16:43:14.786893 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:14Z","lastTransitionTime":"2026-01-20T16:43:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:14 crc kubenswrapper[4558]: I0120 16:43:14.889064 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:14 crc kubenswrapper[4558]: I0120 16:43:14.889090 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:14 crc kubenswrapper[4558]: I0120 16:43:14.889098 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:14 crc kubenswrapper[4558]: I0120 16:43:14.889110 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:14 crc kubenswrapper[4558]: I0120 16:43:14.889118 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:14Z","lastTransitionTime":"2026-01-20T16:43:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:14 crc kubenswrapper[4558]: I0120 16:43:14.990423 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:14 crc kubenswrapper[4558]: I0120 16:43:14.990464 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:14 crc kubenswrapper[4558]: I0120 16:43:14.990475 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:14 crc kubenswrapper[4558]: I0120 16:43:14.990489 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:14 crc kubenswrapper[4558]: I0120 16:43:14.990498 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:14Z","lastTransitionTime":"2026-01-20T16:43:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:15 crc kubenswrapper[4558]: I0120 16:43:15.092766 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:15 crc kubenswrapper[4558]: I0120 16:43:15.092793 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:15 crc kubenswrapper[4558]: I0120 16:43:15.092802 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:15 crc kubenswrapper[4558]: I0120 16:43:15.092814 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:15 crc kubenswrapper[4558]: I0120 16:43:15.092821 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:15Z","lastTransitionTime":"2026-01-20T16:43:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:15 crc kubenswrapper[4558]: I0120 16:43:15.194889 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:15 crc kubenswrapper[4558]: I0120 16:43:15.195060 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:15 crc kubenswrapper[4558]: I0120 16:43:15.195128 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:15 crc kubenswrapper[4558]: I0120 16:43:15.195209 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:15 crc kubenswrapper[4558]: I0120 16:43:15.195266 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:15Z","lastTransitionTime":"2026-01-20T16:43:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:15 crc kubenswrapper[4558]: I0120 16:43:15.296914 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:15 crc kubenswrapper[4558]: I0120 16:43:15.296958 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:15 crc kubenswrapper[4558]: I0120 16:43:15.296969 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:15 crc kubenswrapper[4558]: I0120 16:43:15.296984 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:15 crc kubenswrapper[4558]: I0120 16:43:15.296995 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:15Z","lastTransitionTime":"2026-01-20T16:43:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:15 crc kubenswrapper[4558]: I0120 16:43:15.398502 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:15 crc kubenswrapper[4558]: I0120 16:43:15.398532 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:15 crc kubenswrapper[4558]: I0120 16:43:15.398539 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:15 crc kubenswrapper[4558]: I0120 16:43:15.398550 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:15 crc kubenswrapper[4558]: I0120 16:43:15.398558 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:15Z","lastTransitionTime":"2026-01-20T16:43:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:15 crc kubenswrapper[4558]: I0120 16:43:15.501195 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:15 crc kubenswrapper[4558]: I0120 16:43:15.501224 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:15 crc kubenswrapper[4558]: I0120 16:43:15.501232 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:15 crc kubenswrapper[4558]: I0120 16:43:15.501245 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:15 crc kubenswrapper[4558]: I0120 16:43:15.501254 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:15Z","lastTransitionTime":"2026-01-20T16:43:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:15 crc kubenswrapper[4558]: I0120 16:43:15.565544 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9wrq6" Jan 20 16:43:15 crc kubenswrapper[4558]: E0120 16:43:15.565670 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9wrq6" podUID="30032328-bd33-4073-9366-e10bc5e2aa77" Jan 20 16:43:15 crc kubenswrapper[4558]: I0120 16:43:15.583819 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-25 22:46:43.864751244 +0000 UTC Jan 20 16:43:15 crc kubenswrapper[4558]: I0120 16:43:15.603468 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:15 crc kubenswrapper[4558]: I0120 16:43:15.603635 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:15 crc kubenswrapper[4558]: I0120 16:43:15.603701 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:15 crc kubenswrapper[4558]: I0120 16:43:15.603778 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:15 crc kubenswrapper[4558]: I0120 16:43:15.603842 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:15Z","lastTransitionTime":"2026-01-20T16:43:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:15 crc kubenswrapper[4558]: I0120 16:43:15.705684 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:15 crc kubenswrapper[4558]: I0120 16:43:15.705876 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:15 crc kubenswrapper[4558]: I0120 16:43:15.705948 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:15 crc kubenswrapper[4558]: I0120 16:43:15.706006 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:15 crc kubenswrapper[4558]: I0120 16:43:15.706064 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:15Z","lastTransitionTime":"2026-01-20T16:43:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:15 crc kubenswrapper[4558]: I0120 16:43:15.808363 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:15 crc kubenswrapper[4558]: I0120 16:43:15.808391 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:15 crc kubenswrapper[4558]: I0120 16:43:15.808399 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:15 crc kubenswrapper[4558]: I0120 16:43:15.808411 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:15 crc kubenswrapper[4558]: I0120 16:43:15.808419 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:15Z","lastTransitionTime":"2026-01-20T16:43:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:15 crc kubenswrapper[4558]: I0120 16:43:15.910581 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:15 crc kubenswrapper[4558]: I0120 16:43:15.910640 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:15 crc kubenswrapper[4558]: I0120 16:43:15.910650 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:15 crc kubenswrapper[4558]: I0120 16:43:15.910664 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:15 crc kubenswrapper[4558]: I0120 16:43:15.910675 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:15Z","lastTransitionTime":"2026-01-20T16:43:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:16 crc kubenswrapper[4558]: I0120 16:43:16.012701 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:16 crc kubenswrapper[4558]: I0120 16:43:16.012731 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:16 crc kubenswrapper[4558]: I0120 16:43:16.012739 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:16 crc kubenswrapper[4558]: I0120 16:43:16.012752 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:16 crc kubenswrapper[4558]: I0120 16:43:16.012761 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:16Z","lastTransitionTime":"2026-01-20T16:43:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:16 crc kubenswrapper[4558]: I0120 16:43:16.114912 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:16 crc kubenswrapper[4558]: I0120 16:43:16.114952 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:16 crc kubenswrapper[4558]: I0120 16:43:16.114960 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:16 crc kubenswrapper[4558]: I0120 16:43:16.114974 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:16 crc kubenswrapper[4558]: I0120 16:43:16.114985 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:16Z","lastTransitionTime":"2026-01-20T16:43:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:16 crc kubenswrapper[4558]: I0120 16:43:16.216912 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:16 crc kubenswrapper[4558]: I0120 16:43:16.216947 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:16 crc kubenswrapper[4558]: I0120 16:43:16.216956 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:16 crc kubenswrapper[4558]: I0120 16:43:16.216985 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:16 crc kubenswrapper[4558]: I0120 16:43:16.216995 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:16Z","lastTransitionTime":"2026-01-20T16:43:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:16 crc kubenswrapper[4558]: I0120 16:43:16.318722 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:16 crc kubenswrapper[4558]: I0120 16:43:16.318757 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:16 crc kubenswrapper[4558]: I0120 16:43:16.318765 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:16 crc kubenswrapper[4558]: I0120 16:43:16.318776 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:16 crc kubenswrapper[4558]: I0120 16:43:16.318786 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:16Z","lastTransitionTime":"2026-01-20T16:43:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:16 crc kubenswrapper[4558]: I0120 16:43:16.420722 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:16 crc kubenswrapper[4558]: I0120 16:43:16.420765 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:16 crc kubenswrapper[4558]: I0120 16:43:16.420774 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:16 crc kubenswrapper[4558]: I0120 16:43:16.420787 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:16 crc kubenswrapper[4558]: I0120 16:43:16.420795 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:16Z","lastTransitionTime":"2026-01-20T16:43:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:16 crc kubenswrapper[4558]: I0120 16:43:16.522568 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:16 crc kubenswrapper[4558]: I0120 16:43:16.522653 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:16 crc kubenswrapper[4558]: I0120 16:43:16.522664 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:16 crc kubenswrapper[4558]: I0120 16:43:16.522678 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:16 crc kubenswrapper[4558]: I0120 16:43:16.522711 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:16Z","lastTransitionTime":"2026-01-20T16:43:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:16 crc kubenswrapper[4558]: I0120 16:43:16.565355 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:43:16 crc kubenswrapper[4558]: I0120 16:43:16.565400 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 20 16:43:16 crc kubenswrapper[4558]: I0120 16:43:16.565470 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 20 16:43:16 crc kubenswrapper[4558]: E0120 16:43:16.565505 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 20 16:43:16 crc kubenswrapper[4558]: E0120 16:43:16.565533 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 20 16:43:16 crc kubenswrapper[4558]: E0120 16:43:16.565578 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 20 16:43:16 crc kubenswrapper[4558]: I0120 16:43:16.575252 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60aafae7-2e6e-4d21-9ce6-8ff5382ef642\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d16b973c217656d6a8ceaf684e9dec82daeb583209943f6b2979be47ab03fda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb2e7753f75b10088eea9204511ae78e4be9a2b5be8c17ed04c3c972b13f8190\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-2
0T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a426b97002a49938b18009db49867dc9a15102707b42ec46bc64093c0ed3c2c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e4df1c3b2a1aef9adb3da64f89a1b8c0904fd22882609dbafe2fbc5718b8016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e4df1c3b2a1aef9adb3da64f89a1b8c0904fd22882609dbafe2fbc5718b8016\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:43:16Z is after 2025-08-24T17:21:41Z" Jan 20 16:43:16 crc kubenswrapper[4558]: I0120 16:43:16.584220 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-07 05:43:55.224461471 +0000 UTC Jan 20 16:43:16 crc kubenswrapper[4558]: I0120 16:43:16.590388 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e190c24c1d8a3bc0241334598ce23829ada9afee282d49a0c82cd67ad82bcd02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:43:16Z is after 2025-08-24T17:21:41Z" Jan 20 16:43:16 crc kubenswrapper[4558]: I0120 16:43:16.605490 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:43:16Z is after 2025-08-24T17:21:41Z" Jan 20 16:43:16 crc kubenswrapper[4558]: I0120 16:43:16.619761 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:43:16Z is after 2025-08-24T17:21:41Z" Jan 20 16:43:16 crc kubenswrapper[4558]: I0120 16:43:16.623685 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:16 crc kubenswrapper[4558]: I0120 16:43:16.623712 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:16 crc kubenswrapper[4558]: I0120 16:43:16.623721 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:16 crc kubenswrapper[4558]: I0120 16:43:16.623733 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:16 crc kubenswrapper[4558]: I0120 16:43:16.623742 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:16Z","lastTransitionTime":"2026-01-20T16:43:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:16 crc kubenswrapper[4558]: I0120 16:43:16.631244 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfwnl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8fe5472-7373-4e5d-87fc-a9ecaf26a5cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38cd58ecc67d337df2c29b79e5460ea23d4acc1e66de8b08ea70d7ab7b30691d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9nvl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a14e9281ff19e324499722aefadf1e1f2fd005089c153918ad4d8a7f5bdfa853\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9nvl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rfwnl\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:43:16Z is after 2025-08-24T17:21:41Z" Jan 20 16:43:16 crc kubenswrapper[4558]: I0120 16:43:16.641899 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:43:16Z is after 2025-08-24T17:21:41Z" Jan 20 16:43:16 crc kubenswrapper[4558]: I0120 16:43:16.652246 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d96bp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8d2c8b-fb8e-4bff-b8d2-69570e5d9e57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe858db22acc96b062f1bc7941e05c823f30d1f9ba50bd497c57f38d57e04293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4v6jx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d96bp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:43:16Z is after 2025-08-24T17:21:41Z" Jan 20 16:43:16 crc kubenswrapper[4558]: I0120 16:43:16.665070 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"254129cd-82fc-4162-b671-2434bc9e2972\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a6a184067bde115ef5e09fdd90e7dccfe89a9dc347af450998b91c068e7e803\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd8b93e5559d95511a1285a9aec9a7d72df6c330bcd1e1daf3e93f5494f70eeb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4264ef8ee49a8b172fb05ac5c9b0bd32350b5d9ae95293079dfeaf44f021b455\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0889cf371f8b06cc2d255c15fda97db1d5000d6f6cff29d2fdcb8667cede8a99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4265d3281e954ee5989ee008da28c6cb9fa29478e5fdf23325521d455304da9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://186a1d2caad8865abf7f565405c4a4c077038ccb67ba77927cc2392ec075a811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e9f4ed928d20c9db64a689d7d14df9b81870bb0523cf5098e09d2d85aad9fca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e9f4ed928d20c9db64a689d7d14df9b81870bb0523cf5098e09d2d85aad9fca\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-20T16:42:58Z\\\",\\\"message\\\":\\\"ess{},},Conditions:[]Condition{},},}\\\\nI0120 16:42:58.184623 6635 lb_config.go:1031] Cluster endpoints for openshift-controller-manager/controller-manager for network=default are: map[]\\\\nF0120 16:42:58.184632 6635 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:58Z is after 2025-08-24T17:21:41Z]\\\\nI0120 16:42:58.184637 6635 services_controller.go:443] Built service openshift-controller-manager/controller-manager LB cluster-wide configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.5.149\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:443, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:57Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-nv2xw_openshift-ovn-kubernetes(254129cd-82fc-4162-b671-2434bc9e2972)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f41d488b0c24e594a2f5cad36b5f8efdb7d867fae0e18a74fe3c396a203d162f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-nv2xw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:43:16Z is after 2025-08-24T17:21:41Z" Jan 20 16:43:16 crc kubenswrapper[4558]: I0120 16:43:16.674747 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f5t7h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a318f03d0df490cb7bae11819c52eca011d163081aeecff1b2dd8f72caabbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"
}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e77617c69434ce20c448b525c7e86f1ece18d51ce6e14167b4ca7e99de8e5709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e77617c69434ce20c448b525c7e86f1ece18d51ce6e14167b4ca7e99de8e5709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://453c67434281e0261ec6799b30a548a01efd72d7df96632409e745b2e6a94546\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://453c67434281e0261ec6799b30a548a01efd72d7df96632409e745b2e6a94546\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt
\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06eab4daa6c8b6a564cf1bb1403b7b3b259a234ee09f23b4f4bc0628c2d58e69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06eab4daa6c8b6a564cf1bb1403b7b3b259a234ee09f23b4f4bc0628c2d58e69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1291ac09fc6ae2a79fdeaa0fc47272f61b97f45f67ed3f7c90bdfdbfe3519ff2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1291ac09fc6ae2a79fdeaa0fc47272f61b97f45f67ed3f7c90bdfdbfe3519ff2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe484514f50a14e485d4a97ea3ce9401c77f3f4dae4717719be7ed6c4d7b1a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"nam
e\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe484514f50a14e485d4a97ea3ce9401c77f3f4dae4717719be7ed6c4d7b1a59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f5t7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:43:16Z is after 2025-08-24T17:21:41Z" Jan 20 16:43:16 crc kubenswrapper[4558]: I0120 16:43:16.681419 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9wrq6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30032328-bd33-4073-9366-e10bc5e2aa77\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6tz4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6tz4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:17Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9wrq6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:43:16Z is after 2025-08-24T17:21:41Z" Jan 20 16:43:16 crc kubenswrapper[4558]: I0120 16:43:16.689559 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a13bfefa-f525-4ecd-89f5-f10480532bf6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9030e970e96b6909d771d4fa0c3b4f493b4a825cbc43ef7e684272284329c7c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://168e8f533aaa108e4160f9ae4af6bb7944edba7a596178ffffdc5325073fc3c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e0141aef1babb57501016cb09787591b5e6d9136b8d7dd4ba64393f0be5a027\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c87f08993c2b47c63ba9652c7dafe0fdbecdcb03a7b7080664ab1f1bd0e1d602\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:43:16Z is after 2025-08-24T17:21:41Z" Jan 20 16:43:16 crc kubenswrapper[4558]: I0120 16:43:16.697990 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f904dd3e4b43e51569b6532f00fa2f9edf912c1f1969ee2ea00c99d1b7e35a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c877f4a456b3ce64bdbe6fefd50f1ef3f92bf9e26e6296005cf366e0ce265\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:43:16Z is after 2025-08-24T17:21:41Z" Jan 20 16:43:16 crc kubenswrapper[4558]: I0120 16:43:16.705246 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e45cc56934a7b90e9d47f1c6d06d8dc140d57db4a216469e8ae112f398663e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:43:16Z is after 2025-08-24T17:21:41Z" Jan 20 16:43:16 crc kubenswrapper[4558]: I0120 16:43:16.714543 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jsqvf" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bedf08c7-1f93-4931-a7f3-e729e2a137af\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2297634e20683413a5eb643517580c9486303dc9616a7588a4ac571f3d0e53b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e32b76495d88d356373f9389e163a2f3a72e202bb454c7015594c1f5a41b511\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-20T16:42:50Z\\\",\\\"message\\\":\\\"2026-01-20T16:42:05+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_149f95a3-037e-4113-9baf-ba51c18b4cf2\\\\n2026-01-20T16:42:05+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_149f95a3-037e-4113-9baf-ba51c18b4cf2 to /host/opt/cni/bin/\\\\n2026-01-20T16:42:05Z [verbose] multus-daemon started\\\\n2026-01-20T16:42:05Z [verbose] Readiness Indicator file check\\\\n2026-01-20T16:42:50Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xxtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jsqvf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:43:16Z is after 2025-08-24T17:21:41Z" Jan 20 16:43:16 crc kubenswrapper[4558]: I0120 16:43:16.721318 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-47477" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e026445a-aa73-4a0e-ba35-e2e07de1278c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec2ee3ff161cba7e14ad26763081c9f092f0dfe969cef113631c27e0db53d157\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xcnwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-47477\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:43:16Z is after 2025-08-24T17:21:41Z" Jan 20 16:43:16 crc kubenswrapper[4558]: I0120 16:43:16.725745 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:16 crc kubenswrapper[4558]: I0120 16:43:16.725788 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:16 crc kubenswrapper[4558]: I0120 16:43:16.725797 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:16 crc kubenswrapper[4558]: I0120 16:43:16.725810 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:16 crc kubenswrapper[4558]: I0120 16:43:16.725818 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:16Z","lastTransitionTime":"2026-01-20T16:43:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:16 crc kubenswrapper[4558]: I0120 16:43:16.728160 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f10d97b3-5765-454b-b306-1ef544be2c86\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ba1f84f7ea0577350c7025aeb96fb1b329297f4d6767727f9a4e6659d3027cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2f374f27360645aa18b003d1b2415cd498c17c73812950904109dd4b77cbd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ac2f374f27360645aa18b003d1b2415cd498c17c73812950904109dd4b77cbd5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet 
valid: current time 2026-01-20T16:43:16Z is after 2025-08-24T17:21:41Z" Jan 20 16:43:16 crc kubenswrapper[4558]: I0120 16:43:16.740954 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"425eab90-fb70-4498-a98b-b95742033890\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48592e72d91cf527296f06e52fec1c16c1da21323c0644659945af109a74ebb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7e18f59b5e6b6c0be141b7efb3ccb1df4ff6e3c394bcc88fb1142c8df433493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c889e2b0e14244ef49ec057e3d5fd91687e335a983fdc99a6816abd2af643ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"
volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d434793571cbba089cc34029d951bee9063aca58728b791f2d1af2d68229f778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89f7ad577df11c509855a90ac8791da6de3f0d2857a1ca56726a6327f40455bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15eddb9054021\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15eddb9054021\
\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:43:16Z is after 2025-08-24T17:21:41Z" Jan 20 16:43:16 crc kubenswrapper[4558]: I0120 16:43:16.750974 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"30e1ff9d-8dcd-4754-9a7a-c09598fb83db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ea68e442bb2143fbaa5e2c9fe335c6b023a31c2eeaa78d60de45c6ef40c2894\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a8122c3b57ac4f0026df149fe622450d07d8b0d517ad018decd7b21d8d9c04a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7444e8fea1853bae45dbd3d52d07e3f7b94314cd29fe8c99891e515a892e569e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6438ffea42f98178c4c2c21ae01725a8b54d6a6f790f47d64faf5ecd9f1c00f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2e22f1d4e87dd06a67f49aedad280da48ac1c4ba2e438a4925cf8bd5330c4d8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0120 16:41:58.859734 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0120 16:41:58.861094 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179835226/tls.crt::/tmp/serving-cert-2179835226/tls.key\\\\\\\"\\\\nI0120 16:42:04.205247 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0120 16:42:04.207291 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0120 16:42:04.207308 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0120 16:42:04.207329 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0120 16:42:04.207333 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0120 16:42:04.210615 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0120 16:42:04.210632 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0120 16:42:04.210641 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210648 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0120 16:42:04.210655 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0120 16:42:04.210658 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0120 16:42:04.210660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0120 16:42:04.211964 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4850bde1fb538e21fc91949dc584ef0a791d718844691a44559e0513bb36203\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:43:16Z is after 2025-08-24T17:21:41Z" Jan 20 16:43:16 crc kubenswrapper[4558]: I0120 16:43:16.758744 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"68337d27-3fa6-4a29-88b0-82e60c3739eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de7f7c23993a57d30f08686e1cd4abb5b17e108ac12dd0c7929a31574c69eeb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18a59ecb802507e0d5988eacc915fd7e0d6954518de80f61162c97f680fd8c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:43:16Z is after 2025-08-24T17:21:41Z" Jan 20 16:43:16 crc kubenswrapper[4558]: I0120 16:43:16.827788 4558 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:16 crc kubenswrapper[4558]: I0120 16:43:16.827820 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:16 crc kubenswrapper[4558]: I0120 16:43:16.827828 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:16 crc kubenswrapper[4558]: I0120 16:43:16.827840 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:16 crc kubenswrapper[4558]: I0120 16:43:16.827850 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:16Z","lastTransitionTime":"2026-01-20T16:43:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:16 crc kubenswrapper[4558]: I0120 16:43:16.930047 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:16 crc kubenswrapper[4558]: I0120 16:43:16.930094 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:16 crc kubenswrapper[4558]: I0120 16:43:16.930102 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:16 crc kubenswrapper[4558]: I0120 16:43:16.930114 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:16 crc kubenswrapper[4558]: I0120 16:43:16.930123 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:16Z","lastTransitionTime":"2026-01-20T16:43:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:17 crc kubenswrapper[4558]: I0120 16:43:17.032413 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:17 crc kubenswrapper[4558]: I0120 16:43:17.032446 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:17 crc kubenswrapper[4558]: I0120 16:43:17.032456 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:17 crc kubenswrapper[4558]: I0120 16:43:17.032469 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:17 crc kubenswrapper[4558]: I0120 16:43:17.032478 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:17Z","lastTransitionTime":"2026-01-20T16:43:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:17 crc kubenswrapper[4558]: I0120 16:43:17.134667 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:17 crc kubenswrapper[4558]: I0120 16:43:17.134706 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:17 crc kubenswrapper[4558]: I0120 16:43:17.134716 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:17 crc kubenswrapper[4558]: I0120 16:43:17.134731 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:17 crc kubenswrapper[4558]: I0120 16:43:17.134741 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:17Z","lastTransitionTime":"2026-01-20T16:43:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:17 crc kubenswrapper[4558]: I0120 16:43:17.236981 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:17 crc kubenswrapper[4558]: I0120 16:43:17.237018 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:17 crc kubenswrapper[4558]: I0120 16:43:17.237045 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:17 crc kubenswrapper[4558]: I0120 16:43:17.237059 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:17 crc kubenswrapper[4558]: I0120 16:43:17.237067 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:17Z","lastTransitionTime":"2026-01-20T16:43:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:17 crc kubenswrapper[4558]: I0120 16:43:17.338894 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:17 crc kubenswrapper[4558]: I0120 16:43:17.338945 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:17 crc kubenswrapper[4558]: I0120 16:43:17.338954 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:17 crc kubenswrapper[4558]: I0120 16:43:17.338966 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:17 crc kubenswrapper[4558]: I0120 16:43:17.338974 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:17Z","lastTransitionTime":"2026-01-20T16:43:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:17 crc kubenswrapper[4558]: I0120 16:43:17.440886 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:17 crc kubenswrapper[4558]: I0120 16:43:17.440915 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:17 crc kubenswrapper[4558]: I0120 16:43:17.440923 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:17 crc kubenswrapper[4558]: I0120 16:43:17.440935 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:17 crc kubenswrapper[4558]: I0120 16:43:17.440942 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:17Z","lastTransitionTime":"2026-01-20T16:43:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:17 crc kubenswrapper[4558]: I0120 16:43:17.543104 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:17 crc kubenswrapper[4558]: I0120 16:43:17.543143 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:17 crc kubenswrapper[4558]: I0120 16:43:17.543151 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:17 crc kubenswrapper[4558]: I0120 16:43:17.543189 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:17 crc kubenswrapper[4558]: I0120 16:43:17.543197 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:17Z","lastTransitionTime":"2026-01-20T16:43:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:17 crc kubenswrapper[4558]: I0120 16:43:17.565675 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9wrq6" Jan 20 16:43:17 crc kubenswrapper[4558]: E0120 16:43:17.565790 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9wrq6" podUID="30032328-bd33-4073-9366-e10bc5e2aa77" Jan 20 16:43:17 crc kubenswrapper[4558]: I0120 16:43:17.585033 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-11 04:31:14.626628669 +0000 UTC Jan 20 16:43:17 crc kubenswrapper[4558]: I0120 16:43:17.645474 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:17 crc kubenswrapper[4558]: I0120 16:43:17.645506 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:17 crc kubenswrapper[4558]: I0120 16:43:17.645514 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:17 crc kubenswrapper[4558]: I0120 16:43:17.645525 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:17 crc kubenswrapper[4558]: I0120 16:43:17.645534 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:17Z","lastTransitionTime":"2026-01-20T16:43:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:17 crc kubenswrapper[4558]: I0120 16:43:17.747125 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:17 crc kubenswrapper[4558]: I0120 16:43:17.747159 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:17 crc kubenswrapper[4558]: I0120 16:43:17.747185 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:17 crc kubenswrapper[4558]: I0120 16:43:17.747197 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:17 crc kubenswrapper[4558]: I0120 16:43:17.747206 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:17Z","lastTransitionTime":"2026-01-20T16:43:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:17 crc kubenswrapper[4558]: I0120 16:43:17.849085 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:17 crc kubenswrapper[4558]: I0120 16:43:17.849118 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:17 crc kubenswrapper[4558]: I0120 16:43:17.849126 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:17 crc kubenswrapper[4558]: I0120 16:43:17.849138 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:17 crc kubenswrapper[4558]: I0120 16:43:17.849146 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:17Z","lastTransitionTime":"2026-01-20T16:43:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:17 crc kubenswrapper[4558]: I0120 16:43:17.951422 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:17 crc kubenswrapper[4558]: I0120 16:43:17.951468 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:17 crc kubenswrapper[4558]: I0120 16:43:17.951477 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:17 crc kubenswrapper[4558]: I0120 16:43:17.951489 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:17 crc kubenswrapper[4558]: I0120 16:43:17.951497 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:17Z","lastTransitionTime":"2026-01-20T16:43:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:18 crc kubenswrapper[4558]: I0120 16:43:18.053679 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:18 crc kubenswrapper[4558]: I0120 16:43:18.053714 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:18 crc kubenswrapper[4558]: I0120 16:43:18.053722 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:18 crc kubenswrapper[4558]: I0120 16:43:18.053734 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:18 crc kubenswrapper[4558]: I0120 16:43:18.053742 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:18Z","lastTransitionTime":"2026-01-20T16:43:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:18 crc kubenswrapper[4558]: I0120 16:43:18.155822 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:18 crc kubenswrapper[4558]: I0120 16:43:18.155854 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:18 crc kubenswrapper[4558]: I0120 16:43:18.155862 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:18 crc kubenswrapper[4558]: I0120 16:43:18.155874 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:18 crc kubenswrapper[4558]: I0120 16:43:18.155881 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:18Z","lastTransitionTime":"2026-01-20T16:43:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:18 crc kubenswrapper[4558]: I0120 16:43:18.257993 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:18 crc kubenswrapper[4558]: I0120 16:43:18.258029 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:18 crc kubenswrapper[4558]: I0120 16:43:18.258037 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:18 crc kubenswrapper[4558]: I0120 16:43:18.258051 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:18 crc kubenswrapper[4558]: I0120 16:43:18.258059 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:18Z","lastTransitionTime":"2026-01-20T16:43:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:18 crc kubenswrapper[4558]: I0120 16:43:18.359754 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:18 crc kubenswrapper[4558]: I0120 16:43:18.359787 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:18 crc kubenswrapper[4558]: I0120 16:43:18.359795 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:18 crc kubenswrapper[4558]: I0120 16:43:18.359808 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:18 crc kubenswrapper[4558]: I0120 16:43:18.359815 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:18Z","lastTransitionTime":"2026-01-20T16:43:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:18 crc kubenswrapper[4558]: I0120 16:43:18.461520 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:18 crc kubenswrapper[4558]: I0120 16:43:18.461555 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:18 crc kubenswrapper[4558]: I0120 16:43:18.461563 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:18 crc kubenswrapper[4558]: I0120 16:43:18.461575 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:18 crc kubenswrapper[4558]: I0120 16:43:18.461583 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:18Z","lastTransitionTime":"2026-01-20T16:43:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:18 crc kubenswrapper[4558]: I0120 16:43:18.562808 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:18 crc kubenswrapper[4558]: I0120 16:43:18.562845 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:18 crc kubenswrapper[4558]: I0120 16:43:18.562855 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:18 crc kubenswrapper[4558]: I0120 16:43:18.562868 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:18 crc kubenswrapper[4558]: I0120 16:43:18.562876 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:18Z","lastTransitionTime":"2026-01-20T16:43:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:18 crc kubenswrapper[4558]: I0120 16:43:18.565055 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 20 16:43:18 crc kubenswrapper[4558]: I0120 16:43:18.565109 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 20 16:43:18 crc kubenswrapper[4558]: I0120 16:43:18.565055 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:43:18 crc kubenswrapper[4558]: E0120 16:43:18.565145 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 20 16:43:18 crc kubenswrapper[4558]: E0120 16:43:18.565213 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 20 16:43:18 crc kubenswrapper[4558]: E0120 16:43:18.565279 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 20 16:43:18 crc kubenswrapper[4558]: I0120 16:43:18.585878 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-10 19:19:25.01704024 +0000 UTC Jan 20 16:43:18 crc kubenswrapper[4558]: I0120 16:43:18.664673 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:18 crc kubenswrapper[4558]: I0120 16:43:18.664703 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:18 crc kubenswrapper[4558]: I0120 16:43:18.664711 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:18 crc kubenswrapper[4558]: I0120 16:43:18.664722 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:18 crc kubenswrapper[4558]: I0120 16:43:18.664730 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:18Z","lastTransitionTime":"2026-01-20T16:43:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:18 crc kubenswrapper[4558]: I0120 16:43:18.766631 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:18 crc kubenswrapper[4558]: I0120 16:43:18.766695 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:18 crc kubenswrapper[4558]: I0120 16:43:18.766705 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:18 crc kubenswrapper[4558]: I0120 16:43:18.766718 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:18 crc kubenswrapper[4558]: I0120 16:43:18.766726 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:18Z","lastTransitionTime":"2026-01-20T16:43:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:18 crc kubenswrapper[4558]: I0120 16:43:18.868318 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:18 crc kubenswrapper[4558]: I0120 16:43:18.868353 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:18 crc kubenswrapper[4558]: I0120 16:43:18.868361 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:18 crc kubenswrapper[4558]: I0120 16:43:18.868376 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:18 crc kubenswrapper[4558]: I0120 16:43:18.868384 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:18Z","lastTransitionTime":"2026-01-20T16:43:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:18 crc kubenswrapper[4558]: I0120 16:43:18.970227 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:18 crc kubenswrapper[4558]: I0120 16:43:18.970263 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:18 crc kubenswrapper[4558]: I0120 16:43:18.970274 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:18 crc kubenswrapper[4558]: I0120 16:43:18.970288 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:18 crc kubenswrapper[4558]: I0120 16:43:18.970297 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:18Z","lastTransitionTime":"2026-01-20T16:43:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.071942 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.071976 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.071985 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.071996 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.072004 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:19Z","lastTransitionTime":"2026-01-20T16:43:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.173969 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.174000 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.174009 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.174020 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.174029 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:19Z","lastTransitionTime":"2026-01-20T16:43:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.276026 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.276064 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.276072 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.276085 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.276095 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:19Z","lastTransitionTime":"2026-01-20T16:43:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.281230 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.281262 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.281270 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.281283 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.281293 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:19Z","lastTransitionTime":"2026-01-20T16:43:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:19 crc kubenswrapper[4558]: E0120 16:43:19.290219 4558 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:43:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:43:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:43:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:43:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:43:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:43:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:43:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:43:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"bf027402-7a62-478b-8585-220c98495823\\\",\\\"systemUUID\\\":\\\"1def282c-e24e-4bc0-9a1b-d7cc30756d3d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:43:19Z is after 2025-08-24T17:21:41Z" Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.292626 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.292690 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.292700 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.292713 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.292722 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:19Z","lastTransitionTime":"2026-01-20T16:43:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:19 crc kubenswrapper[4558]: E0120 16:43:19.300635 4558 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:43:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:43:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:43:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:43:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:43:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:43:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:43:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:43:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"bf027402-7a62-478b-8585-220c98495823\\\",\\\"systemUUID\\\":\\\"1def282c-e24e-4bc0-9a1b-d7cc30756d3d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:43:19Z is after 2025-08-24T17:21:41Z" Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.302522 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.302547 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.302556 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.302567 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.302574 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:19Z","lastTransitionTime":"2026-01-20T16:43:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:19 crc kubenswrapper[4558]: E0120 16:43:19.310561 4558 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:43:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:43:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:43:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:43:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:43:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:43:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:43:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:43:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"bf027402-7a62-478b-8585-220c98495823\\\",\\\"systemUUID\\\":\\\"1def282c-e24e-4bc0-9a1b-d7cc30756d3d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:43:19Z is after 2025-08-24T17:21:41Z" Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.312837 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.312865 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.312873 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.312886 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.312894 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:19Z","lastTransitionTime":"2026-01-20T16:43:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:19 crc kubenswrapper[4558]: E0120 16:43:19.321515 4558 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:43:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:43:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:43:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:43:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:43:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:43:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:43:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:43:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"bf027402-7a62-478b-8585-220c98495823\\\",\\\"systemUUID\\\":\\\"1def282c-e24e-4bc0-9a1b-d7cc30756d3d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:43:19Z is after 2025-08-24T17:21:41Z" Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.324121 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.324150 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.324195 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.324217 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.324227 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:19Z","lastTransitionTime":"2026-01-20T16:43:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:19 crc kubenswrapper[4558]: E0120 16:43:19.332138 4558 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:43:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:43:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:43:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:43:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:43:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:43:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:43:19Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:43:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"bf027402-7a62-478b-8585-220c98495823\\\",\\\"systemUUID\\\":\\\"1def282c-e24e-4bc0-9a1b-d7cc30756d3d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:43:19Z is after 2025-08-24T17:21:41Z" Jan 20 16:43:19 crc kubenswrapper[4558]: E0120 16:43:19.332284 4558 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.377715 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.377750 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.377760 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.377772 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.377781 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:19Z","lastTransitionTime":"2026-01-20T16:43:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.479555 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.479589 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.479610 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.479622 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.479631 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:19Z","lastTransitionTime":"2026-01-20T16:43:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.565099 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9wrq6" Jan 20 16:43:19 crc kubenswrapper[4558]: E0120 16:43:19.565354 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9wrq6" podUID="30032328-bd33-4073-9366-e10bc5e2aa77" Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.581395 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.581438 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.581448 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.581468 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.581478 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:19Z","lastTransitionTime":"2026-01-20T16:43:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.586579 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-07 10:46:31.574617029 +0000 UTC Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.683337 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.683375 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.683383 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.683396 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.683405 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:19Z","lastTransitionTime":"2026-01-20T16:43:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.785031 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.785062 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.785071 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.785099 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.785107 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:19Z","lastTransitionTime":"2026-01-20T16:43:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.888827 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.888891 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.888903 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.888935 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.888949 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:19Z","lastTransitionTime":"2026-01-20T16:43:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.990493 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.990529 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.990537 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.990549 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:19 crc kubenswrapper[4558]: I0120 16:43:19.990557 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:19Z","lastTransitionTime":"2026-01-20T16:43:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:20 crc kubenswrapper[4558]: I0120 16:43:20.092417 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:20 crc kubenswrapper[4558]: I0120 16:43:20.092452 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:20 crc kubenswrapper[4558]: I0120 16:43:20.092461 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:20 crc kubenswrapper[4558]: I0120 16:43:20.092474 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:20 crc kubenswrapper[4558]: I0120 16:43:20.092482 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:20Z","lastTransitionTime":"2026-01-20T16:43:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:20 crc kubenswrapper[4558]: I0120 16:43:20.194679 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:20 crc kubenswrapper[4558]: I0120 16:43:20.194713 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:20 crc kubenswrapper[4558]: I0120 16:43:20.194721 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:20 crc kubenswrapper[4558]: I0120 16:43:20.194732 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:20 crc kubenswrapper[4558]: I0120 16:43:20.194740 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:20Z","lastTransitionTime":"2026-01-20T16:43:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:20 crc kubenswrapper[4558]: I0120 16:43:20.296429 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:20 crc kubenswrapper[4558]: I0120 16:43:20.296469 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:20 crc kubenswrapper[4558]: I0120 16:43:20.296477 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:20 crc kubenswrapper[4558]: I0120 16:43:20.296490 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:20 crc kubenswrapper[4558]: I0120 16:43:20.296498 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:20Z","lastTransitionTime":"2026-01-20T16:43:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:20 crc kubenswrapper[4558]: I0120 16:43:20.398055 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:20 crc kubenswrapper[4558]: I0120 16:43:20.398083 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:20 crc kubenswrapper[4558]: I0120 16:43:20.398091 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:20 crc kubenswrapper[4558]: I0120 16:43:20.398102 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:20 crc kubenswrapper[4558]: I0120 16:43:20.398110 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:20Z","lastTransitionTime":"2026-01-20T16:43:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:20 crc kubenswrapper[4558]: I0120 16:43:20.499960 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:20 crc kubenswrapper[4558]: I0120 16:43:20.499986 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:20 crc kubenswrapper[4558]: I0120 16:43:20.499994 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:20 crc kubenswrapper[4558]: I0120 16:43:20.500005 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:20 crc kubenswrapper[4558]: I0120 16:43:20.500013 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:20Z","lastTransitionTime":"2026-01-20T16:43:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:20 crc kubenswrapper[4558]: I0120 16:43:20.565446 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 20 16:43:20 crc kubenswrapper[4558]: I0120 16:43:20.565496 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:43:20 crc kubenswrapper[4558]: I0120 16:43:20.565559 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 20 16:43:20 crc kubenswrapper[4558]: E0120 16:43:20.565556 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 20 16:43:20 crc kubenswrapper[4558]: E0120 16:43:20.565636 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 20 16:43:20 crc kubenswrapper[4558]: E0120 16:43:20.565685 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 20 16:43:20 crc kubenswrapper[4558]: I0120 16:43:20.586652 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-13 07:24:35.992954482 +0000 UTC Jan 20 16:43:20 crc kubenswrapper[4558]: I0120 16:43:20.601780 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:20 crc kubenswrapper[4558]: I0120 16:43:20.601808 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:20 crc kubenswrapper[4558]: I0120 16:43:20.601815 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:20 crc kubenswrapper[4558]: I0120 16:43:20.601826 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:20 crc kubenswrapper[4558]: I0120 16:43:20.601834 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:20Z","lastTransitionTime":"2026-01-20T16:43:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:20 crc kubenswrapper[4558]: I0120 16:43:20.703239 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:20 crc kubenswrapper[4558]: I0120 16:43:20.703277 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:20 crc kubenswrapper[4558]: I0120 16:43:20.703287 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:20 crc kubenswrapper[4558]: I0120 16:43:20.703299 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:20 crc kubenswrapper[4558]: I0120 16:43:20.703309 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:20Z","lastTransitionTime":"2026-01-20T16:43:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:20 crc kubenswrapper[4558]: I0120 16:43:20.805820 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:20 crc kubenswrapper[4558]: I0120 16:43:20.805895 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:20 crc kubenswrapper[4558]: I0120 16:43:20.805914 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:20 crc kubenswrapper[4558]: I0120 16:43:20.805939 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:20 crc kubenswrapper[4558]: I0120 16:43:20.805956 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:20Z","lastTransitionTime":"2026-01-20T16:43:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:20 crc kubenswrapper[4558]: I0120 16:43:20.907579 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:20 crc kubenswrapper[4558]: I0120 16:43:20.907623 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:20 crc kubenswrapper[4558]: I0120 16:43:20.907633 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:20 crc kubenswrapper[4558]: I0120 16:43:20.907646 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:20 crc kubenswrapper[4558]: I0120 16:43:20.907655 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:20Z","lastTransitionTime":"2026-01-20T16:43:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:21 crc kubenswrapper[4558]: I0120 16:43:21.009663 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:21 crc kubenswrapper[4558]: I0120 16:43:21.009700 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:21 crc kubenswrapper[4558]: I0120 16:43:21.009708 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:21 crc kubenswrapper[4558]: I0120 16:43:21.009720 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:21 crc kubenswrapper[4558]: I0120 16:43:21.009729 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:21Z","lastTransitionTime":"2026-01-20T16:43:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:21 crc kubenswrapper[4558]: I0120 16:43:21.111789 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:21 crc kubenswrapper[4558]: I0120 16:43:21.111844 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:21 crc kubenswrapper[4558]: I0120 16:43:21.111854 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:21 crc kubenswrapper[4558]: I0120 16:43:21.111869 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:21 crc kubenswrapper[4558]: I0120 16:43:21.111879 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:21Z","lastTransitionTime":"2026-01-20T16:43:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:21 crc kubenswrapper[4558]: I0120 16:43:21.213635 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:21 crc kubenswrapper[4558]: I0120 16:43:21.213671 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:21 crc kubenswrapper[4558]: I0120 16:43:21.213681 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:21 crc kubenswrapper[4558]: I0120 16:43:21.213695 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:21 crc kubenswrapper[4558]: I0120 16:43:21.213704 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:21Z","lastTransitionTime":"2026-01-20T16:43:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:21 crc kubenswrapper[4558]: I0120 16:43:21.315903 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:21 crc kubenswrapper[4558]: I0120 16:43:21.315939 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:21 crc kubenswrapper[4558]: I0120 16:43:21.315948 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:21 crc kubenswrapper[4558]: I0120 16:43:21.315961 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:21 crc kubenswrapper[4558]: I0120 16:43:21.315969 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:21Z","lastTransitionTime":"2026-01-20T16:43:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:21 crc kubenswrapper[4558]: I0120 16:43:21.418129 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:21 crc kubenswrapper[4558]: I0120 16:43:21.418182 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:21 crc kubenswrapper[4558]: I0120 16:43:21.418190 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:21 crc kubenswrapper[4558]: I0120 16:43:21.418203 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:21 crc kubenswrapper[4558]: I0120 16:43:21.418212 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:21Z","lastTransitionTime":"2026-01-20T16:43:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:21 crc kubenswrapper[4558]: I0120 16:43:21.519679 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:21 crc kubenswrapper[4558]: I0120 16:43:21.519724 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:21 crc kubenswrapper[4558]: I0120 16:43:21.519732 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:21 crc kubenswrapper[4558]: I0120 16:43:21.519745 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:21 crc kubenswrapper[4558]: I0120 16:43:21.519753 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:21Z","lastTransitionTime":"2026-01-20T16:43:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
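
The condition payload that setters.go logs each second ("Node became not ready") is a plain JSON object with type, status, timestamps, reason, and message. A small, self-contained Go sketch that builds an equivalent structure; the field names mirror the log, the type is a local illustration rather than the kubelet's own, and the message is abbreviated:

package main

import (
        "encoding/json"
        "fmt"
        "time"
)

// Local illustration of the condition object seen in the log above.
type nodeCondition struct {
        Type               string `json:"type"`
        Status             string `json:"status"`
        LastHeartbeatTime  string `json:"lastHeartbeatTime"`
        LastTransitionTime string `json:"lastTransitionTime"`
        Reason             string `json:"reason"`
        Message            string `json:"message"`
}

func main() {
        now := time.Now().UTC().Format(time.RFC3339)
        c := nodeCondition{
                Type:               "Ready",
                Status:             "False",
                LastHeartbeatTime:  now,
                LastTransitionTime: now,
                Reason:             "KubeletNotReady",
                Message:            "container runtime network not ready: NetworkReady=false ...",
        }
        out, _ := json.Marshal(c)
        fmt.Println(string(out))
}
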
Has your network provider started?"} Jan 20 16:43:21 crc kubenswrapper[4558]: I0120 16:43:21.564910 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9wrq6" Jan 20 16:43:21 crc kubenswrapper[4558]: E0120 16:43:21.565159 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9wrq6" podUID="30032328-bd33-4073-9366-e10bc5e2aa77" Jan 20 16:43:21 crc kubenswrapper[4558]: I0120 16:43:21.586945 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-17 21:34:38.392412308 +0000 UTC Jan 20 16:43:21 crc kubenswrapper[4558]: I0120 16:43:21.620997 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:21 crc kubenswrapper[4558]: I0120 16:43:21.621029 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:21 crc kubenswrapper[4558]: I0120 16:43:21.621037 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:21 crc kubenswrapper[4558]: I0120 16:43:21.621049 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:21 crc kubenswrapper[4558]: I0120 16:43:21.621057 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:21Z","lastTransitionTime":"2026-01-20T16:43:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:21 crc kubenswrapper[4558]: I0120 16:43:21.723098 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:21 crc kubenswrapper[4558]: I0120 16:43:21.723138 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:21 crc kubenswrapper[4558]: I0120 16:43:21.723146 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:21 crc kubenswrapper[4558]: I0120 16:43:21.723180 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:21 crc kubenswrapper[4558]: I0120 16:43:21.723190 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:21Z","lastTransitionTime":"2026-01-20T16:43:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:21 crc kubenswrapper[4558]: I0120 16:43:21.824662 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:21 crc kubenswrapper[4558]: I0120 16:43:21.824696 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:21 crc kubenswrapper[4558]: I0120 16:43:21.824704 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:21 crc kubenswrapper[4558]: I0120 16:43:21.824718 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:21 crc kubenswrapper[4558]: I0120 16:43:21.824727 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:21Z","lastTransitionTime":"2026-01-20T16:43:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:21 crc kubenswrapper[4558]: I0120 16:43:21.887257 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/30032328-bd33-4073-9366-e10bc5e2aa77-metrics-certs\") pod \"network-metrics-daemon-9wrq6\" (UID: \"30032328-bd33-4073-9366-e10bc5e2aa77\") " pod="openshift-multus/network-metrics-daemon-9wrq6" Jan 20 16:43:21 crc kubenswrapper[4558]: E0120 16:43:21.887380 4558 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 20 16:43:21 crc kubenswrapper[4558]: E0120 16:43:21.887431 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/30032328-bd33-4073-9366-e10bc5e2aa77-metrics-certs podName:30032328-bd33-4073-9366-e10bc5e2aa77 nodeName:}" failed. No retries permitted until 2026-01-20 16:44:25.887417041 +0000 UTC m=+159.647755008 (durationBeforeRetry 1m4s). 
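
The "No retries permitted until ... (durationBeforeRetry 1m4s)" line above shows the metrics-certs mount operation being deferred for 64 seconds, consistent with a delay that doubles after each failure. A generic sketch of that backoff pattern; the 500ms base and 2-minute cap are illustrative assumptions, not the kubelet's actual constants:

package main

import (
        "fmt"
        "time"
)

// Doubling backoff with a cap; base and cap are assumed values for illustration.
func durationBeforeRetry(failures int) time.Duration {
        d := 500 * time.Millisecond
        for i := 0; i < failures; i++ {
                d *= 2
                if d >= 2*time.Minute {
                        return 2 * time.Minute
                }
        }
        return d
}

func main() {
        for f := 1; f <= 8; f++ {
                fmt.Printf("failure %d -> wait %s\n", f, durationBeforeRetry(f))
        }
        // failure 7 -> wait 1m4s, matching the deferral logged above.
}
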
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/30032328-bd33-4073-9366-e10bc5e2aa77-metrics-certs") pod "network-metrics-daemon-9wrq6" (UID: "30032328-bd33-4073-9366-e10bc5e2aa77") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 20 16:43:21 crc kubenswrapper[4558]: I0120 16:43:21.927006 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:21 crc kubenswrapper[4558]: I0120 16:43:21.927049 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:21 crc kubenswrapper[4558]: I0120 16:43:21.927057 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:21 crc kubenswrapper[4558]: I0120 16:43:21.927069 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:21 crc kubenswrapper[4558]: I0120 16:43:21.927077 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:21Z","lastTransitionTime":"2026-01-20T16:43:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:22 crc kubenswrapper[4558]: I0120 16:43:22.028966 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:22 crc kubenswrapper[4558]: I0120 16:43:22.029023 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:22 crc kubenswrapper[4558]: I0120 16:43:22.029032 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:22 crc kubenswrapper[4558]: I0120 16:43:22.029042 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:22 crc kubenswrapper[4558]: I0120 16:43:22.029052 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:22Z","lastTransitionTime":"2026-01-20T16:43:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:22 crc kubenswrapper[4558]: I0120 16:43:22.130478 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:22 crc kubenswrapper[4558]: I0120 16:43:22.130508 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:22 crc kubenswrapper[4558]: I0120 16:43:22.130517 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:22 crc kubenswrapper[4558]: I0120 16:43:22.130528 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:22 crc kubenswrapper[4558]: I0120 16:43:22.130535 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:22Z","lastTransitionTime":"2026-01-20T16:43:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:22 crc kubenswrapper[4558]: I0120 16:43:22.232086 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:22 crc kubenswrapper[4558]: I0120 16:43:22.232117 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:22 crc kubenswrapper[4558]: I0120 16:43:22.232127 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:22 crc kubenswrapper[4558]: I0120 16:43:22.232141 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:22 crc kubenswrapper[4558]: I0120 16:43:22.232149 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:22Z","lastTransitionTime":"2026-01-20T16:43:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:22 crc kubenswrapper[4558]: I0120 16:43:22.333518 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:22 crc kubenswrapper[4558]: I0120 16:43:22.333549 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:22 crc kubenswrapper[4558]: I0120 16:43:22.333557 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:22 crc kubenswrapper[4558]: I0120 16:43:22.333570 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:22 crc kubenswrapper[4558]: I0120 16:43:22.333579 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:22Z","lastTransitionTime":"2026-01-20T16:43:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:22 crc kubenswrapper[4558]: I0120 16:43:22.435756 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:22 crc kubenswrapper[4558]: I0120 16:43:22.435796 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:22 crc kubenswrapper[4558]: I0120 16:43:22.435808 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:22 crc kubenswrapper[4558]: I0120 16:43:22.435822 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:22 crc kubenswrapper[4558]: I0120 16:43:22.435833 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:22Z","lastTransitionTime":"2026-01-20T16:43:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:22 crc kubenswrapper[4558]: I0120 16:43:22.537853 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:22 crc kubenswrapper[4558]: I0120 16:43:22.537893 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:22 crc kubenswrapper[4558]: I0120 16:43:22.537901 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:22 crc kubenswrapper[4558]: I0120 16:43:22.537914 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:22 crc kubenswrapper[4558]: I0120 16:43:22.537924 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:22Z","lastTransitionTime":"2026-01-20T16:43:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:22 crc kubenswrapper[4558]: I0120 16:43:22.565740 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:43:22 crc kubenswrapper[4558]: I0120 16:43:22.565786 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 20 16:43:22 crc kubenswrapper[4558]: E0120 16:43:22.565858 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 20 16:43:22 crc kubenswrapper[4558]: I0120 16:43:22.565881 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 20 16:43:22 crc kubenswrapper[4558]: E0120 16:43:22.565967 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 20 16:43:22 crc kubenswrapper[4558]: E0120 16:43:22.566005 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 20 16:43:22 crc kubenswrapper[4558]: I0120 16:43:22.587890 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-25 17:48:20.919779848 +0000 UTC Jan 20 16:43:22 crc kubenswrapper[4558]: I0120 16:43:22.640204 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:22 crc kubenswrapper[4558]: I0120 16:43:22.640245 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:22 crc kubenswrapper[4558]: I0120 16:43:22.640255 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:22 crc kubenswrapper[4558]: I0120 16:43:22.640270 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:22 crc kubenswrapper[4558]: I0120 16:43:22.640280 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:22Z","lastTransitionTime":"2026-01-20T16:43:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:22 crc kubenswrapper[4558]: I0120 16:43:22.742102 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:22 crc kubenswrapper[4558]: I0120 16:43:22.742153 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:22 crc kubenswrapper[4558]: I0120 16:43:22.742180 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:22 crc kubenswrapper[4558]: I0120 16:43:22.742194 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:22 crc kubenswrapper[4558]: I0120 16:43:22.742204 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:22Z","lastTransitionTime":"2026-01-20T16:43:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:22 crc kubenswrapper[4558]: I0120 16:43:22.843835 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:22 crc kubenswrapper[4558]: I0120 16:43:22.843870 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:22 crc kubenswrapper[4558]: I0120 16:43:22.843900 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:22 crc kubenswrapper[4558]: I0120 16:43:22.843912 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:22 crc kubenswrapper[4558]: I0120 16:43:22.843919 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:22Z","lastTransitionTime":"2026-01-20T16:43:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:22 crc kubenswrapper[4558]: I0120 16:43:22.945341 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:22 crc kubenswrapper[4558]: I0120 16:43:22.945371 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:22 crc kubenswrapper[4558]: I0120 16:43:22.945378 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:22 crc kubenswrapper[4558]: I0120 16:43:22.945390 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:22 crc kubenswrapper[4558]: I0120 16:43:22.945398 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:22Z","lastTransitionTime":"2026-01-20T16:43:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:23 crc kubenswrapper[4558]: I0120 16:43:23.047914 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:23 crc kubenswrapper[4558]: I0120 16:43:23.047968 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:23 crc kubenswrapper[4558]: I0120 16:43:23.047978 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:23 crc kubenswrapper[4558]: I0120 16:43:23.047998 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:23 crc kubenswrapper[4558]: I0120 16:43:23.048009 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:23Z","lastTransitionTime":"2026-01-20T16:43:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:23 crc kubenswrapper[4558]: I0120 16:43:23.149926 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:23 crc kubenswrapper[4558]: I0120 16:43:23.149965 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:23 crc kubenswrapper[4558]: I0120 16:43:23.149973 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:23 crc kubenswrapper[4558]: I0120 16:43:23.149987 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:23 crc kubenswrapper[4558]: I0120 16:43:23.149998 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:23Z","lastTransitionTime":"2026-01-20T16:43:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:23 crc kubenswrapper[4558]: I0120 16:43:23.252343 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:23 crc kubenswrapper[4558]: I0120 16:43:23.252375 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:23 crc kubenswrapper[4558]: I0120 16:43:23.252384 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:23 crc kubenswrapper[4558]: I0120 16:43:23.252395 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:23 crc kubenswrapper[4558]: I0120 16:43:23.252406 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:23Z","lastTransitionTime":"2026-01-20T16:43:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:23 crc kubenswrapper[4558]: I0120 16:43:23.354457 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:23 crc kubenswrapper[4558]: I0120 16:43:23.354494 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:23 crc kubenswrapper[4558]: I0120 16:43:23.354504 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:23 crc kubenswrapper[4558]: I0120 16:43:23.354518 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:23 crc kubenswrapper[4558]: I0120 16:43:23.354527 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:23Z","lastTransitionTime":"2026-01-20T16:43:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:23 crc kubenswrapper[4558]: I0120 16:43:23.456736 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:23 crc kubenswrapper[4558]: I0120 16:43:23.456768 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:23 crc kubenswrapper[4558]: I0120 16:43:23.456776 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:23 crc kubenswrapper[4558]: I0120 16:43:23.456791 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:23 crc kubenswrapper[4558]: I0120 16:43:23.456799 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:23Z","lastTransitionTime":"2026-01-20T16:43:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:23 crc kubenswrapper[4558]: I0120 16:43:23.559057 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:23 crc kubenswrapper[4558]: I0120 16:43:23.559093 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:23 crc kubenswrapper[4558]: I0120 16:43:23.559102 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:23 crc kubenswrapper[4558]: I0120 16:43:23.559116 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:23 crc kubenswrapper[4558]: I0120 16:43:23.559124 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:23Z","lastTransitionTime":"2026-01-20T16:43:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:23 crc kubenswrapper[4558]: I0120 16:43:23.565234 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9wrq6" Jan 20 16:43:23 crc kubenswrapper[4558]: E0120 16:43:23.565337 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9wrq6" podUID="30032328-bd33-4073-9366-e10bc5e2aa77" Jan 20 16:43:23 crc kubenswrapper[4558]: I0120 16:43:23.588557 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-07 10:25:37.605119282 +0000 UTC Jan 20 16:43:23 crc kubenswrapper[4558]: I0120 16:43:23.660891 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:23 crc kubenswrapper[4558]: I0120 16:43:23.660930 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:23 crc kubenswrapper[4558]: I0120 16:43:23.660939 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:23 crc kubenswrapper[4558]: I0120 16:43:23.660956 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:23 crc kubenswrapper[4558]: I0120 16:43:23.660965 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:23Z","lastTransitionTime":"2026-01-20T16:43:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:23 crc kubenswrapper[4558]: I0120 16:43:23.763133 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:23 crc kubenswrapper[4558]: I0120 16:43:23.763180 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:23 crc kubenswrapper[4558]: I0120 16:43:23.763190 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:23 crc kubenswrapper[4558]: I0120 16:43:23.763204 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:23 crc kubenswrapper[4558]: I0120 16:43:23.763212 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:23Z","lastTransitionTime":"2026-01-20T16:43:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:23 crc kubenswrapper[4558]: I0120 16:43:23.864930 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:23 crc kubenswrapper[4558]: I0120 16:43:23.864989 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:23 crc kubenswrapper[4558]: I0120 16:43:23.864998 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:23 crc kubenswrapper[4558]: I0120 16:43:23.865012 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:23 crc kubenswrapper[4558]: I0120 16:43:23.865022 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:23Z","lastTransitionTime":"2026-01-20T16:43:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:23 crc kubenswrapper[4558]: I0120 16:43:23.967363 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:23 crc kubenswrapper[4558]: I0120 16:43:23.967393 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:23 crc kubenswrapper[4558]: I0120 16:43:23.967401 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:23 crc kubenswrapper[4558]: I0120 16:43:23.967412 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:23 crc kubenswrapper[4558]: I0120 16:43:23.967420 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:23Z","lastTransitionTime":"2026-01-20T16:43:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:24 crc kubenswrapper[4558]: I0120 16:43:24.069055 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:24 crc kubenswrapper[4558]: I0120 16:43:24.069088 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:24 crc kubenswrapper[4558]: I0120 16:43:24.069098 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:24 crc kubenswrapper[4558]: I0120 16:43:24.069111 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:24 crc kubenswrapper[4558]: I0120 16:43:24.069119 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:24Z","lastTransitionTime":"2026-01-20T16:43:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:24 crc kubenswrapper[4558]: I0120 16:43:24.171275 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:24 crc kubenswrapper[4558]: I0120 16:43:24.171316 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:24 crc kubenswrapper[4558]: I0120 16:43:24.171327 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:24 crc kubenswrapper[4558]: I0120 16:43:24.171341 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:24 crc kubenswrapper[4558]: I0120 16:43:24.171349 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:24Z","lastTransitionTime":"2026-01-20T16:43:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:24 crc kubenswrapper[4558]: I0120 16:43:24.273250 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:24 crc kubenswrapper[4558]: I0120 16:43:24.273283 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:24 crc kubenswrapper[4558]: I0120 16:43:24.273291 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:24 crc kubenswrapper[4558]: I0120 16:43:24.273310 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:24 crc kubenswrapper[4558]: I0120 16:43:24.273319 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:24Z","lastTransitionTime":"2026-01-20T16:43:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:24 crc kubenswrapper[4558]: I0120 16:43:24.376554 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:24 crc kubenswrapper[4558]: I0120 16:43:24.376635 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:24 crc kubenswrapper[4558]: I0120 16:43:24.376648 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:24 crc kubenswrapper[4558]: I0120 16:43:24.376667 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:24 crc kubenswrapper[4558]: I0120 16:43:24.376676 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:24Z","lastTransitionTime":"2026-01-20T16:43:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:24 crc kubenswrapper[4558]: I0120 16:43:24.480054 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:24 crc kubenswrapper[4558]: I0120 16:43:24.480143 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:24 crc kubenswrapper[4558]: I0120 16:43:24.480154 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:24 crc kubenswrapper[4558]: I0120 16:43:24.480224 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:24 crc kubenswrapper[4558]: I0120 16:43:24.480238 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:24Z","lastTransitionTime":"2026-01-20T16:43:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:24 crc kubenswrapper[4558]: I0120 16:43:24.565096 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:43:24 crc kubenswrapper[4558]: I0120 16:43:24.565195 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 20 16:43:24 crc kubenswrapper[4558]: E0120 16:43:24.565288 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 20 16:43:24 crc kubenswrapper[4558]: E0120 16:43:24.565426 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 20 16:43:24 crc kubenswrapper[4558]: I0120 16:43:24.565524 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 20 16:43:24 crc kubenswrapper[4558]: E0120 16:43:24.565617 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 20 16:43:24 crc kubenswrapper[4558]: I0120 16:43:24.582209 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:24 crc kubenswrapper[4558]: I0120 16:43:24.582254 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:24 crc kubenswrapper[4558]: I0120 16:43:24.582290 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:24 crc kubenswrapper[4558]: I0120 16:43:24.582307 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:24 crc kubenswrapper[4558]: I0120 16:43:24.582319 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:24Z","lastTransitionTime":"2026-01-20T16:43:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:24 crc kubenswrapper[4558]: I0120 16:43:24.589416 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-08 23:56:53.019914675 +0000 UTC Jan 20 16:43:24 crc kubenswrapper[4558]: I0120 16:43:24.685720 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:24 crc kubenswrapper[4558]: I0120 16:43:24.685777 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:24 crc kubenswrapper[4558]: I0120 16:43:24.685788 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:24 crc kubenswrapper[4558]: I0120 16:43:24.685814 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:24 crc kubenswrapper[4558]: I0120 16:43:24.685826 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:24Z","lastTransitionTime":"2026-01-20T16:43:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:24 crc kubenswrapper[4558]: I0120 16:43:24.787570 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:24 crc kubenswrapper[4558]: I0120 16:43:24.787629 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:24 crc kubenswrapper[4558]: I0120 16:43:24.787640 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:24 crc kubenswrapper[4558]: I0120 16:43:24.787657 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:24 crc kubenswrapper[4558]: I0120 16:43:24.787667 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:24Z","lastTransitionTime":"2026-01-20T16:43:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:24 crc kubenswrapper[4558]: I0120 16:43:24.889540 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:24 crc kubenswrapper[4558]: I0120 16:43:24.889645 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:24 crc kubenswrapper[4558]: I0120 16:43:24.889655 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:24 crc kubenswrapper[4558]: I0120 16:43:24.889668 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:24 crc kubenswrapper[4558]: I0120 16:43:24.889678 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:24Z","lastTransitionTime":"2026-01-20T16:43:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:24 crc kubenswrapper[4558]: I0120 16:43:24.992426 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:24 crc kubenswrapper[4558]: I0120 16:43:24.992464 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:24 crc kubenswrapper[4558]: I0120 16:43:24.992472 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:24 crc kubenswrapper[4558]: I0120 16:43:24.992486 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:24 crc kubenswrapper[4558]: I0120 16:43:24.992496 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:24Z","lastTransitionTime":"2026-01-20T16:43:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:25 crc kubenswrapper[4558]: I0120 16:43:25.095157 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:25 crc kubenswrapper[4558]: I0120 16:43:25.095234 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:25 crc kubenswrapper[4558]: I0120 16:43:25.095244 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:25 crc kubenswrapper[4558]: I0120 16:43:25.095260 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:25 crc kubenswrapper[4558]: I0120 16:43:25.095268 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:25Z","lastTransitionTime":"2026-01-20T16:43:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:25 crc kubenswrapper[4558]: I0120 16:43:25.197380 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:25 crc kubenswrapper[4558]: I0120 16:43:25.197452 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:25 crc kubenswrapper[4558]: I0120 16:43:25.197473 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:25 crc kubenswrapper[4558]: I0120 16:43:25.197497 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:25 crc kubenswrapper[4558]: I0120 16:43:25.197509 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:25Z","lastTransitionTime":"2026-01-20T16:43:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:25 crc kubenswrapper[4558]: I0120 16:43:25.299159 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:25 crc kubenswrapper[4558]: I0120 16:43:25.299212 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:25 crc kubenswrapper[4558]: I0120 16:43:25.299222 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:25 crc kubenswrapper[4558]: I0120 16:43:25.299235 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:25 crc kubenswrapper[4558]: I0120 16:43:25.299244 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:25Z","lastTransitionTime":"2026-01-20T16:43:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:25 crc kubenswrapper[4558]: I0120 16:43:25.401346 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:25 crc kubenswrapper[4558]: I0120 16:43:25.401414 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:25 crc kubenswrapper[4558]: I0120 16:43:25.401427 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:25 crc kubenswrapper[4558]: I0120 16:43:25.401447 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:25 crc kubenswrapper[4558]: I0120 16:43:25.401461 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:25Z","lastTransitionTime":"2026-01-20T16:43:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:25 crc kubenswrapper[4558]: I0120 16:43:25.504126 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:25 crc kubenswrapper[4558]: I0120 16:43:25.504191 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:25 crc kubenswrapper[4558]: I0120 16:43:25.504206 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:25 crc kubenswrapper[4558]: I0120 16:43:25.504225 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:25 crc kubenswrapper[4558]: I0120 16:43:25.504240 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:25Z","lastTransitionTime":"2026-01-20T16:43:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:25 crc kubenswrapper[4558]: I0120 16:43:25.565568 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9wrq6" Jan 20 16:43:25 crc kubenswrapper[4558]: E0120 16:43:25.565759 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9wrq6" podUID="30032328-bd33-4073-9366-e10bc5e2aa77" Jan 20 16:43:25 crc kubenswrapper[4558]: I0120 16:43:25.589765 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-23 00:29:59.439093307 +0000 UTC Jan 20 16:43:25 crc kubenswrapper[4558]: I0120 16:43:25.606370 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:25 crc kubenswrapper[4558]: I0120 16:43:25.606410 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:25 crc kubenswrapper[4558]: I0120 16:43:25.606419 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:25 crc kubenswrapper[4558]: I0120 16:43:25.606434 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:25 crc kubenswrapper[4558]: I0120 16:43:25.606444 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:25Z","lastTransitionTime":"2026-01-20T16:43:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:25 crc kubenswrapper[4558]: I0120 16:43:25.709645 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:25 crc kubenswrapper[4558]: I0120 16:43:25.709682 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:25 crc kubenswrapper[4558]: I0120 16:43:25.709694 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:25 crc kubenswrapper[4558]: I0120 16:43:25.709714 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:25 crc kubenswrapper[4558]: I0120 16:43:25.709724 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:25Z","lastTransitionTime":"2026-01-20T16:43:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:25 crc kubenswrapper[4558]: I0120 16:43:25.812223 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:25 crc kubenswrapper[4558]: I0120 16:43:25.812260 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:25 crc kubenswrapper[4558]: I0120 16:43:25.812268 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:25 crc kubenswrapper[4558]: I0120 16:43:25.812282 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:25 crc kubenswrapper[4558]: I0120 16:43:25.812291 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:25Z","lastTransitionTime":"2026-01-20T16:43:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:25 crc kubenswrapper[4558]: I0120 16:43:25.913812 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:25 crc kubenswrapper[4558]: I0120 16:43:25.913849 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:25 crc kubenswrapper[4558]: I0120 16:43:25.913858 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:25 crc kubenswrapper[4558]: I0120 16:43:25.913871 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:25 crc kubenswrapper[4558]: I0120 16:43:25.913879 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:25Z","lastTransitionTime":"2026-01-20T16:43:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:26 crc kubenswrapper[4558]: I0120 16:43:26.015947 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:26 crc kubenswrapper[4558]: I0120 16:43:26.015986 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:26 crc kubenswrapper[4558]: I0120 16:43:26.015995 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:26 crc kubenswrapper[4558]: I0120 16:43:26.016008 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:26 crc kubenswrapper[4558]: I0120 16:43:26.016016 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:26Z","lastTransitionTime":"2026-01-20T16:43:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:26 crc kubenswrapper[4558]: I0120 16:43:26.118454 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:26 crc kubenswrapper[4558]: I0120 16:43:26.118492 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:26 crc kubenswrapper[4558]: I0120 16:43:26.118503 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:26 crc kubenswrapper[4558]: I0120 16:43:26.118515 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:26 crc kubenswrapper[4558]: I0120 16:43:26.118525 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:26Z","lastTransitionTime":"2026-01-20T16:43:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:26 crc kubenswrapper[4558]: I0120 16:43:26.221304 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:26 crc kubenswrapper[4558]: I0120 16:43:26.221344 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:26 crc kubenswrapper[4558]: I0120 16:43:26.221352 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:26 crc kubenswrapper[4558]: I0120 16:43:26.221364 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:26 crc kubenswrapper[4558]: I0120 16:43:26.221373 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:26Z","lastTransitionTime":"2026-01-20T16:43:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:26 crc kubenswrapper[4558]: I0120 16:43:26.323948 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:26 crc kubenswrapper[4558]: I0120 16:43:26.323979 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:26 crc kubenswrapper[4558]: I0120 16:43:26.323987 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:26 crc kubenswrapper[4558]: I0120 16:43:26.324000 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:26 crc kubenswrapper[4558]: I0120 16:43:26.324009 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:26Z","lastTransitionTime":"2026-01-20T16:43:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:26 crc kubenswrapper[4558]: I0120 16:43:26.425945 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:26 crc kubenswrapper[4558]: I0120 16:43:26.426011 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:26 crc kubenswrapper[4558]: I0120 16:43:26.426021 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:26 crc kubenswrapper[4558]: I0120 16:43:26.426034 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:26 crc kubenswrapper[4558]: I0120 16:43:26.426042 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:26Z","lastTransitionTime":"2026-01-20T16:43:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:26 crc kubenswrapper[4558]: I0120 16:43:26.527889 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:26 crc kubenswrapper[4558]: I0120 16:43:26.527927 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:26 crc kubenswrapper[4558]: I0120 16:43:26.527935 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:26 crc kubenswrapper[4558]: I0120 16:43:26.527948 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:26 crc kubenswrapper[4558]: I0120 16:43:26.527958 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:26Z","lastTransitionTime":"2026-01-20T16:43:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:26 crc kubenswrapper[4558]: I0120 16:43:26.565308 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 20 16:43:26 crc kubenswrapper[4558]: I0120 16:43:26.565325 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:43:26 crc kubenswrapper[4558]: I0120 16:43:26.565387 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 20 16:43:26 crc kubenswrapper[4558]: E0120 16:43:26.565407 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 20 16:43:26 crc kubenswrapper[4558]: E0120 16:43:26.565496 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 20 16:43:26 crc kubenswrapper[4558]: E0120 16:43:26.565584 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 20 16:43:26 crc kubenswrapper[4558]: I0120 16:43:26.575308 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:43:26Z is after 2025-08-24T17:21:41Z" Jan 20 16:43:26 crc kubenswrapper[4558]: I0120 16:43:26.583423 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfwnl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8fe5472-7373-4e5d-87fc-a9ecaf26a5cd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://38cd58ecc67d337df2c29b79e5460ea23d4acc1e66de8b08ea70d7ab7b30691d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9nvl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a14e9281ff19e324499722aefadf1e1f2fd005089c153918ad4d8a7f5bdfa853\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2
099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9nvl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-rfwnl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:43:26Z is after 2025-08-24T17:21:41Z" Jan 20 16:43:26 crc kubenswrapper[4558]: I0120 16:43:26.590040 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-14 21:08:42.550820295 +0000 UTC Jan 20 16:43:26 crc kubenswrapper[4558]: I0120 16:43:26.591886 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60aafae7-2e6e-4d21-9ce6-8ff5382ef642\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0d16b973c217656d6a8ceaf684e9dec82daeb583209943f6b2979be47ab03fda\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb2e7753f75b10088eea9204511ae78e4be9a2b5be8c17ed04c3c972b13f8190\\\",\\\"image\\\":\\\"quay.io/o
penshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a426b97002a49938b18009db49867dc9a15102707b42ec46bc64093c0ed3c2c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6e4df1c3b2a1aef9adb3da64f89a1b8c0904fd22882609dbafe2fbc5718b8016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6e4df1c3b2a1aef9adb3da64f89a1b8c0904fd22882609dbafe2fbc5718b8016\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:43:26Z is after 2025-08-24T17:21:41Z" Jan 20 16:43:26 crc kubenswrapper[4558]: I0120 16:43:26.600802 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e190c24c1d8a3bc0241334598ce23829ada9afee282d49a0c82cd67ad82bcd02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:43:26Z is after 2025-08-24T17:21:41Z" Jan 20 16:43:26 crc kubenswrapper[4558]: I0120 16:43:26.609182 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:43:26Z is after 2025-08-24T17:21:41Z" Jan 20 16:43:26 crc kubenswrapper[4558]: I0120 16:43:26.618792 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-f5t7h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://21a318f03d0df490cb7bae11819c52eca011d163081aeecff1b2dd8f72caabbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fd8a8162080a700ce21ff3b2cf7c28fabfc682758eb69c483909d4008b07daab\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e77617c69434ce20c448b525c7e86f1ece18d51ce6e14167b4ca7e99de8e5709\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e77617c69434ce20c448b525c7e86f1ece18d51ce6e14167b4ca7e99de8e5709\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://453c67434281e0261ec6799b30a548a01efd72d7df96632409e745b2e6a94546\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://453c67434281e0261ec6799b30a548a01efd72d7df96632409e745b2e6a94546\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://06eab4daa6c8b6a564cf1bb1403b7b3b259a234ee09f23b4f4bc0628c2d58e69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://06eab4daa6c8b6a564cf1bb1403b7b3b259a234ee09f23b4f4bc0628c2d58e69\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1291ac09fc6ae2a79fdeaa0fc47272f61b97f45f67ed3f7c90bdfdbfe3519ff2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1291ac09fc6ae2a79fdeaa0fc47272f61b97f45f67ed3f7c90bdfdbfe3519ff2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fe484514f50a14e485d4a97ea3ce9401c77f3f4dae4717719be7ed6c4d7b1a59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fe484514f50a14e485d4a97ea3ce9401c77f3f4dae4717719be7ed6c4d7b1a59\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/c
ni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nrmwm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-f5t7h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:43:26Z is after 2025-08-24T17:21:41Z" Jan 20 16:43:26 crc kubenswrapper[4558]: I0120 16:43:26.629123 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:26 crc kubenswrapper[4558]: I0120 16:43:26.629150 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:26 crc kubenswrapper[4558]: I0120 16:43:26.629159 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:26 crc kubenswrapper[4558]: I0120 16:43:26.629187 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:26 crc kubenswrapper[4558]: I0120 16:43:26.629196 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:26Z","lastTransitionTime":"2026-01-20T16:43:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:26 crc kubenswrapper[4558]: I0120 16:43:26.629788 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-9wrq6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"30032328-bd33-4073-9366-e10bc5e2aa77\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6tz4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v6tz4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:17Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-9wrq6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:43:26Z is after 2025-08-24T17:21:41Z" Jan 20 16:43:26 crc kubenswrapper[4558]: I0120 16:43:26.638896 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:43:26Z is after 2025-08-24T17:21:41Z" Jan 20 16:43:26 crc kubenswrapper[4558]: I0120 16:43:26.646315 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-d96bp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5c8d2c8b-fb8e-4bff-b8d2-69570e5d9e57\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fe858db22acc96b062f1bc7941e05c823f30d1f9ba50bd497c57f38d57e04293\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4v6jx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-d96bp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:43:26Z is after 2025-08-24T17:21:41Z" Jan 20 16:43:26 crc kubenswrapper[4558]: I0120 16:43:26.659108 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"254129cd-82fc-4162-b671-2434bc9e2972\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1a6a184067bde115ef5e09fdd90e7dccfe89a9dc347af450998b91c068e7e803\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd8b93e5559d95511a1285a9aec9a7d72df6c330bcd1e1daf3e93f5494f70eeb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4264ef8ee49a8b172fb05ac5c9b0bd32350b5d9ae95293079dfeaf44f021b455\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0889cf371f8b06cc2d255c15fda97db1d5000d6f6cff29d2fdcb8667cede8a99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4265d3281e954ee5989ee008da28c6cb9fa29478e5fdf23325521d455304da9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://186a1d2caad8865abf7f565405c4a4c077038ccb67ba77927cc2392ec075a811\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e9f4ed928d20c9db64a689d7d14df9b81870bb0523cf5098e09d2d85aad9fca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e9f4ed928d20c9db64a689d7d14df9b81870bb0523cf5098e09d2d85aad9fca\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-20T16:42:58Z\\\",\\\"message\\\":\\\"ess{},},Conditions:[]Condition{},},}\\\\nI0120 16:42:58.184623 6635 lb_config.go:1031] Cluster endpoints for openshift-controller-manager/controller-manager for network=default are: map[]\\\\nF0120 16:42:58.184632 6635 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:42:58Z is after 2025-08-24T17:21:41Z]\\\\nI0120 16:42:58.184637 6635 services_controller.go:443] Built service openshift-controller-manager/controller-manager LB cluster-wide configs for network=default: []services.lbConfig{services.lbConfig{vips:[]string{\\\\\\\"10.217.5.149\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:443, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:57Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-nv2xw_openshift-ovn-kubernetes(254129cd-82fc-4162-b671-2434bc9e2972)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f41d488b0c24e594a2f5cad36b5f8efdb7d867fae0e18a74fe3c396a203d162f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgzcc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-nv2xw\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:43:26Z is after 2025-08-24T17:21:41Z" Jan 20 16:43:26 crc kubenswrapper[4558]: I0120 16:43:26.667507 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jsqvf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bedf08c7-1f93-4931-a7f3-e729e2a137af\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2297634e20683413a5eb643517580c9486303dc9616a7588a4ac571f3d0e53b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e32b76495d88d356373f9389e163a2f3a72e202bb454c7015594c1f5a41b511\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-20T16:42:50Z\\\",\\\"message\\\":\\\"2026-01-20T16:42:05+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_149f95a3-037e-4113-9baf-ba51c18b4cf2\\\\n2026-01-20T16:42:05+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_149f95a3-037e-4113-9baf-ba51c18b4cf2 to 
/host/opt/cni/bin/\\\\n2026-01-20T16:42:05Z [verbose] multus-daemon started\\\\n2026-01-20T16:42:05Z [verbose] Readiness Indicator file check\\\\n2026-01-20T16:42:50Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xxtbc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jsqvf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:43:26Z is after 2025-08-24T17:21:41Z" Jan 20 16:43:26 crc kubenswrapper[4558]: I0120 16:43:26.674309 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-47477" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e026445a-aa73-4a0e-ba35-e2e07de1278c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec2ee3ff161cba7e14ad26763081c9f092f0dfe969cef113631c27e0db53d157\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xcnwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-47477\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:43:26Z is after 2025-08-24T17:21:41Z" Jan 20 16:43:26 crc kubenswrapper[4558]: I0120 16:43:26.682664 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a13bfefa-f525-4ecd-89f5-f10480532bf6\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9030e970e96b6909d771d4fa0c3b4f493b4a825cbc43ef7e684272284329c7c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://168e8f533aaa108e4160f9ae4af6bb7944edba7a596178ffffdc5325073fc3c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e0141aef1babb57501016cb09787591b5e6d9136b8d7dd4ba64393f0be5a027\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c87f08993c2b47c63ba9652c7dafe0fdbecdcb03a7b7080664ab1f1bd0e1d602\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:43:26Z is after 2025-08-24T17:21:41Z" Jan 20 16:43:26 crc kubenswrapper[4558]: I0120 16:43:26.691531 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f904dd3e4b43e51569b6532f00fa2f9edf912c1f1969ee2ea00c99d1b7e35a41\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://789c877f4a456b3ce64bdbe6fefd50f1ef3f92bf9e26e6296005cf366e0ce265\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:43:26Z is after 2025-08-24T17:21:41Z" Jan 20 16:43:26 crc kubenswrapper[4558]: I0120 16:43:26.699718 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:07Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3e45cc56934a7b90e9d47f1c6d06d8dc140d57db4a216469e8ae112f398663e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:43:26Z is after 2025-08-24T17:21:41Z" Jan 20 16:43:26 crc kubenswrapper[4558]: I0120 16:43:26.708176 4558 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"68337d27-3fa6-4a29-88b0-82e60c3739eb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de7f7c23993a57d30f08686e1cd4abb5b17e108ac12dd0c7929a31574c69eeb1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b18a59ecb802507e0d5988eacc915fd7e0d6954518de80f61162c97f680fd8c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4b478\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:42:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vr4r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:43:26Z is after 2025-08-24T17:21:41Z" Jan 20 
16:43:26 crc kubenswrapper[4558]: I0120 16:43:26.719472 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f10d97b3-5765-454b-b306-1ef544be2c86\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3ba1f84f7ea0577350c7025aeb96fb1b329297f4d6767727f9a4e6659d3027cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac2f374f27360645aa18b003d1b2415cd498c17c73812950904109dd4b77cbd5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ac2f374f27360645aa18b003d1b2415cd498c17c73812950904109dd4b77cbd5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:43:26Z is after 2025-08-24T17:21:41Z" Jan 20 16:43:26 crc kubenswrapper[4558]: I0120 16:43:26.731481 4558 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:26 crc kubenswrapper[4558]: I0120 16:43:26.731511 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:26 crc kubenswrapper[4558]: I0120 16:43:26.731520 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:26 crc kubenswrapper[4558]: I0120 16:43:26.731534 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:26 crc kubenswrapper[4558]: I0120 16:43:26.731543 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:26Z","lastTransitionTime":"2026-01-20T16:43:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:26 crc kubenswrapper[4558]: I0120 16:43:26.735982 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"425eab90-fb70-4498-a98b-b95742033890\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48592e72d91cf527296f06e52fec1c16c1da21323c0644659945af109a74ebb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7e18f59b5e6b6c0be141b7efb3ccb1df4ff6e3c394bcc88fb1142c8df433493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\
\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c889e2b0e14244ef49ec057e3d5fd91687e335a983fdc99a6816abd2af643ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d434793571cbba089cc34029d951bee9063aca58728b791f2d1af2d68229f778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://89f7ad577df11c509855a90ac8791da6de3f0d2857a1ca56726a6327f40455bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termina
ted\\\":{\\\"containerID\\\":\\\"cri-o://baab628ee8aadc67ea829cc46639be55ae63281a9ec9a89737260f558a9ddf75\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15eddb9054021\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d5faab5625a16e1459b23ae0f4cc8ca3285bf4eb1ba4c53c1ed15eddb9054021\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f913fa07ec9571d1c61f9b7dddb46614846aa99d6e0b5b75709e9fa5a18a09c3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:43:26Z is after 2025-08-24T17:21:41Z" Jan 20 16:43:26 crc kubenswrapper[4558]: I0120 16:43:26.746251 4558 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"30e1ff9d-8dcd-4754-9a7a-c09598fb83db\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:42:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T16:41:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ea68e442bb2143fbaa5e2c9fe335c6b023a31c2eeaa78d60de45c6ef40c2894\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a8122c3b57ac4f0026df149fe622450d07d8b0d517ad018decd7b21d8d9c04a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7444e8fea1853bae45dbd3d52d07e3f7b94314cd29fe8c99891e515a892e569e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b6438ffea42f98178c4c2c21ae01725a8b54d6a6f790f47d64faf5ecd9f1c00f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f2e22f1d4e87dd06a67f49aedad280da48ac1c4ba2e438a4925cf8bd5330c4d8\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-20T16:42:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0120 16:41:58.859734 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0120 16:41:58.861094 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2179835226/tls.crt::/tmp/serving-cert-2179835226/tls.key\\\\\\\"\\\\nI0120 16:42:04.205247 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0120 16:42:04.207291 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0120 16:42:04.207308 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0120 16:42:04.207329 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0120 16:42:04.207333 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0120 16:42:04.210615 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0120 16:42:04.210632 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0120 16:42:04.210641 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210648 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0120 16:42:04.210652 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0120 16:42:04.210655 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0120 16:42:04.210658 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0120 16:42:04.210660 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0120 16:42:04.211964 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:42:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4850bde1fb538e21fc91949dc584ef0a791d718844691a44559e0513bb36203\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-20T16:41:48Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-20T16:41:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-20T16:41:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T16:41:46Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:43:26Z is after 2025-08-24T17:21:41Z" Jan 20 16:43:26 crc kubenswrapper[4558]: I0120 16:43:26.833446 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:26 crc kubenswrapper[4558]: I0120 16:43:26.833504 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:26 crc kubenswrapper[4558]: I0120 16:43:26.833513 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:26 crc kubenswrapper[4558]: I0120 16:43:26.833527 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:26 crc kubenswrapper[4558]: I0120 16:43:26.833553 4558 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:26Z","lastTransitionTime":"2026-01-20T16:43:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:26 crc kubenswrapper[4558]: I0120 16:43:26.935049 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:26 crc kubenswrapper[4558]: I0120 16:43:26.935079 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:26 crc kubenswrapper[4558]: I0120 16:43:26.935088 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:26 crc kubenswrapper[4558]: I0120 16:43:26.935099 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:26 crc kubenswrapper[4558]: I0120 16:43:26.935107 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:26Z","lastTransitionTime":"2026-01-20T16:43:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:27 crc kubenswrapper[4558]: I0120 16:43:27.037135 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:27 crc kubenswrapper[4558]: I0120 16:43:27.037190 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:27 crc kubenswrapper[4558]: I0120 16:43:27.037199 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:27 crc kubenswrapper[4558]: I0120 16:43:27.037211 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:27 crc kubenswrapper[4558]: I0120 16:43:27.037219 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:27Z","lastTransitionTime":"2026-01-20T16:43:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:27 crc kubenswrapper[4558]: I0120 16:43:27.139045 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:27 crc kubenswrapper[4558]: I0120 16:43:27.139095 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:27 crc kubenswrapper[4558]: I0120 16:43:27.139109 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:27 crc kubenswrapper[4558]: I0120 16:43:27.139124 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:27 crc kubenswrapper[4558]: I0120 16:43:27.139132 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:27Z","lastTransitionTime":"2026-01-20T16:43:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:27 crc kubenswrapper[4558]: I0120 16:43:27.240820 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:27 crc kubenswrapper[4558]: I0120 16:43:27.240879 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:27 crc kubenswrapper[4558]: I0120 16:43:27.240889 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:27 crc kubenswrapper[4558]: I0120 16:43:27.240901 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:27 crc kubenswrapper[4558]: I0120 16:43:27.240910 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:27Z","lastTransitionTime":"2026-01-20T16:43:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:27 crc kubenswrapper[4558]: I0120 16:43:27.342693 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:27 crc kubenswrapper[4558]: I0120 16:43:27.342729 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:27 crc kubenswrapper[4558]: I0120 16:43:27.342737 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:27 crc kubenswrapper[4558]: I0120 16:43:27.342751 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:27 crc kubenswrapper[4558]: I0120 16:43:27.342758 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:27Z","lastTransitionTime":"2026-01-20T16:43:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:27 crc kubenswrapper[4558]: I0120 16:43:27.444699 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:27 crc kubenswrapper[4558]: I0120 16:43:27.444735 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:27 crc kubenswrapper[4558]: I0120 16:43:27.444744 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:27 crc kubenswrapper[4558]: I0120 16:43:27.444755 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:27 crc kubenswrapper[4558]: I0120 16:43:27.444765 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:27Z","lastTransitionTime":"2026-01-20T16:43:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:27 crc kubenswrapper[4558]: I0120 16:43:27.547015 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:27 crc kubenswrapper[4558]: I0120 16:43:27.547040 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:27 crc kubenswrapper[4558]: I0120 16:43:27.547050 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:27 crc kubenswrapper[4558]: I0120 16:43:27.547060 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:27 crc kubenswrapper[4558]: I0120 16:43:27.547069 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:27Z","lastTransitionTime":"2026-01-20T16:43:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:27 crc kubenswrapper[4558]: I0120 16:43:27.565661 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9wrq6" Jan 20 16:43:27 crc kubenswrapper[4558]: E0120 16:43:27.565743 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9wrq6" podUID="30032328-bd33-4073-9366-e10bc5e2aa77" Jan 20 16:43:27 crc kubenswrapper[4558]: I0120 16:43:27.590139 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-04 20:17:53.424983908 +0000 UTC Jan 20 16:43:27 crc kubenswrapper[4558]: I0120 16:43:27.649226 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:27 crc kubenswrapper[4558]: I0120 16:43:27.649453 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:27 crc kubenswrapper[4558]: I0120 16:43:27.649519 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:27 crc kubenswrapper[4558]: I0120 16:43:27.649585 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:27 crc kubenswrapper[4558]: I0120 16:43:27.649674 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:27Z","lastTransitionTime":"2026-01-20T16:43:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:27 crc kubenswrapper[4558]: I0120 16:43:27.751273 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:27 crc kubenswrapper[4558]: I0120 16:43:27.751305 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:27 crc kubenswrapper[4558]: I0120 16:43:27.751314 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:27 crc kubenswrapper[4558]: I0120 16:43:27.751325 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:27 crc kubenswrapper[4558]: I0120 16:43:27.751334 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:27Z","lastTransitionTime":"2026-01-20T16:43:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:27 crc kubenswrapper[4558]: I0120 16:43:27.852837 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:27 crc kubenswrapper[4558]: I0120 16:43:27.853054 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:27 crc kubenswrapper[4558]: I0120 16:43:27.853112 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:27 crc kubenswrapper[4558]: I0120 16:43:27.853187 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:27 crc kubenswrapper[4558]: I0120 16:43:27.853257 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:27Z","lastTransitionTime":"2026-01-20T16:43:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:27 crc kubenswrapper[4558]: I0120 16:43:27.954801 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:27 crc kubenswrapper[4558]: I0120 16:43:27.954833 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:27 crc kubenswrapper[4558]: I0120 16:43:27.954841 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:27 crc kubenswrapper[4558]: I0120 16:43:27.954851 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:27 crc kubenswrapper[4558]: I0120 16:43:27.954859 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:27Z","lastTransitionTime":"2026-01-20T16:43:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:28 crc kubenswrapper[4558]: I0120 16:43:28.056946 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:28 crc kubenswrapper[4558]: I0120 16:43:28.056979 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:28 crc kubenswrapper[4558]: I0120 16:43:28.056987 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:28 crc kubenswrapper[4558]: I0120 16:43:28.056998 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:28 crc kubenswrapper[4558]: I0120 16:43:28.057006 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:28Z","lastTransitionTime":"2026-01-20T16:43:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:28 crc kubenswrapper[4558]: I0120 16:43:28.158814 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:28 crc kubenswrapper[4558]: I0120 16:43:28.158842 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:28 crc kubenswrapper[4558]: I0120 16:43:28.158851 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:28 crc kubenswrapper[4558]: I0120 16:43:28.158862 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:28 crc kubenswrapper[4558]: I0120 16:43:28.158870 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:28Z","lastTransitionTime":"2026-01-20T16:43:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:28 crc kubenswrapper[4558]: I0120 16:43:28.261218 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:28 crc kubenswrapper[4558]: I0120 16:43:28.261252 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:28 crc kubenswrapper[4558]: I0120 16:43:28.261260 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:28 crc kubenswrapper[4558]: I0120 16:43:28.261274 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:28 crc kubenswrapper[4558]: I0120 16:43:28.261283 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:28Z","lastTransitionTime":"2026-01-20T16:43:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:28 crc kubenswrapper[4558]: I0120 16:43:28.362800 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:28 crc kubenswrapper[4558]: I0120 16:43:28.362840 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:28 crc kubenswrapper[4558]: I0120 16:43:28.362849 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:28 crc kubenswrapper[4558]: I0120 16:43:28.362861 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:28 crc kubenswrapper[4558]: I0120 16:43:28.362868 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:28Z","lastTransitionTime":"2026-01-20T16:43:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:28 crc kubenswrapper[4558]: I0120 16:43:28.464793 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:28 crc kubenswrapper[4558]: I0120 16:43:28.464822 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:28 crc kubenswrapper[4558]: I0120 16:43:28.464830 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:28 crc kubenswrapper[4558]: I0120 16:43:28.464841 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:28 crc kubenswrapper[4558]: I0120 16:43:28.464848 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:28Z","lastTransitionTime":"2026-01-20T16:43:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:28 crc kubenswrapper[4558]: I0120 16:43:28.565753 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 20 16:43:28 crc kubenswrapper[4558]: E0120 16:43:28.565879 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 20 16:43:28 crc kubenswrapper[4558]: I0120 16:43:28.565965 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 20 16:43:28 crc kubenswrapper[4558]: E0120 16:43:28.566035 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 20 16:43:28 crc kubenswrapper[4558]: I0120 16:43:28.566339 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:43:28 crc kubenswrapper[4558]: E0120 16:43:28.566494 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 20 16:43:28 crc kubenswrapper[4558]: I0120 16:43:28.567273 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:28 crc kubenswrapper[4558]: I0120 16:43:28.567299 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:28 crc kubenswrapper[4558]: I0120 16:43:28.567308 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:28 crc kubenswrapper[4558]: I0120 16:43:28.567324 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:28 crc kubenswrapper[4558]: I0120 16:43:28.567332 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:28Z","lastTransitionTime":"2026-01-20T16:43:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:28 crc kubenswrapper[4558]: I0120 16:43:28.590791 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-21 07:36:50.436085025 +0000 UTC Jan 20 16:43:28 crc kubenswrapper[4558]: I0120 16:43:28.669833 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:28 crc kubenswrapper[4558]: I0120 16:43:28.669858 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:28 crc kubenswrapper[4558]: I0120 16:43:28.669866 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:28 crc kubenswrapper[4558]: I0120 16:43:28.669877 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:28 crc kubenswrapper[4558]: I0120 16:43:28.669886 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:28Z","lastTransitionTime":"2026-01-20T16:43:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:28 crc kubenswrapper[4558]: I0120 16:43:28.772040 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:28 crc kubenswrapper[4558]: I0120 16:43:28.772075 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:28 crc kubenswrapper[4558]: I0120 16:43:28.772084 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:28 crc kubenswrapper[4558]: I0120 16:43:28.772096 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:28 crc kubenswrapper[4558]: I0120 16:43:28.772105 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:28Z","lastTransitionTime":"2026-01-20T16:43:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:28 crc kubenswrapper[4558]: I0120 16:43:28.873936 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:28 crc kubenswrapper[4558]: I0120 16:43:28.873980 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:28 crc kubenswrapper[4558]: I0120 16:43:28.873990 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:28 crc kubenswrapper[4558]: I0120 16:43:28.874004 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:28 crc kubenswrapper[4558]: I0120 16:43:28.874012 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:28Z","lastTransitionTime":"2026-01-20T16:43:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:28 crc kubenswrapper[4558]: I0120 16:43:28.975869 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:28 crc kubenswrapper[4558]: I0120 16:43:28.975895 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:28 crc kubenswrapper[4558]: I0120 16:43:28.975905 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:28 crc kubenswrapper[4558]: I0120 16:43:28.975918 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:28 crc kubenswrapper[4558]: I0120 16:43:28.975925 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:28Z","lastTransitionTime":"2026-01-20T16:43:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:29 crc kubenswrapper[4558]: I0120 16:43:29.077746 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:29 crc kubenswrapper[4558]: I0120 16:43:29.077776 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:29 crc kubenswrapper[4558]: I0120 16:43:29.077784 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:29 crc kubenswrapper[4558]: I0120 16:43:29.077796 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:29 crc kubenswrapper[4558]: I0120 16:43:29.077804 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:29Z","lastTransitionTime":"2026-01-20T16:43:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:29 crc kubenswrapper[4558]: I0120 16:43:29.180214 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:29 crc kubenswrapper[4558]: I0120 16:43:29.180270 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:29 crc kubenswrapper[4558]: I0120 16:43:29.180279 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:29 crc kubenswrapper[4558]: I0120 16:43:29.180291 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:29 crc kubenswrapper[4558]: I0120 16:43:29.180300 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:29Z","lastTransitionTime":"2026-01-20T16:43:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:29 crc kubenswrapper[4558]: I0120 16:43:29.282215 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:29 crc kubenswrapper[4558]: I0120 16:43:29.282251 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:29 crc kubenswrapper[4558]: I0120 16:43:29.282259 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:29 crc kubenswrapper[4558]: I0120 16:43:29.282271 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:29 crc kubenswrapper[4558]: I0120 16:43:29.282278 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:29Z","lastTransitionTime":"2026-01-20T16:43:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:29 crc kubenswrapper[4558]: I0120 16:43:29.384767 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:29 crc kubenswrapper[4558]: I0120 16:43:29.384799 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:29 crc kubenswrapper[4558]: I0120 16:43:29.384808 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:29 crc kubenswrapper[4558]: I0120 16:43:29.384820 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:29 crc kubenswrapper[4558]: I0120 16:43:29.384829 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:29Z","lastTransitionTime":"2026-01-20T16:43:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:29 crc kubenswrapper[4558]: I0120 16:43:29.487130 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:29 crc kubenswrapper[4558]: I0120 16:43:29.487187 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:29 crc kubenswrapper[4558]: I0120 16:43:29.487196 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:29 crc kubenswrapper[4558]: I0120 16:43:29.487207 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:29 crc kubenswrapper[4558]: I0120 16:43:29.487214 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:29Z","lastTransitionTime":"2026-01-20T16:43:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:29 crc kubenswrapper[4558]: I0120 16:43:29.538663 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:29 crc kubenswrapper[4558]: I0120 16:43:29.538726 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:29 crc kubenswrapper[4558]: I0120 16:43:29.538735 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:29 crc kubenswrapper[4558]: I0120 16:43:29.538750 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:29 crc kubenswrapper[4558]: I0120 16:43:29.538759 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:29Z","lastTransitionTime":"2026-01-20T16:43:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:29 crc kubenswrapper[4558]: E0120 16:43:29.552067 4558 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:43:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:43:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:43:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:43:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:43:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:43:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:43:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:43:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"bf027402-7a62-478b-8585-220c98495823\\\",\\\"systemUUID\\\":\\\"1def282c-e24e-4bc0-9a1b-d7cc30756d3d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:43:29Z is after 2025-08-24T17:21:41Z" Jan 20 16:43:29 crc kubenswrapper[4558]: I0120 16:43:29.555327 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:29 crc kubenswrapper[4558]: I0120 16:43:29.555361 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 20 16:43:29 crc kubenswrapper[4558]: I0120 16:43:29.555369 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:29 crc kubenswrapper[4558]: I0120 16:43:29.555384 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:29 crc kubenswrapper[4558]: I0120 16:43:29.555394 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:29Z","lastTransitionTime":"2026-01-20T16:43:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:29 crc kubenswrapper[4558]: I0120 16:43:29.565404 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9wrq6" Jan 20 16:43:29 crc kubenswrapper[4558]: E0120 16:43:29.565497 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9wrq6" podUID="30032328-bd33-4073-9366-e10bc5e2aa77" Jan 20 16:43:29 crc kubenswrapper[4558]: I0120 16:43:29.566141 4558 scope.go:117] "RemoveContainer" containerID="0e9f4ed928d20c9db64a689d7d14df9b81870bb0523cf5098e09d2d85aad9fca" Jan 20 16:43:29 crc kubenswrapper[4558]: E0120 16:43:29.566317 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-nv2xw_openshift-ovn-kubernetes(254129cd-82fc-4162-b671-2434bc9e2972)\"" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" podUID="254129cd-82fc-4162-b671-2434bc9e2972" Jan 20 16:43:29 crc kubenswrapper[4558]: E0120 16:43:29.567654 4558 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:43:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:43:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:43:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:43:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:43:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:43:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:43:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:43:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056
b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951
},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"bf027402-7a62-478b-8585-220c98495823\\\",\\\"systemUUID\\\":\\\"1def282c-e24e-4bc0-9a1b-d7cc30756d3d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2026-01-20T16:43:29Z is after 2025-08-24T17:21:41Z" Jan 20 16:43:29 crc kubenswrapper[4558]: I0120 16:43:29.571670 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:29 crc kubenswrapper[4558]: I0120 16:43:29.571730 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:29 crc kubenswrapper[4558]: I0120 16:43:29.571744 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:29 crc kubenswrapper[4558]: I0120 16:43:29.571780 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:29 crc kubenswrapper[4558]: I0120 16:43:29.571798 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:29Z","lastTransitionTime":"2026-01-20T16:43:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:29 crc kubenswrapper[4558]: E0120 16:43:29.582017 4558 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:43:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:43:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:43:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:43:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:43:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:43:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:43:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:43:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"bf027402-7a62-478b-8585-220c98495823\\\",\\\"systemUUID\\\":\\\"1def282c-e24e-4bc0-9a1b-d7cc30756d3d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:43:29Z is after 2025-08-24T17:21:41Z" Jan 20 16:43:29 crc kubenswrapper[4558]: I0120 16:43:29.585035 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:29 crc kubenswrapper[4558]: I0120 16:43:29.585069 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 20 16:43:29 crc kubenswrapper[4558]: I0120 16:43:29.585079 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:29 crc kubenswrapper[4558]: I0120 16:43:29.585093 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:29 crc kubenswrapper[4558]: I0120 16:43:29.585104 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:29Z","lastTransitionTime":"2026-01-20T16:43:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:29 crc kubenswrapper[4558]: I0120 16:43:29.591874 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-13 23:24:09.398623215 +0000 UTC Jan 20 16:43:29 crc kubenswrapper[4558]: E0120 16:43:29.593824 4558 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:43:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:43:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:43:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:43:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:43:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:43:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:43:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:43:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"bf027402-7a62-478b-8585-220c98495823\\\",\\\"systemUUID\\\":\\\"1def282c-e24e-4bc0-9a1b-d7cc30756d3d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:43:29Z is after 2025-08-24T17:21:41Z" Jan 20 16:43:29 crc kubenswrapper[4558]: I0120 16:43:29.596173 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:29 crc kubenswrapper[4558]: I0120 16:43:29.596202 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 20 16:43:29 crc kubenswrapper[4558]: I0120 16:43:29.596212 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:29 crc kubenswrapper[4558]: I0120 16:43:29.596223 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:29 crc kubenswrapper[4558]: I0120 16:43:29.596234 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:29Z","lastTransitionTime":"2026-01-20T16:43:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:29 crc kubenswrapper[4558]: E0120 16:43:29.605394 4558 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:43:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:43:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:43:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:43:29Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:43:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:43:29Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-20T16:43:29Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-20T16:43:29Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"bf027402-7a62-478b-8585-220c98495823\\\",\\\"systemUUID\\\":\\\"1def282c-e24e-4bc0-9a1b-d7cc30756d3d\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-20T16:43:29Z is after 2025-08-24T17:21:41Z" Jan 20 16:43:29 crc kubenswrapper[4558]: E0120 16:43:29.605560 4558 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 20 16:43:29 crc kubenswrapper[4558]: I0120 16:43:29.606916 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 20 16:43:29 crc kubenswrapper[4558]: I0120 16:43:29.606964 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:29 crc kubenswrapper[4558]: I0120 16:43:29.606974 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:29 crc kubenswrapper[4558]: I0120 16:43:29.606991 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:29 crc kubenswrapper[4558]: I0120 16:43:29.607004 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:29Z","lastTransitionTime":"2026-01-20T16:43:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:29 crc kubenswrapper[4558]: I0120 16:43:29.709157 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:29 crc kubenswrapper[4558]: I0120 16:43:29.709220 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:29 crc kubenswrapper[4558]: I0120 16:43:29.709228 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:29 crc kubenswrapper[4558]: I0120 16:43:29.709239 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:29 crc kubenswrapper[4558]: I0120 16:43:29.709249 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:29Z","lastTransitionTime":"2026-01-20T16:43:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:29 crc kubenswrapper[4558]: I0120 16:43:29.811751 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:29 crc kubenswrapper[4558]: I0120 16:43:29.811803 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:29 crc kubenswrapper[4558]: I0120 16:43:29.811814 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:29 crc kubenswrapper[4558]: I0120 16:43:29.811827 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:29 crc kubenswrapper[4558]: I0120 16:43:29.811834 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:29Z","lastTransitionTime":"2026-01-20T16:43:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:29 crc kubenswrapper[4558]: I0120 16:43:29.914374 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:29 crc kubenswrapper[4558]: I0120 16:43:29.914409 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:29 crc kubenswrapper[4558]: I0120 16:43:29.914416 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:29 crc kubenswrapper[4558]: I0120 16:43:29.914429 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:29 crc kubenswrapper[4558]: I0120 16:43:29.914440 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:29Z","lastTransitionTime":"2026-01-20T16:43:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:30 crc kubenswrapper[4558]: I0120 16:43:30.017099 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:30 crc kubenswrapper[4558]: I0120 16:43:30.017136 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:30 crc kubenswrapper[4558]: I0120 16:43:30.017144 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:30 crc kubenswrapper[4558]: I0120 16:43:30.017156 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:30 crc kubenswrapper[4558]: I0120 16:43:30.017181 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:30Z","lastTransitionTime":"2026-01-20T16:43:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:30 crc kubenswrapper[4558]: I0120 16:43:30.119752 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:30 crc kubenswrapper[4558]: I0120 16:43:30.119799 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:30 crc kubenswrapper[4558]: I0120 16:43:30.119807 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:30 crc kubenswrapper[4558]: I0120 16:43:30.119821 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:30 crc kubenswrapper[4558]: I0120 16:43:30.119831 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:30Z","lastTransitionTime":"2026-01-20T16:43:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:30 crc kubenswrapper[4558]: I0120 16:43:30.222467 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:30 crc kubenswrapper[4558]: I0120 16:43:30.222507 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:30 crc kubenswrapper[4558]: I0120 16:43:30.222514 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:30 crc kubenswrapper[4558]: I0120 16:43:30.222527 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:30 crc kubenswrapper[4558]: I0120 16:43:30.222536 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:30Z","lastTransitionTime":"2026-01-20T16:43:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:30 crc kubenswrapper[4558]: I0120 16:43:30.325336 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:30 crc kubenswrapper[4558]: I0120 16:43:30.325373 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:30 crc kubenswrapper[4558]: I0120 16:43:30.325383 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:30 crc kubenswrapper[4558]: I0120 16:43:30.325398 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:30 crc kubenswrapper[4558]: I0120 16:43:30.325406 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:30Z","lastTransitionTime":"2026-01-20T16:43:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:30 crc kubenswrapper[4558]: I0120 16:43:30.428306 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:30 crc kubenswrapper[4558]: I0120 16:43:30.428352 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:30 crc kubenswrapper[4558]: I0120 16:43:30.428362 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:30 crc kubenswrapper[4558]: I0120 16:43:30.428376 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:30 crc kubenswrapper[4558]: I0120 16:43:30.428385 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:30Z","lastTransitionTime":"2026-01-20T16:43:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:30 crc kubenswrapper[4558]: I0120 16:43:30.531012 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:30 crc kubenswrapper[4558]: I0120 16:43:30.531073 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:30 crc kubenswrapper[4558]: I0120 16:43:30.531083 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:30 crc kubenswrapper[4558]: I0120 16:43:30.531099 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:30 crc kubenswrapper[4558]: I0120 16:43:30.531109 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:30Z","lastTransitionTime":"2026-01-20T16:43:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:30 crc kubenswrapper[4558]: I0120 16:43:30.565099 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 20 16:43:30 crc kubenswrapper[4558]: I0120 16:43:30.565136 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:43:30 crc kubenswrapper[4558]: E0120 16:43:30.565274 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 20 16:43:30 crc kubenswrapper[4558]: E0120 16:43:30.565431 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 20 16:43:30 crc kubenswrapper[4558]: I0120 16:43:30.565429 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 20 16:43:30 crc kubenswrapper[4558]: E0120 16:43:30.565514 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 20 16:43:30 crc kubenswrapper[4558]: I0120 16:43:30.592837 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-10 02:11:01.121396413 +0000 UTC Jan 20 16:43:30 crc kubenswrapper[4558]: I0120 16:43:30.633814 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:30 crc kubenswrapper[4558]: I0120 16:43:30.633861 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:30 crc kubenswrapper[4558]: I0120 16:43:30.633871 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:30 crc kubenswrapper[4558]: I0120 16:43:30.633885 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:30 crc kubenswrapper[4558]: I0120 16:43:30.633894 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:30Z","lastTransitionTime":"2026-01-20T16:43:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:30 crc kubenswrapper[4558]: I0120 16:43:30.736258 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:30 crc kubenswrapper[4558]: I0120 16:43:30.736296 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:30 crc kubenswrapper[4558]: I0120 16:43:30.736306 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:30 crc kubenswrapper[4558]: I0120 16:43:30.736320 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:30 crc kubenswrapper[4558]: I0120 16:43:30.736329 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:30Z","lastTransitionTime":"2026-01-20T16:43:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:30 crc kubenswrapper[4558]: I0120 16:43:30.838928 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:30 crc kubenswrapper[4558]: I0120 16:43:30.838965 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:30 crc kubenswrapper[4558]: I0120 16:43:30.838974 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:30 crc kubenswrapper[4558]: I0120 16:43:30.838987 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:30 crc kubenswrapper[4558]: I0120 16:43:30.838996 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:30Z","lastTransitionTime":"2026-01-20T16:43:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:30 crc kubenswrapper[4558]: I0120 16:43:30.941397 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:30 crc kubenswrapper[4558]: I0120 16:43:30.941431 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:30 crc kubenswrapper[4558]: I0120 16:43:30.941458 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:30 crc kubenswrapper[4558]: I0120 16:43:30.941471 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:30 crc kubenswrapper[4558]: I0120 16:43:30.941481 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:30Z","lastTransitionTime":"2026-01-20T16:43:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:31 crc kubenswrapper[4558]: I0120 16:43:31.043708 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:31 crc kubenswrapper[4558]: I0120 16:43:31.043754 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:31 crc kubenswrapper[4558]: I0120 16:43:31.043763 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:31 crc kubenswrapper[4558]: I0120 16:43:31.043775 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:31 crc kubenswrapper[4558]: I0120 16:43:31.043784 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:31Z","lastTransitionTime":"2026-01-20T16:43:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:31 crc kubenswrapper[4558]: I0120 16:43:31.145670 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:31 crc kubenswrapper[4558]: I0120 16:43:31.145705 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:31 crc kubenswrapper[4558]: I0120 16:43:31.145714 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:31 crc kubenswrapper[4558]: I0120 16:43:31.145726 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:31 crc kubenswrapper[4558]: I0120 16:43:31.145736 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:31Z","lastTransitionTime":"2026-01-20T16:43:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:31 crc kubenswrapper[4558]: I0120 16:43:31.247682 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:31 crc kubenswrapper[4558]: I0120 16:43:31.247736 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:31 crc kubenswrapper[4558]: I0120 16:43:31.247745 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:31 crc kubenswrapper[4558]: I0120 16:43:31.247759 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:31 crc kubenswrapper[4558]: I0120 16:43:31.247792 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:31Z","lastTransitionTime":"2026-01-20T16:43:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:31 crc kubenswrapper[4558]: I0120 16:43:31.349638 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:31 crc kubenswrapper[4558]: I0120 16:43:31.349671 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:31 crc kubenswrapper[4558]: I0120 16:43:31.349696 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:31 crc kubenswrapper[4558]: I0120 16:43:31.349709 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:31 crc kubenswrapper[4558]: I0120 16:43:31.349718 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:31Z","lastTransitionTime":"2026-01-20T16:43:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:31 crc kubenswrapper[4558]: I0120 16:43:31.452072 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:31 crc kubenswrapper[4558]: I0120 16:43:31.452103 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:31 crc kubenswrapper[4558]: I0120 16:43:31.452110 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:31 crc kubenswrapper[4558]: I0120 16:43:31.452123 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:31 crc kubenswrapper[4558]: I0120 16:43:31.452130 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:31Z","lastTransitionTime":"2026-01-20T16:43:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:31 crc kubenswrapper[4558]: I0120 16:43:31.554623 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:31 crc kubenswrapper[4558]: I0120 16:43:31.554668 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:31 crc kubenswrapper[4558]: I0120 16:43:31.554676 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:31 crc kubenswrapper[4558]: I0120 16:43:31.554689 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:31 crc kubenswrapper[4558]: I0120 16:43:31.554698 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:31Z","lastTransitionTime":"2026-01-20T16:43:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:31 crc kubenswrapper[4558]: I0120 16:43:31.565042 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9wrq6" Jan 20 16:43:31 crc kubenswrapper[4558]: E0120 16:43:31.565135 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9wrq6" podUID="30032328-bd33-4073-9366-e10bc5e2aa77" Jan 20 16:43:31 crc kubenswrapper[4558]: I0120 16:43:31.593299 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-24 01:31:34.790334618 +0000 UTC Jan 20 16:43:31 crc kubenswrapper[4558]: I0120 16:43:31.656784 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:31 crc kubenswrapper[4558]: I0120 16:43:31.656840 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:31 crc kubenswrapper[4558]: I0120 16:43:31.656849 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:31 crc kubenswrapper[4558]: I0120 16:43:31.656862 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:31 crc kubenswrapper[4558]: I0120 16:43:31.656872 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:31Z","lastTransitionTime":"2026-01-20T16:43:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:31 crc kubenswrapper[4558]: I0120 16:43:31.758915 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:31 crc kubenswrapper[4558]: I0120 16:43:31.758966 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:31 crc kubenswrapper[4558]: I0120 16:43:31.758976 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:31 crc kubenswrapper[4558]: I0120 16:43:31.758991 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:31 crc kubenswrapper[4558]: I0120 16:43:31.759000 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:31Z","lastTransitionTime":"2026-01-20T16:43:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:31 crc kubenswrapper[4558]: I0120 16:43:31.861810 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:31 crc kubenswrapper[4558]: I0120 16:43:31.861860 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:31 crc kubenswrapper[4558]: I0120 16:43:31.861870 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:31 crc kubenswrapper[4558]: I0120 16:43:31.861887 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:31 crc kubenswrapper[4558]: I0120 16:43:31.861897 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:31Z","lastTransitionTime":"2026-01-20T16:43:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:31 crc kubenswrapper[4558]: I0120 16:43:31.964202 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:31 crc kubenswrapper[4558]: I0120 16:43:31.964247 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:31 crc kubenswrapper[4558]: I0120 16:43:31.964256 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:31 crc kubenswrapper[4558]: I0120 16:43:31.964270 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:31 crc kubenswrapper[4558]: I0120 16:43:31.964279 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:31Z","lastTransitionTime":"2026-01-20T16:43:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:32 crc kubenswrapper[4558]: I0120 16:43:32.066537 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:32 crc kubenswrapper[4558]: I0120 16:43:32.066567 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:32 crc kubenswrapper[4558]: I0120 16:43:32.066576 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:32 crc kubenswrapper[4558]: I0120 16:43:32.066618 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:32 crc kubenswrapper[4558]: I0120 16:43:32.066628 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:32Z","lastTransitionTime":"2026-01-20T16:43:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:32 crc kubenswrapper[4558]: I0120 16:43:32.169257 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:32 crc kubenswrapper[4558]: I0120 16:43:32.169317 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:32 crc kubenswrapper[4558]: I0120 16:43:32.169327 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:32 crc kubenswrapper[4558]: I0120 16:43:32.169339 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:32 crc kubenswrapper[4558]: I0120 16:43:32.169346 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:32Z","lastTransitionTime":"2026-01-20T16:43:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:32 crc kubenswrapper[4558]: I0120 16:43:32.271211 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:32 crc kubenswrapper[4558]: I0120 16:43:32.271286 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:32 crc kubenswrapper[4558]: I0120 16:43:32.271298 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:32 crc kubenswrapper[4558]: I0120 16:43:32.271320 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:32 crc kubenswrapper[4558]: I0120 16:43:32.271334 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:32Z","lastTransitionTime":"2026-01-20T16:43:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:32 crc kubenswrapper[4558]: I0120 16:43:32.374441 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:32 crc kubenswrapper[4558]: I0120 16:43:32.374483 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:32 crc kubenswrapper[4558]: I0120 16:43:32.374492 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:32 crc kubenswrapper[4558]: I0120 16:43:32.374506 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:32 crc kubenswrapper[4558]: I0120 16:43:32.374515 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:32Z","lastTransitionTime":"2026-01-20T16:43:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:32 crc kubenswrapper[4558]: I0120 16:43:32.477696 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:32 crc kubenswrapper[4558]: I0120 16:43:32.477775 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:32 crc kubenswrapper[4558]: I0120 16:43:32.477787 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:32 crc kubenswrapper[4558]: I0120 16:43:32.477812 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:32 crc kubenswrapper[4558]: I0120 16:43:32.477825 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:32Z","lastTransitionTime":"2026-01-20T16:43:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:32 crc kubenswrapper[4558]: I0120 16:43:32.565614 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 20 16:43:32 crc kubenswrapper[4558]: I0120 16:43:32.565724 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:43:32 crc kubenswrapper[4558]: I0120 16:43:32.565900 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 20 16:43:32 crc kubenswrapper[4558]: E0120 16:43:32.565987 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 20 16:43:32 crc kubenswrapper[4558]: E0120 16:43:32.566138 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 20 16:43:32 crc kubenswrapper[4558]: E0120 16:43:32.566191 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 20 16:43:32 crc kubenswrapper[4558]: I0120 16:43:32.579833 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:32 crc kubenswrapper[4558]: I0120 16:43:32.581020 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:32 crc kubenswrapper[4558]: I0120 16:43:32.581031 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:32 crc kubenswrapper[4558]: I0120 16:43:32.581044 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:32 crc kubenswrapper[4558]: I0120 16:43:32.581052 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:32Z","lastTransitionTime":"2026-01-20T16:43:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:32 crc kubenswrapper[4558]: I0120 16:43:32.594185 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-12 08:30:12.778585509 +0000 UTC Jan 20 16:43:32 crc kubenswrapper[4558]: I0120 16:43:32.683825 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:32 crc kubenswrapper[4558]: I0120 16:43:32.683866 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:32 crc kubenswrapper[4558]: I0120 16:43:32.683875 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:32 crc kubenswrapper[4558]: I0120 16:43:32.683889 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:32 crc kubenswrapper[4558]: I0120 16:43:32.683897 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:32Z","lastTransitionTime":"2026-01-20T16:43:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:32 crc kubenswrapper[4558]: I0120 16:43:32.786564 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:32 crc kubenswrapper[4558]: I0120 16:43:32.786651 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:32 crc kubenswrapper[4558]: I0120 16:43:32.786665 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:32 crc kubenswrapper[4558]: I0120 16:43:32.786689 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:32 crc kubenswrapper[4558]: I0120 16:43:32.786702 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:32Z","lastTransitionTime":"2026-01-20T16:43:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:32 crc kubenswrapper[4558]: I0120 16:43:32.889641 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:32 crc kubenswrapper[4558]: I0120 16:43:32.889691 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:32 crc kubenswrapper[4558]: I0120 16:43:32.889702 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:32 crc kubenswrapper[4558]: I0120 16:43:32.889718 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:32 crc kubenswrapper[4558]: I0120 16:43:32.889731 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:32Z","lastTransitionTime":"2026-01-20T16:43:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:32 crc kubenswrapper[4558]: I0120 16:43:32.992264 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:32 crc kubenswrapper[4558]: I0120 16:43:32.992311 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:32 crc kubenswrapper[4558]: I0120 16:43:32.992323 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:32 crc kubenswrapper[4558]: I0120 16:43:32.992346 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:32 crc kubenswrapper[4558]: I0120 16:43:32.992357 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:32Z","lastTransitionTime":"2026-01-20T16:43:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:33 crc kubenswrapper[4558]: I0120 16:43:33.094956 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:33 crc kubenswrapper[4558]: I0120 16:43:33.094996 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:33 crc kubenswrapper[4558]: I0120 16:43:33.095004 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:33 crc kubenswrapper[4558]: I0120 16:43:33.095017 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:33 crc kubenswrapper[4558]: I0120 16:43:33.095026 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:33Z","lastTransitionTime":"2026-01-20T16:43:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:33 crc kubenswrapper[4558]: I0120 16:43:33.196695 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:33 crc kubenswrapper[4558]: I0120 16:43:33.196728 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:33 crc kubenswrapper[4558]: I0120 16:43:33.196736 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:33 crc kubenswrapper[4558]: I0120 16:43:33.196748 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:33 crc kubenswrapper[4558]: I0120 16:43:33.196756 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:33Z","lastTransitionTime":"2026-01-20T16:43:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:33 crc kubenswrapper[4558]: I0120 16:43:33.298636 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:33 crc kubenswrapper[4558]: I0120 16:43:33.298677 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:33 crc kubenswrapper[4558]: I0120 16:43:33.298688 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:33 crc kubenswrapper[4558]: I0120 16:43:33.298704 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:33 crc kubenswrapper[4558]: I0120 16:43:33.298712 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:33Z","lastTransitionTime":"2026-01-20T16:43:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:33 crc kubenswrapper[4558]: I0120 16:43:33.400671 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:33 crc kubenswrapper[4558]: I0120 16:43:33.400710 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:33 crc kubenswrapper[4558]: I0120 16:43:33.400719 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:33 crc kubenswrapper[4558]: I0120 16:43:33.400730 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:33 crc kubenswrapper[4558]: I0120 16:43:33.400739 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:33Z","lastTransitionTime":"2026-01-20T16:43:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:33 crc kubenswrapper[4558]: I0120 16:43:33.502781 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:33 crc kubenswrapper[4558]: I0120 16:43:33.502812 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:33 crc kubenswrapper[4558]: I0120 16:43:33.502821 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:33 crc kubenswrapper[4558]: I0120 16:43:33.502832 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:33 crc kubenswrapper[4558]: I0120 16:43:33.502839 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:33Z","lastTransitionTime":"2026-01-20T16:43:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:33 crc kubenswrapper[4558]: I0120 16:43:33.565201 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9wrq6" Jan 20 16:43:33 crc kubenswrapper[4558]: E0120 16:43:33.565288 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9wrq6" podUID="30032328-bd33-4073-9366-e10bc5e2aa77" Jan 20 16:43:33 crc kubenswrapper[4558]: I0120 16:43:33.594847 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-06 01:31:29.439060653 +0000 UTC Jan 20 16:43:33 crc kubenswrapper[4558]: I0120 16:43:33.604124 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:33 crc kubenswrapper[4558]: I0120 16:43:33.604178 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:33 crc kubenswrapper[4558]: I0120 16:43:33.604188 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:33 crc kubenswrapper[4558]: I0120 16:43:33.604197 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:33 crc kubenswrapper[4558]: I0120 16:43:33.604205 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:33Z","lastTransitionTime":"2026-01-20T16:43:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:33 crc kubenswrapper[4558]: I0120 16:43:33.705905 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:33 crc kubenswrapper[4558]: I0120 16:43:33.705935 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:33 crc kubenswrapper[4558]: I0120 16:43:33.705946 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:33 crc kubenswrapper[4558]: I0120 16:43:33.705957 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:33 crc kubenswrapper[4558]: I0120 16:43:33.705967 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:33Z","lastTransitionTime":"2026-01-20T16:43:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:33 crc kubenswrapper[4558]: I0120 16:43:33.807500 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:33 crc kubenswrapper[4558]: I0120 16:43:33.807521 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:33 crc kubenswrapper[4558]: I0120 16:43:33.807529 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:33 crc kubenswrapper[4558]: I0120 16:43:33.807539 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:33 crc kubenswrapper[4558]: I0120 16:43:33.807547 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:33Z","lastTransitionTime":"2026-01-20T16:43:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:33 crc kubenswrapper[4558]: I0120 16:43:33.909724 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:33 crc kubenswrapper[4558]: I0120 16:43:33.909754 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:33 crc kubenswrapper[4558]: I0120 16:43:33.909763 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:33 crc kubenswrapper[4558]: I0120 16:43:33.909774 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:33 crc kubenswrapper[4558]: I0120 16:43:33.909782 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:33Z","lastTransitionTime":"2026-01-20T16:43:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:34 crc kubenswrapper[4558]: I0120 16:43:34.012155 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:34 crc kubenswrapper[4558]: I0120 16:43:34.012209 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:34 crc kubenswrapper[4558]: I0120 16:43:34.012219 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:34 crc kubenswrapper[4558]: I0120 16:43:34.012231 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:34 crc kubenswrapper[4558]: I0120 16:43:34.012240 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:34Z","lastTransitionTime":"2026-01-20T16:43:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:34 crc kubenswrapper[4558]: I0120 16:43:34.115768 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:34 crc kubenswrapper[4558]: I0120 16:43:34.117735 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:34 crc kubenswrapper[4558]: I0120 16:43:34.117767 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:34 crc kubenswrapper[4558]: I0120 16:43:34.117783 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:34 crc kubenswrapper[4558]: I0120 16:43:34.117793 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:34Z","lastTransitionTime":"2026-01-20T16:43:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:34 crc kubenswrapper[4558]: I0120 16:43:34.220452 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:34 crc kubenswrapper[4558]: I0120 16:43:34.220494 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:34 crc kubenswrapper[4558]: I0120 16:43:34.220506 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:34 crc kubenswrapper[4558]: I0120 16:43:34.220519 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:34 crc kubenswrapper[4558]: I0120 16:43:34.220530 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:34Z","lastTransitionTime":"2026-01-20T16:43:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:34 crc kubenswrapper[4558]: I0120 16:43:34.322386 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:34 crc kubenswrapper[4558]: I0120 16:43:34.322441 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:34 crc kubenswrapper[4558]: I0120 16:43:34.322450 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:34 crc kubenswrapper[4558]: I0120 16:43:34.322463 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:34 crc kubenswrapper[4558]: I0120 16:43:34.322470 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:34Z","lastTransitionTime":"2026-01-20T16:43:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:34 crc kubenswrapper[4558]: I0120 16:43:34.424409 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:34 crc kubenswrapper[4558]: I0120 16:43:34.424457 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:34 crc kubenswrapper[4558]: I0120 16:43:34.424468 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:34 crc kubenswrapper[4558]: I0120 16:43:34.424481 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:34 crc kubenswrapper[4558]: I0120 16:43:34.424489 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:34Z","lastTransitionTime":"2026-01-20T16:43:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:34 crc kubenswrapper[4558]: I0120 16:43:34.526129 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:34 crc kubenswrapper[4558]: I0120 16:43:34.526186 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:34 crc kubenswrapper[4558]: I0120 16:43:34.526196 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:34 crc kubenswrapper[4558]: I0120 16:43:34.526210 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:34 crc kubenswrapper[4558]: I0120 16:43:34.526219 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:34Z","lastTransitionTime":"2026-01-20T16:43:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:34 crc kubenswrapper[4558]: I0120 16:43:34.565564 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:43:34 crc kubenswrapper[4558]: I0120 16:43:34.565619 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 20 16:43:34 crc kubenswrapper[4558]: E0120 16:43:34.565717 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 20 16:43:34 crc kubenswrapper[4558]: I0120 16:43:34.565793 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 20 16:43:34 crc kubenswrapper[4558]: E0120 16:43:34.565901 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 20 16:43:34 crc kubenswrapper[4558]: E0120 16:43:34.565996 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 20 16:43:34 crc kubenswrapper[4558]: I0120 16:43:34.595782 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-29 09:42:09.431936485 +0000 UTC Jan 20 16:43:34 crc kubenswrapper[4558]: I0120 16:43:34.628368 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:34 crc kubenswrapper[4558]: I0120 16:43:34.628414 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:34 crc kubenswrapper[4558]: I0120 16:43:34.628422 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:34 crc kubenswrapper[4558]: I0120 16:43:34.628435 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:34 crc kubenswrapper[4558]: I0120 16:43:34.628444 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:34Z","lastTransitionTime":"2026-01-20T16:43:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:34 crc kubenswrapper[4558]: I0120 16:43:34.730411 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:34 crc kubenswrapper[4558]: I0120 16:43:34.730445 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:34 crc kubenswrapper[4558]: I0120 16:43:34.730454 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:34 crc kubenswrapper[4558]: I0120 16:43:34.730466 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:34 crc kubenswrapper[4558]: I0120 16:43:34.730475 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:34Z","lastTransitionTime":"2026-01-20T16:43:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:34 crc kubenswrapper[4558]: I0120 16:43:34.832286 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:34 crc kubenswrapper[4558]: I0120 16:43:34.832338 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:34 crc kubenswrapper[4558]: I0120 16:43:34.832347 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:34 crc kubenswrapper[4558]: I0120 16:43:34.832363 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:34 crc kubenswrapper[4558]: I0120 16:43:34.832373 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:34Z","lastTransitionTime":"2026-01-20T16:43:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:34 crc kubenswrapper[4558]: I0120 16:43:34.934584 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:34 crc kubenswrapper[4558]: I0120 16:43:34.934632 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:34 crc kubenswrapper[4558]: I0120 16:43:34.934640 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:34 crc kubenswrapper[4558]: I0120 16:43:34.934652 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:34 crc kubenswrapper[4558]: I0120 16:43:34.934661 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:34Z","lastTransitionTime":"2026-01-20T16:43:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:35 crc kubenswrapper[4558]: I0120 16:43:35.036965 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:35 crc kubenswrapper[4558]: I0120 16:43:35.037021 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:35 crc kubenswrapper[4558]: I0120 16:43:35.037033 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:35 crc kubenswrapper[4558]: I0120 16:43:35.037047 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:35 crc kubenswrapper[4558]: I0120 16:43:35.037055 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:35Z","lastTransitionTime":"2026-01-20T16:43:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:35 crc kubenswrapper[4558]: I0120 16:43:35.138852 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:35 crc kubenswrapper[4558]: I0120 16:43:35.138884 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:35 crc kubenswrapper[4558]: I0120 16:43:35.138893 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:35 crc kubenswrapper[4558]: I0120 16:43:35.138907 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:35 crc kubenswrapper[4558]: I0120 16:43:35.138915 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:35Z","lastTransitionTime":"2026-01-20T16:43:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:35 crc kubenswrapper[4558]: I0120 16:43:35.241223 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:35 crc kubenswrapper[4558]: I0120 16:43:35.241263 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:35 crc kubenswrapper[4558]: I0120 16:43:35.241271 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:35 crc kubenswrapper[4558]: I0120 16:43:35.241284 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:35 crc kubenswrapper[4558]: I0120 16:43:35.241292 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:35Z","lastTransitionTime":"2026-01-20T16:43:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:35 crc kubenswrapper[4558]: I0120 16:43:35.342716 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:35 crc kubenswrapper[4558]: I0120 16:43:35.342758 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:35 crc kubenswrapper[4558]: I0120 16:43:35.342768 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:35 crc kubenswrapper[4558]: I0120 16:43:35.342781 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:35 crc kubenswrapper[4558]: I0120 16:43:35.342789 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:35Z","lastTransitionTime":"2026-01-20T16:43:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:35 crc kubenswrapper[4558]: I0120 16:43:35.444854 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:35 crc kubenswrapper[4558]: I0120 16:43:35.444887 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:35 crc kubenswrapper[4558]: I0120 16:43:35.444895 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:35 crc kubenswrapper[4558]: I0120 16:43:35.444908 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:35 crc kubenswrapper[4558]: I0120 16:43:35.444919 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:35Z","lastTransitionTime":"2026-01-20T16:43:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:35 crc kubenswrapper[4558]: I0120 16:43:35.546862 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:35 crc kubenswrapper[4558]: I0120 16:43:35.546903 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:35 crc kubenswrapper[4558]: I0120 16:43:35.546913 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:35 crc kubenswrapper[4558]: I0120 16:43:35.546930 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:35 crc kubenswrapper[4558]: I0120 16:43:35.546939 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:35Z","lastTransitionTime":"2026-01-20T16:43:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:35 crc kubenswrapper[4558]: I0120 16:43:35.565118 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9wrq6" Jan 20 16:43:35 crc kubenswrapper[4558]: E0120 16:43:35.565259 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9wrq6" podUID="30032328-bd33-4073-9366-e10bc5e2aa77" Jan 20 16:43:35 crc kubenswrapper[4558]: I0120 16:43:35.596641 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-11 12:55:05.920866096 +0000 UTC Jan 20 16:43:35 crc kubenswrapper[4558]: I0120 16:43:35.649357 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:35 crc kubenswrapper[4558]: I0120 16:43:35.649398 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:35 crc kubenswrapper[4558]: I0120 16:43:35.649406 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:35 crc kubenswrapper[4558]: I0120 16:43:35.649419 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:35 crc kubenswrapper[4558]: I0120 16:43:35.649431 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:35Z","lastTransitionTime":"2026-01-20T16:43:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:35 crc kubenswrapper[4558]: I0120 16:43:35.751506 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:35 crc kubenswrapper[4558]: I0120 16:43:35.751540 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:35 crc kubenswrapper[4558]: I0120 16:43:35.751549 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:35 crc kubenswrapper[4558]: I0120 16:43:35.751562 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:35 crc kubenswrapper[4558]: I0120 16:43:35.751571 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:35Z","lastTransitionTime":"2026-01-20T16:43:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:35 crc kubenswrapper[4558]: I0120 16:43:35.853730 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:35 crc kubenswrapper[4558]: I0120 16:43:35.853762 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:35 crc kubenswrapper[4558]: I0120 16:43:35.853770 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:35 crc kubenswrapper[4558]: I0120 16:43:35.853781 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:35 crc kubenswrapper[4558]: I0120 16:43:35.853788 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:35Z","lastTransitionTime":"2026-01-20T16:43:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:35 crc kubenswrapper[4558]: I0120 16:43:35.955204 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:35 crc kubenswrapper[4558]: I0120 16:43:35.955244 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:35 crc kubenswrapper[4558]: I0120 16:43:35.955253 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:35 crc kubenswrapper[4558]: I0120 16:43:35.955266 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:35 crc kubenswrapper[4558]: I0120 16:43:35.955275 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:35Z","lastTransitionTime":"2026-01-20T16:43:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:36 crc kubenswrapper[4558]: I0120 16:43:36.057357 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:36 crc kubenswrapper[4558]: I0120 16:43:36.057399 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:36 crc kubenswrapper[4558]: I0120 16:43:36.057407 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:36 crc kubenswrapper[4558]: I0120 16:43:36.057418 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:36 crc kubenswrapper[4558]: I0120 16:43:36.057426 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:36Z","lastTransitionTime":"2026-01-20T16:43:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:36 crc kubenswrapper[4558]: I0120 16:43:36.159438 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:36 crc kubenswrapper[4558]: I0120 16:43:36.159474 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:36 crc kubenswrapper[4558]: I0120 16:43:36.159483 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:36 crc kubenswrapper[4558]: I0120 16:43:36.159494 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:36 crc kubenswrapper[4558]: I0120 16:43:36.159501 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:36Z","lastTransitionTime":"2026-01-20T16:43:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:36 crc kubenswrapper[4558]: I0120 16:43:36.261303 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:36 crc kubenswrapper[4558]: I0120 16:43:36.261330 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:36 crc kubenswrapper[4558]: I0120 16:43:36.261337 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:36 crc kubenswrapper[4558]: I0120 16:43:36.261349 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:36 crc kubenswrapper[4558]: I0120 16:43:36.261357 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:36Z","lastTransitionTime":"2026-01-20T16:43:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:36 crc kubenswrapper[4558]: I0120 16:43:36.363701 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:36 crc kubenswrapper[4558]: I0120 16:43:36.363736 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:36 crc kubenswrapper[4558]: I0120 16:43:36.363745 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:36 crc kubenswrapper[4558]: I0120 16:43:36.363758 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:36 crc kubenswrapper[4558]: I0120 16:43:36.363767 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:36Z","lastTransitionTime":"2026-01-20T16:43:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:36 crc kubenswrapper[4558]: I0120 16:43:36.465095 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:36 crc kubenswrapper[4558]: I0120 16:43:36.465126 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:36 crc kubenswrapper[4558]: I0120 16:43:36.465133 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:36 crc kubenswrapper[4558]: I0120 16:43:36.465145 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:36 crc kubenswrapper[4558]: I0120 16:43:36.465156 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:36Z","lastTransitionTime":"2026-01-20T16:43:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:36 crc kubenswrapper[4558]: I0120 16:43:36.565993 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:43:36 crc kubenswrapper[4558]: I0120 16:43:36.565999 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 20 16:43:36 crc kubenswrapper[4558]: E0120 16:43:36.566463 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 20 16:43:36 crc kubenswrapper[4558]: E0120 16:43:36.566463 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 20 16:43:36 crc kubenswrapper[4558]: I0120 16:43:36.566248 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 20 16:43:36 crc kubenswrapper[4558]: E0120 16:43:36.566764 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 20 16:43:36 crc kubenswrapper[4558]: I0120 16:43:36.567290 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:36 crc kubenswrapper[4558]: I0120 16:43:36.567316 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:36 crc kubenswrapper[4558]: I0120 16:43:36.567323 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:36 crc kubenswrapper[4558]: I0120 16:43:36.567334 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:36 crc kubenswrapper[4558]: I0120 16:43:36.567342 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:36Z","lastTransitionTime":"2026-01-20T16:43:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:36 crc kubenswrapper[4558]: I0120 16:43:36.580475 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=58.580448087 podStartE2EDuration="58.580448087s" podCreationTimestamp="2026-01-20 16:42:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:43:36.580396631 +0000 UTC m=+110.340734597" watchObservedRunningTime="2026-01-20 16:43:36.580448087 +0000 UTC m=+110.340786054" Jan 20 16:43:36 crc kubenswrapper[4558]: I0120 16:43:36.596715 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-03 19:52:31.590185912 +0000 UTC Jan 20 16:43:36 crc kubenswrapper[4558]: I0120 16:43:36.624968 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-rfwnl" podStartSLOduration=92.624952712 podStartE2EDuration="1m32.624952712s" podCreationTimestamp="2026-01-20 16:42:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:43:36.614519284 +0000 UTC m=+110.374857251" watchObservedRunningTime="2026-01-20 16:43:36.624952712 +0000 UTC m=+110.385290679" Jan 20 16:43:36 crc kubenswrapper[4558]: I0120 16:43:36.634912 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-d96bp" podStartSLOduration=92.634898324 podStartE2EDuration="1m32.634898324s" podCreationTimestamp="2026-01-20 16:42:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:43:36.634481621 +0000 UTC m=+110.394819588" watchObservedRunningTime="2026-01-20 16:43:36.634898324 +0000 UTC m=+110.395236292" Jan 20 16:43:36 crc kubenswrapper[4558]: I0120 16:43:36.663297 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-f5t7h" podStartSLOduration=92.663280844 podStartE2EDuration="1m32.663280844s" 
podCreationTimestamp="2026-01-20 16:42:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:43:36.662953598 +0000 UTC m=+110.423291606" watchObservedRunningTime="2026-01-20 16:43:36.663280844 +0000 UTC m=+110.423618811" Jan 20 16:43:36 crc kubenswrapper[4558]: I0120 16:43:36.669003 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:36 crc kubenswrapper[4558]: I0120 16:43:36.669041 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:36 crc kubenswrapper[4558]: I0120 16:43:36.669050 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:36 crc kubenswrapper[4558]: I0120 16:43:36.669063 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:36 crc kubenswrapper[4558]: I0120 16:43:36.669072 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:36Z","lastTransitionTime":"2026-01-20T16:43:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:36 crc kubenswrapper[4558]: I0120 16:43:36.684695 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=87.684682998 podStartE2EDuration="1m27.684682998s" podCreationTimestamp="2026-01-20 16:42:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:43:36.684584943 +0000 UTC m=+110.444922910" watchObservedRunningTime="2026-01-20 16:43:36.684682998 +0000 UTC m=+110.445020965" Jan 20 16:43:36 crc kubenswrapper[4558]: I0120 16:43:36.717480 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-jsqvf" podStartSLOduration=92.71746474 podStartE2EDuration="1m32.71746474s" podCreationTimestamp="2026-01-20 16:42:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:43:36.709663641 +0000 UTC m=+110.470001608" watchObservedRunningTime="2026-01-20 16:43:36.71746474 +0000 UTC m=+110.477802707" Jan 20 16:43:36 crc kubenswrapper[4558]: I0120 16:43:36.724157 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-47477" podStartSLOduration=92.724147766 podStartE2EDuration="1m32.724147766s" podCreationTimestamp="2026-01-20 16:42:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:43:36.717729047 +0000 UTC m=+110.478067015" watchObservedRunningTime="2026-01-20 16:43:36.724147766 +0000 UTC m=+110.484485733" Jan 20 16:43:36 crc kubenswrapper[4558]: I0120 16:43:36.724262 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=43.724258615 podStartE2EDuration="43.724258615s" podCreationTimestamp="2026-01-20 16:42:53 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:43:36.723863712 +0000 UTC m=+110.484201689" watchObservedRunningTime="2026-01-20 16:43:36.724258615 +0000 UTC m=+110.484596582" Jan 20 16:43:36 crc kubenswrapper[4558]: I0120 16:43:36.741014 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=92.740998172 podStartE2EDuration="1m32.740998172s" podCreationTimestamp="2026-01-20 16:42:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:43:36.74093902 +0000 UTC m=+110.501276987" watchObservedRunningTime="2026-01-20 16:43:36.740998172 +0000 UTC m=+110.501336139" Jan 20 16:43:36 crc kubenswrapper[4558]: I0120 16:43:36.751248 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=92.751233408 podStartE2EDuration="1m32.751233408s" podCreationTimestamp="2026-01-20 16:42:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:43:36.750729981 +0000 UTC m=+110.511067948" watchObservedRunningTime="2026-01-20 16:43:36.751233408 +0000 UTC m=+110.511571375" Jan 20 16:43:36 crc kubenswrapper[4558]: I0120 16:43:36.758305 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podStartSLOduration=92.758292271 podStartE2EDuration="1m32.758292271s" podCreationTimestamp="2026-01-20 16:42:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:43:36.757626509 +0000 UTC m=+110.517964476" watchObservedRunningTime="2026-01-20 16:43:36.758292271 +0000 UTC m=+110.518630239" Jan 20 16:43:36 crc kubenswrapper[4558]: I0120 16:43:36.771069 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:36 crc kubenswrapper[4558]: I0120 16:43:36.771107 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:36 crc kubenswrapper[4558]: I0120 16:43:36.771115 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:36 crc kubenswrapper[4558]: I0120 16:43:36.771127 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:36 crc kubenswrapper[4558]: I0120 16:43:36.771137 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:36Z","lastTransitionTime":"2026-01-20T16:43:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:36 crc kubenswrapper[4558]: I0120 16:43:36.872819 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:36 crc kubenswrapper[4558]: I0120 16:43:36.872850 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:36 crc kubenswrapper[4558]: I0120 16:43:36.872858 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:36 crc kubenswrapper[4558]: I0120 16:43:36.872870 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:36 crc kubenswrapper[4558]: I0120 16:43:36.872880 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:36Z","lastTransitionTime":"2026-01-20T16:43:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:36 crc kubenswrapper[4558]: I0120 16:43:36.974492 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:36 crc kubenswrapper[4558]: I0120 16:43:36.974528 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:36 crc kubenswrapper[4558]: I0120 16:43:36.974537 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:36 crc kubenswrapper[4558]: I0120 16:43:36.974549 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:36 crc kubenswrapper[4558]: I0120 16:43:36.974564 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:36Z","lastTransitionTime":"2026-01-20T16:43:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:37 crc kubenswrapper[4558]: I0120 16:43:37.076537 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:37 crc kubenswrapper[4558]: I0120 16:43:37.076578 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:37 crc kubenswrapper[4558]: I0120 16:43:37.076588 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:37 crc kubenswrapper[4558]: I0120 16:43:37.076614 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:37 crc kubenswrapper[4558]: I0120 16:43:37.076623 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:37Z","lastTransitionTime":"2026-01-20T16:43:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:37 crc kubenswrapper[4558]: I0120 16:43:37.178023 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:37 crc kubenswrapper[4558]: I0120 16:43:37.178055 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:37 crc kubenswrapper[4558]: I0120 16:43:37.178065 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:37 crc kubenswrapper[4558]: I0120 16:43:37.178076 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:37 crc kubenswrapper[4558]: I0120 16:43:37.178086 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:37Z","lastTransitionTime":"2026-01-20T16:43:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:37 crc kubenswrapper[4558]: I0120 16:43:37.280022 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:37 crc kubenswrapper[4558]: I0120 16:43:37.280053 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:37 crc kubenswrapper[4558]: I0120 16:43:37.280061 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:37 crc kubenswrapper[4558]: I0120 16:43:37.280073 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:37 crc kubenswrapper[4558]: I0120 16:43:37.280081 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:37Z","lastTransitionTime":"2026-01-20T16:43:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:37 crc kubenswrapper[4558]: I0120 16:43:37.382317 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:37 crc kubenswrapper[4558]: I0120 16:43:37.382346 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:37 crc kubenswrapper[4558]: I0120 16:43:37.382357 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:37 crc kubenswrapper[4558]: I0120 16:43:37.382369 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:37 crc kubenswrapper[4558]: I0120 16:43:37.382377 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:37Z","lastTransitionTime":"2026-01-20T16:43:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:37 crc kubenswrapper[4558]: I0120 16:43:37.484751 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:37 crc kubenswrapper[4558]: I0120 16:43:37.484801 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:37 crc kubenswrapper[4558]: I0120 16:43:37.484811 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:37 crc kubenswrapper[4558]: I0120 16:43:37.484824 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:37 crc kubenswrapper[4558]: I0120 16:43:37.484832 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:37Z","lastTransitionTime":"2026-01-20T16:43:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:37 crc kubenswrapper[4558]: I0120 16:43:37.565555 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9wrq6" Jan 20 16:43:37 crc kubenswrapper[4558]: E0120 16:43:37.565675 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9wrq6" podUID="30032328-bd33-4073-9366-e10bc5e2aa77" Jan 20 16:43:37 crc kubenswrapper[4558]: I0120 16:43:37.586808 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:37 crc kubenswrapper[4558]: I0120 16:43:37.586838 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:37 crc kubenswrapper[4558]: I0120 16:43:37.586846 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:37 crc kubenswrapper[4558]: I0120 16:43:37.586857 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:37 crc kubenswrapper[4558]: I0120 16:43:37.586865 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:37Z","lastTransitionTime":"2026-01-20T16:43:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:37 crc kubenswrapper[4558]: I0120 16:43:37.597243 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-12 19:08:39.605380259 +0000 UTC Jan 20 16:43:37 crc kubenswrapper[4558]: I0120 16:43:37.688771 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:37 crc kubenswrapper[4558]: I0120 16:43:37.688807 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:37 crc kubenswrapper[4558]: I0120 16:43:37.688814 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:37 crc kubenswrapper[4558]: I0120 16:43:37.688829 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:37 crc kubenswrapper[4558]: I0120 16:43:37.688837 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:37Z","lastTransitionTime":"2026-01-20T16:43:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:37 crc kubenswrapper[4558]: I0120 16:43:37.791005 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:37 crc kubenswrapper[4558]: I0120 16:43:37.791051 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:37 crc kubenswrapper[4558]: I0120 16:43:37.791061 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:37 crc kubenswrapper[4558]: I0120 16:43:37.791074 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:37 crc kubenswrapper[4558]: I0120 16:43:37.791083 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:37Z","lastTransitionTime":"2026-01-20T16:43:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:37 crc kubenswrapper[4558]: I0120 16:43:37.893023 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:37 crc kubenswrapper[4558]: I0120 16:43:37.893056 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:37 crc kubenswrapper[4558]: I0120 16:43:37.893081 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:37 crc kubenswrapper[4558]: I0120 16:43:37.893094 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:37 crc kubenswrapper[4558]: I0120 16:43:37.893106 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:37Z","lastTransitionTime":"2026-01-20T16:43:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:37 crc kubenswrapper[4558]: I0120 16:43:37.927136 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-jsqvf_bedf08c7-1f93-4931-a7f3-e729e2a137af/kube-multus/1.log" Jan 20 16:43:37 crc kubenswrapper[4558]: I0120 16:43:37.927461 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-jsqvf_bedf08c7-1f93-4931-a7f3-e729e2a137af/kube-multus/0.log" Jan 20 16:43:37 crc kubenswrapper[4558]: I0120 16:43:37.927517 4558 generic.go:334] "Generic (PLEG): container finished" podID="bedf08c7-1f93-4931-a7f3-e729e2a137af" containerID="2297634e20683413a5eb643517580c9486303dc9616a7588a4ac571f3d0e53b5" exitCode=1 Jan 20 16:43:37 crc kubenswrapper[4558]: I0120 16:43:37.927542 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-jsqvf" event={"ID":"bedf08c7-1f93-4931-a7f3-e729e2a137af","Type":"ContainerDied","Data":"2297634e20683413a5eb643517580c9486303dc9616a7588a4ac571f3d0e53b5"} Jan 20 16:43:37 crc kubenswrapper[4558]: I0120 16:43:37.927571 4558 scope.go:117] "RemoveContainer" containerID="9e32b76495d88d356373f9389e163a2f3a72e202bb454c7015594c1f5a41b511" Jan 20 16:43:37 crc kubenswrapper[4558]: I0120 16:43:37.928321 4558 scope.go:117] "RemoveContainer" containerID="2297634e20683413a5eb643517580c9486303dc9616a7588a4ac571f3d0e53b5" Jan 20 16:43:37 crc kubenswrapper[4558]: E0120 16:43:37.928485 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-jsqvf_openshift-multus(bedf08c7-1f93-4931-a7f3-e729e2a137af)\"" pod="openshift-multus/multus-jsqvf" podUID="bedf08c7-1f93-4931-a7f3-e729e2a137af" Jan 20 16:43:37 crc kubenswrapper[4558]: I0120 16:43:37.994822 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:37 crc kubenswrapper[4558]: I0120 16:43:37.994852 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:37 crc kubenswrapper[4558]: I0120 16:43:37.994860 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:37 crc kubenswrapper[4558]: I0120 16:43:37.994873 
4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:37 crc kubenswrapper[4558]: I0120 16:43:37.994883 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:37Z","lastTransitionTime":"2026-01-20T16:43:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:38 crc kubenswrapper[4558]: I0120 16:43:38.096945 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:38 crc kubenswrapper[4558]: I0120 16:43:38.096985 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:38 crc kubenswrapper[4558]: I0120 16:43:38.096996 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:38 crc kubenswrapper[4558]: I0120 16:43:38.097010 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:38 crc kubenswrapper[4558]: I0120 16:43:38.097020 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:38Z","lastTransitionTime":"2026-01-20T16:43:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:38 crc kubenswrapper[4558]: I0120 16:43:38.198914 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:38 crc kubenswrapper[4558]: I0120 16:43:38.198949 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:38 crc kubenswrapper[4558]: I0120 16:43:38.198956 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:38 crc kubenswrapper[4558]: I0120 16:43:38.198969 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:38 crc kubenswrapper[4558]: I0120 16:43:38.198978 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:38Z","lastTransitionTime":"2026-01-20T16:43:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:38 crc kubenswrapper[4558]: I0120 16:43:38.300607 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:38 crc kubenswrapper[4558]: I0120 16:43:38.300637 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:38 crc kubenswrapper[4558]: I0120 16:43:38.300645 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:38 crc kubenswrapper[4558]: I0120 16:43:38.300657 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:38 crc kubenswrapper[4558]: I0120 16:43:38.300665 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:38Z","lastTransitionTime":"2026-01-20T16:43:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:38 crc kubenswrapper[4558]: I0120 16:43:38.402579 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:38 crc kubenswrapper[4558]: I0120 16:43:38.402625 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:38 crc kubenswrapper[4558]: I0120 16:43:38.402634 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:38 crc kubenswrapper[4558]: I0120 16:43:38.402646 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:38 crc kubenswrapper[4558]: I0120 16:43:38.402655 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:38Z","lastTransitionTime":"2026-01-20T16:43:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:38 crc kubenswrapper[4558]: I0120 16:43:38.504880 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:38 crc kubenswrapper[4558]: I0120 16:43:38.504908 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:38 crc kubenswrapper[4558]: I0120 16:43:38.504915 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:38 crc kubenswrapper[4558]: I0120 16:43:38.504928 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:38 crc kubenswrapper[4558]: I0120 16:43:38.504936 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:38Z","lastTransitionTime":"2026-01-20T16:43:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:38 crc kubenswrapper[4558]: I0120 16:43:38.565743 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:43:38 crc kubenswrapper[4558]: I0120 16:43:38.565746 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 20 16:43:38 crc kubenswrapper[4558]: E0120 16:43:38.565844 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 20 16:43:38 crc kubenswrapper[4558]: I0120 16:43:38.565870 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 20 16:43:38 crc kubenswrapper[4558]: E0120 16:43:38.565923 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 20 16:43:38 crc kubenswrapper[4558]: E0120 16:43:38.566011 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 20 16:43:38 crc kubenswrapper[4558]: I0120 16:43:38.598018 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-20 00:58:26.469762584 +0000 UTC Jan 20 16:43:38 crc kubenswrapper[4558]: I0120 16:43:38.606466 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:38 crc kubenswrapper[4558]: I0120 16:43:38.606492 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:38 crc kubenswrapper[4558]: I0120 16:43:38.606502 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:38 crc kubenswrapper[4558]: I0120 16:43:38.606515 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:38 crc kubenswrapper[4558]: I0120 16:43:38.606523 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:38Z","lastTransitionTime":"2026-01-20T16:43:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:38 crc kubenswrapper[4558]: I0120 16:43:38.708243 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:38 crc kubenswrapper[4558]: I0120 16:43:38.708278 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:38 crc kubenswrapper[4558]: I0120 16:43:38.708287 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:38 crc kubenswrapper[4558]: I0120 16:43:38.708300 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:38 crc kubenswrapper[4558]: I0120 16:43:38.708309 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:38Z","lastTransitionTime":"2026-01-20T16:43:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:38 crc kubenswrapper[4558]: I0120 16:43:38.810493 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:38 crc kubenswrapper[4558]: I0120 16:43:38.810529 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:38 crc kubenswrapper[4558]: I0120 16:43:38.810538 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:38 crc kubenswrapper[4558]: I0120 16:43:38.810550 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:38 crc kubenswrapper[4558]: I0120 16:43:38.810558 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:38Z","lastTransitionTime":"2026-01-20T16:43:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:38 crc kubenswrapper[4558]: I0120 16:43:38.912579 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:38 crc kubenswrapper[4558]: I0120 16:43:38.912628 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:38 crc kubenswrapper[4558]: I0120 16:43:38.912636 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:38 crc kubenswrapper[4558]: I0120 16:43:38.912649 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:38 crc kubenswrapper[4558]: I0120 16:43:38.912658 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:38Z","lastTransitionTime":"2026-01-20T16:43:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:38 crc kubenswrapper[4558]: I0120 16:43:38.930527 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-jsqvf_bedf08c7-1f93-4931-a7f3-e729e2a137af/kube-multus/1.log" Jan 20 16:43:39 crc kubenswrapper[4558]: I0120 16:43:39.014278 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:39 crc kubenswrapper[4558]: I0120 16:43:39.014300 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:39 crc kubenswrapper[4558]: I0120 16:43:39.014309 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:39 crc kubenswrapper[4558]: I0120 16:43:39.014321 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:39 crc kubenswrapper[4558]: I0120 16:43:39.014331 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:39Z","lastTransitionTime":"2026-01-20T16:43:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:39 crc kubenswrapper[4558]: I0120 16:43:39.116717 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:39 crc kubenswrapper[4558]: I0120 16:43:39.116748 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:39 crc kubenswrapper[4558]: I0120 16:43:39.116756 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:39 crc kubenswrapper[4558]: I0120 16:43:39.116768 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:39 crc kubenswrapper[4558]: I0120 16:43:39.116776 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:39Z","lastTransitionTime":"2026-01-20T16:43:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:39 crc kubenswrapper[4558]: I0120 16:43:39.219041 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:39 crc kubenswrapper[4558]: I0120 16:43:39.219076 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:39 crc kubenswrapper[4558]: I0120 16:43:39.219084 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:39 crc kubenswrapper[4558]: I0120 16:43:39.219096 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:39 crc kubenswrapper[4558]: I0120 16:43:39.219104 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:39Z","lastTransitionTime":"2026-01-20T16:43:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:39 crc kubenswrapper[4558]: I0120 16:43:39.320780 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:39 crc kubenswrapper[4558]: I0120 16:43:39.320816 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:39 crc kubenswrapper[4558]: I0120 16:43:39.320825 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:39 crc kubenswrapper[4558]: I0120 16:43:39.320841 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:39 crc kubenswrapper[4558]: I0120 16:43:39.320849 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:39Z","lastTransitionTime":"2026-01-20T16:43:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:39 crc kubenswrapper[4558]: I0120 16:43:39.422648 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:39 crc kubenswrapper[4558]: I0120 16:43:39.422679 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:39 crc kubenswrapper[4558]: I0120 16:43:39.422687 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:39 crc kubenswrapper[4558]: I0120 16:43:39.422699 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:39 crc kubenswrapper[4558]: I0120 16:43:39.422707 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:39Z","lastTransitionTime":"2026-01-20T16:43:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:39 crc kubenswrapper[4558]: I0120 16:43:39.524747 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:39 crc kubenswrapper[4558]: I0120 16:43:39.524779 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:39 crc kubenswrapper[4558]: I0120 16:43:39.524788 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:39 crc kubenswrapper[4558]: I0120 16:43:39.524800 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:39 crc kubenswrapper[4558]: I0120 16:43:39.524809 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:39Z","lastTransitionTime":"2026-01-20T16:43:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:39 crc kubenswrapper[4558]: I0120 16:43:39.565445 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9wrq6" Jan 20 16:43:39 crc kubenswrapper[4558]: E0120 16:43:39.565562 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9wrq6" podUID="30032328-bd33-4073-9366-e10bc5e2aa77" Jan 20 16:43:39 crc kubenswrapper[4558]: I0120 16:43:39.598675 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-09 18:14:05.741802879 +0000 UTC Jan 20 16:43:39 crc kubenswrapper[4558]: I0120 16:43:39.626151 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:39 crc kubenswrapper[4558]: I0120 16:43:39.626201 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:39 crc kubenswrapper[4558]: I0120 16:43:39.626210 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:39 crc kubenswrapper[4558]: I0120 16:43:39.626223 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:39 crc kubenswrapper[4558]: I0120 16:43:39.626233 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:39Z","lastTransitionTime":"2026-01-20T16:43:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 20 16:43:39 crc kubenswrapper[4558]: I0120 16:43:39.728145 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:39 crc kubenswrapper[4558]: I0120 16:43:39.728205 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:39 crc kubenswrapper[4558]: I0120 16:43:39.728215 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:39 crc kubenswrapper[4558]: I0120 16:43:39.728227 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:39 crc kubenswrapper[4558]: I0120 16:43:39.728235 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:39Z","lastTransitionTime":"2026-01-20T16:43:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:39 crc kubenswrapper[4558]: I0120 16:43:39.805512 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 20 16:43:39 crc kubenswrapper[4558]: I0120 16:43:39.805554 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 20 16:43:39 crc kubenswrapper[4558]: I0120 16:43:39.805565 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 20 16:43:39 crc kubenswrapper[4558]: I0120 16:43:39.805579 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 20 16:43:39 crc kubenswrapper[4558]: I0120 16:43:39.805589 4558 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-20T16:43:39Z","lastTransitionTime":"2026-01-20T16:43:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 20 16:43:39 crc kubenswrapper[4558]: I0120 16:43:39.834623 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-tqmc7"] Jan 20 16:43:39 crc kubenswrapper[4558]: I0120 16:43:39.834980 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tqmc7" Jan 20 16:43:39 crc kubenswrapper[4558]: I0120 16:43:39.836425 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Jan 20 16:43:39 crc kubenswrapper[4558]: I0120 16:43:39.836469 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Jan 20 16:43:39 crc kubenswrapper[4558]: I0120 16:43:39.836509 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Jan 20 16:43:39 crc kubenswrapper[4558]: I0120 16:43:39.836428 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Jan 20 16:43:39 crc kubenswrapper[4558]: I0120 16:43:39.938258 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/ed0f2cd3-b64e-4d05-8802-5203ba38a1ba-service-ca\") pod \"cluster-version-operator-5c965bbfc6-tqmc7\" (UID: \"ed0f2cd3-b64e-4d05-8802-5203ba38a1ba\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tqmc7" Jan 20 16:43:39 crc kubenswrapper[4558]: I0120 16:43:39.938329 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ed0f2cd3-b64e-4d05-8802-5203ba38a1ba-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-tqmc7\" (UID: \"ed0f2cd3-b64e-4d05-8802-5203ba38a1ba\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tqmc7" Jan 20 16:43:39 crc kubenswrapper[4558]: I0120 16:43:39.938366 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ed0f2cd3-b64e-4d05-8802-5203ba38a1ba-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-tqmc7\" (UID: \"ed0f2cd3-b64e-4d05-8802-5203ba38a1ba\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tqmc7" Jan 20 16:43:39 crc kubenswrapper[4558]: I0120 16:43:39.938394 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/ed0f2cd3-b64e-4d05-8802-5203ba38a1ba-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-tqmc7\" (UID: \"ed0f2cd3-b64e-4d05-8802-5203ba38a1ba\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tqmc7" Jan 20 16:43:39 crc kubenswrapper[4558]: I0120 16:43:39.938414 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/ed0f2cd3-b64e-4d05-8802-5203ba38a1ba-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-tqmc7\" (UID: \"ed0f2cd3-b64e-4d05-8802-5203ba38a1ba\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tqmc7" Jan 20 16:43:40 crc kubenswrapper[4558]: I0120 16:43:40.039152 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/ed0f2cd3-b64e-4d05-8802-5203ba38a1ba-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-tqmc7\" (UID: \"ed0f2cd3-b64e-4d05-8802-5203ba38a1ba\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tqmc7" Jan 20 16:43:40 crc 
kubenswrapper[4558]: I0120 16:43:40.039204 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/ed0f2cd3-b64e-4d05-8802-5203ba38a1ba-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-tqmc7\" (UID: \"ed0f2cd3-b64e-4d05-8802-5203ba38a1ba\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tqmc7" Jan 20 16:43:40 crc kubenswrapper[4558]: I0120 16:43:40.039231 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/ed0f2cd3-b64e-4d05-8802-5203ba38a1ba-service-ca\") pod \"cluster-version-operator-5c965bbfc6-tqmc7\" (UID: \"ed0f2cd3-b64e-4d05-8802-5203ba38a1ba\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tqmc7" Jan 20 16:43:40 crc kubenswrapper[4558]: I0120 16:43:40.039274 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ed0f2cd3-b64e-4d05-8802-5203ba38a1ba-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-tqmc7\" (UID: \"ed0f2cd3-b64e-4d05-8802-5203ba38a1ba\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tqmc7" Jan 20 16:43:40 crc kubenswrapper[4558]: I0120 16:43:40.039296 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ed0f2cd3-b64e-4d05-8802-5203ba38a1ba-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-tqmc7\" (UID: \"ed0f2cd3-b64e-4d05-8802-5203ba38a1ba\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tqmc7" Jan 20 16:43:40 crc kubenswrapper[4558]: I0120 16:43:40.039296 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/ed0f2cd3-b64e-4d05-8802-5203ba38a1ba-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-tqmc7\" (UID: \"ed0f2cd3-b64e-4d05-8802-5203ba38a1ba\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tqmc7" Jan 20 16:43:40 crc kubenswrapper[4558]: I0120 16:43:40.039312 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/ed0f2cd3-b64e-4d05-8802-5203ba38a1ba-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-tqmc7\" (UID: \"ed0f2cd3-b64e-4d05-8802-5203ba38a1ba\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tqmc7" Jan 20 16:43:40 crc kubenswrapper[4558]: I0120 16:43:40.039950 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/ed0f2cd3-b64e-4d05-8802-5203ba38a1ba-service-ca\") pod \"cluster-version-operator-5c965bbfc6-tqmc7\" (UID: \"ed0f2cd3-b64e-4d05-8802-5203ba38a1ba\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tqmc7" Jan 20 16:43:40 crc kubenswrapper[4558]: I0120 16:43:40.044019 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ed0f2cd3-b64e-4d05-8802-5203ba38a1ba-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-tqmc7\" (UID: \"ed0f2cd3-b64e-4d05-8802-5203ba38a1ba\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tqmc7" Jan 20 16:43:40 crc kubenswrapper[4558]: I0120 16:43:40.052486 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" 
(UniqueName: \"kubernetes.io/projected/ed0f2cd3-b64e-4d05-8802-5203ba38a1ba-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-tqmc7\" (UID: \"ed0f2cd3-b64e-4d05-8802-5203ba38a1ba\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tqmc7" Jan 20 16:43:40 crc kubenswrapper[4558]: I0120 16:43:40.144540 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tqmc7" Jan 20 16:43:40 crc kubenswrapper[4558]: W0120 16:43:40.155216 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poded0f2cd3_b64e_4d05_8802_5203ba38a1ba.slice/crio-025f5879371b4ee3d573a23f99743a021409260da4c1a9168413470a116fe879 WatchSource:0}: Error finding container 025f5879371b4ee3d573a23f99743a021409260da4c1a9168413470a116fe879: Status 404 returned error can't find the container with id 025f5879371b4ee3d573a23f99743a021409260da4c1a9168413470a116fe879 Jan 20 16:43:40 crc kubenswrapper[4558]: I0120 16:43:40.565776 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 20 16:43:40 crc kubenswrapper[4558]: I0120 16:43:40.565814 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:43:40 crc kubenswrapper[4558]: I0120 16:43:40.565781 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 20 16:43:40 crc kubenswrapper[4558]: E0120 16:43:40.565901 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 20 16:43:40 crc kubenswrapper[4558]: E0120 16:43:40.566023 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 20 16:43:40 crc kubenswrapper[4558]: E0120 16:43:40.566069 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 20 16:43:40 crc kubenswrapper[4558]: I0120 16:43:40.599364 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-17 09:31:02.768132082 +0000 UTC Jan 20 16:43:40 crc kubenswrapper[4558]: I0120 16:43:40.599406 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Rotating certificates Jan 20 16:43:40 crc kubenswrapper[4558]: I0120 16:43:40.605385 4558 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146 Jan 20 16:43:40 crc kubenswrapper[4558]: I0120 16:43:40.935884 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tqmc7" event={"ID":"ed0f2cd3-b64e-4d05-8802-5203ba38a1ba","Type":"ContainerStarted","Data":"290ed90af330919a876bdd62846d2ccf48188391ed4e10ef5a1b818477b5f10b"} Jan 20 16:43:40 crc kubenswrapper[4558]: I0120 16:43:40.935922 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tqmc7" event={"ID":"ed0f2cd3-b64e-4d05-8802-5203ba38a1ba","Type":"ContainerStarted","Data":"025f5879371b4ee3d573a23f99743a021409260da4c1a9168413470a116fe879"} Jan 20 16:43:40 crc kubenswrapper[4558]: I0120 16:43:40.945941 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-tqmc7" podStartSLOduration=96.945932317 podStartE2EDuration="1m36.945932317s" podCreationTimestamp="2026-01-20 16:42:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:43:40.945451694 +0000 UTC m=+114.705789661" watchObservedRunningTime="2026-01-20 16:43:40.945932317 +0000 UTC m=+114.706270284" Jan 20 16:43:41 crc kubenswrapper[4558]: I0120 16:43:41.565042 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9wrq6" Jan 20 16:43:41 crc kubenswrapper[4558]: E0120 16:43:41.565128 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9wrq6" podUID="30032328-bd33-4073-9366-e10bc5e2aa77" Jan 20 16:43:42 crc kubenswrapper[4558]: I0120 16:43:42.565048 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:43:42 crc kubenswrapper[4558]: I0120 16:43:42.565091 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 20 16:43:42 crc kubenswrapper[4558]: E0120 16:43:42.565184 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 20 16:43:42 crc kubenswrapper[4558]: I0120 16:43:42.565060 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 20 16:43:42 crc kubenswrapper[4558]: E0120 16:43:42.565309 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 20 16:43:42 crc kubenswrapper[4558]: E0120 16:43:42.565418 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 20 16:43:43 crc kubenswrapper[4558]: I0120 16:43:43.565466 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9wrq6" Jan 20 16:43:43 crc kubenswrapper[4558]: E0120 16:43:43.565851 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9wrq6" podUID="30032328-bd33-4073-9366-e10bc5e2aa77" Jan 20 16:43:43 crc kubenswrapper[4558]: I0120 16:43:43.565983 4558 scope.go:117] "RemoveContainer" containerID="0e9f4ed928d20c9db64a689d7d14df9b81870bb0523cf5098e09d2d85aad9fca" Jan 20 16:43:43 crc kubenswrapper[4558]: I0120 16:43:43.944541 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-nv2xw_254129cd-82fc-4162-b671-2434bc9e2972/ovnkube-controller/3.log" Jan 20 16:43:43 crc kubenswrapper[4558]: I0120 16:43:43.946495 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" event={"ID":"254129cd-82fc-4162-b671-2434bc9e2972","Type":"ContainerStarted","Data":"8d6af51a18940c05083d10a25b2c636978ce51ac88bbfc6ed169b6669fb846a4"} Jan 20 16:43:43 crc kubenswrapper[4558]: I0120 16:43:43.946847 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" Jan 20 16:43:43 crc kubenswrapper[4558]: I0120 16:43:43.966784 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" podStartSLOduration=99.966771631 podStartE2EDuration="1m39.966771631s" podCreationTimestamp="2026-01-20 16:42:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:43:43.966179387 +0000 UTC m=+117.726517354" watchObservedRunningTime="2026-01-20 16:43:43.966771631 +0000 UTC m=+117.727109597" Jan 20 16:43:44 crc kubenswrapper[4558]: I0120 16:43:44.155946 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-9wrq6"] Jan 20 16:43:44 crc kubenswrapper[4558]: I0120 16:43:44.156036 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9wrq6" Jan 20 16:43:44 crc kubenswrapper[4558]: E0120 16:43:44.156117 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9wrq6" podUID="30032328-bd33-4073-9366-e10bc5e2aa77" Jan 20 16:43:44 crc kubenswrapper[4558]: I0120 16:43:44.565930 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:43:44 crc kubenswrapper[4558]: E0120 16:43:44.566282 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 20 16:43:44 crc kubenswrapper[4558]: I0120 16:43:44.566051 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 20 16:43:44 crc kubenswrapper[4558]: E0120 16:43:44.566355 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 20 16:43:44 crc kubenswrapper[4558]: I0120 16:43:44.565953 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 20 16:43:44 crc kubenswrapper[4558]: E0120 16:43:44.566405 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 20 16:43:45 crc kubenswrapper[4558]: I0120 16:43:45.566021 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9wrq6" Jan 20 16:43:45 crc kubenswrapper[4558]: E0120 16:43:45.566158 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9wrq6" podUID="30032328-bd33-4073-9366-e10bc5e2aa77" Jan 20 16:43:46 crc kubenswrapper[4558]: E0120 16:43:46.559495 4558 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Jan 20 16:43:46 crc kubenswrapper[4558]: I0120 16:43:46.565047 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:43:46 crc kubenswrapper[4558]: I0120 16:43:46.565067 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 20 16:43:46 crc kubenswrapper[4558]: E0120 16:43:46.565917 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 20 16:43:46 crc kubenswrapper[4558]: I0120 16:43:46.565936 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 20 16:43:46 crc kubenswrapper[4558]: E0120 16:43:46.566003 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 20 16:43:46 crc kubenswrapper[4558]: E0120 16:43:46.566021 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 20 16:43:46 crc kubenswrapper[4558]: E0120 16:43:46.625035 4558 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 20 16:43:47 crc kubenswrapper[4558]: I0120 16:43:47.565570 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9wrq6" Jan 20 16:43:47 crc kubenswrapper[4558]: E0120 16:43:47.565698 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9wrq6" podUID="30032328-bd33-4073-9366-e10bc5e2aa77" Jan 20 16:43:48 crc kubenswrapper[4558]: I0120 16:43:48.565553 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:43:48 crc kubenswrapper[4558]: I0120 16:43:48.565587 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 20 16:43:48 crc kubenswrapper[4558]: E0120 16:43:48.565672 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 20 16:43:48 crc kubenswrapper[4558]: I0120 16:43:48.565705 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 20 16:43:48 crc kubenswrapper[4558]: E0120 16:43:48.565729 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 20 16:43:48 crc kubenswrapper[4558]: E0120 16:43:48.565824 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 20 16:43:49 crc kubenswrapper[4558]: I0120 16:43:49.565461 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9wrq6" Jan 20 16:43:49 crc kubenswrapper[4558]: E0120 16:43:49.565567 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-9wrq6" podUID="30032328-bd33-4073-9366-e10bc5e2aa77" Jan 20 16:43:50 crc kubenswrapper[4558]: I0120 16:43:50.565646 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 20 16:43:50 crc kubenswrapper[4558]: I0120 16:43:50.565654 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:43:50 crc kubenswrapper[4558]: I0120 16:43:50.565652 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 20 16:43:50 crc kubenswrapper[4558]: E0120 16:43:50.565784 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 20 16:43:50 crc kubenswrapper[4558]: E0120 16:43:50.565966 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 20 16:43:50 crc kubenswrapper[4558]: E0120 16:43:50.566060 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 20 16:43:51 crc kubenswrapper[4558]: I0120 16:43:51.565392 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9wrq6" Jan 20 16:43:51 crc kubenswrapper[4558]: E0120 16:43:51.565517 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9wrq6" podUID="30032328-bd33-4073-9366-e10bc5e2aa77" Jan 20 16:43:51 crc kubenswrapper[4558]: E0120 16:43:51.625846 4558 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 20 16:43:52 crc kubenswrapper[4558]: I0120 16:43:52.565265 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 20 16:43:52 crc kubenswrapper[4558]: I0120 16:43:52.565314 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 20 16:43:52 crc kubenswrapper[4558]: I0120 16:43:52.565369 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:43:52 crc kubenswrapper[4558]: E0120 16:43:52.565377 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 20 16:43:52 crc kubenswrapper[4558]: E0120 16:43:52.565471 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 20 16:43:52 crc kubenswrapper[4558]: E0120 16:43:52.565531 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 20 16:43:53 crc kubenswrapper[4558]: I0120 16:43:53.565546 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9wrq6" Jan 20 16:43:53 crc kubenswrapper[4558]: E0120 16:43:53.565679 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9wrq6" podUID="30032328-bd33-4073-9366-e10bc5e2aa77" Jan 20 16:43:53 crc kubenswrapper[4558]: I0120 16:43:53.565943 4558 scope.go:117] "RemoveContainer" containerID="2297634e20683413a5eb643517580c9486303dc9616a7588a4ac571f3d0e53b5" Jan 20 16:43:53 crc kubenswrapper[4558]: I0120 16:43:53.970809 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-jsqvf_bedf08c7-1f93-4931-a7f3-e729e2a137af/kube-multus/1.log" Jan 20 16:43:53 crc kubenswrapper[4558]: I0120 16:43:53.971011 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-jsqvf" event={"ID":"bedf08c7-1f93-4931-a7f3-e729e2a137af","Type":"ContainerStarted","Data":"6ff161b5eb2dde97ccaf09d4e6df49b859981accd8ab7643d47f6900019c3dae"} Jan 20 16:43:54 crc kubenswrapper[4558]: I0120 16:43:54.565717 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:43:54 crc kubenswrapper[4558]: E0120 16:43:54.565817 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 20 16:43:54 crc kubenswrapper[4558]: I0120 16:43:54.565959 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 20 16:43:54 crc kubenswrapper[4558]: E0120 16:43:54.565998 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 20 16:43:54 crc kubenswrapper[4558]: I0120 16:43:54.566079 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 20 16:43:54 crc kubenswrapper[4558]: E0120 16:43:54.566140 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 20 16:43:55 crc kubenswrapper[4558]: I0120 16:43:55.565401 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9wrq6" Jan 20 16:43:55 crc kubenswrapper[4558]: E0120 16:43:55.565524 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-9wrq6" podUID="30032328-bd33-4073-9366-e10bc5e2aa77" Jan 20 16:43:56 crc kubenswrapper[4558]: I0120 16:43:56.565060 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:43:56 crc kubenswrapper[4558]: I0120 16:43:56.565266 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 20 16:43:56 crc kubenswrapper[4558]: E0120 16:43:56.565955 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 20 16:43:56 crc kubenswrapper[4558]: E0120 16:43:56.566086 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 20 16:43:56 crc kubenswrapper[4558]: I0120 16:43:56.566140 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 20 16:43:56 crc kubenswrapper[4558]: E0120 16:43:56.566236 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 20 16:43:57 crc kubenswrapper[4558]: I0120 16:43:57.565428 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-9wrq6" Jan 20 16:43:57 crc kubenswrapper[4558]: I0120 16:43:57.567776 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Jan 20 16:43:57 crc kubenswrapper[4558]: I0120 16:43:57.567864 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Jan 20 16:43:58 crc kubenswrapper[4558]: I0120 16:43:58.564922 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:43:58 crc kubenswrapper[4558]: I0120 16:43:58.564981 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 20 16:43:58 crc kubenswrapper[4558]: I0120 16:43:58.565186 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 20 16:43:58 crc kubenswrapper[4558]: I0120 16:43:58.567235 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Jan 20 16:43:58 crc kubenswrapper[4558]: I0120 16:43:58.567470 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Jan 20 16:43:58 crc kubenswrapper[4558]: I0120 16:43:58.567699 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Jan 20 16:43:58 crc kubenswrapper[4558]: I0120 16:43:58.567756 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.415857 4558 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.448111 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-sjzfb"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.448500 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-hsls5"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.448789 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-hsls5" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.449065 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-sjzfb" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.451878 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-t225g"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.452286 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-t225g" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.454281 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.463847 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-rh247"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.464295 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-rh247" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.464583 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-c97pw"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.465274 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-c97pw" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.465568 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-nwjzd"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.474950 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-nwjzd" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.486146 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.486335 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.486363 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.486441 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.486555 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.486676 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.486772 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.486868 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.486891 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.486958 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.487061 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.487183 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.487304 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.487316 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.488874 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-lhmdh"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.488897 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.488985 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.489129 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.489258 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.489265 4558 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-config-operator/openshift-config-operator-7777fb866f-ppqsn"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.489764 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-h7vz8"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.490001 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-h7vz8" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.489335 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-lhmdh" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.490401 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-ppqsn" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.493412 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.495748 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.496071 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.496326 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.497921 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-jq4hg"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.498333 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-225km"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.498516 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jq4hg" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.498537 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-b97gp"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.498656 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-225km" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.498961 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.499059 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.499215 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.499238 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b97gp" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.499302 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.499388 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.499471 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.499568 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.499658 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.499748 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.499780 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.499793 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.499853 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.499930 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.499939 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.500054 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.501699 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-wmcfr"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.502114 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-vmm7h"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.502355 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-jvkk8"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.502697 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-vmm7h" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.502738 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-wmcfr" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.503076 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jvkk8" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.508966 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.509044 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.509118 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.509157 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.509244 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.509452 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.509779 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.511361 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-7flzc"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.511781 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-5rxsc"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.512073 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-5rxsc" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.512292 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-7flzc" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.512371 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8xc26"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.512924 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8xc26" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.513937 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.515375 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.515556 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-zsdqk"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.527515 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d2311c1a-55c3-437c-ae71-881861de70ff-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-h7vz8\" (UID: \"d2311c1a-55c3-437c-ae71-881861de70ff\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-h7vz8" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.527557 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/f6c99edf-1afb-476e-b63e-ee73c372499c-etcd-serving-ca\") pod \"apiserver-76f77b778f-sjzfb\" (UID: \"f6c99edf-1afb-476e-b63e-ee73c372499c\") " pod="openshift-apiserver/apiserver-76f77b778f-sjzfb" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.527576 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/350b4e8e-0147-4d0a-b9c0-1cdcdc2312f1-images\") pod \"machine-api-operator-5694c8668f-hsls5\" (UID: \"350b4e8e-0147-4d0a-b9c0-1cdcdc2312f1\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-hsls5" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.527612 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/4654626b-36d0-4072-a04c-d8ee0678fd50-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-nwjzd\" (UID: \"4654626b-36d0-4072-a04c-d8ee0678fd50\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwjzd" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.527646 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xwdlg\" (UniqueName: \"kubernetes.io/projected/350b4e8e-0147-4d0a-b9c0-1cdcdc2312f1-kube-api-access-xwdlg\") pod \"machine-api-operator-5694c8668f-hsls5\" (UID: \"350b4e8e-0147-4d0a-b9c0-1cdcdc2312f1\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-hsls5" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.527760 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d2311c1a-55c3-437c-ae71-881861de70ff-config\") pod \"openshift-apiserver-operator-796bbdcf4f-h7vz8\" (UID: \"d2311c1a-55c3-437c-ae71-881861de70ff\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-h7vz8" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.527784 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fxvmt\" (UniqueName: 
\"kubernetes.io/projected/20de26f9-85a3-42a2-ba6b-0839c0657141-kube-api-access-fxvmt\") pod \"openshift-config-operator-7777fb866f-ppqsn\" (UID: \"20de26f9-85a3-42a2-ba6b-0839c0657141\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-ppqsn" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.527804 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8c24d\" (UniqueName: \"kubernetes.io/projected/4654626b-36d0-4072-a04c-d8ee0678fd50-kube-api-access-8c24d\") pod \"oauth-openshift-558db77b4-nwjzd\" (UID: \"4654626b-36d0-4072-a04c-d8ee0678fd50\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwjzd" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.527828 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/f6c99edf-1afb-476e-b63e-ee73c372499c-encryption-config\") pod \"apiserver-76f77b778f-sjzfb\" (UID: \"f6c99edf-1afb-476e-b63e-ee73c372499c\") " pod="openshift-apiserver/apiserver-76f77b778f-sjzfb" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.527849 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/4654626b-36d0-4072-a04c-d8ee0678fd50-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-nwjzd\" (UID: \"4654626b-36d0-4072-a04c-d8ee0678fd50\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwjzd" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.527871 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/4654626b-36d0-4072-a04c-d8ee0678fd50-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-nwjzd\" (UID: \"4654626b-36d0-4072-a04c-d8ee0678fd50\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwjzd" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.527889 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/f6c99edf-1afb-476e-b63e-ee73c372499c-image-import-ca\") pod \"apiserver-76f77b778f-sjzfb\" (UID: \"f6c99edf-1afb-476e-b63e-ee73c372499c\") " pod="openshift-apiserver/apiserver-76f77b778f-sjzfb" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.527910 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/f6c99edf-1afb-476e-b63e-ee73c372499c-etcd-client\") pod \"apiserver-76f77b778f-sjzfb\" (UID: \"f6c99edf-1afb-476e-b63e-ee73c372499c\") " pod="openshift-apiserver/apiserver-76f77b778f-sjzfb" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.527929 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f6c99edf-1afb-476e-b63e-ee73c372499c-audit-dir\") pod \"apiserver-76f77b778f-sjzfb\" (UID: \"f6c99edf-1afb-476e-b63e-ee73c372499c\") " pod="openshift-apiserver/apiserver-76f77b778f-sjzfb" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.527949 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/350b4e8e-0147-4d0a-b9c0-1cdcdc2312f1-machine-api-operator-tls\") pod 
\"machine-api-operator-5694c8668f-hsls5\" (UID: \"350b4e8e-0147-4d0a-b9c0-1cdcdc2312f1\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-hsls5" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.527969 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4654626b-36d0-4072-a04c-d8ee0678fd50-audit-policies\") pod \"oauth-openshift-558db77b4-nwjzd\" (UID: \"4654626b-36d0-4072-a04c-d8ee0678fd50\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwjzd" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.527991 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/350b4e8e-0147-4d0a-b9c0-1cdcdc2312f1-config\") pod \"machine-api-operator-5694c8668f-hsls5\" (UID: \"350b4e8e-0147-4d0a-b9c0-1cdcdc2312f1\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-hsls5" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.528012 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/dd7b53bb-d740-497c-a36e-87e51d6f05a6-trusted-ca-bundle\") pod \"console-f9d7485db-lhmdh\" (UID: \"dd7b53bb-d740-497c-a36e-87e51d6f05a6\") " pod="openshift-console/console-f9d7485db-lhmdh" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.528033 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/4654626b-36d0-4072-a04c-d8ee0678fd50-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-nwjzd\" (UID: \"4654626b-36d0-4072-a04c-d8ee0678fd50\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwjzd" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.528055 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m9sms\" (UniqueName: \"kubernetes.io/projected/d2311c1a-55c3-437c-ae71-881861de70ff-kube-api-access-m9sms\") pod \"openshift-apiserver-operator-796bbdcf4f-h7vz8\" (UID: \"d2311c1a-55c3-437c-ae71-881861de70ff\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-h7vz8" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.528195 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/dd7b53bb-d740-497c-a36e-87e51d6f05a6-console-oauth-config\") pod \"console-f9d7485db-lhmdh\" (UID: \"dd7b53bb-d740-497c-a36e-87e51d6f05a6\") " pod="openshift-console/console-f9d7485db-lhmdh" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.528216 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n4q45\" (UniqueName: \"kubernetes.io/projected/9e148c70-cb30-40cf-a333-65c0fdf0aa35-kube-api-access-n4q45\") pod \"authentication-operator-69f744f599-rh247\" (UID: \"9e148c70-cb30-40cf-a333-65c0fdf0aa35\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rh247" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.528417 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: 
\"kubernetes.io/secret/4654626b-36d0-4072-a04c-d8ee0678fd50-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-nwjzd\" (UID: \"4654626b-36d0-4072-a04c-d8ee0678fd50\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwjzd" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.528454 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/f6c99edf-1afb-476e-b63e-ee73c372499c-audit\") pod \"apiserver-76f77b778f-sjzfb\" (UID: \"f6c99edf-1afb-476e-b63e-ee73c372499c\") " pod="openshift-apiserver/apiserver-76f77b778f-sjzfb" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.528681 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n6m5z\" (UniqueName: \"kubernetes.io/projected/dd7b53bb-d740-497c-a36e-87e51d6f05a6-kube-api-access-n6m5z\") pod \"console-f9d7485db-lhmdh\" (UID: \"dd7b53bb-d740-497c-a36e-87e51d6f05a6\") " pod="openshift-console/console-f9d7485db-lhmdh" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.528705 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9e148c70-cb30-40cf-a333-65c0fdf0aa35-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-rh247\" (UID: \"9e148c70-cb30-40cf-a333-65c0fdf0aa35\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rh247" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.528860 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4654626b-36d0-4072-a04c-d8ee0678fd50-audit-dir\") pod \"oauth-openshift-558db77b4-nwjzd\" (UID: \"4654626b-36d0-4072-a04c-d8ee0678fd50\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwjzd" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.528967 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/20de26f9-85a3-42a2-ba6b-0839c0657141-serving-cert\") pod \"openshift-config-operator-7777fb866f-ppqsn\" (UID: \"20de26f9-85a3-42a2-ba6b-0839c0657141\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-ppqsn" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.529107 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/dd7b53bb-d740-497c-a36e-87e51d6f05a6-console-config\") pod \"console-f9d7485db-lhmdh\" (UID: \"dd7b53bb-d740-497c-a36e-87e51d6f05a6\") " pod="openshift-console/console-f9d7485db-lhmdh" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.529131 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c2b3de8e-9eaa-43d7-a061-674af5e035cb-client-ca\") pod \"controller-manager-879f6c89f-t225g\" (UID: \"c2b3de8e-9eaa-43d7-a061-674af5e035cb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-t225g" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.529315 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9w8mv\" (UniqueName: \"kubernetes.io/projected/f6c99edf-1afb-476e-b63e-ee73c372499c-kube-api-access-9w8mv\") pod 
\"apiserver-76f77b778f-sjzfb\" (UID: \"f6c99edf-1afb-476e-b63e-ee73c372499c\") " pod="openshift-apiserver/apiserver-76f77b778f-sjzfb" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.529345 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4654626b-36d0-4072-a04c-d8ee0678fd50-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-nwjzd\" (UID: \"4654626b-36d0-4072-a04c-d8ee0678fd50\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwjzd" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.529382 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/1dd6e1c0-24f1-43fc-81d6-b0d0e3ed36c2-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-c97pw\" (UID: \"1dd6e1c0-24f1-43fc-81d6-b0d0e3ed36c2\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-c97pw" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.529402 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dqz5k\" (UniqueName: \"kubernetes.io/projected/1dd6e1c0-24f1-43fc-81d6-b0d0e3ed36c2-kube-api-access-dqz5k\") pod \"cluster-samples-operator-665b6dd947-c97pw\" (UID: \"1dd6e1c0-24f1-43fc-81d6-b0d0e3ed36c2\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-c97pw" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.529438 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/4654626b-36d0-4072-a04c-d8ee0678fd50-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-nwjzd\" (UID: \"4654626b-36d0-4072-a04c-d8ee0678fd50\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwjzd" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.529463 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c2b3de8e-9eaa-43d7-a061-674af5e035cb-config\") pod \"controller-manager-879f6c89f-t225g\" (UID: \"c2b3de8e-9eaa-43d7-a061-674af5e035cb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-t225g" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.529482 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/dd7b53bb-d740-497c-a36e-87e51d6f05a6-console-serving-cert\") pod \"console-f9d7485db-lhmdh\" (UID: \"dd7b53bb-d740-497c-a36e-87e51d6f05a6\") " pod="openshift-console/console-f9d7485db-lhmdh" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.529502 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f6c99edf-1afb-476e-b63e-ee73c372499c-serving-cert\") pod \"apiserver-76f77b778f-sjzfb\" (UID: \"f6c99edf-1afb-476e-b63e-ee73c372499c\") " pod="openshift-apiserver/apiserver-76f77b778f-sjzfb" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.529598 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c2b3de8e-9eaa-43d7-a061-674af5e035cb-serving-cert\") pod 
\"controller-manager-879f6c89f-t225g\" (UID: \"c2b3de8e-9eaa-43d7-a061-674af5e035cb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-t225g" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.529639 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/f6c99edf-1afb-476e-b63e-ee73c372499c-node-pullsecrets\") pod \"apiserver-76f77b778f-sjzfb\" (UID: \"f6c99edf-1afb-476e-b63e-ee73c372499c\") " pod="openshift-apiserver/apiserver-76f77b778f-sjzfb" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.529672 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nfwxp\" (UniqueName: \"kubernetes.io/projected/c2b3de8e-9eaa-43d7-a061-674af5e035cb-kube-api-access-nfwxp\") pod \"controller-manager-879f6c89f-t225g\" (UID: \"c2b3de8e-9eaa-43d7-a061-674af5e035cb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-t225g" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.529693 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/4654626b-36d0-4072-a04c-d8ee0678fd50-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-nwjzd\" (UID: \"4654626b-36d0-4072-a04c-d8ee0678fd50\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwjzd" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.529715 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9e148c70-cb30-40cf-a333-65c0fdf0aa35-config\") pod \"authentication-operator-69f744f599-rh247\" (UID: \"9e148c70-cb30-40cf-a333-65c0fdf0aa35\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rh247" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.529737 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/4654626b-36d0-4072-a04c-d8ee0678fd50-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-nwjzd\" (UID: \"4654626b-36d0-4072-a04c-d8ee0678fd50\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwjzd" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.529767 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/dd7b53bb-d740-497c-a36e-87e51d6f05a6-service-ca\") pod \"console-f9d7485db-lhmdh\" (UID: \"dd7b53bb-d740-497c-a36e-87e51d6f05a6\") " pod="openshift-console/console-f9d7485db-lhmdh" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.529787 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/4654626b-36d0-4072-a04c-d8ee0678fd50-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-nwjzd\" (UID: \"4654626b-36d0-4072-a04c-d8ee0678fd50\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwjzd" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.529806 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/dd7b53bb-d740-497c-a36e-87e51d6f05a6-oauth-serving-cert\") pod 
\"console-f9d7485db-lhmdh\" (UID: \"dd7b53bb-d740-497c-a36e-87e51d6f05a6\") " pod="openshift-console/console-f9d7485db-lhmdh" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.529827 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9e148c70-cb30-40cf-a333-65c0fdf0aa35-service-ca-bundle\") pod \"authentication-operator-69f744f599-rh247\" (UID: \"9e148c70-cb30-40cf-a333-65c0fdf0aa35\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rh247" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.529876 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/4654626b-36d0-4072-a04c-d8ee0678fd50-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-nwjzd\" (UID: \"4654626b-36d0-4072-a04c-d8ee0678fd50\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwjzd" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.529901 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c2b3de8e-9eaa-43d7-a061-674af5e035cb-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-t225g\" (UID: \"c2b3de8e-9eaa-43d7-a061-674af5e035cb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-t225g" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.529924 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f6c99edf-1afb-476e-b63e-ee73c372499c-config\") pod \"apiserver-76f77b778f-sjzfb\" (UID: \"f6c99edf-1afb-476e-b63e-ee73c372499c\") " pod="openshift-apiserver/apiserver-76f77b778f-sjzfb" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.529945 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9e148c70-cb30-40cf-a333-65c0fdf0aa35-serving-cert\") pod \"authentication-operator-69f744f599-rh247\" (UID: \"9e148c70-cb30-40cf-a333-65c0fdf0aa35\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rh247" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.529987 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/20de26f9-85a3-42a2-ba6b-0839c0657141-available-featuregates\") pod \"openshift-config-operator-7777fb866f-ppqsn\" (UID: \"20de26f9-85a3-42a2-ba6b-0839c0657141\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-ppqsn" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.530009 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f6c99edf-1afb-476e-b63e-ee73c372499c-trusted-ca-bundle\") pod \"apiserver-76f77b778f-sjzfb\" (UID: \"f6c99edf-1afb-476e-b63e-ee73c372499c\") " pod="openshift-apiserver/apiserver-76f77b778f-sjzfb" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.532425 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qdvgg"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.534570 4558 util.go:30] "No sandbox for pod can 
be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qdvgg" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.537928 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.538051 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.542858 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.543144 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.543369 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-zsdqk" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.544109 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.545219 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-sgbgg"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.545800 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.546024 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.546055 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-sgbgg" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.546156 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.546223 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.546310 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.546400 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.546416 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.546484 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.546518 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.546593 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.546642 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.549105 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8rqtk"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.549524 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8rqtk" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.549713 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.549933 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.550216 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.551794 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-dqf5x"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.552458 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-dqf5x" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.552747 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-49k6d"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.553115 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-49k6d" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.553226 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.553261 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-25jqm"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.553518 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.553545 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-25jqm" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.554134 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-vkkjt"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.554725 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-vkkjt" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.556700 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.557116 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.557379 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.557566 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.557750 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.557995 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.558081 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.556731 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.558399 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.558434 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.558692 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.556628 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.560624 4558 reflector.go:368] Caches 
populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.560837 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.563055 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.569188 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.569284 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.569654 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.569953 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.570298 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.570468 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.570375 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.570818 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.570815 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-t225g"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.571764 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.571845 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-mshrf"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.571800 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.572646 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-mshrf" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.572727 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.572833 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.572977 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.574212 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.574899 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.575889 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.574943 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.575254 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.580980 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-l8kh6"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.581796 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-l8kh6" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.586447 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-7wfbg"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.587195 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.589538 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-7wfbg" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.589538 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-p5lzn"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.591226 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rh5k4"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.591237 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-p5lzn" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.592566 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.594584 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zd9qc"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.595546 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rh5k4" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.596227 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.597832 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.598843 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zd9qc" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.599014 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-4w5mk"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.599475 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-4w5mk" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.600840 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-4t4rs"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.601724 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-sjzfb"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.601845 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-4t4rs" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.602021 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-t4dxf"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.603698 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-pltl7"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.603813 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-t4dxf" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.604511 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29482110-7p5pb"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.604769 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-pltl7" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.605050 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-rh247"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.605264 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29482110-7p5pb" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.605483 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-hsls5"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.606439 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-2j5lz"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.607347 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-v2c77"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.607579 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-2j5lz" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.607920 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-v2c77" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.608494 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-225km"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.609590 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-ppqsn"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.610777 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.610878 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-lhmdh"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.611757 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-h7vz8"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.612974 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-c97pw"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.614202 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qdvgg"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.615208 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-7flzc"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.616511 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-wmcfr"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.617210 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8rqtk"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.618187 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-sgbgg"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.619000 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-vmm7h"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.619955 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-zsdqk"] Jan 20 16:44:00 crc 
kubenswrapper[4558]: I0120 16:44:00.620854 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-p5lzn"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.621918 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-25jqm"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.622793 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-5rxsc"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.623681 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-jvkk8"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.625158 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rh5k4"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.625385 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-mshrf"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.626383 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-49k6d"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.627245 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-dqf5x"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.628237 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-nwjzd"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.629041 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8xc26"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.629978 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-v2c77"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.630377 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.630807 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-b97gp"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.631063 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9e148c70-cb30-40cf-a333-65c0fdf0aa35-serving-cert\") pod \"authentication-operator-69f744f599-rh247\" (UID: \"9e148c70-cb30-40cf-a333-65c0fdf0aa35\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rh247" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.631137 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/04ab3f66-5485-4b6f-9293-ed76b2695d50-profile-collector-cert\") pod \"catalog-operator-68c6474976-zd9qc\" (UID: \"04ab3f66-5485-4b6f-9293-ed76b2695d50\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zd9qc" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.631310 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: 
\"kubernetes.io/secret/a8761e3d-bfc2-481f-a29e-15f7566a5881-srv-cert\") pod \"olm-operator-6b444d44fb-p5lzn\" (UID: \"a8761e3d-bfc2-481f-a29e-15f7566a5881\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-p5lzn" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.631345 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/1d076664-8f86-4e46-a67a-85539915bb04-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-7flzc\" (UID: \"1d076664-8f86-4e46-a67a-85539915bb04\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-7flzc" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.631371 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ca06f669-4a81-4e6c-abc1-2800cd58c188-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-49k6d\" (UID: \"ca06f669-4a81-4e6c-abc1-2800cd58c188\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-49k6d" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.631399 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/20de26f9-85a3-42a2-ba6b-0839c0657141-available-featuregates\") pod \"openshift-config-operator-7777fb866f-ppqsn\" (UID: \"20de26f9-85a3-42a2-ba6b-0839c0657141\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-ppqsn" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.631419 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f6c99edf-1afb-476e-b63e-ee73c372499c-trusted-ca-bundle\") pod \"apiserver-76f77b778f-sjzfb\" (UID: \"f6c99edf-1afb-476e-b63e-ee73c372499c\") " pod="openshift-apiserver/apiserver-76f77b778f-sjzfb" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.631441 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/350b4e8e-0147-4d0a-b9c0-1cdcdc2312f1-images\") pod \"machine-api-operator-5694c8668f-hsls5\" (UID: \"350b4e8e-0147-4d0a-b9c0-1cdcdc2312f1\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-hsls5" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.631460 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/31b93be8-46ba-485b-b5d3-639064825683-config\") pod \"service-ca-operator-777779d784-25jqm\" (UID: \"31b93be8-46ba-485b-b5d3-639064825683\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-25jqm" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.631480 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d2311c1a-55c3-437c-ae71-881861de70ff-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-h7vz8\" (UID: \"d2311c1a-55c3-437c-ae71-881861de70ff\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-h7vz8" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.631498 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/f6c99edf-1afb-476e-b63e-ee73c372499c-etcd-serving-ca\") pod \"apiserver-76f77b778f-sjzfb\" 
(UID: \"f6c99edf-1afb-476e-b63e-ee73c372499c\") " pod="openshift-apiserver/apiserver-76f77b778f-sjzfb" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.631521 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/4654626b-36d0-4072-a04c-d8ee0678fd50-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-nwjzd\" (UID: \"4654626b-36d0-4072-a04c-d8ee0678fd50\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwjzd" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.631540 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/04ab3f66-5485-4b6f-9293-ed76b2695d50-srv-cert\") pod \"catalog-operator-68c6474976-zd9qc\" (UID: \"04ab3f66-5485-4b6f-9293-ed76b2695d50\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zd9qc" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.631662 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xwdlg\" (UniqueName: \"kubernetes.io/projected/350b4e8e-0147-4d0a-b9c0-1cdcdc2312f1-kube-api-access-xwdlg\") pod \"machine-api-operator-5694c8668f-hsls5\" (UID: \"350b4e8e-0147-4d0a-b9c0-1cdcdc2312f1\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-hsls5" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.631720 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/20de26f9-85a3-42a2-ba6b-0839c0657141-available-featuregates\") pod \"openshift-config-operator-7777fb866f-ppqsn\" (UID: \"20de26f9-85a3-42a2-ba6b-0839c0657141\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-ppqsn" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.631903 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-l8kh6"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.631991 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/84f45a25-f77d-4c4c-88b6-5bdc4c286f10-default-certificate\") pod \"router-default-5444994796-vkkjt\" (UID: \"84f45a25-f77d-4c4c-88b6-5bdc4c286f10\") " pod="openshift-ingress/router-default-5444994796-vkkjt" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.632036 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d49afd2e-470e-4bd4-a30d-5014f423510b-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-8xc26\" (UID: \"d49afd2e-470e-4bd4-a30d-5014f423510b\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8xc26" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.632060 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/31b93be8-46ba-485b-b5d3-639064825683-serving-cert\") pod \"service-ca-operator-777779d784-25jqm\" (UID: \"31b93be8-46ba-485b-b5d3-639064825683\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-25jqm" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.632100 4558 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/84f45a25-f77d-4c4c-88b6-5bdc4c286f10-stats-auth\") pod \"router-default-5444994796-vkkjt\" (UID: \"84f45a25-f77d-4c4c-88b6-5bdc4c286f10\") " pod="openshift-ingress/router-default-5444994796-vkkjt" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.632201 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d2311c1a-55c3-437c-ae71-881861de70ff-config\") pod \"openshift-apiserver-operator-796bbdcf4f-h7vz8\" (UID: \"d2311c1a-55c3-437c-ae71-881861de70ff\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-h7vz8" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.632255 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fxvmt\" (UniqueName: \"kubernetes.io/projected/20de26f9-85a3-42a2-ba6b-0839c0657141-kube-api-access-fxvmt\") pod \"openshift-config-operator-7777fb866f-ppqsn\" (UID: \"20de26f9-85a3-42a2-ba6b-0839c0657141\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-ppqsn" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.632372 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8c24d\" (UniqueName: \"kubernetes.io/projected/4654626b-36d0-4072-a04c-d8ee0678fd50-kube-api-access-8c24d\") pod \"oauth-openshift-558db77b4-nwjzd\" (UID: \"4654626b-36d0-4072-a04c-d8ee0678fd50\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwjzd" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.632456 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/f6c99edf-1afb-476e-b63e-ee73c372499c-etcd-serving-ca\") pod \"apiserver-76f77b778f-sjzfb\" (UID: \"f6c99edf-1afb-476e-b63e-ee73c372499c\") " pod="openshift-apiserver/apiserver-76f77b778f-sjzfb" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.632502 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/f6c99edf-1afb-476e-b63e-ee73c372499c-encryption-config\") pod \"apiserver-76f77b778f-sjzfb\" (UID: \"f6c99edf-1afb-476e-b63e-ee73c372499c\") " pod="openshift-apiserver/apiserver-76f77b778f-sjzfb" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.632529 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/4654626b-36d0-4072-a04c-d8ee0678fd50-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-nwjzd\" (UID: \"4654626b-36d0-4072-a04c-d8ee0678fd50\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwjzd" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.632547 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/4654626b-36d0-4072-a04c-d8ee0678fd50-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-nwjzd\" (UID: \"4654626b-36d0-4072-a04c-d8ee0678fd50\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwjzd" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.632568 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/f6c99edf-1afb-476e-b63e-ee73c372499c-image-import-ca\") pod 
\"apiserver-76f77b778f-sjzfb\" (UID: \"f6c99edf-1afb-476e-b63e-ee73c372499c\") " pod="openshift-apiserver/apiserver-76f77b778f-sjzfb" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.632590 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-km9th\" (UniqueName: \"kubernetes.io/projected/7f63cf5d-23fe-4e8b-a9a7-3ce5c0c30aad-kube-api-access-km9th\") pod \"route-controller-manager-6576b87f9c-vmm7h\" (UID: \"7f63cf5d-23fe-4e8b-a9a7-3ce5c0c30aad\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-vmm7h" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.632627 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/84f45a25-f77d-4c4c-88b6-5bdc4c286f10-service-ca-bundle\") pod \"router-default-5444994796-vkkjt\" (UID: \"84f45a25-f77d-4c4c-88b6-5bdc4c286f10\") " pod="openshift-ingress/router-default-5444994796-vkkjt" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.633114 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4654626b-36d0-4072-a04c-d8ee0678fd50-audit-policies\") pod \"oauth-openshift-558db77b4-nwjzd\" (UID: \"4654626b-36d0-4072-a04c-d8ee0678fd50\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwjzd" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.633154 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/203c31e0-e1fc-4da7-8275-91295c22f307-trusted-ca\") pod \"console-operator-58897d9998-wmcfr\" (UID: \"203c31e0-e1fc-4da7-8275-91295c22f307\") " pod="openshift-console-operator/console-operator-58897d9998-wmcfr" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.633189 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/206f3850-b746-456c-9500-4d862f983a1e-proxy-tls\") pod \"machine-config-operator-74547568cd-dqf5x\" (UID: \"206f3850-b746-456c-9500-4d862f983a1e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-dqf5x" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.633230 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/f6c99edf-1afb-476e-b63e-ee73c372499c-etcd-client\") pod \"apiserver-76f77b778f-sjzfb\" (UID: \"f6c99edf-1afb-476e-b63e-ee73c372499c\") " pod="openshift-apiserver/apiserver-76f77b778f-sjzfb" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.633251 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f6c99edf-1afb-476e-b63e-ee73c372499c-audit-dir\") pod \"apiserver-76f77b778f-sjzfb\" (UID: \"f6c99edf-1afb-476e-b63e-ee73c372499c\") " pod="openshift-apiserver/apiserver-76f77b778f-sjzfb" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.633267 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/350b4e8e-0147-4d0a-b9c0-1cdcdc2312f1-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-hsls5\" (UID: \"350b4e8e-0147-4d0a-b9c0-1cdcdc2312f1\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-hsls5" Jan 20 16:44:00 crc 
kubenswrapper[4558]: I0120 16:44:00.633284 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7nm45\" (UniqueName: \"kubernetes.io/projected/206f3850-b746-456c-9500-4d862f983a1e-kube-api-access-7nm45\") pod \"machine-config-operator-74547568cd-dqf5x\" (UID: \"206f3850-b746-456c-9500-4d862f983a1e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-dqf5x" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.633328 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/350b4e8e-0147-4d0a-b9c0-1cdcdc2312f1-config\") pod \"machine-api-operator-5694c8668f-hsls5\" (UID: \"350b4e8e-0147-4d0a-b9c0-1cdcdc2312f1\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-hsls5" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.633348 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t227k\" (UniqueName: \"kubernetes.io/projected/34b7c77f-6a7d-43de-9ee4-bdba78dc8248-kube-api-access-t227k\") pod \"multus-admission-controller-857f4d67dd-7wfbg\" (UID: \"34b7c77f-6a7d-43de-9ee4-bdba78dc8248\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-7wfbg" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.633370 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/dd7b53bb-d740-497c-a36e-87e51d6f05a6-trusted-ca-bundle\") pod \"console-f9d7485db-lhmdh\" (UID: \"dd7b53bb-d740-497c-a36e-87e51d6f05a6\") " pod="openshift-console/console-f9d7485db-lhmdh" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.633388 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/4654626b-36d0-4072-a04c-d8ee0678fd50-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-nwjzd\" (UID: \"4654626b-36d0-4072-a04c-d8ee0678fd50\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwjzd" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.633408 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/203c31e0-e1fc-4da7-8275-91295c22f307-serving-cert\") pod \"console-operator-58897d9998-wmcfr\" (UID: \"203c31e0-e1fc-4da7-8275-91295c22f307\") " pod="openshift-console-operator/console-operator-58897d9998-wmcfr" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.633425 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7f63cf5d-23fe-4e8b-a9a7-3ce5c0c30aad-client-ca\") pod \"route-controller-manager-6576b87f9c-vmm7h\" (UID: \"7f63cf5d-23fe-4e8b-a9a7-3ce5c0c30aad\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-vmm7h" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.633431 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f6c99edf-1afb-476e-b63e-ee73c372499c-trusted-ca-bundle\") pod \"apiserver-76f77b778f-sjzfb\" (UID: \"f6c99edf-1afb-476e-b63e-ee73c372499c\") " pod="openshift-apiserver/apiserver-76f77b778f-sjzfb" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.633443 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"kube-api-access-m9sms\" (UniqueName: \"kubernetes.io/projected/d2311c1a-55c3-437c-ae71-881861de70ff-kube-api-access-m9sms\") pod \"openshift-apiserver-operator-796bbdcf4f-h7vz8\" (UID: \"d2311c1a-55c3-437c-ae71-881861de70ff\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-h7vz8" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.633462 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/dd7b53bb-d740-497c-a36e-87e51d6f05a6-console-oauth-config\") pod \"console-f9d7485db-lhmdh\" (UID: \"dd7b53bb-d740-497c-a36e-87e51d6f05a6\") " pod="openshift-console/console-f9d7485db-lhmdh" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.633480 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n4q45\" (UniqueName: \"kubernetes.io/projected/9e148c70-cb30-40cf-a333-65c0fdf0aa35-kube-api-access-n4q45\") pod \"authentication-operator-69f744f599-rh247\" (UID: \"9e148c70-cb30-40cf-a333-65c0fdf0aa35\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rh247" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.633497 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/4654626b-36d0-4072-a04c-d8ee0678fd50-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-nwjzd\" (UID: \"4654626b-36d0-4072-a04c-d8ee0678fd50\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwjzd" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.633514 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d49afd2e-470e-4bd4-a30d-5014f423510b-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-8xc26\" (UID: \"d49afd2e-470e-4bd4-a30d-5014f423510b\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8xc26" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.633533 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b88a23ca-e1d6-4e41-9aee-6f9c2c572f42-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-8rqtk\" (UID: \"b88a23ca-e1d6-4e41-9aee-6f9c2c572f42\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8rqtk" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.633548 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-24z8p\" (UniqueName: \"kubernetes.io/projected/d49afd2e-470e-4bd4-a30d-5014f423510b-kube-api-access-24z8p\") pod \"openshift-controller-manager-operator-756b6f6bc6-8xc26\" (UID: \"d49afd2e-470e-4bd4-a30d-5014f423510b\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8xc26" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.633563 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b88a23ca-e1d6-4e41-9aee-6f9c2c572f42-config\") pod \"kube-controller-manager-operator-78b949d7b-8rqtk\" (UID: \"b88a23ca-e1d6-4e41-9aee-6f9c2c572f42\") " 
pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8rqtk" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.633584 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/f6c99edf-1afb-476e-b63e-ee73c372499c-audit\") pod \"apiserver-76f77b778f-sjzfb\" (UID: \"f6c99edf-1afb-476e-b63e-ee73c372499c\") " pod="openshift-apiserver/apiserver-76f77b778f-sjzfb" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.633587 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/350b4e8e-0147-4d0a-b9c0-1cdcdc2312f1-images\") pod \"machine-api-operator-5694c8668f-hsls5\" (UID: \"350b4e8e-0147-4d0a-b9c0-1cdcdc2312f1\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-hsls5" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.633615 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n6m5z\" (UniqueName: \"kubernetes.io/projected/dd7b53bb-d740-497c-a36e-87e51d6f05a6-kube-api-access-n6m5z\") pod \"console-f9d7485db-lhmdh\" (UID: \"dd7b53bb-d740-497c-a36e-87e51d6f05a6\") " pod="openshift-console/console-f9d7485db-lhmdh" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.633652 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/1d076664-8f86-4e46-a67a-85539915bb04-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-7flzc\" (UID: \"1d076664-8f86-4e46-a67a-85539915bb04\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-7flzc" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.633674 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ca06f669-4a81-4e6c-abc1-2800cd58c188-config\") pod \"kube-apiserver-operator-766d6c64bb-49k6d\" (UID: \"ca06f669-4a81-4e6c-abc1-2800cd58c188\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-49k6d" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.633691 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/dd7b53bb-d740-497c-a36e-87e51d6f05a6-console-config\") pod \"console-f9d7485db-lhmdh\" (UID: \"dd7b53bb-d740-497c-a36e-87e51d6f05a6\") " pod="openshift-console/console-f9d7485db-lhmdh" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.633707 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9e148c70-cb30-40cf-a333-65c0fdf0aa35-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-rh247\" (UID: \"9e148c70-cb30-40cf-a333-65c0fdf0aa35\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rh247" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.633722 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4654626b-36d0-4072-a04c-d8ee0678fd50-audit-dir\") pod \"oauth-openshift-558db77b4-nwjzd\" (UID: \"4654626b-36d0-4072-a04c-d8ee0678fd50\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwjzd" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.633741 4558 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/20de26f9-85a3-42a2-ba6b-0839c0657141-serving-cert\") pod \"openshift-config-operator-7777fb866f-ppqsn\" (UID: \"20de26f9-85a3-42a2-ba6b-0839c0657141\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-ppqsn" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.633756 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7f63cf5d-23fe-4e8b-a9a7-3ce5c0c30aad-config\") pod \"route-controller-manager-6576b87f9c-vmm7h\" (UID: \"7f63cf5d-23fe-4e8b-a9a7-3ce5c0c30aad\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-vmm7h" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.633755 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/4654626b-36d0-4072-a04c-d8ee0678fd50-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-nwjzd\" (UID: \"4654626b-36d0-4072-a04c-d8ee0678fd50\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwjzd" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.633763 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d2311c1a-55c3-437c-ae71-881861de70ff-config\") pod \"openshift-apiserver-operator-796bbdcf4f-h7vz8\" (UID: \"d2311c1a-55c3-437c-ae71-881861de70ff\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-h7vz8" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.633774 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pr9jf\" (UniqueName: \"kubernetes.io/projected/a8761e3d-bfc2-481f-a29e-15f7566a5881-kube-api-access-pr9jf\") pod \"olm-operator-6b444d44fb-p5lzn\" (UID: \"a8761e3d-bfc2-481f-a29e-15f7566a5881\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-p5lzn" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.633840 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v44kv\" (UniqueName: \"kubernetes.io/projected/84f45a25-f77d-4c4c-88b6-5bdc4c286f10-kube-api-access-v44kv\") pod \"router-default-5444994796-vkkjt\" (UID: \"84f45a25-f77d-4c4c-88b6-5bdc4c286f10\") " pod="openshift-ingress/router-default-5444994796-vkkjt" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.633866 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/f6c99edf-1afb-476e-b63e-ee73c372499c-image-import-ca\") pod \"apiserver-76f77b778f-sjzfb\" (UID: \"f6c99edf-1afb-476e-b63e-ee73c372499c\") " pod="openshift-apiserver/apiserver-76f77b778f-sjzfb" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.633875 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c2b3de8e-9eaa-43d7-a061-674af5e035cb-client-ca\") pod \"controller-manager-879f6c89f-t225g\" (UID: \"c2b3de8e-9eaa-43d7-a061-674af5e035cb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-t225g" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.633902 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9w8mv\" (UniqueName: \"kubernetes.io/projected/f6c99edf-1afb-476e-b63e-ee73c372499c-kube-api-access-9w8mv\") pod 
\"apiserver-76f77b778f-sjzfb\" (UID: \"f6c99edf-1afb-476e-b63e-ee73c372499c\") " pod="openshift-apiserver/apiserver-76f77b778f-sjzfb" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.633925 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4654626b-36d0-4072-a04c-d8ee0678fd50-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-nwjzd\" (UID: \"4654626b-36d0-4072-a04c-d8ee0678fd50\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwjzd" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.633946 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/1dd6e1c0-24f1-43fc-81d6-b0d0e3ed36c2-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-c97pw\" (UID: \"1dd6e1c0-24f1-43fc-81d6-b0d0e3ed36c2\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-c97pw" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.633969 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dqz5k\" (UniqueName: \"kubernetes.io/projected/1dd6e1c0-24f1-43fc-81d6-b0d0e3ed36c2-kube-api-access-dqz5k\") pod \"cluster-samples-operator-665b6dd947-c97pw\" (UID: \"1dd6e1c0-24f1-43fc-81d6-b0d0e3ed36c2\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-c97pw" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.633988 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/4654626b-36d0-4072-a04c-d8ee0678fd50-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-nwjzd\" (UID: \"4654626b-36d0-4072-a04c-d8ee0678fd50\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwjzd" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.634014 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5kw5d\" (UniqueName: \"kubernetes.io/projected/31b93be8-46ba-485b-b5d3-639064825683-kube-api-access-5kw5d\") pod \"service-ca-operator-777779d784-25jqm\" (UID: \"31b93be8-46ba-485b-b5d3-639064825683\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-25jqm" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.634034 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ca06f669-4a81-4e6c-abc1-2800cd58c188-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-49k6d\" (UID: \"ca06f669-4a81-4e6c-abc1-2800cd58c188\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-49k6d" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.634059 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c2b3de8e-9eaa-43d7-a061-674af5e035cb-config\") pod \"controller-manager-879f6c89f-t225g\" (UID: \"c2b3de8e-9eaa-43d7-a061-674af5e035cb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-t225g" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.634078 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f6c99edf-1afb-476e-b63e-ee73c372499c-serving-cert\") pod \"apiserver-76f77b778f-sjzfb\" (UID: 
\"f6c99edf-1afb-476e-b63e-ee73c372499c\") " pod="openshift-apiserver/apiserver-76f77b778f-sjzfb" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.634097 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/dd7b53bb-d740-497c-a36e-87e51d6f05a6-console-serving-cert\") pod \"console-f9d7485db-lhmdh\" (UID: \"dd7b53bb-d740-497c-a36e-87e51d6f05a6\") " pod="openshift-console/console-f9d7485db-lhmdh" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.634115 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7f63cf5d-23fe-4e8b-a9a7-3ce5c0c30aad-serving-cert\") pod \"route-controller-manager-6576b87f9c-vmm7h\" (UID: \"7f63cf5d-23fe-4e8b-a9a7-3ce5c0c30aad\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-vmm7h" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.634139 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c2b3de8e-9eaa-43d7-a061-674af5e035cb-serving-cert\") pod \"controller-manager-879f6c89f-t225g\" (UID: \"c2b3de8e-9eaa-43d7-a061-674af5e035cb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-t225g" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.634156 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j9dpp\" (UniqueName: \"kubernetes.io/projected/1d076664-8f86-4e46-a67a-85539915bb04-kube-api-access-j9dpp\") pod \"cluster-image-registry-operator-dc59b4c8b-7flzc\" (UID: \"1d076664-8f86-4e46-a67a-85539915bb04\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-7flzc" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.634196 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/f6c99edf-1afb-476e-b63e-ee73c372499c-node-pullsecrets\") pod \"apiserver-76f77b778f-sjzfb\" (UID: \"f6c99edf-1afb-476e-b63e-ee73c372499c\") " pod="openshift-apiserver/apiserver-76f77b778f-sjzfb" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.634215 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/84f45a25-f77d-4c4c-88b6-5bdc4c286f10-metrics-certs\") pod \"router-default-5444994796-vkkjt\" (UID: \"84f45a25-f77d-4c4c-88b6-5bdc4c286f10\") " pod="openshift-ingress/router-default-5444994796-vkkjt" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.634231 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/a8761e3d-bfc2-481f-a29e-15f7566a5881-profile-collector-cert\") pod \"olm-operator-6b444d44fb-p5lzn\" (UID: \"a8761e3d-bfc2-481f-a29e-15f7566a5881\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-p5lzn" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.634258 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/206f3850-b746-456c-9500-4d862f983a1e-auth-proxy-config\") pod \"machine-config-operator-74547568cd-dqf5x\" (UID: \"206f3850-b746-456c-9500-4d862f983a1e\") " 
pod="openshift-machine-config-operator/machine-config-operator-74547568cd-dqf5x" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.634275 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/4654626b-36d0-4072-a04c-d8ee0678fd50-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-nwjzd\" (UID: \"4654626b-36d0-4072-a04c-d8ee0678fd50\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwjzd" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.634295 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/34b7c77f-6a7d-43de-9ee4-bdba78dc8248-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-7wfbg\" (UID: \"34b7c77f-6a7d-43de-9ee4-bdba78dc8248\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-7wfbg" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.634331 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nfwxp\" (UniqueName: \"kubernetes.io/projected/c2b3de8e-9eaa-43d7-a061-674af5e035cb-kube-api-access-nfwxp\") pod \"controller-manager-879f6c89f-t225g\" (UID: \"c2b3de8e-9eaa-43d7-a061-674af5e035cb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-t225g" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.634353 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9e148c70-cb30-40cf-a333-65c0fdf0aa35-config\") pod \"authentication-operator-69f744f599-rh247\" (UID: \"9e148c70-cb30-40cf-a333-65c0fdf0aa35\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rh247" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.634376 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/4654626b-36d0-4072-a04c-d8ee0678fd50-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-nwjzd\" (UID: \"4654626b-36d0-4072-a04c-d8ee0678fd50\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwjzd" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.634393 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4654626b-36d0-4072-a04c-d8ee0678fd50-audit-policies\") pod \"oauth-openshift-558db77b4-nwjzd\" (UID: \"4654626b-36d0-4072-a04c-d8ee0678fd50\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwjzd" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.634394 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/203c31e0-e1fc-4da7-8275-91295c22f307-config\") pod \"console-operator-58897d9998-wmcfr\" (UID: \"203c31e0-e1fc-4da7-8275-91295c22f307\") " pod="openshift-console-operator/console-operator-58897d9998-wmcfr" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.634446 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b88a23ca-e1d6-4e41-9aee-6f9c2c572f42-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-8rqtk\" (UID: \"b88a23ca-e1d6-4e41-9aee-6f9c2c572f42\") " 
pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8rqtk" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.634473 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1d076664-8f86-4e46-a67a-85539915bb04-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-7flzc\" (UID: \"1d076664-8f86-4e46-a67a-85539915bb04\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-7flzc" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.634496 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/350b4e8e-0147-4d0a-b9c0-1cdcdc2312f1-config\") pod \"machine-api-operator-5694c8668f-hsls5\" (UID: \"350b4e8e-0147-4d0a-b9c0-1cdcdc2312f1\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-hsls5" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.634506 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/dd7b53bb-d740-497c-a36e-87e51d6f05a6-service-ca\") pod \"console-f9d7485db-lhmdh\" (UID: \"dd7b53bb-d740-497c-a36e-87e51d6f05a6\") " pod="openshift-console/console-f9d7485db-lhmdh" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.634553 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/4654626b-36d0-4072-a04c-d8ee0678fd50-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-nwjzd\" (UID: \"4654626b-36d0-4072-a04c-d8ee0678fd50\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwjzd" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.634577 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/4654626b-36d0-4072-a04c-d8ee0678fd50-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-nwjzd\" (UID: \"4654626b-36d0-4072-a04c-d8ee0678fd50\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwjzd" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.634614 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/206f3850-b746-456c-9500-4d862f983a1e-images\") pod \"machine-config-operator-74547568cd-dqf5x\" (UID: \"206f3850-b746-456c-9500-4d862f983a1e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-dqf5x" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.634637 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/dd7b53bb-d740-497c-a36e-87e51d6f05a6-oauth-serving-cert\") pod \"console-f9d7485db-lhmdh\" (UID: \"dd7b53bb-d740-497c-a36e-87e51d6f05a6\") " pod="openshift-console/console-f9d7485db-lhmdh" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.634658 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9e148c70-cb30-40cf-a333-65c0fdf0aa35-service-ca-bundle\") pod \"authentication-operator-69f744f599-rh247\" (UID: \"9e148c70-cb30-40cf-a333-65c0fdf0aa35\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rh247" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 
16:44:00.634674 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f6c99edf-1afb-476e-b63e-ee73c372499c-config\") pod \"apiserver-76f77b778f-sjzfb\" (UID: \"f6c99edf-1afb-476e-b63e-ee73c372499c\") " pod="openshift-apiserver/apiserver-76f77b778f-sjzfb" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.634695 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7vbtb\" (UniqueName: \"kubernetes.io/projected/203c31e0-e1fc-4da7-8275-91295c22f307-kube-api-access-7vbtb\") pod \"console-operator-58897d9998-wmcfr\" (UID: \"203c31e0-e1fc-4da7-8275-91295c22f307\") " pod="openshift-console-operator/console-operator-58897d9998-wmcfr" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.634715 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c2b3de8e-9eaa-43d7-a061-674af5e035cb-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-t225g\" (UID: \"c2b3de8e-9eaa-43d7-a061-674af5e035cb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-t225g" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.634735 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wwj4x\" (UniqueName: \"kubernetes.io/projected/04ab3f66-5485-4b6f-9293-ed76b2695d50-kube-api-access-wwj4x\") pod \"catalog-operator-68c6474976-zd9qc\" (UID: \"04ab3f66-5485-4b6f-9293-ed76b2695d50\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zd9qc" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.634825 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f6c99edf-1afb-476e-b63e-ee73c372499c-audit-dir\") pod \"apiserver-76f77b778f-sjzfb\" (UID: \"f6c99edf-1afb-476e-b63e-ee73c372499c\") " pod="openshift-apiserver/apiserver-76f77b778f-sjzfb" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.634899 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/dd7b53bb-d740-497c-a36e-87e51d6f05a6-trusted-ca-bundle\") pod \"console-f9d7485db-lhmdh\" (UID: \"dd7b53bb-d740-497c-a36e-87e51d6f05a6\") " pod="openshift-console/console-f9d7485db-lhmdh" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.635106 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/dd7b53bb-d740-497c-a36e-87e51d6f05a6-service-ca\") pod \"console-f9d7485db-lhmdh\" (UID: \"dd7b53bb-d740-497c-a36e-87e51d6f05a6\") " pod="openshift-console/console-f9d7485db-lhmdh" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.635143 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c2b3de8e-9eaa-43d7-a061-674af5e035cb-client-ca\") pod \"controller-manager-879f6c89f-t225g\" (UID: \"c2b3de8e-9eaa-43d7-a061-674af5e035cb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-t225g" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.635420 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/4654626b-36d0-4072-a04c-d8ee0678fd50-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-nwjzd\" (UID: 
\"4654626b-36d0-4072-a04c-d8ee0678fd50\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwjzd" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.636725 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-4w5mk"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.636733 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4654626b-36d0-4072-a04c-d8ee0678fd50-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-nwjzd\" (UID: \"4654626b-36d0-4072-a04c-d8ee0678fd50\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwjzd" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.636952 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/f6c99edf-1afb-476e-b63e-ee73c372499c-etcd-client\") pod \"apiserver-76f77b778f-sjzfb\" (UID: \"f6c99edf-1afb-476e-b63e-ee73c372499c\") " pod="openshift-apiserver/apiserver-76f77b778f-sjzfb" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.637233 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/f6c99edf-1afb-476e-b63e-ee73c372499c-audit\") pod \"apiserver-76f77b778f-sjzfb\" (UID: \"f6c99edf-1afb-476e-b63e-ee73c372499c\") " pod="openshift-apiserver/apiserver-76f77b778f-sjzfb" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.637286 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/f6c99edf-1afb-476e-b63e-ee73c372499c-encryption-config\") pod \"apiserver-76f77b778f-sjzfb\" (UID: \"f6c99edf-1afb-476e-b63e-ee73c372499c\") " pod="openshift-apiserver/apiserver-76f77b778f-sjzfb" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.637286 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4654626b-36d0-4072-a04c-d8ee0678fd50-audit-dir\") pod \"oauth-openshift-558db77b4-nwjzd\" (UID: \"4654626b-36d0-4072-a04c-d8ee0678fd50\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwjzd" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.637554 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/dd7b53bb-d740-497c-a36e-87e51d6f05a6-oauth-serving-cert\") pod \"console-f9d7485db-lhmdh\" (UID: \"dd7b53bb-d740-497c-a36e-87e51d6f05a6\") " pod="openshift-console/console-f9d7485db-lhmdh" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.637579 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9e148c70-cb30-40cf-a333-65c0fdf0aa35-serving-cert\") pod \"authentication-operator-69f744f599-rh247\" (UID: \"9e148c70-cb30-40cf-a333-65c0fdf0aa35\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rh247" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.637633 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9e148c70-cb30-40cf-a333-65c0fdf0aa35-service-ca-bundle\") pod \"authentication-operator-69f744f599-rh247\" (UID: \"9e148c70-cb30-40cf-a333-65c0fdf0aa35\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rh247" Jan 20 16:44:00 crc 
kubenswrapper[4558]: I0120 16:44:00.638039 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9e148c70-cb30-40cf-a333-65c0fdf0aa35-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-rh247\" (UID: \"9e148c70-cb30-40cf-a333-65c0fdf0aa35\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rh247" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.638085 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/4654626b-36d0-4072-a04c-d8ee0678fd50-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-nwjzd\" (UID: \"4654626b-36d0-4072-a04c-d8ee0678fd50\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwjzd" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.645477 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d2311c1a-55c3-437c-ae71-881861de70ff-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-h7vz8\" (UID: \"d2311c1a-55c3-437c-ae71-881861de70ff\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-h7vz8" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.646156 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c2b3de8e-9eaa-43d7-a061-674af5e035cb-config\") pod \"controller-manager-879f6c89f-t225g\" (UID: \"c2b3de8e-9eaa-43d7-a061-674af5e035cb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-t225g" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.646319 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c2b3de8e-9eaa-43d7-a061-674af5e035cb-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-t225g\" (UID: \"c2b3de8e-9eaa-43d7-a061-674af5e035cb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-t225g" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.646577 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9e148c70-cb30-40cf-a333-65c0fdf0aa35-config\") pod \"authentication-operator-69f744f599-rh247\" (UID: \"9e148c70-cb30-40cf-a333-65c0fdf0aa35\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rh247" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.646585 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/4654626b-36d0-4072-a04c-d8ee0678fd50-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-nwjzd\" (UID: \"4654626b-36d0-4072-a04c-d8ee0678fd50\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwjzd" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.639944 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29482110-7p5pb"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.646706 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-7wfbg"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.646547 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f6c99edf-1afb-476e-b63e-ee73c372499c-config\") pod 
\"apiserver-76f77b778f-sjzfb\" (UID: \"f6c99edf-1afb-476e-b63e-ee73c372499c\") " pod="openshift-apiserver/apiserver-76f77b778f-sjzfb" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.646735 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/f6c99edf-1afb-476e-b63e-ee73c372499c-node-pullsecrets\") pod \"apiserver-76f77b778f-sjzfb\" (UID: \"f6c99edf-1afb-476e-b63e-ee73c372499c\") " pod="openshift-apiserver/apiserver-76f77b778f-sjzfb" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.646723 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/dd7b53bb-d740-497c-a36e-87e51d6f05a6-console-oauth-config\") pod \"console-f9d7485db-lhmdh\" (UID: \"dd7b53bb-d740-497c-a36e-87e51d6f05a6\") " pod="openshift-console/console-f9d7485db-lhmdh" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.647306 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/20de26f9-85a3-42a2-ba6b-0839c0657141-serving-cert\") pod \"openshift-config-operator-7777fb866f-ppqsn\" (UID: \"20de26f9-85a3-42a2-ba6b-0839c0657141\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-ppqsn" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.647401 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/dd7b53bb-d740-497c-a36e-87e51d6f05a6-console-config\") pod \"console-f9d7485db-lhmdh\" (UID: \"dd7b53bb-d740-497c-a36e-87e51d6f05a6\") " pod="openshift-console/console-f9d7485db-lhmdh" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.647423 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/1dd6e1c0-24f1-43fc-81d6-b0d0e3ed36c2-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-c97pw\" (UID: \"1dd6e1c0-24f1-43fc-81d6-b0d0e3ed36c2\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-c97pw" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.647592 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/4654626b-36d0-4072-a04c-d8ee0678fd50-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-nwjzd\" (UID: \"4654626b-36d0-4072-a04c-d8ee0678fd50\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwjzd" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.648311 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/dd7b53bb-d740-497c-a36e-87e51d6f05a6-console-serving-cert\") pod \"console-f9d7485db-lhmdh\" (UID: \"dd7b53bb-d740-497c-a36e-87e51d6f05a6\") " pod="openshift-console/console-f9d7485db-lhmdh" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.648366 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-pltl7"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.649131 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/4654626b-36d0-4072-a04c-d8ee0678fd50-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-nwjzd\" (UID: 
\"4654626b-36d0-4072-a04c-d8ee0678fd50\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwjzd" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.649329 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/350b4e8e-0147-4d0a-b9c0-1cdcdc2312f1-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-hsls5\" (UID: \"350b4e8e-0147-4d0a-b9c0-1cdcdc2312f1\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-hsls5" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.649488 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/4654626b-36d0-4072-a04c-d8ee0678fd50-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-nwjzd\" (UID: \"4654626b-36d0-4072-a04c-d8ee0678fd50\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwjzd" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.649793 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/4654626b-36d0-4072-a04c-d8ee0678fd50-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-nwjzd\" (UID: \"4654626b-36d0-4072-a04c-d8ee0678fd50\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwjzd" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.649815 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c2b3de8e-9eaa-43d7-a061-674af5e035cb-serving-cert\") pod \"controller-manager-879f6c89f-t225g\" (UID: \"c2b3de8e-9eaa-43d7-a061-674af5e035cb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-t225g" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.652786 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/4654626b-36d0-4072-a04c-d8ee0678fd50-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-nwjzd\" (UID: \"4654626b-36d0-4072-a04c-d8ee0678fd50\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwjzd" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.652931 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/4654626b-36d0-4072-a04c-d8ee0678fd50-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-nwjzd\" (UID: \"4654626b-36d0-4072-a04c-d8ee0678fd50\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwjzd" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.653081 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.653403 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f6c99edf-1afb-476e-b63e-ee73c372499c-serving-cert\") pod \"apiserver-76f77b778f-sjzfb\" (UID: \"f6c99edf-1afb-476e-b63e-ee73c372499c\") " pod="openshift-apiserver/apiserver-76f77b778f-sjzfb" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.653861 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zd9qc"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.654706 4558 kubelet.go:2428] 
"SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-4t4rs"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.656289 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-2j5lz"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.657132 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-t4dxf"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.660795 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-9cqxr"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.662257 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-9cqxr" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.663080 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-gbvfn"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.664024 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-gbvfn" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.664235 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-9cqxr"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.665654 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-gbvfn"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.666877 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-cp5zh"] Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.667419 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-cp5zh" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.670643 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.690793 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.710809 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.731004 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.735551 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/203c31e0-e1fc-4da7-8275-91295c22f307-config\") pod \"console-operator-58897d9998-wmcfr\" (UID: \"203c31e0-e1fc-4da7-8275-91295c22f307\") " pod="openshift-console-operator/console-operator-58897d9998-wmcfr" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.735597 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1d076664-8f86-4e46-a67a-85539915bb04-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-7flzc\" (UID: \"1d076664-8f86-4e46-a67a-85539915bb04\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-7flzc" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.735645 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b88a23ca-e1d6-4e41-9aee-6f9c2c572f42-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-8rqtk\" (UID: \"b88a23ca-e1d6-4e41-9aee-6f9c2c572f42\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8rqtk" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.735682 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/206f3850-b746-456c-9500-4d862f983a1e-images\") pod \"machine-config-operator-74547568cd-dqf5x\" (UID: \"206f3850-b746-456c-9500-4d862f983a1e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-dqf5x" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.735707 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7vbtb\" (UniqueName: \"kubernetes.io/projected/203c31e0-e1fc-4da7-8275-91295c22f307-kube-api-access-7vbtb\") pod \"console-operator-58897d9998-wmcfr\" (UID: \"203c31e0-e1fc-4da7-8275-91295c22f307\") " pod="openshift-console-operator/console-operator-58897d9998-wmcfr" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.735726 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wwj4x\" (UniqueName: \"kubernetes.io/projected/04ab3f66-5485-4b6f-9293-ed76b2695d50-kube-api-access-wwj4x\") pod \"catalog-operator-68c6474976-zd9qc\" (UID: \"04ab3f66-5485-4b6f-9293-ed76b2695d50\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zd9qc" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.735745 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/04ab3f66-5485-4b6f-9293-ed76b2695d50-profile-collector-cert\") pod \"catalog-operator-68c6474976-zd9qc\" (UID: \"04ab3f66-5485-4b6f-9293-ed76b2695d50\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zd9qc" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.735774 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/a8761e3d-bfc2-481f-a29e-15f7566a5881-srv-cert\") pod \"olm-operator-6b444d44fb-p5lzn\" (UID: \"a8761e3d-bfc2-481f-a29e-15f7566a5881\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-p5lzn" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.735797 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/1d076664-8f86-4e46-a67a-85539915bb04-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-7flzc\" (UID: \"1d076664-8f86-4e46-a67a-85539915bb04\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-7flzc" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.735815 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ca06f669-4a81-4e6c-abc1-2800cd58c188-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-49k6d\" (UID: \"ca06f669-4a81-4e6c-abc1-2800cd58c188\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-49k6d" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.735837 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/31b93be8-46ba-485b-b5d3-639064825683-config\") pod \"service-ca-operator-777779d784-25jqm\" (UID: \"31b93be8-46ba-485b-b5d3-639064825683\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-25jqm" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.735853 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/04ab3f66-5485-4b6f-9293-ed76b2695d50-srv-cert\") pod \"catalog-operator-68c6474976-zd9qc\" (UID: \"04ab3f66-5485-4b6f-9293-ed76b2695d50\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zd9qc" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.735891 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/84f45a25-f77d-4c4c-88b6-5bdc4c286f10-default-certificate\") pod \"router-default-5444994796-vkkjt\" (UID: \"84f45a25-f77d-4c4c-88b6-5bdc4c286f10\") " pod="openshift-ingress/router-default-5444994796-vkkjt" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.735912 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d49afd2e-470e-4bd4-a30d-5014f423510b-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-8xc26\" (UID: \"d49afd2e-470e-4bd4-a30d-5014f423510b\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8xc26" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.735949 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/31b93be8-46ba-485b-b5d3-639064825683-serving-cert\") pod \"service-ca-operator-777779d784-25jqm\" (UID: 
\"31b93be8-46ba-485b-b5d3-639064825683\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-25jqm" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.735965 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/84f45a25-f77d-4c4c-88b6-5bdc4c286f10-stats-auth\") pod \"router-default-5444994796-vkkjt\" (UID: \"84f45a25-f77d-4c4c-88b6-5bdc4c286f10\") " pod="openshift-ingress/router-default-5444994796-vkkjt" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.735987 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-km9th\" (UniqueName: \"kubernetes.io/projected/7f63cf5d-23fe-4e8b-a9a7-3ce5c0c30aad-kube-api-access-km9th\") pod \"route-controller-manager-6576b87f9c-vmm7h\" (UID: \"7f63cf5d-23fe-4e8b-a9a7-3ce5c0c30aad\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-vmm7h" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.736002 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/84f45a25-f77d-4c4c-88b6-5bdc4c286f10-service-ca-bundle\") pod \"router-default-5444994796-vkkjt\" (UID: \"84f45a25-f77d-4c4c-88b6-5bdc4c286f10\") " pod="openshift-ingress/router-default-5444994796-vkkjt" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.736019 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/206f3850-b746-456c-9500-4d862f983a1e-proxy-tls\") pod \"machine-config-operator-74547568cd-dqf5x\" (UID: \"206f3850-b746-456c-9500-4d862f983a1e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-dqf5x" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.736039 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/203c31e0-e1fc-4da7-8275-91295c22f307-trusted-ca\") pod \"console-operator-58897d9998-wmcfr\" (UID: \"203c31e0-e1fc-4da7-8275-91295c22f307\") " pod="openshift-console-operator/console-operator-58897d9998-wmcfr" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.736056 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7nm45\" (UniqueName: \"kubernetes.io/projected/206f3850-b746-456c-9500-4d862f983a1e-kube-api-access-7nm45\") pod \"machine-config-operator-74547568cd-dqf5x\" (UID: \"206f3850-b746-456c-9500-4d862f983a1e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-dqf5x" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.736077 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t227k\" (UniqueName: \"kubernetes.io/projected/34b7c77f-6a7d-43de-9ee4-bdba78dc8248-kube-api-access-t227k\") pod \"multus-admission-controller-857f4d67dd-7wfbg\" (UID: \"34b7c77f-6a7d-43de-9ee4-bdba78dc8248\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-7wfbg" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.736111 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/203c31e0-e1fc-4da7-8275-91295c22f307-serving-cert\") pod \"console-operator-58897d9998-wmcfr\" (UID: \"203c31e0-e1fc-4da7-8275-91295c22f307\") " pod="openshift-console-operator/console-operator-58897d9998-wmcfr" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 
16:44:00.736128 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7f63cf5d-23fe-4e8b-a9a7-3ce5c0c30aad-client-ca\") pod \"route-controller-manager-6576b87f9c-vmm7h\" (UID: \"7f63cf5d-23fe-4e8b-a9a7-3ce5c0c30aad\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-vmm7h" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.736144 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d49afd2e-470e-4bd4-a30d-5014f423510b-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-8xc26\" (UID: \"d49afd2e-470e-4bd4-a30d-5014f423510b\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8xc26" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.736159 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b88a23ca-e1d6-4e41-9aee-6f9c2c572f42-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-8rqtk\" (UID: \"b88a23ca-e1d6-4e41-9aee-6f9c2c572f42\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8rqtk" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.736199 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-24z8p\" (UniqueName: \"kubernetes.io/projected/d49afd2e-470e-4bd4-a30d-5014f423510b-kube-api-access-24z8p\") pod \"openshift-controller-manager-operator-756b6f6bc6-8xc26\" (UID: \"d49afd2e-470e-4bd4-a30d-5014f423510b\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8xc26" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.736218 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b88a23ca-e1d6-4e41-9aee-6f9c2c572f42-config\") pod \"kube-controller-manager-operator-78b949d7b-8rqtk\" (UID: \"b88a23ca-e1d6-4e41-9aee-6f9c2c572f42\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8rqtk" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.736239 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/1d076664-8f86-4e46-a67a-85539915bb04-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-7flzc\" (UID: \"1d076664-8f86-4e46-a67a-85539915bb04\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-7flzc" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.736258 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ca06f669-4a81-4e6c-abc1-2800cd58c188-config\") pod \"kube-apiserver-operator-766d6c64bb-49k6d\" (UID: \"ca06f669-4a81-4e6c-abc1-2800cd58c188\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-49k6d" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.736280 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7f63cf5d-23fe-4e8b-a9a7-3ce5c0c30aad-config\") pod \"route-controller-manager-6576b87f9c-vmm7h\" (UID: \"7f63cf5d-23fe-4e8b-a9a7-3ce5c0c30aad\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-vmm7h" Jan 20 
16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.736299 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pr9jf\" (UniqueName: \"kubernetes.io/projected/a8761e3d-bfc2-481f-a29e-15f7566a5881-kube-api-access-pr9jf\") pod \"olm-operator-6b444d44fb-p5lzn\" (UID: \"a8761e3d-bfc2-481f-a29e-15f7566a5881\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-p5lzn" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.736325 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v44kv\" (UniqueName: \"kubernetes.io/projected/84f45a25-f77d-4c4c-88b6-5bdc4c286f10-kube-api-access-v44kv\") pod \"router-default-5444994796-vkkjt\" (UID: \"84f45a25-f77d-4c4c-88b6-5bdc4c286f10\") " pod="openshift-ingress/router-default-5444994796-vkkjt" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.736354 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5kw5d\" (UniqueName: \"kubernetes.io/projected/31b93be8-46ba-485b-b5d3-639064825683-kube-api-access-5kw5d\") pod \"service-ca-operator-777779d784-25jqm\" (UID: \"31b93be8-46ba-485b-b5d3-639064825683\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-25jqm" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.736371 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ca06f669-4a81-4e6c-abc1-2800cd58c188-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-49k6d\" (UID: \"ca06f669-4a81-4e6c-abc1-2800cd58c188\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-49k6d" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.736389 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7f63cf5d-23fe-4e8b-a9a7-3ce5c0c30aad-serving-cert\") pod \"route-controller-manager-6576b87f9c-vmm7h\" (UID: \"7f63cf5d-23fe-4e8b-a9a7-3ce5c0c30aad\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-vmm7h" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.736398 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/203c31e0-e1fc-4da7-8275-91295c22f307-config\") pod \"console-operator-58897d9998-wmcfr\" (UID: \"203c31e0-e1fc-4da7-8275-91295c22f307\") " pod="openshift-console-operator/console-operator-58897d9998-wmcfr" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.736411 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j9dpp\" (UniqueName: \"kubernetes.io/projected/1d076664-8f86-4e46-a67a-85539915bb04-kube-api-access-j9dpp\") pod \"cluster-image-registry-operator-dc59b4c8b-7flzc\" (UID: \"1d076664-8f86-4e46-a67a-85539915bb04\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-7flzc" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.736430 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/a8761e3d-bfc2-481f-a29e-15f7566a5881-profile-collector-cert\") pod \"olm-operator-6b444d44fb-p5lzn\" (UID: \"a8761e3d-bfc2-481f-a29e-15f7566a5881\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-p5lzn" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.736451 4558 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/84f45a25-f77d-4c4c-88b6-5bdc4c286f10-metrics-certs\") pod \"router-default-5444994796-vkkjt\" (UID: \"84f45a25-f77d-4c4c-88b6-5bdc4c286f10\") " pod="openshift-ingress/router-default-5444994796-vkkjt" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.736467 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/206f3850-b746-456c-9500-4d862f983a1e-auth-proxy-config\") pod \"machine-config-operator-74547568cd-dqf5x\" (UID: \"206f3850-b746-456c-9500-4d862f983a1e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-dqf5x" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.736502 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/34b7c77f-6a7d-43de-9ee4-bdba78dc8248-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-7wfbg\" (UID: \"34b7c77f-6a7d-43de-9ee4-bdba78dc8248\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-7wfbg" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.736669 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1d076664-8f86-4e46-a67a-85539915bb04-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-7flzc\" (UID: \"1d076664-8f86-4e46-a67a-85539915bb04\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-7flzc" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.737460 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/203c31e0-e1fc-4da7-8275-91295c22f307-trusted-ca\") pod \"console-operator-58897d9998-wmcfr\" (UID: \"203c31e0-e1fc-4da7-8275-91295c22f307\") " pod="openshift-console-operator/console-operator-58897d9998-wmcfr" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.737594 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/206f3850-b746-456c-9500-4d862f983a1e-auth-proxy-config\") pod \"machine-config-operator-74547568cd-dqf5x\" (UID: \"206f3850-b746-456c-9500-4d862f983a1e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-dqf5x" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.737927 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7f63cf5d-23fe-4e8b-a9a7-3ce5c0c30aad-client-ca\") pod \"route-controller-manager-6576b87f9c-vmm7h\" (UID: \"7f63cf5d-23fe-4e8b-a9a7-3ce5c0c30aad\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-vmm7h" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.738015 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7f63cf5d-23fe-4e8b-a9a7-3ce5c0c30aad-config\") pod \"route-controller-manager-6576b87f9c-vmm7h\" (UID: \"7f63cf5d-23fe-4e8b-a9a7-3ce5c0c30aad\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-vmm7h" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.739886 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7f63cf5d-23fe-4e8b-a9a7-3ce5c0c30aad-serving-cert\") pod 
\"route-controller-manager-6576b87f9c-vmm7h\" (UID: \"7f63cf5d-23fe-4e8b-a9a7-3ce5c0c30aad\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-vmm7h" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.739919 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/1d076664-8f86-4e46-a67a-85539915bb04-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-7flzc\" (UID: \"1d076664-8f86-4e46-a67a-85539915bb04\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-7flzc" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.740076 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/203c31e0-e1fc-4da7-8275-91295c22f307-serving-cert\") pod \"console-operator-58897d9998-wmcfr\" (UID: \"203c31e0-e1fc-4da7-8275-91295c22f307\") " pod="openshift-console-operator/console-operator-58897d9998-wmcfr" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.750937 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.759679 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d49afd2e-470e-4bd4-a30d-5014f423510b-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-8xc26\" (UID: \"d49afd2e-470e-4bd4-a30d-5014f423510b\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8xc26" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.771199 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.792459 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.811499 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.817539 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d49afd2e-470e-4bd4-a30d-5014f423510b-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-8xc26\" (UID: \"d49afd2e-470e-4bd4-a30d-5014f423510b\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8xc26" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.831567 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.851122 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.870673 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.891648 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Jan 20 
16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.910908 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.931294 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.951065 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.970810 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Jan 20 16:44:00 crc kubenswrapper[4558]: I0120 16:44:00.990445 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Jan 20 16:44:01 crc kubenswrapper[4558]: I0120 16:44:01.011037 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Jan 20 16:44:01 crc kubenswrapper[4558]: I0120 16:44:01.031406 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Jan 20 16:44:01 crc kubenswrapper[4558]: I0120 16:44:01.050559 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Jan 20 16:44:01 crc kubenswrapper[4558]: I0120 16:44:01.057772 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b88a23ca-e1d6-4e41-9aee-6f9c2c572f42-config\") pod \"kube-controller-manager-operator-78b949d7b-8rqtk\" (UID: \"b88a23ca-e1d6-4e41-9aee-6f9c2c572f42\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8rqtk" Jan 20 16:44:01 crc kubenswrapper[4558]: I0120 16:44:01.071985 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Jan 20 16:44:01 crc kubenswrapper[4558]: I0120 16:44:01.091488 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Jan 20 16:44:01 crc kubenswrapper[4558]: I0120 16:44:01.099414 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b88a23ca-e1d6-4e41-9aee-6f9c2c572f42-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-8rqtk\" (UID: \"b88a23ca-e1d6-4e41-9aee-6f9c2c572f42\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8rqtk" Jan 20 16:44:01 crc kubenswrapper[4558]: I0120 16:44:01.112340 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Jan 20 16:44:01 crc kubenswrapper[4558]: I0120 16:44:01.131653 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Jan 20 16:44:01 crc kubenswrapper[4558]: I0120 16:44:01.137504 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: 
\"kubernetes.io/configmap/206f3850-b746-456c-9500-4d862f983a1e-images\") pod \"machine-config-operator-74547568cd-dqf5x\" (UID: \"206f3850-b746-456c-9500-4d862f983a1e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-dqf5x" Jan 20 16:44:01 crc kubenswrapper[4558]: I0120 16:44:01.152648 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Jan 20 16:44:01 crc kubenswrapper[4558]: I0120 16:44:01.172266 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Jan 20 16:44:01 crc kubenswrapper[4558]: I0120 16:44:01.179135 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/206f3850-b746-456c-9500-4d862f983a1e-proxy-tls\") pod \"machine-config-operator-74547568cd-dqf5x\" (UID: \"206f3850-b746-456c-9500-4d862f983a1e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-dqf5x" Jan 20 16:44:01 crc kubenswrapper[4558]: I0120 16:44:01.191125 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Jan 20 16:44:01 crc kubenswrapper[4558]: I0120 16:44:01.211733 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Jan 20 16:44:01 crc kubenswrapper[4558]: I0120 16:44:01.231204 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Jan 20 16:44:01 crc kubenswrapper[4558]: I0120 16:44:01.239311 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ca06f669-4a81-4e6c-abc1-2800cd58c188-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-49k6d\" (UID: \"ca06f669-4a81-4e6c-abc1-2800cd58c188\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-49k6d" Jan 20 16:44:01 crc kubenswrapper[4558]: I0120 16:44:01.251006 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Jan 20 16:44:01 crc kubenswrapper[4558]: I0120 16:44:01.258331 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ca06f669-4a81-4e6c-abc1-2800cd58c188-config\") pod \"kube-apiserver-operator-766d6c64bb-49k6d\" (UID: \"ca06f669-4a81-4e6c-abc1-2800cd58c188\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-49k6d" Jan 20 16:44:01 crc kubenswrapper[4558]: I0120 16:44:01.272559 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Jan 20 16:44:01 crc kubenswrapper[4558]: I0120 16:44:01.279440 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/31b93be8-46ba-485b-b5d3-639064825683-serving-cert\") pod \"service-ca-operator-777779d784-25jqm\" (UID: \"31b93be8-46ba-485b-b5d3-639064825683\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-25jqm" Jan 20 16:44:01 crc kubenswrapper[4558]: I0120 16:44:01.291421 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Jan 20 16:44:01 crc kubenswrapper[4558]: I0120 16:44:01.311631 4558 reflector.go:368] 
Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Jan 20 16:44:01 crc kubenswrapper[4558]: I0120 16:44:01.331846 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Jan 20 16:44:01 crc kubenswrapper[4558]: I0120 16:44:01.351552 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Jan 20 16:44:01 crc kubenswrapper[4558]: I0120 16:44:01.357553 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/31b93be8-46ba-485b-b5d3-639064825683-config\") pod \"service-ca-operator-777779d784-25jqm\" (UID: \"31b93be8-46ba-485b-b5d3-639064825683\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-25jqm" Jan 20 16:44:01 crc kubenswrapper[4558]: I0120 16:44:01.371074 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Jan 20 16:44:01 crc kubenswrapper[4558]: I0120 16:44:01.391772 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Jan 20 16:44:01 crc kubenswrapper[4558]: I0120 16:44:01.411697 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Jan 20 16:44:01 crc kubenswrapper[4558]: I0120 16:44:01.420284 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/84f45a25-f77d-4c4c-88b6-5bdc4c286f10-default-certificate\") pod \"router-default-5444994796-vkkjt\" (UID: \"84f45a25-f77d-4c4c-88b6-5bdc4c286f10\") " pod="openshift-ingress/router-default-5444994796-vkkjt" Jan 20 16:44:01 crc kubenswrapper[4558]: I0120 16:44:01.432043 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Jan 20 16:44:01 crc kubenswrapper[4558]: I0120 16:44:01.439535 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/84f45a25-f77d-4c4c-88b6-5bdc4c286f10-stats-auth\") pod \"router-default-5444994796-vkkjt\" (UID: \"84f45a25-f77d-4c4c-88b6-5bdc4c286f10\") " pod="openshift-ingress/router-default-5444994796-vkkjt" Jan 20 16:44:01 crc kubenswrapper[4558]: I0120 16:44:01.450893 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Jan 20 16:44:01 crc kubenswrapper[4558]: I0120 16:44:01.460661 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/84f45a25-f77d-4c4c-88b6-5bdc4c286f10-metrics-certs\") pod \"router-default-5444994796-vkkjt\" (UID: \"84f45a25-f77d-4c4c-88b6-5bdc4c286f10\") " pod="openshift-ingress/router-default-5444994796-vkkjt" Jan 20 16:44:01 crc kubenswrapper[4558]: I0120 16:44:01.470958 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Jan 20 16:44:01 crc kubenswrapper[4558]: I0120 16:44:01.478268 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/84f45a25-f77d-4c4c-88b6-5bdc4c286f10-service-ca-bundle\") pod \"router-default-5444994796-vkkjt\" (UID: \"84f45a25-f77d-4c4c-88b6-5bdc4c286f10\") " pod="openshift-ingress/router-default-5444994796-vkkjt" Jan 20 16:44:01 crc 
kubenswrapper[4558]: I0120 16:44:01.491371 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Jan 20 16:44:01 crc kubenswrapper[4558]: I0120 16:44:01.531234 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Jan 20 16:44:01 crc kubenswrapper[4558]: I0120 16:44:01.552541 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Jan 20 16:44:01 crc kubenswrapper[4558]: I0120 16:44:01.571422 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Jan 20 16:44:01 crc kubenswrapper[4558]: I0120 16:44:01.589842 4558 request.go:700] Waited for 1.016928234s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-dns-operator/configmaps?fieldSelector=metadata.name%3Dkube-root-ca.crt&limit=500&resourceVersion=0 Jan 20 16:44:01 crc kubenswrapper[4558]: I0120 16:44:01.591199 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Jan 20 16:44:01 crc kubenswrapper[4558]: I0120 16:44:01.611781 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Jan 20 16:44:01 crc kubenswrapper[4558]: I0120 16:44:01.631811 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Jan 20 16:44:01 crc kubenswrapper[4558]: I0120 16:44:01.650937 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Jan 20 16:44:01 crc kubenswrapper[4558]: I0120 16:44:01.671963 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Jan 20 16:44:01 crc kubenswrapper[4558]: I0120 16:44:01.691490 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Jan 20 16:44:01 crc kubenswrapper[4558]: I0120 16:44:01.712673 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Jan 20 16:44:01 crc kubenswrapper[4558]: I0120 16:44:01.720842 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/34b7c77f-6a7d-43de-9ee4-bdba78dc8248-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-7wfbg\" (UID: \"34b7c77f-6a7d-43de-9ee4-bdba78dc8248\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-7wfbg" Jan 20 16:44:01 crc kubenswrapper[4558]: I0120 16:44:01.731529 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Jan 20 16:44:01 crc kubenswrapper[4558]: E0120 16:44:01.736945 4558 secret.go:188] Couldn't get secret openshift-operator-lifecycle-manager/catalog-operator-serving-cert: failed to sync secret cache: timed out waiting for the condition Jan 20 16:44:01 crc kubenswrapper[4558]: E0120 16:44:01.736983 4558 secret.go:188] Couldn't get secret openshift-operator-lifecycle-manager/pprof-cert: failed to sync secret cache: timed out waiting for the condition Jan 20 16:44:01 crc kubenswrapper[4558]: E0120 16:44:01.736998 4558 
secret.go:188] Couldn't get secret openshift-operator-lifecycle-manager/olm-operator-serving-cert: failed to sync secret cache: timed out waiting for the condition Jan 20 16:44:01 crc kubenswrapper[4558]: E0120 16:44:01.737026 4558 secret.go:188] Couldn't get secret openshift-operator-lifecycle-manager/pprof-cert: failed to sync secret cache: timed out waiting for the condition Jan 20 16:44:01 crc kubenswrapper[4558]: E0120 16:44:01.737000 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/04ab3f66-5485-4b6f-9293-ed76b2695d50-srv-cert podName:04ab3f66-5485-4b6f-9293-ed76b2695d50 nodeName:}" failed. No retries permitted until 2026-01-20 16:44:02.236982446 +0000 UTC m=+135.997320413 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "srv-cert" (UniqueName: "kubernetes.io/secret/04ab3f66-5485-4b6f-9293-ed76b2695d50-srv-cert") pod "catalog-operator-68c6474976-zd9qc" (UID: "04ab3f66-5485-4b6f-9293-ed76b2695d50") : failed to sync secret cache: timed out waiting for the condition Jan 20 16:44:01 crc kubenswrapper[4558]: E0120 16:44:01.737074 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/04ab3f66-5485-4b6f-9293-ed76b2695d50-profile-collector-cert podName:04ab3f66-5485-4b6f-9293-ed76b2695d50 nodeName:}" failed. No retries permitted until 2026-01-20 16:44:02.237055193 +0000 UTC m=+135.997393170 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "profile-collector-cert" (UniqueName: "kubernetes.io/secret/04ab3f66-5485-4b6f-9293-ed76b2695d50-profile-collector-cert") pod "catalog-operator-68c6474976-zd9qc" (UID: "04ab3f66-5485-4b6f-9293-ed76b2695d50") : failed to sync secret cache: timed out waiting for the condition Jan 20 16:44:01 crc kubenswrapper[4558]: E0120 16:44:01.737094 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a8761e3d-bfc2-481f-a29e-15f7566a5881-srv-cert podName:a8761e3d-bfc2-481f-a29e-15f7566a5881 nodeName:}" failed. No retries permitted until 2026-01-20 16:44:02.237084138 +0000 UTC m=+135.997422115 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "srv-cert" (UniqueName: "kubernetes.io/secret/a8761e3d-bfc2-481f-a29e-15f7566a5881-srv-cert") pod "olm-operator-6b444d44fb-p5lzn" (UID: "a8761e3d-bfc2-481f-a29e-15f7566a5881") : failed to sync secret cache: timed out waiting for the condition Jan 20 16:44:01 crc kubenswrapper[4558]: E0120 16:44:01.737110 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a8761e3d-bfc2-481f-a29e-15f7566a5881-profile-collector-cert podName:a8761e3d-bfc2-481f-a29e-15f7566a5881 nodeName:}" failed. No retries permitted until 2026-01-20 16:44:02.237103575 +0000 UTC m=+135.997441552 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "profile-collector-cert" (UniqueName: "kubernetes.io/secret/a8761e3d-bfc2-481f-a29e-15f7566a5881-profile-collector-cert") pod "olm-operator-6b444d44fb-p5lzn" (UID: "a8761e3d-bfc2-481f-a29e-15f7566a5881") : failed to sync secret cache: timed out waiting for the condition Jan 20 16:44:01 crc kubenswrapper[4558]: I0120 16:44:01.751448 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Jan 20 16:44:01 crc kubenswrapper[4558]: I0120 16:44:01.770852 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Jan 20 16:44:01 crc kubenswrapper[4558]: I0120 16:44:01.790887 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Jan 20 16:44:01 crc kubenswrapper[4558]: I0120 16:44:01.810666 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Jan 20 16:44:01 crc kubenswrapper[4558]: I0120 16:44:01.831037 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Jan 20 16:44:01 crc kubenswrapper[4558]: I0120 16:44:01.851196 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Jan 20 16:44:01 crc kubenswrapper[4558]: I0120 16:44:01.871537 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Jan 20 16:44:01 crc kubenswrapper[4558]: I0120 16:44:01.890576 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Jan 20 16:44:01 crc kubenswrapper[4558]: I0120 16:44:01.910717 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Jan 20 16:44:01 crc kubenswrapper[4558]: I0120 16:44:01.950922 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Jan 20 16:44:01 crc kubenswrapper[4558]: I0120 16:44:01.971012 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Jan 20 16:44:01 crc kubenswrapper[4558]: I0120 16:44:01.991507 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.010858 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.032422 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.051192 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.070831 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.091408 4558 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.111350 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.132404 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.150768 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.177095 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.191546 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.211525 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.230852 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.251254 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.257458 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/a8761e3d-bfc2-481f-a29e-15f7566a5881-profile-collector-cert\") pod \"olm-operator-6b444d44fb-p5lzn\" (UID: \"a8761e3d-bfc2-481f-a29e-15f7566a5881\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-p5lzn" Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.257552 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/04ab3f66-5485-4b6f-9293-ed76b2695d50-profile-collector-cert\") pod \"catalog-operator-68c6474976-zd9qc\" (UID: \"04ab3f66-5485-4b6f-9293-ed76b2695d50\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zd9qc" Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.257575 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/a8761e3d-bfc2-481f-a29e-15f7566a5881-srv-cert\") pod \"olm-operator-6b444d44fb-p5lzn\" (UID: \"a8761e3d-bfc2-481f-a29e-15f7566a5881\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-p5lzn" Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.257600 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/04ab3f66-5485-4b6f-9293-ed76b2695d50-srv-cert\") pod \"catalog-operator-68c6474976-zd9qc\" (UID: \"04ab3f66-5485-4b6f-9293-ed76b2695d50\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zd9qc" Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.260710 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/a8761e3d-bfc2-481f-a29e-15f7566a5881-srv-cert\") pod \"olm-operator-6b444d44fb-p5lzn\" (UID: 
\"a8761e3d-bfc2-481f-a29e-15f7566a5881\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-p5lzn" Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.261474 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/04ab3f66-5485-4b6f-9293-ed76b2695d50-srv-cert\") pod \"catalog-operator-68c6474976-zd9qc\" (UID: \"04ab3f66-5485-4b6f-9293-ed76b2695d50\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zd9qc" Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.261691 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/04ab3f66-5485-4b6f-9293-ed76b2695d50-profile-collector-cert\") pod \"catalog-operator-68c6474976-zd9qc\" (UID: \"04ab3f66-5485-4b6f-9293-ed76b2695d50\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zd9qc" Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.261709 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/a8761e3d-bfc2-481f-a29e-15f7566a5881-profile-collector-cert\") pod \"olm-operator-6b444d44fb-p5lzn\" (UID: \"a8761e3d-bfc2-481f-a29e-15f7566a5881\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-p5lzn" Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.272839 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.291486 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.311745 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.346894 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xwdlg\" (UniqueName: \"kubernetes.io/projected/350b4e8e-0147-4d0a-b9c0-1cdcdc2312f1-kube-api-access-xwdlg\") pod \"machine-api-operator-5694c8668f-hsls5\" (UID: \"350b4e8e-0147-4d0a-b9c0-1cdcdc2312f1\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-hsls5" Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.364424 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fxvmt\" (UniqueName: \"kubernetes.io/projected/20de26f9-85a3-42a2-ba6b-0839c0657141-kube-api-access-fxvmt\") pod \"openshift-config-operator-7777fb866f-ppqsn\" (UID: \"20de26f9-85a3-42a2-ba6b-0839c0657141\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-ppqsn" Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.383441 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8c24d\" (UniqueName: \"kubernetes.io/projected/4654626b-36d0-4072-a04c-d8ee0678fd50-kube-api-access-8c24d\") pod \"oauth-openshift-558db77b4-nwjzd\" (UID: \"4654626b-36d0-4072-a04c-d8ee0678fd50\") " pod="openshift-authentication/oauth-openshift-558db77b4-nwjzd" Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.398522 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-ppqsn" Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.405973 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n4q45\" (UniqueName: \"kubernetes.io/projected/9e148c70-cb30-40cf-a333-65c0fdf0aa35-kube-api-access-n4q45\") pod \"authentication-operator-69f744f599-rh247\" (UID: \"9e148c70-cb30-40cf-a333-65c0fdf0aa35\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rh247" Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.424426 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9w8mv\" (UniqueName: \"kubernetes.io/projected/f6c99edf-1afb-476e-b63e-ee73c372499c-kube-api-access-9w8mv\") pod \"apiserver-76f77b778f-sjzfb\" (UID: \"f6c99edf-1afb-476e-b63e-ee73c372499c\") " pod="openshift-apiserver/apiserver-76f77b778f-sjzfb" Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.444420 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m9sms\" (UniqueName: \"kubernetes.io/projected/d2311c1a-55c3-437c-ae71-881861de70ff-kube-api-access-m9sms\") pod \"openshift-apiserver-operator-796bbdcf4f-h7vz8\" (UID: \"d2311c1a-55c3-437c-ae71-881861de70ff\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-h7vz8" Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.463273 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dqz5k\" (UniqueName: \"kubernetes.io/projected/1dd6e1c0-24f1-43fc-81d6-b0d0e3ed36c2-kube-api-access-dqz5k\") pod \"cluster-samples-operator-665b6dd947-c97pw\" (UID: \"1dd6e1c0-24f1-43fc-81d6-b0d0e3ed36c2\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-c97pw" Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.484787 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nfwxp\" (UniqueName: \"kubernetes.io/projected/c2b3de8e-9eaa-43d7-a061-674af5e035cb-kube-api-access-nfwxp\") pod \"controller-manager-879f6c89f-t225g\" (UID: \"c2b3de8e-9eaa-43d7-a061-674af5e035cb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-t225g" Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.503993 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n6m5z\" (UniqueName: \"kubernetes.io/projected/dd7b53bb-d740-497c-a36e-87e51d6f05a6-kube-api-access-n6m5z\") pod \"console-f9d7485db-lhmdh\" (UID: \"dd7b53bb-d740-497c-a36e-87e51d6f05a6\") " pod="openshift-console/console-f9d7485db-lhmdh" Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.511209 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.531308 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.557190 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-ppqsn"] Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.558592 4558 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.562520 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-hsls5" Jan 20 16:44:02 crc kubenswrapper[4558]: W0120 16:44:02.563275 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod20de26f9_85a3_42a2_ba6b_0839c0657141.slice/crio-931962c7120b218bfaa27f657032cfeeb85a0f38cc5cfb8962db747cd93f4d5f WatchSource:0}: Error finding container 931962c7120b218bfaa27f657032cfeeb85a0f38cc5cfb8962db747cd93f4d5f: Status 404 returned error can't find the container with id 931962c7120b218bfaa27f657032cfeeb85a0f38cc5cfb8962db747cd93f4d5f Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.570942 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.574713 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-sjzfb" Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.591291 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-t225g" Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.591786 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.602516 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-rh247" Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.609674 4558 request.go:700] Waited for 1.945372911s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-dns/secrets?fieldSelector=metadata.name%3Ddns-default-metrics-tls&limit=500&resourceVersion=0 Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.612231 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.630897 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-c97pw" Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.634262 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.653370 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.671385 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-nwjzd" Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.672693 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-h7vz8" Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.672876 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.689630 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-lhmdh" Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.711311 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-hsls5"] Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.713694 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7vbtb\" (UniqueName: \"kubernetes.io/projected/203c31e0-e1fc-4da7-8275-91295c22f307-kube-api-access-7vbtb\") pod \"console-operator-58897d9998-wmcfr\" (UID: \"203c31e0-e1fc-4da7-8275-91295c22f307\") " pod="openshift-console-operator/console-operator-58897d9998-wmcfr" Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.724938 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wwj4x\" (UniqueName: \"kubernetes.io/projected/04ab3f66-5485-4b6f-9293-ed76b2695d50-kube-api-access-wwj4x\") pod \"catalog-operator-68c6474976-zd9qc\" (UID: \"04ab3f66-5485-4b6f-9293-ed76b2695d50\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zd9qc" Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.732799 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-wmcfr" Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.750192 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/1d076664-8f86-4e46-a67a-85539915bb04-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-7flzc\" (UID: \"1d076664-8f86-4e46-a67a-85539915bb04\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-7flzc" Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.750454 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-sjzfb"] Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.766059 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.766414 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-km9th\" (UniqueName: \"kubernetes.io/projected/7f63cf5d-23fe-4e8b-a9a7-3ce5c0c30aad-kube-api-access-km9th\") pod \"route-controller-manager-6576b87f9c-vmm7h\" (UID: \"7f63cf5d-23fe-4e8b-a9a7-3ce5c0c30aad\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-vmm7h" Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.784565 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b88a23ca-e1d6-4e41-9aee-6f9c2c572f42-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-8rqtk\" (UID: \"b88a23ca-e1d6-4e41-9aee-6f9c2c572f42\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8rqtk" Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.808772 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-24z8p\" (UniqueName: \"kubernetes.io/projected/d49afd2e-470e-4bd4-a30d-5014f423510b-kube-api-access-24z8p\") pod \"openshift-controller-manager-operator-756b6f6bc6-8xc26\" (UID: \"d49afd2e-470e-4bd4-a30d-5014f423510b\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8xc26" Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.812389 
4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8rqtk" Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.828217 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ca06f669-4a81-4e6c-abc1-2800cd58c188-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-49k6d\" (UID: \"ca06f669-4a81-4e6c-abc1-2800cd58c188\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-49k6d" Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.835270 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-c97pw"] Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.845113 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5kw5d\" (UniqueName: \"kubernetes.io/projected/31b93be8-46ba-485b-b5d3-639064825683-kube-api-access-5kw5d\") pod \"service-ca-operator-777779d784-25jqm\" (UID: \"31b93be8-46ba-485b-b5d3-639064825683\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-25jqm" Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.865395 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pr9jf\" (UniqueName: \"kubernetes.io/projected/a8761e3d-bfc2-481f-a29e-15f7566a5881-kube-api-access-pr9jf\") pod \"olm-operator-6b444d44fb-p5lzn\" (UID: \"a8761e3d-bfc2-481f-a29e-15f7566a5881\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-p5lzn" Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.885115 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7nm45\" (UniqueName: \"kubernetes.io/projected/206f3850-b746-456c-9500-4d862f983a1e-kube-api-access-7nm45\") pod \"machine-config-operator-74547568cd-dqf5x\" (UID: \"206f3850-b746-456c-9500-4d862f983a1e\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-dqf5x" Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.888115 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-p5lzn" Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.904932 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v44kv\" (UniqueName: \"kubernetes.io/projected/84f45a25-f77d-4c4c-88b6-5bdc4c286f10-kube-api-access-v44kv\") pod \"router-default-5444994796-vkkjt\" (UID: \"84f45a25-f77d-4c4c-88b6-5bdc4c286f10\") " pod="openshift-ingress/router-default-5444994796-vkkjt" Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.905977 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zd9qc" Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.907866 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-h7vz8"] Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.924014 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t227k\" (UniqueName: \"kubernetes.io/projected/34b7c77f-6a7d-43de-9ee4-bdba78dc8248-kube-api-access-t227k\") pod \"multus-admission-controller-857f4d67dd-7wfbg\" (UID: \"34b7c77f-6a7d-43de-9ee4-bdba78dc8248\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-7wfbg" Jan 20 16:44:02 crc kubenswrapper[4558]: W0120 16:44:02.935022 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd2311c1a_55c3_437c_ae71_881861de70ff.slice/crio-53700759595c3c0dfb1a8b07a4b8e98bf299c5b950927368839c1a596be879e4 WatchSource:0}: Error finding container 53700759595c3c0dfb1a8b07a4b8e98bf299c5b950927368839c1a596be879e4: Status 404 returned error can't find the container with id 53700759595c3c0dfb1a8b07a4b8e98bf299c5b950927368839c1a596be879e4 Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.943713 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-wmcfr"] Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.946985 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j9dpp\" (UniqueName: \"kubernetes.io/projected/1d076664-8f86-4e46-a67a-85539915bb04-kube-api-access-j9dpp\") pod \"cluster-image-registry-operator-dc59b4c8b-7flzc\" (UID: \"1d076664-8f86-4e46-a67a-85539915bb04\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-7flzc" Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.987605 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-t225g"] Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.988289 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-rh247"] Jan 20 16:44:02 crc kubenswrapper[4558]: I0120 16:44:02.997829 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-h7vz8" event={"ID":"d2311c1a-55c3-437c-ae71-881861de70ff","Type":"ContainerStarted","Data":"53700759595c3c0dfb1a8b07a4b8e98bf299c5b950927368839c1a596be879e4"} Jan 20 16:44:02 crc kubenswrapper[4558]: W0120 16:44:02.998693 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc2b3de8e_9eaa_43d7_a061_674af5e035cb.slice/crio-7f0e80865945a529cda1ad18f57e0cab543a8ad4eed6a7d6d2ab9621ef965ce0 WatchSource:0}: Error finding container 7f0e80865945a529cda1ad18f57e0cab543a8ad4eed6a7d6d2ab9621ef965ce0: Status 404 returned error can't find the container with id 7f0e80865945a529cda1ad18f57e0cab543a8ad4eed6a7d6d2ab9621ef965ce0 Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:02.999837 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-c97pw" event={"ID":"1dd6e1c0-24f1-43fc-81d6-b0d0e3ed36c2","Type":"ContainerStarted","Data":"a9c5bcccd26c784cefd443c996d2fd4205799b108f5659605412c182baa85412"} Jan 20 
16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.003254 4558 generic.go:334] "Generic (PLEG): container finished" podID="20de26f9-85a3-42a2-ba6b-0839c0657141" containerID="4d0aebf7f2334a1d27848467af5e55b98f157f0485499c170b2dcd08979fc754" exitCode=0 Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.003344 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-ppqsn" event={"ID":"20de26f9-85a3-42a2-ba6b-0839c0657141","Type":"ContainerDied","Data":"4d0aebf7f2334a1d27848467af5e55b98f157f0485499c170b2dcd08979fc754"} Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.003379 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-ppqsn" event={"ID":"20de26f9-85a3-42a2-ba6b-0839c0657141","Type":"ContainerStarted","Data":"931962c7120b218bfaa27f657032cfeeb85a0f38cc5cfb8962db747cd93f4d5f"} Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.003561 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8rqtk"] Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.004739 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-wmcfr" event={"ID":"203c31e0-e1fc-4da7-8275-91295c22f307","Type":"ContainerStarted","Data":"85f90e7d59db6388d9610e038f8a6d35bb24203783693d83300c344c73af7cc5"} Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.008073 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-sjzfb" event={"ID":"f6c99edf-1afb-476e-b63e-ee73c372499c","Type":"ContainerStarted","Data":"62521a55d5d2bc115615548f0e46cb40a714cf5238578feaeb9b113bd2aa9983"} Jan 20 16:44:03 crc kubenswrapper[4558]: W0120 16:44:03.011721 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9e148c70_cb30_40cf_a333_65c0fdf0aa35.slice/crio-7289ad705f59e936398b64e5ab8b634c5014570dc889450807fdf614d418f2d0 WatchSource:0}: Error finding container 7289ad705f59e936398b64e5ab8b634c5014570dc889450807fdf614d418f2d0: Status 404 returned error can't find the container with id 7289ad705f59e936398b64e5ab8b634c5014570dc889450807fdf614d418f2d0 Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.013519 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-hsls5" event={"ID":"350b4e8e-0147-4d0a-b9c0-1cdcdc2312f1","Type":"ContainerStarted","Data":"b9b5822d5e9bb5c1e6b303b793d8de723ce352adef1a85b4a059c027f22b36fb"} Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.013567 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-hsls5" event={"ID":"350b4e8e-0147-4d0a-b9c0-1cdcdc2312f1","Type":"ContainerStarted","Data":"6ccb2d2a71fbaa42cce239891883ad8f0e69bde44dad442094e77c93d1237029"} Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.013578 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-hsls5" event={"ID":"350b4e8e-0147-4d0a-b9c0-1cdcdc2312f1","Type":"ContainerStarted","Data":"58c6d4d41eab9d52039a78a2f5073dd6e6163444492f62be09afb774053d9e43"} Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.023863 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-vmm7h" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.064760 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/3ed1f27c-300c-4da3-8c4c-742baaf440eb-tmpfs\") pod \"packageserver-d55dfcdfc-rh5k4\" (UID: \"3ed1f27c-300c-4da3-8c4c-742baaf440eb\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rh5k4" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.064818 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fdlvs\" (UniqueName: \"kubernetes.io/projected/25c0e5f7-93dc-49a4-a31c-b34dbc125f3e-kube-api-access-fdlvs\") pod \"kube-storage-version-migrator-operator-b67b599dd-l8kh6\" (UID: \"25c0e5f7-93dc-49a4-a31c-b34dbc125f3e\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-l8kh6" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.064839 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1cdfeb06-d9ec-43ad-850c-d685e4e7c8a5-etcd-client\") pod \"etcd-operator-b45778765-5rxsc\" (UID: \"1cdfeb06-d9ec-43ad-850c-d685e4e7c8a5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-5rxsc" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.064936 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/ea01cf9d-e1ac-4197-a0ac-830eebd15245-ca-trust-extracted\") pod \"image-registry-697d97f7c8-zsdqk\" (UID: \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\") " pod="openshift-image-registry/image-registry-697d97f7c8-zsdqk" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.064973 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pkb27\" (UniqueName: \"kubernetes.io/projected/3d51bcc0-ce56-4953-b0d6-252bb4a3a66c-kube-api-access-pkb27\") pod \"ingress-operator-5b745b69d9-jvkk8\" (UID: \"3d51bcc0-ce56-4953-b0d6-252bb4a3a66c\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jvkk8" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.065373 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rc7pc\" (UniqueName: \"kubernetes.io/projected/436af592-a7f2-4649-b2e3-47df5e85ce9c-kube-api-access-rc7pc\") pod \"apiserver-7bbb656c7d-b97gp\" (UID: \"436af592-a7f2-4649-b2e3-47df5e85ce9c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b97gp" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.065458 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/25c0e5f7-93dc-49a4-a31c-b34dbc125f3e-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-l8kh6\" (UID: \"25c0e5f7-93dc-49a4-a31c-b34dbc125f3e\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-l8kh6" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.065514 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/1cdfeb06-d9ec-43ad-850c-d685e4e7c8a5-etcd-service-ca\") pod 
\"etcd-operator-b45778765-5rxsc\" (UID: \"1cdfeb06-d9ec-43ad-850c-d685e4e7c8a5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-5rxsc" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.065544 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-khtcb\" (UniqueName: \"kubernetes.io/projected/7241fe86-b7e1-4246-b040-66ca5e799562-kube-api-access-khtcb\") pod \"machine-approver-56656f9798-jq4hg\" (UID: \"7241fe86-b7e1-4246-b040-66ca5e799562\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jq4hg" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.065572 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a9e14748-e028-43ec-8e2c-d2d8ce033c75-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-qdvgg\" (UID: \"a9e14748-e028-43ec-8e2c-d2d8ce033c75\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qdvgg" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.065770 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/436af592-a7f2-4649-b2e3-47df5e85ce9c-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-b97gp\" (UID: \"436af592-a7f2-4649-b2e3-47df5e85ce9c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b97gp" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.065827 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/436af592-a7f2-4649-b2e3-47df5e85ce9c-encryption-config\") pod \"apiserver-7bbb656c7d-b97gp\" (UID: \"436af592-a7f2-4649-b2e3-47df5e85ce9c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b97gp" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.065864 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7hmsc\" (UniqueName: \"kubernetes.io/projected/ea01cf9d-e1ac-4197-a0ac-830eebd15245-kube-api-access-7hmsc\") pod \"image-registry-697d97f7c8-zsdqk\" (UID: \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\") " pod="openshift-image-registry/image-registry-697d97f7c8-zsdqk" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.065884 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/436af592-a7f2-4649-b2e3-47df5e85ce9c-serving-cert\") pod \"apiserver-7bbb656c7d-b97gp\" (UID: \"436af592-a7f2-4649-b2e3-47df5e85ce9c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b97gp" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.065911 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4b2nh\" (UniqueName: \"kubernetes.io/projected/59e85616-edec-46fa-aa93-a1ecb76b415c-kube-api-access-4b2nh\") pod \"downloads-7954f5f757-225km\" (UID: \"59e85616-edec-46fa-aa93-a1ecb76b415c\") " pod="openshift-console/downloads-7954f5f757-225km" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.065935 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/7241fe86-b7e1-4246-b040-66ca5e799562-machine-approver-tls\") pod \"machine-approver-56656f9798-jq4hg\" (UID: 
\"7241fe86-b7e1-4246-b040-66ca5e799562\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jq4hg" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.066207 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/436af592-a7f2-4649-b2e3-47df5e85ce9c-etcd-client\") pod \"apiserver-7bbb656c7d-b97gp\" (UID: \"436af592-a7f2-4649-b2e3-47df5e85ce9c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b97gp" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.066300 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/ea01cf9d-e1ac-4197-a0ac-830eebd15245-registry-certificates\") pod \"image-registry-697d97f7c8-zsdqk\" (UID: \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\") " pod="openshift-image-registry/image-registry-697d97f7c8-zsdqk" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.066328 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a9e14748-e028-43ec-8e2c-d2d8ce033c75-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-qdvgg\" (UID: \"a9e14748-e028-43ec-8e2c-d2d8ce033c75\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qdvgg" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.066395 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/963800b2-ec8d-4d8f-9a05-f199a20c8ba9-metrics-tls\") pod \"dns-operator-744455d44c-mshrf\" (UID: \"963800b2-ec8d-4d8f-9a05-f199a20c8ba9\") " pod="openshift-dns-operator/dns-operator-744455d44c-mshrf" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.066663 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v7xrc\" (UniqueName: \"kubernetes.io/projected/963800b2-ec8d-4d8f-9a05-f199a20c8ba9-kube-api-access-v7xrc\") pod \"dns-operator-744455d44c-mshrf\" (UID: \"963800b2-ec8d-4d8f-9a05-f199a20c8ba9\") " pod="openshift-dns-operator/dns-operator-744455d44c-mshrf" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.066815 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/3d51bcc0-ce56-4953-b0d6-252bb4a3a66c-metrics-tls\") pod \"ingress-operator-5b745b69d9-jvkk8\" (UID: \"3d51bcc0-ce56-4953-b0d6-252bb4a3a66c\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jvkk8" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.067085 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/7241fe86-b7e1-4246-b040-66ca5e799562-auth-proxy-config\") pod \"machine-approver-56656f9798-jq4hg\" (UID: \"7241fe86-b7e1-4246-b040-66ca5e799562\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jq4hg" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.067339 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/436af592-a7f2-4649-b2e3-47df5e85ce9c-audit-dir\") pod \"apiserver-7bbb656c7d-b97gp\" (UID: \"436af592-a7f2-4649-b2e3-47df5e85ce9c\") " 
pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b97gp" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.067514 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7241fe86-b7e1-4246-b040-66ca5e799562-config\") pod \"machine-approver-56656f9798-jq4hg\" (UID: \"7241fe86-b7e1-4246-b040-66ca5e799562\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jq4hg" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.067681 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/25c0e5f7-93dc-49a4-a31c-b34dbc125f3e-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-l8kh6\" (UID: \"25c0e5f7-93dc-49a4-a31c-b34dbc125f3e\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-l8kh6" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.067809 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c286z\" (UniqueName: \"kubernetes.io/projected/61738345-9c27-415a-a089-e0000fe5c92a-kube-api-access-c286z\") pod \"migrator-59844c95c7-sgbgg\" (UID: \"61738345-9c27-415a-a089-e0000fe5c92a\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-sgbgg" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.067841 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ea01cf9d-e1ac-4197-a0ac-830eebd15245-trusted-ca\") pod \"image-registry-697d97f7c8-zsdqk\" (UID: \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\") " pod="openshift-image-registry/image-registry-697d97f7c8-zsdqk" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.067876 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/1cdfeb06-d9ec-43ad-850c-d685e4e7c8a5-etcd-ca\") pod \"etcd-operator-b45778765-5rxsc\" (UID: \"1cdfeb06-d9ec-43ad-850c-d685e4e7c8a5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-5rxsc" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.067897 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/3d51bcc0-ce56-4953-b0d6-252bb4a3a66c-bound-sa-token\") pod \"ingress-operator-5b745b69d9-jvkk8\" (UID: \"3d51bcc0-ce56-4953-b0d6-252bb4a3a66c\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jvkk8" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.067939 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/436af592-a7f2-4649-b2e3-47df5e85ce9c-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-b97gp\" (UID: \"436af592-a7f2-4649-b2e3-47df5e85ce9c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b97gp" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.067965 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m99nx\" (UniqueName: \"kubernetes.io/projected/3ed1f27c-300c-4da3-8c4c-742baaf440eb-kube-api-access-m99nx\") pod \"packageserver-d55dfcdfc-rh5k4\" (UID: \"3ed1f27c-300c-4da3-8c4c-742baaf440eb\") " 
pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rh5k4" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.067983 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/3ed1f27c-300c-4da3-8c4c-742baaf440eb-apiservice-cert\") pod \"packageserver-d55dfcdfc-rh5k4\" (UID: \"3ed1f27c-300c-4da3-8c4c-742baaf440eb\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rh5k4" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.068006 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/3ed1f27c-300c-4da3-8c4c-742baaf440eb-webhook-cert\") pod \"packageserver-d55dfcdfc-rh5k4\" (UID: \"3ed1f27c-300c-4da3-8c4c-742baaf440eb\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rh5k4" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.068082 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zsdqk\" (UID: \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\") " pod="openshift-image-registry/image-registry-697d97f7c8-zsdqk" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.068129 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ea01cf9d-e1ac-4197-a0ac-830eebd15245-bound-sa-token\") pod \"image-registry-697d97f7c8-zsdqk\" (UID: \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\") " pod="openshift-image-registry/image-registry-697d97f7c8-zsdqk" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.068176 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/436af592-a7f2-4649-b2e3-47df5e85ce9c-audit-policies\") pod \"apiserver-7bbb656c7d-b97gp\" (UID: \"436af592-a7f2-4649-b2e3-47df5e85ce9c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b97gp" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.068233 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a9e14748-e028-43ec-8e2c-d2d8ce033c75-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-qdvgg\" (UID: \"a9e14748-e028-43ec-8e2c-d2d8ce033c75\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qdvgg" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.068276 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/ea01cf9d-e1ac-4197-a0ac-830eebd15245-registry-tls\") pod \"image-registry-697d97f7c8-zsdqk\" (UID: \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\") " pod="openshift-image-registry/image-registry-697d97f7c8-zsdqk" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.068308 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1cdfeb06-d9ec-43ad-850c-d685e4e7c8a5-config\") pod \"etcd-operator-b45778765-5rxsc\" (UID: \"1cdfeb06-d9ec-43ad-850c-d685e4e7c8a5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-5rxsc" Jan 20 16:44:03 crc 
kubenswrapper[4558]: I0120 16:44:03.068349 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/ea01cf9d-e1ac-4197-a0ac-830eebd15245-installation-pull-secrets\") pod \"image-registry-697d97f7c8-zsdqk\" (UID: \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\") " pod="openshift-image-registry/image-registry-697d97f7c8-zsdqk" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.068378 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3d51bcc0-ce56-4953-b0d6-252bb4a3a66c-trusted-ca\") pod \"ingress-operator-5b745b69d9-jvkk8\" (UID: \"3d51bcc0-ce56-4953-b0d6-252bb4a3a66c\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jvkk8" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.068397 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1cdfeb06-d9ec-43ad-850c-d685e4e7c8a5-serving-cert\") pod \"etcd-operator-b45778765-5rxsc\" (UID: \"1cdfeb06-d9ec-43ad-850c-d685e4e7c8a5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-5rxsc" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.068421 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xfqdv\" (UniqueName: \"kubernetes.io/projected/1cdfeb06-d9ec-43ad-850c-d685e4e7c8a5-kube-api-access-xfqdv\") pod \"etcd-operator-b45778765-5rxsc\" (UID: \"1cdfeb06-d9ec-43ad-850c-d685e4e7c8a5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-5rxsc" Jan 20 16:44:03 crc kubenswrapper[4558]: E0120 16:44:03.068640 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-20 16:44:03.568612596 +0000 UTC m=+137.328950563 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zsdqk" (UID: "ea01cf9d-e1ac-4197-a0ac-830eebd15245") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.081256 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-7flzc" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.087383 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-nwjzd"] Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.088418 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8xc26" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.095474 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-lhmdh"] Jan 20 16:44:03 crc kubenswrapper[4558]: W0120 16:44:03.098008 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4654626b_36d0_4072_a04c_d8ee0678fd50.slice/crio-7afde49da87647c5e61c4acc968cf8fcd828569cb8c1a0fe790df704633e922f WatchSource:0}: Error finding container 7afde49da87647c5e61c4acc968cf8fcd828569cb8c1a0fe790df704633e922f: Status 404 returned error can't find the container with id 7afde49da87647c5e61c4acc968cf8fcd828569cb8c1a0fe790df704633e922f Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.107319 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zd9qc"] Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.118269 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-dqf5x" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.124713 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-49k6d" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.133613 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-25jqm" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.137689 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress/router-default-5444994796-vkkjt" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.147066 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-p5lzn"] Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.170057 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.170336 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a9e14748-e028-43ec-8e2c-d2d8ce033c75-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-qdvgg\" (UID: \"a9e14748-e028-43ec-8e2c-d2d8ce033c75\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qdvgg" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.170390 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/71c9147f-5ab8-4342-b8e2-e4c37d281efb-signing-cabundle\") pod \"service-ca-9c57cc56f-4t4rs\" (UID: \"71c9147f-5ab8-4342-b8e2-e4c37d281efb\") " pod="openshift-service-ca/service-ca-9c57cc56f-4t4rs" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.170423 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/436af592-a7f2-4649-b2e3-47df5e85ce9c-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-b97gp\" (UID: \"436af592-a7f2-4649-b2e3-47df5e85ce9c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b97gp" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.170443 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/6aa23de2-5f67-4f16-bd7d-eeecb3dfa19f-registration-dir\") pod \"csi-hostpathplugin-9cqxr\" (UID: \"6aa23de2-5f67-4f16-bd7d-eeecb3dfa19f\") " pod="hostpath-provisioner/csi-hostpathplugin-9cqxr" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.170465 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/119fb5fe-2460-4d2d-9db9-452afaa1e93e-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-4w5mk\" (UID: \"119fb5fe-2460-4d2d-9db9-452afaa1e93e\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-4w5mk" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.170481 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/31eb233b-13fe-40cb-9259-f18574a85e01-secret-volume\") pod \"collect-profiles-29482110-7p5pb\" (UID: \"31eb233b-13fe-40cb-9259-f18574a85e01\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482110-7p5pb" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.170504 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: 
\"kubernetes.io/secret/436af592-a7f2-4649-b2e3-47df5e85ce9c-encryption-config\") pod \"apiserver-7bbb656c7d-b97gp\" (UID: \"436af592-a7f2-4649-b2e3-47df5e85ce9c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b97gp" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.170521 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/96b0927f-cc7b-48e8-82bd-bbec85c2b9b2-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-2j5lz\" (UID: \"96b0927f-cc7b-48e8-82bd-bbec85c2b9b2\") " pod="openshift-marketplace/marketplace-operator-79b997595-2j5lz" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.170540 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7hmsc\" (UniqueName: \"kubernetes.io/projected/ea01cf9d-e1ac-4197-a0ac-830eebd15245-kube-api-access-7hmsc\") pod \"image-registry-697d97f7c8-zsdqk\" (UID: \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\") " pod="openshift-image-registry/image-registry-697d97f7c8-zsdqk" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.170558 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/436af592-a7f2-4649-b2e3-47df5e85ce9c-serving-cert\") pod \"apiserver-7bbb656c7d-b97gp\" (UID: \"436af592-a7f2-4649-b2e3-47df5e85ce9c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b97gp" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.170572 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ad0a9f73-941c-4023-a106-2b0fa19da6a0-config-volume\") pod \"dns-default-gbvfn\" (UID: \"ad0a9f73-941c-4023-a106-2b0fa19da6a0\") " pod="openshift-dns/dns-default-gbvfn" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.170587 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/ad0a9f73-941c-4023-a106-2b0fa19da6a0-metrics-tls\") pod \"dns-default-gbvfn\" (UID: \"ad0a9f73-941c-4023-a106-2b0fa19da6a0\") " pod="openshift-dns/dns-default-gbvfn" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.170628 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4b2nh\" (UniqueName: \"kubernetes.io/projected/59e85616-edec-46fa-aa93-a1ecb76b415c-kube-api-access-4b2nh\") pod \"downloads-7954f5f757-225km\" (UID: \"59e85616-edec-46fa-aa93-a1ecb76b415c\") " pod="openshift-console/downloads-7954f5f757-225km" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.170655 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/7241fe86-b7e1-4246-b040-66ca5e799562-machine-approver-tls\") pod \"machine-approver-56656f9798-jq4hg\" (UID: \"7241fe86-b7e1-4246-b040-66ca5e799562\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jq4hg" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.170672 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-njmr4\" (UniqueName: \"kubernetes.io/projected/6aa23de2-5f67-4f16-bd7d-eeecb3dfa19f-kube-api-access-njmr4\") pod \"csi-hostpathplugin-9cqxr\" (UID: \"6aa23de2-5f67-4f16-bd7d-eeecb3dfa19f\") " pod="hostpath-provisioner/csi-hostpathplugin-9cqxr" Jan 20 16:44:03 
crc kubenswrapper[4558]: I0120 16:44:03.170717 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/436af592-a7f2-4649-b2e3-47df5e85ce9c-etcd-client\") pod \"apiserver-7bbb656c7d-b97gp\" (UID: \"436af592-a7f2-4649-b2e3-47df5e85ce9c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b97gp" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.170731 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8t6d2\" (UniqueName: \"kubernetes.io/projected/31eb233b-13fe-40cb-9259-f18574a85e01-kube-api-access-8t6d2\") pod \"collect-profiles-29482110-7p5pb\" (UID: \"31eb233b-13fe-40cb-9259-f18574a85e01\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482110-7p5pb" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.170753 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9a09b2d6-5243-4a2c-a13f-3b640bfaa3dc-cert\") pod \"ingress-canary-v2c77\" (UID: \"9a09b2d6-5243-4a2c-a13f-3b640bfaa3dc\") " pod="openshift-ingress-canary/ingress-canary-v2c77" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.170775 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nzkhw\" (UniqueName: \"kubernetes.io/projected/96b0927f-cc7b-48e8-82bd-bbec85c2b9b2-kube-api-access-nzkhw\") pod \"marketplace-operator-79b997595-2j5lz\" (UID: \"96b0927f-cc7b-48e8-82bd-bbec85c2b9b2\") " pod="openshift-marketplace/marketplace-operator-79b997595-2j5lz" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.170801 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/8ad94588-fb91-4748-bc0c-c87f686fa9a0-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-pltl7\" (UID: \"8ad94588-fb91-4748-bc0c-c87f686fa9a0\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-pltl7" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.170829 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/ea01cf9d-e1ac-4197-a0ac-830eebd15245-registry-certificates\") pod \"image-registry-697d97f7c8-zsdqk\" (UID: \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\") " pod="openshift-image-registry/image-registry-697d97f7c8-zsdqk" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.170845 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a9e14748-e028-43ec-8e2c-d2d8ce033c75-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-qdvgg\" (UID: \"a9e14748-e028-43ec-8e2c-d2d8ce033c75\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qdvgg" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.170861 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/963800b2-ec8d-4d8f-9a05-f199a20c8ba9-metrics-tls\") pod \"dns-operator-744455d44c-mshrf\" (UID: \"963800b2-ec8d-4d8f-9a05-f199a20c8ba9\") " pod="openshift-dns-operator/dns-operator-744455d44c-mshrf" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.170877 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/6aa23de2-5f67-4f16-bd7d-eeecb3dfa19f-mountpoint-dir\") pod \"csi-hostpathplugin-9cqxr\" (UID: \"6aa23de2-5f67-4f16-bd7d-eeecb3dfa19f\") " pod="hostpath-provisioner/csi-hostpathplugin-9cqxr" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.170933 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v7xrc\" (UniqueName: \"kubernetes.io/projected/963800b2-ec8d-4d8f-9a05-f199a20c8ba9-kube-api-access-v7xrc\") pod \"dns-operator-744455d44c-mshrf\" (UID: \"963800b2-ec8d-4d8f-9a05-f199a20c8ba9\") " pod="openshift-dns-operator/dns-operator-744455d44c-mshrf" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.170951 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s8z5l\" (UniqueName: \"kubernetes.io/projected/119fb5fe-2460-4d2d-9db9-452afaa1e93e-kube-api-access-s8z5l\") pod \"control-plane-machine-set-operator-78cbb6b69f-4w5mk\" (UID: \"119fb5fe-2460-4d2d-9db9-452afaa1e93e\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-4w5mk" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.170969 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/3d51bcc0-ce56-4953-b0d6-252bb4a3a66c-metrics-tls\") pod \"ingress-operator-5b745b69d9-jvkk8\" (UID: \"3d51bcc0-ce56-4953-b0d6-252bb4a3a66c\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jvkk8" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.170988 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/8ad94588-fb91-4748-bc0c-c87f686fa9a0-proxy-tls\") pod \"machine-config-controller-84d6567774-pltl7\" (UID: \"8ad94588-fb91-4748-bc0c-c87f686fa9a0\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-pltl7" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.171007 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b489m\" (UniqueName: \"kubernetes.io/projected/8ad94588-fb91-4748-bc0c-c87f686fa9a0-kube-api-access-b489m\") pod \"machine-config-controller-84d6567774-pltl7\" (UID: \"8ad94588-fb91-4748-bc0c-c87f686fa9a0\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-pltl7" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.171035 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/7241fe86-b7e1-4246-b040-66ca5e799562-auth-proxy-config\") pod \"machine-approver-56656f9798-jq4hg\" (UID: \"7241fe86-b7e1-4246-b040-66ca5e799562\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jq4hg" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.171074 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/f4d440d5-1f47-40bc-bfdd-95267dfde12c-certs\") pod \"machine-config-server-cp5zh\" (UID: \"f4d440d5-1f47-40bc-bfdd-95267dfde12c\") " pod="openshift-machine-config-operator/machine-config-server-cp5zh" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.171089 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: 
\"kubernetes.io/host-path/6aa23de2-5f67-4f16-bd7d-eeecb3dfa19f-plugins-dir\") pod \"csi-hostpathplugin-9cqxr\" (UID: \"6aa23de2-5f67-4f16-bd7d-eeecb3dfa19f\") " pod="hostpath-provisioner/csi-hostpathplugin-9cqxr" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.171105 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/436af592-a7f2-4649-b2e3-47df5e85ce9c-audit-dir\") pod \"apiserver-7bbb656c7d-b97gp\" (UID: \"436af592-a7f2-4649-b2e3-47df5e85ce9c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b97gp" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.171123 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7241fe86-b7e1-4246-b040-66ca5e799562-config\") pod \"machine-approver-56656f9798-jq4hg\" (UID: \"7241fe86-b7e1-4246-b040-66ca5e799562\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jq4hg" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.171144 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/25c0e5f7-93dc-49a4-a31c-b34dbc125f3e-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-l8kh6\" (UID: \"25c0e5f7-93dc-49a4-a31c-b34dbc125f3e\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-l8kh6" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.171213 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/a53f5d18-851d-4a86-b349-e53e035d39ce-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-t4dxf\" (UID: \"a53f5d18-851d-4a86-b349-e53e035d39ce\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-t4dxf" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.171232 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/31eb233b-13fe-40cb-9259-f18574a85e01-config-volume\") pod \"collect-profiles-29482110-7p5pb\" (UID: \"31eb233b-13fe-40cb-9259-f18574a85e01\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482110-7p5pb" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.171259 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c286z\" (UniqueName: \"kubernetes.io/projected/61738345-9c27-415a-a089-e0000fe5c92a-kube-api-access-c286z\") pod \"migrator-59844c95c7-sgbgg\" (UID: \"61738345-9c27-415a-a089-e0000fe5c92a\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-sgbgg" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.171285 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ea01cf9d-e1ac-4197-a0ac-830eebd15245-trusted-ca\") pod \"image-registry-697d97f7c8-zsdqk\" (UID: \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\") " pod="openshift-image-registry/image-registry-697d97f7c8-zsdqk" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.171300 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2px9f\" (UniqueName: \"kubernetes.io/projected/f4d440d5-1f47-40bc-bfdd-95267dfde12c-kube-api-access-2px9f\") pod 
\"machine-config-server-cp5zh\" (UID: \"f4d440d5-1f47-40bc-bfdd-95267dfde12c\") " pod="openshift-machine-config-operator/machine-config-server-cp5zh" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.171314 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2jw85\" (UniqueName: \"kubernetes.io/projected/ad0a9f73-941c-4023-a106-2b0fa19da6a0-kube-api-access-2jw85\") pod \"dns-default-gbvfn\" (UID: \"ad0a9f73-941c-4023-a106-2b0fa19da6a0\") " pod="openshift-dns/dns-default-gbvfn" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.171351 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/1cdfeb06-d9ec-43ad-850c-d685e4e7c8a5-etcd-ca\") pod \"etcd-operator-b45778765-5rxsc\" (UID: \"1cdfeb06-d9ec-43ad-850c-d685e4e7c8a5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-5rxsc" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.171365 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/96b0927f-cc7b-48e8-82bd-bbec85c2b9b2-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-2j5lz\" (UID: \"96b0927f-cc7b-48e8-82bd-bbec85c2b9b2\") " pod="openshift-marketplace/marketplace-operator-79b997595-2j5lz" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.171382 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/3d51bcc0-ce56-4953-b0d6-252bb4a3a66c-bound-sa-token\") pod \"ingress-operator-5b745b69d9-jvkk8\" (UID: \"3d51bcc0-ce56-4953-b0d6-252bb4a3a66c\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jvkk8" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.171408 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/f4d440d5-1f47-40bc-bfdd-95267dfde12c-node-bootstrap-token\") pod \"machine-config-server-cp5zh\" (UID: \"f4d440d5-1f47-40bc-bfdd-95267dfde12c\") " pod="openshift-machine-config-operator/machine-config-server-cp5zh" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.171433 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/436af592-a7f2-4649-b2e3-47df5e85ce9c-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-b97gp\" (UID: \"436af592-a7f2-4649-b2e3-47df5e85ce9c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b97gp" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.171449 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m99nx\" (UniqueName: \"kubernetes.io/projected/3ed1f27c-300c-4da3-8c4c-742baaf440eb-kube-api-access-m99nx\") pod \"packageserver-d55dfcdfc-rh5k4\" (UID: \"3ed1f27c-300c-4da3-8c4c-742baaf440eb\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rh5k4" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.171464 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/3ed1f27c-300c-4da3-8c4c-742baaf440eb-apiservice-cert\") pod \"packageserver-d55dfcdfc-rh5k4\" (UID: \"3ed1f27c-300c-4da3-8c4c-742baaf440eb\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rh5k4" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 
16:44:03.171478 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/3ed1f27c-300c-4da3-8c4c-742baaf440eb-webhook-cert\") pod \"packageserver-d55dfcdfc-rh5k4\" (UID: \"3ed1f27c-300c-4da3-8c4c-742baaf440eb\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rh5k4" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.171529 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ea01cf9d-e1ac-4197-a0ac-830eebd15245-bound-sa-token\") pod \"image-registry-697d97f7c8-zsdqk\" (UID: \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\") " pod="openshift-image-registry/image-registry-697d97f7c8-zsdqk" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.171544 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/436af592-a7f2-4649-b2e3-47df5e85ce9c-audit-policies\") pod \"apiserver-7bbb656c7d-b97gp\" (UID: \"436af592-a7f2-4649-b2e3-47df5e85ce9c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b97gp" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.171559 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2clwt\" (UniqueName: \"kubernetes.io/projected/9a09b2d6-5243-4a2c-a13f-3b640bfaa3dc-kube-api-access-2clwt\") pod \"ingress-canary-v2c77\" (UID: \"9a09b2d6-5243-4a2c-a13f-3b640bfaa3dc\") " pod="openshift-ingress-canary/ingress-canary-v2c77" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.171607 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a9e14748-e028-43ec-8e2c-d2d8ce033c75-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-qdvgg\" (UID: \"a9e14748-e028-43ec-8e2c-d2d8ce033c75\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qdvgg" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.171665 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/ea01cf9d-e1ac-4197-a0ac-830eebd15245-registry-tls\") pod \"image-registry-697d97f7c8-zsdqk\" (UID: \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\") " pod="openshift-image-registry/image-registry-697d97f7c8-zsdqk" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.171681 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1cdfeb06-d9ec-43ad-850c-d685e4e7c8a5-config\") pod \"etcd-operator-b45778765-5rxsc\" (UID: \"1cdfeb06-d9ec-43ad-850c-d685e4e7c8a5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-5rxsc" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.171709 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/6aa23de2-5f67-4f16-bd7d-eeecb3dfa19f-socket-dir\") pod \"csi-hostpathplugin-9cqxr\" (UID: \"6aa23de2-5f67-4f16-bd7d-eeecb3dfa19f\") " pod="hostpath-provisioner/csi-hostpathplugin-9cqxr" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.171728 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/ea01cf9d-e1ac-4197-a0ac-830eebd15245-installation-pull-secrets\") pod \"image-registry-697d97f7c8-zsdqk\" 
(UID: \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\") " pod="openshift-image-registry/image-registry-697d97f7c8-zsdqk" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.171743 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3d51bcc0-ce56-4953-b0d6-252bb4a3a66c-trusted-ca\") pod \"ingress-operator-5b745b69d9-jvkk8\" (UID: \"3d51bcc0-ce56-4953-b0d6-252bb4a3a66c\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jvkk8" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.171759 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1cdfeb06-d9ec-43ad-850c-d685e4e7c8a5-serving-cert\") pod \"etcd-operator-b45778765-5rxsc\" (UID: \"1cdfeb06-d9ec-43ad-850c-d685e4e7c8a5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-5rxsc" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.171775 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xfqdv\" (UniqueName: \"kubernetes.io/projected/1cdfeb06-d9ec-43ad-850c-d685e4e7c8a5-kube-api-access-xfqdv\") pod \"etcd-operator-b45778765-5rxsc\" (UID: \"1cdfeb06-d9ec-43ad-850c-d685e4e7c8a5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-5rxsc" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.171791 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/71c9147f-5ab8-4342-b8e2-e4c37d281efb-signing-key\") pod \"service-ca-9c57cc56f-4t4rs\" (UID: \"71c9147f-5ab8-4342-b8e2-e4c37d281efb\") " pod="openshift-service-ca/service-ca-9c57cc56f-4t4rs" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.173477 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a9e14748-e028-43ec-8e2c-d2d8ce033c75-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-qdvgg\" (UID: \"a9e14748-e028-43ec-8e2c-d2d8ce033c75\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qdvgg" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.173977 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/3ed1f27c-300c-4da3-8c4c-742baaf440eb-tmpfs\") pod \"packageserver-d55dfcdfc-rh5k4\" (UID: \"3ed1f27c-300c-4da3-8c4c-742baaf440eb\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rh5k4" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.174036 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fdlvs\" (UniqueName: \"kubernetes.io/projected/25c0e5f7-93dc-49a4-a31c-b34dbc125f3e-kube-api-access-fdlvs\") pod \"kube-storage-version-migrator-operator-b67b599dd-l8kh6\" (UID: \"25c0e5f7-93dc-49a4-a31c-b34dbc125f3e\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-l8kh6" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.174058 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1cdfeb06-d9ec-43ad-850c-d685e4e7c8a5-etcd-client\") pod \"etcd-operator-b45778765-5rxsc\" (UID: \"1cdfeb06-d9ec-43ad-850c-d685e4e7c8a5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-5rxsc" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.174080 4558 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hlcd4\" (UniqueName: \"kubernetes.io/projected/a53f5d18-851d-4a86-b349-e53e035d39ce-kube-api-access-hlcd4\") pod \"package-server-manager-789f6589d5-t4dxf\" (UID: \"a53f5d18-851d-4a86-b349-e53e035d39ce\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-t4dxf" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.174110 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/6aa23de2-5f67-4f16-bd7d-eeecb3dfa19f-csi-data-dir\") pod \"csi-hostpathplugin-9cqxr\" (UID: \"6aa23de2-5f67-4f16-bd7d-eeecb3dfa19f\") " pod="hostpath-provisioner/csi-hostpathplugin-9cqxr" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.174181 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/ea01cf9d-e1ac-4197-a0ac-830eebd15245-ca-trust-extracted\") pod \"image-registry-697d97f7c8-zsdqk\" (UID: \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\") " pod="openshift-image-registry/image-registry-697d97f7c8-zsdqk" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.174204 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vlfwc\" (UniqueName: \"kubernetes.io/projected/71c9147f-5ab8-4342-b8e2-e4c37d281efb-kube-api-access-vlfwc\") pod \"service-ca-9c57cc56f-4t4rs\" (UID: \"71c9147f-5ab8-4342-b8e2-e4c37d281efb\") " pod="openshift-service-ca/service-ca-9c57cc56f-4t4rs" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.174225 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pkb27\" (UniqueName: \"kubernetes.io/projected/3d51bcc0-ce56-4953-b0d6-252bb4a3a66c-kube-api-access-pkb27\") pod \"ingress-operator-5b745b69d9-jvkk8\" (UID: \"3d51bcc0-ce56-4953-b0d6-252bb4a3a66c\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jvkk8" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.174250 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rc7pc\" (UniqueName: \"kubernetes.io/projected/436af592-a7f2-4649-b2e3-47df5e85ce9c-kube-api-access-rc7pc\") pod \"apiserver-7bbb656c7d-b97gp\" (UID: \"436af592-a7f2-4649-b2e3-47df5e85ce9c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b97gp" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.174291 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/25c0e5f7-93dc-49a4-a31c-b34dbc125f3e-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-l8kh6\" (UID: \"25c0e5f7-93dc-49a4-a31c-b34dbc125f3e\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-l8kh6" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.174306 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/1cdfeb06-d9ec-43ad-850c-d685e4e7c8a5-etcd-service-ca\") pod \"etcd-operator-b45778765-5rxsc\" (UID: \"1cdfeb06-d9ec-43ad-850c-d685e4e7c8a5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-5rxsc" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.174322 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-khtcb\" 
(UniqueName: \"kubernetes.io/projected/7241fe86-b7e1-4246-b040-66ca5e799562-kube-api-access-khtcb\") pod \"machine-approver-56656f9798-jq4hg\" (UID: \"7241fe86-b7e1-4246-b040-66ca5e799562\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jq4hg" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.174778 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/436af592-a7f2-4649-b2e3-47df5e85ce9c-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-b97gp\" (UID: \"436af592-a7f2-4649-b2e3-47df5e85ce9c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b97gp" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.175392 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/ea01cf9d-e1ac-4197-a0ac-830eebd15245-ca-trust-extracted\") pod \"image-registry-697d97f7c8-zsdqk\" (UID: \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\") " pod="openshift-image-registry/image-registry-697d97f7c8-zsdqk" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.178883 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-7wfbg" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.179956 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/1cdfeb06-d9ec-43ad-850c-d685e4e7c8a5-etcd-service-ca\") pod \"etcd-operator-b45778765-5rxsc\" (UID: \"1cdfeb06-d9ec-43ad-850c-d685e4e7c8a5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-5rxsc" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.180216 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/3ed1f27c-300c-4da3-8c4c-742baaf440eb-tmpfs\") pod \"packageserver-d55dfcdfc-rh5k4\" (UID: \"3ed1f27c-300c-4da3-8c4c-742baaf440eb\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rh5k4" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.180264 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/436af592-a7f2-4649-b2e3-47df5e85ce9c-audit-policies\") pod \"apiserver-7bbb656c7d-b97gp\" (UID: \"436af592-a7f2-4649-b2e3-47df5e85ce9c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b97gp" Jan 20 16:44:03 crc kubenswrapper[4558]: E0120 16:44:03.180894 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-20 16:44:03.680872917 +0000 UTC m=+137.441210883 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.181399 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1cdfeb06-d9ec-43ad-850c-d685e4e7c8a5-config\") pod \"etcd-operator-b45778765-5rxsc\" (UID: \"1cdfeb06-d9ec-43ad-850c-d685e4e7c8a5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-5rxsc" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.182018 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/436af592-a7f2-4649-b2e3-47df5e85ce9c-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-b97gp\" (UID: \"436af592-a7f2-4649-b2e3-47df5e85ce9c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b97gp" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.183243 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/3ed1f27c-300c-4da3-8c4c-742baaf440eb-apiservice-cert\") pod \"packageserver-d55dfcdfc-rh5k4\" (UID: \"3ed1f27c-300c-4da3-8c4c-742baaf440eb\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rh5k4" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.185516 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/7241fe86-b7e1-4246-b040-66ca5e799562-machine-approver-tls\") pod \"machine-approver-56656f9798-jq4hg\" (UID: \"7241fe86-b7e1-4246-b040-66ca5e799562\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jq4hg" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.186472 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/436af592-a7f2-4649-b2e3-47df5e85ce9c-audit-dir\") pod \"apiserver-7bbb656c7d-b97gp\" (UID: \"436af592-a7f2-4649-b2e3-47df5e85ce9c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b97gp" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.186496 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/7241fe86-b7e1-4246-b040-66ca5e799562-auth-proxy-config\") pod \"machine-approver-56656f9798-jq4hg\" (UID: \"7241fe86-b7e1-4246-b040-66ca5e799562\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jq4hg" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.186674 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/1cdfeb06-d9ec-43ad-850c-d685e4e7c8a5-etcd-ca\") pod \"etcd-operator-b45778765-5rxsc\" (UID: \"1cdfeb06-d9ec-43ad-850c-d685e4e7c8a5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-5rxsc" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.186874 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7241fe86-b7e1-4246-b040-66ca5e799562-config\") pod \"machine-approver-56656f9798-jq4hg\" (UID: 
\"7241fe86-b7e1-4246-b040-66ca5e799562\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jq4hg" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.187156 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/ea01cf9d-e1ac-4197-a0ac-830eebd15245-registry-tls\") pod \"image-registry-697d97f7c8-zsdqk\" (UID: \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\") " pod="openshift-image-registry/image-registry-697d97f7c8-zsdqk" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.187510 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/436af592-a7f2-4649-b2e3-47df5e85ce9c-encryption-config\") pod \"apiserver-7bbb656c7d-b97gp\" (UID: \"436af592-a7f2-4649-b2e3-47df5e85ce9c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b97gp" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.188242 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/25c0e5f7-93dc-49a4-a31c-b34dbc125f3e-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-l8kh6\" (UID: \"25c0e5f7-93dc-49a4-a31c-b34dbc125f3e\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-l8kh6" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.188339 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/ea01cf9d-e1ac-4197-a0ac-830eebd15245-installation-pull-secrets\") pod \"image-registry-697d97f7c8-zsdqk\" (UID: \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\") " pod="openshift-image-registry/image-registry-697d97f7c8-zsdqk" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.189044 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/ea01cf9d-e1ac-4197-a0ac-830eebd15245-registry-certificates\") pod \"image-registry-697d97f7c8-zsdqk\" (UID: \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\") " pod="openshift-image-registry/image-registry-697d97f7c8-zsdqk" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.189398 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1cdfeb06-d9ec-43ad-850c-d685e4e7c8a5-etcd-client\") pod \"etcd-operator-b45778765-5rxsc\" (UID: \"1cdfeb06-d9ec-43ad-850c-d685e4e7c8a5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-5rxsc" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.190837 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ea01cf9d-e1ac-4197-a0ac-830eebd15245-trusted-ca\") pod \"image-registry-697d97f7c8-zsdqk\" (UID: \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\") " pod="openshift-image-registry/image-registry-697d97f7c8-zsdqk" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.192708 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/25c0e5f7-93dc-49a4-a31c-b34dbc125f3e-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-l8kh6\" (UID: \"25c0e5f7-93dc-49a4-a31c-b34dbc125f3e\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-l8kh6" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.193906 4558 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/3d51bcc0-ce56-4953-b0d6-252bb4a3a66c-metrics-tls\") pod \"ingress-operator-5b745b69d9-jvkk8\" (UID: \"3d51bcc0-ce56-4953-b0d6-252bb4a3a66c\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jvkk8" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.194426 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3d51bcc0-ce56-4953-b0d6-252bb4a3a66c-trusted-ca\") pod \"ingress-operator-5b745b69d9-jvkk8\" (UID: \"3d51bcc0-ce56-4953-b0d6-252bb4a3a66c\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jvkk8" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.197131 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/436af592-a7f2-4649-b2e3-47df5e85ce9c-serving-cert\") pod \"apiserver-7bbb656c7d-b97gp\" (UID: \"436af592-a7f2-4649-b2e3-47df5e85ce9c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b97gp" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.197292 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/3ed1f27c-300c-4da3-8c4c-742baaf440eb-webhook-cert\") pod \"packageserver-d55dfcdfc-rh5k4\" (UID: \"3ed1f27c-300c-4da3-8c4c-742baaf440eb\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rh5k4" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.204650 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ea01cf9d-e1ac-4197-a0ac-830eebd15245-bound-sa-token\") pod \"image-registry-697d97f7c8-zsdqk\" (UID: \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\") " pod="openshift-image-registry/image-registry-697d97f7c8-zsdqk" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.214667 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1cdfeb06-d9ec-43ad-850c-d685e4e7c8a5-serving-cert\") pod \"etcd-operator-b45778765-5rxsc\" (UID: \"1cdfeb06-d9ec-43ad-850c-d685e4e7c8a5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-5rxsc" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.217883 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a9e14748-e028-43ec-8e2c-d2d8ce033c75-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-qdvgg\" (UID: \"a9e14748-e028-43ec-8e2c-d2d8ce033c75\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qdvgg" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.217964 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/963800b2-ec8d-4d8f-9a05-f199a20c8ba9-metrics-tls\") pod \"dns-operator-744455d44c-mshrf\" (UID: \"963800b2-ec8d-4d8f-9a05-f199a20c8ba9\") " pod="openshift-dns-operator/dns-operator-744455d44c-mshrf" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.218469 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/436af592-a7f2-4649-b2e3-47df5e85ce9c-etcd-client\") pod \"apiserver-7bbb656c7d-b97gp\" (UID: \"436af592-a7f2-4649-b2e3-47df5e85ce9c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b97gp" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 
16:44:03.236526 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-vmm7h"] Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.244317 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/3d51bcc0-ce56-4953-b0d6-252bb4a3a66c-bound-sa-token\") pod \"ingress-operator-5b745b69d9-jvkk8\" (UID: \"3d51bcc0-ce56-4953-b0d6-252bb4a3a66c\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jvkk8" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.250416 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m99nx\" (UniqueName: \"kubernetes.io/projected/3ed1f27c-300c-4da3-8c4c-742baaf440eb-kube-api-access-m99nx\") pod \"packageserver-d55dfcdfc-rh5k4\" (UID: \"3ed1f27c-300c-4da3-8c4c-742baaf440eb\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rh5k4" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.264278 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pkb27\" (UniqueName: \"kubernetes.io/projected/3d51bcc0-ce56-4953-b0d6-252bb4a3a66c-kube-api-access-pkb27\") pod \"ingress-operator-5b745b69d9-jvkk8\" (UID: \"3d51bcc0-ce56-4953-b0d6-252bb4a3a66c\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jvkk8" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.275661 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/119fb5fe-2460-4d2d-9db9-452afaa1e93e-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-4w5mk\" (UID: \"119fb5fe-2460-4d2d-9db9-452afaa1e93e\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-4w5mk" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.275698 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/6aa23de2-5f67-4f16-bd7d-eeecb3dfa19f-registration-dir\") pod \"csi-hostpathplugin-9cqxr\" (UID: \"6aa23de2-5f67-4f16-bd7d-eeecb3dfa19f\") " pod="hostpath-provisioner/csi-hostpathplugin-9cqxr" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.275715 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/31eb233b-13fe-40cb-9259-f18574a85e01-secret-volume\") pod \"collect-profiles-29482110-7p5pb\" (UID: \"31eb233b-13fe-40cb-9259-f18574a85e01\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482110-7p5pb" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.275738 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/96b0927f-cc7b-48e8-82bd-bbec85c2b9b2-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-2j5lz\" (UID: \"96b0927f-cc7b-48e8-82bd-bbec85c2b9b2\") " pod="openshift-marketplace/marketplace-operator-79b997595-2j5lz" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.275768 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ad0a9f73-941c-4023-a106-2b0fa19da6a0-config-volume\") pod \"dns-default-gbvfn\" (UID: \"ad0a9f73-941c-4023-a106-2b0fa19da6a0\") " pod="openshift-dns/dns-default-gbvfn" Jan 20 16:44:03 crc 
kubenswrapper[4558]: I0120 16:44:03.275796 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/ad0a9f73-941c-4023-a106-2b0fa19da6a0-metrics-tls\") pod \"dns-default-gbvfn\" (UID: \"ad0a9f73-941c-4023-a106-2b0fa19da6a0\") " pod="openshift-dns/dns-default-gbvfn" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.275822 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-njmr4\" (UniqueName: \"kubernetes.io/projected/6aa23de2-5f67-4f16-bd7d-eeecb3dfa19f-kube-api-access-njmr4\") pod \"csi-hostpathplugin-9cqxr\" (UID: \"6aa23de2-5f67-4f16-bd7d-eeecb3dfa19f\") " pod="hostpath-provisioner/csi-hostpathplugin-9cqxr" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.275856 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8t6d2\" (UniqueName: \"kubernetes.io/projected/31eb233b-13fe-40cb-9259-f18574a85e01-kube-api-access-8t6d2\") pod \"collect-profiles-29482110-7p5pb\" (UID: \"31eb233b-13fe-40cb-9259-f18574a85e01\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482110-7p5pb" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.275982 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9a09b2d6-5243-4a2c-a13f-3b640bfaa3dc-cert\") pod \"ingress-canary-v2c77\" (UID: \"9a09b2d6-5243-4a2c-a13f-3b640bfaa3dc\") " pod="openshift-ingress-canary/ingress-canary-v2c77" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.276001 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nzkhw\" (UniqueName: \"kubernetes.io/projected/96b0927f-cc7b-48e8-82bd-bbec85c2b9b2-kube-api-access-nzkhw\") pod \"marketplace-operator-79b997595-2j5lz\" (UID: \"96b0927f-cc7b-48e8-82bd-bbec85c2b9b2\") " pod="openshift-marketplace/marketplace-operator-79b997595-2j5lz" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.276022 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/8ad94588-fb91-4748-bc0c-c87f686fa9a0-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-pltl7\" (UID: \"8ad94588-fb91-4748-bc0c-c87f686fa9a0\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-pltl7" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.276029 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/6aa23de2-5f67-4f16-bd7d-eeecb3dfa19f-registration-dir\") pod \"csi-hostpathplugin-9cqxr\" (UID: \"6aa23de2-5f67-4f16-bd7d-eeecb3dfa19f\") " pod="hostpath-provisioner/csi-hostpathplugin-9cqxr" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.276042 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/6aa23de2-5f67-4f16-bd7d-eeecb3dfa19f-mountpoint-dir\") pod \"csi-hostpathplugin-9cqxr\" (UID: \"6aa23de2-5f67-4f16-bd7d-eeecb3dfa19f\") " pod="hostpath-provisioner/csi-hostpathplugin-9cqxr" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.276093 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s8z5l\" (UniqueName: \"kubernetes.io/projected/119fb5fe-2460-4d2d-9db9-452afaa1e93e-kube-api-access-s8z5l\") pod \"control-plane-machine-set-operator-78cbb6b69f-4w5mk\" (UID: 
\"119fb5fe-2460-4d2d-9db9-452afaa1e93e\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-4w5mk" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.276114 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/6aa23de2-5f67-4f16-bd7d-eeecb3dfa19f-mountpoint-dir\") pod \"csi-hostpathplugin-9cqxr\" (UID: \"6aa23de2-5f67-4f16-bd7d-eeecb3dfa19f\") " pod="hostpath-provisioner/csi-hostpathplugin-9cqxr" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.276118 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/8ad94588-fb91-4748-bc0c-c87f686fa9a0-proxy-tls\") pod \"machine-config-controller-84d6567774-pltl7\" (UID: \"8ad94588-fb91-4748-bc0c-c87f686fa9a0\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-pltl7" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.276154 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b489m\" (UniqueName: \"kubernetes.io/projected/8ad94588-fb91-4748-bc0c-c87f686fa9a0-kube-api-access-b489m\") pod \"machine-config-controller-84d6567774-pltl7\" (UID: \"8ad94588-fb91-4748-bc0c-c87f686fa9a0\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-pltl7" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.276198 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/f4d440d5-1f47-40bc-bfdd-95267dfde12c-certs\") pod \"machine-config-server-cp5zh\" (UID: \"f4d440d5-1f47-40bc-bfdd-95267dfde12c\") " pod="openshift-machine-config-operator/machine-config-server-cp5zh" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.276214 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/6aa23de2-5f67-4f16-bd7d-eeecb3dfa19f-plugins-dir\") pod \"csi-hostpathplugin-9cqxr\" (UID: \"6aa23de2-5f67-4f16-bd7d-eeecb3dfa19f\") " pod="hostpath-provisioner/csi-hostpathplugin-9cqxr" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.276244 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/a53f5d18-851d-4a86-b349-e53e035d39ce-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-t4dxf\" (UID: \"a53f5d18-851d-4a86-b349-e53e035d39ce\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-t4dxf" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.276273 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/31eb233b-13fe-40cb-9259-f18574a85e01-config-volume\") pod \"collect-profiles-29482110-7p5pb\" (UID: \"31eb233b-13fe-40cb-9259-f18574a85e01\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482110-7p5pb" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.276303 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2px9f\" (UniqueName: \"kubernetes.io/projected/f4d440d5-1f47-40bc-bfdd-95267dfde12c-kube-api-access-2px9f\") pod \"machine-config-server-cp5zh\" (UID: \"f4d440d5-1f47-40bc-bfdd-95267dfde12c\") " pod="openshift-machine-config-operator/machine-config-server-cp5zh" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.276319 
4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2jw85\" (UniqueName: \"kubernetes.io/projected/ad0a9f73-941c-4023-a106-2b0fa19da6a0-kube-api-access-2jw85\") pod \"dns-default-gbvfn\" (UID: \"ad0a9f73-941c-4023-a106-2b0fa19da6a0\") " pod="openshift-dns/dns-default-gbvfn" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.276342 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/96b0927f-cc7b-48e8-82bd-bbec85c2b9b2-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-2j5lz\" (UID: \"96b0927f-cc7b-48e8-82bd-bbec85c2b9b2\") " pod="openshift-marketplace/marketplace-operator-79b997595-2j5lz" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.276363 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/f4d440d5-1f47-40bc-bfdd-95267dfde12c-node-bootstrap-token\") pod \"machine-config-server-cp5zh\" (UID: \"f4d440d5-1f47-40bc-bfdd-95267dfde12c\") " pod="openshift-machine-config-operator/machine-config-server-cp5zh" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.276385 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2clwt\" (UniqueName: \"kubernetes.io/projected/9a09b2d6-5243-4a2c-a13f-3b640bfaa3dc-kube-api-access-2clwt\") pod \"ingress-canary-v2c77\" (UID: \"9a09b2d6-5243-4a2c-a13f-3b640bfaa3dc\") " pod="openshift-ingress-canary/ingress-canary-v2c77" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.276416 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zsdqk\" (UID: \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\") " pod="openshift-image-registry/image-registry-697d97f7c8-zsdqk" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.276447 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/6aa23de2-5f67-4f16-bd7d-eeecb3dfa19f-socket-dir\") pod \"csi-hostpathplugin-9cqxr\" (UID: \"6aa23de2-5f67-4f16-bd7d-eeecb3dfa19f\") " pod="hostpath-provisioner/csi-hostpathplugin-9cqxr" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.276471 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/71c9147f-5ab8-4342-b8e2-e4c37d281efb-signing-key\") pod \"service-ca-9c57cc56f-4t4rs\" (UID: \"71c9147f-5ab8-4342-b8e2-e4c37d281efb\") " pod="openshift-service-ca/service-ca-9c57cc56f-4t4rs" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.276500 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hlcd4\" (UniqueName: \"kubernetes.io/projected/a53f5d18-851d-4a86-b349-e53e035d39ce-kube-api-access-hlcd4\") pod \"package-server-manager-789f6589d5-t4dxf\" (UID: \"a53f5d18-851d-4a86-b349-e53e035d39ce\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-t4dxf" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.276527 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/6aa23de2-5f67-4f16-bd7d-eeecb3dfa19f-csi-data-dir\") pod \"csi-hostpathplugin-9cqxr\" (UID: 
\"6aa23de2-5f67-4f16-bd7d-eeecb3dfa19f\") " pod="hostpath-provisioner/csi-hostpathplugin-9cqxr" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.276547 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vlfwc\" (UniqueName: \"kubernetes.io/projected/71c9147f-5ab8-4342-b8e2-e4c37d281efb-kube-api-access-vlfwc\") pod \"service-ca-9c57cc56f-4t4rs\" (UID: \"71c9147f-5ab8-4342-b8e2-e4c37d281efb\") " pod="openshift-service-ca/service-ca-9c57cc56f-4t4rs" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.276602 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/71c9147f-5ab8-4342-b8e2-e4c37d281efb-signing-cabundle\") pod \"service-ca-9c57cc56f-4t4rs\" (UID: \"71c9147f-5ab8-4342-b8e2-e4c37d281efb\") " pod="openshift-service-ca/service-ca-9c57cc56f-4t4rs" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.277398 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/71c9147f-5ab8-4342-b8e2-e4c37d281efb-signing-cabundle\") pod \"service-ca-9c57cc56f-4t4rs\" (UID: \"71c9147f-5ab8-4342-b8e2-e4c37d281efb\") " pod="openshift-service-ca/service-ca-9c57cc56f-4t4rs" Jan 20 16:44:03 crc kubenswrapper[4558]: E0120 16:44:03.277646 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-20 16:44:03.777617837 +0000 UTC m=+137.537955803 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zsdqk" (UID: "ea01cf9d-e1ac-4197-a0ac-830eebd15245") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.277986 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/6aa23de2-5f67-4f16-bd7d-eeecb3dfa19f-socket-dir\") pod \"csi-hostpathplugin-9cqxr\" (UID: \"6aa23de2-5f67-4f16-bd7d-eeecb3dfa19f\") " pod="hostpath-provisioner/csi-hostpathplugin-9cqxr" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.282456 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/a53f5d18-851d-4a86-b349-e53e035d39ce-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-t4dxf\" (UID: \"a53f5d18-851d-4a86-b349-e53e035d39ce\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-t4dxf" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.282972 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/96b0927f-cc7b-48e8-82bd-bbec85c2b9b2-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-2j5lz\" (UID: \"96b0927f-cc7b-48e8-82bd-bbec85c2b9b2\") " pod="openshift-marketplace/marketplace-operator-79b997595-2j5lz" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.283528 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/119fb5fe-2460-4d2d-9db9-452afaa1e93e-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-4w5mk\" (UID: \"119fb5fe-2460-4d2d-9db9-452afaa1e93e\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-4w5mk" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.289377 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/6aa23de2-5f67-4f16-bd7d-eeecb3dfa19f-csi-data-dir\") pod \"csi-hostpathplugin-9cqxr\" (UID: \"6aa23de2-5f67-4f16-bd7d-eeecb3dfa19f\") " pod="hostpath-provisioner/csi-hostpathplugin-9cqxr" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.290446 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/8ad94588-fb91-4748-bc0c-c87f686fa9a0-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-pltl7\" (UID: \"8ad94588-fb91-4748-bc0c-c87f686fa9a0\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-pltl7" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.290633 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/9a09b2d6-5243-4a2c-a13f-3b640bfaa3dc-cert\") pod \"ingress-canary-v2c77\" (UID: \"9a09b2d6-5243-4a2c-a13f-3b640bfaa3dc\") " pod="openshift-ingress-canary/ingress-canary-v2c77" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.290987 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/6aa23de2-5f67-4f16-bd7d-eeecb3dfa19f-plugins-dir\") pod \"csi-hostpathplugin-9cqxr\" (UID: \"6aa23de2-5f67-4f16-bd7d-eeecb3dfa19f\") " pod="hostpath-provisioner/csi-hostpathplugin-9cqxr" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.291303 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/f4d440d5-1f47-40bc-bfdd-95267dfde12c-node-bootstrap-token\") pod \"machine-config-server-cp5zh\" (UID: \"f4d440d5-1f47-40bc-bfdd-95267dfde12c\") " pod="openshift-machine-config-operator/machine-config-server-cp5zh" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.291589 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ad0a9f73-941c-4023-a106-2b0fa19da6a0-config-volume\") pod \"dns-default-gbvfn\" (UID: \"ad0a9f73-941c-4023-a106-2b0fa19da6a0\") " pod="openshift-dns/dns-default-gbvfn" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.298240 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/ad0a9f73-941c-4023-a106-2b0fa19da6a0-metrics-tls\") pod \"dns-default-gbvfn\" (UID: \"ad0a9f73-941c-4023-a106-2b0fa19da6a0\") " pod="openshift-dns/dns-default-gbvfn" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.298970 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/31eb233b-13fe-40cb-9259-f18574a85e01-config-volume\") pod \"collect-profiles-29482110-7p5pb\" (UID: \"31eb233b-13fe-40cb-9259-f18574a85e01\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482110-7p5pb" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.301055 4558 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/8ad94588-fb91-4748-bc0c-c87f686fa9a0-proxy-tls\") pod \"machine-config-controller-84d6567774-pltl7\" (UID: \"8ad94588-fb91-4748-bc0c-c87f686fa9a0\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-pltl7" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.302651 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/96b0927f-cc7b-48e8-82bd-bbec85c2b9b2-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-2j5lz\" (UID: \"96b0927f-cc7b-48e8-82bd-bbec85c2b9b2\") " pod="openshift-marketplace/marketplace-operator-79b997595-2j5lz" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.302694 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/71c9147f-5ab8-4342-b8e2-e4c37d281efb-signing-key\") pod \"service-ca-9c57cc56f-4t4rs\" (UID: \"71c9147f-5ab8-4342-b8e2-e4c37d281efb\") " pod="openshift-service-ca/service-ca-9c57cc56f-4t4rs" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.302699 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/31eb233b-13fe-40cb-9259-f18574a85e01-secret-volume\") pod \"collect-profiles-29482110-7p5pb\" (UID: \"31eb233b-13fe-40cb-9259-f18574a85e01\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482110-7p5pb" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.309233 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rc7pc\" (UniqueName: \"kubernetes.io/projected/436af592-a7f2-4649-b2e3-47df5e85ce9c-kube-api-access-rc7pc\") pod \"apiserver-7bbb656c7d-b97gp\" (UID: \"436af592-a7f2-4649-b2e3-47df5e85ce9c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b97gp" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.316894 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/f4d440d5-1f47-40bc-bfdd-95267dfde12c-certs\") pod \"machine-config-server-cp5zh\" (UID: \"f4d440d5-1f47-40bc-bfdd-95267dfde12c\") " pod="openshift-machine-config-operator/machine-config-server-cp5zh" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.318154 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b97gp" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.330567 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-khtcb\" (UniqueName: \"kubernetes.io/projected/7241fe86-b7e1-4246-b040-66ca5e799562-kube-api-access-khtcb\") pod \"machine-approver-56656f9798-jq4hg\" (UID: \"7241fe86-b7e1-4246-b040-66ca5e799562\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jq4hg" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.342382 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jvkk8" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.361847 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4b2nh\" (UniqueName: \"kubernetes.io/projected/59e85616-edec-46fa-aa93-a1ecb76b415c-kube-api-access-4b2nh\") pod \"downloads-7954f5f757-225km\" (UID: \"59e85616-edec-46fa-aa93-a1ecb76b415c\") " pod="openshift-console/downloads-7954f5f757-225km" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.367988 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7hmsc\" (UniqueName: \"kubernetes.io/projected/ea01cf9d-e1ac-4197-a0ac-830eebd15245-kube-api-access-7hmsc\") pod \"image-registry-697d97f7c8-zsdqk\" (UID: \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\") " pod="openshift-image-registry/image-registry-697d97f7c8-zsdqk" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.377379 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 20 16:44:03 crc kubenswrapper[4558]: E0120 16:44:03.377746 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-20 16:44:03.877726015 +0000 UTC m=+137.638063982 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.385017 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fdlvs\" (UniqueName: \"kubernetes.io/projected/25c0e5f7-93dc-49a4-a31c-b34dbc125f3e-kube-api-access-fdlvs\") pod \"kube-storage-version-migrator-operator-b67b599dd-l8kh6\" (UID: \"25c0e5f7-93dc-49a4-a31c-b34dbc125f3e\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-l8kh6" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.411025 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c286z\" (UniqueName: \"kubernetes.io/projected/61738345-9c27-415a-a089-e0000fe5c92a-kube-api-access-c286z\") pod \"migrator-59844c95c7-sgbgg\" (UID: \"61738345-9c27-415a-a089-e0000fe5c92a\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-sgbgg" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.437139 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a9e14748-e028-43ec-8e2c-d2d8ce033c75-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-qdvgg\" (UID: \"a9e14748-e028-43ec-8e2c-d2d8ce033c75\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qdvgg" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 
16:44:03.447899 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v7xrc\" (UniqueName: \"kubernetes.io/projected/963800b2-ec8d-4d8f-9a05-f199a20c8ba9-kube-api-access-v7xrc\") pod \"dns-operator-744455d44c-mshrf\" (UID: \"963800b2-ec8d-4d8f-9a05-f199a20c8ba9\") " pod="openshift-dns-operator/dns-operator-744455d44c-mshrf" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.453691 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-mshrf" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.457971 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-l8kh6" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.468905 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-25jqm"] Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.482186 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xfqdv\" (UniqueName: \"kubernetes.io/projected/1cdfeb06-d9ec-43ad-850c-d685e4e7c8a5-kube-api-access-xfqdv\") pod \"etcd-operator-b45778765-5rxsc\" (UID: \"1cdfeb06-d9ec-43ad-850c-d685e4e7c8a5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-5rxsc" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.482579 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zsdqk\" (UID: \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\") " pod="openshift-image-registry/image-registry-697d97f7c8-zsdqk" Jan 20 16:44:03 crc kubenswrapper[4558]: E0120 16:44:03.482949 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-20 16:44:03.982934473 +0000 UTC m=+137.743272440 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zsdqk" (UID: "ea01cf9d-e1ac-4197-a0ac-830eebd15245") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.501844 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s8z5l\" (UniqueName: \"kubernetes.io/projected/119fb5fe-2460-4d2d-9db9-452afaa1e93e-kube-api-access-s8z5l\") pod \"control-plane-machine-set-operator-78cbb6b69f-4w5mk\" (UID: \"119fb5fe-2460-4d2d-9db9-452afaa1e93e\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-4w5mk" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.501909 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-dqf5x"] Jan 20 16:44:03 crc kubenswrapper[4558]: W0120 16:44:03.502146 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod31b93be8_46ba_485b_b5d3_639064825683.slice/crio-a790e3210960207557e854d73bdcb663153defaccb3c1904a3c739b840e61ea1 WatchSource:0}: Error finding container a790e3210960207557e854d73bdcb663153defaccb3c1904a3c739b840e61ea1: Status 404 returned error can't find the container with id a790e3210960207557e854d73bdcb663153defaccb3c1904a3c739b840e61ea1 Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.503563 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rh5k4" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.512878 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-4w5mk" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.521815 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2clwt\" (UniqueName: \"kubernetes.io/projected/9a09b2d6-5243-4a2c-a13f-3b640bfaa3dc-kube-api-access-2clwt\") pod \"ingress-canary-v2c77\" (UID: \"9a09b2d6-5243-4a2c-a13f-3b640bfaa3dc\") " pod="openshift-ingress-canary/ingress-canary-v2c77" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.550195 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-49k6d"] Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.550719 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2jw85\" (UniqueName: \"kubernetes.io/projected/ad0a9f73-941c-4023-a106-2b0fa19da6a0-kube-api-access-2jw85\") pod \"dns-default-gbvfn\" (UID: \"ad0a9f73-941c-4023-a106-2b0fa19da6a0\") " pod="openshift-dns/dns-default-gbvfn" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.551097 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-canary/ingress-canary-v2c77" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.555737 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8xc26"] Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.563343 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2px9f\" (UniqueName: \"kubernetes.io/projected/f4d440d5-1f47-40bc-bfdd-95267dfde12c-kube-api-access-2px9f\") pod \"machine-config-server-cp5zh\" (UID: \"f4d440d5-1f47-40bc-bfdd-95267dfde12c\") " pod="openshift-machine-config-operator/machine-config-server-cp5zh" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.570862 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-7flzc"] Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.575854 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-gbvfn" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.583579 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-cp5zh" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.584260 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 20 16:44:03 crc kubenswrapper[4558]: E0120 16:44:03.584717 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-20 16:44:04.084687207 +0000 UTC m=+137.845025174 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.584929 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-7wfbg"] Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.590835 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hlcd4\" (UniqueName: \"kubernetes.io/projected/a53f5d18-851d-4a86-b349-e53e035d39ce-kube-api-access-hlcd4\") pod \"package-server-manager-789f6589d5-t4dxf\" (UID: \"a53f5d18-851d-4a86-b349-e53e035d39ce\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-t4dxf" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.592729 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b489m\" (UniqueName: \"kubernetes.io/projected/8ad94588-fb91-4748-bc0c-c87f686fa9a0-kube-api-access-b489m\") pod \"machine-config-controller-84d6567774-pltl7\" (UID: \"8ad94588-fb91-4748-bc0c-c87f686fa9a0\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-pltl7" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.606111 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jq4hg" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.607950 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nzkhw\" (UniqueName: \"kubernetes.io/projected/96b0927f-cc7b-48e8-82bd-bbec85c2b9b2-kube-api-access-nzkhw\") pod \"marketplace-operator-79b997595-2j5lz\" (UID: \"96b0927f-cc7b-48e8-82bd-bbec85c2b9b2\") " pod="openshift-marketplace/marketplace-operator-79b997595-2j5lz" Jan 20 16:44:03 crc kubenswrapper[4558]: W0120 16:44:03.610101 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podca06f669_4a81_4e6c_abc1_2800cd58c188.slice/crio-276b241a2a279db3d812d88c13705d9a0092f5f892359905538c8c5942623d4b WatchSource:0}: Error finding container 276b241a2a279db3d812d88c13705d9a0092f5f892359905538c8c5942623d4b: Status 404 returned error can't find the container with id 276b241a2a279db3d812d88c13705d9a0092f5f892359905538c8c5942623d4b Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.612122 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/downloads-7954f5f757-225km" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.645924 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vlfwc\" (UniqueName: \"kubernetes.io/projected/71c9147f-5ab8-4342-b8e2-e4c37d281efb-kube-api-access-vlfwc\") pod \"service-ca-9c57cc56f-4t4rs\" (UID: \"71c9147f-5ab8-4342-b8e2-e4c37d281efb\") " pod="openshift-service-ca/service-ca-9c57cc56f-4t4rs" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.654604 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-njmr4\" (UniqueName: \"kubernetes.io/projected/6aa23de2-5f67-4f16-bd7d-eeecb3dfa19f-kube-api-access-njmr4\") pod \"csi-hostpathplugin-9cqxr\" (UID: \"6aa23de2-5f67-4f16-bd7d-eeecb3dfa19f\") " pod="hostpath-provisioner/csi-hostpathplugin-9cqxr" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.675245 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-5rxsc" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.685813 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zsdqk\" (UID: \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\") " pod="openshift-image-registry/image-registry-697d97f7c8-zsdqk" Jan 20 16:44:03 crc kubenswrapper[4558]: E0120 16:44:03.686249 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-20 16:44:04.186236898 +0000 UTC m=+137.946574865 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zsdqk" (UID: "ea01cf9d-e1ac-4197-a0ac-830eebd15245") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.694657 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8t6d2\" (UniqueName: \"kubernetes.io/projected/31eb233b-13fe-40cb-9259-f18574a85e01-kube-api-access-8t6d2\") pod \"collect-profiles-29482110-7p5pb\" (UID: \"31eb233b-13fe-40cb-9259-f18574a85e01\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482110-7p5pb" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.694687 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qdvgg" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.705604 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-sgbgg" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.787676 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 20 16:44:03 crc kubenswrapper[4558]: E0120 16:44:03.797481 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-20 16:44:04.297426007 +0000 UTC m=+138.057763975 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.797601 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zsdqk\" (UID: \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\") " pod="openshift-image-registry/image-registry-697d97f7c8-zsdqk" Jan 20 16:44:03 crc kubenswrapper[4558]: E0120 16:44:03.798722 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-20 16:44:04.29870914 +0000 UTC m=+138.059047106 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zsdqk" (UID: "ea01cf9d-e1ac-4197-a0ac-830eebd15245") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.821842 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-4t4rs" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.825362 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-jvkk8"] Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.826465 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-t4dxf" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.832681 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-pltl7" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.833793 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-b97gp"] Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.837672 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29482110-7p5pb" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.853572 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-2j5lz" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.874058 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-9cqxr" Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.898748 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 20 16:44:03 crc kubenswrapper[4558]: E0120 16:44:03.898963 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-20 16:44:04.398943888 +0000 UTC m=+138.159281854 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:44:03 crc kubenswrapper[4558]: I0120 16:44:03.899035 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zsdqk\" (UID: \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\") " pod="openshift-image-registry/image-registry-697d97f7c8-zsdqk" Jan 20 16:44:03 crc kubenswrapper[4558]: E0120 16:44:03.899346 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-20 16:44:04.399339263 +0000 UTC m=+138.159677231 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zsdqk" (UID: "ea01cf9d-e1ac-4197-a0ac-830eebd15245") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:44:04 crc kubenswrapper[4558]: I0120 16:44:04.000698 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 20 16:44:04 crc kubenswrapper[4558]: E0120 16:44:04.001074 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-20 16:44:04.50105049 +0000 UTC m=+138.261388457 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:44:04 crc kubenswrapper[4558]: I0120 16:44:04.001293 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zsdqk\" (UID: \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\") " pod="openshift-image-registry/image-registry-697d97f7c8-zsdqk" Jan 20 16:44:04 crc kubenswrapper[4558]: E0120 16:44:04.001755 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-20 16:44:04.501741444 +0000 UTC m=+138.262079411 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zsdqk" (UID: "ea01cf9d-e1ac-4197-a0ac-830eebd15245") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:44:04 crc kubenswrapper[4558]: I0120 16:44:04.025917 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8xc26" event={"ID":"d49afd2e-470e-4bd4-a30d-5014f423510b","Type":"ContainerStarted","Data":"543666f5555bacc2f6dd29527917e0b83bc311f03bc75417746b3c7918c0800e"} Jan 20 16:44:04 crc kubenswrapper[4558]: I0120 16:44:04.069794 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-wmcfr" event={"ID":"203c31e0-e1fc-4da7-8275-91295c22f307","Type":"ContainerStarted","Data":"19753c8022161cdfd70ebc875b566d4c754ae6baadbdb2910fb5b214f17a7d39"} Jan 20 16:44:04 crc kubenswrapper[4558]: I0120 16:44:04.106468 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-wmcfr" Jan 20 16:44:04 crc kubenswrapper[4558]: I0120 16:44:04.111028 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 20 16:44:04 crc kubenswrapper[4558]: E0120 16:44:04.111398 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-20 16:44:04.611358856 +0000 UTC m=+138.371696823 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:44:04 crc kubenswrapper[4558]: I0120 16:44:04.123666 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zsdqk\" (UID: \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\") " pod="openshift-image-registry/image-registry-697d97f7c8-zsdqk" Jan 20 16:44:04 crc kubenswrapper[4558]: E0120 16:44:04.123998 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-20 16:44:04.623985333 +0000 UTC m=+138.384323299 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zsdqk" (UID: "ea01cf9d-e1ac-4197-a0ac-830eebd15245") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:44:04 crc kubenswrapper[4558]: I0120 16:44:04.136796 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-7wfbg" event={"ID":"34b7c77f-6a7d-43de-9ee4-bdba78dc8248","Type":"ContainerStarted","Data":"db8cc23538a13bd25ed470ccdb47e02cab7e3020c472171c70bf9e82ad02765d"} Jan 20 16:44:04 crc kubenswrapper[4558]: I0120 16:44:04.142825 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-gbvfn"] Jan 20 16:44:04 crc kubenswrapper[4558]: I0120 16:44:04.160056 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-l8kh6"] Jan 20 16:44:04 crc kubenswrapper[4558]: I0120 16:44:04.166820 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rh5k4"] Jan 20 16:44:04 crc kubenswrapper[4558]: I0120 16:44:04.206373 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-49k6d" event={"ID":"ca06f669-4a81-4e6c-abc1-2800cd58c188","Type":"ContainerStarted","Data":"276b241a2a279db3d812d88c13705d9a0092f5f892359905538c8c5942623d4b"} Jan 20 16:44:04 crc kubenswrapper[4558]: I0120 16:44:04.225822 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 20 16:44:04 crc kubenswrapper[4558]: E0120 16:44:04.227127 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-20 16:44:04.727105868 +0000 UTC m=+138.487443836 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:44:04 crc kubenswrapper[4558]: I0120 16:44:04.264524 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8rqtk" event={"ID":"b88a23ca-e1d6-4e41-9aee-6f9c2c572f42","Type":"ContainerStarted","Data":"99e08aba36a4d7712805aa1cc5efdbc0c6b24d6f65db9b6b6d5ec1b5fe45e7a9"} Jan 20 16:44:04 crc kubenswrapper[4558]: I0120 16:44:04.264562 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8rqtk" event={"ID":"b88a23ca-e1d6-4e41-9aee-6f9c2c572f42","Type":"ContainerStarted","Data":"c66663d9ed79ee02096e1081afe3e381242b48aa03af0c4fad67c05db73783b2"} Jan 20 16:44:04 crc kubenswrapper[4558]: I0120 16:44:04.279299 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-c97pw" event={"ID":"1dd6e1c0-24f1-43fc-81d6-b0d0e3ed36c2","Type":"ContainerStarted","Data":"8a40b31e93676557ad2d19d447bf46ae95ef3f26b9991d1b461b9771280e8fb3"} Jan 20 16:44:04 crc kubenswrapper[4558]: I0120 16:44:04.279349 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-c97pw" event={"ID":"1dd6e1c0-24f1-43fc-81d6-b0d0e3ed36c2","Type":"ContainerStarted","Data":"d0832b954a7df465688c9f54d0b0197d366ae6883a7a682d1e10460bf3ee4676"} Jan 20 16:44:04 crc kubenswrapper[4558]: I0120 16:44:04.283987 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zd9qc" event={"ID":"04ab3f66-5485-4b6f-9293-ed76b2695d50","Type":"ContainerStarted","Data":"5458e46e56f7d65eab79c3c48391c11132277e97ba84585b2d13259aa9368eb2"} Jan 20 16:44:04 crc kubenswrapper[4558]: I0120 16:44:04.284033 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zd9qc" event={"ID":"04ab3f66-5485-4b6f-9293-ed76b2695d50","Type":"ContainerStarted","Data":"fc848064a8e34f306dacbe53cfc5ac397d6489a3ad20433bb5c67d23d7dccbd4"} Jan 20 16:44:04 crc kubenswrapper[4558]: I0120 16:44:04.284910 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zd9qc" Jan 20 16:44:04 crc kubenswrapper[4558]: I0120 16:44:04.294454 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-v2c77"] Jan 20 16:44:04 crc kubenswrapper[4558]: I0120 16:44:04.298931 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zd9qc" Jan 20 16:44:04 crc kubenswrapper[4558]: I0120 16:44:04.302244 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-25jqm" event={"ID":"31b93be8-46ba-485b-b5d3-639064825683","Type":"ContainerStarted","Data":"dd08e0b10af439a4464a9c27fed9494695f216a5067273dcd579aed4ed7e01c9"} Jan 20 16:44:04 crc 
kubenswrapper[4558]: I0120 16:44:04.302275 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-25jqm" event={"ID":"31b93be8-46ba-485b-b5d3-639064825683","Type":"ContainerStarted","Data":"a790e3210960207557e854d73bdcb663153defaccb3c1904a3c739b840e61ea1"} Jan 20 16:44:04 crc kubenswrapper[4558]: I0120 16:44:04.315258 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-4w5mk"] Jan 20 16:44:04 crc kubenswrapper[4558]: I0120 16:44:04.317835 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-mshrf"] Jan 20 16:44:04 crc kubenswrapper[4558]: I0120 16:44:04.329687 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-225km"] Jan 20 16:44:04 crc kubenswrapper[4558]: I0120 16:44:04.329902 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zsdqk\" (UID: \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\") " pod="openshift-image-registry/image-registry-697d97f7c8-zsdqk" Jan 20 16:44:04 crc kubenswrapper[4558]: E0120 16:44:04.333747 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-20 16:44:04.833728066 +0000 UTC m=+138.594066033 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zsdqk" (UID: "ea01cf9d-e1ac-4197-a0ac-830eebd15245") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:44:04 crc kubenswrapper[4558]: I0120 16:44:04.351041 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-sgbgg"] Jan 20 16:44:04 crc kubenswrapper[4558]: I0120 16:44:04.404430 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-cp5zh" event={"ID":"f4d440d5-1f47-40bc-bfdd-95267dfde12c","Type":"ContainerStarted","Data":"ed35561fc721141aa8835a416505849680eaf1032f625d9b11100afe348b0cb3"} Jan 20 16:44:04 crc kubenswrapper[4558]: I0120 16:44:04.429735 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-wmcfr" Jan 20 16:44:04 crc kubenswrapper[4558]: I0120 16:44:04.430911 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 20 16:44:04 crc kubenswrapper[4558]: E0120 16:44:04.432533 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2026-01-20 16:44:04.932516293 +0000 UTC m=+138.692854260 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:44:04 crc kubenswrapper[4558]: I0120 16:44:04.463056 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qdvgg"] Jan 20 16:44:04 crc kubenswrapper[4558]: I0120 16:44:04.507623 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-7flzc" event={"ID":"1d076664-8f86-4e46-a67a-85539915bb04","Type":"ContainerStarted","Data":"86cfb3ddbcc44ba50faaa28e8570cd849a84822238020864b61e0e67e091e141"} Jan 20 16:44:04 crc kubenswrapper[4558]: I0120 16:44:04.528433 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-ppqsn" event={"ID":"20de26f9-85a3-42a2-ba6b-0839c0657141","Type":"ContainerStarted","Data":"b406dde493e7d1a0c2fa609635f314dd018e1eff11f57e6ed920c34723054d4e"} Jan 20 16:44:04 crc kubenswrapper[4558]: I0120 16:44:04.529267 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-ppqsn" Jan 20 16:44:04 crc kubenswrapper[4558]: I0120 16:44:04.533344 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zsdqk\" (UID: \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\") " pod="openshift-image-registry/image-registry-697d97f7c8-zsdqk" Jan 20 16:44:04 crc kubenswrapper[4558]: E0120 16:44:04.533733 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-20 16:44:05.03370362 +0000 UTC m=+138.794041587 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zsdqk" (UID: "ea01cf9d-e1ac-4197-a0ac-830eebd15245") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:44:04 crc kubenswrapper[4558]: I0120 16:44:04.534026 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jq4hg" event={"ID":"7241fe86-b7e1-4246-b040-66ca5e799562","Type":"ContainerStarted","Data":"1a3b98c655c8b8f614117172fc0a0ce03e1b1fb8b241f2e814c3b03901dedca1"} Jan 20 16:44:04 crc kubenswrapper[4558]: I0120 16:44:04.582792 4558 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-nwjzd container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.29:6443/healthz\": dial tcp 10.217.0.29:6443: connect: connection refused" start-of-body= Jan 20 16:44:04 crc kubenswrapper[4558]: I0120 16:44:04.583079 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-nwjzd" podUID="4654626b-36d0-4072-a04c-d8ee0678fd50" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.29:6443/healthz\": dial tcp 10.217.0.29:6443: connect: connection refused" Jan 20 16:44:04 crc kubenswrapper[4558]: I0120 16:44:04.623964 4558 generic.go:334] "Generic (PLEG): container finished" podID="f6c99edf-1afb-476e-b63e-ee73c372499c" containerID="134708ccd776f9c1155b685d80d4d0a5c7e6c9f007d543175f3f5b90aa5dda90" exitCode=0 Jan 20 16:44:04 crc kubenswrapper[4558]: I0120 16:44:04.648097 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 20 16:44:04 crc kubenswrapper[4558]: E0120 16:44:04.649244 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-20 16:44:05.149146408 +0000 UTC m=+138.909484376 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:44:04 crc kubenswrapper[4558]: I0120 16:44:04.649601 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zsdqk\" (UID: \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\") " pod="openshift-image-registry/image-registry-697d97f7c8-zsdqk" Jan 20 16:44:04 crc kubenswrapper[4558]: E0120 16:44:04.654665 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-20 16:44:05.154642704 +0000 UTC m=+138.914980672 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zsdqk" (UID: "ea01cf9d-e1ac-4197-a0ac-830eebd15245") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:44:04 crc kubenswrapper[4558]: I0120 16:44:04.750837 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 20 16:44:04 crc kubenswrapper[4558]: E0120 16:44:04.750990 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-20 16:44:05.250957793 +0000 UTC m=+139.011295761 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:44:04 crc kubenswrapper[4558]: I0120 16:44:04.751483 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zsdqk\" (UID: \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\") " pod="openshift-image-registry/image-registry-697d97f7c8-zsdqk" Jan 20 16:44:04 crc kubenswrapper[4558]: E0120 16:44:04.753699 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-20 16:44:05.253681735 +0000 UTC m=+139.014019702 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zsdqk" (UID: "ea01cf9d-e1ac-4197-a0ac-830eebd15245") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:44:04 crc kubenswrapper[4558]: I0120 16:44:04.838432 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-nwjzd" Jan 20 16:44:04 crc kubenswrapper[4558]: I0120 16:44:04.838480 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-vmm7h" Jan 20 16:44:04 crc kubenswrapper[4558]: I0120 16:44:04.838515 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-vmm7h" Jan 20 16:44:04 crc kubenswrapper[4558]: I0120 16:44:04.838527 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-nwjzd" event={"ID":"4654626b-36d0-4072-a04c-d8ee0678fd50","Type":"ContainerStarted","Data":"93a8a520bcd4319cde8907bbd948bb0c6da3eb3d67bae6be2a6e7fb65cac45fb"} Jan 20 16:44:04 crc kubenswrapper[4558]: I0120 16:44:04.838542 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-p5lzn" Jan 20 16:44:04 crc kubenswrapper[4558]: I0120 16:44:04.838552 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-nwjzd" event={"ID":"4654626b-36d0-4072-a04c-d8ee0678fd50","Type":"ContainerStarted","Data":"7afde49da87647c5e61c4acc968cf8fcd828569cb8c1a0fe790df704633e922f"} Jan 20 16:44:04 crc kubenswrapper[4558]: I0120 16:44:04.838567 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-p5lzn" Jan 20 16:44:04 crc kubenswrapper[4558]: I0120 16:44:04.838575 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-vmm7h" event={"ID":"7f63cf5d-23fe-4e8b-a9a7-3ce5c0c30aad","Type":"ContainerStarted","Data":"503969b54b9081a8c984c77ef00b3babbe7301561e2b587f91900396fe94ae13"} Jan 20 16:44:04 crc kubenswrapper[4558]: I0120 16:44:04.838584 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-vmm7h" event={"ID":"7f63cf5d-23fe-4e8b-a9a7-3ce5c0c30aad","Type":"ContainerStarted","Data":"9f6d91d11789f4fc3597c708c39e57ff6c1b8b642451ac94de240adcd054b40c"} Jan 20 16:44:04 crc kubenswrapper[4558]: I0120 16:44:04.838594 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-sjzfb" event={"ID":"f6c99edf-1afb-476e-b63e-ee73c372499c","Type":"ContainerDied","Data":"134708ccd776f9c1155b685d80d4d0a5c7e6c9f007d543175f3f5b90aa5dda90"} Jan 20 16:44:04 crc kubenswrapper[4558]: I0120 16:44:04.838609 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-vkkjt" event={"ID":"84f45a25-f77d-4c4c-88b6-5bdc4c286f10","Type":"ContainerStarted","Data":"aa7a2e4623e85257977f5a3e3f1c08156069fe3a9cc154afcbb05b7972094ae1"} Jan 20 16:44:04 crc kubenswrapper[4558]: I0120 16:44:04.838618 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-vkkjt" event={"ID":"84f45a25-f77d-4c4c-88b6-5bdc4c286f10","Type":"ContainerStarted","Data":"5119832d537925500318d002e05acd9f144649e53df29b693432b7c0eb8c1d41"} Jan 20 16:44:04 crc kubenswrapper[4558]: I0120 16:44:04.838628 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-p5lzn" event={"ID":"a8761e3d-bfc2-481f-a29e-15f7566a5881","Type":"ContainerStarted","Data":"74b4d45868390a9861bedb6fabcc324a9ae9c6e9dd343554300331d107cf1c6d"} Jan 20 16:44:04 crc kubenswrapper[4558]: I0120 16:44:04.838646 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-p5lzn" event={"ID":"a8761e3d-bfc2-481f-a29e-15f7566a5881","Type":"ContainerStarted","Data":"7497e1c31780b4de96920b9e67ee5d9d85a46772aed7e3c40e3d5d1a3949b0fe"} Jan 20 16:44:04 crc kubenswrapper[4558]: I0120 16:44:04.854052 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 20 16:44:04 crc kubenswrapper[4558]: E0120 16:44:04.854417 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-20 16:44:05.35440138 +0000 UTC m=+139.114739347 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:44:04 crc kubenswrapper[4558]: I0120 16:44:04.854959 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zsdqk\" (UID: \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\") " pod="openshift-image-registry/image-registry-697d97f7c8-zsdqk" Jan 20 16:44:04 crc kubenswrapper[4558]: E0120 16:44:04.855314 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-20 16:44:05.355299665 +0000 UTC m=+139.115637632 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zsdqk" (UID: "ea01cf9d-e1ac-4197-a0ac-830eebd15245") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:44:04 crc kubenswrapper[4558]: I0120 16:44:04.871744 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-t225g" event={"ID":"c2b3de8e-9eaa-43d7-a061-674af5e035cb","Type":"ContainerStarted","Data":"e9553821ac5d05520a8e73a629cc2f5168c903ec0af450aef3f2f7532badfa0a"} Jan 20 16:44:04 crc kubenswrapper[4558]: I0120 16:44:04.871792 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-t225g" event={"ID":"c2b3de8e-9eaa-43d7-a061-674af5e035cb","Type":"ContainerStarted","Data":"7f0e80865945a529cda1ad18f57e0cab543a8ad4eed6a7d6d2ab9621ef965ce0"} Jan 20 16:44:04 crc kubenswrapper[4558]: I0120 16:44:04.876575 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-t225g" Jan 20 16:44:04 crc kubenswrapper[4558]: I0120 16:44:04.907660 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-rh247" event={"ID":"9e148c70-cb30-40cf-a333-65c0fdf0aa35","Type":"ContainerStarted","Data":"a74f09504a701282432fca8d99389e184196c9f969b7912b4ba432be72bf4505"} Jan 20 16:44:04 crc kubenswrapper[4558]: I0120 16:44:04.907699 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-rh247" event={"ID":"9e148c70-cb30-40cf-a333-65c0fdf0aa35","Type":"ContainerStarted","Data":"7289ad705f59e936398b64e5ab8b634c5014570dc889450807fdf614d418f2d0"} Jan 20 16:44:04 crc kubenswrapper[4558]: I0120 16:44:04.937906 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-t225g" Jan 20 16:44:04 crc kubenswrapper[4558]: 
I0120 16:44:04.956650 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 20 16:44:04 crc kubenswrapper[4558]: E0120 16:44:04.957023 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-20 16:44:05.457009148 +0000 UTC m=+139.217347115 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:44:04 crc kubenswrapper[4558]: I0120 16:44:04.974233 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-lhmdh" event={"ID":"dd7b53bb-d740-497c-a36e-87e51d6f05a6","Type":"ContainerStarted","Data":"5640d2dba8d4d4223739aac688df00e0e4bd854674b1de562daa2b66cc2a5108"} Jan 20 16:44:04 crc kubenswrapper[4558]: I0120 16:44:04.974287 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-lhmdh" event={"ID":"dd7b53bb-d740-497c-a36e-87e51d6f05a6","Type":"ContainerStarted","Data":"0d9b60603a6be01eb70b927705a9134c95d87e7e2cba0cf840bf6e4f0f2bff3f"} Jan 20 16:44:05 crc kubenswrapper[4558]: I0120 16:44:05.018517 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-dqf5x" event={"ID":"206f3850-b746-456c-9500-4d862f983a1e","Type":"ContainerStarted","Data":"2329ec04abe2e0277a30d956568520ae2aa6c417f9e64a66b3dc7f37d0fcab9d"} Jan 20 16:44:05 crc kubenswrapper[4558]: I0120 16:44:05.018571 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-dqf5x" event={"ID":"206f3850-b746-456c-9500-4d862f983a1e","Type":"ContainerStarted","Data":"e6680028568a42ece168d0c58cc3b74384fa35aeb5b58c3d450d1484cbe94114"} Jan 20 16:44:05 crc kubenswrapper[4558]: I0120 16:44:05.059325 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zsdqk\" (UID: \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\") " pod="openshift-image-registry/image-registry-697d97f7c8-zsdqk" Jan 20 16:44:05 crc kubenswrapper[4558]: E0120 16:44:05.067703 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-20 16:44:05.567686349 +0000 UTC m=+139.328024317 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zsdqk" (UID: "ea01cf9d-e1ac-4197-a0ac-830eebd15245") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:44:05 crc kubenswrapper[4558]: I0120 16:44:05.071616 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-h7vz8" event={"ID":"d2311c1a-55c3-437c-ae71-881861de70ff","Type":"ContainerStarted","Data":"bcc03ef4a89d8aa24976253b003271baf46ce7bfb20137b023c26ee6f6b9c650"} Jan 20 16:44:05 crc kubenswrapper[4558]: I0120 16:44:05.104869 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-pltl7"] Jan 20 16:44:05 crc kubenswrapper[4558]: I0120 16:44:05.141367 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-vkkjt" Jan 20 16:44:05 crc kubenswrapper[4558]: I0120 16:44:05.158324 4558 patch_prober.go:28] interesting pod/router-default-5444994796-vkkjt container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 20 16:44:05 crc kubenswrapper[4558]: [-]has-synced failed: reason withheld Jan 20 16:44:05 crc kubenswrapper[4558]: [+]process-running ok Jan 20 16:44:05 crc kubenswrapper[4558]: healthz check failed Jan 20 16:44:05 crc kubenswrapper[4558]: I0120 16:44:05.158370 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-vkkjt" podUID="84f45a25-f77d-4c4c-88b6-5bdc4c286f10" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 20 16:44:05 crc kubenswrapper[4558]: I0120 16:44:05.159802 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 20 16:44:05 crc kubenswrapper[4558]: E0120 16:44:05.161043 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-20 16:44:05.661018267 +0000 UTC m=+139.421356234 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:44:05 crc kubenswrapper[4558]: I0120 16:44:05.194149 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-4t4rs"] Jan 20 16:44:05 crc kubenswrapper[4558]: I0120 16:44:05.268612 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zsdqk\" (UID: \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\") " pod="openshift-image-registry/image-registry-697d97f7c8-zsdqk" Jan 20 16:44:05 crc kubenswrapper[4558]: E0120 16:44:05.269343 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-20 16:44:05.769320375 +0000 UTC m=+139.529658342 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zsdqk" (UID: "ea01cf9d-e1ac-4197-a0ac-830eebd15245") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:44:05 crc kubenswrapper[4558]: I0120 16:44:05.343177 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-c97pw" podStartSLOduration=121.343146868 podStartE2EDuration="2m1.343146868s" podCreationTimestamp="2026-01-20 16:42:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:44:05.301221068 +0000 UTC m=+139.061559036" watchObservedRunningTime="2026-01-20 16:44:05.343146868 +0000 UTC m=+139.103484835" Jan 20 16:44:05 crc kubenswrapper[4558]: I0120 16:44:05.345302 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-lhmdh" podStartSLOduration=121.345292809 podStartE2EDuration="2m1.345292809s" podCreationTimestamp="2026-01-20 16:42:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:44:05.342847533 +0000 UTC m=+139.103185500" watchObservedRunningTime="2026-01-20 16:44:05.345292809 +0000 UTC m=+139.105630776" Jan 20 16:44:05 crc kubenswrapper[4558]: I0120 16:44:05.372470 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 20 16:44:05 crc kubenswrapper[4558]: I0120 16:44:05.378716 4558 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-dqf5x" podStartSLOduration=121.378699897 podStartE2EDuration="2m1.378699897s" podCreationTimestamp="2026-01-20 16:42:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:44:05.372173075 +0000 UTC m=+139.132511043" watchObservedRunningTime="2026-01-20 16:44:05.378699897 +0000 UTC m=+139.139037864" Jan 20 16:44:05 crc kubenswrapper[4558]: E0120 16:44:05.379054 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-20 16:44:05.879025642 +0000 UTC m=+139.639363609 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:44:05 crc kubenswrapper[4558]: I0120 16:44:05.384912 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-5rxsc"] Jan 20 16:44:05 crc kubenswrapper[4558]: I0120 16:44:05.445367 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-nwjzd" podStartSLOduration=121.445349481 podStartE2EDuration="2m1.445349481s" podCreationTimestamp="2026-01-20 16:42:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:44:05.405747249 +0000 UTC m=+139.166085216" watchObservedRunningTime="2026-01-20 16:44:05.445349481 +0000 UTC m=+139.205687449" Jan 20 16:44:05 crc kubenswrapper[4558]: I0120 16:44:05.445955 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-8rqtk" podStartSLOduration=121.445950657 podStartE2EDuration="2m1.445950657s" podCreationTimestamp="2026-01-20 16:42:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:44:05.43798632 +0000 UTC m=+139.198324288" watchObservedRunningTime="2026-01-20 16:44:05.445950657 +0000 UTC m=+139.206288624" Jan 20 16:44:05 crc kubenswrapper[4558]: I0120 16:44:05.479299 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-vkkjt" podStartSLOduration=121.479277913 podStartE2EDuration="2m1.479277913s" podCreationTimestamp="2026-01-20 16:42:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:44:05.46391466 +0000 UTC m=+139.224252627" watchObservedRunningTime="2026-01-20 16:44:05.479277913 +0000 UTC m=+139.239615881" Jan 20 16:44:05 crc kubenswrapper[4558]: I0120 16:44:05.480696 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" 
(UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zsdqk\" (UID: \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\") " pod="openshift-image-registry/image-registry-697d97f7c8-zsdqk" Jan 20 16:44:05 crc kubenswrapper[4558]: E0120 16:44:05.480902 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-20 16:44:05.980889616 +0000 UTC m=+139.741227583 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zsdqk" (UID: "ea01cf9d-e1ac-4197-a0ac-830eebd15245") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:44:05 crc kubenswrapper[4558]: I0120 16:44:05.514854 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zd9qc" podStartSLOduration=121.514834749 podStartE2EDuration="2m1.514834749s" podCreationTimestamp="2026-01-20 16:42:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:44:05.487767991 +0000 UTC m=+139.248105958" watchObservedRunningTime="2026-01-20 16:44:05.514834749 +0000 UTC m=+139.275172707" Jan 20 16:44:05 crc kubenswrapper[4558]: I0120 16:44:05.518249 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-2j5lz"] Jan 20 16:44:05 crc kubenswrapper[4558]: I0120 16:44:05.529382 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-t4dxf"] Jan 20 16:44:05 crc kubenswrapper[4558]: I0120 16:44:05.534578 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-h7vz8" podStartSLOduration=121.534563504 podStartE2EDuration="2m1.534563504s" podCreationTimestamp="2026-01-20 16:42:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:44:05.518017167 +0000 UTC m=+139.278355135" watchObservedRunningTime="2026-01-20 16:44:05.534563504 +0000 UTC m=+139.294901472" Jan 20 16:44:05 crc kubenswrapper[4558]: W0120 16:44:05.547364 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod96b0927f_cc7b_48e8_82bd_bbec85c2b9b2.slice/crio-256e657a15a3a936d01b883af8814daec575807ddfc5373e72189273b6ecc468 WatchSource:0}: Error finding container 256e657a15a3a936d01b883af8814daec575807ddfc5373e72189273b6ecc468: Status 404 returned error can't find the container with id 256e657a15a3a936d01b883af8814daec575807ddfc5373e72189273b6ecc468 Jan 20 16:44:05 crc kubenswrapper[4558]: W0120 16:44:05.549248 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda53f5d18_851d_4a86_b349_e53e035d39ce.slice/crio-631b7c49a2421e16599711dcab2abafbd179f007f27790baaab3f174439a7ea1 WatchSource:0}: Error 
finding container 631b7c49a2421e16599711dcab2abafbd179f007f27790baaab3f174439a7ea1: Status 404 returned error can't find the container with id 631b7c49a2421e16599711dcab2abafbd179f007f27790baaab3f174439a7ea1 Jan 20 16:44:05 crc kubenswrapper[4558]: I0120 16:44:05.576598 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-wmcfr" podStartSLOduration=121.576579835 podStartE2EDuration="2m1.576579835s" podCreationTimestamp="2026-01-20 16:42:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:44:05.571886405 +0000 UTC m=+139.332224372" watchObservedRunningTime="2026-01-20 16:44:05.576579835 +0000 UTC m=+139.336917802" Jan 20 16:44:05 crc kubenswrapper[4558]: I0120 16:44:05.585411 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 20 16:44:05 crc kubenswrapper[4558]: E0120 16:44:05.585921 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-20 16:44:06.085891704 +0000 UTC m=+139.846229662 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:44:05 crc kubenswrapper[4558]: I0120 16:44:05.604722 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-9cqxr"] Jan 20 16:44:05 crc kubenswrapper[4558]: I0120 16:44:05.612803 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-t225g" podStartSLOduration=121.612786538 podStartE2EDuration="2m1.612786538s" podCreationTimestamp="2026-01-20 16:42:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:44:05.61198249 +0000 UTC m=+139.372320457" watchObservedRunningTime="2026-01-20 16:44:05.612786538 +0000 UTC m=+139.373124505" Jan 20 16:44:05 crc kubenswrapper[4558]: I0120 16:44:05.638982 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29482110-7p5pb"] Jan 20 16:44:05 crc kubenswrapper[4558]: I0120 16:44:05.671760 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-bgqpg"] Jan 20 16:44:05 crc kubenswrapper[4558]: I0120 16:44:05.672708 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-bgqpg" Jan 20 16:44:05 crc kubenswrapper[4558]: I0120 16:44:05.676452 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-p5lzn" podStartSLOduration=121.676437721 podStartE2EDuration="2m1.676437721s" podCreationTimestamp="2026-01-20 16:42:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:44:05.669290789 +0000 UTC m=+139.429628756" watchObservedRunningTime="2026-01-20 16:44:05.676437721 +0000 UTC m=+139.436775689" Jan 20 16:44:05 crc kubenswrapper[4558]: I0120 16:44:05.676955 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Jan 20 16:44:05 crc kubenswrapper[4558]: I0120 16:44:05.687537 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-bgqpg"] Jan 20 16:44:05 crc kubenswrapper[4558]: I0120 16:44:05.688046 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/11fdde87-2c2c-40ba-84e5-e0c93fc58130-catalog-content\") pod \"certified-operators-bgqpg\" (UID: \"11fdde87-2c2c-40ba-84e5-e0c93fc58130\") " pod="openshift-marketplace/certified-operators-bgqpg" Jan 20 16:44:05 crc kubenswrapper[4558]: I0120 16:44:05.688082 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zsdqk\" (UID: \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\") " pod="openshift-image-registry/image-registry-697d97f7c8-zsdqk" Jan 20 16:44:05 crc kubenswrapper[4558]: I0120 16:44:05.688122 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9zszb\" (UniqueName: \"kubernetes.io/projected/11fdde87-2c2c-40ba-84e5-e0c93fc58130-kube-api-access-9zszb\") pod \"certified-operators-bgqpg\" (UID: \"11fdde87-2c2c-40ba-84e5-e0c93fc58130\") " pod="openshift-marketplace/certified-operators-bgqpg" Jan 20 16:44:05 crc kubenswrapper[4558]: I0120 16:44:05.688150 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/11fdde87-2c2c-40ba-84e5-e0c93fc58130-utilities\") pod \"certified-operators-bgqpg\" (UID: \"11fdde87-2c2c-40ba-84e5-e0c93fc58130\") " pod="openshift-marketplace/certified-operators-bgqpg" Jan 20 16:44:05 crc kubenswrapper[4558]: E0120 16:44:05.688428 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-20 16:44:06.188414302 +0000 UTC m=+139.948752268 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zsdqk" (UID: "ea01cf9d-e1ac-4197-a0ac-830eebd15245") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:44:05 crc kubenswrapper[4558]: I0120 16:44:05.753043 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-25jqm" podStartSLOduration=121.753028044 podStartE2EDuration="2m1.753028044s" podCreationTimestamp="2026-01-20 16:42:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:44:05.702322298 +0000 UTC m=+139.462660265" watchObservedRunningTime="2026-01-20 16:44:05.753028044 +0000 UTC m=+139.513366010" Jan 20 16:44:05 crc kubenswrapper[4558]: I0120 16:44:05.789432 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-vmm7h" podStartSLOduration=121.789408163 podStartE2EDuration="2m1.789408163s" podCreationTimestamp="2026-01-20 16:42:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:44:05.78257365 +0000 UTC m=+139.542911608" watchObservedRunningTime="2026-01-20 16:44:05.789408163 +0000 UTC m=+139.549746130" Jan 20 16:44:05 crc kubenswrapper[4558]: I0120 16:44:05.789702 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-hsls5" podStartSLOduration=121.789695706 podStartE2EDuration="2m1.789695706s" podCreationTimestamp="2026-01-20 16:42:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:44:05.75567522 +0000 UTC m=+139.516013187" watchObservedRunningTime="2026-01-20 16:44:05.789695706 +0000 UTC m=+139.550033673" Jan 20 16:44:05 crc kubenswrapper[4558]: I0120 16:44:05.799299 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 20 16:44:05 crc kubenswrapper[4558]: E0120 16:44:05.800746 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-20 16:44:06.300725198 +0000 UTC m=+140.061063165 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:44:05 crc kubenswrapper[4558]: I0120 16:44:05.801903 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/11fdde87-2c2c-40ba-84e5-e0c93fc58130-utilities\") pod \"certified-operators-bgqpg\" (UID: \"11fdde87-2c2c-40ba-84e5-e0c93fc58130\") " pod="openshift-marketplace/certified-operators-bgqpg" Jan 20 16:44:05 crc kubenswrapper[4558]: I0120 16:44:05.802097 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/11fdde87-2c2c-40ba-84e5-e0c93fc58130-catalog-content\") pod \"certified-operators-bgqpg\" (UID: \"11fdde87-2c2c-40ba-84e5-e0c93fc58130\") " pod="openshift-marketplace/certified-operators-bgqpg" Jan 20 16:44:05 crc kubenswrapper[4558]: I0120 16:44:05.802157 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zsdqk\" (UID: \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\") " pod="openshift-image-registry/image-registry-697d97f7c8-zsdqk" Jan 20 16:44:05 crc kubenswrapper[4558]: I0120 16:44:05.802259 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9zszb\" (UniqueName: \"kubernetes.io/projected/11fdde87-2c2c-40ba-84e5-e0c93fc58130-kube-api-access-9zszb\") pod \"certified-operators-bgqpg\" (UID: \"11fdde87-2c2c-40ba-84e5-e0c93fc58130\") " pod="openshift-marketplace/certified-operators-bgqpg" Jan 20 16:44:05 crc kubenswrapper[4558]: I0120 16:44:05.803311 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/11fdde87-2c2c-40ba-84e5-e0c93fc58130-utilities\") pod \"certified-operators-bgqpg\" (UID: \"11fdde87-2c2c-40ba-84e5-e0c93fc58130\") " pod="openshift-marketplace/certified-operators-bgqpg" Jan 20 16:44:05 crc kubenswrapper[4558]: I0120 16:44:05.803528 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/11fdde87-2c2c-40ba-84e5-e0c93fc58130-catalog-content\") pod \"certified-operators-bgqpg\" (UID: \"11fdde87-2c2c-40ba-84e5-e0c93fc58130\") " pod="openshift-marketplace/certified-operators-bgqpg" Jan 20 16:44:05 crc kubenswrapper[4558]: I0120 16:44:05.839352 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-ppqsn" podStartSLOduration=121.839333535 podStartE2EDuration="2m1.839333535s" podCreationTimestamp="2026-01-20 16:42:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:44:05.838886812 +0000 UTC m=+139.599224778" watchObservedRunningTime="2026-01-20 16:44:05.839333535 +0000 UTC m=+139.599671502" Jan 20 16:44:05 crc kubenswrapper[4558]: E0120 16:44:05.839838 4558 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-20 16:44:06.339817949 +0000 UTC m=+140.100155916 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zsdqk" (UID: "ea01cf9d-e1ac-4197-a0ac-830eebd15245") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:44:05 crc kubenswrapper[4558]: I0120 16:44:05.864370 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-chwxp"] Jan 20 16:44:05 crc kubenswrapper[4558]: I0120 16:44:05.867806 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-rh247" podStartSLOduration=121.867790448 podStartE2EDuration="2m1.867790448s" podCreationTimestamp="2026-01-20 16:42:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:44:05.857780441 +0000 UTC m=+139.618118407" watchObservedRunningTime="2026-01-20 16:44:05.867790448 +0000 UTC m=+139.628128415" Jan 20 16:44:05 crc kubenswrapper[4558]: I0120 16:44:05.870604 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-chwxp"] Jan 20 16:44:05 crc kubenswrapper[4558]: I0120 16:44:05.881785 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-chwxp" Jan 20 16:44:05 crc kubenswrapper[4558]: I0120 16:44:05.884743 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9zszb\" (UniqueName: \"kubernetes.io/projected/11fdde87-2c2c-40ba-84e5-e0c93fc58130-kube-api-access-9zszb\") pod \"certified-operators-bgqpg\" (UID: \"11fdde87-2c2c-40ba-84e5-e0c93fc58130\") " pod="openshift-marketplace/certified-operators-bgqpg" Jan 20 16:44:05 crc kubenswrapper[4558]: I0120 16:44:05.893866 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Jan 20 16:44:05 crc kubenswrapper[4558]: I0120 16:44:05.905608 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 20 16:44:05 crc kubenswrapper[4558]: I0120 16:44:05.905803 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/03467399-b14f-421a-a7ec-f2a533daed0d-catalog-content\") pod \"community-operators-chwxp\" (UID: \"03467399-b14f-421a-a7ec-f2a533daed0d\") " pod="openshift-marketplace/community-operators-chwxp" Jan 20 16:44:05 crc kubenswrapper[4558]: I0120 16:44:05.905906 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/03467399-b14f-421a-a7ec-f2a533daed0d-utilities\") pod \"community-operators-chwxp\" (UID: \"03467399-b14f-421a-a7ec-f2a533daed0d\") " pod="openshift-marketplace/community-operators-chwxp" Jan 20 16:44:05 crc kubenswrapper[4558]: I0120 16:44:05.905970 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9gsqz\" (UniqueName: \"kubernetes.io/projected/03467399-b14f-421a-a7ec-f2a533daed0d-kube-api-access-9gsqz\") pod \"community-operators-chwxp\" (UID: \"03467399-b14f-421a-a7ec-f2a533daed0d\") " pod="openshift-marketplace/community-operators-chwxp" Jan 20 16:44:05 crc kubenswrapper[4558]: E0120 16:44:05.906066 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-20 16:44:06.406050056 +0000 UTC m=+140.166388023 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.007017 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/03467399-b14f-421a-a7ec-f2a533daed0d-utilities\") pod \"community-operators-chwxp\" (UID: \"03467399-b14f-421a-a7ec-f2a533daed0d\") " pod="openshift-marketplace/community-operators-chwxp" Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.007305 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9gsqz\" (UniqueName: \"kubernetes.io/projected/03467399-b14f-421a-a7ec-f2a533daed0d-kube-api-access-9gsqz\") pod \"community-operators-chwxp\" (UID: \"03467399-b14f-421a-a7ec-f2a533daed0d\") " pod="openshift-marketplace/community-operators-chwxp" Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.007327 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/03467399-b14f-421a-a7ec-f2a533daed0d-catalog-content\") pod \"community-operators-chwxp\" (UID: \"03467399-b14f-421a-a7ec-f2a533daed0d\") " pod="openshift-marketplace/community-operators-chwxp" Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.007348 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zsdqk\" (UID: \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\") " pod="openshift-image-registry/image-registry-697d97f7c8-zsdqk" Jan 20 16:44:06 crc kubenswrapper[4558]: E0120 16:44:06.007594 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-20 16:44:06.507581582 +0000 UTC m=+140.267919549 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zsdqk" (UID: "ea01cf9d-e1ac-4197-a0ac-830eebd15245") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.008519 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/03467399-b14f-421a-a7ec-f2a533daed0d-utilities\") pod \"community-operators-chwxp\" (UID: \"03467399-b14f-421a-a7ec-f2a533daed0d\") " pod="openshift-marketplace/community-operators-chwxp" Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.011042 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/03467399-b14f-421a-a7ec-f2a533daed0d-catalog-content\") pod \"community-operators-chwxp\" (UID: \"03467399-b14f-421a-a7ec-f2a533daed0d\") " pod="openshift-marketplace/community-operators-chwxp" Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.045476 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-bgqpg" Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.064892 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-7pjxc"] Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.065801 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-7pjxc" Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.083044 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9gsqz\" (UniqueName: \"kubernetes.io/projected/03467399-b14f-421a-a7ec-f2a533daed0d-kube-api-access-9gsqz\") pod \"community-operators-chwxp\" (UID: \"03467399-b14f-421a-a7ec-f2a533daed0d\") " pod="openshift-marketplace/community-operators-chwxp" Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.086716 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-7pjxc"] Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.112541 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 20 16:44:06 crc kubenswrapper[4558]: E0120 16:44:06.112901 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-20 16:44:06.612886462 +0000 UTC m=+140.373224429 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.158333 4558 patch_prober.go:28] interesting pod/router-default-5444994796-vkkjt container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 20 16:44:06 crc kubenswrapper[4558]: [-]has-synced failed: reason withheld Jan 20 16:44:06 crc kubenswrapper[4558]: [+]process-running ok Jan 20 16:44:06 crc kubenswrapper[4558]: healthz check failed Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.158627 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-vkkjt" podUID="84f45a25-f77d-4c4c-88b6-5bdc4c286f10" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.197481 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jvkk8" event={"ID":"3d51bcc0-ce56-4953-b0d6-252bb4a3a66c","Type":"ContainerStarted","Data":"b36a4dcbebbc262b2a6bcac30b99cbccb08df919c8070749369cbfaa98119472"} Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.197528 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jvkk8" event={"ID":"3d51bcc0-ce56-4953-b0d6-252bb4a3a66c","Type":"ContainerStarted","Data":"ff838796a17f14d36f24b828d6025736cfd6b6ef2a0fc3709f72fb124a7fdab6"} Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.197540 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jvkk8" event={"ID":"3d51bcc0-ce56-4953-b0d6-252bb4a3a66c","Type":"ContainerStarted","Data":"a2068d880b8ef0cda57f2debe4204b219cbc37cfaf4f830c647b53346d1a6418"} Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.218756 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zsdqk\" (UID: \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\") " pod="openshift-image-registry/image-registry-697d97f7c8-zsdqk" Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.218853 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xf6sf\" (UniqueName: \"kubernetes.io/projected/04aecb46-8cfc-4f76-9fe7-31ac1f5bd71e-kube-api-access-xf6sf\") pod \"certified-operators-7pjxc\" (UID: \"04aecb46-8cfc-4f76-9fe7-31ac1f5bd71e\") " pod="openshift-marketplace/certified-operators-7pjxc" Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.218976 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/04aecb46-8cfc-4f76-9fe7-31ac1f5bd71e-catalog-content\") pod \"certified-operators-7pjxc\" (UID: \"04aecb46-8cfc-4f76-9fe7-31ac1f5bd71e\") " 
pod="openshift-marketplace/certified-operators-7pjxc" Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.219023 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/04aecb46-8cfc-4f76-9fe7-31ac1f5bd71e-utilities\") pod \"certified-operators-7pjxc\" (UID: \"04aecb46-8cfc-4f76-9fe7-31ac1f5bd71e\") " pod="openshift-marketplace/certified-operators-7pjxc" Jan 20 16:44:06 crc kubenswrapper[4558]: E0120 16:44:06.219982 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-20 16:44:06.719955763 +0000 UTC m=+140.480293730 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zsdqk" (UID: "ea01cf9d-e1ac-4197-a0ac-830eebd15245") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.246406 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jq4hg" event={"ID":"7241fe86-b7e1-4246-b040-66ca5e799562","Type":"ContainerStarted","Data":"817702d91f9d6935e63518c65dc8384bdb326ee4ca3822312a2686f8ee040fe3"} Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.246483 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-chwxp" Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.252803 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-rfmgn"] Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.253702 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-rfmgn" Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.270013 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-jvkk8" podStartSLOduration=122.269992365 podStartE2EDuration="2m2.269992365s" podCreationTimestamp="2026-01-20 16:42:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:44:06.268047743 +0000 UTC m=+140.028385711" watchObservedRunningTime="2026-01-20 16:44:06.269992365 +0000 UTC m=+140.030330332" Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.278146 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-rfmgn"] Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.308425 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-4t4rs" event={"ID":"71c9147f-5ab8-4342-b8e2-e4c37d281efb","Type":"ContainerStarted","Data":"5933f67fb59f6820a69ca0e4dad518b2d628dccd3e11cef6584d40e30eef63ce"} Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.308487 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-4t4rs" event={"ID":"71c9147f-5ab8-4342-b8e2-e4c37d281efb","Type":"ContainerStarted","Data":"4f4e020a80e51dbb1118652c4017ab1fa95461e784f3a25616d220abebabd8a0"} Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.324585 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.324807 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/04aecb46-8cfc-4f76-9fe7-31ac1f5bd71e-catalog-content\") pod \"certified-operators-7pjxc\" (UID: \"04aecb46-8cfc-4f76-9fe7-31ac1f5bd71e\") " pod="openshift-marketplace/certified-operators-7pjxc" Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.324844 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/04aecb46-8cfc-4f76-9fe7-31ac1f5bd71e-utilities\") pod \"certified-operators-7pjxc\" (UID: \"04aecb46-8cfc-4f76-9fe7-31ac1f5bd71e\") " pod="openshift-marketplace/certified-operators-7pjxc" Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.324897 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xf6sf\" (UniqueName: \"kubernetes.io/projected/04aecb46-8cfc-4f76-9fe7-31ac1f5bd71e-kube-api-access-xf6sf\") pod \"certified-operators-7pjxc\" (UID: \"04aecb46-8cfc-4f76-9fe7-31ac1f5bd71e\") " pod="openshift-marketplace/certified-operators-7pjxc" Jan 20 16:44:06 crc kubenswrapper[4558]: E0120 16:44:06.326177 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-20 16:44:06.826127319 +0000 UTC m=+140.586465286 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.326446 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/04aecb46-8cfc-4f76-9fe7-31ac1f5bd71e-catalog-content\") pod \"certified-operators-7pjxc\" (UID: \"04aecb46-8cfc-4f76-9fe7-31ac1f5bd71e\") " pod="openshift-marketplace/certified-operators-7pjxc" Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.328420 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/04aecb46-8cfc-4f76-9fe7-31ac1f5bd71e-utilities\") pod \"certified-operators-7pjxc\" (UID: \"04aecb46-8cfc-4f76-9fe7-31ac1f5bd71e\") " pod="openshift-marketplace/certified-operators-7pjxc" Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.340957 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-7flzc" event={"ID":"1d076664-8f86-4e46-a67a-85539915bb04","Type":"ContainerStarted","Data":"d2e1ab5a10b47c939e6741277776d14a1670514061fb3257f5f61eafa5158470"} Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.370006 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-l8kh6" event={"ID":"25c0e5f7-93dc-49a4-a31c-b34dbc125f3e","Type":"ContainerStarted","Data":"7a0c3828a2993c9fff8f3e0c56c3d68f0b40ad837f133526afea580bd27d2761"} Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.370266 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-l8kh6" event={"ID":"25c0e5f7-93dc-49a4-a31c-b34dbc125f3e","Type":"ContainerStarted","Data":"a06c966d0824df54be8600444e76c0ff30b29f8625b6fc5511ba077c085ba391"} Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.391586 4558 generic.go:334] "Generic (PLEG): container finished" podID="436af592-a7f2-4649-b2e3-47df5e85ce9c" containerID="fc40297baf5f989426ad7469b288b8c78a72c05d0a24309c86d9324a7812635e" exitCode=0 Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.391682 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b97gp" event={"ID":"436af592-a7f2-4649-b2e3-47df5e85ce9c","Type":"ContainerDied","Data":"fc40297baf5f989426ad7469b288b8c78a72c05d0a24309c86d9324a7812635e"} Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.391707 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b97gp" event={"ID":"436af592-a7f2-4649-b2e3-47df5e85ce9c","Type":"ContainerStarted","Data":"eec379c9a948ecb5031c060a6e65ee4c5577e45d0a32bbbbad37c2be61462c69"} Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.393630 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xf6sf\" (UniqueName: \"kubernetes.io/projected/04aecb46-8cfc-4f76-9fe7-31ac1f5bd71e-kube-api-access-xf6sf\") pod \"certified-operators-7pjxc\" (UID: 
\"04aecb46-8cfc-4f76-9fe7-31ac1f5bd71e\") " pod="openshift-marketplace/certified-operators-7pjxc" Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.394577 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-4t4rs" podStartSLOduration=122.394568857 podStartE2EDuration="2m2.394568857s" podCreationTimestamp="2026-01-20 16:42:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:44:06.366132603 +0000 UTC m=+140.126470560" watchObservedRunningTime="2026-01-20 16:44:06.394568857 +0000 UTC m=+140.154906823" Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.428807 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zsdqk\" (UID: \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\") " pod="openshift-image-registry/image-registry-697d97f7c8-zsdqk" Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.428884 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2aa030eb-fbf5-4bf5-ac24-0791492977c2-catalog-content\") pod \"community-operators-rfmgn\" (UID: \"2aa030eb-fbf5-4bf5-ac24-0791492977c2\") " pod="openshift-marketplace/community-operators-rfmgn" Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.428968 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2aa030eb-fbf5-4bf5-ac24-0791492977c2-utilities\") pod \"community-operators-rfmgn\" (UID: \"2aa030eb-fbf5-4bf5-ac24-0791492977c2\") " pod="openshift-marketplace/community-operators-rfmgn" Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.429022 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qm76k\" (UniqueName: \"kubernetes.io/projected/2aa030eb-fbf5-4bf5-ac24-0791492977c2-kube-api-access-qm76k\") pod \"community-operators-rfmgn\" (UID: \"2aa030eb-fbf5-4bf5-ac24-0791492977c2\") " pod="openshift-marketplace/community-operators-rfmgn" Jan 20 16:44:06 crc kubenswrapper[4558]: E0120 16:44:06.429598 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-20 16:44:06.929587667 +0000 UTC m=+140.689925635 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zsdqk" (UID: "ea01cf9d-e1ac-4197-a0ac-830eebd15245") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.437565 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-7pjxc" Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.456313 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-9cqxr" event={"ID":"6aa23de2-5f67-4f16-bd7d-eeecb3dfa19f","Type":"ContainerStarted","Data":"a55dad1b4d2699db1765a295b8bf51e2f306289e61ea92e051c08ad4149959c0"} Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.467351 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-7flzc" podStartSLOduration=122.467338275 podStartE2EDuration="2m2.467338275s" podCreationTimestamp="2026-01-20 16:42:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:44:06.394078952 +0000 UTC m=+140.154416919" watchObservedRunningTime="2026-01-20 16:44:06.467338275 +0000 UTC m=+140.227676241" Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.486075 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qdvgg" event={"ID":"a9e14748-e028-43ec-8e2c-d2d8ce033c75","Type":"ContainerStarted","Data":"acd1851c776fe9a763856fdb95c3d59b07b4713fd69e336395595b7eb6d5c56d"} Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.486113 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qdvgg" event={"ID":"a9e14748-e028-43ec-8e2c-d2d8ce033c75","Type":"ContainerStarted","Data":"3ca7284fe2194055409eb004fc8d5c193066d2e80b055aaf405e17cc56e85af5"} Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.497236 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-4w5mk" event={"ID":"119fb5fe-2460-4d2d-9db9-452afaa1e93e","Type":"ContainerStarted","Data":"6913c1a61b4c8c397ce7f82b6df8fdfa339dbfe66c969ab585f8716e45080962"} Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.497268 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-4w5mk" event={"ID":"119fb5fe-2460-4d2d-9db9-452afaa1e93e","Type":"ContainerStarted","Data":"0764d04eecb3f3c5db485aefa69090a5f327938a3dc576b38846c480c3f53d0c"} Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.522605 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-gbvfn" event={"ID":"ad0a9f73-941c-4023-a106-2b0fa19da6a0","Type":"ContainerStarted","Data":"c84cb6f7923820273318bb393822396b60e7dc8b4a6f7be8851d75e0c9900689"} Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.522983 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-gbvfn" event={"ID":"ad0a9f73-941c-4023-a106-2b0fa19da6a0","Type":"ContainerStarted","Data":"6f52b35ffa3928478acd37bafb07393da93571ad7274f4cdaa1e57589cc1de4f"} Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.544720 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.545052 4558 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2aa030eb-fbf5-4bf5-ac24-0791492977c2-catalog-content\") pod \"community-operators-rfmgn\" (UID: \"2aa030eb-fbf5-4bf5-ac24-0791492977c2\") " pod="openshift-marketplace/community-operators-rfmgn" Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.547134 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2aa030eb-fbf5-4bf5-ac24-0791492977c2-catalog-content\") pod \"community-operators-rfmgn\" (UID: \"2aa030eb-fbf5-4bf5-ac24-0791492977c2\") " pod="openshift-marketplace/community-operators-rfmgn" Jan 20 16:44:06 crc kubenswrapper[4558]: E0120 16:44:06.547270 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-20 16:44:07.047248975 +0000 UTC m=+140.807586941 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.555466 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2aa030eb-fbf5-4bf5-ac24-0791492977c2-utilities\") pod \"community-operators-rfmgn\" (UID: \"2aa030eb-fbf5-4bf5-ac24-0791492977c2\") " pod="openshift-marketplace/community-operators-rfmgn" Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.555687 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qm76k\" (UniqueName: \"kubernetes.io/projected/2aa030eb-fbf5-4bf5-ac24-0791492977c2-kube-api-access-qm76k\") pod \"community-operators-rfmgn\" (UID: \"2aa030eb-fbf5-4bf5-ac24-0791492977c2\") " pod="openshift-marketplace/community-operators-rfmgn" Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.555801 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zsdqk\" (UID: \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\") " pod="openshift-image-registry/image-registry-697d97f7c8-zsdqk" Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.558355 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2aa030eb-fbf5-4bf5-ac24-0791492977c2-utilities\") pod \"community-operators-rfmgn\" (UID: \"2aa030eb-fbf5-4bf5-ac24-0791492977c2\") " pod="openshift-marketplace/community-operators-rfmgn" Jan 20 16:44:06 crc kubenswrapper[4558]: E0120 16:44:06.564494 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-20 16:44:07.06447786 +0000 UTC m=+140.824815827 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zsdqk" (UID: "ea01cf9d-e1ac-4197-a0ac-830eebd15245") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.579121 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-l8kh6" podStartSLOduration=122.579099263 podStartE2EDuration="2m2.579099263s" podCreationTimestamp="2026-01-20 16:42:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:44:06.522837068 +0000 UTC m=+140.283175035" watchObservedRunningTime="2026-01-20 16:44:06.579099263 +0000 UTC m=+140.339437229" Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.626545 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qm76k\" (UniqueName: \"kubernetes.io/projected/2aa030eb-fbf5-4bf5-ac24-0791492977c2-kube-api-access-qm76k\") pod \"community-operators-rfmgn\" (UID: \"2aa030eb-fbf5-4bf5-ac24-0791492977c2\") " pod="openshift-marketplace/community-operators-rfmgn" Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.685875 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-qdvgg" podStartSLOduration=122.685855964 podStartE2EDuration="2m2.685855964s" podCreationTimestamp="2026-01-20 16:42:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:44:06.64355184 +0000 UTC m=+140.403889807" watchObservedRunningTime="2026-01-20 16:44:06.685855964 +0000 UTC m=+140.446193930" Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.687182 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-4w5mk" podStartSLOduration=122.687175765 podStartE2EDuration="2m2.687175765s" podCreationTimestamp="2026-01-20 16:42:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:44:06.579011457 +0000 UTC m=+140.339349423" watchObservedRunningTime="2026-01-20 16:44:06.687175765 +0000 UTC m=+140.447513732" Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.688097 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 20 16:44:06 crc kubenswrapper[4558]: E0120 16:44:06.688465 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-20 16:44:07.188450812 +0000 UTC m=+140.948788779 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.719351 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-5rxsc" event={"ID":"1cdfeb06-d9ec-43ad-850c-d685e4e7c8a5","Type":"ContainerStarted","Data":"b2f0df1f36130e9f3731599cf76b491edd9659739ac4c951cbf49acc184e8f1b"} Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.742763 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-mshrf" event={"ID":"963800b2-ec8d-4d8f-9a05-f199a20c8ba9","Type":"ContainerStarted","Data":"a26044554bd6182fea8d691d82e39b4774ccb8f53b6ae8c4d355e0bfe62e667e"} Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.743479 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-mshrf" event={"ID":"963800b2-ec8d-4d8f-9a05-f199a20c8ba9","Type":"ContainerStarted","Data":"dbd477c5295f3105a734ea5e5d07a58801876dd0ded2390379be7b8e43bfb5ef"} Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.789736 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zsdqk\" (UID: \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\") " pod="openshift-image-registry/image-registry-697d97f7c8-zsdqk" Jan 20 16:44:06 crc kubenswrapper[4558]: E0120 16:44:06.791016 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-20 16:44:07.291001793 +0000 UTC m=+141.051339760 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zsdqk" (UID: "ea01cf9d-e1ac-4197-a0ac-830eebd15245") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.792027 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-sgbgg" event={"ID":"61738345-9c27-415a-a089-e0000fe5c92a","Type":"ContainerStarted","Data":"e609dcdda35d03d0cad2fbb054574c8de5cb999d3aea4214a7d3a0ad4dffa275"} Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.792088 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-sgbgg" event={"ID":"61738345-9c27-415a-a089-e0000fe5c92a","Type":"ContainerStarted","Data":"b20b87db1870b8445ed7ff44c1d5d609ecceebc5f0345dfbdacff6be75d345f6"} Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.837433 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-cp5zh" event={"ID":"f4d440d5-1f47-40bc-bfdd-95267dfde12c","Type":"ContainerStarted","Data":"98b893cdcc4d5f61889fc00cee21ce1943ad0b9fd9403b7fba924e716c6de1e6"} Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.860385 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-dqf5x" event={"ID":"206f3850-b746-456c-9500-4d862f983a1e","Type":"ContainerStarted","Data":"64f29e98190c63a86923ef566655d73467804382b6d142823e6b93ae122e4df2"} Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.875147 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-v2c77" event={"ID":"9a09b2d6-5243-4a2c-a13f-3b640bfaa3dc","Type":"ContainerStarted","Data":"1f28949f42024cad62a331ded238fb50c71957b05e50886af6b653802c48f643"} Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.875205 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-v2c77" event={"ID":"9a09b2d6-5243-4a2c-a13f-3b640bfaa3dc","Type":"ContainerStarted","Data":"c79922d25f2935219874f0d0900c311fee47e49824c367904fdc8dd291add8b5"} Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.892791 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.893381 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-225km" event={"ID":"59e85616-edec-46fa-aa93-a1ecb76b415c","Type":"ContainerStarted","Data":"8216be5628974b93bd6ee4e3753b3855769effeb6d6b33526f9a6cc2382ac3f7"} Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.893535 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-225km" event={"ID":"59e85616-edec-46fa-aa93-a1ecb76b415c","Type":"ContainerStarted","Data":"eeb692143a806868d28462801a64edac3b5ab0fdde4c3f41d6446ab0c60b8d44"} Jan 20 16:44:06 crc kubenswrapper[4558]: 
E0120 16:44:06.894118 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-20 16:44:07.394101961 +0000 UTC m=+141.154439928 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.895732 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-225km" Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.912563 4558 patch_prober.go:28] interesting pod/downloads-7954f5f757-225km container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" start-of-body= Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.912618 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-225km" podUID="59e85616-edec-46fa-aa93-a1ecb76b415c" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.912982 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-rfmgn" Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.943055 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rh5k4" event={"ID":"3ed1f27c-300c-4da3-8c4c-742baaf440eb","Type":"ContainerStarted","Data":"86167029b18cbecbef15006d2352b9a4c4aaf5e6d362447c0716475bcf978a4b"} Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.943089 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rh5k4" event={"ID":"3ed1f27c-300c-4da3-8c4c-742baaf440eb","Type":"ContainerStarted","Data":"3cca04a03c4028bdd7e69de26609508823c77daaec93aed58c9f538f7637be16"} Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.950395 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rh5k4" Jan 20 16:44:06 crc kubenswrapper[4558]: I0120 16:44:06.982598 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rh5k4" Jan 20 16:44:07 crc kubenswrapper[4558]: I0120 16:44:07.003582 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zsdqk\" (UID: \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\") " pod="openshift-image-registry/image-registry-697d97f7c8-zsdqk" Jan 20 16:44:07 crc kubenswrapper[4558]: E0120 16:44:07.003914 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-20 16:44:07.50390336 +0000 UTC m=+141.264241327 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zsdqk" (UID: "ea01cf9d-e1ac-4197-a0ac-830eebd15245") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:44:07 crc kubenswrapper[4558]: I0120 16:44:07.021712 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-7wfbg" event={"ID":"34b7c77f-6a7d-43de-9ee4-bdba78dc8248","Type":"ContainerStarted","Data":"170264020e4cdc4e8c07c88f877b8142cf4a4e331872a7892303e168935bdce1"} Jan 20 16:44:07 crc kubenswrapper[4558]: I0120 16:44:07.021760 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-7wfbg" event={"ID":"34b7c77f-6a7d-43de-9ee4-bdba78dc8248","Type":"ContainerStarted","Data":"e733161a6239d32048e7e3676cb13bdfa1d9f419c81072c867ceff7590c2cf23"} Jan 20 16:44:07 crc kubenswrapper[4558]: I0120 16:44:07.042369 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-49k6d" event={"ID":"ca06f669-4a81-4e6c-abc1-2800cd58c188","Type":"ContainerStarted","Data":"2df4d2f35359be5ac14f3291fc4828eb72ad236ac57a92b3e8e908965573114c"} Jan 20 16:44:07 crc kubenswrapper[4558]: I0120 16:44:07.074080 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-2j5lz" event={"ID":"96b0927f-cc7b-48e8-82bd-bbec85c2b9b2","Type":"ContainerStarted","Data":"256e657a15a3a936d01b883af8814daec575807ddfc5373e72189273b6ecc468"} Jan 20 16:44:07 crc kubenswrapper[4558]: I0120 16:44:07.074690 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-2j5lz" Jan 20 16:44:07 crc kubenswrapper[4558]: I0120 16:44:07.082747 4558 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-2j5lz container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.40:8080/healthz\": dial tcp 10.217.0.40:8080: connect: connection refused" start-of-body= Jan 20 16:44:07 crc kubenswrapper[4558]: I0120 16:44:07.082788 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-2j5lz" podUID="96b0927f-cc7b-48e8-82bd-bbec85c2b9b2" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.40:8080/healthz\": dial tcp 10.217.0.40:8080: connect: connection refused" Jan 20 16:44:07 crc kubenswrapper[4558]: I0120 16:44:07.099523 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-t4dxf" event={"ID":"a53f5d18-851d-4a86-b349-e53e035d39ce","Type":"ContainerStarted","Data":"631b7c49a2421e16599711dcab2abafbd179f007f27790baaab3f174439a7ea1"} Jan 20 16:44:07 crc kubenswrapper[4558]: I0120 16:44:07.099675 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-t4dxf" Jan 20 16:44:07 crc kubenswrapper[4558]: I0120 16:44:07.105033 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 20 16:44:07 crc kubenswrapper[4558]: E0120 16:44:07.106529 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-20 16:44:07.606508873 +0000 UTC m=+141.366846841 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:44:07 crc kubenswrapper[4558]: I0120 16:44:07.126979 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-225km" podStartSLOduration=123.126964531 podStartE2EDuration="2m3.126964531s" podCreationTimestamp="2026-01-20 16:42:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:44:07.126438137 +0000 UTC m=+140.886776104" watchObservedRunningTime="2026-01-20 16:44:07.126964531 +0000 UTC m=+140.887302498" Jan 20 16:44:07 crc kubenswrapper[4558]: I0120 16:44:07.129931 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-rh5k4" podStartSLOduration=123.129923116 podStartE2EDuration="2m3.129923116s" podCreationTimestamp="2026-01-20 16:42:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:44:07.064056951 +0000 UTC m=+140.824394917" watchObservedRunningTime="2026-01-20 16:44:07.129923116 +0000 UTC m=+140.890261083" Jan 20 16:44:07 crc kubenswrapper[4558]: I0120 16:44:07.158757 4558 patch_prober.go:28] interesting pod/router-default-5444994796-vkkjt container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 20 16:44:07 crc kubenswrapper[4558]: [-]has-synced failed: reason withheld Jan 20 16:44:07 crc kubenswrapper[4558]: [+]process-running ok Jan 20 16:44:07 crc kubenswrapper[4558]: healthz check failed Jan 20 16:44:07 crc kubenswrapper[4558]: I0120 16:44:07.158801 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-vkkjt" podUID="84f45a25-f77d-4c4c-88b6-5bdc4c286f10" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 20 16:44:07 crc kubenswrapper[4558]: I0120 16:44:07.160639 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-cp5zh" podStartSLOduration=7.160628514 podStartE2EDuration="7.160628514s" podCreationTimestamp="2026-01-20 16:44:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:44:07.14541911 +0000 UTC m=+140.905757077" 
watchObservedRunningTime="2026-01-20 16:44:07.160628514 +0000 UTC m=+140.920966481" Jan 20 16:44:07 crc kubenswrapper[4558]: I0120 16:44:07.160822 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-bgqpg"] Jan 20 16:44:07 crc kubenswrapper[4558]: I0120 16:44:07.193620 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8xc26" event={"ID":"d49afd2e-470e-4bd4-a30d-5014f423510b","Type":"ContainerStarted","Data":"1019f7efd82a2ee57b4849441dfba28f336f7c02c8180ed5a82ee26f2fcf5641"} Jan 20 16:44:07 crc kubenswrapper[4558]: I0120 16:44:07.196478 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-sgbgg" podStartSLOduration=123.196459366 podStartE2EDuration="2m3.196459366s" podCreationTimestamp="2026-01-20 16:42:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:44:07.177632886 +0000 UTC m=+140.937970873" watchObservedRunningTime="2026-01-20 16:44:07.196459366 +0000 UTC m=+140.956797334" Jan 20 16:44:07 crc kubenswrapper[4558]: I0120 16:44:07.243893 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zsdqk\" (UID: \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\") " pod="openshift-image-registry/image-registry-697d97f7c8-zsdqk" Jan 20 16:44:07 crc kubenswrapper[4558]: I0120 16:44:07.248327 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-v2c77" podStartSLOduration=7.248305114 podStartE2EDuration="7.248305114s" podCreationTimestamp="2026-01-20 16:44:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:44:07.196444428 +0000 UTC m=+140.956782395" watchObservedRunningTime="2026-01-20 16:44:07.248305114 +0000 UTC m=+141.008643082" Jan 20 16:44:07 crc kubenswrapper[4558]: E0120 16:44:07.251254 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-20 16:44:07.751229024 +0000 UTC m=+141.511566991 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zsdqk" (UID: "ea01cf9d-e1ac-4197-a0ac-830eebd15245") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:44:07 crc kubenswrapper[4558]: I0120 16:44:07.281120 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-chwxp"] Jan 20 16:44:07 crc kubenswrapper[4558]: I0120 16:44:07.321548 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-5rxsc" podStartSLOduration=123.321530252 podStartE2EDuration="2m3.321530252s" podCreationTimestamp="2026-01-20 16:42:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:44:07.31504531 +0000 UTC m=+141.075383266" watchObservedRunningTime="2026-01-20 16:44:07.321530252 +0000 UTC m=+141.081868219" Jan 20 16:44:07 crc kubenswrapper[4558]: I0120 16:44:07.327798 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-pltl7" event={"ID":"8ad94588-fb91-4748-bc0c-c87f686fa9a0","Type":"ContainerStarted","Data":"e0dcb6ff40207eeabc2a3d57116f823412e3a4b8e7531345f21813bce0301f8a"} Jan 20 16:44:07 crc kubenswrapper[4558]: I0120 16:44:07.328233 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-pltl7" event={"ID":"8ad94588-fb91-4748-bc0c-c87f686fa9a0","Type":"ContainerStarted","Data":"b684ae78239d01c37e42e8ba77961a53674770b4926c4923979f1b8e1e3e56d0"} Jan 20 16:44:07 crc kubenswrapper[4558]: I0120 16:44:07.329888 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-7pjxc"] Jan 20 16:44:07 crc kubenswrapper[4558]: I0120 16:44:07.341059 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29482110-7p5pb" event={"ID":"31eb233b-13fe-40cb-9259-f18574a85e01","Type":"ContainerStarted","Data":"b1395ae185fc8ce778c26e4c5116454432cf134a17cd7710f9f32667c53c3676"} Jan 20 16:44:07 crc kubenswrapper[4558]: I0120 16:44:07.351512 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-t4dxf" podStartSLOduration=123.351486044 podStartE2EDuration="2m3.351486044s" podCreationTimestamp="2026-01-20 16:42:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:44:07.35058317 +0000 UTC m=+141.110921137" watchObservedRunningTime="2026-01-20 16:44:07.351486044 +0000 UTC m=+141.111824012" Jan 20 16:44:07 crc kubenswrapper[4558]: I0120 16:44:07.364587 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-nwjzd" Jan 20 16:44:07 crc kubenswrapper[4558]: I0120 16:44:07.383250 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-2j5lz" podStartSLOduration=123.383231935 podStartE2EDuration="2m3.383231935s" podCreationTimestamp="2026-01-20 
16:42:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:44:07.383047077 +0000 UTC m=+141.143385044" watchObservedRunningTime="2026-01-20 16:44:07.383231935 +0000 UTC m=+141.143569903" Jan 20 16:44:07 crc kubenswrapper[4558]: I0120 16:44:07.388145 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-ppqsn" Jan 20 16:44:07 crc kubenswrapper[4558]: I0120 16:44:07.405314 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-7wfbg" podStartSLOduration=123.405297542 podStartE2EDuration="2m3.405297542s" podCreationTimestamp="2026-01-20 16:42:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:44:07.404514574 +0000 UTC m=+141.164852542" watchObservedRunningTime="2026-01-20 16:44:07.405297542 +0000 UTC m=+141.165635510" Jan 20 16:44:07 crc kubenswrapper[4558]: I0120 16:44:07.419684 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 20 16:44:07 crc kubenswrapper[4558]: E0120 16:44:07.421999 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-20 16:44:07.921983563 +0000 UTC m=+141.682321530 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:44:07 crc kubenswrapper[4558]: I0120 16:44:07.524912 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zsdqk\" (UID: \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\") " pod="openshift-image-registry/image-registry-697d97f7c8-zsdqk" Jan 20 16:44:07 crc kubenswrapper[4558]: E0120 16:44:07.525310 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-20 16:44:08.025298437 +0000 UTC m=+141.785636404 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zsdqk" (UID: "ea01cf9d-e1ac-4197-a0ac-830eebd15245") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:44:07 crc kubenswrapper[4558]: I0120 16:44:07.532019 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-8xc26" podStartSLOduration=123.53200157 podStartE2EDuration="2m3.53200157s" podCreationTimestamp="2026-01-20 16:42:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:44:07.478859527 +0000 UTC m=+141.239197494" watchObservedRunningTime="2026-01-20 16:44:07.53200157 +0000 UTC m=+141.292339538" Jan 20 16:44:07 crc kubenswrapper[4558]: I0120 16:44:07.578888 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-49k6d" podStartSLOduration=123.57885872 podStartE2EDuration="2m3.57885872s" podCreationTimestamp="2026-01-20 16:42:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:44:07.532445218 +0000 UTC m=+141.292783185" watchObservedRunningTime="2026-01-20 16:44:07.57885872 +0000 UTC m=+141.339196687" Jan 20 16:44:07 crc kubenswrapper[4558]: I0120 16:44:07.615112 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29482110-7p5pb" podStartSLOduration=123.615090751 podStartE2EDuration="2m3.615090751s" podCreationTimestamp="2026-01-20 16:42:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:44:07.601872256 +0000 UTC m=+141.362210223" watchObservedRunningTime="2026-01-20 16:44:07.615090751 +0000 UTC m=+141.375428718" Jan 20 16:44:07 crc kubenswrapper[4558]: I0120 16:44:07.618975 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-rfmgn"] Jan 20 16:44:07 crc kubenswrapper[4558]: I0120 16:44:07.626706 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 20 16:44:07 crc kubenswrapper[4558]: E0120 16:44:07.627097 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-20 16:44:08.127082941 +0000 UTC m=+141.887420907 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:44:07 crc kubenswrapper[4558]: I0120 16:44:07.654088 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-mndbv"] Jan 20 16:44:07 crc kubenswrapper[4558]: I0120 16:44:07.667595 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mndbv" Jan 20 16:44:07 crc kubenswrapper[4558]: I0120 16:44:07.679525 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mndbv"] Jan 20 16:44:07 crc kubenswrapper[4558]: I0120 16:44:07.679649 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Jan 20 16:44:07 crc kubenswrapper[4558]: I0120 16:44:07.729041 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zsdqk\" (UID: \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\") " pod="openshift-image-registry/image-registry-697d97f7c8-zsdqk" Jan 20 16:44:07 crc kubenswrapper[4558]: I0120 16:44:07.729218 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/05d983ba-960a-4975-b3de-73a3891fb342-catalog-content\") pod \"redhat-marketplace-mndbv\" (UID: \"05d983ba-960a-4975-b3de-73a3891fb342\") " pod="openshift-marketplace/redhat-marketplace-mndbv" Jan 20 16:44:07 crc kubenswrapper[4558]: I0120 16:44:07.729244 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xz4cs\" (UniqueName: \"kubernetes.io/projected/05d983ba-960a-4975-b3de-73a3891fb342-kube-api-access-xz4cs\") pod \"redhat-marketplace-mndbv\" (UID: \"05d983ba-960a-4975-b3de-73a3891fb342\") " pod="openshift-marketplace/redhat-marketplace-mndbv" Jan 20 16:44:07 crc kubenswrapper[4558]: I0120 16:44:07.729264 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/05d983ba-960a-4975-b3de-73a3891fb342-utilities\") pod \"redhat-marketplace-mndbv\" (UID: \"05d983ba-960a-4975-b3de-73a3891fb342\") " pod="openshift-marketplace/redhat-marketplace-mndbv" Jan 20 16:44:07 crc kubenswrapper[4558]: E0120 16:44:07.729625 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-20 16:44:08.229609956 +0000 UTC m=+141.989947924 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zsdqk" (UID: "ea01cf9d-e1ac-4197-a0ac-830eebd15245") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:44:07 crc kubenswrapper[4558]: I0120 16:44:07.770296 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-pltl7" podStartSLOduration=123.770276879 podStartE2EDuration="2m3.770276879s" podCreationTimestamp="2026-01-20 16:42:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:44:07.712040367 +0000 UTC m=+141.472378334" watchObservedRunningTime="2026-01-20 16:44:07.770276879 +0000 UTC m=+141.530614846" Jan 20 16:44:07 crc kubenswrapper[4558]: I0120 16:44:07.831828 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 20 16:44:07 crc kubenswrapper[4558]: I0120 16:44:07.832447 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/05d983ba-960a-4975-b3de-73a3891fb342-catalog-content\") pod \"redhat-marketplace-mndbv\" (UID: \"05d983ba-960a-4975-b3de-73a3891fb342\") " pod="openshift-marketplace/redhat-marketplace-mndbv" Jan 20 16:44:07 crc kubenswrapper[4558]: I0120 16:44:07.832478 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xz4cs\" (UniqueName: \"kubernetes.io/projected/05d983ba-960a-4975-b3de-73a3891fb342-kube-api-access-xz4cs\") pod \"redhat-marketplace-mndbv\" (UID: \"05d983ba-960a-4975-b3de-73a3891fb342\") " pod="openshift-marketplace/redhat-marketplace-mndbv" Jan 20 16:44:07 crc kubenswrapper[4558]: I0120 16:44:07.832495 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/05d983ba-960a-4975-b3de-73a3891fb342-utilities\") pod \"redhat-marketplace-mndbv\" (UID: \"05d983ba-960a-4975-b3de-73a3891fb342\") " pod="openshift-marketplace/redhat-marketplace-mndbv" Jan 20 16:44:07 crc kubenswrapper[4558]: I0120 16:44:07.833055 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/05d983ba-960a-4975-b3de-73a3891fb342-utilities\") pod \"redhat-marketplace-mndbv\" (UID: \"05d983ba-960a-4975-b3de-73a3891fb342\") " pod="openshift-marketplace/redhat-marketplace-mndbv" Jan 20 16:44:07 crc kubenswrapper[4558]: E0120 16:44:07.838620 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-20 16:44:08.338579935 +0000 UTC m=+142.098917901 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:44:07 crc kubenswrapper[4558]: I0120 16:44:07.838754 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/05d983ba-960a-4975-b3de-73a3891fb342-catalog-content\") pod \"redhat-marketplace-mndbv\" (UID: \"05d983ba-960a-4975-b3de-73a3891fb342\") " pod="openshift-marketplace/redhat-marketplace-mndbv" Jan 20 16:44:07 crc kubenswrapper[4558]: I0120 16:44:07.874557 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xz4cs\" (UniqueName: \"kubernetes.io/projected/05d983ba-960a-4975-b3de-73a3891fb342-kube-api-access-xz4cs\") pod \"redhat-marketplace-mndbv\" (UID: \"05d983ba-960a-4975-b3de-73a3891fb342\") " pod="openshift-marketplace/redhat-marketplace-mndbv" Jan 20 16:44:07 crc kubenswrapper[4558]: I0120 16:44:07.935232 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zsdqk\" (UID: \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\") " pod="openshift-image-registry/image-registry-697d97f7c8-zsdqk" Jan 20 16:44:07 crc kubenswrapper[4558]: E0120 16:44:07.935601 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-20 16:44:08.435586329 +0000 UTC m=+142.195924296 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zsdqk" (UID: "ea01cf9d-e1ac-4197-a0ac-830eebd15245") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.035994 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 20 16:44:08 crc kubenswrapper[4558]: E0120 16:44:08.036138 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-20 16:44:08.536115884 +0000 UTC m=+142.296453851 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.036532 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zsdqk\" (UID: \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\") " pod="openshift-image-registry/image-registry-697d97f7c8-zsdqk" Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.036802 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mndbv" Jan 20 16:44:08 crc kubenswrapper[4558]: E0120 16:44:08.036862 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-20 16:44:08.53684992 +0000 UTC m=+142.297187886 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zsdqk" (UID: "ea01cf9d-e1ac-4197-a0ac-830eebd15245") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.040086 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-275ld"] Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.041192 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-275ld" Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.046430 4558 csr.go:261] certificate signing request csr-rpv6q is approved, waiting to be issued Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.051646 4558 csr.go:257] certificate signing request csr-rpv6q is issued Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.054483 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-275ld"] Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.137628 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.137818 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/567265d1-8d94-4502-aba8-e4ce03c0def9-utilities\") pod \"redhat-marketplace-275ld\" (UID: \"567265d1-8d94-4502-aba8-e4ce03c0def9\") " pod="openshift-marketplace/redhat-marketplace-275ld" Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.137894 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/567265d1-8d94-4502-aba8-e4ce03c0def9-catalog-content\") pod \"redhat-marketplace-275ld\" (UID: \"567265d1-8d94-4502-aba8-e4ce03c0def9\") " pod="openshift-marketplace/redhat-marketplace-275ld" Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.137959 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cxhxg\" (UniqueName: \"kubernetes.io/projected/567265d1-8d94-4502-aba8-e4ce03c0def9-kube-api-access-cxhxg\") pod \"redhat-marketplace-275ld\" (UID: \"567265d1-8d94-4502-aba8-e4ce03c0def9\") " pod="openshift-marketplace/redhat-marketplace-275ld" Jan 20 16:44:08 crc kubenswrapper[4558]: E0120 16:44:08.138066 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-20 16:44:08.638053326 +0000 UTC m=+142.398391294 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.145341 4558 patch_prober.go:28] interesting pod/router-default-5444994796-vkkjt container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 20 16:44:08 crc kubenswrapper[4558]: [-]has-synced failed: reason withheld Jan 20 16:44:08 crc kubenswrapper[4558]: [+]process-running ok Jan 20 16:44:08 crc kubenswrapper[4558]: healthz check failed Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.145385 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-vkkjt" podUID="84f45a25-f77d-4c4c-88b6-5bdc4c286f10" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.239184 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/567265d1-8d94-4502-aba8-e4ce03c0def9-catalog-content\") pod \"redhat-marketplace-275ld\" (UID: \"567265d1-8d94-4502-aba8-e4ce03c0def9\") " pod="openshift-marketplace/redhat-marketplace-275ld" Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.239239 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zsdqk\" (UID: \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\") " pod="openshift-image-registry/image-registry-697d97f7c8-zsdqk" Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.239274 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cxhxg\" (UniqueName: \"kubernetes.io/projected/567265d1-8d94-4502-aba8-e4ce03c0def9-kube-api-access-cxhxg\") pod \"redhat-marketplace-275ld\" (UID: \"567265d1-8d94-4502-aba8-e4ce03c0def9\") " pod="openshift-marketplace/redhat-marketplace-275ld" Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.239311 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/567265d1-8d94-4502-aba8-e4ce03c0def9-utilities\") pod \"redhat-marketplace-275ld\" (UID: \"567265d1-8d94-4502-aba8-e4ce03c0def9\") " pod="openshift-marketplace/redhat-marketplace-275ld" Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.239696 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/567265d1-8d94-4502-aba8-e4ce03c0def9-utilities\") pod \"redhat-marketplace-275ld\" (UID: \"567265d1-8d94-4502-aba8-e4ce03c0def9\") " pod="openshift-marketplace/redhat-marketplace-275ld" Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.239901 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/567265d1-8d94-4502-aba8-e4ce03c0def9-catalog-content\") pod 
\"redhat-marketplace-275ld\" (UID: \"567265d1-8d94-4502-aba8-e4ce03c0def9\") " pod="openshift-marketplace/redhat-marketplace-275ld" Jan 20 16:44:08 crc kubenswrapper[4558]: E0120 16:44:08.240099 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-20 16:44:08.740085157 +0000 UTC m=+142.500423124 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zsdqk" (UID: "ea01cf9d-e1ac-4197-a0ac-830eebd15245") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.261979 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cxhxg\" (UniqueName: \"kubernetes.io/projected/567265d1-8d94-4502-aba8-e4ce03c0def9-kube-api-access-cxhxg\") pod \"redhat-marketplace-275ld\" (UID: \"567265d1-8d94-4502-aba8-e4ce03c0def9\") " pod="openshift-marketplace/redhat-marketplace-275ld" Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.340133 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 20 16:44:08 crc kubenswrapper[4558]: E0120 16:44:08.340244 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-20 16:44:08.840230066 +0000 UTC m=+142.600568033 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.340595 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zsdqk\" (UID: \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\") " pod="openshift-image-registry/image-registry-697d97f7c8-zsdqk" Jan 20 16:44:08 crc kubenswrapper[4558]: E0120 16:44:08.340948 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-20 16:44:08.840939966 +0000 UTC m=+142.601277933 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zsdqk" (UID: "ea01cf9d-e1ac-4197-a0ac-830eebd15245") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.350032 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-sgbgg" event={"ID":"61738345-9c27-415a-a089-e0000fe5c92a","Type":"ContainerStarted","Data":"e3e15a84f3141320f7dba2c056640299351f9c447ab5c116e8aba9d2e0d4b62b"} Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.352251 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jq4hg" event={"ID":"7241fe86-b7e1-4246-b040-66ca5e799562","Type":"ContainerStarted","Data":"99a52fb3abd7573c4626b7627f5ebf8642ac28607ae09fe565948517e3364b97"} Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.356460 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-9cqxr" event={"ID":"6aa23de2-5f67-4f16-bd7d-eeecb3dfa19f","Type":"ContainerStarted","Data":"c8a5066059078e745e1eb013b323122a2054b639fdac391bb773cb6cbb4caf67"} Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.360292 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-gbvfn" event={"ID":"ad0a9f73-941c-4023-a106-2b0fa19da6a0","Type":"ContainerStarted","Data":"f4e2e303109a371580168bdc94304f1cc2a1ca13dece175cd6f95cc5bee6f56c"} Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.360443 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-gbvfn" Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.363358 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-t4dxf" event={"ID":"a53f5d18-851d-4a86-b349-e53e035d39ce","Type":"ContainerStarted","Data":"95b056b99492dc4b2a9918feb495f2a0faf59946c6de5d57fd4829fb33fd83c3"} Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.363397 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-t4dxf" event={"ID":"a53f5d18-851d-4a86-b349-e53e035d39ce","Type":"ContainerStarted","Data":"f708c54b96ceb7a31071f1c3e40c95a9115d748e89f507dc9407ff94b95fd825"} Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.379289 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-jq4hg" podStartSLOduration=124.379272031 podStartE2EDuration="2m4.379272031s" podCreationTimestamp="2026-01-20 16:42:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:44:08.378590705 +0000 UTC m=+142.138928672" watchObservedRunningTime="2026-01-20 16:44:08.379272031 +0000 UTC m=+142.139609998" Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.383880 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-pltl7" 
event={"ID":"8ad94588-fb91-4748-bc0c-c87f686fa9a0","Type":"ContainerStarted","Data":"1ff7b46388e903f91d5a74f3405645384e50351eb998650d963e33014974b502"} Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.389894 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-275ld" Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.393198 4558 generic.go:334] "Generic (PLEG): container finished" podID="11fdde87-2c2c-40ba-84e5-e0c93fc58130" containerID="0d306167c7391c2c6c552b1eb5c696492d5c7c26d4b97f44208762e73700dcd4" exitCode=0 Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.393259 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bgqpg" event={"ID":"11fdde87-2c2c-40ba-84e5-e0c93fc58130","Type":"ContainerDied","Data":"0d306167c7391c2c6c552b1eb5c696492d5c7c26d4b97f44208762e73700dcd4"} Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.393280 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bgqpg" event={"ID":"11fdde87-2c2c-40ba-84e5-e0c93fc58130","Type":"ContainerStarted","Data":"d596ca7d3fba7864e1cfff15aa3f38d10311b062fbda0815f9c424b9939df615"} Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.398513 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-gbvfn" podStartSLOduration=8.398501835 podStartE2EDuration="8.398501835s" podCreationTimestamp="2026-01-20 16:44:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:44:08.39574552 +0000 UTC m=+142.156083488" watchObservedRunningTime="2026-01-20 16:44:08.398501835 +0000 UTC m=+142.158839802" Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.403556 4558 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.404150 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-mshrf" event={"ID":"963800b2-ec8d-4d8f-9a05-f199a20c8ba9","Type":"ContainerStarted","Data":"d87f9bcdc3de4bf9172fc691c6a53a00b191aca75e96b6005617d5d54e123cba"} Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.411972 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-2j5lz" event={"ID":"96b0927f-cc7b-48e8-82bd-bbec85c2b9b2","Type":"ContainerStarted","Data":"ca73560949e9fc5dc991b312a1529736b6d7a2bd41cf857a84a1ef3542fbba5c"} Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.416687 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.416952 4558 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-2j5lz container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.40:8080/healthz\": dial tcp 10.217.0.40:8080: connect: connection refused" start-of-body= Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.417001 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-2j5lz" podUID="96b0927f-cc7b-48e8-82bd-bbec85c2b9b2" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.40:8080/healthz\": dial tcp 10.217.0.40:8080: connect: connection 
refused" Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.417544 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.421982 4558 generic.go:334] "Generic (PLEG): container finished" podID="2aa030eb-fbf5-4bf5-ac24-0791492977c2" containerID="01415cc2f48ff2bb8ca7184ee83e04510660f7275f127b61c4129885f8bc6839" exitCode=0 Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.422053 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rfmgn" event={"ID":"2aa030eb-fbf5-4bf5-ac24-0791492977c2","Type":"ContainerDied","Data":"01415cc2f48ff2bb8ca7184ee83e04510660f7275f127b61c4129885f8bc6839"} Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.422080 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rfmgn" event={"ID":"2aa030eb-fbf5-4bf5-ac24-0791492977c2","Type":"ContainerStarted","Data":"9c21664012bebd70f028e3608a210a424a5da816f6cbf4eccabaaa4d14daee26"} Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.425983 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.426212 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.433065 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.435637 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-sjzfb" event={"ID":"f6c99edf-1afb-476e-b63e-ee73c372499c","Type":"ContainerStarted","Data":"c447ac6de8825d2b93a0c2969f40e2fe4a131660353ecafeea0be6c18e4b97f1"} Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.435688 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-sjzfb" event={"ID":"f6c99edf-1afb-476e-b63e-ee73c372499c","Type":"ContainerStarted","Data":"3a3e14aa23015f4ac673f23c16e1bebc9983e597ffcb38c29f88f58f7f8e39df"} Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.440688 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-5rxsc" event={"ID":"1cdfeb06-d9ec-43ad-850c-d685e4e7c8a5","Type":"ContainerStarted","Data":"0c03b813ae0cef1599b12a4e023030a9da8522dc3e4133d3c5b8046c72ffbcf2"} Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.441124 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 20 16:44:08 crc kubenswrapper[4558]: E0120 16:44:08.442834 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-20 16:44:08.942819049 +0000 UTC m=+142.703157016 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.447863 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-mshrf" podStartSLOduration=124.447843855 podStartE2EDuration="2m4.447843855s" podCreationTimestamp="2026-01-20 16:42:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:44:08.446290272 +0000 UTC m=+142.206628239" watchObservedRunningTime="2026-01-20 16:44:08.447843855 +0000 UTC m=+142.208181822" Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.468775 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b97gp" event={"ID":"436af592-a7f2-4649-b2e3-47df5e85ce9c","Type":"ContainerStarted","Data":"2580e83ae62f65dec7e854774b24207e6e080c94e3043d51e07cb9102691c639"} Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.490552 4558 generic.go:334] "Generic (PLEG): container finished" podID="03467399-b14f-421a-a7ec-f2a533daed0d" containerID="2c7513d64d0908c87f7ae5b1a41770fb262c11553ce6d0b68b47acbb1c37bde1" exitCode=0 Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.491692 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-chwxp" event={"ID":"03467399-b14f-421a-a7ec-f2a533daed0d","Type":"ContainerDied","Data":"2c7513d64d0908c87f7ae5b1a41770fb262c11553ce6d0b68b47acbb1c37bde1"} Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.491718 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-chwxp" event={"ID":"03467399-b14f-421a-a7ec-f2a533daed0d","Type":"ContainerStarted","Data":"ac14644a9057b92779a5f41b1bd464485b019b2d9080807342e0cfa2eaf6b8ab"} Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.504963 4558 generic.go:334] "Generic (PLEG): container finished" podID="04aecb46-8cfc-4f76-9fe7-31ac1f5bd71e" containerID="f0dd7447a21500c96ca1f487d164385428f01ff81b7c6dce95e7a8a4d0ad549f" exitCode=0 Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.505203 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7pjxc" event={"ID":"04aecb46-8cfc-4f76-9fe7-31ac1f5bd71e","Type":"ContainerDied","Data":"f0dd7447a21500c96ca1f487d164385428f01ff81b7c6dce95e7a8a4d0ad549f"} Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.505250 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7pjxc" event={"ID":"04aecb46-8cfc-4f76-9fe7-31ac1f5bd71e","Type":"ContainerStarted","Data":"a5827942feb719a55a98eb7d81f945f7da6cd5892e4f9747b1059ee7f0c7c1bb"} Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.514586 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-sjzfb" podStartSLOduration=124.51456809 podStartE2EDuration="2m4.51456809s" podCreationTimestamp="2026-01-20 16:42:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:44:08.514411995 +0000 UTC m=+142.274749962" watchObservedRunningTime="2026-01-20 16:44:08.51456809 +0000 UTC m=+142.274906057" Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.527366 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29482110-7p5pb" event={"ID":"31eb233b-13fe-40cb-9259-f18574a85e01","Type":"ContainerStarted","Data":"7f57d98e525d4bcc9c65ec21f59f379838f112b6debbf769cd5bf60c98cb5558"} Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.530529 4558 patch_prober.go:28] interesting pod/downloads-7954f5f757-225km container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" start-of-body= Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.530575 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-225km" podUID="59e85616-edec-46fa-aa93-a1ecb76b415c" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.533816 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mndbv"] Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.550693 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b97gp" podStartSLOduration=124.550675095 podStartE2EDuration="2m4.550675095s" podCreationTimestamp="2026-01-20 16:42:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:44:08.538515329 +0000 UTC m=+142.298853296" watchObservedRunningTime="2026-01-20 16:44:08.550675095 +0000 UTC m=+142.311013062" Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.550868 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ed9427fd-b75f-44cc-82fd-d7ecca334c93-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"ed9427fd-b75f-44cc-82fd-d7ecca334c93\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.550945 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/ed9427fd-b75f-44cc-82fd-d7ecca334c93-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"ed9427fd-b75f-44cc-82fd-d7ecca334c93\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.551104 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zsdqk\" (UID: \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\") " pod="openshift-image-registry/image-registry-697d97f7c8-zsdqk" Jan 20 16:44:08 crc kubenswrapper[4558]: E0120 16:44:08.556114 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2026-01-20 16:44:09.056097061 +0000 UTC m=+142.816435028 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zsdqk" (UID: "ea01cf9d-e1ac-4197-a0ac-830eebd15245") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.654344 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.654648 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/ed9427fd-b75f-44cc-82fd-d7ecca334c93-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"ed9427fd-b75f-44cc-82fd-d7ecca334c93\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.655991 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ed9427fd-b75f-44cc-82fd-d7ecca334c93-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"ed9427fd-b75f-44cc-82fd-d7ecca334c93\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.656225 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/ed9427fd-b75f-44cc-82fd-d7ecca334c93-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"ed9427fd-b75f-44cc-82fd-d7ecca334c93\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 20 16:44:08 crc kubenswrapper[4558]: E0120 16:44:08.656027 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-20 16:44:09.15600836 +0000 UTC m=+142.916346326 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.676492 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ed9427fd-b75f-44cc-82fd-d7ecca334c93-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"ed9427fd-b75f-44cc-82fd-d7ecca334c93\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.745707 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.761415 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zsdqk\" (UID: \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\") " pod="openshift-image-registry/image-registry-697d97f7c8-zsdqk" Jan 20 16:44:08 crc kubenswrapper[4558]: E0120 16:44:08.761706 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-20 16:44:09.261693928 +0000 UTC m=+143.022031896 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zsdqk" (UID: "ea01cf9d-e1ac-4197-a0ac-830eebd15245") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.766075 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-275ld"] Jan 20 16:44:08 crc kubenswrapper[4558]: W0120 16:44:08.800141 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod567265d1_8d94_4502_aba8_e4ce03c0def9.slice/crio-3a6eb2b523868b88f2d2a04b7f734fccc2a0bccc1e7eea7eca82e58f63441cf3 WatchSource:0}: Error finding container 3a6eb2b523868b88f2d2a04b7f734fccc2a0bccc1e7eea7eca82e58f63441cf3: Status 404 returned error can't find the container with id 3a6eb2b523868b88f2d2a04b7f734fccc2a0bccc1e7eea7eca82e58f63441cf3 Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.823454 4558 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.846173 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-hr9pm"] Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.849758 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hr9pm" Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.856049 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.862423 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 20 16:44:08 crc kubenswrapper[4558]: E0120 16:44:08.862958 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2026-01-20 16:44:09.362943773 +0000 UTC m=+143.123281741 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.863075 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-hr9pm"] Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.964549 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0099ba6c-2da8-471c-bde7-528db23c4faf-catalog-content\") pod \"redhat-operators-hr9pm\" (UID: \"0099ba6c-2da8-471c-bde7-528db23c4faf\") " pod="openshift-marketplace/redhat-operators-hr9pm" Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.964587 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0099ba6c-2da8-471c-bde7-528db23c4faf-utilities\") pod \"redhat-operators-hr9pm\" (UID: \"0099ba6c-2da8-471c-bde7-528db23c4faf\") " pod="openshift-marketplace/redhat-operators-hr9pm" Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.964634 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8rkqf\" (UniqueName: \"kubernetes.io/projected/0099ba6c-2da8-471c-bde7-528db23c4faf-kube-api-access-8rkqf\") pod \"redhat-operators-hr9pm\" (UID: \"0099ba6c-2da8-471c-bde7-528db23c4faf\") " pod="openshift-marketplace/redhat-operators-hr9pm" Jan 20 16:44:08 crc kubenswrapper[4558]: I0120 16:44:08.964712 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zsdqk\" (UID: \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\") " pod="openshift-image-registry/image-registry-697d97f7c8-zsdqk" Jan 20 16:44:08 crc kubenswrapper[4558]: E0120 16:44:08.964958 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-20 16:44:09.464948102 +0000 UTC m=+143.225286070 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zsdqk" (UID: "ea01cf9d-e1ac-4197-a0ac-830eebd15245") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:44:09 crc kubenswrapper[4558]: I0120 16:44:09.053535 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2027-01-20 16:39:08 +0000 UTC, rotation deadline is 2026-10-31 11:02:53.600302523 +0000 UTC Jan 20 16:44:09 crc kubenswrapper[4558]: I0120 16:44:09.053780 4558 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 6810h18m44.546527879s for next certificate rotation Jan 20 16:44:09 crc kubenswrapper[4558]: I0120 16:44:09.065886 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 20 16:44:09 crc kubenswrapper[4558]: I0120 16:44:09.066151 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0099ba6c-2da8-471c-bde7-528db23c4faf-catalog-content\") pod \"redhat-operators-hr9pm\" (UID: \"0099ba6c-2da8-471c-bde7-528db23c4faf\") " pod="openshift-marketplace/redhat-operators-hr9pm" Jan 20 16:44:09 crc kubenswrapper[4558]: I0120 16:44:09.066192 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0099ba6c-2da8-471c-bde7-528db23c4faf-utilities\") pod \"redhat-operators-hr9pm\" (UID: \"0099ba6c-2da8-471c-bde7-528db23c4faf\") " pod="openshift-marketplace/redhat-operators-hr9pm" Jan 20 16:44:09 crc kubenswrapper[4558]: I0120 16:44:09.066557 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0099ba6c-2da8-471c-bde7-528db23c4faf-utilities\") pod \"redhat-operators-hr9pm\" (UID: \"0099ba6c-2da8-471c-bde7-528db23c4faf\") " pod="openshift-marketplace/redhat-operators-hr9pm" Jan 20 16:44:09 crc kubenswrapper[4558]: I0120 16:44:09.066815 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0099ba6c-2da8-471c-bde7-528db23c4faf-catalog-content\") pod \"redhat-operators-hr9pm\" (UID: \"0099ba6c-2da8-471c-bde7-528db23c4faf\") " pod="openshift-marketplace/redhat-operators-hr9pm" Jan 20 16:44:09 crc kubenswrapper[4558]: I0120 16:44:09.066920 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8rkqf\" (UniqueName: \"kubernetes.io/projected/0099ba6c-2da8-471c-bde7-528db23c4faf-kube-api-access-8rkqf\") pod \"redhat-operators-hr9pm\" (UID: \"0099ba6c-2da8-471c-bde7-528db23c4faf\") " pod="openshift-marketplace/redhat-operators-hr9pm" Jan 20 16:44:09 crc kubenswrapper[4558]: E0120 16:44:09.067363 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2026-01-20 16:44:09.567299457 +0000 UTC m=+143.327637423 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:44:09 crc kubenswrapper[4558]: I0120 16:44:09.112528 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Jan 20 16:44:09 crc kubenswrapper[4558]: I0120 16:44:09.115081 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8rkqf\" (UniqueName: \"kubernetes.io/projected/0099ba6c-2da8-471c-bde7-528db23c4faf-kube-api-access-8rkqf\") pod \"redhat-operators-hr9pm\" (UID: \"0099ba6c-2da8-471c-bde7-528db23c4faf\") " pod="openshift-marketplace/redhat-operators-hr9pm" Jan 20 16:44:09 crc kubenswrapper[4558]: W0120 16:44:09.133829 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-poded9427fd_b75f_44cc_82fd_d7ecca334c93.slice/crio-563dd431d9c45830aae5c336d4a14d2e8a455b2fb3910fd8bd3db0e53f781703 WatchSource:0}: Error finding container 563dd431d9c45830aae5c336d4a14d2e8a455b2fb3910fd8bd3db0e53f781703: Status 404 returned error can't find the container with id 563dd431d9c45830aae5c336d4a14d2e8a455b2fb3910fd8bd3db0e53f781703 Jan 20 16:44:09 crc kubenswrapper[4558]: I0120 16:44:09.144275 4558 patch_prober.go:28] interesting pod/router-default-5444994796-vkkjt container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 20 16:44:09 crc kubenswrapper[4558]: [-]has-synced failed: reason withheld Jan 20 16:44:09 crc kubenswrapper[4558]: [+]process-running ok Jan 20 16:44:09 crc kubenswrapper[4558]: healthz check failed Jan 20 16:44:09 crc kubenswrapper[4558]: I0120 16:44:09.144326 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-vkkjt" podUID="84f45a25-f77d-4c4c-88b6-5bdc4c286f10" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 20 16:44:09 crc kubenswrapper[4558]: I0120 16:44:09.168082 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zsdqk\" (UID: \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\") " pod="openshift-image-registry/image-registry-697d97f7c8-zsdqk" Jan 20 16:44:09 crc kubenswrapper[4558]: E0120 16:44:09.168522 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-20 16:44:09.668510578 +0000 UTC m=+143.428848546 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zsdqk" (UID: "ea01cf9d-e1ac-4197-a0ac-830eebd15245") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:44:09 crc kubenswrapper[4558]: I0120 16:44:09.214776 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hr9pm" Jan 20 16:44:09 crc kubenswrapper[4558]: I0120 16:44:09.239289 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-b9k54"] Jan 20 16:44:09 crc kubenswrapper[4558]: I0120 16:44:09.240273 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-b9k54" Jan 20 16:44:09 crc kubenswrapper[4558]: I0120 16:44:09.251956 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-b9k54"] Jan 20 16:44:09 crc kubenswrapper[4558]: I0120 16:44:09.269568 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 20 16:44:09 crc kubenswrapper[4558]: E0120 16:44:09.269962 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-20 16:44:09.769949379 +0000 UTC m=+143.530287347 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:44:09 crc kubenswrapper[4558]: I0120 16:44:09.370936 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zsdqk\" (UID: \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\") " pod="openshift-image-registry/image-registry-697d97f7c8-zsdqk" Jan 20 16:44:09 crc kubenswrapper[4558]: I0120 16:44:09.370982 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j59qc\" (UniqueName: \"kubernetes.io/projected/91268065-1eb8-448e-8597-458d2586ad12-kube-api-access-j59qc\") pod \"redhat-operators-b9k54\" (UID: \"91268065-1eb8-448e-8597-458d2586ad12\") " pod="openshift-marketplace/redhat-operators-b9k54" Jan 20 16:44:09 crc kubenswrapper[4558]: I0120 16:44:09.371030 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/91268065-1eb8-448e-8597-458d2586ad12-catalog-content\") pod \"redhat-operators-b9k54\" (UID: \"91268065-1eb8-448e-8597-458d2586ad12\") " pod="openshift-marketplace/redhat-operators-b9k54" Jan 20 16:44:09 crc kubenswrapper[4558]: I0120 16:44:09.371083 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/91268065-1eb8-448e-8597-458d2586ad12-utilities\") pod \"redhat-operators-b9k54\" (UID: \"91268065-1eb8-448e-8597-458d2586ad12\") " pod="openshift-marketplace/redhat-operators-b9k54" Jan 20 16:44:09 crc kubenswrapper[4558]: E0120 16:44:09.371292 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-20 16:44:09.871281299 +0000 UTC m=+143.631619266 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zsdqk" (UID: "ea01cf9d-e1ac-4197-a0ac-830eebd15245") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:44:09 crc kubenswrapper[4558]: I0120 16:44:09.409896 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-hr9pm"] Jan 20 16:44:09 crc kubenswrapper[4558]: I0120 16:44:09.472323 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 20 16:44:09 crc kubenswrapper[4558]: I0120 16:44:09.473020 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j59qc\" (UniqueName: \"kubernetes.io/projected/91268065-1eb8-448e-8597-458d2586ad12-kube-api-access-j59qc\") pod \"redhat-operators-b9k54\" (UID: \"91268065-1eb8-448e-8597-458d2586ad12\") " pod="openshift-marketplace/redhat-operators-b9k54" Jan 20 16:44:09 crc kubenswrapper[4558]: I0120 16:44:09.473102 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/91268065-1eb8-448e-8597-458d2586ad12-catalog-content\") pod \"redhat-operators-b9k54\" (UID: \"91268065-1eb8-448e-8597-458d2586ad12\") " pod="openshift-marketplace/redhat-operators-b9k54" Jan 20 16:44:09 crc kubenswrapper[4558]: E0120 16:44:09.473192 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-20 16:44:09.973152107 +0000 UTC m=+143.733490074 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:44:09 crc kubenswrapper[4558]: I0120 16:44:09.473309 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/91268065-1eb8-448e-8597-458d2586ad12-utilities\") pod \"redhat-operators-b9k54\" (UID: \"91268065-1eb8-448e-8597-458d2586ad12\") " pod="openshift-marketplace/redhat-operators-b9k54" Jan 20 16:44:09 crc kubenswrapper[4558]: I0120 16:44:09.473443 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zsdqk\" (UID: \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\") " pod="openshift-image-registry/image-registry-697d97f7c8-zsdqk" Jan 20 16:44:09 crc kubenswrapper[4558]: I0120 16:44:09.473469 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/91268065-1eb8-448e-8597-458d2586ad12-catalog-content\") pod \"redhat-operators-b9k54\" (UID: \"91268065-1eb8-448e-8597-458d2586ad12\") " pod="openshift-marketplace/redhat-operators-b9k54" Jan 20 16:44:09 crc kubenswrapper[4558]: I0120 16:44:09.473735 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/91268065-1eb8-448e-8597-458d2586ad12-utilities\") pod \"redhat-operators-b9k54\" (UID: \"91268065-1eb8-448e-8597-458d2586ad12\") " pod="openshift-marketplace/redhat-operators-b9k54" Jan 20 16:44:09 crc kubenswrapper[4558]: E0120 16:44:09.473946 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-20 16:44:09.973934374 +0000 UTC m=+143.734272341 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zsdqk" (UID: "ea01cf9d-e1ac-4197-a0ac-830eebd15245") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:44:09 crc kubenswrapper[4558]: I0120 16:44:09.491855 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j59qc\" (UniqueName: \"kubernetes.io/projected/91268065-1eb8-448e-8597-458d2586ad12-kube-api-access-j59qc\") pod \"redhat-operators-b9k54\" (UID: \"91268065-1eb8-448e-8597-458d2586ad12\") " pod="openshift-marketplace/redhat-operators-b9k54" Jan 20 16:44:09 crc kubenswrapper[4558]: I0120 16:44:09.537984 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"ed9427fd-b75f-44cc-82fd-d7ecca334c93","Type":"ContainerStarted","Data":"191c7a6aea8c03787da93957f330a9160f6d825cae5e84c9dee434ad02174f52"} Jan 20 16:44:09 crc kubenswrapper[4558]: I0120 16:44:09.538051 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"ed9427fd-b75f-44cc-82fd-d7ecca334c93","Type":"ContainerStarted","Data":"563dd431d9c45830aae5c336d4a14d2e8a455b2fb3910fd8bd3db0e53f781703"} Jan 20 16:44:09 crc kubenswrapper[4558]: I0120 16:44:09.545834 4558 generic.go:334] "Generic (PLEG): container finished" podID="05d983ba-960a-4975-b3de-73a3891fb342" containerID="da7a87a8742bf479fa28cd4ee4aa2b5ae97fea463c83fe90474cc551c785b068" exitCode=0 Jan 20 16:44:09 crc kubenswrapper[4558]: I0120 16:44:09.545904 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mndbv" event={"ID":"05d983ba-960a-4975-b3de-73a3891fb342","Type":"ContainerDied","Data":"da7a87a8742bf479fa28cd4ee4aa2b5ae97fea463c83fe90474cc551c785b068"} Jan 20 16:44:09 crc kubenswrapper[4558]: I0120 16:44:09.545930 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mndbv" event={"ID":"05d983ba-960a-4975-b3de-73a3891fb342","Type":"ContainerStarted","Data":"26c796cf0b148af39b037b1a478f5a34d889ddff53301973245c4db96f3a0572"} Jan 20 16:44:09 crc kubenswrapper[4558]: I0120 16:44:09.550651 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-9cqxr" event={"ID":"6aa23de2-5f67-4f16-bd7d-eeecb3dfa19f","Type":"ContainerStarted","Data":"c3b843276d36d72579c9cbd2221863e74b25661506205d0ca3e421391b204fde"} Jan 20 16:44:09 crc kubenswrapper[4558]: I0120 16:44:09.550725 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-9cqxr" event={"ID":"6aa23de2-5f67-4f16-bd7d-eeecb3dfa19f","Type":"ContainerStarted","Data":"0778a3d9635f93084515d596d43c9817b02095879d5df4e6cdc5a25fbcf1c69d"} Jan 20 16:44:09 crc kubenswrapper[4558]: I0120 16:44:09.550737 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-9cqxr" event={"ID":"6aa23de2-5f67-4f16-bd7d-eeecb3dfa19f","Type":"ContainerStarted","Data":"7cce8c91a9c6819a875bb78ea191b960aed1ef8b24f7dfb14bf307856e1e9108"} Jan 20 16:44:09 crc kubenswrapper[4558]: I0120 16:44:09.553761 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=1.553746437 podStartE2EDuration="1.553746437s" podCreationTimestamp="2026-01-20 16:44:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:44:09.550286104 +0000 UTC m=+143.310624071" watchObservedRunningTime="2026-01-20 16:44:09.553746437 +0000 UTC m=+143.314084403" Jan 20 16:44:09 crc kubenswrapper[4558]: I0120 16:44:09.555319 4558 generic.go:334] "Generic (PLEG): container finished" podID="567265d1-8d94-4502-aba8-e4ce03c0def9" containerID="217e4d83348241c5e4f558fadef7fa7a642acb7b588f19144787f7210717f4b9" exitCode=0 Jan 20 16:44:09 crc kubenswrapper[4558]: I0120 16:44:09.555423 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-275ld" event={"ID":"567265d1-8d94-4502-aba8-e4ce03c0def9","Type":"ContainerDied","Data":"217e4d83348241c5e4f558fadef7fa7a642acb7b588f19144787f7210717f4b9"} Jan 20 16:44:09 crc kubenswrapper[4558]: I0120 16:44:09.555448 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-275ld" event={"ID":"567265d1-8d94-4502-aba8-e4ce03c0def9","Type":"ContainerStarted","Data":"3a6eb2b523868b88f2d2a04b7f734fccc2a0bccc1e7eea7eca82e58f63441cf3"} Jan 20 16:44:09 crc kubenswrapper[4558]: I0120 16:44:09.557798 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-b9k54" Jan 20 16:44:09 crc kubenswrapper[4558]: I0120 16:44:09.576126 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hr9pm" event={"ID":"0099ba6c-2da8-471c-bde7-528db23c4faf","Type":"ContainerStarted","Data":"171754b6b52f7171c4db51da5a991465ade514e88f7dd3f142b1d37434e8da8e"} Jan 20 16:44:09 crc kubenswrapper[4558]: I0120 16:44:09.578562 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 20 16:44:09 crc kubenswrapper[4558]: I0120 16:44:09.578997 4558 patch_prober.go:28] interesting pod/downloads-7954f5f757-225km container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" start-of-body= Jan 20 16:44:09 crc kubenswrapper[4558]: E0120 16:44:09.579021 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-20 16:44:10.079007225 +0000 UTC m=+143.839345192 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:44:09 crc kubenswrapper[4558]: I0120 16:44:09.579040 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-225km" podUID="59e85616-edec-46fa-aa93-a1ecb76b415c" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" Jan 20 16:44:09 crc kubenswrapper[4558]: I0120 16:44:09.583539 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-2j5lz" Jan 20 16:44:09 crc kubenswrapper[4558]: I0120 16:44:09.652397 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-9cqxr" podStartSLOduration=9.652382877 podStartE2EDuration="9.652382877s" podCreationTimestamp="2026-01-20 16:44:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:44:09.633395012 +0000 UTC m=+143.393732979" watchObservedRunningTime="2026-01-20 16:44:09.652382877 +0000 UTC m=+143.412720844" Jan 20 16:44:09 crc kubenswrapper[4558]: I0120 16:44:09.684984 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zsdqk\" (UID: \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\") " pod="openshift-image-registry/image-registry-697d97f7c8-zsdqk" Jan 20 16:44:09 crc kubenswrapper[4558]: I0120 16:44:09.700286 4558 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2026-01-20T16:44:08.823620427Z","Handler":null,"Name":""} Jan 20 16:44:09 crc kubenswrapper[4558]: E0120 16:44:09.702028 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-20 16:44:10.202014354 +0000 UTC m=+143.962352322 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zsdqk" (UID: "ea01cf9d-e1ac-4197-a0ac-830eebd15245") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 20 16:44:09 crc kubenswrapper[4558]: I0120 16:44:09.714989 4558 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Jan 20 16:44:09 crc kubenswrapper[4558]: I0120 16:44:09.715232 4558 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Jan 20 16:44:09 crc kubenswrapper[4558]: I0120 16:44:09.786945 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 20 16:44:09 crc kubenswrapper[4558]: I0120 16:44:09.810308 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Jan 20 16:44:09 crc kubenswrapper[4558]: I0120 16:44:09.849708 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-b9k54"] Jan 20 16:44:09 crc kubenswrapper[4558]: W0120 16:44:09.865412 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod91268065_1eb8_448e_8597_458d2586ad12.slice/crio-447aa47447ac3861539de847a4409e04529a89c6ad4a3b1d3003377eb6c74775 WatchSource:0}: Error finding container 447aa47447ac3861539de847a4409e04529a89c6ad4a3b1d3003377eb6c74775: Status 404 returned error can't find the container with id 447aa47447ac3861539de847a4409e04529a89c6ad4a3b1d3003377eb6c74775 Jan 20 16:44:09 crc kubenswrapper[4558]: I0120 16:44:09.888552 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zsdqk\" (UID: \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\") " pod="openshift-image-registry/image-registry-697d97f7c8-zsdqk" Jan 20 16:44:09 crc kubenswrapper[4558]: I0120 16:44:09.890909 4558 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Jan 20 16:44:09 crc kubenswrapper[4558]: I0120 16:44:09.890946 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zsdqk\" (UID: \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-zsdqk" Jan 20 16:44:09 crc kubenswrapper[4558]: I0120 16:44:09.913764 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zsdqk\" (UID: \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\") " pod="openshift-image-registry/image-registry-697d97f7c8-zsdqk" Jan 20 16:44:10 crc kubenswrapper[4558]: I0120 16:44:10.001066 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-zsdqk" Jan 20 16:44:10 crc kubenswrapper[4558]: I0120 16:44:10.143031 4558 patch_prober.go:28] interesting pod/router-default-5444994796-vkkjt container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 20 16:44:10 crc kubenswrapper[4558]: [-]has-synced failed: reason withheld Jan 20 16:44:10 crc kubenswrapper[4558]: [+]process-running ok Jan 20 16:44:10 crc kubenswrapper[4558]: healthz check failed Jan 20 16:44:10 crc kubenswrapper[4558]: I0120 16:44:10.143383 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-vkkjt" podUID="84f45a25-f77d-4c4c-88b6-5bdc4c286f10" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 20 16:44:10 crc kubenswrapper[4558]: I0120 16:44:10.497894 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-zsdqk"] Jan 20 16:44:10 crc kubenswrapper[4558]: I0120 16:44:10.570971 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Jan 20 16:44:10 crc kubenswrapper[4558]: I0120 16:44:10.584080 4558 generic.go:334] "Generic (PLEG): container finished" podID="0099ba6c-2da8-471c-bde7-528db23c4faf" containerID="50fbb7a62ff8ee104b3a72ffc13c1b7169f074bed241d16baec69089a6bbf133" exitCode=0 Jan 20 16:44:10 crc kubenswrapper[4558]: I0120 16:44:10.584147 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hr9pm" event={"ID":"0099ba6c-2da8-471c-bde7-528db23c4faf","Type":"ContainerDied","Data":"50fbb7a62ff8ee104b3a72ffc13c1b7169f074bed241d16baec69089a6bbf133"} Jan 20 16:44:10 crc kubenswrapper[4558]: I0120 16:44:10.587720 4558 generic.go:334] "Generic (PLEG): container finished" podID="ed9427fd-b75f-44cc-82fd-d7ecca334c93" containerID="191c7a6aea8c03787da93957f330a9160f6d825cae5e84c9dee434ad02174f52" exitCode=0 Jan 20 16:44:10 crc kubenswrapper[4558]: I0120 16:44:10.587780 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" 
event={"ID":"ed9427fd-b75f-44cc-82fd-d7ecca334c93","Type":"ContainerDied","Data":"191c7a6aea8c03787da93957f330a9160f6d825cae5e84c9dee434ad02174f52"} Jan 20 16:44:10 crc kubenswrapper[4558]: I0120 16:44:10.590262 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-zsdqk" event={"ID":"ea01cf9d-e1ac-4197-a0ac-830eebd15245","Type":"ContainerStarted","Data":"9a8052fd1c09fcf4afa44eb1c15cab9253a408351002d246fa260433502c5c64"} Jan 20 16:44:10 crc kubenswrapper[4558]: I0120 16:44:10.593031 4558 generic.go:334] "Generic (PLEG): container finished" podID="91268065-1eb8-448e-8597-458d2586ad12" containerID="c4b151c7eb0c287c5d4510d4d05477bdd96cced57570f7cec5d25c4b32b4b2da" exitCode=0 Jan 20 16:44:10 crc kubenswrapper[4558]: I0120 16:44:10.593073 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b9k54" event={"ID":"91268065-1eb8-448e-8597-458d2586ad12","Type":"ContainerDied","Data":"c4b151c7eb0c287c5d4510d4d05477bdd96cced57570f7cec5d25c4b32b4b2da"} Jan 20 16:44:10 crc kubenswrapper[4558]: I0120 16:44:10.593088 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b9k54" event={"ID":"91268065-1eb8-448e-8597-458d2586ad12","Type":"ContainerStarted","Data":"447aa47447ac3861539de847a4409e04529a89c6ad4a3b1d3003377eb6c74775"} Jan 20 16:44:10 crc kubenswrapper[4558]: I0120 16:44:10.595032 4558 generic.go:334] "Generic (PLEG): container finished" podID="31eb233b-13fe-40cb-9259-f18574a85e01" containerID="7f57d98e525d4bcc9c65ec21f59f379838f112b6debbf769cd5bf60c98cb5558" exitCode=0 Jan 20 16:44:10 crc kubenswrapper[4558]: I0120 16:44:10.595092 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29482110-7p5pb" event={"ID":"31eb233b-13fe-40cb-9259-f18574a85e01","Type":"ContainerDied","Data":"7f57d98e525d4bcc9c65ec21f59f379838f112b6debbf769cd5bf60c98cb5558"} Jan 20 16:44:11 crc kubenswrapper[4558]: I0120 16:44:11.140872 4558 patch_prober.go:28] interesting pod/router-default-5444994796-vkkjt container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 20 16:44:11 crc kubenswrapper[4558]: [-]has-synced failed: reason withheld Jan 20 16:44:11 crc kubenswrapper[4558]: [+]process-running ok Jan 20 16:44:11 crc kubenswrapper[4558]: healthz check failed Jan 20 16:44:11 crc kubenswrapper[4558]: I0120 16:44:11.140967 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-vkkjt" podUID="84f45a25-f77d-4c4c-88b6-5bdc4c286f10" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 20 16:44:11 crc kubenswrapper[4558]: I0120 16:44:11.602669 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-zsdqk" event={"ID":"ea01cf9d-e1ac-4197-a0ac-830eebd15245","Type":"ContainerStarted","Data":"d9840b5ca22dca84102fb767b8b153581bafb67e70ee8524eb5d479b886a9745"} Jan 20 16:44:11 crc kubenswrapper[4558]: I0120 16:44:11.617725 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-zsdqk" podStartSLOduration=127.617712118 podStartE2EDuration="2m7.617712118s" podCreationTimestamp="2026-01-20 16:42:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 
+0000 UTC" observedRunningTime="2026-01-20 16:44:11.616920142 +0000 UTC m=+145.377258109" watchObservedRunningTime="2026-01-20 16:44:11.617712118 +0000 UTC m=+145.378050085" Jan 20 16:44:11 crc kubenswrapper[4558]: I0120 16:44:11.864056 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 20 16:44:11 crc kubenswrapper[4558]: I0120 16:44:11.871807 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29482110-7p5pb" Jan 20 16:44:11 crc kubenswrapper[4558]: I0120 16:44:11.916937 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ed9427fd-b75f-44cc-82fd-d7ecca334c93-kube-api-access\") pod \"ed9427fd-b75f-44cc-82fd-d7ecca334c93\" (UID: \"ed9427fd-b75f-44cc-82fd-d7ecca334c93\") " Jan 20 16:44:11 crc kubenswrapper[4558]: I0120 16:44:11.916998 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/ed9427fd-b75f-44cc-82fd-d7ecca334c93-kubelet-dir\") pod \"ed9427fd-b75f-44cc-82fd-d7ecca334c93\" (UID: \"ed9427fd-b75f-44cc-82fd-d7ecca334c93\") " Jan 20 16:44:11 crc kubenswrapper[4558]: I0120 16:44:11.917130 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ed9427fd-b75f-44cc-82fd-d7ecca334c93-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "ed9427fd-b75f-44cc-82fd-d7ecca334c93" (UID: "ed9427fd-b75f-44cc-82fd-d7ecca334c93"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 16:44:11 crc kubenswrapper[4558]: I0120 16:44:11.917417 4558 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/ed9427fd-b75f-44cc-82fd-d7ecca334c93-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 20 16:44:11 crc kubenswrapper[4558]: I0120 16:44:11.931438 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ed9427fd-b75f-44cc-82fd-d7ecca334c93-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "ed9427fd-b75f-44cc-82fd-d7ecca334c93" (UID: "ed9427fd-b75f-44cc-82fd-d7ecca334c93"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:44:12 crc kubenswrapper[4558]: I0120 16:44:12.018550 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8t6d2\" (UniqueName: \"kubernetes.io/projected/31eb233b-13fe-40cb-9259-f18574a85e01-kube-api-access-8t6d2\") pod \"31eb233b-13fe-40cb-9259-f18574a85e01\" (UID: \"31eb233b-13fe-40cb-9259-f18574a85e01\") " Jan 20 16:44:12 crc kubenswrapper[4558]: I0120 16:44:12.018637 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/31eb233b-13fe-40cb-9259-f18574a85e01-config-volume\") pod \"31eb233b-13fe-40cb-9259-f18574a85e01\" (UID: \"31eb233b-13fe-40cb-9259-f18574a85e01\") " Jan 20 16:44:12 crc kubenswrapper[4558]: I0120 16:44:12.018680 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/31eb233b-13fe-40cb-9259-f18574a85e01-secret-volume\") pod \"31eb233b-13fe-40cb-9259-f18574a85e01\" (UID: \"31eb233b-13fe-40cb-9259-f18574a85e01\") " Jan 20 16:44:12 crc kubenswrapper[4558]: I0120 16:44:12.018980 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ed9427fd-b75f-44cc-82fd-d7ecca334c93-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 20 16:44:12 crc kubenswrapper[4558]: I0120 16:44:12.019727 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31eb233b-13fe-40cb-9259-f18574a85e01-config-volume" (OuterVolumeSpecName: "config-volume") pod "31eb233b-13fe-40cb-9259-f18574a85e01" (UID: "31eb233b-13fe-40cb-9259-f18574a85e01"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:44:12 crc kubenswrapper[4558]: I0120 16:44:12.038912 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31eb233b-13fe-40cb-9259-f18574a85e01-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "31eb233b-13fe-40cb-9259-f18574a85e01" (UID: "31eb233b-13fe-40cb-9259-f18574a85e01"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:44:12 crc kubenswrapper[4558]: I0120 16:44:12.039380 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31eb233b-13fe-40cb-9259-f18574a85e01-kube-api-access-8t6d2" (OuterVolumeSpecName: "kube-api-access-8t6d2") pod "31eb233b-13fe-40cb-9259-f18574a85e01" (UID: "31eb233b-13fe-40cb-9259-f18574a85e01"). InnerVolumeSpecName "kube-api-access-8t6d2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:44:12 crc kubenswrapper[4558]: I0120 16:44:12.121332 4558 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/31eb233b-13fe-40cb-9259-f18574a85e01-config-volume\") on node \"crc\" DevicePath \"\"" Jan 20 16:44:12 crc kubenswrapper[4558]: I0120 16:44:12.121367 4558 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/31eb233b-13fe-40cb-9259-f18574a85e01-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 20 16:44:12 crc kubenswrapper[4558]: I0120 16:44:12.121377 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8t6d2\" (UniqueName: \"kubernetes.io/projected/31eb233b-13fe-40cb-9259-f18574a85e01-kube-api-access-8t6d2\") on node \"crc\" DevicePath \"\"" Jan 20 16:44:12 crc kubenswrapper[4558]: I0120 16:44:12.141205 4558 patch_prober.go:28] interesting pod/router-default-5444994796-vkkjt container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 20 16:44:12 crc kubenswrapper[4558]: [-]has-synced failed: reason withheld Jan 20 16:44:12 crc kubenswrapper[4558]: [+]process-running ok Jan 20 16:44:12 crc kubenswrapper[4558]: healthz check failed Jan 20 16:44:12 crc kubenswrapper[4558]: I0120 16:44:12.141271 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-vkkjt" podUID="84f45a25-f77d-4c4c-88b6-5bdc4c286f10" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 20 16:44:12 crc kubenswrapper[4558]: I0120 16:44:12.575756 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-sjzfb" Jan 20 16:44:12 crc kubenswrapper[4558]: I0120 16:44:12.575899 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-sjzfb" Jan 20 16:44:12 crc kubenswrapper[4558]: I0120 16:44:12.584388 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-sjzfb" Jan 20 16:44:12 crc kubenswrapper[4558]: I0120 16:44:12.618606 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29482110-7p5pb" event={"ID":"31eb233b-13fe-40cb-9259-f18574a85e01","Type":"ContainerDied","Data":"b1395ae185fc8ce778c26e4c5116454432cf134a17cd7710f9f32667c53c3676"} Jan 20 16:44:12 crc kubenswrapper[4558]: I0120 16:44:12.618638 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29482110-7p5pb" Jan 20 16:44:12 crc kubenswrapper[4558]: I0120 16:44:12.618649 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b1395ae185fc8ce778c26e4c5116454432cf134a17cd7710f9f32667c53c3676" Jan 20 16:44:12 crc kubenswrapper[4558]: I0120 16:44:12.622223 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"ed9427fd-b75f-44cc-82fd-d7ecca334c93","Type":"ContainerDied","Data":"563dd431d9c45830aae5c336d4a14d2e8a455b2fb3910fd8bd3db0e53f781703"} Jan 20 16:44:12 crc kubenswrapper[4558]: I0120 16:44:12.622275 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="563dd431d9c45830aae5c336d4a14d2e8a455b2fb3910fd8bd3db0e53f781703" Jan 20 16:44:12 crc kubenswrapper[4558]: I0120 16:44:12.622510 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 20 16:44:12 crc kubenswrapper[4558]: I0120 16:44:12.623485 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-zsdqk" Jan 20 16:44:12 crc kubenswrapper[4558]: I0120 16:44:12.629212 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 20 16:44:12 crc kubenswrapper[4558]: I0120 16:44:12.629448 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 20 16:44:12 crc kubenswrapper[4558]: I0120 16:44:12.629473 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:44:12 crc kubenswrapper[4558]: I0120 16:44:12.629576 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:44:12 crc kubenswrapper[4558]: I0120 16:44:12.630473 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:44:12 crc kubenswrapper[4558]: I0120 16:44:12.636094 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="ready" pod="openshift-apiserver/apiserver-76f77b778f-sjzfb" Jan 20 16:44:12 crc kubenswrapper[4558]: I0120 16:44:12.638859 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 20 16:44:12 crc kubenswrapper[4558]: I0120 16:44:12.644950 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:44:12 crc kubenswrapper[4558]: I0120 16:44:12.645481 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 20 16:44:12 crc kubenswrapper[4558]: I0120 16:44:12.682896 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 20 16:44:12 crc kubenswrapper[4558]: I0120 16:44:12.685450 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 20 16:44:12 crc kubenswrapper[4558]: I0120 16:44:12.687656 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 20 16:44:12 crc kubenswrapper[4558]: I0120 16:44:12.690860 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-lhmdh" Jan 20 16:44:12 crc kubenswrapper[4558]: I0120 16:44:12.692033 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-lhmdh" Jan 20 16:44:12 crc kubenswrapper[4558]: I0120 16:44:12.692553 4558 patch_prober.go:28] interesting pod/console-f9d7485db-lhmdh container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.10:8443/health\": dial tcp 10.217.0.10:8443: connect: connection refused" start-of-body= Jan 20 16:44:12 crc kubenswrapper[4558]: I0120 16:44:12.692603 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-lhmdh" podUID="dd7b53bb-d740-497c-a36e-87e51d6f05a6" containerName="console" probeResult="failure" output="Get \"https://10.217.0.10:8443/health\": dial tcp 10.217.0.10:8443: connect: connection refused" Jan 20 16:44:13 crc kubenswrapper[4558]: I0120 16:44:13.140763 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-vkkjt" Jan 20 16:44:13 crc kubenswrapper[4558]: I0120 16:44:13.142691 4558 patch_prober.go:28] interesting pod/router-default-5444994796-vkkjt container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 20 16:44:13 crc kubenswrapper[4558]: [+]has-synced ok Jan 20 16:44:13 crc kubenswrapper[4558]: [+]process-running ok Jan 20 16:44:13 crc kubenswrapper[4558]: healthz check failed Jan 20 16:44:13 crc kubenswrapper[4558]: I0120 16:44:13.142775 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-vkkjt" podUID="84f45a25-f77d-4c4c-88b6-5bdc4c286f10" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 20 16:44:13 crc kubenswrapper[4558]: W0120 16:44:13.200638 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3b6479f0_333b_4a96_9adf_2099afdc2447.slice/crio-da89502f2a5af8d111fb4a74ae158eceef2e04305a5de1e51175be2e173a74a2 WatchSource:0}: Error finding container da89502f2a5af8d111fb4a74ae158eceef2e04305a5de1e51175be2e173a74a2: Status 404 returned error can't find the container with id da89502f2a5af8d111fb4a74ae158eceef2e04305a5de1e51175be2e173a74a2 Jan 20 16:44:13 crc kubenswrapper[4558]: I0120 16:44:13.319349 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b97gp" Jan 20 16:44:13 crc kubenswrapper[4558]: I0120 16:44:13.319518 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b97gp" Jan 20 16:44:13 crc kubenswrapper[4558]: I0120 16:44:13.326060 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b97gp" Jan 20 16:44:13 crc kubenswrapper[4558]: I0120 16:44:13.643436 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-225km" Jan 20 16:44:13 crc kubenswrapper[4558]: I0120 16:44:13.649341 4558 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"9afe6802b0d2a3feff1d221469e59f1748c084fc4e7948cb1bf6f8646a7b8f3b"} Jan 20 16:44:13 crc kubenswrapper[4558]: I0120 16:44:13.652343 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"7a4b8652a9280a2c103df071189d7a609325cdfab23168c42a2f7ffd3ca1b028"} Jan 20 16:44:13 crc kubenswrapper[4558]: I0120 16:44:13.652401 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"da89502f2a5af8d111fb4a74ae158eceef2e04305a5de1e51175be2e173a74a2"} Jan 20 16:44:13 crc kubenswrapper[4558]: I0120 16:44:13.653508 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 20 16:44:13 crc kubenswrapper[4558]: I0120 16:44:13.673749 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"d0a69a795f65a10f954e9719139fc3a08c6f920dfd3f85413419c51c90a57b95"} Jan 20 16:44:13 crc kubenswrapper[4558]: I0120 16:44:13.677002 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"0d813a3bdaed3ebb0a6c112028810a47ceaa6afea538b7e49ce8d5a8f7a1080f"} Jan 20 16:44:13 crc kubenswrapper[4558]: I0120 16:44:13.685193 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-b97gp" Jan 20 16:44:14 crc kubenswrapper[4558]: I0120 16:44:14.143003 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-vkkjt" Jan 20 16:44:14 crc kubenswrapper[4558]: I0120 16:44:14.148904 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-vkkjt" Jan 20 16:44:15 crc kubenswrapper[4558]: I0120 16:44:15.221228 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Jan 20 16:44:15 crc kubenswrapper[4558]: E0120 16:44:15.221403 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed9427fd-b75f-44cc-82fd-d7ecca334c93" containerName="pruner" Jan 20 16:44:15 crc kubenswrapper[4558]: I0120 16:44:15.221414 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed9427fd-b75f-44cc-82fd-d7ecca334c93" containerName="pruner" Jan 20 16:44:15 crc kubenswrapper[4558]: E0120 16:44:15.221426 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31eb233b-13fe-40cb-9259-f18574a85e01" containerName="collect-profiles" Jan 20 16:44:15 crc kubenswrapper[4558]: I0120 16:44:15.221431 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="31eb233b-13fe-40cb-9259-f18574a85e01" containerName="collect-profiles" Jan 20 16:44:15 crc kubenswrapper[4558]: I0120 16:44:15.221510 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed9427fd-b75f-44cc-82fd-d7ecca334c93" containerName="pruner" Jan 20 16:44:15 crc kubenswrapper[4558]: 
I0120 16:44:15.221520 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="31eb233b-13fe-40cb-9259-f18574a85e01" containerName="collect-profiles" Jan 20 16:44:15 crc kubenswrapper[4558]: I0120 16:44:15.221827 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 20 16:44:15 crc kubenswrapper[4558]: I0120 16:44:15.223793 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Jan 20 16:44:15 crc kubenswrapper[4558]: I0120 16:44:15.223960 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Jan 20 16:44:15 crc kubenswrapper[4558]: I0120 16:44:15.229129 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Jan 20 16:44:15 crc kubenswrapper[4558]: I0120 16:44:15.323966 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f480e1f1-c185-47d4-adce-f0e95d1e3372-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"f480e1f1-c185-47d4-adce-f0e95d1e3372\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 20 16:44:15 crc kubenswrapper[4558]: I0120 16:44:15.324060 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f480e1f1-c185-47d4-adce-f0e95d1e3372-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"f480e1f1-c185-47d4-adce-f0e95d1e3372\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 20 16:44:15 crc kubenswrapper[4558]: I0120 16:44:15.428070 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f480e1f1-c185-47d4-adce-f0e95d1e3372-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"f480e1f1-c185-47d4-adce-f0e95d1e3372\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 20 16:44:15 crc kubenswrapper[4558]: I0120 16:44:15.428225 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f480e1f1-c185-47d4-adce-f0e95d1e3372-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"f480e1f1-c185-47d4-adce-f0e95d1e3372\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 20 16:44:15 crc kubenswrapper[4558]: I0120 16:44:15.428388 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f480e1f1-c185-47d4-adce-f0e95d1e3372-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"f480e1f1-c185-47d4-adce-f0e95d1e3372\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 20 16:44:15 crc kubenswrapper[4558]: I0120 16:44:15.446731 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f480e1f1-c185-47d4-adce-f0e95d1e3372-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"f480e1f1-c185-47d4-adce-f0e95d1e3372\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 20 16:44:15 crc kubenswrapper[4558]: I0120 16:44:15.545516 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 20 16:44:15 crc kubenswrapper[4558]: I0120 16:44:15.586653 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-gbvfn" Jan 20 16:44:20 crc kubenswrapper[4558]: I0120 16:44:20.747793 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"214e659d7eaee4c2c0dee2c1db9a1d7c1886ec358c221a0a43d4d80c9d875230"} Jan 20 16:44:22 crc kubenswrapper[4558]: I0120 16:44:22.693923 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-lhmdh" Jan 20 16:44:22 crc kubenswrapper[4558]: I0120 16:44:22.697095 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-lhmdh" Jan 20 16:44:25 crc kubenswrapper[4558]: I0120 16:44:25.981694 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/30032328-bd33-4073-9366-e10bc5e2aa77-metrics-certs\") pod \"network-metrics-daemon-9wrq6\" (UID: \"30032328-bd33-4073-9366-e10bc5e2aa77\") " pod="openshift-multus/network-metrics-daemon-9wrq6" Jan 20 16:44:26 crc kubenswrapper[4558]: I0120 16:44:26.001152 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/30032328-bd33-4073-9366-e10bc5e2aa77-metrics-certs\") pod \"network-metrics-daemon-9wrq6\" (UID: \"30032328-bd33-4073-9366-e10bc5e2aa77\") " pod="openshift-multus/network-metrics-daemon-9wrq6" Jan 20 16:44:26 crc kubenswrapper[4558]: I0120 16:44:26.076183 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-9wrq6" Jan 20 16:44:27 crc kubenswrapper[4558]: I0120 16:44:27.330218 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 16:44:27 crc kubenswrapper[4558]: I0120 16:44:27.330580 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 16:44:28 crc kubenswrapper[4558]: I0120 16:44:28.558197 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Jan 20 16:44:30 crc kubenswrapper[4558]: I0120 16:44:30.005938 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-zsdqk" Jan 20 16:44:32 crc kubenswrapper[4558]: W0120 16:44:32.105724 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-podf480e1f1_c185_47d4_adce_f0e95d1e3372.slice/crio-549aa977cdf56ea8ad16f22a375d348311394e1ba5d953554dd7557056bb299b WatchSource:0}: Error finding container 549aa977cdf56ea8ad16f22a375d348311394e1ba5d953554dd7557056bb299b: Status 404 returned error can't find the container with id 549aa977cdf56ea8ad16f22a375d348311394e1ba5d953554dd7557056bb299b Jan 20 16:44:32 crc kubenswrapper[4558]: I0120 16:44:32.320075 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-9wrq6"] Jan 20 16:44:32 crc kubenswrapper[4558]: W0120 16:44:32.342414 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod30032328_bd33_4073_9366_e10bc5e2aa77.slice/crio-e021ac607007a966f10e57b7b4d2854fa06272d2c9d458ff7d73496edf29f1f9 WatchSource:0}: Error finding container e021ac607007a966f10e57b7b4d2854fa06272d2c9d458ff7d73496edf29f1f9: Status 404 returned error can't find the container with id e021ac607007a966f10e57b7b4d2854fa06272d2c9d458ff7d73496edf29f1f9 Jan 20 16:44:32 crc kubenswrapper[4558]: I0120 16:44:32.805009 4558 generic.go:334] "Generic (PLEG): container finished" podID="03467399-b14f-421a-a7ec-f2a533daed0d" containerID="cf9c3da8d1183bd36ea6b5078dc260ec622ca8def9815b582632960b1f34ff04" exitCode=0 Jan 20 16:44:32 crc kubenswrapper[4558]: I0120 16:44:32.805571 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-chwxp" event={"ID":"03467399-b14f-421a-a7ec-f2a533daed0d","Type":"ContainerDied","Data":"cf9c3da8d1183bd36ea6b5078dc260ec622ca8def9815b582632960b1f34ff04"} Jan 20 16:44:32 crc kubenswrapper[4558]: I0120 16:44:32.808833 4558 generic.go:334] "Generic (PLEG): container finished" podID="11fdde87-2c2c-40ba-84e5-e0c93fc58130" containerID="6412e7111e737e96044e3c71ef49ed9bbd5ff4e07d685f6a40825af67af9508c" exitCode=0 Jan 20 16:44:32 crc kubenswrapper[4558]: I0120 16:44:32.808930 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bgqpg" 
event={"ID":"11fdde87-2c2c-40ba-84e5-e0c93fc58130","Type":"ContainerDied","Data":"6412e7111e737e96044e3c71ef49ed9bbd5ff4e07d685f6a40825af67af9508c"} Jan 20 16:44:32 crc kubenswrapper[4558]: I0120 16:44:32.813050 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"f480e1f1-c185-47d4-adce-f0e95d1e3372","Type":"ContainerStarted","Data":"bfd71d859ca191d708c18bc45742a837a46d8d3a5530f95d0b092d313a428fbe"} Jan 20 16:44:32 crc kubenswrapper[4558]: I0120 16:44:32.813105 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"f480e1f1-c185-47d4-adce-f0e95d1e3372","Type":"ContainerStarted","Data":"549aa977cdf56ea8ad16f22a375d348311394e1ba5d953554dd7557056bb299b"} Jan 20 16:44:32 crc kubenswrapper[4558]: I0120 16:44:32.819246 4558 generic.go:334] "Generic (PLEG): container finished" podID="05d983ba-960a-4975-b3de-73a3891fb342" containerID="012e66d2321004a8253622882671c56121399e29c0a87fb6e64485d5efcf0cc7" exitCode=0 Jan 20 16:44:32 crc kubenswrapper[4558]: I0120 16:44:32.819328 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mndbv" event={"ID":"05d983ba-960a-4975-b3de-73a3891fb342","Type":"ContainerDied","Data":"012e66d2321004a8253622882671c56121399e29c0a87fb6e64485d5efcf0cc7"} Jan 20 16:44:32 crc kubenswrapper[4558]: I0120 16:44:32.825698 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b9k54" event={"ID":"91268065-1eb8-448e-8597-458d2586ad12","Type":"ContainerStarted","Data":"b50a1bff2d82b05b3228e89433d451c658aa2efac295e7e9d2cf38c86c7b5901"} Jan 20 16:44:32 crc kubenswrapper[4558]: I0120 16:44:32.829539 4558 generic.go:334] "Generic (PLEG): container finished" podID="2aa030eb-fbf5-4bf5-ac24-0791492977c2" containerID="94485e77e9c1b2ab67fe6e772ecc51bf9c230fc0c7a7362fea19a37a3d0e9be7" exitCode=0 Jan 20 16:44:32 crc kubenswrapper[4558]: I0120 16:44:32.829690 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rfmgn" event={"ID":"2aa030eb-fbf5-4bf5-ac24-0791492977c2","Type":"ContainerDied","Data":"94485e77e9c1b2ab67fe6e772ecc51bf9c230fc0c7a7362fea19a37a3d0e9be7"} Jan 20 16:44:32 crc kubenswrapper[4558]: I0120 16:44:32.832800 4558 generic.go:334] "Generic (PLEG): container finished" podID="567265d1-8d94-4502-aba8-e4ce03c0def9" containerID="74047f31d4ad656869362d419f9f4b1d00d9d511a6befa9bc4830981a36eb4ee" exitCode=0 Jan 20 16:44:32 crc kubenswrapper[4558]: I0120 16:44:32.832878 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-275ld" event={"ID":"567265d1-8d94-4502-aba8-e4ce03c0def9","Type":"ContainerDied","Data":"74047f31d4ad656869362d419f9f4b1d00d9d511a6befa9bc4830981a36eb4ee"} Jan 20 16:44:32 crc kubenswrapper[4558]: I0120 16:44:32.839557 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hr9pm" event={"ID":"0099ba6c-2da8-471c-bde7-528db23c4faf","Type":"ContainerStarted","Data":"bc83a14fde76b44006731fddf6932b220bc2bdf344c8611287f523d2d037bd4a"} Jan 20 16:44:32 crc kubenswrapper[4558]: I0120 16:44:32.841065 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-9wrq6" event={"ID":"30032328-bd33-4073-9366-e10bc5e2aa77","Type":"ContainerStarted","Data":"e021ac607007a966f10e57b7b4d2854fa06272d2c9d458ff7d73496edf29f1f9"} Jan 20 16:44:32 crc kubenswrapper[4558]: I0120 16:44:32.865631 
4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=17.865619239 podStartE2EDuration="17.865619239s" podCreationTimestamp="2026-01-20 16:44:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:44:32.863898671 +0000 UTC m=+166.624236648" watchObservedRunningTime="2026-01-20 16:44:32.865619239 +0000 UTC m=+166.625957207" Jan 20 16:44:33 crc kubenswrapper[4558]: I0120 16:44:33.847203 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mndbv" event={"ID":"05d983ba-960a-4975-b3de-73a3891fb342","Type":"ContainerStarted","Data":"bc337f05c8c80f963877487d730b664b8478ace32a99e7d4e2160bcdece78381"} Jan 20 16:44:33 crc kubenswrapper[4558]: I0120 16:44:33.848742 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-275ld" event={"ID":"567265d1-8d94-4502-aba8-e4ce03c0def9","Type":"ContainerStarted","Data":"bdf66e59d0b9aa27ce7c1466ba43ff53cff54e2236701e30523c3a594c541df7"} Jan 20 16:44:33 crc kubenswrapper[4558]: I0120 16:44:33.850065 4558 generic.go:334] "Generic (PLEG): container finished" podID="0099ba6c-2da8-471c-bde7-528db23c4faf" containerID="bc83a14fde76b44006731fddf6932b220bc2bdf344c8611287f523d2d037bd4a" exitCode=0 Jan 20 16:44:33 crc kubenswrapper[4558]: I0120 16:44:33.850132 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hr9pm" event={"ID":"0099ba6c-2da8-471c-bde7-528db23c4faf","Type":"ContainerDied","Data":"bc83a14fde76b44006731fddf6932b220bc2bdf344c8611287f523d2d037bd4a"} Jan 20 16:44:33 crc kubenswrapper[4558]: I0120 16:44:33.851634 4558 generic.go:334] "Generic (PLEG): container finished" podID="91268065-1eb8-448e-8597-458d2586ad12" containerID="b50a1bff2d82b05b3228e89433d451c658aa2efac295e7e9d2cf38c86c7b5901" exitCode=0 Jan 20 16:44:33 crc kubenswrapper[4558]: I0120 16:44:33.851688 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b9k54" event={"ID":"91268065-1eb8-448e-8597-458d2586ad12","Type":"ContainerDied","Data":"b50a1bff2d82b05b3228e89433d451c658aa2efac295e7e9d2cf38c86c7b5901"} Jan 20 16:44:33 crc kubenswrapper[4558]: I0120 16:44:33.854597 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rfmgn" event={"ID":"2aa030eb-fbf5-4bf5-ac24-0791492977c2","Type":"ContainerStarted","Data":"c6650fc3fb9599a4c0e9ed2e7a4c88c4bbea9ff27730244b9fa4bea4aafd60f0"} Jan 20 16:44:33 crc kubenswrapper[4558]: I0120 16:44:33.858111 4558 generic.go:334] "Generic (PLEG): container finished" podID="04aecb46-8cfc-4f76-9fe7-31ac1f5bd71e" containerID="15fe4d3d007fe83139c8f54811261c5ea7984d9efe1b125c24feb217995c23b4" exitCode=0 Jan 20 16:44:33 crc kubenswrapper[4558]: I0120 16:44:33.858175 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7pjxc" event={"ID":"04aecb46-8cfc-4f76-9fe7-31ac1f5bd71e","Type":"ContainerDied","Data":"15fe4d3d007fe83139c8f54811261c5ea7984d9efe1b125c24feb217995c23b4"} Jan 20 16:44:33 crc kubenswrapper[4558]: I0120 16:44:33.864400 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bgqpg" event={"ID":"11fdde87-2c2c-40ba-84e5-e0c93fc58130","Type":"ContainerStarted","Data":"281a736e412a599971d1daf067155d194b977c9ca8221d102dbcdef3490bd4c3"} Jan 20 16:44:33 crc 
kubenswrapper[4558]: I0120 16:44:33.865760 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-9wrq6" event={"ID":"30032328-bd33-4073-9366-e10bc5e2aa77","Type":"ContainerStarted","Data":"390ffcbfcd7c90db6c28800b9e8cdca7a23c479b925ab3cd0a576a15720e870e"} Jan 20 16:44:33 crc kubenswrapper[4558]: I0120 16:44:33.865786 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-9wrq6" event={"ID":"30032328-bd33-4073-9366-e10bc5e2aa77","Type":"ContainerStarted","Data":"d4b32fb92d808f0ea10bb1e10a444d722e79a47d0b11ae0100825657658b279d"} Jan 20 16:44:33 crc kubenswrapper[4558]: I0120 16:44:33.867086 4558 generic.go:334] "Generic (PLEG): container finished" podID="f480e1f1-c185-47d4-adce-f0e95d1e3372" containerID="bfd71d859ca191d708c18bc45742a837a46d8d3a5530f95d0b092d313a428fbe" exitCode=0 Jan 20 16:44:33 crc kubenswrapper[4558]: I0120 16:44:33.867114 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"f480e1f1-c185-47d4-adce-f0e95d1e3372","Type":"ContainerDied","Data":"bfd71d859ca191d708c18bc45742a837a46d8d3a5530f95d0b092d313a428fbe"} Jan 20 16:44:33 crc kubenswrapper[4558]: I0120 16:44:33.882511 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-mndbv" podStartSLOduration=2.97474492 podStartE2EDuration="26.882497414s" podCreationTimestamp="2026-01-20 16:44:07 +0000 UTC" firstStartedPulling="2026-01-20 16:44:09.547097816 +0000 UTC m=+143.307435783" lastFinishedPulling="2026-01-20 16:44:33.454850309 +0000 UTC m=+167.215188277" observedRunningTime="2026-01-20 16:44:33.872148818 +0000 UTC m=+167.632486785" watchObservedRunningTime="2026-01-20 16:44:33.882497414 +0000 UTC m=+167.642835381" Jan 20 16:44:33 crc kubenswrapper[4558]: I0120 16:44:33.901997 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-bgqpg" podStartSLOduration=3.907691258 podStartE2EDuration="28.901983761s" podCreationTimestamp="2026-01-20 16:44:05 +0000 UTC" firstStartedPulling="2026-01-20 16:44:08.403290666 +0000 UTC m=+142.163628632" lastFinishedPulling="2026-01-20 16:44:33.397583178 +0000 UTC m=+167.157921135" observedRunningTime="2026-01-20 16:44:33.899146395 +0000 UTC m=+167.659484362" watchObservedRunningTime="2026-01-20 16:44:33.901983761 +0000 UTC m=+167.662321728" Jan 20 16:44:33 crc kubenswrapper[4558]: I0120 16:44:33.926273 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-275ld" podStartSLOduration=1.821245125 podStartE2EDuration="25.926262526s" podCreationTimestamp="2026-01-20 16:44:08 +0000 UTC" firstStartedPulling="2026-01-20 16:44:09.564481694 +0000 UTC m=+143.324819661" lastFinishedPulling="2026-01-20 16:44:33.669499095 +0000 UTC m=+167.429837062" observedRunningTime="2026-01-20 16:44:33.923416654 +0000 UTC m=+167.683754621" watchObservedRunningTime="2026-01-20 16:44:33.926262526 +0000 UTC m=+167.686600493" Jan 20 16:44:33 crc kubenswrapper[4558]: I0120 16:44:33.949580 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-9wrq6" podStartSLOduration=149.949563395 podStartE2EDuration="2m29.949563395s" podCreationTimestamp="2026-01-20 16:42:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 
16:44:33.934713189 +0000 UTC m=+167.695051147" watchObservedRunningTime="2026-01-20 16:44:33.949563395 +0000 UTC m=+167.709901362" Jan 20 16:44:33 crc kubenswrapper[4558]: I0120 16:44:33.981034 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-rfmgn" podStartSLOduration=2.846985074 podStartE2EDuration="27.981018507s" podCreationTimestamp="2026-01-20 16:44:06 +0000 UTC" firstStartedPulling="2026-01-20 16:44:08.429293535 +0000 UTC m=+142.189631502" lastFinishedPulling="2026-01-20 16:44:33.563326968 +0000 UTC m=+167.323664935" observedRunningTime="2026-01-20 16:44:33.97827634 +0000 UTC m=+167.738614307" watchObservedRunningTime="2026-01-20 16:44:33.981018507 +0000 UTC m=+167.741356474" Jan 20 16:44:34 crc kubenswrapper[4558]: I0120 16:44:34.874532 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-chwxp" event={"ID":"03467399-b14f-421a-a7ec-f2a533daed0d","Type":"ContainerStarted","Data":"838417c7e988767191dc59fe1cdcb6526ce68f568e4415ddcfa65aa9e72754d4"} Jan 20 16:44:34 crc kubenswrapper[4558]: I0120 16:44:34.880571 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7pjxc" event={"ID":"04aecb46-8cfc-4f76-9fe7-31ac1f5bd71e","Type":"ContainerStarted","Data":"ec7c97685ba1b5ea26aa0cef35f1f659e1a9974ef9f58f40ddb50c035856e582"} Jan 20 16:44:34 crc kubenswrapper[4558]: I0120 16:44:34.884526 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hr9pm" event={"ID":"0099ba6c-2da8-471c-bde7-528db23c4faf","Type":"ContainerStarted","Data":"55c2641fdcac002bf4f850acf983db5fdb2a87445738f279b29d89ae8e70ddc5"} Jan 20 16:44:34 crc kubenswrapper[4558]: I0120 16:44:34.902799 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-chwxp" podStartSLOduration=4.6034781 podStartE2EDuration="29.902784915s" podCreationTimestamp="2026-01-20 16:44:05 +0000 UTC" firstStartedPulling="2026-01-20 16:44:08.496376688 +0000 UTC m=+142.256714655" lastFinishedPulling="2026-01-20 16:44:33.795683502 +0000 UTC m=+167.556021470" observedRunningTime="2026-01-20 16:44:34.901359994 +0000 UTC m=+168.661697962" watchObservedRunningTime="2026-01-20 16:44:34.902784915 +0000 UTC m=+168.663122883" Jan 20 16:44:34 crc kubenswrapper[4558]: I0120 16:44:34.947331 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-hr9pm" podStartSLOduration=2.95164999 podStartE2EDuration="26.947317135s" podCreationTimestamp="2026-01-20 16:44:08 +0000 UTC" firstStartedPulling="2026-01-20 16:44:10.586125174 +0000 UTC m=+144.346463141" lastFinishedPulling="2026-01-20 16:44:34.581792319 +0000 UTC m=+168.342130286" observedRunningTime="2026-01-20 16:44:34.9468484 +0000 UTC m=+168.707186367" watchObservedRunningTime="2026-01-20 16:44:34.947317135 +0000 UTC m=+168.707655102" Jan 20 16:44:34 crc kubenswrapper[4558]: I0120 16:44:34.947766 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-7pjxc" podStartSLOduration=2.999475845 podStartE2EDuration="28.947762086s" podCreationTimestamp="2026-01-20 16:44:06 +0000 UTC" firstStartedPulling="2026-01-20 16:44:08.513597418 +0000 UTC m=+142.273935385" lastFinishedPulling="2026-01-20 16:44:34.46188366 +0000 UTC m=+168.222221626" observedRunningTime="2026-01-20 16:44:34.932186161 +0000 UTC m=+168.692524127" 
watchObservedRunningTime="2026-01-20 16:44:34.947762086 +0000 UTC m=+168.708100052" Jan 20 16:44:35 crc kubenswrapper[4558]: I0120 16:44:35.119363 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 20 16:44:35 crc kubenswrapper[4558]: I0120 16:44:35.201980 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f480e1f1-c185-47d4-adce-f0e95d1e3372-kube-api-access\") pod \"f480e1f1-c185-47d4-adce-f0e95d1e3372\" (UID: \"f480e1f1-c185-47d4-adce-f0e95d1e3372\") " Jan 20 16:44:35 crc kubenswrapper[4558]: I0120 16:44:35.202116 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f480e1f1-c185-47d4-adce-f0e95d1e3372-kubelet-dir\") pod \"f480e1f1-c185-47d4-adce-f0e95d1e3372\" (UID: \"f480e1f1-c185-47d4-adce-f0e95d1e3372\") " Jan 20 16:44:35 crc kubenswrapper[4558]: I0120 16:44:35.202221 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f480e1f1-c185-47d4-adce-f0e95d1e3372-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "f480e1f1-c185-47d4-adce-f0e95d1e3372" (UID: "f480e1f1-c185-47d4-adce-f0e95d1e3372"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 16:44:35 crc kubenswrapper[4558]: I0120 16:44:35.202356 4558 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f480e1f1-c185-47d4-adce-f0e95d1e3372-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 20 16:44:35 crc kubenswrapper[4558]: I0120 16:44:35.208216 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f480e1f1-c185-47d4-adce-f0e95d1e3372-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "f480e1f1-c185-47d4-adce-f0e95d1e3372" (UID: "f480e1f1-c185-47d4-adce-f0e95d1e3372"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:44:35 crc kubenswrapper[4558]: I0120 16:44:35.303320 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f480e1f1-c185-47d4-adce-f0e95d1e3372-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 20 16:44:35 crc kubenswrapper[4558]: I0120 16:44:35.891288 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b9k54" event={"ID":"91268065-1eb8-448e-8597-458d2586ad12","Type":"ContainerStarted","Data":"d100d613eb9f033ab14e478f2d346607b2497f19fb2ec0eb1def9eb67e5192bb"} Jan 20 16:44:35 crc kubenswrapper[4558]: I0120 16:44:35.892360 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"f480e1f1-c185-47d4-adce-f0e95d1e3372","Type":"ContainerDied","Data":"549aa977cdf56ea8ad16f22a375d348311394e1ba5d953554dd7557056bb299b"} Jan 20 16:44:35 crc kubenswrapper[4558]: I0120 16:44:35.892386 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="549aa977cdf56ea8ad16f22a375d348311394e1ba5d953554dd7557056bb299b" Jan 20 16:44:35 crc kubenswrapper[4558]: I0120 16:44:35.892623 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 20 16:44:36 crc kubenswrapper[4558]: I0120 16:44:36.046645 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-bgqpg" Jan 20 16:44:36 crc kubenswrapper[4558]: I0120 16:44:36.046868 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-bgqpg" Jan 20 16:44:36 crc kubenswrapper[4558]: I0120 16:44:36.113979 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-bgqpg" Jan 20 16:44:36 crc kubenswrapper[4558]: I0120 16:44:36.128668 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-b9k54" podStartSLOduration=2.991684631 podStartE2EDuration="27.128653693s" podCreationTimestamp="2026-01-20 16:44:09 +0000 UTC" firstStartedPulling="2026-01-20 16:44:10.594235856 +0000 UTC m=+144.354573823" lastFinishedPulling="2026-01-20 16:44:34.731204918 +0000 UTC m=+168.491542885" observedRunningTime="2026-01-20 16:44:35.907612651 +0000 UTC m=+169.667950618" watchObservedRunningTime="2026-01-20 16:44:36.128653693 +0000 UTC m=+169.888991660" Jan 20 16:44:36 crc kubenswrapper[4558]: I0120 16:44:36.246748 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-chwxp" Jan 20 16:44:36 crc kubenswrapper[4558]: I0120 16:44:36.246797 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-chwxp" Jan 20 16:44:36 crc kubenswrapper[4558]: I0120 16:44:36.272552 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-chwxp" Jan 20 16:44:36 crc kubenswrapper[4558]: I0120 16:44:36.438584 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-7pjxc" Jan 20 16:44:36 crc kubenswrapper[4558]: I0120 16:44:36.438619 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-7pjxc" Jan 20 16:44:36 crc kubenswrapper[4558]: I0120 16:44:36.468503 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-7pjxc" Jan 20 16:44:36 crc kubenswrapper[4558]: I0120 16:44:36.914258 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-rfmgn" Jan 20 16:44:36 crc kubenswrapper[4558]: I0120 16:44:36.914321 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-rfmgn" Jan 20 16:44:36 crc kubenswrapper[4558]: I0120 16:44:36.940040 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-rfmgn" Jan 20 16:44:38 crc kubenswrapper[4558]: I0120 16:44:38.038030 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-mndbv" Jan 20 16:44:38 crc kubenswrapper[4558]: I0120 16:44:38.038823 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-mndbv" Jan 20 16:44:38 crc kubenswrapper[4558]: I0120 16:44:38.066702 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-mndbv" 
Jan 20 16:44:38 crc kubenswrapper[4558]: I0120 16:44:38.390684 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-275ld" Jan 20 16:44:38 crc kubenswrapper[4558]: I0120 16:44:38.390797 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-275ld" Jan 20 16:44:38 crc kubenswrapper[4558]: I0120 16:44:38.418570 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-275ld" Jan 20 16:44:38 crc kubenswrapper[4558]: I0120 16:44:38.936132 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-rfmgn" Jan 20 16:44:38 crc kubenswrapper[4558]: I0120 16:44:38.936410 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-275ld" Jan 20 16:44:38 crc kubenswrapper[4558]: I0120 16:44:38.937713 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-mndbv" Jan 20 16:44:39 crc kubenswrapper[4558]: I0120 16:44:39.215110 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-hr9pm" Jan 20 16:44:39 crc kubenswrapper[4558]: I0120 16:44:39.215155 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-hr9pm" Jan 20 16:44:39 crc kubenswrapper[4558]: I0120 16:44:39.558677 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-b9k54" Jan 20 16:44:39 crc kubenswrapper[4558]: I0120 16:44:39.558722 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-b9k54" Jan 20 16:44:39 crc kubenswrapper[4558]: I0120 16:44:39.587669 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-b9k54" Jan 20 16:44:39 crc kubenswrapper[4558]: I0120 16:44:39.936198 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-b9k54" Jan 20 16:44:40 crc kubenswrapper[4558]: I0120 16:44:40.240155 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-hr9pm" podUID="0099ba6c-2da8-471c-bde7-528db23c4faf" containerName="registry-server" probeResult="failure" output=< Jan 20 16:44:40 crc kubenswrapper[4558]: timeout: failed to connect service ":50051" within 1s Jan 20 16:44:40 crc kubenswrapper[4558]: > Jan 20 16:44:41 crc kubenswrapper[4558]: I0120 16:44:41.037153 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-rfmgn"] Jan 20 16:44:41 crc kubenswrapper[4558]: I0120 16:44:41.037353 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-rfmgn" podUID="2aa030eb-fbf5-4bf5-ac24-0791492977c2" containerName="registry-server" containerID="cri-o://c6650fc3fb9599a4c0e9ed2e7a4c88c4bbea9ff27730244b9fa4bea4aafd60f0" gracePeriod=2 Jan 20 16:44:41 crc kubenswrapper[4558]: I0120 16:44:41.235978 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-b9k54"] Jan 20 16:44:41 crc kubenswrapper[4558]: I0120 16:44:41.325689 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-rfmgn" Jan 20 16:44:41 crc kubenswrapper[4558]: I0120 16:44:41.375077 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2aa030eb-fbf5-4bf5-ac24-0791492977c2-utilities\") pod \"2aa030eb-fbf5-4bf5-ac24-0791492977c2\" (UID: \"2aa030eb-fbf5-4bf5-ac24-0791492977c2\") " Jan 20 16:44:41 crc kubenswrapper[4558]: I0120 16:44:41.375122 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2aa030eb-fbf5-4bf5-ac24-0791492977c2-catalog-content\") pod \"2aa030eb-fbf5-4bf5-ac24-0791492977c2\" (UID: \"2aa030eb-fbf5-4bf5-ac24-0791492977c2\") " Jan 20 16:44:41 crc kubenswrapper[4558]: I0120 16:44:41.375180 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qm76k\" (UniqueName: \"kubernetes.io/projected/2aa030eb-fbf5-4bf5-ac24-0791492977c2-kube-api-access-qm76k\") pod \"2aa030eb-fbf5-4bf5-ac24-0791492977c2\" (UID: \"2aa030eb-fbf5-4bf5-ac24-0791492977c2\") " Jan 20 16:44:41 crc kubenswrapper[4558]: I0120 16:44:41.376062 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2aa030eb-fbf5-4bf5-ac24-0791492977c2-utilities" (OuterVolumeSpecName: "utilities") pod "2aa030eb-fbf5-4bf5-ac24-0791492977c2" (UID: "2aa030eb-fbf5-4bf5-ac24-0791492977c2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 16:44:41 crc kubenswrapper[4558]: I0120 16:44:41.380009 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2aa030eb-fbf5-4bf5-ac24-0791492977c2-kube-api-access-qm76k" (OuterVolumeSpecName: "kube-api-access-qm76k") pod "2aa030eb-fbf5-4bf5-ac24-0791492977c2" (UID: "2aa030eb-fbf5-4bf5-ac24-0791492977c2"). InnerVolumeSpecName "kube-api-access-qm76k". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:44:41 crc kubenswrapper[4558]: I0120 16:44:41.422787 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2aa030eb-fbf5-4bf5-ac24-0791492977c2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2aa030eb-fbf5-4bf5-ac24-0791492977c2" (UID: "2aa030eb-fbf5-4bf5-ac24-0791492977c2"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 16:44:41 crc kubenswrapper[4558]: I0120 16:44:41.476307 4558 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2aa030eb-fbf5-4bf5-ac24-0791492977c2-utilities\") on node \"crc\" DevicePath \"\"" Jan 20 16:44:41 crc kubenswrapper[4558]: I0120 16:44:41.476523 4558 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2aa030eb-fbf5-4bf5-ac24-0791492977c2-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 20 16:44:41 crc kubenswrapper[4558]: I0120 16:44:41.476619 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qm76k\" (UniqueName: \"kubernetes.io/projected/2aa030eb-fbf5-4bf5-ac24-0791492977c2-kube-api-access-qm76k\") on node \"crc\" DevicePath \"\"" Jan 20 16:44:41 crc kubenswrapper[4558]: I0120 16:44:41.921299 4558 generic.go:334] "Generic (PLEG): container finished" podID="2aa030eb-fbf5-4bf5-ac24-0791492977c2" containerID="c6650fc3fb9599a4c0e9ed2e7a4c88c4bbea9ff27730244b9fa4bea4aafd60f0" exitCode=0 Jan 20 16:44:41 crc kubenswrapper[4558]: I0120 16:44:41.921373 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rfmgn" Jan 20 16:44:41 crc kubenswrapper[4558]: I0120 16:44:41.921392 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rfmgn" event={"ID":"2aa030eb-fbf5-4bf5-ac24-0791492977c2","Type":"ContainerDied","Data":"c6650fc3fb9599a4c0e9ed2e7a4c88c4bbea9ff27730244b9fa4bea4aafd60f0"} Jan 20 16:44:41 crc kubenswrapper[4558]: I0120 16:44:41.921442 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rfmgn" event={"ID":"2aa030eb-fbf5-4bf5-ac24-0791492977c2","Type":"ContainerDied","Data":"9c21664012bebd70f028e3608a210a424a5da816f6cbf4eccabaaa4d14daee26"} Jan 20 16:44:41 crc kubenswrapper[4558]: I0120 16:44:41.921465 4558 scope.go:117] "RemoveContainer" containerID="c6650fc3fb9599a4c0e9ed2e7a4c88c4bbea9ff27730244b9fa4bea4aafd60f0" Jan 20 16:44:41 crc kubenswrapper[4558]: I0120 16:44:41.921792 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-b9k54" podUID="91268065-1eb8-448e-8597-458d2586ad12" containerName="registry-server" containerID="cri-o://d100d613eb9f033ab14e478f2d346607b2497f19fb2ec0eb1def9eb67e5192bb" gracePeriod=2 Jan 20 16:44:41 crc kubenswrapper[4558]: I0120 16:44:41.934835 4558 scope.go:117] "RemoveContainer" containerID="94485e77e9c1b2ab67fe6e772ecc51bf9c230fc0c7a7362fea19a37a3d0e9be7" Jan 20 16:44:41 crc kubenswrapper[4558]: I0120 16:44:41.945105 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-rfmgn"] Jan 20 16:44:41 crc kubenswrapper[4558]: I0120 16:44:41.947822 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-rfmgn"] Jan 20 16:44:41 crc kubenswrapper[4558]: I0120 16:44:41.953960 4558 scope.go:117] "RemoveContainer" containerID="01415cc2f48ff2bb8ca7184ee83e04510660f7275f127b61c4129885f8bc6839" Jan 20 16:44:41 crc kubenswrapper[4558]: I0120 16:44:41.967283 4558 scope.go:117] "RemoveContainer" containerID="c6650fc3fb9599a4c0e9ed2e7a4c88c4bbea9ff27730244b9fa4bea4aafd60f0" Jan 20 16:44:41 crc kubenswrapper[4558]: E0120 16:44:41.967698 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc 
= could not find container \"c6650fc3fb9599a4c0e9ed2e7a4c88c4bbea9ff27730244b9fa4bea4aafd60f0\": container with ID starting with c6650fc3fb9599a4c0e9ed2e7a4c88c4bbea9ff27730244b9fa4bea4aafd60f0 not found: ID does not exist" containerID="c6650fc3fb9599a4c0e9ed2e7a4c88c4bbea9ff27730244b9fa4bea4aafd60f0" Jan 20 16:44:41 crc kubenswrapper[4558]: I0120 16:44:41.967732 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c6650fc3fb9599a4c0e9ed2e7a4c88c4bbea9ff27730244b9fa4bea4aafd60f0"} err="failed to get container status \"c6650fc3fb9599a4c0e9ed2e7a4c88c4bbea9ff27730244b9fa4bea4aafd60f0\": rpc error: code = NotFound desc = could not find container \"c6650fc3fb9599a4c0e9ed2e7a4c88c4bbea9ff27730244b9fa4bea4aafd60f0\": container with ID starting with c6650fc3fb9599a4c0e9ed2e7a4c88c4bbea9ff27730244b9fa4bea4aafd60f0 not found: ID does not exist" Jan 20 16:44:41 crc kubenswrapper[4558]: I0120 16:44:41.967768 4558 scope.go:117] "RemoveContainer" containerID="94485e77e9c1b2ab67fe6e772ecc51bf9c230fc0c7a7362fea19a37a3d0e9be7" Jan 20 16:44:41 crc kubenswrapper[4558]: E0120 16:44:41.968103 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"94485e77e9c1b2ab67fe6e772ecc51bf9c230fc0c7a7362fea19a37a3d0e9be7\": container with ID starting with 94485e77e9c1b2ab67fe6e772ecc51bf9c230fc0c7a7362fea19a37a3d0e9be7 not found: ID does not exist" containerID="94485e77e9c1b2ab67fe6e772ecc51bf9c230fc0c7a7362fea19a37a3d0e9be7" Jan 20 16:44:41 crc kubenswrapper[4558]: I0120 16:44:41.968147 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"94485e77e9c1b2ab67fe6e772ecc51bf9c230fc0c7a7362fea19a37a3d0e9be7"} err="failed to get container status \"94485e77e9c1b2ab67fe6e772ecc51bf9c230fc0c7a7362fea19a37a3d0e9be7\": rpc error: code = NotFound desc = could not find container \"94485e77e9c1b2ab67fe6e772ecc51bf9c230fc0c7a7362fea19a37a3d0e9be7\": container with ID starting with 94485e77e9c1b2ab67fe6e772ecc51bf9c230fc0c7a7362fea19a37a3d0e9be7 not found: ID does not exist" Jan 20 16:44:41 crc kubenswrapper[4558]: I0120 16:44:41.968184 4558 scope.go:117] "RemoveContainer" containerID="01415cc2f48ff2bb8ca7184ee83e04510660f7275f127b61c4129885f8bc6839" Jan 20 16:44:41 crc kubenswrapper[4558]: E0120 16:44:41.968439 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"01415cc2f48ff2bb8ca7184ee83e04510660f7275f127b61c4129885f8bc6839\": container with ID starting with 01415cc2f48ff2bb8ca7184ee83e04510660f7275f127b61c4129885f8bc6839 not found: ID does not exist" containerID="01415cc2f48ff2bb8ca7184ee83e04510660f7275f127b61c4129885f8bc6839" Jan 20 16:44:41 crc kubenswrapper[4558]: I0120 16:44:41.968465 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"01415cc2f48ff2bb8ca7184ee83e04510660f7275f127b61c4129885f8bc6839"} err="failed to get container status \"01415cc2f48ff2bb8ca7184ee83e04510660f7275f127b61c4129885f8bc6839\": rpc error: code = NotFound desc = could not find container \"01415cc2f48ff2bb8ca7184ee83e04510660f7275f127b61c4129885f8bc6839\": container with ID starting with 01415cc2f48ff2bb8ca7184ee83e04510660f7275f127b61c4129885f8bc6839 not found: ID does not exist" Jan 20 16:44:42 crc kubenswrapper[4558]: I0120 16:44:42.208696 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-b9k54" Jan 20 16:44:42 crc kubenswrapper[4558]: I0120 16:44:42.285564 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/91268065-1eb8-448e-8597-458d2586ad12-utilities\") pod \"91268065-1eb8-448e-8597-458d2586ad12\" (UID: \"91268065-1eb8-448e-8597-458d2586ad12\") " Jan 20 16:44:42 crc kubenswrapper[4558]: I0120 16:44:42.285638 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/91268065-1eb8-448e-8597-458d2586ad12-catalog-content\") pod \"91268065-1eb8-448e-8597-458d2586ad12\" (UID: \"91268065-1eb8-448e-8597-458d2586ad12\") " Jan 20 16:44:42 crc kubenswrapper[4558]: I0120 16:44:42.285682 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j59qc\" (UniqueName: \"kubernetes.io/projected/91268065-1eb8-448e-8597-458d2586ad12-kube-api-access-j59qc\") pod \"91268065-1eb8-448e-8597-458d2586ad12\" (UID: \"91268065-1eb8-448e-8597-458d2586ad12\") " Jan 20 16:44:42 crc kubenswrapper[4558]: I0120 16:44:42.286270 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/91268065-1eb8-448e-8597-458d2586ad12-utilities" (OuterVolumeSpecName: "utilities") pod "91268065-1eb8-448e-8597-458d2586ad12" (UID: "91268065-1eb8-448e-8597-458d2586ad12"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 16:44:42 crc kubenswrapper[4558]: I0120 16:44:42.288987 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/91268065-1eb8-448e-8597-458d2586ad12-kube-api-access-j59qc" (OuterVolumeSpecName: "kube-api-access-j59qc") pod "91268065-1eb8-448e-8597-458d2586ad12" (UID: "91268065-1eb8-448e-8597-458d2586ad12"). InnerVolumeSpecName "kube-api-access-j59qc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:44:42 crc kubenswrapper[4558]: I0120 16:44:42.371688 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/91268065-1eb8-448e-8597-458d2586ad12-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "91268065-1eb8-448e-8597-458d2586ad12" (UID: "91268065-1eb8-448e-8597-458d2586ad12"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 16:44:42 crc kubenswrapper[4558]: I0120 16:44:42.387532 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j59qc\" (UniqueName: \"kubernetes.io/projected/91268065-1eb8-448e-8597-458d2586ad12-kube-api-access-j59qc\") on node \"crc\" DevicePath \"\"" Jan 20 16:44:42 crc kubenswrapper[4558]: I0120 16:44:42.387561 4558 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/91268065-1eb8-448e-8597-458d2586ad12-utilities\") on node \"crc\" DevicePath \"\"" Jan 20 16:44:42 crc kubenswrapper[4558]: I0120 16:44:42.387571 4558 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/91268065-1eb8-448e-8597-458d2586ad12-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 20 16:44:42 crc kubenswrapper[4558]: I0120 16:44:42.571248 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2aa030eb-fbf5-4bf5-ac24-0791492977c2" path="/var/lib/kubelet/pods/2aa030eb-fbf5-4bf5-ac24-0791492977c2/volumes" Jan 20 16:44:42 crc kubenswrapper[4558]: I0120 16:44:42.928149 4558 generic.go:334] "Generic (PLEG): container finished" podID="91268065-1eb8-448e-8597-458d2586ad12" containerID="d100d613eb9f033ab14e478f2d346607b2497f19fb2ec0eb1def9eb67e5192bb" exitCode=0 Jan 20 16:44:42 crc kubenswrapper[4558]: I0120 16:44:42.928225 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-b9k54" Jan 20 16:44:42 crc kubenswrapper[4558]: I0120 16:44:42.928233 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b9k54" event={"ID":"91268065-1eb8-448e-8597-458d2586ad12","Type":"ContainerDied","Data":"d100d613eb9f033ab14e478f2d346607b2497f19fb2ec0eb1def9eb67e5192bb"} Jan 20 16:44:42 crc kubenswrapper[4558]: I0120 16:44:42.928260 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b9k54" event={"ID":"91268065-1eb8-448e-8597-458d2586ad12","Type":"ContainerDied","Data":"447aa47447ac3861539de847a4409e04529a89c6ad4a3b1d3003377eb6c74775"} Jan 20 16:44:42 crc kubenswrapper[4558]: I0120 16:44:42.928277 4558 scope.go:117] "RemoveContainer" containerID="d100d613eb9f033ab14e478f2d346607b2497f19fb2ec0eb1def9eb67e5192bb" Jan 20 16:44:42 crc kubenswrapper[4558]: I0120 16:44:42.941148 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-b9k54"] Jan 20 16:44:42 crc kubenswrapper[4558]: I0120 16:44:42.943346 4558 scope.go:117] "RemoveContainer" containerID="b50a1bff2d82b05b3228e89433d451c658aa2efac295e7e9d2cf38c86c7b5901" Jan 20 16:44:42 crc kubenswrapper[4558]: I0120 16:44:42.947368 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-b9k54"] Jan 20 16:44:42 crc kubenswrapper[4558]: I0120 16:44:42.956433 4558 scope.go:117] "RemoveContainer" containerID="c4b151c7eb0c287c5d4510d4d05477bdd96cced57570f7cec5d25c4b32b4b2da" Jan 20 16:44:42 crc kubenswrapper[4558]: I0120 16:44:42.966587 4558 scope.go:117] "RemoveContainer" containerID="d100d613eb9f033ab14e478f2d346607b2497f19fb2ec0eb1def9eb67e5192bb" Jan 20 16:44:42 crc kubenswrapper[4558]: E0120 16:44:42.966914 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d100d613eb9f033ab14e478f2d346607b2497f19fb2ec0eb1def9eb67e5192bb\": container with ID starting with 
d100d613eb9f033ab14e478f2d346607b2497f19fb2ec0eb1def9eb67e5192bb not found: ID does not exist" containerID="d100d613eb9f033ab14e478f2d346607b2497f19fb2ec0eb1def9eb67e5192bb" Jan 20 16:44:42 crc kubenswrapper[4558]: I0120 16:44:42.966959 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d100d613eb9f033ab14e478f2d346607b2497f19fb2ec0eb1def9eb67e5192bb"} err="failed to get container status \"d100d613eb9f033ab14e478f2d346607b2497f19fb2ec0eb1def9eb67e5192bb\": rpc error: code = NotFound desc = could not find container \"d100d613eb9f033ab14e478f2d346607b2497f19fb2ec0eb1def9eb67e5192bb\": container with ID starting with d100d613eb9f033ab14e478f2d346607b2497f19fb2ec0eb1def9eb67e5192bb not found: ID does not exist" Jan 20 16:44:42 crc kubenswrapper[4558]: I0120 16:44:42.966981 4558 scope.go:117] "RemoveContainer" containerID="b50a1bff2d82b05b3228e89433d451c658aa2efac295e7e9d2cf38c86c7b5901" Jan 20 16:44:42 crc kubenswrapper[4558]: E0120 16:44:42.967246 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b50a1bff2d82b05b3228e89433d451c658aa2efac295e7e9d2cf38c86c7b5901\": container with ID starting with b50a1bff2d82b05b3228e89433d451c658aa2efac295e7e9d2cf38c86c7b5901 not found: ID does not exist" containerID="b50a1bff2d82b05b3228e89433d451c658aa2efac295e7e9d2cf38c86c7b5901" Jan 20 16:44:42 crc kubenswrapper[4558]: I0120 16:44:42.967279 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b50a1bff2d82b05b3228e89433d451c658aa2efac295e7e9d2cf38c86c7b5901"} err="failed to get container status \"b50a1bff2d82b05b3228e89433d451c658aa2efac295e7e9d2cf38c86c7b5901\": rpc error: code = NotFound desc = could not find container \"b50a1bff2d82b05b3228e89433d451c658aa2efac295e7e9d2cf38c86c7b5901\": container with ID starting with b50a1bff2d82b05b3228e89433d451c658aa2efac295e7e9d2cf38c86c7b5901 not found: ID does not exist" Jan 20 16:44:42 crc kubenswrapper[4558]: I0120 16:44:42.967299 4558 scope.go:117] "RemoveContainer" containerID="c4b151c7eb0c287c5d4510d4d05477bdd96cced57570f7cec5d25c4b32b4b2da" Jan 20 16:44:42 crc kubenswrapper[4558]: E0120 16:44:42.967538 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c4b151c7eb0c287c5d4510d4d05477bdd96cced57570f7cec5d25c4b32b4b2da\": container with ID starting with c4b151c7eb0c287c5d4510d4d05477bdd96cced57570f7cec5d25c4b32b4b2da not found: ID does not exist" containerID="c4b151c7eb0c287c5d4510d4d05477bdd96cced57570f7cec5d25c4b32b4b2da" Jan 20 16:44:42 crc kubenswrapper[4558]: I0120 16:44:42.967564 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c4b151c7eb0c287c5d4510d4d05477bdd96cced57570f7cec5d25c4b32b4b2da"} err="failed to get container status \"c4b151c7eb0c287c5d4510d4d05477bdd96cced57570f7cec5d25c4b32b4b2da\": rpc error: code = NotFound desc = could not find container \"c4b151c7eb0c287c5d4510d4d05477bdd96cced57570f7cec5d25c4b32b4b2da\": container with ID starting with c4b151c7eb0c287c5d4510d4d05477bdd96cced57570f7cec5d25c4b32b4b2da not found: ID does not exist" Jan 20 16:44:43 crc kubenswrapper[4558]: I0120 16:44:43.436620 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-275ld"] Jan 20 16:44:43 crc kubenswrapper[4558]: I0120 16:44:43.437058 4558 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-marketplace/redhat-marketplace-275ld" podUID="567265d1-8d94-4502-aba8-e4ce03c0def9" containerName="registry-server" containerID="cri-o://bdf66e59d0b9aa27ce7c1466ba43ff53cff54e2236701e30523c3a594c541df7" gracePeriod=2 Jan 20 16:44:43 crc kubenswrapper[4558]: I0120 16:44:43.726309 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-275ld" Jan 20 16:44:43 crc kubenswrapper[4558]: I0120 16:44:43.803626 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cxhxg\" (UniqueName: \"kubernetes.io/projected/567265d1-8d94-4502-aba8-e4ce03c0def9-kube-api-access-cxhxg\") pod \"567265d1-8d94-4502-aba8-e4ce03c0def9\" (UID: \"567265d1-8d94-4502-aba8-e4ce03c0def9\") " Jan 20 16:44:43 crc kubenswrapper[4558]: I0120 16:44:43.803731 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/567265d1-8d94-4502-aba8-e4ce03c0def9-catalog-content\") pod \"567265d1-8d94-4502-aba8-e4ce03c0def9\" (UID: \"567265d1-8d94-4502-aba8-e4ce03c0def9\") " Jan 20 16:44:43 crc kubenswrapper[4558]: I0120 16:44:43.803798 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/567265d1-8d94-4502-aba8-e4ce03c0def9-utilities\") pod \"567265d1-8d94-4502-aba8-e4ce03c0def9\" (UID: \"567265d1-8d94-4502-aba8-e4ce03c0def9\") " Jan 20 16:44:43 crc kubenswrapper[4558]: I0120 16:44:43.804552 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/567265d1-8d94-4502-aba8-e4ce03c0def9-utilities" (OuterVolumeSpecName: "utilities") pod "567265d1-8d94-4502-aba8-e4ce03c0def9" (UID: "567265d1-8d94-4502-aba8-e4ce03c0def9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 16:44:43 crc kubenswrapper[4558]: I0120 16:44:43.808792 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/567265d1-8d94-4502-aba8-e4ce03c0def9-kube-api-access-cxhxg" (OuterVolumeSpecName: "kube-api-access-cxhxg") pod "567265d1-8d94-4502-aba8-e4ce03c0def9" (UID: "567265d1-8d94-4502-aba8-e4ce03c0def9"). InnerVolumeSpecName "kube-api-access-cxhxg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:44:43 crc kubenswrapper[4558]: I0120 16:44:43.822521 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/567265d1-8d94-4502-aba8-e4ce03c0def9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "567265d1-8d94-4502-aba8-e4ce03c0def9" (UID: "567265d1-8d94-4502-aba8-e4ce03c0def9"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 16:44:43 crc kubenswrapper[4558]: I0120 16:44:43.833387 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-t4dxf" Jan 20 16:44:43 crc kubenswrapper[4558]: I0120 16:44:43.904944 4558 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/567265d1-8d94-4502-aba8-e4ce03c0def9-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 20 16:44:43 crc kubenswrapper[4558]: I0120 16:44:43.904970 4558 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/567265d1-8d94-4502-aba8-e4ce03c0def9-utilities\") on node \"crc\" DevicePath \"\"" Jan 20 16:44:43 crc kubenswrapper[4558]: I0120 16:44:43.904979 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cxhxg\" (UniqueName: \"kubernetes.io/projected/567265d1-8d94-4502-aba8-e4ce03c0def9-kube-api-access-cxhxg\") on node \"crc\" DevicePath \"\"" Jan 20 16:44:43 crc kubenswrapper[4558]: I0120 16:44:43.940778 4558 generic.go:334] "Generic (PLEG): container finished" podID="567265d1-8d94-4502-aba8-e4ce03c0def9" containerID="bdf66e59d0b9aa27ce7c1466ba43ff53cff54e2236701e30523c3a594c541df7" exitCode=0 Jan 20 16:44:43 crc kubenswrapper[4558]: I0120 16:44:43.940869 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-275ld" Jan 20 16:44:43 crc kubenswrapper[4558]: I0120 16:44:43.940885 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-275ld" event={"ID":"567265d1-8d94-4502-aba8-e4ce03c0def9","Type":"ContainerDied","Data":"bdf66e59d0b9aa27ce7c1466ba43ff53cff54e2236701e30523c3a594c541df7"} Jan 20 16:44:43 crc kubenswrapper[4558]: I0120 16:44:43.941409 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-275ld" event={"ID":"567265d1-8d94-4502-aba8-e4ce03c0def9","Type":"ContainerDied","Data":"3a6eb2b523868b88f2d2a04b7f734fccc2a0bccc1e7eea7eca82e58f63441cf3"} Jan 20 16:44:43 crc kubenswrapper[4558]: I0120 16:44:43.941427 4558 scope.go:117] "RemoveContainer" containerID="bdf66e59d0b9aa27ce7c1466ba43ff53cff54e2236701e30523c3a594c541df7" Jan 20 16:44:43 crc kubenswrapper[4558]: I0120 16:44:43.972487 4558 scope.go:117] "RemoveContainer" containerID="74047f31d4ad656869362d419f9f4b1d00d9d511a6befa9bc4830981a36eb4ee" Jan 20 16:44:43 crc kubenswrapper[4558]: I0120 16:44:43.979516 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-275ld"] Jan 20 16:44:43 crc kubenswrapper[4558]: I0120 16:44:43.983671 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-275ld"] Jan 20 16:44:43 crc kubenswrapper[4558]: I0120 16:44:43.989106 4558 scope.go:117] "RemoveContainer" containerID="217e4d83348241c5e4f558fadef7fa7a642acb7b588f19144787f7210717f4b9" Jan 20 16:44:43 crc kubenswrapper[4558]: I0120 16:44:43.999736 4558 scope.go:117] "RemoveContainer" containerID="bdf66e59d0b9aa27ce7c1466ba43ff53cff54e2236701e30523c3a594c541df7" Jan 20 16:44:44 crc kubenswrapper[4558]: E0120 16:44:44.000497 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bdf66e59d0b9aa27ce7c1466ba43ff53cff54e2236701e30523c3a594c541df7\": container with ID starting with 
bdf66e59d0b9aa27ce7c1466ba43ff53cff54e2236701e30523c3a594c541df7 not found: ID does not exist" containerID="bdf66e59d0b9aa27ce7c1466ba43ff53cff54e2236701e30523c3a594c541df7" Jan 20 16:44:44 crc kubenswrapper[4558]: I0120 16:44:44.000525 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bdf66e59d0b9aa27ce7c1466ba43ff53cff54e2236701e30523c3a594c541df7"} err="failed to get container status \"bdf66e59d0b9aa27ce7c1466ba43ff53cff54e2236701e30523c3a594c541df7\": rpc error: code = NotFound desc = could not find container \"bdf66e59d0b9aa27ce7c1466ba43ff53cff54e2236701e30523c3a594c541df7\": container with ID starting with bdf66e59d0b9aa27ce7c1466ba43ff53cff54e2236701e30523c3a594c541df7 not found: ID does not exist" Jan 20 16:44:44 crc kubenswrapper[4558]: I0120 16:44:44.000542 4558 scope.go:117] "RemoveContainer" containerID="74047f31d4ad656869362d419f9f4b1d00d9d511a6befa9bc4830981a36eb4ee" Jan 20 16:44:44 crc kubenswrapper[4558]: E0120 16:44:44.000784 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"74047f31d4ad656869362d419f9f4b1d00d9d511a6befa9bc4830981a36eb4ee\": container with ID starting with 74047f31d4ad656869362d419f9f4b1d00d9d511a6befa9bc4830981a36eb4ee not found: ID does not exist" containerID="74047f31d4ad656869362d419f9f4b1d00d9d511a6befa9bc4830981a36eb4ee" Jan 20 16:44:44 crc kubenswrapper[4558]: I0120 16:44:44.000807 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"74047f31d4ad656869362d419f9f4b1d00d9d511a6befa9bc4830981a36eb4ee"} err="failed to get container status \"74047f31d4ad656869362d419f9f4b1d00d9d511a6befa9bc4830981a36eb4ee\": rpc error: code = NotFound desc = could not find container \"74047f31d4ad656869362d419f9f4b1d00d9d511a6befa9bc4830981a36eb4ee\": container with ID starting with 74047f31d4ad656869362d419f9f4b1d00d9d511a6befa9bc4830981a36eb4ee not found: ID does not exist" Jan 20 16:44:44 crc kubenswrapper[4558]: I0120 16:44:44.000820 4558 scope.go:117] "RemoveContainer" containerID="217e4d83348241c5e4f558fadef7fa7a642acb7b588f19144787f7210717f4b9" Jan 20 16:44:44 crc kubenswrapper[4558]: E0120 16:44:44.001042 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"217e4d83348241c5e4f558fadef7fa7a642acb7b588f19144787f7210717f4b9\": container with ID starting with 217e4d83348241c5e4f558fadef7fa7a642acb7b588f19144787f7210717f4b9 not found: ID does not exist" containerID="217e4d83348241c5e4f558fadef7fa7a642acb7b588f19144787f7210717f4b9" Jan 20 16:44:44 crc kubenswrapper[4558]: I0120 16:44:44.001078 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"217e4d83348241c5e4f558fadef7fa7a642acb7b588f19144787f7210717f4b9"} err="failed to get container status \"217e4d83348241c5e4f558fadef7fa7a642acb7b588f19144787f7210717f4b9\": rpc error: code = NotFound desc = could not find container \"217e4d83348241c5e4f558fadef7fa7a642acb7b588f19144787f7210717f4b9\": container with ID starting with 217e4d83348241c5e4f558fadef7fa7a642acb7b588f19144787f7210717f4b9 not found: ID does not exist" Jan 20 16:44:44 crc kubenswrapper[4558]: I0120 16:44:44.572363 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="567265d1-8d94-4502-aba8-e4ce03c0def9" path="/var/lib/kubelet/pods/567265d1-8d94-4502-aba8-e4ce03c0def9/volumes" Jan 20 16:44:44 crc kubenswrapper[4558]: I0120 16:44:44.573615 
4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="91268065-1eb8-448e-8597-458d2586ad12" path="/var/lib/kubelet/pods/91268065-1eb8-448e-8597-458d2586ad12/volumes" Jan 20 16:44:46 crc kubenswrapper[4558]: I0120 16:44:46.076597 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-bgqpg" Jan 20 16:44:46 crc kubenswrapper[4558]: I0120 16:44:46.274892 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-chwxp" Jan 20 16:44:46 crc kubenswrapper[4558]: I0120 16:44:46.467695 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-7pjxc" Jan 20 16:44:48 crc kubenswrapper[4558]: I0120 16:44:48.434443 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-7pjxc"] Jan 20 16:44:48 crc kubenswrapper[4558]: I0120 16:44:48.435013 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-7pjxc" podUID="04aecb46-8cfc-4f76-9fe7-31ac1f5bd71e" containerName="registry-server" containerID="cri-o://ec7c97685ba1b5ea26aa0cef35f1f659e1a9974ef9f58f40ddb50c035856e582" gracePeriod=2 Jan 20 16:44:48 crc kubenswrapper[4558]: I0120 16:44:48.747568 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-7pjxc" Jan 20 16:44:48 crc kubenswrapper[4558]: I0120 16:44:48.870063 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/04aecb46-8cfc-4f76-9fe7-31ac1f5bd71e-catalog-content\") pod \"04aecb46-8cfc-4f76-9fe7-31ac1f5bd71e\" (UID: \"04aecb46-8cfc-4f76-9fe7-31ac1f5bd71e\") " Jan 20 16:44:48 crc kubenswrapper[4558]: I0120 16:44:48.870384 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/04aecb46-8cfc-4f76-9fe7-31ac1f5bd71e-utilities\") pod \"04aecb46-8cfc-4f76-9fe7-31ac1f5bd71e\" (UID: \"04aecb46-8cfc-4f76-9fe7-31ac1f5bd71e\") " Jan 20 16:44:48 crc kubenswrapper[4558]: I0120 16:44:48.870449 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xf6sf\" (UniqueName: \"kubernetes.io/projected/04aecb46-8cfc-4f76-9fe7-31ac1f5bd71e-kube-api-access-xf6sf\") pod \"04aecb46-8cfc-4f76-9fe7-31ac1f5bd71e\" (UID: \"04aecb46-8cfc-4f76-9fe7-31ac1f5bd71e\") " Jan 20 16:44:48 crc kubenswrapper[4558]: I0120 16:44:48.870818 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/04aecb46-8cfc-4f76-9fe7-31ac1f5bd71e-utilities" (OuterVolumeSpecName: "utilities") pod "04aecb46-8cfc-4f76-9fe7-31ac1f5bd71e" (UID: "04aecb46-8cfc-4f76-9fe7-31ac1f5bd71e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 16:44:48 crc kubenswrapper[4558]: I0120 16:44:48.874625 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/04aecb46-8cfc-4f76-9fe7-31ac1f5bd71e-kube-api-access-xf6sf" (OuterVolumeSpecName: "kube-api-access-xf6sf") pod "04aecb46-8cfc-4f76-9fe7-31ac1f5bd71e" (UID: "04aecb46-8cfc-4f76-9fe7-31ac1f5bd71e"). InnerVolumeSpecName "kube-api-access-xf6sf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:44:48 crc kubenswrapper[4558]: I0120 16:44:48.902941 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/04aecb46-8cfc-4f76-9fe7-31ac1f5bd71e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "04aecb46-8cfc-4f76-9fe7-31ac1f5bd71e" (UID: "04aecb46-8cfc-4f76-9fe7-31ac1f5bd71e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 16:44:48 crc kubenswrapper[4558]: I0120 16:44:48.967706 4558 generic.go:334] "Generic (PLEG): container finished" podID="04aecb46-8cfc-4f76-9fe7-31ac1f5bd71e" containerID="ec7c97685ba1b5ea26aa0cef35f1f659e1a9974ef9f58f40ddb50c035856e582" exitCode=0 Jan 20 16:44:48 crc kubenswrapper[4558]: I0120 16:44:48.967760 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-7pjxc" Jan 20 16:44:48 crc kubenswrapper[4558]: I0120 16:44:48.967750 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7pjxc" event={"ID":"04aecb46-8cfc-4f76-9fe7-31ac1f5bd71e","Type":"ContainerDied","Data":"ec7c97685ba1b5ea26aa0cef35f1f659e1a9974ef9f58f40ddb50c035856e582"} Jan 20 16:44:48 crc kubenswrapper[4558]: I0120 16:44:48.967820 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7pjxc" event={"ID":"04aecb46-8cfc-4f76-9fe7-31ac1f5bd71e","Type":"ContainerDied","Data":"a5827942feb719a55a98eb7d81f945f7da6cd5892e4f9747b1059ee7f0c7c1bb"} Jan 20 16:44:48 crc kubenswrapper[4558]: I0120 16:44:48.967840 4558 scope.go:117] "RemoveContainer" containerID="ec7c97685ba1b5ea26aa0cef35f1f659e1a9974ef9f58f40ddb50c035856e582" Jan 20 16:44:48 crc kubenswrapper[4558]: I0120 16:44:48.971370 4558 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/04aecb46-8cfc-4f76-9fe7-31ac1f5bd71e-utilities\") on node \"crc\" DevicePath \"\"" Jan 20 16:44:48 crc kubenswrapper[4558]: I0120 16:44:48.971474 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xf6sf\" (UniqueName: \"kubernetes.io/projected/04aecb46-8cfc-4f76-9fe7-31ac1f5bd71e-kube-api-access-xf6sf\") on node \"crc\" DevicePath \"\"" Jan 20 16:44:48 crc kubenswrapper[4558]: I0120 16:44:48.971545 4558 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/04aecb46-8cfc-4f76-9fe7-31ac1f5bd71e-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 20 16:44:48 crc kubenswrapper[4558]: I0120 16:44:48.980266 4558 scope.go:117] "RemoveContainer" containerID="15fe4d3d007fe83139c8f54811261c5ea7984d9efe1b125c24feb217995c23b4" Jan 20 16:44:48 crc kubenswrapper[4558]: I0120 16:44:48.988050 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-7pjxc"] Jan 20 16:44:48 crc kubenswrapper[4558]: I0120 16:44:48.990479 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-7pjxc"] Jan 20 16:44:48 crc kubenswrapper[4558]: I0120 16:44:48.994535 4558 scope.go:117] "RemoveContainer" containerID="f0dd7447a21500c96ca1f487d164385428f01ff81b7c6dce95e7a8a4d0ad549f" Jan 20 16:44:49 crc kubenswrapper[4558]: I0120 16:44:49.013536 4558 scope.go:117] "RemoveContainer" containerID="ec7c97685ba1b5ea26aa0cef35f1f659e1a9974ef9f58f40ddb50c035856e582" Jan 20 16:44:49 crc kubenswrapper[4558]: E0120 16:44:49.013864 4558 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ec7c97685ba1b5ea26aa0cef35f1f659e1a9974ef9f58f40ddb50c035856e582\": container with ID starting with ec7c97685ba1b5ea26aa0cef35f1f659e1a9974ef9f58f40ddb50c035856e582 not found: ID does not exist" containerID="ec7c97685ba1b5ea26aa0cef35f1f659e1a9974ef9f58f40ddb50c035856e582" Jan 20 16:44:49 crc kubenswrapper[4558]: I0120 16:44:49.013907 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ec7c97685ba1b5ea26aa0cef35f1f659e1a9974ef9f58f40ddb50c035856e582"} err="failed to get container status \"ec7c97685ba1b5ea26aa0cef35f1f659e1a9974ef9f58f40ddb50c035856e582\": rpc error: code = NotFound desc = could not find container \"ec7c97685ba1b5ea26aa0cef35f1f659e1a9974ef9f58f40ddb50c035856e582\": container with ID starting with ec7c97685ba1b5ea26aa0cef35f1f659e1a9974ef9f58f40ddb50c035856e582 not found: ID does not exist" Jan 20 16:44:49 crc kubenswrapper[4558]: I0120 16:44:49.013935 4558 scope.go:117] "RemoveContainer" containerID="15fe4d3d007fe83139c8f54811261c5ea7984d9efe1b125c24feb217995c23b4" Jan 20 16:44:49 crc kubenswrapper[4558]: E0120 16:44:49.014320 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"15fe4d3d007fe83139c8f54811261c5ea7984d9efe1b125c24feb217995c23b4\": container with ID starting with 15fe4d3d007fe83139c8f54811261c5ea7984d9efe1b125c24feb217995c23b4 not found: ID does not exist" containerID="15fe4d3d007fe83139c8f54811261c5ea7984d9efe1b125c24feb217995c23b4" Jan 20 16:44:49 crc kubenswrapper[4558]: I0120 16:44:49.014426 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"15fe4d3d007fe83139c8f54811261c5ea7984d9efe1b125c24feb217995c23b4"} err="failed to get container status \"15fe4d3d007fe83139c8f54811261c5ea7984d9efe1b125c24feb217995c23b4\": rpc error: code = NotFound desc = could not find container \"15fe4d3d007fe83139c8f54811261c5ea7984d9efe1b125c24feb217995c23b4\": container with ID starting with 15fe4d3d007fe83139c8f54811261c5ea7984d9efe1b125c24feb217995c23b4 not found: ID does not exist" Jan 20 16:44:49 crc kubenswrapper[4558]: I0120 16:44:49.014531 4558 scope.go:117] "RemoveContainer" containerID="f0dd7447a21500c96ca1f487d164385428f01ff81b7c6dce95e7a8a4d0ad549f" Jan 20 16:44:49 crc kubenswrapper[4558]: E0120 16:44:49.014889 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f0dd7447a21500c96ca1f487d164385428f01ff81b7c6dce95e7a8a4d0ad549f\": container with ID starting with f0dd7447a21500c96ca1f487d164385428f01ff81b7c6dce95e7a8a4d0ad549f not found: ID does not exist" containerID="f0dd7447a21500c96ca1f487d164385428f01ff81b7c6dce95e7a8a4d0ad549f" Jan 20 16:44:49 crc kubenswrapper[4558]: I0120 16:44:49.014910 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f0dd7447a21500c96ca1f487d164385428f01ff81b7c6dce95e7a8a4d0ad549f"} err="failed to get container status \"f0dd7447a21500c96ca1f487d164385428f01ff81b7c6dce95e7a8a4d0ad549f\": rpc error: code = NotFound desc = could not find container \"f0dd7447a21500c96ca1f487d164385428f01ff81b7c6dce95e7a8a4d0ad549f\": container with ID starting with f0dd7447a21500c96ca1f487d164385428f01ff81b7c6dce95e7a8a4d0ad549f not found: ID does not exist" Jan 20 16:44:49 crc kubenswrapper[4558]: I0120 16:44:49.246266 4558 kubelet.go:2542] "SyncLoop 
(probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-hr9pm" Jan 20 16:44:49 crc kubenswrapper[4558]: I0120 16:44:49.271677 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-hr9pm" Jan 20 16:44:50 crc kubenswrapper[4558]: I0120 16:44:50.570956 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="04aecb46-8cfc-4f76-9fe7-31ac1f5bd71e" path="/var/lib/kubelet/pods/04aecb46-8cfc-4f76-9fe7-31ac1f5bd71e/volumes" Jan 20 16:44:52 crc kubenswrapper[4558]: I0120 16:44:52.610840 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Jan 20 16:44:52 crc kubenswrapper[4558]: E0120 16:44:52.611066 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91268065-1eb8-448e-8597-458d2586ad12" containerName="extract-content" Jan 20 16:44:52 crc kubenswrapper[4558]: I0120 16:44:52.611080 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="91268065-1eb8-448e-8597-458d2586ad12" containerName="extract-content" Jan 20 16:44:52 crc kubenswrapper[4558]: E0120 16:44:52.611093 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f480e1f1-c185-47d4-adce-f0e95d1e3372" containerName="pruner" Jan 20 16:44:52 crc kubenswrapper[4558]: I0120 16:44:52.611098 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f480e1f1-c185-47d4-adce-f0e95d1e3372" containerName="pruner" Jan 20 16:44:52 crc kubenswrapper[4558]: E0120 16:44:52.611107 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04aecb46-8cfc-4f76-9fe7-31ac1f5bd71e" containerName="extract-content" Jan 20 16:44:52 crc kubenswrapper[4558]: I0120 16:44:52.611114 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="04aecb46-8cfc-4f76-9fe7-31ac1f5bd71e" containerName="extract-content" Jan 20 16:44:52 crc kubenswrapper[4558]: E0120 16:44:52.611123 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91268065-1eb8-448e-8597-458d2586ad12" containerName="extract-utilities" Jan 20 16:44:52 crc kubenswrapper[4558]: I0120 16:44:52.611128 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="91268065-1eb8-448e-8597-458d2586ad12" containerName="extract-utilities" Jan 20 16:44:52 crc kubenswrapper[4558]: E0120 16:44:52.611134 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="567265d1-8d94-4502-aba8-e4ce03c0def9" containerName="extract-content" Jan 20 16:44:52 crc kubenswrapper[4558]: I0120 16:44:52.611139 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="567265d1-8d94-4502-aba8-e4ce03c0def9" containerName="extract-content" Jan 20 16:44:52 crc kubenswrapper[4558]: E0120 16:44:52.611149 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2aa030eb-fbf5-4bf5-ac24-0791492977c2" containerName="extract-content" Jan 20 16:44:52 crc kubenswrapper[4558]: I0120 16:44:52.611154 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="2aa030eb-fbf5-4bf5-ac24-0791492977c2" containerName="extract-content" Jan 20 16:44:52 crc kubenswrapper[4558]: E0120 16:44:52.611177 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04aecb46-8cfc-4f76-9fe7-31ac1f5bd71e" containerName="registry-server" Jan 20 16:44:52 crc kubenswrapper[4558]: I0120 16:44:52.611182 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="04aecb46-8cfc-4f76-9fe7-31ac1f5bd71e" containerName="registry-server" Jan 20 16:44:52 crc kubenswrapper[4558]: E0120 16:44:52.611190 4558 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="567265d1-8d94-4502-aba8-e4ce03c0def9" containerName="extract-utilities" Jan 20 16:44:52 crc kubenswrapper[4558]: I0120 16:44:52.611195 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="567265d1-8d94-4502-aba8-e4ce03c0def9" containerName="extract-utilities" Jan 20 16:44:52 crc kubenswrapper[4558]: E0120 16:44:52.611203 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2aa030eb-fbf5-4bf5-ac24-0791492977c2" containerName="registry-server" Jan 20 16:44:52 crc kubenswrapper[4558]: I0120 16:44:52.611208 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="2aa030eb-fbf5-4bf5-ac24-0791492977c2" containerName="registry-server" Jan 20 16:44:52 crc kubenswrapper[4558]: E0120 16:44:52.611216 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="567265d1-8d94-4502-aba8-e4ce03c0def9" containerName="registry-server" Jan 20 16:44:52 crc kubenswrapper[4558]: I0120 16:44:52.611221 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="567265d1-8d94-4502-aba8-e4ce03c0def9" containerName="registry-server" Jan 20 16:44:52 crc kubenswrapper[4558]: E0120 16:44:52.611231 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2aa030eb-fbf5-4bf5-ac24-0791492977c2" containerName="extract-utilities" Jan 20 16:44:52 crc kubenswrapper[4558]: I0120 16:44:52.611238 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="2aa030eb-fbf5-4bf5-ac24-0791492977c2" containerName="extract-utilities" Jan 20 16:44:52 crc kubenswrapper[4558]: E0120 16:44:52.611247 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04aecb46-8cfc-4f76-9fe7-31ac1f5bd71e" containerName="extract-utilities" Jan 20 16:44:52 crc kubenswrapper[4558]: I0120 16:44:52.611253 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="04aecb46-8cfc-4f76-9fe7-31ac1f5bd71e" containerName="extract-utilities" Jan 20 16:44:52 crc kubenswrapper[4558]: E0120 16:44:52.611261 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91268065-1eb8-448e-8597-458d2586ad12" containerName="registry-server" Jan 20 16:44:52 crc kubenswrapper[4558]: I0120 16:44:52.611266 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="91268065-1eb8-448e-8597-458d2586ad12" containerName="registry-server" Jan 20 16:44:52 crc kubenswrapper[4558]: I0120 16:44:52.611392 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="567265d1-8d94-4502-aba8-e4ce03c0def9" containerName="registry-server" Jan 20 16:44:52 crc kubenswrapper[4558]: I0120 16:44:52.611404 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="2aa030eb-fbf5-4bf5-ac24-0791492977c2" containerName="registry-server" Jan 20 16:44:52 crc kubenswrapper[4558]: I0120 16:44:52.611415 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="04aecb46-8cfc-4f76-9fe7-31ac1f5bd71e" containerName="registry-server" Jan 20 16:44:52 crc kubenswrapper[4558]: I0120 16:44:52.611423 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f480e1f1-c185-47d4-adce-f0e95d1e3372" containerName="pruner" Jan 20 16:44:52 crc kubenswrapper[4558]: I0120 16:44:52.611429 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="91268065-1eb8-448e-8597-458d2586ad12" containerName="registry-server" Jan 20 16:44:52 crc kubenswrapper[4558]: I0120 16:44:52.611786 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 20 16:44:52 crc kubenswrapper[4558]: I0120 16:44:52.613435 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Jan 20 16:44:52 crc kubenswrapper[4558]: I0120 16:44:52.613524 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Jan 20 16:44:52 crc kubenswrapper[4558]: I0120 16:44:52.619068 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Jan 20 16:44:52 crc kubenswrapper[4558]: I0120 16:44:52.693550 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 20 16:44:52 crc kubenswrapper[4558]: I0120 16:44:52.807488 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e92a7b19-3fe4-4c0c-a73d-05f4ca1b56d2-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"e92a7b19-3fe4-4c0c-a73d-05f4ca1b56d2\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 20 16:44:52 crc kubenswrapper[4558]: I0120 16:44:52.807533 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e92a7b19-3fe4-4c0c-a73d-05f4ca1b56d2-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"e92a7b19-3fe4-4c0c-a73d-05f4ca1b56d2\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 20 16:44:52 crc kubenswrapper[4558]: I0120 16:44:52.908452 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e92a7b19-3fe4-4c0c-a73d-05f4ca1b56d2-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"e92a7b19-3fe4-4c0c-a73d-05f4ca1b56d2\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 20 16:44:52 crc kubenswrapper[4558]: I0120 16:44:52.908498 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e92a7b19-3fe4-4c0c-a73d-05f4ca1b56d2-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"e92a7b19-3fe4-4c0c-a73d-05f4ca1b56d2\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 20 16:44:52 crc kubenswrapper[4558]: I0120 16:44:52.908636 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e92a7b19-3fe4-4c0c-a73d-05f4ca1b56d2-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"e92a7b19-3fe4-4c0c-a73d-05f4ca1b56d2\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 20 16:44:52 crc kubenswrapper[4558]: I0120 16:44:52.924637 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e92a7b19-3fe4-4c0c-a73d-05f4ca1b56d2-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"e92a7b19-3fe4-4c0c-a73d-05f4ca1b56d2\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 20 16:44:53 crc kubenswrapper[4558]: I0120 16:44:53.222880 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 20 16:44:53 crc kubenswrapper[4558]: I0120 16:44:53.560425 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Jan 20 16:44:53 crc kubenswrapper[4558]: I0120 16:44:53.992602 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"e92a7b19-3fe4-4c0c-a73d-05f4ca1b56d2","Type":"ContainerStarted","Data":"3e7d7918c7f988cd66611fb161c8443ad8a09a480d2cc0b9cd3e70a7eb2620a6"} Jan 20 16:44:53 crc kubenswrapper[4558]: I0120 16:44:53.992973 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"e92a7b19-3fe4-4c0c-a73d-05f4ca1b56d2","Type":"ContainerStarted","Data":"ad30c0a513c70efeadffbb768f7a1657304257ea11892fda2d8b1c31fd9a36ad"} Jan 20 16:44:54 crc kubenswrapper[4558]: I0120 16:44:54.997532 4558 generic.go:334] "Generic (PLEG): container finished" podID="e92a7b19-3fe4-4c0c-a73d-05f4ca1b56d2" containerID="3e7d7918c7f988cd66611fb161c8443ad8a09a480d2cc0b9cd3e70a7eb2620a6" exitCode=0 Jan 20 16:44:54 crc kubenswrapper[4558]: I0120 16:44:54.997575 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"e92a7b19-3fe4-4c0c-a73d-05f4ca1b56d2","Type":"ContainerDied","Data":"3e7d7918c7f988cd66611fb161c8443ad8a09a480d2cc0b9cd3e70a7eb2620a6"} Jan 20 16:44:56 crc kubenswrapper[4558]: I0120 16:44:56.167330 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 20 16:44:56 crc kubenswrapper[4558]: I0120 16:44:56.344761 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e92a7b19-3fe4-4c0c-a73d-05f4ca1b56d2-kube-api-access\") pod \"e92a7b19-3fe4-4c0c-a73d-05f4ca1b56d2\" (UID: \"e92a7b19-3fe4-4c0c-a73d-05f4ca1b56d2\") " Jan 20 16:44:56 crc kubenswrapper[4558]: I0120 16:44:56.344838 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e92a7b19-3fe4-4c0c-a73d-05f4ca1b56d2-kubelet-dir\") pod \"e92a7b19-3fe4-4c0c-a73d-05f4ca1b56d2\" (UID: \"e92a7b19-3fe4-4c0c-a73d-05f4ca1b56d2\") " Jan 20 16:44:56 crc kubenswrapper[4558]: I0120 16:44:56.344952 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e92a7b19-3fe4-4c0c-a73d-05f4ca1b56d2-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "e92a7b19-3fe4-4c0c-a73d-05f4ca1b56d2" (UID: "e92a7b19-3fe4-4c0c-a73d-05f4ca1b56d2"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 16:44:56 crc kubenswrapper[4558]: I0120 16:44:56.345063 4558 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e92a7b19-3fe4-4c0c-a73d-05f4ca1b56d2-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 20 16:44:56 crc kubenswrapper[4558]: I0120 16:44:56.350340 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e92a7b19-3fe4-4c0c-a73d-05f4ca1b56d2-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e92a7b19-3fe4-4c0c-a73d-05f4ca1b56d2" (UID: "e92a7b19-3fe4-4c0c-a73d-05f4ca1b56d2"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:44:56 crc kubenswrapper[4558]: I0120 16:44:56.447487 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e92a7b19-3fe4-4c0c-a73d-05f4ca1b56d2-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 20 16:44:57 crc kubenswrapper[4558]: I0120 16:44:57.007295 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"e92a7b19-3fe4-4c0c-a73d-05f4ca1b56d2","Type":"ContainerDied","Data":"ad30c0a513c70efeadffbb768f7a1657304257ea11892fda2d8b1c31fd9a36ad"} Jan 20 16:44:57 crc kubenswrapper[4558]: I0120 16:44:57.007337 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ad30c0a513c70efeadffbb768f7a1657304257ea11892fda2d8b1c31fd9a36ad" Jan 20 16:44:57 crc kubenswrapper[4558]: I0120 16:44:57.007383 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 20 16:44:57 crc kubenswrapper[4558]: I0120 16:44:57.330650 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 16:44:57 crc kubenswrapper[4558]: I0120 16:44:57.330708 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 16:44:59 crc kubenswrapper[4558]: I0120 16:44:59.612867 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Jan 20 16:44:59 crc kubenswrapper[4558]: E0120 16:44:59.613643 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e92a7b19-3fe4-4c0c-a73d-05f4ca1b56d2" containerName="pruner" Jan 20 16:44:59 crc kubenswrapper[4558]: I0120 16:44:59.613657 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e92a7b19-3fe4-4c0c-a73d-05f4ca1b56d2" containerName="pruner" Jan 20 16:44:59 crc kubenswrapper[4558]: I0120 16:44:59.613743 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e92a7b19-3fe4-4c0c-a73d-05f4ca1b56d2" containerName="pruner" Jan 20 16:44:59 crc kubenswrapper[4558]: I0120 16:44:59.614046 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 20 16:44:59 crc kubenswrapper[4558]: I0120 16:44:59.616923 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Jan 20 16:44:59 crc kubenswrapper[4558]: I0120 16:44:59.616948 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Jan 20 16:44:59 crc kubenswrapper[4558]: I0120 16:44:59.620714 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Jan 20 16:44:59 crc kubenswrapper[4558]: I0120 16:44:59.781674 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1bd82daf-3edd-4b66-8dc3-0efa12742508-kubelet-dir\") pod \"installer-9-crc\" (UID: \"1bd82daf-3edd-4b66-8dc3-0efa12742508\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 20 16:44:59 crc kubenswrapper[4558]: I0120 16:44:59.781717 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1bd82daf-3edd-4b66-8dc3-0efa12742508-kube-api-access\") pod \"installer-9-crc\" (UID: \"1bd82daf-3edd-4b66-8dc3-0efa12742508\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 20 16:44:59 crc kubenswrapper[4558]: I0120 16:44:59.781754 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/1bd82daf-3edd-4b66-8dc3-0efa12742508-var-lock\") pod \"installer-9-crc\" (UID: \"1bd82daf-3edd-4b66-8dc3-0efa12742508\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 20 16:44:59 crc kubenswrapper[4558]: I0120 16:44:59.882958 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1bd82daf-3edd-4b66-8dc3-0efa12742508-kubelet-dir\") pod \"installer-9-crc\" (UID: \"1bd82daf-3edd-4b66-8dc3-0efa12742508\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 20 16:44:59 crc kubenswrapper[4558]: I0120 16:44:59.883004 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1bd82daf-3edd-4b66-8dc3-0efa12742508-kube-api-access\") pod \"installer-9-crc\" (UID: \"1bd82daf-3edd-4b66-8dc3-0efa12742508\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 20 16:44:59 crc kubenswrapper[4558]: I0120 16:44:59.883035 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/1bd82daf-3edd-4b66-8dc3-0efa12742508-var-lock\") pod \"installer-9-crc\" (UID: \"1bd82daf-3edd-4b66-8dc3-0efa12742508\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 20 16:44:59 crc kubenswrapper[4558]: I0120 16:44:59.883110 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1bd82daf-3edd-4b66-8dc3-0efa12742508-kubelet-dir\") pod \"installer-9-crc\" (UID: \"1bd82daf-3edd-4b66-8dc3-0efa12742508\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 20 16:44:59 crc kubenswrapper[4558]: I0120 16:44:59.883143 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/1bd82daf-3edd-4b66-8dc3-0efa12742508-var-lock\") pod \"installer-9-crc\" (UID: 
\"1bd82daf-3edd-4b66-8dc3-0efa12742508\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 20 16:44:59 crc kubenswrapper[4558]: I0120 16:44:59.897948 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1bd82daf-3edd-4b66-8dc3-0efa12742508-kube-api-access\") pod \"installer-9-crc\" (UID: \"1bd82daf-3edd-4b66-8dc3-0efa12742508\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 20 16:44:59 crc kubenswrapper[4558]: I0120 16:44:59.931919 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 20 16:45:00 crc kubenswrapper[4558]: I0120 16:45:00.092867 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Jan 20 16:45:00 crc kubenswrapper[4558]: I0120 16:45:00.127783 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29482125-cn6z6"] Jan 20 16:45:00 crc kubenswrapper[4558]: I0120 16:45:00.128441 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29482125-cn6z6" Jan 20 16:45:00 crc kubenswrapper[4558]: I0120 16:45:00.130732 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 20 16:45:00 crc kubenswrapper[4558]: I0120 16:45:00.134396 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29482125-cn6z6"] Jan 20 16:45:00 crc kubenswrapper[4558]: I0120 16:45:00.134475 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 20 16:45:00 crc kubenswrapper[4558]: I0120 16:45:00.187366 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1042596b-4f2c-4da7-9bfb-05d2e8be6b80-secret-volume\") pod \"collect-profiles-29482125-cn6z6\" (UID: \"1042596b-4f2c-4da7-9bfb-05d2e8be6b80\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482125-cn6z6" Jan 20 16:45:00 crc kubenswrapper[4558]: I0120 16:45:00.187484 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1042596b-4f2c-4da7-9bfb-05d2e8be6b80-config-volume\") pod \"collect-profiles-29482125-cn6z6\" (UID: \"1042596b-4f2c-4da7-9bfb-05d2e8be6b80\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482125-cn6z6" Jan 20 16:45:00 crc kubenswrapper[4558]: I0120 16:45:00.187590 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cfpt2\" (UniqueName: \"kubernetes.io/projected/1042596b-4f2c-4da7-9bfb-05d2e8be6b80-kube-api-access-cfpt2\") pod \"collect-profiles-29482125-cn6z6\" (UID: \"1042596b-4f2c-4da7-9bfb-05d2e8be6b80\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482125-cn6z6" Jan 20 16:45:00 crc kubenswrapper[4558]: I0120 16:45:00.288245 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1042596b-4f2c-4da7-9bfb-05d2e8be6b80-secret-volume\") pod \"collect-profiles-29482125-cn6z6\" (UID: \"1042596b-4f2c-4da7-9bfb-05d2e8be6b80\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29482125-cn6z6" Jan 20 16:45:00 crc kubenswrapper[4558]: I0120 16:45:00.288308 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1042596b-4f2c-4da7-9bfb-05d2e8be6b80-config-volume\") pod \"collect-profiles-29482125-cn6z6\" (UID: \"1042596b-4f2c-4da7-9bfb-05d2e8be6b80\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482125-cn6z6" Jan 20 16:45:00 crc kubenswrapper[4558]: I0120 16:45:00.288363 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cfpt2\" (UniqueName: \"kubernetes.io/projected/1042596b-4f2c-4da7-9bfb-05d2e8be6b80-kube-api-access-cfpt2\") pod \"collect-profiles-29482125-cn6z6\" (UID: \"1042596b-4f2c-4da7-9bfb-05d2e8be6b80\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482125-cn6z6" Jan 20 16:45:00 crc kubenswrapper[4558]: I0120 16:45:00.289528 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1042596b-4f2c-4da7-9bfb-05d2e8be6b80-config-volume\") pod \"collect-profiles-29482125-cn6z6\" (UID: \"1042596b-4f2c-4da7-9bfb-05d2e8be6b80\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482125-cn6z6" Jan 20 16:45:00 crc kubenswrapper[4558]: I0120 16:45:00.292917 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1042596b-4f2c-4da7-9bfb-05d2e8be6b80-secret-volume\") pod \"collect-profiles-29482125-cn6z6\" (UID: \"1042596b-4f2c-4da7-9bfb-05d2e8be6b80\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482125-cn6z6" Jan 20 16:45:00 crc kubenswrapper[4558]: I0120 16:45:00.304690 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cfpt2\" (UniqueName: \"kubernetes.io/projected/1042596b-4f2c-4da7-9bfb-05d2e8be6b80-kube-api-access-cfpt2\") pod \"collect-profiles-29482125-cn6z6\" (UID: \"1042596b-4f2c-4da7-9bfb-05d2e8be6b80\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482125-cn6z6" Jan 20 16:45:00 crc kubenswrapper[4558]: I0120 16:45:00.457422 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29482125-cn6z6" Jan 20 16:45:00 crc kubenswrapper[4558]: I0120 16:45:00.800735 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29482125-cn6z6"] Jan 20 16:45:00 crc kubenswrapper[4558]: W0120 16:45:00.806888 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1042596b_4f2c_4da7_9bfb_05d2e8be6b80.slice/crio-eea21b20f8ff583f9da05dd0bfedd5b8e042a34f1505fb5a1e95f93b8c398d8b WatchSource:0}: Error finding container eea21b20f8ff583f9da05dd0bfedd5b8e042a34f1505fb5a1e95f93b8c398d8b: Status 404 returned error can't find the container with id eea21b20f8ff583f9da05dd0bfedd5b8e042a34f1505fb5a1e95f93b8c398d8b Jan 20 16:45:01 crc kubenswrapper[4558]: I0120 16:45:01.027227 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"1bd82daf-3edd-4b66-8dc3-0efa12742508","Type":"ContainerStarted","Data":"2b677e98325be11a28cd9caf4390b3985d824b63bd23269569cb14a8efecf5b4"} Jan 20 16:45:01 crc kubenswrapper[4558]: I0120 16:45:01.027576 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"1bd82daf-3edd-4b66-8dc3-0efa12742508","Type":"ContainerStarted","Data":"f9716851ee942377a21119eb5e86b693633861e6d4c953f1a63ce1c4e75ff2a1"} Jan 20 16:45:01 crc kubenswrapper[4558]: I0120 16:45:01.029048 4558 generic.go:334] "Generic (PLEG): container finished" podID="1042596b-4f2c-4da7-9bfb-05d2e8be6b80" containerID="1d239ea1d0bf395c205ee9453448d581f75ec979383afda6ed0ada4d6982504a" exitCode=0 Jan 20 16:45:01 crc kubenswrapper[4558]: I0120 16:45:01.029142 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29482125-cn6z6" event={"ID":"1042596b-4f2c-4da7-9bfb-05d2e8be6b80","Type":"ContainerDied","Data":"1d239ea1d0bf395c205ee9453448d581f75ec979383afda6ed0ada4d6982504a"} Jan 20 16:45:01 crc kubenswrapper[4558]: I0120 16:45:01.029196 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29482125-cn6z6" event={"ID":"1042596b-4f2c-4da7-9bfb-05d2e8be6b80","Type":"ContainerStarted","Data":"eea21b20f8ff583f9da05dd0bfedd5b8e042a34f1505fb5a1e95f93b8c398d8b"} Jan 20 16:45:01 crc kubenswrapper[4558]: I0120 16:45:01.041506 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=2.041495548 podStartE2EDuration="2.041495548s" podCreationTimestamp="2026-01-20 16:44:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:45:01.040757965 +0000 UTC m=+194.801095943" watchObservedRunningTime="2026-01-20 16:45:01.041495548 +0000 UTC m=+194.801833515" Jan 20 16:45:01 crc kubenswrapper[4558]: I0120 16:45:01.322916 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-nwjzd"] Jan 20 16:45:02 crc kubenswrapper[4558]: I0120 16:45:02.202944 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29482125-cn6z6" Jan 20 16:45:02 crc kubenswrapper[4558]: I0120 16:45:02.311863 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1042596b-4f2c-4da7-9bfb-05d2e8be6b80-config-volume\") pod \"1042596b-4f2c-4da7-9bfb-05d2e8be6b80\" (UID: \"1042596b-4f2c-4da7-9bfb-05d2e8be6b80\") " Jan 20 16:45:02 crc kubenswrapper[4558]: I0120 16:45:02.311905 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1042596b-4f2c-4da7-9bfb-05d2e8be6b80-secret-volume\") pod \"1042596b-4f2c-4da7-9bfb-05d2e8be6b80\" (UID: \"1042596b-4f2c-4da7-9bfb-05d2e8be6b80\") " Jan 20 16:45:02 crc kubenswrapper[4558]: I0120 16:45:02.312660 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1042596b-4f2c-4da7-9bfb-05d2e8be6b80-config-volume" (OuterVolumeSpecName: "config-volume") pod "1042596b-4f2c-4da7-9bfb-05d2e8be6b80" (UID: "1042596b-4f2c-4da7-9bfb-05d2e8be6b80"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:45:02 crc kubenswrapper[4558]: I0120 16:45:02.312692 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfpt2\" (UniqueName: \"kubernetes.io/projected/1042596b-4f2c-4da7-9bfb-05d2e8be6b80-kube-api-access-cfpt2\") pod \"1042596b-4f2c-4da7-9bfb-05d2e8be6b80\" (UID: \"1042596b-4f2c-4da7-9bfb-05d2e8be6b80\") " Jan 20 16:45:02 crc kubenswrapper[4558]: I0120 16:45:02.313051 4558 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1042596b-4f2c-4da7-9bfb-05d2e8be6b80-config-volume\") on node \"crc\" DevicePath \"\"" Jan 20 16:45:02 crc kubenswrapper[4558]: I0120 16:45:02.316561 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1042596b-4f2c-4da7-9bfb-05d2e8be6b80-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "1042596b-4f2c-4da7-9bfb-05d2e8be6b80" (UID: "1042596b-4f2c-4da7-9bfb-05d2e8be6b80"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:45:02 crc kubenswrapper[4558]: I0120 16:45:02.316849 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1042596b-4f2c-4da7-9bfb-05d2e8be6b80-kube-api-access-cfpt2" (OuterVolumeSpecName: "kube-api-access-cfpt2") pod "1042596b-4f2c-4da7-9bfb-05d2e8be6b80" (UID: "1042596b-4f2c-4da7-9bfb-05d2e8be6b80"). InnerVolumeSpecName "kube-api-access-cfpt2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:45:02 crc kubenswrapper[4558]: I0120 16:45:02.414218 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfpt2\" (UniqueName: \"kubernetes.io/projected/1042596b-4f2c-4da7-9bfb-05d2e8be6b80-kube-api-access-cfpt2\") on node \"crc\" DevicePath \"\"" Jan 20 16:45:02 crc kubenswrapper[4558]: I0120 16:45:02.414253 4558 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1042596b-4f2c-4da7-9bfb-05d2e8be6b80-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 20 16:45:03 crc kubenswrapper[4558]: I0120 16:45:03.038293 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29482125-cn6z6" event={"ID":"1042596b-4f2c-4da7-9bfb-05d2e8be6b80","Type":"ContainerDied","Data":"eea21b20f8ff583f9da05dd0bfedd5b8e042a34f1505fb5a1e95f93b8c398d8b"} Jan 20 16:45:03 crc kubenswrapper[4558]: I0120 16:45:03.038329 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="eea21b20f8ff583f9da05dd0bfedd5b8e042a34f1505fb5a1e95f93b8c398d8b" Jan 20 16:45:03 crc kubenswrapper[4558]: I0120 16:45:03.038330 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29482125-cn6z6" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.346903 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-nwjzd" podUID="4654626b-36d0-4072-a04c-d8ee0678fd50" containerName="oauth-openshift" containerID="cri-o://93a8a520bcd4319cde8907bbd948bb0c6da3eb3d67bae6be2a6e7fb65cac45fb" gracePeriod=15 Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.609910 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-nwjzd" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.632644 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-76766fc778-pfrpz"] Jan 20 16:45:26 crc kubenswrapper[4558]: E0120 16:45:26.632883 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4654626b-36d0-4072-a04c-d8ee0678fd50" containerName="oauth-openshift" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.632895 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4654626b-36d0-4072-a04c-d8ee0678fd50" containerName="oauth-openshift" Jan 20 16:45:26 crc kubenswrapper[4558]: E0120 16:45:26.632927 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1042596b-4f2c-4da7-9bfb-05d2e8be6b80" containerName="collect-profiles" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.632934 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1042596b-4f2c-4da7-9bfb-05d2e8be6b80" containerName="collect-profiles" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.633032 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="4654626b-36d0-4072-a04c-d8ee0678fd50" containerName="oauth-openshift" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.633045 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="1042596b-4f2c-4da7-9bfb-05d2e8be6b80" containerName="collect-profiles" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.633498 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-76766fc778-pfrpz" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.640302 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-76766fc778-pfrpz"] Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.682083 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4654626b-36d0-4072-a04c-d8ee0678fd50-v4-0-config-system-trusted-ca-bundle\") pod \"4654626b-36d0-4072-a04c-d8ee0678fd50\" (UID: \"4654626b-36d0-4072-a04c-d8ee0678fd50\") " Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.682116 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/4654626b-36d0-4072-a04c-d8ee0678fd50-v4-0-config-system-ocp-branding-template\") pod \"4654626b-36d0-4072-a04c-d8ee0678fd50\" (UID: \"4654626b-36d0-4072-a04c-d8ee0678fd50\") " Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.682139 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/4654626b-36d0-4072-a04c-d8ee0678fd50-v4-0-config-user-template-login\") pod \"4654626b-36d0-4072-a04c-d8ee0678fd50\" (UID: \"4654626b-36d0-4072-a04c-d8ee0678fd50\") " Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.682157 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/4654626b-36d0-4072-a04c-d8ee0678fd50-v4-0-config-user-template-error\") pod \"4654626b-36d0-4072-a04c-d8ee0678fd50\" (UID: \"4654626b-36d0-4072-a04c-d8ee0678fd50\") " Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.682212 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4654626b-36d0-4072-a04c-d8ee0678fd50-audit-policies\") pod \"4654626b-36d0-4072-a04c-d8ee0678fd50\" (UID: \"4654626b-36d0-4072-a04c-d8ee0678fd50\") " Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.682235 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8c24d\" (UniqueName: \"kubernetes.io/projected/4654626b-36d0-4072-a04c-d8ee0678fd50-kube-api-access-8c24d\") pod \"4654626b-36d0-4072-a04c-d8ee0678fd50\" (UID: \"4654626b-36d0-4072-a04c-d8ee0678fd50\") " Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.682253 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4654626b-36d0-4072-a04c-d8ee0678fd50-audit-dir\") pod \"4654626b-36d0-4072-a04c-d8ee0678fd50\" (UID: \"4654626b-36d0-4072-a04c-d8ee0678fd50\") " Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.682268 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/4654626b-36d0-4072-a04c-d8ee0678fd50-v4-0-config-system-serving-cert\") pod \"4654626b-36d0-4072-a04c-d8ee0678fd50\" (UID: \"4654626b-36d0-4072-a04c-d8ee0678fd50\") " Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.682284 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: 
\"kubernetes.io/secret/4654626b-36d0-4072-a04c-d8ee0678fd50-v4-0-config-user-idp-0-file-data\") pod \"4654626b-36d0-4072-a04c-d8ee0678fd50\" (UID: \"4654626b-36d0-4072-a04c-d8ee0678fd50\") " Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.682662 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4654626b-36d0-4072-a04c-d8ee0678fd50-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "4654626b-36d0-4072-a04c-d8ee0678fd50" (UID: "4654626b-36d0-4072-a04c-d8ee0678fd50"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.682859 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4654626b-36d0-4072-a04c-d8ee0678fd50-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "4654626b-36d0-4072-a04c-d8ee0678fd50" (UID: "4654626b-36d0-4072-a04c-d8ee0678fd50"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.682893 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4654626b-36d0-4072-a04c-d8ee0678fd50-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "4654626b-36d0-4072-a04c-d8ee0678fd50" (UID: "4654626b-36d0-4072-a04c-d8ee0678fd50"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.682934 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/4654626b-36d0-4072-a04c-d8ee0678fd50-v4-0-config-system-cliconfig\") pod \"4654626b-36d0-4072-a04c-d8ee0678fd50\" (UID: \"4654626b-36d0-4072-a04c-d8ee0678fd50\") " Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.682964 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/4654626b-36d0-4072-a04c-d8ee0678fd50-v4-0-config-system-router-certs\") pod \"4654626b-36d0-4072-a04c-d8ee0678fd50\" (UID: \"4654626b-36d0-4072-a04c-d8ee0678fd50\") " Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.682988 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/4654626b-36d0-4072-a04c-d8ee0678fd50-v4-0-config-system-service-ca\") pod \"4654626b-36d0-4072-a04c-d8ee0678fd50\" (UID: \"4654626b-36d0-4072-a04c-d8ee0678fd50\") " Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.683011 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/4654626b-36d0-4072-a04c-d8ee0678fd50-v4-0-config-system-session\") pod \"4654626b-36d0-4072-a04c-d8ee0678fd50\" (UID: \"4654626b-36d0-4072-a04c-d8ee0678fd50\") " Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.683033 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/4654626b-36d0-4072-a04c-d8ee0678fd50-v4-0-config-user-template-provider-selection\") pod \"4654626b-36d0-4072-a04c-d8ee0678fd50\" (UID: \"4654626b-36d0-4072-a04c-d8ee0678fd50\") " Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 
16:45:26.683129 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/d614011d-599a-4664-9c3f-1f6da5878609-v4-0-config-user-template-login\") pod \"oauth-openshift-76766fc778-pfrpz\" (UID: \"d614011d-599a-4664-9c3f-1f6da5878609\") " pod="openshift-authentication/oauth-openshift-76766fc778-pfrpz" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.683196 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/d614011d-599a-4664-9c3f-1f6da5878609-v4-0-config-system-serving-cert\") pod \"oauth-openshift-76766fc778-pfrpz\" (UID: \"d614011d-599a-4664-9c3f-1f6da5878609\") " pod="openshift-authentication/oauth-openshift-76766fc778-pfrpz" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.683223 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/d614011d-599a-4664-9c3f-1f6da5878609-v4-0-config-system-service-ca\") pod \"oauth-openshift-76766fc778-pfrpz\" (UID: \"d614011d-599a-4664-9c3f-1f6da5878609\") " pod="openshift-authentication/oauth-openshift-76766fc778-pfrpz" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.683247 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f2hw4\" (UniqueName: \"kubernetes.io/projected/d614011d-599a-4664-9c3f-1f6da5878609-kube-api-access-f2hw4\") pod \"oauth-openshift-76766fc778-pfrpz\" (UID: \"d614011d-599a-4664-9c3f-1f6da5878609\") " pod="openshift-authentication/oauth-openshift-76766fc778-pfrpz" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.683270 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/d614011d-599a-4664-9c3f-1f6da5878609-v4-0-config-system-session\") pod \"oauth-openshift-76766fc778-pfrpz\" (UID: \"d614011d-599a-4664-9c3f-1f6da5878609\") " pod="openshift-authentication/oauth-openshift-76766fc778-pfrpz" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.683285 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/d614011d-599a-4664-9c3f-1f6da5878609-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-76766fc778-pfrpz\" (UID: \"d614011d-599a-4664-9c3f-1f6da5878609\") " pod="openshift-authentication/oauth-openshift-76766fc778-pfrpz" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.683300 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4654626b-36d0-4072-a04c-d8ee0678fd50-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "4654626b-36d0-4072-a04c-d8ee0678fd50" (UID: "4654626b-36d0-4072-a04c-d8ee0678fd50"). InnerVolumeSpecName "v4-0-config-system-cliconfig". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.683308 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/d614011d-599a-4664-9c3f-1f6da5878609-v4-0-config-system-router-certs\") pod \"oauth-openshift-76766fc778-pfrpz\" (UID: \"d614011d-599a-4664-9c3f-1f6da5878609\") " pod="openshift-authentication/oauth-openshift-76766fc778-pfrpz" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.683328 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/d614011d-599a-4664-9c3f-1f6da5878609-audit-policies\") pod \"oauth-openshift-76766fc778-pfrpz\" (UID: \"d614011d-599a-4664-9c3f-1f6da5878609\") " pod="openshift-authentication/oauth-openshift-76766fc778-pfrpz" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.683343 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/d614011d-599a-4664-9c3f-1f6da5878609-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-76766fc778-pfrpz\" (UID: \"d614011d-599a-4664-9c3f-1f6da5878609\") " pod="openshift-authentication/oauth-openshift-76766fc778-pfrpz" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.683359 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/d614011d-599a-4664-9c3f-1f6da5878609-audit-dir\") pod \"oauth-openshift-76766fc778-pfrpz\" (UID: \"d614011d-599a-4664-9c3f-1f6da5878609\") " pod="openshift-authentication/oauth-openshift-76766fc778-pfrpz" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.683373 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/d614011d-599a-4664-9c3f-1f6da5878609-v4-0-config-user-template-error\") pod \"oauth-openshift-76766fc778-pfrpz\" (UID: \"d614011d-599a-4664-9c3f-1f6da5878609\") " pod="openshift-authentication/oauth-openshift-76766fc778-pfrpz" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.683396 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/d614011d-599a-4664-9c3f-1f6da5878609-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-76766fc778-pfrpz\" (UID: \"d614011d-599a-4664-9c3f-1f6da5878609\") " pod="openshift-authentication/oauth-openshift-76766fc778-pfrpz" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.683411 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d614011d-599a-4664-9c3f-1f6da5878609-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-76766fc778-pfrpz\" (UID: \"d614011d-599a-4664-9c3f-1f6da5878609\") " pod="openshift-authentication/oauth-openshift-76766fc778-pfrpz" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.683428 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/d614011d-599a-4664-9c3f-1f6da5878609-v4-0-config-system-cliconfig\") 
pod \"oauth-openshift-76766fc778-pfrpz\" (UID: \"d614011d-599a-4664-9c3f-1f6da5878609\") " pod="openshift-authentication/oauth-openshift-76766fc778-pfrpz" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.683458 4558 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4654626b-36d0-4072-a04c-d8ee0678fd50-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.683467 4558 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4654626b-36d0-4072-a04c-d8ee0678fd50-audit-policies\") on node \"crc\" DevicePath \"\"" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.683477 4558 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4654626b-36d0-4072-a04c-d8ee0678fd50-audit-dir\") on node \"crc\" DevicePath \"\"" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.683502 4558 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/4654626b-36d0-4072-a04c-d8ee0678fd50-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.683557 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4654626b-36d0-4072-a04c-d8ee0678fd50-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "4654626b-36d0-4072-a04c-d8ee0678fd50" (UID: "4654626b-36d0-4072-a04c-d8ee0678fd50"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.686884 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4654626b-36d0-4072-a04c-d8ee0678fd50-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "4654626b-36d0-4072-a04c-d8ee0678fd50" (UID: "4654626b-36d0-4072-a04c-d8ee0678fd50"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.686999 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4654626b-36d0-4072-a04c-d8ee0678fd50-kube-api-access-8c24d" (OuterVolumeSpecName: "kube-api-access-8c24d") pod "4654626b-36d0-4072-a04c-d8ee0678fd50" (UID: "4654626b-36d0-4072-a04c-d8ee0678fd50"). InnerVolumeSpecName "kube-api-access-8c24d". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.687023 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4654626b-36d0-4072-a04c-d8ee0678fd50-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "4654626b-36d0-4072-a04c-d8ee0678fd50" (UID: "4654626b-36d0-4072-a04c-d8ee0678fd50"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.687276 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4654626b-36d0-4072-a04c-d8ee0678fd50-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "4654626b-36d0-4072-a04c-d8ee0678fd50" (UID: "4654626b-36d0-4072-a04c-d8ee0678fd50"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.687378 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4654626b-36d0-4072-a04c-d8ee0678fd50-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "4654626b-36d0-4072-a04c-d8ee0678fd50" (UID: "4654626b-36d0-4072-a04c-d8ee0678fd50"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.687616 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4654626b-36d0-4072-a04c-d8ee0678fd50-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "4654626b-36d0-4072-a04c-d8ee0678fd50" (UID: "4654626b-36d0-4072-a04c-d8ee0678fd50"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.688090 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4654626b-36d0-4072-a04c-d8ee0678fd50-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "4654626b-36d0-4072-a04c-d8ee0678fd50" (UID: "4654626b-36d0-4072-a04c-d8ee0678fd50"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.688401 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4654626b-36d0-4072-a04c-d8ee0678fd50-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "4654626b-36d0-4072-a04c-d8ee0678fd50" (UID: "4654626b-36d0-4072-a04c-d8ee0678fd50"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.688428 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4654626b-36d0-4072-a04c-d8ee0678fd50-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "4654626b-36d0-4072-a04c-d8ee0678fd50" (UID: "4654626b-36d0-4072-a04c-d8ee0678fd50"). InnerVolumeSpecName "v4-0-config-system-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.784435 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/d614011d-599a-4664-9c3f-1f6da5878609-v4-0-config-user-template-login\") pod \"oauth-openshift-76766fc778-pfrpz\" (UID: \"d614011d-599a-4664-9c3f-1f6da5878609\") " pod="openshift-authentication/oauth-openshift-76766fc778-pfrpz" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.784474 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/d614011d-599a-4664-9c3f-1f6da5878609-v4-0-config-system-serving-cert\") pod \"oauth-openshift-76766fc778-pfrpz\" (UID: \"d614011d-599a-4664-9c3f-1f6da5878609\") " pod="openshift-authentication/oauth-openshift-76766fc778-pfrpz" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.784501 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/d614011d-599a-4664-9c3f-1f6da5878609-v4-0-config-system-service-ca\") pod \"oauth-openshift-76766fc778-pfrpz\" (UID: \"d614011d-599a-4664-9c3f-1f6da5878609\") " pod="openshift-authentication/oauth-openshift-76766fc778-pfrpz" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.784526 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f2hw4\" (UniqueName: \"kubernetes.io/projected/d614011d-599a-4664-9c3f-1f6da5878609-kube-api-access-f2hw4\") pod \"oauth-openshift-76766fc778-pfrpz\" (UID: \"d614011d-599a-4664-9c3f-1f6da5878609\") " pod="openshift-authentication/oauth-openshift-76766fc778-pfrpz" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.784550 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/d614011d-599a-4664-9c3f-1f6da5878609-v4-0-config-system-session\") pod \"oauth-openshift-76766fc778-pfrpz\" (UID: \"d614011d-599a-4664-9c3f-1f6da5878609\") " pod="openshift-authentication/oauth-openshift-76766fc778-pfrpz" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.784567 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/d614011d-599a-4664-9c3f-1f6da5878609-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-76766fc778-pfrpz\" (UID: \"d614011d-599a-4664-9c3f-1f6da5878609\") " pod="openshift-authentication/oauth-openshift-76766fc778-pfrpz" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.784588 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/d614011d-599a-4664-9c3f-1f6da5878609-v4-0-config-system-router-certs\") pod \"oauth-openshift-76766fc778-pfrpz\" (UID: \"d614011d-599a-4664-9c3f-1f6da5878609\") " pod="openshift-authentication/oauth-openshift-76766fc778-pfrpz" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.784607 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/d614011d-599a-4664-9c3f-1f6da5878609-audit-policies\") pod \"oauth-openshift-76766fc778-pfrpz\" (UID: \"d614011d-599a-4664-9c3f-1f6da5878609\") " 
pod="openshift-authentication/oauth-openshift-76766fc778-pfrpz" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.784622 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/d614011d-599a-4664-9c3f-1f6da5878609-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-76766fc778-pfrpz\" (UID: \"d614011d-599a-4664-9c3f-1f6da5878609\") " pod="openshift-authentication/oauth-openshift-76766fc778-pfrpz" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.784637 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/d614011d-599a-4664-9c3f-1f6da5878609-audit-dir\") pod \"oauth-openshift-76766fc778-pfrpz\" (UID: \"d614011d-599a-4664-9c3f-1f6da5878609\") " pod="openshift-authentication/oauth-openshift-76766fc778-pfrpz" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.784655 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/d614011d-599a-4664-9c3f-1f6da5878609-v4-0-config-user-template-error\") pod \"oauth-openshift-76766fc778-pfrpz\" (UID: \"d614011d-599a-4664-9c3f-1f6da5878609\") " pod="openshift-authentication/oauth-openshift-76766fc778-pfrpz" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.784676 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/d614011d-599a-4664-9c3f-1f6da5878609-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-76766fc778-pfrpz\" (UID: \"d614011d-599a-4664-9c3f-1f6da5878609\") " pod="openshift-authentication/oauth-openshift-76766fc778-pfrpz" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.784692 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d614011d-599a-4664-9c3f-1f6da5878609-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-76766fc778-pfrpz\" (UID: \"d614011d-599a-4664-9c3f-1f6da5878609\") " pod="openshift-authentication/oauth-openshift-76766fc778-pfrpz" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.784710 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/d614011d-599a-4664-9c3f-1f6da5878609-v4-0-config-system-cliconfig\") pod \"oauth-openshift-76766fc778-pfrpz\" (UID: \"d614011d-599a-4664-9c3f-1f6da5878609\") " pod="openshift-authentication/oauth-openshift-76766fc778-pfrpz" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.784742 4558 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/4654626b-36d0-4072-a04c-d8ee0678fd50-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.784753 4558 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/4654626b-36d0-4072-a04c-d8ee0678fd50-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.784763 4558 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: 
\"kubernetes.io/secret/4654626b-36d0-4072-a04c-d8ee0678fd50-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.784772 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8c24d\" (UniqueName: \"kubernetes.io/projected/4654626b-36d0-4072-a04c-d8ee0678fd50-kube-api-access-8c24d\") on node \"crc\" DevicePath \"\"" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.784781 4558 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/4654626b-36d0-4072-a04c-d8ee0678fd50-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.784789 4558 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/4654626b-36d0-4072-a04c-d8ee0678fd50-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.784798 4558 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/4654626b-36d0-4072-a04c-d8ee0678fd50-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.784807 4558 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/4654626b-36d0-4072-a04c-d8ee0678fd50-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.784815 4558 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/4654626b-36d0-4072-a04c-d8ee0678fd50-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.784824 4558 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/4654626b-36d0-4072-a04c-d8ee0678fd50-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.784952 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/d614011d-599a-4664-9c3f-1f6da5878609-audit-dir\") pod \"oauth-openshift-76766fc778-pfrpz\" (UID: \"d614011d-599a-4664-9c3f-1f6da5878609\") " pod="openshift-authentication/oauth-openshift-76766fc778-pfrpz" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.785455 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/d614011d-599a-4664-9c3f-1f6da5878609-audit-policies\") pod \"oauth-openshift-76766fc778-pfrpz\" (UID: \"d614011d-599a-4664-9c3f-1f6da5878609\") " pod="openshift-authentication/oauth-openshift-76766fc778-pfrpz" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.785729 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/d614011d-599a-4664-9c3f-1f6da5878609-v4-0-config-system-cliconfig\") pod \"oauth-openshift-76766fc778-pfrpz\" (UID: \"d614011d-599a-4664-9c3f-1f6da5878609\") " pod="openshift-authentication/oauth-openshift-76766fc778-pfrpz" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.785869 4558 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/d614011d-599a-4664-9c3f-1f6da5878609-v4-0-config-system-service-ca\") pod \"oauth-openshift-76766fc778-pfrpz\" (UID: \"d614011d-599a-4664-9c3f-1f6da5878609\") " pod="openshift-authentication/oauth-openshift-76766fc778-pfrpz" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.786420 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d614011d-599a-4664-9c3f-1f6da5878609-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-76766fc778-pfrpz\" (UID: \"d614011d-599a-4664-9c3f-1f6da5878609\") " pod="openshift-authentication/oauth-openshift-76766fc778-pfrpz" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.787382 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/d614011d-599a-4664-9c3f-1f6da5878609-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-76766fc778-pfrpz\" (UID: \"d614011d-599a-4664-9c3f-1f6da5878609\") " pod="openshift-authentication/oauth-openshift-76766fc778-pfrpz" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.787397 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/d614011d-599a-4664-9c3f-1f6da5878609-v4-0-config-user-template-login\") pod \"oauth-openshift-76766fc778-pfrpz\" (UID: \"d614011d-599a-4664-9c3f-1f6da5878609\") " pod="openshift-authentication/oauth-openshift-76766fc778-pfrpz" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.787561 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/d614011d-599a-4664-9c3f-1f6da5878609-v4-0-config-system-serving-cert\") pod \"oauth-openshift-76766fc778-pfrpz\" (UID: \"d614011d-599a-4664-9c3f-1f6da5878609\") " pod="openshift-authentication/oauth-openshift-76766fc778-pfrpz" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.787966 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/d614011d-599a-4664-9c3f-1f6da5878609-v4-0-config-system-router-certs\") pod \"oauth-openshift-76766fc778-pfrpz\" (UID: \"d614011d-599a-4664-9c3f-1f6da5878609\") " pod="openshift-authentication/oauth-openshift-76766fc778-pfrpz" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.788063 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/d614011d-599a-4664-9c3f-1f6da5878609-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-76766fc778-pfrpz\" (UID: \"d614011d-599a-4664-9c3f-1f6da5878609\") " pod="openshift-authentication/oauth-openshift-76766fc778-pfrpz" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.788249 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/d614011d-599a-4664-9c3f-1f6da5878609-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-76766fc778-pfrpz\" (UID: \"d614011d-599a-4664-9c3f-1f6da5878609\") " pod="openshift-authentication/oauth-openshift-76766fc778-pfrpz" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.788436 4558 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/d614011d-599a-4664-9c3f-1f6da5878609-v4-0-config-user-template-error\") pod \"oauth-openshift-76766fc778-pfrpz\" (UID: \"d614011d-599a-4664-9c3f-1f6da5878609\") " pod="openshift-authentication/oauth-openshift-76766fc778-pfrpz" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.789093 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/d614011d-599a-4664-9c3f-1f6da5878609-v4-0-config-system-session\") pod \"oauth-openshift-76766fc778-pfrpz\" (UID: \"d614011d-599a-4664-9c3f-1f6da5878609\") " pod="openshift-authentication/oauth-openshift-76766fc778-pfrpz" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.797596 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f2hw4\" (UniqueName: \"kubernetes.io/projected/d614011d-599a-4664-9c3f-1f6da5878609-kube-api-access-f2hw4\") pod \"oauth-openshift-76766fc778-pfrpz\" (UID: \"d614011d-599a-4664-9c3f-1f6da5878609\") " pod="openshift-authentication/oauth-openshift-76766fc778-pfrpz" Jan 20 16:45:26 crc kubenswrapper[4558]: I0120 16:45:26.949537 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-76766fc778-pfrpz" Jan 20 16:45:27 crc kubenswrapper[4558]: I0120 16:45:27.138499 4558 generic.go:334] "Generic (PLEG): container finished" podID="4654626b-36d0-4072-a04c-d8ee0678fd50" containerID="93a8a520bcd4319cde8907bbd948bb0c6da3eb3d67bae6be2a6e7fb65cac45fb" exitCode=0 Jan 20 16:45:27 crc kubenswrapper[4558]: I0120 16:45:27.138540 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-nwjzd" event={"ID":"4654626b-36d0-4072-a04c-d8ee0678fd50","Type":"ContainerDied","Data":"93a8a520bcd4319cde8907bbd948bb0c6da3eb3d67bae6be2a6e7fb65cac45fb"} Jan 20 16:45:27 crc kubenswrapper[4558]: I0120 16:45:27.138703 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-nwjzd" event={"ID":"4654626b-36d0-4072-a04c-d8ee0678fd50","Type":"ContainerDied","Data":"7afde49da87647c5e61c4acc968cf8fcd828569cb8c1a0fe790df704633e922f"} Jan 20 16:45:27 crc kubenswrapper[4558]: I0120 16:45:27.138725 4558 scope.go:117] "RemoveContainer" containerID="93a8a520bcd4319cde8907bbd948bb0c6da3eb3d67bae6be2a6e7fb65cac45fb" Jan 20 16:45:27 crc kubenswrapper[4558]: I0120 16:45:27.138552 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-nwjzd" Jan 20 16:45:27 crc kubenswrapper[4558]: I0120 16:45:27.157707 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-nwjzd"] Jan 20 16:45:27 crc kubenswrapper[4558]: I0120 16:45:27.160029 4558 scope.go:117] "RemoveContainer" containerID="93a8a520bcd4319cde8907bbd948bb0c6da3eb3d67bae6be2a6e7fb65cac45fb" Jan 20 16:45:27 crc kubenswrapper[4558]: I0120 16:45:27.160246 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-nwjzd"] Jan 20 16:45:27 crc kubenswrapper[4558]: E0120 16:45:27.160397 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"93a8a520bcd4319cde8907bbd948bb0c6da3eb3d67bae6be2a6e7fb65cac45fb\": container with ID starting with 93a8a520bcd4319cde8907bbd948bb0c6da3eb3d67bae6be2a6e7fb65cac45fb not found: ID does not exist" containerID="93a8a520bcd4319cde8907bbd948bb0c6da3eb3d67bae6be2a6e7fb65cac45fb" Jan 20 16:45:27 crc kubenswrapper[4558]: I0120 16:45:27.160427 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"93a8a520bcd4319cde8907bbd948bb0c6da3eb3d67bae6be2a6e7fb65cac45fb"} err="failed to get container status \"93a8a520bcd4319cde8907bbd948bb0c6da3eb3d67bae6be2a6e7fb65cac45fb\": rpc error: code = NotFound desc = could not find container \"93a8a520bcd4319cde8907bbd948bb0c6da3eb3d67bae6be2a6e7fb65cac45fb\": container with ID starting with 93a8a520bcd4319cde8907bbd948bb0c6da3eb3d67bae6be2a6e7fb65cac45fb not found: ID does not exist" Jan 20 16:45:27 crc kubenswrapper[4558]: I0120 16:45:27.280069 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-76766fc778-pfrpz"] Jan 20 16:45:27 crc kubenswrapper[4558]: I0120 16:45:27.330472 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 16:45:27 crc kubenswrapper[4558]: I0120 16:45:27.330524 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 16:45:27 crc kubenswrapper[4558]: I0120 16:45:27.330564 4558 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" Jan 20 16:45:27 crc kubenswrapper[4558]: I0120 16:45:27.331008 4558 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b18a59ecb802507e0d5988eacc915fd7e0d6954518de80f61162c97f680fd8c1"} pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 20 16:45:27 crc kubenswrapper[4558]: I0120 16:45:27.331064 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" 
containerID="cri-o://b18a59ecb802507e0d5988eacc915fd7e0d6954518de80f61162c97f680fd8c1" gracePeriod=600 Jan 20 16:45:28 crc kubenswrapper[4558]: I0120 16:45:28.145651 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-76766fc778-pfrpz" event={"ID":"d614011d-599a-4664-9c3f-1f6da5878609","Type":"ContainerStarted","Data":"5ed14556df0c6f20ba9e987fa48634660dc359264c3a155f77765e07402efa62"} Jan 20 16:45:28 crc kubenswrapper[4558]: I0120 16:45:28.146027 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-76766fc778-pfrpz" event={"ID":"d614011d-599a-4664-9c3f-1f6da5878609","Type":"ContainerStarted","Data":"eefa39e336a85d1cc195ba61442ce3d4d5acbd6da47a136a1acdf0bb4ce150ab"} Jan 20 16:45:28 crc kubenswrapper[4558]: I0120 16:45:28.146052 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-76766fc778-pfrpz" Jan 20 16:45:28 crc kubenswrapper[4558]: I0120 16:45:28.149961 4558 generic.go:334] "Generic (PLEG): container finished" podID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerID="b18a59ecb802507e0d5988eacc915fd7e0d6954518de80f61162c97f680fd8c1" exitCode=0 Jan 20 16:45:28 crc kubenswrapper[4558]: I0120 16:45:28.150032 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerDied","Data":"b18a59ecb802507e0d5988eacc915fd7e0d6954518de80f61162c97f680fd8c1"} Jan 20 16:45:28 crc kubenswrapper[4558]: I0120 16:45:28.150082 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerStarted","Data":"919fd3462b46bed62b26a1ec80dddd868d9b963136777394bcd10ef842cf78a9"} Jan 20 16:45:28 crc kubenswrapper[4558]: I0120 16:45:28.151293 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-76766fc778-pfrpz" Jan 20 16:45:28 crc kubenswrapper[4558]: I0120 16:45:28.161571 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-76766fc778-pfrpz" podStartSLOduration=27.161552104 podStartE2EDuration="27.161552104s" podCreationTimestamp="2026-01-20 16:45:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:45:28.161067932 +0000 UTC m=+221.921405899" watchObservedRunningTime="2026-01-20 16:45:28.161552104 +0000 UTC m=+221.921890072" Jan 20 16:45:28 crc kubenswrapper[4558]: I0120 16:45:28.571632 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4654626b-36d0-4072-a04c-d8ee0678fd50" path="/var/lib/kubelet/pods/4654626b-36d0-4072-a04c-d8ee0678fd50/volumes" Jan 20 16:45:37 crc kubenswrapper[4558]: I0120 16:45:37.913058 4558 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Jan 20 16:45:37 crc kubenswrapper[4558]: I0120 16:45:37.913930 4558 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 20 16:45:37 crc kubenswrapper[4558]: I0120 16:45:37.914145 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" 
containerID="cri-o://4ea68e442bb2143fbaa5e2c9fe335c6b023a31c2eeaa78d60de45c6ef40c2894" gracePeriod=15 Jan 20 16:45:37 crc kubenswrapper[4558]: I0120 16:45:37.914200 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 20 16:45:37 crc kubenswrapper[4558]: I0120 16:45:37.914278 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://7444e8fea1853bae45dbd3d52d07e3f7b94314cd29fe8c99891e515a892e569e" gracePeriod=15 Jan 20 16:45:37 crc kubenswrapper[4558]: I0120 16:45:37.914293 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://3a8122c3b57ac4f0026df149fe622450d07d8b0d517ad018decd7b21d8d9c04a" gracePeriod=15 Jan 20 16:45:37 crc kubenswrapper[4558]: I0120 16:45:37.914380 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://b6438ffea42f98178c4c2c21ae01725a8b54d6a6f790f47d64faf5ecd9f1c00f" gracePeriod=15 Jan 20 16:45:37 crc kubenswrapper[4558]: I0120 16:45:37.914396 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://e4850bde1fb538e21fc91949dc584ef0a791d718844691a44559e0513bb36203" gracePeriod=15 Jan 20 16:45:37 crc kubenswrapper[4558]: I0120 16:45:37.916150 4558 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 20 16:45:37 crc kubenswrapper[4558]: E0120 16:45:37.916361 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Jan 20 16:45:37 crc kubenswrapper[4558]: I0120 16:45:37.916372 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Jan 20 16:45:37 crc kubenswrapper[4558]: E0120 16:45:37.916382 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Jan 20 16:45:37 crc kubenswrapper[4558]: I0120 16:45:37.916388 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Jan 20 16:45:37 crc kubenswrapper[4558]: E0120 16:45:37.916395 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 20 16:45:37 crc kubenswrapper[4558]: I0120 16:45:37.916402 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 20 16:45:37 crc kubenswrapper[4558]: E0120 16:45:37.916410 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 20 16:45:37 crc kubenswrapper[4558]: I0120 16:45:37.916416 4558 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 20 16:45:37 crc kubenswrapper[4558]: E0120 16:45:37.916431 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Jan 20 16:45:37 crc kubenswrapper[4558]: I0120 16:45:37.916436 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Jan 20 16:45:37 crc kubenswrapper[4558]: E0120 16:45:37.916443 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Jan 20 16:45:37 crc kubenswrapper[4558]: I0120 16:45:37.916448 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Jan 20 16:45:37 crc kubenswrapper[4558]: E0120 16:45:37.916455 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Jan 20 16:45:37 crc kubenswrapper[4558]: I0120 16:45:37.916461 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Jan 20 16:45:37 crc kubenswrapper[4558]: I0120 16:45:37.916539 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Jan 20 16:45:37 crc kubenswrapper[4558]: I0120 16:45:37.916547 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 20 16:45:37 crc kubenswrapper[4558]: I0120 16:45:37.916558 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Jan 20 16:45:37 crc kubenswrapper[4558]: I0120 16:45:37.916565 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Jan 20 16:45:37 crc kubenswrapper[4558]: I0120 16:45:37.916571 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Jan 20 16:45:37 crc kubenswrapper[4558]: I0120 16:45:37.916725 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 20 16:45:38 crc kubenswrapper[4558]: I0120 16:45:38.007436 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 20 16:45:38 crc kubenswrapper[4558]: I0120 16:45:38.007682 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 20 16:45:38 crc kubenswrapper[4558]: I0120 16:45:38.007703 4558 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 20 16:45:38 crc kubenswrapper[4558]: I0120 16:45:38.007722 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 20 16:45:38 crc kubenswrapper[4558]: I0120 16:45:38.007739 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 20 16:45:38 crc kubenswrapper[4558]: I0120 16:45:38.007758 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 20 16:45:38 crc kubenswrapper[4558]: I0120 16:45:38.007847 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 20 16:45:38 crc kubenswrapper[4558]: I0120 16:45:38.007874 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 20 16:45:38 crc kubenswrapper[4558]: I0120 16:45:38.108506 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 20 16:45:38 crc kubenswrapper[4558]: I0120 16:45:38.108553 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 20 16:45:38 crc kubenswrapper[4558]: I0120 16:45:38.108569 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 20 16:45:38 crc 
kubenswrapper[4558]: I0120 16:45:38.108587 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 20 16:45:38 crc kubenswrapper[4558]: I0120 16:45:38.108605 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 20 16:45:38 crc kubenswrapper[4558]: I0120 16:45:38.108625 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 20 16:45:38 crc kubenswrapper[4558]: I0120 16:45:38.108664 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 20 16:45:38 crc kubenswrapper[4558]: I0120 16:45:38.108667 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 20 16:45:38 crc kubenswrapper[4558]: I0120 16:45:38.108680 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 20 16:45:38 crc kubenswrapper[4558]: I0120 16:45:38.108713 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 20 16:45:38 crc kubenswrapper[4558]: I0120 16:45:38.108739 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 20 16:45:38 crc kubenswrapper[4558]: I0120 16:45:38.108760 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 20 16:45:38 crc kubenswrapper[4558]: I0120 16:45:38.108780 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: 
\"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 20 16:45:38 crc kubenswrapper[4558]: I0120 16:45:38.108846 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 20 16:45:38 crc kubenswrapper[4558]: I0120 16:45:38.108866 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 20 16:45:38 crc kubenswrapper[4558]: I0120 16:45:38.108887 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 20 16:45:38 crc kubenswrapper[4558]: I0120 16:45:38.195443 4558 generic.go:334] "Generic (PLEG): container finished" podID="1bd82daf-3edd-4b66-8dc3-0efa12742508" containerID="2b677e98325be11a28cd9caf4390b3985d824b63bd23269569cb14a8efecf5b4" exitCode=0 Jan 20 16:45:38 crc kubenswrapper[4558]: I0120 16:45:38.195502 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"1bd82daf-3edd-4b66-8dc3-0efa12742508","Type":"ContainerDied","Data":"2b677e98325be11a28cd9caf4390b3985d824b63bd23269569cb14a8efecf5b4"} Jan 20 16:45:38 crc kubenswrapper[4558]: I0120 16:45:38.195955 4558 status_manager.go:851] "Failed to get status for pod" podUID="1bd82daf-3edd-4b66-8dc3-0efa12742508" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.8:6443: connect: connection refused" Jan 20 16:45:38 crc kubenswrapper[4558]: I0120 16:45:38.196139 4558 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 192.168.25.8:6443: connect: connection refused" Jan 20 16:45:38 crc kubenswrapper[4558]: I0120 16:45:38.196946 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Jan 20 16:45:38 crc kubenswrapper[4558]: I0120 16:45:38.197876 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 20 16:45:38 crc kubenswrapper[4558]: I0120 16:45:38.198392 4558 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="b6438ffea42f98178c4c2c21ae01725a8b54d6a6f790f47d64faf5ecd9f1c00f" exitCode=0 Jan 20 16:45:38 crc kubenswrapper[4558]: I0120 16:45:38.198412 4558 generic.go:334] "Generic (PLEG): 
container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="e4850bde1fb538e21fc91949dc584ef0a791d718844691a44559e0513bb36203" exitCode=0 Jan 20 16:45:38 crc kubenswrapper[4558]: I0120 16:45:38.198419 4558 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="3a8122c3b57ac4f0026df149fe622450d07d8b0d517ad018decd7b21d8d9c04a" exitCode=0 Jan 20 16:45:38 crc kubenswrapper[4558]: I0120 16:45:38.198426 4558 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="7444e8fea1853bae45dbd3d52d07e3f7b94314cd29fe8c99891e515a892e569e" exitCode=2 Jan 20 16:45:38 crc kubenswrapper[4558]: I0120 16:45:38.198470 4558 scope.go:117] "RemoveContainer" containerID="f2e22f1d4e87dd06a67f49aedad280da48ac1c4ba2e438a4925cf8bd5330c4d8" Jan 20 16:45:39 crc kubenswrapper[4558]: I0120 16:45:39.204115 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 20 16:45:39 crc kubenswrapper[4558]: I0120 16:45:39.356410 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 20 16:45:39 crc kubenswrapper[4558]: I0120 16:45:39.356952 4558 status_manager.go:851] "Failed to get status for pod" podUID="1bd82daf-3edd-4b66-8dc3-0efa12742508" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.8:6443: connect: connection refused" Jan 20 16:45:39 crc kubenswrapper[4558]: I0120 16:45:39.422672 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1bd82daf-3edd-4b66-8dc3-0efa12742508-kube-api-access\") pod \"1bd82daf-3edd-4b66-8dc3-0efa12742508\" (UID: \"1bd82daf-3edd-4b66-8dc3-0efa12742508\") " Jan 20 16:45:39 crc kubenswrapper[4558]: I0120 16:45:39.422706 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/1bd82daf-3edd-4b66-8dc3-0efa12742508-var-lock\") pod \"1bd82daf-3edd-4b66-8dc3-0efa12742508\" (UID: \"1bd82daf-3edd-4b66-8dc3-0efa12742508\") " Jan 20 16:45:39 crc kubenswrapper[4558]: I0120 16:45:39.422750 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1bd82daf-3edd-4b66-8dc3-0efa12742508-kubelet-dir\") pod \"1bd82daf-3edd-4b66-8dc3-0efa12742508\" (UID: \"1bd82daf-3edd-4b66-8dc3-0efa12742508\") " Jan 20 16:45:39 crc kubenswrapper[4558]: I0120 16:45:39.422829 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1bd82daf-3edd-4b66-8dc3-0efa12742508-var-lock" (OuterVolumeSpecName: "var-lock") pod "1bd82daf-3edd-4b66-8dc3-0efa12742508" (UID: "1bd82daf-3edd-4b66-8dc3-0efa12742508"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 16:45:39 crc kubenswrapper[4558]: I0120 16:45:39.422856 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1bd82daf-3edd-4b66-8dc3-0efa12742508-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "1bd82daf-3edd-4b66-8dc3-0efa12742508" (UID: "1bd82daf-3edd-4b66-8dc3-0efa12742508"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 16:45:39 crc kubenswrapper[4558]: I0120 16:45:39.422917 4558 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/1bd82daf-3edd-4b66-8dc3-0efa12742508-var-lock\") on node \"crc\" DevicePath \"\"" Jan 20 16:45:39 crc kubenswrapper[4558]: I0120 16:45:39.422930 4558 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1bd82daf-3edd-4b66-8dc3-0efa12742508-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 20 16:45:39 crc kubenswrapper[4558]: I0120 16:45:39.426885 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bd82daf-3edd-4b66-8dc3-0efa12742508-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1bd82daf-3edd-4b66-8dc3-0efa12742508" (UID: "1bd82daf-3edd-4b66-8dc3-0efa12742508"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:45:39 crc kubenswrapper[4558]: I0120 16:45:39.524081 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1bd82daf-3edd-4b66-8dc3-0efa12742508-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 20 16:45:40 crc kubenswrapper[4558]: I0120 16:45:40.183555 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 20 16:45:40 crc kubenswrapper[4558]: I0120 16:45:40.184468 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 20 16:45:40 crc kubenswrapper[4558]: I0120 16:45:40.184922 4558 status_manager.go:851] "Failed to get status for pod" podUID="1bd82daf-3edd-4b66-8dc3-0efa12742508" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.8:6443: connect: connection refused" Jan 20 16:45:40 crc kubenswrapper[4558]: I0120 16:45:40.185344 4558 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 192.168.25.8:6443: connect: connection refused" Jan 20 16:45:40 crc kubenswrapper[4558]: I0120 16:45:40.209680 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"1bd82daf-3edd-4b66-8dc3-0efa12742508","Type":"ContainerDied","Data":"f9716851ee942377a21119eb5e86b693633861e6d4c953f1a63ce1c4e75ff2a1"} Jan 20 16:45:40 crc kubenswrapper[4558]: I0120 16:45:40.209708 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f9716851ee942377a21119eb5e86b693633861e6d4c953f1a63ce1c4e75ff2a1" Jan 20 16:45:40 crc kubenswrapper[4558]: I0120 16:45:40.209746 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 20 16:45:40 crc kubenswrapper[4558]: I0120 16:45:40.211703 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 20 16:45:40 crc kubenswrapper[4558]: I0120 16:45:40.212352 4558 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="4ea68e442bb2143fbaa5e2c9fe335c6b023a31c2eeaa78d60de45c6ef40c2894" exitCode=0 Jan 20 16:45:40 crc kubenswrapper[4558]: I0120 16:45:40.212388 4558 scope.go:117] "RemoveContainer" containerID="b6438ffea42f98178c4c2c21ae01725a8b54d6a6f790f47d64faf5ecd9f1c00f" Jan 20 16:45:40 crc kubenswrapper[4558]: I0120 16:45:40.212479 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 20 16:45:40 crc kubenswrapper[4558]: I0120 16:45:40.220709 4558 status_manager.go:851] "Failed to get status for pod" podUID="1bd82daf-3edd-4b66-8dc3-0efa12742508" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.8:6443: connect: connection refused" Jan 20 16:45:40 crc kubenswrapper[4558]: I0120 16:45:40.220874 4558 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 192.168.25.8:6443: connect: connection refused" Jan 20 16:45:40 crc kubenswrapper[4558]: I0120 16:45:40.226108 4558 scope.go:117] "RemoveContainer" containerID="e4850bde1fb538e21fc91949dc584ef0a791d718844691a44559e0513bb36203" Jan 20 16:45:40 crc kubenswrapper[4558]: I0120 16:45:40.230394 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Jan 20 16:45:40 crc kubenswrapper[4558]: I0120 16:45:40.230447 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Jan 20 16:45:40 crc kubenswrapper[4558]: I0120 16:45:40.230478 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Jan 20 16:45:40 crc kubenswrapper[4558]: I0120 16:45:40.230599 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 16:45:40 crc kubenswrapper[4558]: I0120 16:45:40.230603 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 16:45:40 crc kubenswrapper[4558]: I0120 16:45:40.230680 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 16:45:40 crc kubenswrapper[4558]: I0120 16:45:40.230878 4558 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Jan 20 16:45:40 crc kubenswrapper[4558]: I0120 16:45:40.230949 4558 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Jan 20 16:45:40 crc kubenswrapper[4558]: I0120 16:45:40.231002 4558 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Jan 20 16:45:40 crc kubenswrapper[4558]: I0120 16:45:40.235817 4558 scope.go:117] "RemoveContainer" containerID="3a8122c3b57ac4f0026df149fe622450d07d8b0d517ad018decd7b21d8d9c04a" Jan 20 16:45:40 crc kubenswrapper[4558]: I0120 16:45:40.245838 4558 scope.go:117] "RemoveContainer" containerID="7444e8fea1853bae45dbd3d52d07e3f7b94314cd29fe8c99891e515a892e569e" Jan 20 16:45:40 crc kubenswrapper[4558]: I0120 16:45:40.255843 4558 scope.go:117] "RemoveContainer" containerID="4ea68e442bb2143fbaa5e2c9fe335c6b023a31c2eeaa78d60de45c6ef40c2894" Jan 20 16:45:40 crc kubenswrapper[4558]: I0120 16:45:40.266362 4558 scope.go:117] "RemoveContainer" containerID="c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f" Jan 20 16:45:40 crc kubenswrapper[4558]: I0120 16:45:40.279817 4558 scope.go:117] "RemoveContainer" containerID="b6438ffea42f98178c4c2c21ae01725a8b54d6a6f790f47d64faf5ecd9f1c00f" Jan 20 16:45:40 crc kubenswrapper[4558]: E0120 16:45:40.280078 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b6438ffea42f98178c4c2c21ae01725a8b54d6a6f790f47d64faf5ecd9f1c00f\": container with ID starting with b6438ffea42f98178c4c2c21ae01725a8b54d6a6f790f47d64faf5ecd9f1c00f not found: ID does not exist" containerID="b6438ffea42f98178c4c2c21ae01725a8b54d6a6f790f47d64faf5ecd9f1c00f" Jan 20 16:45:40 crc kubenswrapper[4558]: I0120 16:45:40.280104 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b6438ffea42f98178c4c2c21ae01725a8b54d6a6f790f47d64faf5ecd9f1c00f"} err="failed to get container status \"b6438ffea42f98178c4c2c21ae01725a8b54d6a6f790f47d64faf5ecd9f1c00f\": rpc error: code = NotFound desc = could not find container \"b6438ffea42f98178c4c2c21ae01725a8b54d6a6f790f47d64faf5ecd9f1c00f\": container with ID starting with 
b6438ffea42f98178c4c2c21ae01725a8b54d6a6f790f47d64faf5ecd9f1c00f not found: ID does not exist" Jan 20 16:45:40 crc kubenswrapper[4558]: I0120 16:45:40.280123 4558 scope.go:117] "RemoveContainer" containerID="e4850bde1fb538e21fc91949dc584ef0a791d718844691a44559e0513bb36203" Jan 20 16:45:40 crc kubenswrapper[4558]: E0120 16:45:40.280354 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e4850bde1fb538e21fc91949dc584ef0a791d718844691a44559e0513bb36203\": container with ID starting with e4850bde1fb538e21fc91949dc584ef0a791d718844691a44559e0513bb36203 not found: ID does not exist" containerID="e4850bde1fb538e21fc91949dc584ef0a791d718844691a44559e0513bb36203" Jan 20 16:45:40 crc kubenswrapper[4558]: I0120 16:45:40.280377 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e4850bde1fb538e21fc91949dc584ef0a791d718844691a44559e0513bb36203"} err="failed to get container status \"e4850bde1fb538e21fc91949dc584ef0a791d718844691a44559e0513bb36203\": rpc error: code = NotFound desc = could not find container \"e4850bde1fb538e21fc91949dc584ef0a791d718844691a44559e0513bb36203\": container with ID starting with e4850bde1fb538e21fc91949dc584ef0a791d718844691a44559e0513bb36203 not found: ID does not exist" Jan 20 16:45:40 crc kubenswrapper[4558]: I0120 16:45:40.280393 4558 scope.go:117] "RemoveContainer" containerID="3a8122c3b57ac4f0026df149fe622450d07d8b0d517ad018decd7b21d8d9c04a" Jan 20 16:45:40 crc kubenswrapper[4558]: E0120 16:45:40.280800 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3a8122c3b57ac4f0026df149fe622450d07d8b0d517ad018decd7b21d8d9c04a\": container with ID starting with 3a8122c3b57ac4f0026df149fe622450d07d8b0d517ad018decd7b21d8d9c04a not found: ID does not exist" containerID="3a8122c3b57ac4f0026df149fe622450d07d8b0d517ad018decd7b21d8d9c04a" Jan 20 16:45:40 crc kubenswrapper[4558]: I0120 16:45:40.280833 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3a8122c3b57ac4f0026df149fe622450d07d8b0d517ad018decd7b21d8d9c04a"} err="failed to get container status \"3a8122c3b57ac4f0026df149fe622450d07d8b0d517ad018decd7b21d8d9c04a\": rpc error: code = NotFound desc = could not find container \"3a8122c3b57ac4f0026df149fe622450d07d8b0d517ad018decd7b21d8d9c04a\": container with ID starting with 3a8122c3b57ac4f0026df149fe622450d07d8b0d517ad018decd7b21d8d9c04a not found: ID does not exist" Jan 20 16:45:40 crc kubenswrapper[4558]: I0120 16:45:40.280858 4558 scope.go:117] "RemoveContainer" containerID="7444e8fea1853bae45dbd3d52d07e3f7b94314cd29fe8c99891e515a892e569e" Jan 20 16:45:40 crc kubenswrapper[4558]: E0120 16:45:40.281132 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7444e8fea1853bae45dbd3d52d07e3f7b94314cd29fe8c99891e515a892e569e\": container with ID starting with 7444e8fea1853bae45dbd3d52d07e3f7b94314cd29fe8c99891e515a892e569e not found: ID does not exist" containerID="7444e8fea1853bae45dbd3d52d07e3f7b94314cd29fe8c99891e515a892e569e" Jan 20 16:45:40 crc kubenswrapper[4558]: I0120 16:45:40.281157 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7444e8fea1853bae45dbd3d52d07e3f7b94314cd29fe8c99891e515a892e569e"} err="failed to get container status \"7444e8fea1853bae45dbd3d52d07e3f7b94314cd29fe8c99891e515a892e569e\": rpc 
error: code = NotFound desc = could not find container \"7444e8fea1853bae45dbd3d52d07e3f7b94314cd29fe8c99891e515a892e569e\": container with ID starting with 7444e8fea1853bae45dbd3d52d07e3f7b94314cd29fe8c99891e515a892e569e not found: ID does not exist" Jan 20 16:45:40 crc kubenswrapper[4558]: I0120 16:45:40.281187 4558 scope.go:117] "RemoveContainer" containerID="4ea68e442bb2143fbaa5e2c9fe335c6b023a31c2eeaa78d60de45c6ef40c2894" Jan 20 16:45:40 crc kubenswrapper[4558]: E0120 16:45:40.283330 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4ea68e442bb2143fbaa5e2c9fe335c6b023a31c2eeaa78d60de45c6ef40c2894\": container with ID starting with 4ea68e442bb2143fbaa5e2c9fe335c6b023a31c2eeaa78d60de45c6ef40c2894 not found: ID does not exist" containerID="4ea68e442bb2143fbaa5e2c9fe335c6b023a31c2eeaa78d60de45c6ef40c2894" Jan 20 16:45:40 crc kubenswrapper[4558]: I0120 16:45:40.283359 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4ea68e442bb2143fbaa5e2c9fe335c6b023a31c2eeaa78d60de45c6ef40c2894"} err="failed to get container status \"4ea68e442bb2143fbaa5e2c9fe335c6b023a31c2eeaa78d60de45c6ef40c2894\": rpc error: code = NotFound desc = could not find container \"4ea68e442bb2143fbaa5e2c9fe335c6b023a31c2eeaa78d60de45c6ef40c2894\": container with ID starting with 4ea68e442bb2143fbaa5e2c9fe335c6b023a31c2eeaa78d60de45c6ef40c2894 not found: ID does not exist" Jan 20 16:45:40 crc kubenswrapper[4558]: I0120 16:45:40.283378 4558 scope.go:117] "RemoveContainer" containerID="c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f" Jan 20 16:45:40 crc kubenswrapper[4558]: E0120 16:45:40.284115 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\": container with ID starting with c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f not found: ID does not exist" containerID="c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f" Jan 20 16:45:40 crc kubenswrapper[4558]: I0120 16:45:40.284251 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f"} err="failed to get container status \"c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\": rpc error: code = NotFound desc = could not find container \"c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f\": container with ID starting with c39aa6c3942134a027be33cac715f9c0381bce5417754d5bad045c42a07d353f not found: ID does not exist" Jan 20 16:45:40 crc kubenswrapper[4558]: I0120 16:45:40.524327 4558 status_manager.go:851] "Failed to get status for pod" podUID="1bd82daf-3edd-4b66-8dc3-0efa12742508" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.8:6443: connect: connection refused" Jan 20 16:45:40 crc kubenswrapper[4558]: I0120 16:45:40.524675 4558 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 192.168.25.8:6443: connect: connection refused" Jan 20 16:45:40 crc kubenswrapper[4558]: I0120 
16:45:40.570243 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Jan 20 16:45:41 crc kubenswrapper[4558]: E0120 16:45:41.814080 4558 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.8:6443: connect: connection refused" Jan 20 16:45:41 crc kubenswrapper[4558]: E0120 16:45:41.815392 4558 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.8:6443: connect: connection refused" Jan 20 16:45:41 crc kubenswrapper[4558]: E0120 16:45:41.815809 4558 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.8:6443: connect: connection refused" Jan 20 16:45:41 crc kubenswrapper[4558]: E0120 16:45:41.816055 4558 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.8:6443: connect: connection refused" Jan 20 16:45:41 crc kubenswrapper[4558]: E0120 16:45:41.816370 4558 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.8:6443: connect: connection refused" Jan 20 16:45:41 crc kubenswrapper[4558]: I0120 16:45:41.816461 4558 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Jan 20 16:45:41 crc kubenswrapper[4558]: E0120 16:45:41.816722 4558 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.8:6443: connect: connection refused" interval="200ms" Jan 20 16:45:42 crc kubenswrapper[4558]: E0120 16:45:42.017669 4558 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.8:6443: connect: connection refused" interval="400ms" Jan 20 16:45:42 crc kubenswrapper[4558]: E0120 16:45:42.418269 4558 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.8:6443: connect: connection refused" interval="800ms" Jan 20 16:45:42 crc kubenswrapper[4558]: E0120 16:45:42.935604 4558 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 192.168.25.8:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 20 16:45:42 crc kubenswrapper[4558]: I0120 16:45:42.935966 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 20 16:45:42 crc kubenswrapper[4558]: W0120 16:45:42.951631 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf85e55b1a89d02b0cb034b1ea31ed45a.slice/crio-72298c28ee354d2c179d1026eba642acdcfe487a7fa38c70e20126acb8ac8968 WatchSource:0}: Error finding container 72298c28ee354d2c179d1026eba642acdcfe487a7fa38c70e20126acb8ac8968: Status 404 returned error can't find the container with id 72298c28ee354d2c179d1026eba642acdcfe487a7fa38c70e20126acb8ac8968 Jan 20 16:45:42 crc kubenswrapper[4558]: E0120 16:45:42.953577 4558 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 192.168.25.8:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.188c7e3a67219bec openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-20 16:45:42.9532211 +0000 UTC m=+236.713559067,LastTimestamp:2026-01-20 16:45:42.9532211 +0000 UTC m=+236.713559067,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Jan 20 16:45:43 crc kubenswrapper[4558]: E0120 16:45:43.219290 4558 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.8:6443: connect: connection refused" interval="1.6s" Jan 20 16:45:43 crc kubenswrapper[4558]: I0120 16:45:43.226728 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"92bd2dbe496a153423db938740852dbba2c997af8aafa002645e9c2b721af012"} Jan 20 16:45:43 crc kubenswrapper[4558]: I0120 16:45:43.226770 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"72298c28ee354d2c179d1026eba642acdcfe487a7fa38c70e20126acb8ac8968"} Jan 20 16:45:43 crc kubenswrapper[4558]: I0120 16:45:43.227282 4558 status_manager.go:851] "Failed to get status for pod" podUID="1bd82daf-3edd-4b66-8dc3-0efa12742508" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.8:6443: connect: connection refused" Jan 20 16:45:43 crc kubenswrapper[4558]: E0120 16:45:43.227302 4558 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 192.168.25.8:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 20 
16:45:44 crc kubenswrapper[4558]: E0120 16:45:44.033724 4558 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 192.168.25.8:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.188c7e3a67219bec openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-20 16:45:42.9532211 +0000 UTC m=+236.713559067,LastTimestamp:2026-01-20 16:45:42.9532211 +0000 UTC m=+236.713559067,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Jan 20 16:45:44 crc kubenswrapper[4558]: E0120 16:45:44.820642 4558 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.8:6443: connect: connection refused" interval="3.2s" Jan 20 16:45:46 crc kubenswrapper[4558]: I0120 16:45:46.567206 4558 status_manager.go:851] "Failed to get status for pod" podUID="1bd82daf-3edd-4b66-8dc3-0efa12742508" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.8:6443: connect: connection refused" Jan 20 16:45:48 crc kubenswrapper[4558]: E0120 16:45:48.021710 4558 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 192.168.25.8:6443: connect: connection refused" interval="6.4s" Jan 20 16:45:52 crc kubenswrapper[4558]: I0120 16:45:52.565903 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 20 16:45:52 crc kubenswrapper[4558]: I0120 16:45:52.566618 4558 status_manager.go:851] "Failed to get status for pod" podUID="1bd82daf-3edd-4b66-8dc3-0efa12742508" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.8:6443: connect: connection refused" Jan 20 16:45:52 crc kubenswrapper[4558]: I0120 16:45:52.576142 4558 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="30e1ff9d-8dcd-4754-9a7a-c09598fb83db" Jan 20 16:45:52 crc kubenswrapper[4558]: I0120 16:45:52.576183 4558 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="30e1ff9d-8dcd-4754-9a7a-c09598fb83db" Jan 20 16:45:52 crc kubenswrapper[4558]: E0120 16:45:52.576455 4558 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 192.168.25.8:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 20 16:45:52 crc kubenswrapper[4558]: I0120 16:45:52.576740 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 20 16:45:53 crc kubenswrapper[4558]: I0120 16:45:53.273461 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Jan 20 16:45:53 crc kubenswrapper[4558]: I0120 16:45:53.273675 4558 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="168e8f533aaa108e4160f9ae4af6bb7944edba7a596178ffffdc5325073fc3c8" exitCode=1 Jan 20 16:45:53 crc kubenswrapper[4558]: I0120 16:45:53.273723 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"168e8f533aaa108e4160f9ae4af6bb7944edba7a596178ffffdc5325073fc3c8"} Jan 20 16:45:53 crc kubenswrapper[4558]: I0120 16:45:53.274062 4558 scope.go:117] "RemoveContainer" containerID="168e8f533aaa108e4160f9ae4af6bb7944edba7a596178ffffdc5325073fc3c8" Jan 20 16:45:53 crc kubenswrapper[4558]: I0120 16:45:53.274340 4558 status_manager.go:851] "Failed to get status for pod" podUID="1bd82daf-3edd-4b66-8dc3-0efa12742508" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.8:6443: connect: connection refused" Jan 20 16:45:53 crc kubenswrapper[4558]: I0120 16:45:53.274569 4558 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 192.168.25.8:6443: connect: connection refused" Jan 20 16:45:53 crc kubenswrapper[4558]: I0120 16:45:53.275448 4558 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="a19c766d29f45c74bc605036aa70b4a5c2bc6bab8b893592ff905dde7e538b04" exitCode=0 Jan 20 16:45:53 crc kubenswrapper[4558]: I0120 16:45:53.275500 4558 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"a19c766d29f45c74bc605036aa70b4a5c2bc6bab8b893592ff905dde7e538b04"} Jan 20 16:45:53 crc kubenswrapper[4558]: I0120 16:45:53.275532 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"dbd442283c187507bafeead491e12c555ca050e98623891afaf5174c560d3987"} Jan 20 16:45:53 crc kubenswrapper[4558]: I0120 16:45:53.275764 4558 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="30e1ff9d-8dcd-4754-9a7a-c09598fb83db" Jan 20 16:45:53 crc kubenswrapper[4558]: I0120 16:45:53.275783 4558 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="30e1ff9d-8dcd-4754-9a7a-c09598fb83db" Jan 20 16:45:53 crc kubenswrapper[4558]: I0120 16:45:53.276034 4558 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 192.168.25.8:6443: connect: connection refused" Jan 20 16:45:53 crc kubenswrapper[4558]: E0120 16:45:53.276082 4558 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 192.168.25.8:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 20 16:45:53 crc kubenswrapper[4558]: I0120 16:45:53.276338 4558 status_manager.go:851] "Failed to get status for pod" podUID="1bd82daf-3edd-4b66-8dc3-0efa12742508" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 192.168.25.8:6443: connect: connection refused" Jan 20 16:45:53 crc kubenswrapper[4558]: I0120 16:45:53.459778 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 20 16:45:54 crc kubenswrapper[4558]: I0120 16:45:54.282296 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Jan 20 16:45:54 crc kubenswrapper[4558]: I0120 16:45:54.282397 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"3191780b9e10f309fe444e6266dff69c6014d34bb4317605dc17199c8a7fb49a"} Jan 20 16:45:54 crc kubenswrapper[4558]: I0120 16:45:54.284375 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"6658bbb4bce36e154c6faf6a8abfdf379ac419322a439d69a4a14b1f137cd9cd"} Jan 20 16:45:54 crc kubenswrapper[4558]: I0120 16:45:54.284402 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"3d56d8db147c3471e4c67498bc7b5a30aa94ed0fa53551482239fb01278ab530"} Jan 20 
16:45:54 crc kubenswrapper[4558]: I0120 16:45:54.284412 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"c7f659052cf0c90922a70fb07ba8d422a58140d7fee5cf078471ec6dbdfba2e5"} Jan 20 16:45:54 crc kubenswrapper[4558]: I0120 16:45:54.284421 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"7ed0d6c181dc463ffa608dbe07c29f0bc26b599d7a66078a70e1231e00e35334"} Jan 20 16:45:54 crc kubenswrapper[4558]: I0120 16:45:54.284429 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"1ead5ac468431b4a8637b1218ffab17901f80b42e1e31a38b069b2b6d3078103"} Jan 20 16:45:54 crc kubenswrapper[4558]: I0120 16:45:54.284627 4558 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="30e1ff9d-8dcd-4754-9a7a-c09598fb83db" Jan 20 16:45:54 crc kubenswrapper[4558]: I0120 16:45:54.284635 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 20 16:45:54 crc kubenswrapper[4558]: I0120 16:45:54.284644 4558 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="30e1ff9d-8dcd-4754-9a7a-c09598fb83db" Jan 20 16:45:57 crc kubenswrapper[4558]: I0120 16:45:57.577117 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 20 16:45:57 crc kubenswrapper[4558]: I0120 16:45:57.577532 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 20 16:45:57 crc kubenswrapper[4558]: I0120 16:45:57.581481 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 20 16:45:58 crc kubenswrapper[4558]: I0120 16:45:58.753363 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 20 16:45:58 crc kubenswrapper[4558]: I0120 16:45:58.756782 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 20 16:45:59 crc kubenswrapper[4558]: I0120 16:45:59.309174 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 20 16:45:59 crc kubenswrapper[4558]: I0120 16:45:59.734946 4558 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 20 16:45:59 crc kubenswrapper[4558]: I0120 16:45:59.789215 4558 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="9bb8b82f-626d-41e9-bec9-842cc7a35d00" Jan 20 16:46:00 crc kubenswrapper[4558]: I0120 16:46:00.312344 4558 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="30e1ff9d-8dcd-4754-9a7a-c09598fb83db" Jan 20 16:46:00 crc kubenswrapper[4558]: I0120 16:46:00.312367 4558 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
podUID="30e1ff9d-8dcd-4754-9a7a-c09598fb83db" Jan 20 16:46:00 crc kubenswrapper[4558]: I0120 16:46:00.314777 4558 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="9bb8b82f-626d-41e9-bec9-842cc7a35d00" Jan 20 16:46:00 crc kubenswrapper[4558]: I0120 16:46:00.315532 4558 status_manager.go:308] "Container readiness changed before pod has synced" pod="openshift-kube-apiserver/kube-apiserver-crc" containerID="cri-o://1ead5ac468431b4a8637b1218ffab17901f80b42e1e31a38b069b2b6d3078103" Jan 20 16:46:00 crc kubenswrapper[4558]: I0120 16:46:00.315550 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 20 16:46:01 crc kubenswrapper[4558]: I0120 16:46:01.316434 4558 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="30e1ff9d-8dcd-4754-9a7a-c09598fb83db" Jan 20 16:46:01 crc kubenswrapper[4558]: I0120 16:46:01.316629 4558 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="30e1ff9d-8dcd-4754-9a7a-c09598fb83db" Jan 20 16:46:01 crc kubenswrapper[4558]: I0120 16:46:01.318757 4558 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="9bb8b82f-626d-41e9-bec9-842cc7a35d00" Jan 20 16:46:03 crc kubenswrapper[4558]: I0120 16:46:03.463486 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 20 16:46:09 crc kubenswrapper[4558]: I0120 16:46:09.912688 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Jan 20 16:46:09 crc kubenswrapper[4558]: I0120 16:46:09.974426 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Jan 20 16:46:10 crc kubenswrapper[4558]: I0120 16:46:10.251739 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Jan 20 16:46:10 crc kubenswrapper[4558]: I0120 16:46:10.483668 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Jan 20 16:46:10 crc kubenswrapper[4558]: I0120 16:46:10.609868 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Jan 20 16:46:11 crc kubenswrapper[4558]: I0120 16:46:11.081608 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Jan 20 16:46:11 crc kubenswrapper[4558]: I0120 16:46:11.153914 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Jan 20 16:46:11 crc kubenswrapper[4558]: I0120 16:46:11.274559 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Jan 20 16:46:11 crc kubenswrapper[4558]: I0120 16:46:11.423864 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Jan 20 16:46:11 crc kubenswrapper[4558]: I0120 16:46:11.492416 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Jan 
20 16:46:11 crc kubenswrapper[4558]: I0120 16:46:11.564869 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Jan 20 16:46:11 crc kubenswrapper[4558]: I0120 16:46:11.575262 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Jan 20 16:46:11 crc kubenswrapper[4558]: I0120 16:46:11.736454 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Jan 20 16:46:11 crc kubenswrapper[4558]: I0120 16:46:11.869709 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Jan 20 16:46:11 crc kubenswrapper[4558]: I0120 16:46:11.928761 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Jan 20 16:46:11 crc kubenswrapper[4558]: I0120 16:46:11.989969 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Jan 20 16:46:12 crc kubenswrapper[4558]: I0120 16:46:12.005072 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Jan 20 16:46:12 crc kubenswrapper[4558]: I0120 16:46:12.047181 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Jan 20 16:46:12 crc kubenswrapper[4558]: I0120 16:46:12.274755 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Jan 20 16:46:12 crc kubenswrapper[4558]: I0120 16:46:12.412472 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Jan 20 16:46:12 crc kubenswrapper[4558]: I0120 16:46:12.443535 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Jan 20 16:46:12 crc kubenswrapper[4558]: I0120 16:46:12.505714 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Jan 20 16:46:12 crc kubenswrapper[4558]: I0120 16:46:12.709477 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Jan 20 16:46:12 crc kubenswrapper[4558]: I0120 16:46:12.715259 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Jan 20 16:46:12 crc kubenswrapper[4558]: I0120 16:46:12.844396 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Jan 20 16:46:12 crc kubenswrapper[4558]: I0120 16:46:12.908623 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Jan 20 16:46:13 crc kubenswrapper[4558]: I0120 16:46:13.091289 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Jan 20 16:46:13 crc kubenswrapper[4558]: I0120 16:46:13.159433 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Jan 20 16:46:13 crc kubenswrapper[4558]: I0120 16:46:13.169463 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Jan 20 16:46:13 crc kubenswrapper[4558]: I0120 
16:46:13.215921 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Jan 20 16:46:13 crc kubenswrapper[4558]: I0120 16:46:13.229427 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Jan 20 16:46:13 crc kubenswrapper[4558]: I0120 16:46:13.259844 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Jan 20 16:46:13 crc kubenswrapper[4558]: I0120 16:46:13.293194 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Jan 20 16:46:13 crc kubenswrapper[4558]: I0120 16:46:13.331975 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Jan 20 16:46:13 crc kubenswrapper[4558]: I0120 16:46:13.343141 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Jan 20 16:46:13 crc kubenswrapper[4558]: I0120 16:46:13.606944 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Jan 20 16:46:13 crc kubenswrapper[4558]: I0120 16:46:13.674477 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Jan 20 16:46:13 crc kubenswrapper[4558]: I0120 16:46:13.696438 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Jan 20 16:46:13 crc kubenswrapper[4558]: I0120 16:46:13.764567 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Jan 20 16:46:13 crc kubenswrapper[4558]: I0120 16:46:13.818245 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Jan 20 16:46:13 crc kubenswrapper[4558]: I0120 16:46:13.835644 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Jan 20 16:46:13 crc kubenswrapper[4558]: I0120 16:46:13.874544 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Jan 20 16:46:13 crc kubenswrapper[4558]: I0120 16:46:13.886857 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Jan 20 16:46:13 crc kubenswrapper[4558]: I0120 16:46:13.893831 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Jan 20 16:46:13 crc kubenswrapper[4558]: I0120 16:46:13.935248 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Jan 20 16:46:13 crc kubenswrapper[4558]: I0120 16:46:13.941303 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Jan 20 16:46:13 crc kubenswrapper[4558]: I0120 16:46:13.991064 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Jan 20 16:46:14 crc kubenswrapper[4558]: I0120 16:46:14.033440 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Jan 20 16:46:14 crc kubenswrapper[4558]: 
I0120 16:46:14.038693 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Jan 20 16:46:14 crc kubenswrapper[4558]: I0120 16:46:14.068732 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Jan 20 16:46:14 crc kubenswrapper[4558]: I0120 16:46:14.109245 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Jan 20 16:46:14 crc kubenswrapper[4558]: I0120 16:46:14.109549 4558 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Jan 20 16:46:14 crc kubenswrapper[4558]: I0120 16:46:14.175621 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Jan 20 16:46:14 crc kubenswrapper[4558]: I0120 16:46:14.230275 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Jan 20 16:46:14 crc kubenswrapper[4558]: I0120 16:46:14.296340 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Jan 20 16:46:14 crc kubenswrapper[4558]: I0120 16:46:14.297489 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Jan 20 16:46:14 crc kubenswrapper[4558]: I0120 16:46:14.335131 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Jan 20 16:46:14 crc kubenswrapper[4558]: I0120 16:46:14.381688 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 20 16:46:14 crc kubenswrapper[4558]: I0120 16:46:14.475899 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Jan 20 16:46:14 crc kubenswrapper[4558]: I0120 16:46:14.521111 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Jan 20 16:46:14 crc kubenswrapper[4558]: I0120 16:46:14.540007 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Jan 20 16:46:14 crc kubenswrapper[4558]: I0120 16:46:14.581013 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Jan 20 16:46:14 crc kubenswrapper[4558]: I0120 16:46:14.636776 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Jan 20 16:46:14 crc kubenswrapper[4558]: I0120 16:46:14.644882 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Jan 20 16:46:14 crc kubenswrapper[4558]: I0120 16:46:14.663831 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Jan 20 16:46:14 crc kubenswrapper[4558]: I0120 16:46:14.863480 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Jan 20 16:46:14 crc kubenswrapper[4558]: I0120 16:46:14.900115 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Jan 20 16:46:14 crc kubenswrapper[4558]: I0120 16:46:14.989285 4558 
reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Jan 20 16:46:15 crc kubenswrapper[4558]: I0120 16:46:15.055684 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Jan 20 16:46:15 crc kubenswrapper[4558]: I0120 16:46:15.070472 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Jan 20 16:46:15 crc kubenswrapper[4558]: I0120 16:46:15.133313 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Jan 20 16:46:15 crc kubenswrapper[4558]: I0120 16:46:15.216519 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Jan 20 16:46:15 crc kubenswrapper[4558]: I0120 16:46:15.219299 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Jan 20 16:46:15 crc kubenswrapper[4558]: I0120 16:46:15.238369 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Jan 20 16:46:15 crc kubenswrapper[4558]: I0120 16:46:15.238891 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Jan 20 16:46:15 crc kubenswrapper[4558]: I0120 16:46:15.343399 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Jan 20 16:46:15 crc kubenswrapper[4558]: I0120 16:46:15.392887 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Jan 20 16:46:15 crc kubenswrapper[4558]: I0120 16:46:15.393981 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Jan 20 16:46:15 crc kubenswrapper[4558]: I0120 16:46:15.472425 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Jan 20 16:46:15 crc kubenswrapper[4558]: I0120 16:46:15.561287 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Jan 20 16:46:15 crc kubenswrapper[4558]: I0120 16:46:15.597493 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Jan 20 16:46:15 crc kubenswrapper[4558]: I0120 16:46:15.611398 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Jan 20 16:46:15 crc kubenswrapper[4558]: I0120 16:46:15.617625 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Jan 20 16:46:15 crc kubenswrapper[4558]: I0120 16:46:15.629084 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Jan 20 16:46:15 crc kubenswrapper[4558]: I0120 16:46:15.658418 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Jan 20 16:46:15 crc kubenswrapper[4558]: I0120 16:46:15.708923 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Jan 20 
16:46:15 crc kubenswrapper[4558]: I0120 16:46:15.887816 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Jan 20 16:46:15 crc kubenswrapper[4558]: I0120 16:46:15.944753 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Jan 20 16:46:15 crc kubenswrapper[4558]: I0120 16:46:15.948679 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Jan 20 16:46:15 crc kubenswrapper[4558]: I0120 16:46:15.966442 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Jan 20 16:46:15 crc kubenswrapper[4558]: I0120 16:46:15.969140 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Jan 20 16:46:15 crc kubenswrapper[4558]: I0120 16:46:15.971058 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Jan 20 16:46:16 crc kubenswrapper[4558]: I0120 16:46:16.035997 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Jan 20 16:46:16 crc kubenswrapper[4558]: I0120 16:46:16.146400 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Jan 20 16:46:16 crc kubenswrapper[4558]: I0120 16:46:16.163473 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Jan 20 16:46:16 crc kubenswrapper[4558]: I0120 16:46:16.175178 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Jan 20 16:46:16 crc kubenswrapper[4558]: I0120 16:46:16.203410 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Jan 20 16:46:16 crc kubenswrapper[4558]: I0120 16:46:16.226807 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Jan 20 16:46:16 crc kubenswrapper[4558]: I0120 16:46:16.358044 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Jan 20 16:46:16 crc kubenswrapper[4558]: I0120 16:46:16.404886 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Jan 20 16:46:16 crc kubenswrapper[4558]: I0120 16:46:16.435772 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Jan 20 16:46:16 crc kubenswrapper[4558]: I0120 16:46:16.441283 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Jan 20 16:46:16 crc kubenswrapper[4558]: I0120 16:46:16.465740 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Jan 20 16:46:16 crc kubenswrapper[4558]: I0120 16:46:16.465828 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Jan 20 16:46:16 crc kubenswrapper[4558]: I0120 16:46:16.467408 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Jan 20 16:46:16 crc 
kubenswrapper[4558]: I0120 16:46:16.504982 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Jan 20 16:46:16 crc kubenswrapper[4558]: I0120 16:46:16.543456 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Jan 20 16:46:16 crc kubenswrapper[4558]: I0120 16:46:16.592151 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Jan 20 16:46:16 crc kubenswrapper[4558]: I0120 16:46:16.756722 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Jan 20 16:46:16 crc kubenswrapper[4558]: I0120 16:46:16.869868 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Jan 20 16:46:16 crc kubenswrapper[4558]: I0120 16:46:16.978787 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Jan 20 16:46:17 crc kubenswrapper[4558]: I0120 16:46:17.056143 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Jan 20 16:46:17 crc kubenswrapper[4558]: I0120 16:46:17.058310 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Jan 20 16:46:17 crc kubenswrapper[4558]: I0120 16:46:17.207948 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 20 16:46:17 crc kubenswrapper[4558]: I0120 16:46:17.286212 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Jan 20 16:46:17 crc kubenswrapper[4558]: I0120 16:46:17.410081 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Jan 20 16:46:17 crc kubenswrapper[4558]: I0120 16:46:17.410989 4558 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Jan 20 16:46:17 crc kubenswrapper[4558]: I0120 16:46:17.436666 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Jan 20 16:46:17 crc kubenswrapper[4558]: I0120 16:46:17.480300 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Jan 20 16:46:17 crc kubenswrapper[4558]: I0120 16:46:17.561611 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Jan 20 16:46:17 crc kubenswrapper[4558]: I0120 16:46:17.686272 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Jan 20 16:46:17 crc kubenswrapper[4558]: I0120 16:46:17.785775 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Jan 20 16:46:17 crc kubenswrapper[4558]: I0120 16:46:17.792113 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Jan 20 16:46:17 crc kubenswrapper[4558]: I0120 16:46:17.936356 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Jan 20 
16:46:17 crc kubenswrapper[4558]: I0120 16:46:17.953290 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Jan 20 16:46:17 crc kubenswrapper[4558]: I0120 16:46:17.971026 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Jan 20 16:46:17 crc kubenswrapper[4558]: I0120 16:46:17.992449 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Jan 20 16:46:17 crc kubenswrapper[4558]: I0120 16:46:17.995420 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Jan 20 16:46:18 crc kubenswrapper[4558]: I0120 16:46:18.039578 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Jan 20 16:46:18 crc kubenswrapper[4558]: I0120 16:46:18.102713 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Jan 20 16:46:18 crc kubenswrapper[4558]: I0120 16:46:18.186547 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Jan 20 16:46:18 crc kubenswrapper[4558]: I0120 16:46:18.272990 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Jan 20 16:46:18 crc kubenswrapper[4558]: I0120 16:46:18.298298 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Jan 20 16:46:18 crc kubenswrapper[4558]: I0120 16:46:18.392285 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Jan 20 16:46:18 crc kubenswrapper[4558]: I0120 16:46:18.426579 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Jan 20 16:46:18 crc kubenswrapper[4558]: I0120 16:46:18.441136 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Jan 20 16:46:18 crc kubenswrapper[4558]: I0120 16:46:18.444152 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Jan 20 16:46:18 crc kubenswrapper[4558]: I0120 16:46:18.444565 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 20 16:46:18 crc kubenswrapper[4558]: I0120 16:46:18.446787 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Jan 20 16:46:18 crc kubenswrapper[4558]: I0120 16:46:18.472047 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Jan 20 16:46:18 crc kubenswrapper[4558]: I0120 16:46:18.577872 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Jan 20 16:46:18 crc kubenswrapper[4558]: I0120 16:46:18.601948 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Jan 20 16:46:18 crc kubenswrapper[4558]: I0120 16:46:18.637649 4558 reflector.go:368] Caches 
populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Jan 20 16:46:18 crc kubenswrapper[4558]: I0120 16:46:18.658794 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Jan 20 16:46:18 crc kubenswrapper[4558]: I0120 16:46:18.722364 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Jan 20 16:46:18 crc kubenswrapper[4558]: I0120 16:46:18.771788 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Jan 20 16:46:18 crc kubenswrapper[4558]: I0120 16:46:18.798978 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Jan 20 16:46:18 crc kubenswrapper[4558]: I0120 16:46:18.827789 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Jan 20 16:46:18 crc kubenswrapper[4558]: I0120 16:46:18.877642 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Jan 20 16:46:18 crc kubenswrapper[4558]: I0120 16:46:18.898122 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 20 16:46:18 crc kubenswrapper[4558]: I0120 16:46:18.915966 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Jan 20 16:46:19 crc kubenswrapper[4558]: I0120 16:46:19.107783 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Jan 20 16:46:19 crc kubenswrapper[4558]: I0120 16:46:19.131723 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Jan 20 16:46:19 crc kubenswrapper[4558]: I0120 16:46:19.141695 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Jan 20 16:46:19 crc kubenswrapper[4558]: I0120 16:46:19.324535 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Jan 20 16:46:19 crc kubenswrapper[4558]: I0120 16:46:19.325519 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Jan 20 16:46:19 crc kubenswrapper[4558]: I0120 16:46:19.350496 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Jan 20 16:46:19 crc kubenswrapper[4558]: I0120 16:46:19.353792 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Jan 20 16:46:19 crc kubenswrapper[4558]: I0120 16:46:19.432885 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Jan 20 16:46:19 crc kubenswrapper[4558]: I0120 16:46:19.463042 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Jan 20 16:46:19 crc kubenswrapper[4558]: I0120 16:46:19.527437 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Jan 20 16:46:19 crc kubenswrapper[4558]: I0120 16:46:19.662360 4558 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Jan 20 16:46:19 crc kubenswrapper[4558]: I0120 16:46:19.675698 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Jan 20 16:46:19 crc kubenswrapper[4558]: I0120 16:46:19.685506 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Jan 20 16:46:19 crc kubenswrapper[4558]: I0120 16:46:19.850889 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Jan 20 16:46:19 crc kubenswrapper[4558]: I0120 16:46:19.926242 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Jan 20 16:46:19 crc kubenswrapper[4558]: I0120 16:46:19.935659 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Jan 20 16:46:20 crc kubenswrapper[4558]: I0120 16:46:20.014764 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Jan 20 16:46:20 crc kubenswrapper[4558]: I0120 16:46:20.081402 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Jan 20 16:46:20 crc kubenswrapper[4558]: I0120 16:46:20.115130 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Jan 20 16:46:20 crc kubenswrapper[4558]: I0120 16:46:20.180396 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Jan 20 16:46:20 crc kubenswrapper[4558]: I0120 16:46:20.215710 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Jan 20 16:46:20 crc kubenswrapper[4558]: I0120 16:46:20.254687 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Jan 20 16:46:20 crc kubenswrapper[4558]: I0120 16:46:20.283470 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Jan 20 16:46:20 crc kubenswrapper[4558]: I0120 16:46:20.350893 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Jan 20 16:46:20 crc kubenswrapper[4558]: I0120 16:46:20.420118 4558 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Jan 20 16:46:20 crc kubenswrapper[4558]: I0120 16:46:20.433363 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Jan 20 16:46:20 crc kubenswrapper[4558]: I0120 16:46:20.644508 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Jan 20 16:46:20 crc kubenswrapper[4558]: I0120 16:46:20.658613 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Jan 20 16:46:20 crc kubenswrapper[4558]: I0120 16:46:20.674572 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Jan 20 16:46:20 crc 
kubenswrapper[4558]: I0120 16:46:20.738305 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Jan 20 16:46:20 crc kubenswrapper[4558]: I0120 16:46:20.831728 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Jan 20 16:46:20 crc kubenswrapper[4558]: I0120 16:46:20.901197 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Jan 20 16:46:20 crc kubenswrapper[4558]: I0120 16:46:20.954119 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Jan 20 16:46:20 crc kubenswrapper[4558]: I0120 16:46:20.993295 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Jan 20 16:46:21 crc kubenswrapper[4558]: I0120 16:46:21.060485 4558 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Jan 20 16:46:21 crc kubenswrapper[4558]: I0120 16:46:21.144272 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Jan 20 16:46:21 crc kubenswrapper[4558]: I0120 16:46:21.192311 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Jan 20 16:46:21 crc kubenswrapper[4558]: I0120 16:46:21.418598 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Jan 20 16:46:21 crc kubenswrapper[4558]: I0120 16:46:21.429582 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Jan 20 16:46:21 crc kubenswrapper[4558]: I0120 16:46:21.465616 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Jan 20 16:46:21 crc kubenswrapper[4558]: I0120 16:46:21.511882 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Jan 20 16:46:21 crc kubenswrapper[4558]: I0120 16:46:21.567409 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Jan 20 16:46:21 crc kubenswrapper[4558]: I0120 16:46:21.576306 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Jan 20 16:46:21 crc kubenswrapper[4558]: I0120 16:46:21.607040 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Jan 20 16:46:21 crc kubenswrapper[4558]: I0120 16:46:21.614759 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Jan 20 16:46:21 crc kubenswrapper[4558]: I0120 16:46:21.652138 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Jan 20 16:46:21 crc kubenswrapper[4558]: I0120 16:46:21.729960 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Jan 20 16:46:21 crc kubenswrapper[4558]: I0120 16:46:21.751883 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Jan 20 16:46:21 
crc kubenswrapper[4558]: I0120 16:46:21.885124 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Jan 20 16:46:21 crc kubenswrapper[4558]: I0120 16:46:21.983291 4558 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Jan 20 16:46:21 crc kubenswrapper[4558]: I0120 16:46:21.986514 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Jan 20 16:46:21 crc kubenswrapper[4558]: I0120 16:46:21.987916 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 20 16:46:21 crc kubenswrapper[4558]: I0120 16:46:21.987968 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 20 16:46:21 crc kubenswrapper[4558]: I0120 16:46:21.991007 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 20 16:46:22 crc kubenswrapper[4558]: I0120 16:46:22.004866 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=23.004852342 podStartE2EDuration="23.004852342s" podCreationTimestamp="2026-01-20 16:45:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:46:21.9995451 +0000 UTC m=+275.759883067" watchObservedRunningTime="2026-01-20 16:46:22.004852342 +0000 UTC m=+275.765190309" Jan 20 16:46:22 crc kubenswrapper[4558]: I0120 16:46:22.115736 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Jan 20 16:46:22 crc kubenswrapper[4558]: I0120 16:46:22.135920 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 20 16:46:22 crc kubenswrapper[4558]: I0120 16:46:22.333189 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Jan 20 16:46:22 crc kubenswrapper[4558]: I0120 16:46:22.336401 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 20 16:46:22 crc kubenswrapper[4558]: I0120 16:46:22.407442 4558 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Jan 20 16:46:22 crc kubenswrapper[4558]: I0120 16:46:22.407688 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://92bd2dbe496a153423db938740852dbba2c997af8aafa002645e9c2b721af012" gracePeriod=5 Jan 20 16:46:22 crc kubenswrapper[4558]: I0120 16:46:22.459237 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Jan 20 16:46:22 crc kubenswrapper[4558]: I0120 16:46:22.459273 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Jan 20 16:46:22 crc kubenswrapper[4558]: I0120 16:46:22.487370 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Jan 20 16:46:22 crc kubenswrapper[4558]: I0120 16:46:22.490991 4558 
reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Jan 20 16:46:22 crc kubenswrapper[4558]: I0120 16:46:22.497912 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-bgqpg"] Jan 20 16:46:22 crc kubenswrapper[4558]: I0120 16:46:22.498107 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-bgqpg" podUID="11fdde87-2c2c-40ba-84e5-e0c93fc58130" containerName="registry-server" containerID="cri-o://281a736e412a599971d1daf067155d194b977c9ca8221d102dbcdef3490bd4c3" gracePeriod=30 Jan 20 16:46:22 crc kubenswrapper[4558]: I0120 16:46:22.504468 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-chwxp"] Jan 20 16:46:22 crc kubenswrapper[4558]: I0120 16:46:22.504647 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-chwxp" podUID="03467399-b14f-421a-a7ec-f2a533daed0d" containerName="registry-server" containerID="cri-o://838417c7e988767191dc59fe1cdcb6526ce68f568e4415ddcfa65aa9e72754d4" gracePeriod=30 Jan 20 16:46:22 crc kubenswrapper[4558]: I0120 16:46:22.507461 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-2j5lz"] Jan 20 16:46:22 crc kubenswrapper[4558]: I0120 16:46:22.507625 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-2j5lz" podUID="96b0927f-cc7b-48e8-82bd-bbec85c2b9b2" containerName="marketplace-operator" containerID="cri-o://ca73560949e9fc5dc991b312a1529736b6d7a2bd41cf857a84a1ef3542fbba5c" gracePeriod=30 Jan 20 16:46:22 crc kubenswrapper[4558]: I0120 16:46:22.520615 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mndbv"] Jan 20 16:46:22 crc kubenswrapper[4558]: I0120 16:46:22.520833 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-mndbv" podUID="05d983ba-960a-4975-b3de-73a3891fb342" containerName="registry-server" containerID="cri-o://bc337f05c8c80f963877487d730b664b8478ace32a99e7d4e2160bcdece78381" gracePeriod=30 Jan 20 16:46:22 crc kubenswrapper[4558]: I0120 16:46:22.527064 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-hr9pm"] Jan 20 16:46:22 crc kubenswrapper[4558]: I0120 16:46:22.527281 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-hr9pm" podUID="0099ba6c-2da8-471c-bde7-528db23c4faf" containerName="registry-server" containerID="cri-o://55c2641fdcac002bf4f850acf983db5fdb2a87445738f279b29d89ae8e70ddc5" gracePeriod=30 Jan 20 16:46:22 crc kubenswrapper[4558]: I0120 16:46:22.535189 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-fhnw5"] Jan 20 16:46:22 crc kubenswrapper[4558]: E0120 16:46:22.535376 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1bd82daf-3edd-4b66-8dc3-0efa12742508" containerName="installer" Jan 20 16:46:22 crc kubenswrapper[4558]: I0120 16:46:22.535388 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1bd82daf-3edd-4b66-8dc3-0efa12742508" containerName="installer" Jan 20 16:46:22 crc kubenswrapper[4558]: E0120 16:46:22.535396 4558 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Jan 20 16:46:22 crc kubenswrapper[4558]: I0120 16:46:22.535401 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Jan 20 16:46:22 crc kubenswrapper[4558]: I0120 16:46:22.535482 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="1bd82daf-3edd-4b66-8dc3-0efa12742508" containerName="installer" Jan 20 16:46:22 crc kubenswrapper[4558]: I0120 16:46:22.535494 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Jan 20 16:46:22 crc kubenswrapper[4558]: I0120 16:46:22.535797 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-fhnw5" Jan 20 16:46:22 crc kubenswrapper[4558]: I0120 16:46:22.605576 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6e227740-1076-4ebf-9fd4-b1ae12cc7beb-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-fhnw5\" (UID: \"6e227740-1076-4ebf-9fd4-b1ae12cc7beb\") " pod="openshift-marketplace/marketplace-operator-79b997595-fhnw5" Jan 20 16:46:22 crc kubenswrapper[4558]: I0120 16:46:22.605648 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/6e227740-1076-4ebf-9fd4-b1ae12cc7beb-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-fhnw5\" (UID: \"6e227740-1076-4ebf-9fd4-b1ae12cc7beb\") " pod="openshift-marketplace/marketplace-operator-79b997595-fhnw5" Jan 20 16:46:22 crc kubenswrapper[4558]: I0120 16:46:22.605675 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jbj4j\" (UniqueName: \"kubernetes.io/projected/6e227740-1076-4ebf-9fd4-b1ae12cc7beb-kube-api-access-jbj4j\") pod \"marketplace-operator-79b997595-fhnw5\" (UID: \"6e227740-1076-4ebf-9fd4-b1ae12cc7beb\") " pod="openshift-marketplace/marketplace-operator-79b997595-fhnw5" Jan 20 16:46:22 crc kubenswrapper[4558]: I0120 16:46:22.645305 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Jan 20 16:46:22 crc kubenswrapper[4558]: I0120 16:46:22.706394 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6e227740-1076-4ebf-9fd4-b1ae12cc7beb-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-fhnw5\" (UID: \"6e227740-1076-4ebf-9fd4-b1ae12cc7beb\") " pod="openshift-marketplace/marketplace-operator-79b997595-fhnw5" Jan 20 16:46:22 crc kubenswrapper[4558]: I0120 16:46:22.706465 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/6e227740-1076-4ebf-9fd4-b1ae12cc7beb-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-fhnw5\" (UID: \"6e227740-1076-4ebf-9fd4-b1ae12cc7beb\") " pod="openshift-marketplace/marketplace-operator-79b997595-fhnw5" Jan 20 16:46:22 crc kubenswrapper[4558]: I0120 16:46:22.706492 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jbj4j\" (UniqueName: 
\"kubernetes.io/projected/6e227740-1076-4ebf-9fd4-b1ae12cc7beb-kube-api-access-jbj4j\") pod \"marketplace-operator-79b997595-fhnw5\" (UID: \"6e227740-1076-4ebf-9fd4-b1ae12cc7beb\") " pod="openshift-marketplace/marketplace-operator-79b997595-fhnw5" Jan 20 16:46:22 crc kubenswrapper[4558]: I0120 16:46:22.708413 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6e227740-1076-4ebf-9fd4-b1ae12cc7beb-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-fhnw5\" (UID: \"6e227740-1076-4ebf-9fd4-b1ae12cc7beb\") " pod="openshift-marketplace/marketplace-operator-79b997595-fhnw5" Jan 20 16:46:22 crc kubenswrapper[4558]: I0120 16:46:22.711241 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/6e227740-1076-4ebf-9fd4-b1ae12cc7beb-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-fhnw5\" (UID: \"6e227740-1076-4ebf-9fd4-b1ae12cc7beb\") " pod="openshift-marketplace/marketplace-operator-79b997595-fhnw5" Jan 20 16:46:22 crc kubenswrapper[4558]: I0120 16:46:22.711953 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 20 16:46:22 crc kubenswrapper[4558]: I0120 16:46:22.718241 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Jan 20 16:46:22 crc kubenswrapper[4558]: I0120 16:46:22.719910 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jbj4j\" (UniqueName: \"kubernetes.io/projected/6e227740-1076-4ebf-9fd4-b1ae12cc7beb-kube-api-access-jbj4j\") pod \"marketplace-operator-79b997595-fhnw5\" (UID: \"6e227740-1076-4ebf-9fd4-b1ae12cc7beb\") " pod="openshift-marketplace/marketplace-operator-79b997595-fhnw5" Jan 20 16:46:22 crc kubenswrapper[4558]: I0120 16:46:22.739507 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Jan 20 16:46:22 crc kubenswrapper[4558]: I0120 16:46:22.858903 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Jan 20 16:46:22 crc kubenswrapper[4558]: I0120 16:46:22.883476 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-fhnw5" Jan 20 16:46:22 crc kubenswrapper[4558]: I0120 16:46:22.887010 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-bgqpg" Jan 20 16:46:22 crc kubenswrapper[4558]: I0120 16:46:22.908300 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mndbv" Jan 20 16:46:22 crc kubenswrapper[4558]: I0120 16:46:22.908566 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/11fdde87-2c2c-40ba-84e5-e0c93fc58130-catalog-content\") pod \"11fdde87-2c2c-40ba-84e5-e0c93fc58130\" (UID: \"11fdde87-2c2c-40ba-84e5-e0c93fc58130\") " Jan 20 16:46:22 crc kubenswrapper[4558]: I0120 16:46:22.911305 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9zszb\" (UniqueName: \"kubernetes.io/projected/11fdde87-2c2c-40ba-84e5-e0c93fc58130-kube-api-access-9zszb\") pod \"11fdde87-2c2c-40ba-84e5-e0c93fc58130\" (UID: \"11fdde87-2c2c-40ba-84e5-e0c93fc58130\") " Jan 20 16:46:22 crc kubenswrapper[4558]: I0120 16:46:22.911334 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/11fdde87-2c2c-40ba-84e5-e0c93fc58130-utilities\") pod \"11fdde87-2c2c-40ba-84e5-e0c93fc58130\" (UID: \"11fdde87-2c2c-40ba-84e5-e0c93fc58130\") " Jan 20 16:46:22 crc kubenswrapper[4558]: I0120 16:46:22.912478 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/11fdde87-2c2c-40ba-84e5-e0c93fc58130-utilities" (OuterVolumeSpecName: "utilities") pod "11fdde87-2c2c-40ba-84e5-e0c93fc58130" (UID: "11fdde87-2c2c-40ba-84e5-e0c93fc58130"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 16:46:22 crc kubenswrapper[4558]: I0120 16:46:22.918393 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-chwxp" Jan 20 16:46:22 crc kubenswrapper[4558]: I0120 16:46:22.918468 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/11fdde87-2c2c-40ba-84e5-e0c93fc58130-kube-api-access-9zszb" (OuterVolumeSpecName: "kube-api-access-9zszb") pod "11fdde87-2c2c-40ba-84e5-e0c93fc58130" (UID: "11fdde87-2c2c-40ba-84e5-e0c93fc58130"). InnerVolumeSpecName "kube-api-access-9zszb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:46:22 crc kubenswrapper[4558]: I0120 16:46:22.920963 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hr9pm" Jan 20 16:46:22 crc kubenswrapper[4558]: I0120 16:46:22.921139 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-2j5lz" Jan 20 16:46:22 crc kubenswrapper[4558]: I0120 16:46:22.955651 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/11fdde87-2c2c-40ba-84e5-e0c93fc58130-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "11fdde87-2c2c-40ba-84e5-e0c93fc58130" (UID: "11fdde87-2c2c-40ba-84e5-e0c93fc58130"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.012786 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0099ba6c-2da8-471c-bde7-528db23c4faf-utilities\") pod \"0099ba6c-2da8-471c-bde7-528db23c4faf\" (UID: \"0099ba6c-2da8-471c-bde7-528db23c4faf\") " Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.012825 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/96b0927f-cc7b-48e8-82bd-bbec85c2b9b2-marketplace-operator-metrics\") pod \"96b0927f-cc7b-48e8-82bd-bbec85c2b9b2\" (UID: \"96b0927f-cc7b-48e8-82bd-bbec85c2b9b2\") " Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.012851 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzkhw\" (UniqueName: \"kubernetes.io/projected/96b0927f-cc7b-48e8-82bd-bbec85c2b9b2-kube-api-access-nzkhw\") pod \"96b0927f-cc7b-48e8-82bd-bbec85c2b9b2\" (UID: \"96b0927f-cc7b-48e8-82bd-bbec85c2b9b2\") " Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.012877 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/03467399-b14f-421a-a7ec-f2a533daed0d-utilities\") pod \"03467399-b14f-421a-a7ec-f2a533daed0d\" (UID: \"03467399-b14f-421a-a7ec-f2a533daed0d\") " Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.012900 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9gsqz\" (UniqueName: \"kubernetes.io/projected/03467399-b14f-421a-a7ec-f2a533daed0d-kube-api-access-9gsqz\") pod \"03467399-b14f-421a-a7ec-f2a533daed0d\" (UID: \"03467399-b14f-421a-a7ec-f2a533daed0d\") " Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.012913 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xz4cs\" (UniqueName: \"kubernetes.io/projected/05d983ba-960a-4975-b3de-73a3891fb342-kube-api-access-xz4cs\") pod \"05d983ba-960a-4975-b3de-73a3891fb342\" (UID: \"05d983ba-960a-4975-b3de-73a3891fb342\") " Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.012944 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/05d983ba-960a-4975-b3de-73a3891fb342-catalog-content\") pod \"05d983ba-960a-4975-b3de-73a3891fb342\" (UID: \"05d983ba-960a-4975-b3de-73a3891fb342\") " Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.013268 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8rkqf\" (UniqueName: \"kubernetes.io/projected/0099ba6c-2da8-471c-bde7-528db23c4faf-kube-api-access-8rkqf\") pod \"0099ba6c-2da8-471c-bde7-528db23c4faf\" (UID: \"0099ba6c-2da8-471c-bde7-528db23c4faf\") " Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.013299 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0099ba6c-2da8-471c-bde7-528db23c4faf-catalog-content\") pod \"0099ba6c-2da8-471c-bde7-528db23c4faf\" (UID: \"0099ba6c-2da8-471c-bde7-528db23c4faf\") " Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.013357 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/05d983ba-960a-4975-b3de-73a3891fb342-utilities\") pod \"05d983ba-960a-4975-b3de-73a3891fb342\" (UID: \"05d983ba-960a-4975-b3de-73a3891fb342\") " Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.013381 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/96b0927f-cc7b-48e8-82bd-bbec85c2b9b2-marketplace-trusted-ca\") pod \"96b0927f-cc7b-48e8-82bd-bbec85c2b9b2\" (UID: \"96b0927f-cc7b-48e8-82bd-bbec85c2b9b2\") " Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.013403 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/03467399-b14f-421a-a7ec-f2a533daed0d-catalog-content\") pod \"03467399-b14f-421a-a7ec-f2a533daed0d\" (UID: \"03467399-b14f-421a-a7ec-f2a533daed0d\") " Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.013490 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/03467399-b14f-421a-a7ec-f2a533daed0d-utilities" (OuterVolumeSpecName: "utilities") pod "03467399-b14f-421a-a7ec-f2a533daed0d" (UID: "03467399-b14f-421a-a7ec-f2a533daed0d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.013705 4558 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/03467399-b14f-421a-a7ec-f2a533daed0d-utilities\") on node \"crc\" DevicePath \"\"" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.013728 4558 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/11fdde87-2c2c-40ba-84e5-e0c93fc58130-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.013740 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9zszb\" (UniqueName: \"kubernetes.io/projected/11fdde87-2c2c-40ba-84e5-e0c93fc58130-kube-api-access-9zszb\") on node \"crc\" DevicePath \"\"" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.013749 4558 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/11fdde87-2c2c-40ba-84e5-e0c93fc58130-utilities\") on node \"crc\" DevicePath \"\"" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.014366 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/05d983ba-960a-4975-b3de-73a3891fb342-utilities" (OuterVolumeSpecName: "utilities") pod "05d983ba-960a-4975-b3de-73a3891fb342" (UID: "05d983ba-960a-4975-b3de-73a3891fb342"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.014549 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/96b0927f-cc7b-48e8-82bd-bbec85c2b9b2-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "96b0927f-cc7b-48e8-82bd-bbec85c2b9b2" (UID: "96b0927f-cc7b-48e8-82bd-bbec85c2b9b2"). InnerVolumeSpecName "marketplace-trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.014858 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0099ba6c-2da8-471c-bde7-528db23c4faf-utilities" (OuterVolumeSpecName: "utilities") pod "0099ba6c-2da8-471c-bde7-528db23c4faf" (UID: "0099ba6c-2da8-471c-bde7-528db23c4faf"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.015023 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b0927f-cc7b-48e8-82bd-bbec85c2b9b2-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "96b0927f-cc7b-48e8-82bd-bbec85c2b9b2" (UID: "96b0927f-cc7b-48e8-82bd-bbec85c2b9b2"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.015107 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b0927f-cc7b-48e8-82bd-bbec85c2b9b2-kube-api-access-nzkhw" (OuterVolumeSpecName: "kube-api-access-nzkhw") pod "96b0927f-cc7b-48e8-82bd-bbec85c2b9b2" (UID: "96b0927f-cc7b-48e8-82bd-bbec85c2b9b2"). InnerVolumeSpecName "kube-api-access-nzkhw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.015697 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/05d983ba-960a-4975-b3de-73a3891fb342-kube-api-access-xz4cs" (OuterVolumeSpecName: "kube-api-access-xz4cs") pod "05d983ba-960a-4975-b3de-73a3891fb342" (UID: "05d983ba-960a-4975-b3de-73a3891fb342"). InnerVolumeSpecName "kube-api-access-xz4cs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.015778 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0099ba6c-2da8-471c-bde7-528db23c4faf-kube-api-access-8rkqf" (OuterVolumeSpecName: "kube-api-access-8rkqf") pod "0099ba6c-2da8-471c-bde7-528db23c4faf" (UID: "0099ba6c-2da8-471c-bde7-528db23c4faf"). InnerVolumeSpecName "kube-api-access-8rkqf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.016596 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/03467399-b14f-421a-a7ec-f2a533daed0d-kube-api-access-9gsqz" (OuterVolumeSpecName: "kube-api-access-9gsqz") pod "03467399-b14f-421a-a7ec-f2a533daed0d" (UID: "03467399-b14f-421a-a7ec-f2a533daed0d"). InnerVolumeSpecName "kube-api-access-9gsqz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.042628 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/05d983ba-960a-4975-b3de-73a3891fb342-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "05d983ba-960a-4975-b3de-73a3891fb342" (UID: "05d983ba-960a-4975-b3de-73a3891fb342"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.067885 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/03467399-b14f-421a-a7ec-f2a533daed0d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "03467399-b14f-421a-a7ec-f2a533daed0d" (UID: "03467399-b14f-421a-a7ec-f2a533daed0d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.106147 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0099ba6c-2da8-471c-bde7-528db23c4faf-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0099ba6c-2da8-471c-bde7-528db23c4faf" (UID: "0099ba6c-2da8-471c-bde7-528db23c4faf"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.114625 4558 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/05d983ba-960a-4975-b3de-73a3891fb342-utilities\") on node \"crc\" DevicePath \"\"" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.114651 4558 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/96b0927f-cc7b-48e8-82bd-bbec85c2b9b2-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.114682 4558 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/03467399-b14f-421a-a7ec-f2a533daed0d-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.114691 4558 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0099ba6c-2da8-471c-bde7-528db23c4faf-utilities\") on node \"crc\" DevicePath \"\"" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.114700 4558 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/96b0927f-cc7b-48e8-82bd-bbec85c2b9b2-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.114708 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzkhw\" (UniqueName: \"kubernetes.io/projected/96b0927f-cc7b-48e8-82bd-bbec85c2b9b2-kube-api-access-nzkhw\") on node \"crc\" DevicePath \"\"" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.114716 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9gsqz\" (UniqueName: \"kubernetes.io/projected/03467399-b14f-421a-a7ec-f2a533daed0d-kube-api-access-9gsqz\") on node \"crc\" DevicePath \"\"" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.114724 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xz4cs\" (UniqueName: \"kubernetes.io/projected/05d983ba-960a-4975-b3de-73a3891fb342-kube-api-access-xz4cs\") on node \"crc\" DevicePath \"\"" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.114732 4558 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/05d983ba-960a-4975-b3de-73a3891fb342-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.114739 4558 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-8rkqf\" (UniqueName: \"kubernetes.io/projected/0099ba6c-2da8-471c-bde7-528db23c4faf-kube-api-access-8rkqf\") on node \"crc\" DevicePath \"\"" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.114746 4558 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0099ba6c-2da8-471c-bde7-528db23c4faf-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.161094 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.171446 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.180206 4558 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.267565 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.406752 4558 generic.go:334] "Generic (PLEG): container finished" podID="11fdde87-2c2c-40ba-84e5-e0c93fc58130" containerID="281a736e412a599971d1daf067155d194b977c9ca8221d102dbcdef3490bd4c3" exitCode=0 Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.406932 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bgqpg" event={"ID":"11fdde87-2c2c-40ba-84e5-e0c93fc58130","Type":"ContainerDied","Data":"281a736e412a599971d1daf067155d194b977c9ca8221d102dbcdef3490bd4c3"} Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.407019 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bgqpg" event={"ID":"11fdde87-2c2c-40ba-84e5-e0c93fc58130","Type":"ContainerDied","Data":"d596ca7d3fba7864e1cfff15aa3f38d10311b062fbda0815f9c424b9939df615"} Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.407037 4558 scope.go:117] "RemoveContainer" containerID="281a736e412a599971d1daf067155d194b977c9ca8221d102dbcdef3490bd4c3" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.406953 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-bgqpg" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.409882 4558 generic.go:334] "Generic (PLEG): container finished" podID="0099ba6c-2da8-471c-bde7-528db23c4faf" containerID="55c2641fdcac002bf4f850acf983db5fdb2a87445738f279b29d89ae8e70ddc5" exitCode=0 Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.409906 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hr9pm" event={"ID":"0099ba6c-2da8-471c-bde7-528db23c4faf","Type":"ContainerDied","Data":"55c2641fdcac002bf4f850acf983db5fdb2a87445738f279b29d89ae8e70ddc5"} Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.409942 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hr9pm" event={"ID":"0099ba6c-2da8-471c-bde7-528db23c4faf","Type":"ContainerDied","Data":"171754b6b52f7171c4db51da5a991465ade514e88f7dd3f142b1d37434e8da8e"} Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.409917 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-hr9pm" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.411738 4558 generic.go:334] "Generic (PLEG): container finished" podID="05d983ba-960a-4975-b3de-73a3891fb342" containerID="bc337f05c8c80f963877487d730b664b8478ace32a99e7d4e2160bcdece78381" exitCode=0 Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.411830 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mndbv" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.412051 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mndbv" event={"ID":"05d983ba-960a-4975-b3de-73a3891fb342","Type":"ContainerDied","Data":"bc337f05c8c80f963877487d730b664b8478ace32a99e7d4e2160bcdece78381"} Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.412081 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mndbv" event={"ID":"05d983ba-960a-4975-b3de-73a3891fb342","Type":"ContainerDied","Data":"26c796cf0b148af39b037b1a478f5a34d889ddff53301973245c4db96f3a0572"} Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.414456 4558 generic.go:334] "Generic (PLEG): container finished" podID="03467399-b14f-421a-a7ec-f2a533daed0d" containerID="838417c7e988767191dc59fe1cdcb6526ce68f568e4415ddcfa65aa9e72754d4" exitCode=0 Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.414524 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-chwxp" event={"ID":"03467399-b14f-421a-a7ec-f2a533daed0d","Type":"ContainerDied","Data":"838417c7e988767191dc59fe1cdcb6526ce68f568e4415ddcfa65aa9e72754d4"} Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.414550 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-chwxp" event={"ID":"03467399-b14f-421a-a7ec-f2a533daed0d","Type":"ContainerDied","Data":"ac14644a9057b92779a5f41b1bd464485b019b2d9080807342e0cfa2eaf6b8ab"} Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.414602 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-chwxp" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.417197 4558 generic.go:334] "Generic (PLEG): container finished" podID="96b0927f-cc7b-48e8-82bd-bbec85c2b9b2" containerID="ca73560949e9fc5dc991b312a1529736b6d7a2bd41cf857a84a1ef3542fbba5c" exitCode=0 Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.417227 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-2j5lz" event={"ID":"96b0927f-cc7b-48e8-82bd-bbec85c2b9b2","Type":"ContainerDied","Data":"ca73560949e9fc5dc991b312a1529736b6d7a2bd41cf857a84a1ef3542fbba5c"} Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.417245 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-2j5lz" event={"ID":"96b0927f-cc7b-48e8-82bd-bbec85c2b9b2","Type":"ContainerDied","Data":"256e657a15a3a936d01b883af8814daec575807ddfc5373e72189273b6ecc468"} Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.417283 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-2j5lz" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.420204 4558 scope.go:117] "RemoveContainer" containerID="6412e7111e737e96044e3c71ef49ed9bbd5ff4e07d685f6a40825af67af9508c" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.430730 4558 scope.go:117] "RemoveContainer" containerID="0d306167c7391c2c6c552b1eb5c696492d5c7c26d4b97f44208762e73700dcd4" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.446919 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-bgqpg"] Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.454363 4558 scope.go:117] "RemoveContainer" containerID="281a736e412a599971d1daf067155d194b977c9ca8221d102dbcdef3490bd4c3" Jan 20 16:46:23 crc kubenswrapper[4558]: E0120 16:46:23.454698 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"281a736e412a599971d1daf067155d194b977c9ca8221d102dbcdef3490bd4c3\": container with ID starting with 281a736e412a599971d1daf067155d194b977c9ca8221d102dbcdef3490bd4c3 not found: ID does not exist" containerID="281a736e412a599971d1daf067155d194b977c9ca8221d102dbcdef3490bd4c3" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.454721 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"281a736e412a599971d1daf067155d194b977c9ca8221d102dbcdef3490bd4c3"} err="failed to get container status \"281a736e412a599971d1daf067155d194b977c9ca8221d102dbcdef3490bd4c3\": rpc error: code = NotFound desc = could not find container \"281a736e412a599971d1daf067155d194b977c9ca8221d102dbcdef3490bd4c3\": container with ID starting with 281a736e412a599971d1daf067155d194b977c9ca8221d102dbcdef3490bd4c3 not found: ID does not exist" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.454738 4558 scope.go:117] "RemoveContainer" containerID="6412e7111e737e96044e3c71ef49ed9bbd5ff4e07d685f6a40825af67af9508c" Jan 20 16:46:23 crc kubenswrapper[4558]: E0120 16:46:23.454951 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6412e7111e737e96044e3c71ef49ed9bbd5ff4e07d685f6a40825af67af9508c\": container with ID starting with 6412e7111e737e96044e3c71ef49ed9bbd5ff4e07d685f6a40825af67af9508c not found: ID does not exist" containerID="6412e7111e737e96044e3c71ef49ed9bbd5ff4e07d685f6a40825af67af9508c" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.454967 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6412e7111e737e96044e3c71ef49ed9bbd5ff4e07d685f6a40825af67af9508c"} err="failed to get container status \"6412e7111e737e96044e3c71ef49ed9bbd5ff4e07d685f6a40825af67af9508c\": rpc error: code = NotFound desc = could not find container \"6412e7111e737e96044e3c71ef49ed9bbd5ff4e07d685f6a40825af67af9508c\": container with ID starting with 6412e7111e737e96044e3c71ef49ed9bbd5ff4e07d685f6a40825af67af9508c not found: ID does not exist" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.454979 4558 scope.go:117] "RemoveContainer" containerID="0d306167c7391c2c6c552b1eb5c696492d5c7c26d4b97f44208762e73700dcd4" Jan 20 16:46:23 crc kubenswrapper[4558]: E0120 16:46:23.455512 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0d306167c7391c2c6c552b1eb5c696492d5c7c26d4b97f44208762e73700dcd4\": container with ID 
starting with 0d306167c7391c2c6c552b1eb5c696492d5c7c26d4b97f44208762e73700dcd4 not found: ID does not exist" containerID="0d306167c7391c2c6c552b1eb5c696492d5c7c26d4b97f44208762e73700dcd4" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.455529 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0d306167c7391c2c6c552b1eb5c696492d5c7c26d4b97f44208762e73700dcd4"} err="failed to get container status \"0d306167c7391c2c6c552b1eb5c696492d5c7c26d4b97f44208762e73700dcd4\": rpc error: code = NotFound desc = could not find container \"0d306167c7391c2c6c552b1eb5c696492d5c7c26d4b97f44208762e73700dcd4\": container with ID starting with 0d306167c7391c2c6c552b1eb5c696492d5c7c26d4b97f44208762e73700dcd4 not found: ID does not exist" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.455541 4558 scope.go:117] "RemoveContainer" containerID="55c2641fdcac002bf4f850acf983db5fdb2a87445738f279b29d89ae8e70ddc5" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.455594 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-bgqpg"] Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.462245 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-hr9pm"] Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.466192 4558 scope.go:117] "RemoveContainer" containerID="bc83a14fde76b44006731fddf6932b220bc2bdf344c8611287f523d2d037bd4a" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.466382 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-hr9pm"] Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.470921 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mndbv"] Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.474703 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-mndbv"] Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.478837 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-chwxp"] Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.484215 4558 scope.go:117] "RemoveContainer" containerID="50fbb7a62ff8ee104b3a72ffc13c1b7169f074bed241d16baec69089a6bbf133" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.484326 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-chwxp"] Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.485297 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-2j5lz"] Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.487620 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-2j5lz"] Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.498984 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-fhnw5"] Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.500394 4558 scope.go:117] "RemoveContainer" containerID="55c2641fdcac002bf4f850acf983db5fdb2a87445738f279b29d89ae8e70ddc5" Jan 20 16:46:23 crc kubenswrapper[4558]: E0120 16:46:23.501183 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"55c2641fdcac002bf4f850acf983db5fdb2a87445738f279b29d89ae8e70ddc5\": container with ID starting with 
55c2641fdcac002bf4f850acf983db5fdb2a87445738f279b29d89ae8e70ddc5 not found: ID does not exist" containerID="55c2641fdcac002bf4f850acf983db5fdb2a87445738f279b29d89ae8e70ddc5" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.501217 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"55c2641fdcac002bf4f850acf983db5fdb2a87445738f279b29d89ae8e70ddc5"} err="failed to get container status \"55c2641fdcac002bf4f850acf983db5fdb2a87445738f279b29d89ae8e70ddc5\": rpc error: code = NotFound desc = could not find container \"55c2641fdcac002bf4f850acf983db5fdb2a87445738f279b29d89ae8e70ddc5\": container with ID starting with 55c2641fdcac002bf4f850acf983db5fdb2a87445738f279b29d89ae8e70ddc5 not found: ID does not exist" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.501237 4558 scope.go:117] "RemoveContainer" containerID="bc83a14fde76b44006731fddf6932b220bc2bdf344c8611287f523d2d037bd4a" Jan 20 16:46:23 crc kubenswrapper[4558]: E0120 16:46:23.501636 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bc83a14fde76b44006731fddf6932b220bc2bdf344c8611287f523d2d037bd4a\": container with ID starting with bc83a14fde76b44006731fddf6932b220bc2bdf344c8611287f523d2d037bd4a not found: ID does not exist" containerID="bc83a14fde76b44006731fddf6932b220bc2bdf344c8611287f523d2d037bd4a" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.501655 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bc83a14fde76b44006731fddf6932b220bc2bdf344c8611287f523d2d037bd4a"} err="failed to get container status \"bc83a14fde76b44006731fddf6932b220bc2bdf344c8611287f523d2d037bd4a\": rpc error: code = NotFound desc = could not find container \"bc83a14fde76b44006731fddf6932b220bc2bdf344c8611287f523d2d037bd4a\": container with ID starting with bc83a14fde76b44006731fddf6932b220bc2bdf344c8611287f523d2d037bd4a not found: ID does not exist" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.501692 4558 scope.go:117] "RemoveContainer" containerID="50fbb7a62ff8ee104b3a72ffc13c1b7169f074bed241d16baec69089a6bbf133" Jan 20 16:46:23 crc kubenswrapper[4558]: E0120 16:46:23.501914 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"50fbb7a62ff8ee104b3a72ffc13c1b7169f074bed241d16baec69089a6bbf133\": container with ID starting with 50fbb7a62ff8ee104b3a72ffc13c1b7169f074bed241d16baec69089a6bbf133 not found: ID does not exist" containerID="50fbb7a62ff8ee104b3a72ffc13c1b7169f074bed241d16baec69089a6bbf133" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.501938 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"50fbb7a62ff8ee104b3a72ffc13c1b7169f074bed241d16baec69089a6bbf133"} err="failed to get container status \"50fbb7a62ff8ee104b3a72ffc13c1b7169f074bed241d16baec69089a6bbf133\": rpc error: code = NotFound desc = could not find container \"50fbb7a62ff8ee104b3a72ffc13c1b7169f074bed241d16baec69089a6bbf133\": container with ID starting with 50fbb7a62ff8ee104b3a72ffc13c1b7169f074bed241d16baec69089a6bbf133 not found: ID does not exist" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.501952 4558 scope.go:117] "RemoveContainer" containerID="bc337f05c8c80f963877487d730b664b8478ace32a99e7d4e2160bcdece78381" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.518328 4558 scope.go:117] "RemoveContainer" 
containerID="012e66d2321004a8253622882671c56121399e29c0a87fb6e64485d5efcf0cc7" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.529940 4558 scope.go:117] "RemoveContainer" containerID="da7a87a8742bf479fa28cd4ee4aa2b5ae97fea463c83fe90474cc551c785b068" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.544914 4558 scope.go:117] "RemoveContainer" containerID="bc337f05c8c80f963877487d730b664b8478ace32a99e7d4e2160bcdece78381" Jan 20 16:46:23 crc kubenswrapper[4558]: E0120 16:46:23.545298 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bc337f05c8c80f963877487d730b664b8478ace32a99e7d4e2160bcdece78381\": container with ID starting with bc337f05c8c80f963877487d730b664b8478ace32a99e7d4e2160bcdece78381 not found: ID does not exist" containerID="bc337f05c8c80f963877487d730b664b8478ace32a99e7d4e2160bcdece78381" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.545325 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bc337f05c8c80f963877487d730b664b8478ace32a99e7d4e2160bcdece78381"} err="failed to get container status \"bc337f05c8c80f963877487d730b664b8478ace32a99e7d4e2160bcdece78381\": rpc error: code = NotFound desc = could not find container \"bc337f05c8c80f963877487d730b664b8478ace32a99e7d4e2160bcdece78381\": container with ID starting with bc337f05c8c80f963877487d730b664b8478ace32a99e7d4e2160bcdece78381 not found: ID does not exist" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.545359 4558 scope.go:117] "RemoveContainer" containerID="012e66d2321004a8253622882671c56121399e29c0a87fb6e64485d5efcf0cc7" Jan 20 16:46:23 crc kubenswrapper[4558]: E0120 16:46:23.545609 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"012e66d2321004a8253622882671c56121399e29c0a87fb6e64485d5efcf0cc7\": container with ID starting with 012e66d2321004a8253622882671c56121399e29c0a87fb6e64485d5efcf0cc7 not found: ID does not exist" containerID="012e66d2321004a8253622882671c56121399e29c0a87fb6e64485d5efcf0cc7" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.545646 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"012e66d2321004a8253622882671c56121399e29c0a87fb6e64485d5efcf0cc7"} err="failed to get container status \"012e66d2321004a8253622882671c56121399e29c0a87fb6e64485d5efcf0cc7\": rpc error: code = NotFound desc = could not find container \"012e66d2321004a8253622882671c56121399e29c0a87fb6e64485d5efcf0cc7\": container with ID starting with 012e66d2321004a8253622882671c56121399e29c0a87fb6e64485d5efcf0cc7 not found: ID does not exist" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.545661 4558 scope.go:117] "RemoveContainer" containerID="da7a87a8742bf479fa28cd4ee4aa2b5ae97fea463c83fe90474cc551c785b068" Jan 20 16:46:23 crc kubenswrapper[4558]: E0120 16:46:23.545938 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"da7a87a8742bf479fa28cd4ee4aa2b5ae97fea463c83fe90474cc551c785b068\": container with ID starting with da7a87a8742bf479fa28cd4ee4aa2b5ae97fea463c83fe90474cc551c785b068 not found: ID does not exist" containerID="da7a87a8742bf479fa28cd4ee4aa2b5ae97fea463c83fe90474cc551c785b068" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.545954 4558 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"da7a87a8742bf479fa28cd4ee4aa2b5ae97fea463c83fe90474cc551c785b068"} err="failed to get container status \"da7a87a8742bf479fa28cd4ee4aa2b5ae97fea463c83fe90474cc551c785b068\": rpc error: code = NotFound desc = could not find container \"da7a87a8742bf479fa28cd4ee4aa2b5ae97fea463c83fe90474cc551c785b068\": container with ID starting with da7a87a8742bf479fa28cd4ee4aa2b5ae97fea463c83fe90474cc551c785b068 not found: ID does not exist" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.545966 4558 scope.go:117] "RemoveContainer" containerID="838417c7e988767191dc59fe1cdcb6526ce68f568e4415ddcfa65aa9e72754d4" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.555712 4558 scope.go:117] "RemoveContainer" containerID="cf9c3da8d1183bd36ea6b5078dc260ec622ca8def9815b582632960b1f34ff04" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.561473 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.565713 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.568211 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.570883 4558 scope.go:117] "RemoveContainer" containerID="2c7513d64d0908c87f7ae5b1a41770fb262c11553ce6d0b68b47acbb1c37bde1" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.582009 4558 scope.go:117] "RemoveContainer" containerID="838417c7e988767191dc59fe1cdcb6526ce68f568e4415ddcfa65aa9e72754d4" Jan 20 16:46:23 crc kubenswrapper[4558]: E0120 16:46:23.582314 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"838417c7e988767191dc59fe1cdcb6526ce68f568e4415ddcfa65aa9e72754d4\": container with ID starting with 838417c7e988767191dc59fe1cdcb6526ce68f568e4415ddcfa65aa9e72754d4 not found: ID does not exist" containerID="838417c7e988767191dc59fe1cdcb6526ce68f568e4415ddcfa65aa9e72754d4" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.582357 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"838417c7e988767191dc59fe1cdcb6526ce68f568e4415ddcfa65aa9e72754d4"} err="failed to get container status \"838417c7e988767191dc59fe1cdcb6526ce68f568e4415ddcfa65aa9e72754d4\": rpc error: code = NotFound desc = could not find container \"838417c7e988767191dc59fe1cdcb6526ce68f568e4415ddcfa65aa9e72754d4\": container with ID starting with 838417c7e988767191dc59fe1cdcb6526ce68f568e4415ddcfa65aa9e72754d4 not found: ID does not exist" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.582376 4558 scope.go:117] "RemoveContainer" containerID="cf9c3da8d1183bd36ea6b5078dc260ec622ca8def9815b582632960b1f34ff04" Jan 20 16:46:23 crc kubenswrapper[4558]: E0120 16:46:23.582740 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cf9c3da8d1183bd36ea6b5078dc260ec622ca8def9815b582632960b1f34ff04\": container with ID starting with cf9c3da8d1183bd36ea6b5078dc260ec622ca8def9815b582632960b1f34ff04 not found: ID does not exist" containerID="cf9c3da8d1183bd36ea6b5078dc260ec622ca8def9815b582632960b1f34ff04" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.582771 4558 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cf9c3da8d1183bd36ea6b5078dc260ec622ca8def9815b582632960b1f34ff04"} err="failed to get container status \"cf9c3da8d1183bd36ea6b5078dc260ec622ca8def9815b582632960b1f34ff04\": rpc error: code = NotFound desc = could not find container \"cf9c3da8d1183bd36ea6b5078dc260ec622ca8def9815b582632960b1f34ff04\": container with ID starting with cf9c3da8d1183bd36ea6b5078dc260ec622ca8def9815b582632960b1f34ff04 not found: ID does not exist" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.582816 4558 scope.go:117] "RemoveContainer" containerID="2c7513d64d0908c87f7ae5b1a41770fb262c11553ce6d0b68b47acbb1c37bde1" Jan 20 16:46:23 crc kubenswrapper[4558]: E0120 16:46:23.583103 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2c7513d64d0908c87f7ae5b1a41770fb262c11553ce6d0b68b47acbb1c37bde1\": container with ID starting with 2c7513d64d0908c87f7ae5b1a41770fb262c11553ce6d0b68b47acbb1c37bde1 not found: ID does not exist" containerID="2c7513d64d0908c87f7ae5b1a41770fb262c11553ce6d0b68b47acbb1c37bde1" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.583136 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2c7513d64d0908c87f7ae5b1a41770fb262c11553ce6d0b68b47acbb1c37bde1"} err="failed to get container status \"2c7513d64d0908c87f7ae5b1a41770fb262c11553ce6d0b68b47acbb1c37bde1\": rpc error: code = NotFound desc = could not find container \"2c7513d64d0908c87f7ae5b1a41770fb262c11553ce6d0b68b47acbb1c37bde1\": container with ID starting with 2c7513d64d0908c87f7ae5b1a41770fb262c11553ce6d0b68b47acbb1c37bde1 not found: ID does not exist" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.583159 4558 scope.go:117] "RemoveContainer" containerID="ca73560949e9fc5dc991b312a1529736b6d7a2bd41cf857a84a1ef3542fbba5c" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.592354 4558 scope.go:117] "RemoveContainer" containerID="ca73560949e9fc5dc991b312a1529736b6d7a2bd41cf857a84a1ef3542fbba5c" Jan 20 16:46:23 crc kubenswrapper[4558]: E0120 16:46:23.592634 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ca73560949e9fc5dc991b312a1529736b6d7a2bd41cf857a84a1ef3542fbba5c\": container with ID starting with ca73560949e9fc5dc991b312a1529736b6d7a2bd41cf857a84a1ef3542fbba5c not found: ID does not exist" containerID="ca73560949e9fc5dc991b312a1529736b6d7a2bd41cf857a84a1ef3542fbba5c" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.592660 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ca73560949e9fc5dc991b312a1529736b6d7a2bd41cf857a84a1ef3542fbba5c"} err="failed to get container status \"ca73560949e9fc5dc991b312a1529736b6d7a2bd41cf857a84a1ef3542fbba5c\": rpc error: code = NotFound desc = could not find container \"ca73560949e9fc5dc991b312a1529736b6d7a2bd41cf857a84a1ef3542fbba5c\": container with ID starting with ca73560949e9fc5dc991b312a1529736b6d7a2bd41cf857a84a1ef3542fbba5c not found: ID does not exist" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.676743 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.700977 4558 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-etcd-operator"/"etcd-operator-config" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.755131 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.796932 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.837379 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-fhnw5"] Jan 20 16:46:23 crc kubenswrapper[4558]: W0120 16:46:23.845993 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6e227740_1076_4ebf_9fd4_b1ae12cc7beb.slice/crio-5f60c92689eba9fec3b18971e9563a9d7d1a0570d74710fa8e76549178c3146e WatchSource:0}: Error finding container 5f60c92689eba9fec3b18971e9563a9d7d1a0570d74710fa8e76549178c3146e: Status 404 returned error can't find the container with id 5f60c92689eba9fec3b18971e9563a9d7d1a0570d74710fa8e76549178c3146e Jan 20 16:46:23 crc kubenswrapper[4558]: I0120 16:46:23.973179 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Jan 20 16:46:24 crc kubenswrapper[4558]: I0120 16:46:24.351882 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Jan 20 16:46:24 crc kubenswrapper[4558]: I0120 16:46:24.425049 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-fhnw5" event={"ID":"6e227740-1076-4ebf-9fd4-b1ae12cc7beb","Type":"ContainerStarted","Data":"4510ffc98dff82d5aa88f860f6779c7b18f24e7adfdadf18a09e4c34cd06111a"} Jan 20 16:46:24 crc kubenswrapper[4558]: I0120 16:46:24.425085 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-fhnw5" event={"ID":"6e227740-1076-4ebf-9fd4-b1ae12cc7beb","Type":"ContainerStarted","Data":"5f60c92689eba9fec3b18971e9563a9d7d1a0570d74710fa8e76549178c3146e"} Jan 20 16:46:24 crc kubenswrapper[4558]: I0120 16:46:24.425906 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-fhnw5" Jan 20 16:46:24 crc kubenswrapper[4558]: I0120 16:46:24.428854 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-fhnw5" Jan 20 16:46:24 crc kubenswrapper[4558]: I0120 16:46:24.438094 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-fhnw5" podStartSLOduration=2.438083098 podStartE2EDuration="2.438083098s" podCreationTimestamp="2026-01-20 16:46:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:46:24.435502059 +0000 UTC m=+278.195840026" watchObservedRunningTime="2026-01-20 16:46:24.438083098 +0000 UTC m=+278.198421066" Jan 20 16:46:24 crc kubenswrapper[4558]: I0120 16:46:24.481679 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Jan 20 16:46:24 crc kubenswrapper[4558]: I0120 16:46:24.558203 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Jan 
20 16:46:24 crc kubenswrapper[4558]: I0120 16:46:24.565429 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Jan 20 16:46:24 crc kubenswrapper[4558]: I0120 16:46:24.570876 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0099ba6c-2da8-471c-bde7-528db23c4faf" path="/var/lib/kubelet/pods/0099ba6c-2da8-471c-bde7-528db23c4faf/volumes" Jan 20 16:46:24 crc kubenswrapper[4558]: I0120 16:46:24.571454 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="03467399-b14f-421a-a7ec-f2a533daed0d" path="/var/lib/kubelet/pods/03467399-b14f-421a-a7ec-f2a533daed0d/volumes" Jan 20 16:46:24 crc kubenswrapper[4558]: I0120 16:46:24.571949 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="05d983ba-960a-4975-b3de-73a3891fb342" path="/var/lib/kubelet/pods/05d983ba-960a-4975-b3de-73a3891fb342/volumes" Jan 20 16:46:24 crc kubenswrapper[4558]: I0120 16:46:24.572483 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="11fdde87-2c2c-40ba-84e5-e0c93fc58130" path="/var/lib/kubelet/pods/11fdde87-2c2c-40ba-84e5-e0c93fc58130/volumes" Jan 20 16:46:24 crc kubenswrapper[4558]: I0120 16:46:24.573003 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b0927f-cc7b-48e8-82bd-bbec85c2b9b2" path="/var/lib/kubelet/pods/96b0927f-cc7b-48e8-82bd-bbec85c2b9b2/volumes" Jan 20 16:46:24 crc kubenswrapper[4558]: I0120 16:46:24.625844 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Jan 20 16:46:24 crc kubenswrapper[4558]: I0120 16:46:24.636210 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Jan 20 16:46:24 crc kubenswrapper[4558]: I0120 16:46:24.666913 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Jan 20 16:46:24 crc kubenswrapper[4558]: I0120 16:46:24.774464 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 20 16:46:24 crc kubenswrapper[4558]: I0120 16:46:24.845970 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Jan 20 16:46:25 crc kubenswrapper[4558]: I0120 16:46:25.025507 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Jan 20 16:46:25 crc kubenswrapper[4558]: I0120 16:46:25.089216 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 20 16:46:25 crc kubenswrapper[4558]: I0120 16:46:25.288580 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Jan 20 16:46:25 crc kubenswrapper[4558]: I0120 16:46:25.365831 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Jan 20 16:46:25 crc kubenswrapper[4558]: I0120 16:46:25.539092 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Jan 20 16:46:25 crc kubenswrapper[4558]: I0120 16:46:25.574199 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Jan 20 16:46:25 crc kubenswrapper[4558]: I0120 16:46:25.760858 4558 reflector.go:368] Caches populated for 
*v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Jan 20 16:46:25 crc kubenswrapper[4558]: I0120 16:46:25.868450 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Jan 20 16:46:25 crc kubenswrapper[4558]: I0120 16:46:25.868982 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Jan 20 16:46:26 crc kubenswrapper[4558]: I0120 16:46:26.034507 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Jan 20 16:46:26 crc kubenswrapper[4558]: I0120 16:46:26.063213 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Jan 20 16:46:26 crc kubenswrapper[4558]: I0120 16:46:26.114658 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 20 16:46:26 crc kubenswrapper[4558]: I0120 16:46:26.576635 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Jan 20 16:46:27 crc kubenswrapper[4558]: I0120 16:46:27.438149 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Jan 20 16:46:27 crc kubenswrapper[4558]: I0120 16:46:27.438384 4558 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="92bd2dbe496a153423db938740852dbba2c997af8aafa002645e9c2b721af012" exitCode=137 Jan 20 16:46:27 crc kubenswrapper[4558]: I0120 16:46:27.964530 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Jan 20 16:46:27 crc kubenswrapper[4558]: I0120 16:46:27.964596 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 20 16:46:28 crc kubenswrapper[4558]: I0120 16:46:28.069478 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 20 16:46:28 crc kubenswrapper[4558]: I0120 16:46:28.069516 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 20 16:46:28 crc kubenswrapper[4558]: I0120 16:46:28.069533 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 20 16:46:28 crc kubenswrapper[4558]: I0120 16:46:28.069555 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 16:46:28 crc kubenswrapper[4558]: I0120 16:46:28.069579 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 20 16:46:28 crc kubenswrapper[4558]: I0120 16:46:28.069597 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 20 16:46:28 crc kubenswrapper[4558]: I0120 16:46:28.069619 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 16:46:28 crc kubenswrapper[4558]: I0120 16:46:28.069656 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 16:46:28 crc kubenswrapper[4558]: I0120 16:46:28.069746 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 16:46:28 crc kubenswrapper[4558]: I0120 16:46:28.069773 4558 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Jan 20 16:46:28 crc kubenswrapper[4558]: I0120 16:46:28.069783 4558 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Jan 20 16:46:28 crc kubenswrapper[4558]: I0120 16:46:28.069791 4558 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Jan 20 16:46:28 crc kubenswrapper[4558]: I0120 16:46:28.076185 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 16:46:28 crc kubenswrapper[4558]: I0120 16:46:28.170794 4558 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Jan 20 16:46:28 crc kubenswrapper[4558]: I0120 16:46:28.171054 4558 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Jan 20 16:46:28 crc kubenswrapper[4558]: I0120 16:46:28.446136 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Jan 20 16:46:28 crc kubenswrapper[4558]: I0120 16:46:28.446211 4558 scope.go:117] "RemoveContainer" containerID="92bd2dbe496a153423db938740852dbba2c997af8aafa002645e9c2b721af012" Jan 20 16:46:28 crc kubenswrapper[4558]: I0120 16:46:28.446307 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 20 16:46:28 crc kubenswrapper[4558]: I0120 16:46:28.574528 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.380274 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-t225g"] Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.380799 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-t225g" podUID="c2b3de8e-9eaa-43d7-a061-674af5e035cb" containerName="controller-manager" containerID="cri-o://e9553821ac5d05520a8e73a629cc2f5168c903ec0af450aef3f2f7532badfa0a" gracePeriod=30 Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.479572 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-vmm7h"] Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.479758 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-vmm7h" podUID="7f63cf5d-23fe-4e8b-a9a7-3ce5c0c30aad" containerName="route-controller-manager" containerID="cri-o://503969b54b9081a8c984c77ef00b3babbe7301561e2b587f91900396fe94ae13" gracePeriod=30 Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.517419 4558 generic.go:334] "Generic (PLEG): container finished" podID="c2b3de8e-9eaa-43d7-a061-674af5e035cb" containerID="e9553821ac5d05520a8e73a629cc2f5168c903ec0af450aef3f2f7532badfa0a" exitCode=0 Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.517549 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-t225g" event={"ID":"c2b3de8e-9eaa-43d7-a061-674af5e035cb","Type":"ContainerDied","Data":"e9553821ac5d05520a8e73a629cc2f5168c903ec0af450aef3f2f7532badfa0a"} Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.657328 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-t225g" Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.771537 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c2b3de8e-9eaa-43d7-a061-674af5e035cb-proxy-ca-bundles\") pod \"c2b3de8e-9eaa-43d7-a061-674af5e035cb\" (UID: \"c2b3de8e-9eaa-43d7-a061-674af5e035cb\") " Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.771627 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c2b3de8e-9eaa-43d7-a061-674af5e035cb-client-ca\") pod \"c2b3de8e-9eaa-43d7-a061-674af5e035cb\" (UID: \"c2b3de8e-9eaa-43d7-a061-674af5e035cb\") " Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.771663 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nfwxp\" (UniqueName: \"kubernetes.io/projected/c2b3de8e-9eaa-43d7-a061-674af5e035cb-kube-api-access-nfwxp\") pod \"c2b3de8e-9eaa-43d7-a061-674af5e035cb\" (UID: \"c2b3de8e-9eaa-43d7-a061-674af5e035cb\") " Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.771685 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c2b3de8e-9eaa-43d7-a061-674af5e035cb-serving-cert\") pod \"c2b3de8e-9eaa-43d7-a061-674af5e035cb\" (UID: \"c2b3de8e-9eaa-43d7-a061-674af5e035cb\") " Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.771718 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c2b3de8e-9eaa-43d7-a061-674af5e035cb-config\") pod \"c2b3de8e-9eaa-43d7-a061-674af5e035cb\" (UID: \"c2b3de8e-9eaa-43d7-a061-674af5e035cb\") " Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.772264 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c2b3de8e-9eaa-43d7-a061-674af5e035cb-client-ca" (OuterVolumeSpecName: "client-ca") pod "c2b3de8e-9eaa-43d7-a061-674af5e035cb" (UID: "c2b3de8e-9eaa-43d7-a061-674af5e035cb"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.772337 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c2b3de8e-9eaa-43d7-a061-674af5e035cb-config" (OuterVolumeSpecName: "config") pod "c2b3de8e-9eaa-43d7-a061-674af5e035cb" (UID: "c2b3de8e-9eaa-43d7-a061-674af5e035cb"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.772688 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c2b3de8e-9eaa-43d7-a061-674af5e035cb-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "c2b3de8e-9eaa-43d7-a061-674af5e035cb" (UID: "c2b3de8e-9eaa-43d7-a061-674af5e035cb"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.774758 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-vmm7h" Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.775910 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2b3de8e-9eaa-43d7-a061-674af5e035cb-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "c2b3de8e-9eaa-43d7-a061-674af5e035cb" (UID: "c2b3de8e-9eaa-43d7-a061-674af5e035cb"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.775938 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c2b3de8e-9eaa-43d7-a061-674af5e035cb-kube-api-access-nfwxp" (OuterVolumeSpecName: "kube-api-access-nfwxp") pod "c2b3de8e-9eaa-43d7-a061-674af5e035cb" (UID: "c2b3de8e-9eaa-43d7-a061-674af5e035cb"). InnerVolumeSpecName "kube-api-access-nfwxp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.847268 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-5bfb6c7bd4-kzh68"] Jan 20 16:46:45 crc kubenswrapper[4558]: E0120 16:46:45.847426 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11fdde87-2c2c-40ba-84e5-e0c93fc58130" containerName="extract-utilities" Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.847438 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="11fdde87-2c2c-40ba-84e5-e0c93fc58130" containerName="extract-utilities" Jan 20 16:46:45 crc kubenswrapper[4558]: E0120 16:46:45.847448 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0099ba6c-2da8-471c-bde7-528db23c4faf" containerName="extract-content" Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.847454 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0099ba6c-2da8-471c-bde7-528db23c4faf" containerName="extract-content" Jan 20 16:46:45 crc kubenswrapper[4558]: E0120 16:46:45.847461 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05d983ba-960a-4975-b3de-73a3891fb342" containerName="extract-utilities" Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.847467 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="05d983ba-960a-4975-b3de-73a3891fb342" containerName="extract-utilities" Jan 20 16:46:45 crc kubenswrapper[4558]: E0120 16:46:45.847473 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0099ba6c-2da8-471c-bde7-528db23c4faf" containerName="registry-server" Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.847478 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0099ba6c-2da8-471c-bde7-528db23c4faf" containerName="registry-server" Jan 20 16:46:45 crc kubenswrapper[4558]: E0120 16:46:45.847486 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="96b0927f-cc7b-48e8-82bd-bbec85c2b9b2" containerName="marketplace-operator" Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.847491 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="96b0927f-cc7b-48e8-82bd-bbec85c2b9b2" containerName="marketplace-operator" Jan 20 16:46:45 crc kubenswrapper[4558]: E0120 16:46:45.847501 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03467399-b14f-421a-a7ec-f2a533daed0d" containerName="registry-server" Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.847508 4558 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="03467399-b14f-421a-a7ec-f2a533daed0d" containerName="registry-server" Jan 20 16:46:45 crc kubenswrapper[4558]: E0120 16:46:45.847515 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05d983ba-960a-4975-b3de-73a3891fb342" containerName="registry-server" Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.847520 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="05d983ba-960a-4975-b3de-73a3891fb342" containerName="registry-server" Jan 20 16:46:45 crc kubenswrapper[4558]: E0120 16:46:45.847527 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03467399-b14f-421a-a7ec-f2a533daed0d" containerName="extract-utilities" Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.847532 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="03467399-b14f-421a-a7ec-f2a533daed0d" containerName="extract-utilities" Jan 20 16:46:45 crc kubenswrapper[4558]: E0120 16:46:45.847539 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05d983ba-960a-4975-b3de-73a3891fb342" containerName="extract-content" Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.847544 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="05d983ba-960a-4975-b3de-73a3891fb342" containerName="extract-content" Jan 20 16:46:45 crc kubenswrapper[4558]: E0120 16:46:45.847553 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2b3de8e-9eaa-43d7-a061-674af5e035cb" containerName="controller-manager" Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.847558 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2b3de8e-9eaa-43d7-a061-674af5e035cb" containerName="controller-manager" Jan 20 16:46:45 crc kubenswrapper[4558]: E0120 16:46:45.847565 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11fdde87-2c2c-40ba-84e5-e0c93fc58130" containerName="extract-content" Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.847571 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="11fdde87-2c2c-40ba-84e5-e0c93fc58130" containerName="extract-content" Jan 20 16:46:45 crc kubenswrapper[4558]: E0120 16:46:45.847578 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03467399-b14f-421a-a7ec-f2a533daed0d" containerName="extract-content" Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.847584 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="03467399-b14f-421a-a7ec-f2a533daed0d" containerName="extract-content" Jan 20 16:46:45 crc kubenswrapper[4558]: E0120 16:46:45.847591 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f63cf5d-23fe-4e8b-a9a7-3ce5c0c30aad" containerName="route-controller-manager" Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.847596 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f63cf5d-23fe-4e8b-a9a7-3ce5c0c30aad" containerName="route-controller-manager" Jan 20 16:46:45 crc kubenswrapper[4558]: E0120 16:46:45.847603 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0099ba6c-2da8-471c-bde7-528db23c4faf" containerName="extract-utilities" Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.847608 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0099ba6c-2da8-471c-bde7-528db23c4faf" containerName="extract-utilities" Jan 20 16:46:45 crc kubenswrapper[4558]: E0120 16:46:45.847615 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11fdde87-2c2c-40ba-84e5-e0c93fc58130" containerName="registry-server" Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.847620 
4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="11fdde87-2c2c-40ba-84e5-e0c93fc58130" containerName="registry-server" Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.847686 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="96b0927f-cc7b-48e8-82bd-bbec85c2b9b2" containerName="marketplace-operator" Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.847694 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="7f63cf5d-23fe-4e8b-a9a7-3ce5c0c30aad" containerName="route-controller-manager" Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.847703 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="05d983ba-960a-4975-b3de-73a3891fb342" containerName="registry-server" Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.847711 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="03467399-b14f-421a-a7ec-f2a533daed0d" containerName="registry-server" Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.847718 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c2b3de8e-9eaa-43d7-a061-674af5e035cb" containerName="controller-manager" Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.847727 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="0099ba6c-2da8-471c-bde7-528db23c4faf" containerName="registry-server" Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.847733 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="11fdde87-2c2c-40ba-84e5-e0c93fc58130" containerName="registry-server" Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.848008 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5bfb6c7bd4-kzh68" Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.856679 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5bfb6c7bd4-kzh68"] Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.872604 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7f63cf5d-23fe-4e8b-a9a7-3ce5c0c30aad-serving-cert\") pod \"7f63cf5d-23fe-4e8b-a9a7-3ce5c0c30aad\" (UID: \"7f63cf5d-23fe-4e8b-a9a7-3ce5c0c30aad\") " Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.872661 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-km9th\" (UniqueName: \"kubernetes.io/projected/7f63cf5d-23fe-4e8b-a9a7-3ce5c0c30aad-kube-api-access-km9th\") pod \"7f63cf5d-23fe-4e8b-a9a7-3ce5c0c30aad\" (UID: \"7f63cf5d-23fe-4e8b-a9a7-3ce5c0c30aad\") " Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.872695 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7f63cf5d-23fe-4e8b-a9a7-3ce5c0c30aad-client-ca\") pod \"7f63cf5d-23fe-4e8b-a9a7-3ce5c0c30aad\" (UID: \"7f63cf5d-23fe-4e8b-a9a7-3ce5c0c30aad\") " Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.872755 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7f63cf5d-23fe-4e8b-a9a7-3ce5c0c30aad-config\") pod \"7f63cf5d-23fe-4e8b-a9a7-3ce5c0c30aad\" (UID: \"7f63cf5d-23fe-4e8b-a9a7-3ce5c0c30aad\") " Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.872875 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/73a27de9-993a-4348-ae1b-c1dabd1585d1-client-ca\") pod \"controller-manager-5bfb6c7bd4-kzh68\" (UID: \"73a27de9-993a-4348-ae1b-c1dabd1585d1\") " pod="openshift-controller-manager/controller-manager-5bfb6c7bd4-kzh68" Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.872945 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xj7lh\" (UniqueName: \"kubernetes.io/projected/73a27de9-993a-4348-ae1b-c1dabd1585d1-kube-api-access-xj7lh\") pod \"controller-manager-5bfb6c7bd4-kzh68\" (UID: \"73a27de9-993a-4348-ae1b-c1dabd1585d1\") " pod="openshift-controller-manager/controller-manager-5bfb6c7bd4-kzh68" Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.872975 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/73a27de9-993a-4348-ae1b-c1dabd1585d1-proxy-ca-bundles\") pod \"controller-manager-5bfb6c7bd4-kzh68\" (UID: \"73a27de9-993a-4348-ae1b-c1dabd1585d1\") " pod="openshift-controller-manager/controller-manager-5bfb6c7bd4-kzh68" Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.872994 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/73a27de9-993a-4348-ae1b-c1dabd1585d1-config\") pod \"controller-manager-5bfb6c7bd4-kzh68\" (UID: \"73a27de9-993a-4348-ae1b-c1dabd1585d1\") " pod="openshift-controller-manager/controller-manager-5bfb6c7bd4-kzh68" Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.873095 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/73a27de9-993a-4348-ae1b-c1dabd1585d1-serving-cert\") pod \"controller-manager-5bfb6c7bd4-kzh68\" (UID: \"73a27de9-993a-4348-ae1b-c1dabd1585d1\") " pod="openshift-controller-manager/controller-manager-5bfb6c7bd4-kzh68" Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.873255 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nfwxp\" (UniqueName: \"kubernetes.io/projected/c2b3de8e-9eaa-43d7-a061-674af5e035cb-kube-api-access-nfwxp\") on node \"crc\" DevicePath \"\"" Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.873268 4558 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c2b3de8e-9eaa-43d7-a061-674af5e035cb-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.873278 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c2b3de8e-9eaa-43d7-a061-674af5e035cb-config\") on node \"crc\" DevicePath \"\"" Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.873287 4558 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c2b3de8e-9eaa-43d7-a061-674af5e035cb-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.873296 4558 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c2b3de8e-9eaa-43d7-a061-674af5e035cb-client-ca\") on node \"crc\" DevicePath \"\"" Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.873424 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7f63cf5d-23fe-4e8b-a9a7-3ce5c0c30aad-client-ca" 
(OuterVolumeSpecName: "client-ca") pod "7f63cf5d-23fe-4e8b-a9a7-3ce5c0c30aad" (UID: "7f63cf5d-23fe-4e8b-a9a7-3ce5c0c30aad"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.873447 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7f63cf5d-23fe-4e8b-a9a7-3ce5c0c30aad-config" (OuterVolumeSpecName: "config") pod "7f63cf5d-23fe-4e8b-a9a7-3ce5c0c30aad" (UID: "7f63cf5d-23fe-4e8b-a9a7-3ce5c0c30aad"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.875288 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7f63cf5d-23fe-4e8b-a9a7-3ce5c0c30aad-kube-api-access-km9th" (OuterVolumeSpecName: "kube-api-access-km9th") pod "7f63cf5d-23fe-4e8b-a9a7-3ce5c0c30aad" (UID: "7f63cf5d-23fe-4e8b-a9a7-3ce5c0c30aad"). InnerVolumeSpecName "kube-api-access-km9th". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.876242 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7f63cf5d-23fe-4e8b-a9a7-3ce5c0c30aad-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7f63cf5d-23fe-4e8b-a9a7-3ce5c0c30aad" (UID: "7f63cf5d-23fe-4e8b-a9a7-3ce5c0c30aad"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.888324 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7b8459dfc9-j9hhh"] Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.888902 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7b8459dfc9-j9hhh" Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.893229 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7b8459dfc9-j9hhh"] Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.974794 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/73a27de9-993a-4348-ae1b-c1dabd1585d1-proxy-ca-bundles\") pod \"controller-manager-5bfb6c7bd4-kzh68\" (UID: \"73a27de9-993a-4348-ae1b-c1dabd1585d1\") " pod="openshift-controller-manager/controller-manager-5bfb6c7bd4-kzh68" Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.974842 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/73a27de9-993a-4348-ae1b-c1dabd1585d1-config\") pod \"controller-manager-5bfb6c7bd4-kzh68\" (UID: \"73a27de9-993a-4348-ae1b-c1dabd1585d1\") " pod="openshift-controller-manager/controller-manager-5bfb6c7bd4-kzh68" Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.974863 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7d8b62ef-1dd6-4575-9db9-807b980136eb-client-ca\") pod \"route-controller-manager-7b8459dfc9-j9hhh\" (UID: \"7d8b62ef-1dd6-4575-9db9-807b980136eb\") " pod="openshift-route-controller-manager/route-controller-manager-7b8459dfc9-j9hhh" Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.974885 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/73a27de9-993a-4348-ae1b-c1dabd1585d1-serving-cert\") pod \"controller-manager-5bfb6c7bd4-kzh68\" (UID: \"73a27de9-993a-4348-ae1b-c1dabd1585d1\") " pod="openshift-controller-manager/controller-manager-5bfb6c7bd4-kzh68" Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.974909 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r9cgw\" (UniqueName: \"kubernetes.io/projected/7d8b62ef-1dd6-4575-9db9-807b980136eb-kube-api-access-r9cgw\") pod \"route-controller-manager-7b8459dfc9-j9hhh\" (UID: \"7d8b62ef-1dd6-4575-9db9-807b980136eb\") " pod="openshift-route-controller-manager/route-controller-manager-7b8459dfc9-j9hhh" Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.974941 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7d8b62ef-1dd6-4575-9db9-807b980136eb-config\") pod \"route-controller-manager-7b8459dfc9-j9hhh\" (UID: \"7d8b62ef-1dd6-4575-9db9-807b980136eb\") " pod="openshift-route-controller-manager/route-controller-manager-7b8459dfc9-j9hhh" Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.974955 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7d8b62ef-1dd6-4575-9db9-807b980136eb-serving-cert\") pod \"route-controller-manager-7b8459dfc9-j9hhh\" (UID: \"7d8b62ef-1dd6-4575-9db9-807b980136eb\") " pod="openshift-route-controller-manager/route-controller-manager-7b8459dfc9-j9hhh" Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.975003 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/73a27de9-993a-4348-ae1b-c1dabd1585d1-client-ca\") pod \"controller-manager-5bfb6c7bd4-kzh68\" (UID: \"73a27de9-993a-4348-ae1b-c1dabd1585d1\") " pod="openshift-controller-manager/controller-manager-5bfb6c7bd4-kzh68" Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.975076 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xj7lh\" (UniqueName: \"kubernetes.io/projected/73a27de9-993a-4348-ae1b-c1dabd1585d1-kube-api-access-xj7lh\") pod \"controller-manager-5bfb6c7bd4-kzh68\" (UID: \"73a27de9-993a-4348-ae1b-c1dabd1585d1\") " pod="openshift-controller-manager/controller-manager-5bfb6c7bd4-kzh68" Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.975117 4558 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7f63cf5d-23fe-4e8b-a9a7-3ce5c0c30aad-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.975127 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-km9th\" (UniqueName: \"kubernetes.io/projected/7f63cf5d-23fe-4e8b-a9a7-3ce5c0c30aad-kube-api-access-km9th\") on node \"crc\" DevicePath \"\"" Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.975136 4558 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7f63cf5d-23fe-4e8b-a9a7-3ce5c0c30aad-client-ca\") on node \"crc\" DevicePath \"\"" Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.975144 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7f63cf5d-23fe-4e8b-a9a7-3ce5c0c30aad-config\") on node \"crc\" DevicePath \"\"" Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.975779 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/73a27de9-993a-4348-ae1b-c1dabd1585d1-client-ca\") pod \"controller-manager-5bfb6c7bd4-kzh68\" (UID: \"73a27de9-993a-4348-ae1b-c1dabd1585d1\") " pod="openshift-controller-manager/controller-manager-5bfb6c7bd4-kzh68" Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.975879 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/73a27de9-993a-4348-ae1b-c1dabd1585d1-proxy-ca-bundles\") pod \"controller-manager-5bfb6c7bd4-kzh68\" (UID: \"73a27de9-993a-4348-ae1b-c1dabd1585d1\") " pod="openshift-controller-manager/controller-manager-5bfb6c7bd4-kzh68" Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.975963 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/73a27de9-993a-4348-ae1b-c1dabd1585d1-config\") pod \"controller-manager-5bfb6c7bd4-kzh68\" (UID: \"73a27de9-993a-4348-ae1b-c1dabd1585d1\") " pod="openshift-controller-manager/controller-manager-5bfb6c7bd4-kzh68" Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.977882 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/73a27de9-993a-4348-ae1b-c1dabd1585d1-serving-cert\") pod \"controller-manager-5bfb6c7bd4-kzh68\" (UID: \"73a27de9-993a-4348-ae1b-c1dabd1585d1\") " pod="openshift-controller-manager/controller-manager-5bfb6c7bd4-kzh68" Jan 20 16:46:45 crc kubenswrapper[4558]: I0120 16:46:45.988726 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xj7lh\" (UniqueName: 
\"kubernetes.io/projected/73a27de9-993a-4348-ae1b-c1dabd1585d1-kube-api-access-xj7lh\") pod \"controller-manager-5bfb6c7bd4-kzh68\" (UID: \"73a27de9-993a-4348-ae1b-c1dabd1585d1\") " pod="openshift-controller-manager/controller-manager-5bfb6c7bd4-kzh68" Jan 20 16:46:46 crc kubenswrapper[4558]: I0120 16:46:46.076070 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7d8b62ef-1dd6-4575-9db9-807b980136eb-client-ca\") pod \"route-controller-manager-7b8459dfc9-j9hhh\" (UID: \"7d8b62ef-1dd6-4575-9db9-807b980136eb\") " pod="openshift-route-controller-manager/route-controller-manager-7b8459dfc9-j9hhh" Jan 20 16:46:46 crc kubenswrapper[4558]: I0120 16:46:46.076125 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r9cgw\" (UniqueName: \"kubernetes.io/projected/7d8b62ef-1dd6-4575-9db9-807b980136eb-kube-api-access-r9cgw\") pod \"route-controller-manager-7b8459dfc9-j9hhh\" (UID: \"7d8b62ef-1dd6-4575-9db9-807b980136eb\") " pod="openshift-route-controller-manager/route-controller-manager-7b8459dfc9-j9hhh" Jan 20 16:46:46 crc kubenswrapper[4558]: I0120 16:46:46.076160 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7d8b62ef-1dd6-4575-9db9-807b980136eb-config\") pod \"route-controller-manager-7b8459dfc9-j9hhh\" (UID: \"7d8b62ef-1dd6-4575-9db9-807b980136eb\") " pod="openshift-route-controller-manager/route-controller-manager-7b8459dfc9-j9hhh" Jan 20 16:46:46 crc kubenswrapper[4558]: I0120 16:46:46.076192 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7d8b62ef-1dd6-4575-9db9-807b980136eb-serving-cert\") pod \"route-controller-manager-7b8459dfc9-j9hhh\" (UID: \"7d8b62ef-1dd6-4575-9db9-807b980136eb\") " pod="openshift-route-controller-manager/route-controller-manager-7b8459dfc9-j9hhh" Jan 20 16:46:46 crc kubenswrapper[4558]: I0120 16:46:46.076805 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7d8b62ef-1dd6-4575-9db9-807b980136eb-client-ca\") pod \"route-controller-manager-7b8459dfc9-j9hhh\" (UID: \"7d8b62ef-1dd6-4575-9db9-807b980136eb\") " pod="openshift-route-controller-manager/route-controller-manager-7b8459dfc9-j9hhh" Jan 20 16:46:46 crc kubenswrapper[4558]: I0120 16:46:46.077112 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7d8b62ef-1dd6-4575-9db9-807b980136eb-config\") pod \"route-controller-manager-7b8459dfc9-j9hhh\" (UID: \"7d8b62ef-1dd6-4575-9db9-807b980136eb\") " pod="openshift-route-controller-manager/route-controller-manager-7b8459dfc9-j9hhh" Jan 20 16:46:46 crc kubenswrapper[4558]: I0120 16:46:46.078811 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7d8b62ef-1dd6-4575-9db9-807b980136eb-serving-cert\") pod \"route-controller-manager-7b8459dfc9-j9hhh\" (UID: \"7d8b62ef-1dd6-4575-9db9-807b980136eb\") " pod="openshift-route-controller-manager/route-controller-manager-7b8459dfc9-j9hhh" Jan 20 16:46:46 crc kubenswrapper[4558]: I0120 16:46:46.089834 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r9cgw\" (UniqueName: \"kubernetes.io/projected/7d8b62ef-1dd6-4575-9db9-807b980136eb-kube-api-access-r9cgw\") pod 
\"route-controller-manager-7b8459dfc9-j9hhh\" (UID: \"7d8b62ef-1dd6-4575-9db9-807b980136eb\") " pod="openshift-route-controller-manager/route-controller-manager-7b8459dfc9-j9hhh" Jan 20 16:46:46 crc kubenswrapper[4558]: I0120 16:46:46.157892 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5bfb6c7bd4-kzh68" Jan 20 16:46:46 crc kubenswrapper[4558]: I0120 16:46:46.198407 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7b8459dfc9-j9hhh" Jan 20 16:46:46 crc kubenswrapper[4558]: I0120 16:46:46.454815 4558 cert_rotation.go:91] certificate rotation detected, shutting down client connections to start using new credentials Jan 20 16:46:46 crc kubenswrapper[4558]: I0120 16:46:46.492498 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5bfb6c7bd4-kzh68"] Jan 20 16:46:46 crc kubenswrapper[4558]: I0120 16:46:46.521885 4558 generic.go:334] "Generic (PLEG): container finished" podID="7f63cf5d-23fe-4e8b-a9a7-3ce5c0c30aad" containerID="503969b54b9081a8c984c77ef00b3babbe7301561e2b587f91900396fe94ae13" exitCode=0 Jan 20 16:46:46 crc kubenswrapper[4558]: I0120 16:46:46.522084 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-vmm7h" event={"ID":"7f63cf5d-23fe-4e8b-a9a7-3ce5c0c30aad","Type":"ContainerDied","Data":"503969b54b9081a8c984c77ef00b3babbe7301561e2b587f91900396fe94ae13"} Jan 20 16:46:46 crc kubenswrapper[4558]: I0120 16:46:46.522199 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-vmm7h" event={"ID":"7f63cf5d-23fe-4e8b-a9a7-3ce5c0c30aad","Type":"ContainerDied","Data":"9f6d91d11789f4fc3597c708c39e57ff6c1b8b642451ac94de240adcd054b40c"} Jan 20 16:46:46 crc kubenswrapper[4558]: I0120 16:46:46.522277 4558 scope.go:117] "RemoveContainer" containerID="503969b54b9081a8c984c77ef00b3babbe7301561e2b587f91900396fe94ae13" Jan 20 16:46:46 crc kubenswrapper[4558]: I0120 16:46:46.522280 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-vmm7h" Jan 20 16:46:46 crc kubenswrapper[4558]: I0120 16:46:46.523884 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5bfb6c7bd4-kzh68" event={"ID":"73a27de9-993a-4348-ae1b-c1dabd1585d1","Type":"ContainerStarted","Data":"996fea73de8c6c147add0044cb831c69576716d3b9b58f6a8fcd4a8fb6382f31"} Jan 20 16:46:46 crc kubenswrapper[4558]: I0120 16:46:46.525392 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-t225g" event={"ID":"c2b3de8e-9eaa-43d7-a061-674af5e035cb","Type":"ContainerDied","Data":"7f0e80865945a529cda1ad18f57e0cab543a8ad4eed6a7d6d2ab9621ef965ce0"} Jan 20 16:46:46 crc kubenswrapper[4558]: I0120 16:46:46.525472 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-t225g" Jan 20 16:46:46 crc kubenswrapper[4558]: I0120 16:46:46.535822 4558 scope.go:117] "RemoveContainer" containerID="503969b54b9081a8c984c77ef00b3babbe7301561e2b587f91900396fe94ae13" Jan 20 16:46:46 crc kubenswrapper[4558]: E0120 16:46:46.536104 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"503969b54b9081a8c984c77ef00b3babbe7301561e2b587f91900396fe94ae13\": container with ID starting with 503969b54b9081a8c984c77ef00b3babbe7301561e2b587f91900396fe94ae13 not found: ID does not exist" containerID="503969b54b9081a8c984c77ef00b3babbe7301561e2b587f91900396fe94ae13" Jan 20 16:46:46 crc kubenswrapper[4558]: I0120 16:46:46.536137 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"503969b54b9081a8c984c77ef00b3babbe7301561e2b587f91900396fe94ae13"} err="failed to get container status \"503969b54b9081a8c984c77ef00b3babbe7301561e2b587f91900396fe94ae13\": rpc error: code = NotFound desc = could not find container \"503969b54b9081a8c984c77ef00b3babbe7301561e2b587f91900396fe94ae13\": container with ID starting with 503969b54b9081a8c984c77ef00b3babbe7301561e2b587f91900396fe94ae13 not found: ID does not exist" Jan 20 16:46:46 crc kubenswrapper[4558]: I0120 16:46:46.536173 4558 scope.go:117] "RemoveContainer" containerID="e9553821ac5d05520a8e73a629cc2f5168c903ec0af450aef3f2f7532badfa0a" Jan 20 16:46:46 crc kubenswrapper[4558]: I0120 16:46:46.537364 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7b8459dfc9-j9hhh"] Jan 20 16:46:46 crc kubenswrapper[4558]: W0120 16:46:46.543893 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7d8b62ef_1dd6_4575_9db9_807b980136eb.slice/crio-64cefaaeaff19af7483a4027962c0453f21ded6c72875463766d36e455eea35f WatchSource:0}: Error finding container 64cefaaeaff19af7483a4027962c0453f21ded6c72875463766d36e455eea35f: Status 404 returned error can't find the container with id 64cefaaeaff19af7483a4027962c0453f21ded6c72875463766d36e455eea35f Jan 20 16:46:46 crc kubenswrapper[4558]: I0120 16:46:46.621342 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-t225g"] Jan 20 16:46:46 crc kubenswrapper[4558]: I0120 16:46:46.625416 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-t225g"] Jan 20 16:46:46 crc kubenswrapper[4558]: I0120 16:46:46.630605 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-vmm7h"] Jan 20 16:46:46 crc kubenswrapper[4558]: I0120 16:46:46.636748 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-vmm7h"] Jan 20 16:46:46 crc kubenswrapper[4558]: I0120 16:46:46.960413 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Jan 20 16:46:47 crc kubenswrapper[4558]: I0120 16:46:47.530577 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7b8459dfc9-j9hhh" event={"ID":"7d8b62ef-1dd6-4575-9db9-807b980136eb","Type":"ContainerStarted","Data":"971fa4e1e2e7c48cbbd184351f9b4f4069306490e26f183fb449e2ee0b21e917"} 
Jan 20 16:46:47 crc kubenswrapper[4558]: I0120 16:46:47.531301 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-7b8459dfc9-j9hhh" Jan 20 16:46:47 crc kubenswrapper[4558]: I0120 16:46:47.531376 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7b8459dfc9-j9hhh" event={"ID":"7d8b62ef-1dd6-4575-9db9-807b980136eb","Type":"ContainerStarted","Data":"64cefaaeaff19af7483a4027962c0453f21ded6c72875463766d36e455eea35f"} Jan 20 16:46:47 crc kubenswrapper[4558]: I0120 16:46:47.532533 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5bfb6c7bd4-kzh68" event={"ID":"73a27de9-993a-4348-ae1b-c1dabd1585d1","Type":"ContainerStarted","Data":"664d5b58bd863050dac8bab84a329dfd68e0a04788de0eeb1adc6cd98b926436"} Jan 20 16:46:47 crc kubenswrapper[4558]: I0120 16:46:47.532713 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-5bfb6c7bd4-kzh68" Jan 20 16:46:47 crc kubenswrapper[4558]: I0120 16:46:47.535174 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-7b8459dfc9-j9hhh" Jan 20 16:46:47 crc kubenswrapper[4558]: I0120 16:46:47.536200 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-5bfb6c7bd4-kzh68" Jan 20 16:46:47 crc kubenswrapper[4558]: I0120 16:46:47.542485 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-7b8459dfc9-j9hhh" podStartSLOduration=2.542474947 podStartE2EDuration="2.542474947s" podCreationTimestamp="2026-01-20 16:46:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:46:47.540776951 +0000 UTC m=+301.301114917" watchObservedRunningTime="2026-01-20 16:46:47.542474947 +0000 UTC m=+301.302812913" Jan 20 16:46:47 crc kubenswrapper[4558]: I0120 16:46:47.566274 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-5bfb6c7bd4-kzh68" podStartSLOduration=2.566261206 podStartE2EDuration="2.566261206s" podCreationTimestamp="2026-01-20 16:46:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:46:47.563422561 +0000 UTC m=+301.323760528" watchObservedRunningTime="2026-01-20 16:46:47.566261206 +0000 UTC m=+301.326599173" Jan 20 16:46:48 crc kubenswrapper[4558]: I0120 16:46:48.570506 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7f63cf5d-23fe-4e8b-a9a7-3ce5c0c30aad" path="/var/lib/kubelet/pods/7f63cf5d-23fe-4e8b-a9a7-3ce5c0c30aad/volumes" Jan 20 16:46:48 crc kubenswrapper[4558]: I0120 16:46:48.571019 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c2b3de8e-9eaa-43d7-a061-674af5e035cb" path="/var/lib/kubelet/pods/c2b3de8e-9eaa-43d7-a061-674af5e035cb/volumes" Jan 20 16:46:48 crc kubenswrapper[4558]: E0120 16:46:48.938317 4558 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: 
[\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7f63cf5d_23fe_4e8b_a9a7_3ce5c0c30aad.slice/crio-9f6d91d11789f4fc3597c708c39e57ff6c1b8b642451ac94de240adcd054b40c\": RecentStats: unable to find data in memory cache]" Jan 20 16:46:58 crc kubenswrapper[4558]: I0120 16:46:58.705213 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 20 16:46:59 crc kubenswrapper[4558]: E0120 16:46:59.029084 4558 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7f63cf5d_23fe_4e8b_a9a7_3ce5c0c30aad.slice/crio-9f6d91d11789f4fc3597c708c39e57ff6c1b8b642451ac94de240adcd054b40c\": RecentStats: unable to find data in memory cache]" Jan 20 16:47:09 crc kubenswrapper[4558]: E0120 16:47:09.119007 4558 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7f63cf5d_23fe_4e8b_a9a7_3ce5c0c30aad.slice/crio-9f6d91d11789f4fc3597c708c39e57ff6c1b8b642451ac94de240adcd054b40c\": RecentStats: unable to find data in memory cache]" Jan 20 16:47:19 crc kubenswrapper[4558]: E0120 16:47:19.204713 4558 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7f63cf5d_23fe_4e8b_a9a7_3ce5c0c30aad.slice/crio-9f6d91d11789f4fc3597c708c39e57ff6c1b8b642451ac94de240adcd054b40c\": RecentStats: unable to find data in memory cache]" Jan 20 16:47:27 crc kubenswrapper[4558]: I0120 16:47:27.329820 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 16:47:27 crc kubenswrapper[4558]: I0120 16:47:27.330187 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 16:47:29 crc kubenswrapper[4558]: E0120 16:47:29.306530 4558 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7f63cf5d_23fe_4e8b_a9a7_3ce5c0c30aad.slice/crio-9f6d91d11789f4fc3597c708c39e57ff6c1b8b642451ac94de240adcd054b40c\": RecentStats: unable to find data in memory cache]" Jan 20 16:47:39 crc kubenswrapper[4558]: E0120 16:47:39.388190 4558 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7f63cf5d_23fe_4e8b_a9a7_3ce5c0c30aad.slice/crio-9f6d91d11789f4fc3597c708c39e57ff6c1b8b642451ac94de240adcd054b40c\": RecentStats: unable to find data in memory cache]" Jan 20 16:47:40 crc kubenswrapper[4558]: I0120 16:47:40.742131 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-lzwqk"] Jan 20 16:47:40 crc kubenswrapper[4558]: I0120 16:47:40.742830 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-lzwqk" Jan 20 16:47:40 crc kubenswrapper[4558]: I0120 16:47:40.748672 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-lzwqk"] Jan 20 16:47:40 crc kubenswrapper[4558]: I0120 16:47:40.855679 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/daa24a53-3e87-4166-a665-f3b71890f430-registry-tls\") pod \"image-registry-66df7c8f76-lzwqk\" (UID: \"daa24a53-3e87-4166-a665-f3b71890f430\") " pod="openshift-image-registry/image-registry-66df7c8f76-lzwqk" Jan 20 16:47:40 crc kubenswrapper[4558]: I0120 16:47:40.855755 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-lzwqk\" (UID: \"daa24a53-3e87-4166-a665-f3b71890f430\") " pod="openshift-image-registry/image-registry-66df7c8f76-lzwqk" Jan 20 16:47:40 crc kubenswrapper[4558]: I0120 16:47:40.855783 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/daa24a53-3e87-4166-a665-f3b71890f430-registry-certificates\") pod \"image-registry-66df7c8f76-lzwqk\" (UID: \"daa24a53-3e87-4166-a665-f3b71890f430\") " pod="openshift-image-registry/image-registry-66df7c8f76-lzwqk" Jan 20 16:47:40 crc kubenswrapper[4558]: I0120 16:47:40.855857 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zs5sq\" (UniqueName: \"kubernetes.io/projected/daa24a53-3e87-4166-a665-f3b71890f430-kube-api-access-zs5sq\") pod \"image-registry-66df7c8f76-lzwqk\" (UID: \"daa24a53-3e87-4166-a665-f3b71890f430\") " pod="openshift-image-registry/image-registry-66df7c8f76-lzwqk" Jan 20 16:47:40 crc kubenswrapper[4558]: I0120 16:47:40.855878 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/daa24a53-3e87-4166-a665-f3b71890f430-ca-trust-extracted\") pod \"image-registry-66df7c8f76-lzwqk\" (UID: \"daa24a53-3e87-4166-a665-f3b71890f430\") " pod="openshift-image-registry/image-registry-66df7c8f76-lzwqk" Jan 20 16:47:40 crc kubenswrapper[4558]: I0120 16:47:40.855907 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/daa24a53-3e87-4166-a665-f3b71890f430-installation-pull-secrets\") pod \"image-registry-66df7c8f76-lzwqk\" (UID: \"daa24a53-3e87-4166-a665-f3b71890f430\") " pod="openshift-image-registry/image-registry-66df7c8f76-lzwqk" Jan 20 16:47:40 crc kubenswrapper[4558]: I0120 16:47:40.855955 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/daa24a53-3e87-4166-a665-f3b71890f430-trusted-ca\") pod \"image-registry-66df7c8f76-lzwqk\" (UID: \"daa24a53-3e87-4166-a665-f3b71890f430\") " pod="openshift-image-registry/image-registry-66df7c8f76-lzwqk" Jan 20 16:47:40 crc kubenswrapper[4558]: I0120 16:47:40.856066 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: 
\"kubernetes.io/projected/daa24a53-3e87-4166-a665-f3b71890f430-bound-sa-token\") pod \"image-registry-66df7c8f76-lzwqk\" (UID: \"daa24a53-3e87-4166-a665-f3b71890f430\") " pod="openshift-image-registry/image-registry-66df7c8f76-lzwqk" Jan 20 16:47:40 crc kubenswrapper[4558]: I0120 16:47:40.870475 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-lzwqk\" (UID: \"daa24a53-3e87-4166-a665-f3b71890f430\") " pod="openshift-image-registry/image-registry-66df7c8f76-lzwqk" Jan 20 16:47:40 crc kubenswrapper[4558]: I0120 16:47:40.957380 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zs5sq\" (UniqueName: \"kubernetes.io/projected/daa24a53-3e87-4166-a665-f3b71890f430-kube-api-access-zs5sq\") pod \"image-registry-66df7c8f76-lzwqk\" (UID: \"daa24a53-3e87-4166-a665-f3b71890f430\") " pod="openshift-image-registry/image-registry-66df7c8f76-lzwqk" Jan 20 16:47:40 crc kubenswrapper[4558]: I0120 16:47:40.957413 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/daa24a53-3e87-4166-a665-f3b71890f430-ca-trust-extracted\") pod \"image-registry-66df7c8f76-lzwqk\" (UID: \"daa24a53-3e87-4166-a665-f3b71890f430\") " pod="openshift-image-registry/image-registry-66df7c8f76-lzwqk" Jan 20 16:47:40 crc kubenswrapper[4558]: I0120 16:47:40.957434 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/daa24a53-3e87-4166-a665-f3b71890f430-installation-pull-secrets\") pod \"image-registry-66df7c8f76-lzwqk\" (UID: \"daa24a53-3e87-4166-a665-f3b71890f430\") " pod="openshift-image-registry/image-registry-66df7c8f76-lzwqk" Jan 20 16:47:40 crc kubenswrapper[4558]: I0120 16:47:40.957460 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/daa24a53-3e87-4166-a665-f3b71890f430-trusted-ca\") pod \"image-registry-66df7c8f76-lzwqk\" (UID: \"daa24a53-3e87-4166-a665-f3b71890f430\") " pod="openshift-image-registry/image-registry-66df7c8f76-lzwqk" Jan 20 16:47:40 crc kubenswrapper[4558]: I0120 16:47:40.957476 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/daa24a53-3e87-4166-a665-f3b71890f430-bound-sa-token\") pod \"image-registry-66df7c8f76-lzwqk\" (UID: \"daa24a53-3e87-4166-a665-f3b71890f430\") " pod="openshift-image-registry/image-registry-66df7c8f76-lzwqk" Jan 20 16:47:40 crc kubenswrapper[4558]: I0120 16:47:40.957496 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/daa24a53-3e87-4166-a665-f3b71890f430-registry-tls\") pod \"image-registry-66df7c8f76-lzwqk\" (UID: \"daa24a53-3e87-4166-a665-f3b71890f430\") " pod="openshift-image-registry/image-registry-66df7c8f76-lzwqk" Jan 20 16:47:40 crc kubenswrapper[4558]: I0120 16:47:40.957523 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/daa24a53-3e87-4166-a665-f3b71890f430-registry-certificates\") pod \"image-registry-66df7c8f76-lzwqk\" (UID: \"daa24a53-3e87-4166-a665-f3b71890f430\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-lzwqk" Jan 20 16:47:40 crc kubenswrapper[4558]: I0120 16:47:40.958135 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/daa24a53-3e87-4166-a665-f3b71890f430-ca-trust-extracted\") pod \"image-registry-66df7c8f76-lzwqk\" (UID: \"daa24a53-3e87-4166-a665-f3b71890f430\") " pod="openshift-image-registry/image-registry-66df7c8f76-lzwqk" Jan 20 16:47:40 crc kubenswrapper[4558]: I0120 16:47:40.958803 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/daa24a53-3e87-4166-a665-f3b71890f430-trusted-ca\") pod \"image-registry-66df7c8f76-lzwqk\" (UID: \"daa24a53-3e87-4166-a665-f3b71890f430\") " pod="openshift-image-registry/image-registry-66df7c8f76-lzwqk" Jan 20 16:47:40 crc kubenswrapper[4558]: I0120 16:47:40.958868 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/daa24a53-3e87-4166-a665-f3b71890f430-registry-certificates\") pod \"image-registry-66df7c8f76-lzwqk\" (UID: \"daa24a53-3e87-4166-a665-f3b71890f430\") " pod="openshift-image-registry/image-registry-66df7c8f76-lzwqk" Jan 20 16:47:40 crc kubenswrapper[4558]: I0120 16:47:40.962070 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/daa24a53-3e87-4166-a665-f3b71890f430-registry-tls\") pod \"image-registry-66df7c8f76-lzwqk\" (UID: \"daa24a53-3e87-4166-a665-f3b71890f430\") " pod="openshift-image-registry/image-registry-66df7c8f76-lzwqk" Jan 20 16:47:40 crc kubenswrapper[4558]: I0120 16:47:40.962073 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/daa24a53-3e87-4166-a665-f3b71890f430-installation-pull-secrets\") pod \"image-registry-66df7c8f76-lzwqk\" (UID: \"daa24a53-3e87-4166-a665-f3b71890f430\") " pod="openshift-image-registry/image-registry-66df7c8f76-lzwqk" Jan 20 16:47:40 crc kubenswrapper[4558]: I0120 16:47:40.970324 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zs5sq\" (UniqueName: \"kubernetes.io/projected/daa24a53-3e87-4166-a665-f3b71890f430-kube-api-access-zs5sq\") pod \"image-registry-66df7c8f76-lzwqk\" (UID: \"daa24a53-3e87-4166-a665-f3b71890f430\") " pod="openshift-image-registry/image-registry-66df7c8f76-lzwqk" Jan 20 16:47:40 crc kubenswrapper[4558]: I0120 16:47:40.971325 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/daa24a53-3e87-4166-a665-f3b71890f430-bound-sa-token\") pod \"image-registry-66df7c8f76-lzwqk\" (UID: \"daa24a53-3e87-4166-a665-f3b71890f430\") " pod="openshift-image-registry/image-registry-66df7c8f76-lzwqk" Jan 20 16:47:41 crc kubenswrapper[4558]: I0120 16:47:41.055511 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-lzwqk" Jan 20 16:47:41 crc kubenswrapper[4558]: I0120 16:47:41.389656 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-lzwqk"] Jan 20 16:47:41 crc kubenswrapper[4558]: I0120 16:47:41.735993 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-lzwqk" event={"ID":"daa24a53-3e87-4166-a665-f3b71890f430","Type":"ContainerStarted","Data":"c852aca1abb49751a5ab4dce1087b825e546fe42455f23a5070e6a6c8e88b410"} Jan 20 16:47:41 crc kubenswrapper[4558]: I0120 16:47:41.736323 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-lzwqk" Jan 20 16:47:41 crc kubenswrapper[4558]: I0120 16:47:41.736337 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-lzwqk" event={"ID":"daa24a53-3e87-4166-a665-f3b71890f430","Type":"ContainerStarted","Data":"66ab1fcadefb548be26c8bf83680c6b448b9b6095400151bbe3fd70c81ee352e"} Jan 20 16:47:41 crc kubenswrapper[4558]: I0120 16:47:41.770285 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-lzwqk" podStartSLOduration=1.770272273 podStartE2EDuration="1.770272273s" podCreationTimestamp="2026-01-20 16:47:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:47:41.768055629 +0000 UTC m=+355.528393595" watchObservedRunningTime="2026-01-20 16:47:41.770272273 +0000 UTC m=+355.530610240" Jan 20 16:47:44 crc kubenswrapper[4558]: I0120 16:47:44.582737 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-hgqdq"] Jan 20 16:47:44 crc kubenswrapper[4558]: I0120 16:47:44.584418 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-hgqdq" Jan 20 16:47:44 crc kubenswrapper[4558]: I0120 16:47:44.585960 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Jan 20 16:47:44 crc kubenswrapper[4558]: I0120 16:47:44.591463 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hgqdq"] Jan 20 16:47:44 crc kubenswrapper[4558]: I0120 16:47:44.703764 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x6gst\" (UniqueName: \"kubernetes.io/projected/da87968b-3eb6-4478-bf20-ec2b8d7bbf95-kube-api-access-x6gst\") pod \"community-operators-hgqdq\" (UID: \"da87968b-3eb6-4478-bf20-ec2b8d7bbf95\") " pod="openshift-marketplace/community-operators-hgqdq" Jan 20 16:47:44 crc kubenswrapper[4558]: I0120 16:47:44.703906 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da87968b-3eb6-4478-bf20-ec2b8d7bbf95-utilities\") pod \"community-operators-hgqdq\" (UID: \"da87968b-3eb6-4478-bf20-ec2b8d7bbf95\") " pod="openshift-marketplace/community-operators-hgqdq" Jan 20 16:47:44 crc kubenswrapper[4558]: I0120 16:47:44.703977 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da87968b-3eb6-4478-bf20-ec2b8d7bbf95-catalog-content\") pod \"community-operators-hgqdq\" (UID: \"da87968b-3eb6-4478-bf20-ec2b8d7bbf95\") " pod="openshift-marketplace/community-operators-hgqdq" Jan 20 16:47:44 crc kubenswrapper[4558]: I0120 16:47:44.778292 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-mhth9"] Jan 20 16:47:44 crc kubenswrapper[4558]: I0120 16:47:44.779101 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-mhth9" Jan 20 16:47:44 crc kubenswrapper[4558]: I0120 16:47:44.782450 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Jan 20 16:47:44 crc kubenswrapper[4558]: I0120 16:47:44.786941 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mhth9"] Jan 20 16:47:44 crc kubenswrapper[4558]: I0120 16:47:44.805595 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x6gst\" (UniqueName: \"kubernetes.io/projected/da87968b-3eb6-4478-bf20-ec2b8d7bbf95-kube-api-access-x6gst\") pod \"community-operators-hgqdq\" (UID: \"da87968b-3eb6-4478-bf20-ec2b8d7bbf95\") " pod="openshift-marketplace/community-operators-hgqdq" Jan 20 16:47:44 crc kubenswrapper[4558]: I0120 16:47:44.805641 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/236763ea-aede-458b-8f3c-e6ec5627a605-utilities\") pod \"certified-operators-mhth9\" (UID: \"236763ea-aede-458b-8f3c-e6ec5627a605\") " pod="openshift-marketplace/certified-operators-mhth9" Jan 20 16:47:44 crc kubenswrapper[4558]: I0120 16:47:44.805668 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da87968b-3eb6-4478-bf20-ec2b8d7bbf95-utilities\") pod \"community-operators-hgqdq\" (UID: \"da87968b-3eb6-4478-bf20-ec2b8d7bbf95\") " pod="openshift-marketplace/community-operators-hgqdq" Jan 20 16:47:44 crc kubenswrapper[4558]: I0120 16:47:44.805715 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sw9wg\" (UniqueName: \"kubernetes.io/projected/236763ea-aede-458b-8f3c-e6ec5627a605-kube-api-access-sw9wg\") pod \"certified-operators-mhth9\" (UID: \"236763ea-aede-458b-8f3c-e6ec5627a605\") " pod="openshift-marketplace/certified-operators-mhth9" Jan 20 16:47:44 crc kubenswrapper[4558]: I0120 16:47:44.805743 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da87968b-3eb6-4478-bf20-ec2b8d7bbf95-catalog-content\") pod \"community-operators-hgqdq\" (UID: \"da87968b-3eb6-4478-bf20-ec2b8d7bbf95\") " pod="openshift-marketplace/community-operators-hgqdq" Jan 20 16:47:44 crc kubenswrapper[4558]: I0120 16:47:44.805787 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/236763ea-aede-458b-8f3c-e6ec5627a605-catalog-content\") pod \"certified-operators-mhth9\" (UID: \"236763ea-aede-458b-8f3c-e6ec5627a605\") " pod="openshift-marketplace/certified-operators-mhth9" Jan 20 16:47:44 crc kubenswrapper[4558]: I0120 16:47:44.806117 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/da87968b-3eb6-4478-bf20-ec2b8d7bbf95-catalog-content\") pod \"community-operators-hgqdq\" (UID: \"da87968b-3eb6-4478-bf20-ec2b8d7bbf95\") " pod="openshift-marketplace/community-operators-hgqdq" Jan 20 16:47:44 crc kubenswrapper[4558]: I0120 16:47:44.806205 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/da87968b-3eb6-4478-bf20-ec2b8d7bbf95-utilities\") pod \"community-operators-hgqdq\" (UID: 
\"da87968b-3eb6-4478-bf20-ec2b8d7bbf95\") " pod="openshift-marketplace/community-operators-hgqdq" Jan 20 16:47:44 crc kubenswrapper[4558]: I0120 16:47:44.822192 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x6gst\" (UniqueName: \"kubernetes.io/projected/da87968b-3eb6-4478-bf20-ec2b8d7bbf95-kube-api-access-x6gst\") pod \"community-operators-hgqdq\" (UID: \"da87968b-3eb6-4478-bf20-ec2b8d7bbf95\") " pod="openshift-marketplace/community-operators-hgqdq" Jan 20 16:47:44 crc kubenswrapper[4558]: I0120 16:47:44.896242 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hgqdq" Jan 20 16:47:44 crc kubenswrapper[4558]: I0120 16:47:44.907837 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/236763ea-aede-458b-8f3c-e6ec5627a605-catalog-content\") pod \"certified-operators-mhth9\" (UID: \"236763ea-aede-458b-8f3c-e6ec5627a605\") " pod="openshift-marketplace/certified-operators-mhth9" Jan 20 16:47:44 crc kubenswrapper[4558]: I0120 16:47:44.907896 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/236763ea-aede-458b-8f3c-e6ec5627a605-utilities\") pod \"certified-operators-mhth9\" (UID: \"236763ea-aede-458b-8f3c-e6ec5627a605\") " pod="openshift-marketplace/certified-operators-mhth9" Jan 20 16:47:44 crc kubenswrapper[4558]: I0120 16:47:44.907925 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sw9wg\" (UniqueName: \"kubernetes.io/projected/236763ea-aede-458b-8f3c-e6ec5627a605-kube-api-access-sw9wg\") pod \"certified-operators-mhth9\" (UID: \"236763ea-aede-458b-8f3c-e6ec5627a605\") " pod="openshift-marketplace/certified-operators-mhth9" Jan 20 16:47:44 crc kubenswrapper[4558]: I0120 16:47:44.908283 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/236763ea-aede-458b-8f3c-e6ec5627a605-catalog-content\") pod \"certified-operators-mhth9\" (UID: \"236763ea-aede-458b-8f3c-e6ec5627a605\") " pod="openshift-marketplace/certified-operators-mhth9" Jan 20 16:47:44 crc kubenswrapper[4558]: I0120 16:47:44.908334 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/236763ea-aede-458b-8f3c-e6ec5627a605-utilities\") pod \"certified-operators-mhth9\" (UID: \"236763ea-aede-458b-8f3c-e6ec5627a605\") " pod="openshift-marketplace/certified-operators-mhth9" Jan 20 16:47:44 crc kubenswrapper[4558]: I0120 16:47:44.920737 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sw9wg\" (UniqueName: \"kubernetes.io/projected/236763ea-aede-458b-8f3c-e6ec5627a605-kube-api-access-sw9wg\") pod \"certified-operators-mhth9\" (UID: \"236763ea-aede-458b-8f3c-e6ec5627a605\") " pod="openshift-marketplace/certified-operators-mhth9" Jan 20 16:47:45 crc kubenswrapper[4558]: I0120 16:47:45.089997 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-mhth9" Jan 20 16:47:45 crc kubenswrapper[4558]: I0120 16:47:45.227664 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hgqdq"] Jan 20 16:47:45 crc kubenswrapper[4558]: W0120 16:47:45.230282 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podda87968b_3eb6_4478_bf20_ec2b8d7bbf95.slice/crio-bf5b4e26b49ad8e86db59be85b0927905994dbe9693cc916a6c7485ad76c0c74 WatchSource:0}: Error finding container bf5b4e26b49ad8e86db59be85b0927905994dbe9693cc916a6c7485ad76c0c74: Status 404 returned error can't find the container with id bf5b4e26b49ad8e86db59be85b0927905994dbe9693cc916a6c7485ad76c0c74 Jan 20 16:47:45 crc kubenswrapper[4558]: I0120 16:47:45.370975 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7b8459dfc9-j9hhh"] Jan 20 16:47:45 crc kubenswrapper[4558]: I0120 16:47:45.371428 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-7b8459dfc9-j9hhh" podUID="7d8b62ef-1dd6-4575-9db9-807b980136eb" containerName="route-controller-manager" containerID="cri-o://971fa4e1e2e7c48cbbd184351f9b4f4069306490e26f183fb449e2ee0b21e917" gracePeriod=30 Jan 20 16:47:45 crc kubenswrapper[4558]: I0120 16:47:45.412720 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mhth9"] Jan 20 16:47:45 crc kubenswrapper[4558]: W0120 16:47:45.461005 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod236763ea_aede_458b_8f3c_e6ec5627a605.slice/crio-e2c6db6cfb5b52f79841a980c3830e41e4e8e0da3c3b795eca2973fbfef7a06e WatchSource:0}: Error finding container e2c6db6cfb5b52f79841a980c3830e41e4e8e0da3c3b795eca2973fbfef7a06e: Status 404 returned error can't find the container with id e2c6db6cfb5b52f79841a980c3830e41e4e8e0da3c3b795eca2973fbfef7a06e Jan 20 16:47:45 crc kubenswrapper[4558]: I0120 16:47:45.640416 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7b8459dfc9-j9hhh" Jan 20 16:47:45 crc kubenswrapper[4558]: I0120 16:47:45.718087 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7d8b62ef-1dd6-4575-9db9-807b980136eb-client-ca\") pod \"7d8b62ef-1dd6-4575-9db9-807b980136eb\" (UID: \"7d8b62ef-1dd6-4575-9db9-807b980136eb\") " Jan 20 16:47:45 crc kubenswrapper[4558]: I0120 16:47:45.718180 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7d8b62ef-1dd6-4575-9db9-807b980136eb-config\") pod \"7d8b62ef-1dd6-4575-9db9-807b980136eb\" (UID: \"7d8b62ef-1dd6-4575-9db9-807b980136eb\") " Jan 20 16:47:45 crc kubenswrapper[4558]: I0120 16:47:45.718228 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7d8b62ef-1dd6-4575-9db9-807b980136eb-serving-cert\") pod \"7d8b62ef-1dd6-4575-9db9-807b980136eb\" (UID: \"7d8b62ef-1dd6-4575-9db9-807b980136eb\") " Jan 20 16:47:45 crc kubenswrapper[4558]: I0120 16:47:45.718772 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7d8b62ef-1dd6-4575-9db9-807b980136eb-client-ca" (OuterVolumeSpecName: "client-ca") pod "7d8b62ef-1dd6-4575-9db9-807b980136eb" (UID: "7d8b62ef-1dd6-4575-9db9-807b980136eb"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:47:45 crc kubenswrapper[4558]: I0120 16:47:45.718785 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7d8b62ef-1dd6-4575-9db9-807b980136eb-config" (OuterVolumeSpecName: "config") pod "7d8b62ef-1dd6-4575-9db9-807b980136eb" (UID: "7d8b62ef-1dd6-4575-9db9-807b980136eb"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:47:45 crc kubenswrapper[4558]: I0120 16:47:45.718932 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r9cgw\" (UniqueName: \"kubernetes.io/projected/7d8b62ef-1dd6-4575-9db9-807b980136eb-kube-api-access-r9cgw\") pod \"7d8b62ef-1dd6-4575-9db9-807b980136eb\" (UID: \"7d8b62ef-1dd6-4575-9db9-807b980136eb\") " Jan 20 16:47:45 crc kubenswrapper[4558]: I0120 16:47:45.719354 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7d8b62ef-1dd6-4575-9db9-807b980136eb-config\") on node \"crc\" DevicePath \"\"" Jan 20 16:47:45 crc kubenswrapper[4558]: I0120 16:47:45.719371 4558 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7d8b62ef-1dd6-4575-9db9-807b980136eb-client-ca\") on node \"crc\" DevicePath \"\"" Jan 20 16:47:45 crc kubenswrapper[4558]: I0120 16:47:45.722424 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7d8b62ef-1dd6-4575-9db9-807b980136eb-kube-api-access-r9cgw" (OuterVolumeSpecName: "kube-api-access-r9cgw") pod "7d8b62ef-1dd6-4575-9db9-807b980136eb" (UID: "7d8b62ef-1dd6-4575-9db9-807b980136eb"). InnerVolumeSpecName "kube-api-access-r9cgw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:47:45 crc kubenswrapper[4558]: I0120 16:47:45.722478 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d8b62ef-1dd6-4575-9db9-807b980136eb-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7d8b62ef-1dd6-4575-9db9-807b980136eb" (UID: "7d8b62ef-1dd6-4575-9db9-807b980136eb"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:47:45 crc kubenswrapper[4558]: I0120 16:47:45.751816 4558 generic.go:334] "Generic (PLEG): container finished" podID="236763ea-aede-458b-8f3c-e6ec5627a605" containerID="01129e26dee8b3dadd62d4b58f53182bc18ceb362bdb5e1f94d88321e95445a4" exitCode=0 Jan 20 16:47:45 crc kubenswrapper[4558]: I0120 16:47:45.751886 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mhth9" event={"ID":"236763ea-aede-458b-8f3c-e6ec5627a605","Type":"ContainerDied","Data":"01129e26dee8b3dadd62d4b58f53182bc18ceb362bdb5e1f94d88321e95445a4"} Jan 20 16:47:45 crc kubenswrapper[4558]: I0120 16:47:45.751909 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mhth9" event={"ID":"236763ea-aede-458b-8f3c-e6ec5627a605","Type":"ContainerStarted","Data":"e2c6db6cfb5b52f79841a980c3830e41e4e8e0da3c3b795eca2973fbfef7a06e"} Jan 20 16:47:45 crc kubenswrapper[4558]: I0120 16:47:45.753301 4558 generic.go:334] "Generic (PLEG): container finished" podID="7d8b62ef-1dd6-4575-9db9-807b980136eb" containerID="971fa4e1e2e7c48cbbd184351f9b4f4069306490e26f183fb449e2ee0b21e917" exitCode=0 Jan 20 16:47:45 crc kubenswrapper[4558]: I0120 16:47:45.753389 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7b8459dfc9-j9hhh" Jan 20 16:47:45 crc kubenswrapper[4558]: I0120 16:47:45.753794 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7b8459dfc9-j9hhh" event={"ID":"7d8b62ef-1dd6-4575-9db9-807b980136eb","Type":"ContainerDied","Data":"971fa4e1e2e7c48cbbd184351f9b4f4069306490e26f183fb449e2ee0b21e917"} Jan 20 16:47:45 crc kubenswrapper[4558]: I0120 16:47:45.753859 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7b8459dfc9-j9hhh" event={"ID":"7d8b62ef-1dd6-4575-9db9-807b980136eb","Type":"ContainerDied","Data":"64cefaaeaff19af7483a4027962c0453f21ded6c72875463766d36e455eea35f"} Jan 20 16:47:45 crc kubenswrapper[4558]: I0120 16:47:45.753876 4558 scope.go:117] "RemoveContainer" containerID="971fa4e1e2e7c48cbbd184351f9b4f4069306490e26f183fb449e2ee0b21e917" Jan 20 16:47:45 crc kubenswrapper[4558]: I0120 16:47:45.754968 4558 generic.go:334] "Generic (PLEG): container finished" podID="da87968b-3eb6-4478-bf20-ec2b8d7bbf95" containerID="e8bedc127be059ddd8ed2131bc43cd86f1b24eaf22594f08b7ece8a239b534e4" exitCode=0 Jan 20 16:47:45 crc kubenswrapper[4558]: I0120 16:47:45.755004 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hgqdq" event={"ID":"da87968b-3eb6-4478-bf20-ec2b8d7bbf95","Type":"ContainerDied","Data":"e8bedc127be059ddd8ed2131bc43cd86f1b24eaf22594f08b7ece8a239b534e4"} Jan 20 16:47:45 crc kubenswrapper[4558]: I0120 16:47:45.755020 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hgqdq" 
event={"ID":"da87968b-3eb6-4478-bf20-ec2b8d7bbf95","Type":"ContainerStarted","Data":"bf5b4e26b49ad8e86db59be85b0927905994dbe9693cc916a6c7485ad76c0c74"} Jan 20 16:47:45 crc kubenswrapper[4558]: I0120 16:47:45.770224 4558 scope.go:117] "RemoveContainer" containerID="971fa4e1e2e7c48cbbd184351f9b4f4069306490e26f183fb449e2ee0b21e917" Jan 20 16:47:45 crc kubenswrapper[4558]: I0120 16:47:45.773636 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7b8459dfc9-j9hhh"] Jan 20 16:47:45 crc kubenswrapper[4558]: E0120 16:47:45.773960 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"971fa4e1e2e7c48cbbd184351f9b4f4069306490e26f183fb449e2ee0b21e917\": container with ID starting with 971fa4e1e2e7c48cbbd184351f9b4f4069306490e26f183fb449e2ee0b21e917 not found: ID does not exist" containerID="971fa4e1e2e7c48cbbd184351f9b4f4069306490e26f183fb449e2ee0b21e917" Jan 20 16:47:45 crc kubenswrapper[4558]: I0120 16:47:45.773981 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"971fa4e1e2e7c48cbbd184351f9b4f4069306490e26f183fb449e2ee0b21e917"} err="failed to get container status \"971fa4e1e2e7c48cbbd184351f9b4f4069306490e26f183fb449e2ee0b21e917\": rpc error: code = NotFound desc = could not find container \"971fa4e1e2e7c48cbbd184351f9b4f4069306490e26f183fb449e2ee0b21e917\": container with ID starting with 971fa4e1e2e7c48cbbd184351f9b4f4069306490e26f183fb449e2ee0b21e917 not found: ID does not exist" Jan 20 16:47:45 crc kubenswrapper[4558]: I0120 16:47:45.787844 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7b8459dfc9-j9hhh"] Jan 20 16:47:45 crc kubenswrapper[4558]: I0120 16:47:45.820904 4558 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7d8b62ef-1dd6-4575-9db9-807b980136eb-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 20 16:47:45 crc kubenswrapper[4558]: I0120 16:47:45.820938 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r9cgw\" (UniqueName: \"kubernetes.io/projected/7d8b62ef-1dd6-4575-9db9-807b980136eb-kube-api-access-r9cgw\") on node \"crc\" DevicePath \"\"" Jan 20 16:47:46 crc kubenswrapper[4558]: I0120 16:47:46.393296 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5fcccc99c6-mfpql"] Jan 20 16:47:46 crc kubenswrapper[4558]: E0120 16:47:46.393523 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d8b62ef-1dd6-4575-9db9-807b980136eb" containerName="route-controller-manager" Jan 20 16:47:46 crc kubenswrapper[4558]: I0120 16:47:46.393577 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d8b62ef-1dd6-4575-9db9-807b980136eb" containerName="route-controller-manager" Jan 20 16:47:46 crc kubenswrapper[4558]: I0120 16:47:46.393737 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="7d8b62ef-1dd6-4575-9db9-807b980136eb" containerName="route-controller-manager" Jan 20 16:47:46 crc kubenswrapper[4558]: I0120 16:47:46.394093 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5fcccc99c6-mfpql" Jan 20 16:47:46 crc kubenswrapper[4558]: I0120 16:47:46.396032 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 20 16:47:46 crc kubenswrapper[4558]: I0120 16:47:46.396324 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 20 16:47:46 crc kubenswrapper[4558]: I0120 16:47:46.396451 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 20 16:47:46 crc kubenswrapper[4558]: I0120 16:47:46.396569 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 20 16:47:46 crc kubenswrapper[4558]: I0120 16:47:46.396778 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Jan 20 16:47:46 crc kubenswrapper[4558]: I0120 16:47:46.396895 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 20 16:47:46 crc kubenswrapper[4558]: I0120 16:47:46.401098 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5fcccc99c6-mfpql"] Jan 20 16:47:46 crc kubenswrapper[4558]: I0120 16:47:46.428714 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gkkxh\" (UniqueName: \"kubernetes.io/projected/5ac49f44-c667-49b3-8803-57058032cf09-kube-api-access-gkkxh\") pod \"route-controller-manager-5fcccc99c6-mfpql\" (UID: \"5ac49f44-c667-49b3-8803-57058032cf09\") " pod="openshift-route-controller-manager/route-controller-manager-5fcccc99c6-mfpql" Jan 20 16:47:46 crc kubenswrapper[4558]: I0120 16:47:46.428848 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5ac49f44-c667-49b3-8803-57058032cf09-client-ca\") pod \"route-controller-manager-5fcccc99c6-mfpql\" (UID: \"5ac49f44-c667-49b3-8803-57058032cf09\") " pod="openshift-route-controller-manager/route-controller-manager-5fcccc99c6-mfpql" Jan 20 16:47:46 crc kubenswrapper[4558]: I0120 16:47:46.428981 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5ac49f44-c667-49b3-8803-57058032cf09-serving-cert\") pod \"route-controller-manager-5fcccc99c6-mfpql\" (UID: \"5ac49f44-c667-49b3-8803-57058032cf09\") " pod="openshift-route-controller-manager/route-controller-manager-5fcccc99c6-mfpql" Jan 20 16:47:46 crc kubenswrapper[4558]: I0120 16:47:46.429012 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5ac49f44-c667-49b3-8803-57058032cf09-config\") pod \"route-controller-manager-5fcccc99c6-mfpql\" (UID: \"5ac49f44-c667-49b3-8803-57058032cf09\") " pod="openshift-route-controller-manager/route-controller-manager-5fcccc99c6-mfpql" Jan 20 16:47:46 crc kubenswrapper[4558]: I0120 16:47:46.529854 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gkkxh\" (UniqueName: \"kubernetes.io/projected/5ac49f44-c667-49b3-8803-57058032cf09-kube-api-access-gkkxh\") pod 
\"route-controller-manager-5fcccc99c6-mfpql\" (UID: \"5ac49f44-c667-49b3-8803-57058032cf09\") " pod="openshift-route-controller-manager/route-controller-manager-5fcccc99c6-mfpql" Jan 20 16:47:46 crc kubenswrapper[4558]: I0120 16:47:46.529908 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5ac49f44-c667-49b3-8803-57058032cf09-client-ca\") pod \"route-controller-manager-5fcccc99c6-mfpql\" (UID: \"5ac49f44-c667-49b3-8803-57058032cf09\") " pod="openshift-route-controller-manager/route-controller-manager-5fcccc99c6-mfpql" Jan 20 16:47:46 crc kubenswrapper[4558]: I0120 16:47:46.529959 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5ac49f44-c667-49b3-8803-57058032cf09-serving-cert\") pod \"route-controller-manager-5fcccc99c6-mfpql\" (UID: \"5ac49f44-c667-49b3-8803-57058032cf09\") " pod="openshift-route-controller-manager/route-controller-manager-5fcccc99c6-mfpql" Jan 20 16:47:46 crc kubenswrapper[4558]: I0120 16:47:46.529978 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5ac49f44-c667-49b3-8803-57058032cf09-config\") pod \"route-controller-manager-5fcccc99c6-mfpql\" (UID: \"5ac49f44-c667-49b3-8803-57058032cf09\") " pod="openshift-route-controller-manager/route-controller-manager-5fcccc99c6-mfpql" Jan 20 16:47:46 crc kubenswrapper[4558]: I0120 16:47:46.537861 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 20 16:47:46 crc kubenswrapper[4558]: I0120 16:47:46.537952 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 20 16:47:46 crc kubenswrapper[4558]: I0120 16:47:46.537975 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 20 16:47:46 crc kubenswrapper[4558]: I0120 16:47:46.541190 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5ac49f44-c667-49b3-8803-57058032cf09-client-ca\") pod \"route-controller-manager-5fcccc99c6-mfpql\" (UID: \"5ac49f44-c667-49b3-8803-57058032cf09\") " pod="openshift-route-controller-manager/route-controller-manager-5fcccc99c6-mfpql" Jan 20 16:47:46 crc kubenswrapper[4558]: I0120 16:47:46.541335 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5ac49f44-c667-49b3-8803-57058032cf09-config\") pod \"route-controller-manager-5fcccc99c6-mfpql\" (UID: \"5ac49f44-c667-49b3-8803-57058032cf09\") " pod="openshift-route-controller-manager/route-controller-manager-5fcccc99c6-mfpql" Jan 20 16:47:46 crc kubenswrapper[4558]: I0120 16:47:46.543831 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5ac49f44-c667-49b3-8803-57058032cf09-serving-cert\") pod \"route-controller-manager-5fcccc99c6-mfpql\" (UID: \"5ac49f44-c667-49b3-8803-57058032cf09\") " pod="openshift-route-controller-manager/route-controller-manager-5fcccc99c6-mfpql" Jan 20 16:47:46 crc kubenswrapper[4558]: I0120 16:47:46.555986 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Jan 20 16:47:46 crc kubenswrapper[4558]: I0120 16:47:46.561373 4558 reflector.go:368] Caches 
populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 20 16:47:46 crc kubenswrapper[4558]: I0120 16:47:46.573828 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7d8b62ef-1dd6-4575-9db9-807b980136eb" path="/var/lib/kubelet/pods/7d8b62ef-1dd6-4575-9db9-807b980136eb/volumes" Jan 20 16:47:46 crc kubenswrapper[4558]: I0120 16:47:46.574196 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gkkxh\" (UniqueName: \"kubernetes.io/projected/5ac49f44-c667-49b3-8803-57058032cf09-kube-api-access-gkkxh\") pod \"route-controller-manager-5fcccc99c6-mfpql\" (UID: \"5ac49f44-c667-49b3-8803-57058032cf09\") " pod="openshift-route-controller-manager/route-controller-manager-5fcccc99c6-mfpql" Jan 20 16:47:46 crc kubenswrapper[4558]: I0120 16:47:46.738007 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 20 16:47:46 crc kubenswrapper[4558]: I0120 16:47:46.746903 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5fcccc99c6-mfpql" Jan 20 16:47:46 crc kubenswrapper[4558]: I0120 16:47:46.759991 4558 generic.go:334] "Generic (PLEG): container finished" podID="236763ea-aede-458b-8f3c-e6ec5627a605" containerID="5318abbea033245e7d6ff85032b9a8ae9c8cc272b42b4d5a565025df199ae290" exitCode=0 Jan 20 16:47:46 crc kubenswrapper[4558]: I0120 16:47:46.760045 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mhth9" event={"ID":"236763ea-aede-458b-8f3c-e6ec5627a605","Type":"ContainerDied","Data":"5318abbea033245e7d6ff85032b9a8ae9c8cc272b42b4d5a565025df199ae290"} Jan 20 16:47:46 crc kubenswrapper[4558]: I0120 16:47:46.762019 4558 generic.go:334] "Generic (PLEG): container finished" podID="da87968b-3eb6-4478-bf20-ec2b8d7bbf95" containerID="9d6e076cd8a78273b31a00752bb6421f40526596196d65f7fd5cd065b87dd5ba" exitCode=0 Jan 20 16:47:46 crc kubenswrapper[4558]: I0120 16:47:46.762045 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hgqdq" event={"ID":"da87968b-3eb6-4478-bf20-ec2b8d7bbf95","Type":"ContainerDied","Data":"9d6e076cd8a78273b31a00752bb6421f40526596196d65f7fd5cd065b87dd5ba"} Jan 20 16:47:46 crc kubenswrapper[4558]: I0120 16:47:46.981322 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-fnqb8"] Jan 20 16:47:46 crc kubenswrapper[4558]: I0120 16:47:46.982120 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fnqb8" Jan 20 16:47:46 crc kubenswrapper[4558]: I0120 16:47:46.983615 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Jan 20 16:47:46 crc kubenswrapper[4558]: I0120 16:47:46.989618 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-fnqb8"] Jan 20 16:47:47 crc kubenswrapper[4558]: I0120 16:47:47.035204 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cpj8l\" (UniqueName: \"kubernetes.io/projected/3e16ce3b-593e-4c3d-a1c0-ab3c557da830-kube-api-access-cpj8l\") pod \"redhat-marketplace-fnqb8\" (UID: \"3e16ce3b-593e-4c3d-a1c0-ab3c557da830\") " pod="openshift-marketplace/redhat-marketplace-fnqb8" Jan 20 16:47:47 crc kubenswrapper[4558]: I0120 16:47:47.035385 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3e16ce3b-593e-4c3d-a1c0-ab3c557da830-utilities\") pod \"redhat-marketplace-fnqb8\" (UID: \"3e16ce3b-593e-4c3d-a1c0-ab3c557da830\") " pod="openshift-marketplace/redhat-marketplace-fnqb8" Jan 20 16:47:47 crc kubenswrapper[4558]: I0120 16:47:47.035449 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3e16ce3b-593e-4c3d-a1c0-ab3c557da830-catalog-content\") pod \"redhat-marketplace-fnqb8\" (UID: \"3e16ce3b-593e-4c3d-a1c0-ab3c557da830\") " pod="openshift-marketplace/redhat-marketplace-fnqb8" Jan 20 16:47:47 crc kubenswrapper[4558]: I0120 16:47:47.082785 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5fcccc99c6-mfpql"] Jan 20 16:47:47 crc kubenswrapper[4558]: W0120 16:47:47.087485 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5ac49f44_c667_49b3_8803_57058032cf09.slice/crio-87b858e8755a94bb5a277e3d4fcc00db37e0eeec679694be6c01090a6054b835 WatchSource:0}: Error finding container 87b858e8755a94bb5a277e3d4fcc00db37e0eeec679694be6c01090a6054b835: Status 404 returned error can't find the container with id 87b858e8755a94bb5a277e3d4fcc00db37e0eeec679694be6c01090a6054b835 Jan 20 16:47:47 crc kubenswrapper[4558]: I0120 16:47:47.135982 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cpj8l\" (UniqueName: \"kubernetes.io/projected/3e16ce3b-593e-4c3d-a1c0-ab3c557da830-kube-api-access-cpj8l\") pod \"redhat-marketplace-fnqb8\" (UID: \"3e16ce3b-593e-4c3d-a1c0-ab3c557da830\") " pod="openshift-marketplace/redhat-marketplace-fnqb8" Jan 20 16:47:47 crc kubenswrapper[4558]: I0120 16:47:47.136048 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3e16ce3b-593e-4c3d-a1c0-ab3c557da830-utilities\") pod \"redhat-marketplace-fnqb8\" (UID: \"3e16ce3b-593e-4c3d-a1c0-ab3c557da830\") " pod="openshift-marketplace/redhat-marketplace-fnqb8" Jan 20 16:47:47 crc kubenswrapper[4558]: I0120 16:47:47.136074 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3e16ce3b-593e-4c3d-a1c0-ab3c557da830-catalog-content\") pod \"redhat-marketplace-fnqb8\" (UID: \"3e16ce3b-593e-4c3d-a1c0-ab3c557da830\") 
" pod="openshift-marketplace/redhat-marketplace-fnqb8" Jan 20 16:47:47 crc kubenswrapper[4558]: I0120 16:47:47.136463 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3e16ce3b-593e-4c3d-a1c0-ab3c557da830-catalog-content\") pod \"redhat-marketplace-fnqb8\" (UID: \"3e16ce3b-593e-4c3d-a1c0-ab3c557da830\") " pod="openshift-marketplace/redhat-marketplace-fnqb8" Jan 20 16:47:47 crc kubenswrapper[4558]: I0120 16:47:47.136487 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3e16ce3b-593e-4c3d-a1c0-ab3c557da830-utilities\") pod \"redhat-marketplace-fnqb8\" (UID: \"3e16ce3b-593e-4c3d-a1c0-ab3c557da830\") " pod="openshift-marketplace/redhat-marketplace-fnqb8" Jan 20 16:47:47 crc kubenswrapper[4558]: I0120 16:47:47.150697 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cpj8l\" (UniqueName: \"kubernetes.io/projected/3e16ce3b-593e-4c3d-a1c0-ab3c557da830-kube-api-access-cpj8l\") pod \"redhat-marketplace-fnqb8\" (UID: \"3e16ce3b-593e-4c3d-a1c0-ab3c557da830\") " pod="openshift-marketplace/redhat-marketplace-fnqb8" Jan 20 16:47:47 crc kubenswrapper[4558]: I0120 16:47:47.179980 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-6bqsp"] Jan 20 16:47:47 crc kubenswrapper[4558]: I0120 16:47:47.180931 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6bqsp" Jan 20 16:47:47 crc kubenswrapper[4558]: I0120 16:47:47.182210 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Jan 20 16:47:47 crc kubenswrapper[4558]: I0120 16:47:47.187092 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6bqsp"] Jan 20 16:47:47 crc kubenswrapper[4558]: I0120 16:47:47.237229 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de39c54d-ba42-4c81-8ad7-be41cc1cc50a-catalog-content\") pod \"redhat-operators-6bqsp\" (UID: \"de39c54d-ba42-4c81-8ad7-be41cc1cc50a\") " pod="openshift-marketplace/redhat-operators-6bqsp" Jan 20 16:47:47 crc kubenswrapper[4558]: I0120 16:47:47.237496 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hxb8c\" (UniqueName: \"kubernetes.io/projected/de39c54d-ba42-4c81-8ad7-be41cc1cc50a-kube-api-access-hxb8c\") pod \"redhat-operators-6bqsp\" (UID: \"de39c54d-ba42-4c81-8ad7-be41cc1cc50a\") " pod="openshift-marketplace/redhat-operators-6bqsp" Jan 20 16:47:47 crc kubenswrapper[4558]: I0120 16:47:47.237552 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de39c54d-ba42-4c81-8ad7-be41cc1cc50a-utilities\") pod \"redhat-operators-6bqsp\" (UID: \"de39c54d-ba42-4c81-8ad7-be41cc1cc50a\") " pod="openshift-marketplace/redhat-operators-6bqsp" Jan 20 16:47:47 crc kubenswrapper[4558]: I0120 16:47:47.293026 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-fnqb8" Jan 20 16:47:47 crc kubenswrapper[4558]: I0120 16:47:47.338628 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hxb8c\" (UniqueName: \"kubernetes.io/projected/de39c54d-ba42-4c81-8ad7-be41cc1cc50a-kube-api-access-hxb8c\") pod \"redhat-operators-6bqsp\" (UID: \"de39c54d-ba42-4c81-8ad7-be41cc1cc50a\") " pod="openshift-marketplace/redhat-operators-6bqsp" Jan 20 16:47:47 crc kubenswrapper[4558]: I0120 16:47:47.338662 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de39c54d-ba42-4c81-8ad7-be41cc1cc50a-utilities\") pod \"redhat-operators-6bqsp\" (UID: \"de39c54d-ba42-4c81-8ad7-be41cc1cc50a\") " pod="openshift-marketplace/redhat-operators-6bqsp" Jan 20 16:47:47 crc kubenswrapper[4558]: I0120 16:47:47.338689 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de39c54d-ba42-4c81-8ad7-be41cc1cc50a-catalog-content\") pod \"redhat-operators-6bqsp\" (UID: \"de39c54d-ba42-4c81-8ad7-be41cc1cc50a\") " pod="openshift-marketplace/redhat-operators-6bqsp" Jan 20 16:47:47 crc kubenswrapper[4558]: I0120 16:47:47.339095 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de39c54d-ba42-4c81-8ad7-be41cc1cc50a-utilities\") pod \"redhat-operators-6bqsp\" (UID: \"de39c54d-ba42-4c81-8ad7-be41cc1cc50a\") " pod="openshift-marketplace/redhat-operators-6bqsp" Jan 20 16:47:47 crc kubenswrapper[4558]: I0120 16:47:47.339189 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de39c54d-ba42-4c81-8ad7-be41cc1cc50a-catalog-content\") pod \"redhat-operators-6bqsp\" (UID: \"de39c54d-ba42-4c81-8ad7-be41cc1cc50a\") " pod="openshift-marketplace/redhat-operators-6bqsp" Jan 20 16:47:47 crc kubenswrapper[4558]: I0120 16:47:47.354927 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hxb8c\" (UniqueName: \"kubernetes.io/projected/de39c54d-ba42-4c81-8ad7-be41cc1cc50a-kube-api-access-hxb8c\") pod \"redhat-operators-6bqsp\" (UID: \"de39c54d-ba42-4c81-8ad7-be41cc1cc50a\") " pod="openshift-marketplace/redhat-operators-6bqsp" Jan 20 16:47:47 crc kubenswrapper[4558]: I0120 16:47:47.491763 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-6bqsp" Jan 20 16:47:47 crc kubenswrapper[4558]: I0120 16:47:47.642156 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-fnqb8"] Jan 20 16:47:47 crc kubenswrapper[4558]: W0120 16:47:47.647811 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3e16ce3b_593e_4c3d_a1c0_ab3c557da830.slice/crio-b214677a1fe3350315ac317d50849f33e5ebb0c009dd72db6b1285acecd0b496 WatchSource:0}: Error finding container b214677a1fe3350315ac317d50849f33e5ebb0c009dd72db6b1285acecd0b496: Status 404 returned error can't find the container with id b214677a1fe3350315ac317d50849f33e5ebb0c009dd72db6b1285acecd0b496 Jan 20 16:47:47 crc kubenswrapper[4558]: I0120 16:47:47.767903 4558 generic.go:334] "Generic (PLEG): container finished" podID="3e16ce3b-593e-4c3d-a1c0-ab3c557da830" containerID="0a93cb1a31639ad49c761b54fe625c6d9b17f2eae06fdf21c57b5c88cd5834bd" exitCode=0 Jan 20 16:47:47 crc kubenswrapper[4558]: I0120 16:47:47.767958 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fnqb8" event={"ID":"3e16ce3b-593e-4c3d-a1c0-ab3c557da830","Type":"ContainerDied","Data":"0a93cb1a31639ad49c761b54fe625c6d9b17f2eae06fdf21c57b5c88cd5834bd"} Jan 20 16:47:47 crc kubenswrapper[4558]: I0120 16:47:47.768253 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fnqb8" event={"ID":"3e16ce3b-593e-4c3d-a1c0-ab3c557da830","Type":"ContainerStarted","Data":"b214677a1fe3350315ac317d50849f33e5ebb0c009dd72db6b1285acecd0b496"} Jan 20 16:47:47 crc kubenswrapper[4558]: I0120 16:47:47.770910 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mhth9" event={"ID":"236763ea-aede-458b-8f3c-e6ec5627a605","Type":"ContainerStarted","Data":"06dd4bdb76a15d4317349243b4cf7dba47936c3dfaf57497abdb8ab39c067e8a"} Jan 20 16:47:47 crc kubenswrapper[4558]: I0120 16:47:47.772854 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5fcccc99c6-mfpql" event={"ID":"5ac49f44-c667-49b3-8803-57058032cf09","Type":"ContainerStarted","Data":"6517cf08bdcd009c204ade5eb43606ac48ddd6832e9b1c230224669ccddcd751"} Jan 20 16:47:47 crc kubenswrapper[4558]: I0120 16:47:47.772896 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5fcccc99c6-mfpql" event={"ID":"5ac49f44-c667-49b3-8803-57058032cf09","Type":"ContainerStarted","Data":"87b858e8755a94bb5a277e3d4fcc00db37e0eeec679694be6c01090a6054b835"} Jan 20 16:47:47 crc kubenswrapper[4558]: I0120 16:47:47.772913 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-5fcccc99c6-mfpql" Jan 20 16:47:47 crc kubenswrapper[4558]: I0120 16:47:47.775421 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hgqdq" event={"ID":"da87968b-3eb6-4478-bf20-ec2b8d7bbf95","Type":"ContainerStarted","Data":"770c13661c6422247f16e6e08dcb3fb2e08d1853761a6a04fc26d98ef5445698"} Jan 20 16:47:47 crc kubenswrapper[4558]: I0120 16:47:47.777725 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-5fcccc99c6-mfpql" Jan 20 16:47:47 crc kubenswrapper[4558]: I0120 16:47:47.816421 4558 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-hgqdq" podStartSLOduration=2.240441733 podStartE2EDuration="3.816407331s" podCreationTimestamp="2026-01-20 16:47:44 +0000 UTC" firstStartedPulling="2026-01-20 16:47:45.755880937 +0000 UTC m=+359.516218903" lastFinishedPulling="2026-01-20 16:47:47.331846533 +0000 UTC m=+361.092184501" observedRunningTime="2026-01-20 16:47:47.814283181 +0000 UTC m=+361.574621158" watchObservedRunningTime="2026-01-20 16:47:47.816407331 +0000 UTC m=+361.576745298" Jan 20 16:47:47 crc kubenswrapper[4558]: I0120 16:47:47.828918 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-5fcccc99c6-mfpql" podStartSLOduration=2.828903799 podStartE2EDuration="2.828903799s" podCreationTimestamp="2026-01-20 16:47:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:47:47.826952315 +0000 UTC m=+361.587290282" watchObservedRunningTime="2026-01-20 16:47:47.828903799 +0000 UTC m=+361.589241767" Jan 20 16:47:47 crc kubenswrapper[4558]: I0120 16:47:47.836208 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6bqsp"] Jan 20 16:47:47 crc kubenswrapper[4558]: I0120 16:47:47.842329 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-mhth9" podStartSLOduration=2.315177076 podStartE2EDuration="3.842313952s" podCreationTimestamp="2026-01-20 16:47:44 +0000 UTC" firstStartedPulling="2026-01-20 16:47:45.754584309 +0000 UTC m=+359.514922276" lastFinishedPulling="2026-01-20 16:47:47.281721186 +0000 UTC m=+361.042059152" observedRunningTime="2026-01-20 16:47:47.842021209 +0000 UTC m=+361.602359177" watchObservedRunningTime="2026-01-20 16:47:47.842313952 +0000 UTC m=+361.602651920" Jan 20 16:47:48 crc kubenswrapper[4558]: I0120 16:47:48.781076 4558 generic.go:334] "Generic (PLEG): container finished" podID="de39c54d-ba42-4c81-8ad7-be41cc1cc50a" containerID="92ad17e5910184ff9e29a8584c5d71850f001151c08ff64077b06c193c2249e9" exitCode=0 Jan 20 16:47:48 crc kubenswrapper[4558]: I0120 16:47:48.781172 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6bqsp" event={"ID":"de39c54d-ba42-4c81-8ad7-be41cc1cc50a","Type":"ContainerDied","Data":"92ad17e5910184ff9e29a8584c5d71850f001151c08ff64077b06c193c2249e9"} Jan 20 16:47:48 crc kubenswrapper[4558]: I0120 16:47:48.781519 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6bqsp" event={"ID":"de39c54d-ba42-4c81-8ad7-be41cc1cc50a","Type":"ContainerStarted","Data":"fc045b32d968b216642c2aa23ce5173405159009b66cd550a65fe27ec850b6b6"} Jan 20 16:47:49 crc kubenswrapper[4558]: I0120 16:47:49.787441 4558 generic.go:334] "Generic (PLEG): container finished" podID="3e16ce3b-593e-4c3d-a1c0-ab3c557da830" containerID="18a024d43e36b12b6dd202b7a48fbd6f8cc03d6d3f370b38148f7120ec4bb9de" exitCode=0 Jan 20 16:47:49 crc kubenswrapper[4558]: I0120 16:47:49.787517 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fnqb8" event={"ID":"3e16ce3b-593e-4c3d-a1c0-ab3c557da830","Type":"ContainerDied","Data":"18a024d43e36b12b6dd202b7a48fbd6f8cc03d6d3f370b38148f7120ec4bb9de"} Jan 20 16:47:49 crc kubenswrapper[4558]: I0120 16:47:49.789829 4558 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openshift-marketplace/redhat-operators-6bqsp" event={"ID":"de39c54d-ba42-4c81-8ad7-be41cc1cc50a","Type":"ContainerStarted","Data":"4d1d6026b5db667653a9ab6d841936e591a00a57eb9ee22de72e9332b64914f0"} Jan 20 16:47:50 crc kubenswrapper[4558]: I0120 16:47:50.794688 4558 generic.go:334] "Generic (PLEG): container finished" podID="de39c54d-ba42-4c81-8ad7-be41cc1cc50a" containerID="4d1d6026b5db667653a9ab6d841936e591a00a57eb9ee22de72e9332b64914f0" exitCode=0 Jan 20 16:47:50 crc kubenswrapper[4558]: I0120 16:47:50.794764 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6bqsp" event={"ID":"de39c54d-ba42-4c81-8ad7-be41cc1cc50a","Type":"ContainerDied","Data":"4d1d6026b5db667653a9ab6d841936e591a00a57eb9ee22de72e9332b64914f0"} Jan 20 16:47:50 crc kubenswrapper[4558]: I0120 16:47:50.796395 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-fnqb8" event={"ID":"3e16ce3b-593e-4c3d-a1c0-ab3c557da830","Type":"ContainerStarted","Data":"561a3ead967cb172613d1cfaf5334938180e507ea51fc18c5dcaf6cea313d4e8"} Jan 20 16:47:50 crc kubenswrapper[4558]: I0120 16:47:50.819879 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-fnqb8" podStartSLOduration=2.2810720030000002 podStartE2EDuration="4.819866615s" podCreationTimestamp="2026-01-20 16:47:46 +0000 UTC" firstStartedPulling="2026-01-20 16:47:47.769010133 +0000 UTC m=+361.529348101" lastFinishedPulling="2026-01-20 16:47:50.307804746 +0000 UTC m=+364.068142713" observedRunningTime="2026-01-20 16:47:50.819437966 +0000 UTC m=+364.579775933" watchObservedRunningTime="2026-01-20 16:47:50.819866615 +0000 UTC m=+364.580204582" Jan 20 16:47:51 crc kubenswrapper[4558]: I0120 16:47:51.804135 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6bqsp" event={"ID":"de39c54d-ba42-4c81-8ad7-be41cc1cc50a","Type":"ContainerStarted","Data":"63c7a5b810fb37016641bbc5429eba354b75b0db1651a2a29da3876e4b0212c1"} Jan 20 16:47:51 crc kubenswrapper[4558]: I0120 16:47:51.816404 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-6bqsp" podStartSLOduration=2.049885711 podStartE2EDuration="4.816390405s" podCreationTimestamp="2026-01-20 16:47:47 +0000 UTC" firstStartedPulling="2026-01-20 16:47:48.782461486 +0000 UTC m=+362.542799453" lastFinishedPulling="2026-01-20 16:47:51.54896618 +0000 UTC m=+365.309304147" observedRunningTime="2026-01-20 16:47:51.815261975 +0000 UTC m=+365.575599942" watchObservedRunningTime="2026-01-20 16:47:51.816390405 +0000 UTC m=+365.576728372" Jan 20 16:47:54 crc kubenswrapper[4558]: I0120 16:47:54.896948 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-hgqdq" Jan 20 16:47:54 crc kubenswrapper[4558]: I0120 16:47:54.897496 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-hgqdq" Jan 20 16:47:54 crc kubenswrapper[4558]: I0120 16:47:54.924812 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-hgqdq" Jan 20 16:47:55 crc kubenswrapper[4558]: I0120 16:47:55.090521 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-mhth9" Jan 20 16:47:55 crc kubenswrapper[4558]: I0120 16:47:55.090564 4558 kubelet.go:2542] "SyncLoop (probe)" 
probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-mhth9" Jan 20 16:47:55 crc kubenswrapper[4558]: I0120 16:47:55.117606 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-mhth9" Jan 20 16:47:55 crc kubenswrapper[4558]: I0120 16:47:55.846832 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-mhth9" Jan 20 16:47:55 crc kubenswrapper[4558]: I0120 16:47:55.850102 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-hgqdq" Jan 20 16:47:57 crc kubenswrapper[4558]: I0120 16:47:57.293374 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-fnqb8" Jan 20 16:47:57 crc kubenswrapper[4558]: I0120 16:47:57.293622 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-fnqb8" Jan 20 16:47:57 crc kubenswrapper[4558]: I0120 16:47:57.321270 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-fnqb8" Jan 20 16:47:57 crc kubenswrapper[4558]: I0120 16:47:57.330035 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 16:47:57 crc kubenswrapper[4558]: I0120 16:47:57.330081 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 16:47:57 crc kubenswrapper[4558]: I0120 16:47:57.492655 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-6bqsp" Jan 20 16:47:57 crc kubenswrapper[4558]: I0120 16:47:57.492706 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-6bqsp" Jan 20 16:47:57 crc kubenswrapper[4558]: I0120 16:47:57.519933 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-6bqsp" Jan 20 16:47:57 crc kubenswrapper[4558]: I0120 16:47:57.854594 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-6bqsp" Jan 20 16:47:57 crc kubenswrapper[4558]: I0120 16:47:57.858932 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-fnqb8" Jan 20 16:48:01 crc kubenswrapper[4558]: I0120 16:48:01.060139 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-lzwqk" Jan 20 16:48:01 crc kubenswrapper[4558]: I0120 16:48:01.103872 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-zsdqk"] Jan 20 16:48:05 crc kubenswrapper[4558]: I0120 16:48:05.392613 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-5bfb6c7bd4-kzh68"] Jan 20 16:48:05 crc kubenswrapper[4558]: I0120 16:48:05.393061 4558 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-5bfb6c7bd4-kzh68" podUID="73a27de9-993a-4348-ae1b-c1dabd1585d1" containerName="controller-manager" containerID="cri-o://664d5b58bd863050dac8bab84a329dfd68e0a04788de0eeb1adc6cd98b926436" gracePeriod=30 Jan 20 16:48:05 crc kubenswrapper[4558]: I0120 16:48:05.723221 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5bfb6c7bd4-kzh68" Jan 20 16:48:05 crc kubenswrapper[4558]: I0120 16:48:05.759937 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xj7lh\" (UniqueName: \"kubernetes.io/projected/73a27de9-993a-4348-ae1b-c1dabd1585d1-kube-api-access-xj7lh\") pod \"73a27de9-993a-4348-ae1b-c1dabd1585d1\" (UID: \"73a27de9-993a-4348-ae1b-c1dabd1585d1\") " Jan 20 16:48:05 crc kubenswrapper[4558]: I0120 16:48:05.759995 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/73a27de9-993a-4348-ae1b-c1dabd1585d1-client-ca\") pod \"73a27de9-993a-4348-ae1b-c1dabd1585d1\" (UID: \"73a27de9-993a-4348-ae1b-c1dabd1585d1\") " Jan 20 16:48:05 crc kubenswrapper[4558]: I0120 16:48:05.760050 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/73a27de9-993a-4348-ae1b-c1dabd1585d1-proxy-ca-bundles\") pod \"73a27de9-993a-4348-ae1b-c1dabd1585d1\" (UID: \"73a27de9-993a-4348-ae1b-c1dabd1585d1\") " Jan 20 16:48:05 crc kubenswrapper[4558]: I0120 16:48:05.760088 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/73a27de9-993a-4348-ae1b-c1dabd1585d1-serving-cert\") pod \"73a27de9-993a-4348-ae1b-c1dabd1585d1\" (UID: \"73a27de9-993a-4348-ae1b-c1dabd1585d1\") " Jan 20 16:48:05 crc kubenswrapper[4558]: I0120 16:48:05.760121 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/73a27de9-993a-4348-ae1b-c1dabd1585d1-config\") pod \"73a27de9-993a-4348-ae1b-c1dabd1585d1\" (UID: \"73a27de9-993a-4348-ae1b-c1dabd1585d1\") " Jan 20 16:48:05 crc kubenswrapper[4558]: I0120 16:48:05.761244 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/73a27de9-993a-4348-ae1b-c1dabd1585d1-client-ca" (OuterVolumeSpecName: "client-ca") pod "73a27de9-993a-4348-ae1b-c1dabd1585d1" (UID: "73a27de9-993a-4348-ae1b-c1dabd1585d1"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:48:05 crc kubenswrapper[4558]: I0120 16:48:05.761256 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/73a27de9-993a-4348-ae1b-c1dabd1585d1-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "73a27de9-993a-4348-ae1b-c1dabd1585d1" (UID: "73a27de9-993a-4348-ae1b-c1dabd1585d1"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:48:05 crc kubenswrapper[4558]: I0120 16:48:05.764778 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/73a27de9-993a-4348-ae1b-c1dabd1585d1-config" (OuterVolumeSpecName: "config") pod "73a27de9-993a-4348-ae1b-c1dabd1585d1" (UID: "73a27de9-993a-4348-ae1b-c1dabd1585d1"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:48:05 crc kubenswrapper[4558]: I0120 16:48:05.765559 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/73a27de9-993a-4348-ae1b-c1dabd1585d1-kube-api-access-xj7lh" (OuterVolumeSpecName: "kube-api-access-xj7lh") pod "73a27de9-993a-4348-ae1b-c1dabd1585d1" (UID: "73a27de9-993a-4348-ae1b-c1dabd1585d1"). InnerVolumeSpecName "kube-api-access-xj7lh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:48:05 crc kubenswrapper[4558]: I0120 16:48:05.767450 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73a27de9-993a-4348-ae1b-c1dabd1585d1-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "73a27de9-993a-4348-ae1b-c1dabd1585d1" (UID: "73a27de9-993a-4348-ae1b-c1dabd1585d1"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:48:05 crc kubenswrapper[4558]: I0120 16:48:05.869265 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xj7lh\" (UniqueName: \"kubernetes.io/projected/73a27de9-993a-4348-ae1b-c1dabd1585d1-kube-api-access-xj7lh\") on node \"crc\" DevicePath \"\"" Jan 20 16:48:05 crc kubenswrapper[4558]: I0120 16:48:05.869312 4558 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/73a27de9-993a-4348-ae1b-c1dabd1585d1-client-ca\") on node \"crc\" DevicePath \"\"" Jan 20 16:48:05 crc kubenswrapper[4558]: I0120 16:48:05.869326 4558 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/73a27de9-993a-4348-ae1b-c1dabd1585d1-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 20 16:48:05 crc kubenswrapper[4558]: I0120 16:48:05.869335 4558 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/73a27de9-993a-4348-ae1b-c1dabd1585d1-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 20 16:48:05 crc kubenswrapper[4558]: I0120 16:48:05.869346 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/73a27de9-993a-4348-ae1b-c1dabd1585d1-config\") on node \"crc\" DevicePath \"\"" Jan 20 16:48:05 crc kubenswrapper[4558]: I0120 16:48:05.875027 4558 generic.go:334] "Generic (PLEG): container finished" podID="73a27de9-993a-4348-ae1b-c1dabd1585d1" containerID="664d5b58bd863050dac8bab84a329dfd68e0a04788de0eeb1adc6cd98b926436" exitCode=0 Jan 20 16:48:05 crc kubenswrapper[4558]: I0120 16:48:05.875088 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5bfb6c7bd4-kzh68" event={"ID":"73a27de9-993a-4348-ae1b-c1dabd1585d1","Type":"ContainerDied","Data":"664d5b58bd863050dac8bab84a329dfd68e0a04788de0eeb1adc6cd98b926436"} Jan 20 16:48:05 crc kubenswrapper[4558]: I0120 16:48:05.875115 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-5bfb6c7bd4-kzh68" Jan 20 16:48:05 crc kubenswrapper[4558]: I0120 16:48:05.875139 4558 scope.go:117] "RemoveContainer" containerID="664d5b58bd863050dac8bab84a329dfd68e0a04788de0eeb1adc6cd98b926436" Jan 20 16:48:05 crc kubenswrapper[4558]: I0120 16:48:05.875125 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5bfb6c7bd4-kzh68" event={"ID":"73a27de9-993a-4348-ae1b-c1dabd1585d1","Type":"ContainerDied","Data":"996fea73de8c6c147add0044cb831c69576716d3b9b58f6a8fcd4a8fb6382f31"} Jan 20 16:48:05 crc kubenswrapper[4558]: I0120 16:48:05.888484 4558 scope.go:117] "RemoveContainer" containerID="664d5b58bd863050dac8bab84a329dfd68e0a04788de0eeb1adc6cd98b926436" Jan 20 16:48:05 crc kubenswrapper[4558]: E0120 16:48:05.888934 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"664d5b58bd863050dac8bab84a329dfd68e0a04788de0eeb1adc6cd98b926436\": container with ID starting with 664d5b58bd863050dac8bab84a329dfd68e0a04788de0eeb1adc6cd98b926436 not found: ID does not exist" containerID="664d5b58bd863050dac8bab84a329dfd68e0a04788de0eeb1adc6cd98b926436" Jan 20 16:48:05 crc kubenswrapper[4558]: I0120 16:48:05.888968 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"664d5b58bd863050dac8bab84a329dfd68e0a04788de0eeb1adc6cd98b926436"} err="failed to get container status \"664d5b58bd863050dac8bab84a329dfd68e0a04788de0eeb1adc6cd98b926436\": rpc error: code = NotFound desc = could not find container \"664d5b58bd863050dac8bab84a329dfd68e0a04788de0eeb1adc6cd98b926436\": container with ID starting with 664d5b58bd863050dac8bab84a329dfd68e0a04788de0eeb1adc6cd98b926436 not found: ID does not exist" Jan 20 16:48:05 crc kubenswrapper[4558]: I0120 16:48:05.896678 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-5bfb6c7bd4-kzh68"] Jan 20 16:48:05 crc kubenswrapper[4558]: I0120 16:48:05.899236 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-5bfb6c7bd4-kzh68"] Jan 20 16:48:06 crc kubenswrapper[4558]: I0120 16:48:06.570417 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="73a27de9-993a-4348-ae1b-c1dabd1585d1" path="/var/lib/kubelet/pods/73a27de9-993a-4348-ae1b-c1dabd1585d1/volumes" Jan 20 16:48:07 crc kubenswrapper[4558]: I0120 16:48:07.407197 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-6945f956d8-687db"] Jan 20 16:48:07 crc kubenswrapper[4558]: E0120 16:48:07.407800 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="73a27de9-993a-4348-ae1b-c1dabd1585d1" containerName="controller-manager" Jan 20 16:48:07 crc kubenswrapper[4558]: I0120 16:48:07.407818 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="73a27de9-993a-4348-ae1b-c1dabd1585d1" containerName="controller-manager" Jan 20 16:48:07 crc kubenswrapper[4558]: I0120 16:48:07.407915 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="73a27de9-993a-4348-ae1b-c1dabd1585d1" containerName="controller-manager" Jan 20 16:48:07 crc kubenswrapper[4558]: I0120 16:48:07.408308 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-6945f956d8-687db" Jan 20 16:48:07 crc kubenswrapper[4558]: I0120 16:48:07.410112 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 20 16:48:07 crc kubenswrapper[4558]: I0120 16:48:07.410517 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 20 16:48:07 crc kubenswrapper[4558]: I0120 16:48:07.410819 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 20 16:48:07 crc kubenswrapper[4558]: I0120 16:48:07.410902 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 20 16:48:07 crc kubenswrapper[4558]: I0120 16:48:07.411296 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 20 16:48:07 crc kubenswrapper[4558]: I0120 16:48:07.411342 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 20 16:48:07 crc kubenswrapper[4558]: I0120 16:48:07.420338 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 20 16:48:07 crc kubenswrapper[4558]: I0120 16:48:07.420912 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-6945f956d8-687db"] Jan 20 16:48:07 crc kubenswrapper[4558]: I0120 16:48:07.488641 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7fa84ade-abb1-4488-9be7-e31eb4a37086-proxy-ca-bundles\") pod \"controller-manager-6945f956d8-687db\" (UID: \"7fa84ade-abb1-4488-9be7-e31eb4a37086\") " pod="openshift-controller-manager/controller-manager-6945f956d8-687db" Jan 20 16:48:07 crc kubenswrapper[4558]: I0120 16:48:07.488747 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7fa84ade-abb1-4488-9be7-e31eb4a37086-config\") pod \"controller-manager-6945f956d8-687db\" (UID: \"7fa84ade-abb1-4488-9be7-e31eb4a37086\") " pod="openshift-controller-manager/controller-manager-6945f956d8-687db" Jan 20 16:48:07 crc kubenswrapper[4558]: I0120 16:48:07.488943 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hn6xs\" (UniqueName: \"kubernetes.io/projected/7fa84ade-abb1-4488-9be7-e31eb4a37086-kube-api-access-hn6xs\") pod \"controller-manager-6945f956d8-687db\" (UID: \"7fa84ade-abb1-4488-9be7-e31eb4a37086\") " pod="openshift-controller-manager/controller-manager-6945f956d8-687db" Jan 20 16:48:07 crc kubenswrapper[4558]: I0120 16:48:07.489021 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7fa84ade-abb1-4488-9be7-e31eb4a37086-client-ca\") pod \"controller-manager-6945f956d8-687db\" (UID: \"7fa84ade-abb1-4488-9be7-e31eb4a37086\") " pod="openshift-controller-manager/controller-manager-6945f956d8-687db" Jan 20 16:48:07 crc kubenswrapper[4558]: I0120 16:48:07.489075 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/7fa84ade-abb1-4488-9be7-e31eb4a37086-serving-cert\") pod \"controller-manager-6945f956d8-687db\" (UID: \"7fa84ade-abb1-4488-9be7-e31eb4a37086\") " pod="openshift-controller-manager/controller-manager-6945f956d8-687db" Jan 20 16:48:07 crc kubenswrapper[4558]: I0120 16:48:07.590561 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7fa84ade-abb1-4488-9be7-e31eb4a37086-config\") pod \"controller-manager-6945f956d8-687db\" (UID: \"7fa84ade-abb1-4488-9be7-e31eb4a37086\") " pod="openshift-controller-manager/controller-manager-6945f956d8-687db" Jan 20 16:48:07 crc kubenswrapper[4558]: I0120 16:48:07.590701 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hn6xs\" (UniqueName: \"kubernetes.io/projected/7fa84ade-abb1-4488-9be7-e31eb4a37086-kube-api-access-hn6xs\") pod \"controller-manager-6945f956d8-687db\" (UID: \"7fa84ade-abb1-4488-9be7-e31eb4a37086\") " pod="openshift-controller-manager/controller-manager-6945f956d8-687db" Jan 20 16:48:07 crc kubenswrapper[4558]: I0120 16:48:07.590761 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7fa84ade-abb1-4488-9be7-e31eb4a37086-client-ca\") pod \"controller-manager-6945f956d8-687db\" (UID: \"7fa84ade-abb1-4488-9be7-e31eb4a37086\") " pod="openshift-controller-manager/controller-manager-6945f956d8-687db" Jan 20 16:48:07 crc kubenswrapper[4558]: I0120 16:48:07.590797 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7fa84ade-abb1-4488-9be7-e31eb4a37086-serving-cert\") pod \"controller-manager-6945f956d8-687db\" (UID: \"7fa84ade-abb1-4488-9be7-e31eb4a37086\") " pod="openshift-controller-manager/controller-manager-6945f956d8-687db" Jan 20 16:48:07 crc kubenswrapper[4558]: I0120 16:48:07.590840 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7fa84ade-abb1-4488-9be7-e31eb4a37086-proxy-ca-bundles\") pod \"controller-manager-6945f956d8-687db\" (UID: \"7fa84ade-abb1-4488-9be7-e31eb4a37086\") " pod="openshift-controller-manager/controller-manager-6945f956d8-687db" Jan 20 16:48:07 crc kubenswrapper[4558]: I0120 16:48:07.591718 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7fa84ade-abb1-4488-9be7-e31eb4a37086-client-ca\") pod \"controller-manager-6945f956d8-687db\" (UID: \"7fa84ade-abb1-4488-9be7-e31eb4a37086\") " pod="openshift-controller-manager/controller-manager-6945f956d8-687db" Jan 20 16:48:07 crc kubenswrapper[4558]: I0120 16:48:07.592211 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7fa84ade-abb1-4488-9be7-e31eb4a37086-proxy-ca-bundles\") pod \"controller-manager-6945f956d8-687db\" (UID: \"7fa84ade-abb1-4488-9be7-e31eb4a37086\") " pod="openshift-controller-manager/controller-manager-6945f956d8-687db" Jan 20 16:48:07 crc kubenswrapper[4558]: I0120 16:48:07.592735 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7fa84ade-abb1-4488-9be7-e31eb4a37086-config\") pod \"controller-manager-6945f956d8-687db\" (UID: \"7fa84ade-abb1-4488-9be7-e31eb4a37086\") " 
pod="openshift-controller-manager/controller-manager-6945f956d8-687db" Jan 20 16:48:07 crc kubenswrapper[4558]: I0120 16:48:07.596383 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7fa84ade-abb1-4488-9be7-e31eb4a37086-serving-cert\") pod \"controller-manager-6945f956d8-687db\" (UID: \"7fa84ade-abb1-4488-9be7-e31eb4a37086\") " pod="openshift-controller-manager/controller-manager-6945f956d8-687db" Jan 20 16:48:07 crc kubenswrapper[4558]: I0120 16:48:07.604301 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hn6xs\" (UniqueName: \"kubernetes.io/projected/7fa84ade-abb1-4488-9be7-e31eb4a37086-kube-api-access-hn6xs\") pod \"controller-manager-6945f956d8-687db\" (UID: \"7fa84ade-abb1-4488-9be7-e31eb4a37086\") " pod="openshift-controller-manager/controller-manager-6945f956d8-687db" Jan 20 16:48:07 crc kubenswrapper[4558]: I0120 16:48:07.723185 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6945f956d8-687db" Jan 20 16:48:08 crc kubenswrapper[4558]: I0120 16:48:08.083692 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-6945f956d8-687db"] Jan 20 16:48:08 crc kubenswrapper[4558]: W0120 16:48:08.087369 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7fa84ade_abb1_4488_9be7_e31eb4a37086.slice/crio-1fb352323b6cbb32db8c09aecda98fefa104dda66d1782022e5a30971745a9c7 WatchSource:0}: Error finding container 1fb352323b6cbb32db8c09aecda98fefa104dda66d1782022e5a30971745a9c7: Status 404 returned error can't find the container with id 1fb352323b6cbb32db8c09aecda98fefa104dda66d1782022e5a30971745a9c7 Jan 20 16:48:08 crc kubenswrapper[4558]: I0120 16:48:08.891629 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6945f956d8-687db" event={"ID":"7fa84ade-abb1-4488-9be7-e31eb4a37086","Type":"ContainerStarted","Data":"d57dcf11dbf75deacaba9cc365bd66075a271ab793dbf2a8d93de8b81263311b"} Jan 20 16:48:08 crc kubenswrapper[4558]: I0120 16:48:08.892475 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6945f956d8-687db" event={"ID":"7fa84ade-abb1-4488-9be7-e31eb4a37086","Type":"ContainerStarted","Data":"1fb352323b6cbb32db8c09aecda98fefa104dda66d1782022e5a30971745a9c7"} Jan 20 16:48:08 crc kubenswrapper[4558]: I0120 16:48:08.892493 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-6945f956d8-687db" Jan 20 16:48:08 crc kubenswrapper[4558]: I0120 16:48:08.895127 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-6945f956d8-687db" Jan 20 16:48:08 crc kubenswrapper[4558]: I0120 16:48:08.906348 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-6945f956d8-687db" podStartSLOduration=3.906339064 podStartE2EDuration="3.906339064s" podCreationTimestamp="2026-01-20 16:48:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:48:08.903436154 +0000 UTC m=+382.663774121" watchObservedRunningTime="2026-01-20 16:48:08.906339064 +0000 UTC m=+382.666677031" Jan 20 16:48:26 crc 
kubenswrapper[4558]: I0120 16:48:26.132022 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-zsdqk" podUID="ea01cf9d-e1ac-4197-a0ac-830eebd15245" containerName="registry" containerID="cri-o://d9840b5ca22dca84102fb767b8b153581bafb67e70ee8524eb5d479b886a9745" gracePeriod=30 Jan 20 16:48:26 crc kubenswrapper[4558]: I0120 16:48:26.502526 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-zsdqk" Jan 20 16:48:26 crc kubenswrapper[4558]: I0120 16:48:26.589268 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/ea01cf9d-e1ac-4197-a0ac-830eebd15245-registry-certificates\") pod \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\" (UID: \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\") " Jan 20 16:48:26 crc kubenswrapper[4558]: I0120 16:48:26.589327 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/ea01cf9d-e1ac-4197-a0ac-830eebd15245-ca-trust-extracted\") pod \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\" (UID: \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\") " Jan 20 16:48:26 crc kubenswrapper[4558]: I0120 16:48:26.589373 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7hmsc\" (UniqueName: \"kubernetes.io/projected/ea01cf9d-e1ac-4197-a0ac-830eebd15245-kube-api-access-7hmsc\") pod \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\" (UID: \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\") " Jan 20 16:48:26 crc kubenswrapper[4558]: I0120 16:48:26.589406 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/ea01cf9d-e1ac-4197-a0ac-830eebd15245-installation-pull-secrets\") pod \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\" (UID: \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\") " Jan 20 16:48:26 crc kubenswrapper[4558]: I0120 16:48:26.589549 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\" (UID: \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\") " Jan 20 16:48:26 crc kubenswrapper[4558]: I0120 16:48:26.589603 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ea01cf9d-e1ac-4197-a0ac-830eebd15245-bound-sa-token\") pod \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\" (UID: \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\") " Jan 20 16:48:26 crc kubenswrapper[4558]: I0120 16:48:26.589623 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ea01cf9d-e1ac-4197-a0ac-830eebd15245-trusted-ca\") pod \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\" (UID: \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\") " Jan 20 16:48:26 crc kubenswrapper[4558]: I0120 16:48:26.589641 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/ea01cf9d-e1ac-4197-a0ac-830eebd15245-registry-tls\") pod \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\" (UID: \"ea01cf9d-e1ac-4197-a0ac-830eebd15245\") " Jan 20 16:48:26 crc kubenswrapper[4558]: I0120 16:48:26.589794 4558 operation_generator.go:803] 
UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ea01cf9d-e1ac-4197-a0ac-830eebd15245-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "ea01cf9d-e1ac-4197-a0ac-830eebd15245" (UID: "ea01cf9d-e1ac-4197-a0ac-830eebd15245"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:48:26 crc kubenswrapper[4558]: I0120 16:48:26.590008 4558 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/ea01cf9d-e1ac-4197-a0ac-830eebd15245-registry-certificates\") on node \"crc\" DevicePath \"\"" Jan 20 16:48:26 crc kubenswrapper[4558]: I0120 16:48:26.590425 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ea01cf9d-e1ac-4197-a0ac-830eebd15245-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "ea01cf9d-e1ac-4197-a0ac-830eebd15245" (UID: "ea01cf9d-e1ac-4197-a0ac-830eebd15245"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:48:26 crc kubenswrapper[4558]: I0120 16:48:26.594848 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ea01cf9d-e1ac-4197-a0ac-830eebd15245-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "ea01cf9d-e1ac-4197-a0ac-830eebd15245" (UID: "ea01cf9d-e1ac-4197-a0ac-830eebd15245"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:48:26 crc kubenswrapper[4558]: I0120 16:48:26.594966 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea01cf9d-e1ac-4197-a0ac-830eebd15245-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "ea01cf9d-e1ac-4197-a0ac-830eebd15245" (UID: "ea01cf9d-e1ac-4197-a0ac-830eebd15245"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:48:26 crc kubenswrapper[4558]: I0120 16:48:26.595200 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ea01cf9d-e1ac-4197-a0ac-830eebd15245-kube-api-access-7hmsc" (OuterVolumeSpecName: "kube-api-access-7hmsc") pod "ea01cf9d-e1ac-4197-a0ac-830eebd15245" (UID: "ea01cf9d-e1ac-4197-a0ac-830eebd15245"). InnerVolumeSpecName "kube-api-access-7hmsc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:48:26 crc kubenswrapper[4558]: I0120 16:48:26.595535 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ea01cf9d-e1ac-4197-a0ac-830eebd15245-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "ea01cf9d-e1ac-4197-a0ac-830eebd15245" (UID: "ea01cf9d-e1ac-4197-a0ac-830eebd15245"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:48:26 crc kubenswrapper[4558]: I0120 16:48:26.596983 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "ea01cf9d-e1ac-4197-a0ac-830eebd15245" (UID: "ea01cf9d-e1ac-4197-a0ac-830eebd15245"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Jan 20 16:48:26 crc kubenswrapper[4558]: I0120 16:48:26.602335 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ea01cf9d-e1ac-4197-a0ac-830eebd15245-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "ea01cf9d-e1ac-4197-a0ac-830eebd15245" (UID: "ea01cf9d-e1ac-4197-a0ac-830eebd15245"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 16:48:26 crc kubenswrapper[4558]: I0120 16:48:26.691829 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7hmsc\" (UniqueName: \"kubernetes.io/projected/ea01cf9d-e1ac-4197-a0ac-830eebd15245-kube-api-access-7hmsc\") on node \"crc\" DevicePath \"\"" Jan 20 16:48:26 crc kubenswrapper[4558]: I0120 16:48:26.691862 4558 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/ea01cf9d-e1ac-4197-a0ac-830eebd15245-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Jan 20 16:48:26 crc kubenswrapper[4558]: I0120 16:48:26.691874 4558 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ea01cf9d-e1ac-4197-a0ac-830eebd15245-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 20 16:48:26 crc kubenswrapper[4558]: I0120 16:48:26.691886 4558 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ea01cf9d-e1ac-4197-a0ac-830eebd15245-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 20 16:48:26 crc kubenswrapper[4558]: I0120 16:48:26.691895 4558 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/ea01cf9d-e1ac-4197-a0ac-830eebd15245-registry-tls\") on node \"crc\" DevicePath \"\"" Jan 20 16:48:26 crc kubenswrapper[4558]: I0120 16:48:26.691904 4558 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/ea01cf9d-e1ac-4197-a0ac-830eebd15245-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Jan 20 16:48:26 crc kubenswrapper[4558]: I0120 16:48:26.981468 4558 generic.go:334] "Generic (PLEG): container finished" podID="ea01cf9d-e1ac-4197-a0ac-830eebd15245" containerID="d9840b5ca22dca84102fb767b8b153581bafb67e70ee8524eb5d479b886a9745" exitCode=0 Jan 20 16:48:26 crc kubenswrapper[4558]: I0120 16:48:26.981534 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-zsdqk" event={"ID":"ea01cf9d-e1ac-4197-a0ac-830eebd15245","Type":"ContainerDied","Data":"d9840b5ca22dca84102fb767b8b153581bafb67e70ee8524eb5d479b886a9745"} Jan 20 16:48:26 crc kubenswrapper[4558]: I0120 16:48:26.981554 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-zsdqk" Jan 20 16:48:26 crc kubenswrapper[4558]: I0120 16:48:26.981564 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-zsdqk" event={"ID":"ea01cf9d-e1ac-4197-a0ac-830eebd15245","Type":"ContainerDied","Data":"9a8052fd1c09fcf4afa44eb1c15cab9253a408351002d246fa260433502c5c64"} Jan 20 16:48:26 crc kubenswrapper[4558]: I0120 16:48:26.981580 4558 scope.go:117] "RemoveContainer" containerID="d9840b5ca22dca84102fb767b8b153581bafb67e70ee8524eb5d479b886a9745" Jan 20 16:48:27 crc kubenswrapper[4558]: I0120 16:48:27.003189 4558 scope.go:117] "RemoveContainer" containerID="d9840b5ca22dca84102fb767b8b153581bafb67e70ee8524eb5d479b886a9745" Jan 20 16:48:27 crc kubenswrapper[4558]: E0120 16:48:27.003519 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d9840b5ca22dca84102fb767b8b153581bafb67e70ee8524eb5d479b886a9745\": container with ID starting with d9840b5ca22dca84102fb767b8b153581bafb67e70ee8524eb5d479b886a9745 not found: ID does not exist" containerID="d9840b5ca22dca84102fb767b8b153581bafb67e70ee8524eb5d479b886a9745" Jan 20 16:48:27 crc kubenswrapper[4558]: I0120 16:48:27.003548 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d9840b5ca22dca84102fb767b8b153581bafb67e70ee8524eb5d479b886a9745"} err="failed to get container status \"d9840b5ca22dca84102fb767b8b153581bafb67e70ee8524eb5d479b886a9745\": rpc error: code = NotFound desc = could not find container \"d9840b5ca22dca84102fb767b8b153581bafb67e70ee8524eb5d479b886a9745\": container with ID starting with d9840b5ca22dca84102fb767b8b153581bafb67e70ee8524eb5d479b886a9745 not found: ID does not exist" Jan 20 16:48:27 crc kubenswrapper[4558]: I0120 16:48:27.004416 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-zsdqk"] Jan 20 16:48:27 crc kubenswrapper[4558]: I0120 16:48:27.007514 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-zsdqk"] Jan 20 16:48:27 crc kubenswrapper[4558]: I0120 16:48:27.329679 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 16:48:27 crc kubenswrapper[4558]: I0120 16:48:27.329741 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 16:48:27 crc kubenswrapper[4558]: I0120 16:48:27.329783 4558 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" Jan 20 16:48:27 crc kubenswrapper[4558]: I0120 16:48:27.330314 4558 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"919fd3462b46bed62b26a1ec80dddd868d9b963136777394bcd10ef842cf78a9"} pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" containerMessage="Container machine-config-daemon failed 
liveness probe, will be restarted" Jan 20 16:48:27 crc kubenswrapper[4558]: I0120 16:48:27.330369 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" containerID="cri-o://919fd3462b46bed62b26a1ec80dddd868d9b963136777394bcd10ef842cf78a9" gracePeriod=600 Jan 20 16:48:27 crc kubenswrapper[4558]: I0120 16:48:27.989231 4558 generic.go:334] "Generic (PLEG): container finished" podID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerID="919fd3462b46bed62b26a1ec80dddd868d9b963136777394bcd10ef842cf78a9" exitCode=0 Jan 20 16:48:27 crc kubenswrapper[4558]: I0120 16:48:27.989341 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerDied","Data":"919fd3462b46bed62b26a1ec80dddd868d9b963136777394bcd10ef842cf78a9"} Jan 20 16:48:27 crc kubenswrapper[4558]: I0120 16:48:27.989774 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerStarted","Data":"75faba5e9c60d39b0965f7842c4fcbaf51da890c19853fb9a294a0e3410f3d20"} Jan 20 16:48:27 crc kubenswrapper[4558]: I0120 16:48:27.989800 4558 scope.go:117] "RemoveContainer" containerID="b18a59ecb802507e0d5988eacc915fd7e0d6954518de80f61162c97f680fd8c1" Jan 20 16:48:28 crc kubenswrapper[4558]: I0120 16:48:28.571012 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ea01cf9d-e1ac-4197-a0ac-830eebd15245" path="/var/lib/kubelet/pods/ea01cf9d-e1ac-4197-a0ac-830eebd15245/volumes" Jan 20 16:50:27 crc kubenswrapper[4558]: I0120 16:50:27.330084 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 16:50:27 crc kubenswrapper[4558]: I0120 16:50:27.330425 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.005634 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-nv2xw"] Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.006396 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" podUID="254129cd-82fc-4162-b671-2434bc9e2972" containerName="ovn-controller" containerID="cri-o://186a1d2caad8865abf7f565405c4a4c077038ccb67ba77927cc2392ec075a811" gracePeriod=30 Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.006424 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" podUID="254129cd-82fc-4162-b671-2434bc9e2972" containerName="northd" containerID="cri-o://0889cf371f8b06cc2d255c15fda97db1d5000d6f6cff29d2fdcb8667cede8a99" gracePeriod=30 Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.006494 4558 kuberuntime_container.go:808] "Killing container with a 
grace period" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" podUID="254129cd-82fc-4162-b671-2434bc9e2972" containerName="ovn-acl-logging" containerID="cri-o://e4265d3281e954ee5989ee008da28c6cb9fa29478e5fdf23325521d455304da9" gracePeriod=30 Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.006472 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" podUID="254129cd-82fc-4162-b671-2434bc9e2972" containerName="kube-rbac-proxy-node" containerID="cri-o://1a6a184067bde115ef5e09fdd90e7dccfe89a9dc347af450998b91c068e7e803" gracePeriod=30 Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.006494 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" podUID="254129cd-82fc-4162-b671-2434bc9e2972" containerName="sbdb" containerID="cri-o://f41d488b0c24e594a2f5cad36b5f8efdb7d867fae0e18a74fe3c396a203d162f" gracePeriod=30 Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.006509 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" podUID="254129cd-82fc-4162-b671-2434bc9e2972" containerName="nbdb" containerID="cri-o://4264ef8ee49a8b172fb05ac5c9b0bd32350b5d9ae95293079dfeaf44f021b455" gracePeriod=30 Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.006618 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" podUID="254129cd-82fc-4162-b671-2434bc9e2972" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://cd8b93e5559d95511a1285a9aec9a7d72df6c330bcd1e1daf3e93f5494f70eeb" gracePeriod=30 Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.034408 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" podUID="254129cd-82fc-4162-b671-2434bc9e2972" containerName="ovnkube-controller" containerID="cri-o://8d6af51a18940c05083d10a25b2c636978ce51ac88bbfc6ed169b6669fb846a4" gracePeriod=30 Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.311541 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-nv2xw_254129cd-82fc-4162-b671-2434bc9e2972/ovnkube-controller/3.log" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.313521 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-nv2xw_254129cd-82fc-4162-b671-2434bc9e2972/ovn-acl-logging/0.log" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.314028 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-nv2xw_254129cd-82fc-4162-b671-2434bc9e2972/ovn-controller/0.log" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.314445 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.352920 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-sn4p9"] Jan 20 16:50:54 crc kubenswrapper[4558]: E0120 16:50:54.353083 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="254129cd-82fc-4162-b671-2434bc9e2972" containerName="ovnkube-controller" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.353095 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="254129cd-82fc-4162-b671-2434bc9e2972" containerName="ovnkube-controller" Jan 20 16:50:54 crc kubenswrapper[4558]: E0120 16:50:54.353106 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="254129cd-82fc-4162-b671-2434bc9e2972" containerName="sbdb" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.353111 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="254129cd-82fc-4162-b671-2434bc9e2972" containerName="sbdb" Jan 20 16:50:54 crc kubenswrapper[4558]: E0120 16:50:54.353120 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="254129cd-82fc-4162-b671-2434bc9e2972" containerName="ovnkube-controller" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.353125 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="254129cd-82fc-4162-b671-2434bc9e2972" containerName="ovnkube-controller" Jan 20 16:50:54 crc kubenswrapper[4558]: E0120 16:50:54.353133 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="254129cd-82fc-4162-b671-2434bc9e2972" containerName="kube-rbac-proxy-node" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.353139 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="254129cd-82fc-4162-b671-2434bc9e2972" containerName="kube-rbac-proxy-node" Jan 20 16:50:54 crc kubenswrapper[4558]: E0120 16:50:54.353147 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="254129cd-82fc-4162-b671-2434bc9e2972" containerName="ovnkube-controller" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.353153 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="254129cd-82fc-4162-b671-2434bc9e2972" containerName="ovnkube-controller" Jan 20 16:50:54 crc kubenswrapper[4558]: E0120 16:50:54.353159 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="254129cd-82fc-4162-b671-2434bc9e2972" containerName="ovnkube-controller" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.353177 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="254129cd-82fc-4162-b671-2434bc9e2972" containerName="ovnkube-controller" Jan 20 16:50:54 crc kubenswrapper[4558]: E0120 16:50:54.353184 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="254129cd-82fc-4162-b671-2434bc9e2972" containerName="kubecfg-setup" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.353189 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="254129cd-82fc-4162-b671-2434bc9e2972" containerName="kubecfg-setup" Jan 20 16:50:54 crc kubenswrapper[4558]: E0120 16:50:54.353196 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="254129cd-82fc-4162-b671-2434bc9e2972" containerName="nbdb" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.353200 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="254129cd-82fc-4162-b671-2434bc9e2972" containerName="nbdb" Jan 20 16:50:54 crc kubenswrapper[4558]: E0120 16:50:54.353209 4558 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="ea01cf9d-e1ac-4197-a0ac-830eebd15245" containerName="registry" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.353214 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea01cf9d-e1ac-4197-a0ac-830eebd15245" containerName="registry" Jan 20 16:50:54 crc kubenswrapper[4558]: E0120 16:50:54.353220 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="254129cd-82fc-4162-b671-2434bc9e2972" containerName="northd" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.353225 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="254129cd-82fc-4162-b671-2434bc9e2972" containerName="northd" Jan 20 16:50:54 crc kubenswrapper[4558]: E0120 16:50:54.353232 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="254129cd-82fc-4162-b671-2434bc9e2972" containerName="ovn-controller" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.353237 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="254129cd-82fc-4162-b671-2434bc9e2972" containerName="ovn-controller" Jan 20 16:50:54 crc kubenswrapper[4558]: E0120 16:50:54.353245 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="254129cd-82fc-4162-b671-2434bc9e2972" containerName="ovn-acl-logging" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.353250 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="254129cd-82fc-4162-b671-2434bc9e2972" containerName="ovn-acl-logging" Jan 20 16:50:54 crc kubenswrapper[4558]: E0120 16:50:54.353256 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="254129cd-82fc-4162-b671-2434bc9e2972" containerName="kube-rbac-proxy-ovn-metrics" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.353262 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="254129cd-82fc-4162-b671-2434bc9e2972" containerName="kube-rbac-proxy-ovn-metrics" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.353353 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="254129cd-82fc-4162-b671-2434bc9e2972" containerName="kube-rbac-proxy-node" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.353363 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="254129cd-82fc-4162-b671-2434bc9e2972" containerName="ovnkube-controller" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.353369 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="254129cd-82fc-4162-b671-2434bc9e2972" containerName="ovn-acl-logging" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.353375 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="254129cd-82fc-4162-b671-2434bc9e2972" containerName="ovnkube-controller" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.353382 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="254129cd-82fc-4162-b671-2434bc9e2972" containerName="sbdb" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.353387 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="254129cd-82fc-4162-b671-2434bc9e2972" containerName="nbdb" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.353393 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="254129cd-82fc-4162-b671-2434bc9e2972" containerName="ovnkube-controller" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.353400 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="254129cd-82fc-4162-b671-2434bc9e2972" containerName="ovn-controller" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.353406 4558 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="254129cd-82fc-4162-b671-2434bc9e2972" containerName="kube-rbac-proxy-ovn-metrics" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.353412 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="254129cd-82fc-4162-b671-2434bc9e2972" containerName="northd" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.353419 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea01cf9d-e1ac-4197-a0ac-830eebd15245" containerName="registry" Jan 20 16:50:54 crc kubenswrapper[4558]: E0120 16:50:54.353492 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="254129cd-82fc-4162-b671-2434bc9e2972" containerName="ovnkube-controller" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.353498 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="254129cd-82fc-4162-b671-2434bc9e2972" containerName="ovnkube-controller" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.353570 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="254129cd-82fc-4162-b671-2434bc9e2972" containerName="ovnkube-controller" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.353579 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="254129cd-82fc-4162-b671-2434bc9e2972" containerName="ovnkube-controller" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.354767 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.482550 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-etc-openvswitch\") pod \"254129cd-82fc-4162-b671-2434bc9e2972\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.482675 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-host-run-ovn-kubernetes\") pod \"254129cd-82fc-4162-b671-2434bc9e2972\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.482711 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-run-systemd\") pod \"254129cd-82fc-4162-b671-2434bc9e2972\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.482737 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-run-openvswitch\") pod \"254129cd-82fc-4162-b671-2434bc9e2972\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.482766 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-host-var-lib-cni-networks-ovn-kubernetes\") pod \"254129cd-82fc-4162-b671-2434bc9e2972\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.482790 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: 
\"kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-systemd-units\") pod \"254129cd-82fc-4162-b671-2434bc9e2972\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.482791 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "254129cd-82fc-4162-b671-2434bc9e2972" (UID: "254129cd-82fc-4162-b671-2434bc9e2972"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.482825 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/254129cd-82fc-4162-b671-2434bc9e2972-ovnkube-config\") pod \"254129cd-82fc-4162-b671-2434bc9e2972\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.482829 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "254129cd-82fc-4162-b671-2434bc9e2972" (UID: "254129cd-82fc-4162-b671-2434bc9e2972"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.482854 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "254129cd-82fc-4162-b671-2434bc9e2972" (UID: "254129cd-82fc-4162-b671-2434bc9e2972"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.482888 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "254129cd-82fc-4162-b671-2434bc9e2972" (UID: "254129cd-82fc-4162-b671-2434bc9e2972"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.482897 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/254129cd-82fc-4162-b671-2434bc9e2972-env-overrides\") pod \"254129cd-82fc-4162-b671-2434bc9e2972\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.482920 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-host-kubelet\") pod \"254129cd-82fc-4162-b671-2434bc9e2972\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.482911 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "254129cd-82fc-4162-b671-2434bc9e2972" (UID: "254129cd-82fc-4162-b671-2434bc9e2972"). InnerVolumeSpecName "systemd-units". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.482945 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-log-socket\") pod \"254129cd-82fc-4162-b671-2434bc9e2972\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.482960 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "254129cd-82fc-4162-b671-2434bc9e2972" (UID: "254129cd-82fc-4162-b671-2434bc9e2972"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.482966 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-host-cni-bin\") pod \"254129cd-82fc-4162-b671-2434bc9e2972\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.483022 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rgzcc\" (UniqueName: \"kubernetes.io/projected/254129cd-82fc-4162-b671-2434bc9e2972-kube-api-access-rgzcc\") pod \"254129cd-82fc-4162-b671-2434bc9e2972\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.483026 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "254129cd-82fc-4162-b671-2434bc9e2972" (UID: "254129cd-82fc-4162-b671-2434bc9e2972"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.483042 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-var-lib-openvswitch\") pod \"254129cd-82fc-4162-b671-2434bc9e2972\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.483047 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-log-socket" (OuterVolumeSpecName: "log-socket") pod "254129cd-82fc-4162-b671-2434bc9e2972" (UID: "254129cd-82fc-4162-b671-2434bc9e2972"). InnerVolumeSpecName "log-socket". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.483083 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-host-run-netns\") pod \"254129cd-82fc-4162-b671-2434bc9e2972\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.483101 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-node-log\") pod \"254129cd-82fc-4162-b671-2434bc9e2972\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.483122 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-run-ovn\") pod \"254129cd-82fc-4162-b671-2434bc9e2972\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.483143 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/254129cd-82fc-4162-b671-2434bc9e2972-ovn-node-metrics-cert\") pod \"254129cd-82fc-4162-b671-2434bc9e2972\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.483146 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "254129cd-82fc-4162-b671-2434bc9e2972" (UID: "254129cd-82fc-4162-b671-2434bc9e2972"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.483156 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-host-slash\") pod \"254129cd-82fc-4162-b671-2434bc9e2972\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.483155 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-node-log" (OuterVolumeSpecName: "node-log") pod "254129cd-82fc-4162-b671-2434bc9e2972" (UID: "254129cd-82fc-4162-b671-2434bc9e2972"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.483190 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-host-cni-netd\") pod \"254129cd-82fc-4162-b671-2434bc9e2972\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.483176 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "254129cd-82fc-4162-b671-2434bc9e2972" (UID: "254129cd-82fc-4162-b671-2434bc9e2972"). InnerVolumeSpecName "var-lib-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.483232 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/254129cd-82fc-4162-b671-2434bc9e2972-ovnkube-script-lib\") pod \"254129cd-82fc-4162-b671-2434bc9e2972\" (UID: \"254129cd-82fc-4162-b671-2434bc9e2972\") " Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.483230 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-host-slash" (OuterVolumeSpecName: "host-slash") pod "254129cd-82fc-4162-b671-2434bc9e2972" (UID: "254129cd-82fc-4162-b671-2434bc9e2972"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.483226 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "254129cd-82fc-4162-b671-2434bc9e2972" (UID: "254129cd-82fc-4162-b671-2434bc9e2972"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.483252 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "254129cd-82fc-4162-b671-2434bc9e2972" (UID: "254129cd-82fc-4162-b671-2434bc9e2972"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.483289 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/254129cd-82fc-4162-b671-2434bc9e2972-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "254129cd-82fc-4162-b671-2434bc9e2972" (UID: "254129cd-82fc-4162-b671-2434bc9e2972"). InnerVolumeSpecName "env-overrides". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.483356 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/7267c3cc-17ca-4c43-80b8-78cc276b16f0-run-ovn\") pod \"ovnkube-node-sn4p9\" (UID: \"7267c3cc-17ca-4c43-80b8-78cc276b16f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.483406 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/7267c3cc-17ca-4c43-80b8-78cc276b16f0-run-systemd\") pod \"ovnkube-node-sn4p9\" (UID: \"7267c3cc-17ca-4c43-80b8-78cc276b16f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.483420 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/7267c3cc-17ca-4c43-80b8-78cc276b16f0-host-cni-bin\") pod \"ovnkube-node-sn4p9\" (UID: \"7267c3cc-17ca-4c43-80b8-78cc276b16f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.483436 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7267c3cc-17ca-4c43-80b8-78cc276b16f0-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-sn4p9\" (UID: \"7267c3cc-17ca-4c43-80b8-78cc276b16f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.483464 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7267c3cc-17ca-4c43-80b8-78cc276b16f0-var-lib-openvswitch\") pod \"ovnkube-node-sn4p9\" (UID: \"7267c3cc-17ca-4c43-80b8-78cc276b16f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.483562 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/7267c3cc-17ca-4c43-80b8-78cc276b16f0-systemd-units\") pod \"ovnkube-node-sn4p9\" (UID: \"7267c3cc-17ca-4c43-80b8-78cc276b16f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.483572 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/254129cd-82fc-4162-b671-2434bc9e2972-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "254129cd-82fc-4162-b671-2434bc9e2972" (UID: "254129cd-82fc-4162-b671-2434bc9e2972"). InnerVolumeSpecName "ovnkube-script-lib". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.483590 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/7267c3cc-17ca-4c43-80b8-78cc276b16f0-env-overrides\") pod \"ovnkube-node-sn4p9\" (UID: \"7267c3cc-17ca-4c43-80b8-78cc276b16f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.483666 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/7267c3cc-17ca-4c43-80b8-78cc276b16f0-ovn-node-metrics-cert\") pod \"ovnkube-node-sn4p9\" (UID: \"7267c3cc-17ca-4c43-80b8-78cc276b16f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.483694 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7267c3cc-17ca-4c43-80b8-78cc276b16f0-host-run-ovn-kubernetes\") pod \"ovnkube-node-sn4p9\" (UID: \"7267c3cc-17ca-4c43-80b8-78cc276b16f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.483714 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/7267c3cc-17ca-4c43-80b8-78cc276b16f0-node-log\") pod \"ovnkube-node-sn4p9\" (UID: \"7267c3cc-17ca-4c43-80b8-78cc276b16f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.483732 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/7267c3cc-17ca-4c43-80b8-78cc276b16f0-host-run-netns\") pod \"ovnkube-node-sn4p9\" (UID: \"7267c3cc-17ca-4c43-80b8-78cc276b16f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.483750 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9rvbg\" (UniqueName: \"kubernetes.io/projected/7267c3cc-17ca-4c43-80b8-78cc276b16f0-kube-api-access-9rvbg\") pod \"ovnkube-node-sn4p9\" (UID: \"7267c3cc-17ca-4c43-80b8-78cc276b16f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.483768 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/7267c3cc-17ca-4c43-80b8-78cc276b16f0-log-socket\") pod \"ovnkube-node-sn4p9\" (UID: \"7267c3cc-17ca-4c43-80b8-78cc276b16f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.483799 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/7267c3cc-17ca-4c43-80b8-78cc276b16f0-ovnkube-script-lib\") pod \"ovnkube-node-sn4p9\" (UID: \"7267c3cc-17ca-4c43-80b8-78cc276b16f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.483823 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: 
\"kubernetes.io/host-path/7267c3cc-17ca-4c43-80b8-78cc276b16f0-host-cni-netd\") pod \"ovnkube-node-sn4p9\" (UID: \"7267c3cc-17ca-4c43-80b8-78cc276b16f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.483842 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/7267c3cc-17ca-4c43-80b8-78cc276b16f0-ovnkube-config\") pod \"ovnkube-node-sn4p9\" (UID: \"7267c3cc-17ca-4c43-80b8-78cc276b16f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.483867 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/7267c3cc-17ca-4c43-80b8-78cc276b16f0-host-kubelet\") pod \"ovnkube-node-sn4p9\" (UID: \"7267c3cc-17ca-4c43-80b8-78cc276b16f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.483961 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/254129cd-82fc-4162-b671-2434bc9e2972-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "254129cd-82fc-4162-b671-2434bc9e2972" (UID: "254129cd-82fc-4162-b671-2434bc9e2972"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.483963 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7267c3cc-17ca-4c43-80b8-78cc276b16f0-etc-openvswitch\") pod \"ovnkube-node-sn4p9\" (UID: \"7267c3cc-17ca-4c43-80b8-78cc276b16f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.484002 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7267c3cc-17ca-4c43-80b8-78cc276b16f0-run-openvswitch\") pod \"ovnkube-node-sn4p9\" (UID: \"7267c3cc-17ca-4c43-80b8-78cc276b16f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.484023 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/7267c3cc-17ca-4c43-80b8-78cc276b16f0-host-slash\") pod \"ovnkube-node-sn4p9\" (UID: \"7267c3cc-17ca-4c43-80b8-78cc276b16f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.484105 4558 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-log-socket\") on node \"crc\" DevicePath \"\"" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.484120 4558 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-host-cni-bin\") on node \"crc\" DevicePath \"\"" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.484129 4558 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.484140 4558 reconciler_common.go:293] 
"Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-host-run-netns\") on node \"crc\" DevicePath \"\"" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.484148 4558 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-node-log\") on node \"crc\" DevicePath \"\"" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.484157 4558 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-run-ovn\") on node \"crc\" DevicePath \"\"" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.484190 4558 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-host-slash\") on node \"crc\" DevicePath \"\"" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.484198 4558 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-host-cni-netd\") on node \"crc\" DevicePath \"\"" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.484207 4558 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/254129cd-82fc-4162-b671-2434bc9e2972-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.484215 4558 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.484223 4558 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.484232 4558 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-run-openvswitch\") on node \"crc\" DevicePath \"\"" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.484241 4558 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.484279 4558 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-systemd-units\") on node \"crc\" DevicePath \"\"" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.484290 4558 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/254129cd-82fc-4162-b671-2434bc9e2972-ovnkube-config\") on node \"crc\" DevicePath \"\"" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.484299 4558 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-host-kubelet\") on node \"crc\" DevicePath \"\"" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.484306 4558 reconciler_common.go:293] "Volume detached for volume 
\"env-overrides\" (UniqueName: \"kubernetes.io/configmap/254129cd-82fc-4162-b671-2434bc9e2972-env-overrides\") on node \"crc\" DevicePath \"\"" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.488110 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/254129cd-82fc-4162-b671-2434bc9e2972-kube-api-access-rgzcc" (OuterVolumeSpecName: "kube-api-access-rgzcc") pod "254129cd-82fc-4162-b671-2434bc9e2972" (UID: "254129cd-82fc-4162-b671-2434bc9e2972"). InnerVolumeSpecName "kube-api-access-rgzcc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.488434 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/254129cd-82fc-4162-b671-2434bc9e2972-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "254129cd-82fc-4162-b671-2434bc9e2972" (UID: "254129cd-82fc-4162-b671-2434bc9e2972"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.494221 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "254129cd-82fc-4162-b671-2434bc9e2972" (UID: "254129cd-82fc-4162-b671-2434bc9e2972"). InnerVolumeSpecName "run-systemd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.576268 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-nv2xw_254129cd-82fc-4162-b671-2434bc9e2972/ovnkube-controller/3.log" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.578498 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-nv2xw_254129cd-82fc-4162-b671-2434bc9e2972/ovn-acl-logging/0.log" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.578949 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-nv2xw_254129cd-82fc-4162-b671-2434bc9e2972/ovn-controller/0.log" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.579972 4558 generic.go:334] "Generic (PLEG): container finished" podID="254129cd-82fc-4162-b671-2434bc9e2972" containerID="8d6af51a18940c05083d10a25b2c636978ce51ac88bbfc6ed169b6669fb846a4" exitCode=0 Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.579997 4558 generic.go:334] "Generic (PLEG): container finished" podID="254129cd-82fc-4162-b671-2434bc9e2972" containerID="f41d488b0c24e594a2f5cad36b5f8efdb7d867fae0e18a74fe3c396a203d162f" exitCode=0 Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.580004 4558 generic.go:334] "Generic (PLEG): container finished" podID="254129cd-82fc-4162-b671-2434bc9e2972" containerID="4264ef8ee49a8b172fb05ac5c9b0bd32350b5d9ae95293079dfeaf44f021b455" exitCode=0 Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.580010 4558 generic.go:334] "Generic (PLEG): container finished" podID="254129cd-82fc-4162-b671-2434bc9e2972" containerID="0889cf371f8b06cc2d255c15fda97db1d5000d6f6cff29d2fdcb8667cede8a99" exitCode=0 Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.580016 4558 generic.go:334] "Generic (PLEG): container finished" podID="254129cd-82fc-4162-b671-2434bc9e2972" containerID="cd8b93e5559d95511a1285a9aec9a7d72df6c330bcd1e1daf3e93f5494f70eeb" exitCode=0 Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.580023 4558 
generic.go:334] "Generic (PLEG): container finished" podID="254129cd-82fc-4162-b671-2434bc9e2972" containerID="1a6a184067bde115ef5e09fdd90e7dccfe89a9dc347af450998b91c068e7e803" exitCode=0 Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.580029 4558 generic.go:334] "Generic (PLEG): container finished" podID="254129cd-82fc-4162-b671-2434bc9e2972" containerID="e4265d3281e954ee5989ee008da28c6cb9fa29478e5fdf23325521d455304da9" exitCode=143 Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.580036 4558 generic.go:334] "Generic (PLEG): container finished" podID="254129cd-82fc-4162-b671-2434bc9e2972" containerID="186a1d2caad8865abf7f565405c4a4c077038ccb67ba77927cc2392ec075a811" exitCode=143 Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.580046 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.580041 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" event={"ID":"254129cd-82fc-4162-b671-2434bc9e2972","Type":"ContainerDied","Data":"8d6af51a18940c05083d10a25b2c636978ce51ac88bbfc6ed169b6669fb846a4"} Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.580123 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" event={"ID":"254129cd-82fc-4162-b671-2434bc9e2972","Type":"ContainerDied","Data":"f41d488b0c24e594a2f5cad36b5f8efdb7d867fae0e18a74fe3c396a203d162f"} Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.580141 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" event={"ID":"254129cd-82fc-4162-b671-2434bc9e2972","Type":"ContainerDied","Data":"4264ef8ee49a8b172fb05ac5c9b0bd32350b5d9ae95293079dfeaf44f021b455"} Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.580152 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" event={"ID":"254129cd-82fc-4162-b671-2434bc9e2972","Type":"ContainerDied","Data":"0889cf371f8b06cc2d255c15fda97db1d5000d6f6cff29d2fdcb8667cede8a99"} Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.580186 4558 scope.go:117] "RemoveContainer" containerID="8d6af51a18940c05083d10a25b2c636978ce51ac88bbfc6ed169b6669fb846a4" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.580193 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" event={"ID":"254129cd-82fc-4162-b671-2434bc9e2972","Type":"ContainerDied","Data":"cd8b93e5559d95511a1285a9aec9a7d72df6c330bcd1e1daf3e93f5494f70eeb"} Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.580204 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" event={"ID":"254129cd-82fc-4162-b671-2434bc9e2972","Type":"ContainerDied","Data":"1a6a184067bde115ef5e09fdd90e7dccfe89a9dc347af450998b91c068e7e803"} Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.580214 4558 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0e9f4ed928d20c9db64a689d7d14df9b81870bb0523cf5098e09d2d85aad9fca"} Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.580223 4558 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f41d488b0c24e594a2f5cad36b5f8efdb7d867fae0e18a74fe3c396a203d162f"} Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.580229 4558 
pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4264ef8ee49a8b172fb05ac5c9b0bd32350b5d9ae95293079dfeaf44f021b455"} Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.580233 4558 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0889cf371f8b06cc2d255c15fda97db1d5000d6f6cff29d2fdcb8667cede8a99"} Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.580238 4558 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"cd8b93e5559d95511a1285a9aec9a7d72df6c330bcd1e1daf3e93f5494f70eeb"} Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.580261 4558 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1a6a184067bde115ef5e09fdd90e7dccfe89a9dc347af450998b91c068e7e803"} Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.580266 4558 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e4265d3281e954ee5989ee008da28c6cb9fa29478e5fdf23325521d455304da9"} Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.580271 4558 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"186a1d2caad8865abf7f565405c4a4c077038ccb67ba77927cc2392ec075a811"} Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.580275 4558 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e"} Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.580282 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" event={"ID":"254129cd-82fc-4162-b671-2434bc9e2972","Type":"ContainerDied","Data":"e4265d3281e954ee5989ee008da28c6cb9fa29478e5fdf23325521d455304da9"} Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.580292 4558 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8d6af51a18940c05083d10a25b2c636978ce51ac88bbfc6ed169b6669fb846a4"} Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.580297 4558 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0e9f4ed928d20c9db64a689d7d14df9b81870bb0523cf5098e09d2d85aad9fca"} Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.580303 4558 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f41d488b0c24e594a2f5cad36b5f8efdb7d867fae0e18a74fe3c396a203d162f"} Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.580308 4558 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4264ef8ee49a8b172fb05ac5c9b0bd32350b5d9ae95293079dfeaf44f021b455"} Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.580312 4558 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0889cf371f8b06cc2d255c15fda97db1d5000d6f6cff29d2fdcb8667cede8a99"} Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.580397 4558 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"cd8b93e5559d95511a1285a9aec9a7d72df6c330bcd1e1daf3e93f5494f70eeb"} Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.580422 4558 
pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1a6a184067bde115ef5e09fdd90e7dccfe89a9dc347af450998b91c068e7e803"} Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.580428 4558 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e4265d3281e954ee5989ee008da28c6cb9fa29478e5fdf23325521d455304da9"} Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.580432 4558 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"186a1d2caad8865abf7f565405c4a4c077038ccb67ba77927cc2392ec075a811"} Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.580438 4558 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e"} Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.580446 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" event={"ID":"254129cd-82fc-4162-b671-2434bc9e2972","Type":"ContainerDied","Data":"186a1d2caad8865abf7f565405c4a4c077038ccb67ba77927cc2392ec075a811"} Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.580455 4558 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8d6af51a18940c05083d10a25b2c636978ce51ac88bbfc6ed169b6669fb846a4"} Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.580460 4558 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0e9f4ed928d20c9db64a689d7d14df9b81870bb0523cf5098e09d2d85aad9fca"} Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.580465 4558 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f41d488b0c24e594a2f5cad36b5f8efdb7d867fae0e18a74fe3c396a203d162f"} Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.580470 4558 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4264ef8ee49a8b172fb05ac5c9b0bd32350b5d9ae95293079dfeaf44f021b455"} Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.580474 4558 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0889cf371f8b06cc2d255c15fda97db1d5000d6f6cff29d2fdcb8667cede8a99"} Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.580480 4558 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"cd8b93e5559d95511a1285a9aec9a7d72df6c330bcd1e1daf3e93f5494f70eeb"} Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.580532 4558 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1a6a184067bde115ef5e09fdd90e7dccfe89a9dc347af450998b91c068e7e803"} Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.580538 4558 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e4265d3281e954ee5989ee008da28c6cb9fa29478e5fdf23325521d455304da9"} Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.580542 4558 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"186a1d2caad8865abf7f565405c4a4c077038ccb67ba77927cc2392ec075a811"} Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.580548 4558 
pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e"} Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.580556 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-nv2xw" event={"ID":"254129cd-82fc-4162-b671-2434bc9e2972","Type":"ContainerDied","Data":"93769c5fb7661e2ef34ef9538594b4c32ba73d941226786897ed3ff239b9b8f5"} Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.580564 4558 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8d6af51a18940c05083d10a25b2c636978ce51ac88bbfc6ed169b6669fb846a4"} Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.580588 4558 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0e9f4ed928d20c9db64a689d7d14df9b81870bb0523cf5098e09d2d85aad9fca"} Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.580593 4558 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f41d488b0c24e594a2f5cad36b5f8efdb7d867fae0e18a74fe3c396a203d162f"} Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.580597 4558 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4264ef8ee49a8b172fb05ac5c9b0bd32350b5d9ae95293079dfeaf44f021b455"} Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.580603 4558 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0889cf371f8b06cc2d255c15fda97db1d5000d6f6cff29d2fdcb8667cede8a99"} Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.580607 4558 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"cd8b93e5559d95511a1285a9aec9a7d72df6c330bcd1e1daf3e93f5494f70eeb"} Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.580611 4558 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1a6a184067bde115ef5e09fdd90e7dccfe89a9dc347af450998b91c068e7e803"} Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.580615 4558 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e4265d3281e954ee5989ee008da28c6cb9fa29478e5fdf23325521d455304da9"} Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.580620 4558 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"186a1d2caad8865abf7f565405c4a4c077038ccb67ba77927cc2392ec075a811"} Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.580624 4558 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e"} Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.582963 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-jsqvf_bedf08c7-1f93-4931-a7f3-e729e2a137af/kube-multus/2.log" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.583416 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-jsqvf_bedf08c7-1f93-4931-a7f3-e729e2a137af/kube-multus/1.log" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.583456 4558 generic.go:334] "Generic (PLEG): container finished" 
podID="bedf08c7-1f93-4931-a7f3-e729e2a137af" containerID="6ff161b5eb2dde97ccaf09d4e6df49b859981accd8ab7643d47f6900019c3dae" exitCode=2 Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.583482 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-jsqvf" event={"ID":"bedf08c7-1f93-4931-a7f3-e729e2a137af","Type":"ContainerDied","Data":"6ff161b5eb2dde97ccaf09d4e6df49b859981accd8ab7643d47f6900019c3dae"} Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.583502 4558 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2297634e20683413a5eb643517580c9486303dc9616a7588a4ac571f3d0e53b5"} Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.583785 4558 scope.go:117] "RemoveContainer" containerID="6ff161b5eb2dde97ccaf09d4e6df49b859981accd8ab7643d47f6900019c3dae" Jan 20 16:50:54 crc kubenswrapper[4558]: E0120 16:50:54.583955 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-jsqvf_openshift-multus(bedf08c7-1f93-4931-a7f3-e729e2a137af)\"" pod="openshift-multus/multus-jsqvf" podUID="bedf08c7-1f93-4931-a7f3-e729e2a137af" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.585390 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/7267c3cc-17ca-4c43-80b8-78cc276b16f0-host-kubelet\") pod \"ovnkube-node-sn4p9\" (UID: \"7267c3cc-17ca-4c43-80b8-78cc276b16f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.585442 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/7267c3cc-17ca-4c43-80b8-78cc276b16f0-host-kubelet\") pod \"ovnkube-node-sn4p9\" (UID: \"7267c3cc-17ca-4c43-80b8-78cc276b16f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.585451 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7267c3cc-17ca-4c43-80b8-78cc276b16f0-etc-openvswitch\") pod \"ovnkube-node-sn4p9\" (UID: \"7267c3cc-17ca-4c43-80b8-78cc276b16f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.585489 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7267c3cc-17ca-4c43-80b8-78cc276b16f0-run-openvswitch\") pod \"ovnkube-node-sn4p9\" (UID: \"7267c3cc-17ca-4c43-80b8-78cc276b16f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.585508 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7267c3cc-17ca-4c43-80b8-78cc276b16f0-etc-openvswitch\") pod \"ovnkube-node-sn4p9\" (UID: \"7267c3cc-17ca-4c43-80b8-78cc276b16f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.585510 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/7267c3cc-17ca-4c43-80b8-78cc276b16f0-host-slash\") pod \"ovnkube-node-sn4p9\" (UID: \"7267c3cc-17ca-4c43-80b8-78cc276b16f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" 
Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.585534 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7267c3cc-17ca-4c43-80b8-78cc276b16f0-run-openvswitch\") pod \"ovnkube-node-sn4p9\" (UID: \"7267c3cc-17ca-4c43-80b8-78cc276b16f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.585541 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/7267c3cc-17ca-4c43-80b8-78cc276b16f0-run-ovn\") pod \"ovnkube-node-sn4p9\" (UID: \"7267c3cc-17ca-4c43-80b8-78cc276b16f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.585569 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/7267c3cc-17ca-4c43-80b8-78cc276b16f0-run-systemd\") pod \"ovnkube-node-sn4p9\" (UID: \"7267c3cc-17ca-4c43-80b8-78cc276b16f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.585577 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/7267c3cc-17ca-4c43-80b8-78cc276b16f0-host-slash\") pod \"ovnkube-node-sn4p9\" (UID: \"7267c3cc-17ca-4c43-80b8-78cc276b16f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.585582 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/7267c3cc-17ca-4c43-80b8-78cc276b16f0-host-cni-bin\") pod \"ovnkube-node-sn4p9\" (UID: \"7267c3cc-17ca-4c43-80b8-78cc276b16f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.585600 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/7267c3cc-17ca-4c43-80b8-78cc276b16f0-host-cni-bin\") pod \"ovnkube-node-sn4p9\" (UID: \"7267c3cc-17ca-4c43-80b8-78cc276b16f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.585609 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7267c3cc-17ca-4c43-80b8-78cc276b16f0-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-sn4p9\" (UID: \"7267c3cc-17ca-4c43-80b8-78cc276b16f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.585620 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/7267c3cc-17ca-4c43-80b8-78cc276b16f0-run-ovn\") pod \"ovnkube-node-sn4p9\" (UID: \"7267c3cc-17ca-4c43-80b8-78cc276b16f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.585631 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7267c3cc-17ca-4c43-80b8-78cc276b16f0-var-lib-openvswitch\") pod \"ovnkube-node-sn4p9\" (UID: \"7267c3cc-17ca-4c43-80b8-78cc276b16f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.585652 4558 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/7267c3cc-17ca-4c43-80b8-78cc276b16f0-run-systemd\") pod \"ovnkube-node-sn4p9\" (UID: \"7267c3cc-17ca-4c43-80b8-78cc276b16f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.585685 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/7267c3cc-17ca-4c43-80b8-78cc276b16f0-systemd-units\") pod \"ovnkube-node-sn4p9\" (UID: \"7267c3cc-17ca-4c43-80b8-78cc276b16f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.585695 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7267c3cc-17ca-4c43-80b8-78cc276b16f0-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-sn4p9\" (UID: \"7267c3cc-17ca-4c43-80b8-78cc276b16f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.585722 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/7267c3cc-17ca-4c43-80b8-78cc276b16f0-var-lib-openvswitch\") pod \"ovnkube-node-sn4p9\" (UID: \"7267c3cc-17ca-4c43-80b8-78cc276b16f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.585724 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/7267c3cc-17ca-4c43-80b8-78cc276b16f0-systemd-units\") pod \"ovnkube-node-sn4p9\" (UID: \"7267c3cc-17ca-4c43-80b8-78cc276b16f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.586320 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/7267c3cc-17ca-4c43-80b8-78cc276b16f0-env-overrides\") pod \"ovnkube-node-sn4p9\" (UID: \"7267c3cc-17ca-4c43-80b8-78cc276b16f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.585703 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/7267c3cc-17ca-4c43-80b8-78cc276b16f0-env-overrides\") pod \"ovnkube-node-sn4p9\" (UID: \"7267c3cc-17ca-4c43-80b8-78cc276b16f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.586403 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/7267c3cc-17ca-4c43-80b8-78cc276b16f0-ovn-node-metrics-cert\") pod \"ovnkube-node-sn4p9\" (UID: \"7267c3cc-17ca-4c43-80b8-78cc276b16f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.586909 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7267c3cc-17ca-4c43-80b8-78cc276b16f0-host-run-ovn-kubernetes\") pod \"ovnkube-node-sn4p9\" (UID: \"7267c3cc-17ca-4c43-80b8-78cc276b16f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.586953 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: 
\"kubernetes.io/host-path/7267c3cc-17ca-4c43-80b8-78cc276b16f0-node-log\") pod \"ovnkube-node-sn4p9\" (UID: \"7267c3cc-17ca-4c43-80b8-78cc276b16f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.586970 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/7267c3cc-17ca-4c43-80b8-78cc276b16f0-host-run-netns\") pod \"ovnkube-node-sn4p9\" (UID: \"7267c3cc-17ca-4c43-80b8-78cc276b16f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.586984 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9rvbg\" (UniqueName: \"kubernetes.io/projected/7267c3cc-17ca-4c43-80b8-78cc276b16f0-kube-api-access-9rvbg\") pod \"ovnkube-node-sn4p9\" (UID: \"7267c3cc-17ca-4c43-80b8-78cc276b16f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.587000 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/7267c3cc-17ca-4c43-80b8-78cc276b16f0-log-socket\") pod \"ovnkube-node-sn4p9\" (UID: \"7267c3cc-17ca-4c43-80b8-78cc276b16f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.587071 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/7267c3cc-17ca-4c43-80b8-78cc276b16f0-ovnkube-script-lib\") pod \"ovnkube-node-sn4p9\" (UID: \"7267c3cc-17ca-4c43-80b8-78cc276b16f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.587106 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/7267c3cc-17ca-4c43-80b8-78cc276b16f0-host-cni-netd\") pod \"ovnkube-node-sn4p9\" (UID: \"7267c3cc-17ca-4c43-80b8-78cc276b16f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.587131 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/7267c3cc-17ca-4c43-80b8-78cc276b16f0-ovnkube-config\") pod \"ovnkube-node-sn4p9\" (UID: \"7267c3cc-17ca-4c43-80b8-78cc276b16f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.587198 4558 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/254129cd-82fc-4162-b671-2434bc9e2972-run-systemd\") on node \"crc\" DevicePath \"\"" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.587210 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rgzcc\" (UniqueName: \"kubernetes.io/projected/254129cd-82fc-4162-b671-2434bc9e2972-kube-api-access-rgzcc\") on node \"crc\" DevicePath \"\"" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.587220 4558 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/254129cd-82fc-4162-b671-2434bc9e2972-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.587615 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: 
\"kubernetes.io/configmap/7267c3cc-17ca-4c43-80b8-78cc276b16f0-ovnkube-config\") pod \"ovnkube-node-sn4p9\" (UID: \"7267c3cc-17ca-4c43-80b8-78cc276b16f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.587669 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7267c3cc-17ca-4c43-80b8-78cc276b16f0-host-run-ovn-kubernetes\") pod \"ovnkube-node-sn4p9\" (UID: \"7267c3cc-17ca-4c43-80b8-78cc276b16f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.587691 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/7267c3cc-17ca-4c43-80b8-78cc276b16f0-node-log\") pod \"ovnkube-node-sn4p9\" (UID: \"7267c3cc-17ca-4c43-80b8-78cc276b16f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.587711 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/7267c3cc-17ca-4c43-80b8-78cc276b16f0-host-run-netns\") pod \"ovnkube-node-sn4p9\" (UID: \"7267c3cc-17ca-4c43-80b8-78cc276b16f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.587905 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/7267c3cc-17ca-4c43-80b8-78cc276b16f0-log-socket\") pod \"ovnkube-node-sn4p9\" (UID: \"7267c3cc-17ca-4c43-80b8-78cc276b16f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.587989 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/7267c3cc-17ca-4c43-80b8-78cc276b16f0-host-cni-netd\") pod \"ovnkube-node-sn4p9\" (UID: \"7267c3cc-17ca-4c43-80b8-78cc276b16f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.588574 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/7267c3cc-17ca-4c43-80b8-78cc276b16f0-ovnkube-script-lib\") pod \"ovnkube-node-sn4p9\" (UID: \"7267c3cc-17ca-4c43-80b8-78cc276b16f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.589420 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/7267c3cc-17ca-4c43-80b8-78cc276b16f0-ovn-node-metrics-cert\") pod \"ovnkube-node-sn4p9\" (UID: \"7267c3cc-17ca-4c43-80b8-78cc276b16f0\") " pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.598582 4558 scope.go:117] "RemoveContainer" containerID="0e9f4ed928d20c9db64a689d7d14df9b81870bb0523cf5098e09d2d85aad9fca" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.598909 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-nv2xw"] Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.601336 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9rvbg\" (UniqueName: \"kubernetes.io/projected/7267c3cc-17ca-4c43-80b8-78cc276b16f0-kube-api-access-9rvbg\") pod \"ovnkube-node-sn4p9\" (UID: \"7267c3cc-17ca-4c43-80b8-78cc276b16f0\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.602376 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-nv2xw"] Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.614369 4558 scope.go:117] "RemoveContainer" containerID="f41d488b0c24e594a2f5cad36b5f8efdb7d867fae0e18a74fe3c396a203d162f" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.625401 4558 scope.go:117] "RemoveContainer" containerID="4264ef8ee49a8b172fb05ac5c9b0bd32350b5d9ae95293079dfeaf44f021b455" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.635724 4558 scope.go:117] "RemoveContainer" containerID="0889cf371f8b06cc2d255c15fda97db1d5000d6f6cff29d2fdcb8667cede8a99" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.646000 4558 scope.go:117] "RemoveContainer" containerID="cd8b93e5559d95511a1285a9aec9a7d72df6c330bcd1e1daf3e93f5494f70eeb" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.655177 4558 scope.go:117] "RemoveContainer" containerID="1a6a184067bde115ef5e09fdd90e7dccfe89a9dc347af450998b91c068e7e803" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.664175 4558 scope.go:117] "RemoveContainer" containerID="e4265d3281e954ee5989ee008da28c6cb9fa29478e5fdf23325521d455304da9" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.667650 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.679365 4558 scope.go:117] "RemoveContainer" containerID="186a1d2caad8865abf7f565405c4a4c077038ccb67ba77927cc2392ec075a811" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.693866 4558 scope.go:117] "RemoveContainer" containerID="aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.708236 4558 scope.go:117] "RemoveContainer" containerID="8d6af51a18940c05083d10a25b2c636978ce51ac88bbfc6ed169b6669fb846a4" Jan 20 16:50:54 crc kubenswrapper[4558]: E0120 16:50:54.708643 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8d6af51a18940c05083d10a25b2c636978ce51ac88bbfc6ed169b6669fb846a4\": container with ID starting with 8d6af51a18940c05083d10a25b2c636978ce51ac88bbfc6ed169b6669fb846a4 not found: ID does not exist" containerID="8d6af51a18940c05083d10a25b2c636978ce51ac88bbfc6ed169b6669fb846a4" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.708694 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8d6af51a18940c05083d10a25b2c636978ce51ac88bbfc6ed169b6669fb846a4"} err="failed to get container status \"8d6af51a18940c05083d10a25b2c636978ce51ac88bbfc6ed169b6669fb846a4\": rpc error: code = NotFound desc = could not find container \"8d6af51a18940c05083d10a25b2c636978ce51ac88bbfc6ed169b6669fb846a4\": container with ID starting with 8d6af51a18940c05083d10a25b2c636978ce51ac88bbfc6ed169b6669fb846a4 not found: ID does not exist" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.708717 4558 scope.go:117] "RemoveContainer" containerID="0e9f4ed928d20c9db64a689d7d14df9b81870bb0523cf5098e09d2d85aad9fca" Jan 20 16:50:54 crc kubenswrapper[4558]: E0120 16:50:54.709060 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0e9f4ed928d20c9db64a689d7d14df9b81870bb0523cf5098e09d2d85aad9fca\": container with ID starting with 
0e9f4ed928d20c9db64a689d7d14df9b81870bb0523cf5098e09d2d85aad9fca not found: ID does not exist" containerID="0e9f4ed928d20c9db64a689d7d14df9b81870bb0523cf5098e09d2d85aad9fca" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.709106 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0e9f4ed928d20c9db64a689d7d14df9b81870bb0523cf5098e09d2d85aad9fca"} err="failed to get container status \"0e9f4ed928d20c9db64a689d7d14df9b81870bb0523cf5098e09d2d85aad9fca\": rpc error: code = NotFound desc = could not find container \"0e9f4ed928d20c9db64a689d7d14df9b81870bb0523cf5098e09d2d85aad9fca\": container with ID starting with 0e9f4ed928d20c9db64a689d7d14df9b81870bb0523cf5098e09d2d85aad9fca not found: ID does not exist" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.709133 4558 scope.go:117] "RemoveContainer" containerID="f41d488b0c24e594a2f5cad36b5f8efdb7d867fae0e18a74fe3c396a203d162f" Jan 20 16:50:54 crc kubenswrapper[4558]: E0120 16:50:54.709591 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f41d488b0c24e594a2f5cad36b5f8efdb7d867fae0e18a74fe3c396a203d162f\": container with ID starting with f41d488b0c24e594a2f5cad36b5f8efdb7d867fae0e18a74fe3c396a203d162f not found: ID does not exist" containerID="f41d488b0c24e594a2f5cad36b5f8efdb7d867fae0e18a74fe3c396a203d162f" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.709611 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f41d488b0c24e594a2f5cad36b5f8efdb7d867fae0e18a74fe3c396a203d162f"} err="failed to get container status \"f41d488b0c24e594a2f5cad36b5f8efdb7d867fae0e18a74fe3c396a203d162f\": rpc error: code = NotFound desc = could not find container \"f41d488b0c24e594a2f5cad36b5f8efdb7d867fae0e18a74fe3c396a203d162f\": container with ID starting with f41d488b0c24e594a2f5cad36b5f8efdb7d867fae0e18a74fe3c396a203d162f not found: ID does not exist" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.709624 4558 scope.go:117] "RemoveContainer" containerID="4264ef8ee49a8b172fb05ac5c9b0bd32350b5d9ae95293079dfeaf44f021b455" Jan 20 16:50:54 crc kubenswrapper[4558]: E0120 16:50:54.709884 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4264ef8ee49a8b172fb05ac5c9b0bd32350b5d9ae95293079dfeaf44f021b455\": container with ID starting with 4264ef8ee49a8b172fb05ac5c9b0bd32350b5d9ae95293079dfeaf44f021b455 not found: ID does not exist" containerID="4264ef8ee49a8b172fb05ac5c9b0bd32350b5d9ae95293079dfeaf44f021b455" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.709901 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4264ef8ee49a8b172fb05ac5c9b0bd32350b5d9ae95293079dfeaf44f021b455"} err="failed to get container status \"4264ef8ee49a8b172fb05ac5c9b0bd32350b5d9ae95293079dfeaf44f021b455\": rpc error: code = NotFound desc = could not find container \"4264ef8ee49a8b172fb05ac5c9b0bd32350b5d9ae95293079dfeaf44f021b455\": container with ID starting with 4264ef8ee49a8b172fb05ac5c9b0bd32350b5d9ae95293079dfeaf44f021b455 not found: ID does not exist" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.709913 4558 scope.go:117] "RemoveContainer" containerID="0889cf371f8b06cc2d255c15fda97db1d5000d6f6cff29d2fdcb8667cede8a99" Jan 20 16:50:54 crc kubenswrapper[4558]: E0120 16:50:54.710199 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc 
error: code = NotFound desc = could not find container \"0889cf371f8b06cc2d255c15fda97db1d5000d6f6cff29d2fdcb8667cede8a99\": container with ID starting with 0889cf371f8b06cc2d255c15fda97db1d5000d6f6cff29d2fdcb8667cede8a99 not found: ID does not exist" containerID="0889cf371f8b06cc2d255c15fda97db1d5000d6f6cff29d2fdcb8667cede8a99" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.710254 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0889cf371f8b06cc2d255c15fda97db1d5000d6f6cff29d2fdcb8667cede8a99"} err="failed to get container status \"0889cf371f8b06cc2d255c15fda97db1d5000d6f6cff29d2fdcb8667cede8a99\": rpc error: code = NotFound desc = could not find container \"0889cf371f8b06cc2d255c15fda97db1d5000d6f6cff29d2fdcb8667cede8a99\": container with ID starting with 0889cf371f8b06cc2d255c15fda97db1d5000d6f6cff29d2fdcb8667cede8a99 not found: ID does not exist" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.710295 4558 scope.go:117] "RemoveContainer" containerID="cd8b93e5559d95511a1285a9aec9a7d72df6c330bcd1e1daf3e93f5494f70eeb" Jan 20 16:50:54 crc kubenswrapper[4558]: E0120 16:50:54.710632 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cd8b93e5559d95511a1285a9aec9a7d72df6c330bcd1e1daf3e93f5494f70eeb\": container with ID starting with cd8b93e5559d95511a1285a9aec9a7d72df6c330bcd1e1daf3e93f5494f70eeb not found: ID does not exist" containerID="cd8b93e5559d95511a1285a9aec9a7d72df6c330bcd1e1daf3e93f5494f70eeb" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.710671 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cd8b93e5559d95511a1285a9aec9a7d72df6c330bcd1e1daf3e93f5494f70eeb"} err="failed to get container status \"cd8b93e5559d95511a1285a9aec9a7d72df6c330bcd1e1daf3e93f5494f70eeb\": rpc error: code = NotFound desc = could not find container \"cd8b93e5559d95511a1285a9aec9a7d72df6c330bcd1e1daf3e93f5494f70eeb\": container with ID starting with cd8b93e5559d95511a1285a9aec9a7d72df6c330bcd1e1daf3e93f5494f70eeb not found: ID does not exist" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.710689 4558 scope.go:117] "RemoveContainer" containerID="1a6a184067bde115ef5e09fdd90e7dccfe89a9dc347af450998b91c068e7e803" Jan 20 16:50:54 crc kubenswrapper[4558]: E0120 16:50:54.710956 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1a6a184067bde115ef5e09fdd90e7dccfe89a9dc347af450998b91c068e7e803\": container with ID starting with 1a6a184067bde115ef5e09fdd90e7dccfe89a9dc347af450998b91c068e7e803 not found: ID does not exist" containerID="1a6a184067bde115ef5e09fdd90e7dccfe89a9dc347af450998b91c068e7e803" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.710978 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1a6a184067bde115ef5e09fdd90e7dccfe89a9dc347af450998b91c068e7e803"} err="failed to get container status \"1a6a184067bde115ef5e09fdd90e7dccfe89a9dc347af450998b91c068e7e803\": rpc error: code = NotFound desc = could not find container \"1a6a184067bde115ef5e09fdd90e7dccfe89a9dc347af450998b91c068e7e803\": container with ID starting with 1a6a184067bde115ef5e09fdd90e7dccfe89a9dc347af450998b91c068e7e803 not found: ID does not exist" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.710991 4558 scope.go:117] "RemoveContainer" 
containerID="e4265d3281e954ee5989ee008da28c6cb9fa29478e5fdf23325521d455304da9" Jan 20 16:50:54 crc kubenswrapper[4558]: E0120 16:50:54.711294 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e4265d3281e954ee5989ee008da28c6cb9fa29478e5fdf23325521d455304da9\": container with ID starting with e4265d3281e954ee5989ee008da28c6cb9fa29478e5fdf23325521d455304da9 not found: ID does not exist" containerID="e4265d3281e954ee5989ee008da28c6cb9fa29478e5fdf23325521d455304da9" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.711317 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e4265d3281e954ee5989ee008da28c6cb9fa29478e5fdf23325521d455304da9"} err="failed to get container status \"e4265d3281e954ee5989ee008da28c6cb9fa29478e5fdf23325521d455304da9\": rpc error: code = NotFound desc = could not find container \"e4265d3281e954ee5989ee008da28c6cb9fa29478e5fdf23325521d455304da9\": container with ID starting with e4265d3281e954ee5989ee008da28c6cb9fa29478e5fdf23325521d455304da9 not found: ID does not exist" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.711331 4558 scope.go:117] "RemoveContainer" containerID="186a1d2caad8865abf7f565405c4a4c077038ccb67ba77927cc2392ec075a811" Jan 20 16:50:54 crc kubenswrapper[4558]: E0120 16:50:54.711562 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"186a1d2caad8865abf7f565405c4a4c077038ccb67ba77927cc2392ec075a811\": container with ID starting with 186a1d2caad8865abf7f565405c4a4c077038ccb67ba77927cc2392ec075a811 not found: ID does not exist" containerID="186a1d2caad8865abf7f565405c4a4c077038ccb67ba77927cc2392ec075a811" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.711577 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"186a1d2caad8865abf7f565405c4a4c077038ccb67ba77927cc2392ec075a811"} err="failed to get container status \"186a1d2caad8865abf7f565405c4a4c077038ccb67ba77927cc2392ec075a811\": rpc error: code = NotFound desc = could not find container \"186a1d2caad8865abf7f565405c4a4c077038ccb67ba77927cc2392ec075a811\": container with ID starting with 186a1d2caad8865abf7f565405c4a4c077038ccb67ba77927cc2392ec075a811 not found: ID does not exist" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.711588 4558 scope.go:117] "RemoveContainer" containerID="aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e" Jan 20 16:50:54 crc kubenswrapper[4558]: E0120 16:50:54.711844 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\": container with ID starting with aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e not found: ID does not exist" containerID="aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.711863 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e"} err="failed to get container status \"aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\": rpc error: code = NotFound desc = could not find container \"aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\": container with ID starting with 
aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e not found: ID does not exist" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.711879 4558 scope.go:117] "RemoveContainer" containerID="8d6af51a18940c05083d10a25b2c636978ce51ac88bbfc6ed169b6669fb846a4" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.712254 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8d6af51a18940c05083d10a25b2c636978ce51ac88bbfc6ed169b6669fb846a4"} err="failed to get container status \"8d6af51a18940c05083d10a25b2c636978ce51ac88bbfc6ed169b6669fb846a4\": rpc error: code = NotFound desc = could not find container \"8d6af51a18940c05083d10a25b2c636978ce51ac88bbfc6ed169b6669fb846a4\": container with ID starting with 8d6af51a18940c05083d10a25b2c636978ce51ac88bbfc6ed169b6669fb846a4 not found: ID does not exist" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.712272 4558 scope.go:117] "RemoveContainer" containerID="0e9f4ed928d20c9db64a689d7d14df9b81870bb0523cf5098e09d2d85aad9fca" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.712514 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0e9f4ed928d20c9db64a689d7d14df9b81870bb0523cf5098e09d2d85aad9fca"} err="failed to get container status \"0e9f4ed928d20c9db64a689d7d14df9b81870bb0523cf5098e09d2d85aad9fca\": rpc error: code = NotFound desc = could not find container \"0e9f4ed928d20c9db64a689d7d14df9b81870bb0523cf5098e09d2d85aad9fca\": container with ID starting with 0e9f4ed928d20c9db64a689d7d14df9b81870bb0523cf5098e09d2d85aad9fca not found: ID does not exist" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.712534 4558 scope.go:117] "RemoveContainer" containerID="f41d488b0c24e594a2f5cad36b5f8efdb7d867fae0e18a74fe3c396a203d162f" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.712766 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f41d488b0c24e594a2f5cad36b5f8efdb7d867fae0e18a74fe3c396a203d162f"} err="failed to get container status \"f41d488b0c24e594a2f5cad36b5f8efdb7d867fae0e18a74fe3c396a203d162f\": rpc error: code = NotFound desc = could not find container \"f41d488b0c24e594a2f5cad36b5f8efdb7d867fae0e18a74fe3c396a203d162f\": container with ID starting with f41d488b0c24e594a2f5cad36b5f8efdb7d867fae0e18a74fe3c396a203d162f not found: ID does not exist" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.712782 4558 scope.go:117] "RemoveContainer" containerID="4264ef8ee49a8b172fb05ac5c9b0bd32350b5d9ae95293079dfeaf44f021b455" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.713006 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4264ef8ee49a8b172fb05ac5c9b0bd32350b5d9ae95293079dfeaf44f021b455"} err="failed to get container status \"4264ef8ee49a8b172fb05ac5c9b0bd32350b5d9ae95293079dfeaf44f021b455\": rpc error: code = NotFound desc = could not find container \"4264ef8ee49a8b172fb05ac5c9b0bd32350b5d9ae95293079dfeaf44f021b455\": container with ID starting with 4264ef8ee49a8b172fb05ac5c9b0bd32350b5d9ae95293079dfeaf44f021b455 not found: ID does not exist" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.713023 4558 scope.go:117] "RemoveContainer" containerID="0889cf371f8b06cc2d255c15fda97db1d5000d6f6cff29d2fdcb8667cede8a99" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.713320 4558 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"0889cf371f8b06cc2d255c15fda97db1d5000d6f6cff29d2fdcb8667cede8a99"} err="failed to get container status \"0889cf371f8b06cc2d255c15fda97db1d5000d6f6cff29d2fdcb8667cede8a99\": rpc error: code = NotFound desc = could not find container \"0889cf371f8b06cc2d255c15fda97db1d5000d6f6cff29d2fdcb8667cede8a99\": container with ID starting with 0889cf371f8b06cc2d255c15fda97db1d5000d6f6cff29d2fdcb8667cede8a99 not found: ID does not exist" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.713339 4558 scope.go:117] "RemoveContainer" containerID="cd8b93e5559d95511a1285a9aec9a7d72df6c330bcd1e1daf3e93f5494f70eeb" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.713644 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cd8b93e5559d95511a1285a9aec9a7d72df6c330bcd1e1daf3e93f5494f70eeb"} err="failed to get container status \"cd8b93e5559d95511a1285a9aec9a7d72df6c330bcd1e1daf3e93f5494f70eeb\": rpc error: code = NotFound desc = could not find container \"cd8b93e5559d95511a1285a9aec9a7d72df6c330bcd1e1daf3e93f5494f70eeb\": container with ID starting with cd8b93e5559d95511a1285a9aec9a7d72df6c330bcd1e1daf3e93f5494f70eeb not found: ID does not exist" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.713677 4558 scope.go:117] "RemoveContainer" containerID="1a6a184067bde115ef5e09fdd90e7dccfe89a9dc347af450998b91c068e7e803" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.714188 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1a6a184067bde115ef5e09fdd90e7dccfe89a9dc347af450998b91c068e7e803"} err="failed to get container status \"1a6a184067bde115ef5e09fdd90e7dccfe89a9dc347af450998b91c068e7e803\": rpc error: code = NotFound desc = could not find container \"1a6a184067bde115ef5e09fdd90e7dccfe89a9dc347af450998b91c068e7e803\": container with ID starting with 1a6a184067bde115ef5e09fdd90e7dccfe89a9dc347af450998b91c068e7e803 not found: ID does not exist" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.714226 4558 scope.go:117] "RemoveContainer" containerID="e4265d3281e954ee5989ee008da28c6cb9fa29478e5fdf23325521d455304da9" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.714522 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e4265d3281e954ee5989ee008da28c6cb9fa29478e5fdf23325521d455304da9"} err="failed to get container status \"e4265d3281e954ee5989ee008da28c6cb9fa29478e5fdf23325521d455304da9\": rpc error: code = NotFound desc = could not find container \"e4265d3281e954ee5989ee008da28c6cb9fa29478e5fdf23325521d455304da9\": container with ID starting with e4265d3281e954ee5989ee008da28c6cb9fa29478e5fdf23325521d455304da9 not found: ID does not exist" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.714544 4558 scope.go:117] "RemoveContainer" containerID="186a1d2caad8865abf7f565405c4a4c077038ccb67ba77927cc2392ec075a811" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.714746 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"186a1d2caad8865abf7f565405c4a4c077038ccb67ba77927cc2392ec075a811"} err="failed to get container status \"186a1d2caad8865abf7f565405c4a4c077038ccb67ba77927cc2392ec075a811\": rpc error: code = NotFound desc = could not find container \"186a1d2caad8865abf7f565405c4a4c077038ccb67ba77927cc2392ec075a811\": container with ID starting with 186a1d2caad8865abf7f565405c4a4c077038ccb67ba77927cc2392ec075a811 not found: ID does not exist" Jan 
20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.714761 4558 scope.go:117] "RemoveContainer" containerID="aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.715018 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e"} err="failed to get container status \"aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\": rpc error: code = NotFound desc = could not find container \"aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\": container with ID starting with aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e not found: ID does not exist" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.715037 4558 scope.go:117] "RemoveContainer" containerID="8d6af51a18940c05083d10a25b2c636978ce51ac88bbfc6ed169b6669fb846a4" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.715264 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8d6af51a18940c05083d10a25b2c636978ce51ac88bbfc6ed169b6669fb846a4"} err="failed to get container status \"8d6af51a18940c05083d10a25b2c636978ce51ac88bbfc6ed169b6669fb846a4\": rpc error: code = NotFound desc = could not find container \"8d6af51a18940c05083d10a25b2c636978ce51ac88bbfc6ed169b6669fb846a4\": container with ID starting with 8d6af51a18940c05083d10a25b2c636978ce51ac88bbfc6ed169b6669fb846a4 not found: ID does not exist" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.715289 4558 scope.go:117] "RemoveContainer" containerID="0e9f4ed928d20c9db64a689d7d14df9b81870bb0523cf5098e09d2d85aad9fca" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.715553 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0e9f4ed928d20c9db64a689d7d14df9b81870bb0523cf5098e09d2d85aad9fca"} err="failed to get container status \"0e9f4ed928d20c9db64a689d7d14df9b81870bb0523cf5098e09d2d85aad9fca\": rpc error: code = NotFound desc = could not find container \"0e9f4ed928d20c9db64a689d7d14df9b81870bb0523cf5098e09d2d85aad9fca\": container with ID starting with 0e9f4ed928d20c9db64a689d7d14df9b81870bb0523cf5098e09d2d85aad9fca not found: ID does not exist" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.715575 4558 scope.go:117] "RemoveContainer" containerID="f41d488b0c24e594a2f5cad36b5f8efdb7d867fae0e18a74fe3c396a203d162f" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.715802 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f41d488b0c24e594a2f5cad36b5f8efdb7d867fae0e18a74fe3c396a203d162f"} err="failed to get container status \"f41d488b0c24e594a2f5cad36b5f8efdb7d867fae0e18a74fe3c396a203d162f\": rpc error: code = NotFound desc = could not find container \"f41d488b0c24e594a2f5cad36b5f8efdb7d867fae0e18a74fe3c396a203d162f\": container with ID starting with f41d488b0c24e594a2f5cad36b5f8efdb7d867fae0e18a74fe3c396a203d162f not found: ID does not exist" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.715821 4558 scope.go:117] "RemoveContainer" containerID="4264ef8ee49a8b172fb05ac5c9b0bd32350b5d9ae95293079dfeaf44f021b455" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.716041 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4264ef8ee49a8b172fb05ac5c9b0bd32350b5d9ae95293079dfeaf44f021b455"} err="failed to get container status 
\"4264ef8ee49a8b172fb05ac5c9b0bd32350b5d9ae95293079dfeaf44f021b455\": rpc error: code = NotFound desc = could not find container \"4264ef8ee49a8b172fb05ac5c9b0bd32350b5d9ae95293079dfeaf44f021b455\": container with ID starting with 4264ef8ee49a8b172fb05ac5c9b0bd32350b5d9ae95293079dfeaf44f021b455 not found: ID does not exist" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.716063 4558 scope.go:117] "RemoveContainer" containerID="0889cf371f8b06cc2d255c15fda97db1d5000d6f6cff29d2fdcb8667cede8a99" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.716309 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0889cf371f8b06cc2d255c15fda97db1d5000d6f6cff29d2fdcb8667cede8a99"} err="failed to get container status \"0889cf371f8b06cc2d255c15fda97db1d5000d6f6cff29d2fdcb8667cede8a99\": rpc error: code = NotFound desc = could not find container \"0889cf371f8b06cc2d255c15fda97db1d5000d6f6cff29d2fdcb8667cede8a99\": container with ID starting with 0889cf371f8b06cc2d255c15fda97db1d5000d6f6cff29d2fdcb8667cede8a99 not found: ID does not exist" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.716339 4558 scope.go:117] "RemoveContainer" containerID="cd8b93e5559d95511a1285a9aec9a7d72df6c330bcd1e1daf3e93f5494f70eeb" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.716596 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cd8b93e5559d95511a1285a9aec9a7d72df6c330bcd1e1daf3e93f5494f70eeb"} err="failed to get container status \"cd8b93e5559d95511a1285a9aec9a7d72df6c330bcd1e1daf3e93f5494f70eeb\": rpc error: code = NotFound desc = could not find container \"cd8b93e5559d95511a1285a9aec9a7d72df6c330bcd1e1daf3e93f5494f70eeb\": container with ID starting with cd8b93e5559d95511a1285a9aec9a7d72df6c330bcd1e1daf3e93f5494f70eeb not found: ID does not exist" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.716626 4558 scope.go:117] "RemoveContainer" containerID="1a6a184067bde115ef5e09fdd90e7dccfe89a9dc347af450998b91c068e7e803" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.716912 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1a6a184067bde115ef5e09fdd90e7dccfe89a9dc347af450998b91c068e7e803"} err="failed to get container status \"1a6a184067bde115ef5e09fdd90e7dccfe89a9dc347af450998b91c068e7e803\": rpc error: code = NotFound desc = could not find container \"1a6a184067bde115ef5e09fdd90e7dccfe89a9dc347af450998b91c068e7e803\": container with ID starting with 1a6a184067bde115ef5e09fdd90e7dccfe89a9dc347af450998b91c068e7e803 not found: ID does not exist" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.716934 4558 scope.go:117] "RemoveContainer" containerID="e4265d3281e954ee5989ee008da28c6cb9fa29478e5fdf23325521d455304da9" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.717156 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e4265d3281e954ee5989ee008da28c6cb9fa29478e5fdf23325521d455304da9"} err="failed to get container status \"e4265d3281e954ee5989ee008da28c6cb9fa29478e5fdf23325521d455304da9\": rpc error: code = NotFound desc = could not find container \"e4265d3281e954ee5989ee008da28c6cb9fa29478e5fdf23325521d455304da9\": container with ID starting with e4265d3281e954ee5989ee008da28c6cb9fa29478e5fdf23325521d455304da9 not found: ID does not exist" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.717208 4558 scope.go:117] "RemoveContainer" 
containerID="186a1d2caad8865abf7f565405c4a4c077038ccb67ba77927cc2392ec075a811" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.717455 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"186a1d2caad8865abf7f565405c4a4c077038ccb67ba77927cc2392ec075a811"} err="failed to get container status \"186a1d2caad8865abf7f565405c4a4c077038ccb67ba77927cc2392ec075a811\": rpc error: code = NotFound desc = could not find container \"186a1d2caad8865abf7f565405c4a4c077038ccb67ba77927cc2392ec075a811\": container with ID starting with 186a1d2caad8865abf7f565405c4a4c077038ccb67ba77927cc2392ec075a811 not found: ID does not exist" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.717475 4558 scope.go:117] "RemoveContainer" containerID="aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.717698 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e"} err="failed to get container status \"aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\": rpc error: code = NotFound desc = could not find container \"aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\": container with ID starting with aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e not found: ID does not exist" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.717715 4558 scope.go:117] "RemoveContainer" containerID="8d6af51a18940c05083d10a25b2c636978ce51ac88bbfc6ed169b6669fb846a4" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.717939 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8d6af51a18940c05083d10a25b2c636978ce51ac88bbfc6ed169b6669fb846a4"} err="failed to get container status \"8d6af51a18940c05083d10a25b2c636978ce51ac88bbfc6ed169b6669fb846a4\": rpc error: code = NotFound desc = could not find container \"8d6af51a18940c05083d10a25b2c636978ce51ac88bbfc6ed169b6669fb846a4\": container with ID starting with 8d6af51a18940c05083d10a25b2c636978ce51ac88bbfc6ed169b6669fb846a4 not found: ID does not exist" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.717958 4558 scope.go:117] "RemoveContainer" containerID="0e9f4ed928d20c9db64a689d7d14df9b81870bb0523cf5098e09d2d85aad9fca" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.718185 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0e9f4ed928d20c9db64a689d7d14df9b81870bb0523cf5098e09d2d85aad9fca"} err="failed to get container status \"0e9f4ed928d20c9db64a689d7d14df9b81870bb0523cf5098e09d2d85aad9fca\": rpc error: code = NotFound desc = could not find container \"0e9f4ed928d20c9db64a689d7d14df9b81870bb0523cf5098e09d2d85aad9fca\": container with ID starting with 0e9f4ed928d20c9db64a689d7d14df9b81870bb0523cf5098e09d2d85aad9fca not found: ID does not exist" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.718201 4558 scope.go:117] "RemoveContainer" containerID="f41d488b0c24e594a2f5cad36b5f8efdb7d867fae0e18a74fe3c396a203d162f" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.718482 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f41d488b0c24e594a2f5cad36b5f8efdb7d867fae0e18a74fe3c396a203d162f"} err="failed to get container status \"f41d488b0c24e594a2f5cad36b5f8efdb7d867fae0e18a74fe3c396a203d162f\": rpc error: code = NotFound desc = could not find 
container \"f41d488b0c24e594a2f5cad36b5f8efdb7d867fae0e18a74fe3c396a203d162f\": container with ID starting with f41d488b0c24e594a2f5cad36b5f8efdb7d867fae0e18a74fe3c396a203d162f not found: ID does not exist" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.718499 4558 scope.go:117] "RemoveContainer" containerID="4264ef8ee49a8b172fb05ac5c9b0bd32350b5d9ae95293079dfeaf44f021b455" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.718698 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4264ef8ee49a8b172fb05ac5c9b0bd32350b5d9ae95293079dfeaf44f021b455"} err="failed to get container status \"4264ef8ee49a8b172fb05ac5c9b0bd32350b5d9ae95293079dfeaf44f021b455\": rpc error: code = NotFound desc = could not find container \"4264ef8ee49a8b172fb05ac5c9b0bd32350b5d9ae95293079dfeaf44f021b455\": container with ID starting with 4264ef8ee49a8b172fb05ac5c9b0bd32350b5d9ae95293079dfeaf44f021b455 not found: ID does not exist" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.718716 4558 scope.go:117] "RemoveContainer" containerID="0889cf371f8b06cc2d255c15fda97db1d5000d6f6cff29d2fdcb8667cede8a99" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.718949 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0889cf371f8b06cc2d255c15fda97db1d5000d6f6cff29d2fdcb8667cede8a99"} err="failed to get container status \"0889cf371f8b06cc2d255c15fda97db1d5000d6f6cff29d2fdcb8667cede8a99\": rpc error: code = NotFound desc = could not find container \"0889cf371f8b06cc2d255c15fda97db1d5000d6f6cff29d2fdcb8667cede8a99\": container with ID starting with 0889cf371f8b06cc2d255c15fda97db1d5000d6f6cff29d2fdcb8667cede8a99 not found: ID does not exist" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.718968 4558 scope.go:117] "RemoveContainer" containerID="cd8b93e5559d95511a1285a9aec9a7d72df6c330bcd1e1daf3e93f5494f70eeb" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.719196 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cd8b93e5559d95511a1285a9aec9a7d72df6c330bcd1e1daf3e93f5494f70eeb"} err="failed to get container status \"cd8b93e5559d95511a1285a9aec9a7d72df6c330bcd1e1daf3e93f5494f70eeb\": rpc error: code = NotFound desc = could not find container \"cd8b93e5559d95511a1285a9aec9a7d72df6c330bcd1e1daf3e93f5494f70eeb\": container with ID starting with cd8b93e5559d95511a1285a9aec9a7d72df6c330bcd1e1daf3e93f5494f70eeb not found: ID does not exist" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.719212 4558 scope.go:117] "RemoveContainer" containerID="1a6a184067bde115ef5e09fdd90e7dccfe89a9dc347af450998b91c068e7e803" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.719391 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1a6a184067bde115ef5e09fdd90e7dccfe89a9dc347af450998b91c068e7e803"} err="failed to get container status \"1a6a184067bde115ef5e09fdd90e7dccfe89a9dc347af450998b91c068e7e803\": rpc error: code = NotFound desc = could not find container \"1a6a184067bde115ef5e09fdd90e7dccfe89a9dc347af450998b91c068e7e803\": container with ID starting with 1a6a184067bde115ef5e09fdd90e7dccfe89a9dc347af450998b91c068e7e803 not found: ID does not exist" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.719407 4558 scope.go:117] "RemoveContainer" containerID="e4265d3281e954ee5989ee008da28c6cb9fa29478e5fdf23325521d455304da9" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.719642 4558 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e4265d3281e954ee5989ee008da28c6cb9fa29478e5fdf23325521d455304da9"} err="failed to get container status \"e4265d3281e954ee5989ee008da28c6cb9fa29478e5fdf23325521d455304da9\": rpc error: code = NotFound desc = could not find container \"e4265d3281e954ee5989ee008da28c6cb9fa29478e5fdf23325521d455304da9\": container with ID starting with e4265d3281e954ee5989ee008da28c6cb9fa29478e5fdf23325521d455304da9 not found: ID does not exist" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.719670 4558 scope.go:117] "RemoveContainer" containerID="186a1d2caad8865abf7f565405c4a4c077038ccb67ba77927cc2392ec075a811" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.720688 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"186a1d2caad8865abf7f565405c4a4c077038ccb67ba77927cc2392ec075a811"} err="failed to get container status \"186a1d2caad8865abf7f565405c4a4c077038ccb67ba77927cc2392ec075a811\": rpc error: code = NotFound desc = could not find container \"186a1d2caad8865abf7f565405c4a4c077038ccb67ba77927cc2392ec075a811\": container with ID starting with 186a1d2caad8865abf7f565405c4a4c077038ccb67ba77927cc2392ec075a811 not found: ID does not exist" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.720726 4558 scope.go:117] "RemoveContainer" containerID="aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.721065 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e"} err="failed to get container status \"aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\": rpc error: code = NotFound desc = could not find container \"aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e\": container with ID starting with aaa12ad387fc5d0780f6602a74013bb70c118f35c3a16eb4d136df7b0c48db9e not found: ID does not exist" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.721083 4558 scope.go:117] "RemoveContainer" containerID="8d6af51a18940c05083d10a25b2c636978ce51ac88bbfc6ed169b6669fb846a4" Jan 20 16:50:54 crc kubenswrapper[4558]: I0120 16:50:54.721336 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8d6af51a18940c05083d10a25b2c636978ce51ac88bbfc6ed169b6669fb846a4"} err="failed to get container status \"8d6af51a18940c05083d10a25b2c636978ce51ac88bbfc6ed169b6669fb846a4\": rpc error: code = NotFound desc = could not find container \"8d6af51a18940c05083d10a25b2c636978ce51ac88bbfc6ed169b6669fb846a4\": container with ID starting with 8d6af51a18940c05083d10a25b2c636978ce51ac88bbfc6ed169b6669fb846a4 not found: ID does not exist" Jan 20 16:50:55 crc kubenswrapper[4558]: I0120 16:50:55.589042 4558 generic.go:334] "Generic (PLEG): container finished" podID="7267c3cc-17ca-4c43-80b8-78cc276b16f0" containerID="c8d675b409f18c458bfe50b0328bdb9e083c1c5cdc778298c46a7a83a1a34def" exitCode=0 Jan 20 16:50:55 crc kubenswrapper[4558]: I0120 16:50:55.589124 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" event={"ID":"7267c3cc-17ca-4c43-80b8-78cc276b16f0","Type":"ContainerDied","Data":"c8d675b409f18c458bfe50b0328bdb9e083c1c5cdc778298c46a7a83a1a34def"} Jan 20 16:50:55 crc kubenswrapper[4558]: I0120 16:50:55.589600 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" event={"ID":"7267c3cc-17ca-4c43-80b8-78cc276b16f0","Type":"ContainerStarted","Data":"a172915a098a4c047d211eeaf2564d99e84c3c99a0225a87869d8fe7be69042b"} Jan 20 16:50:56 crc kubenswrapper[4558]: I0120 16:50:56.570875 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="254129cd-82fc-4162-b671-2434bc9e2972" path="/var/lib/kubelet/pods/254129cd-82fc-4162-b671-2434bc9e2972/volumes" Jan 20 16:50:56 crc kubenswrapper[4558]: I0120 16:50:56.599525 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" event={"ID":"7267c3cc-17ca-4c43-80b8-78cc276b16f0","Type":"ContainerStarted","Data":"c547ed9213388d58b7e3a2e33492f519f21439edb033dcb4bdb1a8d670335c7c"} Jan 20 16:50:56 crc kubenswrapper[4558]: I0120 16:50:56.599572 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" event={"ID":"7267c3cc-17ca-4c43-80b8-78cc276b16f0","Type":"ContainerStarted","Data":"43f3178f04b65f53ba349221c443ae0ebe0f0dde15eb2a535a8fe236c5077062"} Jan 20 16:50:56 crc kubenswrapper[4558]: I0120 16:50:56.599583 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" event={"ID":"7267c3cc-17ca-4c43-80b8-78cc276b16f0","Type":"ContainerStarted","Data":"44aeb128dad69d28b7ec9763b5d0d08a7205b4d55e022798b0ebd8584e6dd8d3"} Jan 20 16:50:56 crc kubenswrapper[4558]: I0120 16:50:56.599593 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" event={"ID":"7267c3cc-17ca-4c43-80b8-78cc276b16f0","Type":"ContainerStarted","Data":"8fd2900abda97a10ad6eff50227200d1cd46d221b79fbd0570962efaf5facfdf"} Jan 20 16:50:56 crc kubenswrapper[4558]: I0120 16:50:56.599601 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" event={"ID":"7267c3cc-17ca-4c43-80b8-78cc276b16f0","Type":"ContainerStarted","Data":"a913df4c468a17eafec36fd280c3854a62d796927d844772be1263d27520fb0c"} Jan 20 16:50:56 crc kubenswrapper[4558]: I0120 16:50:56.599610 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" event={"ID":"7267c3cc-17ca-4c43-80b8-78cc276b16f0","Type":"ContainerStarted","Data":"e372175f165f001b906cd3c19082e2023a901aa4328c990a8833512779ceeefc"} Jan 20 16:50:57 crc kubenswrapper[4558]: I0120 16:50:57.329610 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 16:50:57 crc kubenswrapper[4558]: I0120 16:50:57.329717 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 16:50:58 crc kubenswrapper[4558]: I0120 16:50:58.610388 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" event={"ID":"7267c3cc-17ca-4c43-80b8-78cc276b16f0","Type":"ContainerStarted","Data":"7bd80e401e8ed0fc7cb6828b3770751f52062972bdaff905bf2228998a8970ee"} Jan 20 16:51:00 crc kubenswrapper[4558]: I0120 16:51:00.619804 4558 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" event={"ID":"7267c3cc-17ca-4c43-80b8-78cc276b16f0","Type":"ContainerStarted","Data":"3ecdffde35dc71fbda19a6b6826c753232745ad6ac99b081e4ccc85158b40003"} Jan 20 16:51:00 crc kubenswrapper[4558]: I0120 16:51:00.620185 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" Jan 20 16:51:00 crc kubenswrapper[4558]: I0120 16:51:00.620202 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" Jan 20 16:51:00 crc kubenswrapper[4558]: I0120 16:51:00.620211 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" Jan 20 16:51:00 crc kubenswrapper[4558]: I0120 16:51:00.640188 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" Jan 20 16:51:00 crc kubenswrapper[4558]: I0120 16:51:00.640940 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" Jan 20 16:51:00 crc kubenswrapper[4558]: I0120 16:51:00.645184 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" podStartSLOduration=6.645151689 podStartE2EDuration="6.645151689s" podCreationTimestamp="2026-01-20 16:50:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:51:00.645107396 +0000 UTC m=+554.405445363" watchObservedRunningTime="2026-01-20 16:51:00.645151689 +0000 UTC m=+554.405489655" Jan 20 16:51:05 crc kubenswrapper[4558]: I0120 16:51:05.566230 4558 scope.go:117] "RemoveContainer" containerID="6ff161b5eb2dde97ccaf09d4e6df49b859981accd8ab7643d47f6900019c3dae" Jan 20 16:51:05 crc kubenswrapper[4558]: E0120 16:51:05.566797 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-jsqvf_openshift-multus(bedf08c7-1f93-4931-a7f3-e729e2a137af)\"" pod="openshift-multus/multus-jsqvf" podUID="bedf08c7-1f93-4931-a7f3-e729e2a137af" Jan 20 16:51:17 crc kubenswrapper[4558]: I0120 16:51:17.566052 4558 scope.go:117] "RemoveContainer" containerID="6ff161b5eb2dde97ccaf09d4e6df49b859981accd8ab7643d47f6900019c3dae" Jan 20 16:51:17 crc kubenswrapper[4558]: I0120 16:51:17.682490 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-jsqvf_bedf08c7-1f93-4931-a7f3-e729e2a137af/kube-multus/2.log" Jan 20 16:51:17 crc kubenswrapper[4558]: I0120 16:51:17.682888 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-jsqvf_bedf08c7-1f93-4931-a7f3-e729e2a137af/kube-multus/1.log" Jan 20 16:51:17 crc kubenswrapper[4558]: I0120 16:51:17.682930 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-jsqvf" event={"ID":"bedf08c7-1f93-4931-a7f3-e729e2a137af","Type":"ContainerStarted","Data":"d1349e43ce6c2b46cc4468c1288d4c33bc8b9d5fd8b8e8369850589d7f3c41d6"} Jan 20 16:51:23 crc kubenswrapper[4558]: I0120 16:51:23.188448 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713dqrqd"] Jan 20 16:51:23 crc kubenswrapper[4558]: I0120 16:51:23.189633 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713dqrqd" Jan 20 16:51:23 crc kubenswrapper[4558]: I0120 16:51:23.191482 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Jan 20 16:51:23 crc kubenswrapper[4558]: I0120 16:51:23.197656 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713dqrqd"] Jan 20 16:51:23 crc kubenswrapper[4558]: I0120 16:51:23.291332 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kxll2\" (UniqueName: \"kubernetes.io/projected/3d57036a-69b1-4789-87cf-8d1cfe930ba4-kube-api-access-kxll2\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713dqrqd\" (UID: \"3d57036a-69b1-4789-87cf-8d1cfe930ba4\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713dqrqd" Jan 20 16:51:23 crc kubenswrapper[4558]: I0120 16:51:23.291434 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3d57036a-69b1-4789-87cf-8d1cfe930ba4-bundle\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713dqrqd\" (UID: \"3d57036a-69b1-4789-87cf-8d1cfe930ba4\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713dqrqd" Jan 20 16:51:23 crc kubenswrapper[4558]: I0120 16:51:23.291519 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3d57036a-69b1-4789-87cf-8d1cfe930ba4-util\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713dqrqd\" (UID: \"3d57036a-69b1-4789-87cf-8d1cfe930ba4\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713dqrqd" Jan 20 16:51:23 crc kubenswrapper[4558]: I0120 16:51:23.392967 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3d57036a-69b1-4789-87cf-8d1cfe930ba4-util\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713dqrqd\" (UID: \"3d57036a-69b1-4789-87cf-8d1cfe930ba4\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713dqrqd" Jan 20 16:51:23 crc kubenswrapper[4558]: I0120 16:51:23.393090 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kxll2\" (UniqueName: \"kubernetes.io/projected/3d57036a-69b1-4789-87cf-8d1cfe930ba4-kube-api-access-kxll2\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713dqrqd\" (UID: \"3d57036a-69b1-4789-87cf-8d1cfe930ba4\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713dqrqd" Jan 20 16:51:23 crc kubenswrapper[4558]: I0120 16:51:23.393118 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3d57036a-69b1-4789-87cf-8d1cfe930ba4-bundle\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713dqrqd\" (UID: \"3d57036a-69b1-4789-87cf-8d1cfe930ba4\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713dqrqd" Jan 20 16:51:23 crc kubenswrapper[4558]: I0120 16:51:23.393581 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/3d57036a-69b1-4789-87cf-8d1cfe930ba4-bundle\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713dqrqd\" (UID: \"3d57036a-69b1-4789-87cf-8d1cfe930ba4\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713dqrqd" Jan 20 16:51:23 crc kubenswrapper[4558]: I0120 16:51:23.393597 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3d57036a-69b1-4789-87cf-8d1cfe930ba4-util\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713dqrqd\" (UID: \"3d57036a-69b1-4789-87cf-8d1cfe930ba4\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713dqrqd" Jan 20 16:51:23 crc kubenswrapper[4558]: I0120 16:51:23.411874 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kxll2\" (UniqueName: \"kubernetes.io/projected/3d57036a-69b1-4789-87cf-8d1cfe930ba4-kube-api-access-kxll2\") pod \"53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713dqrqd\" (UID: \"3d57036a-69b1-4789-87cf-8d1cfe930ba4\") " pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713dqrqd" Jan 20 16:51:23 crc kubenswrapper[4558]: I0120 16:51:23.501474 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713dqrqd" Jan 20 16:51:23 crc kubenswrapper[4558]: I0120 16:51:23.844732 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713dqrqd"] Jan 20 16:51:24 crc kubenswrapper[4558]: I0120 16:51:24.683438 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-sn4p9" Jan 20 16:51:24 crc kubenswrapper[4558]: I0120 16:51:24.710624 4558 generic.go:334] "Generic (PLEG): container finished" podID="3d57036a-69b1-4789-87cf-8d1cfe930ba4" containerID="28d0b08f2259bd32b892d79cde7178d047e8252b0fa83bee727a4cfad2e799f0" exitCode=0 Jan 20 16:51:24 crc kubenswrapper[4558]: I0120 16:51:24.710654 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713dqrqd" event={"ID":"3d57036a-69b1-4789-87cf-8d1cfe930ba4","Type":"ContainerDied","Data":"28d0b08f2259bd32b892d79cde7178d047e8252b0fa83bee727a4cfad2e799f0"} Jan 20 16:51:24 crc kubenswrapper[4558]: I0120 16:51:24.710675 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713dqrqd" event={"ID":"3d57036a-69b1-4789-87cf-8d1cfe930ba4","Type":"ContainerStarted","Data":"2a572abdffe11bf3e97d45024a5dd4793a38d3b8f4708744d8f35929a2f5aa43"} Jan 20 16:51:24 crc kubenswrapper[4558]: I0120 16:51:24.711923 4558 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 20 16:51:26 crc kubenswrapper[4558]: I0120 16:51:26.719930 4558 generic.go:334] "Generic (PLEG): container finished" podID="3d57036a-69b1-4789-87cf-8d1cfe930ba4" containerID="00db097f2ddfda6465d9abab59d5175779ddae2204ad5787e6d692f4d4c41008" exitCode=0 Jan 20 16:51:26 crc kubenswrapper[4558]: I0120 16:51:26.720033 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713dqrqd" 
event={"ID":"3d57036a-69b1-4789-87cf-8d1cfe930ba4","Type":"ContainerDied","Data":"00db097f2ddfda6465d9abab59d5175779ddae2204ad5787e6d692f4d4c41008"} Jan 20 16:51:27 crc kubenswrapper[4558]: I0120 16:51:27.329943 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 16:51:27 crc kubenswrapper[4558]: I0120 16:51:27.329991 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 16:51:27 crc kubenswrapper[4558]: I0120 16:51:27.330023 4558 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" Jan 20 16:51:27 crc kubenswrapper[4558]: I0120 16:51:27.330497 4558 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"75faba5e9c60d39b0965f7842c4fcbaf51da890c19853fb9a294a0e3410f3d20"} pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 20 16:51:27 crc kubenswrapper[4558]: I0120 16:51:27.330550 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" containerID="cri-o://75faba5e9c60d39b0965f7842c4fcbaf51da890c19853fb9a294a0e3410f3d20" gracePeriod=600 Jan 20 16:51:27 crc kubenswrapper[4558]: I0120 16:51:27.726665 4558 generic.go:334] "Generic (PLEG): container finished" podID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerID="75faba5e9c60d39b0965f7842c4fcbaf51da890c19853fb9a294a0e3410f3d20" exitCode=0 Jan 20 16:51:27 crc kubenswrapper[4558]: I0120 16:51:27.726738 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerDied","Data":"75faba5e9c60d39b0965f7842c4fcbaf51da890c19853fb9a294a0e3410f3d20"} Jan 20 16:51:27 crc kubenswrapper[4558]: I0120 16:51:27.727440 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerStarted","Data":"5429c282dce69a1ca0312758655bba8954487a4d9cc36ecc68f43f7950925034"} Jan 20 16:51:27 crc kubenswrapper[4558]: I0120 16:51:27.727507 4558 scope.go:117] "RemoveContainer" containerID="919fd3462b46bed62b26a1ec80dddd868d9b963136777394bcd10ef842cf78a9" Jan 20 16:51:27 crc kubenswrapper[4558]: I0120 16:51:27.730081 4558 generic.go:334] "Generic (PLEG): container finished" podID="3d57036a-69b1-4789-87cf-8d1cfe930ba4" containerID="d2387c4689530bbb58b913d16f3f2f901afea9db8cef210fd9fc8818ac525384" exitCode=0 Jan 20 16:51:27 crc kubenswrapper[4558]: I0120 16:51:27.730110 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713dqrqd" 
event={"ID":"3d57036a-69b1-4789-87cf-8d1cfe930ba4","Type":"ContainerDied","Data":"d2387c4689530bbb58b913d16f3f2f901afea9db8cef210fd9fc8818ac525384"} Jan 20 16:51:28 crc kubenswrapper[4558]: I0120 16:51:28.887852 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713dqrqd" Jan 20 16:51:29 crc kubenswrapper[4558]: I0120 16:51:29.061270 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3d57036a-69b1-4789-87cf-8d1cfe930ba4-bundle\") pod \"3d57036a-69b1-4789-87cf-8d1cfe930ba4\" (UID: \"3d57036a-69b1-4789-87cf-8d1cfe930ba4\") " Jan 20 16:51:29 crc kubenswrapper[4558]: I0120 16:51:29.061388 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kxll2\" (UniqueName: \"kubernetes.io/projected/3d57036a-69b1-4789-87cf-8d1cfe930ba4-kube-api-access-kxll2\") pod \"3d57036a-69b1-4789-87cf-8d1cfe930ba4\" (UID: \"3d57036a-69b1-4789-87cf-8d1cfe930ba4\") " Jan 20 16:51:29 crc kubenswrapper[4558]: I0120 16:51:29.061439 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3d57036a-69b1-4789-87cf-8d1cfe930ba4-util\") pod \"3d57036a-69b1-4789-87cf-8d1cfe930ba4\" (UID: \"3d57036a-69b1-4789-87cf-8d1cfe930ba4\") " Jan 20 16:51:29 crc kubenswrapper[4558]: I0120 16:51:29.061847 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3d57036a-69b1-4789-87cf-8d1cfe930ba4-bundle" (OuterVolumeSpecName: "bundle") pod "3d57036a-69b1-4789-87cf-8d1cfe930ba4" (UID: "3d57036a-69b1-4789-87cf-8d1cfe930ba4"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 16:51:29 crc kubenswrapper[4558]: I0120 16:51:29.065723 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3d57036a-69b1-4789-87cf-8d1cfe930ba4-kube-api-access-kxll2" (OuterVolumeSpecName: "kube-api-access-kxll2") pod "3d57036a-69b1-4789-87cf-8d1cfe930ba4" (UID: "3d57036a-69b1-4789-87cf-8d1cfe930ba4"). InnerVolumeSpecName "kube-api-access-kxll2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:51:29 crc kubenswrapper[4558]: I0120 16:51:29.071152 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3d57036a-69b1-4789-87cf-8d1cfe930ba4-util" (OuterVolumeSpecName: "util") pod "3d57036a-69b1-4789-87cf-8d1cfe930ba4" (UID: "3d57036a-69b1-4789-87cf-8d1cfe930ba4"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 16:51:29 crc kubenswrapper[4558]: I0120 16:51:29.162560 4558 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3d57036a-69b1-4789-87cf-8d1cfe930ba4-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 16:51:29 crc kubenswrapper[4558]: I0120 16:51:29.162587 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kxll2\" (UniqueName: \"kubernetes.io/projected/3d57036a-69b1-4789-87cf-8d1cfe930ba4-kube-api-access-kxll2\") on node \"crc\" DevicePath \"\"" Jan 20 16:51:29 crc kubenswrapper[4558]: I0120 16:51:29.162597 4558 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3d57036a-69b1-4789-87cf-8d1cfe930ba4-util\") on node \"crc\" DevicePath \"\"" Jan 20 16:51:29 crc kubenswrapper[4558]: I0120 16:51:29.741882 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713dqrqd" event={"ID":"3d57036a-69b1-4789-87cf-8d1cfe930ba4","Type":"ContainerDied","Data":"2a572abdffe11bf3e97d45024a5dd4793a38d3b8f4708744d8f35929a2f5aa43"} Jan 20 16:51:29 crc kubenswrapper[4558]: I0120 16:51:29.741923 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2a572abdffe11bf3e97d45024a5dd4793a38d3b8f4708744d8f35929a2f5aa43" Jan 20 16:51:29 crc kubenswrapper[4558]: I0120 16:51:29.742124 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713dqrqd" Jan 20 16:51:32 crc kubenswrapper[4558]: I0120 16:51:32.017130 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-646758c888-r2zgd"] Jan 20 16:51:32 crc kubenswrapper[4558]: E0120 16:51:32.017507 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d57036a-69b1-4789-87cf-8d1cfe930ba4" containerName="pull" Jan 20 16:51:32 crc kubenswrapper[4558]: I0120 16:51:32.017519 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d57036a-69b1-4789-87cf-8d1cfe930ba4" containerName="pull" Jan 20 16:51:32 crc kubenswrapper[4558]: E0120 16:51:32.017532 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d57036a-69b1-4789-87cf-8d1cfe930ba4" containerName="extract" Jan 20 16:51:32 crc kubenswrapper[4558]: I0120 16:51:32.017537 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d57036a-69b1-4789-87cf-8d1cfe930ba4" containerName="extract" Jan 20 16:51:32 crc kubenswrapper[4558]: E0120 16:51:32.017547 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d57036a-69b1-4789-87cf-8d1cfe930ba4" containerName="util" Jan 20 16:51:32 crc kubenswrapper[4558]: I0120 16:51:32.017552 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d57036a-69b1-4789-87cf-8d1cfe930ba4" containerName="util" Jan 20 16:51:32 crc kubenswrapper[4558]: I0120 16:51:32.017645 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d57036a-69b1-4789-87cf-8d1cfe930ba4" containerName="extract" Jan 20 16:51:32 crc kubenswrapper[4558]: I0120 16:51:32.017954 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-646758c888-r2zgd" Jan 20 16:51:32 crc kubenswrapper[4558]: I0120 16:51:32.019453 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-6tlcn" Jan 20 16:51:32 crc kubenswrapper[4558]: I0120 16:51:32.019462 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Jan 20 16:51:32 crc kubenswrapper[4558]: I0120 16:51:32.019513 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Jan 20 16:51:32 crc kubenswrapper[4558]: I0120 16:51:32.029409 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-646758c888-r2zgd"] Jan 20 16:51:32 crc kubenswrapper[4558]: I0120 16:51:32.193455 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8mrmk\" (UniqueName: \"kubernetes.io/projected/b1c60395-86ef-4c6c-8432-b2bc357aac2d-kube-api-access-8mrmk\") pod \"nmstate-operator-646758c888-r2zgd\" (UID: \"b1c60395-86ef-4c6c-8432-b2bc357aac2d\") " pod="openshift-nmstate/nmstate-operator-646758c888-r2zgd" Jan 20 16:51:32 crc kubenswrapper[4558]: I0120 16:51:32.294229 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8mrmk\" (UniqueName: \"kubernetes.io/projected/b1c60395-86ef-4c6c-8432-b2bc357aac2d-kube-api-access-8mrmk\") pod \"nmstate-operator-646758c888-r2zgd\" (UID: \"b1c60395-86ef-4c6c-8432-b2bc357aac2d\") " pod="openshift-nmstate/nmstate-operator-646758c888-r2zgd" Jan 20 16:51:32 crc kubenswrapper[4558]: I0120 16:51:32.309610 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8mrmk\" (UniqueName: \"kubernetes.io/projected/b1c60395-86ef-4c6c-8432-b2bc357aac2d-kube-api-access-8mrmk\") pod \"nmstate-operator-646758c888-r2zgd\" (UID: \"b1c60395-86ef-4c6c-8432-b2bc357aac2d\") " pod="openshift-nmstate/nmstate-operator-646758c888-r2zgd" Jan 20 16:51:32 crc kubenswrapper[4558]: I0120 16:51:32.331371 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-646758c888-r2zgd" Jan 20 16:51:32 crc kubenswrapper[4558]: I0120 16:51:32.679780 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-646758c888-r2zgd"] Jan 20 16:51:32 crc kubenswrapper[4558]: I0120 16:51:32.754046 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-646758c888-r2zgd" event={"ID":"b1c60395-86ef-4c6c-8432-b2bc357aac2d","Type":"ContainerStarted","Data":"328d6dd135c398b008862842b87646e7c09353391176075fff124ac73157f76f"} Jan 20 16:51:34 crc kubenswrapper[4558]: I0120 16:51:34.762773 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-646758c888-r2zgd" event={"ID":"b1c60395-86ef-4c6c-8432-b2bc357aac2d","Type":"ContainerStarted","Data":"bd186e0d45d5f70f7108c68750f309ec60c33464048ac3916ba3a110931e2ab8"} Jan 20 16:51:34 crc kubenswrapper[4558]: I0120 16:51:34.775416 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-646758c888-r2zgd" podStartSLOduration=0.918366053 podStartE2EDuration="2.775388313s" podCreationTimestamp="2026-01-20 16:51:32 +0000 UTC" firstStartedPulling="2026-01-20 16:51:32.68604564 +0000 UTC m=+586.446383608" lastFinishedPulling="2026-01-20 16:51:34.5430679 +0000 UTC m=+588.303405868" observedRunningTime="2026-01-20 16:51:34.775336807 +0000 UTC m=+588.535674774" watchObservedRunningTime="2026-01-20 16:51:34.775388313 +0000 UTC m=+588.535726280" Jan 20 16:51:35 crc kubenswrapper[4558]: I0120 16:51:35.560563 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-54757c584b-l6chb"] Jan 20 16:51:35 crc kubenswrapper[4558]: I0120 16:51:35.561710 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-54757c584b-l6chb" Jan 20 16:51:35 crc kubenswrapper[4558]: I0120 16:51:35.563772 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-h7t5s" Jan 20 16:51:35 crc kubenswrapper[4558]: I0120 16:51:35.577466 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-8474b5b9d8-t4zzd"] Jan 20 16:51:35 crc kubenswrapper[4558]: I0120 16:51:35.578070 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-t4zzd" Jan 20 16:51:35 crc kubenswrapper[4558]: I0120 16:51:35.579365 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Jan 20 16:51:35 crc kubenswrapper[4558]: I0120 16:51:35.580862 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-54757c584b-l6chb"] Jan 20 16:51:35 crc kubenswrapper[4558]: I0120 16:51:35.592130 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-btt28"] Jan 20 16:51:35 crc kubenswrapper[4558]: I0120 16:51:35.592792 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-btt28" Jan 20 16:51:35 crc kubenswrapper[4558]: I0120 16:51:35.627492 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wd2cq\" (UniqueName: \"kubernetes.io/projected/ecd5abb7-faaf-42b3-8698-71ea80253e9d-kube-api-access-wd2cq\") pod \"nmstate-webhook-8474b5b9d8-t4zzd\" (UID: \"ecd5abb7-faaf-42b3-8698-71ea80253e9d\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-t4zzd" Jan 20 16:51:35 crc kubenswrapper[4558]: I0120 16:51:35.627543 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/f54cd5f9-30bf-494b-8528-4f25f8fa1521-ovs-socket\") pod \"nmstate-handler-btt28\" (UID: \"f54cd5f9-30bf-494b-8528-4f25f8fa1521\") " pod="openshift-nmstate/nmstate-handler-btt28" Jan 20 16:51:35 crc kubenswrapper[4558]: I0120 16:51:35.627572 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wnkxh\" (UniqueName: \"kubernetes.io/projected/1b599dc7-9188-4dbc-ad4a-32db989cb635-kube-api-access-wnkxh\") pod \"nmstate-metrics-54757c584b-l6chb\" (UID: \"1b599dc7-9188-4dbc-ad4a-32db989cb635\") " pod="openshift-nmstate/nmstate-metrics-54757c584b-l6chb" Jan 20 16:51:35 crc kubenswrapper[4558]: I0120 16:51:35.627698 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/ecd5abb7-faaf-42b3-8698-71ea80253e9d-tls-key-pair\") pod \"nmstate-webhook-8474b5b9d8-t4zzd\" (UID: \"ecd5abb7-faaf-42b3-8698-71ea80253e9d\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-t4zzd" Jan 20 16:51:35 crc kubenswrapper[4558]: I0120 16:51:35.627756 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/f54cd5f9-30bf-494b-8528-4f25f8fa1521-dbus-socket\") pod \"nmstate-handler-btt28\" (UID: \"f54cd5f9-30bf-494b-8528-4f25f8fa1521\") " pod="openshift-nmstate/nmstate-handler-btt28" Jan 20 16:51:35 crc kubenswrapper[4558]: I0120 16:51:35.627818 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nzmf5\" (UniqueName: \"kubernetes.io/projected/f54cd5f9-30bf-494b-8528-4f25f8fa1521-kube-api-access-nzmf5\") pod \"nmstate-handler-btt28\" (UID: \"f54cd5f9-30bf-494b-8528-4f25f8fa1521\") " pod="openshift-nmstate/nmstate-handler-btt28" Jan 20 16:51:35 crc kubenswrapper[4558]: I0120 16:51:35.627880 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/f54cd5f9-30bf-494b-8528-4f25f8fa1521-nmstate-lock\") pod \"nmstate-handler-btt28\" (UID: \"f54cd5f9-30bf-494b-8528-4f25f8fa1521\") " pod="openshift-nmstate/nmstate-handler-btt28" Jan 20 16:51:35 crc kubenswrapper[4558]: I0120 16:51:35.631059 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-8474b5b9d8-t4zzd"] Jan 20 16:51:35 crc kubenswrapper[4558]: I0120 16:51:35.666006 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7754f76f8b-n5w94"] Jan 20 16:51:35 crc kubenswrapper[4558]: I0120 16:51:35.666802 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-n5w94" Jan 20 16:51:35 crc kubenswrapper[4558]: I0120 16:51:35.668736 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Jan 20 16:51:35 crc kubenswrapper[4558]: I0120 16:51:35.668921 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Jan 20 16:51:35 crc kubenswrapper[4558]: I0120 16:51:35.671547 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-79n4v" Jan 20 16:51:35 crc kubenswrapper[4558]: I0120 16:51:35.671900 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7754f76f8b-n5w94"] Jan 20 16:51:35 crc kubenswrapper[4558]: I0120 16:51:35.729371 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/ecd5abb7-faaf-42b3-8698-71ea80253e9d-tls-key-pair\") pod \"nmstate-webhook-8474b5b9d8-t4zzd\" (UID: \"ecd5abb7-faaf-42b3-8698-71ea80253e9d\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-t4zzd" Jan 20 16:51:35 crc kubenswrapper[4558]: I0120 16:51:35.729421 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/f54cd5f9-30bf-494b-8528-4f25f8fa1521-dbus-socket\") pod \"nmstate-handler-btt28\" (UID: \"f54cd5f9-30bf-494b-8528-4f25f8fa1521\") " pod="openshift-nmstate/nmstate-handler-btt28" Jan 20 16:51:35 crc kubenswrapper[4558]: I0120 16:51:35.729447 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/7abfa901-9433-43f3-8f51-1da05d50f84d-nginx-conf\") pod \"nmstate-console-plugin-7754f76f8b-n5w94\" (UID: \"7abfa901-9433-43f3-8f51-1da05d50f84d\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-n5w94" Jan 20 16:51:35 crc kubenswrapper[4558]: I0120 16:51:35.729470 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pgjrd\" (UniqueName: \"kubernetes.io/projected/7abfa901-9433-43f3-8f51-1da05d50f84d-kube-api-access-pgjrd\") pod \"nmstate-console-plugin-7754f76f8b-n5w94\" (UID: \"7abfa901-9433-43f3-8f51-1da05d50f84d\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-n5w94" Jan 20 16:51:35 crc kubenswrapper[4558]: I0120 16:51:35.729499 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nzmf5\" (UniqueName: \"kubernetes.io/projected/f54cd5f9-30bf-494b-8528-4f25f8fa1521-kube-api-access-nzmf5\") pod \"nmstate-handler-btt28\" (UID: \"f54cd5f9-30bf-494b-8528-4f25f8fa1521\") " pod="openshift-nmstate/nmstate-handler-btt28" Jan 20 16:51:35 crc kubenswrapper[4558]: I0120 16:51:35.729520 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/7abfa901-9433-43f3-8f51-1da05d50f84d-plugin-serving-cert\") pod \"nmstate-console-plugin-7754f76f8b-n5w94\" (UID: \"7abfa901-9433-43f3-8f51-1da05d50f84d\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-n5w94" Jan 20 16:51:35 crc kubenswrapper[4558]: I0120 16:51:35.729548 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/f54cd5f9-30bf-494b-8528-4f25f8fa1521-nmstate-lock\") pod 
\"nmstate-handler-btt28\" (UID: \"f54cd5f9-30bf-494b-8528-4f25f8fa1521\") " pod="openshift-nmstate/nmstate-handler-btt28" Jan 20 16:51:35 crc kubenswrapper[4558]: I0120 16:51:35.729576 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wd2cq\" (UniqueName: \"kubernetes.io/projected/ecd5abb7-faaf-42b3-8698-71ea80253e9d-kube-api-access-wd2cq\") pod \"nmstate-webhook-8474b5b9d8-t4zzd\" (UID: \"ecd5abb7-faaf-42b3-8698-71ea80253e9d\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-t4zzd" Jan 20 16:51:35 crc kubenswrapper[4558]: I0120 16:51:35.729603 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/f54cd5f9-30bf-494b-8528-4f25f8fa1521-ovs-socket\") pod \"nmstate-handler-btt28\" (UID: \"f54cd5f9-30bf-494b-8528-4f25f8fa1521\") " pod="openshift-nmstate/nmstate-handler-btt28" Jan 20 16:51:35 crc kubenswrapper[4558]: I0120 16:51:35.729620 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wnkxh\" (UniqueName: \"kubernetes.io/projected/1b599dc7-9188-4dbc-ad4a-32db989cb635-kube-api-access-wnkxh\") pod \"nmstate-metrics-54757c584b-l6chb\" (UID: \"1b599dc7-9188-4dbc-ad4a-32db989cb635\") " pod="openshift-nmstate/nmstate-metrics-54757c584b-l6chb" Jan 20 16:51:35 crc kubenswrapper[4558]: I0120 16:51:35.729952 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/f54cd5f9-30bf-494b-8528-4f25f8fa1521-nmstate-lock\") pod \"nmstate-handler-btt28\" (UID: \"f54cd5f9-30bf-494b-8528-4f25f8fa1521\") " pod="openshift-nmstate/nmstate-handler-btt28" Jan 20 16:51:35 crc kubenswrapper[4558]: I0120 16:51:35.730060 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/f54cd5f9-30bf-494b-8528-4f25f8fa1521-ovs-socket\") pod \"nmstate-handler-btt28\" (UID: \"f54cd5f9-30bf-494b-8528-4f25f8fa1521\") " pod="openshift-nmstate/nmstate-handler-btt28" Jan 20 16:51:35 crc kubenswrapper[4558]: I0120 16:51:35.730296 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/f54cd5f9-30bf-494b-8528-4f25f8fa1521-dbus-socket\") pod \"nmstate-handler-btt28\" (UID: \"f54cd5f9-30bf-494b-8528-4f25f8fa1521\") " pod="openshift-nmstate/nmstate-handler-btt28" Jan 20 16:51:35 crc kubenswrapper[4558]: I0120 16:51:35.734775 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/ecd5abb7-faaf-42b3-8698-71ea80253e9d-tls-key-pair\") pod \"nmstate-webhook-8474b5b9d8-t4zzd\" (UID: \"ecd5abb7-faaf-42b3-8698-71ea80253e9d\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-t4zzd" Jan 20 16:51:35 crc kubenswrapper[4558]: I0120 16:51:35.744144 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wnkxh\" (UniqueName: \"kubernetes.io/projected/1b599dc7-9188-4dbc-ad4a-32db989cb635-kube-api-access-wnkxh\") pod \"nmstate-metrics-54757c584b-l6chb\" (UID: \"1b599dc7-9188-4dbc-ad4a-32db989cb635\") " pod="openshift-nmstate/nmstate-metrics-54757c584b-l6chb" Jan 20 16:51:35 crc kubenswrapper[4558]: I0120 16:51:35.744277 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wd2cq\" (UniqueName: \"kubernetes.io/projected/ecd5abb7-faaf-42b3-8698-71ea80253e9d-kube-api-access-wd2cq\") pod \"nmstate-webhook-8474b5b9d8-t4zzd\" (UID: 
\"ecd5abb7-faaf-42b3-8698-71ea80253e9d\") " pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-t4zzd" Jan 20 16:51:35 crc kubenswrapper[4558]: I0120 16:51:35.754990 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nzmf5\" (UniqueName: \"kubernetes.io/projected/f54cd5f9-30bf-494b-8528-4f25f8fa1521-kube-api-access-nzmf5\") pod \"nmstate-handler-btt28\" (UID: \"f54cd5f9-30bf-494b-8528-4f25f8fa1521\") " pod="openshift-nmstate/nmstate-handler-btt28" Jan 20 16:51:35 crc kubenswrapper[4558]: I0120 16:51:35.830309 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/7abfa901-9433-43f3-8f51-1da05d50f84d-nginx-conf\") pod \"nmstate-console-plugin-7754f76f8b-n5w94\" (UID: \"7abfa901-9433-43f3-8f51-1da05d50f84d\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-n5w94" Jan 20 16:51:35 crc kubenswrapper[4558]: I0120 16:51:35.831227 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pgjrd\" (UniqueName: \"kubernetes.io/projected/7abfa901-9433-43f3-8f51-1da05d50f84d-kube-api-access-pgjrd\") pod \"nmstate-console-plugin-7754f76f8b-n5w94\" (UID: \"7abfa901-9433-43f3-8f51-1da05d50f84d\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-n5w94" Jan 20 16:51:35 crc kubenswrapper[4558]: I0120 16:51:35.831618 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/7abfa901-9433-43f3-8f51-1da05d50f84d-plugin-serving-cert\") pod \"nmstate-console-plugin-7754f76f8b-n5w94\" (UID: \"7abfa901-9433-43f3-8f51-1da05d50f84d\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-n5w94" Jan 20 16:51:35 crc kubenswrapper[4558]: I0120 16:51:35.831053 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/7abfa901-9433-43f3-8f51-1da05d50f84d-nginx-conf\") pod \"nmstate-console-plugin-7754f76f8b-n5w94\" (UID: \"7abfa901-9433-43f3-8f51-1da05d50f84d\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-n5w94" Jan 20 16:51:35 crc kubenswrapper[4558]: I0120 16:51:35.834926 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/7abfa901-9433-43f3-8f51-1da05d50f84d-plugin-serving-cert\") pod \"nmstate-console-plugin-7754f76f8b-n5w94\" (UID: \"7abfa901-9433-43f3-8f51-1da05d50f84d\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-n5w94" Jan 20 16:51:35 crc kubenswrapper[4558]: I0120 16:51:35.838465 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9964b78f-sd6td"] Jan 20 16:51:35 crc kubenswrapper[4558]: I0120 16:51:35.839016 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9964b78f-sd6td" Jan 20 16:51:35 crc kubenswrapper[4558]: I0120 16:51:35.852460 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9964b78f-sd6td"] Jan 20 16:51:35 crc kubenswrapper[4558]: I0120 16:51:35.862535 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pgjrd\" (UniqueName: \"kubernetes.io/projected/7abfa901-9433-43f3-8f51-1da05d50f84d-kube-api-access-pgjrd\") pod \"nmstate-console-plugin-7754f76f8b-n5w94\" (UID: \"7abfa901-9433-43f3-8f51-1da05d50f84d\") " pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-n5w94" Jan 20 16:51:35 crc kubenswrapper[4558]: I0120 16:51:35.876944 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-54757c584b-l6chb" Jan 20 16:51:35 crc kubenswrapper[4558]: I0120 16:51:35.891292 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-t4zzd" Jan 20 16:51:35 crc kubenswrapper[4558]: I0120 16:51:35.904761 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-btt28" Jan 20 16:51:35 crc kubenswrapper[4558]: W0120 16:51:35.922834 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf54cd5f9_30bf_494b_8528_4f25f8fa1521.slice/crio-a60865786aa78f9f25a7c86cf33ef36ab2caebf54810015988be67f318072e20 WatchSource:0}: Error finding container a60865786aa78f9f25a7c86cf33ef36ab2caebf54810015988be67f318072e20: Status 404 returned error can't find the container with id a60865786aa78f9f25a7c86cf33ef36ab2caebf54810015988be67f318072e20 Jan 20 16:51:35 crc kubenswrapper[4558]: I0120 16:51:35.934732 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/6b72246a-ef74-4d39-99b7-068cebe1c279-console-config\") pod \"console-f9964b78f-sd6td\" (UID: \"6b72246a-ef74-4d39-99b7-068cebe1c279\") " pod="openshift-console/console-f9964b78f-sd6td" Jan 20 16:51:35 crc kubenswrapper[4558]: I0120 16:51:35.935003 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/6b72246a-ef74-4d39-99b7-068cebe1c279-oauth-serving-cert\") pod \"console-f9964b78f-sd6td\" (UID: \"6b72246a-ef74-4d39-99b7-068cebe1c279\") " pod="openshift-console/console-f9964b78f-sd6td" Jan 20 16:51:35 crc kubenswrapper[4558]: I0120 16:51:35.935030 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/6b72246a-ef74-4d39-99b7-068cebe1c279-console-oauth-config\") pod \"console-f9964b78f-sd6td\" (UID: \"6b72246a-ef74-4d39-99b7-068cebe1c279\") " pod="openshift-console/console-f9964b78f-sd6td" Jan 20 16:51:35 crc kubenswrapper[4558]: I0120 16:51:35.935065 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/6b72246a-ef74-4d39-99b7-068cebe1c279-service-ca\") pod \"console-f9964b78f-sd6td\" (UID: \"6b72246a-ef74-4d39-99b7-068cebe1c279\") " pod="openshift-console/console-f9964b78f-sd6td" Jan 20 16:51:35 crc kubenswrapper[4558]: I0120 16:51:35.935087 4558 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6b72246a-ef74-4d39-99b7-068cebe1c279-trusted-ca-bundle\") pod \"console-f9964b78f-sd6td\" (UID: \"6b72246a-ef74-4d39-99b7-068cebe1c279\") " pod="openshift-console/console-f9964b78f-sd6td" Jan 20 16:51:35 crc kubenswrapper[4558]: I0120 16:51:35.935156 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/6b72246a-ef74-4d39-99b7-068cebe1c279-console-serving-cert\") pod \"console-f9964b78f-sd6td\" (UID: \"6b72246a-ef74-4d39-99b7-068cebe1c279\") " pod="openshift-console/console-f9964b78f-sd6td" Jan 20 16:51:35 crc kubenswrapper[4558]: I0120 16:51:35.935199 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mgzjm\" (UniqueName: \"kubernetes.io/projected/6b72246a-ef74-4d39-99b7-068cebe1c279-kube-api-access-mgzjm\") pod \"console-f9964b78f-sd6td\" (UID: \"6b72246a-ef74-4d39-99b7-068cebe1c279\") " pod="openshift-console/console-f9964b78f-sd6td" Jan 20 16:51:35 crc kubenswrapper[4558]: I0120 16:51:35.978597 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-n5w94" Jan 20 16:51:36 crc kubenswrapper[4558]: I0120 16:51:36.036662 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/6b72246a-ef74-4d39-99b7-068cebe1c279-service-ca\") pod \"console-f9964b78f-sd6td\" (UID: \"6b72246a-ef74-4d39-99b7-068cebe1c279\") " pod="openshift-console/console-f9964b78f-sd6td" Jan 20 16:51:36 crc kubenswrapper[4558]: I0120 16:51:36.036706 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6b72246a-ef74-4d39-99b7-068cebe1c279-trusted-ca-bundle\") pod \"console-f9964b78f-sd6td\" (UID: \"6b72246a-ef74-4d39-99b7-068cebe1c279\") " pod="openshift-console/console-f9964b78f-sd6td" Jan 20 16:51:36 crc kubenswrapper[4558]: I0120 16:51:36.036763 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/6b72246a-ef74-4d39-99b7-068cebe1c279-console-serving-cert\") pod \"console-f9964b78f-sd6td\" (UID: \"6b72246a-ef74-4d39-99b7-068cebe1c279\") " pod="openshift-console/console-f9964b78f-sd6td" Jan 20 16:51:36 crc kubenswrapper[4558]: I0120 16:51:36.036785 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mgzjm\" (UniqueName: \"kubernetes.io/projected/6b72246a-ef74-4d39-99b7-068cebe1c279-kube-api-access-mgzjm\") pod \"console-f9964b78f-sd6td\" (UID: \"6b72246a-ef74-4d39-99b7-068cebe1c279\") " pod="openshift-console/console-f9964b78f-sd6td" Jan 20 16:51:36 crc kubenswrapper[4558]: I0120 16:51:36.036851 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/6b72246a-ef74-4d39-99b7-068cebe1c279-console-config\") pod \"console-f9964b78f-sd6td\" (UID: \"6b72246a-ef74-4d39-99b7-068cebe1c279\") " pod="openshift-console/console-f9964b78f-sd6td" Jan 20 16:51:36 crc kubenswrapper[4558]: I0120 16:51:36.036873 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: 
\"kubernetes.io/configmap/6b72246a-ef74-4d39-99b7-068cebe1c279-oauth-serving-cert\") pod \"console-f9964b78f-sd6td\" (UID: \"6b72246a-ef74-4d39-99b7-068cebe1c279\") " pod="openshift-console/console-f9964b78f-sd6td" Jan 20 16:51:36 crc kubenswrapper[4558]: I0120 16:51:36.036889 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/6b72246a-ef74-4d39-99b7-068cebe1c279-console-oauth-config\") pod \"console-f9964b78f-sd6td\" (UID: \"6b72246a-ef74-4d39-99b7-068cebe1c279\") " pod="openshift-console/console-f9964b78f-sd6td" Jan 20 16:51:36 crc kubenswrapper[4558]: I0120 16:51:36.038010 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/6b72246a-ef74-4d39-99b7-068cebe1c279-service-ca\") pod \"console-f9964b78f-sd6td\" (UID: \"6b72246a-ef74-4d39-99b7-068cebe1c279\") " pod="openshift-console/console-f9964b78f-sd6td" Jan 20 16:51:36 crc kubenswrapper[4558]: I0120 16:51:36.038213 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6b72246a-ef74-4d39-99b7-068cebe1c279-trusted-ca-bundle\") pod \"console-f9964b78f-sd6td\" (UID: \"6b72246a-ef74-4d39-99b7-068cebe1c279\") " pod="openshift-console/console-f9964b78f-sd6td" Jan 20 16:51:36 crc kubenswrapper[4558]: I0120 16:51:36.038429 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/6b72246a-ef74-4d39-99b7-068cebe1c279-console-config\") pod \"console-f9964b78f-sd6td\" (UID: \"6b72246a-ef74-4d39-99b7-068cebe1c279\") " pod="openshift-console/console-f9964b78f-sd6td" Jan 20 16:51:36 crc kubenswrapper[4558]: I0120 16:51:36.038591 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/6b72246a-ef74-4d39-99b7-068cebe1c279-oauth-serving-cert\") pod \"console-f9964b78f-sd6td\" (UID: \"6b72246a-ef74-4d39-99b7-068cebe1c279\") " pod="openshift-console/console-f9964b78f-sd6td" Jan 20 16:51:36 crc kubenswrapper[4558]: I0120 16:51:36.042887 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/6b72246a-ef74-4d39-99b7-068cebe1c279-console-oauth-config\") pod \"console-f9964b78f-sd6td\" (UID: \"6b72246a-ef74-4d39-99b7-068cebe1c279\") " pod="openshift-console/console-f9964b78f-sd6td" Jan 20 16:51:36 crc kubenswrapper[4558]: I0120 16:51:36.043195 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/6b72246a-ef74-4d39-99b7-068cebe1c279-console-serving-cert\") pod \"console-f9964b78f-sd6td\" (UID: \"6b72246a-ef74-4d39-99b7-068cebe1c279\") " pod="openshift-console/console-f9964b78f-sd6td" Jan 20 16:51:36 crc kubenswrapper[4558]: I0120 16:51:36.052945 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mgzjm\" (UniqueName: \"kubernetes.io/projected/6b72246a-ef74-4d39-99b7-068cebe1c279-kube-api-access-mgzjm\") pod \"console-f9964b78f-sd6td\" (UID: \"6b72246a-ef74-4d39-99b7-068cebe1c279\") " pod="openshift-console/console-f9964b78f-sd6td" Jan 20 16:51:36 crc kubenswrapper[4558]: I0120 16:51:36.177556 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9964b78f-sd6td" Jan 20 16:51:36 crc kubenswrapper[4558]: I0120 16:51:36.244540 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-54757c584b-l6chb"] Jan 20 16:51:36 crc kubenswrapper[4558]: W0120 16:51:36.257287 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1b599dc7_9188_4dbc_ad4a_32db989cb635.slice/crio-2b849ab6a51e48a974eeefc18e4c7d8d079bdd3d15dcf30aecc15ac06492161c WatchSource:0}: Error finding container 2b849ab6a51e48a974eeefc18e4c7d8d079bdd3d15dcf30aecc15ac06492161c: Status 404 returned error can't find the container with id 2b849ab6a51e48a974eeefc18e4c7d8d079bdd3d15dcf30aecc15ac06492161c Jan 20 16:51:36 crc kubenswrapper[4558]: I0120 16:51:36.275086 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-8474b5b9d8-t4zzd"] Jan 20 16:51:36 crc kubenswrapper[4558]: W0120 16:51:36.285853 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podecd5abb7_faaf_42b3_8698_71ea80253e9d.slice/crio-b9a245baae701d6620d8bf3a0a93aaaf760a727dec5092b07282658b7d78730c WatchSource:0}: Error finding container b9a245baae701d6620d8bf3a0a93aaaf760a727dec5092b07282658b7d78730c: Status 404 returned error can't find the container with id b9a245baae701d6620d8bf3a0a93aaaf760a727dec5092b07282658b7d78730c Jan 20 16:51:36 crc kubenswrapper[4558]: I0120 16:51:36.326981 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7754f76f8b-n5w94"] Jan 20 16:51:36 crc kubenswrapper[4558]: I0120 16:51:36.348598 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9964b78f-sd6td"] Jan 20 16:51:36 crc kubenswrapper[4558]: W0120 16:51:36.350243 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6b72246a_ef74_4d39_99b7_068cebe1c279.slice/crio-ee9c567398fc27ad19ed3d93e573ef9ed8f6f0af769ac82cf16fc595a28483d1 WatchSource:0}: Error finding container ee9c567398fc27ad19ed3d93e573ef9ed8f6f0af769ac82cf16fc595a28483d1: Status 404 returned error can't find the container with id ee9c567398fc27ad19ed3d93e573ef9ed8f6f0af769ac82cf16fc595a28483d1 Jan 20 16:51:36 crc kubenswrapper[4558]: I0120 16:51:36.774091 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9964b78f-sd6td" event={"ID":"6b72246a-ef74-4d39-99b7-068cebe1c279","Type":"ContainerStarted","Data":"e767c2c62ecf07981b2fb50355dcc54922c49bc7a80ea410c8e8714abf59b490"} Jan 20 16:51:36 crc kubenswrapper[4558]: I0120 16:51:36.774418 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9964b78f-sd6td" event={"ID":"6b72246a-ef74-4d39-99b7-068cebe1c279","Type":"ContainerStarted","Data":"ee9c567398fc27ad19ed3d93e573ef9ed8f6f0af769ac82cf16fc595a28483d1"} Jan 20 16:51:36 crc kubenswrapper[4558]: I0120 16:51:36.775473 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-t4zzd" event={"ID":"ecd5abb7-faaf-42b3-8698-71ea80253e9d","Type":"ContainerStarted","Data":"b9a245baae701d6620d8bf3a0a93aaaf760a727dec5092b07282658b7d78730c"} Jan 20 16:51:36 crc kubenswrapper[4558]: I0120 16:51:36.776677 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-54757c584b-l6chb" 
event={"ID":"1b599dc7-9188-4dbc-ad4a-32db989cb635","Type":"ContainerStarted","Data":"2b849ab6a51e48a974eeefc18e4c7d8d079bdd3d15dcf30aecc15ac06492161c"} Jan 20 16:51:36 crc kubenswrapper[4558]: I0120 16:51:36.777612 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-n5w94" event={"ID":"7abfa901-9433-43f3-8f51-1da05d50f84d","Type":"ContainerStarted","Data":"95b57e9272aaca9cfd180200e2586e845439f73ed27197b59f726f113ce67f4b"} Jan 20 16:51:36 crc kubenswrapper[4558]: I0120 16:51:36.778782 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-btt28" event={"ID":"f54cd5f9-30bf-494b-8528-4f25f8fa1521","Type":"ContainerStarted","Data":"a60865786aa78f9f25a7c86cf33ef36ab2caebf54810015988be67f318072e20"} Jan 20 16:51:36 crc kubenswrapper[4558]: I0120 16:51:36.796027 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9964b78f-sd6td" podStartSLOduration=1.796008161 podStartE2EDuration="1.796008161s" podCreationTimestamp="2026-01-20 16:51:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:51:36.794282376 +0000 UTC m=+590.554620342" watchObservedRunningTime="2026-01-20 16:51:36.796008161 +0000 UTC m=+590.556346128" Jan 20 16:51:38 crc kubenswrapper[4558]: I0120 16:51:38.790797 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-n5w94" event={"ID":"7abfa901-9433-43f3-8f51-1da05d50f84d","Type":"ContainerStarted","Data":"687fa4cd398228a634e8b3748fd409d6dabf88ffbab1663978fcc47e2ec62008"} Jan 20 16:51:38 crc kubenswrapper[4558]: I0120 16:51:38.795494 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-btt28" event={"ID":"f54cd5f9-30bf-494b-8528-4f25f8fa1521","Type":"ContainerStarted","Data":"31f92d6cddf7e4d051b55637123f879d960e2c34b83e28ee28472279d3647e49"} Jan 20 16:51:38 crc kubenswrapper[4558]: I0120 16:51:38.795634 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-btt28" Jan 20 16:51:38 crc kubenswrapper[4558]: I0120 16:51:38.798837 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-t4zzd" event={"ID":"ecd5abb7-faaf-42b3-8698-71ea80253e9d","Type":"ContainerStarted","Data":"40ab106dc1182dd018490931af5ff1d6f2de1ea27d35323e2bfd5abbf85d116e"} Jan 20 16:51:38 crc kubenswrapper[4558]: I0120 16:51:38.798979 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-t4zzd" Jan 20 16:51:38 crc kubenswrapper[4558]: I0120 16:51:38.800077 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-54757c584b-l6chb" event={"ID":"1b599dc7-9188-4dbc-ad4a-32db989cb635","Type":"ContainerStarted","Data":"9ab678c8f75213ea049edcf5636a21216d67bf2d7fa5064f9712d80562c3f595"} Jan 20 16:51:38 crc kubenswrapper[4558]: I0120 16:51:38.802570 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-7754f76f8b-n5w94" podStartSLOduration=1.549541267 podStartE2EDuration="3.802559577s" podCreationTimestamp="2026-01-20 16:51:35 +0000 UTC" firstStartedPulling="2026-01-20 16:51:36.338013211 +0000 UTC m=+590.098351178" lastFinishedPulling="2026-01-20 16:51:38.59103152 +0000 UTC m=+592.351369488" observedRunningTime="2026-01-20 
16:51:38.802030032 +0000 UTC m=+592.562367998" watchObservedRunningTime="2026-01-20 16:51:38.802559577 +0000 UTC m=+592.562897544" Jan 20 16:51:38 crc kubenswrapper[4558]: I0120 16:51:38.835212 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-t4zzd" podStartSLOduration=2.1450408 podStartE2EDuration="3.835189151s" podCreationTimestamp="2026-01-20 16:51:35 +0000 UTC" firstStartedPulling="2026-01-20 16:51:36.291265411 +0000 UTC m=+590.051603379" lastFinishedPulling="2026-01-20 16:51:37.981413763 +0000 UTC m=+591.741751730" observedRunningTime="2026-01-20 16:51:38.834268519 +0000 UTC m=+592.594606486" watchObservedRunningTime="2026-01-20 16:51:38.835189151 +0000 UTC m=+592.595527118" Jan 20 16:51:38 crc kubenswrapper[4558]: I0120 16:51:38.837833 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-btt28" podStartSLOduration=1.810013164 podStartE2EDuration="3.837821771s" podCreationTimestamp="2026-01-20 16:51:35 +0000 UTC" firstStartedPulling="2026-01-20 16:51:35.924532231 +0000 UTC m=+589.684870198" lastFinishedPulling="2026-01-20 16:51:37.952340838 +0000 UTC m=+591.712678805" observedRunningTime="2026-01-20 16:51:38.819685559 +0000 UTC m=+592.580023526" watchObservedRunningTime="2026-01-20 16:51:38.837821771 +0000 UTC m=+592.598159738" Jan 20 16:51:40 crc kubenswrapper[4558]: I0120 16:51:40.809013 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-54757c584b-l6chb" event={"ID":"1b599dc7-9188-4dbc-ad4a-32db989cb635","Type":"ContainerStarted","Data":"32cdf94b54739b3c9f7c5d12ecfc10bc0abdb0e0be98937f047711e3e209b07c"} Jan 20 16:51:40 crc kubenswrapper[4558]: I0120 16:51:40.822917 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-54757c584b-l6chb" podStartSLOduration=1.927027427 podStartE2EDuration="5.822902625s" podCreationTimestamp="2026-01-20 16:51:35 +0000 UTC" firstStartedPulling="2026-01-20 16:51:36.258661015 +0000 UTC m=+590.018998983" lastFinishedPulling="2026-01-20 16:51:40.154536214 +0000 UTC m=+593.914874181" observedRunningTime="2026-01-20 16:51:40.82070822 +0000 UTC m=+594.581046197" watchObservedRunningTime="2026-01-20 16:51:40.822902625 +0000 UTC m=+594.583240593" Jan 20 16:51:45 crc kubenswrapper[4558]: I0120 16:51:45.922882 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-btt28" Jan 20 16:51:46 crc kubenswrapper[4558]: I0120 16:51:46.178643 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9964b78f-sd6td" Jan 20 16:51:46 crc kubenswrapper[4558]: I0120 16:51:46.178869 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9964b78f-sd6td" Jan 20 16:51:46 crc kubenswrapper[4558]: I0120 16:51:46.182412 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9964b78f-sd6td" Jan 20 16:51:46 crc kubenswrapper[4558]: I0120 16:51:46.732379 4558 scope.go:117] "RemoveContainer" containerID="2297634e20683413a5eb643517580c9486303dc9616a7588a4ac571f3d0e53b5" Jan 20 16:51:46 crc kubenswrapper[4558]: I0120 16:51:46.833299 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-jsqvf_bedf08c7-1f93-4931-a7f3-e729e2a137af/kube-multus/2.log" Jan 20 16:51:46 crc kubenswrapper[4558]: I0120 16:51:46.836201 4558 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9964b78f-sd6td" Jan 20 16:51:46 crc kubenswrapper[4558]: I0120 16:51:46.867452 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-lhmdh"] Jan 20 16:51:55 crc kubenswrapper[4558]: I0120 16:51:55.895553 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-8474b5b9d8-t4zzd" Jan 20 16:52:04 crc kubenswrapper[4558]: I0120 16:52:04.419009 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zr95"] Jan 20 16:52:04 crc kubenswrapper[4558]: I0120 16:52:04.420240 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zr95" Jan 20 16:52:04 crc kubenswrapper[4558]: I0120 16:52:04.421590 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Jan 20 16:52:04 crc kubenswrapper[4558]: I0120 16:52:04.430787 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zr95"] Jan 20 16:52:04 crc kubenswrapper[4558]: I0120 16:52:04.482832 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-snbms\" (UniqueName: \"kubernetes.io/projected/53937263-221c-4ee7-87fb-d1b03392fd73-kube-api-access-snbms\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zr95\" (UID: \"53937263-221c-4ee7-87fb-d1b03392fd73\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zr95" Jan 20 16:52:04 crc kubenswrapper[4558]: I0120 16:52:04.483065 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/53937263-221c-4ee7-87fb-d1b03392fd73-util\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zr95\" (UID: \"53937263-221c-4ee7-87fb-d1b03392fd73\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zr95" Jan 20 16:52:04 crc kubenswrapper[4558]: I0120 16:52:04.483191 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/53937263-221c-4ee7-87fb-d1b03392fd73-bundle\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zr95\" (UID: \"53937263-221c-4ee7-87fb-d1b03392fd73\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zr95" Jan 20 16:52:04 crc kubenswrapper[4558]: I0120 16:52:04.584392 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/53937263-221c-4ee7-87fb-d1b03392fd73-util\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zr95\" (UID: \"53937263-221c-4ee7-87fb-d1b03392fd73\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zr95" Jan 20 16:52:04 crc kubenswrapper[4558]: I0120 16:52:04.584468 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/53937263-221c-4ee7-87fb-d1b03392fd73-bundle\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zr95\" (UID: 
\"53937263-221c-4ee7-87fb-d1b03392fd73\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zr95" Jan 20 16:52:04 crc kubenswrapper[4558]: I0120 16:52:04.584543 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-snbms\" (UniqueName: \"kubernetes.io/projected/53937263-221c-4ee7-87fb-d1b03392fd73-kube-api-access-snbms\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zr95\" (UID: \"53937263-221c-4ee7-87fb-d1b03392fd73\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zr95" Jan 20 16:52:04 crc kubenswrapper[4558]: I0120 16:52:04.585135 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/53937263-221c-4ee7-87fb-d1b03392fd73-util\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zr95\" (UID: \"53937263-221c-4ee7-87fb-d1b03392fd73\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zr95" Jan 20 16:52:04 crc kubenswrapper[4558]: I0120 16:52:04.585217 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/53937263-221c-4ee7-87fb-d1b03392fd73-bundle\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zr95\" (UID: \"53937263-221c-4ee7-87fb-d1b03392fd73\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zr95" Jan 20 16:52:04 crc kubenswrapper[4558]: I0120 16:52:04.599783 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-snbms\" (UniqueName: \"kubernetes.io/projected/53937263-221c-4ee7-87fb-d1b03392fd73-kube-api-access-snbms\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zr95\" (UID: \"53937263-221c-4ee7-87fb-d1b03392fd73\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zr95" Jan 20 16:52:04 crc kubenswrapper[4558]: I0120 16:52:04.731148 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zr95" Jan 20 16:52:05 crc kubenswrapper[4558]: I0120 16:52:05.061627 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zr95"] Jan 20 16:52:05 crc kubenswrapper[4558]: I0120 16:52:05.914236 4558 generic.go:334] "Generic (PLEG): container finished" podID="53937263-221c-4ee7-87fb-d1b03392fd73" containerID="d50ae13cd66aa04a993341846b0b30b4416d4e8e26cd5b5c2711e75c30aed2e5" exitCode=0 Jan 20 16:52:05 crc kubenswrapper[4558]: I0120 16:52:05.914429 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zr95" event={"ID":"53937263-221c-4ee7-87fb-d1b03392fd73","Type":"ContainerDied","Data":"d50ae13cd66aa04a993341846b0b30b4416d4e8e26cd5b5c2711e75c30aed2e5"} Jan 20 16:52:05 crc kubenswrapper[4558]: I0120 16:52:05.914474 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zr95" event={"ID":"53937263-221c-4ee7-87fb-d1b03392fd73","Type":"ContainerStarted","Data":"ec214ede57366c91ca966fa7559e81ffb70499321a9f5f904d5936f5ae9418f3"} Jan 20 16:52:07 crc kubenswrapper[4558]: I0120 16:52:07.923686 4558 generic.go:334] "Generic (PLEG): container finished" podID="53937263-221c-4ee7-87fb-d1b03392fd73" containerID="16cbe3a528e972ced7903a9f940e695a3a47c207b050a747263272c689be1553" exitCode=0 Jan 20 16:52:07 crc kubenswrapper[4558]: I0120 16:52:07.923773 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zr95" event={"ID":"53937263-221c-4ee7-87fb-d1b03392fd73","Type":"ContainerDied","Data":"16cbe3a528e972ced7903a9f940e695a3a47c207b050a747263272c689be1553"} Jan 20 16:52:08 crc kubenswrapper[4558]: I0120 16:52:08.929357 4558 generic.go:334] "Generic (PLEG): container finished" podID="53937263-221c-4ee7-87fb-d1b03392fd73" containerID="f88f389a40aff0c5aede1b6cc6ee1b4ee71a7f1e83cf6f6c55ac533ba391b6b4" exitCode=0 Jan 20 16:52:08 crc kubenswrapper[4558]: I0120 16:52:08.929391 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zr95" event={"ID":"53937263-221c-4ee7-87fb-d1b03392fd73","Type":"ContainerDied","Data":"f88f389a40aff0c5aede1b6cc6ee1b4ee71a7f1e83cf6f6c55ac533ba391b6b4"} Jan 20 16:52:10 crc kubenswrapper[4558]: I0120 16:52:10.103644 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zr95" Jan 20 16:52:10 crc kubenswrapper[4558]: I0120 16:52:10.241439 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-snbms\" (UniqueName: \"kubernetes.io/projected/53937263-221c-4ee7-87fb-d1b03392fd73-kube-api-access-snbms\") pod \"53937263-221c-4ee7-87fb-d1b03392fd73\" (UID: \"53937263-221c-4ee7-87fb-d1b03392fd73\") " Jan 20 16:52:10 crc kubenswrapper[4558]: I0120 16:52:10.241494 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/53937263-221c-4ee7-87fb-d1b03392fd73-bundle\") pod \"53937263-221c-4ee7-87fb-d1b03392fd73\" (UID: \"53937263-221c-4ee7-87fb-d1b03392fd73\") " Jan 20 16:52:10 crc kubenswrapper[4558]: I0120 16:52:10.241539 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/53937263-221c-4ee7-87fb-d1b03392fd73-util\") pod \"53937263-221c-4ee7-87fb-d1b03392fd73\" (UID: \"53937263-221c-4ee7-87fb-d1b03392fd73\") " Jan 20 16:52:10 crc kubenswrapper[4558]: I0120 16:52:10.242206 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/53937263-221c-4ee7-87fb-d1b03392fd73-bundle" (OuterVolumeSpecName: "bundle") pod "53937263-221c-4ee7-87fb-d1b03392fd73" (UID: "53937263-221c-4ee7-87fb-d1b03392fd73"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 16:52:10 crc kubenswrapper[4558]: I0120 16:52:10.247111 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/53937263-221c-4ee7-87fb-d1b03392fd73-kube-api-access-snbms" (OuterVolumeSpecName: "kube-api-access-snbms") pod "53937263-221c-4ee7-87fb-d1b03392fd73" (UID: "53937263-221c-4ee7-87fb-d1b03392fd73"). InnerVolumeSpecName "kube-api-access-snbms". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:52:10 crc kubenswrapper[4558]: I0120 16:52:10.342908 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-snbms\" (UniqueName: \"kubernetes.io/projected/53937263-221c-4ee7-87fb-d1b03392fd73-kube-api-access-snbms\") on node \"crc\" DevicePath \"\"" Jan 20 16:52:10 crc kubenswrapper[4558]: I0120 16:52:10.342929 4558 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/53937263-221c-4ee7-87fb-d1b03392fd73-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 16:52:10 crc kubenswrapper[4558]: I0120 16:52:10.439058 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/53937263-221c-4ee7-87fb-d1b03392fd73-util" (OuterVolumeSpecName: "util") pod "53937263-221c-4ee7-87fb-d1b03392fd73" (UID: "53937263-221c-4ee7-87fb-d1b03392fd73"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 16:52:10 crc kubenswrapper[4558]: I0120 16:52:10.444298 4558 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/53937263-221c-4ee7-87fb-d1b03392fd73-util\") on node \"crc\" DevicePath \"\"" Jan 20 16:52:10 crc kubenswrapper[4558]: I0120 16:52:10.937684 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zr95" event={"ID":"53937263-221c-4ee7-87fb-d1b03392fd73","Type":"ContainerDied","Data":"ec214ede57366c91ca966fa7559e81ffb70499321a9f5f904d5936f5ae9418f3"} Jan 20 16:52:10 crc kubenswrapper[4558]: I0120 16:52:10.937717 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zr95" Jan 20 16:52:10 crc kubenswrapper[4558]: I0120 16:52:10.937717 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ec214ede57366c91ca966fa7559e81ffb70499321a9f5f904d5936f5ae9418f3" Jan 20 16:52:11 crc kubenswrapper[4558]: I0120 16:52:11.896678 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-lhmdh" podUID="dd7b53bb-d740-497c-a36e-87e51d6f05a6" containerName="console" containerID="cri-o://5640d2dba8d4d4223739aac688df00e0e4bd854674b1de562daa2b66cc2a5108" gracePeriod=15 Jan 20 16:52:12 crc kubenswrapper[4558]: I0120 16:52:12.218997 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-lhmdh_dd7b53bb-d740-497c-a36e-87e51d6f05a6/console/0.log" Jan 20 16:52:12 crc kubenswrapper[4558]: I0120 16:52:12.219352 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-lhmdh" Jan 20 16:52:12 crc kubenswrapper[4558]: I0120 16:52:12.365721 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/dd7b53bb-d740-497c-a36e-87e51d6f05a6-service-ca\") pod \"dd7b53bb-d740-497c-a36e-87e51d6f05a6\" (UID: \"dd7b53bb-d740-497c-a36e-87e51d6f05a6\") " Jan 20 16:52:12 crc kubenswrapper[4558]: I0120 16:52:12.365774 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/dd7b53bb-d740-497c-a36e-87e51d6f05a6-console-config\") pod \"dd7b53bb-d740-497c-a36e-87e51d6f05a6\" (UID: \"dd7b53bb-d740-497c-a36e-87e51d6f05a6\") " Jan 20 16:52:12 crc kubenswrapper[4558]: I0120 16:52:12.365833 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/dd7b53bb-d740-497c-a36e-87e51d6f05a6-trusted-ca-bundle\") pod \"dd7b53bb-d740-497c-a36e-87e51d6f05a6\" (UID: \"dd7b53bb-d740-497c-a36e-87e51d6f05a6\") " Jan 20 16:52:12 crc kubenswrapper[4558]: I0120 16:52:12.365865 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n6m5z\" (UniqueName: \"kubernetes.io/projected/dd7b53bb-d740-497c-a36e-87e51d6f05a6-kube-api-access-n6m5z\") pod \"dd7b53bb-d740-497c-a36e-87e51d6f05a6\" (UID: \"dd7b53bb-d740-497c-a36e-87e51d6f05a6\") " Jan 20 16:52:12 crc kubenswrapper[4558]: I0120 16:52:12.365900 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/dd7b53bb-d740-497c-a36e-87e51d6f05a6-console-serving-cert\") pod \"dd7b53bb-d740-497c-a36e-87e51d6f05a6\" (UID: \"dd7b53bb-d740-497c-a36e-87e51d6f05a6\") " Jan 20 16:52:12 crc kubenswrapper[4558]: I0120 16:52:12.365935 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/dd7b53bb-d740-497c-a36e-87e51d6f05a6-oauth-serving-cert\") pod \"dd7b53bb-d740-497c-a36e-87e51d6f05a6\" (UID: \"dd7b53bb-d740-497c-a36e-87e51d6f05a6\") " Jan 20 16:52:12 crc kubenswrapper[4558]: I0120 16:52:12.365955 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/dd7b53bb-d740-497c-a36e-87e51d6f05a6-console-oauth-config\") pod \"dd7b53bb-d740-497c-a36e-87e51d6f05a6\" (UID: \"dd7b53bb-d740-497c-a36e-87e51d6f05a6\") " Jan 20 16:52:12 crc kubenswrapper[4558]: I0120 16:52:12.366549 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dd7b53bb-d740-497c-a36e-87e51d6f05a6-service-ca" (OuterVolumeSpecName: "service-ca") pod "dd7b53bb-d740-497c-a36e-87e51d6f05a6" (UID: "dd7b53bb-d740-497c-a36e-87e51d6f05a6"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:52:12 crc kubenswrapper[4558]: I0120 16:52:12.366574 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dd7b53bb-d740-497c-a36e-87e51d6f05a6-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "dd7b53bb-d740-497c-a36e-87e51d6f05a6" (UID: "dd7b53bb-d740-497c-a36e-87e51d6f05a6"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:52:12 crc kubenswrapper[4558]: I0120 16:52:12.366560 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dd7b53bb-d740-497c-a36e-87e51d6f05a6-console-config" (OuterVolumeSpecName: "console-config") pod "dd7b53bb-d740-497c-a36e-87e51d6f05a6" (UID: "dd7b53bb-d740-497c-a36e-87e51d6f05a6"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:52:12 crc kubenswrapper[4558]: I0120 16:52:12.366621 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dd7b53bb-d740-497c-a36e-87e51d6f05a6-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "dd7b53bb-d740-497c-a36e-87e51d6f05a6" (UID: "dd7b53bb-d740-497c-a36e-87e51d6f05a6"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:52:12 crc kubenswrapper[4558]: I0120 16:52:12.366895 4558 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/dd7b53bb-d740-497c-a36e-87e51d6f05a6-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 20 16:52:12 crc kubenswrapper[4558]: I0120 16:52:12.366920 4558 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/dd7b53bb-d740-497c-a36e-87e51d6f05a6-service-ca\") on node \"crc\" DevicePath \"\"" Jan 20 16:52:12 crc kubenswrapper[4558]: I0120 16:52:12.366930 4558 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/dd7b53bb-d740-497c-a36e-87e51d6f05a6-console-config\") on node \"crc\" DevicePath \"\"" Jan 20 16:52:12 crc kubenswrapper[4558]: I0120 16:52:12.366940 4558 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/dd7b53bb-d740-497c-a36e-87e51d6f05a6-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 16:52:12 crc kubenswrapper[4558]: I0120 16:52:12.372761 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd7b53bb-d740-497c-a36e-87e51d6f05a6-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "dd7b53bb-d740-497c-a36e-87e51d6f05a6" (UID: "dd7b53bb-d740-497c-a36e-87e51d6f05a6"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:52:12 crc kubenswrapper[4558]: I0120 16:52:12.373001 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd7b53bb-d740-497c-a36e-87e51d6f05a6-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "dd7b53bb-d740-497c-a36e-87e51d6f05a6" (UID: "dd7b53bb-d740-497c-a36e-87e51d6f05a6"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:52:12 crc kubenswrapper[4558]: I0120 16:52:12.373045 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dd7b53bb-d740-497c-a36e-87e51d6f05a6-kube-api-access-n6m5z" (OuterVolumeSpecName: "kube-api-access-n6m5z") pod "dd7b53bb-d740-497c-a36e-87e51d6f05a6" (UID: "dd7b53bb-d740-497c-a36e-87e51d6f05a6"). InnerVolumeSpecName "kube-api-access-n6m5z". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:52:12 crc kubenswrapper[4558]: I0120 16:52:12.468427 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n6m5z\" (UniqueName: \"kubernetes.io/projected/dd7b53bb-d740-497c-a36e-87e51d6f05a6-kube-api-access-n6m5z\") on node \"crc\" DevicePath \"\"" Jan 20 16:52:12 crc kubenswrapper[4558]: I0120 16:52:12.468454 4558 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/dd7b53bb-d740-497c-a36e-87e51d6f05a6-console-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 20 16:52:12 crc kubenswrapper[4558]: I0120 16:52:12.468463 4558 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/dd7b53bb-d740-497c-a36e-87e51d6f05a6-console-oauth-config\") on node \"crc\" DevicePath \"\"" Jan 20 16:52:12 crc kubenswrapper[4558]: I0120 16:52:12.948029 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-lhmdh_dd7b53bb-d740-497c-a36e-87e51d6f05a6/console/0.log" Jan 20 16:52:12 crc kubenswrapper[4558]: I0120 16:52:12.948076 4558 generic.go:334] "Generic (PLEG): container finished" podID="dd7b53bb-d740-497c-a36e-87e51d6f05a6" containerID="5640d2dba8d4d4223739aac688df00e0e4bd854674b1de562daa2b66cc2a5108" exitCode=2 Jan 20 16:52:12 crc kubenswrapper[4558]: I0120 16:52:12.948106 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-lhmdh" event={"ID":"dd7b53bb-d740-497c-a36e-87e51d6f05a6","Type":"ContainerDied","Data":"5640d2dba8d4d4223739aac688df00e0e4bd854674b1de562daa2b66cc2a5108"} Jan 20 16:52:12 crc kubenswrapper[4558]: I0120 16:52:12.948130 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-lhmdh" event={"ID":"dd7b53bb-d740-497c-a36e-87e51d6f05a6","Type":"ContainerDied","Data":"0d9b60603a6be01eb70b927705a9134c95d87e7e2cba0cf840bf6e4f0f2bff3f"} Jan 20 16:52:12 crc kubenswrapper[4558]: I0120 16:52:12.948138 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-lhmdh" Jan 20 16:52:12 crc kubenswrapper[4558]: I0120 16:52:12.948146 4558 scope.go:117] "RemoveContainer" containerID="5640d2dba8d4d4223739aac688df00e0e4bd854674b1de562daa2b66cc2a5108" Jan 20 16:52:12 crc kubenswrapper[4558]: I0120 16:52:12.960012 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-lhmdh"] Jan 20 16:52:12 crc kubenswrapper[4558]: I0120 16:52:12.960550 4558 scope.go:117] "RemoveContainer" containerID="5640d2dba8d4d4223739aac688df00e0e4bd854674b1de562daa2b66cc2a5108" Jan 20 16:52:12 crc kubenswrapper[4558]: E0120 16:52:12.961343 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5640d2dba8d4d4223739aac688df00e0e4bd854674b1de562daa2b66cc2a5108\": container with ID starting with 5640d2dba8d4d4223739aac688df00e0e4bd854674b1de562daa2b66cc2a5108 not found: ID does not exist" containerID="5640d2dba8d4d4223739aac688df00e0e4bd854674b1de562daa2b66cc2a5108" Jan 20 16:52:12 crc kubenswrapper[4558]: I0120 16:52:12.961380 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5640d2dba8d4d4223739aac688df00e0e4bd854674b1de562daa2b66cc2a5108"} err="failed to get container status \"5640d2dba8d4d4223739aac688df00e0e4bd854674b1de562daa2b66cc2a5108\": rpc error: code = NotFound desc = could not find container \"5640d2dba8d4d4223739aac688df00e0e4bd854674b1de562daa2b66cc2a5108\": container with ID starting with 5640d2dba8d4d4223739aac688df00e0e4bd854674b1de562daa2b66cc2a5108 not found: ID does not exist" Jan 20 16:52:12 crc kubenswrapper[4558]: I0120 16:52:12.963013 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-lhmdh"] Jan 20 16:52:14 crc kubenswrapper[4558]: I0120 16:52:14.570472 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dd7b53bb-d740-497c-a36e-87e51d6f05a6" path="/var/lib/kubelet/pods/dd7b53bb-d740-497c-a36e-87e51d6f05a6/volumes" Jan 20 16:52:19 crc kubenswrapper[4558]: I0120 16:52:19.691604 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-747dffd588-lx9c6"] Jan 20 16:52:19 crc kubenswrapper[4558]: E0120 16:52:19.691977 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53937263-221c-4ee7-87fb-d1b03392fd73" containerName="extract" Jan 20 16:52:19 crc kubenswrapper[4558]: I0120 16:52:19.691989 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="53937263-221c-4ee7-87fb-d1b03392fd73" containerName="extract" Jan 20 16:52:19 crc kubenswrapper[4558]: E0120 16:52:19.691999 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53937263-221c-4ee7-87fb-d1b03392fd73" containerName="pull" Jan 20 16:52:19 crc kubenswrapper[4558]: I0120 16:52:19.692005 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="53937263-221c-4ee7-87fb-d1b03392fd73" containerName="pull" Jan 20 16:52:19 crc kubenswrapper[4558]: E0120 16:52:19.692022 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd7b53bb-d740-497c-a36e-87e51d6f05a6" containerName="console" Jan 20 16:52:19 crc kubenswrapper[4558]: I0120 16:52:19.692028 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd7b53bb-d740-497c-a36e-87e51d6f05a6" containerName="console" Jan 20 16:52:19 crc kubenswrapper[4558]: E0120 16:52:19.692037 4558 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="53937263-221c-4ee7-87fb-d1b03392fd73" containerName="util" Jan 20 16:52:19 crc kubenswrapper[4558]: I0120 16:52:19.692042 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="53937263-221c-4ee7-87fb-d1b03392fd73" containerName="util" Jan 20 16:52:19 crc kubenswrapper[4558]: I0120 16:52:19.692121 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="53937263-221c-4ee7-87fb-d1b03392fd73" containerName="extract" Jan 20 16:52:19 crc kubenswrapper[4558]: I0120 16:52:19.692136 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd7b53bb-d740-497c-a36e-87e51d6f05a6" containerName="console" Jan 20 16:52:19 crc kubenswrapper[4558]: I0120 16:52:19.692459 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-747dffd588-lx9c6" Jan 20 16:52:19 crc kubenswrapper[4558]: I0120 16:52:19.693641 4558 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Jan 20 16:52:19 crc kubenswrapper[4558]: I0120 16:52:19.694327 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Jan 20 16:52:19 crc kubenswrapper[4558]: I0120 16:52:19.694383 4558 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Jan 20 16:52:19 crc kubenswrapper[4558]: I0120 16:52:19.694489 4558 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-8cpx9" Jan 20 16:52:19 crc kubenswrapper[4558]: I0120 16:52:19.694508 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Jan 20 16:52:19 crc kubenswrapper[4558]: I0120 16:52:19.705936 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-747dffd588-lx9c6"] Jan 20 16:52:19 crc kubenswrapper[4558]: I0120 16:52:19.847115 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/704a513a-fe43-4730-9f48-6c85506e338b-apiservice-cert\") pod \"metallb-operator-controller-manager-747dffd588-lx9c6\" (UID: \"704a513a-fe43-4730-9f48-6c85506e338b\") " pod="metallb-system/metallb-operator-controller-manager-747dffd588-lx9c6" Jan 20 16:52:19 crc kubenswrapper[4558]: I0120 16:52:19.847184 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5plnn\" (UniqueName: \"kubernetes.io/projected/704a513a-fe43-4730-9f48-6c85506e338b-kube-api-access-5plnn\") pod \"metallb-operator-controller-manager-747dffd588-lx9c6\" (UID: \"704a513a-fe43-4730-9f48-6c85506e338b\") " pod="metallb-system/metallb-operator-controller-manager-747dffd588-lx9c6" Jan 20 16:52:19 crc kubenswrapper[4558]: I0120 16:52:19.847322 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/704a513a-fe43-4730-9f48-6c85506e338b-webhook-cert\") pod \"metallb-operator-controller-manager-747dffd588-lx9c6\" (UID: \"704a513a-fe43-4730-9f48-6c85506e338b\") " pod="metallb-system/metallb-operator-controller-manager-747dffd588-lx9c6" Jan 20 16:52:19 crc kubenswrapper[4558]: I0120 16:52:19.913812 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-7d7c8846bf-bnzd9"] Jan 20 16:52:19 crc 
kubenswrapper[4558]: I0120 16:52:19.914398 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-7d7c8846bf-bnzd9" Jan 20 16:52:19 crc kubenswrapper[4558]: I0120 16:52:19.916373 4558 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Jan 20 16:52:19 crc kubenswrapper[4558]: I0120 16:52:19.916497 4558 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Jan 20 16:52:19 crc kubenswrapper[4558]: I0120 16:52:19.916523 4558 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-sbmdq" Jan 20 16:52:19 crc kubenswrapper[4558]: I0120 16:52:19.948535 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/704a513a-fe43-4730-9f48-6c85506e338b-webhook-cert\") pod \"metallb-operator-controller-manager-747dffd588-lx9c6\" (UID: \"704a513a-fe43-4730-9f48-6c85506e338b\") " pod="metallb-system/metallb-operator-controller-manager-747dffd588-lx9c6" Jan 20 16:52:19 crc kubenswrapper[4558]: I0120 16:52:19.948592 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/704a513a-fe43-4730-9f48-6c85506e338b-apiservice-cert\") pod \"metallb-operator-controller-manager-747dffd588-lx9c6\" (UID: \"704a513a-fe43-4730-9f48-6c85506e338b\") " pod="metallb-system/metallb-operator-controller-manager-747dffd588-lx9c6" Jan 20 16:52:19 crc kubenswrapper[4558]: I0120 16:52:19.948626 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5plnn\" (UniqueName: \"kubernetes.io/projected/704a513a-fe43-4730-9f48-6c85506e338b-kube-api-access-5plnn\") pod \"metallb-operator-controller-manager-747dffd588-lx9c6\" (UID: \"704a513a-fe43-4730-9f48-6c85506e338b\") " pod="metallb-system/metallb-operator-controller-manager-747dffd588-lx9c6" Jan 20 16:52:19 crc kubenswrapper[4558]: I0120 16:52:19.950646 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-7d7c8846bf-bnzd9"] Jan 20 16:52:19 crc kubenswrapper[4558]: I0120 16:52:19.953083 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/704a513a-fe43-4730-9f48-6c85506e338b-webhook-cert\") pod \"metallb-operator-controller-manager-747dffd588-lx9c6\" (UID: \"704a513a-fe43-4730-9f48-6c85506e338b\") " pod="metallb-system/metallb-operator-controller-manager-747dffd588-lx9c6" Jan 20 16:52:19 crc kubenswrapper[4558]: I0120 16:52:19.955568 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/704a513a-fe43-4730-9f48-6c85506e338b-apiservice-cert\") pod \"metallb-operator-controller-manager-747dffd588-lx9c6\" (UID: \"704a513a-fe43-4730-9f48-6c85506e338b\") " pod="metallb-system/metallb-operator-controller-manager-747dffd588-lx9c6" Jan 20 16:52:19 crc kubenswrapper[4558]: I0120 16:52:19.963782 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5plnn\" (UniqueName: \"kubernetes.io/projected/704a513a-fe43-4730-9f48-6c85506e338b-kube-api-access-5plnn\") pod \"metallb-operator-controller-manager-747dffd588-lx9c6\" (UID: \"704a513a-fe43-4730-9f48-6c85506e338b\") " 
pod="metallb-system/metallb-operator-controller-manager-747dffd588-lx9c6" Jan 20 16:52:20 crc kubenswrapper[4558]: I0120 16:52:20.004966 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-747dffd588-lx9c6" Jan 20 16:52:20 crc kubenswrapper[4558]: I0120 16:52:20.049676 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/e29496dd-2347-49e8-b4ca-1d071c0dcf2a-webhook-cert\") pod \"metallb-operator-webhook-server-7d7c8846bf-bnzd9\" (UID: \"e29496dd-2347-49e8-b4ca-1d071c0dcf2a\") " pod="metallb-system/metallb-operator-webhook-server-7d7c8846bf-bnzd9" Jan 20 16:52:20 crc kubenswrapper[4558]: I0120 16:52:20.049721 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/e29496dd-2347-49e8-b4ca-1d071c0dcf2a-apiservice-cert\") pod \"metallb-operator-webhook-server-7d7c8846bf-bnzd9\" (UID: \"e29496dd-2347-49e8-b4ca-1d071c0dcf2a\") " pod="metallb-system/metallb-operator-webhook-server-7d7c8846bf-bnzd9" Jan 20 16:52:20 crc kubenswrapper[4558]: I0120 16:52:20.049756 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7z2fr\" (UniqueName: \"kubernetes.io/projected/e29496dd-2347-49e8-b4ca-1d071c0dcf2a-kube-api-access-7z2fr\") pod \"metallb-operator-webhook-server-7d7c8846bf-bnzd9\" (UID: \"e29496dd-2347-49e8-b4ca-1d071c0dcf2a\") " pod="metallb-system/metallb-operator-webhook-server-7d7c8846bf-bnzd9" Jan 20 16:52:20 crc kubenswrapper[4558]: I0120 16:52:20.150577 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/e29496dd-2347-49e8-b4ca-1d071c0dcf2a-webhook-cert\") pod \"metallb-operator-webhook-server-7d7c8846bf-bnzd9\" (UID: \"e29496dd-2347-49e8-b4ca-1d071c0dcf2a\") " pod="metallb-system/metallb-operator-webhook-server-7d7c8846bf-bnzd9" Jan 20 16:52:20 crc kubenswrapper[4558]: I0120 16:52:20.150617 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/e29496dd-2347-49e8-b4ca-1d071c0dcf2a-apiservice-cert\") pod \"metallb-operator-webhook-server-7d7c8846bf-bnzd9\" (UID: \"e29496dd-2347-49e8-b4ca-1d071c0dcf2a\") " pod="metallb-system/metallb-operator-webhook-server-7d7c8846bf-bnzd9" Jan 20 16:52:20 crc kubenswrapper[4558]: I0120 16:52:20.150649 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7z2fr\" (UniqueName: \"kubernetes.io/projected/e29496dd-2347-49e8-b4ca-1d071c0dcf2a-kube-api-access-7z2fr\") pod \"metallb-operator-webhook-server-7d7c8846bf-bnzd9\" (UID: \"e29496dd-2347-49e8-b4ca-1d071c0dcf2a\") " pod="metallb-system/metallb-operator-webhook-server-7d7c8846bf-bnzd9" Jan 20 16:52:20 crc kubenswrapper[4558]: I0120 16:52:20.156196 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/e29496dd-2347-49e8-b4ca-1d071c0dcf2a-apiservice-cert\") pod \"metallb-operator-webhook-server-7d7c8846bf-bnzd9\" (UID: \"e29496dd-2347-49e8-b4ca-1d071c0dcf2a\") " pod="metallb-system/metallb-operator-webhook-server-7d7c8846bf-bnzd9" Jan 20 16:52:20 crc kubenswrapper[4558]: I0120 16:52:20.156213 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: 
\"kubernetes.io/secret/e29496dd-2347-49e8-b4ca-1d071c0dcf2a-webhook-cert\") pod \"metallb-operator-webhook-server-7d7c8846bf-bnzd9\" (UID: \"e29496dd-2347-49e8-b4ca-1d071c0dcf2a\") " pod="metallb-system/metallb-operator-webhook-server-7d7c8846bf-bnzd9" Jan 20 16:52:20 crc kubenswrapper[4558]: I0120 16:52:20.168971 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7z2fr\" (UniqueName: \"kubernetes.io/projected/e29496dd-2347-49e8-b4ca-1d071c0dcf2a-kube-api-access-7z2fr\") pod \"metallb-operator-webhook-server-7d7c8846bf-bnzd9\" (UID: \"e29496dd-2347-49e8-b4ca-1d071c0dcf2a\") " pod="metallb-system/metallb-operator-webhook-server-7d7c8846bf-bnzd9" Jan 20 16:52:20 crc kubenswrapper[4558]: I0120 16:52:20.225303 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-7d7c8846bf-bnzd9" Jan 20 16:52:20 crc kubenswrapper[4558]: I0120 16:52:20.364992 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-747dffd588-lx9c6"] Jan 20 16:52:20 crc kubenswrapper[4558]: W0120 16:52:20.374623 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod704a513a_fe43_4730_9f48_6c85506e338b.slice/crio-d552bd81aa2dbed322db1935b6b5b835ab9a43c1fc3cf11e4349544320aac42a WatchSource:0}: Error finding container d552bd81aa2dbed322db1935b6b5b835ab9a43c1fc3cf11e4349544320aac42a: Status 404 returned error can't find the container with id d552bd81aa2dbed322db1935b6b5b835ab9a43c1fc3cf11e4349544320aac42a Jan 20 16:52:20 crc kubenswrapper[4558]: I0120 16:52:20.420251 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-7d7c8846bf-bnzd9"] Jan 20 16:52:20 crc kubenswrapper[4558]: W0120 16:52:20.427352 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode29496dd_2347_49e8_b4ca_1d071c0dcf2a.slice/crio-4327914be990590b34f8ac8c0372d7301cbfcc61230e302daa88c88480b4b1f9 WatchSource:0}: Error finding container 4327914be990590b34f8ac8c0372d7301cbfcc61230e302daa88c88480b4b1f9: Status 404 returned error can't find the container with id 4327914be990590b34f8ac8c0372d7301cbfcc61230e302daa88c88480b4b1f9 Jan 20 16:52:20 crc kubenswrapper[4558]: I0120 16:52:20.983816 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-7d7c8846bf-bnzd9" event={"ID":"e29496dd-2347-49e8-b4ca-1d071c0dcf2a","Type":"ContainerStarted","Data":"4327914be990590b34f8ac8c0372d7301cbfcc61230e302daa88c88480b4b1f9"} Jan 20 16:52:20 crc kubenswrapper[4558]: I0120 16:52:20.984830 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-747dffd588-lx9c6" event={"ID":"704a513a-fe43-4730-9f48-6c85506e338b","Type":"ContainerStarted","Data":"d552bd81aa2dbed322db1935b6b5b835ab9a43c1fc3cf11e4349544320aac42a"} Jan 20 16:52:24 crc kubenswrapper[4558]: I0120 16:52:24.004370 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-747dffd588-lx9c6" event={"ID":"704a513a-fe43-4730-9f48-6c85506e338b","Type":"ContainerStarted","Data":"415e2b6e72d14b311f35b86ccda4aa53690f09d50e7e03a1a7dc1962357a22cc"} Jan 20 16:52:24 crc kubenswrapper[4558]: I0120 16:52:24.004611 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="metallb-system/metallb-operator-controller-manager-747dffd588-lx9c6" Jan 20 16:52:24 crc kubenswrapper[4558]: I0120 16:52:24.019549 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-747dffd588-lx9c6" podStartSLOduration=2.050603309 podStartE2EDuration="5.01953853s" podCreationTimestamp="2026-01-20 16:52:19 +0000 UTC" firstStartedPulling="2026-01-20 16:52:20.376210957 +0000 UTC m=+634.136548923" lastFinishedPulling="2026-01-20 16:52:23.345146178 +0000 UTC m=+637.105484144" observedRunningTime="2026-01-20 16:52:24.017154468 +0000 UTC m=+637.777492435" watchObservedRunningTime="2026-01-20 16:52:24.01953853 +0000 UTC m=+637.779876498" Jan 20 16:52:27 crc kubenswrapper[4558]: I0120 16:52:27.020227 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-7d7c8846bf-bnzd9" event={"ID":"e29496dd-2347-49e8-b4ca-1d071c0dcf2a","Type":"ContainerStarted","Data":"85f0e6f543c912590d5555e7e2673e41458782511435c8874f3b5485c9221e0d"} Jan 20 16:52:27 crc kubenswrapper[4558]: I0120 16:52:27.020618 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-7d7c8846bf-bnzd9" Jan 20 16:52:40 crc kubenswrapper[4558]: I0120 16:52:40.229643 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-7d7c8846bf-bnzd9" Jan 20 16:52:40 crc kubenswrapper[4558]: I0120 16:52:40.245226 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-7d7c8846bf-bnzd9" podStartSLOduration=14.855889769000001 podStartE2EDuration="21.245207839s" podCreationTimestamp="2026-01-20 16:52:19 +0000 UTC" firstStartedPulling="2026-01-20 16:52:20.430339995 +0000 UTC m=+634.190677963" lastFinishedPulling="2026-01-20 16:52:26.819658066 +0000 UTC m=+640.579996033" observedRunningTime="2026-01-20 16:52:27.036558044 +0000 UTC m=+640.796896011" watchObservedRunningTime="2026-01-20 16:52:40.245207839 +0000 UTC m=+654.005545805" Jan 20 16:53:00 crc kubenswrapper[4558]: I0120 16:53:00.007921 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-747dffd588-lx9c6" Jan 20 16:53:00 crc kubenswrapper[4558]: I0120 16:53:00.608888 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-b75qk"] Jan 20 16:53:00 crc kubenswrapper[4558]: I0120 16:53:00.610828 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-b75qk" Jan 20 16:53:00 crc kubenswrapper[4558]: I0120 16:53:00.612000 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-7df86c4f6c-vf5fl"] Jan 20 16:53:00 crc kubenswrapper[4558]: I0120 16:53:00.612224 4558 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-z76mt" Jan 20 16:53:00 crc kubenswrapper[4558]: I0120 16:53:00.612233 4558 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Jan 20 16:53:00 crc kubenswrapper[4558]: I0120 16:53:00.612646 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Jan 20 16:53:00 crc kubenswrapper[4558]: I0120 16:53:00.612656 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-vf5fl" Jan 20 16:53:00 crc kubenswrapper[4558]: I0120 16:53:00.614040 4558 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Jan 20 16:53:00 crc kubenswrapper[4558]: I0120 16:53:00.617925 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7df86c4f6c-vf5fl"] Jan 20 16:53:00 crc kubenswrapper[4558]: I0120 16:53:00.705650 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-w684s"] Jan 20 16:53:00 crc kubenswrapper[4558]: I0120 16:53:00.706547 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-w684s" Jan 20 16:53:00 crc kubenswrapper[4558]: I0120 16:53:00.707968 4558 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Jan 20 16:53:00 crc kubenswrapper[4558]: I0120 16:53:00.708220 4558 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Jan 20 16:53:00 crc kubenswrapper[4558]: I0120 16:53:00.713304 4558 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-pqcm8" Jan 20 16:53:00 crc kubenswrapper[4558]: I0120 16:53:00.718094 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-6968d8fdc4-d9p9k"] Jan 20 16:53:00 crc kubenswrapper[4558]: I0120 16:53:00.718737 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-6968d8fdc4-d9p9k" Jan 20 16:53:00 crc kubenswrapper[4558]: I0120 16:53:00.719391 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Jan 20 16:53:00 crc kubenswrapper[4558]: I0120 16:53:00.721432 4558 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Jan 20 16:53:00 crc kubenswrapper[4558]: I0120 16:53:00.738905 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-6968d8fdc4-d9p9k"] Jan 20 16:53:00 crc kubenswrapper[4558]: I0120 16:53:00.807620 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/59de8a65-6da7-4086-93d4-d76fa35f7660-frr-sockets\") pod \"frr-k8s-b75qk\" (UID: \"59de8a65-6da7-4086-93d4-d76fa35f7660\") " pod="metallb-system/frr-k8s-b75qk" Jan 20 16:53:00 crc kubenswrapper[4558]: I0120 16:53:00.807686 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/59de8a65-6da7-4086-93d4-d76fa35f7660-frr-startup\") pod \"frr-k8s-b75qk\" (UID: \"59de8a65-6da7-4086-93d4-d76fa35f7660\") " pod="metallb-system/frr-k8s-b75qk" Jan 20 16:53:00 crc kubenswrapper[4558]: I0120 16:53:00.807723 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6qlc9\" (UniqueName: \"kubernetes.io/projected/e7f30f4b-a84e-47b2-b393-c52757e6ca69-kube-api-access-6qlc9\") pod \"frr-k8s-webhook-server-7df86c4f6c-vf5fl\" (UID: \"e7f30f4b-a84e-47b2-b393-c52757e6ca69\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-vf5fl" Jan 20 16:53:00 crc kubenswrapper[4558]: I0120 16:53:00.807746 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bx6gp\" (UniqueName: 
\"kubernetes.io/projected/241ae411-7e63-4bb6-a110-b9983a418f9e-kube-api-access-bx6gp\") pod \"speaker-w684s\" (UID: \"241ae411-7e63-4bb6-a110-b9983a418f9e\") " pod="metallb-system/speaker-w684s" Jan 20 16:53:00 crc kubenswrapper[4558]: I0120 16:53:00.807771 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/59de8a65-6da7-4086-93d4-d76fa35f7660-metrics\") pod \"frr-k8s-b75qk\" (UID: \"59de8a65-6da7-4086-93d4-d76fa35f7660\") " pod="metallb-system/frr-k8s-b75qk" Jan 20 16:53:00 crc kubenswrapper[4558]: I0120 16:53:00.807895 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5835ce2a-d074-4a95-aa34-ad3a62f77503-metrics-certs\") pod \"controller-6968d8fdc4-d9p9k\" (UID: \"5835ce2a-d074-4a95-aa34-ad3a62f77503\") " pod="metallb-system/controller-6968d8fdc4-d9p9k" Jan 20 16:53:00 crc kubenswrapper[4558]: I0120 16:53:00.807935 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l2js5\" (UniqueName: \"kubernetes.io/projected/5835ce2a-d074-4a95-aa34-ad3a62f77503-kube-api-access-l2js5\") pod \"controller-6968d8fdc4-d9p9k\" (UID: \"5835ce2a-d074-4a95-aa34-ad3a62f77503\") " pod="metallb-system/controller-6968d8fdc4-d9p9k" Jan 20 16:53:00 crc kubenswrapper[4558]: I0120 16:53:00.807982 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/241ae411-7e63-4bb6-a110-b9983a418f9e-memberlist\") pod \"speaker-w684s\" (UID: \"241ae411-7e63-4bb6-a110-b9983a418f9e\") " pod="metallb-system/speaker-w684s" Jan 20 16:53:00 crc kubenswrapper[4558]: I0120 16:53:00.808006 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/59de8a65-6da7-4086-93d4-d76fa35f7660-metrics-certs\") pod \"frr-k8s-b75qk\" (UID: \"59de8a65-6da7-4086-93d4-d76fa35f7660\") " pod="metallb-system/frr-k8s-b75qk" Jan 20 16:53:00 crc kubenswrapper[4558]: I0120 16:53:00.808024 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e7f30f4b-a84e-47b2-b393-c52757e6ca69-cert\") pod \"frr-k8s-webhook-server-7df86c4f6c-vf5fl\" (UID: \"e7f30f4b-a84e-47b2-b393-c52757e6ca69\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-vf5fl" Jan 20 16:53:00 crc kubenswrapper[4558]: I0120 16:53:00.808045 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/5835ce2a-d074-4a95-aa34-ad3a62f77503-cert\") pod \"controller-6968d8fdc4-d9p9k\" (UID: \"5835ce2a-d074-4a95-aa34-ad3a62f77503\") " pod="metallb-system/controller-6968d8fdc4-d9p9k" Jan 20 16:53:00 crc kubenswrapper[4558]: I0120 16:53:00.808072 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/59de8a65-6da7-4086-93d4-d76fa35f7660-frr-conf\") pod \"frr-k8s-b75qk\" (UID: \"59de8a65-6da7-4086-93d4-d76fa35f7660\") " pod="metallb-system/frr-k8s-b75qk" Jan 20 16:53:00 crc kubenswrapper[4558]: I0120 16:53:00.808118 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: 
\"kubernetes.io/empty-dir/59de8a65-6da7-4086-93d4-d76fa35f7660-reloader\") pod \"frr-k8s-b75qk\" (UID: \"59de8a65-6da7-4086-93d4-d76fa35f7660\") " pod="metallb-system/frr-k8s-b75qk" Jan 20 16:53:00 crc kubenswrapper[4558]: I0120 16:53:00.808138 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/241ae411-7e63-4bb6-a110-b9983a418f9e-metallb-excludel2\") pod \"speaker-w684s\" (UID: \"241ae411-7e63-4bb6-a110-b9983a418f9e\") " pod="metallb-system/speaker-w684s" Jan 20 16:53:00 crc kubenswrapper[4558]: I0120 16:53:00.808159 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fcqhl\" (UniqueName: \"kubernetes.io/projected/59de8a65-6da7-4086-93d4-d76fa35f7660-kube-api-access-fcqhl\") pod \"frr-k8s-b75qk\" (UID: \"59de8a65-6da7-4086-93d4-d76fa35f7660\") " pod="metallb-system/frr-k8s-b75qk" Jan 20 16:53:00 crc kubenswrapper[4558]: I0120 16:53:00.808188 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/241ae411-7e63-4bb6-a110-b9983a418f9e-metrics-certs\") pod \"speaker-w684s\" (UID: \"241ae411-7e63-4bb6-a110-b9983a418f9e\") " pod="metallb-system/speaker-w684s" Jan 20 16:53:00 crc kubenswrapper[4558]: I0120 16:53:00.908946 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/59de8a65-6da7-4086-93d4-d76fa35f7660-metrics\") pod \"frr-k8s-b75qk\" (UID: \"59de8a65-6da7-4086-93d4-d76fa35f7660\") " pod="metallb-system/frr-k8s-b75qk" Jan 20 16:53:00 crc kubenswrapper[4558]: I0120 16:53:00.908996 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5835ce2a-d074-4a95-aa34-ad3a62f77503-metrics-certs\") pod \"controller-6968d8fdc4-d9p9k\" (UID: \"5835ce2a-d074-4a95-aa34-ad3a62f77503\") " pod="metallb-system/controller-6968d8fdc4-d9p9k" Jan 20 16:53:00 crc kubenswrapper[4558]: I0120 16:53:00.909024 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l2js5\" (UniqueName: \"kubernetes.io/projected/5835ce2a-d074-4a95-aa34-ad3a62f77503-kube-api-access-l2js5\") pod \"controller-6968d8fdc4-d9p9k\" (UID: \"5835ce2a-d074-4a95-aa34-ad3a62f77503\") " pod="metallb-system/controller-6968d8fdc4-d9p9k" Jan 20 16:53:00 crc kubenswrapper[4558]: I0120 16:53:00.909062 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/241ae411-7e63-4bb6-a110-b9983a418f9e-memberlist\") pod \"speaker-w684s\" (UID: \"241ae411-7e63-4bb6-a110-b9983a418f9e\") " pod="metallb-system/speaker-w684s" Jan 20 16:53:00 crc kubenswrapper[4558]: I0120 16:53:00.909085 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e7f30f4b-a84e-47b2-b393-c52757e6ca69-cert\") pod \"frr-k8s-webhook-server-7df86c4f6c-vf5fl\" (UID: \"e7f30f4b-a84e-47b2-b393-c52757e6ca69\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-vf5fl" Jan 20 16:53:00 crc kubenswrapper[4558]: I0120 16:53:00.909100 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/59de8a65-6da7-4086-93d4-d76fa35f7660-metrics-certs\") pod \"frr-k8s-b75qk\" (UID: 
\"59de8a65-6da7-4086-93d4-d76fa35f7660\") " pod="metallb-system/frr-k8s-b75qk" Jan 20 16:53:00 crc kubenswrapper[4558]: I0120 16:53:00.909113 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/5835ce2a-d074-4a95-aa34-ad3a62f77503-cert\") pod \"controller-6968d8fdc4-d9p9k\" (UID: \"5835ce2a-d074-4a95-aa34-ad3a62f77503\") " pod="metallb-system/controller-6968d8fdc4-d9p9k" Jan 20 16:53:00 crc kubenswrapper[4558]: I0120 16:53:00.909128 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/59de8a65-6da7-4086-93d4-d76fa35f7660-frr-conf\") pod \"frr-k8s-b75qk\" (UID: \"59de8a65-6da7-4086-93d4-d76fa35f7660\") " pod="metallb-system/frr-k8s-b75qk" Jan 20 16:53:00 crc kubenswrapper[4558]: E0120 16:53:00.909134 4558 secret.go:188] Couldn't get secret metallb-system/controller-certs-secret: secret "controller-certs-secret" not found Jan 20 16:53:00 crc kubenswrapper[4558]: E0120 16:53:00.909186 4558 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Jan 20 16:53:00 crc kubenswrapper[4558]: E0120 16:53:00.909213 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5835ce2a-d074-4a95-aa34-ad3a62f77503-metrics-certs podName:5835ce2a-d074-4a95-aa34-ad3a62f77503 nodeName:}" failed. No retries permitted until 2026-01-20 16:53:01.409196354 +0000 UTC m=+675.169534312 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/5835ce2a-d074-4a95-aa34-ad3a62f77503-metrics-certs") pod "controller-6968d8fdc4-d9p9k" (UID: "5835ce2a-d074-4a95-aa34-ad3a62f77503") : secret "controller-certs-secret" not found Jan 20 16:53:00 crc kubenswrapper[4558]: E0120 16:53:00.909230 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/241ae411-7e63-4bb6-a110-b9983a418f9e-memberlist podName:241ae411-7e63-4bb6-a110-b9983a418f9e nodeName:}" failed. No retries permitted until 2026-01-20 16:53:01.409221882 +0000 UTC m=+675.169559849 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/241ae411-7e63-4bb6-a110-b9983a418f9e-memberlist") pod "speaker-w684s" (UID: "241ae411-7e63-4bb6-a110-b9983a418f9e") : secret "metallb-memberlist" not found Jan 20 16:53:00 crc kubenswrapper[4558]: I0120 16:53:00.909148 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/59de8a65-6da7-4086-93d4-d76fa35f7660-reloader\") pod \"frr-k8s-b75qk\" (UID: \"59de8a65-6da7-4086-93d4-d76fa35f7660\") " pod="metallb-system/frr-k8s-b75qk" Jan 20 16:53:00 crc kubenswrapper[4558]: I0120 16:53:00.909270 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/241ae411-7e63-4bb6-a110-b9983a418f9e-metallb-excludel2\") pod \"speaker-w684s\" (UID: \"241ae411-7e63-4bb6-a110-b9983a418f9e\") " pod="metallb-system/speaker-w684s" Jan 20 16:53:00 crc kubenswrapper[4558]: I0120 16:53:00.909309 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fcqhl\" (UniqueName: \"kubernetes.io/projected/59de8a65-6da7-4086-93d4-d76fa35f7660-kube-api-access-fcqhl\") pod \"frr-k8s-b75qk\" (UID: \"59de8a65-6da7-4086-93d4-d76fa35f7660\") " pod="metallb-system/frr-k8s-b75qk" Jan 20 16:53:00 crc kubenswrapper[4558]: I0120 16:53:00.909328 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/241ae411-7e63-4bb6-a110-b9983a418f9e-metrics-certs\") pod \"speaker-w684s\" (UID: \"241ae411-7e63-4bb6-a110-b9983a418f9e\") " pod="metallb-system/speaker-w684s" Jan 20 16:53:00 crc kubenswrapper[4558]: I0120 16:53:00.909378 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/59de8a65-6da7-4086-93d4-d76fa35f7660-frr-sockets\") pod \"frr-k8s-b75qk\" (UID: \"59de8a65-6da7-4086-93d4-d76fa35f7660\") " pod="metallb-system/frr-k8s-b75qk" Jan 20 16:53:00 crc kubenswrapper[4558]: I0120 16:53:00.909413 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/59de8a65-6da7-4086-93d4-d76fa35f7660-frr-startup\") pod \"frr-k8s-b75qk\" (UID: \"59de8a65-6da7-4086-93d4-d76fa35f7660\") " pod="metallb-system/frr-k8s-b75qk" Jan 20 16:53:00 crc kubenswrapper[4558]: I0120 16:53:00.909443 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6qlc9\" (UniqueName: \"kubernetes.io/projected/e7f30f4b-a84e-47b2-b393-c52757e6ca69-kube-api-access-6qlc9\") pod \"frr-k8s-webhook-server-7df86c4f6c-vf5fl\" (UID: \"e7f30f4b-a84e-47b2-b393-c52757e6ca69\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-vf5fl" Jan 20 16:53:00 crc kubenswrapper[4558]: I0120 16:53:00.909463 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bx6gp\" (UniqueName: \"kubernetes.io/projected/241ae411-7e63-4bb6-a110-b9983a418f9e-kube-api-access-bx6gp\") pod \"speaker-w684s\" (UID: \"241ae411-7e63-4bb6-a110-b9983a418f9e\") " pod="metallb-system/speaker-w684s" Jan 20 16:53:00 crc kubenswrapper[4558]: I0120 16:53:00.910022 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/59de8a65-6da7-4086-93d4-d76fa35f7660-metrics\") pod \"frr-k8s-b75qk\" (UID: \"59de8a65-6da7-4086-93d4-d76fa35f7660\") " 
pod="metallb-system/frr-k8s-b75qk" Jan 20 16:53:00 crc kubenswrapper[4558]: I0120 16:53:00.910178 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/59de8a65-6da7-4086-93d4-d76fa35f7660-reloader\") pod \"frr-k8s-b75qk\" (UID: \"59de8a65-6da7-4086-93d4-d76fa35f7660\") " pod="metallb-system/frr-k8s-b75qk" Jan 20 16:53:00 crc kubenswrapper[4558]: I0120 16:53:00.910328 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/241ae411-7e63-4bb6-a110-b9983a418f9e-metallb-excludel2\") pod \"speaker-w684s\" (UID: \"241ae411-7e63-4bb6-a110-b9983a418f9e\") " pod="metallb-system/speaker-w684s" Jan 20 16:53:00 crc kubenswrapper[4558]: I0120 16:53:00.910498 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/59de8a65-6da7-4086-93d4-d76fa35f7660-frr-sockets\") pod \"frr-k8s-b75qk\" (UID: \"59de8a65-6da7-4086-93d4-d76fa35f7660\") " pod="metallb-system/frr-k8s-b75qk" Jan 20 16:53:00 crc kubenswrapper[4558]: I0120 16:53:00.910572 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/59de8a65-6da7-4086-93d4-d76fa35f7660-frr-startup\") pod \"frr-k8s-b75qk\" (UID: \"59de8a65-6da7-4086-93d4-d76fa35f7660\") " pod="metallb-system/frr-k8s-b75qk" Jan 20 16:53:00 crc kubenswrapper[4558]: I0120 16:53:00.910689 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/59de8a65-6da7-4086-93d4-d76fa35f7660-frr-conf\") pod \"frr-k8s-b75qk\" (UID: \"59de8a65-6da7-4086-93d4-d76fa35f7660\") " pod="metallb-system/frr-k8s-b75qk" Jan 20 16:53:00 crc kubenswrapper[4558]: I0120 16:53:00.912388 4558 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Jan 20 16:53:00 crc kubenswrapper[4558]: I0120 16:53:00.914666 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/59de8a65-6da7-4086-93d4-d76fa35f7660-metrics-certs\") pod \"frr-k8s-b75qk\" (UID: \"59de8a65-6da7-4086-93d4-d76fa35f7660\") " pod="metallb-system/frr-k8s-b75qk" Jan 20 16:53:00 crc kubenswrapper[4558]: I0120 16:53:00.914922 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/241ae411-7e63-4bb6-a110-b9983a418f9e-metrics-certs\") pod \"speaker-w684s\" (UID: \"241ae411-7e63-4bb6-a110-b9983a418f9e\") " pod="metallb-system/speaker-w684s" Jan 20 16:53:00 crc kubenswrapper[4558]: I0120 16:53:00.915047 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e7f30f4b-a84e-47b2-b393-c52757e6ca69-cert\") pod \"frr-k8s-webhook-server-7df86c4f6c-vf5fl\" (UID: \"e7f30f4b-a84e-47b2-b393-c52757e6ca69\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-vf5fl" Jan 20 16:53:00 crc kubenswrapper[4558]: I0120 16:53:00.924498 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/5835ce2a-d074-4a95-aa34-ad3a62f77503-cert\") pod \"controller-6968d8fdc4-d9p9k\" (UID: \"5835ce2a-d074-4a95-aa34-ad3a62f77503\") " pod="metallb-system/controller-6968d8fdc4-d9p9k" Jan 20 16:53:00 crc kubenswrapper[4558]: I0120 16:53:00.927112 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-l2js5\" (UniqueName: \"kubernetes.io/projected/5835ce2a-d074-4a95-aa34-ad3a62f77503-kube-api-access-l2js5\") pod \"controller-6968d8fdc4-d9p9k\" (UID: \"5835ce2a-d074-4a95-aa34-ad3a62f77503\") " pod="metallb-system/controller-6968d8fdc4-d9p9k" Jan 20 16:53:00 crc kubenswrapper[4558]: I0120 16:53:00.927279 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bx6gp\" (UniqueName: \"kubernetes.io/projected/241ae411-7e63-4bb6-a110-b9983a418f9e-kube-api-access-bx6gp\") pod \"speaker-w684s\" (UID: \"241ae411-7e63-4bb6-a110-b9983a418f9e\") " pod="metallb-system/speaker-w684s" Jan 20 16:53:00 crc kubenswrapper[4558]: I0120 16:53:00.928601 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6qlc9\" (UniqueName: \"kubernetes.io/projected/e7f30f4b-a84e-47b2-b393-c52757e6ca69-kube-api-access-6qlc9\") pod \"frr-k8s-webhook-server-7df86c4f6c-vf5fl\" (UID: \"e7f30f4b-a84e-47b2-b393-c52757e6ca69\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-vf5fl" Jan 20 16:53:00 crc kubenswrapper[4558]: I0120 16:53:00.930035 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fcqhl\" (UniqueName: \"kubernetes.io/projected/59de8a65-6da7-4086-93d4-d76fa35f7660-kube-api-access-fcqhl\") pod \"frr-k8s-b75qk\" (UID: \"59de8a65-6da7-4086-93d4-d76fa35f7660\") " pod="metallb-system/frr-k8s-b75qk" Jan 20 16:53:00 crc kubenswrapper[4558]: I0120 16:53:00.935977 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-vf5fl" Jan 20 16:53:01 crc kubenswrapper[4558]: I0120 16:53:01.224494 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-b75qk" Jan 20 16:53:01 crc kubenswrapper[4558]: I0120 16:53:01.274889 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7df86c4f6c-vf5fl"] Jan 20 16:53:01 crc kubenswrapper[4558]: I0120 16:53:01.413869 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/241ae411-7e63-4bb6-a110-b9983a418f9e-memberlist\") pod \"speaker-w684s\" (UID: \"241ae411-7e63-4bb6-a110-b9983a418f9e\") " pod="metallb-system/speaker-w684s" Jan 20 16:53:01 crc kubenswrapper[4558]: I0120 16:53:01.414023 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5835ce2a-d074-4a95-aa34-ad3a62f77503-metrics-certs\") pod \"controller-6968d8fdc4-d9p9k\" (UID: \"5835ce2a-d074-4a95-aa34-ad3a62f77503\") " pod="metallb-system/controller-6968d8fdc4-d9p9k" Jan 20 16:53:01 crc kubenswrapper[4558]: E0120 16:53:01.414044 4558 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Jan 20 16:53:01 crc kubenswrapper[4558]: E0120 16:53:01.414115 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/241ae411-7e63-4bb6-a110-b9983a418f9e-memberlist podName:241ae411-7e63-4bb6-a110-b9983a418f9e nodeName:}" failed. No retries permitted until 2026-01-20 16:53:02.41409772 +0000 UTC m=+676.174435687 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/241ae411-7e63-4bb6-a110-b9983a418f9e-memberlist") pod "speaker-w684s" (UID: "241ae411-7e63-4bb6-a110-b9983a418f9e") : secret "metallb-memberlist" not found Jan 20 16:53:01 crc kubenswrapper[4558]: I0120 16:53:01.417382 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5835ce2a-d074-4a95-aa34-ad3a62f77503-metrics-certs\") pod \"controller-6968d8fdc4-d9p9k\" (UID: \"5835ce2a-d074-4a95-aa34-ad3a62f77503\") " pod="metallb-system/controller-6968d8fdc4-d9p9k" Jan 20 16:53:01 crc kubenswrapper[4558]: I0120 16:53:01.632296 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-6968d8fdc4-d9p9k" Jan 20 16:53:01 crc kubenswrapper[4558]: I0120 16:53:01.979476 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-6968d8fdc4-d9p9k"] Jan 20 16:53:02 crc kubenswrapper[4558]: I0120 16:53:02.174056 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6968d8fdc4-d9p9k" event={"ID":"5835ce2a-d074-4a95-aa34-ad3a62f77503","Type":"ContainerStarted","Data":"9ceded2a33ee099c3b0986454b6537858d5a3e84ca7e3c79b4c636ba9d8fe040"} Jan 20 16:53:02 crc kubenswrapper[4558]: I0120 16:53:02.174524 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6968d8fdc4-d9p9k" event={"ID":"5835ce2a-d074-4a95-aa34-ad3a62f77503","Type":"ContainerStarted","Data":"d3204832a4388205aa0dde99003ff5bc343750fe094fe2b4535ac32bfa658316"} Jan 20 16:53:02 crc kubenswrapper[4558]: I0120 16:53:02.174538 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6968d8fdc4-d9p9k" event={"ID":"5835ce2a-d074-4a95-aa34-ad3a62f77503","Type":"ContainerStarted","Data":"7e6ab51307eb3bde7fda904ef7d2df4abe5b3a8fc6f6ece80430c2f283388f8e"} Jan 20 16:53:02 crc kubenswrapper[4558]: I0120 16:53:02.174552 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-6968d8fdc4-d9p9k" Jan 20 16:53:02 crc kubenswrapper[4558]: I0120 16:53:02.175298 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-b75qk" event={"ID":"59de8a65-6da7-4086-93d4-d76fa35f7660","Type":"ContainerStarted","Data":"4e24158a27b4af434f96586eaa3452bed96791fa896eff216c1913f7d3b60fa3"} Jan 20 16:53:02 crc kubenswrapper[4558]: I0120 16:53:02.175940 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-vf5fl" event={"ID":"e7f30f4b-a84e-47b2-b393-c52757e6ca69","Type":"ContainerStarted","Data":"921bb1939ccc068a64306c283f9f24ff5e60f211bf95e052ff2bc48faa8c8af5"} Jan 20 16:53:02 crc kubenswrapper[4558]: I0120 16:53:02.190287 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-6968d8fdc4-d9p9k" podStartSLOduration=2.190274397 podStartE2EDuration="2.190274397s" podCreationTimestamp="2026-01-20 16:53:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:53:02.189103568 +0000 UTC m=+675.949441536" watchObservedRunningTime="2026-01-20 16:53:02.190274397 +0000 UTC m=+675.950612364" Jan 20 16:53:02 crc kubenswrapper[4558]: I0120 16:53:02.425061 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: 
\"kubernetes.io/secret/241ae411-7e63-4bb6-a110-b9983a418f9e-memberlist\") pod \"speaker-w684s\" (UID: \"241ae411-7e63-4bb6-a110-b9983a418f9e\") " pod="metallb-system/speaker-w684s" Jan 20 16:53:02 crc kubenswrapper[4558]: I0120 16:53:02.429911 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/241ae411-7e63-4bb6-a110-b9983a418f9e-memberlist\") pod \"speaker-w684s\" (UID: \"241ae411-7e63-4bb6-a110-b9983a418f9e\") " pod="metallb-system/speaker-w684s" Jan 20 16:53:02 crc kubenswrapper[4558]: I0120 16:53:02.520235 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-w684s" Jan 20 16:53:02 crc kubenswrapper[4558]: W0120 16:53:02.539911 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod241ae411_7e63_4bb6_a110_b9983a418f9e.slice/crio-f572e4faea2e8fed21dd7f432928c128f6f41a21c64c3692d3055a887a60e592 WatchSource:0}: Error finding container f572e4faea2e8fed21dd7f432928c128f6f41a21c64c3692d3055a887a60e592: Status 404 returned error can't find the container with id f572e4faea2e8fed21dd7f432928c128f6f41a21c64c3692d3055a887a60e592 Jan 20 16:53:03 crc kubenswrapper[4558]: I0120 16:53:03.182763 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-w684s" event={"ID":"241ae411-7e63-4bb6-a110-b9983a418f9e","Type":"ContainerStarted","Data":"96304d2298dcd56d3831a566233ed5b4f0c4e1f326d67cc426cb69054a1a810d"} Jan 20 16:53:03 crc kubenswrapper[4558]: I0120 16:53:03.182982 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-w684s" event={"ID":"241ae411-7e63-4bb6-a110-b9983a418f9e","Type":"ContainerStarted","Data":"c93dce169f714ba79948303859feb9873ee83a69ba96a69c97d05cea9dd31af2"} Jan 20 16:53:03 crc kubenswrapper[4558]: I0120 16:53:03.182993 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-w684s" event={"ID":"241ae411-7e63-4bb6-a110-b9983a418f9e","Type":"ContainerStarted","Data":"f572e4faea2e8fed21dd7f432928c128f6f41a21c64c3692d3055a887a60e592"} Jan 20 16:53:03 crc kubenswrapper[4558]: I0120 16:53:03.183142 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-w684s" Jan 20 16:53:03 crc kubenswrapper[4558]: I0120 16:53:03.198777 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-w684s" podStartSLOduration=3.198763172 podStartE2EDuration="3.198763172s" podCreationTimestamp="2026-01-20 16:53:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:53:03.195292978 +0000 UTC m=+676.955630946" watchObservedRunningTime="2026-01-20 16:53:03.198763172 +0000 UTC m=+676.959101140" Jan 20 16:53:07 crc kubenswrapper[4558]: I0120 16:53:07.205313 4558 generic.go:334] "Generic (PLEG): container finished" podID="59de8a65-6da7-4086-93d4-d76fa35f7660" containerID="fd8a127362eb25c5f3ded5e7bdece1396b9a3e297f76394dc931d2f6622fcd13" exitCode=0 Jan 20 16:53:07 crc kubenswrapper[4558]: I0120 16:53:07.205380 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-b75qk" event={"ID":"59de8a65-6da7-4086-93d4-d76fa35f7660","Type":"ContainerDied","Data":"fd8a127362eb25c5f3ded5e7bdece1396b9a3e297f76394dc931d2f6622fcd13"} Jan 20 16:53:07 crc kubenswrapper[4558]: I0120 16:53:07.207800 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-vf5fl" event={"ID":"e7f30f4b-a84e-47b2-b393-c52757e6ca69","Type":"ContainerStarted","Data":"3325c41ccf95e435c431ba69dbd1c6c2d7611829dde0619c9a31e888f6540e9b"} Jan 20 16:53:07 crc kubenswrapper[4558]: I0120 16:53:07.207951 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-vf5fl" Jan 20 16:53:07 crc kubenswrapper[4558]: I0120 16:53:07.229698 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-vf5fl" podStartSLOduration=1.436306049 podStartE2EDuration="7.22968034s" podCreationTimestamp="2026-01-20 16:53:00 +0000 UTC" firstStartedPulling="2026-01-20 16:53:01.282271545 +0000 UTC m=+675.042609512" lastFinishedPulling="2026-01-20 16:53:07.075645835 +0000 UTC m=+680.835983803" observedRunningTime="2026-01-20 16:53:07.228765623 +0000 UTC m=+680.989103590" watchObservedRunningTime="2026-01-20 16:53:07.22968034 +0000 UTC m=+680.990018308" Jan 20 16:53:08 crc kubenswrapper[4558]: I0120 16:53:08.213384 4558 generic.go:334] "Generic (PLEG): container finished" podID="59de8a65-6da7-4086-93d4-d76fa35f7660" containerID="9489277b084faa6e87a0d4885bad31ac3f5d9aac7039d767f4ddc42522fcb029" exitCode=0 Jan 20 16:53:08 crc kubenswrapper[4558]: I0120 16:53:08.213425 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-b75qk" event={"ID":"59de8a65-6da7-4086-93d4-d76fa35f7660","Type":"ContainerDied","Data":"9489277b084faa6e87a0d4885bad31ac3f5d9aac7039d767f4ddc42522fcb029"} Jan 20 16:53:09 crc kubenswrapper[4558]: I0120 16:53:09.220855 4558 generic.go:334] "Generic (PLEG): container finished" podID="59de8a65-6da7-4086-93d4-d76fa35f7660" containerID="1194ab7db15c4fc254d68ed27e113bac7ef0cb377399fa0a039d43b5e3e7a3a8" exitCode=0 Jan 20 16:53:09 crc kubenswrapper[4558]: I0120 16:53:09.220895 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-b75qk" event={"ID":"59de8a65-6da7-4086-93d4-d76fa35f7660","Type":"ContainerDied","Data":"1194ab7db15c4fc254d68ed27e113bac7ef0cb377399fa0a039d43b5e3e7a3a8"} Jan 20 16:53:10 crc kubenswrapper[4558]: I0120 16:53:10.228942 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-b75qk" event={"ID":"59de8a65-6da7-4086-93d4-d76fa35f7660","Type":"ContainerStarted","Data":"b4308bf3e6049a685d59b4149a0b8eeefc5524a7ba6d89757a4be110b52f84cd"} Jan 20 16:53:10 crc kubenswrapper[4558]: I0120 16:53:10.229138 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-b75qk" event={"ID":"59de8a65-6da7-4086-93d4-d76fa35f7660","Type":"ContainerStarted","Data":"1d54327c21234e581ceb362f4dd7e37d9dc6c50feadacd0d94bb70de33e4c38b"} Jan 20 16:53:10 crc kubenswrapper[4558]: I0120 16:53:10.229150 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-b75qk" event={"ID":"59de8a65-6da7-4086-93d4-d76fa35f7660","Type":"ContainerStarted","Data":"5f861a15f0a792996ee8089d2f828b4c0995d44eb6fed2a20dbf93e94b78f8ba"} Jan 20 16:53:10 crc kubenswrapper[4558]: I0120 16:53:10.229158 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-b75qk" event={"ID":"59de8a65-6da7-4086-93d4-d76fa35f7660","Type":"ContainerStarted","Data":"d7be0dc2bf057c695f957ff650208e402cb60e5a70302a29766d5169d53b00bb"} Jan 20 16:53:10 crc kubenswrapper[4558]: I0120 16:53:10.229180 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-b75qk" 
event={"ID":"59de8a65-6da7-4086-93d4-d76fa35f7660","Type":"ContainerStarted","Data":"e0902c7c701dad84993e0847f64b4a8573fea050a4b70798829ea5713ff9c96c"} Jan 20 16:53:10 crc kubenswrapper[4558]: I0120 16:53:10.229188 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-b75qk" event={"ID":"59de8a65-6da7-4086-93d4-d76fa35f7660","Type":"ContainerStarted","Data":"eb3d4f9529408d612b6688cca67de2dc42b50eb0794761acc9148fd470649923"} Jan 20 16:53:10 crc kubenswrapper[4558]: I0120 16:53:10.229296 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-b75qk" Jan 20 16:53:10 crc kubenswrapper[4558]: I0120 16:53:10.246389 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-b75qk" podStartSLOduration=4.494138704 podStartE2EDuration="10.246373377s" podCreationTimestamp="2026-01-20 16:53:00 +0000 UTC" firstStartedPulling="2026-01-20 16:53:01.318852515 +0000 UTC m=+675.079190482" lastFinishedPulling="2026-01-20 16:53:07.071087188 +0000 UTC m=+680.831425155" observedRunningTime="2026-01-20 16:53:10.244413208 +0000 UTC m=+684.004751174" watchObservedRunningTime="2026-01-20 16:53:10.246373377 +0000 UTC m=+684.006711345" Jan 20 16:53:11 crc kubenswrapper[4558]: I0120 16:53:11.225028 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-b75qk" Jan 20 16:53:11 crc kubenswrapper[4558]: I0120 16:53:11.252700 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-b75qk" Jan 20 16:53:12 crc kubenswrapper[4558]: I0120 16:53:12.523497 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-w684s" Jan 20 16:53:13 crc kubenswrapper[4558]: I0120 16:53:13.710607 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931ajdtsz"] Jan 20 16:53:13 crc kubenswrapper[4558]: I0120 16:53:13.711736 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931ajdtsz" Jan 20 16:53:13 crc kubenswrapper[4558]: I0120 16:53:13.713840 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Jan 20 16:53:13 crc kubenswrapper[4558]: I0120 16:53:13.716973 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931ajdtsz"] Jan 20 16:53:13 crc kubenswrapper[4558]: I0120 16:53:13.757879 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-npxdd\" (UniqueName: \"kubernetes.io/projected/117625c2-239a-43e3-8bfa-69f6b5985a5c-kube-api-access-npxdd\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931ajdtsz\" (UID: \"117625c2-239a-43e3-8bfa-69f6b5985a5c\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931ajdtsz" Jan 20 16:53:13 crc kubenswrapper[4558]: I0120 16:53:13.757976 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/117625c2-239a-43e3-8bfa-69f6b5985a5c-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931ajdtsz\" (UID: \"117625c2-239a-43e3-8bfa-69f6b5985a5c\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931ajdtsz" Jan 20 16:53:13 crc kubenswrapper[4558]: I0120 16:53:13.757998 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/117625c2-239a-43e3-8bfa-69f6b5985a5c-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931ajdtsz\" (UID: \"117625c2-239a-43e3-8bfa-69f6b5985a5c\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931ajdtsz" Jan 20 16:53:13 crc kubenswrapper[4558]: I0120 16:53:13.858643 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-npxdd\" (UniqueName: \"kubernetes.io/projected/117625c2-239a-43e3-8bfa-69f6b5985a5c-kube-api-access-npxdd\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931ajdtsz\" (UID: \"117625c2-239a-43e3-8bfa-69f6b5985a5c\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931ajdtsz" Jan 20 16:53:13 crc kubenswrapper[4558]: I0120 16:53:13.858738 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/117625c2-239a-43e3-8bfa-69f6b5985a5c-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931ajdtsz\" (UID: \"117625c2-239a-43e3-8bfa-69f6b5985a5c\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931ajdtsz" Jan 20 16:53:13 crc kubenswrapper[4558]: I0120 16:53:13.858766 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/117625c2-239a-43e3-8bfa-69f6b5985a5c-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931ajdtsz\" (UID: \"117625c2-239a-43e3-8bfa-69f6b5985a5c\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931ajdtsz" Jan 20 16:53:13 crc kubenswrapper[4558]: I0120 16:53:13.859108 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/117625c2-239a-43e3-8bfa-69f6b5985a5c-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931ajdtsz\" (UID: \"117625c2-239a-43e3-8bfa-69f6b5985a5c\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931ajdtsz" Jan 20 16:53:13 crc kubenswrapper[4558]: I0120 16:53:13.859235 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/117625c2-239a-43e3-8bfa-69f6b5985a5c-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931ajdtsz\" (UID: \"117625c2-239a-43e3-8bfa-69f6b5985a5c\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931ajdtsz" Jan 20 16:53:13 crc kubenswrapper[4558]: I0120 16:53:13.874981 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-npxdd\" (UniqueName: \"kubernetes.io/projected/117625c2-239a-43e3-8bfa-69f6b5985a5c-kube-api-access-npxdd\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931ajdtsz\" (UID: \"117625c2-239a-43e3-8bfa-69f6b5985a5c\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931ajdtsz" Jan 20 16:53:14 crc kubenswrapper[4558]: I0120 16:53:14.026328 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931ajdtsz" Jan 20 16:53:14 crc kubenswrapper[4558]: I0120 16:53:14.373647 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931ajdtsz"] Jan 20 16:53:15 crc kubenswrapper[4558]: I0120 16:53:15.252077 4558 generic.go:334] "Generic (PLEG): container finished" podID="117625c2-239a-43e3-8bfa-69f6b5985a5c" containerID="f5e71363483300f1cb36f155575c0e2ce4006683ba92a3febac3147c640735a2" exitCode=0 Jan 20 16:53:15 crc kubenswrapper[4558]: I0120 16:53:15.252192 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931ajdtsz" event={"ID":"117625c2-239a-43e3-8bfa-69f6b5985a5c","Type":"ContainerDied","Data":"f5e71363483300f1cb36f155575c0e2ce4006683ba92a3febac3147c640735a2"} Jan 20 16:53:15 crc kubenswrapper[4558]: I0120 16:53:15.252350 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931ajdtsz" event={"ID":"117625c2-239a-43e3-8bfa-69f6b5985a5c","Type":"ContainerStarted","Data":"6522c8cb4f5ff76756911cc4cd33708388117d6f3fd28da9b5dcc04785f23f54"} Jan 20 16:53:18 crc kubenswrapper[4558]: I0120 16:53:18.270905 4558 generic.go:334] "Generic (PLEG): container finished" podID="117625c2-239a-43e3-8bfa-69f6b5985a5c" containerID="afb51aa04838379590de66c61c00dd88a80d74c7c36baeb32ddcf6a0f1ef8d99" exitCode=0 Jan 20 16:53:18 crc kubenswrapper[4558]: I0120 16:53:18.270994 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931ajdtsz" event={"ID":"117625c2-239a-43e3-8bfa-69f6b5985a5c","Type":"ContainerDied","Data":"afb51aa04838379590de66c61c00dd88a80d74c7c36baeb32ddcf6a0f1ef8d99"} Jan 20 16:53:19 crc kubenswrapper[4558]: I0120 16:53:19.281960 4558 generic.go:334] "Generic (PLEG): container finished" podID="117625c2-239a-43e3-8bfa-69f6b5985a5c" containerID="adff697b21bc60fb8083ad5a0bf911980fb6c5e78f89dcf68ff82149ffd264be" exitCode=0 Jan 20 16:53:19 crc kubenswrapper[4558]: I0120 
16:53:19.282007 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931ajdtsz" event={"ID":"117625c2-239a-43e3-8bfa-69f6b5985a5c","Type":"ContainerDied","Data":"adff697b21bc60fb8083ad5a0bf911980fb6c5e78f89dcf68ff82149ffd264be"} Jan 20 16:53:20 crc kubenswrapper[4558]: I0120 16:53:20.467193 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931ajdtsz" Jan 20 16:53:20 crc kubenswrapper[4558]: I0120 16:53:20.537195 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/117625c2-239a-43e3-8bfa-69f6b5985a5c-bundle\") pod \"117625c2-239a-43e3-8bfa-69f6b5985a5c\" (UID: \"117625c2-239a-43e3-8bfa-69f6b5985a5c\") " Jan 20 16:53:20 crc kubenswrapper[4558]: I0120 16:53:20.537290 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-npxdd\" (UniqueName: \"kubernetes.io/projected/117625c2-239a-43e3-8bfa-69f6b5985a5c-kube-api-access-npxdd\") pod \"117625c2-239a-43e3-8bfa-69f6b5985a5c\" (UID: \"117625c2-239a-43e3-8bfa-69f6b5985a5c\") " Jan 20 16:53:20 crc kubenswrapper[4558]: I0120 16:53:20.537322 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/117625c2-239a-43e3-8bfa-69f6b5985a5c-util\") pod \"117625c2-239a-43e3-8bfa-69f6b5985a5c\" (UID: \"117625c2-239a-43e3-8bfa-69f6b5985a5c\") " Jan 20 16:53:20 crc kubenswrapper[4558]: I0120 16:53:20.537991 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/117625c2-239a-43e3-8bfa-69f6b5985a5c-bundle" (OuterVolumeSpecName: "bundle") pod "117625c2-239a-43e3-8bfa-69f6b5985a5c" (UID: "117625c2-239a-43e3-8bfa-69f6b5985a5c"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 16:53:20 crc kubenswrapper[4558]: I0120 16:53:20.543196 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/117625c2-239a-43e3-8bfa-69f6b5985a5c-kube-api-access-npxdd" (OuterVolumeSpecName: "kube-api-access-npxdd") pod "117625c2-239a-43e3-8bfa-69f6b5985a5c" (UID: "117625c2-239a-43e3-8bfa-69f6b5985a5c"). InnerVolumeSpecName "kube-api-access-npxdd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:53:20 crc kubenswrapper[4558]: I0120 16:53:20.544521 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/117625c2-239a-43e3-8bfa-69f6b5985a5c-util" (OuterVolumeSpecName: "util") pod "117625c2-239a-43e3-8bfa-69f6b5985a5c" (UID: "117625c2-239a-43e3-8bfa-69f6b5985a5c"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 16:53:20 crc kubenswrapper[4558]: I0120 16:53:20.638205 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-npxdd\" (UniqueName: \"kubernetes.io/projected/117625c2-239a-43e3-8bfa-69f6b5985a5c-kube-api-access-npxdd\") on node \"crc\" DevicePath \"\"" Jan 20 16:53:20 crc kubenswrapper[4558]: I0120 16:53:20.638242 4558 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/117625c2-239a-43e3-8bfa-69f6b5985a5c-util\") on node \"crc\" DevicePath \"\"" Jan 20 16:53:20 crc kubenswrapper[4558]: I0120 16:53:20.638253 4558 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/117625c2-239a-43e3-8bfa-69f6b5985a5c-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 16:53:20 crc kubenswrapper[4558]: I0120 16:53:20.940261 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-vf5fl" Jan 20 16:53:21 crc kubenswrapper[4558]: I0120 16:53:21.226535 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-b75qk" Jan 20 16:53:21 crc kubenswrapper[4558]: I0120 16:53:21.293750 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931ajdtsz" event={"ID":"117625c2-239a-43e3-8bfa-69f6b5985a5c","Type":"ContainerDied","Data":"6522c8cb4f5ff76756911cc4cd33708388117d6f3fd28da9b5dcc04785f23f54"} Jan 20 16:53:21 crc kubenswrapper[4558]: I0120 16:53:21.293787 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6522c8cb4f5ff76756911cc4cd33708388117d6f3fd28da9b5dcc04785f23f54" Jan 20 16:53:21 crc kubenswrapper[4558]: I0120 16:53:21.294002 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931ajdtsz" Jan 20 16:53:21 crc kubenswrapper[4558]: I0120 16:53:21.635585 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-6968d8fdc4-d9p9k" Jan 20 16:53:26 crc kubenswrapper[4558]: I0120 16:53:26.801354 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-7rbpv"] Jan 20 16:53:26 crc kubenswrapper[4558]: E0120 16:53:26.801895 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="117625c2-239a-43e3-8bfa-69f6b5985a5c" containerName="pull" Jan 20 16:53:26 crc kubenswrapper[4558]: I0120 16:53:26.801907 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="117625c2-239a-43e3-8bfa-69f6b5985a5c" containerName="pull" Jan 20 16:53:26 crc kubenswrapper[4558]: E0120 16:53:26.801918 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="117625c2-239a-43e3-8bfa-69f6b5985a5c" containerName="util" Jan 20 16:53:26 crc kubenswrapper[4558]: I0120 16:53:26.801923 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="117625c2-239a-43e3-8bfa-69f6b5985a5c" containerName="util" Jan 20 16:53:26 crc kubenswrapper[4558]: E0120 16:53:26.801941 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="117625c2-239a-43e3-8bfa-69f6b5985a5c" containerName="extract" Jan 20 16:53:26 crc kubenswrapper[4558]: I0120 16:53:26.801947 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="117625c2-239a-43e3-8bfa-69f6b5985a5c" containerName="extract" Jan 20 16:53:26 crc kubenswrapper[4558]: I0120 16:53:26.802033 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="117625c2-239a-43e3-8bfa-69f6b5985a5c" containerName="extract" Jan 20 16:53:26 crc kubenswrapper[4558]: I0120 16:53:26.802416 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-7rbpv" Jan 20 16:53:26 crc kubenswrapper[4558]: I0120 16:53:26.804090 4558 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager-operator"/"cert-manager-operator-controller-manager-dockercfg-2rrq9" Jan 20 16:53:26 crc kubenswrapper[4558]: I0120 16:53:26.804431 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"openshift-service-ca.crt" Jan 20 16:53:26 crc kubenswrapper[4558]: I0120 16:53:26.804766 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"kube-root-ca.crt" Jan 20 16:53:26 crc kubenswrapper[4558]: I0120 16:53:26.804937 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/86931b75-0fc6-41d5-a944-a3d6d6ad15e1-tmp\") pod \"cert-manager-operator-controller-manager-64cf6dff88-7rbpv\" (UID: \"86931b75-0fc6-41d5-a944-a3d6d6ad15e1\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-7rbpv" Jan 20 16:53:26 crc kubenswrapper[4558]: I0120 16:53:26.805249 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xvc2r\" (UniqueName: \"kubernetes.io/projected/86931b75-0fc6-41d5-a944-a3d6d6ad15e1-kube-api-access-xvc2r\") pod \"cert-manager-operator-controller-manager-64cf6dff88-7rbpv\" (UID: \"86931b75-0fc6-41d5-a944-a3d6d6ad15e1\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-7rbpv" Jan 20 16:53:26 crc kubenswrapper[4558]: I0120 16:53:26.818124 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-7rbpv"] Jan 20 16:53:26 crc kubenswrapper[4558]: I0120 16:53:26.906313 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xvc2r\" (UniqueName: \"kubernetes.io/projected/86931b75-0fc6-41d5-a944-a3d6d6ad15e1-kube-api-access-xvc2r\") pod \"cert-manager-operator-controller-manager-64cf6dff88-7rbpv\" (UID: \"86931b75-0fc6-41d5-a944-a3d6d6ad15e1\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-7rbpv" Jan 20 16:53:26 crc kubenswrapper[4558]: I0120 16:53:26.906642 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/86931b75-0fc6-41d5-a944-a3d6d6ad15e1-tmp\") pod \"cert-manager-operator-controller-manager-64cf6dff88-7rbpv\" (UID: \"86931b75-0fc6-41d5-a944-a3d6d6ad15e1\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-7rbpv" Jan 20 16:53:26 crc kubenswrapper[4558]: I0120 16:53:26.907037 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/86931b75-0fc6-41d5-a944-a3d6d6ad15e1-tmp\") pod \"cert-manager-operator-controller-manager-64cf6dff88-7rbpv\" (UID: \"86931b75-0fc6-41d5-a944-a3d6d6ad15e1\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-7rbpv" Jan 20 16:53:26 crc kubenswrapper[4558]: I0120 16:53:26.923839 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xvc2r\" (UniqueName: \"kubernetes.io/projected/86931b75-0fc6-41d5-a944-a3d6d6ad15e1-kube-api-access-xvc2r\") pod \"cert-manager-operator-controller-manager-64cf6dff88-7rbpv\" (UID: \"86931b75-0fc6-41d5-a944-a3d6d6ad15e1\") " 
pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-7rbpv" Jan 20 16:53:27 crc kubenswrapper[4558]: I0120 16:53:27.116232 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-7rbpv" Jan 20 16:53:27 crc kubenswrapper[4558]: I0120 16:53:27.332981 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 16:53:27 crc kubenswrapper[4558]: I0120 16:53:27.336489 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 16:53:27 crc kubenswrapper[4558]: I0120 16:53:27.535290 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-7rbpv"] Jan 20 16:53:28 crc kubenswrapper[4558]: I0120 16:53:28.332565 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-7rbpv" event={"ID":"86931b75-0fc6-41d5-a944-a3d6d6ad15e1","Type":"ContainerStarted","Data":"18494cb7a5d7444f968f65a7fc6c8ce131b189ce009d415fc45c5ff7cd5326a7"} Jan 20 16:53:34 crc kubenswrapper[4558]: I0120 16:53:34.369928 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-7rbpv" event={"ID":"86931b75-0fc6-41d5-a944-a3d6d6ad15e1","Type":"ContainerStarted","Data":"8d47b64ea1eed129ca71ab653da706669b2c842ae0b9b682d75bcc6fa6cf5865"} Jan 20 16:53:34 crc kubenswrapper[4558]: I0120 16:53:34.386474 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager-operator/cert-manager-operator-controller-manager-64cf6dff88-7rbpv" podStartSLOduration=2.667167387 podStartE2EDuration="8.386459601s" podCreationTimestamp="2026-01-20 16:53:26 +0000 UTC" firstStartedPulling="2026-01-20 16:53:27.546550394 +0000 UTC m=+701.306888361" lastFinishedPulling="2026-01-20 16:53:33.265842608 +0000 UTC m=+707.026180575" observedRunningTime="2026-01-20 16:53:34.386388999 +0000 UTC m=+708.146726966" watchObservedRunningTime="2026-01-20 16:53:34.386459601 +0000 UTC m=+708.146797568" Jan 20 16:53:36 crc kubenswrapper[4558]: I0120 16:53:36.900873 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-s79l7"] Jan 20 16:53:36 crc kubenswrapper[4558]: I0120 16:53:36.901670 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-f4fb5df64-s79l7" Jan 20 16:53:36 crc kubenswrapper[4558]: I0120 16:53:36.905132 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Jan 20 16:53:36 crc kubenswrapper[4558]: I0120 16:53:36.905262 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Jan 20 16:53:36 crc kubenswrapper[4558]: I0120 16:53:36.905812 4558 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-zstvb" Jan 20 16:53:36 crc kubenswrapper[4558]: I0120 16:53:36.910664 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-s79l7"] Jan 20 16:53:36 crc kubenswrapper[4558]: I0120 16:53:36.941258 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/0aafb007-c951-4a3f-90ee-14897538c76d-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-s79l7\" (UID: \"0aafb007-c951-4a3f-90ee-14897538c76d\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-s79l7" Jan 20 16:53:36 crc kubenswrapper[4558]: I0120 16:53:36.941335 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qrnzq\" (UniqueName: \"kubernetes.io/projected/0aafb007-c951-4a3f-90ee-14897538c76d-kube-api-access-qrnzq\") pod \"cert-manager-webhook-f4fb5df64-s79l7\" (UID: \"0aafb007-c951-4a3f-90ee-14897538c76d\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-s79l7" Jan 20 16:53:37 crc kubenswrapper[4558]: I0120 16:53:37.041746 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/0aafb007-c951-4a3f-90ee-14897538c76d-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-s79l7\" (UID: \"0aafb007-c951-4a3f-90ee-14897538c76d\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-s79l7" Jan 20 16:53:37 crc kubenswrapper[4558]: I0120 16:53:37.041832 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qrnzq\" (UniqueName: \"kubernetes.io/projected/0aafb007-c951-4a3f-90ee-14897538c76d-kube-api-access-qrnzq\") pod \"cert-manager-webhook-f4fb5df64-s79l7\" (UID: \"0aafb007-c951-4a3f-90ee-14897538c76d\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-s79l7" Jan 20 16:53:37 crc kubenswrapper[4558]: I0120 16:53:37.058230 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qrnzq\" (UniqueName: \"kubernetes.io/projected/0aafb007-c951-4a3f-90ee-14897538c76d-kube-api-access-qrnzq\") pod \"cert-manager-webhook-f4fb5df64-s79l7\" (UID: \"0aafb007-c951-4a3f-90ee-14897538c76d\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-s79l7" Jan 20 16:53:37 crc kubenswrapper[4558]: I0120 16:53:37.058295 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/0aafb007-c951-4a3f-90ee-14897538c76d-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-s79l7\" (UID: \"0aafb007-c951-4a3f-90ee-14897538c76d\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-s79l7" Jan 20 16:53:37 crc kubenswrapper[4558]: I0120 16:53:37.214615 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-f4fb5df64-s79l7" Jan 20 16:53:37 crc kubenswrapper[4558]: I0120 16:53:37.374590 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-s79l7"] Jan 20 16:53:38 crc kubenswrapper[4558]: I0120 16:53:38.386219 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-f4fb5df64-s79l7" event={"ID":"0aafb007-c951-4a3f-90ee-14897538c76d","Type":"ContainerStarted","Data":"a2e9d772d88a13a46829e6cee3abd9127d649422124451a012351322ca894538"} Jan 20 16:53:39 crc kubenswrapper[4558]: I0120 16:53:39.086718 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-n85r2"] Jan 20 16:53:39 crc kubenswrapper[4558]: I0120 16:53:39.089301 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-855d9ccff4-n85r2" Jan 20 16:53:39 crc kubenswrapper[4558]: I0120 16:53:39.090960 4558 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-m2mj6" Jan 20 16:53:39 crc kubenswrapper[4558]: I0120 16:53:39.093392 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-n85r2"] Jan 20 16:53:39 crc kubenswrapper[4558]: I0120 16:53:39.178204 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/dd2168dc-3cb1-45db-b8fd-7e112804ffcd-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-n85r2\" (UID: \"dd2168dc-3cb1-45db-b8fd-7e112804ffcd\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-n85r2" Jan 20 16:53:39 crc kubenswrapper[4558]: I0120 16:53:39.178377 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7tp9k\" (UniqueName: \"kubernetes.io/projected/dd2168dc-3cb1-45db-b8fd-7e112804ffcd-kube-api-access-7tp9k\") pod \"cert-manager-cainjector-855d9ccff4-n85r2\" (UID: \"dd2168dc-3cb1-45db-b8fd-7e112804ffcd\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-n85r2" Jan 20 16:53:39 crc kubenswrapper[4558]: I0120 16:53:39.279478 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7tp9k\" (UniqueName: \"kubernetes.io/projected/dd2168dc-3cb1-45db-b8fd-7e112804ffcd-kube-api-access-7tp9k\") pod \"cert-manager-cainjector-855d9ccff4-n85r2\" (UID: \"dd2168dc-3cb1-45db-b8fd-7e112804ffcd\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-n85r2" Jan 20 16:53:39 crc kubenswrapper[4558]: I0120 16:53:39.279616 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/dd2168dc-3cb1-45db-b8fd-7e112804ffcd-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-n85r2\" (UID: \"dd2168dc-3cb1-45db-b8fd-7e112804ffcd\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-n85r2" Jan 20 16:53:39 crc kubenswrapper[4558]: I0120 16:53:39.294483 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7tp9k\" (UniqueName: \"kubernetes.io/projected/dd2168dc-3cb1-45db-b8fd-7e112804ffcd-kube-api-access-7tp9k\") pod \"cert-manager-cainjector-855d9ccff4-n85r2\" (UID: \"dd2168dc-3cb1-45db-b8fd-7e112804ffcd\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-n85r2" Jan 20 16:53:39 crc kubenswrapper[4558]: I0120 16:53:39.296868 4558 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/dd2168dc-3cb1-45db-b8fd-7e112804ffcd-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-n85r2\" (UID: \"dd2168dc-3cb1-45db-b8fd-7e112804ffcd\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-n85r2" Jan 20 16:53:39 crc kubenswrapper[4558]: I0120 16:53:39.406504 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-855d9ccff4-n85r2" Jan 20 16:53:39 crc kubenswrapper[4558]: I0120 16:53:39.742088 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-n85r2"] Jan 20 16:53:40 crc kubenswrapper[4558]: I0120 16:53:40.396858 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-855d9ccff4-n85r2" event={"ID":"dd2168dc-3cb1-45db-b8fd-7e112804ffcd","Type":"ContainerStarted","Data":"288026e972ad72952942275eb00fcc80021987f1256ae625e72b55796573ef32"} Jan 20 16:53:43 crc kubenswrapper[4558]: I0120 16:53:43.412559 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-f4fb5df64-s79l7" event={"ID":"0aafb007-c951-4a3f-90ee-14897538c76d","Type":"ContainerStarted","Data":"8af008e769a684c85a6316bc99a01ce92ba3d026189f6b9a4e08be3fd8ef3d70"} Jan 20 16:53:43 crc kubenswrapper[4558]: I0120 16:53:43.412829 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-f4fb5df64-s79l7" Jan 20 16:53:43 crc kubenswrapper[4558]: I0120 16:53:43.414331 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-855d9ccff4-n85r2" event={"ID":"dd2168dc-3cb1-45db-b8fd-7e112804ffcd","Type":"ContainerStarted","Data":"00bc571317c8dc98f1c01813fe9faf3ac6e962570853b0420b843f357750b097"} Jan 20 16:53:43 crc kubenswrapper[4558]: I0120 16:53:43.427752 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-f4fb5df64-s79l7" podStartSLOduration=1.759196297 podStartE2EDuration="7.427739574s" podCreationTimestamp="2026-01-20 16:53:36 +0000 UTC" firstStartedPulling="2026-01-20 16:53:37.385453524 +0000 UTC m=+711.145791490" lastFinishedPulling="2026-01-20 16:53:43.053996799 +0000 UTC m=+716.814334767" observedRunningTime="2026-01-20 16:53:43.42513256 +0000 UTC m=+717.185470527" watchObservedRunningTime="2026-01-20 16:53:43.427739574 +0000 UTC m=+717.188077541" Jan 20 16:53:43 crc kubenswrapper[4558]: I0120 16:53:43.438569 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-855d9ccff4-n85r2" podStartSLOduration=1.153101089 podStartE2EDuration="4.438553774s" podCreationTimestamp="2026-01-20 16:53:39 +0000 UTC" firstStartedPulling="2026-01-20 16:53:39.751610668 +0000 UTC m=+713.511948624" lastFinishedPulling="2026-01-20 16:53:43.037063342 +0000 UTC m=+716.797401309" observedRunningTime="2026-01-20 16:53:43.435573659 +0000 UTC m=+717.195911627" watchObservedRunningTime="2026-01-20 16:53:43.438553774 +0000 UTC m=+717.198891742" Jan 20 16:53:52 crc kubenswrapper[4558]: I0120 16:53:52.216764 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-f4fb5df64-s79l7" Jan 20 16:53:55 crc kubenswrapper[4558]: I0120 16:53:55.839110 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-86cb77c54b-4t55w"] Jan 20 16:53:55 crc 
kubenswrapper[4558]: I0120 16:53:55.840070 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-86cb77c54b-4t55w" Jan 20 16:53:55 crc kubenswrapper[4558]: I0120 16:53:55.842263 4558 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-8cpqr" Jan 20 16:53:55 crc kubenswrapper[4558]: I0120 16:53:55.849733 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-86cb77c54b-4t55w"] Jan 20 16:53:55 crc kubenswrapper[4558]: I0120 16:53:55.880662 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/857afdf1-c962-4b4a-a79b-15547f6b407c-bound-sa-token\") pod \"cert-manager-86cb77c54b-4t55w\" (UID: \"857afdf1-c962-4b4a-a79b-15547f6b407c\") " pod="cert-manager/cert-manager-86cb77c54b-4t55w" Jan 20 16:53:55 crc kubenswrapper[4558]: I0120 16:53:55.880858 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vnhgx\" (UniqueName: \"kubernetes.io/projected/857afdf1-c962-4b4a-a79b-15547f6b407c-kube-api-access-vnhgx\") pod \"cert-manager-86cb77c54b-4t55w\" (UID: \"857afdf1-c962-4b4a-a79b-15547f6b407c\") " pod="cert-manager/cert-manager-86cb77c54b-4t55w" Jan 20 16:53:55 crc kubenswrapper[4558]: I0120 16:53:55.981664 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vnhgx\" (UniqueName: \"kubernetes.io/projected/857afdf1-c962-4b4a-a79b-15547f6b407c-kube-api-access-vnhgx\") pod \"cert-manager-86cb77c54b-4t55w\" (UID: \"857afdf1-c962-4b4a-a79b-15547f6b407c\") " pod="cert-manager/cert-manager-86cb77c54b-4t55w" Jan 20 16:53:55 crc kubenswrapper[4558]: I0120 16:53:55.981797 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/857afdf1-c962-4b4a-a79b-15547f6b407c-bound-sa-token\") pod \"cert-manager-86cb77c54b-4t55w\" (UID: \"857afdf1-c962-4b4a-a79b-15547f6b407c\") " pod="cert-manager/cert-manager-86cb77c54b-4t55w" Jan 20 16:53:55 crc kubenswrapper[4558]: I0120 16:53:55.996947 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/857afdf1-c962-4b4a-a79b-15547f6b407c-bound-sa-token\") pod \"cert-manager-86cb77c54b-4t55w\" (UID: \"857afdf1-c962-4b4a-a79b-15547f6b407c\") " pod="cert-manager/cert-manager-86cb77c54b-4t55w" Jan 20 16:53:55 crc kubenswrapper[4558]: I0120 16:53:55.997096 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vnhgx\" (UniqueName: \"kubernetes.io/projected/857afdf1-c962-4b4a-a79b-15547f6b407c-kube-api-access-vnhgx\") pod \"cert-manager-86cb77c54b-4t55w\" (UID: \"857afdf1-c962-4b4a-a79b-15547f6b407c\") " pod="cert-manager/cert-manager-86cb77c54b-4t55w" Jan 20 16:53:56 crc kubenswrapper[4558]: I0120 16:53:56.153602 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-86cb77c54b-4t55w" Jan 20 16:53:56 crc kubenswrapper[4558]: I0120 16:53:56.491315 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-86cb77c54b-4t55w"] Jan 20 16:53:57 crc kubenswrapper[4558]: I0120 16:53:57.329648 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 16:53:57 crc kubenswrapper[4558]: I0120 16:53:57.329830 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 16:53:57 crc kubenswrapper[4558]: I0120 16:53:57.476513 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-86cb77c54b-4t55w" event={"ID":"857afdf1-c962-4b4a-a79b-15547f6b407c","Type":"ContainerStarted","Data":"ced51b49ba16cb4a46fd6455756c31f7f202c0eaca65b08487202fe917f4905a"} Jan 20 16:53:57 crc kubenswrapper[4558]: I0120 16:53:57.476552 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-86cb77c54b-4t55w" event={"ID":"857afdf1-c962-4b4a-a79b-15547f6b407c","Type":"ContainerStarted","Data":"b0ff2c2e934cc0e785558ed53fd545bebc4ee3150689ac961a404dadcfcbb79a"} Jan 20 16:53:57 crc kubenswrapper[4558]: I0120 16:53:57.486705 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-86cb77c54b-4t55w" podStartSLOduration=2.486692405 podStartE2EDuration="2.486692405s" podCreationTimestamp="2026-01-20 16:53:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:53:57.485920875 +0000 UTC m=+731.246258843" watchObservedRunningTime="2026-01-20 16:53:57.486692405 +0000 UTC m=+731.247030372" Jan 20 16:54:04 crc kubenswrapper[4558]: I0120 16:54:04.555285 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-mxswj"] Jan 20 16:54:04 crc kubenswrapper[4558]: I0120 16:54:04.556332 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-mxswj" Jan 20 16:54:04 crc kubenswrapper[4558]: I0120 16:54:04.561468 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-n442n" Jan 20 16:54:04 crc kubenswrapper[4558]: I0120 16:54:04.561957 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-mxswj"] Jan 20 16:54:04 crc kubenswrapper[4558]: I0120 16:54:04.562578 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Jan 20 16:54:04 crc kubenswrapper[4558]: I0120 16:54:04.562733 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Jan 20 16:54:04 crc kubenswrapper[4558]: I0120 16:54:04.678954 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5zwpf\" (UniqueName: \"kubernetes.io/projected/a29d0f03-26ab-44a1-b24d-2cec192e90a9-kube-api-access-5zwpf\") pod \"openstack-operator-index-mxswj\" (UID: \"a29d0f03-26ab-44a1-b24d-2cec192e90a9\") " pod="openstack-operators/openstack-operator-index-mxswj" Jan 20 16:54:04 crc kubenswrapper[4558]: I0120 16:54:04.780189 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5zwpf\" (UniqueName: \"kubernetes.io/projected/a29d0f03-26ab-44a1-b24d-2cec192e90a9-kube-api-access-5zwpf\") pod \"openstack-operator-index-mxswj\" (UID: \"a29d0f03-26ab-44a1-b24d-2cec192e90a9\") " pod="openstack-operators/openstack-operator-index-mxswj" Jan 20 16:54:04 crc kubenswrapper[4558]: I0120 16:54:04.798540 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5zwpf\" (UniqueName: \"kubernetes.io/projected/a29d0f03-26ab-44a1-b24d-2cec192e90a9-kube-api-access-5zwpf\") pod \"openstack-operator-index-mxswj\" (UID: \"a29d0f03-26ab-44a1-b24d-2cec192e90a9\") " pod="openstack-operators/openstack-operator-index-mxswj" Jan 20 16:54:04 crc kubenswrapper[4558]: I0120 16:54:04.875069 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-mxswj" Jan 20 16:54:05 crc kubenswrapper[4558]: I0120 16:54:05.274396 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-mxswj"] Jan 20 16:54:05 crc kubenswrapper[4558]: W0120 16:54:05.279013 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda29d0f03_26ab_44a1_b24d_2cec192e90a9.slice/crio-8dbe60328f18f508dfb9fb3d1c233fd5046fa299c4fe9ec583c52c8503358e38 WatchSource:0}: Error finding container 8dbe60328f18f508dfb9fb3d1c233fd5046fa299c4fe9ec583c52c8503358e38: Status 404 returned error can't find the container with id 8dbe60328f18f508dfb9fb3d1c233fd5046fa299c4fe9ec583c52c8503358e38 Jan 20 16:54:05 crc kubenswrapper[4558]: I0120 16:54:05.511910 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-mxswj" event={"ID":"a29d0f03-26ab-44a1-b24d-2cec192e90a9","Type":"ContainerStarted","Data":"8dbe60328f18f508dfb9fb3d1c233fd5046fa299c4fe9ec583c52c8503358e38"} Jan 20 16:54:07 crc kubenswrapper[4558]: I0120 16:54:07.522674 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-mxswj" event={"ID":"a29d0f03-26ab-44a1-b24d-2cec192e90a9","Type":"ContainerStarted","Data":"37c4ca96947e998a70f2a781dca9100ca875fe543b57d9fed9b079564a734071"} Jan 20 16:54:07 crc kubenswrapper[4558]: I0120 16:54:07.535461 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-mxswj" podStartSLOduration=1.9230736739999998 podStartE2EDuration="3.535446342s" podCreationTimestamp="2026-01-20 16:54:04 +0000 UTC" firstStartedPulling="2026-01-20 16:54:05.281351521 +0000 UTC m=+739.041689488" lastFinishedPulling="2026-01-20 16:54:06.893724189 +0000 UTC m=+740.654062156" observedRunningTime="2026-01-20 16:54:07.532292183 +0000 UTC m=+741.292630151" watchObservedRunningTime="2026-01-20 16:54:07.535446342 +0000 UTC m=+741.295784309" Jan 20 16:54:07 crc kubenswrapper[4558]: I0120 16:54:07.743858 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-mxswj"] Jan 20 16:54:08 crc kubenswrapper[4558]: I0120 16:54:08.348138 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-pt7m4"] Jan 20 16:54:08 crc kubenswrapper[4558]: I0120 16:54:08.349033 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-pt7m4" Jan 20 16:54:08 crc kubenswrapper[4558]: I0120 16:54:08.355377 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-pt7m4"] Jan 20 16:54:08 crc kubenswrapper[4558]: I0120 16:54:08.438113 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-drmnl\" (UniqueName: \"kubernetes.io/projected/666aae5d-ee4c-47d1-8cf7-0aa2cb8c813d-kube-api-access-drmnl\") pod \"openstack-operator-index-pt7m4\" (UID: \"666aae5d-ee4c-47d1-8cf7-0aa2cb8c813d\") " pod="openstack-operators/openstack-operator-index-pt7m4" Jan 20 16:54:08 crc kubenswrapper[4558]: I0120 16:54:08.539360 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-drmnl\" (UniqueName: \"kubernetes.io/projected/666aae5d-ee4c-47d1-8cf7-0aa2cb8c813d-kube-api-access-drmnl\") pod \"openstack-operator-index-pt7m4\" (UID: \"666aae5d-ee4c-47d1-8cf7-0aa2cb8c813d\") " pod="openstack-operators/openstack-operator-index-pt7m4" Jan 20 16:54:08 crc kubenswrapper[4558]: I0120 16:54:08.553408 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-drmnl\" (UniqueName: \"kubernetes.io/projected/666aae5d-ee4c-47d1-8cf7-0aa2cb8c813d-kube-api-access-drmnl\") pod \"openstack-operator-index-pt7m4\" (UID: \"666aae5d-ee4c-47d1-8cf7-0aa2cb8c813d\") " pod="openstack-operators/openstack-operator-index-pt7m4" Jan 20 16:54:08 crc kubenswrapper[4558]: I0120 16:54:08.661134 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-pt7m4" Jan 20 16:54:08 crc kubenswrapper[4558]: I0120 16:54:08.997528 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-pt7m4"] Jan 20 16:54:09 crc kubenswrapper[4558]: W0120 16:54:09.000723 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod666aae5d_ee4c_47d1_8cf7_0aa2cb8c813d.slice/crio-5ef4294ca664514991957dd1796480ea4ca0ba14c6896d3b9c89bd88d43388e5 WatchSource:0}: Error finding container 5ef4294ca664514991957dd1796480ea4ca0ba14c6896d3b9c89bd88d43388e5: Status 404 returned error can't find the container with id 5ef4294ca664514991957dd1796480ea4ca0ba14c6896d3b9c89bd88d43388e5 Jan 20 16:54:09 crc kubenswrapper[4558]: I0120 16:54:09.532492 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-pt7m4" event={"ID":"666aae5d-ee4c-47d1-8cf7-0aa2cb8c813d","Type":"ContainerStarted","Data":"5ef4294ca664514991957dd1796480ea4ca0ba14c6896d3b9c89bd88d43388e5"} Jan 20 16:54:09 crc kubenswrapper[4558]: I0120 16:54:09.532587 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-mxswj" podUID="a29d0f03-26ab-44a1-b24d-2cec192e90a9" containerName="registry-server" containerID="cri-o://37c4ca96947e998a70f2a781dca9100ca875fe543b57d9fed9b079564a734071" gracePeriod=2 Jan 20 16:54:09 crc kubenswrapper[4558]: I0120 16:54:09.820609 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-mxswj" Jan 20 16:54:09 crc kubenswrapper[4558]: I0120 16:54:09.853904 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5zwpf\" (UniqueName: \"kubernetes.io/projected/a29d0f03-26ab-44a1-b24d-2cec192e90a9-kube-api-access-5zwpf\") pod \"a29d0f03-26ab-44a1-b24d-2cec192e90a9\" (UID: \"a29d0f03-26ab-44a1-b24d-2cec192e90a9\") " Jan 20 16:54:09 crc kubenswrapper[4558]: I0120 16:54:09.858847 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a29d0f03-26ab-44a1-b24d-2cec192e90a9-kube-api-access-5zwpf" (OuterVolumeSpecName: "kube-api-access-5zwpf") pod "a29d0f03-26ab-44a1-b24d-2cec192e90a9" (UID: "a29d0f03-26ab-44a1-b24d-2cec192e90a9"). InnerVolumeSpecName "kube-api-access-5zwpf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:54:09 crc kubenswrapper[4558]: I0120 16:54:09.954824 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5zwpf\" (UniqueName: \"kubernetes.io/projected/a29d0f03-26ab-44a1-b24d-2cec192e90a9-kube-api-access-5zwpf\") on node \"crc\" DevicePath \"\"" Jan 20 16:54:10 crc kubenswrapper[4558]: I0120 16:54:10.538912 4558 generic.go:334] "Generic (PLEG): container finished" podID="a29d0f03-26ab-44a1-b24d-2cec192e90a9" containerID="37c4ca96947e998a70f2a781dca9100ca875fe543b57d9fed9b079564a734071" exitCode=0 Jan 20 16:54:10 crc kubenswrapper[4558]: I0120 16:54:10.538950 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-mxswj" event={"ID":"a29d0f03-26ab-44a1-b24d-2cec192e90a9","Type":"ContainerDied","Data":"37c4ca96947e998a70f2a781dca9100ca875fe543b57d9fed9b079564a734071"} Jan 20 16:54:10 crc kubenswrapper[4558]: I0120 16:54:10.539272 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-mxswj" event={"ID":"a29d0f03-26ab-44a1-b24d-2cec192e90a9","Type":"ContainerDied","Data":"8dbe60328f18f508dfb9fb3d1c233fd5046fa299c4fe9ec583c52c8503358e38"} Jan 20 16:54:10 crc kubenswrapper[4558]: I0120 16:54:10.538965 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-mxswj" Jan 20 16:54:10 crc kubenswrapper[4558]: I0120 16:54:10.539292 4558 scope.go:117] "RemoveContainer" containerID="37c4ca96947e998a70f2a781dca9100ca875fe543b57d9fed9b079564a734071" Jan 20 16:54:10 crc kubenswrapper[4558]: I0120 16:54:10.540490 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-pt7m4" event={"ID":"666aae5d-ee4c-47d1-8cf7-0aa2cb8c813d","Type":"ContainerStarted","Data":"0fc1bcd802de9cc79769c26cb5ab84e85afa3e5dc41c20e72994c498aca1b4c0"} Jan 20 16:54:10 crc kubenswrapper[4558]: I0120 16:54:10.553282 4558 scope.go:117] "RemoveContainer" containerID="37c4ca96947e998a70f2a781dca9100ca875fe543b57d9fed9b079564a734071" Jan 20 16:54:10 crc kubenswrapper[4558]: E0120 16:54:10.553592 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"37c4ca96947e998a70f2a781dca9100ca875fe543b57d9fed9b079564a734071\": container with ID starting with 37c4ca96947e998a70f2a781dca9100ca875fe543b57d9fed9b079564a734071 not found: ID does not exist" containerID="37c4ca96947e998a70f2a781dca9100ca875fe543b57d9fed9b079564a734071" Jan 20 16:54:10 crc kubenswrapper[4558]: I0120 16:54:10.553628 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"37c4ca96947e998a70f2a781dca9100ca875fe543b57d9fed9b079564a734071"} err="failed to get container status \"37c4ca96947e998a70f2a781dca9100ca875fe543b57d9fed9b079564a734071\": rpc error: code = NotFound desc = could not find container \"37c4ca96947e998a70f2a781dca9100ca875fe543b57d9fed9b079564a734071\": container with ID starting with 37c4ca96947e998a70f2a781dca9100ca875fe543b57d9fed9b079564a734071 not found: ID does not exist" Jan 20 16:54:10 crc kubenswrapper[4558]: I0120 16:54:10.563427 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-pt7m4" podStartSLOduration=1.9863146459999999 podStartE2EDuration="2.563416369s" podCreationTimestamp="2026-01-20 16:54:08 +0000 UTC" firstStartedPulling="2026-01-20 16:54:09.003576604 +0000 UTC m=+742.763914570" lastFinishedPulling="2026-01-20 16:54:09.580678325 +0000 UTC m=+743.341016293" observedRunningTime="2026-01-20 16:54:10.553272354 +0000 UTC m=+744.313610321" watchObservedRunningTime="2026-01-20 16:54:10.563416369 +0000 UTC m=+744.323754337" Jan 20 16:54:10 crc kubenswrapper[4558]: I0120 16:54:10.564210 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-mxswj"] Jan 20 16:54:10 crc kubenswrapper[4558]: I0120 16:54:10.571369 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-mxswj"] Jan 20 16:54:12 crc kubenswrapper[4558]: I0120 16:54:12.484017 4558 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Jan 20 16:54:12 crc kubenswrapper[4558]: I0120 16:54:12.572056 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a29d0f03-26ab-44a1-b24d-2cec192e90a9" path="/var/lib/kubelet/pods/a29d0f03-26ab-44a1-b24d-2cec192e90a9/volumes" Jan 20 16:54:18 crc kubenswrapper[4558]: I0120 16:54:18.661909 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-pt7m4" Jan 20 16:54:18 crc kubenswrapper[4558]: I0120 16:54:18.662385 4558 kubelet.go:2542] "SyncLoop (probe)" 
probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-pt7m4" Jan 20 16:54:18 crc kubenswrapper[4558]: I0120 16:54:18.680965 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-pt7m4" Jan 20 16:54:19 crc kubenswrapper[4558]: I0120 16:54:19.607153 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-pt7m4" Jan 20 16:54:27 crc kubenswrapper[4558]: I0120 16:54:27.176612 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/7f8269a825e737cb1f2e67fcbeccb826d8bfc6ea337cf3db10b8143e2end5bq"] Jan 20 16:54:27 crc kubenswrapper[4558]: E0120 16:54:27.177002 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a29d0f03-26ab-44a1-b24d-2cec192e90a9" containerName="registry-server" Jan 20 16:54:27 crc kubenswrapper[4558]: I0120 16:54:27.177014 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a29d0f03-26ab-44a1-b24d-2cec192e90a9" containerName="registry-server" Jan 20 16:54:27 crc kubenswrapper[4558]: I0120 16:54:27.177115 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a29d0f03-26ab-44a1-b24d-2cec192e90a9" containerName="registry-server" Jan 20 16:54:27 crc kubenswrapper[4558]: I0120 16:54:27.177821 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/7f8269a825e737cb1f2e67fcbeccb826d8bfc6ea337cf3db10b8143e2end5bq" Jan 20 16:54:27 crc kubenswrapper[4558]: I0120 16:54:27.178994 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-7xp5w" Jan 20 16:54:27 crc kubenswrapper[4558]: I0120 16:54:27.181663 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/7f8269a825e737cb1f2e67fcbeccb826d8bfc6ea337cf3db10b8143e2end5bq"] Jan 20 16:54:27 crc kubenswrapper[4558]: I0120 16:54:27.250475 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/cb58f5ea-4fc9-4dfd-9ca3-93a04769d2da-bundle\") pod \"7f8269a825e737cb1f2e67fcbeccb826d8bfc6ea337cf3db10b8143e2end5bq\" (UID: \"cb58f5ea-4fc9-4dfd-9ca3-93a04769d2da\") " pod="openstack-operators/7f8269a825e737cb1f2e67fcbeccb826d8bfc6ea337cf3db10b8143e2end5bq" Jan 20 16:54:27 crc kubenswrapper[4558]: I0120 16:54:27.250715 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/cb58f5ea-4fc9-4dfd-9ca3-93a04769d2da-util\") pod \"7f8269a825e737cb1f2e67fcbeccb826d8bfc6ea337cf3db10b8143e2end5bq\" (UID: \"cb58f5ea-4fc9-4dfd-9ca3-93a04769d2da\") " pod="openstack-operators/7f8269a825e737cb1f2e67fcbeccb826d8bfc6ea337cf3db10b8143e2end5bq" Jan 20 16:54:27 crc kubenswrapper[4558]: I0120 16:54:27.250840 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qzn69\" (UniqueName: \"kubernetes.io/projected/cb58f5ea-4fc9-4dfd-9ca3-93a04769d2da-kube-api-access-qzn69\") pod \"7f8269a825e737cb1f2e67fcbeccb826d8bfc6ea337cf3db10b8143e2end5bq\" (UID: \"cb58f5ea-4fc9-4dfd-9ca3-93a04769d2da\") " pod="openstack-operators/7f8269a825e737cb1f2e67fcbeccb826d8bfc6ea337cf3db10b8143e2end5bq" Jan 20 16:54:27 crc kubenswrapper[4558]: I0120 16:54:27.329755 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: 
Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 16:54:27 crc kubenswrapper[4558]: I0120 16:54:27.329810 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 16:54:27 crc kubenswrapper[4558]: I0120 16:54:27.329848 4558 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" Jan 20 16:54:27 crc kubenswrapper[4558]: I0120 16:54:27.330390 4558 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"5429c282dce69a1ca0312758655bba8954487a4d9cc36ecc68f43f7950925034"} pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 20 16:54:27 crc kubenswrapper[4558]: I0120 16:54:27.330442 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" containerID="cri-o://5429c282dce69a1ca0312758655bba8954487a4d9cc36ecc68f43f7950925034" gracePeriod=600 Jan 20 16:54:27 crc kubenswrapper[4558]: I0120 16:54:27.351483 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qzn69\" (UniqueName: \"kubernetes.io/projected/cb58f5ea-4fc9-4dfd-9ca3-93a04769d2da-kube-api-access-qzn69\") pod \"7f8269a825e737cb1f2e67fcbeccb826d8bfc6ea337cf3db10b8143e2end5bq\" (UID: \"cb58f5ea-4fc9-4dfd-9ca3-93a04769d2da\") " pod="openstack-operators/7f8269a825e737cb1f2e67fcbeccb826d8bfc6ea337cf3db10b8143e2end5bq" Jan 20 16:54:27 crc kubenswrapper[4558]: I0120 16:54:27.351522 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/cb58f5ea-4fc9-4dfd-9ca3-93a04769d2da-bundle\") pod \"7f8269a825e737cb1f2e67fcbeccb826d8bfc6ea337cf3db10b8143e2end5bq\" (UID: \"cb58f5ea-4fc9-4dfd-9ca3-93a04769d2da\") " pod="openstack-operators/7f8269a825e737cb1f2e67fcbeccb826d8bfc6ea337cf3db10b8143e2end5bq" Jan 20 16:54:27 crc kubenswrapper[4558]: I0120 16:54:27.351581 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/cb58f5ea-4fc9-4dfd-9ca3-93a04769d2da-util\") pod \"7f8269a825e737cb1f2e67fcbeccb826d8bfc6ea337cf3db10b8143e2end5bq\" (UID: \"cb58f5ea-4fc9-4dfd-9ca3-93a04769d2da\") " pod="openstack-operators/7f8269a825e737cb1f2e67fcbeccb826d8bfc6ea337cf3db10b8143e2end5bq" Jan 20 16:54:27 crc kubenswrapper[4558]: I0120 16:54:27.351956 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/cb58f5ea-4fc9-4dfd-9ca3-93a04769d2da-util\") pod \"7f8269a825e737cb1f2e67fcbeccb826d8bfc6ea337cf3db10b8143e2end5bq\" (UID: \"cb58f5ea-4fc9-4dfd-9ca3-93a04769d2da\") " pod="openstack-operators/7f8269a825e737cb1f2e67fcbeccb826d8bfc6ea337cf3db10b8143e2end5bq" Jan 20 16:54:27 crc kubenswrapper[4558]: I0120 16:54:27.352309 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" 
(UniqueName: \"kubernetes.io/empty-dir/cb58f5ea-4fc9-4dfd-9ca3-93a04769d2da-bundle\") pod \"7f8269a825e737cb1f2e67fcbeccb826d8bfc6ea337cf3db10b8143e2end5bq\" (UID: \"cb58f5ea-4fc9-4dfd-9ca3-93a04769d2da\") " pod="openstack-operators/7f8269a825e737cb1f2e67fcbeccb826d8bfc6ea337cf3db10b8143e2end5bq" Jan 20 16:54:27 crc kubenswrapper[4558]: I0120 16:54:27.365812 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qzn69\" (UniqueName: \"kubernetes.io/projected/cb58f5ea-4fc9-4dfd-9ca3-93a04769d2da-kube-api-access-qzn69\") pod \"7f8269a825e737cb1f2e67fcbeccb826d8bfc6ea337cf3db10b8143e2end5bq\" (UID: \"cb58f5ea-4fc9-4dfd-9ca3-93a04769d2da\") " pod="openstack-operators/7f8269a825e737cb1f2e67fcbeccb826d8bfc6ea337cf3db10b8143e2end5bq" Jan 20 16:54:27 crc kubenswrapper[4558]: I0120 16:54:27.492403 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/7f8269a825e737cb1f2e67fcbeccb826d8bfc6ea337cf3db10b8143e2end5bq" Jan 20 16:54:27 crc kubenswrapper[4558]: I0120 16:54:27.625926 4558 generic.go:334] "Generic (PLEG): container finished" podID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerID="5429c282dce69a1ca0312758655bba8954487a4d9cc36ecc68f43f7950925034" exitCode=0 Jan 20 16:54:27 crc kubenswrapper[4558]: I0120 16:54:27.625964 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerDied","Data":"5429c282dce69a1ca0312758655bba8954487a4d9cc36ecc68f43f7950925034"} Jan 20 16:54:27 crc kubenswrapper[4558]: I0120 16:54:27.625994 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerStarted","Data":"caa2a6c1a9115d7646c6f8d1705231c6b0c61a1f8491c5197970cd8a3fb6bbc2"} Jan 20 16:54:27 crc kubenswrapper[4558]: I0120 16:54:27.626009 4558 scope.go:117] "RemoveContainer" containerID="75faba5e9c60d39b0965f7842c4fcbaf51da890c19853fb9a294a0e3410f3d20" Jan 20 16:54:27 crc kubenswrapper[4558]: I0120 16:54:27.839966 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/7f8269a825e737cb1f2e67fcbeccb826d8bfc6ea337cf3db10b8143e2end5bq"] Jan 20 16:54:28 crc kubenswrapper[4558]: I0120 16:54:28.632450 4558 generic.go:334] "Generic (PLEG): container finished" podID="cb58f5ea-4fc9-4dfd-9ca3-93a04769d2da" containerID="52089be45c0734a738c9d20c13735ae48fb1812f3e7d83fd3be74d42aa4a1e37" exitCode=0 Jan 20 16:54:28 crc kubenswrapper[4558]: I0120 16:54:28.632514 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/7f8269a825e737cb1f2e67fcbeccb826d8bfc6ea337cf3db10b8143e2end5bq" event={"ID":"cb58f5ea-4fc9-4dfd-9ca3-93a04769d2da","Type":"ContainerDied","Data":"52089be45c0734a738c9d20c13735ae48fb1812f3e7d83fd3be74d42aa4a1e37"} Jan 20 16:54:28 crc kubenswrapper[4558]: I0120 16:54:28.632747 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/7f8269a825e737cb1f2e67fcbeccb826d8bfc6ea337cf3db10b8143e2end5bq" event={"ID":"cb58f5ea-4fc9-4dfd-9ca3-93a04769d2da","Type":"ContainerStarted","Data":"47cde238d1da2d2d7c9cbc82a509863bf7e9314cef89317dedfe51f464453163"} Jan 20 16:54:30 crc kubenswrapper[4558]: I0120 16:54:30.644663 4558 generic.go:334] "Generic (PLEG): container finished" podID="cb58f5ea-4fc9-4dfd-9ca3-93a04769d2da" containerID="aae98456e24c83d36df09930721945ddc3c5c2f923ce2b243599e54208df3542" exitCode=0 Jan 20 
16:54:30 crc kubenswrapper[4558]: I0120 16:54:30.644754 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/7f8269a825e737cb1f2e67fcbeccb826d8bfc6ea337cf3db10b8143e2end5bq" event={"ID":"cb58f5ea-4fc9-4dfd-9ca3-93a04769d2da","Type":"ContainerDied","Data":"aae98456e24c83d36df09930721945ddc3c5c2f923ce2b243599e54208df3542"} Jan 20 16:54:31 crc kubenswrapper[4558]: I0120 16:54:31.652666 4558 generic.go:334] "Generic (PLEG): container finished" podID="cb58f5ea-4fc9-4dfd-9ca3-93a04769d2da" containerID="fa8486dccca9e1fc6947a83a66596efd783eb166c15f19dff4450bd119503c33" exitCode=0 Jan 20 16:54:31 crc kubenswrapper[4558]: I0120 16:54:31.652743 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/7f8269a825e737cb1f2e67fcbeccb826d8bfc6ea337cf3db10b8143e2end5bq" event={"ID":"cb58f5ea-4fc9-4dfd-9ca3-93a04769d2da","Type":"ContainerDied","Data":"fa8486dccca9e1fc6947a83a66596efd783eb166c15f19dff4450bd119503c33"} Jan 20 16:54:32 crc kubenswrapper[4558]: I0120 16:54:32.848501 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/7f8269a825e737cb1f2e67fcbeccb826d8bfc6ea337cf3db10b8143e2end5bq" Jan 20 16:54:32 crc kubenswrapper[4558]: I0120 16:54:32.919202 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qzn69\" (UniqueName: \"kubernetes.io/projected/cb58f5ea-4fc9-4dfd-9ca3-93a04769d2da-kube-api-access-qzn69\") pod \"cb58f5ea-4fc9-4dfd-9ca3-93a04769d2da\" (UID: \"cb58f5ea-4fc9-4dfd-9ca3-93a04769d2da\") " Jan 20 16:54:32 crc kubenswrapper[4558]: I0120 16:54:32.919262 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/cb58f5ea-4fc9-4dfd-9ca3-93a04769d2da-bundle\") pod \"cb58f5ea-4fc9-4dfd-9ca3-93a04769d2da\" (UID: \"cb58f5ea-4fc9-4dfd-9ca3-93a04769d2da\") " Jan 20 16:54:32 crc kubenswrapper[4558]: I0120 16:54:32.919302 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/cb58f5ea-4fc9-4dfd-9ca3-93a04769d2da-util\") pod \"cb58f5ea-4fc9-4dfd-9ca3-93a04769d2da\" (UID: \"cb58f5ea-4fc9-4dfd-9ca3-93a04769d2da\") " Jan 20 16:54:32 crc kubenswrapper[4558]: I0120 16:54:32.920142 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cb58f5ea-4fc9-4dfd-9ca3-93a04769d2da-bundle" (OuterVolumeSpecName: "bundle") pod "cb58f5ea-4fc9-4dfd-9ca3-93a04769d2da" (UID: "cb58f5ea-4fc9-4dfd-9ca3-93a04769d2da"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 16:54:32 crc kubenswrapper[4558]: I0120 16:54:32.926301 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cb58f5ea-4fc9-4dfd-9ca3-93a04769d2da-kube-api-access-qzn69" (OuterVolumeSpecName: "kube-api-access-qzn69") pod "cb58f5ea-4fc9-4dfd-9ca3-93a04769d2da" (UID: "cb58f5ea-4fc9-4dfd-9ca3-93a04769d2da"). InnerVolumeSpecName "kube-api-access-qzn69". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:54:32 crc kubenswrapper[4558]: I0120 16:54:32.929587 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cb58f5ea-4fc9-4dfd-9ca3-93a04769d2da-util" (OuterVolumeSpecName: "util") pod "cb58f5ea-4fc9-4dfd-9ca3-93a04769d2da" (UID: "cb58f5ea-4fc9-4dfd-9ca3-93a04769d2da"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 16:54:33 crc kubenswrapper[4558]: I0120 16:54:33.020956 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qzn69\" (UniqueName: \"kubernetes.io/projected/cb58f5ea-4fc9-4dfd-9ca3-93a04769d2da-kube-api-access-qzn69\") on node \"crc\" DevicePath \"\"" Jan 20 16:54:33 crc kubenswrapper[4558]: I0120 16:54:33.021003 4558 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/cb58f5ea-4fc9-4dfd-9ca3-93a04769d2da-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 16:54:33 crc kubenswrapper[4558]: I0120 16:54:33.021020 4558 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/cb58f5ea-4fc9-4dfd-9ca3-93a04769d2da-util\") on node \"crc\" DevicePath \"\"" Jan 20 16:54:33 crc kubenswrapper[4558]: I0120 16:54:33.662926 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/7f8269a825e737cb1f2e67fcbeccb826d8bfc6ea337cf3db10b8143e2end5bq" event={"ID":"cb58f5ea-4fc9-4dfd-9ca3-93a04769d2da","Type":"ContainerDied","Data":"47cde238d1da2d2d7c9cbc82a509863bf7e9314cef89317dedfe51f464453163"} Jan 20 16:54:33 crc kubenswrapper[4558]: I0120 16:54:33.663341 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="47cde238d1da2d2d7c9cbc82a509863bf7e9314cef89317dedfe51f464453163" Jan 20 16:54:33 crc kubenswrapper[4558]: I0120 16:54:33.662997 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/7f8269a825e737cb1f2e67fcbeccb826d8bfc6ea337cf3db10b8143e2end5bq" Jan 20 16:54:39 crc kubenswrapper[4558]: I0120 16:54:39.990293 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-init-6d4d7d8545-4v6kg"] Jan 20 16:54:39 crc kubenswrapper[4558]: E0120 16:54:39.991431 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb58f5ea-4fc9-4dfd-9ca3-93a04769d2da" containerName="extract" Jan 20 16:54:39 crc kubenswrapper[4558]: I0120 16:54:39.991453 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb58f5ea-4fc9-4dfd-9ca3-93a04769d2da" containerName="extract" Jan 20 16:54:39 crc kubenswrapper[4558]: E0120 16:54:39.991470 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb58f5ea-4fc9-4dfd-9ca3-93a04769d2da" containerName="util" Jan 20 16:54:39 crc kubenswrapper[4558]: I0120 16:54:39.991476 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb58f5ea-4fc9-4dfd-9ca3-93a04769d2da" containerName="util" Jan 20 16:54:39 crc kubenswrapper[4558]: E0120 16:54:39.991484 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb58f5ea-4fc9-4dfd-9ca3-93a04769d2da" containerName="pull" Jan 20 16:54:39 crc kubenswrapper[4558]: I0120 16:54:39.991489 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb58f5ea-4fc9-4dfd-9ca3-93a04769d2da" containerName="pull" Jan 20 16:54:39 crc kubenswrapper[4558]: I0120 16:54:39.991579 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="cb58f5ea-4fc9-4dfd-9ca3-93a04769d2da" containerName="extract" Jan 20 16:54:39 crc kubenswrapper[4558]: I0120 16:54:39.991924 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-init-6d4d7d8545-4v6kg" Jan 20 16:54:39 crc kubenswrapper[4558]: I0120 16:54:39.995779 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-init-dockercfg-pkd5v" Jan 20 16:54:40 crc kubenswrapper[4558]: I0120 16:54:40.064104 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-init-6d4d7d8545-4v6kg"] Jan 20 16:54:40 crc kubenswrapper[4558]: I0120 16:54:40.095026 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lncvc\" (UniqueName: \"kubernetes.io/projected/a491bcd2-847b-41eb-addd-54e625a9e6e1-kube-api-access-lncvc\") pod \"openstack-operator-controller-init-6d4d7d8545-4v6kg\" (UID: \"a491bcd2-847b-41eb-addd-54e625a9e6e1\") " pod="openstack-operators/openstack-operator-controller-init-6d4d7d8545-4v6kg" Jan 20 16:54:40 crc kubenswrapper[4558]: I0120 16:54:40.195749 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lncvc\" (UniqueName: \"kubernetes.io/projected/a491bcd2-847b-41eb-addd-54e625a9e6e1-kube-api-access-lncvc\") pod \"openstack-operator-controller-init-6d4d7d8545-4v6kg\" (UID: \"a491bcd2-847b-41eb-addd-54e625a9e6e1\") " pod="openstack-operators/openstack-operator-controller-init-6d4d7d8545-4v6kg" Jan 20 16:54:40 crc kubenswrapper[4558]: I0120 16:54:40.211459 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lncvc\" (UniqueName: \"kubernetes.io/projected/a491bcd2-847b-41eb-addd-54e625a9e6e1-kube-api-access-lncvc\") pod \"openstack-operator-controller-init-6d4d7d8545-4v6kg\" (UID: \"a491bcd2-847b-41eb-addd-54e625a9e6e1\") " pod="openstack-operators/openstack-operator-controller-init-6d4d7d8545-4v6kg" Jan 20 16:54:40 crc kubenswrapper[4558]: I0120 16:54:40.305413 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-init-6d4d7d8545-4v6kg" Jan 20 16:54:40 crc kubenswrapper[4558]: I0120 16:54:40.664830 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-init-6d4d7d8545-4v6kg"] Jan 20 16:54:40 crc kubenswrapper[4558]: W0120 16:54:40.667702 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda491bcd2_847b_41eb_addd_54e625a9e6e1.slice/crio-2b8abeab32c6e045c040ac133acde37b745dca669228d4ba2a5e925998835139 WatchSource:0}: Error finding container 2b8abeab32c6e045c040ac133acde37b745dca669228d4ba2a5e925998835139: Status 404 returned error can't find the container with id 2b8abeab32c6e045c040ac133acde37b745dca669228d4ba2a5e925998835139 Jan 20 16:54:40 crc kubenswrapper[4558]: I0120 16:54:40.691190 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-init-6d4d7d8545-4v6kg" event={"ID":"a491bcd2-847b-41eb-addd-54e625a9e6e1","Type":"ContainerStarted","Data":"2b8abeab32c6e045c040ac133acde37b745dca669228d4ba2a5e925998835139"} Jan 20 16:54:45 crc kubenswrapper[4558]: I0120 16:54:45.720074 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-init-6d4d7d8545-4v6kg" event={"ID":"a491bcd2-847b-41eb-addd-54e625a9e6e1","Type":"ContainerStarted","Data":"2ef63481a765808418e8fe2316f84a64976e170f6b42473a8954a04e0fa1bbe7"} Jan 20 16:54:45 crc kubenswrapper[4558]: I0120 16:54:45.720458 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-init-6d4d7d8545-4v6kg" Jan 20 16:54:45 crc kubenswrapper[4558]: I0120 16:54:45.741321 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-init-6d4d7d8545-4v6kg" podStartSLOduration=1.8749174960000001 podStartE2EDuration="6.741305392s" podCreationTimestamp="2026-01-20 16:54:39 +0000 UTC" firstStartedPulling="2026-01-20 16:54:40.669134688 +0000 UTC m=+774.429472655" lastFinishedPulling="2026-01-20 16:54:45.535522584 +0000 UTC m=+779.295860551" observedRunningTime="2026-01-20 16:54:45.737645363 +0000 UTC m=+779.497983330" watchObservedRunningTime="2026-01-20 16:54:45.741305392 +0000 UTC m=+779.501643359" Jan 20 16:54:50 crc kubenswrapper[4558]: I0120 16:54:50.307609 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-init-6d4d7d8545-4v6kg" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.253421 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7ddb5c749-jrgz7"] Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.254365 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7ddb5c749-jrgz7" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.256039 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-dxfgb" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.258899 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-9b68f5989-mdfnr"] Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.259582 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-9b68f5989-mdfnr" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.260996 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-vqrqf" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.264879 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-9b68f5989-mdfnr"] Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.280932 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-9f958b845-q8wlk"] Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.281667 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-9f958b845-q8wlk" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.283078 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-hhv5c" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.285024 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-c6994669c-78pts"] Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.285566 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-c6994669c-78pts" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.287774 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-7484j" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.292511 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-9f958b845-q8wlk"] Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.295808 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-c6994669c-78pts"] Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.308946 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-594c8c9d5d-k28w4"] Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.309854 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-k28w4" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.310864 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-qhb5q" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.312608 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7ddb5c749-jrgz7"] Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.318734 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-77d5c5b54f-gvd5f"] Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.319455 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-gvd5f" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.322384 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-594c8c9d5d-k28w4"] Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.324480 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-95kcl" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.326002 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-77d5c5b54f-gvd5f"] Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.346660 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-77c48c7859-4f7km"] Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.347293 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-77c48c7859-4f7km" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.350348 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-fb4t5" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.350424 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.350479 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-78757b4889-t99dx"] Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.351121 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-78757b4889-t99dx" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.353333 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-8g2hh" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.360992 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-77c48c7859-4f7km"] Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.370980 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-78757b4889-t99dx"] Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.383872 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-767fdc4f47-8mw4r"] Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.384547 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-767fdc4f47-8mw4r" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.388448 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-87vtm" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.395734 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-767fdc4f47-8mw4r"] Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.416676 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-864f6b75bf-4jj8h"] Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.417392 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-864f6b75bf-4jj8h" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.419497 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-skvck" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.420989 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-864f6b75bf-4jj8h"] Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.426934 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-c87fff755-plg7f"] Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.435642 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-c87fff755-plg7f" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.437991 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-wccnk" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.445384 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xl9zj\" (UniqueName: \"kubernetes.io/projected/54c6bd49-d022-40c6-b547-58d3eb5ba7e2-kube-api-access-xl9zj\") pod \"designate-operator-controller-manager-9f958b845-q8wlk\" (UID: \"54c6bd49-d022-40c6-b547-58d3eb5ba7e2\") " pod="openstack-operators/designate-operator-controller-manager-9f958b845-q8wlk" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.445448 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4wqzs\" (UniqueName: \"kubernetes.io/projected/2e9e43c0-674a-4769-b44b-7eb6ee1a69d5-kube-api-access-4wqzs\") pod \"infra-operator-controller-manager-77c48c7859-4f7km\" (UID: \"2e9e43c0-674a-4769-b44b-7eb6ee1a69d5\") " pod="openstack-operators/infra-operator-controller-manager-77c48c7859-4f7km" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.445490 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2e9e43c0-674a-4769-b44b-7eb6ee1a69d5-cert\") pod \"infra-operator-controller-manager-77c48c7859-4f7km\" (UID: \"2e9e43c0-674a-4769-b44b-7eb6ee1a69d5\") " pod="openstack-operators/infra-operator-controller-manager-77c48c7859-4f7km" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.445536 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v7ch6\" (UniqueName: 
\"kubernetes.io/projected/e645ac52-b990-4b3f-a282-59ce60cacaff-kube-api-access-v7ch6\") pod \"glance-operator-controller-manager-c6994669c-78pts\" (UID: \"e645ac52-b990-4b3f-a282-59ce60cacaff\") " pod="openstack-operators/glance-operator-controller-manager-c6994669c-78pts" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.445585 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x6bm9\" (UniqueName: \"kubernetes.io/projected/88bca883-e84c-4223-80de-4c389d44f9fe-kube-api-access-x6bm9\") pod \"heat-operator-controller-manager-594c8c9d5d-k28w4\" (UID: \"88bca883-e84c-4223-80de-4c389d44f9fe\") " pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-k28w4" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.445611 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c4z7x\" (UniqueName: \"kubernetes.io/projected/31cd4c27-d472-4406-886a-7222315c465d-kube-api-access-c4z7x\") pod \"horizon-operator-controller-manager-77d5c5b54f-gvd5f\" (UID: \"31cd4c27-d472-4406-886a-7222315c465d\") " pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-gvd5f" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.445632 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xs2mv\" (UniqueName: \"kubernetes.io/projected/1c12cb14-b8f1-4939-84d9-6a6b851f095a-kube-api-access-xs2mv\") pod \"barbican-operator-controller-manager-7ddb5c749-jrgz7\" (UID: \"1c12cb14-b8f1-4939-84d9-6a6b851f095a\") " pod="openstack-operators/barbican-operator-controller-manager-7ddb5c749-jrgz7" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.445670 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p92dc\" (UniqueName: \"kubernetes.io/projected/973549d4-6a26-4166-8be0-b0dfb7c5aec6-kube-api-access-p92dc\") pod \"cinder-operator-controller-manager-9b68f5989-mdfnr\" (UID: \"973549d4-6a26-4166-8be0-b0dfb7c5aec6\") " pod="openstack-operators/cinder-operator-controller-manager-9b68f5989-mdfnr" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.449792 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-cb4666565-6689s"] Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.456677 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-cb4666565-6689s" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.458492 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-4m27d" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.474353 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-c87fff755-plg7f"] Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.481548 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-cb4666565-6689s"] Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.485377 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-65849867d6-js8vs"] Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.486141 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-65849867d6-js8vs" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.488746 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-65849867d6-js8vs"] Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.490138 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-gtn2h" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.495276 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-7fc9b76cf6-9ld4x"] Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.495824 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-7fc9b76cf6-9ld4x" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.497516 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-q6g8v" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.499573 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-7fc9b76cf6-9ld4x"] Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.513889 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-55db956ddc-6j4g5"] Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.514689 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-55db956ddc-6j4g5" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.515943 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-pj47q" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.517061 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-5b9875986dl6bbk"] Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.517701 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5b9875986dl6bbk" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.519254 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.519507 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-qlb9j" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.520248 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-55db956ddc-6j4g5"] Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.527304 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-686df47fcb-tsdgq"] Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.527772 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-686df47fcb-tsdgq" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.530224 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-tnbwb" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.537093 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-5b9875986dl6bbk"] Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.540118 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-686df47fcb-tsdgq"] Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.548600 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2e9e43c0-674a-4769-b44b-7eb6ee1a69d5-cert\") pod \"infra-operator-controller-manager-77c48c7859-4f7km\" (UID: \"2e9e43c0-674a-4769-b44b-7eb6ee1a69d5\") " pod="openstack-operators/infra-operator-controller-manager-77c48c7859-4f7km" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.548636 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v7ch6\" (UniqueName: \"kubernetes.io/projected/e645ac52-b990-4b3f-a282-59ce60cacaff-kube-api-access-v7ch6\") pod \"glance-operator-controller-manager-c6994669c-78pts\" (UID: \"e645ac52-b990-4b3f-a282-59ce60cacaff\") " pod="openstack-operators/glance-operator-controller-manager-c6994669c-78pts" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.548680 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xwdml\" (UniqueName: \"kubernetes.io/projected/115bb848-a715-4eac-8993-63842460dd3d-kube-api-access-xwdml\") pod \"keystone-operator-controller-manager-767fdc4f47-8mw4r\" (UID: \"115bb848-a715-4eac-8993-63842460dd3d\") " pod="openstack-operators/keystone-operator-controller-manager-767fdc4f47-8mw4r" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.548700 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x6bm9\" (UniqueName: \"kubernetes.io/projected/88bca883-e84c-4223-80de-4c389d44f9fe-kube-api-access-x6bm9\") pod \"heat-operator-controller-manager-594c8c9d5d-k28w4\" (UID: \"88bca883-e84c-4223-80de-4c389d44f9fe\") " pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-k28w4" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.548724 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r2qgm\" (UniqueName: \"kubernetes.io/projected/6e337f3c-6900-4314-adbb-aec361ccb7cc-kube-api-access-r2qgm\") pod \"manila-operator-controller-manager-864f6b75bf-4jj8h\" (UID: \"6e337f3c-6900-4314-adbb-aec361ccb7cc\") " pod="openstack-operators/manila-operator-controller-manager-864f6b75bf-4jj8h" Jan 20 16:55:06 crc kubenswrapper[4558]: E0120 16:55:06.548745 4558 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.548747 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c4z7x\" (UniqueName: \"kubernetes.io/projected/31cd4c27-d472-4406-886a-7222315c465d-kube-api-access-c4z7x\") pod 
\"horizon-operator-controller-manager-77d5c5b54f-gvd5f\" (UID: \"31cd4c27-d472-4406-886a-7222315c465d\") " pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-gvd5f" Jan 20 16:55:06 crc kubenswrapper[4558]: E0120 16:55:06.548789 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/2e9e43c0-674a-4769-b44b-7eb6ee1a69d5-cert podName:2e9e43c0-674a-4769-b44b-7eb6ee1a69d5 nodeName:}" failed. No retries permitted until 2026-01-20 16:55:07.04877382 +0000 UTC m=+800.809111788 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/2e9e43c0-674a-4769-b44b-7eb6ee1a69d5-cert") pod "infra-operator-controller-manager-77c48c7859-4f7km" (UID: "2e9e43c0-674a-4769-b44b-7eb6ee1a69d5") : secret "infra-operator-webhook-server-cert" not found Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.548811 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xs2mv\" (UniqueName: \"kubernetes.io/projected/1c12cb14-b8f1-4939-84d9-6a6b851f095a-kube-api-access-xs2mv\") pod \"barbican-operator-controller-manager-7ddb5c749-jrgz7\" (UID: \"1c12cb14-b8f1-4939-84d9-6a6b851f095a\") " pod="openstack-operators/barbican-operator-controller-manager-7ddb5c749-jrgz7" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.548880 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hvbdx\" (UniqueName: \"kubernetes.io/projected/dba009f4-faa1-4d6b-bc19-c00eedd6c7c7-kube-api-access-hvbdx\") pod \"mariadb-operator-controller-manager-c87fff755-plg7f\" (UID: \"dba009f4-faa1-4d6b-bc19-c00eedd6c7c7\") " pod="openstack-operators/mariadb-operator-controller-manager-c87fff755-plg7f" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.551935 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p92dc\" (UniqueName: \"kubernetes.io/projected/973549d4-6a26-4166-8be0-b0dfb7c5aec6-kube-api-access-p92dc\") pod \"cinder-operator-controller-manager-9b68f5989-mdfnr\" (UID: \"973549d4-6a26-4166-8be0-b0dfb7c5aec6\") " pod="openstack-operators/cinder-operator-controller-manager-9b68f5989-mdfnr" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.551991 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xl9zj\" (UniqueName: \"kubernetes.io/projected/54c6bd49-d022-40c6-b547-58d3eb5ba7e2-kube-api-access-xl9zj\") pod \"designate-operator-controller-manager-9f958b845-q8wlk\" (UID: \"54c6bd49-d022-40c6-b547-58d3eb5ba7e2\") " pod="openstack-operators/designate-operator-controller-manager-9f958b845-q8wlk" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.552042 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4wqzs\" (UniqueName: \"kubernetes.io/projected/2e9e43c0-674a-4769-b44b-7eb6ee1a69d5-kube-api-access-4wqzs\") pod \"infra-operator-controller-manager-77c48c7859-4f7km\" (UID: \"2e9e43c0-674a-4769-b44b-7eb6ee1a69d5\") " pod="openstack-operators/infra-operator-controller-manager-77c48c7859-4f7km" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.552062 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-68gw2\" (UniqueName: \"kubernetes.io/projected/e7054184-22f8-43b2-b75f-0534d5bb467f-kube-api-access-68gw2\") pod \"ironic-operator-controller-manager-78757b4889-t99dx\" (UID: \"e7054184-22f8-43b2-b75f-0534d5bb467f\") 
" pod="openstack-operators/ironic-operator-controller-manager-78757b4889-t99dx" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.563086 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v7ch6\" (UniqueName: \"kubernetes.io/projected/e645ac52-b990-4b3f-a282-59ce60cacaff-kube-api-access-v7ch6\") pod \"glance-operator-controller-manager-c6994669c-78pts\" (UID: \"e645ac52-b990-4b3f-a282-59ce60cacaff\") " pod="openstack-operators/glance-operator-controller-manager-c6994669c-78pts" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.563302 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-85dd56d4cc-cvlzf"] Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.563903 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-85dd56d4cc-cvlzf" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.566121 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-85dd56d4cc-cvlzf"] Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.567136 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-7m2kc" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.568187 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xs2mv\" (UniqueName: \"kubernetes.io/projected/1c12cb14-b8f1-4939-84d9-6a6b851f095a-kube-api-access-xs2mv\") pod \"barbican-operator-controller-manager-7ddb5c749-jrgz7\" (UID: \"1c12cb14-b8f1-4939-84d9-6a6b851f095a\") " pod="openstack-operators/barbican-operator-controller-manager-7ddb5c749-jrgz7" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.568543 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7ddb5c749-jrgz7" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.568834 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xl9zj\" (UniqueName: \"kubernetes.io/projected/54c6bd49-d022-40c6-b547-58d3eb5ba7e2-kube-api-access-xl9zj\") pod \"designate-operator-controller-manager-9f958b845-q8wlk\" (UID: \"54c6bd49-d022-40c6-b547-58d3eb5ba7e2\") " pod="openstack-operators/designate-operator-controller-manager-9f958b845-q8wlk" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.570054 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c4z7x\" (UniqueName: \"kubernetes.io/projected/31cd4c27-d472-4406-886a-7222315c465d-kube-api-access-c4z7x\") pod \"horizon-operator-controller-manager-77d5c5b54f-gvd5f\" (UID: \"31cd4c27-d472-4406-886a-7222315c465d\") " pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-gvd5f" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.571197 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x6bm9\" (UniqueName: \"kubernetes.io/projected/88bca883-e84c-4223-80de-4c389d44f9fe-kube-api-access-x6bm9\") pod \"heat-operator-controller-manager-594c8c9d5d-k28w4\" (UID: \"88bca883-e84c-4223-80de-4c389d44f9fe\") " pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-k28w4" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.574254 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4wqzs\" (UniqueName: \"kubernetes.io/projected/2e9e43c0-674a-4769-b44b-7eb6ee1a69d5-kube-api-access-4wqzs\") pod \"infra-operator-controller-manager-77c48c7859-4f7km\" (UID: \"2e9e43c0-674a-4769-b44b-7eb6ee1a69d5\") " pod="openstack-operators/infra-operator-controller-manager-77c48c7859-4f7km" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.574745 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p92dc\" (UniqueName: \"kubernetes.io/projected/973549d4-6a26-4166-8be0-b0dfb7c5aec6-kube-api-access-p92dc\") pod \"cinder-operator-controller-manager-9b68f5989-mdfnr\" (UID: \"973549d4-6a26-4166-8be0-b0dfb7c5aec6\") " pod="openstack-operators/cinder-operator-controller-manager-9b68f5989-mdfnr" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.580140 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-9b68f5989-mdfnr" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.593668 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-9f958b845-q8wlk" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.600011 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-c6994669c-78pts" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.632221 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-k28w4" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.641269 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-gvd5f" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.654216 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-68gw2\" (UniqueName: \"kubernetes.io/projected/e7054184-22f8-43b2-b75f-0534d5bb467f-kube-api-access-68gw2\") pod \"ironic-operator-controller-manager-78757b4889-t99dx\" (UID: \"e7054184-22f8-43b2-b75f-0534d5bb467f\") " pod="openstack-operators/ironic-operator-controller-manager-78757b4889-t99dx" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.654249 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k9ktz\" (UniqueName: \"kubernetes.io/projected/aec45707-2c02-43bb-b7d9-a24b906cadca-kube-api-access-k9ktz\") pod \"octavia-operator-controller-manager-7fc9b76cf6-9ld4x\" (UID: \"aec45707-2c02-43bb-b7d9-a24b906cadca\") " pod="openstack-operators/octavia-operator-controller-manager-7fc9b76cf6-9ld4x" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.654275 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5vndv\" (UniqueName: \"kubernetes.io/projected/cbbd3f62-64b6-4cc2-8c3b-b21d317c0624-kube-api-access-5vndv\") pod \"openstack-baremetal-operator-controller-manager-5b9875986dl6bbk\" (UID: \"cbbd3f62-64b6-4cc2-8c3b-b21d317c0624\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5b9875986dl6bbk" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.654291 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-99gtz\" (UniqueName: \"kubernetes.io/projected/56c68b3c-13ac-4e77-a8a5-bd99d83d5667-kube-api-access-99gtz\") pod \"swift-operator-controller-manager-85dd56d4cc-cvlzf\" (UID: \"56c68b3c-13ac-4e77-a8a5-bd99d83d5667\") " pod="openstack-operators/swift-operator-controller-manager-85dd56d4cc-cvlzf" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.654306 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6qfl9\" (UniqueName: \"kubernetes.io/projected/42eb153b-1142-4dc6-9e34-b4cc38e49701-kube-api-access-6qfl9\") pod \"nova-operator-controller-manager-65849867d6-js8vs\" (UID: \"42eb153b-1142-4dc6-9e34-b4cc38e49701\") " pod="openstack-operators/nova-operator-controller-manager-65849867d6-js8vs" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.654350 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cbbd3f62-64b6-4cc2-8c3b-b21d317c0624-cert\") pod \"openstack-baremetal-operator-controller-manager-5b9875986dl6bbk\" (UID: \"cbbd3f62-64b6-4cc2-8c3b-b21d317c0624\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5b9875986dl6bbk" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.654374 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xwdml\" (UniqueName: \"kubernetes.io/projected/115bb848-a715-4eac-8993-63842460dd3d-kube-api-access-xwdml\") pod \"keystone-operator-controller-manager-767fdc4f47-8mw4r\" (UID: \"115bb848-a715-4eac-8993-63842460dd3d\") " pod="openstack-operators/keystone-operator-controller-manager-767fdc4f47-8mw4r" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.654395 4558 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"kube-api-access-r2qgm\" (UniqueName: \"kubernetes.io/projected/6e337f3c-6900-4314-adbb-aec361ccb7cc-kube-api-access-r2qgm\") pod \"manila-operator-controller-manager-864f6b75bf-4jj8h\" (UID: \"6e337f3c-6900-4314-adbb-aec361ccb7cc\") " pod="openstack-operators/manila-operator-controller-manager-864f6b75bf-4jj8h" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.654412 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8rxpt\" (UniqueName: \"kubernetes.io/projected/0292902b-f0ca-4abc-b220-4e3268243db5-kube-api-access-8rxpt\") pod \"placement-operator-controller-manager-686df47fcb-tsdgq\" (UID: \"0292902b-f0ca-4abc-b220-4e3268243db5\") " pod="openstack-operators/placement-operator-controller-manager-686df47fcb-tsdgq" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.654452 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jwfxn\" (UniqueName: \"kubernetes.io/projected/1998e4dc-a1d4-405e-b0dd-546e1d5fed6f-kube-api-access-jwfxn\") pod \"ovn-operator-controller-manager-55db956ddc-6j4g5\" (UID: \"1998e4dc-a1d4-405e-b0dd-546e1d5fed6f\") " pod="openstack-operators/ovn-operator-controller-manager-55db956ddc-6j4g5" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.654467 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hvbdx\" (UniqueName: \"kubernetes.io/projected/dba009f4-faa1-4d6b-bc19-c00eedd6c7c7-kube-api-access-hvbdx\") pod \"mariadb-operator-controller-manager-c87fff755-plg7f\" (UID: \"dba009f4-faa1-4d6b-bc19-c00eedd6c7c7\") " pod="openstack-operators/mariadb-operator-controller-manager-c87fff755-plg7f" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.654503 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n7lck\" (UniqueName: \"kubernetes.io/projected/f1a6abd3-1c13-40ca-8e20-344fd40bc348-kube-api-access-n7lck\") pod \"neutron-operator-controller-manager-cb4666565-6689s\" (UID: \"f1a6abd3-1c13-40ca-8e20-344fd40bc348\") " pod="openstack-operators/neutron-operator-controller-manager-cb4666565-6689s" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.672930 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-5f8f495fcf-ldhxq"] Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.673113 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xwdml\" (UniqueName: \"kubernetes.io/projected/115bb848-a715-4eac-8993-63842460dd3d-kube-api-access-xwdml\") pod \"keystone-operator-controller-manager-767fdc4f47-8mw4r\" (UID: \"115bb848-a715-4eac-8993-63842460dd3d\") " pod="openstack-operators/keystone-operator-controller-manager-767fdc4f47-8mw4r" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.673786 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-5f8f495fcf-ldhxq" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.674444 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r2qgm\" (UniqueName: \"kubernetes.io/projected/6e337f3c-6900-4314-adbb-aec361ccb7cc-kube-api-access-r2qgm\") pod \"manila-operator-controller-manager-864f6b75bf-4jj8h\" (UID: \"6e337f3c-6900-4314-adbb-aec361ccb7cc\") " pod="openstack-operators/manila-operator-controller-manager-864f6b75bf-4jj8h" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.674831 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hvbdx\" (UniqueName: \"kubernetes.io/projected/dba009f4-faa1-4d6b-bc19-c00eedd6c7c7-kube-api-access-hvbdx\") pod \"mariadb-operator-controller-manager-c87fff755-plg7f\" (UID: \"dba009f4-faa1-4d6b-bc19-c00eedd6c7c7\") " pod="openstack-operators/mariadb-operator-controller-manager-c87fff755-plg7f" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.676074 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-68gw2\" (UniqueName: \"kubernetes.io/projected/e7054184-22f8-43b2-b75f-0534d5bb467f-kube-api-access-68gw2\") pod \"ironic-operator-controller-manager-78757b4889-t99dx\" (UID: \"e7054184-22f8-43b2-b75f-0534d5bb467f\") " pod="openstack-operators/ironic-operator-controller-manager-78757b4889-t99dx" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.676555 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-4qwkc" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.680486 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-5f8f495fcf-ldhxq"] Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.708633 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-767fdc4f47-8mw4r" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.750291 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-864f6b75bf-4jj8h" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.755407 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k9ktz\" (UniqueName: \"kubernetes.io/projected/aec45707-2c02-43bb-b7d9-a24b906cadca-kube-api-access-k9ktz\") pod \"octavia-operator-controller-manager-7fc9b76cf6-9ld4x\" (UID: \"aec45707-2c02-43bb-b7d9-a24b906cadca\") " pod="openstack-operators/octavia-operator-controller-manager-7fc9b76cf6-9ld4x" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.755444 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5vndv\" (UniqueName: \"kubernetes.io/projected/cbbd3f62-64b6-4cc2-8c3b-b21d317c0624-kube-api-access-5vndv\") pod \"openstack-baremetal-operator-controller-manager-5b9875986dl6bbk\" (UID: \"cbbd3f62-64b6-4cc2-8c3b-b21d317c0624\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5b9875986dl6bbk" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.755465 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-99gtz\" (UniqueName: \"kubernetes.io/projected/56c68b3c-13ac-4e77-a8a5-bd99d83d5667-kube-api-access-99gtz\") pod \"swift-operator-controller-manager-85dd56d4cc-cvlzf\" (UID: \"56c68b3c-13ac-4e77-a8a5-bd99d83d5667\") " pod="openstack-operators/swift-operator-controller-manager-85dd56d4cc-cvlzf" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.755483 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6qfl9\" (UniqueName: \"kubernetes.io/projected/42eb153b-1142-4dc6-9e34-b4cc38e49701-kube-api-access-6qfl9\") pod \"nova-operator-controller-manager-65849867d6-js8vs\" (UID: \"42eb153b-1142-4dc6-9e34-b4cc38e49701\") " pod="openstack-operators/nova-operator-controller-manager-65849867d6-js8vs" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.755506 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qnxsc\" (UniqueName: \"kubernetes.io/projected/da27b90a-7f65-4d58-92f1-d46b0d92fd79-kube-api-access-qnxsc\") pod \"telemetry-operator-controller-manager-5f8f495fcf-ldhxq\" (UID: \"da27b90a-7f65-4d58-92f1-d46b0d92fd79\") " pod="openstack-operators/telemetry-operator-controller-manager-5f8f495fcf-ldhxq" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.755547 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cbbd3f62-64b6-4cc2-8c3b-b21d317c0624-cert\") pod \"openstack-baremetal-operator-controller-manager-5b9875986dl6bbk\" (UID: \"cbbd3f62-64b6-4cc2-8c3b-b21d317c0624\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5b9875986dl6bbk" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.755579 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8rxpt\" (UniqueName: \"kubernetes.io/projected/0292902b-f0ca-4abc-b220-4e3268243db5-kube-api-access-8rxpt\") pod \"placement-operator-controller-manager-686df47fcb-tsdgq\" (UID: \"0292902b-f0ca-4abc-b220-4e3268243db5\") " pod="openstack-operators/placement-operator-controller-manager-686df47fcb-tsdgq" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.755612 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jwfxn\" (UniqueName: 
\"kubernetes.io/projected/1998e4dc-a1d4-405e-b0dd-546e1d5fed6f-kube-api-access-jwfxn\") pod \"ovn-operator-controller-manager-55db956ddc-6j4g5\" (UID: \"1998e4dc-a1d4-405e-b0dd-546e1d5fed6f\") " pod="openstack-operators/ovn-operator-controller-manager-55db956ddc-6j4g5" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.755642 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n7lck\" (UniqueName: \"kubernetes.io/projected/f1a6abd3-1c13-40ca-8e20-344fd40bc348-kube-api-access-n7lck\") pod \"neutron-operator-controller-manager-cb4666565-6689s\" (UID: \"f1a6abd3-1c13-40ca-8e20-344fd40bc348\") " pod="openstack-operators/neutron-operator-controller-manager-cb4666565-6689s" Jan 20 16:55:06 crc kubenswrapper[4558]: E0120 16:55:06.756151 4558 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 20 16:55:06 crc kubenswrapper[4558]: E0120 16:55:06.756246 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cbbd3f62-64b6-4cc2-8c3b-b21d317c0624-cert podName:cbbd3f62-64b6-4cc2-8c3b-b21d317c0624 nodeName:}" failed. No retries permitted until 2026-01-20 16:55:07.25623024 +0000 UTC m=+801.016568207 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/cbbd3f62-64b6-4cc2-8c3b-b21d317c0624-cert") pod "openstack-baremetal-operator-controller-manager-5b9875986dl6bbk" (UID: "cbbd3f62-64b6-4cc2-8c3b-b21d317c0624") : secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.757899 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-c87fff755-plg7f" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.759983 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-7cd8bc9dbb-9hmt6"] Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.761667 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/test-operator-controller-manager-7cd8bc9dbb-9hmt6" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.770461 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-zbpfb" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.772611 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jwfxn\" (UniqueName: \"kubernetes.io/projected/1998e4dc-a1d4-405e-b0dd-546e1d5fed6f-kube-api-access-jwfxn\") pod \"ovn-operator-controller-manager-55db956ddc-6j4g5\" (UID: \"1998e4dc-a1d4-405e-b0dd-546e1d5fed6f\") " pod="openstack-operators/ovn-operator-controller-manager-55db956ddc-6j4g5" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.772690 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6qfl9\" (UniqueName: \"kubernetes.io/projected/42eb153b-1142-4dc6-9e34-b4cc38e49701-kube-api-access-6qfl9\") pod \"nova-operator-controller-manager-65849867d6-js8vs\" (UID: \"42eb153b-1142-4dc6-9e34-b4cc38e49701\") " pod="openstack-operators/nova-operator-controller-manager-65849867d6-js8vs" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.774360 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-7cd8bc9dbb-9hmt6"] Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.778030 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-99gtz\" (UniqueName: \"kubernetes.io/projected/56c68b3c-13ac-4e77-a8a5-bd99d83d5667-kube-api-access-99gtz\") pod \"swift-operator-controller-manager-85dd56d4cc-cvlzf\" (UID: \"56c68b3c-13ac-4e77-a8a5-bd99d83d5667\") " pod="openstack-operators/swift-operator-controller-manager-85dd56d4cc-cvlzf" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.783523 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8rxpt\" (UniqueName: \"kubernetes.io/projected/0292902b-f0ca-4abc-b220-4e3268243db5-kube-api-access-8rxpt\") pod \"placement-operator-controller-manager-686df47fcb-tsdgq\" (UID: \"0292902b-f0ca-4abc-b220-4e3268243db5\") " pod="openstack-operators/placement-operator-controller-manager-686df47fcb-tsdgq" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.784403 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7ddb5c749-jrgz7"] Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.787263 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5vndv\" (UniqueName: \"kubernetes.io/projected/cbbd3f62-64b6-4cc2-8c3b-b21d317c0624-kube-api-access-5vndv\") pod \"openstack-baremetal-operator-controller-manager-5b9875986dl6bbk\" (UID: \"cbbd3f62-64b6-4cc2-8c3b-b21d317c0624\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5b9875986dl6bbk" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.787710 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k9ktz\" (UniqueName: \"kubernetes.io/projected/aec45707-2c02-43bb-b7d9-a24b906cadca-kube-api-access-k9ktz\") pod \"octavia-operator-controller-manager-7fc9b76cf6-9ld4x\" (UID: \"aec45707-2c02-43bb-b7d9-a24b906cadca\") " pod="openstack-operators/octavia-operator-controller-manager-7fc9b76cf6-9ld4x" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.791244 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"kube-api-access-n7lck\" (UniqueName: \"kubernetes.io/projected/f1a6abd3-1c13-40ca-8e20-344fd40bc348-kube-api-access-n7lck\") pod \"neutron-operator-controller-manager-cb4666565-6689s\" (UID: \"f1a6abd3-1c13-40ca-8e20-344fd40bc348\") " pod="openstack-operators/neutron-operator-controller-manager-cb4666565-6689s" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.798120 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-65849867d6-js8vs" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.807470 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-7fc9b76cf6-9ld4x" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.828034 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-55db956ddc-6j4g5" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.846517 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-686df47fcb-tsdgq" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.857689 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v5qjm\" (UniqueName: \"kubernetes.io/projected/fafbffd7-66c2-42ab-97c2-034a1fd6fd7c-kube-api-access-v5qjm\") pod \"test-operator-controller-manager-7cd8bc9dbb-9hmt6\" (UID: \"fafbffd7-66c2-42ab-97c2-034a1fd6fd7c\") " pod="openstack-operators/test-operator-controller-manager-7cd8bc9dbb-9hmt6" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.857908 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qnxsc\" (UniqueName: \"kubernetes.io/projected/da27b90a-7f65-4d58-92f1-d46b0d92fd79-kube-api-access-qnxsc\") pod \"telemetry-operator-controller-manager-5f8f495fcf-ldhxq\" (UID: \"da27b90a-7f65-4d58-92f1-d46b0d92fd79\") " pod="openstack-operators/telemetry-operator-controller-manager-5f8f495fcf-ldhxq" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.868403 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-64cd966744-925vf"] Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.869263 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-64cd966744-925vf" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.871379 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-pl9jm" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.874404 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-64cd966744-925vf"] Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.902387 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qnxsc\" (UniqueName: \"kubernetes.io/projected/da27b90a-7f65-4d58-92f1-d46b0d92fd79-kube-api-access-qnxsc\") pod \"telemetry-operator-controller-manager-5f8f495fcf-ldhxq\" (UID: \"da27b90a-7f65-4d58-92f1-d46b0d92fd79\") " pod="openstack-operators/telemetry-operator-controller-manager-5f8f495fcf-ldhxq" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.956346 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-85dd56d4cc-cvlzf" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.959042 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v5qjm\" (UniqueName: \"kubernetes.io/projected/fafbffd7-66c2-42ab-97c2-034a1fd6fd7c-kube-api-access-v5qjm\") pod \"test-operator-controller-manager-7cd8bc9dbb-9hmt6\" (UID: \"fafbffd7-66c2-42ab-97c2-034a1fd6fd7c\") " pod="openstack-operators/test-operator-controller-manager-7cd8bc9dbb-9hmt6" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.959111 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2z8zq\" (UniqueName: \"kubernetes.io/projected/8dea3161-8db9-4778-b59c-066f876ff6e2-kube-api-access-2z8zq\") pod \"watcher-operator-controller-manager-64cd966744-925vf\" (UID: \"8dea3161-8db9-4778-b59c-066f876ff6e2\") " pod="openstack-operators/watcher-operator-controller-manager-64cd966744-925vf" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.978652 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v5qjm\" (UniqueName: \"kubernetes.io/projected/fafbffd7-66c2-42ab-97c2-034a1fd6fd7c-kube-api-access-v5qjm\") pod \"test-operator-controller-manager-7cd8bc9dbb-9hmt6\" (UID: \"fafbffd7-66c2-42ab-97c2-034a1fd6fd7c\") " pod="openstack-operators/test-operator-controller-manager-7cd8bc9dbb-9hmt6" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.981473 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-78757b4889-t99dx" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.993840 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-5f8f495fcf-ldhxq" Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.994842 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-75bfd788c8-wflcw"] Jan 20 16:55:06 crc kubenswrapper[4558]: I0120 16:55:06.995772 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-75bfd788c8-wflcw" Jan 20 16:55:07 crc kubenswrapper[4558]: I0120 16:55:07.000591 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"metrics-server-cert" Jan 20 16:55:07 crc kubenswrapper[4558]: I0120 16:55:07.000788 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Jan 20 16:55:07 crc kubenswrapper[4558]: I0120 16:55:07.000919 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-wp7jw" Jan 20 16:55:07 crc kubenswrapper[4558]: I0120 16:55:07.008748 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-75bfd788c8-wflcw"] Jan 20 16:55:07 crc kubenswrapper[4558]: I0120 16:55:07.057478 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-qwwmt"] Jan 20 16:55:07 crc kubenswrapper[4558]: I0120 16:55:07.058223 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-qwwmt" Jan 20 16:55:07 crc kubenswrapper[4558]: I0120 16:55:07.060154 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-zzhhz" Jan 20 16:55:07 crc kubenswrapper[4558]: I0120 16:55:07.060519 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/deea5510-f570-48ac-b23f-bba261ef3951-metrics-certs\") pod \"openstack-operator-controller-manager-75bfd788c8-wflcw\" (UID: \"deea5510-f570-48ac-b23f-bba261ef3951\") " pod="openstack-operators/openstack-operator-controller-manager-75bfd788c8-wflcw" Jan 20 16:55:07 crc kubenswrapper[4558]: I0120 16:55:07.060623 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7sxlc\" (UniqueName: \"kubernetes.io/projected/deea5510-f570-48ac-b23f-bba261ef3951-kube-api-access-7sxlc\") pod \"openstack-operator-controller-manager-75bfd788c8-wflcw\" (UID: \"deea5510-f570-48ac-b23f-bba261ef3951\") " pod="openstack-operators/openstack-operator-controller-manager-75bfd788c8-wflcw" Jan 20 16:55:07 crc kubenswrapper[4558]: I0120 16:55:07.060727 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/deea5510-f570-48ac-b23f-bba261ef3951-webhook-certs\") pod \"openstack-operator-controller-manager-75bfd788c8-wflcw\" (UID: \"deea5510-f570-48ac-b23f-bba261ef3951\") " pod="openstack-operators/openstack-operator-controller-manager-75bfd788c8-wflcw" Jan 20 16:55:07 crc kubenswrapper[4558]: I0120 16:55:07.060789 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2z8zq\" (UniqueName: \"kubernetes.io/projected/8dea3161-8db9-4778-b59c-066f876ff6e2-kube-api-access-2z8zq\") pod \"watcher-operator-controller-manager-64cd966744-925vf\" (UID: \"8dea3161-8db9-4778-b59c-066f876ff6e2\") " pod="openstack-operators/watcher-operator-controller-manager-64cd966744-925vf" Jan 20 16:55:07 crc kubenswrapper[4558]: I0120 16:55:07.061033 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xm2kt\" (UniqueName: \"kubernetes.io/projected/af89563a-d3e9-4424-8fde-a0a77b21b7e8-kube-api-access-xm2kt\") pod \"rabbitmq-cluster-operator-manager-668c99d594-qwwmt\" (UID: \"af89563a-d3e9-4424-8fde-a0a77b21b7e8\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-qwwmt" Jan 20 16:55:07 crc kubenswrapper[4558]: I0120 16:55:07.061130 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2e9e43c0-674a-4769-b44b-7eb6ee1a69d5-cert\") pod \"infra-operator-controller-manager-77c48c7859-4f7km\" (UID: \"2e9e43c0-674a-4769-b44b-7eb6ee1a69d5\") " pod="openstack-operators/infra-operator-controller-manager-77c48c7859-4f7km" Jan 20 16:55:07 crc kubenswrapper[4558]: E0120 16:55:07.061328 4558 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Jan 20 16:55:07 crc kubenswrapper[4558]: E0120 16:55:07.061368 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/2e9e43c0-674a-4769-b44b-7eb6ee1a69d5-cert podName:2e9e43c0-674a-4769-b44b-7eb6ee1a69d5 nodeName:}" 
failed. No retries permitted until 2026-01-20 16:55:08.061356284 +0000 UTC m=+801.821694251 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/2e9e43c0-674a-4769-b44b-7eb6ee1a69d5-cert") pod "infra-operator-controller-manager-77c48c7859-4f7km" (UID: "2e9e43c0-674a-4769-b44b-7eb6ee1a69d5") : secret "infra-operator-webhook-server-cert" not found Jan 20 16:55:07 crc kubenswrapper[4558]: I0120 16:55:07.068273 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-qwwmt"] Jan 20 16:55:07 crc kubenswrapper[4558]: I0120 16:55:07.071725 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-cb4666565-6689s" Jan 20 16:55:07 crc kubenswrapper[4558]: I0120 16:55:07.080071 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2z8zq\" (UniqueName: \"kubernetes.io/projected/8dea3161-8db9-4778-b59c-066f876ff6e2-kube-api-access-2z8zq\") pod \"watcher-operator-controller-manager-64cd966744-925vf\" (UID: \"8dea3161-8db9-4778-b59c-066f876ff6e2\") " pod="openstack-operators/watcher-operator-controller-manager-64cd966744-925vf" Jan 20 16:55:07 crc kubenswrapper[4558]: I0120 16:55:07.121092 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-7cd8bc9dbb-9hmt6" Jan 20 16:55:07 crc kubenswrapper[4558]: I0120 16:55:07.162106 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xm2kt\" (UniqueName: \"kubernetes.io/projected/af89563a-d3e9-4424-8fde-a0a77b21b7e8-kube-api-access-xm2kt\") pod \"rabbitmq-cluster-operator-manager-668c99d594-qwwmt\" (UID: \"af89563a-d3e9-4424-8fde-a0a77b21b7e8\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-qwwmt" Jan 20 16:55:07 crc kubenswrapper[4558]: I0120 16:55:07.162297 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/deea5510-f570-48ac-b23f-bba261ef3951-metrics-certs\") pod \"openstack-operator-controller-manager-75bfd788c8-wflcw\" (UID: \"deea5510-f570-48ac-b23f-bba261ef3951\") " pod="openstack-operators/openstack-operator-controller-manager-75bfd788c8-wflcw" Jan 20 16:55:07 crc kubenswrapper[4558]: I0120 16:55:07.162386 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7sxlc\" (UniqueName: \"kubernetes.io/projected/deea5510-f570-48ac-b23f-bba261ef3951-kube-api-access-7sxlc\") pod \"openstack-operator-controller-manager-75bfd788c8-wflcw\" (UID: \"deea5510-f570-48ac-b23f-bba261ef3951\") " pod="openstack-operators/openstack-operator-controller-manager-75bfd788c8-wflcw" Jan 20 16:55:07 crc kubenswrapper[4558]: I0120 16:55:07.162533 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/deea5510-f570-48ac-b23f-bba261ef3951-webhook-certs\") pod \"openstack-operator-controller-manager-75bfd788c8-wflcw\" (UID: \"deea5510-f570-48ac-b23f-bba261ef3951\") " pod="openstack-operators/openstack-operator-controller-manager-75bfd788c8-wflcw" Jan 20 16:55:07 crc kubenswrapper[4558]: E0120 16:55:07.162667 4558 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Jan 20 16:55:07 crc kubenswrapper[4558]: E0120 16:55:07.162713 4558 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/deea5510-f570-48ac-b23f-bba261ef3951-webhook-certs podName:deea5510-f570-48ac-b23f-bba261ef3951 nodeName:}" failed. No retries permitted until 2026-01-20 16:55:07.662700909 +0000 UTC m=+801.423038876 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/deea5510-f570-48ac-b23f-bba261ef3951-webhook-certs") pod "openstack-operator-controller-manager-75bfd788c8-wflcw" (UID: "deea5510-f570-48ac-b23f-bba261ef3951") : secret "webhook-server-cert" not found Jan 20 16:55:07 crc kubenswrapper[4558]: E0120 16:55:07.163154 4558 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Jan 20 16:55:07 crc kubenswrapper[4558]: E0120 16:55:07.163227 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/deea5510-f570-48ac-b23f-bba261ef3951-metrics-certs podName:deea5510-f570-48ac-b23f-bba261ef3951 nodeName:}" failed. No retries permitted until 2026-01-20 16:55:07.663214023 +0000 UTC m=+801.423551990 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/deea5510-f570-48ac-b23f-bba261ef3951-metrics-certs") pod "openstack-operator-controller-manager-75bfd788c8-wflcw" (UID: "deea5510-f570-48ac-b23f-bba261ef3951") : secret "metrics-server-cert" not found Jan 20 16:55:07 crc kubenswrapper[4558]: I0120 16:55:07.182981 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xm2kt\" (UniqueName: \"kubernetes.io/projected/af89563a-d3e9-4424-8fde-a0a77b21b7e8-kube-api-access-xm2kt\") pod \"rabbitmq-cluster-operator-manager-668c99d594-qwwmt\" (UID: \"af89563a-d3e9-4424-8fde-a0a77b21b7e8\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-qwwmt" Jan 20 16:55:07 crc kubenswrapper[4558]: I0120 16:55:07.185998 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7sxlc\" (UniqueName: \"kubernetes.io/projected/deea5510-f570-48ac-b23f-bba261ef3951-kube-api-access-7sxlc\") pod \"openstack-operator-controller-manager-75bfd788c8-wflcw\" (UID: \"deea5510-f570-48ac-b23f-bba261ef3951\") " pod="openstack-operators/openstack-operator-controller-manager-75bfd788c8-wflcw" Jan 20 16:55:07 crc kubenswrapper[4558]: I0120 16:55:07.229461 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-64cd966744-925vf" Jan 20 16:55:07 crc kubenswrapper[4558]: I0120 16:55:07.257338 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-9b68f5989-mdfnr"] Jan 20 16:55:07 crc kubenswrapper[4558]: I0120 16:55:07.264294 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cbbd3f62-64b6-4cc2-8c3b-b21d317c0624-cert\") pod \"openstack-baremetal-operator-controller-manager-5b9875986dl6bbk\" (UID: \"cbbd3f62-64b6-4cc2-8c3b-b21d317c0624\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5b9875986dl6bbk" Jan 20 16:55:07 crc kubenswrapper[4558]: E0120 16:55:07.264552 4558 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 20 16:55:07 crc kubenswrapper[4558]: E0120 16:55:07.264595 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cbbd3f62-64b6-4cc2-8c3b-b21d317c0624-cert podName:cbbd3f62-64b6-4cc2-8c3b-b21d317c0624 nodeName:}" failed. No retries permitted until 2026-01-20 16:55:08.264583744 +0000 UTC m=+802.024921710 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/cbbd3f62-64b6-4cc2-8c3b-b21d317c0624-cert") pod "openstack-baremetal-operator-controller-manager-5b9875986dl6bbk" (UID: "cbbd3f62-64b6-4cc2-8c3b-b21d317c0624") : secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 20 16:55:07 crc kubenswrapper[4558]: I0120 16:55:07.271618 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-77d5c5b54f-gvd5f"] Jan 20 16:55:07 crc kubenswrapper[4558]: I0120 16:55:07.368142 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-594c8c9d5d-k28w4"] Jan 20 16:55:07 crc kubenswrapper[4558]: I0120 16:55:07.373462 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-9f958b845-q8wlk"] Jan 20 16:55:07 crc kubenswrapper[4558]: W0120 16:55:07.374972 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod88bca883_e84c_4223_80de_4c389d44f9fe.slice/crio-73315ccc650e02a7ee82add71d227bbb21cae3bc59495806b28f8083bed4bd15 WatchSource:0}: Error finding container 73315ccc650e02a7ee82add71d227bbb21cae3bc59495806b28f8083bed4bd15: Status 404 returned error can't find the container with id 73315ccc650e02a7ee82add71d227bbb21cae3bc59495806b28f8083bed4bd15 Jan 20 16:55:07 crc kubenswrapper[4558]: I0120 16:55:07.380476 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-c6994669c-78pts"] Jan 20 16:55:07 crc kubenswrapper[4558]: I0120 16:55:07.406549 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-qwwmt" Jan 20 16:55:07 crc kubenswrapper[4558]: I0120 16:55:07.515669 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-864f6b75bf-4jj8h"] Jan 20 16:55:07 crc kubenswrapper[4558]: I0120 16:55:07.542139 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-c87fff755-plg7f"] Jan 20 16:55:07 crc kubenswrapper[4558]: I0120 16:55:07.549818 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-7fc9b76cf6-9ld4x"] Jan 20 16:55:07 crc kubenswrapper[4558]: I0120 16:55:07.553826 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-65849867d6-js8vs"] Jan 20 16:55:07 crc kubenswrapper[4558]: I0120 16:55:07.558367 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-767fdc4f47-8mw4r"] Jan 20 16:55:07 crc kubenswrapper[4558]: I0120 16:55:07.639334 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-686df47fcb-tsdgq"] Jan 20 16:55:07 crc kubenswrapper[4558]: I0120 16:55:07.643746 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-55db956ddc-6j4g5"] Jan 20 16:55:07 crc kubenswrapper[4558]: E0120 16:55:07.653416 4558 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ovn-operator@sha256:8b3bfb9e86618b7ac69443939b0968fae28a22cd62ea1e429b599ff9f8a5f8cf,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-jwfxn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-55db956ddc-6j4g5_openstack-operators(1998e4dc-a1d4-405e-b0dd-546e1d5fed6f): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Jan 20 16:55:07 crc kubenswrapper[4558]: E0120 16:55:07.654657 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/ovn-operator-controller-manager-55db956ddc-6j4g5" podUID="1998e4dc-a1d4-405e-b0dd-546e1d5fed6f" Jan 20 16:55:07 crc kubenswrapper[4558]: I0120 16:55:07.660977 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-78757b4889-t99dx"] Jan 20 16:55:07 crc kubenswrapper[4558]: I0120 16:55:07.664538 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-cb4666565-6689s"] Jan 20 16:55:07 crc kubenswrapper[4558]: E0120 16:55:07.668364 4558 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/neutron-operator@sha256:0f440bf7dc937ce0135bdd328716686fd2f1320f453a9ac4e11e96383148ad6c,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-n7lck,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod neutron-operator-controller-manager-cb4666565-6689s_openstack-operators(f1a6abd3-1c13-40ca-8e20-344fd40bc348): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Jan 20 16:55:07 crc kubenswrapper[4558]: I0120 16:55:07.669963 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-85dd56d4cc-cvlzf"] Jan 20 16:55:07 crc kubenswrapper[4558]: E0120 16:55:07.669995 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/neutron-operator-controller-manager-cb4666565-6689s" podUID="f1a6abd3-1c13-40ca-8e20-344fd40bc348" Jan 20 16:55:07 crc kubenswrapper[4558]: I0120 16:55:07.671339 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/deea5510-f570-48ac-b23f-bba261ef3951-metrics-certs\") pod \"openstack-operator-controller-manager-75bfd788c8-wflcw\" (UID: \"deea5510-f570-48ac-b23f-bba261ef3951\") " pod="openstack-operators/openstack-operator-controller-manager-75bfd788c8-wflcw" Jan 20 16:55:07 crc kubenswrapper[4558]: I0120 16:55:07.671404 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/deea5510-f570-48ac-b23f-bba261ef3951-webhook-certs\") pod \"openstack-operator-controller-manager-75bfd788c8-wflcw\" (UID: \"deea5510-f570-48ac-b23f-bba261ef3951\") " pod="openstack-operators/openstack-operator-controller-manager-75bfd788c8-wflcw" Jan 20 16:55:07 crc kubenswrapper[4558]: E0120 16:55:07.671529 4558 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Jan 20 16:55:07 crc kubenswrapper[4558]: E0120 16:55:07.671563 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/deea5510-f570-48ac-b23f-bba261ef3951-webhook-certs podName:deea5510-f570-48ac-b23f-bba261ef3951 nodeName:}" failed. No retries permitted until 2026-01-20 16:55:08.671551857 +0000 UTC m=+802.431889825 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/deea5510-f570-48ac-b23f-bba261ef3951-webhook-certs") pod "openstack-operator-controller-manager-75bfd788c8-wflcw" (UID: "deea5510-f570-48ac-b23f-bba261ef3951") : secret "webhook-server-cert" not found Jan 20 16:55:07 crc kubenswrapper[4558]: E0120 16:55:07.671606 4558 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Jan 20 16:55:07 crc kubenswrapper[4558]: E0120 16:55:07.671626 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/deea5510-f570-48ac-b23f-bba261ef3951-metrics-certs podName:deea5510-f570-48ac-b23f-bba261ef3951 nodeName:}" failed. No retries permitted until 2026-01-20 16:55:08.671619985 +0000 UTC m=+802.431957952 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/deea5510-f570-48ac-b23f-bba261ef3951-metrics-certs") pod "openstack-operator-controller-manager-75bfd788c8-wflcw" (UID: "deea5510-f570-48ac-b23f-bba261ef3951") : secret "metrics-server-cert" not found Jan 20 16:55:07 crc kubenswrapper[4558]: E0120 16:55:07.688346 4558 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:9404536bf7cb7c3818e1a0f92b53e4d7c02fe7942324f32894106f02f8fc7e92,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-99gtz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
swift-operator-controller-manager-85dd56d4cc-cvlzf_openstack-operators(56c68b3c-13ac-4e77-a8a5-bd99d83d5667): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Jan 20 16:55:07 crc kubenswrapper[4558]: E0120 16:55:07.691070 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/swift-operator-controller-manager-85dd56d4cc-cvlzf" podUID="56c68b3c-13ac-4e77-a8a5-bd99d83d5667" Jan 20 16:55:07 crc kubenswrapper[4558]: W0120 16:55:07.792553 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8dea3161_8db9_4778_b59c_066f876ff6e2.slice/crio-39933de3fbfb847d356269e4650840d03a83b8f8833b334f119f985f25844f20 WatchSource:0}: Error finding container 39933de3fbfb847d356269e4650840d03a83b8f8833b334f119f985f25844f20: Status 404 returned error can't find the container with id 39933de3fbfb847d356269e4650840d03a83b8f8833b334f119f985f25844f20 Jan 20 16:55:07 crc kubenswrapper[4558]: I0120 16:55:07.793055 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-64cd966744-925vf"] Jan 20 16:55:07 crc kubenswrapper[4558]: E0120 16:55:07.795218 4558 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:d687150a46d97eb382dcd8305a2a611943af74771debe1fa9cc13a21e51c69ad,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-2z8zq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-64cd966744-925vf_openstack-operators(8dea3161-8db9-4778-b59c-066f876ff6e2): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Jan 20 16:55:07 crc kubenswrapper[4558]: E0120 16:55:07.796375 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/watcher-operator-controller-manager-64cd966744-925vf" podUID="8dea3161-8db9-4778-b59c-066f876ff6e2" Jan 20 16:55:07 crc kubenswrapper[4558]: I0120 16:55:07.803520 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-7cd8bc9dbb-9hmt6"] Jan 20 16:55:07 crc kubenswrapper[4558]: W0120 16:55:07.805651 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podda27b90a_7f65_4d58_92f1_d46b0d92fd79.slice/crio-d1faba4ee13a7bafb0b75d449fbc83fa7cb71005eb76b1ecd5bb12da92401fdd WatchSource:0}: Error finding container d1faba4ee13a7bafb0b75d449fbc83fa7cb71005eb76b1ecd5bb12da92401fdd: Status 404 returned error can't find the container with id d1faba4ee13a7bafb0b75d449fbc83fa7cb71005eb76b1ecd5bb12da92401fdd Jan 20 16:55:07 crc kubenswrapper[4558]: W0120 16:55:07.806123 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfafbffd7_66c2_42ab_97c2_034a1fd6fd7c.slice/crio-a769665ebdc91c1821d741cf2a08f672ea2daed89255a87c446061803b18b4e1 WatchSource:0}: Error finding container a769665ebdc91c1821d741cf2a08f672ea2daed89255a87c446061803b18b4e1: Status 404 returned error can't find the container with id a769665ebdc91c1821d741cf2a08f672ea2daed89255a87c446061803b18b4e1 Jan 20 16:55:07 crc kubenswrapper[4558]: W0120 16:55:07.806941 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaf89563a_d3e9_4424_8fde_a0a77b21b7e8.slice/crio-95e192423190c4b02745003348fc10127faf0848aab43a357cad7726917e1dae WatchSource:0}: Error finding container 95e192423190c4b02745003348fc10127faf0848aab43a357cad7726917e1dae: Status 404 returned error can't find the container with id 95e192423190c4b02745003348fc10127faf0848aab43a357cad7726917e1dae Jan 20 16:55:07 crc kubenswrapper[4558]: I0120 16:55:07.807050 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-5f8f495fcf-ldhxq"] Jan 20 16:55:07 crc kubenswrapper[4558]: E0120 16:55:07.807048 4558 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:2e89109f5db66abf1afd15ef59bda35a53db40c5e59e020579ac5aa0acea1843,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-qnxsc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-5f8f495fcf-ldhxq_openstack-operators(da27b90a-7f65-4d58-92f1-d46b0d92fd79): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Jan 20 16:55:07 crc kubenswrapper[4558]: E0120 16:55:07.808127 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/telemetry-operator-controller-manager-5f8f495fcf-ldhxq" podUID="da27b90a-7f65-4d58-92f1-d46b0d92fd79" Jan 20 16:55:07 crc kubenswrapper[4558]: E0120 16:55:07.808113 4558 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:244a4906353b84899db16a89e1ebb64491c9f85e69327cb2a72b6da0142a6e5e,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m 
DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-v5qjm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-7cd8bc9dbb-9hmt6_openstack-operators(fafbffd7-66c2-42ab-97c2-034a1fd6fd7c): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Jan 20 16:55:07 crc kubenswrapper[4558]: E0120 16:55:07.808950 4558 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-xm2kt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-668c99d594-qwwmt_openstack-operators(af89563a-d3e9-4424-8fde-a0a77b21b7e8): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Jan 20 16:55:07 crc kubenswrapper[4558]: E0120 16:55:07.810178 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-qwwmt" podUID="af89563a-d3e9-4424-8fde-a0a77b21b7e8" Jan 20 16:55:07 crc kubenswrapper[4558]: E0120 16:55:07.810215 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/test-operator-controller-manager-7cd8bc9dbb-9hmt6" podUID="fafbffd7-66c2-42ab-97c2-034a1fd6fd7c" Jan 20 16:55:07 crc kubenswrapper[4558]: I0120 16:55:07.813396 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-qwwmt"] Jan 20 16:55:07 crc kubenswrapper[4558]: I0120 16:55:07.829746 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-9b68f5989-mdfnr" event={"ID":"973549d4-6a26-4166-8be0-b0dfb7c5aec6","Type":"ContainerStarted","Data":"d5fe121897b5174786c6780029aabcd803d789144d4124c7ce2486f21b7ab7ff"} Jan 20 16:55:07 crc kubenswrapper[4558]: I0120 16:55:07.830533 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-686df47fcb-tsdgq" event={"ID":"0292902b-f0ca-4abc-b220-4e3268243db5","Type":"ContainerStarted","Data":"0bd39ffb8d125a629ba0fa7a8715c8589b3898686eb6047476712f2e5529161f"} Jan 20 16:55:07 crc kubenswrapper[4558]: I0120 16:55:07.831439 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-gvd5f" event={"ID":"31cd4c27-d472-4406-886a-7222315c465d","Type":"ContainerStarted","Data":"4e4469f7470b01017b7ae12b8d06566a27bf4f549e470c52d36752cc814826b0"} Jan 20 16:55:07 crc kubenswrapper[4558]: I0120 16:55:07.832352 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-c6994669c-78pts" event={"ID":"e645ac52-b990-4b3f-a282-59ce60cacaff","Type":"ContainerStarted","Data":"27056ae81fe7826da1d6fa9f6f1d8e788a8eb91d8889eda00b46f36834734ebc"} Jan 20 16:55:07 crc kubenswrapper[4558]: I0120 16:55:07.833129 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7ddb5c749-jrgz7" 
event={"ID":"1c12cb14-b8f1-4939-84d9-6a6b851f095a","Type":"ContainerStarted","Data":"67d4063bebd7d0fadaacf160cdd38e5b88088d0299d04b5f060250c8e827bf92"} Jan 20 16:55:07 crc kubenswrapper[4558]: I0120 16:55:07.833930 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-9f958b845-q8wlk" event={"ID":"54c6bd49-d022-40c6-b547-58d3eb5ba7e2","Type":"ContainerStarted","Data":"1d2622698d065c909f6269ea676cdd70f3bf297ed4f93892642244f06a392f55"} Jan 20 16:55:07 crc kubenswrapper[4558]: I0120 16:55:07.834879 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-qwwmt" event={"ID":"af89563a-d3e9-4424-8fde-a0a77b21b7e8","Type":"ContainerStarted","Data":"95e192423190c4b02745003348fc10127faf0848aab43a357cad7726917e1dae"} Jan 20 16:55:07 crc kubenswrapper[4558]: E0120 16:55:07.835973 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-qwwmt" podUID="af89563a-d3e9-4424-8fde-a0a77b21b7e8" Jan 20 16:55:07 crc kubenswrapper[4558]: I0120 16:55:07.836193 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-64cd966744-925vf" event={"ID":"8dea3161-8db9-4778-b59c-066f876ff6e2","Type":"ContainerStarted","Data":"39933de3fbfb847d356269e4650840d03a83b8f8833b334f119f985f25844f20"} Jan 20 16:55:07 crc kubenswrapper[4558]: E0120 16:55:07.837106 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:d687150a46d97eb382dcd8305a2a611943af74771debe1fa9cc13a21e51c69ad\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-64cd966744-925vf" podUID="8dea3161-8db9-4778-b59c-066f876ff6e2" Jan 20 16:55:07 crc kubenswrapper[4558]: I0120 16:55:07.837367 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-85dd56d4cc-cvlzf" event={"ID":"56c68b3c-13ac-4e77-a8a5-bd99d83d5667","Type":"ContainerStarted","Data":"ec9c70089d261c58fc10859ab6e45e365d0e82bb520f8ec6f1aaa63937d5d418"} Jan 20 16:55:07 crc kubenswrapper[4558]: I0120 16:55:07.840409 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-c87fff755-plg7f" event={"ID":"dba009f4-faa1-4d6b-bc19-c00eedd6c7c7","Type":"ContainerStarted","Data":"4b45e48a7b0a0cdd1b20556b57b0790aacc8fc228006c34bc740ab81af9229d7"} Jan 20 16:55:07 crc kubenswrapper[4558]: E0120 16:55:07.840404 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:9404536bf7cb7c3818e1a0f92b53e4d7c02fe7942324f32894106f02f8fc7e92\\\"\"" pod="openstack-operators/swift-operator-controller-manager-85dd56d4cc-cvlzf" podUID="56c68b3c-13ac-4e77-a8a5-bd99d83d5667" Jan 20 16:55:07 crc kubenswrapper[4558]: I0120 16:55:07.842237 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-5f8f495fcf-ldhxq" 
event={"ID":"da27b90a-7f65-4d58-92f1-d46b0d92fd79","Type":"ContainerStarted","Data":"d1faba4ee13a7bafb0b75d449fbc83fa7cb71005eb76b1ecd5bb12da92401fdd"} Jan 20 16:55:07 crc kubenswrapper[4558]: E0120 16:55:07.843234 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:2e89109f5db66abf1afd15ef59bda35a53db40c5e59e020579ac5aa0acea1843\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-5f8f495fcf-ldhxq" podUID="da27b90a-7f65-4d58-92f1-d46b0d92fd79" Jan 20 16:55:07 crc kubenswrapper[4558]: I0120 16:55:07.843624 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-55db956ddc-6j4g5" event={"ID":"1998e4dc-a1d4-405e-b0dd-546e1d5fed6f","Type":"ContainerStarted","Data":"6527f64ed52e3ab43a1eb08a98213cc0550684dd7dd545297a441c95f1ebba4a"} Jan 20 16:55:07 crc kubenswrapper[4558]: E0120 16:55:07.845230 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:8b3bfb9e86618b7ac69443939b0968fae28a22cd62ea1e429b599ff9f8a5f8cf\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-55db956ddc-6j4g5" podUID="1998e4dc-a1d4-405e-b0dd-546e1d5fed6f" Jan 20 16:55:07 crc kubenswrapper[4558]: I0120 16:55:07.846521 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-78757b4889-t99dx" event={"ID":"e7054184-22f8-43b2-b75f-0534d5bb467f","Type":"ContainerStarted","Data":"fffa13f3c9140100511254269a251a0171968943c09935e7c3f8c50dc0d6f823"} Jan 20 16:55:07 crc kubenswrapper[4558]: I0120 16:55:07.848131 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-k28w4" event={"ID":"88bca883-e84c-4223-80de-4c389d44f9fe","Type":"ContainerStarted","Data":"73315ccc650e02a7ee82add71d227bbb21cae3bc59495806b28f8083bed4bd15"} Jan 20 16:55:07 crc kubenswrapper[4558]: I0120 16:55:07.849550 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-cb4666565-6689s" event={"ID":"f1a6abd3-1c13-40ca-8e20-344fd40bc348","Type":"ContainerStarted","Data":"2668f4beeb53773d859e9276acc935b99cc710342eafa7af9f148e1a8c4e4023"} Jan 20 16:55:07 crc kubenswrapper[4558]: E0120 16:55:07.850842 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/neutron-operator@sha256:0f440bf7dc937ce0135bdd328716686fd2f1320f453a9ac4e11e96383148ad6c\\\"\"" pod="openstack-operators/neutron-operator-controller-manager-cb4666565-6689s" podUID="f1a6abd3-1c13-40ca-8e20-344fd40bc348" Jan 20 16:55:07 crc kubenswrapper[4558]: I0120 16:55:07.851569 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-767fdc4f47-8mw4r" event={"ID":"115bb848-a715-4eac-8993-63842460dd3d","Type":"ContainerStarted","Data":"c938c07e7c4489351287def83b4a4be818302d0370e36a5423e6dd9a1d8f4159"} Jan 20 16:55:07 crc kubenswrapper[4558]: I0120 16:55:07.852324 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-864f6b75bf-4jj8h" 
event={"ID":"6e337f3c-6900-4314-adbb-aec361ccb7cc","Type":"ContainerStarted","Data":"799f8d0787755ffca063fe9cd0642def285cace0fcb26e687d0fa1a649496264"} Jan 20 16:55:07 crc kubenswrapper[4558]: I0120 16:55:07.853734 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-7fc9b76cf6-9ld4x" event={"ID":"aec45707-2c02-43bb-b7d9-a24b906cadca","Type":"ContainerStarted","Data":"a78147227d827b6853f6a956387bcca6c3b48e18eec67855bdb5107d778025ef"} Jan 20 16:55:07 crc kubenswrapper[4558]: I0120 16:55:07.864106 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-7cd8bc9dbb-9hmt6" event={"ID":"fafbffd7-66c2-42ab-97c2-034a1fd6fd7c","Type":"ContainerStarted","Data":"a769665ebdc91c1821d741cf2a08f672ea2daed89255a87c446061803b18b4e1"} Jan 20 16:55:07 crc kubenswrapper[4558]: I0120 16:55:07.866657 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-65849867d6-js8vs" event={"ID":"42eb153b-1142-4dc6-9e34-b4cc38e49701","Type":"ContainerStarted","Data":"0355fea3cd6f04ab4c1fb797c9af673336ce1d3cbdc25889bdc0212ac8bd9e76"} Jan 20 16:55:07 crc kubenswrapper[4558]: E0120 16:55:07.866805 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:244a4906353b84899db16a89e1ebb64491c9f85e69327cb2a72b6da0142a6e5e\\\"\"" pod="openstack-operators/test-operator-controller-manager-7cd8bc9dbb-9hmt6" podUID="fafbffd7-66c2-42ab-97c2-034a1fd6fd7c" Jan 20 16:55:08 crc kubenswrapper[4558]: I0120 16:55:08.076709 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2e9e43c0-674a-4769-b44b-7eb6ee1a69d5-cert\") pod \"infra-operator-controller-manager-77c48c7859-4f7km\" (UID: \"2e9e43c0-674a-4769-b44b-7eb6ee1a69d5\") " pod="openstack-operators/infra-operator-controller-manager-77c48c7859-4f7km" Jan 20 16:55:08 crc kubenswrapper[4558]: E0120 16:55:08.076844 4558 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Jan 20 16:55:08 crc kubenswrapper[4558]: E0120 16:55:08.076897 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/2e9e43c0-674a-4769-b44b-7eb6ee1a69d5-cert podName:2e9e43c0-674a-4769-b44b-7eb6ee1a69d5 nodeName:}" failed. No retries permitted until 2026-01-20 16:55:10.076883235 +0000 UTC m=+803.837221201 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/2e9e43c0-674a-4769-b44b-7eb6ee1a69d5-cert") pod "infra-operator-controller-manager-77c48c7859-4f7km" (UID: "2e9e43c0-674a-4769-b44b-7eb6ee1a69d5") : secret "infra-operator-webhook-server-cert" not found Jan 20 16:55:08 crc kubenswrapper[4558]: I0120 16:55:08.280902 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cbbd3f62-64b6-4cc2-8c3b-b21d317c0624-cert\") pod \"openstack-baremetal-operator-controller-manager-5b9875986dl6bbk\" (UID: \"cbbd3f62-64b6-4cc2-8c3b-b21d317c0624\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5b9875986dl6bbk" Jan 20 16:55:08 crc kubenswrapper[4558]: E0120 16:55:08.281054 4558 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 20 16:55:08 crc kubenswrapper[4558]: E0120 16:55:08.281252 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cbbd3f62-64b6-4cc2-8c3b-b21d317c0624-cert podName:cbbd3f62-64b6-4cc2-8c3b-b21d317c0624 nodeName:}" failed. No retries permitted until 2026-01-20 16:55:10.281239216 +0000 UTC m=+804.041577173 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/cbbd3f62-64b6-4cc2-8c3b-b21d317c0624-cert") pod "openstack-baremetal-operator-controller-manager-5b9875986dl6bbk" (UID: "cbbd3f62-64b6-4cc2-8c3b-b21d317c0624") : secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 20 16:55:08 crc kubenswrapper[4558]: I0120 16:55:08.684615 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/deea5510-f570-48ac-b23f-bba261ef3951-metrics-certs\") pod \"openstack-operator-controller-manager-75bfd788c8-wflcw\" (UID: \"deea5510-f570-48ac-b23f-bba261ef3951\") " pod="openstack-operators/openstack-operator-controller-manager-75bfd788c8-wflcw" Jan 20 16:55:08 crc kubenswrapper[4558]: I0120 16:55:08.684691 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/deea5510-f570-48ac-b23f-bba261ef3951-webhook-certs\") pod \"openstack-operator-controller-manager-75bfd788c8-wflcw\" (UID: \"deea5510-f570-48ac-b23f-bba261ef3951\") " pod="openstack-operators/openstack-operator-controller-manager-75bfd788c8-wflcw" Jan 20 16:55:08 crc kubenswrapper[4558]: E0120 16:55:08.684748 4558 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Jan 20 16:55:08 crc kubenswrapper[4558]: E0120 16:55:08.684761 4558 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Jan 20 16:55:08 crc kubenswrapper[4558]: E0120 16:55:08.684807 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/deea5510-f570-48ac-b23f-bba261ef3951-metrics-certs podName:deea5510-f570-48ac-b23f-bba261ef3951 nodeName:}" failed. No retries permitted until 2026-01-20 16:55:10.684793852 +0000 UTC m=+804.445131819 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/deea5510-f570-48ac-b23f-bba261ef3951-metrics-certs") pod "openstack-operator-controller-manager-75bfd788c8-wflcw" (UID: "deea5510-f570-48ac-b23f-bba261ef3951") : secret "metrics-server-cert" not found Jan 20 16:55:08 crc kubenswrapper[4558]: E0120 16:55:08.684822 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/deea5510-f570-48ac-b23f-bba261ef3951-webhook-certs podName:deea5510-f570-48ac-b23f-bba261ef3951 nodeName:}" failed. No retries permitted until 2026-01-20 16:55:10.684815683 +0000 UTC m=+804.445153651 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/deea5510-f570-48ac-b23f-bba261ef3951-webhook-certs") pod "openstack-operator-controller-manager-75bfd788c8-wflcw" (UID: "deea5510-f570-48ac-b23f-bba261ef3951") : secret "webhook-server-cert" not found Jan 20 16:55:08 crc kubenswrapper[4558]: E0120 16:55:08.873450 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:244a4906353b84899db16a89e1ebb64491c9f85e69327cb2a72b6da0142a6e5e\\\"\"" pod="openstack-operators/test-operator-controller-manager-7cd8bc9dbb-9hmt6" podUID="fafbffd7-66c2-42ab-97c2-034a1fd6fd7c" Jan 20 16:55:08 crc kubenswrapper[4558]: E0120 16:55:08.873966 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:9404536bf7cb7c3818e1a0f92b53e4d7c02fe7942324f32894106f02f8fc7e92\\\"\"" pod="openstack-operators/swift-operator-controller-manager-85dd56d4cc-cvlzf" podUID="56c68b3c-13ac-4e77-a8a5-bd99d83d5667" Jan 20 16:55:08 crc kubenswrapper[4558]: E0120 16:55:08.874781 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:d687150a46d97eb382dcd8305a2a611943af74771debe1fa9cc13a21e51c69ad\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-64cd966744-925vf" podUID="8dea3161-8db9-4778-b59c-066f876ff6e2" Jan 20 16:55:08 crc kubenswrapper[4558]: E0120 16:55:08.874888 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/neutron-operator@sha256:0f440bf7dc937ce0135bdd328716686fd2f1320f453a9ac4e11e96383148ad6c\\\"\"" pod="openstack-operators/neutron-operator-controller-manager-cb4666565-6689s" podUID="f1a6abd3-1c13-40ca-8e20-344fd40bc348" Jan 20 16:55:08 crc kubenswrapper[4558]: E0120 16:55:08.874938 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:2e89109f5db66abf1afd15ef59bda35a53db40c5e59e020579ac5aa0acea1843\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-5f8f495fcf-ldhxq" podUID="da27b90a-7f65-4d58-92f1-d46b0d92fd79" Jan 20 16:55:08 crc kubenswrapper[4558]: E0120 16:55:08.875075 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image 
\\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-qwwmt" podUID="af89563a-d3e9-4424-8fde-a0a77b21b7e8" Jan 20 16:55:08 crc kubenswrapper[4558]: E0120 16:55:08.875478 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:8b3bfb9e86618b7ac69443939b0968fae28a22cd62ea1e429b599ff9f8a5f8cf\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-55db956ddc-6j4g5" podUID="1998e4dc-a1d4-405e-b0dd-546e1d5fed6f" Jan 20 16:55:10 crc kubenswrapper[4558]: I0120 16:55:10.102131 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2e9e43c0-674a-4769-b44b-7eb6ee1a69d5-cert\") pod \"infra-operator-controller-manager-77c48c7859-4f7km\" (UID: \"2e9e43c0-674a-4769-b44b-7eb6ee1a69d5\") " pod="openstack-operators/infra-operator-controller-manager-77c48c7859-4f7km" Jan 20 16:55:10 crc kubenswrapper[4558]: E0120 16:55:10.102315 4558 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Jan 20 16:55:10 crc kubenswrapper[4558]: E0120 16:55:10.102395 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/2e9e43c0-674a-4769-b44b-7eb6ee1a69d5-cert podName:2e9e43c0-674a-4769-b44b-7eb6ee1a69d5 nodeName:}" failed. No retries permitted until 2026-01-20 16:55:14.102378255 +0000 UTC m=+807.862716222 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/2e9e43c0-674a-4769-b44b-7eb6ee1a69d5-cert") pod "infra-operator-controller-manager-77c48c7859-4f7km" (UID: "2e9e43c0-674a-4769-b44b-7eb6ee1a69d5") : secret "infra-operator-webhook-server-cert" not found Jan 20 16:55:10 crc kubenswrapper[4558]: I0120 16:55:10.305045 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cbbd3f62-64b6-4cc2-8c3b-b21d317c0624-cert\") pod \"openstack-baremetal-operator-controller-manager-5b9875986dl6bbk\" (UID: \"cbbd3f62-64b6-4cc2-8c3b-b21d317c0624\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5b9875986dl6bbk" Jan 20 16:55:10 crc kubenswrapper[4558]: E0120 16:55:10.305241 4558 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 20 16:55:10 crc kubenswrapper[4558]: E0120 16:55:10.305415 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cbbd3f62-64b6-4cc2-8c3b-b21d317c0624-cert podName:cbbd3f62-64b6-4cc2-8c3b-b21d317c0624 nodeName:}" failed. No retries permitted until 2026-01-20 16:55:14.305398244 +0000 UTC m=+808.065736211 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/cbbd3f62-64b6-4cc2-8c3b-b21d317c0624-cert") pod "openstack-baremetal-operator-controller-manager-5b9875986dl6bbk" (UID: "cbbd3f62-64b6-4cc2-8c3b-b21d317c0624") : secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 20 16:55:10 crc kubenswrapper[4558]: I0120 16:55:10.714213 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/deea5510-f570-48ac-b23f-bba261ef3951-metrics-certs\") pod \"openstack-operator-controller-manager-75bfd788c8-wflcw\" (UID: \"deea5510-f570-48ac-b23f-bba261ef3951\") " pod="openstack-operators/openstack-operator-controller-manager-75bfd788c8-wflcw" Jan 20 16:55:10 crc kubenswrapper[4558]: I0120 16:55:10.714343 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/deea5510-f570-48ac-b23f-bba261ef3951-webhook-certs\") pod \"openstack-operator-controller-manager-75bfd788c8-wflcw\" (UID: \"deea5510-f570-48ac-b23f-bba261ef3951\") " pod="openstack-operators/openstack-operator-controller-manager-75bfd788c8-wflcw" Jan 20 16:55:10 crc kubenswrapper[4558]: E0120 16:55:10.714438 4558 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Jan 20 16:55:10 crc kubenswrapper[4558]: E0120 16:55:10.714474 4558 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Jan 20 16:55:10 crc kubenswrapper[4558]: E0120 16:55:10.714530 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/deea5510-f570-48ac-b23f-bba261ef3951-metrics-certs podName:deea5510-f570-48ac-b23f-bba261ef3951 nodeName:}" failed. No retries permitted until 2026-01-20 16:55:14.714489971 +0000 UTC m=+808.474827937 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/deea5510-f570-48ac-b23f-bba261ef3951-metrics-certs") pod "openstack-operator-controller-manager-75bfd788c8-wflcw" (UID: "deea5510-f570-48ac-b23f-bba261ef3951") : secret "metrics-server-cert" not found Jan 20 16:55:10 crc kubenswrapper[4558]: E0120 16:55:10.714548 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/deea5510-f570-48ac-b23f-bba261ef3951-webhook-certs podName:deea5510-f570-48ac-b23f-bba261ef3951 nodeName:}" failed. No retries permitted until 2026-01-20 16:55:14.714541888 +0000 UTC m=+808.474879855 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/deea5510-f570-48ac-b23f-bba261ef3951-webhook-certs") pod "openstack-operator-controller-manager-75bfd788c8-wflcw" (UID: "deea5510-f570-48ac-b23f-bba261ef3951") : secret "webhook-server-cert" not found Jan 20 16:55:14 crc kubenswrapper[4558]: I0120 16:55:14.159734 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2e9e43c0-674a-4769-b44b-7eb6ee1a69d5-cert\") pod \"infra-operator-controller-manager-77c48c7859-4f7km\" (UID: \"2e9e43c0-674a-4769-b44b-7eb6ee1a69d5\") " pod="openstack-operators/infra-operator-controller-manager-77c48c7859-4f7km" Jan 20 16:55:14 crc kubenswrapper[4558]: E0120 16:55:14.159904 4558 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Jan 20 16:55:14 crc kubenswrapper[4558]: E0120 16:55:14.160010 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/2e9e43c0-674a-4769-b44b-7eb6ee1a69d5-cert podName:2e9e43c0-674a-4769-b44b-7eb6ee1a69d5 nodeName:}" failed. No retries permitted until 2026-01-20 16:55:22.159995735 +0000 UTC m=+815.920333702 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/2e9e43c0-674a-4769-b44b-7eb6ee1a69d5-cert") pod "infra-operator-controller-manager-77c48c7859-4f7km" (UID: "2e9e43c0-674a-4769-b44b-7eb6ee1a69d5") : secret "infra-operator-webhook-server-cert" not found Jan 20 16:55:14 crc kubenswrapper[4558]: I0120 16:55:14.361558 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cbbd3f62-64b6-4cc2-8c3b-b21d317c0624-cert\") pod \"openstack-baremetal-operator-controller-manager-5b9875986dl6bbk\" (UID: \"cbbd3f62-64b6-4cc2-8c3b-b21d317c0624\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5b9875986dl6bbk" Jan 20 16:55:14 crc kubenswrapper[4558]: E0120 16:55:14.361740 4558 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 20 16:55:14 crc kubenswrapper[4558]: E0120 16:55:14.361810 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cbbd3f62-64b6-4cc2-8c3b-b21d317c0624-cert podName:cbbd3f62-64b6-4cc2-8c3b-b21d317c0624 nodeName:}" failed. No retries permitted until 2026-01-20 16:55:22.361793947 +0000 UTC m=+816.122131914 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/cbbd3f62-64b6-4cc2-8c3b-b21d317c0624-cert") pod "openstack-baremetal-operator-controller-manager-5b9875986dl6bbk" (UID: "cbbd3f62-64b6-4cc2-8c3b-b21d317c0624") : secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 20 16:55:14 crc kubenswrapper[4558]: I0120 16:55:14.767352 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/deea5510-f570-48ac-b23f-bba261ef3951-webhook-certs\") pod \"openstack-operator-controller-manager-75bfd788c8-wflcw\" (UID: \"deea5510-f570-48ac-b23f-bba261ef3951\") " pod="openstack-operators/openstack-operator-controller-manager-75bfd788c8-wflcw" Jan 20 16:55:14 crc kubenswrapper[4558]: E0120 16:55:14.767522 4558 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Jan 20 16:55:14 crc kubenswrapper[4558]: I0120 16:55:14.767553 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/deea5510-f570-48ac-b23f-bba261ef3951-metrics-certs\") pod \"openstack-operator-controller-manager-75bfd788c8-wflcw\" (UID: \"deea5510-f570-48ac-b23f-bba261ef3951\") " pod="openstack-operators/openstack-operator-controller-manager-75bfd788c8-wflcw" Jan 20 16:55:14 crc kubenswrapper[4558]: E0120 16:55:14.767590 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/deea5510-f570-48ac-b23f-bba261ef3951-webhook-certs podName:deea5510-f570-48ac-b23f-bba261ef3951 nodeName:}" failed. No retries permitted until 2026-01-20 16:55:22.767574459 +0000 UTC m=+816.527912436 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/deea5510-f570-48ac-b23f-bba261ef3951-webhook-certs") pod "openstack-operator-controller-manager-75bfd788c8-wflcw" (UID: "deea5510-f570-48ac-b23f-bba261ef3951") : secret "webhook-server-cert" not found Jan 20 16:55:14 crc kubenswrapper[4558]: E0120 16:55:14.767726 4558 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Jan 20 16:55:14 crc kubenswrapper[4558]: E0120 16:55:14.767805 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/deea5510-f570-48ac-b23f-bba261ef3951-metrics-certs podName:deea5510-f570-48ac-b23f-bba261ef3951 nodeName:}" failed. No retries permitted until 2026-01-20 16:55:22.76778774 +0000 UTC m=+816.528125717 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/deea5510-f570-48ac-b23f-bba261ef3951-metrics-certs") pod "openstack-operator-controller-manager-75bfd788c8-wflcw" (UID: "deea5510-f570-48ac-b23f-bba261ef3951") : secret "metrics-server-cert" not found Jan 20 16:55:15 crc kubenswrapper[4558]: I0120 16:55:15.908442 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-767fdc4f47-8mw4r" event={"ID":"115bb848-a715-4eac-8993-63842460dd3d","Type":"ContainerStarted","Data":"f1506f0ee5ae0e37dc7c3f1bab9db3a2151ee378ef978f06310c406e3b62a13b"} Jan 20 16:55:15 crc kubenswrapper[4558]: I0120 16:55:15.909516 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-767fdc4f47-8mw4r" Jan 20 16:55:15 crc kubenswrapper[4558]: I0120 16:55:15.915443 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-c6994669c-78pts" event={"ID":"e645ac52-b990-4b3f-a282-59ce60cacaff","Type":"ContainerStarted","Data":"d569da36da81b9b9b27be2b8f5c9a33c02392ee1c64ee171850ebd6b90d34bc4"} Jan 20 16:55:15 crc kubenswrapper[4558]: I0120 16:55:15.915851 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-c6994669c-78pts" Jan 20 16:55:15 crc kubenswrapper[4558]: I0120 16:55:15.923985 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-7fc9b76cf6-9ld4x" event={"ID":"aec45707-2c02-43bb-b7d9-a24b906cadca","Type":"ContainerStarted","Data":"9b5a1aa8966e4b09819fa1a3e8844f0277e5fd276937d6ef5896353e424ac13b"} Jan 20 16:55:15 crc kubenswrapper[4558]: I0120 16:55:15.924471 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-7fc9b76cf6-9ld4x" Jan 20 16:55:15 crc kubenswrapper[4558]: I0120 16:55:15.931464 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-c87fff755-plg7f" event={"ID":"dba009f4-faa1-4d6b-bc19-c00eedd6c7c7","Type":"ContainerStarted","Data":"b21d03a1bce545072a0f5d40d609ff0a56267d13add877890bddf1f91228ba7f"} Jan 20 16:55:15 crc kubenswrapper[4558]: I0120 16:55:15.931844 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-c87fff755-plg7f" Jan 20 16:55:15 crc kubenswrapper[4558]: I0120 16:55:15.933142 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-9f958b845-q8wlk" event={"ID":"54c6bd49-d022-40c6-b547-58d3eb5ba7e2","Type":"ContainerStarted","Data":"ecb3d0f3bf1d477d3fea81ea29ae6dbfb39eff4f2f6120f21ae3ef0a4efba324"} Jan 20 16:55:15 crc kubenswrapper[4558]: I0120 16:55:15.933572 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-9f958b845-q8wlk" Jan 20 16:55:15 crc kubenswrapper[4558]: I0120 16:55:15.934657 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-9b68f5989-mdfnr" event={"ID":"973549d4-6a26-4166-8be0-b0dfb7c5aec6","Type":"ContainerStarted","Data":"6aa6ab1b79913f16a8c37d31d4f54307176b8cfa305a050b83890240a717629c"} Jan 20 16:55:15 crc kubenswrapper[4558]: I0120 16:55:15.934994 4558 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-9b68f5989-mdfnr" Jan 20 16:55:15 crc kubenswrapper[4558]: I0120 16:55:15.935856 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-686df47fcb-tsdgq" event={"ID":"0292902b-f0ca-4abc-b220-4e3268243db5","Type":"ContainerStarted","Data":"63231d660c87582107f72247293a009f16107c11a93e6a345163da171fa24b3e"} Jan 20 16:55:15 crc kubenswrapper[4558]: I0120 16:55:15.936217 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-686df47fcb-tsdgq" Jan 20 16:55:15 crc kubenswrapper[4558]: I0120 16:55:15.937126 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-78757b4889-t99dx" event={"ID":"e7054184-22f8-43b2-b75f-0534d5bb467f","Type":"ContainerStarted","Data":"50d9b3907efdf86bfbc435a6c7984f8ffc06ca5383c5f947cc9ccf10c522de7b"} Jan 20 16:55:15 crc kubenswrapper[4558]: I0120 16:55:15.937444 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-78757b4889-t99dx" Jan 20 16:55:15 crc kubenswrapper[4558]: I0120 16:55:15.938450 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-k28w4" event={"ID":"88bca883-e84c-4223-80de-4c389d44f9fe","Type":"ContainerStarted","Data":"3b80173f89c66eaddb2369263d220ac89d3df4fc48524e79192762bb9e4aed17"} Jan 20 16:55:15 crc kubenswrapper[4558]: I0120 16:55:15.938747 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-k28w4" Jan 20 16:55:15 crc kubenswrapper[4558]: I0120 16:55:15.939796 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-gvd5f" event={"ID":"31cd4c27-d472-4406-886a-7222315c465d","Type":"ContainerStarted","Data":"5ef1d53f579e55a1d5029b72751834ff89a20ca0419624b102fe6231908e54b1"} Jan 20 16:55:15 crc kubenswrapper[4558]: I0120 16:55:15.940116 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-gvd5f" Jan 20 16:55:15 crc kubenswrapper[4558]: I0120 16:55:15.945331 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7ddb5c749-jrgz7" event={"ID":"1c12cb14-b8f1-4939-84d9-6a6b851f095a","Type":"ContainerStarted","Data":"bd95c9802be7aa3b09b45d31c56df20489fe579fe93a21fade47b46985baa4cc"} Jan 20 16:55:15 crc kubenswrapper[4558]: I0120 16:55:15.945461 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-7ddb5c749-jrgz7" Jan 20 16:55:15 crc kubenswrapper[4558]: I0120 16:55:15.946786 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-65849867d6-js8vs" event={"ID":"42eb153b-1142-4dc6-9e34-b4cc38e49701","Type":"ContainerStarted","Data":"ad003af07a804bd7051853e53266eefbbf5253d38becc3b1787e7568e72d2f2c"} Jan 20 16:55:15 crc kubenswrapper[4558]: I0120 16:55:15.946936 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-65849867d6-js8vs" Jan 20 16:55:15 crc kubenswrapper[4558]: I0120 16:55:15.948341 4558 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-864f6b75bf-4jj8h" event={"ID":"6e337f3c-6900-4314-adbb-aec361ccb7cc","Type":"ContainerStarted","Data":"45317194cb1488747739e7ccc74659e1093db6bf0596f5b55836038289216605"} Jan 20 16:55:15 crc kubenswrapper[4558]: I0120 16:55:15.948676 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-864f6b75bf-4jj8h" Jan 20 16:55:15 crc kubenswrapper[4558]: I0120 16:55:15.953010 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-767fdc4f47-8mw4r" podStartSLOduration=2.287724216 podStartE2EDuration="9.953002129s" podCreationTimestamp="2026-01-20 16:55:06 +0000 UTC" firstStartedPulling="2026-01-20 16:55:07.559061843 +0000 UTC m=+801.319399809" lastFinishedPulling="2026-01-20 16:55:15.224339755 +0000 UTC m=+808.984677722" observedRunningTime="2026-01-20 16:55:15.946106486 +0000 UTC m=+809.706444453" watchObservedRunningTime="2026-01-20 16:55:15.953002129 +0000 UTC m=+809.713340096" Jan 20 16:55:15 crc kubenswrapper[4558]: I0120 16:55:15.982217 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-gvd5f" podStartSLOduration=2.102715856 podStartE2EDuration="9.982205216s" podCreationTimestamp="2026-01-20 16:55:06 +0000 UTC" firstStartedPulling="2026-01-20 16:55:07.283664954 +0000 UTC m=+801.044002921" lastFinishedPulling="2026-01-20 16:55:15.163154314 +0000 UTC m=+808.923492281" observedRunningTime="2026-01-20 16:55:15.980090692 +0000 UTC m=+809.740428658" watchObservedRunningTime="2026-01-20 16:55:15.982205216 +0000 UTC m=+809.742543174" Jan 20 16:55:16 crc kubenswrapper[4558]: I0120 16:55:16.019117 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-78757b4889-t99dx" podStartSLOduration=2.489443953 podStartE2EDuration="10.019088933s" podCreationTimestamp="2026-01-20 16:55:06 +0000 UTC" firstStartedPulling="2026-01-20 16:55:07.666038614 +0000 UTC m=+801.426376581" lastFinishedPulling="2026-01-20 16:55:15.195683595 +0000 UTC m=+808.956021561" observedRunningTime="2026-01-20 16:55:16.01289555 +0000 UTC m=+809.773233517" watchObservedRunningTime="2026-01-20 16:55:16.019088933 +0000 UTC m=+809.779426900" Jan 20 16:55:16 crc kubenswrapper[4558]: I0120 16:55:16.033613 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-686df47fcb-tsdgq" podStartSLOduration=2.476775989 podStartE2EDuration="10.033599821s" podCreationTimestamp="2026-01-20 16:55:06 +0000 UTC" firstStartedPulling="2026-01-20 16:55:07.651326437 +0000 UTC m=+801.411664404" lastFinishedPulling="2026-01-20 16:55:15.20815027 +0000 UTC m=+808.968488236" observedRunningTime="2026-01-20 16:55:16.029105242 +0000 UTC m=+809.789443209" watchObservedRunningTime="2026-01-20 16:55:16.033599821 +0000 UTC m=+809.793937788" Jan 20 16:55:16 crc kubenswrapper[4558]: I0120 16:55:16.053012 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-7ddb5c749-jrgz7" podStartSLOduration=3.870280463 podStartE2EDuration="10.052998317s" podCreationTimestamp="2026-01-20 16:55:06 +0000 UTC" firstStartedPulling="2026-01-20 16:55:06.836781745 +0000 UTC m=+800.597119713" lastFinishedPulling="2026-01-20 16:55:13.0194996 
+0000 UTC m=+806.779837567" observedRunningTime="2026-01-20 16:55:16.049978842 +0000 UTC m=+809.810316809" watchObservedRunningTime="2026-01-20 16:55:16.052998317 +0000 UTC m=+809.813336285" Jan 20 16:55:16 crc kubenswrapper[4558]: I0120 16:55:16.092782 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-9b68f5989-mdfnr" podStartSLOduration=2.152962804 podStartE2EDuration="10.092769862s" podCreationTimestamp="2026-01-20 16:55:06 +0000 UTC" firstStartedPulling="2026-01-20 16:55:07.2539426 +0000 UTC m=+801.014280568" lastFinishedPulling="2026-01-20 16:55:15.193749659 +0000 UTC m=+808.954087626" observedRunningTime="2026-01-20 16:55:16.086800812 +0000 UTC m=+809.847138779" watchObservedRunningTime="2026-01-20 16:55:16.092769862 +0000 UTC m=+809.853107830" Jan 20 16:55:16 crc kubenswrapper[4558]: I0120 16:55:16.141716 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-c87fff755-plg7f" podStartSLOduration=2.483306314 podStartE2EDuration="10.141703351s" podCreationTimestamp="2026-01-20 16:55:06 +0000 UTC" firstStartedPulling="2026-01-20 16:55:07.547655179 +0000 UTC m=+801.307993146" lastFinishedPulling="2026-01-20 16:55:15.206052216 +0000 UTC m=+808.966390183" observedRunningTime="2026-01-20 16:55:16.119815845 +0000 UTC m=+809.880153813" watchObservedRunningTime="2026-01-20 16:55:16.141703351 +0000 UTC m=+809.902041318" Jan 20 16:55:16 crc kubenswrapper[4558]: I0120 16:55:16.143309 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-7fc9b76cf6-9ld4x" podStartSLOduration=2.530587096 podStartE2EDuration="10.143300994s" podCreationTimestamp="2026-01-20 16:55:06 +0000 UTC" firstStartedPulling="2026-01-20 16:55:07.555981722 +0000 UTC m=+801.316319689" lastFinishedPulling="2026-01-20 16:55:15.168695621 +0000 UTC m=+808.929033587" observedRunningTime="2026-01-20 16:55:16.137360578 +0000 UTC m=+809.897698544" watchObservedRunningTime="2026-01-20 16:55:16.143300994 +0000 UTC m=+809.903638962" Jan 20 16:55:16 crc kubenswrapper[4558]: I0120 16:55:16.168599 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-c6994669c-78pts" podStartSLOduration=2.352126756 podStartE2EDuration="10.168585677s" podCreationTimestamp="2026-01-20 16:55:06 +0000 UTC" firstStartedPulling="2026-01-20 16:55:07.389616889 +0000 UTC m=+801.149954857" lastFinishedPulling="2026-01-20 16:55:15.206075811 +0000 UTC m=+808.966413778" observedRunningTime="2026-01-20 16:55:16.156112829 +0000 UTC m=+809.916450795" watchObservedRunningTime="2026-01-20 16:55:16.168585677 +0000 UTC m=+809.928923633" Jan 20 16:55:16 crc kubenswrapper[4558]: I0120 16:55:16.187363 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-9f958b845-q8wlk" podStartSLOduration=2.382944127 podStartE2EDuration="10.187347756s" podCreationTimestamp="2026-01-20 16:55:06 +0000 UTC" firstStartedPulling="2026-01-20 16:55:07.401654869 +0000 UTC m=+801.161992836" lastFinishedPulling="2026-01-20 16:55:15.206058499 +0000 UTC m=+808.966396465" observedRunningTime="2026-01-20 16:55:16.183649413 +0000 UTC m=+809.943987380" watchObservedRunningTime="2026-01-20 16:55:16.187347756 +0000 UTC m=+809.947685723" Jan 20 16:55:16 crc kubenswrapper[4558]: I0120 16:55:16.241981 4558 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-k28w4" podStartSLOduration=2.437014221 podStartE2EDuration="10.241968236s" podCreationTimestamp="2026-01-20 16:55:06 +0000 UTC" firstStartedPulling="2026-01-20 16:55:07.38966014 +0000 UTC m=+801.149998107" lastFinishedPulling="2026-01-20 16:55:15.194614154 +0000 UTC m=+808.954952122" observedRunningTime="2026-01-20 16:55:16.240888876 +0000 UTC m=+810.001226842" watchObservedRunningTime="2026-01-20 16:55:16.241968236 +0000 UTC m=+810.002306203" Jan 20 16:55:16 crc kubenswrapper[4558]: I0120 16:55:16.276599 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-65849867d6-js8vs" podStartSLOduration=2.622129142 podStartE2EDuration="10.276582124s" podCreationTimestamp="2026-01-20 16:55:06 +0000 UTC" firstStartedPulling="2026-01-20 16:55:07.553609844 +0000 UTC m=+801.313947811" lastFinishedPulling="2026-01-20 16:55:15.208062826 +0000 UTC m=+808.968400793" observedRunningTime="2026-01-20 16:55:16.264778975 +0000 UTC m=+810.025116943" watchObservedRunningTime="2026-01-20 16:55:16.276582124 +0000 UTC m=+810.036920092" Jan 20 16:55:16 crc kubenswrapper[4558]: I0120 16:55:16.285469 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-864f6b75bf-4jj8h" podStartSLOduration=2.613705306 podStartE2EDuration="10.285454984s" podCreationTimestamp="2026-01-20 16:55:06 +0000 UTC" firstStartedPulling="2026-01-20 16:55:07.533323327 +0000 UTC m=+801.293661294" lastFinishedPulling="2026-01-20 16:55:15.205073006 +0000 UTC m=+808.965410972" observedRunningTime="2026-01-20 16:55:16.284374813 +0000 UTC m=+810.044712781" watchObservedRunningTime="2026-01-20 16:55:16.285454984 +0000 UTC m=+810.045792951" Jan 20 16:55:22 crc kubenswrapper[4558]: I0120 16:55:22.253595 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2e9e43c0-674a-4769-b44b-7eb6ee1a69d5-cert\") pod \"infra-operator-controller-manager-77c48c7859-4f7km\" (UID: \"2e9e43c0-674a-4769-b44b-7eb6ee1a69d5\") " pod="openstack-operators/infra-operator-controller-manager-77c48c7859-4f7km" Jan 20 16:55:22 crc kubenswrapper[4558]: E0120 16:55:22.253747 4558 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Jan 20 16:55:22 crc kubenswrapper[4558]: E0120 16:55:22.253917 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/2e9e43c0-674a-4769-b44b-7eb6ee1a69d5-cert podName:2e9e43c0-674a-4769-b44b-7eb6ee1a69d5 nodeName:}" failed. No retries permitted until 2026-01-20 16:55:38.253902119 +0000 UTC m=+832.014240096 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/2e9e43c0-674a-4769-b44b-7eb6ee1a69d5-cert") pod "infra-operator-controller-manager-77c48c7859-4f7km" (UID: "2e9e43c0-674a-4769-b44b-7eb6ee1a69d5") : secret "infra-operator-webhook-server-cert" not found Jan 20 16:55:22 crc kubenswrapper[4558]: I0120 16:55:22.455957 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cbbd3f62-64b6-4cc2-8c3b-b21d317c0624-cert\") pod \"openstack-baremetal-operator-controller-manager-5b9875986dl6bbk\" (UID: \"cbbd3f62-64b6-4cc2-8c3b-b21d317c0624\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5b9875986dl6bbk" Jan 20 16:55:22 crc kubenswrapper[4558]: E0120 16:55:22.456121 4558 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 20 16:55:22 crc kubenswrapper[4558]: E0120 16:55:22.456216 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cbbd3f62-64b6-4cc2-8c3b-b21d317c0624-cert podName:cbbd3f62-64b6-4cc2-8c3b-b21d317c0624 nodeName:}" failed. No retries permitted until 2026-01-20 16:55:38.456200031 +0000 UTC m=+832.216537988 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/cbbd3f62-64b6-4cc2-8c3b-b21d317c0624-cert") pod "openstack-baremetal-operator-controller-manager-5b9875986dl6bbk" (UID: "cbbd3f62-64b6-4cc2-8c3b-b21d317c0624") : secret "openstack-baremetal-operator-webhook-server-cert" not found Jan 20 16:55:22 crc kubenswrapper[4558]: I0120 16:55:22.860950 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/deea5510-f570-48ac-b23f-bba261ef3951-metrics-certs\") pod \"openstack-operator-controller-manager-75bfd788c8-wflcw\" (UID: \"deea5510-f570-48ac-b23f-bba261ef3951\") " pod="openstack-operators/openstack-operator-controller-manager-75bfd788c8-wflcw" Jan 20 16:55:22 crc kubenswrapper[4558]: I0120 16:55:22.861030 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/deea5510-f570-48ac-b23f-bba261ef3951-webhook-certs\") pod \"openstack-operator-controller-manager-75bfd788c8-wflcw\" (UID: \"deea5510-f570-48ac-b23f-bba261ef3951\") " pod="openstack-operators/openstack-operator-controller-manager-75bfd788c8-wflcw" Jan 20 16:55:22 crc kubenswrapper[4558]: E0120 16:55:22.861218 4558 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Jan 20 16:55:22 crc kubenswrapper[4558]: E0120 16:55:22.861261 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/deea5510-f570-48ac-b23f-bba261ef3951-webhook-certs podName:deea5510-f570-48ac-b23f-bba261ef3951 nodeName:}" failed. No retries permitted until 2026-01-20 16:55:38.861247535 +0000 UTC m=+832.621585502 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/deea5510-f570-48ac-b23f-bba261ef3951-webhook-certs") pod "openstack-operator-controller-manager-75bfd788c8-wflcw" (UID: "deea5510-f570-48ac-b23f-bba261ef3951") : secret "webhook-server-cert" not found Jan 20 16:55:22 crc kubenswrapper[4558]: E0120 16:55:22.861445 4558 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Jan 20 16:55:22 crc kubenswrapper[4558]: E0120 16:55:22.861523 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/deea5510-f570-48ac-b23f-bba261ef3951-metrics-certs podName:deea5510-f570-48ac-b23f-bba261ef3951 nodeName:}" failed. No retries permitted until 2026-01-20 16:55:38.861508435 +0000 UTC m=+832.621846402 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/deea5510-f570-48ac-b23f-bba261ef3951-metrics-certs") pod "openstack-operator-controller-manager-75bfd788c8-wflcw" (UID: "deea5510-f570-48ac-b23f-bba261ef3951") : secret "metrics-server-cert" not found Jan 20 16:55:23 crc kubenswrapper[4558]: I0120 16:55:23.988594 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-cb4666565-6689s" event={"ID":"f1a6abd3-1c13-40ca-8e20-344fd40bc348","Type":"ContainerStarted","Data":"0e2eb3da46b7bd583c44ebc8b13f7d1c4b0aa944896d7eec0b5487a1c1e21756"} Jan 20 16:55:23 crc kubenswrapper[4558]: I0120 16:55:23.989960 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-cb4666565-6689s" Jan 20 16:55:24 crc kubenswrapper[4558]: I0120 16:55:24.001827 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-cb4666565-6689s" podStartSLOduration=2.361543016 podStartE2EDuration="18.001816326s" podCreationTimestamp="2026-01-20 16:55:06 +0000 UTC" firstStartedPulling="2026-01-20 16:55:07.668288343 +0000 UTC m=+801.428626311" lastFinishedPulling="2026-01-20 16:55:23.308561654 +0000 UTC m=+817.068899621" observedRunningTime="2026-01-20 16:55:23.999106102 +0000 UTC m=+817.759444058" watchObservedRunningTime="2026-01-20 16:55:24.001816326 +0000 UTC m=+817.762154294" Jan 20 16:55:26 crc kubenswrapper[4558]: I0120 16:55:26.000861 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-7cd8bc9dbb-9hmt6" event={"ID":"fafbffd7-66c2-42ab-97c2-034a1fd6fd7c","Type":"ContainerStarted","Data":"2ff3e4d2ad64773a61f739b0fa6f4ece97c0c40fd69baf1df699ce574d95ed13"} Jan 20 16:55:26 crc kubenswrapper[4558]: I0120 16:55:26.001729 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-7cd8bc9dbb-9hmt6" Jan 20 16:55:26 crc kubenswrapper[4558]: I0120 16:55:26.008587 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-64cd966744-925vf" event={"ID":"8dea3161-8db9-4778-b59c-066f876ff6e2","Type":"ContainerStarted","Data":"e6cde12ec6613c6ca86331ad5562eff59e938f8143553a59aa1e652328de6c2b"} Jan 20 16:55:26 crc kubenswrapper[4558]: I0120 16:55:26.009459 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-64cd966744-925vf" Jan 20 16:55:26 crc kubenswrapper[4558]: I0120 16:55:26.019201 4558 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-5f8f495fcf-ldhxq" event={"ID":"da27b90a-7f65-4d58-92f1-d46b0d92fd79","Type":"ContainerStarted","Data":"c4f4ac4e6be0e8cf007dc4d69369b47f2f03fdf263af1c11172de431224ca4f9"} Jan 20 16:55:26 crc kubenswrapper[4558]: I0120 16:55:26.019730 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-5f8f495fcf-ldhxq" Jan 20 16:55:26 crc kubenswrapper[4558]: I0120 16:55:26.024111 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-7cd8bc9dbb-9hmt6" podStartSLOduration=2.6363634 podStartE2EDuration="20.024101712s" podCreationTimestamp="2026-01-20 16:55:06 +0000 UTC" firstStartedPulling="2026-01-20 16:55:07.808043523 +0000 UTC m=+801.568381490" lastFinishedPulling="2026-01-20 16:55:25.195781836 +0000 UTC m=+818.956119802" observedRunningTime="2026-01-20 16:55:26.01882904 +0000 UTC m=+819.779167008" watchObservedRunningTime="2026-01-20 16:55:26.024101712 +0000 UTC m=+819.784439679" Jan 20 16:55:26 crc kubenswrapper[4558]: I0120 16:55:26.032521 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-64cd966744-925vf" podStartSLOduration=2.61684123 podStartE2EDuration="20.0325108s" podCreationTimestamp="2026-01-20 16:55:06 +0000 UTC" firstStartedPulling="2026-01-20 16:55:07.795063894 +0000 UTC m=+801.555401860" lastFinishedPulling="2026-01-20 16:55:25.210733464 +0000 UTC m=+818.971071430" observedRunningTime="2026-01-20 16:55:26.030722459 +0000 UTC m=+819.791060426" watchObservedRunningTime="2026-01-20 16:55:26.0325108 +0000 UTC m=+819.792848767" Jan 20 16:55:26 crc kubenswrapper[4558]: I0120 16:55:26.041572 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-5f8f495fcf-ldhxq" podStartSLOduration=2.638522259 podStartE2EDuration="20.041562166s" podCreationTimestamp="2026-01-20 16:55:06 +0000 UTC" firstStartedPulling="2026-01-20 16:55:07.806944617 +0000 UTC m=+801.567282585" lastFinishedPulling="2026-01-20 16:55:25.209984525 +0000 UTC m=+818.970322492" observedRunningTime="2026-01-20 16:55:26.040087083 +0000 UTC m=+819.800425060" watchObservedRunningTime="2026-01-20 16:55:26.041562166 +0000 UTC m=+819.801900133" Jan 20 16:55:26 crc kubenswrapper[4558]: I0120 16:55:26.572997 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-7ddb5c749-jrgz7" Jan 20 16:55:26 crc kubenswrapper[4558]: I0120 16:55:26.582577 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-9b68f5989-mdfnr" Jan 20 16:55:26 crc kubenswrapper[4558]: I0120 16:55:26.597477 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-9f958b845-q8wlk" Jan 20 16:55:26 crc kubenswrapper[4558]: I0120 16:55:26.603490 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-c6994669c-78pts" Jan 20 16:55:26 crc kubenswrapper[4558]: I0120 16:55:26.638300 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-k28w4" Jan 20 16:55:26 crc 
kubenswrapper[4558]: I0120 16:55:26.645664 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-gvd5f" Jan 20 16:55:26 crc kubenswrapper[4558]: I0120 16:55:26.712730 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-767fdc4f47-8mw4r" Jan 20 16:55:26 crc kubenswrapper[4558]: I0120 16:55:26.753674 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-864f6b75bf-4jj8h" Jan 20 16:55:26 crc kubenswrapper[4558]: I0120 16:55:26.765849 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-c87fff755-plg7f" Jan 20 16:55:26 crc kubenswrapper[4558]: I0120 16:55:26.803702 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-65849867d6-js8vs" Jan 20 16:55:26 crc kubenswrapper[4558]: I0120 16:55:26.813502 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-7fc9b76cf6-9ld4x" Jan 20 16:55:26 crc kubenswrapper[4558]: I0120 16:55:26.848648 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-686df47fcb-tsdgq" Jan 20 16:55:26 crc kubenswrapper[4558]: I0120 16:55:26.985104 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-78757b4889-t99dx" Jan 20 16:55:32 crc kubenswrapper[4558]: I0120 16:55:32.059651 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-qwwmt" event={"ID":"af89563a-d3e9-4424-8fde-a0a77b21b7e8","Type":"ContainerStarted","Data":"5a0b4e0b6774ce77a765f98980c1f9ce2e4138aebc6117795de6f7f8346a3737"} Jan 20 16:55:32 crc kubenswrapper[4558]: I0120 16:55:32.061310 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-55db956ddc-6j4g5" event={"ID":"1998e4dc-a1d4-405e-b0dd-546e1d5fed6f","Type":"ContainerStarted","Data":"96b14ccca580d6f73044405aa0fb79aa275da3630e3f85445651826254e89bab"} Jan 20 16:55:32 crc kubenswrapper[4558]: I0120 16:55:32.061558 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-55db956ddc-6j4g5" Jan 20 16:55:32 crc kubenswrapper[4558]: I0120 16:55:32.062571 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-85dd56d4cc-cvlzf" event={"ID":"56c68b3c-13ac-4e77-a8a5-bd99d83d5667","Type":"ContainerStarted","Data":"51ecd2e724af29b0f6a51dc85e4ad8a754870e77bf87c5ae416f003c008d7caf"} Jan 20 16:55:32 crc kubenswrapper[4558]: I0120 16:55:32.062787 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-85dd56d4cc-cvlzf" Jan 20 16:55:32 crc kubenswrapper[4558]: I0120 16:55:32.075725 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-qwwmt" podStartSLOduration=1.068088622 podStartE2EDuration="25.075700451s" podCreationTimestamp="2026-01-20 16:55:07 +0000 UTC" firstStartedPulling="2026-01-20 16:55:07.808835923 +0000 UTC 
m=+801.569173890" lastFinishedPulling="2026-01-20 16:55:31.816447752 +0000 UTC m=+825.576785719" observedRunningTime="2026-01-20 16:55:32.071435073 +0000 UTC m=+825.831773040" watchObservedRunningTime="2026-01-20 16:55:32.075700451 +0000 UTC m=+825.836038418" Jan 20 16:55:32 crc kubenswrapper[4558]: I0120 16:55:32.085493 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-55db956ddc-6j4g5" podStartSLOduration=1.9368921609999998 podStartE2EDuration="26.085475307s" podCreationTimestamp="2026-01-20 16:55:06 +0000 UTC" firstStartedPulling="2026-01-20 16:55:07.653305597 +0000 UTC m=+801.413643564" lastFinishedPulling="2026-01-20 16:55:31.801888733 +0000 UTC m=+825.562226710" observedRunningTime="2026-01-20 16:55:32.083748992 +0000 UTC m=+825.844086949" watchObservedRunningTime="2026-01-20 16:55:32.085475307 +0000 UTC m=+825.845813274" Jan 20 16:55:32 crc kubenswrapper[4558]: I0120 16:55:32.098455 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-85dd56d4cc-cvlzf" podStartSLOduration=1.9702340409999999 podStartE2EDuration="26.09842976s" podCreationTimestamp="2026-01-20 16:55:06 +0000 UTC" firstStartedPulling="2026-01-20 16:55:07.688228237 +0000 UTC m=+801.448566205" lastFinishedPulling="2026-01-20 16:55:31.816423957 +0000 UTC m=+825.576761924" observedRunningTime="2026-01-20 16:55:32.095580603 +0000 UTC m=+825.855918570" watchObservedRunningTime="2026-01-20 16:55:32.09842976 +0000 UTC m=+825.858767727" Jan 20 16:55:36 crc kubenswrapper[4558]: I0120 16:55:36.830734 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-55db956ddc-6j4g5" Jan 20 16:55:36 crc kubenswrapper[4558]: I0120 16:55:36.958519 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-85dd56d4cc-cvlzf" Jan 20 16:55:36 crc kubenswrapper[4558]: I0120 16:55:36.996542 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-5f8f495fcf-ldhxq" Jan 20 16:55:37 crc kubenswrapper[4558]: I0120 16:55:37.075539 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-cb4666565-6689s" Jan 20 16:55:37 crc kubenswrapper[4558]: I0120 16:55:37.122876 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-7cd8bc9dbb-9hmt6" Jan 20 16:55:37 crc kubenswrapper[4558]: I0120 16:55:37.232365 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-64cd966744-925vf" Jan 20 16:55:38 crc kubenswrapper[4558]: I0120 16:55:38.255251 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2e9e43c0-674a-4769-b44b-7eb6ee1a69d5-cert\") pod \"infra-operator-controller-manager-77c48c7859-4f7km\" (UID: \"2e9e43c0-674a-4769-b44b-7eb6ee1a69d5\") " pod="openstack-operators/infra-operator-controller-manager-77c48c7859-4f7km" Jan 20 16:55:38 crc kubenswrapper[4558]: I0120 16:55:38.259903 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2e9e43c0-674a-4769-b44b-7eb6ee1a69d5-cert\") pod 
\"infra-operator-controller-manager-77c48c7859-4f7km\" (UID: \"2e9e43c0-674a-4769-b44b-7eb6ee1a69d5\") " pod="openstack-operators/infra-operator-controller-manager-77c48c7859-4f7km" Jan 20 16:55:38 crc kubenswrapper[4558]: I0120 16:55:38.459000 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cbbd3f62-64b6-4cc2-8c3b-b21d317c0624-cert\") pod \"openstack-baremetal-operator-controller-manager-5b9875986dl6bbk\" (UID: \"cbbd3f62-64b6-4cc2-8c3b-b21d317c0624\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5b9875986dl6bbk" Jan 20 16:55:38 crc kubenswrapper[4558]: I0120 16:55:38.462891 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-77c48c7859-4f7km" Jan 20 16:55:38 crc kubenswrapper[4558]: I0120 16:55:38.463006 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cbbd3f62-64b6-4cc2-8c3b-b21d317c0624-cert\") pod \"openstack-baremetal-operator-controller-manager-5b9875986dl6bbk\" (UID: \"cbbd3f62-64b6-4cc2-8c3b-b21d317c0624\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-5b9875986dl6bbk" Jan 20 16:55:38 crc kubenswrapper[4558]: I0120 16:55:38.634889 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5b9875986dl6bbk" Jan 20 16:55:38 crc kubenswrapper[4558]: I0120 16:55:38.801500 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-77c48c7859-4f7km"] Jan 20 16:55:38 crc kubenswrapper[4558]: I0120 16:55:38.865043 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/deea5510-f570-48ac-b23f-bba261ef3951-metrics-certs\") pod \"openstack-operator-controller-manager-75bfd788c8-wflcw\" (UID: \"deea5510-f570-48ac-b23f-bba261ef3951\") " pod="openstack-operators/openstack-operator-controller-manager-75bfd788c8-wflcw" Jan 20 16:55:38 crc kubenswrapper[4558]: I0120 16:55:38.865115 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/deea5510-f570-48ac-b23f-bba261ef3951-webhook-certs\") pod \"openstack-operator-controller-manager-75bfd788c8-wflcw\" (UID: \"deea5510-f570-48ac-b23f-bba261ef3951\") " pod="openstack-operators/openstack-operator-controller-manager-75bfd788c8-wflcw" Jan 20 16:55:38 crc kubenswrapper[4558]: I0120 16:55:38.869186 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/deea5510-f570-48ac-b23f-bba261ef3951-webhook-certs\") pod \"openstack-operator-controller-manager-75bfd788c8-wflcw\" (UID: \"deea5510-f570-48ac-b23f-bba261ef3951\") " pod="openstack-operators/openstack-operator-controller-manager-75bfd788c8-wflcw" Jan 20 16:55:38 crc kubenswrapper[4558]: I0120 16:55:38.869186 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/deea5510-f570-48ac-b23f-bba261ef3951-metrics-certs\") pod \"openstack-operator-controller-manager-75bfd788c8-wflcw\" (UID: \"deea5510-f570-48ac-b23f-bba261ef3951\") " pod="openstack-operators/openstack-operator-controller-manager-75bfd788c8-wflcw" Jan 20 16:55:38 crc kubenswrapper[4558]: I0120 16:55:38.895658 4558 util.go:30] 
"No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-75bfd788c8-wflcw" Jan 20 16:55:38 crc kubenswrapper[4558]: I0120 16:55:38.993178 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-5b9875986dl6bbk"] Jan 20 16:55:39 crc kubenswrapper[4558]: I0120 16:55:39.105615 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-77c48c7859-4f7km" event={"ID":"2e9e43c0-674a-4769-b44b-7eb6ee1a69d5","Type":"ContainerStarted","Data":"7830ae1810760294cff9779d3b1f563fd4dd2a7667783c5e89fa7f597c46168e"} Jan 20 16:55:39 crc kubenswrapper[4558]: I0120 16:55:39.106588 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5b9875986dl6bbk" event={"ID":"cbbd3f62-64b6-4cc2-8c3b-b21d317c0624","Type":"ContainerStarted","Data":"500e7a715909470495e104983b0e9b743961d3cf052eba13bc5362bb72398884"} Jan 20 16:55:39 crc kubenswrapper[4558]: I0120 16:55:39.244655 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-75bfd788c8-wflcw"] Jan 20 16:55:39 crc kubenswrapper[4558]: W0120 16:55:39.245879 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddeea5510_f570_48ac_b23f_bba261ef3951.slice/crio-77f052a321b1fe1b30248d395ed5ac53d1857594bddd143973d267c4b4bcefc9 WatchSource:0}: Error finding container 77f052a321b1fe1b30248d395ed5ac53d1857594bddd143973d267c4b4bcefc9: Status 404 returned error can't find the container with id 77f052a321b1fe1b30248d395ed5ac53d1857594bddd143973d267c4b4bcefc9 Jan 20 16:55:40 crc kubenswrapper[4558]: I0120 16:55:40.112838 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-75bfd788c8-wflcw" event={"ID":"deea5510-f570-48ac-b23f-bba261ef3951","Type":"ContainerStarted","Data":"64bcb4cba85895bee43f773af00ef7e07c4d7bd80d07b323ae48eeb3ec59dd5a"} Jan 20 16:55:40 crc kubenswrapper[4558]: I0120 16:55:40.113054 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-75bfd788c8-wflcw" Jan 20 16:55:40 crc kubenswrapper[4558]: I0120 16:55:40.113066 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-75bfd788c8-wflcw" event={"ID":"deea5510-f570-48ac-b23f-bba261ef3951","Type":"ContainerStarted","Data":"77f052a321b1fe1b30248d395ed5ac53d1857594bddd143973d267c4b4bcefc9"} Jan 20 16:55:40 crc kubenswrapper[4558]: I0120 16:55:40.140787 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-75bfd788c8-wflcw" podStartSLOduration=34.140770798 podStartE2EDuration="34.140770798s" podCreationTimestamp="2026-01-20 16:55:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:55:40.138916272 +0000 UTC m=+833.899254239" watchObservedRunningTime="2026-01-20 16:55:40.140770798 +0000 UTC m=+833.901108765" Jan 20 16:55:42 crc kubenswrapper[4558]: I0120 16:55:42.125269 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5b9875986dl6bbk" 
event={"ID":"cbbd3f62-64b6-4cc2-8c3b-b21d317c0624","Type":"ContainerStarted","Data":"14786d8dc9d059a3c99ac1f27b72a5111fcedcf4c96caa245bb691b274f41a26"} Jan 20 16:55:42 crc kubenswrapper[4558]: I0120 16:55:42.125482 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5b9875986dl6bbk" Jan 20 16:55:42 crc kubenswrapper[4558]: I0120 16:55:42.126843 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-77c48c7859-4f7km" event={"ID":"2e9e43c0-674a-4769-b44b-7eb6ee1a69d5","Type":"ContainerStarted","Data":"5fea48a489b7236322aa035fd26c8bc64999234427695faf363f012948d76b1b"} Jan 20 16:55:42 crc kubenswrapper[4558]: I0120 16:55:42.126982 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-77c48c7859-4f7km" Jan 20 16:55:42 crc kubenswrapper[4558]: I0120 16:55:42.144949 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5b9875986dl6bbk" podStartSLOduration=33.590324875 podStartE2EDuration="36.144936001s" podCreationTimestamp="2026-01-20 16:55:06 +0000 UTC" firstStartedPulling="2026-01-20 16:55:38.995659978 +0000 UTC m=+832.755997935" lastFinishedPulling="2026-01-20 16:55:41.550271094 +0000 UTC m=+835.310609061" observedRunningTime="2026-01-20 16:55:42.144604639 +0000 UTC m=+835.904942605" watchObservedRunningTime="2026-01-20 16:55:42.144936001 +0000 UTC m=+835.905273968" Jan 20 16:55:42 crc kubenswrapper[4558]: I0120 16:55:42.157871 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-77c48c7859-4f7km" podStartSLOduration=33.41092497 podStartE2EDuration="36.157854587s" podCreationTimestamp="2026-01-20 16:55:06 +0000 UTC" firstStartedPulling="2026-01-20 16:55:38.805849671 +0000 UTC m=+832.566187638" lastFinishedPulling="2026-01-20 16:55:41.552779288 +0000 UTC m=+835.313117255" observedRunningTime="2026-01-20 16:55:42.156440137 +0000 UTC m=+835.916778105" watchObservedRunningTime="2026-01-20 16:55:42.157854587 +0000 UTC m=+835.918192553" Jan 20 16:55:48 crc kubenswrapper[4558]: I0120 16:55:48.471680 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-77c48c7859-4f7km" Jan 20 16:55:48 crc kubenswrapper[4558]: I0120 16:55:48.641074 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5b9875986dl6bbk" Jan 20 16:55:48 crc kubenswrapper[4558]: I0120 16:55:48.901055 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-75bfd788c8-wflcw" Jan 20 16:55:53 crc kubenswrapper[4558]: I0120 16:55:53.583733 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-hdnsg"] Jan 20 16:55:53 crc kubenswrapper[4558]: I0120 16:55:53.584841 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-hdnsg" Jan 20 16:55:53 crc kubenswrapper[4558]: I0120 16:55:53.589421 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Jan 20 16:55:53 crc kubenswrapper[4558]: I0120 16:55:53.589634 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-hdnsg"] Jan 20 16:55:53 crc kubenswrapper[4558]: I0120 16:55:53.589668 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Jan 20 16:55:53 crc kubenswrapper[4558]: I0120 16:55:53.589765 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Jan 20 16:55:53 crc kubenswrapper[4558]: I0120 16:55:53.589811 4558 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-k9ht8" Jan 20 16:55:53 crc kubenswrapper[4558]: I0120 16:55:53.643481 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/bbf47bcb-a8a0-43c8-b40e-4a1a3f8cb896-node-mnt\") pod \"crc-storage-crc-hdnsg\" (UID: \"bbf47bcb-a8a0-43c8-b40e-4a1a3f8cb896\") " pod="crc-storage/crc-storage-crc-hdnsg" Jan 20 16:55:53 crc kubenswrapper[4558]: I0120 16:55:53.643667 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/bbf47bcb-a8a0-43c8-b40e-4a1a3f8cb896-crc-storage\") pod \"crc-storage-crc-hdnsg\" (UID: \"bbf47bcb-a8a0-43c8-b40e-4a1a3f8cb896\") " pod="crc-storage/crc-storage-crc-hdnsg" Jan 20 16:55:53 crc kubenswrapper[4558]: I0120 16:55:53.643833 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7ww2j\" (UniqueName: \"kubernetes.io/projected/bbf47bcb-a8a0-43c8-b40e-4a1a3f8cb896-kube-api-access-7ww2j\") pod \"crc-storage-crc-hdnsg\" (UID: \"bbf47bcb-a8a0-43c8-b40e-4a1a3f8cb896\") " pod="crc-storage/crc-storage-crc-hdnsg" Jan 20 16:55:53 crc kubenswrapper[4558]: I0120 16:55:53.744868 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/bbf47bcb-a8a0-43c8-b40e-4a1a3f8cb896-node-mnt\") pod \"crc-storage-crc-hdnsg\" (UID: \"bbf47bcb-a8a0-43c8-b40e-4a1a3f8cb896\") " pod="crc-storage/crc-storage-crc-hdnsg" Jan 20 16:55:53 crc kubenswrapper[4558]: I0120 16:55:53.744907 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/bbf47bcb-a8a0-43c8-b40e-4a1a3f8cb896-crc-storage\") pod \"crc-storage-crc-hdnsg\" (UID: \"bbf47bcb-a8a0-43c8-b40e-4a1a3f8cb896\") " pod="crc-storage/crc-storage-crc-hdnsg" Jan 20 16:55:53 crc kubenswrapper[4558]: I0120 16:55:53.744943 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7ww2j\" (UniqueName: \"kubernetes.io/projected/bbf47bcb-a8a0-43c8-b40e-4a1a3f8cb896-kube-api-access-7ww2j\") pod \"crc-storage-crc-hdnsg\" (UID: \"bbf47bcb-a8a0-43c8-b40e-4a1a3f8cb896\") " pod="crc-storage/crc-storage-crc-hdnsg" Jan 20 16:55:53 crc kubenswrapper[4558]: I0120 16:55:53.745115 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/bbf47bcb-a8a0-43c8-b40e-4a1a3f8cb896-node-mnt\") pod \"crc-storage-crc-hdnsg\" (UID: \"bbf47bcb-a8a0-43c8-b40e-4a1a3f8cb896\") " 
pod="crc-storage/crc-storage-crc-hdnsg" Jan 20 16:55:53 crc kubenswrapper[4558]: I0120 16:55:53.745667 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/bbf47bcb-a8a0-43c8-b40e-4a1a3f8cb896-crc-storage\") pod \"crc-storage-crc-hdnsg\" (UID: \"bbf47bcb-a8a0-43c8-b40e-4a1a3f8cb896\") " pod="crc-storage/crc-storage-crc-hdnsg" Jan 20 16:55:53 crc kubenswrapper[4558]: I0120 16:55:53.760768 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7ww2j\" (UniqueName: \"kubernetes.io/projected/bbf47bcb-a8a0-43c8-b40e-4a1a3f8cb896-kube-api-access-7ww2j\") pod \"crc-storage-crc-hdnsg\" (UID: \"bbf47bcb-a8a0-43c8-b40e-4a1a3f8cb896\") " pod="crc-storage/crc-storage-crc-hdnsg" Jan 20 16:55:53 crc kubenswrapper[4558]: I0120 16:55:53.900774 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-hdnsg" Jan 20 16:55:54 crc kubenswrapper[4558]: I0120 16:55:54.301644 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-hdnsg"] Jan 20 16:55:54 crc kubenswrapper[4558]: W0120 16:55:54.305732 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbbf47bcb_a8a0_43c8_b40e_4a1a3f8cb896.slice/crio-c57a5fce8ac175577391c1ecd5321f2ec479dd68a5565e9ee6194111882319db WatchSource:0}: Error finding container c57a5fce8ac175577391c1ecd5321f2ec479dd68a5565e9ee6194111882319db: Status 404 returned error can't find the container with id c57a5fce8ac175577391c1ecd5321f2ec479dd68a5565e9ee6194111882319db Jan 20 16:55:55 crc kubenswrapper[4558]: I0120 16:55:55.196639 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-hdnsg" event={"ID":"bbf47bcb-a8a0-43c8-b40e-4a1a3f8cb896","Type":"ContainerStarted","Data":"c57a5fce8ac175577391c1ecd5321f2ec479dd68a5565e9ee6194111882319db"} Jan 20 16:55:56 crc kubenswrapper[4558]: I0120 16:55:56.204378 4558 generic.go:334] "Generic (PLEG): container finished" podID="bbf47bcb-a8a0-43c8-b40e-4a1a3f8cb896" containerID="4c8dd757b611d7337e141eef01ee239352334771cae4ea64722f647fea518f2e" exitCode=0 Jan 20 16:55:56 crc kubenswrapper[4558]: I0120 16:55:56.204415 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-hdnsg" event={"ID":"bbf47bcb-a8a0-43c8-b40e-4a1a3f8cb896","Type":"ContainerDied","Data":"4c8dd757b611d7337e141eef01ee239352334771cae4ea64722f647fea518f2e"} Jan 20 16:55:57 crc kubenswrapper[4558]: I0120 16:55:57.405414 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-hdnsg" Jan 20 16:55:57 crc kubenswrapper[4558]: I0120 16:55:57.592173 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7ww2j\" (UniqueName: \"kubernetes.io/projected/bbf47bcb-a8a0-43c8-b40e-4a1a3f8cb896-kube-api-access-7ww2j\") pod \"bbf47bcb-a8a0-43c8-b40e-4a1a3f8cb896\" (UID: \"bbf47bcb-a8a0-43c8-b40e-4a1a3f8cb896\") " Jan 20 16:55:57 crc kubenswrapper[4558]: I0120 16:55:57.592235 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/bbf47bcb-a8a0-43c8-b40e-4a1a3f8cb896-node-mnt\") pod \"bbf47bcb-a8a0-43c8-b40e-4a1a3f8cb896\" (UID: \"bbf47bcb-a8a0-43c8-b40e-4a1a3f8cb896\") " Jan 20 16:55:57 crc kubenswrapper[4558]: I0120 16:55:57.592282 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/bbf47bcb-a8a0-43c8-b40e-4a1a3f8cb896-crc-storage\") pod \"bbf47bcb-a8a0-43c8-b40e-4a1a3f8cb896\" (UID: \"bbf47bcb-a8a0-43c8-b40e-4a1a3f8cb896\") " Jan 20 16:55:57 crc kubenswrapper[4558]: I0120 16:55:57.592386 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bbf47bcb-a8a0-43c8-b40e-4a1a3f8cb896-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "bbf47bcb-a8a0-43c8-b40e-4a1a3f8cb896" (UID: "bbf47bcb-a8a0-43c8-b40e-4a1a3f8cb896"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 16:55:57 crc kubenswrapper[4558]: I0120 16:55:57.592639 4558 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/bbf47bcb-a8a0-43c8-b40e-4a1a3f8cb896-node-mnt\") on node \"crc\" DevicePath \"\"" Jan 20 16:55:57 crc kubenswrapper[4558]: I0120 16:55:57.597253 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bbf47bcb-a8a0-43c8-b40e-4a1a3f8cb896-kube-api-access-7ww2j" (OuterVolumeSpecName: "kube-api-access-7ww2j") pod "bbf47bcb-a8a0-43c8-b40e-4a1a3f8cb896" (UID: "bbf47bcb-a8a0-43c8-b40e-4a1a3f8cb896"). InnerVolumeSpecName "kube-api-access-7ww2j". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:55:57 crc kubenswrapper[4558]: I0120 16:55:57.621668 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bbf47bcb-a8a0-43c8-b40e-4a1a3f8cb896-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "bbf47bcb-a8a0-43c8-b40e-4a1a3f8cb896" (UID: "bbf47bcb-a8a0-43c8-b40e-4a1a3f8cb896"). InnerVolumeSpecName "crc-storage". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:55:57 crc kubenswrapper[4558]: I0120 16:55:57.694325 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7ww2j\" (UniqueName: \"kubernetes.io/projected/bbf47bcb-a8a0-43c8-b40e-4a1a3f8cb896-kube-api-access-7ww2j\") on node \"crc\" DevicePath \"\"" Jan 20 16:55:57 crc kubenswrapper[4558]: I0120 16:55:57.694371 4558 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/bbf47bcb-a8a0-43c8-b40e-4a1a3f8cb896-crc-storage\") on node \"crc\" DevicePath \"\"" Jan 20 16:55:58 crc kubenswrapper[4558]: I0120 16:55:58.215366 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-hdnsg" event={"ID":"bbf47bcb-a8a0-43c8-b40e-4a1a3f8cb896","Type":"ContainerDied","Data":"c57a5fce8ac175577391c1ecd5321f2ec479dd68a5565e9ee6194111882319db"} Jan 20 16:55:58 crc kubenswrapper[4558]: I0120 16:55:58.215402 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c57a5fce8ac175577391c1ecd5321f2ec479dd68a5565e9ee6194111882319db" Jan 20 16:55:58 crc kubenswrapper[4558]: I0120 16:55:58.215410 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-hdnsg" Jan 20 16:56:00 crc kubenswrapper[4558]: I0120 16:56:00.274442 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-hdnsg"] Jan 20 16:56:00 crc kubenswrapper[4558]: I0120 16:56:00.281129 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-hdnsg"] Jan 20 16:56:00 crc kubenswrapper[4558]: I0120 16:56:00.363834 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-fnm2v"] Jan 20 16:56:00 crc kubenswrapper[4558]: E0120 16:56:00.364264 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bbf47bcb-a8a0-43c8-b40e-4a1a3f8cb896" containerName="storage" Jan 20 16:56:00 crc kubenswrapper[4558]: I0120 16:56:00.364285 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="bbf47bcb-a8a0-43c8-b40e-4a1a3f8cb896" containerName="storage" Jan 20 16:56:00 crc kubenswrapper[4558]: I0120 16:56:00.364469 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="bbf47bcb-a8a0-43c8-b40e-4a1a3f8cb896" containerName="storage" Jan 20 16:56:00 crc kubenswrapper[4558]: I0120 16:56:00.365050 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-fnm2v" Jan 20 16:56:00 crc kubenswrapper[4558]: I0120 16:56:00.366751 4558 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-k9ht8" Jan 20 16:56:00 crc kubenswrapper[4558]: I0120 16:56:00.368087 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Jan 20 16:56:00 crc kubenswrapper[4558]: I0120 16:56:00.368102 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Jan 20 16:56:00 crc kubenswrapper[4558]: I0120 16:56:00.369191 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Jan 20 16:56:00 crc kubenswrapper[4558]: I0120 16:56:00.373476 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-fnm2v"] Jan 20 16:56:00 crc kubenswrapper[4558]: I0120 16:56:00.535008 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6bjqn\" (UniqueName: \"kubernetes.io/projected/d6232781-485e-49dc-8a38-f5943e6c88e1-kube-api-access-6bjqn\") pod \"crc-storage-crc-fnm2v\" (UID: \"d6232781-485e-49dc-8a38-f5943e6c88e1\") " pod="crc-storage/crc-storage-crc-fnm2v" Jan 20 16:56:00 crc kubenswrapper[4558]: I0120 16:56:00.535157 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/d6232781-485e-49dc-8a38-f5943e6c88e1-crc-storage\") pod \"crc-storage-crc-fnm2v\" (UID: \"d6232781-485e-49dc-8a38-f5943e6c88e1\") " pod="crc-storage/crc-storage-crc-fnm2v" Jan 20 16:56:00 crc kubenswrapper[4558]: I0120 16:56:00.535356 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/d6232781-485e-49dc-8a38-f5943e6c88e1-node-mnt\") pod \"crc-storage-crc-fnm2v\" (UID: \"d6232781-485e-49dc-8a38-f5943e6c88e1\") " pod="crc-storage/crc-storage-crc-fnm2v" Jan 20 16:56:00 crc kubenswrapper[4558]: I0120 16:56:00.572899 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bbf47bcb-a8a0-43c8-b40e-4a1a3f8cb896" path="/var/lib/kubelet/pods/bbf47bcb-a8a0-43c8-b40e-4a1a3f8cb896/volumes" Jan 20 16:56:00 crc kubenswrapper[4558]: I0120 16:56:00.637060 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/d6232781-485e-49dc-8a38-f5943e6c88e1-crc-storage\") pod \"crc-storage-crc-fnm2v\" (UID: \"d6232781-485e-49dc-8a38-f5943e6c88e1\") " pod="crc-storage/crc-storage-crc-fnm2v" Jan 20 16:56:00 crc kubenswrapper[4558]: I0120 16:56:00.637265 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/d6232781-485e-49dc-8a38-f5943e6c88e1-node-mnt\") pod \"crc-storage-crc-fnm2v\" (UID: \"d6232781-485e-49dc-8a38-f5943e6c88e1\") " pod="crc-storage/crc-storage-crc-fnm2v" Jan 20 16:56:00 crc kubenswrapper[4558]: I0120 16:56:00.637323 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6bjqn\" (UniqueName: \"kubernetes.io/projected/d6232781-485e-49dc-8a38-f5943e6c88e1-kube-api-access-6bjqn\") pod \"crc-storage-crc-fnm2v\" (UID: \"d6232781-485e-49dc-8a38-f5943e6c88e1\") " pod="crc-storage/crc-storage-crc-fnm2v" Jan 20 16:56:00 crc kubenswrapper[4558]: I0120 16:56:00.637547 4558 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/d6232781-485e-49dc-8a38-f5943e6c88e1-node-mnt\") pod \"crc-storage-crc-fnm2v\" (UID: \"d6232781-485e-49dc-8a38-f5943e6c88e1\") " pod="crc-storage/crc-storage-crc-fnm2v" Jan 20 16:56:00 crc kubenswrapper[4558]: I0120 16:56:00.638046 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/d6232781-485e-49dc-8a38-f5943e6c88e1-crc-storage\") pod \"crc-storage-crc-fnm2v\" (UID: \"d6232781-485e-49dc-8a38-f5943e6c88e1\") " pod="crc-storage/crc-storage-crc-fnm2v" Jan 20 16:56:00 crc kubenswrapper[4558]: I0120 16:56:00.651932 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6bjqn\" (UniqueName: \"kubernetes.io/projected/d6232781-485e-49dc-8a38-f5943e6c88e1-kube-api-access-6bjqn\") pod \"crc-storage-crc-fnm2v\" (UID: \"d6232781-485e-49dc-8a38-f5943e6c88e1\") " pod="crc-storage/crc-storage-crc-fnm2v" Jan 20 16:56:00 crc kubenswrapper[4558]: I0120 16:56:00.680925 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-fnm2v" Jan 20 16:56:01 crc kubenswrapper[4558]: I0120 16:56:01.042447 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-fnm2v"] Jan 20 16:56:01 crc kubenswrapper[4558]: W0120 16:56:01.044551 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd6232781_485e_49dc_8a38_f5943e6c88e1.slice/crio-f68641a39ad66a96e686c8eba4ee70e66854038c8fdbcc3dc903f6221a081be9 WatchSource:0}: Error finding container f68641a39ad66a96e686c8eba4ee70e66854038c8fdbcc3dc903f6221a081be9: Status 404 returned error can't find the container with id f68641a39ad66a96e686c8eba4ee70e66854038c8fdbcc3dc903f6221a081be9 Jan 20 16:56:01 crc kubenswrapper[4558]: I0120 16:56:01.231233 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-fnm2v" event={"ID":"d6232781-485e-49dc-8a38-f5943e6c88e1","Type":"ContainerStarted","Data":"f68641a39ad66a96e686c8eba4ee70e66854038c8fdbcc3dc903f6221a081be9"} Jan 20 16:56:02 crc kubenswrapper[4558]: I0120 16:56:02.238373 4558 generic.go:334] "Generic (PLEG): container finished" podID="d6232781-485e-49dc-8a38-f5943e6c88e1" containerID="16451790b38caf12a201fb0b698455f8add58eb4ef0a84fc3e537dd73d5257ea" exitCode=0 Jan 20 16:56:02 crc kubenswrapper[4558]: I0120 16:56:02.238416 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-fnm2v" event={"ID":"d6232781-485e-49dc-8a38-f5943e6c88e1","Type":"ContainerDied","Data":"16451790b38caf12a201fb0b698455f8add58eb4ef0a84fc3e537dd73d5257ea"} Jan 20 16:56:03 crc kubenswrapper[4558]: I0120 16:56:03.437475 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-fnm2v" Jan 20 16:56:03 crc kubenswrapper[4558]: I0120 16:56:03.470563 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/d6232781-485e-49dc-8a38-f5943e6c88e1-crc-storage\") pod \"d6232781-485e-49dc-8a38-f5943e6c88e1\" (UID: \"d6232781-485e-49dc-8a38-f5943e6c88e1\") " Jan 20 16:56:03 crc kubenswrapper[4558]: I0120 16:56:03.470606 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/d6232781-485e-49dc-8a38-f5943e6c88e1-node-mnt\") pod \"d6232781-485e-49dc-8a38-f5943e6c88e1\" (UID: \"d6232781-485e-49dc-8a38-f5943e6c88e1\") " Jan 20 16:56:03 crc kubenswrapper[4558]: I0120 16:56:03.470679 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6bjqn\" (UniqueName: \"kubernetes.io/projected/d6232781-485e-49dc-8a38-f5943e6c88e1-kube-api-access-6bjqn\") pod \"d6232781-485e-49dc-8a38-f5943e6c88e1\" (UID: \"d6232781-485e-49dc-8a38-f5943e6c88e1\") " Jan 20 16:56:03 crc kubenswrapper[4558]: I0120 16:56:03.470779 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d6232781-485e-49dc-8a38-f5943e6c88e1-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "d6232781-485e-49dc-8a38-f5943e6c88e1" (UID: "d6232781-485e-49dc-8a38-f5943e6c88e1"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 16:56:03 crc kubenswrapper[4558]: I0120 16:56:03.470927 4558 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/d6232781-485e-49dc-8a38-f5943e6c88e1-node-mnt\") on node \"crc\" DevicePath \"\"" Jan 20 16:56:03 crc kubenswrapper[4558]: I0120 16:56:03.476031 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d6232781-485e-49dc-8a38-f5943e6c88e1-kube-api-access-6bjqn" (OuterVolumeSpecName: "kube-api-access-6bjqn") pod "d6232781-485e-49dc-8a38-f5943e6c88e1" (UID: "d6232781-485e-49dc-8a38-f5943e6c88e1"). InnerVolumeSpecName "kube-api-access-6bjqn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:56:03 crc kubenswrapper[4558]: I0120 16:56:03.485697 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d6232781-485e-49dc-8a38-f5943e6c88e1-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "d6232781-485e-49dc-8a38-f5943e6c88e1" (UID: "d6232781-485e-49dc-8a38-f5943e6c88e1"). InnerVolumeSpecName "crc-storage". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:56:03 crc kubenswrapper[4558]: I0120 16:56:03.571476 4558 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/d6232781-485e-49dc-8a38-f5943e6c88e1-crc-storage\") on node \"crc\" DevicePath \"\"" Jan 20 16:56:03 crc kubenswrapper[4558]: I0120 16:56:03.571507 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6bjqn\" (UniqueName: \"kubernetes.io/projected/d6232781-485e-49dc-8a38-f5943e6c88e1-kube-api-access-6bjqn\") on node \"crc\" DevicePath \"\"" Jan 20 16:56:04 crc kubenswrapper[4558]: I0120 16:56:04.249539 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-fnm2v" event={"ID":"d6232781-485e-49dc-8a38-f5943e6c88e1","Type":"ContainerDied","Data":"f68641a39ad66a96e686c8eba4ee70e66854038c8fdbcc3dc903f6221a081be9"} Jan 20 16:56:04 crc kubenswrapper[4558]: I0120 16:56:04.250007 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f68641a39ad66a96e686c8eba4ee70e66854038c8fdbcc3dc903f6221a081be9" Jan 20 16:56:04 crc kubenswrapper[4558]: I0120 16:56:04.249591 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-fnm2v" Jan 20 16:56:06 crc kubenswrapper[4558]: I0120 16:56:06.632346 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-f5849d7b9-w2tw2"] Jan 20 16:56:06 crc kubenswrapper[4558]: E0120 16:56:06.632828 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6232781-485e-49dc-8a38-f5943e6c88e1" containerName="storage" Jan 20 16:56:06 crc kubenswrapper[4558]: I0120 16:56:06.632841 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6232781-485e-49dc-8a38-f5943e6c88e1" containerName="storage" Jan 20 16:56:06 crc kubenswrapper[4558]: I0120 16:56:06.633048 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d6232781-485e-49dc-8a38-f5943e6c88e1" containerName="storage" Jan 20 16:56:06 crc kubenswrapper[4558]: I0120 16:56:06.633669 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-f5849d7b9-w2tw2" Jan 20 16:56:06 crc kubenswrapper[4558]: I0120 16:56:06.635686 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"dnsmasq" Jan 20 16:56:06 crc kubenswrapper[4558]: I0120 16:56:06.635736 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openshift-service-ca.crt" Jan 20 16:56:06 crc kubenswrapper[4558]: I0120 16:56:06.635693 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"kube-root-ca.crt" Jan 20 16:56:06 crc kubenswrapper[4558]: I0120 16:56:06.635900 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dnsmasq-dnsmasq-dockercfg-v9w2q" Jan 20 16:56:06 crc kubenswrapper[4558]: I0120 16:56:06.644092 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-f5849d7b9-w2tw2"] Jan 20 16:56:06 crc kubenswrapper[4558]: I0120 16:56:06.673516 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-mltw6"] Jan 20 16:56:06 crc kubenswrapper[4558]: I0120 16:56:06.674474 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-mltw6" Jan 20 16:56:06 crc kubenswrapper[4558]: I0120 16:56:06.675832 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"dnsmasq-svc" Jan 20 16:56:06 crc kubenswrapper[4558]: I0120 16:56:06.685637 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-mltw6"] Jan 20 16:56:06 crc kubenswrapper[4558]: I0120 16:56:06.712089 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/72531f1f-c34a-4508-a054-8719bbaacb7c-config\") pod \"dnsmasq-dnsmasq-84b9f45d47-mltw6\" (UID: \"72531f1f-c34a-4508-a054-8719bbaacb7c\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-mltw6" Jan 20 16:56:06 crc kubenswrapper[4558]: I0120 16:56:06.712128 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/36908d16-3c7e-48f8-9056-113f1fd07dc8-config\") pod \"dnsmasq-dnsmasq-f5849d7b9-w2tw2\" (UID: \"36908d16-3c7e-48f8-9056-113f1fd07dc8\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-f5849d7b9-w2tw2" Jan 20 16:56:06 crc kubenswrapper[4558]: I0120 16:56:06.712178 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/72531f1f-c34a-4508-a054-8719bbaacb7c-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-84b9f45d47-mltw6\" (UID: \"72531f1f-c34a-4508-a054-8719bbaacb7c\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-mltw6" Jan 20 16:56:06 crc kubenswrapper[4558]: I0120 16:56:06.712313 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qw6x6\" (UniqueName: \"kubernetes.io/projected/72531f1f-c34a-4508-a054-8719bbaacb7c-kube-api-access-qw6x6\") pod \"dnsmasq-dnsmasq-84b9f45d47-mltw6\" (UID: \"72531f1f-c34a-4508-a054-8719bbaacb7c\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-mltw6" Jan 20 16:56:06 crc kubenswrapper[4558]: I0120 16:56:06.712519 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-659jc\" (UniqueName: \"kubernetes.io/projected/36908d16-3c7e-48f8-9056-113f1fd07dc8-kube-api-access-659jc\") pod \"dnsmasq-dnsmasq-f5849d7b9-w2tw2\" (UID: \"36908d16-3c7e-48f8-9056-113f1fd07dc8\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-f5849d7b9-w2tw2" Jan 20 16:56:06 crc kubenswrapper[4558]: I0120 16:56:06.813355 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qw6x6\" (UniqueName: \"kubernetes.io/projected/72531f1f-c34a-4508-a054-8719bbaacb7c-kube-api-access-qw6x6\") pod \"dnsmasq-dnsmasq-84b9f45d47-mltw6\" (UID: \"72531f1f-c34a-4508-a054-8719bbaacb7c\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-mltw6" Jan 20 16:56:06 crc kubenswrapper[4558]: I0120 16:56:06.813444 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-659jc\" (UniqueName: \"kubernetes.io/projected/36908d16-3c7e-48f8-9056-113f1fd07dc8-kube-api-access-659jc\") pod \"dnsmasq-dnsmasq-f5849d7b9-w2tw2\" (UID: \"36908d16-3c7e-48f8-9056-113f1fd07dc8\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-f5849d7b9-w2tw2" Jan 20 16:56:06 crc kubenswrapper[4558]: I0120 16:56:06.813480 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"config\" (UniqueName: \"kubernetes.io/configmap/72531f1f-c34a-4508-a054-8719bbaacb7c-config\") pod \"dnsmasq-dnsmasq-84b9f45d47-mltw6\" (UID: \"72531f1f-c34a-4508-a054-8719bbaacb7c\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-mltw6" Jan 20 16:56:06 crc kubenswrapper[4558]: I0120 16:56:06.813494 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/36908d16-3c7e-48f8-9056-113f1fd07dc8-config\") pod \"dnsmasq-dnsmasq-f5849d7b9-w2tw2\" (UID: \"36908d16-3c7e-48f8-9056-113f1fd07dc8\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-f5849d7b9-w2tw2" Jan 20 16:56:06 crc kubenswrapper[4558]: I0120 16:56:06.813511 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/72531f1f-c34a-4508-a054-8719bbaacb7c-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-84b9f45d47-mltw6\" (UID: \"72531f1f-c34a-4508-a054-8719bbaacb7c\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-mltw6" Jan 20 16:56:06 crc kubenswrapper[4558]: I0120 16:56:06.814212 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/72531f1f-c34a-4508-a054-8719bbaacb7c-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-84b9f45d47-mltw6\" (UID: \"72531f1f-c34a-4508-a054-8719bbaacb7c\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-mltw6" Jan 20 16:56:06 crc kubenswrapper[4558]: I0120 16:56:06.814674 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/72531f1f-c34a-4508-a054-8719bbaacb7c-config\") pod \"dnsmasq-dnsmasq-84b9f45d47-mltw6\" (UID: \"72531f1f-c34a-4508-a054-8719bbaacb7c\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-mltw6" Jan 20 16:56:06 crc kubenswrapper[4558]: I0120 16:56:06.814831 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/36908d16-3c7e-48f8-9056-113f1fd07dc8-config\") pod \"dnsmasq-dnsmasq-f5849d7b9-w2tw2\" (UID: \"36908d16-3c7e-48f8-9056-113f1fd07dc8\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-f5849d7b9-w2tw2" Jan 20 16:56:06 crc kubenswrapper[4558]: I0120 16:56:06.829579 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qw6x6\" (UniqueName: \"kubernetes.io/projected/72531f1f-c34a-4508-a054-8719bbaacb7c-kube-api-access-qw6x6\") pod \"dnsmasq-dnsmasq-84b9f45d47-mltw6\" (UID: \"72531f1f-c34a-4508-a054-8719bbaacb7c\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-mltw6" Jan 20 16:56:06 crc kubenswrapper[4558]: I0120 16:56:06.830088 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-659jc\" (UniqueName: \"kubernetes.io/projected/36908d16-3c7e-48f8-9056-113f1fd07dc8-kube-api-access-659jc\") pod \"dnsmasq-dnsmasq-f5849d7b9-w2tw2\" (UID: \"36908d16-3c7e-48f8-9056-113f1fd07dc8\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-f5849d7b9-w2tw2" Jan 20 16:56:06 crc kubenswrapper[4558]: I0120 16:56:06.952027 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-f5849d7b9-w2tw2" Jan 20 16:56:06 crc kubenswrapper[4558]: I0120 16:56:06.986635 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-mltw6" Jan 20 16:56:07 crc kubenswrapper[4558]: I0120 16:56:07.309346 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-f5849d7b9-w2tw2"] Jan 20 16:56:07 crc kubenswrapper[4558]: I0120 16:56:07.352012 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-mltw6"] Jan 20 16:56:07 crc kubenswrapper[4558]: W0120 16:56:07.358222 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod72531f1f_c34a_4508_a054_8719bbaacb7c.slice/crio-78c4ec1e964a9b740f63c33f146dbe854871651c436899662b2721259744bc9c WatchSource:0}: Error finding container 78c4ec1e964a9b740f63c33f146dbe854871651c436899662b2721259744bc9c: Status 404 returned error can't find the container with id 78c4ec1e964a9b740f63c33f146dbe854871651c436899662b2721259744bc9c Jan 20 16:56:08 crc kubenswrapper[4558]: I0120 16:56:08.268688 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-mltw6" event={"ID":"72531f1f-c34a-4508-a054-8719bbaacb7c","Type":"ContainerStarted","Data":"78c4ec1e964a9b740f63c33f146dbe854871651c436899662b2721259744bc9c"} Jan 20 16:56:08 crc kubenswrapper[4558]: I0120 16:56:08.269696 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-f5849d7b9-w2tw2" event={"ID":"36908d16-3c7e-48f8-9056-113f1fd07dc8","Type":"ContainerStarted","Data":"22bd349ea68b09aa4e19eb00caa09de58664bf1cf95c433a4340327ebf6e5cff"} Jan 20 16:56:15 crc kubenswrapper[4558]: I0120 16:56:15.569202 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-0"] Jan 20 16:56:15 crc kubenswrapper[4558]: I0120 16:56:15.570344 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 16:56:15 crc kubenswrapper[4558]: I0120 16:56:15.574408 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"rabbitmq-cell1-server-conf" Jan 20 16:56:15 crc kubenswrapper[4558]: I0120 16:56:15.574587 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"rabbitmq-cell1-plugins-conf" Jan 20 16:56:15 crc kubenswrapper[4558]: I0120 16:56:15.575212 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-rabbitmq-cell1-svc" Jan 20 16:56:15 crc kubenswrapper[4558]: I0120 16:56:15.575592 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"rabbitmq-cell1-default-user" Jan 20 16:56:15 crc kubenswrapper[4558]: I0120 16:56:15.575598 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"rabbitmq-cell1-server-dockercfg-27nch" Jan 20 16:56:15 crc kubenswrapper[4558]: I0120 16:56:15.575717 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"rabbitmq-cell1-config-data" Jan 20 16:56:15 crc kubenswrapper[4558]: I0120 16:56:15.575814 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"rabbitmq-cell1-erlang-cookie" Jan 20 16:56:15 crc kubenswrapper[4558]: I0120 16:56:15.580585 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-0"] Jan 20 16:56:15 crc kubenswrapper[4558]: I0120 16:56:15.731594 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/bf355276-9e62-474e-bfb1-616dde5b83bc-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"bf355276-9e62-474e-bfb1-616dde5b83bc\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 16:56:15 crc kubenswrapper[4558]: I0120 16:56:15.731658 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/bf355276-9e62-474e-bfb1-616dde5b83bc-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"bf355276-9e62-474e-bfb1-616dde5b83bc\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 16:56:15 crc kubenswrapper[4558]: I0120 16:56:15.731680 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/bf355276-9e62-474e-bfb1-616dde5b83bc-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"bf355276-9e62-474e-bfb1-616dde5b83bc\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 16:56:15 crc kubenswrapper[4558]: I0120 16:56:15.731793 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/bf355276-9e62-474e-bfb1-616dde5b83bc-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"bf355276-9e62-474e-bfb1-616dde5b83bc\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 16:56:15 crc kubenswrapper[4558]: I0120 16:56:15.731848 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/bf355276-9e62-474e-bfb1-616dde5b83bc-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"bf355276-9e62-474e-bfb1-616dde5b83bc\") " 
pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 16:56:15 crc kubenswrapper[4558]: I0120 16:56:15.731905 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/bf355276-9e62-474e-bfb1-616dde5b83bc-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"bf355276-9e62-474e-bfb1-616dde5b83bc\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 16:56:15 crc kubenswrapper[4558]: I0120 16:56:15.731921 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wfp6r\" (UniqueName: \"kubernetes.io/projected/bf355276-9e62-474e-bfb1-616dde5b83bc-kube-api-access-wfp6r\") pod \"rabbitmq-cell1-server-0\" (UID: \"bf355276-9e62-474e-bfb1-616dde5b83bc\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 16:56:15 crc kubenswrapper[4558]: I0120 16:56:15.731935 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/bf355276-9e62-474e-bfb1-616dde5b83bc-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"bf355276-9e62-474e-bfb1-616dde5b83bc\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 16:56:15 crc kubenswrapper[4558]: I0120 16:56:15.731956 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"bf355276-9e62-474e-bfb1-616dde5b83bc\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 16:56:15 crc kubenswrapper[4558]: I0120 16:56:15.731993 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/bf355276-9e62-474e-bfb1-616dde5b83bc-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"bf355276-9e62-474e-bfb1-616dde5b83bc\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 16:56:15 crc kubenswrapper[4558]: I0120 16:56:15.732016 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/bf355276-9e62-474e-bfb1-616dde5b83bc-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"bf355276-9e62-474e-bfb1-616dde5b83bc\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 16:56:15 crc kubenswrapper[4558]: I0120 16:56:15.832851 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/bf355276-9e62-474e-bfb1-616dde5b83bc-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"bf355276-9e62-474e-bfb1-616dde5b83bc\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 16:56:15 crc kubenswrapper[4558]: I0120 16:56:15.832888 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wfp6r\" (UniqueName: \"kubernetes.io/projected/bf355276-9e62-474e-bfb1-616dde5b83bc-kube-api-access-wfp6r\") pod \"rabbitmq-cell1-server-0\" (UID: \"bf355276-9e62-474e-bfb1-616dde5b83bc\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 16:56:15 crc kubenswrapper[4558]: I0120 16:56:15.832905 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: 
\"kubernetes.io/configmap/bf355276-9e62-474e-bfb1-616dde5b83bc-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"bf355276-9e62-474e-bfb1-616dde5b83bc\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 16:56:15 crc kubenswrapper[4558]: I0120 16:56:15.832922 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"bf355276-9e62-474e-bfb1-616dde5b83bc\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 16:56:15 crc kubenswrapper[4558]: I0120 16:56:15.832940 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/bf355276-9e62-474e-bfb1-616dde5b83bc-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"bf355276-9e62-474e-bfb1-616dde5b83bc\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 16:56:15 crc kubenswrapper[4558]: I0120 16:56:15.832958 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/bf355276-9e62-474e-bfb1-616dde5b83bc-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"bf355276-9e62-474e-bfb1-616dde5b83bc\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 16:56:15 crc kubenswrapper[4558]: I0120 16:56:15.833001 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/bf355276-9e62-474e-bfb1-616dde5b83bc-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"bf355276-9e62-474e-bfb1-616dde5b83bc\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 16:56:15 crc kubenswrapper[4558]: I0120 16:56:15.833024 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/bf355276-9e62-474e-bfb1-616dde5b83bc-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"bf355276-9e62-474e-bfb1-616dde5b83bc\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 16:56:15 crc kubenswrapper[4558]: I0120 16:56:15.833039 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/bf355276-9e62-474e-bfb1-616dde5b83bc-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"bf355276-9e62-474e-bfb1-616dde5b83bc\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 16:56:15 crc kubenswrapper[4558]: I0120 16:56:15.833064 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/bf355276-9e62-474e-bfb1-616dde5b83bc-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"bf355276-9e62-474e-bfb1-616dde5b83bc\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 16:56:15 crc kubenswrapper[4558]: I0120 16:56:15.833083 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/bf355276-9e62-474e-bfb1-616dde5b83bc-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"bf355276-9e62-474e-bfb1-616dde5b83bc\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 16:56:15 crc kubenswrapper[4558]: I0120 16:56:15.834344 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod 
\"rabbitmq-cell1-server-0\" (UID: \"bf355276-9e62-474e-bfb1-616dde5b83bc\") device mount path \"/mnt/openstack/pv02\"" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 16:56:15 crc kubenswrapper[4558]: I0120 16:56:15.834855 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/bf355276-9e62-474e-bfb1-616dde5b83bc-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"bf355276-9e62-474e-bfb1-616dde5b83bc\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 16:56:15 crc kubenswrapper[4558]: I0120 16:56:15.835044 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/bf355276-9e62-474e-bfb1-616dde5b83bc-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"bf355276-9e62-474e-bfb1-616dde5b83bc\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 16:56:15 crc kubenswrapper[4558]: I0120 16:56:15.836632 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/bf355276-9e62-474e-bfb1-616dde5b83bc-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"bf355276-9e62-474e-bfb1-616dde5b83bc\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 16:56:15 crc kubenswrapper[4558]: I0120 16:56:15.836683 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/bf355276-9e62-474e-bfb1-616dde5b83bc-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"bf355276-9e62-474e-bfb1-616dde5b83bc\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 16:56:15 crc kubenswrapper[4558]: I0120 16:56:15.837752 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/bf355276-9e62-474e-bfb1-616dde5b83bc-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"bf355276-9e62-474e-bfb1-616dde5b83bc\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 16:56:15 crc kubenswrapper[4558]: I0120 16:56:15.838222 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/bf355276-9e62-474e-bfb1-616dde5b83bc-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"bf355276-9e62-474e-bfb1-616dde5b83bc\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 16:56:15 crc kubenswrapper[4558]: I0120 16:56:15.839501 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/bf355276-9e62-474e-bfb1-616dde5b83bc-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"bf355276-9e62-474e-bfb1-616dde5b83bc\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 16:56:15 crc kubenswrapper[4558]: I0120 16:56:15.839401 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/bf355276-9e62-474e-bfb1-616dde5b83bc-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"bf355276-9e62-474e-bfb1-616dde5b83bc\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 16:56:15 crc kubenswrapper[4558]: I0120 16:56:15.840287 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/bf355276-9e62-474e-bfb1-616dde5b83bc-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"bf355276-9e62-474e-bfb1-616dde5b83bc\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 16:56:15 crc kubenswrapper[4558]: I0120 16:56:15.848975 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wfp6r\" (UniqueName: \"kubernetes.io/projected/bf355276-9e62-474e-bfb1-616dde5b83bc-kube-api-access-wfp6r\") pod \"rabbitmq-cell1-server-0\" (UID: \"bf355276-9e62-474e-bfb1-616dde5b83bc\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 16:56:15 crc kubenswrapper[4558]: I0120 16:56:15.856618 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"bf355276-9e62-474e-bfb1-616dde5b83bc\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 16:56:15 crc kubenswrapper[4558]: I0120 16:56:15.894722 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 16:56:16 crc kubenswrapper[4558]: I0120 16:56:16.356830 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 16:56:16 crc kubenswrapper[4558]: I0120 16:56:16.357909 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 16:56:16 crc kubenswrapper[4558]: I0120 16:56:16.359653 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"rabbitmq-erlang-cookie" Jan 20 16:56:16 crc kubenswrapper[4558]: I0120 16:56:16.359702 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"rabbitmq-server-conf" Jan 20 16:56:16 crc kubenswrapper[4558]: I0120 16:56:16.359666 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"rabbitmq-default-user" Jan 20 16:56:16 crc kubenswrapper[4558]: I0120 16:56:16.360973 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"rabbitmq-plugins-conf" Jan 20 16:56:16 crc kubenswrapper[4558]: I0120 16:56:16.363087 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-rabbitmq-svc" Jan 20 16:56:16 crc kubenswrapper[4558]: I0120 16:56:16.363243 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"rabbitmq-config-data" Jan 20 16:56:16 crc kubenswrapper[4558]: I0120 16:56:16.363451 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"rabbitmq-server-dockercfg-mtpwm" Jan 20 16:56:16 crc kubenswrapper[4558]: I0120 16:56:16.368304 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 16:56:16 crc kubenswrapper[4558]: I0120 16:56:16.543007 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage20-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage20-crc\") pod \"rabbitmq-server-0\" (UID: \"3f868eba-f4e1-4e32-b271-391cf271fe97\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 16:56:16 crc kubenswrapper[4558]: I0120 16:56:16.543100 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/3f868eba-f4e1-4e32-b271-391cf271fe97-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"3f868eba-f4e1-4e32-b271-391cf271fe97\") " 
pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 16:56:16 crc kubenswrapper[4558]: I0120 16:56:16.543137 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/3f868eba-f4e1-4e32-b271-391cf271fe97-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"3f868eba-f4e1-4e32-b271-391cf271fe97\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 16:56:16 crc kubenswrapper[4558]: I0120 16:56:16.543209 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/3f868eba-f4e1-4e32-b271-391cf271fe97-server-conf\") pod \"rabbitmq-server-0\" (UID: \"3f868eba-f4e1-4e32-b271-391cf271fe97\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 16:56:16 crc kubenswrapper[4558]: I0120 16:56:16.543261 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/3f868eba-f4e1-4e32-b271-391cf271fe97-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"3f868eba-f4e1-4e32-b271-391cf271fe97\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 16:56:16 crc kubenswrapper[4558]: I0120 16:56:16.543280 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/3f868eba-f4e1-4e32-b271-391cf271fe97-pod-info\") pod \"rabbitmq-server-0\" (UID: \"3f868eba-f4e1-4e32-b271-391cf271fe97\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 16:56:16 crc kubenswrapper[4558]: I0120 16:56:16.543333 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w8fm7\" (UniqueName: \"kubernetes.io/projected/3f868eba-f4e1-4e32-b271-391cf271fe97-kube-api-access-w8fm7\") pod \"rabbitmq-server-0\" (UID: \"3f868eba-f4e1-4e32-b271-391cf271fe97\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 16:56:16 crc kubenswrapper[4558]: I0120 16:56:16.543352 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3f868eba-f4e1-4e32-b271-391cf271fe97-config-data\") pod \"rabbitmq-server-0\" (UID: \"3f868eba-f4e1-4e32-b271-391cf271fe97\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 16:56:16 crc kubenswrapper[4558]: I0120 16:56:16.543401 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/3f868eba-f4e1-4e32-b271-391cf271fe97-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"3f868eba-f4e1-4e32-b271-391cf271fe97\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 16:56:16 crc kubenswrapper[4558]: I0120 16:56:16.543448 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/3f868eba-f4e1-4e32-b271-391cf271fe97-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"3f868eba-f4e1-4e32-b271-391cf271fe97\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 16:56:16 crc kubenswrapper[4558]: I0120 16:56:16.543464 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/3f868eba-f4e1-4e32-b271-391cf271fe97-plugins-conf\") pod \"rabbitmq-server-0\" (UID: 
\"3f868eba-f4e1-4e32-b271-391cf271fe97\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 16:56:16 crc kubenswrapper[4558]: I0120 16:56:16.644904 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage20-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage20-crc\") pod \"rabbitmq-server-0\" (UID: \"3f868eba-f4e1-4e32-b271-391cf271fe97\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 16:56:16 crc kubenswrapper[4558]: I0120 16:56:16.644950 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/3f868eba-f4e1-4e32-b271-391cf271fe97-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"3f868eba-f4e1-4e32-b271-391cf271fe97\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 16:56:16 crc kubenswrapper[4558]: I0120 16:56:16.644975 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/3f868eba-f4e1-4e32-b271-391cf271fe97-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"3f868eba-f4e1-4e32-b271-391cf271fe97\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 16:56:16 crc kubenswrapper[4558]: I0120 16:56:16.645000 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/3f868eba-f4e1-4e32-b271-391cf271fe97-server-conf\") pod \"rabbitmq-server-0\" (UID: \"3f868eba-f4e1-4e32-b271-391cf271fe97\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 16:56:16 crc kubenswrapper[4558]: I0120 16:56:16.645032 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/3f868eba-f4e1-4e32-b271-391cf271fe97-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"3f868eba-f4e1-4e32-b271-391cf271fe97\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 16:56:16 crc kubenswrapper[4558]: I0120 16:56:16.645051 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/3f868eba-f4e1-4e32-b271-391cf271fe97-pod-info\") pod \"rabbitmq-server-0\" (UID: \"3f868eba-f4e1-4e32-b271-391cf271fe97\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 16:56:16 crc kubenswrapper[4558]: I0120 16:56:16.645103 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w8fm7\" (UniqueName: \"kubernetes.io/projected/3f868eba-f4e1-4e32-b271-391cf271fe97-kube-api-access-w8fm7\") pod \"rabbitmq-server-0\" (UID: \"3f868eba-f4e1-4e32-b271-391cf271fe97\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 16:56:16 crc kubenswrapper[4558]: I0120 16:56:16.645111 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage20-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage20-crc\") pod \"rabbitmq-server-0\" (UID: \"3f868eba-f4e1-4e32-b271-391cf271fe97\") device mount path \"/mnt/openstack/pv20\"" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 16:56:16 crc kubenswrapper[4558]: I0120 16:56:16.645118 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3f868eba-f4e1-4e32-b271-391cf271fe97-config-data\") pod \"rabbitmq-server-0\" (UID: \"3f868eba-f4e1-4e32-b271-391cf271fe97\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 16:56:16 crc kubenswrapper[4558]: I0120 
16:56:16.645578 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/3f868eba-f4e1-4e32-b271-391cf271fe97-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"3f868eba-f4e1-4e32-b271-391cf271fe97\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 16:56:16 crc kubenswrapper[4558]: I0120 16:56:16.645626 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/3f868eba-f4e1-4e32-b271-391cf271fe97-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"3f868eba-f4e1-4e32-b271-391cf271fe97\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 16:56:16 crc kubenswrapper[4558]: I0120 16:56:16.645647 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/3f868eba-f4e1-4e32-b271-391cf271fe97-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"3f868eba-f4e1-4e32-b271-391cf271fe97\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 16:56:16 crc kubenswrapper[4558]: I0120 16:56:16.645837 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3f868eba-f4e1-4e32-b271-391cf271fe97-config-data\") pod \"rabbitmq-server-0\" (UID: \"3f868eba-f4e1-4e32-b271-391cf271fe97\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 16:56:16 crc kubenswrapper[4558]: I0120 16:56:16.646384 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/3f868eba-f4e1-4e32-b271-391cf271fe97-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"3f868eba-f4e1-4e32-b271-391cf271fe97\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 16:56:16 crc kubenswrapper[4558]: I0120 16:56:16.647078 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/3f868eba-f4e1-4e32-b271-391cf271fe97-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"3f868eba-f4e1-4e32-b271-391cf271fe97\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 16:56:16 crc kubenswrapper[4558]: I0120 16:56:16.647593 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/3f868eba-f4e1-4e32-b271-391cf271fe97-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"3f868eba-f4e1-4e32-b271-391cf271fe97\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 16:56:16 crc kubenswrapper[4558]: I0120 16:56:16.647927 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/3f868eba-f4e1-4e32-b271-391cf271fe97-server-conf\") pod \"rabbitmq-server-0\" (UID: \"3f868eba-f4e1-4e32-b271-391cf271fe97\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 16:56:16 crc kubenswrapper[4558]: I0120 16:56:16.649932 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/3f868eba-f4e1-4e32-b271-391cf271fe97-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"3f868eba-f4e1-4e32-b271-391cf271fe97\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 16:56:16 crc kubenswrapper[4558]: I0120 16:56:16.655584 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: 
\"kubernetes.io/downward-api/3f868eba-f4e1-4e32-b271-391cf271fe97-pod-info\") pod \"rabbitmq-server-0\" (UID: \"3f868eba-f4e1-4e32-b271-391cf271fe97\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 16:56:16 crc kubenswrapper[4558]: I0120 16:56:16.658372 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/3f868eba-f4e1-4e32-b271-391cf271fe97-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"3f868eba-f4e1-4e32-b271-391cf271fe97\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 16:56:16 crc kubenswrapper[4558]: I0120 16:56:16.662059 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w8fm7\" (UniqueName: \"kubernetes.io/projected/3f868eba-f4e1-4e32-b271-391cf271fe97-kube-api-access-w8fm7\") pod \"rabbitmq-server-0\" (UID: \"3f868eba-f4e1-4e32-b271-391cf271fe97\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 16:56:16 crc kubenswrapper[4558]: I0120 16:56:16.666488 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/3f868eba-f4e1-4e32-b271-391cf271fe97-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"3f868eba-f4e1-4e32-b271-391cf271fe97\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 16:56:16 crc kubenswrapper[4558]: I0120 16:56:16.668747 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage20-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage20-crc\") pod \"rabbitmq-server-0\" (UID: \"3f868eba-f4e1-4e32-b271-391cf271fe97\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 16:56:16 crc kubenswrapper[4558]: I0120 16:56:16.679043 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 16:56:17 crc kubenswrapper[4558]: I0120 16:56:17.756033 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 16:56:17 crc kubenswrapper[4558]: I0120 16:56:17.757105 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 16:56:17 crc kubenswrapper[4558]: I0120 16:56:17.760705 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-galera-openstack-svc" Jan 20 16:56:17 crc kubenswrapper[4558]: I0120 16:56:17.763928 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"galera-openstack-dockercfg-7wr65" Jan 20 16:56:17 crc kubenswrapper[4558]: I0120 16:56:17.764209 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-scripts" Jan 20 16:56:17 crc kubenswrapper[4558]: I0120 16:56:17.764732 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-config-data" Jan 20 16:56:17 crc kubenswrapper[4558]: I0120 16:56:17.766194 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"combined-ca-bundle" Jan 20 16:56:17 crc kubenswrapper[4558]: I0120 16:56:17.766635 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 16:56:17 crc kubenswrapper[4558]: I0120 16:56:17.863271 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 16:56:17 crc kubenswrapper[4558]: I0120 16:56:17.863325 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae-kolla-config\") pod \"openstack-galera-0\" (UID: \"0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 16:56:17 crc kubenswrapper[4558]: I0120 16:56:17.863394 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mxkf7\" (UniqueName: \"kubernetes.io/projected/0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae-kube-api-access-mxkf7\") pod \"openstack-galera-0\" (UID: \"0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 16:56:17 crc kubenswrapper[4558]: I0120 16:56:17.863421 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae-operator-scripts\") pod \"openstack-galera-0\" (UID: \"0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 16:56:17 crc kubenswrapper[4558]: I0120 16:56:17.863436 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 16:56:17 crc kubenswrapper[4558]: I0120 16:56:17.863464 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae-config-data-default\") pod \"openstack-galera-0\" (UID: \"0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae\") " 
pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 16:56:17 crc kubenswrapper[4558]: I0120 16:56:17.863487 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"openstack-galera-0\" (UID: \"0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 16:56:17 crc kubenswrapper[4558]: I0120 16:56:17.863503 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae-config-data-generated\") pod \"openstack-galera-0\" (UID: \"0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 16:56:17 crc kubenswrapper[4558]: I0120 16:56:17.964350 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 16:56:17 crc kubenswrapper[4558]: I0120 16:56:17.964422 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae-kolla-config\") pod \"openstack-galera-0\" (UID: \"0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 16:56:17 crc kubenswrapper[4558]: I0120 16:56:17.964488 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mxkf7\" (UniqueName: \"kubernetes.io/projected/0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae-kube-api-access-mxkf7\") pod \"openstack-galera-0\" (UID: \"0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 16:56:17 crc kubenswrapper[4558]: I0120 16:56:17.964511 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae-operator-scripts\") pod \"openstack-galera-0\" (UID: \"0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 16:56:17 crc kubenswrapper[4558]: I0120 16:56:17.964524 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 16:56:17 crc kubenswrapper[4558]: I0120 16:56:17.964557 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae-config-data-default\") pod \"openstack-galera-0\" (UID: \"0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 16:56:17 crc kubenswrapper[4558]: I0120 16:56:17.964583 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"openstack-galera-0\" (UID: \"0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 16:56:17 crc kubenswrapper[4558]: 
I0120 16:56:17.964598 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae-config-data-generated\") pod \"openstack-galera-0\" (UID: \"0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 16:56:17 crc kubenswrapper[4558]: I0120 16:56:17.965007 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae-config-data-generated\") pod \"openstack-galera-0\" (UID: \"0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 16:56:17 crc kubenswrapper[4558]: I0120 16:56:17.966394 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae-config-data-default\") pod \"openstack-galera-0\" (UID: \"0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 16:56:17 crc kubenswrapper[4558]: I0120 16:56:17.966720 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae-operator-scripts\") pod \"openstack-galera-0\" (UID: \"0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 16:56:17 crc kubenswrapper[4558]: I0120 16:56:17.966776 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"openstack-galera-0\" (UID: \"0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae\") device mount path \"/mnt/openstack/pv13\"" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 16:56:17 crc kubenswrapper[4558]: I0120 16:56:17.967138 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae-kolla-config\") pod \"openstack-galera-0\" (UID: \"0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 16:56:17 crc kubenswrapper[4558]: I0120 16:56:17.971457 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 16:56:17 crc kubenswrapper[4558]: I0120 16:56:17.976639 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 16:56:17 crc kubenswrapper[4558]: I0120 16:56:17.980304 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mxkf7\" (UniqueName: \"kubernetes.io/projected/0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae-kube-api-access-mxkf7\") pod \"openstack-galera-0\" (UID: \"0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 16:56:17 crc kubenswrapper[4558]: I0120 16:56:17.993338 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"openstack-galera-0\" (UID: \"0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 16:56:18 crc kubenswrapper[4558]: I0120 16:56:18.075447 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 16:56:19 crc kubenswrapper[4558]: I0120 16:56:19.218402 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 16:56:19 crc kubenswrapper[4558]: I0120 16:56:19.222946 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 16:56:19 crc kubenswrapper[4558]: I0120 16:56:19.228804 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"galera-openstack-cell1-dockercfg-npsbd" Jan 20 16:56:19 crc kubenswrapper[4558]: I0120 16:56:19.228951 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-galera-openstack-cell1-svc" Jan 20 16:56:19 crc kubenswrapper[4558]: I0120 16:56:19.229051 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 16:56:19 crc kubenswrapper[4558]: I0120 16:56:19.229432 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-cell1-scripts" Jan 20 16:56:19 crc kubenswrapper[4558]: I0120 16:56:19.229521 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-cell1-config-data" Jan 20 16:56:19 crc kubenswrapper[4558]: I0120 16:56:19.386960 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/6374f20d-dc8a-4f1c-9df3-086e6904b394-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"6374f20d-dc8a-4f1c-9df3-086e6904b394\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 16:56:19 crc kubenswrapper[4558]: I0120 16:56:19.387219 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6374f20d-dc8a-4f1c-9df3-086e6904b394-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"6374f20d-dc8a-4f1c-9df3-086e6904b394\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 16:56:19 crc kubenswrapper[4558]: I0120 16:56:19.387241 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/6374f20d-dc8a-4f1c-9df3-086e6904b394-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"6374f20d-dc8a-4f1c-9df3-086e6904b394\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 16:56:19 crc kubenswrapper[4558]: I0120 16:56:19.387272 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage17-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage17-crc\") pod \"openstack-cell1-galera-0\" (UID: \"6374f20d-dc8a-4f1c-9df3-086e6904b394\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 16:56:19 crc kubenswrapper[4558]: I0120 16:56:19.387289 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/6374f20d-dc8a-4f1c-9df3-086e6904b394-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"6374f20d-dc8a-4f1c-9df3-086e6904b394\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 16:56:19 crc kubenswrapper[4558]: I0120 16:56:19.387313 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/6374f20d-dc8a-4f1c-9df3-086e6904b394-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"6374f20d-dc8a-4f1c-9df3-086e6904b394\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 16:56:19 crc kubenswrapper[4558]: I0120 16:56:19.387336 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/6374f20d-dc8a-4f1c-9df3-086e6904b394-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"6374f20d-dc8a-4f1c-9df3-086e6904b394\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 16:56:19 crc kubenswrapper[4558]: I0120 16:56:19.387375 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-97bg4\" (UniqueName: \"kubernetes.io/projected/6374f20d-dc8a-4f1c-9df3-086e6904b394-kube-api-access-97bg4\") pod \"openstack-cell1-galera-0\" (UID: \"6374f20d-dc8a-4f1c-9df3-086e6904b394\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 16:56:19 crc kubenswrapper[4558]: I0120 16:56:19.488736 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage17-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage17-crc\") pod \"openstack-cell1-galera-0\" (UID: \"6374f20d-dc8a-4f1c-9df3-086e6904b394\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 16:56:19 crc kubenswrapper[4558]: I0120 16:56:19.488787 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6374f20d-dc8a-4f1c-9df3-086e6904b394-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"6374f20d-dc8a-4f1c-9df3-086e6904b394\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 16:56:19 crc kubenswrapper[4558]: I0120 16:56:19.488823 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/6374f20d-dc8a-4f1c-9df3-086e6904b394-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"6374f20d-dc8a-4f1c-9df3-086e6904b394\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 16:56:19 crc kubenswrapper[4558]: I0120 16:56:19.488860 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/6374f20d-dc8a-4f1c-9df3-086e6904b394-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"6374f20d-dc8a-4f1c-9df3-086e6904b394\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 16:56:19 crc kubenswrapper[4558]: I0120 16:56:19.488926 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-97bg4\" (UniqueName: \"kubernetes.io/projected/6374f20d-dc8a-4f1c-9df3-086e6904b394-kube-api-access-97bg4\") pod \"openstack-cell1-galera-0\" (UID: \"6374f20d-dc8a-4f1c-9df3-086e6904b394\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 16:56:19 crc kubenswrapper[4558]: I0120 16:56:19.488986 4558 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/6374f20d-dc8a-4f1c-9df3-086e6904b394-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"6374f20d-dc8a-4f1c-9df3-086e6904b394\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 16:56:19 crc kubenswrapper[4558]: I0120 16:56:19.489000 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6374f20d-dc8a-4f1c-9df3-086e6904b394-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"6374f20d-dc8a-4f1c-9df3-086e6904b394\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 16:56:19 crc kubenswrapper[4558]: I0120 16:56:19.489023 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/6374f20d-dc8a-4f1c-9df3-086e6904b394-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"6374f20d-dc8a-4f1c-9df3-086e6904b394\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 16:56:19 crc kubenswrapper[4558]: I0120 16:56:19.489053 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage17-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage17-crc\") pod \"openstack-cell1-galera-0\" (UID: \"6374f20d-dc8a-4f1c-9df3-086e6904b394\") device mount path \"/mnt/openstack/pv17\"" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 16:56:19 crc kubenswrapper[4558]: I0120 16:56:19.489327 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/6374f20d-dc8a-4f1c-9df3-086e6904b394-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"6374f20d-dc8a-4f1c-9df3-086e6904b394\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 16:56:19 crc kubenswrapper[4558]: I0120 16:56:19.489801 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/6374f20d-dc8a-4f1c-9df3-086e6904b394-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"6374f20d-dc8a-4f1c-9df3-086e6904b394\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 16:56:19 crc kubenswrapper[4558]: I0120 16:56:19.490053 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/6374f20d-dc8a-4f1c-9df3-086e6904b394-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"6374f20d-dc8a-4f1c-9df3-086e6904b394\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 16:56:19 crc kubenswrapper[4558]: I0120 16:56:19.490517 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6374f20d-dc8a-4f1c-9df3-086e6904b394-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"6374f20d-dc8a-4f1c-9df3-086e6904b394\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 16:56:19 crc kubenswrapper[4558]: I0120 16:56:19.492852 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/6374f20d-dc8a-4f1c-9df3-086e6904b394-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"6374f20d-dc8a-4f1c-9df3-086e6904b394\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 16:56:19 crc kubenswrapper[4558]: I0120 16:56:19.500894 4558 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6374f20d-dc8a-4f1c-9df3-086e6904b394-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"6374f20d-dc8a-4f1c-9df3-086e6904b394\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 16:56:19 crc kubenswrapper[4558]: I0120 16:56:19.507491 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-97bg4\" (UniqueName: \"kubernetes.io/projected/6374f20d-dc8a-4f1c-9df3-086e6904b394-kube-api-access-97bg4\") pod \"openstack-cell1-galera-0\" (UID: \"6374f20d-dc8a-4f1c-9df3-086e6904b394\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 16:56:19 crc kubenswrapper[4558]: I0120 16:56:19.508612 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage17-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage17-crc\") pod \"openstack-cell1-galera-0\" (UID: \"6374f20d-dc8a-4f1c-9df3-086e6904b394\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 16:56:19 crc kubenswrapper[4558]: I0120 16:56:19.565369 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 16:56:19 crc kubenswrapper[4558]: I0120 16:56:19.567015 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 16:56:19 crc kubenswrapper[4558]: I0120 16:56:19.572160 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 16:56:19 crc kubenswrapper[4558]: I0120 16:56:19.577430 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-0"] Jan 20 16:56:19 crc kubenswrapper[4558]: W0120 16:56:19.577923 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3f868eba_f4e1_4e32_b271_391cf271fe97.slice/crio-f311bc2280f5763ffdea30dc963d623368834e0e8968e555959763b8021c1baa WatchSource:0}: Error finding container f311bc2280f5763ffdea30dc963d623368834e0e8968e555959763b8021c1baa: Status 404 returned error can't find the container with id f311bc2280f5763ffdea30dc963d623368834e0e8968e555959763b8021c1baa Jan 20 16:56:19 crc kubenswrapper[4558]: W0120 16:56:19.579070 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0e28ce69_8b5d_4e5e_b8bb_860e6e3745ae.slice/crio-9386e80aa945354b7a1f22d3025aa79ac74ed014a7a4ee404aae6cc4b3238b4f WatchSource:0}: Error finding container 9386e80aa945354b7a1f22d3025aa79ac74ed014a7a4ee404aae6cc4b3238b4f: Status 404 returned error can't find the container with id 9386e80aa945354b7a1f22d3025aa79ac74ed014a7a4ee404aae6cc4b3238b4f Jan 20 16:56:19 crc kubenswrapper[4558]: W0120 16:56:19.583911 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbf355276_9e62_474e_bfb1_616dde5b83bc.slice/crio-724277ae417b63e0868ec82c9d4f15f8b41d086acf0d26215797be7ac3368f0c WatchSource:0}: Error finding container 724277ae417b63e0868ec82c9d4f15f8b41d086acf0d26215797be7ac3368f0c: Status 404 returned error can't find the container with id 724277ae417b63e0868ec82c9d4f15f8b41d086acf0d26215797be7ac3368f0c Jan 20 16:56:19 crc kubenswrapper[4558]: I0120 16:56:19.604052 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 16:56:19 crc 
kubenswrapper[4558]: I0120 16:56:19.605174 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 16:56:19 crc kubenswrapper[4558]: I0120 16:56:19.607947 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-memcached-svc" Jan 20 16:56:19 crc kubenswrapper[4558]: I0120 16:56:19.609831 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"memcached-config-data" Jan 20 16:56:19 crc kubenswrapper[4558]: I0120 16:56:19.610035 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"memcached-memcached-dockercfg-4kbpc" Jan 20 16:56:19 crc kubenswrapper[4558]: I0120 16:56:19.618478 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 16:56:19 crc kubenswrapper[4558]: I0120 16:56:19.691892 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/324ace6f-8bac-4269-a674-d9b6e990cd18-combined-ca-bundle\") pod \"memcached-0\" (UID: \"324ace6f-8bac-4269-a674-d9b6e990cd18\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 16:56:19 crc kubenswrapper[4558]: I0120 16:56:19.691953 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-st8tp\" (UniqueName: \"kubernetes.io/projected/324ace6f-8bac-4269-a674-d9b6e990cd18-kube-api-access-st8tp\") pod \"memcached-0\" (UID: \"324ace6f-8bac-4269-a674-d9b6e990cd18\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 16:56:19 crc kubenswrapper[4558]: I0120 16:56:19.692658 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/324ace6f-8bac-4269-a674-d9b6e990cd18-config-data\") pod \"memcached-0\" (UID: \"324ace6f-8bac-4269-a674-d9b6e990cd18\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 16:56:19 crc kubenswrapper[4558]: I0120 16:56:19.692690 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/324ace6f-8bac-4269-a674-d9b6e990cd18-kolla-config\") pod \"memcached-0\" (UID: \"324ace6f-8bac-4269-a674-d9b6e990cd18\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 16:56:19 crc kubenswrapper[4558]: I0120 16:56:19.692848 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/324ace6f-8bac-4269-a674-d9b6e990cd18-memcached-tls-certs\") pod \"memcached-0\" (UID: \"324ace6f-8bac-4269-a674-d9b6e990cd18\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 16:56:19 crc kubenswrapper[4558]: I0120 16:56:19.794543 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/324ace6f-8bac-4269-a674-d9b6e990cd18-combined-ca-bundle\") pod \"memcached-0\" (UID: \"324ace6f-8bac-4269-a674-d9b6e990cd18\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 16:56:19 crc kubenswrapper[4558]: I0120 16:56:19.794612 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-st8tp\" (UniqueName: \"kubernetes.io/projected/324ace6f-8bac-4269-a674-d9b6e990cd18-kube-api-access-st8tp\") pod \"memcached-0\" (UID: \"324ace6f-8bac-4269-a674-d9b6e990cd18\") " 
pod="openstack-kuttl-tests/memcached-0" Jan 20 16:56:19 crc kubenswrapper[4558]: I0120 16:56:19.794673 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/324ace6f-8bac-4269-a674-d9b6e990cd18-config-data\") pod \"memcached-0\" (UID: \"324ace6f-8bac-4269-a674-d9b6e990cd18\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 16:56:19 crc kubenswrapper[4558]: I0120 16:56:19.794706 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/324ace6f-8bac-4269-a674-d9b6e990cd18-kolla-config\") pod \"memcached-0\" (UID: \"324ace6f-8bac-4269-a674-d9b6e990cd18\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 16:56:19 crc kubenswrapper[4558]: I0120 16:56:19.794802 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/324ace6f-8bac-4269-a674-d9b6e990cd18-memcached-tls-certs\") pod \"memcached-0\" (UID: \"324ace6f-8bac-4269-a674-d9b6e990cd18\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 16:56:19 crc kubenswrapper[4558]: I0120 16:56:19.796065 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/324ace6f-8bac-4269-a674-d9b6e990cd18-kolla-config\") pod \"memcached-0\" (UID: \"324ace6f-8bac-4269-a674-d9b6e990cd18\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 16:56:19 crc kubenswrapper[4558]: I0120 16:56:19.796103 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/324ace6f-8bac-4269-a674-d9b6e990cd18-config-data\") pod \"memcached-0\" (UID: \"324ace6f-8bac-4269-a674-d9b6e990cd18\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 16:56:19 crc kubenswrapper[4558]: I0120 16:56:19.798923 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/324ace6f-8bac-4269-a674-d9b6e990cd18-memcached-tls-certs\") pod \"memcached-0\" (UID: \"324ace6f-8bac-4269-a674-d9b6e990cd18\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 16:56:19 crc kubenswrapper[4558]: I0120 16:56:19.798936 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/324ace6f-8bac-4269-a674-d9b6e990cd18-combined-ca-bundle\") pod \"memcached-0\" (UID: \"324ace6f-8bac-4269-a674-d9b6e990cd18\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 16:56:19 crc kubenswrapper[4558]: I0120 16:56:19.809707 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-st8tp\" (UniqueName: \"kubernetes.io/projected/324ace6f-8bac-4269-a674-d9b6e990cd18-kube-api-access-st8tp\") pod \"memcached-0\" (UID: \"324ace6f-8bac-4269-a674-d9b6e990cd18\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 16:56:19 crc kubenswrapper[4558]: I0120 16:56:19.924725 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 16:56:19 crc kubenswrapper[4558]: I0120 16:56:19.971034 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 16:56:19 crc kubenswrapper[4558]: W0120 16:56:19.976820 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6374f20d_dc8a_4f1c_9df3_086e6904b394.slice/crio-d35a18247b7e91cdcddd888317e88cf08b8eb185e0a9afca25f7db86dfb4e278 WatchSource:0}: Error finding container d35a18247b7e91cdcddd888317e88cf08b8eb185e0a9afca25f7db86dfb4e278: Status 404 returned error can't find the container with id d35a18247b7e91cdcddd888317e88cf08b8eb185e0a9afca25f7db86dfb4e278 Jan 20 16:56:20 crc kubenswrapper[4558]: I0120 16:56:20.288567 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 16:56:20 crc kubenswrapper[4558]: W0120 16:56:20.290775 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod324ace6f_8bac_4269_a674_d9b6e990cd18.slice/crio-2596754423a638526c01cacc69d8c2528039a5fb4569f763f1e4c4fc4242e1cc WatchSource:0}: Error finding container 2596754423a638526c01cacc69d8c2528039a5fb4569f763f1e4c4fc4242e1cc: Status 404 returned error can't find the container with id 2596754423a638526c01cacc69d8c2528039a5fb4569f763f1e4c4fc4242e1cc Jan 20 16:56:20 crc kubenswrapper[4558]: I0120 16:56:20.347382 4558 generic.go:334] "Generic (PLEG): container finished" podID="72531f1f-c34a-4508-a054-8719bbaacb7c" containerID="acc0a33c2aaa9b94930f16919358442e245b006e8f71a86cd096169e5583bbd4" exitCode=0 Jan 20 16:56:20 crc kubenswrapper[4558]: I0120 16:56:20.347438 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-mltw6" event={"ID":"72531f1f-c34a-4508-a054-8719bbaacb7c","Type":"ContainerDied","Data":"acc0a33c2aaa9b94930f16919358442e245b006e8f71a86cd096169e5583bbd4"} Jan 20 16:56:20 crc kubenswrapper[4558]: I0120 16:56:20.348966 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae","Type":"ContainerStarted","Data":"9386e80aa945354b7a1f22d3025aa79ac74ed014a7a4ee404aae6cc4b3238b4f"} Jan 20 16:56:20 crc kubenswrapper[4558]: I0120 16:56:20.350058 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"6374f20d-dc8a-4f1c-9df3-086e6904b394","Type":"ContainerStarted","Data":"d35a18247b7e91cdcddd888317e88cf08b8eb185e0a9afca25f7db86dfb4e278"} Jan 20 16:56:20 crc kubenswrapper[4558]: I0120 16:56:20.351368 4558 generic.go:334] "Generic (PLEG): container finished" podID="36908d16-3c7e-48f8-9056-113f1fd07dc8" containerID="b3cd74923ab6240f3861d9e1bcd85b2038839e8f660d39e58ca41ed3d85b6e24" exitCode=0 Jan 20 16:56:20 crc kubenswrapper[4558]: I0120 16:56:20.351418 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-f5849d7b9-w2tw2" event={"ID":"36908d16-3c7e-48f8-9056-113f1fd07dc8","Type":"ContainerDied","Data":"b3cd74923ab6240f3861d9e1bcd85b2038839e8f660d39e58ca41ed3d85b6e24"} Jan 20 16:56:20 crc kubenswrapper[4558]: I0120 16:56:20.352325 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" 
event={"ID":"324ace6f-8bac-4269-a674-d9b6e990cd18","Type":"ContainerStarted","Data":"2596754423a638526c01cacc69d8c2528039a5fb4569f763f1e4c4fc4242e1cc"} Jan 20 16:56:20 crc kubenswrapper[4558]: I0120 16:56:20.353088 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-0" event={"ID":"3f868eba-f4e1-4e32-b271-391cf271fe97","Type":"ContainerStarted","Data":"f311bc2280f5763ffdea30dc963d623368834e0e8968e555959763b8021c1baa"} Jan 20 16:56:20 crc kubenswrapper[4558]: I0120 16:56:20.354480 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" event={"ID":"bf355276-9e62-474e-bfb1-616dde5b83bc","Type":"ContainerStarted","Data":"724277ae417b63e0868ec82c9d4f15f8b41d086acf0d26215797be7ac3368f0c"} Jan 20 16:56:21 crc kubenswrapper[4558]: I0120 16:56:21.362483 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-mltw6" event={"ID":"72531f1f-c34a-4508-a054-8719bbaacb7c","Type":"ContainerStarted","Data":"ff91bdcda632f425d0a1a57174bb650bdcf98af307215e589377f26846ce7470"} Jan 20 16:56:21 crc kubenswrapper[4558]: I0120 16:56:21.362567 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-mltw6" Jan 20 16:56:21 crc kubenswrapper[4558]: I0120 16:56:21.364265 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-f5849d7b9-w2tw2" event={"ID":"36908d16-3c7e-48f8-9056-113f1fd07dc8","Type":"ContainerStarted","Data":"d456a86632118b638ae89546d8849001b1284b74d73678e38b39d7443a755cfa"} Jan 20 16:56:21 crc kubenswrapper[4558]: I0120 16:56:21.382370 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-mltw6" podStartSLOduration=3.57123019 podStartE2EDuration="15.382358445s" podCreationTimestamp="2026-01-20 16:56:06 +0000 UTC" firstStartedPulling="2026-01-20 16:56:07.359821947 +0000 UTC m=+861.120159914" lastFinishedPulling="2026-01-20 16:56:19.170950202 +0000 UTC m=+872.931288169" observedRunningTime="2026-01-20 16:56:21.378507333 +0000 UTC m=+875.138845301" watchObservedRunningTime="2026-01-20 16:56:21.382358445 +0000 UTC m=+875.142696411" Jan 20 16:56:21 crc kubenswrapper[4558]: I0120 16:56:21.401627 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-f5849d7b9-w2tw2" podStartSLOduration=3.523704205 podStartE2EDuration="15.401612289s" podCreationTimestamp="2026-01-20 16:56:06 +0000 UTC" firstStartedPulling="2026-01-20 16:56:07.317038138 +0000 UTC m=+861.077376105" lastFinishedPulling="2026-01-20 16:56:19.194946222 +0000 UTC m=+872.955284189" observedRunningTime="2026-01-20 16:56:21.393425855 +0000 UTC m=+875.153763822" watchObservedRunningTime="2026-01-20 16:56:21.401612289 +0000 UTC m=+875.161950255" Jan 20 16:56:21 crc kubenswrapper[4558]: I0120 16:56:21.546086 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 16:56:21 crc kubenswrapper[4558]: I0120 16:56:21.546887 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 16:56:21 crc kubenswrapper[4558]: I0120 16:56:21.549537 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"telemetry-ceilometer-dockercfg-4jpf4" Jan 20 16:56:21 crc kubenswrapper[4558]: I0120 16:56:21.587298 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 16:56:21 crc kubenswrapper[4558]: I0120 16:56:21.622872 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sb7ld\" (UniqueName: \"kubernetes.io/projected/7bec7fd7-edb6-4186-9fbb-09b10b49b280-kube-api-access-sb7ld\") pod \"kube-state-metrics-0\" (UID: \"7bec7fd7-edb6-4186-9fbb-09b10b49b280\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 16:56:21 crc kubenswrapper[4558]: I0120 16:56:21.731855 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sb7ld\" (UniqueName: \"kubernetes.io/projected/7bec7fd7-edb6-4186-9fbb-09b10b49b280-kube-api-access-sb7ld\") pod \"kube-state-metrics-0\" (UID: \"7bec7fd7-edb6-4186-9fbb-09b10b49b280\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 16:56:21 crc kubenswrapper[4558]: I0120 16:56:21.760604 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sb7ld\" (UniqueName: \"kubernetes.io/projected/7bec7fd7-edb6-4186-9fbb-09b10b49b280-kube-api-access-sb7ld\") pod \"kube-state-metrics-0\" (UID: \"7bec7fd7-edb6-4186-9fbb-09b10b49b280\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 16:56:21 crc kubenswrapper[4558]: I0120 16:56:21.867648 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 16:56:21 crc kubenswrapper[4558]: I0120 16:56:21.952334 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-f5849d7b9-w2tw2" Jan 20 16:56:22 crc kubenswrapper[4558]: I0120 16:56:22.273009 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 16:56:23 crc kubenswrapper[4558]: I0120 16:56:23.380937 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"7bec7fd7-edb6-4186-9fbb-09b10b49b280","Type":"ContainerStarted","Data":"9996a5b8da748425da2cc85e8d1b163f6b641d7fcd97c9ff5115ebc81c536103"} Jan 20 16:56:25 crc kubenswrapper[4558]: I0120 16:56:25.429654 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 16:56:25 crc kubenswrapper[4558]: I0120 16:56:25.431190 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 16:56:25 crc kubenswrapper[4558]: I0120 16:56:25.433542 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ovncluster-ovndbcluster-nb-dockercfg-mht55" Jan 20 16:56:25 crc kubenswrapper[4558]: I0120 16:56:25.433697 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovndbcluster-nb-config" Jan 20 16:56:25 crc kubenswrapper[4558]: I0120 16:56:25.433813 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ovndbcluster-nb-ovndbs" Jan 20 16:56:25 crc kubenswrapper[4558]: I0120 16:56:25.433947 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovndbcluster-nb-scripts" Jan 20 16:56:25 crc kubenswrapper[4558]: I0120 16:56:25.434000 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ovn-metrics" Jan 20 16:56:25 crc kubenswrapper[4558]: I0120 16:56:25.475047 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 16:56:25 crc kubenswrapper[4558]: I0120 16:56:25.632502 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-nb-0\" (UID: \"8805bbf2-dd7b-41db-89ee-8b1d3053bf02\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 16:56:25 crc kubenswrapper[4558]: I0120 16:56:25.632590 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/8805bbf2-dd7b-41db-89ee-8b1d3053bf02-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"8805bbf2-dd7b-41db-89ee-8b1d3053bf02\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 16:56:25 crc kubenswrapper[4558]: I0120 16:56:25.632662 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/8805bbf2-dd7b-41db-89ee-8b1d3053bf02-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"8805bbf2-dd7b-41db-89ee-8b1d3053bf02\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 16:56:25 crc kubenswrapper[4558]: I0120 16:56:25.632692 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8805bbf2-dd7b-41db-89ee-8b1d3053bf02-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"8805bbf2-dd7b-41db-89ee-8b1d3053bf02\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 16:56:25 crc kubenswrapper[4558]: I0120 16:56:25.632732 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cccjj\" (UniqueName: \"kubernetes.io/projected/8805bbf2-dd7b-41db-89ee-8b1d3053bf02-kube-api-access-cccjj\") pod \"ovsdbserver-nb-0\" (UID: \"8805bbf2-dd7b-41db-89ee-8b1d3053bf02\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 16:56:25 crc kubenswrapper[4558]: I0120 16:56:25.632746 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8805bbf2-dd7b-41db-89ee-8b1d3053bf02-config\") pod \"ovsdbserver-nb-0\" (UID: \"8805bbf2-dd7b-41db-89ee-8b1d3053bf02\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 16:56:25 crc 
kubenswrapper[4558]: I0120 16:56:25.632762 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/8805bbf2-dd7b-41db-89ee-8b1d3053bf02-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"8805bbf2-dd7b-41db-89ee-8b1d3053bf02\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 16:56:25 crc kubenswrapper[4558]: I0120 16:56:25.632787 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8805bbf2-dd7b-41db-89ee-8b1d3053bf02-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"8805bbf2-dd7b-41db-89ee-8b1d3053bf02\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 16:56:25 crc kubenswrapper[4558]: I0120 16:56:25.734050 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/8805bbf2-dd7b-41db-89ee-8b1d3053bf02-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"8805bbf2-dd7b-41db-89ee-8b1d3053bf02\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 16:56:25 crc kubenswrapper[4558]: I0120 16:56:25.734206 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/8805bbf2-dd7b-41db-89ee-8b1d3053bf02-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"8805bbf2-dd7b-41db-89ee-8b1d3053bf02\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 16:56:25 crc kubenswrapper[4558]: I0120 16:56:25.734254 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8805bbf2-dd7b-41db-89ee-8b1d3053bf02-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"8805bbf2-dd7b-41db-89ee-8b1d3053bf02\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 16:56:25 crc kubenswrapper[4558]: I0120 16:56:25.734372 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cccjj\" (UniqueName: \"kubernetes.io/projected/8805bbf2-dd7b-41db-89ee-8b1d3053bf02-kube-api-access-cccjj\") pod \"ovsdbserver-nb-0\" (UID: \"8805bbf2-dd7b-41db-89ee-8b1d3053bf02\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 16:56:25 crc kubenswrapper[4558]: I0120 16:56:25.734392 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8805bbf2-dd7b-41db-89ee-8b1d3053bf02-config\") pod \"ovsdbserver-nb-0\" (UID: \"8805bbf2-dd7b-41db-89ee-8b1d3053bf02\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 16:56:25 crc kubenswrapper[4558]: I0120 16:56:25.734420 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/8805bbf2-dd7b-41db-89ee-8b1d3053bf02-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"8805bbf2-dd7b-41db-89ee-8b1d3053bf02\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 16:56:25 crc kubenswrapper[4558]: I0120 16:56:25.734468 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8805bbf2-dd7b-41db-89ee-8b1d3053bf02-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"8805bbf2-dd7b-41db-89ee-8b1d3053bf02\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 16:56:25 crc kubenswrapper[4558]: I0120 16:56:25.734551 4558 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-nb-0\" (UID: \"8805bbf2-dd7b-41db-89ee-8b1d3053bf02\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 16:56:25 crc kubenswrapper[4558]: I0120 16:56:25.734781 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/8805bbf2-dd7b-41db-89ee-8b1d3053bf02-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"8805bbf2-dd7b-41db-89ee-8b1d3053bf02\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 16:56:25 crc kubenswrapper[4558]: I0120 16:56:25.735378 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-nb-0\" (UID: \"8805bbf2-dd7b-41db-89ee-8b1d3053bf02\") device mount path \"/mnt/openstack/pv08\"" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 16:56:25 crc kubenswrapper[4558]: I0120 16:56:25.735415 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8805bbf2-dd7b-41db-89ee-8b1d3053bf02-config\") pod \"ovsdbserver-nb-0\" (UID: \"8805bbf2-dd7b-41db-89ee-8b1d3053bf02\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 16:56:25 crc kubenswrapper[4558]: I0120 16:56:25.736365 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8805bbf2-dd7b-41db-89ee-8b1d3053bf02-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"8805bbf2-dd7b-41db-89ee-8b1d3053bf02\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 16:56:25 crc kubenswrapper[4558]: I0120 16:56:25.793192 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/8805bbf2-dd7b-41db-89ee-8b1d3053bf02-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"8805bbf2-dd7b-41db-89ee-8b1d3053bf02\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 16:56:25 crc kubenswrapper[4558]: I0120 16:56:25.793277 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8805bbf2-dd7b-41db-89ee-8b1d3053bf02-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"8805bbf2-dd7b-41db-89ee-8b1d3053bf02\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 16:56:25 crc kubenswrapper[4558]: I0120 16:56:25.793785 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cccjj\" (UniqueName: \"kubernetes.io/projected/8805bbf2-dd7b-41db-89ee-8b1d3053bf02-kube-api-access-cccjj\") pod \"ovsdbserver-nb-0\" (UID: \"8805bbf2-dd7b-41db-89ee-8b1d3053bf02\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 16:56:25 crc kubenswrapper[4558]: I0120 16:56:25.793889 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/8805bbf2-dd7b-41db-89ee-8b1d3053bf02-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"8805bbf2-dd7b-41db-89ee-8b1d3053bf02\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 16:56:25 crc kubenswrapper[4558]: I0120 16:56:25.812788 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-nb-0\" (UID: \"8805bbf2-dd7b-41db-89ee-8b1d3053bf02\") " 
pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 16:56:26 crc kubenswrapper[4558]: I0120 16:56:26.044351 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 16:56:26 crc kubenswrapper[4558]: I0120 16:56:26.413324 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" event={"ID":"bf355276-9e62-474e-bfb1-616dde5b83bc","Type":"ContainerStarted","Data":"e602226e4f361c85082bf7248cd70619371bfecaf58e19de2a31846417adbebd"} Jan 20 16:56:26 crc kubenswrapper[4558]: I0120 16:56:26.416914 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae","Type":"ContainerStarted","Data":"dca4ae7dca5eb96c8fcc474ac58d3706e17fe078d95aa681217db1b6da9140a3"} Jan 20 16:56:26 crc kubenswrapper[4558]: I0120 16:56:26.420498 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"6374f20d-dc8a-4f1c-9df3-086e6904b394","Type":"ContainerStarted","Data":"de6e1b519940940afb11dd7e03f1334e5085e524686926bc4bad044033126f7f"} Jan 20 16:56:26 crc kubenswrapper[4558]: I0120 16:56:26.422129 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"324ace6f-8bac-4269-a674-d9b6e990cd18","Type":"ContainerStarted","Data":"93541e9fd292f8f2cfa2ba3ec3bd317e9352f88de020c5959cf2c84ba5b458ba"} Jan 20 16:56:26 crc kubenswrapper[4558]: I0120 16:56:26.422364 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/memcached-0" Jan 20 16:56:26 crc kubenswrapper[4558]: I0120 16:56:26.450766 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/memcached-0" podStartSLOduration=2.595047767 podStartE2EDuration="7.450740258s" podCreationTimestamp="2026-01-20 16:56:19 +0000 UTC" firstStartedPulling="2026-01-20 16:56:20.292347227 +0000 UTC m=+874.052685193" lastFinishedPulling="2026-01-20 16:56:25.148039717 +0000 UTC m=+878.908377684" observedRunningTime="2026-01-20 16:56:26.449678312 +0000 UTC m=+880.210016279" watchObservedRunningTime="2026-01-20 16:56:26.450740258 +0000 UTC m=+880.211078226" Jan 20 16:56:26 crc kubenswrapper[4558]: I0120 16:56:26.953303 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-f5849d7b9-w2tw2" Jan 20 16:56:26 crc kubenswrapper[4558]: I0120 16:56:26.987274 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-mltw6" Jan 20 16:56:27 crc kubenswrapper[4558]: I0120 16:56:27.019546 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-f5849d7b9-w2tw2"] Jan 20 16:56:27 crc kubenswrapper[4558]: I0120 16:56:27.239501 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 16:56:27 crc kubenswrapper[4558]: I0120 16:56:27.241496 4558 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 20 16:56:27 crc kubenswrapper[4558]: I0120 16:56:27.265741 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 16:56:27 crc kubenswrapper[4558]: I0120 16:56:27.266759 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 16:56:27 crc kubenswrapper[4558]: I0120 16:56:27.268080 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovndbcluster-sb-config" Jan 20 16:56:27 crc kubenswrapper[4558]: I0120 16:56:27.268337 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ovndbcluster-sb-ovndbs" Jan 20 16:56:27 crc kubenswrapper[4558]: I0120 16:56:27.268543 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovndbcluster-sb-scripts" Jan 20 16:56:27 crc kubenswrapper[4558]: I0120 16:56:27.269007 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ovncluster-ovndbcluster-sb-dockercfg-fbc5d" Jan 20 16:56:27 crc kubenswrapper[4558]: I0120 16:56:27.275513 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 16:56:27 crc kubenswrapper[4558]: I0120 16:56:27.330090 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 16:56:27 crc kubenswrapper[4558]: I0120 16:56:27.330140 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 16:56:27 crc kubenswrapper[4558]: I0120 16:56:27.364035 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/1c0d7766-0fb5-47ec-87f4-6644f5b1afd9-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"1c0d7766-0fb5-47ec-87f4-6644f5b1afd9\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 16:56:27 crc kubenswrapper[4558]: I0120 16:56:27.364118 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/1c0d7766-0fb5-47ec-87f4-6644f5b1afd9-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"1c0d7766-0fb5-47ec-87f4-6644f5b1afd9\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 16:56:27 crc kubenswrapper[4558]: I0120 16:56:27.364257 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/1c0d7766-0fb5-47ec-87f4-6644f5b1afd9-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"1c0d7766-0fb5-47ec-87f4-6644f5b1afd9\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 16:56:27 crc kubenswrapper[4558]: I0120 16:56:27.364347 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1c0d7766-0fb5-47ec-87f4-6644f5b1afd9-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"1c0d7766-0fb5-47ec-87f4-6644f5b1afd9\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 16:56:27 crc kubenswrapper[4558]: I0120 16:56:27.364467 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage19-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage19-crc\") pod \"ovsdbserver-sb-0\" (UID: \"1c0d7766-0fb5-47ec-87f4-6644f5b1afd9\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 16:56:27 crc kubenswrapper[4558]: I0120 16:56:27.364506 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6sgcz\" (UniqueName: \"kubernetes.io/projected/1c0d7766-0fb5-47ec-87f4-6644f5b1afd9-kube-api-access-6sgcz\") pod \"ovsdbserver-sb-0\" (UID: \"1c0d7766-0fb5-47ec-87f4-6644f5b1afd9\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 16:56:27 crc kubenswrapper[4558]: I0120 16:56:27.364545 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1c0d7766-0fb5-47ec-87f4-6644f5b1afd9-config\") pod \"ovsdbserver-sb-0\" (UID: \"1c0d7766-0fb5-47ec-87f4-6644f5b1afd9\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 16:56:27 crc kubenswrapper[4558]: I0120 16:56:27.364624 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c0d7766-0fb5-47ec-87f4-6644f5b1afd9-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"1c0d7766-0fb5-47ec-87f4-6644f5b1afd9\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 16:56:27 crc kubenswrapper[4558]: I0120 16:56:27.428277 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"8805bbf2-dd7b-41db-89ee-8b1d3053bf02","Type":"ContainerStarted","Data":"6af10391dc0133fa2595b1be05648390a7170e51d6d9a3dbae2c77b9b689b77c"} Jan 20 16:56:27 crc kubenswrapper[4558]: I0120 16:56:27.429637 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-0" event={"ID":"3f868eba-f4e1-4e32-b271-391cf271fe97","Type":"ContainerStarted","Data":"1cbb8a79828c1017ed54f50de2db6768bd2eb566cb59fce2be3b621f69a500cd"} Jan 20 16:56:27 crc kubenswrapper[4558]: I0120 16:56:27.430958 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"7bec7fd7-edb6-4186-9fbb-09b10b49b280","Type":"ContainerStarted","Data":"14355c2e786124ac69d8fdaed5080cbd251d7fb3c6511f9f485bd19331884edf"} Jan 20 16:56:27 crc kubenswrapper[4558]: I0120 16:56:27.431421 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-f5849d7b9-w2tw2" podUID="36908d16-3c7e-48f8-9056-113f1fd07dc8" containerName="dnsmasq-dns" containerID="cri-o://d456a86632118b638ae89546d8849001b1284b74d73678e38b39d7443a755cfa" gracePeriod=10 Jan 20 16:56:27 crc kubenswrapper[4558]: I0120 16:56:27.462361 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/kube-state-metrics-0" podStartSLOduration=2.258301802 podStartE2EDuration="6.462348661s" podCreationTimestamp="2026-01-20 16:56:21 +0000 UTC" firstStartedPulling="2026-01-20 16:56:22.679464487 +0000 UTC m=+876.439802454" lastFinishedPulling="2026-01-20 16:56:26.883511346 +0000 UTC m=+880.643849313" observedRunningTime="2026-01-20 16:56:27.459838842 +0000 UTC m=+881.220176809" watchObservedRunningTime="2026-01-20 16:56:27.462348661 +0000 UTC m=+881.222686628" Jan 20 16:56:27 crc kubenswrapper[4558]: I0120 16:56:27.467686 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/1c0d7766-0fb5-47ec-87f4-6644f5b1afd9-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"1c0d7766-0fb5-47ec-87f4-6644f5b1afd9\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 16:56:27 crc kubenswrapper[4558]: I0120 16:56:27.467748 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1c0d7766-0fb5-47ec-87f4-6644f5b1afd9-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"1c0d7766-0fb5-47ec-87f4-6644f5b1afd9\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 16:56:27 crc kubenswrapper[4558]: I0120 16:56:27.467820 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage19-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage19-crc\") pod \"ovsdbserver-sb-0\" (UID: \"1c0d7766-0fb5-47ec-87f4-6644f5b1afd9\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 16:56:27 crc kubenswrapper[4558]: I0120 16:56:27.467850 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6sgcz\" (UniqueName: \"kubernetes.io/projected/1c0d7766-0fb5-47ec-87f4-6644f5b1afd9-kube-api-access-6sgcz\") pod \"ovsdbserver-sb-0\" (UID: \"1c0d7766-0fb5-47ec-87f4-6644f5b1afd9\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 16:56:27 crc kubenswrapper[4558]: I0120 16:56:27.467876 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1c0d7766-0fb5-47ec-87f4-6644f5b1afd9-config\") pod \"ovsdbserver-sb-0\" (UID: \"1c0d7766-0fb5-47ec-87f4-6644f5b1afd9\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 16:56:27 crc kubenswrapper[4558]: I0120 16:56:27.467915 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c0d7766-0fb5-47ec-87f4-6644f5b1afd9-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"1c0d7766-0fb5-47ec-87f4-6644f5b1afd9\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 16:56:27 crc kubenswrapper[4558]: I0120 16:56:27.467945 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/1c0d7766-0fb5-47ec-87f4-6644f5b1afd9-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"1c0d7766-0fb5-47ec-87f4-6644f5b1afd9\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 16:56:27 crc kubenswrapper[4558]: I0120 16:56:27.467978 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/1c0d7766-0fb5-47ec-87f4-6644f5b1afd9-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"1c0d7766-0fb5-47ec-87f4-6644f5b1afd9\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 16:56:27 crc kubenswrapper[4558]: I0120 16:56:27.469014 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/1c0d7766-0fb5-47ec-87f4-6644f5b1afd9-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"1c0d7766-0fb5-47ec-87f4-6644f5b1afd9\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 16:56:27 crc kubenswrapper[4558]: I0120 16:56:27.469366 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1c0d7766-0fb5-47ec-87f4-6644f5b1afd9-config\") pod \"ovsdbserver-sb-0\" (UID: \"1c0d7766-0fb5-47ec-87f4-6644f5b1afd9\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 
16:56:27 crc kubenswrapper[4558]: I0120 16:56:27.469424 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage19-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage19-crc\") pod \"ovsdbserver-sb-0\" (UID: \"1c0d7766-0fb5-47ec-87f4-6644f5b1afd9\") device mount path \"/mnt/openstack/pv19\"" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 16:56:27 crc kubenswrapper[4558]: I0120 16:56:27.469695 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1c0d7766-0fb5-47ec-87f4-6644f5b1afd9-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"1c0d7766-0fb5-47ec-87f4-6644f5b1afd9\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 16:56:27 crc kubenswrapper[4558]: I0120 16:56:27.473985 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/1c0d7766-0fb5-47ec-87f4-6644f5b1afd9-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"1c0d7766-0fb5-47ec-87f4-6644f5b1afd9\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 16:56:27 crc kubenswrapper[4558]: I0120 16:56:27.474044 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c0d7766-0fb5-47ec-87f4-6644f5b1afd9-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"1c0d7766-0fb5-47ec-87f4-6644f5b1afd9\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 16:56:27 crc kubenswrapper[4558]: I0120 16:56:27.479936 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/1c0d7766-0fb5-47ec-87f4-6644f5b1afd9-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"1c0d7766-0fb5-47ec-87f4-6644f5b1afd9\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 16:56:27 crc kubenswrapper[4558]: I0120 16:56:27.485304 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6sgcz\" (UniqueName: \"kubernetes.io/projected/1c0d7766-0fb5-47ec-87f4-6644f5b1afd9-kube-api-access-6sgcz\") pod \"ovsdbserver-sb-0\" (UID: \"1c0d7766-0fb5-47ec-87f4-6644f5b1afd9\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 16:56:27 crc kubenswrapper[4558]: I0120 16:56:27.487742 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage19-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage19-crc\") pod \"ovsdbserver-sb-0\" (UID: \"1c0d7766-0fb5-47ec-87f4-6644f5b1afd9\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 16:56:27 crc kubenswrapper[4558]: I0120 16:56:27.583043 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 16:56:27 crc kubenswrapper[4558]: I0120 16:56:27.751993 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-f5849d7b9-w2tw2" Jan 20 16:56:27 crc kubenswrapper[4558]: I0120 16:56:27.872471 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-659jc\" (UniqueName: \"kubernetes.io/projected/36908d16-3c7e-48f8-9056-113f1fd07dc8-kube-api-access-659jc\") pod \"36908d16-3c7e-48f8-9056-113f1fd07dc8\" (UID: \"36908d16-3c7e-48f8-9056-113f1fd07dc8\") " Jan 20 16:56:27 crc kubenswrapper[4558]: I0120 16:56:27.872538 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/36908d16-3c7e-48f8-9056-113f1fd07dc8-config\") pod \"36908d16-3c7e-48f8-9056-113f1fd07dc8\" (UID: \"36908d16-3c7e-48f8-9056-113f1fd07dc8\") " Jan 20 16:56:27 crc kubenswrapper[4558]: I0120 16:56:27.877253 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/36908d16-3c7e-48f8-9056-113f1fd07dc8-kube-api-access-659jc" (OuterVolumeSpecName: "kube-api-access-659jc") pod "36908d16-3c7e-48f8-9056-113f1fd07dc8" (UID: "36908d16-3c7e-48f8-9056-113f1fd07dc8"). InnerVolumeSpecName "kube-api-access-659jc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:56:27 crc kubenswrapper[4558]: I0120 16:56:27.898996 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/36908d16-3c7e-48f8-9056-113f1fd07dc8-config" (OuterVolumeSpecName: "config") pod "36908d16-3c7e-48f8-9056-113f1fd07dc8" (UID: "36908d16-3c7e-48f8-9056-113f1fd07dc8"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:56:27 crc kubenswrapper[4558]: I0120 16:56:27.974526 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-659jc\" (UniqueName: \"kubernetes.io/projected/36908d16-3c7e-48f8-9056-113f1fd07dc8-kube-api-access-659jc\") on node \"crc\" DevicePath \"\"" Jan 20 16:56:27 crc kubenswrapper[4558]: I0120 16:56:27.974744 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/36908d16-3c7e-48f8-9056-113f1fd07dc8-config\") on node \"crc\" DevicePath \"\"" Jan 20 16:56:27 crc kubenswrapper[4558]: I0120 16:56:27.976662 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 16:56:27 crc kubenswrapper[4558]: W0120 16:56:27.980661 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1c0d7766_0fb5_47ec_87f4_6644f5b1afd9.slice/crio-580e103bbc5f07f4f33408a521c976313d301c9db8538c7a4d7ef229996839a0 WatchSource:0}: Error finding container 580e103bbc5f07f4f33408a521c976313d301c9db8538c7a4d7ef229996839a0: Status 404 returned error can't find the container with id 580e103bbc5f07f4f33408a521c976313d301c9db8538c7a4d7ef229996839a0 Jan 20 16:56:28 crc kubenswrapper[4558]: I0120 16:56:28.436867 4558 generic.go:334] "Generic (PLEG): container finished" podID="36908d16-3c7e-48f8-9056-113f1fd07dc8" containerID="d456a86632118b638ae89546d8849001b1284b74d73678e38b39d7443a755cfa" exitCode=0 Jan 20 16:56:28 crc kubenswrapper[4558]: I0120 16:56:28.436927 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-f5849d7b9-w2tw2" Jan 20 16:56:28 crc kubenswrapper[4558]: I0120 16:56:28.436953 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-f5849d7b9-w2tw2" event={"ID":"36908d16-3c7e-48f8-9056-113f1fd07dc8","Type":"ContainerDied","Data":"d456a86632118b638ae89546d8849001b1284b74d73678e38b39d7443a755cfa"} Jan 20 16:56:28 crc kubenswrapper[4558]: I0120 16:56:28.437072 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-f5849d7b9-w2tw2" event={"ID":"36908d16-3c7e-48f8-9056-113f1fd07dc8","Type":"ContainerDied","Data":"22bd349ea68b09aa4e19eb00caa09de58664bf1cf95c433a4340327ebf6e5cff"} Jan 20 16:56:28 crc kubenswrapper[4558]: I0120 16:56:28.437092 4558 scope.go:117] "RemoveContainer" containerID="d456a86632118b638ae89546d8849001b1284b74d73678e38b39d7443a755cfa" Jan 20 16:56:28 crc kubenswrapper[4558]: I0120 16:56:28.438931 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"1c0d7766-0fb5-47ec-87f4-6644f5b1afd9","Type":"ContainerStarted","Data":"580e103bbc5f07f4f33408a521c976313d301c9db8538c7a4d7ef229996839a0"} Jan 20 16:56:28 crc kubenswrapper[4558]: I0120 16:56:28.439323 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 16:56:28 crc kubenswrapper[4558]: I0120 16:56:28.459490 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-f5849d7b9-w2tw2"] Jan 20 16:56:28 crc kubenswrapper[4558]: I0120 16:56:28.463433 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-f5849d7b9-w2tw2"] Jan 20 16:56:28 crc kubenswrapper[4558]: I0120 16:56:28.466941 4558 scope.go:117] "RemoveContainer" containerID="b3cd74923ab6240f3861d9e1bcd85b2038839e8f660d39e58ca41ed3d85b6e24" Jan 20 16:56:28 crc kubenswrapper[4558]: I0120 16:56:28.492364 4558 scope.go:117] "RemoveContainer" containerID="d456a86632118b638ae89546d8849001b1284b74d73678e38b39d7443a755cfa" Jan 20 16:56:28 crc kubenswrapper[4558]: E0120 16:56:28.492842 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d456a86632118b638ae89546d8849001b1284b74d73678e38b39d7443a755cfa\": container with ID starting with d456a86632118b638ae89546d8849001b1284b74d73678e38b39d7443a755cfa not found: ID does not exist" containerID="d456a86632118b638ae89546d8849001b1284b74d73678e38b39d7443a755cfa" Jan 20 16:56:28 crc kubenswrapper[4558]: I0120 16:56:28.492866 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d456a86632118b638ae89546d8849001b1284b74d73678e38b39d7443a755cfa"} err="failed to get container status \"d456a86632118b638ae89546d8849001b1284b74d73678e38b39d7443a755cfa\": rpc error: code = NotFound desc = could not find container \"d456a86632118b638ae89546d8849001b1284b74d73678e38b39d7443a755cfa\": container with ID starting with d456a86632118b638ae89546d8849001b1284b74d73678e38b39d7443a755cfa not found: ID does not exist" Jan 20 16:56:28 crc kubenswrapper[4558]: I0120 16:56:28.492884 4558 scope.go:117] "RemoveContainer" containerID="b3cd74923ab6240f3861d9e1bcd85b2038839e8f660d39e58ca41ed3d85b6e24" Jan 20 16:56:28 crc kubenswrapper[4558]: E0120 16:56:28.493191 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find 
container \"b3cd74923ab6240f3861d9e1bcd85b2038839e8f660d39e58ca41ed3d85b6e24\": container with ID starting with b3cd74923ab6240f3861d9e1bcd85b2038839e8f660d39e58ca41ed3d85b6e24 not found: ID does not exist" containerID="b3cd74923ab6240f3861d9e1bcd85b2038839e8f660d39e58ca41ed3d85b6e24" Jan 20 16:56:28 crc kubenswrapper[4558]: I0120 16:56:28.493232 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b3cd74923ab6240f3861d9e1bcd85b2038839e8f660d39e58ca41ed3d85b6e24"} err="failed to get container status \"b3cd74923ab6240f3861d9e1bcd85b2038839e8f660d39e58ca41ed3d85b6e24\": rpc error: code = NotFound desc = could not find container \"b3cd74923ab6240f3861d9e1bcd85b2038839e8f660d39e58ca41ed3d85b6e24\": container with ID starting with b3cd74923ab6240f3861d9e1bcd85b2038839e8f660d39e58ca41ed3d85b6e24 not found: ID does not exist" Jan 20 16:56:28 crc kubenswrapper[4558]: I0120 16:56:28.571854 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="36908d16-3c7e-48f8-9056-113f1fd07dc8" path="/var/lib/kubelet/pods/36908d16-3c7e-48f8-9056-113f1fd07dc8/volumes" Jan 20 16:56:29 crc kubenswrapper[4558]: I0120 16:56:29.447796 4558 generic.go:334] "Generic (PLEG): container finished" podID="0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae" containerID="dca4ae7dca5eb96c8fcc474ac58d3706e17fe078d95aa681217db1b6da9140a3" exitCode=0 Jan 20 16:56:29 crc kubenswrapper[4558]: I0120 16:56:29.447886 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae","Type":"ContainerDied","Data":"dca4ae7dca5eb96c8fcc474ac58d3706e17fe078d95aa681217db1b6da9140a3"} Jan 20 16:56:29 crc kubenswrapper[4558]: I0120 16:56:29.451229 4558 generic.go:334] "Generic (PLEG): container finished" podID="6374f20d-dc8a-4f1c-9df3-086e6904b394" containerID="de6e1b519940940afb11dd7e03f1334e5085e524686926bc4bad044033126f7f" exitCode=0 Jan 20 16:56:29 crc kubenswrapper[4558]: I0120 16:56:29.451364 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"6374f20d-dc8a-4f1c-9df3-086e6904b394","Type":"ContainerDied","Data":"de6e1b519940940afb11dd7e03f1334e5085e524686926bc4bad044033126f7f"} Jan 20 16:56:30 crc kubenswrapper[4558]: I0120 16:56:30.462262 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"6374f20d-dc8a-4f1c-9df3-086e6904b394","Type":"ContainerStarted","Data":"14cc352d3f4cfc88e8fb040c94fae40dfc936c241aaccfd3a719350b13e1a6d1"} Jan 20 16:56:30 crc kubenswrapper[4558]: I0120 16:56:30.465406 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae","Type":"ContainerStarted","Data":"b8b4710c6079f7f8ec88f1289e78374ee84e488da8b6978ef790e87233e7f5fb"} Jan 20 16:56:30 crc kubenswrapper[4558]: I0120 16:56:30.481280 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/openstack-cell1-galera-0" podStartSLOduration=7.250748538 podStartE2EDuration="12.481264664s" podCreationTimestamp="2026-01-20 16:56:18 +0000 UTC" firstStartedPulling="2026-01-20 16:56:19.979497876 +0000 UTC m=+873.739835843" lastFinishedPulling="2026-01-20 16:56:25.210014002 +0000 UTC m=+878.970351969" observedRunningTime="2026-01-20 16:56:30.479566971 +0000 UTC m=+884.239904938" watchObservedRunningTime="2026-01-20 16:56:30.481264664 +0000 UTC m=+884.241602630" Jan 20 
16:56:30 crc kubenswrapper[4558]: I0120 16:56:30.495915 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/openstack-galera-0" podStartSLOduration=8.856132636 podStartE2EDuration="14.495898621s" podCreationTimestamp="2026-01-20 16:56:16 +0000 UTC" firstStartedPulling="2026-01-20 16:56:19.581600123 +0000 UTC m=+873.341938090" lastFinishedPulling="2026-01-20 16:56:25.221366108 +0000 UTC m=+878.981704075" observedRunningTime="2026-01-20 16:56:30.495196601 +0000 UTC m=+884.255534568" watchObservedRunningTime="2026-01-20 16:56:30.495898621 +0000 UTC m=+884.256236587" Jan 20 16:56:31 crc kubenswrapper[4558]: I0120 16:56:31.472320 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"1c0d7766-0fb5-47ec-87f4-6644f5b1afd9","Type":"ContainerStarted","Data":"20a844916c974def3060dd83baa14a82ac52a34e846bc5a85b9c976771061eb1"} Jan 20 16:56:31 crc kubenswrapper[4558]: I0120 16:56:31.473742 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"8805bbf2-dd7b-41db-89ee-8b1d3053bf02","Type":"ContainerStarted","Data":"3827b2816f3cb25c8c183f31035843c25fc855df97604f4ab511f8bc21e7c1ef"} Jan 20 16:56:34 crc kubenswrapper[4558]: I0120 16:56:34.493607 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"1c0d7766-0fb5-47ec-87f4-6644f5b1afd9","Type":"ContainerStarted","Data":"6fa53430a72d92b62c65c8602e3f7ecea38be4fd2c7715f04903b7006b2c6625"} Jan 20 16:56:34 crc kubenswrapper[4558]: I0120 16:56:34.495076 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"8805bbf2-dd7b-41db-89ee-8b1d3053bf02","Type":"ContainerStarted","Data":"110cfb1c28534cceb930fa4d3f07dc6b34a497e54cb137356436aeb344c172e2"} Jan 20 16:56:34 crc kubenswrapper[4558]: I0120 16:56:34.513629 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ovsdbserver-sb-0" podStartSLOduration=2.223150897 podStartE2EDuration="8.513615973s" podCreationTimestamp="2026-01-20 16:56:26 +0000 UTC" firstStartedPulling="2026-01-20 16:56:27.982455862 +0000 UTC m=+881.742793829" lastFinishedPulling="2026-01-20 16:56:34.272920938 +0000 UTC m=+888.033258905" observedRunningTime="2026-01-20 16:56:34.50433476 +0000 UTC m=+888.264672728" watchObservedRunningTime="2026-01-20 16:56:34.513615973 +0000 UTC m=+888.273953940" Jan 20 16:56:34 crc kubenswrapper[4558]: I0120 16:56:34.532598 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ovsdbserver-nb-0" podStartSLOduration=3.503845248 podStartE2EDuration="10.532583749s" podCreationTimestamp="2026-01-20 16:56:24 +0000 UTC" firstStartedPulling="2026-01-20 16:56:27.241296302 +0000 UTC m=+881.001634268" lastFinishedPulling="2026-01-20 16:56:34.270034801 +0000 UTC m=+888.030372769" observedRunningTime="2026-01-20 16:56:34.523767161 +0000 UTC m=+888.284105128" watchObservedRunningTime="2026-01-20 16:56:34.532583749 +0000 UTC m=+888.292921716" Jan 20 16:56:34 crc kubenswrapper[4558]: I0120 16:56:34.926402 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/memcached-0" Jan 20 16:56:35 crc kubenswrapper[4558]: I0120 16:56:35.045632 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 16:56:35 crc kubenswrapper[4558]: I0120 16:56:35.073265 4558 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 16:56:35 crc kubenswrapper[4558]: I0120 16:56:35.500533 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 16:56:36 crc kubenswrapper[4558]: I0120 16:56:36.071878 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 16:56:36 crc kubenswrapper[4558]: I0120 16:56:36.583915 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 16:56:36 crc kubenswrapper[4558]: I0120 16:56:36.608278 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 16:56:37 crc kubenswrapper[4558]: I0120 16:56:37.511535 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 16:56:37 crc kubenswrapper[4558]: I0120 16:56:37.536457 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 16:56:37 crc kubenswrapper[4558]: I0120 16:56:37.697073 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 16:56:37 crc kubenswrapper[4558]: E0120 16:56:37.697366 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36908d16-3c7e-48f8-9056-113f1fd07dc8" containerName="init" Jan 20 16:56:37 crc kubenswrapper[4558]: I0120 16:56:37.697380 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="36908d16-3c7e-48f8-9056-113f1fd07dc8" containerName="init" Jan 20 16:56:37 crc kubenswrapper[4558]: E0120 16:56:37.697402 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36908d16-3c7e-48f8-9056-113f1fd07dc8" containerName="dnsmasq-dns" Jan 20 16:56:37 crc kubenswrapper[4558]: I0120 16:56:37.697407 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="36908d16-3c7e-48f8-9056-113f1fd07dc8" containerName="dnsmasq-dns" Jan 20 16:56:37 crc kubenswrapper[4558]: I0120 16:56:37.697557 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="36908d16-3c7e-48f8-9056-113f1fd07dc8" containerName="dnsmasq-dns" Jan 20 16:56:37 crc kubenswrapper[4558]: I0120 16:56:37.698221 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 16:56:37 crc kubenswrapper[4558]: I0120 16:56:37.702456 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ovnnorthd-ovndbs" Jan 20 16:56:37 crc kubenswrapper[4558]: I0120 16:56:37.702464 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ovnnorthd-ovnnorthd-dockercfg-czv6c" Jan 20 16:56:37 crc kubenswrapper[4558]: I0120 16:56:37.702597 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovnnorthd-config" Jan 20 16:56:37 crc kubenswrapper[4558]: I0120 16:56:37.702658 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovnnorthd-scripts" Jan 20 16:56:37 crc kubenswrapper[4558]: I0120 16:56:37.704970 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 16:56:37 crc kubenswrapper[4558]: I0120 16:56:37.729734 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0c535ada-ed58-4c94-82c9-d6b1a35f78be-scripts\") pod \"ovn-northd-0\" (UID: \"0c535ada-ed58-4c94-82c9-d6b1a35f78be\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 16:56:37 crc kubenswrapper[4558]: I0120 16:56:37.729807 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/0c535ada-ed58-4c94-82c9-d6b1a35f78be-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"0c535ada-ed58-4c94-82c9-d6b1a35f78be\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 16:56:37 crc kubenswrapper[4558]: I0120 16:56:37.729864 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0c535ada-ed58-4c94-82c9-d6b1a35f78be-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"0c535ada-ed58-4c94-82c9-d6b1a35f78be\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 16:56:37 crc kubenswrapper[4558]: I0120 16:56:37.729880 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tv7cz\" (UniqueName: \"kubernetes.io/projected/0c535ada-ed58-4c94-82c9-d6b1a35f78be-kube-api-access-tv7cz\") pod \"ovn-northd-0\" (UID: \"0c535ada-ed58-4c94-82c9-d6b1a35f78be\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 16:56:37 crc kubenswrapper[4558]: I0120 16:56:37.729908 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/0c535ada-ed58-4c94-82c9-d6b1a35f78be-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"0c535ada-ed58-4c94-82c9-d6b1a35f78be\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 16:56:37 crc kubenswrapper[4558]: I0120 16:56:37.729933 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0c535ada-ed58-4c94-82c9-d6b1a35f78be-config\") pod \"ovn-northd-0\" (UID: \"0c535ada-ed58-4c94-82c9-d6b1a35f78be\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 16:56:37 crc kubenswrapper[4558]: I0120 16:56:37.730016 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/0c535ada-ed58-4c94-82c9-d6b1a35f78be-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"0c535ada-ed58-4c94-82c9-d6b1a35f78be\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 16:56:37 crc kubenswrapper[4558]: I0120 16:56:37.831671 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/0c535ada-ed58-4c94-82c9-d6b1a35f78be-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"0c535ada-ed58-4c94-82c9-d6b1a35f78be\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 16:56:37 crc kubenswrapper[4558]: I0120 16:56:37.831721 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0c535ada-ed58-4c94-82c9-d6b1a35f78be-scripts\") pod \"ovn-northd-0\" (UID: \"0c535ada-ed58-4c94-82c9-d6b1a35f78be\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 16:56:37 crc kubenswrapper[4558]: I0120 16:56:37.831773 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/0c535ada-ed58-4c94-82c9-d6b1a35f78be-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"0c535ada-ed58-4c94-82c9-d6b1a35f78be\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 16:56:37 crc kubenswrapper[4558]: I0120 16:56:37.831825 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0c535ada-ed58-4c94-82c9-d6b1a35f78be-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"0c535ada-ed58-4c94-82c9-d6b1a35f78be\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 16:56:37 crc kubenswrapper[4558]: I0120 16:56:37.831841 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tv7cz\" (UniqueName: \"kubernetes.io/projected/0c535ada-ed58-4c94-82c9-d6b1a35f78be-kube-api-access-tv7cz\") pod \"ovn-northd-0\" (UID: \"0c535ada-ed58-4c94-82c9-d6b1a35f78be\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 16:56:37 crc kubenswrapper[4558]: I0120 16:56:37.831867 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/0c535ada-ed58-4c94-82c9-d6b1a35f78be-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"0c535ada-ed58-4c94-82c9-d6b1a35f78be\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 16:56:37 crc kubenswrapper[4558]: I0120 16:56:37.831892 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0c535ada-ed58-4c94-82c9-d6b1a35f78be-config\") pod \"ovn-northd-0\" (UID: \"0c535ada-ed58-4c94-82c9-d6b1a35f78be\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 16:56:37 crc kubenswrapper[4558]: I0120 16:56:37.832305 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/0c535ada-ed58-4c94-82c9-d6b1a35f78be-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"0c535ada-ed58-4c94-82c9-d6b1a35f78be\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 16:56:37 crc kubenswrapper[4558]: I0120 16:56:37.832613 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0c535ada-ed58-4c94-82c9-d6b1a35f78be-scripts\") pod \"ovn-northd-0\" (UID: \"0c535ada-ed58-4c94-82c9-d6b1a35f78be\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 16:56:37 crc kubenswrapper[4558]: I0120 16:56:37.832671 4558 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0c535ada-ed58-4c94-82c9-d6b1a35f78be-config\") pod \"ovn-northd-0\" (UID: \"0c535ada-ed58-4c94-82c9-d6b1a35f78be\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 16:56:37 crc kubenswrapper[4558]: I0120 16:56:37.836563 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/0c535ada-ed58-4c94-82c9-d6b1a35f78be-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"0c535ada-ed58-4c94-82c9-d6b1a35f78be\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 16:56:37 crc kubenswrapper[4558]: I0120 16:56:37.836774 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/0c535ada-ed58-4c94-82c9-d6b1a35f78be-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"0c535ada-ed58-4c94-82c9-d6b1a35f78be\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 16:56:37 crc kubenswrapper[4558]: I0120 16:56:37.837560 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0c535ada-ed58-4c94-82c9-d6b1a35f78be-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"0c535ada-ed58-4c94-82c9-d6b1a35f78be\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 16:56:37 crc kubenswrapper[4558]: I0120 16:56:37.846872 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tv7cz\" (UniqueName: \"kubernetes.io/projected/0c535ada-ed58-4c94-82c9-d6b1a35f78be-kube-api-access-tv7cz\") pod \"ovn-northd-0\" (UID: \"0c535ada-ed58-4c94-82c9-d6b1a35f78be\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 16:56:38 crc kubenswrapper[4558]: I0120 16:56:38.021032 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 16:56:38 crc kubenswrapper[4558]: I0120 16:56:38.076081 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 16:56:38 crc kubenswrapper[4558]: I0120 16:56:38.078391 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 16:56:38 crc kubenswrapper[4558]: I0120 16:56:38.144953 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 16:56:38 crc kubenswrapper[4558]: W0120 16:56:38.387456 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0c535ada_ed58_4c94_82c9_d6b1a35f78be.slice/crio-1e0838a1f2660b8c9c0cd79709800f43140956dd9e3eb4529eb3969ecad42019 WatchSource:0}: Error finding container 1e0838a1f2660b8c9c0cd79709800f43140956dd9e3eb4529eb3969ecad42019: Status 404 returned error can't find the container with id 1e0838a1f2660b8c9c0cd79709800f43140956dd9e3eb4529eb3969ecad42019 Jan 20 16:56:38 crc kubenswrapper[4558]: I0120 16:56:38.392980 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 16:56:38 crc kubenswrapper[4558]: I0120 16:56:38.520066 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"0c535ada-ed58-4c94-82c9-d6b1a35f78be","Type":"ContainerStarted","Data":"1e0838a1f2660b8c9c0cd79709800f43140956dd9e3eb4529eb3969ecad42019"} Jan 20 16:56:38 crc kubenswrapper[4558]: I0120 16:56:38.571898 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 16:56:39 crc kubenswrapper[4558]: I0120 16:56:39.565774 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 16:56:39 crc kubenswrapper[4558]: I0120 16:56:39.565968 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 16:56:39 crc kubenswrapper[4558]: I0120 16:56:39.574148 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-7e4f-account-create-update-nq676"] Jan 20 16:56:39 crc kubenswrapper[4558]: I0120 16:56:39.574970 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-7e4f-account-create-update-nq676" Jan 20 16:56:39 crc kubenswrapper[4558]: I0120 16:56:39.584682 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-db-secret" Jan 20 16:56:39 crc kubenswrapper[4558]: I0120 16:56:39.605552 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-7e4f-account-create-update-nq676"] Jan 20 16:56:39 crc kubenswrapper[4558]: I0120 16:56:39.634031 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-db-create-2jqq4"] Jan 20 16:56:39 crc kubenswrapper[4558]: I0120 16:56:39.635507 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-db-create-2jqq4" Jan 20 16:56:39 crc kubenswrapper[4558]: I0120 16:56:39.640681 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-db-create-2jqq4"] Jan 20 16:56:39 crc kubenswrapper[4558]: I0120 16:56:39.670474 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 16:56:39 crc kubenswrapper[4558]: I0120 16:56:39.756811 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wbc7h\" (UniqueName: \"kubernetes.io/projected/3da300ce-eec2-414b-afdd-01d3f3bd71c5-kube-api-access-wbc7h\") pod \"keystone-db-create-2jqq4\" (UID: \"3da300ce-eec2-414b-afdd-01d3f3bd71c5\") " pod="openstack-kuttl-tests/keystone-db-create-2jqq4" Jan 20 16:56:39 crc kubenswrapper[4558]: I0120 16:56:39.756893 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gtn85\" (UniqueName: \"kubernetes.io/projected/66a36d00-ae7a-4881-b8b0-1cab43a25547-kube-api-access-gtn85\") pod \"keystone-7e4f-account-create-update-nq676\" (UID: \"66a36d00-ae7a-4881-b8b0-1cab43a25547\") " pod="openstack-kuttl-tests/keystone-7e4f-account-create-update-nq676" Jan 20 16:56:39 crc kubenswrapper[4558]: I0120 16:56:39.757031 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3da300ce-eec2-414b-afdd-01d3f3bd71c5-operator-scripts\") pod \"keystone-db-create-2jqq4\" (UID: \"3da300ce-eec2-414b-afdd-01d3f3bd71c5\") " pod="openstack-kuttl-tests/keystone-db-create-2jqq4" Jan 20 16:56:39 crc kubenswrapper[4558]: I0120 16:56:39.757054 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/66a36d00-ae7a-4881-b8b0-1cab43a25547-operator-scripts\") pod \"keystone-7e4f-account-create-update-nq676\" (UID: \"66a36d00-ae7a-4881-b8b0-1cab43a25547\") " pod="openstack-kuttl-tests/keystone-7e4f-account-create-update-nq676" Jan 20 16:56:39 crc kubenswrapper[4558]: I0120 16:56:39.858834 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wbc7h\" (UniqueName: \"kubernetes.io/projected/3da300ce-eec2-414b-afdd-01d3f3bd71c5-kube-api-access-wbc7h\") pod \"keystone-db-create-2jqq4\" (UID: \"3da300ce-eec2-414b-afdd-01d3f3bd71c5\") " pod="openstack-kuttl-tests/keystone-db-create-2jqq4" Jan 20 16:56:39 crc kubenswrapper[4558]: I0120 16:56:39.858882 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gtn85\" (UniqueName: \"kubernetes.io/projected/66a36d00-ae7a-4881-b8b0-1cab43a25547-kube-api-access-gtn85\") pod \"keystone-7e4f-account-create-update-nq676\" (UID: \"66a36d00-ae7a-4881-b8b0-1cab43a25547\") " pod="openstack-kuttl-tests/keystone-7e4f-account-create-update-nq676" Jan 20 16:56:39 crc kubenswrapper[4558]: I0120 16:56:39.858967 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3da300ce-eec2-414b-afdd-01d3f3bd71c5-operator-scripts\") pod \"keystone-db-create-2jqq4\" (UID: \"3da300ce-eec2-414b-afdd-01d3f3bd71c5\") " pod="openstack-kuttl-tests/keystone-db-create-2jqq4" Jan 20 16:56:39 crc kubenswrapper[4558]: I0120 16:56:39.858987 4558 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/66a36d00-ae7a-4881-b8b0-1cab43a25547-operator-scripts\") pod \"keystone-7e4f-account-create-update-nq676\" (UID: \"66a36d00-ae7a-4881-b8b0-1cab43a25547\") " pod="openstack-kuttl-tests/keystone-7e4f-account-create-update-nq676" Jan 20 16:56:39 crc kubenswrapper[4558]: I0120 16:56:39.859679 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/66a36d00-ae7a-4881-b8b0-1cab43a25547-operator-scripts\") pod \"keystone-7e4f-account-create-update-nq676\" (UID: \"66a36d00-ae7a-4881-b8b0-1cab43a25547\") " pod="openstack-kuttl-tests/keystone-7e4f-account-create-update-nq676" Jan 20 16:56:39 crc kubenswrapper[4558]: I0120 16:56:39.859809 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3da300ce-eec2-414b-afdd-01d3f3bd71c5-operator-scripts\") pod \"keystone-db-create-2jqq4\" (UID: \"3da300ce-eec2-414b-afdd-01d3f3bd71c5\") " pod="openstack-kuttl-tests/keystone-db-create-2jqq4" Jan 20 16:56:39 crc kubenswrapper[4558]: I0120 16:56:39.871789 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wbc7h\" (UniqueName: \"kubernetes.io/projected/3da300ce-eec2-414b-afdd-01d3f3bd71c5-kube-api-access-wbc7h\") pod \"keystone-db-create-2jqq4\" (UID: \"3da300ce-eec2-414b-afdd-01d3f3bd71c5\") " pod="openstack-kuttl-tests/keystone-db-create-2jqq4" Jan 20 16:56:39 crc kubenswrapper[4558]: I0120 16:56:39.872057 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gtn85\" (UniqueName: \"kubernetes.io/projected/66a36d00-ae7a-4881-b8b0-1cab43a25547-kube-api-access-gtn85\") pod \"keystone-7e4f-account-create-update-nq676\" (UID: \"66a36d00-ae7a-4881-b8b0-1cab43a25547\") " pod="openstack-kuttl-tests/keystone-7e4f-account-create-update-nq676" Jan 20 16:56:39 crc kubenswrapper[4558]: I0120 16:56:39.896209 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-7e4f-account-create-update-nq676" Jan 20 16:56:39 crc kubenswrapper[4558]: I0120 16:56:39.897858 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/placement-db-create-kds4k"] Jan 20 16:56:39 crc kubenswrapper[4558]: I0120 16:56:39.898716 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-db-create-kds4k" Jan 20 16:56:39 crc kubenswrapper[4558]: I0120 16:56:39.903913 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/placement-64a7-account-create-update-5hw2k"] Jan 20 16:56:39 crc kubenswrapper[4558]: I0120 16:56:39.904704 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-64a7-account-create-update-5hw2k" Jan 20 16:56:39 crc kubenswrapper[4558]: I0120 16:56:39.905747 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"placement-db-secret" Jan 20 16:56:39 crc kubenswrapper[4558]: I0120 16:56:39.911665 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-db-create-kds4k"] Jan 20 16:56:39 crc kubenswrapper[4558]: I0120 16:56:39.917998 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-64a7-account-create-update-5hw2k"] Jan 20 16:56:39 crc kubenswrapper[4558]: I0120 16:56:39.953119 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-db-create-2jqq4" Jan 20 16:56:40 crc kubenswrapper[4558]: I0120 16:56:40.061380 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l2bmd\" (UniqueName: \"kubernetes.io/projected/cc72bb7b-054c-4141-9db7-1bf5a8b716bb-kube-api-access-l2bmd\") pod \"placement-64a7-account-create-update-5hw2k\" (UID: \"cc72bb7b-054c-4141-9db7-1bf5a8b716bb\") " pod="openstack-kuttl-tests/placement-64a7-account-create-update-5hw2k" Jan 20 16:56:40 crc kubenswrapper[4558]: I0120 16:56:40.061673 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rlkcr\" (UniqueName: \"kubernetes.io/projected/e221e8f5-cc9e-4399-8b97-92ef8104bb70-kube-api-access-rlkcr\") pod \"placement-db-create-kds4k\" (UID: \"e221e8f5-cc9e-4399-8b97-92ef8104bb70\") " pod="openstack-kuttl-tests/placement-db-create-kds4k" Jan 20 16:56:40 crc kubenswrapper[4558]: I0120 16:56:40.061705 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cc72bb7b-054c-4141-9db7-1bf5a8b716bb-operator-scripts\") pod \"placement-64a7-account-create-update-5hw2k\" (UID: \"cc72bb7b-054c-4141-9db7-1bf5a8b716bb\") " pod="openstack-kuttl-tests/placement-64a7-account-create-update-5hw2k" Jan 20 16:56:40 crc kubenswrapper[4558]: I0120 16:56:40.061729 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e221e8f5-cc9e-4399-8b97-92ef8104bb70-operator-scripts\") pod \"placement-db-create-kds4k\" (UID: \"e221e8f5-cc9e-4399-8b97-92ef8104bb70\") " pod="openstack-kuttl-tests/placement-db-create-kds4k" Jan 20 16:56:40 crc kubenswrapper[4558]: I0120 16:56:40.162921 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cc72bb7b-054c-4141-9db7-1bf5a8b716bb-operator-scripts\") pod \"placement-64a7-account-create-update-5hw2k\" (UID: \"cc72bb7b-054c-4141-9db7-1bf5a8b716bb\") " pod="openstack-kuttl-tests/placement-64a7-account-create-update-5hw2k" Jan 20 16:56:40 crc kubenswrapper[4558]: I0120 16:56:40.162973 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e221e8f5-cc9e-4399-8b97-92ef8104bb70-operator-scripts\") pod \"placement-db-create-kds4k\" (UID: \"e221e8f5-cc9e-4399-8b97-92ef8104bb70\") " pod="openstack-kuttl-tests/placement-db-create-kds4k" Jan 20 16:56:40 crc kubenswrapper[4558]: I0120 16:56:40.163079 4558 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"kube-api-access-l2bmd\" (UniqueName: \"kubernetes.io/projected/cc72bb7b-054c-4141-9db7-1bf5a8b716bb-kube-api-access-l2bmd\") pod \"placement-64a7-account-create-update-5hw2k\" (UID: \"cc72bb7b-054c-4141-9db7-1bf5a8b716bb\") " pod="openstack-kuttl-tests/placement-64a7-account-create-update-5hw2k" Jan 20 16:56:40 crc kubenswrapper[4558]: I0120 16:56:40.163200 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rlkcr\" (UniqueName: \"kubernetes.io/projected/e221e8f5-cc9e-4399-8b97-92ef8104bb70-kube-api-access-rlkcr\") pod \"placement-db-create-kds4k\" (UID: \"e221e8f5-cc9e-4399-8b97-92ef8104bb70\") " pod="openstack-kuttl-tests/placement-db-create-kds4k" Jan 20 16:56:40 crc kubenswrapper[4558]: I0120 16:56:40.163904 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e221e8f5-cc9e-4399-8b97-92ef8104bb70-operator-scripts\") pod \"placement-db-create-kds4k\" (UID: \"e221e8f5-cc9e-4399-8b97-92ef8104bb70\") " pod="openstack-kuttl-tests/placement-db-create-kds4k" Jan 20 16:56:40 crc kubenswrapper[4558]: I0120 16:56:40.164246 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cc72bb7b-054c-4141-9db7-1bf5a8b716bb-operator-scripts\") pod \"placement-64a7-account-create-update-5hw2k\" (UID: \"cc72bb7b-054c-4141-9db7-1bf5a8b716bb\") " pod="openstack-kuttl-tests/placement-64a7-account-create-update-5hw2k" Jan 20 16:56:40 crc kubenswrapper[4558]: I0120 16:56:40.177111 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rlkcr\" (UniqueName: \"kubernetes.io/projected/e221e8f5-cc9e-4399-8b97-92ef8104bb70-kube-api-access-rlkcr\") pod \"placement-db-create-kds4k\" (UID: \"e221e8f5-cc9e-4399-8b97-92ef8104bb70\") " pod="openstack-kuttl-tests/placement-db-create-kds4k" Jan 20 16:56:40 crc kubenswrapper[4558]: I0120 16:56:40.177119 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l2bmd\" (UniqueName: \"kubernetes.io/projected/cc72bb7b-054c-4141-9db7-1bf5a8b716bb-kube-api-access-l2bmd\") pod \"placement-64a7-account-create-update-5hw2k\" (UID: \"cc72bb7b-054c-4141-9db7-1bf5a8b716bb\") " pod="openstack-kuttl-tests/placement-64a7-account-create-update-5hw2k" Jan 20 16:56:40 crc kubenswrapper[4558]: I0120 16:56:40.228527 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-db-create-kds4k" Jan 20 16:56:40 crc kubenswrapper[4558]: I0120 16:56:40.241470 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-64a7-account-create-update-5hw2k" Jan 20 16:56:40 crc kubenswrapper[4558]: I0120 16:56:40.278232 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-7e4f-account-create-update-nq676"] Jan 20 16:56:40 crc kubenswrapper[4558]: W0120 16:56:40.281235 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod66a36d00_ae7a_4881_b8b0_1cab43a25547.slice/crio-4251f5ebfbe87c1ba568e32c868dddd7b7a57c8ddf0f4b460a1ba2c0c9900efc WatchSource:0}: Error finding container 4251f5ebfbe87c1ba568e32c868dddd7b7a57c8ddf0f4b460a1ba2c0c9900efc: Status 404 returned error can't find the container with id 4251f5ebfbe87c1ba568e32c868dddd7b7a57c8ddf0f4b460a1ba2c0c9900efc Jan 20 16:56:40 crc kubenswrapper[4558]: I0120 16:56:40.341027 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-db-create-2jqq4"] Jan 20 16:56:40 crc kubenswrapper[4558]: W0120 16:56:40.363587 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3da300ce_eec2_414b_afdd_01d3f3bd71c5.slice/crio-aefc815a88f4ca935c13e661208aade03484012c1280e4f37cca6c772604cf2a WatchSource:0}: Error finding container aefc815a88f4ca935c13e661208aade03484012c1280e4f37cca6c772604cf2a: Status 404 returned error can't find the container with id aefc815a88f4ca935c13e661208aade03484012c1280e4f37cca6c772604cf2a Jan 20 16:56:40 crc kubenswrapper[4558]: I0120 16:56:40.533902 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-create-2jqq4" event={"ID":"3da300ce-eec2-414b-afdd-01d3f3bd71c5","Type":"ContainerStarted","Data":"5109d61a58515d57c16c7eef2904c3a44f55c305b1540fae92275e63c46df394"} Jan 20 16:56:40 crc kubenswrapper[4558]: I0120 16:56:40.533940 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-create-2jqq4" event={"ID":"3da300ce-eec2-414b-afdd-01d3f3bd71c5","Type":"ContainerStarted","Data":"aefc815a88f4ca935c13e661208aade03484012c1280e4f37cca6c772604cf2a"} Jan 20 16:56:40 crc kubenswrapper[4558]: I0120 16:56:40.535635 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-7e4f-account-create-update-nq676" event={"ID":"66a36d00-ae7a-4881-b8b0-1cab43a25547","Type":"ContainerStarted","Data":"36d3c626c341df4133e753cd213edab18c0255979df43a32b39a7004eacff55d"} Jan 20 16:56:40 crc kubenswrapper[4558]: I0120 16:56:40.535659 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-7e4f-account-create-update-nq676" event={"ID":"66a36d00-ae7a-4881-b8b0-1cab43a25547","Type":"ContainerStarted","Data":"4251f5ebfbe87c1ba568e32c868dddd7b7a57c8ddf0f4b460a1ba2c0c9900efc"} Jan 20 16:56:40 crc kubenswrapper[4558]: I0120 16:56:40.538283 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"0c535ada-ed58-4c94-82c9-d6b1a35f78be","Type":"ContainerStarted","Data":"644a2c213ff45475c1b94047c5229ac3c3fe716ff973c59e6790b1314050f1f3"} Jan 20 16:56:40 crc kubenswrapper[4558]: I0120 16:56:40.538324 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"0c535ada-ed58-4c94-82c9-d6b1a35f78be","Type":"ContainerStarted","Data":"bf9413112ce86eb3a9f0b2a6c2c7bc61341d600bd5c50302655d4e980e7c1cc1"} Jan 20 16:56:40 crc kubenswrapper[4558]: I0120 16:56:40.538644 4558 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 16:56:40 crc kubenswrapper[4558]: I0120 16:56:40.547657 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/keystone-db-create-2jqq4" podStartSLOduration=1.547647491 podStartE2EDuration="1.547647491s" podCreationTimestamp="2026-01-20 16:56:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:56:40.545570044 +0000 UTC m=+894.305908011" watchObservedRunningTime="2026-01-20 16:56:40.547647491 +0000 UTC m=+894.307985457" Jan 20 16:56:40 crc kubenswrapper[4558]: I0120 16:56:40.559584 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ovn-northd-0" podStartSLOduration=2.487447449 podStartE2EDuration="3.55957053s" podCreationTimestamp="2026-01-20 16:56:37 +0000 UTC" firstStartedPulling="2026-01-20 16:56:38.389364623 +0000 UTC m=+892.149702590" lastFinishedPulling="2026-01-20 16:56:39.461487704 +0000 UTC m=+893.221825671" observedRunningTime="2026-01-20 16:56:40.556987984 +0000 UTC m=+894.317325951" watchObservedRunningTime="2026-01-20 16:56:40.55957053 +0000 UTC m=+894.319908497" Jan 20 16:56:40 crc kubenswrapper[4558]: I0120 16:56:40.578070 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/keystone-7e4f-account-create-update-nq676" podStartSLOduration=1.578058755 podStartE2EDuration="1.578058755s" podCreationTimestamp="2026-01-20 16:56:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:56:40.573694518 +0000 UTC m=+894.334032485" watchObservedRunningTime="2026-01-20 16:56:40.578058755 +0000 UTC m=+894.338396721" Jan 20 16:56:40 crc kubenswrapper[4558]: I0120 16:56:40.603672 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-db-create-kds4k"] Jan 20 16:56:40 crc kubenswrapper[4558]: I0120 16:56:40.606198 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 16:56:40 crc kubenswrapper[4558]: I0120 16:56:40.664742 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-64a7-account-create-update-5hw2k"] Jan 20 16:56:40 crc kubenswrapper[4558]: W0120 16:56:40.692793 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcc72bb7b_054c_4141_9db7_1bf5a8b716bb.slice/crio-a3bf2950b6bb152ce6cb8180222141097ab0fcf5d45629b646893216e249e231 WatchSource:0}: Error finding container a3bf2950b6bb152ce6cb8180222141097ab0fcf5d45629b646893216e249e231: Status 404 returned error can't find the container with id a3bf2950b6bb152ce6cb8180222141097ab0fcf5d45629b646893216e249e231 Jan 20 16:56:41 crc kubenswrapper[4558]: I0120 16:56:41.546198 4558 generic.go:334] "Generic (PLEG): container finished" podID="66a36d00-ae7a-4881-b8b0-1cab43a25547" containerID="36d3c626c341df4133e753cd213edab18c0255979df43a32b39a7004eacff55d" exitCode=0 Jan 20 16:56:41 crc kubenswrapper[4558]: I0120 16:56:41.546266 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-7e4f-account-create-update-nq676" 
event={"ID":"66a36d00-ae7a-4881-b8b0-1cab43a25547","Type":"ContainerDied","Data":"36d3c626c341df4133e753cd213edab18c0255979df43a32b39a7004eacff55d"} Jan 20 16:56:41 crc kubenswrapper[4558]: I0120 16:56:41.547805 4558 generic.go:334] "Generic (PLEG): container finished" podID="cc72bb7b-054c-4141-9db7-1bf5a8b716bb" containerID="e7af03c31a0a54d7310b8a8e36f1a031462a9a3a9c87a532af17774e273cf04c" exitCode=0 Jan 20 16:56:41 crc kubenswrapper[4558]: I0120 16:56:41.547873 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-64a7-account-create-update-5hw2k" event={"ID":"cc72bb7b-054c-4141-9db7-1bf5a8b716bb","Type":"ContainerDied","Data":"e7af03c31a0a54d7310b8a8e36f1a031462a9a3a9c87a532af17774e273cf04c"} Jan 20 16:56:41 crc kubenswrapper[4558]: I0120 16:56:41.547890 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-64a7-account-create-update-5hw2k" event={"ID":"cc72bb7b-054c-4141-9db7-1bf5a8b716bb","Type":"ContainerStarted","Data":"a3bf2950b6bb152ce6cb8180222141097ab0fcf5d45629b646893216e249e231"} Jan 20 16:56:41 crc kubenswrapper[4558]: I0120 16:56:41.548966 4558 generic.go:334] "Generic (PLEG): container finished" podID="3da300ce-eec2-414b-afdd-01d3f3bd71c5" containerID="5109d61a58515d57c16c7eef2904c3a44f55c305b1540fae92275e63c46df394" exitCode=0 Jan 20 16:56:41 crc kubenswrapper[4558]: I0120 16:56:41.549008 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-create-2jqq4" event={"ID":"3da300ce-eec2-414b-afdd-01d3f3bd71c5","Type":"ContainerDied","Data":"5109d61a58515d57c16c7eef2904c3a44f55c305b1540fae92275e63c46df394"} Jan 20 16:56:41 crc kubenswrapper[4558]: I0120 16:56:41.550451 4558 generic.go:334] "Generic (PLEG): container finished" podID="e221e8f5-cc9e-4399-8b97-92ef8104bb70" containerID="4a2010948788fe3337e1ffdf2267cc309f34c059666b5da0de06d4433df1c518" exitCode=0 Jan 20 16:56:41 crc kubenswrapper[4558]: I0120 16:56:41.550532 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-create-kds4k" event={"ID":"e221e8f5-cc9e-4399-8b97-92ef8104bb70","Type":"ContainerDied","Data":"4a2010948788fe3337e1ffdf2267cc309f34c059666b5da0de06d4433df1c518"} Jan 20 16:56:41 crc kubenswrapper[4558]: I0120 16:56:41.550571 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-create-kds4k" event={"ID":"e221e8f5-cc9e-4399-8b97-92ef8104bb70","Type":"ContainerStarted","Data":"f0697c2842d6783f4de023bab652d036e5d1a8aec081aab8a10c0d3ac2a8f820"} Jan 20 16:56:41 crc kubenswrapper[4558]: I0120 16:56:41.872427 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 16:56:42 crc kubenswrapper[4558]: I0120 16:56:42.860680 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-7e4f-account-create-update-nq676" Jan 20 16:56:42 crc kubenswrapper[4558]: I0120 16:56:42.945001 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/swift-storage-0"] Jan 20 16:56:42 crc kubenswrapper[4558]: E0120 16:56:42.945315 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66a36d00-ae7a-4881-b8b0-1cab43a25547" containerName="mariadb-account-create-update" Jan 20 16:56:42 crc kubenswrapper[4558]: I0120 16:56:42.945329 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="66a36d00-ae7a-4881-b8b0-1cab43a25547" containerName="mariadb-account-create-update" Jan 20 16:56:42 crc kubenswrapper[4558]: I0120 16:56:42.945478 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="66a36d00-ae7a-4881-b8b0-1cab43a25547" containerName="mariadb-account-create-update" Jan 20 16:56:42 crc kubenswrapper[4558]: I0120 16:56:42.952791 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-storage-0" Jan 20 16:56:42 crc kubenswrapper[4558]: I0120 16:56:42.962267 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"swift-conf" Jan 20 16:56:42 crc kubenswrapper[4558]: I0120 16:56:42.962437 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"swift-ring-files" Jan 20 16:56:42 crc kubenswrapper[4558]: I0120 16:56:42.962633 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"swift-storage-config-data" Jan 20 16:56:42 crc kubenswrapper[4558]: I0120 16:56:42.962754 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"swift-swift-dockercfg-42b9p" Jan 20 16:56:42 crc kubenswrapper[4558]: I0120 16:56:42.970609 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-storage-0"] Jan 20 16:56:42 crc kubenswrapper[4558]: I0120 16:56:42.999139 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-db-create-2jqq4" Jan 20 16:56:43 crc kubenswrapper[4558]: I0120 16:56:43.014015 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-64a7-account-create-update-5hw2k" Jan 20 16:56:43 crc kubenswrapper[4558]: I0120 16:56:43.015943 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/66a36d00-ae7a-4881-b8b0-1cab43a25547-operator-scripts\") pod \"66a36d00-ae7a-4881-b8b0-1cab43a25547\" (UID: \"66a36d00-ae7a-4881-b8b0-1cab43a25547\") " Jan 20 16:56:43 crc kubenswrapper[4558]: I0120 16:56:43.015974 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gtn85\" (UniqueName: \"kubernetes.io/projected/66a36d00-ae7a-4881-b8b0-1cab43a25547-kube-api-access-gtn85\") pod \"66a36d00-ae7a-4881-b8b0-1cab43a25547\" (UID: \"66a36d00-ae7a-4881-b8b0-1cab43a25547\") " Jan 20 16:56:43 crc kubenswrapper[4558]: I0120 16:56:43.016458 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/66a36d00-ae7a-4881-b8b0-1cab43a25547-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "66a36d00-ae7a-4881-b8b0-1cab43a25547" (UID: "66a36d00-ae7a-4881-b8b0-1cab43a25547"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:56:43 crc kubenswrapper[4558]: I0120 16:56:43.016724 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2ql5k\" (UniqueName: \"kubernetes.io/projected/ad708448-38df-4494-bca7-fe394c9b53a7-kube-api-access-2ql5k\") pod \"swift-storage-0\" (UID: \"ad708448-38df-4494-bca7-fe394c9b53a7\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 16:56:43 crc kubenswrapper[4558]: I0120 16:56:43.016765 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/ad708448-38df-4494-bca7-fe394c9b53a7-lock\") pod \"swift-storage-0\" (UID: \"ad708448-38df-4494-bca7-fe394c9b53a7\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 16:56:43 crc kubenswrapper[4558]: I0120 16:56:43.016876 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/ad708448-38df-4494-bca7-fe394c9b53a7-etc-swift\") pod \"swift-storage-0\" (UID: \"ad708448-38df-4494-bca7-fe394c9b53a7\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 16:56:43 crc kubenswrapper[4558]: I0120 16:56:43.016959 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/ad708448-38df-4494-bca7-fe394c9b53a7-cache\") pod \"swift-storage-0\" (UID: \"ad708448-38df-4494-bca7-fe394c9b53a7\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 16:56:43 crc kubenswrapper[4558]: I0120 16:56:43.017064 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"swift-storage-0\" (UID: \"ad708448-38df-4494-bca7-fe394c9b53a7\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 16:56:43 crc kubenswrapper[4558]: I0120 16:56:43.017209 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/66a36d00-ae7a-4881-b8b0-1cab43a25547-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 16:56:43 crc kubenswrapper[4558]: I0120 16:56:43.018110 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-db-create-kds4k" Jan 20 16:56:43 crc kubenswrapper[4558]: I0120 16:56:43.021604 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/66a36d00-ae7a-4881-b8b0-1cab43a25547-kube-api-access-gtn85" (OuterVolumeSpecName: "kube-api-access-gtn85") pod "66a36d00-ae7a-4881-b8b0-1cab43a25547" (UID: "66a36d00-ae7a-4881-b8b0-1cab43a25547"). InnerVolumeSpecName "kube-api-access-gtn85". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:56:43 crc kubenswrapper[4558]: I0120 16:56:43.118580 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l2bmd\" (UniqueName: \"kubernetes.io/projected/cc72bb7b-054c-4141-9db7-1bf5a8b716bb-kube-api-access-l2bmd\") pod \"cc72bb7b-054c-4141-9db7-1bf5a8b716bb\" (UID: \"cc72bb7b-054c-4141-9db7-1bf5a8b716bb\") " Jan 20 16:56:43 crc kubenswrapper[4558]: I0120 16:56:43.118658 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wbc7h\" (UniqueName: \"kubernetes.io/projected/3da300ce-eec2-414b-afdd-01d3f3bd71c5-kube-api-access-wbc7h\") pod \"3da300ce-eec2-414b-afdd-01d3f3bd71c5\" (UID: \"3da300ce-eec2-414b-afdd-01d3f3bd71c5\") " Jan 20 16:56:43 crc kubenswrapper[4558]: I0120 16:56:43.118727 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3da300ce-eec2-414b-afdd-01d3f3bd71c5-operator-scripts\") pod \"3da300ce-eec2-414b-afdd-01d3f3bd71c5\" (UID: \"3da300ce-eec2-414b-afdd-01d3f3bd71c5\") " Jan 20 16:56:43 crc kubenswrapper[4558]: I0120 16:56:43.118787 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rlkcr\" (UniqueName: \"kubernetes.io/projected/e221e8f5-cc9e-4399-8b97-92ef8104bb70-kube-api-access-rlkcr\") pod \"e221e8f5-cc9e-4399-8b97-92ef8104bb70\" (UID: \"e221e8f5-cc9e-4399-8b97-92ef8104bb70\") " Jan 20 16:56:43 crc kubenswrapper[4558]: I0120 16:56:43.118819 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cc72bb7b-054c-4141-9db7-1bf5a8b716bb-operator-scripts\") pod \"cc72bb7b-054c-4141-9db7-1bf5a8b716bb\" (UID: \"cc72bb7b-054c-4141-9db7-1bf5a8b716bb\") " Jan 20 16:56:43 crc kubenswrapper[4558]: I0120 16:56:43.118852 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e221e8f5-cc9e-4399-8b97-92ef8104bb70-operator-scripts\") pod \"e221e8f5-cc9e-4399-8b97-92ef8104bb70\" (UID: \"e221e8f5-cc9e-4399-8b97-92ef8104bb70\") " Jan 20 16:56:43 crc kubenswrapper[4558]: I0120 16:56:43.119094 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2ql5k\" (UniqueName: \"kubernetes.io/projected/ad708448-38df-4494-bca7-fe394c9b53a7-kube-api-access-2ql5k\") pod \"swift-storage-0\" (UID: \"ad708448-38df-4494-bca7-fe394c9b53a7\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 16:56:43 crc kubenswrapper[4558]: I0120 16:56:43.119131 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/ad708448-38df-4494-bca7-fe394c9b53a7-lock\") pod \"swift-storage-0\" (UID: \"ad708448-38df-4494-bca7-fe394c9b53a7\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 16:56:43 crc kubenswrapper[4558]: I0120 16:56:43.119203 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/ad708448-38df-4494-bca7-fe394c9b53a7-etc-swift\") pod \"swift-storage-0\" (UID: \"ad708448-38df-4494-bca7-fe394c9b53a7\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 16:56:43 crc kubenswrapper[4558]: I0120 16:56:43.119230 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: 
\"kubernetes.io/empty-dir/ad708448-38df-4494-bca7-fe394c9b53a7-cache\") pod \"swift-storage-0\" (UID: \"ad708448-38df-4494-bca7-fe394c9b53a7\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 16:56:43 crc kubenswrapper[4558]: I0120 16:56:43.119332 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"swift-storage-0\" (UID: \"ad708448-38df-4494-bca7-fe394c9b53a7\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 16:56:43 crc kubenswrapper[4558]: E0120 16:56:43.119396 4558 projected.go:288] Couldn't get configMap openstack-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 20 16:56:43 crc kubenswrapper[4558]: E0120 16:56:43.119427 4558 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Jan 20 16:56:43 crc kubenswrapper[4558]: E0120 16:56:43.119477 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/ad708448-38df-4494-bca7-fe394c9b53a7-etc-swift podName:ad708448-38df-4494-bca7-fe394c9b53a7 nodeName:}" failed. No retries permitted until 2026-01-20 16:56:43.619460266 +0000 UTC m=+897.379798233 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/ad708448-38df-4494-bca7-fe394c9b53a7-etc-swift") pod "swift-storage-0" (UID: "ad708448-38df-4494-bca7-fe394c9b53a7") : configmap "swift-ring-files" not found Jan 20 16:56:43 crc kubenswrapper[4558]: I0120 16:56:43.119429 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gtn85\" (UniqueName: \"kubernetes.io/projected/66a36d00-ae7a-4881-b8b0-1cab43a25547-kube-api-access-gtn85\") on node \"crc\" DevicePath \"\"" Jan 20 16:56:43 crc kubenswrapper[4558]: I0120 16:56:43.119514 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/ad708448-38df-4494-bca7-fe394c9b53a7-lock\") pod \"swift-storage-0\" (UID: \"ad708448-38df-4494-bca7-fe394c9b53a7\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 16:56:43 crc kubenswrapper[4558]: I0120 16:56:43.119599 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/ad708448-38df-4494-bca7-fe394c9b53a7-cache\") pod \"swift-storage-0\" (UID: \"ad708448-38df-4494-bca7-fe394c9b53a7\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 16:56:43 crc kubenswrapper[4558]: I0120 16:56:43.119626 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e221e8f5-cc9e-4399-8b97-92ef8104bb70-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e221e8f5-cc9e-4399-8b97-92ef8104bb70" (UID: "e221e8f5-cc9e-4399-8b97-92ef8104bb70"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:56:43 crc kubenswrapper[4558]: I0120 16:56:43.119653 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"swift-storage-0\" (UID: \"ad708448-38df-4494-bca7-fe394c9b53a7\") device mount path \"/mnt/openstack/pv05\"" pod="openstack-kuttl-tests/swift-storage-0" Jan 20 16:56:43 crc kubenswrapper[4558]: I0120 16:56:43.119690 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3da300ce-eec2-414b-afdd-01d3f3bd71c5-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "3da300ce-eec2-414b-afdd-01d3f3bd71c5" (UID: "3da300ce-eec2-414b-afdd-01d3f3bd71c5"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:56:43 crc kubenswrapper[4558]: I0120 16:56:43.119762 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cc72bb7b-054c-4141-9db7-1bf5a8b716bb-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "cc72bb7b-054c-4141-9db7-1bf5a8b716bb" (UID: "cc72bb7b-054c-4141-9db7-1bf5a8b716bb"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:56:43 crc kubenswrapper[4558]: I0120 16:56:43.122618 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3da300ce-eec2-414b-afdd-01d3f3bd71c5-kube-api-access-wbc7h" (OuterVolumeSpecName: "kube-api-access-wbc7h") pod "3da300ce-eec2-414b-afdd-01d3f3bd71c5" (UID: "3da300ce-eec2-414b-afdd-01d3f3bd71c5"). InnerVolumeSpecName "kube-api-access-wbc7h". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:56:43 crc kubenswrapper[4558]: I0120 16:56:43.123096 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cc72bb7b-054c-4141-9db7-1bf5a8b716bb-kube-api-access-l2bmd" (OuterVolumeSpecName: "kube-api-access-l2bmd") pod "cc72bb7b-054c-4141-9db7-1bf5a8b716bb" (UID: "cc72bb7b-054c-4141-9db7-1bf5a8b716bb"). InnerVolumeSpecName "kube-api-access-l2bmd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:56:43 crc kubenswrapper[4558]: I0120 16:56:43.123236 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e221e8f5-cc9e-4399-8b97-92ef8104bb70-kube-api-access-rlkcr" (OuterVolumeSpecName: "kube-api-access-rlkcr") pod "e221e8f5-cc9e-4399-8b97-92ef8104bb70" (UID: "e221e8f5-cc9e-4399-8b97-92ef8104bb70"). InnerVolumeSpecName "kube-api-access-rlkcr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:56:43 crc kubenswrapper[4558]: I0120 16:56:43.135329 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"swift-storage-0\" (UID: \"ad708448-38df-4494-bca7-fe394c9b53a7\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 16:56:43 crc kubenswrapper[4558]: I0120 16:56:43.135684 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2ql5k\" (UniqueName: \"kubernetes.io/projected/ad708448-38df-4494-bca7-fe394c9b53a7-kube-api-access-2ql5k\") pod \"swift-storage-0\" (UID: \"ad708448-38df-4494-bca7-fe394c9b53a7\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 16:56:43 crc kubenswrapper[4558]: I0120 16:56:43.221203 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e221e8f5-cc9e-4399-8b97-92ef8104bb70-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 16:56:43 crc kubenswrapper[4558]: I0120 16:56:43.221232 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l2bmd\" (UniqueName: \"kubernetes.io/projected/cc72bb7b-054c-4141-9db7-1bf5a8b716bb-kube-api-access-l2bmd\") on node \"crc\" DevicePath \"\"" Jan 20 16:56:43 crc kubenswrapper[4558]: I0120 16:56:43.221243 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wbc7h\" (UniqueName: \"kubernetes.io/projected/3da300ce-eec2-414b-afdd-01d3f3bd71c5-kube-api-access-wbc7h\") on node \"crc\" DevicePath \"\"" Jan 20 16:56:43 crc kubenswrapper[4558]: I0120 16:56:43.221253 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3da300ce-eec2-414b-afdd-01d3f3bd71c5-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 16:56:43 crc kubenswrapper[4558]: I0120 16:56:43.221262 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rlkcr\" (UniqueName: \"kubernetes.io/projected/e221e8f5-cc9e-4399-8b97-92ef8104bb70-kube-api-access-rlkcr\") on node \"crc\" DevicePath \"\"" Jan 20 16:56:43 crc kubenswrapper[4558]: I0120 16:56:43.221272 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cc72bb7b-054c-4141-9db7-1bf5a8b716bb-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 16:56:43 crc kubenswrapper[4558]: I0120 16:56:43.562998 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-db-create-2jqq4" Jan 20 16:56:43 crc kubenswrapper[4558]: I0120 16:56:43.563000 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-create-2jqq4" event={"ID":"3da300ce-eec2-414b-afdd-01d3f3bd71c5","Type":"ContainerDied","Data":"aefc815a88f4ca935c13e661208aade03484012c1280e4f37cca6c772604cf2a"} Jan 20 16:56:43 crc kubenswrapper[4558]: I0120 16:56:43.563297 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="aefc815a88f4ca935c13e661208aade03484012c1280e4f37cca6c772604cf2a" Jan 20 16:56:43 crc kubenswrapper[4558]: I0120 16:56:43.564366 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-db-create-kds4k" Jan 20 16:56:43 crc kubenswrapper[4558]: I0120 16:56:43.564363 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-create-kds4k" event={"ID":"e221e8f5-cc9e-4399-8b97-92ef8104bb70","Type":"ContainerDied","Data":"f0697c2842d6783f4de023bab652d036e5d1a8aec081aab8a10c0d3ac2a8f820"} Jan 20 16:56:43 crc kubenswrapper[4558]: I0120 16:56:43.564407 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f0697c2842d6783f4de023bab652d036e5d1a8aec081aab8a10c0d3ac2a8f820" Jan 20 16:56:43 crc kubenswrapper[4558]: I0120 16:56:43.565522 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-7e4f-account-create-update-nq676" Jan 20 16:56:43 crc kubenswrapper[4558]: I0120 16:56:43.565522 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-7e4f-account-create-update-nq676" event={"ID":"66a36d00-ae7a-4881-b8b0-1cab43a25547","Type":"ContainerDied","Data":"4251f5ebfbe87c1ba568e32c868dddd7b7a57c8ddf0f4b460a1ba2c0c9900efc"} Jan 20 16:56:43 crc kubenswrapper[4558]: I0120 16:56:43.565686 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4251f5ebfbe87c1ba568e32c868dddd7b7a57c8ddf0f4b460a1ba2c0c9900efc" Jan 20 16:56:43 crc kubenswrapper[4558]: I0120 16:56:43.566577 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-64a7-account-create-update-5hw2k" event={"ID":"cc72bb7b-054c-4141-9db7-1bf5a8b716bb","Type":"ContainerDied","Data":"a3bf2950b6bb152ce6cb8180222141097ab0fcf5d45629b646893216e249e231"} Jan 20 16:56:43 crc kubenswrapper[4558]: I0120 16:56:43.566601 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a3bf2950b6bb152ce6cb8180222141097ab0fcf5d45629b646893216e249e231" Jan 20 16:56:43 crc kubenswrapper[4558]: I0120 16:56:43.566627 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-64a7-account-create-update-5hw2k" Jan 20 16:56:43 crc kubenswrapper[4558]: I0120 16:56:43.627417 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/ad708448-38df-4494-bca7-fe394c9b53a7-etc-swift\") pod \"swift-storage-0\" (UID: \"ad708448-38df-4494-bca7-fe394c9b53a7\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 16:56:43 crc kubenswrapper[4558]: E0120 16:56:43.628480 4558 projected.go:288] Couldn't get configMap openstack-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 20 16:56:43 crc kubenswrapper[4558]: E0120 16:56:43.628505 4558 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Jan 20 16:56:43 crc kubenswrapper[4558]: E0120 16:56:43.628543 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/ad708448-38df-4494-bca7-fe394c9b53a7-etc-swift podName:ad708448-38df-4494-bca7-fe394c9b53a7 nodeName:}" failed. No retries permitted until 2026-01-20 16:56:44.628529211 +0000 UTC m=+898.388867178 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/ad708448-38df-4494-bca7-fe394c9b53a7-etc-swift") pod "swift-storage-0" (UID: "ad708448-38df-4494-bca7-fe394c9b53a7") : configmap "swift-ring-files" not found Jan 20 16:56:44 crc kubenswrapper[4558]: I0120 16:56:44.642052 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/ad708448-38df-4494-bca7-fe394c9b53a7-etc-swift\") pod \"swift-storage-0\" (UID: \"ad708448-38df-4494-bca7-fe394c9b53a7\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 16:56:44 crc kubenswrapper[4558]: E0120 16:56:44.642275 4558 projected.go:288] Couldn't get configMap openstack-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 20 16:56:44 crc kubenswrapper[4558]: E0120 16:56:44.642302 4558 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Jan 20 16:56:44 crc kubenswrapper[4558]: E0120 16:56:44.642354 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/ad708448-38df-4494-bca7-fe394c9b53a7-etc-swift podName:ad708448-38df-4494-bca7-fe394c9b53a7 nodeName:}" failed. No retries permitted until 2026-01-20 16:56:46.64233745 +0000 UTC m=+900.402675417 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/ad708448-38df-4494-bca7-fe394c9b53a7-etc-swift") pod "swift-storage-0" (UID: "ad708448-38df-4494-bca7-fe394c9b53a7") : configmap "swift-ring-files" not found Jan 20 16:56:45 crc kubenswrapper[4558]: I0120 16:56:45.134008 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-db-create-ztgv2"] Jan 20 16:56:45 crc kubenswrapper[4558]: E0120 16:56:45.134306 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e221e8f5-cc9e-4399-8b97-92ef8104bb70" containerName="mariadb-database-create" Jan 20 16:56:45 crc kubenswrapper[4558]: I0120 16:56:45.134323 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e221e8f5-cc9e-4399-8b97-92ef8104bb70" containerName="mariadb-database-create" Jan 20 16:56:45 crc kubenswrapper[4558]: E0120 16:56:45.134333 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc72bb7b-054c-4141-9db7-1bf5a8b716bb" containerName="mariadb-account-create-update" Jan 20 16:56:45 crc kubenswrapper[4558]: I0120 16:56:45.134339 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc72bb7b-054c-4141-9db7-1bf5a8b716bb" containerName="mariadb-account-create-update" Jan 20 16:56:45 crc kubenswrapper[4558]: E0120 16:56:45.134359 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3da300ce-eec2-414b-afdd-01d3f3bd71c5" containerName="mariadb-database-create" Jan 20 16:56:45 crc kubenswrapper[4558]: I0120 16:56:45.134366 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="3da300ce-eec2-414b-afdd-01d3f3bd71c5" containerName="mariadb-database-create" Jan 20 16:56:45 crc kubenswrapper[4558]: I0120 16:56:45.134495 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="3da300ce-eec2-414b-afdd-01d3f3bd71c5" containerName="mariadb-database-create" Jan 20 16:56:45 crc kubenswrapper[4558]: I0120 16:56:45.134511 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="cc72bb7b-054c-4141-9db7-1bf5a8b716bb" containerName="mariadb-account-create-update" Jan 20 16:56:45 crc kubenswrapper[4558]: I0120 
16:56:45.134519 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e221e8f5-cc9e-4399-8b97-92ef8104bb70" containerName="mariadb-database-create" Jan 20 16:56:45 crc kubenswrapper[4558]: I0120 16:56:45.134927 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-db-create-ztgv2" Jan 20 16:56:45 crc kubenswrapper[4558]: I0120 16:56:45.142927 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-db-create-ztgv2"] Jan 20 16:56:45 crc kubenswrapper[4558]: I0120 16:56:45.236770 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-b594-account-create-update-8q4dg"] Jan 20 16:56:45 crc kubenswrapper[4558]: I0120 16:56:45.239243 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-b594-account-create-update-8q4dg" Jan 20 16:56:45 crc kubenswrapper[4558]: I0120 16:56:45.240773 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-db-secret" Jan 20 16:56:45 crc kubenswrapper[4558]: I0120 16:56:45.249921 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lq9bf\" (UniqueName: \"kubernetes.io/projected/20f64bc1-f5bb-48bc-a586-eadcfad3fa6b-kube-api-access-lq9bf\") pod \"glance-db-create-ztgv2\" (UID: \"20f64bc1-f5bb-48bc-a586-eadcfad3fa6b\") " pod="openstack-kuttl-tests/glance-db-create-ztgv2" Jan 20 16:56:45 crc kubenswrapper[4558]: I0120 16:56:45.250021 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/20f64bc1-f5bb-48bc-a586-eadcfad3fa6b-operator-scripts\") pod \"glance-db-create-ztgv2\" (UID: \"20f64bc1-f5bb-48bc-a586-eadcfad3fa6b\") " pod="openstack-kuttl-tests/glance-db-create-ztgv2" Jan 20 16:56:45 crc kubenswrapper[4558]: I0120 16:56:45.254132 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-b594-account-create-update-8q4dg"] Jan 20 16:56:45 crc kubenswrapper[4558]: I0120 16:56:45.351722 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gwlxh\" (UniqueName: \"kubernetes.io/projected/2c7d824d-0fa4-4a19-971b-88e9f1da0707-kube-api-access-gwlxh\") pod \"glance-b594-account-create-update-8q4dg\" (UID: \"2c7d824d-0fa4-4a19-971b-88e9f1da0707\") " pod="openstack-kuttl-tests/glance-b594-account-create-update-8q4dg" Jan 20 16:56:45 crc kubenswrapper[4558]: I0120 16:56:45.351828 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2c7d824d-0fa4-4a19-971b-88e9f1da0707-operator-scripts\") pod \"glance-b594-account-create-update-8q4dg\" (UID: \"2c7d824d-0fa4-4a19-971b-88e9f1da0707\") " pod="openstack-kuttl-tests/glance-b594-account-create-update-8q4dg" Jan 20 16:56:45 crc kubenswrapper[4558]: I0120 16:56:45.351921 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lq9bf\" (UniqueName: \"kubernetes.io/projected/20f64bc1-f5bb-48bc-a586-eadcfad3fa6b-kube-api-access-lq9bf\") pod \"glance-db-create-ztgv2\" (UID: \"20f64bc1-f5bb-48bc-a586-eadcfad3fa6b\") " pod="openstack-kuttl-tests/glance-db-create-ztgv2" Jan 20 16:56:45 crc kubenswrapper[4558]: I0120 16:56:45.351958 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/20f64bc1-f5bb-48bc-a586-eadcfad3fa6b-operator-scripts\") pod \"glance-db-create-ztgv2\" (UID: \"20f64bc1-f5bb-48bc-a586-eadcfad3fa6b\") " pod="openstack-kuttl-tests/glance-db-create-ztgv2" Jan 20 16:56:45 crc kubenswrapper[4558]: I0120 16:56:45.352594 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/20f64bc1-f5bb-48bc-a586-eadcfad3fa6b-operator-scripts\") pod \"glance-db-create-ztgv2\" (UID: \"20f64bc1-f5bb-48bc-a586-eadcfad3fa6b\") " pod="openstack-kuttl-tests/glance-db-create-ztgv2" Jan 20 16:56:45 crc kubenswrapper[4558]: I0120 16:56:45.367916 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lq9bf\" (UniqueName: \"kubernetes.io/projected/20f64bc1-f5bb-48bc-a586-eadcfad3fa6b-kube-api-access-lq9bf\") pod \"glance-db-create-ztgv2\" (UID: \"20f64bc1-f5bb-48bc-a586-eadcfad3fa6b\") " pod="openstack-kuttl-tests/glance-db-create-ztgv2" Jan 20 16:56:45 crc kubenswrapper[4558]: I0120 16:56:45.453383 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gwlxh\" (UniqueName: \"kubernetes.io/projected/2c7d824d-0fa4-4a19-971b-88e9f1da0707-kube-api-access-gwlxh\") pod \"glance-b594-account-create-update-8q4dg\" (UID: \"2c7d824d-0fa4-4a19-971b-88e9f1da0707\") " pod="openstack-kuttl-tests/glance-b594-account-create-update-8q4dg" Jan 20 16:56:45 crc kubenswrapper[4558]: I0120 16:56:45.453471 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2c7d824d-0fa4-4a19-971b-88e9f1da0707-operator-scripts\") pod \"glance-b594-account-create-update-8q4dg\" (UID: \"2c7d824d-0fa4-4a19-971b-88e9f1da0707\") " pod="openstack-kuttl-tests/glance-b594-account-create-update-8q4dg" Jan 20 16:56:45 crc kubenswrapper[4558]: I0120 16:56:45.453687 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-db-create-ztgv2" Jan 20 16:56:45 crc kubenswrapper[4558]: I0120 16:56:45.454070 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2c7d824d-0fa4-4a19-971b-88e9f1da0707-operator-scripts\") pod \"glance-b594-account-create-update-8q4dg\" (UID: \"2c7d824d-0fa4-4a19-971b-88e9f1da0707\") " pod="openstack-kuttl-tests/glance-b594-account-create-update-8q4dg" Jan 20 16:56:45 crc kubenswrapper[4558]: I0120 16:56:45.466913 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gwlxh\" (UniqueName: \"kubernetes.io/projected/2c7d824d-0fa4-4a19-971b-88e9f1da0707-kube-api-access-gwlxh\") pod \"glance-b594-account-create-update-8q4dg\" (UID: \"2c7d824d-0fa4-4a19-971b-88e9f1da0707\") " pod="openstack-kuttl-tests/glance-b594-account-create-update-8q4dg" Jan 20 16:56:45 crc kubenswrapper[4558]: I0120 16:56:45.558838 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-b594-account-create-update-8q4dg" Jan 20 16:56:45 crc kubenswrapper[4558]: I0120 16:56:45.802952 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-db-create-ztgv2"] Jan 20 16:56:45 crc kubenswrapper[4558]: W0120 16:56:45.804911 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod20f64bc1_f5bb_48bc_a586_eadcfad3fa6b.slice/crio-6d2fe315a3172b64aba0ba8c66cef286dac64b5a372205952c43291093f00c04 WatchSource:0}: Error finding container 6d2fe315a3172b64aba0ba8c66cef286dac64b5a372205952c43291093f00c04: Status 404 returned error can't find the container with id 6d2fe315a3172b64aba0ba8c66cef286dac64b5a372205952c43291093f00c04 Jan 20 16:56:45 crc kubenswrapper[4558]: I0120 16:56:45.929832 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-b594-account-create-update-8q4dg"] Jan 20 16:56:45 crc kubenswrapper[4558]: W0120 16:56:45.933975 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2c7d824d_0fa4_4a19_971b_88e9f1da0707.slice/crio-d7812d824659fba7f551c66fbdf0c6bad46b6f1a7cb15e21ea4c2733e35e27e6 WatchSource:0}: Error finding container d7812d824659fba7f551c66fbdf0c6bad46b6f1a7cb15e21ea4c2733e35e27e6: Status 404 returned error can't find the container with id d7812d824659fba7f551c66fbdf0c6bad46b6f1a7cb15e21ea4c2733e35e27e6 Jan 20 16:56:46 crc kubenswrapper[4558]: I0120 16:56:46.594956 4558 generic.go:334] "Generic (PLEG): container finished" podID="2c7d824d-0fa4-4a19-971b-88e9f1da0707" containerID="81a298964f3ebaf7af8b941cde650e1463fc246ad00310e6cefc05c9e48309b1" exitCode=0 Jan 20 16:56:46 crc kubenswrapper[4558]: I0120 16:56:46.595033 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-b594-account-create-update-8q4dg" event={"ID":"2c7d824d-0fa4-4a19-971b-88e9f1da0707","Type":"ContainerDied","Data":"81a298964f3ebaf7af8b941cde650e1463fc246ad00310e6cefc05c9e48309b1"} Jan 20 16:56:46 crc kubenswrapper[4558]: I0120 16:56:46.595474 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-b594-account-create-update-8q4dg" event={"ID":"2c7d824d-0fa4-4a19-971b-88e9f1da0707","Type":"ContainerStarted","Data":"d7812d824659fba7f551c66fbdf0c6bad46b6f1a7cb15e21ea4c2733e35e27e6"} Jan 20 16:56:46 crc kubenswrapper[4558]: I0120 16:56:46.597214 4558 generic.go:334] "Generic (PLEG): container finished" podID="20f64bc1-f5bb-48bc-a586-eadcfad3fa6b" containerID="9836d70012398b9d97dedfe1f355a7b969edd38aa09ecfb16ea977a111566eb7" exitCode=0 Jan 20 16:56:46 crc kubenswrapper[4558]: I0120 16:56:46.597240 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-create-ztgv2" event={"ID":"20f64bc1-f5bb-48bc-a586-eadcfad3fa6b","Type":"ContainerDied","Data":"9836d70012398b9d97dedfe1f355a7b969edd38aa09ecfb16ea977a111566eb7"} Jan 20 16:56:46 crc kubenswrapper[4558]: I0120 16:56:46.597254 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-create-ztgv2" event={"ID":"20f64bc1-f5bb-48bc-a586-eadcfad3fa6b","Type":"ContainerStarted","Data":"6d2fe315a3172b64aba0ba8c66cef286dac64b5a372205952c43291093f00c04"} Jan 20 16:56:46 crc kubenswrapper[4558]: I0120 16:56:46.674040 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: 
\"kubernetes.io/projected/ad708448-38df-4494-bca7-fe394c9b53a7-etc-swift\") pod \"swift-storage-0\" (UID: \"ad708448-38df-4494-bca7-fe394c9b53a7\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 16:56:46 crc kubenswrapper[4558]: E0120 16:56:46.674219 4558 projected.go:288] Couldn't get configMap openstack-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 20 16:56:46 crc kubenswrapper[4558]: E0120 16:56:46.674243 4558 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Jan 20 16:56:46 crc kubenswrapper[4558]: E0120 16:56:46.674291 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/ad708448-38df-4494-bca7-fe394c9b53a7-etc-swift podName:ad708448-38df-4494-bca7-fe394c9b53a7 nodeName:}" failed. No retries permitted until 2026-01-20 16:56:50.674276936 +0000 UTC m=+904.434614903 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/ad708448-38df-4494-bca7-fe394c9b53a7-etc-swift") pod "swift-storage-0" (UID: "ad708448-38df-4494-bca7-fe394c9b53a7") : configmap "swift-ring-files" not found Jan 20 16:56:46 crc kubenswrapper[4558]: I0120 16:56:46.737657 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/root-account-create-update-wdggt"] Jan 20 16:56:46 crc kubenswrapper[4558]: I0120 16:56:46.738516 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-wdggt" Jan 20 16:56:46 crc kubenswrapper[4558]: I0120 16:56:46.742024 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"openstack-mariadb-root-db-secret" Jan 20 16:56:46 crc kubenswrapper[4558]: I0120 16:56:46.746933 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-wdggt"] Jan 20 16:56:46 crc kubenswrapper[4558]: I0120 16:56:46.877211 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7cfln\" (UniqueName: \"kubernetes.io/projected/9e76a80a-0569-4d9c-a5da-793d0290ad83-kube-api-access-7cfln\") pod \"root-account-create-update-wdggt\" (UID: \"9e76a80a-0569-4d9c-a5da-793d0290ad83\") " pod="openstack-kuttl-tests/root-account-create-update-wdggt" Jan 20 16:56:46 crc kubenswrapper[4558]: I0120 16:56:46.877429 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9e76a80a-0569-4d9c-a5da-793d0290ad83-operator-scripts\") pod \"root-account-create-update-wdggt\" (UID: \"9e76a80a-0569-4d9c-a5da-793d0290ad83\") " pod="openstack-kuttl-tests/root-account-create-update-wdggt" Jan 20 16:56:46 crc kubenswrapper[4558]: I0120 16:56:46.978532 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9e76a80a-0569-4d9c-a5da-793d0290ad83-operator-scripts\") pod \"root-account-create-update-wdggt\" (UID: \"9e76a80a-0569-4d9c-a5da-793d0290ad83\") " pod="openstack-kuttl-tests/root-account-create-update-wdggt" Jan 20 16:56:46 crc kubenswrapper[4558]: I0120 16:56:46.978878 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7cfln\" (UniqueName: \"kubernetes.io/projected/9e76a80a-0569-4d9c-a5da-793d0290ad83-kube-api-access-7cfln\") pod 
\"root-account-create-update-wdggt\" (UID: \"9e76a80a-0569-4d9c-a5da-793d0290ad83\") " pod="openstack-kuttl-tests/root-account-create-update-wdggt" Jan 20 16:56:46 crc kubenswrapper[4558]: I0120 16:56:46.979207 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9e76a80a-0569-4d9c-a5da-793d0290ad83-operator-scripts\") pod \"root-account-create-update-wdggt\" (UID: \"9e76a80a-0569-4d9c-a5da-793d0290ad83\") " pod="openstack-kuttl-tests/root-account-create-update-wdggt" Jan 20 16:56:46 crc kubenswrapper[4558]: I0120 16:56:46.989313 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-49f5x"] Jan 20 16:56:46 crc kubenswrapper[4558]: I0120 16:56:46.990101 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-ring-rebalance-49f5x" Jan 20 16:56:46 crc kubenswrapper[4558]: I0120 16:56:46.993214 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"swift-ring-config-data" Jan 20 16:56:46 crc kubenswrapper[4558]: I0120 16:56:46.998273 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7cfln\" (UniqueName: \"kubernetes.io/projected/9e76a80a-0569-4d9c-a5da-793d0290ad83-kube-api-access-7cfln\") pod \"root-account-create-update-wdggt\" (UID: \"9e76a80a-0569-4d9c-a5da-793d0290ad83\") " pod="openstack-kuttl-tests/root-account-create-update-wdggt" Jan 20 16:56:46 crc kubenswrapper[4558]: I0120 16:56:46.998324 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"swift-ring-scripts" Jan 20 16:56:46 crc kubenswrapper[4558]: I0120 16:56:46.998386 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"swift-proxy-config-data" Jan 20 16:56:47 crc kubenswrapper[4558]: I0120 16:56:47.003322 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-49f5x"] Jan 20 16:56:47 crc kubenswrapper[4558]: I0120 16:56:47.057208 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-wdggt" Jan 20 16:56:47 crc kubenswrapper[4558]: I0120 16:56:47.080106 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/0e936a71-a599-4e5a-a8b7-9cde8054a1b5-dispersionconf\") pod \"swift-ring-rebalance-49f5x\" (UID: \"0e936a71-a599-4e5a-a8b7-9cde8054a1b5\") " pod="openstack-kuttl-tests/swift-ring-rebalance-49f5x" Jan 20 16:56:47 crc kubenswrapper[4558]: I0120 16:56:47.080417 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k7b9q\" (UniqueName: \"kubernetes.io/projected/0e936a71-a599-4e5a-a8b7-9cde8054a1b5-kube-api-access-k7b9q\") pod \"swift-ring-rebalance-49f5x\" (UID: \"0e936a71-a599-4e5a-a8b7-9cde8054a1b5\") " pod="openstack-kuttl-tests/swift-ring-rebalance-49f5x" Jan 20 16:56:47 crc kubenswrapper[4558]: I0120 16:56:47.080456 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e936a71-a599-4e5a-a8b7-9cde8054a1b5-combined-ca-bundle\") pod \"swift-ring-rebalance-49f5x\" (UID: \"0e936a71-a599-4e5a-a8b7-9cde8054a1b5\") " pod="openstack-kuttl-tests/swift-ring-rebalance-49f5x" Jan 20 16:56:47 crc kubenswrapper[4558]: I0120 16:56:47.080491 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/0e936a71-a599-4e5a-a8b7-9cde8054a1b5-swiftconf\") pod \"swift-ring-rebalance-49f5x\" (UID: \"0e936a71-a599-4e5a-a8b7-9cde8054a1b5\") " pod="openstack-kuttl-tests/swift-ring-rebalance-49f5x" Jan 20 16:56:47 crc kubenswrapper[4558]: I0120 16:56:47.080506 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/0e936a71-a599-4e5a-a8b7-9cde8054a1b5-etc-swift\") pod \"swift-ring-rebalance-49f5x\" (UID: \"0e936a71-a599-4e5a-a8b7-9cde8054a1b5\") " pod="openstack-kuttl-tests/swift-ring-rebalance-49f5x" Jan 20 16:56:47 crc kubenswrapper[4558]: I0120 16:56:47.080529 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/0e936a71-a599-4e5a-a8b7-9cde8054a1b5-ring-data-devices\") pod \"swift-ring-rebalance-49f5x\" (UID: \"0e936a71-a599-4e5a-a8b7-9cde8054a1b5\") " pod="openstack-kuttl-tests/swift-ring-rebalance-49f5x" Jan 20 16:56:47 crc kubenswrapper[4558]: I0120 16:56:47.080549 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0e936a71-a599-4e5a-a8b7-9cde8054a1b5-scripts\") pod \"swift-ring-rebalance-49f5x\" (UID: \"0e936a71-a599-4e5a-a8b7-9cde8054a1b5\") " pod="openstack-kuttl-tests/swift-ring-rebalance-49f5x" Jan 20 16:56:47 crc kubenswrapper[4558]: I0120 16:56:47.181500 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0e936a71-a599-4e5a-a8b7-9cde8054a1b5-scripts\") pod \"swift-ring-rebalance-49f5x\" (UID: \"0e936a71-a599-4e5a-a8b7-9cde8054a1b5\") " pod="openstack-kuttl-tests/swift-ring-rebalance-49f5x" Jan 20 16:56:47 crc kubenswrapper[4558]: I0120 16:56:47.181580 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: 
\"kubernetes.io/secret/0e936a71-a599-4e5a-a8b7-9cde8054a1b5-dispersionconf\") pod \"swift-ring-rebalance-49f5x\" (UID: \"0e936a71-a599-4e5a-a8b7-9cde8054a1b5\") " pod="openstack-kuttl-tests/swift-ring-rebalance-49f5x" Jan 20 16:56:47 crc kubenswrapper[4558]: I0120 16:56:47.181633 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k7b9q\" (UniqueName: \"kubernetes.io/projected/0e936a71-a599-4e5a-a8b7-9cde8054a1b5-kube-api-access-k7b9q\") pod \"swift-ring-rebalance-49f5x\" (UID: \"0e936a71-a599-4e5a-a8b7-9cde8054a1b5\") " pod="openstack-kuttl-tests/swift-ring-rebalance-49f5x" Jan 20 16:56:47 crc kubenswrapper[4558]: I0120 16:56:47.181666 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e936a71-a599-4e5a-a8b7-9cde8054a1b5-combined-ca-bundle\") pod \"swift-ring-rebalance-49f5x\" (UID: \"0e936a71-a599-4e5a-a8b7-9cde8054a1b5\") " pod="openstack-kuttl-tests/swift-ring-rebalance-49f5x" Jan 20 16:56:47 crc kubenswrapper[4558]: I0120 16:56:47.181695 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/0e936a71-a599-4e5a-a8b7-9cde8054a1b5-swiftconf\") pod \"swift-ring-rebalance-49f5x\" (UID: \"0e936a71-a599-4e5a-a8b7-9cde8054a1b5\") " pod="openstack-kuttl-tests/swift-ring-rebalance-49f5x" Jan 20 16:56:47 crc kubenswrapper[4558]: I0120 16:56:47.181710 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/0e936a71-a599-4e5a-a8b7-9cde8054a1b5-etc-swift\") pod \"swift-ring-rebalance-49f5x\" (UID: \"0e936a71-a599-4e5a-a8b7-9cde8054a1b5\") " pod="openstack-kuttl-tests/swift-ring-rebalance-49f5x" Jan 20 16:56:47 crc kubenswrapper[4558]: I0120 16:56:47.181733 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/0e936a71-a599-4e5a-a8b7-9cde8054a1b5-ring-data-devices\") pod \"swift-ring-rebalance-49f5x\" (UID: \"0e936a71-a599-4e5a-a8b7-9cde8054a1b5\") " pod="openstack-kuttl-tests/swift-ring-rebalance-49f5x" Jan 20 16:56:47 crc kubenswrapper[4558]: I0120 16:56:47.182514 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/0e936a71-a599-4e5a-a8b7-9cde8054a1b5-ring-data-devices\") pod \"swift-ring-rebalance-49f5x\" (UID: \"0e936a71-a599-4e5a-a8b7-9cde8054a1b5\") " pod="openstack-kuttl-tests/swift-ring-rebalance-49f5x" Jan 20 16:56:47 crc kubenswrapper[4558]: I0120 16:56:47.182907 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/0e936a71-a599-4e5a-a8b7-9cde8054a1b5-etc-swift\") pod \"swift-ring-rebalance-49f5x\" (UID: \"0e936a71-a599-4e5a-a8b7-9cde8054a1b5\") " pod="openstack-kuttl-tests/swift-ring-rebalance-49f5x" Jan 20 16:56:47 crc kubenswrapper[4558]: I0120 16:56:47.184370 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0e936a71-a599-4e5a-a8b7-9cde8054a1b5-scripts\") pod \"swift-ring-rebalance-49f5x\" (UID: \"0e936a71-a599-4e5a-a8b7-9cde8054a1b5\") " pod="openstack-kuttl-tests/swift-ring-rebalance-49f5x" Jan 20 16:56:47 crc kubenswrapper[4558]: I0120 16:56:47.185636 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/0e936a71-a599-4e5a-a8b7-9cde8054a1b5-combined-ca-bundle\") pod \"swift-ring-rebalance-49f5x\" (UID: \"0e936a71-a599-4e5a-a8b7-9cde8054a1b5\") " pod="openstack-kuttl-tests/swift-ring-rebalance-49f5x" Jan 20 16:56:47 crc kubenswrapper[4558]: I0120 16:56:47.186067 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/0e936a71-a599-4e5a-a8b7-9cde8054a1b5-dispersionconf\") pod \"swift-ring-rebalance-49f5x\" (UID: \"0e936a71-a599-4e5a-a8b7-9cde8054a1b5\") " pod="openstack-kuttl-tests/swift-ring-rebalance-49f5x" Jan 20 16:56:47 crc kubenswrapper[4558]: I0120 16:56:47.186957 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/0e936a71-a599-4e5a-a8b7-9cde8054a1b5-swiftconf\") pod \"swift-ring-rebalance-49f5x\" (UID: \"0e936a71-a599-4e5a-a8b7-9cde8054a1b5\") " pod="openstack-kuttl-tests/swift-ring-rebalance-49f5x" Jan 20 16:56:47 crc kubenswrapper[4558]: I0120 16:56:47.196835 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k7b9q\" (UniqueName: \"kubernetes.io/projected/0e936a71-a599-4e5a-a8b7-9cde8054a1b5-kube-api-access-k7b9q\") pod \"swift-ring-rebalance-49f5x\" (UID: \"0e936a71-a599-4e5a-a8b7-9cde8054a1b5\") " pod="openstack-kuttl-tests/swift-ring-rebalance-49f5x" Jan 20 16:56:47 crc kubenswrapper[4558]: I0120 16:56:47.337746 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"swift-swift-dockercfg-42b9p" Jan 20 16:56:47 crc kubenswrapper[4558]: I0120 16:56:47.346783 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-ring-rebalance-49f5x" Jan 20 16:56:47 crc kubenswrapper[4558]: I0120 16:56:47.417855 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-wdggt"] Jan 20 16:56:47 crc kubenswrapper[4558]: W0120 16:56:47.428273 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9e76a80a_0569_4d9c_a5da_793d0290ad83.slice/crio-43b7dc05d69bb77dc1f90ba86feb070147731e5758a9e79dd9ec8ec6a446c8b9 WatchSource:0}: Error finding container 43b7dc05d69bb77dc1f90ba86feb070147731e5758a9e79dd9ec8ec6a446c8b9: Status 404 returned error can't find the container with id 43b7dc05d69bb77dc1f90ba86feb070147731e5758a9e79dd9ec8ec6a446c8b9 Jan 20 16:56:47 crc kubenswrapper[4558]: I0120 16:56:47.603793 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-wdggt" event={"ID":"9e76a80a-0569-4d9c-a5da-793d0290ad83","Type":"ContainerStarted","Data":"ff1ae6eb7658764a108f757416b422839e42b8f1bcf59e8c59c55baa302f8c10"} Jan 20 16:56:47 crc kubenswrapper[4558]: I0120 16:56:47.605060 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-wdggt" event={"ID":"9e76a80a-0569-4d9c-a5da-793d0290ad83","Type":"ContainerStarted","Data":"43b7dc05d69bb77dc1f90ba86feb070147731e5758a9e79dd9ec8ec6a446c8b9"} Jan 20 16:56:47 crc kubenswrapper[4558]: I0120 16:56:47.617810 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/root-account-create-update-wdggt" podStartSLOduration=1.617800624 podStartE2EDuration="1.617800624s" podCreationTimestamp="2026-01-20 16:56:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 
00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:56:47.611986411 +0000 UTC m=+901.372324378" watchObservedRunningTime="2026-01-20 16:56:47.617800624 +0000 UTC m=+901.378138592" Jan 20 16:56:47 crc kubenswrapper[4558]: I0120 16:56:47.713733 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-49f5x"] Jan 20 16:56:47 crc kubenswrapper[4558]: W0120 16:56:47.728964 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0e936a71_a599_4e5a_a8b7_9cde8054a1b5.slice/crio-eed866d47c32492c41a2906dfa1b48d235a068892f54ffdba59d332aad584d18 WatchSource:0}: Error finding container eed866d47c32492c41a2906dfa1b48d235a068892f54ffdba59d332aad584d18: Status 404 returned error can't find the container with id eed866d47c32492c41a2906dfa1b48d235a068892f54ffdba59d332aad584d18 Jan 20 16:56:47 crc kubenswrapper[4558]: I0120 16:56:47.950228 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-db-create-ztgv2" Jan 20 16:56:47 crc kubenswrapper[4558]: I0120 16:56:47.954621 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-b594-account-create-update-8q4dg" Jan 20 16:56:48 crc kubenswrapper[4558]: I0120 16:56:48.096008 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/20f64bc1-f5bb-48bc-a586-eadcfad3fa6b-operator-scripts\") pod \"20f64bc1-f5bb-48bc-a586-eadcfad3fa6b\" (UID: \"20f64bc1-f5bb-48bc-a586-eadcfad3fa6b\") " Jan 20 16:56:48 crc kubenswrapper[4558]: I0120 16:56:48.096093 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lq9bf\" (UniqueName: \"kubernetes.io/projected/20f64bc1-f5bb-48bc-a586-eadcfad3fa6b-kube-api-access-lq9bf\") pod \"20f64bc1-f5bb-48bc-a586-eadcfad3fa6b\" (UID: \"20f64bc1-f5bb-48bc-a586-eadcfad3fa6b\") " Jan 20 16:56:48 crc kubenswrapper[4558]: I0120 16:56:48.096147 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2c7d824d-0fa4-4a19-971b-88e9f1da0707-operator-scripts\") pod \"2c7d824d-0fa4-4a19-971b-88e9f1da0707\" (UID: \"2c7d824d-0fa4-4a19-971b-88e9f1da0707\") " Jan 20 16:56:48 crc kubenswrapper[4558]: I0120 16:56:48.096275 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gwlxh\" (UniqueName: \"kubernetes.io/projected/2c7d824d-0fa4-4a19-971b-88e9f1da0707-kube-api-access-gwlxh\") pod \"2c7d824d-0fa4-4a19-971b-88e9f1da0707\" (UID: \"2c7d824d-0fa4-4a19-971b-88e9f1da0707\") " Jan 20 16:56:48 crc kubenswrapper[4558]: I0120 16:56:48.099774 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c7d824d-0fa4-4a19-971b-88e9f1da0707-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "2c7d824d-0fa4-4a19-971b-88e9f1da0707" (UID: "2c7d824d-0fa4-4a19-971b-88e9f1da0707"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:56:48 crc kubenswrapper[4558]: I0120 16:56:48.100311 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/20f64bc1-f5bb-48bc-a586-eadcfad3fa6b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "20f64bc1-f5bb-48bc-a586-eadcfad3fa6b" (UID: "20f64bc1-f5bb-48bc-a586-eadcfad3fa6b"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:56:48 crc kubenswrapper[4558]: I0120 16:56:48.116292 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2c7d824d-0fa4-4a19-971b-88e9f1da0707-kube-api-access-gwlxh" (OuterVolumeSpecName: "kube-api-access-gwlxh") pod "2c7d824d-0fa4-4a19-971b-88e9f1da0707" (UID: "2c7d824d-0fa4-4a19-971b-88e9f1da0707"). InnerVolumeSpecName "kube-api-access-gwlxh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:56:48 crc kubenswrapper[4558]: I0120 16:56:48.125271 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20f64bc1-f5bb-48bc-a586-eadcfad3fa6b-kube-api-access-lq9bf" (OuterVolumeSpecName: "kube-api-access-lq9bf") pod "20f64bc1-f5bb-48bc-a586-eadcfad3fa6b" (UID: "20f64bc1-f5bb-48bc-a586-eadcfad3fa6b"). InnerVolumeSpecName "kube-api-access-lq9bf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:56:48 crc kubenswrapper[4558]: I0120 16:56:48.198410 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lq9bf\" (UniqueName: \"kubernetes.io/projected/20f64bc1-f5bb-48bc-a586-eadcfad3fa6b-kube-api-access-lq9bf\") on node \"crc\" DevicePath \"\"" Jan 20 16:56:48 crc kubenswrapper[4558]: I0120 16:56:48.198581 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2c7d824d-0fa4-4a19-971b-88e9f1da0707-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 16:56:48 crc kubenswrapper[4558]: I0120 16:56:48.198671 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gwlxh\" (UniqueName: \"kubernetes.io/projected/2c7d824d-0fa4-4a19-971b-88e9f1da0707-kube-api-access-gwlxh\") on node \"crc\" DevicePath \"\"" Jan 20 16:56:48 crc kubenswrapper[4558]: I0120 16:56:48.198742 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/20f64bc1-f5bb-48bc-a586-eadcfad3fa6b-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 16:56:48 crc kubenswrapper[4558]: I0120 16:56:48.610752 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-b594-account-create-update-8q4dg" Jan 20 16:56:48 crc kubenswrapper[4558]: I0120 16:56:48.610749 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-b594-account-create-update-8q4dg" event={"ID":"2c7d824d-0fa4-4a19-971b-88e9f1da0707","Type":"ContainerDied","Data":"d7812d824659fba7f551c66fbdf0c6bad46b6f1a7cb15e21ea4c2733e35e27e6"} Jan 20 16:56:48 crc kubenswrapper[4558]: I0120 16:56:48.611178 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d7812d824659fba7f551c66fbdf0c6bad46b6f1a7cb15e21ea4c2733e35e27e6" Jan 20 16:56:48 crc kubenswrapper[4558]: I0120 16:56:48.611873 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-create-ztgv2" event={"ID":"20f64bc1-f5bb-48bc-a586-eadcfad3fa6b","Type":"ContainerDied","Data":"6d2fe315a3172b64aba0ba8c66cef286dac64b5a372205952c43291093f00c04"} Jan 20 16:56:48 crc kubenswrapper[4558]: I0120 16:56:48.611906 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6d2fe315a3172b64aba0ba8c66cef286dac64b5a372205952c43291093f00c04" Jan 20 16:56:48 crc kubenswrapper[4558]: I0120 16:56:48.611954 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-db-create-ztgv2" Jan 20 16:56:48 crc kubenswrapper[4558]: I0120 16:56:48.613023 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-ring-rebalance-49f5x" event={"ID":"0e936a71-a599-4e5a-a8b7-9cde8054a1b5","Type":"ContainerStarted","Data":"eed866d47c32492c41a2906dfa1b48d235a068892f54ffdba59d332aad584d18"} Jan 20 16:56:48 crc kubenswrapper[4558]: I0120 16:56:48.614648 4558 generic.go:334] "Generic (PLEG): container finished" podID="9e76a80a-0569-4d9c-a5da-793d0290ad83" containerID="ff1ae6eb7658764a108f757416b422839e42b8f1bcf59e8c59c55baa302f8c10" exitCode=0 Jan 20 16:56:48 crc kubenswrapper[4558]: I0120 16:56:48.614737 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-wdggt" event={"ID":"9e76a80a-0569-4d9c-a5da-793d0290ad83","Type":"ContainerDied","Data":"ff1ae6eb7658764a108f757416b422839e42b8f1bcf59e8c59c55baa302f8c10"} Jan 20 16:56:50 crc kubenswrapper[4558]: I0120 16:56:50.373755 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-db-sync-2hhvl"] Jan 20 16:56:50 crc kubenswrapper[4558]: E0120 16:56:50.374276 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20f64bc1-f5bb-48bc-a586-eadcfad3fa6b" containerName="mariadb-database-create" Jan 20 16:56:50 crc kubenswrapper[4558]: I0120 16:56:50.374289 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="20f64bc1-f5bb-48bc-a586-eadcfad3fa6b" containerName="mariadb-database-create" Jan 20 16:56:50 crc kubenswrapper[4558]: E0120 16:56:50.374306 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c7d824d-0fa4-4a19-971b-88e9f1da0707" containerName="mariadb-account-create-update" Jan 20 16:56:50 crc kubenswrapper[4558]: I0120 16:56:50.374313 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c7d824d-0fa4-4a19-971b-88e9f1da0707" containerName="mariadb-account-create-update" Jan 20 16:56:50 crc kubenswrapper[4558]: I0120 16:56:50.374447 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="20f64bc1-f5bb-48bc-a586-eadcfad3fa6b" containerName="mariadb-database-create" Jan 20 16:56:50 crc kubenswrapper[4558]: I0120 
16:56:50.374461 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="2c7d824d-0fa4-4a19-971b-88e9f1da0707" containerName="mariadb-account-create-update" Jan 20 16:56:50 crc kubenswrapper[4558]: I0120 16:56:50.374939 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-db-sync-2hhvl" Jan 20 16:56:50 crc kubenswrapper[4558]: I0120 16:56:50.376636 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-glance-dockercfg-67jwr" Jan 20 16:56:50 crc kubenswrapper[4558]: I0120 16:56:50.379603 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-config-data" Jan 20 16:56:50 crc kubenswrapper[4558]: I0120 16:56:50.382518 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-db-sync-2hhvl"] Jan 20 16:56:50 crc kubenswrapper[4558]: I0120 16:56:50.530211 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe5cc26d-38d6-43e0-8977-6ff7e991ab6c-combined-ca-bundle\") pod \"glance-db-sync-2hhvl\" (UID: \"fe5cc26d-38d6-43e0-8977-6ff7e991ab6c\") " pod="openstack-kuttl-tests/glance-db-sync-2hhvl" Jan 20 16:56:50 crc kubenswrapper[4558]: I0120 16:56:50.530633 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fe5cc26d-38d6-43e0-8977-6ff7e991ab6c-config-data\") pod \"glance-db-sync-2hhvl\" (UID: \"fe5cc26d-38d6-43e0-8977-6ff7e991ab6c\") " pod="openstack-kuttl-tests/glance-db-sync-2hhvl" Jan 20 16:56:50 crc kubenswrapper[4558]: I0120 16:56:50.530710 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-896bc\" (UniqueName: \"kubernetes.io/projected/fe5cc26d-38d6-43e0-8977-6ff7e991ab6c-kube-api-access-896bc\") pod \"glance-db-sync-2hhvl\" (UID: \"fe5cc26d-38d6-43e0-8977-6ff7e991ab6c\") " pod="openstack-kuttl-tests/glance-db-sync-2hhvl" Jan 20 16:56:50 crc kubenswrapper[4558]: I0120 16:56:50.530755 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/fe5cc26d-38d6-43e0-8977-6ff7e991ab6c-db-sync-config-data\") pod \"glance-db-sync-2hhvl\" (UID: \"fe5cc26d-38d6-43e0-8977-6ff7e991ab6c\") " pod="openstack-kuttl-tests/glance-db-sync-2hhvl" Jan 20 16:56:50 crc kubenswrapper[4558]: I0120 16:56:50.632192 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-896bc\" (UniqueName: \"kubernetes.io/projected/fe5cc26d-38d6-43e0-8977-6ff7e991ab6c-kube-api-access-896bc\") pod \"glance-db-sync-2hhvl\" (UID: \"fe5cc26d-38d6-43e0-8977-6ff7e991ab6c\") " pod="openstack-kuttl-tests/glance-db-sync-2hhvl" Jan 20 16:56:50 crc kubenswrapper[4558]: I0120 16:56:50.632286 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/fe5cc26d-38d6-43e0-8977-6ff7e991ab6c-db-sync-config-data\") pod \"glance-db-sync-2hhvl\" (UID: \"fe5cc26d-38d6-43e0-8977-6ff7e991ab6c\") " pod="openstack-kuttl-tests/glance-db-sync-2hhvl" Jan 20 16:56:50 crc kubenswrapper[4558]: I0120 16:56:50.632372 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/fe5cc26d-38d6-43e0-8977-6ff7e991ab6c-combined-ca-bundle\") pod \"glance-db-sync-2hhvl\" (UID: \"fe5cc26d-38d6-43e0-8977-6ff7e991ab6c\") " pod="openstack-kuttl-tests/glance-db-sync-2hhvl" Jan 20 16:56:50 crc kubenswrapper[4558]: I0120 16:56:50.632457 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fe5cc26d-38d6-43e0-8977-6ff7e991ab6c-config-data\") pod \"glance-db-sync-2hhvl\" (UID: \"fe5cc26d-38d6-43e0-8977-6ff7e991ab6c\") " pod="openstack-kuttl-tests/glance-db-sync-2hhvl" Jan 20 16:56:50 crc kubenswrapper[4558]: I0120 16:56:50.638118 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe5cc26d-38d6-43e0-8977-6ff7e991ab6c-combined-ca-bundle\") pod \"glance-db-sync-2hhvl\" (UID: \"fe5cc26d-38d6-43e0-8977-6ff7e991ab6c\") " pod="openstack-kuttl-tests/glance-db-sync-2hhvl" Jan 20 16:56:50 crc kubenswrapper[4558]: I0120 16:56:50.638235 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/fe5cc26d-38d6-43e0-8977-6ff7e991ab6c-db-sync-config-data\") pod \"glance-db-sync-2hhvl\" (UID: \"fe5cc26d-38d6-43e0-8977-6ff7e991ab6c\") " pod="openstack-kuttl-tests/glance-db-sync-2hhvl" Jan 20 16:56:50 crc kubenswrapper[4558]: I0120 16:56:50.640918 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fe5cc26d-38d6-43e0-8977-6ff7e991ab6c-config-data\") pod \"glance-db-sync-2hhvl\" (UID: \"fe5cc26d-38d6-43e0-8977-6ff7e991ab6c\") " pod="openstack-kuttl-tests/glance-db-sync-2hhvl" Jan 20 16:56:50 crc kubenswrapper[4558]: I0120 16:56:50.656689 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-896bc\" (UniqueName: \"kubernetes.io/projected/fe5cc26d-38d6-43e0-8977-6ff7e991ab6c-kube-api-access-896bc\") pod \"glance-db-sync-2hhvl\" (UID: \"fe5cc26d-38d6-43e0-8977-6ff7e991ab6c\") " pod="openstack-kuttl-tests/glance-db-sync-2hhvl" Jan 20 16:56:50 crc kubenswrapper[4558]: I0120 16:56:50.697801 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-db-sync-2hhvl" Jan 20 16:56:50 crc kubenswrapper[4558]: I0120 16:56:50.736201 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/ad708448-38df-4494-bca7-fe394c9b53a7-etc-swift\") pod \"swift-storage-0\" (UID: \"ad708448-38df-4494-bca7-fe394c9b53a7\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 16:56:50 crc kubenswrapper[4558]: E0120 16:56:50.736361 4558 projected.go:288] Couldn't get configMap openstack-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 20 16:56:50 crc kubenswrapper[4558]: E0120 16:56:50.736383 4558 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Jan 20 16:56:50 crc kubenswrapper[4558]: E0120 16:56:50.736455 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/ad708448-38df-4494-bca7-fe394c9b53a7-etc-swift podName:ad708448-38df-4494-bca7-fe394c9b53a7 nodeName:}" failed. No retries permitted until 2026-01-20 16:56:58.736439882 +0000 UTC m=+912.496777849 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/ad708448-38df-4494-bca7-fe394c9b53a7-etc-swift") pod "swift-storage-0" (UID: "ad708448-38df-4494-bca7-fe394c9b53a7") : configmap "swift-ring-files" not found Jan 20 16:56:51 crc kubenswrapper[4558]: I0120 16:56:51.327688 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-wdggt" Jan 20 16:56:51 crc kubenswrapper[4558]: I0120 16:56:51.458021 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7cfln\" (UniqueName: \"kubernetes.io/projected/9e76a80a-0569-4d9c-a5da-793d0290ad83-kube-api-access-7cfln\") pod \"9e76a80a-0569-4d9c-a5da-793d0290ad83\" (UID: \"9e76a80a-0569-4d9c-a5da-793d0290ad83\") " Jan 20 16:56:51 crc kubenswrapper[4558]: I0120 16:56:51.458130 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9e76a80a-0569-4d9c-a5da-793d0290ad83-operator-scripts\") pod \"9e76a80a-0569-4d9c-a5da-793d0290ad83\" (UID: \"9e76a80a-0569-4d9c-a5da-793d0290ad83\") " Jan 20 16:56:51 crc kubenswrapper[4558]: I0120 16:56:51.458987 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9e76a80a-0569-4d9c-a5da-793d0290ad83-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "9e76a80a-0569-4d9c-a5da-793d0290ad83" (UID: "9e76a80a-0569-4d9c-a5da-793d0290ad83"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:56:51 crc kubenswrapper[4558]: I0120 16:56:51.463217 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9e76a80a-0569-4d9c-a5da-793d0290ad83-kube-api-access-7cfln" (OuterVolumeSpecName: "kube-api-access-7cfln") pod "9e76a80a-0569-4d9c-a5da-793d0290ad83" (UID: "9e76a80a-0569-4d9c-a5da-793d0290ad83"). InnerVolumeSpecName "kube-api-access-7cfln". 
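Note how durationBeforeRetry grows across the repeated SetUp failures: 2s at 16:56:44, 4s at 16:56:46, 8s at 16:56:50. That is the doubling backoff the kubelet applies to a failing volume operation. A minimal sketch of the pattern as observed here (the starting value and cap are illustrative assumptions, not kubelet's actual constants):

// Sketch of the doubling retry delay visible in the entries above; constants are assumptions.
package main

import (
	"fmt"
	"time"
)

func nextBackoff(last, cap time.Duration) time.Duration {
	if last <= 0 {
		return 2 * time.Second // first retry delay observed in this log
	}
	next := last * 2
	if next > cap {
		return cap
	}
	return next
}

func main() {
	d := time.Duration(0)
	for i := 0; i < 5; i++ {
		d = nextBackoff(d, 2*time.Minute)
		fmt.Println(d) // 2s, 4s, 8s, 16s, 32s
	}
}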
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:56:51 crc kubenswrapper[4558]: I0120 16:56:51.559591 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9e76a80a-0569-4d9c-a5da-793d0290ad83-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 16:56:51 crc kubenswrapper[4558]: I0120 16:56:51.559882 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7cfln\" (UniqueName: \"kubernetes.io/projected/9e76a80a-0569-4d9c-a5da-793d0290ad83-kube-api-access-7cfln\") on node \"crc\" DevicePath \"\"" Jan 20 16:56:51 crc kubenswrapper[4558]: I0120 16:56:51.636683 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-ring-rebalance-49f5x" event={"ID":"0e936a71-a599-4e5a-a8b7-9cde8054a1b5","Type":"ContainerStarted","Data":"b196841960ac1c7f6a7c02f8f3bfbe22cecef21da30a06b127c88a4183e3b847"} Jan 20 16:56:51 crc kubenswrapper[4558]: I0120 16:56:51.638219 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-wdggt" event={"ID":"9e76a80a-0569-4d9c-a5da-793d0290ad83","Type":"ContainerDied","Data":"43b7dc05d69bb77dc1f90ba86feb070147731e5758a9e79dd9ec8ec6a446c8b9"} Jan 20 16:56:51 crc kubenswrapper[4558]: I0120 16:56:51.638254 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="43b7dc05d69bb77dc1f90ba86feb070147731e5758a9e79dd9ec8ec6a446c8b9" Jan 20 16:56:51 crc kubenswrapper[4558]: I0120 16:56:51.638269 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-wdggt" Jan 20 16:56:51 crc kubenswrapper[4558]: I0120 16:56:51.666562 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/swift-ring-rebalance-49f5x" podStartSLOduration=2.142922193 podStartE2EDuration="5.666547703s" podCreationTimestamp="2026-01-20 16:56:46 +0000 UTC" firstStartedPulling="2026-01-20 16:56:47.73721675 +0000 UTC m=+901.497554717" lastFinishedPulling="2026-01-20 16:56:51.26084226 +0000 UTC m=+905.021180227" observedRunningTime="2026-01-20 16:56:51.662708175 +0000 UTC m=+905.423046141" watchObservedRunningTime="2026-01-20 16:56:51.666547703 +0000 UTC m=+905.426885671" Jan 20 16:56:51 crc kubenswrapper[4558]: I0120 16:56:51.686439 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-db-sync-2hhvl"] Jan 20 16:56:52 crc kubenswrapper[4558]: I0120 16:56:52.646448 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-sync-2hhvl" event={"ID":"fe5cc26d-38d6-43e0-8977-6ff7e991ab6c","Type":"ContainerStarted","Data":"b5da9ce34a4e3e6af8665256ac56bb64e8ca83042a91c6c0fe8a62b5e6ea9145"} Jan 20 16:56:53 crc kubenswrapper[4558]: I0120 16:56:53.061431 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 16:56:53 crc kubenswrapper[4558]: I0120 16:56:53.186693 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-wdggt"] Jan 20 16:56:53 crc kubenswrapper[4558]: I0120 16:56:53.191684 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-wdggt"] Jan 20 16:56:54 crc kubenswrapper[4558]: I0120 16:56:54.573469 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9e76a80a-0569-4d9c-a5da-793d0290ad83" 
path="/var/lib/kubelet/pods/9e76a80a-0569-4d9c-a5da-793d0290ad83/volumes" Jan 20 16:56:56 crc kubenswrapper[4558]: I0120 16:56:56.766988 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/root-account-create-update-cx24m"] Jan 20 16:56:56 crc kubenswrapper[4558]: E0120 16:56:56.767553 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e76a80a-0569-4d9c-a5da-793d0290ad83" containerName="mariadb-account-create-update" Jan 20 16:56:56 crc kubenswrapper[4558]: I0120 16:56:56.767567 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e76a80a-0569-4d9c-a5da-793d0290ad83" containerName="mariadb-account-create-update" Jan 20 16:56:56 crc kubenswrapper[4558]: I0120 16:56:56.767728 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e76a80a-0569-4d9c-a5da-793d0290ad83" containerName="mariadb-account-create-update" Jan 20 16:56:56 crc kubenswrapper[4558]: I0120 16:56:56.768269 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-cx24m" Jan 20 16:56:56 crc kubenswrapper[4558]: I0120 16:56:56.769737 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"openstack-mariadb-root-db-secret" Jan 20 16:56:56 crc kubenswrapper[4558]: I0120 16:56:56.771643 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-cx24m"] Jan 20 16:56:56 crc kubenswrapper[4558]: I0120 16:56:56.843307 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kt5x6\" (UniqueName: \"kubernetes.io/projected/b6e0dfd1-68d5-4557-9723-9a089cba1360-kube-api-access-kt5x6\") pod \"root-account-create-update-cx24m\" (UID: \"b6e0dfd1-68d5-4557-9723-9a089cba1360\") " pod="openstack-kuttl-tests/root-account-create-update-cx24m" Jan 20 16:56:56 crc kubenswrapper[4558]: I0120 16:56:56.843558 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b6e0dfd1-68d5-4557-9723-9a089cba1360-operator-scripts\") pod \"root-account-create-update-cx24m\" (UID: \"b6e0dfd1-68d5-4557-9723-9a089cba1360\") " pod="openstack-kuttl-tests/root-account-create-update-cx24m" Jan 20 16:56:56 crc kubenswrapper[4558]: I0120 16:56:56.944205 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kt5x6\" (UniqueName: \"kubernetes.io/projected/b6e0dfd1-68d5-4557-9723-9a089cba1360-kube-api-access-kt5x6\") pod \"root-account-create-update-cx24m\" (UID: \"b6e0dfd1-68d5-4557-9723-9a089cba1360\") " pod="openstack-kuttl-tests/root-account-create-update-cx24m" Jan 20 16:56:56 crc kubenswrapper[4558]: I0120 16:56:56.944260 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b6e0dfd1-68d5-4557-9723-9a089cba1360-operator-scripts\") pod \"root-account-create-update-cx24m\" (UID: \"b6e0dfd1-68d5-4557-9723-9a089cba1360\") " pod="openstack-kuttl-tests/root-account-create-update-cx24m" Jan 20 16:56:56 crc kubenswrapper[4558]: I0120 16:56:56.945111 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b6e0dfd1-68d5-4557-9723-9a089cba1360-operator-scripts\") pod \"root-account-create-update-cx24m\" (UID: \"b6e0dfd1-68d5-4557-9723-9a089cba1360\") " 
pod="openstack-kuttl-tests/root-account-create-update-cx24m" Jan 20 16:56:56 crc kubenswrapper[4558]: I0120 16:56:56.960393 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kt5x6\" (UniqueName: \"kubernetes.io/projected/b6e0dfd1-68d5-4557-9723-9a089cba1360-kube-api-access-kt5x6\") pod \"root-account-create-update-cx24m\" (UID: \"b6e0dfd1-68d5-4557-9723-9a089cba1360\") " pod="openstack-kuttl-tests/root-account-create-update-cx24m" Jan 20 16:56:57 crc kubenswrapper[4558]: I0120 16:56:57.086696 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-cx24m" Jan 20 16:56:57 crc kubenswrapper[4558]: I0120 16:56:57.329639 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 16:56:57 crc kubenswrapper[4558]: I0120 16:56:57.329691 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 16:56:57 crc kubenswrapper[4558]: I0120 16:56:57.447252 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-cx24m"] Jan 20 16:56:57 crc kubenswrapper[4558]: W0120 16:56:57.449972 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb6e0dfd1_68d5_4557_9723_9a089cba1360.slice/crio-68affbe420d217674927bffe99a12a0a5b720b852a72be91930afafebda8c105 WatchSource:0}: Error finding container 68affbe420d217674927bffe99a12a0a5b720b852a72be91930afafebda8c105: Status 404 returned error can't find the container with id 68affbe420d217674927bffe99a12a0a5b720b852a72be91930afafebda8c105 Jan 20 16:56:57 crc kubenswrapper[4558]: I0120 16:56:57.675467 4558 generic.go:334] "Generic (PLEG): container finished" podID="bf355276-9e62-474e-bfb1-616dde5b83bc" containerID="e602226e4f361c85082bf7248cd70619371bfecaf58e19de2a31846417adbebd" exitCode=0 Jan 20 16:56:57 crc kubenswrapper[4558]: I0120 16:56:57.675546 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" event={"ID":"bf355276-9e62-474e-bfb1-616dde5b83bc","Type":"ContainerDied","Data":"e602226e4f361c85082bf7248cd70619371bfecaf58e19de2a31846417adbebd"} Jan 20 16:56:57 crc kubenswrapper[4558]: I0120 16:56:57.677951 4558 generic.go:334] "Generic (PLEG): container finished" podID="0e936a71-a599-4e5a-a8b7-9cde8054a1b5" containerID="b196841960ac1c7f6a7c02f8f3bfbe22cecef21da30a06b127c88a4183e3b847" exitCode=0 Jan 20 16:56:57 crc kubenswrapper[4558]: I0120 16:56:57.678067 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-ring-rebalance-49f5x" event={"ID":"0e936a71-a599-4e5a-a8b7-9cde8054a1b5","Type":"ContainerDied","Data":"b196841960ac1c7f6a7c02f8f3bfbe22cecef21da30a06b127c88a4183e3b847"} Jan 20 16:56:57 crc kubenswrapper[4558]: I0120 16:56:57.681279 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-cx24m" 
event={"ID":"b6e0dfd1-68d5-4557-9723-9a089cba1360","Type":"ContainerStarted","Data":"4f8ad4db714d7249bbcc2a75589c40c4641e949e1fac475245dd93d0418a8537"} Jan 20 16:56:57 crc kubenswrapper[4558]: I0120 16:56:57.681310 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-cx24m" event={"ID":"b6e0dfd1-68d5-4557-9723-9a089cba1360","Type":"ContainerStarted","Data":"68affbe420d217674927bffe99a12a0a5b720b852a72be91930afafebda8c105"} Jan 20 16:56:57 crc kubenswrapper[4558]: I0120 16:56:57.708793 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/root-account-create-update-cx24m" podStartSLOduration=1.708778239 podStartE2EDuration="1.708778239s" podCreationTimestamp="2026-01-20 16:56:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:56:57.708212605 +0000 UTC m=+911.468550572" watchObservedRunningTime="2026-01-20 16:56:57.708778239 +0000 UTC m=+911.469116206" Jan 20 16:56:58 crc kubenswrapper[4558]: I0120 16:56:58.686967 4558 generic.go:334] "Generic (PLEG): container finished" podID="3f868eba-f4e1-4e32-b271-391cf271fe97" containerID="1cbb8a79828c1017ed54f50de2db6768bd2eb566cb59fce2be3b621f69a500cd" exitCode=0 Jan 20 16:56:58 crc kubenswrapper[4558]: I0120 16:56:58.687020 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-0" event={"ID":"3f868eba-f4e1-4e32-b271-391cf271fe97","Type":"ContainerDied","Data":"1cbb8a79828c1017ed54f50de2db6768bd2eb566cb59fce2be3b621f69a500cd"} Jan 20 16:56:58 crc kubenswrapper[4558]: I0120 16:56:58.690463 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" event={"ID":"bf355276-9e62-474e-bfb1-616dde5b83bc","Type":"ContainerStarted","Data":"1e8c1d79d17c69967f38cfb7502a531e6019712fea6395ed1952fa83bb5dfd3a"} Jan 20 16:56:58 crc kubenswrapper[4558]: I0120 16:56:58.690643 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 16:56:58 crc kubenswrapper[4558]: I0120 16:56:58.691579 4558 generic.go:334] "Generic (PLEG): container finished" podID="b6e0dfd1-68d5-4557-9723-9a089cba1360" containerID="4f8ad4db714d7249bbcc2a75589c40c4641e949e1fac475245dd93d0418a8537" exitCode=0 Jan 20 16:56:58 crc kubenswrapper[4558]: I0120 16:56:58.691640 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-cx24m" event={"ID":"b6e0dfd1-68d5-4557-9723-9a089cba1360","Type":"ContainerDied","Data":"4f8ad4db714d7249bbcc2a75589c40c4641e949e1fac475245dd93d0418a8537"} Jan 20 16:56:58 crc kubenswrapper[4558]: I0120 16:56:58.749266 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" podStartSLOduration=39.185573404 podStartE2EDuration="44.749250504s" podCreationTimestamp="2026-01-20 16:56:14 +0000 UTC" firstStartedPulling="2026-01-20 16:56:19.585687739 +0000 UTC m=+873.346025706" lastFinishedPulling="2026-01-20 16:56:25.149364839 +0000 UTC m=+878.909702806" observedRunningTime="2026-01-20 16:56:58.74359066 +0000 UTC m=+912.503928627" watchObservedRunningTime="2026-01-20 16:56:58.749250504 +0000 UTC m=+912.509588470" Jan 20 16:56:58 crc kubenswrapper[4558]: I0120 16:56:58.769837 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: 
\"kubernetes.io/projected/ad708448-38df-4494-bca7-fe394c9b53a7-etc-swift\") pod \"swift-storage-0\" (UID: \"ad708448-38df-4494-bca7-fe394c9b53a7\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 16:56:58 crc kubenswrapper[4558]: I0120 16:56:58.775043 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/ad708448-38df-4494-bca7-fe394c9b53a7-etc-swift\") pod \"swift-storage-0\" (UID: \"ad708448-38df-4494-bca7-fe394c9b53a7\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 16:56:58 crc kubenswrapper[4558]: I0120 16:56:58.911619 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-storage-0" Jan 20 16:56:59 crc kubenswrapper[4558]: I0120 16:56:59.047690 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-ring-rebalance-49f5x" Jan 20 16:56:59 crc kubenswrapper[4558]: I0120 16:56:59.174403 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k7b9q\" (UniqueName: \"kubernetes.io/projected/0e936a71-a599-4e5a-a8b7-9cde8054a1b5-kube-api-access-k7b9q\") pod \"0e936a71-a599-4e5a-a8b7-9cde8054a1b5\" (UID: \"0e936a71-a599-4e5a-a8b7-9cde8054a1b5\") " Jan 20 16:56:59 crc kubenswrapper[4558]: I0120 16:56:59.174586 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0e936a71-a599-4e5a-a8b7-9cde8054a1b5-scripts\") pod \"0e936a71-a599-4e5a-a8b7-9cde8054a1b5\" (UID: \"0e936a71-a599-4e5a-a8b7-9cde8054a1b5\") " Jan 20 16:56:59 crc kubenswrapper[4558]: I0120 16:56:59.174606 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/0e936a71-a599-4e5a-a8b7-9cde8054a1b5-swiftconf\") pod \"0e936a71-a599-4e5a-a8b7-9cde8054a1b5\" (UID: \"0e936a71-a599-4e5a-a8b7-9cde8054a1b5\") " Jan 20 16:56:59 crc kubenswrapper[4558]: I0120 16:56:59.174621 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/0e936a71-a599-4e5a-a8b7-9cde8054a1b5-etc-swift\") pod \"0e936a71-a599-4e5a-a8b7-9cde8054a1b5\" (UID: \"0e936a71-a599-4e5a-a8b7-9cde8054a1b5\") " Jan 20 16:56:59 crc kubenswrapper[4558]: I0120 16:56:59.174649 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e936a71-a599-4e5a-a8b7-9cde8054a1b5-combined-ca-bundle\") pod \"0e936a71-a599-4e5a-a8b7-9cde8054a1b5\" (UID: \"0e936a71-a599-4e5a-a8b7-9cde8054a1b5\") " Jan 20 16:56:59 crc kubenswrapper[4558]: I0120 16:56:59.174697 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/0e936a71-a599-4e5a-a8b7-9cde8054a1b5-ring-data-devices\") pod \"0e936a71-a599-4e5a-a8b7-9cde8054a1b5\" (UID: \"0e936a71-a599-4e5a-a8b7-9cde8054a1b5\") " Jan 20 16:56:59 crc kubenswrapper[4558]: I0120 16:56:59.174805 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/0e936a71-a599-4e5a-a8b7-9cde8054a1b5-dispersionconf\") pod \"0e936a71-a599-4e5a-a8b7-9cde8054a1b5\" (UID: \"0e936a71-a599-4e5a-a8b7-9cde8054a1b5\") " Jan 20 16:56:59 crc kubenswrapper[4558]: I0120 16:56:59.175259 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/configmap/0e936a71-a599-4e5a-a8b7-9cde8054a1b5-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "0e936a71-a599-4e5a-a8b7-9cde8054a1b5" (UID: "0e936a71-a599-4e5a-a8b7-9cde8054a1b5"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:56:59 crc kubenswrapper[4558]: I0120 16:56:59.176137 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0e936a71-a599-4e5a-a8b7-9cde8054a1b5-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "0e936a71-a599-4e5a-a8b7-9cde8054a1b5" (UID: "0e936a71-a599-4e5a-a8b7-9cde8054a1b5"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 16:56:59 crc kubenswrapper[4558]: I0120 16:56:59.199539 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0e936a71-a599-4e5a-a8b7-9cde8054a1b5-kube-api-access-k7b9q" (OuterVolumeSpecName: "kube-api-access-k7b9q") pod "0e936a71-a599-4e5a-a8b7-9cde8054a1b5" (UID: "0e936a71-a599-4e5a-a8b7-9cde8054a1b5"). InnerVolumeSpecName "kube-api-access-k7b9q". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:56:59 crc kubenswrapper[4558]: I0120 16:56:59.201353 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0e936a71-a599-4e5a-a8b7-9cde8054a1b5-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "0e936a71-a599-4e5a-a8b7-9cde8054a1b5" (UID: "0e936a71-a599-4e5a-a8b7-9cde8054a1b5"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:56:59 crc kubenswrapper[4558]: I0120 16:56:59.204831 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0e936a71-a599-4e5a-a8b7-9cde8054a1b5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0e936a71-a599-4e5a-a8b7-9cde8054a1b5" (UID: "0e936a71-a599-4e5a-a8b7-9cde8054a1b5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:56:59 crc kubenswrapper[4558]: I0120 16:56:59.205385 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0e936a71-a599-4e5a-a8b7-9cde8054a1b5-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "0e936a71-a599-4e5a-a8b7-9cde8054a1b5" (UID: "0e936a71-a599-4e5a-a8b7-9cde8054a1b5"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:56:59 crc kubenswrapper[4558]: I0120 16:56:59.214278 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0e936a71-a599-4e5a-a8b7-9cde8054a1b5-scripts" (OuterVolumeSpecName: "scripts") pod "0e936a71-a599-4e5a-a8b7-9cde8054a1b5" (UID: "0e936a71-a599-4e5a-a8b7-9cde8054a1b5"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:56:59 crc kubenswrapper[4558]: I0120 16:56:59.276812 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k7b9q\" (UniqueName: \"kubernetes.io/projected/0e936a71-a599-4e5a-a8b7-9cde8054a1b5-kube-api-access-k7b9q\") on node \"crc\" DevicePath \"\"" Jan 20 16:56:59 crc kubenswrapper[4558]: I0120 16:56:59.276844 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0e936a71-a599-4e5a-a8b7-9cde8054a1b5-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 16:56:59 crc kubenswrapper[4558]: I0120 16:56:59.276853 4558 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/0e936a71-a599-4e5a-a8b7-9cde8054a1b5-swiftconf\") on node \"crc\" DevicePath \"\"" Jan 20 16:56:59 crc kubenswrapper[4558]: I0120 16:56:59.276861 4558 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/0e936a71-a599-4e5a-a8b7-9cde8054a1b5-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 20 16:56:59 crc kubenswrapper[4558]: I0120 16:56:59.276870 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e936a71-a599-4e5a-a8b7-9cde8054a1b5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 16:56:59 crc kubenswrapper[4558]: I0120 16:56:59.276878 4558 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/0e936a71-a599-4e5a-a8b7-9cde8054a1b5-ring-data-devices\") on node \"crc\" DevicePath \"\"" Jan 20 16:56:59 crc kubenswrapper[4558]: I0120 16:56:59.276884 4558 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/0e936a71-a599-4e5a-a8b7-9cde8054a1b5-dispersionconf\") on node \"crc\" DevicePath \"\"" Jan 20 16:56:59 crc kubenswrapper[4558]: I0120 16:56:59.373149 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-storage-0"] Jan 20 16:56:59 crc kubenswrapper[4558]: I0120 16:56:59.699432 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-ring-rebalance-49f5x" event={"ID":"0e936a71-a599-4e5a-a8b7-9cde8054a1b5","Type":"ContainerDied","Data":"eed866d47c32492c41a2906dfa1b48d235a068892f54ffdba59d332aad584d18"} Jan 20 16:56:59 crc kubenswrapper[4558]: I0120 16:56:59.699467 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="eed866d47c32492c41a2906dfa1b48d235a068892f54ffdba59d332aad584d18" Jan 20 16:56:59 crc kubenswrapper[4558]: I0120 16:56:59.699508 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/swift-ring-rebalance-49f5x" Jan 20 16:56:59 crc kubenswrapper[4558]: I0120 16:56:59.707872 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-0" event={"ID":"3f868eba-f4e1-4e32-b271-391cf271fe97","Type":"ContainerStarted","Data":"7186b3f7bece934410de1e6b9f5050c2ca0be8663df3206ff0ed31c8bdab8497"} Jan 20 16:56:59 crc kubenswrapper[4558]: I0120 16:56:59.734653 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/rabbitmq-server-0" podStartSLOduration=39.042739716 podStartE2EDuration="44.734639682s" podCreationTimestamp="2026-01-20 16:56:15 +0000 UTC" firstStartedPulling="2026-01-20 16:56:19.581904175 +0000 UTC m=+873.342242141" lastFinishedPulling="2026-01-20 16:56:25.27380414 +0000 UTC m=+879.034142107" observedRunningTime="2026-01-20 16:56:59.724312792 +0000 UTC m=+913.484650760" watchObservedRunningTime="2026-01-20 16:56:59.734639682 +0000 UTC m=+913.494977649" Jan 20 16:57:00 crc kubenswrapper[4558]: I0120 16:57:00.105404 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-2tv5d"] Jan 20 16:57:00 crc kubenswrapper[4558]: E0120 16:57:00.105836 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e936a71-a599-4e5a-a8b7-9cde8054a1b5" containerName="swift-ring-rebalance" Jan 20 16:57:00 crc kubenswrapper[4558]: I0120 16:57:00.105851 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e936a71-a599-4e5a-a8b7-9cde8054a1b5" containerName="swift-ring-rebalance" Jan 20 16:57:00 crc kubenswrapper[4558]: I0120 16:57:00.105979 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e936a71-a599-4e5a-a8b7-9cde8054a1b5" containerName="swift-ring-rebalance" Jan 20 16:57:00 crc kubenswrapper[4558]: I0120 16:57:00.106866 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-2tv5d" Jan 20 16:57:00 crc kubenswrapper[4558]: I0120 16:57:00.117647 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2tv5d"] Jan 20 16:57:00 crc kubenswrapper[4558]: I0120 16:57:00.188576 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bj4tj\" (UniqueName: \"kubernetes.io/projected/0d7a49b7-4cfe-4d63-a510-803f562eb3f6-kube-api-access-bj4tj\") pod \"community-operators-2tv5d\" (UID: \"0d7a49b7-4cfe-4d63-a510-803f562eb3f6\") " pod="openshift-marketplace/community-operators-2tv5d" Jan 20 16:57:00 crc kubenswrapper[4558]: I0120 16:57:00.188635 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0d7a49b7-4cfe-4d63-a510-803f562eb3f6-catalog-content\") pod \"community-operators-2tv5d\" (UID: \"0d7a49b7-4cfe-4d63-a510-803f562eb3f6\") " pod="openshift-marketplace/community-operators-2tv5d" Jan 20 16:57:00 crc kubenswrapper[4558]: I0120 16:57:00.188662 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0d7a49b7-4cfe-4d63-a510-803f562eb3f6-utilities\") pod \"community-operators-2tv5d\" (UID: \"0d7a49b7-4cfe-4d63-a510-803f562eb3f6\") " pod="openshift-marketplace/community-operators-2tv5d" Jan 20 16:57:00 crc kubenswrapper[4558]: I0120 16:57:00.289852 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0d7a49b7-4cfe-4d63-a510-803f562eb3f6-catalog-content\") pod \"community-operators-2tv5d\" (UID: \"0d7a49b7-4cfe-4d63-a510-803f562eb3f6\") " pod="openshift-marketplace/community-operators-2tv5d" Jan 20 16:57:00 crc kubenswrapper[4558]: I0120 16:57:00.289900 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0d7a49b7-4cfe-4d63-a510-803f562eb3f6-utilities\") pod \"community-operators-2tv5d\" (UID: \"0d7a49b7-4cfe-4d63-a510-803f562eb3f6\") " pod="openshift-marketplace/community-operators-2tv5d" Jan 20 16:57:00 crc kubenswrapper[4558]: I0120 16:57:00.290312 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0d7a49b7-4cfe-4d63-a510-803f562eb3f6-catalog-content\") pod \"community-operators-2tv5d\" (UID: \"0d7a49b7-4cfe-4d63-a510-803f562eb3f6\") " pod="openshift-marketplace/community-operators-2tv5d" Jan 20 16:57:00 crc kubenswrapper[4558]: I0120 16:57:00.290357 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0d7a49b7-4cfe-4d63-a510-803f562eb3f6-utilities\") pod \"community-operators-2tv5d\" (UID: \"0d7a49b7-4cfe-4d63-a510-803f562eb3f6\") " pod="openshift-marketplace/community-operators-2tv5d" Jan 20 16:57:00 crc kubenswrapper[4558]: I0120 16:57:00.290479 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bj4tj\" (UniqueName: \"kubernetes.io/projected/0d7a49b7-4cfe-4d63-a510-803f562eb3f6-kube-api-access-bj4tj\") pod \"community-operators-2tv5d\" (UID: \"0d7a49b7-4cfe-4d63-a510-803f562eb3f6\") " pod="openshift-marketplace/community-operators-2tv5d" Jan 20 16:57:00 crc kubenswrapper[4558]: I0120 16:57:00.309188 4558 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-bj4tj\" (UniqueName: \"kubernetes.io/projected/0d7a49b7-4cfe-4d63-a510-803f562eb3f6-kube-api-access-bj4tj\") pod \"community-operators-2tv5d\" (UID: \"0d7a49b7-4cfe-4d63-a510-803f562eb3f6\") " pod="openshift-marketplace/community-operators-2tv5d" Jan 20 16:57:00 crc kubenswrapper[4558]: I0120 16:57:00.427210 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2tv5d" Jan 20 16:57:04 crc kubenswrapper[4558]: I0120 16:57:04.175575 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-jlgl4"] Jan 20 16:57:04 crc kubenswrapper[4558]: I0120 16:57:04.179713 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jlgl4" Jan 20 16:57:04 crc kubenswrapper[4558]: I0120 16:57:04.187400 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-jlgl4"] Jan 20 16:57:04 crc kubenswrapper[4558]: I0120 16:57:04.278263 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78267dd1-57b0-4d3b-9578-42989b2364f5-catalog-content\") pod \"redhat-marketplace-jlgl4\" (UID: \"78267dd1-57b0-4d3b-9578-42989b2364f5\") " pod="openshift-marketplace/redhat-marketplace-jlgl4" Jan 20 16:57:04 crc kubenswrapper[4558]: I0120 16:57:04.278310 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j9xwr\" (UniqueName: \"kubernetes.io/projected/78267dd1-57b0-4d3b-9578-42989b2364f5-kube-api-access-j9xwr\") pod \"redhat-marketplace-jlgl4\" (UID: \"78267dd1-57b0-4d3b-9578-42989b2364f5\") " pod="openshift-marketplace/redhat-marketplace-jlgl4" Jan 20 16:57:04 crc kubenswrapper[4558]: I0120 16:57:04.278352 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78267dd1-57b0-4d3b-9578-42989b2364f5-utilities\") pod \"redhat-marketplace-jlgl4\" (UID: \"78267dd1-57b0-4d3b-9578-42989b2364f5\") " pod="openshift-marketplace/redhat-marketplace-jlgl4" Jan 20 16:57:04 crc kubenswrapper[4558]: I0120 16:57:04.379597 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78267dd1-57b0-4d3b-9578-42989b2364f5-catalog-content\") pod \"redhat-marketplace-jlgl4\" (UID: \"78267dd1-57b0-4d3b-9578-42989b2364f5\") " pod="openshift-marketplace/redhat-marketplace-jlgl4" Jan 20 16:57:04 crc kubenswrapper[4558]: I0120 16:57:04.379644 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j9xwr\" (UniqueName: \"kubernetes.io/projected/78267dd1-57b0-4d3b-9578-42989b2364f5-kube-api-access-j9xwr\") pod \"redhat-marketplace-jlgl4\" (UID: \"78267dd1-57b0-4d3b-9578-42989b2364f5\") " pod="openshift-marketplace/redhat-marketplace-jlgl4" Jan 20 16:57:04 crc kubenswrapper[4558]: I0120 16:57:04.379680 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78267dd1-57b0-4d3b-9578-42989b2364f5-utilities\") pod \"redhat-marketplace-jlgl4\" (UID: \"78267dd1-57b0-4d3b-9578-42989b2364f5\") " pod="openshift-marketplace/redhat-marketplace-jlgl4" Jan 20 16:57:04 crc kubenswrapper[4558]: I0120 16:57:04.380089 4558 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78267dd1-57b0-4d3b-9578-42989b2364f5-utilities\") pod \"redhat-marketplace-jlgl4\" (UID: \"78267dd1-57b0-4d3b-9578-42989b2364f5\") " pod="openshift-marketplace/redhat-marketplace-jlgl4" Jan 20 16:57:04 crc kubenswrapper[4558]: I0120 16:57:04.380314 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78267dd1-57b0-4d3b-9578-42989b2364f5-catalog-content\") pod \"redhat-marketplace-jlgl4\" (UID: \"78267dd1-57b0-4d3b-9578-42989b2364f5\") " pod="openshift-marketplace/redhat-marketplace-jlgl4" Jan 20 16:57:04 crc kubenswrapper[4558]: I0120 16:57:04.396568 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j9xwr\" (UniqueName: \"kubernetes.io/projected/78267dd1-57b0-4d3b-9578-42989b2364f5-kube-api-access-j9xwr\") pod \"redhat-marketplace-jlgl4\" (UID: \"78267dd1-57b0-4d3b-9578-42989b2364f5\") " pod="openshift-marketplace/redhat-marketplace-jlgl4" Jan 20 16:57:04 crc kubenswrapper[4558]: I0120 16:57:04.494573 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jlgl4" Jan 20 16:57:06 crc kubenswrapper[4558]: I0120 16:57:06.301923 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-cx24m" Jan 20 16:57:06 crc kubenswrapper[4558]: I0120 16:57:06.405982 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kt5x6\" (UniqueName: \"kubernetes.io/projected/b6e0dfd1-68d5-4557-9723-9a089cba1360-kube-api-access-kt5x6\") pod \"b6e0dfd1-68d5-4557-9723-9a089cba1360\" (UID: \"b6e0dfd1-68d5-4557-9723-9a089cba1360\") " Jan 20 16:57:06 crc kubenswrapper[4558]: I0120 16:57:06.406040 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b6e0dfd1-68d5-4557-9723-9a089cba1360-operator-scripts\") pod \"b6e0dfd1-68d5-4557-9723-9a089cba1360\" (UID: \"b6e0dfd1-68d5-4557-9723-9a089cba1360\") " Jan 20 16:57:06 crc kubenswrapper[4558]: I0120 16:57:06.406747 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6e0dfd1-68d5-4557-9723-9a089cba1360-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b6e0dfd1-68d5-4557-9723-9a089cba1360" (UID: "b6e0dfd1-68d5-4557-9723-9a089cba1360"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:57:06 crc kubenswrapper[4558]: I0120 16:57:06.408633 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6e0dfd1-68d5-4557-9723-9a089cba1360-kube-api-access-kt5x6" (OuterVolumeSpecName: "kube-api-access-kt5x6") pod "b6e0dfd1-68d5-4557-9723-9a089cba1360" (UID: "b6e0dfd1-68d5-4557-9723-9a089cba1360"). InnerVolumeSpecName "kube-api-access-kt5x6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:57:06 crc kubenswrapper[4558]: I0120 16:57:06.507740 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kt5x6\" (UniqueName: \"kubernetes.io/projected/b6e0dfd1-68d5-4557-9723-9a089cba1360-kube-api-access-kt5x6\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:06 crc kubenswrapper[4558]: I0120 16:57:06.507770 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b6e0dfd1-68d5-4557-9723-9a089cba1360-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:06 crc kubenswrapper[4558]: I0120 16:57:06.553770 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2tv5d"] Jan 20 16:57:06 crc kubenswrapper[4558]: I0120 16:57:06.594377 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-jlgl4"] Jan 20 16:57:06 crc kubenswrapper[4558]: W0120 16:57:06.602538 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod78267dd1_57b0_4d3b_9578_42989b2364f5.slice/crio-5468352d1af212e5329314f98e5ab6d4b0a17092709b2773cdf084c716ff6632 WatchSource:0}: Error finding container 5468352d1af212e5329314f98e5ab6d4b0a17092709b2773cdf084c716ff6632: Status 404 returned error can't find the container with id 5468352d1af212e5329314f98e5ab6d4b0a17092709b2773cdf084c716ff6632 Jan 20 16:57:06 crc kubenswrapper[4558]: I0120 16:57:06.681384 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 16:57:06 crc kubenswrapper[4558]: I0120 16:57:06.748638 4558 generic.go:334] "Generic (PLEG): container finished" podID="0d7a49b7-4cfe-4d63-a510-803f562eb3f6" containerID="0725c6dc809a1ae646dc87fed739715135e6aa5f948621abdc42fcf6fc70dfa1" exitCode=0 Jan 20 16:57:06 crc kubenswrapper[4558]: I0120 16:57:06.748697 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2tv5d" event={"ID":"0d7a49b7-4cfe-4d63-a510-803f562eb3f6","Type":"ContainerDied","Data":"0725c6dc809a1ae646dc87fed739715135e6aa5f948621abdc42fcf6fc70dfa1"} Jan 20 16:57:06 crc kubenswrapper[4558]: I0120 16:57:06.748722 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2tv5d" event={"ID":"0d7a49b7-4cfe-4d63-a510-803f562eb3f6","Type":"ContainerStarted","Data":"bf1f1e5996145a9dff7fb78312f3f1af9d466f08953a194f6d3081227a8ba368"} Jan 20 16:57:06 crc kubenswrapper[4558]: I0120 16:57:06.752461 4558 generic.go:334] "Generic (PLEG): container finished" podID="78267dd1-57b0-4d3b-9578-42989b2364f5" containerID="79f9ce2e887d794fcfd32a289cc5a8fc46ee6b948a8ccea14d8be3682f1f741d" exitCode=0 Jan 20 16:57:06 crc kubenswrapper[4558]: I0120 16:57:06.752535 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jlgl4" event={"ID":"78267dd1-57b0-4d3b-9578-42989b2364f5","Type":"ContainerDied","Data":"79f9ce2e887d794fcfd32a289cc5a8fc46ee6b948a8ccea14d8be3682f1f741d"} Jan 20 16:57:06 crc kubenswrapper[4558]: I0120 16:57:06.752562 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jlgl4" event={"ID":"78267dd1-57b0-4d3b-9578-42989b2364f5","Type":"ContainerStarted","Data":"5468352d1af212e5329314f98e5ab6d4b0a17092709b2773cdf084c716ff6632"} Jan 20 16:57:06 crc kubenswrapper[4558]: I0120 16:57:06.753803 4558 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-sync-2hhvl" event={"ID":"fe5cc26d-38d6-43e0-8977-6ff7e991ab6c","Type":"ContainerStarted","Data":"0af9671e129d86a8a04ea896394c1f0ccdaf1e564becf0be8550e2230fea5ee9"} Jan 20 16:57:06 crc kubenswrapper[4558]: I0120 16:57:06.754752 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"ad708448-38df-4494-bca7-fe394c9b53a7","Type":"ContainerStarted","Data":"5d7945bb752c856572c7eb12ee3598fa0c9ce27902b86d35bf4dc5047564ded4"} Jan 20 16:57:06 crc kubenswrapper[4558]: I0120 16:57:06.755999 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-cx24m" event={"ID":"b6e0dfd1-68d5-4557-9723-9a089cba1360","Type":"ContainerDied","Data":"68affbe420d217674927bffe99a12a0a5b720b852a72be91930afafebda8c105"} Jan 20 16:57:06 crc kubenswrapper[4558]: I0120 16:57:06.756024 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="68affbe420d217674927bffe99a12a0a5b720b852a72be91930afafebda8c105" Jan 20 16:57:06 crc kubenswrapper[4558]: I0120 16:57:06.756062 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-cx24m" Jan 20 16:57:06 crc kubenswrapper[4558]: I0120 16:57:06.773185 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-db-sync-2hhvl" podStartSLOduration=2.241515322 podStartE2EDuration="16.773154906s" podCreationTimestamp="2026-01-20 16:56:50 +0000 UTC" firstStartedPulling="2026-01-20 16:56:51.695722784 +0000 UTC m=+905.456060751" lastFinishedPulling="2026-01-20 16:57:06.227362368 +0000 UTC m=+919.987700335" observedRunningTime="2026-01-20 16:57:06.77073735 +0000 UTC m=+920.531075318" watchObservedRunningTime="2026-01-20 16:57:06.773154906 +0000 UTC m=+920.533492874" Jan 20 16:57:07 crc kubenswrapper[4558]: I0120 16:57:07.763772 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2tv5d" event={"ID":"0d7a49b7-4cfe-4d63-a510-803f562eb3f6","Type":"ContainerStarted","Data":"0556dfbd38b9935bab93c57889334ca1663ebdd4e2ade3f48d76bb738c108c7a"} Jan 20 16:57:07 crc kubenswrapper[4558]: I0120 16:57:07.766780 4558 generic.go:334] "Generic (PLEG): container finished" podID="78267dd1-57b0-4d3b-9578-42989b2364f5" containerID="5b65529db92b6006a9d484f63f3393e3c121c0f6cce9eed81a2a72728d86052a" exitCode=0 Jan 20 16:57:07 crc kubenswrapper[4558]: I0120 16:57:07.766862 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jlgl4" event={"ID":"78267dd1-57b0-4d3b-9578-42989b2364f5","Type":"ContainerDied","Data":"5b65529db92b6006a9d484f63f3393e3c121c0f6cce9eed81a2a72728d86052a"} Jan 20 16:57:08 crc kubenswrapper[4558]: I0120 16:57:08.188014 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-cx24m"] Jan 20 16:57:08 crc kubenswrapper[4558]: I0120 16:57:08.193423 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-cx24m"] Jan 20 16:57:08 crc kubenswrapper[4558]: I0120 16:57:08.572312 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6e0dfd1-68d5-4557-9723-9a089cba1360" path="/var/lib/kubelet/pods/b6e0dfd1-68d5-4557-9723-9a089cba1360/volumes" Jan 20 16:57:08 crc kubenswrapper[4558]: I0120 16:57:08.782638 4558 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"ad708448-38df-4494-bca7-fe394c9b53a7","Type":"ContainerStarted","Data":"24445a4a28eeea3fb63d875b16f990af43a36181cb7778f0d316092430ca7285"} Jan 20 16:57:08 crc kubenswrapper[4558]: I0120 16:57:08.785480 4558 generic.go:334] "Generic (PLEG): container finished" podID="0d7a49b7-4cfe-4d63-a510-803f562eb3f6" containerID="0556dfbd38b9935bab93c57889334ca1663ebdd4e2ade3f48d76bb738c108c7a" exitCode=0 Jan 20 16:57:08 crc kubenswrapper[4558]: I0120 16:57:08.785514 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2tv5d" event={"ID":"0d7a49b7-4cfe-4d63-a510-803f562eb3f6","Type":"ContainerDied","Data":"0556dfbd38b9935bab93c57889334ca1663ebdd4e2ade3f48d76bb738c108c7a"} Jan 20 16:57:08 crc kubenswrapper[4558]: I0120 16:57:08.789035 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jlgl4" event={"ID":"78267dd1-57b0-4d3b-9578-42989b2364f5","Type":"ContainerStarted","Data":"64aa6b60f7b0c5125431161cac6c581a9f41d676d84ade2ff99a1a3a675693ab"} Jan 20 16:57:08 crc kubenswrapper[4558]: I0120 16:57:08.812886 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-jlgl4" podStartSLOduration=2.984245988 podStartE2EDuration="4.812876159s" podCreationTimestamp="2026-01-20 16:57:04 +0000 UTC" firstStartedPulling="2026-01-20 16:57:06.754491511 +0000 UTC m=+920.514829478" lastFinishedPulling="2026-01-20 16:57:08.583121692 +0000 UTC m=+922.343459649" observedRunningTime="2026-01-20 16:57:08.809217069 +0000 UTC m=+922.569555036" watchObservedRunningTime="2026-01-20 16:57:08.812876159 +0000 UTC m=+922.573214126" Jan 20 16:57:10 crc kubenswrapper[4558]: I0120 16:57:10.966769 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-4df59"] Jan 20 16:57:10 crc kubenswrapper[4558]: E0120 16:57:10.967922 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6e0dfd1-68d5-4557-9723-9a089cba1360" containerName="mariadb-account-create-update" Jan 20 16:57:10 crc kubenswrapper[4558]: I0120 16:57:10.968013 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6e0dfd1-68d5-4557-9723-9a089cba1360" containerName="mariadb-account-create-update" Jan 20 16:57:10 crc kubenswrapper[4558]: I0120 16:57:10.968276 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b6e0dfd1-68d5-4557-9723-9a089cba1360" containerName="mariadb-account-create-update" Jan 20 16:57:10 crc kubenswrapper[4558]: I0120 16:57:10.969321 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-4df59" Jan 20 16:57:10 crc kubenswrapper[4558]: I0120 16:57:10.971855 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/32e4598f-f0a7-4d4f-a04f-e37171e73905-catalog-content\") pod \"certified-operators-4df59\" (UID: \"32e4598f-f0a7-4d4f-a04f-e37171e73905\") " pod="openshift-marketplace/certified-operators-4df59" Jan 20 16:57:10 crc kubenswrapper[4558]: I0120 16:57:10.972009 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/32e4598f-f0a7-4d4f-a04f-e37171e73905-utilities\") pod \"certified-operators-4df59\" (UID: \"32e4598f-f0a7-4d4f-a04f-e37171e73905\") " pod="openshift-marketplace/certified-operators-4df59" Jan 20 16:57:10 crc kubenswrapper[4558]: I0120 16:57:10.972142 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t4bfd\" (UniqueName: \"kubernetes.io/projected/32e4598f-f0a7-4d4f-a04f-e37171e73905-kube-api-access-t4bfd\") pod \"certified-operators-4df59\" (UID: \"32e4598f-f0a7-4d4f-a04f-e37171e73905\") " pod="openshift-marketplace/certified-operators-4df59" Jan 20 16:57:10 crc kubenswrapper[4558]: I0120 16:57:10.973899 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4df59"] Jan 20 16:57:11 crc kubenswrapper[4558]: I0120 16:57:11.073290 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/32e4598f-f0a7-4d4f-a04f-e37171e73905-catalog-content\") pod \"certified-operators-4df59\" (UID: \"32e4598f-f0a7-4d4f-a04f-e37171e73905\") " pod="openshift-marketplace/certified-operators-4df59" Jan 20 16:57:11 crc kubenswrapper[4558]: I0120 16:57:11.073346 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/32e4598f-f0a7-4d4f-a04f-e37171e73905-utilities\") pod \"certified-operators-4df59\" (UID: \"32e4598f-f0a7-4d4f-a04f-e37171e73905\") " pod="openshift-marketplace/certified-operators-4df59" Jan 20 16:57:11 crc kubenswrapper[4558]: I0120 16:57:11.073384 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t4bfd\" (UniqueName: \"kubernetes.io/projected/32e4598f-f0a7-4d4f-a04f-e37171e73905-kube-api-access-t4bfd\") pod \"certified-operators-4df59\" (UID: \"32e4598f-f0a7-4d4f-a04f-e37171e73905\") " pod="openshift-marketplace/certified-operators-4df59" Jan 20 16:57:11 crc kubenswrapper[4558]: I0120 16:57:11.073702 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/32e4598f-f0a7-4d4f-a04f-e37171e73905-catalog-content\") pod \"certified-operators-4df59\" (UID: \"32e4598f-f0a7-4d4f-a04f-e37171e73905\") " pod="openshift-marketplace/certified-operators-4df59" Jan 20 16:57:11 crc kubenswrapper[4558]: I0120 16:57:11.073754 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/32e4598f-f0a7-4d4f-a04f-e37171e73905-utilities\") pod \"certified-operators-4df59\" (UID: \"32e4598f-f0a7-4d4f-a04f-e37171e73905\") " pod="openshift-marketplace/certified-operators-4df59" Jan 20 16:57:11 crc kubenswrapper[4558]: I0120 16:57:11.093154 4558 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-t4bfd\" (UniqueName: \"kubernetes.io/projected/32e4598f-f0a7-4d4f-a04f-e37171e73905-kube-api-access-t4bfd\") pod \"certified-operators-4df59\" (UID: \"32e4598f-f0a7-4d4f-a04f-e37171e73905\") " pod="openshift-marketplace/certified-operators-4df59" Jan 20 16:57:11 crc kubenswrapper[4558]: I0120 16:57:11.282757 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4df59" Jan 20 16:57:11 crc kubenswrapper[4558]: W0120 16:57:11.746413 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod32e4598f_f0a7_4d4f_a04f_e37171e73905.slice/crio-87ae66334dab79a817fc6ad7560a15fa47a74f4462e01f78ebf21ea499f1c3d3 WatchSource:0}: Error finding container 87ae66334dab79a817fc6ad7560a15fa47a74f4462e01f78ebf21ea499f1c3d3: Status 404 returned error can't find the container with id 87ae66334dab79a817fc6ad7560a15fa47a74f4462e01f78ebf21ea499f1c3d3 Jan 20 16:57:11 crc kubenswrapper[4558]: I0120 16:57:11.746457 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4df59"] Jan 20 16:57:11 crc kubenswrapper[4558]: I0120 16:57:11.791142 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/root-account-create-update-gkn7h"] Jan 20 16:57:11 crc kubenswrapper[4558]: I0120 16:57:11.791966 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-gkn7h" Jan 20 16:57:11 crc kubenswrapper[4558]: I0120 16:57:11.797814 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"openstack-mariadb-root-db-secret" Jan 20 16:57:11 crc kubenswrapper[4558]: I0120 16:57:11.815105 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-gkn7h"] Jan 20 16:57:11 crc kubenswrapper[4558]: I0120 16:57:11.829909 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"ad708448-38df-4494-bca7-fe394c9b53a7","Type":"ContainerStarted","Data":"4c8b93db2628bf0f39a03755d724643f7c79df42b7cf8e54ef0a1ee499194855"} Jan 20 16:57:11 crc kubenswrapper[4558]: I0120 16:57:11.829961 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"ad708448-38df-4494-bca7-fe394c9b53a7","Type":"ContainerStarted","Data":"9502d1598a1db6c1187fe75f1d6475e3133bb27c977752998b79b425efafa253"} Jan 20 16:57:11 crc kubenswrapper[4558]: I0120 16:57:11.829972 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"ad708448-38df-4494-bca7-fe394c9b53a7","Type":"ContainerStarted","Data":"e13ad48d737a5f4a2b0376363ec37ed14a4cc5193e36abfdc9038bf0f4fc0e27"} Jan 20 16:57:11 crc kubenswrapper[4558]: I0120 16:57:11.831691 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4df59" event={"ID":"32e4598f-f0a7-4d4f-a04f-e37171e73905","Type":"ContainerStarted","Data":"87ae66334dab79a817fc6ad7560a15fa47a74f4462e01f78ebf21ea499f1c3d3"} Jan 20 16:57:11 crc kubenswrapper[4558]: I0120 16:57:11.833657 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2tv5d" event={"ID":"0d7a49b7-4cfe-4d63-a510-803f562eb3f6","Type":"ContainerStarted","Data":"797b7b3eb600e707232f7a5510095efe322a2487ba727866d42d2933b005e7e4"} Jan 20 
16:57:11 crc kubenswrapper[4558]: I0120 16:57:11.987804 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mrcql\" (UniqueName: \"kubernetes.io/projected/f1383f04-a9ba-47b5-baaa-d160c552a567-kube-api-access-mrcql\") pod \"root-account-create-update-gkn7h\" (UID: \"f1383f04-a9ba-47b5-baaa-d160c552a567\") " pod="openstack-kuttl-tests/root-account-create-update-gkn7h" Jan 20 16:57:11 crc kubenswrapper[4558]: I0120 16:57:11.987913 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f1383f04-a9ba-47b5-baaa-d160c552a567-operator-scripts\") pod \"root-account-create-update-gkn7h\" (UID: \"f1383f04-a9ba-47b5-baaa-d160c552a567\") " pod="openstack-kuttl-tests/root-account-create-update-gkn7h" Jan 20 16:57:12 crc kubenswrapper[4558]: I0120 16:57:12.089560 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mrcql\" (UniqueName: \"kubernetes.io/projected/f1383f04-a9ba-47b5-baaa-d160c552a567-kube-api-access-mrcql\") pod \"root-account-create-update-gkn7h\" (UID: \"f1383f04-a9ba-47b5-baaa-d160c552a567\") " pod="openstack-kuttl-tests/root-account-create-update-gkn7h" Jan 20 16:57:12 crc kubenswrapper[4558]: I0120 16:57:12.089663 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f1383f04-a9ba-47b5-baaa-d160c552a567-operator-scripts\") pod \"root-account-create-update-gkn7h\" (UID: \"f1383f04-a9ba-47b5-baaa-d160c552a567\") " pod="openstack-kuttl-tests/root-account-create-update-gkn7h" Jan 20 16:57:12 crc kubenswrapper[4558]: I0120 16:57:12.090311 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f1383f04-a9ba-47b5-baaa-d160c552a567-operator-scripts\") pod \"root-account-create-update-gkn7h\" (UID: \"f1383f04-a9ba-47b5-baaa-d160c552a567\") " pod="openstack-kuttl-tests/root-account-create-update-gkn7h" Jan 20 16:57:12 crc kubenswrapper[4558]: I0120 16:57:12.104294 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mrcql\" (UniqueName: \"kubernetes.io/projected/f1383f04-a9ba-47b5-baaa-d160c552a567-kube-api-access-mrcql\") pod \"root-account-create-update-gkn7h\" (UID: \"f1383f04-a9ba-47b5-baaa-d160c552a567\") " pod="openstack-kuttl-tests/root-account-create-update-gkn7h" Jan 20 16:57:12 crc kubenswrapper[4558]: I0120 16:57:12.126832 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-gkn7h" Jan 20 16:57:12 crc kubenswrapper[4558]: I0120 16:57:12.509537 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-2tv5d" podStartSLOduration=7.98234096 podStartE2EDuration="12.509522918s" podCreationTimestamp="2026-01-20 16:57:00 +0000 UTC" firstStartedPulling="2026-01-20 16:57:06.750126722 +0000 UTC m=+920.510464680" lastFinishedPulling="2026-01-20 16:57:11.277308671 +0000 UTC m=+925.037646638" observedRunningTime="2026-01-20 16:57:11.854324143 +0000 UTC m=+925.614662110" watchObservedRunningTime="2026-01-20 16:57:12.509522918 +0000 UTC m=+926.269860885" Jan 20 16:57:12 crc kubenswrapper[4558]: I0120 16:57:12.513605 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-gkn7h"] Jan 20 16:57:12 crc kubenswrapper[4558]: W0120 16:57:12.517248 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf1383f04_a9ba_47b5_baaa_d160c552a567.slice/crio-a11200e0476156dec132bb568e2ae49f0d2dd394673114ed080e2a99e92be4b5 WatchSource:0}: Error finding container a11200e0476156dec132bb568e2ae49f0d2dd394673114ed080e2a99e92be4b5: Status 404 returned error can't find the container with id a11200e0476156dec132bb568e2ae49f0d2dd394673114ed080e2a99e92be4b5 Jan 20 16:57:12 crc kubenswrapper[4558]: I0120 16:57:12.847266 4558 generic.go:334] "Generic (PLEG): container finished" podID="fe5cc26d-38d6-43e0-8977-6ff7e991ab6c" containerID="0af9671e129d86a8a04ea896394c1f0ccdaf1e564becf0be8550e2230fea5ee9" exitCode=0 Jan 20 16:57:12 crc kubenswrapper[4558]: I0120 16:57:12.847469 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-sync-2hhvl" event={"ID":"fe5cc26d-38d6-43e0-8977-6ff7e991ab6c","Type":"ContainerDied","Data":"0af9671e129d86a8a04ea896394c1f0ccdaf1e564becf0be8550e2230fea5ee9"} Jan 20 16:57:12 crc kubenswrapper[4558]: I0120 16:57:12.852956 4558 generic.go:334] "Generic (PLEG): container finished" podID="32e4598f-f0a7-4d4f-a04f-e37171e73905" containerID="09559d9e17de2bd3da60487cf4a69bf245697f188ed5b3b7161976c6d7b4c109" exitCode=0 Jan 20 16:57:12 crc kubenswrapper[4558]: I0120 16:57:12.853032 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4df59" event={"ID":"32e4598f-f0a7-4d4f-a04f-e37171e73905","Type":"ContainerDied","Data":"09559d9e17de2bd3da60487cf4a69bf245697f188ed5b3b7161976c6d7b4c109"} Jan 20 16:57:12 crc kubenswrapper[4558]: I0120 16:57:12.855621 4558 generic.go:334] "Generic (PLEG): container finished" podID="f1383f04-a9ba-47b5-baaa-d160c552a567" containerID="dd12e62385b08aec22ded13c6449e48eeba8060a664335bfc549ee00f3e80b1a" exitCode=0 Jan 20 16:57:12 crc kubenswrapper[4558]: I0120 16:57:12.855755 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-gkn7h" event={"ID":"f1383f04-a9ba-47b5-baaa-d160c552a567","Type":"ContainerDied","Data":"dd12e62385b08aec22ded13c6449e48eeba8060a664335bfc549ee00f3e80b1a"} Jan 20 16:57:12 crc kubenswrapper[4558]: I0120 16:57:12.855801 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-gkn7h" event={"ID":"f1383f04-a9ba-47b5-baaa-d160c552a567","Type":"ContainerStarted","Data":"a11200e0476156dec132bb568e2ae49f0d2dd394673114ed080e2a99e92be4b5"} Jan 20 16:57:13 crc kubenswrapper[4558]: I0120 
16:57:13.378114 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-sj9bb"] Jan 20 16:57:13 crc kubenswrapper[4558]: I0120 16:57:13.379581 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-sj9bb" Jan 20 16:57:13 crc kubenswrapper[4558]: I0120 16:57:13.386906 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-sj9bb"] Jan 20 16:57:13 crc kubenswrapper[4558]: I0120 16:57:13.414698 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wvttt\" (UniqueName: \"kubernetes.io/projected/282a24f3-5eac-4fa8-8a40-6260b94e2164-kube-api-access-wvttt\") pod \"redhat-operators-sj9bb\" (UID: \"282a24f3-5eac-4fa8-8a40-6260b94e2164\") " pod="openshift-marketplace/redhat-operators-sj9bb" Jan 20 16:57:13 crc kubenswrapper[4558]: I0120 16:57:13.414768 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/282a24f3-5eac-4fa8-8a40-6260b94e2164-utilities\") pod \"redhat-operators-sj9bb\" (UID: \"282a24f3-5eac-4fa8-8a40-6260b94e2164\") " pod="openshift-marketplace/redhat-operators-sj9bb" Jan 20 16:57:13 crc kubenswrapper[4558]: I0120 16:57:13.414830 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/282a24f3-5eac-4fa8-8a40-6260b94e2164-catalog-content\") pod \"redhat-operators-sj9bb\" (UID: \"282a24f3-5eac-4fa8-8a40-6260b94e2164\") " pod="openshift-marketplace/redhat-operators-sj9bb" Jan 20 16:57:13 crc kubenswrapper[4558]: I0120 16:57:13.516361 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wvttt\" (UniqueName: \"kubernetes.io/projected/282a24f3-5eac-4fa8-8a40-6260b94e2164-kube-api-access-wvttt\") pod \"redhat-operators-sj9bb\" (UID: \"282a24f3-5eac-4fa8-8a40-6260b94e2164\") " pod="openshift-marketplace/redhat-operators-sj9bb" Jan 20 16:57:13 crc kubenswrapper[4558]: I0120 16:57:13.516635 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/282a24f3-5eac-4fa8-8a40-6260b94e2164-utilities\") pod \"redhat-operators-sj9bb\" (UID: \"282a24f3-5eac-4fa8-8a40-6260b94e2164\") " pod="openshift-marketplace/redhat-operators-sj9bb" Jan 20 16:57:13 crc kubenswrapper[4558]: I0120 16:57:13.516677 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/282a24f3-5eac-4fa8-8a40-6260b94e2164-catalog-content\") pod \"redhat-operators-sj9bb\" (UID: \"282a24f3-5eac-4fa8-8a40-6260b94e2164\") " pod="openshift-marketplace/redhat-operators-sj9bb" Jan 20 16:57:13 crc kubenswrapper[4558]: I0120 16:57:13.517059 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/282a24f3-5eac-4fa8-8a40-6260b94e2164-catalog-content\") pod \"redhat-operators-sj9bb\" (UID: \"282a24f3-5eac-4fa8-8a40-6260b94e2164\") " pod="openshift-marketplace/redhat-operators-sj9bb" Jan 20 16:57:13 crc kubenswrapper[4558]: I0120 16:57:13.517304 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/282a24f3-5eac-4fa8-8a40-6260b94e2164-utilities\") pod \"redhat-operators-sj9bb\" (UID: 
\"282a24f3-5eac-4fa8-8a40-6260b94e2164\") " pod="openshift-marketplace/redhat-operators-sj9bb" Jan 20 16:57:13 crc kubenswrapper[4558]: I0120 16:57:13.546351 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wvttt\" (UniqueName: \"kubernetes.io/projected/282a24f3-5eac-4fa8-8a40-6260b94e2164-kube-api-access-wvttt\") pod \"redhat-operators-sj9bb\" (UID: \"282a24f3-5eac-4fa8-8a40-6260b94e2164\") " pod="openshift-marketplace/redhat-operators-sj9bb" Jan 20 16:57:13 crc kubenswrapper[4558]: I0120 16:57:13.746406 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-sj9bb" Jan 20 16:57:13 crc kubenswrapper[4558]: I0120 16:57:13.865768 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"ad708448-38df-4494-bca7-fe394c9b53a7","Type":"ContainerStarted","Data":"aa379f22b8042a8997aeff6f2e901aac68a2feff4a66a0636172020228c651ff"} Jan 20 16:57:13 crc kubenswrapper[4558]: I0120 16:57:13.866006 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"ad708448-38df-4494-bca7-fe394c9b53a7","Type":"ContainerStarted","Data":"58081c924356de25ebdeeecdbf39084210cf71b6f92814049f47601bfce7e1ef"} Jan 20 16:57:13 crc kubenswrapper[4558]: I0120 16:57:13.866019 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"ad708448-38df-4494-bca7-fe394c9b53a7","Type":"ContainerStarted","Data":"e121933f3a246845b6169455f9e1e99e6b2a61bce703613cb6fb31821719438d"} Jan 20 16:57:13 crc kubenswrapper[4558]: I0120 16:57:13.866028 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"ad708448-38df-4494-bca7-fe394c9b53a7","Type":"ContainerStarted","Data":"1e0a8b23c89daae563498da37b455997040b6b47b7a29c5c9590feba957d5e7e"} Jan 20 16:57:13 crc kubenswrapper[4558]: I0120 16:57:13.868733 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4df59" event={"ID":"32e4598f-f0a7-4d4f-a04f-e37171e73905","Type":"ContainerStarted","Data":"9188a95d3a317a681ceaade1654598890e519670adac4fb9fbc77b599cebe30c"} Jan 20 16:57:14 crc kubenswrapper[4558]: I0120 16:57:14.175328 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-sj9bb"] Jan 20 16:57:14 crc kubenswrapper[4558]: W0120 16:57:14.190516 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod282a24f3_5eac_4fa8_8a40_6260b94e2164.slice/crio-c3c1e666c682481300e954f79b2ed63d800ac8758e333d048d306c132805e213 WatchSource:0}: Error finding container c3c1e666c682481300e954f79b2ed63d800ac8758e333d048d306c132805e213: Status 404 returned error can't find the container with id c3c1e666c682481300e954f79b2ed63d800ac8758e333d048d306c132805e213 Jan 20 16:57:14 crc kubenswrapper[4558]: I0120 16:57:14.249614 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-db-sync-2hhvl" Jan 20 16:57:14 crc kubenswrapper[4558]: I0120 16:57:14.328007 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fe5cc26d-38d6-43e0-8977-6ff7e991ab6c-config-data\") pod \"fe5cc26d-38d6-43e0-8977-6ff7e991ab6c\" (UID: \"fe5cc26d-38d6-43e0-8977-6ff7e991ab6c\") " Jan 20 16:57:14 crc kubenswrapper[4558]: I0120 16:57:14.328476 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe5cc26d-38d6-43e0-8977-6ff7e991ab6c-combined-ca-bundle\") pod \"fe5cc26d-38d6-43e0-8977-6ff7e991ab6c\" (UID: \"fe5cc26d-38d6-43e0-8977-6ff7e991ab6c\") " Jan 20 16:57:14 crc kubenswrapper[4558]: I0120 16:57:14.328534 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/fe5cc26d-38d6-43e0-8977-6ff7e991ab6c-db-sync-config-data\") pod \"fe5cc26d-38d6-43e0-8977-6ff7e991ab6c\" (UID: \"fe5cc26d-38d6-43e0-8977-6ff7e991ab6c\") " Jan 20 16:57:14 crc kubenswrapper[4558]: I0120 16:57:14.328557 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-896bc\" (UniqueName: \"kubernetes.io/projected/fe5cc26d-38d6-43e0-8977-6ff7e991ab6c-kube-api-access-896bc\") pod \"fe5cc26d-38d6-43e0-8977-6ff7e991ab6c\" (UID: \"fe5cc26d-38d6-43e0-8977-6ff7e991ab6c\") " Jan 20 16:57:14 crc kubenswrapper[4558]: I0120 16:57:14.333463 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fe5cc26d-38d6-43e0-8977-6ff7e991ab6c-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "fe5cc26d-38d6-43e0-8977-6ff7e991ab6c" (UID: "fe5cc26d-38d6-43e0-8977-6ff7e991ab6c"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:57:14 crc kubenswrapper[4558]: I0120 16:57:14.333761 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fe5cc26d-38d6-43e0-8977-6ff7e991ab6c-kube-api-access-896bc" (OuterVolumeSpecName: "kube-api-access-896bc") pod "fe5cc26d-38d6-43e0-8977-6ff7e991ab6c" (UID: "fe5cc26d-38d6-43e0-8977-6ff7e991ab6c"). InnerVolumeSpecName "kube-api-access-896bc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:57:14 crc kubenswrapper[4558]: I0120 16:57:14.343323 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-gkn7h" Jan 20 16:57:14 crc kubenswrapper[4558]: I0120 16:57:14.352555 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fe5cc26d-38d6-43e0-8977-6ff7e991ab6c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fe5cc26d-38d6-43e0-8977-6ff7e991ab6c" (UID: "fe5cc26d-38d6-43e0-8977-6ff7e991ab6c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:57:14 crc kubenswrapper[4558]: I0120 16:57:14.375270 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fe5cc26d-38d6-43e0-8977-6ff7e991ab6c-config-data" (OuterVolumeSpecName: "config-data") pod "fe5cc26d-38d6-43e0-8977-6ff7e991ab6c" (UID: "fe5cc26d-38d6-43e0-8977-6ff7e991ab6c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:57:14 crc kubenswrapper[4558]: I0120 16:57:14.429573 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f1383f04-a9ba-47b5-baaa-d160c552a567-operator-scripts\") pod \"f1383f04-a9ba-47b5-baaa-d160c552a567\" (UID: \"f1383f04-a9ba-47b5-baaa-d160c552a567\") " Jan 20 16:57:14 crc kubenswrapper[4558]: I0120 16:57:14.429703 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mrcql\" (UniqueName: \"kubernetes.io/projected/f1383f04-a9ba-47b5-baaa-d160c552a567-kube-api-access-mrcql\") pod \"f1383f04-a9ba-47b5-baaa-d160c552a567\" (UID: \"f1383f04-a9ba-47b5-baaa-d160c552a567\") " Jan 20 16:57:14 crc kubenswrapper[4558]: I0120 16:57:14.430073 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fe5cc26d-38d6-43e0-8977-6ff7e991ab6c-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:14 crc kubenswrapper[4558]: I0120 16:57:14.430085 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe5cc26d-38d6-43e0-8977-6ff7e991ab6c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:14 crc kubenswrapper[4558]: I0120 16:57:14.430095 4558 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/fe5cc26d-38d6-43e0-8977-6ff7e991ab6c-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:14 crc kubenswrapper[4558]: I0120 16:57:14.430103 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-896bc\" (UniqueName: \"kubernetes.io/projected/fe5cc26d-38d6-43e0-8977-6ff7e991ab6c-kube-api-access-896bc\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:14 crc kubenswrapper[4558]: I0120 16:57:14.430406 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f1383f04-a9ba-47b5-baaa-d160c552a567-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f1383f04-a9ba-47b5-baaa-d160c552a567" (UID: "f1383f04-a9ba-47b5-baaa-d160c552a567"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:57:14 crc kubenswrapper[4558]: I0120 16:57:14.433819 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f1383f04-a9ba-47b5-baaa-d160c552a567-kube-api-access-mrcql" (OuterVolumeSpecName: "kube-api-access-mrcql") pod "f1383f04-a9ba-47b5-baaa-d160c552a567" (UID: "f1383f04-a9ba-47b5-baaa-d160c552a567"). InnerVolumeSpecName "kube-api-access-mrcql". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:57:14 crc kubenswrapper[4558]: I0120 16:57:14.495323 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-jlgl4" Jan 20 16:57:14 crc kubenswrapper[4558]: I0120 16:57:14.495371 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-jlgl4" Jan 20 16:57:14 crc kubenswrapper[4558]: I0120 16:57:14.531585 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f1383f04-a9ba-47b5-baaa-d160c552a567-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:14 crc kubenswrapper[4558]: I0120 16:57:14.531614 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mrcql\" (UniqueName: \"kubernetes.io/projected/f1383f04-a9ba-47b5-baaa-d160c552a567-kube-api-access-mrcql\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:14 crc kubenswrapper[4558]: I0120 16:57:14.547933 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-jlgl4" Jan 20 16:57:14 crc kubenswrapper[4558]: I0120 16:57:14.878131 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-sync-2hhvl" event={"ID":"fe5cc26d-38d6-43e0-8977-6ff7e991ab6c","Type":"ContainerDied","Data":"b5da9ce34a4e3e6af8665256ac56bb64e8ca83042a91c6c0fe8a62b5e6ea9145"} Jan 20 16:57:14 crc kubenswrapper[4558]: I0120 16:57:14.878212 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b5da9ce34a4e3e6af8665256ac56bb64e8ca83042a91c6c0fe8a62b5e6ea9145" Jan 20 16:57:14 crc kubenswrapper[4558]: I0120 16:57:14.878291 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-db-sync-2hhvl" Jan 20 16:57:14 crc kubenswrapper[4558]: I0120 16:57:14.880799 4558 generic.go:334] "Generic (PLEG): container finished" podID="32e4598f-f0a7-4d4f-a04f-e37171e73905" containerID="9188a95d3a317a681ceaade1654598890e519670adac4fb9fbc77b599cebe30c" exitCode=0 Jan 20 16:57:14 crc kubenswrapper[4558]: I0120 16:57:14.880844 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4df59" event={"ID":"32e4598f-f0a7-4d4f-a04f-e37171e73905","Type":"ContainerDied","Data":"9188a95d3a317a681ceaade1654598890e519670adac4fb9fbc77b599cebe30c"} Jan 20 16:57:14 crc kubenswrapper[4558]: I0120 16:57:14.887966 4558 generic.go:334] "Generic (PLEG): container finished" podID="282a24f3-5eac-4fa8-8a40-6260b94e2164" containerID="ab0e92a7817efc18119aee5daed03b4c6b290fe4f86f0311996e0e59e78ab1a3" exitCode=0 Jan 20 16:57:14 crc kubenswrapper[4558]: I0120 16:57:14.888024 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sj9bb" event={"ID":"282a24f3-5eac-4fa8-8a40-6260b94e2164","Type":"ContainerDied","Data":"ab0e92a7817efc18119aee5daed03b4c6b290fe4f86f0311996e0e59e78ab1a3"} Jan 20 16:57:14 crc kubenswrapper[4558]: I0120 16:57:14.888051 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sj9bb" event={"ID":"282a24f3-5eac-4fa8-8a40-6260b94e2164","Type":"ContainerStarted","Data":"c3c1e666c682481300e954f79b2ed63d800ac8758e333d048d306c132805e213"} Jan 20 16:57:14 crc kubenswrapper[4558]: I0120 16:57:14.892521 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-gkn7h" event={"ID":"f1383f04-a9ba-47b5-baaa-d160c552a567","Type":"ContainerDied","Data":"a11200e0476156dec132bb568e2ae49f0d2dd394673114ed080e2a99e92be4b5"} Jan 20 16:57:14 crc kubenswrapper[4558]: I0120 16:57:14.894592 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a11200e0476156dec132bb568e2ae49f0d2dd394673114ed080e2a99e92be4b5" Jan 20 16:57:14 crc kubenswrapper[4558]: I0120 16:57:14.892532 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-gkn7h" Jan 20 16:57:14 crc kubenswrapper[4558]: I0120 16:57:14.928417 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-jlgl4" Jan 20 16:57:15 crc kubenswrapper[4558]: I0120 16:57:15.897321 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 16:57:15 crc kubenswrapper[4558]: I0120 16:57:15.901987 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4df59" event={"ID":"32e4598f-f0a7-4d4f-a04f-e37171e73905","Type":"ContainerStarted","Data":"025dfd613ceca62d521214b8cf184ed58f632222b81d934ad8642198b62b45a2"} Jan 20 16:57:15 crc kubenswrapper[4558]: I0120 16:57:15.910242 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"ad708448-38df-4494-bca7-fe394c9b53a7","Type":"ContainerStarted","Data":"a0064904f7cf7b2f3f57d0cbd50180940cf612a73b85319dcd1b4cc9e5286c49"} Jan 20 16:57:15 crc kubenswrapper[4558]: I0120 16:57:15.910263 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"ad708448-38df-4494-bca7-fe394c9b53a7","Type":"ContainerStarted","Data":"4fd13baec559163b2b66474d46cf26e69eb2ac8b71ab5c82119b909d78b9c3ec"} Jan 20 16:57:15 crc kubenswrapper[4558]: I0120 16:57:15.910271 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"ad708448-38df-4494-bca7-fe394c9b53a7","Type":"ContainerStarted","Data":"7f4031ea42bd00352739604cfd6ebff1f1c035663b1e3b5d3f1ba025cafadaf4"} Jan 20 16:57:15 crc kubenswrapper[4558]: I0120 16:57:15.910279 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"ad708448-38df-4494-bca7-fe394c9b53a7","Type":"ContainerStarted","Data":"b729d20938039dd3cd6701145536a565bd85b83711e04e7135e5d6f37060390e"} Jan 20 16:57:15 crc kubenswrapper[4558]: I0120 16:57:15.910286 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"ad708448-38df-4494-bca7-fe394c9b53a7","Type":"ContainerStarted","Data":"5d77e588e0ee15e79eb372260c13103acd9bceda72bb63cee529297cc6f3bf98"} Jan 20 16:57:15 crc kubenswrapper[4558]: I0120 16:57:15.910294 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"ad708448-38df-4494-bca7-fe394c9b53a7","Type":"ContainerStarted","Data":"90254ca174d78ea243b5ca11d013559dbc865a160f1d264e2b31efdc48f96768"} Jan 20 16:57:15 crc kubenswrapper[4558]: I0120 16:57:15.934334 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-4df59" podStartSLOduration=3.420941657 podStartE2EDuration="5.934316783s" podCreationTimestamp="2026-01-20 16:57:10 +0000 UTC" firstStartedPulling="2026-01-20 16:57:12.966060725 +0000 UTC m=+926.726398692" lastFinishedPulling="2026-01-20 16:57:15.47943585 +0000 UTC m=+929.239773818" observedRunningTime="2026-01-20 16:57:15.929099242 +0000 UTC m=+929.689437209" watchObservedRunningTime="2026-01-20 16:57:15.934316783 +0000 UTC m=+929.694654751" Jan 20 16:57:16 crc kubenswrapper[4558]: I0120 16:57:16.682327 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 16:57:16 crc kubenswrapper[4558]: I0120 16:57:16.927847 4558 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"ad708448-38df-4494-bca7-fe394c9b53a7","Type":"ContainerStarted","Data":"e1f52810d18aeba836cd426714154488ddb699c7ef7cb6b0251ec23a6345ebf4"} Jan 20 16:57:16 crc kubenswrapper[4558]: I0120 16:57:16.983021 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/swift-storage-0" podStartSLOduration=27.239237558 podStartE2EDuration="35.983008501s" podCreationTimestamp="2026-01-20 16:56:41 +0000 UTC" firstStartedPulling="2026-01-20 16:57:06.176753449 +0000 UTC m=+919.937091416" lastFinishedPulling="2026-01-20 16:57:14.920524392 +0000 UTC m=+928.680862359" observedRunningTime="2026-01-20 16:57:16.973062627 +0000 UTC m=+930.733400594" watchObservedRunningTime="2026-01-20 16:57:16.983008501 +0000 UTC m=+930.743346468" Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.267190 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-58b8ddd7c-rr7mz"] Jan 20 16:57:17 crc kubenswrapper[4558]: E0120 16:57:17.267479 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1383f04-a9ba-47b5-baaa-d160c552a567" containerName="mariadb-account-create-update" Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.267495 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1383f04-a9ba-47b5-baaa-d160c552a567" containerName="mariadb-account-create-update" Jan 20 16:57:17 crc kubenswrapper[4558]: E0120 16:57:17.267510 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe5cc26d-38d6-43e0-8977-6ff7e991ab6c" containerName="glance-db-sync" Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.267515 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe5cc26d-38d6-43e0-8977-6ff7e991ab6c" containerName="glance-db-sync" Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.267668 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="fe5cc26d-38d6-43e0-8977-6ff7e991ab6c" containerName="glance-db-sync" Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.267680 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f1383f04-a9ba-47b5-baaa-d160c552a567" containerName="mariadb-account-create-update" Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.268391 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-58b8ddd7c-rr7mz" Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.272444 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"dns-swift-storage-0" Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.283393 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-58b8ddd7c-rr7mz"] Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.385523 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d9be5f16-16de-4ae6-9007-cca3e80501b9-dns-swift-storage-0\") pod \"dnsmasq-dnsmasq-58b8ddd7c-rr7mz\" (UID: \"d9be5f16-16de-4ae6-9007-cca3e80501b9\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-58b8ddd7c-rr7mz" Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.385776 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2lr8g\" (UniqueName: \"kubernetes.io/projected/d9be5f16-16de-4ae6-9007-cca3e80501b9-kube-api-access-2lr8g\") pod \"dnsmasq-dnsmasq-58b8ddd7c-rr7mz\" (UID: \"d9be5f16-16de-4ae6-9007-cca3e80501b9\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-58b8ddd7c-rr7mz" Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.385886 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/d9be5f16-16de-4ae6-9007-cca3e80501b9-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-58b8ddd7c-rr7mz\" (UID: \"d9be5f16-16de-4ae6-9007-cca3e80501b9\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-58b8ddd7c-rr7mz" Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.385963 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d9be5f16-16de-4ae6-9007-cca3e80501b9-config\") pod \"dnsmasq-dnsmasq-58b8ddd7c-rr7mz\" (UID: \"d9be5f16-16de-4ae6-9007-cca3e80501b9\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-58b8ddd7c-rr7mz" Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.487211 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d9be5f16-16de-4ae6-9007-cca3e80501b9-config\") pod \"dnsmasq-dnsmasq-58b8ddd7c-rr7mz\" (UID: \"d9be5f16-16de-4ae6-9007-cca3e80501b9\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-58b8ddd7c-rr7mz" Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.487329 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d9be5f16-16de-4ae6-9007-cca3e80501b9-dns-swift-storage-0\") pod \"dnsmasq-dnsmasq-58b8ddd7c-rr7mz\" (UID: \"d9be5f16-16de-4ae6-9007-cca3e80501b9\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-58b8ddd7c-rr7mz" Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.487363 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2lr8g\" (UniqueName: \"kubernetes.io/projected/d9be5f16-16de-4ae6-9007-cca3e80501b9-kube-api-access-2lr8g\") pod \"dnsmasq-dnsmasq-58b8ddd7c-rr7mz\" (UID: \"d9be5f16-16de-4ae6-9007-cca3e80501b9\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-58b8ddd7c-rr7mz" Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.488003 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/d9be5f16-16de-4ae6-9007-cca3e80501b9-config\") pod \"dnsmasq-dnsmasq-58b8ddd7c-rr7mz\" (UID: \"d9be5f16-16de-4ae6-9007-cca3e80501b9\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-58b8ddd7c-rr7mz" Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.488517 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d9be5f16-16de-4ae6-9007-cca3e80501b9-dns-swift-storage-0\") pod \"dnsmasq-dnsmasq-58b8ddd7c-rr7mz\" (UID: \"d9be5f16-16de-4ae6-9007-cca3e80501b9\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-58b8ddd7c-rr7mz" Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.488668 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/d9be5f16-16de-4ae6-9007-cca3e80501b9-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-58b8ddd7c-rr7mz\" (UID: \"d9be5f16-16de-4ae6-9007-cca3e80501b9\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-58b8ddd7c-rr7mz" Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.489153 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/d9be5f16-16de-4ae6-9007-cca3e80501b9-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-58b8ddd7c-rr7mz\" (UID: \"d9be5f16-16de-4ae6-9007-cca3e80501b9\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-58b8ddd7c-rr7mz" Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.498889 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-db-create-6cd95"] Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.499712 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-db-create-6cd95" Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.510140 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-db-create-6cd95"] Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.520987 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2lr8g\" (UniqueName: \"kubernetes.io/projected/d9be5f16-16de-4ae6-9007-cca3e80501b9-kube-api-access-2lr8g\") pod \"dnsmasq-dnsmasq-58b8ddd7c-rr7mz\" (UID: \"d9be5f16-16de-4ae6-9007-cca3e80501b9\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-58b8ddd7c-rr7mz" Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.539771 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-eb81-account-create-update-sr6r9"] Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.540702 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-eb81-account-create-update-sr6r9" Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.542022 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-db-secret" Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.546462 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-eb81-account-create-update-sr6r9"] Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.582421 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-58b8ddd7c-rr7mz" Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.590867 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s62q7\" (UniqueName: \"kubernetes.io/projected/cdd71514-e929-4a96-b799-91eecad5ac02-kube-api-access-s62q7\") pod \"barbican-eb81-account-create-update-sr6r9\" (UID: \"cdd71514-e929-4a96-b799-91eecad5ac02\") " pod="openstack-kuttl-tests/barbican-eb81-account-create-update-sr6r9" Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.591020 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cdd71514-e929-4a96-b799-91eecad5ac02-operator-scripts\") pod \"barbican-eb81-account-create-update-sr6r9\" (UID: \"cdd71514-e929-4a96-b799-91eecad5ac02\") " pod="openstack-kuttl-tests/barbican-eb81-account-create-update-sr6r9" Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.591050 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wlbph\" (UniqueName: \"kubernetes.io/projected/22adc061-9ded-4378-abcc-1e86a3985839-kube-api-access-wlbph\") pod \"cinder-db-create-6cd95\" (UID: \"22adc061-9ded-4378-abcc-1e86a3985839\") " pod="openstack-kuttl-tests/cinder-db-create-6cd95" Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.591087 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/22adc061-9ded-4378-abcc-1e86a3985839-operator-scripts\") pod \"cinder-db-create-6cd95\" (UID: \"22adc061-9ded-4378-abcc-1e86a3985839\") " pod="openstack-kuttl-tests/cinder-db-create-6cd95" Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.608366 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-db-create-w6fpd"] Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.609261 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-db-create-w6fpd" Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.613031 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-db-create-w6fpd"] Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.692825 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-825dr\" (UniqueName: \"kubernetes.io/projected/7b3471c1-c42b-4be4-96fe-7e43ef86ba15-kube-api-access-825dr\") pod \"barbican-db-create-w6fpd\" (UID: \"7b3471c1-c42b-4be4-96fe-7e43ef86ba15\") " pod="openstack-kuttl-tests/barbican-db-create-w6fpd" Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.693064 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cdd71514-e929-4a96-b799-91eecad5ac02-operator-scripts\") pod \"barbican-eb81-account-create-update-sr6r9\" (UID: \"cdd71514-e929-4a96-b799-91eecad5ac02\") " pod="openstack-kuttl-tests/barbican-eb81-account-create-update-sr6r9" Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.693089 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wlbph\" (UniqueName: \"kubernetes.io/projected/22adc061-9ded-4378-abcc-1e86a3985839-kube-api-access-wlbph\") pod \"cinder-db-create-6cd95\" (UID: \"22adc061-9ded-4378-abcc-1e86a3985839\") " pod="openstack-kuttl-tests/cinder-db-create-6cd95" Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.693130 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/22adc061-9ded-4378-abcc-1e86a3985839-operator-scripts\") pod \"cinder-db-create-6cd95\" (UID: \"22adc061-9ded-4378-abcc-1e86a3985839\") " pod="openstack-kuttl-tests/cinder-db-create-6cd95" Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.693233 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s62q7\" (UniqueName: \"kubernetes.io/projected/cdd71514-e929-4a96-b799-91eecad5ac02-kube-api-access-s62q7\") pod \"barbican-eb81-account-create-update-sr6r9\" (UID: \"cdd71514-e929-4a96-b799-91eecad5ac02\") " pod="openstack-kuttl-tests/barbican-eb81-account-create-update-sr6r9" Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.693262 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7b3471c1-c42b-4be4-96fe-7e43ef86ba15-operator-scripts\") pod \"barbican-db-create-w6fpd\" (UID: \"7b3471c1-c42b-4be4-96fe-7e43ef86ba15\") " pod="openstack-kuttl-tests/barbican-db-create-w6fpd" Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.693882 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cdd71514-e929-4a96-b799-91eecad5ac02-operator-scripts\") pod \"barbican-eb81-account-create-update-sr6r9\" (UID: \"cdd71514-e929-4a96-b799-91eecad5ac02\") " pod="openstack-kuttl-tests/barbican-eb81-account-create-update-sr6r9" Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.694576 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/22adc061-9ded-4378-abcc-1e86a3985839-operator-scripts\") pod \"cinder-db-create-6cd95\" (UID: \"22adc061-9ded-4378-abcc-1e86a3985839\") " 
pod="openstack-kuttl-tests/cinder-db-create-6cd95" Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.716673 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s62q7\" (UniqueName: \"kubernetes.io/projected/cdd71514-e929-4a96-b799-91eecad5ac02-kube-api-access-s62q7\") pod \"barbican-eb81-account-create-update-sr6r9\" (UID: \"cdd71514-e929-4a96-b799-91eecad5ac02\") " pod="openstack-kuttl-tests/barbican-eb81-account-create-update-sr6r9" Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.722452 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wlbph\" (UniqueName: \"kubernetes.io/projected/22adc061-9ded-4378-abcc-1e86a3985839-kube-api-access-wlbph\") pod \"cinder-db-create-6cd95\" (UID: \"22adc061-9ded-4378-abcc-1e86a3985839\") " pod="openstack-kuttl-tests/cinder-db-create-6cd95" Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.726536 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-f537-account-create-update-5crz6"] Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.727490 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-f537-account-create-update-5crz6" Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.729803 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-db-secret" Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.732430 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-f537-account-create-update-5crz6"] Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.796948 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7b3471c1-c42b-4be4-96fe-7e43ef86ba15-operator-scripts\") pod \"barbican-db-create-w6fpd\" (UID: \"7b3471c1-c42b-4be4-96fe-7e43ef86ba15\") " pod="openstack-kuttl-tests/barbican-db-create-w6fpd" Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.797007 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e0c2bc6c-ea80-49a0-876f-f0a4d29c12bd-operator-scripts\") pod \"cinder-f537-account-create-update-5crz6\" (UID: \"e0c2bc6c-ea80-49a0-876f-f0a4d29c12bd\") " pod="openstack-kuttl-tests/cinder-f537-account-create-update-5crz6" Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.797044 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-825dr\" (UniqueName: \"kubernetes.io/projected/7b3471c1-c42b-4be4-96fe-7e43ef86ba15-kube-api-access-825dr\") pod \"barbican-db-create-w6fpd\" (UID: \"7b3471c1-c42b-4be4-96fe-7e43ef86ba15\") " pod="openstack-kuttl-tests/barbican-db-create-w6fpd" Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.797121 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jd9hj\" (UniqueName: \"kubernetes.io/projected/e0c2bc6c-ea80-49a0-876f-f0a4d29c12bd-kube-api-access-jd9hj\") pod \"cinder-f537-account-create-update-5crz6\" (UID: \"e0c2bc6c-ea80-49a0-876f-f0a4d29c12bd\") " pod="openstack-kuttl-tests/cinder-f537-account-create-update-5crz6" Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.798002 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/7b3471c1-c42b-4be4-96fe-7e43ef86ba15-operator-scripts\") pod \"barbican-db-create-w6fpd\" (UID: \"7b3471c1-c42b-4be4-96fe-7e43ef86ba15\") " pod="openstack-kuttl-tests/barbican-db-create-w6fpd" Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.803115 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-db-create-9zm4f"] Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.803958 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-db-create-9zm4f" Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.810839 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-a57a-account-create-update-76pmk"] Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.812762 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-a57a-account-create-update-76pmk" Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.813918 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-db-secret" Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.814937 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-825dr\" (UniqueName: \"kubernetes.io/projected/7b3471c1-c42b-4be4-96fe-7e43ef86ba15-kube-api-access-825dr\") pod \"barbican-db-create-w6fpd\" (UID: \"7b3471c1-c42b-4be4-96fe-7e43ef86ba15\") " pod="openstack-kuttl-tests/barbican-db-create-w6fpd" Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.826306 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-a57a-account-create-update-76pmk"] Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.838039 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-db-create-9zm4f"] Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.858988 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-db-create-6cd95" Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.861043 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-eb81-account-create-update-sr6r9" Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.867112 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-db-sync-wpq8b"] Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.867963 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-db-sync-wpq8b" Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.872381 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-config-data" Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.872549 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-scripts" Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.872650 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone" Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.876640 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-keystone-dockercfg-wlvl5" Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.892085 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-db-sync-wpq8b"] Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.899213 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jd9hj\" (UniqueName: \"kubernetes.io/projected/e0c2bc6c-ea80-49a0-876f-f0a4d29c12bd-kube-api-access-jd9hj\") pod \"cinder-f537-account-create-update-5crz6\" (UID: \"e0c2bc6c-ea80-49a0-876f-f0a4d29c12bd\") " pod="openstack-kuttl-tests/cinder-f537-account-create-update-5crz6" Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.899288 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e0c2bc6c-ea80-49a0-876f-f0a4d29c12bd-operator-scripts\") pod \"cinder-f537-account-create-update-5crz6\" (UID: \"e0c2bc6c-ea80-49a0-876f-f0a4d29c12bd\") " pod="openstack-kuttl-tests/cinder-f537-account-create-update-5crz6" Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.899919 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e0c2bc6c-ea80-49a0-876f-f0a4d29c12bd-operator-scripts\") pod \"cinder-f537-account-create-update-5crz6\" (UID: \"e0c2bc6c-ea80-49a0-876f-f0a4d29c12bd\") " pod="openstack-kuttl-tests/cinder-f537-account-create-update-5crz6" Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.914941 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jd9hj\" (UniqueName: \"kubernetes.io/projected/e0c2bc6c-ea80-49a0-876f-f0a4d29c12bd-kube-api-access-jd9hj\") pod \"cinder-f537-account-create-update-5crz6\" (UID: \"e0c2bc6c-ea80-49a0-876f-f0a4d29c12bd\") " pod="openstack-kuttl-tests/cinder-f537-account-create-update-5crz6" Jan 20 16:57:17 crc kubenswrapper[4558]: I0120 16:57:17.957449 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-db-create-w6fpd" Jan 20 16:57:18 crc kubenswrapper[4558]: I0120 16:57:18.000552 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xhm8z\" (UniqueName: \"kubernetes.io/projected/cde9092f-c62d-406c-b800-87c38b56402b-kube-api-access-xhm8z\") pod \"keystone-db-sync-wpq8b\" (UID: \"cde9092f-c62d-406c-b800-87c38b56402b\") " pod="openstack-kuttl-tests/keystone-db-sync-wpq8b" Jan 20 16:57:18 crc kubenswrapper[4558]: I0120 16:57:18.000623 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/567ff26d-1c6e-4117-869d-3bfcb1a705fc-operator-scripts\") pod \"neutron-db-create-9zm4f\" (UID: \"567ff26d-1c6e-4117-869d-3bfcb1a705fc\") " pod="openstack-kuttl-tests/neutron-db-create-9zm4f" Jan 20 16:57:18 crc kubenswrapper[4558]: I0120 16:57:18.000659 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sr4kn\" (UniqueName: \"kubernetes.io/projected/f4589cf0-10b5-4bd9-a06d-58481ece189a-kube-api-access-sr4kn\") pod \"neutron-a57a-account-create-update-76pmk\" (UID: \"f4589cf0-10b5-4bd9-a06d-58481ece189a\") " pod="openstack-kuttl-tests/neutron-a57a-account-create-update-76pmk" Jan 20 16:57:18 crc kubenswrapper[4558]: I0120 16:57:18.000678 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f4589cf0-10b5-4bd9-a06d-58481ece189a-operator-scripts\") pod \"neutron-a57a-account-create-update-76pmk\" (UID: \"f4589cf0-10b5-4bd9-a06d-58481ece189a\") " pod="openstack-kuttl-tests/neutron-a57a-account-create-update-76pmk" Jan 20 16:57:18 crc kubenswrapper[4558]: I0120 16:57:18.000707 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cde9092f-c62d-406c-b800-87c38b56402b-combined-ca-bundle\") pod \"keystone-db-sync-wpq8b\" (UID: \"cde9092f-c62d-406c-b800-87c38b56402b\") " pod="openstack-kuttl-tests/keystone-db-sync-wpq8b" Jan 20 16:57:18 crc kubenswrapper[4558]: I0120 16:57:18.000728 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cde9092f-c62d-406c-b800-87c38b56402b-config-data\") pod \"keystone-db-sync-wpq8b\" (UID: \"cde9092f-c62d-406c-b800-87c38b56402b\") " pod="openstack-kuttl-tests/keystone-db-sync-wpq8b" Jan 20 16:57:18 crc kubenswrapper[4558]: I0120 16:57:18.000840 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bggx2\" (UniqueName: \"kubernetes.io/projected/567ff26d-1c6e-4117-869d-3bfcb1a705fc-kube-api-access-bggx2\") pod \"neutron-db-create-9zm4f\" (UID: \"567ff26d-1c6e-4117-869d-3bfcb1a705fc\") " pod="openstack-kuttl-tests/neutron-db-create-9zm4f" Jan 20 16:57:18 crc kubenswrapper[4558]: I0120 16:57:18.072654 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-f537-account-create-update-5crz6" Jan 20 16:57:18 crc kubenswrapper[4558]: I0120 16:57:18.092707 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-58b8ddd7c-rr7mz"] Jan 20 16:57:18 crc kubenswrapper[4558]: W0120 16:57:18.102051 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd9be5f16_16de_4ae6_9007_cca3e80501b9.slice/crio-393c240e307e6f1c510b640fb0e60d5df3df97c49287fbfe0533b6fedc013ca9 WatchSource:0}: Error finding container 393c240e307e6f1c510b640fb0e60d5df3df97c49287fbfe0533b6fedc013ca9: Status 404 returned error can't find the container with id 393c240e307e6f1c510b640fb0e60d5df3df97c49287fbfe0533b6fedc013ca9 Jan 20 16:57:18 crc kubenswrapper[4558]: I0120 16:57:18.102252 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sr4kn\" (UniqueName: \"kubernetes.io/projected/f4589cf0-10b5-4bd9-a06d-58481ece189a-kube-api-access-sr4kn\") pod \"neutron-a57a-account-create-update-76pmk\" (UID: \"f4589cf0-10b5-4bd9-a06d-58481ece189a\") " pod="openstack-kuttl-tests/neutron-a57a-account-create-update-76pmk" Jan 20 16:57:18 crc kubenswrapper[4558]: I0120 16:57:18.102301 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f4589cf0-10b5-4bd9-a06d-58481ece189a-operator-scripts\") pod \"neutron-a57a-account-create-update-76pmk\" (UID: \"f4589cf0-10b5-4bd9-a06d-58481ece189a\") " pod="openstack-kuttl-tests/neutron-a57a-account-create-update-76pmk" Jan 20 16:57:18 crc kubenswrapper[4558]: I0120 16:57:18.102328 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cde9092f-c62d-406c-b800-87c38b56402b-combined-ca-bundle\") pod \"keystone-db-sync-wpq8b\" (UID: \"cde9092f-c62d-406c-b800-87c38b56402b\") " pod="openstack-kuttl-tests/keystone-db-sync-wpq8b" Jan 20 16:57:18 crc kubenswrapper[4558]: I0120 16:57:18.102346 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cde9092f-c62d-406c-b800-87c38b56402b-config-data\") pod \"keystone-db-sync-wpq8b\" (UID: \"cde9092f-c62d-406c-b800-87c38b56402b\") " pod="openstack-kuttl-tests/keystone-db-sync-wpq8b" Jan 20 16:57:18 crc kubenswrapper[4558]: I0120 16:57:18.102430 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bggx2\" (UniqueName: \"kubernetes.io/projected/567ff26d-1c6e-4117-869d-3bfcb1a705fc-kube-api-access-bggx2\") pod \"neutron-db-create-9zm4f\" (UID: \"567ff26d-1c6e-4117-869d-3bfcb1a705fc\") " pod="openstack-kuttl-tests/neutron-db-create-9zm4f" Jan 20 16:57:18 crc kubenswrapper[4558]: I0120 16:57:18.102459 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xhm8z\" (UniqueName: \"kubernetes.io/projected/cde9092f-c62d-406c-b800-87c38b56402b-kube-api-access-xhm8z\") pod \"keystone-db-sync-wpq8b\" (UID: \"cde9092f-c62d-406c-b800-87c38b56402b\") " pod="openstack-kuttl-tests/keystone-db-sync-wpq8b" Jan 20 16:57:18 crc kubenswrapper[4558]: I0120 16:57:18.102489 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/567ff26d-1c6e-4117-869d-3bfcb1a705fc-operator-scripts\") pod 
\"neutron-db-create-9zm4f\" (UID: \"567ff26d-1c6e-4117-869d-3bfcb1a705fc\") " pod="openstack-kuttl-tests/neutron-db-create-9zm4f" Jan 20 16:57:18 crc kubenswrapper[4558]: I0120 16:57:18.103289 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/567ff26d-1c6e-4117-869d-3bfcb1a705fc-operator-scripts\") pod \"neutron-db-create-9zm4f\" (UID: \"567ff26d-1c6e-4117-869d-3bfcb1a705fc\") " pod="openstack-kuttl-tests/neutron-db-create-9zm4f" Jan 20 16:57:18 crc kubenswrapper[4558]: I0120 16:57:18.103835 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f4589cf0-10b5-4bd9-a06d-58481ece189a-operator-scripts\") pod \"neutron-a57a-account-create-update-76pmk\" (UID: \"f4589cf0-10b5-4bd9-a06d-58481ece189a\") " pod="openstack-kuttl-tests/neutron-a57a-account-create-update-76pmk" Jan 20 16:57:18 crc kubenswrapper[4558]: I0120 16:57:18.110453 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cde9092f-c62d-406c-b800-87c38b56402b-combined-ca-bundle\") pod \"keystone-db-sync-wpq8b\" (UID: \"cde9092f-c62d-406c-b800-87c38b56402b\") " pod="openstack-kuttl-tests/keystone-db-sync-wpq8b" Jan 20 16:57:18 crc kubenswrapper[4558]: I0120 16:57:18.115768 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cde9092f-c62d-406c-b800-87c38b56402b-config-data\") pod \"keystone-db-sync-wpq8b\" (UID: \"cde9092f-c62d-406c-b800-87c38b56402b\") " pod="openstack-kuttl-tests/keystone-db-sync-wpq8b" Jan 20 16:57:18 crc kubenswrapper[4558]: I0120 16:57:18.117443 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bggx2\" (UniqueName: \"kubernetes.io/projected/567ff26d-1c6e-4117-869d-3bfcb1a705fc-kube-api-access-bggx2\") pod \"neutron-db-create-9zm4f\" (UID: \"567ff26d-1c6e-4117-869d-3bfcb1a705fc\") " pod="openstack-kuttl-tests/neutron-db-create-9zm4f" Jan 20 16:57:18 crc kubenswrapper[4558]: I0120 16:57:18.123895 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sr4kn\" (UniqueName: \"kubernetes.io/projected/f4589cf0-10b5-4bd9-a06d-58481ece189a-kube-api-access-sr4kn\") pod \"neutron-a57a-account-create-update-76pmk\" (UID: \"f4589cf0-10b5-4bd9-a06d-58481ece189a\") " pod="openstack-kuttl-tests/neutron-a57a-account-create-update-76pmk" Jan 20 16:57:18 crc kubenswrapper[4558]: I0120 16:57:18.124062 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xhm8z\" (UniqueName: \"kubernetes.io/projected/cde9092f-c62d-406c-b800-87c38b56402b-kube-api-access-xhm8z\") pod \"keystone-db-sync-wpq8b\" (UID: \"cde9092f-c62d-406c-b800-87c38b56402b\") " pod="openstack-kuttl-tests/keystone-db-sync-wpq8b" Jan 20 16:57:18 crc kubenswrapper[4558]: I0120 16:57:18.124222 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-db-create-9zm4f" Jan 20 16:57:18 crc kubenswrapper[4558]: I0120 16:57:18.129796 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-a57a-account-create-update-76pmk" Jan 20 16:57:18 crc kubenswrapper[4558]: I0120 16:57:18.191207 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-db-sync-wpq8b" Jan 20 16:57:18 crc kubenswrapper[4558]: I0120 16:57:18.219393 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-gkn7h"] Jan 20 16:57:18 crc kubenswrapper[4558]: I0120 16:57:18.225695 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-gkn7h"] Jan 20 16:57:18 crc kubenswrapper[4558]: I0120 16:57:18.304446 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-db-create-6cd95"] Jan 20 16:57:18 crc kubenswrapper[4558]: I0120 16:57:18.364450 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-jlgl4"] Jan 20 16:57:18 crc kubenswrapper[4558]: I0120 16:57:18.364824 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-jlgl4" podUID="78267dd1-57b0-4d3b-9578-42989b2364f5" containerName="registry-server" containerID="cri-o://64aa6b60f7b0c5125431161cac6c581a9f41d676d84ade2ff99a1a3a675693ab" gracePeriod=2 Jan 20 16:57:18 crc kubenswrapper[4558]: I0120 16:57:18.370395 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-eb81-account-create-update-sr6r9"] Jan 20 16:57:18 crc kubenswrapper[4558]: I0120 16:57:18.430090 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-db-create-w6fpd"] Jan 20 16:57:18 crc kubenswrapper[4558]: W0120 16:57:18.494237 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7b3471c1_c42b_4be4_96fe_7e43ef86ba15.slice/crio-de3cc7b04b49083d959ad73d76a4acb12b6096e997b92079a3f2a19d49216c7d WatchSource:0}: Error finding container de3cc7b04b49083d959ad73d76a4acb12b6096e997b92079a3f2a19d49216c7d: Status 404 returned error can't find the container with id de3cc7b04b49083d959ad73d76a4acb12b6096e997b92079a3f2a19d49216c7d Jan 20 16:57:18 crc kubenswrapper[4558]: I0120 16:57:18.549397 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-f537-account-create-update-5crz6"] Jan 20 16:57:18 crc kubenswrapper[4558]: W0120 16:57:18.555676 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode0c2bc6c_ea80_49a0_876f_f0a4d29c12bd.slice/crio-8ddba1e78843209db2f234831f4f16164a18c124a044005e0f3176b45c507a63 WatchSource:0}: Error finding container 8ddba1e78843209db2f234831f4f16164a18c124a044005e0f3176b45c507a63: Status 404 returned error can't find the container with id 8ddba1e78843209db2f234831f4f16164a18c124a044005e0f3176b45c507a63 Jan 20 16:57:18 crc kubenswrapper[4558]: I0120 16:57:18.574084 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f1383f04-a9ba-47b5-baaa-d160c552a567" path="/var/lib/kubelet/pods/f1383f04-a9ba-47b5-baaa-d160c552a567/volumes" Jan 20 16:57:18 crc kubenswrapper[4558]: I0120 16:57:18.967298 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-create-w6fpd" event={"ID":"7b3471c1-c42b-4be4-96fe-7e43ef86ba15","Type":"ContainerStarted","Data":"6b4dcc6e19550c83acc36231d55fd104b76cb31fdde78c00b5a97e00712e1b7b"} Jan 20 16:57:18 crc kubenswrapper[4558]: I0120 16:57:18.967515 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-create-w6fpd" 
event={"ID":"7b3471c1-c42b-4be4-96fe-7e43ef86ba15","Type":"ContainerStarted","Data":"de3cc7b04b49083d959ad73d76a4acb12b6096e997b92079a3f2a19d49216c7d"} Jan 20 16:57:18 crc kubenswrapper[4558]: I0120 16:57:18.969339 4558 generic.go:334] "Generic (PLEG): container finished" podID="d9be5f16-16de-4ae6-9007-cca3e80501b9" containerID="7477443fea20b5ececf459b62e43d6fa92bceedf2e0c238ff19891cd39e64fc4" exitCode=0 Jan 20 16:57:18 crc kubenswrapper[4558]: I0120 16:57:18.969387 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-58b8ddd7c-rr7mz" event={"ID":"d9be5f16-16de-4ae6-9007-cca3e80501b9","Type":"ContainerDied","Data":"7477443fea20b5ececf459b62e43d6fa92bceedf2e0c238ff19891cd39e64fc4"} Jan 20 16:57:18 crc kubenswrapper[4558]: I0120 16:57:18.969410 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-58b8ddd7c-rr7mz" event={"ID":"d9be5f16-16de-4ae6-9007-cca3e80501b9","Type":"ContainerStarted","Data":"393c240e307e6f1c510b640fb0e60d5df3df97c49287fbfe0533b6fedc013ca9"} Jan 20 16:57:18 crc kubenswrapper[4558]: I0120 16:57:18.998094 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-f537-account-create-update-5crz6" event={"ID":"e0c2bc6c-ea80-49a0-876f-f0a4d29c12bd","Type":"ContainerStarted","Data":"7405f2673cf5b03bf250ba407fb04abd2bfacbb3ce9e9f78626cf77ca6e5282f"} Jan 20 16:57:18 crc kubenswrapper[4558]: I0120 16:57:18.998137 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-f537-account-create-update-5crz6" event={"ID":"e0c2bc6c-ea80-49a0-876f-f0a4d29c12bd","Type":"ContainerStarted","Data":"8ddba1e78843209db2f234831f4f16164a18c124a044005e0f3176b45c507a63"} Jan 20 16:57:19 crc kubenswrapper[4558]: I0120 16:57:19.012475 4558 generic.go:334] "Generic (PLEG): container finished" podID="22adc061-9ded-4378-abcc-1e86a3985839" containerID="ce1f05df89b8a08d15ce18251031109452f2c5fc73b5986865a48c1a2ee95f36" exitCode=0 Jan 20 16:57:19 crc kubenswrapper[4558]: I0120 16:57:19.012556 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-create-6cd95" event={"ID":"22adc061-9ded-4378-abcc-1e86a3985839","Type":"ContainerDied","Data":"ce1f05df89b8a08d15ce18251031109452f2c5fc73b5986865a48c1a2ee95f36"} Jan 20 16:57:19 crc kubenswrapper[4558]: I0120 16:57:19.012581 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-create-6cd95" event={"ID":"22adc061-9ded-4378-abcc-1e86a3985839","Type":"ContainerStarted","Data":"49f7b3fd4a67b64e1697771f156d606f917d8465c17bce958776e2b6d4a2930b"} Jan 20 16:57:19 crc kubenswrapper[4558]: I0120 16:57:19.014136 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-db-create-w6fpd" podStartSLOduration=2.014121235 podStartE2EDuration="2.014121235s" podCreationTimestamp="2026-01-20 16:57:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:57:19.012828824 +0000 UTC m=+932.773166792" watchObservedRunningTime="2026-01-20 16:57:19.014121235 +0000 UTC m=+932.774459203" Jan 20 16:57:19 crc kubenswrapper[4558]: I0120 16:57:19.049226 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/cinder-f537-account-create-update-5crz6" podStartSLOduration=2.049212073 podStartE2EDuration="2.049212073s" podCreationTimestamp="2026-01-20 16:57:17 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:57:19.04821094 +0000 UTC m=+932.808548907" watchObservedRunningTime="2026-01-20 16:57:19.049212073 +0000 UTC m=+932.809550040" Jan 20 16:57:19 crc kubenswrapper[4558]: I0120 16:57:19.061590 4558 generic.go:334] "Generic (PLEG): container finished" podID="78267dd1-57b0-4d3b-9578-42989b2364f5" containerID="64aa6b60f7b0c5125431161cac6c581a9f41d676d84ade2ff99a1a3a675693ab" exitCode=0 Jan 20 16:57:19 crc kubenswrapper[4558]: I0120 16:57:19.061672 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jlgl4" event={"ID":"78267dd1-57b0-4d3b-9578-42989b2364f5","Type":"ContainerDied","Data":"64aa6b60f7b0c5125431161cac6c581a9f41d676d84ade2ff99a1a3a675693ab"} Jan 20 16:57:19 crc kubenswrapper[4558]: I0120 16:57:19.099369 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-eb81-account-create-update-sr6r9" event={"ID":"cdd71514-e929-4a96-b799-91eecad5ac02","Type":"ContainerStarted","Data":"329b7d530c48ee6b77abaf4d3f063c42bc1f88503f6401038f6906a9d41fa856"} Jan 20 16:57:19 crc kubenswrapper[4558]: I0120 16:57:19.099411 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-eb81-account-create-update-sr6r9" event={"ID":"cdd71514-e929-4a96-b799-91eecad5ac02","Type":"ContainerStarted","Data":"70bb092ba585f08e467b5d341469444954403d77222f504b678d478920e619d3"} Jan 20 16:57:19 crc kubenswrapper[4558]: I0120 16:57:19.138492 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-eb81-account-create-update-sr6r9" podStartSLOduration=2.13847543 podStartE2EDuration="2.13847543s" podCreationTimestamp="2026-01-20 16:57:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:57:19.129327938 +0000 UTC m=+932.889665905" watchObservedRunningTime="2026-01-20 16:57:19.13847543 +0000 UTC m=+932.898813396" Jan 20 16:57:19 crc kubenswrapper[4558]: I0120 16:57:19.241387 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-db-create-9zm4f"] Jan 20 16:57:19 crc kubenswrapper[4558]: I0120 16:57:19.247990 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-a57a-account-create-update-76pmk"] Jan 20 16:57:19 crc kubenswrapper[4558]: I0120 16:57:19.252828 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-db-sync-wpq8b"] Jan 20 16:57:19 crc kubenswrapper[4558]: W0120 16:57:19.298360 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf4589cf0_10b5_4bd9_a06d_58481ece189a.slice/crio-aaa598a3ed80584a968977328a5c696efece7c98f340cda4a41913d370d08745 WatchSource:0}: Error finding container aaa598a3ed80584a968977328a5c696efece7c98f340cda4a41913d370d08745: Status 404 returned error can't find the container with id aaa598a3ed80584a968977328a5c696efece7c98f340cda4a41913d370d08745 Jan 20 16:57:19 crc kubenswrapper[4558]: W0120 16:57:19.328855 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcde9092f_c62d_406c_b800_87c38b56402b.slice/crio-1fcc92b5b23a9573e011ec7fffcf1dd2ece5f714962d3c82d55acf6976a11e6d WatchSource:0}: Error finding container 
1fcc92b5b23a9573e011ec7fffcf1dd2ece5f714962d3c82d55acf6976a11e6d: Status 404 returned error can't find the container with id 1fcc92b5b23a9573e011ec7fffcf1dd2ece5f714962d3c82d55acf6976a11e6d Jan 20 16:57:19 crc kubenswrapper[4558]: I0120 16:57:19.332701 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jlgl4" Jan 20 16:57:19 crc kubenswrapper[4558]: W0120 16:57:19.334749 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod567ff26d_1c6e_4117_869d_3bfcb1a705fc.slice/crio-a6254fffb383e0d014a2ea0224050a5fb1ef7d0f14c0519b3febf944c9e4f747 WatchSource:0}: Error finding container a6254fffb383e0d014a2ea0224050a5fb1ef7d0f14c0519b3febf944c9e4f747: Status 404 returned error can't find the container with id a6254fffb383e0d014a2ea0224050a5fb1ef7d0f14c0519b3febf944c9e4f747 Jan 20 16:57:19 crc kubenswrapper[4558]: I0120 16:57:19.350704 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78267dd1-57b0-4d3b-9578-42989b2364f5-catalog-content\") pod \"78267dd1-57b0-4d3b-9578-42989b2364f5\" (UID: \"78267dd1-57b0-4d3b-9578-42989b2364f5\") " Jan 20 16:57:19 crc kubenswrapper[4558]: I0120 16:57:19.350791 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j9xwr\" (UniqueName: \"kubernetes.io/projected/78267dd1-57b0-4d3b-9578-42989b2364f5-kube-api-access-j9xwr\") pod \"78267dd1-57b0-4d3b-9578-42989b2364f5\" (UID: \"78267dd1-57b0-4d3b-9578-42989b2364f5\") " Jan 20 16:57:19 crc kubenswrapper[4558]: I0120 16:57:19.350809 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78267dd1-57b0-4d3b-9578-42989b2364f5-utilities\") pod \"78267dd1-57b0-4d3b-9578-42989b2364f5\" (UID: \"78267dd1-57b0-4d3b-9578-42989b2364f5\") " Jan 20 16:57:19 crc kubenswrapper[4558]: I0120 16:57:19.351612 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/78267dd1-57b0-4d3b-9578-42989b2364f5-utilities" (OuterVolumeSpecName: "utilities") pod "78267dd1-57b0-4d3b-9578-42989b2364f5" (UID: "78267dd1-57b0-4d3b-9578-42989b2364f5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 16:57:19 crc kubenswrapper[4558]: I0120 16:57:19.355522 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/78267dd1-57b0-4d3b-9578-42989b2364f5-kube-api-access-j9xwr" (OuterVolumeSpecName: "kube-api-access-j9xwr") pod "78267dd1-57b0-4d3b-9578-42989b2364f5" (UID: "78267dd1-57b0-4d3b-9578-42989b2364f5"). InnerVolumeSpecName "kube-api-access-j9xwr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:57:19 crc kubenswrapper[4558]: I0120 16:57:19.368470 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/78267dd1-57b0-4d3b-9578-42989b2364f5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "78267dd1-57b0-4d3b-9578-42989b2364f5" (UID: "78267dd1-57b0-4d3b-9578-42989b2364f5"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 16:57:19 crc kubenswrapper[4558]: I0120 16:57:19.453139 4558 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78267dd1-57b0-4d3b-9578-42989b2364f5-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:19 crc kubenswrapper[4558]: I0120 16:57:19.453343 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j9xwr\" (UniqueName: \"kubernetes.io/projected/78267dd1-57b0-4d3b-9578-42989b2364f5-kube-api-access-j9xwr\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:19 crc kubenswrapper[4558]: I0120 16:57:19.453356 4558 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78267dd1-57b0-4d3b-9578-42989b2364f5-utilities\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:20 crc kubenswrapper[4558]: I0120 16:57:20.107936 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-58b8ddd7c-rr7mz" event={"ID":"d9be5f16-16de-4ae6-9007-cca3e80501b9","Type":"ContainerStarted","Data":"45b2a75ffe97418a0ca920c583c6b2e02b1fd292c83ac512e2a46d1e00cba17b"} Jan 20 16:57:20 crc kubenswrapper[4558]: I0120 16:57:20.108071 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-58b8ddd7c-rr7mz" Jan 20 16:57:20 crc kubenswrapper[4558]: I0120 16:57:20.111751 4558 generic.go:334] "Generic (PLEG): container finished" podID="e0c2bc6c-ea80-49a0-876f-f0a4d29c12bd" containerID="7405f2673cf5b03bf250ba407fb04abd2bfacbb3ce9e9f78626cf77ca6e5282f" exitCode=0 Jan 20 16:57:20 crc kubenswrapper[4558]: I0120 16:57:20.111785 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-f537-account-create-update-5crz6" event={"ID":"e0c2bc6c-ea80-49a0-876f-f0a4d29c12bd","Type":"ContainerDied","Data":"7405f2673cf5b03bf250ba407fb04abd2bfacbb3ce9e9f78626cf77ca6e5282f"} Jan 20 16:57:20 crc kubenswrapper[4558]: I0120 16:57:20.114518 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jlgl4" event={"ID":"78267dd1-57b0-4d3b-9578-42989b2364f5","Type":"ContainerDied","Data":"5468352d1af212e5329314f98e5ab6d4b0a17092709b2773cdf084c716ff6632"} Jan 20 16:57:20 crc kubenswrapper[4558]: I0120 16:57:20.114559 4558 scope.go:117] "RemoveContainer" containerID="64aa6b60f7b0c5125431161cac6c581a9f41d676d84ade2ff99a1a3a675693ab" Jan 20 16:57:20 crc kubenswrapper[4558]: I0120 16:57:20.114567 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jlgl4" Jan 20 16:57:20 crc kubenswrapper[4558]: I0120 16:57:20.117478 4558 generic.go:334] "Generic (PLEG): container finished" podID="cdd71514-e929-4a96-b799-91eecad5ac02" containerID="329b7d530c48ee6b77abaf4d3f063c42bc1f88503f6401038f6906a9d41fa856" exitCode=0 Jan 20 16:57:20 crc kubenswrapper[4558]: I0120 16:57:20.117544 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-eb81-account-create-update-sr6r9" event={"ID":"cdd71514-e929-4a96-b799-91eecad5ac02","Type":"ContainerDied","Data":"329b7d530c48ee6b77abaf4d3f063c42bc1f88503f6401038f6906a9d41fa856"} Jan 20 16:57:20 crc kubenswrapper[4558]: I0120 16:57:20.136082 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-58b8ddd7c-rr7mz" podStartSLOduration=3.136068237 podStartE2EDuration="3.136068237s" podCreationTimestamp="2026-01-20 16:57:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:57:20.1268164 +0000 UTC m=+933.887154367" watchObservedRunningTime="2026-01-20 16:57:20.136068237 +0000 UTC m=+933.896406244" Jan 20 16:57:20 crc kubenswrapper[4558]: I0120 16:57:20.136318 4558 generic.go:334] "Generic (PLEG): container finished" podID="7b3471c1-c42b-4be4-96fe-7e43ef86ba15" containerID="6b4dcc6e19550c83acc36231d55fd104b76cb31fdde78c00b5a97e00712e1b7b" exitCode=0 Jan 20 16:57:20 crc kubenswrapper[4558]: I0120 16:57:20.136389 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-create-w6fpd" event={"ID":"7b3471c1-c42b-4be4-96fe-7e43ef86ba15","Type":"ContainerDied","Data":"6b4dcc6e19550c83acc36231d55fd104b76cb31fdde78c00b5a97e00712e1b7b"} Jan 20 16:57:20 crc kubenswrapper[4558]: I0120 16:57:20.140881 4558 generic.go:334] "Generic (PLEG): container finished" podID="567ff26d-1c6e-4117-869d-3bfcb1a705fc" containerID="2a069dfdf3bb503b2cf66c545612acb06ac462ac0963d7e87cbba6a225f63a2b" exitCode=0 Jan 20 16:57:20 crc kubenswrapper[4558]: I0120 16:57:20.140958 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-create-9zm4f" event={"ID":"567ff26d-1c6e-4117-869d-3bfcb1a705fc","Type":"ContainerDied","Data":"2a069dfdf3bb503b2cf66c545612acb06ac462ac0963d7e87cbba6a225f63a2b"} Jan 20 16:57:20 crc kubenswrapper[4558]: I0120 16:57:20.141004 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-create-9zm4f" event={"ID":"567ff26d-1c6e-4117-869d-3bfcb1a705fc","Type":"ContainerStarted","Data":"a6254fffb383e0d014a2ea0224050a5fb1ef7d0f14c0519b3febf944c9e4f747"} Jan 20 16:57:20 crc kubenswrapper[4558]: I0120 16:57:20.142548 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-sync-wpq8b" event={"ID":"cde9092f-c62d-406c-b800-87c38b56402b","Type":"ContainerStarted","Data":"1fcc92b5b23a9573e011ec7fffcf1dd2ece5f714962d3c82d55acf6976a11e6d"} Jan 20 16:57:20 crc kubenswrapper[4558]: I0120 16:57:20.150423 4558 scope.go:117] "RemoveContainer" containerID="5b65529db92b6006a9d484f63f3393e3c121c0f6cce9eed81a2a72728d86052a" Jan 20 16:57:20 crc kubenswrapper[4558]: I0120 16:57:20.152847 4558 generic.go:334] "Generic (PLEG): container finished" podID="f4589cf0-10b5-4bd9-a06d-58481ece189a" containerID="ff98c6e0fa3a0b949cb3e04983d372048c98008841cfee160a4116cbb2002809" exitCode=0 Jan 20 16:57:20 crc kubenswrapper[4558]: I0120 16:57:20.153038 4558 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-a57a-account-create-update-76pmk" event={"ID":"f4589cf0-10b5-4bd9-a06d-58481ece189a","Type":"ContainerDied","Data":"ff98c6e0fa3a0b949cb3e04983d372048c98008841cfee160a4116cbb2002809"} Jan 20 16:57:20 crc kubenswrapper[4558]: I0120 16:57:20.153063 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-a57a-account-create-update-76pmk" event={"ID":"f4589cf0-10b5-4bd9-a06d-58481ece189a","Type":"ContainerStarted","Data":"aaa598a3ed80584a968977328a5c696efece7c98f340cda4a41913d370d08745"} Jan 20 16:57:20 crc kubenswrapper[4558]: I0120 16:57:20.178930 4558 scope.go:117] "RemoveContainer" containerID="79f9ce2e887d794fcfd32a289cc5a8fc46ee6b948a8ccea14d8be3682f1f741d" Jan 20 16:57:20 crc kubenswrapper[4558]: I0120 16:57:20.211643 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-jlgl4"] Jan 20 16:57:20 crc kubenswrapper[4558]: I0120 16:57:20.216749 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-jlgl4"] Jan 20 16:57:20 crc kubenswrapper[4558]: I0120 16:57:20.428395 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-2tv5d" Jan 20 16:57:20 crc kubenswrapper[4558]: I0120 16:57:20.428432 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-2tv5d" Jan 20 16:57:20 crc kubenswrapper[4558]: I0120 16:57:20.450919 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-db-create-6cd95" Jan 20 16:57:20 crc kubenswrapper[4558]: I0120 16:57:20.467598 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/22adc061-9ded-4378-abcc-1e86a3985839-operator-scripts\") pod \"22adc061-9ded-4378-abcc-1e86a3985839\" (UID: \"22adc061-9ded-4378-abcc-1e86a3985839\") " Jan 20 16:57:20 crc kubenswrapper[4558]: I0120 16:57:20.467663 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wlbph\" (UniqueName: \"kubernetes.io/projected/22adc061-9ded-4378-abcc-1e86a3985839-kube-api-access-wlbph\") pod \"22adc061-9ded-4378-abcc-1e86a3985839\" (UID: \"22adc061-9ded-4378-abcc-1e86a3985839\") " Jan 20 16:57:20 crc kubenswrapper[4558]: I0120 16:57:20.468070 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22adc061-9ded-4378-abcc-1e86a3985839-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "22adc061-9ded-4378-abcc-1e86a3985839" (UID: "22adc061-9ded-4378-abcc-1e86a3985839"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:57:20 crc kubenswrapper[4558]: I0120 16:57:20.469492 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-2tv5d" Jan 20 16:57:20 crc kubenswrapper[4558]: I0120 16:57:20.473326 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22adc061-9ded-4378-abcc-1e86a3985839-kube-api-access-wlbph" (OuterVolumeSpecName: "kube-api-access-wlbph") pod "22adc061-9ded-4378-abcc-1e86a3985839" (UID: "22adc061-9ded-4378-abcc-1e86a3985839"). InnerVolumeSpecName "kube-api-access-wlbph". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:57:20 crc kubenswrapper[4558]: I0120 16:57:20.569637 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/22adc061-9ded-4378-abcc-1e86a3985839-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:20 crc kubenswrapper[4558]: I0120 16:57:20.569661 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wlbph\" (UniqueName: \"kubernetes.io/projected/22adc061-9ded-4378-abcc-1e86a3985839-kube-api-access-wlbph\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:20 crc kubenswrapper[4558]: I0120 16:57:20.573788 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="78267dd1-57b0-4d3b-9578-42989b2364f5" path="/var/lib/kubelet/pods/78267dd1-57b0-4d3b-9578-42989b2364f5/volumes" Jan 20 16:57:21 crc kubenswrapper[4558]: I0120 16:57:21.162144 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-create-6cd95" event={"ID":"22adc061-9ded-4378-abcc-1e86a3985839","Type":"ContainerDied","Data":"49f7b3fd4a67b64e1697771f156d606f917d8465c17bce958776e2b6d4a2930b"} Jan 20 16:57:21 crc kubenswrapper[4558]: I0120 16:57:21.162395 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="49f7b3fd4a67b64e1697771f156d606f917d8465c17bce958776e2b6d4a2930b" Jan 20 16:57:21 crc kubenswrapper[4558]: I0120 16:57:21.162214 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-db-create-6cd95" Jan 20 16:57:21 crc kubenswrapper[4558]: I0120 16:57:21.207242 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-2tv5d" Jan 20 16:57:21 crc kubenswrapper[4558]: I0120 16:57:21.284113 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-4df59" Jan 20 16:57:21 crc kubenswrapper[4558]: I0120 16:57:21.284758 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-4df59" Jan 20 16:57:21 crc kubenswrapper[4558]: I0120 16:57:21.322357 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-4df59" Jan 20 16:57:22 crc kubenswrapper[4558]: I0120 16:57:22.199559 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-4df59" Jan 20 16:57:22 crc kubenswrapper[4558]: I0120 16:57:22.763688 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-2tv5d"] Jan 20 16:57:23 crc kubenswrapper[4558]: I0120 16:57:23.183034 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-2tv5d" podUID="0d7a49b7-4cfe-4d63-a510-803f562eb3f6" containerName="registry-server" containerID="cri-o://797b7b3eb600e707232f7a5510095efe322a2487ba727866d42d2933b005e7e4" gracePeriod=2 Jan 20 16:57:23 crc kubenswrapper[4558]: I0120 16:57:23.234119 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/root-account-create-update-hmgt7"] Jan 20 16:57:23 crc kubenswrapper[4558]: E0120 16:57:23.234627 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78267dd1-57b0-4d3b-9578-42989b2364f5" containerName="extract-utilities" Jan 20 16:57:23 crc kubenswrapper[4558]: I0120 16:57:23.234640 4558 
state_mem.go:107] "Deleted CPUSet assignment" podUID="78267dd1-57b0-4d3b-9578-42989b2364f5" containerName="extract-utilities" Jan 20 16:57:23 crc kubenswrapper[4558]: E0120 16:57:23.234652 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22adc061-9ded-4378-abcc-1e86a3985839" containerName="mariadb-database-create" Jan 20 16:57:23 crc kubenswrapper[4558]: I0120 16:57:23.234657 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="22adc061-9ded-4378-abcc-1e86a3985839" containerName="mariadb-database-create" Jan 20 16:57:23 crc kubenswrapper[4558]: E0120 16:57:23.234673 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78267dd1-57b0-4d3b-9578-42989b2364f5" containerName="registry-server" Jan 20 16:57:23 crc kubenswrapper[4558]: I0120 16:57:23.234679 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="78267dd1-57b0-4d3b-9578-42989b2364f5" containerName="registry-server" Jan 20 16:57:23 crc kubenswrapper[4558]: E0120 16:57:23.234692 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78267dd1-57b0-4d3b-9578-42989b2364f5" containerName="extract-content" Jan 20 16:57:23 crc kubenswrapper[4558]: I0120 16:57:23.234698 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="78267dd1-57b0-4d3b-9578-42989b2364f5" containerName="extract-content" Jan 20 16:57:23 crc kubenswrapper[4558]: I0120 16:57:23.234876 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="22adc061-9ded-4378-abcc-1e86a3985839" containerName="mariadb-database-create" Jan 20 16:57:23 crc kubenswrapper[4558]: I0120 16:57:23.234893 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="78267dd1-57b0-4d3b-9578-42989b2364f5" containerName="registry-server" Jan 20 16:57:23 crc kubenswrapper[4558]: I0120 16:57:23.235442 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-hmgt7" Jan 20 16:57:23 crc kubenswrapper[4558]: I0120 16:57:23.236902 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"openstack-cell1-mariadb-root-db-secret" Jan 20 16:57:23 crc kubenswrapper[4558]: I0120 16:57:23.244560 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-hmgt7"] Jan 20 16:57:23 crc kubenswrapper[4558]: I0120 16:57:23.330021 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t8lxh\" (UniqueName: \"kubernetes.io/projected/0cd03d7d-cae3-4c24-90db-5f8598078f52-kube-api-access-t8lxh\") pod \"root-account-create-update-hmgt7\" (UID: \"0cd03d7d-cae3-4c24-90db-5f8598078f52\") " pod="openstack-kuttl-tests/root-account-create-update-hmgt7" Jan 20 16:57:23 crc kubenswrapper[4558]: I0120 16:57:23.330106 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0cd03d7d-cae3-4c24-90db-5f8598078f52-operator-scripts\") pod \"root-account-create-update-hmgt7\" (UID: \"0cd03d7d-cae3-4c24-90db-5f8598078f52\") " pod="openstack-kuttl-tests/root-account-create-update-hmgt7" Jan 20 16:57:23 crc kubenswrapper[4558]: I0120 16:57:23.432021 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0cd03d7d-cae3-4c24-90db-5f8598078f52-operator-scripts\") pod \"root-account-create-update-hmgt7\" (UID: \"0cd03d7d-cae3-4c24-90db-5f8598078f52\") " pod="openstack-kuttl-tests/root-account-create-update-hmgt7" Jan 20 16:57:23 crc kubenswrapper[4558]: I0120 16:57:23.432110 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t8lxh\" (UniqueName: \"kubernetes.io/projected/0cd03d7d-cae3-4c24-90db-5f8598078f52-kube-api-access-t8lxh\") pod \"root-account-create-update-hmgt7\" (UID: \"0cd03d7d-cae3-4c24-90db-5f8598078f52\") " pod="openstack-kuttl-tests/root-account-create-update-hmgt7" Jan 20 16:57:23 crc kubenswrapper[4558]: I0120 16:57:23.432781 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0cd03d7d-cae3-4c24-90db-5f8598078f52-operator-scripts\") pod \"root-account-create-update-hmgt7\" (UID: \"0cd03d7d-cae3-4c24-90db-5f8598078f52\") " pod="openstack-kuttl-tests/root-account-create-update-hmgt7" Jan 20 16:57:23 crc kubenswrapper[4558]: I0120 16:57:23.458302 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t8lxh\" (UniqueName: \"kubernetes.io/projected/0cd03d7d-cae3-4c24-90db-5f8598078f52-kube-api-access-t8lxh\") pod \"root-account-create-update-hmgt7\" (UID: \"0cd03d7d-cae3-4c24-90db-5f8598078f52\") " pod="openstack-kuttl-tests/root-account-create-update-hmgt7" Jan 20 16:57:23 crc kubenswrapper[4558]: I0120 16:57:23.590964 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-hmgt7" Jan 20 16:57:23 crc kubenswrapper[4558]: I0120 16:57:23.763538 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4df59"] Jan 20 16:57:24 crc kubenswrapper[4558]: I0120 16:57:24.190916 4558 generic.go:334] "Generic (PLEG): container finished" podID="0d7a49b7-4cfe-4d63-a510-803f562eb3f6" containerID="797b7b3eb600e707232f7a5510095efe322a2487ba727866d42d2933b005e7e4" exitCode=0 Jan 20 16:57:24 crc kubenswrapper[4558]: I0120 16:57:24.191006 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2tv5d" event={"ID":"0d7a49b7-4cfe-4d63-a510-803f562eb3f6","Type":"ContainerDied","Data":"797b7b3eb600e707232f7a5510095efe322a2487ba727866d42d2933b005e7e4"} Jan 20 16:57:25 crc kubenswrapper[4558]: I0120 16:57:25.196010 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-4df59" podUID="32e4598f-f0a7-4d4f-a04f-e37171e73905" containerName="registry-server" containerID="cri-o://025dfd613ceca62d521214b8cf184ed58f632222b81d934ad8642198b62b45a2" gracePeriod=2 Jan 20 16:57:26 crc kubenswrapper[4558]: I0120 16:57:26.204303 4558 generic.go:334] "Generic (PLEG): container finished" podID="32e4598f-f0a7-4d4f-a04f-e37171e73905" containerID="025dfd613ceca62d521214b8cf184ed58f632222b81d934ad8642198b62b45a2" exitCode=0 Jan 20 16:57:26 crc kubenswrapper[4558]: I0120 16:57:26.204322 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4df59" event={"ID":"32e4598f-f0a7-4d4f-a04f-e37171e73905","Type":"ContainerDied","Data":"025dfd613ceca62d521214b8cf184ed58f632222b81d934ad8642198b62b45a2"} Jan 20 16:57:26 crc kubenswrapper[4558]: I0120 16:57:26.648634 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-eb81-account-create-update-sr6r9" Jan 20 16:57:26 crc kubenswrapper[4558]: I0120 16:57:26.650119 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-db-create-9zm4f" Jan 20 16:57:26 crc kubenswrapper[4558]: I0120 16:57:26.654011 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-db-create-w6fpd" Jan 20 16:57:26 crc kubenswrapper[4558]: I0120 16:57:26.661656 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-f537-account-create-update-5crz6" Jan 20 16:57:26 crc kubenswrapper[4558]: I0120 16:57:26.670093 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cdd71514-e929-4a96-b799-91eecad5ac02-operator-scripts\") pod \"cdd71514-e929-4a96-b799-91eecad5ac02\" (UID: \"cdd71514-e929-4a96-b799-91eecad5ac02\") " Jan 20 16:57:26 crc kubenswrapper[4558]: I0120 16:57:26.670128 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/567ff26d-1c6e-4117-869d-3bfcb1a705fc-operator-scripts\") pod \"567ff26d-1c6e-4117-869d-3bfcb1a705fc\" (UID: \"567ff26d-1c6e-4117-869d-3bfcb1a705fc\") " Jan 20 16:57:26 crc kubenswrapper[4558]: I0120 16:57:26.670242 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7b3471c1-c42b-4be4-96fe-7e43ef86ba15-operator-scripts\") pod \"7b3471c1-c42b-4be4-96fe-7e43ef86ba15\" (UID: \"7b3471c1-c42b-4be4-96fe-7e43ef86ba15\") " Jan 20 16:57:26 crc kubenswrapper[4558]: I0120 16:57:26.670288 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bggx2\" (UniqueName: \"kubernetes.io/projected/567ff26d-1c6e-4117-869d-3bfcb1a705fc-kube-api-access-bggx2\") pod \"567ff26d-1c6e-4117-869d-3bfcb1a705fc\" (UID: \"567ff26d-1c6e-4117-869d-3bfcb1a705fc\") " Jan 20 16:57:26 crc kubenswrapper[4558]: I0120 16:57:26.670307 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-825dr\" (UniqueName: \"kubernetes.io/projected/7b3471c1-c42b-4be4-96fe-7e43ef86ba15-kube-api-access-825dr\") pod \"7b3471c1-c42b-4be4-96fe-7e43ef86ba15\" (UID: \"7b3471c1-c42b-4be4-96fe-7e43ef86ba15\") " Jan 20 16:57:26 crc kubenswrapper[4558]: I0120 16:57:26.670338 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s62q7\" (UniqueName: \"kubernetes.io/projected/cdd71514-e929-4a96-b799-91eecad5ac02-kube-api-access-s62q7\") pod \"cdd71514-e929-4a96-b799-91eecad5ac02\" (UID: \"cdd71514-e929-4a96-b799-91eecad5ac02\") " Jan 20 16:57:26 crc kubenswrapper[4558]: I0120 16:57:26.671620 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cdd71514-e929-4a96-b799-91eecad5ac02-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "cdd71514-e929-4a96-b799-91eecad5ac02" (UID: "cdd71514-e929-4a96-b799-91eecad5ac02"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:57:26 crc kubenswrapper[4558]: I0120 16:57:26.671964 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/567ff26d-1c6e-4117-869d-3bfcb1a705fc-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "567ff26d-1c6e-4117-869d-3bfcb1a705fc" (UID: "567ff26d-1c6e-4117-869d-3bfcb1a705fc"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:57:26 crc kubenswrapper[4558]: I0120 16:57:26.672318 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7b3471c1-c42b-4be4-96fe-7e43ef86ba15-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7b3471c1-c42b-4be4-96fe-7e43ef86ba15" (UID: "7b3471c1-c42b-4be4-96fe-7e43ef86ba15"). 
InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:57:26 crc kubenswrapper[4558]: I0120 16:57:26.685464 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/567ff26d-1c6e-4117-869d-3bfcb1a705fc-kube-api-access-bggx2" (OuterVolumeSpecName: "kube-api-access-bggx2") pod "567ff26d-1c6e-4117-869d-3bfcb1a705fc" (UID: "567ff26d-1c6e-4117-869d-3bfcb1a705fc"). InnerVolumeSpecName "kube-api-access-bggx2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:57:26 crc kubenswrapper[4558]: I0120 16:57:26.688721 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cdd71514-e929-4a96-b799-91eecad5ac02-kube-api-access-s62q7" (OuterVolumeSpecName: "kube-api-access-s62q7") pod "cdd71514-e929-4a96-b799-91eecad5ac02" (UID: "cdd71514-e929-4a96-b799-91eecad5ac02"). InnerVolumeSpecName "kube-api-access-s62q7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:57:26 crc kubenswrapper[4558]: I0120 16:57:26.693706 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7b3471c1-c42b-4be4-96fe-7e43ef86ba15-kube-api-access-825dr" (OuterVolumeSpecName: "kube-api-access-825dr") pod "7b3471c1-c42b-4be4-96fe-7e43ef86ba15" (UID: "7b3471c1-c42b-4be4-96fe-7e43ef86ba15"). InnerVolumeSpecName "kube-api-access-825dr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:57:26 crc kubenswrapper[4558]: I0120 16:57:26.699976 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-a57a-account-create-update-76pmk" Jan 20 16:57:26 crc kubenswrapper[4558]: I0120 16:57:26.772253 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f4589cf0-10b5-4bd9-a06d-58481ece189a-operator-scripts\") pod \"f4589cf0-10b5-4bd9-a06d-58481ece189a\" (UID: \"f4589cf0-10b5-4bd9-a06d-58481ece189a\") " Jan 20 16:57:26 crc kubenswrapper[4558]: I0120 16:57:26.772343 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e0c2bc6c-ea80-49a0-876f-f0a4d29c12bd-operator-scripts\") pod \"e0c2bc6c-ea80-49a0-876f-f0a4d29c12bd\" (UID: \"e0c2bc6c-ea80-49a0-876f-f0a4d29c12bd\") " Jan 20 16:57:26 crc kubenswrapper[4558]: I0120 16:57:26.772424 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sr4kn\" (UniqueName: \"kubernetes.io/projected/f4589cf0-10b5-4bd9-a06d-58481ece189a-kube-api-access-sr4kn\") pod \"f4589cf0-10b5-4bd9-a06d-58481ece189a\" (UID: \"f4589cf0-10b5-4bd9-a06d-58481ece189a\") " Jan 20 16:57:26 crc kubenswrapper[4558]: I0120 16:57:26.772463 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jd9hj\" (UniqueName: \"kubernetes.io/projected/e0c2bc6c-ea80-49a0-876f-f0a4d29c12bd-kube-api-access-jd9hj\") pod \"e0c2bc6c-ea80-49a0-876f-f0a4d29c12bd\" (UID: \"e0c2bc6c-ea80-49a0-876f-f0a4d29c12bd\") " Jan 20 16:57:26 crc kubenswrapper[4558]: I0120 16:57:26.772868 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7b3471c1-c42b-4be4-96fe-7e43ef86ba15-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:26 crc kubenswrapper[4558]: I0120 16:57:26.772881 4558 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-bggx2\" (UniqueName: \"kubernetes.io/projected/567ff26d-1c6e-4117-869d-3bfcb1a705fc-kube-api-access-bggx2\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:26 crc kubenswrapper[4558]: I0120 16:57:26.772934 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-825dr\" (UniqueName: \"kubernetes.io/projected/7b3471c1-c42b-4be4-96fe-7e43ef86ba15-kube-api-access-825dr\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:26 crc kubenswrapper[4558]: I0120 16:57:26.772943 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s62q7\" (UniqueName: \"kubernetes.io/projected/cdd71514-e929-4a96-b799-91eecad5ac02-kube-api-access-s62q7\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:26 crc kubenswrapper[4558]: I0120 16:57:26.772951 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cdd71514-e929-4a96-b799-91eecad5ac02-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:26 crc kubenswrapper[4558]: I0120 16:57:26.772962 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/567ff26d-1c6e-4117-869d-3bfcb1a705fc-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:26 crc kubenswrapper[4558]: I0120 16:57:26.774326 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f4589cf0-10b5-4bd9-a06d-58481ece189a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f4589cf0-10b5-4bd9-a06d-58481ece189a" (UID: "f4589cf0-10b5-4bd9-a06d-58481ece189a"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:57:26 crc kubenswrapper[4558]: I0120 16:57:26.774458 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e0c2bc6c-ea80-49a0-876f-f0a4d29c12bd-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e0c2bc6c-ea80-49a0-876f-f0a4d29c12bd" (UID: "e0c2bc6c-ea80-49a0-876f-f0a4d29c12bd"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:57:26 crc kubenswrapper[4558]: I0120 16:57:26.779420 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f4589cf0-10b5-4bd9-a06d-58481ece189a-kube-api-access-sr4kn" (OuterVolumeSpecName: "kube-api-access-sr4kn") pod "f4589cf0-10b5-4bd9-a06d-58481ece189a" (UID: "f4589cf0-10b5-4bd9-a06d-58481ece189a"). InnerVolumeSpecName "kube-api-access-sr4kn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:57:26 crc kubenswrapper[4558]: I0120 16:57:26.779596 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e0c2bc6c-ea80-49a0-876f-f0a4d29c12bd-kube-api-access-jd9hj" (OuterVolumeSpecName: "kube-api-access-jd9hj") pod "e0c2bc6c-ea80-49a0-876f-f0a4d29c12bd" (UID: "e0c2bc6c-ea80-49a0-876f-f0a4d29c12bd"). InnerVolumeSpecName "kube-api-access-jd9hj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:57:26 crc kubenswrapper[4558]: I0120 16:57:26.873503 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f4589cf0-10b5-4bd9-a06d-58481ece189a-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:26 crc kubenswrapper[4558]: I0120 16:57:26.873532 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e0c2bc6c-ea80-49a0-876f-f0a4d29c12bd-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:26 crc kubenswrapper[4558]: I0120 16:57:26.873543 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sr4kn\" (UniqueName: \"kubernetes.io/projected/f4589cf0-10b5-4bd9-a06d-58481ece189a-kube-api-access-sr4kn\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:26 crc kubenswrapper[4558]: I0120 16:57:26.873553 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jd9hj\" (UniqueName: \"kubernetes.io/projected/e0c2bc6c-ea80-49a0-876f-f0a4d29c12bd-kube-api-access-jd9hj\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:26 crc kubenswrapper[4558]: I0120 16:57:26.900249 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4df59" Jan 20 16:57:26 crc kubenswrapper[4558]: I0120 16:57:26.904857 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2tv5d" Jan 20 16:57:26 crc kubenswrapper[4558]: I0120 16:57:26.974691 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t4bfd\" (UniqueName: \"kubernetes.io/projected/32e4598f-f0a7-4d4f-a04f-e37171e73905-kube-api-access-t4bfd\") pod \"32e4598f-f0a7-4d4f-a04f-e37171e73905\" (UID: \"32e4598f-f0a7-4d4f-a04f-e37171e73905\") " Jan 20 16:57:26 crc kubenswrapper[4558]: I0120 16:57:26.974751 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0d7a49b7-4cfe-4d63-a510-803f562eb3f6-catalog-content\") pod \"0d7a49b7-4cfe-4d63-a510-803f562eb3f6\" (UID: \"0d7a49b7-4cfe-4d63-a510-803f562eb3f6\") " Jan 20 16:57:26 crc kubenswrapper[4558]: I0120 16:57:26.974842 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/32e4598f-f0a7-4d4f-a04f-e37171e73905-catalog-content\") pod \"32e4598f-f0a7-4d4f-a04f-e37171e73905\" (UID: \"32e4598f-f0a7-4d4f-a04f-e37171e73905\") " Jan 20 16:57:26 crc kubenswrapper[4558]: I0120 16:57:26.974867 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0d7a49b7-4cfe-4d63-a510-803f562eb3f6-utilities\") pod \"0d7a49b7-4cfe-4d63-a510-803f562eb3f6\" (UID: \"0d7a49b7-4cfe-4d63-a510-803f562eb3f6\") " Jan 20 16:57:26 crc kubenswrapper[4558]: I0120 16:57:26.974904 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bj4tj\" (UniqueName: \"kubernetes.io/projected/0d7a49b7-4cfe-4d63-a510-803f562eb3f6-kube-api-access-bj4tj\") pod \"0d7a49b7-4cfe-4d63-a510-803f562eb3f6\" (UID: \"0d7a49b7-4cfe-4d63-a510-803f562eb3f6\") " Jan 20 16:57:26 crc kubenswrapper[4558]: I0120 16:57:26.974926 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/32e4598f-f0a7-4d4f-a04f-e37171e73905-utilities\") pod \"32e4598f-f0a7-4d4f-a04f-e37171e73905\" (UID: \"32e4598f-f0a7-4d4f-a04f-e37171e73905\") " Jan 20 16:57:26 crc kubenswrapper[4558]: I0120 16:57:26.975821 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/32e4598f-f0a7-4d4f-a04f-e37171e73905-utilities" (OuterVolumeSpecName: "utilities") pod "32e4598f-f0a7-4d4f-a04f-e37171e73905" (UID: "32e4598f-f0a7-4d4f-a04f-e37171e73905"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 16:57:26 crc kubenswrapper[4558]: I0120 16:57:26.975821 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0d7a49b7-4cfe-4d63-a510-803f562eb3f6-utilities" (OuterVolumeSpecName: "utilities") pod "0d7a49b7-4cfe-4d63-a510-803f562eb3f6" (UID: "0d7a49b7-4cfe-4d63-a510-803f562eb3f6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 16:57:26 crc kubenswrapper[4558]: I0120 16:57:26.978795 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0d7a49b7-4cfe-4d63-a510-803f562eb3f6-kube-api-access-bj4tj" (OuterVolumeSpecName: "kube-api-access-bj4tj") pod "0d7a49b7-4cfe-4d63-a510-803f562eb3f6" (UID: "0d7a49b7-4cfe-4d63-a510-803f562eb3f6"). InnerVolumeSpecName "kube-api-access-bj4tj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:57:26 crc kubenswrapper[4558]: I0120 16:57:26.979762 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/32e4598f-f0a7-4d4f-a04f-e37171e73905-kube-api-access-t4bfd" (OuterVolumeSpecName: "kube-api-access-t4bfd") pod "32e4598f-f0a7-4d4f-a04f-e37171e73905" (UID: "32e4598f-f0a7-4d4f-a04f-e37171e73905"). InnerVolumeSpecName "kube-api-access-t4bfd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:57:27 crc kubenswrapper[4558]: I0120 16:57:27.023843 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0d7a49b7-4cfe-4d63-a510-803f562eb3f6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0d7a49b7-4cfe-4d63-a510-803f562eb3f6" (UID: "0d7a49b7-4cfe-4d63-a510-803f562eb3f6"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 16:57:27 crc kubenswrapper[4558]: I0120 16:57:27.027700 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/32e4598f-f0a7-4d4f-a04f-e37171e73905-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "32e4598f-f0a7-4d4f-a04f-e37171e73905" (UID: "32e4598f-f0a7-4d4f-a04f-e37171e73905"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 16:57:27 crc kubenswrapper[4558]: I0120 16:57:27.076789 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t4bfd\" (UniqueName: \"kubernetes.io/projected/32e4598f-f0a7-4d4f-a04f-e37171e73905-kube-api-access-t4bfd\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:27 crc kubenswrapper[4558]: I0120 16:57:27.076831 4558 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0d7a49b7-4cfe-4d63-a510-803f562eb3f6-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:27 crc kubenswrapper[4558]: I0120 16:57:27.076899 4558 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/32e4598f-f0a7-4d4f-a04f-e37171e73905-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:27 crc kubenswrapper[4558]: I0120 16:57:27.076910 4558 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0d7a49b7-4cfe-4d63-a510-803f562eb3f6-utilities\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:27 crc kubenswrapper[4558]: I0120 16:57:27.077567 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bj4tj\" (UniqueName: \"kubernetes.io/projected/0d7a49b7-4cfe-4d63-a510-803f562eb3f6-kube-api-access-bj4tj\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:27 crc kubenswrapper[4558]: I0120 16:57:27.077582 4558 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/32e4598f-f0a7-4d4f-a04f-e37171e73905-utilities\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:27 crc kubenswrapper[4558]: I0120 16:57:27.093263 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-hmgt7"] Jan 20 16:57:27 crc kubenswrapper[4558]: W0120 16:57:27.098610 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0cd03d7d_cae3_4c24_90db_5f8598078f52.slice/crio-8820e290983958ac6bb1e9099eb3a9aa0751b9ccf61a00c9c7adf10611a82fce WatchSource:0}: Error finding container 8820e290983958ac6bb1e9099eb3a9aa0751b9ccf61a00c9c7adf10611a82fce: Status 404 returned error can't find the container with id 8820e290983958ac6bb1e9099eb3a9aa0751b9ccf61a00c9c7adf10611a82fce Jan 20 16:57:27 crc kubenswrapper[4558]: I0120 16:57:27.221319 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-create-9zm4f" event={"ID":"567ff26d-1c6e-4117-869d-3bfcb1a705fc","Type":"ContainerDied","Data":"a6254fffb383e0d014a2ea0224050a5fb1ef7d0f14c0519b3febf944c9e4f747"} Jan 20 16:57:27 crc kubenswrapper[4558]: I0120 16:57:27.221595 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a6254fffb383e0d014a2ea0224050a5fb1ef7d0f14c0519b3febf944c9e4f747" Jan 20 16:57:27 crc kubenswrapper[4558]: I0120 16:57:27.221325 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-db-create-9zm4f" Jan 20 16:57:27 crc kubenswrapper[4558]: I0120 16:57:27.222616 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-f537-account-create-update-5crz6" event={"ID":"e0c2bc6c-ea80-49a0-876f-f0a4d29c12bd","Type":"ContainerDied","Data":"8ddba1e78843209db2f234831f4f16164a18c124a044005e0f3176b45c507a63"} Jan 20 16:57:27 crc kubenswrapper[4558]: I0120 16:57:27.222643 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8ddba1e78843209db2f234831f4f16164a18c124a044005e0f3176b45c507a63" Jan 20 16:57:27 crc kubenswrapper[4558]: I0120 16:57:27.222689 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-f537-account-create-update-5crz6" Jan 20 16:57:27 crc kubenswrapper[4558]: I0120 16:57:27.229259 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2tv5d" event={"ID":"0d7a49b7-4cfe-4d63-a510-803f562eb3f6","Type":"ContainerDied","Data":"bf1f1e5996145a9dff7fb78312f3f1af9d466f08953a194f6d3081227a8ba368"} Jan 20 16:57:27 crc kubenswrapper[4558]: I0120 16:57:27.229307 4558 scope.go:117] "RemoveContainer" containerID="797b7b3eb600e707232f7a5510095efe322a2487ba727866d42d2933b005e7e4" Jan 20 16:57:27 crc kubenswrapper[4558]: I0120 16:57:27.229319 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2tv5d" Jan 20 16:57:27 crc kubenswrapper[4558]: I0120 16:57:27.231343 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-db-create-w6fpd" Jan 20 16:57:27 crc kubenswrapper[4558]: I0120 16:57:27.231372 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-create-w6fpd" event={"ID":"7b3471c1-c42b-4be4-96fe-7e43ef86ba15","Type":"ContainerDied","Data":"de3cc7b04b49083d959ad73d76a4acb12b6096e997b92079a3f2a19d49216c7d"} Jan 20 16:57:27 crc kubenswrapper[4558]: I0120 16:57:27.231390 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="de3cc7b04b49083d959ad73d76a4acb12b6096e997b92079a3f2a19d49216c7d" Jan 20 16:57:27 crc kubenswrapper[4558]: I0120 16:57:27.236043 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sj9bb" event={"ID":"282a24f3-5eac-4fa8-8a40-6260b94e2164","Type":"ContainerStarted","Data":"7190074912a3226238aa3a0220a473a296c63514afb80c674cda60894bc82304"} Jan 20 16:57:27 crc kubenswrapper[4558]: I0120 16:57:27.238904 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-a57a-account-create-update-76pmk" event={"ID":"f4589cf0-10b5-4bd9-a06d-58481ece189a","Type":"ContainerDied","Data":"aaa598a3ed80584a968977328a5c696efece7c98f340cda4a41913d370d08745"} Jan 20 16:57:27 crc kubenswrapper[4558]: I0120 16:57:27.238930 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="aaa598a3ed80584a968977328a5c696efece7c98f340cda4a41913d370d08745" Jan 20 16:57:27 crc kubenswrapper[4558]: I0120 16:57:27.238977 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-a57a-account-create-update-76pmk" Jan 20 16:57:27 crc kubenswrapper[4558]: I0120 16:57:27.240844 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-eb81-account-create-update-sr6r9" event={"ID":"cdd71514-e929-4a96-b799-91eecad5ac02","Type":"ContainerDied","Data":"70bb092ba585f08e467b5d341469444954403d77222f504b678d478920e619d3"} Jan 20 16:57:27 crc kubenswrapper[4558]: I0120 16:57:27.240868 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="70bb092ba585f08e467b5d341469444954403d77222f504b678d478920e619d3" Jan 20 16:57:27 crc kubenswrapper[4558]: I0120 16:57:27.240905 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-eb81-account-create-update-sr6r9" Jan 20 16:57:27 crc kubenswrapper[4558]: I0120 16:57:27.245854 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4df59" event={"ID":"32e4598f-f0a7-4d4f-a04f-e37171e73905","Type":"ContainerDied","Data":"87ae66334dab79a817fc6ad7560a15fa47a74f4462e01f78ebf21ea499f1c3d3"} Jan 20 16:57:27 crc kubenswrapper[4558]: I0120 16:57:27.245876 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4df59" Jan 20 16:57:27 crc kubenswrapper[4558]: I0120 16:57:27.247243 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-hmgt7" event={"ID":"0cd03d7d-cae3-4c24-90db-5f8598078f52","Type":"ContainerStarted","Data":"2f2f250179bc5be2a7d3735356c0ccab7139bbcd6422209731717b7c77640b5c"} Jan 20 16:57:27 crc kubenswrapper[4558]: I0120 16:57:27.247305 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-hmgt7" event={"ID":"0cd03d7d-cae3-4c24-90db-5f8598078f52","Type":"ContainerStarted","Data":"8820e290983958ac6bb1e9099eb3a9aa0751b9ccf61a00c9c7adf10611a82fce"} Jan 20 16:57:27 crc kubenswrapper[4558]: I0120 16:57:27.257456 4558 scope.go:117] "RemoveContainer" containerID="0556dfbd38b9935bab93c57889334ca1663ebdd4e2ade3f48d76bb738c108c7a" Jan 20 16:57:27 crc kubenswrapper[4558]: I0120 16:57:27.268741 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/root-account-create-update-hmgt7" podStartSLOduration=4.268727197 podStartE2EDuration="4.268727197s" podCreationTimestamp="2026-01-20 16:57:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:57:27.265892686 +0000 UTC m=+941.026230644" watchObservedRunningTime="2026-01-20 16:57:27.268727197 +0000 UTC m=+941.029065163" Jan 20 16:57:27 crc kubenswrapper[4558]: I0120 16:57:27.289828 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-2tv5d"] Jan 20 16:57:27 crc kubenswrapper[4558]: I0120 16:57:27.294657 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-2tv5d"] Jan 20 16:57:27 crc kubenswrapper[4558]: I0120 16:57:27.296692 4558 scope.go:117] "RemoveContainer" containerID="0725c6dc809a1ae646dc87fed739715135e6aa5f948621abdc42fcf6fc70dfa1" Jan 20 16:57:27 crc kubenswrapper[4558]: I0120 16:57:27.306094 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4df59"] Jan 20 16:57:27 crc kubenswrapper[4558]: I0120 
16:57:27.310530 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-4df59"] Jan 20 16:57:27 crc kubenswrapper[4558]: I0120 16:57:27.317640 4558 scope.go:117] "RemoveContainer" containerID="025dfd613ceca62d521214b8cf184ed58f632222b81d934ad8642198b62b45a2" Jan 20 16:57:27 crc kubenswrapper[4558]: I0120 16:57:27.333012 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 16:57:27 crc kubenswrapper[4558]: I0120 16:57:27.333066 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 16:57:27 crc kubenswrapper[4558]: I0120 16:57:27.333106 4558 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" Jan 20 16:57:27 crc kubenswrapper[4558]: I0120 16:57:27.333751 4558 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"caa2a6c1a9115d7646c6f8d1705231c6b0c61a1f8491c5197970cd8a3fb6bbc2"} pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 20 16:57:27 crc kubenswrapper[4558]: I0120 16:57:27.333796 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" containerID="cri-o://caa2a6c1a9115d7646c6f8d1705231c6b0c61a1f8491c5197970cd8a3fb6bbc2" gracePeriod=600 Jan 20 16:57:27 crc kubenswrapper[4558]: I0120 16:57:27.348333 4558 scope.go:117] "RemoveContainer" containerID="9188a95d3a317a681ceaade1654598890e519670adac4fb9fbc77b599cebe30c" Jan 20 16:57:27 crc kubenswrapper[4558]: I0120 16:57:27.389470 4558 scope.go:117] "RemoveContainer" containerID="09559d9e17de2bd3da60487cf4a69bf245697f188ed5b3b7161976c6d7b4c109" Jan 20 16:57:27 crc kubenswrapper[4558]: I0120 16:57:27.584653 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-58b8ddd7c-rr7mz" Jan 20 16:57:27 crc kubenswrapper[4558]: I0120 16:57:27.623592 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-mltw6"] Jan 20 16:57:27 crc kubenswrapper[4558]: I0120 16:57:27.623795 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-mltw6" podUID="72531f1f-c34a-4508-a054-8719bbaacb7c" containerName="dnsmasq-dns" containerID="cri-o://ff91bdcda632f425d0a1a57174bb650bdcf98af307215e589377f26846ce7470" gracePeriod=10 Jan 20 16:57:28 crc kubenswrapper[4558]: I0120 16:57:28.275125 4558 generic.go:334] "Generic (PLEG): container finished" podID="72531f1f-c34a-4508-a054-8719bbaacb7c" containerID="ff91bdcda632f425d0a1a57174bb650bdcf98af307215e589377f26846ce7470" exitCode=0 Jan 20 16:57:28 crc kubenswrapper[4558]: I0120 16:57:28.275299 4558 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-mltw6" event={"ID":"72531f1f-c34a-4508-a054-8719bbaacb7c","Type":"ContainerDied","Data":"ff91bdcda632f425d0a1a57174bb650bdcf98af307215e589377f26846ce7470"} Jan 20 16:57:28 crc kubenswrapper[4558]: I0120 16:57:28.278298 4558 generic.go:334] "Generic (PLEG): container finished" podID="0cd03d7d-cae3-4c24-90db-5f8598078f52" containerID="2f2f250179bc5be2a7d3735356c0ccab7139bbcd6422209731717b7c77640b5c" exitCode=0 Jan 20 16:57:28 crc kubenswrapper[4558]: I0120 16:57:28.278338 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-hmgt7" event={"ID":"0cd03d7d-cae3-4c24-90db-5f8598078f52","Type":"ContainerDied","Data":"2f2f250179bc5be2a7d3735356c0ccab7139bbcd6422209731717b7c77640b5c"} Jan 20 16:57:28 crc kubenswrapper[4558]: I0120 16:57:28.281072 4558 generic.go:334] "Generic (PLEG): container finished" podID="282a24f3-5eac-4fa8-8a40-6260b94e2164" containerID="7190074912a3226238aa3a0220a473a296c63514afb80c674cda60894bc82304" exitCode=0 Jan 20 16:57:28 crc kubenswrapper[4558]: I0120 16:57:28.281114 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sj9bb" event={"ID":"282a24f3-5eac-4fa8-8a40-6260b94e2164","Type":"ContainerDied","Data":"7190074912a3226238aa3a0220a473a296c63514afb80c674cda60894bc82304"} Jan 20 16:57:28 crc kubenswrapper[4558]: I0120 16:57:28.284232 4558 generic.go:334] "Generic (PLEG): container finished" podID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerID="caa2a6c1a9115d7646c6f8d1705231c6b0c61a1f8491c5197970cd8a3fb6bbc2" exitCode=0 Jan 20 16:57:28 crc kubenswrapper[4558]: I0120 16:57:28.284283 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerDied","Data":"caa2a6c1a9115d7646c6f8d1705231c6b0c61a1f8491c5197970cd8a3fb6bbc2"} Jan 20 16:57:28 crc kubenswrapper[4558]: I0120 16:57:28.284299 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerStarted","Data":"ed4e09803bdddac3e0ae0eefbe36e242b8c6be19d37cc1c52ac24044a0f94446"} Jan 20 16:57:28 crc kubenswrapper[4558]: I0120 16:57:28.284313 4558 scope.go:117] "RemoveContainer" containerID="5429c282dce69a1ca0312758655bba8954487a4d9cc36ecc68f43f7950925034" Jan 20 16:57:28 crc kubenswrapper[4558]: I0120 16:57:28.573956 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0d7a49b7-4cfe-4d63-a510-803f562eb3f6" path="/var/lib/kubelet/pods/0d7a49b7-4cfe-4d63-a510-803f562eb3f6/volumes" Jan 20 16:57:28 crc kubenswrapper[4558]: I0120 16:57:28.575003 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="32e4598f-f0a7-4d4f-a04f-e37171e73905" path="/var/lib/kubelet/pods/32e4598f-f0a7-4d4f-a04f-e37171e73905/volumes" Jan 20 16:57:30 crc kubenswrapper[4558]: I0120 16:57:30.130273 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-mltw6" Jan 20 16:57:30 crc kubenswrapper[4558]: I0120 16:57:30.134256 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-hmgt7" Jan 20 16:57:30 crc kubenswrapper[4558]: I0120 16:57:30.234190 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t8lxh\" (UniqueName: \"kubernetes.io/projected/0cd03d7d-cae3-4c24-90db-5f8598078f52-kube-api-access-t8lxh\") pod \"0cd03d7d-cae3-4c24-90db-5f8598078f52\" (UID: \"0cd03d7d-cae3-4c24-90db-5f8598078f52\") " Jan 20 16:57:30 crc kubenswrapper[4558]: I0120 16:57:30.234441 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0cd03d7d-cae3-4c24-90db-5f8598078f52-operator-scripts\") pod \"0cd03d7d-cae3-4c24-90db-5f8598078f52\" (UID: \"0cd03d7d-cae3-4c24-90db-5f8598078f52\") " Jan 20 16:57:30 crc kubenswrapper[4558]: I0120 16:57:30.234475 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/72531f1f-c34a-4508-a054-8719bbaacb7c-config\") pod \"72531f1f-c34a-4508-a054-8719bbaacb7c\" (UID: \"72531f1f-c34a-4508-a054-8719bbaacb7c\") " Jan 20 16:57:30 crc kubenswrapper[4558]: I0120 16:57:30.234564 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/72531f1f-c34a-4508-a054-8719bbaacb7c-dnsmasq-svc\") pod \"72531f1f-c34a-4508-a054-8719bbaacb7c\" (UID: \"72531f1f-c34a-4508-a054-8719bbaacb7c\") " Jan 20 16:57:30 crc kubenswrapper[4558]: I0120 16:57:30.234585 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qw6x6\" (UniqueName: \"kubernetes.io/projected/72531f1f-c34a-4508-a054-8719bbaacb7c-kube-api-access-qw6x6\") pod \"72531f1f-c34a-4508-a054-8719bbaacb7c\" (UID: \"72531f1f-c34a-4508-a054-8719bbaacb7c\") " Jan 20 16:57:30 crc kubenswrapper[4558]: I0120 16:57:30.235358 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0cd03d7d-cae3-4c24-90db-5f8598078f52-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "0cd03d7d-cae3-4c24-90db-5f8598078f52" (UID: "0cd03d7d-cae3-4c24-90db-5f8598078f52"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:57:30 crc kubenswrapper[4558]: I0120 16:57:30.241673 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/72531f1f-c34a-4508-a054-8719bbaacb7c-kube-api-access-qw6x6" (OuterVolumeSpecName: "kube-api-access-qw6x6") pod "72531f1f-c34a-4508-a054-8719bbaacb7c" (UID: "72531f1f-c34a-4508-a054-8719bbaacb7c"). InnerVolumeSpecName "kube-api-access-qw6x6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:57:30 crc kubenswrapper[4558]: I0120 16:57:30.241727 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0cd03d7d-cae3-4c24-90db-5f8598078f52-kube-api-access-t8lxh" (OuterVolumeSpecName: "kube-api-access-t8lxh") pod "0cd03d7d-cae3-4c24-90db-5f8598078f52" (UID: "0cd03d7d-cae3-4c24-90db-5f8598078f52"). InnerVolumeSpecName "kube-api-access-t8lxh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:57:30 crc kubenswrapper[4558]: I0120 16:57:30.265695 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/72531f1f-c34a-4508-a054-8719bbaacb7c-dnsmasq-svc" (OuterVolumeSpecName: "dnsmasq-svc") pod "72531f1f-c34a-4508-a054-8719bbaacb7c" (UID: "72531f1f-c34a-4508-a054-8719bbaacb7c"). InnerVolumeSpecName "dnsmasq-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:57:30 crc kubenswrapper[4558]: I0120 16:57:30.267326 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/72531f1f-c34a-4508-a054-8719bbaacb7c-config" (OuterVolumeSpecName: "config") pod "72531f1f-c34a-4508-a054-8719bbaacb7c" (UID: "72531f1f-c34a-4508-a054-8719bbaacb7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:57:30 crc kubenswrapper[4558]: I0120 16:57:30.302852 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-sync-wpq8b" event={"ID":"cde9092f-c62d-406c-b800-87c38b56402b","Type":"ContainerStarted","Data":"132812cda13f3aa921d317d99a570a33e0780093d77639760f5e2e76e1f5cc09"} Jan 20 16:57:30 crc kubenswrapper[4558]: I0120 16:57:30.316275 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-mltw6" event={"ID":"72531f1f-c34a-4508-a054-8719bbaacb7c","Type":"ContainerDied","Data":"78c4ec1e964a9b740f63c33f146dbe854871651c436899662b2721259744bc9c"} Jan 20 16:57:30 crc kubenswrapper[4558]: I0120 16:57:30.316317 4558 scope.go:117] "RemoveContainer" containerID="ff91bdcda632f425d0a1a57174bb650bdcf98af307215e589377f26846ce7470" Jan 20 16:57:30 crc kubenswrapper[4558]: I0120 16:57:30.316398 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-mltw6" Jan 20 16:57:30 crc kubenswrapper[4558]: I0120 16:57:30.320854 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/keystone-db-sync-wpq8b" podStartSLOduration=2.622239456 podStartE2EDuration="13.320838963s" podCreationTimestamp="2026-01-20 16:57:17 +0000 UTC" firstStartedPulling="2026-01-20 16:57:19.332179595 +0000 UTC m=+933.092517563" lastFinishedPulling="2026-01-20 16:57:30.030779103 +0000 UTC m=+943.791117070" observedRunningTime="2026-01-20 16:57:30.314233402 +0000 UTC m=+944.074571369" watchObservedRunningTime="2026-01-20 16:57:30.320838963 +0000 UTC m=+944.081176930" Jan 20 16:57:30 crc kubenswrapper[4558]: I0120 16:57:30.324205 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-hmgt7" event={"ID":"0cd03d7d-cae3-4c24-90db-5f8598078f52","Type":"ContainerDied","Data":"8820e290983958ac6bb1e9099eb3a9aa0751b9ccf61a00c9c7adf10611a82fce"} Jan 20 16:57:30 crc kubenswrapper[4558]: I0120 16:57:30.324234 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8820e290983958ac6bb1e9099eb3a9aa0751b9ccf61a00c9c7adf10611a82fce" Jan 20 16:57:30 crc kubenswrapper[4558]: I0120 16:57:30.324277 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-hmgt7" Jan 20 16:57:30 crc kubenswrapper[4558]: I0120 16:57:30.337071 4558 reconciler_common.go:293] "Volume detached for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/72531f1f-c34a-4508-a054-8719bbaacb7c-dnsmasq-svc\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:30 crc kubenswrapper[4558]: I0120 16:57:30.337094 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qw6x6\" (UniqueName: \"kubernetes.io/projected/72531f1f-c34a-4508-a054-8719bbaacb7c-kube-api-access-qw6x6\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:30 crc kubenswrapper[4558]: I0120 16:57:30.337119 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t8lxh\" (UniqueName: \"kubernetes.io/projected/0cd03d7d-cae3-4c24-90db-5f8598078f52-kube-api-access-t8lxh\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:30 crc kubenswrapper[4558]: I0120 16:57:30.337130 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0cd03d7d-cae3-4c24-90db-5f8598078f52-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:30 crc kubenswrapper[4558]: I0120 16:57:30.337138 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/72531f1f-c34a-4508-a054-8719bbaacb7c-config\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:30 crc kubenswrapper[4558]: I0120 16:57:30.342500 4558 scope.go:117] "RemoveContainer" containerID="acc0a33c2aaa9b94930f16919358442e245b006e8f71a86cd096169e5583bbd4" Jan 20 16:57:30 crc kubenswrapper[4558]: I0120 16:57:30.342629 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-mltw6"] Jan 20 16:57:30 crc kubenswrapper[4558]: I0120 16:57:30.348043 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-mltw6"] Jan 20 16:57:30 crc kubenswrapper[4558]: I0120 16:57:30.580911 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="72531f1f-c34a-4508-a054-8719bbaacb7c" path="/var/lib/kubelet/pods/72531f1f-c34a-4508-a054-8719bbaacb7c/volumes" Jan 20 16:57:31 crc kubenswrapper[4558]: I0120 16:57:31.340606 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-sj9bb" event={"ID":"282a24f3-5eac-4fa8-8a40-6260b94e2164","Type":"ContainerStarted","Data":"8b89b54c87d76249ac04db1cd612a219c5d38ea12852a52a99724321c43ca1e0"} Jan 20 16:57:31 crc kubenswrapper[4558]: I0120 16:57:31.358678 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-sj9bb" podStartSLOduration=2.672376332 podStartE2EDuration="18.35866583s" podCreationTimestamp="2026-01-20 16:57:13 +0000 UTC" firstStartedPulling="2026-01-20 16:57:14.892733292 +0000 UTC m=+928.653071260" lastFinishedPulling="2026-01-20 16:57:30.579022791 +0000 UTC m=+944.339360758" observedRunningTime="2026-01-20 16:57:31.353912241 +0000 UTC m=+945.114250207" watchObservedRunningTime="2026-01-20 16:57:31.35866583 +0000 UTC m=+945.119003797" Jan 20 16:57:32 crc kubenswrapper[4558]: I0120 16:57:32.350005 4558 generic.go:334] "Generic (PLEG): container finished" podID="cde9092f-c62d-406c-b800-87c38b56402b" containerID="132812cda13f3aa921d317d99a570a33e0780093d77639760f5e2e76e1f5cc09" exitCode=0 Jan 20 16:57:32 crc kubenswrapper[4558]: I0120 16:57:32.350070 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-kuttl-tests/keystone-db-sync-wpq8b" event={"ID":"cde9092f-c62d-406c-b800-87c38b56402b","Type":"ContainerDied","Data":"132812cda13f3aa921d317d99a570a33e0780093d77639760f5e2e76e1f5cc09"} Jan 20 16:57:33 crc kubenswrapper[4558]: I0120 16:57:33.601291 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-db-sync-wpq8b" Jan 20 16:57:33 crc kubenswrapper[4558]: I0120 16:57:33.685368 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cde9092f-c62d-406c-b800-87c38b56402b-combined-ca-bundle\") pod \"cde9092f-c62d-406c-b800-87c38b56402b\" (UID: \"cde9092f-c62d-406c-b800-87c38b56402b\") " Jan 20 16:57:33 crc kubenswrapper[4558]: I0120 16:57:33.685577 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xhm8z\" (UniqueName: \"kubernetes.io/projected/cde9092f-c62d-406c-b800-87c38b56402b-kube-api-access-xhm8z\") pod \"cde9092f-c62d-406c-b800-87c38b56402b\" (UID: \"cde9092f-c62d-406c-b800-87c38b56402b\") " Jan 20 16:57:33 crc kubenswrapper[4558]: I0120 16:57:33.685636 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cde9092f-c62d-406c-b800-87c38b56402b-config-data\") pod \"cde9092f-c62d-406c-b800-87c38b56402b\" (UID: \"cde9092f-c62d-406c-b800-87c38b56402b\") " Jan 20 16:57:33 crc kubenswrapper[4558]: I0120 16:57:33.689835 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cde9092f-c62d-406c-b800-87c38b56402b-kube-api-access-xhm8z" (OuterVolumeSpecName: "kube-api-access-xhm8z") pod "cde9092f-c62d-406c-b800-87c38b56402b" (UID: "cde9092f-c62d-406c-b800-87c38b56402b"). InnerVolumeSpecName "kube-api-access-xhm8z". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:57:33 crc kubenswrapper[4558]: I0120 16:57:33.703387 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cde9092f-c62d-406c-b800-87c38b56402b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cde9092f-c62d-406c-b800-87c38b56402b" (UID: "cde9092f-c62d-406c-b800-87c38b56402b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:57:33 crc kubenswrapper[4558]: I0120 16:57:33.717451 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cde9092f-c62d-406c-b800-87c38b56402b-config-data" (OuterVolumeSpecName: "config-data") pod "cde9092f-c62d-406c-b800-87c38b56402b" (UID: "cde9092f-c62d-406c-b800-87c38b56402b"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:57:33 crc kubenswrapper[4558]: I0120 16:57:33.746846 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-sj9bb" Jan 20 16:57:33 crc kubenswrapper[4558]: I0120 16:57:33.746893 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-sj9bb" Jan 20 16:57:33 crc kubenswrapper[4558]: I0120 16:57:33.787943 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cde9092f-c62d-406c-b800-87c38b56402b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:33 crc kubenswrapper[4558]: I0120 16:57:33.787995 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xhm8z\" (UniqueName: \"kubernetes.io/projected/cde9092f-c62d-406c-b800-87c38b56402b-kube-api-access-xhm8z\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:33 crc kubenswrapper[4558]: I0120 16:57:33.788006 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cde9092f-c62d-406c-b800-87c38b56402b-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.369890 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-sync-wpq8b" event={"ID":"cde9092f-c62d-406c-b800-87c38b56402b","Type":"ContainerDied","Data":"1fcc92b5b23a9573e011ec7fffcf1dd2ece5f714962d3c82d55acf6976a11e6d"} Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.369926 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1fcc92b5b23a9573e011ec7fffcf1dd2ece5f714962d3c82d55acf6976a11e6d" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.369937 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-db-sync-wpq8b" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.538442 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-fxc4n"] Jan 20 16:57:34 crc kubenswrapper[4558]: E0120 16:57:34.538701 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72531f1f-c34a-4508-a054-8719bbaacb7c" containerName="dnsmasq-dns" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.538719 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="72531f1f-c34a-4508-a054-8719bbaacb7c" containerName="dnsmasq-dns" Jan 20 16:57:34 crc kubenswrapper[4558]: E0120 16:57:34.538729 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cde9092f-c62d-406c-b800-87c38b56402b" containerName="keystone-db-sync" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.538734 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="cde9092f-c62d-406c-b800-87c38b56402b" containerName="keystone-db-sync" Jan 20 16:57:34 crc kubenswrapper[4558]: E0120 16:57:34.538744 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32e4598f-f0a7-4d4f-a04f-e37171e73905" containerName="extract-utilities" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.538750 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="32e4598f-f0a7-4d4f-a04f-e37171e73905" containerName="extract-utilities" Jan 20 16:57:34 crc kubenswrapper[4558]: E0120 16:57:34.538759 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d7a49b7-4cfe-4d63-a510-803f562eb3f6" containerName="extract-content" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.538764 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d7a49b7-4cfe-4d63-a510-803f562eb3f6" containerName="extract-content" Jan 20 16:57:34 crc kubenswrapper[4558]: E0120 16:57:34.538775 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b3471c1-c42b-4be4-96fe-7e43ef86ba15" containerName="mariadb-database-create" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.538780 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b3471c1-c42b-4be4-96fe-7e43ef86ba15" containerName="mariadb-database-create" Jan 20 16:57:34 crc kubenswrapper[4558]: E0120 16:57:34.538789 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0c2bc6c-ea80-49a0-876f-f0a4d29c12bd" containerName="mariadb-account-create-update" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.538794 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0c2bc6c-ea80-49a0-876f-f0a4d29c12bd" containerName="mariadb-account-create-update" Jan 20 16:57:34 crc kubenswrapper[4558]: E0120 16:57:34.538807 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0cd03d7d-cae3-4c24-90db-5f8598078f52" containerName="mariadb-account-create-update" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.538812 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0cd03d7d-cae3-4c24-90db-5f8598078f52" containerName="mariadb-account-create-update" Jan 20 16:57:34 crc kubenswrapper[4558]: E0120 16:57:34.538824 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cdd71514-e929-4a96-b799-91eecad5ac02" containerName="mariadb-account-create-update" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.538829 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="cdd71514-e929-4a96-b799-91eecad5ac02" containerName="mariadb-account-create-update" Jan 20 16:57:34 crc 
kubenswrapper[4558]: E0120 16:57:34.538836 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4589cf0-10b5-4bd9-a06d-58481ece189a" containerName="mariadb-account-create-update" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.538842 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4589cf0-10b5-4bd9-a06d-58481ece189a" containerName="mariadb-account-create-update" Jan 20 16:57:34 crc kubenswrapper[4558]: E0120 16:57:34.538849 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72531f1f-c34a-4508-a054-8719bbaacb7c" containerName="init" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.538854 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="72531f1f-c34a-4508-a054-8719bbaacb7c" containerName="init" Jan 20 16:57:34 crc kubenswrapper[4558]: E0120 16:57:34.538861 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32e4598f-f0a7-4d4f-a04f-e37171e73905" containerName="extract-content" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.538866 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="32e4598f-f0a7-4d4f-a04f-e37171e73905" containerName="extract-content" Jan 20 16:57:34 crc kubenswrapper[4558]: E0120 16:57:34.538875 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d7a49b7-4cfe-4d63-a510-803f562eb3f6" containerName="extract-utilities" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.538880 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d7a49b7-4cfe-4d63-a510-803f562eb3f6" containerName="extract-utilities" Jan 20 16:57:34 crc kubenswrapper[4558]: E0120 16:57:34.538889 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="567ff26d-1c6e-4117-869d-3bfcb1a705fc" containerName="mariadb-database-create" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.538895 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="567ff26d-1c6e-4117-869d-3bfcb1a705fc" containerName="mariadb-database-create" Jan 20 16:57:34 crc kubenswrapper[4558]: E0120 16:57:34.538904 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32e4598f-f0a7-4d4f-a04f-e37171e73905" containerName="registry-server" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.538909 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="32e4598f-f0a7-4d4f-a04f-e37171e73905" containerName="registry-server" Jan 20 16:57:34 crc kubenswrapper[4558]: E0120 16:57:34.538923 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d7a49b7-4cfe-4d63-a510-803f562eb3f6" containerName="registry-server" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.538928 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d7a49b7-4cfe-4d63-a510-803f562eb3f6" containerName="registry-server" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.539082 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="32e4598f-f0a7-4d4f-a04f-e37171e73905" containerName="registry-server" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.539094 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0c2bc6c-ea80-49a0-876f-f0a4d29c12bd" containerName="mariadb-account-create-update" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.539101 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="cde9092f-c62d-406c-b800-87c38b56402b" containerName="keystone-db-sync" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.539110 4558 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="7b3471c1-c42b-4be4-96fe-7e43ef86ba15" containerName="mariadb-database-create" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.539117 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="cdd71514-e929-4a96-b799-91eecad5ac02" containerName="mariadb-account-create-update" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.539123 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4589cf0-10b5-4bd9-a06d-58481ece189a" containerName="mariadb-account-create-update" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.539132 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="72531f1f-c34a-4508-a054-8719bbaacb7c" containerName="dnsmasq-dns" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.539141 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d7a49b7-4cfe-4d63-a510-803f562eb3f6" containerName="registry-server" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.539151 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="0cd03d7d-cae3-4c24-90db-5f8598078f52" containerName="mariadb-account-create-update" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.539156 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="567ff26d-1c6e-4117-869d-3bfcb1a705fc" containerName="mariadb-database-create" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.539618 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-fxc4n" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.541496 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-scripts" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.541916 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.541977 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"osp-secret" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.543300 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-keystone-dockercfg-wlvl5" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.545394 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-config-data" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.548875 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-fxc4n"] Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.598654 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lg4j5\" (UniqueName: \"kubernetes.io/projected/484f0ab5-2b8f-4204-b5d1-e032c159dbf2-kube-api-access-lg4j5\") pod \"keystone-bootstrap-fxc4n\" (UID: \"484f0ab5-2b8f-4204-b5d1-e032c159dbf2\") " pod="openstack-kuttl-tests/keystone-bootstrap-fxc4n" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.598880 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/484f0ab5-2b8f-4204-b5d1-e032c159dbf2-combined-ca-bundle\") pod \"keystone-bootstrap-fxc4n\" (UID: \"484f0ab5-2b8f-4204-b5d1-e032c159dbf2\") " pod="openstack-kuttl-tests/keystone-bootstrap-fxc4n" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.598923 4558 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/484f0ab5-2b8f-4204-b5d1-e032c159dbf2-fernet-keys\") pod \"keystone-bootstrap-fxc4n\" (UID: \"484f0ab5-2b8f-4204-b5d1-e032c159dbf2\") " pod="openstack-kuttl-tests/keystone-bootstrap-fxc4n" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.598952 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/484f0ab5-2b8f-4204-b5d1-e032c159dbf2-credential-keys\") pod \"keystone-bootstrap-fxc4n\" (UID: \"484f0ab5-2b8f-4204-b5d1-e032c159dbf2\") " pod="openstack-kuttl-tests/keystone-bootstrap-fxc4n" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.598985 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/484f0ab5-2b8f-4204-b5d1-e032c159dbf2-config-data\") pod \"keystone-bootstrap-fxc4n\" (UID: \"484f0ab5-2b8f-4204-b5d1-e032c159dbf2\") " pod="openstack-kuttl-tests/keystone-bootstrap-fxc4n" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.599142 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/484f0ab5-2b8f-4204-b5d1-e032c159dbf2-scripts\") pod \"keystone-bootstrap-fxc4n\" (UID: \"484f0ab5-2b8f-4204-b5d1-e032c159dbf2\") " pod="openstack-kuttl-tests/keystone-bootstrap-fxc4n" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.700972 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/484f0ab5-2b8f-4204-b5d1-e032c159dbf2-credential-keys\") pod \"keystone-bootstrap-fxc4n\" (UID: \"484f0ab5-2b8f-4204-b5d1-e032c159dbf2\") " pod="openstack-kuttl-tests/keystone-bootstrap-fxc4n" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.701033 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/484f0ab5-2b8f-4204-b5d1-e032c159dbf2-config-data\") pod \"keystone-bootstrap-fxc4n\" (UID: \"484f0ab5-2b8f-4204-b5d1-e032c159dbf2\") " pod="openstack-kuttl-tests/keystone-bootstrap-fxc4n" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.701151 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/484f0ab5-2b8f-4204-b5d1-e032c159dbf2-scripts\") pod \"keystone-bootstrap-fxc4n\" (UID: \"484f0ab5-2b8f-4204-b5d1-e032c159dbf2\") " pod="openstack-kuttl-tests/keystone-bootstrap-fxc4n" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.701277 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lg4j5\" (UniqueName: \"kubernetes.io/projected/484f0ab5-2b8f-4204-b5d1-e032c159dbf2-kube-api-access-lg4j5\") pod \"keystone-bootstrap-fxc4n\" (UID: \"484f0ab5-2b8f-4204-b5d1-e032c159dbf2\") " pod="openstack-kuttl-tests/keystone-bootstrap-fxc4n" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.701320 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/484f0ab5-2b8f-4204-b5d1-e032c159dbf2-combined-ca-bundle\") pod \"keystone-bootstrap-fxc4n\" (UID: \"484f0ab5-2b8f-4204-b5d1-e032c159dbf2\") " pod="openstack-kuttl-tests/keystone-bootstrap-fxc4n" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.701384 4558 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/484f0ab5-2b8f-4204-b5d1-e032c159dbf2-fernet-keys\") pod \"keystone-bootstrap-fxc4n\" (UID: \"484f0ab5-2b8f-4204-b5d1-e032c159dbf2\") " pod="openstack-kuttl-tests/keystone-bootstrap-fxc4n" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.712946 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/484f0ab5-2b8f-4204-b5d1-e032c159dbf2-combined-ca-bundle\") pod \"keystone-bootstrap-fxc4n\" (UID: \"484f0ab5-2b8f-4204-b5d1-e032c159dbf2\") " pod="openstack-kuttl-tests/keystone-bootstrap-fxc4n" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.713641 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/484f0ab5-2b8f-4204-b5d1-e032c159dbf2-fernet-keys\") pod \"keystone-bootstrap-fxc4n\" (UID: \"484f0ab5-2b8f-4204-b5d1-e032c159dbf2\") " pod="openstack-kuttl-tests/keystone-bootstrap-fxc4n" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.713849 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/484f0ab5-2b8f-4204-b5d1-e032c159dbf2-scripts\") pod \"keystone-bootstrap-fxc4n\" (UID: \"484f0ab5-2b8f-4204-b5d1-e032c159dbf2\") " pod="openstack-kuttl-tests/keystone-bootstrap-fxc4n" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.718588 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/484f0ab5-2b8f-4204-b5d1-e032c159dbf2-credential-keys\") pod \"keystone-bootstrap-fxc4n\" (UID: \"484f0ab5-2b8f-4204-b5d1-e032c159dbf2\") " pod="openstack-kuttl-tests/keystone-bootstrap-fxc4n" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.724253 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/484f0ab5-2b8f-4204-b5d1-e032c159dbf2-config-data\") pod \"keystone-bootstrap-fxc4n\" (UID: \"484f0ab5-2b8f-4204-b5d1-e032c159dbf2\") " pod="openstack-kuttl-tests/keystone-bootstrap-fxc4n" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.724307 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-db-sync-ckhth"] Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.725146 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-db-sync-ckhth" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.730273 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-db-sync-jwg24"] Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.731334 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-db-sync-jwg24" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.733627 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-httpd-config" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.739510 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-neutron-dockercfg-7d264" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.739818 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-config" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.741218 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-config-data" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.741428 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-scripts" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.747282 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-cinder-dockercfg-s7wtr" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.747402 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-db-sync-ckhth"] Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.748510 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lg4j5\" (UniqueName: \"kubernetes.io/projected/484f0ab5-2b8f-4204-b5d1-e032c159dbf2-kube-api-access-lg4j5\") pod \"keystone-bootstrap-fxc4n\" (UID: \"484f0ab5-2b8f-4204-b5d1-e032c159dbf2\") " pod="openstack-kuttl-tests/keystone-bootstrap-fxc4n" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.780854 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-sj9bb" podUID="282a24f3-5eac-4fa8-8a40-6260b94e2164" containerName="registry-server" probeResult="failure" output=< Jan 20 16:57:34 crc kubenswrapper[4558]: timeout: failed to connect service ":50051" within 1s Jan 20 16:57:34 crc kubenswrapper[4558]: > Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.803356 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa-config-data\") pod \"cinder-db-sync-jwg24\" (UID: \"7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa\") " pod="openstack-kuttl-tests/cinder-db-sync-jwg24" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.803496 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zh7rj\" (UniqueName: \"kubernetes.io/projected/b79fb42a-3e70-4daa-b0ea-fe854def5825-kube-api-access-zh7rj\") pod \"neutron-db-sync-ckhth\" (UID: \"b79fb42a-3e70-4daa-b0ea-fe854def5825\") " pod="openstack-kuttl-tests/neutron-db-sync-ckhth" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.803531 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa-combined-ca-bundle\") pod \"cinder-db-sync-jwg24\" (UID: \"7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa\") " pod="openstack-kuttl-tests/cinder-db-sync-jwg24" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.803552 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/b79fb42a-3e70-4daa-b0ea-fe854def5825-combined-ca-bundle\") pod \"neutron-db-sync-ckhth\" (UID: \"b79fb42a-3e70-4daa-b0ea-fe854def5825\") " pod="openstack-kuttl-tests/neutron-db-sync-ckhth" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.803568 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/b79fb42a-3e70-4daa-b0ea-fe854def5825-config\") pod \"neutron-db-sync-ckhth\" (UID: \"b79fb42a-3e70-4daa-b0ea-fe854def5825\") " pod="openstack-kuttl-tests/neutron-db-sync-ckhth" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.803623 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p67ps\" (UniqueName: \"kubernetes.io/projected/7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa-kube-api-access-p67ps\") pod \"cinder-db-sync-jwg24\" (UID: \"7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa\") " pod="openstack-kuttl-tests/cinder-db-sync-jwg24" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.803671 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa-scripts\") pod \"cinder-db-sync-jwg24\" (UID: \"7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa\") " pod="openstack-kuttl-tests/cinder-db-sync-jwg24" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.803713 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa-db-sync-config-data\") pod \"cinder-db-sync-jwg24\" (UID: \"7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa\") " pod="openstack-kuttl-tests/cinder-db-sync-jwg24" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.803743 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa-etc-machine-id\") pod \"cinder-db-sync-jwg24\" (UID: \"7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa\") " pod="openstack-kuttl-tests/cinder-db-sync-jwg24" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.811130 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-db-sync-jwg24"] Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.823500 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.825442 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.828436 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.828648 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.842517 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/placement-db-sync-d6pfc"] Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.843450 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-db-sync-d6pfc" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.845494 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"placement-placement-dockercfg-2kfdl" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.845726 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"placement-scripts" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.845992 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"placement-config-data" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.856434 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-fxc4n" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.863199 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.877699 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-db-sync-mpmc8"] Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.889878 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-db-sync-d6pfc"] Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.889961 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-db-sync-mpmc8" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.893096 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-config-data" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.893291 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-barbican-dockercfg-z9dpp" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.893387 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-db-sync-mpmc8"] Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.909584 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c1e3af55-1788-4d23-b3c5-6227e848ea15-run-httpd\") pod \"ceilometer-0\" (UID: \"c1e3af55-1788-4d23-b3c5-6227e848ea15\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.909621 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1e54a34-818b-4918-baeb-586ff225fe1f-combined-ca-bundle\") pod \"placement-db-sync-d6pfc\" (UID: \"b1e54a34-818b-4918-baeb-586ff225fe1f\") " pod="openstack-kuttl-tests/placement-db-sync-d6pfc" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.909644 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa-config-data\") pod \"cinder-db-sync-jwg24\" (UID: \"7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa\") " pod="openstack-kuttl-tests/cinder-db-sync-jwg24" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.909677 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zh7rj\" (UniqueName: \"kubernetes.io/projected/b79fb42a-3e70-4daa-b0ea-fe854def5825-kube-api-access-zh7rj\") pod \"neutron-db-sync-ckhth\" (UID: 
\"b79fb42a-3e70-4daa-b0ea-fe854def5825\") " pod="openstack-kuttl-tests/neutron-db-sync-ckhth" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.909695 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa-combined-ca-bundle\") pod \"cinder-db-sync-jwg24\" (UID: \"7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa\") " pod="openstack-kuttl-tests/cinder-db-sync-jwg24" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.909711 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/b79fb42a-3e70-4daa-b0ea-fe854def5825-config\") pod \"neutron-db-sync-ckhth\" (UID: \"b79fb42a-3e70-4daa-b0ea-fe854def5825\") " pod="openstack-kuttl-tests/neutron-db-sync-ckhth" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.909727 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b79fb42a-3e70-4daa-b0ea-fe854def5825-combined-ca-bundle\") pod \"neutron-db-sync-ckhth\" (UID: \"b79fb42a-3e70-4daa-b0ea-fe854def5825\") " pod="openstack-kuttl-tests/neutron-db-sync-ckhth" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.909781 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1e54a34-818b-4918-baeb-586ff225fe1f-config-data\") pod \"placement-db-sync-d6pfc\" (UID: \"b1e54a34-818b-4918-baeb-586ff225fe1f\") " pod="openstack-kuttl-tests/placement-db-sync-d6pfc" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.909798 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p67ps\" (UniqueName: \"kubernetes.io/projected/7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa-kube-api-access-p67ps\") pod \"cinder-db-sync-jwg24\" (UID: \"7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa\") " pod="openstack-kuttl-tests/cinder-db-sync-jwg24" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.909831 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b1e54a34-818b-4918-baeb-586ff225fe1f-logs\") pod \"placement-db-sync-d6pfc\" (UID: \"b1e54a34-818b-4918-baeb-586ff225fe1f\") " pod="openstack-kuttl-tests/placement-db-sync-d6pfc" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.909847 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c1e3af55-1788-4d23-b3c5-6227e848ea15-log-httpd\") pod \"ceilometer-0\" (UID: \"c1e3af55-1788-4d23-b3c5-6227e848ea15\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.909860 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c1e3af55-1788-4d23-b3c5-6227e848ea15-scripts\") pod \"ceilometer-0\" (UID: \"c1e3af55-1788-4d23-b3c5-6227e848ea15\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.909877 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c1e3af55-1788-4d23-b3c5-6227e848ea15-config-data\") pod \"ceilometer-0\" (UID: \"c1e3af55-1788-4d23-b3c5-6227e848ea15\") " 
pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.909891 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1e3af55-1788-4d23-b3c5-6227e848ea15-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c1e3af55-1788-4d23-b3c5-6227e848ea15\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.909904 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jh79j\" (UniqueName: \"kubernetes.io/projected/b1e54a34-818b-4918-baeb-586ff225fe1f-kube-api-access-jh79j\") pod \"placement-db-sync-d6pfc\" (UID: \"b1e54a34-818b-4918-baeb-586ff225fe1f\") " pod="openstack-kuttl-tests/placement-db-sync-d6pfc" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.909926 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa-scripts\") pod \"cinder-db-sync-jwg24\" (UID: \"7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa\") " pod="openstack-kuttl-tests/cinder-db-sync-jwg24" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.909971 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa-db-sync-config-data\") pod \"cinder-db-sync-jwg24\" (UID: \"7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa\") " pod="openstack-kuttl-tests/cinder-db-sync-jwg24" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.909987 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ghzgm\" (UniqueName: \"kubernetes.io/projected/c1e3af55-1788-4d23-b3c5-6227e848ea15-kube-api-access-ghzgm\") pod \"ceilometer-0\" (UID: \"c1e3af55-1788-4d23-b3c5-6227e848ea15\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.910003 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b1e54a34-818b-4918-baeb-586ff225fe1f-scripts\") pod \"placement-db-sync-d6pfc\" (UID: \"b1e54a34-818b-4918-baeb-586ff225fe1f\") " pod="openstack-kuttl-tests/placement-db-sync-d6pfc" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.910027 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c1e3af55-1788-4d23-b3c5-6227e848ea15-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c1e3af55-1788-4d23-b3c5-6227e848ea15\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.910045 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa-etc-machine-id\") pod \"cinder-db-sync-jwg24\" (UID: \"7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa\") " pod="openstack-kuttl-tests/cinder-db-sync-jwg24" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.910100 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa-etc-machine-id\") pod \"cinder-db-sync-jwg24\" (UID: \"7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa\") " 
pod="openstack-kuttl-tests/cinder-db-sync-jwg24" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.924841 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa-db-sync-config-data\") pod \"cinder-db-sync-jwg24\" (UID: \"7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa\") " pod="openstack-kuttl-tests/cinder-db-sync-jwg24" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.925698 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa-combined-ca-bundle\") pod \"cinder-db-sync-jwg24\" (UID: \"7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa\") " pod="openstack-kuttl-tests/cinder-db-sync-jwg24" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.926528 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa-config-data\") pod \"cinder-db-sync-jwg24\" (UID: \"7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa\") " pod="openstack-kuttl-tests/cinder-db-sync-jwg24" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.928361 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa-scripts\") pod \"cinder-db-sync-jwg24\" (UID: \"7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa\") " pod="openstack-kuttl-tests/cinder-db-sync-jwg24" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.939474 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b79fb42a-3e70-4daa-b0ea-fe854def5825-combined-ca-bundle\") pod \"neutron-db-sync-ckhth\" (UID: \"b79fb42a-3e70-4daa-b0ea-fe854def5825\") " pod="openstack-kuttl-tests/neutron-db-sync-ckhth" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.939685 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/b79fb42a-3e70-4daa-b0ea-fe854def5825-config\") pod \"neutron-db-sync-ckhth\" (UID: \"b79fb42a-3e70-4daa-b0ea-fe854def5825\") " pod="openstack-kuttl-tests/neutron-db-sync-ckhth" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.943373 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zh7rj\" (UniqueName: \"kubernetes.io/projected/b79fb42a-3e70-4daa-b0ea-fe854def5825-kube-api-access-zh7rj\") pod \"neutron-db-sync-ckhth\" (UID: \"b79fb42a-3e70-4daa-b0ea-fe854def5825\") " pod="openstack-kuttl-tests/neutron-db-sync-ckhth" Jan 20 16:57:34 crc kubenswrapper[4558]: I0120 16:57:34.943520 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p67ps\" (UniqueName: \"kubernetes.io/projected/7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa-kube-api-access-p67ps\") pod \"cinder-db-sync-jwg24\" (UID: \"7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa\") " pod="openstack-kuttl-tests/cinder-db-sync-jwg24" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.010922 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ghzgm\" (UniqueName: \"kubernetes.io/projected/c1e3af55-1788-4d23-b3c5-6227e848ea15-kube-api-access-ghzgm\") pod \"ceilometer-0\" (UID: \"c1e3af55-1788-4d23-b3c5-6227e848ea15\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.011130 4558 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b1e54a34-818b-4918-baeb-586ff225fe1f-scripts\") pod \"placement-db-sync-d6pfc\" (UID: \"b1e54a34-818b-4918-baeb-586ff225fe1f\") " pod="openstack-kuttl-tests/placement-db-sync-d6pfc" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.011182 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c1e3af55-1788-4d23-b3c5-6227e848ea15-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c1e3af55-1788-4d23-b3c5-6227e848ea15\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.011208 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c1e3af55-1788-4d23-b3c5-6227e848ea15-run-httpd\") pod \"ceilometer-0\" (UID: \"c1e3af55-1788-4d23-b3c5-6227e848ea15\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.011224 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1e54a34-818b-4918-baeb-586ff225fe1f-combined-ca-bundle\") pod \"placement-db-sync-d6pfc\" (UID: \"b1e54a34-818b-4918-baeb-586ff225fe1f\") " pod="openstack-kuttl-tests/placement-db-sync-d6pfc" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.011247 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b3c9bd31-20f5-45ea-ae8d-987a8a70e321-db-sync-config-data\") pod \"barbican-db-sync-mpmc8\" (UID: \"b3c9bd31-20f5-45ea-ae8d-987a8a70e321\") " pod="openstack-kuttl-tests/barbican-db-sync-mpmc8" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.011278 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5phkx\" (UniqueName: \"kubernetes.io/projected/b3c9bd31-20f5-45ea-ae8d-987a8a70e321-kube-api-access-5phkx\") pod \"barbican-db-sync-mpmc8\" (UID: \"b3c9bd31-20f5-45ea-ae8d-987a8a70e321\") " pod="openstack-kuttl-tests/barbican-db-sync-mpmc8" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.011306 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3c9bd31-20f5-45ea-ae8d-987a8a70e321-combined-ca-bundle\") pod \"barbican-db-sync-mpmc8\" (UID: \"b3c9bd31-20f5-45ea-ae8d-987a8a70e321\") " pod="openstack-kuttl-tests/barbican-db-sync-mpmc8" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.011348 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1e54a34-818b-4918-baeb-586ff225fe1f-config-data\") pod \"placement-db-sync-d6pfc\" (UID: \"b1e54a34-818b-4918-baeb-586ff225fe1f\") " pod="openstack-kuttl-tests/placement-db-sync-d6pfc" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.011386 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b1e54a34-818b-4918-baeb-586ff225fe1f-logs\") pod \"placement-db-sync-d6pfc\" (UID: \"b1e54a34-818b-4918-baeb-586ff225fe1f\") " pod="openstack-kuttl-tests/placement-db-sync-d6pfc" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.011403 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"scripts\" (UniqueName: \"kubernetes.io/secret/c1e3af55-1788-4d23-b3c5-6227e848ea15-scripts\") pod \"ceilometer-0\" (UID: \"c1e3af55-1788-4d23-b3c5-6227e848ea15\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.011417 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c1e3af55-1788-4d23-b3c5-6227e848ea15-log-httpd\") pod \"ceilometer-0\" (UID: \"c1e3af55-1788-4d23-b3c5-6227e848ea15\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.011443 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c1e3af55-1788-4d23-b3c5-6227e848ea15-config-data\") pod \"ceilometer-0\" (UID: \"c1e3af55-1788-4d23-b3c5-6227e848ea15\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.011458 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1e3af55-1788-4d23-b3c5-6227e848ea15-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c1e3af55-1788-4d23-b3c5-6227e848ea15\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.011472 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jh79j\" (UniqueName: \"kubernetes.io/projected/b1e54a34-818b-4918-baeb-586ff225fe1f-kube-api-access-jh79j\") pod \"placement-db-sync-d6pfc\" (UID: \"b1e54a34-818b-4918-baeb-586ff225fe1f\") " pod="openstack-kuttl-tests/placement-db-sync-d6pfc" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.012224 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c1e3af55-1788-4d23-b3c5-6227e848ea15-log-httpd\") pod \"ceilometer-0\" (UID: \"c1e3af55-1788-4d23-b3c5-6227e848ea15\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.013036 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b1e54a34-818b-4918-baeb-586ff225fe1f-logs\") pod \"placement-db-sync-d6pfc\" (UID: \"b1e54a34-818b-4918-baeb-586ff225fe1f\") " pod="openstack-kuttl-tests/placement-db-sync-d6pfc" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.013719 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c1e3af55-1788-4d23-b3c5-6227e848ea15-run-httpd\") pod \"ceilometer-0\" (UID: \"c1e3af55-1788-4d23-b3c5-6227e848ea15\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.015913 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1e3af55-1788-4d23-b3c5-6227e848ea15-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c1e3af55-1788-4d23-b3c5-6227e848ea15\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.018019 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c1e3af55-1788-4d23-b3c5-6227e848ea15-config-data\") pod \"ceilometer-0\" (UID: \"c1e3af55-1788-4d23-b3c5-6227e848ea15\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.018637 
4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1e54a34-818b-4918-baeb-586ff225fe1f-combined-ca-bundle\") pod \"placement-db-sync-d6pfc\" (UID: \"b1e54a34-818b-4918-baeb-586ff225fe1f\") " pod="openstack-kuttl-tests/placement-db-sync-d6pfc" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.022300 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1e54a34-818b-4918-baeb-586ff225fe1f-config-data\") pod \"placement-db-sync-d6pfc\" (UID: \"b1e54a34-818b-4918-baeb-586ff225fe1f\") " pod="openstack-kuttl-tests/placement-db-sync-d6pfc" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.022768 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c1e3af55-1788-4d23-b3c5-6227e848ea15-scripts\") pod \"ceilometer-0\" (UID: \"c1e3af55-1788-4d23-b3c5-6227e848ea15\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.023434 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b1e54a34-818b-4918-baeb-586ff225fe1f-scripts\") pod \"placement-db-sync-d6pfc\" (UID: \"b1e54a34-818b-4918-baeb-586ff225fe1f\") " pod="openstack-kuttl-tests/placement-db-sync-d6pfc" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.029726 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c1e3af55-1788-4d23-b3c5-6227e848ea15-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c1e3af55-1788-4d23-b3c5-6227e848ea15\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.032414 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jh79j\" (UniqueName: \"kubernetes.io/projected/b1e54a34-818b-4918-baeb-586ff225fe1f-kube-api-access-jh79j\") pod \"placement-db-sync-d6pfc\" (UID: \"b1e54a34-818b-4918-baeb-586ff225fe1f\") " pod="openstack-kuttl-tests/placement-db-sync-d6pfc" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.039726 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ghzgm\" (UniqueName: \"kubernetes.io/projected/c1e3af55-1788-4d23-b3c5-6227e848ea15-kube-api-access-ghzgm\") pod \"ceilometer-0\" (UID: \"c1e3af55-1788-4d23-b3c5-6227e848ea15\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.113433 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3c9bd31-20f5-45ea-ae8d-987a8a70e321-combined-ca-bundle\") pod \"barbican-db-sync-mpmc8\" (UID: \"b3c9bd31-20f5-45ea-ae8d-987a8a70e321\") " pod="openstack-kuttl-tests/barbican-db-sync-mpmc8" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.113649 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b3c9bd31-20f5-45ea-ae8d-987a8a70e321-db-sync-config-data\") pod \"barbican-db-sync-mpmc8\" (UID: \"b3c9bd31-20f5-45ea-ae8d-987a8a70e321\") " pod="openstack-kuttl-tests/barbican-db-sync-mpmc8" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.113693 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5phkx\" (UniqueName: 
\"kubernetes.io/projected/b3c9bd31-20f5-45ea-ae8d-987a8a70e321-kube-api-access-5phkx\") pod \"barbican-db-sync-mpmc8\" (UID: \"b3c9bd31-20f5-45ea-ae8d-987a8a70e321\") " pod="openstack-kuttl-tests/barbican-db-sync-mpmc8" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.115648 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-db-sync-ckhth" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.117818 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b3c9bd31-20f5-45ea-ae8d-987a8a70e321-db-sync-config-data\") pod \"barbican-db-sync-mpmc8\" (UID: \"b3c9bd31-20f5-45ea-ae8d-987a8a70e321\") " pod="openstack-kuttl-tests/barbican-db-sync-mpmc8" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.119928 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3c9bd31-20f5-45ea-ae8d-987a8a70e321-combined-ca-bundle\") pod \"barbican-db-sync-mpmc8\" (UID: \"b3c9bd31-20f5-45ea-ae8d-987a8a70e321\") " pod="openstack-kuttl-tests/barbican-db-sync-mpmc8" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.131961 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5phkx\" (UniqueName: \"kubernetes.io/projected/b3c9bd31-20f5-45ea-ae8d-987a8a70e321-kube-api-access-5phkx\") pod \"barbican-db-sync-mpmc8\" (UID: \"b3c9bd31-20f5-45ea-ae8d-987a8a70e321\") " pod="openstack-kuttl-tests/barbican-db-sync-mpmc8" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.136488 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-db-sync-jwg24" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.144698 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.171650 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-db-sync-d6pfc" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.255520 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-db-sync-mpmc8" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.297716 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-fxc4n"] Jan 20 16:57:35 crc kubenswrapper[4558]: W0120 16:57:35.310358 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod484f0ab5_2b8f_4204_b5d1_e032c159dbf2.slice/crio-de3f98ab064ffb0dd88423b3c28a997bb70522f9d43f9deff95d670b0cd5d561 WatchSource:0}: Error finding container de3f98ab064ffb0dd88423b3c28a997bb70522f9d43f9deff95d670b0cd5d561: Status 404 returned error can't find the container with id de3f98ab064ffb0dd88423b3c28a997bb70522f9d43f9deff95d670b0cd5d561 Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.379609 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-fxc4n" event={"ID":"484f0ab5-2b8f-4204-b5d1-e032c159dbf2","Type":"ContainerStarted","Data":"de3f98ab064ffb0dd88423b3c28a997bb70522f9d43f9deff95d670b0cd5d561"} Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.573152 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-db-sync-ckhth"] Jan 20 16:57:35 crc kubenswrapper[4558]: W0120 16:57:35.581857 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb79fb42a_3e70_4daa_b0ea_fe854def5825.slice/crio-d11a6b2c8a240887d185dccbe507617c83d9ba4668c7089d0f885be80c9a0050 WatchSource:0}: Error finding container d11a6b2c8a240887d185dccbe507617c83d9ba4668c7089d0f885be80c9a0050: Status 404 returned error can't find the container with id d11a6b2c8a240887d185dccbe507617c83d9ba4668c7089d0f885be80c9a0050 Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.613627 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.616567 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.619071 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-scripts" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.620346 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.625707 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-public-svc" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.626062 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-glance-dockercfg-67jwr" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.626245 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-external-config-data" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.679436 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.680713 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.690529 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-internal-svc" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.691151 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-internal-config-data" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.725895 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/79a5c7ed-336f-47df-a568-0773deb035ef-config-data\") pod \"glance-default-external-api-0\" (UID: \"79a5c7ed-336f-47df-a568-0773deb035ef\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.725930 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79a5c7ed-336f-47df-a568-0773deb035ef-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"79a5c7ed-336f-47df-a568-0773deb035ef\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.725950 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/79a5c7ed-336f-47df-a568-0773deb035ef-scripts\") pod \"glance-default-external-api-0\" (UID: \"79a5c7ed-336f-47df-a568-0773deb035ef\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.726003 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/79a5c7ed-336f-47df-a568-0773deb035ef-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"79a5c7ed-336f-47df-a568-0773deb035ef\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.726035 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-external-api-0\" (UID: \"79a5c7ed-336f-47df-a568-0773deb035ef\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.726050 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/79a5c7ed-336f-47df-a568-0773deb035ef-logs\") pod \"glance-default-external-api-0\" (UID: \"79a5c7ed-336f-47df-a568-0773deb035ef\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.726076 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/79a5c7ed-336f-47df-a568-0773deb035ef-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"79a5c7ed-336f-47df-a568-0773deb035ef\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.726092 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zz6z5\" (UniqueName: 
\"kubernetes.io/projected/79a5c7ed-336f-47df-a568-0773deb035ef-kube-api-access-zz6z5\") pod \"glance-default-external-api-0\" (UID: \"79a5c7ed-336f-47df-a568-0773deb035ef\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.774089 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.774350 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.774364 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-db-sync-d6pfc"] Jan 20 16:57:35 crc kubenswrapper[4558]: W0120 16:57:35.776356 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc1e3af55_1788_4d23_b3c5_6227e848ea15.slice/crio-4ed7d820d77ed20afacf7d07db65e1e25610895dee06160c7c233aa856b7a72c WatchSource:0}: Error finding container 4ed7d820d77ed20afacf7d07db65e1e25610895dee06160c7c233aa856b7a72c: Status 404 returned error can't find the container with id 4ed7d820d77ed20afacf7d07db65e1e25610895dee06160c7c233aa856b7a72c Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.824964 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-db-sync-jwg24"] Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.826994 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/49756a66-f375-44c8-b2fb-f0a1da20ea32-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"49756a66-f375-44c8-b2fb-f0a1da20ea32\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.827041 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cw9n2\" (UniqueName: \"kubernetes.io/projected/49756a66-f375-44c8-b2fb-f0a1da20ea32-kube-api-access-cw9n2\") pod \"glance-default-internal-api-0\" (UID: \"49756a66-f375-44c8-b2fb-f0a1da20ea32\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.827063 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/49756a66-f375-44c8-b2fb-f0a1da20ea32-scripts\") pod \"glance-default-internal-api-0\" (UID: \"49756a66-f375-44c8-b2fb-f0a1da20ea32\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.827082 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"49756a66-f375-44c8-b2fb-f0a1da20ea32\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.827101 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/79a5c7ed-336f-47df-a568-0773deb035ef-config-data\") pod \"glance-default-external-api-0\" (UID: \"79a5c7ed-336f-47df-a568-0773deb035ef\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 
16:57:35.827117 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79a5c7ed-336f-47df-a568-0773deb035ef-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"79a5c7ed-336f-47df-a568-0773deb035ef\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.827132 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/79a5c7ed-336f-47df-a568-0773deb035ef-scripts\") pod \"glance-default-external-api-0\" (UID: \"79a5c7ed-336f-47df-a568-0773deb035ef\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.827146 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/79a5c7ed-336f-47df-a568-0773deb035ef-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"79a5c7ed-336f-47df-a568-0773deb035ef\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.827192 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/49756a66-f375-44c8-b2fb-f0a1da20ea32-logs\") pod \"glance-default-internal-api-0\" (UID: \"49756a66-f375-44c8-b2fb-f0a1da20ea32\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.827206 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/49756a66-f375-44c8-b2fb-f0a1da20ea32-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"49756a66-f375-44c8-b2fb-f0a1da20ea32\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.827221 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-external-api-0\" (UID: \"79a5c7ed-336f-47df-a568-0773deb035ef\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.827235 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/79a5c7ed-336f-47df-a568-0773deb035ef-logs\") pod \"glance-default-external-api-0\" (UID: \"79a5c7ed-336f-47df-a568-0773deb035ef\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.827258 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zz6z5\" (UniqueName: \"kubernetes.io/projected/79a5c7ed-336f-47df-a568-0773deb035ef-kube-api-access-zz6z5\") pod \"glance-default-external-api-0\" (UID: \"79a5c7ed-336f-47df-a568-0773deb035ef\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.827272 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/79a5c7ed-336f-47df-a568-0773deb035ef-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"79a5c7ed-336f-47df-a568-0773deb035ef\") " pod="openstack-kuttl-tests/glance-default-external-api-0" 
Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.827292 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/49756a66-f375-44c8-b2fb-f0a1da20ea32-config-data\") pod \"glance-default-internal-api-0\" (UID: \"49756a66-f375-44c8-b2fb-f0a1da20ea32\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.827312 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/49756a66-f375-44c8-b2fb-f0a1da20ea32-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"49756a66-f375-44c8-b2fb-f0a1da20ea32\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.827620 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-external-api-0\" (UID: \"79a5c7ed-336f-47df-a568-0773deb035ef\") device mount path \"/mnt/openstack/pv01\"" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.828663 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/79a5c7ed-336f-47df-a568-0773deb035ef-logs\") pod \"glance-default-external-api-0\" (UID: \"79a5c7ed-336f-47df-a568-0773deb035ef\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.829336 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/79a5c7ed-336f-47df-a568-0773deb035ef-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"79a5c7ed-336f-47df-a568-0773deb035ef\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.833260 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/79a5c7ed-336f-47df-a568-0773deb035ef-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"79a5c7ed-336f-47df-a568-0773deb035ef\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.833298 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/79a5c7ed-336f-47df-a568-0773deb035ef-scripts\") pod \"glance-default-external-api-0\" (UID: \"79a5c7ed-336f-47df-a568-0773deb035ef\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.833426 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79a5c7ed-336f-47df-a568-0773deb035ef-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"79a5c7ed-336f-47df-a568-0773deb035ef\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.834206 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/79a5c7ed-336f-47df-a568-0773deb035ef-config-data\") pod \"glance-default-external-api-0\" (UID: \"79a5c7ed-336f-47df-a568-0773deb035ef\") " 
pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.858709 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zz6z5\" (UniqueName: \"kubernetes.io/projected/79a5c7ed-336f-47df-a568-0773deb035ef-kube-api-access-zz6z5\") pod \"glance-default-external-api-0\" (UID: \"79a5c7ed-336f-47df-a568-0773deb035ef\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.858843 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-external-api-0\" (UID: \"79a5c7ed-336f-47df-a568-0773deb035ef\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.866861 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-db-sync-mpmc8"] Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.928232 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/49756a66-f375-44c8-b2fb-f0a1da20ea32-logs\") pod \"glance-default-internal-api-0\" (UID: \"49756a66-f375-44c8-b2fb-f0a1da20ea32\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.928374 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/49756a66-f375-44c8-b2fb-f0a1da20ea32-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"49756a66-f375-44c8-b2fb-f0a1da20ea32\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.928479 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/49756a66-f375-44c8-b2fb-f0a1da20ea32-config-data\") pod \"glance-default-internal-api-0\" (UID: \"49756a66-f375-44c8-b2fb-f0a1da20ea32\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.928545 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/49756a66-f375-44c8-b2fb-f0a1da20ea32-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"49756a66-f375-44c8-b2fb-f0a1da20ea32\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.928642 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/49756a66-f375-44c8-b2fb-f0a1da20ea32-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"49756a66-f375-44c8-b2fb-f0a1da20ea32\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.928681 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/49756a66-f375-44c8-b2fb-f0a1da20ea32-logs\") pod \"glance-default-internal-api-0\" (UID: \"49756a66-f375-44c8-b2fb-f0a1da20ea32\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.928776 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cw9n2\" (UniqueName: 
\"kubernetes.io/projected/49756a66-f375-44c8-b2fb-f0a1da20ea32-kube-api-access-cw9n2\") pod \"glance-default-internal-api-0\" (UID: \"49756a66-f375-44c8-b2fb-f0a1da20ea32\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.928845 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/49756a66-f375-44c8-b2fb-f0a1da20ea32-scripts\") pod \"glance-default-internal-api-0\" (UID: \"49756a66-f375-44c8-b2fb-f0a1da20ea32\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.928908 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"49756a66-f375-44c8-b2fb-f0a1da20ea32\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.929096 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"49756a66-f375-44c8-b2fb-f0a1da20ea32\") device mount path \"/mnt/openstack/pv07\"" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.929108 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/49756a66-f375-44c8-b2fb-f0a1da20ea32-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"49756a66-f375-44c8-b2fb-f0a1da20ea32\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.931907 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/49756a66-f375-44c8-b2fb-f0a1da20ea32-config-data\") pod \"glance-default-internal-api-0\" (UID: \"49756a66-f375-44c8-b2fb-f0a1da20ea32\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.932203 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/49756a66-f375-44c8-b2fb-f0a1da20ea32-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"49756a66-f375-44c8-b2fb-f0a1da20ea32\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.932838 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/49756a66-f375-44c8-b2fb-f0a1da20ea32-scripts\") pod \"glance-default-internal-api-0\" (UID: \"49756a66-f375-44c8-b2fb-f0a1da20ea32\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.933715 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/49756a66-f375-44c8-b2fb-f0a1da20ea32-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"49756a66-f375-44c8-b2fb-f0a1da20ea32\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.944410 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cw9n2\" (UniqueName: 
\"kubernetes.io/projected/49756a66-f375-44c8-b2fb-f0a1da20ea32-kube-api-access-cw9n2\") pod \"glance-default-internal-api-0\" (UID: \"49756a66-f375-44c8-b2fb-f0a1da20ea32\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.952519 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"49756a66-f375-44c8-b2fb-f0a1da20ea32\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:57:35 crc kubenswrapper[4558]: I0120 16:57:35.971478 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:57:36 crc kubenswrapper[4558]: I0120 16:57:36.127886 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:57:36 crc kubenswrapper[4558]: I0120 16:57:36.365606 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 16:57:36 crc kubenswrapper[4558]: I0120 16:57:36.397670 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-sync-d6pfc" event={"ID":"b1e54a34-818b-4918-baeb-586ff225fe1f","Type":"ContainerStarted","Data":"03c23d6d6a3cc40c7b906b0244668b67eea91d715e82ca654f9c30d5abd205cf"} Jan 20 16:57:36 crc kubenswrapper[4558]: I0120 16:57:36.401690 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"c1e3af55-1788-4d23-b3c5-6227e848ea15","Type":"ContainerStarted","Data":"4ed7d820d77ed20afacf7d07db65e1e25610895dee06160c7c233aa856b7a72c"} Jan 20 16:57:36 crc kubenswrapper[4558]: I0120 16:57:36.402551 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-sync-mpmc8" event={"ID":"b3c9bd31-20f5-45ea-ae8d-987a8a70e321","Type":"ContainerStarted","Data":"d4d1df28a2973894463776ce7b632dd351dbc7ee6f63ab78f5cc26a71eda223f"} Jan 20 16:57:36 crc kubenswrapper[4558]: I0120 16:57:36.404023 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-sync-ckhth" event={"ID":"b79fb42a-3e70-4daa-b0ea-fe854def5825","Type":"ContainerStarted","Data":"218f67d660ed8f9f78b141cd28c9a5dcb77d2f1c59769fc787f836f2d3a884a4"} Jan 20 16:57:36 crc kubenswrapper[4558]: I0120 16:57:36.404058 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-sync-ckhth" event={"ID":"b79fb42a-3e70-4daa-b0ea-fe854def5825","Type":"ContainerStarted","Data":"d11a6b2c8a240887d185dccbe507617c83d9ba4668c7089d0f885be80c9a0050"} Jan 20 16:57:36 crc kubenswrapper[4558]: I0120 16:57:36.405234 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-fxc4n" event={"ID":"484f0ab5-2b8f-4204-b5d1-e032c159dbf2","Type":"ContainerStarted","Data":"3c5d4636841c553689fe99af2f7d7d5a269bad5dc903333adc71ad557501864a"} Jan 20 16:57:36 crc kubenswrapper[4558]: I0120 16:57:36.406689 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-sync-jwg24" event={"ID":"7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa","Type":"ContainerStarted","Data":"4cb53da6e78ccba02b3e0b49fd32433b44da1ccb3582085cf458effb14b08711"} Jan 20 16:57:36 crc kubenswrapper[4558]: I0120 16:57:36.407535 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"79a5c7ed-336f-47df-a568-0773deb035ef","Type":"ContainerStarted","Data":"4b7e9a0783cdcf47d55236d9a1ed5e45c98198ad9ebc81ebdabbdd427eac15a8"} Jan 20 16:57:36 crc kubenswrapper[4558]: I0120 16:57:36.431335 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/keystone-bootstrap-fxc4n" podStartSLOduration=2.431320625 podStartE2EDuration="2.431320625s" podCreationTimestamp="2026-01-20 16:57:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:57:36.430263738 +0000 UTC m=+950.190601705" watchObservedRunningTime="2026-01-20 16:57:36.431320625 +0000 UTC m=+950.191658592" Jan 20 16:57:36 crc kubenswrapper[4558]: I0120 16:57:36.431577 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/neutron-db-sync-ckhth" podStartSLOduration=2.431572789 podStartE2EDuration="2.431572789s" podCreationTimestamp="2026-01-20 16:57:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:57:36.416107056 +0000 UTC m=+950.176445023" watchObservedRunningTime="2026-01-20 16:57:36.431572789 +0000 UTC m=+950.191910757" Jan 20 16:57:36 crc kubenswrapper[4558]: I0120 16:57:36.545816 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 16:57:36 crc kubenswrapper[4558]: W0120 16:57:36.554641 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod49756a66_f375_44c8_b2fb_f0a1da20ea32.slice/crio-e4d37616049eed91e13d87bbfd25939fa3298613a59a967a3d87cedf6b4064f7 WatchSource:0}: Error finding container e4d37616049eed91e13d87bbfd25939fa3298613a59a967a3d87cedf6b4064f7: Status 404 returned error can't find the container with id e4d37616049eed91e13d87bbfd25939fa3298613a59a967a3d87cedf6b4064f7 Jan 20 16:57:37 crc kubenswrapper[4558]: I0120 16:57:37.298775 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 16:57:37 crc kubenswrapper[4558]: I0120 16:57:37.326689 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 16:57:37 crc kubenswrapper[4558]: I0120 16:57:37.379139 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 16:57:37 crc kubenswrapper[4558]: I0120 16:57:37.434061 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"49756a66-f375-44c8-b2fb-f0a1da20ea32","Type":"ContainerStarted","Data":"8ee292b6755c15e25a4c14150cbc6425e95bd7d9b44a0ccac531a2e2ae971bfc"} Jan 20 16:57:37 crc kubenswrapper[4558]: I0120 16:57:37.434347 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"49756a66-f375-44c8-b2fb-f0a1da20ea32","Type":"ContainerStarted","Data":"e4d37616049eed91e13d87bbfd25939fa3298613a59a967a3d87cedf6b4064f7"} Jan 20 16:57:37 crc kubenswrapper[4558]: I0120 16:57:37.446782 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" 
event={"ID":"79a5c7ed-336f-47df-a568-0773deb035ef","Type":"ContainerStarted","Data":"c634ab0a436e61a0780c49cb600dcebd60f457dc231c1cf9edeab591b9b87718"} Jan 20 16:57:38 crc kubenswrapper[4558]: I0120 16:57:38.457876 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"49756a66-f375-44c8-b2fb-f0a1da20ea32","Type":"ContainerStarted","Data":"3c5b22111fa434b2a9ca6ededf7d7ab5fdd095fbfed2795efb8abea90ffd4cc5"} Jan 20 16:57:38 crc kubenswrapper[4558]: I0120 16:57:38.458055 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="49756a66-f375-44c8-b2fb-f0a1da20ea32" containerName="glance-httpd" containerID="cri-o://3c5b22111fa434b2a9ca6ededf7d7ab5fdd095fbfed2795efb8abea90ffd4cc5" gracePeriod=30 Jan 20 16:57:38 crc kubenswrapper[4558]: I0120 16:57:38.458004 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="49756a66-f375-44c8-b2fb-f0a1da20ea32" containerName="glance-log" containerID="cri-o://8ee292b6755c15e25a4c14150cbc6425e95bd7d9b44a0ccac531a2e2ae971bfc" gracePeriod=30 Jan 20 16:57:38 crc kubenswrapper[4558]: I0120 16:57:38.461009 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"79a5c7ed-336f-47df-a568-0773deb035ef","Type":"ContainerStarted","Data":"7a0021d6089e4d387c3496128d6af121e686a922c41c05a1b7f954161c54339b"} Jan 20 16:57:38 crc kubenswrapper[4558]: I0120 16:57:38.461149 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="79a5c7ed-336f-47df-a568-0773deb035ef" containerName="glance-log" containerID="cri-o://c634ab0a436e61a0780c49cb600dcebd60f457dc231c1cf9edeab591b9b87718" gracePeriod=30 Jan 20 16:57:38 crc kubenswrapper[4558]: I0120 16:57:38.461266 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="79a5c7ed-336f-47df-a568-0773deb035ef" containerName="glance-httpd" containerID="cri-o://7a0021d6089e4d387c3496128d6af121e686a922c41c05a1b7f954161c54339b" gracePeriod=30 Jan 20 16:57:38 crc kubenswrapper[4558]: I0120 16:57:38.486086 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-internal-api-0" podStartSLOduration=4.486072312 podStartE2EDuration="4.486072312s" podCreationTimestamp="2026-01-20 16:57:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:57:38.476552079 +0000 UTC m=+952.236890047" watchObservedRunningTime="2026-01-20 16:57:38.486072312 +0000 UTC m=+952.246410280" Jan 20 16:57:38 crc kubenswrapper[4558]: I0120 16:57:38.497990 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-external-api-0" podStartSLOduration=4.497976328 podStartE2EDuration="4.497976328s" podCreationTimestamp="2026-01-20 16:57:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:57:38.494450838 +0000 UTC m=+952.254788805" watchObservedRunningTime="2026-01-20 16:57:38.497976328 +0000 UTC m=+952.258314295" Jan 20 16:57:39 crc kubenswrapper[4558]: I0120 16:57:39.471497 4558 generic.go:334] "Generic 
(PLEG): container finished" podID="49756a66-f375-44c8-b2fb-f0a1da20ea32" containerID="3c5b22111fa434b2a9ca6ededf7d7ab5fdd095fbfed2795efb8abea90ffd4cc5" exitCode=0 Jan 20 16:57:39 crc kubenswrapper[4558]: I0120 16:57:39.471707 4558 generic.go:334] "Generic (PLEG): container finished" podID="49756a66-f375-44c8-b2fb-f0a1da20ea32" containerID="8ee292b6755c15e25a4c14150cbc6425e95bd7d9b44a0ccac531a2e2ae971bfc" exitCode=143 Jan 20 16:57:39 crc kubenswrapper[4558]: I0120 16:57:39.471580 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"49756a66-f375-44c8-b2fb-f0a1da20ea32","Type":"ContainerDied","Data":"3c5b22111fa434b2a9ca6ededf7d7ab5fdd095fbfed2795efb8abea90ffd4cc5"} Jan 20 16:57:39 crc kubenswrapper[4558]: I0120 16:57:39.471771 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"49756a66-f375-44c8-b2fb-f0a1da20ea32","Type":"ContainerDied","Data":"8ee292b6755c15e25a4c14150cbc6425e95bd7d9b44a0ccac531a2e2ae971bfc"} Jan 20 16:57:39 crc kubenswrapper[4558]: I0120 16:57:39.474023 4558 generic.go:334] "Generic (PLEG): container finished" podID="484f0ab5-2b8f-4204-b5d1-e032c159dbf2" containerID="3c5d4636841c553689fe99af2f7d7d5a269bad5dc903333adc71ad557501864a" exitCode=0 Jan 20 16:57:39 crc kubenswrapper[4558]: I0120 16:57:39.474095 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-fxc4n" event={"ID":"484f0ab5-2b8f-4204-b5d1-e032c159dbf2","Type":"ContainerDied","Data":"3c5d4636841c553689fe99af2f7d7d5a269bad5dc903333adc71ad557501864a"} Jan 20 16:57:39 crc kubenswrapper[4558]: I0120 16:57:39.478541 4558 generic.go:334] "Generic (PLEG): container finished" podID="79a5c7ed-336f-47df-a568-0773deb035ef" containerID="7a0021d6089e4d387c3496128d6af121e686a922c41c05a1b7f954161c54339b" exitCode=0 Jan 20 16:57:39 crc kubenswrapper[4558]: I0120 16:57:39.478564 4558 generic.go:334] "Generic (PLEG): container finished" podID="79a5c7ed-336f-47df-a568-0773deb035ef" containerID="c634ab0a436e61a0780c49cb600dcebd60f457dc231c1cf9edeab591b9b87718" exitCode=143 Jan 20 16:57:39 crc kubenswrapper[4558]: I0120 16:57:39.478585 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"79a5c7ed-336f-47df-a568-0773deb035ef","Type":"ContainerDied","Data":"7a0021d6089e4d387c3496128d6af121e686a922c41c05a1b7f954161c54339b"} Jan 20 16:57:39 crc kubenswrapper[4558]: I0120 16:57:39.478607 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"79a5c7ed-336f-47df-a568-0773deb035ef","Type":"ContainerDied","Data":"c634ab0a436e61a0780c49cb600dcebd60f457dc231c1cf9edeab591b9b87718"} Jan 20 16:57:41 crc kubenswrapper[4558]: I0120 16:57:41.659275 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:57:41 crc kubenswrapper[4558]: I0120 16:57:41.664099 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-fxc4n" Jan 20 16:57:41 crc kubenswrapper[4558]: I0120 16:57:41.671306 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:57:41 crc kubenswrapper[4558]: I0120 16:57:41.725723 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/484f0ab5-2b8f-4204-b5d1-e032c159dbf2-combined-ca-bundle\") pod \"484f0ab5-2b8f-4204-b5d1-e032c159dbf2\" (UID: \"484f0ab5-2b8f-4204-b5d1-e032c159dbf2\") " Jan 20 16:57:41 crc kubenswrapper[4558]: I0120 16:57:41.726445 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/484f0ab5-2b8f-4204-b5d1-e032c159dbf2-credential-keys\") pod \"484f0ab5-2b8f-4204-b5d1-e032c159dbf2\" (UID: \"484f0ab5-2b8f-4204-b5d1-e032c159dbf2\") " Jan 20 16:57:41 crc kubenswrapper[4558]: I0120 16:57:41.726522 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/79a5c7ed-336f-47df-a568-0773deb035ef-scripts\") pod \"79a5c7ed-336f-47df-a568-0773deb035ef\" (UID: \"79a5c7ed-336f-47df-a568-0773deb035ef\") " Jan 20 16:57:41 crc kubenswrapper[4558]: I0120 16:57:41.726607 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lg4j5\" (UniqueName: \"kubernetes.io/projected/484f0ab5-2b8f-4204-b5d1-e032c159dbf2-kube-api-access-lg4j5\") pod \"484f0ab5-2b8f-4204-b5d1-e032c159dbf2\" (UID: \"484f0ab5-2b8f-4204-b5d1-e032c159dbf2\") " Jan 20 16:57:41 crc kubenswrapper[4558]: I0120 16:57:41.726653 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79a5c7ed-336f-47df-a568-0773deb035ef-combined-ca-bundle\") pod \"79a5c7ed-336f-47df-a568-0773deb035ef\" (UID: \"79a5c7ed-336f-47df-a568-0773deb035ef\") " Jan 20 16:57:41 crc kubenswrapper[4558]: I0120 16:57:41.726669 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/79a5c7ed-336f-47df-a568-0773deb035ef-public-tls-certs\") pod \"79a5c7ed-336f-47df-a568-0773deb035ef\" (UID: \"79a5c7ed-336f-47df-a568-0773deb035ef\") " Jan 20 16:57:41 crc kubenswrapper[4558]: I0120 16:57:41.726725 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/484f0ab5-2b8f-4204-b5d1-e032c159dbf2-scripts\") pod \"484f0ab5-2b8f-4204-b5d1-e032c159dbf2\" (UID: \"484f0ab5-2b8f-4204-b5d1-e032c159dbf2\") " Jan 20 16:57:41 crc kubenswrapper[4558]: I0120 16:57:41.726743 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zz6z5\" (UniqueName: \"kubernetes.io/projected/79a5c7ed-336f-47df-a568-0773deb035ef-kube-api-access-zz6z5\") pod \"79a5c7ed-336f-47df-a568-0773deb035ef\" (UID: \"79a5c7ed-336f-47df-a568-0773deb035ef\") " Jan 20 16:57:41 crc kubenswrapper[4558]: I0120 16:57:41.726761 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"79a5c7ed-336f-47df-a568-0773deb035ef\" (UID: \"79a5c7ed-336f-47df-a568-0773deb035ef\") " Jan 20 16:57:41 crc kubenswrapper[4558]: I0120 16:57:41.726777 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/484f0ab5-2b8f-4204-b5d1-e032c159dbf2-config-data\") pod \"484f0ab5-2b8f-4204-b5d1-e032c159dbf2\" (UID: 
\"484f0ab5-2b8f-4204-b5d1-e032c159dbf2\") " Jan 20 16:57:41 crc kubenswrapper[4558]: I0120 16:57:41.726796 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/79a5c7ed-336f-47df-a568-0773deb035ef-logs\") pod \"79a5c7ed-336f-47df-a568-0773deb035ef\" (UID: \"79a5c7ed-336f-47df-a568-0773deb035ef\") " Jan 20 16:57:41 crc kubenswrapper[4558]: I0120 16:57:41.726827 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/79a5c7ed-336f-47df-a568-0773deb035ef-config-data\") pod \"79a5c7ed-336f-47df-a568-0773deb035ef\" (UID: \"79a5c7ed-336f-47df-a568-0773deb035ef\") " Jan 20 16:57:41 crc kubenswrapper[4558]: I0120 16:57:41.726842 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/79a5c7ed-336f-47df-a568-0773deb035ef-httpd-run\") pod \"79a5c7ed-336f-47df-a568-0773deb035ef\" (UID: \"79a5c7ed-336f-47df-a568-0773deb035ef\") " Jan 20 16:57:41 crc kubenswrapper[4558]: I0120 16:57:41.726856 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/484f0ab5-2b8f-4204-b5d1-e032c159dbf2-fernet-keys\") pod \"484f0ab5-2b8f-4204-b5d1-e032c159dbf2\" (UID: \"484f0ab5-2b8f-4204-b5d1-e032c159dbf2\") " Jan 20 16:57:41 crc kubenswrapper[4558]: I0120 16:57:41.730321 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/79a5c7ed-336f-47df-a568-0773deb035ef-logs" (OuterVolumeSpecName: "logs") pod "79a5c7ed-336f-47df-a568-0773deb035ef" (UID: "79a5c7ed-336f-47df-a568-0773deb035ef"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 16:57:41 crc kubenswrapper[4558]: I0120 16:57:41.731606 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/79a5c7ed-336f-47df-a568-0773deb035ef-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "79a5c7ed-336f-47df-a568-0773deb035ef" (UID: "79a5c7ed-336f-47df-a568-0773deb035ef"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 16:57:41 crc kubenswrapper[4558]: I0120 16:57:41.733897 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "glance") pod "79a5c7ed-336f-47df-a568-0773deb035ef" (UID: "79a5c7ed-336f-47df-a568-0773deb035ef"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 16:57:41 crc kubenswrapper[4558]: I0120 16:57:41.733978 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/484f0ab5-2b8f-4204-b5d1-e032c159dbf2-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "484f0ab5-2b8f-4204-b5d1-e032c159dbf2" (UID: "484f0ab5-2b8f-4204-b5d1-e032c159dbf2"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:57:41 crc kubenswrapper[4558]: I0120 16:57:41.735242 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/484f0ab5-2b8f-4204-b5d1-e032c159dbf2-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "484f0ab5-2b8f-4204-b5d1-e032c159dbf2" (UID: "484f0ab5-2b8f-4204-b5d1-e032c159dbf2"). InnerVolumeSpecName "credential-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:57:41 crc kubenswrapper[4558]: I0120 16:57:41.735598 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/79a5c7ed-336f-47df-a568-0773deb035ef-kube-api-access-zz6z5" (OuterVolumeSpecName: "kube-api-access-zz6z5") pod "79a5c7ed-336f-47df-a568-0773deb035ef" (UID: "79a5c7ed-336f-47df-a568-0773deb035ef"). InnerVolumeSpecName "kube-api-access-zz6z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:57:41 crc kubenswrapper[4558]: I0120 16:57:41.736053 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/79a5c7ed-336f-47df-a568-0773deb035ef-scripts" (OuterVolumeSpecName: "scripts") pod "79a5c7ed-336f-47df-a568-0773deb035ef" (UID: "79a5c7ed-336f-47df-a568-0773deb035ef"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:57:41 crc kubenswrapper[4558]: I0120 16:57:41.737684 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/484f0ab5-2b8f-4204-b5d1-e032c159dbf2-kube-api-access-lg4j5" (OuterVolumeSpecName: "kube-api-access-lg4j5") pod "484f0ab5-2b8f-4204-b5d1-e032c159dbf2" (UID: "484f0ab5-2b8f-4204-b5d1-e032c159dbf2"). InnerVolumeSpecName "kube-api-access-lg4j5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:57:41 crc kubenswrapper[4558]: I0120 16:57:41.745220 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/484f0ab5-2b8f-4204-b5d1-e032c159dbf2-scripts" (OuterVolumeSpecName: "scripts") pod "484f0ab5-2b8f-4204-b5d1-e032c159dbf2" (UID: "484f0ab5-2b8f-4204-b5d1-e032c159dbf2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:57:41 crc kubenswrapper[4558]: I0120 16:57:41.750928 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/484f0ab5-2b8f-4204-b5d1-e032c159dbf2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "484f0ab5-2b8f-4204-b5d1-e032c159dbf2" (UID: "484f0ab5-2b8f-4204-b5d1-e032c159dbf2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:57:41 crc kubenswrapper[4558]: I0120 16:57:41.757030 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/484f0ab5-2b8f-4204-b5d1-e032c159dbf2-config-data" (OuterVolumeSpecName: "config-data") pod "484f0ab5-2b8f-4204-b5d1-e032c159dbf2" (UID: "484f0ab5-2b8f-4204-b5d1-e032c159dbf2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:57:41 crc kubenswrapper[4558]: I0120 16:57:41.762008 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/79a5c7ed-336f-47df-a568-0773deb035ef-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "79a5c7ed-336f-47df-a568-0773deb035ef" (UID: "79a5c7ed-336f-47df-a568-0773deb035ef"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:57:41 crc kubenswrapper[4558]: I0120 16:57:41.792409 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/79a5c7ed-336f-47df-a568-0773deb035ef-config-data" (OuterVolumeSpecName: "config-data") pod "79a5c7ed-336f-47df-a568-0773deb035ef" (UID: "79a5c7ed-336f-47df-a568-0773deb035ef"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:57:41 crc kubenswrapper[4558]: I0120 16:57:41.801456 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/79a5c7ed-336f-47df-a568-0773deb035ef-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "79a5c7ed-336f-47df-a568-0773deb035ef" (UID: "79a5c7ed-336f-47df-a568-0773deb035ef"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:57:41 crc kubenswrapper[4558]: I0120 16:57:41.827864 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"49756a66-f375-44c8-b2fb-f0a1da20ea32\" (UID: \"49756a66-f375-44c8-b2fb-f0a1da20ea32\") " Jan 20 16:57:41 crc kubenswrapper[4558]: I0120 16:57:41.827988 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/49756a66-f375-44c8-b2fb-f0a1da20ea32-internal-tls-certs\") pod \"49756a66-f375-44c8-b2fb-f0a1da20ea32\" (UID: \"49756a66-f375-44c8-b2fb-f0a1da20ea32\") " Jan 20 16:57:41 crc kubenswrapper[4558]: I0120 16:57:41.828463 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/49756a66-f375-44c8-b2fb-f0a1da20ea32-logs\") pod \"49756a66-f375-44c8-b2fb-f0a1da20ea32\" (UID: \"49756a66-f375-44c8-b2fb-f0a1da20ea32\") " Jan 20 16:57:41 crc kubenswrapper[4558]: I0120 16:57:41.828955 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/49756a66-f375-44c8-b2fb-f0a1da20ea32-logs" (OuterVolumeSpecName: "logs") pod "49756a66-f375-44c8-b2fb-f0a1da20ea32" (UID: "49756a66-f375-44c8-b2fb-f0a1da20ea32"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 16:57:41 crc kubenswrapper[4558]: I0120 16:57:41.829079 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/49756a66-f375-44c8-b2fb-f0a1da20ea32-combined-ca-bundle\") pod \"49756a66-f375-44c8-b2fb-f0a1da20ea32\" (UID: \"49756a66-f375-44c8-b2fb-f0a1da20ea32\") " Jan 20 16:57:41 crc kubenswrapper[4558]: I0120 16:57:41.829629 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/49756a66-f375-44c8-b2fb-f0a1da20ea32-httpd-run\") pod \"49756a66-f375-44c8-b2fb-f0a1da20ea32\" (UID: \"49756a66-f375-44c8-b2fb-f0a1da20ea32\") " Jan 20 16:57:41 crc kubenswrapper[4558]: I0120 16:57:41.829653 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/49756a66-f375-44c8-b2fb-f0a1da20ea32-config-data\") pod \"49756a66-f375-44c8-b2fb-f0a1da20ea32\" (UID: \"49756a66-f375-44c8-b2fb-f0a1da20ea32\") " Jan 20 16:57:41 crc kubenswrapper[4558]: I0120 16:57:41.829678 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cw9n2\" (UniqueName: \"kubernetes.io/projected/49756a66-f375-44c8-b2fb-f0a1da20ea32-kube-api-access-cw9n2\") pod \"49756a66-f375-44c8-b2fb-f0a1da20ea32\" (UID: \"49756a66-f375-44c8-b2fb-f0a1da20ea32\") " Jan 20 16:57:41 crc kubenswrapper[4558]: I0120 16:57:41.829691 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/49756a66-f375-44c8-b2fb-f0a1da20ea32-scripts\") pod \"49756a66-f375-44c8-b2fb-f0a1da20ea32\" (UID: \"49756a66-f375-44c8-b2fb-f0a1da20ea32\") " Jan 20 16:57:41 crc kubenswrapper[4558]: I0120 16:57:41.830282 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/49756a66-f375-44c8-b2fb-f0a1da20ea32-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "49756a66-f375-44c8-b2fb-f0a1da20ea32" (UID: "49756a66-f375-44c8-b2fb-f0a1da20ea32"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 16:57:41 crc kubenswrapper[4558]: I0120 16:57:41.831103 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lg4j5\" (UniqueName: \"kubernetes.io/projected/484f0ab5-2b8f-4204-b5d1-e032c159dbf2-kube-api-access-lg4j5\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:41 crc kubenswrapper[4558]: I0120 16:57:41.831118 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79a5c7ed-336f-47df-a568-0773deb035ef-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:41 crc kubenswrapper[4558]: I0120 16:57:41.831126 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/79a5c7ed-336f-47df-a568-0773deb035ef-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:41 crc kubenswrapper[4558]: I0120 16:57:41.831135 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/49756a66-f375-44c8-b2fb-f0a1da20ea32-logs\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:41 crc kubenswrapper[4558]: I0120 16:57:41.831142 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/484f0ab5-2b8f-4204-b5d1-e032c159dbf2-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:41 crc kubenswrapper[4558]: I0120 16:57:41.831150 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zz6z5\" (UniqueName: \"kubernetes.io/projected/79a5c7ed-336f-47df-a568-0773deb035ef-kube-api-access-zz6z5\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:41 crc kubenswrapper[4558]: I0120 16:57:41.831202 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Jan 20 16:57:41 crc kubenswrapper[4558]: I0120 16:57:41.831211 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/484f0ab5-2b8f-4204-b5d1-e032c159dbf2-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:41 crc kubenswrapper[4558]: I0120 16:57:41.831219 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/79a5c7ed-336f-47df-a568-0773deb035ef-logs\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:41 crc kubenswrapper[4558]: I0120 16:57:41.831227 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/79a5c7ed-336f-47df-a568-0773deb035ef-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:41 crc kubenswrapper[4558]: I0120 16:57:41.831233 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/79a5c7ed-336f-47df-a568-0773deb035ef-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:41 crc kubenswrapper[4558]: I0120 16:57:41.831240 4558 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/484f0ab5-2b8f-4204-b5d1-e032c159dbf2-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:41 crc kubenswrapper[4558]: I0120 16:57:41.831249 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/484f0ab5-2b8f-4204-b5d1-e032c159dbf2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:41 crc kubenswrapper[4558]: I0120 16:57:41.831256 4558 
reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/49756a66-f375-44c8-b2fb-f0a1da20ea32-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:41 crc kubenswrapper[4558]: I0120 16:57:41.831263 4558 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/484f0ab5-2b8f-4204-b5d1-e032c159dbf2-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:41 crc kubenswrapper[4558]: I0120 16:57:41.831270 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/79a5c7ed-336f-47df-a568-0773deb035ef-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:41 crc kubenswrapper[4558]: I0120 16:57:41.832843 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49756a66-f375-44c8-b2fb-f0a1da20ea32-kube-api-access-cw9n2" (OuterVolumeSpecName: "kube-api-access-cw9n2") pod "49756a66-f375-44c8-b2fb-f0a1da20ea32" (UID: "49756a66-f375-44c8-b2fb-f0a1da20ea32"). InnerVolumeSpecName "kube-api-access-cw9n2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:57:41 crc kubenswrapper[4558]: I0120 16:57:41.834859 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49756a66-f375-44c8-b2fb-f0a1da20ea32-scripts" (OuterVolumeSpecName: "scripts") pod "49756a66-f375-44c8-b2fb-f0a1da20ea32" (UID: "49756a66-f375-44c8-b2fb-f0a1da20ea32"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:57:41 crc kubenswrapper[4558]: I0120 16:57:41.838440 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "glance") pod "49756a66-f375-44c8-b2fb-f0a1da20ea32" (UID: "49756a66-f375-44c8-b2fb-f0a1da20ea32"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 16:57:41 crc kubenswrapper[4558]: I0120 16:57:41.850564 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc" Jan 20 16:57:41 crc kubenswrapper[4558]: I0120 16:57:41.851324 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49756a66-f375-44c8-b2fb-f0a1da20ea32-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "49756a66-f375-44c8-b2fb-f0a1da20ea32" (UID: "49756a66-f375-44c8-b2fb-f0a1da20ea32"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:57:41 crc kubenswrapper[4558]: I0120 16:57:41.866821 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49756a66-f375-44c8-b2fb-f0a1da20ea32-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "49756a66-f375-44c8-b2fb-f0a1da20ea32" (UID: "49756a66-f375-44c8-b2fb-f0a1da20ea32"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:57:41 crc kubenswrapper[4558]: I0120 16:57:41.868277 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49756a66-f375-44c8-b2fb-f0a1da20ea32-config-data" (OuterVolumeSpecName: "config-data") pod "49756a66-f375-44c8-b2fb-f0a1da20ea32" (UID: "49756a66-f375-44c8-b2fb-f0a1da20ea32"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:57:41 crc kubenswrapper[4558]: I0120 16:57:41.932956 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/49756a66-f375-44c8-b2fb-f0a1da20ea32-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:41 crc kubenswrapper[4558]: I0120 16:57:41.932983 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:41 crc kubenswrapper[4558]: I0120 16:57:41.932993 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/49756a66-f375-44c8-b2fb-f0a1da20ea32-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:41 crc kubenswrapper[4558]: I0120 16:57:41.933001 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cw9n2\" (UniqueName: \"kubernetes.io/projected/49756a66-f375-44c8-b2fb-f0a1da20ea32-kube-api-access-cw9n2\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:41 crc kubenswrapper[4558]: I0120 16:57:41.933010 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/49756a66-f375-44c8-b2fb-f0a1da20ea32-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:41 crc kubenswrapper[4558]: I0120 16:57:41.933035 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Jan 20 16:57:41 crc kubenswrapper[4558]: I0120 16:57:41.933044 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/49756a66-f375-44c8-b2fb-f0a1da20ea32-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:41 crc kubenswrapper[4558]: I0120 16:57:41.945710 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.035225 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.499492 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"49756a66-f375-44c8-b2fb-f0a1da20ea32","Type":"ContainerDied","Data":"e4d37616049eed91e13d87bbfd25939fa3298613a59a967a3d87cedf6b4064f7"} Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.499542 4558 scope.go:117] "RemoveContainer" containerID="3c5b22111fa434b2a9ca6ededf7d7ab5fdd095fbfed2795efb8abea90ffd4cc5" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.499630 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.503149 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-fxc4n" event={"ID":"484f0ab5-2b8f-4204-b5d1-e032c159dbf2","Type":"ContainerDied","Data":"de3f98ab064ffb0dd88423b3c28a997bb70522f9d43f9deff95d670b0cd5d561"} Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.503215 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="de3f98ab064ffb0dd88423b3c28a997bb70522f9d43f9deff95d670b0cd5d561" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.503289 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-fxc4n" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.508533 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"79a5c7ed-336f-47df-a568-0773deb035ef","Type":"ContainerDied","Data":"4b7e9a0783cdcf47d55236d9a1ed5e45c98198ad9ebc81ebdabbdd427eac15a8"} Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.508580 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.531461 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.545987 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.556496 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.599287 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49756a66-f375-44c8-b2fb-f0a1da20ea32" path="/var/lib/kubelet/pods/49756a66-f375-44c8-b2fb-f0a1da20ea32/volumes" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.599881 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.599911 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 16:57:42 crc kubenswrapper[4558]: E0120 16:57:42.600136 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="484f0ab5-2b8f-4204-b5d1-e032c159dbf2" containerName="keystone-bootstrap" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.600151 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="484f0ab5-2b8f-4204-b5d1-e032c159dbf2" containerName="keystone-bootstrap" Jan 20 16:57:42 crc kubenswrapper[4558]: E0120 16:57:42.604726 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="49756a66-f375-44c8-b2fb-f0a1da20ea32" containerName="glance-log" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.604736 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="49756a66-f375-44c8-b2fb-f0a1da20ea32" containerName="glance-log" Jan 20 16:57:42 crc kubenswrapper[4558]: E0120 16:57:42.604750 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79a5c7ed-336f-47df-a568-0773deb035ef" containerName="glance-log" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.604757 4558 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="79a5c7ed-336f-47df-a568-0773deb035ef" containerName="glance-log" Jan 20 16:57:42 crc kubenswrapper[4558]: E0120 16:57:42.604781 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79a5c7ed-336f-47df-a568-0773deb035ef" containerName="glance-httpd" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.604789 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="79a5c7ed-336f-47df-a568-0773deb035ef" containerName="glance-httpd" Jan 20 16:57:42 crc kubenswrapper[4558]: E0120 16:57:42.604798 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="49756a66-f375-44c8-b2fb-f0a1da20ea32" containerName="glance-httpd" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.604836 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="49756a66-f375-44c8-b2fb-f0a1da20ea32" containerName="glance-httpd" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.604975 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="49756a66-f375-44c8-b2fb-f0a1da20ea32" containerName="glance-httpd" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.604991 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="79a5c7ed-336f-47df-a568-0773deb035ef" containerName="glance-httpd" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.605001 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="484f0ab5-2b8f-4204-b5d1-e032c159dbf2" containerName="keystone-bootstrap" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.605009 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="79a5c7ed-336f-47df-a568-0773deb035ef" containerName="glance-log" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.605018 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="49756a66-f375-44c8-b2fb-f0a1da20ea32" containerName="glance-log" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.605931 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.605955 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.606090 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.607707 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.607767 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.609128 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-scripts" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.609390 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-internal-svc" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.609578 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-glance-dockercfg-67jwr" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.609831 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-internal-config-data" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.610017 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-public-svc" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.610253 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-external-config-data" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.729383 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-fxc4n"] Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.734442 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-fxc4n"] Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.745597 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0cdccf60-59fa-48ac-997d-0b60175ce57e-config-data\") pod \"glance-default-external-api-0\" (UID: \"0cdccf60-59fa-48ac-997d-0b60175ce57e\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.745703 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0cdccf60-59fa-48ac-997d-0b60175ce57e-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"0cdccf60-59fa-48ac-997d-0b60175ce57e\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.745797 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/90838b03-cde2-4954-a70b-b75b79a2038e-logs\") pod \"glance-default-internal-api-0\" (UID: \"90838b03-cde2-4954-a70b-b75b79a2038e\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.745954 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0cdccf60-59fa-48ac-997d-0b60175ce57e-logs\") pod \"glance-default-external-api-0\" (UID: \"0cdccf60-59fa-48ac-997d-0b60175ce57e\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.745985 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/90838b03-cde2-4954-a70b-b75b79a2038e-scripts\") pod \"glance-default-internal-api-0\" (UID: \"90838b03-cde2-4954-a70b-b75b79a2038e\") " 
pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.746015 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/90838b03-cde2-4954-a70b-b75b79a2038e-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"90838b03-cde2-4954-a70b-b75b79a2038e\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.746030 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qhfsv\" (UniqueName: \"kubernetes.io/projected/90838b03-cde2-4954-a70b-b75b79a2038e-kube-api-access-qhfsv\") pod \"glance-default-internal-api-0\" (UID: \"90838b03-cde2-4954-a70b-b75b79a2038e\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.746093 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0cdccf60-59fa-48ac-997d-0b60175ce57e-scripts\") pod \"glance-default-external-api-0\" (UID: \"0cdccf60-59fa-48ac-997d-0b60175ce57e\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.746142 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90838b03-cde2-4954-a70b-b75b79a2038e-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"90838b03-cde2-4954-a70b-b75b79a2038e\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.746236 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0cdccf60-59fa-48ac-997d-0b60175ce57e-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"0cdccf60-59fa-48ac-997d-0b60175ce57e\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.746285 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"90838b03-cde2-4954-a70b-b75b79a2038e\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.746337 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0cdccf60-59fa-48ac-997d-0b60175ce57e-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"0cdccf60-59fa-48ac-997d-0b60175ce57e\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.746398 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-external-api-0\" (UID: \"0cdccf60-59fa-48ac-997d-0b60175ce57e\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.746561 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/90838b03-cde2-4954-a70b-b75b79a2038e-config-data\") pod \"glance-default-internal-api-0\" (UID: \"90838b03-cde2-4954-a70b-b75b79a2038e\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.746610 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-65flp\" (UniqueName: \"kubernetes.io/projected/0cdccf60-59fa-48ac-997d-0b60175ce57e-kube-api-access-65flp\") pod \"glance-default-external-api-0\" (UID: \"0cdccf60-59fa-48ac-997d-0b60175ce57e\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.746647 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/90838b03-cde2-4954-a70b-b75b79a2038e-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"90838b03-cde2-4954-a70b-b75b79a2038e\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.833433 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-vkskq"] Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.834553 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-vkskq" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.836914 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.837091 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-scripts" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.837241 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-config-data" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.837344 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-keystone-dockercfg-wlvl5" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.837498 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"osp-secret" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.847584 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0cdccf60-59fa-48ac-997d-0b60175ce57e-logs\") pod \"glance-default-external-api-0\" (UID: \"0cdccf60-59fa-48ac-997d-0b60175ce57e\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.847615 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/90838b03-cde2-4954-a70b-b75b79a2038e-scripts\") pod \"glance-default-internal-api-0\" (UID: \"90838b03-cde2-4954-a70b-b75b79a2038e\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.847638 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qhfsv\" (UniqueName: \"kubernetes.io/projected/90838b03-cde2-4954-a70b-b75b79a2038e-kube-api-access-qhfsv\") pod \"glance-default-internal-api-0\" (UID: \"90838b03-cde2-4954-a70b-b75b79a2038e\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:57:42 crc 
kubenswrapper[4558]: I0120 16:57:42.847654 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/90838b03-cde2-4954-a70b-b75b79a2038e-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"90838b03-cde2-4954-a70b-b75b79a2038e\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.847667 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0cdccf60-59fa-48ac-997d-0b60175ce57e-scripts\") pod \"glance-default-external-api-0\" (UID: \"0cdccf60-59fa-48ac-997d-0b60175ce57e\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.847687 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90838b03-cde2-4954-a70b-b75b79a2038e-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"90838b03-cde2-4954-a70b-b75b79a2038e\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.847702 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0cdccf60-59fa-48ac-997d-0b60175ce57e-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"0cdccf60-59fa-48ac-997d-0b60175ce57e\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.847722 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"90838b03-cde2-4954-a70b-b75b79a2038e\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.847741 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0cdccf60-59fa-48ac-997d-0b60175ce57e-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"0cdccf60-59fa-48ac-997d-0b60175ce57e\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.847770 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-external-api-0\" (UID: \"0cdccf60-59fa-48ac-997d-0b60175ce57e\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.847787 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/90838b03-cde2-4954-a70b-b75b79a2038e-config-data\") pod \"glance-default-internal-api-0\" (UID: \"90838b03-cde2-4954-a70b-b75b79a2038e\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.847801 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-65flp\" (UniqueName: \"kubernetes.io/projected/0cdccf60-59fa-48ac-997d-0b60175ce57e-kube-api-access-65flp\") pod \"glance-default-external-api-0\" (UID: \"0cdccf60-59fa-48ac-997d-0b60175ce57e\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 
16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.847823 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/90838b03-cde2-4954-a70b-b75b79a2038e-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"90838b03-cde2-4954-a70b-b75b79a2038e\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.847866 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0cdccf60-59fa-48ac-997d-0b60175ce57e-config-data\") pod \"glance-default-external-api-0\" (UID: \"0cdccf60-59fa-48ac-997d-0b60175ce57e\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.847887 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0cdccf60-59fa-48ac-997d-0b60175ce57e-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"0cdccf60-59fa-48ac-997d-0b60175ce57e\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.847907 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/90838b03-cde2-4954-a70b-b75b79a2038e-logs\") pod \"glance-default-internal-api-0\" (UID: \"90838b03-cde2-4954-a70b-b75b79a2038e\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.848310 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/90838b03-cde2-4954-a70b-b75b79a2038e-logs\") pod \"glance-default-internal-api-0\" (UID: \"90838b03-cde2-4954-a70b-b75b79a2038e\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.848801 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"90838b03-cde2-4954-a70b-b75b79a2038e\") device mount path \"/mnt/openstack/pv07\"" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.849135 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-external-api-0\" (UID: \"0cdccf60-59fa-48ac-997d-0b60175ce57e\") device mount path \"/mnt/openstack/pv01\"" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.849359 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0cdccf60-59fa-48ac-997d-0b60175ce57e-logs\") pod \"glance-default-external-api-0\" (UID: \"0cdccf60-59fa-48ac-997d-0b60175ce57e\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.849585 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0cdccf60-59fa-48ac-997d-0b60175ce57e-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"0cdccf60-59fa-48ac-997d-0b60175ce57e\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 
16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.853205 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0cdccf60-59fa-48ac-997d-0b60175ce57e-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"0cdccf60-59fa-48ac-997d-0b60175ce57e\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.853666 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0cdccf60-59fa-48ac-997d-0b60175ce57e-config-data\") pod \"glance-default-external-api-0\" (UID: \"0cdccf60-59fa-48ac-997d-0b60175ce57e\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.853756 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-vkskq"] Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.854230 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/90838b03-cde2-4954-a70b-b75b79a2038e-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"90838b03-cde2-4954-a70b-b75b79a2038e\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.854334 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/90838b03-cde2-4954-a70b-b75b79a2038e-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"90838b03-cde2-4954-a70b-b75b79a2038e\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.854420 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/90838b03-cde2-4954-a70b-b75b79a2038e-scripts\") pod \"glance-default-internal-api-0\" (UID: \"90838b03-cde2-4954-a70b-b75b79a2038e\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.854552 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90838b03-cde2-4954-a70b-b75b79a2038e-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"90838b03-cde2-4954-a70b-b75b79a2038e\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.861263 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/90838b03-cde2-4954-a70b-b75b79a2038e-config-data\") pod \"glance-default-internal-api-0\" (UID: \"90838b03-cde2-4954-a70b-b75b79a2038e\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.862385 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0cdccf60-59fa-48ac-997d-0b60175ce57e-scripts\") pod \"glance-default-external-api-0\" (UID: \"0cdccf60-59fa-48ac-997d-0b60175ce57e\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.864639 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qhfsv\" (UniqueName: \"kubernetes.io/projected/90838b03-cde2-4954-a70b-b75b79a2038e-kube-api-access-qhfsv\") pod 
\"glance-default-internal-api-0\" (UID: \"90838b03-cde2-4954-a70b-b75b79a2038e\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.865028 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-65flp\" (UniqueName: \"kubernetes.io/projected/0cdccf60-59fa-48ac-997d-0b60175ce57e-kube-api-access-65flp\") pod \"glance-default-external-api-0\" (UID: \"0cdccf60-59fa-48ac-997d-0b60175ce57e\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.869088 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-external-api-0\" (UID: \"0cdccf60-59fa-48ac-997d-0b60175ce57e\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.869436 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0cdccf60-59fa-48ac-997d-0b60175ce57e-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"0cdccf60-59fa-48ac-997d-0b60175ce57e\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.876770 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"90838b03-cde2-4954-a70b-b75b79a2038e\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.924899 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.933129 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.949302 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pcwc2\" (UniqueName: \"kubernetes.io/projected/5a82a446-869f-466c-a345-bd211d1851c2-kube-api-access-pcwc2\") pod \"keystone-bootstrap-vkskq\" (UID: \"5a82a446-869f-466c-a345-bd211d1851c2\") " pod="openstack-kuttl-tests/keystone-bootstrap-vkskq" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.949374 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a82a446-869f-466c-a345-bd211d1851c2-config-data\") pod \"keystone-bootstrap-vkskq\" (UID: \"5a82a446-869f-466c-a345-bd211d1851c2\") " pod="openstack-kuttl-tests/keystone-bootstrap-vkskq" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.949402 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/5a82a446-869f-466c-a345-bd211d1851c2-fernet-keys\") pod \"keystone-bootstrap-vkskq\" (UID: \"5a82a446-869f-466c-a345-bd211d1851c2\") " pod="openstack-kuttl-tests/keystone-bootstrap-vkskq" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.949422 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a82a446-869f-466c-a345-bd211d1851c2-combined-ca-bundle\") pod \"keystone-bootstrap-vkskq\" (UID: \"5a82a446-869f-466c-a345-bd211d1851c2\") " pod="openstack-kuttl-tests/keystone-bootstrap-vkskq" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.949441 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5a82a446-869f-466c-a345-bd211d1851c2-scripts\") pod \"keystone-bootstrap-vkskq\" (UID: \"5a82a446-869f-466c-a345-bd211d1851c2\") " pod="openstack-kuttl-tests/keystone-bootstrap-vkskq" Jan 20 16:57:42 crc kubenswrapper[4558]: I0120 16:57:42.949479 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/5a82a446-869f-466c-a345-bd211d1851c2-credential-keys\") pod \"keystone-bootstrap-vkskq\" (UID: \"5a82a446-869f-466c-a345-bd211d1851c2\") " pod="openstack-kuttl-tests/keystone-bootstrap-vkskq" Jan 20 16:57:43 crc kubenswrapper[4558]: I0120 16:57:43.051197 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/5a82a446-869f-466c-a345-bd211d1851c2-credential-keys\") pod \"keystone-bootstrap-vkskq\" (UID: \"5a82a446-869f-466c-a345-bd211d1851c2\") " pod="openstack-kuttl-tests/keystone-bootstrap-vkskq" Jan 20 16:57:43 crc kubenswrapper[4558]: I0120 16:57:43.051270 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pcwc2\" (UniqueName: \"kubernetes.io/projected/5a82a446-869f-466c-a345-bd211d1851c2-kube-api-access-pcwc2\") pod \"keystone-bootstrap-vkskq\" (UID: \"5a82a446-869f-466c-a345-bd211d1851c2\") " pod="openstack-kuttl-tests/keystone-bootstrap-vkskq" Jan 20 16:57:43 crc kubenswrapper[4558]: I0120 16:57:43.051378 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/5a82a446-869f-466c-a345-bd211d1851c2-config-data\") pod \"keystone-bootstrap-vkskq\" (UID: \"5a82a446-869f-466c-a345-bd211d1851c2\") " pod="openstack-kuttl-tests/keystone-bootstrap-vkskq" Jan 20 16:57:43 crc kubenswrapper[4558]: I0120 16:57:43.051413 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/5a82a446-869f-466c-a345-bd211d1851c2-fernet-keys\") pod \"keystone-bootstrap-vkskq\" (UID: \"5a82a446-869f-466c-a345-bd211d1851c2\") " pod="openstack-kuttl-tests/keystone-bootstrap-vkskq" Jan 20 16:57:43 crc kubenswrapper[4558]: I0120 16:57:43.051439 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a82a446-869f-466c-a345-bd211d1851c2-combined-ca-bundle\") pod \"keystone-bootstrap-vkskq\" (UID: \"5a82a446-869f-466c-a345-bd211d1851c2\") " pod="openstack-kuttl-tests/keystone-bootstrap-vkskq" Jan 20 16:57:43 crc kubenswrapper[4558]: I0120 16:57:43.051462 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5a82a446-869f-466c-a345-bd211d1851c2-scripts\") pod \"keystone-bootstrap-vkskq\" (UID: \"5a82a446-869f-466c-a345-bd211d1851c2\") " pod="openstack-kuttl-tests/keystone-bootstrap-vkskq" Jan 20 16:57:43 crc kubenswrapper[4558]: I0120 16:57:43.056329 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5a82a446-869f-466c-a345-bd211d1851c2-scripts\") pod \"keystone-bootstrap-vkskq\" (UID: \"5a82a446-869f-466c-a345-bd211d1851c2\") " pod="openstack-kuttl-tests/keystone-bootstrap-vkskq" Jan 20 16:57:43 crc kubenswrapper[4558]: I0120 16:57:43.056394 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a82a446-869f-466c-a345-bd211d1851c2-combined-ca-bundle\") pod \"keystone-bootstrap-vkskq\" (UID: \"5a82a446-869f-466c-a345-bd211d1851c2\") " pod="openstack-kuttl-tests/keystone-bootstrap-vkskq" Jan 20 16:57:43 crc kubenswrapper[4558]: I0120 16:57:43.056977 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/5a82a446-869f-466c-a345-bd211d1851c2-fernet-keys\") pod \"keystone-bootstrap-vkskq\" (UID: \"5a82a446-869f-466c-a345-bd211d1851c2\") " pod="openstack-kuttl-tests/keystone-bootstrap-vkskq" Jan 20 16:57:43 crc kubenswrapper[4558]: I0120 16:57:43.057075 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/5a82a446-869f-466c-a345-bd211d1851c2-credential-keys\") pod \"keystone-bootstrap-vkskq\" (UID: \"5a82a446-869f-466c-a345-bd211d1851c2\") " pod="openstack-kuttl-tests/keystone-bootstrap-vkskq" Jan 20 16:57:43 crc kubenswrapper[4558]: I0120 16:57:43.067243 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pcwc2\" (UniqueName: \"kubernetes.io/projected/5a82a446-869f-466c-a345-bd211d1851c2-kube-api-access-pcwc2\") pod \"keystone-bootstrap-vkskq\" (UID: \"5a82a446-869f-466c-a345-bd211d1851c2\") " pod="openstack-kuttl-tests/keystone-bootstrap-vkskq" Jan 20 16:57:43 crc kubenswrapper[4558]: I0120 16:57:43.067941 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a82a446-869f-466c-a345-bd211d1851c2-config-data\") pod 
\"keystone-bootstrap-vkskq\" (UID: \"5a82a446-869f-466c-a345-bd211d1851c2\") " pod="openstack-kuttl-tests/keystone-bootstrap-vkskq" Jan 20 16:57:43 crc kubenswrapper[4558]: I0120 16:57:43.151261 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-vkskq" Jan 20 16:57:43 crc kubenswrapper[4558]: I0120 16:57:43.525078 4558 generic.go:334] "Generic (PLEG): container finished" podID="b79fb42a-3e70-4daa-b0ea-fe854def5825" containerID="218f67d660ed8f9f78b141cd28c9a5dcb77d2f1c59769fc787f836f2d3a884a4" exitCode=0 Jan 20 16:57:43 crc kubenswrapper[4558]: I0120 16:57:43.525132 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-sync-ckhth" event={"ID":"b79fb42a-3e70-4daa-b0ea-fe854def5825","Type":"ContainerDied","Data":"218f67d660ed8f9f78b141cd28c9a5dcb77d2f1c59769fc787f836f2d3a884a4"} Jan 20 16:57:43 crc kubenswrapper[4558]: I0120 16:57:43.780038 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-sj9bb" Jan 20 16:57:43 crc kubenswrapper[4558]: I0120 16:57:43.820698 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-sj9bb" Jan 20 16:57:44 crc kubenswrapper[4558]: I0120 16:57:44.391861 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-sj9bb"] Jan 20 16:57:44 crc kubenswrapper[4558]: I0120 16:57:44.575154 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="484f0ab5-2b8f-4204-b5d1-e032c159dbf2" path="/var/lib/kubelet/pods/484f0ab5-2b8f-4204-b5d1-e032c159dbf2/volumes" Jan 20 16:57:44 crc kubenswrapper[4558]: I0120 16:57:44.576190 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="79a5c7ed-336f-47df-a568-0773deb035ef" path="/var/lib/kubelet/pods/79a5c7ed-336f-47df-a568-0773deb035ef/volumes" Jan 20 16:57:44 crc kubenswrapper[4558]: I0120 16:57:44.576698 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-6bqsp"] Jan 20 16:57:44 crc kubenswrapper[4558]: I0120 16:57:44.578091 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-6bqsp" podUID="de39c54d-ba42-4c81-8ad7-be41cc1cc50a" containerName="registry-server" containerID="cri-o://63c7a5b810fb37016641bbc5429eba354b75b0db1651a2a29da3876e4b0212c1" gracePeriod=2 Jan 20 16:57:45 crc kubenswrapper[4558]: I0120 16:57:45.540444 4558 generic.go:334] "Generic (PLEG): container finished" podID="de39c54d-ba42-4c81-8ad7-be41cc1cc50a" containerID="63c7a5b810fb37016641bbc5429eba354b75b0db1651a2a29da3876e4b0212c1" exitCode=0 Jan 20 16:57:45 crc kubenswrapper[4558]: I0120 16:57:45.540580 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6bqsp" event={"ID":"de39c54d-ba42-4c81-8ad7-be41cc1cc50a","Type":"ContainerDied","Data":"63c7a5b810fb37016641bbc5429eba354b75b0db1651a2a29da3876e4b0212c1"} Jan 20 16:57:47 crc kubenswrapper[4558]: E0120 16:57:47.493277 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 63c7a5b810fb37016641bbc5429eba354b75b0db1651a2a29da3876e4b0212c1 is running failed: container process not found" containerID="63c7a5b810fb37016641bbc5429eba354b75b0db1651a2a29da3876e4b0212c1" cmd=["grpc_health_probe","-addr=:50051"] Jan 20 16:57:47 crc kubenswrapper[4558]: E0120 
16:57:47.493720 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 63c7a5b810fb37016641bbc5429eba354b75b0db1651a2a29da3876e4b0212c1 is running failed: container process not found" containerID="63c7a5b810fb37016641bbc5429eba354b75b0db1651a2a29da3876e4b0212c1" cmd=["grpc_health_probe","-addr=:50051"] Jan 20 16:57:47 crc kubenswrapper[4558]: E0120 16:57:47.494147 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 63c7a5b810fb37016641bbc5429eba354b75b0db1651a2a29da3876e4b0212c1 is running failed: container process not found" containerID="63c7a5b810fb37016641bbc5429eba354b75b0db1651a2a29da3876e4b0212c1" cmd=["grpc_health_probe","-addr=:50051"] Jan 20 16:57:47 crc kubenswrapper[4558]: E0120 16:57:47.494232 4558 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 63c7a5b810fb37016641bbc5429eba354b75b0db1651a2a29da3876e4b0212c1 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/redhat-operators-6bqsp" podUID="de39c54d-ba42-4c81-8ad7-be41cc1cc50a" containerName="registry-server" Jan 20 16:57:51 crc kubenswrapper[4558]: E0120 16:57:51.705296 4558 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-ceilometer-central@sha256:5a548c25fe3d02f7a042cb0a6d28fc8039a34c4a3b3d07aadda4aba3a926e777" Jan 20 16:57:51 crc kubenswrapper[4558]: E0120 16:57:51.705564 4558 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:ceilometer-central-agent,Image:quay.io/podified-antelope-centos9/openstack-ceilometer-central@sha256:5a548c25fe3d02f7a042cb0a6d28fc8039a34c4a3b3d07aadda4aba3a926e777,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n564h59chbdhb9h78h5b9h98hb8h5dbh5f4h5fdh665h64fh66fh58fh88h5c4h5b8h664h674h5f8hc5hch7dh66fh585hb7h8h659h5cch665h5f4q,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/var/lib/openstack/bin,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:ceilometer-central-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-ghzgm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/python3 
/var/lib/openstack/bin/centralhealth.py],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:300,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ceilometer-0_openstack-kuttl-tests(c1e3af55-1788-4d23-b3c5-6227e848ea15): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 20 16:57:51 crc kubenswrapper[4558]: I0120 16:57:51.724123 4558 scope.go:117] "RemoveContainer" containerID="8ee292b6755c15e25a4c14150cbc6425e95bd7d9b44a0ccac531a2e2ae971bfc" Jan 20 16:57:51 crc kubenswrapper[4558]: I0120 16:57:51.775424 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-db-sync-ckhth" Jan 20 16:57:51 crc kubenswrapper[4558]: I0120 16:57:51.888084 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/b79fb42a-3e70-4daa-b0ea-fe854def5825-config\") pod \"b79fb42a-3e70-4daa-b0ea-fe854def5825\" (UID: \"b79fb42a-3e70-4daa-b0ea-fe854def5825\") " Jan 20 16:57:51 crc kubenswrapper[4558]: I0120 16:57:51.888121 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b79fb42a-3e70-4daa-b0ea-fe854def5825-combined-ca-bundle\") pod \"b79fb42a-3e70-4daa-b0ea-fe854def5825\" (UID: \"b79fb42a-3e70-4daa-b0ea-fe854def5825\") " Jan 20 16:57:51 crc kubenswrapper[4558]: I0120 16:57:51.888299 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zh7rj\" (UniqueName: \"kubernetes.io/projected/b79fb42a-3e70-4daa-b0ea-fe854def5825-kube-api-access-zh7rj\") pod \"b79fb42a-3e70-4daa-b0ea-fe854def5825\" (UID: \"b79fb42a-3e70-4daa-b0ea-fe854def5825\") " Jan 20 16:57:51 crc kubenswrapper[4558]: I0120 16:57:51.893109 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b79fb42a-3e70-4daa-b0ea-fe854def5825-kube-api-access-zh7rj" (OuterVolumeSpecName: "kube-api-access-zh7rj") pod "b79fb42a-3e70-4daa-b0ea-fe854def5825" (UID: "b79fb42a-3e70-4daa-b0ea-fe854def5825"). InnerVolumeSpecName "kube-api-access-zh7rj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:57:51 crc kubenswrapper[4558]: I0120 16:57:51.905608 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b79fb42a-3e70-4daa-b0ea-fe854def5825-config" (OuterVolumeSpecName: "config") pod "b79fb42a-3e70-4daa-b0ea-fe854def5825" (UID: "b79fb42a-3e70-4daa-b0ea-fe854def5825"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:57:51 crc kubenswrapper[4558]: I0120 16:57:51.907907 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b79fb42a-3e70-4daa-b0ea-fe854def5825-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b79fb42a-3e70-4daa-b0ea-fe854def5825" (UID: "b79fb42a-3e70-4daa-b0ea-fe854def5825"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:57:51 crc kubenswrapper[4558]: I0120 16:57:51.990557 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zh7rj\" (UniqueName: \"kubernetes.io/projected/b79fb42a-3e70-4daa-b0ea-fe854def5825-kube-api-access-zh7rj\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:51 crc kubenswrapper[4558]: I0120 16:57:51.990583 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/b79fb42a-3e70-4daa-b0ea-fe854def5825-config\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:51 crc kubenswrapper[4558]: I0120 16:57:51.990593 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b79fb42a-3e70-4daa-b0ea-fe854def5825-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:52 crc kubenswrapper[4558]: I0120 16:57:52.610932 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-sync-ckhth" event={"ID":"b79fb42a-3e70-4daa-b0ea-fe854def5825","Type":"ContainerDied","Data":"d11a6b2c8a240887d185dccbe507617c83d9ba4668c7089d0f885be80c9a0050"} Jan 20 16:57:52 crc kubenswrapper[4558]: I0120 16:57:52.611113 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d11a6b2c8a240887d185dccbe507617c83d9ba4668c7089d0f885be80c9a0050" Jan 20 16:57:52 crc kubenswrapper[4558]: I0120 16:57:52.611295 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-db-sync-ckhth" Jan 20 16:57:52 crc kubenswrapper[4558]: I0120 16:57:52.667511 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-6bqsp" Jan 20 16:57:52 crc kubenswrapper[4558]: I0120 16:57:52.676995 4558 scope.go:117] "RemoveContainer" containerID="7a0021d6089e4d387c3496128d6af121e686a922c41c05a1b7f954161c54339b" Jan 20 16:57:52 crc kubenswrapper[4558]: I0120 16:57:52.706328 4558 scope.go:117] "RemoveContainer" containerID="c634ab0a436e61a0780c49cb600dcebd60f457dc231c1cf9edeab591b9b87718" Jan 20 16:57:52 crc kubenswrapper[4558]: I0120 16:57:52.813540 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de39c54d-ba42-4c81-8ad7-be41cc1cc50a-utilities\") pod \"de39c54d-ba42-4c81-8ad7-be41cc1cc50a\" (UID: \"de39c54d-ba42-4c81-8ad7-be41cc1cc50a\") " Jan 20 16:57:52 crc kubenswrapper[4558]: I0120 16:57:52.813833 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hxb8c\" (UniqueName: \"kubernetes.io/projected/de39c54d-ba42-4c81-8ad7-be41cc1cc50a-kube-api-access-hxb8c\") pod \"de39c54d-ba42-4c81-8ad7-be41cc1cc50a\" (UID: \"de39c54d-ba42-4c81-8ad7-be41cc1cc50a\") " Jan 20 16:57:52 crc kubenswrapper[4558]: I0120 16:57:52.813977 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de39c54d-ba42-4c81-8ad7-be41cc1cc50a-catalog-content\") pod \"de39c54d-ba42-4c81-8ad7-be41cc1cc50a\" (UID: \"de39c54d-ba42-4c81-8ad7-be41cc1cc50a\") " Jan 20 16:57:52 crc kubenswrapper[4558]: I0120 16:57:52.813979 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/de39c54d-ba42-4c81-8ad7-be41cc1cc50a-utilities" (OuterVolumeSpecName: "utilities") pod "de39c54d-ba42-4c81-8ad7-be41cc1cc50a" (UID: "de39c54d-ba42-4c81-8ad7-be41cc1cc50a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 16:57:52 crc kubenswrapper[4558]: I0120 16:57:52.814683 4558 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de39c54d-ba42-4c81-8ad7-be41cc1cc50a-utilities\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:52 crc kubenswrapper[4558]: I0120 16:57:52.816783 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/de39c54d-ba42-4c81-8ad7-be41cc1cc50a-kube-api-access-hxb8c" (OuterVolumeSpecName: "kube-api-access-hxb8c") pod "de39c54d-ba42-4c81-8ad7-be41cc1cc50a" (UID: "de39c54d-ba42-4c81-8ad7-be41cc1cc50a"). InnerVolumeSpecName "kube-api-access-hxb8c". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:57:52 crc kubenswrapper[4558]: I0120 16:57:52.899212 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/de39c54d-ba42-4c81-8ad7-be41cc1cc50a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "de39c54d-ba42-4c81-8ad7-be41cc1cc50a" (UID: "de39c54d-ba42-4c81-8ad7-be41cc1cc50a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 16:57:52 crc kubenswrapper[4558]: I0120 16:57:52.923409 4558 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de39c54d-ba42-4c81-8ad7-be41cc1cc50a-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:52 crc kubenswrapper[4558]: I0120 16:57:52.923437 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hxb8c\" (UniqueName: \"kubernetes.io/projected/de39c54d-ba42-4c81-8ad7-be41cc1cc50a-kube-api-access-hxb8c\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:52 crc kubenswrapper[4558]: I0120 16:57:52.975324 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-577dd7499c-hrw6z"] Jan 20 16:57:52 crc kubenswrapper[4558]: E0120 16:57:52.975591 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de39c54d-ba42-4c81-8ad7-be41cc1cc50a" containerName="extract-content" Jan 20 16:57:52 crc kubenswrapper[4558]: I0120 16:57:52.975608 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="de39c54d-ba42-4c81-8ad7-be41cc1cc50a" containerName="extract-content" Jan 20 16:57:52 crc kubenswrapper[4558]: E0120 16:57:52.984780 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b79fb42a-3e70-4daa-b0ea-fe854def5825" containerName="neutron-db-sync" Jan 20 16:57:52 crc kubenswrapper[4558]: I0120 16:57:52.984806 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b79fb42a-3e70-4daa-b0ea-fe854def5825" containerName="neutron-db-sync" Jan 20 16:57:52 crc kubenswrapper[4558]: E0120 16:57:52.984835 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de39c54d-ba42-4c81-8ad7-be41cc1cc50a" containerName="extract-utilities" Jan 20 16:57:52 crc kubenswrapper[4558]: I0120 16:57:52.984842 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="de39c54d-ba42-4c81-8ad7-be41cc1cc50a" containerName="extract-utilities" Jan 20 16:57:52 crc kubenswrapper[4558]: E0120 16:57:52.984850 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de39c54d-ba42-4c81-8ad7-be41cc1cc50a" containerName="registry-server" Jan 20 16:57:52 crc kubenswrapper[4558]: I0120 16:57:52.984855 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="de39c54d-ba42-4c81-8ad7-be41cc1cc50a" containerName="registry-server" Jan 20 16:57:52 crc kubenswrapper[4558]: I0120 16:57:52.985062 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b79fb42a-3e70-4daa-b0ea-fe854def5825" containerName="neutron-db-sync" Jan 20 16:57:52 crc kubenswrapper[4558]: I0120 16:57:52.985083 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="de39c54d-ba42-4c81-8ad7-be41cc1cc50a" containerName="registry-server" Jan 20 16:57:52 crc kubenswrapper[4558]: I0120 16:57:52.985704 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-577dd7499c-hrw6z"] Jan 20 16:57:52 crc kubenswrapper[4558]: I0120 16:57:52.985776 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-577dd7499c-hrw6z" Jan 20 16:57:52 crc kubenswrapper[4558]: I0120 16:57:52.987936 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-neutron-dockercfg-7d264" Jan 20 16:57:52 crc kubenswrapper[4558]: I0120 16:57:52.988022 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-neutron-ovndbs" Jan 20 16:57:52 crc kubenswrapper[4558]: I0120 16:57:52.988347 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-config" Jan 20 16:57:52 crc kubenswrapper[4558]: I0120 16:57:52.988458 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-httpd-config" Jan 20 16:57:53 crc kubenswrapper[4558]: I0120 16:57:53.096034 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-vkskq"] Jan 20 16:57:53 crc kubenswrapper[4558]: I0120 16:57:53.119754 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 16:57:53 crc kubenswrapper[4558]: I0120 16:57:53.123457 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"osp-secret" Jan 20 16:57:53 crc kubenswrapper[4558]: I0120 16:57:53.126857 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/8949f5f0-c260-4456-8d11-e91486237ef6-httpd-config\") pod \"neutron-577dd7499c-hrw6z\" (UID: \"8949f5f0-c260-4456-8d11-e91486237ef6\") " pod="openstack-kuttl-tests/neutron-577dd7499c-hrw6z" Jan 20 16:57:53 crc kubenswrapper[4558]: W0120 16:57:53.126928 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod90838b03_cde2_4954_a70b_b75b79a2038e.slice/crio-5fee9c465f43def4d3a086596df727cd73ce8459d36cf5743c6a11f53e7071d9 WatchSource:0}: Error finding container 5fee9c465f43def4d3a086596df727cd73ce8459d36cf5743c6a11f53e7071d9: Status 404 returned error can't find the container with id 5fee9c465f43def4d3a086596df727cd73ce8459d36cf5743c6a11f53e7071d9 Jan 20 16:57:53 crc kubenswrapper[4558]: I0120 16:57:53.127086 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/8949f5f0-c260-4456-8d11-e91486237ef6-config\") pod \"neutron-577dd7499c-hrw6z\" (UID: \"8949f5f0-c260-4456-8d11-e91486237ef6\") " pod="openstack-kuttl-tests/neutron-577dd7499c-hrw6z" Jan 20 16:57:53 crc kubenswrapper[4558]: I0120 16:57:53.127267 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2zxdx\" (UniqueName: \"kubernetes.io/projected/8949f5f0-c260-4456-8d11-e91486237ef6-kube-api-access-2zxdx\") pod \"neutron-577dd7499c-hrw6z\" (UID: \"8949f5f0-c260-4456-8d11-e91486237ef6\") " pod="openstack-kuttl-tests/neutron-577dd7499c-hrw6z" Jan 20 16:57:53 crc kubenswrapper[4558]: I0120 16:57:53.127394 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/8949f5f0-c260-4456-8d11-e91486237ef6-ovndb-tls-certs\") pod \"neutron-577dd7499c-hrw6z\" (UID: \"8949f5f0-c260-4456-8d11-e91486237ef6\") " pod="openstack-kuttl-tests/neutron-577dd7499c-hrw6z" Jan 20 16:57:53 crc kubenswrapper[4558]: I0120 16:57:53.127606 4558 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8949f5f0-c260-4456-8d11-e91486237ef6-combined-ca-bundle\") pod \"neutron-577dd7499c-hrw6z\" (UID: \"8949f5f0-c260-4456-8d11-e91486237ef6\") " pod="openstack-kuttl-tests/neutron-577dd7499c-hrw6z" Jan 20 16:57:53 crc kubenswrapper[4558]: I0120 16:57:53.230351 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/8949f5f0-c260-4456-8d11-e91486237ef6-ovndb-tls-certs\") pod \"neutron-577dd7499c-hrw6z\" (UID: \"8949f5f0-c260-4456-8d11-e91486237ef6\") " pod="openstack-kuttl-tests/neutron-577dd7499c-hrw6z" Jan 20 16:57:53 crc kubenswrapper[4558]: I0120 16:57:53.230409 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8949f5f0-c260-4456-8d11-e91486237ef6-combined-ca-bundle\") pod \"neutron-577dd7499c-hrw6z\" (UID: \"8949f5f0-c260-4456-8d11-e91486237ef6\") " pod="openstack-kuttl-tests/neutron-577dd7499c-hrw6z" Jan 20 16:57:53 crc kubenswrapper[4558]: I0120 16:57:53.230449 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/8949f5f0-c260-4456-8d11-e91486237ef6-httpd-config\") pod \"neutron-577dd7499c-hrw6z\" (UID: \"8949f5f0-c260-4456-8d11-e91486237ef6\") " pod="openstack-kuttl-tests/neutron-577dd7499c-hrw6z" Jan 20 16:57:53 crc kubenswrapper[4558]: I0120 16:57:53.230519 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/8949f5f0-c260-4456-8d11-e91486237ef6-config\") pod \"neutron-577dd7499c-hrw6z\" (UID: \"8949f5f0-c260-4456-8d11-e91486237ef6\") " pod="openstack-kuttl-tests/neutron-577dd7499c-hrw6z" Jan 20 16:57:53 crc kubenswrapper[4558]: I0120 16:57:53.230560 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2zxdx\" (UniqueName: \"kubernetes.io/projected/8949f5f0-c260-4456-8d11-e91486237ef6-kube-api-access-2zxdx\") pod \"neutron-577dd7499c-hrw6z\" (UID: \"8949f5f0-c260-4456-8d11-e91486237ef6\") " pod="openstack-kuttl-tests/neutron-577dd7499c-hrw6z" Jan 20 16:57:53 crc kubenswrapper[4558]: I0120 16:57:53.234936 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8949f5f0-c260-4456-8d11-e91486237ef6-combined-ca-bundle\") pod \"neutron-577dd7499c-hrw6z\" (UID: \"8949f5f0-c260-4456-8d11-e91486237ef6\") " pod="openstack-kuttl-tests/neutron-577dd7499c-hrw6z" Jan 20 16:57:53 crc kubenswrapper[4558]: I0120 16:57:53.243131 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/8949f5f0-c260-4456-8d11-e91486237ef6-ovndb-tls-certs\") pod \"neutron-577dd7499c-hrw6z\" (UID: \"8949f5f0-c260-4456-8d11-e91486237ef6\") " pod="openstack-kuttl-tests/neutron-577dd7499c-hrw6z" Jan 20 16:57:53 crc kubenswrapper[4558]: I0120 16:57:53.243320 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/8949f5f0-c260-4456-8d11-e91486237ef6-httpd-config\") pod \"neutron-577dd7499c-hrw6z\" (UID: \"8949f5f0-c260-4456-8d11-e91486237ef6\") " pod="openstack-kuttl-tests/neutron-577dd7499c-hrw6z" Jan 20 16:57:53 crc kubenswrapper[4558]: I0120 16:57:53.248596 4558 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-2zxdx\" (UniqueName: \"kubernetes.io/projected/8949f5f0-c260-4456-8d11-e91486237ef6-kube-api-access-2zxdx\") pod \"neutron-577dd7499c-hrw6z\" (UID: \"8949f5f0-c260-4456-8d11-e91486237ef6\") " pod="openstack-kuttl-tests/neutron-577dd7499c-hrw6z" Jan 20 16:57:53 crc kubenswrapper[4558]: I0120 16:57:53.255082 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/8949f5f0-c260-4456-8d11-e91486237ef6-config\") pod \"neutron-577dd7499c-hrw6z\" (UID: \"8949f5f0-c260-4456-8d11-e91486237ef6\") " pod="openstack-kuttl-tests/neutron-577dd7499c-hrw6z" Jan 20 16:57:53 crc kubenswrapper[4558]: I0120 16:57:53.286082 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 16:57:53 crc kubenswrapper[4558]: W0120 16:57:53.299887 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0cdccf60_59fa_48ac_997d_0b60175ce57e.slice/crio-014518ad9c0876dae5ce745b6c17b69d0d7d344e92b6a86d82ab53f989bd68bd WatchSource:0}: Error finding container 014518ad9c0876dae5ce745b6c17b69d0d7d344e92b6a86d82ab53f989bd68bd: Status 404 returned error can't find the container with id 014518ad9c0876dae5ce745b6c17b69d0d7d344e92b6a86d82ab53f989bd68bd Jan 20 16:57:53 crc kubenswrapper[4558]: I0120 16:57:53.309033 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-577dd7499c-hrw6z" Jan 20 16:57:53 crc kubenswrapper[4558]: I0120 16:57:53.627658 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-sync-jwg24" event={"ID":"7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa","Type":"ContainerStarted","Data":"f01eee54b0096cee030175949e49bf2c2cae19d2175a23fa1d7f52c350aef666"} Jan 20 16:57:53 crc kubenswrapper[4558]: I0120 16:57:53.628718 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"0cdccf60-59fa-48ac-997d-0b60175ce57e","Type":"ContainerStarted","Data":"014518ad9c0876dae5ce745b6c17b69d0d7d344e92b6a86d82ab53f989bd68bd"} Jan 20 16:57:53 crc kubenswrapper[4558]: I0120 16:57:53.637076 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-sync-mpmc8" event={"ID":"b3c9bd31-20f5-45ea-ae8d-987a8a70e321","Type":"ContainerStarted","Data":"e4a4514a229126f0ce1c13f448b71f47a96fadb42e52108d7c6e75d1a62f839b"} Jan 20 16:57:53 crc kubenswrapper[4558]: I0120 16:57:53.639145 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"90838b03-cde2-4954-a70b-b75b79a2038e","Type":"ContainerStarted","Data":"5fee9c465f43def4d3a086596df727cd73ce8459d36cf5743c6a11f53e7071d9"} Jan 20 16:57:53 crc kubenswrapper[4558]: I0120 16:57:53.644629 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-sync-d6pfc" event={"ID":"b1e54a34-818b-4918-baeb-586ff225fe1f","Type":"ContainerStarted","Data":"06a97eded1d2e3329c2e57b788138eac12416ca0b8878a26d14405b5accd7c67"} Jan 20 16:57:53 crc kubenswrapper[4558]: I0120 16:57:53.651201 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6bqsp" event={"ID":"de39c54d-ba42-4c81-8ad7-be41cc1cc50a","Type":"ContainerDied","Data":"fc045b32d968b216642c2aa23ce5173405159009b66cd550a65fe27ec850b6b6"} Jan 20 16:57:53 crc 
kubenswrapper[4558]: I0120 16:57:53.651233 4558 scope.go:117] "RemoveContainer" containerID="63c7a5b810fb37016641bbc5429eba354b75b0db1651a2a29da3876e4b0212c1" Jan 20 16:57:53 crc kubenswrapper[4558]: I0120 16:57:53.651303 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6bqsp" Jan 20 16:57:53 crc kubenswrapper[4558]: I0120 16:57:53.660419 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-vkskq" event={"ID":"5a82a446-869f-466c-a345-bd211d1851c2","Type":"ContainerStarted","Data":"f0cbb47b3b8abf3b84596f44ff9f49275729177b2875c4e8e764e817b8d6eb91"} Jan 20 16:57:53 crc kubenswrapper[4558]: I0120 16:57:53.660441 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-vkskq" event={"ID":"5a82a446-869f-466c-a345-bd211d1851c2","Type":"ContainerStarted","Data":"258afccf4fbf9be861474bd36a7e4e0551b0ff93dcb00a54d84718092cde0f42"} Jan 20 16:57:53 crc kubenswrapper[4558]: I0120 16:57:53.660831 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/cinder-db-sync-jwg24" podStartSLOduration=2.7800068700000002 podStartE2EDuration="19.660568507s" podCreationTimestamp="2026-01-20 16:57:34 +0000 UTC" firstStartedPulling="2026-01-20 16:57:35.834720994 +0000 UTC m=+949.595058962" lastFinishedPulling="2026-01-20 16:57:52.715282631 +0000 UTC m=+966.475620599" observedRunningTime="2026-01-20 16:57:53.6429469 +0000 UTC m=+967.403284867" watchObservedRunningTime="2026-01-20 16:57:53.660568507 +0000 UTC m=+967.420906474" Jan 20 16:57:53 crc kubenswrapper[4558]: I0120 16:57:53.672365 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-db-sync-mpmc8" podStartSLOduration=2.893249109 podStartE2EDuration="19.672350924s" podCreationTimestamp="2026-01-20 16:57:34 +0000 UTC" firstStartedPulling="2026-01-20 16:57:35.873853863 +0000 UTC m=+949.634191830" lastFinishedPulling="2026-01-20 16:57:52.652955677 +0000 UTC m=+966.413293645" observedRunningTime="2026-01-20 16:57:53.665504379 +0000 UTC m=+967.425842347" watchObservedRunningTime="2026-01-20 16:57:53.672350924 +0000 UTC m=+967.432688891" Jan 20 16:57:53 crc kubenswrapper[4558]: I0120 16:57:53.707726 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-6bqsp"] Jan 20 16:57:53 crc kubenswrapper[4558]: I0120 16:57:53.714223 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-6bqsp"] Jan 20 16:57:53 crc kubenswrapper[4558]: I0120 16:57:53.715216 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/keystone-bootstrap-vkskq" podStartSLOduration=11.715200611 podStartE2EDuration="11.715200611s" podCreationTimestamp="2026-01-20 16:57:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:57:53.707615859 +0000 UTC m=+967.467953826" watchObservedRunningTime="2026-01-20 16:57:53.715200611 +0000 UTC m=+967.475538578" Jan 20 16:57:53 crc kubenswrapper[4558]: I0120 16:57:53.723510 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/placement-db-sync-d6pfc" podStartSLOduration=2.84809579 podStartE2EDuration="19.723496532s" podCreationTimestamp="2026-01-20 16:57:34 +0000 UTC" firstStartedPulling="2026-01-20 16:57:35.752859687 +0000 UTC 
m=+949.513197654" lastFinishedPulling="2026-01-20 16:57:52.628260419 +0000 UTC m=+966.388598396" observedRunningTime="2026-01-20 16:57:53.717103771 +0000 UTC m=+967.477441738" watchObservedRunningTime="2026-01-20 16:57:53.723496532 +0000 UTC m=+967.483834499" Jan 20 16:57:53 crc kubenswrapper[4558]: I0120 16:57:53.756300 4558 scope.go:117] "RemoveContainer" containerID="4d1d6026b5db667653a9ab6d841936e591a00a57eb9ee22de72e9332b64914f0" Jan 20 16:57:53 crc kubenswrapper[4558]: I0120 16:57:53.866698 4558 scope.go:117] "RemoveContainer" containerID="92ad17e5910184ff9e29a8584c5d71850f001151c08ff64077b06c193c2249e9" Jan 20 16:57:54 crc kubenswrapper[4558]: I0120 16:57:54.217884 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-577dd7499c-hrw6z"] Jan 20 16:57:54 crc kubenswrapper[4558]: I0120 16:57:54.583851 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="de39c54d-ba42-4c81-8ad7-be41cc1cc50a" path="/var/lib/kubelet/pods/de39c54d-ba42-4c81-8ad7-be41cc1cc50a/volumes" Jan 20 16:57:54 crc kubenswrapper[4558]: I0120 16:57:54.681581 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"0cdccf60-59fa-48ac-997d-0b60175ce57e","Type":"ContainerStarted","Data":"93f8e8d586fe38469a94ae23eddbfc980dd890c101b66c86fce3f0bbfc106d56"} Jan 20 16:57:54 crc kubenswrapper[4558]: I0120 16:57:54.689705 4558 generic.go:334] "Generic (PLEG): container finished" podID="b1e54a34-818b-4918-baeb-586ff225fe1f" containerID="06a97eded1d2e3329c2e57b788138eac12416ca0b8878a26d14405b5accd7c67" exitCode=0 Jan 20 16:57:54 crc kubenswrapper[4558]: I0120 16:57:54.689776 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-sync-d6pfc" event={"ID":"b1e54a34-818b-4918-baeb-586ff225fe1f","Type":"ContainerDied","Data":"06a97eded1d2e3329c2e57b788138eac12416ca0b8878a26d14405b5accd7c67"} Jan 20 16:57:54 crc kubenswrapper[4558]: I0120 16:57:54.693476 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-577dd7499c-hrw6z" event={"ID":"8949f5f0-c260-4456-8d11-e91486237ef6","Type":"ContainerStarted","Data":"e0389f084c9272c173ccedb6457ef9b3c7a5face334cb3ee4b35a877d894c33b"} Jan 20 16:57:54 crc kubenswrapper[4558]: I0120 16:57:54.693513 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-577dd7499c-hrw6z" event={"ID":"8949f5f0-c260-4456-8d11-e91486237ef6","Type":"ContainerStarted","Data":"603431ee4995dde1cbb89b63f722d065d375af2a2da9a14c43c53c29740e2965"} Jan 20 16:57:54 crc kubenswrapper[4558]: I0120 16:57:54.693523 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-577dd7499c-hrw6z" event={"ID":"8949f5f0-c260-4456-8d11-e91486237ef6","Type":"ContainerStarted","Data":"3caa8749d23ebc62708f17bcc49d95ca30c24ee797d59ead6771b72c5f31f50f"} Jan 20 16:57:54 crc kubenswrapper[4558]: I0120 16:57:54.694249 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/neutron-577dd7499c-hrw6z" Jan 20 16:57:54 crc kubenswrapper[4558]: I0120 16:57:54.707378 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"c1e3af55-1788-4d23-b3c5-6227e848ea15","Type":"ContainerStarted","Data":"5df8eb7fa973376010ffaf1986356d036d2880d6b817d28856f89bb5815b2133"} Jan 20 16:57:54 crc kubenswrapper[4558]: I0120 16:57:54.713632 4558 generic.go:334] "Generic (PLEG): container finished" 
podID="b3c9bd31-20f5-45ea-ae8d-987a8a70e321" containerID="e4a4514a229126f0ce1c13f448b71f47a96fadb42e52108d7c6e75d1a62f839b" exitCode=0 Jan 20 16:57:54 crc kubenswrapper[4558]: I0120 16:57:54.713678 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-sync-mpmc8" event={"ID":"b3c9bd31-20f5-45ea-ae8d-987a8a70e321","Type":"ContainerDied","Data":"e4a4514a229126f0ce1c13f448b71f47a96fadb42e52108d7c6e75d1a62f839b"} Jan 20 16:57:54 crc kubenswrapper[4558]: I0120 16:57:54.716631 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"90838b03-cde2-4954-a70b-b75b79a2038e","Type":"ContainerStarted","Data":"298ca0eadbdc74c1da1f7848d31054f8093eaec760f25c58f34284a16c073e2d"} Jan 20 16:57:54 crc kubenswrapper[4558]: I0120 16:57:54.716661 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"90838b03-cde2-4954-a70b-b75b79a2038e","Type":"ContainerStarted","Data":"46fdfa3571a318310ff740837ecf48d117f01c12a9efa467bfc8b7e395e6abd4"} Jan 20 16:57:54 crc kubenswrapper[4558]: I0120 16:57:54.738584 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/neutron-577dd7499c-hrw6z" podStartSLOduration=2.738571113 podStartE2EDuration="2.738571113s" podCreationTimestamp="2026-01-20 16:57:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:57:54.723937896 +0000 UTC m=+968.484275864" watchObservedRunningTime="2026-01-20 16:57:54.738571113 +0000 UTC m=+968.498909080" Jan 20 16:57:54 crc kubenswrapper[4558]: I0120 16:57:54.764814 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-internal-api-0" podStartSLOduration=12.764800778 podStartE2EDuration="12.764800778s" podCreationTimestamp="2026-01-20 16:57:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:57:54.763221237 +0000 UTC m=+968.523559204" watchObservedRunningTime="2026-01-20 16:57:54.764800778 +0000 UTC m=+968.525138745" Jan 20 16:57:55 crc kubenswrapper[4558]: I0120 16:57:55.309970 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-5d557df858-frznf"] Jan 20 16:57:55 crc kubenswrapper[4558]: I0120 16:57:55.311297 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-5d557df858-frznf" Jan 20 16:57:55 crc kubenswrapper[4558]: I0120 16:57:55.326095 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-neutron-public-svc" Jan 20 16:57:55 crc kubenswrapper[4558]: I0120 16:57:55.327101 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-neutron-internal-svc" Jan 20 16:57:55 crc kubenswrapper[4558]: I0120 16:57:55.332151 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-5d557df858-frznf"] Jan 20 16:57:55 crc kubenswrapper[4558]: I0120 16:57:55.377726 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-internal-tls-certs\") pod \"neutron-5d557df858-frznf\" (UID: \"7d0b0e33-9ce7-44d9-8b09-6008715fe1a0\") " pod="openstack-kuttl-tests/neutron-5d557df858-frznf" Jan 20 16:57:55 crc kubenswrapper[4558]: I0120 16:57:55.377768 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-public-tls-certs\") pod \"neutron-5d557df858-frznf\" (UID: \"7d0b0e33-9ce7-44d9-8b09-6008715fe1a0\") " pod="openstack-kuttl-tests/neutron-5d557df858-frznf" Jan 20 16:57:55 crc kubenswrapper[4558]: I0120 16:57:55.377789 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-config\") pod \"neutron-5d557df858-frznf\" (UID: \"7d0b0e33-9ce7-44d9-8b09-6008715fe1a0\") " pod="openstack-kuttl-tests/neutron-5d557df858-frznf" Jan 20 16:57:55 crc kubenswrapper[4558]: I0120 16:57:55.377853 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-combined-ca-bundle\") pod \"neutron-5d557df858-frznf\" (UID: \"7d0b0e33-9ce7-44d9-8b09-6008715fe1a0\") " pod="openstack-kuttl-tests/neutron-5d557df858-frznf" Jan 20 16:57:55 crc kubenswrapper[4558]: I0120 16:57:55.377915 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-httpd-config\") pod \"neutron-5d557df858-frznf\" (UID: \"7d0b0e33-9ce7-44d9-8b09-6008715fe1a0\") " pod="openstack-kuttl-tests/neutron-5d557df858-frznf" Jan 20 16:57:55 crc kubenswrapper[4558]: I0120 16:57:55.377954 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g7g6w\" (UniqueName: \"kubernetes.io/projected/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-kube-api-access-g7g6w\") pod \"neutron-5d557df858-frznf\" (UID: \"7d0b0e33-9ce7-44d9-8b09-6008715fe1a0\") " pod="openstack-kuttl-tests/neutron-5d557df858-frznf" Jan 20 16:57:55 crc kubenswrapper[4558]: I0120 16:57:55.377997 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-ovndb-tls-certs\") pod \"neutron-5d557df858-frznf\" (UID: \"7d0b0e33-9ce7-44d9-8b09-6008715fe1a0\") " pod="openstack-kuttl-tests/neutron-5d557df858-frznf" Jan 20 16:57:55 crc kubenswrapper[4558]: I0120 
16:57:55.479280 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-combined-ca-bundle\") pod \"neutron-5d557df858-frznf\" (UID: \"7d0b0e33-9ce7-44d9-8b09-6008715fe1a0\") " pod="openstack-kuttl-tests/neutron-5d557df858-frznf" Jan 20 16:57:55 crc kubenswrapper[4558]: I0120 16:57:55.479400 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-httpd-config\") pod \"neutron-5d557df858-frznf\" (UID: \"7d0b0e33-9ce7-44d9-8b09-6008715fe1a0\") " pod="openstack-kuttl-tests/neutron-5d557df858-frznf" Jan 20 16:57:55 crc kubenswrapper[4558]: I0120 16:57:55.479570 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g7g6w\" (UniqueName: \"kubernetes.io/projected/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-kube-api-access-g7g6w\") pod \"neutron-5d557df858-frznf\" (UID: \"7d0b0e33-9ce7-44d9-8b09-6008715fe1a0\") " pod="openstack-kuttl-tests/neutron-5d557df858-frznf" Jan 20 16:57:55 crc kubenswrapper[4558]: I0120 16:57:55.479653 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-ovndb-tls-certs\") pod \"neutron-5d557df858-frznf\" (UID: \"7d0b0e33-9ce7-44d9-8b09-6008715fe1a0\") " pod="openstack-kuttl-tests/neutron-5d557df858-frznf" Jan 20 16:57:55 crc kubenswrapper[4558]: I0120 16:57:55.479775 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-internal-tls-certs\") pod \"neutron-5d557df858-frznf\" (UID: \"7d0b0e33-9ce7-44d9-8b09-6008715fe1a0\") " pod="openstack-kuttl-tests/neutron-5d557df858-frznf" Jan 20 16:57:55 crc kubenswrapper[4558]: I0120 16:57:55.479798 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-public-tls-certs\") pod \"neutron-5d557df858-frznf\" (UID: \"7d0b0e33-9ce7-44d9-8b09-6008715fe1a0\") " pod="openstack-kuttl-tests/neutron-5d557df858-frznf" Jan 20 16:57:55 crc kubenswrapper[4558]: I0120 16:57:55.479817 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-config\") pod \"neutron-5d557df858-frznf\" (UID: \"7d0b0e33-9ce7-44d9-8b09-6008715fe1a0\") " pod="openstack-kuttl-tests/neutron-5d557df858-frznf" Jan 20 16:57:55 crc kubenswrapper[4558]: I0120 16:57:55.484798 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-ovndb-tls-certs\") pod \"neutron-5d557df858-frznf\" (UID: \"7d0b0e33-9ce7-44d9-8b09-6008715fe1a0\") " pod="openstack-kuttl-tests/neutron-5d557df858-frznf" Jan 20 16:57:55 crc kubenswrapper[4558]: I0120 16:57:55.487412 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-public-tls-certs\") pod \"neutron-5d557df858-frznf\" (UID: \"7d0b0e33-9ce7-44d9-8b09-6008715fe1a0\") " pod="openstack-kuttl-tests/neutron-5d557df858-frznf" Jan 20 16:57:55 crc kubenswrapper[4558]: I0120 16:57:55.488473 4558 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-internal-tls-certs\") pod \"neutron-5d557df858-frznf\" (UID: \"7d0b0e33-9ce7-44d9-8b09-6008715fe1a0\") " pod="openstack-kuttl-tests/neutron-5d557df858-frznf" Jan 20 16:57:55 crc kubenswrapper[4558]: I0120 16:57:55.490271 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-httpd-config\") pod \"neutron-5d557df858-frznf\" (UID: \"7d0b0e33-9ce7-44d9-8b09-6008715fe1a0\") " pod="openstack-kuttl-tests/neutron-5d557df858-frznf" Jan 20 16:57:55 crc kubenswrapper[4558]: I0120 16:57:55.492697 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-combined-ca-bundle\") pod \"neutron-5d557df858-frznf\" (UID: \"7d0b0e33-9ce7-44d9-8b09-6008715fe1a0\") " pod="openstack-kuttl-tests/neutron-5d557df858-frznf" Jan 20 16:57:55 crc kubenswrapper[4558]: I0120 16:57:55.493802 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-config\") pod \"neutron-5d557df858-frznf\" (UID: \"7d0b0e33-9ce7-44d9-8b09-6008715fe1a0\") " pod="openstack-kuttl-tests/neutron-5d557df858-frznf" Jan 20 16:57:55 crc kubenswrapper[4558]: I0120 16:57:55.500715 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g7g6w\" (UniqueName: \"kubernetes.io/projected/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-kube-api-access-g7g6w\") pod \"neutron-5d557df858-frznf\" (UID: \"7d0b0e33-9ce7-44d9-8b09-6008715fe1a0\") " pod="openstack-kuttl-tests/neutron-5d557df858-frznf" Jan 20 16:57:55 crc kubenswrapper[4558]: I0120 16:57:55.631865 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-5d557df858-frznf" Jan 20 16:57:55 crc kubenswrapper[4558]: I0120 16:57:55.748750 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"0cdccf60-59fa-48ac-997d-0b60175ce57e","Type":"ContainerStarted","Data":"294a55c018cc11da5fc8b4a83211a9da8f8f646c9ce5de9f03eaacc39bb09bf3"} Jan 20 16:57:55 crc kubenswrapper[4558]: I0120 16:57:55.777471 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-external-api-0" podStartSLOduration=13.777449758 podStartE2EDuration="13.777449758s" podCreationTimestamp="2026-01-20 16:57:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:57:55.765812784 +0000 UTC m=+969.526150751" watchObservedRunningTime="2026-01-20 16:57:55.777449758 +0000 UTC m=+969.537787725" Jan 20 16:57:56 crc kubenswrapper[4558]: I0120 16:57:56.085210 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-5d557df858-frznf"] Jan 20 16:57:56 crc kubenswrapper[4558]: I0120 16:57:56.235812 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-db-sync-mpmc8" Jan 20 16:57:56 crc kubenswrapper[4558]: I0120 16:57:56.244011 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-db-sync-d6pfc" Jan 20 16:57:56 crc kubenswrapper[4558]: I0120 16:57:56.298714 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1e54a34-818b-4918-baeb-586ff225fe1f-combined-ca-bundle\") pod \"b1e54a34-818b-4918-baeb-586ff225fe1f\" (UID: \"b1e54a34-818b-4918-baeb-586ff225fe1f\") " Jan 20 16:57:56 crc kubenswrapper[4558]: I0120 16:57:56.298983 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b1e54a34-818b-4918-baeb-586ff225fe1f-scripts\") pod \"b1e54a34-818b-4918-baeb-586ff225fe1f\" (UID: \"b1e54a34-818b-4918-baeb-586ff225fe1f\") " Jan 20 16:57:56 crc kubenswrapper[4558]: I0120 16:57:56.299529 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5phkx\" (UniqueName: \"kubernetes.io/projected/b3c9bd31-20f5-45ea-ae8d-987a8a70e321-kube-api-access-5phkx\") pod \"b3c9bd31-20f5-45ea-ae8d-987a8a70e321\" (UID: \"b3c9bd31-20f5-45ea-ae8d-987a8a70e321\") " Jan 20 16:57:56 crc kubenswrapper[4558]: I0120 16:57:56.299556 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b1e54a34-818b-4918-baeb-586ff225fe1f-logs\") pod \"b1e54a34-818b-4918-baeb-586ff225fe1f\" (UID: \"b1e54a34-818b-4918-baeb-586ff225fe1f\") " Jan 20 16:57:56 crc kubenswrapper[4558]: I0120 16:57:56.299599 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1e54a34-818b-4918-baeb-586ff225fe1f-config-data\") pod \"b1e54a34-818b-4918-baeb-586ff225fe1f\" (UID: \"b1e54a34-818b-4918-baeb-586ff225fe1f\") " Jan 20 16:57:56 crc kubenswrapper[4558]: I0120 16:57:56.299618 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b3c9bd31-20f5-45ea-ae8d-987a8a70e321-db-sync-config-data\") pod \"b3c9bd31-20f5-45ea-ae8d-987a8a70e321\" (UID: \"b3c9bd31-20f5-45ea-ae8d-987a8a70e321\") " Jan 20 16:57:56 crc kubenswrapper[4558]: I0120 16:57:56.299640 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3c9bd31-20f5-45ea-ae8d-987a8a70e321-combined-ca-bundle\") pod \"b3c9bd31-20f5-45ea-ae8d-987a8a70e321\" (UID: \"b3c9bd31-20f5-45ea-ae8d-987a8a70e321\") " Jan 20 16:57:56 crc kubenswrapper[4558]: I0120 16:57:56.299874 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jh79j\" (UniqueName: \"kubernetes.io/projected/b1e54a34-818b-4918-baeb-586ff225fe1f-kube-api-access-jh79j\") pod \"b1e54a34-818b-4918-baeb-586ff225fe1f\" (UID: \"b1e54a34-818b-4918-baeb-586ff225fe1f\") " Jan 20 16:57:56 crc kubenswrapper[4558]: I0120 16:57:56.300462 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b1e54a34-818b-4918-baeb-586ff225fe1f-logs" (OuterVolumeSpecName: "logs") pod "b1e54a34-818b-4918-baeb-586ff225fe1f" (UID: "b1e54a34-818b-4918-baeb-586ff225fe1f"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 16:57:56 crc kubenswrapper[4558]: I0120 16:57:56.300745 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b1e54a34-818b-4918-baeb-586ff225fe1f-logs\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:56 crc kubenswrapper[4558]: I0120 16:57:56.303298 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3c9bd31-20f5-45ea-ae8d-987a8a70e321-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "b3c9bd31-20f5-45ea-ae8d-987a8a70e321" (UID: "b3c9bd31-20f5-45ea-ae8d-987a8a70e321"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:57:56 crc kubenswrapper[4558]: I0120 16:57:56.321683 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b1e54a34-818b-4918-baeb-586ff225fe1f-kube-api-access-jh79j" (OuterVolumeSpecName: "kube-api-access-jh79j") pod "b1e54a34-818b-4918-baeb-586ff225fe1f" (UID: "b1e54a34-818b-4918-baeb-586ff225fe1f"). InnerVolumeSpecName "kube-api-access-jh79j". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:57:56 crc kubenswrapper[4558]: I0120 16:57:56.322989 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b1e54a34-818b-4918-baeb-586ff225fe1f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b1e54a34-818b-4918-baeb-586ff225fe1f" (UID: "b1e54a34-818b-4918-baeb-586ff225fe1f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:57:56 crc kubenswrapper[4558]: I0120 16:57:56.323618 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b1e54a34-818b-4918-baeb-586ff225fe1f-scripts" (OuterVolumeSpecName: "scripts") pod "b1e54a34-818b-4918-baeb-586ff225fe1f" (UID: "b1e54a34-818b-4918-baeb-586ff225fe1f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:57:56 crc kubenswrapper[4558]: I0120 16:57:56.337662 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b3c9bd31-20f5-45ea-ae8d-987a8a70e321-kube-api-access-5phkx" (OuterVolumeSpecName: "kube-api-access-5phkx") pod "b3c9bd31-20f5-45ea-ae8d-987a8a70e321" (UID: "b3c9bd31-20f5-45ea-ae8d-987a8a70e321"). InnerVolumeSpecName "kube-api-access-5phkx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:57:56 crc kubenswrapper[4558]: I0120 16:57:56.337826 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b1e54a34-818b-4918-baeb-586ff225fe1f-config-data" (OuterVolumeSpecName: "config-data") pod "b1e54a34-818b-4918-baeb-586ff225fe1f" (UID: "b1e54a34-818b-4918-baeb-586ff225fe1f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:57:56 crc kubenswrapper[4558]: I0120 16:57:56.338309 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3c9bd31-20f5-45ea-ae8d-987a8a70e321-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b3c9bd31-20f5-45ea-ae8d-987a8a70e321" (UID: "b3c9bd31-20f5-45ea-ae8d-987a8a70e321"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:57:56 crc kubenswrapper[4558]: I0120 16:57:56.402909 4558 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b3c9bd31-20f5-45ea-ae8d-987a8a70e321-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:56 crc kubenswrapper[4558]: I0120 16:57:56.402935 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3c9bd31-20f5-45ea-ae8d-987a8a70e321-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:56 crc kubenswrapper[4558]: I0120 16:57:56.402944 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jh79j\" (UniqueName: \"kubernetes.io/projected/b1e54a34-818b-4918-baeb-586ff225fe1f-kube-api-access-jh79j\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:56 crc kubenswrapper[4558]: I0120 16:57:56.402953 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1e54a34-818b-4918-baeb-586ff225fe1f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:56 crc kubenswrapper[4558]: I0120 16:57:56.402961 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b1e54a34-818b-4918-baeb-586ff225fe1f-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:56 crc kubenswrapper[4558]: I0120 16:57:56.402968 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5phkx\" (UniqueName: \"kubernetes.io/projected/b3c9bd31-20f5-45ea-ae8d-987a8a70e321-kube-api-access-5phkx\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:56 crc kubenswrapper[4558]: I0120 16:57:56.402977 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1e54a34-818b-4918-baeb-586ff225fe1f-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:56 crc kubenswrapper[4558]: E0120 16:57:56.527852 4558 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7b8d1710_d43e_4f4d_a16b_fb97e8bb7ffa.slice/crio-conmon-f01eee54b0096cee030175949e49bf2c2cae19d2175a23fa1d7f52c350aef666.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7b8d1710_d43e_4f4d_a16b_fb97e8bb7ffa.slice/crio-f01eee54b0096cee030175949e49bf2c2cae19d2175a23fa1d7f52c350aef666.scope\": RecentStats: unable to find data in memory cache]" Jan 20 16:57:56 crc kubenswrapper[4558]: I0120 16:57:56.756800 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-db-sync-mpmc8" Jan 20 16:57:56 crc kubenswrapper[4558]: I0120 16:57:56.756967 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-sync-mpmc8" event={"ID":"b3c9bd31-20f5-45ea-ae8d-987a8a70e321","Type":"ContainerDied","Data":"d4d1df28a2973894463776ce7b632dd351dbc7ee6f63ab78f5cc26a71eda223f"} Jan 20 16:57:56 crc kubenswrapper[4558]: I0120 16:57:56.757782 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d4d1df28a2973894463776ce7b632dd351dbc7ee6f63ab78f5cc26a71eda223f" Jan 20 16:57:56 crc kubenswrapper[4558]: I0120 16:57:56.758124 4558 generic.go:334] "Generic (PLEG): container finished" podID="5a82a446-869f-466c-a345-bd211d1851c2" containerID="f0cbb47b3b8abf3b84596f44ff9f49275729177b2875c4e8e764e817b8d6eb91" exitCode=0 Jan 20 16:57:56 crc kubenswrapper[4558]: I0120 16:57:56.758196 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-vkskq" event={"ID":"5a82a446-869f-466c-a345-bd211d1851c2","Type":"ContainerDied","Data":"f0cbb47b3b8abf3b84596f44ff9f49275729177b2875c4e8e764e817b8d6eb91"} Jan 20 16:57:56 crc kubenswrapper[4558]: I0120 16:57:56.761088 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-5d557df858-frznf" event={"ID":"7d0b0e33-9ce7-44d9-8b09-6008715fe1a0","Type":"ContainerStarted","Data":"0db5ea38beca13c9805cb3405d85b10e954c953b65c96cec72dc60b73ae7e610"} Jan 20 16:57:56 crc kubenswrapper[4558]: I0120 16:57:56.761209 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-5d557df858-frznf" event={"ID":"7d0b0e33-9ce7-44d9-8b09-6008715fe1a0","Type":"ContainerStarted","Data":"759720ba5c84a0de6cd2dbe18773a4daeffbac9fb1b15d97e54f0ba7761ed858"} Jan 20 16:57:56 crc kubenswrapper[4558]: I0120 16:57:56.761281 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-5d557df858-frznf" event={"ID":"7d0b0e33-9ce7-44d9-8b09-6008715fe1a0","Type":"ContainerStarted","Data":"37a6708a9860e88b4efa532ce9bee2f4585543237bb73c3a902befede3cff640"} Jan 20 16:57:56 crc kubenswrapper[4558]: I0120 16:57:56.761460 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/neutron-5d557df858-frznf" Jan 20 16:57:56 crc kubenswrapper[4558]: I0120 16:57:56.762712 4558 generic.go:334] "Generic (PLEG): container finished" podID="7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa" containerID="f01eee54b0096cee030175949e49bf2c2cae19d2175a23fa1d7f52c350aef666" exitCode=0 Jan 20 16:57:56 crc kubenswrapper[4558]: I0120 16:57:56.762820 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-sync-jwg24" event={"ID":"7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa","Type":"ContainerDied","Data":"f01eee54b0096cee030175949e49bf2c2cae19d2175a23fa1d7f52c350aef666"} Jan 20 16:57:56 crc kubenswrapper[4558]: I0120 16:57:56.764708 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-sync-d6pfc" event={"ID":"b1e54a34-818b-4918-baeb-586ff225fe1f","Type":"ContainerDied","Data":"03c23d6d6a3cc40c7b906b0244668b67eea91d715e82ca654f9c30d5abd205cf"} Jan 20 16:57:56 crc kubenswrapper[4558]: I0120 16:57:56.764761 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="03c23d6d6a3cc40c7b906b0244668b67eea91d715e82ca654f9c30d5abd205cf" Jan 20 16:57:56 crc kubenswrapper[4558]: I0120 16:57:56.765237 4558 util.go:48] "No ready sandbox for 
pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-db-sync-d6pfc" Jan 20 16:57:56 crc kubenswrapper[4558]: I0120 16:57:56.815873 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/neutron-5d557df858-frznf" podStartSLOduration=1.8158567049999998 podStartE2EDuration="1.815856705s" podCreationTimestamp="2026-01-20 16:57:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:57:56.813288626 +0000 UTC m=+970.573626593" watchObservedRunningTime="2026-01-20 16:57:56.815856705 +0000 UTC m=+970.576194672" Jan 20 16:57:56 crc kubenswrapper[4558]: I0120 16:57:56.869252 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-worker-7649847b9c-9wk9v"] Jan 20 16:57:56 crc kubenswrapper[4558]: E0120 16:57:56.869703 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1e54a34-818b-4918-baeb-586ff225fe1f" containerName="placement-db-sync" Jan 20 16:57:56 crc kubenswrapper[4558]: I0120 16:57:56.869762 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1e54a34-818b-4918-baeb-586ff225fe1f" containerName="placement-db-sync" Jan 20 16:57:56 crc kubenswrapper[4558]: E0120 16:57:56.869852 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3c9bd31-20f5-45ea-ae8d-987a8a70e321" containerName="barbican-db-sync" Jan 20 16:57:56 crc kubenswrapper[4558]: I0120 16:57:56.869899 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3c9bd31-20f5-45ea-ae8d-987a8a70e321" containerName="barbican-db-sync" Jan 20 16:57:56 crc kubenswrapper[4558]: I0120 16:57:56.870093 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b1e54a34-818b-4918-baeb-586ff225fe1f" containerName="placement-db-sync" Jan 20 16:57:56 crc kubenswrapper[4558]: I0120 16:57:56.870156 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3c9bd31-20f5-45ea-ae8d-987a8a70e321" containerName="barbican-db-sync" Jan 20 16:57:56 crc kubenswrapper[4558]: I0120 16:57:56.870959 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-7649847b9c-9wk9v" Jan 20 16:57:56 crc kubenswrapper[4558]: I0120 16:57:56.873637 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-config-data" Jan 20 16:57:56 crc kubenswrapper[4558]: I0120 16:57:56.873892 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-barbican-dockercfg-z9dpp" Jan 20 16:57:56 crc kubenswrapper[4558]: I0120 16:57:56.874019 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-worker-config-data" Jan 20 16:57:56 crc kubenswrapper[4558]: I0120 16:57:56.881577 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-6b66757dfd-k89tm"] Jan 20 16:57:56 crc kubenswrapper[4558]: I0120 16:57:56.882809 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-6b66757dfd-k89tm" Jan 20 16:57:56 crc kubenswrapper[4558]: I0120 16:57:56.884010 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-keystone-listener-config-data" Jan 20 16:57:56 crc kubenswrapper[4558]: I0120 16:57:56.900005 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-worker-7649847b9c-9wk9v"] Jan 20 16:57:56 crc kubenswrapper[4558]: I0120 16:57:56.916537 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-6b66757dfd-k89tm"] Jan 20 16:57:56 crc kubenswrapper[4558]: I0120 16:57:56.965105 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-api-7bdfbf6ddd-thvjg"] Jan 20 16:57:56 crc kubenswrapper[4558]: I0120 16:57:56.966292 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-api-7bdfbf6ddd-thvjg" Jan 20 16:57:56 crc kubenswrapper[4558]: I0120 16:57:56.968310 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-api-config-data" Jan 20 16:57:56 crc kubenswrapper[4558]: I0120 16:57:56.979868 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-api-7bdfbf6ddd-thvjg"] Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.016638 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8be0815b-492d-4db8-bfd5-29d81bdb14d6-logs\") pod \"barbican-api-7bdfbf6ddd-thvjg\" (UID: \"8be0815b-492d-4db8-bfd5-29d81bdb14d6\") " pod="openstack-kuttl-tests/barbican-api-7bdfbf6ddd-thvjg" Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.016676 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8be0815b-492d-4db8-bfd5-29d81bdb14d6-config-data-custom\") pod \"barbican-api-7bdfbf6ddd-thvjg\" (UID: \"8be0815b-492d-4db8-bfd5-29d81bdb14d6\") " pod="openstack-kuttl-tests/barbican-api-7bdfbf6ddd-thvjg" Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.016743 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/40234b95-d302-420a-96d7-c56ffe609530-config-data-custom\") pod \"barbican-worker-7649847b9c-9wk9v\" (UID: \"40234b95-d302-420a-96d7-c56ffe609530\") " pod="openstack-kuttl-tests/barbican-worker-7649847b9c-9wk9v" Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.016759 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915-config-data-custom\") pod \"barbican-keystone-listener-6b66757dfd-k89tm\" (UID: \"69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915\") " pod="openstack-kuttl-tests/barbican-keystone-listener-6b66757dfd-k89tm" Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.016783 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/40234b95-d302-420a-96d7-c56ffe609530-logs\") pod \"barbican-worker-7649847b9c-9wk9v\" (UID: \"40234b95-d302-420a-96d7-c56ffe609530\") " pod="openstack-kuttl-tests/barbican-worker-7649847b9c-9wk9v" Jan 20 16:57:57 crc 
kubenswrapper[4558]: I0120 16:57:57.016870 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wrts6\" (UniqueName: \"kubernetes.io/projected/40234b95-d302-420a-96d7-c56ffe609530-kube-api-access-wrts6\") pod \"barbican-worker-7649847b9c-9wk9v\" (UID: \"40234b95-d302-420a-96d7-c56ffe609530\") " pod="openstack-kuttl-tests/barbican-worker-7649847b9c-9wk9v" Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.016894 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4db2c\" (UniqueName: \"kubernetes.io/projected/8be0815b-492d-4db8-bfd5-29d81bdb14d6-kube-api-access-4db2c\") pod \"barbican-api-7bdfbf6ddd-thvjg\" (UID: \"8be0815b-492d-4db8-bfd5-29d81bdb14d6\") " pod="openstack-kuttl-tests/barbican-api-7bdfbf6ddd-thvjg" Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.016914 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915-config-data\") pod \"barbican-keystone-listener-6b66757dfd-k89tm\" (UID: \"69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915\") " pod="openstack-kuttl-tests/barbican-keystone-listener-6b66757dfd-k89tm" Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.016996 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8be0815b-492d-4db8-bfd5-29d81bdb14d6-config-data\") pod \"barbican-api-7bdfbf6ddd-thvjg\" (UID: \"8be0815b-492d-4db8-bfd5-29d81bdb14d6\") " pod="openstack-kuttl-tests/barbican-api-7bdfbf6ddd-thvjg" Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.017040 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915-logs\") pod \"barbican-keystone-listener-6b66757dfd-k89tm\" (UID: \"69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915\") " pod="openstack-kuttl-tests/barbican-keystone-listener-6b66757dfd-k89tm" Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.017107 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915-combined-ca-bundle\") pod \"barbican-keystone-listener-6b66757dfd-k89tm\" (UID: \"69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915\") " pod="openstack-kuttl-tests/barbican-keystone-listener-6b66757dfd-k89tm" Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.017131 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8be0815b-492d-4db8-bfd5-29d81bdb14d6-combined-ca-bundle\") pod \"barbican-api-7bdfbf6ddd-thvjg\" (UID: \"8be0815b-492d-4db8-bfd5-29d81bdb14d6\") " pod="openstack-kuttl-tests/barbican-api-7bdfbf6ddd-thvjg" Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.017146 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40234b95-d302-420a-96d7-c56ffe609530-config-data\") pod \"barbican-worker-7649847b9c-9wk9v\" (UID: \"40234b95-d302-420a-96d7-c56ffe609530\") " pod="openstack-kuttl-tests/barbican-worker-7649847b9c-9wk9v" Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.017226 4558 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40234b95-d302-420a-96d7-c56ffe609530-combined-ca-bundle\") pod \"barbican-worker-7649847b9c-9wk9v\" (UID: \"40234b95-d302-420a-96d7-c56ffe609530\") " pod="openstack-kuttl-tests/barbican-worker-7649847b9c-9wk9v" Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.017275 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kw87t\" (UniqueName: \"kubernetes.io/projected/69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915-kube-api-access-kw87t\") pod \"barbican-keystone-listener-6b66757dfd-k89tm\" (UID: \"69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915\") " pod="openstack-kuttl-tests/barbican-keystone-listener-6b66757dfd-k89tm" Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.119075 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915-combined-ca-bundle\") pod \"barbican-keystone-listener-6b66757dfd-k89tm\" (UID: \"69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915\") " pod="openstack-kuttl-tests/barbican-keystone-listener-6b66757dfd-k89tm" Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.119120 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8be0815b-492d-4db8-bfd5-29d81bdb14d6-combined-ca-bundle\") pod \"barbican-api-7bdfbf6ddd-thvjg\" (UID: \"8be0815b-492d-4db8-bfd5-29d81bdb14d6\") " pod="openstack-kuttl-tests/barbican-api-7bdfbf6ddd-thvjg" Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.119141 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40234b95-d302-420a-96d7-c56ffe609530-config-data\") pod \"barbican-worker-7649847b9c-9wk9v\" (UID: \"40234b95-d302-420a-96d7-c56ffe609530\") " pod="openstack-kuttl-tests/barbican-worker-7649847b9c-9wk9v" Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.119199 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40234b95-d302-420a-96d7-c56ffe609530-combined-ca-bundle\") pod \"barbican-worker-7649847b9c-9wk9v\" (UID: \"40234b95-d302-420a-96d7-c56ffe609530\") " pod="openstack-kuttl-tests/barbican-worker-7649847b9c-9wk9v" Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.119225 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kw87t\" (UniqueName: \"kubernetes.io/projected/69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915-kube-api-access-kw87t\") pod \"barbican-keystone-listener-6b66757dfd-k89tm\" (UID: \"69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915\") " pod="openstack-kuttl-tests/barbican-keystone-listener-6b66757dfd-k89tm" Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.119254 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8be0815b-492d-4db8-bfd5-29d81bdb14d6-logs\") pod \"barbican-api-7bdfbf6ddd-thvjg\" (UID: \"8be0815b-492d-4db8-bfd5-29d81bdb14d6\") " pod="openstack-kuttl-tests/barbican-api-7bdfbf6ddd-thvjg" Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.119270 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8be0815b-492d-4db8-bfd5-29d81bdb14d6-config-data-custom\") pod 
\"barbican-api-7bdfbf6ddd-thvjg\" (UID: \"8be0815b-492d-4db8-bfd5-29d81bdb14d6\") " pod="openstack-kuttl-tests/barbican-api-7bdfbf6ddd-thvjg" Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.119307 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/40234b95-d302-420a-96d7-c56ffe609530-config-data-custom\") pod \"barbican-worker-7649847b9c-9wk9v\" (UID: \"40234b95-d302-420a-96d7-c56ffe609530\") " pod="openstack-kuttl-tests/barbican-worker-7649847b9c-9wk9v" Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.119324 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915-config-data-custom\") pod \"barbican-keystone-listener-6b66757dfd-k89tm\" (UID: \"69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915\") " pod="openstack-kuttl-tests/barbican-keystone-listener-6b66757dfd-k89tm" Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.119340 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/40234b95-d302-420a-96d7-c56ffe609530-logs\") pod \"barbican-worker-7649847b9c-9wk9v\" (UID: \"40234b95-d302-420a-96d7-c56ffe609530\") " pod="openstack-kuttl-tests/barbican-worker-7649847b9c-9wk9v" Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.119370 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wrts6\" (UniqueName: \"kubernetes.io/projected/40234b95-d302-420a-96d7-c56ffe609530-kube-api-access-wrts6\") pod \"barbican-worker-7649847b9c-9wk9v\" (UID: \"40234b95-d302-420a-96d7-c56ffe609530\") " pod="openstack-kuttl-tests/barbican-worker-7649847b9c-9wk9v" Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.119389 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4db2c\" (UniqueName: \"kubernetes.io/projected/8be0815b-492d-4db8-bfd5-29d81bdb14d6-kube-api-access-4db2c\") pod \"barbican-api-7bdfbf6ddd-thvjg\" (UID: \"8be0815b-492d-4db8-bfd5-29d81bdb14d6\") " pod="openstack-kuttl-tests/barbican-api-7bdfbf6ddd-thvjg" Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.119405 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915-config-data\") pod \"barbican-keystone-listener-6b66757dfd-k89tm\" (UID: \"69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915\") " pod="openstack-kuttl-tests/barbican-keystone-listener-6b66757dfd-k89tm" Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.119425 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8be0815b-492d-4db8-bfd5-29d81bdb14d6-config-data\") pod \"barbican-api-7bdfbf6ddd-thvjg\" (UID: \"8be0815b-492d-4db8-bfd5-29d81bdb14d6\") " pod="openstack-kuttl-tests/barbican-api-7bdfbf6ddd-thvjg" Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.119442 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915-logs\") pod \"barbican-keystone-listener-6b66757dfd-k89tm\" (UID: \"69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915\") " pod="openstack-kuttl-tests/barbican-keystone-listener-6b66757dfd-k89tm" Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.119767 4558 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915-logs\") pod \"barbican-keystone-listener-6b66757dfd-k89tm\" (UID: \"69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915\") " pod="openstack-kuttl-tests/barbican-keystone-listener-6b66757dfd-k89tm" Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.119993 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8be0815b-492d-4db8-bfd5-29d81bdb14d6-logs\") pod \"barbican-api-7bdfbf6ddd-thvjg\" (UID: \"8be0815b-492d-4db8-bfd5-29d81bdb14d6\") " pod="openstack-kuttl-tests/barbican-api-7bdfbf6ddd-thvjg" Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.122435 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/40234b95-d302-420a-96d7-c56ffe609530-logs\") pod \"barbican-worker-7649847b9c-9wk9v\" (UID: \"40234b95-d302-420a-96d7-c56ffe609530\") " pod="openstack-kuttl-tests/barbican-worker-7649847b9c-9wk9v" Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.126758 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915-config-data-custom\") pod \"barbican-keystone-listener-6b66757dfd-k89tm\" (UID: \"69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915\") " pod="openstack-kuttl-tests/barbican-keystone-listener-6b66757dfd-k89tm" Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.126843 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40234b95-d302-420a-96d7-c56ffe609530-combined-ca-bundle\") pod \"barbican-worker-7649847b9c-9wk9v\" (UID: \"40234b95-d302-420a-96d7-c56ffe609530\") " pod="openstack-kuttl-tests/barbican-worker-7649847b9c-9wk9v" Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.126775 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8be0815b-492d-4db8-bfd5-29d81bdb14d6-config-data-custom\") pod \"barbican-api-7bdfbf6ddd-thvjg\" (UID: \"8be0815b-492d-4db8-bfd5-29d81bdb14d6\") " pod="openstack-kuttl-tests/barbican-api-7bdfbf6ddd-thvjg" Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.127235 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915-config-data\") pod \"barbican-keystone-listener-6b66757dfd-k89tm\" (UID: \"69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915\") " pod="openstack-kuttl-tests/barbican-keystone-listener-6b66757dfd-k89tm" Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.129368 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/40234b95-d302-420a-96d7-c56ffe609530-config-data-custom\") pod \"barbican-worker-7649847b9c-9wk9v\" (UID: \"40234b95-d302-420a-96d7-c56ffe609530\") " pod="openstack-kuttl-tests/barbican-worker-7649847b9c-9wk9v" Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.129828 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40234b95-d302-420a-96d7-c56ffe609530-config-data\") pod \"barbican-worker-7649847b9c-9wk9v\" (UID: \"40234b95-d302-420a-96d7-c56ffe609530\") " pod="openstack-kuttl-tests/barbican-worker-7649847b9c-9wk9v" Jan 20 16:57:57 crc kubenswrapper[4558]: 
I0120 16:57:57.131345 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8be0815b-492d-4db8-bfd5-29d81bdb14d6-config-data\") pod \"barbican-api-7bdfbf6ddd-thvjg\" (UID: \"8be0815b-492d-4db8-bfd5-29d81bdb14d6\") " pod="openstack-kuttl-tests/barbican-api-7bdfbf6ddd-thvjg" Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.131620 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915-combined-ca-bundle\") pod \"barbican-keystone-listener-6b66757dfd-k89tm\" (UID: \"69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915\") " pod="openstack-kuttl-tests/barbican-keystone-listener-6b66757dfd-k89tm" Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.134495 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8be0815b-492d-4db8-bfd5-29d81bdb14d6-combined-ca-bundle\") pod \"barbican-api-7bdfbf6ddd-thvjg\" (UID: \"8be0815b-492d-4db8-bfd5-29d81bdb14d6\") " pod="openstack-kuttl-tests/barbican-api-7bdfbf6ddd-thvjg" Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.134994 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4db2c\" (UniqueName: \"kubernetes.io/projected/8be0815b-492d-4db8-bfd5-29d81bdb14d6-kube-api-access-4db2c\") pod \"barbican-api-7bdfbf6ddd-thvjg\" (UID: \"8be0815b-492d-4db8-bfd5-29d81bdb14d6\") " pod="openstack-kuttl-tests/barbican-api-7bdfbf6ddd-thvjg" Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.136317 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wrts6\" (UniqueName: \"kubernetes.io/projected/40234b95-d302-420a-96d7-c56ffe609530-kube-api-access-wrts6\") pod \"barbican-worker-7649847b9c-9wk9v\" (UID: \"40234b95-d302-420a-96d7-c56ffe609530\") " pod="openstack-kuttl-tests/barbican-worker-7649847b9c-9wk9v" Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.136449 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kw87t\" (UniqueName: \"kubernetes.io/projected/69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915-kube-api-access-kw87t\") pod \"barbican-keystone-listener-6b66757dfd-k89tm\" (UID: \"69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915\") " pod="openstack-kuttl-tests/barbican-keystone-listener-6b66757dfd-k89tm" Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.203844 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-7649847b9c-9wk9v" Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.210232 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-6b66757dfd-k89tm" Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.281655 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-api-7bdfbf6ddd-thvjg" Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.329938 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/placement-5d77597494-bkh4z"] Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.331385 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-5d77597494-bkh4z" Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.334012 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"placement-scripts" Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.334236 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-placement-internal-svc" Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.334460 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"placement-config-data" Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.334579 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-placement-public-svc" Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.334803 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"placement-placement-dockercfg-2kfdl" Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.351326 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-5d77597494-bkh4z"] Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.428292 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rskmp\" (UniqueName: \"kubernetes.io/projected/a140c9a4-bec6-42e6-bc6c-d63566e4f7f6-kube-api-access-rskmp\") pod \"placement-5d77597494-bkh4z\" (UID: \"a140c9a4-bec6-42e6-bc6c-d63566e4f7f6\") " pod="openstack-kuttl-tests/placement-5d77597494-bkh4z" Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.428425 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a140c9a4-bec6-42e6-bc6c-d63566e4f7f6-scripts\") pod \"placement-5d77597494-bkh4z\" (UID: \"a140c9a4-bec6-42e6-bc6c-d63566e4f7f6\") " pod="openstack-kuttl-tests/placement-5d77597494-bkh4z" Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.428465 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a140c9a4-bec6-42e6-bc6c-d63566e4f7f6-internal-tls-certs\") pod \"placement-5d77597494-bkh4z\" (UID: \"a140c9a4-bec6-42e6-bc6c-d63566e4f7f6\") " pod="openstack-kuttl-tests/placement-5d77597494-bkh4z" Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.428498 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a140c9a4-bec6-42e6-bc6c-d63566e4f7f6-public-tls-certs\") pod \"placement-5d77597494-bkh4z\" (UID: \"a140c9a4-bec6-42e6-bc6c-d63566e4f7f6\") " pod="openstack-kuttl-tests/placement-5d77597494-bkh4z" Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.428638 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a140c9a4-bec6-42e6-bc6c-d63566e4f7f6-logs\") pod \"placement-5d77597494-bkh4z\" (UID: \"a140c9a4-bec6-42e6-bc6c-d63566e4f7f6\") " pod="openstack-kuttl-tests/placement-5d77597494-bkh4z" Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.428666 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a140c9a4-bec6-42e6-bc6c-d63566e4f7f6-combined-ca-bundle\") pod \"placement-5d77597494-bkh4z\" (UID: 
\"a140c9a4-bec6-42e6-bc6c-d63566e4f7f6\") " pod="openstack-kuttl-tests/placement-5d77597494-bkh4z" Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.428768 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a140c9a4-bec6-42e6-bc6c-d63566e4f7f6-config-data\") pod \"placement-5d77597494-bkh4z\" (UID: \"a140c9a4-bec6-42e6-bc6c-d63566e4f7f6\") " pod="openstack-kuttl-tests/placement-5d77597494-bkh4z" Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.529975 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rskmp\" (UniqueName: \"kubernetes.io/projected/a140c9a4-bec6-42e6-bc6c-d63566e4f7f6-kube-api-access-rskmp\") pod \"placement-5d77597494-bkh4z\" (UID: \"a140c9a4-bec6-42e6-bc6c-d63566e4f7f6\") " pod="openstack-kuttl-tests/placement-5d77597494-bkh4z" Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.530083 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a140c9a4-bec6-42e6-bc6c-d63566e4f7f6-scripts\") pod \"placement-5d77597494-bkh4z\" (UID: \"a140c9a4-bec6-42e6-bc6c-d63566e4f7f6\") " pod="openstack-kuttl-tests/placement-5d77597494-bkh4z" Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.530114 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a140c9a4-bec6-42e6-bc6c-d63566e4f7f6-internal-tls-certs\") pod \"placement-5d77597494-bkh4z\" (UID: \"a140c9a4-bec6-42e6-bc6c-d63566e4f7f6\") " pod="openstack-kuttl-tests/placement-5d77597494-bkh4z" Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.530145 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a140c9a4-bec6-42e6-bc6c-d63566e4f7f6-public-tls-certs\") pod \"placement-5d77597494-bkh4z\" (UID: \"a140c9a4-bec6-42e6-bc6c-d63566e4f7f6\") " pod="openstack-kuttl-tests/placement-5d77597494-bkh4z" Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.530255 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a140c9a4-bec6-42e6-bc6c-d63566e4f7f6-logs\") pod \"placement-5d77597494-bkh4z\" (UID: \"a140c9a4-bec6-42e6-bc6c-d63566e4f7f6\") " pod="openstack-kuttl-tests/placement-5d77597494-bkh4z" Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.530279 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a140c9a4-bec6-42e6-bc6c-d63566e4f7f6-combined-ca-bundle\") pod \"placement-5d77597494-bkh4z\" (UID: \"a140c9a4-bec6-42e6-bc6c-d63566e4f7f6\") " pod="openstack-kuttl-tests/placement-5d77597494-bkh4z" Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.530367 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a140c9a4-bec6-42e6-bc6c-d63566e4f7f6-config-data\") pod \"placement-5d77597494-bkh4z\" (UID: \"a140c9a4-bec6-42e6-bc6c-d63566e4f7f6\") " pod="openstack-kuttl-tests/placement-5d77597494-bkh4z" Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.530879 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a140c9a4-bec6-42e6-bc6c-d63566e4f7f6-logs\") pod \"placement-5d77597494-bkh4z\" (UID: 
\"a140c9a4-bec6-42e6-bc6c-d63566e4f7f6\") " pod="openstack-kuttl-tests/placement-5d77597494-bkh4z" Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.533240 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a140c9a4-bec6-42e6-bc6c-d63566e4f7f6-public-tls-certs\") pod \"placement-5d77597494-bkh4z\" (UID: \"a140c9a4-bec6-42e6-bc6c-d63566e4f7f6\") " pod="openstack-kuttl-tests/placement-5d77597494-bkh4z" Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.536781 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a140c9a4-bec6-42e6-bc6c-d63566e4f7f6-combined-ca-bundle\") pod \"placement-5d77597494-bkh4z\" (UID: \"a140c9a4-bec6-42e6-bc6c-d63566e4f7f6\") " pod="openstack-kuttl-tests/placement-5d77597494-bkh4z" Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.536802 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a140c9a4-bec6-42e6-bc6c-d63566e4f7f6-internal-tls-certs\") pod \"placement-5d77597494-bkh4z\" (UID: \"a140c9a4-bec6-42e6-bc6c-d63566e4f7f6\") " pod="openstack-kuttl-tests/placement-5d77597494-bkh4z" Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.536959 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a140c9a4-bec6-42e6-bc6c-d63566e4f7f6-config-data\") pod \"placement-5d77597494-bkh4z\" (UID: \"a140c9a4-bec6-42e6-bc6c-d63566e4f7f6\") " pod="openstack-kuttl-tests/placement-5d77597494-bkh4z" Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.541509 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a140c9a4-bec6-42e6-bc6c-d63566e4f7f6-scripts\") pod \"placement-5d77597494-bkh4z\" (UID: \"a140c9a4-bec6-42e6-bc6c-d63566e4f7f6\") " pod="openstack-kuttl-tests/placement-5d77597494-bkh4z" Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.542896 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rskmp\" (UniqueName: \"kubernetes.io/projected/a140c9a4-bec6-42e6-bc6c-d63566e4f7f6-kube-api-access-rskmp\") pod \"placement-5d77597494-bkh4z\" (UID: \"a140c9a4-bec6-42e6-bc6c-d63566e4f7f6\") " pod="openstack-kuttl-tests/placement-5d77597494-bkh4z" Jan 20 16:57:57 crc kubenswrapper[4558]: I0120 16:57:57.654128 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-5d77597494-bkh4z" Jan 20 16:57:58 crc kubenswrapper[4558]: I0120 16:57:58.383396 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-db-sync-jwg24" Jan 20 16:57:58 crc kubenswrapper[4558]: I0120 16:57:58.386957 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-vkskq" Jan 20 16:57:58 crc kubenswrapper[4558]: I0120 16:57:58.443126 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/5a82a446-869f-466c-a345-bd211d1851c2-fernet-keys\") pod \"5a82a446-869f-466c-a345-bd211d1851c2\" (UID: \"5a82a446-869f-466c-a345-bd211d1851c2\") " Jan 20 16:57:58 crc kubenswrapper[4558]: I0120 16:57:58.443231 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/5a82a446-869f-466c-a345-bd211d1851c2-credential-keys\") pod \"5a82a446-869f-466c-a345-bd211d1851c2\" (UID: \"5a82a446-869f-466c-a345-bd211d1851c2\") " Jan 20 16:57:58 crc kubenswrapper[4558]: I0120 16:57:58.443262 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcwc2\" (UniqueName: \"kubernetes.io/projected/5a82a446-869f-466c-a345-bd211d1851c2-kube-api-access-pcwc2\") pod \"5a82a446-869f-466c-a345-bd211d1851c2\" (UID: \"5a82a446-869f-466c-a345-bd211d1851c2\") " Jan 20 16:57:58 crc kubenswrapper[4558]: I0120 16:57:58.443810 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p67ps\" (UniqueName: \"kubernetes.io/projected/7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa-kube-api-access-p67ps\") pod \"7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa\" (UID: \"7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa\") " Jan 20 16:57:58 crc kubenswrapper[4558]: I0120 16:57:58.443851 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa-scripts\") pod \"7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa\" (UID: \"7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa\") " Jan 20 16:57:58 crc kubenswrapper[4558]: I0120 16:57:58.443921 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa-config-data\") pod \"7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa\" (UID: \"7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa\") " Jan 20 16:57:58 crc kubenswrapper[4558]: I0120 16:57:58.443939 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa-db-sync-config-data\") pod \"7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa\" (UID: \"7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa\") " Jan 20 16:57:58 crc kubenswrapper[4558]: I0120 16:57:58.444015 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a82a446-869f-466c-a345-bd211d1851c2-config-data\") pod \"5a82a446-869f-466c-a345-bd211d1851c2\" (UID: \"5a82a446-869f-466c-a345-bd211d1851c2\") " Jan 20 16:57:58 crc kubenswrapper[4558]: I0120 16:57:58.444073 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a82a446-869f-466c-a345-bd211d1851c2-combined-ca-bundle\") pod \"5a82a446-869f-466c-a345-bd211d1851c2\" (UID: \"5a82a446-869f-466c-a345-bd211d1851c2\") " Jan 20 16:57:58 crc kubenswrapper[4558]: I0120 16:57:58.444097 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5a82a446-869f-466c-a345-bd211d1851c2-scripts\") pod \"5a82a446-869f-466c-a345-bd211d1851c2\" (UID: 
\"5a82a446-869f-466c-a345-bd211d1851c2\") " Jan 20 16:57:58 crc kubenswrapper[4558]: I0120 16:57:58.444113 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa-etc-machine-id\") pod \"7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa\" (UID: \"7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa\") " Jan 20 16:57:58 crc kubenswrapper[4558]: I0120 16:57:58.444135 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa-combined-ca-bundle\") pod \"7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa\" (UID: \"7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa\") " Jan 20 16:57:58 crc kubenswrapper[4558]: I0120 16:57:58.447240 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa" (UID: "7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 16:57:58 crc kubenswrapper[4558]: I0120 16:57:58.448093 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a82a446-869f-466c-a345-bd211d1851c2-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "5a82a446-869f-466c-a345-bd211d1851c2" (UID: "5a82a446-869f-466c-a345-bd211d1851c2"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:57:58 crc kubenswrapper[4558]: I0120 16:57:58.449762 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a82a446-869f-466c-a345-bd211d1851c2-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "5a82a446-869f-466c-a345-bd211d1851c2" (UID: "5a82a446-869f-466c-a345-bd211d1851c2"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:57:58 crc kubenswrapper[4558]: I0120 16:57:58.450028 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa-scripts" (OuterVolumeSpecName: "scripts") pod "7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa" (UID: "7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:57:58 crc kubenswrapper[4558]: I0120 16:57:58.450289 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa-kube-api-access-p67ps" (OuterVolumeSpecName: "kube-api-access-p67ps") pod "7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa" (UID: "7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa"). InnerVolumeSpecName "kube-api-access-p67ps". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:57:58 crc kubenswrapper[4558]: I0120 16:57:58.450933 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5a82a446-869f-466c-a345-bd211d1851c2-kube-api-access-pcwc2" (OuterVolumeSpecName: "kube-api-access-pcwc2") pod "5a82a446-869f-466c-a345-bd211d1851c2" (UID: "5a82a446-869f-466c-a345-bd211d1851c2"). InnerVolumeSpecName "kube-api-access-pcwc2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:57:58 crc kubenswrapper[4558]: I0120 16:57:58.453815 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a82a446-869f-466c-a345-bd211d1851c2-scripts" (OuterVolumeSpecName: "scripts") pod "5a82a446-869f-466c-a345-bd211d1851c2" (UID: "5a82a446-869f-466c-a345-bd211d1851c2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:57:58 crc kubenswrapper[4558]: I0120 16:57:58.457295 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa" (UID: "7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:57:58 crc kubenswrapper[4558]: I0120 16:57:58.472642 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a82a446-869f-466c-a345-bd211d1851c2-config-data" (OuterVolumeSpecName: "config-data") pod "5a82a446-869f-466c-a345-bd211d1851c2" (UID: "5a82a446-869f-466c-a345-bd211d1851c2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:57:58 crc kubenswrapper[4558]: I0120 16:57:58.490634 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a82a446-869f-466c-a345-bd211d1851c2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5a82a446-869f-466c-a345-bd211d1851c2" (UID: "5a82a446-869f-466c-a345-bd211d1851c2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:57:58 crc kubenswrapper[4558]: I0120 16:57:58.500312 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa" (UID: "7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:57:58 crc kubenswrapper[4558]: I0120 16:57:58.518325 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa-config-data" (OuterVolumeSpecName: "config-data") pod "7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa" (UID: "7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:57:58 crc kubenswrapper[4558]: I0120 16:57:58.548395 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:58 crc kubenswrapper[4558]: I0120 16:57:58.548475 4558 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:58 crc kubenswrapper[4558]: I0120 16:57:58.548488 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a82a446-869f-466c-a345-bd211d1851c2-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:58 crc kubenswrapper[4558]: I0120 16:57:58.548496 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a82a446-869f-466c-a345-bd211d1851c2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:58 crc kubenswrapper[4558]: I0120 16:57:58.548503 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5a82a446-869f-466c-a345-bd211d1851c2-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:58 crc kubenswrapper[4558]: I0120 16:57:58.548510 4558 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:58 crc kubenswrapper[4558]: I0120 16:57:58.548517 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:58 crc kubenswrapper[4558]: I0120 16:57:58.548524 4558 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/5a82a446-869f-466c-a345-bd211d1851c2-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:58 crc kubenswrapper[4558]: I0120 16:57:58.548531 4558 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/5a82a446-869f-466c-a345-bd211d1851c2-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:58 crc kubenswrapper[4558]: I0120 16:57:58.548538 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcwc2\" (UniqueName: \"kubernetes.io/projected/5a82a446-869f-466c-a345-bd211d1851c2-kube-api-access-pcwc2\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:58 crc kubenswrapper[4558]: I0120 16:57:58.548546 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p67ps\" (UniqueName: \"kubernetes.io/projected/7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa-kube-api-access-p67ps\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:58 crc kubenswrapper[4558]: I0120 16:57:58.548553 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 16:57:58 crc kubenswrapper[4558]: I0120 16:57:58.779732 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-sync-jwg24" 
event={"ID":"7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa","Type":"ContainerDied","Data":"4cb53da6e78ccba02b3e0b49fd32433b44da1ccb3582085cf458effb14b08711"} Jan 20 16:57:58 crc kubenswrapper[4558]: I0120 16:57:58.779778 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4cb53da6e78ccba02b3e0b49fd32433b44da1ccb3582085cf458effb14b08711" Jan 20 16:57:58 crc kubenswrapper[4558]: I0120 16:57:58.779759 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-db-sync-jwg24" Jan 20 16:57:58 crc kubenswrapper[4558]: I0120 16:57:58.783452 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-vkskq" event={"ID":"5a82a446-869f-466c-a345-bd211d1851c2","Type":"ContainerDied","Data":"258afccf4fbf9be861474bd36a7e4e0551b0ff93dcb00a54d84718092cde0f42"} Jan 20 16:57:58 crc kubenswrapper[4558]: I0120 16:57:58.783481 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-vkskq" Jan 20 16:57:58 crc kubenswrapper[4558]: I0120 16:57:58.783481 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="258afccf4fbf9be861474bd36a7e4e0551b0ff93dcb00a54d84718092cde0f42" Jan 20 16:57:58 crc kubenswrapper[4558]: I0120 16:57:58.878094 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-6494546496-58v7b"] Jan 20 16:57:58 crc kubenswrapper[4558]: E0120 16:57:58.878482 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a82a446-869f-466c-a345-bd211d1851c2" containerName="keystone-bootstrap" Jan 20 16:57:58 crc kubenswrapper[4558]: I0120 16:57:58.878499 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a82a446-869f-466c-a345-bd211d1851c2" containerName="keystone-bootstrap" Jan 20 16:57:58 crc kubenswrapper[4558]: E0120 16:57:58.878518 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa" containerName="cinder-db-sync" Jan 20 16:57:58 crc kubenswrapper[4558]: I0120 16:57:58.878523 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa" containerName="cinder-db-sync" Jan 20 16:57:58 crc kubenswrapper[4558]: I0120 16:57:58.878707 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa" containerName="cinder-db-sync" Jan 20 16:57:58 crc kubenswrapper[4558]: I0120 16:57:58.878729 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="5a82a446-869f-466c-a345-bd211d1851c2" containerName="keystone-bootstrap" Jan 20 16:57:58 crc kubenswrapper[4558]: I0120 16:57:58.879252 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-6494546496-58v7b" Jan 20 16:57:58 crc kubenswrapper[4558]: I0120 16:57:58.885830 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-config-data" Jan 20 16:57:58 crc kubenswrapper[4558]: I0120 16:57:58.886205 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-keystone-internal-svc" Jan 20 16:57:58 crc kubenswrapper[4558]: I0120 16:57:58.886238 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-scripts" Jan 20 16:57:58 crc kubenswrapper[4558]: I0120 16:57:58.888671 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-keystone-dockercfg-wlvl5" Jan 20 16:57:58 crc kubenswrapper[4558]: I0120 16:57:58.888742 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-keystone-public-svc" Jan 20 16:57:58 crc kubenswrapper[4558]: I0120 16:57:58.890386 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone" Jan 20 16:57:58 crc kubenswrapper[4558]: I0120 16:57:58.892226 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-6494546496-58v7b"] Jan 20 16:57:58 crc kubenswrapper[4558]: I0120 16:57:58.955529 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43781c23-b22a-4449-8306-67efbe8dd6fc-combined-ca-bundle\") pod \"keystone-6494546496-58v7b\" (UID: \"43781c23-b22a-4449-8306-67efbe8dd6fc\") " pod="openstack-kuttl-tests/keystone-6494546496-58v7b" Jan 20 16:57:58 crc kubenswrapper[4558]: I0120 16:57:58.955565 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/43781c23-b22a-4449-8306-67efbe8dd6fc-credential-keys\") pod \"keystone-6494546496-58v7b\" (UID: \"43781c23-b22a-4449-8306-67efbe8dd6fc\") " pod="openstack-kuttl-tests/keystone-6494546496-58v7b" Jan 20 16:57:58 crc kubenswrapper[4558]: I0120 16:57:58.955584 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/43781c23-b22a-4449-8306-67efbe8dd6fc-fernet-keys\") pod \"keystone-6494546496-58v7b\" (UID: \"43781c23-b22a-4449-8306-67efbe8dd6fc\") " pod="openstack-kuttl-tests/keystone-6494546496-58v7b" Jan 20 16:57:58 crc kubenswrapper[4558]: I0120 16:57:58.955619 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/43781c23-b22a-4449-8306-67efbe8dd6fc-public-tls-certs\") pod \"keystone-6494546496-58v7b\" (UID: \"43781c23-b22a-4449-8306-67efbe8dd6fc\") " pod="openstack-kuttl-tests/keystone-6494546496-58v7b" Jan 20 16:57:58 crc kubenswrapper[4558]: I0120 16:57:58.955668 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-snwkf\" (UniqueName: \"kubernetes.io/projected/43781c23-b22a-4449-8306-67efbe8dd6fc-kube-api-access-snwkf\") pod \"keystone-6494546496-58v7b\" (UID: \"43781c23-b22a-4449-8306-67efbe8dd6fc\") " pod="openstack-kuttl-tests/keystone-6494546496-58v7b" Jan 20 16:57:58 crc kubenswrapper[4558]: I0120 16:57:58.955688 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/43781c23-b22a-4449-8306-67efbe8dd6fc-config-data\") pod \"keystone-6494546496-58v7b\" (UID: \"43781c23-b22a-4449-8306-67efbe8dd6fc\") " pod="openstack-kuttl-tests/keystone-6494546496-58v7b" Jan 20 16:57:58 crc kubenswrapper[4558]: I0120 16:57:58.955733 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/43781c23-b22a-4449-8306-67efbe8dd6fc-internal-tls-certs\") pod \"keystone-6494546496-58v7b\" (UID: \"43781c23-b22a-4449-8306-67efbe8dd6fc\") " pod="openstack-kuttl-tests/keystone-6494546496-58v7b" Jan 20 16:57:58 crc kubenswrapper[4558]: I0120 16:57:58.955746 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/43781c23-b22a-4449-8306-67efbe8dd6fc-scripts\") pod \"keystone-6494546496-58v7b\" (UID: \"43781c23-b22a-4449-8306-67efbe8dd6fc\") " pod="openstack-kuttl-tests/keystone-6494546496-58v7b" Jan 20 16:57:59 crc kubenswrapper[4558]: I0120 16:57:59.033415 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 16:57:59 crc kubenswrapper[4558]: I0120 16:57:59.034630 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 16:57:59 crc kubenswrapper[4558]: I0120 16:57:59.036826 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-config-data" Jan 20 16:57:59 crc kubenswrapper[4558]: I0120 16:57:59.037036 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-scripts" Jan 20 16:57:59 crc kubenswrapper[4558]: I0120 16:57:59.037188 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-scheduler-config-data" Jan 20 16:57:59 crc kubenswrapper[4558]: I0120 16:57:59.037527 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-cinder-dockercfg-s7wtr" Jan 20 16:57:59 crc kubenswrapper[4558]: I0120 16:57:59.050110 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 16:57:59 crc kubenswrapper[4558]: I0120 16:57:59.057513 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/43781c23-b22a-4449-8306-67efbe8dd6fc-public-tls-certs\") pod \"keystone-6494546496-58v7b\" (UID: \"43781c23-b22a-4449-8306-67efbe8dd6fc\") " pod="openstack-kuttl-tests/keystone-6494546496-58v7b" Jan 20 16:57:59 crc kubenswrapper[4558]: I0120 16:57:59.058062 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-snwkf\" (UniqueName: \"kubernetes.io/projected/43781c23-b22a-4449-8306-67efbe8dd6fc-kube-api-access-snwkf\") pod \"keystone-6494546496-58v7b\" (UID: \"43781c23-b22a-4449-8306-67efbe8dd6fc\") " pod="openstack-kuttl-tests/keystone-6494546496-58v7b" Jan 20 16:57:59 crc kubenswrapper[4558]: I0120 16:57:59.058141 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/43781c23-b22a-4449-8306-67efbe8dd6fc-config-data\") pod \"keystone-6494546496-58v7b\" (UID: \"43781c23-b22a-4449-8306-67efbe8dd6fc\") " pod="openstack-kuttl-tests/keystone-6494546496-58v7b" Jan 20 16:57:59 crc kubenswrapper[4558]: I0120 16:57:59.058281 
4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/43781c23-b22a-4449-8306-67efbe8dd6fc-internal-tls-certs\") pod \"keystone-6494546496-58v7b\" (UID: \"43781c23-b22a-4449-8306-67efbe8dd6fc\") " pod="openstack-kuttl-tests/keystone-6494546496-58v7b" Jan 20 16:57:59 crc kubenswrapper[4558]: I0120 16:57:59.058308 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/43781c23-b22a-4449-8306-67efbe8dd6fc-scripts\") pod \"keystone-6494546496-58v7b\" (UID: \"43781c23-b22a-4449-8306-67efbe8dd6fc\") " pod="openstack-kuttl-tests/keystone-6494546496-58v7b" Jan 20 16:57:59 crc kubenswrapper[4558]: I0120 16:57:59.058356 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43781c23-b22a-4449-8306-67efbe8dd6fc-combined-ca-bundle\") pod \"keystone-6494546496-58v7b\" (UID: \"43781c23-b22a-4449-8306-67efbe8dd6fc\") " pod="openstack-kuttl-tests/keystone-6494546496-58v7b" Jan 20 16:57:59 crc kubenswrapper[4558]: I0120 16:57:59.058376 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/43781c23-b22a-4449-8306-67efbe8dd6fc-credential-keys\") pod \"keystone-6494546496-58v7b\" (UID: \"43781c23-b22a-4449-8306-67efbe8dd6fc\") " pod="openstack-kuttl-tests/keystone-6494546496-58v7b" Jan 20 16:57:59 crc kubenswrapper[4558]: I0120 16:57:59.058397 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/43781c23-b22a-4449-8306-67efbe8dd6fc-fernet-keys\") pod \"keystone-6494546496-58v7b\" (UID: \"43781c23-b22a-4449-8306-67efbe8dd6fc\") " pod="openstack-kuttl-tests/keystone-6494546496-58v7b" Jan 20 16:57:59 crc kubenswrapper[4558]: I0120 16:57:59.060719 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/43781c23-b22a-4449-8306-67efbe8dd6fc-public-tls-certs\") pod \"keystone-6494546496-58v7b\" (UID: \"43781c23-b22a-4449-8306-67efbe8dd6fc\") " pod="openstack-kuttl-tests/keystone-6494546496-58v7b" Jan 20 16:57:59 crc kubenswrapper[4558]: I0120 16:57:59.060986 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/43781c23-b22a-4449-8306-67efbe8dd6fc-fernet-keys\") pod \"keystone-6494546496-58v7b\" (UID: \"43781c23-b22a-4449-8306-67efbe8dd6fc\") " pod="openstack-kuttl-tests/keystone-6494546496-58v7b" Jan 20 16:57:59 crc kubenswrapper[4558]: I0120 16:57:59.063659 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43781c23-b22a-4449-8306-67efbe8dd6fc-combined-ca-bundle\") pod \"keystone-6494546496-58v7b\" (UID: \"43781c23-b22a-4449-8306-67efbe8dd6fc\") " pod="openstack-kuttl-tests/keystone-6494546496-58v7b" Jan 20 16:57:59 crc kubenswrapper[4558]: I0120 16:57:59.065949 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/43781c23-b22a-4449-8306-67efbe8dd6fc-credential-keys\") pod \"keystone-6494546496-58v7b\" (UID: \"43781c23-b22a-4449-8306-67efbe8dd6fc\") " pod="openstack-kuttl-tests/keystone-6494546496-58v7b" Jan 20 16:57:59 crc kubenswrapper[4558]: I0120 16:57:59.070752 4558 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/43781c23-b22a-4449-8306-67efbe8dd6fc-config-data\") pod \"keystone-6494546496-58v7b\" (UID: \"43781c23-b22a-4449-8306-67efbe8dd6fc\") " pod="openstack-kuttl-tests/keystone-6494546496-58v7b" Jan 20 16:57:59 crc kubenswrapper[4558]: I0120 16:57:59.078573 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/43781c23-b22a-4449-8306-67efbe8dd6fc-internal-tls-certs\") pod \"keystone-6494546496-58v7b\" (UID: \"43781c23-b22a-4449-8306-67efbe8dd6fc\") " pod="openstack-kuttl-tests/keystone-6494546496-58v7b" Jan 20 16:57:59 crc kubenswrapper[4558]: I0120 16:57:59.078702 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-snwkf\" (UniqueName: \"kubernetes.io/projected/43781c23-b22a-4449-8306-67efbe8dd6fc-kube-api-access-snwkf\") pod \"keystone-6494546496-58v7b\" (UID: \"43781c23-b22a-4449-8306-67efbe8dd6fc\") " pod="openstack-kuttl-tests/keystone-6494546496-58v7b" Jan 20 16:57:59 crc kubenswrapper[4558]: I0120 16:57:59.078790 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/43781c23-b22a-4449-8306-67efbe8dd6fc-scripts\") pod \"keystone-6494546496-58v7b\" (UID: \"43781c23-b22a-4449-8306-67efbe8dd6fc\") " pod="openstack-kuttl-tests/keystone-6494546496-58v7b" Jan 20 16:57:59 crc kubenswrapper[4558]: I0120 16:57:59.159606 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4a1ee65-2f6a-4db8-b56d-f0729c084060-config-data\") pod \"cinder-scheduler-0\" (UID: \"d4a1ee65-2f6a-4db8-b56d-f0729c084060\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 16:57:59 crc kubenswrapper[4558]: I0120 16:57:59.159646 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d4a1ee65-2f6a-4db8-b56d-f0729c084060-scripts\") pod \"cinder-scheduler-0\" (UID: \"d4a1ee65-2f6a-4db8-b56d-f0729c084060\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 16:57:59 crc kubenswrapper[4558]: I0120 16:57:59.159665 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d4a1ee65-2f6a-4db8-b56d-f0729c084060-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"d4a1ee65-2f6a-4db8-b56d-f0729c084060\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 16:57:59 crc kubenswrapper[4558]: I0120 16:57:59.159684 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4a1ee65-2f6a-4db8-b56d-f0729c084060-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"d4a1ee65-2f6a-4db8-b56d-f0729c084060\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 16:57:59 crc kubenswrapper[4558]: I0120 16:57:59.159779 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d4a1ee65-2f6a-4db8-b56d-f0729c084060-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"d4a1ee65-2f6a-4db8-b56d-f0729c084060\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 16:57:59 crc kubenswrapper[4558]: I0120 16:57:59.159820 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"kube-api-access-zbhzm\" (UniqueName: \"kubernetes.io/projected/d4a1ee65-2f6a-4db8-b56d-f0729c084060-kube-api-access-zbhzm\") pod \"cinder-scheduler-0\" (UID: \"d4a1ee65-2f6a-4db8-b56d-f0729c084060\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 16:57:59 crc kubenswrapper[4558]: I0120 16:57:59.194199 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-6494546496-58v7b" Jan 20 16:57:59 crc kubenswrapper[4558]: I0120 16:57:59.224872 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 16:57:59 crc kubenswrapper[4558]: I0120 16:57:59.226113 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 16:57:59 crc kubenswrapper[4558]: I0120 16:57:59.227751 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-api-config-data" Jan 20 16:57:59 crc kubenswrapper[4558]: I0120 16:57:59.255105 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 16:57:59 crc kubenswrapper[4558]: I0120 16:57:59.263781 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e9fb262e-7147-4f43-bedf-96936e8eea0d-logs\") pod \"cinder-api-0\" (UID: \"e9fb262e-7147-4f43-bedf-96936e8eea0d\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 16:57:59 crc kubenswrapper[4558]: I0120 16:57:59.263830 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zbhzm\" (UniqueName: \"kubernetes.io/projected/d4a1ee65-2f6a-4db8-b56d-f0729c084060-kube-api-access-zbhzm\") pod \"cinder-scheduler-0\" (UID: \"d4a1ee65-2f6a-4db8-b56d-f0729c084060\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 16:57:59 crc kubenswrapper[4558]: I0120 16:57:59.263864 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e9fb262e-7147-4f43-bedf-96936e8eea0d-etc-machine-id\") pod \"cinder-api-0\" (UID: \"e9fb262e-7147-4f43-bedf-96936e8eea0d\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 16:57:59 crc kubenswrapper[4558]: I0120 16:57:59.263939 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e9fb262e-7147-4f43-bedf-96936e8eea0d-scripts\") pod \"cinder-api-0\" (UID: \"e9fb262e-7147-4f43-bedf-96936e8eea0d\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 16:57:59 crc kubenswrapper[4558]: I0120 16:57:59.263955 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e9fb262e-7147-4f43-bedf-96936e8eea0d-config-data-custom\") pod \"cinder-api-0\" (UID: \"e9fb262e-7147-4f43-bedf-96936e8eea0d\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 16:57:59 crc kubenswrapper[4558]: I0120 16:57:59.263987 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9fb262e-7147-4f43-bedf-96936e8eea0d-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"e9fb262e-7147-4f43-bedf-96936e8eea0d\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 16:57:59 crc kubenswrapper[4558]: I0120 16:57:59.264017 4558 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4a1ee65-2f6a-4db8-b56d-f0729c084060-config-data\") pod \"cinder-scheduler-0\" (UID: \"d4a1ee65-2f6a-4db8-b56d-f0729c084060\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 16:57:59 crc kubenswrapper[4558]: I0120 16:57:59.264036 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d4a1ee65-2f6a-4db8-b56d-f0729c084060-scripts\") pod \"cinder-scheduler-0\" (UID: \"d4a1ee65-2f6a-4db8-b56d-f0729c084060\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 16:57:59 crc kubenswrapper[4558]: I0120 16:57:59.264056 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d4a1ee65-2f6a-4db8-b56d-f0729c084060-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"d4a1ee65-2f6a-4db8-b56d-f0729c084060\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 16:57:59 crc kubenswrapper[4558]: I0120 16:57:59.264076 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4a1ee65-2f6a-4db8-b56d-f0729c084060-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"d4a1ee65-2f6a-4db8-b56d-f0729c084060\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 16:57:59 crc kubenswrapper[4558]: I0120 16:57:59.264097 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q9nrm\" (UniqueName: \"kubernetes.io/projected/e9fb262e-7147-4f43-bedf-96936e8eea0d-kube-api-access-q9nrm\") pod \"cinder-api-0\" (UID: \"e9fb262e-7147-4f43-bedf-96936e8eea0d\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 16:57:59 crc kubenswrapper[4558]: I0120 16:57:59.264126 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e9fb262e-7147-4f43-bedf-96936e8eea0d-config-data\") pod \"cinder-api-0\" (UID: \"e9fb262e-7147-4f43-bedf-96936e8eea0d\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 16:57:59 crc kubenswrapper[4558]: I0120 16:57:59.264143 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d4a1ee65-2f6a-4db8-b56d-f0729c084060-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"d4a1ee65-2f6a-4db8-b56d-f0729c084060\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 16:57:59 crc kubenswrapper[4558]: I0120 16:57:59.271118 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d4a1ee65-2f6a-4db8-b56d-f0729c084060-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"d4a1ee65-2f6a-4db8-b56d-f0729c084060\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 16:57:59 crc kubenswrapper[4558]: I0120 16:57:59.273019 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d4a1ee65-2f6a-4db8-b56d-f0729c084060-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"d4a1ee65-2f6a-4db8-b56d-f0729c084060\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 16:57:59 crc kubenswrapper[4558]: I0120 16:57:59.279121 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/d4a1ee65-2f6a-4db8-b56d-f0729c084060-config-data\") pod \"cinder-scheduler-0\" (UID: \"d4a1ee65-2f6a-4db8-b56d-f0729c084060\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 16:57:59 crc kubenswrapper[4558]: I0120 16:57:59.294229 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4a1ee65-2f6a-4db8-b56d-f0729c084060-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"d4a1ee65-2f6a-4db8-b56d-f0729c084060\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 16:57:59 crc kubenswrapper[4558]: I0120 16:57:59.296073 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zbhzm\" (UniqueName: \"kubernetes.io/projected/d4a1ee65-2f6a-4db8-b56d-f0729c084060-kube-api-access-zbhzm\") pod \"cinder-scheduler-0\" (UID: \"d4a1ee65-2f6a-4db8-b56d-f0729c084060\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 16:57:59 crc kubenswrapper[4558]: I0120 16:57:59.297603 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d4a1ee65-2f6a-4db8-b56d-f0729c084060-scripts\") pod \"cinder-scheduler-0\" (UID: \"d4a1ee65-2f6a-4db8-b56d-f0729c084060\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 16:57:59 crc kubenswrapper[4558]: I0120 16:57:59.346147 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 16:57:59 crc kubenswrapper[4558]: I0120 16:57:59.365747 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e9fb262e-7147-4f43-bedf-96936e8eea0d-scripts\") pod \"cinder-api-0\" (UID: \"e9fb262e-7147-4f43-bedf-96936e8eea0d\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 16:57:59 crc kubenswrapper[4558]: I0120 16:57:59.365787 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e9fb262e-7147-4f43-bedf-96936e8eea0d-config-data-custom\") pod \"cinder-api-0\" (UID: \"e9fb262e-7147-4f43-bedf-96936e8eea0d\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 16:57:59 crc kubenswrapper[4558]: I0120 16:57:59.365833 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9fb262e-7147-4f43-bedf-96936e8eea0d-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"e9fb262e-7147-4f43-bedf-96936e8eea0d\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 16:57:59 crc kubenswrapper[4558]: I0120 16:57:59.365904 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q9nrm\" (UniqueName: \"kubernetes.io/projected/e9fb262e-7147-4f43-bedf-96936e8eea0d-kube-api-access-q9nrm\") pod \"cinder-api-0\" (UID: \"e9fb262e-7147-4f43-bedf-96936e8eea0d\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 16:57:59 crc kubenswrapper[4558]: I0120 16:57:59.365941 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e9fb262e-7147-4f43-bedf-96936e8eea0d-config-data\") pod \"cinder-api-0\" (UID: \"e9fb262e-7147-4f43-bedf-96936e8eea0d\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 16:57:59 crc kubenswrapper[4558]: I0120 16:57:59.365981 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/e9fb262e-7147-4f43-bedf-96936e8eea0d-logs\") pod \"cinder-api-0\" (UID: \"e9fb262e-7147-4f43-bedf-96936e8eea0d\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 16:57:59 crc kubenswrapper[4558]: I0120 16:57:59.366020 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e9fb262e-7147-4f43-bedf-96936e8eea0d-etc-machine-id\") pod \"cinder-api-0\" (UID: \"e9fb262e-7147-4f43-bedf-96936e8eea0d\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 16:57:59 crc kubenswrapper[4558]: I0120 16:57:59.366115 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e9fb262e-7147-4f43-bedf-96936e8eea0d-etc-machine-id\") pod \"cinder-api-0\" (UID: \"e9fb262e-7147-4f43-bedf-96936e8eea0d\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 16:57:59 crc kubenswrapper[4558]: I0120 16:57:59.366567 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e9fb262e-7147-4f43-bedf-96936e8eea0d-logs\") pod \"cinder-api-0\" (UID: \"e9fb262e-7147-4f43-bedf-96936e8eea0d\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 16:57:59 crc kubenswrapper[4558]: I0120 16:57:59.371002 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9fb262e-7147-4f43-bedf-96936e8eea0d-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"e9fb262e-7147-4f43-bedf-96936e8eea0d\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 16:57:59 crc kubenswrapper[4558]: I0120 16:57:59.371918 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e9fb262e-7147-4f43-bedf-96936e8eea0d-config-data-custom\") pod \"cinder-api-0\" (UID: \"e9fb262e-7147-4f43-bedf-96936e8eea0d\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 16:57:59 crc kubenswrapper[4558]: I0120 16:57:59.372078 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e9fb262e-7147-4f43-bedf-96936e8eea0d-scripts\") pod \"cinder-api-0\" (UID: \"e9fb262e-7147-4f43-bedf-96936e8eea0d\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 16:57:59 crc kubenswrapper[4558]: I0120 16:57:59.374876 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e9fb262e-7147-4f43-bedf-96936e8eea0d-config-data\") pod \"cinder-api-0\" (UID: \"e9fb262e-7147-4f43-bedf-96936e8eea0d\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 16:57:59 crc kubenswrapper[4558]: I0120 16:57:59.388674 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q9nrm\" (UniqueName: \"kubernetes.io/projected/e9fb262e-7147-4f43-bedf-96936e8eea0d-kube-api-access-q9nrm\") pod \"cinder-api-0\" (UID: \"e9fb262e-7147-4f43-bedf-96936e8eea0d\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 16:57:59 crc kubenswrapper[4558]: I0120 16:57:59.538810 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 16:58:01 crc kubenswrapper[4558]: I0120 16:58:01.803650 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-5d77597494-bkh4z"] Jan 20 16:58:01 crc kubenswrapper[4558]: W0120 16:58:01.806962 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda140c9a4_bec6_42e6_bc6c_d63566e4f7f6.slice/crio-22aef6e67fa6ccb296984c72ec063d7c974b405920e883d538f54b5078fdb6ac WatchSource:0}: Error finding container 22aef6e67fa6ccb296984c72ec063d7c974b405920e883d538f54b5078fdb6ac: Status 404 returned error can't find the container with id 22aef6e67fa6ccb296984c72ec063d7c974b405920e883d538f54b5078fdb6ac Jan 20 16:58:01 crc kubenswrapper[4558]: I0120 16:58:01.807645 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"c1e3af55-1788-4d23-b3c5-6227e848ea15","Type":"ContainerStarted","Data":"cede7fedfff117f8c9861769debc9b41b0ace89fa517ecf7fb22b21f6cc45cca"} Jan 20 16:58:01 crc kubenswrapper[4558]: I0120 16:58:01.915090 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-api-7bdfbf6ddd-thvjg"] Jan 20 16:58:01 crc kubenswrapper[4558]: I0120 16:58:01.971480 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-worker-7649847b9c-9wk9v"] Jan 20 16:58:01 crc kubenswrapper[4558]: I0120 16:58:01.977386 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-6b66757dfd-k89tm"] Jan 20 16:58:01 crc kubenswrapper[4558]: W0120 16:58:01.980122 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod69c5c1c6_5c3f_48d4_8d14_c3c7cdcb1915.slice/crio-61fa0fb541c45a5e261d0db1712d9f845beda9e8cf2cd1e2bc7309d58b007aac WatchSource:0}: Error finding container 61fa0fb541c45a5e261d0db1712d9f845beda9e8cf2cd1e2bc7309d58b007aac: Status 404 returned error can't find the container with id 61fa0fb541c45a5e261d0db1712d9f845beda9e8cf2cd1e2bc7309d58b007aac Jan 20 16:58:02 crc kubenswrapper[4558]: I0120 16:58:02.068109 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-6494546496-58v7b"] Jan 20 16:58:02 crc kubenswrapper[4558]: W0120 16:58:02.075196 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod43781c23_b22a_4449_8306_67efbe8dd6fc.slice/crio-0314642ddff11a7a338c7adcef338a679612552d0433d7fda0bc23af866db076 WatchSource:0}: Error finding container 0314642ddff11a7a338c7adcef338a679612552d0433d7fda0bc23af866db076: Status 404 returned error can't find the container with id 0314642ddff11a7a338c7adcef338a679612552d0433d7fda0bc23af866db076 Jan 20 16:58:02 crc kubenswrapper[4558]: I0120 16:58:02.075911 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 16:58:02 crc kubenswrapper[4558]: W0120 16:58:02.079344 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd4a1ee65_2f6a_4db8_b56d_f0729c084060.slice/crio-f52909e9c4945ba2e0b0d9dc2e5bed6740c3eb850afdd70c18bb16c0f63a70fd WatchSource:0}: Error finding container f52909e9c4945ba2e0b0d9dc2e5bed6740c3eb850afdd70c18bb16c0f63a70fd: Status 404 returned error can't find the container with id 
f52909e9c4945ba2e0b0d9dc2e5bed6740c3eb850afdd70c18bb16c0f63a70fd Jan 20 16:58:02 crc kubenswrapper[4558]: W0120 16:58:02.081811 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode9fb262e_7147_4f43_bedf_96936e8eea0d.slice/crio-3a61005a56a4d40a835b05e3fb63e8cdefc180dbf489b24c8f75d455fc0d5e3d WatchSource:0}: Error finding container 3a61005a56a4d40a835b05e3fb63e8cdefc180dbf489b24c8f75d455fc0d5e3d: Status 404 returned error can't find the container with id 3a61005a56a4d40a835b05e3fb63e8cdefc180dbf489b24c8f75d455fc0d5e3d Jan 20 16:58:02 crc kubenswrapper[4558]: I0120 16:58:02.084887 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 16:58:02 crc kubenswrapper[4558]: I0120 16:58:02.816245 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-5d77597494-bkh4z" event={"ID":"a140c9a4-bec6-42e6-bc6c-d63566e4f7f6","Type":"ContainerStarted","Data":"620ea4b124f9388111a2afb26185bb93d973f00636b50d2b8aed2e76b5d21043"} Jan 20 16:58:02 crc kubenswrapper[4558]: I0120 16:58:02.816283 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-5d77597494-bkh4z" event={"ID":"a140c9a4-bec6-42e6-bc6c-d63566e4f7f6","Type":"ContainerStarted","Data":"b78d30b920cedd5deca6765214e5cedff3241e2599a5afce55dcb23d1cc75565"} Jan 20 16:58:02 crc kubenswrapper[4558]: I0120 16:58:02.816295 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-5d77597494-bkh4z" event={"ID":"a140c9a4-bec6-42e6-bc6c-d63566e4f7f6","Type":"ContainerStarted","Data":"22aef6e67fa6ccb296984c72ec063d7c974b405920e883d538f54b5078fdb6ac"} Jan 20 16:58:02 crc kubenswrapper[4558]: I0120 16:58:02.816337 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/placement-5d77597494-bkh4z" Jan 20 16:58:02 crc kubenswrapper[4558]: I0120 16:58:02.816368 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/placement-5d77597494-bkh4z" Jan 20 16:58:02 crc kubenswrapper[4558]: I0120 16:58:02.818916 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-6494546496-58v7b" event={"ID":"43781c23-b22a-4449-8306-67efbe8dd6fc","Type":"ContainerStarted","Data":"c6dd483fc55626bbad92f374c9cd3e8b6e558a86ecdf6e7a87f558af6aed4a91"} Jan 20 16:58:02 crc kubenswrapper[4558]: I0120 16:58:02.818955 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-6494546496-58v7b" event={"ID":"43781c23-b22a-4449-8306-67efbe8dd6fc","Type":"ContainerStarted","Data":"0314642ddff11a7a338c7adcef338a679612552d0433d7fda0bc23af866db076"} Jan 20 16:58:02 crc kubenswrapper[4558]: I0120 16:58:02.819076 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/keystone-6494546496-58v7b" Jan 20 16:58:02 crc kubenswrapper[4558]: I0120 16:58:02.832003 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"e9fb262e-7147-4f43-bedf-96936e8eea0d","Type":"ContainerStarted","Data":"e57b0e1983683a09c62276a34c8d4c4edb36f370bfc8dbf13f659e8276f522e8"} Jan 20 16:58:02 crc kubenswrapper[4558]: I0120 16:58:02.832127 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" 
event={"ID":"e9fb262e-7147-4f43-bedf-96936e8eea0d","Type":"ContainerStarted","Data":"3a61005a56a4d40a835b05e3fb63e8cdefc180dbf489b24c8f75d455fc0d5e3d"} Jan 20 16:58:02 crc kubenswrapper[4558]: I0120 16:58:02.834370 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"d4a1ee65-2f6a-4db8-b56d-f0729c084060","Type":"ContainerStarted","Data":"f52909e9c4945ba2e0b0d9dc2e5bed6740c3eb850afdd70c18bb16c0f63a70fd"} Jan 20 16:58:02 crc kubenswrapper[4558]: I0120 16:58:02.836621 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-7bdfbf6ddd-thvjg" event={"ID":"8be0815b-492d-4db8-bfd5-29d81bdb14d6","Type":"ContainerStarted","Data":"53d735f4f7282a16615b6f3b8dc32ad8c06f8b6e5d927b47a7f63325bbbf042e"} Jan 20 16:58:02 crc kubenswrapper[4558]: I0120 16:58:02.836649 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-7bdfbf6ddd-thvjg" event={"ID":"8be0815b-492d-4db8-bfd5-29d81bdb14d6","Type":"ContainerStarted","Data":"caeca9047300cdd39b8552502d469030f74bd63d62befb9645f590eaf61fd3f1"} Jan 20 16:58:02 crc kubenswrapper[4558]: I0120 16:58:02.836660 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-7bdfbf6ddd-thvjg" event={"ID":"8be0815b-492d-4db8-bfd5-29d81bdb14d6","Type":"ContainerStarted","Data":"26e2be8f6e22cc33201906e6911dbcc942470a8f9146c2c7520e62cd1c90123f"} Jan 20 16:58:02 crc kubenswrapper[4558]: I0120 16:58:02.839795 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/barbican-api-7bdfbf6ddd-thvjg" Jan 20 16:58:02 crc kubenswrapper[4558]: I0120 16:58:02.839828 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/barbican-api-7bdfbf6ddd-thvjg" Jan 20 16:58:02 crc kubenswrapper[4558]: I0120 16:58:02.842689 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/placement-5d77597494-bkh4z" podStartSLOduration=5.84267136 podStartE2EDuration="5.84267136s" podCreationTimestamp="2026-01-20 16:57:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:58:02.832873556 +0000 UTC m=+976.593211523" watchObservedRunningTime="2026-01-20 16:58:02.84267136 +0000 UTC m=+976.603009328" Jan 20 16:58:02 crc kubenswrapper[4558]: I0120 16:58:02.844264 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-7649847b9c-9wk9v" event={"ID":"40234b95-d302-420a-96d7-c56ffe609530","Type":"ContainerStarted","Data":"760e77d6a0cd8419ad9a68e3cf141ac4c57667551c43955981772f313ed258f5"} Jan 20 16:58:02 crc kubenswrapper[4558]: I0120 16:58:02.845328 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-6b66757dfd-k89tm" event={"ID":"69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915","Type":"ContainerStarted","Data":"61fa0fb541c45a5e261d0db1712d9f845beda9e8cf2cd1e2bc7309d58b007aac"} Jan 20 16:58:02 crc kubenswrapper[4558]: I0120 16:58:02.856373 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/keystone-6494546496-58v7b" podStartSLOduration=4.856359009 podStartE2EDuration="4.856359009s" podCreationTimestamp="2026-01-20 16:57:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:58:02.851521021 +0000 
UTC m=+976.611858988" watchObservedRunningTime="2026-01-20 16:58:02.856359009 +0000 UTC m=+976.616696976" Jan 20 16:58:02 crc kubenswrapper[4558]: I0120 16:58:02.869683 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-api-7bdfbf6ddd-thvjg" podStartSLOduration=6.869668546 podStartE2EDuration="6.869668546s" podCreationTimestamp="2026-01-20 16:57:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:58:02.864986332 +0000 UTC m=+976.625324299" watchObservedRunningTime="2026-01-20 16:58:02.869668546 +0000 UTC m=+976.630006513" Jan 20 16:58:02 crc kubenswrapper[4558]: I0120 16:58:02.926147 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:58:02 crc kubenswrapper[4558]: I0120 16:58:02.926208 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:58:02 crc kubenswrapper[4558]: I0120 16:58:02.934210 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:58:02 crc kubenswrapper[4558]: I0120 16:58:02.934257 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:58:02 crc kubenswrapper[4558]: I0120 16:58:02.957681 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:58:02 crc kubenswrapper[4558]: I0120 16:58:02.962095 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:58:02 crc kubenswrapper[4558]: I0120 16:58:02.966040 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:58:02 crc kubenswrapper[4558]: I0120 16:58:02.971151 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:58:03 crc kubenswrapper[4558]: I0120 16:58:03.623071 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 16:58:03 crc kubenswrapper[4558]: I0120 16:58:03.859377 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"e9fb262e-7147-4f43-bedf-96936e8eea0d","Type":"ContainerStarted","Data":"bc01840859b7663e90d5bdc41c63bb1cdbffbf13b837c36af41599cd52b68794"} Jan 20 16:58:03 crc kubenswrapper[4558]: I0120 16:58:03.859564 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 16:58:03 crc kubenswrapper[4558]: I0120 16:58:03.865538 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-7649847b9c-9wk9v" event={"ID":"40234b95-d302-420a-96d7-c56ffe609530","Type":"ContainerStarted","Data":"4a04ce46269bb3a821a0882c3df0c15680043afbc05a402c642893802ec047fb"} Jan 20 16:58:03 crc kubenswrapper[4558]: I0120 16:58:03.865575 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:58:03 crc kubenswrapper[4558]: I0120 16:58:03.867706 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:58:03 crc kubenswrapper[4558]: I0120 16:58:03.867965 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:58:03 crc kubenswrapper[4558]: I0120 16:58:03.867977 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:58:03 crc kubenswrapper[4558]: I0120 16:58:03.886391 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/cinder-api-0" podStartSLOduration=4.886356182 podStartE2EDuration="4.886356182s" podCreationTimestamp="2026-01-20 16:57:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:58:03.877579168 +0000 UTC m=+977.637917134" watchObservedRunningTime="2026-01-20 16:58:03.886356182 +0000 UTC m=+977.646694149" Jan 20 16:58:04 crc kubenswrapper[4558]: I0120 16:58:04.264072 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-api-595757ff5d-frx87"] Jan 20 16:58:04 crc kubenswrapper[4558]: I0120 16:58:04.271820 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-api-595757ff5d-frx87" Jan 20 16:58:04 crc kubenswrapper[4558]: I0120 16:58:04.277331 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-barbican-internal-svc" Jan 20 16:58:04 crc kubenswrapper[4558]: I0120 16:58:04.277341 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-barbican-public-svc" Jan 20 16:58:04 crc kubenswrapper[4558]: I0120 16:58:04.281980 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-api-595757ff5d-frx87"] Jan 20 16:58:04 crc kubenswrapper[4558]: I0120 16:58:04.368909 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3c6de94f-c605-43d9-97b5-ccf91e49d1fb-internal-tls-certs\") pod \"barbican-api-595757ff5d-frx87\" (UID: \"3c6de94f-c605-43d9-97b5-ccf91e49d1fb\") " pod="openstack-kuttl-tests/barbican-api-595757ff5d-frx87" Jan 20 16:58:04 crc kubenswrapper[4558]: I0120 16:58:04.368967 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c6de94f-c605-43d9-97b5-ccf91e49d1fb-config-data\") pod \"barbican-api-595757ff5d-frx87\" (UID: \"3c6de94f-c605-43d9-97b5-ccf91e49d1fb\") " pod="openstack-kuttl-tests/barbican-api-595757ff5d-frx87" Jan 20 16:58:04 crc kubenswrapper[4558]: I0120 16:58:04.368992 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3c6de94f-c605-43d9-97b5-ccf91e49d1fb-logs\") pod \"barbican-api-595757ff5d-frx87\" (UID: \"3c6de94f-c605-43d9-97b5-ccf91e49d1fb\") " pod="openstack-kuttl-tests/barbican-api-595757ff5d-frx87" Jan 20 16:58:04 crc kubenswrapper[4558]: I0120 16:58:04.369186 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3c6de94f-c605-43d9-97b5-ccf91e49d1fb-config-data-custom\") pod \"barbican-api-595757ff5d-frx87\" (UID: \"3c6de94f-c605-43d9-97b5-ccf91e49d1fb\") " 
pod="openstack-kuttl-tests/barbican-api-595757ff5d-frx87" Jan 20 16:58:04 crc kubenswrapper[4558]: I0120 16:58:04.369284 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-66xl6\" (UniqueName: \"kubernetes.io/projected/3c6de94f-c605-43d9-97b5-ccf91e49d1fb-kube-api-access-66xl6\") pod \"barbican-api-595757ff5d-frx87\" (UID: \"3c6de94f-c605-43d9-97b5-ccf91e49d1fb\") " pod="openstack-kuttl-tests/barbican-api-595757ff5d-frx87" Jan 20 16:58:04 crc kubenswrapper[4558]: I0120 16:58:04.369406 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3c6de94f-c605-43d9-97b5-ccf91e49d1fb-public-tls-certs\") pod \"barbican-api-595757ff5d-frx87\" (UID: \"3c6de94f-c605-43d9-97b5-ccf91e49d1fb\") " pod="openstack-kuttl-tests/barbican-api-595757ff5d-frx87" Jan 20 16:58:04 crc kubenswrapper[4558]: I0120 16:58:04.369466 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c6de94f-c605-43d9-97b5-ccf91e49d1fb-combined-ca-bundle\") pod \"barbican-api-595757ff5d-frx87\" (UID: \"3c6de94f-c605-43d9-97b5-ccf91e49d1fb\") " pod="openstack-kuttl-tests/barbican-api-595757ff5d-frx87" Jan 20 16:58:04 crc kubenswrapper[4558]: I0120 16:58:04.471285 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3c6de94f-c605-43d9-97b5-ccf91e49d1fb-config-data-custom\") pod \"barbican-api-595757ff5d-frx87\" (UID: \"3c6de94f-c605-43d9-97b5-ccf91e49d1fb\") " pod="openstack-kuttl-tests/barbican-api-595757ff5d-frx87" Jan 20 16:58:04 crc kubenswrapper[4558]: I0120 16:58:04.471345 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-66xl6\" (UniqueName: \"kubernetes.io/projected/3c6de94f-c605-43d9-97b5-ccf91e49d1fb-kube-api-access-66xl6\") pod \"barbican-api-595757ff5d-frx87\" (UID: \"3c6de94f-c605-43d9-97b5-ccf91e49d1fb\") " pod="openstack-kuttl-tests/barbican-api-595757ff5d-frx87" Jan 20 16:58:04 crc kubenswrapper[4558]: I0120 16:58:04.471377 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3c6de94f-c605-43d9-97b5-ccf91e49d1fb-public-tls-certs\") pod \"barbican-api-595757ff5d-frx87\" (UID: \"3c6de94f-c605-43d9-97b5-ccf91e49d1fb\") " pod="openstack-kuttl-tests/barbican-api-595757ff5d-frx87" Jan 20 16:58:04 crc kubenswrapper[4558]: I0120 16:58:04.471396 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c6de94f-c605-43d9-97b5-ccf91e49d1fb-combined-ca-bundle\") pod \"barbican-api-595757ff5d-frx87\" (UID: \"3c6de94f-c605-43d9-97b5-ccf91e49d1fb\") " pod="openstack-kuttl-tests/barbican-api-595757ff5d-frx87" Jan 20 16:58:04 crc kubenswrapper[4558]: I0120 16:58:04.471475 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3c6de94f-c605-43d9-97b5-ccf91e49d1fb-internal-tls-certs\") pod \"barbican-api-595757ff5d-frx87\" (UID: \"3c6de94f-c605-43d9-97b5-ccf91e49d1fb\") " pod="openstack-kuttl-tests/barbican-api-595757ff5d-frx87" Jan 20 16:58:04 crc kubenswrapper[4558]: I0120 16:58:04.471493 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" 
(UniqueName: \"kubernetes.io/secret/3c6de94f-c605-43d9-97b5-ccf91e49d1fb-config-data\") pod \"barbican-api-595757ff5d-frx87\" (UID: \"3c6de94f-c605-43d9-97b5-ccf91e49d1fb\") " pod="openstack-kuttl-tests/barbican-api-595757ff5d-frx87" Jan 20 16:58:04 crc kubenswrapper[4558]: I0120 16:58:04.471509 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3c6de94f-c605-43d9-97b5-ccf91e49d1fb-logs\") pod \"barbican-api-595757ff5d-frx87\" (UID: \"3c6de94f-c605-43d9-97b5-ccf91e49d1fb\") " pod="openstack-kuttl-tests/barbican-api-595757ff5d-frx87" Jan 20 16:58:04 crc kubenswrapper[4558]: I0120 16:58:04.472862 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3c6de94f-c605-43d9-97b5-ccf91e49d1fb-logs\") pod \"barbican-api-595757ff5d-frx87\" (UID: \"3c6de94f-c605-43d9-97b5-ccf91e49d1fb\") " pod="openstack-kuttl-tests/barbican-api-595757ff5d-frx87" Jan 20 16:58:04 crc kubenswrapper[4558]: I0120 16:58:04.478962 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3c6de94f-c605-43d9-97b5-ccf91e49d1fb-internal-tls-certs\") pod \"barbican-api-595757ff5d-frx87\" (UID: \"3c6de94f-c605-43d9-97b5-ccf91e49d1fb\") " pod="openstack-kuttl-tests/barbican-api-595757ff5d-frx87" Jan 20 16:58:04 crc kubenswrapper[4558]: I0120 16:58:04.483588 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c6de94f-c605-43d9-97b5-ccf91e49d1fb-combined-ca-bundle\") pod \"barbican-api-595757ff5d-frx87\" (UID: \"3c6de94f-c605-43d9-97b5-ccf91e49d1fb\") " pod="openstack-kuttl-tests/barbican-api-595757ff5d-frx87" Jan 20 16:58:04 crc kubenswrapper[4558]: I0120 16:58:04.485363 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c6de94f-c605-43d9-97b5-ccf91e49d1fb-config-data\") pod \"barbican-api-595757ff5d-frx87\" (UID: \"3c6de94f-c605-43d9-97b5-ccf91e49d1fb\") " pod="openstack-kuttl-tests/barbican-api-595757ff5d-frx87" Jan 20 16:58:04 crc kubenswrapper[4558]: I0120 16:58:04.485701 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3c6de94f-c605-43d9-97b5-ccf91e49d1fb-public-tls-certs\") pod \"barbican-api-595757ff5d-frx87\" (UID: \"3c6de94f-c605-43d9-97b5-ccf91e49d1fb\") " pod="openstack-kuttl-tests/barbican-api-595757ff5d-frx87" Jan 20 16:58:04 crc kubenswrapper[4558]: I0120 16:58:04.489603 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-66xl6\" (UniqueName: \"kubernetes.io/projected/3c6de94f-c605-43d9-97b5-ccf91e49d1fb-kube-api-access-66xl6\") pod \"barbican-api-595757ff5d-frx87\" (UID: \"3c6de94f-c605-43d9-97b5-ccf91e49d1fb\") " pod="openstack-kuttl-tests/barbican-api-595757ff5d-frx87" Jan 20 16:58:04 crc kubenswrapper[4558]: I0120 16:58:04.495457 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3c6de94f-c605-43d9-97b5-ccf91e49d1fb-config-data-custom\") pod \"barbican-api-595757ff5d-frx87\" (UID: \"3c6de94f-c605-43d9-97b5-ccf91e49d1fb\") " pod="openstack-kuttl-tests/barbican-api-595757ff5d-frx87" Jan 20 16:58:04 crc kubenswrapper[4558]: I0120 16:58:04.601048 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-api-595757ff5d-frx87" Jan 20 16:58:04 crc kubenswrapper[4558]: I0120 16:58:04.877682 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-7649847b9c-9wk9v" event={"ID":"40234b95-d302-420a-96d7-c56ffe609530","Type":"ContainerStarted","Data":"8645a2a68bc998fad70edddaf73505c46e74320243525ed7f7c2c0c00d201e22"} Jan 20 16:58:04 crc kubenswrapper[4558]: I0120 16:58:04.886029 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-6b66757dfd-k89tm" event={"ID":"69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915","Type":"ContainerStarted","Data":"5a4543f4f5068e723c149b86d815e8bf663acc3075125bad09cc80cc4d87503d"} Jan 20 16:58:04 crc kubenswrapper[4558]: I0120 16:58:04.886052 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-6b66757dfd-k89tm" event={"ID":"69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915","Type":"ContainerStarted","Data":"43ce3893218e12bafcf3980d1a94c5b6f79210afef04e341874b8c63d98def1c"} Jan 20 16:58:04 crc kubenswrapper[4558]: I0120 16:58:04.890930 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"d4a1ee65-2f6a-4db8-b56d-f0729c084060","Type":"ContainerStarted","Data":"03b367715211c190150b7a3c5d23027458fc3dfb546230b77e8b7b4f4e3cbe86"} Jan 20 16:58:04 crc kubenswrapper[4558]: I0120 16:58:04.890949 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"d4a1ee65-2f6a-4db8-b56d-f0729c084060","Type":"ContainerStarted","Data":"243e6138e05c0c7fcce3ba9f609d21d90a0fef60ee11f1b373592271c6c011c0"} Jan 20 16:58:04 crc kubenswrapper[4558]: I0120 16:58:04.891478 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-api-0" podUID="e9fb262e-7147-4f43-bedf-96936e8eea0d" containerName="cinder-api-log" containerID="cri-o://e57b0e1983683a09c62276a34c8d4c4edb36f370bfc8dbf13f659e8276f522e8" gracePeriod=30 Jan 20 16:58:04 crc kubenswrapper[4558]: I0120 16:58:04.891557 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-api-0" podUID="e9fb262e-7147-4f43-bedf-96936e8eea0d" containerName="cinder-api" containerID="cri-o://bc01840859b7663e90d5bdc41c63bb1cdbffbf13b837c36af41599cd52b68794" gracePeriod=30 Jan 20 16:58:04 crc kubenswrapper[4558]: I0120 16:58:04.944155 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-worker-7649847b9c-9wk9v" podStartSLOduration=7.537781981 podStartE2EDuration="8.944134986s" podCreationTimestamp="2026-01-20 16:57:56 +0000 UTC" firstStartedPulling="2026-01-20 16:58:01.979893371 +0000 UTC m=+975.740231339" lastFinishedPulling="2026-01-20 16:58:03.386246378 +0000 UTC m=+977.146584344" observedRunningTime="2026-01-20 16:58:04.893012834 +0000 UTC m=+978.653350792" watchObservedRunningTime="2026-01-20 16:58:04.944134986 +0000 UTC m=+978.704472953" Jan 20 16:58:04 crc kubenswrapper[4558]: I0120 16:58:04.952815 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/cinder-scheduler-0" podStartSLOduration=4.64572891 podStartE2EDuration="5.952797915s" podCreationTimestamp="2026-01-20 16:57:59 +0000 UTC" firstStartedPulling="2026-01-20 16:58:02.080988207 +0000 UTC m=+975.841326175" lastFinishedPulling="2026-01-20 16:58:03.388057212 +0000 UTC m=+977.148395180" 
observedRunningTime="2026-01-20 16:58:04.917082035 +0000 UTC m=+978.677420002" watchObservedRunningTime="2026-01-20 16:58:04.952797915 +0000 UTC m=+978.713135883" Jan 20 16:58:04 crc kubenswrapper[4558]: I0120 16:58:04.961588 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-keystone-listener-6b66757dfd-k89tm" podStartSLOduration=7.038911625 podStartE2EDuration="8.961577074s" podCreationTimestamp="2026-01-20 16:57:56 +0000 UTC" firstStartedPulling="2026-01-20 16:58:01.983612155 +0000 UTC m=+975.743950121" lastFinishedPulling="2026-01-20 16:58:03.906277602 +0000 UTC m=+977.666615570" observedRunningTime="2026-01-20 16:58:04.948788707 +0000 UTC m=+978.709126674" watchObservedRunningTime="2026-01-20 16:58:04.961577074 +0000 UTC m=+978.721915041" Jan 20 16:58:05 crc kubenswrapper[4558]: I0120 16:58:05.070103 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-api-595757ff5d-frx87"] Jan 20 16:58:05 crc kubenswrapper[4558]: W0120 16:58:05.094677 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3c6de94f_c605_43d9_97b5_ccf91e49d1fb.slice/crio-c495fe3ee551481098ce5d2fed77126628308deee2ebfade29bf4bdd17f835c0 WatchSource:0}: Error finding container c495fe3ee551481098ce5d2fed77126628308deee2ebfade29bf4bdd17f835c0: Status 404 returned error can't find the container with id c495fe3ee551481098ce5d2fed77126628308deee2ebfade29bf4bdd17f835c0 Jan 20 16:58:05 crc kubenswrapper[4558]: I0120 16:58:05.526781 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 16:58:05 crc kubenswrapper[4558]: I0120 16:58:05.590109 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e9fb262e-7147-4f43-bedf-96936e8eea0d-logs\") pod \"e9fb262e-7147-4f43-bedf-96936e8eea0d\" (UID: \"e9fb262e-7147-4f43-bedf-96936e8eea0d\") " Jan 20 16:58:05 crc kubenswrapper[4558]: I0120 16:58:05.590197 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9fb262e-7147-4f43-bedf-96936e8eea0d-combined-ca-bundle\") pod \"e9fb262e-7147-4f43-bedf-96936e8eea0d\" (UID: \"e9fb262e-7147-4f43-bedf-96936e8eea0d\") " Jan 20 16:58:05 crc kubenswrapper[4558]: I0120 16:58:05.590293 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e9fb262e-7147-4f43-bedf-96936e8eea0d-scripts\") pod \"e9fb262e-7147-4f43-bedf-96936e8eea0d\" (UID: \"e9fb262e-7147-4f43-bedf-96936e8eea0d\") " Jan 20 16:58:05 crc kubenswrapper[4558]: I0120 16:58:05.590315 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e9fb262e-7147-4f43-bedf-96936e8eea0d-config-data-custom\") pod \"e9fb262e-7147-4f43-bedf-96936e8eea0d\" (UID: \"e9fb262e-7147-4f43-bedf-96936e8eea0d\") " Jan 20 16:58:05 crc kubenswrapper[4558]: I0120 16:58:05.590358 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e9fb262e-7147-4f43-bedf-96936e8eea0d-config-data\") pod \"e9fb262e-7147-4f43-bedf-96936e8eea0d\" (UID: \"e9fb262e-7147-4f43-bedf-96936e8eea0d\") " Jan 20 16:58:05 crc kubenswrapper[4558]: I0120 16:58:05.590415 4558 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-q9nrm\" (UniqueName: \"kubernetes.io/projected/e9fb262e-7147-4f43-bedf-96936e8eea0d-kube-api-access-q9nrm\") pod \"e9fb262e-7147-4f43-bedf-96936e8eea0d\" (UID: \"e9fb262e-7147-4f43-bedf-96936e8eea0d\") " Jan 20 16:58:05 crc kubenswrapper[4558]: I0120 16:58:05.590473 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e9fb262e-7147-4f43-bedf-96936e8eea0d-etc-machine-id\") pod \"e9fb262e-7147-4f43-bedf-96936e8eea0d\" (UID: \"e9fb262e-7147-4f43-bedf-96936e8eea0d\") " Jan 20 16:58:05 crc kubenswrapper[4558]: I0120 16:58:05.590570 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e9fb262e-7147-4f43-bedf-96936e8eea0d-logs" (OuterVolumeSpecName: "logs") pod "e9fb262e-7147-4f43-bedf-96936e8eea0d" (UID: "e9fb262e-7147-4f43-bedf-96936e8eea0d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 16:58:05 crc kubenswrapper[4558]: I0120 16:58:05.591220 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e9fb262e-7147-4f43-bedf-96936e8eea0d-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "e9fb262e-7147-4f43-bedf-96936e8eea0d" (UID: "e9fb262e-7147-4f43-bedf-96936e8eea0d"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 16:58:05 crc kubenswrapper[4558]: I0120 16:58:05.594288 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e9fb262e-7147-4f43-bedf-96936e8eea0d-kube-api-access-q9nrm" (OuterVolumeSpecName: "kube-api-access-q9nrm") pod "e9fb262e-7147-4f43-bedf-96936e8eea0d" (UID: "e9fb262e-7147-4f43-bedf-96936e8eea0d"). InnerVolumeSpecName "kube-api-access-q9nrm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:58:05 crc kubenswrapper[4558]: I0120 16:58:05.595781 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9fb262e-7147-4f43-bedf-96936e8eea0d-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "e9fb262e-7147-4f43-bedf-96936e8eea0d" (UID: "e9fb262e-7147-4f43-bedf-96936e8eea0d"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:58:05 crc kubenswrapper[4558]: I0120 16:58:05.597592 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9fb262e-7147-4f43-bedf-96936e8eea0d-scripts" (OuterVolumeSpecName: "scripts") pod "e9fb262e-7147-4f43-bedf-96936e8eea0d" (UID: "e9fb262e-7147-4f43-bedf-96936e8eea0d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:58:05 crc kubenswrapper[4558]: I0120 16:58:05.611350 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9fb262e-7147-4f43-bedf-96936e8eea0d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e9fb262e-7147-4f43-bedf-96936e8eea0d" (UID: "e9fb262e-7147-4f43-bedf-96936e8eea0d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:58:05 crc kubenswrapper[4558]: I0120 16:58:05.645443 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9fb262e-7147-4f43-bedf-96936e8eea0d-config-data" (OuterVolumeSpecName: "config-data") pod "e9fb262e-7147-4f43-bedf-96936e8eea0d" (UID: "e9fb262e-7147-4f43-bedf-96936e8eea0d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:58:05 crc kubenswrapper[4558]: I0120 16:58:05.691779 4558 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e9fb262e-7147-4f43-bedf-96936e8eea0d-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 20 16:58:05 crc kubenswrapper[4558]: I0120 16:58:05.691809 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e9fb262e-7147-4f43-bedf-96936e8eea0d-logs\") on node \"crc\" DevicePath \"\"" Jan 20 16:58:05 crc kubenswrapper[4558]: I0120 16:58:05.691819 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9fb262e-7147-4f43-bedf-96936e8eea0d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 16:58:05 crc kubenswrapper[4558]: I0120 16:58:05.691827 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e9fb262e-7147-4f43-bedf-96936e8eea0d-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 16:58:05 crc kubenswrapper[4558]: I0120 16:58:05.691834 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e9fb262e-7147-4f43-bedf-96936e8eea0d-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 16:58:05 crc kubenswrapper[4558]: I0120 16:58:05.691842 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e9fb262e-7147-4f43-bedf-96936e8eea0d-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 16:58:05 crc kubenswrapper[4558]: I0120 16:58:05.691850 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q9nrm\" (UniqueName: \"kubernetes.io/projected/e9fb262e-7147-4f43-bedf-96936e8eea0d-kube-api-access-q9nrm\") on node \"crc\" DevicePath \"\"" Jan 20 16:58:05 crc kubenswrapper[4558]: I0120 16:58:05.753057 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:58:05 crc kubenswrapper[4558]: I0120 16:58:05.753549 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:58:05 crc kubenswrapper[4558]: I0120 16:58:05.841434 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:58:05 crc kubenswrapper[4558]: I0120 16:58:05.915657 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-595757ff5d-frx87" event={"ID":"3c6de94f-c605-43d9-97b5-ccf91e49d1fb","Type":"ContainerStarted","Data":"ff9f3ae585b003190c9a4e57172ef28f205a59206653a665e67865ef3bac9d47"} Jan 20 16:58:05 crc kubenswrapper[4558]: I0120 16:58:05.915877 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-595757ff5d-frx87" 
event={"ID":"3c6de94f-c605-43d9-97b5-ccf91e49d1fb","Type":"ContainerStarted","Data":"065a0343a2fa20d49288d6aad3e2c8001219424d475f4cf3fa0b18d5f777af97"} Jan 20 16:58:05 crc kubenswrapper[4558]: I0120 16:58:05.915890 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-595757ff5d-frx87" event={"ID":"3c6de94f-c605-43d9-97b5-ccf91e49d1fb","Type":"ContainerStarted","Data":"c495fe3ee551481098ce5d2fed77126628308deee2ebfade29bf4bdd17f835c0"} Jan 20 16:58:05 crc kubenswrapper[4558]: I0120 16:58:05.915938 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/barbican-api-595757ff5d-frx87" Jan 20 16:58:05 crc kubenswrapper[4558]: I0120 16:58:05.915959 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/barbican-api-595757ff5d-frx87" Jan 20 16:58:05 crc kubenswrapper[4558]: I0120 16:58:05.921653 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:58:05 crc kubenswrapper[4558]: I0120 16:58:05.926962 4558 generic.go:334] "Generic (PLEG): container finished" podID="e9fb262e-7147-4f43-bedf-96936e8eea0d" containerID="bc01840859b7663e90d5bdc41c63bb1cdbffbf13b837c36af41599cd52b68794" exitCode=0 Jan 20 16:58:05 crc kubenswrapper[4558]: I0120 16:58:05.927074 4558 generic.go:334] "Generic (PLEG): container finished" podID="e9fb262e-7147-4f43-bedf-96936e8eea0d" containerID="e57b0e1983683a09c62276a34c8d4c4edb36f370bfc8dbf13f659e8276f522e8" exitCode=143 Jan 20 16:58:05 crc kubenswrapper[4558]: I0120 16:58:05.927884 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 16:58:05 crc kubenswrapper[4558]: I0120 16:58:05.928760 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"e9fb262e-7147-4f43-bedf-96936e8eea0d","Type":"ContainerDied","Data":"bc01840859b7663e90d5bdc41c63bb1cdbffbf13b837c36af41599cd52b68794"} Jan 20 16:58:05 crc kubenswrapper[4558]: I0120 16:58:05.928815 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"e9fb262e-7147-4f43-bedf-96936e8eea0d","Type":"ContainerDied","Data":"e57b0e1983683a09c62276a34c8d4c4edb36f370bfc8dbf13f659e8276f522e8"} Jan 20 16:58:05 crc kubenswrapper[4558]: I0120 16:58:05.928849 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"e9fb262e-7147-4f43-bedf-96936e8eea0d","Type":"ContainerDied","Data":"3a61005a56a4d40a835b05e3fb63e8cdefc180dbf489b24c8f75d455fc0d5e3d"} Jan 20 16:58:05 crc kubenswrapper[4558]: I0120 16:58:05.928871 4558 scope.go:117] "RemoveContainer" containerID="bc01840859b7663e90d5bdc41c63bb1cdbffbf13b837c36af41599cd52b68794" Jan 20 16:58:05 crc kubenswrapper[4558]: I0120 16:58:05.944827 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-api-595757ff5d-frx87" podStartSLOduration=1.944811614 podStartE2EDuration="1.944811614s" podCreationTimestamp="2026-01-20 16:58:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:58:05.931225718 +0000 UTC m=+979.691563684" watchObservedRunningTime="2026-01-20 16:58:05.944811614 +0000 UTC m=+979.705149581" Jan 20 16:58:05 crc kubenswrapper[4558]: I0120 16:58:05.969375 4558 scope.go:117] "RemoveContainer" 
containerID="e57b0e1983683a09c62276a34c8d4c4edb36f370bfc8dbf13f659e8276f522e8" Jan 20 16:58:05 crc kubenswrapper[4558]: I0120 16:58:05.985877 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 16:58:05 crc kubenswrapper[4558]: I0120 16:58:05.991707 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 16:58:05 crc kubenswrapper[4558]: I0120 16:58:05.997836 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 16:58:06 crc kubenswrapper[4558]: I0120 16:58:06.005882 4558 scope.go:117] "RemoveContainer" containerID="bc01840859b7663e90d5bdc41c63bb1cdbffbf13b837c36af41599cd52b68794" Jan 20 16:58:06 crc kubenswrapper[4558]: E0120 16:58:06.006640 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bc01840859b7663e90d5bdc41c63bb1cdbffbf13b837c36af41599cd52b68794\": container with ID starting with bc01840859b7663e90d5bdc41c63bb1cdbffbf13b837c36af41599cd52b68794 not found: ID does not exist" containerID="bc01840859b7663e90d5bdc41c63bb1cdbffbf13b837c36af41599cd52b68794" Jan 20 16:58:06 crc kubenswrapper[4558]: I0120 16:58:06.006669 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bc01840859b7663e90d5bdc41c63bb1cdbffbf13b837c36af41599cd52b68794"} err="failed to get container status \"bc01840859b7663e90d5bdc41c63bb1cdbffbf13b837c36af41599cd52b68794\": rpc error: code = NotFound desc = could not find container \"bc01840859b7663e90d5bdc41c63bb1cdbffbf13b837c36af41599cd52b68794\": container with ID starting with bc01840859b7663e90d5bdc41c63bb1cdbffbf13b837c36af41599cd52b68794 not found: ID does not exist" Jan 20 16:58:06 crc kubenswrapper[4558]: I0120 16:58:06.006688 4558 scope.go:117] "RemoveContainer" containerID="e57b0e1983683a09c62276a34c8d4c4edb36f370bfc8dbf13f659e8276f522e8" Jan 20 16:58:06 crc kubenswrapper[4558]: E0120 16:58:06.006819 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9fb262e-7147-4f43-bedf-96936e8eea0d" containerName="cinder-api-log" Jan 20 16:58:06 crc kubenswrapper[4558]: I0120 16:58:06.006893 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9fb262e-7147-4f43-bedf-96936e8eea0d" containerName="cinder-api-log" Jan 20 16:58:06 crc kubenswrapper[4558]: E0120 16:58:06.006970 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9fb262e-7147-4f43-bedf-96936e8eea0d" containerName="cinder-api" Jan 20 16:58:06 crc kubenswrapper[4558]: I0120 16:58:06.007017 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9fb262e-7147-4f43-bedf-96936e8eea0d" containerName="cinder-api" Jan 20 16:58:06 crc kubenswrapper[4558]: I0120 16:58:06.007338 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e9fb262e-7147-4f43-bedf-96936e8eea0d" containerName="cinder-api" Jan 20 16:58:06 crc kubenswrapper[4558]: I0120 16:58:06.007416 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e9fb262e-7147-4f43-bedf-96936e8eea0d" containerName="cinder-api-log" Jan 20 16:58:06 crc kubenswrapper[4558]: E0120 16:58:06.007638 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e57b0e1983683a09c62276a34c8d4c4edb36f370bfc8dbf13f659e8276f522e8\": container with ID starting with e57b0e1983683a09c62276a34c8d4c4edb36f370bfc8dbf13f659e8276f522e8 not found: ID does not exist" 
containerID="e57b0e1983683a09c62276a34c8d4c4edb36f370bfc8dbf13f659e8276f522e8" Jan 20 16:58:06 crc kubenswrapper[4558]: I0120 16:58:06.007663 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e57b0e1983683a09c62276a34c8d4c4edb36f370bfc8dbf13f659e8276f522e8"} err="failed to get container status \"e57b0e1983683a09c62276a34c8d4c4edb36f370bfc8dbf13f659e8276f522e8\": rpc error: code = NotFound desc = could not find container \"e57b0e1983683a09c62276a34c8d4c4edb36f370bfc8dbf13f659e8276f522e8\": container with ID starting with e57b0e1983683a09c62276a34c8d4c4edb36f370bfc8dbf13f659e8276f522e8 not found: ID does not exist" Jan 20 16:58:06 crc kubenswrapper[4558]: I0120 16:58:06.007675 4558 scope.go:117] "RemoveContainer" containerID="bc01840859b7663e90d5bdc41c63bb1cdbffbf13b837c36af41599cd52b68794" Jan 20 16:58:06 crc kubenswrapper[4558]: I0120 16:58:06.008394 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bc01840859b7663e90d5bdc41c63bb1cdbffbf13b837c36af41599cd52b68794"} err="failed to get container status \"bc01840859b7663e90d5bdc41c63bb1cdbffbf13b837c36af41599cd52b68794\": rpc error: code = NotFound desc = could not find container \"bc01840859b7663e90d5bdc41c63bb1cdbffbf13b837c36af41599cd52b68794\": container with ID starting with bc01840859b7663e90d5bdc41c63bb1cdbffbf13b837c36af41599cd52b68794 not found: ID does not exist" Jan 20 16:58:06 crc kubenswrapper[4558]: I0120 16:58:06.008430 4558 scope.go:117] "RemoveContainer" containerID="e57b0e1983683a09c62276a34c8d4c4edb36f370bfc8dbf13f659e8276f522e8" Jan 20 16:58:06 crc kubenswrapper[4558]: I0120 16:58:06.008876 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 16:58:06 crc kubenswrapper[4558]: I0120 16:58:06.009040 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e57b0e1983683a09c62276a34c8d4c4edb36f370bfc8dbf13f659e8276f522e8"} err="failed to get container status \"e57b0e1983683a09c62276a34c8d4c4edb36f370bfc8dbf13f659e8276f522e8\": rpc error: code = NotFound desc = could not find container \"e57b0e1983683a09c62276a34c8d4c4edb36f370bfc8dbf13f659e8276f522e8\": container with ID starting with e57b0e1983683a09c62276a34c8d4c4edb36f370bfc8dbf13f659e8276f522e8 not found: ID does not exist" Jan 20 16:58:06 crc kubenswrapper[4558]: I0120 16:58:06.010450 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-api-config-data" Jan 20 16:58:06 crc kubenswrapper[4558]: I0120 16:58:06.011354 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-cinder-internal-svc" Jan 20 16:58:06 crc kubenswrapper[4558]: I0120 16:58:06.012158 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-cinder-public-svc" Jan 20 16:58:06 crc kubenswrapper[4558]: I0120 16:58:06.027011 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 16:58:06 crc kubenswrapper[4558]: I0120 16:58:06.104498 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9vwjp\" (UniqueName: \"kubernetes.io/projected/764c7a15-6a1a-470c-9d0b-a63ed418cc09-kube-api-access-9vwjp\") pod \"cinder-api-0\" (UID: \"764c7a15-6a1a-470c-9d0b-a63ed418cc09\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 16:58:06 crc kubenswrapper[4558]: I0120 
16:58:06.104552 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/764c7a15-6a1a-470c-9d0b-a63ed418cc09-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"764c7a15-6a1a-470c-9d0b-a63ed418cc09\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 16:58:06 crc kubenswrapper[4558]: I0120 16:58:06.104605 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/764c7a15-6a1a-470c-9d0b-a63ed418cc09-etc-machine-id\") pod \"cinder-api-0\" (UID: \"764c7a15-6a1a-470c-9d0b-a63ed418cc09\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 16:58:06 crc kubenswrapper[4558]: I0120 16:58:06.104626 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/764c7a15-6a1a-470c-9d0b-a63ed418cc09-config-data\") pod \"cinder-api-0\" (UID: \"764c7a15-6a1a-470c-9d0b-a63ed418cc09\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 16:58:06 crc kubenswrapper[4558]: I0120 16:58:06.104682 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/764c7a15-6a1a-470c-9d0b-a63ed418cc09-logs\") pod \"cinder-api-0\" (UID: \"764c7a15-6a1a-470c-9d0b-a63ed418cc09\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 16:58:06 crc kubenswrapper[4558]: I0120 16:58:06.104699 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/764c7a15-6a1a-470c-9d0b-a63ed418cc09-scripts\") pod \"cinder-api-0\" (UID: \"764c7a15-6a1a-470c-9d0b-a63ed418cc09\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 16:58:06 crc kubenswrapper[4558]: I0120 16:58:06.104713 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/764c7a15-6a1a-470c-9d0b-a63ed418cc09-public-tls-certs\") pod \"cinder-api-0\" (UID: \"764c7a15-6a1a-470c-9d0b-a63ed418cc09\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 16:58:06 crc kubenswrapper[4558]: I0120 16:58:06.104807 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/764c7a15-6a1a-470c-9d0b-a63ed418cc09-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"764c7a15-6a1a-470c-9d0b-a63ed418cc09\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 16:58:06 crc kubenswrapper[4558]: I0120 16:58:06.104842 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/764c7a15-6a1a-470c-9d0b-a63ed418cc09-config-data-custom\") pod \"cinder-api-0\" (UID: \"764c7a15-6a1a-470c-9d0b-a63ed418cc09\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 16:58:06 crc kubenswrapper[4558]: I0120 16:58:06.206423 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/764c7a15-6a1a-470c-9d0b-a63ed418cc09-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"764c7a15-6a1a-470c-9d0b-a63ed418cc09\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 16:58:06 crc kubenswrapper[4558]: I0120 16:58:06.206472 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"config-data-custom\" (UniqueName: \"kubernetes.io/secret/764c7a15-6a1a-470c-9d0b-a63ed418cc09-config-data-custom\") pod \"cinder-api-0\" (UID: \"764c7a15-6a1a-470c-9d0b-a63ed418cc09\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 16:58:06 crc kubenswrapper[4558]: I0120 16:58:06.206524 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9vwjp\" (UniqueName: \"kubernetes.io/projected/764c7a15-6a1a-470c-9d0b-a63ed418cc09-kube-api-access-9vwjp\") pod \"cinder-api-0\" (UID: \"764c7a15-6a1a-470c-9d0b-a63ed418cc09\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 16:58:06 crc kubenswrapper[4558]: I0120 16:58:06.206597 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/764c7a15-6a1a-470c-9d0b-a63ed418cc09-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"764c7a15-6a1a-470c-9d0b-a63ed418cc09\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 16:58:06 crc kubenswrapper[4558]: I0120 16:58:06.206677 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/764c7a15-6a1a-470c-9d0b-a63ed418cc09-etc-machine-id\") pod \"cinder-api-0\" (UID: \"764c7a15-6a1a-470c-9d0b-a63ed418cc09\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 16:58:06 crc kubenswrapper[4558]: I0120 16:58:06.206701 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/764c7a15-6a1a-470c-9d0b-a63ed418cc09-config-data\") pod \"cinder-api-0\" (UID: \"764c7a15-6a1a-470c-9d0b-a63ed418cc09\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 16:58:06 crc kubenswrapper[4558]: I0120 16:58:06.206800 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/764c7a15-6a1a-470c-9d0b-a63ed418cc09-logs\") pod \"cinder-api-0\" (UID: \"764c7a15-6a1a-470c-9d0b-a63ed418cc09\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 16:58:06 crc kubenswrapper[4558]: I0120 16:58:06.206826 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/764c7a15-6a1a-470c-9d0b-a63ed418cc09-scripts\") pod \"cinder-api-0\" (UID: \"764c7a15-6a1a-470c-9d0b-a63ed418cc09\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 16:58:06 crc kubenswrapper[4558]: I0120 16:58:06.206879 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/764c7a15-6a1a-470c-9d0b-a63ed418cc09-public-tls-certs\") pod \"cinder-api-0\" (UID: \"764c7a15-6a1a-470c-9d0b-a63ed418cc09\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 16:58:06 crc kubenswrapper[4558]: I0120 16:58:06.207432 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/764c7a15-6a1a-470c-9d0b-a63ed418cc09-etc-machine-id\") pod \"cinder-api-0\" (UID: \"764c7a15-6a1a-470c-9d0b-a63ed418cc09\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 16:58:06 crc kubenswrapper[4558]: I0120 16:58:06.207659 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/764c7a15-6a1a-470c-9d0b-a63ed418cc09-logs\") pod \"cinder-api-0\" (UID: \"764c7a15-6a1a-470c-9d0b-a63ed418cc09\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 16:58:06 crc kubenswrapper[4558]: I0120 16:58:06.210245 
4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/764c7a15-6a1a-470c-9d0b-a63ed418cc09-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"764c7a15-6a1a-470c-9d0b-a63ed418cc09\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 16:58:06 crc kubenswrapper[4558]: I0120 16:58:06.210762 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/764c7a15-6a1a-470c-9d0b-a63ed418cc09-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"764c7a15-6a1a-470c-9d0b-a63ed418cc09\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 16:58:06 crc kubenswrapper[4558]: I0120 16:58:06.211077 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/764c7a15-6a1a-470c-9d0b-a63ed418cc09-public-tls-certs\") pod \"cinder-api-0\" (UID: \"764c7a15-6a1a-470c-9d0b-a63ed418cc09\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 16:58:06 crc kubenswrapper[4558]: I0120 16:58:06.211102 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/764c7a15-6a1a-470c-9d0b-a63ed418cc09-config-data-custom\") pod \"cinder-api-0\" (UID: \"764c7a15-6a1a-470c-9d0b-a63ed418cc09\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 16:58:06 crc kubenswrapper[4558]: I0120 16:58:06.212026 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/764c7a15-6a1a-470c-9d0b-a63ed418cc09-scripts\") pod \"cinder-api-0\" (UID: \"764c7a15-6a1a-470c-9d0b-a63ed418cc09\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 16:58:06 crc kubenswrapper[4558]: I0120 16:58:06.214684 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/764c7a15-6a1a-470c-9d0b-a63ed418cc09-config-data\") pod \"cinder-api-0\" (UID: \"764c7a15-6a1a-470c-9d0b-a63ed418cc09\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 16:58:06 crc kubenswrapper[4558]: I0120 16:58:06.222830 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9vwjp\" (UniqueName: \"kubernetes.io/projected/764c7a15-6a1a-470c-9d0b-a63ed418cc09-kube-api-access-9vwjp\") pod \"cinder-api-0\" (UID: \"764c7a15-6a1a-470c-9d0b-a63ed418cc09\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 16:58:06 crc kubenswrapper[4558]: I0120 16:58:06.331333 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 16:58:06 crc kubenswrapper[4558]: I0120 16:58:06.575230 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e9fb262e-7147-4f43-bedf-96936e8eea0d" path="/var/lib/kubelet/pods/e9fb262e-7147-4f43-bedf-96936e8eea0d/volumes" Jan 20 16:58:06 crc kubenswrapper[4558]: I0120 16:58:06.713326 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 16:58:06 crc kubenswrapper[4558]: I0120 16:58:06.937150 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"764c7a15-6a1a-470c-9d0b-a63ed418cc09","Type":"ContainerStarted","Data":"b1cb1f1f4309dc51f85279c592856913ccf83c72a5d397ad1932b9a9a1ae4bf2"} Jan 20 16:58:08 crc kubenswrapper[4558]: I0120 16:58:08.541323 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/barbican-api-7bdfbf6ddd-thvjg" podUID="8be0815b-492d-4db8-bfd5-29d81bdb14d6" containerName="barbican-api-log" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 20 16:58:08 crc kubenswrapper[4558]: I0120 16:58:08.541586 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/barbican-api-7bdfbf6ddd-thvjg" Jan 20 16:58:09 crc kubenswrapper[4558]: I0120 16:58:09.363494 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 16:58:09 crc kubenswrapper[4558]: I0120 16:58:09.635637 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 16:58:09 crc kubenswrapper[4558]: I0120 16:58:09.701483 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/barbican-api-7bdfbf6ddd-thvjg" Jan 20 16:58:09 crc kubenswrapper[4558]: I0120 16:58:09.972340 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"764c7a15-6a1a-470c-9d0b-a63ed418cc09","Type":"ContainerStarted","Data":"5fcc9425b33989b0ac07f50e1aa0774a071177ee9453a094ad6d619c7974603b"} Jan 20 16:58:10 crc kubenswrapper[4558]: I0120 16:58:10.007662 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 16:58:10 crc kubenswrapper[4558]: E0120 16:58:10.383913 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ceilometer-central-agent\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-kuttl-tests/ceilometer-0" podUID="c1e3af55-1788-4d23-b3c5-6227e848ea15" Jan 20 16:58:10 crc kubenswrapper[4558]: I0120 16:58:10.991604 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"c1e3af55-1788-4d23-b3c5-6227e848ea15","Type":"ContainerStarted","Data":"9d493e28fa6e5c99c3da647eeb0734af41d5c285c3247a20f83d595f2326552a"} Jan 20 16:58:10 crc kubenswrapper[4558]: I0120 16:58:10.991874 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:58:10 crc kubenswrapper[4558]: I0120 16:58:10.991920 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="c1e3af55-1788-4d23-b3c5-6227e848ea15" containerName="proxy-httpd" containerID="cri-o://9d493e28fa6e5c99c3da647eeb0734af41d5c285c3247a20f83d595f2326552a" gracePeriod=30 
Jan 20 16:58:10 crc kubenswrapper[4558]: I0120 16:58:10.991918 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="c1e3af55-1788-4d23-b3c5-6227e848ea15" containerName="ceilometer-notification-agent" containerID="cri-o://5df8eb7fa973376010ffaf1986356d036d2880d6b817d28856f89bb5815b2133" gracePeriod=30 Jan 20 16:58:10 crc kubenswrapper[4558]: I0120 16:58:10.991982 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="c1e3af55-1788-4d23-b3c5-6227e848ea15" containerName="sg-core" containerID="cri-o://cede7fedfff117f8c9861769debc9b41b0ace89fa517ecf7fb22b21f6cc45cca" gracePeriod=30 Jan 20 16:58:10 crc kubenswrapper[4558]: I0120 16:58:10.996873 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"764c7a15-6a1a-470c-9d0b-a63ed418cc09","Type":"ContainerStarted","Data":"44cfb2c4bfc8454ac6259ca35fbc70464e44213a38cdb484b02b66bc424c90d5"} Jan 20 16:58:10 crc kubenswrapper[4558]: I0120 16:58:10.996978 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-scheduler-0" podUID="d4a1ee65-2f6a-4db8-b56d-f0729c084060" containerName="cinder-scheduler" containerID="cri-o://03b367715211c190150b7a3c5d23027458fc3dfb546230b77e8b7b4f4e3cbe86" gracePeriod=30 Jan 20 16:58:10 crc kubenswrapper[4558]: I0120 16:58:10.997047 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-scheduler-0" podUID="d4a1ee65-2f6a-4db8-b56d-f0729c084060" containerName="probe" containerID="cri-o://243e6138e05c0c7fcce3ba9f609d21d90a0fef60ee11f1b373592271c6c011c0" gracePeriod=30 Jan 20 16:58:11 crc kubenswrapper[4558]: I0120 16:58:11.037239 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/cinder-api-0" podStartSLOduration=6.037222344 podStartE2EDuration="6.037222344s" podCreationTimestamp="2026-01-20 16:58:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:58:11.032537084 +0000 UTC m=+984.792875051" watchObservedRunningTime="2026-01-20 16:58:11.037222344 +0000 UTC m=+984.797560311" Jan 20 16:58:11 crc kubenswrapper[4558]: I0120 16:58:11.332105 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 16:58:12 crc kubenswrapper[4558]: I0120 16:58:12.005298 4558 generic.go:334] "Generic (PLEG): container finished" podID="d4a1ee65-2f6a-4db8-b56d-f0729c084060" containerID="243e6138e05c0c7fcce3ba9f609d21d90a0fef60ee11f1b373592271c6c011c0" exitCode=0 Jan 20 16:58:12 crc kubenswrapper[4558]: I0120 16:58:12.005384 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"d4a1ee65-2f6a-4db8-b56d-f0729c084060","Type":"ContainerDied","Data":"243e6138e05c0c7fcce3ba9f609d21d90a0fef60ee11f1b373592271c6c011c0"} Jan 20 16:58:12 crc kubenswrapper[4558]: I0120 16:58:12.008485 4558 generic.go:334] "Generic (PLEG): container finished" podID="c1e3af55-1788-4d23-b3c5-6227e848ea15" containerID="9d493e28fa6e5c99c3da647eeb0734af41d5c285c3247a20f83d595f2326552a" exitCode=0 Jan 20 16:58:12 crc kubenswrapper[4558]: I0120 16:58:12.008538 4558 generic.go:334] "Generic (PLEG): container finished" podID="c1e3af55-1788-4d23-b3c5-6227e848ea15" 
containerID="cede7fedfff117f8c9861769debc9b41b0ace89fa517ecf7fb22b21f6cc45cca" exitCode=2 Jan 20 16:58:12 crc kubenswrapper[4558]: I0120 16:58:12.008572 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"c1e3af55-1788-4d23-b3c5-6227e848ea15","Type":"ContainerDied","Data":"9d493e28fa6e5c99c3da647eeb0734af41d5c285c3247a20f83d595f2326552a"} Jan 20 16:58:12 crc kubenswrapper[4558]: I0120 16:58:12.008603 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"c1e3af55-1788-4d23-b3c5-6227e848ea15","Type":"ContainerDied","Data":"cede7fedfff117f8c9861769debc9b41b0ace89fa517ecf7fb22b21f6cc45cca"} Jan 20 16:58:15 crc kubenswrapper[4558]: I0120 16:58:15.040119 4558 generic.go:334] "Generic (PLEG): container finished" podID="d4a1ee65-2f6a-4db8-b56d-f0729c084060" containerID="03b367715211c190150b7a3c5d23027458fc3dfb546230b77e8b7b4f4e3cbe86" exitCode=0 Jan 20 16:58:15 crc kubenswrapper[4558]: I0120 16:58:15.040283 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"d4a1ee65-2f6a-4db8-b56d-f0729c084060","Type":"ContainerDied","Data":"03b367715211c190150b7a3c5d23027458fc3dfb546230b77e8b7b4f4e3cbe86"} Jan 20 16:58:15 crc kubenswrapper[4558]: I0120 16:58:15.040449 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"d4a1ee65-2f6a-4db8-b56d-f0729c084060","Type":"ContainerDied","Data":"f52909e9c4945ba2e0b0d9dc2e5bed6740c3eb850afdd70c18bb16c0f63a70fd"} Jan 20 16:58:15 crc kubenswrapper[4558]: I0120 16:58:15.040464 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f52909e9c4945ba2e0b0d9dc2e5bed6740c3eb850afdd70c18bb16c0f63a70fd" Jan 20 16:58:15 crc kubenswrapper[4558]: I0120 16:58:15.042439 4558 generic.go:334] "Generic (PLEG): container finished" podID="c1e3af55-1788-4d23-b3c5-6227e848ea15" containerID="5df8eb7fa973376010ffaf1986356d036d2880d6b817d28856f89bb5815b2133" exitCode=0 Jan 20 16:58:15 crc kubenswrapper[4558]: I0120 16:58:15.042470 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"c1e3af55-1788-4d23-b3c5-6227e848ea15","Type":"ContainerDied","Data":"5df8eb7fa973376010ffaf1986356d036d2880d6b817d28856f89bb5815b2133"} Jan 20 16:58:15 crc kubenswrapper[4558]: I0120 16:58:15.124728 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 16:58:15 crc kubenswrapper[4558]: I0120 16:58:15.178876 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d4a1ee65-2f6a-4db8-b56d-f0729c084060-etc-machine-id\") pod \"d4a1ee65-2f6a-4db8-b56d-f0729c084060\" (UID: \"d4a1ee65-2f6a-4db8-b56d-f0729c084060\") " Jan 20 16:58:15 crc kubenswrapper[4558]: I0120 16:58:15.179256 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d4a1ee65-2f6a-4db8-b56d-f0729c084060-config-data-custom\") pod \"d4a1ee65-2f6a-4db8-b56d-f0729c084060\" (UID: \"d4a1ee65-2f6a-4db8-b56d-f0729c084060\") " Jan 20 16:58:15 crc kubenswrapper[4558]: I0120 16:58:15.178945 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d4a1ee65-2f6a-4db8-b56d-f0729c084060-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "d4a1ee65-2f6a-4db8-b56d-f0729c084060" (UID: "d4a1ee65-2f6a-4db8-b56d-f0729c084060"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 16:58:15 crc kubenswrapper[4558]: I0120 16:58:15.179308 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4a1ee65-2f6a-4db8-b56d-f0729c084060-config-data\") pod \"d4a1ee65-2f6a-4db8-b56d-f0729c084060\" (UID: \"d4a1ee65-2f6a-4db8-b56d-f0729c084060\") " Jan 20 16:58:15 crc kubenswrapper[4558]: I0120 16:58:15.179510 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zbhzm\" (UniqueName: \"kubernetes.io/projected/d4a1ee65-2f6a-4db8-b56d-f0729c084060-kube-api-access-zbhzm\") pod \"d4a1ee65-2f6a-4db8-b56d-f0729c084060\" (UID: \"d4a1ee65-2f6a-4db8-b56d-f0729c084060\") " Jan 20 16:58:15 crc kubenswrapper[4558]: I0120 16:58:15.179539 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d4a1ee65-2f6a-4db8-b56d-f0729c084060-scripts\") pod \"d4a1ee65-2f6a-4db8-b56d-f0729c084060\" (UID: \"d4a1ee65-2f6a-4db8-b56d-f0729c084060\") " Jan 20 16:58:15 crc kubenswrapper[4558]: I0120 16:58:15.179695 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4a1ee65-2f6a-4db8-b56d-f0729c084060-combined-ca-bundle\") pod \"d4a1ee65-2f6a-4db8-b56d-f0729c084060\" (UID: \"d4a1ee65-2f6a-4db8-b56d-f0729c084060\") " Jan 20 16:58:15 crc kubenswrapper[4558]: I0120 16:58:15.185480 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4a1ee65-2f6a-4db8-b56d-f0729c084060-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "d4a1ee65-2f6a-4db8-b56d-f0729c084060" (UID: "d4a1ee65-2f6a-4db8-b56d-f0729c084060"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:58:15 crc kubenswrapper[4558]: I0120 16:58:15.185619 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d4a1ee65-2f6a-4db8-b56d-f0729c084060-kube-api-access-zbhzm" (OuterVolumeSpecName: "kube-api-access-zbhzm") pod "d4a1ee65-2f6a-4db8-b56d-f0729c084060" (UID: "d4a1ee65-2f6a-4db8-b56d-f0729c084060"). InnerVolumeSpecName "kube-api-access-zbhzm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:58:15 crc kubenswrapper[4558]: I0120 16:58:15.188878 4558 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d4a1ee65-2f6a-4db8-b56d-f0729c084060-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 20 16:58:15 crc kubenswrapper[4558]: I0120 16:58:15.190535 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4a1ee65-2f6a-4db8-b56d-f0729c084060-scripts" (OuterVolumeSpecName: "scripts") pod "d4a1ee65-2f6a-4db8-b56d-f0729c084060" (UID: "d4a1ee65-2f6a-4db8-b56d-f0729c084060"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:58:15 crc kubenswrapper[4558]: I0120 16:58:15.242138 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4a1ee65-2f6a-4db8-b56d-f0729c084060-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d4a1ee65-2f6a-4db8-b56d-f0729c084060" (UID: "d4a1ee65-2f6a-4db8-b56d-f0729c084060"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:58:15 crc kubenswrapper[4558]: I0120 16:58:15.258849 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:58:15 crc kubenswrapper[4558]: I0120 16:58:15.281129 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4a1ee65-2f6a-4db8-b56d-f0729c084060-config-data" (OuterVolumeSpecName: "config-data") pod "d4a1ee65-2f6a-4db8-b56d-f0729c084060" (UID: "d4a1ee65-2f6a-4db8-b56d-f0729c084060"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:58:15 crc kubenswrapper[4558]: I0120 16:58:15.290011 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c1e3af55-1788-4d23-b3c5-6227e848ea15-run-httpd\") pod \"c1e3af55-1788-4d23-b3c5-6227e848ea15\" (UID: \"c1e3af55-1788-4d23-b3c5-6227e848ea15\") " Jan 20 16:58:15 crc kubenswrapper[4558]: I0120 16:58:15.290081 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c1e3af55-1788-4d23-b3c5-6227e848ea15-scripts\") pod \"c1e3af55-1788-4d23-b3c5-6227e848ea15\" (UID: \"c1e3af55-1788-4d23-b3c5-6227e848ea15\") " Jan 20 16:58:15 crc kubenswrapper[4558]: I0120 16:58:15.290223 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c1e3af55-1788-4d23-b3c5-6227e848ea15-sg-core-conf-yaml\") pod \"c1e3af55-1788-4d23-b3c5-6227e848ea15\" (UID: \"c1e3af55-1788-4d23-b3c5-6227e848ea15\") " Jan 20 16:58:15 crc kubenswrapper[4558]: I0120 16:58:15.290294 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ghzgm\" (UniqueName: \"kubernetes.io/projected/c1e3af55-1788-4d23-b3c5-6227e848ea15-kube-api-access-ghzgm\") pod \"c1e3af55-1788-4d23-b3c5-6227e848ea15\" (UID: \"c1e3af55-1788-4d23-b3c5-6227e848ea15\") " Jan 20 16:58:15 crc kubenswrapper[4558]: I0120 16:58:15.290315 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c1e3af55-1788-4d23-b3c5-6227e848ea15-config-data\") pod \"c1e3af55-1788-4d23-b3c5-6227e848ea15\" (UID: \"c1e3af55-1788-4d23-b3c5-6227e848ea15\") " Jan 
20 16:58:15 crc kubenswrapper[4558]: I0120 16:58:15.290403 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c1e3af55-1788-4d23-b3c5-6227e848ea15-log-httpd\") pod \"c1e3af55-1788-4d23-b3c5-6227e848ea15\" (UID: \"c1e3af55-1788-4d23-b3c5-6227e848ea15\") " Jan 20 16:58:15 crc kubenswrapper[4558]: I0120 16:58:15.290429 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1e3af55-1788-4d23-b3c5-6227e848ea15-combined-ca-bundle\") pod \"c1e3af55-1788-4d23-b3c5-6227e848ea15\" (UID: \"c1e3af55-1788-4d23-b3c5-6227e848ea15\") " Jan 20 16:58:15 crc kubenswrapper[4558]: I0120 16:58:15.290595 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c1e3af55-1788-4d23-b3c5-6227e848ea15-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "c1e3af55-1788-4d23-b3c5-6227e848ea15" (UID: "c1e3af55-1788-4d23-b3c5-6227e848ea15"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 16:58:15 crc kubenswrapper[4558]: I0120 16:58:15.290836 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c1e3af55-1788-4d23-b3c5-6227e848ea15-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "c1e3af55-1788-4d23-b3c5-6227e848ea15" (UID: "c1e3af55-1788-4d23-b3c5-6227e848ea15"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 16:58:15 crc kubenswrapper[4558]: I0120 16:58:15.291327 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c1e3af55-1788-4d23-b3c5-6227e848ea15-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 16:58:15 crc kubenswrapper[4558]: I0120 16:58:15.291341 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d4a1ee65-2f6a-4db8-b56d-f0729c084060-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 16:58:15 crc kubenswrapper[4558]: I0120 16:58:15.291351 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4a1ee65-2f6a-4db8-b56d-f0729c084060-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 16:58:15 crc kubenswrapper[4558]: I0120 16:58:15.291360 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c1e3af55-1788-4d23-b3c5-6227e848ea15-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 16:58:15 crc kubenswrapper[4558]: I0120 16:58:15.291368 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zbhzm\" (UniqueName: \"kubernetes.io/projected/d4a1ee65-2f6a-4db8-b56d-f0729c084060-kube-api-access-zbhzm\") on node \"crc\" DevicePath \"\"" Jan 20 16:58:15 crc kubenswrapper[4558]: I0120 16:58:15.291376 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d4a1ee65-2f6a-4db8-b56d-f0729c084060-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 16:58:15 crc kubenswrapper[4558]: I0120 16:58:15.291384 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4a1ee65-2f6a-4db8-b56d-f0729c084060-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 16:58:15 crc kubenswrapper[4558]: I0120 16:58:15.293433 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/secret/c1e3af55-1788-4d23-b3c5-6227e848ea15-scripts" (OuterVolumeSpecName: "scripts") pod "c1e3af55-1788-4d23-b3c5-6227e848ea15" (UID: "c1e3af55-1788-4d23-b3c5-6227e848ea15"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:58:15 crc kubenswrapper[4558]: I0120 16:58:15.293821 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c1e3af55-1788-4d23-b3c5-6227e848ea15-kube-api-access-ghzgm" (OuterVolumeSpecName: "kube-api-access-ghzgm") pod "c1e3af55-1788-4d23-b3c5-6227e848ea15" (UID: "c1e3af55-1788-4d23-b3c5-6227e848ea15"). InnerVolumeSpecName "kube-api-access-ghzgm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:58:15 crc kubenswrapper[4558]: I0120 16:58:15.314302 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c1e3af55-1788-4d23-b3c5-6227e848ea15-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "c1e3af55-1788-4d23-b3c5-6227e848ea15" (UID: "c1e3af55-1788-4d23-b3c5-6227e848ea15"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:58:15 crc kubenswrapper[4558]: I0120 16:58:15.328437 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c1e3af55-1788-4d23-b3c5-6227e848ea15-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c1e3af55-1788-4d23-b3c5-6227e848ea15" (UID: "c1e3af55-1788-4d23-b3c5-6227e848ea15"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:58:15 crc kubenswrapper[4558]: I0120 16:58:15.346244 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c1e3af55-1788-4d23-b3c5-6227e848ea15-config-data" (OuterVolumeSpecName: "config-data") pod "c1e3af55-1788-4d23-b3c5-6227e848ea15" (UID: "c1e3af55-1788-4d23-b3c5-6227e848ea15"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:58:15 crc kubenswrapper[4558]: I0120 16:58:15.393594 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c1e3af55-1788-4d23-b3c5-6227e848ea15-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 16:58:15 crc kubenswrapper[4558]: I0120 16:58:15.393629 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c1e3af55-1788-4d23-b3c5-6227e848ea15-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 16:58:15 crc kubenswrapper[4558]: I0120 16:58:15.393641 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ghzgm\" (UniqueName: \"kubernetes.io/projected/c1e3af55-1788-4d23-b3c5-6227e848ea15-kube-api-access-ghzgm\") on node \"crc\" DevicePath \"\"" Jan 20 16:58:15 crc kubenswrapper[4558]: I0120 16:58:15.393649 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c1e3af55-1788-4d23-b3c5-6227e848ea15-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 16:58:15 crc kubenswrapper[4558]: I0120 16:58:15.393658 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1e3af55-1788-4d23-b3c5-6227e848ea15-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 16:58:15 crc kubenswrapper[4558]: I0120 16:58:15.798001 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/barbican-api-595757ff5d-frx87" Jan 20 16:58:15 crc kubenswrapper[4558]: I0120 16:58:15.802777 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/barbican-api-595757ff5d-frx87" Jan 20 16:58:15 crc kubenswrapper[4558]: I0120 16:58:15.862821 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-api-7bdfbf6ddd-thvjg"] Jan 20 16:58:15 crc kubenswrapper[4558]: I0120 16:58:15.863024 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-api-7bdfbf6ddd-thvjg" podUID="8be0815b-492d-4db8-bfd5-29d81bdb14d6" containerName="barbican-api-log" containerID="cri-o://caeca9047300cdd39b8552502d469030f74bd63d62befb9645f590eaf61fd3f1" gracePeriod=30 Jan 20 16:58:15 crc kubenswrapper[4558]: I0120 16:58:15.863142 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-api-7bdfbf6ddd-thvjg" podUID="8be0815b-492d-4db8-bfd5-29d81bdb14d6" containerName="barbican-api" containerID="cri-o://53d735f4f7282a16615b6f3b8dc32ad8c06f8b6e5d927b47a7f63325bbbf042e" gracePeriod=30 Jan 20 16:58:16 crc kubenswrapper[4558]: I0120 16:58:16.049715 4558 generic.go:334] "Generic (PLEG): container finished" podID="8be0815b-492d-4db8-bfd5-29d81bdb14d6" containerID="caeca9047300cdd39b8552502d469030f74bd63d62befb9645f590eaf61fd3f1" exitCode=143 Jan 20 16:58:16 crc kubenswrapper[4558]: I0120 16:58:16.049787 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-7bdfbf6ddd-thvjg" event={"ID":"8be0815b-492d-4db8-bfd5-29d81bdb14d6","Type":"ContainerDied","Data":"caeca9047300cdd39b8552502d469030f74bd63d62befb9645f590eaf61fd3f1"} Jan 20 16:58:16 crc kubenswrapper[4558]: I0120 16:58:16.052344 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:58:16 crc kubenswrapper[4558]: I0120 16:58:16.059909 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"c1e3af55-1788-4d23-b3c5-6227e848ea15","Type":"ContainerDied","Data":"4ed7d820d77ed20afacf7d07db65e1e25610895dee06160c7c233aa856b7a72c"} Jan 20 16:58:16 crc kubenswrapper[4558]: I0120 16:58:16.059948 4558 scope.go:117] "RemoveContainer" containerID="9d493e28fa6e5c99c3da647eeb0734af41d5c285c3247a20f83d595f2326552a" Jan 20 16:58:16 crc kubenswrapper[4558]: I0120 16:58:16.060052 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 16:58:16 crc kubenswrapper[4558]: I0120 16:58:16.092858 4558 scope.go:117] "RemoveContainer" containerID="cede7fedfff117f8c9861769debc9b41b0ace89fa517ecf7fb22b21f6cc45cca" Jan 20 16:58:16 crc kubenswrapper[4558]: I0120 16:58:16.107061 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 16:58:16 crc kubenswrapper[4558]: I0120 16:58:16.114275 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 16:58:16 crc kubenswrapper[4558]: I0120 16:58:16.115381 4558 scope.go:117] "RemoveContainer" containerID="5df8eb7fa973376010ffaf1986356d036d2880d6b817d28856f89bb5815b2133" Jan 20 16:58:16 crc kubenswrapper[4558]: I0120 16:58:16.130660 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 16:58:16 crc kubenswrapper[4558]: I0120 16:58:16.140519 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 16:58:16 crc kubenswrapper[4558]: I0120 16:58:16.147377 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 16:58:16 crc kubenswrapper[4558]: E0120 16:58:16.147921 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4a1ee65-2f6a-4db8-b56d-f0729c084060" containerName="probe" Jan 20 16:58:16 crc kubenswrapper[4558]: I0120 16:58:16.147945 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4a1ee65-2f6a-4db8-b56d-f0729c084060" containerName="probe" Jan 20 16:58:16 crc kubenswrapper[4558]: E0120 16:58:16.147969 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4a1ee65-2f6a-4db8-b56d-f0729c084060" containerName="cinder-scheduler" Jan 20 16:58:16 crc kubenswrapper[4558]: I0120 16:58:16.147978 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4a1ee65-2f6a-4db8-b56d-f0729c084060" containerName="cinder-scheduler" Jan 20 16:58:16 crc kubenswrapper[4558]: E0120 16:58:16.147988 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1e3af55-1788-4d23-b3c5-6227e848ea15" containerName="proxy-httpd" Jan 20 16:58:16 crc kubenswrapper[4558]: I0120 16:58:16.147993 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1e3af55-1788-4d23-b3c5-6227e848ea15" containerName="proxy-httpd" Jan 20 16:58:16 crc kubenswrapper[4558]: E0120 16:58:16.148023 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1e3af55-1788-4d23-b3c5-6227e848ea15" containerName="ceilometer-notification-agent" Jan 20 16:58:16 crc kubenswrapper[4558]: I0120 16:58:16.148030 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1e3af55-1788-4d23-b3c5-6227e848ea15" containerName="ceilometer-notification-agent" Jan 20 16:58:16 crc kubenswrapper[4558]: E0120 
16:58:16.148044 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1e3af55-1788-4d23-b3c5-6227e848ea15" containerName="sg-core" Jan 20 16:58:16 crc kubenswrapper[4558]: I0120 16:58:16.148052 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1e3af55-1788-4d23-b3c5-6227e848ea15" containerName="sg-core" Jan 20 16:58:16 crc kubenswrapper[4558]: I0120 16:58:16.148317 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c1e3af55-1788-4d23-b3c5-6227e848ea15" containerName="proxy-httpd" Jan 20 16:58:16 crc kubenswrapper[4558]: I0120 16:58:16.148347 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c1e3af55-1788-4d23-b3c5-6227e848ea15" containerName="ceilometer-notification-agent" Jan 20 16:58:16 crc kubenswrapper[4558]: I0120 16:58:16.148363 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4a1ee65-2f6a-4db8-b56d-f0729c084060" containerName="cinder-scheduler" Jan 20 16:58:16 crc kubenswrapper[4558]: I0120 16:58:16.148379 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4a1ee65-2f6a-4db8-b56d-f0729c084060" containerName="probe" Jan 20 16:58:16 crc kubenswrapper[4558]: I0120 16:58:16.148396 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c1e3af55-1788-4d23-b3c5-6227e848ea15" containerName="sg-core" Jan 20 16:58:16 crc kubenswrapper[4558]: I0120 16:58:16.150143 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:58:16 crc kubenswrapper[4558]: I0120 16:58:16.152519 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 16:58:16 crc kubenswrapper[4558]: I0120 16:58:16.152719 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 16:58:16 crc kubenswrapper[4558]: I0120 16:58:16.153933 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 16:58:16 crc kubenswrapper[4558]: I0120 16:58:16.157089 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 16:58:16 crc kubenswrapper[4558]: I0120 16:58:16.162424 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-scheduler-config-data" Jan 20 16:58:16 crc kubenswrapper[4558]: I0120 16:58:16.162794 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 16:58:16 crc kubenswrapper[4558]: I0120 16:58:16.172317 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 16:58:16 crc kubenswrapper[4558]: I0120 16:58:16.211929 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5l9bc\" (UniqueName: \"kubernetes.io/projected/b8911a4f-a706-4956-9028-138c018a92ba-kube-api-access-5l9bc\") pod \"cinder-scheduler-0\" (UID: \"b8911a4f-a706-4956-9028-138c018a92ba\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 16:58:16 crc kubenswrapper[4558]: I0120 16:58:16.212266 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8911a4f-a706-4956-9028-138c018a92ba-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"b8911a4f-a706-4956-9028-138c018a92ba\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 16:58:16 crc kubenswrapper[4558]: I0120 16:58:16.212379 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:58:16 crc kubenswrapper[4558]: I0120 16:58:16.212478 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-frsxr\" (UniqueName: \"kubernetes.io/projected/2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d-kube-api-access-frsxr\") pod \"ceilometer-0\" (UID: \"2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:58:16 crc kubenswrapper[4558]: I0120 16:58:16.212615 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b8911a4f-a706-4956-9028-138c018a92ba-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"b8911a4f-a706-4956-9028-138c018a92ba\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 16:58:16 crc kubenswrapper[4558]: I0120 16:58:16.212689 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8911a4f-a706-4956-9028-138c018a92ba-config-data\") pod \"cinder-scheduler-0\" (UID: \"b8911a4f-a706-4956-9028-138c018a92ba\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 16:58:16 crc kubenswrapper[4558]: I0120 16:58:16.212758 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:58:16 crc kubenswrapper[4558]: I0120 16:58:16.213627 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d-run-httpd\") pod \"ceilometer-0\" (UID: \"2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:58:16 crc kubenswrapper[4558]: I0120 16:58:16.213720 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b8911a4f-a706-4956-9028-138c018a92ba-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"b8911a4f-a706-4956-9028-138c018a92ba\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 16:58:16 crc kubenswrapper[4558]: I0120 16:58:16.213774 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b8911a4f-a706-4956-9028-138c018a92ba-scripts\") pod \"cinder-scheduler-0\" (UID: \"b8911a4f-a706-4956-9028-138c018a92ba\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 16:58:16 crc kubenswrapper[4558]: I0120 16:58:16.213860 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d-config-data\") pod \"ceilometer-0\" (UID: \"2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:58:16 crc kubenswrapper[4558]: I0120 16:58:16.213884 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d-scripts\") pod \"ceilometer-0\" (UID: \"2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:58:16 crc kubenswrapper[4558]: I0120 16:58:16.213926 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d-log-httpd\") pod \"ceilometer-0\" (UID: \"2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:58:16 crc kubenswrapper[4558]: I0120 16:58:16.316277 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d-run-httpd\") pod \"ceilometer-0\" (UID: \"2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:58:16 crc kubenswrapper[4558]: I0120 16:58:16.316346 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b8911a4f-a706-4956-9028-138c018a92ba-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"b8911a4f-a706-4956-9028-138c018a92ba\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 16:58:16 crc kubenswrapper[4558]: I0120 16:58:16.316383 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b8911a4f-a706-4956-9028-138c018a92ba-scripts\") pod \"cinder-scheduler-0\" (UID: \"b8911a4f-a706-4956-9028-138c018a92ba\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 16:58:16 crc kubenswrapper[4558]: I0120 16:58:16.316440 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d-config-data\") pod \"ceilometer-0\" (UID: 
\"2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:58:16 crc kubenswrapper[4558]: I0120 16:58:16.316460 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d-scripts\") pod \"ceilometer-0\" (UID: \"2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:58:16 crc kubenswrapper[4558]: I0120 16:58:16.316488 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d-log-httpd\") pod \"ceilometer-0\" (UID: \"2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:58:16 crc kubenswrapper[4558]: I0120 16:58:16.316508 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5l9bc\" (UniqueName: \"kubernetes.io/projected/b8911a4f-a706-4956-9028-138c018a92ba-kube-api-access-5l9bc\") pod \"cinder-scheduler-0\" (UID: \"b8911a4f-a706-4956-9028-138c018a92ba\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 16:58:16 crc kubenswrapper[4558]: I0120 16:58:16.316570 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8911a4f-a706-4956-9028-138c018a92ba-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"b8911a4f-a706-4956-9028-138c018a92ba\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 16:58:16 crc kubenswrapper[4558]: I0120 16:58:16.316593 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:58:16 crc kubenswrapper[4558]: I0120 16:58:16.316623 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-frsxr\" (UniqueName: \"kubernetes.io/projected/2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d-kube-api-access-frsxr\") pod \"ceilometer-0\" (UID: \"2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:58:16 crc kubenswrapper[4558]: I0120 16:58:16.316677 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b8911a4f-a706-4956-9028-138c018a92ba-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"b8911a4f-a706-4956-9028-138c018a92ba\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 16:58:16 crc kubenswrapper[4558]: I0120 16:58:16.316692 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d-run-httpd\") pod \"ceilometer-0\" (UID: \"2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:58:16 crc kubenswrapper[4558]: I0120 16:58:16.316711 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8911a4f-a706-4956-9028-138c018a92ba-config-data\") pod \"cinder-scheduler-0\" (UID: \"b8911a4f-a706-4956-9028-138c018a92ba\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 16:58:16 crc kubenswrapper[4558]: I0120 16:58:16.316799 4558 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:58:16 crc kubenswrapper[4558]: I0120 16:58:16.316985 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d-log-httpd\") pod \"ceilometer-0\" (UID: \"2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:58:16 crc kubenswrapper[4558]: I0120 16:58:16.317225 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b8911a4f-a706-4956-9028-138c018a92ba-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"b8911a4f-a706-4956-9028-138c018a92ba\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 16:58:16 crc kubenswrapper[4558]: I0120 16:58:16.322017 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d-scripts\") pod \"ceilometer-0\" (UID: \"2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:58:16 crc kubenswrapper[4558]: I0120 16:58:16.322058 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:58:16 crc kubenswrapper[4558]: I0120 16:58:16.323648 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8911a4f-a706-4956-9028-138c018a92ba-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"b8911a4f-a706-4956-9028-138c018a92ba\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 16:58:16 crc kubenswrapper[4558]: I0120 16:58:16.330811 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b8911a4f-a706-4956-9028-138c018a92ba-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"b8911a4f-a706-4956-9028-138c018a92ba\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 16:58:16 crc kubenswrapper[4558]: I0120 16:58:16.331305 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b8911a4f-a706-4956-9028-138c018a92ba-scripts\") pod \"cinder-scheduler-0\" (UID: \"b8911a4f-a706-4956-9028-138c018a92ba\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 16:58:16 crc kubenswrapper[4558]: I0120 16:58:16.331613 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8911a4f-a706-4956-9028-138c018a92ba-config-data\") pod \"cinder-scheduler-0\" (UID: \"b8911a4f-a706-4956-9028-138c018a92ba\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 16:58:16 crc kubenswrapper[4558]: I0120 16:58:16.333044 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d-config-data\") pod \"ceilometer-0\" (UID: \"2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:58:16 
crc kubenswrapper[4558]: I0120 16:58:16.333498 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:58:16 crc kubenswrapper[4558]: I0120 16:58:16.333629 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-frsxr\" (UniqueName: \"kubernetes.io/projected/2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d-kube-api-access-frsxr\") pod \"ceilometer-0\" (UID: \"2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:58:16 crc kubenswrapper[4558]: I0120 16:58:16.333904 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5l9bc\" (UniqueName: \"kubernetes.io/projected/b8911a4f-a706-4956-9028-138c018a92ba-kube-api-access-5l9bc\") pod \"cinder-scheduler-0\" (UID: \"b8911a4f-a706-4956-9028-138c018a92ba\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 16:58:16 crc kubenswrapper[4558]: I0120 16:58:16.469023 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:58:16 crc kubenswrapper[4558]: I0120 16:58:16.474523 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 16:58:16 crc kubenswrapper[4558]: I0120 16:58:16.581676 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c1e3af55-1788-4d23-b3c5-6227e848ea15" path="/var/lib/kubelet/pods/c1e3af55-1788-4d23-b3c5-6227e848ea15/volumes" Jan 20 16:58:16 crc kubenswrapper[4558]: I0120 16:58:16.582850 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d4a1ee65-2f6a-4db8-b56d-f0729c084060" path="/var/lib/kubelet/pods/d4a1ee65-2f6a-4db8-b56d-f0729c084060/volumes" Jan 20 16:58:16 crc kubenswrapper[4558]: I0120 16:58:16.906430 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 16:58:16 crc kubenswrapper[4558]: I0120 16:58:16.945210 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 16:58:16 crc kubenswrapper[4558]: W0120 16:58:16.949215 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb8911a4f_a706_4956_9028_138c018a92ba.slice/crio-bb97393e531ae7f10b20f798c00eb1b76a194278be5ba6288ed018bb78edba2b WatchSource:0}: Error finding container bb97393e531ae7f10b20f798c00eb1b76a194278be5ba6288ed018bb78edba2b: Status 404 returned error can't find the container with id bb97393e531ae7f10b20f798c00eb1b76a194278be5ba6288ed018bb78edba2b Jan 20 16:58:17 crc kubenswrapper[4558]: I0120 16:58:17.060007 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d","Type":"ContainerStarted","Data":"7f488c284eb5f7576e9286a6005da650571a0368095b057aa29ef1b7c718c579"} Jan 20 16:58:17 crc kubenswrapper[4558]: I0120 16:58:17.062229 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"b8911a4f-a706-4956-9028-138c018a92ba","Type":"ContainerStarted","Data":"bb97393e531ae7f10b20f798c00eb1b76a194278be5ba6288ed018bb78edba2b"} Jan 20 16:58:17 crc kubenswrapper[4558]: I0120 16:58:17.902939 4558 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 16:58:18 crc kubenswrapper[4558]: I0120 16:58:18.074625 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d","Type":"ContainerStarted","Data":"b7ee658883bc3c2dedd442b787d58c71a76627afcfa42d35a4e3d2cbee602149"} Jan 20 16:58:18 crc kubenswrapper[4558]: I0120 16:58:18.076331 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"b8911a4f-a706-4956-9028-138c018a92ba","Type":"ContainerStarted","Data":"d3bb244923af1279a2fec8556cc18369058a01a9dc93f5e798291670f59b37c6"} Jan 20 16:58:18 crc kubenswrapper[4558]: I0120 16:58:18.076360 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"b8911a4f-a706-4956-9028-138c018a92ba","Type":"ContainerStarted","Data":"8c01eafed347e73a4f6bbd3d6df2573984ce045e60275b9c6bccf3c8df4b43d7"} Jan 20 16:58:18 crc kubenswrapper[4558]: I0120 16:58:18.099235 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/cinder-scheduler-0" podStartSLOduration=2.099220668 podStartE2EDuration="2.099220668s" podCreationTimestamp="2026-01-20 16:58:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:58:18.092582987 +0000 UTC m=+991.852920954" watchObservedRunningTime="2026-01-20 16:58:18.099220668 +0000 UTC m=+991.859558636" Jan 20 16:58:18 crc kubenswrapper[4558]: I0120 16:58:18.992038 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/barbican-api-7bdfbf6ddd-thvjg" podUID="8be0815b-492d-4db8-bfd5-29d81bdb14d6" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.145:9311/healthcheck\": read tcp 10.217.0.2:55036->10.217.0.145:9311: read: connection reset by peer" Jan 20 16:58:18 crc kubenswrapper[4558]: I0120 16:58:18.992068 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/barbican-api-7bdfbf6ddd-thvjg" podUID="8be0815b-492d-4db8-bfd5-29d81bdb14d6" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.145:9311/healthcheck\": read tcp 10.217.0.2:55038->10.217.0.145:9311: read: connection reset by peer" Jan 20 16:58:19 crc kubenswrapper[4558]: I0120 16:58:19.089615 4558 generic.go:334] "Generic (PLEG): container finished" podID="8be0815b-492d-4db8-bfd5-29d81bdb14d6" containerID="53d735f4f7282a16615b6f3b8dc32ad8c06f8b6e5d927b47a7f63325bbbf042e" exitCode=0 Jan 20 16:58:19 crc kubenswrapper[4558]: I0120 16:58:19.089693 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-7bdfbf6ddd-thvjg" event={"ID":"8be0815b-492d-4db8-bfd5-29d81bdb14d6","Type":"ContainerDied","Data":"53d735f4f7282a16615b6f3b8dc32ad8c06f8b6e5d927b47a7f63325bbbf042e"} Jan 20 16:58:19 crc kubenswrapper[4558]: I0120 16:58:19.350667 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-api-7bdfbf6ddd-thvjg" Jan 20 16:58:19 crc kubenswrapper[4558]: I0120 16:58:19.371561 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4db2c\" (UniqueName: \"kubernetes.io/projected/8be0815b-492d-4db8-bfd5-29d81bdb14d6-kube-api-access-4db2c\") pod \"8be0815b-492d-4db8-bfd5-29d81bdb14d6\" (UID: \"8be0815b-492d-4db8-bfd5-29d81bdb14d6\") " Jan 20 16:58:19 crc kubenswrapper[4558]: I0120 16:58:19.371641 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8be0815b-492d-4db8-bfd5-29d81bdb14d6-logs\") pod \"8be0815b-492d-4db8-bfd5-29d81bdb14d6\" (UID: \"8be0815b-492d-4db8-bfd5-29d81bdb14d6\") " Jan 20 16:58:19 crc kubenswrapper[4558]: I0120 16:58:19.371663 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8be0815b-492d-4db8-bfd5-29d81bdb14d6-config-data\") pod \"8be0815b-492d-4db8-bfd5-29d81bdb14d6\" (UID: \"8be0815b-492d-4db8-bfd5-29d81bdb14d6\") " Jan 20 16:58:19 crc kubenswrapper[4558]: I0120 16:58:19.371695 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8be0815b-492d-4db8-bfd5-29d81bdb14d6-config-data-custom\") pod \"8be0815b-492d-4db8-bfd5-29d81bdb14d6\" (UID: \"8be0815b-492d-4db8-bfd5-29d81bdb14d6\") " Jan 20 16:58:19 crc kubenswrapper[4558]: I0120 16:58:19.371720 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8be0815b-492d-4db8-bfd5-29d81bdb14d6-combined-ca-bundle\") pod \"8be0815b-492d-4db8-bfd5-29d81bdb14d6\" (UID: \"8be0815b-492d-4db8-bfd5-29d81bdb14d6\") " Jan 20 16:58:19 crc kubenswrapper[4558]: I0120 16:58:19.372488 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8be0815b-492d-4db8-bfd5-29d81bdb14d6-logs" (OuterVolumeSpecName: "logs") pod "8be0815b-492d-4db8-bfd5-29d81bdb14d6" (UID: "8be0815b-492d-4db8-bfd5-29d81bdb14d6"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 16:58:19 crc kubenswrapper[4558]: I0120 16:58:19.376318 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8be0815b-492d-4db8-bfd5-29d81bdb14d6-kube-api-access-4db2c" (OuterVolumeSpecName: "kube-api-access-4db2c") pod "8be0815b-492d-4db8-bfd5-29d81bdb14d6" (UID: "8be0815b-492d-4db8-bfd5-29d81bdb14d6"). InnerVolumeSpecName "kube-api-access-4db2c". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:58:19 crc kubenswrapper[4558]: I0120 16:58:19.378300 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8be0815b-492d-4db8-bfd5-29d81bdb14d6-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "8be0815b-492d-4db8-bfd5-29d81bdb14d6" (UID: "8be0815b-492d-4db8-bfd5-29d81bdb14d6"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:58:19 crc kubenswrapper[4558]: I0120 16:58:19.396195 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8be0815b-492d-4db8-bfd5-29d81bdb14d6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8be0815b-492d-4db8-bfd5-29d81bdb14d6" (UID: "8be0815b-492d-4db8-bfd5-29d81bdb14d6"). 
InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:58:19 crc kubenswrapper[4558]: I0120 16:58:19.419022 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8be0815b-492d-4db8-bfd5-29d81bdb14d6-config-data" (OuterVolumeSpecName: "config-data") pod "8be0815b-492d-4db8-bfd5-29d81bdb14d6" (UID: "8be0815b-492d-4db8-bfd5-29d81bdb14d6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:58:19 crc kubenswrapper[4558]: I0120 16:58:19.473770 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4db2c\" (UniqueName: \"kubernetes.io/projected/8be0815b-492d-4db8-bfd5-29d81bdb14d6-kube-api-access-4db2c\") on node \"crc\" DevicePath \"\"" Jan 20 16:58:19 crc kubenswrapper[4558]: I0120 16:58:19.473800 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8be0815b-492d-4db8-bfd5-29d81bdb14d6-logs\") on node \"crc\" DevicePath \"\"" Jan 20 16:58:19 crc kubenswrapper[4558]: I0120 16:58:19.473810 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8be0815b-492d-4db8-bfd5-29d81bdb14d6-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 16:58:19 crc kubenswrapper[4558]: I0120 16:58:19.473819 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8be0815b-492d-4db8-bfd5-29d81bdb14d6-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 16:58:19 crc kubenswrapper[4558]: I0120 16:58:19.473827 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8be0815b-492d-4db8-bfd5-29d81bdb14d6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 16:58:20 crc kubenswrapper[4558]: I0120 16:58:20.098454 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d","Type":"ContainerStarted","Data":"852fdef698848c41f9bb05c5c2bd39cc27719921891f87d42bb115977032714c"} Jan 20 16:58:20 crc kubenswrapper[4558]: I0120 16:58:20.100299 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-7bdfbf6ddd-thvjg" event={"ID":"8be0815b-492d-4db8-bfd5-29d81bdb14d6","Type":"ContainerDied","Data":"26e2be8f6e22cc33201906e6911dbcc942470a8f9146c2c7520e62cd1c90123f"} Jan 20 16:58:20 crc kubenswrapper[4558]: I0120 16:58:20.100354 4558 scope.go:117] "RemoveContainer" containerID="53d735f4f7282a16615b6f3b8dc32ad8c06f8b6e5d927b47a7f63325bbbf042e" Jan 20 16:58:20 crc kubenswrapper[4558]: I0120 16:58:20.100354 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-api-7bdfbf6ddd-thvjg" Jan 20 16:58:20 crc kubenswrapper[4558]: I0120 16:58:20.123678 4558 scope.go:117] "RemoveContainer" containerID="caeca9047300cdd39b8552502d469030f74bd63d62befb9645f590eaf61fd3f1" Jan 20 16:58:20 crc kubenswrapper[4558]: I0120 16:58:20.125904 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-api-7bdfbf6ddd-thvjg"] Jan 20 16:58:20 crc kubenswrapper[4558]: I0120 16:58:20.132116 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-api-7bdfbf6ddd-thvjg"] Jan 20 16:58:20 crc kubenswrapper[4558]: I0120 16:58:20.574787 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8be0815b-492d-4db8-bfd5-29d81bdb14d6" path="/var/lib/kubelet/pods/8be0815b-492d-4db8-bfd5-29d81bdb14d6/volumes" Jan 20 16:58:21 crc kubenswrapper[4558]: I0120 16:58:21.110844 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d","Type":"ContainerStarted","Data":"14757df3405b5b02eb99dcdfcfb12d5a83dff3bcc11e6b0ba0b6a92ccc27dcd4"} Jan 20 16:58:21 crc kubenswrapper[4558]: I0120 16:58:21.475532 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 16:58:23 crc kubenswrapper[4558]: I0120 16:58:23.126815 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d","Type":"ContainerStarted","Data":"e1e5629ee3608ca2964283471ea9053abf2262180449bdda5f5f8acd3141ba80"} Jan 20 16:58:23 crc kubenswrapper[4558]: I0120 16:58:23.127142 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:58:23 crc kubenswrapper[4558]: I0120 16:58:23.150673 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=1.973282362 podStartE2EDuration="7.150655928s" podCreationTimestamp="2026-01-20 16:58:16 +0000 UTC" firstStartedPulling="2026-01-20 16:58:16.911902958 +0000 UTC m=+990.672240924" lastFinishedPulling="2026-01-20 16:58:22.089276522 +0000 UTC m=+995.849614490" observedRunningTime="2026-01-20 16:58:23.146709266 +0000 UTC m=+996.907047223" watchObservedRunningTime="2026-01-20 16:58:23.150655928 +0000 UTC m=+996.910993884" Jan 20 16:58:23 crc kubenswrapper[4558]: I0120 16:58:23.319722 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/neutron-577dd7499c-hrw6z" Jan 20 16:58:25 crc kubenswrapper[4558]: I0120 16:58:25.643369 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/neutron-5d557df858-frznf" Jan 20 16:58:25 crc kubenswrapper[4558]: I0120 16:58:25.685798 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-577dd7499c-hrw6z"] Jan 20 16:58:25 crc kubenswrapper[4558]: I0120 16:58:25.685970 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-577dd7499c-hrw6z" podUID="8949f5f0-c260-4456-8d11-e91486237ef6" containerName="neutron-api" containerID="cri-o://603431ee4995dde1cbb89b63f722d065d375af2a2da9a14c43c53c29740e2965" gracePeriod=30 Jan 20 16:58:25 crc kubenswrapper[4558]: I0120 16:58:25.686022 4558 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack-kuttl-tests/neutron-577dd7499c-hrw6z" podUID="8949f5f0-c260-4456-8d11-e91486237ef6" containerName="neutron-httpd" containerID="cri-o://e0389f084c9272c173ccedb6457ef9b3c7a5face334cb3ee4b35a877d894c33b" gracePeriod=30 Jan 20 16:58:26 crc kubenswrapper[4558]: I0120 16:58:26.147925 4558 generic.go:334] "Generic (PLEG): container finished" podID="8949f5f0-c260-4456-8d11-e91486237ef6" containerID="e0389f084c9272c173ccedb6457ef9b3c7a5face334cb3ee4b35a877d894c33b" exitCode=0 Jan 20 16:58:26 crc kubenswrapper[4558]: I0120 16:58:26.147964 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-577dd7499c-hrw6z" event={"ID":"8949f5f0-c260-4456-8d11-e91486237ef6","Type":"ContainerDied","Data":"e0389f084c9272c173ccedb6457ef9b3c7a5face334cb3ee4b35a877d894c33b"} Jan 20 16:58:26 crc kubenswrapper[4558]: I0120 16:58:26.659519 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 16:58:28 crc kubenswrapper[4558]: I0120 16:58:28.534521 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/placement-5d77597494-bkh4z" Jan 20 16:58:28 crc kubenswrapper[4558]: I0120 16:58:28.542738 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/placement-5d77597494-bkh4z" Jan 20 16:58:29 crc kubenswrapper[4558]: I0120 16:58:29.119821 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-577dd7499c-hrw6z" Jan 20 16:58:29 crc kubenswrapper[4558]: I0120 16:58:29.177145 4558 generic.go:334] "Generic (PLEG): container finished" podID="8949f5f0-c260-4456-8d11-e91486237ef6" containerID="603431ee4995dde1cbb89b63f722d065d375af2a2da9a14c43c53c29740e2965" exitCode=0 Jan 20 16:58:29 crc kubenswrapper[4558]: I0120 16:58:29.177215 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-577dd7499c-hrw6z" event={"ID":"8949f5f0-c260-4456-8d11-e91486237ef6","Type":"ContainerDied","Data":"603431ee4995dde1cbb89b63f722d065d375af2a2da9a14c43c53c29740e2965"} Jan 20 16:58:29 crc kubenswrapper[4558]: I0120 16:58:29.177248 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-577dd7499c-hrw6z" event={"ID":"8949f5f0-c260-4456-8d11-e91486237ef6","Type":"ContainerDied","Data":"3caa8749d23ebc62708f17bcc49d95ca30c24ee797d59ead6771b72c5f31f50f"} Jan 20 16:58:29 crc kubenswrapper[4558]: I0120 16:58:29.177265 4558 scope.go:117] "RemoveContainer" containerID="e0389f084c9272c173ccedb6457ef9b3c7a5face334cb3ee4b35a877d894c33b" Jan 20 16:58:29 crc kubenswrapper[4558]: I0120 16:58:29.177276 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-577dd7499c-hrw6z" Jan 20 16:58:29 crc kubenswrapper[4558]: I0120 16:58:29.192176 4558 scope.go:117] "RemoveContainer" containerID="603431ee4995dde1cbb89b63f722d065d375af2a2da9a14c43c53c29740e2965" Jan 20 16:58:29 crc kubenswrapper[4558]: I0120 16:58:29.207537 4558 scope.go:117] "RemoveContainer" containerID="e0389f084c9272c173ccedb6457ef9b3c7a5face334cb3ee4b35a877d894c33b" Jan 20 16:58:29 crc kubenswrapper[4558]: E0120 16:58:29.207811 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e0389f084c9272c173ccedb6457ef9b3c7a5face334cb3ee4b35a877d894c33b\": container with ID starting with e0389f084c9272c173ccedb6457ef9b3c7a5face334cb3ee4b35a877d894c33b not found: ID does not exist" containerID="e0389f084c9272c173ccedb6457ef9b3c7a5face334cb3ee4b35a877d894c33b" Jan 20 16:58:29 crc kubenswrapper[4558]: I0120 16:58:29.207840 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e0389f084c9272c173ccedb6457ef9b3c7a5face334cb3ee4b35a877d894c33b"} err="failed to get container status \"e0389f084c9272c173ccedb6457ef9b3c7a5face334cb3ee4b35a877d894c33b\": rpc error: code = NotFound desc = could not find container \"e0389f084c9272c173ccedb6457ef9b3c7a5face334cb3ee4b35a877d894c33b\": container with ID starting with e0389f084c9272c173ccedb6457ef9b3c7a5face334cb3ee4b35a877d894c33b not found: ID does not exist" Jan 20 16:58:29 crc kubenswrapper[4558]: I0120 16:58:29.207858 4558 scope.go:117] "RemoveContainer" containerID="603431ee4995dde1cbb89b63f722d065d375af2a2da9a14c43c53c29740e2965" Jan 20 16:58:29 crc kubenswrapper[4558]: E0120 16:58:29.208127 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"603431ee4995dde1cbb89b63f722d065d375af2a2da9a14c43c53c29740e2965\": container with ID starting with 603431ee4995dde1cbb89b63f722d065d375af2a2da9a14c43c53c29740e2965 not found: ID does not exist" containerID="603431ee4995dde1cbb89b63f722d065d375af2a2da9a14c43c53c29740e2965" Jan 20 16:58:29 crc kubenswrapper[4558]: I0120 16:58:29.208219 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"603431ee4995dde1cbb89b63f722d065d375af2a2da9a14c43c53c29740e2965"} err="failed to get container status \"603431ee4995dde1cbb89b63f722d065d375af2a2da9a14c43c53c29740e2965\": rpc error: code = NotFound desc = could not find container \"603431ee4995dde1cbb89b63f722d065d375af2a2da9a14c43c53c29740e2965\": container with ID starting with 603431ee4995dde1cbb89b63f722d065d375af2a2da9a14c43c53c29740e2965 not found: ID does not exist" Jan 20 16:58:29 crc kubenswrapper[4558]: I0120 16:58:29.221627 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/8949f5f0-c260-4456-8d11-e91486237ef6-config\") pod \"8949f5f0-c260-4456-8d11-e91486237ef6\" (UID: \"8949f5f0-c260-4456-8d11-e91486237ef6\") " Jan 20 16:58:29 crc kubenswrapper[4558]: I0120 16:58:29.221773 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2zxdx\" (UniqueName: \"kubernetes.io/projected/8949f5f0-c260-4456-8d11-e91486237ef6-kube-api-access-2zxdx\") pod \"8949f5f0-c260-4456-8d11-e91486237ef6\" (UID: \"8949f5f0-c260-4456-8d11-e91486237ef6\") " Jan 20 16:58:29 crc kubenswrapper[4558]: I0120 16:58:29.221794 4558 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/8949f5f0-c260-4456-8d11-e91486237ef6-ovndb-tls-certs\") pod \"8949f5f0-c260-4456-8d11-e91486237ef6\" (UID: \"8949f5f0-c260-4456-8d11-e91486237ef6\") " Jan 20 16:58:29 crc kubenswrapper[4558]: I0120 16:58:29.221814 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/8949f5f0-c260-4456-8d11-e91486237ef6-httpd-config\") pod \"8949f5f0-c260-4456-8d11-e91486237ef6\" (UID: \"8949f5f0-c260-4456-8d11-e91486237ef6\") " Jan 20 16:58:29 crc kubenswrapper[4558]: I0120 16:58:29.221851 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8949f5f0-c260-4456-8d11-e91486237ef6-combined-ca-bundle\") pod \"8949f5f0-c260-4456-8d11-e91486237ef6\" (UID: \"8949f5f0-c260-4456-8d11-e91486237ef6\") " Jan 20 16:58:29 crc kubenswrapper[4558]: I0120 16:58:29.226805 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8949f5f0-c260-4456-8d11-e91486237ef6-kube-api-access-2zxdx" (OuterVolumeSpecName: "kube-api-access-2zxdx") pod "8949f5f0-c260-4456-8d11-e91486237ef6" (UID: "8949f5f0-c260-4456-8d11-e91486237ef6"). InnerVolumeSpecName "kube-api-access-2zxdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:58:29 crc kubenswrapper[4558]: I0120 16:58:29.230203 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8949f5f0-c260-4456-8d11-e91486237ef6-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "8949f5f0-c260-4456-8d11-e91486237ef6" (UID: "8949f5f0-c260-4456-8d11-e91486237ef6"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:58:29 crc kubenswrapper[4558]: I0120 16:58:29.255032 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8949f5f0-c260-4456-8d11-e91486237ef6-config" (OuterVolumeSpecName: "config") pod "8949f5f0-c260-4456-8d11-e91486237ef6" (UID: "8949f5f0-c260-4456-8d11-e91486237ef6"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:58:29 crc kubenswrapper[4558]: I0120 16:58:29.256936 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8949f5f0-c260-4456-8d11-e91486237ef6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8949f5f0-c260-4456-8d11-e91486237ef6" (UID: "8949f5f0-c260-4456-8d11-e91486237ef6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:58:29 crc kubenswrapper[4558]: I0120 16:58:29.276513 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8949f5f0-c260-4456-8d11-e91486237ef6-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "8949f5f0-c260-4456-8d11-e91486237ef6" (UID: "8949f5f0-c260-4456-8d11-e91486237ef6"). InnerVolumeSpecName "ovndb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:58:29 crc kubenswrapper[4558]: I0120 16:58:29.323852 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/8949f5f0-c260-4456-8d11-e91486237ef6-config\") on node \"crc\" DevicePath \"\"" Jan 20 16:58:29 crc kubenswrapper[4558]: I0120 16:58:29.323877 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2zxdx\" (UniqueName: \"kubernetes.io/projected/8949f5f0-c260-4456-8d11-e91486237ef6-kube-api-access-2zxdx\") on node \"crc\" DevicePath \"\"" Jan 20 16:58:29 crc kubenswrapper[4558]: I0120 16:58:29.323888 4558 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/8949f5f0-c260-4456-8d11-e91486237ef6-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 16:58:29 crc kubenswrapper[4558]: I0120 16:58:29.323899 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/8949f5f0-c260-4456-8d11-e91486237ef6-httpd-config\") on node \"crc\" DevicePath \"\"" Jan 20 16:58:29 crc kubenswrapper[4558]: I0120 16:58:29.323907 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8949f5f0-c260-4456-8d11-e91486237ef6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 16:58:29 crc kubenswrapper[4558]: I0120 16:58:29.502114 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-577dd7499c-hrw6z"] Jan 20 16:58:29 crc kubenswrapper[4558]: I0120 16:58:29.507822 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-577dd7499c-hrw6z"] Jan 20 16:58:30 crc kubenswrapper[4558]: I0120 16:58:30.495235 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/keystone-6494546496-58v7b" Jan 20 16:58:30 crc kubenswrapper[4558]: I0120 16:58:30.572706 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8949f5f0-c260-4456-8d11-e91486237ef6" path="/var/lib/kubelet/pods/8949f5f0-c260-4456-8d11-e91486237ef6/volumes" Jan 20 16:58:31 crc kubenswrapper[4558]: I0120 16:58:31.228717 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 16:58:31 crc kubenswrapper[4558]: E0120 16:58:31.229273 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8949f5f0-c260-4456-8d11-e91486237ef6" containerName="neutron-httpd" Jan 20 16:58:31 crc kubenswrapper[4558]: I0120 16:58:31.229292 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8949f5f0-c260-4456-8d11-e91486237ef6" containerName="neutron-httpd" Jan 20 16:58:31 crc kubenswrapper[4558]: E0120 16:58:31.229317 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8949f5f0-c260-4456-8d11-e91486237ef6" containerName="neutron-api" Jan 20 16:58:31 crc kubenswrapper[4558]: I0120 16:58:31.229323 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8949f5f0-c260-4456-8d11-e91486237ef6" containerName="neutron-api" Jan 20 16:58:31 crc kubenswrapper[4558]: E0120 16:58:31.229332 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8be0815b-492d-4db8-bfd5-29d81bdb14d6" containerName="barbican-api" Jan 20 16:58:31 crc kubenswrapper[4558]: I0120 16:58:31.229338 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8be0815b-492d-4db8-bfd5-29d81bdb14d6" containerName="barbican-api" Jan 20 16:58:31 crc kubenswrapper[4558]: E0120 
16:58:31.229347 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8be0815b-492d-4db8-bfd5-29d81bdb14d6" containerName="barbican-api-log" Jan 20 16:58:31 crc kubenswrapper[4558]: I0120 16:58:31.229352 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8be0815b-492d-4db8-bfd5-29d81bdb14d6" containerName="barbican-api-log" Jan 20 16:58:31 crc kubenswrapper[4558]: I0120 16:58:31.229520 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8be0815b-492d-4db8-bfd5-29d81bdb14d6" containerName="barbican-api" Jan 20 16:58:31 crc kubenswrapper[4558]: I0120 16:58:31.229542 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8949f5f0-c260-4456-8d11-e91486237ef6" containerName="neutron-api" Jan 20 16:58:31 crc kubenswrapper[4558]: I0120 16:58:31.229560 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8be0815b-492d-4db8-bfd5-29d81bdb14d6" containerName="barbican-api-log" Jan 20 16:58:31 crc kubenswrapper[4558]: I0120 16:58:31.229570 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8949f5f0-c260-4456-8d11-e91486237ef6" containerName="neutron-httpd" Jan 20 16:58:31 crc kubenswrapper[4558]: I0120 16:58:31.230022 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstackclient" Jan 20 16:58:31 crc kubenswrapper[4558]: I0120 16:58:31.231617 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"openstack-config-secret" Jan 20 16:58:31 crc kubenswrapper[4558]: I0120 16:58:31.231637 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"openstackclient-openstackclient-dockercfg-nvprf" Jan 20 16:58:31 crc kubenswrapper[4558]: I0120 16:58:31.232043 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-config" Jan 20 16:58:31 crc kubenswrapper[4558]: I0120 16:58:31.236483 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 16:58:31 crc kubenswrapper[4558]: I0120 16:58:31.270235 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/4fe22ec2-af71-47bf-8053-86dee012df91-openstack-config-secret\") pod \"openstackclient\" (UID: \"4fe22ec2-af71-47bf-8053-86dee012df91\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 16:58:31 crc kubenswrapper[4558]: I0120 16:58:31.270275 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/4fe22ec2-af71-47bf-8053-86dee012df91-openstack-config\") pod \"openstackclient\" (UID: \"4fe22ec2-af71-47bf-8053-86dee012df91\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 16:58:31 crc kubenswrapper[4558]: I0120 16:58:31.270463 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-82mbz\" (UniqueName: \"kubernetes.io/projected/4fe22ec2-af71-47bf-8053-86dee012df91-kube-api-access-82mbz\") pod \"openstackclient\" (UID: \"4fe22ec2-af71-47bf-8053-86dee012df91\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 16:58:31 crc kubenswrapper[4558]: I0120 16:58:31.270497 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/4fe22ec2-af71-47bf-8053-86dee012df91-combined-ca-bundle\") pod \"openstackclient\" (UID: \"4fe22ec2-af71-47bf-8053-86dee012df91\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 16:58:31 crc kubenswrapper[4558]: I0120 16:58:31.372255 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-82mbz\" (UniqueName: \"kubernetes.io/projected/4fe22ec2-af71-47bf-8053-86dee012df91-kube-api-access-82mbz\") pod \"openstackclient\" (UID: \"4fe22ec2-af71-47bf-8053-86dee012df91\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 16:58:31 crc kubenswrapper[4558]: I0120 16:58:31.372293 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fe22ec2-af71-47bf-8053-86dee012df91-combined-ca-bundle\") pod \"openstackclient\" (UID: \"4fe22ec2-af71-47bf-8053-86dee012df91\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 16:58:31 crc kubenswrapper[4558]: I0120 16:58:31.372390 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/4fe22ec2-af71-47bf-8053-86dee012df91-openstack-config-secret\") pod \"openstackclient\" (UID: \"4fe22ec2-af71-47bf-8053-86dee012df91\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 16:58:31 crc kubenswrapper[4558]: I0120 16:58:31.372418 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/4fe22ec2-af71-47bf-8053-86dee012df91-openstack-config\") pod \"openstackclient\" (UID: \"4fe22ec2-af71-47bf-8053-86dee012df91\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 16:58:31 crc kubenswrapper[4558]: I0120 16:58:31.373299 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/4fe22ec2-af71-47bf-8053-86dee012df91-openstack-config\") pod \"openstackclient\" (UID: \"4fe22ec2-af71-47bf-8053-86dee012df91\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 16:58:31 crc kubenswrapper[4558]: I0120 16:58:31.377863 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/4fe22ec2-af71-47bf-8053-86dee012df91-openstack-config-secret\") pod \"openstackclient\" (UID: \"4fe22ec2-af71-47bf-8053-86dee012df91\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 16:58:31 crc kubenswrapper[4558]: I0120 16:58:31.377894 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fe22ec2-af71-47bf-8053-86dee012df91-combined-ca-bundle\") pod \"openstackclient\" (UID: \"4fe22ec2-af71-47bf-8053-86dee012df91\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 16:58:31 crc kubenswrapper[4558]: I0120 16:58:31.385090 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-82mbz\" (UniqueName: \"kubernetes.io/projected/4fe22ec2-af71-47bf-8053-86dee012df91-kube-api-access-82mbz\") pod \"openstackclient\" (UID: \"4fe22ec2-af71-47bf-8053-86dee012df91\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 16:58:31 crc kubenswrapper[4558]: I0120 16:58:31.542958 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstackclient" Jan 20 16:58:31 crc kubenswrapper[4558]: I0120 16:58:31.901257 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 16:58:31 crc kubenswrapper[4558]: W0120 16:58:31.904677 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4fe22ec2_af71_47bf_8053_86dee012df91.slice/crio-7180deedfdf7f7927582a43759bd57eb758186c62fd21cee4d186f1edf303aff WatchSource:0}: Error finding container 7180deedfdf7f7927582a43759bd57eb758186c62fd21cee4d186f1edf303aff: Status 404 returned error can't find the container with id 7180deedfdf7f7927582a43759bd57eb758186c62fd21cee4d186f1edf303aff Jan 20 16:58:32 crc kubenswrapper[4558]: I0120 16:58:32.196532 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstackclient" event={"ID":"4fe22ec2-af71-47bf-8053-86dee012df91","Type":"ContainerStarted","Data":"7180deedfdf7f7927582a43759bd57eb758186c62fd21cee4d186f1edf303aff"} Jan 20 16:58:34 crc kubenswrapper[4558]: I0120 16:58:34.129859 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/swift-proxy-6b7b5d66dd-8plzj"] Jan 20 16:58:34 crc kubenswrapper[4558]: I0120 16:58:34.131681 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-proxy-6b7b5d66dd-8plzj" Jan 20 16:58:34 crc kubenswrapper[4558]: I0120 16:58:34.133804 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-swift-internal-svc" Jan 20 16:58:34 crc kubenswrapper[4558]: I0120 16:58:34.134115 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-swift-public-svc" Jan 20 16:58:34 crc kubenswrapper[4558]: I0120 16:58:34.134296 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"swift-proxy-config-data" Jan 20 16:58:34 crc kubenswrapper[4558]: I0120 16:58:34.138056 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-proxy-6b7b5d66dd-8plzj"] Jan 20 16:58:34 crc kubenswrapper[4558]: I0120 16:58:34.228634 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/1d5c2e68-fb9d-4818-a0c2-27db2516e92b-etc-swift\") pod \"swift-proxy-6b7b5d66dd-8plzj\" (UID: \"1d5c2e68-fb9d-4818-a0c2-27db2516e92b\") " pod="openstack-kuttl-tests/swift-proxy-6b7b5d66dd-8plzj" Jan 20 16:58:34 crc kubenswrapper[4558]: I0120 16:58:34.228728 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1d5c2e68-fb9d-4818-a0c2-27db2516e92b-public-tls-certs\") pod \"swift-proxy-6b7b5d66dd-8plzj\" (UID: \"1d5c2e68-fb9d-4818-a0c2-27db2516e92b\") " pod="openstack-kuttl-tests/swift-proxy-6b7b5d66dd-8plzj" Jan 20 16:58:34 crc kubenswrapper[4558]: I0120 16:58:34.228751 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1d5c2e68-fb9d-4818-a0c2-27db2516e92b-log-httpd\") pod \"swift-proxy-6b7b5d66dd-8plzj\" (UID: \"1d5c2e68-fb9d-4818-a0c2-27db2516e92b\") " pod="openstack-kuttl-tests/swift-proxy-6b7b5d66dd-8plzj" Jan 20 16:58:34 crc kubenswrapper[4558]: I0120 16:58:34.228793 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1d5c2e68-fb9d-4818-a0c2-27db2516e92b-internal-tls-certs\") pod \"swift-proxy-6b7b5d66dd-8plzj\" (UID: \"1d5c2e68-fb9d-4818-a0c2-27db2516e92b\") " pod="openstack-kuttl-tests/swift-proxy-6b7b5d66dd-8plzj" Jan 20 16:58:34 crc kubenswrapper[4558]: I0120 16:58:34.228975 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hw7sd\" (UniqueName: \"kubernetes.io/projected/1d5c2e68-fb9d-4818-a0c2-27db2516e92b-kube-api-access-hw7sd\") pod \"swift-proxy-6b7b5d66dd-8plzj\" (UID: \"1d5c2e68-fb9d-4818-a0c2-27db2516e92b\") " pod="openstack-kuttl-tests/swift-proxy-6b7b5d66dd-8plzj" Jan 20 16:58:34 crc kubenswrapper[4558]: I0120 16:58:34.229089 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d5c2e68-fb9d-4818-a0c2-27db2516e92b-config-data\") pod \"swift-proxy-6b7b5d66dd-8plzj\" (UID: \"1d5c2e68-fb9d-4818-a0c2-27db2516e92b\") " pod="openstack-kuttl-tests/swift-proxy-6b7b5d66dd-8plzj" Jan 20 16:58:34 crc kubenswrapper[4558]: I0120 16:58:34.229179 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1d5c2e68-fb9d-4818-a0c2-27db2516e92b-run-httpd\") pod \"swift-proxy-6b7b5d66dd-8plzj\" (UID: \"1d5c2e68-fb9d-4818-a0c2-27db2516e92b\") " pod="openstack-kuttl-tests/swift-proxy-6b7b5d66dd-8plzj" Jan 20 16:58:34 crc kubenswrapper[4558]: I0120 16:58:34.229269 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d5c2e68-fb9d-4818-a0c2-27db2516e92b-combined-ca-bundle\") pod \"swift-proxy-6b7b5d66dd-8plzj\" (UID: \"1d5c2e68-fb9d-4818-a0c2-27db2516e92b\") " pod="openstack-kuttl-tests/swift-proxy-6b7b5d66dd-8plzj" Jan 20 16:58:34 crc kubenswrapper[4558]: I0120 16:58:34.330119 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1d5c2e68-fb9d-4818-a0c2-27db2516e92b-public-tls-certs\") pod \"swift-proxy-6b7b5d66dd-8plzj\" (UID: \"1d5c2e68-fb9d-4818-a0c2-27db2516e92b\") " pod="openstack-kuttl-tests/swift-proxy-6b7b5d66dd-8plzj" Jan 20 16:58:34 crc kubenswrapper[4558]: I0120 16:58:34.330158 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1d5c2e68-fb9d-4818-a0c2-27db2516e92b-log-httpd\") pod \"swift-proxy-6b7b5d66dd-8plzj\" (UID: \"1d5c2e68-fb9d-4818-a0c2-27db2516e92b\") " pod="openstack-kuttl-tests/swift-proxy-6b7b5d66dd-8plzj" Jan 20 16:58:34 crc kubenswrapper[4558]: I0120 16:58:34.330217 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1d5c2e68-fb9d-4818-a0c2-27db2516e92b-internal-tls-certs\") pod \"swift-proxy-6b7b5d66dd-8plzj\" (UID: \"1d5c2e68-fb9d-4818-a0c2-27db2516e92b\") " pod="openstack-kuttl-tests/swift-proxy-6b7b5d66dd-8plzj" Jan 20 16:58:34 crc kubenswrapper[4558]: I0120 16:58:34.330306 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hw7sd\" (UniqueName: \"kubernetes.io/projected/1d5c2e68-fb9d-4818-a0c2-27db2516e92b-kube-api-access-hw7sd\") pod \"swift-proxy-6b7b5d66dd-8plzj\" (UID: \"1d5c2e68-fb9d-4818-a0c2-27db2516e92b\") " 
pod="openstack-kuttl-tests/swift-proxy-6b7b5d66dd-8plzj" Jan 20 16:58:34 crc kubenswrapper[4558]: I0120 16:58:34.330349 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d5c2e68-fb9d-4818-a0c2-27db2516e92b-config-data\") pod \"swift-proxy-6b7b5d66dd-8plzj\" (UID: \"1d5c2e68-fb9d-4818-a0c2-27db2516e92b\") " pod="openstack-kuttl-tests/swift-proxy-6b7b5d66dd-8plzj" Jan 20 16:58:34 crc kubenswrapper[4558]: I0120 16:58:34.330391 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1d5c2e68-fb9d-4818-a0c2-27db2516e92b-run-httpd\") pod \"swift-proxy-6b7b5d66dd-8plzj\" (UID: \"1d5c2e68-fb9d-4818-a0c2-27db2516e92b\") " pod="openstack-kuttl-tests/swift-proxy-6b7b5d66dd-8plzj" Jan 20 16:58:34 crc kubenswrapper[4558]: I0120 16:58:34.330434 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d5c2e68-fb9d-4818-a0c2-27db2516e92b-combined-ca-bundle\") pod \"swift-proxy-6b7b5d66dd-8plzj\" (UID: \"1d5c2e68-fb9d-4818-a0c2-27db2516e92b\") " pod="openstack-kuttl-tests/swift-proxy-6b7b5d66dd-8plzj" Jan 20 16:58:34 crc kubenswrapper[4558]: I0120 16:58:34.330454 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/1d5c2e68-fb9d-4818-a0c2-27db2516e92b-etc-swift\") pod \"swift-proxy-6b7b5d66dd-8plzj\" (UID: \"1d5c2e68-fb9d-4818-a0c2-27db2516e92b\") " pod="openstack-kuttl-tests/swift-proxy-6b7b5d66dd-8plzj" Jan 20 16:58:34 crc kubenswrapper[4558]: I0120 16:58:34.330633 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1d5c2e68-fb9d-4818-a0c2-27db2516e92b-log-httpd\") pod \"swift-proxy-6b7b5d66dd-8plzj\" (UID: \"1d5c2e68-fb9d-4818-a0c2-27db2516e92b\") " pod="openstack-kuttl-tests/swift-proxy-6b7b5d66dd-8plzj" Jan 20 16:58:34 crc kubenswrapper[4558]: I0120 16:58:34.330850 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1d5c2e68-fb9d-4818-a0c2-27db2516e92b-run-httpd\") pod \"swift-proxy-6b7b5d66dd-8plzj\" (UID: \"1d5c2e68-fb9d-4818-a0c2-27db2516e92b\") " pod="openstack-kuttl-tests/swift-proxy-6b7b5d66dd-8plzj" Jan 20 16:58:34 crc kubenswrapper[4558]: I0120 16:58:34.335984 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d5c2e68-fb9d-4818-a0c2-27db2516e92b-config-data\") pod \"swift-proxy-6b7b5d66dd-8plzj\" (UID: \"1d5c2e68-fb9d-4818-a0c2-27db2516e92b\") " pod="openstack-kuttl-tests/swift-proxy-6b7b5d66dd-8plzj" Jan 20 16:58:34 crc kubenswrapper[4558]: I0120 16:58:34.336420 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/1d5c2e68-fb9d-4818-a0c2-27db2516e92b-etc-swift\") pod \"swift-proxy-6b7b5d66dd-8plzj\" (UID: \"1d5c2e68-fb9d-4818-a0c2-27db2516e92b\") " pod="openstack-kuttl-tests/swift-proxy-6b7b5d66dd-8plzj" Jan 20 16:58:34 crc kubenswrapper[4558]: I0120 16:58:34.338735 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d5c2e68-fb9d-4818-a0c2-27db2516e92b-combined-ca-bundle\") pod \"swift-proxy-6b7b5d66dd-8plzj\" (UID: \"1d5c2e68-fb9d-4818-a0c2-27db2516e92b\") " 
pod="openstack-kuttl-tests/swift-proxy-6b7b5d66dd-8plzj" Jan 20 16:58:34 crc kubenswrapper[4558]: I0120 16:58:34.339261 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1d5c2e68-fb9d-4818-a0c2-27db2516e92b-internal-tls-certs\") pod \"swift-proxy-6b7b5d66dd-8plzj\" (UID: \"1d5c2e68-fb9d-4818-a0c2-27db2516e92b\") " pod="openstack-kuttl-tests/swift-proxy-6b7b5d66dd-8plzj" Jan 20 16:58:34 crc kubenswrapper[4558]: I0120 16:58:34.339692 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1d5c2e68-fb9d-4818-a0c2-27db2516e92b-public-tls-certs\") pod \"swift-proxy-6b7b5d66dd-8plzj\" (UID: \"1d5c2e68-fb9d-4818-a0c2-27db2516e92b\") " pod="openstack-kuttl-tests/swift-proxy-6b7b5d66dd-8plzj" Jan 20 16:58:34 crc kubenswrapper[4558]: I0120 16:58:34.346746 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hw7sd\" (UniqueName: \"kubernetes.io/projected/1d5c2e68-fb9d-4818-a0c2-27db2516e92b-kube-api-access-hw7sd\") pod \"swift-proxy-6b7b5d66dd-8plzj\" (UID: \"1d5c2e68-fb9d-4818-a0c2-27db2516e92b\") " pod="openstack-kuttl-tests/swift-proxy-6b7b5d66dd-8plzj" Jan 20 16:58:34 crc kubenswrapper[4558]: I0120 16:58:34.450981 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-proxy-6b7b5d66dd-8plzj" Jan 20 16:58:34 crc kubenswrapper[4558]: I0120 16:58:34.848240 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-proxy-6b7b5d66dd-8plzj"] Jan 20 16:58:35 crc kubenswrapper[4558]: I0120 16:58:35.222716 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-6b7b5d66dd-8plzj" event={"ID":"1d5c2e68-fb9d-4818-a0c2-27db2516e92b","Type":"ContainerStarted","Data":"2c012c9aed3c1cbbfd0f3adb398aa6e942741aa3f3d75ea5d680bf8543a23be2"} Jan 20 16:58:35 crc kubenswrapper[4558]: I0120 16:58:35.222936 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/swift-proxy-6b7b5d66dd-8plzj" Jan 20 16:58:35 crc kubenswrapper[4558]: I0120 16:58:35.222947 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-6b7b5d66dd-8plzj" event={"ID":"1d5c2e68-fb9d-4818-a0c2-27db2516e92b","Type":"ContainerStarted","Data":"2c68bcde1b4540d7e10ed3de55e085fa25f7fe350c76cf38edeedca6d5734cb7"} Jan 20 16:58:35 crc kubenswrapper[4558]: I0120 16:58:35.222956 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-6b7b5d66dd-8plzj" event={"ID":"1d5c2e68-fb9d-4818-a0c2-27db2516e92b","Type":"ContainerStarted","Data":"14980298a33d464d1cf0b4215251b5e0402796e093f9375391a0ff5808714937"} Jan 20 16:58:35 crc kubenswrapper[4558]: I0120 16:58:35.223068 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/swift-proxy-6b7b5d66dd-8plzj" Jan 20 16:58:35 crc kubenswrapper[4558]: I0120 16:58:35.238641 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/swift-proxy-6b7b5d66dd-8plzj" podStartSLOduration=1.238627662 podStartE2EDuration="1.238627662s" podCreationTimestamp="2026-01-20 16:58:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:58:35.234715746 +0000 UTC m=+1008.995053712" watchObservedRunningTime="2026-01-20 16:58:35.238627662 +0000 UTC 
m=+1008.998965628" Jan 20 16:58:35 crc kubenswrapper[4558]: I0120 16:58:35.474464 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-db-create-9rlkr"] Jan 20 16:58:35 crc kubenswrapper[4558]: I0120 16:58:35.475377 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-db-create-9rlkr" Jan 20 16:58:35 crc kubenswrapper[4558]: I0120 16:58:35.494873 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-db-create-9rlkr"] Jan 20 16:58:35 crc kubenswrapper[4558]: I0120 16:58:35.554209 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a66af596-2f1d-496b-87cc-c67d42f99a56-operator-scripts\") pod \"nova-api-db-create-9rlkr\" (UID: \"a66af596-2f1d-496b-87cc-c67d42f99a56\") " pod="openstack-kuttl-tests/nova-api-db-create-9rlkr" Jan 20 16:58:35 crc kubenswrapper[4558]: I0120 16:58:35.554284 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dggll\" (UniqueName: \"kubernetes.io/projected/a66af596-2f1d-496b-87cc-c67d42f99a56-kube-api-access-dggll\") pod \"nova-api-db-create-9rlkr\" (UID: \"a66af596-2f1d-496b-87cc-c67d42f99a56\") " pod="openstack-kuttl-tests/nova-api-db-create-9rlkr" Jan 20 16:58:35 crc kubenswrapper[4558]: I0120 16:58:35.604654 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell0-db-create-bcpmp"] Jan 20 16:58:35 crc kubenswrapper[4558]: I0120 16:58:35.614220 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-db-create-bcpmp" Jan 20 16:58:35 crc kubenswrapper[4558]: I0120 16:58:35.622232 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-db-create-bcpmp"] Jan 20 16:58:35 crc kubenswrapper[4558]: I0120 16:58:35.642497 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-b4f9-account-create-update-smxt8"] Jan 20 16:58:35 crc kubenswrapper[4558]: I0120 16:58:35.643452 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-b4f9-account-create-update-smxt8" Jan 20 16:58:35 crc kubenswrapper[4558]: I0120 16:58:35.646551 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-api-db-secret" Jan 20 16:58:35 crc kubenswrapper[4558]: I0120 16:58:35.664946 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-b4f9-account-create-update-smxt8"] Jan 20 16:58:35 crc kubenswrapper[4558]: I0120 16:58:35.665308 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cz9nd\" (UniqueName: \"kubernetes.io/projected/8074b22b-e6be-4d65-a8fd-293434b77010-kube-api-access-cz9nd\") pod \"nova-cell0-db-create-bcpmp\" (UID: \"8074b22b-e6be-4d65-a8fd-293434b77010\") " pod="openstack-kuttl-tests/nova-cell0-db-create-bcpmp" Jan 20 16:58:35 crc kubenswrapper[4558]: I0120 16:58:35.665482 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8074b22b-e6be-4d65-a8fd-293434b77010-operator-scripts\") pod \"nova-cell0-db-create-bcpmp\" (UID: \"8074b22b-e6be-4d65-a8fd-293434b77010\") " pod="openstack-kuttl-tests/nova-cell0-db-create-bcpmp" Jan 20 16:58:35 crc kubenswrapper[4558]: I0120 16:58:35.665963 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a66af596-2f1d-496b-87cc-c67d42f99a56-operator-scripts\") pod \"nova-api-db-create-9rlkr\" (UID: \"a66af596-2f1d-496b-87cc-c67d42f99a56\") " pod="openstack-kuttl-tests/nova-api-db-create-9rlkr" Jan 20 16:58:35 crc kubenswrapper[4558]: I0120 16:58:35.666015 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dggll\" (UniqueName: \"kubernetes.io/projected/a66af596-2f1d-496b-87cc-c67d42f99a56-kube-api-access-dggll\") pod \"nova-api-db-create-9rlkr\" (UID: \"a66af596-2f1d-496b-87cc-c67d42f99a56\") " pod="openstack-kuttl-tests/nova-api-db-create-9rlkr" Jan 20 16:58:35 crc kubenswrapper[4558]: I0120 16:58:35.667271 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a66af596-2f1d-496b-87cc-c67d42f99a56-operator-scripts\") pod \"nova-api-db-create-9rlkr\" (UID: \"a66af596-2f1d-496b-87cc-c67d42f99a56\") " pod="openstack-kuttl-tests/nova-api-db-create-9rlkr" Jan 20 16:58:35 crc kubenswrapper[4558]: I0120 16:58:35.697645 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dggll\" (UniqueName: \"kubernetes.io/projected/a66af596-2f1d-496b-87cc-c67d42f99a56-kube-api-access-dggll\") pod \"nova-api-db-create-9rlkr\" (UID: \"a66af596-2f1d-496b-87cc-c67d42f99a56\") " pod="openstack-kuttl-tests/nova-api-db-create-9rlkr" Jan 20 16:58:35 crc kubenswrapper[4558]: I0120 16:58:35.726345 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-db-create-8jk7l"] Jan 20 16:58:35 crc kubenswrapper[4558]: I0120 16:58:35.727376 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-db-create-8jk7l" Jan 20 16:58:35 crc kubenswrapper[4558]: I0120 16:58:35.731681 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-db-create-8jk7l"] Jan 20 16:58:35 crc kubenswrapper[4558]: I0120 16:58:35.767730 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/24ec82c6-7ca6-47e0-9291-17327cb4b222-operator-scripts\") pod \"nova-api-b4f9-account-create-update-smxt8\" (UID: \"24ec82c6-7ca6-47e0-9291-17327cb4b222\") " pod="openstack-kuttl-tests/nova-api-b4f9-account-create-update-smxt8" Jan 20 16:58:35 crc kubenswrapper[4558]: I0120 16:58:35.767832 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8074b22b-e6be-4d65-a8fd-293434b77010-operator-scripts\") pod \"nova-cell0-db-create-bcpmp\" (UID: \"8074b22b-e6be-4d65-a8fd-293434b77010\") " pod="openstack-kuttl-tests/nova-cell0-db-create-bcpmp" Jan 20 16:58:35 crc kubenswrapper[4558]: I0120 16:58:35.767970 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j5ljf\" (UniqueName: \"kubernetes.io/projected/24ec82c6-7ca6-47e0-9291-17327cb4b222-kube-api-access-j5ljf\") pod \"nova-api-b4f9-account-create-update-smxt8\" (UID: \"24ec82c6-7ca6-47e0-9291-17327cb4b222\") " pod="openstack-kuttl-tests/nova-api-b4f9-account-create-update-smxt8" Jan 20 16:58:35 crc kubenswrapper[4558]: I0120 16:58:35.768027 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cz9nd\" (UniqueName: \"kubernetes.io/projected/8074b22b-e6be-4d65-a8fd-293434b77010-kube-api-access-cz9nd\") pod \"nova-cell0-db-create-bcpmp\" (UID: \"8074b22b-e6be-4d65-a8fd-293434b77010\") " pod="openstack-kuttl-tests/nova-cell0-db-create-bcpmp" Jan 20 16:58:35 crc kubenswrapper[4558]: I0120 16:58:35.768818 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8074b22b-e6be-4d65-a8fd-293434b77010-operator-scripts\") pod \"nova-cell0-db-create-bcpmp\" (UID: \"8074b22b-e6be-4d65-a8fd-293434b77010\") " pod="openstack-kuttl-tests/nova-cell0-db-create-bcpmp" Jan 20 16:58:35 crc kubenswrapper[4558]: I0120 16:58:35.797325 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-db-create-9rlkr" Jan 20 16:58:35 crc kubenswrapper[4558]: I0120 16:58:35.797426 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cz9nd\" (UniqueName: \"kubernetes.io/projected/8074b22b-e6be-4d65-a8fd-293434b77010-kube-api-access-cz9nd\") pod \"nova-cell0-db-create-bcpmp\" (UID: \"8074b22b-e6be-4d65-a8fd-293434b77010\") " pod="openstack-kuttl-tests/nova-cell0-db-create-bcpmp" Jan 20 16:58:35 crc kubenswrapper[4558]: I0120 16:58:35.800244 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell0-b5f5-account-create-update-5mqdg"] Jan 20 16:58:35 crc kubenswrapper[4558]: I0120 16:58:35.801262 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-b5f5-account-create-update-5mqdg" Jan 20 16:58:35 crc kubenswrapper[4558]: I0120 16:58:35.805664 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-db-secret" Jan 20 16:58:35 crc kubenswrapper[4558]: I0120 16:58:35.813897 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-b5f5-account-create-update-5mqdg"] Jan 20 16:58:35 crc kubenswrapper[4558]: I0120 16:58:35.870596 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xsllr\" (UniqueName: \"kubernetes.io/projected/d804cfef-d66b-4e73-9eaa-32500f685367-kube-api-access-xsllr\") pod \"nova-cell1-db-create-8jk7l\" (UID: \"d804cfef-d66b-4e73-9eaa-32500f685367\") " pod="openstack-kuttl-tests/nova-cell1-db-create-8jk7l" Jan 20 16:58:35 crc kubenswrapper[4558]: I0120 16:58:35.870653 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3252a616-e809-4059-9c6a-cf8af5aef236-operator-scripts\") pod \"nova-cell0-b5f5-account-create-update-5mqdg\" (UID: \"3252a616-e809-4059-9c6a-cf8af5aef236\") " pod="openstack-kuttl-tests/nova-cell0-b5f5-account-create-update-5mqdg" Jan 20 16:58:35 crc kubenswrapper[4558]: I0120 16:58:35.870682 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d804cfef-d66b-4e73-9eaa-32500f685367-operator-scripts\") pod \"nova-cell1-db-create-8jk7l\" (UID: \"d804cfef-d66b-4e73-9eaa-32500f685367\") " pod="openstack-kuttl-tests/nova-cell1-db-create-8jk7l" Jan 20 16:58:35 crc kubenswrapper[4558]: I0120 16:58:35.870720 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j5ljf\" (UniqueName: \"kubernetes.io/projected/24ec82c6-7ca6-47e0-9291-17327cb4b222-kube-api-access-j5ljf\") pod \"nova-api-b4f9-account-create-update-smxt8\" (UID: \"24ec82c6-7ca6-47e0-9291-17327cb4b222\") " pod="openstack-kuttl-tests/nova-api-b4f9-account-create-update-smxt8" Jan 20 16:58:35 crc kubenswrapper[4558]: I0120 16:58:35.870784 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lzxrx\" (UniqueName: \"kubernetes.io/projected/3252a616-e809-4059-9c6a-cf8af5aef236-kube-api-access-lzxrx\") pod \"nova-cell0-b5f5-account-create-update-5mqdg\" (UID: \"3252a616-e809-4059-9c6a-cf8af5aef236\") " pod="openstack-kuttl-tests/nova-cell0-b5f5-account-create-update-5mqdg" Jan 20 16:58:35 crc kubenswrapper[4558]: I0120 16:58:35.870858 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/24ec82c6-7ca6-47e0-9291-17327cb4b222-operator-scripts\") pod \"nova-api-b4f9-account-create-update-smxt8\" (UID: \"24ec82c6-7ca6-47e0-9291-17327cb4b222\") " pod="openstack-kuttl-tests/nova-api-b4f9-account-create-update-smxt8" Jan 20 16:58:35 crc kubenswrapper[4558]: I0120 16:58:35.871503 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/24ec82c6-7ca6-47e0-9291-17327cb4b222-operator-scripts\") pod \"nova-api-b4f9-account-create-update-smxt8\" (UID: \"24ec82c6-7ca6-47e0-9291-17327cb4b222\") " pod="openstack-kuttl-tests/nova-api-b4f9-account-create-update-smxt8" Jan 20 16:58:35 crc 
kubenswrapper[4558]: I0120 16:58:35.890979 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j5ljf\" (UniqueName: \"kubernetes.io/projected/24ec82c6-7ca6-47e0-9291-17327cb4b222-kube-api-access-j5ljf\") pod \"nova-api-b4f9-account-create-update-smxt8\" (UID: \"24ec82c6-7ca6-47e0-9291-17327cb4b222\") " pod="openstack-kuttl-tests/nova-api-b4f9-account-create-update-smxt8" Jan 20 16:58:35 crc kubenswrapper[4558]: I0120 16:58:35.933974 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-db-create-bcpmp" Jan 20 16:58:35 crc kubenswrapper[4558]: I0120 16:58:35.936645 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 16:58:35 crc kubenswrapper[4558]: I0120 16:58:35.936886 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d" containerName="ceilometer-central-agent" containerID="cri-o://b7ee658883bc3c2dedd442b787d58c71a76627afcfa42d35a4e3d2cbee602149" gracePeriod=30 Jan 20 16:58:35 crc kubenswrapper[4558]: I0120 16:58:35.937729 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d" containerName="proxy-httpd" containerID="cri-o://e1e5629ee3608ca2964283471ea9053abf2262180449bdda5f5f8acd3141ba80" gracePeriod=30 Jan 20 16:58:35 crc kubenswrapper[4558]: I0120 16:58:35.938063 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d" containerName="sg-core" containerID="cri-o://14757df3405b5b02eb99dcdfcfb12d5a83dff3bcc11e6b0ba0b6a92ccc27dcd4" gracePeriod=30 Jan 20 16:58:35 crc kubenswrapper[4558]: I0120 16:58:35.938593 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d" containerName="ceilometer-notification-agent" containerID="cri-o://852fdef698848c41f9bb05c5c2bd39cc27719921891f87d42bb115977032714c" gracePeriod=30 Jan 20 16:58:35 crc kubenswrapper[4558]: I0120 16:58:35.949572 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:58:35 crc kubenswrapper[4558]: I0120 16:58:35.989414 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lzxrx\" (UniqueName: \"kubernetes.io/projected/3252a616-e809-4059-9c6a-cf8af5aef236-kube-api-access-lzxrx\") pod \"nova-cell0-b5f5-account-create-update-5mqdg\" (UID: \"3252a616-e809-4059-9c6a-cf8af5aef236\") " pod="openstack-kuttl-tests/nova-cell0-b5f5-account-create-update-5mqdg" Jan 20 16:58:35 crc kubenswrapper[4558]: I0120 16:58:35.989683 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xsllr\" (UniqueName: \"kubernetes.io/projected/d804cfef-d66b-4e73-9eaa-32500f685367-kube-api-access-xsllr\") pod \"nova-cell1-db-create-8jk7l\" (UID: \"d804cfef-d66b-4e73-9eaa-32500f685367\") " pod="openstack-kuttl-tests/nova-cell1-db-create-8jk7l" Jan 20 16:58:35 crc kubenswrapper[4558]: I0120 16:58:35.989768 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3252a616-e809-4059-9c6a-cf8af5aef236-operator-scripts\") pod 
\"nova-cell0-b5f5-account-create-update-5mqdg\" (UID: \"3252a616-e809-4059-9c6a-cf8af5aef236\") " pod="openstack-kuttl-tests/nova-cell0-b5f5-account-create-update-5mqdg" Jan 20 16:58:35 crc kubenswrapper[4558]: I0120 16:58:35.989802 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d804cfef-d66b-4e73-9eaa-32500f685367-operator-scripts\") pod \"nova-cell1-db-create-8jk7l\" (UID: \"d804cfef-d66b-4e73-9eaa-32500f685367\") " pod="openstack-kuttl-tests/nova-cell1-db-create-8jk7l" Jan 20 16:58:35 crc kubenswrapper[4558]: I0120 16:58:35.991419 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3252a616-e809-4059-9c6a-cf8af5aef236-operator-scripts\") pod \"nova-cell0-b5f5-account-create-update-5mqdg\" (UID: \"3252a616-e809-4059-9c6a-cf8af5aef236\") " pod="openstack-kuttl-tests/nova-cell0-b5f5-account-create-update-5mqdg" Jan 20 16:58:35 crc kubenswrapper[4558]: I0120 16:58:35.992488 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d804cfef-d66b-4e73-9eaa-32500f685367-operator-scripts\") pod \"nova-cell1-db-create-8jk7l\" (UID: \"d804cfef-d66b-4e73-9eaa-32500f685367\") " pod="openstack-kuttl-tests/nova-cell1-db-create-8jk7l" Jan 20 16:58:35 crc kubenswrapper[4558]: I0120 16:58:35.996129 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-b4f9-account-create-update-smxt8" Jan 20 16:58:36 crc kubenswrapper[4558]: I0120 16:58:36.024896 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lzxrx\" (UniqueName: \"kubernetes.io/projected/3252a616-e809-4059-9c6a-cf8af5aef236-kube-api-access-lzxrx\") pod \"nova-cell0-b5f5-account-create-update-5mqdg\" (UID: \"3252a616-e809-4059-9c6a-cf8af5aef236\") " pod="openstack-kuttl-tests/nova-cell0-b5f5-account-create-update-5mqdg" Jan 20 16:58:36 crc kubenswrapper[4558]: I0120 16:58:36.026515 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xsllr\" (UniqueName: \"kubernetes.io/projected/d804cfef-d66b-4e73-9eaa-32500f685367-kube-api-access-xsllr\") pod \"nova-cell1-db-create-8jk7l\" (UID: \"d804cfef-d66b-4e73-9eaa-32500f685367\") " pod="openstack-kuttl-tests/nova-cell1-db-create-8jk7l" Jan 20 16:58:36 crc kubenswrapper[4558]: I0120 16:58:36.034890 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-9ff1-account-create-update-zlhvt"] Jan 20 16:58:36 crc kubenswrapper[4558]: I0120 16:58:36.040620 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-9ff1-account-create-update-zlhvt" Jan 20 16:58:36 crc kubenswrapper[4558]: I0120 16:58:36.042643 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-db-secret" Jan 20 16:58:36 crc kubenswrapper[4558]: I0120 16:58:36.046689 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-db-create-8jk7l" Jan 20 16:58:36 crc kubenswrapper[4558]: I0120 16:58:36.060746 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-9ff1-account-create-update-zlhvt"] Jan 20 16:58:36 crc kubenswrapper[4558]: I0120 16:58:36.099833 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q9gxj\" (UniqueName: \"kubernetes.io/projected/9cc658b3-2ddb-4902-bd24-a0eb9e2ad7c4-kube-api-access-q9gxj\") pod \"nova-cell1-9ff1-account-create-update-zlhvt\" (UID: \"9cc658b3-2ddb-4902-bd24-a0eb9e2ad7c4\") " pod="openstack-kuttl-tests/nova-cell1-9ff1-account-create-update-zlhvt" Jan 20 16:58:36 crc kubenswrapper[4558]: I0120 16:58:36.100284 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9cc658b3-2ddb-4902-bd24-a0eb9e2ad7c4-operator-scripts\") pod \"nova-cell1-9ff1-account-create-update-zlhvt\" (UID: \"9cc658b3-2ddb-4902-bd24-a0eb9e2ad7c4\") " pod="openstack-kuttl-tests/nova-cell1-9ff1-account-create-update-zlhvt" Jan 20 16:58:36 crc kubenswrapper[4558]: I0120 16:58:36.191683 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-b5f5-account-create-update-5mqdg" Jan 20 16:58:36 crc kubenswrapper[4558]: I0120 16:58:36.203999 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q9gxj\" (UniqueName: \"kubernetes.io/projected/9cc658b3-2ddb-4902-bd24-a0eb9e2ad7c4-kube-api-access-q9gxj\") pod \"nova-cell1-9ff1-account-create-update-zlhvt\" (UID: \"9cc658b3-2ddb-4902-bd24-a0eb9e2ad7c4\") " pod="openstack-kuttl-tests/nova-cell1-9ff1-account-create-update-zlhvt" Jan 20 16:58:36 crc kubenswrapper[4558]: I0120 16:58:36.204139 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9cc658b3-2ddb-4902-bd24-a0eb9e2ad7c4-operator-scripts\") pod \"nova-cell1-9ff1-account-create-update-zlhvt\" (UID: \"9cc658b3-2ddb-4902-bd24-a0eb9e2ad7c4\") " pod="openstack-kuttl-tests/nova-cell1-9ff1-account-create-update-zlhvt" Jan 20 16:58:36 crc kubenswrapper[4558]: I0120 16:58:36.205280 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9cc658b3-2ddb-4902-bd24-a0eb9e2ad7c4-operator-scripts\") pod \"nova-cell1-9ff1-account-create-update-zlhvt\" (UID: \"9cc658b3-2ddb-4902-bd24-a0eb9e2ad7c4\") " pod="openstack-kuttl-tests/nova-cell1-9ff1-account-create-update-zlhvt" Jan 20 16:58:36 crc kubenswrapper[4558]: I0120 16:58:36.231246 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q9gxj\" (UniqueName: \"kubernetes.io/projected/9cc658b3-2ddb-4902-bd24-a0eb9e2ad7c4-kube-api-access-q9gxj\") pod \"nova-cell1-9ff1-account-create-update-zlhvt\" (UID: \"9cc658b3-2ddb-4902-bd24-a0eb9e2ad7c4\") " pod="openstack-kuttl-tests/nova-cell1-9ff1-account-create-update-zlhvt" Jan 20 16:58:36 crc kubenswrapper[4558]: I0120 16:58:36.240700 4558 generic.go:334] "Generic (PLEG): container finished" podID="2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d" containerID="e1e5629ee3608ca2964283471ea9053abf2262180449bdda5f5f8acd3141ba80" exitCode=0 Jan 20 16:58:36 crc kubenswrapper[4558]: I0120 16:58:36.240724 4558 generic.go:334] "Generic (PLEG): container finished" 
podID="2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d" containerID="14757df3405b5b02eb99dcdfcfb12d5a83dff3bcc11e6b0ba0b6a92ccc27dcd4" exitCode=2 Jan 20 16:58:36 crc kubenswrapper[4558]: I0120 16:58:36.241754 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d","Type":"ContainerDied","Data":"e1e5629ee3608ca2964283471ea9053abf2262180449bdda5f5f8acd3141ba80"} Jan 20 16:58:36 crc kubenswrapper[4558]: I0120 16:58:36.241782 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d","Type":"ContainerDied","Data":"14757df3405b5b02eb99dcdfcfb12d5a83dff3bcc11e6b0ba0b6a92ccc27dcd4"} Jan 20 16:58:36 crc kubenswrapper[4558]: I0120 16:58:36.278603 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-db-create-9rlkr"] Jan 20 16:58:36 crc kubenswrapper[4558]: I0120 16:58:36.385048 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-9ff1-account-create-update-zlhvt" Jan 20 16:58:36 crc kubenswrapper[4558]: I0120 16:58:36.529133 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-b4f9-account-create-update-smxt8"] Jan 20 16:58:36 crc kubenswrapper[4558]: I0120 16:58:36.538572 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-db-create-bcpmp"] Jan 20 16:58:36 crc kubenswrapper[4558]: I0120 16:58:36.620268 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-db-create-8jk7l"] Jan 20 16:58:36 crc kubenswrapper[4558]: I0120 16:58:36.794921 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-b5f5-account-create-update-5mqdg"] Jan 20 16:58:36 crc kubenswrapper[4558]: I0120 16:58:36.928882 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-9ff1-account-create-update-zlhvt"] Jan 20 16:58:37 crc kubenswrapper[4558]: I0120 16:58:37.256314 4558 generic.go:334] "Generic (PLEG): container finished" podID="a66af596-2f1d-496b-87cc-c67d42f99a56" containerID="8b7b47b4782e3e9aacef0f5eea2d8d7e660530d49bda8b1aa64c6b0f0a557740" exitCode=0 Jan 20 16:58:37 crc kubenswrapper[4558]: I0120 16:58:37.256379 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-db-create-9rlkr" event={"ID":"a66af596-2f1d-496b-87cc-c67d42f99a56","Type":"ContainerDied","Data":"8b7b47b4782e3e9aacef0f5eea2d8d7e660530d49bda8b1aa64c6b0f0a557740"} Jan 20 16:58:37 crc kubenswrapper[4558]: I0120 16:58:37.256403 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-db-create-9rlkr" event={"ID":"a66af596-2f1d-496b-87cc-c67d42f99a56","Type":"ContainerStarted","Data":"f3c6c93e6f1635fa5b4eddd93413f89f474ec404aca11eed36f5c661d008aadb"} Jan 20 16:58:37 crc kubenswrapper[4558]: I0120 16:58:37.259508 4558 generic.go:334] "Generic (PLEG): container finished" podID="3252a616-e809-4059-9c6a-cf8af5aef236" containerID="591c362bc682cbd8e0d3cbd8295b50cde5fa85ed1fb56d6f747d6009d445c395" exitCode=0 Jan 20 16:58:37 crc kubenswrapper[4558]: I0120 16:58:37.259557 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-b5f5-account-create-update-5mqdg" event={"ID":"3252a616-e809-4059-9c6a-cf8af5aef236","Type":"ContainerDied","Data":"591c362bc682cbd8e0d3cbd8295b50cde5fa85ed1fb56d6f747d6009d445c395"} Jan 20 
16:58:37 crc kubenswrapper[4558]: I0120 16:58:37.259572 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-b5f5-account-create-update-5mqdg" event={"ID":"3252a616-e809-4059-9c6a-cf8af5aef236","Type":"ContainerStarted","Data":"9e363c78db52b01ea564b265ce8f9bd77a8b2d9ba237581095eef10874a6a72e"} Jan 20 16:58:37 crc kubenswrapper[4558]: I0120 16:58:37.262726 4558 generic.go:334] "Generic (PLEG): container finished" podID="2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d" containerID="b7ee658883bc3c2dedd442b787d58c71a76627afcfa42d35a4e3d2cbee602149" exitCode=0 Jan 20 16:58:37 crc kubenswrapper[4558]: I0120 16:58:37.262760 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d","Type":"ContainerDied","Data":"b7ee658883bc3c2dedd442b787d58c71a76627afcfa42d35a4e3d2cbee602149"} Jan 20 16:58:37 crc kubenswrapper[4558]: I0120 16:58:37.269077 4558 generic.go:334] "Generic (PLEG): container finished" podID="d804cfef-d66b-4e73-9eaa-32500f685367" containerID="d61066676614c4ff8053fc80cbca3d6e61c8c8994020479c786429e5a7609638" exitCode=0 Jan 20 16:58:37 crc kubenswrapper[4558]: I0120 16:58:37.269293 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-db-create-8jk7l" event={"ID":"d804cfef-d66b-4e73-9eaa-32500f685367","Type":"ContainerDied","Data":"d61066676614c4ff8053fc80cbca3d6e61c8c8994020479c786429e5a7609638"} Jan 20 16:58:37 crc kubenswrapper[4558]: I0120 16:58:37.269392 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-db-create-8jk7l" event={"ID":"d804cfef-d66b-4e73-9eaa-32500f685367","Type":"ContainerStarted","Data":"6a9d4e44bdb8bdeb33793ecd9b3832a3200e7b6e7a0cb09edac0cd54cbce8063"} Jan 20 16:58:37 crc kubenswrapper[4558]: I0120 16:58:37.274713 4558 generic.go:334] "Generic (PLEG): container finished" podID="9cc658b3-2ddb-4902-bd24-a0eb9e2ad7c4" containerID="9dc6d188111caa16be7e42f9dc05193b4a08598807aae37d3d8b8dc08bb5b17e" exitCode=0 Jan 20 16:58:37 crc kubenswrapper[4558]: I0120 16:58:37.274786 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-9ff1-account-create-update-zlhvt" event={"ID":"9cc658b3-2ddb-4902-bd24-a0eb9e2ad7c4","Type":"ContainerDied","Data":"9dc6d188111caa16be7e42f9dc05193b4a08598807aae37d3d8b8dc08bb5b17e"} Jan 20 16:58:37 crc kubenswrapper[4558]: I0120 16:58:37.274854 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-9ff1-account-create-update-zlhvt" event={"ID":"9cc658b3-2ddb-4902-bd24-a0eb9e2ad7c4","Type":"ContainerStarted","Data":"b6b8d8c6a0db18595d54a7526b620c536c71fe92446f8f96f238bb728a8f0e2c"} Jan 20 16:58:37 crc kubenswrapper[4558]: I0120 16:58:37.276364 4558 generic.go:334] "Generic (PLEG): container finished" podID="24ec82c6-7ca6-47e0-9291-17327cb4b222" containerID="696014d7096181a1aaf1489ef7979589908b3b72428090b1c90f5210f07074f6" exitCode=0 Jan 20 16:58:37 crc kubenswrapper[4558]: I0120 16:58:37.276414 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-b4f9-account-create-update-smxt8" event={"ID":"24ec82c6-7ca6-47e0-9291-17327cb4b222","Type":"ContainerDied","Data":"696014d7096181a1aaf1489ef7979589908b3b72428090b1c90f5210f07074f6"} Jan 20 16:58:37 crc kubenswrapper[4558]: I0120 16:58:37.276428 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-b4f9-account-create-update-smxt8" 
event={"ID":"24ec82c6-7ca6-47e0-9291-17327cb4b222","Type":"ContainerStarted","Data":"c902f94120ae129a9f181e1d6d944dfb59283d5b88f1a1b92b917566af45635d"} Jan 20 16:58:37 crc kubenswrapper[4558]: I0120 16:58:37.278657 4558 generic.go:334] "Generic (PLEG): container finished" podID="8074b22b-e6be-4d65-a8fd-293434b77010" containerID="b64bb5dba514acffdebd80ad0a9174f80f0ddc9282c335ff220ed3779508fb8b" exitCode=0 Jan 20 16:58:37 crc kubenswrapper[4558]: I0120 16:58:37.278682 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-db-create-bcpmp" event={"ID":"8074b22b-e6be-4d65-a8fd-293434b77010","Type":"ContainerDied","Data":"b64bb5dba514acffdebd80ad0a9174f80f0ddc9282c335ff220ed3779508fb8b"} Jan 20 16:58:37 crc kubenswrapper[4558]: I0120 16:58:37.278695 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-db-create-bcpmp" event={"ID":"8074b22b-e6be-4d65-a8fd-293434b77010","Type":"ContainerStarted","Data":"75248487e50dc90b2ed884cd13a36b02e6cd959acf2681229aaac6bc6e180abd"} Jan 20 16:58:37 crc kubenswrapper[4558]: E0120 16:58:37.328460 4558 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd804cfef_d66b_4e73_9eaa_32500f685367.slice/crio-d61066676614c4ff8053fc80cbca3d6e61c8c8994020479c786429e5a7609638.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd804cfef_d66b_4e73_9eaa_32500f685367.slice/crio-conmon-d61066676614c4ff8053fc80cbca3d6e61c8c8994020479c786429e5a7609638.scope\": RecentStats: unable to find data in memory cache]" Jan 20 16:58:39 crc kubenswrapper[4558]: I0120 16:58:39.293495 4558 generic.go:334] "Generic (PLEG): container finished" podID="2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d" containerID="852fdef698848c41f9bb05c5c2bd39cc27719921891f87d42bb115977032714c" exitCode=0 Jan 20 16:58:39 crc kubenswrapper[4558]: I0120 16:58:39.293578 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d","Type":"ContainerDied","Data":"852fdef698848c41f9bb05c5c2bd39cc27719921891f87d42bb115977032714c"} Jan 20 16:58:39 crc kubenswrapper[4558]: I0120 16:58:39.462662 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/swift-proxy-6b7b5d66dd-8plzj" Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.318906 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-9ff1-account-create-update-zlhvt" event={"ID":"9cc658b3-2ddb-4902-bd24-a0eb9e2ad7c4","Type":"ContainerDied","Data":"b6b8d8c6a0db18595d54a7526b620c536c71fe92446f8f96f238bb728a8f0e2c"} Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.319358 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b6b8d8c6a0db18595d54a7526b620c536c71fe92446f8f96f238bb728a8f0e2c" Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.321421 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-b4f9-account-create-update-smxt8" event={"ID":"24ec82c6-7ca6-47e0-9291-17327cb4b222","Type":"ContainerDied","Data":"c902f94120ae129a9f181e1d6d944dfb59283d5b88f1a1b92b917566af45635d"} Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.321477 4558 pod_container_deletor.go:80] "Container not found in pod's containers" 
containerID="c902f94120ae129a9f181e1d6d944dfb59283d5b88f1a1b92b917566af45635d" Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.324721 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-db-create-bcpmp" event={"ID":"8074b22b-e6be-4d65-a8fd-293434b77010","Type":"ContainerDied","Data":"75248487e50dc90b2ed884cd13a36b02e6cd959acf2681229aaac6bc6e180abd"} Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.324757 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="75248487e50dc90b2ed884cd13a36b02e6cd959acf2681229aaac6bc6e180abd" Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.326398 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-db-create-9rlkr" event={"ID":"a66af596-2f1d-496b-87cc-c67d42f99a56","Type":"ContainerDied","Data":"f3c6c93e6f1635fa5b4eddd93413f89f474ec404aca11eed36f5c661d008aadb"} Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.326418 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f3c6c93e6f1635fa5b4eddd93413f89f474ec404aca11eed36f5c661d008aadb" Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.327596 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-b5f5-account-create-update-5mqdg" event={"ID":"3252a616-e809-4059-9c6a-cf8af5aef236","Type":"ContainerDied","Data":"9e363c78db52b01ea564b265ce8f9bd77a8b2d9ba237581095eef10874a6a72e"} Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.327635 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9e363c78db52b01ea564b265ce8f9bd77a8b2d9ba237581095eef10874a6a72e" Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.329810 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-db-create-8jk7l" event={"ID":"d804cfef-d66b-4e73-9eaa-32500f685367","Type":"ContainerDied","Data":"6a9d4e44bdb8bdeb33793ecd9b3832a3200e7b6e7a0cb09edac0cd54cbce8063"} Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.329843 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6a9d4e44bdb8bdeb33793ecd9b3832a3200e7b6e7a0cb09edac0cd54cbce8063" Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.338332 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-b4f9-account-create-update-smxt8" Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.342860 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-db-create-8jk7l" Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.353221 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-db-create-bcpmp" Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.373015 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-9ff1-account-create-update-zlhvt" Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.373086 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-b5f5-account-create-update-5mqdg" Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.378909 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-db-create-9rlkr" Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.413239 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/24ec82c6-7ca6-47e0-9291-17327cb4b222-operator-scripts\") pod \"24ec82c6-7ca6-47e0-9291-17327cb4b222\" (UID: \"24ec82c6-7ca6-47e0-9291-17327cb4b222\") " Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.413421 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j5ljf\" (UniqueName: \"kubernetes.io/projected/24ec82c6-7ca6-47e0-9291-17327cb4b222-kube-api-access-j5ljf\") pod \"24ec82c6-7ca6-47e0-9291-17327cb4b222\" (UID: \"24ec82c6-7ca6-47e0-9291-17327cb4b222\") " Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.414085 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/24ec82c6-7ca6-47e0-9291-17327cb4b222-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "24ec82c6-7ca6-47e0-9291-17327cb4b222" (UID: "24ec82c6-7ca6-47e0-9291-17327cb4b222"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.417611 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/24ec82c6-7ca6-47e0-9291-17327cb4b222-kube-api-access-j5ljf" (OuterVolumeSpecName: "kube-api-access-j5ljf") pod "24ec82c6-7ca6-47e0-9291-17327cb4b222" (UID: "24ec82c6-7ca6-47e0-9291-17327cb4b222"). InnerVolumeSpecName "kube-api-access-j5ljf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.432452 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.514442 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xsllr\" (UniqueName: \"kubernetes.io/projected/d804cfef-d66b-4e73-9eaa-32500f685367-kube-api-access-xsllr\") pod \"d804cfef-d66b-4e73-9eaa-32500f685367\" (UID: \"d804cfef-d66b-4e73-9eaa-32500f685367\") " Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.514474 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-frsxr\" (UniqueName: \"kubernetes.io/projected/2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d-kube-api-access-frsxr\") pod \"2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d\" (UID: \"2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d\") " Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.514491 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q9gxj\" (UniqueName: \"kubernetes.io/projected/9cc658b3-2ddb-4902-bd24-a0eb9e2ad7c4-kube-api-access-q9gxj\") pod \"9cc658b3-2ddb-4902-bd24-a0eb9e2ad7c4\" (UID: \"9cc658b3-2ddb-4902-bd24-a0eb9e2ad7c4\") " Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.514509 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d804cfef-d66b-4e73-9eaa-32500f685367-operator-scripts\") pod \"d804cfef-d66b-4e73-9eaa-32500f685367\" (UID: \"d804cfef-d66b-4e73-9eaa-32500f685367\") " Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.514541 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d-config-data\") pod \"2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d\" (UID: \"2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d\") " Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.514570 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d-sg-core-conf-yaml\") pod \"2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d\" (UID: \"2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d\") " Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.514587 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9cc658b3-2ddb-4902-bd24-a0eb9e2ad7c4-operator-scripts\") pod \"9cc658b3-2ddb-4902-bd24-a0eb9e2ad7c4\" (UID: \"9cc658b3-2ddb-4902-bd24-a0eb9e2ad7c4\") " Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.514608 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d-log-httpd\") pod \"2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d\" (UID: \"2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d\") " Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.514677 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8074b22b-e6be-4d65-a8fd-293434b77010-operator-scripts\") pod \"8074b22b-e6be-4d65-a8fd-293434b77010\" (UID: \"8074b22b-e6be-4d65-a8fd-293434b77010\") " Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.514694 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzxrx\" (UniqueName: 
\"kubernetes.io/projected/3252a616-e809-4059-9c6a-cf8af5aef236-kube-api-access-lzxrx\") pod \"3252a616-e809-4059-9c6a-cf8af5aef236\" (UID: \"3252a616-e809-4059-9c6a-cf8af5aef236\") " Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.514711 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cz9nd\" (UniqueName: \"kubernetes.io/projected/8074b22b-e6be-4d65-a8fd-293434b77010-kube-api-access-cz9nd\") pod \"8074b22b-e6be-4d65-a8fd-293434b77010\" (UID: \"8074b22b-e6be-4d65-a8fd-293434b77010\") " Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.514755 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d-scripts\") pod \"2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d\" (UID: \"2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d\") " Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.514772 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a66af596-2f1d-496b-87cc-c67d42f99a56-operator-scripts\") pod \"a66af596-2f1d-496b-87cc-c67d42f99a56\" (UID: \"a66af596-2f1d-496b-87cc-c67d42f99a56\") " Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.514799 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d-run-httpd\") pod \"2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d\" (UID: \"2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d\") " Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.514842 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d-combined-ca-bundle\") pod \"2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d\" (UID: \"2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d\") " Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.514868 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dggll\" (UniqueName: \"kubernetes.io/projected/a66af596-2f1d-496b-87cc-c67d42f99a56-kube-api-access-dggll\") pod \"a66af596-2f1d-496b-87cc-c67d42f99a56\" (UID: \"a66af596-2f1d-496b-87cc-c67d42f99a56\") " Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.514891 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3252a616-e809-4059-9c6a-cf8af5aef236-operator-scripts\") pod \"3252a616-e809-4059-9c6a-cf8af5aef236\" (UID: \"3252a616-e809-4059-9c6a-cf8af5aef236\") " Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.515249 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j5ljf\" (UniqueName: \"kubernetes.io/projected/24ec82c6-7ca6-47e0-9291-17327cb4b222-kube-api-access-j5ljf\") on node \"crc\" DevicePath \"\"" Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.515266 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/24ec82c6-7ca6-47e0-9291-17327cb4b222-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.515321 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d804cfef-d66b-4e73-9eaa-32500f685367-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d804cfef-d66b-4e73-9eaa-32500f685367" (UID: 
"d804cfef-d66b-4e73-9eaa-32500f685367"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.515482 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d" (UID: "2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.515661 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3252a616-e809-4059-9c6a-cf8af5aef236-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "3252a616-e809-4059-9c6a-cf8af5aef236" (UID: "3252a616-e809-4059-9c6a-cf8af5aef236"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.515745 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d" (UID: "2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.516079 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9cc658b3-2ddb-4902-bd24-a0eb9e2ad7c4-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "9cc658b3-2ddb-4902-bd24-a0eb9e2ad7c4" (UID: "9cc658b3-2ddb-4902-bd24-a0eb9e2ad7c4"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.516428 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8074b22b-e6be-4d65-a8fd-293434b77010-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "8074b22b-e6be-4d65-a8fd-293434b77010" (UID: "8074b22b-e6be-4d65-a8fd-293434b77010"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.518323 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8074b22b-e6be-4d65-a8fd-293434b77010-kube-api-access-cz9nd" (OuterVolumeSpecName: "kube-api-access-cz9nd") pod "8074b22b-e6be-4d65-a8fd-293434b77010" (UID: "8074b22b-e6be-4d65-a8fd-293434b77010"). InnerVolumeSpecName "kube-api-access-cz9nd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.518600 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a66af596-2f1d-496b-87cc-c67d42f99a56-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a66af596-2f1d-496b-87cc-c67d42f99a56" (UID: "a66af596-2f1d-496b-87cc-c67d42f99a56"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.518904 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d804cfef-d66b-4e73-9eaa-32500f685367-kube-api-access-xsllr" (OuterVolumeSpecName: "kube-api-access-xsllr") pod "d804cfef-d66b-4e73-9eaa-32500f685367" (UID: "d804cfef-d66b-4e73-9eaa-32500f685367"). InnerVolumeSpecName "kube-api-access-xsllr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.518962 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d-scripts" (OuterVolumeSpecName: "scripts") pod "2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d" (UID: "2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.519088 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9cc658b3-2ddb-4902-bd24-a0eb9e2ad7c4-kube-api-access-q9gxj" (OuterVolumeSpecName: "kube-api-access-q9gxj") pod "9cc658b3-2ddb-4902-bd24-a0eb9e2ad7c4" (UID: "9cc658b3-2ddb-4902-bd24-a0eb9e2ad7c4"). InnerVolumeSpecName "kube-api-access-q9gxj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.521684 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a66af596-2f1d-496b-87cc-c67d42f99a56-kube-api-access-dggll" (OuterVolumeSpecName: "kube-api-access-dggll") pod "a66af596-2f1d-496b-87cc-c67d42f99a56" (UID: "a66af596-2f1d-496b-87cc-c67d42f99a56"). InnerVolumeSpecName "kube-api-access-dggll". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.521807 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d-kube-api-access-frsxr" (OuterVolumeSpecName: "kube-api-access-frsxr") pod "2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d" (UID: "2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d"). InnerVolumeSpecName "kube-api-access-frsxr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.522032 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3252a616-e809-4059-9c6a-cf8af5aef236-kube-api-access-lzxrx" (OuterVolumeSpecName: "kube-api-access-lzxrx") pod "3252a616-e809-4059-9c6a-cf8af5aef236" (UID: "3252a616-e809-4059-9c6a-cf8af5aef236"). InnerVolumeSpecName "kube-api-access-lzxrx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.536668 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d" (UID: "2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.571708 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d" (UID: "2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.594372 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d-config-data" (OuterVolumeSpecName: "config-data") pod "2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d" (UID: "2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.616872 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dggll\" (UniqueName: \"kubernetes.io/projected/a66af596-2f1d-496b-87cc-c67d42f99a56-kube-api-access-dggll\") on node \"crc\" DevicePath \"\"" Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.616955 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3252a616-e809-4059-9c6a-cf8af5aef236-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.617021 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xsllr\" (UniqueName: \"kubernetes.io/projected/d804cfef-d66b-4e73-9eaa-32500f685367-kube-api-access-xsllr\") on node \"crc\" DevicePath \"\"" Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.617080 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-frsxr\" (UniqueName: \"kubernetes.io/projected/2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d-kube-api-access-frsxr\") on node \"crc\" DevicePath \"\"" Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.617135 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q9gxj\" (UniqueName: \"kubernetes.io/projected/9cc658b3-2ddb-4902-bd24-a0eb9e2ad7c4-kube-api-access-q9gxj\") on node \"crc\" DevicePath \"\"" Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.619367 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d804cfef-d66b-4e73-9eaa-32500f685367-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.619438 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.619494 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.619542 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9cc658b3-2ddb-4902-bd24-a0eb9e2ad7c4-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.619588 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.619633 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8074b22b-e6be-4d65-a8fd-293434b77010-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.619705 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzxrx\" (UniqueName: \"kubernetes.io/projected/3252a616-e809-4059-9c6a-cf8af5aef236-kube-api-access-lzxrx\") on node \"crc\" DevicePath \"\"" Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.619759 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cz9nd\" (UniqueName: \"kubernetes.io/projected/8074b22b-e6be-4d65-a8fd-293434b77010-kube-api-access-cz9nd\") on node \"crc\" DevicePath \"\"" Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.619806 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.619858 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a66af596-2f1d-496b-87cc-c67d42f99a56-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.619908 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 16:58:42 crc kubenswrapper[4558]: I0120 16:58:42.619953 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 16:58:43 crc kubenswrapper[4558]: I0120 16:58:43.338014 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstackclient" event={"ID":"4fe22ec2-af71-47bf-8053-86dee012df91","Type":"ContainerStarted","Data":"00c9d686e4cf00913af81033ddbff9abfc51f8f3c4f1da58ce8a3bff41d506fa"} Jan 20 16:58:43 crc kubenswrapper[4558]: I0120 16:58:43.340273 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-db-create-9rlkr" Jan 20 16:58:43 crc kubenswrapper[4558]: I0120 16:58:43.340291 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:58:43 crc kubenswrapper[4558]: I0120 16:58:43.340266 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d","Type":"ContainerDied","Data":"7f488c284eb5f7576e9286a6005da650571a0368095b057aa29ef1b7c718c579"} Jan 20 16:58:43 crc kubenswrapper[4558]: I0120 16:58:43.340321 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-b5f5-account-create-update-5mqdg" Jan 20 16:58:43 crc kubenswrapper[4558]: I0120 16:58:43.340295 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-db-create-8jk7l" Jan 20 16:58:43 crc kubenswrapper[4558]: I0120 16:58:43.340271 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-b4f9-account-create-update-smxt8" Jan 20 16:58:43 crc kubenswrapper[4558]: I0120 16:58:43.340354 4558 scope.go:117] "RemoveContainer" containerID="e1e5629ee3608ca2964283471ea9053abf2262180449bdda5f5f8acd3141ba80" Jan 20 16:58:43 crc kubenswrapper[4558]: I0120 16:58:43.340380 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-db-create-bcpmp" Jan 20 16:58:43 crc kubenswrapper[4558]: I0120 16:58:43.340457 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-9ff1-account-create-update-zlhvt" Jan 20 16:58:43 crc kubenswrapper[4558]: I0120 16:58:43.356418 4558 scope.go:117] "RemoveContainer" containerID="14757df3405b5b02eb99dcdfcfb12d5a83dff3bcc11e6b0ba0b6a92ccc27dcd4" Jan 20 16:58:43 crc kubenswrapper[4558]: I0120 16:58:43.373268 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/openstackclient" podStartSLOduration=2.065553018 podStartE2EDuration="12.373251407s" podCreationTimestamp="2026-01-20 16:58:31 +0000 UTC" firstStartedPulling="2026-01-20 16:58:31.906886994 +0000 UTC m=+1005.667224961" lastFinishedPulling="2026-01-20 16:58:42.214585383 +0000 UTC m=+1015.974923350" observedRunningTime="2026-01-20 16:58:43.362395553 +0000 UTC m=+1017.122733520" watchObservedRunningTime="2026-01-20 16:58:43.373251407 +0000 UTC m=+1017.133589374" Jan 20 16:58:43 crc kubenswrapper[4558]: I0120 16:58:43.373515 4558 scope.go:117] "RemoveContainer" containerID="852fdef698848c41f9bb05c5c2bd39cc27719921891f87d42bb115977032714c" Jan 20 16:58:43 crc kubenswrapper[4558]: I0120 16:58:43.391617 4558 scope.go:117] "RemoveContainer" containerID="b7ee658883bc3c2dedd442b787d58c71a76627afcfa42d35a4e3d2cbee602149" Jan 20 16:58:43 crc kubenswrapper[4558]: I0120 16:58:43.394291 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 16:58:43 crc kubenswrapper[4558]: I0120 16:58:43.407721 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 16:58:43 crc kubenswrapper[4558]: I0120 16:58:43.419766 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 16:58:43 crc kubenswrapper[4558]: E0120 16:58:43.420070 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a66af596-2f1d-496b-87cc-c67d42f99a56" containerName="mariadb-database-create" Jan 20 16:58:43 crc kubenswrapper[4558]: I0120 16:58:43.420087 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a66af596-2f1d-496b-87cc-c67d42f99a56" containerName="mariadb-database-create" Jan 20 16:58:43 crc kubenswrapper[4558]: E0120 16:58:43.420104 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9cc658b3-2ddb-4902-bd24-a0eb9e2ad7c4" containerName="mariadb-account-create-update" Jan 20 16:58:43 crc kubenswrapper[4558]: I0120 16:58:43.420111 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9cc658b3-2ddb-4902-bd24-a0eb9e2ad7c4" containerName="mariadb-account-create-update" Jan 20 16:58:43 crc kubenswrapper[4558]: E0120 16:58:43.420135 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="24ec82c6-7ca6-47e0-9291-17327cb4b222" containerName="mariadb-account-create-update" Jan 20 16:58:43 crc kubenswrapper[4558]: I0120 16:58:43.420143 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="24ec82c6-7ca6-47e0-9291-17327cb4b222" 
containerName="mariadb-account-create-update" Jan 20 16:58:43 crc kubenswrapper[4558]: E0120 16:58:43.420150 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d" containerName="proxy-httpd" Jan 20 16:58:43 crc kubenswrapper[4558]: I0120 16:58:43.420155 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d" containerName="proxy-httpd" Jan 20 16:58:43 crc kubenswrapper[4558]: E0120 16:58:43.420182 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3252a616-e809-4059-9c6a-cf8af5aef236" containerName="mariadb-account-create-update" Jan 20 16:58:43 crc kubenswrapper[4558]: I0120 16:58:43.420196 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="3252a616-e809-4059-9c6a-cf8af5aef236" containerName="mariadb-account-create-update" Jan 20 16:58:43 crc kubenswrapper[4558]: E0120 16:58:43.420209 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d" containerName="sg-core" Jan 20 16:58:43 crc kubenswrapper[4558]: I0120 16:58:43.420215 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d" containerName="sg-core" Jan 20 16:58:43 crc kubenswrapper[4558]: E0120 16:58:43.420223 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d" containerName="ceilometer-notification-agent" Jan 20 16:58:43 crc kubenswrapper[4558]: I0120 16:58:43.420228 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d" containerName="ceilometer-notification-agent" Jan 20 16:58:43 crc kubenswrapper[4558]: E0120 16:58:43.420235 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d" containerName="ceilometer-central-agent" Jan 20 16:58:43 crc kubenswrapper[4558]: I0120 16:58:43.420241 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d" containerName="ceilometer-central-agent" Jan 20 16:58:43 crc kubenswrapper[4558]: E0120 16:58:43.420251 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d804cfef-d66b-4e73-9eaa-32500f685367" containerName="mariadb-database-create" Jan 20 16:58:43 crc kubenswrapper[4558]: I0120 16:58:43.420257 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d804cfef-d66b-4e73-9eaa-32500f685367" containerName="mariadb-database-create" Jan 20 16:58:43 crc kubenswrapper[4558]: E0120 16:58:43.420265 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8074b22b-e6be-4d65-a8fd-293434b77010" containerName="mariadb-database-create" Jan 20 16:58:43 crc kubenswrapper[4558]: I0120 16:58:43.420271 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8074b22b-e6be-4d65-a8fd-293434b77010" containerName="mariadb-database-create" Jan 20 16:58:43 crc kubenswrapper[4558]: I0120 16:58:43.420416 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d" containerName="proxy-httpd" Jan 20 16:58:43 crc kubenswrapper[4558]: I0120 16:58:43.420429 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d" containerName="ceilometer-central-agent" Jan 20 16:58:43 crc kubenswrapper[4558]: I0120 16:58:43.420437 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d" containerName="sg-core" Jan 20 16:58:43 crc kubenswrapper[4558]: 
I0120 16:58:43.420453 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="24ec82c6-7ca6-47e0-9291-17327cb4b222" containerName="mariadb-account-create-update" Jan 20 16:58:43 crc kubenswrapper[4558]: I0120 16:58:43.420461 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="9cc658b3-2ddb-4902-bd24-a0eb9e2ad7c4" containerName="mariadb-account-create-update" Jan 20 16:58:43 crc kubenswrapper[4558]: I0120 16:58:43.420471 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d" containerName="ceilometer-notification-agent" Jan 20 16:58:43 crc kubenswrapper[4558]: I0120 16:58:43.420478 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a66af596-2f1d-496b-87cc-c67d42f99a56" containerName="mariadb-database-create" Jan 20 16:58:43 crc kubenswrapper[4558]: I0120 16:58:43.420485 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d804cfef-d66b-4e73-9eaa-32500f685367" containerName="mariadb-database-create" Jan 20 16:58:43 crc kubenswrapper[4558]: I0120 16:58:43.420496 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="3252a616-e809-4059-9c6a-cf8af5aef236" containerName="mariadb-account-create-update" Jan 20 16:58:43 crc kubenswrapper[4558]: I0120 16:58:43.420504 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8074b22b-e6be-4d65-a8fd-293434b77010" containerName="mariadb-database-create" Jan 20 16:58:43 crc kubenswrapper[4558]: I0120 16:58:43.422018 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:58:43 crc kubenswrapper[4558]: I0120 16:58:43.424702 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 16:58:43 crc kubenswrapper[4558]: I0120 16:58:43.424930 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 16:58:43 crc kubenswrapper[4558]: I0120 16:58:43.472959 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 16:58:43 crc kubenswrapper[4558]: I0120 16:58:43.535466 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a25d1528-cb2e-4d44-bcff-cbee0febbf9b-run-httpd\") pod \"ceilometer-0\" (UID: \"a25d1528-cb2e-4d44-bcff-cbee0febbf9b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:58:43 crc kubenswrapper[4558]: I0120 16:58:43.535509 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a25d1528-cb2e-4d44-bcff-cbee0febbf9b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a25d1528-cb2e-4d44-bcff-cbee0febbf9b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:58:43 crc kubenswrapper[4558]: I0120 16:58:43.535560 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a25d1528-cb2e-4d44-bcff-cbee0febbf9b-config-data\") pod \"ceilometer-0\" (UID: \"a25d1528-cb2e-4d44-bcff-cbee0febbf9b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:58:43 crc kubenswrapper[4558]: I0120 16:58:43.535581 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/a25d1528-cb2e-4d44-bcff-cbee0febbf9b-scripts\") pod \"ceilometer-0\" (UID: \"a25d1528-cb2e-4d44-bcff-cbee0febbf9b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:58:43 crc kubenswrapper[4558]: I0120 16:58:43.535598 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a25d1528-cb2e-4d44-bcff-cbee0febbf9b-log-httpd\") pod \"ceilometer-0\" (UID: \"a25d1528-cb2e-4d44-bcff-cbee0febbf9b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:58:43 crc kubenswrapper[4558]: I0120 16:58:43.535623 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mhtsv\" (UniqueName: \"kubernetes.io/projected/a25d1528-cb2e-4d44-bcff-cbee0febbf9b-kube-api-access-mhtsv\") pod \"ceilometer-0\" (UID: \"a25d1528-cb2e-4d44-bcff-cbee0febbf9b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:58:43 crc kubenswrapper[4558]: I0120 16:58:43.535638 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a25d1528-cb2e-4d44-bcff-cbee0febbf9b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a25d1528-cb2e-4d44-bcff-cbee0febbf9b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:58:43 crc kubenswrapper[4558]: I0120 16:58:43.637255 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a25d1528-cb2e-4d44-bcff-cbee0febbf9b-run-httpd\") pod \"ceilometer-0\" (UID: \"a25d1528-cb2e-4d44-bcff-cbee0febbf9b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:58:43 crc kubenswrapper[4558]: I0120 16:58:43.637308 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a25d1528-cb2e-4d44-bcff-cbee0febbf9b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a25d1528-cb2e-4d44-bcff-cbee0febbf9b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:58:43 crc kubenswrapper[4558]: I0120 16:58:43.637387 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a25d1528-cb2e-4d44-bcff-cbee0febbf9b-config-data\") pod \"ceilometer-0\" (UID: \"a25d1528-cb2e-4d44-bcff-cbee0febbf9b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:58:43 crc kubenswrapper[4558]: I0120 16:58:43.637419 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a25d1528-cb2e-4d44-bcff-cbee0febbf9b-scripts\") pod \"ceilometer-0\" (UID: \"a25d1528-cb2e-4d44-bcff-cbee0febbf9b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:58:43 crc kubenswrapper[4558]: I0120 16:58:43.637446 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a25d1528-cb2e-4d44-bcff-cbee0febbf9b-log-httpd\") pod \"ceilometer-0\" (UID: \"a25d1528-cb2e-4d44-bcff-cbee0febbf9b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:58:43 crc kubenswrapper[4558]: I0120 16:58:43.637484 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mhtsv\" (UniqueName: \"kubernetes.io/projected/a25d1528-cb2e-4d44-bcff-cbee0febbf9b-kube-api-access-mhtsv\") pod \"ceilometer-0\" (UID: \"a25d1528-cb2e-4d44-bcff-cbee0febbf9b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 
16:58:43 crc kubenswrapper[4558]: I0120 16:58:43.637503 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a25d1528-cb2e-4d44-bcff-cbee0febbf9b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a25d1528-cb2e-4d44-bcff-cbee0febbf9b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:58:43 crc kubenswrapper[4558]: I0120 16:58:43.637683 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a25d1528-cb2e-4d44-bcff-cbee0febbf9b-run-httpd\") pod \"ceilometer-0\" (UID: \"a25d1528-cb2e-4d44-bcff-cbee0febbf9b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:58:43 crc kubenswrapper[4558]: I0120 16:58:43.637781 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a25d1528-cb2e-4d44-bcff-cbee0febbf9b-log-httpd\") pod \"ceilometer-0\" (UID: \"a25d1528-cb2e-4d44-bcff-cbee0febbf9b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:58:43 crc kubenswrapper[4558]: I0120 16:58:43.640479 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a25d1528-cb2e-4d44-bcff-cbee0febbf9b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a25d1528-cb2e-4d44-bcff-cbee0febbf9b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:58:43 crc kubenswrapper[4558]: I0120 16:58:43.640680 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a25d1528-cb2e-4d44-bcff-cbee0febbf9b-scripts\") pod \"ceilometer-0\" (UID: \"a25d1528-cb2e-4d44-bcff-cbee0febbf9b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:58:43 crc kubenswrapper[4558]: I0120 16:58:43.641067 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a25d1528-cb2e-4d44-bcff-cbee0febbf9b-config-data\") pod \"ceilometer-0\" (UID: \"a25d1528-cb2e-4d44-bcff-cbee0febbf9b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:58:43 crc kubenswrapper[4558]: I0120 16:58:43.646558 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a25d1528-cb2e-4d44-bcff-cbee0febbf9b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a25d1528-cb2e-4d44-bcff-cbee0febbf9b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:58:43 crc kubenswrapper[4558]: I0120 16:58:43.656243 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mhtsv\" (UniqueName: \"kubernetes.io/projected/a25d1528-cb2e-4d44-bcff-cbee0febbf9b-kube-api-access-mhtsv\") pod \"ceilometer-0\" (UID: \"a25d1528-cb2e-4d44-bcff-cbee0febbf9b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:58:43 crc kubenswrapper[4558]: I0120 16:58:43.747139 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:58:43 crc kubenswrapper[4558]: I0120 16:58:43.991945 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 16:58:44 crc kubenswrapper[4558]: I0120 16:58:44.127205 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 16:58:44 crc kubenswrapper[4558]: W0120 16:58:44.128946 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda25d1528_cb2e_4d44_bcff_cbee0febbf9b.slice/crio-413e381ab29eb3eab43a0c334784566ca177e99b184159fbed62a18b9e4060e2 WatchSource:0}: Error finding container 413e381ab29eb3eab43a0c334784566ca177e99b184159fbed62a18b9e4060e2: Status 404 returned error can't find the container with id 413e381ab29eb3eab43a0c334784566ca177e99b184159fbed62a18b9e4060e2 Jan 20 16:58:44 crc kubenswrapper[4558]: I0120 16:58:44.347900 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"a25d1528-cb2e-4d44-bcff-cbee0febbf9b","Type":"ContainerStarted","Data":"413e381ab29eb3eab43a0c334784566ca177e99b184159fbed62a18b9e4060e2"} Jan 20 16:58:44 crc kubenswrapper[4558]: I0120 16:58:44.457301 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/swift-proxy-6b7b5d66dd-8plzj" Jan 20 16:58:44 crc kubenswrapper[4558]: I0120 16:58:44.573464 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d" path="/var/lib/kubelet/pods/2ffb9b74-e15e-45db-b2d8-e6b63e1eb51d/volumes" Jan 20 16:58:45 crc kubenswrapper[4558]: I0120 16:58:45.356420 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"a25d1528-cb2e-4d44-bcff-cbee0febbf9b","Type":"ContainerStarted","Data":"f28e148e09db47eceea4bf2d682b5cfd254188726d31ea5fe49b06ed3cf7b2f8"} Jan 20 16:58:46 crc kubenswrapper[4558]: I0120 16:58:46.040299 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-db-sync-k6vkv"] Jan 20 16:58:46 crc kubenswrapper[4558]: I0120 16:58:46.041367 4558 util.go:30] "No sandbox for pod can be found. 
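
The W-level entry above ("Failed to process watch event ... Status 404", manager.go) comes from the embedded cAdvisor watcher noticing the freshly created cgroup for sandbox 413e381ab29e... before CRI-O can report the container; the PLEG event about 0.2 s later shows ContainerStarted for the same ID, so in this trace the warning is the usual benign create-time race rather than a lost container. A quick way to confirm that reading over a whole log; the regex and function name here are illustrative.

import re

WARNED_RE  = re.compile(r"Error finding container ([0-9a-f]{64})")
STARTED_RE = re.compile(r'"Type":"ContainerStarted","Data":"([0-9a-f]{64})"')

def unresolved_404_warnings(log_text: str) -> set:
    """IDs that hit the 404 watch warning but never appeared in a ContainerStarted event."""
    return set(WARNED_RE.findall(log_text)) - set(STARTED_RE.findall(log_text))

# An empty result means every warned-about container did start shortly afterwards,
# as 413e381ab29e... does at 16:58:44.347900 above.
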
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-k6vkv" Jan 20 16:58:46 crc kubenswrapper[4558]: I0120 16:58:46.043133 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-nova-dockercfg-ntfs6" Jan 20 16:58:46 crc kubenswrapper[4558]: I0120 16:58:46.043225 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-conductor-config-data" Jan 20 16:58:46 crc kubenswrapper[4558]: I0120 16:58:46.047138 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-conductor-scripts" Jan 20 16:58:46 crc kubenswrapper[4558]: I0120 16:58:46.050359 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-db-sync-k6vkv"] Jan 20 16:58:46 crc kubenswrapper[4558]: I0120 16:58:46.175600 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f6c4192-92da-4aff-ade3-e502935bc96d-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-k6vkv\" (UID: \"9f6c4192-92da-4aff-ade3-e502935bc96d\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-k6vkv" Jan 20 16:58:46 crc kubenswrapper[4558]: I0120 16:58:46.175846 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f6c4192-92da-4aff-ade3-e502935bc96d-config-data\") pod \"nova-cell0-conductor-db-sync-k6vkv\" (UID: \"9f6c4192-92da-4aff-ade3-e502935bc96d\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-k6vkv" Jan 20 16:58:46 crc kubenswrapper[4558]: I0120 16:58:46.175884 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6jwd9\" (UniqueName: \"kubernetes.io/projected/9f6c4192-92da-4aff-ade3-e502935bc96d-kube-api-access-6jwd9\") pod \"nova-cell0-conductor-db-sync-k6vkv\" (UID: \"9f6c4192-92da-4aff-ade3-e502935bc96d\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-k6vkv" Jan 20 16:58:46 crc kubenswrapper[4558]: I0120 16:58:46.175926 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9f6c4192-92da-4aff-ade3-e502935bc96d-scripts\") pod \"nova-cell0-conductor-db-sync-k6vkv\" (UID: \"9f6c4192-92da-4aff-ade3-e502935bc96d\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-k6vkv" Jan 20 16:58:46 crc kubenswrapper[4558]: I0120 16:58:46.277381 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f6c4192-92da-4aff-ade3-e502935bc96d-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-k6vkv\" (UID: \"9f6c4192-92da-4aff-ade3-e502935bc96d\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-k6vkv" Jan 20 16:58:46 crc kubenswrapper[4558]: I0120 16:58:46.277431 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f6c4192-92da-4aff-ade3-e502935bc96d-config-data\") pod \"nova-cell0-conductor-db-sync-k6vkv\" (UID: \"9f6c4192-92da-4aff-ade3-e502935bc96d\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-k6vkv" Jan 20 16:58:46 crc kubenswrapper[4558]: I0120 16:58:46.277465 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6jwd9\" (UniqueName: 
\"kubernetes.io/projected/9f6c4192-92da-4aff-ade3-e502935bc96d-kube-api-access-6jwd9\") pod \"nova-cell0-conductor-db-sync-k6vkv\" (UID: \"9f6c4192-92da-4aff-ade3-e502935bc96d\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-k6vkv" Jan 20 16:58:46 crc kubenswrapper[4558]: I0120 16:58:46.277501 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9f6c4192-92da-4aff-ade3-e502935bc96d-scripts\") pod \"nova-cell0-conductor-db-sync-k6vkv\" (UID: \"9f6c4192-92da-4aff-ade3-e502935bc96d\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-k6vkv" Jan 20 16:58:46 crc kubenswrapper[4558]: I0120 16:58:46.281089 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f6c4192-92da-4aff-ade3-e502935bc96d-config-data\") pod \"nova-cell0-conductor-db-sync-k6vkv\" (UID: \"9f6c4192-92da-4aff-ade3-e502935bc96d\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-k6vkv" Jan 20 16:58:46 crc kubenswrapper[4558]: I0120 16:58:46.282561 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f6c4192-92da-4aff-ade3-e502935bc96d-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-k6vkv\" (UID: \"9f6c4192-92da-4aff-ade3-e502935bc96d\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-k6vkv" Jan 20 16:58:46 crc kubenswrapper[4558]: I0120 16:58:46.286699 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9f6c4192-92da-4aff-ade3-e502935bc96d-scripts\") pod \"nova-cell0-conductor-db-sync-k6vkv\" (UID: \"9f6c4192-92da-4aff-ade3-e502935bc96d\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-k6vkv" Jan 20 16:58:46 crc kubenswrapper[4558]: I0120 16:58:46.291734 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6jwd9\" (UniqueName: \"kubernetes.io/projected/9f6c4192-92da-4aff-ade3-e502935bc96d-kube-api-access-6jwd9\") pod \"nova-cell0-conductor-db-sync-k6vkv\" (UID: \"9f6c4192-92da-4aff-ade3-e502935bc96d\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-k6vkv" Jan 20 16:58:46 crc kubenswrapper[4558]: I0120 16:58:46.355545 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-k6vkv" Jan 20 16:58:46 crc kubenswrapper[4558]: I0120 16:58:46.367837 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"a25d1528-cb2e-4d44-bcff-cbee0febbf9b","Type":"ContainerStarted","Data":"2923167716a43fc429d5914cc58cb5e2dc67e79ac7c693e5671e0a656709c94c"} Jan 20 16:58:46 crc kubenswrapper[4558]: I0120 16:58:46.756198 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-db-sync-k6vkv"] Jan 20 16:58:47 crc kubenswrapper[4558]: I0120 16:58:47.383808 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-k6vkv" event={"ID":"9f6c4192-92da-4aff-ade3-e502935bc96d","Type":"ContainerStarted","Data":"c144a1e7860fe1410902372e72ca35e56df256ee9c79d78b3de963cc58371952"} Jan 20 16:58:47 crc kubenswrapper[4558]: I0120 16:58:47.386559 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"a25d1528-cb2e-4d44-bcff-cbee0febbf9b","Type":"ContainerStarted","Data":"ef0bee6d0daeab337417075e1abd16960c3e66d18ab72e0d85381f20662ec416"} Jan 20 16:58:48 crc kubenswrapper[4558]: I0120 16:58:48.400686 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"a25d1528-cb2e-4d44-bcff-cbee0febbf9b","Type":"ContainerStarted","Data":"5ea299b9e1a26087e75d352274e554174e1b3e1c9848de0109d15fa7e7bef405"} Jan 20 16:58:48 crc kubenswrapper[4558]: I0120 16:58:48.400878 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="a25d1528-cb2e-4d44-bcff-cbee0febbf9b" containerName="proxy-httpd" containerID="cri-o://5ea299b9e1a26087e75d352274e554174e1b3e1c9848de0109d15fa7e7bef405" gracePeriod=30 Jan 20 16:58:48 crc kubenswrapper[4558]: I0120 16:58:48.400924 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="a25d1528-cb2e-4d44-bcff-cbee0febbf9b" containerName="ceilometer-notification-agent" containerID="cri-o://2923167716a43fc429d5914cc58cb5e2dc67e79ac7c693e5671e0a656709c94c" gracePeriod=30 Jan 20 16:58:48 crc kubenswrapper[4558]: I0120 16:58:48.400934 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="a25d1528-cb2e-4d44-bcff-cbee0febbf9b" containerName="sg-core" containerID="cri-o://ef0bee6d0daeab337417075e1abd16960c3e66d18ab72e0d85381f20662ec416" gracePeriod=30 Jan 20 16:58:48 crc kubenswrapper[4558]: I0120 16:58:48.400824 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="a25d1528-cb2e-4d44-bcff-cbee0febbf9b" containerName="ceilometer-central-agent" containerID="cri-o://f28e148e09db47eceea4bf2d682b5cfd254188726d31ea5fe49b06ed3cf7b2f8" gracePeriod=30 Jan 20 16:58:48 crc kubenswrapper[4558]: I0120 16:58:48.400900 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:58:48 crc kubenswrapper[4558]: I0120 16:58:48.423212 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=1.390522019 podStartE2EDuration="5.423198337s" podCreationTimestamp="2026-01-20 16:58:43 +0000 UTC" firstStartedPulling="2026-01-20 16:58:44.133198154 +0000 UTC m=+1017.893536120" 
lastFinishedPulling="2026-01-20 16:58:48.165874471 +0000 UTC m=+1021.926212438" observedRunningTime="2026-01-20 16:58:48.415531329 +0000 UTC m=+1022.175869297" watchObservedRunningTime="2026-01-20 16:58:48.423198337 +0000 UTC m=+1022.183536303" Jan 20 16:58:49 crc kubenswrapper[4558]: I0120 16:58:49.411098 4558 generic.go:334] "Generic (PLEG): container finished" podID="a25d1528-cb2e-4d44-bcff-cbee0febbf9b" containerID="ef0bee6d0daeab337417075e1abd16960c3e66d18ab72e0d85381f20662ec416" exitCode=2 Jan 20 16:58:49 crc kubenswrapper[4558]: I0120 16:58:49.411131 4558 generic.go:334] "Generic (PLEG): container finished" podID="a25d1528-cb2e-4d44-bcff-cbee0febbf9b" containerID="2923167716a43fc429d5914cc58cb5e2dc67e79ac7c693e5671e0a656709c94c" exitCode=0 Jan 20 16:58:49 crc kubenswrapper[4558]: I0120 16:58:49.411151 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"a25d1528-cb2e-4d44-bcff-cbee0febbf9b","Type":"ContainerDied","Data":"ef0bee6d0daeab337417075e1abd16960c3e66d18ab72e0d85381f20662ec416"} Jan 20 16:58:49 crc kubenswrapper[4558]: I0120 16:58:49.411198 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"a25d1528-cb2e-4d44-bcff-cbee0febbf9b","Type":"ContainerDied","Data":"2923167716a43fc429d5914cc58cb5e2dc67e79ac7c693e5671e0a656709c94c"} Jan 20 16:58:49 crc kubenswrapper[4558]: I0120 16:58:49.956519 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 16:58:49 crc kubenswrapper[4558]: I0120 16:58:49.956714 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="90838b03-cde2-4954-a70b-b75b79a2038e" containerName="glance-log" containerID="cri-o://298ca0eadbdc74c1da1f7848d31054f8093eaec760f25c58f34284a16c073e2d" gracePeriod=30 Jan 20 16:58:49 crc kubenswrapper[4558]: I0120 16:58:49.957037 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="90838b03-cde2-4954-a70b-b75b79a2038e" containerName="glance-httpd" containerID="cri-o://46fdfa3571a318310ff740837ecf48d117f01c12a9efa467bfc8b7e395e6abd4" gracePeriod=30 Jan 20 16:58:50 crc kubenswrapper[4558]: I0120 16:58:50.419274 4558 generic.go:334] "Generic (PLEG): container finished" podID="90838b03-cde2-4954-a70b-b75b79a2038e" containerID="298ca0eadbdc74c1da1f7848d31054f8093eaec760f25c58f34284a16c073e2d" exitCode=143 Jan 20 16:58:50 crc kubenswrapper[4558]: I0120 16:58:50.419348 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"90838b03-cde2-4954-a70b-b75b79a2038e","Type":"ContainerDied","Data":"298ca0eadbdc74c1da1f7848d31054f8093eaec760f25c58f34284a16c073e2d"} Jan 20 16:58:53 crc kubenswrapper[4558]: I0120 16:58:53.391051 4558 util.go:48] "No ready sandbox for pod can be found. 
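
The pod_startup_latency_tracker entry for ceilometer-0 above reports podStartE2EDuration="5.423198337s" (watchObservedRunningTime minus podCreationTimestamp) alongside podStartSLOduration=1.390522019; the gap between the two matches the image-pull window (lastFinishedPulling minus firstStartedPulling) to within a nanosecond of rounding, so the SLO figure is the end-to-end startup time with image pulling excluded. The same relation holds for the nova-cell0-conductor-db-sync-k6vkv entry further down (7.485219651 s minus a 6.451930841 s pull window gives 1.03328881). The arithmetic, redone from the timestamps in the entry (microsecond precision, since datetime drops the nanosecond digits):

from datetime import datetime

def parse(ts: str) -> datetime:
    # e.g. "2026-01-20 16:58:48.165874471 +0000 UTC" -> aware datetime, truncated to microseconds
    date, clock, offset = ts.replace(" UTC", "").split()
    if "." in clock:
        whole, frac = clock.split(".")
        clock = f"{whole}.{frac[:6]}"
    else:
        clock += ".000000"
    return datetime.strptime(f"{date} {clock} {offset}", "%Y-%m-%d %H:%M:%S.%f %z")

created    = parse("2026-01-20 16:58:43 +0000 UTC")            # podCreationTimestamp
first_pull = parse("2026-01-20 16:58:44.133198154 +0000 UTC")  # firstStartedPulling
last_pull  = parse("2026-01-20 16:58:48.165874471 +0000 UTC")  # lastFinishedPulling
running    = parse("2026-01-20 16:58:48.423198337 +0000 UTC")  # watchObservedRunningTime

e2e  = (running - created).total_seconds()       # 5.423198
pull = (last_pull - first_pull).total_seconds()  # 4.032676
print(f"{e2e:.6f}s end to end, {e2e - pull:.6f}s excluding image pulls")
# -> 5.423198s end to end, 1.390522s excluding image pulls
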
Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:58:53 crc kubenswrapper[4558]: I0120 16:58:53.395130 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/90838b03-cde2-4954-a70b-b75b79a2038e-logs\") pod \"90838b03-cde2-4954-a70b-b75b79a2038e\" (UID: \"90838b03-cde2-4954-a70b-b75b79a2038e\") " Jan 20 16:58:53 crc kubenswrapper[4558]: I0120 16:58:53.395223 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qhfsv\" (UniqueName: \"kubernetes.io/projected/90838b03-cde2-4954-a70b-b75b79a2038e-kube-api-access-qhfsv\") pod \"90838b03-cde2-4954-a70b-b75b79a2038e\" (UID: \"90838b03-cde2-4954-a70b-b75b79a2038e\") " Jan 20 16:58:53 crc kubenswrapper[4558]: I0120 16:58:53.395269 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/90838b03-cde2-4954-a70b-b75b79a2038e-config-data\") pod \"90838b03-cde2-4954-a70b-b75b79a2038e\" (UID: \"90838b03-cde2-4954-a70b-b75b79a2038e\") " Jan 20 16:58:53 crc kubenswrapper[4558]: I0120 16:58:53.395296 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/90838b03-cde2-4954-a70b-b75b79a2038e-internal-tls-certs\") pod \"90838b03-cde2-4954-a70b-b75b79a2038e\" (UID: \"90838b03-cde2-4954-a70b-b75b79a2038e\") " Jan 20 16:58:53 crc kubenswrapper[4558]: I0120 16:58:53.395311 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/90838b03-cde2-4954-a70b-b75b79a2038e-scripts\") pod \"90838b03-cde2-4954-a70b-b75b79a2038e\" (UID: \"90838b03-cde2-4954-a70b-b75b79a2038e\") " Jan 20 16:58:53 crc kubenswrapper[4558]: I0120 16:58:53.395341 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"90838b03-cde2-4954-a70b-b75b79a2038e\" (UID: \"90838b03-cde2-4954-a70b-b75b79a2038e\") " Jan 20 16:58:53 crc kubenswrapper[4558]: I0120 16:58:53.395400 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/90838b03-cde2-4954-a70b-b75b79a2038e-httpd-run\") pod \"90838b03-cde2-4954-a70b-b75b79a2038e\" (UID: \"90838b03-cde2-4954-a70b-b75b79a2038e\") " Jan 20 16:58:53 crc kubenswrapper[4558]: I0120 16:58:53.395422 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90838b03-cde2-4954-a70b-b75b79a2038e-combined-ca-bundle\") pod \"90838b03-cde2-4954-a70b-b75b79a2038e\" (UID: \"90838b03-cde2-4954-a70b-b75b79a2038e\") " Jan 20 16:58:53 crc kubenswrapper[4558]: I0120 16:58:53.409853 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/90838b03-cde2-4954-a70b-b75b79a2038e-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "90838b03-cde2-4954-a70b-b75b79a2038e" (UID: "90838b03-cde2-4954-a70b-b75b79a2038e"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 16:58:53 crc kubenswrapper[4558]: I0120 16:58:53.410774 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/90838b03-cde2-4954-a70b-b75b79a2038e-logs" (OuterVolumeSpecName: "logs") pod "90838b03-cde2-4954-a70b-b75b79a2038e" (UID: "90838b03-cde2-4954-a70b-b75b79a2038e"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 16:58:53 crc kubenswrapper[4558]: I0120 16:58:53.413847 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/90838b03-cde2-4954-a70b-b75b79a2038e-scripts" (OuterVolumeSpecName: "scripts") pod "90838b03-cde2-4954-a70b-b75b79a2038e" (UID: "90838b03-cde2-4954-a70b-b75b79a2038e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:58:53 crc kubenswrapper[4558]: I0120 16:58:53.431677 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/90838b03-cde2-4954-a70b-b75b79a2038e-kube-api-access-qhfsv" (OuterVolumeSpecName: "kube-api-access-qhfsv") pod "90838b03-cde2-4954-a70b-b75b79a2038e" (UID: "90838b03-cde2-4954-a70b-b75b79a2038e"). InnerVolumeSpecName "kube-api-access-qhfsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:58:53 crc kubenswrapper[4558]: I0120 16:58:53.432284 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "glance") pod "90838b03-cde2-4954-a70b-b75b79a2038e" (UID: "90838b03-cde2-4954-a70b-b75b79a2038e"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 16:58:53 crc kubenswrapper[4558]: I0120 16:58:53.453735 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/90838b03-cde2-4954-a70b-b75b79a2038e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "90838b03-cde2-4954-a70b-b75b79a2038e" (UID: "90838b03-cde2-4954-a70b-b75b79a2038e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:58:53 crc kubenswrapper[4558]: I0120 16:58:53.463316 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/90838b03-cde2-4954-a70b-b75b79a2038e-config-data" (OuterVolumeSpecName: "config-data") pod "90838b03-cde2-4954-a70b-b75b79a2038e" (UID: "90838b03-cde2-4954-a70b-b75b79a2038e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:58:53 crc kubenswrapper[4558]: I0120 16:58:53.465725 4558 generic.go:334] "Generic (PLEG): container finished" podID="90838b03-cde2-4954-a70b-b75b79a2038e" containerID="46fdfa3571a318310ff740837ecf48d117f01c12a9efa467bfc8b7e395e6abd4" exitCode=0 Jan 20 16:58:53 crc kubenswrapper[4558]: I0120 16:58:53.465785 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:58:53 crc kubenswrapper[4558]: I0120 16:58:53.465785 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"90838b03-cde2-4954-a70b-b75b79a2038e","Type":"ContainerDied","Data":"46fdfa3571a318310ff740837ecf48d117f01c12a9efa467bfc8b7e395e6abd4"} Jan 20 16:58:53 crc kubenswrapper[4558]: I0120 16:58:53.465818 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"90838b03-cde2-4954-a70b-b75b79a2038e","Type":"ContainerDied","Data":"5fee9c465f43def4d3a086596df727cd73ce8459d36cf5743c6a11f53e7071d9"} Jan 20 16:58:53 crc kubenswrapper[4558]: I0120 16:58:53.465833 4558 scope.go:117] "RemoveContainer" containerID="46fdfa3571a318310ff740837ecf48d117f01c12a9efa467bfc8b7e395e6abd4" Jan 20 16:58:53 crc kubenswrapper[4558]: I0120 16:58:53.469060 4558 generic.go:334] "Generic (PLEG): container finished" podID="a25d1528-cb2e-4d44-bcff-cbee0febbf9b" containerID="f28e148e09db47eceea4bf2d682b5cfd254188726d31ea5fe49b06ed3cf7b2f8" exitCode=0 Jan 20 16:58:53 crc kubenswrapper[4558]: I0120 16:58:53.469253 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"a25d1528-cb2e-4d44-bcff-cbee0febbf9b","Type":"ContainerDied","Data":"f28e148e09db47eceea4bf2d682b5cfd254188726d31ea5fe49b06ed3cf7b2f8"} Jan 20 16:58:53 crc kubenswrapper[4558]: I0120 16:58:53.470444 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-k6vkv" event={"ID":"9f6c4192-92da-4aff-ade3-e502935bc96d","Type":"ContainerStarted","Data":"3f5e26b3d7dc3e18ff91d7a5ac659587be205d5cf45b39eccf8b2cb509ff08d2"} Jan 20 16:58:53 crc kubenswrapper[4558]: I0120 16:58:53.474210 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/90838b03-cde2-4954-a70b-b75b79a2038e-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "90838b03-cde2-4954-a70b-b75b79a2038e" (UID: "90838b03-cde2-4954-a70b-b75b79a2038e"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:58:53 crc kubenswrapper[4558]: I0120 16:58:53.485234 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-k6vkv" podStartSLOduration=1.03328881 podStartE2EDuration="7.485219651s" podCreationTimestamp="2026-01-20 16:58:46 +0000 UTC" firstStartedPulling="2026-01-20 16:58:46.761613848 +0000 UTC m=+1020.521951815" lastFinishedPulling="2026-01-20 16:58:53.213544689 +0000 UTC m=+1026.973882656" observedRunningTime="2026-01-20 16:58:53.484384701 +0000 UTC m=+1027.244722668" watchObservedRunningTime="2026-01-20 16:58:53.485219651 +0000 UTC m=+1027.245557618" Jan 20 16:58:53 crc kubenswrapper[4558]: I0120 16:58:53.493957 4558 scope.go:117] "RemoveContainer" containerID="298ca0eadbdc74c1da1f7848d31054f8093eaec760f25c58f34284a16c073e2d" Jan 20 16:58:53 crc kubenswrapper[4558]: I0120 16:58:53.499215 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/90838b03-cde2-4954-a70b-b75b79a2038e-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 16:58:53 crc kubenswrapper[4558]: I0120 16:58:53.499238 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/90838b03-cde2-4954-a70b-b75b79a2038e-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 16:58:53 crc kubenswrapper[4558]: I0120 16:58:53.499247 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/90838b03-cde2-4954-a70b-b75b79a2038e-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 16:58:53 crc kubenswrapper[4558]: I0120 16:58:53.499264 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Jan 20 16:58:53 crc kubenswrapper[4558]: I0120 16:58:53.499272 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/90838b03-cde2-4954-a70b-b75b79a2038e-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 16:58:53 crc kubenswrapper[4558]: I0120 16:58:53.499280 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90838b03-cde2-4954-a70b-b75b79a2038e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 16:58:53 crc kubenswrapper[4558]: I0120 16:58:53.499287 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/90838b03-cde2-4954-a70b-b75b79a2038e-logs\") on node \"crc\" DevicePath \"\"" Jan 20 16:58:53 crc kubenswrapper[4558]: I0120 16:58:53.499295 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qhfsv\" (UniqueName: \"kubernetes.io/projected/90838b03-cde2-4954-a70b-b75b79a2038e-kube-api-access-qhfsv\") on node \"crc\" DevicePath \"\"" Jan 20 16:58:53 crc kubenswrapper[4558]: I0120 16:58:53.510021 4558 scope.go:117] "RemoveContainer" containerID="46fdfa3571a318310ff740837ecf48d117f01c12a9efa467bfc8b7e395e6abd4" Jan 20 16:58:53 crc kubenswrapper[4558]: E0120 16:58:53.510385 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"46fdfa3571a318310ff740837ecf48d117f01c12a9efa467bfc8b7e395e6abd4\": container with ID starting with 46fdfa3571a318310ff740837ecf48d117f01c12a9efa467bfc8b7e395e6abd4 not found: ID does not exist" 
containerID="46fdfa3571a318310ff740837ecf48d117f01c12a9efa467bfc8b7e395e6abd4" Jan 20 16:58:53 crc kubenswrapper[4558]: I0120 16:58:53.510420 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"46fdfa3571a318310ff740837ecf48d117f01c12a9efa467bfc8b7e395e6abd4"} err="failed to get container status \"46fdfa3571a318310ff740837ecf48d117f01c12a9efa467bfc8b7e395e6abd4\": rpc error: code = NotFound desc = could not find container \"46fdfa3571a318310ff740837ecf48d117f01c12a9efa467bfc8b7e395e6abd4\": container with ID starting with 46fdfa3571a318310ff740837ecf48d117f01c12a9efa467bfc8b7e395e6abd4 not found: ID does not exist" Jan 20 16:58:53 crc kubenswrapper[4558]: I0120 16:58:53.510444 4558 scope.go:117] "RemoveContainer" containerID="298ca0eadbdc74c1da1f7848d31054f8093eaec760f25c58f34284a16c073e2d" Jan 20 16:58:53 crc kubenswrapper[4558]: E0120 16:58:53.510710 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"298ca0eadbdc74c1da1f7848d31054f8093eaec760f25c58f34284a16c073e2d\": container with ID starting with 298ca0eadbdc74c1da1f7848d31054f8093eaec760f25c58f34284a16c073e2d not found: ID does not exist" containerID="298ca0eadbdc74c1da1f7848d31054f8093eaec760f25c58f34284a16c073e2d" Jan 20 16:58:53 crc kubenswrapper[4558]: I0120 16:58:53.510734 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"298ca0eadbdc74c1da1f7848d31054f8093eaec760f25c58f34284a16c073e2d"} err="failed to get container status \"298ca0eadbdc74c1da1f7848d31054f8093eaec760f25c58f34284a16c073e2d\": rpc error: code = NotFound desc = could not find container \"298ca0eadbdc74c1da1f7848d31054f8093eaec760f25c58f34284a16c073e2d\": container with ID starting with 298ca0eadbdc74c1da1f7848d31054f8093eaec760f25c58f34284a16c073e2d not found: ID does not exist" Jan 20 16:58:53 crc kubenswrapper[4558]: I0120 16:58:53.516438 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Jan 20 16:58:53 crc kubenswrapper[4558]: I0120 16:58:53.602316 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Jan 20 16:58:53 crc kubenswrapper[4558]: I0120 16:58:53.792360 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 16:58:53 crc kubenswrapper[4558]: I0120 16:58:53.797518 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 16:58:53 crc kubenswrapper[4558]: I0120 16:58:53.810767 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 16:58:53 crc kubenswrapper[4558]: E0120 16:58:53.811055 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="90838b03-cde2-4954-a70b-b75b79a2038e" containerName="glance-log" Jan 20 16:58:53 crc kubenswrapper[4558]: I0120 16:58:53.811071 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="90838b03-cde2-4954-a70b-b75b79a2038e" containerName="glance-log" Jan 20 16:58:53 crc kubenswrapper[4558]: E0120 16:58:53.811087 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="90838b03-cde2-4954-a70b-b75b79a2038e" containerName="glance-httpd" Jan 20 16:58:53 crc kubenswrapper[4558]: I0120 
16:58:53.811093 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="90838b03-cde2-4954-a70b-b75b79a2038e" containerName="glance-httpd" Jan 20 16:58:53 crc kubenswrapper[4558]: I0120 16:58:53.811255 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="90838b03-cde2-4954-a70b-b75b79a2038e" containerName="glance-httpd" Jan 20 16:58:53 crc kubenswrapper[4558]: I0120 16:58:53.811285 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="90838b03-cde2-4954-a70b-b75b79a2038e" containerName="glance-log" Jan 20 16:58:53 crc kubenswrapper[4558]: I0120 16:58:53.812030 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:58:53 crc kubenswrapper[4558]: I0120 16:58:53.813419 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-internal-svc" Jan 20 16:58:53 crc kubenswrapper[4558]: I0120 16:58:53.813702 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-internal-config-data" Jan 20 16:58:53 crc kubenswrapper[4558]: I0120 16:58:53.821485 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 16:58:53 crc kubenswrapper[4558]: I0120 16:58:53.907905 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b28e6999-784e-4577-88bb-db648f7a3cbc-logs\") pod \"glance-default-internal-api-0\" (UID: \"b28e6999-784e-4577-88bb-db648f7a3cbc\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:58:53 crc kubenswrapper[4558]: I0120 16:58:53.907944 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b28e6999-784e-4577-88bb-db648f7a3cbc-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"b28e6999-784e-4577-88bb-db648f7a3cbc\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:58:53 crc kubenswrapper[4558]: I0120 16:58:53.907984 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b28e6999-784e-4577-88bb-db648f7a3cbc-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"b28e6999-784e-4577-88bb-db648f7a3cbc\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:58:53 crc kubenswrapper[4558]: I0120 16:58:53.908011 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b28e6999-784e-4577-88bb-db648f7a3cbc-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"b28e6999-784e-4577-88bb-db648f7a3cbc\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:58:53 crc kubenswrapper[4558]: I0120 16:58:53.908087 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b28e6999-784e-4577-88bb-db648f7a3cbc-scripts\") pod \"glance-default-internal-api-0\" (UID: \"b28e6999-784e-4577-88bb-db648f7a3cbc\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:58:53 crc kubenswrapper[4558]: I0120 16:58:53.908114 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-9wtwl\" (UniqueName: \"kubernetes.io/projected/b28e6999-784e-4577-88bb-db648f7a3cbc-kube-api-access-9wtwl\") pod \"glance-default-internal-api-0\" (UID: \"b28e6999-784e-4577-88bb-db648f7a3cbc\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:58:53 crc kubenswrapper[4558]: I0120 16:58:53.908140 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"b28e6999-784e-4577-88bb-db648f7a3cbc\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:58:53 crc kubenswrapper[4558]: I0120 16:58:53.908156 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b28e6999-784e-4577-88bb-db648f7a3cbc-config-data\") pod \"glance-default-internal-api-0\" (UID: \"b28e6999-784e-4577-88bb-db648f7a3cbc\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:58:54 crc kubenswrapper[4558]: I0120 16:58:54.009514 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b28e6999-784e-4577-88bb-db648f7a3cbc-scripts\") pod \"glance-default-internal-api-0\" (UID: \"b28e6999-784e-4577-88bb-db648f7a3cbc\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:58:54 crc kubenswrapper[4558]: I0120 16:58:54.009559 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9wtwl\" (UniqueName: \"kubernetes.io/projected/b28e6999-784e-4577-88bb-db648f7a3cbc-kube-api-access-9wtwl\") pod \"glance-default-internal-api-0\" (UID: \"b28e6999-784e-4577-88bb-db648f7a3cbc\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:58:54 crc kubenswrapper[4558]: I0120 16:58:54.009591 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"b28e6999-784e-4577-88bb-db648f7a3cbc\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:58:54 crc kubenswrapper[4558]: I0120 16:58:54.009606 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b28e6999-784e-4577-88bb-db648f7a3cbc-config-data\") pod \"glance-default-internal-api-0\" (UID: \"b28e6999-784e-4577-88bb-db648f7a3cbc\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:58:54 crc kubenswrapper[4558]: I0120 16:58:54.009808 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b28e6999-784e-4577-88bb-db648f7a3cbc-logs\") pod \"glance-default-internal-api-0\" (UID: \"b28e6999-784e-4577-88bb-db648f7a3cbc\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:58:54 crc kubenswrapper[4558]: I0120 16:58:54.009834 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b28e6999-784e-4577-88bb-db648f7a3cbc-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"b28e6999-784e-4577-88bb-db648f7a3cbc\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:58:54 crc kubenswrapper[4558]: I0120 16:58:54.009824 4558 operation_generator.go:580] 
"MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"b28e6999-784e-4577-88bb-db648f7a3cbc\") device mount path \"/mnt/openstack/pv07\"" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:58:54 crc kubenswrapper[4558]: I0120 16:58:54.010063 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b28e6999-784e-4577-88bb-db648f7a3cbc-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"b28e6999-784e-4577-88bb-db648f7a3cbc\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:58:54 crc kubenswrapper[4558]: I0120 16:58:54.010111 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b28e6999-784e-4577-88bb-db648f7a3cbc-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"b28e6999-784e-4577-88bb-db648f7a3cbc\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:58:54 crc kubenswrapper[4558]: I0120 16:58:54.010290 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b28e6999-784e-4577-88bb-db648f7a3cbc-logs\") pod \"glance-default-internal-api-0\" (UID: \"b28e6999-784e-4577-88bb-db648f7a3cbc\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:58:54 crc kubenswrapper[4558]: I0120 16:58:54.010448 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b28e6999-784e-4577-88bb-db648f7a3cbc-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"b28e6999-784e-4577-88bb-db648f7a3cbc\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:58:54 crc kubenswrapper[4558]: I0120 16:58:54.014797 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b28e6999-784e-4577-88bb-db648f7a3cbc-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"b28e6999-784e-4577-88bb-db648f7a3cbc\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:58:54 crc kubenswrapper[4558]: I0120 16:58:54.014875 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b28e6999-784e-4577-88bb-db648f7a3cbc-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"b28e6999-784e-4577-88bb-db648f7a3cbc\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:58:54 crc kubenswrapper[4558]: I0120 16:58:54.015753 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b28e6999-784e-4577-88bb-db648f7a3cbc-scripts\") pod \"glance-default-internal-api-0\" (UID: \"b28e6999-784e-4577-88bb-db648f7a3cbc\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:58:54 crc kubenswrapper[4558]: I0120 16:58:54.024645 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b28e6999-784e-4577-88bb-db648f7a3cbc-config-data\") pod \"glance-default-internal-api-0\" (UID: \"b28e6999-784e-4577-88bb-db648f7a3cbc\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:58:54 crc kubenswrapper[4558]: I0120 16:58:54.025754 4558 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-9wtwl\" (UniqueName: \"kubernetes.io/projected/b28e6999-784e-4577-88bb-db648f7a3cbc-kube-api-access-9wtwl\") pod \"glance-default-internal-api-0\" (UID: \"b28e6999-784e-4577-88bb-db648f7a3cbc\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:58:54 crc kubenswrapper[4558]: I0120 16:58:54.035458 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"b28e6999-784e-4577-88bb-db648f7a3cbc\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:58:54 crc kubenswrapper[4558]: I0120 16:58:54.134069 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:58:54 crc kubenswrapper[4558]: I0120 16:58:54.511873 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 16:58:54 crc kubenswrapper[4558]: I0120 16:58:54.588486 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="90838b03-cde2-4954-a70b-b75b79a2038e" path="/var/lib/kubelet/pods/90838b03-cde2-4954-a70b-b75b79a2038e/volumes" Jan 20 16:58:55 crc kubenswrapper[4558]: I0120 16:58:55.511027 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"b28e6999-784e-4577-88bb-db648f7a3cbc","Type":"ContainerStarted","Data":"a79b4e6f8ce00fed51a727ad2bc430351f4d330704b2b9cb8bb5ce9d963a3a12"} Jan 20 16:58:55 crc kubenswrapper[4558]: I0120 16:58:55.511410 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"b28e6999-784e-4577-88bb-db648f7a3cbc","Type":"ContainerStarted","Data":"ed2110d076583425a8af34103095d99e22acc0c9b835eb2f9a4e87e9dc475ca6"} Jan 20 16:58:55 crc kubenswrapper[4558]: I0120 16:58:55.511424 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"b28e6999-784e-4577-88bb-db648f7a3cbc","Type":"ContainerStarted","Data":"44e57d20387387b66617c4a66fe7686baa61d70b4659ee4cac3fd72de4c54b44"} Jan 20 16:58:55 crc kubenswrapper[4558]: I0120 16:58:55.528282 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-internal-api-0" podStartSLOduration=2.528268904 podStartE2EDuration="2.528268904s" podCreationTimestamp="2026-01-20 16:58:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:58:55.528027228 +0000 UTC m=+1029.288365195" watchObservedRunningTime="2026-01-20 16:58:55.528268904 +0000 UTC m=+1029.288606870" Jan 20 16:58:58 crc kubenswrapper[4558]: I0120 16:58:58.529567 4558 generic.go:334] "Generic (PLEG): container finished" podID="9f6c4192-92da-4aff-ade3-e502935bc96d" containerID="3f5e26b3d7dc3e18ff91d7a5ac659587be205d5cf45b39eccf8b2cb509ff08d2" exitCode=0 Jan 20 16:58:58 crc kubenswrapper[4558]: I0120 16:58:58.529663 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-k6vkv" event={"ID":"9f6c4192-92da-4aff-ade3-e502935bc96d","Type":"ContainerDied","Data":"3f5e26b3d7dc3e18ff91d7a5ac659587be205d5cf45b39eccf8b2cb509ff08d2"} Jan 20 16:58:59 crc kubenswrapper[4558]: I0120 16:58:59.264484 4558 kubelet.go:2437] 
"SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 16:58:59 crc kubenswrapper[4558]: I0120 16:58:59.264876 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="0cdccf60-59fa-48ac-997d-0b60175ce57e" containerName="glance-log" containerID="cri-o://93f8e8d586fe38469a94ae23eddbfc980dd890c101b66c86fce3f0bbfc106d56" gracePeriod=30 Jan 20 16:58:59 crc kubenswrapper[4558]: I0120 16:58:59.264941 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="0cdccf60-59fa-48ac-997d-0b60175ce57e" containerName="glance-httpd" containerID="cri-o://294a55c018cc11da5fc8b4a83211a9da8f8f646c9ce5de9f03eaacc39bb09bf3" gracePeriod=30 Jan 20 16:58:59 crc kubenswrapper[4558]: I0120 16:58:59.539228 4558 generic.go:334] "Generic (PLEG): container finished" podID="0cdccf60-59fa-48ac-997d-0b60175ce57e" containerID="93f8e8d586fe38469a94ae23eddbfc980dd890c101b66c86fce3f0bbfc106d56" exitCode=143 Jan 20 16:58:59 crc kubenswrapper[4558]: I0120 16:58:59.539324 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"0cdccf60-59fa-48ac-997d-0b60175ce57e","Type":"ContainerDied","Data":"93f8e8d586fe38469a94ae23eddbfc980dd890c101b66c86fce3f0bbfc106d56"} Jan 20 16:58:59 crc kubenswrapper[4558]: I0120 16:58:59.786524 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-k6vkv" Jan 20 16:58:59 crc kubenswrapper[4558]: I0120 16:58:59.904271 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f6c4192-92da-4aff-ade3-e502935bc96d-combined-ca-bundle\") pod \"9f6c4192-92da-4aff-ade3-e502935bc96d\" (UID: \"9f6c4192-92da-4aff-ade3-e502935bc96d\") " Jan 20 16:58:59 crc kubenswrapper[4558]: I0120 16:58:59.904431 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9f6c4192-92da-4aff-ade3-e502935bc96d-scripts\") pod \"9f6c4192-92da-4aff-ade3-e502935bc96d\" (UID: \"9f6c4192-92da-4aff-ade3-e502935bc96d\") " Jan 20 16:58:59 crc kubenswrapper[4558]: I0120 16:58:59.904505 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6jwd9\" (UniqueName: \"kubernetes.io/projected/9f6c4192-92da-4aff-ade3-e502935bc96d-kube-api-access-6jwd9\") pod \"9f6c4192-92da-4aff-ade3-e502935bc96d\" (UID: \"9f6c4192-92da-4aff-ade3-e502935bc96d\") " Jan 20 16:58:59 crc kubenswrapper[4558]: I0120 16:58:59.904532 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f6c4192-92da-4aff-ade3-e502935bc96d-config-data\") pod \"9f6c4192-92da-4aff-ade3-e502935bc96d\" (UID: \"9f6c4192-92da-4aff-ade3-e502935bc96d\") " Jan 20 16:58:59 crc kubenswrapper[4558]: I0120 16:58:59.908944 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f6c4192-92da-4aff-ade3-e502935bc96d-scripts" (OuterVolumeSpecName: "scripts") pod "9f6c4192-92da-4aff-ade3-e502935bc96d" (UID: "9f6c4192-92da-4aff-ade3-e502935bc96d"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:58:59 crc kubenswrapper[4558]: I0120 16:58:59.909727 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9f6c4192-92da-4aff-ade3-e502935bc96d-kube-api-access-6jwd9" (OuterVolumeSpecName: "kube-api-access-6jwd9") pod "9f6c4192-92da-4aff-ade3-e502935bc96d" (UID: "9f6c4192-92da-4aff-ade3-e502935bc96d"). InnerVolumeSpecName "kube-api-access-6jwd9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:58:59 crc kubenswrapper[4558]: I0120 16:58:59.924207 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f6c4192-92da-4aff-ade3-e502935bc96d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9f6c4192-92da-4aff-ade3-e502935bc96d" (UID: "9f6c4192-92da-4aff-ade3-e502935bc96d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:58:59 crc kubenswrapper[4558]: I0120 16:58:59.925315 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f6c4192-92da-4aff-ade3-e502935bc96d-config-data" (OuterVolumeSpecName: "config-data") pod "9f6c4192-92da-4aff-ade3-e502935bc96d" (UID: "9f6c4192-92da-4aff-ade3-e502935bc96d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:59:00 crc kubenswrapper[4558]: I0120 16:59:00.006870 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f6c4192-92da-4aff-ade3-e502935bc96d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 16:59:00 crc kubenswrapper[4558]: I0120 16:59:00.006907 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9f6c4192-92da-4aff-ade3-e502935bc96d-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 16:59:00 crc kubenswrapper[4558]: I0120 16:59:00.006917 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6jwd9\" (UniqueName: \"kubernetes.io/projected/9f6c4192-92da-4aff-ade3-e502935bc96d-kube-api-access-6jwd9\") on node \"crc\" DevicePath \"\"" Jan 20 16:59:00 crc kubenswrapper[4558]: I0120 16:59:00.006927 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f6c4192-92da-4aff-ade3-e502935bc96d-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 16:59:00 crc kubenswrapper[4558]: I0120 16:59:00.547378 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-k6vkv" event={"ID":"9f6c4192-92da-4aff-ade3-e502935bc96d","Type":"ContainerDied","Data":"c144a1e7860fe1410902372e72ca35e56df256ee9c79d78b3de963cc58371952"} Jan 20 16:59:00 crc kubenswrapper[4558]: I0120 16:59:00.547413 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c144a1e7860fe1410902372e72ca35e56df256ee9c79d78b3de963cc58371952" Jan 20 16:59:00 crc kubenswrapper[4558]: I0120 16:59:00.547417 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-k6vkv" Jan 20 16:59:00 crc kubenswrapper[4558]: I0120 16:59:00.611505 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 16:59:00 crc kubenswrapper[4558]: E0120 16:59:00.611833 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f6c4192-92da-4aff-ade3-e502935bc96d" containerName="nova-cell0-conductor-db-sync" Jan 20 16:59:00 crc kubenswrapper[4558]: I0120 16:59:00.611849 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f6c4192-92da-4aff-ade3-e502935bc96d" containerName="nova-cell0-conductor-db-sync" Jan 20 16:59:00 crc kubenswrapper[4558]: I0120 16:59:00.612026 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="9f6c4192-92da-4aff-ade3-e502935bc96d" containerName="nova-cell0-conductor-db-sync" Jan 20 16:59:00 crc kubenswrapper[4558]: I0120 16:59:00.612535 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 16:59:00 crc kubenswrapper[4558]: I0120 16:59:00.614485 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-conductor-config-data" Jan 20 16:59:00 crc kubenswrapper[4558]: I0120 16:59:00.615106 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-nova-dockercfg-ntfs6" Jan 20 16:59:00 crc kubenswrapper[4558]: I0120 16:59:00.623135 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 16:59:00 crc kubenswrapper[4558]: I0120 16:59:00.717246 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-scrpp\" (UniqueName: \"kubernetes.io/projected/8fb1a2dc-0a3d-4976-98ca-16a0dd66990f-kube-api-access-scrpp\") pod \"nova-cell0-conductor-0\" (UID: \"8fb1a2dc-0a3d-4976-98ca-16a0dd66990f\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 16:59:00 crc kubenswrapper[4558]: I0120 16:59:00.717428 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8fb1a2dc-0a3d-4976-98ca-16a0dd66990f-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"8fb1a2dc-0a3d-4976-98ca-16a0dd66990f\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 16:59:00 crc kubenswrapper[4558]: I0120 16:59:00.717550 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8fb1a2dc-0a3d-4976-98ca-16a0dd66990f-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"8fb1a2dc-0a3d-4976-98ca-16a0dd66990f\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 16:59:00 crc kubenswrapper[4558]: I0120 16:59:00.818909 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-scrpp\" (UniqueName: \"kubernetes.io/projected/8fb1a2dc-0a3d-4976-98ca-16a0dd66990f-kube-api-access-scrpp\") pod \"nova-cell0-conductor-0\" (UID: \"8fb1a2dc-0a3d-4976-98ca-16a0dd66990f\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 16:59:00 crc kubenswrapper[4558]: I0120 16:59:00.818995 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8fb1a2dc-0a3d-4976-98ca-16a0dd66990f-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" 
(UID: \"8fb1a2dc-0a3d-4976-98ca-16a0dd66990f\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 16:59:00 crc kubenswrapper[4558]: I0120 16:59:00.819036 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8fb1a2dc-0a3d-4976-98ca-16a0dd66990f-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"8fb1a2dc-0a3d-4976-98ca-16a0dd66990f\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 16:59:00 crc kubenswrapper[4558]: I0120 16:59:00.823224 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8fb1a2dc-0a3d-4976-98ca-16a0dd66990f-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"8fb1a2dc-0a3d-4976-98ca-16a0dd66990f\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 16:59:00 crc kubenswrapper[4558]: I0120 16:59:00.823291 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8fb1a2dc-0a3d-4976-98ca-16a0dd66990f-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"8fb1a2dc-0a3d-4976-98ca-16a0dd66990f\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 16:59:00 crc kubenswrapper[4558]: I0120 16:59:00.833225 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-scrpp\" (UniqueName: \"kubernetes.io/projected/8fb1a2dc-0a3d-4976-98ca-16a0dd66990f-kube-api-access-scrpp\") pod \"nova-cell0-conductor-0\" (UID: \"8fb1a2dc-0a3d-4976-98ca-16a0dd66990f\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 16:59:00 crc kubenswrapper[4558]: I0120 16:59:00.935218 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 16:59:01 crc kubenswrapper[4558]: I0120 16:59:01.319412 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 16:59:01 crc kubenswrapper[4558]: W0120 16:59:01.321433 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8fb1a2dc_0a3d_4976_98ca_16a0dd66990f.slice/crio-b8fead0f0fa01b79a7127b43e5354e3138d01e3c96ff170fd8c31cd7da99915f WatchSource:0}: Error finding container b8fead0f0fa01b79a7127b43e5354e3138d01e3c96ff170fd8c31cd7da99915f: Status 404 returned error can't find the container with id b8fead0f0fa01b79a7127b43e5354e3138d01e3c96ff170fd8c31cd7da99915f Jan 20 16:59:01 crc kubenswrapper[4558]: I0120 16:59:01.554720 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"8fb1a2dc-0a3d-4976-98ca-16a0dd66990f","Type":"ContainerStarted","Data":"22730bfcc8aa4a00b58336970b30eda6bb7a2ab5925c129af77003ea774a038b"} Jan 20 16:59:01 crc kubenswrapper[4558]: I0120 16:59:01.554759 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"8fb1a2dc-0a3d-4976-98ca-16a0dd66990f","Type":"ContainerStarted","Data":"b8fead0f0fa01b79a7127b43e5354e3138d01e3c96ff170fd8c31cd7da99915f"} Jan 20 16:59:01 crc kubenswrapper[4558]: I0120 16:59:01.554844 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 16:59:01 crc kubenswrapper[4558]: I0120 16:59:01.574908 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell0-conductor-0" 
podStartSLOduration=1.574892339 podStartE2EDuration="1.574892339s" podCreationTimestamp="2026-01-20 16:59:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:59:01.569342905 +0000 UTC m=+1035.329680872" watchObservedRunningTime="2026-01-20 16:59:01.574892339 +0000 UTC m=+1035.335230307" Jan 20 16:59:02 crc kubenswrapper[4558]: I0120 16:59:02.562153 4558 generic.go:334] "Generic (PLEG): container finished" podID="0cdccf60-59fa-48ac-997d-0b60175ce57e" containerID="294a55c018cc11da5fc8b4a83211a9da8f8f646c9ce5de9f03eaacc39bb09bf3" exitCode=0 Jan 20 16:59:02 crc kubenswrapper[4558]: I0120 16:59:02.562552 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"0cdccf60-59fa-48ac-997d-0b60175ce57e","Type":"ContainerDied","Data":"294a55c018cc11da5fc8b4a83211a9da8f8f646c9ce5de9f03eaacc39bb09bf3"} Jan 20 16:59:02 crc kubenswrapper[4558]: I0120 16:59:02.740142 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:59:02 crc kubenswrapper[4558]: I0120 16:59:02.845252 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0cdccf60-59fa-48ac-997d-0b60175ce57e-public-tls-certs\") pod \"0cdccf60-59fa-48ac-997d-0b60175ce57e\" (UID: \"0cdccf60-59fa-48ac-997d-0b60175ce57e\") " Jan 20 16:59:02 crc kubenswrapper[4558]: I0120 16:59:02.845310 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0cdccf60-59fa-48ac-997d-0b60175ce57e-scripts\") pod \"0cdccf60-59fa-48ac-997d-0b60175ce57e\" (UID: \"0cdccf60-59fa-48ac-997d-0b60175ce57e\") " Jan 20 16:59:02 crc kubenswrapper[4558]: I0120 16:59:02.845346 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0cdccf60-59fa-48ac-997d-0b60175ce57e-combined-ca-bundle\") pod \"0cdccf60-59fa-48ac-997d-0b60175ce57e\" (UID: \"0cdccf60-59fa-48ac-997d-0b60175ce57e\") " Jan 20 16:59:02 crc kubenswrapper[4558]: I0120 16:59:02.845369 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0cdccf60-59fa-48ac-997d-0b60175ce57e-config-data\") pod \"0cdccf60-59fa-48ac-997d-0b60175ce57e\" (UID: \"0cdccf60-59fa-48ac-997d-0b60175ce57e\") " Jan 20 16:59:02 crc kubenswrapper[4558]: I0120 16:59:02.845384 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0cdccf60-59fa-48ac-997d-0b60175ce57e-logs\") pod \"0cdccf60-59fa-48ac-997d-0b60175ce57e\" (UID: \"0cdccf60-59fa-48ac-997d-0b60175ce57e\") " Jan 20 16:59:02 crc kubenswrapper[4558]: I0120 16:59:02.845407 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-65flp\" (UniqueName: \"kubernetes.io/projected/0cdccf60-59fa-48ac-997d-0b60175ce57e-kube-api-access-65flp\") pod \"0cdccf60-59fa-48ac-997d-0b60175ce57e\" (UID: \"0cdccf60-59fa-48ac-997d-0b60175ce57e\") " Jan 20 16:59:02 crc kubenswrapper[4558]: I0120 16:59:02.845454 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"0cdccf60-59fa-48ac-997d-0b60175ce57e\" (UID: 
\"0cdccf60-59fa-48ac-997d-0b60175ce57e\") " Jan 20 16:59:02 crc kubenswrapper[4558]: I0120 16:59:02.845478 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0cdccf60-59fa-48ac-997d-0b60175ce57e-httpd-run\") pod \"0cdccf60-59fa-48ac-997d-0b60175ce57e\" (UID: \"0cdccf60-59fa-48ac-997d-0b60175ce57e\") " Jan 20 16:59:02 crc kubenswrapper[4558]: I0120 16:59:02.846121 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0cdccf60-59fa-48ac-997d-0b60175ce57e-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "0cdccf60-59fa-48ac-997d-0b60175ce57e" (UID: "0cdccf60-59fa-48ac-997d-0b60175ce57e"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 16:59:02 crc kubenswrapper[4558]: I0120 16:59:02.846338 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0cdccf60-59fa-48ac-997d-0b60175ce57e-logs" (OuterVolumeSpecName: "logs") pod "0cdccf60-59fa-48ac-997d-0b60175ce57e" (UID: "0cdccf60-59fa-48ac-997d-0b60175ce57e"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 16:59:02 crc kubenswrapper[4558]: I0120 16:59:02.849547 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "glance") pod "0cdccf60-59fa-48ac-997d-0b60175ce57e" (UID: "0cdccf60-59fa-48ac-997d-0b60175ce57e"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 16:59:02 crc kubenswrapper[4558]: I0120 16:59:02.849568 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0cdccf60-59fa-48ac-997d-0b60175ce57e-scripts" (OuterVolumeSpecName: "scripts") pod "0cdccf60-59fa-48ac-997d-0b60175ce57e" (UID: "0cdccf60-59fa-48ac-997d-0b60175ce57e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:59:02 crc kubenswrapper[4558]: I0120 16:59:02.850583 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0cdccf60-59fa-48ac-997d-0b60175ce57e-kube-api-access-65flp" (OuterVolumeSpecName: "kube-api-access-65flp") pod "0cdccf60-59fa-48ac-997d-0b60175ce57e" (UID: "0cdccf60-59fa-48ac-997d-0b60175ce57e"). InnerVolumeSpecName "kube-api-access-65flp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:59:02 crc kubenswrapper[4558]: I0120 16:59:02.865383 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0cdccf60-59fa-48ac-997d-0b60175ce57e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0cdccf60-59fa-48ac-997d-0b60175ce57e" (UID: "0cdccf60-59fa-48ac-997d-0b60175ce57e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:59:02 crc kubenswrapper[4558]: I0120 16:59:02.880184 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0cdccf60-59fa-48ac-997d-0b60175ce57e-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "0cdccf60-59fa-48ac-997d-0b60175ce57e" (UID: "0cdccf60-59fa-48ac-997d-0b60175ce57e"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:59:02 crc kubenswrapper[4558]: I0120 16:59:02.882024 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0cdccf60-59fa-48ac-997d-0b60175ce57e-config-data" (OuterVolumeSpecName: "config-data") pod "0cdccf60-59fa-48ac-997d-0b60175ce57e" (UID: "0cdccf60-59fa-48ac-997d-0b60175ce57e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:59:02 crc kubenswrapper[4558]: I0120 16:59:02.947341 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0cdccf60-59fa-48ac-997d-0b60175ce57e-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 16:59:02 crc kubenswrapper[4558]: I0120 16:59:02.947370 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0cdccf60-59fa-48ac-997d-0b60175ce57e-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 16:59:02 crc kubenswrapper[4558]: I0120 16:59:02.947379 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0cdccf60-59fa-48ac-997d-0b60175ce57e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 16:59:02 crc kubenswrapper[4558]: I0120 16:59:02.947387 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0cdccf60-59fa-48ac-997d-0b60175ce57e-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 16:59:02 crc kubenswrapper[4558]: I0120 16:59:02.947394 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0cdccf60-59fa-48ac-997d-0b60175ce57e-logs\") on node \"crc\" DevicePath \"\"" Jan 20 16:59:02 crc kubenswrapper[4558]: I0120 16:59:02.947403 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-65flp\" (UniqueName: \"kubernetes.io/projected/0cdccf60-59fa-48ac-997d-0b60175ce57e-kube-api-access-65flp\") on node \"crc\" DevicePath \"\"" Jan 20 16:59:02 crc kubenswrapper[4558]: I0120 16:59:02.947442 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Jan 20 16:59:02 crc kubenswrapper[4558]: I0120 16:59:02.947451 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0cdccf60-59fa-48ac-997d-0b60175ce57e-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 16:59:02 crc kubenswrapper[4558]: I0120 16:59:02.960557 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc" Jan 20 16:59:03 crc kubenswrapper[4558]: I0120 16:59:03.048914 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\"" Jan 20 16:59:03 crc kubenswrapper[4558]: I0120 16:59:03.569212 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"0cdccf60-59fa-48ac-997d-0b60175ce57e","Type":"ContainerDied","Data":"014518ad9c0876dae5ce745b6c17b69d0d7d344e92b6a86d82ab53f989bd68bd"} Jan 20 16:59:03 crc kubenswrapper[4558]: I0120 16:59:03.569257 4558 scope.go:117] "RemoveContainer" containerID="294a55c018cc11da5fc8b4a83211a9da8f8f646c9ce5de9f03eaacc39bb09bf3" Jan 
20 16:59:03 crc kubenswrapper[4558]: I0120 16:59:03.569354 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:59:03 crc kubenswrapper[4558]: I0120 16:59:03.591979 4558 scope.go:117] "RemoveContainer" containerID="93f8e8d586fe38469a94ae23eddbfc980dd890c101b66c86fce3f0bbfc106d56" Jan 20 16:59:03 crc kubenswrapper[4558]: I0120 16:59:03.592486 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 16:59:03 crc kubenswrapper[4558]: I0120 16:59:03.598371 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 16:59:03 crc kubenswrapper[4558]: I0120 16:59:03.613789 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 16:59:03 crc kubenswrapper[4558]: E0120 16:59:03.614102 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0cdccf60-59fa-48ac-997d-0b60175ce57e" containerName="glance-log" Jan 20 16:59:03 crc kubenswrapper[4558]: I0120 16:59:03.614117 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0cdccf60-59fa-48ac-997d-0b60175ce57e" containerName="glance-log" Jan 20 16:59:03 crc kubenswrapper[4558]: E0120 16:59:03.614134 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0cdccf60-59fa-48ac-997d-0b60175ce57e" containerName="glance-httpd" Jan 20 16:59:03 crc kubenswrapper[4558]: I0120 16:59:03.614139 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0cdccf60-59fa-48ac-997d-0b60175ce57e" containerName="glance-httpd" Jan 20 16:59:03 crc kubenswrapper[4558]: I0120 16:59:03.614338 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="0cdccf60-59fa-48ac-997d-0b60175ce57e" containerName="glance-log" Jan 20 16:59:03 crc kubenswrapper[4558]: I0120 16:59:03.614358 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="0cdccf60-59fa-48ac-997d-0b60175ce57e" containerName="glance-httpd" Jan 20 16:59:03 crc kubenswrapper[4558]: I0120 16:59:03.615060 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:59:03 crc kubenswrapper[4558]: I0120 16:59:03.617379 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-public-svc" Jan 20 16:59:03 crc kubenswrapper[4558]: I0120 16:59:03.617690 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-external-config-data" Jan 20 16:59:03 crc kubenswrapper[4558]: I0120 16:59:03.620953 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 16:59:03 crc kubenswrapper[4558]: I0120 16:59:03.759060 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2ccd35d-37c4-450d-b04c-ac505e35b0e8-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"d2ccd35d-37c4-450d-b04c-ac505e35b0e8\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:59:03 crc kubenswrapper[4558]: I0120 16:59:03.759143 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d2ccd35d-37c4-450d-b04c-ac505e35b0e8-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"d2ccd35d-37c4-450d-b04c-ac505e35b0e8\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:59:03 crc kubenswrapper[4558]: I0120 16:59:03.759220 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d2ccd35d-37c4-450d-b04c-ac505e35b0e8-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"d2ccd35d-37c4-450d-b04c-ac505e35b0e8\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:59:03 crc kubenswrapper[4558]: I0120 16:59:03.759262 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d2ccd35d-37c4-450d-b04c-ac505e35b0e8-logs\") pod \"glance-default-external-api-0\" (UID: \"d2ccd35d-37c4-450d-b04c-ac505e35b0e8\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:59:03 crc kubenswrapper[4558]: I0120 16:59:03.759290 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d2ccd35d-37c4-450d-b04c-ac505e35b0e8-config-data\") pod \"glance-default-external-api-0\" (UID: \"d2ccd35d-37c4-450d-b04c-ac505e35b0e8\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:59:03 crc kubenswrapper[4558]: I0120 16:59:03.759321 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d2ccd35d-37c4-450d-b04c-ac505e35b0e8-scripts\") pod \"glance-default-external-api-0\" (UID: \"d2ccd35d-37c4-450d-b04c-ac505e35b0e8\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:59:03 crc kubenswrapper[4558]: I0120 16:59:03.759411 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-external-api-0\" (UID: \"d2ccd35d-37c4-450d-b04c-ac505e35b0e8\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:59:03 crc 
kubenswrapper[4558]: I0120 16:59:03.759497 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-drvw9\" (UniqueName: \"kubernetes.io/projected/d2ccd35d-37c4-450d-b04c-ac505e35b0e8-kube-api-access-drvw9\") pod \"glance-default-external-api-0\" (UID: \"d2ccd35d-37c4-450d-b04c-ac505e35b0e8\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:59:03 crc kubenswrapper[4558]: I0120 16:59:03.860283 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-drvw9\" (UniqueName: \"kubernetes.io/projected/d2ccd35d-37c4-450d-b04c-ac505e35b0e8-kube-api-access-drvw9\") pod \"glance-default-external-api-0\" (UID: \"d2ccd35d-37c4-450d-b04c-ac505e35b0e8\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:59:03 crc kubenswrapper[4558]: I0120 16:59:03.860327 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2ccd35d-37c4-450d-b04c-ac505e35b0e8-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"d2ccd35d-37c4-450d-b04c-ac505e35b0e8\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:59:03 crc kubenswrapper[4558]: I0120 16:59:03.860381 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d2ccd35d-37c4-450d-b04c-ac505e35b0e8-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"d2ccd35d-37c4-450d-b04c-ac505e35b0e8\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:59:03 crc kubenswrapper[4558]: I0120 16:59:03.860405 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d2ccd35d-37c4-450d-b04c-ac505e35b0e8-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"d2ccd35d-37c4-450d-b04c-ac505e35b0e8\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:59:03 crc kubenswrapper[4558]: I0120 16:59:03.860444 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d2ccd35d-37c4-450d-b04c-ac505e35b0e8-logs\") pod \"glance-default-external-api-0\" (UID: \"d2ccd35d-37c4-450d-b04c-ac505e35b0e8\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:59:03 crc kubenswrapper[4558]: I0120 16:59:03.860465 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d2ccd35d-37c4-450d-b04c-ac505e35b0e8-config-data\") pod \"glance-default-external-api-0\" (UID: \"d2ccd35d-37c4-450d-b04c-ac505e35b0e8\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:59:03 crc kubenswrapper[4558]: I0120 16:59:03.860496 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d2ccd35d-37c4-450d-b04c-ac505e35b0e8-scripts\") pod \"glance-default-external-api-0\" (UID: \"d2ccd35d-37c4-450d-b04c-ac505e35b0e8\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:59:03 crc kubenswrapper[4558]: I0120 16:59:03.860545 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-external-api-0\" (UID: \"d2ccd35d-37c4-450d-b04c-ac505e35b0e8\") " 
pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:59:03 crc kubenswrapper[4558]: I0120 16:59:03.860703 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-external-api-0\" (UID: \"d2ccd35d-37c4-450d-b04c-ac505e35b0e8\") device mount path \"/mnt/openstack/pv01\"" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:59:03 crc kubenswrapper[4558]: I0120 16:59:03.861222 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d2ccd35d-37c4-450d-b04c-ac505e35b0e8-logs\") pod \"glance-default-external-api-0\" (UID: \"d2ccd35d-37c4-450d-b04c-ac505e35b0e8\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:59:03 crc kubenswrapper[4558]: I0120 16:59:03.861332 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d2ccd35d-37c4-450d-b04c-ac505e35b0e8-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"d2ccd35d-37c4-450d-b04c-ac505e35b0e8\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:59:03 crc kubenswrapper[4558]: I0120 16:59:03.865247 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d2ccd35d-37c4-450d-b04c-ac505e35b0e8-scripts\") pod \"glance-default-external-api-0\" (UID: \"d2ccd35d-37c4-450d-b04c-ac505e35b0e8\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:59:03 crc kubenswrapper[4558]: I0120 16:59:03.865774 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d2ccd35d-37c4-450d-b04c-ac505e35b0e8-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"d2ccd35d-37c4-450d-b04c-ac505e35b0e8\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:59:03 crc kubenswrapper[4558]: I0120 16:59:03.865831 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2ccd35d-37c4-450d-b04c-ac505e35b0e8-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"d2ccd35d-37c4-450d-b04c-ac505e35b0e8\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:59:03 crc kubenswrapper[4558]: I0120 16:59:03.866145 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d2ccd35d-37c4-450d-b04c-ac505e35b0e8-config-data\") pod \"glance-default-external-api-0\" (UID: \"d2ccd35d-37c4-450d-b04c-ac505e35b0e8\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:59:03 crc kubenswrapper[4558]: I0120 16:59:03.872877 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-drvw9\" (UniqueName: \"kubernetes.io/projected/d2ccd35d-37c4-450d-b04c-ac505e35b0e8-kube-api-access-drvw9\") pod \"glance-default-external-api-0\" (UID: \"d2ccd35d-37c4-450d-b04c-ac505e35b0e8\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:59:03 crc kubenswrapper[4558]: I0120 16:59:03.878575 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-external-api-0\" (UID: \"d2ccd35d-37c4-450d-b04c-ac505e35b0e8\") " 
pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:59:03 crc kubenswrapper[4558]: I0120 16:59:03.931391 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:59:04 crc kubenswrapper[4558]: I0120 16:59:04.144152 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:59:04 crc kubenswrapper[4558]: I0120 16:59:04.144374 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:59:04 crc kubenswrapper[4558]: I0120 16:59:04.165693 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:59:04 crc kubenswrapper[4558]: I0120 16:59:04.171965 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:59:04 crc kubenswrapper[4558]: I0120 16:59:04.290256 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 16:59:04 crc kubenswrapper[4558]: I0120 16:59:04.573419 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0cdccf60-59fa-48ac-997d-0b60175ce57e" path="/var/lib/kubelet/pods/0cdccf60-59fa-48ac-997d-0b60175ce57e/volumes" Jan 20 16:59:04 crc kubenswrapper[4558]: I0120 16:59:04.577460 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"d2ccd35d-37c4-450d-b04c-ac505e35b0e8","Type":"ContainerStarted","Data":"aebe50fb6ffccea5e397bc2849dcc3242a96e322c460d57c3ee743b4a1c87e39"} Jan 20 16:59:04 crc kubenswrapper[4558]: I0120 16:59:04.578834 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:59:04 crc kubenswrapper[4558]: I0120 16:59:04.579012 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:59:04 crc kubenswrapper[4558]: I0120 16:59:04.785715 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 16:59:04 crc kubenswrapper[4558]: I0120 16:59:04.785859 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="8fb1a2dc-0a3d-4976-98ca-16a0dd66990f" containerName="nova-cell0-conductor-conductor" containerID="cri-o://22730bfcc8aa4a00b58336970b30eda6bb7a2ab5925c129af77003ea774a038b" gracePeriod=30 Jan 20 16:59:05 crc kubenswrapper[4558]: I0120 16:59:05.592023 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"d2ccd35d-37c4-450d-b04c-ac505e35b0e8","Type":"ContainerStarted","Data":"3a7a952277efc163959dfa821edbe7eed459482ba7b2f738ddd5297a1de3b321"} Jan 20 16:59:05 crc kubenswrapper[4558]: I0120 16:59:05.592243 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"d2ccd35d-37c4-450d-b04c-ac505e35b0e8","Type":"ContainerStarted","Data":"2f43a86fbf8e121d1c2b64dc721cdb7d0f3e79ac1af1c83f48879ac356eba5d7"} Jan 20 16:59:05 crc kubenswrapper[4558]: I0120 16:59:05.615595 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack-kuttl-tests/glance-default-external-api-0" podStartSLOduration=2.615579832 podStartE2EDuration="2.615579832s" podCreationTimestamp="2026-01-20 16:59:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:59:05.607496862 +0000 UTC m=+1039.367834830" watchObservedRunningTime="2026-01-20 16:59:05.615579832 +0000 UTC m=+1039.375917799" Jan 20 16:59:06 crc kubenswrapper[4558]: I0120 16:59:06.161061 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:59:06 crc kubenswrapper[4558]: I0120 16:59:06.165914 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 16:59:06 crc kubenswrapper[4558]: I0120 16:59:06.224972 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 16:59:06 crc kubenswrapper[4558]: I0120 16:59:06.302688 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8fb1a2dc-0a3d-4976-98ca-16a0dd66990f-combined-ca-bundle\") pod \"8fb1a2dc-0a3d-4976-98ca-16a0dd66990f\" (UID: \"8fb1a2dc-0a3d-4976-98ca-16a0dd66990f\") " Jan 20 16:59:06 crc kubenswrapper[4558]: I0120 16:59:06.302957 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8fb1a2dc-0a3d-4976-98ca-16a0dd66990f-config-data\") pod \"8fb1a2dc-0a3d-4976-98ca-16a0dd66990f\" (UID: \"8fb1a2dc-0a3d-4976-98ca-16a0dd66990f\") " Jan 20 16:59:06 crc kubenswrapper[4558]: I0120 16:59:06.303093 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-scrpp\" (UniqueName: \"kubernetes.io/projected/8fb1a2dc-0a3d-4976-98ca-16a0dd66990f-kube-api-access-scrpp\") pod \"8fb1a2dc-0a3d-4976-98ca-16a0dd66990f\" (UID: \"8fb1a2dc-0a3d-4976-98ca-16a0dd66990f\") " Jan 20 16:59:06 crc kubenswrapper[4558]: I0120 16:59:06.309282 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8fb1a2dc-0a3d-4976-98ca-16a0dd66990f-kube-api-access-scrpp" (OuterVolumeSpecName: "kube-api-access-scrpp") pod "8fb1a2dc-0a3d-4976-98ca-16a0dd66990f" (UID: "8fb1a2dc-0a3d-4976-98ca-16a0dd66990f"). InnerVolumeSpecName "kube-api-access-scrpp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:59:06 crc kubenswrapper[4558]: I0120 16:59:06.321503 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8fb1a2dc-0a3d-4976-98ca-16a0dd66990f-config-data" (OuterVolumeSpecName: "config-data") pod "8fb1a2dc-0a3d-4976-98ca-16a0dd66990f" (UID: "8fb1a2dc-0a3d-4976-98ca-16a0dd66990f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:59:06 crc kubenswrapper[4558]: I0120 16:59:06.327356 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8fb1a2dc-0a3d-4976-98ca-16a0dd66990f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8fb1a2dc-0a3d-4976-98ca-16a0dd66990f" (UID: "8fb1a2dc-0a3d-4976-98ca-16a0dd66990f"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:59:06 crc kubenswrapper[4558]: I0120 16:59:06.404878 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-scrpp\" (UniqueName: \"kubernetes.io/projected/8fb1a2dc-0a3d-4976-98ca-16a0dd66990f-kube-api-access-scrpp\") on node \"crc\" DevicePath \"\"" Jan 20 16:59:06 crc kubenswrapper[4558]: I0120 16:59:06.405033 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8fb1a2dc-0a3d-4976-98ca-16a0dd66990f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 16:59:06 crc kubenswrapper[4558]: I0120 16:59:06.405098 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8fb1a2dc-0a3d-4976-98ca-16a0dd66990f-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 16:59:06 crc kubenswrapper[4558]: I0120 16:59:06.599764 4558 generic.go:334] "Generic (PLEG): container finished" podID="8fb1a2dc-0a3d-4976-98ca-16a0dd66990f" containerID="22730bfcc8aa4a00b58336970b30eda6bb7a2ab5925c129af77003ea774a038b" exitCode=0 Jan 20 16:59:06 crc kubenswrapper[4558]: I0120 16:59:06.600291 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"8fb1a2dc-0a3d-4976-98ca-16a0dd66990f","Type":"ContainerDied","Data":"22730bfcc8aa4a00b58336970b30eda6bb7a2ab5925c129af77003ea774a038b"} Jan 20 16:59:06 crc kubenswrapper[4558]: I0120 16:59:06.600325 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"8fb1a2dc-0a3d-4976-98ca-16a0dd66990f","Type":"ContainerDied","Data":"b8fead0f0fa01b79a7127b43e5354e3138d01e3c96ff170fd8c31cd7da99915f"} Jan 20 16:59:06 crc kubenswrapper[4558]: I0120 16:59:06.600320 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 16:59:06 crc kubenswrapper[4558]: I0120 16:59:06.600367 4558 scope.go:117] "RemoveContainer" containerID="22730bfcc8aa4a00b58336970b30eda6bb7a2ab5925c129af77003ea774a038b" Jan 20 16:59:06 crc kubenswrapper[4558]: I0120 16:59:06.622251 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 16:59:06 crc kubenswrapper[4558]: I0120 16:59:06.630004 4558 scope.go:117] "RemoveContainer" containerID="22730bfcc8aa4a00b58336970b30eda6bb7a2ab5925c129af77003ea774a038b" Jan 20 16:59:06 crc kubenswrapper[4558]: E0120 16:59:06.630340 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"22730bfcc8aa4a00b58336970b30eda6bb7a2ab5925c129af77003ea774a038b\": container with ID starting with 22730bfcc8aa4a00b58336970b30eda6bb7a2ab5925c129af77003ea774a038b not found: ID does not exist" containerID="22730bfcc8aa4a00b58336970b30eda6bb7a2ab5925c129af77003ea774a038b" Jan 20 16:59:06 crc kubenswrapper[4558]: I0120 16:59:06.630375 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"22730bfcc8aa4a00b58336970b30eda6bb7a2ab5925c129af77003ea774a038b"} err="failed to get container status \"22730bfcc8aa4a00b58336970b30eda6bb7a2ab5925c129af77003ea774a038b\": rpc error: code = NotFound desc = could not find container \"22730bfcc8aa4a00b58336970b30eda6bb7a2ab5925c129af77003ea774a038b\": container with ID starting with 22730bfcc8aa4a00b58336970b30eda6bb7a2ab5925c129af77003ea774a038b not found: ID does not exist" Jan 20 16:59:06 crc kubenswrapper[4558]: I0120 16:59:06.633366 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 16:59:06 crc kubenswrapper[4558]: I0120 16:59:06.647027 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 16:59:06 crc kubenswrapper[4558]: E0120 16:59:06.647405 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8fb1a2dc-0a3d-4976-98ca-16a0dd66990f" containerName="nova-cell0-conductor-conductor" Jan 20 16:59:06 crc kubenswrapper[4558]: I0120 16:59:06.647423 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8fb1a2dc-0a3d-4976-98ca-16a0dd66990f" containerName="nova-cell0-conductor-conductor" Jan 20 16:59:06 crc kubenswrapper[4558]: I0120 16:59:06.647594 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8fb1a2dc-0a3d-4976-98ca-16a0dd66990f" containerName="nova-cell0-conductor-conductor" Jan 20 16:59:06 crc kubenswrapper[4558]: I0120 16:59:06.648076 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 16:59:06 crc kubenswrapper[4558]: I0120 16:59:06.653023 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 16:59:06 crc kubenswrapper[4558]: I0120 16:59:06.658574 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-nova-dockercfg-ntfs6" Jan 20 16:59:06 crc kubenswrapper[4558]: I0120 16:59:06.658871 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-conductor-config-data" Jan 20 16:59:06 crc kubenswrapper[4558]: I0120 16:59:06.811464 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6r6jm\" (UniqueName: \"kubernetes.io/projected/9ea3b871-4db3-4108-baea-e57a23d9d6c5-kube-api-access-6r6jm\") pod \"nova-cell0-conductor-0\" (UID: \"9ea3b871-4db3-4108-baea-e57a23d9d6c5\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 16:59:06 crc kubenswrapper[4558]: I0120 16:59:06.811561 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9ea3b871-4db3-4108-baea-e57a23d9d6c5-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"9ea3b871-4db3-4108-baea-e57a23d9d6c5\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 16:59:06 crc kubenswrapper[4558]: I0120 16:59:06.811596 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ea3b871-4db3-4108-baea-e57a23d9d6c5-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"9ea3b871-4db3-4108-baea-e57a23d9d6c5\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 16:59:06 crc kubenswrapper[4558]: I0120 16:59:06.913390 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9ea3b871-4db3-4108-baea-e57a23d9d6c5-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"9ea3b871-4db3-4108-baea-e57a23d9d6c5\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 16:59:06 crc kubenswrapper[4558]: I0120 16:59:06.913454 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ea3b871-4db3-4108-baea-e57a23d9d6c5-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"9ea3b871-4db3-4108-baea-e57a23d9d6c5\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 16:59:06 crc kubenswrapper[4558]: I0120 16:59:06.913545 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6r6jm\" (UniqueName: \"kubernetes.io/projected/9ea3b871-4db3-4108-baea-e57a23d9d6c5-kube-api-access-6r6jm\") pod \"nova-cell0-conductor-0\" (UID: \"9ea3b871-4db3-4108-baea-e57a23d9d6c5\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 16:59:06 crc kubenswrapper[4558]: I0120 16:59:06.916970 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9ea3b871-4db3-4108-baea-e57a23d9d6c5-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"9ea3b871-4db3-4108-baea-e57a23d9d6c5\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 16:59:06 crc kubenswrapper[4558]: I0120 16:59:06.917398 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ea3b871-4db3-4108-baea-e57a23d9d6c5-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"9ea3b871-4db3-4108-baea-e57a23d9d6c5\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 16:59:06 crc kubenswrapper[4558]: I0120 16:59:06.945624 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6r6jm\" (UniqueName: \"kubernetes.io/projected/9ea3b871-4db3-4108-baea-e57a23d9d6c5-kube-api-access-6r6jm\") pod \"nova-cell0-conductor-0\" (UID: \"9ea3b871-4db3-4108-baea-e57a23d9d6c5\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 16:59:06 crc kubenswrapper[4558]: I0120 16:59:06.972290 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 16:59:07 crc kubenswrapper[4558]: I0120 16:59:07.381141 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 16:59:07 crc kubenswrapper[4558]: W0120 16:59:07.392092 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9ea3b871_4db3_4108_baea_e57a23d9d6c5.slice/crio-434d0d0f14cb12224054882a03ccf11144ee356b713b8f02b115a3bd459ff3c0 WatchSource:0}: Error finding container 434d0d0f14cb12224054882a03ccf11144ee356b713b8f02b115a3bd459ff3c0: Status 404 returned error can't find the container with id 434d0d0f14cb12224054882a03ccf11144ee356b713b8f02b115a3bd459ff3c0 Jan 20 16:59:07 crc kubenswrapper[4558]: I0120 16:59:07.607482 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"9ea3b871-4db3-4108-baea-e57a23d9d6c5","Type":"ContainerStarted","Data":"6074e6e8906baa2f1dffa9b96ff41ea08d5604ac43ef2c6593b686fd35af868a"} Jan 20 16:59:07 crc kubenswrapper[4558]: I0120 16:59:07.608368 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 16:59:07 crc kubenswrapper[4558]: I0120 16:59:07.608436 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"9ea3b871-4db3-4108-baea-e57a23d9d6c5","Type":"ContainerStarted","Data":"434d0d0f14cb12224054882a03ccf11144ee356b713b8f02b115a3bd459ff3c0"} Jan 20 16:59:07 crc kubenswrapper[4558]: I0120 16:59:07.628034 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podStartSLOduration=1.628017908 podStartE2EDuration="1.628017908s" podCreationTimestamp="2026-01-20 16:59:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:59:07.619567099 +0000 UTC m=+1041.379905066" watchObservedRunningTime="2026-01-20 16:59:07.628017908 +0000 UTC m=+1041.388355875" Jan 20 16:59:08 crc kubenswrapper[4558]: I0120 16:59:08.574679 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8fb1a2dc-0a3d-4976-98ca-16a0dd66990f" path="/var/lib/kubelet/pods/8fb1a2dc-0a3d-4976-98ca-16a0dd66990f/volumes" Jan 20 16:59:13 crc kubenswrapper[4558]: I0120 16:59:13.751620 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/ceilometer-0" podUID="a25d1528-cb2e-4d44-bcff-cbee0febbf9b" containerName="proxy-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 503" Jan 20 16:59:13 crc kubenswrapper[4558]: I0120 
16:59:13.931973 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:59:13 crc kubenswrapper[4558]: I0120 16:59:13.932010 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:59:13 crc kubenswrapper[4558]: I0120 16:59:13.954814 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:59:13 crc kubenswrapper[4558]: I0120 16:59:13.959712 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:59:14 crc kubenswrapper[4558]: I0120 16:59:14.651006 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:59:14 crc kubenswrapper[4558]: I0120 16:59:14.651038 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:59:16 crc kubenswrapper[4558]: I0120 16:59:16.209268 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:59:16 crc kubenswrapper[4558]: I0120 16:59:16.234639 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 16:59:17 crc kubenswrapper[4558]: I0120 16:59:17.003233 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 16:59:17 crc kubenswrapper[4558]: I0120 16:59:17.401112 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell0-cell-mapping-wn5b6"] Jan 20 16:59:17 crc kubenswrapper[4558]: I0120 16:59:17.402306 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-wn5b6" Jan 20 16:59:17 crc kubenswrapper[4558]: I0120 16:59:17.403849 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-manage-config-data" Jan 20 16:59:17 crc kubenswrapper[4558]: I0120 16:59:17.404157 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-manage-scripts" Jan 20 16:59:17 crc kubenswrapper[4558]: I0120 16:59:17.411214 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-cell-mapping-wn5b6"] Jan 20 16:59:17 crc kubenswrapper[4558]: I0120 16:59:17.491442 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6aaeaa31-51e8-451a-9a40-6880c278ee0e-config-data\") pod \"nova-cell0-cell-mapping-wn5b6\" (UID: \"6aaeaa31-51e8-451a-9a40-6880c278ee0e\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-wn5b6" Jan 20 16:59:17 crc kubenswrapper[4558]: I0120 16:59:17.491506 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6aaeaa31-51e8-451a-9a40-6880c278ee0e-scripts\") pod \"nova-cell0-cell-mapping-wn5b6\" (UID: \"6aaeaa31-51e8-451a-9a40-6880c278ee0e\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-wn5b6" Jan 20 16:59:17 crc kubenswrapper[4558]: I0120 16:59:17.491846 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6aaeaa31-51e8-451a-9a40-6880c278ee0e-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-wn5b6\" (UID: \"6aaeaa31-51e8-451a-9a40-6880c278ee0e\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-wn5b6" Jan 20 16:59:17 crc kubenswrapper[4558]: I0120 16:59:17.491915 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nsm8k\" (UniqueName: \"kubernetes.io/projected/6aaeaa31-51e8-451a-9a40-6880c278ee0e-kube-api-access-nsm8k\") pod \"nova-cell0-cell-mapping-wn5b6\" (UID: \"6aaeaa31-51e8-451a-9a40-6880c278ee0e\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-wn5b6" Jan 20 16:59:17 crc kubenswrapper[4558]: I0120 16:59:17.550003 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 16:59:17 crc kubenswrapper[4558]: I0120 16:59:17.551213 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 16:59:17 crc kubenswrapper[4558]: I0120 16:59:17.554359 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-api-config-data" Jan 20 16:59:17 crc kubenswrapper[4558]: I0120 16:59:17.559005 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 16:59:17 crc kubenswrapper[4558]: I0120 16:59:17.593474 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6aaeaa31-51e8-451a-9a40-6880c278ee0e-scripts\") pod \"nova-cell0-cell-mapping-wn5b6\" (UID: \"6aaeaa31-51e8-451a-9a40-6880c278ee0e\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-wn5b6" Jan 20 16:59:17 crc kubenswrapper[4558]: I0120 16:59:17.593593 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6aaeaa31-51e8-451a-9a40-6880c278ee0e-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-wn5b6\" (UID: \"6aaeaa31-51e8-451a-9a40-6880c278ee0e\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-wn5b6" Jan 20 16:59:17 crc kubenswrapper[4558]: I0120 16:59:17.593625 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nsm8k\" (UniqueName: \"kubernetes.io/projected/6aaeaa31-51e8-451a-9a40-6880c278ee0e-kube-api-access-nsm8k\") pod \"nova-cell0-cell-mapping-wn5b6\" (UID: \"6aaeaa31-51e8-451a-9a40-6880c278ee0e\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-wn5b6" Jan 20 16:59:17 crc kubenswrapper[4558]: I0120 16:59:17.593677 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6aaeaa31-51e8-451a-9a40-6880c278ee0e-config-data\") pod \"nova-cell0-cell-mapping-wn5b6\" (UID: \"6aaeaa31-51e8-451a-9a40-6880c278ee0e\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-wn5b6" Jan 20 16:59:17 crc kubenswrapper[4558]: I0120 16:59:17.605917 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 16:59:17 crc kubenswrapper[4558]: I0120 16:59:17.606704 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6aaeaa31-51e8-451a-9a40-6880c278ee0e-scripts\") pod \"nova-cell0-cell-mapping-wn5b6\" (UID: \"6aaeaa31-51e8-451a-9a40-6880c278ee0e\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-wn5b6" Jan 20 16:59:17 crc kubenswrapper[4558]: I0120 16:59:17.607432 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 16:59:17 crc kubenswrapper[4558]: I0120 16:59:17.620176 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-metadata-config-data" Jan 20 16:59:17 crc kubenswrapper[4558]: I0120 16:59:17.621252 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6aaeaa31-51e8-451a-9a40-6880c278ee0e-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-wn5b6\" (UID: \"6aaeaa31-51e8-451a-9a40-6880c278ee0e\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-wn5b6" Jan 20 16:59:17 crc kubenswrapper[4558]: I0120 16:59:17.629405 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nsm8k\" (UniqueName: \"kubernetes.io/projected/6aaeaa31-51e8-451a-9a40-6880c278ee0e-kube-api-access-nsm8k\") pod \"nova-cell0-cell-mapping-wn5b6\" (UID: \"6aaeaa31-51e8-451a-9a40-6880c278ee0e\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-wn5b6" Jan 20 16:59:17 crc kubenswrapper[4558]: I0120 16:59:17.656236 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 16:59:17 crc kubenswrapper[4558]: I0120 16:59:17.659760 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6aaeaa31-51e8-451a-9a40-6880c278ee0e-config-data\") pod \"nova-cell0-cell-mapping-wn5b6\" (UID: \"6aaeaa31-51e8-451a-9a40-6880c278ee0e\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-wn5b6" Jan 20 16:59:17 crc kubenswrapper[4558]: I0120 16:59:17.698407 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ce97e7c6-01a4-4edc-85c2-5fc73a0bdb6e-logs\") pod \"nova-metadata-0\" (UID: \"ce97e7c6-01a4-4edc-85c2-5fc73a0bdb6e\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 16:59:17 crc kubenswrapper[4558]: I0120 16:59:17.698488 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce97e7c6-01a4-4edc-85c2-5fc73a0bdb6e-config-data\") pod \"nova-metadata-0\" (UID: \"ce97e7c6-01a4-4edc-85c2-5fc73a0bdb6e\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 16:59:17 crc kubenswrapper[4558]: I0120 16:59:17.698568 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mrm75\" (UniqueName: \"kubernetes.io/projected/ce97e7c6-01a4-4edc-85c2-5fc73a0bdb6e-kube-api-access-mrm75\") pod \"nova-metadata-0\" (UID: \"ce97e7c6-01a4-4edc-85c2-5fc73a0bdb6e\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 16:59:17 crc kubenswrapper[4558]: I0120 16:59:17.698716 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5484df8-2512-48d4-af79-ea72bf7c02b0-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"f5484df8-2512-48d4-af79-ea72bf7c02b0\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 16:59:17 crc kubenswrapper[4558]: I0120 16:59:17.698792 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5484df8-2512-48d4-af79-ea72bf7c02b0-config-data\") pod \"nova-api-0\" (UID: \"f5484df8-2512-48d4-af79-ea72bf7c02b0\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 
16:59:17 crc kubenswrapper[4558]: I0120 16:59:17.698810 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce97e7c6-01a4-4edc-85c2-5fc73a0bdb6e-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"ce97e7c6-01a4-4edc-85c2-5fc73a0bdb6e\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 16:59:17 crc kubenswrapper[4558]: I0120 16:59:17.698847 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f5484df8-2512-48d4-af79-ea72bf7c02b0-logs\") pod \"nova-api-0\" (UID: \"f5484df8-2512-48d4-af79-ea72bf7c02b0\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 16:59:17 crc kubenswrapper[4558]: I0120 16:59:17.698868 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ch6jz\" (UniqueName: \"kubernetes.io/projected/f5484df8-2512-48d4-af79-ea72bf7c02b0-kube-api-access-ch6jz\") pod \"nova-api-0\" (UID: \"f5484df8-2512-48d4-af79-ea72bf7c02b0\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 16:59:17 crc kubenswrapper[4558]: I0120 16:59:17.713841 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 16:59:17 crc kubenswrapper[4558]: I0120 16:59:17.714889 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 16:59:17 crc kubenswrapper[4558]: I0120 16:59:17.719412 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-scheduler-config-data" Jan 20 16:59:17 crc kubenswrapper[4558]: I0120 16:59:17.719818 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-wn5b6" Jan 20 16:59:17 crc kubenswrapper[4558]: I0120 16:59:17.752262 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 16:59:17 crc kubenswrapper[4558]: I0120 16:59:17.785208 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 16:59:17 crc kubenswrapper[4558]: I0120 16:59:17.786385 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 16:59:17 crc kubenswrapper[4558]: I0120 16:59:17.789225 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-novncproxy-config-data" Jan 20 16:59:17 crc kubenswrapper[4558]: I0120 16:59:17.799778 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ce97e7c6-01a4-4edc-85c2-5fc73a0bdb6e-logs\") pod \"nova-metadata-0\" (UID: \"ce97e7c6-01a4-4edc-85c2-5fc73a0bdb6e\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 16:59:17 crc kubenswrapper[4558]: I0120 16:59:17.799908 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-985hs\" (UniqueName: \"kubernetes.io/projected/900fb0c7-ddb0-46b3-82f7-cdd543afa534-kube-api-access-985hs\") pod \"nova-scheduler-0\" (UID: \"900fb0c7-ddb0-46b3-82f7-cdd543afa534\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 16:59:17 crc kubenswrapper[4558]: I0120 16:59:17.800026 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce97e7c6-01a4-4edc-85c2-5fc73a0bdb6e-config-data\") pod \"nova-metadata-0\" (UID: \"ce97e7c6-01a4-4edc-85c2-5fc73a0bdb6e\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 16:59:17 crc kubenswrapper[4558]: I0120 16:59:17.800150 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ce97e7c6-01a4-4edc-85c2-5fc73a0bdb6e-logs\") pod \"nova-metadata-0\" (UID: \"ce97e7c6-01a4-4edc-85c2-5fc73a0bdb6e\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 16:59:17 crc kubenswrapper[4558]: I0120 16:59:17.800316 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mrm75\" (UniqueName: \"kubernetes.io/projected/ce97e7c6-01a4-4edc-85c2-5fc73a0bdb6e-kube-api-access-mrm75\") pod \"nova-metadata-0\" (UID: \"ce97e7c6-01a4-4edc-85c2-5fc73a0bdb6e\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 16:59:17 crc kubenswrapper[4558]: I0120 16:59:17.800442 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/900fb0c7-ddb0-46b3-82f7-cdd543afa534-config-data\") pod \"nova-scheduler-0\" (UID: \"900fb0c7-ddb0-46b3-82f7-cdd543afa534\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 16:59:17 crc kubenswrapper[4558]: I0120 16:59:17.800757 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/900fb0c7-ddb0-46b3-82f7-cdd543afa534-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"900fb0c7-ddb0-46b3-82f7-cdd543afa534\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 16:59:17 crc kubenswrapper[4558]: I0120 16:59:17.800862 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5484df8-2512-48d4-af79-ea72bf7c02b0-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"f5484df8-2512-48d4-af79-ea72bf7c02b0\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 16:59:17 crc kubenswrapper[4558]: I0120 16:59:17.801005 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5484df8-2512-48d4-af79-ea72bf7c02b0-config-data\") pod 
\"nova-api-0\" (UID: \"f5484df8-2512-48d4-af79-ea72bf7c02b0\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 16:59:17 crc kubenswrapper[4558]: I0120 16:59:17.801082 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce97e7c6-01a4-4edc-85c2-5fc73a0bdb6e-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"ce97e7c6-01a4-4edc-85c2-5fc73a0bdb6e\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 16:59:17 crc kubenswrapper[4558]: I0120 16:59:17.802844 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f5484df8-2512-48d4-af79-ea72bf7c02b0-logs\") pod \"nova-api-0\" (UID: \"f5484df8-2512-48d4-af79-ea72bf7c02b0\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 16:59:17 crc kubenswrapper[4558]: I0120 16:59:17.802964 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ch6jz\" (UniqueName: \"kubernetes.io/projected/f5484df8-2512-48d4-af79-ea72bf7c02b0-kube-api-access-ch6jz\") pod \"nova-api-0\" (UID: \"f5484df8-2512-48d4-af79-ea72bf7c02b0\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 16:59:17 crc kubenswrapper[4558]: I0120 16:59:17.806957 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce97e7c6-01a4-4edc-85c2-5fc73a0bdb6e-config-data\") pod \"nova-metadata-0\" (UID: \"ce97e7c6-01a4-4edc-85c2-5fc73a0bdb6e\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 16:59:17 crc kubenswrapper[4558]: I0120 16:59:17.807287 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce97e7c6-01a4-4edc-85c2-5fc73a0bdb6e-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"ce97e7c6-01a4-4edc-85c2-5fc73a0bdb6e\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 16:59:17 crc kubenswrapper[4558]: I0120 16:59:17.809987 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 16:59:17 crc kubenswrapper[4558]: I0120 16:59:17.810660 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5484df8-2512-48d4-af79-ea72bf7c02b0-config-data\") pod \"nova-api-0\" (UID: \"f5484df8-2512-48d4-af79-ea72bf7c02b0\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 16:59:17 crc kubenswrapper[4558]: I0120 16:59:17.816174 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5484df8-2512-48d4-af79-ea72bf7c02b0-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"f5484df8-2512-48d4-af79-ea72bf7c02b0\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 16:59:17 crc kubenswrapper[4558]: I0120 16:59:17.819752 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f5484df8-2512-48d4-af79-ea72bf7c02b0-logs\") pod \"nova-api-0\" (UID: \"f5484df8-2512-48d4-af79-ea72bf7c02b0\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 16:59:17 crc kubenswrapper[4558]: I0120 16:59:17.822371 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mrm75\" (UniqueName: \"kubernetes.io/projected/ce97e7c6-01a4-4edc-85c2-5fc73a0bdb6e-kube-api-access-mrm75\") pod \"nova-metadata-0\" (UID: \"ce97e7c6-01a4-4edc-85c2-5fc73a0bdb6e\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 16:59:17 
crc kubenswrapper[4558]: I0120 16:59:17.823801 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ch6jz\" (UniqueName: \"kubernetes.io/projected/f5484df8-2512-48d4-af79-ea72bf7c02b0-kube-api-access-ch6jz\") pod \"nova-api-0\" (UID: \"f5484df8-2512-48d4-af79-ea72bf7c02b0\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 16:59:17 crc kubenswrapper[4558]: I0120 16:59:17.871037 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 16:59:17 crc kubenswrapper[4558]: I0120 16:59:17.904555 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/966d48b4-44df-4217-be28-d015d6defc20-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"966d48b4-44df-4217-be28-d015d6defc20\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 16:59:17 crc kubenswrapper[4558]: I0120 16:59:17.904594 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/966d48b4-44df-4217-be28-d015d6defc20-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"966d48b4-44df-4217-be28-d015d6defc20\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 16:59:17 crc kubenswrapper[4558]: I0120 16:59:17.904745 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-985hs\" (UniqueName: \"kubernetes.io/projected/900fb0c7-ddb0-46b3-82f7-cdd543afa534-kube-api-access-985hs\") pod \"nova-scheduler-0\" (UID: \"900fb0c7-ddb0-46b3-82f7-cdd543afa534\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 16:59:17 crc kubenswrapper[4558]: I0120 16:59:17.904879 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t5ml2\" (UniqueName: \"kubernetes.io/projected/966d48b4-44df-4217-be28-d015d6defc20-kube-api-access-t5ml2\") pod \"nova-cell1-novncproxy-0\" (UID: \"966d48b4-44df-4217-be28-d015d6defc20\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 16:59:17 crc kubenswrapper[4558]: I0120 16:59:17.904912 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/900fb0c7-ddb0-46b3-82f7-cdd543afa534-config-data\") pod \"nova-scheduler-0\" (UID: \"900fb0c7-ddb0-46b3-82f7-cdd543afa534\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 16:59:17 crc kubenswrapper[4558]: I0120 16:59:17.904992 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/900fb0c7-ddb0-46b3-82f7-cdd543afa534-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"900fb0c7-ddb0-46b3-82f7-cdd543afa534\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 16:59:17 crc kubenswrapper[4558]: I0120 16:59:17.909767 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/900fb0c7-ddb0-46b3-82f7-cdd543afa534-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"900fb0c7-ddb0-46b3-82f7-cdd543afa534\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 16:59:17 crc kubenswrapper[4558]: I0120 16:59:17.910491 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/900fb0c7-ddb0-46b3-82f7-cdd543afa534-config-data\") pod \"nova-scheduler-0\" 
(UID: \"900fb0c7-ddb0-46b3-82f7-cdd543afa534\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 16:59:17 crc kubenswrapper[4558]: I0120 16:59:17.922433 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-985hs\" (UniqueName: \"kubernetes.io/projected/900fb0c7-ddb0-46b3-82f7-cdd543afa534-kube-api-access-985hs\") pod \"nova-scheduler-0\" (UID: \"900fb0c7-ddb0-46b3-82f7-cdd543afa534\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 16:59:17 crc kubenswrapper[4558]: I0120 16:59:17.978755 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 16:59:18 crc kubenswrapper[4558]: I0120 16:59:18.008750 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/966d48b4-44df-4217-be28-d015d6defc20-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"966d48b4-44df-4217-be28-d015d6defc20\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 16:59:18 crc kubenswrapper[4558]: I0120 16:59:18.008940 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/966d48b4-44df-4217-be28-d015d6defc20-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"966d48b4-44df-4217-be28-d015d6defc20\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 16:59:18 crc kubenswrapper[4558]: I0120 16:59:18.009055 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t5ml2\" (UniqueName: \"kubernetes.io/projected/966d48b4-44df-4217-be28-d015d6defc20-kube-api-access-t5ml2\") pod \"nova-cell1-novncproxy-0\" (UID: \"966d48b4-44df-4217-be28-d015d6defc20\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 16:59:18 crc kubenswrapper[4558]: I0120 16:59:18.012377 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/966d48b4-44df-4217-be28-d015d6defc20-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"966d48b4-44df-4217-be28-d015d6defc20\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 16:59:18 crc kubenswrapper[4558]: I0120 16:59:18.021856 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t5ml2\" (UniqueName: \"kubernetes.io/projected/966d48b4-44df-4217-be28-d015d6defc20-kube-api-access-t5ml2\") pod \"nova-cell1-novncproxy-0\" (UID: \"966d48b4-44df-4217-be28-d015d6defc20\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 16:59:18 crc kubenswrapper[4558]: I0120 16:59:18.025795 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/966d48b4-44df-4217-be28-d015d6defc20-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"966d48b4-44df-4217-be28-d015d6defc20\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 16:59:18 crc kubenswrapper[4558]: I0120 16:59:18.041216 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 16:59:18 crc kubenswrapper[4558]: I0120 16:59:18.149942 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 16:59:18 crc kubenswrapper[4558]: W0120 16:59:18.216021 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6aaeaa31_51e8_451a_9a40_6880c278ee0e.slice/crio-321734cbbeb4acd9ab0fe4553e13077b91b55873213ca8a318c340180975a971 WatchSource:0}: Error finding container 321734cbbeb4acd9ab0fe4553e13077b91b55873213ca8a318c340180975a971: Status 404 returned error can't find the container with id 321734cbbeb4acd9ab0fe4553e13077b91b55873213ca8a318c340180975a971 Jan 20 16:59:18 crc kubenswrapper[4558]: I0120 16:59:18.223695 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-cell-mapping-wn5b6"] Jan 20 16:59:18 crc kubenswrapper[4558]: I0120 16:59:18.311741 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 16:59:18 crc kubenswrapper[4558]: I0120 16:59:18.447917 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 16:59:18 crc kubenswrapper[4558]: I0120 16:59:18.477739 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 16:59:18 crc kubenswrapper[4558]: I0120 16:59:18.516756 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 16:59:18 crc kubenswrapper[4558]: I0120 16:59:18.562902 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-db-sync-mj7lb"] Jan 20 16:59:18 crc kubenswrapper[4558]: I0120 16:59:18.563988 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-mj7lb" Jan 20 16:59:18 crc kubenswrapper[4558]: I0120 16:59:18.566916 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-conductor-config-data" Jan 20 16:59:18 crc kubenswrapper[4558]: I0120 16:59:18.567097 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-conductor-scripts" Jan 20 16:59:18 crc kubenswrapper[4558]: I0120 16:59:18.578604 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-db-sync-mj7lb"] Jan 20 16:59:18 crc kubenswrapper[4558]: I0120 16:59:18.622587 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ec56e0f2-2aaa-458d-8c02-e3dba504c48a-scripts\") pod \"nova-cell1-conductor-db-sync-mj7lb\" (UID: \"ec56e0f2-2aaa-458d-8c02-e3dba504c48a\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-mj7lb" Jan 20 16:59:18 crc kubenswrapper[4558]: I0120 16:59:18.622699 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec56e0f2-2aaa-458d-8c02-e3dba504c48a-config-data\") pod \"nova-cell1-conductor-db-sync-mj7lb\" (UID: \"ec56e0f2-2aaa-458d-8c02-e3dba504c48a\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-mj7lb" Jan 20 16:59:18 crc kubenswrapper[4558]: I0120 16:59:18.622774 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec56e0f2-2aaa-458d-8c02-e3dba504c48a-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-mj7lb\" 
(UID: \"ec56e0f2-2aaa-458d-8c02-e3dba504c48a\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-mj7lb" Jan 20 16:59:18 crc kubenswrapper[4558]: I0120 16:59:18.622872 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7x78m\" (UniqueName: \"kubernetes.io/projected/ec56e0f2-2aaa-458d-8c02-e3dba504c48a-kube-api-access-7x78m\") pod \"nova-cell1-conductor-db-sync-mj7lb\" (UID: \"ec56e0f2-2aaa-458d-8c02-e3dba504c48a\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-mj7lb" Jan 20 16:59:18 crc kubenswrapper[4558]: I0120 16:59:18.689741 4558 generic.go:334] "Generic (PLEG): container finished" podID="a25d1528-cb2e-4d44-bcff-cbee0febbf9b" containerID="5ea299b9e1a26087e75d352274e554174e1b3e1c9848de0109d15fa7e7bef405" exitCode=137 Jan 20 16:59:18 crc kubenswrapper[4558]: I0120 16:59:18.689958 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"a25d1528-cb2e-4d44-bcff-cbee0febbf9b","Type":"ContainerDied","Data":"5ea299b9e1a26087e75d352274e554174e1b3e1c9848de0109d15fa7e7bef405"} Jan 20 16:59:18 crc kubenswrapper[4558]: I0120 16:59:18.693543 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-wn5b6" event={"ID":"6aaeaa31-51e8-451a-9a40-6880c278ee0e","Type":"ContainerStarted","Data":"363d8978a2d2487cb2e7b6e16d0c176bad4492f0362547057829d38e6bcf10f8"} Jan 20 16:59:18 crc kubenswrapper[4558]: I0120 16:59:18.693574 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-wn5b6" event={"ID":"6aaeaa31-51e8-451a-9a40-6880c278ee0e","Type":"ContainerStarted","Data":"321734cbbeb4acd9ab0fe4553e13077b91b55873213ca8a318c340180975a971"} Jan 20 16:59:18 crc kubenswrapper[4558]: I0120 16:59:18.696909 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"ce97e7c6-01a4-4edc-85c2-5fc73a0bdb6e","Type":"ContainerStarted","Data":"12b9d5e572bfe2b8f86991f0d2f34b55ace0d9f0d3f7a8abd7db9373abf000c7"} Jan 20 16:59:18 crc kubenswrapper[4558]: I0120 16:59:18.697662 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"966d48b4-44df-4217-be28-d015d6defc20","Type":"ContainerStarted","Data":"a34d73b6d559dd63c47c700d2f13ef487628e03d9fe39fef7ec2ca0f9a90dfe6"} Jan 20 16:59:18 crc kubenswrapper[4558]: I0120 16:59:18.698422 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"f5484df8-2512-48d4-af79-ea72bf7c02b0","Type":"ContainerStarted","Data":"b6f2e4f8e15632dcec10e3b72ffa7aab39a917fac4297111958939cd502216c7"} Jan 20 16:59:18 crc kubenswrapper[4558]: I0120 16:59:18.699014 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"900fb0c7-ddb0-46b3-82f7-cdd543afa534","Type":"ContainerStarted","Data":"0a35efaebf80022aaabcdb8f43b4120a947bc1163e177f586070c7bdb6954672"} Jan 20 16:59:18 crc kubenswrapper[4558]: I0120 16:59:18.708727 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-wn5b6" podStartSLOduration=1.7087168080000001 podStartE2EDuration="1.708716808s" podCreationTimestamp="2026-01-20 16:59:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:59:18.705911402 +0000 UTC m=+1052.466249369" 
watchObservedRunningTime="2026-01-20 16:59:18.708716808 +0000 UTC m=+1052.469054774" Jan 20 16:59:18 crc kubenswrapper[4558]: I0120 16:59:18.724828 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec56e0f2-2aaa-458d-8c02-e3dba504c48a-config-data\") pod \"nova-cell1-conductor-db-sync-mj7lb\" (UID: \"ec56e0f2-2aaa-458d-8c02-e3dba504c48a\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-mj7lb" Jan 20 16:59:18 crc kubenswrapper[4558]: I0120 16:59:18.725121 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec56e0f2-2aaa-458d-8c02-e3dba504c48a-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-mj7lb\" (UID: \"ec56e0f2-2aaa-458d-8c02-e3dba504c48a\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-mj7lb" Jan 20 16:59:18 crc kubenswrapper[4558]: I0120 16:59:18.725577 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7x78m\" (UniqueName: \"kubernetes.io/projected/ec56e0f2-2aaa-458d-8c02-e3dba504c48a-kube-api-access-7x78m\") pod \"nova-cell1-conductor-db-sync-mj7lb\" (UID: \"ec56e0f2-2aaa-458d-8c02-e3dba504c48a\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-mj7lb" Jan 20 16:59:18 crc kubenswrapper[4558]: I0120 16:59:18.725827 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ec56e0f2-2aaa-458d-8c02-e3dba504c48a-scripts\") pod \"nova-cell1-conductor-db-sync-mj7lb\" (UID: \"ec56e0f2-2aaa-458d-8c02-e3dba504c48a\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-mj7lb" Jan 20 16:59:18 crc kubenswrapper[4558]: I0120 16:59:18.729978 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec56e0f2-2aaa-458d-8c02-e3dba504c48a-config-data\") pod \"nova-cell1-conductor-db-sync-mj7lb\" (UID: \"ec56e0f2-2aaa-458d-8c02-e3dba504c48a\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-mj7lb" Jan 20 16:59:18 crc kubenswrapper[4558]: I0120 16:59:18.730631 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ec56e0f2-2aaa-458d-8c02-e3dba504c48a-scripts\") pod \"nova-cell1-conductor-db-sync-mj7lb\" (UID: \"ec56e0f2-2aaa-458d-8c02-e3dba504c48a\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-mj7lb" Jan 20 16:59:18 crc kubenswrapper[4558]: I0120 16:59:18.734879 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec56e0f2-2aaa-458d-8c02-e3dba504c48a-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-mj7lb\" (UID: \"ec56e0f2-2aaa-458d-8c02-e3dba504c48a\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-mj7lb" Jan 20 16:59:18 crc kubenswrapper[4558]: I0120 16:59:18.749260 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7x78m\" (UniqueName: \"kubernetes.io/projected/ec56e0f2-2aaa-458d-8c02-e3dba504c48a-kube-api-access-7x78m\") pod \"nova-cell1-conductor-db-sync-mj7lb\" (UID: \"ec56e0f2-2aaa-458d-8c02-e3dba504c48a\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-mj7lb" Jan 20 16:59:18 crc kubenswrapper[4558]: I0120 16:59:18.860155 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:59:18 crc kubenswrapper[4558]: I0120 16:59:18.882348 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-mj7lb" Jan 20 16:59:18 crc kubenswrapper[4558]: I0120 16:59:18.928372 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a25d1528-cb2e-4d44-bcff-cbee0febbf9b-run-httpd\") pod \"a25d1528-cb2e-4d44-bcff-cbee0febbf9b\" (UID: \"a25d1528-cb2e-4d44-bcff-cbee0febbf9b\") " Jan 20 16:59:18 crc kubenswrapper[4558]: I0120 16:59:18.928541 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a25d1528-cb2e-4d44-bcff-cbee0febbf9b-combined-ca-bundle\") pod \"a25d1528-cb2e-4d44-bcff-cbee0febbf9b\" (UID: \"a25d1528-cb2e-4d44-bcff-cbee0febbf9b\") " Jan 20 16:59:18 crc kubenswrapper[4558]: I0120 16:59:18.928603 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a25d1528-cb2e-4d44-bcff-cbee0febbf9b-scripts\") pod \"a25d1528-cb2e-4d44-bcff-cbee0febbf9b\" (UID: \"a25d1528-cb2e-4d44-bcff-cbee0febbf9b\") " Jan 20 16:59:18 crc kubenswrapper[4558]: I0120 16:59:18.928633 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a25d1528-cb2e-4d44-bcff-cbee0febbf9b-log-httpd\") pod \"a25d1528-cb2e-4d44-bcff-cbee0febbf9b\" (UID: \"a25d1528-cb2e-4d44-bcff-cbee0febbf9b\") " Jan 20 16:59:18 crc kubenswrapper[4558]: I0120 16:59:18.928862 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a25d1528-cb2e-4d44-bcff-cbee0febbf9b-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "a25d1528-cb2e-4d44-bcff-cbee0febbf9b" (UID: "a25d1528-cb2e-4d44-bcff-cbee0febbf9b"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 16:59:18 crc kubenswrapper[4558]: I0120 16:59:18.930215 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a25d1528-cb2e-4d44-bcff-cbee0febbf9b-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "a25d1528-cb2e-4d44-bcff-cbee0febbf9b" (UID: "a25d1528-cb2e-4d44-bcff-cbee0febbf9b"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 16:59:18 crc kubenswrapper[4558]: I0120 16:59:18.932855 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a25d1528-cb2e-4d44-bcff-cbee0febbf9b-config-data\") pod \"a25d1528-cb2e-4d44-bcff-cbee0febbf9b\" (UID: \"a25d1528-cb2e-4d44-bcff-cbee0febbf9b\") " Jan 20 16:59:18 crc kubenswrapper[4558]: I0120 16:59:18.932892 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a25d1528-cb2e-4d44-bcff-cbee0febbf9b-sg-core-conf-yaml\") pod \"a25d1528-cb2e-4d44-bcff-cbee0febbf9b\" (UID: \"a25d1528-cb2e-4d44-bcff-cbee0febbf9b\") " Jan 20 16:59:18 crc kubenswrapper[4558]: I0120 16:59:18.932926 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mhtsv\" (UniqueName: \"kubernetes.io/projected/a25d1528-cb2e-4d44-bcff-cbee0febbf9b-kube-api-access-mhtsv\") pod \"a25d1528-cb2e-4d44-bcff-cbee0febbf9b\" (UID: \"a25d1528-cb2e-4d44-bcff-cbee0febbf9b\") " Jan 20 16:59:18 crc kubenswrapper[4558]: I0120 16:59:18.933286 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a25d1528-cb2e-4d44-bcff-cbee0febbf9b-scripts" (OuterVolumeSpecName: "scripts") pod "a25d1528-cb2e-4d44-bcff-cbee0febbf9b" (UID: "a25d1528-cb2e-4d44-bcff-cbee0febbf9b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:59:18 crc kubenswrapper[4558]: I0120 16:59:18.933524 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a25d1528-cb2e-4d44-bcff-cbee0febbf9b-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 16:59:18 crc kubenswrapper[4558]: I0120 16:59:18.933542 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a25d1528-cb2e-4d44-bcff-cbee0febbf9b-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 16:59:18 crc kubenswrapper[4558]: I0120 16:59:18.933552 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a25d1528-cb2e-4d44-bcff-cbee0febbf9b-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 16:59:18 crc kubenswrapper[4558]: I0120 16:59:18.935896 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a25d1528-cb2e-4d44-bcff-cbee0febbf9b-kube-api-access-mhtsv" (OuterVolumeSpecName: "kube-api-access-mhtsv") pod "a25d1528-cb2e-4d44-bcff-cbee0febbf9b" (UID: "a25d1528-cb2e-4d44-bcff-cbee0febbf9b"). InnerVolumeSpecName "kube-api-access-mhtsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:59:18 crc kubenswrapper[4558]: I0120 16:59:18.984019 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a25d1528-cb2e-4d44-bcff-cbee0febbf9b-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "a25d1528-cb2e-4d44-bcff-cbee0febbf9b" (UID: "a25d1528-cb2e-4d44-bcff-cbee0febbf9b"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:59:18 crc kubenswrapper[4558]: I0120 16:59:18.993030 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a25d1528-cb2e-4d44-bcff-cbee0febbf9b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a25d1528-cb2e-4d44-bcff-cbee0febbf9b" (UID: "a25d1528-cb2e-4d44-bcff-cbee0febbf9b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:59:19 crc kubenswrapper[4558]: I0120 16:59:19.025726 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a25d1528-cb2e-4d44-bcff-cbee0febbf9b-config-data" (OuterVolumeSpecName: "config-data") pod "a25d1528-cb2e-4d44-bcff-cbee0febbf9b" (UID: "a25d1528-cb2e-4d44-bcff-cbee0febbf9b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:59:19 crc kubenswrapper[4558]: I0120 16:59:19.035399 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a25d1528-cb2e-4d44-bcff-cbee0febbf9b-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 16:59:19 crc kubenswrapper[4558]: I0120 16:59:19.035425 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a25d1528-cb2e-4d44-bcff-cbee0febbf9b-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 16:59:19 crc kubenswrapper[4558]: I0120 16:59:19.035435 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mhtsv\" (UniqueName: \"kubernetes.io/projected/a25d1528-cb2e-4d44-bcff-cbee0febbf9b-kube-api-access-mhtsv\") on node \"crc\" DevicePath \"\"" Jan 20 16:59:19 crc kubenswrapper[4558]: I0120 16:59:19.035443 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a25d1528-cb2e-4d44-bcff-cbee0febbf9b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 16:59:19 crc kubenswrapper[4558]: I0120 16:59:19.282302 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-db-sync-mj7lb"] Jan 20 16:59:19 crc kubenswrapper[4558]: W0120 16:59:19.294757 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podec56e0f2_2aaa_458d_8c02_e3dba504c48a.slice/crio-e2fb53677466478de0c20c5c25e1f67b5878b8e7d27339e8986cd090ae6dee8a WatchSource:0}: Error finding container e2fb53677466478de0c20c5c25e1f67b5878b8e7d27339e8986cd090ae6dee8a: Status 404 returned error can't find the container with id e2fb53677466478de0c20c5c25e1f67b5878b8e7d27339e8986cd090ae6dee8a Jan 20 16:59:19 crc kubenswrapper[4558]: I0120 16:59:19.709143 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"a25d1528-cb2e-4d44-bcff-cbee0febbf9b","Type":"ContainerDied","Data":"413e381ab29eb3eab43a0c334784566ca177e99b184159fbed62a18b9e4060e2"} Jan 20 16:59:19 crc kubenswrapper[4558]: I0120 16:59:19.709223 4558 scope.go:117] "RemoveContainer" containerID="5ea299b9e1a26087e75d352274e554174e1b3e1c9848de0109d15fa7e7bef405" Jan 20 16:59:19 crc kubenswrapper[4558]: I0120 16:59:19.709378 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:59:19 crc kubenswrapper[4558]: I0120 16:59:19.717957 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-mj7lb" event={"ID":"ec56e0f2-2aaa-458d-8c02-e3dba504c48a","Type":"ContainerStarted","Data":"43e92682c057f8b00344d5b760cd89b42d21a4dde25d4648333ff3c54e65f150"} Jan 20 16:59:19 crc kubenswrapper[4558]: I0120 16:59:19.717994 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-mj7lb" event={"ID":"ec56e0f2-2aaa-458d-8c02-e3dba504c48a","Type":"ContainerStarted","Data":"e2fb53677466478de0c20c5c25e1f67b5878b8e7d27339e8986cd090ae6dee8a"} Jan 20 16:59:19 crc kubenswrapper[4558]: I0120 16:59:19.740110 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-mj7lb" podStartSLOduration=1.7400966850000001 podStartE2EDuration="1.740096685s" podCreationTimestamp="2026-01-20 16:59:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:59:19.736756634 +0000 UTC m=+1053.497094602" watchObservedRunningTime="2026-01-20 16:59:19.740096685 +0000 UTC m=+1053.500434651" Jan 20 16:59:19 crc kubenswrapper[4558]: I0120 16:59:19.742724 4558 scope.go:117] "RemoveContainer" containerID="ef0bee6d0daeab337417075e1abd16960c3e66d18ab72e0d85381f20662ec416" Jan 20 16:59:19 crc kubenswrapper[4558]: I0120 16:59:19.763677 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 16:59:19 crc kubenswrapper[4558]: I0120 16:59:19.770649 4558 scope.go:117] "RemoveContainer" containerID="2923167716a43fc429d5914cc58cb5e2dc67e79ac7c693e5671e0a656709c94c" Jan 20 16:59:19 crc kubenswrapper[4558]: I0120 16:59:19.774642 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 16:59:19 crc kubenswrapper[4558]: I0120 16:59:19.792495 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 16:59:19 crc kubenswrapper[4558]: E0120 16:59:19.792808 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a25d1528-cb2e-4d44-bcff-cbee0febbf9b" containerName="ceilometer-notification-agent" Jan 20 16:59:19 crc kubenswrapper[4558]: I0120 16:59:19.792823 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a25d1528-cb2e-4d44-bcff-cbee0febbf9b" containerName="ceilometer-notification-agent" Jan 20 16:59:19 crc kubenswrapper[4558]: E0120 16:59:19.792833 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a25d1528-cb2e-4d44-bcff-cbee0febbf9b" containerName="sg-core" Jan 20 16:59:19 crc kubenswrapper[4558]: I0120 16:59:19.792838 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a25d1528-cb2e-4d44-bcff-cbee0febbf9b" containerName="sg-core" Jan 20 16:59:19 crc kubenswrapper[4558]: E0120 16:59:19.792852 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a25d1528-cb2e-4d44-bcff-cbee0febbf9b" containerName="proxy-httpd" Jan 20 16:59:19 crc kubenswrapper[4558]: I0120 16:59:19.792857 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a25d1528-cb2e-4d44-bcff-cbee0febbf9b" containerName="proxy-httpd" Jan 20 16:59:19 crc kubenswrapper[4558]: E0120 16:59:19.792883 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a25d1528-cb2e-4d44-bcff-cbee0febbf9b" 
containerName="ceilometer-central-agent" Jan 20 16:59:19 crc kubenswrapper[4558]: I0120 16:59:19.792889 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a25d1528-cb2e-4d44-bcff-cbee0febbf9b" containerName="ceilometer-central-agent" Jan 20 16:59:19 crc kubenswrapper[4558]: I0120 16:59:19.793021 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a25d1528-cb2e-4d44-bcff-cbee0febbf9b" containerName="sg-core" Jan 20 16:59:19 crc kubenswrapper[4558]: I0120 16:59:19.793036 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a25d1528-cb2e-4d44-bcff-cbee0febbf9b" containerName="ceilometer-central-agent" Jan 20 16:59:19 crc kubenswrapper[4558]: I0120 16:59:19.793047 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a25d1528-cb2e-4d44-bcff-cbee0febbf9b" containerName="proxy-httpd" Jan 20 16:59:19 crc kubenswrapper[4558]: I0120 16:59:19.793063 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a25d1528-cb2e-4d44-bcff-cbee0febbf9b" containerName="ceilometer-notification-agent" Jan 20 16:59:19 crc kubenswrapper[4558]: I0120 16:59:19.794386 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:59:19 crc kubenswrapper[4558]: I0120 16:59:19.798001 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 16:59:19 crc kubenswrapper[4558]: I0120 16:59:19.800927 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 16:59:19 crc kubenswrapper[4558]: I0120 16:59:19.800984 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 16:59:19 crc kubenswrapper[4558]: I0120 16:59:19.802219 4558 scope.go:117] "RemoveContainer" containerID="f28e148e09db47eceea4bf2d682b5cfd254188726d31ea5fe49b06ed3cf7b2f8" Jan 20 16:59:19 crc kubenswrapper[4558]: I0120 16:59:19.847807 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/473a463c-85cb-4702-9759-761904072818-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"473a463c-85cb-4702-9759-761904072818\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:59:19 crc kubenswrapper[4558]: I0120 16:59:19.847858 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/473a463c-85cb-4702-9759-761904072818-scripts\") pod \"ceilometer-0\" (UID: \"473a463c-85cb-4702-9759-761904072818\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:59:19 crc kubenswrapper[4558]: I0120 16:59:19.848048 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mnnhn\" (UniqueName: \"kubernetes.io/projected/473a463c-85cb-4702-9759-761904072818-kube-api-access-mnnhn\") pod \"ceilometer-0\" (UID: \"473a463c-85cb-4702-9759-761904072818\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:59:19 crc kubenswrapper[4558]: I0120 16:59:19.848941 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/473a463c-85cb-4702-9759-761904072818-config-data\") pod \"ceilometer-0\" (UID: \"473a463c-85cb-4702-9759-761904072818\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:59:19 crc kubenswrapper[4558]: I0120 16:59:19.848996 4558 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/473a463c-85cb-4702-9759-761904072818-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"473a463c-85cb-4702-9759-761904072818\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:59:19 crc kubenswrapper[4558]: I0120 16:59:19.849221 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/473a463c-85cb-4702-9759-761904072818-log-httpd\") pod \"ceilometer-0\" (UID: \"473a463c-85cb-4702-9759-761904072818\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:59:19 crc kubenswrapper[4558]: I0120 16:59:19.849243 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/473a463c-85cb-4702-9759-761904072818-run-httpd\") pod \"ceilometer-0\" (UID: \"473a463c-85cb-4702-9759-761904072818\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:59:19 crc kubenswrapper[4558]: I0120 16:59:19.950469 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/473a463c-85cb-4702-9759-761904072818-scripts\") pod \"ceilometer-0\" (UID: \"473a463c-85cb-4702-9759-761904072818\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:59:19 crc kubenswrapper[4558]: I0120 16:59:19.950569 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mnnhn\" (UniqueName: \"kubernetes.io/projected/473a463c-85cb-4702-9759-761904072818-kube-api-access-mnnhn\") pod \"ceilometer-0\" (UID: \"473a463c-85cb-4702-9759-761904072818\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:59:19 crc kubenswrapper[4558]: I0120 16:59:19.950621 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/473a463c-85cb-4702-9759-761904072818-config-data\") pod \"ceilometer-0\" (UID: \"473a463c-85cb-4702-9759-761904072818\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:59:19 crc kubenswrapper[4558]: I0120 16:59:19.950636 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/473a463c-85cb-4702-9759-761904072818-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"473a463c-85cb-4702-9759-761904072818\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:59:19 crc kubenswrapper[4558]: I0120 16:59:19.950661 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/473a463c-85cb-4702-9759-761904072818-log-httpd\") pod \"ceilometer-0\" (UID: \"473a463c-85cb-4702-9759-761904072818\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:59:19 crc kubenswrapper[4558]: I0120 16:59:19.950677 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/473a463c-85cb-4702-9759-761904072818-run-httpd\") pod \"ceilometer-0\" (UID: \"473a463c-85cb-4702-9759-761904072818\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:59:19 crc kubenswrapper[4558]: I0120 16:59:19.950762 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/473a463c-85cb-4702-9759-761904072818-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: 
\"473a463c-85cb-4702-9759-761904072818\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:59:19 crc kubenswrapper[4558]: I0120 16:59:19.952370 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/473a463c-85cb-4702-9759-761904072818-log-httpd\") pod \"ceilometer-0\" (UID: \"473a463c-85cb-4702-9759-761904072818\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:59:19 crc kubenswrapper[4558]: I0120 16:59:19.952374 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/473a463c-85cb-4702-9759-761904072818-run-httpd\") pod \"ceilometer-0\" (UID: \"473a463c-85cb-4702-9759-761904072818\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:59:19 crc kubenswrapper[4558]: I0120 16:59:19.957954 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/473a463c-85cb-4702-9759-761904072818-config-data\") pod \"ceilometer-0\" (UID: \"473a463c-85cb-4702-9759-761904072818\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:59:19 crc kubenswrapper[4558]: I0120 16:59:19.958839 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/473a463c-85cb-4702-9759-761904072818-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"473a463c-85cb-4702-9759-761904072818\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:59:19 crc kubenswrapper[4558]: I0120 16:59:19.959003 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/473a463c-85cb-4702-9759-761904072818-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"473a463c-85cb-4702-9759-761904072818\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:59:19 crc kubenswrapper[4558]: I0120 16:59:19.959311 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/473a463c-85cb-4702-9759-761904072818-scripts\") pod \"ceilometer-0\" (UID: \"473a463c-85cb-4702-9759-761904072818\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:59:19 crc kubenswrapper[4558]: I0120 16:59:19.966720 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mnnhn\" (UniqueName: \"kubernetes.io/projected/473a463c-85cb-4702-9759-761904072818-kube-api-access-mnnhn\") pod \"ceilometer-0\" (UID: \"473a463c-85cb-4702-9759-761904072818\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:59:20 crc kubenswrapper[4558]: I0120 16:59:20.111920 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:59:20 crc kubenswrapper[4558]: I0120 16:59:20.588216 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a25d1528-cb2e-4d44-bcff-cbee0febbf9b" path="/var/lib/kubelet/pods/a25d1528-cb2e-4d44-bcff-cbee0febbf9b/volumes" Jan 20 16:59:20 crc kubenswrapper[4558]: I0120 16:59:20.728311 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"f5484df8-2512-48d4-af79-ea72bf7c02b0","Type":"ContainerStarted","Data":"6b5e284c69a12941754fdf4cd1d81b50fbf2bbe0fa967f797f258dd94e2ccebf"} Jan 20 16:59:20 crc kubenswrapper[4558]: I0120 16:59:20.731744 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"ce97e7c6-01a4-4edc-85c2-5fc73a0bdb6e","Type":"ContainerStarted","Data":"ea1faaaee51200f91128202826f05b7fee4131979bc59f5c84e6b22125654f81"} Jan 20 16:59:20 crc kubenswrapper[4558]: I0120 16:59:20.908015 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 16:59:21 crc kubenswrapper[4558]: I0120 16:59:21.315941 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 16:59:21 crc kubenswrapper[4558]: I0120 16:59:21.351305 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 16:59:21 crc kubenswrapper[4558]: W0120 16:59:21.636805 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod473a463c_85cb_4702_9759_761904072818.slice/crio-a25eecc02f955ebbdc93b128f536260bfffae2c61a5934b541e9dc8c982ef61b WatchSource:0}: Error finding container a25eecc02f955ebbdc93b128f536260bfffae2c61a5934b541e9dc8c982ef61b: Status 404 returned error can't find the container with id a25eecc02f955ebbdc93b128f536260bfffae2c61a5934b541e9dc8c982ef61b Jan 20 16:59:21 crc kubenswrapper[4558]: I0120 16:59:21.750184 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"ce97e7c6-01a4-4edc-85c2-5fc73a0bdb6e","Type":"ContainerStarted","Data":"c57d7be4e8c557d5a606fbbd2065d862051cbe59d2527eaeb23df64c71ab3996"} Jan 20 16:59:21 crc kubenswrapper[4558]: I0120 16:59:21.752667 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"f5484df8-2512-48d4-af79-ea72bf7c02b0","Type":"ContainerStarted","Data":"59c4f6d7ed8eb3d57f30987913f9fd8cd4d6bcba4c783d8566dd6559f9755936"} Jan 20 16:59:21 crc kubenswrapper[4558]: I0120 16:59:21.753886 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"473a463c-85cb-4702-9759-761904072818","Type":"ContainerStarted","Data":"a25eecc02f955ebbdc93b128f536260bfffae2c61a5934b541e9dc8c982ef61b"} Jan 20 16:59:21 crc kubenswrapper[4558]: I0120 16:59:21.769435 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-metadata-0" podStartSLOduration=2.7831562979999998 podStartE2EDuration="4.769421413s" podCreationTimestamp="2026-01-20 16:59:17 +0000 UTC" firstStartedPulling="2026-01-20 16:59:18.502241647 +0000 UTC m=+1052.262579614" lastFinishedPulling="2026-01-20 16:59:20.488506761 +0000 UTC m=+1054.248844729" observedRunningTime="2026-01-20 16:59:21.760550543 +0000 UTC m=+1055.520888510" watchObservedRunningTime="2026-01-20 16:59:21.769421413 +0000 UTC m=+1055.529759379" Jan 20 
16:59:21 crc kubenswrapper[4558]: I0120 16:59:21.784859 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-api-0" podStartSLOduration=2.643564737 podStartE2EDuration="4.784844882s" podCreationTimestamp="2026-01-20 16:59:17 +0000 UTC" firstStartedPulling="2026-01-20 16:59:18.346292269 +0000 UTC m=+1052.106630236" lastFinishedPulling="2026-01-20 16:59:20.487572414 +0000 UTC m=+1054.247910381" observedRunningTime="2026-01-20 16:59:21.776563442 +0000 UTC m=+1055.536901408" watchObservedRunningTime="2026-01-20 16:59:21.784844882 +0000 UTC m=+1055.545182850" Jan 20 16:59:22 crc kubenswrapper[4558]: I0120 16:59:22.762584 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"900fb0c7-ddb0-46b3-82f7-cdd543afa534","Type":"ContainerStarted","Data":"08ed39808896b2786fe6ca35a6d00b3c1ef2d42417098de4bd4d04960cf43db1"} Jan 20 16:59:22 crc kubenswrapper[4558]: I0120 16:59:22.763930 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"966d48b4-44df-4217-be28-d015d6defc20","Type":"ContainerStarted","Data":"699f41d3854a9fb8a133db3974fb8fbf2d759a82427c5d466adde671f8342bc6"} Jan 20 16:59:22 crc kubenswrapper[4558]: I0120 16:59:22.764049 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" podUID="966d48b4-44df-4217-be28-d015d6defc20" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://699f41d3854a9fb8a133db3974fb8fbf2d759a82427c5d466adde671f8342bc6" gracePeriod=30 Jan 20 16:59:22 crc kubenswrapper[4558]: I0120 16:59:22.767942 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"473a463c-85cb-4702-9759-761904072818","Type":"ContainerStarted","Data":"efe3ef9d14657f5a9f606bc1ca5681abed7f791ea57ee1600378d05da399c8a7"} Jan 20 16:59:22 crc kubenswrapper[4558]: I0120 16:59:22.768053 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="ce97e7c6-01a4-4edc-85c2-5fc73a0bdb6e" containerName="nova-metadata-log" containerID="cri-o://ea1faaaee51200f91128202826f05b7fee4131979bc59f5c84e6b22125654f81" gracePeriod=30 Jan 20 16:59:22 crc kubenswrapper[4558]: I0120 16:59:22.768653 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="ce97e7c6-01a4-4edc-85c2-5fc73a0bdb6e" containerName="nova-metadata-metadata" containerID="cri-o://c57d7be4e8c557d5a606fbbd2065d862051cbe59d2527eaeb23df64c71ab3996" gracePeriod=30 Jan 20 16:59:22 crc kubenswrapper[4558]: I0120 16:59:22.810155 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" podStartSLOduration=2.653778142 podStartE2EDuration="5.810141111s" podCreationTimestamp="2026-01-20 16:59:17 +0000 UTC" firstStartedPulling="2026-01-20 16:59:18.501256866 +0000 UTC m=+1052.261594823" lastFinishedPulling="2026-01-20 16:59:21.657619825 +0000 UTC m=+1055.417957792" observedRunningTime="2026-01-20 16:59:22.807859361 +0000 UTC m=+1056.568197328" watchObservedRunningTime="2026-01-20 16:59:22.810141111 +0000 UTC m=+1056.570479079" Jan 20 16:59:22 crc kubenswrapper[4558]: I0120 16:59:22.813808 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-scheduler-0" podStartSLOduration=2.679328081 
podStartE2EDuration="5.813797946s" podCreationTimestamp="2026-01-20 16:59:17 +0000 UTC" firstStartedPulling="2026-01-20 16:59:18.521859604 +0000 UTC m=+1052.282197571" lastFinishedPulling="2026-01-20 16:59:21.656329468 +0000 UTC m=+1055.416667436" observedRunningTime="2026-01-20 16:59:22.785212749 +0000 UTC m=+1056.545550716" watchObservedRunningTime="2026-01-20 16:59:22.813797946 +0000 UTC m=+1056.574135914" Jan 20 16:59:22 crc kubenswrapper[4558]: I0120 16:59:22.979655 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 16:59:22 crc kubenswrapper[4558]: I0120 16:59:22.979694 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 16:59:23 crc kubenswrapper[4558]: I0120 16:59:23.043529 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 16:59:23 crc kubenswrapper[4558]: I0120 16:59:23.156415 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 16:59:23 crc kubenswrapper[4558]: I0120 16:59:23.241628 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 16:59:23 crc kubenswrapper[4558]: I0120 16:59:23.304507 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mrm75\" (UniqueName: \"kubernetes.io/projected/ce97e7c6-01a4-4edc-85c2-5fc73a0bdb6e-kube-api-access-mrm75\") pod \"ce97e7c6-01a4-4edc-85c2-5fc73a0bdb6e\" (UID: \"ce97e7c6-01a4-4edc-85c2-5fc73a0bdb6e\") " Jan 20 16:59:23 crc kubenswrapper[4558]: I0120 16:59:23.304630 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ce97e7c6-01a4-4edc-85c2-5fc73a0bdb6e-logs\") pod \"ce97e7c6-01a4-4edc-85c2-5fc73a0bdb6e\" (UID: \"ce97e7c6-01a4-4edc-85c2-5fc73a0bdb6e\") " Jan 20 16:59:23 crc kubenswrapper[4558]: I0120 16:59:23.304677 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce97e7c6-01a4-4edc-85c2-5fc73a0bdb6e-combined-ca-bundle\") pod \"ce97e7c6-01a4-4edc-85c2-5fc73a0bdb6e\" (UID: \"ce97e7c6-01a4-4edc-85c2-5fc73a0bdb6e\") " Jan 20 16:59:23 crc kubenswrapper[4558]: I0120 16:59:23.304792 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce97e7c6-01a4-4edc-85c2-5fc73a0bdb6e-config-data\") pod \"ce97e7c6-01a4-4edc-85c2-5fc73a0bdb6e\" (UID: \"ce97e7c6-01a4-4edc-85c2-5fc73a0bdb6e\") " Jan 20 16:59:23 crc kubenswrapper[4558]: I0120 16:59:23.304878 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ce97e7c6-01a4-4edc-85c2-5fc73a0bdb6e-logs" (OuterVolumeSpecName: "logs") pod "ce97e7c6-01a4-4edc-85c2-5fc73a0bdb6e" (UID: "ce97e7c6-01a4-4edc-85c2-5fc73a0bdb6e"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 16:59:23 crc kubenswrapper[4558]: I0120 16:59:23.305152 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ce97e7c6-01a4-4edc-85c2-5fc73a0bdb6e-logs\") on node \"crc\" DevicePath \"\"" Jan 20 16:59:23 crc kubenswrapper[4558]: I0120 16:59:23.308005 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ce97e7c6-01a4-4edc-85c2-5fc73a0bdb6e-kube-api-access-mrm75" (OuterVolumeSpecName: "kube-api-access-mrm75") pod "ce97e7c6-01a4-4edc-85c2-5fc73a0bdb6e" (UID: "ce97e7c6-01a4-4edc-85c2-5fc73a0bdb6e"). InnerVolumeSpecName "kube-api-access-mrm75". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:59:23 crc kubenswrapper[4558]: I0120 16:59:23.323071 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce97e7c6-01a4-4edc-85c2-5fc73a0bdb6e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ce97e7c6-01a4-4edc-85c2-5fc73a0bdb6e" (UID: "ce97e7c6-01a4-4edc-85c2-5fc73a0bdb6e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:59:23 crc kubenswrapper[4558]: I0120 16:59:23.324796 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce97e7c6-01a4-4edc-85c2-5fc73a0bdb6e-config-data" (OuterVolumeSpecName: "config-data") pod "ce97e7c6-01a4-4edc-85c2-5fc73a0bdb6e" (UID: "ce97e7c6-01a4-4edc-85c2-5fc73a0bdb6e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:59:23 crc kubenswrapper[4558]: I0120 16:59:23.407146 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce97e7c6-01a4-4edc-85c2-5fc73a0bdb6e-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 16:59:23 crc kubenswrapper[4558]: I0120 16:59:23.407185 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mrm75\" (UniqueName: \"kubernetes.io/projected/ce97e7c6-01a4-4edc-85c2-5fc73a0bdb6e-kube-api-access-mrm75\") on node \"crc\" DevicePath \"\"" Jan 20 16:59:23 crc kubenswrapper[4558]: I0120 16:59:23.407204 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce97e7c6-01a4-4edc-85c2-5fc73a0bdb6e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 16:59:23 crc kubenswrapper[4558]: I0120 16:59:23.794785 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"473a463c-85cb-4702-9759-761904072818","Type":"ContainerStarted","Data":"3a19ed138105b3740685ac28d410913719c1974f6fed6a27ac6a13e7aa6cb0a5"} Jan 20 16:59:23 crc kubenswrapper[4558]: I0120 16:59:23.796474 4558 generic.go:334] "Generic (PLEG): container finished" podID="ec56e0f2-2aaa-458d-8c02-e3dba504c48a" containerID="43e92682c057f8b00344d5b760cd89b42d21a4dde25d4648333ff3c54e65f150" exitCode=0 Jan 20 16:59:23 crc kubenswrapper[4558]: I0120 16:59:23.796524 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-mj7lb" event={"ID":"ec56e0f2-2aaa-458d-8c02-e3dba504c48a","Type":"ContainerDied","Data":"43e92682c057f8b00344d5b760cd89b42d21a4dde25d4648333ff3c54e65f150"} Jan 20 16:59:23 crc kubenswrapper[4558]: I0120 16:59:23.799580 4558 generic.go:334] "Generic (PLEG): container finished" podID="ce97e7c6-01a4-4edc-85c2-5fc73a0bdb6e" 
containerID="c57d7be4e8c557d5a606fbbd2065d862051cbe59d2527eaeb23df64c71ab3996" exitCode=0 Jan 20 16:59:23 crc kubenswrapper[4558]: I0120 16:59:23.799609 4558 generic.go:334] "Generic (PLEG): container finished" podID="ce97e7c6-01a4-4edc-85c2-5fc73a0bdb6e" containerID="ea1faaaee51200f91128202826f05b7fee4131979bc59f5c84e6b22125654f81" exitCode=143 Jan 20 16:59:23 crc kubenswrapper[4558]: I0120 16:59:23.799662 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 16:59:23 crc kubenswrapper[4558]: I0120 16:59:23.799703 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"ce97e7c6-01a4-4edc-85c2-5fc73a0bdb6e","Type":"ContainerDied","Data":"c57d7be4e8c557d5a606fbbd2065d862051cbe59d2527eaeb23df64c71ab3996"} Jan 20 16:59:23 crc kubenswrapper[4558]: I0120 16:59:23.799728 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"ce97e7c6-01a4-4edc-85c2-5fc73a0bdb6e","Type":"ContainerDied","Data":"ea1faaaee51200f91128202826f05b7fee4131979bc59f5c84e6b22125654f81"} Jan 20 16:59:23 crc kubenswrapper[4558]: I0120 16:59:23.799777 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"ce97e7c6-01a4-4edc-85c2-5fc73a0bdb6e","Type":"ContainerDied","Data":"12b9d5e572bfe2b8f86991f0d2f34b55ace0d9f0d3f7a8abd7db9373abf000c7"} Jan 20 16:59:23 crc kubenswrapper[4558]: I0120 16:59:23.799793 4558 scope.go:117] "RemoveContainer" containerID="c57d7be4e8c557d5a606fbbd2065d862051cbe59d2527eaeb23df64c71ab3996" Jan 20 16:59:23 crc kubenswrapper[4558]: I0120 16:59:23.830960 4558 scope.go:117] "RemoveContainer" containerID="ea1faaaee51200f91128202826f05b7fee4131979bc59f5c84e6b22125654f81" Jan 20 16:59:23 crc kubenswrapper[4558]: I0120 16:59:23.833489 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 16:59:23 crc kubenswrapper[4558]: I0120 16:59:23.851358 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 16:59:23 crc kubenswrapper[4558]: I0120 16:59:23.855215 4558 scope.go:117] "RemoveContainer" containerID="c57d7be4e8c557d5a606fbbd2065d862051cbe59d2527eaeb23df64c71ab3996" Jan 20 16:59:23 crc kubenswrapper[4558]: E0120 16:59:23.857337 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c57d7be4e8c557d5a606fbbd2065d862051cbe59d2527eaeb23df64c71ab3996\": container with ID starting with c57d7be4e8c557d5a606fbbd2065d862051cbe59d2527eaeb23df64c71ab3996 not found: ID does not exist" containerID="c57d7be4e8c557d5a606fbbd2065d862051cbe59d2527eaeb23df64c71ab3996" Jan 20 16:59:23 crc kubenswrapper[4558]: I0120 16:59:23.857378 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c57d7be4e8c557d5a606fbbd2065d862051cbe59d2527eaeb23df64c71ab3996"} err="failed to get container status \"c57d7be4e8c557d5a606fbbd2065d862051cbe59d2527eaeb23df64c71ab3996\": rpc error: code = NotFound desc = could not find container \"c57d7be4e8c557d5a606fbbd2065d862051cbe59d2527eaeb23df64c71ab3996\": container with ID starting with c57d7be4e8c557d5a606fbbd2065d862051cbe59d2527eaeb23df64c71ab3996 not found: ID does not exist" Jan 20 16:59:23 crc kubenswrapper[4558]: I0120 16:59:23.857401 4558 scope.go:117] "RemoveContainer" 
containerID="ea1faaaee51200f91128202826f05b7fee4131979bc59f5c84e6b22125654f81" Jan 20 16:59:23 crc kubenswrapper[4558]: E0120 16:59:23.858426 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ea1faaaee51200f91128202826f05b7fee4131979bc59f5c84e6b22125654f81\": container with ID starting with ea1faaaee51200f91128202826f05b7fee4131979bc59f5c84e6b22125654f81 not found: ID does not exist" containerID="ea1faaaee51200f91128202826f05b7fee4131979bc59f5c84e6b22125654f81" Jan 20 16:59:23 crc kubenswrapper[4558]: I0120 16:59:23.858460 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ea1faaaee51200f91128202826f05b7fee4131979bc59f5c84e6b22125654f81"} err="failed to get container status \"ea1faaaee51200f91128202826f05b7fee4131979bc59f5c84e6b22125654f81\": rpc error: code = NotFound desc = could not find container \"ea1faaaee51200f91128202826f05b7fee4131979bc59f5c84e6b22125654f81\": container with ID starting with ea1faaaee51200f91128202826f05b7fee4131979bc59f5c84e6b22125654f81 not found: ID does not exist" Jan 20 16:59:23 crc kubenswrapper[4558]: I0120 16:59:23.858478 4558 scope.go:117] "RemoveContainer" containerID="c57d7be4e8c557d5a606fbbd2065d862051cbe59d2527eaeb23df64c71ab3996" Jan 20 16:59:23 crc kubenswrapper[4558]: I0120 16:59:23.859821 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c57d7be4e8c557d5a606fbbd2065d862051cbe59d2527eaeb23df64c71ab3996"} err="failed to get container status \"c57d7be4e8c557d5a606fbbd2065d862051cbe59d2527eaeb23df64c71ab3996\": rpc error: code = NotFound desc = could not find container \"c57d7be4e8c557d5a606fbbd2065d862051cbe59d2527eaeb23df64c71ab3996\": container with ID starting with c57d7be4e8c557d5a606fbbd2065d862051cbe59d2527eaeb23df64c71ab3996 not found: ID does not exist" Jan 20 16:59:23 crc kubenswrapper[4558]: I0120 16:59:23.859861 4558 scope.go:117] "RemoveContainer" containerID="ea1faaaee51200f91128202826f05b7fee4131979bc59f5c84e6b22125654f81" Jan 20 16:59:23 crc kubenswrapper[4558]: I0120 16:59:23.860111 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ea1faaaee51200f91128202826f05b7fee4131979bc59f5c84e6b22125654f81"} err="failed to get container status \"ea1faaaee51200f91128202826f05b7fee4131979bc59f5c84e6b22125654f81\": rpc error: code = NotFound desc = could not find container \"ea1faaaee51200f91128202826f05b7fee4131979bc59f5c84e6b22125654f81\": container with ID starting with ea1faaaee51200f91128202826f05b7fee4131979bc59f5c84e6b22125654f81 not found: ID does not exist" Jan 20 16:59:23 crc kubenswrapper[4558]: I0120 16:59:23.862294 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 16:59:23 crc kubenswrapper[4558]: E0120 16:59:23.862637 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce97e7c6-01a4-4edc-85c2-5fc73a0bdb6e" containerName="nova-metadata-metadata" Jan 20 16:59:23 crc kubenswrapper[4558]: I0120 16:59:23.862654 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce97e7c6-01a4-4edc-85c2-5fc73a0bdb6e" containerName="nova-metadata-metadata" Jan 20 16:59:23 crc kubenswrapper[4558]: E0120 16:59:23.862670 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce97e7c6-01a4-4edc-85c2-5fc73a0bdb6e" containerName="nova-metadata-log" Jan 20 16:59:23 crc kubenswrapper[4558]: I0120 16:59:23.862676 4558 state_mem.go:107] "Deleted 
CPUSet assignment" podUID="ce97e7c6-01a4-4edc-85c2-5fc73a0bdb6e" containerName="nova-metadata-log" Jan 20 16:59:23 crc kubenswrapper[4558]: I0120 16:59:23.862810 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ce97e7c6-01a4-4edc-85c2-5fc73a0bdb6e" containerName="nova-metadata-metadata" Jan 20 16:59:23 crc kubenswrapper[4558]: I0120 16:59:23.862827 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ce97e7c6-01a4-4edc-85c2-5fc73a0bdb6e" containerName="nova-metadata-log" Jan 20 16:59:23 crc kubenswrapper[4558]: I0120 16:59:23.863655 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 16:59:23 crc kubenswrapper[4558]: I0120 16:59:23.871155 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-metadata-internal-svc" Jan 20 16:59:23 crc kubenswrapper[4558]: I0120 16:59:23.871361 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-metadata-config-data" Jan 20 16:59:23 crc kubenswrapper[4558]: I0120 16:59:23.878654 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 16:59:23 crc kubenswrapper[4558]: I0120 16:59:23.916408 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e6582dae-6890-4971-849d-eca29269fb70-config-data\") pod \"nova-metadata-0\" (UID: \"e6582dae-6890-4971-849d-eca29269fb70\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 16:59:23 crc kubenswrapper[4558]: I0120 16:59:23.916468 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6582dae-6890-4971-849d-eca29269fb70-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"e6582dae-6890-4971-849d-eca29269fb70\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 16:59:23 crc kubenswrapper[4558]: I0120 16:59:23.916502 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/e6582dae-6890-4971-849d-eca29269fb70-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"e6582dae-6890-4971-849d-eca29269fb70\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 16:59:23 crc kubenswrapper[4558]: I0120 16:59:23.916587 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e6582dae-6890-4971-849d-eca29269fb70-logs\") pod \"nova-metadata-0\" (UID: \"e6582dae-6890-4971-849d-eca29269fb70\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 16:59:23 crc kubenswrapper[4558]: I0120 16:59:23.916778 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rk6c5\" (UniqueName: \"kubernetes.io/projected/e6582dae-6890-4971-849d-eca29269fb70-kube-api-access-rk6c5\") pod \"nova-metadata-0\" (UID: \"e6582dae-6890-4971-849d-eca29269fb70\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 16:59:24 crc kubenswrapper[4558]: I0120 16:59:24.017669 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e6582dae-6890-4971-849d-eca29269fb70-config-data\") pod \"nova-metadata-0\" (UID: \"e6582dae-6890-4971-849d-eca29269fb70\") " 
pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 16:59:24 crc kubenswrapper[4558]: I0120 16:59:24.017740 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6582dae-6890-4971-849d-eca29269fb70-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"e6582dae-6890-4971-849d-eca29269fb70\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 16:59:24 crc kubenswrapper[4558]: I0120 16:59:24.017769 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/e6582dae-6890-4971-849d-eca29269fb70-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"e6582dae-6890-4971-849d-eca29269fb70\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 16:59:24 crc kubenswrapper[4558]: I0120 16:59:24.017804 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e6582dae-6890-4971-849d-eca29269fb70-logs\") pod \"nova-metadata-0\" (UID: \"e6582dae-6890-4971-849d-eca29269fb70\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 16:59:24 crc kubenswrapper[4558]: I0120 16:59:24.017847 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rk6c5\" (UniqueName: \"kubernetes.io/projected/e6582dae-6890-4971-849d-eca29269fb70-kube-api-access-rk6c5\") pod \"nova-metadata-0\" (UID: \"e6582dae-6890-4971-849d-eca29269fb70\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 16:59:24 crc kubenswrapper[4558]: I0120 16:59:24.018256 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e6582dae-6890-4971-849d-eca29269fb70-logs\") pod \"nova-metadata-0\" (UID: \"e6582dae-6890-4971-849d-eca29269fb70\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 16:59:24 crc kubenswrapper[4558]: I0120 16:59:24.021778 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/e6582dae-6890-4971-849d-eca29269fb70-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"e6582dae-6890-4971-849d-eca29269fb70\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 16:59:24 crc kubenswrapper[4558]: I0120 16:59:24.022039 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e6582dae-6890-4971-849d-eca29269fb70-config-data\") pod \"nova-metadata-0\" (UID: \"e6582dae-6890-4971-849d-eca29269fb70\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 16:59:24 crc kubenswrapper[4558]: I0120 16:59:24.026846 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6582dae-6890-4971-849d-eca29269fb70-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"e6582dae-6890-4971-849d-eca29269fb70\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 16:59:24 crc kubenswrapper[4558]: I0120 16:59:24.042828 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rk6c5\" (UniqueName: \"kubernetes.io/projected/e6582dae-6890-4971-849d-eca29269fb70-kube-api-access-rk6c5\") pod \"nova-metadata-0\" (UID: \"e6582dae-6890-4971-849d-eca29269fb70\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 16:59:24 crc kubenswrapper[4558]: I0120 16:59:24.186812 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 16:59:24 crc kubenswrapper[4558]: I0120 16:59:24.573886 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ce97e7c6-01a4-4edc-85c2-5fc73a0bdb6e" path="/var/lib/kubelet/pods/ce97e7c6-01a4-4edc-85c2-5fc73a0bdb6e/volumes" Jan 20 16:59:24 crc kubenswrapper[4558]: I0120 16:59:24.574616 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 16:59:24 crc kubenswrapper[4558]: W0120 16:59:24.575380 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode6582dae_6890_4971_849d_eca29269fb70.slice/crio-3457f9179bd4ef56defe49a0a8c09861c52831debdd04889c87822639c825c61 WatchSource:0}: Error finding container 3457f9179bd4ef56defe49a0a8c09861c52831debdd04889c87822639c825c61: Status 404 returned error can't find the container with id 3457f9179bd4ef56defe49a0a8c09861c52831debdd04889c87822639c825c61 Jan 20 16:59:24 crc kubenswrapper[4558]: I0120 16:59:24.809072 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"473a463c-85cb-4702-9759-761904072818","Type":"ContainerStarted","Data":"2825940fb8c15a1edf0410c1dc202cc5c9ccc1417f80fabebfdaac6104c498e5"} Jan 20 16:59:24 crc kubenswrapper[4558]: I0120 16:59:24.810327 4558 generic.go:334] "Generic (PLEG): container finished" podID="6aaeaa31-51e8-451a-9a40-6880c278ee0e" containerID="363d8978a2d2487cb2e7b6e16d0c176bad4492f0362547057829d38e6bcf10f8" exitCode=0 Jan 20 16:59:24 crc kubenswrapper[4558]: I0120 16:59:24.810394 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-wn5b6" event={"ID":"6aaeaa31-51e8-451a-9a40-6880c278ee0e","Type":"ContainerDied","Data":"363d8978a2d2487cb2e7b6e16d0c176bad4492f0362547057829d38e6bcf10f8"} Jan 20 16:59:24 crc kubenswrapper[4558]: I0120 16:59:24.813773 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"e6582dae-6890-4971-849d-eca29269fb70","Type":"ContainerStarted","Data":"35aaf298507a5a639e3dac2b9c8dd012d367f2fa107451230f1ec49b118afc75"} Jan 20 16:59:24 crc kubenswrapper[4558]: I0120 16:59:24.813799 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"e6582dae-6890-4971-849d-eca29269fb70","Type":"ContainerStarted","Data":"3457f9179bd4ef56defe49a0a8c09861c52831debdd04889c87822639c825c61"} Jan 20 16:59:25 crc kubenswrapper[4558]: I0120 16:59:25.023151 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-mj7lb" Jan 20 16:59:25 crc kubenswrapper[4558]: I0120 16:59:25.133859 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec56e0f2-2aaa-458d-8c02-e3dba504c48a-combined-ca-bundle\") pod \"ec56e0f2-2aaa-458d-8c02-e3dba504c48a\" (UID: \"ec56e0f2-2aaa-458d-8c02-e3dba504c48a\") " Jan 20 16:59:25 crc kubenswrapper[4558]: I0120 16:59:25.133926 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7x78m\" (UniqueName: \"kubernetes.io/projected/ec56e0f2-2aaa-458d-8c02-e3dba504c48a-kube-api-access-7x78m\") pod \"ec56e0f2-2aaa-458d-8c02-e3dba504c48a\" (UID: \"ec56e0f2-2aaa-458d-8c02-e3dba504c48a\") " Jan 20 16:59:25 crc kubenswrapper[4558]: I0120 16:59:25.133951 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec56e0f2-2aaa-458d-8c02-e3dba504c48a-config-data\") pod \"ec56e0f2-2aaa-458d-8c02-e3dba504c48a\" (UID: \"ec56e0f2-2aaa-458d-8c02-e3dba504c48a\") " Jan 20 16:59:25 crc kubenswrapper[4558]: I0120 16:59:25.133991 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ec56e0f2-2aaa-458d-8c02-e3dba504c48a-scripts\") pod \"ec56e0f2-2aaa-458d-8c02-e3dba504c48a\" (UID: \"ec56e0f2-2aaa-458d-8c02-e3dba504c48a\") " Jan 20 16:59:25 crc kubenswrapper[4558]: I0120 16:59:25.137285 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec56e0f2-2aaa-458d-8c02-e3dba504c48a-scripts" (OuterVolumeSpecName: "scripts") pod "ec56e0f2-2aaa-458d-8c02-e3dba504c48a" (UID: "ec56e0f2-2aaa-458d-8c02-e3dba504c48a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:59:25 crc kubenswrapper[4558]: I0120 16:59:25.137617 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ec56e0f2-2aaa-458d-8c02-e3dba504c48a-kube-api-access-7x78m" (OuterVolumeSpecName: "kube-api-access-7x78m") pod "ec56e0f2-2aaa-458d-8c02-e3dba504c48a" (UID: "ec56e0f2-2aaa-458d-8c02-e3dba504c48a"). InnerVolumeSpecName "kube-api-access-7x78m". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:59:25 crc kubenswrapper[4558]: I0120 16:59:25.152301 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec56e0f2-2aaa-458d-8c02-e3dba504c48a-config-data" (OuterVolumeSpecName: "config-data") pod "ec56e0f2-2aaa-458d-8c02-e3dba504c48a" (UID: "ec56e0f2-2aaa-458d-8c02-e3dba504c48a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:59:25 crc kubenswrapper[4558]: I0120 16:59:25.153203 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec56e0f2-2aaa-458d-8c02-e3dba504c48a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ec56e0f2-2aaa-458d-8c02-e3dba504c48a" (UID: "ec56e0f2-2aaa-458d-8c02-e3dba504c48a"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:59:25 crc kubenswrapper[4558]: I0120 16:59:25.235785 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec56e0f2-2aaa-458d-8c02-e3dba504c48a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 16:59:25 crc kubenswrapper[4558]: I0120 16:59:25.235812 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7x78m\" (UniqueName: \"kubernetes.io/projected/ec56e0f2-2aaa-458d-8c02-e3dba504c48a-kube-api-access-7x78m\") on node \"crc\" DevicePath \"\"" Jan 20 16:59:25 crc kubenswrapper[4558]: I0120 16:59:25.235823 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec56e0f2-2aaa-458d-8c02-e3dba504c48a-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 16:59:25 crc kubenswrapper[4558]: I0120 16:59:25.235830 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ec56e0f2-2aaa-458d-8c02-e3dba504c48a-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 16:59:25 crc kubenswrapper[4558]: I0120 16:59:25.822401 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-mj7lb" Jan 20 16:59:25 crc kubenswrapper[4558]: I0120 16:59:25.823795 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-mj7lb" event={"ID":"ec56e0f2-2aaa-458d-8c02-e3dba504c48a","Type":"ContainerDied","Data":"e2fb53677466478de0c20c5c25e1f67b5878b8e7d27339e8986cd090ae6dee8a"} Jan 20 16:59:25 crc kubenswrapper[4558]: I0120 16:59:25.823837 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e2fb53677466478de0c20c5c25e1f67b5878b8e7d27339e8986cd090ae6dee8a" Jan 20 16:59:25 crc kubenswrapper[4558]: I0120 16:59:25.828846 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"e6582dae-6890-4971-849d-eca29269fb70","Type":"ContainerStarted","Data":"85b649ace58a22b44216836181388597a07a3b4273a312b767cd9d3f0541e020"} Jan 20 16:59:25 crc kubenswrapper[4558]: I0120 16:59:25.861141 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-metadata-0" podStartSLOduration=2.861123115 podStartE2EDuration="2.861123115s" podCreationTimestamp="2026-01-20 16:59:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:59:25.845520658 +0000 UTC m=+1059.605858625" watchObservedRunningTime="2026-01-20 16:59:25.861123115 +0000 UTC m=+1059.621461082" Jan 20 16:59:25 crc kubenswrapper[4558]: I0120 16:59:25.873714 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 16:59:25 crc kubenswrapper[4558]: E0120 16:59:25.874047 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec56e0f2-2aaa-458d-8c02-e3dba504c48a" containerName="nova-cell1-conductor-db-sync" Jan 20 16:59:25 crc kubenswrapper[4558]: I0120 16:59:25.874066 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec56e0f2-2aaa-458d-8c02-e3dba504c48a" containerName="nova-cell1-conductor-db-sync" Jan 20 16:59:25 crc kubenswrapper[4558]: I0120 16:59:25.874328 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec56e0f2-2aaa-458d-8c02-e3dba504c48a" 
containerName="nova-cell1-conductor-db-sync" Jan 20 16:59:25 crc kubenswrapper[4558]: I0120 16:59:25.874842 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 16:59:25 crc kubenswrapper[4558]: I0120 16:59:25.876067 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-conductor-config-data" Jan 20 16:59:25 crc kubenswrapper[4558]: I0120 16:59:25.884447 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 16:59:25 crc kubenswrapper[4558]: I0120 16:59:25.951703 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/180488ea-6eeb-4078-9b57-351bdfb54f5d-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"180488ea-6eeb-4078-9b57-351bdfb54f5d\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 16:59:25 crc kubenswrapper[4558]: I0120 16:59:25.951753 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/180488ea-6eeb-4078-9b57-351bdfb54f5d-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"180488ea-6eeb-4078-9b57-351bdfb54f5d\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 16:59:25 crc kubenswrapper[4558]: I0120 16:59:25.951845 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tp72f\" (UniqueName: \"kubernetes.io/projected/180488ea-6eeb-4078-9b57-351bdfb54f5d-kube-api-access-tp72f\") pod \"nova-cell1-conductor-0\" (UID: \"180488ea-6eeb-4078-9b57-351bdfb54f5d\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 16:59:26 crc kubenswrapper[4558]: I0120 16:59:26.053863 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/180488ea-6eeb-4078-9b57-351bdfb54f5d-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"180488ea-6eeb-4078-9b57-351bdfb54f5d\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 16:59:26 crc kubenswrapper[4558]: I0120 16:59:26.053900 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/180488ea-6eeb-4078-9b57-351bdfb54f5d-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"180488ea-6eeb-4078-9b57-351bdfb54f5d\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 16:59:26 crc kubenswrapper[4558]: I0120 16:59:26.053944 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tp72f\" (UniqueName: \"kubernetes.io/projected/180488ea-6eeb-4078-9b57-351bdfb54f5d-kube-api-access-tp72f\") pod \"nova-cell1-conductor-0\" (UID: \"180488ea-6eeb-4078-9b57-351bdfb54f5d\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 16:59:26 crc kubenswrapper[4558]: I0120 16:59:26.056602 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/180488ea-6eeb-4078-9b57-351bdfb54f5d-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"180488ea-6eeb-4078-9b57-351bdfb54f5d\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 16:59:26 crc kubenswrapper[4558]: I0120 16:59:26.057690 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/180488ea-6eeb-4078-9b57-351bdfb54f5d-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"180488ea-6eeb-4078-9b57-351bdfb54f5d\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 16:59:26 crc kubenswrapper[4558]: I0120 16:59:26.066618 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tp72f\" (UniqueName: \"kubernetes.io/projected/180488ea-6eeb-4078-9b57-351bdfb54f5d-kube-api-access-tp72f\") pod \"nova-cell1-conductor-0\" (UID: \"180488ea-6eeb-4078-9b57-351bdfb54f5d\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 16:59:26 crc kubenswrapper[4558]: I0120 16:59:26.152334 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-wn5b6" Jan 20 16:59:26 crc kubenswrapper[4558]: I0120 16:59:26.188422 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 16:59:26 crc kubenswrapper[4558]: I0120 16:59:26.256285 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6aaeaa31-51e8-451a-9a40-6880c278ee0e-combined-ca-bundle\") pod \"6aaeaa31-51e8-451a-9a40-6880c278ee0e\" (UID: \"6aaeaa31-51e8-451a-9a40-6880c278ee0e\") " Jan 20 16:59:26 crc kubenswrapper[4558]: I0120 16:59:26.256320 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6aaeaa31-51e8-451a-9a40-6880c278ee0e-config-data\") pod \"6aaeaa31-51e8-451a-9a40-6880c278ee0e\" (UID: \"6aaeaa31-51e8-451a-9a40-6880c278ee0e\") " Jan 20 16:59:26 crc kubenswrapper[4558]: I0120 16:59:26.256401 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6aaeaa31-51e8-451a-9a40-6880c278ee0e-scripts\") pod \"6aaeaa31-51e8-451a-9a40-6880c278ee0e\" (UID: \"6aaeaa31-51e8-451a-9a40-6880c278ee0e\") " Jan 20 16:59:26 crc kubenswrapper[4558]: I0120 16:59:26.256503 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nsm8k\" (UniqueName: \"kubernetes.io/projected/6aaeaa31-51e8-451a-9a40-6880c278ee0e-kube-api-access-nsm8k\") pod \"6aaeaa31-51e8-451a-9a40-6880c278ee0e\" (UID: \"6aaeaa31-51e8-451a-9a40-6880c278ee0e\") " Jan 20 16:59:26 crc kubenswrapper[4558]: I0120 16:59:26.259032 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6aaeaa31-51e8-451a-9a40-6880c278ee0e-scripts" (OuterVolumeSpecName: "scripts") pod "6aaeaa31-51e8-451a-9a40-6880c278ee0e" (UID: "6aaeaa31-51e8-451a-9a40-6880c278ee0e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:59:26 crc kubenswrapper[4558]: I0120 16:59:26.261765 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6aaeaa31-51e8-451a-9a40-6880c278ee0e-kube-api-access-nsm8k" (OuterVolumeSpecName: "kube-api-access-nsm8k") pod "6aaeaa31-51e8-451a-9a40-6880c278ee0e" (UID: "6aaeaa31-51e8-451a-9a40-6880c278ee0e"). InnerVolumeSpecName "kube-api-access-nsm8k". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:59:26 crc kubenswrapper[4558]: I0120 16:59:26.282358 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6aaeaa31-51e8-451a-9a40-6880c278ee0e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6aaeaa31-51e8-451a-9a40-6880c278ee0e" (UID: "6aaeaa31-51e8-451a-9a40-6880c278ee0e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:59:26 crc kubenswrapper[4558]: I0120 16:59:26.290237 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6aaeaa31-51e8-451a-9a40-6880c278ee0e-config-data" (OuterVolumeSpecName: "config-data") pod "6aaeaa31-51e8-451a-9a40-6880c278ee0e" (UID: "6aaeaa31-51e8-451a-9a40-6880c278ee0e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:59:26 crc kubenswrapper[4558]: I0120 16:59:26.358520 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nsm8k\" (UniqueName: \"kubernetes.io/projected/6aaeaa31-51e8-451a-9a40-6880c278ee0e-kube-api-access-nsm8k\") on node \"crc\" DevicePath \"\"" Jan 20 16:59:26 crc kubenswrapper[4558]: I0120 16:59:26.358556 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6aaeaa31-51e8-451a-9a40-6880c278ee0e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 16:59:26 crc kubenswrapper[4558]: I0120 16:59:26.358567 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6aaeaa31-51e8-451a-9a40-6880c278ee0e-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 16:59:26 crc kubenswrapper[4558]: I0120 16:59:26.358575 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6aaeaa31-51e8-451a-9a40-6880c278ee0e-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 16:59:26 crc kubenswrapper[4558]: I0120 16:59:26.570419 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 16:59:26 crc kubenswrapper[4558]: I0120 16:59:26.841840 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"180488ea-6eeb-4078-9b57-351bdfb54f5d","Type":"ContainerStarted","Data":"14704db98182ec5a7c903f3bee7ae82b53751a4fd86ecd1c586dd1360d6439c9"} Jan 20 16:59:26 crc kubenswrapper[4558]: I0120 16:59:26.842058 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"180488ea-6eeb-4078-9b57-351bdfb54f5d","Type":"ContainerStarted","Data":"d3145793fb8cab60f10ee8f1aabd5a6b5c95d52c99e804e0b0de25fad4c4cbf0"} Jan 20 16:59:26 crc kubenswrapper[4558]: I0120 16:59:26.842178 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 16:59:26 crc kubenswrapper[4558]: I0120 16:59:26.844625 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"473a463c-85cb-4702-9759-761904072818","Type":"ContainerStarted","Data":"9a54cdc016f9f70eb47e9944e8d06f082661c74136b8d28a93740b3b4fcb3687"} Jan 20 16:59:26 crc kubenswrapper[4558]: I0120 16:59:26.845424 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:59:26 crc kubenswrapper[4558]: I0120 16:59:26.847052 4558 util.go:48] 
"No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-wn5b6" Jan 20 16:59:26 crc kubenswrapper[4558]: I0120 16:59:26.847449 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-wn5b6" event={"ID":"6aaeaa31-51e8-451a-9a40-6880c278ee0e","Type":"ContainerDied","Data":"321734cbbeb4acd9ab0fe4553e13077b91b55873213ca8a318c340180975a971"} Jan 20 16:59:26 crc kubenswrapper[4558]: I0120 16:59:26.847467 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="321734cbbeb4acd9ab0fe4553e13077b91b55873213ca8a318c340180975a971" Jan 20 16:59:26 crc kubenswrapper[4558]: I0120 16:59:26.862343 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podStartSLOduration=1.862333355 podStartE2EDuration="1.862333355s" podCreationTimestamp="2026-01-20 16:59:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:59:26.855973768 +0000 UTC m=+1060.616311734" watchObservedRunningTime="2026-01-20 16:59:26.862333355 +0000 UTC m=+1060.622671312" Jan 20 16:59:26 crc kubenswrapper[4558]: I0120 16:59:26.876022 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=3.5084452 podStartE2EDuration="7.876007136s" podCreationTimestamp="2026-01-20 16:59:19 +0000 UTC" firstStartedPulling="2026-01-20 16:59:21.651290153 +0000 UTC m=+1055.411628120" lastFinishedPulling="2026-01-20 16:59:26.018852089 +0000 UTC m=+1059.779190056" observedRunningTime="2026-01-20 16:59:26.87201521 +0000 UTC m=+1060.632353176" watchObservedRunningTime="2026-01-20 16:59:26.876007136 +0000 UTC m=+1060.636345103" Jan 20 16:59:26 crc kubenswrapper[4558]: I0120 16:59:26.991626 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 16:59:26 crc kubenswrapper[4558]: I0120 16:59:26.991789 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="f5484df8-2512-48d4-af79-ea72bf7c02b0" containerName="nova-api-log" containerID="cri-o://6b5e284c69a12941754fdf4cd1d81b50fbf2bbe0fa967f797f258dd94e2ccebf" gracePeriod=30 Jan 20 16:59:26 crc kubenswrapper[4558]: I0120 16:59:26.991873 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="f5484df8-2512-48d4-af79-ea72bf7c02b0" containerName="nova-api-api" containerID="cri-o://59c4f6d7ed8eb3d57f30987913f9fd8cd4d6bcba4c783d8566dd6559f9755936" gracePeriod=30 Jan 20 16:59:27 crc kubenswrapper[4558]: I0120 16:59:27.012410 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 16:59:27 crc kubenswrapper[4558]: I0120 16:59:27.012558 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="900fb0c7-ddb0-46b3-82f7-cdd543afa534" containerName="nova-scheduler-scheduler" containerID="cri-o://08ed39808896b2786fe6ca35a6d00b3c1ef2d42417098de4bd4d04960cf43db1" gracePeriod=30 Jan 20 16:59:27 crc kubenswrapper[4558]: I0120 16:59:27.022034 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 16:59:27 crc kubenswrapper[4558]: I0120 16:59:27.329492 4558 patch_prober.go:28] interesting 
pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 16:59:27 crc kubenswrapper[4558]: I0120 16:59:27.329528 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 16:59:27 crc kubenswrapper[4558]: I0120 16:59:27.453392 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 16:59:27 crc kubenswrapper[4558]: I0120 16:59:27.481848 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5484df8-2512-48d4-af79-ea72bf7c02b0-config-data\") pod \"f5484df8-2512-48d4-af79-ea72bf7c02b0\" (UID: \"f5484df8-2512-48d4-af79-ea72bf7c02b0\") " Jan 20 16:59:27 crc kubenswrapper[4558]: I0120 16:59:27.482152 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5484df8-2512-48d4-af79-ea72bf7c02b0-combined-ca-bundle\") pod \"f5484df8-2512-48d4-af79-ea72bf7c02b0\" (UID: \"f5484df8-2512-48d4-af79-ea72bf7c02b0\") " Jan 20 16:59:27 crc kubenswrapper[4558]: I0120 16:59:27.482553 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f5484df8-2512-48d4-af79-ea72bf7c02b0-logs\") pod \"f5484df8-2512-48d4-af79-ea72bf7c02b0\" (UID: \"f5484df8-2512-48d4-af79-ea72bf7c02b0\") " Jan 20 16:59:27 crc kubenswrapper[4558]: I0120 16:59:27.482629 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ch6jz\" (UniqueName: \"kubernetes.io/projected/f5484df8-2512-48d4-af79-ea72bf7c02b0-kube-api-access-ch6jz\") pod \"f5484df8-2512-48d4-af79-ea72bf7c02b0\" (UID: \"f5484df8-2512-48d4-af79-ea72bf7c02b0\") " Jan 20 16:59:27 crc kubenswrapper[4558]: I0120 16:59:27.482860 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f5484df8-2512-48d4-af79-ea72bf7c02b0-logs" (OuterVolumeSpecName: "logs") pod "f5484df8-2512-48d4-af79-ea72bf7c02b0" (UID: "f5484df8-2512-48d4-af79-ea72bf7c02b0"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 16:59:27 crc kubenswrapper[4558]: I0120 16:59:27.483593 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f5484df8-2512-48d4-af79-ea72bf7c02b0-logs\") on node \"crc\" DevicePath \"\"" Jan 20 16:59:27 crc kubenswrapper[4558]: I0120 16:59:27.485677 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f5484df8-2512-48d4-af79-ea72bf7c02b0-kube-api-access-ch6jz" (OuterVolumeSpecName: "kube-api-access-ch6jz") pod "f5484df8-2512-48d4-af79-ea72bf7c02b0" (UID: "f5484df8-2512-48d4-af79-ea72bf7c02b0"). InnerVolumeSpecName "kube-api-access-ch6jz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:59:27 crc kubenswrapper[4558]: I0120 16:59:27.501318 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5484df8-2512-48d4-af79-ea72bf7c02b0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f5484df8-2512-48d4-af79-ea72bf7c02b0" (UID: "f5484df8-2512-48d4-af79-ea72bf7c02b0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:59:27 crc kubenswrapper[4558]: I0120 16:59:27.502988 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5484df8-2512-48d4-af79-ea72bf7c02b0-config-data" (OuterVolumeSpecName: "config-data") pod "f5484df8-2512-48d4-af79-ea72bf7c02b0" (UID: "f5484df8-2512-48d4-af79-ea72bf7c02b0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:59:27 crc kubenswrapper[4558]: I0120 16:59:27.585151 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5484df8-2512-48d4-af79-ea72bf7c02b0-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 16:59:27 crc kubenswrapper[4558]: I0120 16:59:27.585211 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5484df8-2512-48d4-af79-ea72bf7c02b0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 16:59:27 crc kubenswrapper[4558]: I0120 16:59:27.585222 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ch6jz\" (UniqueName: \"kubernetes.io/projected/f5484df8-2512-48d4-af79-ea72bf7c02b0-kube-api-access-ch6jz\") on node \"crc\" DevicePath \"\"" Jan 20 16:59:27 crc kubenswrapper[4558]: I0120 16:59:27.854496 4558 generic.go:334] "Generic (PLEG): container finished" podID="f5484df8-2512-48d4-af79-ea72bf7c02b0" containerID="59c4f6d7ed8eb3d57f30987913f9fd8cd4d6bcba4c783d8566dd6559f9755936" exitCode=0 Jan 20 16:59:27 crc kubenswrapper[4558]: I0120 16:59:27.854525 4558 generic.go:334] "Generic (PLEG): container finished" podID="f5484df8-2512-48d4-af79-ea72bf7c02b0" containerID="6b5e284c69a12941754fdf4cd1d81b50fbf2bbe0fa967f797f258dd94e2ccebf" exitCode=143 Jan 20 16:59:27 crc kubenswrapper[4558]: I0120 16:59:27.854666 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="e6582dae-6890-4971-849d-eca29269fb70" containerName="nova-metadata-log" containerID="cri-o://35aaf298507a5a639e3dac2b9c8dd012d367f2fa107451230f1ec49b118afc75" gracePeriod=30 Jan 20 16:59:27 crc kubenswrapper[4558]: I0120 16:59:27.854883 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 16:59:27 crc kubenswrapper[4558]: I0120 16:59:27.856294 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"f5484df8-2512-48d4-af79-ea72bf7c02b0","Type":"ContainerDied","Data":"59c4f6d7ed8eb3d57f30987913f9fd8cd4d6bcba4c783d8566dd6559f9755936"} Jan 20 16:59:27 crc kubenswrapper[4558]: I0120 16:59:27.856353 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"f5484df8-2512-48d4-af79-ea72bf7c02b0","Type":"ContainerDied","Data":"6b5e284c69a12941754fdf4cd1d81b50fbf2bbe0fa967f797f258dd94e2ccebf"} Jan 20 16:59:27 crc kubenswrapper[4558]: I0120 16:59:27.856364 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"f5484df8-2512-48d4-af79-ea72bf7c02b0","Type":"ContainerDied","Data":"b6f2e4f8e15632dcec10e3b72ffa7aab39a917fac4297111958939cd502216c7"} Jan 20 16:59:27 crc kubenswrapper[4558]: I0120 16:59:27.856379 4558 scope.go:117] "RemoveContainer" containerID="59c4f6d7ed8eb3d57f30987913f9fd8cd4d6bcba4c783d8566dd6559f9755936" Jan 20 16:59:27 crc kubenswrapper[4558]: I0120 16:59:27.856486 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="e6582dae-6890-4971-849d-eca29269fb70" containerName="nova-metadata-metadata" containerID="cri-o://85b649ace58a22b44216836181388597a07a3b4273a312b767cd9d3f0541e020" gracePeriod=30 Jan 20 16:59:27 crc kubenswrapper[4558]: I0120 16:59:27.873006 4558 scope.go:117] "RemoveContainer" containerID="6b5e284c69a12941754fdf4cd1d81b50fbf2bbe0fa967f797f258dd94e2ccebf" Jan 20 16:59:27 crc kubenswrapper[4558]: I0120 16:59:27.893911 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 16:59:27 crc kubenswrapper[4558]: I0120 16:59:27.902349 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 16:59:27 crc kubenswrapper[4558]: I0120 16:59:27.905891 4558 scope.go:117] "RemoveContainer" containerID="59c4f6d7ed8eb3d57f30987913f9fd8cd4d6bcba4c783d8566dd6559f9755936" Jan 20 16:59:27 crc kubenswrapper[4558]: E0120 16:59:27.906322 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"59c4f6d7ed8eb3d57f30987913f9fd8cd4d6bcba4c783d8566dd6559f9755936\": container with ID starting with 59c4f6d7ed8eb3d57f30987913f9fd8cd4d6bcba4c783d8566dd6559f9755936 not found: ID does not exist" containerID="59c4f6d7ed8eb3d57f30987913f9fd8cd4d6bcba4c783d8566dd6559f9755936" Jan 20 16:59:27 crc kubenswrapper[4558]: I0120 16:59:27.906353 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"59c4f6d7ed8eb3d57f30987913f9fd8cd4d6bcba4c783d8566dd6559f9755936"} err="failed to get container status \"59c4f6d7ed8eb3d57f30987913f9fd8cd4d6bcba4c783d8566dd6559f9755936\": rpc error: code = NotFound desc = could not find container \"59c4f6d7ed8eb3d57f30987913f9fd8cd4d6bcba4c783d8566dd6559f9755936\": container with ID starting with 59c4f6d7ed8eb3d57f30987913f9fd8cd4d6bcba4c783d8566dd6559f9755936 not found: ID does not exist" Jan 20 16:59:27 crc kubenswrapper[4558]: I0120 16:59:27.906378 4558 scope.go:117] "RemoveContainer" containerID="6b5e284c69a12941754fdf4cd1d81b50fbf2bbe0fa967f797f258dd94e2ccebf" Jan 20 16:59:27 crc kubenswrapper[4558]: E0120 16:59:27.906687 4558 log.go:32] "ContainerStatus from runtime 
service failed" err="rpc error: code = NotFound desc = could not find container \"6b5e284c69a12941754fdf4cd1d81b50fbf2bbe0fa967f797f258dd94e2ccebf\": container with ID starting with 6b5e284c69a12941754fdf4cd1d81b50fbf2bbe0fa967f797f258dd94e2ccebf not found: ID does not exist" containerID="6b5e284c69a12941754fdf4cd1d81b50fbf2bbe0fa967f797f258dd94e2ccebf" Jan 20 16:59:27 crc kubenswrapper[4558]: I0120 16:59:27.906716 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6b5e284c69a12941754fdf4cd1d81b50fbf2bbe0fa967f797f258dd94e2ccebf"} err="failed to get container status \"6b5e284c69a12941754fdf4cd1d81b50fbf2bbe0fa967f797f258dd94e2ccebf\": rpc error: code = NotFound desc = could not find container \"6b5e284c69a12941754fdf4cd1d81b50fbf2bbe0fa967f797f258dd94e2ccebf\": container with ID starting with 6b5e284c69a12941754fdf4cd1d81b50fbf2bbe0fa967f797f258dd94e2ccebf not found: ID does not exist" Jan 20 16:59:27 crc kubenswrapper[4558]: I0120 16:59:27.906746 4558 scope.go:117] "RemoveContainer" containerID="59c4f6d7ed8eb3d57f30987913f9fd8cd4d6bcba4c783d8566dd6559f9755936" Jan 20 16:59:27 crc kubenswrapper[4558]: I0120 16:59:27.906923 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"59c4f6d7ed8eb3d57f30987913f9fd8cd4d6bcba4c783d8566dd6559f9755936"} err="failed to get container status \"59c4f6d7ed8eb3d57f30987913f9fd8cd4d6bcba4c783d8566dd6559f9755936\": rpc error: code = NotFound desc = could not find container \"59c4f6d7ed8eb3d57f30987913f9fd8cd4d6bcba4c783d8566dd6559f9755936\": container with ID starting with 59c4f6d7ed8eb3d57f30987913f9fd8cd4d6bcba4c783d8566dd6559f9755936 not found: ID does not exist" Jan 20 16:59:27 crc kubenswrapper[4558]: I0120 16:59:27.906937 4558 scope.go:117] "RemoveContainer" containerID="6b5e284c69a12941754fdf4cd1d81b50fbf2bbe0fa967f797f258dd94e2ccebf" Jan 20 16:59:27 crc kubenswrapper[4558]: I0120 16:59:27.907090 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6b5e284c69a12941754fdf4cd1d81b50fbf2bbe0fa967f797f258dd94e2ccebf"} err="failed to get container status \"6b5e284c69a12941754fdf4cd1d81b50fbf2bbe0fa967f797f258dd94e2ccebf\": rpc error: code = NotFound desc = could not find container \"6b5e284c69a12941754fdf4cd1d81b50fbf2bbe0fa967f797f258dd94e2ccebf\": container with ID starting with 6b5e284c69a12941754fdf4cd1d81b50fbf2bbe0fa967f797f258dd94e2ccebf not found: ID does not exist" Jan 20 16:59:27 crc kubenswrapper[4558]: I0120 16:59:27.909086 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 16:59:27 crc kubenswrapper[4558]: E0120 16:59:27.909423 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5484df8-2512-48d4-af79-ea72bf7c02b0" containerName="nova-api-log" Jan 20 16:59:27 crc kubenswrapper[4558]: I0120 16:59:27.909440 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5484df8-2512-48d4-af79-ea72bf7c02b0" containerName="nova-api-log" Jan 20 16:59:27 crc kubenswrapper[4558]: E0120 16:59:27.909456 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5484df8-2512-48d4-af79-ea72bf7c02b0" containerName="nova-api-api" Jan 20 16:59:27 crc kubenswrapper[4558]: I0120 16:59:27.909462 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5484df8-2512-48d4-af79-ea72bf7c02b0" containerName="nova-api-api" Jan 20 16:59:27 crc kubenswrapper[4558]: E0120 16:59:27.909481 4558 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="6aaeaa31-51e8-451a-9a40-6880c278ee0e" containerName="nova-manage" Jan 20 16:59:27 crc kubenswrapper[4558]: I0120 16:59:27.909486 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6aaeaa31-51e8-451a-9a40-6880c278ee0e" containerName="nova-manage" Jan 20 16:59:27 crc kubenswrapper[4558]: I0120 16:59:27.909640 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5484df8-2512-48d4-af79-ea72bf7c02b0" containerName="nova-api-api" Jan 20 16:59:27 crc kubenswrapper[4558]: I0120 16:59:27.909666 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5484df8-2512-48d4-af79-ea72bf7c02b0" containerName="nova-api-log" Jan 20 16:59:27 crc kubenswrapper[4558]: I0120 16:59:27.909677 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="6aaeaa31-51e8-451a-9a40-6880c278ee0e" containerName="nova-manage" Jan 20 16:59:27 crc kubenswrapper[4558]: I0120 16:59:27.910495 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 16:59:27 crc kubenswrapper[4558]: I0120 16:59:27.912105 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-api-config-data" Jan 20 16:59:27 crc kubenswrapper[4558]: I0120 16:59:27.914915 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 16:59:27 crc kubenswrapper[4558]: I0120 16:59:27.992720 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ef06289e-606c-4644-899d-e80e21228977-logs\") pod \"nova-api-0\" (UID: \"ef06289e-606c-4644-899d-e80e21228977\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 16:59:27 crc kubenswrapper[4558]: I0120 16:59:27.992840 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef06289e-606c-4644-899d-e80e21228977-config-data\") pod \"nova-api-0\" (UID: \"ef06289e-606c-4644-899d-e80e21228977\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 16:59:27 crc kubenswrapper[4558]: I0120 16:59:27.992866 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef06289e-606c-4644-899d-e80e21228977-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"ef06289e-606c-4644-899d-e80e21228977\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 16:59:27 crc kubenswrapper[4558]: I0120 16:59:27.992879 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-54vzs\" (UniqueName: \"kubernetes.io/projected/ef06289e-606c-4644-899d-e80e21228977-kube-api-access-54vzs\") pod \"nova-api-0\" (UID: \"ef06289e-606c-4644-899d-e80e21228977\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 16:59:28 crc kubenswrapper[4558]: I0120 16:59:28.094679 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ef06289e-606c-4644-899d-e80e21228977-logs\") pod \"nova-api-0\" (UID: \"ef06289e-606c-4644-899d-e80e21228977\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 16:59:28 crc kubenswrapper[4558]: I0120 16:59:28.094874 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef06289e-606c-4644-899d-e80e21228977-config-data\") pod \"nova-api-0\" (UID: 
\"ef06289e-606c-4644-899d-e80e21228977\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 16:59:28 crc kubenswrapper[4558]: I0120 16:59:28.094908 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef06289e-606c-4644-899d-e80e21228977-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"ef06289e-606c-4644-899d-e80e21228977\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 16:59:28 crc kubenswrapper[4558]: I0120 16:59:28.094924 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-54vzs\" (UniqueName: \"kubernetes.io/projected/ef06289e-606c-4644-899d-e80e21228977-kube-api-access-54vzs\") pod \"nova-api-0\" (UID: \"ef06289e-606c-4644-899d-e80e21228977\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 16:59:28 crc kubenswrapper[4558]: I0120 16:59:28.095049 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ef06289e-606c-4644-899d-e80e21228977-logs\") pod \"nova-api-0\" (UID: \"ef06289e-606c-4644-899d-e80e21228977\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 16:59:28 crc kubenswrapper[4558]: I0120 16:59:28.098398 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef06289e-606c-4644-899d-e80e21228977-config-data\") pod \"nova-api-0\" (UID: \"ef06289e-606c-4644-899d-e80e21228977\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 16:59:28 crc kubenswrapper[4558]: I0120 16:59:28.098811 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef06289e-606c-4644-899d-e80e21228977-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"ef06289e-606c-4644-899d-e80e21228977\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 16:59:28 crc kubenswrapper[4558]: I0120 16:59:28.109245 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-54vzs\" (UniqueName: \"kubernetes.io/projected/ef06289e-606c-4644-899d-e80e21228977-kube-api-access-54vzs\") pod \"nova-api-0\" (UID: \"ef06289e-606c-4644-899d-e80e21228977\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 16:59:28 crc kubenswrapper[4558]: I0120 16:59:28.231789 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 16:59:28 crc kubenswrapper[4558]: I0120 16:59:28.288710 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 16:59:28 crc kubenswrapper[4558]: I0120 16:59:28.398411 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/e6582dae-6890-4971-849d-eca29269fb70-nova-metadata-tls-certs\") pod \"e6582dae-6890-4971-849d-eca29269fb70\" (UID: \"e6582dae-6890-4971-849d-eca29269fb70\") " Jan 20 16:59:28 crc kubenswrapper[4558]: I0120 16:59:28.398493 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e6582dae-6890-4971-849d-eca29269fb70-config-data\") pod \"e6582dae-6890-4971-849d-eca29269fb70\" (UID: \"e6582dae-6890-4971-849d-eca29269fb70\") " Jan 20 16:59:28 crc kubenswrapper[4558]: I0120 16:59:28.398561 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e6582dae-6890-4971-849d-eca29269fb70-logs\") pod \"e6582dae-6890-4971-849d-eca29269fb70\" (UID: \"e6582dae-6890-4971-849d-eca29269fb70\") " Jan 20 16:59:28 crc kubenswrapper[4558]: I0120 16:59:28.398631 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6582dae-6890-4971-849d-eca29269fb70-combined-ca-bundle\") pod \"e6582dae-6890-4971-849d-eca29269fb70\" (UID: \"e6582dae-6890-4971-849d-eca29269fb70\") " Jan 20 16:59:28 crc kubenswrapper[4558]: I0120 16:59:28.398720 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rk6c5\" (UniqueName: \"kubernetes.io/projected/e6582dae-6890-4971-849d-eca29269fb70-kube-api-access-rk6c5\") pod \"e6582dae-6890-4971-849d-eca29269fb70\" (UID: \"e6582dae-6890-4971-849d-eca29269fb70\") " Jan 20 16:59:28 crc kubenswrapper[4558]: I0120 16:59:28.398915 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e6582dae-6890-4971-849d-eca29269fb70-logs" (OuterVolumeSpecName: "logs") pod "e6582dae-6890-4971-849d-eca29269fb70" (UID: "e6582dae-6890-4971-849d-eca29269fb70"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 16:59:28 crc kubenswrapper[4558]: I0120 16:59:28.399062 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e6582dae-6890-4971-849d-eca29269fb70-logs\") on node \"crc\" DevicePath \"\"" Jan 20 16:59:28 crc kubenswrapper[4558]: I0120 16:59:28.403186 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e6582dae-6890-4971-849d-eca29269fb70-kube-api-access-rk6c5" (OuterVolumeSpecName: "kube-api-access-rk6c5") pod "e6582dae-6890-4971-849d-eca29269fb70" (UID: "e6582dae-6890-4971-849d-eca29269fb70"). InnerVolumeSpecName "kube-api-access-rk6c5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:59:28 crc kubenswrapper[4558]: I0120 16:59:28.426509 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e6582dae-6890-4971-849d-eca29269fb70-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e6582dae-6890-4971-849d-eca29269fb70" (UID: "e6582dae-6890-4971-849d-eca29269fb70"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:59:28 crc kubenswrapper[4558]: I0120 16:59:28.429050 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e6582dae-6890-4971-849d-eca29269fb70-config-data" (OuterVolumeSpecName: "config-data") pod "e6582dae-6890-4971-849d-eca29269fb70" (UID: "e6582dae-6890-4971-849d-eca29269fb70"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:59:28 crc kubenswrapper[4558]: I0120 16:59:28.438043 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e6582dae-6890-4971-849d-eca29269fb70-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "e6582dae-6890-4971-849d-eca29269fb70" (UID: "e6582dae-6890-4971-849d-eca29269fb70"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:59:28 crc kubenswrapper[4558]: I0120 16:59:28.500208 4558 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/e6582dae-6890-4971-849d-eca29269fb70-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 16:59:28 crc kubenswrapper[4558]: I0120 16:59:28.500233 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e6582dae-6890-4971-849d-eca29269fb70-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 16:59:28 crc kubenswrapper[4558]: I0120 16:59:28.500242 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6582dae-6890-4971-849d-eca29269fb70-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 16:59:28 crc kubenswrapper[4558]: I0120 16:59:28.500268 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rk6c5\" (UniqueName: \"kubernetes.io/projected/e6582dae-6890-4971-849d-eca29269fb70-kube-api-access-rk6c5\") on node \"crc\" DevicePath \"\"" Jan 20 16:59:28 crc kubenswrapper[4558]: I0120 16:59:28.574850 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f5484df8-2512-48d4-af79-ea72bf7c02b0" path="/var/lib/kubelet/pods/f5484df8-2512-48d4-af79-ea72bf7c02b0/volumes" Jan 20 16:59:28 crc kubenswrapper[4558]: I0120 16:59:28.620835 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 16:59:28 crc kubenswrapper[4558]: I0120 16:59:28.750036 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 16:59:28 crc kubenswrapper[4558]: I0120 16:59:28.806650 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/900fb0c7-ddb0-46b3-82f7-cdd543afa534-combined-ca-bundle\") pod \"900fb0c7-ddb0-46b3-82f7-cdd543afa534\" (UID: \"900fb0c7-ddb0-46b3-82f7-cdd543afa534\") " Jan 20 16:59:28 crc kubenswrapper[4558]: I0120 16:59:28.806705 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/900fb0c7-ddb0-46b3-82f7-cdd543afa534-config-data\") pod \"900fb0c7-ddb0-46b3-82f7-cdd543afa534\" (UID: \"900fb0c7-ddb0-46b3-82f7-cdd543afa534\") " Jan 20 16:59:28 crc kubenswrapper[4558]: I0120 16:59:28.806817 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-985hs\" (UniqueName: \"kubernetes.io/projected/900fb0c7-ddb0-46b3-82f7-cdd543afa534-kube-api-access-985hs\") pod \"900fb0c7-ddb0-46b3-82f7-cdd543afa534\" (UID: \"900fb0c7-ddb0-46b3-82f7-cdd543afa534\") " Jan 20 16:59:28 crc kubenswrapper[4558]: I0120 16:59:28.811772 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/900fb0c7-ddb0-46b3-82f7-cdd543afa534-kube-api-access-985hs" (OuterVolumeSpecName: "kube-api-access-985hs") pod "900fb0c7-ddb0-46b3-82f7-cdd543afa534" (UID: "900fb0c7-ddb0-46b3-82f7-cdd543afa534"). InnerVolumeSpecName "kube-api-access-985hs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:59:28 crc kubenswrapper[4558]: I0120 16:59:28.830360 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/900fb0c7-ddb0-46b3-82f7-cdd543afa534-config-data" (OuterVolumeSpecName: "config-data") pod "900fb0c7-ddb0-46b3-82f7-cdd543afa534" (UID: "900fb0c7-ddb0-46b3-82f7-cdd543afa534"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:59:28 crc kubenswrapper[4558]: I0120 16:59:28.879754 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/900fb0c7-ddb0-46b3-82f7-cdd543afa534-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "900fb0c7-ddb0-46b3-82f7-cdd543afa534" (UID: "900fb0c7-ddb0-46b3-82f7-cdd543afa534"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:59:28 crc kubenswrapper[4558]: I0120 16:59:28.912867 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/900fb0c7-ddb0-46b3-82f7-cdd543afa534-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 16:59:28 crc kubenswrapper[4558]: I0120 16:59:28.913067 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/900fb0c7-ddb0-46b3-82f7-cdd543afa534-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 16:59:28 crc kubenswrapper[4558]: I0120 16:59:28.913141 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 16:59:28 crc kubenswrapper[4558]: I0120 16:59:28.913084 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"900fb0c7-ddb0-46b3-82f7-cdd543afa534","Type":"ContainerDied","Data":"08ed39808896b2786fe6ca35a6d00b3c1ef2d42417098de4bd4d04960cf43db1"} Jan 20 16:59:28 crc kubenswrapper[4558]: I0120 16:59:28.914541 4558 scope.go:117] "RemoveContainer" containerID="08ed39808896b2786fe6ca35a6d00b3c1ef2d42417098de4bd4d04960cf43db1" Jan 20 16:59:28 crc kubenswrapper[4558]: I0120 16:59:28.913152 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-985hs\" (UniqueName: \"kubernetes.io/projected/900fb0c7-ddb0-46b3-82f7-cdd543afa534-kube-api-access-985hs\") on node \"crc\" DevicePath \"\"" Jan 20 16:59:28 crc kubenswrapper[4558]: I0120 16:59:28.913062 4558 generic.go:334] "Generic (PLEG): container finished" podID="900fb0c7-ddb0-46b3-82f7-cdd543afa534" containerID="08ed39808896b2786fe6ca35a6d00b3c1ef2d42417098de4bd4d04960cf43db1" exitCode=0 Jan 20 16:59:28 crc kubenswrapper[4558]: I0120 16:59:28.914833 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"900fb0c7-ddb0-46b3-82f7-cdd543afa534","Type":"ContainerDied","Data":"0a35efaebf80022aaabcdb8f43b4120a947bc1163e177f586070c7bdb6954672"} Jan 20 16:59:28 crc kubenswrapper[4558]: I0120 16:59:28.918413 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"ef06289e-606c-4644-899d-e80e21228977","Type":"ContainerStarted","Data":"d67d05b9a1811fa02ce8ef71918c092c22214f4d7c7c181a9745cf9cd671b5c8"} Jan 20 16:59:28 crc kubenswrapper[4558]: I0120 16:59:28.918515 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"ef06289e-606c-4644-899d-e80e21228977","Type":"ContainerStarted","Data":"5f19d37d87398cc8eede4263d8e826b0b7f034c333a76494079e14d3d47b23aa"} Jan 20 16:59:28 crc kubenswrapper[4558]: I0120 16:59:28.924013 4558 generic.go:334] "Generic (PLEG): container finished" podID="e6582dae-6890-4971-849d-eca29269fb70" containerID="85b649ace58a22b44216836181388597a07a3b4273a312b767cd9d3f0541e020" exitCode=0 Jan 20 16:59:28 crc kubenswrapper[4558]: I0120 16:59:28.924138 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 16:59:28 crc kubenswrapper[4558]: I0120 16:59:28.924153 4558 generic.go:334] "Generic (PLEG): container finished" podID="e6582dae-6890-4971-849d-eca29269fb70" containerID="35aaf298507a5a639e3dac2b9c8dd012d367f2fa107451230f1ec49b118afc75" exitCode=143 Jan 20 16:59:28 crc kubenswrapper[4558]: I0120 16:59:28.924126 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"e6582dae-6890-4971-849d-eca29269fb70","Type":"ContainerDied","Data":"85b649ace58a22b44216836181388597a07a3b4273a312b767cd9d3f0541e020"} Jan 20 16:59:28 crc kubenswrapper[4558]: I0120 16:59:28.924909 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"e6582dae-6890-4971-849d-eca29269fb70","Type":"ContainerDied","Data":"35aaf298507a5a639e3dac2b9c8dd012d367f2fa107451230f1ec49b118afc75"} Jan 20 16:59:28 crc kubenswrapper[4558]: I0120 16:59:28.924944 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"e6582dae-6890-4971-849d-eca29269fb70","Type":"ContainerDied","Data":"3457f9179bd4ef56defe49a0a8c09861c52831debdd04889c87822639c825c61"} Jan 20 16:59:28 crc kubenswrapper[4558]: I0120 16:59:28.940984 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 16:59:28 crc kubenswrapper[4558]: I0120 16:59:28.952251 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 16:59:28 crc kubenswrapper[4558]: I0120 16:59:28.960257 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 16:59:28 crc kubenswrapper[4558]: I0120 16:59:28.960465 4558 scope.go:117] "RemoveContainer" containerID="08ed39808896b2786fe6ca35a6d00b3c1ef2d42417098de4bd4d04960cf43db1" Jan 20 16:59:28 crc kubenswrapper[4558]: E0120 16:59:28.962596 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"08ed39808896b2786fe6ca35a6d00b3c1ef2d42417098de4bd4d04960cf43db1\": container with ID starting with 08ed39808896b2786fe6ca35a6d00b3c1ef2d42417098de4bd4d04960cf43db1 not found: ID does not exist" containerID="08ed39808896b2786fe6ca35a6d00b3c1ef2d42417098de4bd4d04960cf43db1" Jan 20 16:59:28 crc kubenswrapper[4558]: I0120 16:59:28.962691 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"08ed39808896b2786fe6ca35a6d00b3c1ef2d42417098de4bd4d04960cf43db1"} err="failed to get container status \"08ed39808896b2786fe6ca35a6d00b3c1ef2d42417098de4bd4d04960cf43db1\": rpc error: code = NotFound desc = could not find container \"08ed39808896b2786fe6ca35a6d00b3c1ef2d42417098de4bd4d04960cf43db1\": container with ID starting with 08ed39808896b2786fe6ca35a6d00b3c1ef2d42417098de4bd4d04960cf43db1 not found: ID does not exist" Jan 20 16:59:28 crc kubenswrapper[4558]: I0120 16:59:28.962763 4558 scope.go:117] "RemoveContainer" containerID="85b649ace58a22b44216836181388597a07a3b4273a312b767cd9d3f0541e020" Jan 20 16:59:28 crc kubenswrapper[4558]: I0120 16:59:28.969064 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 16:59:28 crc kubenswrapper[4558]: E0120 16:59:28.969414 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6582dae-6890-4971-849d-eca29269fb70" containerName="nova-metadata-log" Jan 20 16:59:28 crc 
kubenswrapper[4558]: I0120 16:59:28.969430 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6582dae-6890-4971-849d-eca29269fb70" containerName="nova-metadata-log" Jan 20 16:59:28 crc kubenswrapper[4558]: E0120 16:59:28.969444 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="900fb0c7-ddb0-46b3-82f7-cdd543afa534" containerName="nova-scheduler-scheduler" Jan 20 16:59:28 crc kubenswrapper[4558]: I0120 16:59:28.969450 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="900fb0c7-ddb0-46b3-82f7-cdd543afa534" containerName="nova-scheduler-scheduler" Jan 20 16:59:28 crc kubenswrapper[4558]: E0120 16:59:28.969468 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6582dae-6890-4971-849d-eca29269fb70" containerName="nova-metadata-metadata" Jan 20 16:59:28 crc kubenswrapper[4558]: I0120 16:59:28.969474 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6582dae-6890-4971-849d-eca29269fb70" containerName="nova-metadata-metadata" Jan 20 16:59:28 crc kubenswrapper[4558]: I0120 16:59:28.969608 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e6582dae-6890-4971-849d-eca29269fb70" containerName="nova-metadata-log" Jan 20 16:59:28 crc kubenswrapper[4558]: I0120 16:59:28.969623 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e6582dae-6890-4971-849d-eca29269fb70" containerName="nova-metadata-metadata" Jan 20 16:59:28 crc kubenswrapper[4558]: I0120 16:59:28.969648 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="900fb0c7-ddb0-46b3-82f7-cdd543afa534" containerName="nova-scheduler-scheduler" Jan 20 16:59:28 crc kubenswrapper[4558]: I0120 16:59:28.970136 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 16:59:28 crc kubenswrapper[4558]: I0120 16:59:28.984795 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 16:59:28 crc kubenswrapper[4558]: I0120 16:59:28.984825 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-scheduler-config-data" Jan 20 16:59:28 crc kubenswrapper[4558]: I0120 16:59:28.991188 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 16:59:28 crc kubenswrapper[4558]: I0120 16:59:28.999246 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 16:59:29 crc kubenswrapper[4558]: I0120 16:59:29.000474 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 16:59:29 crc kubenswrapper[4558]: I0120 16:59:29.004272 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-metadata-config-data" Jan 20 16:59:29 crc kubenswrapper[4558]: I0120 16:59:29.004454 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-metadata-internal-svc" Jan 20 16:59:29 crc kubenswrapper[4558]: I0120 16:59:29.005090 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 16:59:29 crc kubenswrapper[4558]: I0120 16:59:29.015925 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0f4ced88-ef0d-40df-ac5f-b1baf062f8e3-config-data\") pod \"nova-scheduler-0\" (UID: \"0f4ced88-ef0d-40df-ac5f-b1baf062f8e3\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 16:59:29 crc kubenswrapper[4558]: I0120 16:59:29.015973 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f4ced88-ef0d-40df-ac5f-b1baf062f8e3-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"0f4ced88-ef0d-40df-ac5f-b1baf062f8e3\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 16:59:29 crc kubenswrapper[4558]: I0120 16:59:29.016516 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rhhjh\" (UniqueName: \"kubernetes.io/projected/0f4ced88-ef0d-40df-ac5f-b1baf062f8e3-kube-api-access-rhhjh\") pod \"nova-scheduler-0\" (UID: \"0f4ced88-ef0d-40df-ac5f-b1baf062f8e3\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 16:59:29 crc kubenswrapper[4558]: I0120 16:59:29.017836 4558 scope.go:117] "RemoveContainer" containerID="35aaf298507a5a639e3dac2b9c8dd012d367f2fa107451230f1ec49b118afc75" Jan 20 16:59:29 crc kubenswrapper[4558]: I0120 16:59:29.038089 4558 scope.go:117] "RemoveContainer" containerID="85b649ace58a22b44216836181388597a07a3b4273a312b767cd9d3f0541e020" Jan 20 16:59:29 crc kubenswrapper[4558]: E0120 16:59:29.039034 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"85b649ace58a22b44216836181388597a07a3b4273a312b767cd9d3f0541e020\": container with ID starting with 85b649ace58a22b44216836181388597a07a3b4273a312b767cd9d3f0541e020 not found: ID does not exist" containerID="85b649ace58a22b44216836181388597a07a3b4273a312b767cd9d3f0541e020" Jan 20 16:59:29 crc kubenswrapper[4558]: I0120 16:59:29.039077 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"85b649ace58a22b44216836181388597a07a3b4273a312b767cd9d3f0541e020"} err="failed to get container status \"85b649ace58a22b44216836181388597a07a3b4273a312b767cd9d3f0541e020\": rpc error: code = NotFound desc = could not find container \"85b649ace58a22b44216836181388597a07a3b4273a312b767cd9d3f0541e020\": container with ID starting with 85b649ace58a22b44216836181388597a07a3b4273a312b767cd9d3f0541e020 not found: ID does not exist" Jan 20 16:59:29 crc kubenswrapper[4558]: I0120 16:59:29.039096 4558 scope.go:117] "RemoveContainer" containerID="35aaf298507a5a639e3dac2b9c8dd012d367f2fa107451230f1ec49b118afc75" Jan 20 16:59:29 crc kubenswrapper[4558]: E0120 16:59:29.039447 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound 
desc = could not find container \"35aaf298507a5a639e3dac2b9c8dd012d367f2fa107451230f1ec49b118afc75\": container with ID starting with 35aaf298507a5a639e3dac2b9c8dd012d367f2fa107451230f1ec49b118afc75 not found: ID does not exist" containerID="35aaf298507a5a639e3dac2b9c8dd012d367f2fa107451230f1ec49b118afc75" Jan 20 16:59:29 crc kubenswrapper[4558]: I0120 16:59:29.039466 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"35aaf298507a5a639e3dac2b9c8dd012d367f2fa107451230f1ec49b118afc75"} err="failed to get container status \"35aaf298507a5a639e3dac2b9c8dd012d367f2fa107451230f1ec49b118afc75\": rpc error: code = NotFound desc = could not find container \"35aaf298507a5a639e3dac2b9c8dd012d367f2fa107451230f1ec49b118afc75\": container with ID starting with 35aaf298507a5a639e3dac2b9c8dd012d367f2fa107451230f1ec49b118afc75 not found: ID does not exist" Jan 20 16:59:29 crc kubenswrapper[4558]: I0120 16:59:29.039478 4558 scope.go:117] "RemoveContainer" containerID="85b649ace58a22b44216836181388597a07a3b4273a312b767cd9d3f0541e020" Jan 20 16:59:29 crc kubenswrapper[4558]: I0120 16:59:29.039724 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"85b649ace58a22b44216836181388597a07a3b4273a312b767cd9d3f0541e020"} err="failed to get container status \"85b649ace58a22b44216836181388597a07a3b4273a312b767cd9d3f0541e020\": rpc error: code = NotFound desc = could not find container \"85b649ace58a22b44216836181388597a07a3b4273a312b767cd9d3f0541e020\": container with ID starting with 85b649ace58a22b44216836181388597a07a3b4273a312b767cd9d3f0541e020 not found: ID does not exist" Jan 20 16:59:29 crc kubenswrapper[4558]: I0120 16:59:29.039758 4558 scope.go:117] "RemoveContainer" containerID="35aaf298507a5a639e3dac2b9c8dd012d367f2fa107451230f1ec49b118afc75" Jan 20 16:59:29 crc kubenswrapper[4558]: I0120 16:59:29.039942 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"35aaf298507a5a639e3dac2b9c8dd012d367f2fa107451230f1ec49b118afc75"} err="failed to get container status \"35aaf298507a5a639e3dac2b9c8dd012d367f2fa107451230f1ec49b118afc75\": rpc error: code = NotFound desc = could not find container \"35aaf298507a5a639e3dac2b9c8dd012d367f2fa107451230f1ec49b118afc75\": container with ID starting with 35aaf298507a5a639e3dac2b9c8dd012d367f2fa107451230f1ec49b118afc75 not found: ID does not exist" Jan 20 16:59:29 crc kubenswrapper[4558]: I0120 16:59:29.118103 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-878d7\" (UniqueName: \"kubernetes.io/projected/aa2a96c7-ff73-49de-9acc-7e5ce8791029-kube-api-access-878d7\") pod \"nova-metadata-0\" (UID: \"aa2a96c7-ff73-49de-9acc-7e5ce8791029\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 16:59:29 crc kubenswrapper[4558]: I0120 16:59:29.118188 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa2a96c7-ff73-49de-9acc-7e5ce8791029-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"aa2a96c7-ff73-49de-9acc-7e5ce8791029\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 16:59:29 crc kubenswrapper[4558]: I0120 16:59:29.118240 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aa2a96c7-ff73-49de-9acc-7e5ce8791029-logs\") pod \"nova-metadata-0\" (UID: 
\"aa2a96c7-ff73-49de-9acc-7e5ce8791029\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 16:59:29 crc kubenswrapper[4558]: I0120 16:59:29.118261 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa2a96c7-ff73-49de-9acc-7e5ce8791029-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"aa2a96c7-ff73-49de-9acc-7e5ce8791029\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 16:59:29 crc kubenswrapper[4558]: I0120 16:59:29.118460 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rhhjh\" (UniqueName: \"kubernetes.io/projected/0f4ced88-ef0d-40df-ac5f-b1baf062f8e3-kube-api-access-rhhjh\") pod \"nova-scheduler-0\" (UID: \"0f4ced88-ef0d-40df-ac5f-b1baf062f8e3\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 16:59:29 crc kubenswrapper[4558]: I0120 16:59:29.118508 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa2a96c7-ff73-49de-9acc-7e5ce8791029-config-data\") pod \"nova-metadata-0\" (UID: \"aa2a96c7-ff73-49de-9acc-7e5ce8791029\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 16:59:29 crc kubenswrapper[4558]: I0120 16:59:29.118569 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0f4ced88-ef0d-40df-ac5f-b1baf062f8e3-config-data\") pod \"nova-scheduler-0\" (UID: \"0f4ced88-ef0d-40df-ac5f-b1baf062f8e3\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 16:59:29 crc kubenswrapper[4558]: I0120 16:59:29.118591 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f4ced88-ef0d-40df-ac5f-b1baf062f8e3-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"0f4ced88-ef0d-40df-ac5f-b1baf062f8e3\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 16:59:29 crc kubenswrapper[4558]: I0120 16:59:29.121649 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0f4ced88-ef0d-40df-ac5f-b1baf062f8e3-config-data\") pod \"nova-scheduler-0\" (UID: \"0f4ced88-ef0d-40df-ac5f-b1baf062f8e3\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 16:59:29 crc kubenswrapper[4558]: I0120 16:59:29.121693 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f4ced88-ef0d-40df-ac5f-b1baf062f8e3-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"0f4ced88-ef0d-40df-ac5f-b1baf062f8e3\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 16:59:29 crc kubenswrapper[4558]: I0120 16:59:29.130896 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rhhjh\" (UniqueName: \"kubernetes.io/projected/0f4ced88-ef0d-40df-ac5f-b1baf062f8e3-kube-api-access-rhhjh\") pod \"nova-scheduler-0\" (UID: \"0f4ced88-ef0d-40df-ac5f-b1baf062f8e3\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 16:59:29 crc kubenswrapper[4558]: I0120 16:59:29.219716 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa2a96c7-ff73-49de-9acc-7e5ce8791029-config-data\") pod \"nova-metadata-0\" (UID: \"aa2a96c7-ff73-49de-9acc-7e5ce8791029\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 16:59:29 crc kubenswrapper[4558]: I0120 
16:59:29.219799 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-878d7\" (UniqueName: \"kubernetes.io/projected/aa2a96c7-ff73-49de-9acc-7e5ce8791029-kube-api-access-878d7\") pod \"nova-metadata-0\" (UID: \"aa2a96c7-ff73-49de-9acc-7e5ce8791029\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 16:59:29 crc kubenswrapper[4558]: I0120 16:59:29.219849 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa2a96c7-ff73-49de-9acc-7e5ce8791029-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"aa2a96c7-ff73-49de-9acc-7e5ce8791029\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 16:59:29 crc kubenswrapper[4558]: I0120 16:59:29.219881 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aa2a96c7-ff73-49de-9acc-7e5ce8791029-logs\") pod \"nova-metadata-0\" (UID: \"aa2a96c7-ff73-49de-9acc-7e5ce8791029\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 16:59:29 crc kubenswrapper[4558]: I0120 16:59:29.219902 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa2a96c7-ff73-49de-9acc-7e5ce8791029-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"aa2a96c7-ff73-49de-9acc-7e5ce8791029\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 16:59:29 crc kubenswrapper[4558]: I0120 16:59:29.220235 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aa2a96c7-ff73-49de-9acc-7e5ce8791029-logs\") pod \"nova-metadata-0\" (UID: \"aa2a96c7-ff73-49de-9acc-7e5ce8791029\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 16:59:29 crc kubenswrapper[4558]: I0120 16:59:29.222636 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa2a96c7-ff73-49de-9acc-7e5ce8791029-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"aa2a96c7-ff73-49de-9acc-7e5ce8791029\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 16:59:29 crc kubenswrapper[4558]: I0120 16:59:29.222907 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa2a96c7-ff73-49de-9acc-7e5ce8791029-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"aa2a96c7-ff73-49de-9acc-7e5ce8791029\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 16:59:29 crc kubenswrapper[4558]: I0120 16:59:29.223033 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa2a96c7-ff73-49de-9acc-7e5ce8791029-config-data\") pod \"nova-metadata-0\" (UID: \"aa2a96c7-ff73-49de-9acc-7e5ce8791029\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 16:59:29 crc kubenswrapper[4558]: I0120 16:59:29.233244 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-878d7\" (UniqueName: \"kubernetes.io/projected/aa2a96c7-ff73-49de-9acc-7e5ce8791029-kube-api-access-878d7\") pod \"nova-metadata-0\" (UID: \"aa2a96c7-ff73-49de-9acc-7e5ce8791029\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 16:59:29 crc kubenswrapper[4558]: I0120 16:59:29.296317 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 16:59:29 crc kubenswrapper[4558]: I0120 16:59:29.325580 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 16:59:29 crc kubenswrapper[4558]: I0120 16:59:29.659597 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 16:59:29 crc kubenswrapper[4558]: W0120 16:59:29.661206 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0f4ced88_ef0d_40df_ac5f_b1baf062f8e3.slice/crio-416f49c29a91606a5f452eefde4f71aa0b0af574547e8a4b84a41fc61f5ef6fe WatchSource:0}: Error finding container 416f49c29a91606a5f452eefde4f71aa0b0af574547e8a4b84a41fc61f5ef6fe: Status 404 returned error can't find the container with id 416f49c29a91606a5f452eefde4f71aa0b0af574547e8a4b84a41fc61f5ef6fe Jan 20 16:59:29 crc kubenswrapper[4558]: I0120 16:59:29.731159 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 16:59:29 crc kubenswrapper[4558]: W0120 16:59:29.741204 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaa2a96c7_ff73_49de_9acc_7e5ce8791029.slice/crio-33b869d23d26107cec428871ae63123901c7d870de544d3bf1e9505323ae6a2c WatchSource:0}: Error finding container 33b869d23d26107cec428871ae63123901c7d870de544d3bf1e9505323ae6a2c: Status 404 returned error can't find the container with id 33b869d23d26107cec428871ae63123901c7d870de544d3bf1e9505323ae6a2c Jan 20 16:59:29 crc kubenswrapper[4558]: I0120 16:59:29.933275 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"aa2a96c7-ff73-49de-9acc-7e5ce8791029","Type":"ContainerStarted","Data":"e00279eefaa9c994ac81a4a5f4771f651b81ab3c8953e4cb17bc67d888e02db4"} Jan 20 16:59:29 crc kubenswrapper[4558]: I0120 16:59:29.933311 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"aa2a96c7-ff73-49de-9acc-7e5ce8791029","Type":"ContainerStarted","Data":"33b869d23d26107cec428871ae63123901c7d870de544d3bf1e9505323ae6a2c"} Jan 20 16:59:29 crc kubenswrapper[4558]: I0120 16:59:29.936619 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"ef06289e-606c-4644-899d-e80e21228977","Type":"ContainerStarted","Data":"038d8ffac4b3f152867ee7ad7cbec45a4f1368fff79ac971ad761889b8f60991"} Jan 20 16:59:29 crc kubenswrapper[4558]: I0120 16:59:29.937990 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"0f4ced88-ef0d-40df-ac5f-b1baf062f8e3","Type":"ContainerStarted","Data":"5046214ebbf76cbcda5d6afa9f9f3b15fddfcb6bf26dfc014b0afde9bcf8c549"} Jan 20 16:59:29 crc kubenswrapper[4558]: I0120 16:59:29.938011 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"0f4ced88-ef0d-40df-ac5f-b1baf062f8e3","Type":"ContainerStarted","Data":"416f49c29a91606a5f452eefde4f71aa0b0af574547e8a4b84a41fc61f5ef6fe"} Jan 20 16:59:29 crc kubenswrapper[4558]: I0120 16:59:29.949801 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-api-0" podStartSLOduration=2.949791585 podStartE2EDuration="2.949791585s" podCreationTimestamp="2026-01-20 16:59:27 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:59:29.94862965 +0000 UTC m=+1063.708967617" watchObservedRunningTime="2026-01-20 16:59:29.949791585 +0000 UTC m=+1063.710129552" Jan 20 16:59:29 crc kubenswrapper[4558]: I0120 16:59:29.963251 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-scheduler-0" podStartSLOduration=1.9632419049999998 podStartE2EDuration="1.963241905s" podCreationTimestamp="2026-01-20 16:59:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:59:29.96037246 +0000 UTC m=+1063.720710427" watchObservedRunningTime="2026-01-20 16:59:29.963241905 +0000 UTC m=+1063.723579873" Jan 20 16:59:30 crc kubenswrapper[4558]: I0120 16:59:30.573547 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="900fb0c7-ddb0-46b3-82f7-cdd543afa534" path="/var/lib/kubelet/pods/900fb0c7-ddb0-46b3-82f7-cdd543afa534/volumes" Jan 20 16:59:30 crc kubenswrapper[4558]: I0120 16:59:30.574371 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e6582dae-6890-4971-849d-eca29269fb70" path="/var/lib/kubelet/pods/e6582dae-6890-4971-849d-eca29269fb70/volumes" Jan 20 16:59:30 crc kubenswrapper[4558]: I0120 16:59:30.946349 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"aa2a96c7-ff73-49de-9acc-7e5ce8791029","Type":"ContainerStarted","Data":"10ccbac057411e8b97d567d8a1fee98ca6979c330f6d03f60ae03427a6c09e0c"} Jan 20 16:59:31 crc kubenswrapper[4558]: I0120 16:59:31.208670 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 16:59:31 crc kubenswrapper[4558]: I0120 16:59:31.235367 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-metadata-0" podStartSLOduration=3.235354636 podStartE2EDuration="3.235354636s" podCreationTimestamp="2026-01-20 16:59:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:59:30.962332971 +0000 UTC m=+1064.722670937" watchObservedRunningTime="2026-01-20 16:59:31.235354636 +0000 UTC m=+1064.995692603" Jan 20 16:59:34 crc kubenswrapper[4558]: I0120 16:59:34.297008 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 16:59:34 crc kubenswrapper[4558]: I0120 16:59:34.326280 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 16:59:34 crc kubenswrapper[4558]: I0120 16:59:34.326310 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 16:59:38 crc kubenswrapper[4558]: I0120 16:59:38.232693 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-api-0" Jan 20 16:59:38 crc kubenswrapper[4558]: I0120 16:59:38.232899 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-api-0" Jan 20 16:59:39 crc kubenswrapper[4558]: I0120 16:59:39.296998 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 16:59:39 crc kubenswrapper[4558]: I0120 16:59:39.314333 4558 
prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" podUID="ef06289e-606c-4644-899d-e80e21228977" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.177:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 20 16:59:39 crc kubenswrapper[4558]: I0120 16:59:39.314340 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" podUID="ef06289e-606c-4644-899d-e80e21228977" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.177:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 20 16:59:39 crc kubenswrapper[4558]: I0120 16:59:39.317595 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 16:59:39 crc kubenswrapper[4558]: I0120 16:59:39.326543 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 16:59:39 crc kubenswrapper[4558]: I0120 16:59:39.326590 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 16:59:40 crc kubenswrapper[4558]: I0120 16:59:40.026715 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 16:59:40 crc kubenswrapper[4558]: I0120 16:59:40.342299 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-metadata-0" podUID="aa2a96c7-ff73-49de-9acc-7e5ce8791029" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.179:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 16:59:40 crc kubenswrapper[4558]: I0120 16:59:40.342318 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-metadata-0" podUID="aa2a96c7-ff73-49de-9acc-7e5ce8791029" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.179:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 16:59:48 crc kubenswrapper[4558]: I0120 16:59:48.234552 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-api-0" Jan 20 16:59:48 crc kubenswrapper[4558]: I0120 16:59:48.235147 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-api-0" Jan 20 16:59:48 crc kubenswrapper[4558]: I0120 16:59:48.235702 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-api-0" Jan 20 16:59:48 crc kubenswrapper[4558]: I0120 16:59:48.237747 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-api-0" Jan 20 16:59:49 crc kubenswrapper[4558]: I0120 16:59:49.061728 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-api-0" Jan 20 16:59:49 crc kubenswrapper[4558]: I0120 16:59:49.065134 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-api-0" Jan 20 16:59:49 crc kubenswrapper[4558]: I0120 16:59:49.330491 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 16:59:49 crc kubenswrapper[4558]: I0120 16:59:49.333120 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 16:59:49 crc kubenswrapper[4558]: I0120 16:59:49.334130 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 16:59:50 crc kubenswrapper[4558]: I0120 16:59:50.085957 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 16:59:50 crc kubenswrapper[4558]: I0120 16:59:50.122116 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:59:50 crc kubenswrapper[4558]: I0120 16:59:50.649532 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 16:59:51 crc kubenswrapper[4558]: I0120 16:59:51.073415 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="473a463c-85cb-4702-9759-761904072818" containerName="ceilometer-central-agent" containerID="cri-o://efe3ef9d14657f5a9f606bc1ca5681abed7f791ea57ee1600378d05da399c8a7" gracePeriod=30 Jan 20 16:59:51 crc kubenswrapper[4558]: I0120 16:59:51.073503 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="473a463c-85cb-4702-9759-761904072818" containerName="proxy-httpd" containerID="cri-o://9a54cdc016f9f70eb47e9944e8d06f082661c74136b8d28a93740b3b4fcb3687" gracePeriod=30 Jan 20 16:59:51 crc kubenswrapper[4558]: I0120 16:59:51.073543 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="473a463c-85cb-4702-9759-761904072818" containerName="sg-core" containerID="cri-o://2825940fb8c15a1edf0410c1dc202cc5c9ccc1417f80fabebfdaac6104c498e5" gracePeriod=30 Jan 20 16:59:51 crc kubenswrapper[4558]: I0120 16:59:51.073574 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="473a463c-85cb-4702-9759-761904072818" containerName="ceilometer-notification-agent" containerID="cri-o://3a19ed138105b3740685ac28d410913719c1974f6fed6a27ac6a13e7aa6cb0a5" gracePeriod=30 Jan 20 16:59:51 crc kubenswrapper[4558]: I0120 16:59:51.489146 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 16:59:52 crc kubenswrapper[4558]: I0120 16:59:52.082271 4558 generic.go:334] "Generic (PLEG): container finished" podID="473a463c-85cb-4702-9759-761904072818" containerID="9a54cdc016f9f70eb47e9944e8d06f082661c74136b8d28a93740b3b4fcb3687" exitCode=0 Jan 20 16:59:52 crc kubenswrapper[4558]: I0120 16:59:52.082298 4558 generic.go:334] "Generic (PLEG): container finished" podID="473a463c-85cb-4702-9759-761904072818" containerID="2825940fb8c15a1edf0410c1dc202cc5c9ccc1417f80fabebfdaac6104c498e5" exitCode=2 Jan 20 16:59:52 crc kubenswrapper[4558]: I0120 16:59:52.082307 4558 generic.go:334] "Generic (PLEG): container finished" podID="473a463c-85cb-4702-9759-761904072818" containerID="efe3ef9d14657f5a9f606bc1ca5681abed7f791ea57ee1600378d05da399c8a7" exitCode=0 Jan 20 16:59:52 crc kubenswrapper[4558]: I0120 16:59:52.082332 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"473a463c-85cb-4702-9759-761904072818","Type":"ContainerDied","Data":"9a54cdc016f9f70eb47e9944e8d06f082661c74136b8d28a93740b3b4fcb3687"} Jan 20 16:59:52 crc kubenswrapper[4558]: I0120 16:59:52.082363 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"473a463c-85cb-4702-9759-761904072818","Type":"ContainerDied","Data":"2825940fb8c15a1edf0410c1dc202cc5c9ccc1417f80fabebfdaac6104c498e5"} Jan 20 16:59:52 crc kubenswrapper[4558]: I0120 16:59:52.082374 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"473a463c-85cb-4702-9759-761904072818","Type":"ContainerDied","Data":"efe3ef9d14657f5a9f606bc1ca5681abed7f791ea57ee1600378d05da399c8a7"} Jan 20 16:59:52 crc kubenswrapper[4558]: I0120 16:59:52.082458 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="ef06289e-606c-4644-899d-e80e21228977" containerName="nova-api-log" containerID="cri-o://d67d05b9a1811fa02ce8ef71918c092c22214f4d7c7c181a9745cf9cd671b5c8" gracePeriod=30 Jan 20 16:59:52 crc kubenswrapper[4558]: I0120 16:59:52.082535 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="ef06289e-606c-4644-899d-e80e21228977" containerName="nova-api-api" containerID="cri-o://038d8ffac4b3f152867ee7ad7cbec45a4f1368fff79ac971ad761889b8f60991" gracePeriod=30 Jan 20 16:59:52 crc kubenswrapper[4558]: I0120 16:59:52.882369 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:59:52 crc kubenswrapper[4558]: I0120 16:59:52.975331 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/473a463c-85cb-4702-9759-761904072818-log-httpd\") pod \"473a463c-85cb-4702-9759-761904072818\" (UID: \"473a463c-85cb-4702-9759-761904072818\") " Jan 20 16:59:52 crc kubenswrapper[4558]: I0120 16:59:52.975482 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/473a463c-85cb-4702-9759-761904072818-run-httpd\") pod \"473a463c-85cb-4702-9759-761904072818\" (UID: \"473a463c-85cb-4702-9759-761904072818\") " Jan 20 16:59:52 crc kubenswrapper[4558]: I0120 16:59:52.975576 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/473a463c-85cb-4702-9759-761904072818-sg-core-conf-yaml\") pod \"473a463c-85cb-4702-9759-761904072818\" (UID: \"473a463c-85cb-4702-9759-761904072818\") " Jan 20 16:59:52 crc kubenswrapper[4558]: I0120 16:59:52.975609 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnnhn\" (UniqueName: \"kubernetes.io/projected/473a463c-85cb-4702-9759-761904072818-kube-api-access-mnnhn\") pod \"473a463c-85cb-4702-9759-761904072818\" (UID: \"473a463c-85cb-4702-9759-761904072818\") " Jan 20 16:59:52 crc kubenswrapper[4558]: I0120 16:59:52.975692 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/473a463c-85cb-4702-9759-761904072818-config-data\") pod \"473a463c-85cb-4702-9759-761904072818\" (UID: \"473a463c-85cb-4702-9759-761904072818\") " Jan 20 16:59:52 crc kubenswrapper[4558]: I0120 16:59:52.975719 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/473a463c-85cb-4702-9759-761904072818-combined-ca-bundle\") pod \"473a463c-85cb-4702-9759-761904072818\" (UID: \"473a463c-85cb-4702-9759-761904072818\") " Jan 20 16:59:52 crc kubenswrapper[4558]: 
I0120 16:59:52.975776 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/473a463c-85cb-4702-9759-761904072818-scripts\") pod \"473a463c-85cb-4702-9759-761904072818\" (UID: \"473a463c-85cb-4702-9759-761904072818\") " Jan 20 16:59:52 crc kubenswrapper[4558]: I0120 16:59:52.976425 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/473a463c-85cb-4702-9759-761904072818-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "473a463c-85cb-4702-9759-761904072818" (UID: "473a463c-85cb-4702-9759-761904072818"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 16:59:52 crc kubenswrapper[4558]: I0120 16:59:52.976722 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/473a463c-85cb-4702-9759-761904072818-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "473a463c-85cb-4702-9759-761904072818" (UID: "473a463c-85cb-4702-9759-761904072818"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 16:59:52 crc kubenswrapper[4558]: I0120 16:59:52.980714 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/473a463c-85cb-4702-9759-761904072818-kube-api-access-mnnhn" (OuterVolumeSpecName: "kube-api-access-mnnhn") pod "473a463c-85cb-4702-9759-761904072818" (UID: "473a463c-85cb-4702-9759-761904072818"). InnerVolumeSpecName "kube-api-access-mnnhn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.005389 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/473a463c-85cb-4702-9759-761904072818-scripts" (OuterVolumeSpecName: "scripts") pod "473a463c-85cb-4702-9759-761904072818" (UID: "473a463c-85cb-4702-9759-761904072818"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.009645 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/473a463c-85cb-4702-9759-761904072818-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "473a463c-85cb-4702-9759-761904072818" (UID: "473a463c-85cb-4702-9759-761904072818"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.058265 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/473a463c-85cb-4702-9759-761904072818-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "473a463c-85cb-4702-9759-761904072818" (UID: "473a463c-85cb-4702-9759-761904072818"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.072936 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/473a463c-85cb-4702-9759-761904072818-config-data" (OuterVolumeSpecName: "config-data") pod "473a463c-85cb-4702-9759-761904072818" (UID: "473a463c-85cb-4702-9759-761904072818"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.077637 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/473a463c-85cb-4702-9759-761904072818-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.077758 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/473a463c-85cb-4702-9759-761904072818-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.077831 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/473a463c-85cb-4702-9759-761904072818-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.077893 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnnhn\" (UniqueName: \"kubernetes.io/projected/473a463c-85cb-4702-9759-761904072818-kube-api-access-mnnhn\") on node \"crc\" DevicePath \"\"" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.077956 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/473a463c-85cb-4702-9759-761904072818-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.078010 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/473a463c-85cb-4702-9759-761904072818-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.078120 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/473a463c-85cb-4702-9759-761904072818-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.086753 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.097464 4558 generic.go:334] "Generic (PLEG): container finished" podID="ef06289e-606c-4644-899d-e80e21228977" containerID="d67d05b9a1811fa02ce8ef71918c092c22214f4d7c7c181a9745cf9cd671b5c8" exitCode=143 Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.097539 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"ef06289e-606c-4644-899d-e80e21228977","Type":"ContainerDied","Data":"d67d05b9a1811fa02ce8ef71918c092c22214f4d7c7c181a9745cf9cd671b5c8"} Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.100738 4558 generic.go:334] "Generic (PLEG): container finished" podID="966d48b4-44df-4217-be28-d015d6defc20" containerID="699f41d3854a9fb8a133db3974fb8fbf2d759a82427c5d466adde671f8342bc6" exitCode=137 Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.100785 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"966d48b4-44df-4217-be28-d015d6defc20","Type":"ContainerDied","Data":"699f41d3854a9fb8a133db3974fb8fbf2d759a82427c5d466adde671f8342bc6"} Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.100801 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"966d48b4-44df-4217-be28-d015d6defc20","Type":"ContainerDied","Data":"a34d73b6d559dd63c47c700d2f13ef487628e03d9fe39fef7ec2ca0f9a90dfe6"} Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.100815 4558 scope.go:117] "RemoveContainer" containerID="699f41d3854a9fb8a133db3974fb8fbf2d759a82427c5d466adde671f8342bc6" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.100913 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.106629 4558 generic.go:334] "Generic (PLEG): container finished" podID="473a463c-85cb-4702-9759-761904072818" containerID="3a19ed138105b3740685ac28d410913719c1974f6fed6a27ac6a13e7aa6cb0a5" exitCode=0 Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.106654 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"473a463c-85cb-4702-9759-761904072818","Type":"ContainerDied","Data":"3a19ed138105b3740685ac28d410913719c1974f6fed6a27ac6a13e7aa6cb0a5"} Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.106670 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"473a463c-85cb-4702-9759-761904072818","Type":"ContainerDied","Data":"a25eecc02f955ebbdc93b128f536260bfffae2c61a5934b541e9dc8c982ef61b"} Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.106710 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.124525 4558 scope.go:117] "RemoveContainer" containerID="699f41d3854a9fb8a133db3974fb8fbf2d759a82427c5d466adde671f8342bc6" Jan 20 16:59:53 crc kubenswrapper[4558]: E0120 16:59:53.125311 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"699f41d3854a9fb8a133db3974fb8fbf2d759a82427c5d466adde671f8342bc6\": container with ID starting with 699f41d3854a9fb8a133db3974fb8fbf2d759a82427c5d466adde671f8342bc6 not found: ID does not exist" containerID="699f41d3854a9fb8a133db3974fb8fbf2d759a82427c5d466adde671f8342bc6" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.125339 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"699f41d3854a9fb8a133db3974fb8fbf2d759a82427c5d466adde671f8342bc6"} err="failed to get container status \"699f41d3854a9fb8a133db3974fb8fbf2d759a82427c5d466adde671f8342bc6\": rpc error: code = NotFound desc = could not find container \"699f41d3854a9fb8a133db3974fb8fbf2d759a82427c5d466adde671f8342bc6\": container with ID starting with 699f41d3854a9fb8a133db3974fb8fbf2d759a82427c5d466adde671f8342bc6 not found: ID does not exist" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.125356 4558 scope.go:117] "RemoveContainer" containerID="9a54cdc016f9f70eb47e9944e8d06f082661c74136b8d28a93740b3b4fcb3687" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.145244 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.147364 4558 scope.go:117] "RemoveContainer" containerID="2825940fb8c15a1edf0410c1dc202cc5c9ccc1417f80fabebfdaac6104c498e5" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.149514 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.162265 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 16:59:53 crc kubenswrapper[4558]: E0120 16:59:53.162584 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="473a463c-85cb-4702-9759-761904072818" containerName="ceilometer-notification-agent" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.162600 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="473a463c-85cb-4702-9759-761904072818" containerName="ceilometer-notification-agent" Jan 20 16:59:53 crc kubenswrapper[4558]: E0120 16:59:53.162618 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="473a463c-85cb-4702-9759-761904072818" containerName="sg-core" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.162624 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="473a463c-85cb-4702-9759-761904072818" containerName="sg-core" Jan 20 16:59:53 crc kubenswrapper[4558]: E0120 16:59:53.162638 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="473a463c-85cb-4702-9759-761904072818" containerName="proxy-httpd" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.162643 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="473a463c-85cb-4702-9759-761904072818" containerName="proxy-httpd" Jan 20 16:59:53 crc kubenswrapper[4558]: E0120 16:59:53.162649 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="966d48b4-44df-4217-be28-d015d6defc20" containerName="nova-cell1-novncproxy-novncproxy" 
Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.162655 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="966d48b4-44df-4217-be28-d015d6defc20" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 16:59:53 crc kubenswrapper[4558]: E0120 16:59:53.162672 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="473a463c-85cb-4702-9759-761904072818" containerName="ceilometer-central-agent" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.162677 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="473a463c-85cb-4702-9759-761904072818" containerName="ceilometer-central-agent" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.163076 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="473a463c-85cb-4702-9759-761904072818" containerName="ceilometer-central-agent" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.163099 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="966d48b4-44df-4217-be28-d015d6defc20" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.163110 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="473a463c-85cb-4702-9759-761904072818" containerName="sg-core" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.163134 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="473a463c-85cb-4702-9759-761904072818" containerName="ceilometer-notification-agent" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.163140 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="473a463c-85cb-4702-9759-761904072818" containerName="proxy-httpd" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.164498 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.166493 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.166956 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.174268 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.178750 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/966d48b4-44df-4217-be28-d015d6defc20-combined-ca-bundle\") pod \"966d48b4-44df-4217-be28-d015d6defc20\" (UID: \"966d48b4-44df-4217-be28-d015d6defc20\") " Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.178878 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/966d48b4-44df-4217-be28-d015d6defc20-config-data\") pod \"966d48b4-44df-4217-be28-d015d6defc20\" (UID: \"966d48b4-44df-4217-be28-d015d6defc20\") " Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.179484 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t5ml2\" (UniqueName: \"kubernetes.io/projected/966d48b4-44df-4217-be28-d015d6defc20-kube-api-access-t5ml2\") pod \"966d48b4-44df-4217-be28-d015d6defc20\" (UID: \"966d48b4-44df-4217-be28-d015d6defc20\") " Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.181509 4558 scope.go:117] "RemoveContainer" containerID="3a19ed138105b3740685ac28d410913719c1974f6fed6a27ac6a13e7aa6cb0a5" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.183514 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/966d48b4-44df-4217-be28-d015d6defc20-kube-api-access-t5ml2" (OuterVolumeSpecName: "kube-api-access-t5ml2") pod "966d48b4-44df-4217-be28-d015d6defc20" (UID: "966d48b4-44df-4217-be28-d015d6defc20"). InnerVolumeSpecName "kube-api-access-t5ml2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.201008 4558 scope.go:117] "RemoveContainer" containerID="efe3ef9d14657f5a9f606bc1ca5681abed7f791ea57ee1600378d05da399c8a7" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.203691 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/966d48b4-44df-4217-be28-d015d6defc20-config-data" (OuterVolumeSpecName: "config-data") pod "966d48b4-44df-4217-be28-d015d6defc20" (UID: "966d48b4-44df-4217-be28-d015d6defc20"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.209029 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/966d48b4-44df-4217-be28-d015d6defc20-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "966d48b4-44df-4217-be28-d015d6defc20" (UID: "966d48b4-44df-4217-be28-d015d6defc20"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.267296 4558 scope.go:117] "RemoveContainer" containerID="9a54cdc016f9f70eb47e9944e8d06f082661c74136b8d28a93740b3b4fcb3687" Jan 20 16:59:53 crc kubenswrapper[4558]: E0120 16:59:53.267901 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9a54cdc016f9f70eb47e9944e8d06f082661c74136b8d28a93740b3b4fcb3687\": container with ID starting with 9a54cdc016f9f70eb47e9944e8d06f082661c74136b8d28a93740b3b4fcb3687 not found: ID does not exist" containerID="9a54cdc016f9f70eb47e9944e8d06f082661c74136b8d28a93740b3b4fcb3687" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.267940 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9a54cdc016f9f70eb47e9944e8d06f082661c74136b8d28a93740b3b4fcb3687"} err="failed to get container status \"9a54cdc016f9f70eb47e9944e8d06f082661c74136b8d28a93740b3b4fcb3687\": rpc error: code = NotFound desc = could not find container \"9a54cdc016f9f70eb47e9944e8d06f082661c74136b8d28a93740b3b4fcb3687\": container with ID starting with 9a54cdc016f9f70eb47e9944e8d06f082661c74136b8d28a93740b3b4fcb3687 not found: ID does not exist" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.267986 4558 scope.go:117] "RemoveContainer" containerID="2825940fb8c15a1edf0410c1dc202cc5c9ccc1417f80fabebfdaac6104c498e5" Jan 20 16:59:53 crc kubenswrapper[4558]: E0120 16:59:53.269601 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2825940fb8c15a1edf0410c1dc202cc5c9ccc1417f80fabebfdaac6104c498e5\": container with ID starting with 2825940fb8c15a1edf0410c1dc202cc5c9ccc1417f80fabebfdaac6104c498e5 not found: ID does not exist" containerID="2825940fb8c15a1edf0410c1dc202cc5c9ccc1417f80fabebfdaac6104c498e5" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.269645 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2825940fb8c15a1edf0410c1dc202cc5c9ccc1417f80fabebfdaac6104c498e5"} err="failed to get container status \"2825940fb8c15a1edf0410c1dc202cc5c9ccc1417f80fabebfdaac6104c498e5\": rpc error: code = NotFound desc = could not find container \"2825940fb8c15a1edf0410c1dc202cc5c9ccc1417f80fabebfdaac6104c498e5\": container with ID starting with 2825940fb8c15a1edf0410c1dc202cc5c9ccc1417f80fabebfdaac6104c498e5 not found: ID does not exist" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.269670 4558 scope.go:117] "RemoveContainer" containerID="3a19ed138105b3740685ac28d410913719c1974f6fed6a27ac6a13e7aa6cb0a5" Jan 20 16:59:53 crc kubenswrapper[4558]: E0120 16:59:53.270082 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3a19ed138105b3740685ac28d410913719c1974f6fed6a27ac6a13e7aa6cb0a5\": container with ID starting with 3a19ed138105b3740685ac28d410913719c1974f6fed6a27ac6a13e7aa6cb0a5 not found: ID does not exist" containerID="3a19ed138105b3740685ac28d410913719c1974f6fed6a27ac6a13e7aa6cb0a5" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.270103 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3a19ed138105b3740685ac28d410913719c1974f6fed6a27ac6a13e7aa6cb0a5"} err="failed to get container status \"3a19ed138105b3740685ac28d410913719c1974f6fed6a27ac6a13e7aa6cb0a5\": rpc error: code = NotFound desc = could not 
find container \"3a19ed138105b3740685ac28d410913719c1974f6fed6a27ac6a13e7aa6cb0a5\": container with ID starting with 3a19ed138105b3740685ac28d410913719c1974f6fed6a27ac6a13e7aa6cb0a5 not found: ID does not exist" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.270121 4558 scope.go:117] "RemoveContainer" containerID="efe3ef9d14657f5a9f606bc1ca5681abed7f791ea57ee1600378d05da399c8a7" Jan 20 16:59:53 crc kubenswrapper[4558]: E0120 16:59:53.270423 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"efe3ef9d14657f5a9f606bc1ca5681abed7f791ea57ee1600378d05da399c8a7\": container with ID starting with efe3ef9d14657f5a9f606bc1ca5681abed7f791ea57ee1600378d05da399c8a7 not found: ID does not exist" containerID="efe3ef9d14657f5a9f606bc1ca5681abed7f791ea57ee1600378d05da399c8a7" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.270450 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"efe3ef9d14657f5a9f606bc1ca5681abed7f791ea57ee1600378d05da399c8a7"} err="failed to get container status \"efe3ef9d14657f5a9f606bc1ca5681abed7f791ea57ee1600378d05da399c8a7\": rpc error: code = NotFound desc = could not find container \"efe3ef9d14657f5a9f606bc1ca5681abed7f791ea57ee1600378d05da399c8a7\": container with ID starting with efe3ef9d14657f5a9f606bc1ca5681abed7f791ea57ee1600378d05da399c8a7 not found: ID does not exist" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.281349 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dmm5f\" (UniqueName: \"kubernetes.io/projected/f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13-kube-api-access-dmm5f\") pod \"ceilometer-0\" (UID: \"f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.281621 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13-config-data\") pod \"ceilometer-0\" (UID: \"f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.281665 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13-scripts\") pod \"ceilometer-0\" (UID: \"f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.281692 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.281716 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.281882 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13-log-httpd\") pod \"ceilometer-0\" (UID: \"f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.281979 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13-run-httpd\") pod \"ceilometer-0\" (UID: \"f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.282126 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/966d48b4-44df-4217-be28-d015d6defc20-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.282143 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t5ml2\" (UniqueName: \"kubernetes.io/projected/966d48b4-44df-4217-be28-d015d6defc20-kube-api-access-t5ml2\") on node \"crc\" DevicePath \"\"" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.282155 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/966d48b4-44df-4217-be28-d015d6defc20-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.383046 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13-config-data\") pod \"ceilometer-0\" (UID: \"f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.383086 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13-scripts\") pod \"ceilometer-0\" (UID: \"f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.383109 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.383126 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.383157 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13-log-httpd\") pod \"ceilometer-0\" (UID: \"f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.383215 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13-run-httpd\") pod \"ceilometer-0\" (UID: \"f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13\") " 
pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.383239 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dmm5f\" (UniqueName: \"kubernetes.io/projected/f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13-kube-api-access-dmm5f\") pod \"ceilometer-0\" (UID: \"f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.383687 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13-log-httpd\") pod \"ceilometer-0\" (UID: \"f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.383735 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13-run-httpd\") pod \"ceilometer-0\" (UID: \"f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.387428 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13-config-data\") pod \"ceilometer-0\" (UID: \"f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.387979 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13-scripts\") pod \"ceilometer-0\" (UID: \"f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.388963 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.396844 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.401705 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dmm5f\" (UniqueName: \"kubernetes.io/projected/f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13-kube-api-access-dmm5f\") pod \"ceilometer-0\" (UID: \"f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.462095 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.470645 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.480569 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.481822 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.483896 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-novncproxy-cell1-public-svc" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.483902 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-novncproxy-cell1-vencrypt" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.485702 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-novncproxy-config-data" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.489887 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.511856 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.583444 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.586574 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/316ffdc9-b0e1-47d9-90f8-45a8ceb87353-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"316ffdc9-b0e1-47d9-90f8-45a8ceb87353\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.586615 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/316ffdc9-b0e1-47d9-90f8-45a8ceb87353-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"316ffdc9-b0e1-47d9-90f8-45a8ceb87353\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.586684 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/316ffdc9-b0e1-47d9-90f8-45a8ceb87353-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"316ffdc9-b0e1-47d9-90f8-45a8ceb87353\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.586717 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9xtfh\" (UniqueName: \"kubernetes.io/projected/316ffdc9-b0e1-47d9-90f8-45a8ceb87353-kube-api-access-9xtfh\") pod \"nova-cell1-novncproxy-0\" (UID: \"316ffdc9-b0e1-47d9-90f8-45a8ceb87353\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.586733 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/316ffdc9-b0e1-47d9-90f8-45a8ceb87353-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"316ffdc9-b0e1-47d9-90f8-45a8ceb87353\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.690803 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/316ffdc9-b0e1-47d9-90f8-45a8ceb87353-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: 
\"316ffdc9-b0e1-47d9-90f8-45a8ceb87353\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.690860 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/316ffdc9-b0e1-47d9-90f8-45a8ceb87353-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"316ffdc9-b0e1-47d9-90f8-45a8ceb87353\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.690954 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/316ffdc9-b0e1-47d9-90f8-45a8ceb87353-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"316ffdc9-b0e1-47d9-90f8-45a8ceb87353\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.691010 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9xtfh\" (UniqueName: \"kubernetes.io/projected/316ffdc9-b0e1-47d9-90f8-45a8ceb87353-kube-api-access-9xtfh\") pod \"nova-cell1-novncproxy-0\" (UID: \"316ffdc9-b0e1-47d9-90f8-45a8ceb87353\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.691026 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/316ffdc9-b0e1-47d9-90f8-45a8ceb87353-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"316ffdc9-b0e1-47d9-90f8-45a8ceb87353\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.695366 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/316ffdc9-b0e1-47d9-90f8-45a8ceb87353-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"316ffdc9-b0e1-47d9-90f8-45a8ceb87353\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.695491 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/316ffdc9-b0e1-47d9-90f8-45a8ceb87353-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"316ffdc9-b0e1-47d9-90f8-45a8ceb87353\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.696075 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/316ffdc9-b0e1-47d9-90f8-45a8ceb87353-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"316ffdc9-b0e1-47d9-90f8-45a8ceb87353\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.697137 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/316ffdc9-b0e1-47d9-90f8-45a8ceb87353-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"316ffdc9-b0e1-47d9-90f8-45a8ceb87353\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.710542 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9xtfh\" (UniqueName: \"kubernetes.io/projected/316ffdc9-b0e1-47d9-90f8-45a8ceb87353-kube-api-access-9xtfh\") pod \"nova-cell1-novncproxy-0\" (UID: 
\"316ffdc9-b0e1-47d9-90f8-45a8ceb87353\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.808926 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 16:59:53 crc kubenswrapper[4558]: I0120 16:59:53.871129 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 16:59:54 crc kubenswrapper[4558]: I0120 16:59:54.114485 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13","Type":"ContainerStarted","Data":"b9827572335abe6681b267b22eaa440d65b110777ebf8a0c8381349ca066a881"} Jan 20 16:59:54 crc kubenswrapper[4558]: I0120 16:59:54.207583 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 16:59:54 crc kubenswrapper[4558]: W0120 16:59:54.207964 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod316ffdc9_b0e1_47d9_90f8_45a8ceb87353.slice/crio-22ed144e22a0a787f2f5ee5d3f4132a65715e67507c454535805f97f41f9b903 WatchSource:0}: Error finding container 22ed144e22a0a787f2f5ee5d3f4132a65715e67507c454535805f97f41f9b903: Status 404 returned error can't find the container with id 22ed144e22a0a787f2f5ee5d3f4132a65715e67507c454535805f97f41f9b903 Jan 20 16:59:54 crc kubenswrapper[4558]: I0120 16:59:54.573917 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="473a463c-85cb-4702-9759-761904072818" path="/var/lib/kubelet/pods/473a463c-85cb-4702-9759-761904072818/volumes" Jan 20 16:59:54 crc kubenswrapper[4558]: I0120 16:59:54.574929 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="966d48b4-44df-4217-be28-d015d6defc20" path="/var/lib/kubelet/pods/966d48b4-44df-4217-be28-d015d6defc20/volumes" Jan 20 16:59:55 crc kubenswrapper[4558]: I0120 16:59:55.061148 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 16:59:55 crc kubenswrapper[4558]: I0120 16:59:55.061597 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/kube-state-metrics-0" podUID="7bec7fd7-edb6-4186-9fbb-09b10b49b280" containerName="kube-state-metrics" containerID="cri-o://14355c2e786124ac69d8fdaed5080cbd251d7fb3c6511f9f485bd19331884edf" gracePeriod=30 Jan 20 16:59:55 crc kubenswrapper[4558]: I0120 16:59:55.123353 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"316ffdc9-b0e1-47d9-90f8-45a8ceb87353","Type":"ContainerStarted","Data":"1e113a1489cb99ef7da66f10ddf493b5813e1713d85e6d3338e7700ffa711d7c"} Jan 20 16:59:55 crc kubenswrapper[4558]: I0120 16:59:55.123395 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"316ffdc9-b0e1-47d9-90f8-45a8ceb87353","Type":"ContainerStarted","Data":"22ed144e22a0a787f2f5ee5d3f4132a65715e67507c454535805f97f41f9b903"} Jan 20 16:59:55 crc kubenswrapper[4558]: I0120 16:59:55.124317 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13","Type":"ContainerStarted","Data":"1cfe9eea184a95209e0efe16db0c988a10ac4cafef8d6d9b82b11e451d505963"} Jan 20 16:59:55 crc kubenswrapper[4558]: I0120 16:59:55.141333 4558 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" podStartSLOduration=2.141318684 podStartE2EDuration="2.141318684s" podCreationTimestamp="2026-01-20 16:59:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:59:55.135465408 +0000 UTC m=+1088.895803375" watchObservedRunningTime="2026-01-20 16:59:55.141318684 +0000 UTC m=+1088.901656652" Jan 20 16:59:55 crc kubenswrapper[4558]: I0120 16:59:55.447139 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 16:59:55 crc kubenswrapper[4558]: I0120 16:59:55.537067 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb7ld\" (UniqueName: \"kubernetes.io/projected/7bec7fd7-edb6-4186-9fbb-09b10b49b280-kube-api-access-sb7ld\") pod \"7bec7fd7-edb6-4186-9fbb-09b10b49b280\" (UID: \"7bec7fd7-edb6-4186-9fbb-09b10b49b280\") " Jan 20 16:59:55 crc kubenswrapper[4558]: I0120 16:59:55.543215 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bec7fd7-edb6-4186-9fbb-09b10b49b280-kube-api-access-sb7ld" (OuterVolumeSpecName: "kube-api-access-sb7ld") pod "7bec7fd7-edb6-4186-9fbb-09b10b49b280" (UID: "7bec7fd7-edb6-4186-9fbb-09b10b49b280"). InnerVolumeSpecName "kube-api-access-sb7ld". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:59:55 crc kubenswrapper[4558]: I0120 16:59:55.584350 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 16:59:55 crc kubenswrapper[4558]: I0120 16:59:55.638696 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb7ld\" (UniqueName: \"kubernetes.io/projected/7bec7fd7-edb6-4186-9fbb-09b10b49b280-kube-api-access-sb7ld\") on node \"crc\" DevicePath \"\"" Jan 20 16:59:55 crc kubenswrapper[4558]: I0120 16:59:55.740150 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ef06289e-606c-4644-899d-e80e21228977-logs\") pod \"ef06289e-606c-4644-899d-e80e21228977\" (UID: \"ef06289e-606c-4644-899d-e80e21228977\") " Jan 20 16:59:55 crc kubenswrapper[4558]: I0120 16:59:55.740290 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef06289e-606c-4644-899d-e80e21228977-combined-ca-bundle\") pod \"ef06289e-606c-4644-899d-e80e21228977\" (UID: \"ef06289e-606c-4644-899d-e80e21228977\") " Jan 20 16:59:55 crc kubenswrapper[4558]: I0120 16:59:55.740431 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-54vzs\" (UniqueName: \"kubernetes.io/projected/ef06289e-606c-4644-899d-e80e21228977-kube-api-access-54vzs\") pod \"ef06289e-606c-4644-899d-e80e21228977\" (UID: \"ef06289e-606c-4644-899d-e80e21228977\") " Jan 20 16:59:55 crc kubenswrapper[4558]: I0120 16:59:55.741119 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef06289e-606c-4644-899d-e80e21228977-config-data\") pod \"ef06289e-606c-4644-899d-e80e21228977\" (UID: \"ef06289e-606c-4644-899d-e80e21228977\") " Jan 20 16:59:55 crc kubenswrapper[4558]: I0120 16:59:55.741362 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/empty-dir/ef06289e-606c-4644-899d-e80e21228977-logs" (OuterVolumeSpecName: "logs") pod "ef06289e-606c-4644-899d-e80e21228977" (UID: "ef06289e-606c-4644-899d-e80e21228977"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 16:59:55 crc kubenswrapper[4558]: I0120 16:59:55.741867 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ef06289e-606c-4644-899d-e80e21228977-logs\") on node \"crc\" DevicePath \"\"" Jan 20 16:59:55 crc kubenswrapper[4558]: I0120 16:59:55.744317 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ef06289e-606c-4644-899d-e80e21228977-kube-api-access-54vzs" (OuterVolumeSpecName: "kube-api-access-54vzs") pod "ef06289e-606c-4644-899d-e80e21228977" (UID: "ef06289e-606c-4644-899d-e80e21228977"). InnerVolumeSpecName "kube-api-access-54vzs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 16:59:55 crc kubenswrapper[4558]: I0120 16:59:55.760310 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef06289e-606c-4644-899d-e80e21228977-config-data" (OuterVolumeSpecName: "config-data") pod "ef06289e-606c-4644-899d-e80e21228977" (UID: "ef06289e-606c-4644-899d-e80e21228977"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:59:55 crc kubenswrapper[4558]: I0120 16:59:55.765993 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef06289e-606c-4644-899d-e80e21228977-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ef06289e-606c-4644-899d-e80e21228977" (UID: "ef06289e-606c-4644-899d-e80e21228977"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 16:59:55 crc kubenswrapper[4558]: I0120 16:59:55.843393 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-54vzs\" (UniqueName: \"kubernetes.io/projected/ef06289e-606c-4644-899d-e80e21228977-kube-api-access-54vzs\") on node \"crc\" DevicePath \"\"" Jan 20 16:59:55 crc kubenswrapper[4558]: I0120 16:59:55.843423 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef06289e-606c-4644-899d-e80e21228977-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 16:59:55 crc kubenswrapper[4558]: I0120 16:59:55.843434 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef06289e-606c-4644-899d-e80e21228977-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 16:59:56 crc kubenswrapper[4558]: I0120 16:59:56.132811 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13","Type":"ContainerStarted","Data":"7dc47b2d3e8519484a029584834258692ee775e86feca47dbdd9da48984af2cc"} Jan 20 16:59:56 crc kubenswrapper[4558]: I0120 16:59:56.134986 4558 generic.go:334] "Generic (PLEG): container finished" podID="7bec7fd7-edb6-4186-9fbb-09b10b49b280" containerID="14355c2e786124ac69d8fdaed5080cbd251d7fb3c6511f9f485bd19331884edf" exitCode=2 Jan 20 16:59:56 crc kubenswrapper[4558]: I0120 16:59:56.135089 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 16:59:56 crc kubenswrapper[4558]: I0120 16:59:56.137869 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"7bec7fd7-edb6-4186-9fbb-09b10b49b280","Type":"ContainerDied","Data":"14355c2e786124ac69d8fdaed5080cbd251d7fb3c6511f9f485bd19331884edf"} Jan 20 16:59:56 crc kubenswrapper[4558]: I0120 16:59:56.137897 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"7bec7fd7-edb6-4186-9fbb-09b10b49b280","Type":"ContainerDied","Data":"9996a5b8da748425da2cc85e8d1b163f6b641d7fcd97c9ff5115ebc81c536103"} Jan 20 16:59:56 crc kubenswrapper[4558]: I0120 16:59:56.137926 4558 scope.go:117] "RemoveContainer" containerID="14355c2e786124ac69d8fdaed5080cbd251d7fb3c6511f9f485bd19331884edf" Jan 20 16:59:56 crc kubenswrapper[4558]: I0120 16:59:56.142339 4558 generic.go:334] "Generic (PLEG): container finished" podID="ef06289e-606c-4644-899d-e80e21228977" containerID="038d8ffac4b3f152867ee7ad7cbec45a4f1368fff79ac971ad761889b8f60991" exitCode=0 Jan 20 16:59:56 crc kubenswrapper[4558]: I0120 16:59:56.142390 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 16:59:56 crc kubenswrapper[4558]: I0120 16:59:56.142417 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"ef06289e-606c-4644-899d-e80e21228977","Type":"ContainerDied","Data":"038d8ffac4b3f152867ee7ad7cbec45a4f1368fff79ac971ad761889b8f60991"} Jan 20 16:59:56 crc kubenswrapper[4558]: I0120 16:59:56.142440 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"ef06289e-606c-4644-899d-e80e21228977","Type":"ContainerDied","Data":"5f19d37d87398cc8eede4263d8e826b0b7f034c333a76494079e14d3d47b23aa"} Jan 20 16:59:56 crc kubenswrapper[4558]: I0120 16:59:56.164006 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 16:59:56 crc kubenswrapper[4558]: I0120 16:59:56.165443 4558 scope.go:117] "RemoveContainer" containerID="14355c2e786124ac69d8fdaed5080cbd251d7fb3c6511f9f485bd19331884edf" Jan 20 16:59:56 crc kubenswrapper[4558]: E0120 16:59:56.166385 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"14355c2e786124ac69d8fdaed5080cbd251d7fb3c6511f9f485bd19331884edf\": container with ID starting with 14355c2e786124ac69d8fdaed5080cbd251d7fb3c6511f9f485bd19331884edf not found: ID does not exist" containerID="14355c2e786124ac69d8fdaed5080cbd251d7fb3c6511f9f485bd19331884edf" Jan 20 16:59:56 crc kubenswrapper[4558]: I0120 16:59:56.166430 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"14355c2e786124ac69d8fdaed5080cbd251d7fb3c6511f9f485bd19331884edf"} err="failed to get container status \"14355c2e786124ac69d8fdaed5080cbd251d7fb3c6511f9f485bd19331884edf\": rpc error: code = NotFound desc = could not find container \"14355c2e786124ac69d8fdaed5080cbd251d7fb3c6511f9f485bd19331884edf\": container with ID starting with 14355c2e786124ac69d8fdaed5080cbd251d7fb3c6511f9f485bd19331884edf not found: ID does not exist" Jan 20 16:59:56 crc kubenswrapper[4558]: I0120 16:59:56.166453 4558 scope.go:117] "RemoveContainer" containerID="038d8ffac4b3f152867ee7ad7cbec45a4f1368fff79ac971ad761889b8f60991" Jan 20 16:59:56 crc 
kubenswrapper[4558]: I0120 16:59:56.169319 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 16:59:56 crc kubenswrapper[4558]: I0120 16:59:56.179954 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 16:59:56 crc kubenswrapper[4558]: I0120 16:59:56.186153 4558 scope.go:117] "RemoveContainer" containerID="d67d05b9a1811fa02ce8ef71918c092c22214f4d7c7c181a9745cf9cd671b5c8" Jan 20 16:59:56 crc kubenswrapper[4558]: I0120 16:59:56.200243 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 16:59:56 crc kubenswrapper[4558]: E0120 16:59:56.200638 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef06289e-606c-4644-899d-e80e21228977" containerName="nova-api-api" Jan 20 16:59:56 crc kubenswrapper[4558]: I0120 16:59:56.200654 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef06289e-606c-4644-899d-e80e21228977" containerName="nova-api-api" Jan 20 16:59:56 crc kubenswrapper[4558]: E0120 16:59:56.200673 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7bec7fd7-edb6-4186-9fbb-09b10b49b280" containerName="kube-state-metrics" Jan 20 16:59:56 crc kubenswrapper[4558]: I0120 16:59:56.200679 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7bec7fd7-edb6-4186-9fbb-09b10b49b280" containerName="kube-state-metrics" Jan 20 16:59:56 crc kubenswrapper[4558]: E0120 16:59:56.200703 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef06289e-606c-4644-899d-e80e21228977" containerName="nova-api-log" Jan 20 16:59:56 crc kubenswrapper[4558]: I0120 16:59:56.200708 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef06289e-606c-4644-899d-e80e21228977" containerName="nova-api-log" Jan 20 16:59:56 crc kubenswrapper[4558]: I0120 16:59:56.200878 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="7bec7fd7-edb6-4186-9fbb-09b10b49b280" containerName="kube-state-metrics" Jan 20 16:59:56 crc kubenswrapper[4558]: I0120 16:59:56.200895 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef06289e-606c-4644-899d-e80e21228977" containerName="nova-api-log" Jan 20 16:59:56 crc kubenswrapper[4558]: I0120 16:59:56.200908 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef06289e-606c-4644-899d-e80e21228977" containerName="nova-api-api" Jan 20 16:59:56 crc kubenswrapper[4558]: I0120 16:59:56.201510 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 16:59:56 crc kubenswrapper[4558]: I0120 16:59:56.212537 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"kube-state-metrics-tls-config" Jan 20 16:59:56 crc kubenswrapper[4558]: I0120 16:59:56.214553 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 16:59:56 crc kubenswrapper[4558]: I0120 16:59:56.218346 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-kube-state-metrics-svc" Jan 20 16:59:56 crc kubenswrapper[4558]: I0120 16:59:56.226113 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 16:59:56 crc kubenswrapper[4558]: I0120 16:59:56.240494 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 16:59:56 crc kubenswrapper[4558]: I0120 16:59:56.241818 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 16:59:56 crc kubenswrapper[4558]: I0120 16:59:56.246205 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-public-svc" Jan 20 16:59:56 crc kubenswrapper[4558]: I0120 16:59:56.246766 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-api-config-data" Jan 20 16:59:56 crc kubenswrapper[4558]: I0120 16:59:56.247011 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-internal-svc" Jan 20 16:59:56 crc kubenswrapper[4558]: I0120 16:59:56.266359 4558 scope.go:117] "RemoveContainer" containerID="038d8ffac4b3f152867ee7ad7cbec45a4f1368fff79ac971ad761889b8f60991" Jan 20 16:59:56 crc kubenswrapper[4558]: E0120 16:59:56.268788 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"038d8ffac4b3f152867ee7ad7cbec45a4f1368fff79ac971ad761889b8f60991\": container with ID starting with 038d8ffac4b3f152867ee7ad7cbec45a4f1368fff79ac971ad761889b8f60991 not found: ID does not exist" containerID="038d8ffac4b3f152867ee7ad7cbec45a4f1368fff79ac971ad761889b8f60991" Jan 20 16:59:56 crc kubenswrapper[4558]: I0120 16:59:56.268819 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"038d8ffac4b3f152867ee7ad7cbec45a4f1368fff79ac971ad761889b8f60991"} err="failed to get container status \"038d8ffac4b3f152867ee7ad7cbec45a4f1368fff79ac971ad761889b8f60991\": rpc error: code = NotFound desc = could not find container \"038d8ffac4b3f152867ee7ad7cbec45a4f1368fff79ac971ad761889b8f60991\": container with ID starting with 038d8ffac4b3f152867ee7ad7cbec45a4f1368fff79ac971ad761889b8f60991 not found: ID does not exist" Jan 20 16:59:56 crc kubenswrapper[4558]: I0120 16:59:56.268840 4558 scope.go:117] "RemoveContainer" containerID="d67d05b9a1811fa02ce8ef71918c092c22214f4d7c7c181a9745cf9cd671b5c8" Jan 20 16:59:56 crc kubenswrapper[4558]: E0120 16:59:56.269568 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d67d05b9a1811fa02ce8ef71918c092c22214f4d7c7c181a9745cf9cd671b5c8\": container with ID starting with d67d05b9a1811fa02ce8ef71918c092c22214f4d7c7c181a9745cf9cd671b5c8 not found: ID does not exist" containerID="d67d05b9a1811fa02ce8ef71918c092c22214f4d7c7c181a9745cf9cd671b5c8" Jan 20 16:59:56 crc 
kubenswrapper[4558]: I0120 16:59:56.269816 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d67d05b9a1811fa02ce8ef71918c092c22214f4d7c7c181a9745cf9cd671b5c8"} err="failed to get container status \"d67d05b9a1811fa02ce8ef71918c092c22214f4d7c7c181a9745cf9cd671b5c8\": rpc error: code = NotFound desc = could not find container \"d67d05b9a1811fa02ce8ef71918c092c22214f4d7c7c181a9745cf9cd671b5c8\": container with ID starting with d67d05b9a1811fa02ce8ef71918c092c22214f4d7c7c181a9745cf9cd671b5c8 not found: ID does not exist" Jan 20 16:59:56 crc kubenswrapper[4558]: I0120 16:59:56.282927 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 16:59:56 crc kubenswrapper[4558]: I0120 16:59:56.352967 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b2ffa52-4d19-46f6-aea5-62fc758def73-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"3b2ffa52-4d19-46f6-aea5-62fc758def73\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 16:59:56 crc kubenswrapper[4558]: I0120 16:59:56.353028 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6afafa3e-aec8-4d69-beb3-ae8d48c9aad4-public-tls-certs\") pod \"nova-api-0\" (UID: \"6afafa3e-aec8-4d69-beb3-ae8d48c9aad4\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 16:59:56 crc kubenswrapper[4558]: I0120 16:59:56.353276 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cgjvx\" (UniqueName: \"kubernetes.io/projected/3b2ffa52-4d19-46f6-aea5-62fc758def73-kube-api-access-cgjvx\") pod \"kube-state-metrics-0\" (UID: \"3b2ffa52-4d19-46f6-aea5-62fc758def73\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 16:59:56 crc kubenswrapper[4558]: I0120 16:59:56.353304 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/3b2ffa52-4d19-46f6-aea5-62fc758def73-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"3b2ffa52-4d19-46f6-aea5-62fc758def73\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 16:59:56 crc kubenswrapper[4558]: I0120 16:59:56.353322 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6afafa3e-aec8-4d69-beb3-ae8d48c9aad4-logs\") pod \"nova-api-0\" (UID: \"6afafa3e-aec8-4d69-beb3-ae8d48c9aad4\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 16:59:56 crc kubenswrapper[4558]: I0120 16:59:56.353363 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/3b2ffa52-4d19-46f6-aea5-62fc758def73-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"3b2ffa52-4d19-46f6-aea5-62fc758def73\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 16:59:56 crc kubenswrapper[4558]: I0120 16:59:56.353388 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6afafa3e-aec8-4d69-beb3-ae8d48c9aad4-config-data\") pod \"nova-api-0\" (UID: \"6afafa3e-aec8-4d69-beb3-ae8d48c9aad4\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 
16:59:56 crc kubenswrapper[4558]: I0120 16:59:56.353571 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6afafa3e-aec8-4d69-beb3-ae8d48c9aad4-internal-tls-certs\") pod \"nova-api-0\" (UID: \"6afafa3e-aec8-4d69-beb3-ae8d48c9aad4\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 16:59:56 crc kubenswrapper[4558]: I0120 16:59:56.353629 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6vq84\" (UniqueName: \"kubernetes.io/projected/6afafa3e-aec8-4d69-beb3-ae8d48c9aad4-kube-api-access-6vq84\") pod \"nova-api-0\" (UID: \"6afafa3e-aec8-4d69-beb3-ae8d48c9aad4\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 16:59:56 crc kubenswrapper[4558]: I0120 16:59:56.353703 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6afafa3e-aec8-4d69-beb3-ae8d48c9aad4-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6afafa3e-aec8-4d69-beb3-ae8d48c9aad4\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 16:59:56 crc kubenswrapper[4558]: I0120 16:59:56.455502 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b2ffa52-4d19-46f6-aea5-62fc758def73-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"3b2ffa52-4d19-46f6-aea5-62fc758def73\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 16:59:56 crc kubenswrapper[4558]: I0120 16:59:56.455852 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6afafa3e-aec8-4d69-beb3-ae8d48c9aad4-public-tls-certs\") pod \"nova-api-0\" (UID: \"6afafa3e-aec8-4d69-beb3-ae8d48c9aad4\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 16:59:56 crc kubenswrapper[4558]: I0120 16:59:56.456119 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cgjvx\" (UniqueName: \"kubernetes.io/projected/3b2ffa52-4d19-46f6-aea5-62fc758def73-kube-api-access-cgjvx\") pod \"kube-state-metrics-0\" (UID: \"3b2ffa52-4d19-46f6-aea5-62fc758def73\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 16:59:56 crc kubenswrapper[4558]: I0120 16:59:56.456179 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/3b2ffa52-4d19-46f6-aea5-62fc758def73-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"3b2ffa52-4d19-46f6-aea5-62fc758def73\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 16:59:56 crc kubenswrapper[4558]: I0120 16:59:56.456212 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6afafa3e-aec8-4d69-beb3-ae8d48c9aad4-logs\") pod \"nova-api-0\" (UID: \"6afafa3e-aec8-4d69-beb3-ae8d48c9aad4\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 16:59:56 crc kubenswrapper[4558]: I0120 16:59:56.456242 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/3b2ffa52-4d19-46f6-aea5-62fc758def73-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"3b2ffa52-4d19-46f6-aea5-62fc758def73\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 16:59:56 crc kubenswrapper[4558]: I0120 16:59:56.456269 
4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6afafa3e-aec8-4d69-beb3-ae8d48c9aad4-config-data\") pod \"nova-api-0\" (UID: \"6afafa3e-aec8-4d69-beb3-ae8d48c9aad4\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 16:59:56 crc kubenswrapper[4558]: I0120 16:59:56.456370 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6afafa3e-aec8-4d69-beb3-ae8d48c9aad4-internal-tls-certs\") pod \"nova-api-0\" (UID: \"6afafa3e-aec8-4d69-beb3-ae8d48c9aad4\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 16:59:56 crc kubenswrapper[4558]: I0120 16:59:56.456399 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6vq84\" (UniqueName: \"kubernetes.io/projected/6afafa3e-aec8-4d69-beb3-ae8d48c9aad4-kube-api-access-6vq84\") pod \"nova-api-0\" (UID: \"6afafa3e-aec8-4d69-beb3-ae8d48c9aad4\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 16:59:56 crc kubenswrapper[4558]: I0120 16:59:56.456448 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6afafa3e-aec8-4d69-beb3-ae8d48c9aad4-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6afafa3e-aec8-4d69-beb3-ae8d48c9aad4\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 16:59:56 crc kubenswrapper[4558]: I0120 16:59:56.456591 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6afafa3e-aec8-4d69-beb3-ae8d48c9aad4-logs\") pod \"nova-api-0\" (UID: \"6afafa3e-aec8-4d69-beb3-ae8d48c9aad4\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 16:59:56 crc kubenswrapper[4558]: I0120 16:59:56.460596 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6afafa3e-aec8-4d69-beb3-ae8d48c9aad4-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6afafa3e-aec8-4d69-beb3-ae8d48c9aad4\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 16:59:56 crc kubenswrapper[4558]: I0120 16:59:56.461268 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/3b2ffa52-4d19-46f6-aea5-62fc758def73-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"3b2ffa52-4d19-46f6-aea5-62fc758def73\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 16:59:56 crc kubenswrapper[4558]: I0120 16:59:56.460629 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6afafa3e-aec8-4d69-beb3-ae8d48c9aad4-public-tls-certs\") pod \"nova-api-0\" (UID: \"6afafa3e-aec8-4d69-beb3-ae8d48c9aad4\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 16:59:56 crc kubenswrapper[4558]: I0120 16:59:56.461661 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/3b2ffa52-4d19-46f6-aea5-62fc758def73-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"3b2ffa52-4d19-46f6-aea5-62fc758def73\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 16:59:56 crc kubenswrapper[4558]: I0120 16:59:56.462124 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6afafa3e-aec8-4d69-beb3-ae8d48c9aad4-config-data\") pod \"nova-api-0\" (UID: 
\"6afafa3e-aec8-4d69-beb3-ae8d48c9aad4\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 16:59:56 crc kubenswrapper[4558]: I0120 16:59:56.467967 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b2ffa52-4d19-46f6-aea5-62fc758def73-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"3b2ffa52-4d19-46f6-aea5-62fc758def73\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 16:59:56 crc kubenswrapper[4558]: I0120 16:59:56.472216 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cgjvx\" (UniqueName: \"kubernetes.io/projected/3b2ffa52-4d19-46f6-aea5-62fc758def73-kube-api-access-cgjvx\") pod \"kube-state-metrics-0\" (UID: \"3b2ffa52-4d19-46f6-aea5-62fc758def73\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 16:59:56 crc kubenswrapper[4558]: I0120 16:59:56.473357 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6afafa3e-aec8-4d69-beb3-ae8d48c9aad4-internal-tls-certs\") pod \"nova-api-0\" (UID: \"6afafa3e-aec8-4d69-beb3-ae8d48c9aad4\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 16:59:56 crc kubenswrapper[4558]: I0120 16:59:56.473776 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6vq84\" (UniqueName: \"kubernetes.io/projected/6afafa3e-aec8-4d69-beb3-ae8d48c9aad4-kube-api-access-6vq84\") pod \"nova-api-0\" (UID: \"6afafa3e-aec8-4d69-beb3-ae8d48c9aad4\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 16:59:56 crc kubenswrapper[4558]: I0120 16:59:56.575534 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bec7fd7-edb6-4186-9fbb-09b10b49b280" path="/var/lib/kubelet/pods/7bec7fd7-edb6-4186-9fbb-09b10b49b280/volumes" Jan 20 16:59:56 crc kubenswrapper[4558]: I0120 16:59:56.576292 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ef06289e-606c-4644-899d-e80e21228977" path="/var/lib/kubelet/pods/ef06289e-606c-4644-899d-e80e21228977/volumes" Jan 20 16:59:56 crc kubenswrapper[4558]: I0120 16:59:56.578694 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 16:59:56 crc kubenswrapper[4558]: I0120 16:59:56.585276 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 16:59:57 crc kubenswrapper[4558]: W0120 16:59:57.018414 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6afafa3e_aec8_4d69_beb3_ae8d48c9aad4.slice/crio-ac757e22cf3c03cbd152200bf8fef455b224b185615ba94bbde8f9ace3b88cbf WatchSource:0}: Error finding container ac757e22cf3c03cbd152200bf8fef455b224b185615ba94bbde8f9ace3b88cbf: Status 404 returned error can't find the container with id ac757e22cf3c03cbd152200bf8fef455b224b185615ba94bbde8f9ace3b88cbf Jan 20 16:59:57 crc kubenswrapper[4558]: I0120 16:59:57.022292 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 16:59:57 crc kubenswrapper[4558]: I0120 16:59:57.066745 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 16:59:57 crc kubenswrapper[4558]: W0120 16:59:57.069470 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3b2ffa52_4d19_46f6_aea5_62fc758def73.slice/crio-f5a8cebf2ed809f975d647e2856e22eeb5999054069b00b7875007c34dce0cfb WatchSource:0}: Error finding container f5a8cebf2ed809f975d647e2856e22eeb5999054069b00b7875007c34dce0cfb: Status 404 returned error can't find the container with id f5a8cebf2ed809f975d647e2856e22eeb5999054069b00b7875007c34dce0cfb Jan 20 16:59:57 crc kubenswrapper[4558]: I0120 16:59:57.153871 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"6afafa3e-aec8-4d69-beb3-ae8d48c9aad4","Type":"ContainerStarted","Data":"5d4e4d796f3e9f903c11a264ac4eca8c07e322e42169b6cd650abccef3389baa"} Jan 20 16:59:57 crc kubenswrapper[4558]: I0120 16:59:57.153915 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"6afafa3e-aec8-4d69-beb3-ae8d48c9aad4","Type":"ContainerStarted","Data":"ac757e22cf3c03cbd152200bf8fef455b224b185615ba94bbde8f9ace3b88cbf"} Jan 20 16:59:57 crc kubenswrapper[4558]: I0120 16:59:57.155127 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"3b2ffa52-4d19-46f6-aea5-62fc758def73","Type":"ContainerStarted","Data":"f5a8cebf2ed809f975d647e2856e22eeb5999054069b00b7875007c34dce0cfb"} Jan 20 16:59:57 crc kubenswrapper[4558]: I0120 16:59:57.174155 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13","Type":"ContainerStarted","Data":"7b001e2399cb6257bcda617bfcb7b1e368ff0c2a696f4f0b8908899fb5ef73c1"} Jan 20 16:59:57 crc kubenswrapper[4558]: I0120 16:59:57.329952 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 16:59:57 crc kubenswrapper[4558]: I0120 16:59:57.331512 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 16:59:58 crc kubenswrapper[4558]: I0120 16:59:58.197131 4558 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"6afafa3e-aec8-4d69-beb3-ae8d48c9aad4","Type":"ContainerStarted","Data":"969678606d229a086d091f113b7fe767d47a4983b2586dab05a3ea147403e783"} Jan 20 16:59:58 crc kubenswrapper[4558]: I0120 16:59:58.202024 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"3b2ffa52-4d19-46f6-aea5-62fc758def73","Type":"ContainerStarted","Data":"3d3a8f2db93b034973dfab0303e49c56217b13cf7cd940baab7ecf8cfbd2745f"} Jan 20 16:59:58 crc kubenswrapper[4558]: I0120 16:59:58.202901 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 16:59:58 crc kubenswrapper[4558]: I0120 16:59:58.248342 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-api-0" podStartSLOduration=2.248327304 podStartE2EDuration="2.248327304s" podCreationTimestamp="2026-01-20 16:59:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 16:59:58.225265961 +0000 UTC m=+1091.985603929" watchObservedRunningTime="2026-01-20 16:59:58.248327304 +0000 UTC m=+1092.008665271" Jan 20 16:59:58 crc kubenswrapper[4558]: I0120 16:59:58.248770 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/kube-state-metrics-0" podStartSLOduration=1.988723077 podStartE2EDuration="2.248765938s" podCreationTimestamp="2026-01-20 16:59:56 +0000 UTC" firstStartedPulling="2026-01-20 16:59:57.071311891 +0000 UTC m=+1090.831649859" lastFinishedPulling="2026-01-20 16:59:57.331354753 +0000 UTC m=+1091.091692720" observedRunningTime="2026-01-20 16:59:58.243487864 +0000 UTC m=+1092.003825831" watchObservedRunningTime="2026-01-20 16:59:58.248765938 +0000 UTC m=+1092.009103906" Jan 20 16:59:58 crc kubenswrapper[4558]: I0120 16:59:58.809124 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 16:59:59 crc kubenswrapper[4558]: I0120 16:59:59.219765 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13","Type":"ContainerStarted","Data":"b805f5c63f446b530cd8775b3769fe58efa4409cf5d731cbd4ddb0886ebd2a0b"} Jan 20 16:59:59 crc kubenswrapper[4558]: I0120 16:59:59.220212 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13" containerName="ceilometer-central-agent" containerID="cri-o://1cfe9eea184a95209e0efe16db0c988a10ac4cafef8d6d9b82b11e451d505963" gracePeriod=30 Jan 20 16:59:59 crc kubenswrapper[4558]: I0120 16:59:59.220268 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13" containerName="sg-core" containerID="cri-o://7b001e2399cb6257bcda617bfcb7b1e368ff0c2a696f4f0b8908899fb5ef73c1" gracePeriod=30 Jan 20 16:59:59 crc kubenswrapper[4558]: I0120 16:59:59.220305 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13" containerName="proxy-httpd" containerID="cri-o://b805f5c63f446b530cd8775b3769fe58efa4409cf5d731cbd4ddb0886ebd2a0b" gracePeriod=30 Jan 20 16:59:59 crc kubenswrapper[4558]: I0120 16:59:59.220394 4558 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13" containerName="ceilometer-notification-agent" containerID="cri-o://7dc47b2d3e8519484a029584834258692ee775e86feca47dbdd9da48984af2cc" gracePeriod=30 Jan 20 16:59:59 crc kubenswrapper[4558]: I0120 16:59:59.241545 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=2.197057579 podStartE2EDuration="6.241528404s" podCreationTimestamp="2026-01-20 16:59:53 +0000 UTC" firstStartedPulling="2026-01-20 16:59:53.884624925 +0000 UTC m=+1087.644962892" lastFinishedPulling="2026-01-20 16:59:57.92909575 +0000 UTC m=+1091.689433717" observedRunningTime="2026-01-20 16:59:59.237683645 +0000 UTC m=+1092.998021613" watchObservedRunningTime="2026-01-20 16:59:59.241528404 +0000 UTC m=+1093.001866372" Jan 20 16:59:59 crc kubenswrapper[4558]: I0120 16:59:59.899510 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.029077 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13-log-httpd\") pod \"f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13\" (UID: \"f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13\") " Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.029322 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13-combined-ca-bundle\") pod \"f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13\" (UID: \"f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13\") " Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.029426 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13-sg-core-conf-yaml\") pod \"f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13\" (UID: \"f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13\") " Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.029487 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13-config-data\") pod \"f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13\" (UID: \"f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13\") " Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.029538 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13" (UID: "f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.029588 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13-run-httpd\") pod \"f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13\" (UID: \"f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13\") " Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.029631 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dmm5f\" (UniqueName: \"kubernetes.io/projected/f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13-kube-api-access-dmm5f\") pod \"f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13\" (UID: \"f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13\") " Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.029658 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13-scripts\") pod \"f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13\" (UID: \"f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13\") " Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.029894 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13" (UID: "f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.031107 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.031144 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.034971 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13-scripts" (OuterVolumeSpecName: "scripts") pod "f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13" (UID: "f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.035004 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13-kube-api-access-dmm5f" (OuterVolumeSpecName: "kube-api-access-dmm5f") pod "f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13" (UID: "f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13"). InnerVolumeSpecName "kube-api-access-dmm5f". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.054766 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13" (UID: "f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.086224 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13" (UID: "f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.102799 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13-config-data" (OuterVolumeSpecName: "config-data") pod "f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13" (UID: "f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.132849 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.132876 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.132886 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dmm5f\" (UniqueName: \"kubernetes.io/projected/f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13-kube-api-access-dmm5f\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.132896 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.132907 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.138470 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29482140-szzm6"] Jan 20 17:00:00 crc kubenswrapper[4558]: E0120 17:00:00.138761 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13" containerName="sg-core" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.138778 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13" containerName="sg-core" Jan 20 17:00:00 crc kubenswrapper[4558]: E0120 17:00:00.138789 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13" containerName="proxy-httpd" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.138795 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13" containerName="proxy-httpd" Jan 20 17:00:00 crc kubenswrapper[4558]: E0120 17:00:00.138819 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13" containerName="ceilometer-notification-agent" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.138824 4558 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13" containerName="ceilometer-notification-agent" Jan 20 17:00:00 crc kubenswrapper[4558]: E0120 17:00:00.138837 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13" containerName="ceilometer-central-agent" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.138844 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13" containerName="ceilometer-central-agent" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.139007 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13" containerName="proxy-httpd" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.139018 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13" containerName="sg-core" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.139025 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13" containerName="ceilometer-central-agent" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.139033 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13" containerName="ceilometer-notification-agent" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.139631 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29482140-szzm6" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.141827 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.142020 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.149709 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29482140-szzm6"] Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.229948 4558 generic.go:334] "Generic (PLEG): container finished" podID="f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13" containerID="b805f5c63f446b530cd8775b3769fe58efa4409cf5d731cbd4ddb0886ebd2a0b" exitCode=0 Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.229978 4558 generic.go:334] "Generic (PLEG): container finished" podID="f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13" containerID="7b001e2399cb6257bcda617bfcb7b1e368ff0c2a696f4f0b8908899fb5ef73c1" exitCode=2 Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.229982 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13","Type":"ContainerDied","Data":"b805f5c63f446b530cd8775b3769fe58efa4409cf5d731cbd4ddb0886ebd2a0b"} Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.229986 4558 generic.go:334] "Generic (PLEG): container finished" podID="f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13" containerID="7dc47b2d3e8519484a029584834258692ee775e86feca47dbdd9da48984af2cc" exitCode=0 Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.230010 4558 generic.go:334] "Generic (PLEG): container finished" podID="f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13" containerID="1cfe9eea184a95209e0efe16db0c988a10ac4cafef8d6d9b82b11e451d505963" exitCode=0 Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.230024 4558 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13","Type":"ContainerDied","Data":"7b001e2399cb6257bcda617bfcb7b1e368ff0c2a696f4f0b8908899fb5ef73c1"} Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.230039 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13","Type":"ContainerDied","Data":"7dc47b2d3e8519484a029584834258692ee775e86feca47dbdd9da48984af2cc"} Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.230049 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13","Type":"ContainerDied","Data":"1cfe9eea184a95209e0efe16db0c988a10ac4cafef8d6d9b82b11e451d505963"} Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.230059 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13","Type":"ContainerDied","Data":"b9827572335abe6681b267b22eaa440d65b110777ebf8a0c8381349ca066a881"} Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.230074 4558 scope.go:117] "RemoveContainer" containerID="b805f5c63f446b530cd8775b3769fe58efa4409cf5d731cbd4ddb0886ebd2a0b" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.230069 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.234676 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e1aaf016-20be-4047-919d-cda81437437a-config-volume\") pod \"collect-profiles-29482140-szzm6\" (UID: \"e1aaf016-20be-4047-919d-cda81437437a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482140-szzm6" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.234790 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-brmbt\" (UniqueName: \"kubernetes.io/projected/e1aaf016-20be-4047-919d-cda81437437a-kube-api-access-brmbt\") pod \"collect-profiles-29482140-szzm6\" (UID: \"e1aaf016-20be-4047-919d-cda81437437a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482140-szzm6" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.234964 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e1aaf016-20be-4047-919d-cda81437437a-secret-volume\") pod \"collect-profiles-29482140-szzm6\" (UID: \"e1aaf016-20be-4047-919d-cda81437437a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482140-szzm6" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.247423 4558 scope.go:117] "RemoveContainer" containerID="7b001e2399cb6257bcda617bfcb7b1e368ff0c2a696f4f0b8908899fb5ef73c1" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.257638 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.264639 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.267652 4558 scope.go:117] "RemoveContainer" containerID="7dc47b2d3e8519484a029584834258692ee775e86feca47dbdd9da48984af2cc" Jan 20 17:00:00 
crc kubenswrapper[4558]: I0120 17:00:00.284389 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.289546 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.291646 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.291829 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.293877 4558 scope.go:117] "RemoveContainer" containerID="1cfe9eea184a95209e0efe16db0c988a10ac4cafef8d6d9b82b11e451d505963" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.294058 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ceilometer-internal-svc" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.294641 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.313386 4558 scope.go:117] "RemoveContainer" containerID="b805f5c63f446b530cd8775b3769fe58efa4409cf5d731cbd4ddb0886ebd2a0b" Jan 20 17:00:00 crc kubenswrapper[4558]: E0120 17:00:00.313906 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b805f5c63f446b530cd8775b3769fe58efa4409cf5d731cbd4ddb0886ebd2a0b\": container with ID starting with b805f5c63f446b530cd8775b3769fe58efa4409cf5d731cbd4ddb0886ebd2a0b not found: ID does not exist" containerID="b805f5c63f446b530cd8775b3769fe58efa4409cf5d731cbd4ddb0886ebd2a0b" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.313954 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b805f5c63f446b530cd8775b3769fe58efa4409cf5d731cbd4ddb0886ebd2a0b"} err="failed to get container status \"b805f5c63f446b530cd8775b3769fe58efa4409cf5d731cbd4ddb0886ebd2a0b\": rpc error: code = NotFound desc = could not find container \"b805f5c63f446b530cd8775b3769fe58efa4409cf5d731cbd4ddb0886ebd2a0b\": container with ID starting with b805f5c63f446b530cd8775b3769fe58efa4409cf5d731cbd4ddb0886ebd2a0b not found: ID does not exist" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.313979 4558 scope.go:117] "RemoveContainer" containerID="7b001e2399cb6257bcda617bfcb7b1e368ff0c2a696f4f0b8908899fb5ef73c1" Jan 20 17:00:00 crc kubenswrapper[4558]: E0120 17:00:00.314395 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7b001e2399cb6257bcda617bfcb7b1e368ff0c2a696f4f0b8908899fb5ef73c1\": container with ID starting with 7b001e2399cb6257bcda617bfcb7b1e368ff0c2a696f4f0b8908899fb5ef73c1 not found: ID does not exist" containerID="7b001e2399cb6257bcda617bfcb7b1e368ff0c2a696f4f0b8908899fb5ef73c1" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.314432 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7b001e2399cb6257bcda617bfcb7b1e368ff0c2a696f4f0b8908899fb5ef73c1"} err="failed to get container status \"7b001e2399cb6257bcda617bfcb7b1e368ff0c2a696f4f0b8908899fb5ef73c1\": rpc error: code = NotFound desc = could not find container \"7b001e2399cb6257bcda617bfcb7b1e368ff0c2a696f4f0b8908899fb5ef73c1\": 
container with ID starting with 7b001e2399cb6257bcda617bfcb7b1e368ff0c2a696f4f0b8908899fb5ef73c1 not found: ID does not exist" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.314462 4558 scope.go:117] "RemoveContainer" containerID="7dc47b2d3e8519484a029584834258692ee775e86feca47dbdd9da48984af2cc" Jan 20 17:00:00 crc kubenswrapper[4558]: E0120 17:00:00.314809 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7dc47b2d3e8519484a029584834258692ee775e86feca47dbdd9da48984af2cc\": container with ID starting with 7dc47b2d3e8519484a029584834258692ee775e86feca47dbdd9da48984af2cc not found: ID does not exist" containerID="7dc47b2d3e8519484a029584834258692ee775e86feca47dbdd9da48984af2cc" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.314858 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7dc47b2d3e8519484a029584834258692ee775e86feca47dbdd9da48984af2cc"} err="failed to get container status \"7dc47b2d3e8519484a029584834258692ee775e86feca47dbdd9da48984af2cc\": rpc error: code = NotFound desc = could not find container \"7dc47b2d3e8519484a029584834258692ee775e86feca47dbdd9da48984af2cc\": container with ID starting with 7dc47b2d3e8519484a029584834258692ee775e86feca47dbdd9da48984af2cc not found: ID does not exist" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.314888 4558 scope.go:117] "RemoveContainer" containerID="1cfe9eea184a95209e0efe16db0c988a10ac4cafef8d6d9b82b11e451d505963" Jan 20 17:00:00 crc kubenswrapper[4558]: E0120 17:00:00.315295 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1cfe9eea184a95209e0efe16db0c988a10ac4cafef8d6d9b82b11e451d505963\": container with ID starting with 1cfe9eea184a95209e0efe16db0c988a10ac4cafef8d6d9b82b11e451d505963 not found: ID does not exist" containerID="1cfe9eea184a95209e0efe16db0c988a10ac4cafef8d6d9b82b11e451d505963" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.315326 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1cfe9eea184a95209e0efe16db0c988a10ac4cafef8d6d9b82b11e451d505963"} err="failed to get container status \"1cfe9eea184a95209e0efe16db0c988a10ac4cafef8d6d9b82b11e451d505963\": rpc error: code = NotFound desc = could not find container \"1cfe9eea184a95209e0efe16db0c988a10ac4cafef8d6d9b82b11e451d505963\": container with ID starting with 1cfe9eea184a95209e0efe16db0c988a10ac4cafef8d6d9b82b11e451d505963 not found: ID does not exist" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.315343 4558 scope.go:117] "RemoveContainer" containerID="b805f5c63f446b530cd8775b3769fe58efa4409cf5d731cbd4ddb0886ebd2a0b" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.315668 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b805f5c63f446b530cd8775b3769fe58efa4409cf5d731cbd4ddb0886ebd2a0b"} err="failed to get container status \"b805f5c63f446b530cd8775b3769fe58efa4409cf5d731cbd4ddb0886ebd2a0b\": rpc error: code = NotFound desc = could not find container \"b805f5c63f446b530cd8775b3769fe58efa4409cf5d731cbd4ddb0886ebd2a0b\": container with ID starting with b805f5c63f446b530cd8775b3769fe58efa4409cf5d731cbd4ddb0886ebd2a0b not found: ID does not exist" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.315699 4558 scope.go:117] "RemoveContainer" containerID="7b001e2399cb6257bcda617bfcb7b1e368ff0c2a696f4f0b8908899fb5ef73c1" 
Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.316147 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7b001e2399cb6257bcda617bfcb7b1e368ff0c2a696f4f0b8908899fb5ef73c1"} err="failed to get container status \"7b001e2399cb6257bcda617bfcb7b1e368ff0c2a696f4f0b8908899fb5ef73c1\": rpc error: code = NotFound desc = could not find container \"7b001e2399cb6257bcda617bfcb7b1e368ff0c2a696f4f0b8908899fb5ef73c1\": container with ID starting with 7b001e2399cb6257bcda617bfcb7b1e368ff0c2a696f4f0b8908899fb5ef73c1 not found: ID does not exist" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.316193 4558 scope.go:117] "RemoveContainer" containerID="7dc47b2d3e8519484a029584834258692ee775e86feca47dbdd9da48984af2cc" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.316490 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7dc47b2d3e8519484a029584834258692ee775e86feca47dbdd9da48984af2cc"} err="failed to get container status \"7dc47b2d3e8519484a029584834258692ee775e86feca47dbdd9da48984af2cc\": rpc error: code = NotFound desc = could not find container \"7dc47b2d3e8519484a029584834258692ee775e86feca47dbdd9da48984af2cc\": container with ID starting with 7dc47b2d3e8519484a029584834258692ee775e86feca47dbdd9da48984af2cc not found: ID does not exist" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.316515 4558 scope.go:117] "RemoveContainer" containerID="1cfe9eea184a95209e0efe16db0c988a10ac4cafef8d6d9b82b11e451d505963" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.316782 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1cfe9eea184a95209e0efe16db0c988a10ac4cafef8d6d9b82b11e451d505963"} err="failed to get container status \"1cfe9eea184a95209e0efe16db0c988a10ac4cafef8d6d9b82b11e451d505963\": rpc error: code = NotFound desc = could not find container \"1cfe9eea184a95209e0efe16db0c988a10ac4cafef8d6d9b82b11e451d505963\": container with ID starting with 1cfe9eea184a95209e0efe16db0c988a10ac4cafef8d6d9b82b11e451d505963 not found: ID does not exist" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.316804 4558 scope.go:117] "RemoveContainer" containerID="b805f5c63f446b530cd8775b3769fe58efa4409cf5d731cbd4ddb0886ebd2a0b" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.317283 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b805f5c63f446b530cd8775b3769fe58efa4409cf5d731cbd4ddb0886ebd2a0b"} err="failed to get container status \"b805f5c63f446b530cd8775b3769fe58efa4409cf5d731cbd4ddb0886ebd2a0b\": rpc error: code = NotFound desc = could not find container \"b805f5c63f446b530cd8775b3769fe58efa4409cf5d731cbd4ddb0886ebd2a0b\": container with ID starting with b805f5c63f446b530cd8775b3769fe58efa4409cf5d731cbd4ddb0886ebd2a0b not found: ID does not exist" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.317308 4558 scope.go:117] "RemoveContainer" containerID="7b001e2399cb6257bcda617bfcb7b1e368ff0c2a696f4f0b8908899fb5ef73c1" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.317560 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7b001e2399cb6257bcda617bfcb7b1e368ff0c2a696f4f0b8908899fb5ef73c1"} err="failed to get container status \"7b001e2399cb6257bcda617bfcb7b1e368ff0c2a696f4f0b8908899fb5ef73c1\": rpc error: code = NotFound desc = could not find container \"7b001e2399cb6257bcda617bfcb7b1e368ff0c2a696f4f0b8908899fb5ef73c1\": 
container with ID starting with 7b001e2399cb6257bcda617bfcb7b1e368ff0c2a696f4f0b8908899fb5ef73c1 not found: ID does not exist" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.317577 4558 scope.go:117] "RemoveContainer" containerID="7dc47b2d3e8519484a029584834258692ee775e86feca47dbdd9da48984af2cc" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.317838 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7dc47b2d3e8519484a029584834258692ee775e86feca47dbdd9da48984af2cc"} err="failed to get container status \"7dc47b2d3e8519484a029584834258692ee775e86feca47dbdd9da48984af2cc\": rpc error: code = NotFound desc = could not find container \"7dc47b2d3e8519484a029584834258692ee775e86feca47dbdd9da48984af2cc\": container with ID starting with 7dc47b2d3e8519484a029584834258692ee775e86feca47dbdd9da48984af2cc not found: ID does not exist" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.317873 4558 scope.go:117] "RemoveContainer" containerID="1cfe9eea184a95209e0efe16db0c988a10ac4cafef8d6d9b82b11e451d505963" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.318109 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1cfe9eea184a95209e0efe16db0c988a10ac4cafef8d6d9b82b11e451d505963"} err="failed to get container status \"1cfe9eea184a95209e0efe16db0c988a10ac4cafef8d6d9b82b11e451d505963\": rpc error: code = NotFound desc = could not find container \"1cfe9eea184a95209e0efe16db0c988a10ac4cafef8d6d9b82b11e451d505963\": container with ID starting with 1cfe9eea184a95209e0efe16db0c988a10ac4cafef8d6d9b82b11e451d505963 not found: ID does not exist" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.318130 4558 scope.go:117] "RemoveContainer" containerID="b805f5c63f446b530cd8775b3769fe58efa4409cf5d731cbd4ddb0886ebd2a0b" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.318440 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b805f5c63f446b530cd8775b3769fe58efa4409cf5d731cbd4ddb0886ebd2a0b"} err="failed to get container status \"b805f5c63f446b530cd8775b3769fe58efa4409cf5d731cbd4ddb0886ebd2a0b\": rpc error: code = NotFound desc = could not find container \"b805f5c63f446b530cd8775b3769fe58efa4409cf5d731cbd4ddb0886ebd2a0b\": container with ID starting with b805f5c63f446b530cd8775b3769fe58efa4409cf5d731cbd4ddb0886ebd2a0b not found: ID does not exist" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.318464 4558 scope.go:117] "RemoveContainer" containerID="7b001e2399cb6257bcda617bfcb7b1e368ff0c2a696f4f0b8908899fb5ef73c1" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.318680 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7b001e2399cb6257bcda617bfcb7b1e368ff0c2a696f4f0b8908899fb5ef73c1"} err="failed to get container status \"7b001e2399cb6257bcda617bfcb7b1e368ff0c2a696f4f0b8908899fb5ef73c1\": rpc error: code = NotFound desc = could not find container \"7b001e2399cb6257bcda617bfcb7b1e368ff0c2a696f4f0b8908899fb5ef73c1\": container with ID starting with 7b001e2399cb6257bcda617bfcb7b1e368ff0c2a696f4f0b8908899fb5ef73c1 not found: ID does not exist" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.318706 4558 scope.go:117] "RemoveContainer" containerID="7dc47b2d3e8519484a029584834258692ee775e86feca47dbdd9da48984af2cc" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.318973 4558 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"7dc47b2d3e8519484a029584834258692ee775e86feca47dbdd9da48984af2cc"} err="failed to get container status \"7dc47b2d3e8519484a029584834258692ee775e86feca47dbdd9da48984af2cc\": rpc error: code = NotFound desc = could not find container \"7dc47b2d3e8519484a029584834258692ee775e86feca47dbdd9da48984af2cc\": container with ID starting with 7dc47b2d3e8519484a029584834258692ee775e86feca47dbdd9da48984af2cc not found: ID does not exist" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.318997 4558 scope.go:117] "RemoveContainer" containerID="1cfe9eea184a95209e0efe16db0c988a10ac4cafef8d6d9b82b11e451d505963" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.319312 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1cfe9eea184a95209e0efe16db0c988a10ac4cafef8d6d9b82b11e451d505963"} err="failed to get container status \"1cfe9eea184a95209e0efe16db0c988a10ac4cafef8d6d9b82b11e451d505963\": rpc error: code = NotFound desc = could not find container \"1cfe9eea184a95209e0efe16db0c988a10ac4cafef8d6d9b82b11e451d505963\": container with ID starting with 1cfe9eea184a95209e0efe16db0c988a10ac4cafef8d6d9b82b11e451d505963 not found: ID does not exist" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.337055 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e1aaf016-20be-4047-919d-cda81437437a-config-volume\") pod \"collect-profiles-29482140-szzm6\" (UID: \"e1aaf016-20be-4047-919d-cda81437437a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482140-szzm6" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.337213 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-brmbt\" (UniqueName: \"kubernetes.io/projected/e1aaf016-20be-4047-919d-cda81437437a-kube-api-access-brmbt\") pod \"collect-profiles-29482140-szzm6\" (UID: \"e1aaf016-20be-4047-919d-cda81437437a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482140-szzm6" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.337350 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e1aaf016-20be-4047-919d-cda81437437a-secret-volume\") pod \"collect-profiles-29482140-szzm6\" (UID: \"e1aaf016-20be-4047-919d-cda81437437a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482140-szzm6" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.337885 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e1aaf016-20be-4047-919d-cda81437437a-config-volume\") pod \"collect-profiles-29482140-szzm6\" (UID: \"e1aaf016-20be-4047-919d-cda81437437a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482140-szzm6" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.342010 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e1aaf016-20be-4047-919d-cda81437437a-secret-volume\") pod \"collect-profiles-29482140-szzm6\" (UID: \"e1aaf016-20be-4047-919d-cda81437437a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482140-szzm6" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.351755 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-brmbt\" (UniqueName: 
\"kubernetes.io/projected/e1aaf016-20be-4047-919d-cda81437437a-kube-api-access-brmbt\") pod \"collect-profiles-29482140-szzm6\" (UID: \"e1aaf016-20be-4047-919d-cda81437437a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482140-szzm6" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.439357 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8074f6f3-f564-42bc-b08b-28ffe75bbbc5-scripts\") pod \"ceilometer-0\" (UID: \"8074f6f3-f564-42bc-b08b-28ffe75bbbc5\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.439429 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8074f6f3-f564-42bc-b08b-28ffe75bbbc5-run-httpd\") pod \"ceilometer-0\" (UID: \"8074f6f3-f564-42bc-b08b-28ffe75bbbc5\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.439453 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8074f6f3-f564-42bc-b08b-28ffe75bbbc5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8074f6f3-f564-42bc-b08b-28ffe75bbbc5\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.439586 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8074f6f3-f564-42bc-b08b-28ffe75bbbc5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8074f6f3-f564-42bc-b08b-28ffe75bbbc5\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.439701 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/8074f6f3-f564-42bc-b08b-28ffe75bbbc5-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"8074f6f3-f564-42bc-b08b-28ffe75bbbc5\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.439828 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8074f6f3-f564-42bc-b08b-28ffe75bbbc5-config-data\") pod \"ceilometer-0\" (UID: \"8074f6f3-f564-42bc-b08b-28ffe75bbbc5\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.439846 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8074f6f3-f564-42bc-b08b-28ffe75bbbc5-log-httpd\") pod \"ceilometer-0\" (UID: \"8074f6f3-f564-42bc-b08b-28ffe75bbbc5\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.440100 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4vb45\" (UniqueName: \"kubernetes.io/projected/8074f6f3-f564-42bc-b08b-28ffe75bbbc5-kube-api-access-4vb45\") pod \"ceilometer-0\" (UID: \"8074f6f3-f564-42bc-b08b-28ffe75bbbc5\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.456730 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29482140-szzm6" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.542533 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8074f6f3-f564-42bc-b08b-28ffe75bbbc5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8074f6f3-f564-42bc-b08b-28ffe75bbbc5\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.542610 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/8074f6f3-f564-42bc-b08b-28ffe75bbbc5-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"8074f6f3-f564-42bc-b08b-28ffe75bbbc5\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.542666 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8074f6f3-f564-42bc-b08b-28ffe75bbbc5-config-data\") pod \"ceilometer-0\" (UID: \"8074f6f3-f564-42bc-b08b-28ffe75bbbc5\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.542684 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8074f6f3-f564-42bc-b08b-28ffe75bbbc5-log-httpd\") pod \"ceilometer-0\" (UID: \"8074f6f3-f564-42bc-b08b-28ffe75bbbc5\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.542767 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4vb45\" (UniqueName: \"kubernetes.io/projected/8074f6f3-f564-42bc-b08b-28ffe75bbbc5-kube-api-access-4vb45\") pod \"ceilometer-0\" (UID: \"8074f6f3-f564-42bc-b08b-28ffe75bbbc5\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.542834 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8074f6f3-f564-42bc-b08b-28ffe75bbbc5-scripts\") pod \"ceilometer-0\" (UID: \"8074f6f3-f564-42bc-b08b-28ffe75bbbc5\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.542860 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8074f6f3-f564-42bc-b08b-28ffe75bbbc5-run-httpd\") pod \"ceilometer-0\" (UID: \"8074f6f3-f564-42bc-b08b-28ffe75bbbc5\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.542874 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8074f6f3-f564-42bc-b08b-28ffe75bbbc5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8074f6f3-f564-42bc-b08b-28ffe75bbbc5\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.543978 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8074f6f3-f564-42bc-b08b-28ffe75bbbc5-log-httpd\") pod \"ceilometer-0\" (UID: \"8074f6f3-f564-42bc-b08b-28ffe75bbbc5\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.545464 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/8074f6f3-f564-42bc-b08b-28ffe75bbbc5-run-httpd\") pod \"ceilometer-0\" (UID: \"8074f6f3-f564-42bc-b08b-28ffe75bbbc5\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.548995 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8074f6f3-f564-42bc-b08b-28ffe75bbbc5-scripts\") pod \"ceilometer-0\" (UID: \"8074f6f3-f564-42bc-b08b-28ffe75bbbc5\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.549034 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8074f6f3-f564-42bc-b08b-28ffe75bbbc5-config-data\") pod \"ceilometer-0\" (UID: \"8074f6f3-f564-42bc-b08b-28ffe75bbbc5\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.549903 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8074f6f3-f564-42bc-b08b-28ffe75bbbc5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8074f6f3-f564-42bc-b08b-28ffe75bbbc5\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.550712 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/8074f6f3-f564-42bc-b08b-28ffe75bbbc5-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"8074f6f3-f564-42bc-b08b-28ffe75bbbc5\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.551714 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8074f6f3-f564-42bc-b08b-28ffe75bbbc5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8074f6f3-f564-42bc-b08b-28ffe75bbbc5\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.560573 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4vb45\" (UniqueName: \"kubernetes.io/projected/8074f6f3-f564-42bc-b08b-28ffe75bbbc5-kube-api-access-4vb45\") pod \"ceilometer-0\" (UID: \"8074f6f3-f564-42bc-b08b-28ffe75bbbc5\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.576112 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13" path="/var/lib/kubelet/pods/f16eaa9a-4f36-46ba-bcc0-1cbcf468cc13/volumes" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.611261 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:00:00 crc kubenswrapper[4558]: I0120 17:00:00.854634 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29482140-szzm6"] Jan 20 17:00:01 crc kubenswrapper[4558]: I0120 17:00:01.003762 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:00:01 crc kubenswrapper[4558]: W0120 17:00:01.008624 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8074f6f3_f564_42bc_b08b_28ffe75bbbc5.slice/crio-4c55940b3048dc7379bec7d4ab8d303574107c3616709230db18b4ab5204470b WatchSource:0}: Error finding container 4c55940b3048dc7379bec7d4ab8d303574107c3616709230db18b4ab5204470b: Status 404 returned error can't find the container with id 4c55940b3048dc7379bec7d4ab8d303574107c3616709230db18b4ab5204470b Jan 20 17:00:01 crc kubenswrapper[4558]: I0120 17:00:01.238608 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"8074f6f3-f564-42bc-b08b-28ffe75bbbc5","Type":"ContainerStarted","Data":"4c55940b3048dc7379bec7d4ab8d303574107c3616709230db18b4ab5204470b"} Jan 20 17:00:01 crc kubenswrapper[4558]: I0120 17:00:01.241900 4558 generic.go:334] "Generic (PLEG): container finished" podID="e1aaf016-20be-4047-919d-cda81437437a" containerID="1bc3200f596051008ebbe5f7b24025a7b896079f1476665b72b344bebfe77bf2" exitCode=0 Jan 20 17:00:01 crc kubenswrapper[4558]: I0120 17:00:01.241939 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29482140-szzm6" event={"ID":"e1aaf016-20be-4047-919d-cda81437437a","Type":"ContainerDied","Data":"1bc3200f596051008ebbe5f7b24025a7b896079f1476665b72b344bebfe77bf2"} Jan 20 17:00:01 crc kubenswrapper[4558]: I0120 17:00:01.241960 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29482140-szzm6" event={"ID":"e1aaf016-20be-4047-919d-cda81437437a","Type":"ContainerStarted","Data":"c5a40c69d484f3555051965169b034bf9b3c5d86d0176674790936438f1239f9"} Jan 20 17:00:02 crc kubenswrapper[4558]: I0120 17:00:02.620953 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29482140-szzm6" Jan 20 17:00:02 crc kubenswrapper[4558]: I0120 17:00:02.687801 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e1aaf016-20be-4047-919d-cda81437437a-secret-volume\") pod \"e1aaf016-20be-4047-919d-cda81437437a\" (UID: \"e1aaf016-20be-4047-919d-cda81437437a\") " Jan 20 17:00:02 crc kubenswrapper[4558]: I0120 17:00:02.687916 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-brmbt\" (UniqueName: \"kubernetes.io/projected/e1aaf016-20be-4047-919d-cda81437437a-kube-api-access-brmbt\") pod \"e1aaf016-20be-4047-919d-cda81437437a\" (UID: \"e1aaf016-20be-4047-919d-cda81437437a\") " Jan 20 17:00:02 crc kubenswrapper[4558]: I0120 17:00:02.687939 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e1aaf016-20be-4047-919d-cda81437437a-config-volume\") pod \"e1aaf016-20be-4047-919d-cda81437437a\" (UID: \"e1aaf016-20be-4047-919d-cda81437437a\") " Jan 20 17:00:02 crc kubenswrapper[4558]: I0120 17:00:02.689109 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e1aaf016-20be-4047-919d-cda81437437a-config-volume" (OuterVolumeSpecName: "config-volume") pod "e1aaf016-20be-4047-919d-cda81437437a" (UID: "e1aaf016-20be-4047-919d-cda81437437a"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:00:02 crc kubenswrapper[4558]: I0120 17:00:02.693582 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e1aaf016-20be-4047-919d-cda81437437a-kube-api-access-brmbt" (OuterVolumeSpecName: "kube-api-access-brmbt") pod "e1aaf016-20be-4047-919d-cda81437437a" (UID: "e1aaf016-20be-4047-919d-cda81437437a"). InnerVolumeSpecName "kube-api-access-brmbt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:00:02 crc kubenswrapper[4558]: I0120 17:00:02.710437 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1aaf016-20be-4047-919d-cda81437437a-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "e1aaf016-20be-4047-919d-cda81437437a" (UID: "e1aaf016-20be-4047-919d-cda81437437a"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:02 crc kubenswrapper[4558]: I0120 17:00:02.789775 4558 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e1aaf016-20be-4047-919d-cda81437437a-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:02 crc kubenswrapper[4558]: I0120 17:00:02.789805 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-brmbt\" (UniqueName: \"kubernetes.io/projected/e1aaf016-20be-4047-919d-cda81437437a-kube-api-access-brmbt\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:02 crc kubenswrapper[4558]: I0120 17:00:02.789814 4558 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e1aaf016-20be-4047-919d-cda81437437a-config-volume\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:03 crc kubenswrapper[4558]: I0120 17:00:03.261776 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29482140-szzm6" Jan 20 17:00:03 crc kubenswrapper[4558]: I0120 17:00:03.261786 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29482140-szzm6" event={"ID":"e1aaf016-20be-4047-919d-cda81437437a","Type":"ContainerDied","Data":"c5a40c69d484f3555051965169b034bf9b3c5d86d0176674790936438f1239f9"} Jan 20 17:00:03 crc kubenswrapper[4558]: I0120 17:00:03.261825 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c5a40c69d484f3555051965169b034bf9b3c5d86d0176674790936438f1239f9" Jan 20 17:00:03 crc kubenswrapper[4558]: I0120 17:00:03.263863 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"8074f6f3-f564-42bc-b08b-28ffe75bbbc5","Type":"ContainerStarted","Data":"cc735ea1e3ff81a6e1e946cb0561445b4ff17c02725e6a7c5e3b8fb04ea0d90d"} Jan 20 17:00:03 crc kubenswrapper[4558]: I0120 17:00:03.809378 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:00:03 crc kubenswrapper[4558]: I0120 17:00:03.823492 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:00:04 crc kubenswrapper[4558]: I0120 17:00:04.272114 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"8074f6f3-f564-42bc-b08b-28ffe75bbbc5","Type":"ContainerStarted","Data":"255c4f9294e2677844d684383d43130961e7ee3e4db470ecb77910d99c7f5de5"} Jan 20 17:00:04 crc kubenswrapper[4558]: I0120 17:00:04.285107 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:00:04 crc kubenswrapper[4558]: I0120 17:00:04.418039 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-cell-mapping-6zmz2"] Jan 20 17:00:04 crc kubenswrapper[4558]: E0120 17:00:04.418412 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1aaf016-20be-4047-919d-cda81437437a" containerName="collect-profiles" Jan 20 17:00:04 crc kubenswrapper[4558]: I0120 17:00:04.418429 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1aaf016-20be-4047-919d-cda81437437a" containerName="collect-profiles" Jan 20 17:00:04 crc kubenswrapper[4558]: I0120 17:00:04.418588 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1aaf016-20be-4047-919d-cda81437437a" containerName="collect-profiles" Jan 20 17:00:04 crc kubenswrapper[4558]: I0120 17:00:04.419090 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-6zmz2" Jan 20 17:00:04 crc kubenswrapper[4558]: I0120 17:00:04.425034 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-manage-config-data" Jan 20 17:00:04 crc kubenswrapper[4558]: I0120 17:00:04.425126 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-manage-scripts" Jan 20 17:00:04 crc kubenswrapper[4558]: I0120 17:00:04.425740 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-cell-mapping-6zmz2"] Jan 20 17:00:04 crc kubenswrapper[4558]: I0120 17:00:04.521343 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/95fbc547-5231-4cbb-ba75-b1766ca8c903-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-6zmz2\" (UID: \"95fbc547-5231-4cbb-ba75-b1766ca8c903\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-6zmz2" Jan 20 17:00:04 crc kubenswrapper[4558]: I0120 17:00:04.521408 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/95fbc547-5231-4cbb-ba75-b1766ca8c903-scripts\") pod \"nova-cell1-cell-mapping-6zmz2\" (UID: \"95fbc547-5231-4cbb-ba75-b1766ca8c903\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-6zmz2" Jan 20 17:00:04 crc kubenswrapper[4558]: I0120 17:00:04.521539 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bhgjk\" (UniqueName: \"kubernetes.io/projected/95fbc547-5231-4cbb-ba75-b1766ca8c903-kube-api-access-bhgjk\") pod \"nova-cell1-cell-mapping-6zmz2\" (UID: \"95fbc547-5231-4cbb-ba75-b1766ca8c903\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-6zmz2" Jan 20 17:00:04 crc kubenswrapper[4558]: I0120 17:00:04.521637 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/95fbc547-5231-4cbb-ba75-b1766ca8c903-config-data\") pod \"nova-cell1-cell-mapping-6zmz2\" (UID: \"95fbc547-5231-4cbb-ba75-b1766ca8c903\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-6zmz2" Jan 20 17:00:04 crc kubenswrapper[4558]: I0120 17:00:04.622850 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/95fbc547-5231-4cbb-ba75-b1766ca8c903-config-data\") pod \"nova-cell1-cell-mapping-6zmz2\" (UID: \"95fbc547-5231-4cbb-ba75-b1766ca8c903\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-6zmz2" Jan 20 17:00:04 crc kubenswrapper[4558]: I0120 17:00:04.623113 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/95fbc547-5231-4cbb-ba75-b1766ca8c903-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-6zmz2\" (UID: \"95fbc547-5231-4cbb-ba75-b1766ca8c903\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-6zmz2" Jan 20 17:00:04 crc kubenswrapper[4558]: I0120 17:00:04.623151 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/95fbc547-5231-4cbb-ba75-b1766ca8c903-scripts\") pod \"nova-cell1-cell-mapping-6zmz2\" (UID: \"95fbc547-5231-4cbb-ba75-b1766ca8c903\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-6zmz2" Jan 20 17:00:04 crc kubenswrapper[4558]: I0120 
17:00:04.623249 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bhgjk\" (UniqueName: \"kubernetes.io/projected/95fbc547-5231-4cbb-ba75-b1766ca8c903-kube-api-access-bhgjk\") pod \"nova-cell1-cell-mapping-6zmz2\" (UID: \"95fbc547-5231-4cbb-ba75-b1766ca8c903\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-6zmz2" Jan 20 17:00:04 crc kubenswrapper[4558]: I0120 17:00:04.628790 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/95fbc547-5231-4cbb-ba75-b1766ca8c903-scripts\") pod \"nova-cell1-cell-mapping-6zmz2\" (UID: \"95fbc547-5231-4cbb-ba75-b1766ca8c903\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-6zmz2" Jan 20 17:00:04 crc kubenswrapper[4558]: I0120 17:00:04.628821 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/95fbc547-5231-4cbb-ba75-b1766ca8c903-config-data\") pod \"nova-cell1-cell-mapping-6zmz2\" (UID: \"95fbc547-5231-4cbb-ba75-b1766ca8c903\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-6zmz2" Jan 20 17:00:04 crc kubenswrapper[4558]: I0120 17:00:04.631701 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/95fbc547-5231-4cbb-ba75-b1766ca8c903-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-6zmz2\" (UID: \"95fbc547-5231-4cbb-ba75-b1766ca8c903\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-6zmz2" Jan 20 17:00:04 crc kubenswrapper[4558]: I0120 17:00:04.639249 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bhgjk\" (UniqueName: \"kubernetes.io/projected/95fbc547-5231-4cbb-ba75-b1766ca8c903-kube-api-access-bhgjk\") pod \"nova-cell1-cell-mapping-6zmz2\" (UID: \"95fbc547-5231-4cbb-ba75-b1766ca8c903\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-6zmz2" Jan 20 17:00:04 crc kubenswrapper[4558]: I0120 17:00:04.733347 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-6zmz2" Jan 20 17:00:05 crc kubenswrapper[4558]: I0120 17:00:05.118898 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-cell-mapping-6zmz2"] Jan 20 17:00:05 crc kubenswrapper[4558]: W0120 17:00:05.127713 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod95fbc547_5231_4cbb_ba75_b1766ca8c903.slice/crio-50039605bec22b3f29f256e130e74ecbb5c51bb265d75efd7e02e8b31c786b14 WatchSource:0}: Error finding container 50039605bec22b3f29f256e130e74ecbb5c51bb265d75efd7e02e8b31c786b14: Status 404 returned error can't find the container with id 50039605bec22b3f29f256e130e74ecbb5c51bb265d75efd7e02e8b31c786b14 Jan 20 17:00:05 crc kubenswrapper[4558]: I0120 17:00:05.282494 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-6zmz2" event={"ID":"95fbc547-5231-4cbb-ba75-b1766ca8c903","Type":"ContainerStarted","Data":"39af34e7d1a9f016585ee97297ecd80fd7ecc02479b2a62eee07c83b322cb2a8"} Jan 20 17:00:05 crc kubenswrapper[4558]: I0120 17:00:05.282791 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-6zmz2" event={"ID":"95fbc547-5231-4cbb-ba75-b1766ca8c903","Type":"ContainerStarted","Data":"50039605bec22b3f29f256e130e74ecbb5c51bb265d75efd7e02e8b31c786b14"} Jan 20 17:00:05 crc kubenswrapper[4558]: I0120 17:00:05.286228 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"8074f6f3-f564-42bc-b08b-28ffe75bbbc5","Type":"ContainerStarted","Data":"37fbfba85457fa31861b89574c96f8a2be864e14e57e69c2dae33967885a1430"} Jan 20 17:00:05 crc kubenswrapper[4558]: I0120 17:00:05.296481 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-6zmz2" podStartSLOduration=1.296464878 podStartE2EDuration="1.296464878s" podCreationTimestamp="2026-01-20 17:00:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:00:05.29512595 +0000 UTC m=+1099.055463918" watchObservedRunningTime="2026-01-20 17:00:05.296464878 +0000 UTC m=+1099.056802845" Jan 20 17:00:06 crc kubenswrapper[4558]: I0120 17:00:06.586334 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:00:06 crc kubenswrapper[4558]: I0120 17:00:06.587442 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:00:06 crc kubenswrapper[4558]: I0120 17:00:06.594153 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:00:07 crc kubenswrapper[4558]: I0120 17:00:07.594298 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" podUID="6afafa3e-aec8-4d69-beb3-ae8d48c9aad4" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.183:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:00:07 crc kubenswrapper[4558]: I0120 17:00:07.594296 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" podUID="6afafa3e-aec8-4d69-beb3-ae8d48c9aad4" containerName="nova-api-log" probeResult="failure" output="Get 
\"https://10.217.0.183:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:00:08 crc kubenswrapper[4558]: I0120 17:00:08.306928 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"8074f6f3-f564-42bc-b08b-28ffe75bbbc5","Type":"ContainerStarted","Data":"20cf0ea0e052fa597f57928ad72ba1f7803b3e9d839d21a4246b51731cb87948"} Jan 20 17:00:08 crc kubenswrapper[4558]: I0120 17:00:08.307914 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:00:08 crc kubenswrapper[4558]: I0120 17:00:08.328690 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=2.001721756 podStartE2EDuration="8.328674555s" podCreationTimestamp="2026-01-20 17:00:00 +0000 UTC" firstStartedPulling="2026-01-20 17:00:01.010820279 +0000 UTC m=+1094.771158246" lastFinishedPulling="2026-01-20 17:00:07.337773078 +0000 UTC m=+1101.098111045" observedRunningTime="2026-01-20 17:00:08.324700453 +0000 UTC m=+1102.085038420" watchObservedRunningTime="2026-01-20 17:00:08.328674555 +0000 UTC m=+1102.089012521" Jan 20 17:00:09 crc kubenswrapper[4558]: I0120 17:00:09.349478 4558 generic.go:334] "Generic (PLEG): container finished" podID="95fbc547-5231-4cbb-ba75-b1766ca8c903" containerID="39af34e7d1a9f016585ee97297ecd80fd7ecc02479b2a62eee07c83b322cb2a8" exitCode=0 Jan 20 17:00:09 crc kubenswrapper[4558]: I0120 17:00:09.349513 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-6zmz2" event={"ID":"95fbc547-5231-4cbb-ba75-b1766ca8c903","Type":"ContainerDied","Data":"39af34e7d1a9f016585ee97297ecd80fd7ecc02479b2a62eee07c83b322cb2a8"} Jan 20 17:00:10 crc kubenswrapper[4558]: I0120 17:00:10.613430 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-6zmz2" Jan 20 17:00:10 crc kubenswrapper[4558]: I0120 17:00:10.718422 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/95fbc547-5231-4cbb-ba75-b1766ca8c903-combined-ca-bundle\") pod \"95fbc547-5231-4cbb-ba75-b1766ca8c903\" (UID: \"95fbc547-5231-4cbb-ba75-b1766ca8c903\") " Jan 20 17:00:10 crc kubenswrapper[4558]: I0120 17:00:10.718565 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/95fbc547-5231-4cbb-ba75-b1766ca8c903-config-data\") pod \"95fbc547-5231-4cbb-ba75-b1766ca8c903\" (UID: \"95fbc547-5231-4cbb-ba75-b1766ca8c903\") " Jan 20 17:00:10 crc kubenswrapper[4558]: I0120 17:00:10.718588 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bhgjk\" (UniqueName: \"kubernetes.io/projected/95fbc547-5231-4cbb-ba75-b1766ca8c903-kube-api-access-bhgjk\") pod \"95fbc547-5231-4cbb-ba75-b1766ca8c903\" (UID: \"95fbc547-5231-4cbb-ba75-b1766ca8c903\") " Jan 20 17:00:10 crc kubenswrapper[4558]: I0120 17:00:10.718636 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/95fbc547-5231-4cbb-ba75-b1766ca8c903-scripts\") pod \"95fbc547-5231-4cbb-ba75-b1766ca8c903\" (UID: \"95fbc547-5231-4cbb-ba75-b1766ca8c903\") " Jan 20 17:00:10 crc kubenswrapper[4558]: I0120 17:00:10.723011 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/95fbc547-5231-4cbb-ba75-b1766ca8c903-scripts" (OuterVolumeSpecName: "scripts") pod "95fbc547-5231-4cbb-ba75-b1766ca8c903" (UID: "95fbc547-5231-4cbb-ba75-b1766ca8c903"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:10 crc kubenswrapper[4558]: I0120 17:00:10.723687 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/95fbc547-5231-4cbb-ba75-b1766ca8c903-kube-api-access-bhgjk" (OuterVolumeSpecName: "kube-api-access-bhgjk") pod "95fbc547-5231-4cbb-ba75-b1766ca8c903" (UID: "95fbc547-5231-4cbb-ba75-b1766ca8c903"). InnerVolumeSpecName "kube-api-access-bhgjk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:00:10 crc kubenswrapper[4558]: I0120 17:00:10.738424 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/95fbc547-5231-4cbb-ba75-b1766ca8c903-config-data" (OuterVolumeSpecName: "config-data") pod "95fbc547-5231-4cbb-ba75-b1766ca8c903" (UID: "95fbc547-5231-4cbb-ba75-b1766ca8c903"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:10 crc kubenswrapper[4558]: I0120 17:00:10.739026 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/95fbc547-5231-4cbb-ba75-b1766ca8c903-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "95fbc547-5231-4cbb-ba75-b1766ca8c903" (UID: "95fbc547-5231-4cbb-ba75-b1766ca8c903"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:10 crc kubenswrapper[4558]: I0120 17:00:10.820261 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/95fbc547-5231-4cbb-ba75-b1766ca8c903-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:10 crc kubenswrapper[4558]: I0120 17:00:10.820284 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/95fbc547-5231-4cbb-ba75-b1766ca8c903-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:10 crc kubenswrapper[4558]: I0120 17:00:10.820294 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bhgjk\" (UniqueName: \"kubernetes.io/projected/95fbc547-5231-4cbb-ba75-b1766ca8c903-kube-api-access-bhgjk\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:10 crc kubenswrapper[4558]: I0120 17:00:10.820302 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/95fbc547-5231-4cbb-ba75-b1766ca8c903-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:11 crc kubenswrapper[4558]: I0120 17:00:11.362960 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-6zmz2" event={"ID":"95fbc547-5231-4cbb-ba75-b1766ca8c903","Type":"ContainerDied","Data":"50039605bec22b3f29f256e130e74ecbb5c51bb265d75efd7e02e8b31c786b14"} Jan 20 17:00:11 crc kubenswrapper[4558]: I0120 17:00:11.362995 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="50039605bec22b3f29f256e130e74ecbb5c51bb265d75efd7e02e8b31c786b14" Jan 20 17:00:11 crc kubenswrapper[4558]: I0120 17:00:11.362995 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-6zmz2" Jan 20 17:00:11 crc kubenswrapper[4558]: I0120 17:00:11.515275 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:00:11 crc kubenswrapper[4558]: I0120 17:00:11.515453 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="6afafa3e-aec8-4d69-beb3-ae8d48c9aad4" containerName="nova-api-log" containerID="cri-o://5d4e4d796f3e9f903c11a264ac4eca8c07e322e42169b6cd650abccef3389baa" gracePeriod=30 Jan 20 17:00:11 crc kubenswrapper[4558]: I0120 17:00:11.515561 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="6afafa3e-aec8-4d69-beb3-ae8d48c9aad4" containerName="nova-api-api" containerID="cri-o://969678606d229a086d091f113b7fe767d47a4983b2586dab05a3ea147403e783" gracePeriod=30 Jan 20 17:00:11 crc kubenswrapper[4558]: I0120 17:00:11.526585 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:00:11 crc kubenswrapper[4558]: I0120 17:00:11.526805 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="aa2a96c7-ff73-49de-9acc-7e5ce8791029" containerName="nova-metadata-log" containerID="cri-o://e00279eefaa9c994ac81a4a5f4771f651b81ab3c8953e4cb17bc67d888e02db4" gracePeriod=30 Jan 20 17:00:11 crc kubenswrapper[4558]: I0120 17:00:11.526888 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="aa2a96c7-ff73-49de-9acc-7e5ce8791029" containerName="nova-metadata-metadata" containerID="cri-o://10ccbac057411e8b97d567d8a1fee98ca6979c330f6d03f60ae03427a6c09e0c" gracePeriod=30 Jan 20 17:00:11 crc kubenswrapper[4558]: I0120 17:00:11.534912 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:00:11 crc kubenswrapper[4558]: I0120 17:00:11.535066 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="0f4ced88-ef0d-40df-ac5f-b1baf062f8e3" containerName="nova-scheduler-scheduler" containerID="cri-o://5046214ebbf76cbcda5d6afa9f9f3b15fddfcb6bf26dfc014b0afde9bcf8c549" gracePeriod=30 Jan 20 17:00:12 crc kubenswrapper[4558]: I0120 17:00:12.382437 4558 generic.go:334] "Generic (PLEG): container finished" podID="6afafa3e-aec8-4d69-beb3-ae8d48c9aad4" containerID="5d4e4d796f3e9f903c11a264ac4eca8c07e322e42169b6cd650abccef3389baa" exitCode=143 Jan 20 17:00:12 crc kubenswrapper[4558]: I0120 17:00:12.382515 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"6afafa3e-aec8-4d69-beb3-ae8d48c9aad4","Type":"ContainerDied","Data":"5d4e4d796f3e9f903c11a264ac4eca8c07e322e42169b6cd650abccef3389baa"} Jan 20 17:00:12 crc kubenswrapper[4558]: I0120 17:00:12.384871 4558 generic.go:334] "Generic (PLEG): container finished" podID="0f4ced88-ef0d-40df-ac5f-b1baf062f8e3" containerID="5046214ebbf76cbcda5d6afa9f9f3b15fddfcb6bf26dfc014b0afde9bcf8c549" exitCode=0 Jan 20 17:00:12 crc kubenswrapper[4558]: I0120 17:00:12.384924 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"0f4ced88-ef0d-40df-ac5f-b1baf062f8e3","Type":"ContainerDied","Data":"5046214ebbf76cbcda5d6afa9f9f3b15fddfcb6bf26dfc014b0afde9bcf8c549"} Jan 20 17:00:12 crc kubenswrapper[4558]: 
I0120 17:00:12.386276 4558 generic.go:334] "Generic (PLEG): container finished" podID="aa2a96c7-ff73-49de-9acc-7e5ce8791029" containerID="e00279eefaa9c994ac81a4a5f4771f651b81ab3c8953e4cb17bc67d888e02db4" exitCode=143 Jan 20 17:00:12 crc kubenswrapper[4558]: I0120 17:00:12.386304 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"aa2a96c7-ff73-49de-9acc-7e5ce8791029","Type":"ContainerDied","Data":"e00279eefaa9c994ac81a4a5f4771f651b81ab3c8953e4cb17bc67d888e02db4"} Jan 20 17:00:12 crc kubenswrapper[4558]: I0120 17:00:12.497634 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:00:12 crc kubenswrapper[4558]: I0120 17:00:12.650655 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rhhjh\" (UniqueName: \"kubernetes.io/projected/0f4ced88-ef0d-40df-ac5f-b1baf062f8e3-kube-api-access-rhhjh\") pod \"0f4ced88-ef0d-40df-ac5f-b1baf062f8e3\" (UID: \"0f4ced88-ef0d-40df-ac5f-b1baf062f8e3\") " Jan 20 17:00:12 crc kubenswrapper[4558]: I0120 17:00:12.650777 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0f4ced88-ef0d-40df-ac5f-b1baf062f8e3-config-data\") pod \"0f4ced88-ef0d-40df-ac5f-b1baf062f8e3\" (UID: \"0f4ced88-ef0d-40df-ac5f-b1baf062f8e3\") " Jan 20 17:00:12 crc kubenswrapper[4558]: I0120 17:00:12.650843 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f4ced88-ef0d-40df-ac5f-b1baf062f8e3-combined-ca-bundle\") pod \"0f4ced88-ef0d-40df-ac5f-b1baf062f8e3\" (UID: \"0f4ced88-ef0d-40df-ac5f-b1baf062f8e3\") " Jan 20 17:00:12 crc kubenswrapper[4558]: I0120 17:00:12.668319 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0f4ced88-ef0d-40df-ac5f-b1baf062f8e3-kube-api-access-rhhjh" (OuterVolumeSpecName: "kube-api-access-rhhjh") pod "0f4ced88-ef0d-40df-ac5f-b1baf062f8e3" (UID: "0f4ced88-ef0d-40df-ac5f-b1baf062f8e3"). InnerVolumeSpecName "kube-api-access-rhhjh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:00:12 crc kubenswrapper[4558]: I0120 17:00:12.686261 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f4ced88-ef0d-40df-ac5f-b1baf062f8e3-config-data" (OuterVolumeSpecName: "config-data") pod "0f4ced88-ef0d-40df-ac5f-b1baf062f8e3" (UID: "0f4ced88-ef0d-40df-ac5f-b1baf062f8e3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:12 crc kubenswrapper[4558]: I0120 17:00:12.693123 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f4ced88-ef0d-40df-ac5f-b1baf062f8e3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0f4ced88-ef0d-40df-ac5f-b1baf062f8e3" (UID: "0f4ced88-ef0d-40df-ac5f-b1baf062f8e3"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:12 crc kubenswrapper[4558]: I0120 17:00:12.753298 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rhhjh\" (UniqueName: \"kubernetes.io/projected/0f4ced88-ef0d-40df-ac5f-b1baf062f8e3-kube-api-access-rhhjh\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:12 crc kubenswrapper[4558]: I0120 17:00:12.753328 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0f4ced88-ef0d-40df-ac5f-b1baf062f8e3-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:12 crc kubenswrapper[4558]: I0120 17:00:12.753338 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f4ced88-ef0d-40df-ac5f-b1baf062f8e3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:13 crc kubenswrapper[4558]: I0120 17:00:13.397772 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"0f4ced88-ef0d-40df-ac5f-b1baf062f8e3","Type":"ContainerDied","Data":"416f49c29a91606a5f452eefde4f71aa0b0af574547e8a4b84a41fc61f5ef6fe"} Jan 20 17:00:13 crc kubenswrapper[4558]: I0120 17:00:13.397826 4558 scope.go:117] "RemoveContainer" containerID="5046214ebbf76cbcda5d6afa9f9f3b15fddfcb6bf26dfc014b0afde9bcf8c549" Jan 20 17:00:13 crc kubenswrapper[4558]: I0120 17:00:13.397832 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:00:13 crc kubenswrapper[4558]: I0120 17:00:13.422682 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:00:13 crc kubenswrapper[4558]: I0120 17:00:13.428687 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:00:13 crc kubenswrapper[4558]: I0120 17:00:13.437875 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:00:13 crc kubenswrapper[4558]: E0120 17:00:13.438213 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="95fbc547-5231-4cbb-ba75-b1766ca8c903" containerName="nova-manage" Jan 20 17:00:13 crc kubenswrapper[4558]: I0120 17:00:13.438230 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="95fbc547-5231-4cbb-ba75-b1766ca8c903" containerName="nova-manage" Jan 20 17:00:13 crc kubenswrapper[4558]: E0120 17:00:13.438253 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f4ced88-ef0d-40df-ac5f-b1baf062f8e3" containerName="nova-scheduler-scheduler" Jan 20 17:00:13 crc kubenswrapper[4558]: I0120 17:00:13.438259 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f4ced88-ef0d-40df-ac5f-b1baf062f8e3" containerName="nova-scheduler-scheduler" Jan 20 17:00:13 crc kubenswrapper[4558]: I0120 17:00:13.438435 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="0f4ced88-ef0d-40df-ac5f-b1baf062f8e3" containerName="nova-scheduler-scheduler" Jan 20 17:00:13 crc kubenswrapper[4558]: I0120 17:00:13.438451 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="95fbc547-5231-4cbb-ba75-b1766ca8c903" containerName="nova-manage" Jan 20 17:00:13 crc kubenswrapper[4558]: I0120 17:00:13.438938 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:00:13 crc kubenswrapper[4558]: I0120 17:00:13.440334 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-scheduler-config-data" Jan 20 17:00:13 crc kubenswrapper[4558]: I0120 17:00:13.448681 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:00:13 crc kubenswrapper[4558]: I0120 17:00:13.564877 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10c92abf-cd48-4659-8595-ce9610c0fe2e-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"10c92abf-cd48-4659-8595-ce9610c0fe2e\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:00:13 crc kubenswrapper[4558]: I0120 17:00:13.564959 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/10c92abf-cd48-4659-8595-ce9610c0fe2e-config-data\") pod \"nova-scheduler-0\" (UID: \"10c92abf-cd48-4659-8595-ce9610c0fe2e\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:00:13 crc kubenswrapper[4558]: I0120 17:00:13.565007 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ddb5d\" (UniqueName: \"kubernetes.io/projected/10c92abf-cd48-4659-8595-ce9610c0fe2e-kube-api-access-ddb5d\") pod \"nova-scheduler-0\" (UID: \"10c92abf-cd48-4659-8595-ce9610c0fe2e\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:00:13 crc kubenswrapper[4558]: I0120 17:00:13.666353 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ddb5d\" (UniqueName: \"kubernetes.io/projected/10c92abf-cd48-4659-8595-ce9610c0fe2e-kube-api-access-ddb5d\") pod \"nova-scheduler-0\" (UID: \"10c92abf-cd48-4659-8595-ce9610c0fe2e\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:00:13 crc kubenswrapper[4558]: I0120 17:00:13.666873 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10c92abf-cd48-4659-8595-ce9610c0fe2e-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"10c92abf-cd48-4659-8595-ce9610c0fe2e\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:00:13 crc kubenswrapper[4558]: I0120 17:00:13.666978 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/10c92abf-cd48-4659-8595-ce9610c0fe2e-config-data\") pod \"nova-scheduler-0\" (UID: \"10c92abf-cd48-4659-8595-ce9610c0fe2e\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:00:13 crc kubenswrapper[4558]: I0120 17:00:13.671534 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/10c92abf-cd48-4659-8595-ce9610c0fe2e-config-data\") pod \"nova-scheduler-0\" (UID: \"10c92abf-cd48-4659-8595-ce9610c0fe2e\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:00:13 crc kubenswrapper[4558]: I0120 17:00:13.671755 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10c92abf-cd48-4659-8595-ce9610c0fe2e-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"10c92abf-cd48-4659-8595-ce9610c0fe2e\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:00:13 crc kubenswrapper[4558]: I0120 17:00:13.680638 
4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ddb5d\" (UniqueName: \"kubernetes.io/projected/10c92abf-cd48-4659-8595-ce9610c0fe2e-kube-api-access-ddb5d\") pod \"nova-scheduler-0\" (UID: \"10c92abf-cd48-4659-8595-ce9610c0fe2e\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:00:13 crc kubenswrapper[4558]: I0120 17:00:13.753596 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:00:14 crc kubenswrapper[4558]: I0120 17:00:14.152115 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:00:14 crc kubenswrapper[4558]: W0120 17:00:14.152238 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod10c92abf_cd48_4659_8595_ce9610c0fe2e.slice/crio-8e9bddfc70e9709e67f22104987e15e8c3a3f1b421349d32348a0a6dbd582a39 WatchSource:0}: Error finding container 8e9bddfc70e9709e67f22104987e15e8c3a3f1b421349d32348a0a6dbd582a39: Status 404 returned error can't find the container with id 8e9bddfc70e9709e67f22104987e15e8c3a3f1b421349d32348a0a6dbd582a39 Jan 20 17:00:14 crc kubenswrapper[4558]: I0120 17:00:14.409599 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"10c92abf-cd48-4659-8595-ce9610c0fe2e","Type":"ContainerStarted","Data":"6c101cbe30eb5d950364b9539489f846cc6d1f7298614409d0709726ebba0b06"} Jan 20 17:00:14 crc kubenswrapper[4558]: I0120 17:00:14.409655 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"10c92abf-cd48-4659-8595-ce9610c0fe2e","Type":"ContainerStarted","Data":"8e9bddfc70e9709e67f22104987e15e8c3a3f1b421349d32348a0a6dbd582a39"} Jan 20 17:00:14 crc kubenswrapper[4558]: I0120 17:00:14.424512 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-scheduler-0" podStartSLOduration=1.424499266 podStartE2EDuration="1.424499266s" podCreationTimestamp="2026-01-20 17:00:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:00:14.421825598 +0000 UTC m=+1108.182163566" watchObservedRunningTime="2026-01-20 17:00:14.424499266 +0000 UTC m=+1108.184837233" Jan 20 17:00:14 crc kubenswrapper[4558]: I0120 17:00:14.572840 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0f4ced88-ef0d-40df-ac5f-b1baf062f8e3" path="/var/lib/kubelet/pods/0f4ced88-ef0d-40df-ac5f-b1baf062f8e3/volumes" Jan 20 17:00:14 crc kubenswrapper[4558]: I0120 17:00:14.656425 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/nova-metadata-0" podUID="aa2a96c7-ff73-49de-9acc-7e5ce8791029" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.179:8775/\": read tcp 10.217.0.2:53252->10.217.0.179:8775: read: connection reset by peer" Jan 20 17:00:14 crc kubenswrapper[4558]: I0120 17:00:14.656447 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/nova-metadata-0" podUID="aa2a96c7-ff73-49de-9acc-7e5ce8791029" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.179:8775/\": read tcp 10.217.0.2:53250->10.217.0.179:8775: read: connection reset by peer" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.089078 4558 util.go:48] "No ready sandbox for pod can be 
found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.096072 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.191412 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6afafa3e-aec8-4d69-beb3-ae8d48c9aad4-logs\") pod \"6afafa3e-aec8-4d69-beb3-ae8d48c9aad4\" (UID: \"6afafa3e-aec8-4d69-beb3-ae8d48c9aad4\") " Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.191461 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6afafa3e-aec8-4d69-beb3-ae8d48c9aad4-public-tls-certs\") pod \"6afafa3e-aec8-4d69-beb3-ae8d48c9aad4\" (UID: \"6afafa3e-aec8-4d69-beb3-ae8d48c9aad4\") " Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.191478 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa2a96c7-ff73-49de-9acc-7e5ce8791029-nova-metadata-tls-certs\") pod \"aa2a96c7-ff73-49de-9acc-7e5ce8791029\" (UID: \"aa2a96c7-ff73-49de-9acc-7e5ce8791029\") " Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.191504 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6afafa3e-aec8-4d69-beb3-ae8d48c9aad4-config-data\") pod \"6afafa3e-aec8-4d69-beb3-ae8d48c9aad4\" (UID: \"6afafa3e-aec8-4d69-beb3-ae8d48c9aad4\") " Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.191548 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-878d7\" (UniqueName: \"kubernetes.io/projected/aa2a96c7-ff73-49de-9acc-7e5ce8791029-kube-api-access-878d7\") pod \"aa2a96c7-ff73-49de-9acc-7e5ce8791029\" (UID: \"aa2a96c7-ff73-49de-9acc-7e5ce8791029\") " Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.191586 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6afafa3e-aec8-4d69-beb3-ae8d48c9aad4-internal-tls-certs\") pod \"6afafa3e-aec8-4d69-beb3-ae8d48c9aad4\" (UID: \"6afafa3e-aec8-4d69-beb3-ae8d48c9aad4\") " Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.191611 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6afafa3e-aec8-4d69-beb3-ae8d48c9aad4-combined-ca-bundle\") pod \"6afafa3e-aec8-4d69-beb3-ae8d48c9aad4\" (UID: \"6afafa3e-aec8-4d69-beb3-ae8d48c9aad4\") " Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.191649 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6vq84\" (UniqueName: \"kubernetes.io/projected/6afafa3e-aec8-4d69-beb3-ae8d48c9aad4-kube-api-access-6vq84\") pod \"6afafa3e-aec8-4d69-beb3-ae8d48c9aad4\" (UID: \"6afafa3e-aec8-4d69-beb3-ae8d48c9aad4\") " Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.191731 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aa2a96c7-ff73-49de-9acc-7e5ce8791029-logs\") pod \"aa2a96c7-ff73-49de-9acc-7e5ce8791029\" (UID: \"aa2a96c7-ff73-49de-9acc-7e5ce8791029\") " Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.191756 4558 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa2a96c7-ff73-49de-9acc-7e5ce8791029-config-data\") pod \"aa2a96c7-ff73-49de-9acc-7e5ce8791029\" (UID: \"aa2a96c7-ff73-49de-9acc-7e5ce8791029\") " Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.191781 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa2a96c7-ff73-49de-9acc-7e5ce8791029-combined-ca-bundle\") pod \"aa2a96c7-ff73-49de-9acc-7e5ce8791029\" (UID: \"aa2a96c7-ff73-49de-9acc-7e5ce8791029\") " Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.191803 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6afafa3e-aec8-4d69-beb3-ae8d48c9aad4-logs" (OuterVolumeSpecName: "logs") pod "6afafa3e-aec8-4d69-beb3-ae8d48c9aad4" (UID: "6afafa3e-aec8-4d69-beb3-ae8d48c9aad4"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.192079 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aa2a96c7-ff73-49de-9acc-7e5ce8791029-logs" (OuterVolumeSpecName: "logs") pod "aa2a96c7-ff73-49de-9acc-7e5ce8791029" (UID: "aa2a96c7-ff73-49de-9acc-7e5ce8791029"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.192498 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6afafa3e-aec8-4d69-beb3-ae8d48c9aad4-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.192512 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aa2a96c7-ff73-49de-9acc-7e5ce8791029-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.196494 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aa2a96c7-ff73-49de-9acc-7e5ce8791029-kube-api-access-878d7" (OuterVolumeSpecName: "kube-api-access-878d7") pod "aa2a96c7-ff73-49de-9acc-7e5ce8791029" (UID: "aa2a96c7-ff73-49de-9acc-7e5ce8791029"). InnerVolumeSpecName "kube-api-access-878d7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.196565 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6afafa3e-aec8-4d69-beb3-ae8d48c9aad4-kube-api-access-6vq84" (OuterVolumeSpecName: "kube-api-access-6vq84") pod "6afafa3e-aec8-4d69-beb3-ae8d48c9aad4" (UID: "6afafa3e-aec8-4d69-beb3-ae8d48c9aad4"). InnerVolumeSpecName "kube-api-access-6vq84". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.217090 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6afafa3e-aec8-4d69-beb3-ae8d48c9aad4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6afafa3e-aec8-4d69-beb3-ae8d48c9aad4" (UID: "6afafa3e-aec8-4d69-beb3-ae8d48c9aad4"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.217796 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa2a96c7-ff73-49de-9acc-7e5ce8791029-config-data" (OuterVolumeSpecName: "config-data") pod "aa2a96c7-ff73-49de-9acc-7e5ce8791029" (UID: "aa2a96c7-ff73-49de-9acc-7e5ce8791029"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.219216 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa2a96c7-ff73-49de-9acc-7e5ce8791029-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "aa2a96c7-ff73-49de-9acc-7e5ce8791029" (UID: "aa2a96c7-ff73-49de-9acc-7e5ce8791029"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.219540 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6afafa3e-aec8-4d69-beb3-ae8d48c9aad4-config-data" (OuterVolumeSpecName: "config-data") pod "6afafa3e-aec8-4d69-beb3-ae8d48c9aad4" (UID: "6afafa3e-aec8-4d69-beb3-ae8d48c9aad4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.231889 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6afafa3e-aec8-4d69-beb3-ae8d48c9aad4-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "6afafa3e-aec8-4d69-beb3-ae8d48c9aad4" (UID: "6afafa3e-aec8-4d69-beb3-ae8d48c9aad4"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.236774 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa2a96c7-ff73-49de-9acc-7e5ce8791029-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "aa2a96c7-ff73-49de-9acc-7e5ce8791029" (UID: "aa2a96c7-ff73-49de-9acc-7e5ce8791029"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.237069 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6afafa3e-aec8-4d69-beb3-ae8d48c9aad4-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "6afafa3e-aec8-4d69-beb3-ae8d48c9aad4" (UID: "6afafa3e-aec8-4d69-beb3-ae8d48c9aad4"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.294157 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa2a96c7-ff73-49de-9acc-7e5ce8791029-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.294194 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa2a96c7-ff73-49de-9acc-7e5ce8791029-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.294216 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6afafa3e-aec8-4d69-beb3-ae8d48c9aad4-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.294224 4558 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa2a96c7-ff73-49de-9acc-7e5ce8791029-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.294236 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6afafa3e-aec8-4d69-beb3-ae8d48c9aad4-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.294243 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-878d7\" (UniqueName: \"kubernetes.io/projected/aa2a96c7-ff73-49de-9acc-7e5ce8791029-kube-api-access-878d7\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.294250 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6afafa3e-aec8-4d69-beb3-ae8d48c9aad4-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.294258 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6afafa3e-aec8-4d69-beb3-ae8d48c9aad4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.294265 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6vq84\" (UniqueName: \"kubernetes.io/projected/6afafa3e-aec8-4d69-beb3-ae8d48c9aad4-kube-api-access-6vq84\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.419185 4558 generic.go:334] "Generic (PLEG): container finished" podID="aa2a96c7-ff73-49de-9acc-7e5ce8791029" containerID="10ccbac057411e8b97d567d8a1fee98ca6979c330f6d03f60ae03427a6c09e0c" exitCode=0 Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.419248 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"aa2a96c7-ff73-49de-9acc-7e5ce8791029","Type":"ContainerDied","Data":"10ccbac057411e8b97d567d8a1fee98ca6979c330f6d03f60ae03427a6c09e0c"} Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.419600 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"aa2a96c7-ff73-49de-9acc-7e5ce8791029","Type":"ContainerDied","Data":"33b869d23d26107cec428871ae63123901c7d870de544d3bf1e9505323ae6a2c"} Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.419654 4558 scope.go:117] "RemoveContainer" 
containerID="10ccbac057411e8b97d567d8a1fee98ca6979c330f6d03f60ae03427a6c09e0c" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.420032 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.421238 4558 generic.go:334] "Generic (PLEG): container finished" podID="6afafa3e-aec8-4d69-beb3-ae8d48c9aad4" containerID="969678606d229a086d091f113b7fe767d47a4983b2586dab05a3ea147403e783" exitCode=0 Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.423613 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"6afafa3e-aec8-4d69-beb3-ae8d48c9aad4","Type":"ContainerDied","Data":"969678606d229a086d091f113b7fe767d47a4983b2586dab05a3ea147403e783"} Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.423665 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"6afafa3e-aec8-4d69-beb3-ae8d48c9aad4","Type":"ContainerDied","Data":"ac757e22cf3c03cbd152200bf8fef455b224b185615ba94bbde8f9ace3b88cbf"} Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.424091 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.455077 4558 scope.go:117] "RemoveContainer" containerID="e00279eefaa9c994ac81a4a5f4771f651b81ab3c8953e4cb17bc67d888e02db4" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.459982 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.472456 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.480926 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:00:15 crc kubenswrapper[4558]: E0120 17:00:15.481349 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa2a96c7-ff73-49de-9acc-7e5ce8791029" containerName="nova-metadata-metadata" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.481369 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa2a96c7-ff73-49de-9acc-7e5ce8791029" containerName="nova-metadata-metadata" Jan 20 17:00:15 crc kubenswrapper[4558]: E0120 17:00:15.481379 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6afafa3e-aec8-4d69-beb3-ae8d48c9aad4" containerName="nova-api-api" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.481384 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6afafa3e-aec8-4d69-beb3-ae8d48c9aad4" containerName="nova-api-api" Jan 20 17:00:15 crc kubenswrapper[4558]: E0120 17:00:15.481396 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6afafa3e-aec8-4d69-beb3-ae8d48c9aad4" containerName="nova-api-log" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.481401 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6afafa3e-aec8-4d69-beb3-ae8d48c9aad4" containerName="nova-api-log" Jan 20 17:00:15 crc kubenswrapper[4558]: E0120 17:00:15.481436 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa2a96c7-ff73-49de-9acc-7e5ce8791029" containerName="nova-metadata-log" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.481441 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa2a96c7-ff73-49de-9acc-7e5ce8791029" 
containerName="nova-metadata-log" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.481581 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa2a96c7-ff73-49de-9acc-7e5ce8791029" containerName="nova-metadata-metadata" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.481591 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="6afafa3e-aec8-4d69-beb3-ae8d48c9aad4" containerName="nova-api-log" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.481607 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="6afafa3e-aec8-4d69-beb3-ae8d48c9aad4" containerName="nova-api-api" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.481618 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa2a96c7-ff73-49de-9acc-7e5ce8791029" containerName="nova-metadata-log" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.482468 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.486989 4558 scope.go:117] "RemoveContainer" containerID="10ccbac057411e8b97d567d8a1fee98ca6979c330f6d03f60ae03427a6c09e0c" Jan 20 17:00:15 crc kubenswrapper[4558]: E0120 17:00:15.487366 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"10ccbac057411e8b97d567d8a1fee98ca6979c330f6d03f60ae03427a6c09e0c\": container with ID starting with 10ccbac057411e8b97d567d8a1fee98ca6979c330f6d03f60ae03427a6c09e0c not found: ID does not exist" containerID="10ccbac057411e8b97d567d8a1fee98ca6979c330f6d03f60ae03427a6c09e0c" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.487436 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"10ccbac057411e8b97d567d8a1fee98ca6979c330f6d03f60ae03427a6c09e0c"} err="failed to get container status \"10ccbac057411e8b97d567d8a1fee98ca6979c330f6d03f60ae03427a6c09e0c\": rpc error: code = NotFound desc = could not find container \"10ccbac057411e8b97d567d8a1fee98ca6979c330f6d03f60ae03427a6c09e0c\": container with ID starting with 10ccbac057411e8b97d567d8a1fee98ca6979c330f6d03f60ae03427a6c09e0c not found: ID does not exist" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.487460 4558 scope.go:117] "RemoveContainer" containerID="e00279eefaa9c994ac81a4a5f4771f651b81ab3c8953e4cb17bc67d888e02db4" Jan 20 17:00:15 crc kubenswrapper[4558]: E0120 17:00:15.487824 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e00279eefaa9c994ac81a4a5f4771f651b81ab3c8953e4cb17bc67d888e02db4\": container with ID starting with e00279eefaa9c994ac81a4a5f4771f651b81ab3c8953e4cb17bc67d888e02db4 not found: ID does not exist" containerID="e00279eefaa9c994ac81a4a5f4771f651b81ab3c8953e4cb17bc67d888e02db4" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.487849 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e00279eefaa9c994ac81a4a5f4771f651b81ab3c8953e4cb17bc67d888e02db4"} err="failed to get container status \"e00279eefaa9c994ac81a4a5f4771f651b81ab3c8953e4cb17bc67d888e02db4\": rpc error: code = NotFound desc = could not find container \"e00279eefaa9c994ac81a4a5f4771f651b81ab3c8953e4cb17bc67d888e02db4\": container with ID starting with e00279eefaa9c994ac81a4a5f4771f651b81ab3c8953e4cb17bc67d888e02db4 not found: ID does not exist" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 
17:00:15.487868 4558 scope.go:117] "RemoveContainer" containerID="969678606d229a086d091f113b7fe767d47a4983b2586dab05a3ea147403e783" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.489006 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-api-config-data" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.489133 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-public-svc" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.489300 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-internal-svc" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.491914 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.507754 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.514005 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.518866 4558 scope.go:117] "RemoveContainer" containerID="5d4e4d796f3e9f903c11a264ac4eca8c07e322e42169b6cd650abccef3389baa" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.523397 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.524766 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.526756 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-metadata-internal-svc" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.526760 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-metadata-config-data" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.533280 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.539509 4558 scope.go:117] "RemoveContainer" containerID="969678606d229a086d091f113b7fe767d47a4983b2586dab05a3ea147403e783" Jan 20 17:00:15 crc kubenswrapper[4558]: E0120 17:00:15.539865 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"969678606d229a086d091f113b7fe767d47a4983b2586dab05a3ea147403e783\": container with ID starting with 969678606d229a086d091f113b7fe767d47a4983b2586dab05a3ea147403e783 not found: ID does not exist" containerID="969678606d229a086d091f113b7fe767d47a4983b2586dab05a3ea147403e783" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.539911 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"969678606d229a086d091f113b7fe767d47a4983b2586dab05a3ea147403e783"} err="failed to get container status \"969678606d229a086d091f113b7fe767d47a4983b2586dab05a3ea147403e783\": rpc error: code = NotFound desc = could not find container \"969678606d229a086d091f113b7fe767d47a4983b2586dab05a3ea147403e783\": container with ID starting with 969678606d229a086d091f113b7fe767d47a4983b2586dab05a3ea147403e783 not found: ID does not exist" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.539934 4558 scope.go:117] "RemoveContainer" 
containerID="5d4e4d796f3e9f903c11a264ac4eca8c07e322e42169b6cd650abccef3389baa" Jan 20 17:00:15 crc kubenswrapper[4558]: E0120 17:00:15.540233 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5d4e4d796f3e9f903c11a264ac4eca8c07e322e42169b6cd650abccef3389baa\": container with ID starting with 5d4e4d796f3e9f903c11a264ac4eca8c07e322e42169b6cd650abccef3389baa not found: ID does not exist" containerID="5d4e4d796f3e9f903c11a264ac4eca8c07e322e42169b6cd650abccef3389baa" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.540271 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5d4e4d796f3e9f903c11a264ac4eca8c07e322e42169b6cd650abccef3389baa"} err="failed to get container status \"5d4e4d796f3e9f903c11a264ac4eca8c07e322e42169b6cd650abccef3389baa\": rpc error: code = NotFound desc = could not find container \"5d4e4d796f3e9f903c11a264ac4eca8c07e322e42169b6cd650abccef3389baa\": container with ID starting with 5d4e4d796f3e9f903c11a264ac4eca8c07e322e42169b6cd650abccef3389baa not found: ID does not exist" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.602800 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/e554db35-2dba-4138-9ca0-bd1371a9c63d-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"e554db35-2dba-4138-9ca0-bd1371a9c63d\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.602871 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cae28711-fbe7-40cc-8f31-4b6332ab5378-public-tls-certs\") pod \"nova-api-0\" (UID: \"cae28711-fbe7-40cc-8f31-4b6332ab5378\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.602915 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e554db35-2dba-4138-9ca0-bd1371a9c63d-logs\") pod \"nova-metadata-0\" (UID: \"e554db35-2dba-4138-9ca0-bd1371a9c63d\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.602983 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cae28711-fbe7-40cc-8f31-4b6332ab5378-internal-tls-certs\") pod \"nova-api-0\" (UID: \"cae28711-fbe7-40cc-8f31-4b6332ab5378\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.603007 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cae28711-fbe7-40cc-8f31-4b6332ab5378-logs\") pod \"nova-api-0\" (UID: \"cae28711-fbe7-40cc-8f31-4b6332ab5378\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.603033 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wxx67\" (UniqueName: \"kubernetes.io/projected/e554db35-2dba-4138-9ca0-bd1371a9c63d-kube-api-access-wxx67\") pod \"nova-metadata-0\" (UID: \"e554db35-2dba-4138-9ca0-bd1371a9c63d\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.603047 4558 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e554db35-2dba-4138-9ca0-bd1371a9c63d-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"e554db35-2dba-4138-9ca0-bd1371a9c63d\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.603107 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cae28711-fbe7-40cc-8f31-4b6332ab5378-config-data\") pod \"nova-api-0\" (UID: \"cae28711-fbe7-40cc-8f31-4b6332ab5378\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.603143 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e554db35-2dba-4138-9ca0-bd1371a9c63d-config-data\") pod \"nova-metadata-0\" (UID: \"e554db35-2dba-4138-9ca0-bd1371a9c63d\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.603176 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cae28711-fbe7-40cc-8f31-4b6332ab5378-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"cae28711-fbe7-40cc-8f31-4b6332ab5378\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.603204 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z8rg2\" (UniqueName: \"kubernetes.io/projected/cae28711-fbe7-40cc-8f31-4b6332ab5378-kube-api-access-z8rg2\") pod \"nova-api-0\" (UID: \"cae28711-fbe7-40cc-8f31-4b6332ab5378\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.704851 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wxx67\" (UniqueName: \"kubernetes.io/projected/e554db35-2dba-4138-9ca0-bd1371a9c63d-kube-api-access-wxx67\") pod \"nova-metadata-0\" (UID: \"e554db35-2dba-4138-9ca0-bd1371a9c63d\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.704886 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e554db35-2dba-4138-9ca0-bd1371a9c63d-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"e554db35-2dba-4138-9ca0-bd1371a9c63d\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.704907 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cae28711-fbe7-40cc-8f31-4b6332ab5378-config-data\") pod \"nova-api-0\" (UID: \"cae28711-fbe7-40cc-8f31-4b6332ab5378\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.704939 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e554db35-2dba-4138-9ca0-bd1371a9c63d-config-data\") pod \"nova-metadata-0\" (UID: \"e554db35-2dba-4138-9ca0-bd1371a9c63d\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.704960 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/cae28711-fbe7-40cc-8f31-4b6332ab5378-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"cae28711-fbe7-40cc-8f31-4b6332ab5378\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.704975 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z8rg2\" (UniqueName: \"kubernetes.io/projected/cae28711-fbe7-40cc-8f31-4b6332ab5378-kube-api-access-z8rg2\") pod \"nova-api-0\" (UID: \"cae28711-fbe7-40cc-8f31-4b6332ab5378\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.705003 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/e554db35-2dba-4138-9ca0-bd1371a9c63d-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"e554db35-2dba-4138-9ca0-bd1371a9c63d\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.705044 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cae28711-fbe7-40cc-8f31-4b6332ab5378-public-tls-certs\") pod \"nova-api-0\" (UID: \"cae28711-fbe7-40cc-8f31-4b6332ab5378\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.705073 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e554db35-2dba-4138-9ca0-bd1371a9c63d-logs\") pod \"nova-metadata-0\" (UID: \"e554db35-2dba-4138-9ca0-bd1371a9c63d\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.705158 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cae28711-fbe7-40cc-8f31-4b6332ab5378-internal-tls-certs\") pod \"nova-api-0\" (UID: \"cae28711-fbe7-40cc-8f31-4b6332ab5378\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.705207 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cae28711-fbe7-40cc-8f31-4b6332ab5378-logs\") pod \"nova-api-0\" (UID: \"cae28711-fbe7-40cc-8f31-4b6332ab5378\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.705508 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cae28711-fbe7-40cc-8f31-4b6332ab5378-logs\") pod \"nova-api-0\" (UID: \"cae28711-fbe7-40cc-8f31-4b6332ab5378\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.706343 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e554db35-2dba-4138-9ca0-bd1371a9c63d-logs\") pod \"nova-metadata-0\" (UID: \"e554db35-2dba-4138-9ca0-bd1371a9c63d\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.708439 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cae28711-fbe7-40cc-8f31-4b6332ab5378-public-tls-certs\") pod \"nova-api-0\" (UID: \"cae28711-fbe7-40cc-8f31-4b6332ab5378\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.708519 4558 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cae28711-fbe7-40cc-8f31-4b6332ab5378-internal-tls-certs\") pod \"nova-api-0\" (UID: \"cae28711-fbe7-40cc-8f31-4b6332ab5378\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.708856 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/e554db35-2dba-4138-9ca0-bd1371a9c63d-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"e554db35-2dba-4138-9ca0-bd1371a9c63d\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.709508 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cae28711-fbe7-40cc-8f31-4b6332ab5378-config-data\") pod \"nova-api-0\" (UID: \"cae28711-fbe7-40cc-8f31-4b6332ab5378\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.709629 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e554db35-2dba-4138-9ca0-bd1371a9c63d-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"e554db35-2dba-4138-9ca0-bd1371a9c63d\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.709783 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cae28711-fbe7-40cc-8f31-4b6332ab5378-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"cae28711-fbe7-40cc-8f31-4b6332ab5378\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.709863 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e554db35-2dba-4138-9ca0-bd1371a9c63d-config-data\") pod \"nova-metadata-0\" (UID: \"e554db35-2dba-4138-9ca0-bd1371a9c63d\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.726705 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z8rg2\" (UniqueName: \"kubernetes.io/projected/cae28711-fbe7-40cc-8f31-4b6332ab5378-kube-api-access-z8rg2\") pod \"nova-api-0\" (UID: \"cae28711-fbe7-40cc-8f31-4b6332ab5378\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.727035 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wxx67\" (UniqueName: \"kubernetes.io/projected/e554db35-2dba-4138-9ca0-bd1371a9c63d-kube-api-access-wxx67\") pod \"nova-metadata-0\" (UID: \"e554db35-2dba-4138-9ca0-bd1371a9c63d\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.827067 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:00:15 crc kubenswrapper[4558]: I0120 17:00:15.840190 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:00:16 crc kubenswrapper[4558]: I0120 17:00:16.230297 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:00:16 crc kubenswrapper[4558]: W0120 17:00:16.231698 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcae28711_fbe7_40cc_8f31_4b6332ab5378.slice/crio-e3192ae96299f5cf335275f46b8831871adafba93bf06e759065461f515313f9 WatchSource:0}: Error finding container e3192ae96299f5cf335275f46b8831871adafba93bf06e759065461f515313f9: Status 404 returned error can't find the container with id e3192ae96299f5cf335275f46b8831871adafba93bf06e759065461f515313f9 Jan 20 17:00:16 crc kubenswrapper[4558]: I0120 17:00:16.296121 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:00:16 crc kubenswrapper[4558]: W0120 17:00:16.313466 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode554db35_2dba_4138_9ca0_bd1371a9c63d.slice/crio-3351f20bfe9120a3bb72867d82de9088a995b7400fcc95668e83d9d78c871d67 WatchSource:0}: Error finding container 3351f20bfe9120a3bb72867d82de9088a995b7400fcc95668e83d9d78c871d67: Status 404 returned error can't find the container with id 3351f20bfe9120a3bb72867d82de9088a995b7400fcc95668e83d9d78c871d67 Jan 20 17:00:16 crc kubenswrapper[4558]: I0120 17:00:16.429832 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"cae28711-fbe7-40cc-8f31-4b6332ab5378","Type":"ContainerStarted","Data":"7f9dbce9ba44238985c3e3d3fb5a42a8258f76871b2f32a165f9c1ead22275de"} Jan 20 17:00:16 crc kubenswrapper[4558]: I0120 17:00:16.429869 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"cae28711-fbe7-40cc-8f31-4b6332ab5378","Type":"ContainerStarted","Data":"e3192ae96299f5cf335275f46b8831871adafba93bf06e759065461f515313f9"} Jan 20 17:00:16 crc kubenswrapper[4558]: I0120 17:00:16.436045 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"e554db35-2dba-4138-9ca0-bd1371a9c63d","Type":"ContainerStarted","Data":"60c80db7b21795bae641e9692b2deb97128b55cb001a272936bfaf668d4bfcfb"} Jan 20 17:00:16 crc kubenswrapper[4558]: I0120 17:00:16.436071 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"e554db35-2dba-4138-9ca0-bd1371a9c63d","Type":"ContainerStarted","Data":"3351f20bfe9120a3bb72867d82de9088a995b7400fcc95668e83d9d78c871d67"} Jan 20 17:00:16 crc kubenswrapper[4558]: I0120 17:00:16.576027 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6afafa3e-aec8-4d69-beb3-ae8d48c9aad4" path="/var/lib/kubelet/pods/6afafa3e-aec8-4d69-beb3-ae8d48c9aad4/volumes" Jan 20 17:00:16 crc kubenswrapper[4558]: I0120 17:00:16.576701 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aa2a96c7-ff73-49de-9acc-7e5ce8791029" path="/var/lib/kubelet/pods/aa2a96c7-ff73-49de-9acc-7e5ce8791029/volumes" Jan 20 17:00:17 crc kubenswrapper[4558]: I0120 17:00:17.445260 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"cae28711-fbe7-40cc-8f31-4b6332ab5378","Type":"ContainerStarted","Data":"27c4f1ab95cdf957befc95b02a160445223047218baee3603004129258c0b5fa"} Jan 20 17:00:17 crc 
kubenswrapper[4558]: I0120 17:00:17.446932 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"e554db35-2dba-4138-9ca0-bd1371a9c63d","Type":"ContainerStarted","Data":"62565ba736515d1bd9fcfd983ba37f6e8370144dc694ce16eecef9e3a396d592"} Jan 20 17:00:17 crc kubenswrapper[4558]: I0120 17:00:17.457858 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-api-0" podStartSLOduration=2.457844761 podStartE2EDuration="2.457844761s" podCreationTimestamp="2026-01-20 17:00:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:00:17.457667708 +0000 UTC m=+1111.218005676" watchObservedRunningTime="2026-01-20 17:00:17.457844761 +0000 UTC m=+1111.218182729" Jan 20 17:00:17 crc kubenswrapper[4558]: I0120 17:00:17.476548 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-metadata-0" podStartSLOduration=2.476533105 podStartE2EDuration="2.476533105s" podCreationTimestamp="2026-01-20 17:00:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:00:17.470591091 +0000 UTC m=+1111.230929058" watchObservedRunningTime="2026-01-20 17:00:17.476533105 +0000 UTC m=+1111.236871072" Jan 20 17:00:18 crc kubenswrapper[4558]: I0120 17:00:18.753775 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:00:20 crc kubenswrapper[4558]: I0120 17:00:20.840453 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:00:20 crc kubenswrapper[4558]: I0120 17:00:20.840663 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:00:23 crc kubenswrapper[4558]: I0120 17:00:23.754683 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:00:23 crc kubenswrapper[4558]: I0120 17:00:23.773645 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:00:24 crc kubenswrapper[4558]: I0120 17:00:24.509726 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:00:25 crc kubenswrapper[4558]: I0120 17:00:25.827550 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:00:25 crc kubenswrapper[4558]: I0120 17:00:25.827769 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:00:25 crc kubenswrapper[4558]: I0120 17:00:25.841193 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:00:25 crc kubenswrapper[4558]: I0120 17:00:25.841270 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:00:26 crc kubenswrapper[4558]: I0120 17:00:26.839281 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" podUID="cae28711-fbe7-40cc-8f31-4b6332ab5378" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.188:8774/\": net/http: request canceled 
(Client.Timeout exceeded while awaiting headers)" Jan 20 17:00:26 crc kubenswrapper[4558]: I0120 17:00:26.839309 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" podUID="cae28711-fbe7-40cc-8f31-4b6332ab5378" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.188:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:00:26 crc kubenswrapper[4558]: I0120 17:00:26.851278 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-metadata-0" podUID="e554db35-2dba-4138-9ca0-bd1371a9c63d" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.189:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:00:26 crc kubenswrapper[4558]: I0120 17:00:26.851289 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-metadata-0" podUID="e554db35-2dba-4138-9ca0-bd1371a9c63d" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.189:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:00:27 crc kubenswrapper[4558]: I0120 17:00:27.330240 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 17:00:27 crc kubenswrapper[4558]: I0120 17:00:27.330280 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 17:00:27 crc kubenswrapper[4558]: I0120 17:00:27.330310 4558 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" Jan 20 17:00:27 crc kubenswrapper[4558]: I0120 17:00:27.330817 4558 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ed4e09803bdddac3e0ae0eefbe36e242b8c6be19d37cc1c52ac24044a0f94446"} pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 20 17:00:27 crc kubenswrapper[4558]: I0120 17:00:27.330863 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" containerID="cri-o://ed4e09803bdddac3e0ae0eefbe36e242b8c6be19d37cc1c52ac24044a0f94446" gracePeriod=600 Jan 20 17:00:27 crc kubenswrapper[4558]: I0120 17:00:27.509583 4558 generic.go:334] "Generic (PLEG): container finished" podID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerID="ed4e09803bdddac3e0ae0eefbe36e242b8c6be19d37cc1c52ac24044a0f94446" exitCode=0 Jan 20 17:00:27 crc kubenswrapper[4558]: I0120 17:00:27.509733 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" 
event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerDied","Data":"ed4e09803bdddac3e0ae0eefbe36e242b8c6be19d37cc1c52ac24044a0f94446"} Jan 20 17:00:27 crc kubenswrapper[4558]: I0120 17:00:27.509761 4558 scope.go:117] "RemoveContainer" containerID="caa2a6c1a9115d7646c6f8d1705231c6b0c61a1f8491c5197970cd8a3fb6bbc2" Jan 20 17:00:28 crc kubenswrapper[4558]: I0120 17:00:28.518172 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerStarted","Data":"27436b5757afe01efb5672d1056f7069457346353ae2e6eed76a9879c2ed4ed6"} Jan 20 17:00:30 crc kubenswrapper[4558]: I0120 17:00:30.617013 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:00:35 crc kubenswrapper[4558]: I0120 17:00:35.833567 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:00:35 crc kubenswrapper[4558]: I0120 17:00:35.834458 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:00:35 crc kubenswrapper[4558]: I0120 17:00:35.836375 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:00:35 crc kubenswrapper[4558]: I0120 17:00:35.839694 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:00:35 crc kubenswrapper[4558]: I0120 17:00:35.845061 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:00:35 crc kubenswrapper[4558]: I0120 17:00:35.845371 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:00:35 crc kubenswrapper[4558]: I0120 17:00:35.851083 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:00:36 crc kubenswrapper[4558]: I0120 17:00:36.572661 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:00:36 crc kubenswrapper[4558]: I0120 17:00:36.584944 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:00:36 crc kubenswrapper[4558]: I0120 17:00:36.585457 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:00:40 crc kubenswrapper[4558]: I0120 17:00:40.580280 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:00:40 crc kubenswrapper[4558]: I0120 17:00:40.581263 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovsdbserver-nb-0" podUID="8805bbf2-dd7b-41db-89ee-8b1d3053bf02" containerName="openstack-network-exporter" containerID="cri-o://110cfb1c28534cceb930fa4d3f07dc6b34a497e54cb137356436aeb344c172e2" gracePeriod=300 Jan 20 17:00:40 crc kubenswrapper[4558]: I0120 17:00:40.602090 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:00:40 crc kubenswrapper[4558]: I0120 17:00:40.603256 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="cae28711-fbe7-40cc-8f31-4b6332ab5378" 
containerName="nova-api-log" containerID="cri-o://7f9dbce9ba44238985c3e3d3fb5a42a8258f76871b2f32a165f9c1ead22275de" gracePeriod=30 Jan 20 17:00:40 crc kubenswrapper[4558]: I0120 17:00:40.603528 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="cae28711-fbe7-40cc-8f31-4b6332ab5378" containerName="nova-api-api" containerID="cri-o://27c4f1ab95cdf957befc95b02a160445223047218baee3603004129258c0b5fa" gracePeriod=30 Jan 20 17:00:40 crc kubenswrapper[4558]: I0120 17:00:40.665158 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:00:40 crc kubenswrapper[4558]: I0120 17:00:40.665422 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/openstackclient" podUID="4fe22ec2-af71-47bf-8053-86dee012df91" containerName="openstackclient" containerID="cri-o://00c9d686e4cf00913af81033ddbff9abfc51f8f3c4f1da58ce8a3bff41d506fa" gracePeriod=2 Jan 20 17:00:40 crc kubenswrapper[4558]: I0120 17:00:40.679310 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:00:40 crc kubenswrapper[4558]: I0120 17:00:40.692270 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:00:40 crc kubenswrapper[4558]: I0120 17:00:40.692625 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovn-northd-0" podUID="0c535ada-ed58-4c94-82c9-d6b1a35f78be" containerName="ovn-northd" containerID="cri-o://bf9413112ce86eb3a9f0b2a6c2c7bc61341d600bd5c50302655d4e980e7c1cc1" gracePeriod=30 Jan 20 17:00:40 crc kubenswrapper[4558]: I0120 17:00:40.692934 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovn-northd-0" podUID="0c535ada-ed58-4c94-82c9-d6b1a35f78be" containerName="openstack-network-exporter" containerID="cri-o://644a2c213ff45475c1b94047c5229ac3c3fe716ff973c59e6790b1314050f1f3" gracePeriod=30 Jan 20 17:00:40 crc kubenswrapper[4558]: I0120 17:00:40.709423 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:00:40 crc kubenswrapper[4558]: I0120 17:00:40.709676 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" podUID="316ffdc9-b0e1-47d9-90f8-45a8ceb87353" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://1e113a1489cb99ef7da66f10ddf493b5813e1713d85e6d3338e7700ffa711d7c" gracePeriod=30 Jan 20 17:00:40 crc kubenswrapper[4558]: I0120 17:00:40.739878 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:00:40 crc kubenswrapper[4558]: I0120 17:00:40.740107 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/memcached-0" podUID="324ace6f-8bac-4269-a674-d9b6e990cd18" containerName="memcached" containerID="cri-o://93541e9fd292f8f2cfa2ba3ec3bd317e9352f88de020c5959cf2c84ba5b458ba" gracePeriod=30 Jan 20 17:00:40 crc kubenswrapper[4558]: I0120 17:00:40.793234 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-worker-c4f864f89-58gb9"] Jan 20 17:00:40 crc kubenswrapper[4558]: E0120 17:00:40.793564 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4fe22ec2-af71-47bf-8053-86dee012df91" containerName="openstackclient" Jan 20 17:00:40 crc kubenswrapper[4558]: I0120 
17:00:40.793578 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4fe22ec2-af71-47bf-8053-86dee012df91" containerName="openstackclient" Jan 20 17:00:40 crc kubenswrapper[4558]: I0120 17:00:40.793721 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="4fe22ec2-af71-47bf-8053-86dee012df91" containerName="openstackclient" Jan 20 17:00:40 crc kubenswrapper[4558]: I0120 17:00:40.794548 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-c4f864f89-58gb9" Jan 20 17:00:40 crc kubenswrapper[4558]: I0120 17:00:40.832751 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-7fc5477d66-pz8q2"] Jan 20 17:00:40 crc kubenswrapper[4558]: I0120 17:00:40.837705 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-7fc5477d66-pz8q2" Jan 20 17:00:40 crc kubenswrapper[4558]: I0120 17:00:40.841984 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:00:40 crc kubenswrapper[4558]: I0120 17:00:40.842133 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="e554db35-2dba-4138-9ca0-bd1371a9c63d" containerName="nova-metadata-log" containerID="cri-o://60c80db7b21795bae641e9692b2deb97128b55cb001a272936bfaf668d4bfcfb" gracePeriod=30 Jan 20 17:00:40 crc kubenswrapper[4558]: I0120 17:00:40.842301 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="e554db35-2dba-4138-9ca0-bd1371a9c63d" containerName="nova-metadata-metadata" containerID="cri-o://62565ba736515d1bd9fcfd983ba37f6e8370144dc694ce16eecef9e3a396d592" gracePeriod=30 Jan 20 17:00:40 crc kubenswrapper[4558]: I0120 17:00:40.850860 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:00:40 crc kubenswrapper[4558]: I0120 17:00:40.851071 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="10c92abf-cd48-4659-8595-ce9610c0fe2e" containerName="nova-scheduler-scheduler" containerID="cri-o://6c101cbe30eb5d950364b9539489f846cc6d1f7298614409d0709726ebba0b06" gracePeriod=30 Jan 20 17:00:40 crc kubenswrapper[4558]: I0120 17:00:40.862362 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:00:40 crc kubenswrapper[4558]: I0120 17:00:40.862562 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="d2ccd35d-37c4-450d-b04c-ac505e35b0e8" containerName="glance-log" containerID="cri-o://2f43a86fbf8e121d1c2b64dc721cdb7d0f3e79ac1af1c83f48879ac356eba5d7" gracePeriod=30 Jan 20 17:00:40 crc kubenswrapper[4558]: I0120 17:00:40.862691 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="d2ccd35d-37c4-450d-b04c-ac505e35b0e8" containerName="glance-httpd" containerID="cri-o://3a7a952277efc163959dfa821edbe7eed459482ba7b2f738ddd5297a1de3b321" gracePeriod=30 Jan 20 17:00:40 crc kubenswrapper[4558]: I0120 17:00:40.876218 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-7fc5477d66-pz8q2"] Jan 20 17:00:40 crc kubenswrapper[4558]: I0120 17:00:40.880464 
4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/nova-metadata-0" podUID="e554db35-2dba-4138-9ca0-bd1371a9c63d" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.189:8775/\": EOF" Jan 20 17:00:40 crc kubenswrapper[4558]: I0120 17:00:40.880639 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/nova-metadata-0" podUID="e554db35-2dba-4138-9ca0-bd1371a9c63d" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.189:8775/\": EOF" Jan 20 17:00:40 crc kubenswrapper[4558]: I0120 17:00:40.890800 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-worker-c4f864f89-58gb9"] Jan 20 17:00:40 crc kubenswrapper[4558]: I0120 17:00:40.918285 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:00:40 crc kubenswrapper[4558]: I0120 17:00:40.918699 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-scheduler-0" podUID="b8911a4f-a706-4956-9028-138c018a92ba" containerName="cinder-scheduler" containerID="cri-o://8c01eafed347e73a4f6bbd3d6df2573984ce045e60275b9c6bccf3c8df4b43d7" gracePeriod=30 Jan 20 17:00:40 crc kubenswrapper[4558]: I0120 17:00:40.918903 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-scheduler-0" podUID="b8911a4f-a706-4956-9028-138c018a92ba" containerName="probe" containerID="cri-o://d3bb244923af1279a2fec8556cc18369058a01a9dc93f5e798291670f59b37c6" gracePeriod=30 Jan 20 17:00:40 crc kubenswrapper[4558]: I0120 17:00:40.920024 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/de7f5467-1e83-42f0-86bb-ade85deec8f3-config-data\") pod \"barbican-worker-c4f864f89-58gb9\" (UID: \"de7f5467-1e83-42f0-86bb-ade85deec8f3\") " pod="openstack-kuttl-tests/barbican-worker-c4f864f89-58gb9" Jan 20 17:00:40 crc kubenswrapper[4558]: I0120 17:00:40.920111 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7776f921-0ae7-4a8c-b444-cf5b4b9a4f65-combined-ca-bundle\") pod \"barbican-keystone-listener-7fc5477d66-pz8q2\" (UID: \"7776f921-0ae7-4a8c-b444-cf5b4b9a4f65\") " pod="openstack-kuttl-tests/barbican-keystone-listener-7fc5477d66-pz8q2" Jan 20 17:00:40 crc kubenswrapper[4558]: I0120 17:00:40.920201 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7776f921-0ae7-4a8c-b444-cf5b4b9a4f65-logs\") pod \"barbican-keystone-listener-7fc5477d66-pz8q2\" (UID: \"7776f921-0ae7-4a8c-b444-cf5b4b9a4f65\") " pod="openstack-kuttl-tests/barbican-keystone-listener-7fc5477d66-pz8q2" Jan 20 17:00:40 crc kubenswrapper[4558]: I0120 17:00:40.920302 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/de7f5467-1e83-42f0-86bb-ade85deec8f3-logs\") pod \"barbican-worker-c4f864f89-58gb9\" (UID: \"de7f5467-1e83-42f0-86bb-ade85deec8f3\") " pod="openstack-kuttl-tests/barbican-worker-c4f864f89-58gb9" Jan 20 17:00:40 crc kubenswrapper[4558]: I0120 17:00:40.920387 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/7776f921-0ae7-4a8c-b444-cf5b4b9a4f65-config-data-custom\") pod \"barbican-keystone-listener-7fc5477d66-pz8q2\" (UID: \"7776f921-0ae7-4a8c-b444-cf5b4b9a4f65\") " pod="openstack-kuttl-tests/barbican-keystone-listener-7fc5477d66-pz8q2" Jan 20 17:00:40 crc kubenswrapper[4558]: I0120 17:00:40.920475 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/de7f5467-1e83-42f0-86bb-ade85deec8f3-config-data-custom\") pod \"barbican-worker-c4f864f89-58gb9\" (UID: \"de7f5467-1e83-42f0-86bb-ade85deec8f3\") " pod="openstack-kuttl-tests/barbican-worker-c4f864f89-58gb9" Jan 20 17:00:40 crc kubenswrapper[4558]: I0120 17:00:40.920564 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mss66\" (UniqueName: \"kubernetes.io/projected/de7f5467-1e83-42f0-86bb-ade85deec8f3-kube-api-access-mss66\") pod \"barbican-worker-c4f864f89-58gb9\" (UID: \"de7f5467-1e83-42f0-86bb-ade85deec8f3\") " pod="openstack-kuttl-tests/barbican-worker-c4f864f89-58gb9" Jan 20 17:00:40 crc kubenswrapper[4558]: I0120 17:00:40.920646 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7776f921-0ae7-4a8c-b444-cf5b4b9a4f65-config-data\") pod \"barbican-keystone-listener-7fc5477d66-pz8q2\" (UID: \"7776f921-0ae7-4a8c-b444-cf5b4b9a4f65\") " pod="openstack-kuttl-tests/barbican-keystone-listener-7fc5477d66-pz8q2" Jan 20 17:00:40 crc kubenswrapper[4558]: I0120 17:00:40.920708 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fr8kp\" (UniqueName: \"kubernetes.io/projected/7776f921-0ae7-4a8c-b444-cf5b4b9a4f65-kube-api-access-fr8kp\") pod \"barbican-keystone-listener-7fc5477d66-pz8q2\" (UID: \"7776f921-0ae7-4a8c-b444-cf5b4b9a4f65\") " pod="openstack-kuttl-tests/barbican-keystone-listener-7fc5477d66-pz8q2" Jan 20 17:00:40 crc kubenswrapper[4558]: I0120 17:00:40.920794 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de7f5467-1e83-42f0-86bb-ade85deec8f3-combined-ca-bundle\") pod \"barbican-worker-c4f864f89-58gb9\" (UID: \"de7f5467-1e83-42f0-86bb-ade85deec8f3\") " pod="openstack-kuttl-tests/barbican-worker-c4f864f89-58gb9" Jan 20 17:00:40 crc kubenswrapper[4558]: I0120 17:00:40.945296 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovsdbserver-nb-0" podUID="8805bbf2-dd7b-41db-89ee-8b1d3053bf02" containerName="ovsdbserver-nb" containerID="cri-o://3827b2816f3cb25c8c183f31035843c25fc855df97604f4ab511f8bc21e7c1ef" gracePeriod=300 Jan 20 17:00:40 crc kubenswrapper[4558]: I0120 17:00:40.948813 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:00:40 crc kubenswrapper[4558]: I0120 17:00:40.949124 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podUID="180488ea-6eeb-4078-9b57-351bdfb54f5d" containerName="nova-cell1-conductor-conductor" containerID="cri-o://14704db98182ec5a7c903f3bee7ae82b53751a4fd86ecd1c586dd1360d6439c9" gracePeriod=30 Jan 20 17:00:40 crc kubenswrapper[4558]: I0120 17:00:40.959389 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/placement-d54787f8d-7fznp"] Jan 
20 17:00:40 crc kubenswrapper[4558]: I0120 17:00:40.960818 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-d54787f8d-7fznp" Jan 20 17:00:40 crc kubenswrapper[4558]: I0120 17:00:40.987075 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:00:40 crc kubenswrapper[4558]: I0120 17:00:40.987337 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-api-0" podUID="764c7a15-6a1a-470c-9d0b-a63ed418cc09" containerName="cinder-api-log" containerID="cri-o://5fcc9425b33989b0ac07f50e1aa0774a071177ee9453a094ad6d619c7974603b" gracePeriod=30 Jan 20 17:00:40 crc kubenswrapper[4558]: I0120 17:00:40.987447 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-api-0" podUID="764c7a15-6a1a-470c-9d0b-a63ed418cc09" containerName="cinder-api" containerID="cri-o://44cfb2c4bfc8454ac6259ca35fbc70464e44213a38cdb484b02b66bc424c90d5" gracePeriod=30 Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.004157 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-55ff946595-f8gjf"] Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.005276 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-55ff946595-f8gjf" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.018232 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-d54787f8d-7fznp"] Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.022428 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de7f5467-1e83-42f0-86bb-ade85deec8f3-combined-ca-bundle\") pod \"barbican-worker-c4f864f89-58gb9\" (UID: \"de7f5467-1e83-42f0-86bb-ade85deec8f3\") " pod="openstack-kuttl-tests/barbican-worker-c4f864f89-58gb9" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.023146 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c268a2f5-7c67-4935-8f3e-bdd83aeccc95-internal-tls-certs\") pod \"placement-d54787f8d-7fznp\" (UID: \"c268a2f5-7c67-4935-8f3e-bdd83aeccc95\") " pod="openstack-kuttl-tests/placement-d54787f8d-7fznp" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.023236 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c268a2f5-7c67-4935-8f3e-bdd83aeccc95-logs\") pod \"placement-d54787f8d-7fznp\" (UID: \"c268a2f5-7c67-4935-8f3e-bdd83aeccc95\") " pod="openstack-kuttl-tests/placement-d54787f8d-7fznp" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.023266 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/de7f5467-1e83-42f0-86bb-ade85deec8f3-config-data\") pod \"barbican-worker-c4f864f89-58gb9\" (UID: \"de7f5467-1e83-42f0-86bb-ade85deec8f3\") " pod="openstack-kuttl-tests/barbican-worker-c4f864f89-58gb9" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.023284 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7776f921-0ae7-4a8c-b444-cf5b4b9a4f65-combined-ca-bundle\") pod \"barbican-keystone-listener-7fc5477d66-pz8q2\" (UID: 
\"7776f921-0ae7-4a8c-b444-cf5b4b9a4f65\") " pod="openstack-kuttl-tests/barbican-keystone-listener-7fc5477d66-pz8q2" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.023301 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7776f921-0ae7-4a8c-b444-cf5b4b9a4f65-logs\") pod \"barbican-keystone-listener-7fc5477d66-pz8q2\" (UID: \"7776f921-0ae7-4a8c-b444-cf5b4b9a4f65\") " pod="openstack-kuttl-tests/barbican-keystone-listener-7fc5477d66-pz8q2" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.023338 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/de7f5467-1e83-42f0-86bb-ade85deec8f3-logs\") pod \"barbican-worker-c4f864f89-58gb9\" (UID: \"de7f5467-1e83-42f0-86bb-ade85deec8f3\") " pod="openstack-kuttl-tests/barbican-worker-c4f864f89-58gb9" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.023366 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7776f921-0ae7-4a8c-b444-cf5b4b9a4f65-config-data-custom\") pod \"barbican-keystone-listener-7fc5477d66-pz8q2\" (UID: \"7776f921-0ae7-4a8c-b444-cf5b4b9a4f65\") " pod="openstack-kuttl-tests/barbican-keystone-listener-7fc5477d66-pz8q2" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.023384 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c268a2f5-7c67-4935-8f3e-bdd83aeccc95-scripts\") pod \"placement-d54787f8d-7fznp\" (UID: \"c268a2f5-7c67-4935-8f3e-bdd83aeccc95\") " pod="openstack-kuttl-tests/placement-d54787f8d-7fznp" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.023405 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c268a2f5-7c67-4935-8f3e-bdd83aeccc95-config-data\") pod \"placement-d54787f8d-7fznp\" (UID: \"c268a2f5-7c67-4935-8f3e-bdd83aeccc95\") " pod="openstack-kuttl-tests/placement-d54787f8d-7fznp" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.023440 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/de7f5467-1e83-42f0-86bb-ade85deec8f3-config-data-custom\") pod \"barbican-worker-c4f864f89-58gb9\" (UID: \"de7f5467-1e83-42f0-86bb-ade85deec8f3\") " pod="openstack-kuttl-tests/barbican-worker-c4f864f89-58gb9" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.023458 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c268a2f5-7c67-4935-8f3e-bdd83aeccc95-combined-ca-bundle\") pod \"placement-d54787f8d-7fznp\" (UID: \"c268a2f5-7c67-4935-8f3e-bdd83aeccc95\") " pod="openstack-kuttl-tests/placement-d54787f8d-7fznp" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.023473 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bgb25\" (UniqueName: \"kubernetes.io/projected/c268a2f5-7c67-4935-8f3e-bdd83aeccc95-kube-api-access-bgb25\") pod \"placement-d54787f8d-7fznp\" (UID: \"c268a2f5-7c67-4935-8f3e-bdd83aeccc95\") " pod="openstack-kuttl-tests/placement-d54787f8d-7fznp" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.023507 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-mss66\" (UniqueName: \"kubernetes.io/projected/de7f5467-1e83-42f0-86bb-ade85deec8f3-kube-api-access-mss66\") pod \"barbican-worker-c4f864f89-58gb9\" (UID: \"de7f5467-1e83-42f0-86bb-ade85deec8f3\") " pod="openstack-kuttl-tests/barbican-worker-c4f864f89-58gb9" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.023536 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7776f921-0ae7-4a8c-b444-cf5b4b9a4f65-config-data\") pod \"barbican-keystone-listener-7fc5477d66-pz8q2\" (UID: \"7776f921-0ae7-4a8c-b444-cf5b4b9a4f65\") " pod="openstack-kuttl-tests/barbican-keystone-listener-7fc5477d66-pz8q2" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.023554 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fr8kp\" (UniqueName: \"kubernetes.io/projected/7776f921-0ae7-4a8c-b444-cf5b4b9a4f65-kube-api-access-fr8kp\") pod \"barbican-keystone-listener-7fc5477d66-pz8q2\" (UID: \"7776f921-0ae7-4a8c-b444-cf5b4b9a4f65\") " pod="openstack-kuttl-tests/barbican-keystone-listener-7fc5477d66-pz8q2" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.023576 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c268a2f5-7c67-4935-8f3e-bdd83aeccc95-public-tls-certs\") pod \"placement-d54787f8d-7fznp\" (UID: \"c268a2f5-7c67-4935-8f3e-bdd83aeccc95\") " pod="openstack-kuttl-tests/placement-d54787f8d-7fznp" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.024428 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/de7f5467-1e83-42f0-86bb-ade85deec8f3-logs\") pod \"barbican-worker-c4f864f89-58gb9\" (UID: \"de7f5467-1e83-42f0-86bb-ade85deec8f3\") " pod="openstack-kuttl-tests/barbican-worker-c4f864f89-58gb9" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.024678 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7776f921-0ae7-4a8c-b444-cf5b4b9a4f65-logs\") pod \"barbican-keystone-listener-7fc5477d66-pz8q2\" (UID: \"7776f921-0ae7-4a8c-b444-cf5b4b9a4f65\") " pod="openstack-kuttl-tests/barbican-keystone-listener-7fc5477d66-pz8q2" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.027732 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7776f921-0ae7-4a8c-b444-cf5b4b9a4f65-combined-ca-bundle\") pod \"barbican-keystone-listener-7fc5477d66-pz8q2\" (UID: \"7776f921-0ae7-4a8c-b444-cf5b4b9a4f65\") " pod="openstack-kuttl-tests/barbican-keystone-listener-7fc5477d66-pz8q2" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.029504 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-api-5df68bf4dd-zbx8h"] Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.030903 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-api-5df68bf4dd-zbx8h" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.032148 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7776f921-0ae7-4a8c-b444-cf5b4b9a4f65-config-data\") pod \"barbican-keystone-listener-7fc5477d66-pz8q2\" (UID: \"7776f921-0ae7-4a8c-b444-cf5b4b9a4f65\") " pod="openstack-kuttl-tests/barbican-keystone-listener-7fc5477d66-pz8q2" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.033400 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de7f5467-1e83-42f0-86bb-ade85deec8f3-combined-ca-bundle\") pod \"barbican-worker-c4f864f89-58gb9\" (UID: \"de7f5467-1e83-42f0-86bb-ade85deec8f3\") " pod="openstack-kuttl-tests/barbican-worker-c4f864f89-58gb9" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.033865 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/de7f5467-1e83-42f0-86bb-ade85deec8f3-config-data-custom\") pod \"barbican-worker-c4f864f89-58gb9\" (UID: \"de7f5467-1e83-42f0-86bb-ade85deec8f3\") " pod="openstack-kuttl-tests/barbican-worker-c4f864f89-58gb9" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.034805 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/de7f5467-1e83-42f0-86bb-ade85deec8f3-config-data\") pod \"barbican-worker-c4f864f89-58gb9\" (UID: \"de7f5467-1e83-42f0-86bb-ade85deec8f3\") " pod="openstack-kuttl-tests/barbican-worker-c4f864f89-58gb9" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.036324 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-55ff946595-f8gjf"] Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.037558 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7776f921-0ae7-4a8c-b444-cf5b4b9a4f65-config-data-custom\") pod \"barbican-keystone-listener-7fc5477d66-pz8q2\" (UID: \"7776f921-0ae7-4a8c-b444-cf5b4b9a4f65\") " pod="openstack-kuttl-tests/barbican-keystone-listener-7fc5477d66-pz8q2" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.039668 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fr8kp\" (UniqueName: \"kubernetes.io/projected/7776f921-0ae7-4a8c-b444-cf5b4b9a4f65-kube-api-access-fr8kp\") pod \"barbican-keystone-listener-7fc5477d66-pz8q2\" (UID: \"7776f921-0ae7-4a8c-b444-cf5b4b9a4f65\") " pod="openstack-kuttl-tests/barbican-keystone-listener-7fc5477d66-pz8q2" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.044957 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mss66\" (UniqueName: \"kubernetes.io/projected/de7f5467-1e83-42f0-86bb-ade85deec8f3-kube-api-access-mss66\") pod \"barbican-worker-c4f864f89-58gb9\" (UID: \"de7f5467-1e83-42f0-86bb-ade85deec8f3\") " pod="openstack-kuttl-tests/barbican-worker-c4f864f89-58gb9" Jan 20 17:00:41 crc kubenswrapper[4558]: E0120 17:00:41.051141 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 3827b2816f3cb25c8c183f31035843c25fc855df97604f4ab511f8bc21e7c1ef is running failed: container process not found" containerID="3827b2816f3cb25c8c183f31035843c25fc855df97604f4ab511f8bc21e7c1ef" 
cmd=["/usr/bin/pidof","ovsdb-server"] Jan 20 17:00:41 crc kubenswrapper[4558]: E0120 17:00:41.055277 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 3827b2816f3cb25c8c183f31035843c25fc855df97604f4ab511f8bc21e7c1ef is running failed: container process not found" containerID="3827b2816f3cb25c8c183f31035843c25fc855df97604f4ab511f8bc21e7c1ef" cmd=["/usr/bin/pidof","ovsdb-server"] Jan 20 17:00:41 crc kubenswrapper[4558]: E0120 17:00:41.056391 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 3827b2816f3cb25c8c183f31035843c25fc855df97604f4ab511f8bc21e7c1ef is running failed: container process not found" containerID="3827b2816f3cb25c8c183f31035843c25fc855df97604f4ab511f8bc21e7c1ef" cmd=["/usr/bin/pidof","ovsdb-server"] Jan 20 17:00:41 crc kubenswrapper[4558]: E0120 17:00:41.056425 4558 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 3827b2816f3cb25c8c183f31035843c25fc855df97604f4ab511f8bc21e7c1ef is running failed: container process not found" probeType="Readiness" pod="openstack-kuttl-tests/ovsdbserver-nb-0" podUID="8805bbf2-dd7b-41db-89ee-8b1d3053bf02" containerName="ovsdbserver-nb" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.063630 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-api-5df68bf4dd-zbx8h"] Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.068530 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.068824 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="9ea3b871-4db3-4108-baea-e57a23d9d6c5" containerName="nova-cell0-conductor-conductor" containerID="cri-o://6074e6e8906baa2f1dffa9b96ff41ea08d5604ac43ef2c6593b686fd35af868a" gracePeriod=30 Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.073754 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.078226 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.078592 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovsdbserver-sb-0" podUID="1c0d7766-0fb5-47ec-87f4-6644f5b1afd9" containerName="openstack-network-exporter" containerID="cri-o://6fa53430a72d92b62c65c8602e3f7ecea38be4fd2c7715f04903b7006b2c6625" gracePeriod=300 Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.091651 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.091877 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="b28e6999-784e-4577-88bb-db648f7a3cbc" containerName="glance-log" containerID="cri-o://ed2110d076583425a8af34103095d99e22acc0c9b835eb2f9a4e87e9dc475ca6" gracePeriod=30 Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.092017 4558 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="b28e6999-784e-4577-88bb-db648f7a3cbc" containerName="glance-httpd" containerID="cri-o://a79b4e6f8ce00fed51a727ad2bc430351f4d330704b2b9cb8bb5ce9d963a3a12" gracePeriod=30 Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.098015 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.103395 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-6b64ccd79d-5qj2x"] Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.104607 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-6b64ccd79d-5qj2x" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.114377 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-6b64ccd79d-5qj2x"] Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.124551 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c268a2f5-7c67-4935-8f3e-bdd83aeccc95-internal-tls-certs\") pod \"placement-d54787f8d-7fznp\" (UID: \"c268a2f5-7c67-4935-8f3e-bdd83aeccc95\") " pod="openstack-kuttl-tests/placement-d54787f8d-7fznp" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.124587 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3735ca3d-3764-4d36-b912-fbf0bfb96dd8-public-tls-certs\") pod \"barbican-api-5df68bf4dd-zbx8h\" (UID: \"3735ca3d-3764-4d36-b912-fbf0bfb96dd8\") " pod="openstack-kuttl-tests/barbican-api-5df68bf4dd-zbx8h" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.124615 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-credential-keys\") pod \"keystone-55ff946595-f8gjf\" (UID: \"4969c1af-53c0-435a-bd06-6bd493c81c80\") " pod="openstack-kuttl-tests/keystone-55ff946595-f8gjf" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.124641 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3735ca3d-3764-4d36-b912-fbf0bfb96dd8-logs\") pod \"barbican-api-5df68bf4dd-zbx8h\" (UID: \"3735ca3d-3764-4d36-b912-fbf0bfb96dd8\") " pod="openstack-kuttl-tests/barbican-api-5df68bf4dd-zbx8h" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.124664 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c268a2f5-7c67-4935-8f3e-bdd83aeccc95-logs\") pod \"placement-d54787f8d-7fznp\" (UID: \"c268a2f5-7c67-4935-8f3e-bdd83aeccc95\") " pod="openstack-kuttl-tests/placement-d54787f8d-7fznp" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.124691 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3735ca3d-3764-4d36-b912-fbf0bfb96dd8-config-data-custom\") pod \"barbican-api-5df68bf4dd-zbx8h\" (UID: \"3735ca3d-3764-4d36-b912-fbf0bfb96dd8\") " pod="openstack-kuttl-tests/barbican-api-5df68bf4dd-zbx8h" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.124794 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-x9556\" (UniqueName: \"kubernetes.io/projected/3735ca3d-3764-4d36-b912-fbf0bfb96dd8-kube-api-access-x9556\") pod \"barbican-api-5df68bf4dd-zbx8h\" (UID: \"3735ca3d-3764-4d36-b912-fbf0bfb96dd8\") " pod="openstack-kuttl-tests/barbican-api-5df68bf4dd-zbx8h" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.124851 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mq58f\" (UniqueName: \"kubernetes.io/projected/4969c1af-53c0-435a-bd06-6bd493c81c80-kube-api-access-mq58f\") pod \"keystone-55ff946595-f8gjf\" (UID: \"4969c1af-53c0-435a-bd06-6bd493c81c80\") " pod="openstack-kuttl-tests/keystone-55ff946595-f8gjf" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.124888 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c268a2f5-7c67-4935-8f3e-bdd83aeccc95-scripts\") pod \"placement-d54787f8d-7fznp\" (UID: \"c268a2f5-7c67-4935-8f3e-bdd83aeccc95\") " pod="openstack-kuttl-tests/placement-d54787f8d-7fznp" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.125289 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c268a2f5-7c67-4935-8f3e-bdd83aeccc95-config-data\") pod \"placement-d54787f8d-7fznp\" (UID: \"c268a2f5-7c67-4935-8f3e-bdd83aeccc95\") " pod="openstack-kuttl-tests/placement-d54787f8d-7fznp" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.125333 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-internal-tls-certs\") pod \"keystone-55ff946595-f8gjf\" (UID: \"4969c1af-53c0-435a-bd06-6bd493c81c80\") " pod="openstack-kuttl-tests/keystone-55ff946595-f8gjf" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.125361 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-public-tls-certs\") pod \"keystone-55ff946595-f8gjf\" (UID: \"4969c1af-53c0-435a-bd06-6bd493c81c80\") " pod="openstack-kuttl-tests/keystone-55ff946595-f8gjf" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.125382 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-scripts\") pod \"keystone-55ff946595-f8gjf\" (UID: \"4969c1af-53c0-435a-bd06-6bd493c81c80\") " pod="openstack-kuttl-tests/keystone-55ff946595-f8gjf" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.125416 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c268a2f5-7c67-4935-8f3e-bdd83aeccc95-combined-ca-bundle\") pod \"placement-d54787f8d-7fznp\" (UID: \"c268a2f5-7c67-4935-8f3e-bdd83aeccc95\") " pod="openstack-kuttl-tests/placement-d54787f8d-7fznp" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.125432 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bgb25\" (UniqueName: \"kubernetes.io/projected/c268a2f5-7c67-4935-8f3e-bdd83aeccc95-kube-api-access-bgb25\") pod \"placement-d54787f8d-7fznp\" (UID: \"c268a2f5-7c67-4935-8f3e-bdd83aeccc95\") " pod="openstack-kuttl-tests/placement-d54787f8d-7fznp" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 
17:00:41.125455 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-config-data\") pod \"keystone-55ff946595-f8gjf\" (UID: \"4969c1af-53c0-435a-bd06-6bd493c81c80\") " pod="openstack-kuttl-tests/keystone-55ff946595-f8gjf" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.125471 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-combined-ca-bundle\") pod \"keystone-55ff946595-f8gjf\" (UID: \"4969c1af-53c0-435a-bd06-6bd493c81c80\") " pod="openstack-kuttl-tests/keystone-55ff946595-f8gjf" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.125519 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-fernet-keys\") pod \"keystone-55ff946595-f8gjf\" (UID: \"4969c1af-53c0-435a-bd06-6bd493c81c80\") " pod="openstack-kuttl-tests/keystone-55ff946595-f8gjf" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.125575 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3735ca3d-3764-4d36-b912-fbf0bfb96dd8-config-data\") pod \"barbican-api-5df68bf4dd-zbx8h\" (UID: \"3735ca3d-3764-4d36-b912-fbf0bfb96dd8\") " pod="openstack-kuttl-tests/barbican-api-5df68bf4dd-zbx8h" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.125599 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3735ca3d-3764-4d36-b912-fbf0bfb96dd8-internal-tls-certs\") pod \"barbican-api-5df68bf4dd-zbx8h\" (UID: \"3735ca3d-3764-4d36-b912-fbf0bfb96dd8\") " pod="openstack-kuttl-tests/barbican-api-5df68bf4dd-zbx8h" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.125623 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3735ca3d-3764-4d36-b912-fbf0bfb96dd8-combined-ca-bundle\") pod \"barbican-api-5df68bf4dd-zbx8h\" (UID: \"3735ca3d-3764-4d36-b912-fbf0bfb96dd8\") " pod="openstack-kuttl-tests/barbican-api-5df68bf4dd-zbx8h" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.125648 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c268a2f5-7c67-4935-8f3e-bdd83aeccc95-public-tls-certs\") pod \"placement-d54787f8d-7fznp\" (UID: \"c268a2f5-7c67-4935-8f3e-bdd83aeccc95\") " pod="openstack-kuttl-tests/placement-d54787f8d-7fznp" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.128354 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c268a2f5-7c67-4935-8f3e-bdd83aeccc95-public-tls-certs\") pod \"placement-d54787f8d-7fznp\" (UID: \"c268a2f5-7c67-4935-8f3e-bdd83aeccc95\") " pod="openstack-kuttl-tests/placement-d54787f8d-7fznp" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.129021 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c268a2f5-7c67-4935-8f3e-bdd83aeccc95-internal-tls-certs\") pod \"placement-d54787f8d-7fznp\" (UID: 
\"c268a2f5-7c67-4935-8f3e-bdd83aeccc95\") " pod="openstack-kuttl-tests/placement-d54787f8d-7fznp" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.129519 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c268a2f5-7c67-4935-8f3e-bdd83aeccc95-logs\") pod \"placement-d54787f8d-7fznp\" (UID: \"c268a2f5-7c67-4935-8f3e-bdd83aeccc95\") " pod="openstack-kuttl-tests/placement-d54787f8d-7fznp" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.129712 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c268a2f5-7c67-4935-8f3e-bdd83aeccc95-config-data\") pod \"placement-d54787f8d-7fznp\" (UID: \"c268a2f5-7c67-4935-8f3e-bdd83aeccc95\") " pod="openstack-kuttl-tests/placement-d54787f8d-7fznp" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.134028 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovsdbserver-sb-0" podUID="1c0d7766-0fb5-47ec-87f4-6644f5b1afd9" containerName="ovsdbserver-sb" containerID="cri-o://20a844916c974def3060dd83baa14a82ac52a34e846bc5a85b9c976771061eb1" gracePeriod=300 Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.135583 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c268a2f5-7c67-4935-8f3e-bdd83aeccc95-combined-ca-bundle\") pod \"placement-d54787f8d-7fznp\" (UID: \"c268a2f5-7c67-4935-8f3e-bdd83aeccc95\") " pod="openstack-kuttl-tests/placement-d54787f8d-7fznp" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.142419 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c268a2f5-7c67-4935-8f3e-bdd83aeccc95-scripts\") pod \"placement-d54787f8d-7fznp\" (UID: \"c268a2f5-7c67-4935-8f3e-bdd83aeccc95\") " pod="openstack-kuttl-tests/placement-d54787f8d-7fznp" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.147652 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bgb25\" (UniqueName: \"kubernetes.io/projected/c268a2f5-7c67-4935-8f3e-bdd83aeccc95-kube-api-access-bgb25\") pod \"placement-d54787f8d-7fznp\" (UID: \"c268a2f5-7c67-4935-8f3e-bdd83aeccc95\") " pod="openstack-kuttl-tests/placement-d54787f8d-7fznp" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.161608 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-c4f864f89-58gb9" Jan 20 17:00:41 crc kubenswrapper[4558]: E0120 17:00:41.190130 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="14704db98182ec5a7c903f3bee7ae82b53751a4fd86ecd1c586dd1360d6439c9" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:00:41 crc kubenswrapper[4558]: E0120 17:00:41.191479 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="14704db98182ec5a7c903f3bee7ae82b53751a4fd86ecd1c586dd1360d6439c9" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:00:41 crc kubenswrapper[4558]: E0120 17:00:41.192415 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="14704db98182ec5a7c903f3bee7ae82b53751a4fd86ecd1c586dd1360d6439c9" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:00:41 crc kubenswrapper[4558]: E0120 17:00:41.192439 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podUID="180488ea-6eeb-4078-9b57-351bdfb54f5d" containerName="nova-cell1-conductor-conductor" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.196153 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-7fc5477d66-pz8q2" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.227087 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3735ca3d-3764-4d36-b912-fbf0bfb96dd8-combined-ca-bundle\") pod \"barbican-api-5df68bf4dd-zbx8h\" (UID: \"3735ca3d-3764-4d36-b912-fbf0bfb96dd8\") " pod="openstack-kuttl-tests/barbican-api-5df68bf4dd-zbx8h" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.227182 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4cln2\" (UniqueName: \"kubernetes.io/projected/5d3423c4-ff78-4ff7-b42b-b3c93b309d52-kube-api-access-4cln2\") pod \"neutron-6b64ccd79d-5qj2x\" (UID: \"5d3423c4-ff78-4ff7-b42b-b3c93b309d52\") " pod="openstack-kuttl-tests/neutron-6b64ccd79d-5qj2x" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.227203 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3735ca3d-3764-4d36-b912-fbf0bfb96dd8-public-tls-certs\") pod \"barbican-api-5df68bf4dd-zbx8h\" (UID: \"3735ca3d-3764-4d36-b912-fbf0bfb96dd8\") " pod="openstack-kuttl-tests/barbican-api-5df68bf4dd-zbx8h" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.227237 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-credential-keys\") pod \"keystone-55ff946595-f8gjf\" (UID: \"4969c1af-53c0-435a-bd06-6bd493c81c80\") " pod="openstack-kuttl-tests/keystone-55ff946595-f8gjf" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.227279 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3735ca3d-3764-4d36-b912-fbf0bfb96dd8-logs\") pod \"barbican-api-5df68bf4dd-zbx8h\" (UID: \"3735ca3d-3764-4d36-b912-fbf0bfb96dd8\") " pod="openstack-kuttl-tests/barbican-api-5df68bf4dd-zbx8h" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.227314 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/5d3423c4-ff78-4ff7-b42b-b3c93b309d52-config\") pod \"neutron-6b64ccd79d-5qj2x\" (UID: \"5d3423c4-ff78-4ff7-b42b-b3c93b309d52\") " pod="openstack-kuttl-tests/neutron-6b64ccd79d-5qj2x" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.227342 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3735ca3d-3764-4d36-b912-fbf0bfb96dd8-config-data-custom\") pod \"barbican-api-5df68bf4dd-zbx8h\" (UID: \"3735ca3d-3764-4d36-b912-fbf0bfb96dd8\") " pod="openstack-kuttl-tests/barbican-api-5df68bf4dd-zbx8h" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.227369 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x9556\" (UniqueName: \"kubernetes.io/projected/3735ca3d-3764-4d36-b912-fbf0bfb96dd8-kube-api-access-x9556\") pod \"barbican-api-5df68bf4dd-zbx8h\" (UID: \"3735ca3d-3764-4d36-b912-fbf0bfb96dd8\") " pod="openstack-kuttl-tests/barbican-api-5df68bf4dd-zbx8h" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.227390 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/5d3423c4-ff78-4ff7-b42b-b3c93b309d52-ovndb-tls-certs\") pod \"neutron-6b64ccd79d-5qj2x\" (UID: \"5d3423c4-ff78-4ff7-b42b-b3c93b309d52\") " pod="openstack-kuttl-tests/neutron-6b64ccd79d-5qj2x" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.227412 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mq58f\" (UniqueName: \"kubernetes.io/projected/4969c1af-53c0-435a-bd06-6bd493c81c80-kube-api-access-mq58f\") pod \"keystone-55ff946595-f8gjf\" (UID: \"4969c1af-53c0-435a-bd06-6bd493c81c80\") " pod="openstack-kuttl-tests/keystone-55ff946595-f8gjf" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.227461 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-internal-tls-certs\") pod \"keystone-55ff946595-f8gjf\" (UID: \"4969c1af-53c0-435a-bd06-6bd493c81c80\") " pod="openstack-kuttl-tests/keystone-55ff946595-f8gjf" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.227481 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d3423c4-ff78-4ff7-b42b-b3c93b309d52-combined-ca-bundle\") pod \"neutron-6b64ccd79d-5qj2x\" (UID: \"5d3423c4-ff78-4ff7-b42b-b3c93b309d52\") " pod="openstack-kuttl-tests/neutron-6b64ccd79d-5qj2x" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.227502 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-scripts\") pod \"keystone-55ff946595-f8gjf\" (UID: \"4969c1af-53c0-435a-bd06-6bd493c81c80\") " pod="openstack-kuttl-tests/keystone-55ff946595-f8gjf" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.227517 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-public-tls-certs\") pod \"keystone-55ff946595-f8gjf\" (UID: \"4969c1af-53c0-435a-bd06-6bd493c81c80\") " pod="openstack-kuttl-tests/keystone-55ff946595-f8gjf" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.227550 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5d3423c4-ff78-4ff7-b42b-b3c93b309d52-internal-tls-certs\") pod \"neutron-6b64ccd79d-5qj2x\" (UID: \"5d3423c4-ff78-4ff7-b42b-b3c93b309d52\") " pod="openstack-kuttl-tests/neutron-6b64ccd79d-5qj2x" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.227578 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-config-data\") pod \"keystone-55ff946595-f8gjf\" (UID: \"4969c1af-53c0-435a-bd06-6bd493c81c80\") " pod="openstack-kuttl-tests/keystone-55ff946595-f8gjf" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.227595 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-combined-ca-bundle\") pod \"keystone-55ff946595-f8gjf\" (UID: \"4969c1af-53c0-435a-bd06-6bd493c81c80\") " pod="openstack-kuttl-tests/keystone-55ff946595-f8gjf" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.227625 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-fernet-keys\") pod \"keystone-55ff946595-f8gjf\" (UID: \"4969c1af-53c0-435a-bd06-6bd493c81c80\") " pod="openstack-kuttl-tests/keystone-55ff946595-f8gjf" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.227650 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5d3423c4-ff78-4ff7-b42b-b3c93b309d52-public-tls-certs\") pod \"neutron-6b64ccd79d-5qj2x\" (UID: \"5d3423c4-ff78-4ff7-b42b-b3c93b309d52\") " pod="openstack-kuttl-tests/neutron-6b64ccd79d-5qj2x" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.227669 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/5d3423c4-ff78-4ff7-b42b-b3c93b309d52-httpd-config\") pod \"neutron-6b64ccd79d-5qj2x\" (UID: \"5d3423c4-ff78-4ff7-b42b-b3c93b309d52\") " pod="openstack-kuttl-tests/neutron-6b64ccd79d-5qj2x" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.227693 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3735ca3d-3764-4d36-b912-fbf0bfb96dd8-config-data\") pod \"barbican-api-5df68bf4dd-zbx8h\" (UID: \"3735ca3d-3764-4d36-b912-fbf0bfb96dd8\") " pod="openstack-kuttl-tests/barbican-api-5df68bf4dd-zbx8h" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.228781 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3735ca3d-3764-4d36-b912-fbf0bfb96dd8-logs\") pod \"barbican-api-5df68bf4dd-zbx8h\" (UID: \"3735ca3d-3764-4d36-b912-fbf0bfb96dd8\") " pod="openstack-kuttl-tests/barbican-api-5df68bf4dd-zbx8h" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.229897 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3735ca3d-3764-4d36-b912-fbf0bfb96dd8-internal-tls-certs\") pod \"barbican-api-5df68bf4dd-zbx8h\" (UID: \"3735ca3d-3764-4d36-b912-fbf0bfb96dd8\") " pod="openstack-kuttl-tests/barbican-api-5df68bf4dd-zbx8h" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.232666 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3735ca3d-3764-4d36-b912-fbf0bfb96dd8-config-data-custom\") pod \"barbican-api-5df68bf4dd-zbx8h\" (UID: \"3735ca3d-3764-4d36-b912-fbf0bfb96dd8\") " pod="openstack-kuttl-tests/barbican-api-5df68bf4dd-zbx8h" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.232965 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-fernet-keys\") pod \"keystone-55ff946595-f8gjf\" (UID: \"4969c1af-53c0-435a-bd06-6bd493c81c80\") " pod="openstack-kuttl-tests/keystone-55ff946595-f8gjf" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.232986 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-credential-keys\") pod \"keystone-55ff946595-f8gjf\" (UID: \"4969c1af-53c0-435a-bd06-6bd493c81c80\") " pod="openstack-kuttl-tests/keystone-55ff946595-f8gjf" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.234664 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-public-tls-certs\") pod \"keystone-55ff946595-f8gjf\" (UID: \"4969c1af-53c0-435a-bd06-6bd493c81c80\") " pod="openstack-kuttl-tests/keystone-55ff946595-f8gjf" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.235583 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3735ca3d-3764-4d36-b912-fbf0bfb96dd8-config-data\") pod \"barbican-api-5df68bf4dd-zbx8h\" (UID: \"3735ca3d-3764-4d36-b912-fbf0bfb96dd8\") " pod="openstack-kuttl-tests/barbican-api-5df68bf4dd-zbx8h" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.238922 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-config-data\") pod \"keystone-55ff946595-f8gjf\" (UID: \"4969c1af-53c0-435a-bd06-6bd493c81c80\") " pod="openstack-kuttl-tests/keystone-55ff946595-f8gjf" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.240040 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3735ca3d-3764-4d36-b912-fbf0bfb96dd8-combined-ca-bundle\") pod \"barbican-api-5df68bf4dd-zbx8h\" (UID: \"3735ca3d-3764-4d36-b912-fbf0bfb96dd8\") " pod="openstack-kuttl-tests/barbican-api-5df68bf4dd-zbx8h" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.240671 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3735ca3d-3764-4d36-b912-fbf0bfb96dd8-internal-tls-certs\") pod \"barbican-api-5df68bf4dd-zbx8h\" (UID: \"3735ca3d-3764-4d36-b912-fbf0bfb96dd8\") " pod="openstack-kuttl-tests/barbican-api-5df68bf4dd-zbx8h" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.240718 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-scripts\") pod \"keystone-55ff946595-f8gjf\" (UID: \"4969c1af-53c0-435a-bd06-6bd493c81c80\") " pod="openstack-kuttl-tests/keystone-55ff946595-f8gjf" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.241866 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-combined-ca-bundle\") pod \"keystone-55ff946595-f8gjf\" (UID: \"4969c1af-53c0-435a-bd06-6bd493c81c80\") " pod="openstack-kuttl-tests/keystone-55ff946595-f8gjf" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.242442 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/openstack-cell1-galera-0" podUID="6374f20d-dc8a-4f1c-9df3-086e6904b394" containerName="galera" containerID="cri-o://14cc352d3f4cfc88e8fb040c94fae40dfc936c241aaccfd3a719350b13e1a6d1" gracePeriod=30 Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.245300 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3735ca3d-3764-4d36-b912-fbf0bfb96dd8-public-tls-certs\") pod \"barbican-api-5df68bf4dd-zbx8h\" (UID: \"3735ca3d-3764-4d36-b912-fbf0bfb96dd8\") " pod="openstack-kuttl-tests/barbican-api-5df68bf4dd-zbx8h" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.246640 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x9556\" (UniqueName: 
\"kubernetes.io/projected/3735ca3d-3764-4d36-b912-fbf0bfb96dd8-kube-api-access-x9556\") pod \"barbican-api-5df68bf4dd-zbx8h\" (UID: \"3735ca3d-3764-4d36-b912-fbf0bfb96dd8\") " pod="openstack-kuttl-tests/barbican-api-5df68bf4dd-zbx8h" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.247286 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-internal-tls-certs\") pod \"keystone-55ff946595-f8gjf\" (UID: \"4969c1af-53c0-435a-bd06-6bd493c81c80\") " pod="openstack-kuttl-tests/keystone-55ff946595-f8gjf" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.257668 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mq58f\" (UniqueName: \"kubernetes.io/projected/4969c1af-53c0-435a-bd06-6bd493c81c80-kube-api-access-mq58f\") pod \"keystone-55ff946595-f8gjf\" (UID: \"4969c1af-53c0-435a-bd06-6bd493c81c80\") " pod="openstack-kuttl-tests/keystone-55ff946595-f8gjf" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.262394 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/openstack-galera-0" podUID="0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae" containerName="galera" containerID="cri-o://b8b4710c6079f7f8ec88f1289e78374ee84e488da8b6978ef790e87233e7f5fb" gracePeriod=30 Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.280203 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovn-northd-0_0c535ada-ed58-4c94-82c9-d6b1a35f78be/ovn-northd/0.log" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.280275 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.331373 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/5d3423c4-ff78-4ff7-b42b-b3c93b309d52-config\") pod \"neutron-6b64ccd79d-5qj2x\" (UID: \"5d3423c4-ff78-4ff7-b42b-b3c93b309d52\") " pod="openstack-kuttl-tests/neutron-6b64ccd79d-5qj2x" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.331618 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/5d3423c4-ff78-4ff7-b42b-b3c93b309d52-ovndb-tls-certs\") pod \"neutron-6b64ccd79d-5qj2x\" (UID: \"5d3423c4-ff78-4ff7-b42b-b3c93b309d52\") " pod="openstack-kuttl-tests/neutron-6b64ccd79d-5qj2x" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.331674 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d3423c4-ff78-4ff7-b42b-b3c93b309d52-combined-ca-bundle\") pod \"neutron-6b64ccd79d-5qj2x\" (UID: \"5d3423c4-ff78-4ff7-b42b-b3c93b309d52\") " pod="openstack-kuttl-tests/neutron-6b64ccd79d-5qj2x" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.331703 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5d3423c4-ff78-4ff7-b42b-b3c93b309d52-internal-tls-certs\") pod \"neutron-6b64ccd79d-5qj2x\" (UID: \"5d3423c4-ff78-4ff7-b42b-b3c93b309d52\") " pod="openstack-kuttl-tests/neutron-6b64ccd79d-5qj2x" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.331757 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/5d3423c4-ff78-4ff7-b42b-b3c93b309d52-public-tls-certs\") pod \"neutron-6b64ccd79d-5qj2x\" (UID: \"5d3423c4-ff78-4ff7-b42b-b3c93b309d52\") " pod="openstack-kuttl-tests/neutron-6b64ccd79d-5qj2x" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.331776 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/5d3423c4-ff78-4ff7-b42b-b3c93b309d52-httpd-config\") pod \"neutron-6b64ccd79d-5qj2x\" (UID: \"5d3423c4-ff78-4ff7-b42b-b3c93b309d52\") " pod="openstack-kuttl-tests/neutron-6b64ccd79d-5qj2x" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.331839 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4cln2\" (UniqueName: \"kubernetes.io/projected/5d3423c4-ff78-4ff7-b42b-b3c93b309d52-kube-api-access-4cln2\") pod \"neutron-6b64ccd79d-5qj2x\" (UID: \"5d3423c4-ff78-4ff7-b42b-b3c93b309d52\") " pod="openstack-kuttl-tests/neutron-6b64ccd79d-5qj2x" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.339989 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5d3423c4-ff78-4ff7-b42b-b3c93b309d52-public-tls-certs\") pod \"neutron-6b64ccd79d-5qj2x\" (UID: \"5d3423c4-ff78-4ff7-b42b-b3c93b309d52\") " pod="openstack-kuttl-tests/neutron-6b64ccd79d-5qj2x" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.341224 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/5d3423c4-ff78-4ff7-b42b-b3c93b309d52-config\") pod \"neutron-6b64ccd79d-5qj2x\" (UID: \"5d3423c4-ff78-4ff7-b42b-b3c93b309d52\") " pod="openstack-kuttl-tests/neutron-6b64ccd79d-5qj2x" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.341669 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5d3423c4-ff78-4ff7-b42b-b3c93b309d52-internal-tls-certs\") pod \"neutron-6b64ccd79d-5qj2x\" (UID: \"5d3423c4-ff78-4ff7-b42b-b3c93b309d52\") " pod="openstack-kuttl-tests/neutron-6b64ccd79d-5qj2x" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.348758 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/5d3423c4-ff78-4ff7-b42b-b3c93b309d52-ovndb-tls-certs\") pod \"neutron-6b64ccd79d-5qj2x\" (UID: \"5d3423c4-ff78-4ff7-b42b-b3c93b309d52\") " pod="openstack-kuttl-tests/neutron-6b64ccd79d-5qj2x" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.350746 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4cln2\" (UniqueName: \"kubernetes.io/projected/5d3423c4-ff78-4ff7-b42b-b3c93b309d52-kube-api-access-4cln2\") pod \"neutron-6b64ccd79d-5qj2x\" (UID: \"5d3423c4-ff78-4ff7-b42b-b3c93b309d52\") " pod="openstack-kuttl-tests/neutron-6b64ccd79d-5qj2x" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.351319 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/5d3423c4-ff78-4ff7-b42b-b3c93b309d52-httpd-config\") pod \"neutron-6b64ccd79d-5qj2x\" (UID: \"5d3423c4-ff78-4ff7-b42b-b3c93b309d52\") " pod="openstack-kuttl-tests/neutron-6b64ccd79d-5qj2x" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.354798 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/5d3423c4-ff78-4ff7-b42b-b3c93b309d52-combined-ca-bundle\") pod \"neutron-6b64ccd79d-5qj2x\" (UID: \"5d3423c4-ff78-4ff7-b42b-b3c93b309d52\") " pod="openstack-kuttl-tests/neutron-6b64ccd79d-5qj2x" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.384556 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-d54787f8d-7fznp" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.393407 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-55ff946595-f8gjf" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.403142 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-api-5df68bf4dd-zbx8h" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.430348 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-6b64ccd79d-5qj2x" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.432603 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/0c535ada-ed58-4c94-82c9-d6b1a35f78be-metrics-certs-tls-certs\") pod \"0c535ada-ed58-4c94-82c9-d6b1a35f78be\" (UID: \"0c535ada-ed58-4c94-82c9-d6b1a35f78be\") " Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.432716 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/0c535ada-ed58-4c94-82c9-d6b1a35f78be-ovn-northd-tls-certs\") pod \"0c535ada-ed58-4c94-82c9-d6b1a35f78be\" (UID: \"0c535ada-ed58-4c94-82c9-d6b1a35f78be\") " Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.432761 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/0c535ada-ed58-4c94-82c9-d6b1a35f78be-ovn-rundir\") pod \"0c535ada-ed58-4c94-82c9-d6b1a35f78be\" (UID: \"0c535ada-ed58-4c94-82c9-d6b1a35f78be\") " Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.432805 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0c535ada-ed58-4c94-82c9-d6b1a35f78be-config\") pod \"0c535ada-ed58-4c94-82c9-d6b1a35f78be\" (UID: \"0c535ada-ed58-4c94-82c9-d6b1a35f78be\") " Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.432858 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tv7cz\" (UniqueName: \"kubernetes.io/projected/0c535ada-ed58-4c94-82c9-d6b1a35f78be-kube-api-access-tv7cz\") pod \"0c535ada-ed58-4c94-82c9-d6b1a35f78be\" (UID: \"0c535ada-ed58-4c94-82c9-d6b1a35f78be\") " Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.432886 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0c535ada-ed58-4c94-82c9-d6b1a35f78be-combined-ca-bundle\") pod \"0c535ada-ed58-4c94-82c9-d6b1a35f78be\" (UID: \"0c535ada-ed58-4c94-82c9-d6b1a35f78be\") " Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.432903 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0c535ada-ed58-4c94-82c9-d6b1a35f78be-scripts\") pod \"0c535ada-ed58-4c94-82c9-d6b1a35f78be\" (UID: \"0c535ada-ed58-4c94-82c9-d6b1a35f78be\") " Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.433968 4558 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0c535ada-ed58-4c94-82c9-d6b1a35f78be-scripts" (OuterVolumeSpecName: "scripts") pod "0c535ada-ed58-4c94-82c9-d6b1a35f78be" (UID: "0c535ada-ed58-4c94-82c9-d6b1a35f78be"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.434673 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0c535ada-ed58-4c94-82c9-d6b1a35f78be-config" (OuterVolumeSpecName: "config") pod "0c535ada-ed58-4c94-82c9-d6b1a35f78be" (UID: "0c535ada-ed58-4c94-82c9-d6b1a35f78be"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.434912 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0c535ada-ed58-4c94-82c9-d6b1a35f78be-ovn-rundir" (OuterVolumeSpecName: "ovn-rundir") pod "0c535ada-ed58-4c94-82c9-d6b1a35f78be" (UID: "0c535ada-ed58-4c94-82c9-d6b1a35f78be"). InnerVolumeSpecName "ovn-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.448371 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0c535ada-ed58-4c94-82c9-d6b1a35f78be-kube-api-access-tv7cz" (OuterVolumeSpecName: "kube-api-access-tv7cz") pod "0c535ada-ed58-4c94-82c9-d6b1a35f78be" (UID: "0c535ada-ed58-4c94-82c9-d6b1a35f78be"). InnerVolumeSpecName "kube-api-access-tv7cz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.468778 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0c535ada-ed58-4c94-82c9-d6b1a35f78be-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0c535ada-ed58-4c94-82c9-d6b1a35f78be" (UID: "0c535ada-ed58-4c94-82c9-d6b1a35f78be"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.514589 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-nb-0_8805bbf2-dd7b-41db-89ee-8b1d3053bf02/ovsdbserver-nb/0.log" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.514801 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.515668 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0c535ada-ed58-4c94-82c9-d6b1a35f78be-ovn-northd-tls-certs" (OuterVolumeSpecName: "ovn-northd-tls-certs") pod "0c535ada-ed58-4c94-82c9-d6b1a35f78be" (UID: "0c535ada-ed58-4c94-82c9-d6b1a35f78be"). InnerVolumeSpecName "ovn-northd-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.543963 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tv7cz\" (UniqueName: \"kubernetes.io/projected/0c535ada-ed58-4c94-82c9-d6b1a35f78be-kube-api-access-tv7cz\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.543990 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0c535ada-ed58-4c94-82c9-d6b1a35f78be-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.544000 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0c535ada-ed58-4c94-82c9-d6b1a35f78be-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.544009 4558 reconciler_common.go:293] "Volume detached for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/0c535ada-ed58-4c94-82c9-d6b1a35f78be-ovn-northd-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.544017 4558 reconciler_common.go:293] "Volume detached for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/0c535ada-ed58-4c94-82c9-d6b1a35f78be-ovn-rundir\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.544026 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0c535ada-ed58-4c94-82c9-d6b1a35f78be-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.552082 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0c535ada-ed58-4c94-82c9-d6b1a35f78be-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "0c535ada-ed58-4c94-82c9-d6b1a35f78be" (UID: "0c535ada-ed58-4c94-82c9-d6b1a35f78be"). InnerVolumeSpecName "metrics-certs-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.647823 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/8805bbf2-dd7b-41db-89ee-8b1d3053bf02-ovsdb-rundir\") pod \"8805bbf2-dd7b-41db-89ee-8b1d3053bf02\" (UID: \"8805bbf2-dd7b-41db-89ee-8b1d3053bf02\") " Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.647869 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/8805bbf2-dd7b-41db-89ee-8b1d3053bf02-metrics-certs-tls-certs\") pod \"8805bbf2-dd7b-41db-89ee-8b1d3053bf02\" (UID: \"8805bbf2-dd7b-41db-89ee-8b1d3053bf02\") " Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.647962 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8805bbf2-dd7b-41db-89ee-8b1d3053bf02-scripts\") pod \"8805bbf2-dd7b-41db-89ee-8b1d3053bf02\" (UID: \"8805bbf2-dd7b-41db-89ee-8b1d3053bf02\") " Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.647991 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/8805bbf2-dd7b-41db-89ee-8b1d3053bf02-ovsdbserver-nb-tls-certs\") pod \"8805bbf2-dd7b-41db-89ee-8b1d3053bf02\" (UID: \"8805bbf2-dd7b-41db-89ee-8b1d3053bf02\") " Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.648031 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cccjj\" (UniqueName: \"kubernetes.io/projected/8805bbf2-dd7b-41db-89ee-8b1d3053bf02-kube-api-access-cccjj\") pod \"8805bbf2-dd7b-41db-89ee-8b1d3053bf02\" (UID: \"8805bbf2-dd7b-41db-89ee-8b1d3053bf02\") " Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.648072 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-nb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"8805bbf2-dd7b-41db-89ee-8b1d3053bf02\" (UID: \"8805bbf2-dd7b-41db-89ee-8b1d3053bf02\") " Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.648090 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8805bbf2-dd7b-41db-89ee-8b1d3053bf02-config\") pod \"8805bbf2-dd7b-41db-89ee-8b1d3053bf02\" (UID: \"8805bbf2-dd7b-41db-89ee-8b1d3053bf02\") " Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.648128 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8805bbf2-dd7b-41db-89ee-8b1d3053bf02-combined-ca-bundle\") pod \"8805bbf2-dd7b-41db-89ee-8b1d3053bf02\" (UID: \"8805bbf2-dd7b-41db-89ee-8b1d3053bf02\") " Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.648615 4558 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/0c535ada-ed58-4c94-82c9-d6b1a35f78be-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.663397 4558 generic.go:334] "Generic (PLEG): container finished" podID="316ffdc9-b0e1-47d9-90f8-45a8ceb87353" containerID="1e113a1489cb99ef7da66f10ddf493b5813e1713d85e6d3338e7700ffa711d7c" exitCode=0 Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.663477 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"316ffdc9-b0e1-47d9-90f8-45a8ceb87353","Type":"ContainerDied","Data":"1e113a1489cb99ef7da66f10ddf493b5813e1713d85e6d3338e7700ffa711d7c"} Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.667717 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8805bbf2-dd7b-41db-89ee-8b1d3053bf02-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "8805bbf2-dd7b-41db-89ee-8b1d3053bf02" (UID: "8805bbf2-dd7b-41db-89ee-8b1d3053bf02"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.670485 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8805bbf2-dd7b-41db-89ee-8b1d3053bf02-scripts" (OuterVolumeSpecName: "scripts") pod "8805bbf2-dd7b-41db-89ee-8b1d3053bf02" (UID: "8805bbf2-dd7b-41db-89ee-8b1d3053bf02"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.670646 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8805bbf2-dd7b-41db-89ee-8b1d3053bf02-config" (OuterVolumeSpecName: "config") pod "8805bbf2-dd7b-41db-89ee-8b1d3053bf02" (UID: "8805bbf2-dd7b-41db-89ee-8b1d3053bf02"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.670782 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "ovndbcluster-nb-etc-ovn") pod "8805bbf2-dd7b-41db-89ee-8b1d3053bf02" (UID: "8805bbf2-dd7b-41db-89ee-8b1d3053bf02"). InnerVolumeSpecName "local-storage08-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.708539 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-sb-0_1c0d7766-0fb5-47ec-87f4-6644f5b1afd9/ovsdbserver-sb/0.log" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.708580 4558 generic.go:334] "Generic (PLEG): container finished" podID="1c0d7766-0fb5-47ec-87f4-6644f5b1afd9" containerID="6fa53430a72d92b62c65c8602e3f7ecea38be4fd2c7715f04903b7006b2c6625" exitCode=2 Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.708595 4558 generic.go:334] "Generic (PLEG): container finished" podID="1c0d7766-0fb5-47ec-87f4-6644f5b1afd9" containerID="20a844916c974def3060dd83baa14a82ac52a34e846bc5a85b9c976771061eb1" exitCode=143 Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.708651 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"1c0d7766-0fb5-47ec-87f4-6644f5b1afd9","Type":"ContainerDied","Data":"6fa53430a72d92b62c65c8602e3f7ecea38be4fd2c7715f04903b7006b2c6625"} Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.708676 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"1c0d7766-0fb5-47ec-87f4-6644f5b1afd9","Type":"ContainerDied","Data":"20a844916c974def3060dd83baa14a82ac52a34e846bc5a85b9c976771061eb1"} Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.718504 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8805bbf2-dd7b-41db-89ee-8b1d3053bf02-kube-api-access-cccjj" (OuterVolumeSpecName: "kube-api-access-cccjj") pod "8805bbf2-dd7b-41db-89ee-8b1d3053bf02" (UID: "8805bbf2-dd7b-41db-89ee-8b1d3053bf02"). InnerVolumeSpecName "kube-api-access-cccjj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.778106 4558 generic.go:334] "Generic (PLEG): container finished" podID="cae28711-fbe7-40cc-8f31-4b6332ab5378" containerID="7f9dbce9ba44238985c3e3d3fb5a42a8258f76871b2f32a165f9c1ead22275de" exitCode=143 Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.778361 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"cae28711-fbe7-40cc-8f31-4b6332ab5378","Type":"ContainerDied","Data":"7f9dbce9ba44238985c3e3d3fb5a42a8258f76871b2f32a165f9c1ead22275de"} Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.783352 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8805bbf2-dd7b-41db-89ee-8b1d3053bf02-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.783382 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cccjj\" (UniqueName: \"kubernetes.io/projected/8805bbf2-dd7b-41db-89ee-8b1d3053bf02-kube-api-access-cccjj\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.783403 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.783412 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8805bbf2-dd7b-41db-89ee-8b1d3053bf02-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.783421 4558 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/8805bbf2-dd7b-41db-89ee-8b1d3053bf02-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.799460 4558 generic.go:334] "Generic (PLEG): container finished" podID="d2ccd35d-37c4-450d-b04c-ac505e35b0e8" containerID="2f43a86fbf8e121d1c2b64dc721cdb7d0f3e79ac1af1c83f48879ac356eba5d7" exitCode=143 Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.799522 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"d2ccd35d-37c4-450d-b04c-ac505e35b0e8","Type":"ContainerDied","Data":"2f43a86fbf8e121d1c2b64dc721cdb7d0f3e79ac1af1c83f48879ac356eba5d7"} Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.809197 4558 generic.go:334] "Generic (PLEG): container finished" podID="e554db35-2dba-4138-9ca0-bd1371a9c63d" containerID="60c80db7b21795bae641e9692b2deb97128b55cb001a272936bfaf668d4bfcfb" exitCode=143 Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.809297 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"e554db35-2dba-4138-9ca0-bd1371a9c63d","Type":"ContainerDied","Data":"60c80db7b21795bae641e9692b2deb97128b55cb001a272936bfaf668d4bfcfb"} Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.813952 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-nb-0_8805bbf2-dd7b-41db-89ee-8b1d3053bf02/ovsdbserver-nb/0.log" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.813987 4558 generic.go:334] "Generic (PLEG): container finished" podID="8805bbf2-dd7b-41db-89ee-8b1d3053bf02" containerID="110cfb1c28534cceb930fa4d3f07dc6b34a497e54cb137356436aeb344c172e2" 
exitCode=2 Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.814002 4558 generic.go:334] "Generic (PLEG): container finished" podID="8805bbf2-dd7b-41db-89ee-8b1d3053bf02" containerID="3827b2816f3cb25c8c183f31035843c25fc855df97604f4ab511f8bc21e7c1ef" exitCode=143 Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.814039 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"8805bbf2-dd7b-41db-89ee-8b1d3053bf02","Type":"ContainerDied","Data":"110cfb1c28534cceb930fa4d3f07dc6b34a497e54cb137356436aeb344c172e2"} Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.814060 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"8805bbf2-dd7b-41db-89ee-8b1d3053bf02","Type":"ContainerDied","Data":"3827b2816f3cb25c8c183f31035843c25fc855df97604f4ab511f8bc21e7c1ef"} Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.814069 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"8805bbf2-dd7b-41db-89ee-8b1d3053bf02","Type":"ContainerDied","Data":"6af10391dc0133fa2595b1be05648390a7170e51d6d9a3dbae2c77b9b689b77c"} Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.814083 4558 scope.go:117] "RemoveContainer" containerID="110cfb1c28534cceb930fa4d3f07dc6b34a497e54cb137356436aeb344c172e2" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.814243 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.829116 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8805bbf2-dd7b-41db-89ee-8b1d3053bf02-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8805bbf2-dd7b-41db-89ee-8b1d3053bf02" (UID: "8805bbf2-dd7b-41db-89ee-8b1d3053bf02"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.832973 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8805bbf2-dd7b-41db-89ee-8b1d3053bf02-ovsdbserver-nb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-nb-tls-certs") pod "8805bbf2-dd7b-41db-89ee-8b1d3053bf02" (UID: "8805bbf2-dd7b-41db-89ee-8b1d3053bf02"). InnerVolumeSpecName "ovsdbserver-nb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.841685 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.855875 4558 generic.go:334] "Generic (PLEG): container finished" podID="764c7a15-6a1a-470c-9d0b-a63ed418cc09" containerID="5fcc9425b33989b0ac07f50e1aa0774a071177ee9453a094ad6d619c7974603b" exitCode=143 Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.855951 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"764c7a15-6a1a-470c-9d0b-a63ed418cc09","Type":"ContainerDied","Data":"5fcc9425b33989b0ac07f50e1aa0774a071177ee9453a094ad6d619c7974603b"} Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.878389 4558 generic.go:334] "Generic (PLEG): container finished" podID="b28e6999-784e-4577-88bb-db648f7a3cbc" containerID="ed2110d076583425a8af34103095d99e22acc0c9b835eb2f9a4e87e9dc475ca6" exitCode=143 Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.878442 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"b28e6999-784e-4577-88bb-db648f7a3cbc","Type":"ContainerDied","Data":"ed2110d076583425a8af34103095d99e22acc0c9b835eb2f9a4e87e9dc475ca6"} Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.895366 4558 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/8805bbf2-dd7b-41db-89ee-8b1d3053bf02-ovsdbserver-nb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.895391 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.895403 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8805bbf2-dd7b-41db-89ee-8b1d3053bf02-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.910110 4558 generic.go:334] "Generic (PLEG): container finished" podID="b8911a4f-a706-4956-9028-138c018a92ba" containerID="d3bb244923af1279a2fec8556cc18369058a01a9dc93f5e798291670f59b37c6" exitCode=0 Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.910258 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"b8911a4f-a706-4956-9028-138c018a92ba","Type":"ContainerDied","Data":"d3bb244923af1279a2fec8556cc18369058a01a9dc93f5e798291670f59b37c6"} Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.910300 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8805bbf2-dd7b-41db-89ee-8b1d3053bf02-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "8805bbf2-dd7b-41db-89ee-8b1d3053bf02" (UID: "8805bbf2-dd7b-41db-89ee-8b1d3053bf02"). InnerVolumeSpecName "metrics-certs-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.954702 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-worker-c4f864f89-58gb9"] Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.955120 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovn-northd-0_0c535ada-ed58-4c94-82c9-d6b1a35f78be/ovn-northd/0.log" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.955180 4558 generic.go:334] "Generic (PLEG): container finished" podID="0c535ada-ed58-4c94-82c9-d6b1a35f78be" containerID="644a2c213ff45475c1b94047c5229ac3c3fe716ff973c59e6790b1314050f1f3" exitCode=2 Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.955197 4558 generic.go:334] "Generic (PLEG): container finished" podID="0c535ada-ed58-4c94-82c9-d6b1a35f78be" containerID="bf9413112ce86eb3a9f0b2a6c2c7bc61341d600bd5c50302655d4e980e7c1cc1" exitCode=143 Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.955222 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"0c535ada-ed58-4c94-82c9-d6b1a35f78be","Type":"ContainerDied","Data":"644a2c213ff45475c1b94047c5229ac3c3fe716ff973c59e6790b1314050f1f3"} Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.955260 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"0c535ada-ed58-4c94-82c9-d6b1a35f78be","Type":"ContainerDied","Data":"bf9413112ce86eb3a9f0b2a6c2c7bc61341d600bd5c50302655d4e980e7c1cc1"} Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.955269 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"0c535ada-ed58-4c94-82c9-d6b1a35f78be","Type":"ContainerDied","Data":"1e0838a1f2660b8c9c0cd79709800f43140956dd9e3eb4529eb3969ecad42019"} Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.955338 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:00:41 crc kubenswrapper[4558]: I0120 17:00:41.999051 4558 scope.go:117] "RemoveContainer" containerID="3827b2816f3cb25c8c183f31035843c25fc855df97604f4ab511f8bc21e7c1ef" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:41.999794 4558 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/8805bbf2-dd7b-41db-89ee-8b1d3053bf02-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:42 crc kubenswrapper[4558]: E0120 17:00:42.005272 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="6074e6e8906baa2f1dffa9b96ff41ea08d5604ac43ef2c6593b686fd35af868a" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.011502 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-7fc5477d66-pz8q2"] Jan 20 17:00:42 crc kubenswrapper[4558]: E0120 17:00:42.049491 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="6074e6e8906baa2f1dffa9b96ff41ea08d5604ac43ef2c6593b686fd35af868a" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.050140 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-f537-account-create-update-5crz6"] Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.088241 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-f537-account-create-update-5crz6"] Jan 20 17:00:42 crc kubenswrapper[4558]: E0120 17:00:42.092329 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="6074e6e8906baa2f1dffa9b96ff41ea08d5604ac43ef2c6593b686fd35af868a" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:00:42 crc kubenswrapper[4558]: E0120 17:00:42.092405 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="9ea3b871-4db3-4108-baea-e57a23d9d6c5" containerName="nova-cell0-conductor-conductor" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.163529 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-sb-0_1c0d7766-0fb5-47ec-87f4-6644f5b1afd9/ovsdbserver-sb/0.log" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.163595 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.171773 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-b594-account-create-update-8q4dg"] Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.175896 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.184123 4558 scope.go:117] "RemoveContainer" containerID="110cfb1c28534cceb930fa4d3f07dc6b34a497e54cb137356436aeb344c172e2" Jan 20 17:00:42 crc kubenswrapper[4558]: E0120 17:00:42.197693 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"110cfb1c28534cceb930fa4d3f07dc6b34a497e54cb137356436aeb344c172e2\": container with ID starting with 110cfb1c28534cceb930fa4d3f07dc6b34a497e54cb137356436aeb344c172e2 not found: ID does not exist" containerID="110cfb1c28534cceb930fa4d3f07dc6b34a497e54cb137356436aeb344c172e2" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.197724 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"110cfb1c28534cceb930fa4d3f07dc6b34a497e54cb137356436aeb344c172e2"} err="failed to get container status \"110cfb1c28534cceb930fa4d3f07dc6b34a497e54cb137356436aeb344c172e2\": rpc error: code = NotFound desc = could not find container \"110cfb1c28534cceb930fa4d3f07dc6b34a497e54cb137356436aeb344c172e2\": container with ID starting with 110cfb1c28534cceb930fa4d3f07dc6b34a497e54cb137356436aeb344c172e2 not found: ID does not exist" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.197746 4558 scope.go:117] "RemoveContainer" containerID="3827b2816f3cb25c8c183f31035843c25fc855df97604f4ab511f8bc21e7c1ef" Jan 20 17:00:42 crc kubenswrapper[4558]: E0120 17:00:42.199501 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3827b2816f3cb25c8c183f31035843c25fc855df97604f4ab511f8bc21e7c1ef\": container with ID starting with 3827b2816f3cb25c8c183f31035843c25fc855df97604f4ab511f8bc21e7c1ef not found: ID does not exist" containerID="3827b2816f3cb25c8c183f31035843c25fc855df97604f4ab511f8bc21e7c1ef" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.199540 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3827b2816f3cb25c8c183f31035843c25fc855df97604f4ab511f8bc21e7c1ef"} err="failed to get container status \"3827b2816f3cb25c8c183f31035843c25fc855df97604f4ab511f8bc21e7c1ef\": rpc error: code = NotFound desc = could not find container \"3827b2816f3cb25c8c183f31035843c25fc855df97604f4ab511f8bc21e7c1ef\": container with ID starting with 3827b2816f3cb25c8c183f31035843c25fc855df97604f4ab511f8bc21e7c1ef not found: ID does not exist" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.199582 4558 scope.go:117] "RemoveContainer" containerID="110cfb1c28534cceb930fa4d3f07dc6b34a497e54cb137356436aeb344c172e2" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.213282 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"110cfb1c28534cceb930fa4d3f07dc6b34a497e54cb137356436aeb344c172e2"} err="failed to get container status \"110cfb1c28534cceb930fa4d3f07dc6b34a497e54cb137356436aeb344c172e2\": rpc error: code = NotFound desc = could not find container \"110cfb1c28534cceb930fa4d3f07dc6b34a497e54cb137356436aeb344c172e2\": container with ID starting with 110cfb1c28534cceb930fa4d3f07dc6b34a497e54cb137356436aeb344c172e2 not found: ID does not exist" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.213315 4558 scope.go:117] "RemoveContainer" containerID="3827b2816f3cb25c8c183f31035843c25fc855df97604f4ab511f8bc21e7c1ef" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 
17:00:42.219371 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3827b2816f3cb25c8c183f31035843c25fc855df97604f4ab511f8bc21e7c1ef"} err="failed to get container status \"3827b2816f3cb25c8c183f31035843c25fc855df97604f4ab511f8bc21e7c1ef\": rpc error: code = NotFound desc = could not find container \"3827b2816f3cb25c8c183f31035843c25fc855df97604f4ab511f8bc21e7c1ef\": container with ID starting with 3827b2816f3cb25c8c183f31035843c25fc855df97604f4ab511f8bc21e7c1ef not found: ID does not exist" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.219395 4558 scope.go:117] "RemoveContainer" containerID="644a2c213ff45475c1b94047c5229ac3c3fe716ff973c59e6790b1314050f1f3" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.224890 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-b594-account-create-update-8q4dg"] Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.256340 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.274381 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.283421 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-0"] Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.301733 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:00:42 crc kubenswrapper[4558]: E0120 17:00:42.302105 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c535ada-ed58-4c94-82c9-d6b1a35f78be" containerName="openstack-network-exporter" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.302122 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c535ada-ed58-4c94-82c9-d6b1a35f78be" containerName="openstack-network-exporter" Jan 20 17:00:42 crc kubenswrapper[4558]: E0120 17:00:42.302141 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c535ada-ed58-4c94-82c9-d6b1a35f78be" containerName="ovn-northd" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.302147 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c535ada-ed58-4c94-82c9-d6b1a35f78be" containerName="ovn-northd" Jan 20 17:00:42 crc kubenswrapper[4558]: E0120 17:00:42.302182 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8805bbf2-dd7b-41db-89ee-8b1d3053bf02" containerName="openstack-network-exporter" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.302188 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8805bbf2-dd7b-41db-89ee-8b1d3053bf02" containerName="openstack-network-exporter" Jan 20 17:00:42 crc kubenswrapper[4558]: E0120 17:00:42.302197 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="316ffdc9-b0e1-47d9-90f8-45a8ceb87353" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.302203 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="316ffdc9-b0e1-47d9-90f8-45a8ceb87353" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:00:42 crc kubenswrapper[4558]: E0120 17:00:42.302225 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c0d7766-0fb5-47ec-87f4-6644f5b1afd9" containerName="openstack-network-exporter" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.302238 4558 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="1c0d7766-0fb5-47ec-87f4-6644f5b1afd9" containerName="openstack-network-exporter" Jan 20 17:00:42 crc kubenswrapper[4558]: E0120 17:00:42.302251 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c0d7766-0fb5-47ec-87f4-6644f5b1afd9" containerName="ovsdbserver-sb" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.302257 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c0d7766-0fb5-47ec-87f4-6644f5b1afd9" containerName="ovsdbserver-sb" Jan 20 17:00:42 crc kubenswrapper[4558]: E0120 17:00:42.302270 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8805bbf2-dd7b-41db-89ee-8b1d3053bf02" containerName="ovsdbserver-nb" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.302275 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8805bbf2-dd7b-41db-89ee-8b1d3053bf02" containerName="ovsdbserver-nb" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.302448 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c0d7766-0fb5-47ec-87f4-6644f5b1afd9" containerName="ovsdbserver-sb" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.302462 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8805bbf2-dd7b-41db-89ee-8b1d3053bf02" containerName="ovsdbserver-nb" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.302470 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="316ffdc9-b0e1-47d9-90f8-45a8ceb87353" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.302482 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c0d7766-0fb5-47ec-87f4-6644f5b1afd9" containerName="openstack-network-exporter" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.302490 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="0c535ada-ed58-4c94-82c9-d6b1a35f78be" containerName="openstack-network-exporter" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.302499 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8805bbf2-dd7b-41db-89ee-8b1d3053bf02" containerName="openstack-network-exporter" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.302507 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="0c535ada-ed58-4c94-82c9-d6b1a35f78be" containerName="ovn-northd" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.303376 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.307850 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovnnorthd-scripts" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.308302 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovnnorthd-config" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.308437 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ovnnorthd-ovndbs" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.308548 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ovnnorthd-ovnnorthd-dockercfg-czv6c" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.314233 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/root-account-create-update-fbtv5"] Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.315117 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-fbtv5" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.315876 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/1c0d7766-0fb5-47ec-87f4-6644f5b1afd9-ovsdb-rundir\") pod \"1c0d7766-0fb5-47ec-87f4-6644f5b1afd9\" (UID: \"1c0d7766-0fb5-47ec-87f4-6644f5b1afd9\") " Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.315928 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/1c0d7766-0fb5-47ec-87f4-6644f5b1afd9-metrics-certs-tls-certs\") pod \"1c0d7766-0fb5-47ec-87f4-6644f5b1afd9\" (UID: \"1c0d7766-0fb5-47ec-87f4-6644f5b1afd9\") " Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.315951 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1c0d7766-0fb5-47ec-87f4-6644f5b1afd9-config\") pod \"1c0d7766-0fb5-47ec-87f4-6644f5b1afd9\" (UID: \"1c0d7766-0fb5-47ec-87f4-6644f5b1afd9\") " Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.315983 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-sb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage19-crc\") pod \"1c0d7766-0fb5-47ec-87f4-6644f5b1afd9\" (UID: \"1c0d7766-0fb5-47ec-87f4-6644f5b1afd9\") " Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.316011 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/1c0d7766-0fb5-47ec-87f4-6644f5b1afd9-ovsdbserver-sb-tls-certs\") pod \"1c0d7766-0fb5-47ec-87f4-6644f5b1afd9\" (UID: \"1c0d7766-0fb5-47ec-87f4-6644f5b1afd9\") " Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.316036 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xtfh\" (UniqueName: \"kubernetes.io/projected/316ffdc9-b0e1-47d9-90f8-45a8ceb87353-kube-api-access-9xtfh\") pod \"316ffdc9-b0e1-47d9-90f8-45a8ceb87353\" (UID: \"316ffdc9-b0e1-47d9-90f8-45a8ceb87353\") " Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.316050 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1c0d7766-0fb5-47ec-87f4-6644f5b1afd9-scripts\") pod \"1c0d7766-0fb5-47ec-87f4-6644f5b1afd9\" (UID: \"1c0d7766-0fb5-47ec-87f4-6644f5b1afd9\") " Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.316099 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/316ffdc9-b0e1-47d9-90f8-45a8ceb87353-combined-ca-bundle\") pod \"316ffdc9-b0e1-47d9-90f8-45a8ceb87353\" (UID: \"316ffdc9-b0e1-47d9-90f8-45a8ceb87353\") " Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.316150 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/316ffdc9-b0e1-47d9-90f8-45a8ceb87353-config-data\") pod \"316ffdc9-b0e1-47d9-90f8-45a8ceb87353\" (UID: \"316ffdc9-b0e1-47d9-90f8-45a8ceb87353\") " Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.316184 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/316ffdc9-b0e1-47d9-90f8-45a8ceb87353-nova-novncproxy-tls-certs\") pod 
\"316ffdc9-b0e1-47d9-90f8-45a8ceb87353\" (UID: \"316ffdc9-b0e1-47d9-90f8-45a8ceb87353\") " Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.316218 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6sgcz\" (UniqueName: \"kubernetes.io/projected/1c0d7766-0fb5-47ec-87f4-6644f5b1afd9-kube-api-access-6sgcz\") pod \"1c0d7766-0fb5-47ec-87f4-6644f5b1afd9\" (UID: \"1c0d7766-0fb5-47ec-87f4-6644f5b1afd9\") " Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.316236 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c0d7766-0fb5-47ec-87f4-6644f5b1afd9-combined-ca-bundle\") pod \"1c0d7766-0fb5-47ec-87f4-6644f5b1afd9\" (UID: \"1c0d7766-0fb5-47ec-87f4-6644f5b1afd9\") " Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.316250 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/316ffdc9-b0e1-47d9-90f8-45a8ceb87353-vencrypt-tls-certs\") pod \"316ffdc9-b0e1-47d9-90f8-45a8ceb87353\" (UID: \"316ffdc9-b0e1-47d9-90f8-45a8ceb87353\") " Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.316489 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1c0d7766-0fb5-47ec-87f4-6644f5b1afd9-config" (OuterVolumeSpecName: "config") pod "1c0d7766-0fb5-47ec-87f4-6644f5b1afd9" (UID: "1c0d7766-0fb5-47ec-87f4-6644f5b1afd9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.316805 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1c0d7766-0fb5-47ec-87f4-6644f5b1afd9-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.316978 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1c0d7766-0fb5-47ec-87f4-6644f5b1afd9-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "1c0d7766-0fb5-47ec-87f4-6644f5b1afd9" (UID: "1c0d7766-0fb5-47ec-87f4-6644f5b1afd9"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.319938 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1c0d7766-0fb5-47ec-87f4-6644f5b1afd9-scripts" (OuterVolumeSpecName: "scripts") pod "1c0d7766-0fb5-47ec-87f4-6644f5b1afd9" (UID: "1c0d7766-0fb5-47ec-87f4-6644f5b1afd9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.338431 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"openstack-cell1-mariadb-root-db-secret" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.352923 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.362490 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage19-crc" (OuterVolumeSpecName: "ovndbcluster-sb-etc-ovn") pod "1c0d7766-0fb5-47ec-87f4-6644f5b1afd9" (UID: "1c0d7766-0fb5-47ec-87f4-6644f5b1afd9"). InnerVolumeSpecName "local-storage19-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.363354 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/316ffdc9-b0e1-47d9-90f8-45a8ceb87353-kube-api-access-9xtfh" (OuterVolumeSpecName: "kube-api-access-9xtfh") pod "316ffdc9-b0e1-47d9-90f8-45a8ceb87353" (UID: "316ffdc9-b0e1-47d9-90f8-45a8ceb87353"). InnerVolumeSpecName "kube-api-access-9xtfh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.372540 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1c0d7766-0fb5-47ec-87f4-6644f5b1afd9-kube-api-access-6sgcz" (OuterVolumeSpecName: "kube-api-access-6sgcz") pod "1c0d7766-0fb5-47ec-87f4-6644f5b1afd9" (UID: "1c0d7766-0fb5-47ec-87f4-6644f5b1afd9"). InnerVolumeSpecName "kube-api-access-6sgcz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.406995 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/316ffdc9-b0e1-47d9-90f8-45a8ceb87353-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "316ffdc9-b0e1-47d9-90f8-45a8ceb87353" (UID: "316ffdc9-b0e1-47d9-90f8-45a8ceb87353"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.422010 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/54c4bfe3-42bf-46ea-8c23-0621f7b65686-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"54c4bfe3-42bf-46ea-8c23-0621f7b65686\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.422043 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m6sxc\" (UniqueName: \"kubernetes.io/projected/e353328b-4fe7-4637-8174-a3b227ad9761-kube-api-access-m6sxc\") pod \"root-account-create-update-fbtv5\" (UID: \"e353328b-4fe7-4637-8174-a3b227ad9761\") " pod="openstack-kuttl-tests/root-account-create-update-fbtv5" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.422100 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/54c4bfe3-42bf-46ea-8c23-0621f7b65686-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"54c4bfe3-42bf-46ea-8c23-0621f7b65686\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.422126 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/54c4bfe3-42bf-46ea-8c23-0621f7b65686-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"54c4bfe3-42bf-46ea-8c23-0621f7b65686\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.422304 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54c4bfe3-42bf-46ea-8c23-0621f7b65686-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"54c4bfe3-42bf-46ea-8c23-0621f7b65686\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.422328 4558 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/54c4bfe3-42bf-46ea-8c23-0621f7b65686-scripts\") pod \"ovn-northd-0\" (UID: \"54c4bfe3-42bf-46ea-8c23-0621f7b65686\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.422359 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/54c4bfe3-42bf-46ea-8c23-0621f7b65686-config\") pod \"ovn-northd-0\" (UID: \"54c4bfe3-42bf-46ea-8c23-0621f7b65686\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.422377 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e353328b-4fe7-4637-8174-a3b227ad9761-operator-scripts\") pod \"root-account-create-update-fbtv5\" (UID: \"e353328b-4fe7-4637-8174-a3b227ad9761\") " pod="openstack-kuttl-tests/root-account-create-update-fbtv5" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.422398 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zscx2\" (UniqueName: \"kubernetes.io/projected/54c4bfe3-42bf-46ea-8c23-0621f7b65686-kube-api-access-zscx2\") pod \"ovn-northd-0\" (UID: \"54c4bfe3-42bf-46ea-8c23-0621f7b65686\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.422507 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xtfh\" (UniqueName: \"kubernetes.io/projected/316ffdc9-b0e1-47d9-90f8-45a8ceb87353-kube-api-access-9xtfh\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.422517 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1c0d7766-0fb5-47ec-87f4-6644f5b1afd9-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.422526 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/316ffdc9-b0e1-47d9-90f8-45a8ceb87353-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.422534 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6sgcz\" (UniqueName: \"kubernetes.io/projected/1c0d7766-0fb5-47ec-87f4-6644f5b1afd9-kube-api-access-6sgcz\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.422541 4558 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/1c0d7766-0fb5-47ec-87f4-6644f5b1afd9-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.422557 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage19-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage19-crc\") on node \"crc\" " Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.425807 4558 scope.go:117] "RemoveContainer" containerID="bf9413112ce86eb3a9f0b2a6c2c7bc61341d600bd5c50302655d4e980e7c1cc1" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.426583 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-fbtv5"] Jan 20 17:00:42 crc kubenswrapper[4558]: E0120 17:00:42.426714 4558 configmap.go:193] Couldn't get 
configMap openstack-kuttl-tests/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Jan 20 17:00:42 crc kubenswrapper[4558]: E0120 17:00:42.426759 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/bf355276-9e62-474e-bfb1-616dde5b83bc-config-data podName:bf355276-9e62-474e-bfb1-616dde5b83bc nodeName:}" failed. No retries permitted until 2026-01-20 17:00:42.926745694 +0000 UTC m=+1136.687083661 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/bf355276-9e62-474e-bfb1-616dde5b83bc-config-data") pod "rabbitmq-cell1-server-0" (UID: "bf355276-9e62-474e-bfb1-616dde5b83bc") : configmap "rabbitmq-cell1-config-data" not found Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.464486 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/316ffdc9-b0e1-47d9-90f8-45a8ceb87353-config-data" (OuterVolumeSpecName: "config-data") pod "316ffdc9-b0e1-47d9-90f8-45a8ceb87353" (UID: "316ffdc9-b0e1-47d9-90f8-45a8ceb87353"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.496392 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.509249 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.554559 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54c4bfe3-42bf-46ea-8c23-0621f7b65686-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"54c4bfe3-42bf-46ea-8c23-0621f7b65686\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.554613 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/54c4bfe3-42bf-46ea-8c23-0621f7b65686-scripts\") pod \"ovn-northd-0\" (UID: \"54c4bfe3-42bf-46ea-8c23-0621f7b65686\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.554659 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/54c4bfe3-42bf-46ea-8c23-0621f7b65686-config\") pod \"ovn-northd-0\" (UID: \"54c4bfe3-42bf-46ea-8c23-0621f7b65686\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.554685 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e353328b-4fe7-4637-8174-a3b227ad9761-operator-scripts\") pod \"root-account-create-update-fbtv5\" (UID: \"e353328b-4fe7-4637-8174-a3b227ad9761\") " pod="openstack-kuttl-tests/root-account-create-update-fbtv5" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.554714 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zscx2\" (UniqueName: \"kubernetes.io/projected/54c4bfe3-42bf-46ea-8c23-0621f7b65686-kube-api-access-zscx2\") pod \"ovn-northd-0\" (UID: \"54c4bfe3-42bf-46ea-8c23-0621f7b65686\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.554854 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/54c4bfe3-42bf-46ea-8c23-0621f7b65686-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"54c4bfe3-42bf-46ea-8c23-0621f7b65686\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.554880 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m6sxc\" (UniqueName: \"kubernetes.io/projected/e353328b-4fe7-4637-8174-a3b227ad9761-kube-api-access-m6sxc\") pod \"root-account-create-update-fbtv5\" (UID: \"e353328b-4fe7-4637-8174-a3b227ad9761\") " pod="openstack-kuttl-tests/root-account-create-update-fbtv5" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.554943 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/54c4bfe3-42bf-46ea-8c23-0621f7b65686-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"54c4bfe3-42bf-46ea-8c23-0621f7b65686\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.554975 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/54c4bfe3-42bf-46ea-8c23-0621f7b65686-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"54c4bfe3-42bf-46ea-8c23-0621f7b65686\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.555184 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/316ffdc9-b0e1-47d9-90f8-45a8ceb87353-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.555656 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/54c4bfe3-42bf-46ea-8c23-0621f7b65686-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"54c4bfe3-42bf-46ea-8c23-0621f7b65686\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:00:42 crc kubenswrapper[4558]: E0120 17:00:42.555754 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:00:42 crc kubenswrapper[4558]: E0120 17:00:42.555798 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/54c4bfe3-42bf-46ea-8c23-0621f7b65686-combined-ca-bundle podName:54c4bfe3-42bf-46ea-8c23-0621f7b65686 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:43.055784914 +0000 UTC m=+1136.816122881 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/54c4bfe3-42bf-46ea-8c23-0621f7b65686-combined-ca-bundle") pod "ovn-northd-0" (UID: "54c4bfe3-42bf-46ea-8c23-0621f7b65686") : secret "combined-ca-bundle" not found Jan 20 17:00:42 crc kubenswrapper[4558]: E0120 17:00:42.555840 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/ovnnorthd-scripts: configmap "ovnnorthd-scripts" not found Jan 20 17:00:42 crc kubenswrapper[4558]: E0120 17:00:42.555863 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/54c4bfe3-42bf-46ea-8c23-0621f7b65686-scripts podName:54c4bfe3-42bf-46ea-8c23-0621f7b65686 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:43.055856799 +0000 UTC m=+1136.816194766 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "scripts" (UniqueName: "kubernetes.io/configmap/54c4bfe3-42bf-46ea-8c23-0621f7b65686-scripts") pod "ovn-northd-0" (UID: "54c4bfe3-42bf-46ea-8c23-0621f7b65686") : configmap "ovnnorthd-scripts" not found Jan 20 17:00:42 crc kubenswrapper[4558]: E0120 17:00:42.555892 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/ovnnorthd-config: configmap "ovnnorthd-config" not found Jan 20 17:00:42 crc kubenswrapper[4558]: E0120 17:00:42.555911 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/54c4bfe3-42bf-46ea-8c23-0621f7b65686-config podName:54c4bfe3-42bf-46ea-8c23-0621f7b65686 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:43.055906752 +0000 UTC m=+1136.816244719 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/54c4bfe3-42bf-46ea-8c23-0621f7b65686-config") pod "ovn-northd-0" (UID: "54c4bfe3-42bf-46ea-8c23-0621f7b65686") : configmap "ovnnorthd-config" not found Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.556462 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e353328b-4fe7-4637-8174-a3b227ad9761-operator-scripts\") pod \"root-account-create-update-fbtv5\" (UID: \"e353328b-4fe7-4637-8174-a3b227ad9761\") " pod="openstack-kuttl-tests/root-account-create-update-fbtv5" Jan 20 17:00:42 crc kubenswrapper[4558]: E0120 17:00:42.556765 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-ovnnorthd-ovndbs: secret "cert-ovnnorthd-ovndbs" not found Jan 20 17:00:42 crc kubenswrapper[4558]: E0120 17:00:42.556797 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/54c4bfe3-42bf-46ea-8c23-0621f7b65686-ovn-northd-tls-certs podName:54c4bfe3-42bf-46ea-8c23-0621f7b65686 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:43.056786668 +0000 UTC m=+1136.817124635 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "ovn-northd-tls-certs" (UniqueName: "kubernetes.io/secret/54c4bfe3-42bf-46ea-8c23-0621f7b65686-ovn-northd-tls-certs") pod "ovn-northd-0" (UID: "54c4bfe3-42bf-46ea-8c23-0621f7b65686") : secret "cert-ovnnorthd-ovndbs" not found Jan 20 17:00:42 crc kubenswrapper[4558]: E0120 17:00:42.556963 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-ovn-metrics: secret "cert-ovn-metrics" not found Jan 20 17:00:42 crc kubenswrapper[4558]: E0120 17:00:42.556989 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/54c4bfe3-42bf-46ea-8c23-0621f7b65686-metrics-certs-tls-certs podName:54c4bfe3-42bf-46ea-8c23-0621f7b65686 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:43.056982335 +0000 UTC m=+1136.817320302 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs-tls-certs" (UniqueName: "kubernetes.io/secret/54c4bfe3-42bf-46ea-8c23-0621f7b65686-metrics-certs-tls-certs") pod "ovn-northd-0" (UID: "54c4bfe3-42bf-46ea-8c23-0621f7b65686") : secret "cert-ovn-metrics" not found Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.557512 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-64a7-account-create-update-5hw2k"] Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.563376 4558 scope.go:117] "RemoveContainer" containerID="644a2c213ff45475c1b94047c5229ac3c3fe716ff973c59e6790b1314050f1f3" Jan 20 17:00:42 crc kubenswrapper[4558]: E0120 17:00:42.570940 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"644a2c213ff45475c1b94047c5229ac3c3fe716ff973c59e6790b1314050f1f3\": container with ID starting with 644a2c213ff45475c1b94047c5229ac3c3fe716ff973c59e6790b1314050f1f3 not found: ID does not exist" containerID="644a2c213ff45475c1b94047c5229ac3c3fe716ff973c59e6790b1314050f1f3" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.570988 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"644a2c213ff45475c1b94047c5229ac3c3fe716ff973c59e6790b1314050f1f3"} err="failed to get container status \"644a2c213ff45475c1b94047c5229ac3c3fe716ff973c59e6790b1314050f1f3\": rpc error: code = NotFound desc = could not find container \"644a2c213ff45475c1b94047c5229ac3c3fe716ff973c59e6790b1314050f1f3\": container with ID starting with 644a2c213ff45475c1b94047c5229ac3c3fe716ff973c59e6790b1314050f1f3 not found: ID does not exist" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.571008 4558 scope.go:117] "RemoveContainer" containerID="bf9413112ce86eb3a9f0b2a6c2c7bc61341d600bd5c50302655d4e980e7c1cc1" Jan 20 17:00:42 crc kubenswrapper[4558]: E0120 17:00:42.574561 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bf9413112ce86eb3a9f0b2a6c2c7bc61341d600bd5c50302655d4e980e7c1cc1\": container with ID starting with bf9413112ce86eb3a9f0b2a6c2c7bc61341d600bd5c50302655d4e980e7c1cc1 not found: ID does not exist" containerID="bf9413112ce86eb3a9f0b2a6c2c7bc61341d600bd5c50302655d4e980e7c1cc1" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.574595 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bf9413112ce86eb3a9f0b2a6c2c7bc61341d600bd5c50302655d4e980e7c1cc1"} err="failed to get container status \"bf9413112ce86eb3a9f0b2a6c2c7bc61341d600bd5c50302655d4e980e7c1cc1\": rpc error: code = NotFound desc = could not find container \"bf9413112ce86eb3a9f0b2a6c2c7bc61341d600bd5c50302655d4e980e7c1cc1\": container with ID starting with bf9413112ce86eb3a9f0b2a6c2c7bc61341d600bd5c50302655d4e980e7c1cc1 not found: ID does not exist" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.574632 4558 scope.go:117] "RemoveContainer" containerID="644a2c213ff45475c1b94047c5229ac3c3fe716ff973c59e6790b1314050f1f3" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.587260 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"644a2c213ff45475c1b94047c5229ac3c3fe716ff973c59e6790b1314050f1f3"} err="failed to get container status \"644a2c213ff45475c1b94047c5229ac3c3fe716ff973c59e6790b1314050f1f3\": rpc error: code = NotFound desc = could not find container 
\"644a2c213ff45475c1b94047c5229ac3c3fe716ff973c59e6790b1314050f1f3\": container with ID starting with 644a2c213ff45475c1b94047c5229ac3c3fe716ff973c59e6790b1314050f1f3 not found: ID does not exist" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.587293 4558 scope.go:117] "RemoveContainer" containerID="bf9413112ce86eb3a9f0b2a6c2c7bc61341d600bd5c50302655d4e980e7c1cc1" Jan 20 17:00:42 crc kubenswrapper[4558]: E0120 17:00:42.590740 4558 projected.go:194] Error preparing data for projected volume kube-api-access-zscx2 for pod openstack-kuttl-tests/ovn-northd-0: failed to fetch token: serviceaccounts "ovnnorthd-ovnnorthd" not found Jan 20 17:00:42 crc kubenswrapper[4558]: E0120 17:00:42.590800 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/54c4bfe3-42bf-46ea-8c23-0621f7b65686-kube-api-access-zscx2 podName:54c4bfe3-42bf-46ea-8c23-0621f7b65686 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:43.090781475 +0000 UTC m=+1136.851119442 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-zscx2" (UniqueName: "kubernetes.io/projected/54c4bfe3-42bf-46ea-8c23-0621f7b65686-kube-api-access-zscx2") pod "ovn-northd-0" (UID: "54c4bfe3-42bf-46ea-8c23-0621f7b65686") : failed to fetch token: serviceaccounts "ovnnorthd-ovnnorthd" not found Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.601478 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bf9413112ce86eb3a9f0b2a6c2c7bc61341d600bd5c50302655d4e980e7c1cc1"} err="failed to get container status \"bf9413112ce86eb3a9f0b2a6c2c7bc61341d600bd5c50302655d4e980e7c1cc1\": rpc error: code = NotFound desc = could not find container \"bf9413112ce86eb3a9f0b2a6c2c7bc61341d600bd5c50302655d4e980e7c1cc1\": container with ID starting with bf9413112ce86eb3a9f0b2a6c2c7bc61341d600bd5c50302655d4e980e7c1cc1 not found: ID does not exist" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.629261 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m6sxc\" (UniqueName: \"kubernetes.io/projected/e353328b-4fe7-4637-8174-a3b227ad9761-kube-api-access-m6sxc\") pod \"root-account-create-update-fbtv5\" (UID: \"e353328b-4fe7-4637-8174-a3b227ad9761\") " pod="openstack-kuttl-tests/root-account-create-update-fbtv5" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.649192 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0c535ada-ed58-4c94-82c9-d6b1a35f78be" path="/var/lib/kubelet/pods/0c535ada-ed58-4c94-82c9-d6b1a35f78be/volumes" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.649948 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2c7d824d-0fa4-4a19-971b-88e9f1da0707" path="/var/lib/kubelet/pods/2c7d824d-0fa4-4a19-971b-88e9f1da0707/volumes" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.652513 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8805bbf2-dd7b-41db-89ee-8b1d3053bf02" path="/var/lib/kubelet/pods/8805bbf2-dd7b-41db-89ee-8b1d3053bf02/volumes" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.653307 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e0c2bc6c-ea80-49a0-876f-f0a4d29c12bd" path="/var/lib/kubelet/pods/e0c2bc6c-ea80-49a0-876f-f0a4d29c12bd/volumes" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.667301 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/placement-64a7-account-create-update-5hw2k"] Jan 20 17:00:42 crc 
kubenswrapper[4558]: I0120 17:00:42.667346 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-b5f5-account-create-update-5mqdg"] Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.703924 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell0-b5f5-account-create-update-5mqdg"] Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.704517 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-fbtv5" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.718095 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage19-crc" (UniqueName: "kubernetes.io/local-volume/local-storage19-crc") on node "crc" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.772362 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/316ffdc9-b0e1-47d9-90f8-45a8ceb87353-vencrypt-tls-certs" (OuterVolumeSpecName: "vencrypt-tls-certs") pod "316ffdc9-b0e1-47d9-90f8-45a8ceb87353" (UID: "316ffdc9-b0e1-47d9-90f8-45a8ceb87353"). InnerVolumeSpecName "vencrypt-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.776149 4558 reconciler_common.go:293] "Volume detached for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/316ffdc9-b0e1-47d9-90f8-45a8ceb87353-vencrypt-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.776200 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage19-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage19-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.799386 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-hmgt7"] Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.809764 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c0d7766-0fb5-47ec-87f4-6644f5b1afd9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1c0d7766-0fb5-47ec-87f4-6644f5b1afd9" (UID: "1c0d7766-0fb5-47ec-87f4-6644f5b1afd9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.823577 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-hmgt7"] Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.840018 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/316ffdc9-b0e1-47d9-90f8-45a8ceb87353-nova-novncproxy-tls-certs" (OuterVolumeSpecName: "nova-novncproxy-tls-certs") pod "316ffdc9-b0e1-47d9-90f8-45a8ceb87353" (UID: "316ffdc9-b0e1-47d9-90f8-45a8ceb87353"). InnerVolumeSpecName "nova-novncproxy-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.842335 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c0d7766-0fb5-47ec-87f4-6644f5b1afd9-ovsdbserver-sb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-sb-tls-certs") pod "1c0d7766-0fb5-47ec-87f4-6644f5b1afd9" (UID: "1c0d7766-0fb5-47ec-87f4-6644f5b1afd9"). InnerVolumeSpecName "ovsdbserver-sb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.875750 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c0d7766-0fb5-47ec-87f4-6644f5b1afd9-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "1c0d7766-0fb5-47ec-87f4-6644f5b1afd9" (UID: "1c0d7766-0fb5-47ec-87f4-6644f5b1afd9"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.877825 4558 reconciler_common.go:293] "Volume detached for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/316ffdc9-b0e1-47d9-90f8-45a8ceb87353-nova-novncproxy-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.877840 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c0d7766-0fb5-47ec-87f4-6644f5b1afd9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.877849 4558 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/1c0d7766-0fb5-47ec-87f4-6644f5b1afd9-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.877860 4558 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/1c0d7766-0fb5-47ec-87f4-6644f5b1afd9-ovsdbserver-sb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.889413 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-db-sync-2hhvl"] Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.912424 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-db-sync-2hhvl"] Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.954438 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-db-sync-jwg24"] Jan 20 17:00:42 crc kubenswrapper[4558]: I0120 17:00:42.981035 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-db-sync-jwg24"] Jan 20 17:00:42 crc kubenswrapper[4558]: E0120 17:00:42.982499 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Jan 20 17:00:42 crc kubenswrapper[4558]: E0120 17:00:42.982553 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/bf355276-9e62-474e-bfb1-616dde5b83bc-config-data podName:bf355276-9e62-474e-bfb1-616dde5b83bc nodeName:}" failed. No retries permitted until 2026-01-20 17:00:43.982539892 +0000 UTC m=+1137.742877859 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/bf355276-9e62-474e-bfb1-616dde5b83bc-config-data") pod "rabbitmq-cell1-server-0" (UID: "bf355276-9e62-474e-bfb1-616dde5b83bc") : configmap "rabbitmq-cell1-config-data" not found Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:42.997332 4558 generic.go:334] "Generic (PLEG): container finished" podID="324ace6f-8bac-4269-a674-d9b6e990cd18" containerID="93541e9fd292f8f2cfa2ba3ec3bd317e9352f88de020c5959cf2c84ba5b458ba" exitCode=0 Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:42.997449 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"324ace6f-8bac-4269-a674-d9b6e990cd18","Type":"ContainerDied","Data":"93541e9fd292f8f2cfa2ba3ec3bd317e9352f88de020c5959cf2c84ba5b458ba"} Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.003412 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-55ff946595-f8gjf"] Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.009960 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-d54787f8d-7fznp"] Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.009980 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-6b64ccd79d-5qj2x" event={"ID":"5d3423c4-ff78-4ff7-b42b-b3c93b309d52","Type":"ContainerStarted","Data":"be6803793562d2e8a11899885d3f7d76044a96ffc3c42039db66dbfe66c75870"} Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.011384 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-sb-0_1c0d7766-0fb5-47ec-87f4-6644f5b1afd9/ovsdbserver-sb/0.log" Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.011437 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"1c0d7766-0fb5-47ec-87f4-6644f5b1afd9","Type":"ContainerDied","Data":"580e103bbc5f07f4f33408a521c976313d301c9db8538c7a4d7ef229996839a0"} Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.011464 4558 scope.go:117] "RemoveContainer" containerID="6fa53430a72d92b62c65c8602e3f7ecea38be4fd2c7715f04903b7006b2c6625" Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.011553 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.016309 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-9ff1-account-create-update-zlhvt"] Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.019458 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-7fc5477d66-pz8q2" event={"ID":"7776f921-0ae7-4a8c-b444-cf5b4b9a4f65","Type":"ContainerStarted","Data":"522cb191427e167fad026915618db8f72d3e02041ca1567fe15a15da684ab56d"} Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.021603 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-9ff1-account-create-update-zlhvt"] Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.025470 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-55ff946595-f8gjf" event={"ID":"4969c1af-53c0-435a-bd06-6bd493c81c80","Type":"ContainerStarted","Data":"09444088968f7e54351d7b5dedb081cfcb5cef9b269cf941bc6cb339aae637c4"} Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.026454 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.027314 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="8074f6f3-f564-42bc-b08b-28ffe75bbbc5" containerName="ceilometer-central-agent" containerID="cri-o://cc735ea1e3ff81a6e1e946cb0561445b4ff17c02725e6a7c5e3b8fb04ea0d90d" gracePeriod=30 Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.027343 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="8074f6f3-f564-42bc-b08b-28ffe75bbbc5" containerName="sg-core" containerID="cri-o://37fbfba85457fa31861b89574c96f8a2be864e14e57e69c2dae33967885a1430" gracePeriod=30 Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.027383 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="8074f6f3-f564-42bc-b08b-28ffe75bbbc5" containerName="ceilometer-notification-agent" containerID="cri-o://255c4f9294e2677844d684383d43130961e7ee3e4db470ecb77910d99c7f5de5" gracePeriod=30 Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.027424 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="8074f6f3-f564-42bc-b08b-28ffe75bbbc5" containerName="proxy-httpd" containerID="cri-o://20cf0ea0e052fa597f57928ad72ba1f7803b3e9d839d21a4246b51731cb87948" gracePeriod=30 Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.042669 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-d54787f8d-7fznp" event={"ID":"c268a2f5-7c67-4935-8f3e-bdd83aeccc95","Type":"ContainerStarted","Data":"ed64705417da772675613972d81e2d37068b2ee1e7b594e60bfac14293e54bd0"} Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.042998 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-cell-mapping-6zmz2"] Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.044728 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-c4f864f89-58gb9" 
event={"ID":"de7f5467-1e83-42f0-86bb-ade85deec8f3","Type":"ContainerStarted","Data":"6a1a27bb8b609381b6880d7309447e5d15f349864bd55fc2d2324115d9939990"} Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.044754 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-c4f864f89-58gb9" event={"ID":"de7f5467-1e83-42f0-86bb-ade85deec8f3","Type":"ContainerStarted","Data":"90bf224df8f11e1c14a75f4497785be566707b0ba13b9201222741bded247634"} Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.064450 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-cell-mapping-6zmz2"] Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.064766 4558 generic.go:334] "Generic (PLEG): container finished" podID="6374f20d-dc8a-4f1c-9df3-086e6904b394" containerID="14cc352d3f4cfc88e8fb040c94fae40dfc936c241aaccfd3a719350b13e1a6d1" exitCode=0 Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.066184 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"6374f20d-dc8a-4f1c-9df3-086e6904b394","Type":"ContainerDied","Data":"14cc352d3f4cfc88e8fb040c94fae40dfc936c241aaccfd3a719350b13e1a6d1"} Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.067482 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"316ffdc9-b0e1-47d9-90f8-45a8ceb87353","Type":"ContainerDied","Data":"22ed144e22a0a787f2f5ee5d3f4132a65715e67507c454535805f97f41f9b903"} Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.067911 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.081779 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-5df68bf4dd-zbx8h" event={"ID":"3735ca3d-3764-4d36-b912-fbf0bfb96dd8","Type":"ContainerStarted","Data":"9c6d137248fccebe77001c6e099d43d98f6be1d320a8e169d6e922bf865a5bc8"} Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.083543 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/54c4bfe3-42bf-46ea-8c23-0621f7b65686-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"54c4bfe3-42bf-46ea-8c23-0621f7b65686\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.083602 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/54c4bfe3-42bf-46ea-8c23-0621f7b65686-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"54c4bfe3-42bf-46ea-8c23-0621f7b65686\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.083654 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54c4bfe3-42bf-46ea-8c23-0621f7b65686-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"54c4bfe3-42bf-46ea-8c23-0621f7b65686\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.083676 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/54c4bfe3-42bf-46ea-8c23-0621f7b65686-scripts\") pod \"ovn-northd-0\" (UID: \"54c4bfe3-42bf-46ea-8c23-0621f7b65686\") " 
pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.083701 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/54c4bfe3-42bf-46ea-8c23-0621f7b65686-config\") pod \"ovn-northd-0\" (UID: \"54c4bfe3-42bf-46ea-8c23-0621f7b65686\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:00:43 crc kubenswrapper[4558]: E0120 17:00:43.083777 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/ovnnorthd-scripts: configmap "ovnnorthd-scripts" not found Jan 20 17:00:43 crc kubenswrapper[4558]: E0120 17:00:43.083792 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-ovn-metrics: secret "cert-ovn-metrics" not found Jan 20 17:00:43 crc kubenswrapper[4558]: E0120 17:00:43.083816 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/ovnnorthd-config: configmap "ovnnorthd-config" not found Jan 20 17:00:43 crc kubenswrapper[4558]: E0120 17:00:43.083834 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/54c4bfe3-42bf-46ea-8c23-0621f7b65686-scripts podName:54c4bfe3-42bf-46ea-8c23-0621f7b65686 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:44.083819279 +0000 UTC m=+1137.844157246 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "scripts" (UniqueName: "kubernetes.io/configmap/54c4bfe3-42bf-46ea-8c23-0621f7b65686-scripts") pod "ovn-northd-0" (UID: "54c4bfe3-42bf-46ea-8c23-0621f7b65686") : configmap "ovnnorthd-scripts" not found Jan 20 17:00:43 crc kubenswrapper[4558]: E0120 17:00:43.083853 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/54c4bfe3-42bf-46ea-8c23-0621f7b65686-config podName:54c4bfe3-42bf-46ea-8c23-0621f7b65686 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:44.08384119 +0000 UTC m=+1137.844179157 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/54c4bfe3-42bf-46ea-8c23-0621f7b65686-config") pod "ovn-northd-0" (UID: "54c4bfe3-42bf-46ea-8c23-0621f7b65686") : configmap "ovnnorthd-config" not found Jan 20 17:00:43 crc kubenswrapper[4558]: E0120 17:00:43.083866 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/54c4bfe3-42bf-46ea-8c23-0621f7b65686-metrics-certs-tls-certs podName:54c4bfe3-42bf-46ea-8c23-0621f7b65686 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:44.083860466 +0000 UTC m=+1137.844198433 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs-tls-certs" (UniqueName: "kubernetes.io/secret/54c4bfe3-42bf-46ea-8c23-0621f7b65686-metrics-certs-tls-certs") pod "ovn-northd-0" (UID: "54c4bfe3-42bf-46ea-8c23-0621f7b65686") : secret "cert-ovn-metrics" not found Jan 20 17:00:43 crc kubenswrapper[4558]: E0120 17:00:43.083852 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:00:43 crc kubenswrapper[4558]: E0120 17:00:43.083969 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/54c4bfe3-42bf-46ea-8c23-0621f7b65686-combined-ca-bundle podName:54c4bfe3-42bf-46ea-8c23-0621f7b65686 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:44.083955606 +0000 UTC m=+1137.844293562 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/54c4bfe3-42bf-46ea-8c23-0621f7b65686-combined-ca-bundle") pod "ovn-northd-0" (UID: "54c4bfe3-42bf-46ea-8c23-0621f7b65686") : secret "combined-ca-bundle" not found Jan 20 17:00:43 crc kubenswrapper[4558]: E0120 17:00:43.084262 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-ovnnorthd-ovndbs: secret "cert-ovnnorthd-ovndbs" not found Jan 20 17:00:43 crc kubenswrapper[4558]: E0120 17:00:43.084319 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/54c4bfe3-42bf-46ea-8c23-0621f7b65686-ovn-northd-tls-certs podName:54c4bfe3-42bf-46ea-8c23-0621f7b65686 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:44.084303019 +0000 UTC m=+1137.844640986 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "ovn-northd-tls-certs" (UniqueName: "kubernetes.io/secret/54c4bfe3-42bf-46ea-8c23-0621f7b65686-ovn-northd-tls-certs") pod "ovn-northd-0" (UID: "54c4bfe3-42bf-46ea-8c23-0621f7b65686") : secret "cert-ovnnorthd-ovndbs" not found Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.084502 4558 generic.go:334] "Generic (PLEG): container finished" podID="4fe22ec2-af71-47bf-8053-86dee012df91" containerID="00c9d686e4cf00913af81033ddbff9abfc51f8f3c4f1da58ce8a3bff41d506fa" exitCode=137 Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.096910 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-cell-mapping-wn5b6"] Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.115570 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell0-cell-mapping-wn5b6"] Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.124435 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-api-5df68bf4dd-zbx8h"] Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.135376 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-eb81-account-create-update-sr6r9"] Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.138944 4558 scope.go:117] "RemoveContainer" containerID="20a844916c974def3060dd83baa14a82ac52a34e846bc5a85b9c976771061eb1" Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.164067 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.171476 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-eb81-account-create-update-sr6r9"] Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.183130 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-6b64ccd79d-5qj2x"] Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.186023 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zscx2\" (UniqueName: \"kubernetes.io/projected/54c4bfe3-42bf-46ea-8c23-0621f7b65686-kube-api-access-zscx2\") pod \"ovn-northd-0\" (UID: \"54c4bfe3-42bf-46ea-8c23-0621f7b65686\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:00:43 crc kubenswrapper[4558]: E0120 17:00:43.189847 4558 projected.go:194] Error preparing data for projected volume kube-api-access-zscx2 for pod openstack-kuttl-tests/ovn-northd-0: failed to fetch token: serviceaccounts "ovnnorthd-ovnnorthd" not found Jan 20 17:00:43 crc kubenswrapper[4558]: E0120 17:00:43.189915 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/54c4bfe3-42bf-46ea-8c23-0621f7b65686-kube-api-access-zscx2 podName:54c4bfe3-42bf-46ea-8c23-0621f7b65686 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:44.189901107 +0000 UTC m=+1137.950239074 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-zscx2" (UniqueName: "kubernetes.io/projected/54c4bfe3-42bf-46ea-8c23-0621f7b65686-kube-api-access-zscx2") pod "ovn-northd-0" (UID: "54c4bfe3-42bf-46ea-8c23-0621f7b65686") : failed to fetch token: serviceaccounts "ovnnorthd-ovnnorthd" not found Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.191537 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-db-sync-d6pfc"] Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.219793 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/placement-db-sync-d6pfc"] Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.248106 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.256888 4558 scope.go:117] "RemoveContainer" containerID="1e113a1489cb99ef7da66f10ddf493b5813e1713d85e6d3338e7700ffa711d7c" Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.258385 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-db-sync-ckhth"] Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.273798 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-db-sync-ckhth"] Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.285433 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.286656 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/324ace6f-8bac-4269-a674-d9b6e990cd18-combined-ca-bundle\") pod \"324ace6f-8bac-4269-a674-d9b6e990cd18\" (UID: \"324ace6f-8bac-4269-a674-d9b6e990cd18\") " Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.286812 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"memcached-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/324ace6f-8bac-4269-a674-d9b6e990cd18-memcached-tls-certs\") pod \"324ace6f-8bac-4269-a674-d9b6e990cd18\" (UID: \"324ace6f-8bac-4269-a674-d9b6e990cd18\") " Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.286871 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-st8tp\" (UniqueName: \"kubernetes.io/projected/324ace6f-8bac-4269-a674-d9b6e990cd18-kube-api-access-st8tp\") pod \"324ace6f-8bac-4269-a674-d9b6e990cd18\" (UID: \"324ace6f-8bac-4269-a674-d9b6e990cd18\") " Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.286922 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/324ace6f-8bac-4269-a674-d9b6e990cd18-config-data\") pod \"324ace6f-8bac-4269-a674-d9b6e990cd18\" (UID: \"324ace6f-8bac-4269-a674-d9b6e990cd18\") " Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.286964 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/324ace6f-8bac-4269-a674-d9b6e990cd18-kolla-config\") pod \"324ace6f-8bac-4269-a674-d9b6e990cd18\" (UID: \"324ace6f-8bac-4269-a674-d9b6e990cd18\") " Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.288331 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/324ace6f-8bac-4269-a674-d9b6e990cd18-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "324ace6f-8bac-4269-a674-d9b6e990cd18" (UID: "324ace6f-8bac-4269-a674-d9b6e990cd18"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.292625 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/324ace6f-8bac-4269-a674-d9b6e990cd18-config-data" (OuterVolumeSpecName: "config-data") pod "324ace6f-8bac-4269-a674-d9b6e990cd18" (UID: "324ace6f-8bac-4269-a674-d9b6e990cd18"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.303295 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.303442 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/324ace6f-8bac-4269-a674-d9b6e990cd18-kube-api-access-st8tp" (OuterVolumeSpecName: "kube-api-access-st8tp") pod "324ace6f-8bac-4269-a674-d9b6e990cd18" (UID: "324ace6f-8bac-4269-a674-d9b6e990cd18"). InnerVolumeSpecName "kube-api-access-st8tp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.309367 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-a57a-account-create-update-76pmk"] Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.318721 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-a57a-account-create-update-76pmk"] Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.320092 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.362921 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.365260 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:00:43 crc kubenswrapper[4558]: E0120 17:00:43.409571 4558 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 192.168.25.8:34414->192.168.25.8:43883: write tcp 192.168.25.8:34414->192.168.25.8:43883: write: broken pipe Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.422071 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-st8tp\" (UniqueName: \"kubernetes.io/projected/324ace6f-8bac-4269-a674-d9b6e990cd18-kube-api-access-st8tp\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.427260 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-db-sync-mpmc8"] Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.430646 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/324ace6f-8bac-4269-a674-d9b6e990cd18-memcached-tls-certs" (OuterVolumeSpecName: "memcached-tls-certs") pod "324ace6f-8bac-4269-a674-d9b6e990cd18" (UID: "324ace6f-8bac-4269-a674-d9b6e990cd18"). InnerVolumeSpecName "memcached-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:43 crc kubenswrapper[4558]: E0120 17:00:43.435225 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 20 17:00:43 crc kubenswrapper[4558]: E0120 17:00:43.435278 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/3f868eba-f4e1-4e32-b271-391cf271fe97-config-data podName:3f868eba-f4e1-4e32-b271-391cf271fe97 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:43.935261551 +0000 UTC m=+1137.695599518 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/3f868eba-f4e1-4e32-b271-391cf271fe97-config-data") pod "rabbitmq-server-0" (UID: "3f868eba-f4e1-4e32-b271-391cf271fe97") : configmap "rabbitmq-config-data" not found Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.436669 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstackclient" Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.436682 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/324ace6f-8bac-4269-a674-d9b6e990cd18-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.436701 4558 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/324ace6f-8bac-4269-a674-d9b6e990cd18-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.436741 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-db-sync-mpmc8"] Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.444443 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/324ace6f-8bac-4269-a674-d9b6e990cd18-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "324ace6f-8bac-4269-a674-d9b6e990cd18" (UID: "324ace6f-8bac-4269-a674-d9b6e990cd18"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.456260 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.456582 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/kube-state-metrics-0" podUID="3b2ffa52-4d19-46f6-aea5-62fc758def73" containerName="kube-state-metrics" containerID="cri-o://3d3a8f2db93b034973dfab0303e49c56217b13cf7cd940baab7ecf8cfbd2745f" gracePeriod=30 Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.482516 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:00:43 crc kubenswrapper[4558]: E0120 17:00:43.483300 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[combined-ca-bundle config kube-api-access-zscx2 metrics-certs-tls-certs ovn-northd-tls-certs scripts], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack-kuttl-tests/ovn-northd-0" podUID="54c4bfe3-42bf-46ea-8c23-0621f7b65686" Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.502880 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-fbtv5"] Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.536510 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-7e4f-account-create-update-pl5sm"] Jan 20 17:00:43 crc kubenswrapper[4558]: E0120 17:00:43.537260 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6374f20d-dc8a-4f1c-9df3-086e6904b394" containerName="mysql-bootstrap" Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.537279 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6374f20d-dc8a-4f1c-9df3-086e6904b394" containerName="mysql-bootstrap" Jan 20 17:00:43 crc kubenswrapper[4558]: E0120 17:00:43.537294 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="324ace6f-8bac-4269-a674-d9b6e990cd18" containerName="memcached" Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.537300 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="324ace6f-8bac-4269-a674-d9b6e990cd18" containerName="memcached" Jan 20 17:00:43 crc kubenswrapper[4558]: E0120 17:00:43.537314 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6374f20d-dc8a-4f1c-9df3-086e6904b394" containerName="galera" Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.537320 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6374f20d-dc8a-4f1c-9df3-086e6904b394" containerName="galera" Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.537507 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="324ace6f-8bac-4269-a674-d9b6e990cd18" containerName="memcached" Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.537526 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="6374f20d-dc8a-4f1c-9df3-086e6904b394" containerName="galera" Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.538064 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-7e4f-account-create-update-pl5sm" Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.538093 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage17-crc\") pod \"6374f20d-dc8a-4f1c-9df3-086e6904b394\" (UID: \"6374f20d-dc8a-4f1c-9df3-086e6904b394\") " Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.538157 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/6374f20d-dc8a-4f1c-9df3-086e6904b394-config-data-generated\") pod \"6374f20d-dc8a-4f1c-9df3-086e6904b394\" (UID: \"6374f20d-dc8a-4f1c-9df3-086e6904b394\") " Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.538198 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6374f20d-dc8a-4f1c-9df3-086e6904b394-combined-ca-bundle\") pod \"6374f20d-dc8a-4f1c-9df3-086e6904b394\" (UID: \"6374f20d-dc8a-4f1c-9df3-086e6904b394\") " Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.538234 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/6374f20d-dc8a-4f1c-9df3-086e6904b394-galera-tls-certs\") pod \"6374f20d-dc8a-4f1c-9df3-086e6904b394\" (UID: \"6374f20d-dc8a-4f1c-9df3-086e6904b394\") " Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.538265 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/6374f20d-dc8a-4f1c-9df3-086e6904b394-kolla-config\") pod \"6374f20d-dc8a-4f1c-9df3-086e6904b394\" (UID: \"6374f20d-dc8a-4f1c-9df3-086e6904b394\") " Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.538280 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6374f20d-dc8a-4f1c-9df3-086e6904b394-operator-scripts\") pod \"6374f20d-dc8a-4f1c-9df3-086e6904b394\" (UID: \"6374f20d-dc8a-4f1c-9df3-086e6904b394\") " Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.538409 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/6374f20d-dc8a-4f1c-9df3-086e6904b394-config-data-default\") pod \"6374f20d-dc8a-4f1c-9df3-086e6904b394\" (UID: \"6374f20d-dc8a-4f1c-9df3-086e6904b394\") " Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.538449 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-97bg4\" (UniqueName: \"kubernetes.io/projected/6374f20d-dc8a-4f1c-9df3-086e6904b394-kube-api-access-97bg4\") pod \"6374f20d-dc8a-4f1c-9df3-086e6904b394\" (UID: \"6374f20d-dc8a-4f1c-9df3-086e6904b394\") " Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.539377 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/324ace6f-8bac-4269-a674-d9b6e990cd18-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.539391 4558 reconciler_common.go:293] "Volume detached for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/324ace6f-8bac-4269-a674-d9b6e990cd18-memcached-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.541442 4558 
reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-db-secret" Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.541654 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6374f20d-dc8a-4f1c-9df3-086e6904b394-kube-api-access-97bg4" (OuterVolumeSpecName: "kube-api-access-97bg4") pod "6374f20d-dc8a-4f1c-9df3-086e6904b394" (UID: "6374f20d-dc8a-4f1c-9df3-086e6904b394"). InnerVolumeSpecName "kube-api-access-97bg4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.543465 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6374f20d-dc8a-4f1c-9df3-086e6904b394-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "6374f20d-dc8a-4f1c-9df3-086e6904b394" (UID: "6374f20d-dc8a-4f1c-9df3-086e6904b394"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.543881 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-7e4f-account-create-update-pl5sm"] Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.543958 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6374f20d-dc8a-4f1c-9df3-086e6904b394-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "6374f20d-dc8a-4f1c-9df3-086e6904b394" (UID: "6374f20d-dc8a-4f1c-9df3-086e6904b394"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.544371 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6374f20d-dc8a-4f1c-9df3-086e6904b394-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "6374f20d-dc8a-4f1c-9df3-086e6904b394" (UID: "6374f20d-dc8a-4f1c-9df3-086e6904b394"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.544435 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6374f20d-dc8a-4f1c-9df3-086e6904b394-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "6374f20d-dc8a-4f1c-9df3-086e6904b394" (UID: "6374f20d-dc8a-4f1c-9df3-086e6904b394"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.560245 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage17-crc" (OuterVolumeSpecName: "mysql-db") pod "6374f20d-dc8a-4f1c-9df3-086e6904b394" (UID: "6374f20d-dc8a-4f1c-9df3-086e6904b394"). InnerVolumeSpecName "local-storage17-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.566627 4558 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." 
pod="openstack-kuttl-tests/neutron-5d557df858-frznf" secret="" err="secret \"neutron-neutron-dockercfg-7d264\" not found" Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.584331 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6374f20d-dc8a-4f1c-9df3-086e6904b394-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6374f20d-dc8a-4f1c-9df3-086e6904b394" (UID: "6374f20d-dc8a-4f1c-9df3-086e6904b394"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:43 crc kubenswrapper[4558]: E0120 17:00:43.586694 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:00:43 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:00:43 crc kubenswrapper[4558]: Jan 20 17:00:43 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:00:43 crc kubenswrapper[4558]: Jan 20 17:00:43 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:00:43 crc kubenswrapper[4558]: Jan 20 17:00:43 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:00:43 crc kubenswrapper[4558]: Jan 20 17:00:43 crc kubenswrapper[4558]: if [ -n "" ]; then Jan 20 17:00:43 crc kubenswrapper[4558]: GRANT_DATABASE="" Jan 20 17:00:43 crc kubenswrapper[4558]: else Jan 20 17:00:43 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:00:43 crc kubenswrapper[4558]: fi Jan 20 17:00:43 crc kubenswrapper[4558]: Jan 20 17:00:43 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:00:43 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:00:43 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:00:43 crc kubenswrapper[4558]: # 3. create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:00:43 crc kubenswrapper[4558]: # support updates Jan 20 17:00:43 crc kubenswrapper[4558]: Jan 20 17:00:43 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:00:43 crc kubenswrapper[4558]: E0120 17:00:43.588271 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"openstack-cell1-mariadb-root-db-secret\\\" not found\"" pod="openstack-kuttl-tests/root-account-create-update-fbtv5" podUID="e353328b-4fe7-4637-8174-a3b227ad9761" Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.611434 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6374f20d-dc8a-4f1c-9df3-086e6904b394-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "6374f20d-dc8a-4f1c-9df3-086e6904b394" (UID: "6374f20d-dc8a-4f1c-9df3-086e6904b394"). InnerVolumeSpecName "galera-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.640678 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fe22ec2-af71-47bf-8053-86dee012df91-combined-ca-bundle\") pod \"4fe22ec2-af71-47bf-8053-86dee012df91\" (UID: \"4fe22ec2-af71-47bf-8053-86dee012df91\") " Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.640769 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/4fe22ec2-af71-47bf-8053-86dee012df91-openstack-config\") pod \"4fe22ec2-af71-47bf-8053-86dee012df91\" (UID: \"4fe22ec2-af71-47bf-8053-86dee012df91\") " Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.640803 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-82mbz\" (UniqueName: \"kubernetes.io/projected/4fe22ec2-af71-47bf-8053-86dee012df91-kube-api-access-82mbz\") pod \"4fe22ec2-af71-47bf-8053-86dee012df91\" (UID: \"4fe22ec2-af71-47bf-8053-86dee012df91\") " Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.640901 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/4fe22ec2-af71-47bf-8053-86dee012df91-openstack-config-secret\") pod \"4fe22ec2-af71-47bf-8053-86dee012df91\" (UID: \"4fe22ec2-af71-47bf-8053-86dee012df91\") " Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.641128 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-456km\" (UniqueName: \"kubernetes.io/projected/bf9a27d0-1f64-4f43-a2a7-ce0902c6cd25-kube-api-access-456km\") pod \"keystone-7e4f-account-create-update-pl5sm\" (UID: \"bf9a27d0-1f64-4f43-a2a7-ce0902c6cd25\") " pod="openstack-kuttl-tests/keystone-7e4f-account-create-update-pl5sm" Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.641255 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bf9a27d0-1f64-4f43-a2a7-ce0902c6cd25-operator-scripts\") pod \"keystone-7e4f-account-create-update-pl5sm\" (UID: \"bf9a27d0-1f64-4f43-a2a7-ce0902c6cd25\") " pod="openstack-kuttl-tests/keystone-7e4f-account-create-update-pl5sm" Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.641374 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6374f20d-dc8a-4f1c-9df3-086e6904b394-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.641385 4558 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/6374f20d-dc8a-4f1c-9df3-086e6904b394-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.641394 4558 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/6374f20d-dc8a-4f1c-9df3-086e6904b394-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.641401 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6374f20d-dc8a-4f1c-9df3-086e6904b394-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 
17:00:43.641409 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/6374f20d-dc8a-4f1c-9df3-086e6904b394-config-data-default\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.641417 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-97bg4\" (UniqueName: \"kubernetes.io/projected/6374f20d-dc8a-4f1c-9df3-086e6904b394-kube-api-access-97bg4\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.641432 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage17-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage17-crc\") on node \"crc\" " Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.641441 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/6374f20d-dc8a-4f1c-9df3-086e6904b394-config-data-generated\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.664275 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4fe22ec2-af71-47bf-8053-86dee012df91-kube-api-access-82mbz" (OuterVolumeSpecName: "kube-api-access-82mbz") pod "4fe22ec2-af71-47bf-8053-86dee012df91" (UID: "4fe22ec2-af71-47bf-8053-86dee012df91"). InnerVolumeSpecName "kube-api-access-82mbz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.678743 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage17-crc" (UniqueName: "kubernetes.io/local-volume/local-storage17-crc") on node "crc" Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.681637 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4fe22ec2-af71-47bf-8053-86dee012df91-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4fe22ec2-af71-47bf-8053-86dee012df91" (UID: "4fe22ec2-af71-47bf-8053-86dee012df91"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.735674 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4fe22ec2-af71-47bf-8053-86dee012df91-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "4fe22ec2-af71-47bf-8053-86dee012df91" (UID: "4fe22ec2-af71-47bf-8053-86dee012df91"). InnerVolumeSpecName "openstack-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.742597 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bf9a27d0-1f64-4f43-a2a7-ce0902c6cd25-operator-scripts\") pod \"keystone-7e4f-account-create-update-pl5sm\" (UID: \"bf9a27d0-1f64-4f43-a2a7-ce0902c6cd25\") " pod="openstack-kuttl-tests/keystone-7e4f-account-create-update-pl5sm" Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.742783 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-456km\" (UniqueName: \"kubernetes.io/projected/bf9a27d0-1f64-4f43-a2a7-ce0902c6cd25-kube-api-access-456km\") pod \"keystone-7e4f-account-create-update-pl5sm\" (UID: \"bf9a27d0-1f64-4f43-a2a7-ce0902c6cd25\") " pod="openstack-kuttl-tests/keystone-7e4f-account-create-update-pl5sm" Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.742823 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage17-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage17-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.742833 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fe22ec2-af71-47bf-8053-86dee012df91-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.742843 4558 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/4fe22ec2-af71-47bf-8053-86dee012df91-openstack-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.742852 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-82mbz\" (UniqueName: \"kubernetes.io/projected/4fe22ec2-af71-47bf-8053-86dee012df91-kube-api-access-82mbz\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:43 crc kubenswrapper[4558]: E0120 17:00:43.744042 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-neutron-ovndbs: secret "cert-neutron-ovndbs" not found Jan 20 17:00:43 crc kubenswrapper[4558]: E0120 17:00:43.744078 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-ovndb-tls-certs podName:7d0b0e33-9ce7-44d9-8b09-6008715fe1a0 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:44.244065411 +0000 UTC m=+1138.004403378 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "ovndb-tls-certs" (UniqueName: "kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-ovndb-tls-certs") pod "neutron-5d557df858-frznf" (UID: "7d0b0e33-9ce7-44d9-8b09-6008715fe1a0") : secret "cert-neutron-ovndbs" not found Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.748840 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bf9a27d0-1f64-4f43-a2a7-ce0902c6cd25-operator-scripts\") pod \"keystone-7e4f-account-create-update-pl5sm\" (UID: \"bf9a27d0-1f64-4f43-a2a7-ce0902c6cd25\") " pod="openstack-kuttl-tests/keystone-7e4f-account-create-update-pl5sm" Jan 20 17:00:43 crc kubenswrapper[4558]: E0120 17:00:43.748902 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-neutron-internal-svc: secret "cert-neutron-internal-svc" not found Jan 20 17:00:43 crc kubenswrapper[4558]: E0120 17:00:43.748927 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-internal-tls-certs podName:7d0b0e33-9ce7-44d9-8b09-6008715fe1a0 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:44.248919068 +0000 UTC m=+1138.009257035 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-internal-tls-certs") pod "neutron-5d557df858-frznf" (UID: "7d0b0e33-9ce7-44d9-8b09-6008715fe1a0") : secret "cert-neutron-internal-svc" not found Jan 20 17:00:43 crc kubenswrapper[4558]: E0120 17:00:43.749093 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-neutron-public-svc: secret "cert-neutron-public-svc" not found Jan 20 17:00:43 crc kubenswrapper[4558]: E0120 17:00:43.749114 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-public-tls-certs podName:7d0b0e33-9ce7-44d9-8b09-6008715fe1a0 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:44.249107773 +0000 UTC m=+1138.009445740 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-public-tls-certs") pod "neutron-5d557df858-frznf" (UID: "7d0b0e33-9ce7-44d9-8b09-6008715fe1a0") : secret "cert-neutron-public-svc" not found Jan 20 17:00:43 crc kubenswrapper[4558]: E0120 17:00:43.749141 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/neutron-config: secret "neutron-config" not found Jan 20 17:00:43 crc kubenswrapper[4558]: E0120 17:00:43.749158 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-config podName:7d0b0e33-9ce7-44d9-8b09-6008715fe1a0 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:44.249152546 +0000 UTC m=+1138.009490514 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-config") pod "neutron-5d557df858-frznf" (UID: "7d0b0e33-9ce7-44d9-8b09-6008715fe1a0") : secret "neutron-config" not found Jan 20 17:00:43 crc kubenswrapper[4558]: E0120 17:00:43.749362 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/neutron-httpd-config: secret "neutron-httpd-config" not found Jan 20 17:00:43 crc kubenswrapper[4558]: E0120 17:00:43.749383 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-httpd-config podName:7d0b0e33-9ce7-44d9-8b09-6008715fe1a0 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:44.249376117 +0000 UTC m=+1138.009714084 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "httpd-config" (UniqueName: "kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-httpd-config") pod "neutron-5d557df858-frznf" (UID: "7d0b0e33-9ce7-44d9-8b09-6008715fe1a0") : secret "neutron-httpd-config" not found Jan 20 17:00:43 crc kubenswrapper[4558]: E0120 17:00:43.749415 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:00:43 crc kubenswrapper[4558]: E0120 17:00:43.749431 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-combined-ca-bundle podName:7d0b0e33-9ce7-44d9-8b09-6008715fe1a0 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:44.249426703 +0000 UTC m=+1138.009764670 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-combined-ca-bundle") pod "neutron-5d557df858-frznf" (UID: "7d0b0e33-9ce7-44d9-8b09-6008715fe1a0") : secret "combined-ca-bundle" not found Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.785613 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-456km\" (UniqueName: \"kubernetes.io/projected/bf9a27d0-1f64-4f43-a2a7-ce0902c6cd25-kube-api-access-456km\") pod \"keystone-7e4f-account-create-update-pl5sm\" (UID: \"bf9a27d0-1f64-4f43-a2a7-ce0902c6cd25\") " pod="openstack-kuttl-tests/keystone-7e4f-account-create-update-pl5sm" Jan 20 17:00:43 crc kubenswrapper[4558]: E0120 17:00:43.785624 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="6c101cbe30eb5d950364b9539489f846cc6d1f7298614409d0709726ebba0b06" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:00:43 crc kubenswrapper[4558]: E0120 17:00:43.787961 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="6c101cbe30eb5d950364b9539489f846cc6d1f7298614409d0709726ebba0b06" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:00:43 crc kubenswrapper[4558]: E0120 17:00:43.794344 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="6c101cbe30eb5d950364b9539489f846cc6d1f7298614409d0709726ebba0b06" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:00:43 crc 
kubenswrapper[4558]: E0120 17:00:43.794381 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="10c92abf-cd48-4659-8595-ce9610c0fe2e" containerName="nova-scheduler-scheduler" Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.794536 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-db-create-6cd95"] Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.809519 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-db-create-6cd95"] Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.819225 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-5d77597494-bkh4z"] Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.819433 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/placement-5d77597494-bkh4z" podUID="a140c9a4-bec6-42e6-bc6c-d63566e4f7f6" containerName="placement-log" containerID="cri-o://b78d30b920cedd5deca6765214e5cedff3241e2599a5afce55dcb23d1cc75565" gracePeriod=30 Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.819757 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/placement-5d77597494-bkh4z" podUID="a140c9a4-bec6-42e6-bc6c-d63566e4f7f6" containerName="placement-api" containerID="cri-o://620ea4b124f9388111a2afb26185bb93d973f00636b50d2b8aed2e76b5d21043" gracePeriod=30 Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.823100 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-d54787f8d-7fznp"] Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.841450 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4fe22ec2-af71-47bf-8053-86dee012df91-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "4fe22ec2-af71-47bf-8053-86dee012df91" (UID: "4fe22ec2-af71-47bf-8053-86dee012df91"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.844800 4558 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/4fe22ec2-af71-47bf-8053-86dee012df91-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.865012 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-7e4f-account-create-update-pl5sm" Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.905219 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-49f5x"] Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.925412 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-49f5x"] Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.929232 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-6b64ccd79d-5qj2x"] Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.936955 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-5d557df858-frznf"] Jan 20 17:00:43 crc kubenswrapper[4558]: E0120 17:00:43.946491 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 20 17:00:43 crc kubenswrapper[4558]: E0120 17:00:43.946543 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/3f868eba-f4e1-4e32-b271-391cf271fe97-config-data podName:3f868eba-f4e1-4e32-b271-391cf271fe97 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:44.946530499 +0000 UTC m=+1138.706868466 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/3f868eba-f4e1-4e32-b271-391cf271fe97-config-data") pod "rabbitmq-server-0" (UID: "3f868eba-f4e1-4e32-b271-391cf271fe97") : configmap "rabbitmq-config-data" not found Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.957469 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-storage-0"] Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.957808 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="ad708448-38df-4494-bca7-fe394c9b53a7" containerName="account-server" containerID="cri-o://24445a4a28eeea3fb63d875b16f990af43a36181cb7778f0d316092430ca7285" gracePeriod=30 Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.959258 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="ad708448-38df-4494-bca7-fe394c9b53a7" containerName="swift-recon-cron" containerID="cri-o://e1f52810d18aeba836cd426714154488ddb699c7ef7cb6b0251ec23a6345ebf4" gracePeriod=30 Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.959353 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="ad708448-38df-4494-bca7-fe394c9b53a7" containerName="rsync" containerID="cri-o://a0064904f7cf7b2f3f57d0cbd50180940cf612a73b85319dcd1b4cc9e5286c49" gracePeriod=30 Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.959389 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="ad708448-38df-4494-bca7-fe394c9b53a7" containerName="object-expirer" containerID="cri-o://4fd13baec559163b2b66474d46cf26e69eb2ac8b71ab5c82119b909d78b9c3ec" gracePeriod=30 Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.959419 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="ad708448-38df-4494-bca7-fe394c9b53a7" containerName="object-updater" 
containerID="cri-o://7f4031ea42bd00352739604cfd6ebff1f1c035663b1e3b5d3f1ba025cafadaf4" gracePeriod=30 Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.959449 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="ad708448-38df-4494-bca7-fe394c9b53a7" containerName="object-auditor" containerID="cri-o://b729d20938039dd3cd6701145536a565bd85b83711e04e7135e5d6f37060390e" gracePeriod=30 Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.959474 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="ad708448-38df-4494-bca7-fe394c9b53a7" containerName="object-replicator" containerID="cri-o://5d77e588e0ee15e79eb372260c13103acd9bceda72bb63cee529297cc6f3bf98" gracePeriod=30 Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.959500 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="ad708448-38df-4494-bca7-fe394c9b53a7" containerName="object-server" containerID="cri-o://90254ca174d78ea243b5ca11d013559dbc865a160f1d264e2b31efdc48f96768" gracePeriod=30 Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.959525 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="ad708448-38df-4494-bca7-fe394c9b53a7" containerName="container-updater" containerID="cri-o://aa379f22b8042a8997aeff6f2e901aac68a2feff4a66a0636172020228c651ff" gracePeriod=30 Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.959552 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="ad708448-38df-4494-bca7-fe394c9b53a7" containerName="container-auditor" containerID="cri-o://58081c924356de25ebdeeecdbf39084210cf71b6f92814049f47601bfce7e1ef" gracePeriod=30 Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.959579 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="ad708448-38df-4494-bca7-fe394c9b53a7" containerName="container-replicator" containerID="cri-o://e121933f3a246845b6169455f9e1e99e6b2a61bce703613cb6fb31821719438d" gracePeriod=30 Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.959603 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="ad708448-38df-4494-bca7-fe394c9b53a7" containerName="container-server" containerID="cri-o://1e0a8b23c89daae563498da37b455997040b6b47b7a29c5c9590feba957d5e7e" gracePeriod=30 Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.959628 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="ad708448-38df-4494-bca7-fe394c9b53a7" containerName="account-reaper" containerID="cri-o://4c8b93db2628bf0f39a03755d724643f7c79df42b7cf8e54ef0a1ee499194855" gracePeriod=30 Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.959654 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="ad708448-38df-4494-bca7-fe394c9b53a7" containerName="account-auditor" containerID="cri-o://9502d1598a1db6c1187fe75f1d6475e3133bb27c977752998b79b425efafa253" gracePeriod=30 Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.959679 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" 
podUID="ad708448-38df-4494-bca7-fe394c9b53a7" containerName="account-replicator" containerID="cri-o://e13ad48d737a5f4a2b0376363ec37ed14a4cc5193e36abfdc9038bf0f4fc0e27" gracePeriod=30 Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.981032 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-rqg8f"] Jan 20 17:00:43 crc kubenswrapper[4558]: I0120 17:00:43.983417 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-rqg8f" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.026157 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-rqg8f"] Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.039277 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-db-create-kds4k"] Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.047859 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.047918 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/bf355276-9e62-474e-bfb1-616dde5b83bc-config-data podName:bf355276-9e62-474e-bfb1-616dde5b83bc nodeName:}" failed. No retries permitted until 2026-01-20 17:00:46.047907029 +0000 UTC m=+1139.808244996 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/bf355276-9e62-474e-bfb1-616dde5b83bc-config-data") pod "rabbitmq-cell1-server-0" (UID: "bf355276-9e62-474e-bfb1-616dde5b83bc") : configmap "rabbitmq-cell1-config-data" not found Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.051177 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.056538 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/placement-db-create-kds4k"] Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.073481 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-db-create-8jk7l"] Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.092955 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-db-create-ztgv2"] Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.098594 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-db-create-bcpmp"] Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.126293 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-db-create-ztgv2"] Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.128201 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-d54787f8d-7fznp" event={"ID":"c268a2f5-7c67-4935-8f3e-bdd83aeccc95","Type":"ContainerStarted","Data":"7711caea9ce1243c202dbf4adf45769a91fe36f8d08e7c2d792cbee835637b2d"} Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.139250 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell0-db-create-bcpmp"] Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.144943 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-db-create-8jk7l"] Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.148936 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae-operator-scripts\") pod \"0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae\" (UID: \"0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae\") " Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.148980 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae-kolla-config\") pod \"0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae\" (UID: \"0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae\") " Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.149074 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae-galera-tls-certs\") pod \"0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae\" (UID: \"0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae\") " Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.149116 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae-config-data-generated\") pod \"0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae\" (UID: \"0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae\") " Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.149188 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae-combined-ca-bundle\") pod \"0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae\" (UID: \"0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae\") " Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.149233 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" 
(UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae\" (UID: \"0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae\") " Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.149254 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mxkf7\" (UniqueName: \"kubernetes.io/projected/0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae-kube-api-access-mxkf7\") pod \"0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae\" (UID: \"0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae\") " Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.149317 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae-config-data-default\") pod \"0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae\" (UID: \"0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae\") " Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.149536 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/54c4bfe3-42bf-46ea-8c23-0621f7b65686-config\") pod \"ovn-northd-0\" (UID: \"54c4bfe3-42bf-46ea-8c23-0621f7b65686\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.150156 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/54c4bfe3-42bf-46ea-8c23-0621f7b65686-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"54c4bfe3-42bf-46ea-8c23-0621f7b65686\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.150219 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hjt82\" (UniqueName: \"kubernetes.io/projected/f881d3ea-5346-49f4-8618-7271150ba300-kube-api-access-hjt82\") pod \"dnsmasq-dnsmasq-84b9f45d47-rqg8f\" (UID: \"f881d3ea-5346-49f4-8618-7271150ba300\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-rqg8f" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.150248 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/f881d3ea-5346-49f4-8618-7271150ba300-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-84b9f45d47-rqg8f\" (UID: \"f881d3ea-5346-49f4-8618-7271150ba300\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-rqg8f" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.150269 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/54c4bfe3-42bf-46ea-8c23-0621f7b65686-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"54c4bfe3-42bf-46ea-8c23-0621f7b65686\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.150318 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f881d3ea-5346-49f4-8618-7271150ba300-config\") pod \"dnsmasq-dnsmasq-84b9f45d47-rqg8f\" (UID: \"f881d3ea-5346-49f4-8618-7271150ba300\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-rqg8f" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.150345 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54c4bfe3-42bf-46ea-8c23-0621f7b65686-combined-ca-bundle\") pod 
\"ovn-northd-0\" (UID: \"54c4bfe3-42bf-46ea-8c23-0621f7b65686\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.150367 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/54c4bfe3-42bf-46ea-8c23-0621f7b65686-scripts\") pod \"ovn-northd-0\" (UID: \"54c4bfe3-42bf-46ea-8c23-0621f7b65686\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.150449 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/ovnnorthd-scripts: configmap "ovnnorthd-scripts" not found Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.150492 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/54c4bfe3-42bf-46ea-8c23-0621f7b65686-scripts podName:54c4bfe3-42bf-46ea-8c23-0621f7b65686 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:46.150479899 +0000 UTC m=+1139.910817855 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "scripts" (UniqueName: "kubernetes.io/configmap/54c4bfe3-42bf-46ea-8c23-0621f7b65686-scripts") pod "ovn-northd-0" (UID: "54c4bfe3-42bf-46ea-8c23-0621f7b65686") : configmap "ovnnorthd-scripts" not found Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.150631 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae" (UID: "0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.150809 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae" (UID: "0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.151639 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae" (UID: "0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.151652 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae" (UID: "0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.151709 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/ovnnorthd-config: configmap "ovnnorthd-config" not found Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.151744 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/54c4bfe3-42bf-46ea-8c23-0621f7b65686-config podName:54c4bfe3-42bf-46ea-8c23-0621f7b65686 nodeName:}" failed. 
No retries permitted until 2026-01-20 17:00:46.151731703 +0000 UTC m=+1139.912069670 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/54c4bfe3-42bf-46ea-8c23-0621f7b65686-config") pod "ovn-northd-0" (UID: "54c4bfe3-42bf-46ea-8c23-0621f7b65686") : configmap "ovnnorthd-config" not found Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.151845 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-ovnnorthd-ovndbs: secret "cert-ovnnorthd-ovndbs" not found Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.151868 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/54c4bfe3-42bf-46ea-8c23-0621f7b65686-ovn-northd-tls-certs podName:54c4bfe3-42bf-46ea-8c23-0621f7b65686 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:46.151861446 +0000 UTC m=+1139.912199413 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "ovn-northd-tls-certs" (UniqueName: "kubernetes.io/secret/54c4bfe3-42bf-46ea-8c23-0621f7b65686-ovn-northd-tls-certs") pod "ovn-northd-0" (UID: "54c4bfe3-42bf-46ea-8c23-0621f7b65686") : secret "cert-ovnnorthd-ovndbs" not found Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.151917 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-ovn-metrics: secret "cert-ovn-metrics" not found Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.151935 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/54c4bfe3-42bf-46ea-8c23-0621f7b65686-metrics-certs-tls-certs podName:54c4bfe3-42bf-46ea-8c23-0621f7b65686 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:46.151929104 +0000 UTC m=+1139.912267070 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs-tls-certs" (UniqueName: "kubernetes.io/secret/54c4bfe3-42bf-46ea-8c23-0621f7b65686-metrics-certs-tls-certs") pod "ovn-northd-0" (UID: "54c4bfe3-42bf-46ea-8c23-0621f7b65686") : secret "cert-ovn-metrics" not found Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.151974 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.151991 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/54c4bfe3-42bf-46ea-8c23-0621f7b65686-combined-ca-bundle podName:54c4bfe3-42bf-46ea-8c23-0621f7b65686 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:46.151985319 +0000 UTC m=+1139.912323286 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/54c4bfe3-42bf-46ea-8c23-0621f7b65686-combined-ca-bundle") pod "ovn-northd-0" (UID: "54c4bfe3-42bf-46ea-8c23-0621f7b65686") : secret "combined-ca-bundle" not found Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.165008 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae-kube-api-access-mxkf7" (OuterVolumeSpecName: "kube-api-access-mxkf7") pod "0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae" (UID: "0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae"). InnerVolumeSpecName "kube-api-access-mxkf7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.190280 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage13-crc" (OuterVolumeSpecName: "mysql-db") pod "0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae" (UID: "0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae"). InnerVolumeSpecName "local-storage13-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.190339 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-db-create-9rlkr"] Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.208351 4558 generic.go:334] "Generic (PLEG): container finished" podID="ad708448-38df-4494-bca7-fe394c9b53a7" containerID="7f4031ea42bd00352739604cfd6ebff1f1c035663b1e3b5d3f1ba025cafadaf4" exitCode=0 Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.208466 4558 generic.go:334] "Generic (PLEG): container finished" podID="ad708448-38df-4494-bca7-fe394c9b53a7" containerID="b729d20938039dd3cd6701145536a565bd85b83711e04e7135e5d6f37060390e" exitCode=0 Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.208522 4558 generic.go:334] "Generic (PLEG): container finished" podID="ad708448-38df-4494-bca7-fe394c9b53a7" containerID="aa379f22b8042a8997aeff6f2e901aac68a2feff4a66a0636172020228c651ff" exitCode=0 Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.208571 4558 generic.go:334] "Generic (PLEG): container finished" podID="ad708448-38df-4494-bca7-fe394c9b53a7" containerID="58081c924356de25ebdeeecdbf39084210cf71b6f92814049f47601bfce7e1ef" exitCode=0 Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.208618 4558 generic.go:334] "Generic (PLEG): container finished" podID="ad708448-38df-4494-bca7-fe394c9b53a7" containerID="9502d1598a1db6c1187fe75f1d6475e3133bb27c977752998b79b425efafa253" exitCode=0 Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.208662 4558 generic.go:334] "Generic (PLEG): container finished" podID="ad708448-38df-4494-bca7-fe394c9b53a7" containerID="e13ad48d737a5f4a2b0376363ec37ed14a4cc5193e36abfdc9038bf0f4fc0e27" exitCode=0 Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.208750 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"ad708448-38df-4494-bca7-fe394c9b53a7","Type":"ContainerDied","Data":"7f4031ea42bd00352739604cfd6ebff1f1c035663b1e3b5d3f1ba025cafadaf4"} Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.208839 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"ad708448-38df-4494-bca7-fe394c9b53a7","Type":"ContainerDied","Data":"b729d20938039dd3cd6701145536a565bd85b83711e04e7135e5d6f37060390e"} Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.208903 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"ad708448-38df-4494-bca7-fe394c9b53a7","Type":"ContainerDied","Data":"aa379f22b8042a8997aeff6f2e901aac68a2feff4a66a0636172020228c651ff"} Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.208957 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"ad708448-38df-4494-bca7-fe394c9b53a7","Type":"ContainerDied","Data":"58081c924356de25ebdeeecdbf39084210cf71b6f92814049f47601bfce7e1ef"} Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.209007 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"ad708448-38df-4494-bca7-fe394c9b53a7","Type":"ContainerDied","Data":"9502d1598a1db6c1187fe75f1d6475e3133bb27c977752998b79b425efafa253"} Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.209058 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"ad708448-38df-4494-bca7-fe394c9b53a7","Type":"ContainerDied","Data":"e13ad48d737a5f4a2b0376363ec37ed14a4cc5193e36abfdc9038bf0f4fc0e27"} Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.223670 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-db-create-9rlkr"] Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.223722 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-fbtv5" event={"ID":"e353328b-4fe7-4637-8174-a3b227ad9761","Type":"ContainerStarted","Data":"a68a81065b6949392acd6d15a4b91cb38706154b8afc2adbe6c374896e7b9115"} Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.224511 4558 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openstack-kuttl-tests/root-account-create-update-fbtv5" secret="" err="secret \"galera-openstack-cell1-dockercfg-npsbd\" not found" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.237535 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-b4f9-account-create-update-smxt8"] Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.265402 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/cinder-api-0" podUID="764c7a15-6a1a-470c-9d0b-a63ed418cc09" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.0.151:8776/healthcheck\": read tcp 10.217.0.2:54744->10.217.0.151:8776: read: connection reset by peer" Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.265759 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-neutron-internal-svc: secret "cert-neutron-internal-svc" not found Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.265811 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-internal-tls-certs podName:7d0b0e33-9ce7-44d9-8b09-6008715fe1a0 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:45.265794729 +0000 UTC m=+1139.026132696 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-internal-tls-certs") pod "neutron-5d557df858-frznf" (UID: "7d0b0e33-9ce7-44d9-8b09-6008715fe1a0") : secret "cert-neutron-internal-svc" not found Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.269612 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-neutron-public-svc: secret "cert-neutron-public-svc" not found Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.269656 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-public-tls-certs podName:7d0b0e33-9ce7-44d9-8b09-6008715fe1a0 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:45.269644058 +0000 UTC m=+1139.029982025 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-public-tls-certs") pod "neutron-5d557df858-frznf" (UID: "7d0b0e33-9ce7-44d9-8b09-6008715fe1a0") : secret "cert-neutron-public-svc" not found Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.269761 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/neutron-config: secret "neutron-config" not found Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.269819 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zscx2\" (UniqueName: \"kubernetes.io/projected/54c4bfe3-42bf-46ea-8c23-0621f7b65686-kube-api-access-zscx2\") pod \"ovn-northd-0\" (UID: \"54c4bfe3-42bf-46ea-8c23-0621f7b65686\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.270049 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hjt82\" (UniqueName: \"kubernetes.io/projected/f881d3ea-5346-49f4-8618-7271150ba300-kube-api-access-hjt82\") pod \"dnsmasq-dnsmasq-84b9f45d47-rqg8f\" (UID: \"f881d3ea-5346-49f4-8618-7271150ba300\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-rqg8f" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.270088 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/f881d3ea-5346-49f4-8618-7271150ba300-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-84b9f45d47-rqg8f\" (UID: \"f881d3ea-5346-49f4-8618-7271150ba300\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-rqg8f" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.270236 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f881d3ea-5346-49f4-8618-7271150ba300-config\") pod \"dnsmasq-dnsmasq-84b9f45d47-rqg8f\" (UID: \"f881d3ea-5346-49f4-8618-7271150ba300\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-rqg8f" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.270297 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae-config-data-default\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.270308 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.270316 4558 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.270341 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae-config-data-generated\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.270359 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") on node \"crc\" " Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.270368 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mxkf7\" 
(UniqueName: \"kubernetes.io/projected/0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae-kube-api-access-mxkf7\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.271895 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/neutron-httpd-config: secret "neutron-httpd-config" not found Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.271944 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-httpd-config podName:7d0b0e33-9ce7-44d9-8b09-6008715fe1a0 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:45.271932331 +0000 UTC m=+1139.032270298 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "httpd-config" (UniqueName: "kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-httpd-config") pod "neutron-5d557df858-frznf" (UID: "7d0b0e33-9ce7-44d9-8b09-6008715fe1a0") : secret "neutron-httpd-config" not found Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.272138 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-config podName:7d0b0e33-9ce7-44d9-8b09-6008715fe1a0 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:45.272129291 +0000 UTC m=+1139.032467258 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-config") pod "neutron-5d557df858-frznf" (UID: "7d0b0e33-9ce7-44d9-8b09-6008715fe1a0") : secret "neutron-config" not found Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.272203 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.272238 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-combined-ca-bundle podName:7d0b0e33-9ce7-44d9-8b09-6008715fe1a0 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:45.272231483 +0000 UTC m=+1139.032569451 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-combined-ca-bundle") pod "neutron-5d557df858-frznf" (UID: "7d0b0e33-9ce7-44d9-8b09-6008715fe1a0") : secret "combined-ca-bundle" not found Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.272772 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-b4f9-account-create-update-smxt8"] Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.272894 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-neutron-ovndbs: secret "cert-neutron-ovndbs" not found Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.272940 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-ovndb-tls-certs podName:7d0b0e33-9ce7-44d9-8b09-6008715fe1a0 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:45.272925348 +0000 UTC m=+1139.033263316 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "ovndb-tls-certs" (UniqueName: "kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-ovndb-tls-certs") pod "neutron-5d557df858-frznf" (UID: "7d0b0e33-9ce7-44d9-8b09-6008715fe1a0") : secret "cert-neutron-ovndbs" not found Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.272940 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/f881d3ea-5346-49f4-8618-7271150ba300-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-84b9f45d47-rqg8f\" (UID: \"f881d3ea-5346-49f4-8618-7271150ba300\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-rqg8f" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.273676 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f881d3ea-5346-49f4-8618-7271150ba300-config\") pod \"dnsmasq-dnsmasq-84b9f45d47-rqg8f\" (UID: \"f881d3ea-5346-49f4-8618-7271150ba300\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-rqg8f" Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.277850 4558 projected.go:194] Error preparing data for projected volume kube-api-access-zscx2 for pod openstack-kuttl-tests/ovn-northd-0: failed to fetch token: serviceaccounts "ovnnorthd-ovnnorthd" not found Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.277901 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/54c4bfe3-42bf-46ea-8c23-0621f7b65686-kube-api-access-zscx2 podName:54c4bfe3-42bf-46ea-8c23-0621f7b65686 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:46.277886268 +0000 UTC m=+1140.038224235 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-zscx2" (UniqueName: "kubernetes.io/projected/54c4bfe3-42bf-46ea-8c23-0621f7b65686-kube-api-access-zscx2") pod "ovn-northd-0" (UID: "54c4bfe3-42bf-46ea-8c23-0621f7b65686") : failed to fetch token: serviceaccounts "ovnnorthd-ovnnorthd" not found Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.279708 4558 generic.go:334] "Generic (PLEG): container finished" podID="a140c9a4-bec6-42e6-bc6c-d63566e4f7f6" containerID="b78d30b920cedd5deca6765214e5cedff3241e2599a5afce55dcb23d1cc75565" exitCode=143 Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.279754 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-5d77597494-bkh4z" event={"ID":"a140c9a4-bec6-42e6-bc6c-d63566e4f7f6","Type":"ContainerDied","Data":"b78d30b920cedd5deca6765214e5cedff3241e2599a5afce55dcb23d1cc75565"} Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.302442 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"6374f20d-dc8a-4f1c-9df3-086e6904b394","Type":"ContainerDied","Data":"d35a18247b7e91cdcddd888317e88cf08b8eb185e0a9afca25f7db86dfb4e278"} Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.302489 4558 scope.go:117] "RemoveContainer" containerID="14cc352d3f4cfc88e8fb040c94fae40dfc936c241aaccfd3a719350b13e1a6d1" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.302617 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.302977 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae" (UID: "0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.303643 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:00:44 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:00:44 crc kubenswrapper[4558]: Jan 20 17:00:44 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:00:44 crc kubenswrapper[4558]: Jan 20 17:00:44 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:00:44 crc kubenswrapper[4558]: Jan 20 17:00:44 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:00:44 crc kubenswrapper[4558]: Jan 20 17:00:44 crc kubenswrapper[4558]: if [ -n "" ]; then Jan 20 17:00:44 crc kubenswrapper[4558]: GRANT_DATABASE="" Jan 20 17:00:44 crc kubenswrapper[4558]: else Jan 20 17:00:44 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:00:44 crc kubenswrapper[4558]: fi Jan 20 17:00:44 crc kubenswrapper[4558]: Jan 20 17:00:44 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:00:44 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:00:44 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:00:44 crc kubenswrapper[4558]: # 3. create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:00:44 crc kubenswrapper[4558]: # support updates Jan 20 17:00:44 crc kubenswrapper[4558]: Jan 20 17:00:44 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.306583 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstackclient" Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.308368 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"openstack-cell1-mariadb-root-db-secret\\\" not found\"" pod="openstack-kuttl-tests/root-account-create-update-fbtv5" podUID="e353328b-4fe7-4637-8174-a3b227ad9761" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.346073 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hjt82\" (UniqueName: \"kubernetes.io/projected/f881d3ea-5346-49f4-8618-7271150ba300-kube-api-access-hjt82\") pod \"dnsmasq-dnsmasq-84b9f45d47-rqg8f\" (UID: \"f881d3ea-5346-49f4-8618-7271150ba300\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-rqg8f" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.350379 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae" (UID: "0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae"). InnerVolumeSpecName "galera-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.359886 4558 generic.go:334] "Generic (PLEG): container finished" podID="0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae" containerID="b8b4710c6079f7f8ec88f1289e78374ee84e488da8b6978ef790e87233e7f5fb" exitCode=0 Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.359962 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae","Type":"ContainerDied","Data":"b8b4710c6079f7f8ec88f1289e78374ee84e488da8b6978ef790e87233e7f5fb"} Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.359988 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae","Type":"ContainerDied","Data":"9386e80aa945354b7a1f22d3025aa79ac74ed014a7a4ee404aae6cc4b3238b4f"} Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.360080 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.367969 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-55ff946595-f8gjf" event={"ID":"4969c1af-53c0-435a-bd06-6bd493c81c80","Type":"ContainerStarted","Data":"43610636e4831cc5451e75428b042d0c9d0a563ad6b875c786d70693be0d98f3"} Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.368603 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/keystone-55ff946595-f8gjf" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.372809 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.372836 4558 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.372914 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-cell1-scripts: configmap "openstack-cell1-scripts" not found Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.372954 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/e353328b-4fe7-4637-8174-a3b227ad9761-operator-scripts podName:e353328b-4fe7-4637-8174-a3b227ad9761 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:44.872938364 +0000 UTC m=+1138.633276332 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/e353328b-4fe7-4637-8174-a3b227ad9761-operator-scripts") pod "root-account-create-update-fbtv5" (UID: "e353328b-4fe7-4637-8174-a3b227ad9761") : configmap "openstack-cell1-scripts" not found Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.376882 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage13-crc" (UniqueName: "kubernetes.io/local-volume/local-storage13-crc") on node "crc" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.384073 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-c4f864f89-58gb9" event={"ID":"de7f5467-1e83-42f0-86bb-ade85deec8f3","Type":"ContainerStarted","Data":"ee8ddf384630628e434ded9ac4a8dd2f149e7593afd334474324206c1828ec18"} Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.384682 4558 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." 
pod="openstack-kuttl-tests/barbican-worker-c4f864f89-58gb9" secret="" err="secret \"barbican-barbican-dockercfg-z9dpp\" not found" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.407742 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-7e4f-account-create-update-nq676"] Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.416399 4558 generic.go:334] "Generic (PLEG): container finished" podID="8074f6f3-f564-42bc-b08b-28ffe75bbbc5" containerID="20cf0ea0e052fa597f57928ad72ba1f7803b3e9d839d21a4246b51731cb87948" exitCode=0 Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.416432 4558 generic.go:334] "Generic (PLEG): container finished" podID="8074f6f3-f564-42bc-b08b-28ffe75bbbc5" containerID="37fbfba85457fa31861b89574c96f8a2be864e14e57e69c2dae33967885a1430" exitCode=2 Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.416441 4558 generic.go:334] "Generic (PLEG): container finished" podID="8074f6f3-f564-42bc-b08b-28ffe75bbbc5" containerID="cc735ea1e3ff81a6e1e946cb0561445b4ff17c02725e6a7c5e3b8fb04ea0d90d" exitCode=0 Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.416488 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"8074f6f3-f564-42bc-b08b-28ffe75bbbc5","Type":"ContainerDied","Data":"20cf0ea0e052fa597f57928ad72ba1f7803b3e9d839d21a4246b51731cb87948"} Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.416511 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"8074f6f3-f564-42bc-b08b-28ffe75bbbc5","Type":"ContainerDied","Data":"37fbfba85457fa31861b89574c96f8a2be864e14e57e69c2dae33967885a1430"} Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.416522 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"8074f6f3-f564-42bc-b08b-28ffe75bbbc5","Type":"ContainerDied","Data":"cc735ea1e3ff81a6e1e946cb0561445b4ff17c02725e6a7c5e3b8fb04ea0d90d"} Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.450025 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-7e4f-account-create-update-nq676"] Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.476289 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-db-sync-k6vkv"] Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.477334 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-keystone-public-svc: secret "cert-keystone-public-svc" not found Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.477447 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-public-tls-certs podName:4969c1af-53c0-435a-bd06-6bd493c81c80 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:44.977431154 +0000 UTC m=+1138.737769121 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-public-tls-certs") pod "keystone-55ff946595-f8gjf" (UID: "4969c1af-53c0-435a-bd06-6bd493c81c80") : secret "cert-keystone-public-svc" not found Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.477529 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/barbican-worker-config-data: secret "barbican-worker-config-data" not found Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.477577 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/de7f5467-1e83-42f0-86bb-ade85deec8f3-config-data-custom podName:de7f5467-1e83-42f0-86bb-ade85deec8f3 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:44.977564435 +0000 UTC m=+1138.737902402 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data-custom" (UniqueName: "kubernetes.io/secret/de7f5467-1e83-42f0-86bb-ade85deec8f3-config-data-custom") pod "barbican-worker-c4f864f89-58gb9" (UID: "de7f5467-1e83-42f0-86bb-ade85deec8f3") : secret "barbican-worker-config-data" not found Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.477609 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/keystone-config-data: secret "keystone-config-data" not found Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.477626 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-config-data podName:4969c1af-53c0-435a-bd06-6bd493c81c80 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:44.97762084 +0000 UTC m=+1138.737958808 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-config-data") pod "keystone-55ff946595-f8gjf" (UID: "4969c1af-53c0-435a-bd06-6bd493c81c80") : secret "keystone-config-data" not found Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.477650 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.477667 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-combined-ca-bundle podName:4969c1af-53c0-435a-bd06-6bd493c81c80 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:44.977662901 +0000 UTC m=+1138.738000868 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-combined-ca-bundle") pod "keystone-55ff946595-f8gjf" (UID: "4969c1af-53c0-435a-bd06-6bd493c81c80") : secret "combined-ca-bundle" not found Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.477689 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/keystone: secret "keystone" not found Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.477703 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-fernet-keys podName:4969c1af-53c0-435a-bd06-6bd493c81c80 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:44.977698858 +0000 UTC m=+1138.738036825 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "fernet-keys" (UniqueName: "kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-fernet-keys") pod "keystone-55ff946595-f8gjf" (UID: "4969c1af-53c0-435a-bd06-6bd493c81c80") : secret "keystone" not found Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.477505 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.478149 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/de7f5467-1e83-42f0-86bb-ade85deec8f3-combined-ca-bundle podName:de7f5467-1e83-42f0-86bb-ade85deec8f3 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:44.978141139 +0000 UTC m=+1138.738479106 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/de7f5467-1e83-42f0-86bb-ade85deec8f3-combined-ca-bundle") pod "barbican-worker-c4f864f89-58gb9" (UID: "de7f5467-1e83-42f0-86bb-ade85deec8f3") : secret "combined-ca-bundle" not found Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.478119 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.478392 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/keystone: secret "keystone" not found Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.478420 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-credential-keys podName:4969c1af-53c0-435a-bd06-6bd493c81c80 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:44.978413883 +0000 UTC m=+1138.738751850 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "credential-keys" (UniqueName: "kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-credential-keys") pod "keystone-55ff946595-f8gjf" (UID: "4969c1af-53c0-435a-bd06-6bd493c81c80") : secret "keystone" not found Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.478451 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/barbican-config-data: secret "barbican-config-data" not found Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.478467 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/de7f5467-1e83-42f0-86bb-ade85deec8f3-config-data podName:de7f5467-1e83-42f0-86bb-ade85deec8f3 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:44.978462073 +0000 UTC m=+1138.738800040 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/de7f5467-1e83-42f0-86bb-ade85deec8f3-config-data") pod "barbican-worker-c4f864f89-58gb9" (UID: "de7f5467-1e83-42f0-86bb-ade85deec8f3") : secret "barbican-config-data" not found Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.478494 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-keystone-internal-svc: secret "cert-keystone-internal-svc" not found Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.478510 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-internal-tls-certs podName:4969c1af-53c0-435a-bd06-6bd493c81c80 nodeName:}" failed. 
No retries permitted until 2026-01-20 17:00:44.978505915 +0000 UTC m=+1138.738843882 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-internal-tls-certs") pod "keystone-55ff946595-f8gjf" (UID: "4969c1af-53c0-435a-bd06-6bd493c81c80") : secret "cert-keystone-internal-svc" not found Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.478831 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/keystone-scripts: secret "keystone-scripts" not found Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.478933 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-scripts podName:4969c1af-53c0-435a-bd06-6bd493c81c80 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:44.978921156 +0000 UTC m=+1138.739259123 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "scripts" (UniqueName: "kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-scripts") pod "keystone-55ff946595-f8gjf" (UID: "4969c1af-53c0-435a-bd06-6bd493c81c80") : secret "keystone-scripts" not found Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.500742 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-db-sync-k6vkv"] Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.500924 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/keystone-55ff946595-f8gjf" podStartSLOduration=4.500913724 podStartE2EDuration="4.500913724s" podCreationTimestamp="2026-01-20 17:00:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:00:44.392979863 +0000 UTC m=+1138.153317830" watchObservedRunningTime="2026-01-20 17:00:44.500913724 +0000 UTC m=+1138.261251691" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.500956 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-6b64ccd79d-5qj2x" event={"ID":"5d3423c4-ff78-4ff7-b42b-b3c93b309d52","Type":"ContainerStarted","Data":"b24670af574f9ed9a920845a520dd0b88057aa73de3d8a8c8308fb29c005498d"} Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.501432 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-6b64ccd79d-5qj2x" podUID="5d3423c4-ff78-4ff7-b42b-b3c93b309d52" containerName="neutron-api" containerID="cri-o://b24670af574f9ed9a920845a520dd0b88057aa73de3d8a8c8308fb29c005498d" gracePeriod=30 Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.501521 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/neutron-6b64ccd79d-5qj2x" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.501641 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-6b64ccd79d-5qj2x" podUID="5d3423c4-ff78-4ff7-b42b-b3c93b309d52" containerName="neutron-httpd" containerID="cri-o://37aa08ee2771757bea1365d0523225133f3f9a35715f5122f8e028516447be51" gracePeriod=30 Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.515046 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-5df68bf4dd-zbx8h" event={"ID":"3735ca3d-3764-4d36-b912-fbf0bfb96dd8","Type":"ContainerStarted","Data":"cecf310a1a5c01a8e2cb6f8db17408c6746dfda1028e4915a95ed043910b2cbd"} Jan 20 17:00:44 crc 
kubenswrapper[4558]: I0120 17:00:44.515096 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-5df68bf4dd-zbx8h" event={"ID":"3735ca3d-3764-4d36-b912-fbf0bfb96dd8","Type":"ContainerStarted","Data":"b6c74e47f2a677093337ddf0004171daa0ef22786b50d9ee2e84816a4f1d1a44"} Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.515620 4558 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openstack-kuttl-tests/barbican-api-5df68bf4dd-zbx8h" secret="" err="secret \"barbican-barbican-dockercfg-z9dpp\" not found" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.515838 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/barbican-api-5df68bf4dd-zbx8h" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.515869 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/barbican-api-5df68bf4dd-zbx8h" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.539949 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-7fc5477d66-pz8q2" event={"ID":"7776f921-0ae7-4a8c-b444-cf5b4b9a4f65","Type":"ContainerStarted","Data":"6c9fee82734b4e79e64bd03d6cf2d27e8723c07ddab900b9256e6a58cb6d0abe"} Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.539984 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-7fc5477d66-pz8q2" event={"ID":"7776f921-0ae7-4a8c-b444-cf5b4b9a4f65","Type":"ContainerStarted","Data":"6b14e8b1ef1901e159022cfb9e13791cc8bca3602aeca47855720efbfe3f6831"} Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.540490 4558 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openstack-kuttl-tests/barbican-keystone-listener-7fc5477d66-pz8q2" secret="" err="secret \"barbican-barbican-dockercfg-z9dpp\" not found" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.545932 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-0"] Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.551193 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-db-sync-mj7lb"] Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.557517 4558 generic.go:334] "Generic (PLEG): container finished" podID="3b2ffa52-4d19-46f6-aea5-62fc758def73" containerID="3d3a8f2db93b034973dfab0303e49c56217b13cf7cd940baab7ecf8cfbd2745f" exitCode=2 Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.557611 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-db-sync-mj7lb"] Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.557635 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"3b2ffa52-4d19-46f6-aea5-62fc758def73","Type":"ContainerDied","Data":"3d3a8f2db93b034973dfab0303e49c56217b13cf7cd940baab7ecf8cfbd2745f"} Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.563447 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.564660 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.567188 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"324ace6f-8bac-4269-a674-d9b6e990cd18","Type":"ContainerDied","Data":"2596754423a638526c01cacc69d8c2528039a5fb4569f763f1e4c4fc4242e1cc"} Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.568750 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-5d557df858-frznf" podUID="7d0b0e33-9ce7-44d9-8b09-6008715fe1a0" containerName="neutron-api" containerID="cri-o://759720ba5c84a0de6cd2dbe18773a4daeffbac9fb1b15d97e54f0ba7761ed858" gracePeriod=30 Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.568941 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-5d557df858-frznf" podUID="7d0b0e33-9ce7-44d9-8b09-6008715fe1a0" containerName="neutron-httpd" containerID="cri-o://0db5ea38beca13c9805cb3405d85b10e954c953b65c96cec72dc60b73ae7e610" gracePeriod=30 Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.568394 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-worker-c4f864f89-58gb9" podStartSLOduration=4.568379293 podStartE2EDuration="4.568379293s" podCreationTimestamp="2026-01-20 17:00:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:00:44.415652779 +0000 UTC m=+1138.175990746" watchObservedRunningTime="2026-01-20 17:00:44.568379293 +0000 UTC m=+1138.328717260" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.576879 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-rqg8f" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.607052 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0cd03d7d-cae3-4c24-90db-5f8598078f52" path="/var/lib/kubelet/pods/0cd03d7d-cae3-4c24-90db-5f8598078f52/volumes" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.614185 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0e936a71-a599-4e5a-a8b7-9cde8054a1b5" path="/var/lib/kubelet/pods/0e936a71-a599-4e5a-a8b7-9cde8054a1b5/volumes" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.615444 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1c0d7766-0fb5-47ec-87f4-6644f5b1afd9" path="/var/lib/kubelet/pods/1c0d7766-0fb5-47ec-87f4-6644f5b1afd9/volumes" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.616496 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/neutron-6b64ccd79d-5qj2x" podStartSLOduration=4.616483724 podStartE2EDuration="4.616483724s" podCreationTimestamp="2026-01-20 17:00:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:00:44.536785922 +0000 UTC m=+1138.297123889" watchObservedRunningTime="2026-01-20 17:00:44.616483724 +0000 UTC m=+1138.376821691" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.617659 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20f64bc1-f5bb-48bc-a586-eadcfad3fa6b" path="/var/lib/kubelet/pods/20f64bc1-f5bb-48bc-a586-eadcfad3fa6b/volumes" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.618114 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22adc061-9ded-4378-abcc-1e86a3985839" path="/var/lib/kubelet/pods/22adc061-9ded-4378-abcc-1e86a3985839/volumes" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.619295 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="24ec82c6-7ca6-47e0-9291-17327cb4b222" path="/var/lib/kubelet/pods/24ec82c6-7ca6-47e0-9291-17327cb4b222/volumes" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.622143 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="316ffdc9-b0e1-47d9-90f8-45a8ceb87353" path="/var/lib/kubelet/pods/316ffdc9-b0e1-47d9-90f8-45a8ceb87353/volumes" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.624843 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3252a616-e809-4059-9c6a-cf8af5aef236" path="/var/lib/kubelet/pods/3252a616-e809-4059-9c6a-cf8af5aef236/volumes" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.626142 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4fe22ec2-af71-47bf-8053-86dee012df91" path="/var/lib/kubelet/pods/4fe22ec2-af71-47bf-8053-86dee012df91/volumes" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.631598 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="66a36d00-ae7a-4881-b8b0-1cab43a25547" path="/var/lib/kubelet/pods/66a36d00-ae7a-4881-b8b0-1cab43a25547/volumes" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.632898 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6aaeaa31-51e8-451a-9a40-6880c278ee0e" path="/var/lib/kubelet/pods/6aaeaa31-51e8-451a-9a40-6880c278ee0e/volumes" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.633547 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa" path="/var/lib/kubelet/pods/7b8d1710-d43e-4f4d-a16b-fb97e8bb7ffa/volumes" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.636879 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8074b22b-e6be-4d65-a8fd-293434b77010" path="/var/lib/kubelet/pods/8074b22b-e6be-4d65-a8fd-293434b77010/volumes" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.637584 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="95fbc547-5231-4cbb-ba75-b1766ca8c903" path="/var/lib/kubelet/pods/95fbc547-5231-4cbb-ba75-b1766ca8c903/volumes" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.639523 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9cc658b3-2ddb-4902-bd24-a0eb9e2ad7c4" path="/var/lib/kubelet/pods/9cc658b3-2ddb-4902-bd24-a0eb9e2ad7c4/volumes" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.640218 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9f6c4192-92da-4aff-ade3-e502935bc96d" path="/var/lib/kubelet/pods/9f6c4192-92da-4aff-ade3-e502935bc96d/volumes" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.640875 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a66af596-2f1d-496b-87cc-c67d42f99a56" path="/var/lib/kubelet/pods/a66af596-2f1d-496b-87cc-c67d42f99a56/volumes" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.647337 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b1e54a34-818b-4918-baeb-586ff225fe1f" path="/var/lib/kubelet/pods/b1e54a34-818b-4918-baeb-586ff225fe1f/volumes" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.648025 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b3c9bd31-20f5-45ea-ae8d-987a8a70e321" path="/var/lib/kubelet/pods/b3c9bd31-20f5-45ea-ae8d-987a8a70e321/volumes" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.648537 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b79fb42a-3e70-4daa-b0ea-fe854def5825" path="/var/lib/kubelet/pods/b79fb42a-3e70-4daa-b0ea-fe854def5825/volumes" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.665351 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.665583 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cc72bb7b-054c-4141-9db7-1bf5a8b716bb" path="/var/lib/kubelet/pods/cc72bb7b-054c-4141-9db7-1bf5a8b716bb/volumes" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.666918 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cdd71514-e929-4a96-b799-91eecad5ac02" path="/var/lib/kubelet/pods/cdd71514-e929-4a96-b799-91eecad5ac02/volumes" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.667656 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d804cfef-d66b-4e73-9eaa-32500f685367" path="/var/lib/kubelet/pods/d804cfef-d66b-4e73-9eaa-32500f685367/volumes" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.669846 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e221e8f5-cc9e-4399-8b97-92ef8104bb70" path="/var/lib/kubelet/pods/e221e8f5-cc9e-4399-8b97-92ef8104bb70/volumes" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.670636 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ec56e0f2-2aaa-458d-8c02-e3dba504c48a" path="/var/lib/kubelet/pods/ec56e0f2-2aaa-458d-8c02-e3dba504c48a/volumes" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.671134 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4589cf0-10b5-4bd9-a06d-58481ece189a" path="/var/lib/kubelet/pods/f4589cf0-10b5-4bd9-a06d-58481ece189a/volumes" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.671665 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fe5cc26d-38d6-43e0-8977-6ff7e991ab6c" path="/var/lib/kubelet/pods/fe5cc26d-38d6-43e0-8977-6ff7e991ab6c/volumes" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.671201 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-api-5df68bf4dd-zbx8h" podStartSLOduration=4.671191653 podStartE2EDuration="4.671191653s" podCreationTimestamp="2026-01-20 17:00:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:00:44.557919134 +0000 UTC m=+1138.318257100" watchObservedRunningTime="2026-01-20 17:00:44.671191653 +0000 UTC m=+1138.431529621" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.676787 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-7e4f-account-create-update-pl5sm"] Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.676814 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-db-create-9zm4f"] Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.676826 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-db-create-9zm4f"] Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.676840 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-db-create-w6fpd"] Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.676848 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-db-create-w6fpd"] Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.676857 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-proxy-6b7b5d66dd-8plzj"] Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.676870 4558 kubelet.go:2437] "SyncLoop DELETE" 
source="api" pods=["openstack-kuttl-tests/root-account-create-update-fbtv5"] Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.676879 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-worker-7649847b9c-9wk9v"] Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.677025 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-worker-7649847b9c-9wk9v" podUID="40234b95-d302-420a-96d7-c56ffe609530" containerName="barbican-worker-log" containerID="cri-o://4a04ce46269bb3a821a0882c3df0c15680043afbc05a402c642893802ec047fb" gracePeriod=30 Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.678250 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-proxy-6b7b5d66dd-8plzj" podUID="1d5c2e68-fb9d-4818-a0c2-27db2516e92b" containerName="proxy-httpd" containerID="cri-o://2c68bcde1b4540d7e10ed3de55e085fa25f7fe350c76cf38edeedca6d5734cb7" gracePeriod=30 Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.678413 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-worker-7649847b9c-9wk9v" podUID="40234b95-d302-420a-96d7-c56ffe609530" containerName="barbican-worker" containerID="cri-o://8645a2a68bc998fad70edddaf73505c46e74320243525ed7f7c2c0c00d201e22" gracePeriod=30 Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.678696 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-proxy-6b7b5d66dd-8plzj" podUID="1d5c2e68-fb9d-4818-a0c2-27db2516e92b" containerName="proxy-server" containerID="cri-o://2c012c9aed3c1cbbfd0f3adb398aa6e942741aa3f3d75ea5d680bf8543a23be2" gracePeriod=30 Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.683552 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/barbican-keystone-listener-config-data: secret "barbican-keystone-listener-config-data" not found Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.683587 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7776f921-0ae7-4a8c-b444-cf5b4b9a4f65-config-data-custom podName:7776f921-0ae7-4a8c-b444-cf5b4b9a4f65 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:45.183576924 +0000 UTC m=+1138.943914891 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data-custom" (UniqueName: "kubernetes.io/secret/7776f921-0ae7-4a8c-b444-cf5b4b9a4f65-config-data-custom") pod "barbican-keystone-listener-7fc5477d66-pz8q2" (UID: "7776f921-0ae7-4a8c-b444-cf5b4b9a4f65") : secret "barbican-keystone-listener-config-data" not found Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.683633 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/barbican-config-data: secret "barbican-config-data" not found Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.683653 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3735ca3d-3764-4d36-b912-fbf0bfb96dd8-config-data podName:3735ca3d-3764-4d36-b912-fbf0bfb96dd8 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:45.183647306 +0000 UTC m=+1138.943985273 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/3735ca3d-3764-4d36-b912-fbf0bfb96dd8-config-data") pod "barbican-api-5df68bf4dd-zbx8h" (UID: "3735ca3d-3764-4d36-b912-fbf0bfb96dd8") : secret "barbican-config-data" not found Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.683681 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/barbican-config-data: secret "barbican-config-data" not found Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.683698 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7776f921-0ae7-4a8c-b444-cf5b4b9a4f65-config-data podName:7776f921-0ae7-4a8c-b444-cf5b4b9a4f65 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:45.18369196 +0000 UTC m=+1138.944029927 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/7776f921-0ae7-4a8c-b444-cf5b4b9a4f65-config-data") pod "barbican-keystone-listener-7fc5477d66-pz8q2" (UID: "7776f921-0ae7-4a8c-b444-cf5b4b9a4f65") : secret "barbican-config-data" not found Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.683949 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-barbican-internal-svc: secret "cert-barbican-internal-svc" not found Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.683972 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3735ca3d-3764-4d36-b912-fbf0bfb96dd8-internal-tls-certs podName:3735ca3d-3764-4d36-b912-fbf0bfb96dd8 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:45.183964493 +0000 UTC m=+1138.944302460 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/3735ca3d-3764-4d36-b912-fbf0bfb96dd8-internal-tls-certs") pod "barbican-api-5df68bf4dd-zbx8h" (UID: "3735ca3d-3764-4d36-b912-fbf0bfb96dd8") : secret "cert-barbican-internal-svc" not found Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.684003 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.684019 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3735ca3d-3764-4d36-b912-fbf0bfb96dd8-combined-ca-bundle podName:3735ca3d-3764-4d36-b912-fbf0bfb96dd8 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:45.184014106 +0000 UTC m=+1138.944352073 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/3735ca3d-3764-4d36-b912-fbf0bfb96dd8-combined-ca-bundle") pod "barbican-api-5df68bf4dd-zbx8h" (UID: "3735ca3d-3764-4d36-b912-fbf0bfb96dd8") : secret "combined-ca-bundle" not found Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.684045 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-barbican-public-svc: secret "cert-barbican-public-svc" not found Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.684061 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3735ca3d-3764-4d36-b912-fbf0bfb96dd8-public-tls-certs podName:3735ca3d-3764-4d36-b912-fbf0bfb96dd8 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:45.184056555 +0000 UTC m=+1138.944394513 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/3735ca3d-3764-4d36-b912-fbf0bfb96dd8-public-tls-certs") pod "barbican-api-5df68bf4dd-zbx8h" (UID: "3735ca3d-3764-4d36-b912-fbf0bfb96dd8") : secret "cert-barbican-public-svc" not found Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.684085 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.684100 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7776f921-0ae7-4a8c-b444-cf5b4b9a4f65-combined-ca-bundle podName:7776f921-0ae7-4a8c-b444-cf5b4b9a4f65 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:45.18409582 +0000 UTC m=+1138.944433787 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/7776f921-0ae7-4a8c-b444-cf5b4b9a4f65-combined-ca-bundle") pod "barbican-keystone-listener-7fc5477d66-pz8q2" (UID: "7776f921-0ae7-4a8c-b444-cf5b4b9a4f65") : secret "combined-ca-bundle" not found Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.684123 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/barbican-api-config-data: secret "barbican-api-config-data" not found Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.684138 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3735ca3d-3764-4d36-b912-fbf0bfb96dd8-config-data-custom podName:3735ca3d-3764-4d36-b912-fbf0bfb96dd8 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:45.184134162 +0000 UTC m=+1138.944472129 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data-custom" (UniqueName: "kubernetes.io/secret/3735ca3d-3764-4d36-b912-fbf0bfb96dd8-config-data-custom") pod "barbican-api-5df68bf4dd-zbx8h" (UID: "3735ca3d-3764-4d36-b912-fbf0bfb96dd8") : secret "barbican-api-config-data" not found Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.711938 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-worker-c4f864f89-58gb9"] Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.729999 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-6b66757dfd-k89tm"] Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.731508 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-keystone-listener-6b66757dfd-k89tm" podUID="69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915" containerName="barbican-keystone-listener-log" containerID="cri-o://43ce3893218e12bafcf3980d1a94c5b6f79210afef04e341874b8c63d98def1c" gracePeriod=30 Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.731615 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-keystone-listener-6b66757dfd-k89tm" podUID="69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915" containerName="barbican-keystone-listener" containerID="cri-o://5a4543f4f5068e723c149b86d815e8bf663acc3075125bad09cc80cc4d87503d" gracePeriod=30 Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.770498 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-api-595757ff5d-frx87"] Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.770939 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-api-595757ff5d-frx87" 
podUID="3c6de94f-c605-43d9-97b5-ccf91e49d1fb" containerName="barbican-api-log" containerID="cri-o://065a0343a2fa20d49288d6aad3e2c8001219424d475f4cf3fa0b18d5f777af97" gracePeriod=30 Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.771109 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-api-595757ff5d-frx87" podUID="3c6de94f-c605-43d9-97b5-ccf91e49d1fb" containerName="barbican-api" containerID="cri-o://ff9f3ae585b003190c9a4e57172ef28f205a59206653a665e67865ef3bac9d47" gracePeriod=30 Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.785616 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/54c4bfe3-42bf-46ea-8c23-0621f7b65686-ovn-rundir\") pod \"54c4bfe3-42bf-46ea-8c23-0621f7b65686\" (UID: \"54c4bfe3-42bf-46ea-8c23-0621f7b65686\") " Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.787147 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/54c4bfe3-42bf-46ea-8c23-0621f7b65686-ovn-rundir" (OuterVolumeSpecName: "ovn-rundir") pod "54c4bfe3-42bf-46ea-8c23-0621f7b65686" (UID: "54c4bfe3-42bf-46ea-8c23-0621f7b65686"). InnerVolumeSpecName "ovn-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.794794 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" podUID="bf355276-9e62-474e-bfb1-616dde5b83bc" containerName="rabbitmq" containerID="cri-o://1e8c1d79d17c69967f38cfb7502a531e6019712fea6395ed1952fa83bb5dfd3a" gracePeriod=604800 Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.796620 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-keystone-listener-7fc5477d66-pz8q2" podStartSLOduration=4.796601258 podStartE2EDuration="4.796601258s" podCreationTimestamp="2026-01-20 17:00:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:00:44.577550731 +0000 UTC m=+1138.337888697" watchObservedRunningTime="2026-01-20 17:00:44.796601258 +0000 UTC m=+1138.556939225" Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.827993 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:00:44 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:00:44 crc kubenswrapper[4558]: Jan 20 17:00:44 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:00:44 crc kubenswrapper[4558]: Jan 20 17:00:44 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:00:44 crc kubenswrapper[4558]: Jan 20 17:00:44 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:00:44 crc kubenswrapper[4558]: Jan 20 17:00:44 crc kubenswrapper[4558]: if [ -n "keystone" ]; then Jan 20 17:00:44 crc kubenswrapper[4558]: GRANT_DATABASE="keystone" Jan 20 17:00:44 crc kubenswrapper[4558]: else Jan 20 17:00:44 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:00:44 crc kubenswrapper[4558]: fi Jan 20 17:00:44 crc kubenswrapper[4558]: Jan 20 17:00:44 crc kubenswrapper[4558]: # going for maximum 
compatibility here: Jan 20 17:00:44 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:00:44 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:00:44 crc kubenswrapper[4558]: # 3. create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:00:44 crc kubenswrapper[4558]: # support updates Jan 20 17:00:44 crc kubenswrapper[4558]: Jan 20 17:00:44 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.829573 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-7fc5477d66-pz8q2"] Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.829641 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"keystone-db-secret\\\" not found\"" pod="openstack-kuttl-tests/keystone-7e4f-account-create-update-pl5sm" podUID="bf9a27d0-1f64-4f43-a2a7-ce0902c6cd25" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.843657 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-api-5df68bf4dd-zbx8h"] Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.847070 4558 scope.go:117] "RemoveContainer" containerID="de6e1b519940940afb11dd7e03f1334e5085e524686926bc4bad044033126f7f" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.854636 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-db-sync-wpq8b"] Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.885066 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-db-sync-wpq8b"] Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.885113 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.888352 4558 reconciler_common.go:293] "Volume detached for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/54c4bfe3-42bf-46ea-8c23-0621f7b65686-ovn-rundir\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.888420 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-cell1-scripts: configmap "openstack-cell1-scripts" not found Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.888458 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/e353328b-4fe7-4637-8174-a3b227ad9761-operator-scripts podName:e353328b-4fe7-4637-8174-a3b227ad9761 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:45.888446604 +0000 UTC m=+1139.648784572 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/e353328b-4fe7-4637-8174-a3b227ad9761-operator-scripts") pod "root-account-create-update-fbtv5" (UID: "e353328b-4fe7-4637-8174-a3b227ad9761") : configmap "openstack-cell1-scripts" not found Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.903446 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-vkskq"] Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.911455 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-vkskq"] Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.922868 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.922910 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.923641 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.934261 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.934624 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae" containerName="mysql-bootstrap" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.934636 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae" containerName="mysql-bootstrap" Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.934654 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae" containerName="galera" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.934660 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae" containerName="galera" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.934800 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae" containerName="galera" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.935597 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.939662 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-config-data" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.940248 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.940353 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-galera-openstack-svc" Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.945350 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:00:44 crc kubenswrapper[4558]: I0120 17:00:44.969847 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.991667 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/keystone: secret "keystone" not found Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.991721 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-credential-keys podName:4969c1af-53c0-435a-bd06-6bd493c81c80 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:45.991708499 +0000 UTC m=+1139.752046456 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "credential-keys" (UniqueName: "kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-credential-keys") pod "keystone-55ff946595-f8gjf" (UID: "4969c1af-53c0-435a-bd06-6bd493c81c80") : secret "keystone" not found Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.991763 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/barbican-config-data: secret "barbican-config-data" not found Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.991782 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/de7f5467-1e83-42f0-86bb-ade85deec8f3-config-data podName:de7f5467-1e83-42f0-86bb-ade85deec8f3 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:45.991775656 +0000 UTC m=+1139.752113623 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/de7f5467-1e83-42f0-86bb-ade85deec8f3-config-data") pod "barbican-worker-c4f864f89-58gb9" (UID: "de7f5467-1e83-42f0-86bb-ade85deec8f3") : secret "barbican-config-data" not found Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.991810 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-keystone-internal-svc: secret "cert-keystone-internal-svc" not found Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.991826 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-internal-tls-certs podName:4969c1af-53c0-435a-bd06-6bd493c81c80 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:45.991821712 +0000 UTC m=+1139.752159680 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-internal-tls-certs") pod "keystone-55ff946595-f8gjf" (UID: "4969c1af-53c0-435a-bd06-6bd493c81c80") : secret "cert-keystone-internal-svc" not found Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.991853 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/keystone-scripts: secret "keystone-scripts" not found Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.991869 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-scripts podName:4969c1af-53c0-435a-bd06-6bd493c81c80 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:45.991864453 +0000 UTC m=+1139.752202419 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "scripts" (UniqueName: "kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-scripts") pod "keystone-55ff946595-f8gjf" (UID: "4969c1af-53c0-435a-bd06-6bd493c81c80") : secret "keystone-scripts" not found Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.991893 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-keystone-public-svc: secret "cert-keystone-public-svc" not found Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.991908 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-public-tls-certs podName:4969c1af-53c0-435a-bd06-6bd493c81c80 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:45.991904036 +0000 UTC m=+1139.752242004 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-public-tls-certs") pod "keystone-55ff946595-f8gjf" (UID: "4969c1af-53c0-435a-bd06-6bd493c81c80") : secret "cert-keystone-public-svc" not found Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.991934 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/barbican-worker-config-data: secret "barbican-worker-config-data" not found Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.991949 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/de7f5467-1e83-42f0-86bb-ade85deec8f3-config-data-custom podName:de7f5467-1e83-42f0-86bb-ade85deec8f3 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:45.991944834 +0000 UTC m=+1139.752282801 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data-custom" (UniqueName: "kubernetes.io/secret/de7f5467-1e83-42f0-86bb-ade85deec8f3-config-data-custom") pod "barbican-worker-c4f864f89-58gb9" (UID: "de7f5467-1e83-42f0-86bb-ade85deec8f3") : secret "barbican-worker-config-data" not found Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.991972 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.991988 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/3f868eba-f4e1-4e32-b271-391cf271fe97-config-data podName:3f868eba-f4e1-4e32-b271-391cf271fe97 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:46.991983487 +0000 UTC m=+1140.752321454 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/3f868eba-f4e1-4e32-b271-391cf271fe97-config-data") pod "rabbitmq-server-0" (UID: "3f868eba-f4e1-4e32-b271-391cf271fe97") : configmap "rabbitmq-config-data" not found Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.992013 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/keystone-config-data: secret "keystone-config-data" not found Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.992027 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-config-data podName:4969c1af-53c0-435a-bd06-6bd493c81c80 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:45.992022971 +0000 UTC m=+1139.752360928 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-config-data") pod "keystone-55ff946595-f8gjf" (UID: "4969c1af-53c0-435a-bd06-6bd493c81c80") : secret "keystone-config-data" not found Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.992049 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.992063 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-combined-ca-bundle podName:4969c1af-53c0-435a-bd06-6bd493c81c80 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:45.992058427 +0000 UTC m=+1139.752396395 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-combined-ca-bundle") pod "keystone-55ff946595-f8gjf" (UID: "4969c1af-53c0-435a-bd06-6bd493c81c80") : secret "combined-ca-bundle" not found Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.992085 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/keystone: secret "keystone" not found Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.992100 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-fernet-keys podName:4969c1af-53c0-435a-bd06-6bd493c81c80 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:45.992096228 +0000 UTC m=+1139.752434195 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "fernet-keys" (UniqueName: "kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-fernet-keys") pod "keystone-55ff946595-f8gjf" (UID: "4969c1af-53c0-435a-bd06-6bd493c81c80") : secret "keystone" not found Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.992123 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:00:44 crc kubenswrapper[4558]: E0120 17:00:44.992137 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/de7f5467-1e83-42f0-86bb-ade85deec8f3-combined-ca-bundle podName:de7f5467-1e83-42f0-86bb-ade85deec8f3 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:45.992133539 +0000 UTC m=+1139.752471506 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/de7f5467-1e83-42f0-86bb-ade85deec8f3-combined-ca-bundle") pod "barbican-worker-c4f864f89-58gb9" (UID: "de7f5467-1e83-42f0-86bb-ade85deec8f3") : secret "combined-ca-bundle" not found Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.003222 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.032682 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-6494546496-58v7b"] Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.032737 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-55ff946595-f8gjf"] Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.032975 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/keystone-6494546496-58v7b" podUID="43781c23-b22a-4449-8306-67efbe8dd6fc" containerName="keystone-api" containerID="cri-o://c6dd483fc55626bbad92f374c9cd3e8b6e558a86ecdf6e7a87f558af6aed4a91" gracePeriod=30 Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.034429 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-7e4f-account-create-update-pl5sm"] Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.039423 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.045412 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-db-create-2jqq4"] Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.059673 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-db-create-2jqq4"] Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.069804 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/rabbitmq-server-0" podUID="3f868eba-f4e1-4e32-b271-391cf271fe97" containerName="rabbitmq" containerID="cri-o://7186b3f7bece934410de1e6b9f5050c2ca0be8663df3206ff0ed31c8bdab8497" gracePeriod=604800 Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.085405 4558 scope.go:117] "RemoveContainer" containerID="00c9d686e4cf00913af81033ddbff9abfc51f8f3c4f1da58ce8a3bff41d506fa" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.100906 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3643a65-70b3-488e-ad03-8b7ad6ae4539-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"d3643a65-70b3-488e-ad03-8b7ad6ae4539\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.101003 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"openstack-galera-0\" (UID: \"d3643a65-70b3-488e-ad03-8b7ad6ae4539\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.101021 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/d3643a65-70b3-488e-ad03-8b7ad6ae4539-config-data-default\") pod \"openstack-galera-0\" (UID: \"d3643a65-70b3-488e-ad03-8b7ad6ae4539\") " 
pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.101094 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/d3643a65-70b3-488e-ad03-8b7ad6ae4539-kolla-config\") pod \"openstack-galera-0\" (UID: \"d3643a65-70b3-488e-ad03-8b7ad6ae4539\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.101125 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7lzzs\" (UniqueName: \"kubernetes.io/projected/d3643a65-70b3-488e-ad03-8b7ad6ae4539-kube-api-access-7lzzs\") pod \"openstack-galera-0\" (UID: \"d3643a65-70b3-488e-ad03-8b7ad6ae4539\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.101213 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d3643a65-70b3-488e-ad03-8b7ad6ae4539-operator-scripts\") pod \"openstack-galera-0\" (UID: \"d3643a65-70b3-488e-ad03-8b7ad6ae4539\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.101320 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/d3643a65-70b3-488e-ad03-8b7ad6ae4539-config-data-generated\") pod \"openstack-galera-0\" (UID: \"d3643a65-70b3-488e-ad03-8b7ad6ae4539\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.101341 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/d3643a65-70b3-488e-ad03-8b7ad6ae4539-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"d3643a65-70b3-488e-ad03-8b7ad6ae4539\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:00:45 crc kubenswrapper[4558]: E0120 17:00:45.116786 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[combined-ca-bundle config-data-default config-data-generated galera-tls-certs kolla-config kube-api-access-7lzzs mysql-db operator-scripts], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack-kuttl-tests/openstack-galera-0" podUID="d3643a65-70b3-488e-ad03-8b7ad6ae4539" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.132071 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.153015 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.194142 4558 scope.go:117] "RemoveContainer" containerID="b8b4710c6079f7f8ec88f1289e78374ee84e488da8b6978ef790e87233e7f5fb" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.202312 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b2ffa52-4d19-46f6-aea5-62fc758def73-combined-ca-bundle\") pod \"3b2ffa52-4d19-46f6-aea5-62fc758def73\" (UID: \"3b2ffa52-4d19-46f6-aea5-62fc758def73\") " Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.202352 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/3b2ffa52-4d19-46f6-aea5-62fc758def73-kube-state-metrics-tls-certs\") pod \"3b2ffa52-4d19-46f6-aea5-62fc758def73\" (UID: \"3b2ffa52-4d19-46f6-aea5-62fc758def73\") " Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.202402 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cae28711-fbe7-40cc-8f31-4b6332ab5378-internal-tls-certs\") pod \"cae28711-fbe7-40cc-8f31-4b6332ab5378\" (UID: \"cae28711-fbe7-40cc-8f31-4b6332ab5378\") " Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.202447 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cae28711-fbe7-40cc-8f31-4b6332ab5378-config-data\") pod \"cae28711-fbe7-40cc-8f31-4b6332ab5378\" (UID: \"cae28711-fbe7-40cc-8f31-4b6332ab5378\") " Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.202483 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cae28711-fbe7-40cc-8f31-4b6332ab5378-public-tls-certs\") pod \"cae28711-fbe7-40cc-8f31-4b6332ab5378\" (UID: \"cae28711-fbe7-40cc-8f31-4b6332ab5378\") " Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.202506 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cae28711-fbe7-40cc-8f31-4b6332ab5378-combined-ca-bundle\") pod \"cae28711-fbe7-40cc-8f31-4b6332ab5378\" (UID: \"cae28711-fbe7-40cc-8f31-4b6332ab5378\") " Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.202529 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cgjvx\" (UniqueName: \"kubernetes.io/projected/3b2ffa52-4d19-46f6-aea5-62fc758def73-kube-api-access-cgjvx\") pod \"3b2ffa52-4d19-46f6-aea5-62fc758def73\" (UID: \"3b2ffa52-4d19-46f6-aea5-62fc758def73\") " Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.202574 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/3b2ffa52-4d19-46f6-aea5-62fc758def73-kube-state-metrics-tls-config\") pod \"3b2ffa52-4d19-46f6-aea5-62fc758def73\" (UID: \"3b2ffa52-4d19-46f6-aea5-62fc758def73\") " Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.202598 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z8rg2\" (UniqueName: \"kubernetes.io/projected/cae28711-fbe7-40cc-8f31-4b6332ab5378-kube-api-access-z8rg2\") pod \"cae28711-fbe7-40cc-8f31-4b6332ab5378\" (UID: \"cae28711-fbe7-40cc-8f31-4b6332ab5378\") " Jan 20 17:00:45 crc 
kubenswrapper[4558]: I0120 17:00:45.202631 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cae28711-fbe7-40cc-8f31-4b6332ab5378-logs\") pod \"cae28711-fbe7-40cc-8f31-4b6332ab5378\" (UID: \"cae28711-fbe7-40cc-8f31-4b6332ab5378\") " Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.202901 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/d3643a65-70b3-488e-ad03-8b7ad6ae4539-config-data-generated\") pod \"openstack-galera-0\" (UID: \"d3643a65-70b3-488e-ad03-8b7ad6ae4539\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.202937 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/d3643a65-70b3-488e-ad03-8b7ad6ae4539-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"d3643a65-70b3-488e-ad03-8b7ad6ae4539\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.202975 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3643a65-70b3-488e-ad03-8b7ad6ae4539-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"d3643a65-70b3-488e-ad03-8b7ad6ae4539\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.203066 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"openstack-galera-0\" (UID: \"d3643a65-70b3-488e-ad03-8b7ad6ae4539\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.203080 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/d3643a65-70b3-488e-ad03-8b7ad6ae4539-config-data-default\") pod \"openstack-galera-0\" (UID: \"d3643a65-70b3-488e-ad03-8b7ad6ae4539\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.203149 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/d3643a65-70b3-488e-ad03-8b7ad6ae4539-kolla-config\") pod \"openstack-galera-0\" (UID: \"d3643a65-70b3-488e-ad03-8b7ad6ae4539\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.203271 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7lzzs\" (UniqueName: \"kubernetes.io/projected/d3643a65-70b3-488e-ad03-8b7ad6ae4539-kube-api-access-7lzzs\") pod \"openstack-galera-0\" (UID: \"d3643a65-70b3-488e-ad03-8b7ad6ae4539\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.203350 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d3643a65-70b3-488e-ad03-8b7ad6ae4539-operator-scripts\") pod \"openstack-galera-0\" (UID: \"d3643a65-70b3-488e-ad03-8b7ad6ae4539\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:00:45 crc kubenswrapper[4558]: E0120 17:00:45.203444 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-scripts: configmap 
"openstack-scripts" not found Jan 20 17:00:45 crc kubenswrapper[4558]: E0120 17:00:45.203480 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/d3643a65-70b3-488e-ad03-8b7ad6ae4539-operator-scripts podName:d3643a65-70b3-488e-ad03-8b7ad6ae4539 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:45.703469558 +0000 UTC m=+1139.463807526 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/d3643a65-70b3-488e-ad03-8b7ad6ae4539-operator-scripts") pod "openstack-galera-0" (UID: "d3643a65-70b3-488e-ad03-8b7ad6ae4539") : configmap "openstack-scripts" not found Jan 20 17:00:45 crc kubenswrapper[4558]: E0120 17:00:45.207720 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:00:45 crc kubenswrapper[4558]: E0120 17:00:45.207932 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d3643a65-70b3-488e-ad03-8b7ad6ae4539-combined-ca-bundle podName:d3643a65-70b3-488e-ad03-8b7ad6ae4539 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:45.70791623 +0000 UTC m=+1139.468254198 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/d3643a65-70b3-488e-ad03-8b7ad6ae4539-combined-ca-bundle") pod "openstack-galera-0" (UID: "d3643a65-70b3-488e-ad03-8b7ad6ae4539") : secret "combined-ca-bundle" not found Jan 20 17:00:45 crc kubenswrapper[4558]: E0120 17:00:45.210281 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-config-data: configmap "openstack-config-data" not found Jan 20 17:00:45 crc kubenswrapper[4558]: E0120 17:00:45.210349 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/d3643a65-70b3-488e-ad03-8b7ad6ae4539-config-data-default podName:d3643a65-70b3-488e-ad03-8b7ad6ae4539 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:45.710332966 +0000 UTC m=+1139.470670933 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data-default" (UniqueName: "kubernetes.io/configmap/d3643a65-70b3-488e-ad03-8b7ad6ae4539-config-data-default") pod "openstack-galera-0" (UID: "d3643a65-70b3-488e-ad03-8b7ad6ae4539") : configmap "openstack-config-data" not found Jan 20 17:00:45 crc kubenswrapper[4558]: E0120 17:00:45.210409 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/barbican-config-data: secret "barbican-config-data" not found Jan 20 17:00:45 crc kubenswrapper[4558]: E0120 17:00:45.210430 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3735ca3d-3764-4d36-b912-fbf0bfb96dd8-config-data podName:3735ca3d-3764-4d36-b912-fbf0bfb96dd8 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:46.210423285 +0000 UTC m=+1139.970761252 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/3735ca3d-3764-4d36-b912-fbf0bfb96dd8-config-data") pod "barbican-api-5df68bf4dd-zbx8h" (UID: "3735ca3d-3764-4d36-b912-fbf0bfb96dd8") : secret "barbican-config-data" not found Jan 20 17:00:45 crc kubenswrapper[4558]: E0120 17:00:45.210459 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/barbican-config-data: secret "barbican-config-data" not found Jan 20 17:00:45 crc kubenswrapper[4558]: E0120 17:00:45.210477 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7776f921-0ae7-4a8c-b444-cf5b4b9a4f65-config-data podName:7776f921-0ae7-4a8c-b444-cf5b4b9a4f65 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:46.210472367 +0000 UTC m=+1139.970810335 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/7776f921-0ae7-4a8c-b444-cf5b4b9a4f65-config-data") pod "barbican-keystone-listener-7fc5477d66-pz8q2" (UID: "7776f921-0ae7-4a8c-b444-cf5b4b9a4f65") : secret "barbican-config-data" not found Jan 20 17:00:45 crc kubenswrapper[4558]: E0120 17:00:45.210503 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-barbican-internal-svc: secret "cert-barbican-internal-svc" not found Jan 20 17:00:45 crc kubenswrapper[4558]: E0120 17:00:45.210519 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3735ca3d-3764-4d36-b912-fbf0bfb96dd8-internal-tls-certs podName:3735ca3d-3764-4d36-b912-fbf0bfb96dd8 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:46.210514117 +0000 UTC m=+1139.970852084 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/3735ca3d-3764-4d36-b912-fbf0bfb96dd8-internal-tls-certs") pod "barbican-api-5df68bf4dd-zbx8h" (UID: "3735ca3d-3764-4d36-b912-fbf0bfb96dd8") : secret "cert-barbican-internal-svc" not found Jan 20 17:00:45 crc kubenswrapper[4558]: E0120 17:00:45.210554 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:00:45 crc kubenswrapper[4558]: E0120 17:00:45.210569 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3735ca3d-3764-4d36-b912-fbf0bfb96dd8-combined-ca-bundle podName:3735ca3d-3764-4d36-b912-fbf0bfb96dd8 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:46.210564361 +0000 UTC m=+1139.970902328 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/3735ca3d-3764-4d36-b912-fbf0bfb96dd8-combined-ca-bundle") pod "barbican-api-5df68bf4dd-zbx8h" (UID: "3735ca3d-3764-4d36-b912-fbf0bfb96dd8") : secret "combined-ca-bundle" not found Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.210708 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"openstack-galera-0\" (UID: \"d3643a65-70b3-488e-ad03-8b7ad6ae4539\") device mount path \"/mnt/openstack/pv13\"" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:00:45 crc kubenswrapper[4558]: E0120 17:00:45.212759 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/barbican-keystone-listener-config-data: secret "barbican-keystone-listener-config-data" not found Jan 20 17:00:45 crc kubenswrapper[4558]: E0120 17:00:45.212799 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7776f921-0ae7-4a8c-b444-cf5b4b9a4f65-config-data-custom podName:7776f921-0ae7-4a8c-b444-cf5b4b9a4f65 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:46.212789245 +0000 UTC m=+1139.973127212 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data-custom" (UniqueName: "kubernetes.io/secret/7776f921-0ae7-4a8c-b444-cf5b4b9a4f65-config-data-custom") pod "barbican-keystone-listener-7fc5477d66-pz8q2" (UID: "7776f921-0ae7-4a8c-b444-cf5b4b9a4f65") : secret "barbican-keystone-listener-config-data" not found Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.213569 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cae28711-fbe7-40cc-8f31-4b6332ab5378-logs" (OuterVolumeSpecName: "logs") pod "cae28711-fbe7-40cc-8f31-4b6332ab5378" (UID: "cae28711-fbe7-40cc-8f31-4b6332ab5378"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.214252 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/d3643a65-70b3-488e-ad03-8b7ad6ae4539-config-data-generated\") pod \"openstack-galera-0\" (UID: \"d3643a65-70b3-488e-ad03-8b7ad6ae4539\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:00:45 crc kubenswrapper[4558]: E0120 17:00:45.214413 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-galera-openstack-svc: secret "cert-galera-openstack-svc" not found Jan 20 17:00:45 crc kubenswrapper[4558]: E0120 17:00:45.214459 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-barbican-public-svc: secret "cert-barbican-public-svc" not found Jan 20 17:00:45 crc kubenswrapper[4558]: E0120 17:00:45.214492 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d3643a65-70b3-488e-ad03-8b7ad6ae4539-galera-tls-certs podName:d3643a65-70b3-488e-ad03-8b7ad6ae4539 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:45.714453704 +0000 UTC m=+1139.474791672 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "galera-tls-certs" (UniqueName: "kubernetes.io/secret/d3643a65-70b3-488e-ad03-8b7ad6ae4539-galera-tls-certs") pod "openstack-galera-0" (UID: "d3643a65-70b3-488e-ad03-8b7ad6ae4539") : secret "cert-galera-openstack-svc" not found Jan 20 17:00:45 crc kubenswrapper[4558]: E0120 17:00:45.214503 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-config-data: configmap "openstack-config-data" not found Jan 20 17:00:45 crc kubenswrapper[4558]: E0120 17:00:45.214509 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3735ca3d-3764-4d36-b912-fbf0bfb96dd8-public-tls-certs podName:3735ca3d-3764-4d36-b912-fbf0bfb96dd8 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:46.214502596 +0000 UTC m=+1139.974840554 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/3735ca3d-3764-4d36-b912-fbf0bfb96dd8-public-tls-certs") pod "barbican-api-5df68bf4dd-zbx8h" (UID: "3735ca3d-3764-4d36-b912-fbf0bfb96dd8") : secret "cert-barbican-public-svc" not found Jan 20 17:00:45 crc kubenswrapper[4558]: E0120 17:00:45.214528 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/d3643a65-70b3-488e-ad03-8b7ad6ae4539-kolla-config podName:d3643a65-70b3-488e-ad03-8b7ad6ae4539 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:45.714515761 +0000 UTC m=+1139.474853728 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kolla-config" (UniqueName: "kubernetes.io/configmap/d3643a65-70b3-488e-ad03-8b7ad6ae4539-kolla-config") pod "openstack-galera-0" (UID: "d3643a65-70b3-488e-ad03-8b7ad6ae4539") : configmap "openstack-config-data" not found Jan 20 17:00:45 crc kubenswrapper[4558]: E0120 17:00:45.218493 4558 projected.go:194] Error preparing data for projected volume kube-api-access-7lzzs for pod openstack-kuttl-tests/openstack-galera-0: failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 17:00:45 crc kubenswrapper[4558]: E0120 17:00:45.218570 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/d3643a65-70b3-488e-ad03-8b7ad6ae4539-kube-api-access-7lzzs podName:d3643a65-70b3-488e-ad03-8b7ad6ae4539 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:45.718553234 +0000 UTC m=+1139.478891201 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-7lzzs" (UniqueName: "kubernetes.io/projected/d3643a65-70b3-488e-ad03-8b7ad6ae4539-kube-api-access-7lzzs") pod "openstack-galera-0" (UID: "d3643a65-70b3-488e-ad03-8b7ad6ae4539") : failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 17:00:45 crc kubenswrapper[4558]: E0120 17:00:45.218651 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/barbican-api-config-data: secret "barbican-api-config-data" not found Jan 20 17:00:45 crc kubenswrapper[4558]: E0120 17:00:45.218675 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3735ca3d-3764-4d36-b912-fbf0bfb96dd8-config-data-custom podName:3735ca3d-3764-4d36-b912-fbf0bfb96dd8 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:46.218667709 +0000 UTC m=+1139.979005676 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "config-data-custom" (UniqueName: "kubernetes.io/secret/3735ca3d-3764-4d36-b912-fbf0bfb96dd8-config-data-custom") pod "barbican-api-5df68bf4dd-zbx8h" (UID: "3735ca3d-3764-4d36-b912-fbf0bfb96dd8") : secret "barbican-api-config-data" not found Jan 20 17:00:45 crc kubenswrapper[4558]: E0120 17:00:45.218708 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:00:45 crc kubenswrapper[4558]: E0120 17:00:45.218727 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7776f921-0ae7-4a8c-b444-cf5b4b9a4f65-combined-ca-bundle podName:7776f921-0ae7-4a8c-b444-cf5b4b9a4f65 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:46.2187213 +0000 UTC m=+1139.979059267 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/7776f921-0ae7-4a8c-b444-cf5b4b9a4f65-combined-ca-bundle") pod "barbican-keystone-listener-7fc5477d66-pz8q2" (UID: "7776f921-0ae7-4a8c-b444-cf5b4b9a4f65") : secret "combined-ca-bundle" not found Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.233918 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3b2ffa52-4d19-46f6-aea5-62fc758def73-kube-api-access-cgjvx" (OuterVolumeSpecName: "kube-api-access-cgjvx") pod "3b2ffa52-4d19-46f6-aea5-62fc758def73" (UID: "3b2ffa52-4d19-46f6-aea5-62fc758def73"). InnerVolumeSpecName "kube-api-access-cgjvx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.238416 4558 scope.go:117] "RemoveContainer" containerID="dca4ae7dca5eb96c8fcc474ac58d3706e17fe078d95aa681217db1b6da9140a3" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.241588 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cae28711-fbe7-40cc-8f31-4b6332ab5378-kube-api-access-z8rg2" (OuterVolumeSpecName: "kube-api-access-z8rg2") pod "cae28711-fbe7-40cc-8f31-4b6332ab5378" (UID: "cae28711-fbe7-40cc-8f31-4b6332ab5378"). InnerVolumeSpecName "kube-api-access-z8rg2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.255458 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"openstack-galera-0\" (UID: \"d3643a65-70b3-488e-ad03-8b7ad6ae4539\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.292224 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cae28711-fbe7-40cc-8f31-4b6332ab5378-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cae28711-fbe7-40cc-8f31-4b6332ab5378" (UID: "cae28711-fbe7-40cc-8f31-4b6332ab5378"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:45 crc kubenswrapper[4558]: E0120 17:00:45.305489 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-neutron-public-svc: secret "cert-neutron-public-svc" not found Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.305517 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cae28711-fbe7-40cc-8f31-4b6332ab5378-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:45 crc kubenswrapper[4558]: E0120 17:00:45.305544 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-public-tls-certs podName:7d0b0e33-9ce7-44d9-8b09-6008715fe1a0 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:47.305530646 +0000 UTC m=+1141.065868614 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-public-tls-certs") pod "neutron-5d557df858-frznf" (UID: "7d0b0e33-9ce7-44d9-8b09-6008715fe1a0") : secret "cert-neutron-public-svc" not found Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.305569 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cae28711-fbe7-40cc-8f31-4b6332ab5378-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.305582 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cgjvx\" (UniqueName: \"kubernetes.io/projected/3b2ffa52-4d19-46f6-aea5-62fc758def73-kube-api-access-cgjvx\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.305594 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z8rg2\" (UniqueName: \"kubernetes.io/projected/cae28711-fbe7-40cc-8f31-4b6332ab5378-kube-api-access-z8rg2\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:45 crc kubenswrapper[4558]: E0120 17:00:45.305597 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/neutron-httpd-config: secret "neutron-httpd-config" not found Jan 20 17:00:45 crc kubenswrapper[4558]: E0120 17:00:45.305632 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-neutron-ovndbs: secret "cert-neutron-ovndbs" not found Jan 20 17:00:45 crc kubenswrapper[4558]: E0120 17:00:45.305641 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-httpd-config podName:7d0b0e33-9ce7-44d9-8b09-6008715fe1a0 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:47.305627147 +0000 UTC m=+1141.065965114 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "httpd-config" (UniqueName: "kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-httpd-config") pod "neutron-5d557df858-frznf" (UID: "7d0b0e33-9ce7-44d9-8b09-6008715fe1a0") : secret "neutron-httpd-config" not found Jan 20 17:00:45 crc kubenswrapper[4558]: E0120 17:00:45.305654 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-ovndb-tls-certs podName:7d0b0e33-9ce7-44d9-8b09-6008715fe1a0 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:47.305647345 +0000 UTC m=+1141.065985312 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "ovndb-tls-certs" (UniqueName: "kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-ovndb-tls-certs") pod "neutron-5d557df858-frznf" (UID: "7d0b0e33-9ce7-44d9-8b09-6008715fe1a0") : secret "cert-neutron-ovndbs" not found Jan 20 17:00:45 crc kubenswrapper[4558]: E0120 17:00:45.305673 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/neutron-config: secret "neutron-config" not found Jan 20 17:00:45 crc kubenswrapper[4558]: E0120 17:00:45.305682 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-neutron-internal-svc: secret "cert-neutron-internal-svc" not found Jan 20 17:00:45 crc kubenswrapper[4558]: E0120 17:00:45.305691 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-config podName:7d0b0e33-9ce7-44d9-8b09-6008715fe1a0 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:47.305685147 +0000 UTC m=+1141.066023114 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-config") pod "neutron-5d557df858-frznf" (UID: "7d0b0e33-9ce7-44d9-8b09-6008715fe1a0") : secret "neutron-config" not found Jan 20 17:00:45 crc kubenswrapper[4558]: E0120 17:00:45.305703 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-internal-tls-certs podName:7d0b0e33-9ce7-44d9-8b09-6008715fe1a0 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:47.305698161 +0000 UTC m=+1141.066036129 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-internal-tls-certs") pod "neutron-5d557df858-frznf" (UID: "7d0b0e33-9ce7-44d9-8b09-6008715fe1a0") : secret "cert-neutron-internal-svc" not found Jan 20 17:00:45 crc kubenswrapper[4558]: E0120 17:00:45.305728 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:00:45 crc kubenswrapper[4558]: E0120 17:00:45.305744 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-combined-ca-bundle podName:7d0b0e33-9ce7-44d9-8b09-6008715fe1a0 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:47.30573983 +0000 UTC m=+1141.066077797 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-combined-ca-bundle") pod "neutron-5d557df858-frznf" (UID: "7d0b0e33-9ce7-44d9-8b09-6008715fe1a0") : secret "combined-ca-bundle" not found Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.334492 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.352002 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b2ffa52-4d19-46f6-aea5-62fc758def73-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3b2ffa52-4d19-46f6-aea5-62fc758def73" (UID: "3b2ffa52-4d19-46f6-aea5-62fc758def73"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.361603 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.366881 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cae28711-fbe7-40cc-8f31-4b6332ab5378-config-data" (OuterVolumeSpecName: "config-data") pod "cae28711-fbe7-40cc-8f31-4b6332ab5378" (UID: "cae28711-fbe7-40cc-8f31-4b6332ab5378"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.366908 4558 scope.go:117] "RemoveContainer" containerID="b8b4710c6079f7f8ec88f1289e78374ee84e488da8b6978ef790e87233e7f5fb" Jan 20 17:00:45 crc kubenswrapper[4558]: E0120 17:00:45.382988 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b8b4710c6079f7f8ec88f1289e78374ee84e488da8b6978ef790e87233e7f5fb\": container with ID starting with b8b4710c6079f7f8ec88f1289e78374ee84e488da8b6978ef790e87233e7f5fb not found: ID does not exist" containerID="b8b4710c6079f7f8ec88f1289e78374ee84e488da8b6978ef790e87233e7f5fb" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.383235 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b8b4710c6079f7f8ec88f1289e78374ee84e488da8b6978ef790e87233e7f5fb"} err="failed to get container status \"b8b4710c6079f7f8ec88f1289e78374ee84e488da8b6978ef790e87233e7f5fb\": rpc error: code = NotFound desc = could not find container \"b8b4710c6079f7f8ec88f1289e78374ee84e488da8b6978ef790e87233e7f5fb\": container with ID starting with b8b4710c6079f7f8ec88f1289e78374ee84e488da8b6978ef790e87233e7f5fb not found: ID does not exist" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.383324 4558 scope.go:117] "RemoveContainer" containerID="dca4ae7dca5eb96c8fcc474ac58d3706e17fe078d95aa681217db1b6da9140a3" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.383536 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b2ffa52-4d19-46f6-aea5-62fc758def73-kube-state-metrics-tls-config" (OuterVolumeSpecName: "kube-state-metrics-tls-config") pod "3b2ffa52-4d19-46f6-aea5-62fc758def73" (UID: "3b2ffa52-4d19-46f6-aea5-62fc758def73"). InnerVolumeSpecName "kube-state-metrics-tls-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.385780 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cae28711-fbe7-40cc-8f31-4b6332ab5378-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "cae28711-fbe7-40cc-8f31-4b6332ab5378" (UID: "cae28711-fbe7-40cc-8f31-4b6332ab5378"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:45 crc kubenswrapper[4558]: E0120 17:00:45.385827 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dca4ae7dca5eb96c8fcc474ac58d3706e17fe078d95aa681217db1b6da9140a3\": container with ID starting with dca4ae7dca5eb96c8fcc474ac58d3706e17fe078d95aa681217db1b6da9140a3 not found: ID does not exist" containerID="dca4ae7dca5eb96c8fcc474ac58d3706e17fe078d95aa681217db1b6da9140a3" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.385860 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dca4ae7dca5eb96c8fcc474ac58d3706e17fe078d95aa681217db1b6da9140a3"} err="failed to get container status \"dca4ae7dca5eb96c8fcc474ac58d3706e17fe078d95aa681217db1b6da9140a3\": rpc error: code = NotFound desc = could not find container \"dca4ae7dca5eb96c8fcc474ac58d3706e17fe078d95aa681217db1b6da9140a3\": container with ID starting with dca4ae7dca5eb96c8fcc474ac58d3706e17fe078d95aa681217db1b6da9140a3 not found: ID does not exist" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.385884 4558 scope.go:117] "RemoveContainer" containerID="93541e9fd292f8f2cfa2ba3ec3bd317e9352f88de020c5959cf2c84ba5b458ba" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.393356 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cae28711-fbe7-40cc-8f31-4b6332ab5378-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "cae28711-fbe7-40cc-8f31-4b6332ab5378" (UID: "cae28711-fbe7-40cc-8f31-4b6332ab5378"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.404877 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.406915 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9wtwl\" (UniqueName: \"kubernetes.io/projected/b28e6999-784e-4577-88bb-db648f7a3cbc-kube-api-access-9wtwl\") pod \"b28e6999-784e-4577-88bb-db648f7a3cbc\" (UID: \"b28e6999-784e-4577-88bb-db648f7a3cbc\") " Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.406947 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"b28e6999-784e-4577-88bb-db648f7a3cbc\" (UID: \"b28e6999-784e-4577-88bb-db648f7a3cbc\") " Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.406993 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/764c7a15-6a1a-470c-9d0b-a63ed418cc09-internal-tls-certs\") pod \"764c7a15-6a1a-470c-9d0b-a63ed418cc09\" (UID: \"764c7a15-6a1a-470c-9d0b-a63ed418cc09\") " Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.407028 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b28e6999-784e-4577-88bb-db648f7a3cbc-httpd-run\") pod \"b28e6999-784e-4577-88bb-db648f7a3cbc\" (UID: \"b28e6999-784e-4577-88bb-db648f7a3cbc\") " Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.407050 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/764c7a15-6a1a-470c-9d0b-a63ed418cc09-logs\") pod \"764c7a15-6a1a-470c-9d0b-a63ed418cc09\" (UID: \"764c7a15-6a1a-470c-9d0b-a63ed418cc09\") " Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.407085 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b28e6999-784e-4577-88bb-db648f7a3cbc-logs\") pod \"b28e6999-784e-4577-88bb-db648f7a3cbc\" (UID: \"b28e6999-784e-4577-88bb-db648f7a3cbc\") " Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.407104 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b28e6999-784e-4577-88bb-db648f7a3cbc-internal-tls-certs\") pod \"b28e6999-784e-4577-88bb-db648f7a3cbc\" (UID: \"b28e6999-784e-4577-88bb-db648f7a3cbc\") " Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.407137 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/764c7a15-6a1a-470c-9d0b-a63ed418cc09-config-data-custom\") pod \"764c7a15-6a1a-470c-9d0b-a63ed418cc09\" (UID: \"764c7a15-6a1a-470c-9d0b-a63ed418cc09\") " Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.407153 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/764c7a15-6a1a-470c-9d0b-a63ed418cc09-etc-machine-id\") pod \"764c7a15-6a1a-470c-9d0b-a63ed418cc09\" (UID: \"764c7a15-6a1a-470c-9d0b-a63ed418cc09\") " Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.407184 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/764c7a15-6a1a-470c-9d0b-a63ed418cc09-public-tls-certs\") pod \"764c7a15-6a1a-470c-9d0b-a63ed418cc09\" (UID: \"764c7a15-6a1a-470c-9d0b-a63ed418cc09\") " 
Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.407220 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b28e6999-784e-4577-88bb-db648f7a3cbc-scripts\") pod \"b28e6999-784e-4577-88bb-db648f7a3cbc\" (UID: \"b28e6999-784e-4577-88bb-db648f7a3cbc\") " Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.407253 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/764c7a15-6a1a-470c-9d0b-a63ed418cc09-config-data\") pod \"764c7a15-6a1a-470c-9d0b-a63ed418cc09\" (UID: \"764c7a15-6a1a-470c-9d0b-a63ed418cc09\") " Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.407313 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b28e6999-784e-4577-88bb-db648f7a3cbc-combined-ca-bundle\") pod \"b28e6999-784e-4577-88bb-db648f7a3cbc\" (UID: \"b28e6999-784e-4577-88bb-db648f7a3cbc\") " Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.407328 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/764c7a15-6a1a-470c-9d0b-a63ed418cc09-combined-ca-bundle\") pod \"764c7a15-6a1a-470c-9d0b-a63ed418cc09\" (UID: \"764c7a15-6a1a-470c-9d0b-a63ed418cc09\") " Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.407356 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9vwjp\" (UniqueName: \"kubernetes.io/projected/764c7a15-6a1a-470c-9d0b-a63ed418cc09-kube-api-access-9vwjp\") pod \"764c7a15-6a1a-470c-9d0b-a63ed418cc09\" (UID: \"764c7a15-6a1a-470c-9d0b-a63ed418cc09\") " Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.407416 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b28e6999-784e-4577-88bb-db648f7a3cbc-config-data\") pod \"b28e6999-784e-4577-88bb-db648f7a3cbc\" (UID: \"b28e6999-784e-4577-88bb-db648f7a3cbc\") " Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.407441 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/764c7a15-6a1a-470c-9d0b-a63ed418cc09-scripts\") pod \"764c7a15-6a1a-470c-9d0b-a63ed418cc09\" (UID: \"764c7a15-6a1a-470c-9d0b-a63ed418cc09\") " Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.407955 4558 reconciler_common.go:293] "Volume detached for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/3b2ffa52-4d19-46f6-aea5-62fc758def73-kube-state-metrics-tls-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.407969 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b2ffa52-4d19-46f6-aea5-62fc758def73-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.407979 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cae28711-fbe7-40cc-8f31-4b6332ab5378-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.407989 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cae28711-fbe7-40cc-8f31-4b6332ab5378-config-data\") on node \"crc\" DevicePath \"\"" Jan 
20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.407996 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cae28711-fbe7-40cc-8f31-4b6332ab5378-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.410275 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b28e6999-784e-4577-88bb-db648f7a3cbc-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "b28e6999-784e-4577-88bb-db648f7a3cbc" (UID: "b28e6999-784e-4577-88bb-db648f7a3cbc"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.410279 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/764c7a15-6a1a-470c-9d0b-a63ed418cc09-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "764c7a15-6a1a-470c-9d0b-a63ed418cc09" (UID: "764c7a15-6a1a-470c-9d0b-a63ed418cc09"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.410349 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/764c7a15-6a1a-470c-9d0b-a63ed418cc09-logs" (OuterVolumeSpecName: "logs") pod "764c7a15-6a1a-470c-9d0b-a63ed418cc09" (UID: "764c7a15-6a1a-470c-9d0b-a63ed418cc09"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.410487 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b28e6999-784e-4577-88bb-db648f7a3cbc-logs" (OuterVolumeSpecName: "logs") pod "b28e6999-784e-4577-88bb-db648f7a3cbc" (UID: "b28e6999-784e-4577-88bb-db648f7a3cbc"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.420449 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "glance") pod "b28e6999-784e-4577-88bb-db648f7a3cbc" (UID: "b28e6999-784e-4577-88bb-db648f7a3cbc"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.421351 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/764c7a15-6a1a-470c-9d0b-a63ed418cc09-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "764c7a15-6a1a-470c-9d0b-a63ed418cc09" (UID: "764c7a15-6a1a-470c-9d0b-a63ed418cc09"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.421903 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/764c7a15-6a1a-470c-9d0b-a63ed418cc09-scripts" (OuterVolumeSpecName: "scripts") pod "764c7a15-6a1a-470c-9d0b-a63ed418cc09" (UID: "764c7a15-6a1a-470c-9d0b-a63ed418cc09"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.434345 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b28e6999-784e-4577-88bb-db648f7a3cbc-scripts" (OuterVolumeSpecName: "scripts") pod "b28e6999-784e-4577-88bb-db648f7a3cbc" (UID: "b28e6999-784e-4577-88bb-db648f7a3cbc"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.434521 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/764c7a15-6a1a-470c-9d0b-a63ed418cc09-kube-api-access-9vwjp" (OuterVolumeSpecName: "kube-api-access-9vwjp") pod "764c7a15-6a1a-470c-9d0b-a63ed418cc09" (UID: "764c7a15-6a1a-470c-9d0b-a63ed418cc09"). InnerVolumeSpecName "kube-api-access-9vwjp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.434717 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b28e6999-784e-4577-88bb-db648f7a3cbc-kube-api-access-9wtwl" (OuterVolumeSpecName: "kube-api-access-9wtwl") pod "b28e6999-784e-4577-88bb-db648f7a3cbc" (UID: "b28e6999-784e-4577-88bb-db648f7a3cbc"). InnerVolumeSpecName "kube-api-access-9wtwl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.441741 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b2ffa52-4d19-46f6-aea5-62fc758def73-kube-state-metrics-tls-certs" (OuterVolumeSpecName: "kube-state-metrics-tls-certs") pod "3b2ffa52-4d19-46f6-aea5-62fc758def73" (UID: "3b2ffa52-4d19-46f6-aea5-62fc758def73"). InnerVolumeSpecName "kube-state-metrics-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.477586 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b28e6999-784e-4577-88bb-db648f7a3cbc-config-data" (OuterVolumeSpecName: "config-data") pod "b28e6999-784e-4577-88bb-db648f7a3cbc" (UID: "b28e6999-784e-4577-88bb-db648f7a3cbc"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.478290 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.478910 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/764c7a15-6a1a-470c-9d0b-a63ed418cc09-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "764c7a15-6a1a-470c-9d0b-a63ed418cc09" (UID: "764c7a15-6a1a-470c-9d0b-a63ed418cc09"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.479374 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.508519 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/e554db35-2dba-4138-9ca0-bd1371a9c63d-nova-metadata-tls-certs\") pod \"e554db35-2dba-4138-9ca0-bd1371a9c63d\" (UID: \"e554db35-2dba-4138-9ca0-bd1371a9c63d\") " Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.508622 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e554db35-2dba-4138-9ca0-bd1371a9c63d-combined-ca-bundle\") pod \"e554db35-2dba-4138-9ca0-bd1371a9c63d\" (UID: \"e554db35-2dba-4138-9ca0-bd1371a9c63d\") " Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.508650 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxx67\" (UniqueName: \"kubernetes.io/projected/e554db35-2dba-4138-9ca0-bd1371a9c63d-kube-api-access-wxx67\") pod \"e554db35-2dba-4138-9ca0-bd1371a9c63d\" (UID: \"e554db35-2dba-4138-9ca0-bd1371a9c63d\") " Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.508666 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e554db35-2dba-4138-9ca0-bd1371a9c63d-config-data\") pod \"e554db35-2dba-4138-9ca0-bd1371a9c63d\" (UID: \"e554db35-2dba-4138-9ca0-bd1371a9c63d\") " Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.508689 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e554db35-2dba-4138-9ca0-bd1371a9c63d-logs\") pod \"e554db35-2dba-4138-9ca0-bd1371a9c63d\" (UID: \"e554db35-2dba-4138-9ca0-bd1371a9c63d\") " Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.508834 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/764c7a15-6a1a-470c-9d0b-a63ed418cc09-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "764c7a15-6a1a-470c-9d0b-a63ed418cc09" (UID: "764c7a15-6a1a-470c-9d0b-a63ed418cc09"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.509482 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e554db35-2dba-4138-9ca0-bd1371a9c63d-logs" (OuterVolumeSpecName: "logs") pod "e554db35-2dba-4138-9ca0-bd1371a9c63d" (UID: "e554db35-2dba-4138-9ca0-bd1371a9c63d"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.509513 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b28e6999-784e-4577-88bb-db648f7a3cbc-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.509645 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/764c7a15-6a1a-470c-9d0b-a63ed418cc09-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.509736 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9wtwl\" (UniqueName: \"kubernetes.io/projected/b28e6999-784e-4577-88bb-db648f7a3cbc-kube-api-access-9wtwl\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.509807 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.509861 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/764c7a15-6a1a-470c-9d0b-a63ed418cc09-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.509924 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b28e6999-784e-4577-88bb-db648f7a3cbc-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.509975 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/764c7a15-6a1a-470c-9d0b-a63ed418cc09-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.510024 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b28e6999-784e-4577-88bb-db648f7a3cbc-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.510073 4558 reconciler_common.go:293] "Volume detached for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/3b2ffa52-4d19-46f6-aea5-62fc758def73-kube-state-metrics-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.510122 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/764c7a15-6a1a-470c-9d0b-a63ed418cc09-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.510192 4558 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/764c7a15-6a1a-470c-9d0b-a63ed418cc09-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.510266 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/764c7a15-6a1a-470c-9d0b-a63ed418cc09-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.510320 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b28e6999-784e-4577-88bb-db648f7a3cbc-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.510370 4558 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9vwjp\" (UniqueName: \"kubernetes.io/projected/764c7a15-6a1a-470c-9d0b-a63ed418cc09-kube-api-access-9vwjp\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.515454 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e554db35-2dba-4138-9ca0-bd1371a9c63d-kube-api-access-wxx67" (OuterVolumeSpecName: "kube-api-access-wxx67") pod "e554db35-2dba-4138-9ca0-bd1371a9c63d" (UID: "e554db35-2dba-4138-9ca0-bd1371a9c63d"). InnerVolumeSpecName "kube-api-access-wxx67". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.516982 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b28e6999-784e-4577-88bb-db648f7a3cbc-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "b28e6999-784e-4577-88bb-db648f7a3cbc" (UID: "b28e6999-784e-4577-88bb-db648f7a3cbc"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.529099 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.544598 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/764c7a15-6a1a-470c-9d0b-a63ed418cc09-config-data" (OuterVolumeSpecName: "config-data") pod "764c7a15-6a1a-470c-9d0b-a63ed418cc09" (UID: "764c7a15-6a1a-470c-9d0b-a63ed418cc09"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.561362 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b28e6999-784e-4577-88bb-db648f7a3cbc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b28e6999-784e-4577-88bb-db648f7a3cbc" (UID: "b28e6999-784e-4577-88bb-db648f7a3cbc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.562313 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e554db35-2dba-4138-9ca0-bd1371a9c63d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e554db35-2dba-4138-9ca0-bd1371a9c63d" (UID: "e554db35-2dba-4138-9ca0-bd1371a9c63d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.566085 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e554db35-2dba-4138-9ca0-bd1371a9c63d-config-data" (OuterVolumeSpecName: "config-data") pod "e554db35-2dba-4138-9ca0-bd1371a9c63d" (UID: "e554db35-2dba-4138-9ca0-bd1371a9c63d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.572325 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/764c7a15-6a1a-470c-9d0b-a63ed418cc09-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "764c7a15-6a1a-470c-9d0b-a63ed418cc09" (UID: "764c7a15-6a1a-470c-9d0b-a63ed418cc09"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.575448 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-d54787f8d-7fznp" event={"ID":"c268a2f5-7c67-4935-8f3e-bdd83aeccc95","Type":"ContainerStarted","Data":"174b23feee097674daeefb60dbe7c17cfa25832ffd4e0ad08f3add5f527ebf7e"} Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.575663 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/placement-d54787f8d-7fznp" podUID="c268a2f5-7c67-4935-8f3e-bdd83aeccc95" containerName="placement-log" containerID="cri-o://7711caea9ce1243c202dbf4adf45769a91fe36f8d08e7c2d792cbee835637b2d" gracePeriod=30 Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.575688 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/placement-d54787f8d-7fznp" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.575718 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/placement-d54787f8d-7fznp" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.575740 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/placement-d54787f8d-7fznp" podUID="c268a2f5-7c67-4935-8f3e-bdd83aeccc95" containerName="placement-api" containerID="cri-o://174b23feee097674daeefb60dbe7c17cfa25832ffd4e0ad08f3add5f527ebf7e" gracePeriod=30 Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.586588 4558 generic.go:334] "Generic (PLEG): container finished" podID="cae28711-fbe7-40cc-8f31-4b6332ab5378" containerID="27c4f1ab95cdf957befc95b02a160445223047218baee3603004129258c0b5fa" exitCode=0 Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.586752 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"cae28711-fbe7-40cc-8f31-4b6332ab5378","Type":"ContainerDied","Data":"27c4f1ab95cdf957befc95b02a160445223047218baee3603004129258c0b5fa"} Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.586796 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"cae28711-fbe7-40cc-8f31-4b6332ab5378","Type":"ContainerDied","Data":"e3192ae96299f5cf335275f46b8831871adafba93bf06e759065461f515313f9"} Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.586815 4558 scope.go:117] "RemoveContainer" containerID="27c4f1ab95cdf957befc95b02a160445223047218baee3603004129258c0b5fa" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.586892 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.600672 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/placement-d54787f8d-7fznp" podStartSLOduration=5.600655593 podStartE2EDuration="5.600655593s" podCreationTimestamp="2026-01-20 17:00:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:00:45.598491032 +0000 UTC m=+1139.358828999" watchObservedRunningTime="2026-01-20 17:00:45.600655593 +0000 UTC m=+1139.360993560" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.600969 4558 generic.go:334] "Generic (PLEG): container finished" podID="7d0b0e33-9ce7-44d9-8b09-6008715fe1a0" containerID="0db5ea38beca13c9805cb3405d85b10e954c953b65c96cec72dc60b73ae7e610" exitCode=0 Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.601035 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-5d557df858-frznf" event={"ID":"7d0b0e33-9ce7-44d9-8b09-6008715fe1a0","Type":"ContainerDied","Data":"0db5ea38beca13c9805cb3405d85b10e954c953b65c96cec72dc60b73ae7e610"} Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.604239 4558 generic.go:334] "Generic (PLEG): container finished" podID="d2ccd35d-37c4-450d-b04c-ac505e35b0e8" containerID="3a7a952277efc163959dfa821edbe7eed459482ba7b2f738ddd5297a1de3b321" exitCode=0 Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.604289 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"d2ccd35d-37c4-450d-b04c-ac505e35b0e8","Type":"ContainerDied","Data":"3a7a952277efc163959dfa821edbe7eed459482ba7b2f738ddd5297a1de3b321"} Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.604315 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"d2ccd35d-37c4-450d-b04c-ac505e35b0e8","Type":"ContainerDied","Data":"aebe50fb6ffccea5e397bc2849dcc3242a96e322c460d57c3ee743b4a1c87e39"} Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.604445 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.610693 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d2ccd35d-37c4-450d-b04c-ac505e35b0e8-scripts\") pod \"d2ccd35d-37c4-450d-b04c-ac505e35b0e8\" (UID: \"d2ccd35d-37c4-450d-b04c-ac505e35b0e8\") " Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.610720 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d2ccd35d-37c4-450d-b04c-ac505e35b0e8-httpd-run\") pod \"d2ccd35d-37c4-450d-b04c-ac505e35b0e8\" (UID: \"d2ccd35d-37c4-450d-b04c-ac505e35b0e8\") " Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.610748 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2ccd35d-37c4-450d-b04c-ac505e35b0e8-combined-ca-bundle\") pod \"d2ccd35d-37c4-450d-b04c-ac505e35b0e8\" (UID: \"d2ccd35d-37c4-450d-b04c-ac505e35b0e8\") " Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.610830 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b8911a4f-a706-4956-9028-138c018a92ba-config-data-custom\") pod \"b8911a4f-a706-4956-9028-138c018a92ba\" (UID: \"b8911a4f-a706-4956-9028-138c018a92ba\") " Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.610877 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b8911a4f-a706-4956-9028-138c018a92ba-etc-machine-id\") pod \"b8911a4f-a706-4956-9028-138c018a92ba\" (UID: \"b8911a4f-a706-4956-9028-138c018a92ba\") " Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.610906 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d2ccd35d-37c4-450d-b04c-ac505e35b0e8-config-data\") pod \"d2ccd35d-37c4-450d-b04c-ac505e35b0e8\" (UID: \"d2ccd35d-37c4-450d-b04c-ac505e35b0e8\") " Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.610946 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"d2ccd35d-37c4-450d-b04c-ac505e35b0e8\" (UID: \"d2ccd35d-37c4-450d-b04c-ac505e35b0e8\") " Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.611054 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8911a4f-a706-4956-9028-138c018a92ba-config-data\") pod \"b8911a4f-a706-4956-9028-138c018a92ba\" (UID: \"b8911a4f-a706-4956-9028-138c018a92ba\") " Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.611129 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-drvw9\" (UniqueName: \"kubernetes.io/projected/d2ccd35d-37c4-450d-b04c-ac505e35b0e8-kube-api-access-drvw9\") pod \"d2ccd35d-37c4-450d-b04c-ac505e35b0e8\" (UID: \"d2ccd35d-37c4-450d-b04c-ac505e35b0e8\") " Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.611237 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d2ccd35d-37c4-450d-b04c-ac505e35b0e8-public-tls-certs\") pod \"d2ccd35d-37c4-450d-b04c-ac505e35b0e8\" (UID: 
\"d2ccd35d-37c4-450d-b04c-ac505e35b0e8\") " Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.611259 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5l9bc\" (UniqueName: \"kubernetes.io/projected/b8911a4f-a706-4956-9028-138c018a92ba-kube-api-access-5l9bc\") pod \"b8911a4f-a706-4956-9028-138c018a92ba\" (UID: \"b8911a4f-a706-4956-9028-138c018a92ba\") " Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.611305 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b8911a4f-a706-4956-9028-138c018a92ba-scripts\") pod \"b8911a4f-a706-4956-9028-138c018a92ba\" (UID: \"b8911a4f-a706-4956-9028-138c018a92ba\") " Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.611366 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d2ccd35d-37c4-450d-b04c-ac505e35b0e8-logs\") pod \"d2ccd35d-37c4-450d-b04c-ac505e35b0e8\" (UID: \"d2ccd35d-37c4-450d-b04c-ac505e35b0e8\") " Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.611392 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8911a4f-a706-4956-9028-138c018a92ba-combined-ca-bundle\") pod \"b8911a4f-a706-4956-9028-138c018a92ba\" (UID: \"b8911a4f-a706-4956-9028-138c018a92ba\") " Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.612037 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/764c7a15-6a1a-470c-9d0b-a63ed418cc09-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.612055 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b28e6999-784e-4577-88bb-db648f7a3cbc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.612064 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/764c7a15-6a1a-470c-9d0b-a63ed418cc09-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.612073 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e554db35-2dba-4138-9ca0-bd1371a9c63d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.612082 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxx67\" (UniqueName: \"kubernetes.io/projected/e554db35-2dba-4138-9ca0-bd1371a9c63d-kube-api-access-wxx67\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.612090 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e554db35-2dba-4138-9ca0-bd1371a9c63d-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.612114 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e554db35-2dba-4138-9ca0-bd1371a9c63d-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.612123 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" 
Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.612130 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b28e6999-784e-4577-88bb-db648f7a3cbc-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.612811 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d2ccd35d-37c4-450d-b04c-ac505e35b0e8-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "d2ccd35d-37c4-450d-b04c-ac505e35b0e8" (UID: "d2ccd35d-37c4-450d-b04c-ac505e35b0e8"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.618869 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "glance") pod "d2ccd35d-37c4-450d-b04c-ac505e35b0e8" (UID: "d2ccd35d-37c4-450d-b04c-ac505e35b0e8"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.620572 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b8911a4f-a706-4956-9028-138c018a92ba-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "b8911a4f-a706-4956-9028-138c018a92ba" (UID: "b8911a4f-a706-4956-9028-138c018a92ba"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.620587 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d2ccd35d-37c4-450d-b04c-ac505e35b0e8-logs" (OuterVolumeSpecName: "logs") pod "d2ccd35d-37c4-450d-b04c-ac505e35b0e8" (UID: "d2ccd35d-37c4-450d-b04c-ac505e35b0e8"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.622728 4558 generic.go:334] "Generic (PLEG): container finished" podID="10c92abf-cd48-4659-8595-ce9610c0fe2e" containerID="6c101cbe30eb5d950364b9539489f846cc6d1f7298614409d0709726ebba0b06" exitCode=0 Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.622855 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"10c92abf-cd48-4659-8595-ce9610c0fe2e","Type":"ContainerDied","Data":"6c101cbe30eb5d950364b9539489f846cc6d1f7298614409d0709726ebba0b06"} Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.625335 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d2ccd35d-37c4-450d-b04c-ac505e35b0e8-kube-api-access-drvw9" (OuterVolumeSpecName: "kube-api-access-drvw9") pod "d2ccd35d-37c4-450d-b04c-ac505e35b0e8" (UID: "d2ccd35d-37c4-450d-b04c-ac505e35b0e8"). InnerVolumeSpecName "kube-api-access-drvw9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.626586 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.626753 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.626787 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"3b2ffa52-4d19-46f6-aea5-62fc758def73","Type":"ContainerDied","Data":"f5a8cebf2ed809f975d647e2856e22eeb5999054069b00b7875007c34dce0cfb"} Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.626850 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d2ccd35d-37c4-450d-b04c-ac505e35b0e8-scripts" (OuterVolumeSpecName: "scripts") pod "d2ccd35d-37c4-450d-b04c-ac505e35b0e8" (UID: "d2ccd35d-37c4-450d-b04c-ac505e35b0e8"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.631047 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.633795 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8911a4f-a706-4956-9028-138c018a92ba-scripts" (OuterVolumeSpecName: "scripts") pod "b8911a4f-a706-4956-9028-138c018a92ba" (UID: "b8911a4f-a706-4956-9028-138c018a92ba"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.634501 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-7e4f-account-create-update-pl5sm" event={"ID":"bf9a27d0-1f64-4f43-a2a7-ce0902c6cd25","Type":"ContainerStarted","Data":"f9b251e73fdf0e6718ee26d92886dcdbac0ca8bedbf7a3dda606ca3e8f2b2aa0"} Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.634560 4558 scope.go:117] "RemoveContainer" containerID="7f9dbce9ba44238985c3e3d3fb5a42a8258f76871b2f32a165f9c1ead22275de" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.634955 4558 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openstack-kuttl-tests/keystone-7e4f-account-create-update-pl5sm" secret="" err="secret \"galera-openstack-dockercfg-7wr65\" not found" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.638512 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b8911a4f-a706-4956-9028-138c018a92ba-kube-api-access-5l9bc" (OuterVolumeSpecName: "kube-api-access-5l9bc") pod "b8911a4f-a706-4956-9028-138c018a92ba" (UID: "b8911a4f-a706-4956-9028-138c018a92ba"). InnerVolumeSpecName "kube-api-access-5l9bc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.638517 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8911a4f-a706-4956-9028-138c018a92ba-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "b8911a4f-a706-4956-9028-138c018a92ba" (UID: "b8911a4f-a706-4956-9028-138c018a92ba"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:45 crc kubenswrapper[4558]: E0120 17:00:45.643471 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:00:45 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:00:45 crc kubenswrapper[4558]: Jan 20 17:00:45 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:00:45 crc kubenswrapper[4558]: Jan 20 17:00:45 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:00:45 crc kubenswrapper[4558]: Jan 20 17:00:45 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:00:45 crc kubenswrapper[4558]: Jan 20 17:00:45 crc kubenswrapper[4558]: if [ -n "keystone" ]; then Jan 20 17:00:45 crc kubenswrapper[4558]: GRANT_DATABASE="keystone" Jan 20 17:00:45 crc kubenswrapper[4558]: else Jan 20 17:00:45 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:00:45 crc kubenswrapper[4558]: fi Jan 20 17:00:45 crc kubenswrapper[4558]: Jan 20 17:00:45 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:00:45 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:00:45 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:00:45 crc kubenswrapper[4558]: # 3. create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:00:45 crc kubenswrapper[4558]: # support updates Jan 20 17:00:45 crc kubenswrapper[4558]: Jan 20 17:00:45 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:00:45 crc kubenswrapper[4558]: E0120 17:00:45.644861 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"keystone-db-secret\\\" not found\"" pod="openstack-kuttl-tests/keystone-7e4f-account-create-update-pl5sm" podUID="bf9a27d0-1f64-4f43-a2a7-ce0902c6cd25" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.648096 4558 generic.go:334] "Generic (PLEG): container finished" podID="40234b95-d302-420a-96d7-c56ffe609530" containerID="4a04ce46269bb3a821a0882c3df0c15680043afbc05a402c642893802ec047fb" exitCode=143 Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.648183 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-7649847b9c-9wk9v" event={"ID":"40234b95-d302-420a-96d7-c56ffe609530","Type":"ContainerDied","Data":"4a04ce46269bb3a821a0882c3df0c15680043afbc05a402c642893802ec047fb"} Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.650904 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.656630 4558 generic.go:334] "Generic (PLEG): container finished" podID="b8911a4f-a706-4956-9028-138c018a92ba" containerID="8c01eafed347e73a4f6bbd3d6df2573984ce045e60275b9c6bccf3c8df4b43d7" exitCode=0 Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.656734 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"b8911a4f-a706-4956-9028-138c018a92ba","Type":"ContainerDied","Data":"8c01eafed347e73a4f6bbd3d6df2573984ce045e60275b9c6bccf3c8df4b43d7"} Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.656815 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"b8911a4f-a706-4956-9028-138c018a92ba","Type":"ContainerDied","Data":"bb97393e531ae7f10b20f798c00eb1b76a194278be5ba6288ed018bb78edba2b"} Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.656905 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.657755 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-proxy-6b7b5d66dd-8plzj" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.659796 4558 generic.go:334] "Generic (PLEG): container finished" podID="764c7a15-6a1a-470c-9d0b-a63ed418cc09" containerID="44cfb2c4bfc8454ac6259ca35fbc70464e44213a38cdb484b02b66bc424c90d5" exitCode=0 Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.659846 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"764c7a15-6a1a-470c-9d0b-a63ed418cc09","Type":"ContainerDied","Data":"44cfb2c4bfc8454ac6259ca35fbc70464e44213a38cdb484b02b66bc424c90d5"} Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.659869 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"764c7a15-6a1a-470c-9d0b-a63ed418cc09","Type":"ContainerDied","Data":"b1cb1f1f4309dc51f85279c592856913ccf83c72a5d397ad1932b9a9a1ae4bf2"} Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.659904 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.667674 4558 generic.go:334] "Generic (PLEG): container finished" podID="1d5c2e68-fb9d-4818-a0c2-27db2516e92b" containerID="2c012c9aed3c1cbbfd0f3adb398aa6e942741aa3f3d75ea5d680bf8543a23be2" exitCode=0 Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.667704 4558 generic.go:334] "Generic (PLEG): container finished" podID="1d5c2e68-fb9d-4818-a0c2-27db2516e92b" containerID="2c68bcde1b4540d7e10ed3de55e085fa25f7fe350c76cf38edeedca6d5734cb7" exitCode=0 Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.667738 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-6b7b5d66dd-8plzj" event={"ID":"1d5c2e68-fb9d-4818-a0c2-27db2516e92b","Type":"ContainerDied","Data":"2c012c9aed3c1cbbfd0f3adb398aa6e942741aa3f3d75ea5d680bf8543a23be2"} Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.667761 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-6b7b5d66dd-8plzj" event={"ID":"1d5c2e68-fb9d-4818-a0c2-27db2516e92b","Type":"ContainerDied","Data":"2c68bcde1b4540d7e10ed3de55e085fa25f7fe350c76cf38edeedca6d5734cb7"} Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.667815 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-proxy-6b7b5d66dd-8plzj" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.673227 4558 generic.go:334] "Generic (PLEG): container finished" podID="3c6de94f-c605-43d9-97b5-ccf91e49d1fb" containerID="065a0343a2fa20d49288d6aad3e2c8001219424d475f4cf3fa0b18d5f777af97" exitCode=143 Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.673281 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-595757ff5d-frx87" event={"ID":"3c6de94f-c605-43d9-97b5-ccf91e49d1fb","Type":"ContainerDied","Data":"065a0343a2fa20d49288d6aad3e2c8001219424d475f4cf3fa0b18d5f777af97"} Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.675370 4558 scope.go:117] "RemoveContainer" containerID="27c4f1ab95cdf957befc95b02a160445223047218baee3603004129258c0b5fa" Jan 20 17:00:45 crc kubenswrapper[4558]: E0120 17:00:45.676650 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"27c4f1ab95cdf957befc95b02a160445223047218baee3603004129258c0b5fa\": container with ID starting with 27c4f1ab95cdf957befc95b02a160445223047218baee3603004129258c0b5fa not found: ID does not exist" containerID="27c4f1ab95cdf957befc95b02a160445223047218baee3603004129258c0b5fa" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.676681 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"27c4f1ab95cdf957befc95b02a160445223047218baee3603004129258c0b5fa"} err="failed to get container status \"27c4f1ab95cdf957befc95b02a160445223047218baee3603004129258c0b5fa\": rpc error: code = NotFound desc = could not find container \"27c4f1ab95cdf957befc95b02a160445223047218baee3603004129258c0b5fa\": container with ID starting with 27c4f1ab95cdf957befc95b02a160445223047218baee3603004129258c0b5fa not found: ID does not exist" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.676699 4558 scope.go:117] "RemoveContainer" containerID="7f9dbce9ba44238985c3e3d3fb5a42a8258f76871b2f32a165f9c1ead22275de" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.676807 4558 generic.go:334] "Generic (PLEG): container finished" 
podID="180488ea-6eeb-4078-9b57-351bdfb54f5d" containerID="14704db98182ec5a7c903f3bee7ae82b53751a4fd86ecd1c586dd1360d6439c9" exitCode=0 Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.676855 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"180488ea-6eeb-4078-9b57-351bdfb54f5d","Type":"ContainerDied","Data":"14704db98182ec5a7c903f3bee7ae82b53751a4fd86ecd1c586dd1360d6439c9"} Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.676976 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:00:45 crc kubenswrapper[4558]: E0120 17:00:45.679600 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7f9dbce9ba44238985c3e3d3fb5a42a8258f76871b2f32a165f9c1ead22275de\": container with ID starting with 7f9dbce9ba44238985c3e3d3fb5a42a8258f76871b2f32a165f9c1ead22275de not found: ID does not exist" containerID="7f9dbce9ba44238985c3e3d3fb5a42a8258f76871b2f32a165f9c1ead22275de" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.679629 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7f9dbce9ba44238985c3e3d3fb5a42a8258f76871b2f32a165f9c1ead22275de"} err="failed to get container status \"7f9dbce9ba44238985c3e3d3fb5a42a8258f76871b2f32a165f9c1ead22275de\": rpc error: code = NotFound desc = could not find container \"7f9dbce9ba44238985c3e3d3fb5a42a8258f76871b2f32a165f9c1ead22275de\": container with ID starting with 7f9dbce9ba44238985c3e3d3fb5a42a8258f76871b2f32a165f9c1ead22275de not found: ID does not exist" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.679655 4558 scope.go:117] "RemoveContainer" containerID="3a7a952277efc163959dfa821edbe7eed459482ba7b2f738ddd5297a1de3b321" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.684678 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.689651 4558 generic.go:334] "Generic (PLEG): container finished" podID="b28e6999-784e-4577-88bb-db648f7a3cbc" containerID="a79b4e6f8ce00fed51a727ad2bc430351f4d330704b2b9cb8bb5ce9d963a3a12" exitCode=0 Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.689698 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"b28e6999-784e-4577-88bb-db648f7a3cbc","Type":"ContainerDied","Data":"a79b4e6f8ce00fed51a727ad2bc430351f4d330704b2b9cb8bb5ce9d963a3a12"} Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.689717 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"b28e6999-784e-4577-88bb-db648f7a3cbc","Type":"ContainerDied","Data":"44e57d20387387b66617c4a66fe7686baa61d70b4659ee4cac3fd72de4c54b44"} Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.689760 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.710792 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.711877 4558 generic.go:334] "Generic (PLEG): container finished" podID="8074f6f3-f564-42bc-b08b-28ffe75bbbc5" containerID="255c4f9294e2677844d684383d43130961e7ee3e4db470ecb77910d99c7f5de5" exitCode=0 Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.712115 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"8074f6f3-f564-42bc-b08b-28ffe75bbbc5","Type":"ContainerDied","Data":"255c4f9294e2677844d684383d43130961e7ee3e4db470ecb77910d99c7f5de5"} Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.713488 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d5c2e68-fb9d-4818-a0c2-27db2516e92b-combined-ca-bundle\") pod \"1d5c2e68-fb9d-4818-a0c2-27db2516e92b\" (UID: \"1d5c2e68-fb9d-4818-a0c2-27db2516e92b\") " Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.713590 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/180488ea-6eeb-4078-9b57-351bdfb54f5d-config-data\") pod \"180488ea-6eeb-4078-9b57-351bdfb54f5d\" (UID: \"180488ea-6eeb-4078-9b57-351bdfb54f5d\") " Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.713623 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tp72f\" (UniqueName: \"kubernetes.io/projected/180488ea-6eeb-4078-9b57-351bdfb54f5d-kube-api-access-tp72f\") pod \"180488ea-6eeb-4078-9b57-351bdfb54f5d\" (UID: \"180488ea-6eeb-4078-9b57-351bdfb54f5d\") " Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.713679 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1d5c2e68-fb9d-4818-a0c2-27db2516e92b-internal-tls-certs\") pod \"1d5c2e68-fb9d-4818-a0c2-27db2516e92b\" (UID: \"1d5c2e68-fb9d-4818-a0c2-27db2516e92b\") " Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.713765 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1d5c2e68-fb9d-4818-a0c2-27db2516e92b-log-httpd\") pod \"1d5c2e68-fb9d-4818-a0c2-27db2516e92b\" (UID: \"1d5c2e68-fb9d-4818-a0c2-27db2516e92b\") " Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.713851 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1d5c2e68-fb9d-4818-a0c2-27db2516e92b-run-httpd\") pod \"1d5c2e68-fb9d-4818-a0c2-27db2516e92b\" (UID: \"1d5c2e68-fb9d-4818-a0c2-27db2516e92b\") " Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.713868 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/1d5c2e68-fb9d-4818-a0c2-27db2516e92b-etc-swift\") pod \"1d5c2e68-fb9d-4818-a0c2-27db2516e92b\" (UID: \"1d5c2e68-fb9d-4818-a0c2-27db2516e92b\") " Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.713884 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d5c2e68-fb9d-4818-a0c2-27db2516e92b-config-data\") pod 
\"1d5c2e68-fb9d-4818-a0c2-27db2516e92b\" (UID: \"1d5c2e68-fb9d-4818-a0c2-27db2516e92b\") " Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.713919 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/180488ea-6eeb-4078-9b57-351bdfb54f5d-combined-ca-bundle\") pod \"180488ea-6eeb-4078-9b57-351bdfb54f5d\" (UID: \"180488ea-6eeb-4078-9b57-351bdfb54f5d\") " Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.713937 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1d5c2e68-fb9d-4818-a0c2-27db2516e92b-public-tls-certs\") pod \"1d5c2e68-fb9d-4818-a0c2-27db2516e92b\" (UID: \"1d5c2e68-fb9d-4818-a0c2-27db2516e92b\") " Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.714015 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hw7sd\" (UniqueName: \"kubernetes.io/projected/1d5c2e68-fb9d-4818-a0c2-27db2516e92b-kube-api-access-hw7sd\") pod \"1d5c2e68-fb9d-4818-a0c2-27db2516e92b\" (UID: \"1d5c2e68-fb9d-4818-a0c2-27db2516e92b\") " Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.714355 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3643a65-70b3-488e-ad03-8b7ad6ae4539-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"d3643a65-70b3-488e-ad03-8b7ad6ae4539\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.714765 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d5c2e68-fb9d-4818-a0c2-27db2516e92b-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "1d5c2e68-fb9d-4818-a0c2-27db2516e92b" (UID: "1d5c2e68-fb9d-4818-a0c2-27db2516e92b"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.715256 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d5c2e68-fb9d-4818-a0c2-27db2516e92b-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "1d5c2e68-fb9d-4818-a0c2-27db2516e92b" (UID: "1d5c2e68-fb9d-4818-a0c2-27db2516e92b"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.715706 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e554db35-2dba-4138-9ca0-bd1371a9c63d-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "e554db35-2dba-4138-9ca0-bd1371a9c63d" (UID: "e554db35-2dba-4138-9ca0-bd1371a9c63d"). InnerVolumeSpecName "nova-metadata-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.718956 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/d3643a65-70b3-488e-ad03-8b7ad6ae4539-config-data-default\") pod \"openstack-galera-0\" (UID: \"d3643a65-70b3-488e-ad03-8b7ad6ae4539\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.719062 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/d3643a65-70b3-488e-ad03-8b7ad6ae4539-kolla-config\") pod \"openstack-galera-0\" (UID: \"d3643a65-70b3-488e-ad03-8b7ad6ae4539\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.719106 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7lzzs\" (UniqueName: \"kubernetes.io/projected/d3643a65-70b3-488e-ad03-8b7ad6ae4539-kube-api-access-7lzzs\") pod \"openstack-galera-0\" (UID: \"d3643a65-70b3-488e-ad03-8b7ad6ae4539\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.719259 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d3643a65-70b3-488e-ad03-8b7ad6ae4539-operator-scripts\") pod \"openstack-galera-0\" (UID: \"d3643a65-70b3-488e-ad03-8b7ad6ae4539\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.719384 4558 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b8911a4f-a706-4956-9028-138c018a92ba-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.719410 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.719421 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-drvw9\" (UniqueName: \"kubernetes.io/projected/d2ccd35d-37c4-450d-b04c-ac505e35b0e8-kube-api-access-drvw9\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.719431 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1d5c2e68-fb9d-4818-a0c2-27db2516e92b-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.719440 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5l9bc\" (UniqueName: \"kubernetes.io/projected/b8911a4f-a706-4956-9028-138c018a92ba-kube-api-access-5l9bc\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.719448 4558 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/e554db35-2dba-4138-9ca0-bd1371a9c63d-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.719456 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b8911a4f-a706-4956-9028-138c018a92ba-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.719463 4558 
reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1d5c2e68-fb9d-4818-a0c2-27db2516e92b-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.719470 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d2ccd35d-37c4-450d-b04c-ac505e35b0e8-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.719478 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d2ccd35d-37c4-450d-b04c-ac505e35b0e8-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.719485 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d2ccd35d-37c4-450d-b04c-ac505e35b0e8-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.719494 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b8911a4f-a706-4956-9028-138c018a92ba-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:45 crc kubenswrapper[4558]: E0120 17:00:45.721542 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-config-data: configmap "openstack-config-data" not found Jan 20 17:00:45 crc kubenswrapper[4558]: E0120 17:00:45.721601 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/d3643a65-70b3-488e-ad03-8b7ad6ae4539-kolla-config podName:d3643a65-70b3-488e-ad03-8b7ad6ae4539 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:46.721586284 +0000 UTC m=+1140.481924251 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kolla-config" (UniqueName: "kubernetes.io/configmap/d3643a65-70b3-488e-ad03-8b7ad6ae4539-kolla-config") pod "openstack-galera-0" (UID: "d3643a65-70b3-488e-ad03-8b7ad6ae4539") : configmap "openstack-config-data" not found Jan 20 17:00:45 crc kubenswrapper[4558]: E0120 17:00:45.721969 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:00:45 crc kubenswrapper[4558]: E0120 17:00:45.722044 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d3643a65-70b3-488e-ad03-8b7ad6ae4539-combined-ca-bundle podName:d3643a65-70b3-488e-ad03-8b7ad6ae4539 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:46.722034176 +0000 UTC m=+1140.482372134 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/d3643a65-70b3-488e-ad03-8b7ad6ae4539-combined-ca-bundle") pod "openstack-galera-0" (UID: "d3643a65-70b3-488e-ad03-8b7ad6ae4539") : secret "combined-ca-bundle" not found Jan 20 17:00:45 crc kubenswrapper[4558]: E0120 17:00:45.722078 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 17:00:45 crc kubenswrapper[4558]: E0120 17:00:45.722096 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/bf9a27d0-1f64-4f43-a2a7-ce0902c6cd25-operator-scripts podName:bf9a27d0-1f64-4f43-a2a7-ce0902c6cd25 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:46.222090823 +0000 UTC m=+1139.982428790 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/bf9a27d0-1f64-4f43-a2a7-ce0902c6cd25-operator-scripts") pod "keystone-7e4f-account-create-update-pl5sm" (UID: "bf9a27d0-1f64-4f43-a2a7-ce0902c6cd25") : configmap "openstack-scripts" not found Jan 20 17:00:45 crc kubenswrapper[4558]: E0120 17:00:45.722119 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-config-data: configmap "openstack-config-data" not found Jan 20 17:00:45 crc kubenswrapper[4558]: E0120 17:00:45.722135 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/d3643a65-70b3-488e-ad03-8b7ad6ae4539-config-data-default podName:d3643a65-70b3-488e-ad03-8b7ad6ae4539 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:46.722130427 +0000 UTC m=+1140.482468394 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data-default" (UniqueName: "kubernetes.io/configmap/d3643a65-70b3-488e-ad03-8b7ad6ae4539-config-data-default") pod "openstack-galera-0" (UID: "d3643a65-70b3-488e-ad03-8b7ad6ae4539") : configmap "openstack-config-data" not found Jan 20 17:00:45 crc kubenswrapper[4558]: E0120 17:00:45.722178 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 17:00:45 crc kubenswrapper[4558]: E0120 17:00:45.722218 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/d3643a65-70b3-488e-ad03-8b7ad6ae4539-operator-scripts podName:d3643a65-70b3-488e-ad03-8b7ad6ae4539 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:46.722191462 +0000 UTC m=+1140.482529430 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/d3643a65-70b3-488e-ad03-8b7ad6ae4539-operator-scripts") pod "openstack-galera-0" (UID: "d3643a65-70b3-488e-ad03-8b7ad6ae4539") : configmap "openstack-scripts" not found Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.728492 4558 scope.go:117] "RemoveContainer" containerID="2f43a86fbf8e121d1c2b64dc721cdb7d0f3e79ac1af1c83f48879ac356eba5d7" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.730517 4558 generic.go:334] "Generic (PLEG): container finished" podID="ad708448-38df-4494-bca7-fe394c9b53a7" containerID="a0064904f7cf7b2f3f57d0cbd50180940cf612a73b85319dcd1b4cc9e5286c49" exitCode=0 Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.730541 4558 generic.go:334] "Generic (PLEG): container finished" podID="ad708448-38df-4494-bca7-fe394c9b53a7" containerID="4fd13baec559163b2b66474d46cf26e69eb2ac8b71ab5c82119b909d78b9c3ec" exitCode=0 Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.730548 4558 generic.go:334] "Generic (PLEG): container finished" podID="ad708448-38df-4494-bca7-fe394c9b53a7" containerID="5d77e588e0ee15e79eb372260c13103acd9bceda72bb63cee529297cc6f3bf98" exitCode=0 Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.730555 4558 generic.go:334] "Generic (PLEG): container finished" podID="ad708448-38df-4494-bca7-fe394c9b53a7" containerID="90254ca174d78ea243b5ca11d013559dbc865a160f1d264e2b31efdc48f96768" exitCode=0 Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.730560 4558 generic.go:334] "Generic (PLEG): container finished" podID="ad708448-38df-4494-bca7-fe394c9b53a7" containerID="e121933f3a246845b6169455f9e1e99e6b2a61bce703613cb6fb31821719438d" exitCode=0 Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.730566 4558 generic.go:334] "Generic (PLEG): 
container finished" podID="ad708448-38df-4494-bca7-fe394c9b53a7" containerID="1e0a8b23c89daae563498da37b455997040b6b47b7a29c5c9590feba957d5e7e" exitCode=0 Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.730572 4558 generic.go:334] "Generic (PLEG): container finished" podID="ad708448-38df-4494-bca7-fe394c9b53a7" containerID="4c8b93db2628bf0f39a03755d724643f7c79df42b7cf8e54ef0a1ee499194855" exitCode=0 Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.730577 4558 generic.go:334] "Generic (PLEG): container finished" podID="ad708448-38df-4494-bca7-fe394c9b53a7" containerID="24445a4a28eeea3fb63d875b16f990af43a36181cb7778f0d316092430ca7285" exitCode=0 Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.730616 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"ad708448-38df-4494-bca7-fe394c9b53a7","Type":"ContainerDied","Data":"a0064904f7cf7b2f3f57d0cbd50180940cf612a73b85319dcd1b4cc9e5286c49"} Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.730637 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"ad708448-38df-4494-bca7-fe394c9b53a7","Type":"ContainerDied","Data":"4fd13baec559163b2b66474d46cf26e69eb2ac8b71ab5c82119b909d78b9c3ec"} Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.730646 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"ad708448-38df-4494-bca7-fe394c9b53a7","Type":"ContainerDied","Data":"5d77e588e0ee15e79eb372260c13103acd9bceda72bb63cee529297cc6f3bf98"} Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.730654 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"ad708448-38df-4494-bca7-fe394c9b53a7","Type":"ContainerDied","Data":"90254ca174d78ea243b5ca11d013559dbc865a160f1d264e2b31efdc48f96768"} Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.730661 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"ad708448-38df-4494-bca7-fe394c9b53a7","Type":"ContainerDied","Data":"e121933f3a246845b6169455f9e1e99e6b2a61bce703613cb6fb31821719438d"} Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.730668 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"ad708448-38df-4494-bca7-fe394c9b53a7","Type":"ContainerDied","Data":"1e0a8b23c89daae563498da37b455997040b6b47b7a29c5c9590feba957d5e7e"} Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.730676 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"ad708448-38df-4494-bca7-fe394c9b53a7","Type":"ContainerDied","Data":"4c8b93db2628bf0f39a03755d724643f7c79df42b7cf8e54ef0a1ee499194855"} Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.730683 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"ad708448-38df-4494-bca7-fe394c9b53a7","Type":"ContainerDied","Data":"24445a4a28eeea3fb63d875b16f990af43a36181cb7778f0d316092430ca7285"} Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.733103 4558 generic.go:334] "Generic (PLEG): container finished" podID="e554db35-2dba-4138-9ca0-bd1371a9c63d" containerID="62565ba736515d1bd9fcfd983ba37f6e8370144dc694ce16eecef9e3a396d592" exitCode=0 Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.733139 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"e554db35-2dba-4138-9ca0-bd1371a9c63d","Type":"ContainerDied","Data":"62565ba736515d1bd9fcfd983ba37f6e8370144dc694ce16eecef9e3a396d592"} Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.733155 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"e554db35-2dba-4138-9ca0-bd1371a9c63d","Type":"ContainerDied","Data":"3351f20bfe9120a3bb72867d82de9088a995b7400fcc95668e83d9d78c871d67"} Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.733243 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:00:45 crc kubenswrapper[4558]: E0120 17:00:45.735774 4558 projected.go:194] Error preparing data for projected volume kube-api-access-7lzzs for pod openstack-kuttl-tests/openstack-galera-0: failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 17:00:45 crc kubenswrapper[4558]: E0120 17:00:45.735819 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/d3643a65-70b3-488e-ad03-8b7ad6ae4539-kube-api-access-7lzzs podName:d3643a65-70b3-488e-ad03-8b7ad6ae4539 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:46.735807257 +0000 UTC m=+1140.496145224 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-7lzzs" (UniqueName: "kubernetes.io/projected/d3643a65-70b3-488e-ad03-8b7ad6ae4539-kube-api-access-7lzzs") pod "openstack-galera-0" (UID: "d3643a65-70b3-488e-ad03-8b7ad6ae4539") : failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.736102 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d2ccd35d-37c4-450d-b04c-ac505e35b0e8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d2ccd35d-37c4-450d-b04c-ac505e35b0e8" (UID: "d2ccd35d-37c4-450d-b04c-ac505e35b0e8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.737819 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.738202 4558 generic.go:334] "Generic (PLEG): container finished" podID="9ea3b871-4db3-4108-baea-e57a23d9d6c5" containerID="6074e6e8906baa2f1dffa9b96ff41ea08d5604ac43ef2c6593b686fd35af868a" exitCode=0 Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.738275 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"9ea3b871-4db3-4108-baea-e57a23d9d6c5","Type":"ContainerDied","Data":"6074e6e8906baa2f1dffa9b96ff41ea08d5604ac43ef2c6593b686fd35af868a"} Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.739992 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-6b64ccd79d-5qj2x" event={"ID":"5d3423c4-ff78-4ff7-b42b-b3c93b309d52","Type":"ContainerStarted","Data":"37aa08ee2771757bea1365d0523225133f3f9a35715f5122f8e028516447be51"} Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.744340 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/180488ea-6eeb-4078-9b57-351bdfb54f5d-kube-api-access-tp72f" (OuterVolumeSpecName: "kube-api-access-tp72f") pod "180488ea-6eeb-4078-9b57-351bdfb54f5d" (UID: "180488ea-6eeb-4078-9b57-351bdfb54f5d"). 
InnerVolumeSpecName "kube-api-access-tp72f". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.744584 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d5c2e68-fb9d-4818-a0c2-27db2516e92b-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "1d5c2e68-fb9d-4818-a0c2-27db2516e92b" (UID: "1d5c2e68-fb9d-4818-a0c2-27db2516e92b"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.746350 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.748774 4558 generic.go:334] "Generic (PLEG): container finished" podID="69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915" containerID="43ce3893218e12bafcf3980d1a94c5b6f79210afef04e341874b8c63d98def1c" exitCode=143 Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.748843 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.748897 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-6b66757dfd-k89tm" event={"ID":"69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915","Type":"ContainerDied","Data":"43ce3893218e12bafcf3980d1a94c5b6f79210afef04e341874b8c63d98def1c"} Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.749363 4558 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openstack-kuttl-tests/root-account-create-update-fbtv5" secret="" err="secret \"galera-openstack-cell1-dockercfg-npsbd\" not found" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.749789 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.750628 4558 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openstack-kuttl-tests/keystone-55ff946595-f8gjf" secret="" err="secret \"keystone-keystone-dockercfg-wlvl5\" not found" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.750857 4558 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openstack-kuttl-tests/barbican-worker-c4f864f89-58gb9" secret="" err="secret \"barbican-barbican-dockercfg-z9dpp\" not found" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.750972 4558 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openstack-kuttl-tests/barbican-keystone-listener-7fc5477d66-pz8q2" secret="" err="secret \"barbican-barbican-dockercfg-z9dpp\" not found" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.751138 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.751425 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d5c2e68-fb9d-4818-a0c2-27db2516e92b-kube-api-access-hw7sd" (OuterVolumeSpecName: "kube-api-access-hw7sd") pod "1d5c2e68-fb9d-4818-a0c2-27db2516e92b" (UID: "1d5c2e68-fb9d-4818-a0c2-27db2516e92b"). InnerVolumeSpecName "kube-api-access-hw7sd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:00:45 crc kubenswrapper[4558]: E0120 17:00:45.755020 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:00:45 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:00:45 crc kubenswrapper[4558]: Jan 20 17:00:45 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:00:45 crc kubenswrapper[4558]: Jan 20 17:00:45 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:00:45 crc kubenswrapper[4558]: Jan 20 17:00:45 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:00:45 crc kubenswrapper[4558]: Jan 20 17:00:45 crc kubenswrapper[4558]: if [ -n "" ]; then Jan 20 17:00:45 crc kubenswrapper[4558]: GRANT_DATABASE="" Jan 20 17:00:45 crc kubenswrapper[4558]: else Jan 20 17:00:45 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:00:45 crc kubenswrapper[4558]: fi Jan 20 17:00:45 crc kubenswrapper[4558]: Jan 20 17:00:45 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:00:45 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:00:45 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:00:45 crc kubenswrapper[4558]: # 3. create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:00:45 crc kubenswrapper[4558]: # support updates Jan 20 17:00:45 crc kubenswrapper[4558]: Jan 20 17:00:45 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:00:45 crc kubenswrapper[4558]: E0120 17:00:45.756329 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"openstack-cell1-mariadb-root-db-secret\\\" not found\"" pod="openstack-kuttl-tests/root-account-create-update-fbtv5" podUID="e353328b-4fe7-4637-8174-a3b227ad9761" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.756745 4558 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openstack-kuttl-tests/barbican-api-5df68bf4dd-zbx8h" secret="" err="secret \"barbican-barbican-dockercfg-z9dpp\" not found" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.769110 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.815383 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8911a4f-a706-4956-9028-138c018a92ba-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b8911a4f-a706-4956-9028-138c018a92ba" (UID: "b8911a4f-a706-4956-9028-138c018a92ba"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.822584 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/d3643a65-70b3-488e-ad03-8b7ad6ae4539-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"d3643a65-70b3-488e-ad03-8b7ad6ae4539\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:00:45 crc kubenswrapper[4558]: E0120 17:00:45.823313 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-galera-openstack-svc: secret "cert-galera-openstack-svc" not found Jan 20 17:00:45 crc kubenswrapper[4558]: E0120 17:00:45.823498 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d3643a65-70b3-488e-ad03-8b7ad6ae4539-galera-tls-certs podName:d3643a65-70b3-488e-ad03-8b7ad6ae4539 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:46.823483824 +0000 UTC m=+1140.583821791 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "galera-tls-certs" (UniqueName: "kubernetes.io/secret/d3643a65-70b3-488e-ad03-8b7ad6ae4539-galera-tls-certs") pod "openstack-galera-0" (UID: "d3643a65-70b3-488e-ad03-8b7ad6ae4539") : secret "cert-galera-openstack-svc" not found Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.824452 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tp72f\" (UniqueName: \"kubernetes.io/projected/180488ea-6eeb-4078-9b57-351bdfb54f5d-kube-api-access-tp72f\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.824773 4558 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/1d5c2e68-fb9d-4818-a0c2-27db2516e92b-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.824794 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8911a4f-a706-4956-9028-138c018a92ba-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.824809 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hw7sd\" (UniqueName: \"kubernetes.io/projected/1d5c2e68-fb9d-4818-a0c2-27db2516e92b-kube-api-access-hw7sd\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.824818 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d2ccd35d-37c4-450d-b04c-ac505e35b0e8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.830187 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.838862 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/180488ea-6eeb-4078-9b57-351bdfb54f5d-config-data" (OuterVolumeSpecName: "config-data") pod "180488ea-6eeb-4078-9b57-351bdfb54f5d" (UID: "180488ea-6eeb-4078-9b57-351bdfb54f5d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.857567 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d5c2e68-fb9d-4818-a0c2-27db2516e92b-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "1d5c2e68-fb9d-4818-a0c2-27db2516e92b" (UID: "1d5c2e68-fb9d-4818-a0c2-27db2516e92b"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.862303 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d2ccd35d-37c4-450d-b04c-ac505e35b0e8-config-data" (OuterVolumeSpecName: "config-data") pod "d2ccd35d-37c4-450d-b04c-ac505e35b0e8" (UID: "d2ccd35d-37c4-450d-b04c-ac505e35b0e8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.862729 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d2ccd35d-37c4-450d-b04c-ac505e35b0e8-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "d2ccd35d-37c4-450d-b04c-ac505e35b0e8" (UID: "d2ccd35d-37c4-450d-b04c-ac505e35b0e8"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.866738 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d5c2e68-fb9d-4818-a0c2-27db2516e92b-config-data" (OuterVolumeSpecName: "config-data") pod "1d5c2e68-fb9d-4818-a0c2-27db2516e92b" (UID: "1d5c2e68-fb9d-4818-a0c2-27db2516e92b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.865690 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/180488ea-6eeb-4078-9b57-351bdfb54f5d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "180488ea-6eeb-4078-9b57-351bdfb54f5d" (UID: "180488ea-6eeb-4078-9b57-351bdfb54f5d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.868793 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d5c2e68-fb9d-4818-a0c2-27db2516e92b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1d5c2e68-fb9d-4818-a0c2-27db2516e92b" (UID: "1d5c2e68-fb9d-4818-a0c2-27db2516e92b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.877857 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d5c2e68-fb9d-4818-a0c2-27db2516e92b-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "1d5c2e68-fb9d-4818-a0c2-27db2516e92b" (UID: "1d5c2e68-fb9d-4818-a0c2-27db2516e92b"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.896086 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" podUID="bf355276-9e62-474e-bfb1-616dde5b83bc" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.96:5671: connect: connection refused" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.928783 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/180488ea-6eeb-4078-9b57-351bdfb54f5d-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.928810 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d2ccd35d-37c4-450d-b04c-ac505e35b0e8-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.928821 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1d5c2e68-fb9d-4818-a0c2-27db2516e92b-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.928831 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.928840 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d2ccd35d-37c4-450d-b04c-ac505e35b0e8-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.928848 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d5c2e68-fb9d-4818-a0c2-27db2516e92b-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.928856 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/180488ea-6eeb-4078-9b57-351bdfb54f5d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.928864 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1d5c2e68-fb9d-4818-a0c2-27db2516e92b-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.928872 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d5c2e68-fb9d-4818-a0c2-27db2516e92b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:45 crc kubenswrapper[4558]: E0120 17:00:45.928921 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-cell1-scripts: configmap "openstack-cell1-scripts" not found Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.929090 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8911a4f-a706-4956-9028-138c018a92ba-config-data" (OuterVolumeSpecName: "config-data") pod "b8911a4f-a706-4956-9028-138c018a92ba" (UID: "b8911a4f-a706-4956-9028-138c018a92ba"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:45 crc kubenswrapper[4558]: E0120 17:00:45.929318 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/e353328b-4fe7-4637-8174-a3b227ad9761-operator-scripts podName:e353328b-4fe7-4637-8174-a3b227ad9761 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:47.929303428 +0000 UTC m=+1141.689641395 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/e353328b-4fe7-4637-8174-a3b227ad9761-operator-scripts") pod "root-account-create-update-fbtv5" (UID: "e353328b-4fe7-4637-8174-a3b227ad9761") : configmap "openstack-cell1-scripts" not found Jan 20 17:00:45 crc kubenswrapper[4558]: I0120 17:00:45.969391 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-rqg8f"] Jan 20 17:00:46 crc kubenswrapper[4558]: E0120 17:00:46.030193 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/keystone-scripts: secret "keystone-scripts" not found Jan 20 17:00:46 crc kubenswrapper[4558]: E0120 17:00:46.030264 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-scripts podName:4969c1af-53c0-435a-bd06-6bd493c81c80 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:48.03024962 +0000 UTC m=+1141.790587587 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "scripts" (UniqueName: "kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-scripts") pod "keystone-55ff946595-f8gjf" (UID: "4969c1af-53c0-435a-bd06-6bd493c81c80") : secret "keystone-scripts" not found Jan 20 17:00:46 crc kubenswrapper[4558]: E0120 17:00:46.030541 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-keystone-internal-svc: secret "cert-keystone-internal-svc" not found Jan 20 17:00:46 crc kubenswrapper[4558]: E0120 17:00:46.030574 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-internal-tls-certs podName:4969c1af-53c0-435a-bd06-6bd493c81c80 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:48.030567156 +0000 UTC m=+1141.790905124 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-internal-tls-certs") pod "keystone-55ff946595-f8gjf" (UID: "4969c1af-53c0-435a-bd06-6bd493c81c80") : secret "cert-keystone-internal-svc" not found Jan 20 17:00:46 crc kubenswrapper[4558]: E0120 17:00:46.030610 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-keystone-public-svc: secret "cert-keystone-public-svc" not found Jan 20 17:00:46 crc kubenswrapper[4558]: E0120 17:00:46.030627 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-public-tls-certs podName:4969c1af-53c0-435a-bd06-6bd493c81c80 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:48.030622251 +0000 UTC m=+1141.790960217 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-public-tls-certs") pod "keystone-55ff946595-f8gjf" (UID: "4969c1af-53c0-435a-bd06-6bd493c81c80") : secret "cert-keystone-public-svc" not found Jan 20 17:00:46 crc kubenswrapper[4558]: E0120 17:00:46.030656 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/keystone-config-data: secret "keystone-config-data" not found Jan 20 17:00:46 crc kubenswrapper[4558]: E0120 17:00:46.030672 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-config-data podName:4969c1af-53c0-435a-bd06-6bd493c81c80 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:48.030667385 +0000 UTC m=+1141.791005353 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-config-data") pod "keystone-55ff946595-f8gjf" (UID: "4969c1af-53c0-435a-bd06-6bd493c81c80") : secret "keystone-config-data" not found Jan 20 17:00:46 crc kubenswrapper[4558]: E0120 17:00:46.030700 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:00:46 crc kubenswrapper[4558]: E0120 17:00:46.030716 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-combined-ca-bundle podName:4969c1af-53c0-435a-bd06-6bd493c81c80 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:48.030711298 +0000 UTC m=+1141.791049264 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-combined-ca-bundle") pod "keystone-55ff946595-f8gjf" (UID: "4969c1af-53c0-435a-bd06-6bd493c81c80") : secret "combined-ca-bundle" not found Jan 20 17:00:46 crc kubenswrapper[4558]: E0120 17:00:46.030847 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/keystone: secret "keystone" not found Jan 20 17:00:46 crc kubenswrapper[4558]: E0120 17:00:46.030870 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-fernet-keys podName:4969c1af-53c0-435a-bd06-6bd493c81c80 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:48.030864145 +0000 UTC m=+1141.791202112 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "fernet-keys" (UniqueName: "kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-fernet-keys") pod "keystone-55ff946595-f8gjf" (UID: "4969c1af-53c0-435a-bd06-6bd493c81c80") : secret "keystone" not found Jan 20 17:00:46 crc kubenswrapper[4558]: E0120 17:00:46.032342 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/barbican-worker-config-data: secret "barbican-worker-config-data" not found Jan 20 17:00:46 crc kubenswrapper[4558]: E0120 17:00:46.032455 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/de7f5467-1e83-42f0-86bb-ade85deec8f3-config-data-custom podName:de7f5467-1e83-42f0-86bb-ade85deec8f3 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:48.032422966 +0000 UTC m=+1141.792760933 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "config-data-custom" (UniqueName: "kubernetes.io/secret/de7f5467-1e83-42f0-86bb-ade85deec8f3-config-data-custom") pod "barbican-worker-c4f864f89-58gb9" (UID: "de7f5467-1e83-42f0-86bb-ade85deec8f3") : secret "barbican-worker-config-data" not found Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.033029 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8911a4f-a706-4956-9028-138c018a92ba-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:46 crc kubenswrapper[4558]: E0120 17:00:46.033116 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/barbican-config-data: secret "barbican-config-data" not found Jan 20 17:00:46 crc kubenswrapper[4558]: E0120 17:00:46.033204 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/de7f5467-1e83-42f0-86bb-ade85deec8f3-config-data podName:de7f5467-1e83-42f0-86bb-ade85deec8f3 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:48.033152779 +0000 UTC m=+1141.793490735 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/de7f5467-1e83-42f0-86bb-ade85deec8f3-config-data") pod "barbican-worker-c4f864f89-58gb9" (UID: "de7f5467-1e83-42f0-86bb-ade85deec8f3") : secret "barbican-config-data" not found Jan 20 17:00:46 crc kubenswrapper[4558]: E0120 17:00:46.033347 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:00:46 crc kubenswrapper[4558]: E0120 17:00:46.033374 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/de7f5467-1e83-42f0-86bb-ade85deec8f3-combined-ca-bundle podName:de7f5467-1e83-42f0-86bb-ade85deec8f3 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:48.033368334 +0000 UTC m=+1141.793706300 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/de7f5467-1e83-42f0-86bb-ade85deec8f3-combined-ca-bundle") pod "barbican-worker-c4f864f89-58gb9" (UID: "de7f5467-1e83-42f0-86bb-ade85deec8f3") : secret "combined-ca-bundle" not found Jan 20 17:00:46 crc kubenswrapper[4558]: E0120 17:00:46.033406 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/keystone: secret "keystone" not found Jan 20 17:00:46 crc kubenswrapper[4558]: E0120 17:00:46.033426 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-credential-keys podName:4969c1af-53c0-435a-bd06-6bd493c81c80 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:48.033420603 +0000 UTC m=+1141.793758570 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "credential-keys" (UniqueName: "kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-credential-keys") pod "keystone-55ff946595-f8gjf" (UID: "4969c1af-53c0-435a-bd06-6bd493c81c80") : secret "keystone" not found Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.074846 4558 scope.go:117] "RemoveContainer" containerID="3a7a952277efc163959dfa821edbe7eed459482ba7b2f738ddd5297a1de3b321" Jan 20 17:00:46 crc kubenswrapper[4558]: E0120 17:00:46.075371 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3a7a952277efc163959dfa821edbe7eed459482ba7b2f738ddd5297a1de3b321\": container with ID starting with 3a7a952277efc163959dfa821edbe7eed459482ba7b2f738ddd5297a1de3b321 not found: ID does not exist" containerID="3a7a952277efc163959dfa821edbe7eed459482ba7b2f738ddd5297a1de3b321" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.075404 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3a7a952277efc163959dfa821edbe7eed459482ba7b2f738ddd5297a1de3b321"} err="failed to get container status \"3a7a952277efc163959dfa821edbe7eed459482ba7b2f738ddd5297a1de3b321\": rpc error: code = NotFound desc = could not find container \"3a7a952277efc163959dfa821edbe7eed459482ba7b2f738ddd5297a1de3b321\": container with ID starting with 3a7a952277efc163959dfa821edbe7eed459482ba7b2f738ddd5297a1de3b321 not found: ID does not exist" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.075425 4558 scope.go:117] "RemoveContainer" containerID="2f43a86fbf8e121d1c2b64dc721cdb7d0f3e79ac1af1c83f48879ac356eba5d7" Jan 20 17:00:46 crc kubenswrapper[4558]: E0120 17:00:46.075656 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2f43a86fbf8e121d1c2b64dc721cdb7d0f3e79ac1af1c83f48879ac356eba5d7\": container with ID starting with 2f43a86fbf8e121d1c2b64dc721cdb7d0f3e79ac1af1c83f48879ac356eba5d7 not found: ID does not exist" containerID="2f43a86fbf8e121d1c2b64dc721cdb7d0f3e79ac1af1c83f48879ac356eba5d7" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.075686 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2f43a86fbf8e121d1c2b64dc721cdb7d0f3e79ac1af1c83f48879ac356eba5d7"} err="failed to get container status \"2f43a86fbf8e121d1c2b64dc721cdb7d0f3e79ac1af1c83f48879ac356eba5d7\": rpc error: code = NotFound desc = could not find container \"2f43a86fbf8e121d1c2b64dc721cdb7d0f3e79ac1af1c83f48879ac356eba5d7\": container with ID starting with 2f43a86fbf8e121d1c2b64dc721cdb7d0f3e79ac1af1c83f48879ac356eba5d7 not found: ID does not exist" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.075702 4558 scope.go:117] "RemoveContainer" containerID="3d3a8f2db93b034973dfab0303e49c56217b13cf7cd940baab7ecf8cfbd2745f" Jan 20 17:00:46 crc kubenswrapper[4558]: E0120 17:00:46.134050 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Jan 20 17:00:46 crc kubenswrapper[4558]: E0120 17:00:46.134114 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/bf355276-9e62-474e-bfb1-616dde5b83bc-config-data podName:bf355276-9e62-474e-bfb1-616dde5b83bc nodeName:}" failed. No retries permitted until 2026-01-20 17:00:50.13409882 +0000 UTC m=+1143.894436787 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/bf355276-9e62-474e-bfb1-616dde5b83bc-config-data") pod "rabbitmq-cell1-server-0" (UID: "bf355276-9e62-474e-bfb1-616dde5b83bc") : configmap "rabbitmq-cell1-config-data" not found Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.136837 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.148535 4558 scope.go:117] "RemoveContainer" containerID="d3bb244923af1279a2fec8556cc18369058a01a9dc93f5e798291670f59b37c6" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.168568 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.184781 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.187674 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.191771 4558 scope.go:117] "RemoveContainer" containerID="8c01eafed347e73a4f6bbd3d6df2573984ce045e60275b9c6bccf3c8df4b43d7" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.203601 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.216665 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.219447 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-d54787f8d-7fznp" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.224017 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-proxy-6b7b5d66dd-8plzj"] Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.229573 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/swift-proxy-6b7b5d66dd-8plzj"] Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.234895 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.235223 4558 scope.go:117] "RemoveContainer" containerID="d3bb244923af1279a2fec8556cc18369058a01a9dc93f5e798291670f59b37c6" Jan 20 17:00:46 crc kubenswrapper[4558]: E0120 17:00:46.235675 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d3bb244923af1279a2fec8556cc18369058a01a9dc93f5e798291670f59b37c6\": container with ID starting with d3bb244923af1279a2fec8556cc18369058a01a9dc93f5e798291670f59b37c6 not found: ID does not exist" containerID="d3bb244923af1279a2fec8556cc18369058a01a9dc93f5e798291670f59b37c6" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.235707 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d3bb244923af1279a2fec8556cc18369058a01a9dc93f5e798291670f59b37c6"} err="failed to get container status \"d3bb244923af1279a2fec8556cc18369058a01a9dc93f5e798291670f59b37c6\": rpc error: code = NotFound desc = could not find container \"d3bb244923af1279a2fec8556cc18369058a01a9dc93f5e798291670f59b37c6\": container with ID starting with d3bb244923af1279a2fec8556cc18369058a01a9dc93f5e798291670f59b37c6 not found: ID does not exist" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.235776 4558 scope.go:117] "RemoveContainer" containerID="8c01eafed347e73a4f6bbd3d6df2573984ce045e60275b9c6bccf3c8df4b43d7" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.235956 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/d3643a65-70b3-488e-ad03-8b7ad6ae4539-config-data-generated\") pod \"d3643a65-70b3-488e-ad03-8b7ad6ae4539\" (UID: \"d3643a65-70b3-488e-ad03-8b7ad6ae4539\") " Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.236285 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d3643a65-70b3-488e-ad03-8b7ad6ae4539-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "d3643a65-70b3-488e-ad03-8b7ad6ae4539" (UID: "d3643a65-70b3-488e-ad03-8b7ad6ae4539"). InnerVolumeSpecName "config-data-generated". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:00:46 crc kubenswrapper[4558]: E0120 17:00:46.236367 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8c01eafed347e73a4f6bbd3d6df2573984ce045e60275b9c6bccf3c8df4b43d7\": container with ID starting with 8c01eafed347e73a4f6bbd3d6df2573984ce045e60275b9c6bccf3c8df4b43d7 not found: ID does not exist" containerID="8c01eafed347e73a4f6bbd3d6df2573984ce045e60275b9c6bccf3c8df4b43d7" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.236548 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8c01eafed347e73a4f6bbd3d6df2573984ce045e60275b9c6bccf3c8df4b43d7"} err="failed to get container status \"8c01eafed347e73a4f6bbd3d6df2573984ce045e60275b9c6bccf3c8df4b43d7\": rpc error: code = NotFound desc = could not find container \"8c01eafed347e73a4f6bbd3d6df2573984ce045e60275b9c6bccf3c8df4b43d7\": container with ID starting with 8c01eafed347e73a4f6bbd3d6df2573984ce045e60275b9c6bccf3c8df4b43d7 not found: ID does not exist" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.236567 4558 scope.go:117] "RemoveContainer" containerID="44cfb2c4bfc8454ac6259ca35fbc70464e44213a38cdb484b02b66bc424c90d5" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.236484 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8074f6f3-f564-42bc-b08b-28ffe75bbbc5-run-httpd\") pod \"8074f6f3-f564-42bc-b08b-28ffe75bbbc5\" (UID: \"8074f6f3-f564-42bc-b08b-28ffe75bbbc5\") " Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.237519 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8074f6f3-f564-42bc-b08b-28ffe75bbbc5-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "8074f6f3-f564-42bc-b08b-28ffe75bbbc5" (UID: "8074f6f3-f564-42bc-b08b-28ffe75bbbc5"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.237617 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8074f6f3-f564-42bc-b08b-28ffe75bbbc5-scripts\") pod \"8074f6f3-f564-42bc-b08b-28ffe75bbbc5\" (UID: \"8074f6f3-f564-42bc-b08b-28ffe75bbbc5\") " Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.238156 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8074f6f3-f564-42bc-b08b-28ffe75bbbc5-sg-core-conf-yaml\") pod \"8074f6f3-f564-42bc-b08b-28ffe75bbbc5\" (UID: \"8074f6f3-f564-42bc-b08b-28ffe75bbbc5\") " Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.238227 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8074f6f3-f564-42bc-b08b-28ffe75bbbc5-combined-ca-bundle\") pod \"8074f6f3-f564-42bc-b08b-28ffe75bbbc5\" (UID: \"8074f6f3-f564-42bc-b08b-28ffe75bbbc5\") " Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.238273 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/8074f6f3-f564-42bc-b08b-28ffe75bbbc5-ceilometer-tls-certs\") pod \"8074f6f3-f564-42bc-b08b-28ffe75bbbc5\" (UID: \"8074f6f3-f564-42bc-b08b-28ffe75bbbc5\") " Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.238611 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8074f6f3-f564-42bc-b08b-28ffe75bbbc5-config-data\") pod \"8074f6f3-f564-42bc-b08b-28ffe75bbbc5\" (UID: \"8074f6f3-f564-42bc-b08b-28ffe75bbbc5\") " Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.238644 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8074f6f3-f564-42bc-b08b-28ffe75bbbc5-log-httpd\") pod \"8074f6f3-f564-42bc-b08b-28ffe75bbbc5\" (UID: \"8074f6f3-f564-42bc-b08b-28ffe75bbbc5\") " Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.238692 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"d3643a65-70b3-488e-ad03-8b7ad6ae4539\" (UID: \"d3643a65-70b3-488e-ad03-8b7ad6ae4539\") " Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.238711 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4vb45\" (UniqueName: \"kubernetes.io/projected/8074f6f3-f564-42bc-b08b-28ffe75bbbc5-kube-api-access-4vb45\") pod \"8074f6f3-f564-42bc-b08b-28ffe75bbbc5\" (UID: \"8074f6f3-f564-42bc-b08b-28ffe75bbbc5\") " Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.239154 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/54c4bfe3-42bf-46ea-8c23-0621f7b65686-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"54c4bfe3-42bf-46ea-8c23-0621f7b65686\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.239293 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/54c4bfe3-42bf-46ea-8c23-0621f7b65686-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: 
\"54c4bfe3-42bf-46ea-8c23-0621f7b65686\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.239609 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54c4bfe3-42bf-46ea-8c23-0621f7b65686-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"54c4bfe3-42bf-46ea-8c23-0621f7b65686\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.239915 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/54c4bfe3-42bf-46ea-8c23-0621f7b65686-scripts\") pod \"ovn-northd-0\" (UID: \"54c4bfe3-42bf-46ea-8c23-0621f7b65686\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.240009 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/54c4bfe3-42bf-46ea-8c23-0621f7b65686-config\") pod \"ovn-northd-0\" (UID: \"54c4bfe3-42bf-46ea-8c23-0621f7b65686\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.240317 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/d3643a65-70b3-488e-ad03-8b7ad6ae4539-config-data-generated\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.240356 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8074f6f3-f564-42bc-b08b-28ffe75bbbc5-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:46 crc kubenswrapper[4558]: E0120 17:00:46.240530 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-barbican-public-svc: secret "cert-barbican-public-svc" not found Jan 20 17:00:46 crc kubenswrapper[4558]: E0120 17:00:46.240566 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3735ca3d-3764-4d36-b912-fbf0bfb96dd8-public-tls-certs podName:3735ca3d-3764-4d36-b912-fbf0bfb96dd8 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:48.2405545 +0000 UTC m=+1142.000892468 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/3735ca3d-3764-4d36-b912-fbf0bfb96dd8-public-tls-certs") pod "barbican-api-5df68bf4dd-zbx8h" (UID: "3735ca3d-3764-4d36-b912-fbf0bfb96dd8") : secret "cert-barbican-public-svc" not found Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.240700 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8074f6f3-f564-42bc-b08b-28ffe75bbbc5-scripts" (OuterVolumeSpecName: "scripts") pod "8074f6f3-f564-42bc-b08b-28ffe75bbbc5" (UID: "8074f6f3-f564-42bc-b08b-28ffe75bbbc5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.242533 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8074f6f3-f564-42bc-b08b-28ffe75bbbc5-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "8074f6f3-f564-42bc-b08b-28ffe75bbbc5" (UID: "8074f6f3-f564-42bc-b08b-28ffe75bbbc5"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.243761 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:00:46 crc kubenswrapper[4558]: E0120 17:00:46.243846 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-ovnnorthd-ovndbs: object "openstack-kuttl-tests"/"cert-ovnnorthd-ovndbs" not registered Jan 20 17:00:46 crc kubenswrapper[4558]: E0120 17:00:46.243878 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/54c4bfe3-42bf-46ea-8c23-0621f7b65686-ovn-northd-tls-certs podName:54c4bfe3-42bf-46ea-8c23-0621f7b65686 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:50.243867731 +0000 UTC m=+1144.004205698 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "ovn-northd-tls-certs" (UniqueName: "kubernetes.io/secret/54c4bfe3-42bf-46ea-8c23-0621f7b65686-ovn-northd-tls-certs") pod "ovn-northd-0" (UID: "54c4bfe3-42bf-46ea-8c23-0621f7b65686") : object "openstack-kuttl-tests"/"cert-ovnnorthd-ovndbs" not registered Jan 20 17:00:46 crc kubenswrapper[4558]: E0120 17:00:46.243883 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/barbican-config-data: secret "barbican-config-data" not found Jan 20 17:00:46 crc kubenswrapper[4558]: E0120 17:00:46.243954 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7776f921-0ae7-4a8c-b444-cf5b4b9a4f65-config-data podName:7776f921-0ae7-4a8c-b444-cf5b4b9a4f65 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:48.243938434 +0000 UTC m=+1142.004276401 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/7776f921-0ae7-4a8c-b444-cf5b4b9a4f65-config-data") pod "barbican-keystone-listener-7fc5477d66-pz8q2" (UID: "7776f921-0ae7-4a8c-b444-cf5b4b9a4f65") : secret "barbican-config-data" not found Jan 20 17:00:46 crc kubenswrapper[4558]: E0120 17:00:46.243997 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/barbican-keystone-listener-config-data: secret "barbican-keystone-listener-config-data" not found Jan 20 17:00:46 crc kubenswrapper[4558]: E0120 17:00:46.244023 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7776f921-0ae7-4a8c-b444-cf5b4b9a4f65-config-data-custom podName:7776f921-0ae7-4a8c-b444-cf5b4b9a4f65 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:48.244015208 +0000 UTC m=+1142.004353175 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "config-data-custom" (UniqueName: "kubernetes.io/secret/7776f921-0ae7-4a8c-b444-cf5b4b9a4f65-config-data-custom") pod "barbican-keystone-listener-7fc5477d66-pz8q2" (UID: "7776f921-0ae7-4a8c-b444-cf5b4b9a4f65") : secret "barbican-keystone-listener-config-data" not found Jan 20 17:00:46 crc kubenswrapper[4558]: E0120 17:00:46.244055 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-ovn-metrics: object "openstack-kuttl-tests"/"cert-ovn-metrics" not registered Jan 20 17:00:46 crc kubenswrapper[4558]: E0120 17:00:46.244072 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/54c4bfe3-42bf-46ea-8c23-0621f7b65686-metrics-certs-tls-certs podName:54c4bfe3-42bf-46ea-8c23-0621f7b65686 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:50.244067808 +0000 UTC m=+1144.004405775 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs-tls-certs" (UniqueName: "kubernetes.io/secret/54c4bfe3-42bf-46ea-8c23-0621f7b65686-metrics-certs-tls-certs") pod "ovn-northd-0" (UID: "54c4bfe3-42bf-46ea-8c23-0621f7b65686") : object "openstack-kuttl-tests"/"cert-ovn-metrics" not registered Jan 20 17:00:46 crc kubenswrapper[4558]: E0120 17:00:46.244100 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:00:46 crc kubenswrapper[4558]: E0120 17:00:46.244116 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/54c4bfe3-42bf-46ea-8c23-0621f7b65686-combined-ca-bundle podName:54c4bfe3-42bf-46ea-8c23-0621f7b65686 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:50.244111028 +0000 UTC m=+1144.004448996 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/54c4bfe3-42bf-46ea-8c23-0621f7b65686-combined-ca-bundle") pod "ovn-northd-0" (UID: "54c4bfe3-42bf-46ea-8c23-0621f7b65686") : secret "combined-ca-bundle" not found Jan 20 17:00:46 crc kubenswrapper[4558]: E0120 17:00:46.244147 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:00:46 crc kubenswrapper[4558]: E0120 17:00:46.244183 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7776f921-0ae7-4a8c-b444-cf5b4b9a4f65-combined-ca-bundle podName:7776f921-0ae7-4a8c-b444-cf5b4b9a4f65 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:48.244157956 +0000 UTC m=+1142.004495924 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/7776f921-0ae7-4a8c-b444-cf5b4b9a4f65-combined-ca-bundle") pod "barbican-keystone-listener-7fc5477d66-pz8q2" (UID: "7776f921-0ae7-4a8c-b444-cf5b4b9a4f65") : secret "combined-ca-bundle" not found Jan 20 17:00:46 crc kubenswrapper[4558]: E0120 17:00:46.244226 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/barbican-api-config-data: secret "barbican-api-config-data" not found Jan 20 17:00:46 crc kubenswrapper[4558]: E0120 17:00:46.244244 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3735ca3d-3764-4d36-b912-fbf0bfb96dd8-config-data-custom podName:3735ca3d-3764-4d36-b912-fbf0bfb96dd8 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:48.244239641 +0000 UTC m=+1142.004577608 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "config-data-custom" (UniqueName: "kubernetes.io/secret/3735ca3d-3764-4d36-b912-fbf0bfb96dd8-config-data-custom") pod "barbican-api-5df68bf4dd-zbx8h" (UID: "3735ca3d-3764-4d36-b912-fbf0bfb96dd8") : secret "barbican-api-config-data" not found Jan 20 17:00:46 crc kubenswrapper[4558]: E0120 17:00:46.244270 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/ovnnorthd-scripts: object "openstack-kuttl-tests"/"ovnnorthd-scripts" not registered Jan 20 17:00:46 crc kubenswrapper[4558]: E0120 17:00:46.244288 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/54c4bfe3-42bf-46ea-8c23-0621f7b65686-scripts podName:54c4bfe3-42bf-46ea-8c23-0621f7b65686 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:50.244282281 +0000 UTC m=+1144.004620247 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "scripts" (UniqueName: "kubernetes.io/configmap/54c4bfe3-42bf-46ea-8c23-0621f7b65686-scripts") pod "ovn-northd-0" (UID: "54c4bfe3-42bf-46ea-8c23-0621f7b65686") : object "openstack-kuttl-tests"/"ovnnorthd-scripts" not registered Jan 20 17:00:46 crc kubenswrapper[4558]: E0120 17:00:46.244315 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/barbican-config-data: secret "barbican-config-data" not found Jan 20 17:00:46 crc kubenswrapper[4558]: E0120 17:00:46.244329 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3735ca3d-3764-4d36-b912-fbf0bfb96dd8-config-data podName:3735ca3d-3764-4d36-b912-fbf0bfb96dd8 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:48.24432467 +0000 UTC m=+1142.004662637 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/3735ca3d-3764-4d36-b912-fbf0bfb96dd8-config-data") pod "barbican-api-5df68bf4dd-zbx8h" (UID: "3735ca3d-3764-4d36-b912-fbf0bfb96dd8") : secret "barbican-config-data" not found Jan 20 17:00:46 crc kubenswrapper[4558]: E0120 17:00:46.244350 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 17:00:46 crc kubenswrapper[4558]: E0120 17:00:46.244365 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/bf9a27d0-1f64-4f43-a2a7-ce0902c6cd25-operator-scripts podName:bf9a27d0-1f64-4f43-a2a7-ce0902c6cd25 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:47.244360788 +0000 UTC m=+1141.004698755 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/bf9a27d0-1f64-4f43-a2a7-ce0902c6cd25-operator-scripts") pod "keystone-7e4f-account-create-update-pl5sm" (UID: "bf9a27d0-1f64-4f43-a2a7-ce0902c6cd25") : configmap "openstack-scripts" not found Jan 20 17:00:46 crc kubenswrapper[4558]: E0120 17:00:46.244390 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-barbican-internal-svc: secret "cert-barbican-internal-svc" not found Jan 20 17:00:46 crc kubenswrapper[4558]: E0120 17:00:46.244404 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3735ca3d-3764-4d36-b912-fbf0bfb96dd8-internal-tls-certs podName:3735ca3d-3764-4d36-b912-fbf0bfb96dd8 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:48.244400504 +0000 UTC m=+1142.004738470 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/3735ca3d-3764-4d36-b912-fbf0bfb96dd8-internal-tls-certs") pod "barbican-api-5df68bf4dd-zbx8h" (UID: "3735ca3d-3764-4d36-b912-fbf0bfb96dd8") : secret "cert-barbican-internal-svc" not found Jan 20 17:00:46 crc kubenswrapper[4558]: E0120 17:00:46.244424 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/ovnnorthd-config: object "openstack-kuttl-tests"/"ovnnorthd-config" not registered Jan 20 17:00:46 crc kubenswrapper[4558]: E0120 17:00:46.244437 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/54c4bfe3-42bf-46ea-8c23-0621f7b65686-config podName:54c4bfe3-42bf-46ea-8c23-0621f7b65686 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:50.244433555 +0000 UTC m=+1144.004771522 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/54c4bfe3-42bf-46ea-8c23-0621f7b65686-config") pod "ovn-northd-0" (UID: "54c4bfe3-42bf-46ea-8c23-0621f7b65686") : object "openstack-kuttl-tests"/"ovnnorthd-config" not registered Jan 20 17:00:46 crc kubenswrapper[4558]: E0120 17:00:46.244460 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:00:46 crc kubenswrapper[4558]: E0120 17:00:46.244475 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3735ca3d-3764-4d36-b912-fbf0bfb96dd8-combined-ca-bundle podName:3735ca3d-3764-4d36-b912-fbf0bfb96dd8 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:48.244470194 +0000 UTC m=+1142.004808151 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/3735ca3d-3764-4d36-b912-fbf0bfb96dd8-combined-ca-bundle") pod "barbican-api-5df68bf4dd-zbx8h" (UID: "3735ca3d-3764-4d36-b912-fbf0bfb96dd8") : secret "combined-ca-bundle" not found Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.252823 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8074f6f3-f564-42bc-b08b-28ffe75bbbc5-kube-api-access-4vb45" (OuterVolumeSpecName: "kube-api-access-4vb45") pod "8074f6f3-f564-42bc-b08b-28ffe75bbbc5" (UID: "8074f6f3-f564-42bc-b08b-28ffe75bbbc5"). InnerVolumeSpecName "kube-api-access-4vb45". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.253362 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.274394 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.280873 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage13-crc" (OuterVolumeSpecName: "mysql-db") pod "d3643a65-70b3-488e-ad03-8b7ad6ae4539" (UID: "d3643a65-70b3-488e-ad03-8b7ad6ae4539"). InnerVolumeSpecName "local-storage13-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.289388 4558 scope.go:117] "RemoveContainer" containerID="5fcc9425b33989b0ac07f50e1aa0774a071177ee9453a094ad6d619c7974603b" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.300054 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.314295 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.340305 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.340904 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9ea3b871-4db3-4108-baea-e57a23d9d6c5-config-data\") pod \"9ea3b871-4db3-4108-baea-e57a23d9d6c5\" (UID: \"9ea3b871-4db3-4108-baea-e57a23d9d6c5\") " Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.341012 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c268a2f5-7c67-4935-8f3e-bdd83aeccc95-logs\") pod \"c268a2f5-7c67-4935-8f3e-bdd83aeccc95\" (UID: \"c268a2f5-7c67-4935-8f3e-bdd83aeccc95\") " Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.341055 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bgb25\" (UniqueName: \"kubernetes.io/projected/c268a2f5-7c67-4935-8f3e-bdd83aeccc95-kube-api-access-bgb25\") pod \"c268a2f5-7c67-4935-8f3e-bdd83aeccc95\" (UID: \"c268a2f5-7c67-4935-8f3e-bdd83aeccc95\") " Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.341075 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c268a2f5-7c67-4935-8f3e-bdd83aeccc95-internal-tls-certs\") pod \"c268a2f5-7c67-4935-8f3e-bdd83aeccc95\" (UID: \"c268a2f5-7c67-4935-8f3e-bdd83aeccc95\") " Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.341098 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10c92abf-cd48-4659-8595-ce9610c0fe2e-combined-ca-bundle\") pod \"10c92abf-cd48-4659-8595-ce9610c0fe2e\" (UID: \"10c92abf-cd48-4659-8595-ce9610c0fe2e\") " Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.341186 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c268a2f5-7c67-4935-8f3e-bdd83aeccc95-scripts\") pod \"c268a2f5-7c67-4935-8f3e-bdd83aeccc95\" (UID: \"c268a2f5-7c67-4935-8f3e-bdd83aeccc95\") " Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.341213 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6r6jm\" (UniqueName: \"kubernetes.io/projected/9ea3b871-4db3-4108-baea-e57a23d9d6c5-kube-api-access-6r6jm\") pod \"9ea3b871-4db3-4108-baea-e57a23d9d6c5\" (UID: \"9ea3b871-4db3-4108-baea-e57a23d9d6c5\") " Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.341230 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c268a2f5-7c67-4935-8f3e-bdd83aeccc95-config-data\") pod \"c268a2f5-7c67-4935-8f3e-bdd83aeccc95\" (UID: \"c268a2f5-7c67-4935-8f3e-bdd83aeccc95\") " Jan 20 
17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.341281 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ddb5d\" (UniqueName: \"kubernetes.io/projected/10c92abf-cd48-4659-8595-ce9610c0fe2e-kube-api-access-ddb5d\") pod \"10c92abf-cd48-4659-8595-ce9610c0fe2e\" (UID: \"10c92abf-cd48-4659-8595-ce9610c0fe2e\") " Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.341337 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c268a2f5-7c67-4935-8f3e-bdd83aeccc95-public-tls-certs\") pod \"c268a2f5-7c67-4935-8f3e-bdd83aeccc95\" (UID: \"c268a2f5-7c67-4935-8f3e-bdd83aeccc95\") " Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.341361 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/10c92abf-cd48-4659-8595-ce9610c0fe2e-config-data\") pod \"10c92abf-cd48-4659-8595-ce9610c0fe2e\" (UID: \"10c92abf-cd48-4659-8595-ce9610c0fe2e\") " Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.341397 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c268a2f5-7c67-4935-8f3e-bdd83aeccc95-combined-ca-bundle\") pod \"c268a2f5-7c67-4935-8f3e-bdd83aeccc95\" (UID: \"c268a2f5-7c67-4935-8f3e-bdd83aeccc95\") " Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.341449 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ea3b871-4db3-4108-baea-e57a23d9d6c5-combined-ca-bundle\") pod \"9ea3b871-4db3-4108-baea-e57a23d9d6c5\" (UID: \"9ea3b871-4db3-4108-baea-e57a23d9d6c5\") " Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.342042 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zscx2\" (UniqueName: \"kubernetes.io/projected/54c4bfe3-42bf-46ea-8c23-0621f7b65686-kube-api-access-zscx2\") pod \"ovn-northd-0\" (UID: \"54c4bfe3-42bf-46ea-8c23-0621f7b65686\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.342144 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8074f6f3-f564-42bc-b08b-28ffe75bbbc5-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.342187 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") on node \"crc\" " Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.342198 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4vb45\" (UniqueName: \"kubernetes.io/projected/8074f6f3-f564-42bc-b08b-28ffe75bbbc5-kube-api-access-4vb45\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.342215 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8074f6f3-f564-42bc-b08b-28ffe75bbbc5-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.344936 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:00:46 crc kubenswrapper[4558]: E0120 17:00:46.345130 4558 projected.go:194] Error preparing data for projected volume kube-api-access-zscx2 for pod 
openstack-kuttl-tests/ovn-northd-0: failed to fetch token: serviceaccounts "ovnnorthd-ovnnorthd" is forbidden: User "system:node:crc" cannot create resource "serviceaccounts/token" in API group "" in the namespace "openstack-kuttl-tests": no relationship found between node 'crc' and this object Jan 20 17:00:46 crc kubenswrapper[4558]: E0120 17:00:46.345190 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/54c4bfe3-42bf-46ea-8c23-0621f7b65686-kube-api-access-zscx2 podName:54c4bfe3-42bf-46ea-8c23-0621f7b65686 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:50.345175682 +0000 UTC m=+1144.105513649 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-zscx2" (UniqueName: "kubernetes.io/projected/54c4bfe3-42bf-46ea-8c23-0621f7b65686-kube-api-access-zscx2") pod "ovn-northd-0" (UID: "54c4bfe3-42bf-46ea-8c23-0621f7b65686") : failed to fetch token: serviceaccounts "ovnnorthd-ovnnorthd" is forbidden: User "system:node:crc" cannot create resource "serviceaccounts/token" in API group "" in the namespace "openstack-kuttl-tests": no relationship found between node 'crc' and this object Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.349198 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c268a2f5-7c67-4935-8f3e-bdd83aeccc95-logs" (OuterVolumeSpecName: "logs") pod "c268a2f5-7c67-4935-8f3e-bdd83aeccc95" (UID: "c268a2f5-7c67-4935-8f3e-bdd83aeccc95"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.362996 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c268a2f5-7c67-4935-8f3e-bdd83aeccc95-kube-api-access-bgb25" (OuterVolumeSpecName: "kube-api-access-bgb25") pod "c268a2f5-7c67-4935-8f3e-bdd83aeccc95" (UID: "c268a2f5-7c67-4935-8f3e-bdd83aeccc95"). InnerVolumeSpecName "kube-api-access-bgb25". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.363090 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/10c92abf-cd48-4659-8595-ce9610c0fe2e-kube-api-access-ddb5d" (OuterVolumeSpecName: "kube-api-access-ddb5d") pod "10c92abf-cd48-4659-8595-ce9610c0fe2e" (UID: "10c92abf-cd48-4659-8595-ce9610c0fe2e"). InnerVolumeSpecName "kube-api-access-ddb5d". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.363560 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9ea3b871-4db3-4108-baea-e57a23d9d6c5-kube-api-access-6r6jm" (OuterVolumeSpecName: "kube-api-access-6r6jm") pod "9ea3b871-4db3-4108-baea-e57a23d9d6c5" (UID: "9ea3b871-4db3-4108-baea-e57a23d9d6c5"). InnerVolumeSpecName "kube-api-access-6r6jm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.363623 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c268a2f5-7c67-4935-8f3e-bdd83aeccc95-scripts" (OuterVolumeSpecName: "scripts") pod "c268a2f5-7c67-4935-8f3e-bdd83aeccc95" (UID: "c268a2f5-7c67-4935-8f3e-bdd83aeccc95"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.376242 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8074f6f3-f564-42bc-b08b-28ffe75bbbc5-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "8074f6f3-f564-42bc-b08b-28ffe75bbbc5" (UID: "8074f6f3-f564-42bc-b08b-28ffe75bbbc5"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.401485 4558 scope.go:117] "RemoveContainer" containerID="44cfb2c4bfc8454ac6259ca35fbc70464e44213a38cdb484b02b66bc424c90d5" Jan 20 17:00:46 crc kubenswrapper[4558]: E0120 17:00:46.402459 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"44cfb2c4bfc8454ac6259ca35fbc70464e44213a38cdb484b02b66bc424c90d5\": container with ID starting with 44cfb2c4bfc8454ac6259ca35fbc70464e44213a38cdb484b02b66bc424c90d5 not found: ID does not exist" containerID="44cfb2c4bfc8454ac6259ca35fbc70464e44213a38cdb484b02b66bc424c90d5" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.402492 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"44cfb2c4bfc8454ac6259ca35fbc70464e44213a38cdb484b02b66bc424c90d5"} err="failed to get container status \"44cfb2c4bfc8454ac6259ca35fbc70464e44213a38cdb484b02b66bc424c90d5\": rpc error: code = NotFound desc = could not find container \"44cfb2c4bfc8454ac6259ca35fbc70464e44213a38cdb484b02b66bc424c90d5\": container with ID starting with 44cfb2c4bfc8454ac6259ca35fbc70464e44213a38cdb484b02b66bc424c90d5 not found: ID does not exist" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.402514 4558 scope.go:117] "RemoveContainer" containerID="5fcc9425b33989b0ac07f50e1aa0774a071177ee9453a094ad6d619c7974603b" Jan 20 17:00:46 crc kubenswrapper[4558]: E0120 17:00:46.402976 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5fcc9425b33989b0ac07f50e1aa0774a071177ee9453a094ad6d619c7974603b\": container with ID starting with 5fcc9425b33989b0ac07f50e1aa0774a071177ee9453a094ad6d619c7974603b not found: ID does not exist" containerID="5fcc9425b33989b0ac07f50e1aa0774a071177ee9453a094ad6d619c7974603b" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.402998 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5fcc9425b33989b0ac07f50e1aa0774a071177ee9453a094ad6d619c7974603b"} err="failed to get container status \"5fcc9425b33989b0ac07f50e1aa0774a071177ee9453a094ad6d619c7974603b\": rpc error: code = NotFound desc = could not find container \"5fcc9425b33989b0ac07f50e1aa0774a071177ee9453a094ad6d619c7974603b\": container with ID starting with 5fcc9425b33989b0ac07f50e1aa0774a071177ee9453a094ad6d619c7974603b not found: ID does not exist" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.403011 4558 scope.go:117] "RemoveContainer" containerID="2c012c9aed3c1cbbfd0f3adb398aa6e942741aa3f3d75ea5d680bf8543a23be2" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.413842 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/10c92abf-cd48-4659-8595-ce9610c0fe2e-config-data" (OuterVolumeSpecName: "config-data") pod "10c92abf-cd48-4659-8595-ce9610c0fe2e" (UID: "10c92abf-cd48-4659-8595-ce9610c0fe2e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.420023 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9ea3b871-4db3-4108-baea-e57a23d9d6c5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9ea3b871-4db3-4108-baea-e57a23d9d6c5" (UID: "9ea3b871-4db3-4108-baea-e57a23d9d6c5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.430408 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage13-crc" (UniqueName: "kubernetes.io/local-volume/local-storage13-crc") on node "crc" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.435864 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9ea3b871-4db3-4108-baea-e57a23d9d6c5-config-data" (OuterVolumeSpecName: "config-data") pod "9ea3b871-4db3-4108-baea-e57a23d9d6c5" (UID: "9ea3b871-4db3-4108-baea-e57a23d9d6c5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.443352 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8074f6f3-f564-42bc-b08b-28ffe75bbbc5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8074f6f3-f564-42bc-b08b-28ffe75bbbc5" (UID: "8074f6f3-f564-42bc-b08b-28ffe75bbbc5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.443713 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/54c4bfe3-42bf-46ea-8c23-0621f7b65686-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.443728 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ea3b871-4db3-4108-baea-e57a23d9d6c5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.443738 4558 reconciler_common.go:293] "Volume detached for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/54c4bfe3-42bf-46ea-8c23-0621f7b65686-ovn-northd-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.443746 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9ea3b871-4db3-4108-baea-e57a23d9d6c5-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.443754 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8074f6f3-f564-42bc-b08b-28ffe75bbbc5-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.443763 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c268a2f5-7c67-4935-8f3e-bdd83aeccc95-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.443771 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bgb25\" (UniqueName: \"kubernetes.io/projected/c268a2f5-7c67-4935-8f3e-bdd83aeccc95-kube-api-access-bgb25\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.443778 4558 
reconciler_common.go:293] "Volume detached for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.443787 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zscx2\" (UniqueName: \"kubernetes.io/projected/54c4bfe3-42bf-46ea-8c23-0621f7b65686-kube-api-access-zscx2\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.443794 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c268a2f5-7c67-4935-8f3e-bdd83aeccc95-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.443802 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6r6jm\" (UniqueName: \"kubernetes.io/projected/9ea3b871-4db3-4108-baea-e57a23d9d6c5-kube-api-access-6r6jm\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.443810 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/54c4bfe3-42bf-46ea-8c23-0621f7b65686-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.443817 4558 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/54c4bfe3-42bf-46ea-8c23-0621f7b65686-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.443825 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ddb5d\" (UniqueName: \"kubernetes.io/projected/10c92abf-cd48-4659-8595-ce9610c0fe2e-kube-api-access-ddb5d\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.443833 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54c4bfe3-42bf-46ea-8c23-0621f7b65686-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.443842 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/10c92abf-cd48-4659-8595-ce9610c0fe2e-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.445900 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c268a2f5-7c67-4935-8f3e-bdd83aeccc95-config-data" (OuterVolumeSpecName: "config-data") pod "c268a2f5-7c67-4935-8f3e-bdd83aeccc95" (UID: "c268a2f5-7c67-4935-8f3e-bdd83aeccc95"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.447340 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/10c92abf-cd48-4659-8595-ce9610c0fe2e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "10c92abf-cd48-4659-8595-ce9610c0fe2e" (UID: "10c92abf-cd48-4659-8595-ce9610c0fe2e"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.450737 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8074f6f3-f564-42bc-b08b-28ffe75bbbc5-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "8074f6f3-f564-42bc-b08b-28ffe75bbbc5" (UID: "8074f6f3-f564-42bc-b08b-28ffe75bbbc5"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.458973 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c268a2f5-7c67-4935-8f3e-bdd83aeccc95-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c268a2f5-7c67-4935-8f3e-bdd83aeccc95" (UID: "c268a2f5-7c67-4935-8f3e-bdd83aeccc95"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.477301 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c268a2f5-7c67-4935-8f3e-bdd83aeccc95-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "c268a2f5-7c67-4935-8f3e-bdd83aeccc95" (UID: "c268a2f5-7c67-4935-8f3e-bdd83aeccc95"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.486366 4558 scope.go:117] "RemoveContainer" containerID="2c68bcde1b4540d7e10ed3de55e085fa25f7fe350c76cf38edeedca6d5734cb7" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.490536 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8074f6f3-f564-42bc-b08b-28ffe75bbbc5-config-data" (OuterVolumeSpecName: "config-data") pod "8074f6f3-f564-42bc-b08b-28ffe75bbbc5" (UID: "8074f6f3-f564-42bc-b08b-28ffe75bbbc5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.503534 4558 scope.go:117] "RemoveContainer" containerID="2c012c9aed3c1cbbfd0f3adb398aa6e942741aa3f3d75ea5d680bf8543a23be2" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.503777 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c268a2f5-7c67-4935-8f3e-bdd83aeccc95-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "c268a2f5-7c67-4935-8f3e-bdd83aeccc95" (UID: "c268a2f5-7c67-4935-8f3e-bdd83aeccc95"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:46 crc kubenswrapper[4558]: E0120 17:00:46.503838 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2c012c9aed3c1cbbfd0f3adb398aa6e942741aa3f3d75ea5d680bf8543a23be2\": container with ID starting with 2c012c9aed3c1cbbfd0f3adb398aa6e942741aa3f3d75ea5d680bf8543a23be2 not found: ID does not exist" containerID="2c012c9aed3c1cbbfd0f3adb398aa6e942741aa3f3d75ea5d680bf8543a23be2" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.503863 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2c012c9aed3c1cbbfd0f3adb398aa6e942741aa3f3d75ea5d680bf8543a23be2"} err="failed to get container status \"2c012c9aed3c1cbbfd0f3adb398aa6e942741aa3f3d75ea5d680bf8543a23be2\": rpc error: code = NotFound desc = could not find container \"2c012c9aed3c1cbbfd0f3adb398aa6e942741aa3f3d75ea5d680bf8543a23be2\": container with ID starting with 2c012c9aed3c1cbbfd0f3adb398aa6e942741aa3f3d75ea5d680bf8543a23be2 not found: ID does not exist" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.503886 4558 scope.go:117] "RemoveContainer" containerID="2c68bcde1b4540d7e10ed3de55e085fa25f7fe350c76cf38edeedca6d5734cb7" Jan 20 17:00:46 crc kubenswrapper[4558]: E0120 17:00:46.504484 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2c68bcde1b4540d7e10ed3de55e085fa25f7fe350c76cf38edeedca6d5734cb7\": container with ID starting with 2c68bcde1b4540d7e10ed3de55e085fa25f7fe350c76cf38edeedca6d5734cb7 not found: ID does not exist" containerID="2c68bcde1b4540d7e10ed3de55e085fa25f7fe350c76cf38edeedca6d5734cb7" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.504513 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2c68bcde1b4540d7e10ed3de55e085fa25f7fe350c76cf38edeedca6d5734cb7"} err="failed to get container status \"2c68bcde1b4540d7e10ed3de55e085fa25f7fe350c76cf38edeedca6d5734cb7\": rpc error: code = NotFound desc = could not find container \"2c68bcde1b4540d7e10ed3de55e085fa25f7fe350c76cf38edeedca6d5734cb7\": container with ID starting with 2c68bcde1b4540d7e10ed3de55e085fa25f7fe350c76cf38edeedca6d5734cb7 not found: ID does not exist" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.504530 4558 scope.go:117] "RemoveContainer" containerID="14704db98182ec5a7c903f3bee7ae82b53751a4fd86ecd1c586dd1360d6439c9" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.531045 4558 scope.go:117] "RemoveContainer" containerID="a79b4e6f8ce00fed51a727ad2bc430351f4d330704b2b9cb8bb5ce9d963a3a12" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.545197 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c268a2f5-7c67-4935-8f3e-bdd83aeccc95-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.545229 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10c92abf-cd48-4659-8595-ce9610c0fe2e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.545239 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8074f6f3-f564-42bc-b08b-28ffe75bbbc5-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:46 crc kubenswrapper[4558]: 
I0120 17:00:46.545248 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c268a2f5-7c67-4935-8f3e-bdd83aeccc95-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.545257 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c268a2f5-7c67-4935-8f3e-bdd83aeccc95-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.545266 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c268a2f5-7c67-4935-8f3e-bdd83aeccc95-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.545273 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8074f6f3-f564-42bc-b08b-28ffe75bbbc5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.545281 4558 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/8074f6f3-f564-42bc-b08b-28ffe75bbbc5-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.547759 4558 scope.go:117] "RemoveContainer" containerID="ed2110d076583425a8af34103095d99e22acc0c9b835eb2f9a4e87e9dc475ca6" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.582447 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-6b66757dfd-k89tm" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.587866 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae" path="/var/lib/kubelet/pods/0e28ce69-8b5d-4e5e-b8bb-860e6e3745ae/volumes" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.588528 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="180488ea-6eeb-4078-9b57-351bdfb54f5d" path="/var/lib/kubelet/pods/180488ea-6eeb-4078-9b57-351bdfb54f5d/volumes" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.589102 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d5c2e68-fb9d-4818-a0c2-27db2516e92b" path="/var/lib/kubelet/pods/1d5c2e68-fb9d-4818-a0c2-27db2516e92b/volumes" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.590174 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="324ace6f-8bac-4269-a674-d9b6e990cd18" path="/var/lib/kubelet/pods/324ace6f-8bac-4269-a674-d9b6e990cd18/volumes" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.590599 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3b2ffa52-4d19-46f6-aea5-62fc758def73" path="/var/lib/kubelet/pods/3b2ffa52-4d19-46f6-aea5-62fc758def73/volumes" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.591032 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3da300ce-eec2-414b-afdd-01d3f3bd71c5" path="/var/lib/kubelet/pods/3da300ce-eec2-414b-afdd-01d3f3bd71c5/volumes" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.597524 4558 scope.go:117] "RemoveContainer" containerID="a79b4e6f8ce00fed51a727ad2bc430351f4d330704b2b9cb8bb5ce9d963a3a12" Jan 20 17:00:46 crc kubenswrapper[4558]: E0120 17:00:46.597818 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could 
not find container \"a79b4e6f8ce00fed51a727ad2bc430351f4d330704b2b9cb8bb5ce9d963a3a12\": container with ID starting with a79b4e6f8ce00fed51a727ad2bc430351f4d330704b2b9cb8bb5ce9d963a3a12 not found: ID does not exist" containerID="a79b4e6f8ce00fed51a727ad2bc430351f4d330704b2b9cb8bb5ce9d963a3a12" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.597844 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a79b4e6f8ce00fed51a727ad2bc430351f4d330704b2b9cb8bb5ce9d963a3a12"} err="failed to get container status \"a79b4e6f8ce00fed51a727ad2bc430351f4d330704b2b9cb8bb5ce9d963a3a12\": rpc error: code = NotFound desc = could not find container \"a79b4e6f8ce00fed51a727ad2bc430351f4d330704b2b9cb8bb5ce9d963a3a12\": container with ID starting with a79b4e6f8ce00fed51a727ad2bc430351f4d330704b2b9cb8bb5ce9d963a3a12 not found: ID does not exist" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.597865 4558 scope.go:117] "RemoveContainer" containerID="ed2110d076583425a8af34103095d99e22acc0c9b835eb2f9a4e87e9dc475ca6" Jan 20 17:00:46 crc kubenswrapper[4558]: E0120 17:00:46.599153 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ed2110d076583425a8af34103095d99e22acc0c9b835eb2f9a4e87e9dc475ca6\": container with ID starting with ed2110d076583425a8af34103095d99e22acc0c9b835eb2f9a4e87e9dc475ca6 not found: ID does not exist" containerID="ed2110d076583425a8af34103095d99e22acc0c9b835eb2f9a4e87e9dc475ca6" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.599197 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ed2110d076583425a8af34103095d99e22acc0c9b835eb2f9a4e87e9dc475ca6"} err="failed to get container status \"ed2110d076583425a8af34103095d99e22acc0c9b835eb2f9a4e87e9dc475ca6\": rpc error: code = NotFound desc = could not find container \"ed2110d076583425a8af34103095d99e22acc0c9b835eb2f9a4e87e9dc475ca6\": container with ID starting with ed2110d076583425a8af34103095d99e22acc0c9b835eb2f9a4e87e9dc475ca6 not found: ID does not exist" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.599225 4558 scope.go:117] "RemoveContainer" containerID="62565ba736515d1bd9fcfd983ba37f6e8370144dc694ce16eecef9e3a396d592" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.610924 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="54c4bfe3-42bf-46ea-8c23-0621f7b65686" path="/var/lib/kubelet/pods/54c4bfe3-42bf-46ea-8c23-0621f7b65686/volumes" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.611320 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="567ff26d-1c6e-4117-869d-3bfcb1a705fc" path="/var/lib/kubelet/pods/567ff26d-1c6e-4117-869d-3bfcb1a705fc/volumes" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.611778 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5a82a446-869f-466c-a345-bd211d1851c2" path="/var/lib/kubelet/pods/5a82a446-869f-466c-a345-bd211d1851c2/volumes" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.612346 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6374f20d-dc8a-4f1c-9df3-086e6904b394" path="/var/lib/kubelet/pods/6374f20d-dc8a-4f1c-9df3-086e6904b394/volumes" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.614399 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="764c7a15-6a1a-470c-9d0b-a63ed418cc09" path="/var/lib/kubelet/pods/764c7a15-6a1a-470c-9d0b-a63ed418cc09/volumes" 
Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.619615 4558 scope.go:117] "RemoveContainer" containerID="60c80db7b21795bae641e9692b2deb97128b55cb001a272936bfaf668d4bfcfb" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.620941 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7b3471c1-c42b-4be4-96fe-7e43ef86ba15" path="/var/lib/kubelet/pods/7b3471c1-c42b-4be4-96fe-7e43ef86ba15/volumes" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.621688 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b28e6999-784e-4577-88bb-db648f7a3cbc" path="/var/lib/kubelet/pods/b28e6999-784e-4577-88bb-db648f7a3cbc/volumes" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.622512 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b8911a4f-a706-4956-9028-138c018a92ba" path="/var/lib/kubelet/pods/b8911a4f-a706-4956-9028-138c018a92ba/volumes" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.623122 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cae28711-fbe7-40cc-8f31-4b6332ab5378" path="/var/lib/kubelet/pods/cae28711-fbe7-40cc-8f31-4b6332ab5378/volumes" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.624141 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cde9092f-c62d-406c-b800-87c38b56402b" path="/var/lib/kubelet/pods/cde9092f-c62d-406c-b800-87c38b56402b/volumes" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.624724 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d2ccd35d-37c4-450d-b04c-ac505e35b0e8" path="/var/lib/kubelet/pods/d2ccd35d-37c4-450d-b04c-ac505e35b0e8/volumes" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.643655 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e554db35-2dba-4138-9ca0-bd1371a9c63d" path="/var/lib/kubelet/pods/e554db35-2dba-4138-9ca0-bd1371a9c63d/volumes" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.645447 4558 scope.go:117] "RemoveContainer" containerID="62565ba736515d1bd9fcfd983ba37f6e8370144dc694ce16eecef9e3a396d592" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.645723 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915-config-data\") pod \"69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915\" (UID: \"69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915\") " Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.645774 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915-combined-ca-bundle\") pod \"69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915\" (UID: \"69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915\") " Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.645911 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915-logs\") pod \"69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915\" (UID: \"69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915\") " Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.645961 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915-config-data-custom\") pod \"69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915\" (UID: \"69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915\") " Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 
17:00:46.645992 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kw87t\" (UniqueName: \"kubernetes.io/projected/69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915-kube-api-access-kw87t\") pod \"69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915\" (UID: \"69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915\") " Jan 20 17:00:46 crc kubenswrapper[4558]: E0120 17:00:46.646307 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"62565ba736515d1bd9fcfd983ba37f6e8370144dc694ce16eecef9e3a396d592\": container with ID starting with 62565ba736515d1bd9fcfd983ba37f6e8370144dc694ce16eecef9e3a396d592 not found: ID does not exist" containerID="62565ba736515d1bd9fcfd983ba37f6e8370144dc694ce16eecef9e3a396d592" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.646401 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"62565ba736515d1bd9fcfd983ba37f6e8370144dc694ce16eecef9e3a396d592"} err="failed to get container status \"62565ba736515d1bd9fcfd983ba37f6e8370144dc694ce16eecef9e3a396d592\": rpc error: code = NotFound desc = could not find container \"62565ba736515d1bd9fcfd983ba37f6e8370144dc694ce16eecef9e3a396d592\": container with ID starting with 62565ba736515d1bd9fcfd983ba37f6e8370144dc694ce16eecef9e3a396d592 not found: ID does not exist" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.646473 4558 scope.go:117] "RemoveContainer" containerID="60c80db7b21795bae641e9692b2deb97128b55cb001a272936bfaf668d4bfcfb" Jan 20 17:00:46 crc kubenswrapper[4558]: E0120 17:00:46.647032 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"60c80db7b21795bae641e9692b2deb97128b55cb001a272936bfaf668d4bfcfb\": container with ID starting with 60c80db7b21795bae641e9692b2deb97128b55cb001a272936bfaf668d4bfcfb not found: ID does not exist" containerID="60c80db7b21795bae641e9692b2deb97128b55cb001a272936bfaf668d4bfcfb" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.647058 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"60c80db7b21795bae641e9692b2deb97128b55cb001a272936bfaf668d4bfcfb"} err="failed to get container status \"60c80db7b21795bae641e9692b2deb97128b55cb001a272936bfaf668d4bfcfb\": rpc error: code = NotFound desc = could not find container \"60c80db7b21795bae641e9692b2deb97128b55cb001a272936bfaf668d4bfcfb\": container with ID starting with 60c80db7b21795bae641e9692b2deb97128b55cb001a272936bfaf668d4bfcfb not found: ID does not exist" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.647606 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915-logs" (OuterVolumeSpecName: "logs") pod "69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915" (UID: "69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.650174 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915-kube-api-access-kw87t" (OuterVolumeSpecName: "kube-api-access-kw87t") pod "69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915" (UID: "69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915"). InnerVolumeSpecName "kube-api-access-kw87t". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.650739 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915" (UID: "69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.671922 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915" (UID: "69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.681122 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/rabbitmq-server-0" podUID="3f868eba-f4e1-4e32-b271-391cf271fe97" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.97:5671: connect: connection refused" Jan 20 17:00:46 crc kubenswrapper[4558]: I0120 17:00:46.719547 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915-config-data" (OuterVolumeSpecName: "config-data") pod "69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915" (UID: "69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:46.748226 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/d3643a65-70b3-488e-ad03-8b7ad6ae4539-kolla-config\") pod \"openstack-galera-0\" (UID: \"d3643a65-70b3-488e-ad03-8b7ad6ae4539\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:46.748315 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7lzzs\" (UniqueName: \"kubernetes.io/projected/d3643a65-70b3-488e-ad03-8b7ad6ae4539-kube-api-access-7lzzs\") pod \"openstack-galera-0\" (UID: \"d3643a65-70b3-488e-ad03-8b7ad6ae4539\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:46.748431 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d3643a65-70b3-488e-ad03-8b7ad6ae4539-operator-scripts\") pod \"openstack-galera-0\" (UID: \"d3643a65-70b3-488e-ad03-8b7ad6ae4539\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:46.748618 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3643a65-70b3-488e-ad03-8b7ad6ae4539-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"d3643a65-70b3-488e-ad03-8b7ad6ae4539\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:46.748702 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/d3643a65-70b3-488e-ad03-8b7ad6ae4539-config-data-default\") pod \"openstack-galera-0\" (UID: 
\"d3643a65-70b3-488e-ad03-8b7ad6ae4539\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:46.748790 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:46.748800 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:46.748810 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:46.748818 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:46.748826 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kw87t\" (UniqueName: \"kubernetes.io/projected/69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915-kube-api-access-kw87t\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:47 crc kubenswrapper[4558]: E0120 17:00:46.748886 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-config-data: configmap "openstack-config-data" not found Jan 20 17:00:47 crc kubenswrapper[4558]: E0120 17:00:46.749073 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-config-data: configmap "openstack-config-data" not found Jan 20 17:00:47 crc kubenswrapper[4558]: E0120 17:00:46.749463 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 17:00:47 crc kubenswrapper[4558]: E0120 17:00:46.749553 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:00:47 crc kubenswrapper[4558]: E0120 17:00:46.748928 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/d3643a65-70b3-488e-ad03-8b7ad6ae4539-config-data-default podName:d3643a65-70b3-488e-ad03-8b7ad6ae4539 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:48.748914098 +0000 UTC m=+1142.509252065 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "config-data-default" (UniqueName: "kubernetes.io/configmap/d3643a65-70b3-488e-ad03-8b7ad6ae4539-config-data-default") pod "openstack-galera-0" (UID: "d3643a65-70b3-488e-ad03-8b7ad6ae4539") : configmap "openstack-config-data" not found Jan 20 17:00:47 crc kubenswrapper[4558]: E0120 17:00:46.749655 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/d3643a65-70b3-488e-ad03-8b7ad6ae4539-kolla-config podName:d3643a65-70b3-488e-ad03-8b7ad6ae4539 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:48.749578116 +0000 UTC m=+1142.509916083 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kolla-config" (UniqueName: "kubernetes.io/configmap/d3643a65-70b3-488e-ad03-8b7ad6ae4539-kolla-config") pod "openstack-galera-0" (UID: "d3643a65-70b3-488e-ad03-8b7ad6ae4539") : configmap "openstack-config-data" not found Jan 20 17:00:47 crc kubenswrapper[4558]: E0120 17:00:46.749668 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/d3643a65-70b3-488e-ad03-8b7ad6ae4539-operator-scripts podName:d3643a65-70b3-488e-ad03-8b7ad6ae4539 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:48.749663397 +0000 UTC m=+1142.510001364 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/d3643a65-70b3-488e-ad03-8b7ad6ae4539-operator-scripts") pod "openstack-galera-0" (UID: "d3643a65-70b3-488e-ad03-8b7ad6ae4539") : configmap "openstack-scripts" not found Jan 20 17:00:47 crc kubenswrapper[4558]: E0120 17:00:46.749678 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d3643a65-70b3-488e-ad03-8b7ad6ae4539-combined-ca-bundle podName:d3643a65-70b3-488e-ad03-8b7ad6ae4539 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:48.749673135 +0000 UTC m=+1142.510011102 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/d3643a65-70b3-488e-ad03-8b7ad6ae4539-combined-ca-bundle") pod "openstack-galera-0" (UID: "d3643a65-70b3-488e-ad03-8b7ad6ae4539") : secret "combined-ca-bundle" not found Jan 20 17:00:47 crc kubenswrapper[4558]: E0120 17:00:46.751583 4558 projected.go:194] Error preparing data for projected volume kube-api-access-7lzzs for pod openstack-kuttl-tests/openstack-galera-0: failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 17:00:47 crc kubenswrapper[4558]: E0120 17:00:46.751612 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/d3643a65-70b3-488e-ad03-8b7ad6ae4539-kube-api-access-7lzzs podName:d3643a65-70b3-488e-ad03-8b7ad6ae4539 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:48.751604015 +0000 UTC m=+1142.511941982 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-7lzzs" (UniqueName: "kubernetes.io/projected/d3643a65-70b3-488e-ad03-8b7ad6ae4539-kube-api-access-7lzzs") pod "openstack-galera-0" (UID: "d3643a65-70b3-488e-ad03-8b7ad6ae4539") : failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:46.773834 4558 generic.go:334] "Generic (PLEG): container finished" podID="c268a2f5-7c67-4935-8f3e-bdd83aeccc95" containerID="174b23feee097674daeefb60dbe7c17cfa25832ffd4e0ad08f3add5f527ebf7e" exitCode=0 Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:46.773855 4558 generic.go:334] "Generic (PLEG): container finished" podID="c268a2f5-7c67-4935-8f3e-bdd83aeccc95" containerID="7711caea9ce1243c202dbf4adf45769a91fe36f8d08e7c2d792cbee835637b2d" exitCode=143 Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:46.773884 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-d54787f8d-7fznp" event={"ID":"c268a2f5-7c67-4935-8f3e-bdd83aeccc95","Type":"ContainerDied","Data":"174b23feee097674daeefb60dbe7c17cfa25832ffd4e0ad08f3add5f527ebf7e"} Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:46.773903 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-d54787f8d-7fznp" event={"ID":"c268a2f5-7c67-4935-8f3e-bdd83aeccc95","Type":"ContainerDied","Data":"7711caea9ce1243c202dbf4adf45769a91fe36f8d08e7c2d792cbee835637b2d"} Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:46.773913 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-d54787f8d-7fznp" event={"ID":"c268a2f5-7c67-4935-8f3e-bdd83aeccc95","Type":"ContainerDied","Data":"ed64705417da772675613972d81e2d37068b2ee1e7b594e60bfac14293e54bd0"} Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:46.773927 4558 scope.go:117] "RemoveContainer" containerID="174b23feee097674daeefb60dbe7c17cfa25832ffd4e0ad08f3add5f527ebf7e" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:46.774015 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-d54787f8d-7fznp" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:46.779729 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"10c92abf-cd48-4659-8595-ce9610c0fe2e","Type":"ContainerDied","Data":"8e9bddfc70e9709e67f22104987e15e8c3a3f1b421349d32348a0a6dbd582a39"} Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:46.779790 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:46.782749 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"9ea3b871-4db3-4108-baea-e57a23d9d6c5","Type":"ContainerDied","Data":"434d0d0f14cb12224054882a03ccf11144ee356b713b8f02b115a3bd459ff3c0"} Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:46.782820 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:46.790862 4558 generic.go:334] "Generic (PLEG): container finished" podID="40234b95-d302-420a-96d7-c56ffe609530" containerID="8645a2a68bc998fad70edddaf73505c46e74320243525ed7f7c2c0c00d201e22" exitCode=0 Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:46.790895 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-7649847b9c-9wk9v" event={"ID":"40234b95-d302-420a-96d7-c56ffe609530","Type":"ContainerDied","Data":"8645a2a68bc998fad70edddaf73505c46e74320243525ed7f7c2c0c00d201e22"} Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:46.790910 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-7649847b9c-9wk9v" event={"ID":"40234b95-d302-420a-96d7-c56ffe609530","Type":"ContainerDied","Data":"760e77d6a0cd8419ad9a68e3cf141ac4c57667551c43955981772f313ed258f5"} Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:46.790919 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="760e77d6a0cd8419ad9a68e3cf141ac4c57667551c43955981772f313ed258f5" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:46.793260 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"8074f6f3-f564-42bc-b08b-28ffe75bbbc5","Type":"ContainerDied","Data":"4c55940b3048dc7379bec7d4ab8d303574107c3616709230db18b4ab5204470b"} Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:46.793321 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:46.794849 4558 generic.go:334] "Generic (PLEG): container finished" podID="f881d3ea-5346-49f4-8618-7271150ba300" containerID="333bbcc9943770e21b9d9307f5f97b0c97ba4ca4036f8cc5c7aade2abf4d0a2a" exitCode=0 Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:46.794880 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-rqg8f" event={"ID":"f881d3ea-5346-49f4-8618-7271150ba300","Type":"ContainerDied","Data":"333bbcc9943770e21b9d9307f5f97b0c97ba4ca4036f8cc5c7aade2abf4d0a2a"} Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:46.794893 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-rqg8f" event={"ID":"f881d3ea-5346-49f4-8618-7271150ba300","Type":"ContainerStarted","Data":"c75020bce4190fc5fc326bf1e1bc3c73686d3abcdcb0b8a8a1f7e2444f209db5"} Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:46.799333 4558 generic.go:334] "Generic (PLEG): container finished" podID="69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915" containerID="5a4543f4f5068e723c149b86d815e8bf663acc3075125bad09cc80cc4d87503d" exitCode=0 Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:46.799400 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-6b66757dfd-k89tm" event={"ID":"69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915","Type":"ContainerDied","Data":"5a4543f4f5068e723c149b86d815e8bf663acc3075125bad09cc80cc4d87503d"} Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:46.799459 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-6b66757dfd-k89tm" event={"ID":"69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915","Type":"ContainerDied","Data":"61fa0fb541c45a5e261d0db1712d9f845beda9e8cf2cd1e2bc7309d58b007aac"} Jan 
20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:46.799509 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-6b66757dfd-k89tm" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:46.802012 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/keystone-55ff946595-f8gjf" podUID="4969c1af-53c0-435a-bd06-6bd493c81c80" containerName="keystone-api" containerID="cri-o://43610636e4831cc5451e75428b042d0c9d0a563ad6b875c786d70693be0d98f3" gracePeriod=30 Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:46.802126 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:46.802686 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-worker-c4f864f89-58gb9" podUID="de7f5467-1e83-42f0-86bb-ade85deec8f3" containerName="barbican-worker-log" containerID="cri-o://6a1a27bb8b609381b6880d7309447e5d15f349864bd55fc2d2324115d9939990" gracePeriod=30 Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:46.802800 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-keystone-listener-7fc5477d66-pz8q2" podUID="7776f921-0ae7-4a8c-b444-cf5b4b9a4f65" containerName="barbican-keystone-listener-log" containerID="cri-o://6b14e8b1ef1901e159022cfb9e13791cc8bca3602aeca47855720efbfe3f6831" gracePeriod=30 Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:46.803042 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-api-5df68bf4dd-zbx8h" podUID="3735ca3d-3764-4d36-b912-fbf0bfb96dd8" containerName="barbican-api-log" containerID="cri-o://b6c74e47f2a677093337ddf0004171daa0ef22786b50d9ee2e84816a4f1d1a44" gracePeriod=30 Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:46.803345 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-worker-c4f864f89-58gb9" podUID="de7f5467-1e83-42f0-86bb-ade85deec8f3" containerName="barbican-worker" containerID="cri-o://ee8ddf384630628e434ded9ac4a8dd2f149e7593afd334474324206c1828ec18" gracePeriod=30 Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:46.803425 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-keystone-listener-7fc5477d66-pz8q2" podUID="7776f921-0ae7-4a8c-b444-cf5b4b9a4f65" containerName="barbican-keystone-listener" containerID="cri-o://6c9fee82734b4e79e64bd03d6cf2d27e8723c07ddab900b9256e6a58cb6d0abe" gracePeriod=30 Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:46.803558 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-api-5df68bf4dd-zbx8h" podUID="3735ca3d-3764-4d36-b912-fbf0bfb96dd8" containerName="barbican-api" containerID="cri-o://cecf310a1a5c01a8e2cb6f8db17408c6746dfda1028e4915a95ed043910b2cbd" gracePeriod=30 Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:46.870047 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/d3643a65-70b3-488e-ad03-8b7ad6ae4539-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"d3643a65-70b3-488e-ad03-8b7ad6ae4539\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:00:47 crc kubenswrapper[4558]: E0120 17:00:46.870340 4558 secret.go:188] 
Couldn't get secret openstack-kuttl-tests/cert-galera-openstack-svc: object "openstack-kuttl-tests"/"cert-galera-openstack-svc" not registered Jan 20 17:00:47 crc kubenswrapper[4558]: E0120 17:00:46.870376 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d3643a65-70b3-488e-ad03-8b7ad6ae4539-galera-tls-certs podName:d3643a65-70b3-488e-ad03-8b7ad6ae4539 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:48.870364907 +0000 UTC m=+1142.630702874 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "galera-tls-certs" (UniqueName: "kubernetes.io/secret/d3643a65-70b3-488e-ad03-8b7ad6ae4539-galera-tls-certs") pod "openstack-galera-0" (UID: "d3643a65-70b3-488e-ad03-8b7ad6ae4539") : object "openstack-kuttl-tests"/"cert-galera-openstack-svc" not registered Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:46.898896 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-7649847b9c-9wk9v" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:46.899257 4558 scope.go:117] "RemoveContainer" containerID="7711caea9ce1243c202dbf4adf45769a91fe36f8d08e7c2d792cbee835637b2d" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:46.942320 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-d54787f8d-7fznp"] Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:46.942467 4558 scope.go:117] "RemoveContainer" containerID="174b23feee097674daeefb60dbe7c17cfa25832ffd4e0ad08f3add5f527ebf7e" Jan 20 17:00:47 crc kubenswrapper[4558]: E0120 17:00:46.943175 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"174b23feee097674daeefb60dbe7c17cfa25832ffd4e0ad08f3add5f527ebf7e\": container with ID starting with 174b23feee097674daeefb60dbe7c17cfa25832ffd4e0ad08f3add5f527ebf7e not found: ID does not exist" containerID="174b23feee097674daeefb60dbe7c17cfa25832ffd4e0ad08f3add5f527ebf7e" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:46.943218 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"174b23feee097674daeefb60dbe7c17cfa25832ffd4e0ad08f3add5f527ebf7e"} err="failed to get container status \"174b23feee097674daeefb60dbe7c17cfa25832ffd4e0ad08f3add5f527ebf7e\": rpc error: code = NotFound desc = could not find container \"174b23feee097674daeefb60dbe7c17cfa25832ffd4e0ad08f3add5f527ebf7e\": container with ID starting with 174b23feee097674daeefb60dbe7c17cfa25832ffd4e0ad08f3add5f527ebf7e not found: ID does not exist" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:46.943243 4558 scope.go:117] "RemoveContainer" containerID="7711caea9ce1243c202dbf4adf45769a91fe36f8d08e7c2d792cbee835637b2d" Jan 20 17:00:47 crc kubenswrapper[4558]: E0120 17:00:46.943489 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7711caea9ce1243c202dbf4adf45769a91fe36f8d08e7c2d792cbee835637b2d\": container with ID starting with 7711caea9ce1243c202dbf4adf45769a91fe36f8d08e7c2d792cbee835637b2d not found: ID does not exist" containerID="7711caea9ce1243c202dbf4adf45769a91fe36f8d08e7c2d792cbee835637b2d" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:46.943508 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7711caea9ce1243c202dbf4adf45769a91fe36f8d08e7c2d792cbee835637b2d"} err="failed to get container status 
\"7711caea9ce1243c202dbf4adf45769a91fe36f8d08e7c2d792cbee835637b2d\": rpc error: code = NotFound desc = could not find container \"7711caea9ce1243c202dbf4adf45769a91fe36f8d08e7c2d792cbee835637b2d\": container with ID starting with 7711caea9ce1243c202dbf4adf45769a91fe36f8d08e7c2d792cbee835637b2d not found: ID does not exist" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:46.943522 4558 scope.go:117] "RemoveContainer" containerID="174b23feee097674daeefb60dbe7c17cfa25832ffd4e0ad08f3add5f527ebf7e" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:46.943685 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"174b23feee097674daeefb60dbe7c17cfa25832ffd4e0ad08f3add5f527ebf7e"} err="failed to get container status \"174b23feee097674daeefb60dbe7c17cfa25832ffd4e0ad08f3add5f527ebf7e\": rpc error: code = NotFound desc = could not find container \"174b23feee097674daeefb60dbe7c17cfa25832ffd4e0ad08f3add5f527ebf7e\": container with ID starting with 174b23feee097674daeefb60dbe7c17cfa25832ffd4e0ad08f3add5f527ebf7e not found: ID does not exist" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:46.943699 4558 scope.go:117] "RemoveContainer" containerID="7711caea9ce1243c202dbf4adf45769a91fe36f8d08e7c2d792cbee835637b2d" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:46.943858 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7711caea9ce1243c202dbf4adf45769a91fe36f8d08e7c2d792cbee835637b2d"} err="failed to get container status \"7711caea9ce1243c202dbf4adf45769a91fe36f8d08e7c2d792cbee835637b2d\": rpc error: code = NotFound desc = could not find container \"7711caea9ce1243c202dbf4adf45769a91fe36f8d08e7c2d792cbee835637b2d\": container with ID starting with 7711caea9ce1243c202dbf4adf45769a91fe36f8d08e7c2d792cbee835637b2d not found: ID does not exist" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:46.943871 4558 scope.go:117] "RemoveContainer" containerID="6c101cbe30eb5d950364b9539489f846cc6d1f7298614409d0709726ebba0b06" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:46.946322 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/placement-d54787f8d-7fznp"] Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:46.950707 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:46.954949 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:46.987154 4558 scope.go:117] "RemoveContainer" containerID="6074e6e8906baa2f1dffa9b96ff41ea08d5604ac43ef2c6593b686fd35af868a" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.006929 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.012032 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.016849 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-6b66757dfd-k89tm"] Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.021278 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-6b66757dfd-k89tm"] Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.021402 4558 scope.go:117] "RemoveContainer" 
containerID="20cf0ea0e052fa597f57928ad72ba1f7803b3e9d839d21a4246b51731cb87948" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.028581 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.030500 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.033696 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.037182 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.073575 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40234b95-d302-420a-96d7-c56ffe609530-combined-ca-bundle\") pod \"40234b95-d302-420a-96d7-c56ffe609530\" (UID: \"40234b95-d302-420a-96d7-c56ffe609530\") " Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.073951 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40234b95-d302-420a-96d7-c56ffe609530-config-data\") pod \"40234b95-d302-420a-96d7-c56ffe609530\" (UID: \"40234b95-d302-420a-96d7-c56ffe609530\") " Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.073991 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/40234b95-d302-420a-96d7-c56ffe609530-logs\") pod \"40234b95-d302-420a-96d7-c56ffe609530\" (UID: \"40234b95-d302-420a-96d7-c56ffe609530\") " Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.074085 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/40234b95-d302-420a-96d7-c56ffe609530-config-data-custom\") pod \"40234b95-d302-420a-96d7-c56ffe609530\" (UID: \"40234b95-d302-420a-96d7-c56ffe609530\") " Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.074109 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wrts6\" (UniqueName: \"kubernetes.io/projected/40234b95-d302-420a-96d7-c56ffe609530-kube-api-access-wrts6\") pod \"40234b95-d302-420a-96d7-c56ffe609530\" (UID: \"40234b95-d302-420a-96d7-c56ffe609530\") " Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.074639 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/40234b95-d302-420a-96d7-c56ffe609530-logs" (OuterVolumeSpecName: "logs") pod "40234b95-d302-420a-96d7-c56ffe609530" (UID: "40234b95-d302-420a-96d7-c56ffe609530"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.074819 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/40234b95-d302-420a-96d7-c56ffe609530-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:47 crc kubenswrapper[4558]: E0120 17:00:47.074865 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 20 17:00:47 crc kubenswrapper[4558]: E0120 17:00:47.074913 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/3f868eba-f4e1-4e32-b271-391cf271fe97-config-data podName:3f868eba-f4e1-4e32-b271-391cf271fe97 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:51.074901411 +0000 UTC m=+1144.835239378 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/3f868eba-f4e1-4e32-b271-391cf271fe97-config-data") pod "rabbitmq-server-0" (UID: "3f868eba-f4e1-4e32-b271-391cf271fe97") : configmap "rabbitmq-config-data" not found Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.077800 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40234b95-d302-420a-96d7-c56ffe609530-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "40234b95-d302-420a-96d7-c56ffe609530" (UID: "40234b95-d302-420a-96d7-c56ffe609530"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.078915 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/40234b95-d302-420a-96d7-c56ffe609530-kube-api-access-wrts6" (OuterVolumeSpecName: "kube-api-access-wrts6") pod "40234b95-d302-420a-96d7-c56ffe609530" (UID: "40234b95-d302-420a-96d7-c56ffe609530"). InnerVolumeSpecName "kube-api-access-wrts6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.110390 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40234b95-d302-420a-96d7-c56ffe609530-config-data" (OuterVolumeSpecName: "config-data") pod "40234b95-d302-420a-96d7-c56ffe609530" (UID: "40234b95-d302-420a-96d7-c56ffe609530"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.130522 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40234b95-d302-420a-96d7-c56ffe609530-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "40234b95-d302-420a-96d7-c56ffe609530" (UID: "40234b95-d302-420a-96d7-c56ffe609530"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.176100 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/d3643a65-70b3-488e-ad03-8b7ad6ae4539-config-data-default\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.176120 4558 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/d3643a65-70b3-488e-ad03-8b7ad6ae4539-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.176129 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d3643a65-70b3-488e-ad03-8b7ad6ae4539-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.176138 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40234b95-d302-420a-96d7-c56ffe609530-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.176145 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40234b95-d302-420a-96d7-c56ffe609530-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.176153 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/40234b95-d302-420a-96d7-c56ffe609530-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.176175 4558 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/d3643a65-70b3-488e-ad03-8b7ad6ae4539-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.176184 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wrts6\" (UniqueName: \"kubernetes.io/projected/40234b95-d302-420a-96d7-c56ffe609530-kube-api-access-wrts6\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.176192 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7lzzs\" (UniqueName: \"kubernetes.io/projected/d3643a65-70b3-488e-ad03-8b7ad6ae4539-kube-api-access-7lzzs\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.176200 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3643a65-70b3-488e-ad03-8b7ad6ae4539-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.186526 4558 scope.go:117] "RemoveContainer" containerID="37fbfba85457fa31861b89574c96f8a2be864e14e57e69c2dae33967885a1430" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.188191 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-fbtv5" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.205719 4558 scope.go:117] "RemoveContainer" containerID="255c4f9294e2677844d684383d43130961e7ee3e4db470ecb77910d99c7f5de5" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.221108 4558 scope.go:117] "RemoveContainer" containerID="cc735ea1e3ff81a6e1e946cb0561445b4ff17c02725e6a7c5e3b8fb04ea0d90d" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.234295 4558 scope.go:117] "RemoveContainer" containerID="5a4543f4f5068e723c149b86d815e8bf663acc3075125bad09cc80cc4d87503d" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.247222 4558 scope.go:117] "RemoveContainer" containerID="43ce3893218e12bafcf3980d1a94c5b6f79210afef04e341874b8c63d98def1c" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.260521 4558 scope.go:117] "RemoveContainer" containerID="5a4543f4f5068e723c149b86d815e8bf663acc3075125bad09cc80cc4d87503d" Jan 20 17:00:47 crc kubenswrapper[4558]: E0120 17:00:47.260785 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5a4543f4f5068e723c149b86d815e8bf663acc3075125bad09cc80cc4d87503d\": container with ID starting with 5a4543f4f5068e723c149b86d815e8bf663acc3075125bad09cc80cc4d87503d not found: ID does not exist" containerID="5a4543f4f5068e723c149b86d815e8bf663acc3075125bad09cc80cc4d87503d" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.260807 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5a4543f4f5068e723c149b86d815e8bf663acc3075125bad09cc80cc4d87503d"} err="failed to get container status \"5a4543f4f5068e723c149b86d815e8bf663acc3075125bad09cc80cc4d87503d\": rpc error: code = NotFound desc = could not find container \"5a4543f4f5068e723c149b86d815e8bf663acc3075125bad09cc80cc4d87503d\": container with ID starting with 5a4543f4f5068e723c149b86d815e8bf663acc3075125bad09cc80cc4d87503d not found: ID does not exist" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.260826 4558 scope.go:117] "RemoveContainer" containerID="43ce3893218e12bafcf3980d1a94c5b6f79210afef04e341874b8c63d98def1c" Jan 20 17:00:47 crc kubenswrapper[4558]: E0120 17:00:47.261039 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"43ce3893218e12bafcf3980d1a94c5b6f79210afef04e341874b8c63d98def1c\": container with ID starting with 43ce3893218e12bafcf3980d1a94c5b6f79210afef04e341874b8c63d98def1c not found: ID does not exist" containerID="43ce3893218e12bafcf3980d1a94c5b6f79210afef04e341874b8c63d98def1c" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.261055 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"43ce3893218e12bafcf3980d1a94c5b6f79210afef04e341874b8c63d98def1c"} err="failed to get container status \"43ce3893218e12bafcf3980d1a94c5b6f79210afef04e341874b8c63d98def1c\": rpc error: code = NotFound desc = could not find container \"43ce3893218e12bafcf3980d1a94c5b6f79210afef04e341874b8c63d98def1c\": container with ID starting with 43ce3893218e12bafcf3980d1a94c5b6f79210afef04e341874b8c63d98def1c not found: ID does not exist" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.276856 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m6sxc\" (UniqueName: \"kubernetes.io/projected/e353328b-4fe7-4637-8174-a3b227ad9761-kube-api-access-m6sxc\") pod 
\"e353328b-4fe7-4637-8174-a3b227ad9761\" (UID: \"e353328b-4fe7-4637-8174-a3b227ad9761\") " Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.276966 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e353328b-4fe7-4637-8174-a3b227ad9761-operator-scripts\") pod \"e353328b-4fe7-4637-8174-a3b227ad9761\" (UID: \"e353328b-4fe7-4637-8174-a3b227ad9761\") " Jan 20 17:00:47 crc kubenswrapper[4558]: E0120 17:00:47.277523 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 17:00:47 crc kubenswrapper[4558]: E0120 17:00:47.277559 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/bf9a27d0-1f64-4f43-a2a7-ce0902c6cd25-operator-scripts podName:bf9a27d0-1f64-4f43-a2a7-ce0902c6cd25 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:49.277547669 +0000 UTC m=+1143.037885636 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/bf9a27d0-1f64-4f43-a2a7-ce0902c6cd25-operator-scripts") pod "keystone-7e4f-account-create-update-pl5sm" (UID: "bf9a27d0-1f64-4f43-a2a7-ce0902c6cd25") : configmap "openstack-scripts" not found Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.278388 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e353328b-4fe7-4637-8174-a3b227ad9761-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e353328b-4fe7-4637-8174-a3b227ad9761" (UID: "e353328b-4fe7-4637-8174-a3b227ad9761"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.282087 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e353328b-4fe7-4637-8174-a3b227ad9761-kube-api-access-m6sxc" (OuterVolumeSpecName: "kube-api-access-m6sxc") pod "e353328b-4fe7-4637-8174-a3b227ad9761" (UID: "e353328b-4fe7-4637-8174-a3b227ad9761"). InnerVolumeSpecName "kube-api-access-m6sxc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:00:47 crc kubenswrapper[4558]: E0120 17:00:47.379427 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-neutron-ovndbs: secret "cert-neutron-ovndbs" not found Jan 20 17:00:47 crc kubenswrapper[4558]: E0120 17:00:47.379477 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-ovndb-tls-certs podName:7d0b0e33-9ce7-44d9-8b09-6008715fe1a0 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:51.379464074 +0000 UTC m=+1145.139802042 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "ovndb-tls-certs" (UniqueName: "kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-ovndb-tls-certs") pod "neutron-5d557df858-frznf" (UID: "7d0b0e33-9ce7-44d9-8b09-6008715fe1a0") : secret "cert-neutron-ovndbs" not found Jan 20 17:00:47 crc kubenswrapper[4558]: E0120 17:00:47.379561 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:00:47 crc kubenswrapper[4558]: E0120 17:00:47.379612 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/neutron-httpd-config: secret "neutron-httpd-config" not found Jan 20 17:00:47 crc kubenswrapper[4558]: E0120 17:00:47.379630 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-combined-ca-bundle podName:7d0b0e33-9ce7-44d9-8b09-6008715fe1a0 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:51.379616902 +0000 UTC m=+1145.139954870 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-combined-ca-bundle") pod "neutron-5d557df858-frznf" (UID: "7d0b0e33-9ce7-44d9-8b09-6008715fe1a0") : secret "combined-ca-bundle" not found Jan 20 17:00:47 crc kubenswrapper[4558]: E0120 17:00:47.379645 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-httpd-config podName:7d0b0e33-9ce7-44d9-8b09-6008715fe1a0 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:51.379639604 +0000 UTC m=+1145.139977572 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "httpd-config" (UniqueName: "kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-httpd-config") pod "neutron-5d557df858-frznf" (UID: "7d0b0e33-9ce7-44d9-8b09-6008715fe1a0") : secret "neutron-httpd-config" not found Jan 20 17:00:47 crc kubenswrapper[4558]: E0120 17:00:47.379695 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-neutron-internal-svc: secret "cert-neutron-internal-svc" not found Jan 20 17:00:47 crc kubenswrapper[4558]: E0120 17:00:47.379717 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-internal-tls-certs podName:7d0b0e33-9ce7-44d9-8b09-6008715fe1a0 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:51.379711229 +0000 UTC m=+1145.140049196 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-internal-tls-certs") pod "neutron-5d557df858-frznf" (UID: "7d0b0e33-9ce7-44d9-8b09-6008715fe1a0") : secret "cert-neutron-internal-svc" not found Jan 20 17:00:47 crc kubenswrapper[4558]: E0120 17:00:47.379764 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-neutron-public-svc: secret "cert-neutron-public-svc" not found Jan 20 17:00:47 crc kubenswrapper[4558]: E0120 17:00:47.379783 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-public-tls-certs podName:7d0b0e33-9ce7-44d9-8b09-6008715fe1a0 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:51.379777704 +0000 UTC m=+1145.140115672 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-public-tls-certs") pod "neutron-5d557df858-frznf" (UID: "7d0b0e33-9ce7-44d9-8b09-6008715fe1a0") : secret "cert-neutron-public-svc" not found Jan 20 17:00:47 crc kubenswrapper[4558]: E0120 17:00:47.379808 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/neutron-config: secret "neutron-config" not found Jan 20 17:00:47 crc kubenswrapper[4558]: E0120 17:00:47.379845 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-config podName:7d0b0e33-9ce7-44d9-8b09-6008715fe1a0 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:51.379840052 +0000 UTC m=+1145.140178019 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-config") pod "neutron-5d557df858-frznf" (UID: "7d0b0e33-9ce7-44d9-8b09-6008715fe1a0") : secret "neutron-config" not found Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.379583 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e353328b-4fe7-4637-8174-a3b227ad9761-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.379867 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m6sxc\" (UniqueName: \"kubernetes.io/projected/e353328b-4fe7-4637-8174-a3b227ad9761-kube-api-access-m6sxc\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.528144 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-7e4f-account-create-update-pl5sm" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.606373 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-5d77597494-bkh4z" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.608918 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-55ff946595-f8gjf" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.613237 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-api-5df68bf4dd-zbx8h" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.685247 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bf9a27d0-1f64-4f43-a2a7-ce0902c6cd25-operator-scripts\") pod \"bf9a27d0-1f64-4f43-a2a7-ce0902c6cd25\" (UID: \"bf9a27d0-1f64-4f43-a2a7-ce0902c6cd25\") " Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.685360 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-456km\" (UniqueName: \"kubernetes.io/projected/bf9a27d0-1f64-4f43-a2a7-ce0902c6cd25-kube-api-access-456km\") pod \"bf9a27d0-1f64-4f43-a2a7-ce0902c6cd25\" (UID: \"bf9a27d0-1f64-4f43-a2a7-ce0902c6cd25\") " Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.686649 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf9a27d0-1f64-4f43-a2a7-ce0902c6cd25-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "bf9a27d0-1f64-4f43-a2a7-ce0902c6cd25" (UID: "bf9a27d0-1f64-4f43-a2a7-ce0902c6cd25"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.689013 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf9a27d0-1f64-4f43-a2a7-ce0902c6cd25-kube-api-access-456km" (OuterVolumeSpecName: "kube-api-access-456km") pod "bf9a27d0-1f64-4f43-a2a7-ce0902c6cd25" (UID: "bf9a27d0-1f64-4f43-a2a7-ce0902c6cd25"). InnerVolumeSpecName "kube-api-access-456km". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.786723 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3735ca3d-3764-4d36-b912-fbf0bfb96dd8-combined-ca-bundle\") pod \"3735ca3d-3764-4d36-b912-fbf0bfb96dd8\" (UID: \"3735ca3d-3764-4d36-b912-fbf0bfb96dd8\") " Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.786757 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3735ca3d-3764-4d36-b912-fbf0bfb96dd8-config-data-custom\") pod \"3735ca3d-3764-4d36-b912-fbf0bfb96dd8\" (UID: \"3735ca3d-3764-4d36-b912-fbf0bfb96dd8\") " Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.786781 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3735ca3d-3764-4d36-b912-fbf0bfb96dd8-config-data\") pod \"3735ca3d-3764-4d36-b912-fbf0bfb96dd8\" (UID: \"3735ca3d-3764-4d36-b912-fbf0bfb96dd8\") " Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.786809 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-public-tls-certs\") pod \"4969c1af-53c0-435a-bd06-6bd493c81c80\" (UID: \"4969c1af-53c0-435a-bd06-6bd493c81c80\") " Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.786835 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rskmp\" (UniqueName: \"kubernetes.io/projected/a140c9a4-bec6-42e6-bc6c-d63566e4f7f6-kube-api-access-rskmp\") pod \"a140c9a4-bec6-42e6-bc6c-d63566e4f7f6\" (UID: \"a140c9a4-bec6-42e6-bc6c-d63566e4f7f6\") " Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.786860 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-internal-tls-certs\") pod \"4969c1af-53c0-435a-bd06-6bd493c81c80\" (UID: \"4969c1af-53c0-435a-bd06-6bd493c81c80\") " Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.786874 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3735ca3d-3764-4d36-b912-fbf0bfb96dd8-internal-tls-certs\") pod \"3735ca3d-3764-4d36-b912-fbf0bfb96dd8\" (UID: \"3735ca3d-3764-4d36-b912-fbf0bfb96dd8\") " Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.786891 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a140c9a4-bec6-42e6-bc6c-d63566e4f7f6-scripts\") pod \"a140c9a4-bec6-42e6-bc6c-d63566e4f7f6\" (UID: \"a140c9a4-bec6-42e6-bc6c-d63566e4f7f6\") " Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.786908 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: 
\"kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-credential-keys\") pod \"4969c1af-53c0-435a-bd06-6bd493c81c80\" (UID: \"4969c1af-53c0-435a-bd06-6bd493c81c80\") " Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.786925 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x9556\" (UniqueName: \"kubernetes.io/projected/3735ca3d-3764-4d36-b912-fbf0bfb96dd8-kube-api-access-x9556\") pod \"3735ca3d-3764-4d36-b912-fbf0bfb96dd8\" (UID: \"3735ca3d-3764-4d36-b912-fbf0bfb96dd8\") " Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.786948 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a140c9a4-bec6-42e6-bc6c-d63566e4f7f6-combined-ca-bundle\") pod \"a140c9a4-bec6-42e6-bc6c-d63566e4f7f6\" (UID: \"a140c9a4-bec6-42e6-bc6c-d63566e4f7f6\") " Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.786967 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a140c9a4-bec6-42e6-bc6c-d63566e4f7f6-public-tls-certs\") pod \"a140c9a4-bec6-42e6-bc6c-d63566e4f7f6\" (UID: \"a140c9a4-bec6-42e6-bc6c-d63566e4f7f6\") " Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.786982 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-config-data\") pod \"4969c1af-53c0-435a-bd06-6bd493c81c80\" (UID: \"4969c1af-53c0-435a-bd06-6bd493c81c80\") " Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.786997 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3735ca3d-3764-4d36-b912-fbf0bfb96dd8-logs\") pod \"3735ca3d-3764-4d36-b912-fbf0bfb96dd8\" (UID: \"3735ca3d-3764-4d36-b912-fbf0bfb96dd8\") " Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.787013 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a140c9a4-bec6-42e6-bc6c-d63566e4f7f6-internal-tls-certs\") pod \"a140c9a4-bec6-42e6-bc6c-d63566e4f7f6\" (UID: \"a140c9a4-bec6-42e6-bc6c-d63566e4f7f6\") " Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.787035 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3735ca3d-3764-4d36-b912-fbf0bfb96dd8-public-tls-certs\") pod \"3735ca3d-3764-4d36-b912-fbf0bfb96dd8\" (UID: \"3735ca3d-3764-4d36-b912-fbf0bfb96dd8\") " Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.787050 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a140c9a4-bec6-42e6-bc6c-d63566e4f7f6-config-data\") pod \"a140c9a4-bec6-42e6-bc6c-d63566e4f7f6\" (UID: \"a140c9a4-bec6-42e6-bc6c-d63566e4f7f6\") " Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.787077 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mq58f\" (UniqueName: \"kubernetes.io/projected/4969c1af-53c0-435a-bd06-6bd493c81c80-kube-api-access-mq58f\") pod \"4969c1af-53c0-435a-bd06-6bd493c81c80\" (UID: \"4969c1af-53c0-435a-bd06-6bd493c81c80\") " Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.787094 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: 
\"kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-fernet-keys\") pod \"4969c1af-53c0-435a-bd06-6bd493c81c80\" (UID: \"4969c1af-53c0-435a-bd06-6bd493c81c80\") " Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.787111 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-combined-ca-bundle\") pod \"4969c1af-53c0-435a-bd06-6bd493c81c80\" (UID: \"4969c1af-53c0-435a-bd06-6bd493c81c80\") " Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.787132 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a140c9a4-bec6-42e6-bc6c-d63566e4f7f6-logs\") pod \"a140c9a4-bec6-42e6-bc6c-d63566e4f7f6\" (UID: \"a140c9a4-bec6-42e6-bc6c-d63566e4f7f6\") " Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.787146 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-scripts\") pod \"4969c1af-53c0-435a-bd06-6bd493c81c80\" (UID: \"4969c1af-53c0-435a-bd06-6bd493c81c80\") " Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.787522 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-456km\" (UniqueName: \"kubernetes.io/projected/bf9a27d0-1f64-4f43-a2a7-ce0902c6cd25-kube-api-access-456km\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.787539 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bf9a27d0-1f64-4f43-a2a7-ce0902c6cd25-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.788141 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3735ca3d-3764-4d36-b912-fbf0bfb96dd8-logs" (OuterVolumeSpecName: "logs") pod "3735ca3d-3764-4d36-b912-fbf0bfb96dd8" (UID: "3735ca3d-3764-4d36-b912-fbf0bfb96dd8"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.789285 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a140c9a4-bec6-42e6-bc6c-d63566e4f7f6-logs" (OuterVolumeSpecName: "logs") pod "a140c9a4-bec6-42e6-bc6c-d63566e4f7f6" (UID: "a140c9a4-bec6-42e6-bc6c-d63566e4f7f6"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.790444 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a140c9a4-bec6-42e6-bc6c-d63566e4f7f6-kube-api-access-rskmp" (OuterVolumeSpecName: "kube-api-access-rskmp") pod "a140c9a4-bec6-42e6-bc6c-d63566e4f7f6" (UID: "a140c9a4-bec6-42e6-bc6c-d63566e4f7f6"). InnerVolumeSpecName "kube-api-access-rskmp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.791825 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a140c9a4-bec6-42e6-bc6c-d63566e4f7f6-scripts" (OuterVolumeSpecName: "scripts") pod "a140c9a4-bec6-42e6-bc6c-d63566e4f7f6" (UID: "a140c9a4-bec6-42e6-bc6c-d63566e4f7f6"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.792358 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4969c1af-53c0-435a-bd06-6bd493c81c80-kube-api-access-mq58f" (OuterVolumeSpecName: "kube-api-access-mq58f") pod "4969c1af-53c0-435a-bd06-6bd493c81c80" (UID: "4969c1af-53c0-435a-bd06-6bd493c81c80"). InnerVolumeSpecName "kube-api-access-mq58f". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.792517 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3735ca3d-3764-4d36-b912-fbf0bfb96dd8-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "3735ca3d-3764-4d36-b912-fbf0bfb96dd8" (UID: "3735ca3d-3764-4d36-b912-fbf0bfb96dd8"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.792593 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-scripts" (OuterVolumeSpecName: "scripts") pod "4969c1af-53c0-435a-bd06-6bd493c81c80" (UID: "4969c1af-53c0-435a-bd06-6bd493c81c80"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.794333 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "4969c1af-53c0-435a-bd06-6bd493c81c80" (UID: "4969c1af-53c0-435a-bd06-6bd493c81c80"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.794501 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3735ca3d-3764-4d36-b912-fbf0bfb96dd8-kube-api-access-x9556" (OuterVolumeSpecName: "kube-api-access-x9556") pod "3735ca3d-3764-4d36-b912-fbf0bfb96dd8" (UID: "3735ca3d-3764-4d36-b912-fbf0bfb96dd8"). InnerVolumeSpecName "kube-api-access-x9556". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.803080 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "4969c1af-53c0-435a-bd06-6bd493c81c80" (UID: "4969c1af-53c0-435a-bd06-6bd493c81c80"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.810763 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-fbtv5" event={"ID":"e353328b-4fe7-4637-8174-a3b227ad9761","Type":"ContainerDied","Data":"a68a81065b6949392acd6d15a4b91cb38706154b8afc2adbe6c374896e7b9115"} Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.810833 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-fbtv5" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.816875 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3735ca3d-3764-4d36-b912-fbf0bfb96dd8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3735ca3d-3764-4d36-b912-fbf0bfb96dd8" (UID: "3735ca3d-3764-4d36-b912-fbf0bfb96dd8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.817718 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4969c1af-53c0-435a-bd06-6bd493c81c80" (UID: "4969c1af-53c0-435a-bd06-6bd493c81c80"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.819284 4558 generic.go:334] "Generic (PLEG): container finished" podID="7776f921-0ae7-4a8c-b444-cf5b4b9a4f65" containerID="6b14e8b1ef1901e159022cfb9e13791cc8bca3602aeca47855720efbfe3f6831" exitCode=143 Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.819369 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-7fc5477d66-pz8q2" event={"ID":"7776f921-0ae7-4a8c-b444-cf5b4b9a4f65","Type":"ContainerDied","Data":"6b14e8b1ef1901e159022cfb9e13791cc8bca3602aeca47855720efbfe3f6831"} Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.820285 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-config-data" (OuterVolumeSpecName: "config-data") pod "4969c1af-53c0-435a-bd06-6bd493c81c80" (UID: "4969c1af-53c0-435a-bd06-6bd493c81c80"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.822987 4558 generic.go:334] "Generic (PLEG): container finished" podID="de7f5467-1e83-42f0-86bb-ade85deec8f3" containerID="6a1a27bb8b609381b6880d7309447e5d15f349864bd55fc2d2324115d9939990" exitCode=143 Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.823034 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-c4f864f89-58gb9" event={"ID":"de7f5467-1e83-42f0-86bb-ade85deec8f3","Type":"ContainerDied","Data":"6a1a27bb8b609381b6880d7309447e5d15f349864bd55fc2d2324115d9939990"} Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.825501 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-7e4f-account-create-update-pl5sm" event={"ID":"bf9a27d0-1f64-4f43-a2a7-ce0902c6cd25","Type":"ContainerDied","Data":"f9b251e73fdf0e6718ee26d92886dcdbac0ca8bedbf7a3dda606ca3e8f2b2aa0"} Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.825504 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-7e4f-account-create-update-pl5sm" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.827138 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "4969c1af-53c0-435a-bd06-6bd493c81c80" (UID: "4969c1af-53c0-435a-bd06-6bd493c81c80"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.831008 4558 generic.go:334] "Generic (PLEG): container finished" podID="3735ca3d-3764-4d36-b912-fbf0bfb96dd8" containerID="cecf310a1a5c01a8e2cb6f8db17408c6746dfda1028e4915a95ed043910b2cbd" exitCode=0 Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.831030 4558 generic.go:334] "Generic (PLEG): container finished" podID="3735ca3d-3764-4d36-b912-fbf0bfb96dd8" containerID="b6c74e47f2a677093337ddf0004171daa0ef22786b50d9ee2e84816a4f1d1a44" exitCode=143 Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.831075 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-5df68bf4dd-zbx8h" event={"ID":"3735ca3d-3764-4d36-b912-fbf0bfb96dd8","Type":"ContainerDied","Data":"cecf310a1a5c01a8e2cb6f8db17408c6746dfda1028e4915a95ed043910b2cbd"} Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.831102 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-5df68bf4dd-zbx8h" event={"ID":"3735ca3d-3764-4d36-b912-fbf0bfb96dd8","Type":"ContainerDied","Data":"b6c74e47f2a677093337ddf0004171daa0ef22786b50d9ee2e84816a4f1d1a44"} Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.831113 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-5df68bf4dd-zbx8h" event={"ID":"3735ca3d-3764-4d36-b912-fbf0bfb96dd8","Type":"ContainerDied","Data":"9c6d137248fccebe77001c6e099d43d98f6be1d320a8e169d6e922bf865a5bc8"} Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.831131 4558 scope.go:117] "RemoveContainer" containerID="cecf310a1a5c01a8e2cb6f8db17408c6746dfda1028e4915a95ed043910b2cbd" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.831372 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-api-5df68bf4dd-zbx8h" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.833480 4558 generic.go:334] "Generic (PLEG): container finished" podID="4969c1af-53c0-435a-bd06-6bd493c81c80" containerID="43610636e4831cc5451e75428b042d0c9d0a563ad6b875c786d70693be0d98f3" exitCode=0 Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.833566 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-55ff946595-f8gjf" event={"ID":"4969c1af-53c0-435a-bd06-6bd493c81c80","Type":"ContainerDied","Data":"43610636e4831cc5451e75428b042d0c9d0a563ad6b875c786d70693be0d98f3"} Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.833578 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-55ff946595-f8gjf" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.833597 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-55ff946595-f8gjf" event={"ID":"4969c1af-53c0-435a-bd06-6bd493c81c80","Type":"ContainerDied","Data":"09444088968f7e54351d7b5dedb081cfcb5cef9b269cf941bc6cb339aae637c4"} Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.836646 4558 generic.go:334] "Generic (PLEG): container finished" podID="a140c9a4-bec6-42e6-bc6c-d63566e4f7f6" containerID="620ea4b124f9388111a2afb26185bb93d973f00636b50d2b8aed2e76b5d21043" exitCode=0 Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.836712 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-5d77597494-bkh4z" event={"ID":"a140c9a4-bec6-42e6-bc6c-d63566e4f7f6","Type":"ContainerDied","Data":"620ea4b124f9388111a2afb26185bb93d973f00636b50d2b8aed2e76b5d21043"} Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.836738 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-5d77597494-bkh4z" event={"ID":"a140c9a4-bec6-42e6-bc6c-d63566e4f7f6","Type":"ContainerDied","Data":"22aef6e67fa6ccb296984c72ec063d7c974b405920e883d538f54b5078fdb6ac"} Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.836753 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-5d77597494-bkh4z" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.838227 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-rqg8f" event={"ID":"f881d3ea-5346-49f4-8618-7271150ba300","Type":"ContainerStarted","Data":"df74271e073d2a1874b1967c7e995ccbdd4ce2707596e404c8cd9904e3bfaddd"} Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.838359 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-rqg8f" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.839240 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-7649847b9c-9wk9v" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.842903 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a140c9a4-bec6-42e6-bc6c-d63566e4f7f6-config-data" (OuterVolumeSpecName: "config-data") pod "a140c9a4-bec6-42e6-bc6c-d63566e4f7f6" (UID: "a140c9a4-bec6-42e6-bc6c-d63566e4f7f6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.852685 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3735ca3d-3764-4d36-b912-fbf0bfb96dd8-config-data" (OuterVolumeSpecName: "config-data") pod "3735ca3d-3764-4d36-b912-fbf0bfb96dd8" (UID: "3735ca3d-3764-4d36-b912-fbf0bfb96dd8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.855375 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3735ca3d-3764-4d36-b912-fbf0bfb96dd8-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "3735ca3d-3764-4d36-b912-fbf0bfb96dd8" (UID: "3735ca3d-3764-4d36-b912-fbf0bfb96dd8"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.856004 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3735ca3d-3764-4d36-b912-fbf0bfb96dd8-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "3735ca3d-3764-4d36-b912-fbf0bfb96dd8" (UID: "3735ca3d-3764-4d36-b912-fbf0bfb96dd8"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.857267 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "4969c1af-53c0-435a-bd06-6bd493c81c80" (UID: "4969c1af-53c0-435a-bd06-6bd493c81c80"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.857819 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-rqg8f" podStartSLOduration=4.857806082 podStartE2EDuration="4.857806082s" podCreationTimestamp="2026-01-20 17:00:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:00:47.850661337 +0000 UTC m=+1141.610999303" watchObservedRunningTime="2026-01-20 17:00:47.857806082 +0000 UTC m=+1141.618144050" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.858812 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a140c9a4-bec6-42e6-bc6c-d63566e4f7f6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a140c9a4-bec6-42e6-bc6c-d63566e4f7f6" (UID: "a140c9a4-bec6-42e6-bc6c-d63566e4f7f6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.881028 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a140c9a4-bec6-42e6-bc6c-d63566e4f7f6-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "a140c9a4-bec6-42e6-bc6c-d63566e4f7f6" (UID: "a140c9a4-bec6-42e6-bc6c-d63566e4f7f6"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.883919 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a140c9a4-bec6-42e6-bc6c-d63566e4f7f6-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "a140c9a4-bec6-42e6-bc6c-d63566e4f7f6" (UID: "a140c9a4-bec6-42e6-bc6c-d63566e4f7f6"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.888638 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rskmp\" (UniqueName: \"kubernetes.io/projected/a140c9a4-bec6-42e6-bc6c-d63566e4f7f6-kube-api-access-rskmp\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.888660 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.888670 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3735ca3d-3764-4d36-b912-fbf0bfb96dd8-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.888679 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a140c9a4-bec6-42e6-bc6c-d63566e4f7f6-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.888687 4558 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.888694 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x9556\" (UniqueName: \"kubernetes.io/projected/3735ca3d-3764-4d36-b912-fbf0bfb96dd8-kube-api-access-x9556\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.888702 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a140c9a4-bec6-42e6-bc6c-d63566e4f7f6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.888709 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a140c9a4-bec6-42e6-bc6c-d63566e4f7f6-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.888716 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.888726 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3735ca3d-3764-4d36-b912-fbf0bfb96dd8-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.888734 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a140c9a4-bec6-42e6-bc6c-d63566e4f7f6-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.888742 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a140c9a4-bec6-42e6-bc6c-d63566e4f7f6-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.888750 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3735ca3d-3764-4d36-b912-fbf0bfb96dd8-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:47 crc 
kubenswrapper[4558]: I0120 17:00:47.888758 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mq58f\" (UniqueName: \"kubernetes.io/projected/4969c1af-53c0-435a-bd06-6bd493c81c80-kube-api-access-mq58f\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.888765 4558 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.888773 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.888780 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a140c9a4-bec6-42e6-bc6c-d63566e4f7f6-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.888787 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.888794 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3735ca3d-3764-4d36-b912-fbf0bfb96dd8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.888801 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3735ca3d-3764-4d36-b912-fbf0bfb96dd8-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.888810 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3735ca3d-3764-4d36-b912-fbf0bfb96dd8-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.888819 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4969c1af-53c0-435a-bd06-6bd493c81c80-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.949141 4558 scope.go:117] "RemoveContainer" containerID="b6c74e47f2a677093337ddf0004171daa0ef22786b50d9ee2e84816a4f1d1a44" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.964315 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-worker-7649847b9c-9wk9v"] Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.971253 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-worker-7649847b9c-9wk9v"] Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.980537 4558 scope.go:117] "RemoveContainer" containerID="cecf310a1a5c01a8e2cb6f8db17408c6746dfda1028e4915a95ed043910b2cbd" Jan 20 17:00:47 crc kubenswrapper[4558]: E0120 17:00:47.981084 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cecf310a1a5c01a8e2cb6f8db17408c6746dfda1028e4915a95ed043910b2cbd\": container with ID starting with cecf310a1a5c01a8e2cb6f8db17408c6746dfda1028e4915a95ed043910b2cbd not found: ID does not exist" containerID="cecf310a1a5c01a8e2cb6f8db17408c6746dfda1028e4915a95ed043910b2cbd" Jan 
20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.981119 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cecf310a1a5c01a8e2cb6f8db17408c6746dfda1028e4915a95ed043910b2cbd"} err="failed to get container status \"cecf310a1a5c01a8e2cb6f8db17408c6746dfda1028e4915a95ed043910b2cbd\": rpc error: code = NotFound desc = could not find container \"cecf310a1a5c01a8e2cb6f8db17408c6746dfda1028e4915a95ed043910b2cbd\": container with ID starting with cecf310a1a5c01a8e2cb6f8db17408c6746dfda1028e4915a95ed043910b2cbd not found: ID does not exist" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.981144 4558 scope.go:117] "RemoveContainer" containerID="b6c74e47f2a677093337ddf0004171daa0ef22786b50d9ee2e84816a4f1d1a44" Jan 20 17:00:47 crc kubenswrapper[4558]: E0120 17:00:47.981670 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b6c74e47f2a677093337ddf0004171daa0ef22786b50d9ee2e84816a4f1d1a44\": container with ID starting with b6c74e47f2a677093337ddf0004171daa0ef22786b50d9ee2e84816a4f1d1a44 not found: ID does not exist" containerID="b6c74e47f2a677093337ddf0004171daa0ef22786b50d9ee2e84816a4f1d1a44" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.981685 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b6c74e47f2a677093337ddf0004171daa0ef22786b50d9ee2e84816a4f1d1a44"} err="failed to get container status \"b6c74e47f2a677093337ddf0004171daa0ef22786b50d9ee2e84816a4f1d1a44\": rpc error: code = NotFound desc = could not find container \"b6c74e47f2a677093337ddf0004171daa0ef22786b50d9ee2e84816a4f1d1a44\": container with ID starting with b6c74e47f2a677093337ddf0004171daa0ef22786b50d9ee2e84816a4f1d1a44 not found: ID does not exist" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.981698 4558 scope.go:117] "RemoveContainer" containerID="cecf310a1a5c01a8e2cb6f8db17408c6746dfda1028e4915a95ed043910b2cbd" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.981919 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cecf310a1a5c01a8e2cb6f8db17408c6746dfda1028e4915a95ed043910b2cbd"} err="failed to get container status \"cecf310a1a5c01a8e2cb6f8db17408c6746dfda1028e4915a95ed043910b2cbd\": rpc error: code = NotFound desc = could not find container \"cecf310a1a5c01a8e2cb6f8db17408c6746dfda1028e4915a95ed043910b2cbd\": container with ID starting with cecf310a1a5c01a8e2cb6f8db17408c6746dfda1028e4915a95ed043910b2cbd not found: ID does not exist" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.981932 4558 scope.go:117] "RemoveContainer" containerID="b6c74e47f2a677093337ddf0004171daa0ef22786b50d9ee2e84816a4f1d1a44" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.982062 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b6c74e47f2a677093337ddf0004171daa0ef22786b50d9ee2e84816a4f1d1a44"} err="failed to get container status \"b6c74e47f2a677093337ddf0004171daa0ef22786b50d9ee2e84816a4f1d1a44\": rpc error: code = NotFound desc = could not find container \"b6c74e47f2a677093337ddf0004171daa0ef22786b50d9ee2e84816a4f1d1a44\": container with ID starting with b6c74e47f2a677093337ddf0004171daa0ef22786b50d9ee2e84816a4f1d1a44 not found: ID does not exist" Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.982073 4558 scope.go:117] "RemoveContainer" containerID="43610636e4831cc5451e75428b042d0c9d0a563ad6b875c786d70693be0d98f3" Jan 
20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.983250 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-fbtv5"] Jan 20 17:00:47 crc kubenswrapper[4558]: I0120 17:00:47.988594 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-fbtv5"] Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.002150 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-7e4f-account-create-update-pl5sm"] Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.005646 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-7e4f-account-create-update-pl5sm"] Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.040057 4558 scope.go:117] "RemoveContainer" containerID="43610636e4831cc5451e75428b042d0c9d0a563ad6b875c786d70693be0d98f3" Jan 20 17:00:48 crc kubenswrapper[4558]: E0120 17:00:48.040429 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"43610636e4831cc5451e75428b042d0c9d0a563ad6b875c786d70693be0d98f3\": container with ID starting with 43610636e4831cc5451e75428b042d0c9d0a563ad6b875c786d70693be0d98f3 not found: ID does not exist" containerID="43610636e4831cc5451e75428b042d0c9d0a563ad6b875c786d70693be0d98f3" Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.040453 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"43610636e4831cc5451e75428b042d0c9d0a563ad6b875c786d70693be0d98f3"} err="failed to get container status \"43610636e4831cc5451e75428b042d0c9d0a563ad6b875c786d70693be0d98f3\": rpc error: code = NotFound desc = could not find container \"43610636e4831cc5451e75428b042d0c9d0a563ad6b875c786d70693be0d98f3\": container with ID starting with 43610636e4831cc5451e75428b042d0c9d0a563ad6b875c786d70693be0d98f3 not found: ID does not exist" Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.040472 4558 scope.go:117] "RemoveContainer" containerID="620ea4b124f9388111a2afb26185bb93d973f00636b50d2b8aed2e76b5d21043" Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.056074 4558 scope.go:117] "RemoveContainer" containerID="b78d30b920cedd5deca6765214e5cedff3241e2599a5afce55dcb23d1cc75565" Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.078789 4558 scope.go:117] "RemoveContainer" containerID="620ea4b124f9388111a2afb26185bb93d973f00636b50d2b8aed2e76b5d21043" Jan 20 17:00:48 crc kubenswrapper[4558]: E0120 17:00:48.079085 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"620ea4b124f9388111a2afb26185bb93d973f00636b50d2b8aed2e76b5d21043\": container with ID starting with 620ea4b124f9388111a2afb26185bb93d973f00636b50d2b8aed2e76b5d21043 not found: ID does not exist" containerID="620ea4b124f9388111a2afb26185bb93d973f00636b50d2b8aed2e76b5d21043" Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.079114 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"620ea4b124f9388111a2afb26185bb93d973f00636b50d2b8aed2e76b5d21043"} err="failed to get container status \"620ea4b124f9388111a2afb26185bb93d973f00636b50d2b8aed2e76b5d21043\": rpc error: code = NotFound desc = could not find container \"620ea4b124f9388111a2afb26185bb93d973f00636b50d2b8aed2e76b5d21043\": container with ID starting with 620ea4b124f9388111a2afb26185bb93d973f00636b50d2b8aed2e76b5d21043 not found: ID does 
not exist" Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.079133 4558 scope.go:117] "RemoveContainer" containerID="b78d30b920cedd5deca6765214e5cedff3241e2599a5afce55dcb23d1cc75565" Jan 20 17:00:48 crc kubenswrapper[4558]: E0120 17:00:48.079356 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b78d30b920cedd5deca6765214e5cedff3241e2599a5afce55dcb23d1cc75565\": container with ID starting with b78d30b920cedd5deca6765214e5cedff3241e2599a5afce55dcb23d1cc75565 not found: ID does not exist" containerID="b78d30b920cedd5deca6765214e5cedff3241e2599a5afce55dcb23d1cc75565" Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.079380 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b78d30b920cedd5deca6765214e5cedff3241e2599a5afce55dcb23d1cc75565"} err="failed to get container status \"b78d30b920cedd5deca6765214e5cedff3241e2599a5afce55dcb23d1cc75565\": rpc error: code = NotFound desc = could not find container \"b78d30b920cedd5deca6765214e5cedff3241e2599a5afce55dcb23d1cc75565\": container with ID starting with b78d30b920cedd5deca6765214e5cedff3241e2599a5afce55dcb23d1cc75565 not found: ID does not exist" Jan 20 17:00:48 crc kubenswrapper[4558]: E0120 17:00:48.093272 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/barbican-config-data: secret "barbican-config-data" not found Jan 20 17:00:48 crc kubenswrapper[4558]: E0120 17:00:48.093330 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/de7f5467-1e83-42f0-86bb-ade85deec8f3-config-data podName:de7f5467-1e83-42f0-86bb-ade85deec8f3 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:52.093314851 +0000 UTC m=+1145.853652817 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/de7f5467-1e83-42f0-86bb-ade85deec8f3-config-data") pod "barbican-worker-c4f864f89-58gb9" (UID: "de7f5467-1e83-42f0-86bb-ade85deec8f3") : secret "barbican-config-data" not found Jan 20 17:00:48 crc kubenswrapper[4558]: E0120 17:00:48.093344 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/barbican-worker-config-data: secret "barbican-worker-config-data" not found Jan 20 17:00:48 crc kubenswrapper[4558]: E0120 17:00:48.093394 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/de7f5467-1e83-42f0-86bb-ade85deec8f3-config-data-custom podName:de7f5467-1e83-42f0-86bb-ade85deec8f3 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:52.093379251 +0000 UTC m=+1145.853717218 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "config-data-custom" (UniqueName: "kubernetes.io/secret/de7f5467-1e83-42f0-86bb-ade85deec8f3-config-data-custom") pod "barbican-worker-c4f864f89-58gb9" (UID: "de7f5467-1e83-42f0-86bb-ade85deec8f3") : secret "barbican-worker-config-data" not found Jan 20 17:00:48 crc kubenswrapper[4558]: E0120 17:00:48.093506 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:00:48 crc kubenswrapper[4558]: E0120 17:00:48.093533 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/de7f5467-1e83-42f0-86bb-ade85deec8f3-combined-ca-bundle podName:de7f5467-1e83-42f0-86bb-ade85deec8f3 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:52.093527239 +0000 UTC m=+1145.853865197 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/de7f5467-1e83-42f0-86bb-ade85deec8f3-combined-ca-bundle") pod "barbican-worker-c4f864f89-58gb9" (UID: "de7f5467-1e83-42f0-86bb-ade85deec8f3") : secret "combined-ca-bundle" not found Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.156653 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-api-5df68bf4dd-zbx8h"] Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.165520 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-api-5df68bf4dd-zbx8h"] Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.172133 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-55ff946595-f8gjf"] Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.178422 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-55ff946595-f8gjf"] Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.183125 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-api-595757ff5d-frx87" Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.183285 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-5d77597494-bkh4z"] Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.187515 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/placement-5d77597494-bkh4z"] Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.298075 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3c6de94f-c605-43d9-97b5-ccf91e49d1fb-logs\") pod \"3c6de94f-c605-43d9-97b5-ccf91e49d1fb\" (UID: \"3c6de94f-c605-43d9-97b5-ccf91e49d1fb\") " Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.298139 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3c6de94f-c605-43d9-97b5-ccf91e49d1fb-public-tls-certs\") pod \"3c6de94f-c605-43d9-97b5-ccf91e49d1fb\" (UID: \"3c6de94f-c605-43d9-97b5-ccf91e49d1fb\") " Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.298268 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3c6de94f-c605-43d9-97b5-ccf91e49d1fb-internal-tls-certs\") pod \"3c6de94f-c605-43d9-97b5-ccf91e49d1fb\" (UID: \"3c6de94f-c605-43d9-97b5-ccf91e49d1fb\") " Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.298521 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c6de94f-c605-43d9-97b5-ccf91e49d1fb-combined-ca-bundle\") pod \"3c6de94f-c605-43d9-97b5-ccf91e49d1fb\" (UID: \"3c6de94f-c605-43d9-97b5-ccf91e49d1fb\") " Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.298647 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-66xl6\" (UniqueName: \"kubernetes.io/projected/3c6de94f-c605-43d9-97b5-ccf91e49d1fb-kube-api-access-66xl6\") pod \"3c6de94f-c605-43d9-97b5-ccf91e49d1fb\" (UID: \"3c6de94f-c605-43d9-97b5-ccf91e49d1fb\") " Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.298695 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/3c6de94f-c605-43d9-97b5-ccf91e49d1fb-config-data-custom\") pod \"3c6de94f-c605-43d9-97b5-ccf91e49d1fb\" (UID: \"3c6de94f-c605-43d9-97b5-ccf91e49d1fb\") " Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.298714 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c6de94f-c605-43d9-97b5-ccf91e49d1fb-config-data\") pod \"3c6de94f-c605-43d9-97b5-ccf91e49d1fb\" (UID: \"3c6de94f-c605-43d9-97b5-ccf91e49d1fb\") " Jan 20 17:00:48 crc kubenswrapper[4558]: E0120 17:00:48.300013 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/barbican-config-data: secret "barbican-config-data" not found Jan 20 17:00:48 crc kubenswrapper[4558]: E0120 17:00:48.300929 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7776f921-0ae7-4a8c-b444-cf5b4b9a4f65-config-data podName:7776f921-0ae7-4a8c-b444-cf5b4b9a4f65 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:52.300909534 +0000 UTC m=+1146.061247501 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/7776f921-0ae7-4a8c-b444-cf5b4b9a4f65-config-data") pod "barbican-keystone-listener-7fc5477d66-pz8q2" (UID: "7776f921-0ae7-4a8c-b444-cf5b4b9a4f65") : secret "barbican-config-data" not found Jan 20 17:00:48 crc kubenswrapper[4558]: E0120 17:00:48.300109 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/barbican-keystone-listener-config-data: secret "barbican-keystone-listener-config-data" not found Jan 20 17:00:48 crc kubenswrapper[4558]: E0120 17:00:48.301419 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7776f921-0ae7-4a8c-b444-cf5b4b9a4f65-config-data-custom podName:7776f921-0ae7-4a8c-b444-cf5b4b9a4f65 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:52.301408543 +0000 UTC m=+1146.061746510 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "config-data-custom" (UniqueName: "kubernetes.io/secret/7776f921-0ae7-4a8c-b444-cf5b4b9a4f65-config-data-custom") pod "barbican-keystone-listener-7fc5477d66-pz8q2" (UID: "7776f921-0ae7-4a8c-b444-cf5b4b9a4f65") : secret "barbican-keystone-listener-config-data" not found Jan 20 17:00:48 crc kubenswrapper[4558]: E0120 17:00:48.300150 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:00:48 crc kubenswrapper[4558]: E0120 17:00:48.301593 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7776f921-0ae7-4a8c-b444-cf5b4b9a4f65-combined-ca-bundle podName:7776f921-0ae7-4a8c-b444-cf5b4b9a4f65 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:52.301584104 +0000 UTC m=+1146.061922070 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/7776f921-0ae7-4a8c-b444-cf5b4b9a4f65-combined-ca-bundle") pod "barbican-keystone-listener-7fc5477d66-pz8q2" (UID: "7776f921-0ae7-4a8c-b444-cf5b4b9a4f65") : secret "combined-ca-bundle" not found Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.300654 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3c6de94f-c605-43d9-97b5-ccf91e49d1fb-logs" (OuterVolumeSpecName: "logs") pod "3c6de94f-c605-43d9-97b5-ccf91e49d1fb" (UID: "3c6de94f-c605-43d9-97b5-ccf91e49d1fb"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.302877 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3c6de94f-c605-43d9-97b5-ccf91e49d1fb-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "3c6de94f-c605-43d9-97b5-ccf91e49d1fb" (UID: "3c6de94f-c605-43d9-97b5-ccf91e49d1fb"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.302903 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3c6de94f-c605-43d9-97b5-ccf91e49d1fb-kube-api-access-66xl6" (OuterVolumeSpecName: "kube-api-access-66xl6") pod "3c6de94f-c605-43d9-97b5-ccf91e49d1fb" (UID: "3c6de94f-c605-43d9-97b5-ccf91e49d1fb"). InnerVolumeSpecName "kube-api-access-66xl6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.315310 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3c6de94f-c605-43d9-97b5-ccf91e49d1fb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3c6de94f-c605-43d9-97b5-ccf91e49d1fb" (UID: "3c6de94f-c605-43d9-97b5-ccf91e49d1fb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.326756 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3c6de94f-c605-43d9-97b5-ccf91e49d1fb-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "3c6de94f-c605-43d9-97b5-ccf91e49d1fb" (UID: "3c6de94f-c605-43d9-97b5-ccf91e49d1fb"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.329476 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3c6de94f-c605-43d9-97b5-ccf91e49d1fb-config-data" (OuterVolumeSpecName: "config-data") pod "3c6de94f-c605-43d9-97b5-ccf91e49d1fb" (UID: "3c6de94f-c605-43d9-97b5-ccf91e49d1fb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.329938 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3c6de94f-c605-43d9-97b5-ccf91e49d1fb-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "3c6de94f-c605-43d9-97b5-ccf91e49d1fb" (UID: "3c6de94f-c605-43d9-97b5-ccf91e49d1fb"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.400372 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3c6de94f-c605-43d9-97b5-ccf91e49d1fb-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.400397 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c6de94f-c605-43d9-97b5-ccf91e49d1fb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.400407 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-66xl6\" (UniqueName: \"kubernetes.io/projected/3c6de94f-c605-43d9-97b5-ccf91e49d1fb-kube-api-access-66xl6\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.400416 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3c6de94f-c605-43d9-97b5-ccf91e49d1fb-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.400424 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c6de94f-c605-43d9-97b5-ccf91e49d1fb-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.400432 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3c6de94f-c605-43d9-97b5-ccf91e49d1fb-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.400439 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3c6de94f-c605-43d9-97b5-ccf91e49d1fb-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.575536 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="10c92abf-cd48-4659-8595-ce9610c0fe2e" path="/var/lib/kubelet/pods/10c92abf-cd48-4659-8595-ce9610c0fe2e/volumes" Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.576633 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3735ca3d-3764-4d36-b912-fbf0bfb96dd8" path="/var/lib/kubelet/pods/3735ca3d-3764-4d36-b912-fbf0bfb96dd8/volumes" Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.578021 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="40234b95-d302-420a-96d7-c56ffe609530" path="/var/lib/kubelet/pods/40234b95-d302-420a-96d7-c56ffe609530/volumes" Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.580039 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4969c1af-53c0-435a-bd06-6bd493c81c80" path="/var/lib/kubelet/pods/4969c1af-53c0-435a-bd06-6bd493c81c80/volumes" Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.580588 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915" path="/var/lib/kubelet/pods/69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915/volumes" Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.581628 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8074f6f3-f564-42bc-b08b-28ffe75bbbc5" path="/var/lib/kubelet/pods/8074f6f3-f564-42bc-b08b-28ffe75bbbc5/volumes" Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.582921 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes 
dir" podUID="9ea3b871-4db3-4108-baea-e57a23d9d6c5" path="/var/lib/kubelet/pods/9ea3b871-4db3-4108-baea-e57a23d9d6c5/volumes" Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.583465 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a140c9a4-bec6-42e6-bc6c-d63566e4f7f6" path="/var/lib/kubelet/pods/a140c9a4-bec6-42e6-bc6c-d63566e4f7f6/volumes" Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.585046 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf9a27d0-1f64-4f43-a2a7-ce0902c6cd25" path="/var/lib/kubelet/pods/bf9a27d0-1f64-4f43-a2a7-ce0902c6cd25/volumes" Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.585515 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c268a2f5-7c67-4935-8f3e-bdd83aeccc95" path="/var/lib/kubelet/pods/c268a2f5-7c67-4935-8f3e-bdd83aeccc95/volumes" Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.586056 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d3643a65-70b3-488e-ad03-8b7ad6ae4539" path="/var/lib/kubelet/pods/d3643a65-70b3-488e-ad03-8b7ad6ae4539/volumes" Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.586458 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e353328b-4fe7-4637-8174-a3b227ad9761" path="/var/lib/kubelet/pods/e353328b-4fe7-4637-8174-a3b227ad9761/volumes" Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.779965 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-6494546496-58v7b" Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.846579 4558 generic.go:334] "Generic (PLEG): container finished" podID="43781c23-b22a-4449-8306-67efbe8dd6fc" containerID="c6dd483fc55626bbad92f374c9cd3e8b6e558a86ecdf6e7a87f558af6aed4a91" exitCode=0 Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.846613 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-6494546496-58v7b" Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.846626 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-6494546496-58v7b" event={"ID":"43781c23-b22a-4449-8306-67efbe8dd6fc","Type":"ContainerDied","Data":"c6dd483fc55626bbad92f374c9cd3e8b6e558a86ecdf6e7a87f558af6aed4a91"} Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.847140 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-6494546496-58v7b" event={"ID":"43781c23-b22a-4449-8306-67efbe8dd6fc","Type":"ContainerDied","Data":"0314642ddff11a7a338c7adcef338a679612552d0433d7fda0bc23af866db076"} Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.847175 4558 scope.go:117] "RemoveContainer" containerID="c6dd483fc55626bbad92f374c9cd3e8b6e558a86ecdf6e7a87f558af6aed4a91" Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.848788 4558 generic.go:334] "Generic (PLEG): container finished" podID="3c6de94f-c605-43d9-97b5-ccf91e49d1fb" containerID="ff9f3ae585b003190c9a4e57172ef28f205a59206653a665e67865ef3bac9d47" exitCode=0 Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.848824 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-api-595757ff5d-frx87" Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.848827 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-595757ff5d-frx87" event={"ID":"3c6de94f-c605-43d9-97b5-ccf91e49d1fb","Type":"ContainerDied","Data":"ff9f3ae585b003190c9a4e57172ef28f205a59206653a665e67865ef3bac9d47"} Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.848869 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-595757ff5d-frx87" event={"ID":"3c6de94f-c605-43d9-97b5-ccf91e49d1fb","Type":"ContainerDied","Data":"c495fe3ee551481098ce5d2fed77126628308deee2ebfade29bf4bdd17f835c0"} Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.866139 4558 scope.go:117] "RemoveContainer" containerID="c6dd483fc55626bbad92f374c9cd3e8b6e558a86ecdf6e7a87f558af6aed4a91" Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.867525 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-api-595757ff5d-frx87"] Jan 20 17:00:48 crc kubenswrapper[4558]: E0120 17:00:48.867714 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c6dd483fc55626bbad92f374c9cd3e8b6e558a86ecdf6e7a87f558af6aed4a91\": container with ID starting with c6dd483fc55626bbad92f374c9cd3e8b6e558a86ecdf6e7a87f558af6aed4a91 not found: ID does not exist" containerID="c6dd483fc55626bbad92f374c9cd3e8b6e558a86ecdf6e7a87f558af6aed4a91" Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.867742 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c6dd483fc55626bbad92f374c9cd3e8b6e558a86ecdf6e7a87f558af6aed4a91"} err="failed to get container status \"c6dd483fc55626bbad92f374c9cd3e8b6e558a86ecdf6e7a87f558af6aed4a91\": rpc error: code = NotFound desc = could not find container \"c6dd483fc55626bbad92f374c9cd3e8b6e558a86ecdf6e7a87f558af6aed4a91\": container with ID starting with c6dd483fc55626bbad92f374c9cd3e8b6e558a86ecdf6e7a87f558af6aed4a91 not found: ID does not exist" Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.867763 4558 scope.go:117] "RemoveContainer" containerID="ff9f3ae585b003190c9a4e57172ef28f205a59206653a665e67865ef3bac9d47" Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.871560 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-api-595757ff5d-frx87"] Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.882512 4558 scope.go:117] "RemoveContainer" containerID="065a0343a2fa20d49288d6aad3e2c8001219424d475f4cf3fa0b18d5f777af97" Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.895500 4558 scope.go:117] "RemoveContainer" containerID="ff9f3ae585b003190c9a4e57172ef28f205a59206653a665e67865ef3bac9d47" Jan 20 17:00:48 crc kubenswrapper[4558]: E0120 17:00:48.895782 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ff9f3ae585b003190c9a4e57172ef28f205a59206653a665e67865ef3bac9d47\": container with ID starting with ff9f3ae585b003190c9a4e57172ef28f205a59206653a665e67865ef3bac9d47 not found: ID does not exist" containerID="ff9f3ae585b003190c9a4e57172ef28f205a59206653a665e67865ef3bac9d47" Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.895816 4558 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"ff9f3ae585b003190c9a4e57172ef28f205a59206653a665e67865ef3bac9d47"} err="failed to get container status \"ff9f3ae585b003190c9a4e57172ef28f205a59206653a665e67865ef3bac9d47\": rpc error: code = NotFound desc = could not find container \"ff9f3ae585b003190c9a4e57172ef28f205a59206653a665e67865ef3bac9d47\": container with ID starting with ff9f3ae585b003190c9a4e57172ef28f205a59206653a665e67865ef3bac9d47 not found: ID does not exist" Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.895839 4558 scope.go:117] "RemoveContainer" containerID="065a0343a2fa20d49288d6aad3e2c8001219424d475f4cf3fa0b18d5f777af97" Jan 20 17:00:48 crc kubenswrapper[4558]: E0120 17:00:48.896321 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"065a0343a2fa20d49288d6aad3e2c8001219424d475f4cf3fa0b18d5f777af97\": container with ID starting with 065a0343a2fa20d49288d6aad3e2c8001219424d475f4cf3fa0b18d5f777af97 not found: ID does not exist" containerID="065a0343a2fa20d49288d6aad3e2c8001219424d475f4cf3fa0b18d5f777af97" Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.896455 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"065a0343a2fa20d49288d6aad3e2c8001219424d475f4cf3fa0b18d5f777af97"} err="failed to get container status \"065a0343a2fa20d49288d6aad3e2c8001219424d475f4cf3fa0b18d5f777af97\": rpc error: code = NotFound desc = could not find container \"065a0343a2fa20d49288d6aad3e2c8001219424d475f4cf3fa0b18d5f777af97\": container with ID starting with 065a0343a2fa20d49288d6aad3e2c8001219424d475f4cf3fa0b18d5f777af97 not found: ID does not exist" Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.908788 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/43781c23-b22a-4449-8306-67efbe8dd6fc-fernet-keys\") pod \"43781c23-b22a-4449-8306-67efbe8dd6fc\" (UID: \"43781c23-b22a-4449-8306-67efbe8dd6fc\") " Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.908838 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/43781c23-b22a-4449-8306-67efbe8dd6fc-scripts\") pod \"43781c23-b22a-4449-8306-67efbe8dd6fc\" (UID: \"43781c23-b22a-4449-8306-67efbe8dd6fc\") " Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.908939 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/43781c23-b22a-4449-8306-67efbe8dd6fc-config-data\") pod \"43781c23-b22a-4449-8306-67efbe8dd6fc\" (UID: \"43781c23-b22a-4449-8306-67efbe8dd6fc\") " Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.908983 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/43781c23-b22a-4449-8306-67efbe8dd6fc-public-tls-certs\") pod \"43781c23-b22a-4449-8306-67efbe8dd6fc\" (UID: \"43781c23-b22a-4449-8306-67efbe8dd6fc\") " Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.909004 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-snwkf\" (UniqueName: \"kubernetes.io/projected/43781c23-b22a-4449-8306-67efbe8dd6fc-kube-api-access-snwkf\") pod \"43781c23-b22a-4449-8306-67efbe8dd6fc\" (UID: \"43781c23-b22a-4449-8306-67efbe8dd6fc\") " Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.909024 4558 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/43781c23-b22a-4449-8306-67efbe8dd6fc-internal-tls-certs\") pod \"43781c23-b22a-4449-8306-67efbe8dd6fc\" (UID: \"43781c23-b22a-4449-8306-67efbe8dd6fc\") " Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.909047 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43781c23-b22a-4449-8306-67efbe8dd6fc-combined-ca-bundle\") pod \"43781c23-b22a-4449-8306-67efbe8dd6fc\" (UID: \"43781c23-b22a-4449-8306-67efbe8dd6fc\") " Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.909099 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/43781c23-b22a-4449-8306-67efbe8dd6fc-credential-keys\") pod \"43781c23-b22a-4449-8306-67efbe8dd6fc\" (UID: \"43781c23-b22a-4449-8306-67efbe8dd6fc\") " Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.913720 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43781c23-b22a-4449-8306-67efbe8dd6fc-kube-api-access-snwkf" (OuterVolumeSpecName: "kube-api-access-snwkf") pod "43781c23-b22a-4449-8306-67efbe8dd6fc" (UID: "43781c23-b22a-4449-8306-67efbe8dd6fc"). InnerVolumeSpecName "kube-api-access-snwkf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.914252 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43781c23-b22a-4449-8306-67efbe8dd6fc-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "43781c23-b22a-4449-8306-67efbe8dd6fc" (UID: "43781c23-b22a-4449-8306-67efbe8dd6fc"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.921293 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43781c23-b22a-4449-8306-67efbe8dd6fc-scripts" (OuterVolumeSpecName: "scripts") pod "43781c23-b22a-4449-8306-67efbe8dd6fc" (UID: "43781c23-b22a-4449-8306-67efbe8dd6fc"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.926406 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43781c23-b22a-4449-8306-67efbe8dd6fc-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "43781c23-b22a-4449-8306-67efbe8dd6fc" (UID: "43781c23-b22a-4449-8306-67efbe8dd6fc"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.927095 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43781c23-b22a-4449-8306-67efbe8dd6fc-config-data" (OuterVolumeSpecName: "config-data") pod "43781c23-b22a-4449-8306-67efbe8dd6fc" (UID: "43781c23-b22a-4449-8306-67efbe8dd6fc"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.935377 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43781c23-b22a-4449-8306-67efbe8dd6fc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "43781c23-b22a-4449-8306-67efbe8dd6fc" (UID: "43781c23-b22a-4449-8306-67efbe8dd6fc"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.937522 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43781c23-b22a-4449-8306-67efbe8dd6fc-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "43781c23-b22a-4449-8306-67efbe8dd6fc" (UID: "43781c23-b22a-4449-8306-67efbe8dd6fc"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:48 crc kubenswrapper[4558]: I0120 17:00:48.944319 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43781c23-b22a-4449-8306-67efbe8dd6fc-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "43781c23-b22a-4449-8306-67efbe8dd6fc" (UID: "43781c23-b22a-4449-8306-67efbe8dd6fc"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:49 crc kubenswrapper[4558]: I0120 17:00:49.010819 4558 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/43781c23-b22a-4449-8306-67efbe8dd6fc-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:49 crc kubenswrapper[4558]: I0120 17:00:49.010894 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/43781c23-b22a-4449-8306-67efbe8dd6fc-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:49 crc kubenswrapper[4558]: I0120 17:00:49.010917 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/43781c23-b22a-4449-8306-67efbe8dd6fc-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:49 crc kubenswrapper[4558]: I0120 17:00:49.010927 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/43781c23-b22a-4449-8306-67efbe8dd6fc-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:49 crc kubenswrapper[4558]: I0120 17:00:49.010957 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-snwkf\" (UniqueName: \"kubernetes.io/projected/43781c23-b22a-4449-8306-67efbe8dd6fc-kube-api-access-snwkf\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:49 crc kubenswrapper[4558]: I0120 17:00:49.010967 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/43781c23-b22a-4449-8306-67efbe8dd6fc-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:49 crc kubenswrapper[4558]: I0120 17:00:49.010975 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/43781c23-b22a-4449-8306-67efbe8dd6fc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:49 crc kubenswrapper[4558]: I0120 17:00:49.010984 4558 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/43781c23-b22a-4449-8306-67efbe8dd6fc-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:49 crc kubenswrapper[4558]: I0120 17:00:49.169892 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-6494546496-58v7b"] Jan 20 17:00:49 crc kubenswrapper[4558]: I0120 17:00:49.174148 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-6494546496-58v7b"] Jan 20 17:00:50 crc kubenswrapper[4558]: E0120 17:00:50.227939 4558 configmap.go:193] Couldn't get configMap 
openstack-kuttl-tests/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Jan 20 17:00:50 crc kubenswrapper[4558]: E0120 17:00:50.227991 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/bf355276-9e62-474e-bfb1-616dde5b83bc-config-data podName:bf355276-9e62-474e-bfb1-616dde5b83bc nodeName:}" failed. No retries permitted until 2026-01-20 17:00:58.227978283 +0000 UTC m=+1151.988316250 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/bf355276-9e62-474e-bfb1-616dde5b83bc-config-data") pod "rabbitmq-cell1-server-0" (UID: "bf355276-9e62-474e-bfb1-616dde5b83bc") : configmap "rabbitmq-cell1-config-data" not found Jan 20 17:00:50 crc kubenswrapper[4558]: I0120 17:00:50.573723 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3c6de94f-c605-43d9-97b5-ccf91e49d1fb" path="/var/lib/kubelet/pods/3c6de94f-c605-43d9-97b5-ccf91e49d1fb/volumes" Jan 20 17:00:50 crc kubenswrapper[4558]: I0120 17:00:50.574245 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43781c23-b22a-4449-8306-67efbe8dd6fc" path="/var/lib/kubelet/pods/43781c23-b22a-4449-8306-67efbe8dd6fc/volumes" Jan 20 17:00:51 crc kubenswrapper[4558]: E0120 17:00:51.140802 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 20 17:00:51 crc kubenswrapper[4558]: E0120 17:00:51.141424 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/3f868eba-f4e1-4e32-b271-391cf271fe97-config-data podName:3f868eba-f4e1-4e32-b271-391cf271fe97 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:59.141410834 +0000 UTC m=+1152.901748800 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/3f868eba-f4e1-4e32-b271-391cf271fe97-config-data") pod "rabbitmq-server-0" (UID: "3f868eba-f4e1-4e32-b271-391cf271fe97") : configmap "rabbitmq-config-data" not found Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.279526 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.343253 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/bf355276-9e62-474e-bfb1-616dde5b83bc-rabbitmq-erlang-cookie\") pod \"bf355276-9e62-474e-bfb1-616dde5b83bc\" (UID: \"bf355276-9e62-474e-bfb1-616dde5b83bc\") " Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.343288 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/bf355276-9e62-474e-bfb1-616dde5b83bc-server-conf\") pod \"bf355276-9e62-474e-bfb1-616dde5b83bc\" (UID: \"bf355276-9e62-474e-bfb1-616dde5b83bc\") " Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.343316 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/bf355276-9e62-474e-bfb1-616dde5b83bc-rabbitmq-tls\") pod \"bf355276-9e62-474e-bfb1-616dde5b83bc\" (UID: \"bf355276-9e62-474e-bfb1-616dde5b83bc\") " Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.343332 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/bf355276-9e62-474e-bfb1-616dde5b83bc-plugins-conf\") pod \"bf355276-9e62-474e-bfb1-616dde5b83bc\" (UID: \"bf355276-9e62-474e-bfb1-616dde5b83bc\") " Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.343354 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"bf355276-9e62-474e-bfb1-616dde5b83bc\" (UID: \"bf355276-9e62-474e-bfb1-616dde5b83bc\") " Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.343371 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/bf355276-9e62-474e-bfb1-616dde5b83bc-erlang-cookie-secret\") pod \"bf355276-9e62-474e-bfb1-616dde5b83bc\" (UID: \"bf355276-9e62-474e-bfb1-616dde5b83bc\") " Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.343393 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/bf355276-9e62-474e-bfb1-616dde5b83bc-config-data\") pod \"bf355276-9e62-474e-bfb1-616dde5b83bc\" (UID: \"bf355276-9e62-474e-bfb1-616dde5b83bc\") " Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.343412 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/bf355276-9e62-474e-bfb1-616dde5b83bc-rabbitmq-plugins\") pod \"bf355276-9e62-474e-bfb1-616dde5b83bc\" (UID: \"bf355276-9e62-474e-bfb1-616dde5b83bc\") " Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.343430 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/bf355276-9e62-474e-bfb1-616dde5b83bc-pod-info\") pod \"bf355276-9e62-474e-bfb1-616dde5b83bc\" (UID: \"bf355276-9e62-474e-bfb1-616dde5b83bc\") " Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.343451 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wfp6r\" (UniqueName: \"kubernetes.io/projected/bf355276-9e62-474e-bfb1-616dde5b83bc-kube-api-access-wfp6r\") pod \"bf355276-9e62-474e-bfb1-616dde5b83bc\" (UID: 
\"bf355276-9e62-474e-bfb1-616dde5b83bc\") " Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.343465 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/bf355276-9e62-474e-bfb1-616dde5b83bc-rabbitmq-confd\") pod \"bf355276-9e62-474e-bfb1-616dde5b83bc\" (UID: \"bf355276-9e62-474e-bfb1-616dde5b83bc\") " Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.344717 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bf355276-9e62-474e-bfb1-616dde5b83bc-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "bf355276-9e62-474e-bfb1-616dde5b83bc" (UID: "bf355276-9e62-474e-bfb1-616dde5b83bc"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.344850 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf355276-9e62-474e-bfb1-616dde5b83bc-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "bf355276-9e62-474e-bfb1-616dde5b83bc" (UID: "bf355276-9e62-474e-bfb1-616dde5b83bc"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.345515 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bf355276-9e62-474e-bfb1-616dde5b83bc-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "bf355276-9e62-474e-bfb1-616dde5b83bc" (UID: "bf355276-9e62-474e-bfb1-616dde5b83bc"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.347587 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf355276-9e62-474e-bfb1-616dde5b83bc-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "bf355276-9e62-474e-bfb1-616dde5b83bc" (UID: "bf355276-9e62-474e-bfb1-616dde5b83bc"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.348423 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf355276-9e62-474e-bfb1-616dde5b83bc-kube-api-access-wfp6r" (OuterVolumeSpecName: "kube-api-access-wfp6r") pod "bf355276-9e62-474e-bfb1-616dde5b83bc" (UID: "bf355276-9e62-474e-bfb1-616dde5b83bc"). InnerVolumeSpecName "kube-api-access-wfp6r". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.348433 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf355276-9e62-474e-bfb1-616dde5b83bc-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "bf355276-9e62-474e-bfb1-616dde5b83bc" (UID: "bf355276-9e62-474e-bfb1-616dde5b83bc"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.349546 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage02-crc" (OuterVolumeSpecName: "persistence") pod "bf355276-9e62-474e-bfb1-616dde5b83bc" (UID: "bf355276-9e62-474e-bfb1-616dde5b83bc"). InnerVolumeSpecName "local-storage02-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.359848 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf355276-9e62-474e-bfb1-616dde5b83bc-config-data" (OuterVolumeSpecName: "config-data") pod "bf355276-9e62-474e-bfb1-616dde5b83bc" (UID: "bf355276-9e62-474e-bfb1-616dde5b83bc"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.360255 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/bf355276-9e62-474e-bfb1-616dde5b83bc-pod-info" (OuterVolumeSpecName: "pod-info") pod "bf355276-9e62-474e-bfb1-616dde5b83bc" (UID: "bf355276-9e62-474e-bfb1-616dde5b83bc"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.371641 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf355276-9e62-474e-bfb1-616dde5b83bc-server-conf" (OuterVolumeSpecName: "server-conf") pod "bf355276-9e62-474e-bfb1-616dde5b83bc" (UID: "bf355276-9e62-474e-bfb1-616dde5b83bc"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.393333 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf355276-9e62-474e-bfb1-616dde5b83bc-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "bf355276-9e62-474e-bfb1-616dde5b83bc" (UID: "bf355276-9e62-474e-bfb1-616dde5b83bc"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.421606 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:00:51 crc kubenswrapper[4558]: E0120 17:00:51.444725 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-neutron-internal-svc: secret "cert-neutron-internal-svc" not found Jan 20 17:00:51 crc kubenswrapper[4558]: E0120 17:00:51.444741 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-neutron-public-svc: secret "cert-neutron-public-svc" not found Jan 20 17:00:51 crc kubenswrapper[4558]: E0120 17:00:51.444761 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:00:51 crc kubenswrapper[4558]: E0120 17:00:51.444774 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-internal-tls-certs podName:7d0b0e33-9ce7-44d9-8b09-6008715fe1a0 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:59.44476173 +0000 UTC m=+1153.205099696 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-internal-tls-certs") pod "neutron-5d557df858-frznf" (UID: "7d0b0e33-9ce7-44d9-8b09-6008715fe1a0") : secret "cert-neutron-internal-svc" not found Jan 20 17:00:51 crc kubenswrapper[4558]: E0120 17:00:51.444725 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-neutron-ovndbs: secret "cert-neutron-ovndbs" not found Jan 20 17:00:51 crc kubenswrapper[4558]: E0120 17:00:51.444794 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-combined-ca-bundle podName:7d0b0e33-9ce7-44d9-8b09-6008715fe1a0 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:59.444784432 +0000 UTC m=+1153.205122399 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-combined-ca-bundle") pod "neutron-5d557df858-frznf" (UID: "7d0b0e33-9ce7-44d9-8b09-6008715fe1a0") : secret "combined-ca-bundle" not found Jan 20 17:00:51 crc kubenswrapper[4558]: E0120 17:00:51.444803 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/neutron-httpd-config: secret "neutron-httpd-config" not found Jan 20 17:00:51 crc kubenswrapper[4558]: E0120 17:00:51.444807 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-ovndb-tls-certs podName:7d0b0e33-9ce7-44d9-8b09-6008715fe1a0 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:59.444801714 +0000 UTC m=+1153.205139681 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "ovndb-tls-certs" (UniqueName: "kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-ovndb-tls-certs") pod "neutron-5d557df858-frznf" (UID: "7d0b0e33-9ce7-44d9-8b09-6008715fe1a0") : secret "cert-neutron-ovndbs" not found Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.444849 4558 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/bf355276-9e62-474e-bfb1-616dde5b83bc-pod-info\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:51 crc kubenswrapper[4558]: E0120 17:00:51.444866 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-public-tls-certs podName:7d0b0e33-9ce7-44d9-8b09-6008715fe1a0 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:59.444856126 +0000 UTC m=+1153.205194094 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-public-tls-certs") pod "neutron-5d557df858-frznf" (UID: "7d0b0e33-9ce7-44d9-8b09-6008715fe1a0") : secret "cert-neutron-public-svc" not found Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.444877 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wfp6r\" (UniqueName: \"kubernetes.io/projected/bf355276-9e62-474e-bfb1-616dde5b83bc-kube-api-access-wfp6r\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:51 crc kubenswrapper[4558]: E0120 17:00:51.444889 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-httpd-config podName:7d0b0e33-9ce7-44d9-8b09-6008715fe1a0 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:59.444883679 +0000 UTC m=+1153.205221646 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "httpd-config" (UniqueName: "kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-httpd-config") pod "neutron-5d557df858-frznf" (UID: "7d0b0e33-9ce7-44d9-8b09-6008715fe1a0") : secret "neutron-httpd-config" not found Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.444897 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/bf355276-9e62-474e-bfb1-616dde5b83bc-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.444906 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/bf355276-9e62-474e-bfb1-616dde5b83bc-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.444915 4558 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/bf355276-9e62-474e-bfb1-616dde5b83bc-server-conf\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.444922 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/bf355276-9e62-474e-bfb1-616dde5b83bc-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.444930 4558 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/bf355276-9e62-474e-bfb1-616dde5b83bc-plugins-conf\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.444945 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" " Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.444954 4558 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/bf355276-9e62-474e-bfb1-616dde5b83bc-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.444961 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/bf355276-9e62-474e-bfb1-616dde5b83bc-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.444968 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/bf355276-9e62-474e-bfb1-616dde5b83bc-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:51 crc kubenswrapper[4558]: E0120 17:00:51.445314 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/neutron-config: secret "neutron-config" not found Jan 20 17:00:51 crc kubenswrapper[4558]: E0120 17:00:51.445408 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-config podName:7d0b0e33-9ce7-44d9-8b09-6008715fe1a0 nodeName:}" failed. No retries permitted until 2026-01-20 17:00:59.44539521 +0000 UTC m=+1153.205733178 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-config") pod "neutron-5d557df858-frznf" (UID: "7d0b0e33-9ce7-44d9-8b09-6008715fe1a0") : secret "neutron-config" not found Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.457333 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage02-crc" (UniqueName: "kubernetes.io/local-volume/local-storage02-crc") on node "crc" Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.545505 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/3f868eba-f4e1-4e32-b271-391cf271fe97-rabbitmq-tls\") pod \"3f868eba-f4e1-4e32-b271-391cf271fe97\" (UID: \"3f868eba-f4e1-4e32-b271-391cf271fe97\") " Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.545645 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/3f868eba-f4e1-4e32-b271-391cf271fe97-erlang-cookie-secret\") pod \"3f868eba-f4e1-4e32-b271-391cf271fe97\" (UID: \"3f868eba-f4e1-4e32-b271-391cf271fe97\") " Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.545712 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/3f868eba-f4e1-4e32-b271-391cf271fe97-rabbitmq-erlang-cookie\") pod \"3f868eba-f4e1-4e32-b271-391cf271fe97\" (UID: \"3f868eba-f4e1-4e32-b271-391cf271fe97\") " Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.545776 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/3f868eba-f4e1-4e32-b271-391cf271fe97-server-conf\") pod \"3f868eba-f4e1-4e32-b271-391cf271fe97\" (UID: \"3f868eba-f4e1-4e32-b271-391cf271fe97\") " Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.545864 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/3f868eba-f4e1-4e32-b271-391cf271fe97-plugins-conf\") pod \"3f868eba-f4e1-4e32-b271-391cf271fe97\" (UID: \"3f868eba-f4e1-4e32-b271-391cf271fe97\") " Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.545922 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w8fm7\" (UniqueName: \"kubernetes.io/projected/3f868eba-f4e1-4e32-b271-391cf271fe97-kube-api-access-w8fm7\") pod \"3f868eba-f4e1-4e32-b271-391cf271fe97\" (UID: \"3f868eba-f4e1-4e32-b271-391cf271fe97\") " Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.546012 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/3f868eba-f4e1-4e32-b271-391cf271fe97-pod-info\") pod \"3f868eba-f4e1-4e32-b271-391cf271fe97\" (UID: \"3f868eba-f4e1-4e32-b271-391cf271fe97\") " Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.546095 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage20-crc\") pod \"3f868eba-f4e1-4e32-b271-391cf271fe97\" (UID: \"3f868eba-f4e1-4e32-b271-391cf271fe97\") " Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.546190 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3f868eba-f4e1-4e32-b271-391cf271fe97-config-data\") pod 
\"3f868eba-f4e1-4e32-b271-391cf271fe97\" (UID: \"3f868eba-f4e1-4e32-b271-391cf271fe97\") " Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.546287 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/3f868eba-f4e1-4e32-b271-391cf271fe97-rabbitmq-plugins\") pod \"3f868eba-f4e1-4e32-b271-391cf271fe97\" (UID: \"3f868eba-f4e1-4e32-b271-391cf271fe97\") " Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.546359 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/3f868eba-f4e1-4e32-b271-391cf271fe97-rabbitmq-confd\") pod \"3f868eba-f4e1-4e32-b271-391cf271fe97\" (UID: \"3f868eba-f4e1-4e32-b271-391cf271fe97\") " Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.546698 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.547595 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3f868eba-f4e1-4e32-b271-391cf271fe97-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "3f868eba-f4e1-4e32-b271-391cf271fe97" (UID: "3f868eba-f4e1-4e32-b271-391cf271fe97"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.547653 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3f868eba-f4e1-4e32-b271-391cf271fe97-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "3f868eba-f4e1-4e32-b271-391cf271fe97" (UID: "3f868eba-f4e1-4e32-b271-391cf271fe97"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.547637 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3f868eba-f4e1-4e32-b271-391cf271fe97-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "3f868eba-f4e1-4e32-b271-391cf271fe97" (UID: "3f868eba-f4e1-4e32-b271-391cf271fe97"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.549084 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/3f868eba-f4e1-4e32-b271-391cf271fe97-pod-info" (OuterVolumeSpecName: "pod-info") pod "3f868eba-f4e1-4e32-b271-391cf271fe97" (UID: "3f868eba-f4e1-4e32-b271-391cf271fe97"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.549249 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f868eba-f4e1-4e32-b271-391cf271fe97-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "3f868eba-f4e1-4e32-b271-391cf271fe97" (UID: "3f868eba-f4e1-4e32-b271-391cf271fe97"). InnerVolumeSpecName "erlang-cookie-secret". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.549482 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage20-crc" (OuterVolumeSpecName: "persistence") pod "3f868eba-f4e1-4e32-b271-391cf271fe97" (UID: "3f868eba-f4e1-4e32-b271-391cf271fe97"). InnerVolumeSpecName "local-storage20-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.549832 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3f868eba-f4e1-4e32-b271-391cf271fe97-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "3f868eba-f4e1-4e32-b271-391cf271fe97" (UID: "3f868eba-f4e1-4e32-b271-391cf271fe97"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.550206 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3f868eba-f4e1-4e32-b271-391cf271fe97-kube-api-access-w8fm7" (OuterVolumeSpecName: "kube-api-access-w8fm7") pod "3f868eba-f4e1-4e32-b271-391cf271fe97" (UID: "3f868eba-f4e1-4e32-b271-391cf271fe97"). InnerVolumeSpecName "kube-api-access-w8fm7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.560450 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3f868eba-f4e1-4e32-b271-391cf271fe97-config-data" (OuterVolumeSpecName: "config-data") pod "3f868eba-f4e1-4e32-b271-391cf271fe97" (UID: "3f868eba-f4e1-4e32-b271-391cf271fe97"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.570869 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3f868eba-f4e1-4e32-b271-391cf271fe97-server-conf" (OuterVolumeSpecName: "server-conf") pod "3f868eba-f4e1-4e32-b271-391cf271fe97" (UID: "3f868eba-f4e1-4e32-b271-391cf271fe97"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.594453 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3f868eba-f4e1-4e32-b271-391cf271fe97-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "3f868eba-f4e1-4e32-b271-391cf271fe97" (UID: "3f868eba-f4e1-4e32-b271-391cf271fe97"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.647494 4558 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/3f868eba-f4e1-4e32-b271-391cf271fe97-pod-info\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.647590 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage20-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage20-crc\") on node \"crc\" " Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.647646 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3f868eba-f4e1-4e32-b271-391cf271fe97-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.647759 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/3f868eba-f4e1-4e32-b271-391cf271fe97-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.647815 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/3f868eba-f4e1-4e32-b271-391cf271fe97-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.647860 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/3f868eba-f4e1-4e32-b271-391cf271fe97-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.647902 4558 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/3f868eba-f4e1-4e32-b271-391cf271fe97-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.647944 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/3f868eba-f4e1-4e32-b271-391cf271fe97-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.647988 4558 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/3f868eba-f4e1-4e32-b271-391cf271fe97-server-conf\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.648037 4558 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/3f868eba-f4e1-4e32-b271-391cf271fe97-plugins-conf\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.648082 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w8fm7\" (UniqueName: \"kubernetes.io/projected/3f868eba-f4e1-4e32-b271-391cf271fe97-kube-api-access-w8fm7\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.658343 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage20-crc" (UniqueName: "kubernetes.io/local-volume/local-storage20-crc") on node "crc" Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.748905 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage20-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage20-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.875284 4558 
generic.go:334] "Generic (PLEG): container finished" podID="3f868eba-f4e1-4e32-b271-391cf271fe97" containerID="7186b3f7bece934410de1e6b9f5050c2ca0be8663df3206ff0ed31c8bdab8497" exitCode=0 Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.875337 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-0" event={"ID":"3f868eba-f4e1-4e32-b271-391cf271fe97","Type":"ContainerDied","Data":"7186b3f7bece934410de1e6b9f5050c2ca0be8663df3206ff0ed31c8bdab8497"} Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.875360 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-0" event={"ID":"3f868eba-f4e1-4e32-b271-391cf271fe97","Type":"ContainerDied","Data":"f311bc2280f5763ffdea30dc963d623368834e0e8968e555959763b8021c1baa"} Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.875379 4558 scope.go:117] "RemoveContainer" containerID="7186b3f7bece934410de1e6b9f5050c2ca0be8663df3206ff0ed31c8bdab8497" Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.875464 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.878860 4558 generic.go:334] "Generic (PLEG): container finished" podID="bf355276-9e62-474e-bfb1-616dde5b83bc" containerID="1e8c1d79d17c69967f38cfb7502a531e6019712fea6395ed1952fa83bb5dfd3a" exitCode=0 Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.878893 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.878902 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" event={"ID":"bf355276-9e62-474e-bfb1-616dde5b83bc","Type":"ContainerDied","Data":"1e8c1d79d17c69967f38cfb7502a531e6019712fea6395ed1952fa83bb5dfd3a"} Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.878928 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" event={"ID":"bf355276-9e62-474e-bfb1-616dde5b83bc","Type":"ContainerDied","Data":"724277ae417b63e0868ec82c9d4f15f8b41d086acf0d26215797be7ac3368f0c"} Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.900716 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.902290 4558 scope.go:117] "RemoveContainer" containerID="1cbb8a79828c1017ed54f50de2db6768bd2eb566cb59fce2be3b621f69a500cd" Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.904793 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.925288 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-0"] Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.930267 4558 scope.go:117] "RemoveContainer" containerID="7186b3f7bece934410de1e6b9f5050c2ca0be8663df3206ff0ed31c8bdab8497" Jan 20 17:00:51 crc kubenswrapper[4558]: E0120 17:00:51.931426 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7186b3f7bece934410de1e6b9f5050c2ca0be8663df3206ff0ed31c8bdab8497\": container with ID starting with 7186b3f7bece934410de1e6b9f5050c2ca0be8663df3206ff0ed31c8bdab8497 not found: ID does not exist" 
containerID="7186b3f7bece934410de1e6b9f5050c2ca0be8663df3206ff0ed31c8bdab8497" Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.931459 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7186b3f7bece934410de1e6b9f5050c2ca0be8663df3206ff0ed31c8bdab8497"} err="failed to get container status \"7186b3f7bece934410de1e6b9f5050c2ca0be8663df3206ff0ed31c8bdab8497\": rpc error: code = NotFound desc = could not find container \"7186b3f7bece934410de1e6b9f5050c2ca0be8663df3206ff0ed31c8bdab8497\": container with ID starting with 7186b3f7bece934410de1e6b9f5050c2ca0be8663df3206ff0ed31c8bdab8497 not found: ID does not exist" Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.931480 4558 scope.go:117] "RemoveContainer" containerID="1cbb8a79828c1017ed54f50de2db6768bd2eb566cb59fce2be3b621f69a500cd" Jan 20 17:00:51 crc kubenswrapper[4558]: E0120 17:00:51.932017 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1cbb8a79828c1017ed54f50de2db6768bd2eb566cb59fce2be3b621f69a500cd\": container with ID starting with 1cbb8a79828c1017ed54f50de2db6768bd2eb566cb59fce2be3b621f69a500cd not found: ID does not exist" containerID="1cbb8a79828c1017ed54f50de2db6768bd2eb566cb59fce2be3b621f69a500cd" Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.932061 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1cbb8a79828c1017ed54f50de2db6768bd2eb566cb59fce2be3b621f69a500cd"} err="failed to get container status \"1cbb8a79828c1017ed54f50de2db6768bd2eb566cb59fce2be3b621f69a500cd\": rpc error: code = NotFound desc = could not find container \"1cbb8a79828c1017ed54f50de2db6768bd2eb566cb59fce2be3b621f69a500cd\": container with ID starting with 1cbb8a79828c1017ed54f50de2db6768bd2eb566cb59fce2be3b621f69a500cd not found: ID does not exist" Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.932085 4558 scope.go:117] "RemoveContainer" containerID="1e8c1d79d17c69967f38cfb7502a531e6019712fea6395ed1952fa83bb5dfd3a" Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.935543 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-0"] Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.947879 4558 scope.go:117] "RemoveContainer" containerID="e602226e4f361c85082bf7248cd70619371bfecaf58e19de2a31846417adbebd" Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.964949 4558 scope.go:117] "RemoveContainer" containerID="1e8c1d79d17c69967f38cfb7502a531e6019712fea6395ed1952fa83bb5dfd3a" Jan 20 17:00:51 crc kubenswrapper[4558]: E0120 17:00:51.965359 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1e8c1d79d17c69967f38cfb7502a531e6019712fea6395ed1952fa83bb5dfd3a\": container with ID starting with 1e8c1d79d17c69967f38cfb7502a531e6019712fea6395ed1952fa83bb5dfd3a not found: ID does not exist" containerID="1e8c1d79d17c69967f38cfb7502a531e6019712fea6395ed1952fa83bb5dfd3a" Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.965387 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1e8c1d79d17c69967f38cfb7502a531e6019712fea6395ed1952fa83bb5dfd3a"} err="failed to get container status \"1e8c1d79d17c69967f38cfb7502a531e6019712fea6395ed1952fa83bb5dfd3a\": rpc error: code = NotFound desc = could not find container \"1e8c1d79d17c69967f38cfb7502a531e6019712fea6395ed1952fa83bb5dfd3a\": 
container with ID starting with 1e8c1d79d17c69967f38cfb7502a531e6019712fea6395ed1952fa83bb5dfd3a not found: ID does not exist" Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.965403 4558 scope.go:117] "RemoveContainer" containerID="e602226e4f361c85082bf7248cd70619371bfecaf58e19de2a31846417adbebd" Jan 20 17:00:51 crc kubenswrapper[4558]: E0120 17:00:51.965646 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e602226e4f361c85082bf7248cd70619371bfecaf58e19de2a31846417adbebd\": container with ID starting with e602226e4f361c85082bf7248cd70619371bfecaf58e19de2a31846417adbebd not found: ID does not exist" containerID="e602226e4f361c85082bf7248cd70619371bfecaf58e19de2a31846417adbebd" Jan 20 17:00:51 crc kubenswrapper[4558]: I0120 17:00:51.965665 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e602226e4f361c85082bf7248cd70619371bfecaf58e19de2a31846417adbebd"} err="failed to get container status \"e602226e4f361c85082bf7248cd70619371bfecaf58e19de2a31846417adbebd\": rpc error: code = NotFound desc = could not find container \"e602226e4f361c85082bf7248cd70619371bfecaf58e19de2a31846417adbebd\": container with ID starting with e602226e4f361c85082bf7248cd70619371bfecaf58e19de2a31846417adbebd not found: ID does not exist" Jan 20 17:00:52 crc kubenswrapper[4558]: E0120 17:00:52.153067 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:00:52 crc kubenswrapper[4558]: E0120 17:00:52.153134 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/de7f5467-1e83-42f0-86bb-ade85deec8f3-combined-ca-bundle podName:de7f5467-1e83-42f0-86bb-ade85deec8f3 nodeName:}" failed. No retries permitted until 2026-01-20 17:01:00.153119936 +0000 UTC m=+1153.913457903 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/de7f5467-1e83-42f0-86bb-ade85deec8f3-combined-ca-bundle") pod "barbican-worker-c4f864f89-58gb9" (UID: "de7f5467-1e83-42f0-86bb-ade85deec8f3") : secret "combined-ca-bundle" not found Jan 20 17:00:52 crc kubenswrapper[4558]: E0120 17:00:52.153180 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/barbican-worker-config-data: secret "barbican-worker-config-data" not found Jan 20 17:00:52 crc kubenswrapper[4558]: E0120 17:00:52.153242 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/de7f5467-1e83-42f0-86bb-ade85deec8f3-config-data-custom podName:de7f5467-1e83-42f0-86bb-ade85deec8f3 nodeName:}" failed. No retries permitted until 2026-01-20 17:01:00.153229413 +0000 UTC m=+1153.913567379 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "config-data-custom" (UniqueName: "kubernetes.io/secret/de7f5467-1e83-42f0-86bb-ade85deec8f3-config-data-custom") pod "barbican-worker-c4f864f89-58gb9" (UID: "de7f5467-1e83-42f0-86bb-ade85deec8f3") : secret "barbican-worker-config-data" not found Jan 20 17:00:52 crc kubenswrapper[4558]: E0120 17:00:52.153186 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/barbican-config-data: secret "barbican-config-data" not found Jan 20 17:00:52 crc kubenswrapper[4558]: E0120 17:00:52.153273 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/de7f5467-1e83-42f0-86bb-ade85deec8f3-config-data podName:de7f5467-1e83-42f0-86bb-ade85deec8f3 nodeName:}" failed. 
No retries permitted until 2026-01-20 17:01:00.153267504 +0000 UTC m=+1153.913605471 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/de7f5467-1e83-42f0-86bb-ade85deec8f3-config-data") pod "barbican-worker-c4f864f89-58gb9" (UID: "de7f5467-1e83-42f0-86bb-ade85deec8f3") : secret "barbican-config-data" not found Jan 20 17:00:52 crc kubenswrapper[4558]: E0120 17:00:52.355502 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:00:52 crc kubenswrapper[4558]: E0120 17:00:52.355523 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/barbican-config-data: secret "barbican-config-data" not found Jan 20 17:00:52 crc kubenswrapper[4558]: E0120 17:00:52.355577 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7776f921-0ae7-4a8c-b444-cf5b4b9a4f65-config-data podName:7776f921-0ae7-4a8c-b444-cf5b4b9a4f65 nodeName:}" failed. No retries permitted until 2026-01-20 17:01:00.355564446 +0000 UTC m=+1154.115902413 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/7776f921-0ae7-4a8c-b444-cf5b4b9a4f65-config-data") pod "barbican-keystone-listener-7fc5477d66-pz8q2" (UID: "7776f921-0ae7-4a8c-b444-cf5b4b9a4f65") : secret "barbican-config-data" not found Jan 20 17:00:52 crc kubenswrapper[4558]: E0120 17:00:52.355594 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7776f921-0ae7-4a8c-b444-cf5b4b9a4f65-combined-ca-bundle podName:7776f921-0ae7-4a8c-b444-cf5b4b9a4f65 nodeName:}" failed. No retries permitted until 2026-01-20 17:01:00.355586788 +0000 UTC m=+1154.115924755 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/7776f921-0ae7-4a8c-b444-cf5b4b9a4f65-combined-ca-bundle") pod "barbican-keystone-listener-7fc5477d66-pz8q2" (UID: "7776f921-0ae7-4a8c-b444-cf5b4b9a4f65") : secret "combined-ca-bundle" not found Jan 20 17:00:52 crc kubenswrapper[4558]: E0120 17:00:52.355611 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/barbican-keystone-listener-config-data: secret "barbican-keystone-listener-config-data" not found Jan 20 17:00:52 crc kubenswrapper[4558]: E0120 17:00:52.355741 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7776f921-0ae7-4a8c-b444-cf5b4b9a4f65-config-data-custom podName:7776f921-0ae7-4a8c-b444-cf5b4b9a4f65 nodeName:}" failed. No retries permitted until 2026-01-20 17:01:00.355712133 +0000 UTC m=+1154.116050110 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "config-data-custom" (UniqueName: "kubernetes.io/secret/7776f921-0ae7-4a8c-b444-cf5b4b9a4f65-config-data-custom") pod "barbican-keystone-listener-7fc5477d66-pz8q2" (UID: "7776f921-0ae7-4a8c-b444-cf5b4b9a4f65") : secret "barbican-keystone-listener-config-data" not found Jan 20 17:00:52 crc kubenswrapper[4558]: I0120 17:00:52.580835 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3f868eba-f4e1-4e32-b271-391cf271fe97" path="/var/lib/kubelet/pods/3f868eba-f4e1-4e32-b271-391cf271fe97/volumes" Jan 20 17:00:52 crc kubenswrapper[4558]: I0120 17:00:52.584452 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf355276-9e62-474e-bfb1-616dde5b83bc" path="/var/lib/kubelet/pods/bf355276-9e62-474e-bfb1-616dde5b83bc/volumes" Jan 20 17:00:54 crc kubenswrapper[4558]: I0120 17:00:54.580839 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-rqg8f" Jan 20 17:00:54 crc kubenswrapper[4558]: I0120 17:00:54.630418 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-58b8ddd7c-rr7mz"] Jan 20 17:00:54 crc kubenswrapper[4558]: I0120 17:00:54.630643 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-58b8ddd7c-rr7mz" podUID="d9be5f16-16de-4ae6-9007-cca3e80501b9" containerName="dnsmasq-dns" containerID="cri-o://45b2a75ffe97418a0ca920c583c6b2e02b1fd292c83ac512e2a46d1e00cba17b" gracePeriod=10 Jan 20 17:00:54 crc kubenswrapper[4558]: I0120 17:00:54.916845 4558 generic.go:334] "Generic (PLEG): container finished" podID="d9be5f16-16de-4ae6-9007-cca3e80501b9" containerID="45b2a75ffe97418a0ca920c583c6b2e02b1fd292c83ac512e2a46d1e00cba17b" exitCode=0 Jan 20 17:00:54 crc kubenswrapper[4558]: I0120 17:00:54.917037 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-58b8ddd7c-rr7mz" event={"ID":"d9be5f16-16de-4ae6-9007-cca3e80501b9","Type":"ContainerDied","Data":"45b2a75ffe97418a0ca920c583c6b2e02b1fd292c83ac512e2a46d1e00cba17b"} Jan 20 17:00:55 crc kubenswrapper[4558]: I0120 17:00:55.000866 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-58b8ddd7c-rr7mz" Jan 20 17:00:55 crc kubenswrapper[4558]: I0120 17:00:55.195489 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d9be5f16-16de-4ae6-9007-cca3e80501b9-config\") pod \"d9be5f16-16de-4ae6-9007-cca3e80501b9\" (UID: \"d9be5f16-16de-4ae6-9007-cca3e80501b9\") " Jan 20 17:00:55 crc kubenswrapper[4558]: I0120 17:00:55.195595 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2lr8g\" (UniqueName: \"kubernetes.io/projected/d9be5f16-16de-4ae6-9007-cca3e80501b9-kube-api-access-2lr8g\") pod \"d9be5f16-16de-4ae6-9007-cca3e80501b9\" (UID: \"d9be5f16-16de-4ae6-9007-cca3e80501b9\") " Jan 20 17:00:55 crc kubenswrapper[4558]: I0120 17:00:55.195642 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/d9be5f16-16de-4ae6-9007-cca3e80501b9-dnsmasq-svc\") pod \"d9be5f16-16de-4ae6-9007-cca3e80501b9\" (UID: \"d9be5f16-16de-4ae6-9007-cca3e80501b9\") " Jan 20 17:00:55 crc kubenswrapper[4558]: I0120 17:00:55.195666 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d9be5f16-16de-4ae6-9007-cca3e80501b9-dns-swift-storage-0\") pod \"d9be5f16-16de-4ae6-9007-cca3e80501b9\" (UID: \"d9be5f16-16de-4ae6-9007-cca3e80501b9\") " Jan 20 17:00:55 crc kubenswrapper[4558]: I0120 17:00:55.226257 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d9be5f16-16de-4ae6-9007-cca3e80501b9-kube-api-access-2lr8g" (OuterVolumeSpecName: "kube-api-access-2lr8g") pod "d9be5f16-16de-4ae6-9007-cca3e80501b9" (UID: "d9be5f16-16de-4ae6-9007-cca3e80501b9"). InnerVolumeSpecName "kube-api-access-2lr8g". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:00:55 crc kubenswrapper[4558]: I0120 17:00:55.252994 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d9be5f16-16de-4ae6-9007-cca3e80501b9-dnsmasq-svc" (OuterVolumeSpecName: "dnsmasq-svc") pod "d9be5f16-16de-4ae6-9007-cca3e80501b9" (UID: "d9be5f16-16de-4ae6-9007-cca3e80501b9"). InnerVolumeSpecName "dnsmasq-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:00:55 crc kubenswrapper[4558]: I0120 17:00:55.267615 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d9be5f16-16de-4ae6-9007-cca3e80501b9-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "d9be5f16-16de-4ae6-9007-cca3e80501b9" (UID: "d9be5f16-16de-4ae6-9007-cca3e80501b9"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:00:55 crc kubenswrapper[4558]: I0120 17:00:55.293935 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d9be5f16-16de-4ae6-9007-cca3e80501b9-config" (OuterVolumeSpecName: "config") pod "d9be5f16-16de-4ae6-9007-cca3e80501b9" (UID: "d9be5f16-16de-4ae6-9007-cca3e80501b9"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:00:55 crc kubenswrapper[4558]: I0120 17:00:55.298457 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2lr8g\" (UniqueName: \"kubernetes.io/projected/d9be5f16-16de-4ae6-9007-cca3e80501b9-kube-api-access-2lr8g\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:55 crc kubenswrapper[4558]: I0120 17:00:55.298483 4558 reconciler_common.go:293] "Volume detached for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/d9be5f16-16de-4ae6-9007-cca3e80501b9-dnsmasq-svc\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:55 crc kubenswrapper[4558]: I0120 17:00:55.298496 4558 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d9be5f16-16de-4ae6-9007-cca3e80501b9-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:55 crc kubenswrapper[4558]: I0120 17:00:55.298506 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d9be5f16-16de-4ae6-9007-cca3e80501b9-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:00:55 crc kubenswrapper[4558]: I0120 17:00:55.634765 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/neutron-5d557df858-frznf" podUID="7d0b0e33-9ce7-44d9-8b09-6008715fe1a0" containerName="neutron-httpd" probeResult="failure" output="Get \"https://10.217.0.142:9696/\": dial tcp 10.217.0.142:9696: connect: connection refused" Jan 20 17:00:55 crc kubenswrapper[4558]: I0120 17:00:55.926558 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-58b8ddd7c-rr7mz" event={"ID":"d9be5f16-16de-4ae6-9007-cca3e80501b9","Type":"ContainerDied","Data":"393c240e307e6f1c510b640fb0e60d5df3df97c49287fbfe0533b6fedc013ca9"} Jan 20 17:00:55 crc kubenswrapper[4558]: I0120 17:00:55.926601 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-58b8ddd7c-rr7mz" Jan 20 17:00:55 crc kubenswrapper[4558]: I0120 17:00:55.926774 4558 scope.go:117] "RemoveContainer" containerID="45b2a75ffe97418a0ca920c583c6b2e02b1fd292c83ac512e2a46d1e00cba17b" Jan 20 17:00:55 crc kubenswrapper[4558]: I0120 17:00:55.950332 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-58b8ddd7c-rr7mz"] Jan 20 17:00:55 crc kubenswrapper[4558]: I0120 17:00:55.952329 4558 scope.go:117] "RemoveContainer" containerID="7477443fea20b5ececf459b62e43d6fa92bceedf2e0c238ff19891cd39e64fc4" Jan 20 17:00:55 crc kubenswrapper[4558]: I0120 17:00:55.954431 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-58b8ddd7c-rr7mz"] Jan 20 17:00:56 crc kubenswrapper[4558]: I0120 17:00:56.573939 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d9be5f16-16de-4ae6-9007-cca3e80501b9" path="/var/lib/kubelet/pods/d9be5f16-16de-4ae6-9007-cca3e80501b9/volumes" Jan 20 17:00:59 crc kubenswrapper[4558]: E0120 17:00:59.460855 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-neutron-internal-svc: secret "cert-neutron-internal-svc" not found Jan 20 17:00:59 crc kubenswrapper[4558]: E0120 17:00:59.461136 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-internal-tls-certs podName:7d0b0e33-9ce7-44d9-8b09-6008715fe1a0 nodeName:}" failed. 
No retries permitted until 2026-01-20 17:01:15.461123371 +0000 UTC m=+1169.221461339 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-internal-tls-certs") pod "neutron-5d557df858-frznf" (UID: "7d0b0e33-9ce7-44d9-8b09-6008715fe1a0") : secret "cert-neutron-internal-svc" not found Jan 20 17:00:59 crc kubenswrapper[4558]: E0120 17:00:59.460937 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/neutron-httpd-config: secret "neutron-httpd-config" not found Jan 20 17:00:59 crc kubenswrapper[4558]: E0120 17:00:59.461314 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-httpd-config podName:7d0b0e33-9ce7-44d9-8b09-6008715fe1a0 nodeName:}" failed. No retries permitted until 2026-01-20 17:01:15.461306014 +0000 UTC m=+1169.221643982 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "httpd-config" (UniqueName: "kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-httpd-config") pod "neutron-5d557df858-frznf" (UID: "7d0b0e33-9ce7-44d9-8b09-6008715fe1a0") : secret "neutron-httpd-config" not found Jan 20 17:00:59 crc kubenswrapper[4558]: E0120 17:00:59.460968 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-neutron-public-svc: secret "cert-neutron-public-svc" not found Jan 20 17:00:59 crc kubenswrapper[4558]: E0120 17:00:59.461392 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-public-tls-certs podName:7d0b0e33-9ce7-44d9-8b09-6008715fe1a0 nodeName:}" failed. No retries permitted until 2026-01-20 17:01:15.461386556 +0000 UTC m=+1169.221724523 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-public-tls-certs") pod "neutron-5d557df858-frznf" (UID: "7d0b0e33-9ce7-44d9-8b09-6008715fe1a0") : secret "cert-neutron-public-svc" not found Jan 20 17:00:59 crc kubenswrapper[4558]: E0120 17:00:59.460994 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/neutron-config: secret "neutron-config" not found Jan 20 17:00:59 crc kubenswrapper[4558]: E0120 17:00:59.461459 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-config podName:7d0b0e33-9ce7-44d9-8b09-6008715fe1a0 nodeName:}" failed. No retries permitted until 2026-01-20 17:01:15.461454013 +0000 UTC m=+1169.221791980 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-config") pod "neutron-5d557df858-frznf" (UID: "7d0b0e33-9ce7-44d9-8b09-6008715fe1a0") : secret "neutron-config" not found Jan 20 17:00:59 crc kubenswrapper[4558]: E0120 17:00:59.461018 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:00:59 crc kubenswrapper[4558]: E0120 17:00:59.461529 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-combined-ca-bundle podName:7d0b0e33-9ce7-44d9-8b09-6008715fe1a0 nodeName:}" failed. No retries permitted until 2026-01-20 17:01:15.461523704 +0000 UTC m=+1169.221861671 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-combined-ca-bundle") pod "neutron-5d557df858-frznf" (UID: "7d0b0e33-9ce7-44d9-8b09-6008715fe1a0") : secret "combined-ca-bundle" not found Jan 20 17:00:59 crc kubenswrapper[4558]: E0120 17:00:59.461047 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-neutron-ovndbs: secret "cert-neutron-ovndbs" not found Jan 20 17:00:59 crc kubenswrapper[4558]: E0120 17:00:59.461593 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-ovndb-tls-certs podName:7d0b0e33-9ce7-44d9-8b09-6008715fe1a0 nodeName:}" failed. No retries permitted until 2026-01-20 17:01:15.461588445 +0000 UTC m=+1169.221926413 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "ovndb-tls-certs" (UniqueName: "kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-ovndb-tls-certs") pod "neutron-5d557df858-frznf" (UID: "7d0b0e33-9ce7-44d9-8b09-6008715fe1a0") : secret "cert-neutron-ovndbs" not found Jan 20 17:01:00 crc kubenswrapper[4558]: E0120 17:01:00.168462 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/barbican-worker-config-data: secret "barbican-worker-config-data" not found Jan 20 17:01:00 crc kubenswrapper[4558]: E0120 17:01:00.168495 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:01:00 crc kubenswrapper[4558]: E0120 17:01:00.168528 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/de7f5467-1e83-42f0-86bb-ade85deec8f3-config-data-custom podName:de7f5467-1e83-42f0-86bb-ade85deec8f3 nodeName:}" failed. No retries permitted until 2026-01-20 17:01:16.168514099 +0000 UTC m=+1169.928852076 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "config-data-custom" (UniqueName: "kubernetes.io/secret/de7f5467-1e83-42f0-86bb-ade85deec8f3-config-data-custom") pod "barbican-worker-c4f864f89-58gb9" (UID: "de7f5467-1e83-42f0-86bb-ade85deec8f3") : secret "barbican-worker-config-data" not found Jan 20 17:01:00 crc kubenswrapper[4558]: E0120 17:01:00.168547 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/de7f5467-1e83-42f0-86bb-ade85deec8f3-combined-ca-bundle podName:de7f5467-1e83-42f0-86bb-ade85deec8f3 nodeName:}" failed. No retries permitted until 2026-01-20 17:01:16.168537523 +0000 UTC m=+1169.928875490 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/de7f5467-1e83-42f0-86bb-ade85deec8f3-combined-ca-bundle") pod "barbican-worker-c4f864f89-58gb9" (UID: "de7f5467-1e83-42f0-86bb-ade85deec8f3") : secret "combined-ca-bundle" not found Jan 20 17:01:00 crc kubenswrapper[4558]: E0120 17:01:00.168468 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/barbican-config-data: secret "barbican-config-data" not found Jan 20 17:01:00 crc kubenswrapper[4558]: E0120 17:01:00.168571 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/de7f5467-1e83-42f0-86bb-ade85deec8f3-config-data podName:de7f5467-1e83-42f0-86bb-ade85deec8f3 nodeName:}" failed. No retries permitted until 2026-01-20 17:01:16.168566056 +0000 UTC m=+1169.928904023 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/de7f5467-1e83-42f0-86bb-ade85deec8f3-config-data") pod "barbican-worker-c4f864f89-58gb9" (UID: "de7f5467-1e83-42f0-86bb-ade85deec8f3") : secret "barbican-config-data" not found Jan 20 17:01:00 crc kubenswrapper[4558]: E0120 17:01:00.370250 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/barbican-config-data: secret "barbican-config-data" not found Jan 20 17:01:00 crc kubenswrapper[4558]: E0120 17:01:00.370301 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7776f921-0ae7-4a8c-b444-cf5b4b9a4f65-config-data podName:7776f921-0ae7-4a8c-b444-cf5b4b9a4f65 nodeName:}" failed. No retries permitted until 2026-01-20 17:01:16.37028948 +0000 UTC m=+1170.130627447 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/7776f921-0ae7-4a8c-b444-cf5b4b9a4f65-config-data") pod "barbican-keystone-listener-7fc5477d66-pz8q2" (UID: "7776f921-0ae7-4a8c-b444-cf5b4b9a4f65") : secret "barbican-config-data" not found Jan 20 17:01:00 crc kubenswrapper[4558]: E0120 17:01:00.370368 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:01:00 crc kubenswrapper[4558]: E0120 17:01:00.370414 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7776f921-0ae7-4a8c-b444-cf5b4b9a4f65-combined-ca-bundle podName:7776f921-0ae7-4a8c-b444-cf5b4b9a4f65 nodeName:}" failed. No retries permitted until 2026-01-20 17:01:16.370401961 +0000 UTC m=+1170.130739928 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/7776f921-0ae7-4a8c-b444-cf5b4b9a4f65-combined-ca-bundle") pod "barbican-keystone-listener-7fc5477d66-pz8q2" (UID: "7776f921-0ae7-4a8c-b444-cf5b4b9a4f65") : secret "combined-ca-bundle" not found Jan 20 17:01:00 crc kubenswrapper[4558]: E0120 17:01:00.370426 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/barbican-keystone-listener-config-data: secret "barbican-keystone-listener-config-data" not found Jan 20 17:01:00 crc kubenswrapper[4558]: E0120 17:01:00.370450 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7776f921-0ae7-4a8c-b444-cf5b4b9a4f65-config-data-custom podName:7776f921-0ae7-4a8c-b444-cf5b4b9a4f65 nodeName:}" failed. No retries permitted until 2026-01-20 17:01:16.370443699 +0000 UTC m=+1170.130781666 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "config-data-custom" (UniqueName: "kubernetes.io/secret/7776f921-0ae7-4a8c-b444-cf5b4b9a4f65-config-data-custom") pod "barbican-keystone-listener-7fc5477d66-pz8q2" (UID: "7776f921-0ae7-4a8c-b444-cf5b4b9a4f65") : secret "barbican-keystone-listener-config-data" not found Jan 20 17:01:11 crc kubenswrapper[4558]: I0120 17:01:11.437197 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/neutron-6b64ccd79d-5qj2x" podUID="5d3423c4-ff78-4ff7-b42b-b3c93b309d52" containerName="neutron-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 503" Jan 20 17:01:14 crc kubenswrapper[4558]: I0120 17:01:14.252526 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:01:14 crc kubenswrapper[4558]: I0120 17:01:14.436319 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swift\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ad708448-38df-4494-bca7-fe394c9b53a7\" (UID: \"ad708448-38df-4494-bca7-fe394c9b53a7\") " Jan 20 17:01:14 crc kubenswrapper[4558]: I0120 17:01:14.436522 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/ad708448-38df-4494-bca7-fe394c9b53a7-etc-swift\") pod \"ad708448-38df-4494-bca7-fe394c9b53a7\" (UID: \"ad708448-38df-4494-bca7-fe394c9b53a7\") " Jan 20 17:01:14 crc kubenswrapper[4558]: I0120 17:01:14.436634 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/ad708448-38df-4494-bca7-fe394c9b53a7-lock\") pod \"ad708448-38df-4494-bca7-fe394c9b53a7\" (UID: \"ad708448-38df-4494-bca7-fe394c9b53a7\") " Jan 20 17:01:14 crc kubenswrapper[4558]: I0120 17:01:14.436711 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2ql5k\" (UniqueName: \"kubernetes.io/projected/ad708448-38df-4494-bca7-fe394c9b53a7-kube-api-access-2ql5k\") pod \"ad708448-38df-4494-bca7-fe394c9b53a7\" (UID: \"ad708448-38df-4494-bca7-fe394c9b53a7\") " Jan 20 17:01:14 crc kubenswrapper[4558]: I0120 17:01:14.436817 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/ad708448-38df-4494-bca7-fe394c9b53a7-cache\") pod \"ad708448-38df-4494-bca7-fe394c9b53a7\" (UID: \"ad708448-38df-4494-bca7-fe394c9b53a7\") " Jan 20 17:01:14 crc kubenswrapper[4558]: I0120 17:01:14.437672 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ad708448-38df-4494-bca7-fe394c9b53a7-cache" (OuterVolumeSpecName: "cache") pod "ad708448-38df-4494-bca7-fe394c9b53a7" (UID: "ad708448-38df-4494-bca7-fe394c9b53a7"). InnerVolumeSpecName "cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:01:14 crc kubenswrapper[4558]: I0120 17:01:14.438490 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ad708448-38df-4494-bca7-fe394c9b53a7-lock" (OuterVolumeSpecName: "lock") pod "ad708448-38df-4494-bca7-fe394c9b53a7" (UID: "ad708448-38df-4494-bca7-fe394c9b53a7"). InnerVolumeSpecName "lock". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:01:14 crc kubenswrapper[4558]: I0120 17:01:14.442103 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ad708448-38df-4494-bca7-fe394c9b53a7-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "ad708448-38df-4494-bca7-fe394c9b53a7" (UID: "ad708448-38df-4494-bca7-fe394c9b53a7"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:01:14 crc kubenswrapper[4558]: I0120 17:01:14.442274 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ad708448-38df-4494-bca7-fe394c9b53a7-kube-api-access-2ql5k" (OuterVolumeSpecName: "kube-api-access-2ql5k") pod "ad708448-38df-4494-bca7-fe394c9b53a7" (UID: "ad708448-38df-4494-bca7-fe394c9b53a7"). InnerVolumeSpecName "kube-api-access-2ql5k". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:01:14 crc kubenswrapper[4558]: I0120 17:01:14.451213 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "swift") pod "ad708448-38df-4494-bca7-fe394c9b53a7" (UID: "ad708448-38df-4494-bca7-fe394c9b53a7"). InnerVolumeSpecName "local-storage05-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:01:14 crc kubenswrapper[4558]: I0120 17:01:14.538140 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" " Jan 20 17:01:14 crc kubenswrapper[4558]: I0120 17:01:14.538204 4558 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/ad708448-38df-4494-bca7-fe394c9b53a7-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 20 17:01:14 crc kubenswrapper[4558]: I0120 17:01:14.538215 4558 reconciler_common.go:293] "Volume detached for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/ad708448-38df-4494-bca7-fe394c9b53a7-lock\") on node \"crc\" DevicePath \"\"" Jan 20 17:01:14 crc kubenswrapper[4558]: I0120 17:01:14.538232 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2ql5k\" (UniqueName: \"kubernetes.io/projected/ad708448-38df-4494-bca7-fe394c9b53a7-kube-api-access-2ql5k\") on node \"crc\" DevicePath \"\"" Jan 20 17:01:14 crc kubenswrapper[4558]: I0120 17:01:14.538241 4558 reconciler_common.go:293] "Volume detached for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/ad708448-38df-4494-bca7-fe394c9b53a7-cache\") on node \"crc\" DevicePath \"\"" Jan 20 17:01:14 crc kubenswrapper[4558]: I0120 17:01:14.548937 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc" Jan 20 17:01:14 crc kubenswrapper[4558]: I0120 17:01:14.639673 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:01:14 crc kubenswrapper[4558]: I0120 17:01:14.789367 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_neutron-6b64ccd79d-5qj2x_5d3423c4-ff78-4ff7-b42b-b3c93b309d52/neutron-httpd/0.log" Jan 20 17:01:14 crc kubenswrapper[4558]: I0120 17:01:14.789709 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_neutron-6b64ccd79d-5qj2x_5d3423c4-ff78-4ff7-b42b-b3c93b309d52/neutron-api/0.log" Jan 20 17:01:14 crc kubenswrapper[4558]: I0120 17:01:14.789765 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-6b64ccd79d-5qj2x" Jan 20 17:01:14 crc kubenswrapper[4558]: I0120 17:01:14.864623 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_neutron-5d557df858-frznf_7d0b0e33-9ce7-44d9-8b09-6008715fe1a0/neutron-api/0.log" Jan 20 17:01:14 crc kubenswrapper[4558]: I0120 17:01:14.864682 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-5d557df858-frznf" Jan 20 17:01:14 crc kubenswrapper[4558]: I0120 17:01:14.943565 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/5d3423c4-ff78-4ff7-b42b-b3c93b309d52-ovndb-tls-certs\") pod \"5d3423c4-ff78-4ff7-b42b-b3c93b309d52\" (UID: \"5d3423c4-ff78-4ff7-b42b-b3c93b309d52\") " Jan 20 17:01:14 crc kubenswrapper[4558]: I0120 17:01:14.943606 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5d3423c4-ff78-4ff7-b42b-b3c93b309d52-public-tls-certs\") pod \"5d3423c4-ff78-4ff7-b42b-b3c93b309d52\" (UID: \"5d3423c4-ff78-4ff7-b42b-b3c93b309d52\") " Jan 20 17:01:14 crc kubenswrapper[4558]: I0120 17:01:14.943636 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/5d3423c4-ff78-4ff7-b42b-b3c93b309d52-config\") pod \"5d3423c4-ff78-4ff7-b42b-b3c93b309d52\" (UID: \"5d3423c4-ff78-4ff7-b42b-b3c93b309d52\") " Jan 20 17:01:14 crc kubenswrapper[4558]: I0120 17:01:14.943708 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/5d3423c4-ff78-4ff7-b42b-b3c93b309d52-httpd-config\") pod \"5d3423c4-ff78-4ff7-b42b-b3c93b309d52\" (UID: \"5d3423c4-ff78-4ff7-b42b-b3c93b309d52\") " Jan 20 17:01:14 crc kubenswrapper[4558]: I0120 17:01:14.943731 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d3423c4-ff78-4ff7-b42b-b3c93b309d52-combined-ca-bundle\") pod \"5d3423c4-ff78-4ff7-b42b-b3c93b309d52\" (UID: \"5d3423c4-ff78-4ff7-b42b-b3c93b309d52\") " Jan 20 17:01:14 crc kubenswrapper[4558]: I0120 17:01:14.943938 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5d3423c4-ff78-4ff7-b42b-b3c93b309d52-internal-tls-certs\") pod \"5d3423c4-ff78-4ff7-b42b-b3c93b309d52\" (UID: \"5d3423c4-ff78-4ff7-b42b-b3c93b309d52\") " Jan 20 17:01:14 crc kubenswrapper[4558]: I0120 17:01:14.943975 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4cln2\" (UniqueName: \"kubernetes.io/projected/5d3423c4-ff78-4ff7-b42b-b3c93b309d52-kube-api-access-4cln2\") pod \"5d3423c4-ff78-4ff7-b42b-b3c93b309d52\" (UID: \"5d3423c4-ff78-4ff7-b42b-b3c93b309d52\") " Jan 20 17:01:14 crc kubenswrapper[4558]: I0120 17:01:14.943997 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g7g6w\" (UniqueName: \"kubernetes.io/projected/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-kube-api-access-g7g6w\") pod \"7d0b0e33-9ce7-44d9-8b09-6008715fe1a0\" (UID: \"7d0b0e33-9ce7-44d9-8b09-6008715fe1a0\") " Jan 20 17:01:14 crc kubenswrapper[4558]: I0120 17:01:14.944023 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-public-tls-certs\") pod \"7d0b0e33-9ce7-44d9-8b09-6008715fe1a0\" (UID: \"7d0b0e33-9ce7-44d9-8b09-6008715fe1a0\") " Jan 20 17:01:14 crc kubenswrapper[4558]: I0120 17:01:14.946694 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d3423c4-ff78-4ff7-b42b-b3c93b309d52-httpd-config" (OuterVolumeSpecName: "httpd-config") pod 
"5d3423c4-ff78-4ff7-b42b-b3c93b309d52" (UID: "5d3423c4-ff78-4ff7-b42b-b3c93b309d52"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:01:14 crc kubenswrapper[4558]: I0120 17:01:14.946838 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-kube-api-access-g7g6w" (OuterVolumeSpecName: "kube-api-access-g7g6w") pod "7d0b0e33-9ce7-44d9-8b09-6008715fe1a0" (UID: "7d0b0e33-9ce7-44d9-8b09-6008715fe1a0"). InnerVolumeSpecName "kube-api-access-g7g6w". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:01:14 crc kubenswrapper[4558]: I0120 17:01:14.946965 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5d3423c4-ff78-4ff7-b42b-b3c93b309d52-kube-api-access-4cln2" (OuterVolumeSpecName: "kube-api-access-4cln2") pod "5d3423c4-ff78-4ff7-b42b-b3c93b309d52" (UID: "5d3423c4-ff78-4ff7-b42b-b3c93b309d52"). InnerVolumeSpecName "kube-api-access-4cln2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:01:14 crc kubenswrapper[4558]: I0120 17:01:14.967802 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d3423c4-ff78-4ff7-b42b-b3c93b309d52-config" (OuterVolumeSpecName: "config") pod "5d3423c4-ff78-4ff7-b42b-b3c93b309d52" (UID: "5d3423c4-ff78-4ff7-b42b-b3c93b309d52"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:01:14 crc kubenswrapper[4558]: I0120 17:01:14.968868 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "7d0b0e33-9ce7-44d9-8b09-6008715fe1a0" (UID: "7d0b0e33-9ce7-44d9-8b09-6008715fe1a0"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:01:14 crc kubenswrapper[4558]: I0120 17:01:14.969338 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d3423c4-ff78-4ff7-b42b-b3c93b309d52-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "5d3423c4-ff78-4ff7-b42b-b3c93b309d52" (UID: "5d3423c4-ff78-4ff7-b42b-b3c93b309d52"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:01:14 crc kubenswrapper[4558]: I0120 17:01:14.969810 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d3423c4-ff78-4ff7-b42b-b3c93b309d52-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "5d3423c4-ff78-4ff7-b42b-b3c93b309d52" (UID: "5d3423c4-ff78-4ff7-b42b-b3c93b309d52"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:01:14 crc kubenswrapper[4558]: I0120 17:01:14.970069 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d3423c4-ff78-4ff7-b42b-b3c93b309d52-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5d3423c4-ff78-4ff7-b42b-b3c93b309d52" (UID: "5d3423c4-ff78-4ff7-b42b-b3c93b309d52"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:01:14 crc kubenswrapper[4558]: I0120 17:01:14.978091 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d3423c4-ff78-4ff7-b42b-b3c93b309d52-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "5d3423c4-ff78-4ff7-b42b-b3c93b309d52" (UID: "5d3423c4-ff78-4ff7-b42b-b3c93b309d52"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.034973 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_neutron-5d557df858-frznf_7d0b0e33-9ce7-44d9-8b09-6008715fe1a0/neutron-api/0.log" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.035017 4558 generic.go:334] "Generic (PLEG): container finished" podID="7d0b0e33-9ce7-44d9-8b09-6008715fe1a0" containerID="759720ba5c84a0de6cd2dbe18773a4daeffbac9fb1b15d97e54f0ba7761ed858" exitCode=137 Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.035066 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-5d557df858-frznf" event={"ID":"7d0b0e33-9ce7-44d9-8b09-6008715fe1a0","Type":"ContainerDied","Data":"759720ba5c84a0de6cd2dbe18773a4daeffbac9fb1b15d97e54f0ba7761ed858"} Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.035093 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-5d557df858-frznf" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.035106 4558 scope.go:117] "RemoveContainer" containerID="0db5ea38beca13c9805cb3405d85b10e954c953b65c96cec72dc60b73ae7e610" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.035095 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-5d557df858-frznf" event={"ID":"7d0b0e33-9ce7-44d9-8b09-6008715fe1a0","Type":"ContainerDied","Data":"37a6708a9860e88b4efa532ce9bee2f4585543237bb73c3a902befede3cff640"} Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.038005 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_neutron-6b64ccd79d-5qj2x_5d3423c4-ff78-4ff7-b42b-b3c93b309d52/neutron-httpd/0.log" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.038492 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_neutron-6b64ccd79d-5qj2x_5d3423c4-ff78-4ff7-b42b-b3c93b309d52/neutron-api/0.log" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.038540 4558 generic.go:334] "Generic (PLEG): container finished" podID="5d3423c4-ff78-4ff7-b42b-b3c93b309d52" containerID="37aa08ee2771757bea1365d0523225133f3f9a35715f5122f8e028516447be51" exitCode=137 Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.038556 4558 generic.go:334] "Generic (PLEG): container finished" podID="5d3423c4-ff78-4ff7-b42b-b3c93b309d52" containerID="b24670af574f9ed9a920845a520dd0b88057aa73de3d8a8c8308fb29c005498d" exitCode=137 Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.038588 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-6b64ccd79d-5qj2x" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.038607 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-6b64ccd79d-5qj2x" event={"ID":"5d3423c4-ff78-4ff7-b42b-b3c93b309d52","Type":"ContainerDied","Data":"37aa08ee2771757bea1365d0523225133f3f9a35715f5122f8e028516447be51"} Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.038629 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-6b64ccd79d-5qj2x" event={"ID":"5d3423c4-ff78-4ff7-b42b-b3c93b309d52","Type":"ContainerDied","Data":"b24670af574f9ed9a920845a520dd0b88057aa73de3d8a8c8308fb29c005498d"} Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.038639 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-6b64ccd79d-5qj2x" event={"ID":"5d3423c4-ff78-4ff7-b42b-b3c93b309d52","Type":"ContainerDied","Data":"be6803793562d2e8a11899885d3f7d76044a96ffc3c42039db66dbfe66c75870"} Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.046040 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-internal-tls-certs\") pod \"7d0b0e33-9ce7-44d9-8b09-6008715fe1a0\" (UID: \"7d0b0e33-9ce7-44d9-8b09-6008715fe1a0\") " Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.046073 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-ovndb-tls-certs\") pod \"7d0b0e33-9ce7-44d9-8b09-6008715fe1a0\" (UID: \"7d0b0e33-9ce7-44d9-8b09-6008715fe1a0\") " Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.046097 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-config\") pod \"7d0b0e33-9ce7-44d9-8b09-6008715fe1a0\" (UID: \"7d0b0e33-9ce7-44d9-8b09-6008715fe1a0\") " Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.046179 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-httpd-config\") pod \"7d0b0e33-9ce7-44d9-8b09-6008715fe1a0\" (UID: \"7d0b0e33-9ce7-44d9-8b09-6008715fe1a0\") " Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.046276 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-combined-ca-bundle\") pod \"7d0b0e33-9ce7-44d9-8b09-6008715fe1a0\" (UID: \"7d0b0e33-9ce7-44d9-8b09-6008715fe1a0\") " Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.046591 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/5d3423c4-ff78-4ff7-b42b-b3c93b309d52-httpd-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.046607 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d3423c4-ff78-4ff7-b42b-b3c93b309d52-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.046617 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/5d3423c4-ff78-4ff7-b42b-b3c93b309d52-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.046627 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4cln2\" (UniqueName: \"kubernetes.io/projected/5d3423c4-ff78-4ff7-b42b-b3c93b309d52-kube-api-access-4cln2\") on node \"crc\" DevicePath \"\"" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.046635 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g7g6w\" (UniqueName: \"kubernetes.io/projected/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-kube-api-access-g7g6w\") on node \"crc\" DevicePath \"\"" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.046644 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.046651 4558 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/5d3423c4-ff78-4ff7-b42b-b3c93b309d52-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.046659 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5d3423c4-ff78-4ff7-b42b-b3c93b309d52-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.046666 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/5d3423c4-ff78-4ff7-b42b-b3c93b309d52-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.048187 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "7d0b0e33-9ce7-44d9-8b09-6008715fe1a0" (UID: "7d0b0e33-9ce7-44d9-8b09-6008715fe1a0"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.049025 4558 generic.go:334] "Generic (PLEG): container finished" podID="ad708448-38df-4494-bca7-fe394c9b53a7" containerID="e1f52810d18aeba836cd426714154488ddb699c7ef7cb6b0251ec23a6345ebf4" exitCode=137 Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.049059 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"ad708448-38df-4494-bca7-fe394c9b53a7","Type":"ContainerDied","Data":"e1f52810d18aeba836cd426714154488ddb699c7ef7cb6b0251ec23a6345ebf4"} Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.049083 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"ad708448-38df-4494-bca7-fe394c9b53a7","Type":"ContainerDied","Data":"5d7945bb752c856572c7eb12ee3598fa0c9ce27902b86d35bf4dc5047564ded4"} Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.049235 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.049665 4558 scope.go:117] "RemoveContainer" containerID="759720ba5c84a0de6cd2dbe18773a4daeffbac9fb1b15d97e54f0ba7761ed858" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.070901 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "7d0b0e33-9ce7-44d9-8b09-6008715fe1a0" (UID: "7d0b0e33-9ce7-44d9-8b09-6008715fe1a0"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.071895 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-config" (OuterVolumeSpecName: "config") pod "7d0b0e33-9ce7-44d9-8b09-6008715fe1a0" (UID: "7d0b0e33-9ce7-44d9-8b09-6008715fe1a0"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.073885 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7d0b0e33-9ce7-44d9-8b09-6008715fe1a0" (UID: "7d0b0e33-9ce7-44d9-8b09-6008715fe1a0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.083668 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "7d0b0e33-9ce7-44d9-8b09-6008715fe1a0" (UID: "7d0b0e33-9ce7-44d9-8b09-6008715fe1a0"). InnerVolumeSpecName "ovndb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.132711 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-6b64ccd79d-5qj2x"] Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.133286 4558 scope.go:117] "RemoveContainer" containerID="0db5ea38beca13c9805cb3405d85b10e954c953b65c96cec72dc60b73ae7e610" Jan 20 17:01:15 crc kubenswrapper[4558]: E0120 17:01:15.133657 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0db5ea38beca13c9805cb3405d85b10e954c953b65c96cec72dc60b73ae7e610\": container with ID starting with 0db5ea38beca13c9805cb3405d85b10e954c953b65c96cec72dc60b73ae7e610 not found: ID does not exist" containerID="0db5ea38beca13c9805cb3405d85b10e954c953b65c96cec72dc60b73ae7e610" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.133687 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0db5ea38beca13c9805cb3405d85b10e954c953b65c96cec72dc60b73ae7e610"} err="failed to get container status \"0db5ea38beca13c9805cb3405d85b10e954c953b65c96cec72dc60b73ae7e610\": rpc error: code = NotFound desc = could not find container \"0db5ea38beca13c9805cb3405d85b10e954c953b65c96cec72dc60b73ae7e610\": container with ID starting with 0db5ea38beca13c9805cb3405d85b10e954c953b65c96cec72dc60b73ae7e610 not found: ID does not exist" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.133710 4558 scope.go:117] "RemoveContainer" containerID="759720ba5c84a0de6cd2dbe18773a4daeffbac9fb1b15d97e54f0ba7761ed858" Jan 20 17:01:15 crc kubenswrapper[4558]: E0120 17:01:15.133982 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"759720ba5c84a0de6cd2dbe18773a4daeffbac9fb1b15d97e54f0ba7761ed858\": container with ID starting with 759720ba5c84a0de6cd2dbe18773a4daeffbac9fb1b15d97e54f0ba7761ed858 not found: ID does not exist" containerID="759720ba5c84a0de6cd2dbe18773a4daeffbac9fb1b15d97e54f0ba7761ed858" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.134008 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"759720ba5c84a0de6cd2dbe18773a4daeffbac9fb1b15d97e54f0ba7761ed858"} err="failed to get container status \"759720ba5c84a0de6cd2dbe18773a4daeffbac9fb1b15d97e54f0ba7761ed858\": rpc error: code = NotFound desc = could not find container \"759720ba5c84a0de6cd2dbe18773a4daeffbac9fb1b15d97e54f0ba7761ed858\": container with ID starting with 759720ba5c84a0de6cd2dbe18773a4daeffbac9fb1b15d97e54f0ba7761ed858 not found: ID does not exist" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.134025 4558 scope.go:117] "RemoveContainer" containerID="37aa08ee2771757bea1365d0523225133f3f9a35715f5122f8e028516447be51" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.137775 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-6b64ccd79d-5qj2x"] Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.142104 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-storage-0"] Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.145468 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/swift-storage-0"] Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.148153 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.148185 4558 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.148194 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.148203 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-httpd-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.148212 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.151360 4558 scope.go:117] "RemoveContainer" containerID="b24670af574f9ed9a920845a520dd0b88057aa73de3d8a8c8308fb29c005498d" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.164581 4558 scope.go:117] "RemoveContainer" containerID="37aa08ee2771757bea1365d0523225133f3f9a35715f5122f8e028516447be51" Jan 20 17:01:15 crc kubenswrapper[4558]: E0120 17:01:15.164808 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"37aa08ee2771757bea1365d0523225133f3f9a35715f5122f8e028516447be51\": container with ID starting with 37aa08ee2771757bea1365d0523225133f3f9a35715f5122f8e028516447be51 not found: ID does not exist" containerID="37aa08ee2771757bea1365d0523225133f3f9a35715f5122f8e028516447be51" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.164833 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"37aa08ee2771757bea1365d0523225133f3f9a35715f5122f8e028516447be51"} err="failed to get container status \"37aa08ee2771757bea1365d0523225133f3f9a35715f5122f8e028516447be51\": rpc error: code = NotFound desc = could not find container \"37aa08ee2771757bea1365d0523225133f3f9a35715f5122f8e028516447be51\": container with ID starting with 37aa08ee2771757bea1365d0523225133f3f9a35715f5122f8e028516447be51 not found: ID does not exist" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.164849 4558 scope.go:117] "RemoveContainer" containerID="b24670af574f9ed9a920845a520dd0b88057aa73de3d8a8c8308fb29c005498d" Jan 20 17:01:15 crc kubenswrapper[4558]: E0120 17:01:15.165130 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b24670af574f9ed9a920845a520dd0b88057aa73de3d8a8c8308fb29c005498d\": container with ID starting with b24670af574f9ed9a920845a520dd0b88057aa73de3d8a8c8308fb29c005498d not found: ID does not exist" containerID="b24670af574f9ed9a920845a520dd0b88057aa73de3d8a8c8308fb29c005498d" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.165190 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b24670af574f9ed9a920845a520dd0b88057aa73de3d8a8c8308fb29c005498d"} err="failed to get container status 
\"b24670af574f9ed9a920845a520dd0b88057aa73de3d8a8c8308fb29c005498d\": rpc error: code = NotFound desc = could not find container \"b24670af574f9ed9a920845a520dd0b88057aa73de3d8a8c8308fb29c005498d\": container with ID starting with b24670af574f9ed9a920845a520dd0b88057aa73de3d8a8c8308fb29c005498d not found: ID does not exist" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.165216 4558 scope.go:117] "RemoveContainer" containerID="37aa08ee2771757bea1365d0523225133f3f9a35715f5122f8e028516447be51" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.165567 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"37aa08ee2771757bea1365d0523225133f3f9a35715f5122f8e028516447be51"} err="failed to get container status \"37aa08ee2771757bea1365d0523225133f3f9a35715f5122f8e028516447be51\": rpc error: code = NotFound desc = could not find container \"37aa08ee2771757bea1365d0523225133f3f9a35715f5122f8e028516447be51\": container with ID starting with 37aa08ee2771757bea1365d0523225133f3f9a35715f5122f8e028516447be51 not found: ID does not exist" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.165588 4558 scope.go:117] "RemoveContainer" containerID="b24670af574f9ed9a920845a520dd0b88057aa73de3d8a8c8308fb29c005498d" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.165753 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b24670af574f9ed9a920845a520dd0b88057aa73de3d8a8c8308fb29c005498d"} err="failed to get container status \"b24670af574f9ed9a920845a520dd0b88057aa73de3d8a8c8308fb29c005498d\": rpc error: code = NotFound desc = could not find container \"b24670af574f9ed9a920845a520dd0b88057aa73de3d8a8c8308fb29c005498d\": container with ID starting with b24670af574f9ed9a920845a520dd0b88057aa73de3d8a8c8308fb29c005498d not found: ID does not exist" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.165771 4558 scope.go:117] "RemoveContainer" containerID="e1f52810d18aeba836cd426714154488ddb699c7ef7cb6b0251ec23a6345ebf4" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.178617 4558 scope.go:117] "RemoveContainer" containerID="a0064904f7cf7b2f3f57d0cbd50180940cf612a73b85319dcd1b4cc9e5286c49" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.192833 4558 scope.go:117] "RemoveContainer" containerID="4fd13baec559163b2b66474d46cf26e69eb2ac8b71ab5c82119b909d78b9c3ec" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.203825 4558 scope.go:117] "RemoveContainer" containerID="7f4031ea42bd00352739604cfd6ebff1f1c035663b1e3b5d3f1ba025cafadaf4" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.215756 4558 scope.go:117] "RemoveContainer" containerID="b729d20938039dd3cd6701145536a565bd85b83711e04e7135e5d6f37060390e" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.227024 4558 scope.go:117] "RemoveContainer" containerID="5d77e588e0ee15e79eb372260c13103acd9bceda72bb63cee529297cc6f3bf98" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.238028 4558 scope.go:117] "RemoveContainer" containerID="90254ca174d78ea243b5ca11d013559dbc865a160f1d264e2b31efdc48f96768" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.250050 4558 scope.go:117] "RemoveContainer" containerID="aa379f22b8042a8997aeff6f2e901aac68a2feff4a66a0636172020228c651ff" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.260301 4558 scope.go:117] "RemoveContainer" containerID="58081c924356de25ebdeeecdbf39084210cf71b6f92814049f47601bfce7e1ef" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.271386 4558 
scope.go:117] "RemoveContainer" containerID="e121933f3a246845b6169455f9e1e99e6b2a61bce703613cb6fb31821719438d" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.282120 4558 scope.go:117] "RemoveContainer" containerID="1e0a8b23c89daae563498da37b455997040b6b47b7a29c5c9590feba957d5e7e" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.293837 4558 scope.go:117] "RemoveContainer" containerID="4c8b93db2628bf0f39a03755d724643f7c79df42b7cf8e54ef0a1ee499194855" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.304554 4558 scope.go:117] "RemoveContainer" containerID="9502d1598a1db6c1187fe75f1d6475e3133bb27c977752998b79b425efafa253" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.315741 4558 scope.go:117] "RemoveContainer" containerID="e13ad48d737a5f4a2b0376363ec37ed14a4cc5193e36abfdc9038bf0f4fc0e27" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.326417 4558 scope.go:117] "RemoveContainer" containerID="24445a4a28eeea3fb63d875b16f990af43a36181cb7778f0d316092430ca7285" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.338259 4558 scope.go:117] "RemoveContainer" containerID="e1f52810d18aeba836cd426714154488ddb699c7ef7cb6b0251ec23a6345ebf4" Jan 20 17:01:15 crc kubenswrapper[4558]: E0120 17:01:15.338529 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e1f52810d18aeba836cd426714154488ddb699c7ef7cb6b0251ec23a6345ebf4\": container with ID starting with e1f52810d18aeba836cd426714154488ddb699c7ef7cb6b0251ec23a6345ebf4 not found: ID does not exist" containerID="e1f52810d18aeba836cd426714154488ddb699c7ef7cb6b0251ec23a6345ebf4" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.338563 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e1f52810d18aeba836cd426714154488ddb699c7ef7cb6b0251ec23a6345ebf4"} err="failed to get container status \"e1f52810d18aeba836cd426714154488ddb699c7ef7cb6b0251ec23a6345ebf4\": rpc error: code = NotFound desc = could not find container \"e1f52810d18aeba836cd426714154488ddb699c7ef7cb6b0251ec23a6345ebf4\": container with ID starting with e1f52810d18aeba836cd426714154488ddb699c7ef7cb6b0251ec23a6345ebf4 not found: ID does not exist" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.338589 4558 scope.go:117] "RemoveContainer" containerID="a0064904f7cf7b2f3f57d0cbd50180940cf612a73b85319dcd1b4cc9e5286c49" Jan 20 17:01:15 crc kubenswrapper[4558]: E0120 17:01:15.338824 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a0064904f7cf7b2f3f57d0cbd50180940cf612a73b85319dcd1b4cc9e5286c49\": container with ID starting with a0064904f7cf7b2f3f57d0cbd50180940cf612a73b85319dcd1b4cc9e5286c49 not found: ID does not exist" containerID="a0064904f7cf7b2f3f57d0cbd50180940cf612a73b85319dcd1b4cc9e5286c49" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.338848 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a0064904f7cf7b2f3f57d0cbd50180940cf612a73b85319dcd1b4cc9e5286c49"} err="failed to get container status \"a0064904f7cf7b2f3f57d0cbd50180940cf612a73b85319dcd1b4cc9e5286c49\": rpc error: code = NotFound desc = could not find container \"a0064904f7cf7b2f3f57d0cbd50180940cf612a73b85319dcd1b4cc9e5286c49\": container with ID starting with a0064904f7cf7b2f3f57d0cbd50180940cf612a73b85319dcd1b4cc9e5286c49 not found: ID does not exist" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.338862 4558 
scope.go:117] "RemoveContainer" containerID="4fd13baec559163b2b66474d46cf26e69eb2ac8b71ab5c82119b909d78b9c3ec" Jan 20 17:01:15 crc kubenswrapper[4558]: E0120 17:01:15.339078 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4fd13baec559163b2b66474d46cf26e69eb2ac8b71ab5c82119b909d78b9c3ec\": container with ID starting with 4fd13baec559163b2b66474d46cf26e69eb2ac8b71ab5c82119b909d78b9c3ec not found: ID does not exist" containerID="4fd13baec559163b2b66474d46cf26e69eb2ac8b71ab5c82119b909d78b9c3ec" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.339106 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4fd13baec559163b2b66474d46cf26e69eb2ac8b71ab5c82119b909d78b9c3ec"} err="failed to get container status \"4fd13baec559163b2b66474d46cf26e69eb2ac8b71ab5c82119b909d78b9c3ec\": rpc error: code = NotFound desc = could not find container \"4fd13baec559163b2b66474d46cf26e69eb2ac8b71ab5c82119b909d78b9c3ec\": container with ID starting with 4fd13baec559163b2b66474d46cf26e69eb2ac8b71ab5c82119b909d78b9c3ec not found: ID does not exist" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.339125 4558 scope.go:117] "RemoveContainer" containerID="7f4031ea42bd00352739604cfd6ebff1f1c035663b1e3b5d3f1ba025cafadaf4" Jan 20 17:01:15 crc kubenswrapper[4558]: E0120 17:01:15.339419 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7f4031ea42bd00352739604cfd6ebff1f1c035663b1e3b5d3f1ba025cafadaf4\": container with ID starting with 7f4031ea42bd00352739604cfd6ebff1f1c035663b1e3b5d3f1ba025cafadaf4 not found: ID does not exist" containerID="7f4031ea42bd00352739604cfd6ebff1f1c035663b1e3b5d3f1ba025cafadaf4" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.339439 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7f4031ea42bd00352739604cfd6ebff1f1c035663b1e3b5d3f1ba025cafadaf4"} err="failed to get container status \"7f4031ea42bd00352739604cfd6ebff1f1c035663b1e3b5d3f1ba025cafadaf4\": rpc error: code = NotFound desc = could not find container \"7f4031ea42bd00352739604cfd6ebff1f1c035663b1e3b5d3f1ba025cafadaf4\": container with ID starting with 7f4031ea42bd00352739604cfd6ebff1f1c035663b1e3b5d3f1ba025cafadaf4 not found: ID does not exist" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.339453 4558 scope.go:117] "RemoveContainer" containerID="b729d20938039dd3cd6701145536a565bd85b83711e04e7135e5d6f37060390e" Jan 20 17:01:15 crc kubenswrapper[4558]: E0120 17:01:15.339635 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b729d20938039dd3cd6701145536a565bd85b83711e04e7135e5d6f37060390e\": container with ID starting with b729d20938039dd3cd6701145536a565bd85b83711e04e7135e5d6f37060390e not found: ID does not exist" containerID="b729d20938039dd3cd6701145536a565bd85b83711e04e7135e5d6f37060390e" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.339659 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b729d20938039dd3cd6701145536a565bd85b83711e04e7135e5d6f37060390e"} err="failed to get container status \"b729d20938039dd3cd6701145536a565bd85b83711e04e7135e5d6f37060390e\": rpc error: code = NotFound desc = could not find container \"b729d20938039dd3cd6701145536a565bd85b83711e04e7135e5d6f37060390e\": container with ID starting with 
b729d20938039dd3cd6701145536a565bd85b83711e04e7135e5d6f37060390e not found: ID does not exist" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.339673 4558 scope.go:117] "RemoveContainer" containerID="5d77e588e0ee15e79eb372260c13103acd9bceda72bb63cee529297cc6f3bf98" Jan 20 17:01:15 crc kubenswrapper[4558]: E0120 17:01:15.339904 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5d77e588e0ee15e79eb372260c13103acd9bceda72bb63cee529297cc6f3bf98\": container with ID starting with 5d77e588e0ee15e79eb372260c13103acd9bceda72bb63cee529297cc6f3bf98 not found: ID does not exist" containerID="5d77e588e0ee15e79eb372260c13103acd9bceda72bb63cee529297cc6f3bf98" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.339928 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5d77e588e0ee15e79eb372260c13103acd9bceda72bb63cee529297cc6f3bf98"} err="failed to get container status \"5d77e588e0ee15e79eb372260c13103acd9bceda72bb63cee529297cc6f3bf98\": rpc error: code = NotFound desc = could not find container \"5d77e588e0ee15e79eb372260c13103acd9bceda72bb63cee529297cc6f3bf98\": container with ID starting with 5d77e588e0ee15e79eb372260c13103acd9bceda72bb63cee529297cc6f3bf98 not found: ID does not exist" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.339942 4558 scope.go:117] "RemoveContainer" containerID="90254ca174d78ea243b5ca11d013559dbc865a160f1d264e2b31efdc48f96768" Jan 20 17:01:15 crc kubenswrapper[4558]: E0120 17:01:15.340195 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"90254ca174d78ea243b5ca11d013559dbc865a160f1d264e2b31efdc48f96768\": container with ID starting with 90254ca174d78ea243b5ca11d013559dbc865a160f1d264e2b31efdc48f96768 not found: ID does not exist" containerID="90254ca174d78ea243b5ca11d013559dbc865a160f1d264e2b31efdc48f96768" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.340226 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"90254ca174d78ea243b5ca11d013559dbc865a160f1d264e2b31efdc48f96768"} err="failed to get container status \"90254ca174d78ea243b5ca11d013559dbc865a160f1d264e2b31efdc48f96768\": rpc error: code = NotFound desc = could not find container \"90254ca174d78ea243b5ca11d013559dbc865a160f1d264e2b31efdc48f96768\": container with ID starting with 90254ca174d78ea243b5ca11d013559dbc865a160f1d264e2b31efdc48f96768 not found: ID does not exist" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.340240 4558 scope.go:117] "RemoveContainer" containerID="aa379f22b8042a8997aeff6f2e901aac68a2feff4a66a0636172020228c651ff" Jan 20 17:01:15 crc kubenswrapper[4558]: E0120 17:01:15.340480 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aa379f22b8042a8997aeff6f2e901aac68a2feff4a66a0636172020228c651ff\": container with ID starting with aa379f22b8042a8997aeff6f2e901aac68a2feff4a66a0636172020228c651ff not found: ID does not exist" containerID="aa379f22b8042a8997aeff6f2e901aac68a2feff4a66a0636172020228c651ff" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.340504 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aa379f22b8042a8997aeff6f2e901aac68a2feff4a66a0636172020228c651ff"} err="failed to get container status \"aa379f22b8042a8997aeff6f2e901aac68a2feff4a66a0636172020228c651ff\": rpc 
error: code = NotFound desc = could not find container \"aa379f22b8042a8997aeff6f2e901aac68a2feff4a66a0636172020228c651ff\": container with ID starting with aa379f22b8042a8997aeff6f2e901aac68a2feff4a66a0636172020228c651ff not found: ID does not exist" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.340518 4558 scope.go:117] "RemoveContainer" containerID="58081c924356de25ebdeeecdbf39084210cf71b6f92814049f47601bfce7e1ef" Jan 20 17:01:15 crc kubenswrapper[4558]: E0120 17:01:15.340713 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"58081c924356de25ebdeeecdbf39084210cf71b6f92814049f47601bfce7e1ef\": container with ID starting with 58081c924356de25ebdeeecdbf39084210cf71b6f92814049f47601bfce7e1ef not found: ID does not exist" containerID="58081c924356de25ebdeeecdbf39084210cf71b6f92814049f47601bfce7e1ef" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.340736 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"58081c924356de25ebdeeecdbf39084210cf71b6f92814049f47601bfce7e1ef"} err="failed to get container status \"58081c924356de25ebdeeecdbf39084210cf71b6f92814049f47601bfce7e1ef\": rpc error: code = NotFound desc = could not find container \"58081c924356de25ebdeeecdbf39084210cf71b6f92814049f47601bfce7e1ef\": container with ID starting with 58081c924356de25ebdeeecdbf39084210cf71b6f92814049f47601bfce7e1ef not found: ID does not exist" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.340750 4558 scope.go:117] "RemoveContainer" containerID="e121933f3a246845b6169455f9e1e99e6b2a61bce703613cb6fb31821719438d" Jan 20 17:01:15 crc kubenswrapper[4558]: E0120 17:01:15.340964 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e121933f3a246845b6169455f9e1e99e6b2a61bce703613cb6fb31821719438d\": container with ID starting with e121933f3a246845b6169455f9e1e99e6b2a61bce703613cb6fb31821719438d not found: ID does not exist" containerID="e121933f3a246845b6169455f9e1e99e6b2a61bce703613cb6fb31821719438d" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.340983 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e121933f3a246845b6169455f9e1e99e6b2a61bce703613cb6fb31821719438d"} err="failed to get container status \"e121933f3a246845b6169455f9e1e99e6b2a61bce703613cb6fb31821719438d\": rpc error: code = NotFound desc = could not find container \"e121933f3a246845b6169455f9e1e99e6b2a61bce703613cb6fb31821719438d\": container with ID starting with e121933f3a246845b6169455f9e1e99e6b2a61bce703613cb6fb31821719438d not found: ID does not exist" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.341001 4558 scope.go:117] "RemoveContainer" containerID="1e0a8b23c89daae563498da37b455997040b6b47b7a29c5c9590feba957d5e7e" Jan 20 17:01:15 crc kubenswrapper[4558]: E0120 17:01:15.341261 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1e0a8b23c89daae563498da37b455997040b6b47b7a29c5c9590feba957d5e7e\": container with ID starting with 1e0a8b23c89daae563498da37b455997040b6b47b7a29c5c9590feba957d5e7e not found: ID does not exist" containerID="1e0a8b23c89daae563498da37b455997040b6b47b7a29c5c9590feba957d5e7e" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.341282 4558 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"1e0a8b23c89daae563498da37b455997040b6b47b7a29c5c9590feba957d5e7e"} err="failed to get container status \"1e0a8b23c89daae563498da37b455997040b6b47b7a29c5c9590feba957d5e7e\": rpc error: code = NotFound desc = could not find container \"1e0a8b23c89daae563498da37b455997040b6b47b7a29c5c9590feba957d5e7e\": container with ID starting with 1e0a8b23c89daae563498da37b455997040b6b47b7a29c5c9590feba957d5e7e not found: ID does not exist" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.341296 4558 scope.go:117] "RemoveContainer" containerID="4c8b93db2628bf0f39a03755d724643f7c79df42b7cf8e54ef0a1ee499194855" Jan 20 17:01:15 crc kubenswrapper[4558]: E0120 17:01:15.341521 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4c8b93db2628bf0f39a03755d724643f7c79df42b7cf8e54ef0a1ee499194855\": container with ID starting with 4c8b93db2628bf0f39a03755d724643f7c79df42b7cf8e54ef0a1ee499194855 not found: ID does not exist" containerID="4c8b93db2628bf0f39a03755d724643f7c79df42b7cf8e54ef0a1ee499194855" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.341541 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4c8b93db2628bf0f39a03755d724643f7c79df42b7cf8e54ef0a1ee499194855"} err="failed to get container status \"4c8b93db2628bf0f39a03755d724643f7c79df42b7cf8e54ef0a1ee499194855\": rpc error: code = NotFound desc = could not find container \"4c8b93db2628bf0f39a03755d724643f7c79df42b7cf8e54ef0a1ee499194855\": container with ID starting with 4c8b93db2628bf0f39a03755d724643f7c79df42b7cf8e54ef0a1ee499194855 not found: ID does not exist" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.341553 4558 scope.go:117] "RemoveContainer" containerID="9502d1598a1db6c1187fe75f1d6475e3133bb27c977752998b79b425efafa253" Jan 20 17:01:15 crc kubenswrapper[4558]: E0120 17:01:15.341747 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9502d1598a1db6c1187fe75f1d6475e3133bb27c977752998b79b425efafa253\": container with ID starting with 9502d1598a1db6c1187fe75f1d6475e3133bb27c977752998b79b425efafa253 not found: ID does not exist" containerID="9502d1598a1db6c1187fe75f1d6475e3133bb27c977752998b79b425efafa253" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.341773 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9502d1598a1db6c1187fe75f1d6475e3133bb27c977752998b79b425efafa253"} err="failed to get container status \"9502d1598a1db6c1187fe75f1d6475e3133bb27c977752998b79b425efafa253\": rpc error: code = NotFound desc = could not find container \"9502d1598a1db6c1187fe75f1d6475e3133bb27c977752998b79b425efafa253\": container with ID starting with 9502d1598a1db6c1187fe75f1d6475e3133bb27c977752998b79b425efafa253 not found: ID does not exist" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.341787 4558 scope.go:117] "RemoveContainer" containerID="e13ad48d737a5f4a2b0376363ec37ed14a4cc5193e36abfdc9038bf0f4fc0e27" Jan 20 17:01:15 crc kubenswrapper[4558]: E0120 17:01:15.342289 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e13ad48d737a5f4a2b0376363ec37ed14a4cc5193e36abfdc9038bf0f4fc0e27\": container with ID starting with e13ad48d737a5f4a2b0376363ec37ed14a4cc5193e36abfdc9038bf0f4fc0e27 not found: ID does not exist" 
containerID="e13ad48d737a5f4a2b0376363ec37ed14a4cc5193e36abfdc9038bf0f4fc0e27" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.342309 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e13ad48d737a5f4a2b0376363ec37ed14a4cc5193e36abfdc9038bf0f4fc0e27"} err="failed to get container status \"e13ad48d737a5f4a2b0376363ec37ed14a4cc5193e36abfdc9038bf0f4fc0e27\": rpc error: code = NotFound desc = could not find container \"e13ad48d737a5f4a2b0376363ec37ed14a4cc5193e36abfdc9038bf0f4fc0e27\": container with ID starting with e13ad48d737a5f4a2b0376363ec37ed14a4cc5193e36abfdc9038bf0f4fc0e27 not found: ID does not exist" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.342320 4558 scope.go:117] "RemoveContainer" containerID="24445a4a28eeea3fb63d875b16f990af43a36181cb7778f0d316092430ca7285" Jan 20 17:01:15 crc kubenswrapper[4558]: E0120 17:01:15.342717 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"24445a4a28eeea3fb63d875b16f990af43a36181cb7778f0d316092430ca7285\": container with ID starting with 24445a4a28eeea3fb63d875b16f990af43a36181cb7778f0d316092430ca7285 not found: ID does not exist" containerID="24445a4a28eeea3fb63d875b16f990af43a36181cb7778f0d316092430ca7285" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.342737 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"24445a4a28eeea3fb63d875b16f990af43a36181cb7778f0d316092430ca7285"} err="failed to get container status \"24445a4a28eeea3fb63d875b16f990af43a36181cb7778f0d316092430ca7285\": rpc error: code = NotFound desc = could not find container \"24445a4a28eeea3fb63d875b16f990af43a36181cb7778f0d316092430ca7285\": container with ID starting with 24445a4a28eeea3fb63d875b16f990af43a36181cb7778f0d316092430ca7285 not found: ID does not exist" Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.364186 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-5d557df858-frznf"] Jan 20 17:01:15 crc kubenswrapper[4558]: I0120 17:01:15.368264 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-5d557df858-frznf"] Jan 20 17:01:16 crc kubenswrapper[4558]: E0120 17:01:16.169950 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/barbican-config-data: secret "barbican-config-data" not found Jan 20 17:01:16 crc kubenswrapper[4558]: E0120 17:01:16.170202 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/de7f5467-1e83-42f0-86bb-ade85deec8f3-config-data podName:de7f5467-1e83-42f0-86bb-ade85deec8f3 nodeName:}" failed. No retries permitted until 2026-01-20 17:01:48.170189383 +0000 UTC m=+1201.930527349 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/de7f5467-1e83-42f0-86bb-ade85deec8f3-config-data") pod "barbican-worker-c4f864f89-58gb9" (UID: "de7f5467-1e83-42f0-86bb-ade85deec8f3") : secret "barbican-config-data" not found Jan 20 17:01:16 crc kubenswrapper[4558]: E0120 17:01:16.169957 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:01:16 crc kubenswrapper[4558]: E0120 17:01:16.170247 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/de7f5467-1e83-42f0-86bb-ade85deec8f3-combined-ca-bundle podName:de7f5467-1e83-42f0-86bb-ade85deec8f3 nodeName:}" failed. 
No retries permitted until 2026-01-20 17:01:48.170241091 +0000 UTC m=+1201.930579058 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/de7f5467-1e83-42f0-86bb-ade85deec8f3-combined-ca-bundle") pod "barbican-worker-c4f864f89-58gb9" (UID: "de7f5467-1e83-42f0-86bb-ade85deec8f3") : secret "combined-ca-bundle" not found Jan 20 17:01:16 crc kubenswrapper[4558]: E0120 17:01:16.170014 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/barbican-worker-config-data: secret "barbican-worker-config-data" not found Jan 20 17:01:16 crc kubenswrapper[4558]: E0120 17:01:16.170311 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/de7f5467-1e83-42f0-86bb-ade85deec8f3-config-data-custom podName:de7f5467-1e83-42f0-86bb-ade85deec8f3 nodeName:}" failed. No retries permitted until 2026-01-20 17:01:48.170299129 +0000 UTC m=+1201.930637097 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "config-data-custom" (UniqueName: "kubernetes.io/secret/de7f5467-1e83-42f0-86bb-ade85deec8f3-config-data-custom") pod "barbican-worker-c4f864f89-58gb9" (UID: "de7f5467-1e83-42f0-86bb-ade85deec8f3") : secret "barbican-worker-config-data" not found Jan 20 17:01:16 crc kubenswrapper[4558]: E0120 17:01:16.372016 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/barbican-config-data: secret "barbican-config-data" not found Jan 20 17:01:16 crc kubenswrapper[4558]: E0120 17:01:16.372070 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7776f921-0ae7-4a8c-b444-cf5b4b9a4f65-config-data podName:7776f921-0ae7-4a8c-b444-cf5b4b9a4f65 nodeName:}" failed. No retries permitted until 2026-01-20 17:01:48.372057121 +0000 UTC m=+1202.132395088 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/7776f921-0ae7-4a8c-b444-cf5b4b9a4f65-config-data") pod "barbican-keystone-listener-7fc5477d66-pz8q2" (UID: "7776f921-0ae7-4a8c-b444-cf5b4b9a4f65") : secret "barbican-config-data" not found Jan 20 17:01:16 crc kubenswrapper[4558]: E0120 17:01:16.372120 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/barbican-keystone-listener-config-data: secret "barbican-keystone-listener-config-data" not found Jan 20 17:01:16 crc kubenswrapper[4558]: E0120 17:01:16.372149 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:01:16 crc kubenswrapper[4558]: E0120 17:01:16.372199 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7776f921-0ae7-4a8c-b444-cf5b4b9a4f65-config-data-custom podName:7776f921-0ae7-4a8c-b444-cf5b4b9a4f65 nodeName:}" failed. No retries permitted until 2026-01-20 17:01:48.372184851 +0000 UTC m=+1202.132522818 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "config-data-custom" (UniqueName: "kubernetes.io/secret/7776f921-0ae7-4a8c-b444-cf5b4b9a4f65-config-data-custom") pod "barbican-keystone-listener-7fc5477d66-pz8q2" (UID: "7776f921-0ae7-4a8c-b444-cf5b4b9a4f65") : secret "barbican-keystone-listener-config-data" not found Jan 20 17:01:16 crc kubenswrapper[4558]: E0120 17:01:16.372244 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7776f921-0ae7-4a8c-b444-cf5b4b9a4f65-combined-ca-bundle podName:7776f921-0ae7-4a8c-b444-cf5b4b9a4f65 nodeName:}" failed. 
No retries permitted until 2026-01-20 17:01:48.372215659 +0000 UTC m=+1202.132553636 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/7776f921-0ae7-4a8c-b444-cf5b4b9a4f65-combined-ca-bundle") pod "barbican-keystone-listener-7fc5477d66-pz8q2" (UID: "7776f921-0ae7-4a8c-b444-cf5b4b9a4f65") : secret "combined-ca-bundle" not found Jan 20 17:01:16 crc kubenswrapper[4558]: I0120 17:01:16.571947 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5d3423c4-ff78-4ff7-b42b-b3c93b309d52" path="/var/lib/kubelet/pods/5d3423c4-ff78-4ff7-b42b-b3c93b309d52/volumes" Jan 20 17:01:16 crc kubenswrapper[4558]: I0120 17:01:16.572493 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7d0b0e33-9ce7-44d9-8b09-6008715fe1a0" path="/var/lib/kubelet/pods/7d0b0e33-9ce7-44d9-8b09-6008715fe1a0/volumes" Jan 20 17:01:16 crc kubenswrapper[4558]: I0120 17:01:16.573004 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ad708448-38df-4494-bca7-fe394c9b53a7" path="/var/lib/kubelet/pods/ad708448-38df-4494-bca7-fe394c9b53a7/volumes" Jan 20 17:01:17 crc kubenswrapper[4558]: I0120 17:01:17.063973 4558 generic.go:334] "Generic (PLEG): container finished" podID="7776f921-0ae7-4a8c-b444-cf5b4b9a4f65" containerID="6c9fee82734b4e79e64bd03d6cf2d27e8723c07ddab900b9256e6a58cb6d0abe" exitCode=137 Jan 20 17:01:17 crc kubenswrapper[4558]: I0120 17:01:17.064020 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-7fc5477d66-pz8q2" event={"ID":"7776f921-0ae7-4a8c-b444-cf5b4b9a4f65","Type":"ContainerDied","Data":"6c9fee82734b4e79e64bd03d6cf2d27e8723c07ddab900b9256e6a58cb6d0abe"} Jan 20 17:01:17 crc kubenswrapper[4558]: I0120 17:01:17.066245 4558 generic.go:334] "Generic (PLEG): container finished" podID="de7f5467-1e83-42f0-86bb-ade85deec8f3" containerID="ee8ddf384630628e434ded9ac4a8dd2f149e7593afd334474324206c1828ec18" exitCode=137 Jan 20 17:01:17 crc kubenswrapper[4558]: I0120 17:01:17.066262 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-c4f864f89-58gb9" event={"ID":"de7f5467-1e83-42f0-86bb-ade85deec8f3","Type":"ContainerDied","Data":"ee8ddf384630628e434ded9ac4a8dd2f149e7593afd334474324206c1828ec18"} Jan 20 17:01:17 crc kubenswrapper[4558]: I0120 17:01:17.138143 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-c4f864f89-58gb9" Jan 20 17:01:17 crc kubenswrapper[4558]: I0120 17:01:17.139384 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-7fc5477d66-pz8q2" Jan 20 17:01:17 crc kubenswrapper[4558]: I0120 17:01:17.282608 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/de7f5467-1e83-42f0-86bb-ade85deec8f3-config-data\") pod \"de7f5467-1e83-42f0-86bb-ade85deec8f3\" (UID: \"de7f5467-1e83-42f0-86bb-ade85deec8f3\") " Jan 20 17:01:17 crc kubenswrapper[4558]: I0120 17:01:17.282672 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mss66\" (UniqueName: \"kubernetes.io/projected/de7f5467-1e83-42f0-86bb-ade85deec8f3-kube-api-access-mss66\") pod \"de7f5467-1e83-42f0-86bb-ade85deec8f3\" (UID: \"de7f5467-1e83-42f0-86bb-ade85deec8f3\") " Jan 20 17:01:17 crc kubenswrapper[4558]: I0120 17:01:17.282725 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7776f921-0ae7-4a8c-b444-cf5b4b9a4f65-combined-ca-bundle\") pod \"7776f921-0ae7-4a8c-b444-cf5b4b9a4f65\" (UID: \"7776f921-0ae7-4a8c-b444-cf5b4b9a4f65\") " Jan 20 17:01:17 crc kubenswrapper[4558]: I0120 17:01:17.282744 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fr8kp\" (UniqueName: \"kubernetes.io/projected/7776f921-0ae7-4a8c-b444-cf5b4b9a4f65-kube-api-access-fr8kp\") pod \"7776f921-0ae7-4a8c-b444-cf5b4b9a4f65\" (UID: \"7776f921-0ae7-4a8c-b444-cf5b4b9a4f65\") " Jan 20 17:01:17 crc kubenswrapper[4558]: I0120 17:01:17.282775 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/de7f5467-1e83-42f0-86bb-ade85deec8f3-logs\") pod \"de7f5467-1e83-42f0-86bb-ade85deec8f3\" (UID: \"de7f5467-1e83-42f0-86bb-ade85deec8f3\") " Jan 20 17:01:17 crc kubenswrapper[4558]: I0120 17:01:17.282793 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7776f921-0ae7-4a8c-b444-cf5b4b9a4f65-config-data\") pod \"7776f921-0ae7-4a8c-b444-cf5b4b9a4f65\" (UID: \"7776f921-0ae7-4a8c-b444-cf5b4b9a4f65\") " Jan 20 17:01:17 crc kubenswrapper[4558]: I0120 17:01:17.282819 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7776f921-0ae7-4a8c-b444-cf5b4b9a4f65-logs\") pod \"7776f921-0ae7-4a8c-b444-cf5b4b9a4f65\" (UID: \"7776f921-0ae7-4a8c-b444-cf5b4b9a4f65\") " Jan 20 17:01:17 crc kubenswrapper[4558]: I0120 17:01:17.282848 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/de7f5467-1e83-42f0-86bb-ade85deec8f3-config-data-custom\") pod \"de7f5467-1e83-42f0-86bb-ade85deec8f3\" (UID: \"de7f5467-1e83-42f0-86bb-ade85deec8f3\") " Jan 20 17:01:17 crc kubenswrapper[4558]: I0120 17:01:17.282871 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de7f5467-1e83-42f0-86bb-ade85deec8f3-combined-ca-bundle\") pod \"de7f5467-1e83-42f0-86bb-ade85deec8f3\" (UID: \"de7f5467-1e83-42f0-86bb-ade85deec8f3\") " Jan 20 17:01:17 crc kubenswrapper[4558]: I0120 17:01:17.282958 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7776f921-0ae7-4a8c-b444-cf5b4b9a4f65-config-data-custom\") pod 
\"7776f921-0ae7-4a8c-b444-cf5b4b9a4f65\" (UID: \"7776f921-0ae7-4a8c-b444-cf5b4b9a4f65\") " Jan 20 17:01:17 crc kubenswrapper[4558]: I0120 17:01:17.283241 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/de7f5467-1e83-42f0-86bb-ade85deec8f3-logs" (OuterVolumeSpecName: "logs") pod "de7f5467-1e83-42f0-86bb-ade85deec8f3" (UID: "de7f5467-1e83-42f0-86bb-ade85deec8f3"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:01:17 crc kubenswrapper[4558]: I0120 17:01:17.283428 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7776f921-0ae7-4a8c-b444-cf5b4b9a4f65-logs" (OuterVolumeSpecName: "logs") pod "7776f921-0ae7-4a8c-b444-cf5b4b9a4f65" (UID: "7776f921-0ae7-4a8c-b444-cf5b4b9a4f65"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:01:17 crc kubenswrapper[4558]: I0120 17:01:17.283538 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/de7f5467-1e83-42f0-86bb-ade85deec8f3-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:01:17 crc kubenswrapper[4558]: I0120 17:01:17.283559 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7776f921-0ae7-4a8c-b444-cf5b4b9a4f65-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:01:17 crc kubenswrapper[4558]: I0120 17:01:17.286197 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/de7f5467-1e83-42f0-86bb-ade85deec8f3-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "de7f5467-1e83-42f0-86bb-ade85deec8f3" (UID: "de7f5467-1e83-42f0-86bb-ade85deec8f3"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:01:17 crc kubenswrapper[4558]: I0120 17:01:17.286287 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/de7f5467-1e83-42f0-86bb-ade85deec8f3-kube-api-access-mss66" (OuterVolumeSpecName: "kube-api-access-mss66") pod "de7f5467-1e83-42f0-86bb-ade85deec8f3" (UID: "de7f5467-1e83-42f0-86bb-ade85deec8f3"). InnerVolumeSpecName "kube-api-access-mss66". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:01:17 crc kubenswrapper[4558]: I0120 17:01:17.286430 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7776f921-0ae7-4a8c-b444-cf5b4b9a4f65-kube-api-access-fr8kp" (OuterVolumeSpecName: "kube-api-access-fr8kp") pod "7776f921-0ae7-4a8c-b444-cf5b4b9a4f65" (UID: "7776f921-0ae7-4a8c-b444-cf5b4b9a4f65"). InnerVolumeSpecName "kube-api-access-fr8kp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:01:17 crc kubenswrapper[4558]: I0120 17:01:17.287291 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7776f921-0ae7-4a8c-b444-cf5b4b9a4f65-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "7776f921-0ae7-4a8c-b444-cf5b4b9a4f65" (UID: "7776f921-0ae7-4a8c-b444-cf5b4b9a4f65"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:01:17 crc kubenswrapper[4558]: I0120 17:01:17.299496 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/de7f5467-1e83-42f0-86bb-ade85deec8f3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "de7f5467-1e83-42f0-86bb-ade85deec8f3" (UID: "de7f5467-1e83-42f0-86bb-ade85deec8f3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:01:17 crc kubenswrapper[4558]: I0120 17:01:17.299893 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7776f921-0ae7-4a8c-b444-cf5b4b9a4f65-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7776f921-0ae7-4a8c-b444-cf5b4b9a4f65" (UID: "7776f921-0ae7-4a8c-b444-cf5b4b9a4f65"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:01:17 crc kubenswrapper[4558]: I0120 17:01:17.310822 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/de7f5467-1e83-42f0-86bb-ade85deec8f3-config-data" (OuterVolumeSpecName: "config-data") pod "de7f5467-1e83-42f0-86bb-ade85deec8f3" (UID: "de7f5467-1e83-42f0-86bb-ade85deec8f3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:01:17 crc kubenswrapper[4558]: I0120 17:01:17.319256 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7776f921-0ae7-4a8c-b444-cf5b4b9a4f65-config-data" (OuterVolumeSpecName: "config-data") pod "7776f921-0ae7-4a8c-b444-cf5b4b9a4f65" (UID: "7776f921-0ae7-4a8c-b444-cf5b4b9a4f65"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:01:17 crc kubenswrapper[4558]: I0120 17:01:17.384413 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7776f921-0ae7-4a8c-b444-cf5b4b9a4f65-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:01:17 crc kubenswrapper[4558]: I0120 17:01:17.385034 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fr8kp\" (UniqueName: \"kubernetes.io/projected/7776f921-0ae7-4a8c-b444-cf5b4b9a4f65-kube-api-access-fr8kp\") on node \"crc\" DevicePath \"\"" Jan 20 17:01:17 crc kubenswrapper[4558]: I0120 17:01:17.385093 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7776f921-0ae7-4a8c-b444-cf5b4b9a4f65-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:01:17 crc kubenswrapper[4558]: I0120 17:01:17.385232 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/de7f5467-1e83-42f0-86bb-ade85deec8f3-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:01:17 crc kubenswrapper[4558]: I0120 17:01:17.385289 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de7f5467-1e83-42f0-86bb-ade85deec8f3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:01:17 crc kubenswrapper[4558]: I0120 17:01:17.385334 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7776f921-0ae7-4a8c-b444-cf5b4b9a4f65-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:01:17 crc kubenswrapper[4558]: I0120 17:01:17.385424 4558 reconciler_common.go:293] "Volume detached for 
volume \"config-data\" (UniqueName: \"kubernetes.io/secret/de7f5467-1e83-42f0-86bb-ade85deec8f3-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:01:17 crc kubenswrapper[4558]: I0120 17:01:17.385461 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mss66\" (UniqueName: \"kubernetes.io/projected/de7f5467-1e83-42f0-86bb-ade85deec8f3-kube-api-access-mss66\") on node \"crc\" DevicePath \"\"" Jan 20 17:01:18 crc kubenswrapper[4558]: I0120 17:01:18.073679 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-7fc5477d66-pz8q2" event={"ID":"7776f921-0ae7-4a8c-b444-cf5b4b9a4f65","Type":"ContainerDied","Data":"522cb191427e167fad026915618db8f72d3e02041ca1567fe15a15da684ab56d"} Jan 20 17:01:18 crc kubenswrapper[4558]: I0120 17:01:18.073696 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-7fc5477d66-pz8q2" Jan 20 17:01:18 crc kubenswrapper[4558]: I0120 17:01:18.073723 4558 scope.go:117] "RemoveContainer" containerID="6c9fee82734b4e79e64bd03d6cf2d27e8723c07ddab900b9256e6a58cb6d0abe" Jan 20 17:01:18 crc kubenswrapper[4558]: I0120 17:01:18.076771 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-c4f864f89-58gb9" event={"ID":"de7f5467-1e83-42f0-86bb-ade85deec8f3","Type":"ContainerDied","Data":"90bf224df8f11e1c14a75f4497785be566707b0ba13b9201222741bded247634"} Jan 20 17:01:18 crc kubenswrapper[4558]: I0120 17:01:18.076794 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-c4f864f89-58gb9" Jan 20 17:01:18 crc kubenswrapper[4558]: I0120 17:01:18.092148 4558 scope.go:117] "RemoveContainer" containerID="6b14e8b1ef1901e159022cfb9e13791cc8bca3602aeca47855720efbfe3f6831" Jan 20 17:01:18 crc kubenswrapper[4558]: I0120 17:01:18.099427 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-7fc5477d66-pz8q2"] Jan 20 17:01:18 crc kubenswrapper[4558]: I0120 17:01:18.105566 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-7fc5477d66-pz8q2"] Jan 20 17:01:18 crc kubenswrapper[4558]: I0120 17:01:18.111275 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-worker-c4f864f89-58gb9"] Jan 20 17:01:18 crc kubenswrapper[4558]: I0120 17:01:18.111744 4558 scope.go:117] "RemoveContainer" containerID="ee8ddf384630628e434ded9ac4a8dd2f149e7593afd334474324206c1828ec18" Jan 20 17:01:18 crc kubenswrapper[4558]: I0120 17:01:18.115779 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-worker-c4f864f89-58gb9"] Jan 20 17:01:18 crc kubenswrapper[4558]: I0120 17:01:18.126331 4558 scope.go:117] "RemoveContainer" containerID="6a1a27bb8b609381b6880d7309447e5d15f349864bd55fc2d2324115d9939990" Jan 20 17:01:18 crc kubenswrapper[4558]: I0120 17:01:18.571897 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7776f921-0ae7-4a8c-b444-cf5b4b9a4f65" path="/var/lib/kubelet/pods/7776f921-0ae7-4a8c-b444-cf5b4b9a4f65/volumes" Jan 20 17:01:18 crc kubenswrapper[4558]: I0120 17:01:18.572638 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="de7f5467-1e83-42f0-86bb-ade85deec8f3" path="/var/lib/kubelet/pods/de7f5467-1e83-42f0-86bb-ade85deec8f3/volumes" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.939135 4558 kubelet.go:2421] 
"SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-0"] Jan 20 17:01:24 crc kubenswrapper[4558]: E0120 17:01:24.939679 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad708448-38df-4494-bca7-fe394c9b53a7" containerName="object-expirer" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.939692 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad708448-38df-4494-bca7-fe394c9b53a7" containerName="object-expirer" Jan 20 17:01:24 crc kubenswrapper[4558]: E0120 17:01:24.939707 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad708448-38df-4494-bca7-fe394c9b53a7" containerName="object-auditor" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.939712 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad708448-38df-4494-bca7-fe394c9b53a7" containerName="object-auditor" Jan 20 17:01:24 crc kubenswrapper[4558]: E0120 17:01:24.939717 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a140c9a4-bec6-42e6-bc6c-d63566e4f7f6" containerName="placement-log" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.939722 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a140c9a4-bec6-42e6-bc6c-d63566e4f7f6" containerName="placement-log" Jan 20 17:01:24 crc kubenswrapper[4558]: E0120 17:01:24.939732 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c268a2f5-7c67-4935-8f3e-bdd83aeccc95" containerName="placement-log" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.939737 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c268a2f5-7c67-4935-8f3e-bdd83aeccc95" containerName="placement-log" Jan 20 17:01:24 crc kubenswrapper[4558]: E0120 17:01:24.939744 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad708448-38df-4494-bca7-fe394c9b53a7" containerName="object-server" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.939749 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad708448-38df-4494-bca7-fe394c9b53a7" containerName="object-server" Jan 20 17:01:24 crc kubenswrapper[4558]: E0120 17:01:24.939759 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="180488ea-6eeb-4078-9b57-351bdfb54f5d" containerName="nova-cell1-conductor-conductor" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.939764 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="180488ea-6eeb-4078-9b57-351bdfb54f5d" containerName="nova-cell1-conductor-conductor" Jan 20 17:01:24 crc kubenswrapper[4558]: E0120 17:01:24.939772 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f868eba-f4e1-4e32-b271-391cf271fe97" containerName="rabbitmq" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.939777 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f868eba-f4e1-4e32-b271-391cf271fe97" containerName="rabbitmq" Jan 20 17:01:24 crc kubenswrapper[4558]: E0120 17:01:24.939784 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f868eba-f4e1-4e32-b271-391cf271fe97" containerName="setup-container" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.939789 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f868eba-f4e1-4e32-b271-391cf271fe97" containerName="setup-container" Jan 20 17:01:24 crc kubenswrapper[4558]: E0120 17:01:24.939799 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="764c7a15-6a1a-470c-9d0b-a63ed418cc09" containerName="cinder-api" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.939804 4558 state_mem.go:107] "Deleted 
CPUSet assignment" podUID="764c7a15-6a1a-470c-9d0b-a63ed418cc09" containerName="cinder-api" Jan 20 17:01:24 crc kubenswrapper[4558]: E0120 17:01:24.939810 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad708448-38df-4494-bca7-fe394c9b53a7" containerName="account-reaper" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.939815 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad708448-38df-4494-bca7-fe394c9b53a7" containerName="account-reaper" Jan 20 17:01:24 crc kubenswrapper[4558]: E0120 17:01:24.939824 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de7f5467-1e83-42f0-86bb-ade85deec8f3" containerName="barbican-worker-log" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.939829 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="de7f5467-1e83-42f0-86bb-ade85deec8f3" containerName="barbican-worker-log" Jan 20 17:01:24 crc kubenswrapper[4558]: E0120 17:01:24.939834 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad708448-38df-4494-bca7-fe394c9b53a7" containerName="rsync" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.939838 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad708448-38df-4494-bca7-fe394c9b53a7" containerName="rsync" Jan 20 17:01:24 crc kubenswrapper[4558]: E0120 17:01:24.939846 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad708448-38df-4494-bca7-fe394c9b53a7" containerName="account-auditor" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.939851 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad708448-38df-4494-bca7-fe394c9b53a7" containerName="account-auditor" Jan 20 17:01:24 crc kubenswrapper[4558]: E0120 17:01:24.939860 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad708448-38df-4494-bca7-fe394c9b53a7" containerName="account-server" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.939866 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad708448-38df-4494-bca7-fe394c9b53a7" containerName="account-server" Jan 20 17:01:24 crc kubenswrapper[4558]: E0120 17:01:24.939871 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf355276-9e62-474e-bfb1-616dde5b83bc" containerName="setup-container" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.939876 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf355276-9e62-474e-bfb1-616dde5b83bc" containerName="setup-container" Jan 20 17:01:24 crc kubenswrapper[4558]: E0120 17:01:24.939882 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40234b95-d302-420a-96d7-c56ffe609530" containerName="barbican-worker-log" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.939887 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="40234b95-d302-420a-96d7-c56ffe609530" containerName="barbican-worker-log" Jan 20 17:01:24 crc kubenswrapper[4558]: E0120 17:01:24.939895 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8911a4f-a706-4956-9028-138c018a92ba" containerName="cinder-scheduler" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.939901 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8911a4f-a706-4956-9028-138c018a92ba" containerName="cinder-scheduler" Jan 20 17:01:24 crc kubenswrapper[4558]: E0120 17:01:24.939907 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d3423c4-ff78-4ff7-b42b-b3c93b309d52" containerName="neutron-httpd" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.939912 4558 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="5d3423c4-ff78-4ff7-b42b-b3c93b309d52" containerName="neutron-httpd" Jan 20 17:01:24 crc kubenswrapper[4558]: E0120 17:01:24.939917 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43781c23-b22a-4449-8306-67efbe8dd6fc" containerName="keystone-api" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.939922 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="43781c23-b22a-4449-8306-67efbe8dd6fc" containerName="keystone-api" Jan 20 17:01:24 crc kubenswrapper[4558]: E0120 17:01:24.939931 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad708448-38df-4494-bca7-fe394c9b53a7" containerName="container-replicator" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.939936 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad708448-38df-4494-bca7-fe394c9b53a7" containerName="container-replicator" Jan 20 17:01:24 crc kubenswrapper[4558]: E0120 17:01:24.939942 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7776f921-0ae7-4a8c-b444-cf5b4b9a4f65" containerName="barbican-keystone-listener-log" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.939948 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7776f921-0ae7-4a8c-b444-cf5b4b9a4f65" containerName="barbican-keystone-listener-log" Jan 20 17:01:24 crc kubenswrapper[4558]: E0120 17:01:24.939957 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8074f6f3-f564-42bc-b08b-28ffe75bbbc5" containerName="ceilometer-central-agent" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.939962 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8074f6f3-f564-42bc-b08b-28ffe75bbbc5" containerName="ceilometer-central-agent" Jan 20 17:01:24 crc kubenswrapper[4558]: E0120 17:01:24.939968 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8074f6f3-f564-42bc-b08b-28ffe75bbbc5" containerName="proxy-httpd" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.939974 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8074f6f3-f564-42bc-b08b-28ffe75bbbc5" containerName="proxy-httpd" Jan 20 17:01:24 crc kubenswrapper[4558]: E0120 17:01:24.939979 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d3423c4-ff78-4ff7-b42b-b3c93b309d52" containerName="neutron-api" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.939984 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d3423c4-ff78-4ff7-b42b-b3c93b309d52" containerName="neutron-api" Jan 20 17:01:24 crc kubenswrapper[4558]: E0120 17:01:24.939992 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b28e6999-784e-4577-88bb-db648f7a3cbc" containerName="glance-httpd" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.939997 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b28e6999-784e-4577-88bb-db648f7a3cbc" containerName="glance-httpd" Jan 20 17:01:24 crc kubenswrapper[4558]: E0120 17:01:24.940006 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d0b0e33-9ce7-44d9-8b09-6008715fe1a0" containerName="neutron-httpd" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940011 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d0b0e33-9ce7-44d9-8b09-6008715fe1a0" containerName="neutron-httpd" Jan 20 17:01:24 crc kubenswrapper[4558]: E0120 17:01:24.940016 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="764c7a15-6a1a-470c-9d0b-a63ed418cc09" containerName="cinder-api-log" Jan 20 17:01:24 crc kubenswrapper[4558]: 
I0120 17:01:24.940022 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="764c7a15-6a1a-470c-9d0b-a63ed418cc09" containerName="cinder-api-log" Jan 20 17:01:24 crc kubenswrapper[4558]: E0120 17:01:24.940027 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7776f921-0ae7-4a8c-b444-cf5b4b9a4f65" containerName="barbican-keystone-listener" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940032 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7776f921-0ae7-4a8c-b444-cf5b4b9a4f65" containerName="barbican-keystone-listener" Jan 20 17:01:24 crc kubenswrapper[4558]: E0120 17:01:24.940040 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10c92abf-cd48-4659-8595-ce9610c0fe2e" containerName="nova-scheduler-scheduler" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940044 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="10c92abf-cd48-4659-8595-ce9610c0fe2e" containerName="nova-scheduler-scheduler" Jan 20 17:01:24 crc kubenswrapper[4558]: E0120 17:01:24.940053 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d5c2e68-fb9d-4818-a0c2-27db2516e92b" containerName="proxy-server" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940058 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d5c2e68-fb9d-4818-a0c2-27db2516e92b" containerName="proxy-server" Jan 20 17:01:24 crc kubenswrapper[4558]: E0120 17:01:24.940065 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8074f6f3-f564-42bc-b08b-28ffe75bbbc5" containerName="ceilometer-notification-agent" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940069 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8074f6f3-f564-42bc-b08b-28ffe75bbbc5" containerName="ceilometer-notification-agent" Jan 20 17:01:24 crc kubenswrapper[4558]: E0120 17:01:24.940077 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915" containerName="barbican-keystone-listener-log" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940082 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915" containerName="barbican-keystone-listener-log" Jan 20 17:01:24 crc kubenswrapper[4558]: E0120 17:01:24.940087 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cae28711-fbe7-40cc-8f31-4b6332ab5378" containerName="nova-api-api" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940092 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="cae28711-fbe7-40cc-8f31-4b6332ab5378" containerName="nova-api-api" Jan 20 17:01:24 crc kubenswrapper[4558]: E0120 17:01:24.940098 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad708448-38df-4494-bca7-fe394c9b53a7" containerName="object-replicator" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940103 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad708448-38df-4494-bca7-fe394c9b53a7" containerName="object-replicator" Jan 20 17:01:24 crc kubenswrapper[4558]: E0120 17:01:24.940109 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b2ffa52-4d19-46f6-aea5-62fc758def73" containerName="kube-state-metrics" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940114 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b2ffa52-4d19-46f6-aea5-62fc758def73" containerName="kube-state-metrics" Jan 20 17:01:24 crc kubenswrapper[4558]: E0120 17:01:24.940120 4558 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="ad708448-38df-4494-bca7-fe394c9b53a7" containerName="container-updater" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940124 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad708448-38df-4494-bca7-fe394c9b53a7" containerName="container-updater" Jan 20 17:01:24 crc kubenswrapper[4558]: E0120 17:01:24.940132 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8074f6f3-f564-42bc-b08b-28ffe75bbbc5" containerName="sg-core" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940137 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8074f6f3-f564-42bc-b08b-28ffe75bbbc5" containerName="sg-core" Jan 20 17:01:24 crc kubenswrapper[4558]: E0120 17:01:24.940145 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9be5f16-16de-4ae6-9007-cca3e80501b9" containerName="init" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940150 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9be5f16-16de-4ae6-9007-cca3e80501b9" containerName="init" Jan 20 17:01:24 crc kubenswrapper[4558]: E0120 17:01:24.940158 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915" containerName="barbican-keystone-listener" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940175 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915" containerName="barbican-keystone-listener" Jan 20 17:01:24 crc kubenswrapper[4558]: E0120 17:01:24.940183 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cae28711-fbe7-40cc-8f31-4b6332ab5378" containerName="nova-api-log" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940188 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="cae28711-fbe7-40cc-8f31-4b6332ab5378" containerName="nova-api-log" Jan 20 17:01:24 crc kubenswrapper[4558]: E0120 17:01:24.940194 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e554db35-2dba-4138-9ca0-bd1371a9c63d" containerName="nova-metadata-log" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940199 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e554db35-2dba-4138-9ca0-bd1371a9c63d" containerName="nova-metadata-log" Jan 20 17:01:24 crc kubenswrapper[4558]: E0120 17:01:24.940207 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c6de94f-c605-43d9-97b5-ccf91e49d1fb" containerName="barbican-api-log" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940212 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c6de94f-c605-43d9-97b5-ccf91e49d1fb" containerName="barbican-api-log" Jan 20 17:01:24 crc kubenswrapper[4558]: E0120 17:01:24.940218 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4969c1af-53c0-435a-bd06-6bd493c81c80" containerName="keystone-api" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940232 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4969c1af-53c0-435a-bd06-6bd493c81c80" containerName="keystone-api" Jan 20 17:01:24 crc kubenswrapper[4558]: E0120 17:01:24.940240 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c268a2f5-7c67-4935-8f3e-bdd83aeccc95" containerName="placement-api" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940245 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c268a2f5-7c67-4935-8f3e-bdd83aeccc95" containerName="placement-api" Jan 20 17:01:24 crc kubenswrapper[4558]: E0120 17:01:24.940252 4558 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="ad708448-38df-4494-bca7-fe394c9b53a7" containerName="account-replicator" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940258 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad708448-38df-4494-bca7-fe394c9b53a7" containerName="account-replicator" Jan 20 17:01:24 crc kubenswrapper[4558]: E0120 17:01:24.940264 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a140c9a4-bec6-42e6-bc6c-d63566e4f7f6" containerName="placement-api" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940269 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a140c9a4-bec6-42e6-bc6c-d63566e4f7f6" containerName="placement-api" Jan 20 17:01:24 crc kubenswrapper[4558]: E0120 17:01:24.940275 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40234b95-d302-420a-96d7-c56ffe609530" containerName="barbican-worker" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940280 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="40234b95-d302-420a-96d7-c56ffe609530" containerName="barbican-worker" Jan 20 17:01:24 crc kubenswrapper[4558]: E0120 17:01:24.940288 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de7f5467-1e83-42f0-86bb-ade85deec8f3" containerName="barbican-worker" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940293 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="de7f5467-1e83-42f0-86bb-ade85deec8f3" containerName="barbican-worker" Jan 20 17:01:24 crc kubenswrapper[4558]: E0120 17:01:24.940298 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad708448-38df-4494-bca7-fe394c9b53a7" containerName="object-updater" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940302 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad708448-38df-4494-bca7-fe394c9b53a7" containerName="object-updater" Jan 20 17:01:24 crc kubenswrapper[4558]: E0120 17:01:24.940308 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3735ca3d-3764-4d36-b912-fbf0bfb96dd8" containerName="barbican-api-log" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940313 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="3735ca3d-3764-4d36-b912-fbf0bfb96dd8" containerName="barbican-api-log" Jan 20 17:01:24 crc kubenswrapper[4558]: E0120 17:01:24.940320 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b28e6999-784e-4577-88bb-db648f7a3cbc" containerName="glance-log" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940325 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b28e6999-784e-4577-88bb-db648f7a3cbc" containerName="glance-log" Jan 20 17:01:24 crc kubenswrapper[4558]: E0120 17:01:24.940331 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad708448-38df-4494-bca7-fe394c9b53a7" containerName="container-server" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940336 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad708448-38df-4494-bca7-fe394c9b53a7" containerName="container-server" Jan 20 17:01:24 crc kubenswrapper[4558]: E0120 17:01:24.940343 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c6de94f-c605-43d9-97b5-ccf91e49d1fb" containerName="barbican-api" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940348 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c6de94f-c605-43d9-97b5-ccf91e49d1fb" containerName="barbican-api" Jan 20 17:01:24 crc kubenswrapper[4558]: E0120 17:01:24.940355 4558 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="3735ca3d-3764-4d36-b912-fbf0bfb96dd8" containerName="barbican-api" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940360 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="3735ca3d-3764-4d36-b912-fbf0bfb96dd8" containerName="barbican-api" Jan 20 17:01:24 crc kubenswrapper[4558]: E0120 17:01:24.940368 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ea3b871-4db3-4108-baea-e57a23d9d6c5" containerName="nova-cell0-conductor-conductor" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940373 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ea3b871-4db3-4108-baea-e57a23d9d6c5" containerName="nova-cell0-conductor-conductor" Jan 20 17:01:24 crc kubenswrapper[4558]: E0120 17:01:24.940381 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf355276-9e62-474e-bfb1-616dde5b83bc" containerName="rabbitmq" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940386 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf355276-9e62-474e-bfb1-616dde5b83bc" containerName="rabbitmq" Jan 20 17:01:24 crc kubenswrapper[4558]: E0120 17:01:24.940397 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d2ccd35d-37c4-450d-b04c-ac505e35b0e8" containerName="glance-log" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940401 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d2ccd35d-37c4-450d-b04c-ac505e35b0e8" containerName="glance-log" Jan 20 17:01:24 crc kubenswrapper[4558]: E0120 17:01:24.940407 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad708448-38df-4494-bca7-fe394c9b53a7" containerName="container-auditor" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940412 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad708448-38df-4494-bca7-fe394c9b53a7" containerName="container-auditor" Jan 20 17:01:24 crc kubenswrapper[4558]: E0120 17:01:24.940420 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e554db35-2dba-4138-9ca0-bd1371a9c63d" containerName="nova-metadata-metadata" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940425 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e554db35-2dba-4138-9ca0-bd1371a9c63d" containerName="nova-metadata-metadata" Jan 20 17:01:24 crc kubenswrapper[4558]: E0120 17:01:24.940432 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d0b0e33-9ce7-44d9-8b09-6008715fe1a0" containerName="neutron-api" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940437 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d0b0e33-9ce7-44d9-8b09-6008715fe1a0" containerName="neutron-api" Jan 20 17:01:24 crc kubenswrapper[4558]: E0120 17:01:24.940446 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad708448-38df-4494-bca7-fe394c9b53a7" containerName="swift-recon-cron" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940451 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad708448-38df-4494-bca7-fe394c9b53a7" containerName="swift-recon-cron" Jan 20 17:01:24 crc kubenswrapper[4558]: E0120 17:01:24.940457 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d5c2e68-fb9d-4818-a0c2-27db2516e92b" containerName="proxy-httpd" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940462 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d5c2e68-fb9d-4818-a0c2-27db2516e92b" containerName="proxy-httpd" Jan 20 17:01:24 crc kubenswrapper[4558]: E0120 17:01:24.940468 4558 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="d2ccd35d-37c4-450d-b04c-ac505e35b0e8" containerName="glance-httpd" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940472 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d2ccd35d-37c4-450d-b04c-ac505e35b0e8" containerName="glance-httpd" Jan 20 17:01:24 crc kubenswrapper[4558]: E0120 17:01:24.940479 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8911a4f-a706-4956-9028-138c018a92ba" containerName="probe" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940483 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8911a4f-a706-4956-9028-138c018a92ba" containerName="probe" Jan 20 17:01:24 crc kubenswrapper[4558]: E0120 17:01:24.940490 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9be5f16-16de-4ae6-9007-cca3e80501b9" containerName="dnsmasq-dns" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940495 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9be5f16-16de-4ae6-9007-cca3e80501b9" containerName="dnsmasq-dns" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940614 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad708448-38df-4494-bca7-fe394c9b53a7" containerName="swift-recon-cron" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940626 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="3c6de94f-c605-43d9-97b5-ccf91e49d1fb" containerName="barbican-api" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940635 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="1d5c2e68-fb9d-4818-a0c2-27db2516e92b" containerName="proxy-httpd" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940644 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad708448-38df-4494-bca7-fe394c9b53a7" containerName="account-reaper" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940651 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad708448-38df-4494-bca7-fe394c9b53a7" containerName="object-replicator" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940658 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="3c6de94f-c605-43d9-97b5-ccf91e49d1fb" containerName="barbican-api-log" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940664 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8074f6f3-f564-42bc-b08b-28ffe75bbbc5" containerName="ceilometer-central-agent" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940671 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="3735ca3d-3764-4d36-b912-fbf0bfb96dd8" containerName="barbican-api-log" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940677 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a140c9a4-bec6-42e6-bc6c-d63566e4f7f6" containerName="placement-api" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940682 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad708448-38df-4494-bca7-fe394c9b53a7" containerName="account-replicator" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940688 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915" containerName="barbican-keystone-listener" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940697 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c268a2f5-7c67-4935-8f3e-bdd83aeccc95" containerName="placement-log" Jan 20 17:01:24 crc 
kubenswrapper[4558]: I0120 17:01:24.940702 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b28e6999-784e-4577-88bb-db648f7a3cbc" containerName="glance-log" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940711 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="43781c23-b22a-4449-8306-67efbe8dd6fc" containerName="keystone-api" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940717 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="de7f5467-1e83-42f0-86bb-ade85deec8f3" containerName="barbican-worker" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940725 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d2ccd35d-37c4-450d-b04c-ac505e35b0e8" containerName="glance-log" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940732 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="1d5c2e68-fb9d-4818-a0c2-27db2516e92b" containerName="proxy-server" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940739 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad708448-38df-4494-bca7-fe394c9b53a7" containerName="container-updater" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940745 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e554db35-2dba-4138-9ca0-bd1371a9c63d" containerName="nova-metadata-metadata" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940750 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="9ea3b871-4db3-4108-baea-e57a23d9d6c5" containerName="nova-cell0-conductor-conductor" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940757 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8074f6f3-f564-42bc-b08b-28ffe75bbbc5" containerName="sg-core" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940763 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c268a2f5-7c67-4935-8f3e-bdd83aeccc95" containerName="placement-api" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940769 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d9be5f16-16de-4ae6-9007-cca3e80501b9" containerName="dnsmasq-dns" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940776 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8074f6f3-f564-42bc-b08b-28ffe75bbbc5" containerName="ceilometer-notification-agent" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940782 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8074f6f3-f564-42bc-b08b-28ffe75bbbc5" containerName="proxy-httpd" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940787 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad708448-38df-4494-bca7-fe394c9b53a7" containerName="object-auditor" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940794 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad708448-38df-4494-bca7-fe394c9b53a7" containerName="container-server" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940800 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="180488ea-6eeb-4078-9b57-351bdfb54f5d" containerName="nova-cell1-conductor-conductor" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940809 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="10c92abf-cd48-4659-8595-ce9610c0fe2e" containerName="nova-scheduler-scheduler" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940818 4558 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="ad708448-38df-4494-bca7-fe394c9b53a7" containerName="object-expirer" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940824 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d2ccd35d-37c4-450d-b04c-ac505e35b0e8" containerName="glance-httpd" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940829 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="3b2ffa52-4d19-46f6-aea5-62fc758def73" containerName="kube-state-metrics" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940838 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="3f868eba-f4e1-4e32-b271-391cf271fe97" containerName="rabbitmq" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940845 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad708448-38df-4494-bca7-fe394c9b53a7" containerName="rsync" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940852 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad708448-38df-4494-bca7-fe394c9b53a7" containerName="object-updater" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940859 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="3735ca3d-3764-4d36-b912-fbf0bfb96dd8" containerName="barbican-api" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940867 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad708448-38df-4494-bca7-fe394c9b53a7" containerName="object-server" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940873 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e554db35-2dba-4138-9ca0-bd1371a9c63d" containerName="nova-metadata-log" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940878 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="764c7a15-6a1a-470c-9d0b-a63ed418cc09" containerName="cinder-api" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940884 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad708448-38df-4494-bca7-fe394c9b53a7" containerName="account-server" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940891 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad708448-38df-4494-bca7-fe394c9b53a7" containerName="container-replicator" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940897 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b8911a4f-a706-4956-9028-138c018a92ba" containerName="cinder-scheduler" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940903 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b8911a4f-a706-4956-9028-138c018a92ba" containerName="probe" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940909 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="7d0b0e33-9ce7-44d9-8b09-6008715fe1a0" containerName="neutron-api" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940915 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="4969c1af-53c0-435a-bd06-6bd493c81c80" containerName="keystone-api" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940921 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b28e6999-784e-4577-88bb-db648f7a3cbc" containerName="glance-httpd" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940929 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="cae28711-fbe7-40cc-8f31-4b6332ab5378" containerName="nova-api-api" Jan 20 17:01:24 crc 
kubenswrapper[4558]: I0120 17:01:24.940935 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="de7f5467-1e83-42f0-86bb-ade85deec8f3" containerName="barbican-worker-log" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940941 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad708448-38df-4494-bca7-fe394c9b53a7" containerName="account-auditor" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940948 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a140c9a4-bec6-42e6-bc6c-d63566e4f7f6" containerName="placement-log" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940954 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="7776f921-0ae7-4a8c-b444-cf5b4b9a4f65" containerName="barbican-keystone-listener" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940959 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="bf355276-9e62-474e-bfb1-616dde5b83bc" containerName="rabbitmq" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940965 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="40234b95-d302-420a-96d7-c56ffe609530" containerName="barbican-worker" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940972 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="69c5c1c6-5c3f-48d4-8d14-c3c7cdcb1915" containerName="barbican-keystone-listener-log" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940979 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="cae28711-fbe7-40cc-8f31-4b6332ab5378" containerName="nova-api-log" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940986 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="7d0b0e33-9ce7-44d9-8b09-6008715fe1a0" containerName="neutron-httpd" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940993 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="5d3423c4-ff78-4ff7-b42b-b3c93b309d52" containerName="neutron-api" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.940999 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="7776f921-0ae7-4a8c-b444-cf5b4b9a4f65" containerName="barbican-keystone-listener-log" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.941005 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad708448-38df-4494-bca7-fe394c9b53a7" containerName="container-auditor" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.941011 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="40234b95-d302-420a-96d7-c56ffe609530" containerName="barbican-worker-log" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.941017 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="764c7a15-6a1a-470c-9d0b-a63ed418cc09" containerName="cinder-api-log" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.941023 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="5d3423c4-ff78-4ff7-b42b-b3c93b309d52" containerName="neutron-httpd" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.941618 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.943771 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"rabbitmq-cell1-server-conf" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.944017 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"rabbitmq-cell1-plugins-conf" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.944138 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"rabbitmq-cell1-default-user" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.944294 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-rabbitmq-cell1-svc" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.944457 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"rabbitmq-cell1-erlang-cookie" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.944572 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"rabbitmq-cell1-config-data" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.944695 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"rabbitmq-cell1-server-dockercfg-w5cnb" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.955098 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-0"] Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.976425 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"0bfc3458-cc0f-4bea-9794-52c5e81fe055\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.976464 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/0bfc3458-cc0f-4bea-9794-52c5e81fe055-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"0bfc3458-cc0f-4bea-9794-52c5e81fe055\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.976494 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0bfc3458-cc0f-4bea-9794-52c5e81fe055-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"0bfc3458-cc0f-4bea-9794-52c5e81fe055\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.976541 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0bfc3458-cc0f-4bea-9794-52c5e81fe055-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"0bfc3458-cc0f-4bea-9794-52c5e81fe055\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.976565 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/0bfc3458-cc0f-4bea-9794-52c5e81fe055-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"0bfc3458-cc0f-4bea-9794-52c5e81fe055\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" 
Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.976584 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0bfc3458-cc0f-4bea-9794-52c5e81fe055-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"0bfc3458-cc0f-4bea-9794-52c5e81fe055\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.976610 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0bfc3458-cc0f-4bea-9794-52c5e81fe055-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"0bfc3458-cc0f-4bea-9794-52c5e81fe055\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.976625 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0bfc3458-cc0f-4bea-9794-52c5e81fe055-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"0bfc3458-cc0f-4bea-9794-52c5e81fe055\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.976655 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0bfc3458-cc0f-4bea-9794-52c5e81fe055-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"0bfc3458-cc0f-4bea-9794-52c5e81fe055\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.976674 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0bfc3458-cc0f-4bea-9794-52c5e81fe055-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"0bfc3458-cc0f-4bea-9794-52c5e81fe055\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:01:24 crc kubenswrapper[4558]: I0120 17:01:24.976693 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m6dlz\" (UniqueName: \"kubernetes.io/projected/0bfc3458-cc0f-4bea-9794-52c5e81fe055-kube-api-access-m6dlz\") pod \"rabbitmq-cell1-server-0\" (UID: \"0bfc3458-cc0f-4bea-9794-52c5e81fe055\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:01:25 crc kubenswrapper[4558]: I0120 17:01:25.077762 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0bfc3458-cc0f-4bea-9794-52c5e81fe055-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"0bfc3458-cc0f-4bea-9794-52c5e81fe055\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:01:25 crc kubenswrapper[4558]: I0120 17:01:25.077802 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/0bfc3458-cc0f-4bea-9794-52c5e81fe055-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"0bfc3458-cc0f-4bea-9794-52c5e81fe055\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:01:25 crc kubenswrapper[4558]: I0120 17:01:25.077819 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0bfc3458-cc0f-4bea-9794-52c5e81fe055-erlang-cookie-secret\") pod 
\"rabbitmq-cell1-server-0\" (UID: \"0bfc3458-cc0f-4bea-9794-52c5e81fe055\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:01:25 crc kubenswrapper[4558]: I0120 17:01:25.077846 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0bfc3458-cc0f-4bea-9794-52c5e81fe055-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"0bfc3458-cc0f-4bea-9794-52c5e81fe055\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:01:25 crc kubenswrapper[4558]: I0120 17:01:25.077861 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0bfc3458-cc0f-4bea-9794-52c5e81fe055-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"0bfc3458-cc0f-4bea-9794-52c5e81fe055\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:01:25 crc kubenswrapper[4558]: I0120 17:01:25.077890 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0bfc3458-cc0f-4bea-9794-52c5e81fe055-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"0bfc3458-cc0f-4bea-9794-52c5e81fe055\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:01:25 crc kubenswrapper[4558]: I0120 17:01:25.077908 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0bfc3458-cc0f-4bea-9794-52c5e81fe055-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"0bfc3458-cc0f-4bea-9794-52c5e81fe055\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:01:25 crc kubenswrapper[4558]: I0120 17:01:25.077928 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m6dlz\" (UniqueName: \"kubernetes.io/projected/0bfc3458-cc0f-4bea-9794-52c5e81fe055-kube-api-access-m6dlz\") pod \"rabbitmq-cell1-server-0\" (UID: \"0bfc3458-cc0f-4bea-9794-52c5e81fe055\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:01:25 crc kubenswrapper[4558]: I0120 17:01:25.077964 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"0bfc3458-cc0f-4bea-9794-52c5e81fe055\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:01:25 crc kubenswrapper[4558]: I0120 17:01:25.077988 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/0bfc3458-cc0f-4bea-9794-52c5e81fe055-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"0bfc3458-cc0f-4bea-9794-52c5e81fe055\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:01:25 crc kubenswrapper[4558]: I0120 17:01:25.078014 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0bfc3458-cc0f-4bea-9794-52c5e81fe055-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"0bfc3458-cc0f-4bea-9794-52c5e81fe055\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:01:25 crc kubenswrapper[4558]: I0120 17:01:25.078559 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"0bfc3458-cc0f-4bea-9794-52c5e81fe055\") device mount path \"/mnt/openstack/pv10\"" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:01:25 crc kubenswrapper[4558]: I0120 17:01:25.078906 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0bfc3458-cc0f-4bea-9794-52c5e81fe055-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"0bfc3458-cc0f-4bea-9794-52c5e81fe055\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:01:25 crc kubenswrapper[4558]: I0120 17:01:25.078912 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0bfc3458-cc0f-4bea-9794-52c5e81fe055-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"0bfc3458-cc0f-4bea-9794-52c5e81fe055\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:01:25 crc kubenswrapper[4558]: I0120 17:01:25.079192 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0bfc3458-cc0f-4bea-9794-52c5e81fe055-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"0bfc3458-cc0f-4bea-9794-52c5e81fe055\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:01:25 crc kubenswrapper[4558]: I0120 17:01:25.079368 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0bfc3458-cc0f-4bea-9794-52c5e81fe055-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"0bfc3458-cc0f-4bea-9794-52c5e81fe055\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:01:25 crc kubenswrapper[4558]: I0120 17:01:25.079511 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/0bfc3458-cc0f-4bea-9794-52c5e81fe055-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"0bfc3458-cc0f-4bea-9794-52c5e81fe055\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:01:25 crc kubenswrapper[4558]: I0120 17:01:25.082259 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0bfc3458-cc0f-4bea-9794-52c5e81fe055-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"0bfc3458-cc0f-4bea-9794-52c5e81fe055\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:01:25 crc kubenswrapper[4558]: I0120 17:01:25.082756 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0bfc3458-cc0f-4bea-9794-52c5e81fe055-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"0bfc3458-cc0f-4bea-9794-52c5e81fe055\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:01:25 crc kubenswrapper[4558]: I0120 17:01:25.082985 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/0bfc3458-cc0f-4bea-9794-52c5e81fe055-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"0bfc3458-cc0f-4bea-9794-52c5e81fe055\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:01:25 crc kubenswrapper[4558]: I0120 17:01:25.083665 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0bfc3458-cc0f-4bea-9794-52c5e81fe055-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"0bfc3458-cc0f-4bea-9794-52c5e81fe055\") " 
pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:01:25 crc kubenswrapper[4558]: I0120 17:01:25.091033 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m6dlz\" (UniqueName: \"kubernetes.io/projected/0bfc3458-cc0f-4bea-9794-52c5e81fe055-kube-api-access-m6dlz\") pod \"rabbitmq-cell1-server-0\" (UID: \"0bfc3458-cc0f-4bea-9794-52c5e81fe055\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:01:25 crc kubenswrapper[4558]: I0120 17:01:25.093859 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"0bfc3458-cc0f-4bea-9794-52c5e81fe055\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:01:25 crc kubenswrapper[4558]: I0120 17:01:25.262833 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:01:25 crc kubenswrapper[4558]: I0120 17:01:25.611872 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-0"] Jan 20 17:01:25 crc kubenswrapper[4558]: I0120 17:01:25.880691 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:01:25 crc kubenswrapper[4558]: I0120 17:01:25.881801 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:01:25 crc kubenswrapper[4558]: I0120 17:01:25.882964 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"rabbitmq-erlang-cookie" Jan 20 17:01:25 crc kubenswrapper[4558]: I0120 17:01:25.883412 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"rabbitmq-server-dockercfg-fltmk" Jan 20 17:01:25 crc kubenswrapper[4558]: I0120 17:01:25.883674 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"rabbitmq-server-conf" Jan 20 17:01:25 crc kubenswrapper[4558]: I0120 17:01:25.883831 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"rabbitmq-default-user" Jan 20 17:01:25 crc kubenswrapper[4558]: I0120 17:01:25.884329 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"rabbitmq-plugins-conf" Jan 20 17:01:25 crc kubenswrapper[4558]: I0120 17:01:25.884610 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"rabbitmq-config-data" Jan 20 17:01:25 crc kubenswrapper[4558]: I0120 17:01:25.884633 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-rabbitmq-svc" Jan 20 17:01:25 crc kubenswrapper[4558]: I0120 17:01:25.889077 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:01:25 crc kubenswrapper[4558]: I0120 17:01:25.988852 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pmzzv\" (UniqueName: \"kubernetes.io/projected/ac55b716-d8fd-4628-8627-f94b5a4e7c78-kube-api-access-pmzzv\") pod \"rabbitmq-server-0\" (UID: \"ac55b716-d8fd-4628-8627-f94b5a4e7c78\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:01:25 crc kubenswrapper[4558]: I0120 17:01:25.988891 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: 
\"kubernetes.io/empty-dir/ac55b716-d8fd-4628-8627-f94b5a4e7c78-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"ac55b716-d8fd-4628-8627-f94b5a4e7c78\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:01:25 crc kubenswrapper[4558]: I0120 17:01:25.988910 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/ac55b716-d8fd-4628-8627-f94b5a4e7c78-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"ac55b716-d8fd-4628-8627-f94b5a4e7c78\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:01:25 crc kubenswrapper[4558]: I0120 17:01:25.988927 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/ac55b716-d8fd-4628-8627-f94b5a4e7c78-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"ac55b716-d8fd-4628-8627-f94b5a4e7c78\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:01:25 crc kubenswrapper[4558]: I0120 17:01:25.988941 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/ac55b716-d8fd-4628-8627-f94b5a4e7c78-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"ac55b716-d8fd-4628-8627-f94b5a4e7c78\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:01:25 crc kubenswrapper[4558]: I0120 17:01:25.988976 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/ac55b716-d8fd-4628-8627-f94b5a4e7c78-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"ac55b716-d8fd-4628-8627-f94b5a4e7c78\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:01:25 crc kubenswrapper[4558]: I0120 17:01:25.988993 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/ac55b716-d8fd-4628-8627-f94b5a4e7c78-pod-info\") pod \"rabbitmq-server-0\" (UID: \"ac55b716-d8fd-4628-8627-f94b5a4e7c78\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:01:25 crc kubenswrapper[4558]: I0120 17:01:25.989020 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-server-0\" (UID: \"ac55b716-d8fd-4628-8627-f94b5a4e7c78\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:01:25 crc kubenswrapper[4558]: I0120 17:01:25.989044 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/ac55b716-d8fd-4628-8627-f94b5a4e7c78-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"ac55b716-d8fd-4628-8627-f94b5a4e7c78\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:01:25 crc kubenswrapper[4558]: I0120 17:01:25.989084 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ac55b716-d8fd-4628-8627-f94b5a4e7c78-config-data\") pod \"rabbitmq-server-0\" (UID: \"ac55b716-d8fd-4628-8627-f94b5a4e7c78\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:01:25 crc kubenswrapper[4558]: I0120 17:01:25.989112 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" 
(UniqueName: \"kubernetes.io/configmap/ac55b716-d8fd-4628-8627-f94b5a4e7c78-server-conf\") pod \"rabbitmq-server-0\" (UID: \"ac55b716-d8fd-4628-8627-f94b5a4e7c78\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:01:26 crc kubenswrapper[4558]: I0120 17:01:26.090433 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ac55b716-d8fd-4628-8627-f94b5a4e7c78-config-data\") pod \"rabbitmq-server-0\" (UID: \"ac55b716-d8fd-4628-8627-f94b5a4e7c78\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:01:26 crc kubenswrapper[4558]: I0120 17:01:26.090473 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/ac55b716-d8fd-4628-8627-f94b5a4e7c78-server-conf\") pod \"rabbitmq-server-0\" (UID: \"ac55b716-d8fd-4628-8627-f94b5a4e7c78\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:01:26 crc kubenswrapper[4558]: I0120 17:01:26.090522 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pmzzv\" (UniqueName: \"kubernetes.io/projected/ac55b716-d8fd-4628-8627-f94b5a4e7c78-kube-api-access-pmzzv\") pod \"rabbitmq-server-0\" (UID: \"ac55b716-d8fd-4628-8627-f94b5a4e7c78\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:01:26 crc kubenswrapper[4558]: I0120 17:01:26.090543 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/ac55b716-d8fd-4628-8627-f94b5a4e7c78-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"ac55b716-d8fd-4628-8627-f94b5a4e7c78\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:01:26 crc kubenswrapper[4558]: I0120 17:01:26.090564 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/ac55b716-d8fd-4628-8627-f94b5a4e7c78-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"ac55b716-d8fd-4628-8627-f94b5a4e7c78\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:01:26 crc kubenswrapper[4558]: I0120 17:01:26.090578 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/ac55b716-d8fd-4628-8627-f94b5a4e7c78-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"ac55b716-d8fd-4628-8627-f94b5a4e7c78\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:01:26 crc kubenswrapper[4558]: I0120 17:01:26.090591 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/ac55b716-d8fd-4628-8627-f94b5a4e7c78-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"ac55b716-d8fd-4628-8627-f94b5a4e7c78\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:01:26 crc kubenswrapper[4558]: I0120 17:01:26.090615 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/ac55b716-d8fd-4628-8627-f94b5a4e7c78-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"ac55b716-d8fd-4628-8627-f94b5a4e7c78\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:01:26 crc kubenswrapper[4558]: I0120 17:01:26.090629 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/ac55b716-d8fd-4628-8627-f94b5a4e7c78-pod-info\") pod \"rabbitmq-server-0\" (UID: 
\"ac55b716-d8fd-4628-8627-f94b5a4e7c78\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:01:26 crc kubenswrapper[4558]: I0120 17:01:26.090652 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-server-0\" (UID: \"ac55b716-d8fd-4628-8627-f94b5a4e7c78\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:01:26 crc kubenswrapper[4558]: I0120 17:01:26.090671 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/ac55b716-d8fd-4628-8627-f94b5a4e7c78-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"ac55b716-d8fd-4628-8627-f94b5a4e7c78\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:01:26 crc kubenswrapper[4558]: I0120 17:01:26.091357 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ac55b716-d8fd-4628-8627-f94b5a4e7c78-config-data\") pod \"rabbitmq-server-0\" (UID: \"ac55b716-d8fd-4628-8627-f94b5a4e7c78\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:01:26 crc kubenswrapper[4558]: I0120 17:01:26.091485 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-server-0\" (UID: \"ac55b716-d8fd-4628-8627-f94b5a4e7c78\") device mount path \"/mnt/openstack/pv03\"" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:01:26 crc kubenswrapper[4558]: I0120 17:01:26.091636 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/ac55b716-d8fd-4628-8627-f94b5a4e7c78-server-conf\") pod \"rabbitmq-server-0\" (UID: \"ac55b716-d8fd-4628-8627-f94b5a4e7c78\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:01:26 crc kubenswrapper[4558]: I0120 17:01:26.091824 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/ac55b716-d8fd-4628-8627-f94b5a4e7c78-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"ac55b716-d8fd-4628-8627-f94b5a4e7c78\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:01:26 crc kubenswrapper[4558]: I0120 17:01:26.094196 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/ac55b716-d8fd-4628-8627-f94b5a4e7c78-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"ac55b716-d8fd-4628-8627-f94b5a4e7c78\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:01:26 crc kubenswrapper[4558]: I0120 17:01:26.094247 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/ac55b716-d8fd-4628-8627-f94b5a4e7c78-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"ac55b716-d8fd-4628-8627-f94b5a4e7c78\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:01:26 crc kubenswrapper[4558]: I0120 17:01:26.097647 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/ac55b716-d8fd-4628-8627-f94b5a4e7c78-pod-info\") pod \"rabbitmq-server-0\" (UID: \"ac55b716-d8fd-4628-8627-f94b5a4e7c78\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:01:26 crc kubenswrapper[4558]: I0120 17:01:26.097710 4558 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/ac55b716-d8fd-4628-8627-f94b5a4e7c78-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"ac55b716-d8fd-4628-8627-f94b5a4e7c78\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:01:26 crc kubenswrapper[4558]: I0120 17:01:26.097902 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/ac55b716-d8fd-4628-8627-f94b5a4e7c78-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"ac55b716-d8fd-4628-8627-f94b5a4e7c78\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:01:26 crc kubenswrapper[4558]: I0120 17:01:26.101371 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/ac55b716-d8fd-4628-8627-f94b5a4e7c78-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"ac55b716-d8fd-4628-8627-f94b5a4e7c78\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:01:26 crc kubenswrapper[4558]: I0120 17:01:26.108810 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pmzzv\" (UniqueName: \"kubernetes.io/projected/ac55b716-d8fd-4628-8627-f94b5a4e7c78-kube-api-access-pmzzv\") pod \"rabbitmq-server-0\" (UID: \"ac55b716-d8fd-4628-8627-f94b5a4e7c78\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:01:26 crc kubenswrapper[4558]: I0120 17:01:26.113024 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-server-0\" (UID: \"ac55b716-d8fd-4628-8627-f94b5a4e7c78\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:01:26 crc kubenswrapper[4558]: I0120 17:01:26.132100 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" event={"ID":"0bfc3458-cc0f-4bea-9794-52c5e81fe055","Type":"ContainerStarted","Data":"3c0f4928b3e51534677c8f6e406cffa9ff0f5b03b6f0a82165ab1b09f880329d"} Jan 20 17:01:26 crc kubenswrapper[4558]: I0120 17:01:26.202869 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:01:26 crc kubenswrapper[4558]: I0120 17:01:26.315795 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:01:26 crc kubenswrapper[4558]: I0120 17:01:26.316895 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:01:26 crc kubenswrapper[4558]: I0120 17:01:26.319983 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-scripts" Jan 20 17:01:26 crc kubenswrapper[4558]: I0120 17:01:26.320155 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"galera-openstack-dockercfg-xxw26" Jan 20 17:01:26 crc kubenswrapper[4558]: I0120 17:01:26.320395 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-galera-openstack-svc" Jan 20 17:01:26 crc kubenswrapper[4558]: I0120 17:01:26.325069 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-config-data" Jan 20 17:01:26 crc kubenswrapper[4558]: I0120 17:01:26.330614 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"combined-ca-bundle" Jan 20 17:01:26 crc kubenswrapper[4558]: I0120 17:01:26.330725 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:01:26 crc kubenswrapper[4558]: I0120 17:01:26.395071 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad58cc97-cbed-48c8-ab51-ebb920a1454c-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"ad58cc97-cbed-48c8-ab51-ebb920a1454c\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:01:26 crc kubenswrapper[4558]: I0120 17:01:26.395114 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k7ntl\" (UniqueName: \"kubernetes.io/projected/ad58cc97-cbed-48c8-ab51-ebb920a1454c-kube-api-access-k7ntl\") pod \"openstack-galera-0\" (UID: \"ad58cc97-cbed-48c8-ab51-ebb920a1454c\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:01:26 crc kubenswrapper[4558]: I0120 17:01:26.395245 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/ad58cc97-cbed-48c8-ab51-ebb920a1454c-config-data-default\") pod \"openstack-galera-0\" (UID: \"ad58cc97-cbed-48c8-ab51-ebb920a1454c\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:01:26 crc kubenswrapper[4558]: I0120 17:01:26.395291 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ad58cc97-cbed-48c8-ab51-ebb920a1454c-operator-scripts\") pod \"openstack-galera-0\" (UID: \"ad58cc97-cbed-48c8-ab51-ebb920a1454c\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:01:26 crc kubenswrapper[4558]: I0120 17:01:26.395361 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/ad58cc97-cbed-48c8-ab51-ebb920a1454c-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"ad58cc97-cbed-48c8-ab51-ebb920a1454c\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:01:26 crc kubenswrapper[4558]: I0120 17:01:26.395377 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/ad58cc97-cbed-48c8-ab51-ebb920a1454c-config-data-generated\") pod \"openstack-galera-0\" (UID: \"ad58cc97-cbed-48c8-ab51-ebb920a1454c\") " 
pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:01:26 crc kubenswrapper[4558]: I0120 17:01:26.395430 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/ad58cc97-cbed-48c8-ab51-ebb920a1454c-kolla-config\") pod \"openstack-galera-0\" (UID: \"ad58cc97-cbed-48c8-ab51-ebb920a1454c\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:01:26 crc kubenswrapper[4558]: I0120 17:01:26.395450 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"openstack-galera-0\" (UID: \"ad58cc97-cbed-48c8-ab51-ebb920a1454c\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:01:26 crc kubenswrapper[4558]: I0120 17:01:26.496798 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/ad58cc97-cbed-48c8-ab51-ebb920a1454c-kolla-config\") pod \"openstack-galera-0\" (UID: \"ad58cc97-cbed-48c8-ab51-ebb920a1454c\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:01:26 crc kubenswrapper[4558]: I0120 17:01:26.496844 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"openstack-galera-0\" (UID: \"ad58cc97-cbed-48c8-ab51-ebb920a1454c\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:01:26 crc kubenswrapper[4558]: I0120 17:01:26.496883 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad58cc97-cbed-48c8-ab51-ebb920a1454c-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"ad58cc97-cbed-48c8-ab51-ebb920a1454c\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:01:26 crc kubenswrapper[4558]: I0120 17:01:26.496900 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k7ntl\" (UniqueName: \"kubernetes.io/projected/ad58cc97-cbed-48c8-ab51-ebb920a1454c-kube-api-access-k7ntl\") pod \"openstack-galera-0\" (UID: \"ad58cc97-cbed-48c8-ab51-ebb920a1454c\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:01:26 crc kubenswrapper[4558]: I0120 17:01:26.496927 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/ad58cc97-cbed-48c8-ab51-ebb920a1454c-config-data-default\") pod \"openstack-galera-0\" (UID: \"ad58cc97-cbed-48c8-ab51-ebb920a1454c\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:01:26 crc kubenswrapper[4558]: I0120 17:01:26.496950 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ad58cc97-cbed-48c8-ab51-ebb920a1454c-operator-scripts\") pod \"openstack-galera-0\" (UID: \"ad58cc97-cbed-48c8-ab51-ebb920a1454c\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:01:26 crc kubenswrapper[4558]: I0120 17:01:26.496984 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/ad58cc97-cbed-48c8-ab51-ebb920a1454c-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"ad58cc97-cbed-48c8-ab51-ebb920a1454c\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:01:26 crc kubenswrapper[4558]: I0120 17:01:26.496998 
4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/ad58cc97-cbed-48c8-ab51-ebb920a1454c-config-data-generated\") pod \"openstack-galera-0\" (UID: \"ad58cc97-cbed-48c8-ab51-ebb920a1454c\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:01:26 crc kubenswrapper[4558]: I0120 17:01:26.497318 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"openstack-galera-0\" (UID: \"ad58cc97-cbed-48c8-ab51-ebb920a1454c\") device mount path \"/mnt/openstack/pv13\"" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:01:26 crc kubenswrapper[4558]: I0120 17:01:26.497541 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/ad58cc97-cbed-48c8-ab51-ebb920a1454c-kolla-config\") pod \"openstack-galera-0\" (UID: \"ad58cc97-cbed-48c8-ab51-ebb920a1454c\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:01:26 crc kubenswrapper[4558]: I0120 17:01:26.497876 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/ad58cc97-cbed-48c8-ab51-ebb920a1454c-config-data-default\") pod \"openstack-galera-0\" (UID: \"ad58cc97-cbed-48c8-ab51-ebb920a1454c\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:01:26 crc kubenswrapper[4558]: I0120 17:01:26.498086 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/ad58cc97-cbed-48c8-ab51-ebb920a1454c-config-data-generated\") pod \"openstack-galera-0\" (UID: \"ad58cc97-cbed-48c8-ab51-ebb920a1454c\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:01:26 crc kubenswrapper[4558]: I0120 17:01:26.498463 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ad58cc97-cbed-48c8-ab51-ebb920a1454c-operator-scripts\") pod \"openstack-galera-0\" (UID: \"ad58cc97-cbed-48c8-ab51-ebb920a1454c\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:01:26 crc kubenswrapper[4558]: I0120 17:01:26.502688 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad58cc97-cbed-48c8-ab51-ebb920a1454c-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"ad58cc97-cbed-48c8-ab51-ebb920a1454c\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:01:26 crc kubenswrapper[4558]: I0120 17:01:26.505654 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/ad58cc97-cbed-48c8-ab51-ebb920a1454c-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"ad58cc97-cbed-48c8-ab51-ebb920a1454c\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:01:26 crc kubenswrapper[4558]: I0120 17:01:26.512426 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"openstack-galera-0\" (UID: \"ad58cc97-cbed-48c8-ab51-ebb920a1454c\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:01:26 crc kubenswrapper[4558]: I0120 17:01:26.514001 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k7ntl\" (UniqueName: 
\"kubernetes.io/projected/ad58cc97-cbed-48c8-ab51-ebb920a1454c-kube-api-access-k7ntl\") pod \"openstack-galera-0\" (UID: \"ad58cc97-cbed-48c8-ab51-ebb920a1454c\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:01:26 crc kubenswrapper[4558]: I0120 17:01:26.609603 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:01:26 crc kubenswrapper[4558]: W0120 17:01:26.614613 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podac55b716_d8fd_4628_8627_f94b5a4e7c78.slice/crio-95121bc78a0c3e8ae6bd3053e586139acb7bcbd2005925044af78716bc5d3507 WatchSource:0}: Error finding container 95121bc78a0c3e8ae6bd3053e586139acb7bcbd2005925044af78716bc5d3507: Status 404 returned error can't find the container with id 95121bc78a0c3e8ae6bd3053e586139acb7bcbd2005925044af78716bc5d3507 Jan 20 17:01:26 crc kubenswrapper[4558]: I0120 17:01:26.642135 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:01:26 crc kubenswrapper[4558]: I0120 17:01:26.994932 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:01:27 crc kubenswrapper[4558]: W0120 17:01:27.095765 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podad58cc97_cbed_48c8_ab51_ebb920a1454c.slice/crio-0f5b5bb20c4f4a6ee245c0c6e38b3e7165ab520d961a3c86674c0add57eb1395 WatchSource:0}: Error finding container 0f5b5bb20c4f4a6ee245c0c6e38b3e7165ab520d961a3c86674c0add57eb1395: Status 404 returned error can't find the container with id 0f5b5bb20c4f4a6ee245c0c6e38b3e7165ab520d961a3c86674c0add57eb1395 Jan 20 17:01:27 crc kubenswrapper[4558]: I0120 17:01:27.150567 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"ad58cc97-cbed-48c8-ab51-ebb920a1454c","Type":"ContainerStarted","Data":"0f5b5bb20c4f4a6ee245c0c6e38b3e7165ab520d961a3c86674c0add57eb1395"} Jan 20 17:01:27 crc kubenswrapper[4558]: I0120 17:01:27.170277 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-0" event={"ID":"ac55b716-d8fd-4628-8627-f94b5a4e7c78","Type":"ContainerStarted","Data":"95121bc78a0c3e8ae6bd3053e586139acb7bcbd2005925044af78716bc5d3507"} Jan 20 17:01:27 crc kubenswrapper[4558]: I0120 17:01:27.177377 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" event={"ID":"0bfc3458-cc0f-4bea-9794-52c5e81fe055","Type":"ContainerStarted","Data":"211ea3122594cd690108a40fb561073e8fbaa0a0b8f082460cd412375c3bbcfa"} Jan 20 17:01:27 crc kubenswrapper[4558]: I0120 17:01:27.645851 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:01:27 crc kubenswrapper[4558]: I0120 17:01:27.646990 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:01:27 crc kubenswrapper[4558]: I0120 17:01:27.648425 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-cell1-config-data" Jan 20 17:01:27 crc kubenswrapper[4558]: I0120 17:01:27.648792 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-galera-openstack-cell1-svc" Jan 20 17:01:27 crc kubenswrapper[4558]: I0120 17:01:27.648980 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"galera-openstack-cell1-dockercfg-mxc4s" Jan 20 17:01:27 crc kubenswrapper[4558]: I0120 17:01:27.649427 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-cell1-scripts" Jan 20 17:01:27 crc kubenswrapper[4558]: I0120 17:01:27.656908 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:01:27 crc kubenswrapper[4558]: I0120 17:01:27.812038 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/6df6ca0e-78e9-4248-8cbe-b9934e0ad090-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"6df6ca0e-78e9-4248-8cbe-b9934e0ad090\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:01:27 crc kubenswrapper[4558]: I0120 17:01:27.812088 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/6df6ca0e-78e9-4248-8cbe-b9934e0ad090-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"6df6ca0e-78e9-4248-8cbe-b9934e0ad090\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:01:27 crc kubenswrapper[4558]: I0120 17:01:27.812108 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6df6ca0e-78e9-4248-8cbe-b9934e0ad090-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"6df6ca0e-78e9-4248-8cbe-b9934e0ad090\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:01:27 crc kubenswrapper[4558]: I0120 17:01:27.812191 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/6df6ca0e-78e9-4248-8cbe-b9934e0ad090-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"6df6ca0e-78e9-4248-8cbe-b9934e0ad090\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:01:27 crc kubenswrapper[4558]: I0120 17:01:27.812211 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6df6ca0e-78e9-4248-8cbe-b9934e0ad090-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"6df6ca0e-78e9-4248-8cbe-b9934e0ad090\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:01:27 crc kubenswrapper[4558]: I0120 17:01:27.812242 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bbllh\" (UniqueName: \"kubernetes.io/projected/6df6ca0e-78e9-4248-8cbe-b9934e0ad090-kube-api-access-bbllh\") pod \"openstack-cell1-galera-0\" (UID: \"6df6ca0e-78e9-4248-8cbe-b9934e0ad090\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:01:27 crc 
kubenswrapper[4558]: I0120 17:01:27.812281 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage17-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage17-crc\") pod \"openstack-cell1-galera-0\" (UID: \"6df6ca0e-78e9-4248-8cbe-b9934e0ad090\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:01:27 crc kubenswrapper[4558]: I0120 17:01:27.812309 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/6df6ca0e-78e9-4248-8cbe-b9934e0ad090-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"6df6ca0e-78e9-4248-8cbe-b9934e0ad090\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:01:27 crc kubenswrapper[4558]: I0120 17:01:27.913373 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage17-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage17-crc\") pod \"openstack-cell1-galera-0\" (UID: \"6df6ca0e-78e9-4248-8cbe-b9934e0ad090\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:01:27 crc kubenswrapper[4558]: I0120 17:01:27.913422 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/6df6ca0e-78e9-4248-8cbe-b9934e0ad090-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"6df6ca0e-78e9-4248-8cbe-b9934e0ad090\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:01:27 crc kubenswrapper[4558]: I0120 17:01:27.913528 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/6df6ca0e-78e9-4248-8cbe-b9934e0ad090-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"6df6ca0e-78e9-4248-8cbe-b9934e0ad090\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:01:27 crc kubenswrapper[4558]: I0120 17:01:27.913551 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/6df6ca0e-78e9-4248-8cbe-b9934e0ad090-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"6df6ca0e-78e9-4248-8cbe-b9934e0ad090\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:01:27 crc kubenswrapper[4558]: I0120 17:01:27.913568 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6df6ca0e-78e9-4248-8cbe-b9934e0ad090-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"6df6ca0e-78e9-4248-8cbe-b9934e0ad090\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:01:27 crc kubenswrapper[4558]: I0120 17:01:27.913597 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/6df6ca0e-78e9-4248-8cbe-b9934e0ad090-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"6df6ca0e-78e9-4248-8cbe-b9934e0ad090\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:01:27 crc kubenswrapper[4558]: I0120 17:01:27.913610 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6df6ca0e-78e9-4248-8cbe-b9934e0ad090-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"6df6ca0e-78e9-4248-8cbe-b9934e0ad090\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:01:27 
crc kubenswrapper[4558]: I0120 17:01:27.913629 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bbllh\" (UniqueName: \"kubernetes.io/projected/6df6ca0e-78e9-4248-8cbe-b9934e0ad090-kube-api-access-bbllh\") pod \"openstack-cell1-galera-0\" (UID: \"6df6ca0e-78e9-4248-8cbe-b9934e0ad090\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:01:27 crc kubenswrapper[4558]: I0120 17:01:27.913729 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage17-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage17-crc\") pod \"openstack-cell1-galera-0\" (UID: \"6df6ca0e-78e9-4248-8cbe-b9934e0ad090\") device mount path \"/mnt/openstack/pv17\"" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:01:27 crc kubenswrapper[4558]: I0120 17:01:27.914364 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/6df6ca0e-78e9-4248-8cbe-b9934e0ad090-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"6df6ca0e-78e9-4248-8cbe-b9934e0ad090\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:01:27 crc kubenswrapper[4558]: I0120 17:01:27.914511 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/6df6ca0e-78e9-4248-8cbe-b9934e0ad090-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"6df6ca0e-78e9-4248-8cbe-b9934e0ad090\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:01:27 crc kubenswrapper[4558]: I0120 17:01:27.914802 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/6df6ca0e-78e9-4248-8cbe-b9934e0ad090-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"6df6ca0e-78e9-4248-8cbe-b9934e0ad090\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:01:27 crc kubenswrapper[4558]: I0120 17:01:27.915326 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6df6ca0e-78e9-4248-8cbe-b9934e0ad090-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"6df6ca0e-78e9-4248-8cbe-b9934e0ad090\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:01:27 crc kubenswrapper[4558]: I0120 17:01:27.917660 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/6df6ca0e-78e9-4248-8cbe-b9934e0ad090-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"6df6ca0e-78e9-4248-8cbe-b9934e0ad090\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:01:27 crc kubenswrapper[4558]: I0120 17:01:27.918139 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6df6ca0e-78e9-4248-8cbe-b9934e0ad090-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"6df6ca0e-78e9-4248-8cbe-b9934e0ad090\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:01:27 crc kubenswrapper[4558]: I0120 17:01:27.927874 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bbllh\" (UniqueName: \"kubernetes.io/projected/6df6ca0e-78e9-4248-8cbe-b9934e0ad090-kube-api-access-bbllh\") pod \"openstack-cell1-galera-0\" (UID: \"6df6ca0e-78e9-4248-8cbe-b9934e0ad090\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:01:27 
crc kubenswrapper[4558]: I0120 17:01:27.928082 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage17-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage17-crc\") pod \"openstack-cell1-galera-0\" (UID: \"6df6ca0e-78e9-4248-8cbe-b9934e0ad090\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:01:27 crc kubenswrapper[4558]: I0120 17:01:27.961612 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:01:28 crc kubenswrapper[4558]: I0120 17:01:28.184597 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"ad58cc97-cbed-48c8-ab51-ebb920a1454c","Type":"ContainerStarted","Data":"6b30ba291754c6b147355d2af5037f3a797be5fec9ed22ddfa4282645c1aabd8"} Jan 20 17:01:28 crc kubenswrapper[4558]: I0120 17:01:28.187320 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-0" event={"ID":"ac55b716-d8fd-4628-8627-f94b5a4e7c78","Type":"ContainerStarted","Data":"763e019dc0645475db7f3291e11eaa6439d09116fc1541ec554386e84ad53b91"} Jan 20 17:01:28 crc kubenswrapper[4558]: I0120 17:01:28.327516 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:01:29 crc kubenswrapper[4558]: I0120 17:01:29.017550 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:01:29 crc kubenswrapper[4558]: I0120 17:01:29.018511 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:01:29 crc kubenswrapper[4558]: I0120 17:01:29.020911 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"memcached-config-data" Jan 20 17:01:29 crc kubenswrapper[4558]: I0120 17:01:29.020924 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"memcached-memcached-dockercfg-4ll6b" Jan 20 17:01:29 crc kubenswrapper[4558]: I0120 17:01:29.021218 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-memcached-svc" Jan 20 17:01:29 crc kubenswrapper[4558]: I0120 17:01:29.026234 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:01:29 crc kubenswrapper[4558]: I0120 17:01:29.130587 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/aed5856e-9412-48a1-ac94-cc2f4bd05633-config-data\") pod \"memcached-0\" (UID: \"aed5856e-9412-48a1-ac94-cc2f4bd05633\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:01:29 crc kubenswrapper[4558]: I0120 17:01:29.130648 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jfg2s\" (UniqueName: \"kubernetes.io/projected/aed5856e-9412-48a1-ac94-cc2f4bd05633-kube-api-access-jfg2s\") pod \"memcached-0\" (UID: \"aed5856e-9412-48a1-ac94-cc2f4bd05633\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:01:29 crc kubenswrapper[4558]: I0120 17:01:29.130674 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aed5856e-9412-48a1-ac94-cc2f4bd05633-combined-ca-bundle\") pod \"memcached-0\" (UID: \"aed5856e-9412-48a1-ac94-cc2f4bd05633\") " pod="openstack-kuttl-tests/memcached-0" 
Jan 20 17:01:29 crc kubenswrapper[4558]: I0120 17:01:29.130883 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/aed5856e-9412-48a1-ac94-cc2f4bd05633-memcached-tls-certs\") pod \"memcached-0\" (UID: \"aed5856e-9412-48a1-ac94-cc2f4bd05633\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:01:29 crc kubenswrapper[4558]: I0120 17:01:29.130965 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/aed5856e-9412-48a1-ac94-cc2f4bd05633-kolla-config\") pod \"memcached-0\" (UID: \"aed5856e-9412-48a1-ac94-cc2f4bd05633\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:01:29 crc kubenswrapper[4558]: I0120 17:01:29.193218 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"6df6ca0e-78e9-4248-8cbe-b9934e0ad090","Type":"ContainerStarted","Data":"e2b2159ece9fe35f6232ad479a5441bc80baee8db96eab6040c5486631ebcd3b"} Jan 20 17:01:29 crc kubenswrapper[4558]: I0120 17:01:29.193289 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"6df6ca0e-78e9-4248-8cbe-b9934e0ad090","Type":"ContainerStarted","Data":"bb9a7be9d730d7a53cc3193f2bf9b7844d3c2fc3d1f33cd0fe0c6034fbf0a2b0"} Jan 20 17:01:29 crc kubenswrapper[4558]: I0120 17:01:29.232241 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aed5856e-9412-48a1-ac94-cc2f4bd05633-combined-ca-bundle\") pod \"memcached-0\" (UID: \"aed5856e-9412-48a1-ac94-cc2f4bd05633\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:01:29 crc kubenswrapper[4558]: I0120 17:01:29.232391 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/aed5856e-9412-48a1-ac94-cc2f4bd05633-memcached-tls-certs\") pod \"memcached-0\" (UID: \"aed5856e-9412-48a1-ac94-cc2f4bd05633\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:01:29 crc kubenswrapper[4558]: I0120 17:01:29.232426 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/aed5856e-9412-48a1-ac94-cc2f4bd05633-kolla-config\") pod \"memcached-0\" (UID: \"aed5856e-9412-48a1-ac94-cc2f4bd05633\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:01:29 crc kubenswrapper[4558]: I0120 17:01:29.232520 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/aed5856e-9412-48a1-ac94-cc2f4bd05633-config-data\") pod \"memcached-0\" (UID: \"aed5856e-9412-48a1-ac94-cc2f4bd05633\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:01:29 crc kubenswrapper[4558]: I0120 17:01:29.232558 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jfg2s\" (UniqueName: \"kubernetes.io/projected/aed5856e-9412-48a1-ac94-cc2f4bd05633-kube-api-access-jfg2s\") pod \"memcached-0\" (UID: \"aed5856e-9412-48a1-ac94-cc2f4bd05633\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:01:29 crc kubenswrapper[4558]: I0120 17:01:29.233304 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/aed5856e-9412-48a1-ac94-cc2f4bd05633-kolla-config\") pod \"memcached-0\" (UID: 
\"aed5856e-9412-48a1-ac94-cc2f4bd05633\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:01:29 crc kubenswrapper[4558]: I0120 17:01:29.233381 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/aed5856e-9412-48a1-ac94-cc2f4bd05633-config-data\") pod \"memcached-0\" (UID: \"aed5856e-9412-48a1-ac94-cc2f4bd05633\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:01:29 crc kubenswrapper[4558]: I0120 17:01:29.235821 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/aed5856e-9412-48a1-ac94-cc2f4bd05633-memcached-tls-certs\") pod \"memcached-0\" (UID: \"aed5856e-9412-48a1-ac94-cc2f4bd05633\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:01:29 crc kubenswrapper[4558]: I0120 17:01:29.236600 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aed5856e-9412-48a1-ac94-cc2f4bd05633-combined-ca-bundle\") pod \"memcached-0\" (UID: \"aed5856e-9412-48a1-ac94-cc2f4bd05633\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:01:29 crc kubenswrapper[4558]: I0120 17:01:29.247628 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jfg2s\" (UniqueName: \"kubernetes.io/projected/aed5856e-9412-48a1-ac94-cc2f4bd05633-kube-api-access-jfg2s\") pod \"memcached-0\" (UID: \"aed5856e-9412-48a1-ac94-cc2f4bd05633\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:01:29 crc kubenswrapper[4558]: I0120 17:01:29.333353 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:01:29 crc kubenswrapper[4558]: I0120 17:01:29.692905 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:01:30 crc kubenswrapper[4558]: I0120 17:01:30.203099 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"aed5856e-9412-48a1-ac94-cc2f4bd05633","Type":"ContainerStarted","Data":"8029c61f17dcdbc8fe0413af1d186a7d097951d5e9f7472abca626983580e5f9"} Jan 20 17:01:30 crc kubenswrapper[4558]: I0120 17:01:30.203630 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"aed5856e-9412-48a1-ac94-cc2f4bd05633","Type":"ContainerStarted","Data":"0f730c94c3c1e790432bf0de8379ee4742303bb73bc2fffb2de4fb18a9670da2"} Jan 20 17:01:30 crc kubenswrapper[4558]: I0120 17:01:30.203719 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:01:30 crc kubenswrapper[4558]: I0120 17:01:30.205780 4558 generic.go:334] "Generic (PLEG): container finished" podID="ad58cc97-cbed-48c8-ab51-ebb920a1454c" containerID="6b30ba291754c6b147355d2af5037f3a797be5fec9ed22ddfa4282645c1aabd8" exitCode=0 Jan 20 17:01:30 crc kubenswrapper[4558]: I0120 17:01:30.205867 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"ad58cc97-cbed-48c8-ab51-ebb920a1454c","Type":"ContainerDied","Data":"6b30ba291754c6b147355d2af5037f3a797be5fec9ed22ddfa4282645c1aabd8"} Jan 20 17:01:30 crc kubenswrapper[4558]: I0120 17:01:30.225383 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/memcached-0" podStartSLOduration=1.225368333 podStartE2EDuration="1.225368333s" podCreationTimestamp="2026-01-20 17:01:29 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:01:30.220176689 +0000 UTC m=+1183.980514655" watchObservedRunningTime="2026-01-20 17:01:30.225368333 +0000 UTC m=+1183.985706299" Jan 20 17:01:30 crc kubenswrapper[4558]: I0120 17:01:30.604604 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:01:30 crc kubenswrapper[4558]: I0120 17:01:30.605835 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:01:30 crc kubenswrapper[4558]: I0120 17:01:30.607376 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"telemetry-ceilometer-dockercfg-ct76g" Jan 20 17:01:30 crc kubenswrapper[4558]: I0120 17:01:30.617761 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:01:30 crc kubenswrapper[4558]: I0120 17:01:30.651864 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hf7p8\" (UniqueName: \"kubernetes.io/projected/55c237c8-936c-45d1-b06c-374da878c2b7-kube-api-access-hf7p8\") pod \"kube-state-metrics-0\" (UID: \"55c237c8-936c-45d1-b06c-374da878c2b7\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:01:30 crc kubenswrapper[4558]: I0120 17:01:30.752880 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hf7p8\" (UniqueName: \"kubernetes.io/projected/55c237c8-936c-45d1-b06c-374da878c2b7-kube-api-access-hf7p8\") pod \"kube-state-metrics-0\" (UID: \"55c237c8-936c-45d1-b06c-374da878c2b7\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:01:30 crc kubenswrapper[4558]: I0120 17:01:30.769621 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hf7p8\" (UniqueName: \"kubernetes.io/projected/55c237c8-936c-45d1-b06c-374da878c2b7-kube-api-access-hf7p8\") pod \"kube-state-metrics-0\" (UID: \"55c237c8-936c-45d1-b06c-374da878c2b7\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:01:30 crc kubenswrapper[4558]: I0120 17:01:30.924705 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:01:31 crc kubenswrapper[4558]: I0120 17:01:31.214739 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"ad58cc97-cbed-48c8-ab51-ebb920a1454c","Type":"ContainerStarted","Data":"5f0d33357c659b175318f67f48f0766deb32c17bae3e330e4f6f14c73b8bc529"} Jan 20 17:01:31 crc kubenswrapper[4558]: I0120 17:01:31.216826 4558 generic.go:334] "Generic (PLEG): container finished" podID="6df6ca0e-78e9-4248-8cbe-b9934e0ad090" containerID="e2b2159ece9fe35f6232ad479a5441bc80baee8db96eab6040c5486631ebcd3b" exitCode=0 Jan 20 17:01:31 crc kubenswrapper[4558]: I0120 17:01:31.216914 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"6df6ca0e-78e9-4248-8cbe-b9934e0ad090","Type":"ContainerDied","Data":"e2b2159ece9fe35f6232ad479a5441bc80baee8db96eab6040c5486631ebcd3b"} Jan 20 17:01:31 crc kubenswrapper[4558]: I0120 17:01:31.233493 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/openstack-galera-0" podStartSLOduration=5.233474455 podStartE2EDuration="5.233474455s" podCreationTimestamp="2026-01-20 17:01:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:01:31.228796717 +0000 UTC m=+1184.989134684" watchObservedRunningTime="2026-01-20 17:01:31.233474455 +0000 UTC m=+1184.993812422" Jan 20 17:01:31 crc kubenswrapper[4558]: I0120 17:01:31.321205 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:01:31 crc kubenswrapper[4558]: W0120 17:01:31.321559 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod55c237c8_936c_45d1_b06c_374da878c2b7.slice/crio-87af8981e510ec9fdbcbf38749659350d0e429a3b5aff714261014a5b42e5314 WatchSource:0}: Error finding container 87af8981e510ec9fdbcbf38749659350d0e429a3b5aff714261014a5b42e5314: Status 404 returned error can't find the container with id 87af8981e510ec9fdbcbf38749659350d0e429a3b5aff714261014a5b42e5314 Jan 20 17:01:31 crc kubenswrapper[4558]: I0120 17:01:31.325576 4558 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 20 17:01:32 crc kubenswrapper[4558]: I0120 17:01:32.224426 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"55c237c8-936c-45d1-b06c-374da878c2b7","Type":"ContainerStarted","Data":"40a664774128414719591bb05b093341822cb024885c28f9e0a4e07573f067a1"} Jan 20 17:01:32 crc kubenswrapper[4558]: I0120 17:01:32.224675 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"55c237c8-936c-45d1-b06c-374da878c2b7","Type":"ContainerStarted","Data":"87af8981e510ec9fdbcbf38749659350d0e429a3b5aff714261014a5b42e5314"} Jan 20 17:01:32 crc kubenswrapper[4558]: I0120 17:01:32.224691 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:01:32 crc kubenswrapper[4558]: I0120 17:01:32.226765 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"6df6ca0e-78e9-4248-8cbe-b9934e0ad090","Type":"ContainerStarted","Data":"3ce6116ef9c85330a51ece86b35504a1ac4b48ba623b6c363ad2b015e33fec35"} 
Jan 20 17:01:32 crc kubenswrapper[4558]: I0120 17:01:32.237391 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/kube-state-metrics-0" podStartSLOduration=1.936613086 podStartE2EDuration="2.237379405s" podCreationTimestamp="2026-01-20 17:01:30 +0000 UTC" firstStartedPulling="2026-01-20 17:01:31.325359706 +0000 UTC m=+1185.085697673" lastFinishedPulling="2026-01-20 17:01:31.626126025 +0000 UTC m=+1185.386463992" observedRunningTime="2026-01-20 17:01:32.232838245 +0000 UTC m=+1185.993176212" watchObservedRunningTime="2026-01-20 17:01:32.237379405 +0000 UTC m=+1185.997717372" Jan 20 17:01:32 crc kubenswrapper[4558]: I0120 17:01:32.249847 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/openstack-cell1-galera-0" podStartSLOduration=5.249835871 podStartE2EDuration="5.249835871s" podCreationTimestamp="2026-01-20 17:01:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:01:32.246844845 +0000 UTC m=+1186.007182813" watchObservedRunningTime="2026-01-20 17:01:32.249835871 +0000 UTC m=+1186.010173838" Jan 20 17:01:34 crc kubenswrapper[4558]: I0120 17:01:34.219834 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:01:34 crc kubenswrapper[4558]: I0120 17:01:34.220794 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:01:34 crc kubenswrapper[4558]: I0120 17:01:34.223803 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ovndbcluster-nb-ovndbs" Jan 20 17:01:34 crc kubenswrapper[4558]: I0120 17:01:34.223853 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovndbcluster-nb-config" Jan 20 17:01:34 crc kubenswrapper[4558]: I0120 17:01:34.223893 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ovn-metrics" Jan 20 17:01:34 crc kubenswrapper[4558]: I0120 17:01:34.224255 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovndbcluster-nb-scripts" Jan 20 17:01:34 crc kubenswrapper[4558]: I0120 17:01:34.225104 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ovncluster-ovndbcluster-nb-dockercfg-ld444" Jan 20 17:01:34 crc kubenswrapper[4558]: I0120 17:01:34.268199 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:01:34 crc kubenswrapper[4558]: I0120 17:01:34.308016 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:01:34 crc kubenswrapper[4558]: I0120 17:01:34.308058 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:01:34 crc kubenswrapper[4558]: I0120 17:01:34.308079 4558 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:01:34 crc kubenswrapper[4558]: I0120 17:01:34.308135 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:01:34 crc kubenswrapper[4558]: I0120 17:01:34.308179 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2w5d8\" (UniqueName: \"kubernetes.io/projected/2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf-kube-api-access-2w5d8\") pod \"ovsdbserver-nb-0\" (UID: \"2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:01:34 crc kubenswrapper[4558]: I0120 17:01:34.308249 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:01:34 crc kubenswrapper[4558]: I0120 17:01:34.308271 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf-config\") pod \"ovsdbserver-nb-0\" (UID: \"2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:01:34 crc kubenswrapper[4558]: I0120 17:01:34.308305 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-nb-0\" (UID: \"2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:01:34 crc kubenswrapper[4558]: I0120 17:01:34.334884 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:01:34 crc kubenswrapper[4558]: I0120 17:01:34.409683 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf-config\") pod \"ovsdbserver-nb-0\" (UID: \"2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:01:34 crc kubenswrapper[4558]: I0120 17:01:34.409744 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-nb-0\" (UID: \"2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:01:34 crc kubenswrapper[4558]: I0120 17:01:34.409796 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 
17:01:34 crc kubenswrapper[4558]: I0120 17:01:34.409823 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:01:34 crc kubenswrapper[4558]: I0120 17:01:34.409860 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:01:34 crc kubenswrapper[4558]: I0120 17:01:34.409890 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:01:34 crc kubenswrapper[4558]: I0120 17:01:34.409909 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2w5d8\" (UniqueName: \"kubernetes.io/projected/2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf-kube-api-access-2w5d8\") pod \"ovsdbserver-nb-0\" (UID: \"2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:01:34 crc kubenswrapper[4558]: I0120 17:01:34.409968 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:01:34 crc kubenswrapper[4558]: I0120 17:01:34.410080 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-nb-0\" (UID: \"2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf\") device mount path \"/mnt/openstack/pv08\"" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:01:34 crc kubenswrapper[4558]: I0120 17:01:34.410398 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:01:34 crc kubenswrapper[4558]: I0120 17:01:34.410626 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf-config\") pod \"ovsdbserver-nb-0\" (UID: \"2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:01:34 crc kubenswrapper[4558]: I0120 17:01:34.410890 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:01:34 crc kubenswrapper[4558]: I0120 17:01:34.414954 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:01:34 crc kubenswrapper[4558]: I0120 17:01:34.415056 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:01:34 crc kubenswrapper[4558]: I0120 17:01:34.415739 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:01:34 crc kubenswrapper[4558]: I0120 17:01:34.424069 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2w5d8\" (UniqueName: \"kubernetes.io/projected/2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf-kube-api-access-2w5d8\") pod \"ovsdbserver-nb-0\" (UID: \"2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:01:34 crc kubenswrapper[4558]: I0120 17:01:34.426547 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-nb-0\" (UID: \"2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:01:34 crc kubenswrapper[4558]: I0120 17:01:34.534519 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:01:34 crc kubenswrapper[4558]: I0120 17:01:34.905919 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:01:34 crc kubenswrapper[4558]: W0120 17:01:34.910107 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2290f4e8_39b3_49d2_8ef6_8fa94c9e3cdf.slice/crio-0cde45e90d60dd6a806a5f8b43101fdb431dbc690d894e7c9e5ad65d02015fdc WatchSource:0}: Error finding container 0cde45e90d60dd6a806a5f8b43101fdb431dbc690d894e7c9e5ad65d02015fdc: Status 404 returned error can't find the container with id 0cde45e90d60dd6a806a5f8b43101fdb431dbc690d894e7c9e5ad65d02015fdc Jan 20 17:01:35 crc kubenswrapper[4558]: I0120 17:01:35.245262 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf","Type":"ContainerStarted","Data":"6b320eb22df0758cdab9680050c01fd3a7007ed6383b4dc60f038863e7bdaedc"} Jan 20 17:01:35 crc kubenswrapper[4558]: I0120 17:01:35.245491 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf","Type":"ContainerStarted","Data":"a7862609eb045b31d1cf0e6477c1e8599625e319c33baf93c6ef05261a3fad12"} Jan 20 17:01:35 crc kubenswrapper[4558]: I0120 17:01:35.245502 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf","Type":"ContainerStarted","Data":"0cde45e90d60dd6a806a5f8b43101fdb431dbc690d894e7c9e5ad65d02015fdc"} Jan 20 17:01:35 crc kubenswrapper[4558]: I0120 17:01:35.270198 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ovsdbserver-nb-0" podStartSLOduration=1.27018416 podStartE2EDuration="1.27018416s" podCreationTimestamp="2026-01-20 17:01:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:01:35.266222359 +0000 UTC m=+1189.026560326" watchObservedRunningTime="2026-01-20 17:01:35.27018416 +0000 UTC m=+1189.030522127" Jan 20 17:01:35 crc kubenswrapper[4558]: I0120 17:01:35.545660 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:01:35 crc kubenswrapper[4558]: I0120 17:01:35.554396 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:01:35 crc kubenswrapper[4558]: I0120 17:01:35.557474 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovndbcluster-sb-config" Jan 20 17:01:35 crc kubenswrapper[4558]: I0120 17:01:35.557890 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovndbcluster-sb-scripts" Jan 20 17:01:35 crc kubenswrapper[4558]: I0120 17:01:35.557896 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ovndbcluster-sb-ovndbs" Jan 20 17:01:35 crc kubenswrapper[4558]: I0120 17:01:35.558065 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ovncluster-ovndbcluster-sb-dockercfg-xl6bn" Jan 20 17:01:35 crc kubenswrapper[4558]: I0120 17:01:35.564129 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:01:35 crc kubenswrapper[4558]: I0120 17:01:35.627414 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage19-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage19-crc\") pod \"ovsdbserver-sb-0\" (UID: \"37e84078-defe-4c61-ac89-878165827bba\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:01:35 crc kubenswrapper[4558]: I0120 17:01:35.627459 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/37e84078-defe-4c61-ac89-878165827bba-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"37e84078-defe-4c61-ac89-878165827bba\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:01:35 crc kubenswrapper[4558]: I0120 17:01:35.627506 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/37e84078-defe-4c61-ac89-878165827bba-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"37e84078-defe-4c61-ac89-878165827bba\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:01:35 crc kubenswrapper[4558]: I0120 17:01:35.627545 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/37e84078-defe-4c61-ac89-878165827bba-config\") pod \"ovsdbserver-sb-0\" (UID: \"37e84078-defe-4c61-ac89-878165827bba\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:01:35 crc kubenswrapper[4558]: I0120 17:01:35.627564 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/37e84078-defe-4c61-ac89-878165827bba-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"37e84078-defe-4c61-ac89-878165827bba\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:01:35 crc kubenswrapper[4558]: I0120 17:01:35.627595 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37e84078-defe-4c61-ac89-878165827bba-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"37e84078-defe-4c61-ac89-878165827bba\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:01:35 crc kubenswrapper[4558]: I0120 17:01:35.627615 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mqlxw\" (UniqueName: 
\"kubernetes.io/projected/37e84078-defe-4c61-ac89-878165827bba-kube-api-access-mqlxw\") pod \"ovsdbserver-sb-0\" (UID: \"37e84078-defe-4c61-ac89-878165827bba\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:01:35 crc kubenswrapper[4558]: I0120 17:01:35.627741 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/37e84078-defe-4c61-ac89-878165827bba-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"37e84078-defe-4c61-ac89-878165827bba\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:01:35 crc kubenswrapper[4558]: I0120 17:01:35.728852 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/37e84078-defe-4c61-ac89-878165827bba-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"37e84078-defe-4c61-ac89-878165827bba\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:01:35 crc kubenswrapper[4558]: I0120 17:01:35.728913 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage19-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage19-crc\") pod \"ovsdbserver-sb-0\" (UID: \"37e84078-defe-4c61-ac89-878165827bba\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:01:35 crc kubenswrapper[4558]: I0120 17:01:35.728937 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/37e84078-defe-4c61-ac89-878165827bba-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"37e84078-defe-4c61-ac89-878165827bba\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:01:35 crc kubenswrapper[4558]: I0120 17:01:35.728964 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/37e84078-defe-4c61-ac89-878165827bba-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"37e84078-defe-4c61-ac89-878165827bba\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:01:35 crc kubenswrapper[4558]: I0120 17:01:35.728995 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/37e84078-defe-4c61-ac89-878165827bba-config\") pod \"ovsdbserver-sb-0\" (UID: \"37e84078-defe-4c61-ac89-878165827bba\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:01:35 crc kubenswrapper[4558]: I0120 17:01:35.729010 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/37e84078-defe-4c61-ac89-878165827bba-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"37e84078-defe-4c61-ac89-878165827bba\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:01:35 crc kubenswrapper[4558]: I0120 17:01:35.729027 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37e84078-defe-4c61-ac89-878165827bba-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"37e84078-defe-4c61-ac89-878165827bba\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:01:35 crc kubenswrapper[4558]: I0120 17:01:35.729048 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mqlxw\" (UniqueName: \"kubernetes.io/projected/37e84078-defe-4c61-ac89-878165827bba-kube-api-access-mqlxw\") pod \"ovsdbserver-sb-0\" (UID: \"37e84078-defe-4c61-ac89-878165827bba\") " 
pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:01:35 crc kubenswrapper[4558]: I0120 17:01:35.729427 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage19-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage19-crc\") pod \"ovsdbserver-sb-0\" (UID: \"37e84078-defe-4c61-ac89-878165827bba\") device mount path \"/mnt/openstack/pv19\"" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:01:35 crc kubenswrapper[4558]: I0120 17:01:35.729570 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/37e84078-defe-4c61-ac89-878165827bba-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"37e84078-defe-4c61-ac89-878165827bba\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:01:35 crc kubenswrapper[4558]: I0120 17:01:35.729954 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/37e84078-defe-4c61-ac89-878165827bba-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"37e84078-defe-4c61-ac89-878165827bba\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:01:35 crc kubenswrapper[4558]: I0120 17:01:35.731746 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/37e84078-defe-4c61-ac89-878165827bba-config\") pod \"ovsdbserver-sb-0\" (UID: \"37e84078-defe-4c61-ac89-878165827bba\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:01:35 crc kubenswrapper[4558]: I0120 17:01:35.732278 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37e84078-defe-4c61-ac89-878165827bba-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"37e84078-defe-4c61-ac89-878165827bba\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:01:35 crc kubenswrapper[4558]: I0120 17:01:35.732531 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/37e84078-defe-4c61-ac89-878165827bba-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"37e84078-defe-4c61-ac89-878165827bba\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:01:35 crc kubenswrapper[4558]: I0120 17:01:35.733507 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/37e84078-defe-4c61-ac89-878165827bba-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"37e84078-defe-4c61-ac89-878165827bba\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:01:35 crc kubenswrapper[4558]: I0120 17:01:35.742220 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mqlxw\" (UniqueName: \"kubernetes.io/projected/37e84078-defe-4c61-ac89-878165827bba-kube-api-access-mqlxw\") pod \"ovsdbserver-sb-0\" (UID: \"37e84078-defe-4c61-ac89-878165827bba\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:01:35 crc kubenswrapper[4558]: I0120 17:01:35.746195 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage19-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage19-crc\") pod \"ovsdbserver-sb-0\" (UID: \"37e84078-defe-4c61-ac89-878165827bba\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:01:35 crc kubenswrapper[4558]: I0120 17:01:35.887770 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:01:36 crc kubenswrapper[4558]: I0120 17:01:36.254895 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:01:36 crc kubenswrapper[4558]: W0120 17:01:36.257956 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod37e84078_defe_4c61_ac89_878165827bba.slice/crio-27b39e26ce833ccaa98b7846021157d8c65814486b55b98a3db00426ce6a14c4 WatchSource:0}: Error finding container 27b39e26ce833ccaa98b7846021157d8c65814486b55b98a3db00426ce6a14c4: Status 404 returned error can't find the container with id 27b39e26ce833ccaa98b7846021157d8c65814486b55b98a3db00426ce6a14c4 Jan 20 17:01:36 crc kubenswrapper[4558]: I0120 17:01:36.642689 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:01:36 crc kubenswrapper[4558]: I0120 17:01:36.642900 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:01:36 crc kubenswrapper[4558]: I0120 17:01:36.693308 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:01:37 crc kubenswrapper[4558]: I0120 17:01:37.257099 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"37e84078-defe-4c61-ac89-878165827bba","Type":"ContainerStarted","Data":"79b080f284ed7a9adbc1915885043c35d4710d77244162383b4e53aa16558542"} Jan 20 17:01:37 crc kubenswrapper[4558]: I0120 17:01:37.257137 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"37e84078-defe-4c61-ac89-878165827bba","Type":"ContainerStarted","Data":"b7a6ed7425377191f27273a9edf53dbd7c7ed2de00fbc9623792e4bfc9ffed21"} Jan 20 17:01:37 crc kubenswrapper[4558]: I0120 17:01:37.257146 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"37e84078-defe-4c61-ac89-878165827bba","Type":"ContainerStarted","Data":"27b39e26ce833ccaa98b7846021157d8c65814486b55b98a3db00426ce6a14c4"} Jan 20 17:01:37 crc kubenswrapper[4558]: I0120 17:01:37.305282 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:01:37 crc kubenswrapper[4558]: I0120 17:01:37.321816 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ovsdbserver-sb-0" podStartSLOduration=2.321804743 podStartE2EDuration="2.321804743s" podCreationTimestamp="2026-01-20 17:01:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:01:37.27076222 +0000 UTC m=+1191.031100188" watchObservedRunningTime="2026-01-20 17:01:37.321804743 +0000 UTC m=+1191.082142710" Jan 20 17:01:37 crc kubenswrapper[4558]: I0120 17:01:37.534893 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:01:37 crc kubenswrapper[4558]: I0120 17:01:37.961940 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:01:37 crc kubenswrapper[4558]: I0120 17:01:37.961971 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:01:38 crc kubenswrapper[4558]: I0120 17:01:38.014290 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:01:38 crc kubenswrapper[4558]: I0120 17:01:38.309510 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:01:38 crc kubenswrapper[4558]: I0120 17:01:38.887873 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:01:38 crc kubenswrapper[4558]: I0120 17:01:38.920140 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-db-create-pxkff"] Jan 20 17:01:38 crc kubenswrapper[4558]: I0120 17:01:38.920939 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-db-create-pxkff" Jan 20 17:01:38 crc kubenswrapper[4558]: I0120 17:01:38.927119 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-db-create-pxkff"] Jan 20 17:01:39 crc kubenswrapper[4558]: I0120 17:01:39.009967 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-6218-account-create-update-gbcj6"] Jan 20 17:01:39 crc kubenswrapper[4558]: I0120 17:01:39.010824 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-6218-account-create-update-gbcj6" Jan 20 17:01:39 crc kubenswrapper[4558]: I0120 17:01:39.013619 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-db-secret" Jan 20 17:01:39 crc kubenswrapper[4558]: I0120 17:01:39.017255 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-6218-account-create-update-gbcj6"] Jan 20 17:01:39 crc kubenswrapper[4558]: I0120 17:01:39.072810 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/79ceb3e8-184f-4c1d-bceb-89c6f69ee670-operator-scripts\") pod \"keystone-db-create-pxkff\" (UID: \"79ceb3e8-184f-4c1d-bceb-89c6f69ee670\") " pod="openstack-kuttl-tests/keystone-db-create-pxkff" Jan 20 17:01:39 crc kubenswrapper[4558]: I0120 17:01:39.072961 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bb2sm\" (UniqueName: \"kubernetes.io/projected/79ceb3e8-184f-4c1d-bceb-89c6f69ee670-kube-api-access-bb2sm\") pod \"keystone-db-create-pxkff\" (UID: \"79ceb3e8-184f-4c1d-bceb-89c6f69ee670\") " pod="openstack-kuttl-tests/keystone-db-create-pxkff" Jan 20 17:01:39 crc kubenswrapper[4558]: I0120 17:01:39.174714 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/79ceb3e8-184f-4c1d-bceb-89c6f69ee670-operator-scripts\") pod \"keystone-db-create-pxkff\" (UID: \"79ceb3e8-184f-4c1d-bceb-89c6f69ee670\") " pod="openstack-kuttl-tests/keystone-db-create-pxkff" Jan 20 17:01:39 crc kubenswrapper[4558]: I0120 17:01:39.174865 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bb2sm\" (UniqueName: \"kubernetes.io/projected/79ceb3e8-184f-4c1d-bceb-89c6f69ee670-kube-api-access-bb2sm\") pod \"keystone-db-create-pxkff\" (UID: \"79ceb3e8-184f-4c1d-bceb-89c6f69ee670\") " pod="openstack-kuttl-tests/keystone-db-create-pxkff" 
Jan 20 17:01:39 crc kubenswrapper[4558]: I0120 17:01:39.174897 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8b64ebf3-99cf-495a-a82d-0a27123f5d7a-operator-scripts\") pod \"keystone-6218-account-create-update-gbcj6\" (UID: \"8b64ebf3-99cf-495a-a82d-0a27123f5d7a\") " pod="openstack-kuttl-tests/keystone-6218-account-create-update-gbcj6" Jan 20 17:01:39 crc kubenswrapper[4558]: I0120 17:01:39.174958 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9cjmp\" (UniqueName: \"kubernetes.io/projected/8b64ebf3-99cf-495a-a82d-0a27123f5d7a-kube-api-access-9cjmp\") pod \"keystone-6218-account-create-update-gbcj6\" (UID: \"8b64ebf3-99cf-495a-a82d-0a27123f5d7a\") " pod="openstack-kuttl-tests/keystone-6218-account-create-update-gbcj6" Jan 20 17:01:39 crc kubenswrapper[4558]: I0120 17:01:39.175429 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/79ceb3e8-184f-4c1d-bceb-89c6f69ee670-operator-scripts\") pod \"keystone-db-create-pxkff\" (UID: \"79ceb3e8-184f-4c1d-bceb-89c6f69ee670\") " pod="openstack-kuttl-tests/keystone-db-create-pxkff" Jan 20 17:01:39 crc kubenswrapper[4558]: I0120 17:01:39.194303 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bb2sm\" (UniqueName: \"kubernetes.io/projected/79ceb3e8-184f-4c1d-bceb-89c6f69ee670-kube-api-access-bb2sm\") pod \"keystone-db-create-pxkff\" (UID: \"79ceb3e8-184f-4c1d-bceb-89c6f69ee670\") " pod="openstack-kuttl-tests/keystone-db-create-pxkff" Jan 20 17:01:39 crc kubenswrapper[4558]: I0120 17:01:39.235539 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-db-create-pxkff" Jan 20 17:01:39 crc kubenswrapper[4558]: I0120 17:01:39.242736 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/placement-db-create-x7dh9"] Jan 20 17:01:39 crc kubenswrapper[4558]: I0120 17:01:39.256779 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-db-create-x7dh9" Jan 20 17:01:39 crc kubenswrapper[4558]: I0120 17:01:39.264556 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-db-create-x7dh9"] Jan 20 17:01:39 crc kubenswrapper[4558]: I0120 17:01:39.276637 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8b64ebf3-99cf-495a-a82d-0a27123f5d7a-operator-scripts\") pod \"keystone-6218-account-create-update-gbcj6\" (UID: \"8b64ebf3-99cf-495a-a82d-0a27123f5d7a\") " pod="openstack-kuttl-tests/keystone-6218-account-create-update-gbcj6" Jan 20 17:01:39 crc kubenswrapper[4558]: I0120 17:01:39.276905 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9cjmp\" (UniqueName: \"kubernetes.io/projected/8b64ebf3-99cf-495a-a82d-0a27123f5d7a-kube-api-access-9cjmp\") pod \"keystone-6218-account-create-update-gbcj6\" (UID: \"8b64ebf3-99cf-495a-a82d-0a27123f5d7a\") " pod="openstack-kuttl-tests/keystone-6218-account-create-update-gbcj6" Jan 20 17:01:39 crc kubenswrapper[4558]: I0120 17:01:39.277724 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8b64ebf3-99cf-495a-a82d-0a27123f5d7a-operator-scripts\") pod \"keystone-6218-account-create-update-gbcj6\" (UID: \"8b64ebf3-99cf-495a-a82d-0a27123f5d7a\") " pod="openstack-kuttl-tests/keystone-6218-account-create-update-gbcj6" Jan 20 17:01:39 crc kubenswrapper[4558]: I0120 17:01:39.305611 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9cjmp\" (UniqueName: \"kubernetes.io/projected/8b64ebf3-99cf-495a-a82d-0a27123f5d7a-kube-api-access-9cjmp\") pod \"keystone-6218-account-create-update-gbcj6\" (UID: \"8b64ebf3-99cf-495a-a82d-0a27123f5d7a\") " pod="openstack-kuttl-tests/keystone-6218-account-create-update-gbcj6" Jan 20 17:01:39 crc kubenswrapper[4558]: I0120 17:01:39.323427 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-6218-account-create-update-gbcj6" Jan 20 17:01:39 crc kubenswrapper[4558]: I0120 17:01:39.335348 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/placement-6cbc-account-create-update-n6t9t"] Jan 20 17:01:39 crc kubenswrapper[4558]: I0120 17:01:39.336157 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-6cbc-account-create-update-n6t9t" Jan 20 17:01:39 crc kubenswrapper[4558]: I0120 17:01:39.338330 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"placement-db-secret" Jan 20 17:01:39 crc kubenswrapper[4558]: I0120 17:01:39.345066 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-6cbc-account-create-update-n6t9t"] Jan 20 17:01:39 crc kubenswrapper[4558]: I0120 17:01:39.378371 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s2qqn\" (UniqueName: \"kubernetes.io/projected/affe1aaa-7cef-46d2-82f8-1205ce2ff96b-kube-api-access-s2qqn\") pod \"placement-db-create-x7dh9\" (UID: \"affe1aaa-7cef-46d2-82f8-1205ce2ff96b\") " pod="openstack-kuttl-tests/placement-db-create-x7dh9" Jan 20 17:01:39 crc kubenswrapper[4558]: I0120 17:01:39.378508 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/affe1aaa-7cef-46d2-82f8-1205ce2ff96b-operator-scripts\") pod \"placement-db-create-x7dh9\" (UID: \"affe1aaa-7cef-46d2-82f8-1205ce2ff96b\") " pod="openstack-kuttl-tests/placement-db-create-x7dh9" Jan 20 17:01:39 crc kubenswrapper[4558]: I0120 17:01:39.480128 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0d6ee4cd-ee0d-45e6-9f55-92a5ffa7b7d7-operator-scripts\") pod \"placement-6cbc-account-create-update-n6t9t\" (UID: \"0d6ee4cd-ee0d-45e6-9f55-92a5ffa7b7d7\") " pod="openstack-kuttl-tests/placement-6cbc-account-create-update-n6t9t" Jan 20 17:01:39 crc kubenswrapper[4558]: I0120 17:01:39.480245 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sjf99\" (UniqueName: \"kubernetes.io/projected/0d6ee4cd-ee0d-45e6-9f55-92a5ffa7b7d7-kube-api-access-sjf99\") pod \"placement-6cbc-account-create-update-n6t9t\" (UID: \"0d6ee4cd-ee0d-45e6-9f55-92a5ffa7b7d7\") " pod="openstack-kuttl-tests/placement-6cbc-account-create-update-n6t9t" Jan 20 17:01:39 crc kubenswrapper[4558]: I0120 17:01:39.480768 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2qqn\" (UniqueName: \"kubernetes.io/projected/affe1aaa-7cef-46d2-82f8-1205ce2ff96b-kube-api-access-s2qqn\") pod \"placement-db-create-x7dh9\" (UID: \"affe1aaa-7cef-46d2-82f8-1205ce2ff96b\") " pod="openstack-kuttl-tests/placement-db-create-x7dh9" Jan 20 17:01:39 crc kubenswrapper[4558]: I0120 17:01:39.480887 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/affe1aaa-7cef-46d2-82f8-1205ce2ff96b-operator-scripts\") pod \"placement-db-create-x7dh9\" (UID: \"affe1aaa-7cef-46d2-82f8-1205ce2ff96b\") " pod="openstack-kuttl-tests/placement-db-create-x7dh9" Jan 20 17:01:39 crc kubenswrapper[4558]: I0120 17:01:39.481708 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/affe1aaa-7cef-46d2-82f8-1205ce2ff96b-operator-scripts\") pod \"placement-db-create-x7dh9\" (UID: \"affe1aaa-7cef-46d2-82f8-1205ce2ff96b\") " pod="openstack-kuttl-tests/placement-db-create-x7dh9" Jan 20 17:01:39 crc kubenswrapper[4558]: I0120 17:01:39.495348 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"kube-api-access-s2qqn\" (UniqueName: \"kubernetes.io/projected/affe1aaa-7cef-46d2-82f8-1205ce2ff96b-kube-api-access-s2qqn\") pod \"placement-db-create-x7dh9\" (UID: \"affe1aaa-7cef-46d2-82f8-1205ce2ff96b\") " pod="openstack-kuttl-tests/placement-db-create-x7dh9" Jan 20 17:01:39 crc kubenswrapper[4558]: I0120 17:01:39.535473 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:01:39 crc kubenswrapper[4558]: I0120 17:01:39.582711 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sjf99\" (UniqueName: \"kubernetes.io/projected/0d6ee4cd-ee0d-45e6-9f55-92a5ffa7b7d7-kube-api-access-sjf99\") pod \"placement-6cbc-account-create-update-n6t9t\" (UID: \"0d6ee4cd-ee0d-45e6-9f55-92a5ffa7b7d7\") " pod="openstack-kuttl-tests/placement-6cbc-account-create-update-n6t9t" Jan 20 17:01:39 crc kubenswrapper[4558]: I0120 17:01:39.582898 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0d6ee4cd-ee0d-45e6-9f55-92a5ffa7b7d7-operator-scripts\") pod \"placement-6cbc-account-create-update-n6t9t\" (UID: \"0d6ee4cd-ee0d-45e6-9f55-92a5ffa7b7d7\") " pod="openstack-kuttl-tests/placement-6cbc-account-create-update-n6t9t" Jan 20 17:01:39 crc kubenswrapper[4558]: I0120 17:01:39.583564 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0d6ee4cd-ee0d-45e6-9f55-92a5ffa7b7d7-operator-scripts\") pod \"placement-6cbc-account-create-update-n6t9t\" (UID: \"0d6ee4cd-ee0d-45e6-9f55-92a5ffa7b7d7\") " pod="openstack-kuttl-tests/placement-6cbc-account-create-update-n6t9t" Jan 20 17:01:39 crc kubenswrapper[4558]: I0120 17:01:39.595357 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sjf99\" (UniqueName: \"kubernetes.io/projected/0d6ee4cd-ee0d-45e6-9f55-92a5ffa7b7d7-kube-api-access-sjf99\") pod \"placement-6cbc-account-create-update-n6t9t\" (UID: \"0d6ee4cd-ee0d-45e6-9f55-92a5ffa7b7d7\") " pod="openstack-kuttl-tests/placement-6cbc-account-create-update-n6t9t" Jan 20 17:01:39 crc kubenswrapper[4558]: W0120 17:01:39.663065 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod79ceb3e8_184f_4c1d_bceb_89c6f69ee670.slice/crio-e4529312cb954a3cef9eadf2c9b962115231e6927efd437e4a874c7c5ff17fbb WatchSource:0}: Error finding container e4529312cb954a3cef9eadf2c9b962115231e6927efd437e4a874c7c5ff17fbb: Status 404 returned error can't find the container with id e4529312cb954a3cef9eadf2c9b962115231e6927efd437e4a874c7c5ff17fbb Jan 20 17:01:39 crc kubenswrapper[4558]: I0120 17:01:39.663223 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-db-create-pxkff"] Jan 20 17:01:39 crc kubenswrapper[4558]: I0120 17:01:39.691678 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-db-create-x7dh9" Jan 20 17:01:39 crc kubenswrapper[4558]: I0120 17:01:39.701582 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-6cbc-account-create-update-n6t9t" Jan 20 17:01:39 crc kubenswrapper[4558]: I0120 17:01:39.726572 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-6218-account-create-update-gbcj6"] Jan 20 17:01:39 crc kubenswrapper[4558]: W0120 17:01:39.729827 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8b64ebf3_99cf_495a_a82d_0a27123f5d7a.slice/crio-ea10752a26ee8847f254923c6e3ac85df1daf94cc4aa8a437d53f02e3f45140c WatchSource:0}: Error finding container ea10752a26ee8847f254923c6e3ac85df1daf94cc4aa8a437d53f02e3f45140c: Status 404 returned error can't find the container with id ea10752a26ee8847f254923c6e3ac85df1daf94cc4aa8a437d53f02e3f45140c Jan 20 17:01:40 crc kubenswrapper[4558]: I0120 17:01:40.059501 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-db-create-x7dh9"] Jan 20 17:01:40 crc kubenswrapper[4558]: W0120 17:01:40.062382 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaffe1aaa_7cef_46d2_82f8_1205ce2ff96b.slice/crio-45bfd6626265be3b54e209ad9589b0d6eb617072eb3589fd093f42565878df4a WatchSource:0}: Error finding container 45bfd6626265be3b54e209ad9589b0d6eb617072eb3589fd093f42565878df4a: Status 404 returned error can't find the container with id 45bfd6626265be3b54e209ad9589b0d6eb617072eb3589fd093f42565878df4a Jan 20 17:01:40 crc kubenswrapper[4558]: I0120 17:01:40.117553 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-6cbc-account-create-update-n6t9t"] Jan 20 17:01:40 crc kubenswrapper[4558]: W0120 17:01:40.130139 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0d6ee4cd_ee0d_45e6_9f55_92a5ffa7b7d7.slice/crio-5ef0acccfd52a8e7acea8ee27cf4d83be5325f37c086f04806b25678490a5fbf WatchSource:0}: Error finding container 5ef0acccfd52a8e7acea8ee27cf4d83be5325f37c086f04806b25678490a5fbf: Status 404 returned error can't find the container with id 5ef0acccfd52a8e7acea8ee27cf4d83be5325f37c086f04806b25678490a5fbf Jan 20 17:01:40 crc kubenswrapper[4558]: I0120 17:01:40.302556 4558 generic.go:334] "Generic (PLEG): container finished" podID="affe1aaa-7cef-46d2-82f8-1205ce2ff96b" containerID="185a881c6adc45360908ee6b237afe1d3e54a3888c07e7e92e0b666fe25a7f54" exitCode=0 Jan 20 17:01:40 crc kubenswrapper[4558]: I0120 17:01:40.302607 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-create-x7dh9" event={"ID":"affe1aaa-7cef-46d2-82f8-1205ce2ff96b","Type":"ContainerDied","Data":"185a881c6adc45360908ee6b237afe1d3e54a3888c07e7e92e0b666fe25a7f54"} Jan 20 17:01:40 crc kubenswrapper[4558]: I0120 17:01:40.302630 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-create-x7dh9" event={"ID":"affe1aaa-7cef-46d2-82f8-1205ce2ff96b","Type":"ContainerStarted","Data":"45bfd6626265be3b54e209ad9589b0d6eb617072eb3589fd093f42565878df4a"} Jan 20 17:01:40 crc kubenswrapper[4558]: I0120 17:01:40.304085 4558 generic.go:334] "Generic (PLEG): container finished" podID="8b64ebf3-99cf-495a-a82d-0a27123f5d7a" containerID="d5c274045b643e68cde6efde01b4bc5498d5fc7ca7cc737f8b037b3d7603c41b" exitCode=0 Jan 20 17:01:40 crc kubenswrapper[4558]: I0120 17:01:40.304122 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-kuttl-tests/keystone-6218-account-create-update-gbcj6" event={"ID":"8b64ebf3-99cf-495a-a82d-0a27123f5d7a","Type":"ContainerDied","Data":"d5c274045b643e68cde6efde01b4bc5498d5fc7ca7cc737f8b037b3d7603c41b"} Jan 20 17:01:40 crc kubenswrapper[4558]: I0120 17:01:40.304304 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-6218-account-create-update-gbcj6" event={"ID":"8b64ebf3-99cf-495a-a82d-0a27123f5d7a","Type":"ContainerStarted","Data":"ea10752a26ee8847f254923c6e3ac85df1daf94cc4aa8a437d53f02e3f45140c"} Jan 20 17:01:40 crc kubenswrapper[4558]: I0120 17:01:40.305500 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-6cbc-account-create-update-n6t9t" event={"ID":"0d6ee4cd-ee0d-45e6-9f55-92a5ffa7b7d7","Type":"ContainerStarted","Data":"90086f287523470f86b4f198e93742684b994c6b5d64645ae662556bcec04dd6"} Jan 20 17:01:40 crc kubenswrapper[4558]: I0120 17:01:40.305579 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-6cbc-account-create-update-n6t9t" event={"ID":"0d6ee4cd-ee0d-45e6-9f55-92a5ffa7b7d7","Type":"ContainerStarted","Data":"5ef0acccfd52a8e7acea8ee27cf4d83be5325f37c086f04806b25678490a5fbf"} Jan 20 17:01:40 crc kubenswrapper[4558]: I0120 17:01:40.306630 4558 generic.go:334] "Generic (PLEG): container finished" podID="79ceb3e8-184f-4c1d-bceb-89c6f69ee670" containerID="c114c7e9df385088d0043b3929229f461356f434bf7a638790b4aa8b5de9a942" exitCode=0 Jan 20 17:01:40 crc kubenswrapper[4558]: I0120 17:01:40.306664 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-create-pxkff" event={"ID":"79ceb3e8-184f-4c1d-bceb-89c6f69ee670","Type":"ContainerDied","Data":"c114c7e9df385088d0043b3929229f461356f434bf7a638790b4aa8b5de9a942"} Jan 20 17:01:40 crc kubenswrapper[4558]: I0120 17:01:40.306687 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-create-pxkff" event={"ID":"79ceb3e8-184f-4c1d-bceb-89c6f69ee670","Type":"ContainerStarted","Data":"e4529312cb954a3cef9eadf2c9b962115231e6927efd437e4a874c7c5ff17fbb"} Jan 20 17:01:40 crc kubenswrapper[4558]: I0120 17:01:40.334741 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/placement-6cbc-account-create-update-n6t9t" podStartSLOduration=1.334730272 podStartE2EDuration="1.334730272s" podCreationTimestamp="2026-01-20 17:01:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:01:40.33248523 +0000 UTC m=+1194.092823197" watchObservedRunningTime="2026-01-20 17:01:40.334730272 +0000 UTC m=+1194.095068238" Jan 20 17:01:40 crc kubenswrapper[4558]: I0120 17:01:40.562082 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:01:40 crc kubenswrapper[4558]: I0120 17:01:40.587380 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:01:40 crc kubenswrapper[4558]: I0120 17:01:40.861571 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/swift-storage-0"] Jan 20 17:01:40 crc kubenswrapper[4558]: I0120 17:01:40.865669 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:01:40 crc kubenswrapper[4558]: I0120 17:01:40.867605 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"swift-ring-files" Jan 20 17:01:40 crc kubenswrapper[4558]: I0120 17:01:40.867775 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"swift-conf" Jan 20 17:01:40 crc kubenswrapper[4558]: I0120 17:01:40.867950 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"swift-storage-config-data" Jan 20 17:01:40 crc kubenswrapper[4558]: I0120 17:01:40.868205 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"swift-swift-dockercfg-d5jgf" Jan 20 17:01:40 crc kubenswrapper[4558]: I0120 17:01:40.881961 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-storage-0"] Jan 20 17:01:40 crc kubenswrapper[4558]: I0120 17:01:40.888857 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:01:40 crc kubenswrapper[4558]: I0120 17:01:40.930515 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:01:40 crc kubenswrapper[4558]: I0120 17:01:40.981985 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-sk2wr"] Jan 20 17:01:40 crc kubenswrapper[4558]: I0120 17:01:40.982822 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-ring-rebalance-sk2wr" Jan 20 17:01:40 crc kubenswrapper[4558]: I0120 17:01:40.984768 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"swift-proxy-config-data" Jan 20 17:01:40 crc kubenswrapper[4558]: I0120 17:01:40.984835 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"swift-ring-scripts" Jan 20 17:01:40 crc kubenswrapper[4558]: I0120 17:01:40.984773 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"swift-ring-config-data" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.006823 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/10edb733-8fab-4543-ad29-3568e3de5aea-cache\") pod \"swift-storage-0\" (UID: \"10edb733-8fab-4543-ad29-3568e3de5aea\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.006884 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/10edb733-8fab-4543-ad29-3568e3de5aea-etc-swift\") pod \"swift-storage-0\" (UID: \"10edb733-8fab-4543-ad29-3568e3de5aea\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.006917 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/10edb733-8fab-4543-ad29-3568e3de5aea-lock\") pod \"swift-storage-0\" (UID: \"10edb733-8fab-4543-ad29-3568e3de5aea\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.006957 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xkq7g\" (UniqueName: 
\"kubernetes.io/projected/10edb733-8fab-4543-ad29-3568e3de5aea-kube-api-access-xkq7g\") pod \"swift-storage-0\" (UID: \"10edb733-8fab-4543-ad29-3568e3de5aea\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.006985 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"swift-storage-0\" (UID: \"10edb733-8fab-4543-ad29-3568e3de5aea\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.014930 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-sk2wr"] Jan 20 17:01:41 crc kubenswrapper[4558]: E0120 17:01:41.015450 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[combined-ca-bundle dispersionconf etc-swift kube-api-access-gkdzq ring-data-devices scripts swiftconf], unattached volumes=[], failed to process volumes=[combined-ca-bundle dispersionconf etc-swift kube-api-access-gkdzq ring-data-devices scripts swiftconf]: context canceled" pod="openstack-kuttl-tests/swift-ring-rebalance-sk2wr" podUID="88cab55b-95bb-45b7-95d3-4a1d68312e49" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.028288 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-v4kx6"] Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.029072 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-ring-rebalance-v4kx6" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.038394 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-v4kx6"] Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.047489 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-sk2wr"] Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.108400 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/88cab55b-95bb-45b7-95d3-4a1d68312e49-ring-data-devices\") pod \"swift-ring-rebalance-sk2wr\" (UID: \"88cab55b-95bb-45b7-95d3-4a1d68312e49\") " pod="openstack-kuttl-tests/swift-ring-rebalance-sk2wr" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.108489 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/10edb733-8fab-4543-ad29-3568e3de5aea-cache\") pod \"swift-storage-0\" (UID: \"10edb733-8fab-4543-ad29-3568e3de5aea\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.108520 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sc7g5\" (UniqueName: \"kubernetes.io/projected/07a241d6-001d-4677-a411-58e84c8f21ba-kube-api-access-sc7g5\") pod \"swift-ring-rebalance-v4kx6\" (UID: \"07a241d6-001d-4677-a411-58e84c8f21ba\") " pod="openstack-kuttl-tests/swift-ring-rebalance-v4kx6" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.108543 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/07a241d6-001d-4677-a411-58e84c8f21ba-dispersionconf\") pod \"swift-ring-rebalance-v4kx6\" (UID: \"07a241d6-001d-4677-a411-58e84c8f21ba\") " 
pod="openstack-kuttl-tests/swift-ring-rebalance-v4kx6" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.108569 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/88cab55b-95bb-45b7-95d3-4a1d68312e49-etc-swift\") pod \"swift-ring-rebalance-sk2wr\" (UID: \"88cab55b-95bb-45b7-95d3-4a1d68312e49\") " pod="openstack-kuttl-tests/swift-ring-rebalance-sk2wr" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.108586 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/88cab55b-95bb-45b7-95d3-4a1d68312e49-swiftconf\") pod \"swift-ring-rebalance-sk2wr\" (UID: \"88cab55b-95bb-45b7-95d3-4a1d68312e49\") " pod="openstack-kuttl-tests/swift-ring-rebalance-sk2wr" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.108652 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/88cab55b-95bb-45b7-95d3-4a1d68312e49-dispersionconf\") pod \"swift-ring-rebalance-sk2wr\" (UID: \"88cab55b-95bb-45b7-95d3-4a1d68312e49\") " pod="openstack-kuttl-tests/swift-ring-rebalance-sk2wr" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.108679 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/07a241d6-001d-4677-a411-58e84c8f21ba-ring-data-devices\") pod \"swift-ring-rebalance-v4kx6\" (UID: \"07a241d6-001d-4677-a411-58e84c8f21ba\") " pod="openstack-kuttl-tests/swift-ring-rebalance-v4kx6" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.108704 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/10edb733-8fab-4543-ad29-3568e3de5aea-etc-swift\") pod \"swift-storage-0\" (UID: \"10edb733-8fab-4543-ad29-3568e3de5aea\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.108756 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/10edb733-8fab-4543-ad29-3568e3de5aea-lock\") pod \"swift-storage-0\" (UID: \"10edb733-8fab-4543-ad29-3568e3de5aea\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.108777 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/88cab55b-95bb-45b7-95d3-4a1d68312e49-scripts\") pod \"swift-ring-rebalance-sk2wr\" (UID: \"88cab55b-95bb-45b7-95d3-4a1d68312e49\") " pod="openstack-kuttl-tests/swift-ring-rebalance-sk2wr" Jan 20 17:01:41 crc kubenswrapper[4558]: E0120 17:01:41.108793 4558 projected.go:288] Couldn't get configMap openstack-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 20 17:01:41 crc kubenswrapper[4558]: E0120 17:01:41.108814 4558 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Jan 20 17:01:41 crc kubenswrapper[4558]: E0120 17:01:41.108854 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/10edb733-8fab-4543-ad29-3568e3de5aea-etc-swift podName:10edb733-8fab-4543-ad29-3568e3de5aea nodeName:}" failed. 
No retries permitted until 2026-01-20 17:01:41.608840114 +0000 UTC m=+1195.369178081 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/10edb733-8fab-4543-ad29-3568e3de5aea-etc-swift") pod "swift-storage-0" (UID: "10edb733-8fab-4543-ad29-3568e3de5aea") : configmap "swift-ring-files" not found Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.108839 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/10edb733-8fab-4543-ad29-3568e3de5aea-cache\") pod \"swift-storage-0\" (UID: \"10edb733-8fab-4543-ad29-3568e3de5aea\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.108877 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xkq7g\" (UniqueName: \"kubernetes.io/projected/10edb733-8fab-4543-ad29-3568e3de5aea-kube-api-access-xkq7g\") pod \"swift-storage-0\" (UID: \"10edb733-8fab-4543-ad29-3568e3de5aea\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.108918 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/07a241d6-001d-4677-a411-58e84c8f21ba-swiftconf\") pod \"swift-ring-rebalance-v4kx6\" (UID: \"07a241d6-001d-4677-a411-58e84c8f21ba\") " pod="openstack-kuttl-tests/swift-ring-rebalance-v4kx6" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.108949 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/07a241d6-001d-4677-a411-58e84c8f21ba-scripts\") pod \"swift-ring-rebalance-v4kx6\" (UID: \"07a241d6-001d-4677-a411-58e84c8f21ba\") " pod="openstack-kuttl-tests/swift-ring-rebalance-v4kx6" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.108968 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gkdzq\" (UniqueName: \"kubernetes.io/projected/88cab55b-95bb-45b7-95d3-4a1d68312e49-kube-api-access-gkdzq\") pod \"swift-ring-rebalance-sk2wr\" (UID: \"88cab55b-95bb-45b7-95d3-4a1d68312e49\") " pod="openstack-kuttl-tests/swift-ring-rebalance-sk2wr" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.109005 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"swift-storage-0\" (UID: \"10edb733-8fab-4543-ad29-3568e3de5aea\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.109032 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/07a241d6-001d-4677-a411-58e84c8f21ba-combined-ca-bundle\") pod \"swift-ring-rebalance-v4kx6\" (UID: \"07a241d6-001d-4677-a411-58e84c8f21ba\") " pod="openstack-kuttl-tests/swift-ring-rebalance-v4kx6" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.109048 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/07a241d6-001d-4677-a411-58e84c8f21ba-etc-swift\") pod \"swift-ring-rebalance-v4kx6\" (UID: \"07a241d6-001d-4677-a411-58e84c8f21ba\") " pod="openstack-kuttl-tests/swift-ring-rebalance-v4kx6" Jan 20 17:01:41 crc 
kubenswrapper[4558]: I0120 17:01:41.109071 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88cab55b-95bb-45b7-95d3-4a1d68312e49-combined-ca-bundle\") pod \"swift-ring-rebalance-sk2wr\" (UID: \"88cab55b-95bb-45b7-95d3-4a1d68312e49\") " pod="openstack-kuttl-tests/swift-ring-rebalance-sk2wr" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.109124 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/10edb733-8fab-4543-ad29-3568e3de5aea-lock\") pod \"swift-storage-0\" (UID: \"10edb733-8fab-4543-ad29-3568e3de5aea\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.109243 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"swift-storage-0\" (UID: \"10edb733-8fab-4543-ad29-3568e3de5aea\") device mount path \"/mnt/openstack/pv05\"" pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.126419 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"swift-storage-0\" (UID: \"10edb733-8fab-4543-ad29-3568e3de5aea\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.128283 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xkq7g\" (UniqueName: \"kubernetes.io/projected/10edb733-8fab-4543-ad29-3568e3de5aea-kube-api-access-xkq7g\") pod \"swift-storage-0\" (UID: \"10edb733-8fab-4543-ad29-3568e3de5aea\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.210597 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sc7g5\" (UniqueName: \"kubernetes.io/projected/07a241d6-001d-4677-a411-58e84c8f21ba-kube-api-access-sc7g5\") pod \"swift-ring-rebalance-v4kx6\" (UID: \"07a241d6-001d-4677-a411-58e84c8f21ba\") " pod="openstack-kuttl-tests/swift-ring-rebalance-v4kx6" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.210637 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/07a241d6-001d-4677-a411-58e84c8f21ba-dispersionconf\") pod \"swift-ring-rebalance-v4kx6\" (UID: \"07a241d6-001d-4677-a411-58e84c8f21ba\") " pod="openstack-kuttl-tests/swift-ring-rebalance-v4kx6" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.210668 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/88cab55b-95bb-45b7-95d3-4a1d68312e49-etc-swift\") pod \"swift-ring-rebalance-sk2wr\" (UID: \"88cab55b-95bb-45b7-95d3-4a1d68312e49\") " pod="openstack-kuttl-tests/swift-ring-rebalance-sk2wr" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.210687 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/88cab55b-95bb-45b7-95d3-4a1d68312e49-swiftconf\") pod \"swift-ring-rebalance-sk2wr\" (UID: \"88cab55b-95bb-45b7-95d3-4a1d68312e49\") " pod="openstack-kuttl-tests/swift-ring-rebalance-sk2wr" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.210716 4558 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/88cab55b-95bb-45b7-95d3-4a1d68312e49-dispersionconf\") pod \"swift-ring-rebalance-sk2wr\" (UID: \"88cab55b-95bb-45b7-95d3-4a1d68312e49\") " pod="openstack-kuttl-tests/swift-ring-rebalance-sk2wr" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.210736 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/07a241d6-001d-4677-a411-58e84c8f21ba-ring-data-devices\") pod \"swift-ring-rebalance-v4kx6\" (UID: \"07a241d6-001d-4677-a411-58e84c8f21ba\") " pod="openstack-kuttl-tests/swift-ring-rebalance-v4kx6" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.210785 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/88cab55b-95bb-45b7-95d3-4a1d68312e49-scripts\") pod \"swift-ring-rebalance-sk2wr\" (UID: \"88cab55b-95bb-45b7-95d3-4a1d68312e49\") " pod="openstack-kuttl-tests/swift-ring-rebalance-sk2wr" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.210835 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/07a241d6-001d-4677-a411-58e84c8f21ba-swiftconf\") pod \"swift-ring-rebalance-v4kx6\" (UID: \"07a241d6-001d-4677-a411-58e84c8f21ba\") " pod="openstack-kuttl-tests/swift-ring-rebalance-v4kx6" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.210857 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/07a241d6-001d-4677-a411-58e84c8f21ba-scripts\") pod \"swift-ring-rebalance-v4kx6\" (UID: \"07a241d6-001d-4677-a411-58e84c8f21ba\") " pod="openstack-kuttl-tests/swift-ring-rebalance-v4kx6" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.210873 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gkdzq\" (UniqueName: \"kubernetes.io/projected/88cab55b-95bb-45b7-95d3-4a1d68312e49-kube-api-access-gkdzq\") pod \"swift-ring-rebalance-sk2wr\" (UID: \"88cab55b-95bb-45b7-95d3-4a1d68312e49\") " pod="openstack-kuttl-tests/swift-ring-rebalance-sk2wr" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.210894 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/07a241d6-001d-4677-a411-58e84c8f21ba-combined-ca-bundle\") pod \"swift-ring-rebalance-v4kx6\" (UID: \"07a241d6-001d-4677-a411-58e84c8f21ba\") " pod="openstack-kuttl-tests/swift-ring-rebalance-v4kx6" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.210908 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/07a241d6-001d-4677-a411-58e84c8f21ba-etc-swift\") pod \"swift-ring-rebalance-v4kx6\" (UID: \"07a241d6-001d-4677-a411-58e84c8f21ba\") " pod="openstack-kuttl-tests/swift-ring-rebalance-v4kx6" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.210926 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88cab55b-95bb-45b7-95d3-4a1d68312e49-combined-ca-bundle\") pod \"swift-ring-rebalance-sk2wr\" (UID: \"88cab55b-95bb-45b7-95d3-4a1d68312e49\") " pod="openstack-kuttl-tests/swift-ring-rebalance-sk2wr" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.210959 4558 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/88cab55b-95bb-45b7-95d3-4a1d68312e49-ring-data-devices\") pod \"swift-ring-rebalance-sk2wr\" (UID: \"88cab55b-95bb-45b7-95d3-4a1d68312e49\") " pod="openstack-kuttl-tests/swift-ring-rebalance-sk2wr" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.211124 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/88cab55b-95bb-45b7-95d3-4a1d68312e49-etc-swift\") pod \"swift-ring-rebalance-sk2wr\" (UID: \"88cab55b-95bb-45b7-95d3-4a1d68312e49\") " pod="openstack-kuttl-tests/swift-ring-rebalance-sk2wr" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.211651 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/88cab55b-95bb-45b7-95d3-4a1d68312e49-ring-data-devices\") pod \"swift-ring-rebalance-sk2wr\" (UID: \"88cab55b-95bb-45b7-95d3-4a1d68312e49\") " pod="openstack-kuttl-tests/swift-ring-rebalance-sk2wr" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.211869 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/07a241d6-001d-4677-a411-58e84c8f21ba-etc-swift\") pod \"swift-ring-rebalance-v4kx6\" (UID: \"07a241d6-001d-4677-a411-58e84c8f21ba\") " pod="openstack-kuttl-tests/swift-ring-rebalance-v4kx6" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.211976 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/07a241d6-001d-4677-a411-58e84c8f21ba-ring-data-devices\") pod \"swift-ring-rebalance-v4kx6\" (UID: \"07a241d6-001d-4677-a411-58e84c8f21ba\") " pod="openstack-kuttl-tests/swift-ring-rebalance-v4kx6" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.212021 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/88cab55b-95bb-45b7-95d3-4a1d68312e49-scripts\") pod \"swift-ring-rebalance-sk2wr\" (UID: \"88cab55b-95bb-45b7-95d3-4a1d68312e49\") " pod="openstack-kuttl-tests/swift-ring-rebalance-sk2wr" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.212325 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/07a241d6-001d-4677-a411-58e84c8f21ba-scripts\") pod \"swift-ring-rebalance-v4kx6\" (UID: \"07a241d6-001d-4677-a411-58e84c8f21ba\") " pod="openstack-kuttl-tests/swift-ring-rebalance-v4kx6" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.213924 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/88cab55b-95bb-45b7-95d3-4a1d68312e49-swiftconf\") pod \"swift-ring-rebalance-sk2wr\" (UID: \"88cab55b-95bb-45b7-95d3-4a1d68312e49\") " pod="openstack-kuttl-tests/swift-ring-rebalance-sk2wr" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.214213 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88cab55b-95bb-45b7-95d3-4a1d68312e49-combined-ca-bundle\") pod \"swift-ring-rebalance-sk2wr\" (UID: \"88cab55b-95bb-45b7-95d3-4a1d68312e49\") " pod="openstack-kuttl-tests/swift-ring-rebalance-sk2wr" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.215738 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: 
\"kubernetes.io/secret/07a241d6-001d-4677-a411-58e84c8f21ba-dispersionconf\") pod \"swift-ring-rebalance-v4kx6\" (UID: \"07a241d6-001d-4677-a411-58e84c8f21ba\") " pod="openstack-kuttl-tests/swift-ring-rebalance-v4kx6" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.216922 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/88cab55b-95bb-45b7-95d3-4a1d68312e49-dispersionconf\") pod \"swift-ring-rebalance-sk2wr\" (UID: \"88cab55b-95bb-45b7-95d3-4a1d68312e49\") " pod="openstack-kuttl-tests/swift-ring-rebalance-sk2wr" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.217487 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/07a241d6-001d-4677-a411-58e84c8f21ba-swiftconf\") pod \"swift-ring-rebalance-v4kx6\" (UID: \"07a241d6-001d-4677-a411-58e84c8f21ba\") " pod="openstack-kuttl-tests/swift-ring-rebalance-v4kx6" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.217567 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/07a241d6-001d-4677-a411-58e84c8f21ba-combined-ca-bundle\") pod \"swift-ring-rebalance-v4kx6\" (UID: \"07a241d6-001d-4677-a411-58e84c8f21ba\") " pod="openstack-kuttl-tests/swift-ring-rebalance-v4kx6" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.226576 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gkdzq\" (UniqueName: \"kubernetes.io/projected/88cab55b-95bb-45b7-95d3-4a1d68312e49-kube-api-access-gkdzq\") pod \"swift-ring-rebalance-sk2wr\" (UID: \"88cab55b-95bb-45b7-95d3-4a1d68312e49\") " pod="openstack-kuttl-tests/swift-ring-rebalance-sk2wr" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.227944 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sc7g5\" (UniqueName: \"kubernetes.io/projected/07a241d6-001d-4677-a411-58e84c8f21ba-kube-api-access-sc7g5\") pod \"swift-ring-rebalance-v4kx6\" (UID: \"07a241d6-001d-4677-a411-58e84c8f21ba\") " pod="openstack-kuttl-tests/swift-ring-rebalance-v4kx6" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.313061 4558 generic.go:334] "Generic (PLEG): container finished" podID="0d6ee4cd-ee0d-45e6-9f55-92a5ffa7b7d7" containerID="90086f287523470f86b4f198e93742684b994c6b5d64645ae662556bcec04dd6" exitCode=0 Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.313412 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-6cbc-account-create-update-n6t9t" event={"ID":"0d6ee4cd-ee0d-45e6-9f55-92a5ffa7b7d7","Type":"ContainerDied","Data":"90086f287523470f86b4f198e93742684b994c6b5d64645ae662556bcec04dd6"} Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.313453 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-ring-rebalance-sk2wr" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.323184 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-ring-rebalance-sk2wr" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.346029 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/swift-ring-rebalance-v4kx6" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.412840 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/88cab55b-95bb-45b7-95d3-4a1d68312e49-etc-swift\") pod \"88cab55b-95bb-45b7-95d3-4a1d68312e49\" (UID: \"88cab55b-95bb-45b7-95d3-4a1d68312e49\") " Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.413077 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/88cab55b-95bb-45b7-95d3-4a1d68312e49-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "88cab55b-95bb-45b7-95d3-4a1d68312e49" (UID: "88cab55b-95bb-45b7-95d3-4a1d68312e49"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.413145 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/88cab55b-95bb-45b7-95d3-4a1d68312e49-ring-data-devices\") pod \"88cab55b-95bb-45b7-95d3-4a1d68312e49\" (UID: \"88cab55b-95bb-45b7-95d3-4a1d68312e49\") " Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.413250 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/88cab55b-95bb-45b7-95d3-4a1d68312e49-swiftconf\") pod \"88cab55b-95bb-45b7-95d3-4a1d68312e49\" (UID: \"88cab55b-95bb-45b7-95d3-4a1d68312e49\") " Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.413303 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/88cab55b-95bb-45b7-95d3-4a1d68312e49-dispersionconf\") pod \"88cab55b-95bb-45b7-95d3-4a1d68312e49\" (UID: \"88cab55b-95bb-45b7-95d3-4a1d68312e49\") " Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.413355 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88cab55b-95bb-45b7-95d3-4a1d68312e49-combined-ca-bundle\") pod \"88cab55b-95bb-45b7-95d3-4a1d68312e49\" (UID: \"88cab55b-95bb-45b7-95d3-4a1d68312e49\") " Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.413406 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gkdzq\" (UniqueName: \"kubernetes.io/projected/88cab55b-95bb-45b7-95d3-4a1d68312e49-kube-api-access-gkdzq\") pod \"88cab55b-95bb-45b7-95d3-4a1d68312e49\" (UID: \"88cab55b-95bb-45b7-95d3-4a1d68312e49\") " Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.413450 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/88cab55b-95bb-45b7-95d3-4a1d68312e49-scripts\") pod \"88cab55b-95bb-45b7-95d3-4a1d68312e49\" (UID: \"88cab55b-95bb-45b7-95d3-4a1d68312e49\") " Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.413541 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/88cab55b-95bb-45b7-95d3-4a1d68312e49-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "88cab55b-95bb-45b7-95d3-4a1d68312e49" (UID: "88cab55b-95bb-45b7-95d3-4a1d68312e49"). InnerVolumeSpecName "ring-data-devices". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.413987 4558 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/88cab55b-95bb-45b7-95d3-4a1d68312e49-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.414004 4558 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/88cab55b-95bb-45b7-95d3-4a1d68312e49-ring-data-devices\") on node \"crc\" DevicePath \"\"" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.414126 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/88cab55b-95bb-45b7-95d3-4a1d68312e49-scripts" (OuterVolumeSpecName: "scripts") pod "88cab55b-95bb-45b7-95d3-4a1d68312e49" (UID: "88cab55b-95bb-45b7-95d3-4a1d68312e49"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.417754 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/88cab55b-95bb-45b7-95d3-4a1d68312e49-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "88cab55b-95bb-45b7-95d3-4a1d68312e49" (UID: "88cab55b-95bb-45b7-95d3-4a1d68312e49"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.418434 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/88cab55b-95bb-45b7-95d3-4a1d68312e49-kube-api-access-gkdzq" (OuterVolumeSpecName: "kube-api-access-gkdzq") pod "88cab55b-95bb-45b7-95d3-4a1d68312e49" (UID: "88cab55b-95bb-45b7-95d3-4a1d68312e49"). InnerVolumeSpecName "kube-api-access-gkdzq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.419533 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/88cab55b-95bb-45b7-95d3-4a1d68312e49-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "88cab55b-95bb-45b7-95d3-4a1d68312e49" (UID: "88cab55b-95bb-45b7-95d3-4a1d68312e49"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.420144 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/88cab55b-95bb-45b7-95d3-4a1d68312e49-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "88cab55b-95bb-45b7-95d3-4a1d68312e49" (UID: "88cab55b-95bb-45b7-95d3-4a1d68312e49"). InnerVolumeSpecName "dispersionconf". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.518121 4558 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/88cab55b-95bb-45b7-95d3-4a1d68312e49-swiftconf\") on node \"crc\" DevicePath \"\"" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.518150 4558 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/88cab55b-95bb-45b7-95d3-4a1d68312e49-dispersionconf\") on node \"crc\" DevicePath \"\"" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.518174 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88cab55b-95bb-45b7-95d3-4a1d68312e49-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.518184 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gkdzq\" (UniqueName: \"kubernetes.io/projected/88cab55b-95bb-45b7-95d3-4a1d68312e49-kube-api-access-gkdzq\") on node \"crc\" DevicePath \"\"" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.518194 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/88cab55b-95bb-45b7-95d3-4a1d68312e49-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.594666 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-db-create-pxkff" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.618997 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/10edb733-8fab-4543-ad29-3568e3de5aea-etc-swift\") pod \"swift-storage-0\" (UID: \"10edb733-8fab-4543-ad29-3568e3de5aea\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:01:41 crc kubenswrapper[4558]: E0120 17:01:41.619133 4558 projected.go:288] Couldn't get configMap openstack-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 20 17:01:41 crc kubenswrapper[4558]: E0120 17:01:41.619153 4558 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Jan 20 17:01:41 crc kubenswrapper[4558]: E0120 17:01:41.619251 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/10edb733-8fab-4543-ad29-3568e3de5aea-etc-swift podName:10edb733-8fab-4543-ad29-3568e3de5aea nodeName:}" failed. No retries permitted until 2026-01-20 17:01:42.619238153 +0000 UTC m=+1196.379576119 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/10edb733-8fab-4543-ad29-3568e3de5aea-etc-swift") pod "swift-storage-0" (UID: "10edb733-8fab-4543-ad29-3568e3de5aea") : configmap "swift-ring-files" not found Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.662659 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-db-create-x7dh9" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.667625 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-6218-account-create-update-gbcj6" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.719782 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bb2sm\" (UniqueName: \"kubernetes.io/projected/79ceb3e8-184f-4c1d-bceb-89c6f69ee670-kube-api-access-bb2sm\") pod \"79ceb3e8-184f-4c1d-bceb-89c6f69ee670\" (UID: \"79ceb3e8-184f-4c1d-bceb-89c6f69ee670\") " Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.719849 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/79ceb3e8-184f-4c1d-bceb-89c6f69ee670-operator-scripts\") pod \"79ceb3e8-184f-4c1d-bceb-89c6f69ee670\" (UID: \"79ceb3e8-184f-4c1d-bceb-89c6f69ee670\") " Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.721783 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/79ceb3e8-184f-4c1d-bceb-89c6f69ee670-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "79ceb3e8-184f-4c1d-bceb-89c6f69ee670" (UID: "79ceb3e8-184f-4c1d-bceb-89c6f69ee670"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.725689 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/79ceb3e8-184f-4c1d-bceb-89c6f69ee670-kube-api-access-bb2sm" (OuterVolumeSpecName: "kube-api-access-bb2sm") pod "79ceb3e8-184f-4c1d-bceb-89c6f69ee670" (UID: "79ceb3e8-184f-4c1d-bceb-89c6f69ee670"). InnerVolumeSpecName "kube-api-access-bb2sm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.795983 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-v4kx6"] Jan 20 17:01:41 crc kubenswrapper[4558]: W0120 17:01:41.798372 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod07a241d6_001d_4677_a411_58e84c8f21ba.slice/crio-445f11a481dd2cee027eb7e59f4fb0a15a9a8bc80cafdd32c161d47874148769 WatchSource:0}: Error finding container 445f11a481dd2cee027eb7e59f4fb0a15a9a8bc80cafdd32c161d47874148769: Status 404 returned error can't find the container with id 445f11a481dd2cee027eb7e59f4fb0a15a9a8bc80cafdd32c161d47874148769 Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.821907 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/affe1aaa-7cef-46d2-82f8-1205ce2ff96b-operator-scripts\") pod \"affe1aaa-7cef-46d2-82f8-1205ce2ff96b\" (UID: \"affe1aaa-7cef-46d2-82f8-1205ce2ff96b\") " Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.822444 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s2qqn\" (UniqueName: \"kubernetes.io/projected/affe1aaa-7cef-46d2-82f8-1205ce2ff96b-kube-api-access-s2qqn\") pod \"affe1aaa-7cef-46d2-82f8-1205ce2ff96b\" (UID: \"affe1aaa-7cef-46d2-82f8-1205ce2ff96b\") " Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.822486 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9cjmp\" (UniqueName: \"kubernetes.io/projected/8b64ebf3-99cf-495a-a82d-0a27123f5d7a-kube-api-access-9cjmp\") pod \"8b64ebf3-99cf-495a-a82d-0a27123f5d7a\" (UID: \"8b64ebf3-99cf-495a-a82d-0a27123f5d7a\") " Jan 20 
17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.822507 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8b64ebf3-99cf-495a-a82d-0a27123f5d7a-operator-scripts\") pod \"8b64ebf3-99cf-495a-a82d-0a27123f5d7a\" (UID: \"8b64ebf3-99cf-495a-a82d-0a27123f5d7a\") " Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.822624 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/affe1aaa-7cef-46d2-82f8-1205ce2ff96b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "affe1aaa-7cef-46d2-82f8-1205ce2ff96b" (UID: "affe1aaa-7cef-46d2-82f8-1205ce2ff96b"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.823257 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8b64ebf3-99cf-495a-a82d-0a27123f5d7a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "8b64ebf3-99cf-495a-a82d-0a27123f5d7a" (UID: "8b64ebf3-99cf-495a-a82d-0a27123f5d7a"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.823308 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/affe1aaa-7cef-46d2-82f8-1205ce2ff96b-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.823325 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bb2sm\" (UniqueName: \"kubernetes.io/projected/79ceb3e8-184f-4c1d-bceb-89c6f69ee670-kube-api-access-bb2sm\") on node \"crc\" DevicePath \"\"" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.823340 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/79ceb3e8-184f-4c1d-bceb-89c6f69ee670-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.824483 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/affe1aaa-7cef-46d2-82f8-1205ce2ff96b-kube-api-access-s2qqn" (OuterVolumeSpecName: "kube-api-access-s2qqn") pod "affe1aaa-7cef-46d2-82f8-1205ce2ff96b" (UID: "affe1aaa-7cef-46d2-82f8-1205ce2ff96b"). InnerVolumeSpecName "kube-api-access-s2qqn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.824748 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8b64ebf3-99cf-495a-a82d-0a27123f5d7a-kube-api-access-9cjmp" (OuterVolumeSpecName: "kube-api-access-9cjmp") pod "8b64ebf3-99cf-495a-a82d-0a27123f5d7a" (UID: "8b64ebf3-99cf-495a-a82d-0a27123f5d7a"). InnerVolumeSpecName "kube-api-access-9cjmp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.925623 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s2qqn\" (UniqueName: \"kubernetes.io/projected/affe1aaa-7cef-46d2-82f8-1205ce2ff96b-kube-api-access-s2qqn\") on node \"crc\" DevicePath \"\"" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.925653 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9cjmp\" (UniqueName: \"kubernetes.io/projected/8b64ebf3-99cf-495a-a82d-0a27123f5d7a-kube-api-access-9cjmp\") on node \"crc\" DevicePath \"\"" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.925662 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8b64ebf3-99cf-495a-a82d-0a27123f5d7a-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.928319 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:01:41 crc kubenswrapper[4558]: I0120 17:01:41.958680 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:01:42 crc kubenswrapper[4558]: I0120 17:01:42.157642 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:01:42 crc kubenswrapper[4558]: E0120 17:01:42.157995 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b64ebf3-99cf-495a-a82d-0a27123f5d7a" containerName="mariadb-account-create-update" Jan 20 17:01:42 crc kubenswrapper[4558]: I0120 17:01:42.158071 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b64ebf3-99cf-495a-a82d-0a27123f5d7a" containerName="mariadb-account-create-update" Jan 20 17:01:42 crc kubenswrapper[4558]: E0120 17:01:42.158131 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="affe1aaa-7cef-46d2-82f8-1205ce2ff96b" containerName="mariadb-database-create" Jan 20 17:01:42 crc kubenswrapper[4558]: I0120 17:01:42.158224 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="affe1aaa-7cef-46d2-82f8-1205ce2ff96b" containerName="mariadb-database-create" Jan 20 17:01:42 crc kubenswrapper[4558]: E0120 17:01:42.158293 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79ceb3e8-184f-4c1d-bceb-89c6f69ee670" containerName="mariadb-database-create" Jan 20 17:01:42 crc kubenswrapper[4558]: I0120 17:01:42.158337 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="79ceb3e8-184f-4c1d-bceb-89c6f69ee670" containerName="mariadb-database-create" Jan 20 17:01:42 crc kubenswrapper[4558]: I0120 17:01:42.158517 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8b64ebf3-99cf-495a-a82d-0a27123f5d7a" containerName="mariadb-account-create-update" Jan 20 17:01:42 crc kubenswrapper[4558]: I0120 17:01:42.158585 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="affe1aaa-7cef-46d2-82f8-1205ce2ff96b" containerName="mariadb-database-create" Jan 20 17:01:42 crc kubenswrapper[4558]: I0120 17:01:42.158636 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="79ceb3e8-184f-4c1d-bceb-89c6f69ee670" containerName="mariadb-database-create" Jan 20 17:01:42 crc kubenswrapper[4558]: I0120 17:01:42.159299 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:01:42 crc kubenswrapper[4558]: I0120 17:01:42.160940 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ovnnorthd-ovnnorthd-dockercfg-9tvzr" Jan 20 17:01:42 crc kubenswrapper[4558]: I0120 17:01:42.161192 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ovnnorthd-ovndbs" Jan 20 17:01:42 crc kubenswrapper[4558]: I0120 17:01:42.161431 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovnnorthd-config" Jan 20 17:01:42 crc kubenswrapper[4558]: I0120 17:01:42.161530 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovnnorthd-scripts" Jan 20 17:01:42 crc kubenswrapper[4558]: I0120 17:01:42.175943 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:01:42 crc kubenswrapper[4558]: I0120 17:01:42.229598 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/33be1904-bd58-48cc-806a-af1dc751717c-scripts\") pod \"ovn-northd-0\" (UID: \"33be1904-bd58-48cc-806a-af1dc751717c\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:01:42 crc kubenswrapper[4558]: I0120 17:01:42.229630 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r95cx\" (UniqueName: \"kubernetes.io/projected/33be1904-bd58-48cc-806a-af1dc751717c-kube-api-access-r95cx\") pod \"ovn-northd-0\" (UID: \"33be1904-bd58-48cc-806a-af1dc751717c\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:01:42 crc kubenswrapper[4558]: I0120 17:01:42.229653 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33be1904-bd58-48cc-806a-af1dc751717c-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"33be1904-bd58-48cc-806a-af1dc751717c\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:01:42 crc kubenswrapper[4558]: I0120 17:01:42.229670 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/33be1904-bd58-48cc-806a-af1dc751717c-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"33be1904-bd58-48cc-806a-af1dc751717c\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:01:42 crc kubenswrapper[4558]: I0120 17:01:42.229792 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/33be1904-bd58-48cc-806a-af1dc751717c-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"33be1904-bd58-48cc-806a-af1dc751717c\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:01:42 crc kubenswrapper[4558]: I0120 17:01:42.229849 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/33be1904-bd58-48cc-806a-af1dc751717c-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"33be1904-bd58-48cc-806a-af1dc751717c\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:01:42 crc kubenswrapper[4558]: I0120 17:01:42.230085 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/33be1904-bd58-48cc-806a-af1dc751717c-config\") pod \"ovn-northd-0\" (UID: \"33be1904-bd58-48cc-806a-af1dc751717c\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:01:42 crc kubenswrapper[4558]: I0120 17:01:42.321113 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-create-pxkff" event={"ID":"79ceb3e8-184f-4c1d-bceb-89c6f69ee670","Type":"ContainerDied","Data":"e4529312cb954a3cef9eadf2c9b962115231e6927efd437e4a874c7c5ff17fbb"} Jan 20 17:01:42 crc kubenswrapper[4558]: I0120 17:01:42.321150 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e4529312cb954a3cef9eadf2c9b962115231e6927efd437e4a874c7c5ff17fbb" Jan 20 17:01:42 crc kubenswrapper[4558]: I0120 17:01:42.321213 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-db-create-pxkff" Jan 20 17:01:42 crc kubenswrapper[4558]: I0120 17:01:42.328207 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-ring-rebalance-v4kx6" event={"ID":"07a241d6-001d-4677-a411-58e84c8f21ba","Type":"ContainerStarted","Data":"590af9e8b321dd601e68895bca62499efbd67321a304b402ec4829504f95989f"} Jan 20 17:01:42 crc kubenswrapper[4558]: I0120 17:01:42.328258 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-ring-rebalance-v4kx6" event={"ID":"07a241d6-001d-4677-a411-58e84c8f21ba","Type":"ContainerStarted","Data":"445f11a481dd2cee027eb7e59f4fb0a15a9a8bc80cafdd32c161d47874148769"} Jan 20 17:01:42 crc kubenswrapper[4558]: I0120 17:01:42.331428 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/33be1904-bd58-48cc-806a-af1dc751717c-config\") pod \"ovn-northd-0\" (UID: \"33be1904-bd58-48cc-806a-af1dc751717c\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:01:42 crc kubenswrapper[4558]: I0120 17:01:42.331500 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/33be1904-bd58-48cc-806a-af1dc751717c-scripts\") pod \"ovn-northd-0\" (UID: \"33be1904-bd58-48cc-806a-af1dc751717c\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:01:42 crc kubenswrapper[4558]: I0120 17:01:42.331522 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r95cx\" (UniqueName: \"kubernetes.io/projected/33be1904-bd58-48cc-806a-af1dc751717c-kube-api-access-r95cx\") pod \"ovn-northd-0\" (UID: \"33be1904-bd58-48cc-806a-af1dc751717c\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:01:42 crc kubenswrapper[4558]: I0120 17:01:42.331544 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33be1904-bd58-48cc-806a-af1dc751717c-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"33be1904-bd58-48cc-806a-af1dc751717c\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:01:42 crc kubenswrapper[4558]: I0120 17:01:42.331560 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/33be1904-bd58-48cc-806a-af1dc751717c-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"33be1904-bd58-48cc-806a-af1dc751717c\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:01:42 crc kubenswrapper[4558]: I0120 17:01:42.331582 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/33be1904-bd58-48cc-806a-af1dc751717c-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"33be1904-bd58-48cc-806a-af1dc751717c\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:01:42 crc kubenswrapper[4558]: I0120 17:01:42.331607 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/33be1904-bd58-48cc-806a-af1dc751717c-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"33be1904-bd58-48cc-806a-af1dc751717c\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:01:42 crc kubenswrapper[4558]: I0120 17:01:42.332275 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/33be1904-bd58-48cc-806a-af1dc751717c-config\") pod \"ovn-northd-0\" (UID: \"33be1904-bd58-48cc-806a-af1dc751717c\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:01:42 crc kubenswrapper[4558]: I0120 17:01:42.333328 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/33be1904-bd58-48cc-806a-af1dc751717c-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"33be1904-bd58-48cc-806a-af1dc751717c\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:01:42 crc kubenswrapper[4558]: I0120 17:01:42.333483 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-create-x7dh9" event={"ID":"affe1aaa-7cef-46d2-82f8-1205ce2ff96b","Type":"ContainerDied","Data":"45bfd6626265be3b54e209ad9589b0d6eb617072eb3589fd093f42565878df4a"} Jan 20 17:01:42 crc kubenswrapper[4558]: I0120 17:01:42.333514 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="45bfd6626265be3b54e209ad9589b0d6eb617072eb3589fd093f42565878df4a" Jan 20 17:01:42 crc kubenswrapper[4558]: I0120 17:01:42.333566 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-db-create-x7dh9" Jan 20 17:01:42 crc kubenswrapper[4558]: I0120 17:01:42.336125 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/33be1904-bd58-48cc-806a-af1dc751717c-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"33be1904-bd58-48cc-806a-af1dc751717c\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:01:42 crc kubenswrapper[4558]: I0120 17:01:42.336213 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/33be1904-bd58-48cc-806a-af1dc751717c-scripts\") pod \"ovn-northd-0\" (UID: \"33be1904-bd58-48cc-806a-af1dc751717c\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:01:42 crc kubenswrapper[4558]: I0120 17:01:42.341767 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33be1904-bd58-48cc-806a-af1dc751717c-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"33be1904-bd58-48cc-806a-af1dc751717c\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:01:42 crc kubenswrapper[4558]: I0120 17:01:42.342379 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-6218-account-create-update-gbcj6" Jan 20 17:01:42 crc kubenswrapper[4558]: I0120 17:01:42.342414 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/swift-ring-rebalance-sk2wr" Jan 20 17:01:42 crc kubenswrapper[4558]: I0120 17:01:42.344472 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-6218-account-create-update-gbcj6" event={"ID":"8b64ebf3-99cf-495a-a82d-0a27123f5d7a","Type":"ContainerDied","Data":"ea10752a26ee8847f254923c6e3ac85df1daf94cc4aa8a437d53f02e3f45140c"} Jan 20 17:01:42 crc kubenswrapper[4558]: I0120 17:01:42.345495 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ea10752a26ee8847f254923c6e3ac85df1daf94cc4aa8a437d53f02e3f45140c" Jan 20 17:01:42 crc kubenswrapper[4558]: I0120 17:01:42.345859 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/33be1904-bd58-48cc-806a-af1dc751717c-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"33be1904-bd58-48cc-806a-af1dc751717c\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:01:42 crc kubenswrapper[4558]: I0120 17:01:42.360312 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r95cx\" (UniqueName: \"kubernetes.io/projected/33be1904-bd58-48cc-806a-af1dc751717c-kube-api-access-r95cx\") pod \"ovn-northd-0\" (UID: \"33be1904-bd58-48cc-806a-af1dc751717c\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:01:42 crc kubenswrapper[4558]: I0120 17:01:42.367942 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/swift-ring-rebalance-v4kx6" podStartSLOduration=1.367925493 podStartE2EDuration="1.367925493s" podCreationTimestamp="2026-01-20 17:01:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:01:42.351454848 +0000 UTC m=+1196.111792815" watchObservedRunningTime="2026-01-20 17:01:42.367925493 +0000 UTC m=+1196.128263461" Jan 20 17:01:42 crc kubenswrapper[4558]: I0120 17:01:42.417139 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-sk2wr"] Jan 20 17:01:42 crc kubenswrapper[4558]: I0120 17:01:42.425602 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-sk2wr"] Jan 20 17:01:42 crc kubenswrapper[4558]: I0120 17:01:42.474988 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:01:42 crc kubenswrapper[4558]: I0120 17:01:42.574046 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="88cab55b-95bb-45b7-95d3-4a1d68312e49" path="/var/lib/kubelet/pods/88cab55b-95bb-45b7-95d3-4a1d68312e49/volumes" Jan 20 17:01:42 crc kubenswrapper[4558]: I0120 17:01:42.607204 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-6cbc-account-create-update-n6t9t" Jan 20 17:01:42 crc kubenswrapper[4558]: I0120 17:01:42.635793 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/10edb733-8fab-4543-ad29-3568e3de5aea-etc-swift\") pod \"swift-storage-0\" (UID: \"10edb733-8fab-4543-ad29-3568e3de5aea\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:01:42 crc kubenswrapper[4558]: E0120 17:01:42.635996 4558 projected.go:288] Couldn't get configMap openstack-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 20 17:01:42 crc kubenswrapper[4558]: E0120 17:01:42.636037 4558 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Jan 20 17:01:42 crc kubenswrapper[4558]: E0120 17:01:42.636082 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/10edb733-8fab-4543-ad29-3568e3de5aea-etc-swift podName:10edb733-8fab-4543-ad29-3568e3de5aea nodeName:}" failed. No retries permitted until 2026-01-20 17:01:44.636066157 +0000 UTC m=+1198.396404124 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/10edb733-8fab-4543-ad29-3568e3de5aea-etc-swift") pod "swift-storage-0" (UID: "10edb733-8fab-4543-ad29-3568e3de5aea") : configmap "swift-ring-files" not found Jan 20 17:01:42 crc kubenswrapper[4558]: I0120 17:01:42.737013 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0d6ee4cd-ee0d-45e6-9f55-92a5ffa7b7d7-operator-scripts\") pod \"0d6ee4cd-ee0d-45e6-9f55-92a5ffa7b7d7\" (UID: \"0d6ee4cd-ee0d-45e6-9f55-92a5ffa7b7d7\") " Jan 20 17:01:42 crc kubenswrapper[4558]: I0120 17:01:42.737094 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sjf99\" (UniqueName: \"kubernetes.io/projected/0d6ee4cd-ee0d-45e6-9f55-92a5ffa7b7d7-kube-api-access-sjf99\") pod \"0d6ee4cd-ee0d-45e6-9f55-92a5ffa7b7d7\" (UID: \"0d6ee4cd-ee0d-45e6-9f55-92a5ffa7b7d7\") " Jan 20 17:01:42 crc kubenswrapper[4558]: I0120 17:01:42.738036 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0d6ee4cd-ee0d-45e6-9f55-92a5ffa7b7d7-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "0d6ee4cd-ee0d-45e6-9f55-92a5ffa7b7d7" (UID: "0d6ee4cd-ee0d-45e6-9f55-92a5ffa7b7d7"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:01:42 crc kubenswrapper[4558]: I0120 17:01:42.743110 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0d6ee4cd-ee0d-45e6-9f55-92a5ffa7b7d7-kube-api-access-sjf99" (OuterVolumeSpecName: "kube-api-access-sjf99") pod "0d6ee4cd-ee0d-45e6-9f55-92a5ffa7b7d7" (UID: "0d6ee4cd-ee0d-45e6-9f55-92a5ffa7b7d7"). InnerVolumeSpecName "kube-api-access-sjf99". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:01:42 crc kubenswrapper[4558]: I0120 17:01:42.838578 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0d6ee4cd-ee0d-45e6-9f55-92a5ffa7b7d7-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:01:42 crc kubenswrapper[4558]: I0120 17:01:42.838889 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sjf99\" (UniqueName: \"kubernetes.io/projected/0d6ee4cd-ee0d-45e6-9f55-92a5ffa7b7d7-kube-api-access-sjf99\") on node \"crc\" DevicePath \"\"" Jan 20 17:01:42 crc kubenswrapper[4558]: I0120 17:01:42.856243 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:01:42 crc kubenswrapper[4558]: W0120 17:01:42.859888 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod33be1904_bd58_48cc_806a_af1dc751717c.slice/crio-ddc37301ab3aa36d3ab1a40427fee1461327dafd15990353b26f1b5e385a5fff WatchSource:0}: Error finding container ddc37301ab3aa36d3ab1a40427fee1461327dafd15990353b26f1b5e385a5fff: Status 404 returned error can't find the container with id ddc37301ab3aa36d3ab1a40427fee1461327dafd15990353b26f1b5e385a5fff Jan 20 17:01:43 crc kubenswrapper[4558]: I0120 17:01:43.350121 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-6cbc-account-create-update-n6t9t" event={"ID":"0d6ee4cd-ee0d-45e6-9f55-92a5ffa7b7d7","Type":"ContainerDied","Data":"5ef0acccfd52a8e7acea8ee27cf4d83be5325f37c086f04806b25678490a5fbf"} Jan 20 17:01:43 crc kubenswrapper[4558]: I0120 17:01:43.350155 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5ef0acccfd52a8e7acea8ee27cf4d83be5325f37c086f04806b25678490a5fbf" Jan 20 17:01:43 crc kubenswrapper[4558]: I0120 17:01:43.350140 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-6cbc-account-create-update-n6t9t" Jan 20 17:01:43 crc kubenswrapper[4558]: I0120 17:01:43.351687 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"33be1904-bd58-48cc-806a-af1dc751717c","Type":"ContainerStarted","Data":"eb1f0784a2f2122587c3f2a6924c91413be937ed5c1f85ceae798d13f078873b"} Jan 20 17:01:43 crc kubenswrapper[4558]: I0120 17:01:43.351724 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"33be1904-bd58-48cc-806a-af1dc751717c","Type":"ContainerStarted","Data":"811380372fc9996d1d29a2c8b3863bd8fa15b182a9a97db33dd3fd3b535d9455"} Jan 20 17:01:43 crc kubenswrapper[4558]: I0120 17:01:43.351734 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"33be1904-bd58-48cc-806a-af1dc751717c","Type":"ContainerStarted","Data":"ddc37301ab3aa36d3ab1a40427fee1461327dafd15990353b26f1b5e385a5fff"} Jan 20 17:01:43 crc kubenswrapper[4558]: I0120 17:01:43.351914 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:01:43 crc kubenswrapper[4558]: I0120 17:01:43.382042 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ovn-northd-0" podStartSLOduration=1.382024676 podStartE2EDuration="1.382024676s" podCreationTimestamp="2026-01-20 17:01:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:01:43.376737293 +0000 UTC m=+1197.137075260" watchObservedRunningTime="2026-01-20 17:01:43.382024676 +0000 UTC m=+1197.142362643" Jan 20 17:01:44 crc kubenswrapper[4558]: I0120 17:01:44.448697 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-db-create-bj62z"] Jan 20 17:01:44 crc kubenswrapper[4558]: E0120 17:01:44.449112 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d6ee4cd-ee0d-45e6-9f55-92a5ffa7b7d7" containerName="mariadb-account-create-update" Jan 20 17:01:44 crc kubenswrapper[4558]: I0120 17:01:44.449129 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d6ee4cd-ee0d-45e6-9f55-92a5ffa7b7d7" containerName="mariadb-account-create-update" Jan 20 17:01:44 crc kubenswrapper[4558]: I0120 17:01:44.449350 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d6ee4cd-ee0d-45e6-9f55-92a5ffa7b7d7" containerName="mariadb-account-create-update" Jan 20 17:01:44 crc kubenswrapper[4558]: I0120 17:01:44.449947 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-db-create-bj62z" Jan 20 17:01:44 crc kubenswrapper[4558]: I0120 17:01:44.458444 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-db-create-bj62z"] Jan 20 17:01:44 crc kubenswrapper[4558]: I0120 17:01:44.562765 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-103a-account-create-update-xfb6j"] Jan 20 17:01:44 crc kubenswrapper[4558]: I0120 17:01:44.563913 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-103a-account-create-update-xfb6j" Jan 20 17:01:44 crc kubenswrapper[4558]: I0120 17:01:44.565781 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-db-secret" Jan 20 17:01:44 crc kubenswrapper[4558]: I0120 17:01:44.568949 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tnzkg\" (UniqueName: \"kubernetes.io/projected/d1fde627-f808-4f60-82f9-3a9d13491932-kube-api-access-tnzkg\") pod \"glance-db-create-bj62z\" (UID: \"d1fde627-f808-4f60-82f9-3a9d13491932\") " pod="openstack-kuttl-tests/glance-db-create-bj62z" Jan 20 17:01:44 crc kubenswrapper[4558]: I0120 17:01:44.569323 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d1fde627-f808-4f60-82f9-3a9d13491932-operator-scripts\") pod \"glance-db-create-bj62z\" (UID: \"d1fde627-f808-4f60-82f9-3a9d13491932\") " pod="openstack-kuttl-tests/glance-db-create-bj62z" Jan 20 17:01:44 crc kubenswrapper[4558]: I0120 17:01:44.573321 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-103a-account-create-update-xfb6j"] Jan 20 17:01:44 crc kubenswrapper[4558]: I0120 17:01:44.671051 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d1fde627-f808-4f60-82f9-3a9d13491932-operator-scripts\") pod \"glance-db-create-bj62z\" (UID: \"d1fde627-f808-4f60-82f9-3a9d13491932\") " pod="openstack-kuttl-tests/glance-db-create-bj62z" Jan 20 17:01:44 crc kubenswrapper[4558]: I0120 17:01:44.671182 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n8pt7\" (UniqueName: \"kubernetes.io/projected/c1ca9fc0-7d35-467b-8b84-da1e4d6bfa49-kube-api-access-n8pt7\") pod \"glance-103a-account-create-update-xfb6j\" (UID: \"c1ca9fc0-7d35-467b-8b84-da1e4d6bfa49\") " pod="openstack-kuttl-tests/glance-103a-account-create-update-xfb6j" Jan 20 17:01:44 crc kubenswrapper[4558]: I0120 17:01:44.671224 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c1ca9fc0-7d35-467b-8b84-da1e4d6bfa49-operator-scripts\") pod \"glance-103a-account-create-update-xfb6j\" (UID: \"c1ca9fc0-7d35-467b-8b84-da1e4d6bfa49\") " pod="openstack-kuttl-tests/glance-103a-account-create-update-xfb6j" Jan 20 17:01:44 crc kubenswrapper[4558]: I0120 17:01:44.671272 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tnzkg\" (UniqueName: \"kubernetes.io/projected/d1fde627-f808-4f60-82f9-3a9d13491932-kube-api-access-tnzkg\") pod \"glance-db-create-bj62z\" (UID: \"d1fde627-f808-4f60-82f9-3a9d13491932\") " pod="openstack-kuttl-tests/glance-db-create-bj62z" Jan 20 17:01:44 crc kubenswrapper[4558]: I0120 17:01:44.671311 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/10edb733-8fab-4543-ad29-3568e3de5aea-etc-swift\") pod \"swift-storage-0\" (UID: \"10edb733-8fab-4543-ad29-3568e3de5aea\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:01:44 crc kubenswrapper[4558]: E0120 17:01:44.671441 4558 projected.go:288] Couldn't get configMap openstack-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 20 17:01:44 
crc kubenswrapper[4558]: E0120 17:01:44.671459 4558 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Jan 20 17:01:44 crc kubenswrapper[4558]: E0120 17:01:44.671498 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/10edb733-8fab-4543-ad29-3568e3de5aea-etc-swift podName:10edb733-8fab-4543-ad29-3568e3de5aea nodeName:}" failed. No retries permitted until 2026-01-20 17:01:48.671483497 +0000 UTC m=+1202.431821465 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/10edb733-8fab-4543-ad29-3568e3de5aea-etc-swift") pod "swift-storage-0" (UID: "10edb733-8fab-4543-ad29-3568e3de5aea") : configmap "swift-ring-files" not found Jan 20 17:01:44 crc kubenswrapper[4558]: I0120 17:01:44.671765 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d1fde627-f808-4f60-82f9-3a9d13491932-operator-scripts\") pod \"glance-db-create-bj62z\" (UID: \"d1fde627-f808-4f60-82f9-3a9d13491932\") " pod="openstack-kuttl-tests/glance-db-create-bj62z" Jan 20 17:01:44 crc kubenswrapper[4558]: I0120 17:01:44.696826 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tnzkg\" (UniqueName: \"kubernetes.io/projected/d1fde627-f808-4f60-82f9-3a9d13491932-kube-api-access-tnzkg\") pod \"glance-db-create-bj62z\" (UID: \"d1fde627-f808-4f60-82f9-3a9d13491932\") " pod="openstack-kuttl-tests/glance-db-create-bj62z" Jan 20 17:01:44 crc kubenswrapper[4558]: I0120 17:01:44.773756 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n8pt7\" (UniqueName: \"kubernetes.io/projected/c1ca9fc0-7d35-467b-8b84-da1e4d6bfa49-kube-api-access-n8pt7\") pod \"glance-103a-account-create-update-xfb6j\" (UID: \"c1ca9fc0-7d35-467b-8b84-da1e4d6bfa49\") " pod="openstack-kuttl-tests/glance-103a-account-create-update-xfb6j" Jan 20 17:01:44 crc kubenswrapper[4558]: I0120 17:01:44.773852 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c1ca9fc0-7d35-467b-8b84-da1e4d6bfa49-operator-scripts\") pod \"glance-103a-account-create-update-xfb6j\" (UID: \"c1ca9fc0-7d35-467b-8b84-da1e4d6bfa49\") " pod="openstack-kuttl-tests/glance-103a-account-create-update-xfb6j" Jan 20 17:01:44 crc kubenswrapper[4558]: I0120 17:01:44.774055 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-db-create-bj62z" Jan 20 17:01:44 crc kubenswrapper[4558]: I0120 17:01:44.774577 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c1ca9fc0-7d35-467b-8b84-da1e4d6bfa49-operator-scripts\") pod \"glance-103a-account-create-update-xfb6j\" (UID: \"c1ca9fc0-7d35-467b-8b84-da1e4d6bfa49\") " pod="openstack-kuttl-tests/glance-103a-account-create-update-xfb6j" Jan 20 17:01:44 crc kubenswrapper[4558]: I0120 17:01:44.798228 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n8pt7\" (UniqueName: \"kubernetes.io/projected/c1ca9fc0-7d35-467b-8b84-da1e4d6bfa49-kube-api-access-n8pt7\") pod \"glance-103a-account-create-update-xfb6j\" (UID: \"c1ca9fc0-7d35-467b-8b84-da1e4d6bfa49\") " pod="openstack-kuttl-tests/glance-103a-account-create-update-xfb6j" Jan 20 17:01:44 crc kubenswrapper[4558]: I0120 17:01:44.878015 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-103a-account-create-update-xfb6j" Jan 20 17:01:45 crc kubenswrapper[4558]: I0120 17:01:45.145553 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-db-create-bj62z"] Jan 20 17:01:45 crc kubenswrapper[4558]: W0120 17:01:45.149018 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd1fde627_f808_4f60_82f9_3a9d13491932.slice/crio-5ca1195dedc50084a69bf476156bff15afb3e023633be4ad989dfff37420c9cb WatchSource:0}: Error finding container 5ca1195dedc50084a69bf476156bff15afb3e023633be4ad989dfff37420c9cb: Status 404 returned error can't find the container with id 5ca1195dedc50084a69bf476156bff15afb3e023633be4ad989dfff37420c9cb Jan 20 17:01:45 crc kubenswrapper[4558]: I0120 17:01:45.253279 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-103a-account-create-update-xfb6j"] Jan 20 17:01:45 crc kubenswrapper[4558]: W0120 17:01:45.259052 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc1ca9fc0_7d35_467b_8b84_da1e4d6bfa49.slice/crio-d96bc574f4ff8a6559574a40805623c079ff70600b669dfd9203f3c004d0fbd2 WatchSource:0}: Error finding container d96bc574f4ff8a6559574a40805623c079ff70600b669dfd9203f3c004d0fbd2: Status 404 returned error can't find the container with id d96bc574f4ff8a6559574a40805623c079ff70600b669dfd9203f3c004d0fbd2 Jan 20 17:01:45 crc kubenswrapper[4558]: I0120 17:01:45.366739 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-create-bj62z" event={"ID":"d1fde627-f808-4f60-82f9-3a9d13491932","Type":"ContainerStarted","Data":"945d6cf4d7e0ae9b1db59237517dbc0df3d7e1a5de00383bf5d52e1df143d4d4"} Jan 20 17:01:45 crc kubenswrapper[4558]: I0120 17:01:45.367101 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-create-bj62z" event={"ID":"d1fde627-f808-4f60-82f9-3a9d13491932","Type":"ContainerStarted","Data":"5ca1195dedc50084a69bf476156bff15afb3e023633be4ad989dfff37420c9cb"} Jan 20 17:01:45 crc kubenswrapper[4558]: I0120 17:01:45.368868 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-103a-account-create-update-xfb6j" event={"ID":"c1ca9fc0-7d35-467b-8b84-da1e4d6bfa49","Type":"ContainerStarted","Data":"d96bc574f4ff8a6559574a40805623c079ff70600b669dfd9203f3c004d0fbd2"} Jan 
20 17:01:45 crc kubenswrapper[4558]: I0120 17:01:45.383356 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-db-create-bj62z" podStartSLOduration=1.383333962 podStartE2EDuration="1.383333962s" podCreationTimestamp="2026-01-20 17:01:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:01:45.378191552 +0000 UTC m=+1199.138529508" watchObservedRunningTime="2026-01-20 17:01:45.383333962 +0000 UTC m=+1199.143671929" Jan 20 17:01:46 crc kubenswrapper[4558]: I0120 17:01:46.291599 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/root-account-create-update-4mm2d"] Jan 20 17:01:46 crc kubenswrapper[4558]: I0120 17:01:46.292509 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-4mm2d" Jan 20 17:01:46 crc kubenswrapper[4558]: I0120 17:01:46.294110 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"openstack-mariadb-root-db-secret" Jan 20 17:01:46 crc kubenswrapper[4558]: I0120 17:01:46.299752 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-4mm2d"] Jan 20 17:01:46 crc kubenswrapper[4558]: I0120 17:01:46.376290 4558 generic.go:334] "Generic (PLEG): container finished" podID="d1fde627-f808-4f60-82f9-3a9d13491932" containerID="945d6cf4d7e0ae9b1db59237517dbc0df3d7e1a5de00383bf5d52e1df143d4d4" exitCode=0 Jan 20 17:01:46 crc kubenswrapper[4558]: I0120 17:01:46.376327 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-create-bj62z" event={"ID":"d1fde627-f808-4f60-82f9-3a9d13491932","Type":"ContainerDied","Data":"945d6cf4d7e0ae9b1db59237517dbc0df3d7e1a5de00383bf5d52e1df143d4d4"} Jan 20 17:01:46 crc kubenswrapper[4558]: I0120 17:01:46.377749 4558 generic.go:334] "Generic (PLEG): container finished" podID="c1ca9fc0-7d35-467b-8b84-da1e4d6bfa49" containerID="4f9b85a4d21c95ef631ed6bcbf55d246c6b1aa708349dc4c85de6b2d2a17fd09" exitCode=0 Jan 20 17:01:46 crc kubenswrapper[4558]: I0120 17:01:46.377783 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-103a-account-create-update-xfb6j" event={"ID":"c1ca9fc0-7d35-467b-8b84-da1e4d6bfa49","Type":"ContainerDied","Data":"4f9b85a4d21c95ef631ed6bcbf55d246c6b1aa708349dc4c85de6b2d2a17fd09"} Jan 20 17:01:46 crc kubenswrapper[4558]: I0120 17:01:46.400001 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/162a1081-b509-4f76-82ee-f3470f788b2d-operator-scripts\") pod \"root-account-create-update-4mm2d\" (UID: \"162a1081-b509-4f76-82ee-f3470f788b2d\") " pod="openstack-kuttl-tests/root-account-create-update-4mm2d" Jan 20 17:01:46 crc kubenswrapper[4558]: I0120 17:01:46.400070 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cc9h5\" (UniqueName: \"kubernetes.io/projected/162a1081-b509-4f76-82ee-f3470f788b2d-kube-api-access-cc9h5\") pod \"root-account-create-update-4mm2d\" (UID: \"162a1081-b509-4f76-82ee-f3470f788b2d\") " pod="openstack-kuttl-tests/root-account-create-update-4mm2d" Jan 20 17:01:46 crc kubenswrapper[4558]: I0120 17:01:46.501530 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cc9h5\" (UniqueName: 
\"kubernetes.io/projected/162a1081-b509-4f76-82ee-f3470f788b2d-kube-api-access-cc9h5\") pod \"root-account-create-update-4mm2d\" (UID: \"162a1081-b509-4f76-82ee-f3470f788b2d\") " pod="openstack-kuttl-tests/root-account-create-update-4mm2d" Jan 20 17:01:46 crc kubenswrapper[4558]: I0120 17:01:46.501734 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/162a1081-b509-4f76-82ee-f3470f788b2d-operator-scripts\") pod \"root-account-create-update-4mm2d\" (UID: \"162a1081-b509-4f76-82ee-f3470f788b2d\") " pod="openstack-kuttl-tests/root-account-create-update-4mm2d" Jan 20 17:01:46 crc kubenswrapper[4558]: I0120 17:01:46.502415 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/162a1081-b509-4f76-82ee-f3470f788b2d-operator-scripts\") pod \"root-account-create-update-4mm2d\" (UID: \"162a1081-b509-4f76-82ee-f3470f788b2d\") " pod="openstack-kuttl-tests/root-account-create-update-4mm2d" Jan 20 17:01:46 crc kubenswrapper[4558]: I0120 17:01:46.516633 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cc9h5\" (UniqueName: \"kubernetes.io/projected/162a1081-b509-4f76-82ee-f3470f788b2d-kube-api-access-cc9h5\") pod \"root-account-create-update-4mm2d\" (UID: \"162a1081-b509-4f76-82ee-f3470f788b2d\") " pod="openstack-kuttl-tests/root-account-create-update-4mm2d" Jan 20 17:01:46 crc kubenswrapper[4558]: I0120 17:01:46.607333 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-4mm2d" Jan 20 17:01:46 crc kubenswrapper[4558]: I0120 17:01:46.955772 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-4mm2d"] Jan 20 17:01:46 crc kubenswrapper[4558]: W0120 17:01:46.961419 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod162a1081_b509_4f76_82ee_f3470f788b2d.slice/crio-b391b842a4b69f486bbc56c0accd9e10fbbf9633e494c3ae7b5574cfd9d31a0a WatchSource:0}: Error finding container b391b842a4b69f486bbc56c0accd9e10fbbf9633e494c3ae7b5574cfd9d31a0a: Status 404 returned error can't find the container with id b391b842a4b69f486bbc56c0accd9e10fbbf9633e494c3ae7b5574cfd9d31a0a Jan 20 17:01:46 crc kubenswrapper[4558]: I0120 17:01:46.964753 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"openstack-mariadb-root-db-secret" Jan 20 17:01:47 crc kubenswrapper[4558]: I0120 17:01:47.385190 4558 generic.go:334] "Generic (PLEG): container finished" podID="07a241d6-001d-4677-a411-58e84c8f21ba" containerID="590af9e8b321dd601e68895bca62499efbd67321a304b402ec4829504f95989f" exitCode=0 Jan 20 17:01:47 crc kubenswrapper[4558]: I0120 17:01:47.385259 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-ring-rebalance-v4kx6" event={"ID":"07a241d6-001d-4677-a411-58e84c8f21ba","Type":"ContainerDied","Data":"590af9e8b321dd601e68895bca62499efbd67321a304b402ec4829504f95989f"} Jan 20 17:01:47 crc kubenswrapper[4558]: I0120 17:01:47.386676 4558 generic.go:334] "Generic (PLEG): container finished" podID="162a1081-b509-4f76-82ee-f3470f788b2d" containerID="30e334d21be63f799a062bbdac433a435c365279315262bd4f809fb8f1cd4b61" exitCode=0 Jan 20 17:01:47 crc kubenswrapper[4558]: I0120 17:01:47.386734 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-kuttl-tests/root-account-create-update-4mm2d" event={"ID":"162a1081-b509-4f76-82ee-f3470f788b2d","Type":"ContainerDied","Data":"30e334d21be63f799a062bbdac433a435c365279315262bd4f809fb8f1cd4b61"} Jan 20 17:01:47 crc kubenswrapper[4558]: I0120 17:01:47.386761 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-4mm2d" event={"ID":"162a1081-b509-4f76-82ee-f3470f788b2d","Type":"ContainerStarted","Data":"b391b842a4b69f486bbc56c0accd9e10fbbf9633e494c3ae7b5574cfd9d31a0a"} Jan 20 17:01:47 crc kubenswrapper[4558]: I0120 17:01:47.727430 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-db-create-bj62z" Jan 20 17:01:47 crc kubenswrapper[4558]: I0120 17:01:47.731921 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-103a-account-create-update-xfb6j" Jan 20 17:01:47 crc kubenswrapper[4558]: I0120 17:01:47.827293 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n8pt7\" (UniqueName: \"kubernetes.io/projected/c1ca9fc0-7d35-467b-8b84-da1e4d6bfa49-kube-api-access-n8pt7\") pod \"c1ca9fc0-7d35-467b-8b84-da1e4d6bfa49\" (UID: \"c1ca9fc0-7d35-467b-8b84-da1e4d6bfa49\") " Jan 20 17:01:47 crc kubenswrapper[4558]: I0120 17:01:47.827353 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d1fde627-f808-4f60-82f9-3a9d13491932-operator-scripts\") pod \"d1fde627-f808-4f60-82f9-3a9d13491932\" (UID: \"d1fde627-f808-4f60-82f9-3a9d13491932\") " Jan 20 17:01:47 crc kubenswrapper[4558]: I0120 17:01:47.827371 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c1ca9fc0-7d35-467b-8b84-da1e4d6bfa49-operator-scripts\") pod \"c1ca9fc0-7d35-467b-8b84-da1e4d6bfa49\" (UID: \"c1ca9fc0-7d35-467b-8b84-da1e4d6bfa49\") " Jan 20 17:01:47 crc kubenswrapper[4558]: I0120 17:01:47.827388 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tnzkg\" (UniqueName: \"kubernetes.io/projected/d1fde627-f808-4f60-82f9-3a9d13491932-kube-api-access-tnzkg\") pod \"d1fde627-f808-4f60-82f9-3a9d13491932\" (UID: \"d1fde627-f808-4f60-82f9-3a9d13491932\") " Jan 20 17:01:47 crc kubenswrapper[4558]: I0120 17:01:47.828245 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c1ca9fc0-7d35-467b-8b84-da1e4d6bfa49-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c1ca9fc0-7d35-467b-8b84-da1e4d6bfa49" (UID: "c1ca9fc0-7d35-467b-8b84-da1e4d6bfa49"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:01:47 crc kubenswrapper[4558]: I0120 17:01:47.828247 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d1fde627-f808-4f60-82f9-3a9d13491932-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d1fde627-f808-4f60-82f9-3a9d13491932" (UID: "d1fde627-f808-4f60-82f9-3a9d13491932"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:01:47 crc kubenswrapper[4558]: I0120 17:01:47.831846 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d1fde627-f808-4f60-82f9-3a9d13491932-kube-api-access-tnzkg" (OuterVolumeSpecName: "kube-api-access-tnzkg") pod "d1fde627-f808-4f60-82f9-3a9d13491932" (UID: "d1fde627-f808-4f60-82f9-3a9d13491932"). InnerVolumeSpecName "kube-api-access-tnzkg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:01:47 crc kubenswrapper[4558]: I0120 17:01:47.831881 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c1ca9fc0-7d35-467b-8b84-da1e4d6bfa49-kube-api-access-n8pt7" (OuterVolumeSpecName: "kube-api-access-n8pt7") pod "c1ca9fc0-7d35-467b-8b84-da1e4d6bfa49" (UID: "c1ca9fc0-7d35-467b-8b84-da1e4d6bfa49"). InnerVolumeSpecName "kube-api-access-n8pt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:01:47 crc kubenswrapper[4558]: I0120 17:01:47.929138 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n8pt7\" (UniqueName: \"kubernetes.io/projected/c1ca9fc0-7d35-467b-8b84-da1e4d6bfa49-kube-api-access-n8pt7\") on node \"crc\" DevicePath \"\"" Jan 20 17:01:47 crc kubenswrapper[4558]: I0120 17:01:47.929179 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d1fde627-f808-4f60-82f9-3a9d13491932-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:01:47 crc kubenswrapper[4558]: I0120 17:01:47.929190 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c1ca9fc0-7d35-467b-8b84-da1e4d6bfa49-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:01:47 crc kubenswrapper[4558]: I0120 17:01:47.929199 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tnzkg\" (UniqueName: \"kubernetes.io/projected/d1fde627-f808-4f60-82f9-3a9d13491932-kube-api-access-tnzkg\") on node \"crc\" DevicePath \"\"" Jan 20 17:01:48 crc kubenswrapper[4558]: I0120 17:01:48.393650 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-103a-account-create-update-xfb6j" event={"ID":"c1ca9fc0-7d35-467b-8b84-da1e4d6bfa49","Type":"ContainerDied","Data":"d96bc574f4ff8a6559574a40805623c079ff70600b669dfd9203f3c004d0fbd2"} Jan 20 17:01:48 crc kubenswrapper[4558]: I0120 17:01:48.393681 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d96bc574f4ff8a6559574a40805623c079ff70600b669dfd9203f3c004d0fbd2" Jan 20 17:01:48 crc kubenswrapper[4558]: I0120 17:01:48.395136 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-103a-account-create-update-xfb6j" Jan 20 17:01:48 crc kubenswrapper[4558]: I0120 17:01:48.395146 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-create-bj62z" event={"ID":"d1fde627-f808-4f60-82f9-3a9d13491932","Type":"ContainerDied","Data":"5ca1195dedc50084a69bf476156bff15afb3e023633be4ad989dfff37420c9cb"} Jan 20 17:01:48 crc kubenswrapper[4558]: I0120 17:01:48.395278 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-db-create-bj62z" Jan 20 17:01:48 crc kubenswrapper[4558]: I0120 17:01:48.395338 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5ca1195dedc50084a69bf476156bff15afb3e023633be4ad989dfff37420c9cb" Jan 20 17:01:48 crc kubenswrapper[4558]: I0120 17:01:48.694230 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-ring-rebalance-v4kx6" Jan 20 17:01:48 crc kubenswrapper[4558]: I0120 17:01:48.740686 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/10edb733-8fab-4543-ad29-3568e3de5aea-etc-swift\") pod \"swift-storage-0\" (UID: \"10edb733-8fab-4543-ad29-3568e3de5aea\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:01:48 crc kubenswrapper[4558]: I0120 17:01:48.753932 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/10edb733-8fab-4543-ad29-3568e3de5aea-etc-swift\") pod \"swift-storage-0\" (UID: \"10edb733-8fab-4543-ad29-3568e3de5aea\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:01:48 crc kubenswrapper[4558]: I0120 17:01:48.755901 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-4mm2d" Jan 20 17:01:48 crc kubenswrapper[4558]: I0120 17:01:48.841753 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/07a241d6-001d-4677-a411-58e84c8f21ba-ring-data-devices\") pod \"07a241d6-001d-4677-a411-58e84c8f21ba\" (UID: \"07a241d6-001d-4677-a411-58e84c8f21ba\") " Jan 20 17:01:48 crc kubenswrapper[4558]: I0120 17:01:48.841988 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/07a241d6-001d-4677-a411-58e84c8f21ba-swiftconf\") pod \"07a241d6-001d-4677-a411-58e84c8f21ba\" (UID: \"07a241d6-001d-4677-a411-58e84c8f21ba\") " Jan 20 17:01:48 crc kubenswrapper[4558]: I0120 17:01:48.842092 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/07a241d6-001d-4677-a411-58e84c8f21ba-dispersionconf\") pod \"07a241d6-001d-4677-a411-58e84c8f21ba\" (UID: \"07a241d6-001d-4677-a411-58e84c8f21ba\") " Jan 20 17:01:48 crc kubenswrapper[4558]: I0120 17:01:48.842156 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/07a241d6-001d-4677-a411-58e84c8f21ba-combined-ca-bundle\") pod \"07a241d6-001d-4677-a411-58e84c8f21ba\" (UID: \"07a241d6-001d-4677-a411-58e84c8f21ba\") " Jan 20 17:01:48 crc kubenswrapper[4558]: I0120 17:01:48.842273 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/07a241d6-001d-4677-a411-58e84c8f21ba-scripts\") pod \"07a241d6-001d-4677-a411-58e84c8f21ba\" (UID: \"07a241d6-001d-4677-a411-58e84c8f21ba\") " Jan 20 17:01:48 crc kubenswrapper[4558]: I0120 17:01:48.842339 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/162a1081-b509-4f76-82ee-f3470f788b2d-operator-scripts\") pod \"162a1081-b509-4f76-82ee-f3470f788b2d\" (UID: \"162a1081-b509-4f76-82ee-f3470f788b2d\") " Jan 20 17:01:48 crc 
kubenswrapper[4558]: I0120 17:01:48.842482 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sc7g5\" (UniqueName: \"kubernetes.io/projected/07a241d6-001d-4677-a411-58e84c8f21ba-kube-api-access-sc7g5\") pod \"07a241d6-001d-4677-a411-58e84c8f21ba\" (UID: \"07a241d6-001d-4677-a411-58e84c8f21ba\") " Jan 20 17:01:48 crc kubenswrapper[4558]: I0120 17:01:48.842561 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/07a241d6-001d-4677-a411-58e84c8f21ba-etc-swift\") pod \"07a241d6-001d-4677-a411-58e84c8f21ba\" (UID: \"07a241d6-001d-4677-a411-58e84c8f21ba\") " Jan 20 17:01:48 crc kubenswrapper[4558]: I0120 17:01:48.842667 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cc9h5\" (UniqueName: \"kubernetes.io/projected/162a1081-b509-4f76-82ee-f3470f788b2d-kube-api-access-cc9h5\") pod \"162a1081-b509-4f76-82ee-f3470f788b2d\" (UID: \"162a1081-b509-4f76-82ee-f3470f788b2d\") " Jan 20 17:01:48 crc kubenswrapper[4558]: I0120 17:01:48.842889 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/07a241d6-001d-4677-a411-58e84c8f21ba-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "07a241d6-001d-4677-a411-58e84c8f21ba" (UID: "07a241d6-001d-4677-a411-58e84c8f21ba"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:01:48 crc kubenswrapper[4558]: I0120 17:01:48.842887 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/162a1081-b509-4f76-82ee-f3470f788b2d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "162a1081-b509-4f76-82ee-f3470f788b2d" (UID: "162a1081-b509-4f76-82ee-f3470f788b2d"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:01:48 crc kubenswrapper[4558]: I0120 17:01:48.843202 4558 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/07a241d6-001d-4677-a411-58e84c8f21ba-ring-data-devices\") on node \"crc\" DevicePath \"\"" Jan 20 17:01:48 crc kubenswrapper[4558]: I0120 17:01:48.843278 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/162a1081-b509-4f76-82ee-f3470f788b2d-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:01:48 crc kubenswrapper[4558]: I0120 17:01:48.843208 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/07a241d6-001d-4677-a411-58e84c8f21ba-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "07a241d6-001d-4677-a411-58e84c8f21ba" (UID: "07a241d6-001d-4677-a411-58e84c8f21ba"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:01:48 crc kubenswrapper[4558]: I0120 17:01:48.845054 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/07a241d6-001d-4677-a411-58e84c8f21ba-kube-api-access-sc7g5" (OuterVolumeSpecName: "kube-api-access-sc7g5") pod "07a241d6-001d-4677-a411-58e84c8f21ba" (UID: "07a241d6-001d-4677-a411-58e84c8f21ba"). InnerVolumeSpecName "kube-api-access-sc7g5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:01:48 crc kubenswrapper[4558]: I0120 17:01:48.845374 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/162a1081-b509-4f76-82ee-f3470f788b2d-kube-api-access-cc9h5" (OuterVolumeSpecName: "kube-api-access-cc9h5") pod "162a1081-b509-4f76-82ee-f3470f788b2d" (UID: "162a1081-b509-4f76-82ee-f3470f788b2d"). InnerVolumeSpecName "kube-api-access-cc9h5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:01:48 crc kubenswrapper[4558]: I0120 17:01:48.846843 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/07a241d6-001d-4677-a411-58e84c8f21ba-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "07a241d6-001d-4677-a411-58e84c8f21ba" (UID: "07a241d6-001d-4677-a411-58e84c8f21ba"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:01:48 crc kubenswrapper[4558]: I0120 17:01:48.857290 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/07a241d6-001d-4677-a411-58e84c8f21ba-scripts" (OuterVolumeSpecName: "scripts") pod "07a241d6-001d-4677-a411-58e84c8f21ba" (UID: "07a241d6-001d-4677-a411-58e84c8f21ba"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:01:48 crc kubenswrapper[4558]: I0120 17:01:48.857760 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/07a241d6-001d-4677-a411-58e84c8f21ba-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "07a241d6-001d-4677-a411-58e84c8f21ba" (UID: "07a241d6-001d-4677-a411-58e84c8f21ba"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:01:48 crc kubenswrapper[4558]: I0120 17:01:48.858812 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/07a241d6-001d-4677-a411-58e84c8f21ba-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "07a241d6-001d-4677-a411-58e84c8f21ba" (UID: "07a241d6-001d-4677-a411-58e84c8f21ba"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:01:48 crc kubenswrapper[4558]: I0120 17:01:48.944139 4558 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/07a241d6-001d-4677-a411-58e84c8f21ba-dispersionconf\") on node \"crc\" DevicePath \"\"" Jan 20 17:01:48 crc kubenswrapper[4558]: I0120 17:01:48.944260 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/07a241d6-001d-4677-a411-58e84c8f21ba-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:01:48 crc kubenswrapper[4558]: I0120 17:01:48.944318 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/07a241d6-001d-4677-a411-58e84c8f21ba-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:01:48 crc kubenswrapper[4558]: I0120 17:01:48.944379 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sc7g5\" (UniqueName: \"kubernetes.io/projected/07a241d6-001d-4677-a411-58e84c8f21ba-kube-api-access-sc7g5\") on node \"crc\" DevicePath \"\"" Jan 20 17:01:48 crc kubenswrapper[4558]: I0120 17:01:48.944432 4558 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/07a241d6-001d-4677-a411-58e84c8f21ba-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 20 17:01:48 crc kubenswrapper[4558]: I0120 17:01:48.944479 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cc9h5\" (UniqueName: \"kubernetes.io/projected/162a1081-b509-4f76-82ee-f3470f788b2d-kube-api-access-cc9h5\") on node \"crc\" DevicePath \"\"" Jan 20 17:01:48 crc kubenswrapper[4558]: I0120 17:01:48.944529 4558 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/07a241d6-001d-4677-a411-58e84c8f21ba-swiftconf\") on node \"crc\" DevicePath \"\"" Jan 20 17:01:48 crc kubenswrapper[4558]: I0120 17:01:48.979287 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:01:49 crc kubenswrapper[4558]: I0120 17:01:49.334148 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-storage-0"] Jan 20 17:01:49 crc kubenswrapper[4558]: W0120 17:01:49.336828 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod10edb733_8fab_4543_ad29_3568e3de5aea.slice/crio-48ec79c7d990680327d80e2f93a0b1b8dc3dbcc046f63622f6eacf24bab8908c WatchSource:0}: Error finding container 48ec79c7d990680327d80e2f93a0b1b8dc3dbcc046f63622f6eacf24bab8908c: Status 404 returned error can't find the container with id 48ec79c7d990680327d80e2f93a0b1b8dc3dbcc046f63622f6eacf24bab8908c Jan 20 17:01:49 crc kubenswrapper[4558]: I0120 17:01:49.401804 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"10edb733-8fab-4543-ad29-3568e3de5aea","Type":"ContainerStarted","Data":"48ec79c7d990680327d80e2f93a0b1b8dc3dbcc046f63622f6eacf24bab8908c"} Jan 20 17:01:49 crc kubenswrapper[4558]: I0120 17:01:49.402973 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-4mm2d" Jan 20 17:01:49 crc kubenswrapper[4558]: I0120 17:01:49.403288 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-4mm2d" event={"ID":"162a1081-b509-4f76-82ee-f3470f788b2d","Type":"ContainerDied","Data":"b391b842a4b69f486bbc56c0accd9e10fbbf9633e494c3ae7b5574cfd9d31a0a"} Jan 20 17:01:49 crc kubenswrapper[4558]: I0120 17:01:49.403331 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b391b842a4b69f486bbc56c0accd9e10fbbf9633e494c3ae7b5574cfd9d31a0a" Jan 20 17:01:49 crc kubenswrapper[4558]: I0120 17:01:49.404538 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-ring-rebalance-v4kx6" event={"ID":"07a241d6-001d-4677-a411-58e84c8f21ba","Type":"ContainerDied","Data":"445f11a481dd2cee027eb7e59f4fb0a15a9a8bc80cafdd32c161d47874148769"} Jan 20 17:01:49 crc kubenswrapper[4558]: I0120 17:01:49.404564 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="445f11a481dd2cee027eb7e59f4fb0a15a9a8bc80cafdd32c161d47874148769" Jan 20 17:01:49 crc kubenswrapper[4558]: I0120 17:01:49.404582 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-ring-rebalance-v4kx6" Jan 20 17:01:49 crc kubenswrapper[4558]: I0120 17:01:49.866437 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-db-sync-hsdcl"] Jan 20 17:01:49 crc kubenswrapper[4558]: E0120 17:01:49.867104 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="162a1081-b509-4f76-82ee-f3470f788b2d" containerName="mariadb-account-create-update" Jan 20 17:01:49 crc kubenswrapper[4558]: I0120 17:01:49.867252 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="162a1081-b509-4f76-82ee-f3470f788b2d" containerName="mariadb-account-create-update" Jan 20 17:01:49 crc kubenswrapper[4558]: E0120 17:01:49.867334 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="07a241d6-001d-4677-a411-58e84c8f21ba" containerName="swift-ring-rebalance" Jan 20 17:01:49 crc kubenswrapper[4558]: I0120 17:01:49.867382 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="07a241d6-001d-4677-a411-58e84c8f21ba" containerName="swift-ring-rebalance" Jan 20 17:01:49 crc kubenswrapper[4558]: E0120 17:01:49.867443 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d1fde627-f808-4f60-82f9-3a9d13491932" containerName="mariadb-database-create" Jan 20 17:01:49 crc kubenswrapper[4558]: I0120 17:01:49.867486 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d1fde627-f808-4f60-82f9-3a9d13491932" containerName="mariadb-database-create" Jan 20 17:01:49 crc kubenswrapper[4558]: E0120 17:01:49.867538 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1ca9fc0-7d35-467b-8b84-da1e4d6bfa49" containerName="mariadb-account-create-update" Jan 20 17:01:49 crc kubenswrapper[4558]: I0120 17:01:49.867579 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1ca9fc0-7d35-467b-8b84-da1e4d6bfa49" containerName="mariadb-account-create-update" Jan 20 17:01:49 crc kubenswrapper[4558]: I0120 17:01:49.867779 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c1ca9fc0-7d35-467b-8b84-da1e4d6bfa49" containerName="mariadb-account-create-update" Jan 20 17:01:49 crc kubenswrapper[4558]: I0120 17:01:49.867833 4558 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="162a1081-b509-4f76-82ee-f3470f788b2d" containerName="mariadb-account-create-update" Jan 20 17:01:49 crc kubenswrapper[4558]: I0120 17:01:49.867891 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="07a241d6-001d-4677-a411-58e84c8f21ba" containerName="swift-ring-rebalance" Jan 20 17:01:49 crc kubenswrapper[4558]: I0120 17:01:49.867953 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d1fde627-f808-4f60-82f9-3a9d13491932" containerName="mariadb-database-create" Jan 20 17:01:49 crc kubenswrapper[4558]: I0120 17:01:49.868611 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-db-sync-hsdcl" Jan 20 17:01:49 crc kubenswrapper[4558]: I0120 17:01:49.871577 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-glance-dockercfg-hb57g" Jan 20 17:01:49 crc kubenswrapper[4558]: I0120 17:01:49.871756 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-config-data" Jan 20 17:01:49 crc kubenswrapper[4558]: I0120 17:01:49.881207 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-db-sync-hsdcl"] Jan 20 17:01:49 crc kubenswrapper[4558]: I0120 17:01:49.960971 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hkxvl\" (UniqueName: \"kubernetes.io/projected/9a51157e-3cf1-473a-b0e5-d9a8d4fd5f84-kube-api-access-hkxvl\") pod \"glance-db-sync-hsdcl\" (UID: \"9a51157e-3cf1-473a-b0e5-d9a8d4fd5f84\") " pod="openstack-kuttl-tests/glance-db-sync-hsdcl" Jan 20 17:01:49 crc kubenswrapper[4558]: I0120 17:01:49.961102 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9a51157e-3cf1-473a-b0e5-d9a8d4fd5f84-config-data\") pod \"glance-db-sync-hsdcl\" (UID: \"9a51157e-3cf1-473a-b0e5-d9a8d4fd5f84\") " pod="openstack-kuttl-tests/glance-db-sync-hsdcl" Jan 20 17:01:49 crc kubenswrapper[4558]: I0120 17:01:49.961345 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a51157e-3cf1-473a-b0e5-d9a8d4fd5f84-combined-ca-bundle\") pod \"glance-db-sync-hsdcl\" (UID: \"9a51157e-3cf1-473a-b0e5-d9a8d4fd5f84\") " pod="openstack-kuttl-tests/glance-db-sync-hsdcl" Jan 20 17:01:49 crc kubenswrapper[4558]: I0120 17:01:49.961446 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/9a51157e-3cf1-473a-b0e5-d9a8d4fd5f84-db-sync-config-data\") pod \"glance-db-sync-hsdcl\" (UID: \"9a51157e-3cf1-473a-b0e5-d9a8d4fd5f84\") " pod="openstack-kuttl-tests/glance-db-sync-hsdcl" Jan 20 17:01:50 crc kubenswrapper[4558]: I0120 17:01:50.062930 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a51157e-3cf1-473a-b0e5-d9a8d4fd5f84-combined-ca-bundle\") pod \"glance-db-sync-hsdcl\" (UID: \"9a51157e-3cf1-473a-b0e5-d9a8d4fd5f84\") " pod="openstack-kuttl-tests/glance-db-sync-hsdcl" Jan 20 17:01:50 crc kubenswrapper[4558]: I0120 17:01:50.063061 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/9a51157e-3cf1-473a-b0e5-d9a8d4fd5f84-db-sync-config-data\") pod \"glance-db-sync-hsdcl\" (UID: 
\"9a51157e-3cf1-473a-b0e5-d9a8d4fd5f84\") " pod="openstack-kuttl-tests/glance-db-sync-hsdcl" Jan 20 17:01:50 crc kubenswrapper[4558]: I0120 17:01:50.063152 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hkxvl\" (UniqueName: \"kubernetes.io/projected/9a51157e-3cf1-473a-b0e5-d9a8d4fd5f84-kube-api-access-hkxvl\") pod \"glance-db-sync-hsdcl\" (UID: \"9a51157e-3cf1-473a-b0e5-d9a8d4fd5f84\") " pod="openstack-kuttl-tests/glance-db-sync-hsdcl" Jan 20 17:01:50 crc kubenswrapper[4558]: I0120 17:01:50.063268 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9a51157e-3cf1-473a-b0e5-d9a8d4fd5f84-config-data\") pod \"glance-db-sync-hsdcl\" (UID: \"9a51157e-3cf1-473a-b0e5-d9a8d4fd5f84\") " pod="openstack-kuttl-tests/glance-db-sync-hsdcl" Jan 20 17:01:50 crc kubenswrapper[4558]: I0120 17:01:50.067568 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a51157e-3cf1-473a-b0e5-d9a8d4fd5f84-combined-ca-bundle\") pod \"glance-db-sync-hsdcl\" (UID: \"9a51157e-3cf1-473a-b0e5-d9a8d4fd5f84\") " pod="openstack-kuttl-tests/glance-db-sync-hsdcl" Jan 20 17:01:50 crc kubenswrapper[4558]: I0120 17:01:50.067582 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/9a51157e-3cf1-473a-b0e5-d9a8d4fd5f84-db-sync-config-data\") pod \"glance-db-sync-hsdcl\" (UID: \"9a51157e-3cf1-473a-b0e5-d9a8d4fd5f84\") " pod="openstack-kuttl-tests/glance-db-sync-hsdcl" Jan 20 17:01:50 crc kubenswrapper[4558]: I0120 17:01:50.068399 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9a51157e-3cf1-473a-b0e5-d9a8d4fd5f84-config-data\") pod \"glance-db-sync-hsdcl\" (UID: \"9a51157e-3cf1-473a-b0e5-d9a8d4fd5f84\") " pod="openstack-kuttl-tests/glance-db-sync-hsdcl" Jan 20 17:01:50 crc kubenswrapper[4558]: I0120 17:01:50.076383 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hkxvl\" (UniqueName: \"kubernetes.io/projected/9a51157e-3cf1-473a-b0e5-d9a8d4fd5f84-kube-api-access-hkxvl\") pod \"glance-db-sync-hsdcl\" (UID: \"9a51157e-3cf1-473a-b0e5-d9a8d4fd5f84\") " pod="openstack-kuttl-tests/glance-db-sync-hsdcl" Jan 20 17:01:50 crc kubenswrapper[4558]: I0120 17:01:50.203901 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-db-sync-hsdcl" Jan 20 17:01:50 crc kubenswrapper[4558]: I0120 17:01:50.413613 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"10edb733-8fab-4543-ad29-3568e3de5aea","Type":"ContainerStarted","Data":"7da72c8a88027dd1e413e53fd692c99fc03f3e37634323ae66f7e9f55370719e"} Jan 20 17:01:50 crc kubenswrapper[4558]: I0120 17:01:50.413802 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"10edb733-8fab-4543-ad29-3568e3de5aea","Type":"ContainerStarted","Data":"8f45926ec3655a6641edb5a009493f355b1f6533cdddced2af79c34663d3397c"} Jan 20 17:01:50 crc kubenswrapper[4558]: I0120 17:01:50.413813 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"10edb733-8fab-4543-ad29-3568e3de5aea","Type":"ContainerStarted","Data":"576e4656d99cd9370e77241bb14cd66a4de3e94deaef7d304253b1816ec67dd5"} Jan 20 17:01:50 crc kubenswrapper[4558]: I0120 17:01:50.413821 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"10edb733-8fab-4543-ad29-3568e3de5aea","Type":"ContainerStarted","Data":"f2f9c003229a8c4bff074beb609ad6fba9bf0167b989fcddbe4ee674bb6a73bd"} Jan 20 17:01:50 crc kubenswrapper[4558]: I0120 17:01:50.413829 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"10edb733-8fab-4543-ad29-3568e3de5aea","Type":"ContainerStarted","Data":"6e25c21c1436e8d3c70552c967000697e095b890d75c613e94acbebcf601e385"} Jan 20 17:01:50 crc kubenswrapper[4558]: I0120 17:01:50.413837 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"10edb733-8fab-4543-ad29-3568e3de5aea","Type":"ContainerStarted","Data":"fc9635ec26b842f65146e97cb678f89fadc8730a998b6cc02a5c959a0601c4bf"} Jan 20 17:01:50 crc kubenswrapper[4558]: I0120 17:01:50.413844 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"10edb733-8fab-4543-ad29-3568e3de5aea","Type":"ContainerStarted","Data":"103cf0e411f4211e757684612d3274497733c55f59ba7dc63dd3ad2bfec99d18"} Jan 20 17:01:50 crc kubenswrapper[4558]: I0120 17:01:50.413851 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"10edb733-8fab-4543-ad29-3568e3de5aea","Type":"ContainerStarted","Data":"6d71dae893fc6a1962df504002d772c4cfe40ee0a96ad60259292b2cd410de30"} Jan 20 17:01:50 crc kubenswrapper[4558]: I0120 17:01:50.413858 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"10edb733-8fab-4543-ad29-3568e3de5aea","Type":"ContainerStarted","Data":"87f7b0a910a29230a9846bbbde9e7c30fd5ead2357d9f18d957764288be50fe1"} Jan 20 17:01:50 crc kubenswrapper[4558]: I0120 17:01:50.561911 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-db-sync-hsdcl"] Jan 20 17:01:50 crc kubenswrapper[4558]: W0120 17:01:50.568481 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9a51157e_3cf1_473a_b0e5_d9a8d4fd5f84.slice/crio-353e6d539b1b66f960d44bad2d14e5d47f42c4c8fa0d0755eb02257000840cd7 WatchSource:0}: Error finding container 353e6d539b1b66f960d44bad2d14e5d47f42c4c8fa0d0755eb02257000840cd7: Status 404 returned error can't find the container with id 
353e6d539b1b66f960d44bad2d14e5d47f42c4c8fa0d0755eb02257000840cd7 Jan 20 17:01:51 crc kubenswrapper[4558]: I0120 17:01:51.421594 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-sync-hsdcl" event={"ID":"9a51157e-3cf1-473a-b0e5-d9a8d4fd5f84","Type":"ContainerStarted","Data":"f6c131a484e6463785eddbec476157d62fb901a314f15a2feebc53547803030e"} Jan 20 17:01:51 crc kubenswrapper[4558]: I0120 17:01:51.421861 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-sync-hsdcl" event={"ID":"9a51157e-3cf1-473a-b0e5-d9a8d4fd5f84","Type":"ContainerStarted","Data":"353e6d539b1b66f960d44bad2d14e5d47f42c4c8fa0d0755eb02257000840cd7"} Jan 20 17:01:51 crc kubenswrapper[4558]: I0120 17:01:51.426266 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"10edb733-8fab-4543-ad29-3568e3de5aea","Type":"ContainerStarted","Data":"2063896701b2ecd29a95f5f7c1e566bc1a196a26a0392d6e32c7bb743ea5dbde"} Jan 20 17:01:51 crc kubenswrapper[4558]: I0120 17:01:51.426304 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"10edb733-8fab-4543-ad29-3568e3de5aea","Type":"ContainerStarted","Data":"944fc0bf7c16ccd30559b80cdc699372748ca2f14ed832fbb7c9cbf4c5083aed"} Jan 20 17:01:51 crc kubenswrapper[4558]: I0120 17:01:51.426316 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"10edb733-8fab-4543-ad29-3568e3de5aea","Type":"ContainerStarted","Data":"8e36100266a8d13e7ab5ec75f4795cb2bfafd04327c6000ba4236e64554b079e"} Jan 20 17:01:51 crc kubenswrapper[4558]: I0120 17:01:51.426324 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"10edb733-8fab-4543-ad29-3568e3de5aea","Type":"ContainerStarted","Data":"064c39cc0ed32246609bc4e80f5c679dc77d71ae4e0e7325c8b80942e4b6cd46"} Jan 20 17:01:51 crc kubenswrapper[4558]: I0120 17:01:51.426331 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"10edb733-8fab-4543-ad29-3568e3de5aea","Type":"ContainerStarted","Data":"a00e2bc91063a2dabb4c2ce5cfdc3e96683c6b04440f6eeeadc69adf7e0e8a23"} Jan 20 17:01:51 crc kubenswrapper[4558]: I0120 17:01:51.426339 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"10edb733-8fab-4543-ad29-3568e3de5aea","Type":"ContainerStarted","Data":"1ad6198d74b43bff1f98c2de1da53f4e18e93c848f58a003ec586b574d66eb1f"} Jan 20 17:01:51 crc kubenswrapper[4558]: I0120 17:01:51.471816 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-db-sync-hsdcl" podStartSLOduration=2.47180215 podStartE2EDuration="2.47180215s" podCreationTimestamp="2026-01-20 17:01:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:01:51.440862084 +0000 UTC m=+1205.201200050" watchObservedRunningTime="2026-01-20 17:01:51.47180215 +0000 UTC m=+1205.232140116" Jan 20 17:01:51 crc kubenswrapper[4558]: I0120 17:01:51.473324 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/swift-storage-0" podStartSLOduration=11.473318953 podStartE2EDuration="11.473318953s" podCreationTimestamp="2026-01-20 17:01:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 
+0000 UTC" observedRunningTime="2026-01-20 17:01:51.468560013 +0000 UTC m=+1205.228897979" watchObservedRunningTime="2026-01-20 17:01:51.473318953 +0000 UTC m=+1205.233656920" Jan 20 17:01:51 crc kubenswrapper[4558]: I0120 17:01:51.581761 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-6db984dd9-47b5d"] Jan 20 17:01:51 crc kubenswrapper[4558]: I0120 17:01:51.582897 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6db984dd9-47b5d" Jan 20 17:01:51 crc kubenswrapper[4558]: I0120 17:01:51.586476 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"dns-swift-storage-0" Jan 20 17:01:51 crc kubenswrapper[4558]: I0120 17:01:51.589718 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-6db984dd9-47b5d"] Jan 20 17:01:51 crc kubenswrapper[4558]: I0120 17:01:51.684981 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6846508c-01bc-49e8-b98b-5df9ec57f030-config\") pod \"dnsmasq-dnsmasq-6db984dd9-47b5d\" (UID: \"6846508c-01bc-49e8-b98b-5df9ec57f030\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6db984dd9-47b5d" Jan 20 17:01:51 crc kubenswrapper[4558]: I0120 17:01:51.685016 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/6846508c-01bc-49e8-b98b-5df9ec57f030-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-6db984dd9-47b5d\" (UID: \"6846508c-01bc-49e8-b98b-5df9ec57f030\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6db984dd9-47b5d" Jan 20 17:01:51 crc kubenswrapper[4558]: I0120 17:01:51.685137 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6846508c-01bc-49e8-b98b-5df9ec57f030-dns-swift-storage-0\") pod \"dnsmasq-dnsmasq-6db984dd9-47b5d\" (UID: \"6846508c-01bc-49e8-b98b-5df9ec57f030\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6db984dd9-47b5d" Jan 20 17:01:51 crc kubenswrapper[4558]: I0120 17:01:51.685269 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-55qxz\" (UniqueName: \"kubernetes.io/projected/6846508c-01bc-49e8-b98b-5df9ec57f030-kube-api-access-55qxz\") pod \"dnsmasq-dnsmasq-6db984dd9-47b5d\" (UID: \"6846508c-01bc-49e8-b98b-5df9ec57f030\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6db984dd9-47b5d" Jan 20 17:01:51 crc kubenswrapper[4558]: I0120 17:01:51.786947 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6846508c-01bc-49e8-b98b-5df9ec57f030-dns-swift-storage-0\") pod \"dnsmasq-dnsmasq-6db984dd9-47b5d\" (UID: \"6846508c-01bc-49e8-b98b-5df9ec57f030\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6db984dd9-47b5d" Jan 20 17:01:51 crc kubenswrapper[4558]: I0120 17:01:51.787035 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-55qxz\" (UniqueName: \"kubernetes.io/projected/6846508c-01bc-49e8-b98b-5df9ec57f030-kube-api-access-55qxz\") pod \"dnsmasq-dnsmasq-6db984dd9-47b5d\" (UID: \"6846508c-01bc-49e8-b98b-5df9ec57f030\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6db984dd9-47b5d" Jan 20 17:01:51 crc kubenswrapper[4558]: I0120 17:01:51.787158 4558 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6846508c-01bc-49e8-b98b-5df9ec57f030-config\") pod \"dnsmasq-dnsmasq-6db984dd9-47b5d\" (UID: \"6846508c-01bc-49e8-b98b-5df9ec57f030\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6db984dd9-47b5d" Jan 20 17:01:51 crc kubenswrapper[4558]: I0120 17:01:51.787190 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/6846508c-01bc-49e8-b98b-5df9ec57f030-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-6db984dd9-47b5d\" (UID: \"6846508c-01bc-49e8-b98b-5df9ec57f030\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6db984dd9-47b5d" Jan 20 17:01:51 crc kubenswrapper[4558]: I0120 17:01:51.787772 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6846508c-01bc-49e8-b98b-5df9ec57f030-dns-swift-storage-0\") pod \"dnsmasq-dnsmasq-6db984dd9-47b5d\" (UID: \"6846508c-01bc-49e8-b98b-5df9ec57f030\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6db984dd9-47b5d" Jan 20 17:01:51 crc kubenswrapper[4558]: I0120 17:01:51.787789 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/6846508c-01bc-49e8-b98b-5df9ec57f030-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-6db984dd9-47b5d\" (UID: \"6846508c-01bc-49e8-b98b-5df9ec57f030\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6db984dd9-47b5d" Jan 20 17:01:51 crc kubenswrapper[4558]: I0120 17:01:51.787831 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6846508c-01bc-49e8-b98b-5df9ec57f030-config\") pod \"dnsmasq-dnsmasq-6db984dd9-47b5d\" (UID: \"6846508c-01bc-49e8-b98b-5df9ec57f030\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6db984dd9-47b5d" Jan 20 17:01:51 crc kubenswrapper[4558]: I0120 17:01:51.804752 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-55qxz\" (UniqueName: \"kubernetes.io/projected/6846508c-01bc-49e8-b98b-5df9ec57f030-kube-api-access-55qxz\") pod \"dnsmasq-dnsmasq-6db984dd9-47b5d\" (UID: \"6846508c-01bc-49e8-b98b-5df9ec57f030\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6db984dd9-47b5d" Jan 20 17:01:51 crc kubenswrapper[4558]: I0120 17:01:51.904601 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6db984dd9-47b5d" Jan 20 17:01:52 crc kubenswrapper[4558]: I0120 17:01:52.354721 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-6db984dd9-47b5d"] Jan 20 17:01:52 crc kubenswrapper[4558]: I0120 17:01:52.432216 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6db984dd9-47b5d" event={"ID":"6846508c-01bc-49e8-b98b-5df9ec57f030","Type":"ContainerStarted","Data":"146767f56896346d3b4b1ec97e73ee66cabf2384ba4ee7402dcc81c81a3b0f8a"} Jan 20 17:01:52 crc kubenswrapper[4558]: I0120 17:01:52.519202 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:01:52 crc kubenswrapper[4558]: I0120 17:01:52.644806 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-4mm2d"] Jan 20 17:01:52 crc kubenswrapper[4558]: I0120 17:01:52.648902 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-4mm2d"] Jan 20 17:01:53 crc kubenswrapper[4558]: I0120 17:01:53.439352 4558 generic.go:334] "Generic (PLEG): container finished" podID="9a51157e-3cf1-473a-b0e5-d9a8d4fd5f84" containerID="f6c131a484e6463785eddbec476157d62fb901a314f15a2feebc53547803030e" exitCode=0 Jan 20 17:01:53 crc kubenswrapper[4558]: I0120 17:01:53.439423 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-sync-hsdcl" event={"ID":"9a51157e-3cf1-473a-b0e5-d9a8d4fd5f84","Type":"ContainerDied","Data":"f6c131a484e6463785eddbec476157d62fb901a314f15a2feebc53547803030e"} Jan 20 17:01:53 crc kubenswrapper[4558]: I0120 17:01:53.440791 4558 generic.go:334] "Generic (PLEG): container finished" podID="6846508c-01bc-49e8-b98b-5df9ec57f030" containerID="741ed849a76d51e10ae4f6bfc13c4ea0462a1f19837b48799ff426dd12db2262" exitCode=0 Jan 20 17:01:53 crc kubenswrapper[4558]: I0120 17:01:53.440829 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6db984dd9-47b5d" event={"ID":"6846508c-01bc-49e8-b98b-5df9ec57f030","Type":"ContainerDied","Data":"741ed849a76d51e10ae4f6bfc13c4ea0462a1f19837b48799ff426dd12db2262"} Jan 20 17:01:54 crc kubenswrapper[4558]: I0120 17:01:54.448690 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6db984dd9-47b5d" event={"ID":"6846508c-01bc-49e8-b98b-5df9ec57f030","Type":"ContainerStarted","Data":"b49dd264bdbda0a5b2edd1aecfbbd56731980f938da69a90df913ea26b0db35c"} Jan 20 17:01:54 crc kubenswrapper[4558]: I0120 17:01:54.449028 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6db984dd9-47b5d" Jan 20 17:01:54 crc kubenswrapper[4558]: I0120 17:01:54.460670 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6db984dd9-47b5d" podStartSLOduration=3.4606552280000002 podStartE2EDuration="3.460655228s" podCreationTimestamp="2026-01-20 17:01:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:01:54.459506989 +0000 UTC m=+1208.219844955" watchObservedRunningTime="2026-01-20 17:01:54.460655228 +0000 UTC m=+1208.220993194" Jan 20 17:01:54 crc kubenswrapper[4558]: I0120 17:01:54.583015 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="162a1081-b509-4f76-82ee-f3470f788b2d" path="/var/lib/kubelet/pods/162a1081-b509-4f76-82ee-f3470f788b2d/volumes" Jan 20 17:01:54 crc kubenswrapper[4558]: I0120 17:01:54.732376 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-db-sync-hsdcl" Jan 20 17:01:54 crc kubenswrapper[4558]: I0120 17:01:54.831059 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/9a51157e-3cf1-473a-b0e5-d9a8d4fd5f84-db-sync-config-data\") pod \"9a51157e-3cf1-473a-b0e5-d9a8d4fd5f84\" (UID: \"9a51157e-3cf1-473a-b0e5-d9a8d4fd5f84\") " Jan 20 17:01:54 crc kubenswrapper[4558]: I0120 17:01:54.831299 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a51157e-3cf1-473a-b0e5-d9a8d4fd5f84-combined-ca-bundle\") pod \"9a51157e-3cf1-473a-b0e5-d9a8d4fd5f84\" (UID: \"9a51157e-3cf1-473a-b0e5-d9a8d4fd5f84\") " Jan 20 17:01:54 crc kubenswrapper[4558]: I0120 17:01:54.831483 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9a51157e-3cf1-473a-b0e5-d9a8d4fd5f84-config-data\") pod \"9a51157e-3cf1-473a-b0e5-d9a8d4fd5f84\" (UID: \"9a51157e-3cf1-473a-b0e5-d9a8d4fd5f84\") " Jan 20 17:01:54 crc kubenswrapper[4558]: I0120 17:01:54.831946 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hkxvl\" (UniqueName: \"kubernetes.io/projected/9a51157e-3cf1-473a-b0e5-d9a8d4fd5f84-kube-api-access-hkxvl\") pod \"9a51157e-3cf1-473a-b0e5-d9a8d4fd5f84\" (UID: \"9a51157e-3cf1-473a-b0e5-d9a8d4fd5f84\") " Jan 20 17:01:54 crc kubenswrapper[4558]: I0120 17:01:54.845546 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9a51157e-3cf1-473a-b0e5-d9a8d4fd5f84-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "9a51157e-3cf1-473a-b0e5-d9a8d4fd5f84" (UID: "9a51157e-3cf1-473a-b0e5-d9a8d4fd5f84"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:01:54 crc kubenswrapper[4558]: I0120 17:01:54.846215 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9a51157e-3cf1-473a-b0e5-d9a8d4fd5f84-kube-api-access-hkxvl" (OuterVolumeSpecName: "kube-api-access-hkxvl") pod "9a51157e-3cf1-473a-b0e5-d9a8d4fd5f84" (UID: "9a51157e-3cf1-473a-b0e5-d9a8d4fd5f84"). InnerVolumeSpecName "kube-api-access-hkxvl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:01:54 crc kubenswrapper[4558]: I0120 17:01:54.847686 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9a51157e-3cf1-473a-b0e5-d9a8d4fd5f84-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9a51157e-3cf1-473a-b0e5-d9a8d4fd5f84" (UID: "9a51157e-3cf1-473a-b0e5-d9a8d4fd5f84"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:01:54 crc kubenswrapper[4558]: I0120 17:01:54.859435 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9a51157e-3cf1-473a-b0e5-d9a8d4fd5f84-config-data" (OuterVolumeSpecName: "config-data") pod "9a51157e-3cf1-473a-b0e5-d9a8d4fd5f84" (UID: "9a51157e-3cf1-473a-b0e5-d9a8d4fd5f84"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:01:54 crc kubenswrapper[4558]: I0120 17:01:54.933687 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9a51157e-3cf1-473a-b0e5-d9a8d4fd5f84-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:01:54 crc kubenswrapper[4558]: I0120 17:01:54.933710 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hkxvl\" (UniqueName: \"kubernetes.io/projected/9a51157e-3cf1-473a-b0e5-d9a8d4fd5f84-kube-api-access-hkxvl\") on node \"crc\" DevicePath \"\"" Jan 20 17:01:54 crc kubenswrapper[4558]: I0120 17:01:54.933720 4558 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/9a51157e-3cf1-473a-b0e5-d9a8d4fd5f84-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:01:54 crc kubenswrapper[4558]: I0120 17:01:54.933729 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a51157e-3cf1-473a-b0e5-d9a8d4fd5f84-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:01:55 crc kubenswrapper[4558]: I0120 17:01:55.458730 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-sync-hsdcl" event={"ID":"9a51157e-3cf1-473a-b0e5-d9a8d4fd5f84","Type":"ContainerDied","Data":"353e6d539b1b66f960d44bad2d14e5d47f42c4c8fa0d0755eb02257000840cd7"} Jan 20 17:01:55 crc kubenswrapper[4558]: I0120 17:01:55.459683 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="353e6d539b1b66f960d44bad2d14e5d47f42c4c8fa0d0755eb02257000840cd7" Jan 20 17:01:55 crc kubenswrapper[4558]: I0120 17:01:55.458788 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-db-sync-hsdcl" Jan 20 17:01:57 crc kubenswrapper[4558]: I0120 17:01:57.648264 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/root-account-create-update-6d9jk"] Jan 20 17:01:57 crc kubenswrapper[4558]: E0120 17:01:57.649305 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a51157e-3cf1-473a-b0e5-d9a8d4fd5f84" containerName="glance-db-sync" Jan 20 17:01:57 crc kubenswrapper[4558]: I0120 17:01:57.649381 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a51157e-3cf1-473a-b0e5-d9a8d4fd5f84" containerName="glance-db-sync" Jan 20 17:01:57 crc kubenswrapper[4558]: I0120 17:01:57.649625 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="9a51157e-3cf1-473a-b0e5-d9a8d4fd5f84" containerName="glance-db-sync" Jan 20 17:01:57 crc kubenswrapper[4558]: I0120 17:01:57.650089 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-6d9jk" Jan 20 17:01:57 crc kubenswrapper[4558]: I0120 17:01:57.652183 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"openstack-cell1-mariadb-root-db-secret" Jan 20 17:01:57 crc kubenswrapper[4558]: I0120 17:01:57.655980 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-6d9jk"] Jan 20 17:01:57 crc kubenswrapper[4558]: I0120 17:01:57.771998 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7644e6dc-6bdb-4e65-b6c3-be904f9bbf90-operator-scripts\") pod \"root-account-create-update-6d9jk\" (UID: \"7644e6dc-6bdb-4e65-b6c3-be904f9bbf90\") " pod="openstack-kuttl-tests/root-account-create-update-6d9jk" Jan 20 17:01:57 crc kubenswrapper[4558]: I0120 17:01:57.772063 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rcftr\" (UniqueName: \"kubernetes.io/projected/7644e6dc-6bdb-4e65-b6c3-be904f9bbf90-kube-api-access-rcftr\") pod \"root-account-create-update-6d9jk\" (UID: \"7644e6dc-6bdb-4e65-b6c3-be904f9bbf90\") " pod="openstack-kuttl-tests/root-account-create-update-6d9jk" Jan 20 17:01:57 crc kubenswrapper[4558]: I0120 17:01:57.873766 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7644e6dc-6bdb-4e65-b6c3-be904f9bbf90-operator-scripts\") pod \"root-account-create-update-6d9jk\" (UID: \"7644e6dc-6bdb-4e65-b6c3-be904f9bbf90\") " pod="openstack-kuttl-tests/root-account-create-update-6d9jk" Jan 20 17:01:57 crc kubenswrapper[4558]: I0120 17:01:57.873862 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rcftr\" (UniqueName: \"kubernetes.io/projected/7644e6dc-6bdb-4e65-b6c3-be904f9bbf90-kube-api-access-rcftr\") pod \"root-account-create-update-6d9jk\" (UID: \"7644e6dc-6bdb-4e65-b6c3-be904f9bbf90\") " pod="openstack-kuttl-tests/root-account-create-update-6d9jk" Jan 20 17:01:57 crc kubenswrapper[4558]: I0120 17:01:57.874681 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7644e6dc-6bdb-4e65-b6c3-be904f9bbf90-operator-scripts\") pod \"root-account-create-update-6d9jk\" (UID: \"7644e6dc-6bdb-4e65-b6c3-be904f9bbf90\") " pod="openstack-kuttl-tests/root-account-create-update-6d9jk" Jan 20 17:01:57 crc kubenswrapper[4558]: I0120 17:01:57.888582 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rcftr\" (UniqueName: \"kubernetes.io/projected/7644e6dc-6bdb-4e65-b6c3-be904f9bbf90-kube-api-access-rcftr\") pod \"root-account-create-update-6d9jk\" (UID: \"7644e6dc-6bdb-4e65-b6c3-be904f9bbf90\") " pod="openstack-kuttl-tests/root-account-create-update-6d9jk" Jan 20 17:01:57 crc kubenswrapper[4558]: I0120 17:01:57.963959 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-6d9jk" Jan 20 17:01:58 crc kubenswrapper[4558]: I0120 17:01:58.316405 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-6d9jk"] Jan 20 17:01:58 crc kubenswrapper[4558]: W0120 17:01:58.320817 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7644e6dc_6bdb_4e65_b6c3_be904f9bbf90.slice/crio-ded8490be4ca687452307a89bf64ec308bb175b6acb1e09bb4695d33f71fc17f WatchSource:0}: Error finding container ded8490be4ca687452307a89bf64ec308bb175b6acb1e09bb4695d33f71fc17f: Status 404 returned error can't find the container with id ded8490be4ca687452307a89bf64ec308bb175b6acb1e09bb4695d33f71fc17f Jan 20 17:01:58 crc kubenswrapper[4558]: I0120 17:01:58.475882 4558 generic.go:334] "Generic (PLEG): container finished" podID="0bfc3458-cc0f-4bea-9794-52c5e81fe055" containerID="211ea3122594cd690108a40fb561073e8fbaa0a0b8f082460cd412375c3bbcfa" exitCode=0 Jan 20 17:01:58 crc kubenswrapper[4558]: I0120 17:01:58.475955 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" event={"ID":"0bfc3458-cc0f-4bea-9794-52c5e81fe055","Type":"ContainerDied","Data":"211ea3122594cd690108a40fb561073e8fbaa0a0b8f082460cd412375c3bbcfa"} Jan 20 17:01:58 crc kubenswrapper[4558]: I0120 17:01:58.477258 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-6d9jk" event={"ID":"7644e6dc-6bdb-4e65-b6c3-be904f9bbf90","Type":"ContainerStarted","Data":"d09c1f5cb99b9abbcc9bafcadac3092d4b77d95efd6cfae624db258c91598c5b"} Jan 20 17:01:58 crc kubenswrapper[4558]: I0120 17:01:58.477303 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-6d9jk" event={"ID":"7644e6dc-6bdb-4e65-b6c3-be904f9bbf90","Type":"ContainerStarted","Data":"ded8490be4ca687452307a89bf64ec308bb175b6acb1e09bb4695d33f71fc17f"} Jan 20 17:01:58 crc kubenswrapper[4558]: I0120 17:01:58.510581 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/root-account-create-update-6d9jk" podStartSLOduration=1.510566498 podStartE2EDuration="1.510566498s" podCreationTimestamp="2026-01-20 17:01:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:01:58.503109435 +0000 UTC m=+1212.263447402" watchObservedRunningTime="2026-01-20 17:01:58.510566498 +0000 UTC m=+1212.270904466" Jan 20 17:01:59 crc kubenswrapper[4558]: I0120 17:01:59.483963 4558 generic.go:334] "Generic (PLEG): container finished" podID="ac55b716-d8fd-4628-8627-f94b5a4e7c78" containerID="763e019dc0645475db7f3291e11eaa6439d09116fc1541ec554386e84ad53b91" exitCode=0 Jan 20 17:01:59 crc kubenswrapper[4558]: I0120 17:01:59.484048 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-0" event={"ID":"ac55b716-d8fd-4628-8627-f94b5a4e7c78","Type":"ContainerDied","Data":"763e019dc0645475db7f3291e11eaa6439d09116fc1541ec554386e84ad53b91"} Jan 20 17:01:59 crc kubenswrapper[4558]: I0120 17:01:59.485846 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" event={"ID":"0bfc3458-cc0f-4bea-9794-52c5e81fe055","Type":"ContainerStarted","Data":"604aea968b2534ec5bcdc7b53dbf247a7fe69e766dfd7c01c1c3821d16d1ac11"} Jan 20 17:01:59 crc 
kubenswrapper[4558]: I0120 17:01:59.486024 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:01:59 crc kubenswrapper[4558]: I0120 17:01:59.487260 4558 generic.go:334] "Generic (PLEG): container finished" podID="7644e6dc-6bdb-4e65-b6c3-be904f9bbf90" containerID="d09c1f5cb99b9abbcc9bafcadac3092d4b77d95efd6cfae624db258c91598c5b" exitCode=0 Jan 20 17:01:59 crc kubenswrapper[4558]: I0120 17:01:59.487292 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-6d9jk" event={"ID":"7644e6dc-6bdb-4e65-b6c3-be904f9bbf90","Type":"ContainerDied","Data":"d09c1f5cb99b9abbcc9bafcadac3092d4b77d95efd6cfae624db258c91598c5b"} Jan 20 17:01:59 crc kubenswrapper[4558]: I0120 17:01:59.556039 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" podStartSLOduration=36.556026098 podStartE2EDuration="36.556026098s" podCreationTimestamp="2026-01-20 17:01:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:01:59.540933335 +0000 UTC m=+1213.301271301" watchObservedRunningTime="2026-01-20 17:01:59.556026098 +0000 UTC m=+1213.316364065" Jan 20 17:02:00 crc kubenswrapper[4558]: I0120 17:02:00.494789 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-0" event={"ID":"ac55b716-d8fd-4628-8627-f94b5a4e7c78","Type":"ContainerStarted","Data":"04bd1c8749941bda61752f5a94063fae9cbc9fde43b1430193379fd01f69c3ff"} Jan 20 17:02:00 crc kubenswrapper[4558]: I0120 17:02:00.495115 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:02:00 crc kubenswrapper[4558]: I0120 17:02:00.515356 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/rabbitmq-server-0" podStartSLOduration=36.515341221 podStartE2EDuration="36.515341221s" podCreationTimestamp="2026-01-20 17:01:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:02:00.51062395 +0000 UTC m=+1214.270961917" watchObservedRunningTime="2026-01-20 17:02:00.515341221 +0000 UTC m=+1214.275679188" Jan 20 17:02:00 crc kubenswrapper[4558]: I0120 17:02:00.767358 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-6d9jk" Jan 20 17:02:00 crc kubenswrapper[4558]: I0120 17:02:00.812811 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rcftr\" (UniqueName: \"kubernetes.io/projected/7644e6dc-6bdb-4e65-b6c3-be904f9bbf90-kube-api-access-rcftr\") pod \"7644e6dc-6bdb-4e65-b6c3-be904f9bbf90\" (UID: \"7644e6dc-6bdb-4e65-b6c3-be904f9bbf90\") " Jan 20 17:02:00 crc kubenswrapper[4558]: I0120 17:02:00.812933 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7644e6dc-6bdb-4e65-b6c3-be904f9bbf90-operator-scripts\") pod \"7644e6dc-6bdb-4e65-b6c3-be904f9bbf90\" (UID: \"7644e6dc-6bdb-4e65-b6c3-be904f9bbf90\") " Jan 20 17:02:00 crc kubenswrapper[4558]: I0120 17:02:00.813477 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7644e6dc-6bdb-4e65-b6c3-be904f9bbf90-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7644e6dc-6bdb-4e65-b6c3-be904f9bbf90" (UID: "7644e6dc-6bdb-4e65-b6c3-be904f9bbf90"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:02:00 crc kubenswrapper[4558]: I0120 17:02:00.816005 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7644e6dc-6bdb-4e65-b6c3-be904f9bbf90-kube-api-access-rcftr" (OuterVolumeSpecName: "kube-api-access-rcftr") pod "7644e6dc-6bdb-4e65-b6c3-be904f9bbf90" (UID: "7644e6dc-6bdb-4e65-b6c3-be904f9bbf90"). InnerVolumeSpecName "kube-api-access-rcftr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:02:00 crc kubenswrapper[4558]: I0120 17:02:00.914699 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rcftr\" (UniqueName: \"kubernetes.io/projected/7644e6dc-6bdb-4e65-b6c3-be904f9bbf90-kube-api-access-rcftr\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:00 crc kubenswrapper[4558]: I0120 17:02:00.914727 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7644e6dc-6bdb-4e65-b6c3-be904f9bbf90-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:01 crc kubenswrapper[4558]: I0120 17:02:01.503080 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-6d9jk" event={"ID":"7644e6dc-6bdb-4e65-b6c3-be904f9bbf90","Type":"ContainerDied","Data":"ded8490be4ca687452307a89bf64ec308bb175b6acb1e09bb4695d33f71fc17f"} Jan 20 17:02:01 crc kubenswrapper[4558]: I0120 17:02:01.503127 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-6d9jk" Jan 20 17:02:01 crc kubenswrapper[4558]: I0120 17:02:01.503143 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ded8490be4ca687452307a89bf64ec308bb175b6acb1e09bb4695d33f71fc17f" Jan 20 17:02:01 crc kubenswrapper[4558]: I0120 17:02:01.905780 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6db984dd9-47b5d" Jan 20 17:02:01 crc kubenswrapper[4558]: I0120 17:02:01.941152 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-rqg8f"] Jan 20 17:02:01 crc kubenswrapper[4558]: I0120 17:02:01.941464 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-rqg8f" podUID="f881d3ea-5346-49f4-8618-7271150ba300" containerName="dnsmasq-dns" containerID="cri-o://df74271e073d2a1874b1967c7e995ccbdd4ce2707596e404c8cd9904e3bfaddd" gracePeriod=10 Jan 20 17:02:02 crc kubenswrapper[4558]: I0120 17:02:02.347404 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-rqg8f" Jan 20 17:02:02 crc kubenswrapper[4558]: I0120 17:02:02.433542 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f881d3ea-5346-49f4-8618-7271150ba300-config\") pod \"f881d3ea-5346-49f4-8618-7271150ba300\" (UID: \"f881d3ea-5346-49f4-8618-7271150ba300\") " Jan 20 17:02:02 crc kubenswrapper[4558]: I0120 17:02:02.433623 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hjt82\" (UniqueName: \"kubernetes.io/projected/f881d3ea-5346-49f4-8618-7271150ba300-kube-api-access-hjt82\") pod \"f881d3ea-5346-49f4-8618-7271150ba300\" (UID: \"f881d3ea-5346-49f4-8618-7271150ba300\") " Jan 20 17:02:02 crc kubenswrapper[4558]: I0120 17:02:02.433654 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/f881d3ea-5346-49f4-8618-7271150ba300-dnsmasq-svc\") pod \"f881d3ea-5346-49f4-8618-7271150ba300\" (UID: \"f881d3ea-5346-49f4-8618-7271150ba300\") " Jan 20 17:02:02 crc kubenswrapper[4558]: I0120 17:02:02.438410 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f881d3ea-5346-49f4-8618-7271150ba300-kube-api-access-hjt82" (OuterVolumeSpecName: "kube-api-access-hjt82") pod "f881d3ea-5346-49f4-8618-7271150ba300" (UID: "f881d3ea-5346-49f4-8618-7271150ba300"). InnerVolumeSpecName "kube-api-access-hjt82". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:02:02 crc kubenswrapper[4558]: I0120 17:02:02.460767 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f881d3ea-5346-49f4-8618-7271150ba300-dnsmasq-svc" (OuterVolumeSpecName: "dnsmasq-svc") pod "f881d3ea-5346-49f4-8618-7271150ba300" (UID: "f881d3ea-5346-49f4-8618-7271150ba300"). InnerVolumeSpecName "dnsmasq-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:02:02 crc kubenswrapper[4558]: I0120 17:02:02.462736 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f881d3ea-5346-49f4-8618-7271150ba300-config" (OuterVolumeSpecName: "config") pod "f881d3ea-5346-49f4-8618-7271150ba300" (UID: "f881d3ea-5346-49f4-8618-7271150ba300"). 
InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:02:02 crc kubenswrapper[4558]: I0120 17:02:02.510323 4558 generic.go:334] "Generic (PLEG): container finished" podID="f881d3ea-5346-49f4-8618-7271150ba300" containerID="df74271e073d2a1874b1967c7e995ccbdd4ce2707596e404c8cd9904e3bfaddd" exitCode=0 Jan 20 17:02:02 crc kubenswrapper[4558]: I0120 17:02:02.510361 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-rqg8f" event={"ID":"f881d3ea-5346-49f4-8618-7271150ba300","Type":"ContainerDied","Data":"df74271e073d2a1874b1967c7e995ccbdd4ce2707596e404c8cd9904e3bfaddd"} Jan 20 17:02:02 crc kubenswrapper[4558]: I0120 17:02:02.510377 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-rqg8f" Jan 20 17:02:02 crc kubenswrapper[4558]: I0120 17:02:02.510393 4558 scope.go:117] "RemoveContainer" containerID="df74271e073d2a1874b1967c7e995ccbdd4ce2707596e404c8cd9904e3bfaddd" Jan 20 17:02:02 crc kubenswrapper[4558]: I0120 17:02:02.510384 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-rqg8f" event={"ID":"f881d3ea-5346-49f4-8618-7271150ba300","Type":"ContainerDied","Data":"c75020bce4190fc5fc326bf1e1bc3c73686d3abcdcb0b8a8a1f7e2444f209db5"} Jan 20 17:02:02 crc kubenswrapper[4558]: I0120 17:02:02.527827 4558 scope.go:117] "RemoveContainer" containerID="333bbcc9943770e21b9d9307f5f97b0c97ba4ca4036f8cc5c7aade2abf4d0a2a" Jan 20 17:02:02 crc kubenswrapper[4558]: I0120 17:02:02.530535 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-rqg8f"] Jan 20 17:02:02 crc kubenswrapper[4558]: I0120 17:02:02.535775 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f881d3ea-5346-49f4-8618-7271150ba300-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:02 crc kubenswrapper[4558]: I0120 17:02:02.535800 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hjt82\" (UniqueName: \"kubernetes.io/projected/f881d3ea-5346-49f4-8618-7271150ba300-kube-api-access-hjt82\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:02 crc kubenswrapper[4558]: I0120 17:02:02.535810 4558 reconciler_common.go:293] "Volume detached for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/f881d3ea-5346-49f4-8618-7271150ba300-dnsmasq-svc\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:02 crc kubenswrapper[4558]: I0120 17:02:02.536929 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-rqg8f"] Jan 20 17:02:02 crc kubenswrapper[4558]: I0120 17:02:02.559090 4558 scope.go:117] "RemoveContainer" containerID="df74271e073d2a1874b1967c7e995ccbdd4ce2707596e404c8cd9904e3bfaddd" Jan 20 17:02:02 crc kubenswrapper[4558]: E0120 17:02:02.559418 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"df74271e073d2a1874b1967c7e995ccbdd4ce2707596e404c8cd9904e3bfaddd\": container with ID starting with df74271e073d2a1874b1967c7e995ccbdd4ce2707596e404c8cd9904e3bfaddd not found: ID does not exist" containerID="df74271e073d2a1874b1967c7e995ccbdd4ce2707596e404c8cd9904e3bfaddd" Jan 20 17:02:02 crc kubenswrapper[4558]: I0120 17:02:02.559453 4558 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"df74271e073d2a1874b1967c7e995ccbdd4ce2707596e404c8cd9904e3bfaddd"} err="failed to get container status \"df74271e073d2a1874b1967c7e995ccbdd4ce2707596e404c8cd9904e3bfaddd\": rpc error: code = NotFound desc = could not find container \"df74271e073d2a1874b1967c7e995ccbdd4ce2707596e404c8cd9904e3bfaddd\": container with ID starting with df74271e073d2a1874b1967c7e995ccbdd4ce2707596e404c8cd9904e3bfaddd not found: ID does not exist" Jan 20 17:02:02 crc kubenswrapper[4558]: I0120 17:02:02.559473 4558 scope.go:117] "RemoveContainer" containerID="333bbcc9943770e21b9d9307f5f97b0c97ba4ca4036f8cc5c7aade2abf4d0a2a" Jan 20 17:02:02 crc kubenswrapper[4558]: E0120 17:02:02.559751 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"333bbcc9943770e21b9d9307f5f97b0c97ba4ca4036f8cc5c7aade2abf4d0a2a\": container with ID starting with 333bbcc9943770e21b9d9307f5f97b0c97ba4ca4036f8cc5c7aade2abf4d0a2a not found: ID does not exist" containerID="333bbcc9943770e21b9d9307f5f97b0c97ba4ca4036f8cc5c7aade2abf4d0a2a" Jan 20 17:02:02 crc kubenswrapper[4558]: I0120 17:02:02.559770 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"333bbcc9943770e21b9d9307f5f97b0c97ba4ca4036f8cc5c7aade2abf4d0a2a"} err="failed to get container status \"333bbcc9943770e21b9d9307f5f97b0c97ba4ca4036f8cc5c7aade2abf4d0a2a\": rpc error: code = NotFound desc = could not find container \"333bbcc9943770e21b9d9307f5f97b0c97ba4ca4036f8cc5c7aade2abf4d0a2a\": container with ID starting with 333bbcc9943770e21b9d9307f5f97b0c97ba4ca4036f8cc5c7aade2abf4d0a2a not found: ID does not exist" Jan 20 17:02:02 crc kubenswrapper[4558]: I0120 17:02:02.571970 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f881d3ea-5346-49f4-8618-7271150ba300" path="/var/lib/kubelet/pods/f881d3ea-5346-49f4-8618-7271150ba300/volumes" Jan 20 17:02:15 crc kubenswrapper[4558]: I0120 17:02:15.266349 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:02:16 crc kubenswrapper[4558]: I0120 17:02:16.205302 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:02:16 crc kubenswrapper[4558]: I0120 17:02:16.807060 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-db-create-gwczq"] Jan 20 17:02:16 crc kubenswrapper[4558]: E0120 17:02:16.807350 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f881d3ea-5346-49f4-8618-7271150ba300" containerName="dnsmasq-dns" Jan 20 17:02:16 crc kubenswrapper[4558]: I0120 17:02:16.807361 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f881d3ea-5346-49f4-8618-7271150ba300" containerName="dnsmasq-dns" Jan 20 17:02:16 crc kubenswrapper[4558]: E0120 17:02:16.807384 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7644e6dc-6bdb-4e65-b6c3-be904f9bbf90" containerName="mariadb-account-create-update" Jan 20 17:02:16 crc kubenswrapper[4558]: I0120 17:02:16.807390 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7644e6dc-6bdb-4e65-b6c3-be904f9bbf90" containerName="mariadb-account-create-update" Jan 20 17:02:16 crc kubenswrapper[4558]: E0120 17:02:16.807400 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f881d3ea-5346-49f4-8618-7271150ba300" containerName="init" Jan 20 17:02:16 crc kubenswrapper[4558]: I0120 
17:02:16.807405 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f881d3ea-5346-49f4-8618-7271150ba300" containerName="init" Jan 20 17:02:16 crc kubenswrapper[4558]: I0120 17:02:16.807518 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="7644e6dc-6bdb-4e65-b6c3-be904f9bbf90" containerName="mariadb-account-create-update" Jan 20 17:02:16 crc kubenswrapper[4558]: I0120 17:02:16.807533 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f881d3ea-5346-49f4-8618-7271150ba300" containerName="dnsmasq-dns" Jan 20 17:02:16 crc kubenswrapper[4558]: I0120 17:02:16.807940 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-db-create-gwczq" Jan 20 17:02:16 crc kubenswrapper[4558]: I0120 17:02:16.821093 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-db-create-gwczq"] Jan 20 17:02:16 crc kubenswrapper[4558]: I0120 17:02:16.908570 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-7436-account-create-update-hmcdh"] Jan 20 17:02:16 crc kubenswrapper[4558]: I0120 17:02:16.909453 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-7436-account-create-update-hmcdh" Jan 20 17:02:16 crc kubenswrapper[4558]: I0120 17:02:16.913729 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-db-create-k8g6l"] Jan 20 17:02:16 crc kubenswrapper[4558]: I0120 17:02:16.913966 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-db-secret" Jan 20 17:02:16 crc kubenswrapper[4558]: I0120 17:02:16.914611 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-db-create-k8g6l" Jan 20 17:02:16 crc kubenswrapper[4558]: I0120 17:02:16.916985 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/98aac22a-672a-4029-8d83-be51df654f6e-operator-scripts\") pod \"barbican-db-create-gwczq\" (UID: \"98aac22a-672a-4029-8d83-be51df654f6e\") " pod="openstack-kuttl-tests/barbican-db-create-gwczq" Jan 20 17:02:16 crc kubenswrapper[4558]: I0120 17:02:16.917259 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n2r2d\" (UniqueName: \"kubernetes.io/projected/98aac22a-672a-4029-8d83-be51df654f6e-kube-api-access-n2r2d\") pod \"barbican-db-create-gwczq\" (UID: \"98aac22a-672a-4029-8d83-be51df654f6e\") " pod="openstack-kuttl-tests/barbican-db-create-gwczq" Jan 20 17:02:16 crc kubenswrapper[4558]: I0120 17:02:16.920136 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-7436-account-create-update-hmcdh"] Jan 20 17:02:16 crc kubenswrapper[4558]: I0120 17:02:16.925137 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-db-create-k8g6l"] Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.003011 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-db-create-bp498"] Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.003845 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-db-create-bp498" Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.011112 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-bfd5-account-create-update-vhkrw"] Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.012033 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-bfd5-account-create-update-vhkrw" Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.014150 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-db-secret" Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.016232 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-db-create-bp498"] Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.018357 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gsslf\" (UniqueName: \"kubernetes.io/projected/c5521aa3-d4e7-4339-9c59-749808bacb09-kube-api-access-gsslf\") pod \"neutron-7436-account-create-update-hmcdh\" (UID: \"c5521aa3-d4e7-4339-9c59-749808bacb09\") " pod="openstack-kuttl-tests/neutron-7436-account-create-update-hmcdh" Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.018388 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6nl59\" (UniqueName: \"kubernetes.io/projected/f2a9bc2f-a177-4c7e-9036-c52825816ffe-kube-api-access-6nl59\") pod \"cinder-db-create-k8g6l\" (UID: \"f2a9bc2f-a177-4c7e-9036-c52825816ffe\") " pod="openstack-kuttl-tests/cinder-db-create-k8g6l" Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.018419 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c5521aa3-d4e7-4339-9c59-749808bacb09-operator-scripts\") pod \"neutron-7436-account-create-update-hmcdh\" (UID: \"c5521aa3-d4e7-4339-9c59-749808bacb09\") " pod="openstack-kuttl-tests/neutron-7436-account-create-update-hmcdh" Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.018451 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n2r2d\" (UniqueName: \"kubernetes.io/projected/98aac22a-672a-4029-8d83-be51df654f6e-kube-api-access-n2r2d\") pod \"barbican-db-create-gwczq\" (UID: \"98aac22a-672a-4029-8d83-be51df654f6e\") " pod="openstack-kuttl-tests/barbican-db-create-gwczq" Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.018469 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f2a9bc2f-a177-4c7e-9036-c52825816ffe-operator-scripts\") pod \"cinder-db-create-k8g6l\" (UID: \"f2a9bc2f-a177-4c7e-9036-c52825816ffe\") " pod="openstack-kuttl-tests/cinder-db-create-k8g6l" Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.018497 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/98aac22a-672a-4029-8d83-be51df654f6e-operator-scripts\") pod \"barbican-db-create-gwczq\" (UID: \"98aac22a-672a-4029-8d83-be51df654f6e\") " pod="openstack-kuttl-tests/barbican-db-create-gwczq" Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.019068 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/98aac22a-672a-4029-8d83-be51df654f6e-operator-scripts\") pod \"barbican-db-create-gwczq\" (UID: \"98aac22a-672a-4029-8d83-be51df654f6e\") " pod="openstack-kuttl-tests/barbican-db-create-gwczq" Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.033399 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-bfd5-account-create-update-vhkrw"] Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.036182 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n2r2d\" (UniqueName: \"kubernetes.io/projected/98aac22a-672a-4029-8d83-be51df654f6e-kube-api-access-n2r2d\") pod \"barbican-db-create-gwczq\" (UID: \"98aac22a-672a-4029-8d83-be51df654f6e\") " pod="openstack-kuttl-tests/barbican-db-create-gwczq" Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.111062 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-bb2e-account-create-update-5hchq"] Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.111935 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-bb2e-account-create-update-5hchq" Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.113279 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-db-secret" Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.118447 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-bb2e-account-create-update-5hchq"] Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.119296 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gsslf\" (UniqueName: \"kubernetes.io/projected/c5521aa3-d4e7-4339-9c59-749808bacb09-kube-api-access-gsslf\") pod \"neutron-7436-account-create-update-hmcdh\" (UID: \"c5521aa3-d4e7-4339-9c59-749808bacb09\") " pod="openstack-kuttl-tests/neutron-7436-account-create-update-hmcdh" Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.119325 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6nl59\" (UniqueName: \"kubernetes.io/projected/f2a9bc2f-a177-4c7e-9036-c52825816ffe-kube-api-access-6nl59\") pod \"cinder-db-create-k8g6l\" (UID: \"f2a9bc2f-a177-4c7e-9036-c52825816ffe\") " pod="openstack-kuttl-tests/cinder-db-create-k8g6l" Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.119368 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5zzk5\" (UniqueName: \"kubernetes.io/projected/e574e9d4-62db-4b0d-8dd3-caa44f7b4534-kube-api-access-5zzk5\") pod \"cinder-bfd5-account-create-update-vhkrw\" (UID: \"e574e9d4-62db-4b0d-8dd3-caa44f7b4534\") " pod="openstack-kuttl-tests/cinder-bfd5-account-create-update-vhkrw" Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.119388 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c5521aa3-d4e7-4339-9c59-749808bacb09-operator-scripts\") pod \"neutron-7436-account-create-update-hmcdh\" (UID: \"c5521aa3-d4e7-4339-9c59-749808bacb09\") " pod="openstack-kuttl-tests/neutron-7436-account-create-update-hmcdh" Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.119446 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f2a9bc2f-a177-4c7e-9036-c52825816ffe-operator-scripts\") 
pod \"cinder-db-create-k8g6l\" (UID: \"f2a9bc2f-a177-4c7e-9036-c52825816ffe\") " pod="openstack-kuttl-tests/cinder-db-create-k8g6l" Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.119495 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a0a56a29-2029-4a64-bb0c-539c45a09175-operator-scripts\") pod \"neutron-db-create-bp498\" (UID: \"a0a56a29-2029-4a64-bb0c-539c45a09175\") " pod="openstack-kuttl-tests/neutron-db-create-bp498" Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.119524 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e574e9d4-62db-4b0d-8dd3-caa44f7b4534-operator-scripts\") pod \"cinder-bfd5-account-create-update-vhkrw\" (UID: \"e574e9d4-62db-4b0d-8dd3-caa44f7b4534\") " pod="openstack-kuttl-tests/cinder-bfd5-account-create-update-vhkrw" Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.119568 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cw58f\" (UniqueName: \"kubernetes.io/projected/a0a56a29-2029-4a64-bb0c-539c45a09175-kube-api-access-cw58f\") pod \"neutron-db-create-bp498\" (UID: \"a0a56a29-2029-4a64-bb0c-539c45a09175\") " pod="openstack-kuttl-tests/neutron-db-create-bp498" Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.120049 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c5521aa3-d4e7-4339-9c59-749808bacb09-operator-scripts\") pod \"neutron-7436-account-create-update-hmcdh\" (UID: \"c5521aa3-d4e7-4339-9c59-749808bacb09\") " pod="openstack-kuttl-tests/neutron-7436-account-create-update-hmcdh" Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.120093 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f2a9bc2f-a177-4c7e-9036-c52825816ffe-operator-scripts\") pod \"cinder-db-create-k8g6l\" (UID: \"f2a9bc2f-a177-4c7e-9036-c52825816ffe\") " pod="openstack-kuttl-tests/cinder-db-create-k8g6l" Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.127081 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-db-create-gwczq" Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.139524 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gsslf\" (UniqueName: \"kubernetes.io/projected/c5521aa3-d4e7-4339-9c59-749808bacb09-kube-api-access-gsslf\") pod \"neutron-7436-account-create-update-hmcdh\" (UID: \"c5521aa3-d4e7-4339-9c59-749808bacb09\") " pod="openstack-kuttl-tests/neutron-7436-account-create-update-hmcdh" Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.141525 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6nl59\" (UniqueName: \"kubernetes.io/projected/f2a9bc2f-a177-4c7e-9036-c52825816ffe-kube-api-access-6nl59\") pod \"cinder-db-create-k8g6l\" (UID: \"f2a9bc2f-a177-4c7e-9036-c52825816ffe\") " pod="openstack-kuttl-tests/cinder-db-create-k8g6l" Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.220639 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dddqm\" (UniqueName: \"kubernetes.io/projected/52c6bb4f-39bf-4834-a35c-c7d3234e6799-kube-api-access-dddqm\") pod \"barbican-bb2e-account-create-update-5hchq\" (UID: \"52c6bb4f-39bf-4834-a35c-c7d3234e6799\") " pod="openstack-kuttl-tests/barbican-bb2e-account-create-update-5hchq" Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.220685 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a0a56a29-2029-4a64-bb0c-539c45a09175-operator-scripts\") pod \"neutron-db-create-bp498\" (UID: \"a0a56a29-2029-4a64-bb0c-539c45a09175\") " pod="openstack-kuttl-tests/neutron-db-create-bp498" Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.220735 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e574e9d4-62db-4b0d-8dd3-caa44f7b4534-operator-scripts\") pod \"cinder-bfd5-account-create-update-vhkrw\" (UID: \"e574e9d4-62db-4b0d-8dd3-caa44f7b4534\") " pod="openstack-kuttl-tests/cinder-bfd5-account-create-update-vhkrw" Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.220826 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cw58f\" (UniqueName: \"kubernetes.io/projected/a0a56a29-2029-4a64-bb0c-539c45a09175-kube-api-access-cw58f\") pod \"neutron-db-create-bp498\" (UID: \"a0a56a29-2029-4a64-bb0c-539c45a09175\") " pod="openstack-kuttl-tests/neutron-db-create-bp498" Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.220916 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/52c6bb4f-39bf-4834-a35c-c7d3234e6799-operator-scripts\") pod \"barbican-bb2e-account-create-update-5hchq\" (UID: \"52c6bb4f-39bf-4834-a35c-c7d3234e6799\") " pod="openstack-kuttl-tests/barbican-bb2e-account-create-update-5hchq" Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.220953 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5zzk5\" (UniqueName: \"kubernetes.io/projected/e574e9d4-62db-4b0d-8dd3-caa44f7b4534-kube-api-access-5zzk5\") pod \"cinder-bfd5-account-create-update-vhkrw\" (UID: \"e574e9d4-62db-4b0d-8dd3-caa44f7b4534\") " pod="openstack-kuttl-tests/cinder-bfd5-account-create-update-vhkrw" Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.221712 4558 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e574e9d4-62db-4b0d-8dd3-caa44f7b4534-operator-scripts\") pod \"cinder-bfd5-account-create-update-vhkrw\" (UID: \"e574e9d4-62db-4b0d-8dd3-caa44f7b4534\") " pod="openstack-kuttl-tests/cinder-bfd5-account-create-update-vhkrw" Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.221821 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a0a56a29-2029-4a64-bb0c-539c45a09175-operator-scripts\") pod \"neutron-db-create-bp498\" (UID: \"a0a56a29-2029-4a64-bb0c-539c45a09175\") " pod="openstack-kuttl-tests/neutron-db-create-bp498" Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.225840 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-7436-account-create-update-hmcdh" Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.235924 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-db-create-k8g6l" Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.237513 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-db-sync-9pt6z"] Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.238373 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-db-sync-9pt6z" Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.239958 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cw58f\" (UniqueName: \"kubernetes.io/projected/a0a56a29-2029-4a64-bb0c-539c45a09175-kube-api-access-cw58f\") pod \"neutron-db-create-bp498\" (UID: \"a0a56a29-2029-4a64-bb0c-539c45a09175\") " pod="openstack-kuttl-tests/neutron-db-create-bp498" Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.249823 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5zzk5\" (UniqueName: \"kubernetes.io/projected/e574e9d4-62db-4b0d-8dd3-caa44f7b4534-kube-api-access-5zzk5\") pod \"cinder-bfd5-account-create-update-vhkrw\" (UID: \"e574e9d4-62db-4b0d-8dd3-caa44f7b4534\") " pod="openstack-kuttl-tests/cinder-bfd5-account-create-update-vhkrw" Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.250054 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-config-data" Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.250375 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-scripts" Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.250508 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone" Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.250616 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-keystone-dockercfg-4bhst" Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.256586 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-db-sync-9pt6z"] Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.318796 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-db-create-bp498" Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.322312 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d97707d1-0217-458b-88ea-5589b040f499-combined-ca-bundle\") pod \"keystone-db-sync-9pt6z\" (UID: \"d97707d1-0217-458b-88ea-5589b040f499\") " pod="openstack-kuttl-tests/keystone-db-sync-9pt6z" Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.322357 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dddqm\" (UniqueName: \"kubernetes.io/projected/52c6bb4f-39bf-4834-a35c-c7d3234e6799-kube-api-access-dddqm\") pod \"barbican-bb2e-account-create-update-5hchq\" (UID: \"52c6bb4f-39bf-4834-a35c-c7d3234e6799\") " pod="openstack-kuttl-tests/barbican-bb2e-account-create-update-5hchq" Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.322457 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d97707d1-0217-458b-88ea-5589b040f499-config-data\") pod \"keystone-db-sync-9pt6z\" (UID: \"d97707d1-0217-458b-88ea-5589b040f499\") " pod="openstack-kuttl-tests/keystone-db-sync-9pt6z" Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.322546 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/52c6bb4f-39bf-4834-a35c-c7d3234e6799-operator-scripts\") pod \"barbican-bb2e-account-create-update-5hchq\" (UID: \"52c6bb4f-39bf-4834-a35c-c7d3234e6799\") " pod="openstack-kuttl-tests/barbican-bb2e-account-create-update-5hchq" Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.322594 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ptb5w\" (UniqueName: \"kubernetes.io/projected/d97707d1-0217-458b-88ea-5589b040f499-kube-api-access-ptb5w\") pod \"keystone-db-sync-9pt6z\" (UID: \"d97707d1-0217-458b-88ea-5589b040f499\") " pod="openstack-kuttl-tests/keystone-db-sync-9pt6z" Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.323115 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/52c6bb4f-39bf-4834-a35c-c7d3234e6799-operator-scripts\") pod \"barbican-bb2e-account-create-update-5hchq\" (UID: \"52c6bb4f-39bf-4834-a35c-c7d3234e6799\") " pod="openstack-kuttl-tests/barbican-bb2e-account-create-update-5hchq" Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.325984 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-bfd5-account-create-update-vhkrw" Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.338808 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dddqm\" (UniqueName: \"kubernetes.io/projected/52c6bb4f-39bf-4834-a35c-c7d3234e6799-kube-api-access-dddqm\") pod \"barbican-bb2e-account-create-update-5hchq\" (UID: \"52c6bb4f-39bf-4834-a35c-c7d3234e6799\") " pod="openstack-kuttl-tests/barbican-bb2e-account-create-update-5hchq" Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.423596 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d97707d1-0217-458b-88ea-5589b040f499-combined-ca-bundle\") pod \"keystone-db-sync-9pt6z\" (UID: \"d97707d1-0217-458b-88ea-5589b040f499\") " pod="openstack-kuttl-tests/keystone-db-sync-9pt6z" Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.423813 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d97707d1-0217-458b-88ea-5589b040f499-config-data\") pod \"keystone-db-sync-9pt6z\" (UID: \"d97707d1-0217-458b-88ea-5589b040f499\") " pod="openstack-kuttl-tests/keystone-db-sync-9pt6z" Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.423940 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ptb5w\" (UniqueName: \"kubernetes.io/projected/d97707d1-0217-458b-88ea-5589b040f499-kube-api-access-ptb5w\") pod \"keystone-db-sync-9pt6z\" (UID: \"d97707d1-0217-458b-88ea-5589b040f499\") " pod="openstack-kuttl-tests/keystone-db-sync-9pt6z" Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.424342 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-bb2e-account-create-update-5hchq" Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.426484 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d97707d1-0217-458b-88ea-5589b040f499-config-data\") pod \"keystone-db-sync-9pt6z\" (UID: \"d97707d1-0217-458b-88ea-5589b040f499\") " pod="openstack-kuttl-tests/keystone-db-sync-9pt6z" Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.426501 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d97707d1-0217-458b-88ea-5589b040f499-combined-ca-bundle\") pod \"keystone-db-sync-9pt6z\" (UID: \"d97707d1-0217-458b-88ea-5589b040f499\") " pod="openstack-kuttl-tests/keystone-db-sync-9pt6z" Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.441021 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ptb5w\" (UniqueName: \"kubernetes.io/projected/d97707d1-0217-458b-88ea-5589b040f499-kube-api-access-ptb5w\") pod \"keystone-db-sync-9pt6z\" (UID: \"d97707d1-0217-458b-88ea-5589b040f499\") " pod="openstack-kuttl-tests/keystone-db-sync-9pt6z" Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.519599 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-db-create-gwczq"] Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.604644 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-db-sync-9pt6z" Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.609047 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-create-gwczq" event={"ID":"98aac22a-672a-4029-8d83-be51df654f6e","Type":"ContainerStarted","Data":"0a1dd3aea73cbdfdf9de1d7748f2de093d129bfe26ac2823bb43caceb8f417e3"} Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.654225 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-db-create-k8g6l"] Jan 20 17:02:17 crc kubenswrapper[4558]: W0120 17:02:17.664501 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf2a9bc2f_a177_4c7e_9036_c52825816ffe.slice/crio-b7bf484e30c2e415902ab6334f9fa58be49874a515db97645803c603ebf93062 WatchSource:0}: Error finding container b7bf484e30c2e415902ab6334f9fa58be49874a515db97645803c603ebf93062: Status 404 returned error can't find the container with id b7bf484e30c2e415902ab6334f9fa58be49874a515db97645803c603ebf93062 Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.704674 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-7436-account-create-update-hmcdh"] Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.827306 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-bfd5-account-create-update-vhkrw"] Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.856309 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-db-sync-9pt6z"] Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.882766 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-db-create-bp498"] Jan 20 17:02:17 crc kubenswrapper[4558]: W0120 17:02:17.897596 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda0a56a29_2029_4a64_bb0c_539c45a09175.slice/crio-34a921044fb70dea12cc1aa23f2a00484f447defcb0987f1ff33fafbf680732e WatchSource:0}: Error finding container 34a921044fb70dea12cc1aa23f2a00484f447defcb0987f1ff33fafbf680732e: Status 404 returned error can't find the container with id 34a921044fb70dea12cc1aa23f2a00484f447defcb0987f1ff33fafbf680732e Jan 20 17:02:17 crc kubenswrapper[4558]: I0120 17:02:17.981696 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-bb2e-account-create-update-5hchq"] Jan 20 17:02:17 crc kubenswrapper[4558]: W0120 17:02:17.987175 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod52c6bb4f_39bf_4834_a35c_c7d3234e6799.slice/crio-8d4a5503725b1a75bb08cd77c7ff149c18e5fb22e2eb46dca347fe14d450fd2e WatchSource:0}: Error finding container 8d4a5503725b1a75bb08cd77c7ff149c18e5fb22e2eb46dca347fe14d450fd2e: Status 404 returned error can't find the container with id 8d4a5503725b1a75bb08cd77c7ff149c18e5fb22e2eb46dca347fe14d450fd2e Jan 20 17:02:18 crc kubenswrapper[4558]: I0120 17:02:18.616431 4558 generic.go:334] "Generic (PLEG): container finished" podID="98aac22a-672a-4029-8d83-be51df654f6e" containerID="6f75a2e587b8aeda7d220c1e548eaebda4e03638ed963e8d38e9e47793f84d9e" exitCode=0 Jan 20 17:02:18 crc kubenswrapper[4558]: I0120 17:02:18.616628 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-create-gwczq" 
event={"ID":"98aac22a-672a-4029-8d83-be51df654f6e","Type":"ContainerDied","Data":"6f75a2e587b8aeda7d220c1e548eaebda4e03638ed963e8d38e9e47793f84d9e"} Jan 20 17:02:18 crc kubenswrapper[4558]: I0120 17:02:18.618278 4558 generic.go:334] "Generic (PLEG): container finished" podID="f2a9bc2f-a177-4c7e-9036-c52825816ffe" containerID="9990e29de8eb44da080f3921fca3c78f78c2476ef82affd2a61ff9d7a9c4550b" exitCode=0 Jan 20 17:02:18 crc kubenswrapper[4558]: I0120 17:02:18.618333 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-create-k8g6l" event={"ID":"f2a9bc2f-a177-4c7e-9036-c52825816ffe","Type":"ContainerDied","Data":"9990e29de8eb44da080f3921fca3c78f78c2476ef82affd2a61ff9d7a9c4550b"} Jan 20 17:02:18 crc kubenswrapper[4558]: I0120 17:02:18.618354 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-create-k8g6l" event={"ID":"f2a9bc2f-a177-4c7e-9036-c52825816ffe","Type":"ContainerStarted","Data":"b7bf484e30c2e415902ab6334f9fa58be49874a515db97645803c603ebf93062"} Jan 20 17:02:18 crc kubenswrapper[4558]: I0120 17:02:18.619863 4558 generic.go:334] "Generic (PLEG): container finished" podID="a0a56a29-2029-4a64-bb0c-539c45a09175" containerID="188c3e342f37b5a0e2973b3d0a9ce309da3207e014b9fd3f20fdbe5d6d224911" exitCode=0 Jan 20 17:02:18 crc kubenswrapper[4558]: I0120 17:02:18.619938 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-create-bp498" event={"ID":"a0a56a29-2029-4a64-bb0c-539c45a09175","Type":"ContainerDied","Data":"188c3e342f37b5a0e2973b3d0a9ce309da3207e014b9fd3f20fdbe5d6d224911"} Jan 20 17:02:18 crc kubenswrapper[4558]: I0120 17:02:18.619965 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-create-bp498" event={"ID":"a0a56a29-2029-4a64-bb0c-539c45a09175","Type":"ContainerStarted","Data":"34a921044fb70dea12cc1aa23f2a00484f447defcb0987f1ff33fafbf680732e"} Jan 20 17:02:18 crc kubenswrapper[4558]: I0120 17:02:18.620891 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-sync-9pt6z" event={"ID":"d97707d1-0217-458b-88ea-5589b040f499","Type":"ContainerStarted","Data":"6cedc115d4e6af60550ede3121827d9dbbc1313246a8ea36817021a7ace12bae"} Jan 20 17:02:18 crc kubenswrapper[4558]: I0120 17:02:18.620977 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-sync-9pt6z" event={"ID":"d97707d1-0217-458b-88ea-5589b040f499","Type":"ContainerStarted","Data":"c2d4ac0ce4294b29ecb513e2d2e70356349c2e92c10ca08c0497ec66cfdcdc7f"} Jan 20 17:02:18 crc kubenswrapper[4558]: I0120 17:02:18.622284 4558 generic.go:334] "Generic (PLEG): container finished" podID="52c6bb4f-39bf-4834-a35c-c7d3234e6799" containerID="62550ed538832efcdb4e52bf31aa3d0f46c4b167fffd93e4a2f4e812472afffe" exitCode=0 Jan 20 17:02:18 crc kubenswrapper[4558]: I0120 17:02:18.622388 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-bb2e-account-create-update-5hchq" event={"ID":"52c6bb4f-39bf-4834-a35c-c7d3234e6799","Type":"ContainerDied","Data":"62550ed538832efcdb4e52bf31aa3d0f46c4b167fffd93e4a2f4e812472afffe"} Jan 20 17:02:18 crc kubenswrapper[4558]: I0120 17:02:18.622462 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-bb2e-account-create-update-5hchq" event={"ID":"52c6bb4f-39bf-4834-a35c-c7d3234e6799","Type":"ContainerStarted","Data":"8d4a5503725b1a75bb08cd77c7ff149c18e5fb22e2eb46dca347fe14d450fd2e"} Jan 20 17:02:18 crc 
kubenswrapper[4558]: I0120 17:02:18.623812 4558 generic.go:334] "Generic (PLEG): container finished" podID="c5521aa3-d4e7-4339-9c59-749808bacb09" containerID="82423a979fb5acca56c55fc07937fea00be152ca157837a8e089a8d66ed5523d" exitCode=0 Jan 20 17:02:18 crc kubenswrapper[4558]: I0120 17:02:18.623880 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-7436-account-create-update-hmcdh" event={"ID":"c5521aa3-d4e7-4339-9c59-749808bacb09","Type":"ContainerDied","Data":"82423a979fb5acca56c55fc07937fea00be152ca157837a8e089a8d66ed5523d"} Jan 20 17:02:18 crc kubenswrapper[4558]: I0120 17:02:18.623898 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-7436-account-create-update-hmcdh" event={"ID":"c5521aa3-d4e7-4339-9c59-749808bacb09","Type":"ContainerStarted","Data":"d8e9b66e76d321ad81e463e465dbeae17c41b1de90b76e96fbca0dc73e9f93e9"} Jan 20 17:02:18 crc kubenswrapper[4558]: I0120 17:02:18.627886 4558 generic.go:334] "Generic (PLEG): container finished" podID="e574e9d4-62db-4b0d-8dd3-caa44f7b4534" containerID="849d022aa7e01656bcda911e10553d7e4a4b77905285c453f857bea7c5853f1c" exitCode=0 Jan 20 17:02:18 crc kubenswrapper[4558]: I0120 17:02:18.627975 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-bfd5-account-create-update-vhkrw" event={"ID":"e574e9d4-62db-4b0d-8dd3-caa44f7b4534","Type":"ContainerDied","Data":"849d022aa7e01656bcda911e10553d7e4a4b77905285c453f857bea7c5853f1c"} Jan 20 17:02:18 crc kubenswrapper[4558]: I0120 17:02:18.628037 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-bfd5-account-create-update-vhkrw" event={"ID":"e574e9d4-62db-4b0d-8dd3-caa44f7b4534","Type":"ContainerStarted","Data":"b23cff0a7e1c5f1de88a4ce7dfc4ed77b4d2bb9e97309d16c192a736a66992ef"} Jan 20 17:02:18 crc kubenswrapper[4558]: I0120 17:02:18.694124 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/keystone-db-sync-9pt6z" podStartSLOduration=1.6941086570000001 podStartE2EDuration="1.694108657s" podCreationTimestamp="2026-01-20 17:02:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:02:18.689844106 +0000 UTC m=+1232.450182074" watchObservedRunningTime="2026-01-20 17:02:18.694108657 +0000 UTC m=+1232.454446624" Jan 20 17:02:19 crc kubenswrapper[4558]: I0120 17:02:19.634744 4558 generic.go:334] "Generic (PLEG): container finished" podID="d97707d1-0217-458b-88ea-5589b040f499" containerID="6cedc115d4e6af60550ede3121827d9dbbc1313246a8ea36817021a7ace12bae" exitCode=0 Jan 20 17:02:19 crc kubenswrapper[4558]: I0120 17:02:19.634790 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-sync-9pt6z" event={"ID":"d97707d1-0217-458b-88ea-5589b040f499","Type":"ContainerDied","Data":"6cedc115d4e6af60550ede3121827d9dbbc1313246a8ea36817021a7ace12bae"} Jan 20 17:02:19 crc kubenswrapper[4558]: I0120 17:02:19.925112 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-bb2e-account-create-update-5hchq" Jan 20 17:02:19 crc kubenswrapper[4558]: I0120 17:02:19.960349 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/52c6bb4f-39bf-4834-a35c-c7d3234e6799-operator-scripts\") pod \"52c6bb4f-39bf-4834-a35c-c7d3234e6799\" (UID: \"52c6bb4f-39bf-4834-a35c-c7d3234e6799\") " Jan 20 17:02:19 crc kubenswrapper[4558]: I0120 17:02:19.960464 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dddqm\" (UniqueName: \"kubernetes.io/projected/52c6bb4f-39bf-4834-a35c-c7d3234e6799-kube-api-access-dddqm\") pod \"52c6bb4f-39bf-4834-a35c-c7d3234e6799\" (UID: \"52c6bb4f-39bf-4834-a35c-c7d3234e6799\") " Jan 20 17:02:19 crc kubenswrapper[4558]: I0120 17:02:19.961190 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/52c6bb4f-39bf-4834-a35c-c7d3234e6799-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "52c6bb4f-39bf-4834-a35c-c7d3234e6799" (UID: "52c6bb4f-39bf-4834-a35c-c7d3234e6799"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:02:19 crc kubenswrapper[4558]: I0120 17:02:19.986772 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/52c6bb4f-39bf-4834-a35c-c7d3234e6799-kube-api-access-dddqm" (OuterVolumeSpecName: "kube-api-access-dddqm") pod "52c6bb4f-39bf-4834-a35c-c7d3234e6799" (UID: "52c6bb4f-39bf-4834-a35c-c7d3234e6799"). InnerVolumeSpecName "kube-api-access-dddqm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:02:20 crc kubenswrapper[4558]: I0120 17:02:20.061745 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dddqm\" (UniqueName: \"kubernetes.io/projected/52c6bb4f-39bf-4834-a35c-c7d3234e6799-kube-api-access-dddqm\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:20 crc kubenswrapper[4558]: I0120 17:02:20.061836 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/52c6bb4f-39bf-4834-a35c-c7d3234e6799-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:20 crc kubenswrapper[4558]: I0120 17:02:20.072665 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-db-create-k8g6l" Jan 20 17:02:20 crc kubenswrapper[4558]: I0120 17:02:20.076935 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-bfd5-account-create-update-vhkrw" Jan 20 17:02:20 crc kubenswrapper[4558]: I0120 17:02:20.084138 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-db-create-gwczq" Jan 20 17:02:20 crc kubenswrapper[4558]: I0120 17:02:20.089627 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-7436-account-create-update-hmcdh" Jan 20 17:02:20 crc kubenswrapper[4558]: I0120 17:02:20.099071 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-db-create-bp498" Jan 20 17:02:20 crc kubenswrapper[4558]: I0120 17:02:20.162875 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n2r2d\" (UniqueName: \"kubernetes.io/projected/98aac22a-672a-4029-8d83-be51df654f6e-kube-api-access-n2r2d\") pod \"98aac22a-672a-4029-8d83-be51df654f6e\" (UID: \"98aac22a-672a-4029-8d83-be51df654f6e\") " Jan 20 17:02:20 crc kubenswrapper[4558]: I0120 17:02:20.162912 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f2a9bc2f-a177-4c7e-9036-c52825816ffe-operator-scripts\") pod \"f2a9bc2f-a177-4c7e-9036-c52825816ffe\" (UID: \"f2a9bc2f-a177-4c7e-9036-c52825816ffe\") " Jan 20 17:02:20 crc kubenswrapper[4558]: I0120 17:02:20.162949 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5zzk5\" (UniqueName: \"kubernetes.io/projected/e574e9d4-62db-4b0d-8dd3-caa44f7b4534-kube-api-access-5zzk5\") pod \"e574e9d4-62db-4b0d-8dd3-caa44f7b4534\" (UID: \"e574e9d4-62db-4b0d-8dd3-caa44f7b4534\") " Jan 20 17:02:20 crc kubenswrapper[4558]: I0120 17:02:20.163033 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/98aac22a-672a-4029-8d83-be51df654f6e-operator-scripts\") pod \"98aac22a-672a-4029-8d83-be51df654f6e\" (UID: \"98aac22a-672a-4029-8d83-be51df654f6e\") " Jan 20 17:02:20 crc kubenswrapper[4558]: I0120 17:02:20.163055 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c5521aa3-d4e7-4339-9c59-749808bacb09-operator-scripts\") pod \"c5521aa3-d4e7-4339-9c59-749808bacb09\" (UID: \"c5521aa3-d4e7-4339-9c59-749808bacb09\") " Jan 20 17:02:20 crc kubenswrapper[4558]: I0120 17:02:20.163076 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a0a56a29-2029-4a64-bb0c-539c45a09175-operator-scripts\") pod \"a0a56a29-2029-4a64-bb0c-539c45a09175\" (UID: \"a0a56a29-2029-4a64-bb0c-539c45a09175\") " Jan 20 17:02:20 crc kubenswrapper[4558]: I0120 17:02:20.163095 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e574e9d4-62db-4b0d-8dd3-caa44f7b4534-operator-scripts\") pod \"e574e9d4-62db-4b0d-8dd3-caa44f7b4534\" (UID: \"e574e9d4-62db-4b0d-8dd3-caa44f7b4534\") " Jan 20 17:02:20 crc kubenswrapper[4558]: I0120 17:02:20.163128 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gsslf\" (UniqueName: \"kubernetes.io/projected/c5521aa3-d4e7-4339-9c59-749808bacb09-kube-api-access-gsslf\") pod \"c5521aa3-d4e7-4339-9c59-749808bacb09\" (UID: \"c5521aa3-d4e7-4339-9c59-749808bacb09\") " Jan 20 17:02:20 crc kubenswrapper[4558]: I0120 17:02:20.163144 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cw58f\" (UniqueName: \"kubernetes.io/projected/a0a56a29-2029-4a64-bb0c-539c45a09175-kube-api-access-cw58f\") pod \"a0a56a29-2029-4a64-bb0c-539c45a09175\" (UID: \"a0a56a29-2029-4a64-bb0c-539c45a09175\") " Jan 20 17:02:20 crc kubenswrapper[4558]: I0120 17:02:20.163178 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6nl59\" (UniqueName: 
\"kubernetes.io/projected/f2a9bc2f-a177-4c7e-9036-c52825816ffe-kube-api-access-6nl59\") pod \"f2a9bc2f-a177-4c7e-9036-c52825816ffe\" (UID: \"f2a9bc2f-a177-4c7e-9036-c52825816ffe\") " Jan 20 17:02:20 crc kubenswrapper[4558]: I0120 17:02:20.163478 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f2a9bc2f-a177-4c7e-9036-c52825816ffe-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f2a9bc2f-a177-4c7e-9036-c52825816ffe" (UID: "f2a9bc2f-a177-4c7e-9036-c52825816ffe"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:02:20 crc kubenswrapper[4558]: I0120 17:02:20.163516 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a0a56a29-2029-4a64-bb0c-539c45a09175-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a0a56a29-2029-4a64-bb0c-539c45a09175" (UID: "a0a56a29-2029-4a64-bb0c-539c45a09175"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:02:20 crc kubenswrapper[4558]: I0120 17:02:20.163684 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c5521aa3-d4e7-4339-9c59-749808bacb09-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c5521aa3-d4e7-4339-9c59-749808bacb09" (UID: "c5521aa3-d4e7-4339-9c59-749808bacb09"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:02:20 crc kubenswrapper[4558]: I0120 17:02:20.163738 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/98aac22a-672a-4029-8d83-be51df654f6e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "98aac22a-672a-4029-8d83-be51df654f6e" (UID: "98aac22a-672a-4029-8d83-be51df654f6e"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:02:20 crc kubenswrapper[4558]: I0120 17:02:20.163993 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e574e9d4-62db-4b0d-8dd3-caa44f7b4534-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e574e9d4-62db-4b0d-8dd3-caa44f7b4534" (UID: "e574e9d4-62db-4b0d-8dd3-caa44f7b4534"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:02:20 crc kubenswrapper[4558]: I0120 17:02:20.164137 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f2a9bc2f-a177-4c7e-9036-c52825816ffe-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:20 crc kubenswrapper[4558]: I0120 17:02:20.164157 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/98aac22a-672a-4029-8d83-be51df654f6e-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:20 crc kubenswrapper[4558]: I0120 17:02:20.164181 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c5521aa3-d4e7-4339-9c59-749808bacb09-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:20 crc kubenswrapper[4558]: I0120 17:02:20.164190 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a0a56a29-2029-4a64-bb0c-539c45a09175-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:20 crc kubenswrapper[4558]: I0120 17:02:20.164197 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e574e9d4-62db-4b0d-8dd3-caa44f7b4534-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:20 crc kubenswrapper[4558]: I0120 17:02:20.166218 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f2a9bc2f-a177-4c7e-9036-c52825816ffe-kube-api-access-6nl59" (OuterVolumeSpecName: "kube-api-access-6nl59") pod "f2a9bc2f-a177-4c7e-9036-c52825816ffe" (UID: "f2a9bc2f-a177-4c7e-9036-c52825816ffe"). InnerVolumeSpecName "kube-api-access-6nl59". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:02:20 crc kubenswrapper[4558]: I0120 17:02:20.166252 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c5521aa3-d4e7-4339-9c59-749808bacb09-kube-api-access-gsslf" (OuterVolumeSpecName: "kube-api-access-gsslf") pod "c5521aa3-d4e7-4339-9c59-749808bacb09" (UID: "c5521aa3-d4e7-4339-9c59-749808bacb09"). InnerVolumeSpecName "kube-api-access-gsslf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:02:20 crc kubenswrapper[4558]: I0120 17:02:20.166280 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0a56a29-2029-4a64-bb0c-539c45a09175-kube-api-access-cw58f" (OuterVolumeSpecName: "kube-api-access-cw58f") pod "a0a56a29-2029-4a64-bb0c-539c45a09175" (UID: "a0a56a29-2029-4a64-bb0c-539c45a09175"). InnerVolumeSpecName "kube-api-access-cw58f". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:02:20 crc kubenswrapper[4558]: I0120 17:02:20.166299 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/98aac22a-672a-4029-8d83-be51df654f6e-kube-api-access-n2r2d" (OuterVolumeSpecName: "kube-api-access-n2r2d") pod "98aac22a-672a-4029-8d83-be51df654f6e" (UID: "98aac22a-672a-4029-8d83-be51df654f6e"). InnerVolumeSpecName "kube-api-access-n2r2d". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:02:20 crc kubenswrapper[4558]: I0120 17:02:20.166455 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e574e9d4-62db-4b0d-8dd3-caa44f7b4534-kube-api-access-5zzk5" (OuterVolumeSpecName: "kube-api-access-5zzk5") pod "e574e9d4-62db-4b0d-8dd3-caa44f7b4534" (UID: "e574e9d4-62db-4b0d-8dd3-caa44f7b4534"). InnerVolumeSpecName "kube-api-access-5zzk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:02:20 crc kubenswrapper[4558]: I0120 17:02:20.265229 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5zzk5\" (UniqueName: \"kubernetes.io/projected/e574e9d4-62db-4b0d-8dd3-caa44f7b4534-kube-api-access-5zzk5\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:20 crc kubenswrapper[4558]: I0120 17:02:20.265844 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gsslf\" (UniqueName: \"kubernetes.io/projected/c5521aa3-d4e7-4339-9c59-749808bacb09-kube-api-access-gsslf\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:20 crc kubenswrapper[4558]: I0120 17:02:20.265856 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cw58f\" (UniqueName: \"kubernetes.io/projected/a0a56a29-2029-4a64-bb0c-539c45a09175-kube-api-access-cw58f\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:20 crc kubenswrapper[4558]: I0120 17:02:20.265864 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6nl59\" (UniqueName: \"kubernetes.io/projected/f2a9bc2f-a177-4c7e-9036-c52825816ffe-kube-api-access-6nl59\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:20 crc kubenswrapper[4558]: I0120 17:02:20.265873 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n2r2d\" (UniqueName: \"kubernetes.io/projected/98aac22a-672a-4029-8d83-be51df654f6e-kube-api-access-n2r2d\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:20 crc kubenswrapper[4558]: I0120 17:02:20.642749 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-db-create-bp498" Jan 20 17:02:20 crc kubenswrapper[4558]: I0120 17:02:20.642748 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-create-bp498" event={"ID":"a0a56a29-2029-4a64-bb0c-539c45a09175","Type":"ContainerDied","Data":"34a921044fb70dea12cc1aa23f2a00484f447defcb0987f1ff33fafbf680732e"} Jan 20 17:02:20 crc kubenswrapper[4558]: I0120 17:02:20.642855 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="34a921044fb70dea12cc1aa23f2a00484f447defcb0987f1ff33fafbf680732e" Jan 20 17:02:20 crc kubenswrapper[4558]: I0120 17:02:20.644122 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-bb2e-account-create-update-5hchq" Jan 20 17:02:20 crc kubenswrapper[4558]: I0120 17:02:20.644108 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-bb2e-account-create-update-5hchq" event={"ID":"52c6bb4f-39bf-4834-a35c-c7d3234e6799","Type":"ContainerDied","Data":"8d4a5503725b1a75bb08cd77c7ff149c18e5fb22e2eb46dca347fe14d450fd2e"} Jan 20 17:02:20 crc kubenswrapper[4558]: I0120 17:02:20.644417 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8d4a5503725b1a75bb08cd77c7ff149c18e5fb22e2eb46dca347fe14d450fd2e" Jan 20 17:02:20 crc kubenswrapper[4558]: I0120 17:02:20.645695 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-7436-account-create-update-hmcdh" Jan 20 17:02:20 crc kubenswrapper[4558]: I0120 17:02:20.645701 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-7436-account-create-update-hmcdh" event={"ID":"c5521aa3-d4e7-4339-9c59-749808bacb09","Type":"ContainerDied","Data":"d8e9b66e76d321ad81e463e465dbeae17c41b1de90b76e96fbca0dc73e9f93e9"} Jan 20 17:02:20 crc kubenswrapper[4558]: I0120 17:02:20.645944 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d8e9b66e76d321ad81e463e465dbeae17c41b1de90b76e96fbca0dc73e9f93e9" Jan 20 17:02:20 crc kubenswrapper[4558]: I0120 17:02:20.647198 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-bfd5-account-create-update-vhkrw" event={"ID":"e574e9d4-62db-4b0d-8dd3-caa44f7b4534","Type":"ContainerDied","Data":"b23cff0a7e1c5f1de88a4ce7dfc4ed77b4d2bb9e97309d16c192a736a66992ef"} Jan 20 17:02:20 crc kubenswrapper[4558]: I0120 17:02:20.647222 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b23cff0a7e1c5f1de88a4ce7dfc4ed77b4d2bb9e97309d16c192a736a66992ef" Jan 20 17:02:20 crc kubenswrapper[4558]: I0120 17:02:20.647206 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-bfd5-account-create-update-vhkrw" Jan 20 17:02:20 crc kubenswrapper[4558]: I0120 17:02:20.648359 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-create-gwczq" event={"ID":"98aac22a-672a-4029-8d83-be51df654f6e","Type":"ContainerDied","Data":"0a1dd3aea73cbdfdf9de1d7748f2de093d129bfe26ac2823bb43caceb8f417e3"} Jan 20 17:02:20 crc kubenswrapper[4558]: I0120 17:02:20.648382 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0a1dd3aea73cbdfdf9de1d7748f2de093d129bfe26ac2823bb43caceb8f417e3" Jan 20 17:02:20 crc kubenswrapper[4558]: I0120 17:02:20.648417 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-db-create-gwczq" Jan 20 17:02:20 crc kubenswrapper[4558]: I0120 17:02:20.650539 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-db-create-k8g6l" Jan 20 17:02:20 crc kubenswrapper[4558]: I0120 17:02:20.650538 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-create-k8g6l" event={"ID":"f2a9bc2f-a177-4c7e-9036-c52825816ffe","Type":"ContainerDied","Data":"b7bf484e30c2e415902ab6334f9fa58be49874a515db97645803c603ebf93062"} Jan 20 17:02:20 crc kubenswrapper[4558]: I0120 17:02:20.650663 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b7bf484e30c2e415902ab6334f9fa58be49874a515db97645803c603ebf93062" Jan 20 17:02:20 crc kubenswrapper[4558]: I0120 17:02:20.837262 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-db-sync-9pt6z" Jan 20 17:02:20 crc kubenswrapper[4558]: I0120 17:02:20.872639 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ptb5w\" (UniqueName: \"kubernetes.io/projected/d97707d1-0217-458b-88ea-5589b040f499-kube-api-access-ptb5w\") pod \"d97707d1-0217-458b-88ea-5589b040f499\" (UID: \"d97707d1-0217-458b-88ea-5589b040f499\") " Jan 20 17:02:20 crc kubenswrapper[4558]: I0120 17:02:20.872711 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d97707d1-0217-458b-88ea-5589b040f499-combined-ca-bundle\") pod \"d97707d1-0217-458b-88ea-5589b040f499\" (UID: \"d97707d1-0217-458b-88ea-5589b040f499\") " Jan 20 17:02:20 crc kubenswrapper[4558]: I0120 17:02:20.872788 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d97707d1-0217-458b-88ea-5589b040f499-config-data\") pod \"d97707d1-0217-458b-88ea-5589b040f499\" (UID: \"d97707d1-0217-458b-88ea-5589b040f499\") " Jan 20 17:02:20 crc kubenswrapper[4558]: I0120 17:02:20.875571 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d97707d1-0217-458b-88ea-5589b040f499-kube-api-access-ptb5w" (OuterVolumeSpecName: "kube-api-access-ptb5w") pod "d97707d1-0217-458b-88ea-5589b040f499" (UID: "d97707d1-0217-458b-88ea-5589b040f499"). InnerVolumeSpecName "kube-api-access-ptb5w". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:02:20 crc kubenswrapper[4558]: I0120 17:02:20.889670 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d97707d1-0217-458b-88ea-5589b040f499-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d97707d1-0217-458b-88ea-5589b040f499" (UID: "d97707d1-0217-458b-88ea-5589b040f499"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:02:20 crc kubenswrapper[4558]: I0120 17:02:20.900479 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d97707d1-0217-458b-88ea-5589b040f499-config-data" (OuterVolumeSpecName: "config-data") pod "d97707d1-0217-458b-88ea-5589b040f499" (UID: "d97707d1-0217-458b-88ea-5589b040f499"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:02:20 crc kubenswrapper[4558]: I0120 17:02:20.974809 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d97707d1-0217-458b-88ea-5589b040f499-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:20 crc kubenswrapper[4558]: I0120 17:02:20.975013 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ptb5w\" (UniqueName: \"kubernetes.io/projected/d97707d1-0217-458b-88ea-5589b040f499-kube-api-access-ptb5w\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:20 crc kubenswrapper[4558]: I0120 17:02:20.975081 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d97707d1-0217-458b-88ea-5589b040f499-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:21 crc kubenswrapper[4558]: I0120 17:02:21.657423 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-sync-9pt6z" event={"ID":"d97707d1-0217-458b-88ea-5589b040f499","Type":"ContainerDied","Data":"c2d4ac0ce4294b29ecb513e2d2e70356349c2e92c10ca08c0497ec66cfdcdc7f"} Jan 20 17:02:21 crc kubenswrapper[4558]: I0120 17:02:21.657612 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c2d4ac0ce4294b29ecb513e2d2e70356349c2e92c10ca08c0497ec66cfdcdc7f" Jan 20 17:02:21 crc kubenswrapper[4558]: I0120 17:02:21.657481 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-db-sync-9pt6z" Jan 20 17:02:21 crc kubenswrapper[4558]: I0120 17:02:21.999186 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-fkvmr"] Jan 20 17:02:21 crc kubenswrapper[4558]: E0120 17:02:21.999448 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5521aa3-d4e7-4339-9c59-749808bacb09" containerName="mariadb-account-create-update" Jan 20 17:02:21 crc kubenswrapper[4558]: I0120 17:02:21.999464 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5521aa3-d4e7-4339-9c59-749808bacb09" containerName="mariadb-account-create-update" Jan 20 17:02:21 crc kubenswrapper[4558]: E0120 17:02:21.999477 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d97707d1-0217-458b-88ea-5589b040f499" containerName="keystone-db-sync" Jan 20 17:02:21 crc kubenswrapper[4558]: I0120 17:02:21.999482 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d97707d1-0217-458b-88ea-5589b040f499" containerName="keystone-db-sync" Jan 20 17:02:21 crc kubenswrapper[4558]: E0120 17:02:21.999491 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e574e9d4-62db-4b0d-8dd3-caa44f7b4534" containerName="mariadb-account-create-update" Jan 20 17:02:21 crc kubenswrapper[4558]: I0120 17:02:21.999496 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e574e9d4-62db-4b0d-8dd3-caa44f7b4534" containerName="mariadb-account-create-update" Jan 20 17:02:21 crc kubenswrapper[4558]: E0120 17:02:21.999514 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="98aac22a-672a-4029-8d83-be51df654f6e" containerName="mariadb-database-create" Jan 20 17:02:21 crc kubenswrapper[4558]: I0120 17:02:21.999519 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="98aac22a-672a-4029-8d83-be51df654f6e" containerName="mariadb-database-create" Jan 20 17:02:21 crc kubenswrapper[4558]: E0120 17:02:21.999530 4558 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="52c6bb4f-39bf-4834-a35c-c7d3234e6799" containerName="mariadb-account-create-update" Jan 20 17:02:21 crc kubenswrapper[4558]: I0120 17:02:21.999536 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="52c6bb4f-39bf-4834-a35c-c7d3234e6799" containerName="mariadb-account-create-update" Jan 20 17:02:21 crc kubenswrapper[4558]: E0120 17:02:21.999543 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0a56a29-2029-4a64-bb0c-539c45a09175" containerName="mariadb-database-create" Jan 20 17:02:21 crc kubenswrapper[4558]: I0120 17:02:21.999548 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0a56a29-2029-4a64-bb0c-539c45a09175" containerName="mariadb-database-create" Jan 20 17:02:21 crc kubenswrapper[4558]: E0120 17:02:21.999558 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f2a9bc2f-a177-4c7e-9036-c52825816ffe" containerName="mariadb-database-create" Jan 20 17:02:21 crc kubenswrapper[4558]: I0120 17:02:21.999563 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f2a9bc2f-a177-4c7e-9036-c52825816ffe" containerName="mariadb-database-create" Jan 20 17:02:21 crc kubenswrapper[4558]: I0120 17:02:21.999670 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f2a9bc2f-a177-4c7e-9036-c52825816ffe" containerName="mariadb-database-create" Jan 20 17:02:21 crc kubenswrapper[4558]: I0120 17:02:21.999679 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d97707d1-0217-458b-88ea-5589b040f499" containerName="keystone-db-sync" Jan 20 17:02:21 crc kubenswrapper[4558]: I0120 17:02:21.999688 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e574e9d4-62db-4b0d-8dd3-caa44f7b4534" containerName="mariadb-account-create-update" Jan 20 17:02:21 crc kubenswrapper[4558]: I0120 17:02:21.999697 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="52c6bb4f-39bf-4834-a35c-c7d3234e6799" containerName="mariadb-account-create-update" Jan 20 17:02:21 crc kubenswrapper[4558]: I0120 17:02:21.999708 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c5521aa3-d4e7-4339-9c59-749808bacb09" containerName="mariadb-account-create-update" Jan 20 17:02:21 crc kubenswrapper[4558]: I0120 17:02:21.999717 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="98aac22a-672a-4029-8d83-be51df654f6e" containerName="mariadb-database-create" Jan 20 17:02:21 crc kubenswrapper[4558]: I0120 17:02:21.999724 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a0a56a29-2029-4a64-bb0c-539c45a09175" containerName="mariadb-database-create" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.000116 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-fkvmr" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.001758 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"osp-secret" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.003020 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-keystone-dockercfg-4bhst" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.003054 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-scripts" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.005803 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-config-data" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.006014 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.008260 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-fkvmr"] Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.055600 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.056739 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.058364 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-scripts" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.058485 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-glance-dockercfg-hb57g" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.058494 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-external-config-data" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.062000 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-public-svc" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.082079 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.091710 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/97ced1d2-4a68-4832-b175-82a06d263ce0-credential-keys\") pod \"keystone-bootstrap-fkvmr\" (UID: \"97ced1d2-4a68-4832-b175-82a06d263ce0\") " pod="openstack-kuttl-tests/keystone-bootstrap-fkvmr" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.091765 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97ced1d2-4a68-4832-b175-82a06d263ce0-combined-ca-bundle\") pod \"keystone-bootstrap-fkvmr\" (UID: \"97ced1d2-4a68-4832-b175-82a06d263ce0\") " pod="openstack-kuttl-tests/keystone-bootstrap-fkvmr" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.091788 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jsdrc\" (UniqueName: \"kubernetes.io/projected/97ced1d2-4a68-4832-b175-82a06d263ce0-kube-api-access-jsdrc\") pod 
\"keystone-bootstrap-fkvmr\" (UID: \"97ced1d2-4a68-4832-b175-82a06d263ce0\") " pod="openstack-kuttl-tests/keystone-bootstrap-fkvmr" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.091838 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/97ced1d2-4a68-4832-b175-82a06d263ce0-scripts\") pod \"keystone-bootstrap-fkvmr\" (UID: \"97ced1d2-4a68-4832-b175-82a06d263ce0\") " pod="openstack-kuttl-tests/keystone-bootstrap-fkvmr" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.091874 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97ced1d2-4a68-4832-b175-82a06d263ce0-config-data\") pod \"keystone-bootstrap-fkvmr\" (UID: \"97ced1d2-4a68-4832-b175-82a06d263ce0\") " pod="openstack-kuttl-tests/keystone-bootstrap-fkvmr" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.091897 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/97ced1d2-4a68-4832-b175-82a06d263ce0-fernet-keys\") pod \"keystone-bootstrap-fkvmr\" (UID: \"97ced1d2-4a68-4832-b175-82a06d263ce0\") " pod="openstack-kuttl-tests/keystone-bootstrap-fkvmr" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.117514 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.118990 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.121392 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.126180 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.127202 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.136619 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.137974 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-internal-svc" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.138363 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-internal-config-data" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.146424 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.170230 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.193311 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3153c81a-39a0-4250-b53b-154a6a08ccbf-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3153c81a-39a0-4250-b53b-154a6a08ccbf\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.193349 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/60f62690-3d64-489d-81cc-9f4df72d8de2-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"60f62690-3d64-489d-81cc-9f4df72d8de2\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.193380 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/97ced1d2-4a68-4832-b175-82a06d263ce0-credential-keys\") pod \"keystone-bootstrap-fkvmr\" (UID: \"97ced1d2-4a68-4832-b175-82a06d263ce0\") " pod="openstack-kuttl-tests/keystone-bootstrap-fkvmr" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.193395 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3c5b9a60-72a8-411a-aefe-2e44369cde84-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"3c5b9a60-72a8-411a-aefe-2e44369cde84\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.193412 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"3c5b9a60-72a8-411a-aefe-2e44369cde84\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.193433 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fjscm\" (UniqueName: \"kubernetes.io/projected/60f62690-3d64-489d-81cc-9f4df72d8de2-kube-api-access-fjscm\") pod \"glance-default-external-api-0\" (UID: \"60f62690-3d64-489d-81cc-9f4df72d8de2\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.193451 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/60f62690-3d64-489d-81cc-9f4df72d8de2-httpd-run\") pod 
\"glance-default-external-api-0\" (UID: \"60f62690-3d64-489d-81cc-9f4df72d8de2\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.193464 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3c5b9a60-72a8-411a-aefe-2e44369cde84-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"3c5b9a60-72a8-411a-aefe-2e44369cde84\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.193478 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-external-api-0\" (UID: \"60f62690-3d64-489d-81cc-9f4df72d8de2\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.193498 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97ced1d2-4a68-4832-b175-82a06d263ce0-combined-ca-bundle\") pod \"keystone-bootstrap-fkvmr\" (UID: \"97ced1d2-4a68-4832-b175-82a06d263ce0\") " pod="openstack-kuttl-tests/keystone-bootstrap-fkvmr" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.193517 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jsdrc\" (UniqueName: \"kubernetes.io/projected/97ced1d2-4a68-4832-b175-82a06d263ce0-kube-api-access-jsdrc\") pod \"keystone-bootstrap-fkvmr\" (UID: \"97ced1d2-4a68-4832-b175-82a06d263ce0\") " pod="openstack-kuttl-tests/keystone-bootstrap-fkvmr" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.193542 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7r6st\" (UniqueName: \"kubernetes.io/projected/3153c81a-39a0-4250-b53b-154a6a08ccbf-kube-api-access-7r6st\") pod \"ceilometer-0\" (UID: \"3153c81a-39a0-4250-b53b-154a6a08ccbf\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.193559 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/97ced1d2-4a68-4832-b175-82a06d263ce0-scripts\") pod \"keystone-bootstrap-fkvmr\" (UID: \"97ced1d2-4a68-4832-b175-82a06d263ce0\") " pod="openstack-kuttl-tests/keystone-bootstrap-fkvmr" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.193573 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3c5b9a60-72a8-411a-aefe-2e44369cde84-scripts\") pod \"glance-default-internal-api-0\" (UID: \"3c5b9a60-72a8-411a-aefe-2e44369cde84\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.193587 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3153c81a-39a0-4250-b53b-154a6a08ccbf-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3153c81a-39a0-4250-b53b-154a6a08ccbf\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.193601 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/3c5b9a60-72a8-411a-aefe-2e44369cde84-config-data\") pod \"glance-default-internal-api-0\" (UID: \"3c5b9a60-72a8-411a-aefe-2e44369cde84\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.193621 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3153c81a-39a0-4250-b53b-154a6a08ccbf-log-httpd\") pod \"ceilometer-0\" (UID: \"3153c81a-39a0-4250-b53b-154a6a08ccbf\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.193635 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3153c81a-39a0-4250-b53b-154a6a08ccbf-run-httpd\") pod \"ceilometer-0\" (UID: \"3153c81a-39a0-4250-b53b-154a6a08ccbf\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.193651 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r9g6d\" (UniqueName: \"kubernetes.io/projected/3c5b9a60-72a8-411a-aefe-2e44369cde84-kube-api-access-r9g6d\") pod \"glance-default-internal-api-0\" (UID: \"3c5b9a60-72a8-411a-aefe-2e44369cde84\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.193669 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3153c81a-39a0-4250-b53b-154a6a08ccbf-config-data\") pod \"ceilometer-0\" (UID: \"3153c81a-39a0-4250-b53b-154a6a08ccbf\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.193684 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97ced1d2-4a68-4832-b175-82a06d263ce0-config-data\") pod \"keystone-bootstrap-fkvmr\" (UID: \"97ced1d2-4a68-4832-b175-82a06d263ce0\") " pod="openstack-kuttl-tests/keystone-bootstrap-fkvmr" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.193703 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/60f62690-3d64-489d-81cc-9f4df72d8de2-scripts\") pod \"glance-default-external-api-0\" (UID: \"60f62690-3d64-489d-81cc-9f4df72d8de2\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.193716 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/97ced1d2-4a68-4832-b175-82a06d263ce0-fernet-keys\") pod \"keystone-bootstrap-fkvmr\" (UID: \"97ced1d2-4a68-4832-b175-82a06d263ce0\") " pod="openstack-kuttl-tests/keystone-bootstrap-fkvmr" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.193733 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3153c81a-39a0-4250-b53b-154a6a08ccbf-scripts\") pod \"ceilometer-0\" (UID: \"3153c81a-39a0-4250-b53b-154a6a08ccbf\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.193752 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/60f62690-3d64-489d-81cc-9f4df72d8de2-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"60f62690-3d64-489d-81cc-9f4df72d8de2\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.193780 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/60f62690-3d64-489d-81cc-9f4df72d8de2-config-data\") pod \"glance-default-external-api-0\" (UID: \"60f62690-3d64-489d-81cc-9f4df72d8de2\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.193793 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/60f62690-3d64-489d-81cc-9f4df72d8de2-logs\") pod \"glance-default-external-api-0\" (UID: \"60f62690-3d64-489d-81cc-9f4df72d8de2\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.193860 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c5b9a60-72a8-411a-aefe-2e44369cde84-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"3c5b9a60-72a8-411a-aefe-2e44369cde84\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.193900 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3c5b9a60-72a8-411a-aefe-2e44369cde84-logs\") pod \"glance-default-internal-api-0\" (UID: \"3c5b9a60-72a8-411a-aefe-2e44369cde84\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.203329 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97ced1d2-4a68-4832-b175-82a06d263ce0-combined-ca-bundle\") pod \"keystone-bootstrap-fkvmr\" (UID: \"97ced1d2-4a68-4832-b175-82a06d263ce0\") " pod="openstack-kuttl-tests/keystone-bootstrap-fkvmr" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.205089 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/97ced1d2-4a68-4832-b175-82a06d263ce0-scripts\") pod \"keystone-bootstrap-fkvmr\" (UID: \"97ced1d2-4a68-4832-b175-82a06d263ce0\") " pod="openstack-kuttl-tests/keystone-bootstrap-fkvmr" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.206765 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97ced1d2-4a68-4832-b175-82a06d263ce0-config-data\") pod \"keystone-bootstrap-fkvmr\" (UID: \"97ced1d2-4a68-4832-b175-82a06d263ce0\") " pod="openstack-kuttl-tests/keystone-bootstrap-fkvmr" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.213515 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/97ced1d2-4a68-4832-b175-82a06d263ce0-fernet-keys\") pod \"keystone-bootstrap-fkvmr\" (UID: \"97ced1d2-4a68-4832-b175-82a06d263ce0\") " pod="openstack-kuttl-tests/keystone-bootstrap-fkvmr" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.213745 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jsdrc\" (UniqueName: 
\"kubernetes.io/projected/97ced1d2-4a68-4832-b175-82a06d263ce0-kube-api-access-jsdrc\") pod \"keystone-bootstrap-fkvmr\" (UID: \"97ced1d2-4a68-4832-b175-82a06d263ce0\") " pod="openstack-kuttl-tests/keystone-bootstrap-fkvmr" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.213935 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/97ced1d2-4a68-4832-b175-82a06d263ce0-credential-keys\") pod \"keystone-bootstrap-fkvmr\" (UID: \"97ced1d2-4a68-4832-b175-82a06d263ce0\") " pod="openstack-kuttl-tests/keystone-bootstrap-fkvmr" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.294745 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3153c81a-39a0-4250-b53b-154a6a08ccbf-config-data\") pod \"ceilometer-0\" (UID: \"3153c81a-39a0-4250-b53b-154a6a08ccbf\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.294789 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/60f62690-3d64-489d-81cc-9f4df72d8de2-scripts\") pod \"glance-default-external-api-0\" (UID: \"60f62690-3d64-489d-81cc-9f4df72d8de2\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.294811 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3153c81a-39a0-4250-b53b-154a6a08ccbf-scripts\") pod \"ceilometer-0\" (UID: \"3153c81a-39a0-4250-b53b-154a6a08ccbf\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.294830 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/60f62690-3d64-489d-81cc-9f4df72d8de2-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"60f62690-3d64-489d-81cc-9f4df72d8de2\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.294849 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/60f62690-3d64-489d-81cc-9f4df72d8de2-logs\") pod \"glance-default-external-api-0\" (UID: \"60f62690-3d64-489d-81cc-9f4df72d8de2\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.294870 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c5b9a60-72a8-411a-aefe-2e44369cde84-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"3c5b9a60-72a8-411a-aefe-2e44369cde84\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.294884 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/60f62690-3d64-489d-81cc-9f4df72d8de2-config-data\") pod \"glance-default-external-api-0\" (UID: \"60f62690-3d64-489d-81cc-9f4df72d8de2\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.294897 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3c5b9a60-72a8-411a-aefe-2e44369cde84-logs\") pod 
\"glance-default-internal-api-0\" (UID: \"3c5b9a60-72a8-411a-aefe-2e44369cde84\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.294921 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3153c81a-39a0-4250-b53b-154a6a08ccbf-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3153c81a-39a0-4250-b53b-154a6a08ccbf\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.294935 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/60f62690-3d64-489d-81cc-9f4df72d8de2-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"60f62690-3d64-489d-81cc-9f4df72d8de2\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.294956 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3c5b9a60-72a8-411a-aefe-2e44369cde84-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"3c5b9a60-72a8-411a-aefe-2e44369cde84\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.294971 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"3c5b9a60-72a8-411a-aefe-2e44369cde84\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.294994 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fjscm\" (UniqueName: \"kubernetes.io/projected/60f62690-3d64-489d-81cc-9f4df72d8de2-kube-api-access-fjscm\") pod \"glance-default-external-api-0\" (UID: \"60f62690-3d64-489d-81cc-9f4df72d8de2\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.295022 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/60f62690-3d64-489d-81cc-9f4df72d8de2-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"60f62690-3d64-489d-81cc-9f4df72d8de2\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.295036 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3c5b9a60-72a8-411a-aefe-2e44369cde84-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"3c5b9a60-72a8-411a-aefe-2e44369cde84\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.295052 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-external-api-0\" (UID: \"60f62690-3d64-489d-81cc-9f4df72d8de2\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.295079 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7r6st\" (UniqueName: 
\"kubernetes.io/projected/3153c81a-39a0-4250-b53b-154a6a08ccbf-kube-api-access-7r6st\") pod \"ceilometer-0\" (UID: \"3153c81a-39a0-4250-b53b-154a6a08ccbf\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.295093 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3c5b9a60-72a8-411a-aefe-2e44369cde84-scripts\") pod \"glance-default-internal-api-0\" (UID: \"3c5b9a60-72a8-411a-aefe-2e44369cde84\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.295108 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3153c81a-39a0-4250-b53b-154a6a08ccbf-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3153c81a-39a0-4250-b53b-154a6a08ccbf\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.295124 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c5b9a60-72a8-411a-aefe-2e44369cde84-config-data\") pod \"glance-default-internal-api-0\" (UID: \"3c5b9a60-72a8-411a-aefe-2e44369cde84\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.295142 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3153c81a-39a0-4250-b53b-154a6a08ccbf-log-httpd\") pod \"ceilometer-0\" (UID: \"3153c81a-39a0-4250-b53b-154a6a08ccbf\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.295156 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3153c81a-39a0-4250-b53b-154a6a08ccbf-run-httpd\") pod \"ceilometer-0\" (UID: \"3153c81a-39a0-4250-b53b-154a6a08ccbf\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.295193 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r9g6d\" (UniqueName: \"kubernetes.io/projected/3c5b9a60-72a8-411a-aefe-2e44369cde84-kube-api-access-r9g6d\") pod \"glance-default-internal-api-0\" (UID: \"3c5b9a60-72a8-411a-aefe-2e44369cde84\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.295790 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"3c5b9a60-72a8-411a-aefe-2e44369cde84\") device mount path \"/mnt/openstack/pv07\"" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.297980 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/60f62690-3d64-489d-81cc-9f4df72d8de2-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"60f62690-3d64-489d-81cc-9f4df72d8de2\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.298387 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3c5b9a60-72a8-411a-aefe-2e44369cde84-httpd-run\") pod 
\"glance-default-internal-api-0\" (UID: \"3c5b9a60-72a8-411a-aefe-2e44369cde84\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.298635 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-external-api-0\" (UID: \"60f62690-3d64-489d-81cc-9f4df72d8de2\") device mount path \"/mnt/openstack/pv01\"" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.304327 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3153c81a-39a0-4250-b53b-154a6a08ccbf-log-httpd\") pod \"ceilometer-0\" (UID: \"3153c81a-39a0-4250-b53b-154a6a08ccbf\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.304353 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3c5b9a60-72a8-411a-aefe-2e44369cde84-logs\") pod \"glance-default-internal-api-0\" (UID: \"3c5b9a60-72a8-411a-aefe-2e44369cde84\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.304415 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/60f62690-3d64-489d-81cc-9f4df72d8de2-logs\") pod \"glance-default-external-api-0\" (UID: \"60f62690-3d64-489d-81cc-9f4df72d8de2\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.304568 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3153c81a-39a0-4250-b53b-154a6a08ccbf-run-httpd\") pod \"ceilometer-0\" (UID: \"3153c81a-39a0-4250-b53b-154a6a08ccbf\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.304630 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3153c81a-39a0-4250-b53b-154a6a08ccbf-config-data\") pod \"ceilometer-0\" (UID: \"3153c81a-39a0-4250-b53b-154a6a08ccbf\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.308877 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/60f62690-3d64-489d-81cc-9f4df72d8de2-config-data\") pod \"glance-default-external-api-0\" (UID: \"60f62690-3d64-489d-81cc-9f4df72d8de2\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.310605 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3c5b9a60-72a8-411a-aefe-2e44369cde84-scripts\") pod \"glance-default-internal-api-0\" (UID: \"3c5b9a60-72a8-411a-aefe-2e44369cde84\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.315529 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3153c81a-39a0-4250-b53b-154a6a08ccbf-scripts\") pod \"ceilometer-0\" (UID: \"3153c81a-39a0-4250-b53b-154a6a08ccbf\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.315577 4558 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-db-sync-x2xk2"] Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.315813 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3c5b9a60-72a8-411a-aefe-2e44369cde84-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"3c5b9a60-72a8-411a-aefe-2e44369cde84\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.316450 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-db-sync-x2xk2" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.317156 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-fkvmr" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.320122 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/60f62690-3d64-489d-81cc-9f4df72d8de2-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"60f62690-3d64-489d-81cc-9f4df72d8de2\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.323262 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-neutron-dockercfg-52956" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.323413 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/60f62690-3d64-489d-81cc-9f4df72d8de2-scripts\") pod \"glance-default-external-api-0\" (UID: \"60f62690-3d64-489d-81cc-9f4df72d8de2\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.323449 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-httpd-config" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.323496 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-config" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.324366 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c5b9a60-72a8-411a-aefe-2e44369cde84-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"3c5b9a60-72a8-411a-aefe-2e44369cde84\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.325459 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/60f62690-3d64-489d-81cc-9f4df72d8de2-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"60f62690-3d64-489d-81cc-9f4df72d8de2\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.328975 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3153c81a-39a0-4250-b53b-154a6a08ccbf-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"3153c81a-39a0-4250-b53b-154a6a08ccbf\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.329058 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-db-sync-x2xk2"] Jan 20 17:02:22 crc 
kubenswrapper[4558]: I0120 17:02:22.335388 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-external-api-0\" (UID: \"60f62690-3d64-489d-81cc-9f4df72d8de2\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.335530 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fjscm\" (UniqueName: \"kubernetes.io/projected/60f62690-3d64-489d-81cc-9f4df72d8de2-kube-api-access-fjscm\") pod \"glance-default-external-api-0\" (UID: \"60f62690-3d64-489d-81cc-9f4df72d8de2\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.348632 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-db-sync-qchhz"] Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.349530 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-db-sync-qchhz" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.360890 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-config-data" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.361106 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-scripts" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.361310 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-cinder-dockercfg-85x2r" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.361673 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3153c81a-39a0-4250-b53b-154a6a08ccbf-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"3153c81a-39a0-4250-b53b-154a6a08ccbf\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.361117 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r9g6d\" (UniqueName: \"kubernetes.io/projected/3c5b9a60-72a8-411a-aefe-2e44369cde84-kube-api-access-r9g6d\") pod \"glance-default-internal-api-0\" (UID: \"3c5b9a60-72a8-411a-aefe-2e44369cde84\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.361128 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7r6st\" (UniqueName: \"kubernetes.io/projected/3153c81a-39a0-4250-b53b-154a6a08ccbf-kube-api-access-7r6st\") pod \"ceilometer-0\" (UID: \"3153c81a-39a0-4250-b53b-154a6a08ccbf\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.361897 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c5b9a60-72a8-411a-aefe-2e44369cde84-config-data\") pod \"glance-default-internal-api-0\" (UID: \"3c5b9a60-72a8-411a-aefe-2e44369cde84\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.369287 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/placement-db-sync-bdqwq"] Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.371032 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.371294 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-db-sync-bdqwq" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.375682 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-db-sync-qchhz"] Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.376831 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"placement-scripts" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.377054 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"placement-placement-dockercfg-skc27" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.377985 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"3c5b9a60-72a8-411a-aefe-2e44369cde84\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.383959 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-db-sync-bdqwq"] Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.386741 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"placement-config-data" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.396134 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n6tt5\" (UniqueName: \"kubernetes.io/projected/1f9b2eba-d2f3-4391-9420-3d797982db8a-kube-api-access-n6tt5\") pod \"neutron-db-sync-x2xk2\" (UID: \"1f9b2eba-d2f3-4391-9420-3d797982db8a\") " pod="openstack-kuttl-tests/neutron-db-sync-x2xk2" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.396208 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/be13d82f-4865-44dc-8a5a-4ecc7f2cabd9-logs\") pod \"placement-db-sync-bdqwq\" (UID: \"be13d82f-4865-44dc-8a5a-4ecc7f2cabd9\") " pod="openstack-kuttl-tests/placement-db-sync-bdqwq" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.396237 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2p6xd\" (UniqueName: \"kubernetes.io/projected/e2597e53-47a7-4b6a-9fc1-06d6561cd69f-kube-api-access-2p6xd\") pod \"cinder-db-sync-qchhz\" (UID: \"e2597e53-47a7-4b6a-9fc1-06d6561cd69f\") " pod="openstack-kuttl-tests/cinder-db-sync-qchhz" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.396265 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2597e53-47a7-4b6a-9fc1-06d6561cd69f-combined-ca-bundle\") pod \"cinder-db-sync-qchhz\" (UID: \"e2597e53-47a7-4b6a-9fc1-06d6561cd69f\") " pod="openstack-kuttl-tests/cinder-db-sync-qchhz" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.396290 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/1f9b2eba-d2f3-4391-9420-3d797982db8a-config\") pod \"neutron-db-sync-x2xk2\" (UID: \"1f9b2eba-d2f3-4391-9420-3d797982db8a\") " pod="openstack-kuttl-tests/neutron-db-sync-x2xk2" 
Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.396305 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/be13d82f-4865-44dc-8a5a-4ecc7f2cabd9-scripts\") pod \"placement-db-sync-bdqwq\" (UID: \"be13d82f-4865-44dc-8a5a-4ecc7f2cabd9\") " pod="openstack-kuttl-tests/placement-db-sync-bdqwq" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.396319 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be13d82f-4865-44dc-8a5a-4ecc7f2cabd9-combined-ca-bundle\") pod \"placement-db-sync-bdqwq\" (UID: \"be13d82f-4865-44dc-8a5a-4ecc7f2cabd9\") " pod="openstack-kuttl-tests/placement-db-sync-bdqwq" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.396333 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f9b2eba-d2f3-4391-9420-3d797982db8a-combined-ca-bundle\") pod \"neutron-db-sync-x2xk2\" (UID: \"1f9b2eba-d2f3-4391-9420-3d797982db8a\") " pod="openstack-kuttl-tests/neutron-db-sync-x2xk2" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.396357 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e2597e53-47a7-4b6a-9fc1-06d6561cd69f-scripts\") pod \"cinder-db-sync-qchhz\" (UID: \"e2597e53-47a7-4b6a-9fc1-06d6561cd69f\") " pod="openstack-kuttl-tests/cinder-db-sync-qchhz" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.396393 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f6hhj\" (UniqueName: \"kubernetes.io/projected/be13d82f-4865-44dc-8a5a-4ecc7f2cabd9-kube-api-access-f6hhj\") pod \"placement-db-sync-bdqwq\" (UID: \"be13d82f-4865-44dc-8a5a-4ecc7f2cabd9\") " pod="openstack-kuttl-tests/placement-db-sync-bdqwq" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.396411 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be13d82f-4865-44dc-8a5a-4ecc7f2cabd9-config-data\") pod \"placement-db-sync-bdqwq\" (UID: \"be13d82f-4865-44dc-8a5a-4ecc7f2cabd9\") " pod="openstack-kuttl-tests/placement-db-sync-bdqwq" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.396433 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2597e53-47a7-4b6a-9fc1-06d6561cd69f-config-data\") pod \"cinder-db-sync-qchhz\" (UID: \"e2597e53-47a7-4b6a-9fc1-06d6561cd69f\") " pod="openstack-kuttl-tests/cinder-db-sync-qchhz" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.396447 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e2597e53-47a7-4b6a-9fc1-06d6561cd69f-etc-machine-id\") pod \"cinder-db-sync-qchhz\" (UID: \"e2597e53-47a7-4b6a-9fc1-06d6561cd69f\") " pod="openstack-kuttl-tests/cinder-db-sync-qchhz" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.396480 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e2597e53-47a7-4b6a-9fc1-06d6561cd69f-db-sync-config-data\") pod \"cinder-db-sync-qchhz\" (UID: 
\"e2597e53-47a7-4b6a-9fc1-06d6561cd69f\") " pod="openstack-kuttl-tests/cinder-db-sync-qchhz" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.430862 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.448534 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.478128 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-db-sync-tvtwc"] Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.479731 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-db-sync-tvtwc" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.508097 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-barbican-dockercfg-cvbvq" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.512325 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-config-data" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.512810 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-db-sync-tvtwc"] Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.513635 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2597e53-47a7-4b6a-9fc1-06d6561cd69f-config-data\") pod \"cinder-db-sync-qchhz\" (UID: \"e2597e53-47a7-4b6a-9fc1-06d6561cd69f\") " pod="openstack-kuttl-tests/cinder-db-sync-qchhz" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.513670 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e2597e53-47a7-4b6a-9fc1-06d6561cd69f-etc-machine-id\") pod \"cinder-db-sync-qchhz\" (UID: \"e2597e53-47a7-4b6a-9fc1-06d6561cd69f\") " pod="openstack-kuttl-tests/cinder-db-sync-qchhz" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.513733 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/0ccba453-b5b1-4a1c-9e37-1c4c991cfb93-db-sync-config-data\") pod \"barbican-db-sync-tvtwc\" (UID: \"0ccba453-b5b1-4a1c-9e37-1c4c991cfb93\") " pod="openstack-kuttl-tests/barbican-db-sync-tvtwc" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.513784 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e2597e53-47a7-4b6a-9fc1-06d6561cd69f-db-sync-config-data\") pod \"cinder-db-sync-qchhz\" (UID: \"e2597e53-47a7-4b6a-9fc1-06d6561cd69f\") " pod="openstack-kuttl-tests/cinder-db-sync-qchhz" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.513813 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n6tt5\" (UniqueName: \"kubernetes.io/projected/1f9b2eba-d2f3-4391-9420-3d797982db8a-kube-api-access-n6tt5\") pod \"neutron-db-sync-x2xk2\" (UID: \"1f9b2eba-d2f3-4391-9420-3d797982db8a\") " pod="openstack-kuttl-tests/neutron-db-sync-x2xk2" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.513874 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/be13d82f-4865-44dc-8a5a-4ecc7f2cabd9-logs\") pod \"placement-db-sync-bdqwq\" (UID: \"be13d82f-4865-44dc-8a5a-4ecc7f2cabd9\") " pod="openstack-kuttl-tests/placement-db-sync-bdqwq" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.513908 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ccba453-b5b1-4a1c-9e37-1c4c991cfb93-combined-ca-bundle\") pod \"barbican-db-sync-tvtwc\" (UID: \"0ccba453-b5b1-4a1c-9e37-1c4c991cfb93\") " pod="openstack-kuttl-tests/barbican-db-sync-tvtwc" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.513940 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2p6xd\" (UniqueName: \"kubernetes.io/projected/e2597e53-47a7-4b6a-9fc1-06d6561cd69f-kube-api-access-2p6xd\") pod \"cinder-db-sync-qchhz\" (UID: \"e2597e53-47a7-4b6a-9fc1-06d6561cd69f\") " pod="openstack-kuttl-tests/cinder-db-sync-qchhz" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.513957 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2597e53-47a7-4b6a-9fc1-06d6561cd69f-combined-ca-bundle\") pod \"cinder-db-sync-qchhz\" (UID: \"e2597e53-47a7-4b6a-9fc1-06d6561cd69f\") " pod="openstack-kuttl-tests/cinder-db-sync-qchhz" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.513977 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f7d47\" (UniqueName: \"kubernetes.io/projected/0ccba453-b5b1-4a1c-9e37-1c4c991cfb93-kube-api-access-f7d47\") pod \"barbican-db-sync-tvtwc\" (UID: \"0ccba453-b5b1-4a1c-9e37-1c4c991cfb93\") " pod="openstack-kuttl-tests/barbican-db-sync-tvtwc" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.514006 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/1f9b2eba-d2f3-4391-9420-3d797982db8a-config\") pod \"neutron-db-sync-x2xk2\" (UID: \"1f9b2eba-d2f3-4391-9420-3d797982db8a\") " pod="openstack-kuttl-tests/neutron-db-sync-x2xk2" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.514024 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/be13d82f-4865-44dc-8a5a-4ecc7f2cabd9-scripts\") pod \"placement-db-sync-bdqwq\" (UID: \"be13d82f-4865-44dc-8a5a-4ecc7f2cabd9\") " pod="openstack-kuttl-tests/placement-db-sync-bdqwq" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.514044 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be13d82f-4865-44dc-8a5a-4ecc7f2cabd9-combined-ca-bundle\") pod \"placement-db-sync-bdqwq\" (UID: \"be13d82f-4865-44dc-8a5a-4ecc7f2cabd9\") " pod="openstack-kuttl-tests/placement-db-sync-bdqwq" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.514061 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f9b2eba-d2f3-4391-9420-3d797982db8a-combined-ca-bundle\") pod \"neutron-db-sync-x2xk2\" (UID: \"1f9b2eba-d2f3-4391-9420-3d797982db8a\") " pod="openstack-kuttl-tests/neutron-db-sync-x2xk2" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.514107 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/e2597e53-47a7-4b6a-9fc1-06d6561cd69f-scripts\") pod \"cinder-db-sync-qchhz\" (UID: \"e2597e53-47a7-4b6a-9fc1-06d6561cd69f\") " pod="openstack-kuttl-tests/cinder-db-sync-qchhz" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.514136 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f6hhj\" (UniqueName: \"kubernetes.io/projected/be13d82f-4865-44dc-8a5a-4ecc7f2cabd9-kube-api-access-f6hhj\") pod \"placement-db-sync-bdqwq\" (UID: \"be13d82f-4865-44dc-8a5a-4ecc7f2cabd9\") " pod="openstack-kuttl-tests/placement-db-sync-bdqwq" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.514153 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be13d82f-4865-44dc-8a5a-4ecc7f2cabd9-config-data\") pod \"placement-db-sync-bdqwq\" (UID: \"be13d82f-4865-44dc-8a5a-4ecc7f2cabd9\") " pod="openstack-kuttl-tests/placement-db-sync-bdqwq" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.527992 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/be13d82f-4865-44dc-8a5a-4ecc7f2cabd9-logs\") pod \"placement-db-sync-bdqwq\" (UID: \"be13d82f-4865-44dc-8a5a-4ecc7f2cabd9\") " pod="openstack-kuttl-tests/placement-db-sync-bdqwq" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.529978 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e2597e53-47a7-4b6a-9fc1-06d6561cd69f-etc-machine-id\") pod \"cinder-db-sync-qchhz\" (UID: \"e2597e53-47a7-4b6a-9fc1-06d6561cd69f\") " pod="openstack-kuttl-tests/cinder-db-sync-qchhz" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.531020 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e2597e53-47a7-4b6a-9fc1-06d6561cd69f-db-sync-config-data\") pod \"cinder-db-sync-qchhz\" (UID: \"e2597e53-47a7-4b6a-9fc1-06d6561cd69f\") " pod="openstack-kuttl-tests/cinder-db-sync-qchhz" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.542299 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e2597e53-47a7-4b6a-9fc1-06d6561cd69f-scripts\") pod \"cinder-db-sync-qchhz\" (UID: \"e2597e53-47a7-4b6a-9fc1-06d6561cd69f\") " pod="openstack-kuttl-tests/cinder-db-sync-qchhz" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.547410 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/be13d82f-4865-44dc-8a5a-4ecc7f2cabd9-scripts\") pod \"placement-db-sync-bdqwq\" (UID: \"be13d82f-4865-44dc-8a5a-4ecc7f2cabd9\") " pod="openstack-kuttl-tests/placement-db-sync-bdqwq" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.549518 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2597e53-47a7-4b6a-9fc1-06d6561cd69f-combined-ca-bundle\") pod \"cinder-db-sync-qchhz\" (UID: \"e2597e53-47a7-4b6a-9fc1-06d6561cd69f\") " pod="openstack-kuttl-tests/cinder-db-sync-qchhz" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.550359 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f9b2eba-d2f3-4391-9420-3d797982db8a-combined-ca-bundle\") pod \"neutron-db-sync-x2xk2\" (UID: \"1f9b2eba-d2f3-4391-9420-3d797982db8a\") " 
pod="openstack-kuttl-tests/neutron-db-sync-x2xk2" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.568126 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/1f9b2eba-d2f3-4391-9420-3d797982db8a-config\") pod \"neutron-db-sync-x2xk2\" (UID: \"1f9b2eba-d2f3-4391-9420-3d797982db8a\") " pod="openstack-kuttl-tests/neutron-db-sync-x2xk2" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.568622 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2p6xd\" (UniqueName: \"kubernetes.io/projected/e2597e53-47a7-4b6a-9fc1-06d6561cd69f-kube-api-access-2p6xd\") pod \"cinder-db-sync-qchhz\" (UID: \"e2597e53-47a7-4b6a-9fc1-06d6561cd69f\") " pod="openstack-kuttl-tests/cinder-db-sync-qchhz" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.552378 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be13d82f-4865-44dc-8a5a-4ecc7f2cabd9-combined-ca-bundle\") pod \"placement-db-sync-bdqwq\" (UID: \"be13d82f-4865-44dc-8a5a-4ecc7f2cabd9\") " pod="openstack-kuttl-tests/placement-db-sync-bdqwq" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.568880 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f6hhj\" (UniqueName: \"kubernetes.io/projected/be13d82f-4865-44dc-8a5a-4ecc7f2cabd9-kube-api-access-f6hhj\") pod \"placement-db-sync-bdqwq\" (UID: \"be13d82f-4865-44dc-8a5a-4ecc7f2cabd9\") " pod="openstack-kuttl-tests/placement-db-sync-bdqwq" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.569156 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2597e53-47a7-4b6a-9fc1-06d6561cd69f-config-data\") pod \"cinder-db-sync-qchhz\" (UID: \"e2597e53-47a7-4b6a-9fc1-06d6561cd69f\") " pod="openstack-kuttl-tests/cinder-db-sync-qchhz" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.570328 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be13d82f-4865-44dc-8a5a-4ecc7f2cabd9-config-data\") pod \"placement-db-sync-bdqwq\" (UID: \"be13d82f-4865-44dc-8a5a-4ecc7f2cabd9\") " pod="openstack-kuttl-tests/placement-db-sync-bdqwq" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.574325 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n6tt5\" (UniqueName: \"kubernetes.io/projected/1f9b2eba-d2f3-4391-9420-3d797982db8a-kube-api-access-n6tt5\") pod \"neutron-db-sync-x2xk2\" (UID: \"1f9b2eba-d2f3-4391-9420-3d797982db8a\") " pod="openstack-kuttl-tests/neutron-db-sync-x2xk2" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.625483 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f7d47\" (UniqueName: \"kubernetes.io/projected/0ccba453-b5b1-4a1c-9e37-1c4c991cfb93-kube-api-access-f7d47\") pod \"barbican-db-sync-tvtwc\" (UID: \"0ccba453-b5b1-4a1c-9e37-1c4c991cfb93\") " pod="openstack-kuttl-tests/barbican-db-sync-tvtwc" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.625689 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/0ccba453-b5b1-4a1c-9e37-1c4c991cfb93-db-sync-config-data\") pod \"barbican-db-sync-tvtwc\" (UID: \"0ccba453-b5b1-4a1c-9e37-1c4c991cfb93\") " pod="openstack-kuttl-tests/barbican-db-sync-tvtwc" Jan 20 17:02:22 crc 
kubenswrapper[4558]: I0120 17:02:22.625840 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ccba453-b5b1-4a1c-9e37-1c4c991cfb93-combined-ca-bundle\") pod \"barbican-db-sync-tvtwc\" (UID: \"0ccba453-b5b1-4a1c-9e37-1c4c991cfb93\") " pod="openstack-kuttl-tests/barbican-db-sync-tvtwc" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.629931 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ccba453-b5b1-4a1c-9e37-1c4c991cfb93-combined-ca-bundle\") pod \"barbican-db-sync-tvtwc\" (UID: \"0ccba453-b5b1-4a1c-9e37-1c4c991cfb93\") " pod="openstack-kuttl-tests/barbican-db-sync-tvtwc" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.630703 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/0ccba453-b5b1-4a1c-9e37-1c4c991cfb93-db-sync-config-data\") pod \"barbican-db-sync-tvtwc\" (UID: \"0ccba453-b5b1-4a1c-9e37-1c4c991cfb93\") " pod="openstack-kuttl-tests/barbican-db-sync-tvtwc" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.638826 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f7d47\" (UniqueName: \"kubernetes.io/projected/0ccba453-b5b1-4a1c-9e37-1c4c991cfb93-kube-api-access-f7d47\") pod \"barbican-db-sync-tvtwc\" (UID: \"0ccba453-b5b1-4a1c-9e37-1c4c991cfb93\") " pod="openstack-kuttl-tests/barbican-db-sync-tvtwc" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.769191 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-db-sync-x2xk2" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.782441 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-db-sync-qchhz" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.807998 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-fkvmr"] Jan 20 17:02:22 crc kubenswrapper[4558]: W0120 17:02:22.818944 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod97ced1d2_4a68_4832_b175_82a06d263ce0.slice/crio-e96708ccbe696068203e788b006d844360990db1bb1a21a87153f341e8ac2d39 WatchSource:0}: Error finding container e96708ccbe696068203e788b006d844360990db1bb1a21a87153f341e8ac2d39: Status 404 returned error can't find the container with id e96708ccbe696068203e788b006d844360990db1bb1a21a87153f341e8ac2d39 Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.848671 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-db-sync-tvtwc" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.849150 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-db-sync-bdqwq" Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.941822 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.955317 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:02:22 crc kubenswrapper[4558]: I0120 17:02:22.962754 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:02:23 crc kubenswrapper[4558]: I0120 17:02:23.240793 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-db-sync-qchhz"] Jan 20 17:02:23 crc kubenswrapper[4558]: W0120 17:02:23.255627 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode2597e53_47a7_4b6a_9fc1_06d6561cd69f.slice/crio-1139fc00b65572f8f043c75b41c11e21c6b6e82e42fe077bbe7be5802149714a WatchSource:0}: Error finding container 1139fc00b65572f8f043c75b41c11e21c6b6e82e42fe077bbe7be5802149714a: Status 404 returned error can't find the container with id 1139fc00b65572f8f043c75b41c11e21c6b6e82e42fe077bbe7be5802149714a Jan 20 17:02:23 crc kubenswrapper[4558]: I0120 17:02:23.300390 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-db-sync-x2xk2"] Jan 20 17:02:23 crc kubenswrapper[4558]: I0120 17:02:23.383466 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-db-sync-bdqwq"] Jan 20 17:02:23 crc kubenswrapper[4558]: I0120 17:02:23.393636 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-db-sync-tvtwc"] Jan 20 17:02:23 crc kubenswrapper[4558]: W0120 17:02:23.395222 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbe13d82f_4865_44dc_8a5a_4ecc7f2cabd9.slice/crio-8209aa75c3d7e523620509d0833d4fa6890ec7c4e9f922ea52746384e53f2366 WatchSource:0}: Error finding container 8209aa75c3d7e523620509d0833d4fa6890ec7c4e9f922ea52746384e53f2366: Status 404 returned error can't find the container with id 8209aa75c3d7e523620509d0833d4fa6890ec7c4e9f922ea52746384e53f2366 Jan 20 17:02:23 crc kubenswrapper[4558]: W0120 17:02:23.410388 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0ccba453_b5b1_4a1c_9e37_1c4c991cfb93.slice/crio-e88377dfc7730541bfaecfcce2415332175ab0557b2d4e2a80de72f12c684ed7 WatchSource:0}: Error finding container e88377dfc7730541bfaecfcce2415332175ab0557b2d4e2a80de72f12c684ed7: Status 404 returned error can't find the container with id e88377dfc7730541bfaecfcce2415332175ab0557b2d4e2a80de72f12c684ed7 Jan 20 17:02:23 crc kubenswrapper[4558]: I0120 17:02:23.682895 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"3153c81a-39a0-4250-b53b-154a6a08ccbf","Type":"ContainerStarted","Data":"d26af524473190ce9b550786eb42b07afbf9d6fc4768c9ef25b2e971bcaa3e9b"} Jan 20 17:02:23 crc kubenswrapper[4558]: I0120 17:02:23.684455 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"3c5b9a60-72a8-411a-aefe-2e44369cde84","Type":"ContainerStarted","Data":"1f07e720da87df55a95ebae79180be8b685ae03e0a0fe1bb02aab9e6634970b7"} Jan 20 17:02:23 crc 
kubenswrapper[4558]: I0120 17:02:23.684478 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"3c5b9a60-72a8-411a-aefe-2e44369cde84","Type":"ContainerStarted","Data":"0778d458de2f864328aeba1949a011a6174794051664d10b02d57717281b7917"} Jan 20 17:02:23 crc kubenswrapper[4558]: I0120 17:02:23.685604 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"60f62690-3d64-489d-81cc-9f4df72d8de2","Type":"ContainerStarted","Data":"80313731d842ee7b3e898a7907d796bf070ce325c33e2b622c15260ba6f3930f"} Jan 20 17:02:23 crc kubenswrapper[4558]: I0120 17:02:23.685625 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"60f62690-3d64-489d-81cc-9f4df72d8de2","Type":"ContainerStarted","Data":"e7c2c33672032f8e3e93b84cec181a314a4c38c2112b9a11303fbb4c920cd775"} Jan 20 17:02:23 crc kubenswrapper[4558]: I0120 17:02:23.687913 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-fkvmr" event={"ID":"97ced1d2-4a68-4832-b175-82a06d263ce0","Type":"ContainerStarted","Data":"49b1d953e107342a6b4a2ead13c7460b3febff9178ceee056591b0e1435dc0d9"} Jan 20 17:02:23 crc kubenswrapper[4558]: I0120 17:02:23.687939 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-fkvmr" event={"ID":"97ced1d2-4a68-4832-b175-82a06d263ce0","Type":"ContainerStarted","Data":"e96708ccbe696068203e788b006d844360990db1bb1a21a87153f341e8ac2d39"} Jan 20 17:02:23 crc kubenswrapper[4558]: I0120 17:02:23.695644 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-sync-tvtwc" event={"ID":"0ccba453-b5b1-4a1c-9e37-1c4c991cfb93","Type":"ContainerStarted","Data":"0418065a826dccc72f46275e64ebe555b3a206061ecb094ddae0dd94701f192f"} Jan 20 17:02:23 crc kubenswrapper[4558]: I0120 17:02:23.695668 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-sync-tvtwc" event={"ID":"0ccba453-b5b1-4a1c-9e37-1c4c991cfb93","Type":"ContainerStarted","Data":"e88377dfc7730541bfaecfcce2415332175ab0557b2d4e2a80de72f12c684ed7"} Jan 20 17:02:23 crc kubenswrapper[4558]: I0120 17:02:23.703135 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/keystone-bootstrap-fkvmr" podStartSLOduration=2.703107846 podStartE2EDuration="2.703107846s" podCreationTimestamp="2026-01-20 17:02:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:02:23.701465548 +0000 UTC m=+1237.461803515" watchObservedRunningTime="2026-01-20 17:02:23.703107846 +0000 UTC m=+1237.463445813" Jan 20 17:02:23 crc kubenswrapper[4558]: I0120 17:02:23.704030 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-sync-bdqwq" event={"ID":"be13d82f-4865-44dc-8a5a-4ecc7f2cabd9","Type":"ContainerStarted","Data":"e73316f86d69e1990f728687e78cbc371595994b43c68ca7476b2c35486e8305"} Jan 20 17:02:23 crc kubenswrapper[4558]: I0120 17:02:23.704071 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-sync-bdqwq" event={"ID":"be13d82f-4865-44dc-8a5a-4ecc7f2cabd9","Type":"ContainerStarted","Data":"8209aa75c3d7e523620509d0833d4fa6890ec7c4e9f922ea52746384e53f2366"} Jan 20 17:02:23 crc kubenswrapper[4558]: I0120 17:02:23.705485 4558 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-sync-qchhz" event={"ID":"e2597e53-47a7-4b6a-9fc1-06d6561cd69f","Type":"ContainerStarted","Data":"1139fc00b65572f8f043c75b41c11e21c6b6e82e42fe077bbe7be5802149714a"} Jan 20 17:02:23 crc kubenswrapper[4558]: I0120 17:02:23.706661 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-sync-x2xk2" event={"ID":"1f9b2eba-d2f3-4391-9420-3d797982db8a","Type":"ContainerStarted","Data":"4f36c092e43707a5550308954378b43c241d5129d5701e92f63c5d91f23f9e25"} Jan 20 17:02:23 crc kubenswrapper[4558]: I0120 17:02:23.706696 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-sync-x2xk2" event={"ID":"1f9b2eba-d2f3-4391-9420-3d797982db8a","Type":"ContainerStarted","Data":"7887cc040f82b4f6d4ca9afe68e21d92b0876db5d05a26ca73a0e603982e939b"} Jan 20 17:02:23 crc kubenswrapper[4558]: I0120 17:02:23.727005 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-db-sync-tvtwc" podStartSLOduration=1.726992234 podStartE2EDuration="1.726992234s" podCreationTimestamp="2026-01-20 17:02:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:02:23.715045847 +0000 UTC m=+1237.475383814" watchObservedRunningTime="2026-01-20 17:02:23.726992234 +0000 UTC m=+1237.487330201" Jan 20 17:02:23 crc kubenswrapper[4558]: I0120 17:02:23.738181 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/neutron-db-sync-x2xk2" podStartSLOduration=1.7381572410000001 podStartE2EDuration="1.738157241s" podCreationTimestamp="2026-01-20 17:02:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:02:23.736586667 +0000 UTC m=+1237.496924635" watchObservedRunningTime="2026-01-20 17:02:23.738157241 +0000 UTC m=+1237.498495208" Jan 20 17:02:23 crc kubenswrapper[4558]: I0120 17:02:23.739334 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/placement-db-sync-bdqwq" podStartSLOduration=1.739329055 podStartE2EDuration="1.739329055s" podCreationTimestamp="2026-01-20 17:02:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:02:23.728077575 +0000 UTC m=+1237.488415542" watchObservedRunningTime="2026-01-20 17:02:23.739329055 +0000 UTC m=+1237.499667021" Jan 20 17:02:23 crc kubenswrapper[4558]: I0120 17:02:23.928601 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:02:23 crc kubenswrapper[4558]: I0120 17:02:23.983203 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:02:23 crc kubenswrapper[4558]: I0120 17:02:23.988024 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:02:24 crc kubenswrapper[4558]: I0120 17:02:24.731311 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"3c5b9a60-72a8-411a-aefe-2e44369cde84","Type":"ContainerStarted","Data":"b6a8528cca745ee557a1732567a161b240e4b1a2da9c6be9cb6c07cb9cb77dd0"} Jan 20 17:02:24 crc kubenswrapper[4558]: I0120 17:02:24.731414 4558 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="3c5b9a60-72a8-411a-aefe-2e44369cde84" containerName="glance-log" containerID="cri-o://1f07e720da87df55a95ebae79180be8b685ae03e0a0fe1bb02aab9e6634970b7" gracePeriod=30 Jan 20 17:02:24 crc kubenswrapper[4558]: I0120 17:02:24.731660 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="3c5b9a60-72a8-411a-aefe-2e44369cde84" containerName="glance-httpd" containerID="cri-o://b6a8528cca745ee557a1732567a161b240e4b1a2da9c6be9cb6c07cb9cb77dd0" gracePeriod=30 Jan 20 17:02:24 crc kubenswrapper[4558]: I0120 17:02:24.737385 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"60f62690-3d64-489d-81cc-9f4df72d8de2","Type":"ContainerStarted","Data":"5dbf118996210b555ca5f1c9147db4a4010d4e30ab3a403dc4c53ac87a5dc236"} Jan 20 17:02:24 crc kubenswrapper[4558]: I0120 17:02:24.737530 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="60f62690-3d64-489d-81cc-9f4df72d8de2" containerName="glance-log" containerID="cri-o://80313731d842ee7b3e898a7907d796bf070ce325c33e2b622c15260ba6f3930f" gracePeriod=30 Jan 20 17:02:24 crc kubenswrapper[4558]: I0120 17:02:24.737637 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="60f62690-3d64-489d-81cc-9f4df72d8de2" containerName="glance-httpd" containerID="cri-o://5dbf118996210b555ca5f1c9147db4a4010d4e30ab3a403dc4c53ac87a5dc236" gracePeriod=30 Jan 20 17:02:24 crc kubenswrapper[4558]: I0120 17:02:24.746541 4558 generic.go:334] "Generic (PLEG): container finished" podID="0ccba453-b5b1-4a1c-9e37-1c4c991cfb93" containerID="0418065a826dccc72f46275e64ebe555b3a206061ecb094ddae0dd94701f192f" exitCode=0 Jan 20 17:02:24 crc kubenswrapper[4558]: I0120 17:02:24.746599 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-sync-tvtwc" event={"ID":"0ccba453-b5b1-4a1c-9e37-1c4c991cfb93","Type":"ContainerDied","Data":"0418065a826dccc72f46275e64ebe555b3a206061ecb094ddae0dd94701f192f"} Jan 20 17:02:24 crc kubenswrapper[4558]: I0120 17:02:24.748263 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-sync-bdqwq" event={"ID":"be13d82f-4865-44dc-8a5a-4ecc7f2cabd9","Type":"ContainerDied","Data":"e73316f86d69e1990f728687e78cbc371595994b43c68ca7476b2c35486e8305"} Jan 20 17:02:24 crc kubenswrapper[4558]: I0120 17:02:24.748413 4558 generic.go:334] "Generic (PLEG): container finished" podID="be13d82f-4865-44dc-8a5a-4ecc7f2cabd9" containerID="e73316f86d69e1990f728687e78cbc371595994b43c68ca7476b2c35486e8305" exitCode=0 Jan 20 17:02:24 crc kubenswrapper[4558]: I0120 17:02:24.751273 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-internal-api-0" podStartSLOduration=2.751264428 podStartE2EDuration="2.751264428s" podCreationTimestamp="2026-01-20 17:02:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:02:24.747576733 +0000 UTC m=+1238.507914700" watchObservedRunningTime="2026-01-20 17:02:24.751264428 +0000 UTC m=+1238.511602395" Jan 20 17:02:24 crc kubenswrapper[4558]: I0120 17:02:24.753618 4558 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-sync-qchhz" event={"ID":"e2597e53-47a7-4b6a-9fc1-06d6561cd69f","Type":"ContainerStarted","Data":"61908f05aa1a1c1d9c5f59500dcb21ba49538140328cfaf4bca4fda3098e6ad6"} Jan 20 17:02:24 crc kubenswrapper[4558]: I0120 17:02:24.757027 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"3153c81a-39a0-4250-b53b-154a6a08ccbf","Type":"ContainerStarted","Data":"7de085a2635c97438a08689bb27694547ed0ecc2e18c9400eb1019ac044e74af"} Jan 20 17:02:24 crc kubenswrapper[4558]: I0120 17:02:24.778406 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-external-api-0" podStartSLOduration=2.778389431 podStartE2EDuration="2.778389431s" podCreationTimestamp="2026-01-20 17:02:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:02:24.776407082 +0000 UTC m=+1238.536745050" watchObservedRunningTime="2026-01-20 17:02:24.778389431 +0000 UTC m=+1238.538727398" Jan 20 17:02:24 crc kubenswrapper[4558]: I0120 17:02:24.800050 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/cinder-db-sync-qchhz" podStartSLOduration=2.800036861 podStartE2EDuration="2.800036861s" podCreationTimestamp="2026-01-20 17:02:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:02:24.796571564 +0000 UTC m=+1238.556909531" watchObservedRunningTime="2026-01-20 17:02:24.800036861 +0000 UTC m=+1238.560374828" Jan 20 17:02:25 crc kubenswrapper[4558]: I0120 17:02:25.285039 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:02:25 crc kubenswrapper[4558]: I0120 17:02:25.288333 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:02:25 crc kubenswrapper[4558]: I0120 17:02:25.367218 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3c5b9a60-72a8-411a-aefe-2e44369cde84-logs\") pod \"3c5b9a60-72a8-411a-aefe-2e44369cde84\" (UID: \"3c5b9a60-72a8-411a-aefe-2e44369cde84\") " Jan 20 17:02:25 crc kubenswrapper[4558]: I0120 17:02:25.367461 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r9g6d\" (UniqueName: \"kubernetes.io/projected/3c5b9a60-72a8-411a-aefe-2e44369cde84-kube-api-access-r9g6d\") pod \"3c5b9a60-72a8-411a-aefe-2e44369cde84\" (UID: \"3c5b9a60-72a8-411a-aefe-2e44369cde84\") " Jan 20 17:02:25 crc kubenswrapper[4558]: I0120 17:02:25.367487 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/60f62690-3d64-489d-81cc-9f4df72d8de2-logs\") pod \"60f62690-3d64-489d-81cc-9f4df72d8de2\" (UID: \"60f62690-3d64-489d-81cc-9f4df72d8de2\") " Jan 20 17:02:25 crc kubenswrapper[4558]: I0120 17:02:25.367521 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"60f62690-3d64-489d-81cc-9f4df72d8de2\" (UID: \"60f62690-3d64-489d-81cc-9f4df72d8de2\") " Jan 20 17:02:25 crc kubenswrapper[4558]: I0120 17:02:25.367540 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/60f62690-3d64-489d-81cc-9f4df72d8de2-scripts\") pod \"60f62690-3d64-489d-81cc-9f4df72d8de2\" (UID: \"60f62690-3d64-489d-81cc-9f4df72d8de2\") " Jan 20 17:02:25 crc kubenswrapper[4558]: I0120 17:02:25.367559 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3c5b9a60-72a8-411a-aefe-2e44369cde84-logs" (OuterVolumeSpecName: "logs") pod "3c5b9a60-72a8-411a-aefe-2e44369cde84" (UID: "3c5b9a60-72a8-411a-aefe-2e44369cde84"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:02:25 crc kubenswrapper[4558]: I0120 17:02:25.367574 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c5b9a60-72a8-411a-aefe-2e44369cde84-config-data\") pod \"3c5b9a60-72a8-411a-aefe-2e44369cde84\" (UID: \"3c5b9a60-72a8-411a-aefe-2e44369cde84\") " Jan 20 17:02:25 crc kubenswrapper[4558]: I0120 17:02:25.367592 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3c5b9a60-72a8-411a-aefe-2e44369cde84-internal-tls-certs\") pod \"3c5b9a60-72a8-411a-aefe-2e44369cde84\" (UID: \"3c5b9a60-72a8-411a-aefe-2e44369cde84\") " Jan 20 17:02:25 crc kubenswrapper[4558]: I0120 17:02:25.367619 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/60f62690-3d64-489d-81cc-9f4df72d8de2-combined-ca-bundle\") pod \"60f62690-3d64-489d-81cc-9f4df72d8de2\" (UID: \"60f62690-3d64-489d-81cc-9f4df72d8de2\") " Jan 20 17:02:25 crc kubenswrapper[4558]: I0120 17:02:25.367640 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/60f62690-3d64-489d-81cc-9f4df72d8de2-config-data\") pod \"60f62690-3d64-489d-81cc-9f4df72d8de2\" (UID: \"60f62690-3d64-489d-81cc-9f4df72d8de2\") " Jan 20 17:02:25 crc kubenswrapper[4558]: I0120 17:02:25.367665 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/60f62690-3d64-489d-81cc-9f4df72d8de2-httpd-run\") pod \"60f62690-3d64-489d-81cc-9f4df72d8de2\" (UID: \"60f62690-3d64-489d-81cc-9f4df72d8de2\") " Jan 20 17:02:25 crc kubenswrapper[4558]: I0120 17:02:25.367924 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/60f62690-3d64-489d-81cc-9f4df72d8de2-logs" (OuterVolumeSpecName: "logs") pod "60f62690-3d64-489d-81cc-9f4df72d8de2" (UID: "60f62690-3d64-489d-81cc-9f4df72d8de2"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:02:25 crc kubenswrapper[4558]: I0120 17:02:25.368061 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fjscm\" (UniqueName: \"kubernetes.io/projected/60f62690-3d64-489d-81cc-9f4df72d8de2-kube-api-access-fjscm\") pod \"60f62690-3d64-489d-81cc-9f4df72d8de2\" (UID: \"60f62690-3d64-489d-81cc-9f4df72d8de2\") " Jan 20 17:02:25 crc kubenswrapper[4558]: I0120 17:02:25.368092 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3c5b9a60-72a8-411a-aefe-2e44369cde84-httpd-run\") pod \"3c5b9a60-72a8-411a-aefe-2e44369cde84\" (UID: \"3c5b9a60-72a8-411a-aefe-2e44369cde84\") " Jan 20 17:02:25 crc kubenswrapper[4558]: I0120 17:02:25.368144 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c5b9a60-72a8-411a-aefe-2e44369cde84-combined-ca-bundle\") pod \"3c5b9a60-72a8-411a-aefe-2e44369cde84\" (UID: \"3c5b9a60-72a8-411a-aefe-2e44369cde84\") " Jan 20 17:02:25 crc kubenswrapper[4558]: I0120 17:02:25.368175 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3c5b9a60-72a8-411a-aefe-2e44369cde84-scripts\") pod \"3c5b9a60-72a8-411a-aefe-2e44369cde84\" (UID: \"3c5b9a60-72a8-411a-aefe-2e44369cde84\") " Jan 20 17:02:25 crc kubenswrapper[4558]: I0120 17:02:25.368192 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"3c5b9a60-72a8-411a-aefe-2e44369cde84\" (UID: \"3c5b9a60-72a8-411a-aefe-2e44369cde84\") " Jan 20 17:02:25 crc kubenswrapper[4558]: I0120 17:02:25.368207 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/60f62690-3d64-489d-81cc-9f4df72d8de2-public-tls-certs\") pod \"60f62690-3d64-489d-81cc-9f4df72d8de2\" (UID: \"60f62690-3d64-489d-81cc-9f4df72d8de2\") " Jan 20 17:02:25 crc kubenswrapper[4558]: I0120 17:02:25.368364 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/60f62690-3d64-489d-81cc-9f4df72d8de2-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "60f62690-3d64-489d-81cc-9f4df72d8de2" (UID: "60f62690-3d64-489d-81cc-9f4df72d8de2"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:02:25 crc kubenswrapper[4558]: I0120 17:02:25.368777 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3c5b9a60-72a8-411a-aefe-2e44369cde84-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:25 crc kubenswrapper[4558]: I0120 17:02:25.368796 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/60f62690-3d64-489d-81cc-9f4df72d8de2-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:25 crc kubenswrapper[4558]: I0120 17:02:25.368805 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/60f62690-3d64-489d-81cc-9f4df72d8de2-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:25 crc kubenswrapper[4558]: I0120 17:02:25.374424 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3c5b9a60-72a8-411a-aefe-2e44369cde84-scripts" (OuterVolumeSpecName: "scripts") pod "3c5b9a60-72a8-411a-aefe-2e44369cde84" (UID: "3c5b9a60-72a8-411a-aefe-2e44369cde84"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:02:25 crc kubenswrapper[4558]: I0120 17:02:25.374564 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "glance") pod "60f62690-3d64-489d-81cc-9f4df72d8de2" (UID: "60f62690-3d64-489d-81cc-9f4df72d8de2"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:02:25 crc kubenswrapper[4558]: I0120 17:02:25.374712 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3c5b9a60-72a8-411a-aefe-2e44369cde84-kube-api-access-r9g6d" (OuterVolumeSpecName: "kube-api-access-r9g6d") pod "3c5b9a60-72a8-411a-aefe-2e44369cde84" (UID: "3c5b9a60-72a8-411a-aefe-2e44369cde84"). InnerVolumeSpecName "kube-api-access-r9g6d". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:02:25 crc kubenswrapper[4558]: I0120 17:02:25.374718 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3c5b9a60-72a8-411a-aefe-2e44369cde84-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "3c5b9a60-72a8-411a-aefe-2e44369cde84" (UID: "3c5b9a60-72a8-411a-aefe-2e44369cde84"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:02:25 crc kubenswrapper[4558]: I0120 17:02:25.376625 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "glance") pod "3c5b9a60-72a8-411a-aefe-2e44369cde84" (UID: "3c5b9a60-72a8-411a-aefe-2e44369cde84"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:02:25 crc kubenswrapper[4558]: I0120 17:02:25.384367 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/60f62690-3d64-489d-81cc-9f4df72d8de2-scripts" (OuterVolumeSpecName: "scripts") pod "60f62690-3d64-489d-81cc-9f4df72d8de2" (UID: "60f62690-3d64-489d-81cc-9f4df72d8de2"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:02:25 crc kubenswrapper[4558]: I0120 17:02:25.398283 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/60f62690-3d64-489d-81cc-9f4df72d8de2-kube-api-access-fjscm" (OuterVolumeSpecName: "kube-api-access-fjscm") pod "60f62690-3d64-489d-81cc-9f4df72d8de2" (UID: "60f62690-3d64-489d-81cc-9f4df72d8de2"). InnerVolumeSpecName "kube-api-access-fjscm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:02:25 crc kubenswrapper[4558]: I0120 17:02:25.400274 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3c5b9a60-72a8-411a-aefe-2e44369cde84-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3c5b9a60-72a8-411a-aefe-2e44369cde84" (UID: "3c5b9a60-72a8-411a-aefe-2e44369cde84"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:02:25 crc kubenswrapper[4558]: I0120 17:02:25.408790 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3c5b9a60-72a8-411a-aefe-2e44369cde84-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "3c5b9a60-72a8-411a-aefe-2e44369cde84" (UID: "3c5b9a60-72a8-411a-aefe-2e44369cde84"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:02:25 crc kubenswrapper[4558]: I0120 17:02:25.412107 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/60f62690-3d64-489d-81cc-9f4df72d8de2-config-data" (OuterVolumeSpecName: "config-data") pod "60f62690-3d64-489d-81cc-9f4df72d8de2" (UID: "60f62690-3d64-489d-81cc-9f4df72d8de2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:02:25 crc kubenswrapper[4558]: I0120 17:02:25.412271 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/60f62690-3d64-489d-81cc-9f4df72d8de2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "60f62690-3d64-489d-81cc-9f4df72d8de2" (UID: "60f62690-3d64-489d-81cc-9f4df72d8de2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:02:25 crc kubenswrapper[4558]: I0120 17:02:25.416448 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/60f62690-3d64-489d-81cc-9f4df72d8de2-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "60f62690-3d64-489d-81cc-9f4df72d8de2" (UID: "60f62690-3d64-489d-81cc-9f4df72d8de2"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:02:25 crc kubenswrapper[4558]: I0120 17:02:25.422534 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3c5b9a60-72a8-411a-aefe-2e44369cde84-config-data" (OuterVolumeSpecName: "config-data") pod "3c5b9a60-72a8-411a-aefe-2e44369cde84" (UID: "3c5b9a60-72a8-411a-aefe-2e44369cde84"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:02:25 crc kubenswrapper[4558]: I0120 17:02:25.469283 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/60f62690-3d64-489d-81cc-9f4df72d8de2-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:25 crc kubenswrapper[4558]: I0120 17:02:25.469305 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c5b9a60-72a8-411a-aefe-2e44369cde84-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:25 crc kubenswrapper[4558]: I0120 17:02:25.469315 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3c5b9a60-72a8-411a-aefe-2e44369cde84-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:25 crc kubenswrapper[4558]: I0120 17:02:25.469324 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/60f62690-3d64-489d-81cc-9f4df72d8de2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:25 crc kubenswrapper[4558]: I0120 17:02:25.469333 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/60f62690-3d64-489d-81cc-9f4df72d8de2-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:25 crc kubenswrapper[4558]: I0120 17:02:25.469341 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fjscm\" (UniqueName: \"kubernetes.io/projected/60f62690-3d64-489d-81cc-9f4df72d8de2-kube-api-access-fjscm\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:25 crc kubenswrapper[4558]: I0120 17:02:25.469349 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3c5b9a60-72a8-411a-aefe-2e44369cde84-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:25 crc kubenswrapper[4558]: I0120 17:02:25.469356 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c5b9a60-72a8-411a-aefe-2e44369cde84-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:25 crc kubenswrapper[4558]: I0120 17:02:25.469364 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3c5b9a60-72a8-411a-aefe-2e44369cde84-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:25 crc kubenswrapper[4558]: I0120 17:02:25.469382 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Jan 20 17:02:25 crc kubenswrapper[4558]: I0120 17:02:25.469389 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/60f62690-3d64-489d-81cc-9f4df72d8de2-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:25 crc kubenswrapper[4558]: I0120 17:02:25.469400 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r9g6d\" (UniqueName: \"kubernetes.io/projected/3c5b9a60-72a8-411a-aefe-2e44369cde84-kube-api-access-r9g6d\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:25 crc kubenswrapper[4558]: I0120 17:02:25.469412 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Jan 20 17:02:25 crc kubenswrapper[4558]: I0120 17:02:25.491495 
4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Jan 20 17:02:25 crc kubenswrapper[4558]: I0120 17:02:25.491881 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc" Jan 20 17:02:25 crc kubenswrapper[4558]: I0120 17:02:25.570658 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:25 crc kubenswrapper[4558]: I0120 17:02:25.570685 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:25 crc kubenswrapper[4558]: I0120 17:02:25.765355 4558 generic.go:334] "Generic (PLEG): container finished" podID="97ced1d2-4a68-4832-b175-82a06d263ce0" containerID="49b1d953e107342a6b4a2ead13c7460b3febff9178ceee056591b0e1435dc0d9" exitCode=0 Jan 20 17:02:25 crc kubenswrapper[4558]: I0120 17:02:25.765407 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-fkvmr" event={"ID":"97ced1d2-4a68-4832-b175-82a06d263ce0","Type":"ContainerDied","Data":"49b1d953e107342a6b4a2ead13c7460b3febff9178ceee056591b0e1435dc0d9"} Jan 20 17:02:25 crc kubenswrapper[4558]: I0120 17:02:25.767533 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"3153c81a-39a0-4250-b53b-154a6a08ccbf","Type":"ContainerStarted","Data":"05509ededdd56f8c84dfeff661cfc5d2c11b221207e6cd3b144d11afd0842490"} Jan 20 17:02:25 crc kubenswrapper[4558]: I0120 17:02:25.775280 4558 generic.go:334] "Generic (PLEG): container finished" podID="3c5b9a60-72a8-411a-aefe-2e44369cde84" containerID="b6a8528cca745ee557a1732567a161b240e4b1a2da9c6be9cb6c07cb9cb77dd0" exitCode=0 Jan 20 17:02:25 crc kubenswrapper[4558]: I0120 17:02:25.775302 4558 generic.go:334] "Generic (PLEG): container finished" podID="3c5b9a60-72a8-411a-aefe-2e44369cde84" containerID="1f07e720da87df55a95ebae79180be8b685ae03e0a0fe1bb02aab9e6634970b7" exitCode=143 Jan 20 17:02:25 crc kubenswrapper[4558]: I0120 17:02:25.775336 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"3c5b9a60-72a8-411a-aefe-2e44369cde84","Type":"ContainerDied","Data":"b6a8528cca745ee557a1732567a161b240e4b1a2da9c6be9cb6c07cb9cb77dd0"} Jan 20 17:02:25 crc kubenswrapper[4558]: I0120 17:02:25.775355 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"3c5b9a60-72a8-411a-aefe-2e44369cde84","Type":"ContainerDied","Data":"1f07e720da87df55a95ebae79180be8b685ae03e0a0fe1bb02aab9e6634970b7"} Jan 20 17:02:25 crc kubenswrapper[4558]: I0120 17:02:25.775364 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"3c5b9a60-72a8-411a-aefe-2e44369cde84","Type":"ContainerDied","Data":"0778d458de2f864328aeba1949a011a6174794051664d10b02d57717281b7917"} Jan 20 17:02:25 crc kubenswrapper[4558]: I0120 17:02:25.775378 4558 scope.go:117] "RemoveContainer" containerID="b6a8528cca745ee557a1732567a161b240e4b1a2da9c6be9cb6c07cb9cb77dd0" Jan 20 17:02:25 crc kubenswrapper[4558]: I0120 17:02:25.775472 4558 util.go:48] "No ready sandbox for pod can be 
found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:02:25 crc kubenswrapper[4558]: I0120 17:02:25.782303 4558 generic.go:334] "Generic (PLEG): container finished" podID="60f62690-3d64-489d-81cc-9f4df72d8de2" containerID="5dbf118996210b555ca5f1c9147db4a4010d4e30ab3a403dc4c53ac87a5dc236" exitCode=0 Jan 20 17:02:25 crc kubenswrapper[4558]: I0120 17:02:25.782323 4558 generic.go:334] "Generic (PLEG): container finished" podID="60f62690-3d64-489d-81cc-9f4df72d8de2" containerID="80313731d842ee7b3e898a7907d796bf070ce325c33e2b622c15260ba6f3930f" exitCode=143 Jan 20 17:02:25 crc kubenswrapper[4558]: I0120 17:02:25.782630 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"60f62690-3d64-489d-81cc-9f4df72d8de2","Type":"ContainerDied","Data":"5dbf118996210b555ca5f1c9147db4a4010d4e30ab3a403dc4c53ac87a5dc236"} Jan 20 17:02:25 crc kubenswrapper[4558]: I0120 17:02:25.783102 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"60f62690-3d64-489d-81cc-9f4df72d8de2","Type":"ContainerDied","Data":"80313731d842ee7b3e898a7907d796bf070ce325c33e2b622c15260ba6f3930f"} Jan 20 17:02:25 crc kubenswrapper[4558]: I0120 17:02:25.783117 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"60f62690-3d64-489d-81cc-9f4df72d8de2","Type":"ContainerDied","Data":"e7c2c33672032f8e3e93b84cec181a314a4c38c2112b9a11303fbb4c920cd775"} Jan 20 17:02:25 crc kubenswrapper[4558]: I0120 17:02:25.782757 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:02:25 crc kubenswrapper[4558]: I0120 17:02:25.996711 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.014640 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.048753 4558 scope.go:117] "RemoveContainer" containerID="1f07e720da87df55a95ebae79180be8b685ae03e0a0fe1bb02aab9e6634970b7" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.049669 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:02:26 crc kubenswrapper[4558]: E0120 17:02:26.049973 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60f62690-3d64-489d-81cc-9f4df72d8de2" containerName="glance-log" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.049991 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="60f62690-3d64-489d-81cc-9f4df72d8de2" containerName="glance-log" Jan 20 17:02:26 crc kubenswrapper[4558]: E0120 17:02:26.050008 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c5b9a60-72a8-411a-aefe-2e44369cde84" containerName="glance-log" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.050013 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c5b9a60-72a8-411a-aefe-2e44369cde84" containerName="glance-log" Jan 20 17:02:26 crc kubenswrapper[4558]: E0120 17:02:26.050028 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c5b9a60-72a8-411a-aefe-2e44369cde84" containerName="glance-httpd" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.050034 4558 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="3c5b9a60-72a8-411a-aefe-2e44369cde84" containerName="glance-httpd" Jan 20 17:02:26 crc kubenswrapper[4558]: E0120 17:02:26.050043 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60f62690-3d64-489d-81cc-9f4df72d8de2" containerName="glance-httpd" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.050048 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="60f62690-3d64-489d-81cc-9f4df72d8de2" containerName="glance-httpd" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.050246 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="60f62690-3d64-489d-81cc-9f4df72d8de2" containerName="glance-log" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.050270 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="60f62690-3d64-489d-81cc-9f4df72d8de2" containerName="glance-httpd" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.050283 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="3c5b9a60-72a8-411a-aefe-2e44369cde84" containerName="glance-log" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.050294 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="3c5b9a60-72a8-411a-aefe-2e44369cde84" containerName="glance-httpd" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.051016 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.057361 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-internal-config-data" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.057529 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-glance-dockercfg-hb57g" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.059113 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-scripts" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.059299 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-internal-svc" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.065328 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.077155 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/25076c04-6d0d-4c02-a58f-de14094b79b5-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"25076c04-6d0d-4c02-a58f-de14094b79b5\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.077201 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"25076c04-6d0d-4c02-a58f-de14094b79b5\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.077237 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/25076c04-6d0d-4c02-a58f-de14094b79b5-scripts\") pod \"glance-default-internal-api-0\" (UID: 
\"25076c04-6d0d-4c02-a58f-de14094b79b5\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.077280 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25076c04-6d0d-4c02-a58f-de14094b79b5-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"25076c04-6d0d-4c02-a58f-de14094b79b5\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.077336 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/25076c04-6d0d-4c02-a58f-de14094b79b5-logs\") pod \"glance-default-internal-api-0\" (UID: \"25076c04-6d0d-4c02-a58f-de14094b79b5\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.077367 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qlfjl\" (UniqueName: \"kubernetes.io/projected/25076c04-6d0d-4c02-a58f-de14094b79b5-kube-api-access-qlfjl\") pod \"glance-default-internal-api-0\" (UID: \"25076c04-6d0d-4c02-a58f-de14094b79b5\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.077407 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/25076c04-6d0d-4c02-a58f-de14094b79b5-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"25076c04-6d0d-4c02-a58f-de14094b79b5\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.077451 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25076c04-6d0d-4c02-a58f-de14094b79b5-config-data\") pod \"glance-default-internal-api-0\" (UID: \"25076c04-6d0d-4c02-a58f-de14094b79b5\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.077584 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.085416 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.091504 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.092055 4558 scope.go:117] "RemoveContainer" containerID="b6a8528cca745ee557a1732567a161b240e4b1a2da9c6be9cb6c07cb9cb77dd0" Jan 20 17:02:26 crc kubenswrapper[4558]: E0120 17:02:26.092413 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b6a8528cca745ee557a1732567a161b240e4b1a2da9c6be9cb6c07cb9cb77dd0\": container with ID starting with b6a8528cca745ee557a1732567a161b240e4b1a2da9c6be9cb6c07cb9cb77dd0 not found: ID does not exist" containerID="b6a8528cca745ee557a1732567a161b240e4b1a2da9c6be9cb6c07cb9cb77dd0" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.092443 4558 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"b6a8528cca745ee557a1732567a161b240e4b1a2da9c6be9cb6c07cb9cb77dd0"} err="failed to get container status \"b6a8528cca745ee557a1732567a161b240e4b1a2da9c6be9cb6c07cb9cb77dd0\": rpc error: code = NotFound desc = could not find container \"b6a8528cca745ee557a1732567a161b240e4b1a2da9c6be9cb6c07cb9cb77dd0\": container with ID starting with b6a8528cca745ee557a1732567a161b240e4b1a2da9c6be9cb6c07cb9cb77dd0 not found: ID does not exist" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.092463 4558 scope.go:117] "RemoveContainer" containerID="1f07e720da87df55a95ebae79180be8b685ae03e0a0fe1bb02aab9e6634970b7" Jan 20 17:02:26 crc kubenswrapper[4558]: E0120 17:02:26.092661 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1f07e720da87df55a95ebae79180be8b685ae03e0a0fe1bb02aab9e6634970b7\": container with ID starting with 1f07e720da87df55a95ebae79180be8b685ae03e0a0fe1bb02aab9e6634970b7 not found: ID does not exist" containerID="1f07e720da87df55a95ebae79180be8b685ae03e0a0fe1bb02aab9e6634970b7" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.092681 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1f07e720da87df55a95ebae79180be8b685ae03e0a0fe1bb02aab9e6634970b7"} err="failed to get container status \"1f07e720da87df55a95ebae79180be8b685ae03e0a0fe1bb02aab9e6634970b7\": rpc error: code = NotFound desc = could not find container \"1f07e720da87df55a95ebae79180be8b685ae03e0a0fe1bb02aab9e6634970b7\": container with ID starting with 1f07e720da87df55a95ebae79180be8b685ae03e0a0fe1bb02aab9e6634970b7 not found: ID does not exist" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.092693 4558 scope.go:117] "RemoveContainer" containerID="b6a8528cca745ee557a1732567a161b240e4b1a2da9c6be9cb6c07cb9cb77dd0" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.092851 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b6a8528cca745ee557a1732567a161b240e4b1a2da9c6be9cb6c07cb9cb77dd0"} err="failed to get container status \"b6a8528cca745ee557a1732567a161b240e4b1a2da9c6be9cb6c07cb9cb77dd0\": rpc error: code = NotFound desc = could not find container \"b6a8528cca745ee557a1732567a161b240e4b1a2da9c6be9cb6c07cb9cb77dd0\": container with ID starting with b6a8528cca745ee557a1732567a161b240e4b1a2da9c6be9cb6c07cb9cb77dd0 not found: ID does not exist" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.092869 4558 scope.go:117] "RemoveContainer" containerID="1f07e720da87df55a95ebae79180be8b685ae03e0a0fe1bb02aab9e6634970b7" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.092874 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.093179 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1f07e720da87df55a95ebae79180be8b685ae03e0a0fe1bb02aab9e6634970b7"} err="failed to get container status \"1f07e720da87df55a95ebae79180be8b685ae03e0a0fe1bb02aab9e6634970b7\": rpc error: code = NotFound desc = could not find container \"1f07e720da87df55a95ebae79180be8b685ae03e0a0fe1bb02aab9e6634970b7\": container with ID starting with 1f07e720da87df55a95ebae79180be8b685ae03e0a0fe1bb02aab9e6634970b7 not found: ID does not exist" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.093201 4558 scope.go:117] "RemoveContainer" containerID="5dbf118996210b555ca5f1c9147db4a4010d4e30ab3a403dc4c53ac87a5dc236" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.094740 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-external-config-data" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.094828 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-public-svc" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.116456 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.133640 4558 scope.go:117] "RemoveContainer" containerID="80313731d842ee7b3e898a7907d796bf070ce325c33e2b622c15260ba6f3930f" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.158385 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-db-sync-tvtwc" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.170101 4558 scope.go:117] "RemoveContainer" containerID="5dbf118996210b555ca5f1c9147db4a4010d4e30ab3a403dc4c53ac87a5dc236" Jan 20 17:02:26 crc kubenswrapper[4558]: E0120 17:02:26.171571 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5dbf118996210b555ca5f1c9147db4a4010d4e30ab3a403dc4c53ac87a5dc236\": container with ID starting with 5dbf118996210b555ca5f1c9147db4a4010d4e30ab3a403dc4c53ac87a5dc236 not found: ID does not exist" containerID="5dbf118996210b555ca5f1c9147db4a4010d4e30ab3a403dc4c53ac87a5dc236" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.171673 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5dbf118996210b555ca5f1c9147db4a4010d4e30ab3a403dc4c53ac87a5dc236"} err="failed to get container status \"5dbf118996210b555ca5f1c9147db4a4010d4e30ab3a403dc4c53ac87a5dc236\": rpc error: code = NotFound desc = could not find container \"5dbf118996210b555ca5f1c9147db4a4010d4e30ab3a403dc4c53ac87a5dc236\": container with ID starting with 5dbf118996210b555ca5f1c9147db4a4010d4e30ab3a403dc4c53ac87a5dc236 not found: ID does not exist" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.171774 4558 scope.go:117] "RemoveContainer" containerID="80313731d842ee7b3e898a7907d796bf070ce325c33e2b622c15260ba6f3930f" Jan 20 17:02:26 crc kubenswrapper[4558]: E0120 17:02:26.176332 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"80313731d842ee7b3e898a7907d796bf070ce325c33e2b622c15260ba6f3930f\": container with ID starting with 
80313731d842ee7b3e898a7907d796bf070ce325c33e2b622c15260ba6f3930f not found: ID does not exist" containerID="80313731d842ee7b3e898a7907d796bf070ce325c33e2b622c15260ba6f3930f" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.176509 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"80313731d842ee7b3e898a7907d796bf070ce325c33e2b622c15260ba6f3930f"} err="failed to get container status \"80313731d842ee7b3e898a7907d796bf070ce325c33e2b622c15260ba6f3930f\": rpc error: code = NotFound desc = could not find container \"80313731d842ee7b3e898a7907d796bf070ce325c33e2b622c15260ba6f3930f\": container with ID starting with 80313731d842ee7b3e898a7907d796bf070ce325c33e2b622c15260ba6f3930f not found: ID does not exist" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.177067 4558 scope.go:117] "RemoveContainer" containerID="5dbf118996210b555ca5f1c9147db4a4010d4e30ab3a403dc4c53ac87a5dc236" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.177708 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5dbf118996210b555ca5f1c9147db4a4010d4e30ab3a403dc4c53ac87a5dc236"} err="failed to get container status \"5dbf118996210b555ca5f1c9147db4a4010d4e30ab3a403dc4c53ac87a5dc236\": rpc error: code = NotFound desc = could not find container \"5dbf118996210b555ca5f1c9147db4a4010d4e30ab3a403dc4c53ac87a5dc236\": container with ID starting with 5dbf118996210b555ca5f1c9147db4a4010d4e30ab3a403dc4c53ac87a5dc236 not found: ID does not exist" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.177747 4558 scope.go:117] "RemoveContainer" containerID="80313731d842ee7b3e898a7907d796bf070ce325c33e2b622c15260ba6f3930f" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.178642 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"80313731d842ee7b3e898a7907d796bf070ce325c33e2b622c15260ba6f3930f"} err="failed to get container status \"80313731d842ee7b3e898a7907d796bf070ce325c33e2b622c15260ba6f3930f\": rpc error: code = NotFound desc = could not find container \"80313731d842ee7b3e898a7907d796bf070ce325c33e2b622c15260ba6f3930f\": container with ID starting with 80313731d842ee7b3e898a7907d796bf070ce325c33e2b622c15260ba6f3930f not found: ID does not exist" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.183940 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6a69309-962d-4a4a-8fd0-e25a8a14cee1-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"b6a69309-962d-4a4a-8fd0-e25a8a14cee1\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.184037 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b6a69309-962d-4a4a-8fd0-e25a8a14cee1-logs\") pod \"glance-default-external-api-0\" (UID: \"b6a69309-962d-4a4a-8fd0-e25a8a14cee1\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.184055 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6a69309-962d-4a4a-8fd0-e25a8a14cee1-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"b6a69309-962d-4a4a-8fd0-e25a8a14cee1\") " 
pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.184087 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b6a69309-962d-4a4a-8fd0-e25a8a14cee1-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"b6a69309-962d-4a4a-8fd0-e25a8a14cee1\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.184122 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6a69309-962d-4a4a-8fd0-e25a8a14cee1-config-data\") pod \"glance-default-external-api-0\" (UID: \"b6a69309-962d-4a4a-8fd0-e25a8a14cee1\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.184144 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/25076c04-6d0d-4c02-a58f-de14094b79b5-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"25076c04-6d0d-4c02-a58f-de14094b79b5\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.184172 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"25076c04-6d0d-4c02-a58f-de14094b79b5\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.184196 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b6a69309-962d-4a4a-8fd0-e25a8a14cee1-scripts\") pod \"glance-default-external-api-0\" (UID: \"b6a69309-962d-4a4a-8fd0-e25a8a14cee1\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.184213 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/25076c04-6d0d-4c02-a58f-de14094b79b5-scripts\") pod \"glance-default-internal-api-0\" (UID: \"25076c04-6d0d-4c02-a58f-de14094b79b5\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.184235 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25076c04-6d0d-4c02-a58f-de14094b79b5-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"25076c04-6d0d-4c02-a58f-de14094b79b5\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.184290 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-external-api-0\" (UID: \"b6a69309-962d-4a4a-8fd0-e25a8a14cee1\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.184308 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lh9dk\" (UniqueName: \"kubernetes.io/projected/b6a69309-962d-4a4a-8fd0-e25a8a14cee1-kube-api-access-lh9dk\") pod 
\"glance-default-external-api-0\" (UID: \"b6a69309-962d-4a4a-8fd0-e25a8a14cee1\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.184343 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/25076c04-6d0d-4c02-a58f-de14094b79b5-logs\") pod \"glance-default-internal-api-0\" (UID: \"25076c04-6d0d-4c02-a58f-de14094b79b5\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.184374 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qlfjl\" (UniqueName: \"kubernetes.io/projected/25076c04-6d0d-4c02-a58f-de14094b79b5-kube-api-access-qlfjl\") pod \"glance-default-internal-api-0\" (UID: \"25076c04-6d0d-4c02-a58f-de14094b79b5\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.184419 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/25076c04-6d0d-4c02-a58f-de14094b79b5-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"25076c04-6d0d-4c02-a58f-de14094b79b5\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.184467 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25076c04-6d0d-4c02-a58f-de14094b79b5-config-data\") pod \"glance-default-internal-api-0\" (UID: \"25076c04-6d0d-4c02-a58f-de14094b79b5\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.188175 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/25076c04-6d0d-4c02-a58f-de14094b79b5-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"25076c04-6d0d-4c02-a58f-de14094b79b5\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.188654 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"25076c04-6d0d-4c02-a58f-de14094b79b5\") device mount path \"/mnt/openstack/pv07\"" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.195284 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/25076c04-6d0d-4c02-a58f-de14094b79b5-logs\") pod \"glance-default-internal-api-0\" (UID: \"25076c04-6d0d-4c02-a58f-de14094b79b5\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.196783 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/25076c04-6d0d-4c02-a58f-de14094b79b5-scripts\") pod \"glance-default-internal-api-0\" (UID: \"25076c04-6d0d-4c02-a58f-de14094b79b5\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.199873 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25076c04-6d0d-4c02-a58f-de14094b79b5-combined-ca-bundle\") 
pod \"glance-default-internal-api-0\" (UID: \"25076c04-6d0d-4c02-a58f-de14094b79b5\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.210341 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qlfjl\" (UniqueName: \"kubernetes.io/projected/25076c04-6d0d-4c02-a58f-de14094b79b5-kube-api-access-qlfjl\") pod \"glance-default-internal-api-0\" (UID: \"25076c04-6d0d-4c02-a58f-de14094b79b5\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.222030 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25076c04-6d0d-4c02-a58f-de14094b79b5-config-data\") pod \"glance-default-internal-api-0\" (UID: \"25076c04-6d0d-4c02-a58f-de14094b79b5\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.229593 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/25076c04-6d0d-4c02-a58f-de14094b79b5-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"25076c04-6d0d-4c02-a58f-de14094b79b5\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.255972 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"25076c04-6d0d-4c02-a58f-de14094b79b5\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.286344 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/0ccba453-b5b1-4a1c-9e37-1c4c991cfb93-db-sync-config-data\") pod \"0ccba453-b5b1-4a1c-9e37-1c4c991cfb93\" (UID: \"0ccba453-b5b1-4a1c-9e37-1c4c991cfb93\") " Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.286396 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ccba453-b5b1-4a1c-9e37-1c4c991cfb93-combined-ca-bundle\") pod \"0ccba453-b5b1-4a1c-9e37-1c4c991cfb93\" (UID: \"0ccba453-b5b1-4a1c-9e37-1c4c991cfb93\") " Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.286431 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f7d47\" (UniqueName: \"kubernetes.io/projected/0ccba453-b5b1-4a1c-9e37-1c4c991cfb93-kube-api-access-f7d47\") pod \"0ccba453-b5b1-4a1c-9e37-1c4c991cfb93\" (UID: \"0ccba453-b5b1-4a1c-9e37-1c4c991cfb93\") " Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.286682 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-external-api-0\" (UID: \"b6a69309-962d-4a4a-8fd0-e25a8a14cee1\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.286709 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lh9dk\" (UniqueName: \"kubernetes.io/projected/b6a69309-962d-4a4a-8fd0-e25a8a14cee1-kube-api-access-lh9dk\") pod \"glance-default-external-api-0\" (UID: \"b6a69309-962d-4a4a-8fd0-e25a8a14cee1\") " 
pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.286802 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6a69309-962d-4a4a-8fd0-e25a8a14cee1-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"b6a69309-962d-4a4a-8fd0-e25a8a14cee1\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.286853 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6a69309-962d-4a4a-8fd0-e25a8a14cee1-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"b6a69309-962d-4a4a-8fd0-e25a8a14cee1\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.286871 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b6a69309-962d-4a4a-8fd0-e25a8a14cee1-logs\") pod \"glance-default-external-api-0\" (UID: \"b6a69309-962d-4a4a-8fd0-e25a8a14cee1\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.286895 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b6a69309-962d-4a4a-8fd0-e25a8a14cee1-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"b6a69309-962d-4a4a-8fd0-e25a8a14cee1\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.286922 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6a69309-962d-4a4a-8fd0-e25a8a14cee1-config-data\") pod \"glance-default-external-api-0\" (UID: \"b6a69309-962d-4a4a-8fd0-e25a8a14cee1\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.286941 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b6a69309-962d-4a4a-8fd0-e25a8a14cee1-scripts\") pod \"glance-default-external-api-0\" (UID: \"b6a69309-962d-4a4a-8fd0-e25a8a14cee1\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.287826 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b6a69309-962d-4a4a-8fd0-e25a8a14cee1-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"b6a69309-962d-4a4a-8fd0-e25a8a14cee1\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.288060 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b6a69309-962d-4a4a-8fd0-e25a8a14cee1-logs\") pod \"glance-default-external-api-0\" (UID: \"b6a69309-962d-4a4a-8fd0-e25a8a14cee1\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.288399 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-external-api-0\" (UID: \"b6a69309-962d-4a4a-8fd0-e25a8a14cee1\") device mount path \"/mnt/openstack/pv01\"" 
pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.289214 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0ccba453-b5b1-4a1c-9e37-1c4c991cfb93-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "0ccba453-b5b1-4a1c-9e37-1c4c991cfb93" (UID: "0ccba453-b5b1-4a1c-9e37-1c4c991cfb93"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.289968 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0ccba453-b5b1-4a1c-9e37-1c4c991cfb93-kube-api-access-f7d47" (OuterVolumeSpecName: "kube-api-access-f7d47") pod "0ccba453-b5b1-4a1c-9e37-1c4c991cfb93" (UID: "0ccba453-b5b1-4a1c-9e37-1c4c991cfb93"). InnerVolumeSpecName "kube-api-access-f7d47". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.290075 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b6a69309-962d-4a4a-8fd0-e25a8a14cee1-scripts\") pod \"glance-default-external-api-0\" (UID: \"b6a69309-962d-4a4a-8fd0-e25a8a14cee1\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.292945 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6a69309-962d-4a4a-8fd0-e25a8a14cee1-config-data\") pod \"glance-default-external-api-0\" (UID: \"b6a69309-962d-4a4a-8fd0-e25a8a14cee1\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.293461 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6a69309-962d-4a4a-8fd0-e25a8a14cee1-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"b6a69309-962d-4a4a-8fd0-e25a8a14cee1\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.295917 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6a69309-962d-4a4a-8fd0-e25a8a14cee1-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"b6a69309-962d-4a4a-8fd0-e25a8a14cee1\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.302736 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lh9dk\" (UniqueName: \"kubernetes.io/projected/b6a69309-962d-4a4a-8fd0-e25a8a14cee1-kube-api-access-lh9dk\") pod \"glance-default-external-api-0\" (UID: \"b6a69309-962d-4a4a-8fd0-e25a8a14cee1\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.308885 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-external-api-0\" (UID: \"b6a69309-962d-4a4a-8fd0-e25a8a14cee1\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.311071 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0ccba453-b5b1-4a1c-9e37-1c4c991cfb93-combined-ca-bundle" (OuterVolumeSpecName: 
"combined-ca-bundle") pod "0ccba453-b5b1-4a1c-9e37-1c4c991cfb93" (UID: "0ccba453-b5b1-4a1c-9e37-1c4c991cfb93"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.388332 4558 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/0ccba453-b5b1-4a1c-9e37-1c4c991cfb93-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.388361 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ccba453-b5b1-4a1c-9e37-1c4c991cfb93-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.388372 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f7d47\" (UniqueName: \"kubernetes.io/projected/0ccba453-b5b1-4a1c-9e37-1c4c991cfb93-kube-api-access-f7d47\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.395906 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.419220 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-db-sync-bdqwq" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.448545 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.574812 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3c5b9a60-72a8-411a-aefe-2e44369cde84" path="/var/lib/kubelet/pods/3c5b9a60-72a8-411a-aefe-2e44369cde84/volumes" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.581928 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="60f62690-3d64-489d-81cc-9f4df72d8de2" path="/var/lib/kubelet/pods/60f62690-3d64-489d-81cc-9f4df72d8de2/volumes" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.589958 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be13d82f-4865-44dc-8a5a-4ecc7f2cabd9-combined-ca-bundle\") pod \"be13d82f-4865-44dc-8a5a-4ecc7f2cabd9\" (UID: \"be13d82f-4865-44dc-8a5a-4ecc7f2cabd9\") " Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.590153 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/be13d82f-4865-44dc-8a5a-4ecc7f2cabd9-logs\") pod \"be13d82f-4865-44dc-8a5a-4ecc7f2cabd9\" (UID: \"be13d82f-4865-44dc-8a5a-4ecc7f2cabd9\") " Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.591371 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f6hhj\" (UniqueName: \"kubernetes.io/projected/be13d82f-4865-44dc-8a5a-4ecc7f2cabd9-kube-api-access-f6hhj\") pod \"be13d82f-4865-44dc-8a5a-4ecc7f2cabd9\" (UID: \"be13d82f-4865-44dc-8a5a-4ecc7f2cabd9\") " Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.590795 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/be13d82f-4865-44dc-8a5a-4ecc7f2cabd9-logs" (OuterVolumeSpecName: "logs") pod "be13d82f-4865-44dc-8a5a-4ecc7f2cabd9" (UID: "be13d82f-4865-44dc-8a5a-4ecc7f2cabd9"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.591796 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/be13d82f-4865-44dc-8a5a-4ecc7f2cabd9-scripts\") pod \"be13d82f-4865-44dc-8a5a-4ecc7f2cabd9\" (UID: \"be13d82f-4865-44dc-8a5a-4ecc7f2cabd9\") " Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.591831 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be13d82f-4865-44dc-8a5a-4ecc7f2cabd9-config-data\") pod \"be13d82f-4865-44dc-8a5a-4ecc7f2cabd9\" (UID: \"be13d82f-4865-44dc-8a5a-4ecc7f2cabd9\") " Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.592637 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/be13d82f-4865-44dc-8a5a-4ecc7f2cabd9-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.593581 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/be13d82f-4865-44dc-8a5a-4ecc7f2cabd9-kube-api-access-f6hhj" (OuterVolumeSpecName: "kube-api-access-f6hhj") pod "be13d82f-4865-44dc-8a5a-4ecc7f2cabd9" (UID: "be13d82f-4865-44dc-8a5a-4ecc7f2cabd9"). InnerVolumeSpecName "kube-api-access-f6hhj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.596114 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be13d82f-4865-44dc-8a5a-4ecc7f2cabd9-scripts" (OuterVolumeSpecName: "scripts") pod "be13d82f-4865-44dc-8a5a-4ecc7f2cabd9" (UID: "be13d82f-4865-44dc-8a5a-4ecc7f2cabd9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.613635 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be13d82f-4865-44dc-8a5a-4ecc7f2cabd9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "be13d82f-4865-44dc-8a5a-4ecc7f2cabd9" (UID: "be13d82f-4865-44dc-8a5a-4ecc7f2cabd9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.627306 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/be13d82f-4865-44dc-8a5a-4ecc7f2cabd9-config-data" (OuterVolumeSpecName: "config-data") pod "be13d82f-4865-44dc-8a5a-4ecc7f2cabd9" (UID: "be13d82f-4865-44dc-8a5a-4ecc7f2cabd9"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.693692 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f6hhj\" (UniqueName: \"kubernetes.io/projected/be13d82f-4865-44dc-8a5a-4ecc7f2cabd9-kube-api-access-f6hhj\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.693719 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/be13d82f-4865-44dc-8a5a-4ecc7f2cabd9-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.693728 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/be13d82f-4865-44dc-8a5a-4ecc7f2cabd9-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.693737 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/be13d82f-4865-44dc-8a5a-4ecc7f2cabd9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.791073 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"3153c81a-39a0-4250-b53b-154a6a08ccbf","Type":"ContainerStarted","Data":"e953f183e89c1ebf59cf5382c9b6fb4c44d30f7154ff1ec6ec58493ff83c1178"} Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.794905 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-sync-tvtwc" event={"ID":"0ccba453-b5b1-4a1c-9e37-1c4c991cfb93","Type":"ContainerDied","Data":"e88377dfc7730541bfaecfcce2415332175ab0557b2d4e2a80de72f12c684ed7"} Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.794932 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e88377dfc7730541bfaecfcce2415332175ab0557b2d4e2a80de72f12c684ed7" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.794996 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-db-sync-tvtwc" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.796709 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-sync-bdqwq" event={"ID":"be13d82f-4865-44dc-8a5a-4ecc7f2cabd9","Type":"ContainerDied","Data":"8209aa75c3d7e523620509d0833d4fa6890ec7c4e9f922ea52746384e53f2366"} Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.796732 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8209aa75c3d7e523620509d0833d4fa6890ec7c4e9f922ea52746384e53f2366" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.796763 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-db-sync-bdqwq" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.802197 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.803007 4558 generic.go:334] "Generic (PLEG): container finished" podID="e2597e53-47a7-4b6a-9fc1-06d6561cd69f" containerID="61908f05aa1a1c1d9c5f59500dcb21ba49538140328cfaf4bca4fda3098e6ad6" exitCode=0 Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.803066 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-sync-qchhz" event={"ID":"e2597e53-47a7-4b6a-9fc1-06d6561cd69f","Type":"ContainerDied","Data":"61908f05aa1a1c1d9c5f59500dcb21ba49538140328cfaf4bca4fda3098e6ad6"} Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.804650 4558 generic.go:334] "Generic (PLEG): container finished" podID="1f9b2eba-d2f3-4391-9420-3d797982db8a" containerID="4f36c092e43707a5550308954378b43c241d5129d5701e92f63c5d91f23f9e25" exitCode=0 Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.804674 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-sync-x2xk2" event={"ID":"1f9b2eba-d2f3-4391-9420-3d797982db8a","Type":"ContainerDied","Data":"4f36c092e43707a5550308954378b43c241d5129d5701e92f63c5d91f23f9e25"} Jan 20 17:02:26 crc kubenswrapper[4558]: W0120 17:02:26.806363 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod25076c04_6d0d_4c02_a58f_de14094b79b5.slice/crio-b5b27ddbc918c18ad0aeb6ee2a137e215568ac839c0f5f060fab85a4c4116768 WatchSource:0}: Error finding container b5b27ddbc918c18ad0aeb6ee2a137e215568ac839c0f5f060fab85a4c4116768: Status 404 returned error can't find the container with id b5b27ddbc918c18ad0aeb6ee2a137e215568ac839c0f5f060fab85a4c4116768 Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.917502 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.934659 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-worker-599d6c8df7-lcnrd"] Jan 20 17:02:26 crc kubenswrapper[4558]: E0120 17:02:26.934964 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be13d82f-4865-44dc-8a5a-4ecc7f2cabd9" containerName="placement-db-sync" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.934981 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="be13d82f-4865-44dc-8a5a-4ecc7f2cabd9" containerName="placement-db-sync" Jan 20 17:02:26 crc kubenswrapper[4558]: E0120 17:02:26.934996 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ccba453-b5b1-4a1c-9e37-1c4c991cfb93" containerName="barbican-db-sync" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.935002 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ccba453-b5b1-4a1c-9e37-1c4c991cfb93" containerName="barbican-db-sync" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.935149 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="be13d82f-4865-44dc-8a5a-4ecc7f2cabd9" containerName="placement-db-sync" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.935176 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="0ccba453-b5b1-4a1c-9e37-1c4c991cfb93" containerName="barbican-db-sync" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 
17:02:26.935878 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-599d6c8df7-lcnrd" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.940156 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-config-data" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.940381 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-barbican-dockercfg-cvbvq" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.940511 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-worker-config-data" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.965538 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-65f6b4bfbb-llhdw"] Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.966856 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-65f6b4bfbb-llhdw" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.969097 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-keystone-listener-config-data" Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.971263 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-worker-599d6c8df7-lcnrd"] Jan 20 17:02:26 crc kubenswrapper[4558]: I0120 17:02:26.978146 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-65f6b4bfbb-llhdw"] Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.039233 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-api-85c96db9fd-dcjxv"] Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.040816 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-api-85c96db9fd-dcjxv" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.050432 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-api-config-data" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.056956 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-api-85c96db9fd-dcjxv"] Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.100495 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bfbf270c-1853-4113-b0e4-6d192abb5c5d-config-data-custom\") pod \"barbican-worker-599d6c8df7-lcnrd\" (UID: \"bfbf270c-1853-4113-b0e4-6d192abb5c5d\") " pod="openstack-kuttl-tests/barbican-worker-599d6c8df7-lcnrd" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.100806 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qr7sr\" (UniqueName: \"kubernetes.io/projected/bc2c7d29-d967-414d-8bb9-1bf70bcacdcd-kube-api-access-qr7sr\") pod \"barbican-keystone-listener-65f6b4bfbb-llhdw\" (UID: \"bc2c7d29-d967-414d-8bb9-1bf70bcacdcd\") " pod="openstack-kuttl-tests/barbican-keystone-listener-65f6b4bfbb-llhdw" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.100850 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc2c7d29-d967-414d-8bb9-1bf70bcacdcd-combined-ca-bundle\") pod \"barbican-keystone-listener-65f6b4bfbb-llhdw\" (UID: \"bc2c7d29-d967-414d-8bb9-1bf70bcacdcd\") " pod="openstack-kuttl-tests/barbican-keystone-listener-65f6b4bfbb-llhdw" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.100875 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bc2c7d29-d967-414d-8bb9-1bf70bcacdcd-config-data-custom\") pod \"barbican-keystone-listener-65f6b4bfbb-llhdw\" (UID: \"bc2c7d29-d967-414d-8bb9-1bf70bcacdcd\") " pod="openstack-kuttl-tests/barbican-keystone-listener-65f6b4bfbb-llhdw" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.100920 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bfbf270c-1853-4113-b0e4-6d192abb5c5d-logs\") pod \"barbican-worker-599d6c8df7-lcnrd\" (UID: \"bfbf270c-1853-4113-b0e4-6d192abb5c5d\") " pod="openstack-kuttl-tests/barbican-worker-599d6c8df7-lcnrd" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.100938 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bc2c7d29-d967-414d-8bb9-1bf70bcacdcd-logs\") pod \"barbican-keystone-listener-65f6b4bfbb-llhdw\" (UID: \"bc2c7d29-d967-414d-8bb9-1bf70bcacdcd\") " pod="openstack-kuttl-tests/barbican-keystone-listener-65f6b4bfbb-llhdw" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.101040 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bfbf270c-1853-4113-b0e4-6d192abb5c5d-combined-ca-bundle\") pod \"barbican-worker-599d6c8df7-lcnrd\" (UID: \"bfbf270c-1853-4113-b0e4-6d192abb5c5d\") " pod="openstack-kuttl-tests/barbican-worker-599d6c8df7-lcnrd" Jan 20 17:02:27 crc 
kubenswrapper[4558]: I0120 17:02:27.101102 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z66v6\" (UniqueName: \"kubernetes.io/projected/bfbf270c-1853-4113-b0e4-6d192abb5c5d-kube-api-access-z66v6\") pod \"barbican-worker-599d6c8df7-lcnrd\" (UID: \"bfbf270c-1853-4113-b0e4-6d192abb5c5d\") " pod="openstack-kuttl-tests/barbican-worker-599d6c8df7-lcnrd" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.101150 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bc2c7d29-d967-414d-8bb9-1bf70bcacdcd-config-data\") pod \"barbican-keystone-listener-65f6b4bfbb-llhdw\" (UID: \"bc2c7d29-d967-414d-8bb9-1bf70bcacdcd\") " pod="openstack-kuttl-tests/barbican-keystone-listener-65f6b4bfbb-llhdw" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.101191 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bfbf270c-1853-4113-b0e4-6d192abb5c5d-config-data\") pod \"barbican-worker-599d6c8df7-lcnrd\" (UID: \"bfbf270c-1853-4113-b0e4-6d192abb5c5d\") " pod="openstack-kuttl-tests/barbican-worker-599d6c8df7-lcnrd" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.157093 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-fkvmr" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.202379 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc2c7d29-d967-414d-8bb9-1bf70bcacdcd-combined-ca-bundle\") pod \"barbican-keystone-listener-65f6b4bfbb-llhdw\" (UID: \"bc2c7d29-d967-414d-8bb9-1bf70bcacdcd\") " pod="openstack-kuttl-tests/barbican-keystone-listener-65f6b4bfbb-llhdw" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.202426 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bc2c7d29-d967-414d-8bb9-1bf70bcacdcd-config-data-custom\") pod \"barbican-keystone-listener-65f6b4bfbb-llhdw\" (UID: \"bc2c7d29-d967-414d-8bb9-1bf70bcacdcd\") " pod="openstack-kuttl-tests/barbican-keystone-listener-65f6b4bfbb-llhdw" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.202450 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5ae48763-b66b-4038-855d-f700a2dbe040-logs\") pod \"barbican-api-85c96db9fd-dcjxv\" (UID: \"5ae48763-b66b-4038-855d-f700a2dbe040\") " pod="openstack-kuttl-tests/barbican-api-85c96db9fd-dcjxv" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.202495 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bfbf270c-1853-4113-b0e4-6d192abb5c5d-logs\") pod \"barbican-worker-599d6c8df7-lcnrd\" (UID: \"bfbf270c-1853-4113-b0e4-6d192abb5c5d\") " pod="openstack-kuttl-tests/barbican-worker-599d6c8df7-lcnrd" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.202513 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bc2c7d29-d967-414d-8bb9-1bf70bcacdcd-logs\") pod \"barbican-keystone-listener-65f6b4bfbb-llhdw\" (UID: \"bc2c7d29-d967-414d-8bb9-1bf70bcacdcd\") " pod="openstack-kuttl-tests/barbican-keystone-listener-65f6b4bfbb-llhdw" 
Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.202535 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bfbf270c-1853-4113-b0e4-6d192abb5c5d-combined-ca-bundle\") pod \"barbican-worker-599d6c8df7-lcnrd\" (UID: \"bfbf270c-1853-4113-b0e4-6d192abb5c5d\") " pod="openstack-kuttl-tests/barbican-worker-599d6c8df7-lcnrd" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.202559 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z66v6\" (UniqueName: \"kubernetes.io/projected/bfbf270c-1853-4113-b0e4-6d192abb5c5d-kube-api-access-z66v6\") pod \"barbican-worker-599d6c8df7-lcnrd\" (UID: \"bfbf270c-1853-4113-b0e4-6d192abb5c5d\") " pod="openstack-kuttl-tests/barbican-worker-599d6c8df7-lcnrd" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.202581 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-52t9z\" (UniqueName: \"kubernetes.io/projected/5ae48763-b66b-4038-855d-f700a2dbe040-kube-api-access-52t9z\") pod \"barbican-api-85c96db9fd-dcjxv\" (UID: \"5ae48763-b66b-4038-855d-f700a2dbe040\") " pod="openstack-kuttl-tests/barbican-api-85c96db9fd-dcjxv" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.202597 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bc2c7d29-d967-414d-8bb9-1bf70bcacdcd-config-data\") pod \"barbican-keystone-listener-65f6b4bfbb-llhdw\" (UID: \"bc2c7d29-d967-414d-8bb9-1bf70bcacdcd\") " pod="openstack-kuttl-tests/barbican-keystone-listener-65f6b4bfbb-llhdw" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.202613 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bfbf270c-1853-4113-b0e4-6d192abb5c5d-config-data\") pod \"barbican-worker-599d6c8df7-lcnrd\" (UID: \"bfbf270c-1853-4113-b0e4-6d192abb5c5d\") " pod="openstack-kuttl-tests/barbican-worker-599d6c8df7-lcnrd" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.202637 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bfbf270c-1853-4113-b0e4-6d192abb5c5d-config-data-custom\") pod \"barbican-worker-599d6c8df7-lcnrd\" (UID: \"bfbf270c-1853-4113-b0e4-6d192abb5c5d\") " pod="openstack-kuttl-tests/barbican-worker-599d6c8df7-lcnrd" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.202651 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ae48763-b66b-4038-855d-f700a2dbe040-combined-ca-bundle\") pod \"barbican-api-85c96db9fd-dcjxv\" (UID: \"5ae48763-b66b-4038-855d-f700a2dbe040\") " pod="openstack-kuttl-tests/barbican-api-85c96db9fd-dcjxv" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.202669 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ae48763-b66b-4038-855d-f700a2dbe040-config-data\") pod \"barbican-api-85c96db9fd-dcjxv\" (UID: \"5ae48763-b66b-4038-855d-f700a2dbe040\") " pod="openstack-kuttl-tests/barbican-api-85c96db9fd-dcjxv" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.202684 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/5ae48763-b66b-4038-855d-f700a2dbe040-config-data-custom\") pod \"barbican-api-85c96db9fd-dcjxv\" (UID: \"5ae48763-b66b-4038-855d-f700a2dbe040\") " pod="openstack-kuttl-tests/barbican-api-85c96db9fd-dcjxv" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.202708 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qr7sr\" (UniqueName: \"kubernetes.io/projected/bc2c7d29-d967-414d-8bb9-1bf70bcacdcd-kube-api-access-qr7sr\") pod \"barbican-keystone-listener-65f6b4bfbb-llhdw\" (UID: \"bc2c7d29-d967-414d-8bb9-1bf70bcacdcd\") " pod="openstack-kuttl-tests/barbican-keystone-listener-65f6b4bfbb-llhdw" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.203587 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bfbf270c-1853-4113-b0e4-6d192abb5c5d-logs\") pod \"barbican-worker-599d6c8df7-lcnrd\" (UID: \"bfbf270c-1853-4113-b0e4-6d192abb5c5d\") " pod="openstack-kuttl-tests/barbican-worker-599d6c8df7-lcnrd" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.203846 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bc2c7d29-d967-414d-8bb9-1bf70bcacdcd-logs\") pod \"barbican-keystone-listener-65f6b4bfbb-llhdw\" (UID: \"bc2c7d29-d967-414d-8bb9-1bf70bcacdcd\") " pod="openstack-kuttl-tests/barbican-keystone-listener-65f6b4bfbb-llhdw" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.209784 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bc2c7d29-d967-414d-8bb9-1bf70bcacdcd-config-data-custom\") pod \"barbican-keystone-listener-65f6b4bfbb-llhdw\" (UID: \"bc2c7d29-d967-414d-8bb9-1bf70bcacdcd\") " pod="openstack-kuttl-tests/barbican-keystone-listener-65f6b4bfbb-llhdw" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.210788 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bc2c7d29-d967-414d-8bb9-1bf70bcacdcd-config-data\") pod \"barbican-keystone-listener-65f6b4bfbb-llhdw\" (UID: \"bc2c7d29-d967-414d-8bb9-1bf70bcacdcd\") " pod="openstack-kuttl-tests/barbican-keystone-listener-65f6b4bfbb-llhdw" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.212587 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bfbf270c-1853-4113-b0e4-6d192abb5c5d-config-data-custom\") pod \"barbican-worker-599d6c8df7-lcnrd\" (UID: \"bfbf270c-1853-4113-b0e4-6d192abb5c5d\") " pod="openstack-kuttl-tests/barbican-worker-599d6c8df7-lcnrd" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.214330 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bfbf270c-1853-4113-b0e4-6d192abb5c5d-config-data\") pod \"barbican-worker-599d6c8df7-lcnrd\" (UID: \"bfbf270c-1853-4113-b0e4-6d192abb5c5d\") " pod="openstack-kuttl-tests/barbican-worker-599d6c8df7-lcnrd" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.215566 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qr7sr\" (UniqueName: \"kubernetes.io/projected/bc2c7d29-d967-414d-8bb9-1bf70bcacdcd-kube-api-access-qr7sr\") pod \"barbican-keystone-listener-65f6b4bfbb-llhdw\" (UID: \"bc2c7d29-d967-414d-8bb9-1bf70bcacdcd\") " pod="openstack-kuttl-tests/barbican-keystone-listener-65f6b4bfbb-llhdw" Jan 20 17:02:27 crc 
kubenswrapper[4558]: I0120 17:02:27.217062 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc2c7d29-d967-414d-8bb9-1bf70bcacdcd-combined-ca-bundle\") pod \"barbican-keystone-listener-65f6b4bfbb-llhdw\" (UID: \"bc2c7d29-d967-414d-8bb9-1bf70bcacdcd\") " pod="openstack-kuttl-tests/barbican-keystone-listener-65f6b4bfbb-llhdw" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.234353 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z66v6\" (UniqueName: \"kubernetes.io/projected/bfbf270c-1853-4113-b0e4-6d192abb5c5d-kube-api-access-z66v6\") pod \"barbican-worker-599d6c8df7-lcnrd\" (UID: \"bfbf270c-1853-4113-b0e4-6d192abb5c5d\") " pod="openstack-kuttl-tests/barbican-worker-599d6c8df7-lcnrd" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.234434 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bfbf270c-1853-4113-b0e4-6d192abb5c5d-combined-ca-bundle\") pod \"barbican-worker-599d6c8df7-lcnrd\" (UID: \"bfbf270c-1853-4113-b0e4-6d192abb5c5d\") " pod="openstack-kuttl-tests/barbican-worker-599d6c8df7-lcnrd" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.274455 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-599d6c8df7-lcnrd" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.286046 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-65f6b4bfbb-llhdw" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.305655 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/97ced1d2-4a68-4832-b175-82a06d263ce0-fernet-keys\") pod \"97ced1d2-4a68-4832-b175-82a06d263ce0\" (UID: \"97ced1d2-4a68-4832-b175-82a06d263ce0\") " Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.305707 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/97ced1d2-4a68-4832-b175-82a06d263ce0-scripts\") pod \"97ced1d2-4a68-4832-b175-82a06d263ce0\" (UID: \"97ced1d2-4a68-4832-b175-82a06d263ce0\") " Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.305904 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/97ced1d2-4a68-4832-b175-82a06d263ce0-credential-keys\") pod \"97ced1d2-4a68-4832-b175-82a06d263ce0\" (UID: \"97ced1d2-4a68-4832-b175-82a06d263ce0\") " Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.306030 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jsdrc\" (UniqueName: \"kubernetes.io/projected/97ced1d2-4a68-4832-b175-82a06d263ce0-kube-api-access-jsdrc\") pod \"97ced1d2-4a68-4832-b175-82a06d263ce0\" (UID: \"97ced1d2-4a68-4832-b175-82a06d263ce0\") " Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.306555 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97ced1d2-4a68-4832-b175-82a06d263ce0-combined-ca-bundle\") pod \"97ced1d2-4a68-4832-b175-82a06d263ce0\" (UID: \"97ced1d2-4a68-4832-b175-82a06d263ce0\") " Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.306580 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/97ced1d2-4a68-4832-b175-82a06d263ce0-config-data\") pod \"97ced1d2-4a68-4832-b175-82a06d263ce0\" (UID: \"97ced1d2-4a68-4832-b175-82a06d263ce0\") " Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.306841 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-52t9z\" (UniqueName: \"kubernetes.io/projected/5ae48763-b66b-4038-855d-f700a2dbe040-kube-api-access-52t9z\") pod \"barbican-api-85c96db9fd-dcjxv\" (UID: \"5ae48763-b66b-4038-855d-f700a2dbe040\") " pod="openstack-kuttl-tests/barbican-api-85c96db9fd-dcjxv" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.306879 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ae48763-b66b-4038-855d-f700a2dbe040-combined-ca-bundle\") pod \"barbican-api-85c96db9fd-dcjxv\" (UID: \"5ae48763-b66b-4038-855d-f700a2dbe040\") " pod="openstack-kuttl-tests/barbican-api-85c96db9fd-dcjxv" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.306896 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ae48763-b66b-4038-855d-f700a2dbe040-config-data\") pod \"barbican-api-85c96db9fd-dcjxv\" (UID: \"5ae48763-b66b-4038-855d-f700a2dbe040\") " pod="openstack-kuttl-tests/barbican-api-85c96db9fd-dcjxv" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.306912 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5ae48763-b66b-4038-855d-f700a2dbe040-config-data-custom\") pod \"barbican-api-85c96db9fd-dcjxv\" (UID: \"5ae48763-b66b-4038-855d-f700a2dbe040\") " pod="openstack-kuttl-tests/barbican-api-85c96db9fd-dcjxv" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.306954 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5ae48763-b66b-4038-855d-f700a2dbe040-logs\") pod \"barbican-api-85c96db9fd-dcjxv\" (UID: \"5ae48763-b66b-4038-855d-f700a2dbe040\") " pod="openstack-kuttl-tests/barbican-api-85c96db9fd-dcjxv" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.308271 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5ae48763-b66b-4038-855d-f700a2dbe040-logs\") pod \"barbican-api-85c96db9fd-dcjxv\" (UID: \"5ae48763-b66b-4038-855d-f700a2dbe040\") " pod="openstack-kuttl-tests/barbican-api-85c96db9fd-dcjxv" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.312458 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/97ced1d2-4a68-4832-b175-82a06d263ce0-scripts" (OuterVolumeSpecName: "scripts") pod "97ced1d2-4a68-4832-b175-82a06d263ce0" (UID: "97ced1d2-4a68-4832-b175-82a06d263ce0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.312609 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/97ced1d2-4a68-4832-b175-82a06d263ce0-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "97ced1d2-4a68-4832-b175-82a06d263ce0" (UID: "97ced1d2-4a68-4832-b175-82a06d263ce0"). InnerVolumeSpecName "credential-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.314501 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5ae48763-b66b-4038-855d-f700a2dbe040-config-data-custom\") pod \"barbican-api-85c96db9fd-dcjxv\" (UID: \"5ae48763-b66b-4038-855d-f700a2dbe040\") " pod="openstack-kuttl-tests/barbican-api-85c96db9fd-dcjxv" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.314830 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/97ced1d2-4a68-4832-b175-82a06d263ce0-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "97ced1d2-4a68-4832-b175-82a06d263ce0" (UID: "97ced1d2-4a68-4832-b175-82a06d263ce0"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.315814 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ae48763-b66b-4038-855d-f700a2dbe040-config-data\") pod \"barbican-api-85c96db9fd-dcjxv\" (UID: \"5ae48763-b66b-4038-855d-f700a2dbe040\") " pod="openstack-kuttl-tests/barbican-api-85c96db9fd-dcjxv" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.318015 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ae48763-b66b-4038-855d-f700a2dbe040-combined-ca-bundle\") pod \"barbican-api-85c96db9fd-dcjxv\" (UID: \"5ae48763-b66b-4038-855d-f700a2dbe040\") " pod="openstack-kuttl-tests/barbican-api-85c96db9fd-dcjxv" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.318581 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/97ced1d2-4a68-4832-b175-82a06d263ce0-kube-api-access-jsdrc" (OuterVolumeSpecName: "kube-api-access-jsdrc") pod "97ced1d2-4a68-4832-b175-82a06d263ce0" (UID: "97ced1d2-4a68-4832-b175-82a06d263ce0"). InnerVolumeSpecName "kube-api-access-jsdrc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.320984 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-52t9z\" (UniqueName: \"kubernetes.io/projected/5ae48763-b66b-4038-855d-f700a2dbe040-kube-api-access-52t9z\") pod \"barbican-api-85c96db9fd-dcjxv\" (UID: \"5ae48763-b66b-4038-855d-f700a2dbe040\") " pod="openstack-kuttl-tests/barbican-api-85c96db9fd-dcjxv" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.329829 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.329865 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.334021 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/97ced1d2-4a68-4832-b175-82a06d263ce0-config-data" (OuterVolumeSpecName: "config-data") pod "97ced1d2-4a68-4832-b175-82a06d263ce0" (UID: "97ced1d2-4a68-4832-b175-82a06d263ce0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.343046 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/97ced1d2-4a68-4832-b175-82a06d263ce0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "97ced1d2-4a68-4832-b175-82a06d263ce0" (UID: "97ced1d2-4a68-4832-b175-82a06d263ce0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.363519 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-api-85c96db9fd-dcjxv" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.409027 4558 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/97ced1d2-4a68-4832-b175-82a06d263ce0-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.409854 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jsdrc\" (UniqueName: \"kubernetes.io/projected/97ced1d2-4a68-4832-b175-82a06d263ce0-kube-api-access-jsdrc\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.409873 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/97ced1d2-4a68-4832-b175-82a06d263ce0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.409883 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/97ced1d2-4a68-4832-b175-82a06d263ce0-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.409898 4558 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/97ced1d2-4a68-4832-b175-82a06d263ce0-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.409907 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/97ced1d2-4a68-4832-b175-82a06d263ce0-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.602818 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/placement-9b48ffd74-btjvf"] Jan 20 17:02:27 crc kubenswrapper[4558]: E0120 17:02:27.603760 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97ced1d2-4a68-4832-b175-82a06d263ce0" containerName="keystone-bootstrap" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.603828 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="97ced1d2-4a68-4832-b175-82a06d263ce0" containerName="keystone-bootstrap" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.604050 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="97ced1d2-4a68-4832-b175-82a06d263ce0" containerName="keystone-bootstrap" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.607424 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-9b48ffd74-btjvf" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.610400 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"placement-config-data" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.610561 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"placement-placement-dockercfg-skc27" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.610457 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"placement-scripts" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.611185 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-9b48ffd74-btjvf"] Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.715657 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6dfbc47c-776a-4037-b444-06871e236fa8-scripts\") pod \"placement-9b48ffd74-btjvf\" (UID: \"6dfbc47c-776a-4037-b444-06871e236fa8\") " pod="openstack-kuttl-tests/placement-9b48ffd74-btjvf" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.715732 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6dfbc47c-776a-4037-b444-06871e236fa8-logs\") pod \"placement-9b48ffd74-btjvf\" (UID: \"6dfbc47c-776a-4037-b444-06871e236fa8\") " pod="openstack-kuttl-tests/placement-9b48ffd74-btjvf" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.715773 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zcghn\" (UniqueName: \"kubernetes.io/projected/6dfbc47c-776a-4037-b444-06871e236fa8-kube-api-access-zcghn\") pod \"placement-9b48ffd74-btjvf\" (UID: \"6dfbc47c-776a-4037-b444-06871e236fa8\") " pod="openstack-kuttl-tests/placement-9b48ffd74-btjvf" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.715789 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6dfbc47c-776a-4037-b444-06871e236fa8-config-data\") pod \"placement-9b48ffd74-btjvf\" (UID: \"6dfbc47c-776a-4037-b444-06871e236fa8\") " pod="openstack-kuttl-tests/placement-9b48ffd74-btjvf" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.715816 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6dfbc47c-776a-4037-b444-06871e236fa8-combined-ca-bundle\") pod \"placement-9b48ffd74-btjvf\" (UID: \"6dfbc47c-776a-4037-b444-06871e236fa8\") " pod="openstack-kuttl-tests/placement-9b48ffd74-btjvf" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.779485 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-65f6b4bfbb-llhdw"] Jan 20 17:02:27 crc kubenswrapper[4558]: W0120 17:02:27.786452 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbc2c7d29_d967_414d_8bb9_1bf70bcacdcd.slice/crio-41eb8df402352603fb269b2c30ccda60ab2499f18d874b03fb79c1eee7bd7993 WatchSource:0}: Error finding container 41eb8df402352603fb269b2c30ccda60ab2499f18d874b03fb79c1eee7bd7993: Status 404 returned error can't find the container with id 
41eb8df402352603fb269b2c30ccda60ab2499f18d874b03fb79c1eee7bd7993 Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.795668 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-worker-599d6c8df7-lcnrd"] Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.824615 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6dfbc47c-776a-4037-b444-06871e236fa8-scripts\") pod \"placement-9b48ffd74-btjvf\" (UID: \"6dfbc47c-776a-4037-b444-06871e236fa8\") " pod="openstack-kuttl-tests/placement-9b48ffd74-btjvf" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.824692 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6dfbc47c-776a-4037-b444-06871e236fa8-logs\") pod \"placement-9b48ffd74-btjvf\" (UID: \"6dfbc47c-776a-4037-b444-06871e236fa8\") " pod="openstack-kuttl-tests/placement-9b48ffd74-btjvf" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.824725 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zcghn\" (UniqueName: \"kubernetes.io/projected/6dfbc47c-776a-4037-b444-06871e236fa8-kube-api-access-zcghn\") pod \"placement-9b48ffd74-btjvf\" (UID: \"6dfbc47c-776a-4037-b444-06871e236fa8\") " pod="openstack-kuttl-tests/placement-9b48ffd74-btjvf" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.824745 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6dfbc47c-776a-4037-b444-06871e236fa8-config-data\") pod \"placement-9b48ffd74-btjvf\" (UID: \"6dfbc47c-776a-4037-b444-06871e236fa8\") " pod="openstack-kuttl-tests/placement-9b48ffd74-btjvf" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.824768 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6dfbc47c-776a-4037-b444-06871e236fa8-combined-ca-bundle\") pod \"placement-9b48ffd74-btjvf\" (UID: \"6dfbc47c-776a-4037-b444-06871e236fa8\") " pod="openstack-kuttl-tests/placement-9b48ffd74-btjvf" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.825245 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6dfbc47c-776a-4037-b444-06871e236fa8-logs\") pod \"placement-9b48ffd74-btjvf\" (UID: \"6dfbc47c-776a-4037-b444-06871e236fa8\") " pod="openstack-kuttl-tests/placement-9b48ffd74-btjvf" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.831481 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6dfbc47c-776a-4037-b444-06871e236fa8-scripts\") pod \"placement-9b48ffd74-btjvf\" (UID: \"6dfbc47c-776a-4037-b444-06871e236fa8\") " pod="openstack-kuttl-tests/placement-9b48ffd74-btjvf" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.834794 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6dfbc47c-776a-4037-b444-06871e236fa8-combined-ca-bundle\") pod \"placement-9b48ffd74-btjvf\" (UID: \"6dfbc47c-776a-4037-b444-06871e236fa8\") " pod="openstack-kuttl-tests/placement-9b48ffd74-btjvf" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.840240 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" 
event={"ID":"b6a69309-962d-4a4a-8fd0-e25a8a14cee1","Type":"ContainerStarted","Data":"5bb4f48f82b9f26f09753ee0b330ab1b18a6ee75ea0a48f26d817acaf1398abd"} Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.840287 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"b6a69309-962d-4a4a-8fd0-e25a8a14cee1","Type":"ContainerStarted","Data":"d50487afc6d3eddd59efff8521e89952d0cbbfa251cbe70b7e1c3e469fa599c4"} Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.841833 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6dfbc47c-776a-4037-b444-06871e236fa8-config-data\") pod \"placement-9b48ffd74-btjvf\" (UID: \"6dfbc47c-776a-4037-b444-06871e236fa8\") " pod="openstack-kuttl-tests/placement-9b48ffd74-btjvf" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.844528 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-fkvmr" event={"ID":"97ced1d2-4a68-4832-b175-82a06d263ce0","Type":"ContainerDied","Data":"e96708ccbe696068203e788b006d844360990db1bb1a21a87153f341e8ac2d39"} Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.844561 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e96708ccbe696068203e788b006d844360990db1bb1a21a87153f341e8ac2d39" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.844627 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-fkvmr" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.850355 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zcghn\" (UniqueName: \"kubernetes.io/projected/6dfbc47c-776a-4037-b444-06871e236fa8-kube-api-access-zcghn\") pod \"placement-9b48ffd74-btjvf\" (UID: \"6dfbc47c-776a-4037-b444-06871e236fa8\") " pod="openstack-kuttl-tests/placement-9b48ffd74-btjvf" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.869240 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"25076c04-6d0d-4c02-a58f-de14094b79b5","Type":"ContainerStarted","Data":"59d23ff3cd2fbff08e58e91492448fe0e07b2099c36d37f02ee2bf1870adf4fc"} Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.869295 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"25076c04-6d0d-4c02-a58f-de14094b79b5","Type":"ContainerStarted","Data":"b5b27ddbc918c18ad0aeb6ee2a137e215568ac839c0f5f060fab85a4c4116768"} Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.872603 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-65f6b4bfbb-llhdw" event={"ID":"bc2c7d29-d967-414d-8bb9-1bf70bcacdcd","Type":"ContainerStarted","Data":"41eb8df402352603fb269b2c30ccda60ab2499f18d874b03fb79c1eee7bd7993"} Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.881229 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-599d6c8df7-lcnrd" event={"ID":"bfbf270c-1853-4113-b0e4-6d192abb5c5d","Type":"ContainerStarted","Data":"f2bb3dc6999859bba49c421c21bcbd591d4b55a8331c1a1a17376de1eee20a20"} Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.883721 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" 
event={"ID":"3153c81a-39a0-4250-b53b-154a6a08ccbf","Type":"ContainerStarted","Data":"06a748aa8f05f8ede56d2f5afbb1a1b0a772310929fdd0ba5437983cf338895d"} Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.883952 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="3153c81a-39a0-4250-b53b-154a6a08ccbf" containerName="ceilometer-central-agent" containerID="cri-o://7de085a2635c97438a08689bb27694547ed0ecc2e18c9400eb1019ac044e74af" gracePeriod=30 Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.884022 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="3153c81a-39a0-4250-b53b-154a6a08ccbf" containerName="sg-core" containerID="cri-o://e953f183e89c1ebf59cf5382c9b6fb4c44d30f7154ff1ec6ec58493ff83c1178" gracePeriod=30 Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.884041 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="3153c81a-39a0-4250-b53b-154a6a08ccbf" containerName="ceilometer-notification-agent" containerID="cri-o://05509ededdd56f8c84dfeff661cfc5d2c11b221207e6cd3b144d11afd0842490" gracePeriod=30 Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.884105 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="3153c81a-39a0-4250-b53b-154a6a08ccbf" containerName="proxy-httpd" containerID="cri-o://06a748aa8f05f8ede56d2f5afbb1a1b0a772310929fdd0ba5437983cf338895d" gracePeriod=30 Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.885113 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-fkvmr"] Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.894696 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-fkvmr"] Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.900955 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=1.324186646 podStartE2EDuration="5.900945153s" podCreationTimestamp="2026-01-20 17:02:22 +0000 UTC" firstStartedPulling="2026-01-20 17:02:22.96310615 +0000 UTC m=+1236.723444116" lastFinishedPulling="2026-01-20 17:02:27.539864655 +0000 UTC m=+1241.300202623" observedRunningTime="2026-01-20 17:02:27.899174423 +0000 UTC m=+1241.659512390" watchObservedRunningTime="2026-01-20 17:02:27.900945153 +0000 UTC m=+1241.661283121" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.927003 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-9b48ffd74-btjvf" Jan 20 17:02:27 crc kubenswrapper[4558]: I0120 17:02:27.964617 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-api-85c96db9fd-dcjxv"] Jan 20 17:02:28 crc kubenswrapper[4558]: W0120 17:02:28.008802 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5ae48763_b66b_4038_855d_f700a2dbe040.slice/crio-d4918c414ed902dd782099323ffd6f3e98df9568cff4ead858e221b4d080bfe1 WatchSource:0}: Error finding container d4918c414ed902dd782099323ffd6f3e98df9568cff4ead858e221b4d080bfe1: Status 404 returned error can't find the container with id d4918c414ed902dd782099323ffd6f3e98df9568cff4ead858e221b4d080bfe1 Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.010011 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-xq579"] Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.010907 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-xq579" Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.016890 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone" Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.017271 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-config-data" Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.017530 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-scripts" Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.018515 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-keystone-dockercfg-4bhst" Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.021902 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-xq579"] Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.029578 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"osp-secret" Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.136839 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/66dc4df8-a7a7-452b-a8b2-b639d84ad42f-fernet-keys\") pod \"keystone-bootstrap-xq579\" (UID: \"66dc4df8-a7a7-452b-a8b2-b639d84ad42f\") " pod="openstack-kuttl-tests/keystone-bootstrap-xq579" Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.137238 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/66dc4df8-a7a7-452b-a8b2-b639d84ad42f-credential-keys\") pod \"keystone-bootstrap-xq579\" (UID: \"66dc4df8-a7a7-452b-a8b2-b639d84ad42f\") " pod="openstack-kuttl-tests/keystone-bootstrap-xq579" Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.137449 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/66dc4df8-a7a7-452b-a8b2-b639d84ad42f-scripts\") pod \"keystone-bootstrap-xq579\" (UID: \"66dc4df8-a7a7-452b-a8b2-b639d84ad42f\") " pod="openstack-kuttl-tests/keystone-bootstrap-xq579" Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.137521 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/66dc4df8-a7a7-452b-a8b2-b639d84ad42f-config-data\") pod \"keystone-bootstrap-xq579\" (UID: \"66dc4df8-a7a7-452b-a8b2-b639d84ad42f\") " pod="openstack-kuttl-tests/keystone-bootstrap-xq579" Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.137581 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66dc4df8-a7a7-452b-a8b2-b639d84ad42f-combined-ca-bundle\") pod \"keystone-bootstrap-xq579\" (UID: \"66dc4df8-a7a7-452b-a8b2-b639d84ad42f\") " pod="openstack-kuttl-tests/keystone-bootstrap-xq579" Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.137643 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n7wt8\" (UniqueName: \"kubernetes.io/projected/66dc4df8-a7a7-452b-a8b2-b639d84ad42f-kube-api-access-n7wt8\") pod \"keystone-bootstrap-xq579\" (UID: \"66dc4df8-a7a7-452b-a8b2-b639d84ad42f\") " pod="openstack-kuttl-tests/keystone-bootstrap-xq579" Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.219962 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/placement-fbd5dff98-mmhdt"] Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.222573 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-fbd5dff98-mmhdt" Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.225341 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-placement-internal-svc" Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.228328 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-placement-public-svc" Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.230656 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-fbd5dff98-mmhdt"] Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.239392 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/66dc4df8-a7a7-452b-a8b2-b639d84ad42f-scripts\") pod \"keystone-bootstrap-xq579\" (UID: \"66dc4df8-a7a7-452b-a8b2-b639d84ad42f\") " pod="openstack-kuttl-tests/keystone-bootstrap-xq579" Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.239430 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/66dc4df8-a7a7-452b-a8b2-b639d84ad42f-config-data\") pod \"keystone-bootstrap-xq579\" (UID: \"66dc4df8-a7a7-452b-a8b2-b639d84ad42f\") " pod="openstack-kuttl-tests/keystone-bootstrap-xq579" Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.239451 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66dc4df8-a7a7-452b-a8b2-b639d84ad42f-combined-ca-bundle\") pod \"keystone-bootstrap-xq579\" (UID: \"66dc4df8-a7a7-452b-a8b2-b639d84ad42f\") " pod="openstack-kuttl-tests/keystone-bootstrap-xq579" Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.239472 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n7wt8\" (UniqueName: \"kubernetes.io/projected/66dc4df8-a7a7-452b-a8b2-b639d84ad42f-kube-api-access-n7wt8\") pod \"keystone-bootstrap-xq579\" (UID: \"66dc4df8-a7a7-452b-a8b2-b639d84ad42f\") " 
pod="openstack-kuttl-tests/keystone-bootstrap-xq579" Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.239506 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/66dc4df8-a7a7-452b-a8b2-b639d84ad42f-fernet-keys\") pod \"keystone-bootstrap-xq579\" (UID: \"66dc4df8-a7a7-452b-a8b2-b639d84ad42f\") " pod="openstack-kuttl-tests/keystone-bootstrap-xq579" Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.239567 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/66dc4df8-a7a7-452b-a8b2-b639d84ad42f-credential-keys\") pod \"keystone-bootstrap-xq579\" (UID: \"66dc4df8-a7a7-452b-a8b2-b639d84ad42f\") " pod="openstack-kuttl-tests/keystone-bootstrap-xq579" Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.242949 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/66dc4df8-a7a7-452b-a8b2-b639d84ad42f-credential-keys\") pod \"keystone-bootstrap-xq579\" (UID: \"66dc4df8-a7a7-452b-a8b2-b639d84ad42f\") " pod="openstack-kuttl-tests/keystone-bootstrap-xq579" Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.243447 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66dc4df8-a7a7-452b-a8b2-b639d84ad42f-combined-ca-bundle\") pod \"keystone-bootstrap-xq579\" (UID: \"66dc4df8-a7a7-452b-a8b2-b639d84ad42f\") " pod="openstack-kuttl-tests/keystone-bootstrap-xq579" Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.244956 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/66dc4df8-a7a7-452b-a8b2-b639d84ad42f-fernet-keys\") pod \"keystone-bootstrap-xq579\" (UID: \"66dc4df8-a7a7-452b-a8b2-b639d84ad42f\") " pod="openstack-kuttl-tests/keystone-bootstrap-xq579" Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.250415 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/66dc4df8-a7a7-452b-a8b2-b639d84ad42f-scripts\") pod \"keystone-bootstrap-xq579\" (UID: \"66dc4df8-a7a7-452b-a8b2-b639d84ad42f\") " pod="openstack-kuttl-tests/keystone-bootstrap-xq579" Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.255452 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/66dc4df8-a7a7-452b-a8b2-b639d84ad42f-config-data\") pod \"keystone-bootstrap-xq579\" (UID: \"66dc4df8-a7a7-452b-a8b2-b639d84ad42f\") " pod="openstack-kuttl-tests/keystone-bootstrap-xq579" Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.255687 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n7wt8\" (UniqueName: \"kubernetes.io/projected/66dc4df8-a7a7-452b-a8b2-b639d84ad42f-kube-api-access-n7wt8\") pod \"keystone-bootstrap-xq579\" (UID: \"66dc4df8-a7a7-452b-a8b2-b639d84ad42f\") " pod="openstack-kuttl-tests/keystone-bootstrap-xq579" Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.341926 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a98c5227-c8a8-4cbc-8039-461dd14fbd5b-logs\") pod \"placement-fbd5dff98-mmhdt\" (UID: \"a98c5227-c8a8-4cbc-8039-461dd14fbd5b\") " pod="openstack-kuttl-tests/placement-fbd5dff98-mmhdt" Jan 20 17:02:28 crc 
kubenswrapper[4558]: I0120 17:02:28.342284 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dlqw5\" (UniqueName: \"kubernetes.io/projected/a98c5227-c8a8-4cbc-8039-461dd14fbd5b-kube-api-access-dlqw5\") pod \"placement-fbd5dff98-mmhdt\" (UID: \"a98c5227-c8a8-4cbc-8039-461dd14fbd5b\") " pod="openstack-kuttl-tests/placement-fbd5dff98-mmhdt" Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.342449 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a98c5227-c8a8-4cbc-8039-461dd14fbd5b-internal-tls-certs\") pod \"placement-fbd5dff98-mmhdt\" (UID: \"a98c5227-c8a8-4cbc-8039-461dd14fbd5b\") " pod="openstack-kuttl-tests/placement-fbd5dff98-mmhdt" Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.342520 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a98c5227-c8a8-4cbc-8039-461dd14fbd5b-combined-ca-bundle\") pod \"placement-fbd5dff98-mmhdt\" (UID: \"a98c5227-c8a8-4cbc-8039-461dd14fbd5b\") " pod="openstack-kuttl-tests/placement-fbd5dff98-mmhdt" Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.342563 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a98c5227-c8a8-4cbc-8039-461dd14fbd5b-config-data\") pod \"placement-fbd5dff98-mmhdt\" (UID: \"a98c5227-c8a8-4cbc-8039-461dd14fbd5b\") " pod="openstack-kuttl-tests/placement-fbd5dff98-mmhdt" Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.342620 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a98c5227-c8a8-4cbc-8039-461dd14fbd5b-public-tls-certs\") pod \"placement-fbd5dff98-mmhdt\" (UID: \"a98c5227-c8a8-4cbc-8039-461dd14fbd5b\") " pod="openstack-kuttl-tests/placement-fbd5dff98-mmhdt" Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.342811 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a98c5227-c8a8-4cbc-8039-461dd14fbd5b-scripts\") pod \"placement-fbd5dff98-mmhdt\" (UID: \"a98c5227-c8a8-4cbc-8039-461dd14fbd5b\") " pod="openstack-kuttl-tests/placement-fbd5dff98-mmhdt" Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.360258 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-xq579" Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.443905 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a98c5227-c8a8-4cbc-8039-461dd14fbd5b-public-tls-certs\") pod \"placement-fbd5dff98-mmhdt\" (UID: \"a98c5227-c8a8-4cbc-8039-461dd14fbd5b\") " pod="openstack-kuttl-tests/placement-fbd5dff98-mmhdt" Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.443994 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a98c5227-c8a8-4cbc-8039-461dd14fbd5b-scripts\") pod \"placement-fbd5dff98-mmhdt\" (UID: \"a98c5227-c8a8-4cbc-8039-461dd14fbd5b\") " pod="openstack-kuttl-tests/placement-fbd5dff98-mmhdt" Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.444052 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a98c5227-c8a8-4cbc-8039-461dd14fbd5b-logs\") pod \"placement-fbd5dff98-mmhdt\" (UID: \"a98c5227-c8a8-4cbc-8039-461dd14fbd5b\") " pod="openstack-kuttl-tests/placement-fbd5dff98-mmhdt" Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.444090 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dlqw5\" (UniqueName: \"kubernetes.io/projected/a98c5227-c8a8-4cbc-8039-461dd14fbd5b-kube-api-access-dlqw5\") pod \"placement-fbd5dff98-mmhdt\" (UID: \"a98c5227-c8a8-4cbc-8039-461dd14fbd5b\") " pod="openstack-kuttl-tests/placement-fbd5dff98-mmhdt" Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.444136 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a98c5227-c8a8-4cbc-8039-461dd14fbd5b-internal-tls-certs\") pod \"placement-fbd5dff98-mmhdt\" (UID: \"a98c5227-c8a8-4cbc-8039-461dd14fbd5b\") " pod="openstack-kuttl-tests/placement-fbd5dff98-mmhdt" Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.444180 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a98c5227-c8a8-4cbc-8039-461dd14fbd5b-combined-ca-bundle\") pod \"placement-fbd5dff98-mmhdt\" (UID: \"a98c5227-c8a8-4cbc-8039-461dd14fbd5b\") " pod="openstack-kuttl-tests/placement-fbd5dff98-mmhdt" Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.444200 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a98c5227-c8a8-4cbc-8039-461dd14fbd5b-config-data\") pod \"placement-fbd5dff98-mmhdt\" (UID: \"a98c5227-c8a8-4cbc-8039-461dd14fbd5b\") " pod="openstack-kuttl-tests/placement-fbd5dff98-mmhdt" Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.446966 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a98c5227-c8a8-4cbc-8039-461dd14fbd5b-logs\") pod \"placement-fbd5dff98-mmhdt\" (UID: \"a98c5227-c8a8-4cbc-8039-461dd14fbd5b\") " pod="openstack-kuttl-tests/placement-fbd5dff98-mmhdt" Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.448069 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a98c5227-c8a8-4cbc-8039-461dd14fbd5b-scripts\") pod \"placement-fbd5dff98-mmhdt\" (UID: \"a98c5227-c8a8-4cbc-8039-461dd14fbd5b\") " 
pod="openstack-kuttl-tests/placement-fbd5dff98-mmhdt" Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.448513 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a98c5227-c8a8-4cbc-8039-461dd14fbd5b-config-data\") pod \"placement-fbd5dff98-mmhdt\" (UID: \"a98c5227-c8a8-4cbc-8039-461dd14fbd5b\") " pod="openstack-kuttl-tests/placement-fbd5dff98-mmhdt" Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.449268 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a98c5227-c8a8-4cbc-8039-461dd14fbd5b-public-tls-certs\") pod \"placement-fbd5dff98-mmhdt\" (UID: \"a98c5227-c8a8-4cbc-8039-461dd14fbd5b\") " pod="openstack-kuttl-tests/placement-fbd5dff98-mmhdt" Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.469118 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a98c5227-c8a8-4cbc-8039-461dd14fbd5b-combined-ca-bundle\") pod \"placement-fbd5dff98-mmhdt\" (UID: \"a98c5227-c8a8-4cbc-8039-461dd14fbd5b\") " pod="openstack-kuttl-tests/placement-fbd5dff98-mmhdt" Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.469719 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a98c5227-c8a8-4cbc-8039-461dd14fbd5b-internal-tls-certs\") pod \"placement-fbd5dff98-mmhdt\" (UID: \"a98c5227-c8a8-4cbc-8039-461dd14fbd5b\") " pod="openstack-kuttl-tests/placement-fbd5dff98-mmhdt" Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.481770 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dlqw5\" (UniqueName: \"kubernetes.io/projected/a98c5227-c8a8-4cbc-8039-461dd14fbd5b-kube-api-access-dlqw5\") pod \"placement-fbd5dff98-mmhdt\" (UID: \"a98c5227-c8a8-4cbc-8039-461dd14fbd5b\") " pod="openstack-kuttl-tests/placement-fbd5dff98-mmhdt" Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.486838 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-fbd5dff98-mmhdt" Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.515674 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-db-sync-qchhz" Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.523394 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-db-sync-x2xk2" Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.534382 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-9b48ffd74-btjvf"] Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.588009 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="97ced1d2-4a68-4832-b175-82a06d263ce0" path="/var/lib/kubelet/pods/97ced1d2-4a68-4832-b175-82a06d263ce0/volumes" Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.648730 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2597e53-47a7-4b6a-9fc1-06d6561cd69f-combined-ca-bundle\") pod \"e2597e53-47a7-4b6a-9fc1-06d6561cd69f\" (UID: \"e2597e53-47a7-4b6a-9fc1-06d6561cd69f\") " Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.648766 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e2597e53-47a7-4b6a-9fc1-06d6561cd69f-scripts\") pod \"e2597e53-47a7-4b6a-9fc1-06d6561cd69f\" (UID: \"e2597e53-47a7-4b6a-9fc1-06d6561cd69f\") " Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.648808 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e2597e53-47a7-4b6a-9fc1-06d6561cd69f-db-sync-config-data\") pod \"e2597e53-47a7-4b6a-9fc1-06d6561cd69f\" (UID: \"e2597e53-47a7-4b6a-9fc1-06d6561cd69f\") " Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.648881 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2p6xd\" (UniqueName: \"kubernetes.io/projected/e2597e53-47a7-4b6a-9fc1-06d6561cd69f-kube-api-access-2p6xd\") pod \"e2597e53-47a7-4b6a-9fc1-06d6561cd69f\" (UID: \"e2597e53-47a7-4b6a-9fc1-06d6561cd69f\") " Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.648907 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f9b2eba-d2f3-4391-9420-3d797982db8a-combined-ca-bundle\") pod \"1f9b2eba-d2f3-4391-9420-3d797982db8a\" (UID: \"1f9b2eba-d2f3-4391-9420-3d797982db8a\") " Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.648949 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2597e53-47a7-4b6a-9fc1-06d6561cd69f-config-data\") pod \"e2597e53-47a7-4b6a-9fc1-06d6561cd69f\" (UID: \"e2597e53-47a7-4b6a-9fc1-06d6561cd69f\") " Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.648987 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/1f9b2eba-d2f3-4391-9420-3d797982db8a-config\") pod \"1f9b2eba-d2f3-4391-9420-3d797982db8a\" (UID: \"1f9b2eba-d2f3-4391-9420-3d797982db8a\") " Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.649019 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n6tt5\" (UniqueName: \"kubernetes.io/projected/1f9b2eba-d2f3-4391-9420-3d797982db8a-kube-api-access-n6tt5\") pod \"1f9b2eba-d2f3-4391-9420-3d797982db8a\" (UID: \"1f9b2eba-d2f3-4391-9420-3d797982db8a\") " Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.649049 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: 
\"kubernetes.io/host-path/e2597e53-47a7-4b6a-9fc1-06d6561cd69f-etc-machine-id\") pod \"e2597e53-47a7-4b6a-9fc1-06d6561cd69f\" (UID: \"e2597e53-47a7-4b6a-9fc1-06d6561cd69f\") " Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.653246 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e2597e53-47a7-4b6a-9fc1-06d6561cd69f-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "e2597e53-47a7-4b6a-9fc1-06d6561cd69f" (UID: "e2597e53-47a7-4b6a-9fc1-06d6561cd69f"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.693327 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e2597e53-47a7-4b6a-9fc1-06d6561cd69f-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "e2597e53-47a7-4b6a-9fc1-06d6561cd69f" (UID: "e2597e53-47a7-4b6a-9fc1-06d6561cd69f"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.693343 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e2597e53-47a7-4b6a-9fc1-06d6561cd69f-scripts" (OuterVolumeSpecName: "scripts") pod "e2597e53-47a7-4b6a-9fc1-06d6561cd69f" (UID: "e2597e53-47a7-4b6a-9fc1-06d6561cd69f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.694726 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e2597e53-47a7-4b6a-9fc1-06d6561cd69f-kube-api-access-2p6xd" (OuterVolumeSpecName: "kube-api-access-2p6xd") pod "e2597e53-47a7-4b6a-9fc1-06d6561cd69f" (UID: "e2597e53-47a7-4b6a-9fc1-06d6561cd69f"). InnerVolumeSpecName "kube-api-access-2p6xd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.697192 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1f9b2eba-d2f3-4391-9420-3d797982db8a-kube-api-access-n6tt5" (OuterVolumeSpecName: "kube-api-access-n6tt5") pod "1f9b2eba-d2f3-4391-9420-3d797982db8a" (UID: "1f9b2eba-d2f3-4391-9420-3d797982db8a"). InnerVolumeSpecName "kube-api-access-n6tt5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.751750 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2p6xd\" (UniqueName: \"kubernetes.io/projected/e2597e53-47a7-4b6a-9fc1-06d6561cd69f-kube-api-access-2p6xd\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.751778 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n6tt5\" (UniqueName: \"kubernetes.io/projected/1f9b2eba-d2f3-4391-9420-3d797982db8a-kube-api-access-n6tt5\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.751788 4558 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e2597e53-47a7-4b6a-9fc1-06d6561cd69f-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.751797 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e2597e53-47a7-4b6a-9fc1-06d6561cd69f-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.751806 4558 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e2597e53-47a7-4b6a-9fc1-06d6561cd69f-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.818501 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.941426 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-9b48ffd74-btjvf" event={"ID":"6dfbc47c-776a-4037-b444-06871e236fa8","Type":"ContainerStarted","Data":"fcbfc00db1b39d53e5293b14c4e92c6c6a34cea796566e8674dd88daaa1134de"} Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.989093 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3153c81a-39a0-4250-b53b-154a6a08ccbf-scripts\") pod \"3153c81a-39a0-4250-b53b-154a6a08ccbf\" (UID: \"3153c81a-39a0-4250-b53b-154a6a08ccbf\") " Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.989244 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3153c81a-39a0-4250-b53b-154a6a08ccbf-combined-ca-bundle\") pod \"3153c81a-39a0-4250-b53b-154a6a08ccbf\" (UID: \"3153c81a-39a0-4250-b53b-154a6a08ccbf\") " Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.989372 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7r6st\" (UniqueName: \"kubernetes.io/projected/3153c81a-39a0-4250-b53b-154a6a08ccbf-kube-api-access-7r6st\") pod \"3153c81a-39a0-4250-b53b-154a6a08ccbf\" (UID: \"3153c81a-39a0-4250-b53b-154a6a08ccbf\") " Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.989456 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3153c81a-39a0-4250-b53b-154a6a08ccbf-sg-core-conf-yaml\") pod \"3153c81a-39a0-4250-b53b-154a6a08ccbf\" (UID: \"3153c81a-39a0-4250-b53b-154a6a08ccbf\") " Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.989528 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/3153c81a-39a0-4250-b53b-154a6a08ccbf-log-httpd\") pod \"3153c81a-39a0-4250-b53b-154a6a08ccbf\" (UID: \"3153c81a-39a0-4250-b53b-154a6a08ccbf\") " Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.989592 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3153c81a-39a0-4250-b53b-154a6a08ccbf-config-data\") pod \"3153c81a-39a0-4250-b53b-154a6a08ccbf\" (UID: \"3153c81a-39a0-4250-b53b-154a6a08ccbf\") " Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.989856 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3153c81a-39a0-4250-b53b-154a6a08ccbf-run-httpd\") pod \"3153c81a-39a0-4250-b53b-154a6a08ccbf\" (UID: \"3153c81a-39a0-4250-b53b-154a6a08ccbf\") " Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.991593 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3153c81a-39a0-4250-b53b-154a6a08ccbf-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "3153c81a-39a0-4250-b53b-154a6a08ccbf" (UID: "3153c81a-39a0-4250-b53b-154a6a08ccbf"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:02:28 crc kubenswrapper[4558]: I0120 17:02:28.994293 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3153c81a-39a0-4250-b53b-154a6a08ccbf-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "3153c81a-39a0-4250-b53b-154a6a08ccbf" (UID: "3153c81a-39a0-4250-b53b-154a6a08ccbf"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.008277 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:02:29 crc kubenswrapper[4558]: E0120 17:02:29.008761 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3153c81a-39a0-4250-b53b-154a6a08ccbf" containerName="sg-core" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.008772 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="3153c81a-39a0-4250-b53b-154a6a08ccbf" containerName="sg-core" Jan 20 17:02:29 crc kubenswrapper[4558]: E0120 17:02:29.008792 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3153c81a-39a0-4250-b53b-154a6a08ccbf" containerName="ceilometer-notification-agent" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.008797 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="3153c81a-39a0-4250-b53b-154a6a08ccbf" containerName="ceilometer-notification-agent" Jan 20 17:02:29 crc kubenswrapper[4558]: E0120 17:02:29.008812 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3153c81a-39a0-4250-b53b-154a6a08ccbf" containerName="ceilometer-central-agent" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.008817 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="3153c81a-39a0-4250-b53b-154a6a08ccbf" containerName="ceilometer-central-agent" Jan 20 17:02:29 crc kubenswrapper[4558]: E0120 17:02:29.008831 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2597e53-47a7-4b6a-9fc1-06d6561cd69f" containerName="cinder-db-sync" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.008837 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2597e53-47a7-4b6a-9fc1-06d6561cd69f" containerName="cinder-db-sync" Jan 20 17:02:29 crc kubenswrapper[4558]: E0120 
17:02:29.008845 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f9b2eba-d2f3-4391-9420-3d797982db8a" containerName="neutron-db-sync" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.008851 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f9b2eba-d2f3-4391-9420-3d797982db8a" containerName="neutron-db-sync" Jan 20 17:02:29 crc kubenswrapper[4558]: E0120 17:02:29.008859 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3153c81a-39a0-4250-b53b-154a6a08ccbf" containerName="proxy-httpd" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.008864 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="3153c81a-39a0-4250-b53b-154a6a08ccbf" containerName="proxy-httpd" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.008998 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e2597e53-47a7-4b6a-9fc1-06d6561cd69f" containerName="cinder-db-sync" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.009008 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="1f9b2eba-d2f3-4391-9420-3d797982db8a" containerName="neutron-db-sync" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.009018 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="3153c81a-39a0-4250-b53b-154a6a08ccbf" containerName="ceilometer-central-agent" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.009026 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="3153c81a-39a0-4250-b53b-154a6a08ccbf" containerName="sg-core" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.009035 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="3153c81a-39a0-4250-b53b-154a6a08ccbf" containerName="proxy-httpd" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.009049 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="3153c81a-39a0-4250-b53b-154a6a08ccbf" containerName="ceilometer-notification-agent" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.009776 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.009934 4558 generic.go:334] "Generic (PLEG): container finished" podID="3153c81a-39a0-4250-b53b-154a6a08ccbf" containerID="06a748aa8f05f8ede56d2f5afbb1a1b0a772310929fdd0ba5437983cf338895d" exitCode=0 Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.009969 4558 generic.go:334] "Generic (PLEG): container finished" podID="3153c81a-39a0-4250-b53b-154a6a08ccbf" containerID="e953f183e89c1ebf59cf5382c9b6fb4c44d30f7154ff1ec6ec58493ff83c1178" exitCode=2 Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.009977 4558 generic.go:334] "Generic (PLEG): container finished" podID="3153c81a-39a0-4250-b53b-154a6a08ccbf" containerID="05509ededdd56f8c84dfeff661cfc5d2c11b221207e6cd3b144d11afd0842490" exitCode=0 Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.009987 4558 generic.go:334] "Generic (PLEG): container finished" podID="3153c81a-39a0-4250-b53b-154a6a08ccbf" containerID="7de085a2635c97438a08689bb27694547ed0ecc2e18c9400eb1019ac044e74af" exitCode=0 Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.010076 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"3153c81a-39a0-4250-b53b-154a6a08ccbf","Type":"ContainerDied","Data":"06a748aa8f05f8ede56d2f5afbb1a1b0a772310929fdd0ba5437983cf338895d"} Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.010112 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"3153c81a-39a0-4250-b53b-154a6a08ccbf","Type":"ContainerDied","Data":"e953f183e89c1ebf59cf5382c9b6fb4c44d30f7154ff1ec6ec58493ff83c1178"} Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.010123 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"3153c81a-39a0-4250-b53b-154a6a08ccbf","Type":"ContainerDied","Data":"05509ededdd56f8c84dfeff661cfc5d2c11b221207e6cd3b144d11afd0842490"} Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.010132 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"3153c81a-39a0-4250-b53b-154a6a08ccbf","Type":"ContainerDied","Data":"7de085a2635c97438a08689bb27694547ed0ecc2e18c9400eb1019ac044e74af"} Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.010141 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"3153c81a-39a0-4250-b53b-154a6a08ccbf","Type":"ContainerDied","Data":"d26af524473190ce9b550786eb42b07afbf9d6fc4768c9ef25b2e971bcaa3e9b"} Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.010158 4558 scope.go:117] "RemoveContainer" containerID="06a748aa8f05f8ede56d2f5afbb1a1b0a772310929fdd0ba5437983cf338895d" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.010421 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.025467 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-scheduler-config-data" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.027692 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3153c81a-39a0-4250-b53b-154a6a08ccbf-kube-api-access-7r6st" (OuterVolumeSpecName: "kube-api-access-7r6st") pod "3153c81a-39a0-4250-b53b-154a6a08ccbf" (UID: "3153c81a-39a0-4250-b53b-154a6a08ccbf"). 
InnerVolumeSpecName "kube-api-access-7r6st". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.030974 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e2597e53-47a7-4b6a-9fc1-06d6561cd69f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e2597e53-47a7-4b6a-9fc1-06d6561cd69f" (UID: "e2597e53-47a7-4b6a-9fc1-06d6561cd69f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.031032 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-5f668b67df-bhm4t"] Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.032319 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-5f668b67df-bhm4t" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.033409 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"25076c04-6d0d-4c02-a58f-de14094b79b5","Type":"ContainerStarted","Data":"ad62e8788a08fae6beba57e28c588e43f4c96d6537c969b42d6e6044e423af6f"} Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.035217 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-5f668b67df-bhm4t"] Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.035812 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-neutron-ovndbs" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.042412 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.046984 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-xq579"] Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.050392 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3153c81a-39a0-4250-b53b-154a6a08ccbf-scripts" (OuterVolumeSpecName: "scripts") pod "3153c81a-39a0-4250-b53b-154a6a08ccbf" (UID: "3153c81a-39a0-4250-b53b-154a6a08ccbf"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.063391 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-65f6b4bfbb-llhdw" event={"ID":"bc2c7d29-d967-414d-8bb9-1bf70bcacdcd","Type":"ContainerStarted","Data":"ca91ff4227eff077e8448a3f7e4d144f18ec13ff9ee590460efb5d52dcae5abe"} Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.077731 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-internal-api-0" podStartSLOduration=4.077711492 podStartE2EDuration="4.077711492s" podCreationTimestamp="2026-01-20 17:02:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:02:29.06361968 +0000 UTC m=+1242.823957647" watchObservedRunningTime="2026-01-20 17:02:29.077711492 +0000 UTC m=+1242.838049459" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.088986 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-85c96db9fd-dcjxv" event={"ID":"5ae48763-b66b-4038-855d-f700a2dbe040","Type":"ContainerStarted","Data":"f5cc513f503187d3adffbb539aaa9f0f0f46917cc694f0a826736be7e143eb27"} Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.089040 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-85c96db9fd-dcjxv" event={"ID":"5ae48763-b66b-4038-855d-f700a2dbe040","Type":"ContainerStarted","Data":"d4918c414ed902dd782099323ffd6f3e98df9568cff4ead858e221b4d080bfe1"} Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.092377 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/694530bc-dda1-4895-be8d-90c2f02af6cb-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"694530bc-dda1-4895-be8d-90c2f02af6cb\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.092510 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/561ded1f-50d2-4eb9-8ceb-c6f587ee80d6-ovndb-tls-certs\") pod \"neutron-5f668b67df-bhm4t\" (UID: \"561ded1f-50d2-4eb9-8ceb-c6f587ee80d6\") " pod="openstack-kuttl-tests/neutron-5f668b67df-bhm4t" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.092600 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/694530bc-dda1-4895-be8d-90c2f02af6cb-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"694530bc-dda1-4895-be8d-90c2f02af6cb\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.092665 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/694530bc-dda1-4895-be8d-90c2f02af6cb-scripts\") pod \"cinder-scheduler-0\" (UID: \"694530bc-dda1-4895-be8d-90c2f02af6cb\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.092734 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/561ded1f-50d2-4eb9-8ceb-c6f587ee80d6-config\") pod \"neutron-5f668b67df-bhm4t\" (UID: 
\"561ded1f-50d2-4eb9-8ceb-c6f587ee80d6\") " pod="openstack-kuttl-tests/neutron-5f668b67df-bhm4t" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.092797 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/561ded1f-50d2-4eb9-8ceb-c6f587ee80d6-combined-ca-bundle\") pod \"neutron-5f668b67df-bhm4t\" (UID: \"561ded1f-50d2-4eb9-8ceb-c6f587ee80d6\") " pod="openstack-kuttl-tests/neutron-5f668b67df-bhm4t" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.092854 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/694530bc-dda1-4895-be8d-90c2f02af6cb-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"694530bc-dda1-4895-be8d-90c2f02af6cb\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.092917 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gsqqb\" (UniqueName: \"kubernetes.io/projected/694530bc-dda1-4895-be8d-90c2f02af6cb-kube-api-access-gsqqb\") pod \"cinder-scheduler-0\" (UID: \"694530bc-dda1-4895-be8d-90c2f02af6cb\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.093031 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/694530bc-dda1-4895-be8d-90c2f02af6cb-config-data\") pod \"cinder-scheduler-0\" (UID: \"694530bc-dda1-4895-be8d-90c2f02af6cb\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.093099 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gd92p\" (UniqueName: \"kubernetes.io/projected/561ded1f-50d2-4eb9-8ceb-c6f587ee80d6-kube-api-access-gd92p\") pod \"neutron-5f668b67df-bhm4t\" (UID: \"561ded1f-50d2-4eb9-8ceb-c6f587ee80d6\") " pod="openstack-kuttl-tests/neutron-5f668b67df-bhm4t" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.093186 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/561ded1f-50d2-4eb9-8ceb-c6f587ee80d6-httpd-config\") pod \"neutron-5f668b67df-bhm4t\" (UID: \"561ded1f-50d2-4eb9-8ceb-c6f587ee80d6\") " pod="openstack-kuttl-tests/neutron-5f668b67df-bhm4t" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.093305 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3153c81a-39a0-4250-b53b-154a6a08ccbf-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.093376 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/3153c81a-39a0-4250-b53b-154a6a08ccbf-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.093427 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2597e53-47a7-4b6a-9fc1-06d6561cd69f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.093476 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/3153c81a-39a0-4250-b53b-154a6a08ccbf-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.093522 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7r6st\" (UniqueName: \"kubernetes.io/projected/3153c81a-39a0-4250-b53b-154a6a08ccbf-kube-api-access-7r6st\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.096365 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-db-sync-qchhz" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.096841 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-sync-qchhz" event={"ID":"e2597e53-47a7-4b6a-9fc1-06d6561cd69f","Type":"ContainerDied","Data":"1139fc00b65572f8f043c75b41c11e21c6b6e82e42fe077bbe7be5802149714a"} Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.096895 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1139fc00b65572f8f043c75b41c11e21c6b6e82e42fe077bbe7be5802149714a" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.118641 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-599d6c8df7-lcnrd" event={"ID":"bfbf270c-1853-4113-b0e4-6d192abb5c5d","Type":"ContainerStarted","Data":"1656c9f8a5d976f0a32142d5398076a41236be2ec81b6e074cfc899864057916"} Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.118686 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-599d6c8df7-lcnrd" event={"ID":"bfbf270c-1853-4113-b0e4-6d192abb5c5d","Type":"ContainerStarted","Data":"880f39ca241fd9f400e76ce2cfd51b2dad6e3d3b73759064be5dea602507b366"} Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.123828 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.125313 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.130445 4558 scope.go:117] "RemoveContainer" containerID="e953f183e89c1ebf59cf5382c9b6fb4c44d30f7154ff1ec6ec58493ff83c1178" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.131080 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-api-config-data" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.131678 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-sync-x2xk2" event={"ID":"1f9b2eba-d2f3-4391-9420-3d797982db8a","Type":"ContainerDied","Data":"7887cc040f82b4f6d4ca9afe68e21d92b0876db5d05a26ca73a0e603982e939b"} Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.131708 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7887cc040f82b4f6d4ca9afe68e21d92b0876db5d05a26ca73a0e603982e939b" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.131771 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-db-sync-x2xk2" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.141070 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.149618 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-worker-599d6c8df7-lcnrd" podStartSLOduration=3.149599256 podStartE2EDuration="3.149599256s" podCreationTimestamp="2026-01-20 17:02:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:02:29.131451687 +0000 UTC m=+1242.891789655" watchObservedRunningTime="2026-01-20 17:02:29.149599256 +0000 UTC m=+1242.909937223" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.184555 4558 scope.go:117] "RemoveContainer" containerID="05509ededdd56f8c84dfeff661cfc5d2c11b221207e6cd3b144d11afd0842490" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.194846 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/694530bc-dda1-4895-be8d-90c2f02af6cb-scripts\") pod \"cinder-scheduler-0\" (UID: \"694530bc-dda1-4895-be8d-90c2f02af6cb\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.194882 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/561ded1f-50d2-4eb9-8ceb-c6f587ee80d6-config\") pod \"neutron-5f668b67df-bhm4t\" (UID: \"561ded1f-50d2-4eb9-8ceb-c6f587ee80d6\") " pod="openstack-kuttl-tests/neutron-5f668b67df-bhm4t" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.194904 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/561ded1f-50d2-4eb9-8ceb-c6f587ee80d6-combined-ca-bundle\") pod \"neutron-5f668b67df-bhm4t\" (UID: \"561ded1f-50d2-4eb9-8ceb-c6f587ee80d6\") " pod="openstack-kuttl-tests/neutron-5f668b67df-bhm4t" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.194920 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/694530bc-dda1-4895-be8d-90c2f02af6cb-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"694530bc-dda1-4895-be8d-90c2f02af6cb\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.194935 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gsqqb\" (UniqueName: \"kubernetes.io/projected/694530bc-dda1-4895-be8d-90c2f02af6cb-kube-api-access-gsqqb\") pod \"cinder-scheduler-0\" (UID: \"694530bc-dda1-4895-be8d-90c2f02af6cb\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.194964 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nh9gt\" (UniqueName: \"kubernetes.io/projected/6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0-kube-api-access-nh9gt\") pod \"cinder-api-0\" (UID: \"6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.195008 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0-config-data\") pod \"cinder-api-0\" (UID: \"6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.195024 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.195055 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/694530bc-dda1-4895-be8d-90c2f02af6cb-config-data\") pod \"cinder-scheduler-0\" (UID: \"694530bc-dda1-4895-be8d-90c2f02af6cb\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.195076 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0-etc-machine-id\") pod \"cinder-api-0\" (UID: \"6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.195104 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gd92p\" (UniqueName: \"kubernetes.io/projected/561ded1f-50d2-4eb9-8ceb-c6f587ee80d6-kube-api-access-gd92p\") pod \"neutron-5f668b67df-bhm4t\" (UID: \"561ded1f-50d2-4eb9-8ceb-c6f587ee80d6\") " pod="openstack-kuttl-tests/neutron-5f668b67df-bhm4t" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.195120 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/561ded1f-50d2-4eb9-8ceb-c6f587ee80d6-httpd-config\") pod \"neutron-5f668b67df-bhm4t\" (UID: \"561ded1f-50d2-4eb9-8ceb-c6f587ee80d6\") " pod="openstack-kuttl-tests/neutron-5f668b67df-bhm4t" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.195136 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0-config-data-custom\") pod \"cinder-api-0\" (UID: \"6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.195157 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0-logs\") pod \"cinder-api-0\" (UID: \"6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.195187 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/694530bc-dda1-4895-be8d-90c2f02af6cb-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"694530bc-dda1-4895-be8d-90c2f02af6cb\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.195212 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0-scripts\") pod 
\"cinder-api-0\" (UID: \"6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.195235 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/561ded1f-50d2-4eb9-8ceb-c6f587ee80d6-ovndb-tls-certs\") pod \"neutron-5f668b67df-bhm4t\" (UID: \"561ded1f-50d2-4eb9-8ceb-c6f587ee80d6\") " pod="openstack-kuttl-tests/neutron-5f668b67df-bhm4t" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.195269 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/694530bc-dda1-4895-be8d-90c2f02af6cb-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"694530bc-dda1-4895-be8d-90c2f02af6cb\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.199035 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/694530bc-dda1-4895-be8d-90c2f02af6cb-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"694530bc-dda1-4895-be8d-90c2f02af6cb\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.227132 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/694530bc-dda1-4895-be8d-90c2f02af6cb-config-data\") pod \"cinder-scheduler-0\" (UID: \"694530bc-dda1-4895-be8d-90c2f02af6cb\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.227361 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/561ded1f-50d2-4eb9-8ceb-c6f587ee80d6-config\") pod \"neutron-5f668b67df-bhm4t\" (UID: \"561ded1f-50d2-4eb9-8ceb-c6f587ee80d6\") " pod="openstack-kuttl-tests/neutron-5f668b67df-bhm4t" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.227844 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/561ded1f-50d2-4eb9-8ceb-c6f587ee80d6-httpd-config\") pod \"neutron-5f668b67df-bhm4t\" (UID: \"561ded1f-50d2-4eb9-8ceb-c6f587ee80d6\") " pod="openstack-kuttl-tests/neutron-5f668b67df-bhm4t" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.228432 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/561ded1f-50d2-4eb9-8ceb-c6f587ee80d6-combined-ca-bundle\") pod \"neutron-5f668b67df-bhm4t\" (UID: \"561ded1f-50d2-4eb9-8ceb-c6f587ee80d6\") " pod="openstack-kuttl-tests/neutron-5f668b67df-bhm4t" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.228713 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/694530bc-dda1-4895-be8d-90c2f02af6cb-scripts\") pod \"cinder-scheduler-0\" (UID: \"694530bc-dda1-4895-be8d-90c2f02af6cb\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.231242 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/694530bc-dda1-4895-be8d-90c2f02af6cb-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"694530bc-dda1-4895-be8d-90c2f02af6cb\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.231871 
4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/694530bc-dda1-4895-be8d-90c2f02af6cb-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"694530bc-dda1-4895-be8d-90c2f02af6cb\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.232849 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gsqqb\" (UniqueName: \"kubernetes.io/projected/694530bc-dda1-4895-be8d-90c2f02af6cb-kube-api-access-gsqqb\") pod \"cinder-scheduler-0\" (UID: \"694530bc-dda1-4895-be8d-90c2f02af6cb\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.234352 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gd92p\" (UniqueName: \"kubernetes.io/projected/561ded1f-50d2-4eb9-8ceb-c6f587ee80d6-kube-api-access-gd92p\") pod \"neutron-5f668b67df-bhm4t\" (UID: \"561ded1f-50d2-4eb9-8ceb-c6f587ee80d6\") " pod="openstack-kuttl-tests/neutron-5f668b67df-bhm4t" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.237743 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/561ded1f-50d2-4eb9-8ceb-c6f587ee80d6-ovndb-tls-certs\") pod \"neutron-5f668b67df-bhm4t\" (UID: \"561ded1f-50d2-4eb9-8ceb-c6f587ee80d6\") " pod="openstack-kuttl-tests/neutron-5f668b67df-bhm4t" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.246430 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3153c81a-39a0-4250-b53b-154a6a08ccbf-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "3153c81a-39a0-4250-b53b-154a6a08ccbf" (UID: "3153c81a-39a0-4250-b53b-154a6a08ccbf"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.266689 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1f9b2eba-d2f3-4391-9420-3d797982db8a-config" (OuterVolumeSpecName: "config") pod "1f9b2eba-d2f3-4391-9420-3d797982db8a" (UID: "1f9b2eba-d2f3-4391-9420-3d797982db8a"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.291438 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1f9b2eba-d2f3-4391-9420-3d797982db8a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1f9b2eba-d2f3-4391-9420-3d797982db8a" (UID: "1f9b2eba-d2f3-4391-9420-3d797982db8a"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.297010 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nh9gt\" (UniqueName: \"kubernetes.io/projected/6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0-kube-api-access-nh9gt\") pod \"cinder-api-0\" (UID: \"6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.297082 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0-config-data\") pod \"cinder-api-0\" (UID: \"6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.297108 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.297155 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0-etc-machine-id\") pod \"cinder-api-0\" (UID: \"6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.297278 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0-config-data-custom\") pod \"cinder-api-0\" (UID: \"6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.297314 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0-logs\") pod \"cinder-api-0\" (UID: \"6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.297351 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0-scripts\") pod \"cinder-api-0\" (UID: \"6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.297468 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/1f9b2eba-d2f3-4391-9420-3d797982db8a-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.297483 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/3153c81a-39a0-4250-b53b-154a6a08ccbf-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.297494 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f9b2eba-d2f3-4391-9420-3d797982db8a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.297687 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0-etc-machine-id\") pod \"cinder-api-0\" (UID: \"6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.298107 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0-logs\") pod \"cinder-api-0\" (UID: \"6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.301538 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0-scripts\") pod \"cinder-api-0\" (UID: \"6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.306507 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0-config-data-custom\") pod \"cinder-api-0\" (UID: \"6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.306663 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.309180 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0-config-data\") pod \"cinder-api-0\" (UID: \"6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.325450 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nh9gt\" (UniqueName: \"kubernetes.io/projected/6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0-kube-api-access-nh9gt\") pod \"cinder-api-0\" (UID: \"6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.348664 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e2597e53-47a7-4b6a-9fc1-06d6561cd69f-config-data" (OuterVolumeSpecName: "config-data") pod "e2597e53-47a7-4b6a-9fc1-06d6561cd69f" (UID: "e2597e53-47a7-4b6a-9fc1-06d6561cd69f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.367141 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.371121 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-fbd5dff98-mmhdt"] Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.408494 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2597e53-47a7-4b6a-9fc1-06d6561cd69f-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.409233 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-5f668b67df-bhm4t" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.420364 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3153c81a-39a0-4250-b53b-154a6a08ccbf-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3153c81a-39a0-4250-b53b-154a6a08ccbf" (UID: "3153c81a-39a0-4250-b53b-154a6a08ccbf"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.448303 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.511449 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3153c81a-39a0-4250-b53b-154a6a08ccbf-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.536126 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3153c81a-39a0-4250-b53b-154a6a08ccbf-config-data" (OuterVolumeSpecName: "config-data") pod "3153c81a-39a0-4250-b53b-154a6a08ccbf" (UID: "3153c81a-39a0-4250-b53b-154a6a08ccbf"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.576436 4558 scope.go:117] "RemoveContainer" containerID="7de085a2635c97438a08689bb27694547ed0ecc2e18c9400eb1019ac044e74af" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.612911 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3153c81a-39a0-4250-b53b-154a6a08ccbf-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.664471 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.687528 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.691097 4558 scope.go:117] "RemoveContainer" containerID="06a748aa8f05f8ede56d2f5afbb1a1b0a772310929fdd0ba5437983cf338895d" Jan 20 17:02:29 crc kubenswrapper[4558]: E0120 17:02:29.692341 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"06a748aa8f05f8ede56d2f5afbb1a1b0a772310929fdd0ba5437983cf338895d\": container with ID starting with 06a748aa8f05f8ede56d2f5afbb1a1b0a772310929fdd0ba5437983cf338895d not found: ID does not exist" containerID="06a748aa8f05f8ede56d2f5afbb1a1b0a772310929fdd0ba5437983cf338895d" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.692390 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"06a748aa8f05f8ede56d2f5afbb1a1b0a772310929fdd0ba5437983cf338895d"} err="failed to get container status \"06a748aa8f05f8ede56d2f5afbb1a1b0a772310929fdd0ba5437983cf338895d\": rpc error: code = NotFound desc = could not find container \"06a748aa8f05f8ede56d2f5afbb1a1b0a772310929fdd0ba5437983cf338895d\": container with ID starting with 06a748aa8f05f8ede56d2f5afbb1a1b0a772310929fdd0ba5437983cf338895d not found: ID does not exist" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.692420 4558 scope.go:117] "RemoveContainer" 
containerID="e953f183e89c1ebf59cf5382c9b6fb4c44d30f7154ff1ec6ec58493ff83c1178" Jan 20 17:02:29 crc kubenswrapper[4558]: E0120 17:02:29.693593 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e953f183e89c1ebf59cf5382c9b6fb4c44d30f7154ff1ec6ec58493ff83c1178\": container with ID starting with e953f183e89c1ebf59cf5382c9b6fb4c44d30f7154ff1ec6ec58493ff83c1178 not found: ID does not exist" containerID="e953f183e89c1ebf59cf5382c9b6fb4c44d30f7154ff1ec6ec58493ff83c1178" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.693620 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e953f183e89c1ebf59cf5382c9b6fb4c44d30f7154ff1ec6ec58493ff83c1178"} err="failed to get container status \"e953f183e89c1ebf59cf5382c9b6fb4c44d30f7154ff1ec6ec58493ff83c1178\": rpc error: code = NotFound desc = could not find container \"e953f183e89c1ebf59cf5382c9b6fb4c44d30f7154ff1ec6ec58493ff83c1178\": container with ID starting with e953f183e89c1ebf59cf5382c9b6fb4c44d30f7154ff1ec6ec58493ff83c1178 not found: ID does not exist" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.693632 4558 scope.go:117] "RemoveContainer" containerID="05509ededdd56f8c84dfeff661cfc5d2c11b221207e6cd3b144d11afd0842490" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.698649 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:02:29 crc kubenswrapper[4558]: E0120 17:02:29.700102 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"05509ededdd56f8c84dfeff661cfc5d2c11b221207e6cd3b144d11afd0842490\": container with ID starting with 05509ededdd56f8c84dfeff661cfc5d2c11b221207e6cd3b144d11afd0842490 not found: ID does not exist" containerID="05509ededdd56f8c84dfeff661cfc5d2c11b221207e6cd3b144d11afd0842490" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.705859 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"05509ededdd56f8c84dfeff661cfc5d2c11b221207e6cd3b144d11afd0842490"} err="failed to get container status \"05509ededdd56f8c84dfeff661cfc5d2c11b221207e6cd3b144d11afd0842490\": rpc error: code = NotFound desc = could not find container \"05509ededdd56f8c84dfeff661cfc5d2c11b221207e6cd3b144d11afd0842490\": container with ID starting with 05509ededdd56f8c84dfeff661cfc5d2c11b221207e6cd3b144d11afd0842490 not found: ID does not exist" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.705885 4558 scope.go:117] "RemoveContainer" containerID="7de085a2635c97438a08689bb27694547ed0ecc2e18c9400eb1019ac044e74af" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.707924 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:02:29 crc kubenswrapper[4558]: E0120 17:02:29.708281 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7de085a2635c97438a08689bb27694547ed0ecc2e18c9400eb1019ac044e74af\": container with ID starting with 7de085a2635c97438a08689bb27694547ed0ecc2e18c9400eb1019ac044e74af not found: ID does not exist" containerID="7de085a2635c97438a08689bb27694547ed0ecc2e18c9400eb1019ac044e74af" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.708309 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7de085a2635c97438a08689bb27694547ed0ecc2e18c9400eb1019ac044e74af"} err="failed to get container status \"7de085a2635c97438a08689bb27694547ed0ecc2e18c9400eb1019ac044e74af\": rpc error: code = NotFound desc = could not find container \"7de085a2635c97438a08689bb27694547ed0ecc2e18c9400eb1019ac044e74af\": container with ID starting with 7de085a2635c97438a08689bb27694547ed0ecc2e18c9400eb1019ac044e74af not found: ID does not exist" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.708327 4558 scope.go:117] "RemoveContainer" containerID="06a748aa8f05f8ede56d2f5afbb1a1b0a772310929fdd0ba5437983cf338895d" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.709942 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.710104 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"06a748aa8f05f8ede56d2f5afbb1a1b0a772310929fdd0ba5437983cf338895d"} err="failed to get container status \"06a748aa8f05f8ede56d2f5afbb1a1b0a772310929fdd0ba5437983cf338895d\": rpc error: code = NotFound desc = could not find container \"06a748aa8f05f8ede56d2f5afbb1a1b0a772310929fdd0ba5437983cf338895d\": container with ID starting with 06a748aa8f05f8ede56d2f5afbb1a1b0a772310929fdd0ba5437983cf338895d not found: ID does not exist" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.710125 4558 scope.go:117] "RemoveContainer" containerID="e953f183e89c1ebf59cf5382c9b6fb4c44d30f7154ff1ec6ec58493ff83c1178" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.710996 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e953f183e89c1ebf59cf5382c9b6fb4c44d30f7154ff1ec6ec58493ff83c1178"} err="failed to get container status \"e953f183e89c1ebf59cf5382c9b6fb4c44d30f7154ff1ec6ec58493ff83c1178\": rpc error: code = NotFound desc = could not find container \"e953f183e89c1ebf59cf5382c9b6fb4c44d30f7154ff1ec6ec58493ff83c1178\": container with ID starting with e953f183e89c1ebf59cf5382c9b6fb4c44d30f7154ff1ec6ec58493ff83c1178 not found: ID does not exist" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.711018 4558 scope.go:117] "RemoveContainer" containerID="05509ededdd56f8c84dfeff661cfc5d2c11b221207e6cd3b144d11afd0842490" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.712044 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.712007 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"05509ededdd56f8c84dfeff661cfc5d2c11b221207e6cd3b144d11afd0842490"} err="failed to get container status \"05509ededdd56f8c84dfeff661cfc5d2c11b221207e6cd3b144d11afd0842490\": rpc error: code 
= NotFound desc = could not find container \"05509ededdd56f8c84dfeff661cfc5d2c11b221207e6cd3b144d11afd0842490\": container with ID starting with 05509ededdd56f8c84dfeff661cfc5d2c11b221207e6cd3b144d11afd0842490 not found: ID does not exist" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.712503 4558 scope.go:117] "RemoveContainer" containerID="7de085a2635c97438a08689bb27694547ed0ecc2e18c9400eb1019ac044e74af" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.718841 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7de085a2635c97438a08689bb27694547ed0ecc2e18c9400eb1019ac044e74af"} err="failed to get container status \"7de085a2635c97438a08689bb27694547ed0ecc2e18c9400eb1019ac044e74af\": rpc error: code = NotFound desc = could not find container \"7de085a2635c97438a08689bb27694547ed0ecc2e18c9400eb1019ac044e74af\": container with ID starting with 7de085a2635c97438a08689bb27694547ed0ecc2e18c9400eb1019ac044e74af not found: ID does not exist" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.718865 4558 scope.go:117] "RemoveContainer" containerID="06a748aa8f05f8ede56d2f5afbb1a1b0a772310929fdd0ba5437983cf338895d" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.719106 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"06a748aa8f05f8ede56d2f5afbb1a1b0a772310929fdd0ba5437983cf338895d"} err="failed to get container status \"06a748aa8f05f8ede56d2f5afbb1a1b0a772310929fdd0ba5437983cf338895d\": rpc error: code = NotFound desc = could not find container \"06a748aa8f05f8ede56d2f5afbb1a1b0a772310929fdd0ba5437983cf338895d\": container with ID starting with 06a748aa8f05f8ede56d2f5afbb1a1b0a772310929fdd0ba5437983cf338895d not found: ID does not exist" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.719127 4558 scope.go:117] "RemoveContainer" containerID="e953f183e89c1ebf59cf5382c9b6fb4c44d30f7154ff1ec6ec58493ff83c1178" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.719318 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e953f183e89c1ebf59cf5382c9b6fb4c44d30f7154ff1ec6ec58493ff83c1178"} err="failed to get container status \"e953f183e89c1ebf59cf5382c9b6fb4c44d30f7154ff1ec6ec58493ff83c1178\": rpc error: code = NotFound desc = could not find container \"e953f183e89c1ebf59cf5382c9b6fb4c44d30f7154ff1ec6ec58493ff83c1178\": container with ID starting with e953f183e89c1ebf59cf5382c9b6fb4c44d30f7154ff1ec6ec58493ff83c1178 not found: ID does not exist" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.719336 4558 scope.go:117] "RemoveContainer" containerID="05509ededdd56f8c84dfeff661cfc5d2c11b221207e6cd3b144d11afd0842490" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.724136 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"05509ededdd56f8c84dfeff661cfc5d2c11b221207e6cd3b144d11afd0842490"} err="failed to get container status \"05509ededdd56f8c84dfeff661cfc5d2c11b221207e6cd3b144d11afd0842490\": rpc error: code = NotFound desc = could not find container \"05509ededdd56f8c84dfeff661cfc5d2c11b221207e6cd3b144d11afd0842490\": container with ID starting with 05509ededdd56f8c84dfeff661cfc5d2c11b221207e6cd3b144d11afd0842490 not found: ID does not exist" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.724185 4558 scope.go:117] "RemoveContainer" containerID="7de085a2635c97438a08689bb27694547ed0ecc2e18c9400eb1019ac044e74af" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 
17:02:29.726141 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7de085a2635c97438a08689bb27694547ed0ecc2e18c9400eb1019ac044e74af"} err="failed to get container status \"7de085a2635c97438a08689bb27694547ed0ecc2e18c9400eb1019ac044e74af\": rpc error: code = NotFound desc = could not find container \"7de085a2635c97438a08689bb27694547ed0ecc2e18c9400eb1019ac044e74af\": container with ID starting with 7de085a2635c97438a08689bb27694547ed0ecc2e18c9400eb1019ac044e74af not found: ID does not exist" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.726175 4558 scope.go:117] "RemoveContainer" containerID="06a748aa8f05f8ede56d2f5afbb1a1b0a772310929fdd0ba5437983cf338895d" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.726386 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"06a748aa8f05f8ede56d2f5afbb1a1b0a772310929fdd0ba5437983cf338895d"} err="failed to get container status \"06a748aa8f05f8ede56d2f5afbb1a1b0a772310929fdd0ba5437983cf338895d\": rpc error: code = NotFound desc = could not find container \"06a748aa8f05f8ede56d2f5afbb1a1b0a772310929fdd0ba5437983cf338895d\": container with ID starting with 06a748aa8f05f8ede56d2f5afbb1a1b0a772310929fdd0ba5437983cf338895d not found: ID does not exist" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.726405 4558 scope.go:117] "RemoveContainer" containerID="e953f183e89c1ebf59cf5382c9b6fb4c44d30f7154ff1ec6ec58493ff83c1178" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.727350 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e953f183e89c1ebf59cf5382c9b6fb4c44d30f7154ff1ec6ec58493ff83c1178"} err="failed to get container status \"e953f183e89c1ebf59cf5382c9b6fb4c44d30f7154ff1ec6ec58493ff83c1178\": rpc error: code = NotFound desc = could not find container \"e953f183e89c1ebf59cf5382c9b6fb4c44d30f7154ff1ec6ec58493ff83c1178\": container with ID starting with e953f183e89c1ebf59cf5382c9b6fb4c44d30f7154ff1ec6ec58493ff83c1178 not found: ID does not exist" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.727374 4558 scope.go:117] "RemoveContainer" containerID="05509ededdd56f8c84dfeff661cfc5d2c11b221207e6cd3b144d11afd0842490" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.727698 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"05509ededdd56f8c84dfeff661cfc5d2c11b221207e6cd3b144d11afd0842490"} err="failed to get container status \"05509ededdd56f8c84dfeff661cfc5d2c11b221207e6cd3b144d11afd0842490\": rpc error: code = NotFound desc = could not find container \"05509ededdd56f8c84dfeff661cfc5d2c11b221207e6cd3b144d11afd0842490\": container with ID starting with 05509ededdd56f8c84dfeff661cfc5d2c11b221207e6cd3b144d11afd0842490 not found: ID does not exist" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.727712 4558 scope.go:117] "RemoveContainer" containerID="7de085a2635c97438a08689bb27694547ed0ecc2e18c9400eb1019ac044e74af" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.727881 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7de085a2635c97438a08689bb27694547ed0ecc2e18c9400eb1019ac044e74af"} err="failed to get container status \"7de085a2635c97438a08689bb27694547ed0ecc2e18c9400eb1019ac044e74af\": rpc error: code = NotFound desc = could not find container \"7de085a2635c97438a08689bb27694547ed0ecc2e18c9400eb1019ac044e74af\": container with ID starting with 
7de085a2635c97438a08689bb27694547ed0ecc2e18c9400eb1019ac044e74af not found: ID does not exist" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.738191 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.759297 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.817427 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1fa82f11-190d-4f03-90eb-93ce1d0caf36-config-data\") pod \"ceilometer-0\" (UID: \"1fa82f11-190d-4f03-90eb-93ce1d0caf36\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.817488 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1fa82f11-190d-4f03-90eb-93ce1d0caf36-scripts\") pod \"ceilometer-0\" (UID: \"1fa82f11-190d-4f03-90eb-93ce1d0caf36\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.817515 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1fa82f11-190d-4f03-90eb-93ce1d0caf36-log-httpd\") pod \"ceilometer-0\" (UID: \"1fa82f11-190d-4f03-90eb-93ce1d0caf36\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.817547 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5lb85\" (UniqueName: \"kubernetes.io/projected/1fa82f11-190d-4f03-90eb-93ce1d0caf36-kube-api-access-5lb85\") pod \"ceilometer-0\" (UID: \"1fa82f11-190d-4f03-90eb-93ce1d0caf36\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.817621 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fa82f11-190d-4f03-90eb-93ce1d0caf36-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"1fa82f11-190d-4f03-90eb-93ce1d0caf36\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.817652 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1fa82f11-190d-4f03-90eb-93ce1d0caf36-run-httpd\") pod \"ceilometer-0\" (UID: \"1fa82f11-190d-4f03-90eb-93ce1d0caf36\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.817684 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1fa82f11-190d-4f03-90eb-93ce1d0caf36-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"1fa82f11-190d-4f03-90eb-93ce1d0caf36\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.920083 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5lb85\" (UniqueName: \"kubernetes.io/projected/1fa82f11-190d-4f03-90eb-93ce1d0caf36-kube-api-access-5lb85\") pod \"ceilometer-0\" (UID: \"1fa82f11-190d-4f03-90eb-93ce1d0caf36\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 
17:02:29.920181 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fa82f11-190d-4f03-90eb-93ce1d0caf36-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"1fa82f11-190d-4f03-90eb-93ce1d0caf36\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.920221 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1fa82f11-190d-4f03-90eb-93ce1d0caf36-run-httpd\") pod \"ceilometer-0\" (UID: \"1fa82f11-190d-4f03-90eb-93ce1d0caf36\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.920247 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1fa82f11-190d-4f03-90eb-93ce1d0caf36-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"1fa82f11-190d-4f03-90eb-93ce1d0caf36\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.920300 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1fa82f11-190d-4f03-90eb-93ce1d0caf36-config-data\") pod \"ceilometer-0\" (UID: \"1fa82f11-190d-4f03-90eb-93ce1d0caf36\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.920325 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1fa82f11-190d-4f03-90eb-93ce1d0caf36-scripts\") pod \"ceilometer-0\" (UID: \"1fa82f11-190d-4f03-90eb-93ce1d0caf36\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.920339 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1fa82f11-190d-4f03-90eb-93ce1d0caf36-log-httpd\") pod \"ceilometer-0\" (UID: \"1fa82f11-190d-4f03-90eb-93ce1d0caf36\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.920980 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1fa82f11-190d-4f03-90eb-93ce1d0caf36-log-httpd\") pod \"ceilometer-0\" (UID: \"1fa82f11-190d-4f03-90eb-93ce1d0caf36\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.922517 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1fa82f11-190d-4f03-90eb-93ce1d0caf36-run-httpd\") pod \"ceilometer-0\" (UID: \"1fa82f11-190d-4f03-90eb-93ce1d0caf36\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.924523 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1fa82f11-190d-4f03-90eb-93ce1d0caf36-config-data\") pod \"ceilometer-0\" (UID: \"1fa82f11-190d-4f03-90eb-93ce1d0caf36\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.924805 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1fa82f11-190d-4f03-90eb-93ce1d0caf36-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"1fa82f11-190d-4f03-90eb-93ce1d0caf36\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 
17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.925087 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1fa82f11-190d-4f03-90eb-93ce1d0caf36-scripts\") pod \"ceilometer-0\" (UID: \"1fa82f11-190d-4f03-90eb-93ce1d0caf36\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.926207 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fa82f11-190d-4f03-90eb-93ce1d0caf36-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"1fa82f11-190d-4f03-90eb-93ce1d0caf36\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:02:29 crc kubenswrapper[4558]: I0120 17:02:29.938942 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5lb85\" (UniqueName: \"kubernetes.io/projected/1fa82f11-190d-4f03-90eb-93ce1d0caf36-kube-api-access-5lb85\") pod \"ceilometer-0\" (UID: \"1fa82f11-190d-4f03-90eb-93ce1d0caf36\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:02:30 crc kubenswrapper[4558]: I0120 17:02:30.072317 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:02:30 crc kubenswrapper[4558]: W0120 17:02:30.082132 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6ab1bdd8_60bb_49a1_b1c8_e42f634e79e0.slice/crio-7a65367bfb962523881df570625a982087c1fee88420808715e879deb8a66e3a WatchSource:0}: Error finding container 7a65367bfb962523881df570625a982087c1fee88420808715e879deb8a66e3a: Status 404 returned error can't find the container with id 7a65367bfb962523881df570625a982087c1fee88420808715e879deb8a66e3a Jan 20 17:02:30 crc kubenswrapper[4558]: I0120 17:02:30.100633 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:02:30 crc kubenswrapper[4558]: I0120 17:02:30.136855 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-5f668b67df-bhm4t"] Jan 20 17:02:30 crc kubenswrapper[4558]: I0120 17:02:30.146828 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-85c96db9fd-dcjxv" event={"ID":"5ae48763-b66b-4038-855d-f700a2dbe040","Type":"ContainerStarted","Data":"b6e7adc6c85b1c4072ddc0ea64fb22919e97a66275f2f29b604d26d6e0a3864f"} Jan 20 17:02:30 crc kubenswrapper[4558]: I0120 17:02:30.148103 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/barbican-api-85c96db9fd-dcjxv" Jan 20 17:02:30 crc kubenswrapper[4558]: I0120 17:02:30.148137 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/barbican-api-85c96db9fd-dcjxv" Jan 20 17:02:30 crc kubenswrapper[4558]: W0120 17:02:30.152764 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod561ded1f_50d2_4eb9_8ceb_c6f587ee80d6.slice/crio-79cc6d091a1acd1070f9122c8d0cdefa50fc802a0386b14788849334d6ca9a85 WatchSource:0}: Error finding container 79cc6d091a1acd1070f9122c8d0cdefa50fc802a0386b14788849334d6ca9a85: Status 404 returned error can't find the container with id 79cc6d091a1acd1070f9122c8d0cdefa50fc802a0386b14788849334d6ca9a85 Jan 20 17:02:30 crc kubenswrapper[4558]: I0120 17:02:30.154026 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-9b48ffd74-btjvf" event={"ID":"6dfbc47c-776a-4037-b444-06871e236fa8","Type":"ContainerStarted","Data":"df2d929cd85144a28ff8ba0e06c2eaaa403cb15d51885680f97f5b7df9640e84"} Jan 20 17:02:30 crc kubenswrapper[4558]: I0120 17:02:30.154051 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-9b48ffd74-btjvf" event={"ID":"6dfbc47c-776a-4037-b444-06871e236fa8","Type":"ContainerStarted","Data":"014f5e272ac85a6ef2a4fbfb0ab6a52407f35dbb51b22e035984305b9a965c73"} Jan 20 17:02:30 crc kubenswrapper[4558]: I0120 17:02:30.154724 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/placement-9b48ffd74-btjvf" Jan 20 17:02:30 crc kubenswrapper[4558]: I0120 17:02:30.154749 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/placement-9b48ffd74-btjvf" Jan 20 17:02:30 crc kubenswrapper[4558]: I0120 17:02:30.157579 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"b6a69309-962d-4a4a-8fd0-e25a8a14cee1","Type":"ContainerStarted","Data":"bc6ce507a944a4c0944bcc1982c4d469e2d7fdba5e2e507de9cffdb943ca267c"} Jan 20 17:02:30 crc kubenswrapper[4558]: I0120 17:02:30.164020 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-api-85c96db9fd-dcjxv" podStartSLOduration=3.163984305 podStartE2EDuration="3.163984305s" podCreationTimestamp="2026-01-20 17:02:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:02:30.159571578 +0000 UTC m=+1243.919909544" watchObservedRunningTime="2026-01-20 17:02:30.163984305 +0000 UTC m=+1243.924322272" Jan 20 17:02:30 crc kubenswrapper[4558]: I0120 17:02:30.170078 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0","Type":"ContainerStarted","Data":"7a65367bfb962523881df570625a982087c1fee88420808715e879deb8a66e3a"} Jan 20 17:02:30 crc kubenswrapper[4558]: I0120 17:02:30.208358 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-external-api-0" podStartSLOduration=4.208328521 podStartE2EDuration="4.208328521s" podCreationTimestamp="2026-01-20 17:02:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:02:30.182619832 +0000 UTC m=+1243.942957799" watchObservedRunningTime="2026-01-20 17:02:30.208328521 +0000 UTC m=+1243.968666498" Jan 20 17:02:30 crc kubenswrapper[4558]: I0120 17:02:30.214421 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-fbd5dff98-mmhdt" event={"ID":"a98c5227-c8a8-4cbc-8039-461dd14fbd5b","Type":"ContainerStarted","Data":"bd40ce3a39fbdd59afdf9e62cf2e6f662f5e7a7d8647d8f283efcb8f7b52ebc2"} Jan 20 17:02:30 crc kubenswrapper[4558]: I0120 17:02:30.214473 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-fbd5dff98-mmhdt" event={"ID":"a98c5227-c8a8-4cbc-8039-461dd14fbd5b","Type":"ContainerStarted","Data":"bdd46cdb21b22eb84d6fd244e570bc9831757537e8b49ef22aaf635a1aa83768"} Jan 20 17:02:30 crc kubenswrapper[4558]: I0120 17:02:30.214486 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-fbd5dff98-mmhdt" event={"ID":"a98c5227-c8a8-4cbc-8039-461dd14fbd5b","Type":"ContainerStarted","Data":"008e17f898f6bf0143ec5e258597f523d7316f6923096087b15b4e69dec9c782"} Jan 20 17:02:30 crc kubenswrapper[4558]: I0120 17:02:30.216497 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/placement-fbd5dff98-mmhdt" Jan 20 17:02:30 crc kubenswrapper[4558]: I0120 17:02:30.216863 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/placement-fbd5dff98-mmhdt" Jan 20 17:02:30 crc kubenswrapper[4558]: I0120 17:02:30.229854 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/placement-9b48ffd74-btjvf" podStartSLOduration=3.229840136 podStartE2EDuration="3.229840136s" podCreationTimestamp="2026-01-20 17:02:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:02:30.198107629 +0000 UTC m=+1243.958445596" watchObservedRunningTime="2026-01-20 17:02:30.229840136 +0000 UTC m=+1243.990178113" Jan 20 17:02:30 crc kubenswrapper[4558]: I0120 17:02:30.279508 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/placement-fbd5dff98-mmhdt" podStartSLOduration=2.279487695 podStartE2EDuration="2.279487695s" podCreationTimestamp="2026-01-20 17:02:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:02:30.260638336 +0000 UTC m=+1244.020976303" watchObservedRunningTime="2026-01-20 17:02:30.279487695 +0000 UTC m=+1244.039825662" Jan 20 17:02:30 crc kubenswrapper[4558]: I0120 17:02:30.283823 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" 
event={"ID":"694530bc-dda1-4895-be8d-90c2f02af6cb","Type":"ContainerStarted","Data":"6227332080e50e94d7418909eec1841d572760ab845e16704bcfe9ff6c0e352c"} Jan 20 17:02:30 crc kubenswrapper[4558]: I0120 17:02:30.294007 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-65f6b4bfbb-llhdw" event={"ID":"bc2c7d29-d967-414d-8bb9-1bf70bcacdcd","Type":"ContainerStarted","Data":"3a9e47d6638b5d013f9248c5040a1853dbc404d976176e3415eea7e361d8e47b"} Jan 20 17:02:30 crc kubenswrapper[4558]: I0120 17:02:30.323900 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-keystone-listener-65f6b4bfbb-llhdw" podStartSLOduration=4.323881222 podStartE2EDuration="4.323881222s" podCreationTimestamp="2026-01-20 17:02:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:02:30.309651151 +0000 UTC m=+1244.069989138" watchObservedRunningTime="2026-01-20 17:02:30.323881222 +0000 UTC m=+1244.084219189" Jan 20 17:02:30 crc kubenswrapper[4558]: I0120 17:02:30.361967 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-xq579" event={"ID":"66dc4df8-a7a7-452b-a8b2-b639d84ad42f","Type":"ContainerStarted","Data":"7a5c7c5046f9233504ec28fc00ba05146028e8398f93bed66b2944a461db8e6b"} Jan 20 17:02:30 crc kubenswrapper[4558]: I0120 17:02:30.362042 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-xq579" event={"ID":"66dc4df8-a7a7-452b-a8b2-b639d84ad42f","Type":"ContainerStarted","Data":"e9180bb8fa7ee60016bb81436a8395c7bab11ad65c94ad1e52ca9826df7c5eba"} Jan 20 17:02:30 crc kubenswrapper[4558]: I0120 17:02:30.391462 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/keystone-bootstrap-xq579" podStartSLOduration=3.391444684 podStartE2EDuration="3.391444684s" podCreationTimestamp="2026-01-20 17:02:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:02:30.384547643 +0000 UTC m=+1244.144885610" watchObservedRunningTime="2026-01-20 17:02:30.391444684 +0000 UTC m=+1244.151782650" Jan 20 17:02:30 crc kubenswrapper[4558]: I0120 17:02:30.578211 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3153c81a-39a0-4250-b53b-154a6a08ccbf" path="/var/lib/kubelet/pods/3153c81a-39a0-4250-b53b-154a6a08ccbf/volumes" Jan 20 17:02:30 crc kubenswrapper[4558]: I0120 17:02:30.630341 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:02:31 crc kubenswrapper[4558]: I0120 17:02:31.377897 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-5f668b67df-bhm4t" event={"ID":"561ded1f-50d2-4eb9-8ceb-c6f587ee80d6","Type":"ContainerStarted","Data":"1f93abb9eab5658358835514b46ebdffd05153b84dd13189515b67613f63f304"} Jan 20 17:02:31 crc kubenswrapper[4558]: I0120 17:02:31.378432 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/neutron-5f668b67df-bhm4t" Jan 20 17:02:31 crc kubenswrapper[4558]: I0120 17:02:31.378445 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-5f668b67df-bhm4t" 
event={"ID":"561ded1f-50d2-4eb9-8ceb-c6f587ee80d6","Type":"ContainerStarted","Data":"b1010de6c5451fa8fdfb551cfb1d14c2ce97f03ca7a9a4dfc2b5fa33e42ee8b3"} Jan 20 17:02:31 crc kubenswrapper[4558]: I0120 17:02:31.378455 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-5f668b67df-bhm4t" event={"ID":"561ded1f-50d2-4eb9-8ceb-c6f587ee80d6","Type":"ContainerStarted","Data":"79cc6d091a1acd1070f9122c8d0cdefa50fc802a0386b14788849334d6ca9a85"} Jan 20 17:02:31 crc kubenswrapper[4558]: I0120 17:02:31.380973 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0","Type":"ContainerStarted","Data":"f18df28315d07537d6230a73dbc5fbcef4eabce89e460f00f0c9180738214941"} Jan 20 17:02:31 crc kubenswrapper[4558]: I0120 17:02:31.382629 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"1fa82f11-190d-4f03-90eb-93ce1d0caf36","Type":"ContainerStarted","Data":"71c561b0c5426060c5c7a25047a00e71bca020a116c5379dce18e337c20af72f"} Jan 20 17:02:31 crc kubenswrapper[4558]: I0120 17:02:31.387780 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"694530bc-dda1-4895-be8d-90c2f02af6cb","Type":"ContainerStarted","Data":"b84bd6b58ffa917ac3faa7295198afe244eeb4419913377dd962ffcef7a733d2"} Jan 20 17:02:31 crc kubenswrapper[4558]: I0120 17:02:31.387886 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"694530bc-dda1-4895-be8d-90c2f02af6cb","Type":"ContainerStarted","Data":"bef8c06774ffa6560d532facfe7b66342e3ce0fb39349186cc64614d58fb666f"} Jan 20 17:02:31 crc kubenswrapper[4558]: I0120 17:02:31.406752 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/neutron-5f668b67df-bhm4t" podStartSLOduration=3.406739584 podStartE2EDuration="3.406739584s" podCreationTimestamp="2026-01-20 17:02:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:02:31.394822744 +0000 UTC m=+1245.155160711" watchObservedRunningTime="2026-01-20 17:02:31.406739584 +0000 UTC m=+1245.167077551" Jan 20 17:02:31 crc kubenswrapper[4558]: I0120 17:02:31.425262 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/cinder-scheduler-0" podStartSLOduration=3.425223496 podStartE2EDuration="3.425223496s" podCreationTimestamp="2026-01-20 17:02:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:02:31.420825326 +0000 UTC m=+1245.181163313" watchObservedRunningTime="2026-01-20 17:02:31.425223496 +0000 UTC m=+1245.185561464" Jan 20 17:02:32 crc kubenswrapper[4558]: I0120 17:02:32.396055 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"1fa82f11-190d-4f03-90eb-93ce1d0caf36","Type":"ContainerStarted","Data":"f1726311e40abb9f8cd333b1557f3a45f29a8c750b323e60afc3571fcd98b0d9"} Jan 20 17:02:32 crc kubenswrapper[4558]: I0120 17:02:32.396382 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"1fa82f11-190d-4f03-90eb-93ce1d0caf36","Type":"ContainerStarted","Data":"5da46a41a4afdd1907ffd116daa323945a958ea2d34c974bd437261d9f1d45b9"} Jan 20 17:02:32 crc kubenswrapper[4558]: I0120 
17:02:32.397324 4558 generic.go:334] "Generic (PLEG): container finished" podID="66dc4df8-a7a7-452b-a8b2-b639d84ad42f" containerID="7a5c7c5046f9233504ec28fc00ba05146028e8398f93bed66b2944a461db8e6b" exitCode=0 Jan 20 17:02:32 crc kubenswrapper[4558]: I0120 17:02:32.397413 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-xq579" event={"ID":"66dc4df8-a7a7-452b-a8b2-b639d84ad42f","Type":"ContainerDied","Data":"7a5c7c5046f9233504ec28fc00ba05146028e8398f93bed66b2944a461db8e6b"} Jan 20 17:02:32 crc kubenswrapper[4558]: I0120 17:02:32.399343 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0","Type":"ContainerStarted","Data":"d35690d5e8e57ea89beb3eb31aab4905ad216bba7b3e508c030bbce695df1a64"} Jan 20 17:02:32 crc kubenswrapper[4558]: I0120 17:02:32.437211 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/cinder-api-0" podStartSLOduration=3.437196581 podStartE2EDuration="3.437196581s" podCreationTimestamp="2026-01-20 17:02:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:02:32.43189515 +0000 UTC m=+1246.192233117" watchObservedRunningTime="2026-01-20 17:02:32.437196581 +0000 UTC m=+1246.197534547" Jan 20 17:02:33 crc kubenswrapper[4558]: I0120 17:02:33.409074 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"1fa82f11-190d-4f03-90eb-93ce1d0caf36","Type":"ContainerStarted","Data":"9e41d81a80fc745c023df19c0397a12429361410c19b1d7b04de7da6ee1b3b38"} Jan 20 17:02:33 crc kubenswrapper[4558]: I0120 17:02:33.409502 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:02:33 crc kubenswrapper[4558]: I0120 17:02:33.687634 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:02:33 crc kubenswrapper[4558]: I0120 17:02:33.702214 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-xq579" Jan 20 17:02:33 crc kubenswrapper[4558]: I0120 17:02:33.825018 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/66dc4df8-a7a7-452b-a8b2-b639d84ad42f-fernet-keys\") pod \"66dc4df8-a7a7-452b-a8b2-b639d84ad42f\" (UID: \"66dc4df8-a7a7-452b-a8b2-b639d84ad42f\") " Jan 20 17:02:33 crc kubenswrapper[4558]: I0120 17:02:33.825566 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n7wt8\" (UniqueName: \"kubernetes.io/projected/66dc4df8-a7a7-452b-a8b2-b639d84ad42f-kube-api-access-n7wt8\") pod \"66dc4df8-a7a7-452b-a8b2-b639d84ad42f\" (UID: \"66dc4df8-a7a7-452b-a8b2-b639d84ad42f\") " Jan 20 17:02:33 crc kubenswrapper[4558]: I0120 17:02:33.825615 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66dc4df8-a7a7-452b-a8b2-b639d84ad42f-combined-ca-bundle\") pod \"66dc4df8-a7a7-452b-a8b2-b639d84ad42f\" (UID: \"66dc4df8-a7a7-452b-a8b2-b639d84ad42f\") " Jan 20 17:02:33 crc kubenswrapper[4558]: I0120 17:02:33.825651 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/66dc4df8-a7a7-452b-a8b2-b639d84ad42f-scripts\") pod \"66dc4df8-a7a7-452b-a8b2-b639d84ad42f\" (UID: \"66dc4df8-a7a7-452b-a8b2-b639d84ad42f\") " Jan 20 17:02:33 crc kubenswrapper[4558]: I0120 17:02:33.825689 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/66dc4df8-a7a7-452b-a8b2-b639d84ad42f-config-data\") pod \"66dc4df8-a7a7-452b-a8b2-b639d84ad42f\" (UID: \"66dc4df8-a7a7-452b-a8b2-b639d84ad42f\") " Jan 20 17:02:33 crc kubenswrapper[4558]: I0120 17:02:33.825754 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/66dc4df8-a7a7-452b-a8b2-b639d84ad42f-credential-keys\") pod \"66dc4df8-a7a7-452b-a8b2-b639d84ad42f\" (UID: \"66dc4df8-a7a7-452b-a8b2-b639d84ad42f\") " Jan 20 17:02:33 crc kubenswrapper[4558]: I0120 17:02:33.831196 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66dc4df8-a7a7-452b-a8b2-b639d84ad42f-scripts" (OuterVolumeSpecName: "scripts") pod "66dc4df8-a7a7-452b-a8b2-b639d84ad42f" (UID: "66dc4df8-a7a7-452b-a8b2-b639d84ad42f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:02:33 crc kubenswrapper[4558]: I0120 17:02:33.831363 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/66dc4df8-a7a7-452b-a8b2-b639d84ad42f-kube-api-access-n7wt8" (OuterVolumeSpecName: "kube-api-access-n7wt8") pod "66dc4df8-a7a7-452b-a8b2-b639d84ad42f" (UID: "66dc4df8-a7a7-452b-a8b2-b639d84ad42f"). InnerVolumeSpecName "kube-api-access-n7wt8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:02:33 crc kubenswrapper[4558]: I0120 17:02:33.845556 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66dc4df8-a7a7-452b-a8b2-b639d84ad42f-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "66dc4df8-a7a7-452b-a8b2-b639d84ad42f" (UID: "66dc4df8-a7a7-452b-a8b2-b639d84ad42f"). InnerVolumeSpecName "credential-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:02:33 crc kubenswrapper[4558]: I0120 17:02:33.851821 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66dc4df8-a7a7-452b-a8b2-b639d84ad42f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "66dc4df8-a7a7-452b-a8b2-b639d84ad42f" (UID: "66dc4df8-a7a7-452b-a8b2-b639d84ad42f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:02:33 crc kubenswrapper[4558]: I0120 17:02:33.852211 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66dc4df8-a7a7-452b-a8b2-b639d84ad42f-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "66dc4df8-a7a7-452b-a8b2-b639d84ad42f" (UID: "66dc4df8-a7a7-452b-a8b2-b639d84ad42f"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:02:33 crc kubenswrapper[4558]: I0120 17:02:33.856642 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66dc4df8-a7a7-452b-a8b2-b639d84ad42f-config-data" (OuterVolumeSpecName: "config-data") pod "66dc4df8-a7a7-452b-a8b2-b639d84ad42f" (UID: "66dc4df8-a7a7-452b-a8b2-b639d84ad42f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:02:33 crc kubenswrapper[4558]: I0120 17:02:33.928337 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n7wt8\" (UniqueName: \"kubernetes.io/projected/66dc4df8-a7a7-452b-a8b2-b639d84ad42f-kube-api-access-n7wt8\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:33 crc kubenswrapper[4558]: I0120 17:02:33.928372 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66dc4df8-a7a7-452b-a8b2-b639d84ad42f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:33 crc kubenswrapper[4558]: I0120 17:02:33.928384 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/66dc4df8-a7a7-452b-a8b2-b639d84ad42f-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:33 crc kubenswrapper[4558]: I0120 17:02:33.928395 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/66dc4df8-a7a7-452b-a8b2-b639d84ad42f-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:33 crc kubenswrapper[4558]: I0120 17:02:33.928407 4558 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/66dc4df8-a7a7-452b-a8b2-b639d84ad42f-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:33 crc kubenswrapper[4558]: I0120 17:02:33.928416 4558 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/66dc4df8-a7a7-452b-a8b2-b639d84ad42f-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:34 crc kubenswrapper[4558]: I0120 17:02:34.368680 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:02:34 crc kubenswrapper[4558]: I0120 17:02:34.417432 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-xq579" event={"ID":"66dc4df8-a7a7-452b-a8b2-b639d84ad42f","Type":"ContainerDied","Data":"e9180bb8fa7ee60016bb81436a8395c7bab11ad65c94ad1e52ca9826df7c5eba"} Jan 20 17:02:34 crc kubenswrapper[4558]: I0120 17:02:34.418371 4558 pod_container_deletor.go:80] 
"Container not found in pod's containers" containerID="e9180bb8fa7ee60016bb81436a8395c7bab11ad65c94ad1e52ca9826df7c5eba" Jan 20 17:02:34 crc kubenswrapper[4558]: I0120 17:02:34.417687 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-xq579" Jan 20 17:02:34 crc kubenswrapper[4558]: I0120 17:02:34.420480 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"1fa82f11-190d-4f03-90eb-93ce1d0caf36","Type":"ContainerStarted","Data":"f9d98cfacb9e982fff6de888dacf794e1dbf9cc9e06d9800767b7d467cb73658"} Jan 20 17:02:34 crc kubenswrapper[4558]: I0120 17:02:34.420526 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:02:34 crc kubenswrapper[4558]: I0120 17:02:34.439740 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=1.89257919 podStartE2EDuration="5.439729287s" podCreationTimestamp="2026-01-20 17:02:29 +0000 UTC" firstStartedPulling="2026-01-20 17:02:30.661456024 +0000 UTC m=+1244.421793991" lastFinishedPulling="2026-01-20 17:02:34.208606121 +0000 UTC m=+1247.968944088" observedRunningTime="2026-01-20 17:02:34.435940261 +0000 UTC m=+1248.196278229" watchObservedRunningTime="2026-01-20 17:02:34.439729287 +0000 UTC m=+1248.200067255" Jan 20 17:02:34 crc kubenswrapper[4558]: I0120 17:02:34.575720 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-65d75d5fbb-nkccr"] Jan 20 17:02:34 crc kubenswrapper[4558]: E0120 17:02:34.575996 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66dc4df8-a7a7-452b-a8b2-b639d84ad42f" containerName="keystone-bootstrap" Jan 20 17:02:34 crc kubenswrapper[4558]: I0120 17:02:34.576014 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="66dc4df8-a7a7-452b-a8b2-b639d84ad42f" containerName="keystone-bootstrap" Jan 20 17:02:34 crc kubenswrapper[4558]: I0120 17:02:34.576233 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="66dc4df8-a7a7-452b-a8b2-b639d84ad42f" containerName="keystone-bootstrap" Jan 20 17:02:34 crc kubenswrapper[4558]: I0120 17:02:34.576763 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-65d75d5fbb-nkccr" Jan 20 17:02:34 crc kubenswrapper[4558]: I0120 17:02:34.585694 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone" Jan 20 17:02:34 crc kubenswrapper[4558]: I0120 17:02:34.585761 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-keystone-public-svc" Jan 20 17:02:34 crc kubenswrapper[4558]: I0120 17:02:34.585806 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-keystone-internal-svc" Jan 20 17:02:34 crc kubenswrapper[4558]: I0120 17:02:34.585710 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-scripts" Jan 20 17:02:34 crc kubenswrapper[4558]: I0120 17:02:34.585901 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-config-data" Jan 20 17:02:34 crc kubenswrapper[4558]: I0120 17:02:34.585986 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-keystone-dockercfg-4bhst" Jan 20 17:02:34 crc kubenswrapper[4558]: I0120 17:02:34.588473 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-65d75d5fbb-nkccr"] Jan 20 17:02:34 crc kubenswrapper[4558]: I0120 17:02:34.744415 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8b9f5a8b-ee37-483c-9a86-38cc24dcb388-credential-keys\") pod \"keystone-65d75d5fbb-nkccr\" (UID: \"8b9f5a8b-ee37-483c-9a86-38cc24dcb388\") " pod="openstack-kuttl-tests/keystone-65d75d5fbb-nkccr" Jan 20 17:02:34 crc kubenswrapper[4558]: I0120 17:02:34.744538 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8b9f5a8b-ee37-483c-9a86-38cc24dcb388-fernet-keys\") pod \"keystone-65d75d5fbb-nkccr\" (UID: \"8b9f5a8b-ee37-483c-9a86-38cc24dcb388\") " pod="openstack-kuttl-tests/keystone-65d75d5fbb-nkccr" Jan 20 17:02:34 crc kubenswrapper[4558]: I0120 17:02:34.744656 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b9f5a8b-ee37-483c-9a86-38cc24dcb388-internal-tls-certs\") pod \"keystone-65d75d5fbb-nkccr\" (UID: \"8b9f5a8b-ee37-483c-9a86-38cc24dcb388\") " pod="openstack-kuttl-tests/keystone-65d75d5fbb-nkccr" Jan 20 17:02:34 crc kubenswrapper[4558]: I0120 17:02:34.744734 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b9f5a8b-ee37-483c-9a86-38cc24dcb388-public-tls-certs\") pod \"keystone-65d75d5fbb-nkccr\" (UID: \"8b9f5a8b-ee37-483c-9a86-38cc24dcb388\") " pod="openstack-kuttl-tests/keystone-65d75d5fbb-nkccr" Jan 20 17:02:34 crc kubenswrapper[4558]: I0120 17:02:34.744782 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8b9f5a8b-ee37-483c-9a86-38cc24dcb388-scripts\") pod \"keystone-65d75d5fbb-nkccr\" (UID: \"8b9f5a8b-ee37-483c-9a86-38cc24dcb388\") " pod="openstack-kuttl-tests/keystone-65d75d5fbb-nkccr" Jan 20 17:02:34 crc kubenswrapper[4558]: I0120 17:02:34.744883 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b9f5a8b-ee37-483c-9a86-38cc24dcb388-combined-ca-bundle\") pod \"keystone-65d75d5fbb-nkccr\" (UID: \"8b9f5a8b-ee37-483c-9a86-38cc24dcb388\") " pod="openstack-kuttl-tests/keystone-65d75d5fbb-nkccr" Jan 20 17:02:34 crc kubenswrapper[4558]: I0120 17:02:34.744930 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8b9f5a8b-ee37-483c-9a86-38cc24dcb388-config-data\") pod \"keystone-65d75d5fbb-nkccr\" (UID: \"8b9f5a8b-ee37-483c-9a86-38cc24dcb388\") " pod="openstack-kuttl-tests/keystone-65d75d5fbb-nkccr" Jan 20 17:02:34 crc kubenswrapper[4558]: I0120 17:02:34.744985 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xdsrw\" (UniqueName: \"kubernetes.io/projected/8b9f5a8b-ee37-483c-9a86-38cc24dcb388-kube-api-access-xdsrw\") pod \"keystone-65d75d5fbb-nkccr\" (UID: \"8b9f5a8b-ee37-483c-9a86-38cc24dcb388\") " pod="openstack-kuttl-tests/keystone-65d75d5fbb-nkccr" Jan 20 17:02:34 crc kubenswrapper[4558]: I0120 17:02:34.847073 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8b9f5a8b-ee37-483c-9a86-38cc24dcb388-credential-keys\") pod \"keystone-65d75d5fbb-nkccr\" (UID: \"8b9f5a8b-ee37-483c-9a86-38cc24dcb388\") " pod="openstack-kuttl-tests/keystone-65d75d5fbb-nkccr" Jan 20 17:02:34 crc kubenswrapper[4558]: I0120 17:02:34.847121 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8b9f5a8b-ee37-483c-9a86-38cc24dcb388-fernet-keys\") pod \"keystone-65d75d5fbb-nkccr\" (UID: \"8b9f5a8b-ee37-483c-9a86-38cc24dcb388\") " pod="openstack-kuttl-tests/keystone-65d75d5fbb-nkccr" Jan 20 17:02:34 crc kubenswrapper[4558]: I0120 17:02:34.847158 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b9f5a8b-ee37-483c-9a86-38cc24dcb388-internal-tls-certs\") pod \"keystone-65d75d5fbb-nkccr\" (UID: \"8b9f5a8b-ee37-483c-9a86-38cc24dcb388\") " pod="openstack-kuttl-tests/keystone-65d75d5fbb-nkccr" Jan 20 17:02:34 crc kubenswrapper[4558]: I0120 17:02:34.847232 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b9f5a8b-ee37-483c-9a86-38cc24dcb388-public-tls-certs\") pod \"keystone-65d75d5fbb-nkccr\" (UID: \"8b9f5a8b-ee37-483c-9a86-38cc24dcb388\") " pod="openstack-kuttl-tests/keystone-65d75d5fbb-nkccr" Jan 20 17:02:34 crc kubenswrapper[4558]: I0120 17:02:34.847249 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8b9f5a8b-ee37-483c-9a86-38cc24dcb388-scripts\") pod \"keystone-65d75d5fbb-nkccr\" (UID: \"8b9f5a8b-ee37-483c-9a86-38cc24dcb388\") " pod="openstack-kuttl-tests/keystone-65d75d5fbb-nkccr" Jan 20 17:02:34 crc kubenswrapper[4558]: I0120 17:02:34.847301 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b9f5a8b-ee37-483c-9a86-38cc24dcb388-combined-ca-bundle\") pod \"keystone-65d75d5fbb-nkccr\" (UID: \"8b9f5a8b-ee37-483c-9a86-38cc24dcb388\") " pod="openstack-kuttl-tests/keystone-65d75d5fbb-nkccr" Jan 20 17:02:34 crc kubenswrapper[4558]: I0120 17:02:34.847321 4558 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8b9f5a8b-ee37-483c-9a86-38cc24dcb388-config-data\") pod \"keystone-65d75d5fbb-nkccr\" (UID: \"8b9f5a8b-ee37-483c-9a86-38cc24dcb388\") " pod="openstack-kuttl-tests/keystone-65d75d5fbb-nkccr" Jan 20 17:02:34 crc kubenswrapper[4558]: I0120 17:02:34.847349 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xdsrw\" (UniqueName: \"kubernetes.io/projected/8b9f5a8b-ee37-483c-9a86-38cc24dcb388-kube-api-access-xdsrw\") pod \"keystone-65d75d5fbb-nkccr\" (UID: \"8b9f5a8b-ee37-483c-9a86-38cc24dcb388\") " pod="openstack-kuttl-tests/keystone-65d75d5fbb-nkccr" Jan 20 17:02:34 crc kubenswrapper[4558]: I0120 17:02:34.851736 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8b9f5a8b-ee37-483c-9a86-38cc24dcb388-credential-keys\") pod \"keystone-65d75d5fbb-nkccr\" (UID: \"8b9f5a8b-ee37-483c-9a86-38cc24dcb388\") " pod="openstack-kuttl-tests/keystone-65d75d5fbb-nkccr" Jan 20 17:02:34 crc kubenswrapper[4558]: I0120 17:02:34.852898 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b9f5a8b-ee37-483c-9a86-38cc24dcb388-public-tls-certs\") pod \"keystone-65d75d5fbb-nkccr\" (UID: \"8b9f5a8b-ee37-483c-9a86-38cc24dcb388\") " pod="openstack-kuttl-tests/keystone-65d75d5fbb-nkccr" Jan 20 17:02:34 crc kubenswrapper[4558]: I0120 17:02:34.853130 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8b9f5a8b-ee37-483c-9a86-38cc24dcb388-config-data\") pod \"keystone-65d75d5fbb-nkccr\" (UID: \"8b9f5a8b-ee37-483c-9a86-38cc24dcb388\") " pod="openstack-kuttl-tests/keystone-65d75d5fbb-nkccr" Jan 20 17:02:34 crc kubenswrapper[4558]: I0120 17:02:34.853227 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b9f5a8b-ee37-483c-9a86-38cc24dcb388-internal-tls-certs\") pod \"keystone-65d75d5fbb-nkccr\" (UID: \"8b9f5a8b-ee37-483c-9a86-38cc24dcb388\") " pod="openstack-kuttl-tests/keystone-65d75d5fbb-nkccr" Jan 20 17:02:34 crc kubenswrapper[4558]: I0120 17:02:34.853243 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b9f5a8b-ee37-483c-9a86-38cc24dcb388-combined-ca-bundle\") pod \"keystone-65d75d5fbb-nkccr\" (UID: \"8b9f5a8b-ee37-483c-9a86-38cc24dcb388\") " pod="openstack-kuttl-tests/keystone-65d75d5fbb-nkccr" Jan 20 17:02:34 crc kubenswrapper[4558]: I0120 17:02:34.853346 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8b9f5a8b-ee37-483c-9a86-38cc24dcb388-fernet-keys\") pod \"keystone-65d75d5fbb-nkccr\" (UID: \"8b9f5a8b-ee37-483c-9a86-38cc24dcb388\") " pod="openstack-kuttl-tests/keystone-65d75d5fbb-nkccr" Jan 20 17:02:34 crc kubenswrapper[4558]: I0120 17:02:34.857507 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8b9f5a8b-ee37-483c-9a86-38cc24dcb388-scripts\") pod \"keystone-65d75d5fbb-nkccr\" (UID: \"8b9f5a8b-ee37-483c-9a86-38cc24dcb388\") " pod="openstack-kuttl-tests/keystone-65d75d5fbb-nkccr" Jan 20 17:02:34 crc kubenswrapper[4558]: I0120 17:02:34.866601 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xdsrw\" 
(UniqueName: \"kubernetes.io/projected/8b9f5a8b-ee37-483c-9a86-38cc24dcb388-kube-api-access-xdsrw\") pod \"keystone-65d75d5fbb-nkccr\" (UID: \"8b9f5a8b-ee37-483c-9a86-38cc24dcb388\") " pod="openstack-kuttl-tests/keystone-65d75d5fbb-nkccr" Jan 20 17:02:34 crc kubenswrapper[4558]: I0120 17:02:34.895767 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-65d75d5fbb-nkccr" Jan 20 17:02:35 crc kubenswrapper[4558]: I0120 17:02:35.307185 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-65d75d5fbb-nkccr"] Jan 20 17:02:35 crc kubenswrapper[4558]: W0120 17:02:35.311123 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8b9f5a8b_ee37_483c_9a86_38cc24dcb388.slice/crio-fad79c24b80f092567d316ecb4bfbc3ae5aebc926661fa3565dfa02fbfee3931 WatchSource:0}: Error finding container fad79c24b80f092567d316ecb4bfbc3ae5aebc926661fa3565dfa02fbfee3931: Status 404 returned error can't find the container with id fad79c24b80f092567d316ecb4bfbc3ae5aebc926661fa3565dfa02fbfee3931 Jan 20 17:02:35 crc kubenswrapper[4558]: I0120 17:02:35.426559 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-api-0" podUID="6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0" containerName="cinder-api-log" containerID="cri-o://f18df28315d07537d6230a73dbc5fbcef4eabce89e460f00f0c9180738214941" gracePeriod=30 Jan 20 17:02:35 crc kubenswrapper[4558]: I0120 17:02:35.426844 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-65d75d5fbb-nkccr" event={"ID":"8b9f5a8b-ee37-483c-9a86-38cc24dcb388","Type":"ContainerStarted","Data":"fad79c24b80f092567d316ecb4bfbc3ae5aebc926661fa3565dfa02fbfee3931"} Jan 20 17:02:35 crc kubenswrapper[4558]: I0120 17:02:35.427792 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-api-0" podUID="6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0" containerName="cinder-api" containerID="cri-o://d35690d5e8e57ea89beb3eb31aab4905ad216bba7b3e508c030bbce695df1a64" gracePeriod=30 Jan 20 17:02:35 crc kubenswrapper[4558]: I0120 17:02:35.880421 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:02:35 crc kubenswrapper[4558]: I0120 17:02:35.963081 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0-etc-machine-id\") pod \"6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0\" (UID: \"6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0\") " Jan 20 17:02:35 crc kubenswrapper[4558]: I0120 17:02:35.963199 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0" (UID: "6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:02:35 crc kubenswrapper[4558]: I0120 17:02:35.963228 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0-combined-ca-bundle\") pod \"6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0\" (UID: \"6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0\") " Jan 20 17:02:35 crc kubenswrapper[4558]: I0120 17:02:35.963282 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0-config-data-custom\") pod \"6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0\" (UID: \"6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0\") " Jan 20 17:02:35 crc kubenswrapper[4558]: I0120 17:02:35.963388 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0-config-data\") pod \"6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0\" (UID: \"6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0\") " Jan 20 17:02:35 crc kubenswrapper[4558]: I0120 17:02:35.963425 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0-logs\") pod \"6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0\" (UID: \"6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0\") " Jan 20 17:02:35 crc kubenswrapper[4558]: I0120 17:02:35.963444 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nh9gt\" (UniqueName: \"kubernetes.io/projected/6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0-kube-api-access-nh9gt\") pod \"6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0\" (UID: \"6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0\") " Jan 20 17:02:35 crc kubenswrapper[4558]: I0120 17:02:35.963468 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0-scripts\") pod \"6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0\" (UID: \"6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0\") " Jan 20 17:02:35 crc kubenswrapper[4558]: I0120 17:02:35.963865 4558 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:35 crc kubenswrapper[4558]: I0120 17:02:35.963945 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0-logs" (OuterVolumeSpecName: "logs") pod "6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0" (UID: "6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:02:35 crc kubenswrapper[4558]: I0120 17:02:35.969811 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0-scripts" (OuterVolumeSpecName: "scripts") pod "6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0" (UID: "6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:02:35 crc kubenswrapper[4558]: I0120 17:02:35.969906 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0" (UID: "6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:02:35 crc kubenswrapper[4558]: I0120 17:02:35.970068 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0-kube-api-access-nh9gt" (OuterVolumeSpecName: "kube-api-access-nh9gt") pod "6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0" (UID: "6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0"). InnerVolumeSpecName "kube-api-access-nh9gt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:02:35 crc kubenswrapper[4558]: I0120 17:02:35.992316 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0" (UID: "6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.002803 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0-config-data" (OuterVolumeSpecName: "config-data") pod "6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0" (UID: "6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.065378 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.065469 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.065525 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.065575 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nh9gt\" (UniqueName: \"kubernetes.io/projected/6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0-kube-api-access-nh9gt\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.065639 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.065688 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.396882 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.397117 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.432831 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.432905 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.436463 4558 generic.go:334] "Generic (PLEG): container finished" podID="6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0" containerID="d35690d5e8e57ea89beb3eb31aab4905ad216bba7b3e508c030bbce695df1a64" exitCode=0 Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.436485 4558 generic.go:334] "Generic (PLEG): container finished" podID="6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0" containerID="f18df28315d07537d6230a73dbc5fbcef4eabce89e460f00f0c9180738214941" exitCode=143 Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.436533 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0","Type":"ContainerDied","Data":"d35690d5e8e57ea89beb3eb31aab4905ad216bba7b3e508c030bbce695df1a64"} Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.436559 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0","Type":"ContainerDied","Data":"f18df28315d07537d6230a73dbc5fbcef4eabce89e460f00f0c9180738214941"} 
Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.436572 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0","Type":"ContainerDied","Data":"7a65367bfb962523881df570625a982087c1fee88420808715e879deb8a66e3a"} Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.436586 4558 scope.go:117] "RemoveContainer" containerID="d35690d5e8e57ea89beb3eb31aab4905ad216bba7b3e508c030bbce695df1a64" Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.436724 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.437868 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-65d75d5fbb-nkccr" event={"ID":"8b9f5a8b-ee37-483c-9a86-38cc24dcb388","Type":"ContainerStarted","Data":"4b73519a38610ac22d4ce96ba40226baebd8e6660f77f9013e57a5c57815f3a9"} Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.438089 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.438210 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.450232 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.450271 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.462270 4558 scope.go:117] "RemoveContainer" containerID="f18df28315d07537d6230a73dbc5fbcef4eabce89e460f00f0c9180738214941" Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.473026 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/keystone-65d75d5fbb-nkccr" podStartSLOduration=2.473014689 podStartE2EDuration="2.473014689s" podCreationTimestamp="2026-01-20 17:02:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:02:36.468425539 +0000 UTC m=+1250.228763505" watchObservedRunningTime="2026-01-20 17:02:36.473014689 +0000 UTC m=+1250.233352655" Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.483791 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.484374 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.492764 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.496273 4558 scope.go:117] "RemoveContainer" containerID="d35690d5e8e57ea89beb3eb31aab4905ad216bba7b3e508c030bbce695df1a64" Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.496568 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:02:36 crc kubenswrapper[4558]: E0120 17:02:36.498281 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc 
error: code = NotFound desc = could not find container \"d35690d5e8e57ea89beb3eb31aab4905ad216bba7b3e508c030bbce695df1a64\": container with ID starting with d35690d5e8e57ea89beb3eb31aab4905ad216bba7b3e508c030bbce695df1a64 not found: ID does not exist" containerID="d35690d5e8e57ea89beb3eb31aab4905ad216bba7b3e508c030bbce695df1a64" Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.498309 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d35690d5e8e57ea89beb3eb31aab4905ad216bba7b3e508c030bbce695df1a64"} err="failed to get container status \"d35690d5e8e57ea89beb3eb31aab4905ad216bba7b3e508c030bbce695df1a64\": rpc error: code = NotFound desc = could not find container \"d35690d5e8e57ea89beb3eb31aab4905ad216bba7b3e508c030bbce695df1a64\": container with ID starting with d35690d5e8e57ea89beb3eb31aab4905ad216bba7b3e508c030bbce695df1a64 not found: ID does not exist" Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.498328 4558 scope.go:117] "RemoveContainer" containerID="f18df28315d07537d6230a73dbc5fbcef4eabce89e460f00f0c9180738214941" Jan 20 17:02:36 crc kubenswrapper[4558]: E0120 17:02:36.498766 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f18df28315d07537d6230a73dbc5fbcef4eabce89e460f00f0c9180738214941\": container with ID starting with f18df28315d07537d6230a73dbc5fbcef4eabce89e460f00f0c9180738214941 not found: ID does not exist" containerID="f18df28315d07537d6230a73dbc5fbcef4eabce89e460f00f0c9180738214941" Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.498798 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f18df28315d07537d6230a73dbc5fbcef4eabce89e460f00f0c9180738214941"} err="failed to get container status \"f18df28315d07537d6230a73dbc5fbcef4eabce89e460f00f0c9180738214941\": rpc error: code = NotFound desc = could not find container \"f18df28315d07537d6230a73dbc5fbcef4eabce89e460f00f0c9180738214941\": container with ID starting with f18df28315d07537d6230a73dbc5fbcef4eabce89e460f00f0c9180738214941 not found: ID does not exist" Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.498812 4558 scope.go:117] "RemoveContainer" containerID="d35690d5e8e57ea89beb3eb31aab4905ad216bba7b3e508c030bbce695df1a64" Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.499340 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d35690d5e8e57ea89beb3eb31aab4905ad216bba7b3e508c030bbce695df1a64"} err="failed to get container status \"d35690d5e8e57ea89beb3eb31aab4905ad216bba7b3e508c030bbce695df1a64\": rpc error: code = NotFound desc = could not find container \"d35690d5e8e57ea89beb3eb31aab4905ad216bba7b3e508c030bbce695df1a64\": container with ID starting with d35690d5e8e57ea89beb3eb31aab4905ad216bba7b3e508c030bbce695df1a64 not found: ID does not exist" Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.499361 4558 scope.go:117] "RemoveContainer" containerID="f18df28315d07537d6230a73dbc5fbcef4eabce89e460f00f0c9180738214941" Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.500976 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f18df28315d07537d6230a73dbc5fbcef4eabce89e460f00f0c9180738214941"} err="failed to get container status \"f18df28315d07537d6230a73dbc5fbcef4eabce89e460f00f0c9180738214941\": rpc error: code = NotFound desc = could not find container 
\"f18df28315d07537d6230a73dbc5fbcef4eabce89e460f00f0c9180738214941\": container with ID starting with f18df28315d07537d6230a73dbc5fbcef4eabce89e460f00f0c9180738214941 not found: ID does not exist" Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.512266 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:02:36 crc kubenswrapper[4558]: E0120 17:02:36.512591 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0" containerName="cinder-api-log" Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.512608 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0" containerName="cinder-api-log" Jan 20 17:02:36 crc kubenswrapper[4558]: E0120 17:02:36.512619 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0" containerName="cinder-api" Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.512625 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0" containerName="cinder-api" Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.512825 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0" containerName="cinder-api-log" Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.512842 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0" containerName="cinder-api" Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.513591 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.519318 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-cinder-public-svc" Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.519447 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-api-config-data" Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.519549 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-cinder-internal-svc" Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.545766 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.575177 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0" path="/var/lib/kubelet/pods/6ab1bdd8-60bb-49a1-b1c8-e42f634e79e0/volumes" Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.577061 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2ba42f6a-1ade-45ae-bdc4-117ad2fa866b-etc-machine-id\") pod \"cinder-api-0\" (UID: \"2ba42f6a-1ade-45ae-bdc4-117ad2fa866b\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.577789 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2ba42f6a-1ade-45ae-bdc4-117ad2fa866b-logs\") pod \"cinder-api-0\" (UID: \"2ba42f6a-1ade-45ae-bdc4-117ad2fa866b\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.577829 4558 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2ba42f6a-1ade-45ae-bdc4-117ad2fa866b-public-tls-certs\") pod \"cinder-api-0\" (UID: \"2ba42f6a-1ade-45ae-bdc4-117ad2fa866b\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.577956 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2ba42f6a-1ade-45ae-bdc4-117ad2fa866b-config-data-custom\") pod \"cinder-api-0\" (UID: \"2ba42f6a-1ade-45ae-bdc4-117ad2fa866b\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.578129 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ba42f6a-1ade-45ae-bdc4-117ad2fa866b-config-data\") pod \"cinder-api-0\" (UID: \"2ba42f6a-1ade-45ae-bdc4-117ad2fa866b\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.578222 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2ba42f6a-1ade-45ae-bdc4-117ad2fa866b-scripts\") pod \"cinder-api-0\" (UID: \"2ba42f6a-1ade-45ae-bdc4-117ad2fa866b\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.578281 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ba42f6a-1ade-45ae-bdc4-117ad2fa866b-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"2ba42f6a-1ade-45ae-bdc4-117ad2fa866b\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.578314 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tlnbb\" (UniqueName: \"kubernetes.io/projected/2ba42f6a-1ade-45ae-bdc4-117ad2fa866b-kube-api-access-tlnbb\") pod \"cinder-api-0\" (UID: \"2ba42f6a-1ade-45ae-bdc4-117ad2fa866b\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.578428 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2ba42f6a-1ade-45ae-bdc4-117ad2fa866b-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"2ba42f6a-1ade-45ae-bdc4-117ad2fa866b\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.680395 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ba42f6a-1ade-45ae-bdc4-117ad2fa866b-config-data\") pod \"cinder-api-0\" (UID: \"2ba42f6a-1ade-45ae-bdc4-117ad2fa866b\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.681293 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2ba42f6a-1ade-45ae-bdc4-117ad2fa866b-scripts\") pod \"cinder-api-0\" (UID: \"2ba42f6a-1ade-45ae-bdc4-117ad2fa866b\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.681453 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/2ba42f6a-1ade-45ae-bdc4-117ad2fa866b-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"2ba42f6a-1ade-45ae-bdc4-117ad2fa866b\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.681544 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tlnbb\" (UniqueName: \"kubernetes.io/projected/2ba42f6a-1ade-45ae-bdc4-117ad2fa866b-kube-api-access-tlnbb\") pod \"cinder-api-0\" (UID: \"2ba42f6a-1ade-45ae-bdc4-117ad2fa866b\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.681696 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2ba42f6a-1ade-45ae-bdc4-117ad2fa866b-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"2ba42f6a-1ade-45ae-bdc4-117ad2fa866b\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.681795 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2ba42f6a-1ade-45ae-bdc4-117ad2fa866b-etc-machine-id\") pod \"cinder-api-0\" (UID: \"2ba42f6a-1ade-45ae-bdc4-117ad2fa866b\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.682014 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2ba42f6a-1ade-45ae-bdc4-117ad2fa866b-logs\") pod \"cinder-api-0\" (UID: \"2ba42f6a-1ade-45ae-bdc4-117ad2fa866b\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.682070 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2ba42f6a-1ade-45ae-bdc4-117ad2fa866b-etc-machine-id\") pod \"cinder-api-0\" (UID: \"2ba42f6a-1ade-45ae-bdc4-117ad2fa866b\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.682136 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2ba42f6a-1ade-45ae-bdc4-117ad2fa866b-public-tls-certs\") pod \"cinder-api-0\" (UID: \"2ba42f6a-1ade-45ae-bdc4-117ad2fa866b\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.682303 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2ba42f6a-1ade-45ae-bdc4-117ad2fa866b-config-data-custom\") pod \"cinder-api-0\" (UID: \"2ba42f6a-1ade-45ae-bdc4-117ad2fa866b\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.682453 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2ba42f6a-1ade-45ae-bdc4-117ad2fa866b-logs\") pod \"cinder-api-0\" (UID: \"2ba42f6a-1ade-45ae-bdc4-117ad2fa866b\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.685462 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ba42f6a-1ade-45ae-bdc4-117ad2fa866b-config-data\") pod \"cinder-api-0\" (UID: \"2ba42f6a-1ade-45ae-bdc4-117ad2fa866b\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.688875 4558 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2ba42f6a-1ade-45ae-bdc4-117ad2fa866b-scripts\") pod \"cinder-api-0\" (UID: \"2ba42f6a-1ade-45ae-bdc4-117ad2fa866b\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.690624 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2ba42f6a-1ade-45ae-bdc4-117ad2fa866b-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"2ba42f6a-1ade-45ae-bdc4-117ad2fa866b\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.692020 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ba42f6a-1ade-45ae-bdc4-117ad2fa866b-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"2ba42f6a-1ade-45ae-bdc4-117ad2fa866b\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.693527 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2ba42f6a-1ade-45ae-bdc4-117ad2fa866b-public-tls-certs\") pod \"cinder-api-0\" (UID: \"2ba42f6a-1ade-45ae-bdc4-117ad2fa866b\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.694697 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2ba42f6a-1ade-45ae-bdc4-117ad2fa866b-config-data-custom\") pod \"cinder-api-0\" (UID: \"2ba42f6a-1ade-45ae-bdc4-117ad2fa866b\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.705360 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tlnbb\" (UniqueName: \"kubernetes.io/projected/2ba42f6a-1ade-45ae-bdc4-117ad2fa866b-kube-api-access-tlnbb\") pod \"cinder-api-0\" (UID: \"2ba42f6a-1ade-45ae-bdc4-117ad2fa866b\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:02:36 crc kubenswrapper[4558]: I0120 17:02:36.834313 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:02:37 crc kubenswrapper[4558]: I0120 17:02:37.244356 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:02:37 crc kubenswrapper[4558]: I0120 17:02:37.450326 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"2ba42f6a-1ade-45ae-bdc4-117ad2fa866b","Type":"ContainerStarted","Data":"e3ed7eb8d0b3d38d0512c2f86d418adeb35f23fda8f7c085ab363db73fe8ad6e"} Jan 20 17:02:37 crc kubenswrapper[4558]: I0120 17:02:37.452107 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/keystone-65d75d5fbb-nkccr" Jan 20 17:02:37 crc kubenswrapper[4558]: I0120 17:02:37.452150 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:02:37 crc kubenswrapper[4558]: I0120 17:02:37.452180 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:02:38 crc kubenswrapper[4558]: I0120 17:02:38.052989 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-7fdccbb69d-fng8p"] Jan 20 17:02:38 crc kubenswrapper[4558]: I0120 17:02:38.054538 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-7fdccbb69d-fng8p" Jan 20 17:02:38 crc kubenswrapper[4558]: I0120 17:02:38.070560 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-neutron-internal-svc" Jan 20 17:02:38 crc kubenswrapper[4558]: I0120 17:02:38.070734 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-neutron-public-svc" Jan 20 17:02:38 crc kubenswrapper[4558]: I0120 17:02:38.100299 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-7fdccbb69d-fng8p"] Jan 20 17:02:38 crc kubenswrapper[4558]: I0120 17:02:38.113756 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/fa011f19-1245-4ae8-ae2b-8773036498b3-ovndb-tls-certs\") pod \"neutron-7fdccbb69d-fng8p\" (UID: \"fa011f19-1245-4ae8-ae2b-8773036498b3\") " pod="openstack-kuttl-tests/neutron-7fdccbb69d-fng8p" Jan 20 17:02:38 crc kubenswrapper[4558]: I0120 17:02:38.113798 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jfjmw\" (UniqueName: \"kubernetes.io/projected/fa011f19-1245-4ae8-ae2b-8773036498b3-kube-api-access-jfjmw\") pod \"neutron-7fdccbb69d-fng8p\" (UID: \"fa011f19-1245-4ae8-ae2b-8773036498b3\") " pod="openstack-kuttl-tests/neutron-7fdccbb69d-fng8p" Jan 20 17:02:38 crc kubenswrapper[4558]: I0120 17:02:38.113818 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/fa011f19-1245-4ae8-ae2b-8773036498b3-httpd-config\") pod \"neutron-7fdccbb69d-fng8p\" (UID: \"fa011f19-1245-4ae8-ae2b-8773036498b3\") " pod="openstack-kuttl-tests/neutron-7fdccbb69d-fng8p" Jan 20 17:02:38 crc kubenswrapper[4558]: I0120 17:02:38.113879 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa011f19-1245-4ae8-ae2b-8773036498b3-combined-ca-bundle\") pod \"neutron-7fdccbb69d-fng8p\" 
(UID: \"fa011f19-1245-4ae8-ae2b-8773036498b3\") " pod="openstack-kuttl-tests/neutron-7fdccbb69d-fng8p" Jan 20 17:02:38 crc kubenswrapper[4558]: I0120 17:02:38.113904 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/fa011f19-1245-4ae8-ae2b-8773036498b3-config\") pod \"neutron-7fdccbb69d-fng8p\" (UID: \"fa011f19-1245-4ae8-ae2b-8773036498b3\") " pod="openstack-kuttl-tests/neutron-7fdccbb69d-fng8p" Jan 20 17:02:38 crc kubenswrapper[4558]: I0120 17:02:38.113924 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/fa011f19-1245-4ae8-ae2b-8773036498b3-internal-tls-certs\") pod \"neutron-7fdccbb69d-fng8p\" (UID: \"fa011f19-1245-4ae8-ae2b-8773036498b3\") " pod="openstack-kuttl-tests/neutron-7fdccbb69d-fng8p" Jan 20 17:02:38 crc kubenswrapper[4558]: I0120 17:02:38.114001 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/fa011f19-1245-4ae8-ae2b-8773036498b3-public-tls-certs\") pod \"neutron-7fdccbb69d-fng8p\" (UID: \"fa011f19-1245-4ae8-ae2b-8773036498b3\") " pod="openstack-kuttl-tests/neutron-7fdccbb69d-fng8p" Jan 20 17:02:38 crc kubenswrapper[4558]: I0120 17:02:38.216224 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/fa011f19-1245-4ae8-ae2b-8773036498b3-public-tls-certs\") pod \"neutron-7fdccbb69d-fng8p\" (UID: \"fa011f19-1245-4ae8-ae2b-8773036498b3\") " pod="openstack-kuttl-tests/neutron-7fdccbb69d-fng8p" Jan 20 17:02:38 crc kubenswrapper[4558]: I0120 17:02:38.216334 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/fa011f19-1245-4ae8-ae2b-8773036498b3-ovndb-tls-certs\") pod \"neutron-7fdccbb69d-fng8p\" (UID: \"fa011f19-1245-4ae8-ae2b-8773036498b3\") " pod="openstack-kuttl-tests/neutron-7fdccbb69d-fng8p" Jan 20 17:02:38 crc kubenswrapper[4558]: I0120 17:02:38.216367 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jfjmw\" (UniqueName: \"kubernetes.io/projected/fa011f19-1245-4ae8-ae2b-8773036498b3-kube-api-access-jfjmw\") pod \"neutron-7fdccbb69d-fng8p\" (UID: \"fa011f19-1245-4ae8-ae2b-8773036498b3\") " pod="openstack-kuttl-tests/neutron-7fdccbb69d-fng8p" Jan 20 17:02:38 crc kubenswrapper[4558]: I0120 17:02:38.216388 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/fa011f19-1245-4ae8-ae2b-8773036498b3-httpd-config\") pod \"neutron-7fdccbb69d-fng8p\" (UID: \"fa011f19-1245-4ae8-ae2b-8773036498b3\") " pod="openstack-kuttl-tests/neutron-7fdccbb69d-fng8p" Jan 20 17:02:38 crc kubenswrapper[4558]: I0120 17:02:38.216468 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa011f19-1245-4ae8-ae2b-8773036498b3-combined-ca-bundle\") pod \"neutron-7fdccbb69d-fng8p\" (UID: \"fa011f19-1245-4ae8-ae2b-8773036498b3\") " pod="openstack-kuttl-tests/neutron-7fdccbb69d-fng8p" Jan 20 17:02:38 crc kubenswrapper[4558]: I0120 17:02:38.216506 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/fa011f19-1245-4ae8-ae2b-8773036498b3-config\") pod 
\"neutron-7fdccbb69d-fng8p\" (UID: \"fa011f19-1245-4ae8-ae2b-8773036498b3\") " pod="openstack-kuttl-tests/neutron-7fdccbb69d-fng8p" Jan 20 17:02:38 crc kubenswrapper[4558]: I0120 17:02:38.216540 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/fa011f19-1245-4ae8-ae2b-8773036498b3-internal-tls-certs\") pod \"neutron-7fdccbb69d-fng8p\" (UID: \"fa011f19-1245-4ae8-ae2b-8773036498b3\") " pod="openstack-kuttl-tests/neutron-7fdccbb69d-fng8p" Jan 20 17:02:38 crc kubenswrapper[4558]: I0120 17:02:38.223498 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/fa011f19-1245-4ae8-ae2b-8773036498b3-internal-tls-certs\") pod \"neutron-7fdccbb69d-fng8p\" (UID: \"fa011f19-1245-4ae8-ae2b-8773036498b3\") " pod="openstack-kuttl-tests/neutron-7fdccbb69d-fng8p" Jan 20 17:02:38 crc kubenswrapper[4558]: I0120 17:02:38.225714 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/fa011f19-1245-4ae8-ae2b-8773036498b3-public-tls-certs\") pod \"neutron-7fdccbb69d-fng8p\" (UID: \"fa011f19-1245-4ae8-ae2b-8773036498b3\") " pod="openstack-kuttl-tests/neutron-7fdccbb69d-fng8p" Jan 20 17:02:38 crc kubenswrapper[4558]: I0120 17:02:38.225834 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/fa011f19-1245-4ae8-ae2b-8773036498b3-httpd-config\") pod \"neutron-7fdccbb69d-fng8p\" (UID: \"fa011f19-1245-4ae8-ae2b-8773036498b3\") " pod="openstack-kuttl-tests/neutron-7fdccbb69d-fng8p" Jan 20 17:02:38 crc kubenswrapper[4558]: I0120 17:02:38.229777 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa011f19-1245-4ae8-ae2b-8773036498b3-combined-ca-bundle\") pod \"neutron-7fdccbb69d-fng8p\" (UID: \"fa011f19-1245-4ae8-ae2b-8773036498b3\") " pod="openstack-kuttl-tests/neutron-7fdccbb69d-fng8p" Jan 20 17:02:38 crc kubenswrapper[4558]: I0120 17:02:38.229934 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/fa011f19-1245-4ae8-ae2b-8773036498b3-config\") pod \"neutron-7fdccbb69d-fng8p\" (UID: \"fa011f19-1245-4ae8-ae2b-8773036498b3\") " pod="openstack-kuttl-tests/neutron-7fdccbb69d-fng8p" Jan 20 17:02:38 crc kubenswrapper[4558]: I0120 17:02:38.232028 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/fa011f19-1245-4ae8-ae2b-8773036498b3-ovndb-tls-certs\") pod \"neutron-7fdccbb69d-fng8p\" (UID: \"fa011f19-1245-4ae8-ae2b-8773036498b3\") " pod="openstack-kuttl-tests/neutron-7fdccbb69d-fng8p" Jan 20 17:02:38 crc kubenswrapper[4558]: I0120 17:02:38.236140 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jfjmw\" (UniqueName: \"kubernetes.io/projected/fa011f19-1245-4ae8-ae2b-8773036498b3-kube-api-access-jfjmw\") pod \"neutron-7fdccbb69d-fng8p\" (UID: \"fa011f19-1245-4ae8-ae2b-8773036498b3\") " pod="openstack-kuttl-tests/neutron-7fdccbb69d-fng8p" Jan 20 17:02:38 crc kubenswrapper[4558]: I0120 17:02:38.386227 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-7fdccbb69d-fng8p" Jan 20 17:02:38 crc kubenswrapper[4558]: I0120 17:02:38.466024 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"2ba42f6a-1ade-45ae-bdc4-117ad2fa866b","Type":"ContainerStarted","Data":"e91b0886ad44950f7bbfb790fe06f0f90a6fc3aaa9d7fe9e510d991406a8e51c"} Jan 20 17:02:38 crc kubenswrapper[4558]: I0120 17:02:38.466370 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"2ba42f6a-1ade-45ae-bdc4-117ad2fa866b","Type":"ContainerStarted","Data":"a818a123ac4745b3c52a6e67fb8633fbe730b34a827cef46353c35fb329810d7"} Jan 20 17:02:38 crc kubenswrapper[4558]: I0120 17:02:38.485895 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/cinder-api-0" podStartSLOduration=2.485876051 podStartE2EDuration="2.485876051s" podCreationTimestamp="2026-01-20 17:02:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:02:38.478589347 +0000 UTC m=+1252.238927313" watchObservedRunningTime="2026-01-20 17:02:38.485876051 +0000 UTC m=+1252.246214017" Jan 20 17:02:38 crc kubenswrapper[4558]: I0120 17:02:38.512095 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:02:38 crc kubenswrapper[4558]: I0120 17:02:38.512232 4558 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 20 17:02:38 crc kubenswrapper[4558]: I0120 17:02:38.521865 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:02:38 crc kubenswrapper[4558]: I0120 17:02:38.893581 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-7fdccbb69d-fng8p"] Jan 20 17:02:38 crc kubenswrapper[4558]: I0120 17:02:38.930507 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/barbican-api-85c96db9fd-dcjxv" Jan 20 17:02:39 crc kubenswrapper[4558]: I0120 17:02:39.046866 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/barbican-api-85c96db9fd-dcjxv" Jan 20 17:02:39 crc kubenswrapper[4558]: I0120 17:02:39.391005 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:02:39 crc kubenswrapper[4558]: I0120 17:02:39.496262 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-7fdccbb69d-fng8p" event={"ID":"fa011f19-1245-4ae8-ae2b-8773036498b3","Type":"ContainerStarted","Data":"edc8f84d18297fb81c87a4195f78806a1f8712a6680f4253cb7972ff04e84213"} Jan 20 17:02:39 crc kubenswrapper[4558]: I0120 17:02:39.496967 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:02:39 crc kubenswrapper[4558]: I0120 17:02:39.496983 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-7fdccbb69d-fng8p" event={"ID":"fa011f19-1245-4ae8-ae2b-8773036498b3","Type":"ContainerStarted","Data":"3911201e3273f3fa00ea218fb0ebd9dc9eae236976ff9415310c50b8be0289ef"} Jan 20 17:02:39 crc kubenswrapper[4558]: I0120 17:02:39.496270 4558 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 20 17:02:39 crc 
kubenswrapper[4558]: I0120 17:02:39.567405 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:02:39 crc kubenswrapper[4558]: I0120 17:02:39.599080 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:02:39 crc kubenswrapper[4558]: I0120 17:02:39.646693 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:02:40 crc kubenswrapper[4558]: I0120 17:02:40.512844 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-7fdccbb69d-fng8p" event={"ID":"fa011f19-1245-4ae8-ae2b-8773036498b3","Type":"ContainerStarted","Data":"a508351390349a4d085a4b101af8917b6baf005aac6d69c3b56e8bb73832583f"} Jan 20 17:02:40 crc kubenswrapper[4558]: I0120 17:02:40.513916 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-scheduler-0" podUID="694530bc-dda1-4895-be8d-90c2f02af6cb" containerName="cinder-scheduler" containerID="cri-o://bef8c06774ffa6560d532facfe7b66342e3ce0fb39349186cc64614d58fb666f" gracePeriod=30 Jan 20 17:02:40 crc kubenswrapper[4558]: I0120 17:02:40.513973 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-scheduler-0" podUID="694530bc-dda1-4895-be8d-90c2f02af6cb" containerName="probe" containerID="cri-o://b84bd6b58ffa917ac3faa7295198afe244eeb4419913377dd962ffcef7a733d2" gracePeriod=30 Jan 20 17:02:40 crc kubenswrapper[4558]: I0120 17:02:40.535226 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/neutron-7fdccbb69d-fng8p" podStartSLOduration=2.535210504 podStartE2EDuration="2.535210504s" podCreationTimestamp="2026-01-20 17:02:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:02:40.528555298 +0000 UTC m=+1254.288893265" watchObservedRunningTime="2026-01-20 17:02:40.535210504 +0000 UTC m=+1254.295548471" Jan 20 17:02:41 crc kubenswrapper[4558]: I0120 17:02:41.186877 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-api-869df765b4-9hk5q"] Jan 20 17:02:41 crc kubenswrapper[4558]: I0120 17:02:41.188316 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-api-869df765b4-9hk5q" Jan 20 17:02:41 crc kubenswrapper[4558]: I0120 17:02:41.189890 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-barbican-internal-svc" Jan 20 17:02:41 crc kubenswrapper[4558]: I0120 17:02:41.190020 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-barbican-public-svc" Jan 20 17:02:41 crc kubenswrapper[4558]: I0120 17:02:41.200684 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-api-869df765b4-9hk5q"] Jan 20 17:02:41 crc kubenswrapper[4558]: I0120 17:02:41.289882 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-86cwc\" (UniqueName: \"kubernetes.io/projected/6c75ee5c-beea-4b1e-b429-91e83a472529-kube-api-access-86cwc\") pod \"barbican-api-869df765b4-9hk5q\" (UID: \"6c75ee5c-beea-4b1e-b429-91e83a472529\") " pod="openstack-kuttl-tests/barbican-api-869df765b4-9hk5q" Jan 20 17:02:41 crc kubenswrapper[4558]: I0120 17:02:41.290045 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6c75ee5c-beea-4b1e-b429-91e83a472529-public-tls-certs\") pod \"barbican-api-869df765b4-9hk5q\" (UID: \"6c75ee5c-beea-4b1e-b429-91e83a472529\") " pod="openstack-kuttl-tests/barbican-api-869df765b4-9hk5q" Jan 20 17:02:41 crc kubenswrapper[4558]: I0120 17:02:41.290261 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6c75ee5c-beea-4b1e-b429-91e83a472529-internal-tls-certs\") pod \"barbican-api-869df765b4-9hk5q\" (UID: \"6c75ee5c-beea-4b1e-b429-91e83a472529\") " pod="openstack-kuttl-tests/barbican-api-869df765b4-9hk5q" Jan 20 17:02:41 crc kubenswrapper[4558]: I0120 17:02:41.290303 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6c75ee5c-beea-4b1e-b429-91e83a472529-logs\") pod \"barbican-api-869df765b4-9hk5q\" (UID: \"6c75ee5c-beea-4b1e-b429-91e83a472529\") " pod="openstack-kuttl-tests/barbican-api-869df765b4-9hk5q" Jan 20 17:02:41 crc kubenswrapper[4558]: I0120 17:02:41.290335 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c75ee5c-beea-4b1e-b429-91e83a472529-combined-ca-bundle\") pod \"barbican-api-869df765b4-9hk5q\" (UID: \"6c75ee5c-beea-4b1e-b429-91e83a472529\") " pod="openstack-kuttl-tests/barbican-api-869df765b4-9hk5q" Jan 20 17:02:41 crc kubenswrapper[4558]: I0120 17:02:41.290360 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6c75ee5c-beea-4b1e-b429-91e83a472529-config-data-custom\") pod \"barbican-api-869df765b4-9hk5q\" (UID: \"6c75ee5c-beea-4b1e-b429-91e83a472529\") " pod="openstack-kuttl-tests/barbican-api-869df765b4-9hk5q" Jan 20 17:02:41 crc kubenswrapper[4558]: I0120 17:02:41.290451 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c75ee5c-beea-4b1e-b429-91e83a472529-config-data\") pod \"barbican-api-869df765b4-9hk5q\" (UID: \"6c75ee5c-beea-4b1e-b429-91e83a472529\") " 
pod="openstack-kuttl-tests/barbican-api-869df765b4-9hk5q" Jan 20 17:02:41 crc kubenswrapper[4558]: I0120 17:02:41.395564 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6c75ee5c-beea-4b1e-b429-91e83a472529-internal-tls-certs\") pod \"barbican-api-869df765b4-9hk5q\" (UID: \"6c75ee5c-beea-4b1e-b429-91e83a472529\") " pod="openstack-kuttl-tests/barbican-api-869df765b4-9hk5q" Jan 20 17:02:41 crc kubenswrapper[4558]: I0120 17:02:41.395606 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6c75ee5c-beea-4b1e-b429-91e83a472529-logs\") pod \"barbican-api-869df765b4-9hk5q\" (UID: \"6c75ee5c-beea-4b1e-b429-91e83a472529\") " pod="openstack-kuttl-tests/barbican-api-869df765b4-9hk5q" Jan 20 17:02:41 crc kubenswrapper[4558]: I0120 17:02:41.395657 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c75ee5c-beea-4b1e-b429-91e83a472529-combined-ca-bundle\") pod \"barbican-api-869df765b4-9hk5q\" (UID: \"6c75ee5c-beea-4b1e-b429-91e83a472529\") " pod="openstack-kuttl-tests/barbican-api-869df765b4-9hk5q" Jan 20 17:02:41 crc kubenswrapper[4558]: I0120 17:02:41.395678 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6c75ee5c-beea-4b1e-b429-91e83a472529-config-data-custom\") pod \"barbican-api-869df765b4-9hk5q\" (UID: \"6c75ee5c-beea-4b1e-b429-91e83a472529\") " pod="openstack-kuttl-tests/barbican-api-869df765b4-9hk5q" Jan 20 17:02:41 crc kubenswrapper[4558]: I0120 17:02:41.396070 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6c75ee5c-beea-4b1e-b429-91e83a472529-logs\") pod \"barbican-api-869df765b4-9hk5q\" (UID: \"6c75ee5c-beea-4b1e-b429-91e83a472529\") " pod="openstack-kuttl-tests/barbican-api-869df765b4-9hk5q" Jan 20 17:02:41 crc kubenswrapper[4558]: I0120 17:02:41.396407 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c75ee5c-beea-4b1e-b429-91e83a472529-config-data\") pod \"barbican-api-869df765b4-9hk5q\" (UID: \"6c75ee5c-beea-4b1e-b429-91e83a472529\") " pod="openstack-kuttl-tests/barbican-api-869df765b4-9hk5q" Jan 20 17:02:41 crc kubenswrapper[4558]: I0120 17:02:41.396939 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-86cwc\" (UniqueName: \"kubernetes.io/projected/6c75ee5c-beea-4b1e-b429-91e83a472529-kube-api-access-86cwc\") pod \"barbican-api-869df765b4-9hk5q\" (UID: \"6c75ee5c-beea-4b1e-b429-91e83a472529\") " pod="openstack-kuttl-tests/barbican-api-869df765b4-9hk5q" Jan 20 17:02:41 crc kubenswrapper[4558]: I0120 17:02:41.397139 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6c75ee5c-beea-4b1e-b429-91e83a472529-public-tls-certs\") pod \"barbican-api-869df765b4-9hk5q\" (UID: \"6c75ee5c-beea-4b1e-b429-91e83a472529\") " pod="openstack-kuttl-tests/barbican-api-869df765b4-9hk5q" Jan 20 17:02:41 crc kubenswrapper[4558]: I0120 17:02:41.401717 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6c75ee5c-beea-4b1e-b429-91e83a472529-public-tls-certs\") pod \"barbican-api-869df765b4-9hk5q\" (UID: 
\"6c75ee5c-beea-4b1e-b429-91e83a472529\") " pod="openstack-kuttl-tests/barbican-api-869df765b4-9hk5q" Jan 20 17:02:41 crc kubenswrapper[4558]: I0120 17:02:41.401783 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6c75ee5c-beea-4b1e-b429-91e83a472529-config-data-custom\") pod \"barbican-api-869df765b4-9hk5q\" (UID: \"6c75ee5c-beea-4b1e-b429-91e83a472529\") " pod="openstack-kuttl-tests/barbican-api-869df765b4-9hk5q" Jan 20 17:02:41 crc kubenswrapper[4558]: I0120 17:02:41.402716 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6c75ee5c-beea-4b1e-b429-91e83a472529-internal-tls-certs\") pod \"barbican-api-869df765b4-9hk5q\" (UID: \"6c75ee5c-beea-4b1e-b429-91e83a472529\") " pod="openstack-kuttl-tests/barbican-api-869df765b4-9hk5q" Jan 20 17:02:41 crc kubenswrapper[4558]: I0120 17:02:41.407672 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c75ee5c-beea-4b1e-b429-91e83a472529-config-data\") pod \"barbican-api-869df765b4-9hk5q\" (UID: \"6c75ee5c-beea-4b1e-b429-91e83a472529\") " pod="openstack-kuttl-tests/barbican-api-869df765b4-9hk5q" Jan 20 17:02:41 crc kubenswrapper[4558]: I0120 17:02:41.410674 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c75ee5c-beea-4b1e-b429-91e83a472529-combined-ca-bundle\") pod \"barbican-api-869df765b4-9hk5q\" (UID: \"6c75ee5c-beea-4b1e-b429-91e83a472529\") " pod="openstack-kuttl-tests/barbican-api-869df765b4-9hk5q" Jan 20 17:02:41 crc kubenswrapper[4558]: I0120 17:02:41.418584 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-86cwc\" (UniqueName: \"kubernetes.io/projected/6c75ee5c-beea-4b1e-b429-91e83a472529-kube-api-access-86cwc\") pod \"barbican-api-869df765b4-9hk5q\" (UID: \"6c75ee5c-beea-4b1e-b429-91e83a472529\") " pod="openstack-kuttl-tests/barbican-api-869df765b4-9hk5q" Jan 20 17:02:41 crc kubenswrapper[4558]: I0120 17:02:41.520923 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:02:41 crc kubenswrapper[4558]: I0120 17:02:41.522318 4558 generic.go:334] "Generic (PLEG): container finished" podID="694530bc-dda1-4895-be8d-90c2f02af6cb" containerID="b84bd6b58ffa917ac3faa7295198afe244eeb4419913377dd962ffcef7a733d2" exitCode=0 Jan 20 17:02:41 crc kubenswrapper[4558]: I0120 17:02:41.522367 4558 generic.go:334] "Generic (PLEG): container finished" podID="694530bc-dda1-4895-be8d-90c2f02af6cb" containerID="bef8c06774ffa6560d532facfe7b66342e3ce0fb39349186cc64614d58fb666f" exitCode=0 Jan 20 17:02:41 crc kubenswrapper[4558]: I0120 17:02:41.522413 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"694530bc-dda1-4895-be8d-90c2f02af6cb","Type":"ContainerDied","Data":"b84bd6b58ffa917ac3faa7295198afe244eeb4419913377dd962ffcef7a733d2"} Jan 20 17:02:41 crc kubenswrapper[4558]: I0120 17:02:41.522446 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"694530bc-dda1-4895-be8d-90c2f02af6cb","Type":"ContainerDied","Data":"bef8c06774ffa6560d532facfe7b66342e3ce0fb39349186cc64614d58fb666f"} Jan 20 17:02:41 crc kubenswrapper[4558]: I0120 17:02:41.522482 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"694530bc-dda1-4895-be8d-90c2f02af6cb","Type":"ContainerDied","Data":"6227332080e50e94d7418909eec1841d572760ab845e16704bcfe9ff6c0e352c"} Jan 20 17:02:41 crc kubenswrapper[4558]: I0120 17:02:41.522503 4558 scope.go:117] "RemoveContainer" containerID="b84bd6b58ffa917ac3faa7295198afe244eeb4419913377dd962ffcef7a733d2" Jan 20 17:02:41 crc kubenswrapper[4558]: I0120 17:02:41.523318 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/neutron-7fdccbb69d-fng8p" Jan 20 17:02:41 crc kubenswrapper[4558]: I0120 17:02:41.554941 4558 scope.go:117] "RemoveContainer" containerID="bef8c06774ffa6560d532facfe7b66342e3ce0fb39349186cc64614d58fb666f" Jan 20 17:02:41 crc kubenswrapper[4558]: I0120 17:02:41.558183 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-api-869df765b4-9hk5q" Jan 20 17:02:41 crc kubenswrapper[4558]: I0120 17:02:41.575192 4558 scope.go:117] "RemoveContainer" containerID="b84bd6b58ffa917ac3faa7295198afe244eeb4419913377dd962ffcef7a733d2" Jan 20 17:02:41 crc kubenswrapper[4558]: E0120 17:02:41.575643 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b84bd6b58ffa917ac3faa7295198afe244eeb4419913377dd962ffcef7a733d2\": container with ID starting with b84bd6b58ffa917ac3faa7295198afe244eeb4419913377dd962ffcef7a733d2 not found: ID does not exist" containerID="b84bd6b58ffa917ac3faa7295198afe244eeb4419913377dd962ffcef7a733d2" Jan 20 17:02:41 crc kubenswrapper[4558]: I0120 17:02:41.575684 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b84bd6b58ffa917ac3faa7295198afe244eeb4419913377dd962ffcef7a733d2"} err="failed to get container status \"b84bd6b58ffa917ac3faa7295198afe244eeb4419913377dd962ffcef7a733d2\": rpc error: code = NotFound desc = could not find container \"b84bd6b58ffa917ac3faa7295198afe244eeb4419913377dd962ffcef7a733d2\": container with ID starting with b84bd6b58ffa917ac3faa7295198afe244eeb4419913377dd962ffcef7a733d2 not found: ID does not exist" Jan 20 17:02:41 crc kubenswrapper[4558]: I0120 17:02:41.575713 4558 scope.go:117] "RemoveContainer" containerID="bef8c06774ffa6560d532facfe7b66342e3ce0fb39349186cc64614d58fb666f" Jan 20 17:02:41 crc kubenswrapper[4558]: E0120 17:02:41.575997 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bef8c06774ffa6560d532facfe7b66342e3ce0fb39349186cc64614d58fb666f\": container with ID starting with bef8c06774ffa6560d532facfe7b66342e3ce0fb39349186cc64614d58fb666f not found: ID does not exist" containerID="bef8c06774ffa6560d532facfe7b66342e3ce0fb39349186cc64614d58fb666f" Jan 20 17:02:41 crc kubenswrapper[4558]: I0120 17:02:41.576031 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bef8c06774ffa6560d532facfe7b66342e3ce0fb39349186cc64614d58fb666f"} err="failed to get container status \"bef8c06774ffa6560d532facfe7b66342e3ce0fb39349186cc64614d58fb666f\": rpc error: code = NotFound desc = could not find container \"bef8c06774ffa6560d532facfe7b66342e3ce0fb39349186cc64614d58fb666f\": container with ID starting with bef8c06774ffa6560d532facfe7b66342e3ce0fb39349186cc64614d58fb666f not found: ID does not exist" Jan 20 17:02:41 crc kubenswrapper[4558]: I0120 17:02:41.576057 4558 scope.go:117] "RemoveContainer" containerID="b84bd6b58ffa917ac3faa7295198afe244eeb4419913377dd962ffcef7a733d2" Jan 20 17:02:41 crc kubenswrapper[4558]: I0120 17:02:41.576625 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b84bd6b58ffa917ac3faa7295198afe244eeb4419913377dd962ffcef7a733d2"} err="failed to get container status \"b84bd6b58ffa917ac3faa7295198afe244eeb4419913377dd962ffcef7a733d2\": rpc error: code = NotFound desc = could not find container \"b84bd6b58ffa917ac3faa7295198afe244eeb4419913377dd962ffcef7a733d2\": container with ID starting with b84bd6b58ffa917ac3faa7295198afe244eeb4419913377dd962ffcef7a733d2 not found: ID does not exist" Jan 20 17:02:41 crc kubenswrapper[4558]: I0120 17:02:41.576649 4558 scope.go:117] "RemoveContainer" containerID="bef8c06774ffa6560d532facfe7b66342e3ce0fb39349186cc64614d58fb666f" Jan 20 17:02:41 crc kubenswrapper[4558]: I0120 
17:02:41.576864 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bef8c06774ffa6560d532facfe7b66342e3ce0fb39349186cc64614d58fb666f"} err="failed to get container status \"bef8c06774ffa6560d532facfe7b66342e3ce0fb39349186cc64614d58fb666f\": rpc error: code = NotFound desc = could not find container \"bef8c06774ffa6560d532facfe7b66342e3ce0fb39349186cc64614d58fb666f\": container with ID starting with bef8c06774ffa6560d532facfe7b66342e3ce0fb39349186cc64614d58fb666f not found: ID does not exist" Jan 20 17:02:41 crc kubenswrapper[4558]: I0120 17:02:41.600009 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/694530bc-dda1-4895-be8d-90c2f02af6cb-config-data\") pod \"694530bc-dda1-4895-be8d-90c2f02af6cb\" (UID: \"694530bc-dda1-4895-be8d-90c2f02af6cb\") " Jan 20 17:02:41 crc kubenswrapper[4558]: I0120 17:02:41.600088 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gsqqb\" (UniqueName: \"kubernetes.io/projected/694530bc-dda1-4895-be8d-90c2f02af6cb-kube-api-access-gsqqb\") pod \"694530bc-dda1-4895-be8d-90c2f02af6cb\" (UID: \"694530bc-dda1-4895-be8d-90c2f02af6cb\") " Jan 20 17:02:41 crc kubenswrapper[4558]: I0120 17:02:41.600142 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/694530bc-dda1-4895-be8d-90c2f02af6cb-scripts\") pod \"694530bc-dda1-4895-be8d-90c2f02af6cb\" (UID: \"694530bc-dda1-4895-be8d-90c2f02af6cb\") " Jan 20 17:02:41 crc kubenswrapper[4558]: I0120 17:02:41.600300 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/694530bc-dda1-4895-be8d-90c2f02af6cb-config-data-custom\") pod \"694530bc-dda1-4895-be8d-90c2f02af6cb\" (UID: \"694530bc-dda1-4895-be8d-90c2f02af6cb\") " Jan 20 17:02:41 crc kubenswrapper[4558]: I0120 17:02:41.600344 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/694530bc-dda1-4895-be8d-90c2f02af6cb-etc-machine-id\") pod \"694530bc-dda1-4895-be8d-90c2f02af6cb\" (UID: \"694530bc-dda1-4895-be8d-90c2f02af6cb\") " Jan 20 17:02:41 crc kubenswrapper[4558]: I0120 17:02:41.600380 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/694530bc-dda1-4895-be8d-90c2f02af6cb-combined-ca-bundle\") pod \"694530bc-dda1-4895-be8d-90c2f02af6cb\" (UID: \"694530bc-dda1-4895-be8d-90c2f02af6cb\") " Jan 20 17:02:41 crc kubenswrapper[4558]: I0120 17:02:41.606751 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/694530bc-dda1-4895-be8d-90c2f02af6cb-kube-api-access-gsqqb" (OuterVolumeSpecName: "kube-api-access-gsqqb") pod "694530bc-dda1-4895-be8d-90c2f02af6cb" (UID: "694530bc-dda1-4895-be8d-90c2f02af6cb"). InnerVolumeSpecName "kube-api-access-gsqqb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:02:41 crc kubenswrapper[4558]: I0120 17:02:41.606896 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/694530bc-dda1-4895-be8d-90c2f02af6cb-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "694530bc-dda1-4895-be8d-90c2f02af6cb" (UID: "694530bc-dda1-4895-be8d-90c2f02af6cb"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:02:41 crc kubenswrapper[4558]: I0120 17:02:41.609003 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/694530bc-dda1-4895-be8d-90c2f02af6cb-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "694530bc-dda1-4895-be8d-90c2f02af6cb" (UID: "694530bc-dda1-4895-be8d-90c2f02af6cb"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:02:41 crc kubenswrapper[4558]: I0120 17:02:41.609939 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/694530bc-dda1-4895-be8d-90c2f02af6cb-scripts" (OuterVolumeSpecName: "scripts") pod "694530bc-dda1-4895-be8d-90c2f02af6cb" (UID: "694530bc-dda1-4895-be8d-90c2f02af6cb"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:02:41 crc kubenswrapper[4558]: I0120 17:02:41.647112 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/694530bc-dda1-4895-be8d-90c2f02af6cb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "694530bc-dda1-4895-be8d-90c2f02af6cb" (UID: "694530bc-dda1-4895-be8d-90c2f02af6cb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:02:41 crc kubenswrapper[4558]: I0120 17:02:41.702537 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gsqqb\" (UniqueName: \"kubernetes.io/projected/694530bc-dda1-4895-be8d-90c2f02af6cb-kube-api-access-gsqqb\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:41 crc kubenswrapper[4558]: I0120 17:02:41.702572 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/694530bc-dda1-4895-be8d-90c2f02af6cb-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:41 crc kubenswrapper[4558]: I0120 17:02:41.702584 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/694530bc-dda1-4895-be8d-90c2f02af6cb-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:41 crc kubenswrapper[4558]: I0120 17:02:41.702597 4558 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/694530bc-dda1-4895-be8d-90c2f02af6cb-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:41 crc kubenswrapper[4558]: I0120 17:02:41.702607 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/694530bc-dda1-4895-be8d-90c2f02af6cb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:41 crc kubenswrapper[4558]: I0120 17:02:41.705270 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/694530bc-dda1-4895-be8d-90c2f02af6cb-config-data" (OuterVolumeSpecName: "config-data") pod "694530bc-dda1-4895-be8d-90c2f02af6cb" (UID: "694530bc-dda1-4895-be8d-90c2f02af6cb"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:02:41 crc kubenswrapper[4558]: I0120 17:02:41.810155 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/694530bc-dda1-4895-be8d-90c2f02af6cb-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:41 crc kubenswrapper[4558]: I0120 17:02:41.995742 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-api-869df765b4-9hk5q"] Jan 20 17:02:41 crc kubenswrapper[4558]: W0120 17:02:41.999005 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6c75ee5c_beea_4b1e_b429_91e83a472529.slice/crio-cda8462cafc6b4135f29e794c8f02dc33b9c301c7a707bb88a79a986d70f78e6 WatchSource:0}: Error finding container cda8462cafc6b4135f29e794c8f02dc33b9c301c7a707bb88a79a986d70f78e6: Status 404 returned error can't find the container with id cda8462cafc6b4135f29e794c8f02dc33b9c301c7a707bb88a79a986d70f78e6 Jan 20 17:02:42 crc kubenswrapper[4558]: I0120 17:02:42.563150 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:02:42 crc kubenswrapper[4558]: I0120 17:02:42.575672 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/barbican-api-869df765b4-9hk5q" Jan 20 17:02:42 crc kubenswrapper[4558]: I0120 17:02:42.575708 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-869df765b4-9hk5q" event={"ID":"6c75ee5c-beea-4b1e-b429-91e83a472529","Type":"ContainerStarted","Data":"be2a0248e5982920b07bb828a104cefcb46f9233f457cf95302aceef0c8d8b6f"} Jan 20 17:02:42 crc kubenswrapper[4558]: I0120 17:02:42.575730 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/barbican-api-869df765b4-9hk5q" Jan 20 17:02:42 crc kubenswrapper[4558]: I0120 17:02:42.575741 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-869df765b4-9hk5q" event={"ID":"6c75ee5c-beea-4b1e-b429-91e83a472529","Type":"ContainerStarted","Data":"aa3653d2225383bf934da0f655ed12217fcafbbb93b425d7a7cf38609e8256e9"} Jan 20 17:02:42 crc kubenswrapper[4558]: I0120 17:02:42.575753 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-869df765b4-9hk5q" event={"ID":"6c75ee5c-beea-4b1e-b429-91e83a472529","Type":"ContainerStarted","Data":"cda8462cafc6b4135f29e794c8f02dc33b9c301c7a707bb88a79a986d70f78e6"} Jan 20 17:02:42 crc kubenswrapper[4558]: I0120 17:02:42.633060 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-api-869df765b4-9hk5q" podStartSLOduration=1.633038486 podStartE2EDuration="1.633038486s" podCreationTimestamp="2026-01-20 17:02:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:02:42.596579851 +0000 UTC m=+1256.356917818" watchObservedRunningTime="2026-01-20 17:02:42.633038486 +0000 UTC m=+1256.393376454" Jan 20 17:02:42 crc kubenswrapper[4558]: I0120 17:02:42.636018 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:02:42 crc kubenswrapper[4558]: I0120 17:02:42.649718 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:02:42 crc kubenswrapper[4558]: I0120 
17:02:42.662411 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:02:42 crc kubenswrapper[4558]: E0120 17:02:42.662971 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="694530bc-dda1-4895-be8d-90c2f02af6cb" containerName="cinder-scheduler" Jan 20 17:02:42 crc kubenswrapper[4558]: I0120 17:02:42.662993 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="694530bc-dda1-4895-be8d-90c2f02af6cb" containerName="cinder-scheduler" Jan 20 17:02:42 crc kubenswrapper[4558]: E0120 17:02:42.663020 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="694530bc-dda1-4895-be8d-90c2f02af6cb" containerName="probe" Jan 20 17:02:42 crc kubenswrapper[4558]: I0120 17:02:42.663027 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="694530bc-dda1-4895-be8d-90c2f02af6cb" containerName="probe" Jan 20 17:02:42 crc kubenswrapper[4558]: I0120 17:02:42.663316 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="694530bc-dda1-4895-be8d-90c2f02af6cb" containerName="cinder-scheduler" Jan 20 17:02:42 crc kubenswrapper[4558]: I0120 17:02:42.663372 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="694530bc-dda1-4895-be8d-90c2f02af6cb" containerName="probe" Jan 20 17:02:42 crc kubenswrapper[4558]: I0120 17:02:42.664610 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:02:42 crc kubenswrapper[4558]: I0120 17:02:42.666896 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-scheduler-config-data" Jan 20 17:02:42 crc kubenswrapper[4558]: I0120 17:02:42.683460 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:02:42 crc kubenswrapper[4558]: I0120 17:02:42.728088 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f9ceb49-977a-47b3-a1f3-10d68c96ab0f-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"1f9ceb49-977a-47b3-a1f3-10d68c96ab0f\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:02:42 crc kubenswrapper[4558]: I0120 17:02:42.728158 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hh87b\" (UniqueName: \"kubernetes.io/projected/1f9ceb49-977a-47b3-a1f3-10d68c96ab0f-kube-api-access-hh87b\") pod \"cinder-scheduler-0\" (UID: \"1f9ceb49-977a-47b3-a1f3-10d68c96ab0f\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:02:42 crc kubenswrapper[4558]: I0120 17:02:42.728282 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1f9ceb49-977a-47b3-a1f3-10d68c96ab0f-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"1f9ceb49-977a-47b3-a1f3-10d68c96ab0f\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:02:42 crc kubenswrapper[4558]: I0120 17:02:42.728338 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1f9ceb49-977a-47b3-a1f3-10d68c96ab0f-config-data\") pod \"cinder-scheduler-0\" (UID: \"1f9ceb49-977a-47b3-a1f3-10d68c96ab0f\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:02:42 crc kubenswrapper[4558]: I0120 17:02:42.728357 4558 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1f9ceb49-977a-47b3-a1f3-10d68c96ab0f-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"1f9ceb49-977a-47b3-a1f3-10d68c96ab0f\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:02:42 crc kubenswrapper[4558]: I0120 17:02:42.728398 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1f9ceb49-977a-47b3-a1f3-10d68c96ab0f-scripts\") pod \"cinder-scheduler-0\" (UID: \"1f9ceb49-977a-47b3-a1f3-10d68c96ab0f\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:02:42 crc kubenswrapper[4558]: I0120 17:02:42.830971 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1f9ceb49-977a-47b3-a1f3-10d68c96ab0f-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"1f9ceb49-977a-47b3-a1f3-10d68c96ab0f\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:02:42 crc kubenswrapper[4558]: I0120 17:02:42.831102 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1f9ceb49-977a-47b3-a1f3-10d68c96ab0f-scripts\") pod \"cinder-scheduler-0\" (UID: \"1f9ceb49-977a-47b3-a1f3-10d68c96ab0f\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:02:42 crc kubenswrapper[4558]: I0120 17:02:42.831376 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f9ceb49-977a-47b3-a1f3-10d68c96ab0f-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"1f9ceb49-977a-47b3-a1f3-10d68c96ab0f\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:02:42 crc kubenswrapper[4558]: I0120 17:02:42.831489 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hh87b\" (UniqueName: \"kubernetes.io/projected/1f9ceb49-977a-47b3-a1f3-10d68c96ab0f-kube-api-access-hh87b\") pod \"cinder-scheduler-0\" (UID: \"1f9ceb49-977a-47b3-a1f3-10d68c96ab0f\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:02:42 crc kubenswrapper[4558]: I0120 17:02:42.831598 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1f9ceb49-977a-47b3-a1f3-10d68c96ab0f-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"1f9ceb49-977a-47b3-a1f3-10d68c96ab0f\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:02:42 crc kubenswrapper[4558]: I0120 17:02:42.831662 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1f9ceb49-977a-47b3-a1f3-10d68c96ab0f-config-data\") pod \"cinder-scheduler-0\" (UID: \"1f9ceb49-977a-47b3-a1f3-10d68c96ab0f\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:02:42 crc kubenswrapper[4558]: I0120 17:02:42.831856 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1f9ceb49-977a-47b3-a1f3-10d68c96ab0f-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"1f9ceb49-977a-47b3-a1f3-10d68c96ab0f\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:02:42 crc kubenswrapper[4558]: I0120 17:02:42.836467 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/1f9ceb49-977a-47b3-a1f3-10d68c96ab0f-config-data\") pod \"cinder-scheduler-0\" (UID: \"1f9ceb49-977a-47b3-a1f3-10d68c96ab0f\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:02:42 crc kubenswrapper[4558]: I0120 17:02:42.839931 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f9ceb49-977a-47b3-a1f3-10d68c96ab0f-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"1f9ceb49-977a-47b3-a1f3-10d68c96ab0f\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:02:42 crc kubenswrapper[4558]: I0120 17:02:42.841527 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1f9ceb49-977a-47b3-a1f3-10d68c96ab0f-scripts\") pod \"cinder-scheduler-0\" (UID: \"1f9ceb49-977a-47b3-a1f3-10d68c96ab0f\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:02:42 crc kubenswrapper[4558]: I0120 17:02:42.842031 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1f9ceb49-977a-47b3-a1f3-10d68c96ab0f-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"1f9ceb49-977a-47b3-a1f3-10d68c96ab0f\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:02:42 crc kubenswrapper[4558]: I0120 17:02:42.854377 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hh87b\" (UniqueName: \"kubernetes.io/projected/1f9ceb49-977a-47b3-a1f3-10d68c96ab0f-kube-api-access-hh87b\") pod \"cinder-scheduler-0\" (UID: \"1f9ceb49-977a-47b3-a1f3-10d68c96ab0f\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:02:42 crc kubenswrapper[4558]: I0120 17:02:42.993182 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:02:43 crc kubenswrapper[4558]: I0120 17:02:43.392487 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:02:43 crc kubenswrapper[4558]: I0120 17:02:43.581804 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"1f9ceb49-977a-47b3-a1f3-10d68c96ab0f","Type":"ContainerStarted","Data":"6e9fb4ffddd82ffd7a6eb8aed3420075d665cc16aa8a41ff92116bdd4299c6a4"} Jan 20 17:02:44 crc kubenswrapper[4558]: I0120 17:02:44.575438 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="694530bc-dda1-4895-be8d-90c2f02af6cb" path="/var/lib/kubelet/pods/694530bc-dda1-4895-be8d-90c2f02af6cb/volumes" Jan 20 17:02:44 crc kubenswrapper[4558]: I0120 17:02:44.591896 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"1f9ceb49-977a-47b3-a1f3-10d68c96ab0f","Type":"ContainerStarted","Data":"26bbc4f2aaf805eb72b1daca0bda5b9b7dad475314c7f9cbfdfce43f6fb9bfbc"} Jan 20 17:02:44 crc kubenswrapper[4558]: I0120 17:02:44.591940 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"1f9ceb49-977a-47b3-a1f3-10d68c96ab0f","Type":"ContainerStarted","Data":"ea575d2847a6cae8660fd45625a0be98895c2c6f684014b389a8ca1cc29ab736"} Jan 20 17:02:44 crc kubenswrapper[4558]: I0120 17:02:44.608692 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/cinder-scheduler-0" podStartSLOduration=2.6086775859999998 podStartE2EDuration="2.608677586s" podCreationTimestamp="2026-01-20 17:02:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:02:44.604410241 +0000 UTC m=+1258.364748209" watchObservedRunningTime="2026-01-20 17:02:44.608677586 +0000 UTC m=+1258.369015553" Jan 20 17:02:47 crc kubenswrapper[4558]: I0120 17:02:47.993639 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:02:48 crc kubenswrapper[4558]: I0120 17:02:48.311018 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:02:52 crc kubenswrapper[4558]: I0120 17:02:52.656036 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/barbican-api-869df765b4-9hk5q" Jan 20 17:02:52 crc kubenswrapper[4558]: I0120 17:02:52.675630 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/barbican-api-869df765b4-9hk5q" Jan 20 17:02:52 crc kubenswrapper[4558]: I0120 17:02:52.725431 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-api-85c96db9fd-dcjxv"] Jan 20 17:02:52 crc kubenswrapper[4558]: I0120 17:02:52.725970 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-api-85c96db9fd-dcjxv" podUID="5ae48763-b66b-4038-855d-f700a2dbe040" containerName="barbican-api-log" containerID="cri-o://f5cc513f503187d3adffbb539aaa9f0f0f46917cc694f0a826736be7e143eb27" gracePeriod=30 Jan 20 17:02:52 crc kubenswrapper[4558]: I0120 17:02:52.726060 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-api-85c96db9fd-dcjxv" 
podUID="5ae48763-b66b-4038-855d-f700a2dbe040" containerName="barbican-api" containerID="cri-o://b6e7adc6c85b1c4072ddc0ea64fb22919e97a66275f2f29b604d26d6e0a3864f" gracePeriod=30 Jan 20 17:02:53 crc kubenswrapper[4558]: I0120 17:02:53.193236 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:02:53 crc kubenswrapper[4558]: I0120 17:02:53.662998 4558 generic.go:334] "Generic (PLEG): container finished" podID="5ae48763-b66b-4038-855d-f700a2dbe040" containerID="f5cc513f503187d3adffbb539aaa9f0f0f46917cc694f0a826736be7e143eb27" exitCode=143 Jan 20 17:02:53 crc kubenswrapper[4558]: I0120 17:02:53.663027 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-85c96db9fd-dcjxv" event={"ID":"5ae48763-b66b-4038-855d-f700a2dbe040","Type":"ContainerDied","Data":"f5cc513f503187d3adffbb539aaa9f0f0f46917cc694f0a826736be7e143eb27"} Jan 20 17:02:53 crc kubenswrapper[4558]: I0120 17:02:53.850865 4558 scope.go:117] "RemoveContainer" containerID="ff1ae6eb7658764a108f757416b422839e42b8f1bcf59e8c59c55baa302f8c10" Jan 20 17:02:53 crc kubenswrapper[4558]: I0120 17:02:53.885734 4558 scope.go:117] "RemoveContainer" containerID="4a2010948788fe3337e1ffdf2267cc309f34c059666b5da0de06d4433df1c518" Jan 20 17:02:53 crc kubenswrapper[4558]: I0120 17:02:53.915844 4558 scope.go:117] "RemoveContainer" containerID="9836d70012398b9d97dedfe1f355a7b969edd38aa09ecfb16ea977a111566eb7" Jan 20 17:02:53 crc kubenswrapper[4558]: I0120 17:02:53.943965 4558 scope.go:117] "RemoveContainer" containerID="e7af03c31a0a54d7310b8a8e36f1a031462a9a3a9c87a532af17774e273cf04c" Jan 20 17:02:53 crc kubenswrapper[4558]: I0120 17:02:53.986893 4558 scope.go:117] "RemoveContainer" containerID="81a298964f3ebaf7af8b941cde650e1463fc246ad00310e6cefc05c9e48309b1" Jan 20 17:02:54 crc kubenswrapper[4558]: I0120 17:02:54.006650 4558 scope.go:117] "RemoveContainer" containerID="36d3c626c341df4133e753cd213edab18c0255979df43a32b39a7004eacff55d" Jan 20 17:02:54 crc kubenswrapper[4558]: I0120 17:02:54.031562 4558 scope.go:117] "RemoveContainer" containerID="b196841960ac1c7f6a7c02f8f3bfbe22cecef21da30a06b127c88a4183e3b847" Jan 20 17:02:54 crc kubenswrapper[4558]: I0120 17:02:54.052918 4558 scope.go:117] "RemoveContainer" containerID="5109d61a58515d57c16c7eef2904c3a44f55c305b1540fae92275e63c46df394" Jan 20 17:02:54 crc kubenswrapper[4558]: I0120 17:02:54.071581 4558 scope.go:117] "RemoveContainer" containerID="4c8dd757b611d7337e141eef01ee239352334771cae4ea64722f647fea518f2e" Jan 20 17:02:56 crc kubenswrapper[4558]: I0120 17:02:56.208926 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-api-85c96db9fd-dcjxv" Jan 20 17:02:56 crc kubenswrapper[4558]: I0120 17:02:56.274741 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-52t9z\" (UniqueName: \"kubernetes.io/projected/5ae48763-b66b-4038-855d-f700a2dbe040-kube-api-access-52t9z\") pod \"5ae48763-b66b-4038-855d-f700a2dbe040\" (UID: \"5ae48763-b66b-4038-855d-f700a2dbe040\") " Jan 20 17:02:56 crc kubenswrapper[4558]: I0120 17:02:56.275009 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ae48763-b66b-4038-855d-f700a2dbe040-config-data\") pod \"5ae48763-b66b-4038-855d-f700a2dbe040\" (UID: \"5ae48763-b66b-4038-855d-f700a2dbe040\") " Jan 20 17:02:56 crc kubenswrapper[4558]: I0120 17:02:56.275028 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5ae48763-b66b-4038-855d-f700a2dbe040-config-data-custom\") pod \"5ae48763-b66b-4038-855d-f700a2dbe040\" (UID: \"5ae48763-b66b-4038-855d-f700a2dbe040\") " Jan 20 17:02:56 crc kubenswrapper[4558]: I0120 17:02:56.275046 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5ae48763-b66b-4038-855d-f700a2dbe040-logs\") pod \"5ae48763-b66b-4038-855d-f700a2dbe040\" (UID: \"5ae48763-b66b-4038-855d-f700a2dbe040\") " Jan 20 17:02:56 crc kubenswrapper[4558]: I0120 17:02:56.275467 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5ae48763-b66b-4038-855d-f700a2dbe040-logs" (OuterVolumeSpecName: "logs") pod "5ae48763-b66b-4038-855d-f700a2dbe040" (UID: "5ae48763-b66b-4038-855d-f700a2dbe040"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:02:56 crc kubenswrapper[4558]: I0120 17:02:56.275114 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ae48763-b66b-4038-855d-f700a2dbe040-combined-ca-bundle\") pod \"5ae48763-b66b-4038-855d-f700a2dbe040\" (UID: \"5ae48763-b66b-4038-855d-f700a2dbe040\") " Jan 20 17:02:56 crc kubenswrapper[4558]: I0120 17:02:56.276009 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5ae48763-b66b-4038-855d-f700a2dbe040-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:56 crc kubenswrapper[4558]: I0120 17:02:56.279747 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ae48763-b66b-4038-855d-f700a2dbe040-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "5ae48763-b66b-4038-855d-f700a2dbe040" (UID: "5ae48763-b66b-4038-855d-f700a2dbe040"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:02:56 crc kubenswrapper[4558]: I0120 17:02:56.280367 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5ae48763-b66b-4038-855d-f700a2dbe040-kube-api-access-52t9z" (OuterVolumeSpecName: "kube-api-access-52t9z") pod "5ae48763-b66b-4038-855d-f700a2dbe040" (UID: "5ae48763-b66b-4038-855d-f700a2dbe040"). InnerVolumeSpecName "kube-api-access-52t9z". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:02:56 crc kubenswrapper[4558]: I0120 17:02:56.295035 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ae48763-b66b-4038-855d-f700a2dbe040-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5ae48763-b66b-4038-855d-f700a2dbe040" (UID: "5ae48763-b66b-4038-855d-f700a2dbe040"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:02:56 crc kubenswrapper[4558]: I0120 17:02:56.310445 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ae48763-b66b-4038-855d-f700a2dbe040-config-data" (OuterVolumeSpecName: "config-data") pod "5ae48763-b66b-4038-855d-f700a2dbe040" (UID: "5ae48763-b66b-4038-855d-f700a2dbe040"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:02:56 crc kubenswrapper[4558]: I0120 17:02:56.377409 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-52t9z\" (UniqueName: \"kubernetes.io/projected/5ae48763-b66b-4038-855d-f700a2dbe040-kube-api-access-52t9z\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:56 crc kubenswrapper[4558]: I0120 17:02:56.377438 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5ae48763-b66b-4038-855d-f700a2dbe040-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:56 crc kubenswrapper[4558]: I0120 17:02:56.377448 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ae48763-b66b-4038-855d-f700a2dbe040-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:56 crc kubenswrapper[4558]: I0120 17:02:56.377456 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ae48763-b66b-4038-855d-f700a2dbe040-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:02:56 crc kubenswrapper[4558]: I0120 17:02:56.689393 4558 generic.go:334] "Generic (PLEG): container finished" podID="5ae48763-b66b-4038-855d-f700a2dbe040" containerID="b6e7adc6c85b1c4072ddc0ea64fb22919e97a66275f2f29b604d26d6e0a3864f" exitCode=0 Jan 20 17:02:56 crc kubenswrapper[4558]: I0120 17:02:56.689640 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-api-85c96db9fd-dcjxv" Jan 20 17:02:56 crc kubenswrapper[4558]: I0120 17:02:56.689654 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-85c96db9fd-dcjxv" event={"ID":"5ae48763-b66b-4038-855d-f700a2dbe040","Type":"ContainerDied","Data":"b6e7adc6c85b1c4072ddc0ea64fb22919e97a66275f2f29b604d26d6e0a3864f"} Jan 20 17:02:56 crc kubenswrapper[4558]: I0120 17:02:56.689855 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-85c96db9fd-dcjxv" event={"ID":"5ae48763-b66b-4038-855d-f700a2dbe040","Type":"ContainerDied","Data":"d4918c414ed902dd782099323ffd6f3e98df9568cff4ead858e221b4d080bfe1"} Jan 20 17:02:56 crc kubenswrapper[4558]: I0120 17:02:56.689880 4558 scope.go:117] "RemoveContainer" containerID="b6e7adc6c85b1c4072ddc0ea64fb22919e97a66275f2f29b604d26d6e0a3864f" Jan 20 17:02:56 crc kubenswrapper[4558]: I0120 17:02:56.707179 4558 scope.go:117] "RemoveContainer" containerID="f5cc513f503187d3adffbb539aaa9f0f0f46917cc694f0a826736be7e143eb27" Jan 20 17:02:56 crc kubenswrapper[4558]: I0120 17:02:56.707156 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-api-85c96db9fd-dcjxv"] Jan 20 17:02:56 crc kubenswrapper[4558]: I0120 17:02:56.716778 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-api-85c96db9fd-dcjxv"] Jan 20 17:02:56 crc kubenswrapper[4558]: I0120 17:02:56.719722 4558 scope.go:117] "RemoveContainer" containerID="b6e7adc6c85b1c4072ddc0ea64fb22919e97a66275f2f29b604d26d6e0a3864f" Jan 20 17:02:56 crc kubenswrapper[4558]: E0120 17:02:56.720091 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b6e7adc6c85b1c4072ddc0ea64fb22919e97a66275f2f29b604d26d6e0a3864f\": container with ID starting with b6e7adc6c85b1c4072ddc0ea64fb22919e97a66275f2f29b604d26d6e0a3864f not found: ID does not exist" containerID="b6e7adc6c85b1c4072ddc0ea64fb22919e97a66275f2f29b604d26d6e0a3864f" Jan 20 17:02:56 crc kubenswrapper[4558]: I0120 17:02:56.720202 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b6e7adc6c85b1c4072ddc0ea64fb22919e97a66275f2f29b604d26d6e0a3864f"} err="failed to get container status \"b6e7adc6c85b1c4072ddc0ea64fb22919e97a66275f2f29b604d26d6e0a3864f\": rpc error: code = NotFound desc = could not find container \"b6e7adc6c85b1c4072ddc0ea64fb22919e97a66275f2f29b604d26d6e0a3864f\": container with ID starting with b6e7adc6c85b1c4072ddc0ea64fb22919e97a66275f2f29b604d26d6e0a3864f not found: ID does not exist" Jan 20 17:02:56 crc kubenswrapper[4558]: I0120 17:02:56.720289 4558 scope.go:117] "RemoveContainer" containerID="f5cc513f503187d3adffbb539aaa9f0f0f46917cc694f0a826736be7e143eb27" Jan 20 17:02:56 crc kubenswrapper[4558]: E0120 17:02:56.720609 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f5cc513f503187d3adffbb539aaa9f0f0f46917cc694f0a826736be7e143eb27\": container with ID starting with f5cc513f503187d3adffbb539aaa9f0f0f46917cc694f0a826736be7e143eb27 not found: ID does not exist" containerID="f5cc513f503187d3adffbb539aaa9f0f0f46917cc694f0a826736be7e143eb27" Jan 20 17:02:56 crc kubenswrapper[4558]: I0120 17:02:56.720682 4558 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"f5cc513f503187d3adffbb539aaa9f0f0f46917cc694f0a826736be7e143eb27"} err="failed to get container status \"f5cc513f503187d3adffbb539aaa9f0f0f46917cc694f0a826736be7e143eb27\": rpc error: code = NotFound desc = could not find container \"f5cc513f503187d3adffbb539aaa9f0f0f46917cc694f0a826736be7e143eb27\": container with ID starting with f5cc513f503187d3adffbb539aaa9f0f0f46917cc694f0a826736be7e143eb27 not found: ID does not exist" Jan 20 17:02:57 crc kubenswrapper[4558]: I0120 17:02:57.329962 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 17:02:57 crc kubenswrapper[4558]: I0120 17:02:57.330213 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 17:02:58 crc kubenswrapper[4558]: I0120 17:02:58.572537 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5ae48763-b66b-4038-855d-f700a2dbe040" path="/var/lib/kubelet/pods/5ae48763-b66b-4038-855d-f700a2dbe040/volumes" Jan 20 17:02:58 crc kubenswrapper[4558]: I0120 17:02:58.755679 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/placement-9b48ffd74-btjvf" Jan 20 17:02:58 crc kubenswrapper[4558]: I0120 17:02:58.761601 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/placement-9b48ffd74-btjvf" Jan 20 17:02:59 crc kubenswrapper[4558]: I0120 17:02:59.344675 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/placement-fbd5dff98-mmhdt" Jan 20 17:02:59 crc kubenswrapper[4558]: I0120 17:02:59.345945 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/placement-fbd5dff98-mmhdt" Jan 20 17:02:59 crc kubenswrapper[4558]: I0120 17:02:59.395421 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-9b48ffd74-btjvf"] Jan 20 17:02:59 crc kubenswrapper[4558]: I0120 17:02:59.421745 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/neutron-5f668b67df-bhm4t" Jan 20 17:03:00 crc kubenswrapper[4558]: I0120 17:03:00.106275 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:00 crc kubenswrapper[4558]: I0120 17:03:00.726569 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/placement-9b48ffd74-btjvf" podUID="6dfbc47c-776a-4037-b444-06871e236fa8" containerName="placement-log" containerID="cri-o://014f5e272ac85a6ef2a4fbfb0ab6a52407f35dbb51b22e035984305b9a965c73" gracePeriod=30 Jan 20 17:03:00 crc kubenswrapper[4558]: I0120 17:03:00.726611 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/placement-9b48ffd74-btjvf" podUID="6dfbc47c-776a-4037-b444-06871e236fa8" containerName="placement-api" containerID="cri-o://df2d929cd85144a28ff8ba0e06c2eaaa403cb15d51885680f97f5b7df9640e84" gracePeriod=30 Jan 20 17:03:01 crc kubenswrapper[4558]: I0120 
17:03:01.735308 4558 generic.go:334] "Generic (PLEG): container finished" podID="6dfbc47c-776a-4037-b444-06871e236fa8" containerID="014f5e272ac85a6ef2a4fbfb0ab6a52407f35dbb51b22e035984305b9a965c73" exitCode=143 Jan 20 17:03:01 crc kubenswrapper[4558]: I0120 17:03:01.735392 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-9b48ffd74-btjvf" event={"ID":"6dfbc47c-776a-4037-b444-06871e236fa8","Type":"ContainerDied","Data":"014f5e272ac85a6ef2a4fbfb0ab6a52407f35dbb51b22e035984305b9a965c73"} Jan 20 17:03:04 crc kubenswrapper[4558]: I0120 17:03:04.076363 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-9b48ffd74-btjvf" Jan 20 17:03:04 crc kubenswrapper[4558]: I0120 17:03:04.083668 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6dfbc47c-776a-4037-b444-06871e236fa8-config-data\") pod \"6dfbc47c-776a-4037-b444-06871e236fa8\" (UID: \"6dfbc47c-776a-4037-b444-06871e236fa8\") " Jan 20 17:03:04 crc kubenswrapper[4558]: I0120 17:03:04.083706 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6dfbc47c-776a-4037-b444-06871e236fa8-scripts\") pod \"6dfbc47c-776a-4037-b444-06871e236fa8\" (UID: \"6dfbc47c-776a-4037-b444-06871e236fa8\") " Jan 20 17:03:04 crc kubenswrapper[4558]: I0120 17:03:04.083803 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6dfbc47c-776a-4037-b444-06871e236fa8-combined-ca-bundle\") pod \"6dfbc47c-776a-4037-b444-06871e236fa8\" (UID: \"6dfbc47c-776a-4037-b444-06871e236fa8\") " Jan 20 17:03:04 crc kubenswrapper[4558]: I0120 17:03:04.083827 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zcghn\" (UniqueName: \"kubernetes.io/projected/6dfbc47c-776a-4037-b444-06871e236fa8-kube-api-access-zcghn\") pod \"6dfbc47c-776a-4037-b444-06871e236fa8\" (UID: \"6dfbc47c-776a-4037-b444-06871e236fa8\") " Jan 20 17:03:04 crc kubenswrapper[4558]: I0120 17:03:04.083961 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6dfbc47c-776a-4037-b444-06871e236fa8-logs\") pod \"6dfbc47c-776a-4037-b444-06871e236fa8\" (UID: \"6dfbc47c-776a-4037-b444-06871e236fa8\") " Jan 20 17:03:04 crc kubenswrapper[4558]: I0120 17:03:04.084336 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6dfbc47c-776a-4037-b444-06871e236fa8-logs" (OuterVolumeSpecName: "logs") pod "6dfbc47c-776a-4037-b444-06871e236fa8" (UID: "6dfbc47c-776a-4037-b444-06871e236fa8"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:03:04 crc kubenswrapper[4558]: I0120 17:03:04.088673 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6dfbc47c-776a-4037-b444-06871e236fa8-kube-api-access-zcghn" (OuterVolumeSpecName: "kube-api-access-zcghn") pod "6dfbc47c-776a-4037-b444-06871e236fa8" (UID: "6dfbc47c-776a-4037-b444-06871e236fa8"). InnerVolumeSpecName "kube-api-access-zcghn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:03:04 crc kubenswrapper[4558]: I0120 17:03:04.104250 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6dfbc47c-776a-4037-b444-06871e236fa8-scripts" (OuterVolumeSpecName: "scripts") pod "6dfbc47c-776a-4037-b444-06871e236fa8" (UID: "6dfbc47c-776a-4037-b444-06871e236fa8"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:03:04 crc kubenswrapper[4558]: I0120 17:03:04.129044 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6dfbc47c-776a-4037-b444-06871e236fa8-config-data" (OuterVolumeSpecName: "config-data") pod "6dfbc47c-776a-4037-b444-06871e236fa8" (UID: "6dfbc47c-776a-4037-b444-06871e236fa8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:03:04 crc kubenswrapper[4558]: I0120 17:03:04.133401 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6dfbc47c-776a-4037-b444-06871e236fa8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6dfbc47c-776a-4037-b444-06871e236fa8" (UID: "6dfbc47c-776a-4037-b444-06871e236fa8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:03:04 crc kubenswrapper[4558]: I0120 17:03:04.185022 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6dfbc47c-776a-4037-b444-06871e236fa8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:04 crc kubenswrapper[4558]: I0120 17:03:04.185048 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zcghn\" (UniqueName: \"kubernetes.io/projected/6dfbc47c-776a-4037-b444-06871e236fa8-kube-api-access-zcghn\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:04 crc kubenswrapper[4558]: I0120 17:03:04.185060 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6dfbc47c-776a-4037-b444-06871e236fa8-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:04 crc kubenswrapper[4558]: I0120 17:03:04.185068 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6dfbc47c-776a-4037-b444-06871e236fa8-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:04 crc kubenswrapper[4558]: I0120 17:03:04.185077 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6dfbc47c-776a-4037-b444-06871e236fa8-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:04 crc kubenswrapper[4558]: I0120 17:03:04.755685 4558 generic.go:334] "Generic (PLEG): container finished" podID="6dfbc47c-776a-4037-b444-06871e236fa8" containerID="df2d929cd85144a28ff8ba0e06c2eaaa403cb15d51885680f97f5b7df9640e84" exitCode=0 Jan 20 17:03:04 crc kubenswrapper[4558]: I0120 17:03:04.755785 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-9b48ffd74-btjvf" Jan 20 17:03:04 crc kubenswrapper[4558]: I0120 17:03:04.755801 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-9b48ffd74-btjvf" event={"ID":"6dfbc47c-776a-4037-b444-06871e236fa8","Type":"ContainerDied","Data":"df2d929cd85144a28ff8ba0e06c2eaaa403cb15d51885680f97f5b7df9640e84"} Jan 20 17:03:04 crc kubenswrapper[4558]: I0120 17:03:04.756000 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-9b48ffd74-btjvf" event={"ID":"6dfbc47c-776a-4037-b444-06871e236fa8","Type":"ContainerDied","Data":"fcbfc00db1b39d53e5293b14c4e92c6c6a34cea796566e8674dd88daaa1134de"} Jan 20 17:03:04 crc kubenswrapper[4558]: I0120 17:03:04.756022 4558 scope.go:117] "RemoveContainer" containerID="df2d929cd85144a28ff8ba0e06c2eaaa403cb15d51885680f97f5b7df9640e84" Jan 20 17:03:04 crc kubenswrapper[4558]: I0120 17:03:04.773697 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-9b48ffd74-btjvf"] Jan 20 17:03:04 crc kubenswrapper[4558]: I0120 17:03:04.775769 4558 scope.go:117] "RemoveContainer" containerID="014f5e272ac85a6ef2a4fbfb0ab6a52407f35dbb51b22e035984305b9a965c73" Jan 20 17:03:04 crc kubenswrapper[4558]: I0120 17:03:04.783141 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/placement-9b48ffd74-btjvf"] Jan 20 17:03:04 crc kubenswrapper[4558]: I0120 17:03:04.791000 4558 scope.go:117] "RemoveContainer" containerID="df2d929cd85144a28ff8ba0e06c2eaaa403cb15d51885680f97f5b7df9640e84" Jan 20 17:03:04 crc kubenswrapper[4558]: E0120 17:03:04.791722 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"df2d929cd85144a28ff8ba0e06c2eaaa403cb15d51885680f97f5b7df9640e84\": container with ID starting with df2d929cd85144a28ff8ba0e06c2eaaa403cb15d51885680f97f5b7df9640e84 not found: ID does not exist" containerID="df2d929cd85144a28ff8ba0e06c2eaaa403cb15d51885680f97f5b7df9640e84" Jan 20 17:03:04 crc kubenswrapper[4558]: I0120 17:03:04.791758 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"df2d929cd85144a28ff8ba0e06c2eaaa403cb15d51885680f97f5b7df9640e84"} err="failed to get container status \"df2d929cd85144a28ff8ba0e06c2eaaa403cb15d51885680f97f5b7df9640e84\": rpc error: code = NotFound desc = could not find container \"df2d929cd85144a28ff8ba0e06c2eaaa403cb15d51885680f97f5b7df9640e84\": container with ID starting with df2d929cd85144a28ff8ba0e06c2eaaa403cb15d51885680f97f5b7df9640e84 not found: ID does not exist" Jan 20 17:03:04 crc kubenswrapper[4558]: I0120 17:03:04.791783 4558 scope.go:117] "RemoveContainer" containerID="014f5e272ac85a6ef2a4fbfb0ab6a52407f35dbb51b22e035984305b9a965c73" Jan 20 17:03:04 crc kubenswrapper[4558]: E0120 17:03:04.792068 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"014f5e272ac85a6ef2a4fbfb0ab6a52407f35dbb51b22e035984305b9a965c73\": container with ID starting with 014f5e272ac85a6ef2a4fbfb0ab6a52407f35dbb51b22e035984305b9a965c73 not found: ID does not exist" containerID="014f5e272ac85a6ef2a4fbfb0ab6a52407f35dbb51b22e035984305b9a965c73" Jan 20 17:03:04 crc kubenswrapper[4558]: I0120 17:03:04.792096 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"014f5e272ac85a6ef2a4fbfb0ab6a52407f35dbb51b22e035984305b9a965c73"} err="failed to 
get container status \"014f5e272ac85a6ef2a4fbfb0ab6a52407f35dbb51b22e035984305b9a965c73\": rpc error: code = NotFound desc = could not find container \"014f5e272ac85a6ef2a4fbfb0ab6a52407f35dbb51b22e035984305b9a965c73\": container with ID starting with 014f5e272ac85a6ef2a4fbfb0ab6a52407f35dbb51b22e035984305b9a965c73 not found: ID does not exist" Jan 20 17:03:06 crc kubenswrapper[4558]: I0120 17:03:06.158004 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/keystone-65d75d5fbb-nkccr" Jan 20 17:03:06 crc kubenswrapper[4558]: I0120 17:03:06.573325 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6dfbc47c-776a-4037-b444-06871e236fa8" path="/var/lib/kubelet/pods/6dfbc47c-776a-4037-b444-06871e236fa8/volumes" Jan 20 17:03:08 crc kubenswrapper[4558]: I0120 17:03:08.412980 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/neutron-7fdccbb69d-fng8p" Jan 20 17:03:08 crc kubenswrapper[4558]: I0120 17:03:08.458454 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-5f668b67df-bhm4t"] Jan 20 17:03:08 crc kubenswrapper[4558]: I0120 17:03:08.458649 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-5f668b67df-bhm4t" podUID="561ded1f-50d2-4eb9-8ceb-c6f587ee80d6" containerName="neutron-api" containerID="cri-o://b1010de6c5451fa8fdfb551cfb1d14c2ce97f03ca7a9a4dfc2b5fa33e42ee8b3" gracePeriod=30 Jan 20 17:03:08 crc kubenswrapper[4558]: I0120 17:03:08.458722 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-5f668b67df-bhm4t" podUID="561ded1f-50d2-4eb9-8ceb-c6f587ee80d6" containerName="neutron-httpd" containerID="cri-o://1f93abb9eab5658358835514b46ebdffd05153b84dd13189515b67613f63f304" gracePeriod=30 Jan 20 17:03:08 crc kubenswrapper[4558]: I0120 17:03:08.781577 4558 generic.go:334] "Generic (PLEG): container finished" podID="561ded1f-50d2-4eb9-8ceb-c6f587ee80d6" containerID="1f93abb9eab5658358835514b46ebdffd05153b84dd13189515b67613f63f304" exitCode=0 Jan 20 17:03:08 crc kubenswrapper[4558]: I0120 17:03:08.781648 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-5f668b67df-bhm4t" event={"ID":"561ded1f-50d2-4eb9-8ceb-c6f587ee80d6","Type":"ContainerDied","Data":"1f93abb9eab5658358835514b46ebdffd05153b84dd13189515b67613f63f304"} Jan 20 17:03:09 crc kubenswrapper[4558]: I0120 17:03:09.893246 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:03:09 crc kubenswrapper[4558]: I0120 17:03:09.893468 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="1fa82f11-190d-4f03-90eb-93ce1d0caf36" containerName="ceilometer-central-agent" containerID="cri-o://5da46a41a4afdd1907ffd116daa323945a958ea2d34c974bd437261d9f1d45b9" gracePeriod=30 Jan 20 17:03:09 crc kubenswrapper[4558]: I0120 17:03:09.893508 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="1fa82f11-190d-4f03-90eb-93ce1d0caf36" containerName="proxy-httpd" containerID="cri-o://f9d98cfacb9e982fff6de888dacf794e1dbf9cc9e06d9800767b7d467cb73658" gracePeriod=30 Jan 20 17:03:09 crc kubenswrapper[4558]: I0120 17:03:09.893551 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" 
podUID="1fa82f11-190d-4f03-90eb-93ce1d0caf36" containerName="ceilometer-notification-agent" containerID="cri-o://f1726311e40abb9f8cd333b1557f3a45f29a8c750b323e60afc3571fcd98b0d9" gracePeriod=30 Jan 20 17:03:09 crc kubenswrapper[4558]: I0120 17:03:09.893547 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="1fa82f11-190d-4f03-90eb-93ce1d0caf36" containerName="sg-core" containerID="cri-o://9e41d81a80fc745c023df19c0397a12429361410c19b1d7b04de7da6ee1b3b38" gracePeriod=30 Jan 20 17:03:10 crc kubenswrapper[4558]: I0120 17:03:10.315027 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:03:10 crc kubenswrapper[4558]: E0120 17:03:10.315491 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6dfbc47c-776a-4037-b444-06871e236fa8" containerName="placement-api" Jan 20 17:03:10 crc kubenswrapper[4558]: I0120 17:03:10.315507 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6dfbc47c-776a-4037-b444-06871e236fa8" containerName="placement-api" Jan 20 17:03:10 crc kubenswrapper[4558]: E0120 17:03:10.315518 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6dfbc47c-776a-4037-b444-06871e236fa8" containerName="placement-log" Jan 20 17:03:10 crc kubenswrapper[4558]: I0120 17:03:10.315525 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6dfbc47c-776a-4037-b444-06871e236fa8" containerName="placement-log" Jan 20 17:03:10 crc kubenswrapper[4558]: E0120 17:03:10.315532 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ae48763-b66b-4038-855d-f700a2dbe040" containerName="barbican-api" Jan 20 17:03:10 crc kubenswrapper[4558]: I0120 17:03:10.315537 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ae48763-b66b-4038-855d-f700a2dbe040" containerName="barbican-api" Jan 20 17:03:10 crc kubenswrapper[4558]: E0120 17:03:10.315552 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ae48763-b66b-4038-855d-f700a2dbe040" containerName="barbican-api-log" Jan 20 17:03:10 crc kubenswrapper[4558]: I0120 17:03:10.315558 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ae48763-b66b-4038-855d-f700a2dbe040" containerName="barbican-api-log" Jan 20 17:03:10 crc kubenswrapper[4558]: I0120 17:03:10.315679 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="6dfbc47c-776a-4037-b444-06871e236fa8" containerName="placement-api" Jan 20 17:03:10 crc kubenswrapper[4558]: I0120 17:03:10.315691 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="6dfbc47c-776a-4037-b444-06871e236fa8" containerName="placement-log" Jan 20 17:03:10 crc kubenswrapper[4558]: I0120 17:03:10.315704 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ae48763-b66b-4038-855d-f700a2dbe040" containerName="barbican-api-log" Jan 20 17:03:10 crc kubenswrapper[4558]: I0120 17:03:10.315713 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ae48763-b66b-4038-855d-f700a2dbe040" containerName="barbican-api" Jan 20 17:03:10 crc kubenswrapper[4558]: I0120 17:03:10.316131 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstackclient" Jan 20 17:03:10 crc kubenswrapper[4558]: I0120 17:03:10.317504 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"openstack-config-secret" Jan 20 17:03:10 crc kubenswrapper[4558]: I0120 17:03:10.318322 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-config" Jan 20 17:03:10 crc kubenswrapper[4558]: I0120 17:03:10.319542 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"openstackclient-openstackclient-dockercfg-f8l7w" Jan 20 17:03:10 crc kubenswrapper[4558]: I0120 17:03:10.338796 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:03:10 crc kubenswrapper[4558]: I0120 17:03:10.368318 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/2e9a9b09-70ea-4d56-87ac-5a2c3461d2df-openstack-config\") pod \"openstackclient\" (UID: \"2e9a9b09-70ea-4d56-87ac-5a2c3461d2df\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:03:10 crc kubenswrapper[4558]: I0120 17:03:10.368378 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-67zld\" (UniqueName: \"kubernetes.io/projected/2e9a9b09-70ea-4d56-87ac-5a2c3461d2df-kube-api-access-67zld\") pod \"openstackclient\" (UID: \"2e9a9b09-70ea-4d56-87ac-5a2c3461d2df\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:03:10 crc kubenswrapper[4558]: I0120 17:03:10.368671 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e9a9b09-70ea-4d56-87ac-5a2c3461d2df-combined-ca-bundle\") pod \"openstackclient\" (UID: \"2e9a9b09-70ea-4d56-87ac-5a2c3461d2df\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:03:10 crc kubenswrapper[4558]: I0120 17:03:10.368745 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/2e9a9b09-70ea-4d56-87ac-5a2c3461d2df-openstack-config-secret\") pod \"openstackclient\" (UID: \"2e9a9b09-70ea-4d56-87ac-5a2c3461d2df\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:03:10 crc kubenswrapper[4558]: I0120 17:03:10.469754 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e9a9b09-70ea-4d56-87ac-5a2c3461d2df-combined-ca-bundle\") pod \"openstackclient\" (UID: \"2e9a9b09-70ea-4d56-87ac-5a2c3461d2df\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:03:10 crc kubenswrapper[4558]: I0120 17:03:10.469819 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/2e9a9b09-70ea-4d56-87ac-5a2c3461d2df-openstack-config-secret\") pod \"openstackclient\" (UID: \"2e9a9b09-70ea-4d56-87ac-5a2c3461d2df\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:03:10 crc kubenswrapper[4558]: I0120 17:03:10.469883 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/2e9a9b09-70ea-4d56-87ac-5a2c3461d2df-openstack-config\") pod \"openstackclient\" (UID: \"2e9a9b09-70ea-4d56-87ac-5a2c3461d2df\") " pod="openstack-kuttl-tests/openstackclient" 
Jan 20 17:03:10 crc kubenswrapper[4558]: I0120 17:03:10.469913 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-67zld\" (UniqueName: \"kubernetes.io/projected/2e9a9b09-70ea-4d56-87ac-5a2c3461d2df-kube-api-access-67zld\") pod \"openstackclient\" (UID: \"2e9a9b09-70ea-4d56-87ac-5a2c3461d2df\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:03:10 crc kubenswrapper[4558]: I0120 17:03:10.471141 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/2e9a9b09-70ea-4d56-87ac-5a2c3461d2df-openstack-config\") pod \"openstackclient\" (UID: \"2e9a9b09-70ea-4d56-87ac-5a2c3461d2df\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:03:10 crc kubenswrapper[4558]: I0120 17:03:10.474455 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/2e9a9b09-70ea-4d56-87ac-5a2c3461d2df-openstack-config-secret\") pod \"openstackclient\" (UID: \"2e9a9b09-70ea-4d56-87ac-5a2c3461d2df\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:03:10 crc kubenswrapper[4558]: I0120 17:03:10.474874 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e9a9b09-70ea-4d56-87ac-5a2c3461d2df-combined-ca-bundle\") pod \"openstackclient\" (UID: \"2e9a9b09-70ea-4d56-87ac-5a2c3461d2df\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:03:10 crc kubenswrapper[4558]: I0120 17:03:10.486988 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-67zld\" (UniqueName: \"kubernetes.io/projected/2e9a9b09-70ea-4d56-87ac-5a2c3461d2df-kube-api-access-67zld\") pod \"openstackclient\" (UID: \"2e9a9b09-70ea-4d56-87ac-5a2c3461d2df\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:03:10 crc kubenswrapper[4558]: I0120 17:03:10.628654 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstackclient" Jan 20 17:03:10 crc kubenswrapper[4558]: I0120 17:03:10.797821 4558 generic.go:334] "Generic (PLEG): container finished" podID="1fa82f11-190d-4f03-90eb-93ce1d0caf36" containerID="f9d98cfacb9e982fff6de888dacf794e1dbf9cc9e06d9800767b7d467cb73658" exitCode=0 Jan 20 17:03:10 crc kubenswrapper[4558]: I0120 17:03:10.797844 4558 generic.go:334] "Generic (PLEG): container finished" podID="1fa82f11-190d-4f03-90eb-93ce1d0caf36" containerID="9e41d81a80fc745c023df19c0397a12429361410c19b1d7b04de7da6ee1b3b38" exitCode=2 Jan 20 17:03:10 crc kubenswrapper[4558]: I0120 17:03:10.797851 4558 generic.go:334] "Generic (PLEG): container finished" podID="1fa82f11-190d-4f03-90eb-93ce1d0caf36" containerID="5da46a41a4afdd1907ffd116daa323945a958ea2d34c974bd437261d9f1d45b9" exitCode=0 Jan 20 17:03:10 crc kubenswrapper[4558]: I0120 17:03:10.797867 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"1fa82f11-190d-4f03-90eb-93ce1d0caf36","Type":"ContainerDied","Data":"f9d98cfacb9e982fff6de888dacf794e1dbf9cc9e06d9800767b7d467cb73658"} Jan 20 17:03:10 crc kubenswrapper[4558]: I0120 17:03:10.797889 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"1fa82f11-190d-4f03-90eb-93ce1d0caf36","Type":"ContainerDied","Data":"9e41d81a80fc745c023df19c0397a12429361410c19b1d7b04de7da6ee1b3b38"} Jan 20 17:03:10 crc kubenswrapper[4558]: I0120 17:03:10.797899 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"1fa82f11-190d-4f03-90eb-93ce1d0caf36","Type":"ContainerDied","Data":"5da46a41a4afdd1907ffd116daa323945a958ea2d34c974bd437261d9f1d45b9"} Jan 20 17:03:10 crc kubenswrapper[4558]: I0120 17:03:10.986875 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:03:11 crc kubenswrapper[4558]: I0120 17:03:11.805340 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstackclient" event={"ID":"2e9a9b09-70ea-4d56-87ac-5a2c3461d2df","Type":"ContainerStarted","Data":"14b34f3f512d1c97cde26d0f645f8f50baa327877aab8009b5d6e09f075e96b7"} Jan 20 17:03:11 crc kubenswrapper[4558]: I0120 17:03:11.805565 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstackclient" event={"ID":"2e9a9b09-70ea-4d56-87ac-5a2c3461d2df","Type":"ContainerStarted","Data":"310050bcb64c74a6ae360757aed6d4caece725cdc9f8535eb5928d87a511b09b"} Jan 20 17:03:11 crc kubenswrapper[4558]: I0120 17:03:11.816768 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/openstackclient" podStartSLOduration=1.816756325 podStartE2EDuration="1.816756325s" podCreationTimestamp="2026-01-20 17:03:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:03:11.815513388 +0000 UTC m=+1285.575851344" watchObservedRunningTime="2026-01-20 17:03:11.816756325 +0000 UTC m=+1285.577094292" Jan 20 17:03:12 crc kubenswrapper[4558]: I0120 17:03:12.515401 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/swift-proxy-595b48fd98-kbjv6"] Jan 20 17:03:12 crc kubenswrapper[4558]: I0120 17:03:12.516784 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/swift-proxy-595b48fd98-kbjv6" Jan 20 17:03:12 crc kubenswrapper[4558]: I0120 17:03:12.519032 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"swift-proxy-config-data" Jan 20 17:03:12 crc kubenswrapper[4558]: I0120 17:03:12.519254 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-swift-internal-svc" Jan 20 17:03:12 crc kubenswrapper[4558]: I0120 17:03:12.519384 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-swift-public-svc" Jan 20 17:03:12 crc kubenswrapper[4558]: I0120 17:03:12.525996 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-proxy-595b48fd98-kbjv6"] Jan 20 17:03:12 crc kubenswrapper[4558]: I0120 17:03:12.703060 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wbsp4\" (UniqueName: \"kubernetes.io/projected/32a3bcd7-25d6-45f5-8ce6-66949357504c-kube-api-access-wbsp4\") pod \"swift-proxy-595b48fd98-kbjv6\" (UID: \"32a3bcd7-25d6-45f5-8ce6-66949357504c\") " pod="openstack-kuttl-tests/swift-proxy-595b48fd98-kbjv6" Jan 20 17:03:12 crc kubenswrapper[4558]: I0120 17:03:12.703119 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/32a3bcd7-25d6-45f5-8ce6-66949357504c-log-httpd\") pod \"swift-proxy-595b48fd98-kbjv6\" (UID: \"32a3bcd7-25d6-45f5-8ce6-66949357504c\") " pod="openstack-kuttl-tests/swift-proxy-595b48fd98-kbjv6" Jan 20 17:03:12 crc kubenswrapper[4558]: I0120 17:03:12.703326 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/32a3bcd7-25d6-45f5-8ce6-66949357504c-public-tls-certs\") pod \"swift-proxy-595b48fd98-kbjv6\" (UID: \"32a3bcd7-25d6-45f5-8ce6-66949357504c\") " pod="openstack-kuttl-tests/swift-proxy-595b48fd98-kbjv6" Jan 20 17:03:12 crc kubenswrapper[4558]: I0120 17:03:12.703431 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/32a3bcd7-25d6-45f5-8ce6-66949357504c-run-httpd\") pod \"swift-proxy-595b48fd98-kbjv6\" (UID: \"32a3bcd7-25d6-45f5-8ce6-66949357504c\") " pod="openstack-kuttl-tests/swift-proxy-595b48fd98-kbjv6" Jan 20 17:03:12 crc kubenswrapper[4558]: I0120 17:03:12.703493 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/32a3bcd7-25d6-45f5-8ce6-66949357504c-internal-tls-certs\") pod \"swift-proxy-595b48fd98-kbjv6\" (UID: \"32a3bcd7-25d6-45f5-8ce6-66949357504c\") " pod="openstack-kuttl-tests/swift-proxy-595b48fd98-kbjv6" Jan 20 17:03:12 crc kubenswrapper[4558]: I0120 17:03:12.703527 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32a3bcd7-25d6-45f5-8ce6-66949357504c-config-data\") pod \"swift-proxy-595b48fd98-kbjv6\" (UID: \"32a3bcd7-25d6-45f5-8ce6-66949357504c\") " pod="openstack-kuttl-tests/swift-proxy-595b48fd98-kbjv6" Jan 20 17:03:12 crc kubenswrapper[4558]: I0120 17:03:12.703810 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: 
\"kubernetes.io/projected/32a3bcd7-25d6-45f5-8ce6-66949357504c-etc-swift\") pod \"swift-proxy-595b48fd98-kbjv6\" (UID: \"32a3bcd7-25d6-45f5-8ce6-66949357504c\") " pod="openstack-kuttl-tests/swift-proxy-595b48fd98-kbjv6" Jan 20 17:03:12 crc kubenswrapper[4558]: I0120 17:03:12.703841 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32a3bcd7-25d6-45f5-8ce6-66949357504c-combined-ca-bundle\") pod \"swift-proxy-595b48fd98-kbjv6\" (UID: \"32a3bcd7-25d6-45f5-8ce6-66949357504c\") " pod="openstack-kuttl-tests/swift-proxy-595b48fd98-kbjv6" Jan 20 17:03:12 crc kubenswrapper[4558]: I0120 17:03:12.806222 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/32a3bcd7-25d6-45f5-8ce6-66949357504c-etc-swift\") pod \"swift-proxy-595b48fd98-kbjv6\" (UID: \"32a3bcd7-25d6-45f5-8ce6-66949357504c\") " pod="openstack-kuttl-tests/swift-proxy-595b48fd98-kbjv6" Jan 20 17:03:12 crc kubenswrapper[4558]: I0120 17:03:12.806257 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32a3bcd7-25d6-45f5-8ce6-66949357504c-combined-ca-bundle\") pod \"swift-proxy-595b48fd98-kbjv6\" (UID: \"32a3bcd7-25d6-45f5-8ce6-66949357504c\") " pod="openstack-kuttl-tests/swift-proxy-595b48fd98-kbjv6" Jan 20 17:03:12 crc kubenswrapper[4558]: I0120 17:03:12.806347 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wbsp4\" (UniqueName: \"kubernetes.io/projected/32a3bcd7-25d6-45f5-8ce6-66949357504c-kube-api-access-wbsp4\") pod \"swift-proxy-595b48fd98-kbjv6\" (UID: \"32a3bcd7-25d6-45f5-8ce6-66949357504c\") " pod="openstack-kuttl-tests/swift-proxy-595b48fd98-kbjv6" Jan 20 17:03:12 crc kubenswrapper[4558]: I0120 17:03:12.806398 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/32a3bcd7-25d6-45f5-8ce6-66949357504c-log-httpd\") pod \"swift-proxy-595b48fd98-kbjv6\" (UID: \"32a3bcd7-25d6-45f5-8ce6-66949357504c\") " pod="openstack-kuttl-tests/swift-proxy-595b48fd98-kbjv6" Jan 20 17:03:12 crc kubenswrapper[4558]: I0120 17:03:12.806436 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/32a3bcd7-25d6-45f5-8ce6-66949357504c-public-tls-certs\") pod \"swift-proxy-595b48fd98-kbjv6\" (UID: \"32a3bcd7-25d6-45f5-8ce6-66949357504c\") " pod="openstack-kuttl-tests/swift-proxy-595b48fd98-kbjv6" Jan 20 17:03:12 crc kubenswrapper[4558]: I0120 17:03:12.806459 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/32a3bcd7-25d6-45f5-8ce6-66949357504c-run-httpd\") pod \"swift-proxy-595b48fd98-kbjv6\" (UID: \"32a3bcd7-25d6-45f5-8ce6-66949357504c\") " pod="openstack-kuttl-tests/swift-proxy-595b48fd98-kbjv6" Jan 20 17:03:12 crc kubenswrapper[4558]: I0120 17:03:12.806479 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/32a3bcd7-25d6-45f5-8ce6-66949357504c-internal-tls-certs\") pod \"swift-proxy-595b48fd98-kbjv6\" (UID: \"32a3bcd7-25d6-45f5-8ce6-66949357504c\") " pod="openstack-kuttl-tests/swift-proxy-595b48fd98-kbjv6" Jan 20 17:03:12 crc kubenswrapper[4558]: I0120 17:03:12.806501 4558 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32a3bcd7-25d6-45f5-8ce6-66949357504c-config-data\") pod \"swift-proxy-595b48fd98-kbjv6\" (UID: \"32a3bcd7-25d6-45f5-8ce6-66949357504c\") " pod="openstack-kuttl-tests/swift-proxy-595b48fd98-kbjv6" Jan 20 17:03:12 crc kubenswrapper[4558]: I0120 17:03:12.807591 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/32a3bcd7-25d6-45f5-8ce6-66949357504c-log-httpd\") pod \"swift-proxy-595b48fd98-kbjv6\" (UID: \"32a3bcd7-25d6-45f5-8ce6-66949357504c\") " pod="openstack-kuttl-tests/swift-proxy-595b48fd98-kbjv6" Jan 20 17:03:12 crc kubenswrapper[4558]: I0120 17:03:12.807987 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/32a3bcd7-25d6-45f5-8ce6-66949357504c-run-httpd\") pod \"swift-proxy-595b48fd98-kbjv6\" (UID: \"32a3bcd7-25d6-45f5-8ce6-66949357504c\") " pod="openstack-kuttl-tests/swift-proxy-595b48fd98-kbjv6" Jan 20 17:03:12 crc kubenswrapper[4558]: I0120 17:03:12.811475 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32a3bcd7-25d6-45f5-8ce6-66949357504c-config-data\") pod \"swift-proxy-595b48fd98-kbjv6\" (UID: \"32a3bcd7-25d6-45f5-8ce6-66949357504c\") " pod="openstack-kuttl-tests/swift-proxy-595b48fd98-kbjv6" Jan 20 17:03:12 crc kubenswrapper[4558]: I0120 17:03:12.811676 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/32a3bcd7-25d6-45f5-8ce6-66949357504c-public-tls-certs\") pod \"swift-proxy-595b48fd98-kbjv6\" (UID: \"32a3bcd7-25d6-45f5-8ce6-66949357504c\") " pod="openstack-kuttl-tests/swift-proxy-595b48fd98-kbjv6" Jan 20 17:03:12 crc kubenswrapper[4558]: I0120 17:03:12.811783 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/32a3bcd7-25d6-45f5-8ce6-66949357504c-internal-tls-certs\") pod \"swift-proxy-595b48fd98-kbjv6\" (UID: \"32a3bcd7-25d6-45f5-8ce6-66949357504c\") " pod="openstack-kuttl-tests/swift-proxy-595b48fd98-kbjv6" Jan 20 17:03:12 crc kubenswrapper[4558]: I0120 17:03:12.813036 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/32a3bcd7-25d6-45f5-8ce6-66949357504c-etc-swift\") pod \"swift-proxy-595b48fd98-kbjv6\" (UID: \"32a3bcd7-25d6-45f5-8ce6-66949357504c\") " pod="openstack-kuttl-tests/swift-proxy-595b48fd98-kbjv6" Jan 20 17:03:12 crc kubenswrapper[4558]: I0120 17:03:12.816774 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32a3bcd7-25d6-45f5-8ce6-66949357504c-combined-ca-bundle\") pod \"swift-proxy-595b48fd98-kbjv6\" (UID: \"32a3bcd7-25d6-45f5-8ce6-66949357504c\") " pod="openstack-kuttl-tests/swift-proxy-595b48fd98-kbjv6" Jan 20 17:03:12 crc kubenswrapper[4558]: I0120 17:03:12.835776 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wbsp4\" (UniqueName: \"kubernetes.io/projected/32a3bcd7-25d6-45f5-8ce6-66949357504c-kube-api-access-wbsp4\") pod \"swift-proxy-595b48fd98-kbjv6\" (UID: \"32a3bcd7-25d6-45f5-8ce6-66949357504c\") " pod="openstack-kuttl-tests/swift-proxy-595b48fd98-kbjv6" Jan 20 17:03:12 crc kubenswrapper[4558]: I0120 17:03:12.845417 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/swift-proxy-595b48fd98-kbjv6" Jan 20 17:03:13 crc kubenswrapper[4558]: I0120 17:03:13.222436 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-proxy-595b48fd98-kbjv6"] Jan 20 17:03:13 crc kubenswrapper[4558]: I0120 17:03:13.494019 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-5f668b67df-bhm4t" Jan 20 17:03:13 crc kubenswrapper[4558]: I0120 17:03:13.519793 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/561ded1f-50d2-4eb9-8ceb-c6f587ee80d6-combined-ca-bundle\") pod \"561ded1f-50d2-4eb9-8ceb-c6f587ee80d6\" (UID: \"561ded1f-50d2-4eb9-8ceb-c6f587ee80d6\") " Jan 20 17:03:13 crc kubenswrapper[4558]: I0120 17:03:13.519847 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/561ded1f-50d2-4eb9-8ceb-c6f587ee80d6-httpd-config\") pod \"561ded1f-50d2-4eb9-8ceb-c6f587ee80d6\" (UID: \"561ded1f-50d2-4eb9-8ceb-c6f587ee80d6\") " Jan 20 17:03:13 crc kubenswrapper[4558]: I0120 17:03:13.519912 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gd92p\" (UniqueName: \"kubernetes.io/projected/561ded1f-50d2-4eb9-8ceb-c6f587ee80d6-kube-api-access-gd92p\") pod \"561ded1f-50d2-4eb9-8ceb-c6f587ee80d6\" (UID: \"561ded1f-50d2-4eb9-8ceb-c6f587ee80d6\") " Jan 20 17:03:13 crc kubenswrapper[4558]: I0120 17:03:13.519932 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/561ded1f-50d2-4eb9-8ceb-c6f587ee80d6-config\") pod \"561ded1f-50d2-4eb9-8ceb-c6f587ee80d6\" (UID: \"561ded1f-50d2-4eb9-8ceb-c6f587ee80d6\") " Jan 20 17:03:13 crc kubenswrapper[4558]: I0120 17:03:13.519950 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/561ded1f-50d2-4eb9-8ceb-c6f587ee80d6-ovndb-tls-certs\") pod \"561ded1f-50d2-4eb9-8ceb-c6f587ee80d6\" (UID: \"561ded1f-50d2-4eb9-8ceb-c6f587ee80d6\") " Jan 20 17:03:13 crc kubenswrapper[4558]: I0120 17:03:13.524074 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/561ded1f-50d2-4eb9-8ceb-c6f587ee80d6-kube-api-access-gd92p" (OuterVolumeSpecName: "kube-api-access-gd92p") pod "561ded1f-50d2-4eb9-8ceb-c6f587ee80d6" (UID: "561ded1f-50d2-4eb9-8ceb-c6f587ee80d6"). InnerVolumeSpecName "kube-api-access-gd92p". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:03:13 crc kubenswrapper[4558]: I0120 17:03:13.527321 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/561ded1f-50d2-4eb9-8ceb-c6f587ee80d6-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "561ded1f-50d2-4eb9-8ceb-c6f587ee80d6" (UID: "561ded1f-50d2-4eb9-8ceb-c6f587ee80d6"). InnerVolumeSpecName "httpd-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:03:13 crc kubenswrapper[4558]: I0120 17:03:13.548154 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:03:13 crc kubenswrapper[4558]: I0120 17:03:13.548407 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/kube-state-metrics-0" podUID="55c237c8-936c-45d1-b06c-374da878c2b7" containerName="kube-state-metrics" containerID="cri-o://40a664774128414719591bb05b093341822cb024885c28f9e0a4e07573f067a1" gracePeriod=30 Jan 20 17:03:13 crc kubenswrapper[4558]: I0120 17:03:13.575467 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/561ded1f-50d2-4eb9-8ceb-c6f587ee80d6-config" (OuterVolumeSpecName: "config") pod "561ded1f-50d2-4eb9-8ceb-c6f587ee80d6" (UID: "561ded1f-50d2-4eb9-8ceb-c6f587ee80d6"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:03:13 crc kubenswrapper[4558]: I0120 17:03:13.580823 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/561ded1f-50d2-4eb9-8ceb-c6f587ee80d6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "561ded1f-50d2-4eb9-8ceb-c6f587ee80d6" (UID: "561ded1f-50d2-4eb9-8ceb-c6f587ee80d6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:03:13 crc kubenswrapper[4558]: I0120 17:03:13.584739 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/561ded1f-50d2-4eb9-8ceb-c6f587ee80d6-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "561ded1f-50d2-4eb9-8ceb-c6f587ee80d6" (UID: "561ded1f-50d2-4eb9-8ceb-c6f587ee80d6"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:03:13 crc kubenswrapper[4558]: I0120 17:03:13.625549 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/561ded1f-50d2-4eb9-8ceb-c6f587ee80d6-httpd-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:13 crc kubenswrapper[4558]: I0120 17:03:13.625675 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gd92p\" (UniqueName: \"kubernetes.io/projected/561ded1f-50d2-4eb9-8ceb-c6f587ee80d6-kube-api-access-gd92p\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:13 crc kubenswrapper[4558]: I0120 17:03:13.625735 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/561ded1f-50d2-4eb9-8ceb-c6f587ee80d6-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:13 crc kubenswrapper[4558]: I0120 17:03:13.625801 4558 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/561ded1f-50d2-4eb9-8ceb-c6f587ee80d6-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:13 crc kubenswrapper[4558]: I0120 17:03:13.625866 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/561ded1f-50d2-4eb9-8ceb-c6f587ee80d6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:13 crc kubenswrapper[4558]: I0120 17:03:13.785760 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:13 crc kubenswrapper[4558]: I0120 17:03:13.825502 4558 generic.go:334] "Generic (PLEG): container finished" podID="55c237c8-936c-45d1-b06c-374da878c2b7" containerID="40a664774128414719591bb05b093341822cb024885c28f9e0a4e07573f067a1" exitCode=2 Jan 20 17:03:13 crc kubenswrapper[4558]: I0120 17:03:13.825549 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"55c237c8-936c-45d1-b06c-374da878c2b7","Type":"ContainerDied","Data":"40a664774128414719591bb05b093341822cb024885c28f9e0a4e07573f067a1"} Jan 20 17:03:13 crc kubenswrapper[4558]: I0120 17:03:13.827002 4558 generic.go:334] "Generic (PLEG): container finished" podID="561ded1f-50d2-4eb9-8ceb-c6f587ee80d6" containerID="b1010de6c5451fa8fdfb551cfb1d14c2ce97f03ca7a9a4dfc2b5fa33e42ee8b3" exitCode=0 Jan 20 17:03:13 crc kubenswrapper[4558]: I0120 17:03:13.827038 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-5f668b67df-bhm4t" event={"ID":"561ded1f-50d2-4eb9-8ceb-c6f587ee80d6","Type":"ContainerDied","Data":"b1010de6c5451fa8fdfb551cfb1d14c2ce97f03ca7a9a4dfc2b5fa33e42ee8b3"} Jan 20 17:03:13 crc kubenswrapper[4558]: I0120 17:03:13.827052 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-5f668b67df-bhm4t" event={"ID":"561ded1f-50d2-4eb9-8ceb-c6f587ee80d6","Type":"ContainerDied","Data":"79cc6d091a1acd1070f9122c8d0cdefa50fc802a0386b14788849334d6ca9a85"} Jan 20 17:03:13 crc kubenswrapper[4558]: I0120 17:03:13.827067 4558 scope.go:117] "RemoveContainer" containerID="1f93abb9eab5658358835514b46ebdffd05153b84dd13189515b67613f63f304" Jan 20 17:03:13 crc kubenswrapper[4558]: I0120 17:03:13.827147 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-5f668b67df-bhm4t" Jan 20 17:03:13 crc kubenswrapper[4558]: I0120 17:03:13.829539 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1fa82f11-190d-4f03-90eb-93ce1d0caf36-config-data\") pod \"1fa82f11-190d-4f03-90eb-93ce1d0caf36\" (UID: \"1fa82f11-190d-4f03-90eb-93ce1d0caf36\") " Jan 20 17:03:13 crc kubenswrapper[4558]: I0120 17:03:13.829637 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1fa82f11-190d-4f03-90eb-93ce1d0caf36-run-httpd\") pod \"1fa82f11-190d-4f03-90eb-93ce1d0caf36\" (UID: \"1fa82f11-190d-4f03-90eb-93ce1d0caf36\") " Jan 20 17:03:13 crc kubenswrapper[4558]: I0120 17:03:13.829702 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1fa82f11-190d-4f03-90eb-93ce1d0caf36-log-httpd\") pod \"1fa82f11-190d-4f03-90eb-93ce1d0caf36\" (UID: \"1fa82f11-190d-4f03-90eb-93ce1d0caf36\") " Jan 20 17:03:13 crc kubenswrapper[4558]: I0120 17:03:13.829867 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1fa82f11-190d-4f03-90eb-93ce1d0caf36-scripts\") pod \"1fa82f11-190d-4f03-90eb-93ce1d0caf36\" (UID: \"1fa82f11-190d-4f03-90eb-93ce1d0caf36\") " Jan 20 17:03:13 crc kubenswrapper[4558]: I0120 17:03:13.829909 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fa82f11-190d-4f03-90eb-93ce1d0caf36-combined-ca-bundle\") pod \"1fa82f11-190d-4f03-90eb-93ce1d0caf36\" (UID: \"1fa82f11-190d-4f03-90eb-93ce1d0caf36\") " Jan 20 17:03:13 crc kubenswrapper[4558]: I0120 17:03:13.829978 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5lb85\" (UniqueName: \"kubernetes.io/projected/1fa82f11-190d-4f03-90eb-93ce1d0caf36-kube-api-access-5lb85\") pod \"1fa82f11-190d-4f03-90eb-93ce1d0caf36\" (UID: \"1fa82f11-190d-4f03-90eb-93ce1d0caf36\") " Jan 20 17:03:13 crc kubenswrapper[4558]: I0120 17:03:13.830004 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1fa82f11-190d-4f03-90eb-93ce1d0caf36-sg-core-conf-yaml\") pod \"1fa82f11-190d-4f03-90eb-93ce1d0caf36\" (UID: \"1fa82f11-190d-4f03-90eb-93ce1d0caf36\") " Jan 20 17:03:13 crc kubenswrapper[4558]: I0120 17:03:13.830245 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1fa82f11-190d-4f03-90eb-93ce1d0caf36-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "1fa82f11-190d-4f03-90eb-93ce1d0caf36" (UID: "1fa82f11-190d-4f03-90eb-93ce1d0caf36"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:03:13 crc kubenswrapper[4558]: I0120 17:03:13.830481 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1fa82f11-190d-4f03-90eb-93ce1d0caf36-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:13 crc kubenswrapper[4558]: I0120 17:03:13.832535 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1fa82f11-190d-4f03-90eb-93ce1d0caf36-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "1fa82f11-190d-4f03-90eb-93ce1d0caf36" (UID: "1fa82f11-190d-4f03-90eb-93ce1d0caf36"). 
InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:03:13 crc kubenswrapper[4558]: I0120 17:03:13.841844 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1fa82f11-190d-4f03-90eb-93ce1d0caf36-kube-api-access-5lb85" (OuterVolumeSpecName: "kube-api-access-5lb85") pod "1fa82f11-190d-4f03-90eb-93ce1d0caf36" (UID: "1fa82f11-190d-4f03-90eb-93ce1d0caf36"). InnerVolumeSpecName "kube-api-access-5lb85". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:03:13 crc kubenswrapper[4558]: I0120 17:03:13.847760 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1fa82f11-190d-4f03-90eb-93ce1d0caf36-scripts" (OuterVolumeSpecName: "scripts") pod "1fa82f11-190d-4f03-90eb-93ce1d0caf36" (UID: "1fa82f11-190d-4f03-90eb-93ce1d0caf36"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:03:13 crc kubenswrapper[4558]: I0120 17:03:13.848373 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-595b48fd98-kbjv6" event={"ID":"32a3bcd7-25d6-45f5-8ce6-66949357504c","Type":"ContainerStarted","Data":"b2c58a9c1693c20653debec2694a95f53b8b1e5e7675f997990f41753ef39a46"} Jan 20 17:03:13 crc kubenswrapper[4558]: I0120 17:03:13.848409 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-595b48fd98-kbjv6" event={"ID":"32a3bcd7-25d6-45f5-8ce6-66949357504c","Type":"ContainerStarted","Data":"1e67af77bc964f499418d3c854f0b6163e2f81ff3e6521a66f14aa8ee4590e1c"} Jan 20 17:03:13 crc kubenswrapper[4558]: I0120 17:03:13.848420 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-595b48fd98-kbjv6" event={"ID":"32a3bcd7-25d6-45f5-8ce6-66949357504c","Type":"ContainerStarted","Data":"aca90fe826962a0425f1b8f940f88055423c5f619ddea8b4007cc84ac876fd8a"} Jan 20 17:03:13 crc kubenswrapper[4558]: I0120 17:03:13.848625 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/swift-proxy-595b48fd98-kbjv6" Jan 20 17:03:13 crc kubenswrapper[4558]: I0120 17:03:13.848662 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/swift-proxy-595b48fd98-kbjv6" Jan 20 17:03:13 crc kubenswrapper[4558]: I0120 17:03:13.853094 4558 generic.go:334] "Generic (PLEG): container finished" podID="1fa82f11-190d-4f03-90eb-93ce1d0caf36" containerID="f1726311e40abb9f8cd333b1557f3a45f29a8c750b323e60afc3571fcd98b0d9" exitCode=0 Jan 20 17:03:13 crc kubenswrapper[4558]: I0120 17:03:13.853132 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"1fa82f11-190d-4f03-90eb-93ce1d0caf36","Type":"ContainerDied","Data":"f1726311e40abb9f8cd333b1557f3a45f29a8c750b323e60afc3571fcd98b0d9"} Jan 20 17:03:13 crc kubenswrapper[4558]: I0120 17:03:13.853149 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"1fa82f11-190d-4f03-90eb-93ce1d0caf36","Type":"ContainerDied","Data":"71c561b0c5426060c5c7a25047a00e71bca020a116c5379dce18e337c20af72f"} Jan 20 17:03:13 crc kubenswrapper[4558]: I0120 17:03:13.853206 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:13 crc kubenswrapper[4558]: I0120 17:03:13.873826 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/swift-proxy-595b48fd98-kbjv6" podStartSLOduration=1.873811629 podStartE2EDuration="1.873811629s" podCreationTimestamp="2026-01-20 17:03:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:03:13.861428541 +0000 UTC m=+1287.621766529" watchObservedRunningTime="2026-01-20 17:03:13.873811629 +0000 UTC m=+1287.634149596" Jan 20 17:03:13 crc kubenswrapper[4558]: I0120 17:03:13.886785 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1fa82f11-190d-4f03-90eb-93ce1d0caf36-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "1fa82f11-190d-4f03-90eb-93ce1d0caf36" (UID: "1fa82f11-190d-4f03-90eb-93ce1d0caf36"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:03:13 crc kubenswrapper[4558]: I0120 17:03:13.917105 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1fa82f11-190d-4f03-90eb-93ce1d0caf36-config-data" (OuterVolumeSpecName: "config-data") pod "1fa82f11-190d-4f03-90eb-93ce1d0caf36" (UID: "1fa82f11-190d-4f03-90eb-93ce1d0caf36"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:03:13 crc kubenswrapper[4558]: I0120 17:03:13.927382 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1fa82f11-190d-4f03-90eb-93ce1d0caf36-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1fa82f11-190d-4f03-90eb-93ce1d0caf36" (UID: "1fa82f11-190d-4f03-90eb-93ce1d0caf36"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:03:13 crc kubenswrapper[4558]: I0120 17:03:13.933762 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1fa82f11-190d-4f03-90eb-93ce1d0caf36-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:13 crc kubenswrapper[4558]: I0120 17:03:13.933788 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1fa82f11-190d-4f03-90eb-93ce1d0caf36-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:13 crc kubenswrapper[4558]: I0120 17:03:13.933798 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fa82f11-190d-4f03-90eb-93ce1d0caf36-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:13 crc kubenswrapper[4558]: I0120 17:03:13.933808 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5lb85\" (UniqueName: \"kubernetes.io/projected/1fa82f11-190d-4f03-90eb-93ce1d0caf36-kube-api-access-5lb85\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:13 crc kubenswrapper[4558]: I0120 17:03:13.933816 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1fa82f11-190d-4f03-90eb-93ce1d0caf36-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:13 crc kubenswrapper[4558]: I0120 17:03:13.933825 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1fa82f11-190d-4f03-90eb-93ce1d0caf36-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:13 crc kubenswrapper[4558]: I0120 17:03:13.990632 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:03:13 crc kubenswrapper[4558]: I0120 17:03:13.999808 4558 scope.go:117] "RemoveContainer" containerID="b1010de6c5451fa8fdfb551cfb1d14c2ce97f03ca7a9a4dfc2b5fa33e42ee8b3" Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.002082 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-5f668b67df-bhm4t"] Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.008610 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-5f668b67df-bhm4t"] Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.020355 4558 scope.go:117] "RemoveContainer" containerID="1f93abb9eab5658358835514b46ebdffd05153b84dd13189515b67613f63f304" Jan 20 17:03:14 crc kubenswrapper[4558]: E0120 17:03:14.021801 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1f93abb9eab5658358835514b46ebdffd05153b84dd13189515b67613f63f304\": container with ID starting with 1f93abb9eab5658358835514b46ebdffd05153b84dd13189515b67613f63f304 not found: ID does not exist" containerID="1f93abb9eab5658358835514b46ebdffd05153b84dd13189515b67613f63f304" Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.021838 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1f93abb9eab5658358835514b46ebdffd05153b84dd13189515b67613f63f304"} err="failed to get container status \"1f93abb9eab5658358835514b46ebdffd05153b84dd13189515b67613f63f304\": rpc error: code = NotFound desc = could not find container \"1f93abb9eab5658358835514b46ebdffd05153b84dd13189515b67613f63f304\": container with ID starting with 
1f93abb9eab5658358835514b46ebdffd05153b84dd13189515b67613f63f304 not found: ID does not exist" Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.021861 4558 scope.go:117] "RemoveContainer" containerID="b1010de6c5451fa8fdfb551cfb1d14c2ce97f03ca7a9a4dfc2b5fa33e42ee8b3" Jan 20 17:03:14 crc kubenswrapper[4558]: E0120 17:03:14.022208 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b1010de6c5451fa8fdfb551cfb1d14c2ce97f03ca7a9a4dfc2b5fa33e42ee8b3\": container with ID starting with b1010de6c5451fa8fdfb551cfb1d14c2ce97f03ca7a9a4dfc2b5fa33e42ee8b3 not found: ID does not exist" containerID="b1010de6c5451fa8fdfb551cfb1d14c2ce97f03ca7a9a4dfc2b5fa33e42ee8b3" Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.022238 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b1010de6c5451fa8fdfb551cfb1d14c2ce97f03ca7a9a4dfc2b5fa33e42ee8b3"} err="failed to get container status \"b1010de6c5451fa8fdfb551cfb1d14c2ce97f03ca7a9a4dfc2b5fa33e42ee8b3\": rpc error: code = NotFound desc = could not find container \"b1010de6c5451fa8fdfb551cfb1d14c2ce97f03ca7a9a4dfc2b5fa33e42ee8b3\": container with ID starting with b1010de6c5451fa8fdfb551cfb1d14c2ce97f03ca7a9a4dfc2b5fa33e42ee8b3 not found: ID does not exist" Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.022255 4558 scope.go:117] "RemoveContainer" containerID="f9d98cfacb9e982fff6de888dacf794e1dbf9cc9e06d9800767b7d467cb73658" Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.038570 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hf7p8\" (UniqueName: \"kubernetes.io/projected/55c237c8-936c-45d1-b06c-374da878c2b7-kube-api-access-hf7p8\") pod \"55c237c8-936c-45d1-b06c-374da878c2b7\" (UID: \"55c237c8-936c-45d1-b06c-374da878c2b7\") " Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.045930 4558 scope.go:117] "RemoveContainer" containerID="9e41d81a80fc745c023df19c0397a12429361410c19b1d7b04de7da6ee1b3b38" Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.046761 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/55c237c8-936c-45d1-b06c-374da878c2b7-kube-api-access-hf7p8" (OuterVolumeSpecName: "kube-api-access-hf7p8") pod "55c237c8-936c-45d1-b06c-374da878c2b7" (UID: "55c237c8-936c-45d1-b06c-374da878c2b7"). InnerVolumeSpecName "kube-api-access-hf7p8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.079706 4558 scope.go:117] "RemoveContainer" containerID="f1726311e40abb9f8cd333b1557f3a45f29a8c750b323e60afc3571fcd98b0d9" Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.092440 4558 scope.go:117] "RemoveContainer" containerID="5da46a41a4afdd1907ffd116daa323945a958ea2d34c974bd437261d9f1d45b9" Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.106895 4558 scope.go:117] "RemoveContainer" containerID="f9d98cfacb9e982fff6de888dacf794e1dbf9cc9e06d9800767b7d467cb73658" Jan 20 17:03:14 crc kubenswrapper[4558]: E0120 17:03:14.107203 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f9d98cfacb9e982fff6de888dacf794e1dbf9cc9e06d9800767b7d467cb73658\": container with ID starting with f9d98cfacb9e982fff6de888dacf794e1dbf9cc9e06d9800767b7d467cb73658 not found: ID does not exist" containerID="f9d98cfacb9e982fff6de888dacf794e1dbf9cc9e06d9800767b7d467cb73658" Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.107232 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f9d98cfacb9e982fff6de888dacf794e1dbf9cc9e06d9800767b7d467cb73658"} err="failed to get container status \"f9d98cfacb9e982fff6de888dacf794e1dbf9cc9e06d9800767b7d467cb73658\": rpc error: code = NotFound desc = could not find container \"f9d98cfacb9e982fff6de888dacf794e1dbf9cc9e06d9800767b7d467cb73658\": container with ID starting with f9d98cfacb9e982fff6de888dacf794e1dbf9cc9e06d9800767b7d467cb73658 not found: ID does not exist" Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.107252 4558 scope.go:117] "RemoveContainer" containerID="9e41d81a80fc745c023df19c0397a12429361410c19b1d7b04de7da6ee1b3b38" Jan 20 17:03:14 crc kubenswrapper[4558]: E0120 17:03:14.107524 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9e41d81a80fc745c023df19c0397a12429361410c19b1d7b04de7da6ee1b3b38\": container with ID starting with 9e41d81a80fc745c023df19c0397a12429361410c19b1d7b04de7da6ee1b3b38 not found: ID does not exist" containerID="9e41d81a80fc745c023df19c0397a12429361410c19b1d7b04de7da6ee1b3b38" Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.107556 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9e41d81a80fc745c023df19c0397a12429361410c19b1d7b04de7da6ee1b3b38"} err="failed to get container status \"9e41d81a80fc745c023df19c0397a12429361410c19b1d7b04de7da6ee1b3b38\": rpc error: code = NotFound desc = could not find container \"9e41d81a80fc745c023df19c0397a12429361410c19b1d7b04de7da6ee1b3b38\": container with ID starting with 9e41d81a80fc745c023df19c0397a12429361410c19b1d7b04de7da6ee1b3b38 not found: ID does not exist" Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.107580 4558 scope.go:117] "RemoveContainer" containerID="f1726311e40abb9f8cd333b1557f3a45f29a8c750b323e60afc3571fcd98b0d9" Jan 20 17:03:14 crc kubenswrapper[4558]: E0120 17:03:14.107797 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f1726311e40abb9f8cd333b1557f3a45f29a8c750b323e60afc3571fcd98b0d9\": container with ID starting with f1726311e40abb9f8cd333b1557f3a45f29a8c750b323e60afc3571fcd98b0d9 not found: ID does not exist" containerID="f1726311e40abb9f8cd333b1557f3a45f29a8c750b323e60afc3571fcd98b0d9" Jan 20 17:03:14 crc 
kubenswrapper[4558]: I0120 17:03:14.107823 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f1726311e40abb9f8cd333b1557f3a45f29a8c750b323e60afc3571fcd98b0d9"} err="failed to get container status \"f1726311e40abb9f8cd333b1557f3a45f29a8c750b323e60afc3571fcd98b0d9\": rpc error: code = NotFound desc = could not find container \"f1726311e40abb9f8cd333b1557f3a45f29a8c750b323e60afc3571fcd98b0d9\": container with ID starting with f1726311e40abb9f8cd333b1557f3a45f29a8c750b323e60afc3571fcd98b0d9 not found: ID does not exist" Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.107841 4558 scope.go:117] "RemoveContainer" containerID="5da46a41a4afdd1907ffd116daa323945a958ea2d34c974bd437261d9f1d45b9" Jan 20 17:03:14 crc kubenswrapper[4558]: E0120 17:03:14.108056 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5da46a41a4afdd1907ffd116daa323945a958ea2d34c974bd437261d9f1d45b9\": container with ID starting with 5da46a41a4afdd1907ffd116daa323945a958ea2d34c974bd437261d9f1d45b9 not found: ID does not exist" containerID="5da46a41a4afdd1907ffd116daa323945a958ea2d34c974bd437261d9f1d45b9" Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.108080 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5da46a41a4afdd1907ffd116daa323945a958ea2d34c974bd437261d9f1d45b9"} err="failed to get container status \"5da46a41a4afdd1907ffd116daa323945a958ea2d34c974bd437261d9f1d45b9\": rpc error: code = NotFound desc = could not find container \"5da46a41a4afdd1907ffd116daa323945a958ea2d34c974bd437261d9f1d45b9\": container with ID starting with 5da46a41a4afdd1907ffd116daa323945a958ea2d34c974bd437261d9f1d45b9 not found: ID does not exist" Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.140555 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hf7p8\" (UniqueName: \"kubernetes.io/projected/55c237c8-936c-45d1-b06c-374da878c2b7-kube-api-access-hf7p8\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.178587 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.183758 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.193711 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:03:14 crc kubenswrapper[4558]: E0120 17:03:14.193958 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1fa82f11-190d-4f03-90eb-93ce1d0caf36" containerName="proxy-httpd" Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.193975 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1fa82f11-190d-4f03-90eb-93ce1d0caf36" containerName="proxy-httpd" Jan 20 17:03:14 crc kubenswrapper[4558]: E0120 17:03:14.193987 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1fa82f11-190d-4f03-90eb-93ce1d0caf36" containerName="sg-core" Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.193993 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1fa82f11-190d-4f03-90eb-93ce1d0caf36" containerName="sg-core" Jan 20 17:03:14 crc kubenswrapper[4558]: E0120 17:03:14.194004 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1fa82f11-190d-4f03-90eb-93ce1d0caf36" containerName="ceilometer-central-agent" 
Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.194009 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1fa82f11-190d-4f03-90eb-93ce1d0caf36" containerName="ceilometer-central-agent" Jan 20 17:03:14 crc kubenswrapper[4558]: E0120 17:03:14.194014 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1fa82f11-190d-4f03-90eb-93ce1d0caf36" containerName="ceilometer-notification-agent" Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.194020 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1fa82f11-190d-4f03-90eb-93ce1d0caf36" containerName="ceilometer-notification-agent" Jan 20 17:03:14 crc kubenswrapper[4558]: E0120 17:03:14.194033 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55c237c8-936c-45d1-b06c-374da878c2b7" containerName="kube-state-metrics" Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.194038 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="55c237c8-936c-45d1-b06c-374da878c2b7" containerName="kube-state-metrics" Jan 20 17:03:14 crc kubenswrapper[4558]: E0120 17:03:14.194049 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="561ded1f-50d2-4eb9-8ceb-c6f587ee80d6" containerName="neutron-api" Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.194055 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="561ded1f-50d2-4eb9-8ceb-c6f587ee80d6" containerName="neutron-api" Jan 20 17:03:14 crc kubenswrapper[4558]: E0120 17:03:14.194069 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="561ded1f-50d2-4eb9-8ceb-c6f587ee80d6" containerName="neutron-httpd" Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.194075 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="561ded1f-50d2-4eb9-8ceb-c6f587ee80d6" containerName="neutron-httpd" Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.194212 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="1fa82f11-190d-4f03-90eb-93ce1d0caf36" containerName="proxy-httpd" Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.194224 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="55c237c8-936c-45d1-b06c-374da878c2b7" containerName="kube-state-metrics" Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.194233 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="561ded1f-50d2-4eb9-8ceb-c6f587ee80d6" containerName="neutron-api" Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.194244 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="1fa82f11-190d-4f03-90eb-93ce1d0caf36" containerName="sg-core" Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.194257 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="1fa82f11-190d-4f03-90eb-93ce1d0caf36" containerName="ceilometer-notification-agent" Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.194266 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="1fa82f11-190d-4f03-90eb-93ce1d0caf36" containerName="ceilometer-central-agent" Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.194293 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="561ded1f-50d2-4eb9-8ceb-c6f587ee80d6" containerName="neutron-httpd" Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.195451 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.203353 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.204667 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.204982 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.241518 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9047bd36-d5f2-4fef-870e-1228092716d7-log-httpd\") pod \"ceilometer-0\" (UID: \"9047bd36-d5f2-4fef-870e-1228092716d7\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.241553 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5cwrb\" (UniqueName: \"kubernetes.io/projected/9047bd36-d5f2-4fef-870e-1228092716d7-kube-api-access-5cwrb\") pod \"ceilometer-0\" (UID: \"9047bd36-d5f2-4fef-870e-1228092716d7\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.241594 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9047bd36-d5f2-4fef-870e-1228092716d7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"9047bd36-d5f2-4fef-870e-1228092716d7\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.241728 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9047bd36-d5f2-4fef-870e-1228092716d7-config-data\") pod \"ceilometer-0\" (UID: \"9047bd36-d5f2-4fef-870e-1228092716d7\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.241819 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9047bd36-d5f2-4fef-870e-1228092716d7-run-httpd\") pod \"ceilometer-0\" (UID: \"9047bd36-d5f2-4fef-870e-1228092716d7\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.241845 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9047bd36-d5f2-4fef-870e-1228092716d7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"9047bd36-d5f2-4fef-870e-1228092716d7\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.242009 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9047bd36-d5f2-4fef-870e-1228092716d7-scripts\") pod \"ceilometer-0\" (UID: \"9047bd36-d5f2-4fef-870e-1228092716d7\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.343181 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9047bd36-d5f2-4fef-870e-1228092716d7-combined-ca-bundle\") pod \"ceilometer-0\" 
(UID: \"9047bd36-d5f2-4fef-870e-1228092716d7\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.343227 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9047bd36-d5f2-4fef-870e-1228092716d7-config-data\") pod \"ceilometer-0\" (UID: \"9047bd36-d5f2-4fef-870e-1228092716d7\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.343269 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9047bd36-d5f2-4fef-870e-1228092716d7-run-httpd\") pod \"ceilometer-0\" (UID: \"9047bd36-d5f2-4fef-870e-1228092716d7\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.343294 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9047bd36-d5f2-4fef-870e-1228092716d7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"9047bd36-d5f2-4fef-870e-1228092716d7\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.343363 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9047bd36-d5f2-4fef-870e-1228092716d7-scripts\") pod \"ceilometer-0\" (UID: \"9047bd36-d5f2-4fef-870e-1228092716d7\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.343384 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9047bd36-d5f2-4fef-870e-1228092716d7-log-httpd\") pod \"ceilometer-0\" (UID: \"9047bd36-d5f2-4fef-870e-1228092716d7\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.343397 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5cwrb\" (UniqueName: \"kubernetes.io/projected/9047bd36-d5f2-4fef-870e-1228092716d7-kube-api-access-5cwrb\") pod \"ceilometer-0\" (UID: \"9047bd36-d5f2-4fef-870e-1228092716d7\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.343992 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9047bd36-d5f2-4fef-870e-1228092716d7-run-httpd\") pod \"ceilometer-0\" (UID: \"9047bd36-d5f2-4fef-870e-1228092716d7\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.343991 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9047bd36-d5f2-4fef-870e-1228092716d7-log-httpd\") pod \"ceilometer-0\" (UID: \"9047bd36-d5f2-4fef-870e-1228092716d7\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.346022 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9047bd36-d5f2-4fef-870e-1228092716d7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"9047bd36-d5f2-4fef-870e-1228092716d7\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.346599 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/9047bd36-d5f2-4fef-870e-1228092716d7-scripts\") pod \"ceilometer-0\" (UID: \"9047bd36-d5f2-4fef-870e-1228092716d7\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.347067 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9047bd36-d5f2-4fef-870e-1228092716d7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"9047bd36-d5f2-4fef-870e-1228092716d7\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.347156 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9047bd36-d5f2-4fef-870e-1228092716d7-config-data\") pod \"ceilometer-0\" (UID: \"9047bd36-d5f2-4fef-870e-1228092716d7\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.356526 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5cwrb\" (UniqueName: \"kubernetes.io/projected/9047bd36-d5f2-4fef-870e-1228092716d7-kube-api-access-5cwrb\") pod \"ceilometer-0\" (UID: \"9047bd36-d5f2-4fef-870e-1228092716d7\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.516724 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.607391 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1fa82f11-190d-4f03-90eb-93ce1d0caf36" path="/var/lib/kubelet/pods/1fa82f11-190d-4f03-90eb-93ce1d0caf36/volumes" Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.608089 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="561ded1f-50d2-4eb9-8ceb-c6f587ee80d6" path="/var/lib/kubelet/pods/561ded1f-50d2-4eb9-8ceb-c6f587ee80d6/volumes" Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.864045 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"55c237c8-936c-45d1-b06c-374da878c2b7","Type":"ContainerDied","Data":"87af8981e510ec9fdbcbf38749659350d0e429a3b5aff714261014a5b42e5314"} Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.864256 4558 scope.go:117] "RemoveContainer" containerID="40a664774128414719591bb05b093341822cb024885c28f9e0a4e07573f067a1" Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.864064 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.886366 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.895307 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.903624 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.904665 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.910040 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-kube-state-metrics-svc" Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.910105 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"kube-state-metrics-tls-config" Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.912998 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.972483 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.980028 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9c2a3b1-71ed-4612-8cd0-22e396cd622c-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"b9c2a3b1-71ed-4612-8cd0-22e396cd622c\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.980262 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/b9c2a3b1-71ed-4612-8cd0-22e396cd622c-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"b9c2a3b1-71ed-4612-8cd0-22e396cd622c\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.980333 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nbdpg\" (UniqueName: \"kubernetes.io/projected/b9c2a3b1-71ed-4612-8cd0-22e396cd622c-kube-api-access-nbdpg\") pod \"kube-state-metrics-0\" (UID: \"b9c2a3b1-71ed-4612-8cd0-22e396cd622c\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:03:14 crc kubenswrapper[4558]: I0120 17:03:14.980397 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/b9c2a3b1-71ed-4612-8cd0-22e396cd622c-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"b9c2a3b1-71ed-4612-8cd0-22e396cd622c\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:03:15 crc kubenswrapper[4558]: I0120 17:03:15.081685 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/b9c2a3b1-71ed-4612-8cd0-22e396cd622c-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"b9c2a3b1-71ed-4612-8cd0-22e396cd622c\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:03:15 crc kubenswrapper[4558]: I0120 17:03:15.081932 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nbdpg\" (UniqueName: \"kubernetes.io/projected/b9c2a3b1-71ed-4612-8cd0-22e396cd622c-kube-api-access-nbdpg\") pod \"kube-state-metrics-0\" (UID: \"b9c2a3b1-71ed-4612-8cd0-22e396cd622c\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:03:15 crc kubenswrapper[4558]: I0120 17:03:15.081955 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/b9c2a3b1-71ed-4612-8cd0-22e396cd622c-kube-state-metrics-tls-certs\") 
pod \"kube-state-metrics-0\" (UID: \"b9c2a3b1-71ed-4612-8cd0-22e396cd622c\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:03:15 crc kubenswrapper[4558]: I0120 17:03:15.082000 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9c2a3b1-71ed-4612-8cd0-22e396cd622c-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"b9c2a3b1-71ed-4612-8cd0-22e396cd622c\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:03:15 crc kubenswrapper[4558]: I0120 17:03:15.086543 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9c2a3b1-71ed-4612-8cd0-22e396cd622c-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"b9c2a3b1-71ed-4612-8cd0-22e396cd622c\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:03:15 crc kubenswrapper[4558]: I0120 17:03:15.086550 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/b9c2a3b1-71ed-4612-8cd0-22e396cd622c-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"b9c2a3b1-71ed-4612-8cd0-22e396cd622c\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:03:15 crc kubenswrapper[4558]: I0120 17:03:15.086610 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/b9c2a3b1-71ed-4612-8cd0-22e396cd622c-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"b9c2a3b1-71ed-4612-8cd0-22e396cd622c\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:03:15 crc kubenswrapper[4558]: I0120 17:03:15.098156 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nbdpg\" (UniqueName: \"kubernetes.io/projected/b9c2a3b1-71ed-4612-8cd0-22e396cd622c-kube-api-access-nbdpg\") pod \"kube-state-metrics-0\" (UID: \"b9c2a3b1-71ed-4612-8cd0-22e396cd622c\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:03:15 crc kubenswrapper[4558]: I0120 17:03:15.220864 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:03:15 crc kubenswrapper[4558]: I0120 17:03:15.250020 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:03:15 crc kubenswrapper[4558]: I0120 17:03:15.590618 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:03:15 crc kubenswrapper[4558]: I0120 17:03:15.874084 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"9047bd36-d5f2-4fef-870e-1228092716d7","Type":"ContainerStarted","Data":"13e9017387ebd05b878e43e0354d0391c3db37380de58eb058352c89fd44c036"} Jan 20 17:03:15 crc kubenswrapper[4558]: I0120 17:03:15.875893 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"b9c2a3b1-71ed-4612-8cd0-22e396cd622c","Type":"ContainerStarted","Data":"24d2ad8b347da988c262c7a6dbc7c0779cc855440fe9d2a82930189a3c6b9e16"} Jan 20 17:03:16 crc kubenswrapper[4558]: I0120 17:03:16.577652 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="55c237c8-936c-45d1-b06c-374da878c2b7" path="/var/lib/kubelet/pods/55c237c8-936c-45d1-b06c-374da878c2b7/volumes" Jan 20 17:03:16 crc kubenswrapper[4558]: I0120 17:03:16.886364 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"b9c2a3b1-71ed-4612-8cd0-22e396cd622c","Type":"ContainerStarted","Data":"d79bd7c00910cb1ea357df6713d24c89de9eda30118ccacbc0177383e7169dd9"} Jan 20 17:03:16 crc kubenswrapper[4558]: I0120 17:03:16.886863 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:03:16 crc kubenswrapper[4558]: I0120 17:03:16.898552 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/kube-state-metrics-0" podStartSLOduration=2.64253236 podStartE2EDuration="2.898543151s" podCreationTimestamp="2026-01-20 17:03:14 +0000 UTC" firstStartedPulling="2026-01-20 17:03:15.594561298 +0000 UTC m=+1289.354899265" lastFinishedPulling="2026-01-20 17:03:15.850572088 +0000 UTC m=+1289.610910056" observedRunningTime="2026-01-20 17:03:16.896350898 +0000 UTC m=+1290.656688865" watchObservedRunningTime="2026-01-20 17:03:16.898543151 +0000 UTC m=+1290.658881118" Jan 20 17:03:17 crc kubenswrapper[4558]: I0120 17:03:17.752347 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-db-create-vzcfm"] Jan 20 17:03:17 crc kubenswrapper[4558]: I0120 17:03:17.753602 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-db-create-vzcfm" Jan 20 17:03:17 crc kubenswrapper[4558]: I0120 17:03:17.762504 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-db-create-vzcfm"] Jan 20 17:03:17 crc kubenswrapper[4558]: I0120 17:03:17.824865 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8505b3de-7094-43aa-a66b-13ba40bd4c13-operator-scripts\") pod \"nova-api-db-create-vzcfm\" (UID: \"8505b3de-7094-43aa-a66b-13ba40bd4c13\") " pod="openstack-kuttl-tests/nova-api-db-create-vzcfm" Jan 20 17:03:17 crc kubenswrapper[4558]: I0120 17:03:17.824915 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9q6qq\" (UniqueName: \"kubernetes.io/projected/8505b3de-7094-43aa-a66b-13ba40bd4c13-kube-api-access-9q6qq\") pod \"nova-api-db-create-vzcfm\" (UID: \"8505b3de-7094-43aa-a66b-13ba40bd4c13\") " pod="openstack-kuttl-tests/nova-api-db-create-vzcfm" Jan 20 17:03:17 crc kubenswrapper[4558]: I0120 17:03:17.852051 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-1b9c-account-create-update-dh8jf"] Jan 20 17:03:17 crc kubenswrapper[4558]: I0120 17:03:17.853186 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-1b9c-account-create-update-dh8jf" Jan 20 17:03:17 crc kubenswrapper[4558]: I0120 17:03:17.854582 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-api-db-secret" Jan 20 17:03:17 crc kubenswrapper[4558]: I0120 17:03:17.859595 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-1b9c-account-create-update-dh8jf"] Jan 20 17:03:17 crc kubenswrapper[4558]: I0120 17:03:17.894597 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"9047bd36-d5f2-4fef-870e-1228092716d7","Type":"ContainerStarted","Data":"a7dd103239d386df2b9abcfb0a1a1ccfcb59ea6199748b99c7ad89f0371890d4"} Jan 20 17:03:17 crc kubenswrapper[4558]: I0120 17:03:17.894642 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"9047bd36-d5f2-4fef-870e-1228092716d7","Type":"ContainerStarted","Data":"4d1561b3d4f216e5f14f5050766ba10fd5cbbe876476d575736b02e51572264a"} Jan 20 17:03:17 crc kubenswrapper[4558]: I0120 17:03:17.925848 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1f7c84b8-e07d-4268-b6e3-112cc6b2558e-operator-scripts\") pod \"nova-api-1b9c-account-create-update-dh8jf\" (UID: \"1f7c84b8-e07d-4268-b6e3-112cc6b2558e\") " pod="openstack-kuttl-tests/nova-api-1b9c-account-create-update-dh8jf" Jan 20 17:03:17 crc kubenswrapper[4558]: I0120 17:03:17.926037 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8505b3de-7094-43aa-a66b-13ba40bd4c13-operator-scripts\") pod \"nova-api-db-create-vzcfm\" (UID: \"8505b3de-7094-43aa-a66b-13ba40bd4c13\") " pod="openstack-kuttl-tests/nova-api-db-create-vzcfm" Jan 20 17:03:17 crc kubenswrapper[4558]: I0120 17:03:17.926111 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9q6qq\" (UniqueName: 
\"kubernetes.io/projected/8505b3de-7094-43aa-a66b-13ba40bd4c13-kube-api-access-9q6qq\") pod \"nova-api-db-create-vzcfm\" (UID: \"8505b3de-7094-43aa-a66b-13ba40bd4c13\") " pod="openstack-kuttl-tests/nova-api-db-create-vzcfm" Jan 20 17:03:17 crc kubenswrapper[4558]: I0120 17:03:17.926211 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-grhqg\" (UniqueName: \"kubernetes.io/projected/1f7c84b8-e07d-4268-b6e3-112cc6b2558e-kube-api-access-grhqg\") pod \"nova-api-1b9c-account-create-update-dh8jf\" (UID: \"1f7c84b8-e07d-4268-b6e3-112cc6b2558e\") " pod="openstack-kuttl-tests/nova-api-1b9c-account-create-update-dh8jf" Jan 20 17:03:17 crc kubenswrapper[4558]: I0120 17:03:17.927090 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8505b3de-7094-43aa-a66b-13ba40bd4c13-operator-scripts\") pod \"nova-api-db-create-vzcfm\" (UID: \"8505b3de-7094-43aa-a66b-13ba40bd4c13\") " pod="openstack-kuttl-tests/nova-api-db-create-vzcfm" Jan 20 17:03:17 crc kubenswrapper[4558]: I0120 17:03:17.945602 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9q6qq\" (UniqueName: \"kubernetes.io/projected/8505b3de-7094-43aa-a66b-13ba40bd4c13-kube-api-access-9q6qq\") pod \"nova-api-db-create-vzcfm\" (UID: \"8505b3de-7094-43aa-a66b-13ba40bd4c13\") " pod="openstack-kuttl-tests/nova-api-db-create-vzcfm" Jan 20 17:03:17 crc kubenswrapper[4558]: I0120 17:03:17.949131 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell0-db-create-fckx4"] Jan 20 17:03:17 crc kubenswrapper[4558]: I0120 17:03:17.950024 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-db-create-fckx4" Jan 20 17:03:17 crc kubenswrapper[4558]: I0120 17:03:17.956868 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-db-create-fckx4"] Jan 20 17:03:18 crc kubenswrapper[4558]: I0120 17:03:18.027590 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1f7c84b8-e07d-4268-b6e3-112cc6b2558e-operator-scripts\") pod \"nova-api-1b9c-account-create-update-dh8jf\" (UID: \"1f7c84b8-e07d-4268-b6e3-112cc6b2558e\") " pod="openstack-kuttl-tests/nova-api-1b9c-account-create-update-dh8jf" Jan 20 17:03:18 crc kubenswrapper[4558]: I0120 17:03:18.027656 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-82smt\" (UniqueName: \"kubernetes.io/projected/bf8f2ffa-65ef-49d0-ba6a-3bc80f0313ac-kube-api-access-82smt\") pod \"nova-cell0-db-create-fckx4\" (UID: \"bf8f2ffa-65ef-49d0-ba6a-3bc80f0313ac\") " pod="openstack-kuttl-tests/nova-cell0-db-create-fckx4" Jan 20 17:03:18 crc kubenswrapper[4558]: I0120 17:03:18.027742 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-grhqg\" (UniqueName: \"kubernetes.io/projected/1f7c84b8-e07d-4268-b6e3-112cc6b2558e-kube-api-access-grhqg\") pod \"nova-api-1b9c-account-create-update-dh8jf\" (UID: \"1f7c84b8-e07d-4268-b6e3-112cc6b2558e\") " pod="openstack-kuttl-tests/nova-api-1b9c-account-create-update-dh8jf" Jan 20 17:03:18 crc kubenswrapper[4558]: I0120 17:03:18.027800 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/bf8f2ffa-65ef-49d0-ba6a-3bc80f0313ac-operator-scripts\") pod \"nova-cell0-db-create-fckx4\" (UID: \"bf8f2ffa-65ef-49d0-ba6a-3bc80f0313ac\") " pod="openstack-kuttl-tests/nova-cell0-db-create-fckx4" Jan 20 17:03:18 crc kubenswrapper[4558]: I0120 17:03:18.028217 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1f7c84b8-e07d-4268-b6e3-112cc6b2558e-operator-scripts\") pod \"nova-api-1b9c-account-create-update-dh8jf\" (UID: \"1f7c84b8-e07d-4268-b6e3-112cc6b2558e\") " pod="openstack-kuttl-tests/nova-api-1b9c-account-create-update-dh8jf" Jan 20 17:03:18 crc kubenswrapper[4558]: I0120 17:03:18.040949 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-grhqg\" (UniqueName: \"kubernetes.io/projected/1f7c84b8-e07d-4268-b6e3-112cc6b2558e-kube-api-access-grhqg\") pod \"nova-api-1b9c-account-create-update-dh8jf\" (UID: \"1f7c84b8-e07d-4268-b6e3-112cc6b2558e\") " pod="openstack-kuttl-tests/nova-api-1b9c-account-create-update-dh8jf" Jan 20 17:03:18 crc kubenswrapper[4558]: I0120 17:03:18.048767 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-db-create-7ns56"] Jan 20 17:03:18 crc kubenswrapper[4558]: I0120 17:03:18.049706 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-db-create-7ns56" Jan 20 17:03:18 crc kubenswrapper[4558]: I0120 17:03:18.058584 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell0-aebd-account-create-update-mnvvb"] Jan 20 17:03:18 crc kubenswrapper[4558]: I0120 17:03:18.059554 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-aebd-account-create-update-mnvvb" Jan 20 17:03:18 crc kubenswrapper[4558]: I0120 17:03:18.060949 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-db-secret" Jan 20 17:03:18 crc kubenswrapper[4558]: I0120 17:03:18.064070 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-db-create-7ns56"] Jan 20 17:03:18 crc kubenswrapper[4558]: I0120 17:03:18.081089 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-aebd-account-create-update-mnvvb"] Jan 20 17:03:18 crc kubenswrapper[4558]: I0120 17:03:18.117951 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-db-create-vzcfm" Jan 20 17:03:18 crc kubenswrapper[4558]: I0120 17:03:18.129565 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bf8f2ffa-65ef-49d0-ba6a-3bc80f0313ac-operator-scripts\") pod \"nova-cell0-db-create-fckx4\" (UID: \"bf8f2ffa-65ef-49d0-ba6a-3bc80f0313ac\") " pod="openstack-kuttl-tests/nova-cell0-db-create-fckx4" Jan 20 17:03:18 crc kubenswrapper[4558]: I0120 17:03:18.129642 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6ef75305-05ce-4939-bed4-16aaecedff78-operator-scripts\") pod \"nova-cell0-aebd-account-create-update-mnvvb\" (UID: \"6ef75305-05ce-4939-bed4-16aaecedff78\") " pod="openstack-kuttl-tests/nova-cell0-aebd-account-create-update-mnvvb" Jan 20 17:03:18 crc kubenswrapper[4558]: I0120 17:03:18.129672 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qkhj2\" (UniqueName: \"kubernetes.io/projected/49eb0897-5e0c-4075-8814-aa484a886e33-kube-api-access-qkhj2\") pod \"nova-cell1-db-create-7ns56\" (UID: \"49eb0897-5e0c-4075-8814-aa484a886e33\") " pod="openstack-kuttl-tests/nova-cell1-db-create-7ns56" Jan 20 17:03:18 crc kubenswrapper[4558]: I0120 17:03:18.129705 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-82smt\" (UniqueName: \"kubernetes.io/projected/bf8f2ffa-65ef-49d0-ba6a-3bc80f0313ac-kube-api-access-82smt\") pod \"nova-cell0-db-create-fckx4\" (UID: \"bf8f2ffa-65ef-49d0-ba6a-3bc80f0313ac\") " pod="openstack-kuttl-tests/nova-cell0-db-create-fckx4" Jan 20 17:03:18 crc kubenswrapper[4558]: I0120 17:03:18.129740 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/49eb0897-5e0c-4075-8814-aa484a886e33-operator-scripts\") pod \"nova-cell1-db-create-7ns56\" (UID: \"49eb0897-5e0c-4075-8814-aa484a886e33\") " pod="openstack-kuttl-tests/nova-cell1-db-create-7ns56" Jan 20 17:03:18 crc kubenswrapper[4558]: I0120 17:03:18.129792 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w4svw\" (UniqueName: \"kubernetes.io/projected/6ef75305-05ce-4939-bed4-16aaecedff78-kube-api-access-w4svw\") pod \"nova-cell0-aebd-account-create-update-mnvvb\" (UID: \"6ef75305-05ce-4939-bed4-16aaecedff78\") " pod="openstack-kuttl-tests/nova-cell0-aebd-account-create-update-mnvvb" Jan 20 17:03:18 crc kubenswrapper[4558]: I0120 17:03:18.130189 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bf8f2ffa-65ef-49d0-ba6a-3bc80f0313ac-operator-scripts\") pod \"nova-cell0-db-create-fckx4\" (UID: \"bf8f2ffa-65ef-49d0-ba6a-3bc80f0313ac\") " pod="openstack-kuttl-tests/nova-cell0-db-create-fckx4" Jan 20 17:03:18 crc kubenswrapper[4558]: I0120 17:03:18.150836 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-82smt\" (UniqueName: \"kubernetes.io/projected/bf8f2ffa-65ef-49d0-ba6a-3bc80f0313ac-kube-api-access-82smt\") pod \"nova-cell0-db-create-fckx4\" (UID: \"bf8f2ffa-65ef-49d0-ba6a-3bc80f0313ac\") " pod="openstack-kuttl-tests/nova-cell0-db-create-fckx4" Jan 20 17:03:18 crc kubenswrapper[4558]: I0120 17:03:18.185839 4558 util.go:30] "No sandbox for pod 
can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-1b9c-account-create-update-dh8jf" Jan 20 17:03:18 crc kubenswrapper[4558]: I0120 17:03:18.231209 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/49eb0897-5e0c-4075-8814-aa484a886e33-operator-scripts\") pod \"nova-cell1-db-create-7ns56\" (UID: \"49eb0897-5e0c-4075-8814-aa484a886e33\") " pod="openstack-kuttl-tests/nova-cell1-db-create-7ns56" Jan 20 17:03:18 crc kubenswrapper[4558]: I0120 17:03:18.231472 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w4svw\" (UniqueName: \"kubernetes.io/projected/6ef75305-05ce-4939-bed4-16aaecedff78-kube-api-access-w4svw\") pod \"nova-cell0-aebd-account-create-update-mnvvb\" (UID: \"6ef75305-05ce-4939-bed4-16aaecedff78\") " pod="openstack-kuttl-tests/nova-cell0-aebd-account-create-update-mnvvb" Jan 20 17:03:18 crc kubenswrapper[4558]: I0120 17:03:18.231585 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6ef75305-05ce-4939-bed4-16aaecedff78-operator-scripts\") pod \"nova-cell0-aebd-account-create-update-mnvvb\" (UID: \"6ef75305-05ce-4939-bed4-16aaecedff78\") " pod="openstack-kuttl-tests/nova-cell0-aebd-account-create-update-mnvvb" Jan 20 17:03:18 crc kubenswrapper[4558]: I0120 17:03:18.231613 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qkhj2\" (UniqueName: \"kubernetes.io/projected/49eb0897-5e0c-4075-8814-aa484a886e33-kube-api-access-qkhj2\") pod \"nova-cell1-db-create-7ns56\" (UID: \"49eb0897-5e0c-4075-8814-aa484a886e33\") " pod="openstack-kuttl-tests/nova-cell1-db-create-7ns56" Jan 20 17:03:18 crc kubenswrapper[4558]: I0120 17:03:18.231915 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/49eb0897-5e0c-4075-8814-aa484a886e33-operator-scripts\") pod \"nova-cell1-db-create-7ns56\" (UID: \"49eb0897-5e0c-4075-8814-aa484a886e33\") " pod="openstack-kuttl-tests/nova-cell1-db-create-7ns56" Jan 20 17:03:18 crc kubenswrapper[4558]: I0120 17:03:18.232298 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6ef75305-05ce-4939-bed4-16aaecedff78-operator-scripts\") pod \"nova-cell0-aebd-account-create-update-mnvvb\" (UID: \"6ef75305-05ce-4939-bed4-16aaecedff78\") " pod="openstack-kuttl-tests/nova-cell0-aebd-account-create-update-mnvvb" Jan 20 17:03:18 crc kubenswrapper[4558]: I0120 17:03:18.248725 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w4svw\" (UniqueName: \"kubernetes.io/projected/6ef75305-05ce-4939-bed4-16aaecedff78-kube-api-access-w4svw\") pod \"nova-cell0-aebd-account-create-update-mnvvb\" (UID: \"6ef75305-05ce-4939-bed4-16aaecedff78\") " pod="openstack-kuttl-tests/nova-cell0-aebd-account-create-update-mnvvb" Jan 20 17:03:18 crc kubenswrapper[4558]: I0120 17:03:18.251306 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qkhj2\" (UniqueName: \"kubernetes.io/projected/49eb0897-5e0c-4075-8814-aa484a886e33-kube-api-access-qkhj2\") pod \"nova-cell1-db-create-7ns56\" (UID: \"49eb0897-5e0c-4075-8814-aa484a886e33\") " pod="openstack-kuttl-tests/nova-cell1-db-create-7ns56" Jan 20 17:03:18 crc kubenswrapper[4558]: I0120 17:03:18.268887 4558 kubelet.go:2421] 
"SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-8d7e-account-create-update-zl2rq"] Jan 20 17:03:18 crc kubenswrapper[4558]: I0120 17:03:18.269916 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-8d7e-account-create-update-zl2rq" Jan 20 17:03:18 crc kubenswrapper[4558]: I0120 17:03:18.272205 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-db-secret" Jan 20 17:03:18 crc kubenswrapper[4558]: I0120 17:03:18.287309 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-8d7e-account-create-update-zl2rq"] Jan 20 17:03:18 crc kubenswrapper[4558]: I0120 17:03:18.287597 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-db-create-fckx4" Jan 20 17:03:18 crc kubenswrapper[4558]: I0120 17:03:18.334474 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nwnpl\" (UniqueName: \"kubernetes.io/projected/4c82394b-0dc4-4a8a-ae40-dc46e41ddc68-kube-api-access-nwnpl\") pod \"nova-cell1-8d7e-account-create-update-zl2rq\" (UID: \"4c82394b-0dc4-4a8a-ae40-dc46e41ddc68\") " pod="openstack-kuttl-tests/nova-cell1-8d7e-account-create-update-zl2rq" Jan 20 17:03:18 crc kubenswrapper[4558]: I0120 17:03:18.334602 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4c82394b-0dc4-4a8a-ae40-dc46e41ddc68-operator-scripts\") pod \"nova-cell1-8d7e-account-create-update-zl2rq\" (UID: \"4c82394b-0dc4-4a8a-ae40-dc46e41ddc68\") " pod="openstack-kuttl-tests/nova-cell1-8d7e-account-create-update-zl2rq" Jan 20 17:03:18 crc kubenswrapper[4558]: I0120 17:03:18.393945 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-db-create-7ns56" Jan 20 17:03:18 crc kubenswrapper[4558]: I0120 17:03:18.399474 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-aebd-account-create-update-mnvvb" Jan 20 17:03:18 crc kubenswrapper[4558]: I0120 17:03:18.436462 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nwnpl\" (UniqueName: \"kubernetes.io/projected/4c82394b-0dc4-4a8a-ae40-dc46e41ddc68-kube-api-access-nwnpl\") pod \"nova-cell1-8d7e-account-create-update-zl2rq\" (UID: \"4c82394b-0dc4-4a8a-ae40-dc46e41ddc68\") " pod="openstack-kuttl-tests/nova-cell1-8d7e-account-create-update-zl2rq" Jan 20 17:03:18 crc kubenswrapper[4558]: I0120 17:03:18.438741 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4c82394b-0dc4-4a8a-ae40-dc46e41ddc68-operator-scripts\") pod \"nova-cell1-8d7e-account-create-update-zl2rq\" (UID: \"4c82394b-0dc4-4a8a-ae40-dc46e41ddc68\") " pod="openstack-kuttl-tests/nova-cell1-8d7e-account-create-update-zl2rq" Jan 20 17:03:18 crc kubenswrapper[4558]: I0120 17:03:18.439676 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4c82394b-0dc4-4a8a-ae40-dc46e41ddc68-operator-scripts\") pod \"nova-cell1-8d7e-account-create-update-zl2rq\" (UID: \"4c82394b-0dc4-4a8a-ae40-dc46e41ddc68\") " pod="openstack-kuttl-tests/nova-cell1-8d7e-account-create-update-zl2rq" Jan 20 17:03:18 crc kubenswrapper[4558]: I0120 17:03:18.449353 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nwnpl\" (UniqueName: \"kubernetes.io/projected/4c82394b-0dc4-4a8a-ae40-dc46e41ddc68-kube-api-access-nwnpl\") pod \"nova-cell1-8d7e-account-create-update-zl2rq\" (UID: \"4c82394b-0dc4-4a8a-ae40-dc46e41ddc68\") " pod="openstack-kuttl-tests/nova-cell1-8d7e-account-create-update-zl2rq" Jan 20 17:03:18 crc kubenswrapper[4558]: I0120 17:03:18.559029 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-db-create-vzcfm"] Jan 20 17:03:18 crc kubenswrapper[4558]: W0120 17:03:18.560583 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8505b3de_7094_43aa_a66b_13ba40bd4c13.slice/crio-d7f2760e0477f8b2d0c354befa4c88dc514d026556630079e7b8e39b0ac47c2f WatchSource:0}: Error finding container d7f2760e0477f8b2d0c354befa4c88dc514d026556630079e7b8e39b0ac47c2f: Status 404 returned error can't find the container with id d7f2760e0477f8b2d0c354befa4c88dc514d026556630079e7b8e39b0ac47c2f Jan 20 17:03:18 crc kubenswrapper[4558]: I0120 17:03:18.578989 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-db-create-fckx4"] Jan 20 17:03:18 crc kubenswrapper[4558]: I0120 17:03:18.597107 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-8d7e-account-create-update-zl2rq" Jan 20 17:03:18 crc kubenswrapper[4558]: I0120 17:03:18.643689 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-1b9c-account-create-update-dh8jf"] Jan 20 17:03:18 crc kubenswrapper[4558]: W0120 17:03:18.671259 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1f7c84b8_e07d_4268_b6e3_112cc6b2558e.slice/crio-34ee9dd0519bac75f5d5d21d1722675d5a8d2380839ccdede0bb77b3bb098819 WatchSource:0}: Error finding container 34ee9dd0519bac75f5d5d21d1722675d5a8d2380839ccdede0bb77b3bb098819: Status 404 returned error can't find the container with id 34ee9dd0519bac75f5d5d21d1722675d5a8d2380839ccdede0bb77b3bb098819 Jan 20 17:03:18 crc kubenswrapper[4558]: I0120 17:03:18.858415 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-db-create-7ns56"] Jan 20 17:03:18 crc kubenswrapper[4558]: I0120 17:03:18.917148 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"9047bd36-d5f2-4fef-870e-1228092716d7","Type":"ContainerStarted","Data":"46b74cd34c7b0edc04f600c5fac0abd736f9fded1c408df432c3c5509a977ea2"} Jan 20 17:03:18 crc kubenswrapper[4558]: I0120 17:03:18.937855 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-1b9c-account-create-update-dh8jf" event={"ID":"1f7c84b8-e07d-4268-b6e3-112cc6b2558e","Type":"ContainerStarted","Data":"2f7dd16ad3f45538283e03f6272b991aea2504752f702631ee8b13f86d5e231b"} Jan 20 17:03:18 crc kubenswrapper[4558]: I0120 17:03:18.937899 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-1b9c-account-create-update-dh8jf" event={"ID":"1f7c84b8-e07d-4268-b6e3-112cc6b2558e","Type":"ContainerStarted","Data":"34ee9dd0519bac75f5d5d21d1722675d5a8d2380839ccdede0bb77b3bb098819"} Jan 20 17:03:18 crc kubenswrapper[4558]: I0120 17:03:18.939414 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-db-create-fckx4" event={"ID":"bf8f2ffa-65ef-49d0-ba6a-3bc80f0313ac","Type":"ContainerStarted","Data":"92b0792fd25f209b5d17ce804362725cc41d26b7bb428b94617376e79223ee79"} Jan 20 17:03:18 crc kubenswrapper[4558]: I0120 17:03:18.939441 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-db-create-fckx4" event={"ID":"bf8f2ffa-65ef-49d0-ba6a-3bc80f0313ac","Type":"ContainerStarted","Data":"eecfa02483b74d03a7a7e2b14b870a6f1a672f2325020b6d2134108f4754f7e8"} Jan 20 17:03:18 crc kubenswrapper[4558]: I0120 17:03:18.940321 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-aebd-account-create-update-mnvvb"] Jan 20 17:03:18 crc kubenswrapper[4558]: I0120 17:03:18.941660 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-db-create-vzcfm" event={"ID":"8505b3de-7094-43aa-a66b-13ba40bd4c13","Type":"ContainerStarted","Data":"c02c16b021c1478fe3fa6419ce9bea842560b0d70281d36ac4dc50c497b670fb"} Jan 20 17:03:18 crc kubenswrapper[4558]: I0120 17:03:18.942035 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-db-create-vzcfm" event={"ID":"8505b3de-7094-43aa-a66b-13ba40bd4c13","Type":"ContainerStarted","Data":"d7f2760e0477f8b2d0c354befa4c88dc514d026556630079e7b8e39b0ac47c2f"} Jan 20 17:03:18 crc kubenswrapper[4558]: I0120 17:03:18.943130 4558 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-db-create-7ns56" event={"ID":"49eb0897-5e0c-4075-8814-aa484a886e33","Type":"ContainerStarted","Data":"ff7dccf6774a55b85637820ad208525e68da53811b1668c88367c9a1e95d4be5"} Jan 20 17:03:18 crc kubenswrapper[4558]: I0120 17:03:18.958069 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-api-1b9c-account-create-update-dh8jf" podStartSLOduration=1.95544736 podStartE2EDuration="1.95544736s" podCreationTimestamp="2026-01-20 17:03:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:03:18.949146711 +0000 UTC m=+1292.709484678" watchObservedRunningTime="2026-01-20 17:03:18.95544736 +0000 UTC m=+1292.715785328" Jan 20 17:03:18 crc kubenswrapper[4558]: I0120 17:03:18.975841 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell0-db-create-fckx4" podStartSLOduration=1.9757843670000002 podStartE2EDuration="1.975784367s" podCreationTimestamp="2026-01-20 17:03:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:03:18.9620251 +0000 UTC m=+1292.722363067" watchObservedRunningTime="2026-01-20 17:03:18.975784367 +0000 UTC m=+1292.736122334" Jan 20 17:03:18 crc kubenswrapper[4558]: I0120 17:03:18.993124 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-api-db-create-vzcfm" podStartSLOduration=1.9931041889999999 podStartE2EDuration="1.993104189s" podCreationTimestamp="2026-01-20 17:03:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:03:18.985388228 +0000 UTC m=+1292.745726196" watchObservedRunningTime="2026-01-20 17:03:18.993104189 +0000 UTC m=+1292.753442157" Jan 20 17:03:19 crc kubenswrapper[4558]: I0120 17:03:19.076960 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-8d7e-account-create-update-zl2rq"] Jan 20 17:03:19 crc kubenswrapper[4558]: I0120 17:03:19.951896 4558 generic.go:334] "Generic (PLEG): container finished" podID="49eb0897-5e0c-4075-8814-aa484a886e33" containerID="fe128214e7f772bc7935abe6b4072bef9cbf2631f9f205c7d0e3f2ffd9ff1572" exitCode=0 Jan 20 17:03:19 crc kubenswrapper[4558]: I0120 17:03:19.952149 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-db-create-7ns56" event={"ID":"49eb0897-5e0c-4075-8814-aa484a886e33","Type":"ContainerDied","Data":"fe128214e7f772bc7935abe6b4072bef9cbf2631f9f205c7d0e3f2ffd9ff1572"} Jan 20 17:03:19 crc kubenswrapper[4558]: I0120 17:03:19.953982 4558 generic.go:334] "Generic (PLEG): container finished" podID="4c82394b-0dc4-4a8a-ae40-dc46e41ddc68" containerID="7b9317fe62c5265ebcee0b8b4c582e9d2dbd8115c5f719f11d257204794edfb2" exitCode=0 Jan 20 17:03:19 crc kubenswrapper[4558]: I0120 17:03:19.954039 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-8d7e-account-create-update-zl2rq" event={"ID":"4c82394b-0dc4-4a8a-ae40-dc46e41ddc68","Type":"ContainerDied","Data":"7b9317fe62c5265ebcee0b8b4c582e9d2dbd8115c5f719f11d257204794edfb2"} Jan 20 17:03:19 crc kubenswrapper[4558]: I0120 17:03:19.954056 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-kuttl-tests/nova-cell1-8d7e-account-create-update-zl2rq" event={"ID":"4c82394b-0dc4-4a8a-ae40-dc46e41ddc68","Type":"ContainerStarted","Data":"495067f18e1026e0321a181fed8b89693c9c43ff1fcb600c97833c200c8b8058"} Jan 20 17:03:19 crc kubenswrapper[4558]: I0120 17:03:19.955882 4558 generic.go:334] "Generic (PLEG): container finished" podID="1f7c84b8-e07d-4268-b6e3-112cc6b2558e" containerID="2f7dd16ad3f45538283e03f6272b991aea2504752f702631ee8b13f86d5e231b" exitCode=0 Jan 20 17:03:19 crc kubenswrapper[4558]: I0120 17:03:19.955922 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-1b9c-account-create-update-dh8jf" event={"ID":"1f7c84b8-e07d-4268-b6e3-112cc6b2558e","Type":"ContainerDied","Data":"2f7dd16ad3f45538283e03f6272b991aea2504752f702631ee8b13f86d5e231b"} Jan 20 17:03:19 crc kubenswrapper[4558]: I0120 17:03:19.957696 4558 generic.go:334] "Generic (PLEG): container finished" podID="6ef75305-05ce-4939-bed4-16aaecedff78" containerID="92f915a76088a8ba412c82688f4d1506c1c3c06ae16505ce91c0d9ea9dfe22aa" exitCode=0 Jan 20 17:03:19 crc kubenswrapper[4558]: I0120 17:03:19.957741 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-aebd-account-create-update-mnvvb" event={"ID":"6ef75305-05ce-4939-bed4-16aaecedff78","Type":"ContainerDied","Data":"92f915a76088a8ba412c82688f4d1506c1c3c06ae16505ce91c0d9ea9dfe22aa"} Jan 20 17:03:19 crc kubenswrapper[4558]: I0120 17:03:19.957756 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-aebd-account-create-update-mnvvb" event={"ID":"6ef75305-05ce-4939-bed4-16aaecedff78","Type":"ContainerStarted","Data":"d05ab4d5314a02654e856cb250f828a89e504f464f1c004f16204519254b2dcf"} Jan 20 17:03:19 crc kubenswrapper[4558]: I0120 17:03:19.959071 4558 generic.go:334] "Generic (PLEG): container finished" podID="bf8f2ffa-65ef-49d0-ba6a-3bc80f0313ac" containerID="92b0792fd25f209b5d17ce804362725cc41d26b7bb428b94617376e79223ee79" exitCode=0 Jan 20 17:03:19 crc kubenswrapper[4558]: I0120 17:03:19.959109 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-db-create-fckx4" event={"ID":"bf8f2ffa-65ef-49d0-ba6a-3bc80f0313ac","Type":"ContainerDied","Data":"92b0792fd25f209b5d17ce804362725cc41d26b7bb428b94617376e79223ee79"} Jan 20 17:03:19 crc kubenswrapper[4558]: I0120 17:03:19.960441 4558 generic.go:334] "Generic (PLEG): container finished" podID="8505b3de-7094-43aa-a66b-13ba40bd4c13" containerID="c02c16b021c1478fe3fa6419ce9bea842560b0d70281d36ac4dc50c497b670fb" exitCode=0 Jan 20 17:03:19 crc kubenswrapper[4558]: I0120 17:03:19.960482 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-db-create-vzcfm" event={"ID":"8505b3de-7094-43aa-a66b-13ba40bd4c13","Type":"ContainerDied","Data":"c02c16b021c1478fe3fa6419ce9bea842560b0d70281d36ac4dc50c497b670fb"} Jan 20 17:03:20 crc kubenswrapper[4558]: I0120 17:03:20.969304 4558 generic.go:334] "Generic (PLEG): container finished" podID="9047bd36-d5f2-4fef-870e-1228092716d7" containerID="9c38b4365b1e5eb2a1ca6eae89b998c0f51a31a38c55d668a417a7accb396a07" exitCode=1 Jan 20 17:03:20 crc kubenswrapper[4558]: I0120 17:03:20.969578 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="9047bd36-d5f2-4fef-870e-1228092716d7" containerName="sg-core" containerID="cri-o://46b74cd34c7b0edc04f600c5fac0abd736f9fded1c408df432c3c5509a977ea2" gracePeriod=30 Jan 20 17:03:20 crc 
kubenswrapper[4558]: I0120 17:03:20.969625 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="9047bd36-d5f2-4fef-870e-1228092716d7" containerName="ceilometer-notification-agent" containerID="cri-o://a7dd103239d386df2b9abcfb0a1a1ccfcb59ea6199748b99c7ad89f0371890d4" gracePeriod=30 Jan 20 17:03:20 crc kubenswrapper[4558]: I0120 17:03:20.969429 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="9047bd36-d5f2-4fef-870e-1228092716d7" containerName="ceilometer-central-agent" containerID="cri-o://4d1561b3d4f216e5f14f5050766ba10fd5cbbe876476d575736b02e51572264a" gracePeriod=30 Jan 20 17:03:20 crc kubenswrapper[4558]: I0120 17:03:20.969490 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"9047bd36-d5f2-4fef-870e-1228092716d7","Type":"ContainerDied","Data":"9c38b4365b1e5eb2a1ca6eae89b998c0f51a31a38c55d668a417a7accb396a07"} Jan 20 17:03:21 crc kubenswrapper[4558]: I0120 17:03:21.349777 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-aebd-account-create-update-mnvvb" Jan 20 17:03:21 crc kubenswrapper[4558]: I0120 17:03:21.359960 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-1b9c-account-create-update-dh8jf" Jan 20 17:03:21 crc kubenswrapper[4558]: I0120 17:03:21.373394 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-db-create-7ns56" Jan 20 17:03:21 crc kubenswrapper[4558]: I0120 17:03:21.386028 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6ef75305-05ce-4939-bed4-16aaecedff78-operator-scripts\") pod \"6ef75305-05ce-4939-bed4-16aaecedff78\" (UID: \"6ef75305-05ce-4939-bed4-16aaecedff78\") " Jan 20 17:03:21 crc kubenswrapper[4558]: I0120 17:03:21.386076 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4svw\" (UniqueName: \"kubernetes.io/projected/6ef75305-05ce-4939-bed4-16aaecedff78-kube-api-access-w4svw\") pod \"6ef75305-05ce-4939-bed4-16aaecedff78\" (UID: \"6ef75305-05ce-4939-bed4-16aaecedff78\") " Jan 20 17:03:21 crc kubenswrapper[4558]: I0120 17:03:21.386224 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1f7c84b8-e07d-4268-b6e3-112cc6b2558e-operator-scripts\") pod \"1f7c84b8-e07d-4268-b6e3-112cc6b2558e\" (UID: \"1f7c84b8-e07d-4268-b6e3-112cc6b2558e\") " Jan 20 17:03:21 crc kubenswrapper[4558]: I0120 17:03:21.386318 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-grhqg\" (UniqueName: \"kubernetes.io/projected/1f7c84b8-e07d-4268-b6e3-112cc6b2558e-kube-api-access-grhqg\") pod \"1f7c84b8-e07d-4268-b6e3-112cc6b2558e\" (UID: \"1f7c84b8-e07d-4268-b6e3-112cc6b2558e\") " Jan 20 17:03:21 crc kubenswrapper[4558]: I0120 17:03:21.386988 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1f7c84b8-e07d-4268-b6e3-112cc6b2558e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "1f7c84b8-e07d-4268-b6e3-112cc6b2558e" (UID: "1f7c84b8-e07d-4268-b6e3-112cc6b2558e"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:03:21 crc kubenswrapper[4558]: I0120 17:03:21.389685 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ef75305-05ce-4939-bed4-16aaecedff78-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "6ef75305-05ce-4939-bed4-16aaecedff78" (UID: "6ef75305-05ce-4939-bed4-16aaecedff78"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:03:21 crc kubenswrapper[4558]: I0120 17:03:21.392528 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ef75305-05ce-4939-bed4-16aaecedff78-kube-api-access-w4svw" (OuterVolumeSpecName: "kube-api-access-w4svw") pod "6ef75305-05ce-4939-bed4-16aaecedff78" (UID: "6ef75305-05ce-4939-bed4-16aaecedff78"). InnerVolumeSpecName "kube-api-access-w4svw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:03:21 crc kubenswrapper[4558]: I0120 17:03:21.393376 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1f7c84b8-e07d-4268-b6e3-112cc6b2558e-kube-api-access-grhqg" (OuterVolumeSpecName: "kube-api-access-grhqg") pod "1f7c84b8-e07d-4268-b6e3-112cc6b2558e" (UID: "1f7c84b8-e07d-4268-b6e3-112cc6b2558e"). InnerVolumeSpecName "kube-api-access-grhqg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:03:21 crc kubenswrapper[4558]: I0120 17:03:21.471890 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-db-create-fckx4" Jan 20 17:03:21 crc kubenswrapper[4558]: I0120 17:03:21.475478 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-db-create-vzcfm" Jan 20 17:03:21 crc kubenswrapper[4558]: I0120 17:03:21.479202 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-8d7e-account-create-update-zl2rq" Jan 20 17:03:21 crc kubenswrapper[4558]: I0120 17:03:21.487785 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qkhj2\" (UniqueName: \"kubernetes.io/projected/49eb0897-5e0c-4075-8814-aa484a886e33-kube-api-access-qkhj2\") pod \"49eb0897-5e0c-4075-8814-aa484a886e33\" (UID: \"49eb0897-5e0c-4075-8814-aa484a886e33\") " Jan 20 17:03:21 crc kubenswrapper[4558]: I0120 17:03:21.487921 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/49eb0897-5e0c-4075-8814-aa484a886e33-operator-scripts\") pod \"49eb0897-5e0c-4075-8814-aa484a886e33\" (UID: \"49eb0897-5e0c-4075-8814-aa484a886e33\") " Jan 20 17:03:21 crc kubenswrapper[4558]: I0120 17:03:21.488223 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6ef75305-05ce-4939-bed4-16aaecedff78-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:21 crc kubenswrapper[4558]: I0120 17:03:21.488241 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4svw\" (UniqueName: \"kubernetes.io/projected/6ef75305-05ce-4939-bed4-16aaecedff78-kube-api-access-w4svw\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:21 crc kubenswrapper[4558]: I0120 17:03:21.488252 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1f7c84b8-e07d-4268-b6e3-112cc6b2558e-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:21 crc kubenswrapper[4558]: I0120 17:03:21.488261 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-grhqg\" (UniqueName: \"kubernetes.io/projected/1f7c84b8-e07d-4268-b6e3-112cc6b2558e-kube-api-access-grhqg\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:21 crc kubenswrapper[4558]: I0120 17:03:21.488431 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49eb0897-5e0c-4075-8814-aa484a886e33-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "49eb0897-5e0c-4075-8814-aa484a886e33" (UID: "49eb0897-5e0c-4075-8814-aa484a886e33"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:03:21 crc kubenswrapper[4558]: I0120 17:03:21.490388 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49eb0897-5e0c-4075-8814-aa484a886e33-kube-api-access-qkhj2" (OuterVolumeSpecName: "kube-api-access-qkhj2") pod "49eb0897-5e0c-4075-8814-aa484a886e33" (UID: "49eb0897-5e0c-4075-8814-aa484a886e33"). InnerVolumeSpecName "kube-api-access-qkhj2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:03:21 crc kubenswrapper[4558]: I0120 17:03:21.588885 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8505b3de-7094-43aa-a66b-13ba40bd4c13-operator-scripts\") pod \"8505b3de-7094-43aa-a66b-13ba40bd4c13\" (UID: \"8505b3de-7094-43aa-a66b-13ba40bd4c13\") " Jan 20 17:03:21 crc kubenswrapper[4558]: I0120 17:03:21.588919 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-82smt\" (UniqueName: \"kubernetes.io/projected/bf8f2ffa-65ef-49d0-ba6a-3bc80f0313ac-kube-api-access-82smt\") pod \"bf8f2ffa-65ef-49d0-ba6a-3bc80f0313ac\" (UID: \"bf8f2ffa-65ef-49d0-ba6a-3bc80f0313ac\") " Jan 20 17:03:21 crc kubenswrapper[4558]: I0120 17:03:21.588939 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9q6qq\" (UniqueName: \"kubernetes.io/projected/8505b3de-7094-43aa-a66b-13ba40bd4c13-kube-api-access-9q6qq\") pod \"8505b3de-7094-43aa-a66b-13ba40bd4c13\" (UID: \"8505b3de-7094-43aa-a66b-13ba40bd4c13\") " Jan 20 17:03:21 crc kubenswrapper[4558]: I0120 17:03:21.589260 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8505b3de-7094-43aa-a66b-13ba40bd4c13-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "8505b3de-7094-43aa-a66b-13ba40bd4c13" (UID: "8505b3de-7094-43aa-a66b-13ba40bd4c13"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:03:21 crc kubenswrapper[4558]: I0120 17:03:21.589360 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nwnpl\" (UniqueName: \"kubernetes.io/projected/4c82394b-0dc4-4a8a-ae40-dc46e41ddc68-kube-api-access-nwnpl\") pod \"4c82394b-0dc4-4a8a-ae40-dc46e41ddc68\" (UID: \"4c82394b-0dc4-4a8a-ae40-dc46e41ddc68\") " Jan 20 17:03:21 crc kubenswrapper[4558]: I0120 17:03:21.589459 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4c82394b-0dc4-4a8a-ae40-dc46e41ddc68-operator-scripts\") pod \"4c82394b-0dc4-4a8a-ae40-dc46e41ddc68\" (UID: \"4c82394b-0dc4-4a8a-ae40-dc46e41ddc68\") " Jan 20 17:03:21 crc kubenswrapper[4558]: I0120 17:03:21.589477 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bf8f2ffa-65ef-49d0-ba6a-3bc80f0313ac-operator-scripts\") pod \"bf8f2ffa-65ef-49d0-ba6a-3bc80f0313ac\" (UID: \"bf8f2ffa-65ef-49d0-ba6a-3bc80f0313ac\") " Jan 20 17:03:21 crc kubenswrapper[4558]: I0120 17:03:21.589966 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qkhj2\" (UniqueName: \"kubernetes.io/projected/49eb0897-5e0c-4075-8814-aa484a886e33-kube-api-access-qkhj2\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:21 crc kubenswrapper[4558]: I0120 17:03:21.589983 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8505b3de-7094-43aa-a66b-13ba40bd4c13-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:21 crc kubenswrapper[4558]: I0120 17:03:21.589993 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/49eb0897-5e0c-4075-8814-aa484a886e33-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:21 crc kubenswrapper[4558]: I0120 
17:03:21.590060 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4c82394b-0dc4-4a8a-ae40-dc46e41ddc68-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "4c82394b-0dc4-4a8a-ae40-dc46e41ddc68" (UID: "4c82394b-0dc4-4a8a-ae40-dc46e41ddc68"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:03:21 crc kubenswrapper[4558]: I0120 17:03:21.590104 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf8f2ffa-65ef-49d0-ba6a-3bc80f0313ac-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "bf8f2ffa-65ef-49d0-ba6a-3bc80f0313ac" (UID: "bf8f2ffa-65ef-49d0-ba6a-3bc80f0313ac"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:03:21 crc kubenswrapper[4558]: I0120 17:03:21.591507 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4c82394b-0dc4-4a8a-ae40-dc46e41ddc68-kube-api-access-nwnpl" (OuterVolumeSpecName: "kube-api-access-nwnpl") pod "4c82394b-0dc4-4a8a-ae40-dc46e41ddc68" (UID: "4c82394b-0dc4-4a8a-ae40-dc46e41ddc68"). InnerVolumeSpecName "kube-api-access-nwnpl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:03:21 crc kubenswrapper[4558]: I0120 17:03:21.591555 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8505b3de-7094-43aa-a66b-13ba40bd4c13-kube-api-access-9q6qq" (OuterVolumeSpecName: "kube-api-access-9q6qq") pod "8505b3de-7094-43aa-a66b-13ba40bd4c13" (UID: "8505b3de-7094-43aa-a66b-13ba40bd4c13"). InnerVolumeSpecName "kube-api-access-9q6qq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:03:21 crc kubenswrapper[4558]: I0120 17:03:21.592647 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf8f2ffa-65ef-49d0-ba6a-3bc80f0313ac-kube-api-access-82smt" (OuterVolumeSpecName: "kube-api-access-82smt") pod "bf8f2ffa-65ef-49d0-ba6a-3bc80f0313ac" (UID: "bf8f2ffa-65ef-49d0-ba6a-3bc80f0313ac"). InnerVolumeSpecName "kube-api-access-82smt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:03:21 crc kubenswrapper[4558]: I0120 17:03:21.691478 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4c82394b-0dc4-4a8a-ae40-dc46e41ddc68-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:21 crc kubenswrapper[4558]: I0120 17:03:21.691500 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bf8f2ffa-65ef-49d0-ba6a-3bc80f0313ac-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:21 crc kubenswrapper[4558]: I0120 17:03:21.691510 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9q6qq\" (UniqueName: \"kubernetes.io/projected/8505b3de-7094-43aa-a66b-13ba40bd4c13-kube-api-access-9q6qq\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:21 crc kubenswrapper[4558]: I0120 17:03:21.691520 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-82smt\" (UniqueName: \"kubernetes.io/projected/bf8f2ffa-65ef-49d0-ba6a-3bc80f0313ac-kube-api-access-82smt\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:21 crc kubenswrapper[4558]: I0120 17:03:21.691528 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nwnpl\" (UniqueName: \"kubernetes.io/projected/4c82394b-0dc4-4a8a-ae40-dc46e41ddc68-kube-api-access-nwnpl\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:22 crc kubenswrapper[4558]: I0120 17:03:22.001158 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-aebd-account-create-update-mnvvb" event={"ID":"6ef75305-05ce-4939-bed4-16aaecedff78","Type":"ContainerDied","Data":"d05ab4d5314a02654e856cb250f828a89e504f464f1c004f16204519254b2dcf"} Jan 20 17:03:22 crc kubenswrapper[4558]: I0120 17:03:22.001218 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d05ab4d5314a02654e856cb250f828a89e504f464f1c004f16204519254b2dcf" Jan 20 17:03:22 crc kubenswrapper[4558]: I0120 17:03:22.001406 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-aebd-account-create-update-mnvvb" Jan 20 17:03:22 crc kubenswrapper[4558]: I0120 17:03:22.005921 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-db-create-fckx4" event={"ID":"bf8f2ffa-65ef-49d0-ba6a-3bc80f0313ac","Type":"ContainerDied","Data":"eecfa02483b74d03a7a7e2b14b870a6f1a672f2325020b6d2134108f4754f7e8"} Jan 20 17:03:22 crc kubenswrapper[4558]: I0120 17:03:22.005957 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="eecfa02483b74d03a7a7e2b14b870a6f1a672f2325020b6d2134108f4754f7e8" Jan 20 17:03:22 crc kubenswrapper[4558]: I0120 17:03:22.006022 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-db-create-fckx4" Jan 20 17:03:22 crc kubenswrapper[4558]: I0120 17:03:22.007553 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-db-create-vzcfm" event={"ID":"8505b3de-7094-43aa-a66b-13ba40bd4c13","Type":"ContainerDied","Data":"d7f2760e0477f8b2d0c354befa4c88dc514d026556630079e7b8e39b0ac47c2f"} Jan 20 17:03:22 crc kubenswrapper[4558]: I0120 17:03:22.007592 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d7f2760e0477f8b2d0c354befa4c88dc514d026556630079e7b8e39b0ac47c2f" Jan 20 17:03:22 crc kubenswrapper[4558]: I0120 17:03:22.007660 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-db-create-vzcfm" Jan 20 17:03:22 crc kubenswrapper[4558]: I0120 17:03:22.012429 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-db-create-7ns56" event={"ID":"49eb0897-5e0c-4075-8814-aa484a886e33","Type":"ContainerDied","Data":"ff7dccf6774a55b85637820ad208525e68da53811b1668c88367c9a1e95d4be5"} Jan 20 17:03:22 crc kubenswrapper[4558]: I0120 17:03:22.012474 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ff7dccf6774a55b85637820ad208525e68da53811b1668c88367c9a1e95d4be5" Jan 20 17:03:22 crc kubenswrapper[4558]: I0120 17:03:22.012525 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-db-create-7ns56" Jan 20 17:03:22 crc kubenswrapper[4558]: I0120 17:03:22.014926 4558 generic.go:334] "Generic (PLEG): container finished" podID="9047bd36-d5f2-4fef-870e-1228092716d7" containerID="46b74cd34c7b0edc04f600c5fac0abd736f9fded1c408df432c3c5509a977ea2" exitCode=2 Jan 20 17:03:22 crc kubenswrapper[4558]: I0120 17:03:22.014953 4558 generic.go:334] "Generic (PLEG): container finished" podID="9047bd36-d5f2-4fef-870e-1228092716d7" containerID="a7dd103239d386df2b9abcfb0a1a1ccfcb59ea6199748b99c7ad89f0371890d4" exitCode=0 Jan 20 17:03:22 crc kubenswrapper[4558]: I0120 17:03:22.014997 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"9047bd36-d5f2-4fef-870e-1228092716d7","Type":"ContainerDied","Data":"46b74cd34c7b0edc04f600c5fac0abd736f9fded1c408df432c3c5509a977ea2"} Jan 20 17:03:22 crc kubenswrapper[4558]: I0120 17:03:22.015036 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"9047bd36-d5f2-4fef-870e-1228092716d7","Type":"ContainerDied","Data":"a7dd103239d386df2b9abcfb0a1a1ccfcb59ea6199748b99c7ad89f0371890d4"} Jan 20 17:03:22 crc kubenswrapper[4558]: I0120 17:03:22.016328 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-8d7e-account-create-update-zl2rq" event={"ID":"4c82394b-0dc4-4a8a-ae40-dc46e41ddc68","Type":"ContainerDied","Data":"495067f18e1026e0321a181fed8b89693c9c43ff1fcb600c97833c200c8b8058"} Jan 20 17:03:22 crc kubenswrapper[4558]: I0120 17:03:22.016372 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="495067f18e1026e0321a181fed8b89693c9c43ff1fcb600c97833c200c8b8058" Jan 20 17:03:22 crc kubenswrapper[4558]: I0120 17:03:22.016346 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-8d7e-account-create-update-zl2rq" Jan 20 17:03:22 crc kubenswrapper[4558]: I0120 17:03:22.018292 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-1b9c-account-create-update-dh8jf" event={"ID":"1f7c84b8-e07d-4268-b6e3-112cc6b2558e","Type":"ContainerDied","Data":"34ee9dd0519bac75f5d5d21d1722675d5a8d2380839ccdede0bb77b3bb098819"} Jan 20 17:03:22 crc kubenswrapper[4558]: I0120 17:03:22.018339 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="34ee9dd0519bac75f5d5d21d1722675d5a8d2380839ccdede0bb77b3bb098819" Jan 20 17:03:22 crc kubenswrapper[4558]: I0120 17:03:22.018370 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-1b9c-account-create-update-dh8jf" Jan 20 17:03:22 crc kubenswrapper[4558]: I0120 17:03:22.851691 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/swift-proxy-595b48fd98-kbjv6" Jan 20 17:03:22 crc kubenswrapper[4558]: I0120 17:03:22.852938 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/swift-proxy-595b48fd98-kbjv6" Jan 20 17:03:23 crc kubenswrapper[4558]: I0120 17:03:23.500576 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-db-sync-xwwl5"] Jan 20 17:03:23 crc kubenswrapper[4558]: E0120 17:03:23.501242 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf8f2ffa-65ef-49d0-ba6a-3bc80f0313ac" containerName="mariadb-database-create" Jan 20 17:03:23 crc kubenswrapper[4558]: I0120 17:03:23.501253 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf8f2ffa-65ef-49d0-ba6a-3bc80f0313ac" containerName="mariadb-database-create" Jan 20 17:03:23 crc kubenswrapper[4558]: E0120 17:03:23.501266 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="49eb0897-5e0c-4075-8814-aa484a886e33" containerName="mariadb-database-create" Jan 20 17:03:23 crc kubenswrapper[4558]: I0120 17:03:23.501271 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="49eb0897-5e0c-4075-8814-aa484a886e33" containerName="mariadb-database-create" Jan 20 17:03:23 crc kubenswrapper[4558]: E0120 17:03:23.501292 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f7c84b8-e07d-4268-b6e3-112cc6b2558e" containerName="mariadb-account-create-update" Jan 20 17:03:23 crc kubenswrapper[4558]: I0120 17:03:23.501298 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f7c84b8-e07d-4268-b6e3-112cc6b2558e" containerName="mariadb-account-create-update" Jan 20 17:03:23 crc kubenswrapper[4558]: E0120 17:03:23.501311 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8505b3de-7094-43aa-a66b-13ba40bd4c13" containerName="mariadb-database-create" Jan 20 17:03:23 crc kubenswrapper[4558]: I0120 17:03:23.501316 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8505b3de-7094-43aa-a66b-13ba40bd4c13" containerName="mariadb-database-create" Jan 20 17:03:23 crc kubenswrapper[4558]: E0120 17:03:23.501324 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ef75305-05ce-4939-bed4-16aaecedff78" containerName="mariadb-account-create-update" Jan 20 17:03:23 crc kubenswrapper[4558]: I0120 17:03:23.501330 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ef75305-05ce-4939-bed4-16aaecedff78" containerName="mariadb-account-create-update" Jan 20 17:03:23 crc 
kubenswrapper[4558]: E0120 17:03:23.501348 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c82394b-0dc4-4a8a-ae40-dc46e41ddc68" containerName="mariadb-account-create-update" Jan 20 17:03:23 crc kubenswrapper[4558]: I0120 17:03:23.501353 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c82394b-0dc4-4a8a-ae40-dc46e41ddc68" containerName="mariadb-account-create-update" Jan 20 17:03:23 crc kubenswrapper[4558]: I0120 17:03:23.501492 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="6ef75305-05ce-4939-bed4-16aaecedff78" containerName="mariadb-account-create-update" Jan 20 17:03:23 crc kubenswrapper[4558]: I0120 17:03:23.501506 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8505b3de-7094-43aa-a66b-13ba40bd4c13" containerName="mariadb-database-create" Jan 20 17:03:23 crc kubenswrapper[4558]: I0120 17:03:23.501516 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="49eb0897-5e0c-4075-8814-aa484a886e33" containerName="mariadb-database-create" Jan 20 17:03:23 crc kubenswrapper[4558]: I0120 17:03:23.501524 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="bf8f2ffa-65ef-49d0-ba6a-3bc80f0313ac" containerName="mariadb-database-create" Jan 20 17:03:23 crc kubenswrapper[4558]: I0120 17:03:23.501535 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="1f7c84b8-e07d-4268-b6e3-112cc6b2558e" containerName="mariadb-account-create-update" Jan 20 17:03:23 crc kubenswrapper[4558]: I0120 17:03:23.501546 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="4c82394b-0dc4-4a8a-ae40-dc46e41ddc68" containerName="mariadb-account-create-update" Jan 20 17:03:23 crc kubenswrapper[4558]: I0120 17:03:23.501992 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-xwwl5" Jan 20 17:03:23 crc kubenswrapper[4558]: I0120 17:03:23.504055 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-nova-dockercfg-sggwd" Jan 20 17:03:23 crc kubenswrapper[4558]: I0120 17:03:23.504251 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-conductor-config-data" Jan 20 17:03:23 crc kubenswrapper[4558]: I0120 17:03:23.506103 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-conductor-scripts" Jan 20 17:03:23 crc kubenswrapper[4558]: I0120 17:03:23.511997 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-db-sync-xwwl5"] Jan 20 17:03:23 crc kubenswrapper[4558]: I0120 17:03:23.532076 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8efaa8ce-5cd9-4688-8cef-0e3fb667105d-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-xwwl5\" (UID: \"8efaa8ce-5cd9-4688-8cef-0e3fb667105d\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-xwwl5" Jan 20 17:03:23 crc kubenswrapper[4558]: I0120 17:03:23.532132 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8efaa8ce-5cd9-4688-8cef-0e3fb667105d-config-data\") pod \"nova-cell0-conductor-db-sync-xwwl5\" (UID: \"8efaa8ce-5cd9-4688-8cef-0e3fb667105d\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-xwwl5" Jan 20 17:03:23 crc kubenswrapper[4558]: I0120 17:03:23.532239 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8efaa8ce-5cd9-4688-8cef-0e3fb667105d-scripts\") pod \"nova-cell0-conductor-db-sync-xwwl5\" (UID: \"8efaa8ce-5cd9-4688-8cef-0e3fb667105d\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-xwwl5" Jan 20 17:03:23 crc kubenswrapper[4558]: I0120 17:03:23.532273 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zw4x4\" (UniqueName: \"kubernetes.io/projected/8efaa8ce-5cd9-4688-8cef-0e3fb667105d-kube-api-access-zw4x4\") pod \"nova-cell0-conductor-db-sync-xwwl5\" (UID: \"8efaa8ce-5cd9-4688-8cef-0e3fb667105d\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-xwwl5" Jan 20 17:03:23 crc kubenswrapper[4558]: I0120 17:03:23.633571 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8efaa8ce-5cd9-4688-8cef-0e3fb667105d-scripts\") pod \"nova-cell0-conductor-db-sync-xwwl5\" (UID: \"8efaa8ce-5cd9-4688-8cef-0e3fb667105d\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-xwwl5" Jan 20 17:03:23 crc kubenswrapper[4558]: I0120 17:03:23.633627 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zw4x4\" (UniqueName: \"kubernetes.io/projected/8efaa8ce-5cd9-4688-8cef-0e3fb667105d-kube-api-access-zw4x4\") pod \"nova-cell0-conductor-db-sync-xwwl5\" (UID: \"8efaa8ce-5cd9-4688-8cef-0e3fb667105d\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-xwwl5" Jan 20 17:03:23 crc kubenswrapper[4558]: I0120 17:03:23.633682 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/8efaa8ce-5cd9-4688-8cef-0e3fb667105d-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-xwwl5\" (UID: \"8efaa8ce-5cd9-4688-8cef-0e3fb667105d\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-xwwl5" Jan 20 17:03:23 crc kubenswrapper[4558]: I0120 17:03:23.633723 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8efaa8ce-5cd9-4688-8cef-0e3fb667105d-config-data\") pod \"nova-cell0-conductor-db-sync-xwwl5\" (UID: \"8efaa8ce-5cd9-4688-8cef-0e3fb667105d\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-xwwl5" Jan 20 17:03:23 crc kubenswrapper[4558]: I0120 17:03:23.639185 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8efaa8ce-5cd9-4688-8cef-0e3fb667105d-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-xwwl5\" (UID: \"8efaa8ce-5cd9-4688-8cef-0e3fb667105d\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-xwwl5" Jan 20 17:03:23 crc kubenswrapper[4558]: I0120 17:03:23.639707 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8efaa8ce-5cd9-4688-8cef-0e3fb667105d-scripts\") pod \"nova-cell0-conductor-db-sync-xwwl5\" (UID: \"8efaa8ce-5cd9-4688-8cef-0e3fb667105d\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-xwwl5" Jan 20 17:03:23 crc kubenswrapper[4558]: I0120 17:03:23.647687 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zw4x4\" (UniqueName: \"kubernetes.io/projected/8efaa8ce-5cd9-4688-8cef-0e3fb667105d-kube-api-access-zw4x4\") pod \"nova-cell0-conductor-db-sync-xwwl5\" (UID: \"8efaa8ce-5cd9-4688-8cef-0e3fb667105d\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-xwwl5" Jan 20 17:03:23 crc kubenswrapper[4558]: I0120 17:03:23.647776 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8efaa8ce-5cd9-4688-8cef-0e3fb667105d-config-data\") pod \"nova-cell0-conductor-db-sync-xwwl5\" (UID: \"8efaa8ce-5cd9-4688-8cef-0e3fb667105d\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-xwwl5" Jan 20 17:03:23 crc kubenswrapper[4558]: I0120 17:03:23.784272 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:03:23 crc kubenswrapper[4558]: I0120 17:03:23.784497 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="b6a69309-962d-4a4a-8fd0-e25a8a14cee1" containerName="glance-log" containerID="cri-o://5bb4f48f82b9f26f09753ee0b330ab1b18a6ee75ea0a48f26d817acaf1398abd" gracePeriod=30 Jan 20 17:03:23 crc kubenswrapper[4558]: I0120 17:03:23.784609 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="b6a69309-962d-4a4a-8fd0-e25a8a14cee1" containerName="glance-httpd" containerID="cri-o://bc6ce507a944a4c0944bcc1982c4d469e2d7fdba5e2e507de9cffdb943ca267c" gracePeriod=30 Jan 20 17:03:23 crc kubenswrapper[4558]: I0120 17:03:23.822653 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-xwwl5" Jan 20 17:03:24 crc kubenswrapper[4558]: I0120 17:03:24.041816 4558 generic.go:334] "Generic (PLEG): container finished" podID="b6a69309-962d-4a4a-8fd0-e25a8a14cee1" containerID="5bb4f48f82b9f26f09753ee0b330ab1b18a6ee75ea0a48f26d817acaf1398abd" exitCode=143 Jan 20 17:03:24 crc kubenswrapper[4558]: I0120 17:03:24.042364 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"b6a69309-962d-4a4a-8fd0-e25a8a14cee1","Type":"ContainerDied","Data":"5bb4f48f82b9f26f09753ee0b330ab1b18a6ee75ea0a48f26d817acaf1398abd"} Jan 20 17:03:24 crc kubenswrapper[4558]: I0120 17:03:24.261229 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-db-sync-xwwl5"] Jan 20 17:03:24 crc kubenswrapper[4558]: I0120 17:03:24.603777 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:03:24 crc kubenswrapper[4558]: I0120 17:03:24.604381 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="25076c04-6d0d-4c02-a58f-de14094b79b5" containerName="glance-log" containerID="cri-o://59d23ff3cd2fbff08e58e91492448fe0e07b2099c36d37f02ee2bf1870adf4fc" gracePeriod=30 Jan 20 17:03:24 crc kubenswrapper[4558]: I0120 17:03:24.604754 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="25076c04-6d0d-4c02-a58f-de14094b79b5" containerName="glance-httpd" containerID="cri-o://ad62e8788a08fae6beba57e28c588e43f4c96d6537c969b42d6e6044e423af6f" gracePeriod=30 Jan 20 17:03:25 crc kubenswrapper[4558]: I0120 17:03:25.050921 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-xwwl5" event={"ID":"8efaa8ce-5cd9-4688-8cef-0e3fb667105d","Type":"ContainerStarted","Data":"535ff1923649723165cb5389ef0ba724b32d3a882103118bddd27f257b9974e3"} Jan 20 17:03:25 crc kubenswrapper[4558]: I0120 17:03:25.051317 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-xwwl5" event={"ID":"8efaa8ce-5cd9-4688-8cef-0e3fb667105d","Type":"ContainerStarted","Data":"e4aa2292a0de814eef0e7a7f0377313c92e8d1600aa0e6e3f9380b9dcfcafdf1"} Jan 20 17:03:25 crc kubenswrapper[4558]: I0120 17:03:25.053938 4558 generic.go:334] "Generic (PLEG): container finished" podID="9047bd36-d5f2-4fef-870e-1228092716d7" containerID="4d1561b3d4f216e5f14f5050766ba10fd5cbbe876476d575736b02e51572264a" exitCode=0 Jan 20 17:03:25 crc kubenswrapper[4558]: I0120 17:03:25.054004 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"9047bd36-d5f2-4fef-870e-1228092716d7","Type":"ContainerDied","Data":"4d1561b3d4f216e5f14f5050766ba10fd5cbbe876476d575736b02e51572264a"} Jan 20 17:03:25 crc kubenswrapper[4558]: I0120 17:03:25.056873 4558 generic.go:334] "Generic (PLEG): container finished" podID="25076c04-6d0d-4c02-a58f-de14094b79b5" containerID="59d23ff3cd2fbff08e58e91492448fe0e07b2099c36d37f02ee2bf1870adf4fc" exitCode=143 Jan 20 17:03:25 crc kubenswrapper[4558]: I0120 17:03:25.056939 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" 
event={"ID":"25076c04-6d0d-4c02-a58f-de14094b79b5","Type":"ContainerDied","Data":"59d23ff3cd2fbff08e58e91492448fe0e07b2099c36d37f02ee2bf1870adf4fc"} Jan 20 17:03:25 crc kubenswrapper[4558]: I0120 17:03:25.069772 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-xwwl5" podStartSLOduration=2.069752888 podStartE2EDuration="2.069752888s" podCreationTimestamp="2026-01-20 17:03:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:03:25.065577937 +0000 UTC m=+1298.825915904" watchObservedRunningTime="2026-01-20 17:03:25.069752888 +0000 UTC m=+1298.830090856" Jan 20 17:03:25 crc kubenswrapper[4558]: I0120 17:03:25.226984 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:03:25 crc kubenswrapper[4558]: I0120 17:03:25.280416 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:25 crc kubenswrapper[4558]: I0120 17:03:25.366623 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9047bd36-d5f2-4fef-870e-1228092716d7-log-httpd\") pod \"9047bd36-d5f2-4fef-870e-1228092716d7\" (UID: \"9047bd36-d5f2-4fef-870e-1228092716d7\") " Jan 20 17:03:25 crc kubenswrapper[4558]: I0120 17:03:25.366684 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5cwrb\" (UniqueName: \"kubernetes.io/projected/9047bd36-d5f2-4fef-870e-1228092716d7-kube-api-access-5cwrb\") pod \"9047bd36-d5f2-4fef-870e-1228092716d7\" (UID: \"9047bd36-d5f2-4fef-870e-1228092716d7\") " Jan 20 17:03:25 crc kubenswrapper[4558]: I0120 17:03:25.366726 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9047bd36-d5f2-4fef-870e-1228092716d7-sg-core-conf-yaml\") pod \"9047bd36-d5f2-4fef-870e-1228092716d7\" (UID: \"9047bd36-d5f2-4fef-870e-1228092716d7\") " Jan 20 17:03:25 crc kubenswrapper[4558]: I0120 17:03:25.366753 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9047bd36-d5f2-4fef-870e-1228092716d7-combined-ca-bundle\") pod \"9047bd36-d5f2-4fef-870e-1228092716d7\" (UID: \"9047bd36-d5f2-4fef-870e-1228092716d7\") " Jan 20 17:03:25 crc kubenswrapper[4558]: I0120 17:03:25.366772 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9047bd36-d5f2-4fef-870e-1228092716d7-scripts\") pod \"9047bd36-d5f2-4fef-870e-1228092716d7\" (UID: \"9047bd36-d5f2-4fef-870e-1228092716d7\") " Jan 20 17:03:25 crc kubenswrapper[4558]: I0120 17:03:25.366801 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9047bd36-d5f2-4fef-870e-1228092716d7-run-httpd\") pod \"9047bd36-d5f2-4fef-870e-1228092716d7\" (UID: \"9047bd36-d5f2-4fef-870e-1228092716d7\") " Jan 20 17:03:25 crc kubenswrapper[4558]: I0120 17:03:25.366908 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9047bd36-d5f2-4fef-870e-1228092716d7-config-data\") pod \"9047bd36-d5f2-4fef-870e-1228092716d7\" (UID: 
\"9047bd36-d5f2-4fef-870e-1228092716d7\") " Jan 20 17:03:25 crc kubenswrapper[4558]: I0120 17:03:25.367215 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9047bd36-d5f2-4fef-870e-1228092716d7-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "9047bd36-d5f2-4fef-870e-1228092716d7" (UID: "9047bd36-d5f2-4fef-870e-1228092716d7"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:03:25 crc kubenswrapper[4558]: I0120 17:03:25.367538 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9047bd36-d5f2-4fef-870e-1228092716d7-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:25 crc kubenswrapper[4558]: I0120 17:03:25.367656 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9047bd36-d5f2-4fef-870e-1228092716d7-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "9047bd36-d5f2-4fef-870e-1228092716d7" (UID: "9047bd36-d5f2-4fef-870e-1228092716d7"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:03:25 crc kubenswrapper[4558]: I0120 17:03:25.372407 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9047bd36-d5f2-4fef-870e-1228092716d7-kube-api-access-5cwrb" (OuterVolumeSpecName: "kube-api-access-5cwrb") pod "9047bd36-d5f2-4fef-870e-1228092716d7" (UID: "9047bd36-d5f2-4fef-870e-1228092716d7"). InnerVolumeSpecName "kube-api-access-5cwrb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:03:25 crc kubenswrapper[4558]: I0120 17:03:25.372828 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9047bd36-d5f2-4fef-870e-1228092716d7-scripts" (OuterVolumeSpecName: "scripts") pod "9047bd36-d5f2-4fef-870e-1228092716d7" (UID: "9047bd36-d5f2-4fef-870e-1228092716d7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:03:25 crc kubenswrapper[4558]: I0120 17:03:25.389338 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9047bd36-d5f2-4fef-870e-1228092716d7-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "9047bd36-d5f2-4fef-870e-1228092716d7" (UID: "9047bd36-d5f2-4fef-870e-1228092716d7"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:03:25 crc kubenswrapper[4558]: I0120 17:03:25.422371 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9047bd36-d5f2-4fef-870e-1228092716d7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9047bd36-d5f2-4fef-870e-1228092716d7" (UID: "9047bd36-d5f2-4fef-870e-1228092716d7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:03:25 crc kubenswrapper[4558]: I0120 17:03:25.434767 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9047bd36-d5f2-4fef-870e-1228092716d7-config-data" (OuterVolumeSpecName: "config-data") pod "9047bd36-d5f2-4fef-870e-1228092716d7" (UID: "9047bd36-d5f2-4fef-870e-1228092716d7"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:03:25 crc kubenswrapper[4558]: I0120 17:03:25.469657 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9047bd36-d5f2-4fef-870e-1228092716d7-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:25 crc kubenswrapper[4558]: I0120 17:03:25.469683 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5cwrb\" (UniqueName: \"kubernetes.io/projected/9047bd36-d5f2-4fef-870e-1228092716d7-kube-api-access-5cwrb\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:25 crc kubenswrapper[4558]: I0120 17:03:25.469696 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9047bd36-d5f2-4fef-870e-1228092716d7-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:25 crc kubenswrapper[4558]: I0120 17:03:25.469706 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9047bd36-d5f2-4fef-870e-1228092716d7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:25 crc kubenswrapper[4558]: I0120 17:03:25.469714 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9047bd36-d5f2-4fef-870e-1228092716d7-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:25 crc kubenswrapper[4558]: I0120 17:03:25.469725 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9047bd36-d5f2-4fef-870e-1228092716d7-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:26 crc kubenswrapper[4558]: I0120 17:03:26.074505 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:26 crc kubenswrapper[4558]: I0120 17:03:26.078222 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"9047bd36-d5f2-4fef-870e-1228092716d7","Type":"ContainerDied","Data":"13e9017387ebd05b878e43e0354d0391c3db37380de58eb058352c89fd44c036"} Jan 20 17:03:26 crc kubenswrapper[4558]: I0120 17:03:26.078288 4558 scope.go:117] "RemoveContainer" containerID="9c38b4365b1e5eb2a1ca6eae89b998c0f51a31a38c55d668a417a7accb396a07" Jan 20 17:03:26 crc kubenswrapper[4558]: I0120 17:03:26.120928 4558 scope.go:117] "RemoveContainer" containerID="46b74cd34c7b0edc04f600c5fac0abd736f9fded1c408df432c3c5509a977ea2" Jan 20 17:03:26 crc kubenswrapper[4558]: I0120 17:03:26.146497 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:03:26 crc kubenswrapper[4558]: I0120 17:03:26.158365 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:03:26 crc kubenswrapper[4558]: I0120 17:03:26.160110 4558 scope.go:117] "RemoveContainer" containerID="a7dd103239d386df2b9abcfb0a1a1ccfcb59ea6199748b99c7ad89f0371890d4" Jan 20 17:03:26 crc kubenswrapper[4558]: I0120 17:03:26.172534 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:03:26 crc kubenswrapper[4558]: E0120 17:03:26.172891 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9047bd36-d5f2-4fef-870e-1228092716d7" containerName="proxy-httpd" Jan 20 17:03:26 crc kubenswrapper[4558]: I0120 17:03:26.172910 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9047bd36-d5f2-4fef-870e-1228092716d7" containerName="proxy-httpd" Jan 20 
17:03:26 crc kubenswrapper[4558]: E0120 17:03:26.172930 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9047bd36-d5f2-4fef-870e-1228092716d7" containerName="ceilometer-notification-agent" Jan 20 17:03:26 crc kubenswrapper[4558]: I0120 17:03:26.172936 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9047bd36-d5f2-4fef-870e-1228092716d7" containerName="ceilometer-notification-agent" Jan 20 17:03:26 crc kubenswrapper[4558]: E0120 17:03:26.172954 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9047bd36-d5f2-4fef-870e-1228092716d7" containerName="ceilometer-central-agent" Jan 20 17:03:26 crc kubenswrapper[4558]: I0120 17:03:26.172960 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9047bd36-d5f2-4fef-870e-1228092716d7" containerName="ceilometer-central-agent" Jan 20 17:03:26 crc kubenswrapper[4558]: E0120 17:03:26.172968 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9047bd36-d5f2-4fef-870e-1228092716d7" containerName="sg-core" Jan 20 17:03:26 crc kubenswrapper[4558]: I0120 17:03:26.172974 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9047bd36-d5f2-4fef-870e-1228092716d7" containerName="sg-core" Jan 20 17:03:26 crc kubenswrapper[4558]: I0120 17:03:26.173190 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="9047bd36-d5f2-4fef-870e-1228092716d7" containerName="ceilometer-central-agent" Jan 20 17:03:26 crc kubenswrapper[4558]: I0120 17:03:26.173207 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="9047bd36-d5f2-4fef-870e-1228092716d7" containerName="sg-core" Jan 20 17:03:26 crc kubenswrapper[4558]: I0120 17:03:26.173231 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="9047bd36-d5f2-4fef-870e-1228092716d7" containerName="ceilometer-notification-agent" Jan 20 17:03:26 crc kubenswrapper[4558]: I0120 17:03:26.173238 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="9047bd36-d5f2-4fef-870e-1228092716d7" containerName="proxy-httpd" Jan 20 17:03:26 crc kubenswrapper[4558]: I0120 17:03:26.174503 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:26 crc kubenswrapper[4558]: I0120 17:03:26.176874 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:03:26 crc kubenswrapper[4558]: I0120 17:03:26.177017 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:03:26 crc kubenswrapper[4558]: I0120 17:03:26.177067 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ceilometer-internal-svc" Jan 20 17:03:26 crc kubenswrapper[4558]: I0120 17:03:26.181719 4558 scope.go:117] "RemoveContainer" containerID="4d1561b3d4f216e5f14f5050766ba10fd5cbbe876476d575736b02e51572264a" Jan 20 17:03:26 crc kubenswrapper[4558]: I0120 17:03:26.195757 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:03:26 crc kubenswrapper[4558]: I0120 17:03:26.281132 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed3e3e29-3089-4ee3-9cd6-2bfe6024822e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ed3e3e29-3089-4ee3-9cd6-2bfe6024822e\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:26 crc kubenswrapper[4558]: I0120 17:03:26.281216 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed3e3e29-3089-4ee3-9cd6-2bfe6024822e-config-data\") pod \"ceilometer-0\" (UID: \"ed3e3e29-3089-4ee3-9cd6-2bfe6024822e\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:26 crc kubenswrapper[4558]: I0120 17:03:26.281356 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ed3e3e29-3089-4ee3-9cd6-2bfe6024822e-run-httpd\") pod \"ceilometer-0\" (UID: \"ed3e3e29-3089-4ee3-9cd6-2bfe6024822e\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:26 crc kubenswrapper[4558]: I0120 17:03:26.281434 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ed3e3e29-3089-4ee3-9cd6-2bfe6024822e-log-httpd\") pod \"ceilometer-0\" (UID: \"ed3e3e29-3089-4ee3-9cd6-2bfe6024822e\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:26 crc kubenswrapper[4558]: I0120 17:03:26.281465 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ed3e3e29-3089-4ee3-9cd6-2bfe6024822e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ed3e3e29-3089-4ee3-9cd6-2bfe6024822e\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:26 crc kubenswrapper[4558]: I0120 17:03:26.281512 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/ed3e3e29-3089-4ee3-9cd6-2bfe6024822e-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"ed3e3e29-3089-4ee3-9cd6-2bfe6024822e\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:26 crc kubenswrapper[4558]: I0120 17:03:26.281605 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ed3e3e29-3089-4ee3-9cd6-2bfe6024822e-scripts\") pod \"ceilometer-0\" (UID: 
\"ed3e3e29-3089-4ee3-9cd6-2bfe6024822e\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:26 crc kubenswrapper[4558]: I0120 17:03:26.281717 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p4b2c\" (UniqueName: \"kubernetes.io/projected/ed3e3e29-3089-4ee3-9cd6-2bfe6024822e-kube-api-access-p4b2c\") pod \"ceilometer-0\" (UID: \"ed3e3e29-3089-4ee3-9cd6-2bfe6024822e\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:26 crc kubenswrapper[4558]: I0120 17:03:26.382926 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed3e3e29-3089-4ee3-9cd6-2bfe6024822e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ed3e3e29-3089-4ee3-9cd6-2bfe6024822e\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:26 crc kubenswrapper[4558]: I0120 17:03:26.382970 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed3e3e29-3089-4ee3-9cd6-2bfe6024822e-config-data\") pod \"ceilometer-0\" (UID: \"ed3e3e29-3089-4ee3-9cd6-2bfe6024822e\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:26 crc kubenswrapper[4558]: I0120 17:03:26.382999 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ed3e3e29-3089-4ee3-9cd6-2bfe6024822e-run-httpd\") pod \"ceilometer-0\" (UID: \"ed3e3e29-3089-4ee3-9cd6-2bfe6024822e\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:26 crc kubenswrapper[4558]: I0120 17:03:26.383023 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ed3e3e29-3089-4ee3-9cd6-2bfe6024822e-log-httpd\") pod \"ceilometer-0\" (UID: \"ed3e3e29-3089-4ee3-9cd6-2bfe6024822e\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:26 crc kubenswrapper[4558]: I0120 17:03:26.383039 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ed3e3e29-3089-4ee3-9cd6-2bfe6024822e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ed3e3e29-3089-4ee3-9cd6-2bfe6024822e\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:26 crc kubenswrapper[4558]: I0120 17:03:26.383064 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/ed3e3e29-3089-4ee3-9cd6-2bfe6024822e-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"ed3e3e29-3089-4ee3-9cd6-2bfe6024822e\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:26 crc kubenswrapper[4558]: I0120 17:03:26.383097 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ed3e3e29-3089-4ee3-9cd6-2bfe6024822e-scripts\") pod \"ceilometer-0\" (UID: \"ed3e3e29-3089-4ee3-9cd6-2bfe6024822e\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:26 crc kubenswrapper[4558]: I0120 17:03:26.383136 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p4b2c\" (UniqueName: \"kubernetes.io/projected/ed3e3e29-3089-4ee3-9cd6-2bfe6024822e-kube-api-access-p4b2c\") pod \"ceilometer-0\" (UID: \"ed3e3e29-3089-4ee3-9cd6-2bfe6024822e\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:26 crc kubenswrapper[4558]: I0120 17:03:26.384327 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ed3e3e29-3089-4ee3-9cd6-2bfe6024822e-log-httpd\") pod \"ceilometer-0\" (UID: \"ed3e3e29-3089-4ee3-9cd6-2bfe6024822e\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:26 crc kubenswrapper[4558]: I0120 17:03:26.384612 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ed3e3e29-3089-4ee3-9cd6-2bfe6024822e-run-httpd\") pod \"ceilometer-0\" (UID: \"ed3e3e29-3089-4ee3-9cd6-2bfe6024822e\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:26 crc kubenswrapper[4558]: I0120 17:03:26.387307 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ed3e3e29-3089-4ee3-9cd6-2bfe6024822e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ed3e3e29-3089-4ee3-9cd6-2bfe6024822e\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:26 crc kubenswrapper[4558]: I0120 17:03:26.387434 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/ed3e3e29-3089-4ee3-9cd6-2bfe6024822e-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"ed3e3e29-3089-4ee3-9cd6-2bfe6024822e\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:26 crc kubenswrapper[4558]: I0120 17:03:26.388031 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ed3e3e29-3089-4ee3-9cd6-2bfe6024822e-scripts\") pod \"ceilometer-0\" (UID: \"ed3e3e29-3089-4ee3-9cd6-2bfe6024822e\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:26 crc kubenswrapper[4558]: I0120 17:03:26.388293 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed3e3e29-3089-4ee3-9cd6-2bfe6024822e-config-data\") pod \"ceilometer-0\" (UID: \"ed3e3e29-3089-4ee3-9cd6-2bfe6024822e\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:26 crc kubenswrapper[4558]: I0120 17:03:26.393830 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed3e3e29-3089-4ee3-9cd6-2bfe6024822e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ed3e3e29-3089-4ee3-9cd6-2bfe6024822e\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:26 crc kubenswrapper[4558]: I0120 17:03:26.405673 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p4b2c\" (UniqueName: \"kubernetes.io/projected/ed3e3e29-3089-4ee3-9cd6-2bfe6024822e-kube-api-access-p4b2c\") pod \"ceilometer-0\" (UID: \"ed3e3e29-3089-4ee3-9cd6-2bfe6024822e\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:26 crc kubenswrapper[4558]: I0120 17:03:26.488714 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:26 crc kubenswrapper[4558]: I0120 17:03:26.588790 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9047bd36-d5f2-4fef-870e-1228092716d7" path="/var/lib/kubelet/pods/9047bd36-d5f2-4fef-870e-1228092716d7/volumes" Jan 20 17:03:26 crc kubenswrapper[4558]: I0120 17:03:26.882451 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:03:26 crc kubenswrapper[4558]: I0120 17:03:26.917541 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:03:26 crc kubenswrapper[4558]: I0120 17:03:26.923321 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="b6a69309-962d-4a4a-8fd0-e25a8a14cee1" containerName="glance-httpd" probeResult="failure" output="Get \"https://10.217.0.240:9292/healthcheck\": read tcp 10.217.0.2:43618->10.217.0.240:9292: read: connection reset by peer" Jan 20 17:03:26 crc kubenswrapper[4558]: I0120 17:03:26.923346 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="b6a69309-962d-4a4a-8fd0-e25a8a14cee1" containerName="glance-log" probeResult="failure" output="Get \"https://10.217.0.240:9292/healthcheck\": read tcp 10.217.0.2:43620->10.217.0.240:9292: read: connection reset by peer" Jan 20 17:03:27 crc kubenswrapper[4558]: I0120 17:03:27.095816 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"ed3e3e29-3089-4ee3-9cd6-2bfe6024822e","Type":"ContainerStarted","Data":"107511e0c26dc982563b99121c7de0b93cfd7faa95eea66d5b4bd4a7412da3bb"} Jan 20 17:03:27 crc kubenswrapper[4558]: I0120 17:03:27.099782 4558 generic.go:334] "Generic (PLEG): container finished" podID="b6a69309-962d-4a4a-8fd0-e25a8a14cee1" containerID="bc6ce507a944a4c0944bcc1982c4d469e2d7fdba5e2e507de9cffdb943ca267c" exitCode=0 Jan 20 17:03:27 crc kubenswrapper[4558]: I0120 17:03:27.099838 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"b6a69309-962d-4a4a-8fd0-e25a8a14cee1","Type":"ContainerDied","Data":"bc6ce507a944a4c0944bcc1982c4d469e2d7fdba5e2e507de9cffdb943ca267c"} Jan 20 17:03:27 crc kubenswrapper[4558]: I0120 17:03:27.300447 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:03:27 crc kubenswrapper[4558]: I0120 17:03:27.329935 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 17:03:27 crc kubenswrapper[4558]: I0120 17:03:27.329988 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 17:03:27 crc kubenswrapper[4558]: I0120 17:03:27.330034 4558 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" Jan 20 17:03:27 crc kubenswrapper[4558]: I0120 17:03:27.330887 4558 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"27436b5757afe01efb5672d1056f7069457346353ae2e6eed76a9879c2ed4ed6"} pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 20 17:03:27 crc kubenswrapper[4558]: I0120 17:03:27.330958 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" containerID="cri-o://27436b5757afe01efb5672d1056f7069457346353ae2e6eed76a9879c2ed4ed6" gracePeriod=600 Jan 20 17:03:27 crc kubenswrapper[4558]: I0120 17:03:27.408840 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lh9dk\" (UniqueName: \"kubernetes.io/projected/b6a69309-962d-4a4a-8fd0-e25a8a14cee1-kube-api-access-lh9dk\") pod \"b6a69309-962d-4a4a-8fd0-e25a8a14cee1\" (UID: \"b6a69309-962d-4a4a-8fd0-e25a8a14cee1\") " Jan 20 17:03:27 crc kubenswrapper[4558]: I0120 17:03:27.408946 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6a69309-962d-4a4a-8fd0-e25a8a14cee1-public-tls-certs\") pod \"b6a69309-962d-4a4a-8fd0-e25a8a14cee1\" (UID: \"b6a69309-962d-4a4a-8fd0-e25a8a14cee1\") " Jan 20 17:03:27 crc kubenswrapper[4558]: I0120 17:03:27.408981 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b6a69309-962d-4a4a-8fd0-e25a8a14cee1-logs\") pod \"b6a69309-962d-4a4a-8fd0-e25a8a14cee1\" (UID: \"b6a69309-962d-4a4a-8fd0-e25a8a14cee1\") " Jan 20 17:03:27 crc kubenswrapper[4558]: I0120 17:03:27.409006 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6a69309-962d-4a4a-8fd0-e25a8a14cee1-combined-ca-bundle\") pod \"b6a69309-962d-4a4a-8fd0-e25a8a14cee1\" (UID: \"b6a69309-962d-4a4a-8fd0-e25a8a14cee1\") " Jan 20 17:03:27 crc kubenswrapper[4558]: I0120 17:03:27.409052 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"b6a69309-962d-4a4a-8fd0-e25a8a14cee1\" (UID: 
\"b6a69309-962d-4a4a-8fd0-e25a8a14cee1\") " Jan 20 17:03:27 crc kubenswrapper[4558]: I0120 17:03:27.409076 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6a69309-962d-4a4a-8fd0-e25a8a14cee1-config-data\") pod \"b6a69309-962d-4a4a-8fd0-e25a8a14cee1\" (UID: \"b6a69309-962d-4a4a-8fd0-e25a8a14cee1\") " Jan 20 17:03:27 crc kubenswrapper[4558]: I0120 17:03:27.409102 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b6a69309-962d-4a4a-8fd0-e25a8a14cee1-scripts\") pod \"b6a69309-962d-4a4a-8fd0-e25a8a14cee1\" (UID: \"b6a69309-962d-4a4a-8fd0-e25a8a14cee1\") " Jan 20 17:03:27 crc kubenswrapper[4558]: I0120 17:03:27.409199 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b6a69309-962d-4a4a-8fd0-e25a8a14cee1-httpd-run\") pod \"b6a69309-962d-4a4a-8fd0-e25a8a14cee1\" (UID: \"b6a69309-962d-4a4a-8fd0-e25a8a14cee1\") " Jan 20 17:03:27 crc kubenswrapper[4558]: I0120 17:03:27.409405 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b6a69309-962d-4a4a-8fd0-e25a8a14cee1-logs" (OuterVolumeSpecName: "logs") pod "b6a69309-962d-4a4a-8fd0-e25a8a14cee1" (UID: "b6a69309-962d-4a4a-8fd0-e25a8a14cee1"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:03:27 crc kubenswrapper[4558]: I0120 17:03:27.409673 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b6a69309-962d-4a4a-8fd0-e25a8a14cee1-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "b6a69309-962d-4a4a-8fd0-e25a8a14cee1" (UID: "b6a69309-962d-4a4a-8fd0-e25a8a14cee1"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:03:27 crc kubenswrapper[4558]: I0120 17:03:27.409931 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b6a69309-962d-4a4a-8fd0-e25a8a14cee1-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:27 crc kubenswrapper[4558]: I0120 17:03:27.409948 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b6a69309-962d-4a4a-8fd0-e25a8a14cee1-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:27 crc kubenswrapper[4558]: I0120 17:03:27.414248 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6a69309-962d-4a4a-8fd0-e25a8a14cee1-kube-api-access-lh9dk" (OuterVolumeSpecName: "kube-api-access-lh9dk") pod "b6a69309-962d-4a4a-8fd0-e25a8a14cee1" (UID: "b6a69309-962d-4a4a-8fd0-e25a8a14cee1"). InnerVolumeSpecName "kube-api-access-lh9dk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:03:27 crc kubenswrapper[4558]: I0120 17:03:27.415611 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "glance") pod "b6a69309-962d-4a4a-8fd0-e25a8a14cee1" (UID: "b6a69309-962d-4a4a-8fd0-e25a8a14cee1"). InnerVolumeSpecName "local-storage01-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:03:27 crc kubenswrapper[4558]: I0120 17:03:27.415772 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6a69309-962d-4a4a-8fd0-e25a8a14cee1-scripts" (OuterVolumeSpecName: "scripts") pod "b6a69309-962d-4a4a-8fd0-e25a8a14cee1" (UID: "b6a69309-962d-4a4a-8fd0-e25a8a14cee1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:03:27 crc kubenswrapper[4558]: I0120 17:03:27.439209 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6a69309-962d-4a4a-8fd0-e25a8a14cee1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b6a69309-962d-4a4a-8fd0-e25a8a14cee1" (UID: "b6a69309-962d-4a4a-8fd0-e25a8a14cee1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:03:27 crc kubenswrapper[4558]: I0120 17:03:27.446252 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6a69309-962d-4a4a-8fd0-e25a8a14cee1-config-data" (OuterVolumeSpecName: "config-data") pod "b6a69309-962d-4a4a-8fd0-e25a8a14cee1" (UID: "b6a69309-962d-4a4a-8fd0-e25a8a14cee1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:03:27 crc kubenswrapper[4558]: I0120 17:03:27.447056 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6a69309-962d-4a4a-8fd0-e25a8a14cee1-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "b6a69309-962d-4a4a-8fd0-e25a8a14cee1" (UID: "b6a69309-962d-4a4a-8fd0-e25a8a14cee1"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:03:27 crc kubenswrapper[4558]: I0120 17:03:27.512672 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lh9dk\" (UniqueName: \"kubernetes.io/projected/b6a69309-962d-4a4a-8fd0-e25a8a14cee1-kube-api-access-lh9dk\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:27 crc kubenswrapper[4558]: I0120 17:03:27.512704 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6a69309-962d-4a4a-8fd0-e25a8a14cee1-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:27 crc kubenswrapper[4558]: I0120 17:03:27.512714 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6a69309-962d-4a4a-8fd0-e25a8a14cee1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:27 crc kubenswrapper[4558]: I0120 17:03:27.512758 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Jan 20 17:03:27 crc kubenswrapper[4558]: I0120 17:03:27.512768 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6a69309-962d-4a4a-8fd0-e25a8a14cee1-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:27 crc kubenswrapper[4558]: I0120 17:03:27.512776 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b6a69309-962d-4a4a-8fd0-e25a8a14cee1-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:27 crc kubenswrapper[4558]: I0120 17:03:27.531789 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: 
"kubernetes.io/local-volume/local-storage01-crc") on node "crc" Jan 20 17:03:27 crc kubenswrapper[4558]: I0120 17:03:27.614526 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:27 crc kubenswrapper[4558]: I0120 17:03:27.797240 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="25076c04-6d0d-4c02-a58f-de14094b79b5" containerName="glance-log" probeResult="failure" output="Get \"https://10.217.0.239:9292/healthcheck\": read tcp 10.217.0.2:37914->10.217.0.239:9292: read: connection reset by peer" Jan 20 17:03:27 crc kubenswrapper[4558]: I0120 17:03:27.797309 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="25076c04-6d0d-4c02-a58f-de14094b79b5" containerName="glance-httpd" probeResult="failure" output="Get \"https://10.217.0.239:9292/healthcheck\": read tcp 10.217.0.2:37912->10.217.0.239:9292: read: connection reset by peer" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.104725 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.112541 4558 generic.go:334] "Generic (PLEG): container finished" podID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerID="27436b5757afe01efb5672d1056f7069457346353ae2e6eed76a9879c2ed4ed6" exitCode=0 Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.112595 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerDied","Data":"27436b5757afe01efb5672d1056f7069457346353ae2e6eed76a9879c2ed4ed6"} Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.112642 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerStarted","Data":"a711a286282347590079d2447ef37fa9ed0f11085d6d7a65f85e65c1c2df608f"} Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.112663 4558 scope.go:117] "RemoveContainer" containerID="ed4e09803bdddac3e0ae0eefbe36e242b8c6be19d37cc1c52ac24044a0f94446" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.118524 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"ed3e3e29-3089-4ee3-9cd6-2bfe6024822e","Type":"ContainerStarted","Data":"3c84dee1556591cd3998c2b76ae0683f6ba2f86529f3023972cb334f19db22a5"} Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.125129 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25076c04-6d0d-4c02-a58f-de14094b79b5-combined-ca-bundle\") pod \"25076c04-6d0d-4c02-a58f-de14094b79b5\" (UID: \"25076c04-6d0d-4c02-a58f-de14094b79b5\") " Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.125471 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qlfjl\" (UniqueName: \"kubernetes.io/projected/25076c04-6d0d-4c02-a58f-de14094b79b5-kube-api-access-qlfjl\") pod \"25076c04-6d0d-4c02-a58f-de14094b79b5\" (UID: \"25076c04-6d0d-4c02-a58f-de14094b79b5\") " Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.125531 4558 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/25076c04-6d0d-4c02-a58f-de14094b79b5-logs\") pod \"25076c04-6d0d-4c02-a58f-de14094b79b5\" (UID: \"25076c04-6d0d-4c02-a58f-de14094b79b5\") " Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.125569 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"b6a69309-962d-4a4a-8fd0-e25a8a14cee1","Type":"ContainerDied","Data":"d50487afc6d3eddd59efff8521e89952d0cbbfa251cbe70b7e1c3e469fa599c4"} Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.125642 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/25076c04-6d0d-4c02-a58f-de14094b79b5-httpd-run\") pod \"25076c04-6d0d-4c02-a58f-de14094b79b5\" (UID: \"25076c04-6d0d-4c02-a58f-de14094b79b5\") " Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.125673 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.125806 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"25076c04-6d0d-4c02-a58f-de14094b79b5\" (UID: \"25076c04-6d0d-4c02-a58f-de14094b79b5\") " Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.125833 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/25076c04-6d0d-4c02-a58f-de14094b79b5-internal-tls-certs\") pod \"25076c04-6d0d-4c02-a58f-de14094b79b5\" (UID: \"25076c04-6d0d-4c02-a58f-de14094b79b5\") " Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.125897 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/25076c04-6d0d-4c02-a58f-de14094b79b5-scripts\") pod \"25076c04-6d0d-4c02-a58f-de14094b79b5\" (UID: \"25076c04-6d0d-4c02-a58f-de14094b79b5\") " Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.125973 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25076c04-6d0d-4c02-a58f-de14094b79b5-config-data\") pod \"25076c04-6d0d-4c02-a58f-de14094b79b5\" (UID: \"25076c04-6d0d-4c02-a58f-de14094b79b5\") " Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.128412 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/25076c04-6d0d-4c02-a58f-de14094b79b5-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "25076c04-6d0d-4c02-a58f-de14094b79b5" (UID: "25076c04-6d0d-4c02-a58f-de14094b79b5"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.130477 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/25076c04-6d0d-4c02-a58f-de14094b79b5-logs" (OuterVolumeSpecName: "logs") pod "25076c04-6d0d-4c02-a58f-de14094b79b5" (UID: "25076c04-6d0d-4c02-a58f-de14094b79b5"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.138086 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25076c04-6d0d-4c02-a58f-de14094b79b5-scripts" (OuterVolumeSpecName: "scripts") pod "25076c04-6d0d-4c02-a58f-de14094b79b5" (UID: "25076c04-6d0d-4c02-a58f-de14094b79b5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.140156 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "glance") pod "25076c04-6d0d-4c02-a58f-de14094b79b5" (UID: "25076c04-6d0d-4c02-a58f-de14094b79b5"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.143972 4558 scope.go:117] "RemoveContainer" containerID="bc6ce507a944a4c0944bcc1982c4d469e2d7fdba5e2e507de9cffdb943ca267c" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.151378 4558 generic.go:334] "Generic (PLEG): container finished" podID="25076c04-6d0d-4c02-a58f-de14094b79b5" containerID="ad62e8788a08fae6beba57e28c588e43f4c96d6537c969b42d6e6044e423af6f" exitCode=0 Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.151410 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25076c04-6d0d-4c02-a58f-de14094b79b5-kube-api-access-qlfjl" (OuterVolumeSpecName: "kube-api-access-qlfjl") pod "25076c04-6d0d-4c02-a58f-de14094b79b5" (UID: "25076c04-6d0d-4c02-a58f-de14094b79b5"). InnerVolumeSpecName "kube-api-access-qlfjl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.151423 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"25076c04-6d0d-4c02-a58f-de14094b79b5","Type":"ContainerDied","Data":"ad62e8788a08fae6beba57e28c588e43f4c96d6537c969b42d6e6044e423af6f"} Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.151454 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"25076c04-6d0d-4c02-a58f-de14094b79b5","Type":"ContainerDied","Data":"b5b27ddbc918c18ad0aeb6ee2a137e215568ac839c0f5f060fab85a4c4116768"} Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.151504 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.172431 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.183823 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.187900 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:03:28 crc kubenswrapper[4558]: E0120 17:03:28.196233 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25076c04-6d0d-4c02-a58f-de14094b79b5" containerName="glance-httpd" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.196255 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="25076c04-6d0d-4c02-a58f-de14094b79b5" containerName="glance-httpd" Jan 20 17:03:28 crc kubenswrapper[4558]: E0120 17:03:28.196270 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25076c04-6d0d-4c02-a58f-de14094b79b5" containerName="glance-log" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.196285 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="25076c04-6d0d-4c02-a58f-de14094b79b5" containerName="glance-log" Jan 20 17:03:28 crc kubenswrapper[4558]: E0120 17:03:28.196308 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6a69309-962d-4a4a-8fd0-e25a8a14cee1" containerName="glance-httpd" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.196313 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6a69309-962d-4a4a-8fd0-e25a8a14cee1" containerName="glance-httpd" Jan 20 17:03:28 crc kubenswrapper[4558]: E0120 17:03:28.196322 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6a69309-962d-4a4a-8fd0-e25a8a14cee1" containerName="glance-log" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.196327 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6a69309-962d-4a4a-8fd0-e25a8a14cee1" containerName="glance-log" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.196486 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="25076c04-6d0d-4c02-a58f-de14094b79b5" containerName="glance-httpd" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.196505 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b6a69309-962d-4a4a-8fd0-e25a8a14cee1" containerName="glance-log" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.196514 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="25076c04-6d0d-4c02-a58f-de14094b79b5" containerName="glance-log" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.196526 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b6a69309-962d-4a4a-8fd0-e25a8a14cee1" containerName="glance-httpd" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.197329 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.198754 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.199652 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25076c04-6d0d-4c02-a58f-de14094b79b5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "25076c04-6d0d-4c02-a58f-de14094b79b5" (UID: "25076c04-6d0d-4c02-a58f-de14094b79b5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.200527 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-external-config-data" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.200682 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-public-svc" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.210087 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25076c04-6d0d-4c02-a58f-de14094b79b5-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "25076c04-6d0d-4c02-a58f-de14094b79b5" (UID: "25076c04-6d0d-4c02-a58f-de14094b79b5"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.212818 4558 scope.go:117] "RemoveContainer" containerID="5bb4f48f82b9f26f09753ee0b330ab1b18a6ee75ea0a48f26d817acaf1398abd" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.220585 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25076c04-6d0d-4c02-a58f-de14094b79b5-config-data" (OuterVolumeSpecName: "config-data") pod "25076c04-6d0d-4c02-a58f-de14094b79b5" (UID: "25076c04-6d0d-4c02-a58f-de14094b79b5"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.227613 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d84w8\" (UniqueName: \"kubernetes.io/projected/20ba561a-58e7-459f-ba28-ed0b68cdab9b-kube-api-access-d84w8\") pod \"glance-default-external-api-0\" (UID: \"20ba561a-58e7-459f-ba28-ed0b68cdab9b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.227696 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-external-api-0\" (UID: \"20ba561a-58e7-459f-ba28-ed0b68cdab9b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.227725 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/20ba561a-58e7-459f-ba28-ed0b68cdab9b-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"20ba561a-58e7-459f-ba28-ed0b68cdab9b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.227766 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20ba561a-58e7-459f-ba28-ed0b68cdab9b-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"20ba561a-58e7-459f-ba28-ed0b68cdab9b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.227801 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/20ba561a-58e7-459f-ba28-ed0b68cdab9b-logs\") pod \"glance-default-external-api-0\" (UID: \"20ba561a-58e7-459f-ba28-ed0b68cdab9b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.227863 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/20ba561a-58e7-459f-ba28-ed0b68cdab9b-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"20ba561a-58e7-459f-ba28-ed0b68cdab9b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.227893 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/20ba561a-58e7-459f-ba28-ed0b68cdab9b-config-data\") pod \"glance-default-external-api-0\" (UID: \"20ba561a-58e7-459f-ba28-ed0b68cdab9b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.228045 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/20ba561a-58e7-459f-ba28-ed0b68cdab9b-scripts\") pod \"glance-default-external-api-0\" (UID: \"20ba561a-58e7-459f-ba28-ed0b68cdab9b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.228150 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/25076c04-6d0d-4c02-a58f-de14094b79b5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.228208 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qlfjl\" (UniqueName: \"kubernetes.io/projected/25076c04-6d0d-4c02-a58f-de14094b79b5-kube-api-access-qlfjl\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.228217 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/25076c04-6d0d-4c02-a58f-de14094b79b5-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.228225 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/25076c04-6d0d-4c02-a58f-de14094b79b5-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.228244 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.228255 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/25076c04-6d0d-4c02-a58f-de14094b79b5-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.228319 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/25076c04-6d0d-4c02-a58f-de14094b79b5-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.228328 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25076c04-6d0d-4c02-a58f-de14094b79b5-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.238645 4558 scope.go:117] "RemoveContainer" containerID="ad62e8788a08fae6beba57e28c588e43f4c96d6537c969b42d6e6044e423af6f" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.246066 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.276396 4558 scope.go:117] "RemoveContainer" containerID="59d23ff3cd2fbff08e58e91492448fe0e07b2099c36d37f02ee2bf1870adf4fc" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.294525 4558 scope.go:117] "RemoveContainer" containerID="ad62e8788a08fae6beba57e28c588e43f4c96d6537c969b42d6e6044e423af6f" Jan 20 17:03:28 crc kubenswrapper[4558]: E0120 17:03:28.294856 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ad62e8788a08fae6beba57e28c588e43f4c96d6537c969b42d6e6044e423af6f\": container with ID starting with ad62e8788a08fae6beba57e28c588e43f4c96d6537c969b42d6e6044e423af6f not found: ID does not exist" containerID="ad62e8788a08fae6beba57e28c588e43f4c96d6537c969b42d6e6044e423af6f" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.294890 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ad62e8788a08fae6beba57e28c588e43f4c96d6537c969b42d6e6044e423af6f"} err="failed to get container status \"ad62e8788a08fae6beba57e28c588e43f4c96d6537c969b42d6e6044e423af6f\": rpc error: code = NotFound desc = could not 
find container \"ad62e8788a08fae6beba57e28c588e43f4c96d6537c969b42d6e6044e423af6f\": container with ID starting with ad62e8788a08fae6beba57e28c588e43f4c96d6537c969b42d6e6044e423af6f not found: ID does not exist" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.294918 4558 scope.go:117] "RemoveContainer" containerID="59d23ff3cd2fbff08e58e91492448fe0e07b2099c36d37f02ee2bf1870adf4fc" Jan 20 17:03:28 crc kubenswrapper[4558]: E0120 17:03:28.295228 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"59d23ff3cd2fbff08e58e91492448fe0e07b2099c36d37f02ee2bf1870adf4fc\": container with ID starting with 59d23ff3cd2fbff08e58e91492448fe0e07b2099c36d37f02ee2bf1870adf4fc not found: ID does not exist" containerID="59d23ff3cd2fbff08e58e91492448fe0e07b2099c36d37f02ee2bf1870adf4fc" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.295255 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"59d23ff3cd2fbff08e58e91492448fe0e07b2099c36d37f02ee2bf1870adf4fc"} err="failed to get container status \"59d23ff3cd2fbff08e58e91492448fe0e07b2099c36d37f02ee2bf1870adf4fc\": rpc error: code = NotFound desc = could not find container \"59d23ff3cd2fbff08e58e91492448fe0e07b2099c36d37f02ee2bf1870adf4fc\": container with ID starting with 59d23ff3cd2fbff08e58e91492448fe0e07b2099c36d37f02ee2bf1870adf4fc not found: ID does not exist" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.329148 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/20ba561a-58e7-459f-ba28-ed0b68cdab9b-logs\") pod \"glance-default-external-api-0\" (UID: \"20ba561a-58e7-459f-ba28-ed0b68cdab9b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.329324 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/20ba561a-58e7-459f-ba28-ed0b68cdab9b-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"20ba561a-58e7-459f-ba28-ed0b68cdab9b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.329408 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/20ba561a-58e7-459f-ba28-ed0b68cdab9b-config-data\") pod \"glance-default-external-api-0\" (UID: \"20ba561a-58e7-459f-ba28-ed0b68cdab9b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.329497 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/20ba561a-58e7-459f-ba28-ed0b68cdab9b-scripts\") pod \"glance-default-external-api-0\" (UID: \"20ba561a-58e7-459f-ba28-ed0b68cdab9b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.329583 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d84w8\" (UniqueName: \"kubernetes.io/projected/20ba561a-58e7-459f-ba28-ed0b68cdab9b-kube-api-access-d84w8\") pod \"glance-default-external-api-0\" (UID: \"20ba561a-58e7-459f-ba28-ed0b68cdab9b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.329676 4558 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-external-api-0\" (UID: \"20ba561a-58e7-459f-ba28-ed0b68cdab9b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.329740 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/20ba561a-58e7-459f-ba28-ed0b68cdab9b-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"20ba561a-58e7-459f-ba28-ed0b68cdab9b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.329820 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20ba561a-58e7-459f-ba28-ed0b68cdab9b-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"20ba561a-58e7-459f-ba28-ed0b68cdab9b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.329533 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/20ba561a-58e7-459f-ba28-ed0b68cdab9b-logs\") pod \"glance-default-external-api-0\" (UID: \"20ba561a-58e7-459f-ba28-ed0b68cdab9b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.330120 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.330193 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-external-api-0\" (UID: \"20ba561a-58e7-459f-ba28-ed0b68cdab9b\") device mount path \"/mnt/openstack/pv01\"" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.330436 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/20ba561a-58e7-459f-ba28-ed0b68cdab9b-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"20ba561a-58e7-459f-ba28-ed0b68cdab9b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.332936 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/20ba561a-58e7-459f-ba28-ed0b68cdab9b-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"20ba561a-58e7-459f-ba28-ed0b68cdab9b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.338682 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/20ba561a-58e7-459f-ba28-ed0b68cdab9b-config-data\") pod \"glance-default-external-api-0\" (UID: \"20ba561a-58e7-459f-ba28-ed0b68cdab9b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.339534 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/20ba561a-58e7-459f-ba28-ed0b68cdab9b-scripts\") pod 
\"glance-default-external-api-0\" (UID: \"20ba561a-58e7-459f-ba28-ed0b68cdab9b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.343208 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20ba561a-58e7-459f-ba28-ed0b68cdab9b-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"20ba561a-58e7-459f-ba28-ed0b68cdab9b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.343401 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d84w8\" (UniqueName: \"kubernetes.io/projected/20ba561a-58e7-459f-ba28-ed0b68cdab9b-kube-api-access-d84w8\") pod \"glance-default-external-api-0\" (UID: \"20ba561a-58e7-459f-ba28-ed0b68cdab9b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.358343 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"glance-default-external-api-0\" (UID: \"20ba561a-58e7-459f-ba28-ed0b68cdab9b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.529745 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.530548 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.554225 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.583800 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25076c04-6d0d-4c02-a58f-de14094b79b5" path="/var/lib/kubelet/pods/25076c04-6d0d-4c02-a58f-de14094b79b5/volumes" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.584755 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6a69309-962d-4a4a-8fd0-e25a8a14cee1" path="/var/lib/kubelet/pods/b6a69309-962d-4a4a-8fd0-e25a8a14cee1/volumes" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.632225 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.633885 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.636748 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-internal-config-data" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.638410 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-internal-svc" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.648054 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.772302 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa387033-dc08-48f4-bf56-06a7f316423c-scripts\") pod \"glance-default-internal-api-0\" (UID: \"aa387033-dc08-48f4-bf56-06a7f316423c\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.772342 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rs4zr\" (UniqueName: \"kubernetes.io/projected/aa387033-dc08-48f4-bf56-06a7f316423c-kube-api-access-rs4zr\") pod \"glance-default-internal-api-0\" (UID: \"aa387033-dc08-48f4-bf56-06a7f316423c\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.772363 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa387033-dc08-48f4-bf56-06a7f316423c-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"aa387033-dc08-48f4-bf56-06a7f316423c\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.772421 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/aa387033-dc08-48f4-bf56-06a7f316423c-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"aa387033-dc08-48f4-bf56-06a7f316423c\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.772459 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"aa387033-dc08-48f4-bf56-06a7f316423c\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.772516 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aa387033-dc08-48f4-bf56-06a7f316423c-logs\") pod \"glance-default-internal-api-0\" (UID: \"aa387033-dc08-48f4-bf56-06a7f316423c\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.772545 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa387033-dc08-48f4-bf56-06a7f316423c-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"aa387033-dc08-48f4-bf56-06a7f316423c\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 
17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.772580 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa387033-dc08-48f4-bf56-06a7f316423c-config-data\") pod \"glance-default-internal-api-0\" (UID: \"aa387033-dc08-48f4-bf56-06a7f316423c\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.873620 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa387033-dc08-48f4-bf56-06a7f316423c-scripts\") pod \"glance-default-internal-api-0\" (UID: \"aa387033-dc08-48f4-bf56-06a7f316423c\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.873842 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rs4zr\" (UniqueName: \"kubernetes.io/projected/aa387033-dc08-48f4-bf56-06a7f316423c-kube-api-access-rs4zr\") pod \"glance-default-internal-api-0\" (UID: \"aa387033-dc08-48f4-bf56-06a7f316423c\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.873880 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa387033-dc08-48f4-bf56-06a7f316423c-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"aa387033-dc08-48f4-bf56-06a7f316423c\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.873925 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/aa387033-dc08-48f4-bf56-06a7f316423c-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"aa387033-dc08-48f4-bf56-06a7f316423c\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.873955 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"aa387033-dc08-48f4-bf56-06a7f316423c\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.874003 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aa387033-dc08-48f4-bf56-06a7f316423c-logs\") pod \"glance-default-internal-api-0\" (UID: \"aa387033-dc08-48f4-bf56-06a7f316423c\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.874025 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa387033-dc08-48f4-bf56-06a7f316423c-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"aa387033-dc08-48f4-bf56-06a7f316423c\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.874055 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa387033-dc08-48f4-bf56-06a7f316423c-config-data\") pod \"glance-default-internal-api-0\" (UID: \"aa387033-dc08-48f4-bf56-06a7f316423c\") " 
pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.874400 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"aa387033-dc08-48f4-bf56-06a7f316423c\") device mount path \"/mnt/openstack/pv07\"" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.875649 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/aa387033-dc08-48f4-bf56-06a7f316423c-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"aa387033-dc08-48f4-bf56-06a7f316423c\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.875915 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aa387033-dc08-48f4-bf56-06a7f316423c-logs\") pod \"glance-default-internal-api-0\" (UID: \"aa387033-dc08-48f4-bf56-06a7f316423c\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.877126 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa387033-dc08-48f4-bf56-06a7f316423c-scripts\") pod \"glance-default-internal-api-0\" (UID: \"aa387033-dc08-48f4-bf56-06a7f316423c\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.882230 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa387033-dc08-48f4-bf56-06a7f316423c-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"aa387033-dc08-48f4-bf56-06a7f316423c\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.882623 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa387033-dc08-48f4-bf56-06a7f316423c-config-data\") pod \"glance-default-internal-api-0\" (UID: \"aa387033-dc08-48f4-bf56-06a7f316423c\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.884748 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa387033-dc08-48f4-bf56-06a7f316423c-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"aa387033-dc08-48f4-bf56-06a7f316423c\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.891538 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rs4zr\" (UniqueName: \"kubernetes.io/projected/aa387033-dc08-48f4-bf56-06a7f316423c-kube-api-access-rs4zr\") pod \"glance-default-internal-api-0\" (UID: \"aa387033-dc08-48f4-bf56-06a7f316423c\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:03:28 crc kubenswrapper[4558]: I0120 17:03:28.903325 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"aa387033-dc08-48f4-bf56-06a7f316423c\") " 
pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:03:29 crc kubenswrapper[4558]: I0120 17:03:29.010913 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:03:29 crc kubenswrapper[4558]: I0120 17:03:29.052905 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:03:29 crc kubenswrapper[4558]: W0120 17:03:29.074185 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod20ba561a_58e7_459f_ba28_ed0b68cdab9b.slice/crio-2e3974a4783643c1db14827b3c03ac543eee73ccc3cffba0f3755b5b3aee4b20 WatchSource:0}: Error finding container 2e3974a4783643c1db14827b3c03ac543eee73ccc3cffba0f3755b5b3aee4b20: Status 404 returned error can't find the container with id 2e3974a4783643c1db14827b3c03ac543eee73ccc3cffba0f3755b5b3aee4b20 Jan 20 17:03:29 crc kubenswrapper[4558]: I0120 17:03:29.187494 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"20ba561a-58e7-459f-ba28-ed0b68cdab9b","Type":"ContainerStarted","Data":"2e3974a4783643c1db14827b3c03ac543eee73ccc3cffba0f3755b5b3aee4b20"} Jan 20 17:03:29 crc kubenswrapper[4558]: I0120 17:03:29.189230 4558 generic.go:334] "Generic (PLEG): container finished" podID="8efaa8ce-5cd9-4688-8cef-0e3fb667105d" containerID="535ff1923649723165cb5389ef0ba724b32d3a882103118bddd27f257b9974e3" exitCode=0 Jan 20 17:03:29 crc kubenswrapper[4558]: I0120 17:03:29.189313 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-xwwl5" event={"ID":"8efaa8ce-5cd9-4688-8cef-0e3fb667105d","Type":"ContainerDied","Data":"535ff1923649723165cb5389ef0ba724b32d3a882103118bddd27f257b9974e3"} Jan 20 17:03:29 crc kubenswrapper[4558]: I0120 17:03:29.194337 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"ed3e3e29-3089-4ee3-9cd6-2bfe6024822e","Type":"ContainerStarted","Data":"597f6f2f77f2e91bff6d9bd8e7e014f7a39687a17007d0b5e8f73f9db8e20fba"} Jan 20 17:03:29 crc kubenswrapper[4558]: I0120 17:03:29.439553 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:03:29 crc kubenswrapper[4558]: W0120 17:03:29.448226 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaa387033_dc08_48f4_bf56_06a7f316423c.slice/crio-0896c30840f3c0d1f1d38416ad91e0d100b15726ec459c525d4c14686bd6c63c WatchSource:0}: Error finding container 0896c30840f3c0d1f1d38416ad91e0d100b15726ec459c525d4c14686bd6c63c: Status 404 returned error can't find the container with id 0896c30840f3c0d1f1d38416ad91e0d100b15726ec459c525d4c14686bd6c63c Jan 20 17:03:30 crc kubenswrapper[4558]: I0120 17:03:30.210801 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"aa387033-dc08-48f4-bf56-06a7f316423c","Type":"ContainerStarted","Data":"cb8be596c569251416c1e084cf32b390ad320911bfbd93edd669cf0bc7d40234"} Jan 20 17:03:30 crc kubenswrapper[4558]: I0120 17:03:30.211267 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" 
event={"ID":"aa387033-dc08-48f4-bf56-06a7f316423c","Type":"ContainerStarted","Data":"0896c30840f3c0d1f1d38416ad91e0d100b15726ec459c525d4c14686bd6c63c"} Jan 20 17:03:30 crc kubenswrapper[4558]: I0120 17:03:30.218003 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"20ba561a-58e7-459f-ba28-ed0b68cdab9b","Type":"ContainerStarted","Data":"148dd9de98eda3e5b9625476910ce41f12b5496fe3cc4cb92bbf3686a771a8e5"} Jan 20 17:03:30 crc kubenswrapper[4558]: I0120 17:03:30.218049 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"20ba561a-58e7-459f-ba28-ed0b68cdab9b","Type":"ContainerStarted","Data":"225fabc2e274bc055279016bab31443db7f376181931205b6688497f431e61fd"} Jan 20 17:03:30 crc kubenswrapper[4558]: I0120 17:03:30.259262 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-external-api-0" podStartSLOduration=2.2592433659999998 podStartE2EDuration="2.259243366s" podCreationTimestamp="2026-01-20 17:03:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:03:30.243956501 +0000 UTC m=+1304.004294469" watchObservedRunningTime="2026-01-20 17:03:30.259243366 +0000 UTC m=+1304.019581333" Jan 20 17:03:30 crc kubenswrapper[4558]: I0120 17:03:30.499387 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-xwwl5" Jan 20 17:03:30 crc kubenswrapper[4558]: I0120 17:03:30.610869 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8efaa8ce-5cd9-4688-8cef-0e3fb667105d-combined-ca-bundle\") pod \"8efaa8ce-5cd9-4688-8cef-0e3fb667105d\" (UID: \"8efaa8ce-5cd9-4688-8cef-0e3fb667105d\") " Jan 20 17:03:30 crc kubenswrapper[4558]: I0120 17:03:30.610914 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zw4x4\" (UniqueName: \"kubernetes.io/projected/8efaa8ce-5cd9-4688-8cef-0e3fb667105d-kube-api-access-zw4x4\") pod \"8efaa8ce-5cd9-4688-8cef-0e3fb667105d\" (UID: \"8efaa8ce-5cd9-4688-8cef-0e3fb667105d\") " Jan 20 17:03:30 crc kubenswrapper[4558]: I0120 17:03:30.611006 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8efaa8ce-5cd9-4688-8cef-0e3fb667105d-config-data\") pod \"8efaa8ce-5cd9-4688-8cef-0e3fb667105d\" (UID: \"8efaa8ce-5cd9-4688-8cef-0e3fb667105d\") " Jan 20 17:03:30 crc kubenswrapper[4558]: I0120 17:03:30.611131 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8efaa8ce-5cd9-4688-8cef-0e3fb667105d-scripts\") pod \"8efaa8ce-5cd9-4688-8cef-0e3fb667105d\" (UID: \"8efaa8ce-5cd9-4688-8cef-0e3fb667105d\") " Jan 20 17:03:30 crc kubenswrapper[4558]: I0120 17:03:30.618372 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8efaa8ce-5cd9-4688-8cef-0e3fb667105d-scripts" (OuterVolumeSpecName: "scripts") pod "8efaa8ce-5cd9-4688-8cef-0e3fb667105d" (UID: "8efaa8ce-5cd9-4688-8cef-0e3fb667105d"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:03:30 crc kubenswrapper[4558]: I0120 17:03:30.618396 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8efaa8ce-5cd9-4688-8cef-0e3fb667105d-kube-api-access-zw4x4" (OuterVolumeSpecName: "kube-api-access-zw4x4") pod "8efaa8ce-5cd9-4688-8cef-0e3fb667105d" (UID: "8efaa8ce-5cd9-4688-8cef-0e3fb667105d"). InnerVolumeSpecName "kube-api-access-zw4x4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:03:30 crc kubenswrapper[4558]: I0120 17:03:30.641861 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8efaa8ce-5cd9-4688-8cef-0e3fb667105d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8efaa8ce-5cd9-4688-8cef-0e3fb667105d" (UID: "8efaa8ce-5cd9-4688-8cef-0e3fb667105d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:03:30 crc kubenswrapper[4558]: I0120 17:03:30.643318 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8efaa8ce-5cd9-4688-8cef-0e3fb667105d-config-data" (OuterVolumeSpecName: "config-data") pod "8efaa8ce-5cd9-4688-8cef-0e3fb667105d" (UID: "8efaa8ce-5cd9-4688-8cef-0e3fb667105d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:03:30 crc kubenswrapper[4558]: I0120 17:03:30.714056 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8efaa8ce-5cd9-4688-8cef-0e3fb667105d-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:30 crc kubenswrapper[4558]: I0120 17:03:30.714418 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8efaa8ce-5cd9-4688-8cef-0e3fb667105d-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:30 crc kubenswrapper[4558]: I0120 17:03:30.714436 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8efaa8ce-5cd9-4688-8cef-0e3fb667105d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:30 crc kubenswrapper[4558]: I0120 17:03:30.714449 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zw4x4\" (UniqueName: \"kubernetes.io/projected/8efaa8ce-5cd9-4688-8cef-0e3fb667105d-kube-api-access-zw4x4\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:31 crc kubenswrapper[4558]: I0120 17:03:31.239483 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"ed3e3e29-3089-4ee3-9cd6-2bfe6024822e","Type":"ContainerStarted","Data":"dc07f35c47cd97b9f92c97b82d78090c6222ac01d9e5f27c7fb71c3aaa5d25ee"} Jan 20 17:03:31 crc kubenswrapper[4558]: I0120 17:03:31.242574 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-xwwl5" event={"ID":"8efaa8ce-5cd9-4688-8cef-0e3fb667105d","Type":"ContainerDied","Data":"e4aa2292a0de814eef0e7a7f0377313c92e8d1600aa0e6e3f9380b9dcfcafdf1"} Jan 20 17:03:31 crc kubenswrapper[4558]: I0120 17:03:31.242631 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e4aa2292a0de814eef0e7a7f0377313c92e8d1600aa0e6e3f9380b9dcfcafdf1" Jan 20 17:03:31 crc kubenswrapper[4558]: I0120 17:03:31.242695 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-xwwl5" Jan 20 17:03:31 crc kubenswrapper[4558]: I0120 17:03:31.250050 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"aa387033-dc08-48f4-bf56-06a7f316423c","Type":"ContainerStarted","Data":"32b6c39544892ca39cef55614e1cf5d22b40b7f421e0e33ba0fd15c7434cf00e"} Jan 20 17:03:31 crc kubenswrapper[4558]: I0120 17:03:31.272734 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-internal-api-0" podStartSLOduration=3.272703147 podStartE2EDuration="3.272703147s" podCreationTimestamp="2026-01-20 17:03:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:03:31.267318172 +0000 UTC m=+1305.027656139" watchObservedRunningTime="2026-01-20 17:03:31.272703147 +0000 UTC m=+1305.033041114" Jan 20 17:03:31 crc kubenswrapper[4558]: I0120 17:03:31.298197 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:03:31 crc kubenswrapper[4558]: E0120 17:03:31.298642 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8efaa8ce-5cd9-4688-8cef-0e3fb667105d" containerName="nova-cell0-conductor-db-sync" Jan 20 17:03:31 crc kubenswrapper[4558]: I0120 17:03:31.298663 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8efaa8ce-5cd9-4688-8cef-0e3fb667105d" containerName="nova-cell0-conductor-db-sync" Jan 20 17:03:31 crc kubenswrapper[4558]: I0120 17:03:31.298864 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8efaa8ce-5cd9-4688-8cef-0e3fb667105d" containerName="nova-cell0-conductor-db-sync" Jan 20 17:03:31 crc kubenswrapper[4558]: I0120 17:03:31.299473 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:03:31 crc kubenswrapper[4558]: I0120 17:03:31.311153 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-nova-dockercfg-sggwd" Jan 20 17:03:31 crc kubenswrapper[4558]: I0120 17:03:31.311578 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-conductor-config-data" Jan 20 17:03:31 crc kubenswrapper[4558]: I0120 17:03:31.316599 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:03:31 crc kubenswrapper[4558]: I0120 17:03:31.426764 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9glw8\" (UniqueName: \"kubernetes.io/projected/55d5d50d-edc1-4a99-9540-72a7f5f0c622-kube-api-access-9glw8\") pod \"nova-cell0-conductor-0\" (UID: \"55d5d50d-edc1-4a99-9540-72a7f5f0c622\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:03:31 crc kubenswrapper[4558]: I0120 17:03:31.427242 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/55d5d50d-edc1-4a99-9540-72a7f5f0c622-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"55d5d50d-edc1-4a99-9540-72a7f5f0c622\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:03:31 crc kubenswrapper[4558]: I0120 17:03:31.427399 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55d5d50d-edc1-4a99-9540-72a7f5f0c622-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"55d5d50d-edc1-4a99-9540-72a7f5f0c622\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:03:31 crc kubenswrapper[4558]: I0120 17:03:31.528762 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/55d5d50d-edc1-4a99-9540-72a7f5f0c622-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"55d5d50d-edc1-4a99-9540-72a7f5f0c622\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:03:31 crc kubenswrapper[4558]: I0120 17:03:31.528836 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55d5d50d-edc1-4a99-9540-72a7f5f0c622-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"55d5d50d-edc1-4a99-9540-72a7f5f0c622\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:03:31 crc kubenswrapper[4558]: I0120 17:03:31.528903 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9glw8\" (UniqueName: \"kubernetes.io/projected/55d5d50d-edc1-4a99-9540-72a7f5f0c622-kube-api-access-9glw8\") pod \"nova-cell0-conductor-0\" (UID: \"55d5d50d-edc1-4a99-9540-72a7f5f0c622\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:03:31 crc kubenswrapper[4558]: I0120 17:03:31.534115 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55d5d50d-edc1-4a99-9540-72a7f5f0c622-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"55d5d50d-edc1-4a99-9540-72a7f5f0c622\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:03:31 crc kubenswrapper[4558]: I0120 17:03:31.550343 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/55d5d50d-edc1-4a99-9540-72a7f5f0c622-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"55d5d50d-edc1-4a99-9540-72a7f5f0c622\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:03:31 crc kubenswrapper[4558]: I0120 17:03:31.552202 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9glw8\" (UniqueName: \"kubernetes.io/projected/55d5d50d-edc1-4a99-9540-72a7f5f0c622-kube-api-access-9glw8\") pod \"nova-cell0-conductor-0\" (UID: \"55d5d50d-edc1-4a99-9540-72a7f5f0c622\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:03:31 crc kubenswrapper[4558]: I0120 17:03:31.626835 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:03:32 crc kubenswrapper[4558]: I0120 17:03:32.040599 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:03:32 crc kubenswrapper[4558]: I0120 17:03:32.264302 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"55d5d50d-edc1-4a99-9540-72a7f5f0c622","Type":"ContainerStarted","Data":"e3a0dcbc1a9d518fbc9c91fd7af2a3a13be64f9278f06d3e4672d909735d4773"} Jan 20 17:03:32 crc kubenswrapper[4558]: I0120 17:03:32.264553 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"55d5d50d-edc1-4a99-9540-72a7f5f0c622","Type":"ContainerStarted","Data":"7dad1523c815b9ba558a68733253bdd758de2b9cb63d7ef00e262899b20e6c03"} Jan 20 17:03:32 crc kubenswrapper[4558]: I0120 17:03:32.264575 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:03:32 crc kubenswrapper[4558]: I0120 17:03:32.280795 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podStartSLOduration=1.280776771 podStartE2EDuration="1.280776771s" podCreationTimestamp="2026-01-20 17:03:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:03:32.27957405 +0000 UTC m=+1306.039912017" watchObservedRunningTime="2026-01-20 17:03:32.280776771 +0000 UTC m=+1306.041114738" Jan 20 17:03:33 crc kubenswrapper[4558]: I0120 17:03:33.285529 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"ed3e3e29-3089-4ee3-9cd6-2bfe6024822e","Type":"ContainerStarted","Data":"a349550fd5a4a13d56641574f32fe6888b3a0a5869ba322fe163d8749ed33acc"} Jan 20 17:03:33 crc kubenswrapper[4558]: I0120 17:03:33.285852 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="ed3e3e29-3089-4ee3-9cd6-2bfe6024822e" containerName="ceilometer-central-agent" containerID="cri-o://3c84dee1556591cd3998c2b76ae0683f6ba2f86529f3023972cb334f19db22a5" gracePeriod=30 Jan 20 17:03:33 crc kubenswrapper[4558]: I0120 17:03:33.285916 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="ed3e3e29-3089-4ee3-9cd6-2bfe6024822e" containerName="proxy-httpd" containerID="cri-o://a349550fd5a4a13d56641574f32fe6888b3a0a5869ba322fe163d8749ed33acc" gracePeriod=30 Jan 20 17:03:33 crc kubenswrapper[4558]: I0120 17:03:33.285942 4558 kuberuntime_container.go:808] "Killing container with a grace 
period" pod="openstack-kuttl-tests/ceilometer-0" podUID="ed3e3e29-3089-4ee3-9cd6-2bfe6024822e" containerName="ceilometer-notification-agent" containerID="cri-o://597f6f2f77f2e91bff6d9bd8e7e014f7a39687a17007d0b5e8f73f9db8e20fba" gracePeriod=30 Jan 20 17:03:33 crc kubenswrapper[4558]: I0120 17:03:33.285918 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="ed3e3e29-3089-4ee3-9cd6-2bfe6024822e" containerName="sg-core" containerID="cri-o://dc07f35c47cd97b9f92c97b82d78090c6222ac01d9e5f27c7fb71c3aaa5d25ee" gracePeriod=30 Jan 20 17:03:33 crc kubenswrapper[4558]: I0120 17:03:33.286180 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:33 crc kubenswrapper[4558]: I0120 17:03:33.324524 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=2.116154357 podStartE2EDuration="7.324453059s" podCreationTimestamp="2026-01-20 17:03:26 +0000 UTC" firstStartedPulling="2026-01-20 17:03:26.924657029 +0000 UTC m=+1300.684994996" lastFinishedPulling="2026-01-20 17:03:32.132955731 +0000 UTC m=+1305.893293698" observedRunningTime="2026-01-20 17:03:33.31337588 +0000 UTC m=+1307.073713837" watchObservedRunningTime="2026-01-20 17:03:33.324453059 +0000 UTC m=+1307.084791026" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.120071 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.199120 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ed3e3e29-3089-4ee3-9cd6-2bfe6024822e-sg-core-conf-yaml\") pod \"ed3e3e29-3089-4ee3-9cd6-2bfe6024822e\" (UID: \"ed3e3e29-3089-4ee3-9cd6-2bfe6024822e\") " Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.199526 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/ed3e3e29-3089-4ee3-9cd6-2bfe6024822e-ceilometer-tls-certs\") pod \"ed3e3e29-3089-4ee3-9cd6-2bfe6024822e\" (UID: \"ed3e3e29-3089-4ee3-9cd6-2bfe6024822e\") " Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.199628 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ed3e3e29-3089-4ee3-9cd6-2bfe6024822e-log-httpd\") pod \"ed3e3e29-3089-4ee3-9cd6-2bfe6024822e\" (UID: \"ed3e3e29-3089-4ee3-9cd6-2bfe6024822e\") " Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.199674 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed3e3e29-3089-4ee3-9cd6-2bfe6024822e-combined-ca-bundle\") pod \"ed3e3e29-3089-4ee3-9cd6-2bfe6024822e\" (UID: \"ed3e3e29-3089-4ee3-9cd6-2bfe6024822e\") " Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.199843 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed3e3e29-3089-4ee3-9cd6-2bfe6024822e-config-data\") pod \"ed3e3e29-3089-4ee3-9cd6-2bfe6024822e\" (UID: \"ed3e3e29-3089-4ee3-9cd6-2bfe6024822e\") " Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.199960 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p4b2c\" (UniqueName: 
\"kubernetes.io/projected/ed3e3e29-3089-4ee3-9cd6-2bfe6024822e-kube-api-access-p4b2c\") pod \"ed3e3e29-3089-4ee3-9cd6-2bfe6024822e\" (UID: \"ed3e3e29-3089-4ee3-9cd6-2bfe6024822e\") " Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.199989 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ed3e3e29-3089-4ee3-9cd6-2bfe6024822e-run-httpd\") pod \"ed3e3e29-3089-4ee3-9cd6-2bfe6024822e\" (UID: \"ed3e3e29-3089-4ee3-9cd6-2bfe6024822e\") " Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.200020 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ed3e3e29-3089-4ee3-9cd6-2bfe6024822e-scripts\") pod \"ed3e3e29-3089-4ee3-9cd6-2bfe6024822e\" (UID: \"ed3e3e29-3089-4ee3-9cd6-2bfe6024822e\") " Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.200562 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ed3e3e29-3089-4ee3-9cd6-2bfe6024822e-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "ed3e3e29-3089-4ee3-9cd6-2bfe6024822e" (UID: "ed3e3e29-3089-4ee3-9cd6-2bfe6024822e"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.200768 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ed3e3e29-3089-4ee3-9cd6-2bfe6024822e-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "ed3e3e29-3089-4ee3-9cd6-2bfe6024822e" (UID: "ed3e3e29-3089-4ee3-9cd6-2bfe6024822e"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.200798 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ed3e3e29-3089-4ee3-9cd6-2bfe6024822e-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.205349 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ed3e3e29-3089-4ee3-9cd6-2bfe6024822e-scripts" (OuterVolumeSpecName: "scripts") pod "ed3e3e29-3089-4ee3-9cd6-2bfe6024822e" (UID: "ed3e3e29-3089-4ee3-9cd6-2bfe6024822e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.205415 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ed3e3e29-3089-4ee3-9cd6-2bfe6024822e-kube-api-access-p4b2c" (OuterVolumeSpecName: "kube-api-access-p4b2c") pod "ed3e3e29-3089-4ee3-9cd6-2bfe6024822e" (UID: "ed3e3e29-3089-4ee3-9cd6-2bfe6024822e"). InnerVolumeSpecName "kube-api-access-p4b2c". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.229309 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ed3e3e29-3089-4ee3-9cd6-2bfe6024822e-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "ed3e3e29-3089-4ee3-9cd6-2bfe6024822e" (UID: "ed3e3e29-3089-4ee3-9cd6-2bfe6024822e"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.234296 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ed3e3e29-3089-4ee3-9cd6-2bfe6024822e-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "ed3e3e29-3089-4ee3-9cd6-2bfe6024822e" (UID: "ed3e3e29-3089-4ee3-9cd6-2bfe6024822e"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.257000 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ed3e3e29-3089-4ee3-9cd6-2bfe6024822e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ed3e3e29-3089-4ee3-9cd6-2bfe6024822e" (UID: "ed3e3e29-3089-4ee3-9cd6-2bfe6024822e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.266266 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ed3e3e29-3089-4ee3-9cd6-2bfe6024822e-config-data" (OuterVolumeSpecName: "config-data") pod "ed3e3e29-3089-4ee3-9cd6-2bfe6024822e" (UID: "ed3e3e29-3089-4ee3-9cd6-2bfe6024822e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.297537 4558 generic.go:334] "Generic (PLEG): container finished" podID="ed3e3e29-3089-4ee3-9cd6-2bfe6024822e" containerID="a349550fd5a4a13d56641574f32fe6888b3a0a5869ba322fe163d8749ed33acc" exitCode=0 Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.297572 4558 generic.go:334] "Generic (PLEG): container finished" podID="ed3e3e29-3089-4ee3-9cd6-2bfe6024822e" containerID="dc07f35c47cd97b9f92c97b82d78090c6222ac01d9e5f27c7fb71c3aaa5d25ee" exitCode=2 Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.297581 4558 generic.go:334] "Generic (PLEG): container finished" podID="ed3e3e29-3089-4ee3-9cd6-2bfe6024822e" containerID="597f6f2f77f2e91bff6d9bd8e7e014f7a39687a17007d0b5e8f73f9db8e20fba" exitCode=0 Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.297593 4558 generic.go:334] "Generic (PLEG): container finished" podID="ed3e3e29-3089-4ee3-9cd6-2bfe6024822e" containerID="3c84dee1556591cd3998c2b76ae0683f6ba2f86529f3023972cb334f19db22a5" exitCode=0 Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.297608 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"ed3e3e29-3089-4ee3-9cd6-2bfe6024822e","Type":"ContainerDied","Data":"a349550fd5a4a13d56641574f32fe6888b3a0a5869ba322fe163d8749ed33acc"} Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.297667 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"ed3e3e29-3089-4ee3-9cd6-2bfe6024822e","Type":"ContainerDied","Data":"dc07f35c47cd97b9f92c97b82d78090c6222ac01d9e5f27c7fb71c3aaa5d25ee"} Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.297683 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"ed3e3e29-3089-4ee3-9cd6-2bfe6024822e","Type":"ContainerDied","Data":"597f6f2f77f2e91bff6d9bd8e7e014f7a39687a17007d0b5e8f73f9db8e20fba"} Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.297693 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" 
event={"ID":"ed3e3e29-3089-4ee3-9cd6-2bfe6024822e","Type":"ContainerDied","Data":"3c84dee1556591cd3998c2b76ae0683f6ba2f86529f3023972cb334f19db22a5"} Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.297702 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"ed3e3e29-3089-4ee3-9cd6-2bfe6024822e","Type":"ContainerDied","Data":"107511e0c26dc982563b99121c7de0b93cfd7faa95eea66d5b4bd4a7412da3bb"} Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.297720 4558 scope.go:117] "RemoveContainer" containerID="a349550fd5a4a13d56641574f32fe6888b3a0a5869ba322fe163d8749ed33acc" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.298700 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.303729 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed3e3e29-3089-4ee3-9cd6-2bfe6024822e-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.303775 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p4b2c\" (UniqueName: \"kubernetes.io/projected/ed3e3e29-3089-4ee3-9cd6-2bfe6024822e-kube-api-access-p4b2c\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.303788 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ed3e3e29-3089-4ee3-9cd6-2bfe6024822e-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.303799 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ed3e3e29-3089-4ee3-9cd6-2bfe6024822e-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.303812 4558 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/ed3e3e29-3089-4ee3-9cd6-2bfe6024822e-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.303821 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ed3e3e29-3089-4ee3-9cd6-2bfe6024822e-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.303830 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed3e3e29-3089-4ee3-9cd6-2bfe6024822e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.316004 4558 scope.go:117] "RemoveContainer" containerID="dc07f35c47cd97b9f92c97b82d78090c6222ac01d9e5f27c7fb71c3aaa5d25ee" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.336304 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.343392 4558 scope.go:117] "RemoveContainer" containerID="597f6f2f77f2e91bff6d9bd8e7e014f7a39687a17007d0b5e8f73f9db8e20fba" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.353065 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.358844 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:03:34 crc 
kubenswrapper[4558]: E0120 17:03:34.359295 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed3e3e29-3089-4ee3-9cd6-2bfe6024822e" containerName="ceilometer-notification-agent" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.359316 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed3e3e29-3089-4ee3-9cd6-2bfe6024822e" containerName="ceilometer-notification-agent" Jan 20 17:03:34 crc kubenswrapper[4558]: E0120 17:03:34.359342 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed3e3e29-3089-4ee3-9cd6-2bfe6024822e" containerName="sg-core" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.359350 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed3e3e29-3089-4ee3-9cd6-2bfe6024822e" containerName="sg-core" Jan 20 17:03:34 crc kubenswrapper[4558]: E0120 17:03:34.359358 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed3e3e29-3089-4ee3-9cd6-2bfe6024822e" containerName="proxy-httpd" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.359364 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed3e3e29-3089-4ee3-9cd6-2bfe6024822e" containerName="proxy-httpd" Jan 20 17:03:34 crc kubenswrapper[4558]: E0120 17:03:34.359371 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed3e3e29-3089-4ee3-9cd6-2bfe6024822e" containerName="ceilometer-central-agent" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.359377 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed3e3e29-3089-4ee3-9cd6-2bfe6024822e" containerName="ceilometer-central-agent" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.359525 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed3e3e29-3089-4ee3-9cd6-2bfe6024822e" containerName="sg-core" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.359539 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed3e3e29-3089-4ee3-9cd6-2bfe6024822e" containerName="proxy-httpd" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.359553 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed3e3e29-3089-4ee3-9cd6-2bfe6024822e" containerName="ceilometer-central-agent" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.359562 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed3e3e29-3089-4ee3-9cd6-2bfe6024822e" containerName="ceilometer-notification-agent" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.361117 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.368404 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.368592 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.368794 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ceilometer-internal-svc" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.372533 4558 scope.go:117] "RemoveContainer" containerID="3c84dee1556591cd3998c2b76ae0683f6ba2f86529f3023972cb334f19db22a5" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.374410 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.396459 4558 scope.go:117] "RemoveContainer" containerID="a349550fd5a4a13d56641574f32fe6888b3a0a5869ba322fe163d8749ed33acc" Jan 20 17:03:34 crc kubenswrapper[4558]: E0120 17:03:34.396774 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a349550fd5a4a13d56641574f32fe6888b3a0a5869ba322fe163d8749ed33acc\": container with ID starting with a349550fd5a4a13d56641574f32fe6888b3a0a5869ba322fe163d8749ed33acc not found: ID does not exist" containerID="a349550fd5a4a13d56641574f32fe6888b3a0a5869ba322fe163d8749ed33acc" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.396803 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a349550fd5a4a13d56641574f32fe6888b3a0a5869ba322fe163d8749ed33acc"} err="failed to get container status \"a349550fd5a4a13d56641574f32fe6888b3a0a5869ba322fe163d8749ed33acc\": rpc error: code = NotFound desc = could not find container \"a349550fd5a4a13d56641574f32fe6888b3a0a5869ba322fe163d8749ed33acc\": container with ID starting with a349550fd5a4a13d56641574f32fe6888b3a0a5869ba322fe163d8749ed33acc not found: ID does not exist" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.396826 4558 scope.go:117] "RemoveContainer" containerID="dc07f35c47cd97b9f92c97b82d78090c6222ac01d9e5f27c7fb71c3aaa5d25ee" Jan 20 17:03:34 crc kubenswrapper[4558]: E0120 17:03:34.397544 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dc07f35c47cd97b9f92c97b82d78090c6222ac01d9e5f27c7fb71c3aaa5d25ee\": container with ID starting with dc07f35c47cd97b9f92c97b82d78090c6222ac01d9e5f27c7fb71c3aaa5d25ee not found: ID does not exist" containerID="dc07f35c47cd97b9f92c97b82d78090c6222ac01d9e5f27c7fb71c3aaa5d25ee" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.397588 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dc07f35c47cd97b9f92c97b82d78090c6222ac01d9e5f27c7fb71c3aaa5d25ee"} err="failed to get container status \"dc07f35c47cd97b9f92c97b82d78090c6222ac01d9e5f27c7fb71c3aaa5d25ee\": rpc error: code = NotFound desc = could not find container \"dc07f35c47cd97b9f92c97b82d78090c6222ac01d9e5f27c7fb71c3aaa5d25ee\": container with ID starting with dc07f35c47cd97b9f92c97b82d78090c6222ac01d9e5f27c7fb71c3aaa5d25ee not found: ID does not exist" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.397617 4558 scope.go:117] "RemoveContainer" 
containerID="597f6f2f77f2e91bff6d9bd8e7e014f7a39687a17007d0b5e8f73f9db8e20fba" Jan 20 17:03:34 crc kubenswrapper[4558]: E0120 17:03:34.398596 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"597f6f2f77f2e91bff6d9bd8e7e014f7a39687a17007d0b5e8f73f9db8e20fba\": container with ID starting with 597f6f2f77f2e91bff6d9bd8e7e014f7a39687a17007d0b5e8f73f9db8e20fba not found: ID does not exist" containerID="597f6f2f77f2e91bff6d9bd8e7e014f7a39687a17007d0b5e8f73f9db8e20fba" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.398628 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"597f6f2f77f2e91bff6d9bd8e7e014f7a39687a17007d0b5e8f73f9db8e20fba"} err="failed to get container status \"597f6f2f77f2e91bff6d9bd8e7e014f7a39687a17007d0b5e8f73f9db8e20fba\": rpc error: code = NotFound desc = could not find container \"597f6f2f77f2e91bff6d9bd8e7e014f7a39687a17007d0b5e8f73f9db8e20fba\": container with ID starting with 597f6f2f77f2e91bff6d9bd8e7e014f7a39687a17007d0b5e8f73f9db8e20fba not found: ID does not exist" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.398653 4558 scope.go:117] "RemoveContainer" containerID="3c84dee1556591cd3998c2b76ae0683f6ba2f86529f3023972cb334f19db22a5" Jan 20 17:03:34 crc kubenswrapper[4558]: E0120 17:03:34.399275 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3c84dee1556591cd3998c2b76ae0683f6ba2f86529f3023972cb334f19db22a5\": container with ID starting with 3c84dee1556591cd3998c2b76ae0683f6ba2f86529f3023972cb334f19db22a5 not found: ID does not exist" containerID="3c84dee1556591cd3998c2b76ae0683f6ba2f86529f3023972cb334f19db22a5" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.399304 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3c84dee1556591cd3998c2b76ae0683f6ba2f86529f3023972cb334f19db22a5"} err="failed to get container status \"3c84dee1556591cd3998c2b76ae0683f6ba2f86529f3023972cb334f19db22a5\": rpc error: code = NotFound desc = could not find container \"3c84dee1556591cd3998c2b76ae0683f6ba2f86529f3023972cb334f19db22a5\": container with ID starting with 3c84dee1556591cd3998c2b76ae0683f6ba2f86529f3023972cb334f19db22a5 not found: ID does not exist" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.399320 4558 scope.go:117] "RemoveContainer" containerID="a349550fd5a4a13d56641574f32fe6888b3a0a5869ba322fe163d8749ed33acc" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.399576 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a349550fd5a4a13d56641574f32fe6888b3a0a5869ba322fe163d8749ed33acc"} err="failed to get container status \"a349550fd5a4a13d56641574f32fe6888b3a0a5869ba322fe163d8749ed33acc\": rpc error: code = NotFound desc = could not find container \"a349550fd5a4a13d56641574f32fe6888b3a0a5869ba322fe163d8749ed33acc\": container with ID starting with a349550fd5a4a13d56641574f32fe6888b3a0a5869ba322fe163d8749ed33acc not found: ID does not exist" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.399598 4558 scope.go:117] "RemoveContainer" containerID="dc07f35c47cd97b9f92c97b82d78090c6222ac01d9e5f27c7fb71c3aaa5d25ee" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.399791 4558 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"dc07f35c47cd97b9f92c97b82d78090c6222ac01d9e5f27c7fb71c3aaa5d25ee"} err="failed to get container status \"dc07f35c47cd97b9f92c97b82d78090c6222ac01d9e5f27c7fb71c3aaa5d25ee\": rpc error: code = NotFound desc = could not find container \"dc07f35c47cd97b9f92c97b82d78090c6222ac01d9e5f27c7fb71c3aaa5d25ee\": container with ID starting with dc07f35c47cd97b9f92c97b82d78090c6222ac01d9e5f27c7fb71c3aaa5d25ee not found: ID does not exist" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.399809 4558 scope.go:117] "RemoveContainer" containerID="597f6f2f77f2e91bff6d9bd8e7e014f7a39687a17007d0b5e8f73f9db8e20fba" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.399986 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"597f6f2f77f2e91bff6d9bd8e7e014f7a39687a17007d0b5e8f73f9db8e20fba"} err="failed to get container status \"597f6f2f77f2e91bff6d9bd8e7e014f7a39687a17007d0b5e8f73f9db8e20fba\": rpc error: code = NotFound desc = could not find container \"597f6f2f77f2e91bff6d9bd8e7e014f7a39687a17007d0b5e8f73f9db8e20fba\": container with ID starting with 597f6f2f77f2e91bff6d9bd8e7e014f7a39687a17007d0b5e8f73f9db8e20fba not found: ID does not exist" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.400005 4558 scope.go:117] "RemoveContainer" containerID="3c84dee1556591cd3998c2b76ae0683f6ba2f86529f3023972cb334f19db22a5" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.400477 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3c84dee1556591cd3998c2b76ae0683f6ba2f86529f3023972cb334f19db22a5"} err="failed to get container status \"3c84dee1556591cd3998c2b76ae0683f6ba2f86529f3023972cb334f19db22a5\": rpc error: code = NotFound desc = could not find container \"3c84dee1556591cd3998c2b76ae0683f6ba2f86529f3023972cb334f19db22a5\": container with ID starting with 3c84dee1556591cd3998c2b76ae0683f6ba2f86529f3023972cb334f19db22a5 not found: ID does not exist" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.400507 4558 scope.go:117] "RemoveContainer" containerID="a349550fd5a4a13d56641574f32fe6888b3a0a5869ba322fe163d8749ed33acc" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.400866 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a349550fd5a4a13d56641574f32fe6888b3a0a5869ba322fe163d8749ed33acc"} err="failed to get container status \"a349550fd5a4a13d56641574f32fe6888b3a0a5869ba322fe163d8749ed33acc\": rpc error: code = NotFound desc = could not find container \"a349550fd5a4a13d56641574f32fe6888b3a0a5869ba322fe163d8749ed33acc\": container with ID starting with a349550fd5a4a13d56641574f32fe6888b3a0a5869ba322fe163d8749ed33acc not found: ID does not exist" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.400886 4558 scope.go:117] "RemoveContainer" containerID="dc07f35c47cd97b9f92c97b82d78090c6222ac01d9e5f27c7fb71c3aaa5d25ee" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.401090 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dc07f35c47cd97b9f92c97b82d78090c6222ac01d9e5f27c7fb71c3aaa5d25ee"} err="failed to get container status \"dc07f35c47cd97b9f92c97b82d78090c6222ac01d9e5f27c7fb71c3aaa5d25ee\": rpc error: code = NotFound desc = could not find container \"dc07f35c47cd97b9f92c97b82d78090c6222ac01d9e5f27c7fb71c3aaa5d25ee\": container with ID starting with dc07f35c47cd97b9f92c97b82d78090c6222ac01d9e5f27c7fb71c3aaa5d25ee not found: ID does not exist" Jan 
20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.401114 4558 scope.go:117] "RemoveContainer" containerID="597f6f2f77f2e91bff6d9bd8e7e014f7a39687a17007d0b5e8f73f9db8e20fba" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.401864 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"597f6f2f77f2e91bff6d9bd8e7e014f7a39687a17007d0b5e8f73f9db8e20fba"} err="failed to get container status \"597f6f2f77f2e91bff6d9bd8e7e014f7a39687a17007d0b5e8f73f9db8e20fba\": rpc error: code = NotFound desc = could not find container \"597f6f2f77f2e91bff6d9bd8e7e014f7a39687a17007d0b5e8f73f9db8e20fba\": container with ID starting with 597f6f2f77f2e91bff6d9bd8e7e014f7a39687a17007d0b5e8f73f9db8e20fba not found: ID does not exist" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.401907 4558 scope.go:117] "RemoveContainer" containerID="3c84dee1556591cd3998c2b76ae0683f6ba2f86529f3023972cb334f19db22a5" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.402239 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3c84dee1556591cd3998c2b76ae0683f6ba2f86529f3023972cb334f19db22a5"} err="failed to get container status \"3c84dee1556591cd3998c2b76ae0683f6ba2f86529f3023972cb334f19db22a5\": rpc error: code = NotFound desc = could not find container \"3c84dee1556591cd3998c2b76ae0683f6ba2f86529f3023972cb334f19db22a5\": container with ID starting with 3c84dee1556591cd3998c2b76ae0683f6ba2f86529f3023972cb334f19db22a5 not found: ID does not exist" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.402260 4558 scope.go:117] "RemoveContainer" containerID="a349550fd5a4a13d56641574f32fe6888b3a0a5869ba322fe163d8749ed33acc" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.402538 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a349550fd5a4a13d56641574f32fe6888b3a0a5869ba322fe163d8749ed33acc"} err="failed to get container status \"a349550fd5a4a13d56641574f32fe6888b3a0a5869ba322fe163d8749ed33acc\": rpc error: code = NotFound desc = could not find container \"a349550fd5a4a13d56641574f32fe6888b3a0a5869ba322fe163d8749ed33acc\": container with ID starting with a349550fd5a4a13d56641574f32fe6888b3a0a5869ba322fe163d8749ed33acc not found: ID does not exist" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.402564 4558 scope.go:117] "RemoveContainer" containerID="dc07f35c47cd97b9f92c97b82d78090c6222ac01d9e5f27c7fb71c3aaa5d25ee" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.402805 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dc07f35c47cd97b9f92c97b82d78090c6222ac01d9e5f27c7fb71c3aaa5d25ee"} err="failed to get container status \"dc07f35c47cd97b9f92c97b82d78090c6222ac01d9e5f27c7fb71c3aaa5d25ee\": rpc error: code = NotFound desc = could not find container \"dc07f35c47cd97b9f92c97b82d78090c6222ac01d9e5f27c7fb71c3aaa5d25ee\": container with ID starting with dc07f35c47cd97b9f92c97b82d78090c6222ac01d9e5f27c7fb71c3aaa5d25ee not found: ID does not exist" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.402824 4558 scope.go:117] "RemoveContainer" containerID="597f6f2f77f2e91bff6d9bd8e7e014f7a39687a17007d0b5e8f73f9db8e20fba" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.403046 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"597f6f2f77f2e91bff6d9bd8e7e014f7a39687a17007d0b5e8f73f9db8e20fba"} err="failed to get container status 
\"597f6f2f77f2e91bff6d9bd8e7e014f7a39687a17007d0b5e8f73f9db8e20fba\": rpc error: code = NotFound desc = could not find container \"597f6f2f77f2e91bff6d9bd8e7e014f7a39687a17007d0b5e8f73f9db8e20fba\": container with ID starting with 597f6f2f77f2e91bff6d9bd8e7e014f7a39687a17007d0b5e8f73f9db8e20fba not found: ID does not exist" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.403069 4558 scope.go:117] "RemoveContainer" containerID="3c84dee1556591cd3998c2b76ae0683f6ba2f86529f3023972cb334f19db22a5" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.403291 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3c84dee1556591cd3998c2b76ae0683f6ba2f86529f3023972cb334f19db22a5"} err="failed to get container status \"3c84dee1556591cd3998c2b76ae0683f6ba2f86529f3023972cb334f19db22a5\": rpc error: code = NotFound desc = could not find container \"3c84dee1556591cd3998c2b76ae0683f6ba2f86529f3023972cb334f19db22a5\": container with ID starting with 3c84dee1556591cd3998c2b76ae0683f6ba2f86529f3023972cb334f19db22a5 not found: ID does not exist" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.509234 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ef93decf-78a4-4439-bac3-36f5720d0589-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ef93decf-78a4-4439-bac3-36f5720d0589\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.509271 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ef93decf-78a4-4439-bac3-36f5720d0589-scripts\") pod \"ceilometer-0\" (UID: \"ef93decf-78a4-4439-bac3-36f5720d0589\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.509316 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ef93decf-78a4-4439-bac3-36f5720d0589-log-httpd\") pod \"ceilometer-0\" (UID: \"ef93decf-78a4-4439-bac3-36f5720d0589\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.509343 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sq64x\" (UniqueName: \"kubernetes.io/projected/ef93decf-78a4-4439-bac3-36f5720d0589-kube-api-access-sq64x\") pod \"ceilometer-0\" (UID: \"ef93decf-78a4-4439-bac3-36f5720d0589\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.509363 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef93decf-78a4-4439-bac3-36f5720d0589-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ef93decf-78a4-4439-bac3-36f5720d0589\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.509397 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/ef93decf-78a4-4439-bac3-36f5720d0589-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"ef93decf-78a4-4439-bac3-36f5720d0589\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.509410 4558 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef93decf-78a4-4439-bac3-36f5720d0589-config-data\") pod \"ceilometer-0\" (UID: \"ef93decf-78a4-4439-bac3-36f5720d0589\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.509426 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ef93decf-78a4-4439-bac3-36f5720d0589-run-httpd\") pod \"ceilometer-0\" (UID: \"ef93decf-78a4-4439-bac3-36f5720d0589\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.574035 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ed3e3e29-3089-4ee3-9cd6-2bfe6024822e" path="/var/lib/kubelet/pods/ed3e3e29-3089-4ee3-9cd6-2bfe6024822e/volumes" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.611122 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef93decf-78a4-4439-bac3-36f5720d0589-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ef93decf-78a4-4439-bac3-36f5720d0589\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.611216 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/ef93decf-78a4-4439-bac3-36f5720d0589-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"ef93decf-78a4-4439-bac3-36f5720d0589\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.611241 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef93decf-78a4-4439-bac3-36f5720d0589-config-data\") pod \"ceilometer-0\" (UID: \"ef93decf-78a4-4439-bac3-36f5720d0589\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.611269 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ef93decf-78a4-4439-bac3-36f5720d0589-run-httpd\") pod \"ceilometer-0\" (UID: \"ef93decf-78a4-4439-bac3-36f5720d0589\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.611433 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ef93decf-78a4-4439-bac3-36f5720d0589-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ef93decf-78a4-4439-bac3-36f5720d0589\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.611465 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ef93decf-78a4-4439-bac3-36f5720d0589-scripts\") pod \"ceilometer-0\" (UID: \"ef93decf-78a4-4439-bac3-36f5720d0589\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.611494 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ef93decf-78a4-4439-bac3-36f5720d0589-log-httpd\") pod \"ceilometer-0\" (UID: \"ef93decf-78a4-4439-bac3-36f5720d0589\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.611535 4558 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sq64x\" (UniqueName: \"kubernetes.io/projected/ef93decf-78a4-4439-bac3-36f5720d0589-kube-api-access-sq64x\") pod \"ceilometer-0\" (UID: \"ef93decf-78a4-4439-bac3-36f5720d0589\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.611932 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ef93decf-78a4-4439-bac3-36f5720d0589-run-httpd\") pod \"ceilometer-0\" (UID: \"ef93decf-78a4-4439-bac3-36f5720d0589\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.612502 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ef93decf-78a4-4439-bac3-36f5720d0589-log-httpd\") pod \"ceilometer-0\" (UID: \"ef93decf-78a4-4439-bac3-36f5720d0589\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.614300 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef93decf-78a4-4439-bac3-36f5720d0589-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ef93decf-78a4-4439-bac3-36f5720d0589\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.614580 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ef93decf-78a4-4439-bac3-36f5720d0589-scripts\") pod \"ceilometer-0\" (UID: \"ef93decf-78a4-4439-bac3-36f5720d0589\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.614833 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ef93decf-78a4-4439-bac3-36f5720d0589-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ef93decf-78a4-4439-bac3-36f5720d0589\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.615558 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef93decf-78a4-4439-bac3-36f5720d0589-config-data\") pod \"ceilometer-0\" (UID: \"ef93decf-78a4-4439-bac3-36f5720d0589\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.615843 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/ef93decf-78a4-4439-bac3-36f5720d0589-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"ef93decf-78a4-4439-bac3-36f5720d0589\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.625838 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sq64x\" (UniqueName: \"kubernetes.io/projected/ef93decf-78a4-4439-bac3-36f5720d0589-kube-api-access-sq64x\") pod \"ceilometer-0\" (UID: \"ef93decf-78a4-4439-bac3-36f5720d0589\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:34 crc kubenswrapper[4558]: I0120 17:03:34.679527 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:35 crc kubenswrapper[4558]: I0120 17:03:35.070892 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:03:35 crc kubenswrapper[4558]: I0120 17:03:35.307552 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"ef93decf-78a4-4439-bac3-36f5720d0589","Type":"ContainerStarted","Data":"985488fa54dbc7d629dc86759801222b1f8b940a3095fdd87504369d6b143582"} Jan 20 17:03:38 crc kubenswrapper[4558]: I0120 17:03:38.330959 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"ef93decf-78a4-4439-bac3-36f5720d0589","Type":"ContainerStarted","Data":"aceb6a1c812a55b8382e720fd667b1bce3e5e8b72e63d66d5d61404d004d5633"} Jan 20 17:03:38 crc kubenswrapper[4558]: I0120 17:03:38.531823 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:03:38 crc kubenswrapper[4558]: I0120 17:03:38.531897 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:03:38 crc kubenswrapper[4558]: I0120 17:03:38.561815 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:03:38 crc kubenswrapper[4558]: I0120 17:03:38.572945 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:03:39 crc kubenswrapper[4558]: I0120 17:03:39.011510 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:03:39 crc kubenswrapper[4558]: I0120 17:03:39.011809 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:03:39 crc kubenswrapper[4558]: I0120 17:03:39.036595 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:03:39 crc kubenswrapper[4558]: I0120 17:03:39.046399 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:03:39 crc kubenswrapper[4558]: I0120 17:03:39.340352 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"ef93decf-78a4-4439-bac3-36f5720d0589","Type":"ContainerStarted","Data":"d7039d4cfe22846fb1df116f46327e92f5a4fd9f2f28553fea21b2bc819b0c7e"} Jan 20 17:03:39 crc kubenswrapper[4558]: I0120 17:03:39.341763 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:03:39 crc kubenswrapper[4558]: I0120 17:03:39.341798 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:03:39 crc kubenswrapper[4558]: I0120 17:03:39.341809 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:03:39 crc kubenswrapper[4558]: I0120 17:03:39.341818 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:03:40 crc kubenswrapper[4558]: I0120 17:03:40.350432 4558 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"ef93decf-78a4-4439-bac3-36f5720d0589","Type":"ContainerStarted","Data":"063770fda370183252496062f7a4063e3aa904aa813a1af436066509d93e1b0d"} Jan 20 17:03:40 crc kubenswrapper[4558]: I0120 17:03:40.899697 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:03:40 crc kubenswrapper[4558]: I0120 17:03:40.907362 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:03:41 crc kubenswrapper[4558]: I0120 17:03:41.007934 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:03:41 crc kubenswrapper[4558]: I0120 17:03:41.011551 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:03:41 crc kubenswrapper[4558]: I0120 17:03:41.656996 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.121386 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell0-cell-mapping-6ssx9"] Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.123117 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-6ssx9" Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.125192 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-manage-scripts" Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.125328 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-manage-config-data" Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.132541 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-cell-mapping-6ssx9"] Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.235904 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.237420 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.241643 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-api-config-data" Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.266465 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.303033 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.303218 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tqp8g\" (UniqueName: \"kubernetes.io/projected/507235e3-a321-4617-aba7-eec9b5ca1cf0-kube-api-access-tqp8g\") pod \"nova-cell0-cell-mapping-6ssx9\" (UID: \"507235e3-a321-4617-aba7-eec9b5ca1cf0\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-6ssx9" Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.303549 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/507235e3-a321-4617-aba7-eec9b5ca1cf0-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-6ssx9\" (UID: \"507235e3-a321-4617-aba7-eec9b5ca1cf0\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-6ssx9" Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.303671 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/507235e3-a321-4617-aba7-eec9b5ca1cf0-config-data\") pod \"nova-cell0-cell-mapping-6ssx9\" (UID: \"507235e3-a321-4617-aba7-eec9b5ca1cf0\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-6ssx9" Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.303730 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/507235e3-a321-4617-aba7-eec9b5ca1cf0-scripts\") pod \"nova-cell0-cell-mapping-6ssx9\" (UID: \"507235e3-a321-4617-aba7-eec9b5ca1cf0\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-6ssx9" Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.304378 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.306324 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-metadata-config-data" Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.366507 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.373318 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.374409 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.380033 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-scheduler-config-data" Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.381154 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.405725 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/21607923-3f1e-41c5-86da-43a333ac050c-logs\") pod \"nova-api-0\" (UID: \"21607923-3f1e-41c5-86da-43a333ac050c\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.405778 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g2p6n\" (UniqueName: \"kubernetes.io/projected/21607923-3f1e-41c5-86da-43a333ac050c-kube-api-access-g2p6n\") pod \"nova-api-0\" (UID: \"21607923-3f1e-41c5-86da-43a333ac050c\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.405821 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21607923-3f1e-41c5-86da-43a333ac050c-config-data\") pod \"nova-api-0\" (UID: \"21607923-3f1e-41c5-86da-43a333ac050c\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.405876 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21607923-3f1e-41c5-86da-43a333ac050c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"21607923-3f1e-41c5-86da-43a333ac050c\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.405907 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1c035110-390f-44d3-ad09-a242f3bb37b1-logs\") pod \"nova-metadata-0\" (UID: \"1c035110-390f-44d3-ad09-a242f3bb37b1\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.405928 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/507235e3-a321-4617-aba7-eec9b5ca1cf0-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-6ssx9\" (UID: \"507235e3-a321-4617-aba7-eec9b5ca1cf0\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-6ssx9" Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.405956 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fbpnt\" (UniqueName: \"kubernetes.io/projected/1c035110-390f-44d3-ad09-a242f3bb37b1-kube-api-access-fbpnt\") pod \"nova-metadata-0\" (UID: \"1c035110-390f-44d3-ad09-a242f3bb37b1\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.405980 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c035110-390f-44d3-ad09-a242f3bb37b1-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"1c035110-390f-44d3-ad09-a242f3bb37b1\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:03:42 crc 
kubenswrapper[4558]: I0120 17:03:42.406014 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/507235e3-a321-4617-aba7-eec9b5ca1cf0-config-data\") pod \"nova-cell0-cell-mapping-6ssx9\" (UID: \"507235e3-a321-4617-aba7-eec9b5ca1cf0\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-6ssx9" Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.406036 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c035110-390f-44d3-ad09-a242f3bb37b1-config-data\") pod \"nova-metadata-0\" (UID: \"1c035110-390f-44d3-ad09-a242f3bb37b1\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.406057 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/507235e3-a321-4617-aba7-eec9b5ca1cf0-scripts\") pod \"nova-cell0-cell-mapping-6ssx9\" (UID: \"507235e3-a321-4617-aba7-eec9b5ca1cf0\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-6ssx9" Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.406105 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tqp8g\" (UniqueName: \"kubernetes.io/projected/507235e3-a321-4617-aba7-eec9b5ca1cf0-kube-api-access-tqp8g\") pod \"nova-cell0-cell-mapping-6ssx9\" (UID: \"507235e3-a321-4617-aba7-eec9b5ca1cf0\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-6ssx9" Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.419697 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/507235e3-a321-4617-aba7-eec9b5ca1cf0-scripts\") pod \"nova-cell0-cell-mapping-6ssx9\" (UID: \"507235e3-a321-4617-aba7-eec9b5ca1cf0\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-6ssx9" Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.424057 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/507235e3-a321-4617-aba7-eec9b5ca1cf0-config-data\") pod \"nova-cell0-cell-mapping-6ssx9\" (UID: \"507235e3-a321-4617-aba7-eec9b5ca1cf0\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-6ssx9" Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.429090 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tqp8g\" (UniqueName: \"kubernetes.io/projected/507235e3-a321-4617-aba7-eec9b5ca1cf0-kube-api-access-tqp8g\") pod \"nova-cell0-cell-mapping-6ssx9\" (UID: \"507235e3-a321-4617-aba7-eec9b5ca1cf0\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-6ssx9" Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.438178 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.439216 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.441002 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-novncproxy-config-data" Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.447430 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/507235e3-a321-4617-aba7-eec9b5ca1cf0-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-6ssx9\" (UID: \"507235e3-a321-4617-aba7-eec9b5ca1cf0\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-6ssx9" Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.463402 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.508756 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fbpnt\" (UniqueName: \"kubernetes.io/projected/1c035110-390f-44d3-ad09-a242f3bb37b1-kube-api-access-fbpnt\") pod \"nova-metadata-0\" (UID: \"1c035110-390f-44d3-ad09-a242f3bb37b1\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.508817 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c035110-390f-44d3-ad09-a242f3bb37b1-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"1c035110-390f-44d3-ad09-a242f3bb37b1\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.508880 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b667ea48-0add-41f2-8c52-75d06c1312fd-config-data\") pod \"nova-scheduler-0\" (UID: \"b667ea48-0add-41f2-8c52-75d06c1312fd\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.508903 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c035110-390f-44d3-ad09-a242f3bb37b1-config-data\") pod \"nova-metadata-0\" (UID: \"1c035110-390f-44d3-ad09-a242f3bb37b1\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.508937 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tjcvp\" (UniqueName: \"kubernetes.io/projected/b667ea48-0add-41f2-8c52-75d06c1312fd-kube-api-access-tjcvp\") pod \"nova-scheduler-0\" (UID: \"b667ea48-0add-41f2-8c52-75d06c1312fd\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.509018 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/21607923-3f1e-41c5-86da-43a333ac050c-logs\") pod \"nova-api-0\" (UID: \"21607923-3f1e-41c5-86da-43a333ac050c\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.509046 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b667ea48-0add-41f2-8c52-75d06c1312fd-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"b667ea48-0add-41f2-8c52-75d06c1312fd\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:03:42 crc 
kubenswrapper[4558]: I0120 17:03:42.509067 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g2p6n\" (UniqueName: \"kubernetes.io/projected/21607923-3f1e-41c5-86da-43a333ac050c-kube-api-access-g2p6n\") pod \"nova-api-0\" (UID: \"21607923-3f1e-41c5-86da-43a333ac050c\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.509146 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21607923-3f1e-41c5-86da-43a333ac050c-config-data\") pod \"nova-api-0\" (UID: \"21607923-3f1e-41c5-86da-43a333ac050c\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.509232 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21607923-3f1e-41c5-86da-43a333ac050c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"21607923-3f1e-41c5-86da-43a333ac050c\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.509272 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1c035110-390f-44d3-ad09-a242f3bb37b1-logs\") pod \"nova-metadata-0\" (UID: \"1c035110-390f-44d3-ad09-a242f3bb37b1\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.517204 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/21607923-3f1e-41c5-86da-43a333ac050c-logs\") pod \"nova-api-0\" (UID: \"21607923-3f1e-41c5-86da-43a333ac050c\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.521338 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1c035110-390f-44d3-ad09-a242f3bb37b1-logs\") pod \"nova-metadata-0\" (UID: \"1c035110-390f-44d3-ad09-a242f3bb37b1\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.524140 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21607923-3f1e-41c5-86da-43a333ac050c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"21607923-3f1e-41c5-86da-43a333ac050c\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.527598 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c035110-390f-44d3-ad09-a242f3bb37b1-config-data\") pod \"nova-metadata-0\" (UID: \"1c035110-390f-44d3-ad09-a242f3bb37b1\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.534881 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c035110-390f-44d3-ad09-a242f3bb37b1-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"1c035110-390f-44d3-ad09-a242f3bb37b1\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.540203 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fbpnt\" (UniqueName: \"kubernetes.io/projected/1c035110-390f-44d3-ad09-a242f3bb37b1-kube-api-access-fbpnt\") pod \"nova-metadata-0\" (UID: \"1c035110-390f-44d3-ad09-a242f3bb37b1\") " 
pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.541033 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g2p6n\" (UniqueName: \"kubernetes.io/projected/21607923-3f1e-41c5-86da-43a333ac050c-kube-api-access-g2p6n\") pod \"nova-api-0\" (UID: \"21607923-3f1e-41c5-86da-43a333ac050c\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.544849 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21607923-3f1e-41c5-86da-43a333ac050c-config-data\") pod \"nova-api-0\" (UID: \"21607923-3f1e-41c5-86da-43a333ac050c\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.553947 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.612870 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b667ea48-0add-41f2-8c52-75d06c1312fd-config-data\") pod \"nova-scheduler-0\" (UID: \"b667ea48-0add-41f2-8c52-75d06c1312fd\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.612927 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tjcvp\" (UniqueName: \"kubernetes.io/projected/b667ea48-0add-41f2-8c52-75d06c1312fd-kube-api-access-tjcvp\") pod \"nova-scheduler-0\" (UID: \"b667ea48-0add-41f2-8c52-75d06c1312fd\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.612979 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2sr2f\" (UniqueName: \"kubernetes.io/projected/133f3509-3c18-4a9d-94fe-8026a3c77b12-kube-api-access-2sr2f\") pod \"nova-cell1-novncproxy-0\" (UID: \"133f3509-3c18-4a9d-94fe-8026a3c77b12\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.613014 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b667ea48-0add-41f2-8c52-75d06c1312fd-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"b667ea48-0add-41f2-8c52-75d06c1312fd\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.613081 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/133f3509-3c18-4a9d-94fe-8026a3c77b12-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"133f3509-3c18-4a9d-94fe-8026a3c77b12\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.613110 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/133f3509-3c18-4a9d-94fe-8026a3c77b12-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"133f3509-3c18-4a9d-94fe-8026a3c77b12\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.622247 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/b667ea48-0add-41f2-8c52-75d06c1312fd-config-data\") pod \"nova-scheduler-0\" (UID: \"b667ea48-0add-41f2-8c52-75d06c1312fd\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.624931 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b667ea48-0add-41f2-8c52-75d06c1312fd-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"b667ea48-0add-41f2-8c52-75d06c1312fd\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.633384 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.647024 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tjcvp\" (UniqueName: \"kubernetes.io/projected/b667ea48-0add-41f2-8c52-75d06c1312fd-kube-api-access-tjcvp\") pod \"nova-scheduler-0\" (UID: \"b667ea48-0add-41f2-8c52-75d06c1312fd\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.708828 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.719721 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2sr2f\" (UniqueName: \"kubernetes.io/projected/133f3509-3c18-4a9d-94fe-8026a3c77b12-kube-api-access-2sr2f\") pod \"nova-cell1-novncproxy-0\" (UID: \"133f3509-3c18-4a9d-94fe-8026a3c77b12\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.719956 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/133f3509-3c18-4a9d-94fe-8026a3c77b12-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"133f3509-3c18-4a9d-94fe-8026a3c77b12\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.720004 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/133f3509-3c18-4a9d-94fe-8026a3c77b12-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"133f3509-3c18-4a9d-94fe-8026a3c77b12\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.724864 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/133f3509-3c18-4a9d-94fe-8026a3c77b12-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"133f3509-3c18-4a9d-94fe-8026a3c77b12\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.728605 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/133f3509-3c18-4a9d-94fe-8026a3c77b12-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"133f3509-3c18-4a9d-94fe-8026a3c77b12\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.744342 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2sr2f\" (UniqueName: \"kubernetes.io/projected/133f3509-3c18-4a9d-94fe-8026a3c77b12-kube-api-access-2sr2f\") pod 
\"nova-cell1-novncproxy-0\" (UID: \"133f3509-3c18-4a9d-94fe-8026a3c77b12\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.744792 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-6ssx9" Jan 20 17:03:42 crc kubenswrapper[4558]: I0120 17:03:42.942405 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:03:43 crc kubenswrapper[4558]: W0120 17:03:43.164959 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod21607923_3f1e_41c5_86da_43a333ac050c.slice/crio-1b5ff821fc3b6c4d9a66b17b9b2efa207c59f4acdc4c53e4c94be46b20fe5b14 WatchSource:0}: Error finding container 1b5ff821fc3b6c4d9a66b17b9b2efa207c59f4acdc4c53e4c94be46b20fe5b14: Status 404 returned error can't find the container with id 1b5ff821fc3b6c4d9a66b17b9b2efa207c59f4acdc4c53e4c94be46b20fe5b14 Jan 20 17:03:43 crc kubenswrapper[4558]: I0120 17:03:43.166854 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:03:43 crc kubenswrapper[4558]: I0120 17:03:43.179431 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-db-sync-7d6hd"] Jan 20 17:03:43 crc kubenswrapper[4558]: I0120 17:03:43.180596 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-7d6hd" Jan 20 17:03:43 crc kubenswrapper[4558]: I0120 17:03:43.182148 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-conductor-config-data" Jan 20 17:03:43 crc kubenswrapper[4558]: I0120 17:03:43.182886 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-conductor-scripts" Jan 20 17:03:43 crc kubenswrapper[4558]: I0120 17:03:43.195315 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-db-sync-7d6hd"] Jan 20 17:03:43 crc kubenswrapper[4558]: I0120 17:03:43.206686 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:03:43 crc kubenswrapper[4558]: I0120 17:03:43.251031 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:03:43 crc kubenswrapper[4558]: I0120 17:03:43.333426 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hblf6\" (UniqueName: \"kubernetes.io/projected/769881ca-45e6-4943-9f14-51b0f78eea8c-kube-api-access-hblf6\") pod \"nova-cell1-conductor-db-sync-7d6hd\" (UID: \"769881ca-45e6-4943-9f14-51b0f78eea8c\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-7d6hd" Jan 20 17:03:43 crc kubenswrapper[4558]: I0120 17:03:43.333690 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/769881ca-45e6-4943-9f14-51b0f78eea8c-config-data\") pod \"nova-cell1-conductor-db-sync-7d6hd\" (UID: \"769881ca-45e6-4943-9f14-51b0f78eea8c\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-7d6hd" Jan 20 17:03:43 crc kubenswrapper[4558]: I0120 17:03:43.333730 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/769881ca-45e6-4943-9f14-51b0f78eea8c-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-7d6hd\" (UID: \"769881ca-45e6-4943-9f14-51b0f78eea8c\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-7d6hd" Jan 20 17:03:43 crc kubenswrapper[4558]: I0120 17:03:43.333842 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/769881ca-45e6-4943-9f14-51b0f78eea8c-scripts\") pod \"nova-cell1-conductor-db-sync-7d6hd\" (UID: \"769881ca-45e6-4943-9f14-51b0f78eea8c\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-7d6hd" Jan 20 17:03:43 crc kubenswrapper[4558]: I0120 17:03:43.384459 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"1c035110-390f-44d3-ad09-a242f3bb37b1","Type":"ContainerStarted","Data":"6ff2ee5356f11f8076e9a29cbd87ef8212f5ec180d673cd139f93b171a13a143"} Jan 20 17:03:43 crc kubenswrapper[4558]: I0120 17:03:43.387385 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"b667ea48-0add-41f2-8c52-75d06c1312fd","Type":"ContainerStarted","Data":"6cc28871a7b673e7396318c5f71406b90c7add304a379385cfae6dbcfc4465b2"} Jan 20 17:03:43 crc kubenswrapper[4558]: I0120 17:03:43.388752 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"21607923-3f1e-41c5-86da-43a333ac050c","Type":"ContainerStarted","Data":"70ca80861b8f90aed6f9f75f21e3b9fd92e85edd4b224c3ab9a29e62e7191a18"} Jan 20 17:03:43 crc kubenswrapper[4558]: I0120 17:03:43.388774 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"21607923-3f1e-41c5-86da-43a333ac050c","Type":"ContainerStarted","Data":"1b5ff821fc3b6c4d9a66b17b9b2efa207c59f4acdc4c53e4c94be46b20fe5b14"} Jan 20 17:03:43 crc kubenswrapper[4558]: I0120 17:03:43.391269 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"ef93decf-78a4-4439-bac3-36f5720d0589","Type":"ContainerStarted","Data":"05610e58069401e608b04b0fe07c9855dcd26069b41e0a19ce0940290d2c42b4"} Jan 20 17:03:43 crc kubenswrapper[4558]: I0120 17:03:43.392314 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:03:43 crc kubenswrapper[4558]: I0120 17:03:43.417214 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=1.690152264 podStartE2EDuration="9.417189082s" podCreationTimestamp="2026-01-20 17:03:34 +0000 UTC" firstStartedPulling="2026-01-20 17:03:35.080829567 +0000 UTC m=+1308.841167534" lastFinishedPulling="2026-01-20 17:03:42.807866385 +0000 UTC m=+1316.568204352" observedRunningTime="2026-01-20 17:03:43.414417531 +0000 UTC m=+1317.174755498" watchObservedRunningTime="2026-01-20 17:03:43.417189082 +0000 UTC m=+1317.177527049" Jan 20 17:03:43 crc kubenswrapper[4558]: I0120 17:03:43.436254 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/769881ca-45e6-4943-9f14-51b0f78eea8c-config-data\") pod \"nova-cell1-conductor-db-sync-7d6hd\" (UID: \"769881ca-45e6-4943-9f14-51b0f78eea8c\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-7d6hd" Jan 20 17:03:43 crc kubenswrapper[4558]: I0120 17:03:43.436301 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/769881ca-45e6-4943-9f14-51b0f78eea8c-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-7d6hd\" (UID: \"769881ca-45e6-4943-9f14-51b0f78eea8c\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-7d6hd" Jan 20 17:03:43 crc kubenswrapper[4558]: I0120 17:03:43.436362 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/769881ca-45e6-4943-9f14-51b0f78eea8c-scripts\") pod \"nova-cell1-conductor-db-sync-7d6hd\" (UID: \"769881ca-45e6-4943-9f14-51b0f78eea8c\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-7d6hd" Jan 20 17:03:43 crc kubenswrapper[4558]: I0120 17:03:43.436386 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hblf6\" (UniqueName: \"kubernetes.io/projected/769881ca-45e6-4943-9f14-51b0f78eea8c-kube-api-access-hblf6\") pod \"nova-cell1-conductor-db-sync-7d6hd\" (UID: \"769881ca-45e6-4943-9f14-51b0f78eea8c\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-7d6hd" Jan 20 17:03:43 crc kubenswrapper[4558]: I0120 17:03:43.439437 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/769881ca-45e6-4943-9f14-51b0f78eea8c-config-data\") pod \"nova-cell1-conductor-db-sync-7d6hd\" (UID: \"769881ca-45e6-4943-9f14-51b0f78eea8c\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-7d6hd" Jan 20 17:03:43 crc kubenswrapper[4558]: I0120 17:03:43.439977 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/769881ca-45e6-4943-9f14-51b0f78eea8c-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-7d6hd\" (UID: \"769881ca-45e6-4943-9f14-51b0f78eea8c\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-7d6hd" Jan 20 17:03:43 crc kubenswrapper[4558]: I0120 17:03:43.440806 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/769881ca-45e6-4943-9f14-51b0f78eea8c-scripts\") pod \"nova-cell1-conductor-db-sync-7d6hd\" (UID: \"769881ca-45e6-4943-9f14-51b0f78eea8c\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-7d6hd" Jan 20 17:03:43 crc kubenswrapper[4558]: I0120 17:03:43.453580 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hblf6\" (UniqueName: \"kubernetes.io/projected/769881ca-45e6-4943-9f14-51b0f78eea8c-kube-api-access-hblf6\") pod \"nova-cell1-conductor-db-sync-7d6hd\" (UID: \"769881ca-45e6-4943-9f14-51b0f78eea8c\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-7d6hd" Jan 20 17:03:43 crc kubenswrapper[4558]: I0120 17:03:43.498566 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-7d6hd" Jan 20 17:03:44 crc kubenswrapper[4558]: I0120 17:03:44.007335 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-cell-mapping-6ssx9"] Jan 20 17:03:44 crc kubenswrapper[4558]: W0120 17:03:44.007646 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod507235e3_a321_4617_aba7_eec9b5ca1cf0.slice/crio-bc18b269cfb6ddd5ddee2935184b63811bbf917c393aecacadf32b0937c5711b WatchSource:0}: Error finding container bc18b269cfb6ddd5ddee2935184b63811bbf917c393aecacadf32b0937c5711b: Status 404 returned error can't find the container with id bc18b269cfb6ddd5ddee2935184b63811bbf917c393aecacadf32b0937c5711b Jan 20 17:03:44 crc kubenswrapper[4558]: I0120 17:03:44.109745 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-db-sync-7d6hd"] Jan 20 17:03:44 crc kubenswrapper[4558]: I0120 17:03:44.115048 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:03:44 crc kubenswrapper[4558]: W0120 17:03:44.122105 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod769881ca_45e6_4943_9f14_51b0f78eea8c.slice/crio-c3b88b467c620efc3dcbf2b4a5f6eab176ccea53457b5252bf9768eff26e4b10 WatchSource:0}: Error finding container c3b88b467c620efc3dcbf2b4a5f6eab176ccea53457b5252bf9768eff26e4b10: Status 404 returned error can't find the container with id c3b88b467c620efc3dcbf2b4a5f6eab176ccea53457b5252bf9768eff26e4b10 Jan 20 17:03:44 crc kubenswrapper[4558]: I0120 17:03:44.288218 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:03:44 crc kubenswrapper[4558]: I0120 17:03:44.309483 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:03:44 crc kubenswrapper[4558]: I0120 17:03:44.405233 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-6ssx9" event={"ID":"507235e3-a321-4617-aba7-eec9b5ca1cf0","Type":"ContainerStarted","Data":"6877fd17a18c33efd011fb22bc699b71b1f95e440e3bb32835f147ed97daf4a4"} Jan 20 17:03:44 crc kubenswrapper[4558]: I0120 17:03:44.405296 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-6ssx9" event={"ID":"507235e3-a321-4617-aba7-eec9b5ca1cf0","Type":"ContainerStarted","Data":"bc18b269cfb6ddd5ddee2935184b63811bbf917c393aecacadf32b0937c5711b"} Jan 20 17:03:44 crc kubenswrapper[4558]: I0120 17:03:44.410346 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"1c035110-390f-44d3-ad09-a242f3bb37b1","Type":"ContainerStarted","Data":"0fcfed1e7cabed559c59a125f451d9bb46420c338c7019686ba4779a453c7bed"} Jan 20 17:03:44 crc kubenswrapper[4558]: I0120 17:03:44.410383 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"1c035110-390f-44d3-ad09-a242f3bb37b1","Type":"ContainerStarted","Data":"f6d6524f1eaca048f1517a673f77435a73e6ee7177c2be51384aa090b8468cd3"} Jan 20 17:03:44 crc kubenswrapper[4558]: I0120 17:03:44.414719 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-7d6hd" 
event={"ID":"769881ca-45e6-4943-9f14-51b0f78eea8c","Type":"ContainerStarted","Data":"2c83f13eea2134840701f05851db3d14cc87fed3be70ecde02d1c824d2f80a0d"} Jan 20 17:03:44 crc kubenswrapper[4558]: I0120 17:03:44.414753 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-7d6hd" event={"ID":"769881ca-45e6-4943-9f14-51b0f78eea8c","Type":"ContainerStarted","Data":"c3b88b467c620efc3dcbf2b4a5f6eab176ccea53457b5252bf9768eff26e4b10"} Jan 20 17:03:44 crc kubenswrapper[4558]: I0120 17:03:44.416342 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"b667ea48-0add-41f2-8c52-75d06c1312fd","Type":"ContainerStarted","Data":"534b771198d1dbaf2b6aacfd6763fdbe6efd5ed7600e072171f10f96a99a85d4"} Jan 20 17:03:44 crc kubenswrapper[4558]: I0120 17:03:44.418504 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"133f3509-3c18-4a9d-94fe-8026a3c77b12","Type":"ContainerStarted","Data":"44b948c3133051a2ebbf13f37804f61fcca40240d5de185a3c5506b11479d72f"} Jan 20 17:03:44 crc kubenswrapper[4558]: I0120 17:03:44.418532 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"133f3509-3c18-4a9d-94fe-8026a3c77b12","Type":"ContainerStarted","Data":"d2d01b1aa874a747e46deafd5ebcb37122861bb7359050fecb76d070250e94d0"} Jan 20 17:03:44 crc kubenswrapper[4558]: I0120 17:03:44.418610 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" podUID="133f3509-3c18-4a9d-94fe-8026a3c77b12" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://44b948c3133051a2ebbf13f37804f61fcca40240d5de185a3c5506b11479d72f" gracePeriod=30 Jan 20 17:03:44 crc kubenswrapper[4558]: I0120 17:03:44.428212 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"21607923-3f1e-41c5-86da-43a333ac050c","Type":"ContainerStarted","Data":"a0cdde937fc7d9c08b9a44eccbcf6dd2913c7e2c784cb09a9c39e414a254e024"} Jan 20 17:03:44 crc kubenswrapper[4558]: I0120 17:03:44.437114 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-6ssx9" podStartSLOduration=2.437100655 podStartE2EDuration="2.437100655s" podCreationTimestamp="2026-01-20 17:03:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:03:44.426712863 +0000 UTC m=+1318.187050830" watchObservedRunningTime="2026-01-20 17:03:44.437100655 +0000 UTC m=+1318.197438622" Jan 20 17:03:44 crc kubenswrapper[4558]: I0120 17:03:44.448127 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-7d6hd" podStartSLOduration=1.44811709 podStartE2EDuration="1.44811709s" podCreationTimestamp="2026-01-20 17:03:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:03:44.447063609 +0000 UTC m=+1318.207401575" watchObservedRunningTime="2026-01-20 17:03:44.44811709 +0000 UTC m=+1318.208455057" Jan 20 17:03:44 crc kubenswrapper[4558]: I0120 17:03:44.476366 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" podStartSLOduration=2.476349495 
podStartE2EDuration="2.476349495s" podCreationTimestamp="2026-01-20 17:03:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:03:44.458560165 +0000 UTC m=+1318.218898133" watchObservedRunningTime="2026-01-20 17:03:44.476349495 +0000 UTC m=+1318.236687462" Jan 20 17:03:44 crc kubenswrapper[4558]: I0120 17:03:44.482087 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-metadata-0" podStartSLOduration=2.482077836 podStartE2EDuration="2.482077836s" podCreationTimestamp="2026-01-20 17:03:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:03:44.472110124 +0000 UTC m=+1318.232448091" watchObservedRunningTime="2026-01-20 17:03:44.482077836 +0000 UTC m=+1318.242415803" Jan 20 17:03:44 crc kubenswrapper[4558]: I0120 17:03:44.493293 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-scheduler-0" podStartSLOduration=2.493284629 podStartE2EDuration="2.493284629s" podCreationTimestamp="2026-01-20 17:03:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:03:44.485628141 +0000 UTC m=+1318.245966109" watchObservedRunningTime="2026-01-20 17:03:44.493284629 +0000 UTC m=+1318.253622596" Jan 20 17:03:44 crc kubenswrapper[4558]: I0120 17:03:44.502636 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-api-0" podStartSLOduration=2.502625872 podStartE2EDuration="2.502625872s" podCreationTimestamp="2026-01-20 17:03:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:03:44.49680579 +0000 UTC m=+1318.257143757" watchObservedRunningTime="2026-01-20 17:03:44.502625872 +0000 UTC m=+1318.262963840" Jan 20 17:03:45 crc kubenswrapper[4558]: I0120 17:03:45.434726 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="1c035110-390f-44d3-ad09-a242f3bb37b1" containerName="nova-metadata-log" containerID="cri-o://f6d6524f1eaca048f1517a673f77435a73e6ee7177c2be51384aa090b8468cd3" gracePeriod=30 Jan 20 17:03:45 crc kubenswrapper[4558]: I0120 17:03:45.436055 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="1c035110-390f-44d3-ad09-a242f3bb37b1" containerName="nova-metadata-metadata" containerID="cri-o://0fcfed1e7cabed559c59a125f451d9bb46420c338c7019686ba4779a453c7bed" gracePeriod=30 Jan 20 17:03:45 crc kubenswrapper[4558]: I0120 17:03:45.949377 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:03:46 crc kubenswrapper[4558]: I0120 17:03:46.094352 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c035110-390f-44d3-ad09-a242f3bb37b1-config-data\") pod \"1c035110-390f-44d3-ad09-a242f3bb37b1\" (UID: \"1c035110-390f-44d3-ad09-a242f3bb37b1\") " Jan 20 17:03:46 crc kubenswrapper[4558]: I0120 17:03:46.094902 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fbpnt\" (UniqueName: \"kubernetes.io/projected/1c035110-390f-44d3-ad09-a242f3bb37b1-kube-api-access-fbpnt\") pod \"1c035110-390f-44d3-ad09-a242f3bb37b1\" (UID: \"1c035110-390f-44d3-ad09-a242f3bb37b1\") " Jan 20 17:03:46 crc kubenswrapper[4558]: I0120 17:03:46.095037 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1c035110-390f-44d3-ad09-a242f3bb37b1-logs\") pod \"1c035110-390f-44d3-ad09-a242f3bb37b1\" (UID: \"1c035110-390f-44d3-ad09-a242f3bb37b1\") " Jan 20 17:03:46 crc kubenswrapper[4558]: I0120 17:03:46.095229 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c035110-390f-44d3-ad09-a242f3bb37b1-combined-ca-bundle\") pod \"1c035110-390f-44d3-ad09-a242f3bb37b1\" (UID: \"1c035110-390f-44d3-ad09-a242f3bb37b1\") " Jan 20 17:03:46 crc kubenswrapper[4558]: I0120 17:03:46.095394 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1c035110-390f-44d3-ad09-a242f3bb37b1-logs" (OuterVolumeSpecName: "logs") pod "1c035110-390f-44d3-ad09-a242f3bb37b1" (UID: "1c035110-390f-44d3-ad09-a242f3bb37b1"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:03:46 crc kubenswrapper[4558]: I0120 17:03:46.096408 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1c035110-390f-44d3-ad09-a242f3bb37b1-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:46 crc kubenswrapper[4558]: I0120 17:03:46.099207 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1c035110-390f-44d3-ad09-a242f3bb37b1-kube-api-access-fbpnt" (OuterVolumeSpecName: "kube-api-access-fbpnt") pod "1c035110-390f-44d3-ad09-a242f3bb37b1" (UID: "1c035110-390f-44d3-ad09-a242f3bb37b1"). InnerVolumeSpecName "kube-api-access-fbpnt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:03:46 crc kubenswrapper[4558]: I0120 17:03:46.117974 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c035110-390f-44d3-ad09-a242f3bb37b1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1c035110-390f-44d3-ad09-a242f3bb37b1" (UID: "1c035110-390f-44d3-ad09-a242f3bb37b1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:03:46 crc kubenswrapper[4558]: I0120 17:03:46.119770 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c035110-390f-44d3-ad09-a242f3bb37b1-config-data" (OuterVolumeSpecName: "config-data") pod "1c035110-390f-44d3-ad09-a242f3bb37b1" (UID: "1c035110-390f-44d3-ad09-a242f3bb37b1"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:03:46 crc kubenswrapper[4558]: I0120 17:03:46.198829 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c035110-390f-44d3-ad09-a242f3bb37b1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:46 crc kubenswrapper[4558]: I0120 17:03:46.198865 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1c035110-390f-44d3-ad09-a242f3bb37b1-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:46 crc kubenswrapper[4558]: I0120 17:03:46.198877 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fbpnt\" (UniqueName: \"kubernetes.io/projected/1c035110-390f-44d3-ad09-a242f3bb37b1-kube-api-access-fbpnt\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:46 crc kubenswrapper[4558]: I0120 17:03:46.445137 4558 generic.go:334] "Generic (PLEG): container finished" podID="1c035110-390f-44d3-ad09-a242f3bb37b1" containerID="0fcfed1e7cabed559c59a125f451d9bb46420c338c7019686ba4779a453c7bed" exitCode=0 Jan 20 17:03:46 crc kubenswrapper[4558]: I0120 17:03:46.445181 4558 generic.go:334] "Generic (PLEG): container finished" podID="1c035110-390f-44d3-ad09-a242f3bb37b1" containerID="f6d6524f1eaca048f1517a673f77435a73e6ee7177c2be51384aa090b8468cd3" exitCode=143 Jan 20 17:03:46 crc kubenswrapper[4558]: I0120 17:03:46.445203 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"1c035110-390f-44d3-ad09-a242f3bb37b1","Type":"ContainerDied","Data":"0fcfed1e7cabed559c59a125f451d9bb46420c338c7019686ba4779a453c7bed"} Jan 20 17:03:46 crc kubenswrapper[4558]: I0120 17:03:46.445251 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"1c035110-390f-44d3-ad09-a242f3bb37b1","Type":"ContainerDied","Data":"f6d6524f1eaca048f1517a673f77435a73e6ee7177c2be51384aa090b8468cd3"} Jan 20 17:03:46 crc kubenswrapper[4558]: I0120 17:03:46.445265 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"1c035110-390f-44d3-ad09-a242f3bb37b1","Type":"ContainerDied","Data":"6ff2ee5356f11f8076e9a29cbd87ef8212f5ec180d673cd139f93b171a13a143"} Jan 20 17:03:46 crc kubenswrapper[4558]: I0120 17:03:46.445295 4558 scope.go:117] "RemoveContainer" containerID="0fcfed1e7cabed559c59a125f451d9bb46420c338c7019686ba4779a453c7bed" Jan 20 17:03:46 crc kubenswrapper[4558]: I0120 17:03:46.446364 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:03:46 crc kubenswrapper[4558]: I0120 17:03:46.448206 4558 generic.go:334] "Generic (PLEG): container finished" podID="769881ca-45e6-4943-9f14-51b0f78eea8c" containerID="2c83f13eea2134840701f05851db3d14cc87fed3be70ecde02d1c824d2f80a0d" exitCode=0 Jan 20 17:03:46 crc kubenswrapper[4558]: I0120 17:03:46.448253 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-7d6hd" event={"ID":"769881ca-45e6-4943-9f14-51b0f78eea8c","Type":"ContainerDied","Data":"2c83f13eea2134840701f05851db3d14cc87fed3be70ecde02d1c824d2f80a0d"} Jan 20 17:03:46 crc kubenswrapper[4558]: I0120 17:03:46.500261 4558 scope.go:117] "RemoveContainer" containerID="f6d6524f1eaca048f1517a673f77435a73e6ee7177c2be51384aa090b8468cd3" Jan 20 17:03:46 crc kubenswrapper[4558]: I0120 17:03:46.539213 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:03:46 crc kubenswrapper[4558]: I0120 17:03:46.553707 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:03:46 crc kubenswrapper[4558]: I0120 17:03:46.574928 4558 scope.go:117] "RemoveContainer" containerID="0fcfed1e7cabed559c59a125f451d9bb46420c338c7019686ba4779a453c7bed" Jan 20 17:03:46 crc kubenswrapper[4558]: E0120 17:03:46.581968 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0fcfed1e7cabed559c59a125f451d9bb46420c338c7019686ba4779a453c7bed\": container with ID starting with 0fcfed1e7cabed559c59a125f451d9bb46420c338c7019686ba4779a453c7bed not found: ID does not exist" containerID="0fcfed1e7cabed559c59a125f451d9bb46420c338c7019686ba4779a453c7bed" Jan 20 17:03:46 crc kubenswrapper[4558]: I0120 17:03:46.582026 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0fcfed1e7cabed559c59a125f451d9bb46420c338c7019686ba4779a453c7bed"} err="failed to get container status \"0fcfed1e7cabed559c59a125f451d9bb46420c338c7019686ba4779a453c7bed\": rpc error: code = NotFound desc = could not find container \"0fcfed1e7cabed559c59a125f451d9bb46420c338c7019686ba4779a453c7bed\": container with ID starting with 0fcfed1e7cabed559c59a125f451d9bb46420c338c7019686ba4779a453c7bed not found: ID does not exist" Jan 20 17:03:46 crc kubenswrapper[4558]: I0120 17:03:46.582055 4558 scope.go:117] "RemoveContainer" containerID="f6d6524f1eaca048f1517a673f77435a73e6ee7177c2be51384aa090b8468cd3" Jan 20 17:03:46 crc kubenswrapper[4558]: E0120 17:03:46.609137 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f6d6524f1eaca048f1517a673f77435a73e6ee7177c2be51384aa090b8468cd3\": container with ID starting with f6d6524f1eaca048f1517a673f77435a73e6ee7177c2be51384aa090b8468cd3 not found: ID does not exist" containerID="f6d6524f1eaca048f1517a673f77435a73e6ee7177c2be51384aa090b8468cd3" Jan 20 17:03:46 crc kubenswrapper[4558]: I0120 17:03:46.609253 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f6d6524f1eaca048f1517a673f77435a73e6ee7177c2be51384aa090b8468cd3"} err="failed to get container status \"f6d6524f1eaca048f1517a673f77435a73e6ee7177c2be51384aa090b8468cd3\": rpc error: code = NotFound desc = could not find container \"f6d6524f1eaca048f1517a673f77435a73e6ee7177c2be51384aa090b8468cd3\": container with ID starting with 
f6d6524f1eaca048f1517a673f77435a73e6ee7177c2be51384aa090b8468cd3 not found: ID does not exist" Jan 20 17:03:46 crc kubenswrapper[4558]: I0120 17:03:46.609302 4558 scope.go:117] "RemoveContainer" containerID="0fcfed1e7cabed559c59a125f451d9bb46420c338c7019686ba4779a453c7bed" Jan 20 17:03:46 crc kubenswrapper[4558]: I0120 17:03:46.612785 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0fcfed1e7cabed559c59a125f451d9bb46420c338c7019686ba4779a453c7bed"} err="failed to get container status \"0fcfed1e7cabed559c59a125f451d9bb46420c338c7019686ba4779a453c7bed\": rpc error: code = NotFound desc = could not find container \"0fcfed1e7cabed559c59a125f451d9bb46420c338c7019686ba4779a453c7bed\": container with ID starting with 0fcfed1e7cabed559c59a125f451d9bb46420c338c7019686ba4779a453c7bed not found: ID does not exist" Jan 20 17:03:46 crc kubenswrapper[4558]: I0120 17:03:46.612821 4558 scope.go:117] "RemoveContainer" containerID="f6d6524f1eaca048f1517a673f77435a73e6ee7177c2be51384aa090b8468cd3" Jan 20 17:03:46 crc kubenswrapper[4558]: I0120 17:03:46.615911 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f6d6524f1eaca048f1517a673f77435a73e6ee7177c2be51384aa090b8468cd3"} err="failed to get container status \"f6d6524f1eaca048f1517a673f77435a73e6ee7177c2be51384aa090b8468cd3\": rpc error: code = NotFound desc = could not find container \"f6d6524f1eaca048f1517a673f77435a73e6ee7177c2be51384aa090b8468cd3\": container with ID starting with f6d6524f1eaca048f1517a673f77435a73e6ee7177c2be51384aa090b8468cd3 not found: ID does not exist" Jan 20 17:03:46 crc kubenswrapper[4558]: I0120 17:03:46.650338 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1c035110-390f-44d3-ad09-a242f3bb37b1" path="/var/lib/kubelet/pods/1c035110-390f-44d3-ad09-a242f3bb37b1/volumes" Jan 20 17:03:46 crc kubenswrapper[4558]: I0120 17:03:46.661687 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:03:46 crc kubenswrapper[4558]: E0120 17:03:46.662086 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c035110-390f-44d3-ad09-a242f3bb37b1" containerName="nova-metadata-log" Jan 20 17:03:46 crc kubenswrapper[4558]: I0120 17:03:46.662109 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c035110-390f-44d3-ad09-a242f3bb37b1" containerName="nova-metadata-log" Jan 20 17:03:46 crc kubenswrapper[4558]: E0120 17:03:46.662148 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c035110-390f-44d3-ad09-a242f3bb37b1" containerName="nova-metadata-metadata" Jan 20 17:03:46 crc kubenswrapper[4558]: I0120 17:03:46.662155 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c035110-390f-44d3-ad09-a242f3bb37b1" containerName="nova-metadata-metadata" Jan 20 17:03:46 crc kubenswrapper[4558]: I0120 17:03:46.662399 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c035110-390f-44d3-ad09-a242f3bb37b1" containerName="nova-metadata-log" Jan 20 17:03:46 crc kubenswrapper[4558]: I0120 17:03:46.662421 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c035110-390f-44d3-ad09-a242f3bb37b1" containerName="nova-metadata-metadata" Jan 20 17:03:46 crc kubenswrapper[4558]: I0120 17:03:46.663586 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:03:46 crc kubenswrapper[4558]: I0120 17:03:46.663715 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:03:46 crc kubenswrapper[4558]: I0120 17:03:46.668022 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-metadata-config-data" Jan 20 17:03:46 crc kubenswrapper[4558]: I0120 17:03:46.668337 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-metadata-internal-svc" Jan 20 17:03:46 crc kubenswrapper[4558]: I0120 17:03:46.832768 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pm8hf\" (UniqueName: \"kubernetes.io/projected/6e057ced-6312-4433-826b-3db68f422df1-kube-api-access-pm8hf\") pod \"nova-metadata-0\" (UID: \"6e057ced-6312-4433-826b-3db68f422df1\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:03:46 crc kubenswrapper[4558]: I0120 17:03:46.833107 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e057ced-6312-4433-826b-3db68f422df1-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"6e057ced-6312-4433-826b-3db68f422df1\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:03:46 crc kubenswrapper[4558]: I0120 17:03:46.833246 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6e057ced-6312-4433-826b-3db68f422df1-logs\") pod \"nova-metadata-0\" (UID: \"6e057ced-6312-4433-826b-3db68f422df1\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:03:46 crc kubenswrapper[4558]: I0120 17:03:46.833403 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/6e057ced-6312-4433-826b-3db68f422df1-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"6e057ced-6312-4433-826b-3db68f422df1\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:03:46 crc kubenswrapper[4558]: I0120 17:03:46.833475 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e057ced-6312-4433-826b-3db68f422df1-config-data\") pod \"nova-metadata-0\" (UID: \"6e057ced-6312-4433-826b-3db68f422df1\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:03:46 crc kubenswrapper[4558]: I0120 17:03:46.936977 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/6e057ced-6312-4433-826b-3db68f422df1-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"6e057ced-6312-4433-826b-3db68f422df1\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:03:46 crc kubenswrapper[4558]: I0120 17:03:46.937115 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e057ced-6312-4433-826b-3db68f422df1-config-data\") pod \"nova-metadata-0\" (UID: \"6e057ced-6312-4433-826b-3db68f422df1\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:03:46 crc kubenswrapper[4558]: I0120 17:03:46.937186 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pm8hf\" (UniqueName: \"kubernetes.io/projected/6e057ced-6312-4433-826b-3db68f422df1-kube-api-access-pm8hf\") pod \"nova-metadata-0\" (UID: \"6e057ced-6312-4433-826b-3db68f422df1\") " 
pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:03:46 crc kubenswrapper[4558]: I0120 17:03:46.937229 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e057ced-6312-4433-826b-3db68f422df1-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"6e057ced-6312-4433-826b-3db68f422df1\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:03:46 crc kubenswrapper[4558]: I0120 17:03:46.937329 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6e057ced-6312-4433-826b-3db68f422df1-logs\") pod \"nova-metadata-0\" (UID: \"6e057ced-6312-4433-826b-3db68f422df1\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:03:46 crc kubenswrapper[4558]: I0120 17:03:46.938139 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6e057ced-6312-4433-826b-3db68f422df1-logs\") pod \"nova-metadata-0\" (UID: \"6e057ced-6312-4433-826b-3db68f422df1\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:03:46 crc kubenswrapper[4558]: I0120 17:03:46.942710 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e057ced-6312-4433-826b-3db68f422df1-config-data\") pod \"nova-metadata-0\" (UID: \"6e057ced-6312-4433-826b-3db68f422df1\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:03:46 crc kubenswrapper[4558]: I0120 17:03:46.942709 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e057ced-6312-4433-826b-3db68f422df1-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"6e057ced-6312-4433-826b-3db68f422df1\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:03:46 crc kubenswrapper[4558]: I0120 17:03:46.946536 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/6e057ced-6312-4433-826b-3db68f422df1-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"6e057ced-6312-4433-826b-3db68f422df1\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:03:46 crc kubenswrapper[4558]: I0120 17:03:46.963463 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pm8hf\" (UniqueName: \"kubernetes.io/projected/6e057ced-6312-4433-826b-3db68f422df1-kube-api-access-pm8hf\") pod \"nova-metadata-0\" (UID: \"6e057ced-6312-4433-826b-3db68f422df1\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:03:46 crc kubenswrapper[4558]: I0120 17:03:46.982781 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:03:47 crc kubenswrapper[4558]: I0120 17:03:47.383620 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:03:47 crc kubenswrapper[4558]: W0120 17:03:47.389114 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6e057ced_6312_4433_826b_3db68f422df1.slice/crio-02d52b89f63dbcec559e9bd8d3439fbfe41f5ed281c2ec0439bc916951aea92c WatchSource:0}: Error finding container 02d52b89f63dbcec559e9bd8d3439fbfe41f5ed281c2ec0439bc916951aea92c: Status 404 returned error can't find the container with id 02d52b89f63dbcec559e9bd8d3439fbfe41f5ed281c2ec0439bc916951aea92c Jan 20 17:03:47 crc kubenswrapper[4558]: I0120 17:03:47.463919 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"6e057ced-6312-4433-826b-3db68f422df1","Type":"ContainerStarted","Data":"02d52b89f63dbcec559e9bd8d3439fbfe41f5ed281c2ec0439bc916951aea92c"} Jan 20 17:03:47 crc kubenswrapper[4558]: I0120 17:03:47.710251 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:03:47 crc kubenswrapper[4558]: I0120 17:03:47.758132 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-7d6hd" Jan 20 17:03:47 crc kubenswrapper[4558]: I0120 17:03:47.858051 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hblf6\" (UniqueName: \"kubernetes.io/projected/769881ca-45e6-4943-9f14-51b0f78eea8c-kube-api-access-hblf6\") pod \"769881ca-45e6-4943-9f14-51b0f78eea8c\" (UID: \"769881ca-45e6-4943-9f14-51b0f78eea8c\") " Jan 20 17:03:47 crc kubenswrapper[4558]: I0120 17:03:47.858217 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/769881ca-45e6-4943-9f14-51b0f78eea8c-config-data\") pod \"769881ca-45e6-4943-9f14-51b0f78eea8c\" (UID: \"769881ca-45e6-4943-9f14-51b0f78eea8c\") " Jan 20 17:03:47 crc kubenswrapper[4558]: I0120 17:03:47.858273 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/769881ca-45e6-4943-9f14-51b0f78eea8c-combined-ca-bundle\") pod \"769881ca-45e6-4943-9f14-51b0f78eea8c\" (UID: \"769881ca-45e6-4943-9f14-51b0f78eea8c\") " Jan 20 17:03:47 crc kubenswrapper[4558]: I0120 17:03:47.858366 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/769881ca-45e6-4943-9f14-51b0f78eea8c-scripts\") pod \"769881ca-45e6-4943-9f14-51b0f78eea8c\" (UID: \"769881ca-45e6-4943-9f14-51b0f78eea8c\") " Jan 20 17:03:47 crc kubenswrapper[4558]: I0120 17:03:47.861582 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/769881ca-45e6-4943-9f14-51b0f78eea8c-scripts" (OuterVolumeSpecName: "scripts") pod "769881ca-45e6-4943-9f14-51b0f78eea8c" (UID: "769881ca-45e6-4943-9f14-51b0f78eea8c"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:03:47 crc kubenswrapper[4558]: I0120 17:03:47.861874 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/769881ca-45e6-4943-9f14-51b0f78eea8c-kube-api-access-hblf6" (OuterVolumeSpecName: "kube-api-access-hblf6") pod "769881ca-45e6-4943-9f14-51b0f78eea8c" (UID: "769881ca-45e6-4943-9f14-51b0f78eea8c"). InnerVolumeSpecName "kube-api-access-hblf6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:03:47 crc kubenswrapper[4558]: I0120 17:03:47.879744 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/769881ca-45e6-4943-9f14-51b0f78eea8c-config-data" (OuterVolumeSpecName: "config-data") pod "769881ca-45e6-4943-9f14-51b0f78eea8c" (UID: "769881ca-45e6-4943-9f14-51b0f78eea8c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:03:47 crc kubenswrapper[4558]: I0120 17:03:47.881778 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/769881ca-45e6-4943-9f14-51b0f78eea8c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "769881ca-45e6-4943-9f14-51b0f78eea8c" (UID: "769881ca-45e6-4943-9f14-51b0f78eea8c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:03:47 crc kubenswrapper[4558]: I0120 17:03:47.944650 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:03:47 crc kubenswrapper[4558]: I0120 17:03:47.961266 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/769881ca-45e6-4943-9f14-51b0f78eea8c-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:47 crc kubenswrapper[4558]: I0120 17:03:47.961348 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/769881ca-45e6-4943-9f14-51b0f78eea8c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:47 crc kubenswrapper[4558]: I0120 17:03:47.961363 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/769881ca-45e6-4943-9f14-51b0f78eea8c-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:47 crc kubenswrapper[4558]: I0120 17:03:47.961378 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hblf6\" (UniqueName: \"kubernetes.io/projected/769881ca-45e6-4943-9f14-51b0f78eea8c-kube-api-access-hblf6\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:48 crc kubenswrapper[4558]: I0120 17:03:48.478211 4558 generic.go:334] "Generic (PLEG): container finished" podID="507235e3-a321-4617-aba7-eec9b5ca1cf0" containerID="6877fd17a18c33efd011fb22bc699b71b1f95e440e3bb32835f147ed97daf4a4" exitCode=0 Jan 20 17:03:48 crc kubenswrapper[4558]: I0120 17:03:48.478275 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-6ssx9" event={"ID":"507235e3-a321-4617-aba7-eec9b5ca1cf0","Type":"ContainerDied","Data":"6877fd17a18c33efd011fb22bc699b71b1f95e440e3bb32835f147ed97daf4a4"} Jan 20 17:03:48 crc kubenswrapper[4558]: I0120 17:03:48.481956 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"6e057ced-6312-4433-826b-3db68f422df1","Type":"ContainerStarted","Data":"e6a4e63c96e557ad9d0f24db6e5510a1ca114d2430e3a70bc7b3cd19368426bb"} Jan 20 17:03:48 crc 
kubenswrapper[4558]: I0120 17:03:48.482052 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"6e057ced-6312-4433-826b-3db68f422df1","Type":"ContainerStarted","Data":"08abbee09c12cf3a64d6c392bdb49747bc0b49670d76c631df62291574b4b57b"} Jan 20 17:03:48 crc kubenswrapper[4558]: I0120 17:03:48.483376 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-7d6hd" event={"ID":"769881ca-45e6-4943-9f14-51b0f78eea8c","Type":"ContainerDied","Data":"c3b88b467c620efc3dcbf2b4a5f6eab176ccea53457b5252bf9768eff26e4b10"} Jan 20 17:03:48 crc kubenswrapper[4558]: I0120 17:03:48.483417 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c3b88b467c620efc3dcbf2b4a5f6eab176ccea53457b5252bf9768eff26e4b10" Jan 20 17:03:48 crc kubenswrapper[4558]: I0120 17:03:48.483423 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-7d6hd" Jan 20 17:03:48 crc kubenswrapper[4558]: I0120 17:03:48.519493 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-metadata-0" podStartSLOduration=2.519476359 podStartE2EDuration="2.519476359s" podCreationTimestamp="2026-01-20 17:03:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:03:48.516536532 +0000 UTC m=+1322.276874498" watchObservedRunningTime="2026-01-20 17:03:48.519476359 +0000 UTC m=+1322.279814326" Jan 20 17:03:48 crc kubenswrapper[4558]: I0120 17:03:48.542823 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:03:48 crc kubenswrapper[4558]: E0120 17:03:48.543213 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="769881ca-45e6-4943-9f14-51b0f78eea8c" containerName="nova-cell1-conductor-db-sync" Jan 20 17:03:48 crc kubenswrapper[4558]: I0120 17:03:48.543231 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="769881ca-45e6-4943-9f14-51b0f78eea8c" containerName="nova-cell1-conductor-db-sync" Jan 20 17:03:48 crc kubenswrapper[4558]: I0120 17:03:48.543457 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="769881ca-45e6-4943-9f14-51b0f78eea8c" containerName="nova-cell1-conductor-db-sync" Jan 20 17:03:48 crc kubenswrapper[4558]: I0120 17:03:48.543997 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:03:48 crc kubenswrapper[4558]: I0120 17:03:48.549233 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-conductor-config-data" Jan 20 17:03:48 crc kubenswrapper[4558]: I0120 17:03:48.555432 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:03:48 crc kubenswrapper[4558]: I0120 17:03:48.569659 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/084a5ce8-2844-42f1-92a9-973b78505050-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"084a5ce8-2844-42f1-92a9-973b78505050\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:03:48 crc kubenswrapper[4558]: I0120 17:03:48.569754 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nzvrq\" (UniqueName: \"kubernetes.io/projected/084a5ce8-2844-42f1-92a9-973b78505050-kube-api-access-nzvrq\") pod \"nova-cell1-conductor-0\" (UID: \"084a5ce8-2844-42f1-92a9-973b78505050\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:03:48 crc kubenswrapper[4558]: I0120 17:03:48.569797 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/084a5ce8-2844-42f1-92a9-973b78505050-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"084a5ce8-2844-42f1-92a9-973b78505050\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:03:48 crc kubenswrapper[4558]: I0120 17:03:48.671652 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nzvrq\" (UniqueName: \"kubernetes.io/projected/084a5ce8-2844-42f1-92a9-973b78505050-kube-api-access-nzvrq\") pod \"nova-cell1-conductor-0\" (UID: \"084a5ce8-2844-42f1-92a9-973b78505050\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:03:48 crc kubenswrapper[4558]: I0120 17:03:48.671709 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/084a5ce8-2844-42f1-92a9-973b78505050-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"084a5ce8-2844-42f1-92a9-973b78505050\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:03:48 crc kubenswrapper[4558]: I0120 17:03:48.671935 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/084a5ce8-2844-42f1-92a9-973b78505050-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"084a5ce8-2844-42f1-92a9-973b78505050\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:03:48 crc kubenswrapper[4558]: I0120 17:03:48.676516 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/084a5ce8-2844-42f1-92a9-973b78505050-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"084a5ce8-2844-42f1-92a9-973b78505050\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:03:48 crc kubenswrapper[4558]: I0120 17:03:48.677306 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/084a5ce8-2844-42f1-92a9-973b78505050-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"084a5ce8-2844-42f1-92a9-973b78505050\") " 
pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:03:48 crc kubenswrapper[4558]: I0120 17:03:48.689087 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nzvrq\" (UniqueName: \"kubernetes.io/projected/084a5ce8-2844-42f1-92a9-973b78505050-kube-api-access-nzvrq\") pod \"nova-cell1-conductor-0\" (UID: \"084a5ce8-2844-42f1-92a9-973b78505050\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:03:48 crc kubenswrapper[4558]: I0120 17:03:48.896922 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:03:49 crc kubenswrapper[4558]: I0120 17:03:49.292132 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:03:49 crc kubenswrapper[4558]: I0120 17:03:49.493978 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"084a5ce8-2844-42f1-92a9-973b78505050","Type":"ContainerStarted","Data":"16d11c7e8c68131f8eb72146637f28a05ad570c65546debfb217fa0b00e58dae"} Jan 20 17:03:49 crc kubenswrapper[4558]: I0120 17:03:49.494250 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"084a5ce8-2844-42f1-92a9-973b78505050","Type":"ContainerStarted","Data":"180d138c277666dc70017cc01b02accfa78bd539075052a5a8ffc82c79420eca"} Jan 20 17:03:49 crc kubenswrapper[4558]: I0120 17:03:49.514337 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podStartSLOduration=1.514318083 podStartE2EDuration="1.514318083s" podCreationTimestamp="2026-01-20 17:03:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:03:49.511342198 +0000 UTC m=+1323.271680165" watchObservedRunningTime="2026-01-20 17:03:49.514318083 +0000 UTC m=+1323.274656050" Jan 20 17:03:49 crc kubenswrapper[4558]: I0120 17:03:49.818961 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-6ssx9" Jan 20 17:03:49 crc kubenswrapper[4558]: I0120 17:03:49.902788 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/507235e3-a321-4617-aba7-eec9b5ca1cf0-combined-ca-bundle\") pod \"507235e3-a321-4617-aba7-eec9b5ca1cf0\" (UID: \"507235e3-a321-4617-aba7-eec9b5ca1cf0\") " Jan 20 17:03:49 crc kubenswrapper[4558]: I0120 17:03:49.902841 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/507235e3-a321-4617-aba7-eec9b5ca1cf0-scripts\") pod \"507235e3-a321-4617-aba7-eec9b5ca1cf0\" (UID: \"507235e3-a321-4617-aba7-eec9b5ca1cf0\") " Jan 20 17:03:49 crc kubenswrapper[4558]: I0120 17:03:49.902867 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tqp8g\" (UniqueName: \"kubernetes.io/projected/507235e3-a321-4617-aba7-eec9b5ca1cf0-kube-api-access-tqp8g\") pod \"507235e3-a321-4617-aba7-eec9b5ca1cf0\" (UID: \"507235e3-a321-4617-aba7-eec9b5ca1cf0\") " Jan 20 17:03:49 crc kubenswrapper[4558]: I0120 17:03:49.902911 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/507235e3-a321-4617-aba7-eec9b5ca1cf0-config-data\") pod \"507235e3-a321-4617-aba7-eec9b5ca1cf0\" (UID: \"507235e3-a321-4617-aba7-eec9b5ca1cf0\") " Jan 20 17:03:49 crc kubenswrapper[4558]: I0120 17:03:49.907957 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/507235e3-a321-4617-aba7-eec9b5ca1cf0-kube-api-access-tqp8g" (OuterVolumeSpecName: "kube-api-access-tqp8g") pod "507235e3-a321-4617-aba7-eec9b5ca1cf0" (UID: "507235e3-a321-4617-aba7-eec9b5ca1cf0"). InnerVolumeSpecName "kube-api-access-tqp8g". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:03:49 crc kubenswrapper[4558]: I0120 17:03:49.908405 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/507235e3-a321-4617-aba7-eec9b5ca1cf0-scripts" (OuterVolumeSpecName: "scripts") pod "507235e3-a321-4617-aba7-eec9b5ca1cf0" (UID: "507235e3-a321-4617-aba7-eec9b5ca1cf0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:03:49 crc kubenswrapper[4558]: I0120 17:03:49.927401 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/507235e3-a321-4617-aba7-eec9b5ca1cf0-config-data" (OuterVolumeSpecName: "config-data") pod "507235e3-a321-4617-aba7-eec9b5ca1cf0" (UID: "507235e3-a321-4617-aba7-eec9b5ca1cf0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:03:49 crc kubenswrapper[4558]: I0120 17:03:49.927781 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/507235e3-a321-4617-aba7-eec9b5ca1cf0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "507235e3-a321-4617-aba7-eec9b5ca1cf0" (UID: "507235e3-a321-4617-aba7-eec9b5ca1cf0"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:03:50 crc kubenswrapper[4558]: I0120 17:03:50.006121 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/507235e3-a321-4617-aba7-eec9b5ca1cf0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:50 crc kubenswrapper[4558]: I0120 17:03:50.006156 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/507235e3-a321-4617-aba7-eec9b5ca1cf0-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:50 crc kubenswrapper[4558]: I0120 17:03:50.006189 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tqp8g\" (UniqueName: \"kubernetes.io/projected/507235e3-a321-4617-aba7-eec9b5ca1cf0-kube-api-access-tqp8g\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:50 crc kubenswrapper[4558]: I0120 17:03:50.006202 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/507235e3-a321-4617-aba7-eec9b5ca1cf0-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:50 crc kubenswrapper[4558]: I0120 17:03:50.502457 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-6ssx9" Jan 20 17:03:50 crc kubenswrapper[4558]: I0120 17:03:50.502606 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-6ssx9" event={"ID":"507235e3-a321-4617-aba7-eec9b5ca1cf0","Type":"ContainerDied","Data":"bc18b269cfb6ddd5ddee2935184b63811bbf917c393aecacadf32b0937c5711b"} Jan 20 17:03:50 crc kubenswrapper[4558]: I0120 17:03:50.502641 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bc18b269cfb6ddd5ddee2935184b63811bbf917c393aecacadf32b0937c5711b" Jan 20 17:03:50 crc kubenswrapper[4558]: I0120 17:03:50.502761 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:03:50 crc kubenswrapper[4558]: I0120 17:03:50.675363 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:03:50 crc kubenswrapper[4558]: I0120 17:03:50.675640 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="21607923-3f1e-41c5-86da-43a333ac050c" containerName="nova-api-log" containerID="cri-o://70ca80861b8f90aed6f9f75f21e3b9fd92e85edd4b224c3ab9a29e62e7191a18" gracePeriod=30 Jan 20 17:03:50 crc kubenswrapper[4558]: I0120 17:03:50.676209 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="21607923-3f1e-41c5-86da-43a333ac050c" containerName="nova-api-api" containerID="cri-o://a0cdde937fc7d9c08b9a44eccbcf6dd2913c7e2c784cb09a9c39e414a254e024" gracePeriod=30 Jan 20 17:03:50 crc kubenswrapper[4558]: I0120 17:03:50.684221 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:03:50 crc kubenswrapper[4558]: I0120 17:03:50.684428 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="b667ea48-0add-41f2-8c52-75d06c1312fd" containerName="nova-scheduler-scheduler" containerID="cri-o://534b771198d1dbaf2b6aacfd6763fdbe6efd5ed7600e072171f10f96a99a85d4" gracePeriod=30 Jan 20 17:03:50 crc kubenswrapper[4558]: I0120 17:03:50.694383 4558 kubelet.go:2437] "SyncLoop DELETE" 
source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:03:50 crc kubenswrapper[4558]: I0120 17:03:50.694640 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="6e057ced-6312-4433-826b-3db68f422df1" containerName="nova-metadata-log" containerID="cri-o://08abbee09c12cf3a64d6c392bdb49747bc0b49670d76c631df62291574b4b57b" gracePeriod=30 Jan 20 17:03:50 crc kubenswrapper[4558]: I0120 17:03:50.694701 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="6e057ced-6312-4433-826b-3db68f422df1" containerName="nova-metadata-metadata" containerID="cri-o://e6a4e63c96e557ad9d0f24db6e5510a1ca114d2430e3a70bc7b3cd19368426bb" gracePeriod=30 Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.212345 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.220405 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.228090 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pm8hf\" (UniqueName: \"kubernetes.io/projected/6e057ced-6312-4433-826b-3db68f422df1-kube-api-access-pm8hf\") pod \"6e057ced-6312-4433-826b-3db68f422df1\" (UID: \"6e057ced-6312-4433-826b-3db68f422df1\") " Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.228154 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6e057ced-6312-4433-826b-3db68f422df1-logs\") pod \"6e057ced-6312-4433-826b-3db68f422df1\" (UID: \"6e057ced-6312-4433-826b-3db68f422df1\") " Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.228219 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/21607923-3f1e-41c5-86da-43a333ac050c-logs\") pod \"21607923-3f1e-41c5-86da-43a333ac050c\" (UID: \"21607923-3f1e-41c5-86da-43a333ac050c\") " Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.228244 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21607923-3f1e-41c5-86da-43a333ac050c-config-data\") pod \"21607923-3f1e-41c5-86da-43a333ac050c\" (UID: \"21607923-3f1e-41c5-86da-43a333ac050c\") " Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.228325 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g2p6n\" (UniqueName: \"kubernetes.io/projected/21607923-3f1e-41c5-86da-43a333ac050c-kube-api-access-g2p6n\") pod \"21607923-3f1e-41c5-86da-43a333ac050c\" (UID: \"21607923-3f1e-41c5-86da-43a333ac050c\") " Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.228358 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21607923-3f1e-41c5-86da-43a333ac050c-combined-ca-bundle\") pod \"21607923-3f1e-41c5-86da-43a333ac050c\" (UID: \"21607923-3f1e-41c5-86da-43a333ac050c\") " Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.228463 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e057ced-6312-4433-826b-3db68f422df1-combined-ca-bundle\") pod 
\"6e057ced-6312-4433-826b-3db68f422df1\" (UID: \"6e057ced-6312-4433-826b-3db68f422df1\") " Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.228489 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e057ced-6312-4433-826b-3db68f422df1-config-data\") pod \"6e057ced-6312-4433-826b-3db68f422df1\" (UID: \"6e057ced-6312-4433-826b-3db68f422df1\") " Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.228537 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6e057ced-6312-4433-826b-3db68f422df1-logs" (OuterVolumeSpecName: "logs") pod "6e057ced-6312-4433-826b-3db68f422df1" (UID: "6e057ced-6312-4433-826b-3db68f422df1"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.228601 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/6e057ced-6312-4433-826b-3db68f422df1-nova-metadata-tls-certs\") pod \"6e057ced-6312-4433-826b-3db68f422df1\" (UID: \"6e057ced-6312-4433-826b-3db68f422df1\") " Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.228909 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6e057ced-6312-4433-826b-3db68f422df1-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.230553 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/21607923-3f1e-41c5-86da-43a333ac050c-logs" (OuterVolumeSpecName: "logs") pod "21607923-3f1e-41c5-86da-43a333ac050c" (UID: "21607923-3f1e-41c5-86da-43a333ac050c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.239947 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/21607923-3f1e-41c5-86da-43a333ac050c-kube-api-access-g2p6n" (OuterVolumeSpecName: "kube-api-access-g2p6n") pod "21607923-3f1e-41c5-86da-43a333ac050c" (UID: "21607923-3f1e-41c5-86da-43a333ac050c"). InnerVolumeSpecName "kube-api-access-g2p6n". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.240067 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6e057ced-6312-4433-826b-3db68f422df1-kube-api-access-pm8hf" (OuterVolumeSpecName: "kube-api-access-pm8hf") pod "6e057ced-6312-4433-826b-3db68f422df1" (UID: "6e057ced-6312-4433-826b-3db68f422df1"). InnerVolumeSpecName "kube-api-access-pm8hf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.267958 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21607923-3f1e-41c5-86da-43a333ac050c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "21607923-3f1e-41c5-86da-43a333ac050c" (UID: "21607923-3f1e-41c5-86da-43a333ac050c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.271533 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6e057ced-6312-4433-826b-3db68f422df1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6e057ced-6312-4433-826b-3db68f422df1" (UID: "6e057ced-6312-4433-826b-3db68f422df1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.274501 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21607923-3f1e-41c5-86da-43a333ac050c-config-data" (OuterVolumeSpecName: "config-data") pod "21607923-3f1e-41c5-86da-43a333ac050c" (UID: "21607923-3f1e-41c5-86da-43a333ac050c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.287136 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6e057ced-6312-4433-826b-3db68f422df1-config-data" (OuterVolumeSpecName: "config-data") pod "6e057ced-6312-4433-826b-3db68f422df1" (UID: "6e057ced-6312-4433-826b-3db68f422df1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.287917 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6e057ced-6312-4433-826b-3db68f422df1-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "6e057ced-6312-4433-826b-3db68f422df1" (UID: "6e057ced-6312-4433-826b-3db68f422df1"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.331368 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e057ced-6312-4433-826b-3db68f422df1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.331396 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e057ced-6312-4433-826b-3db68f422df1-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.331407 4558 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/6e057ced-6312-4433-826b-3db68f422df1-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.331420 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pm8hf\" (UniqueName: \"kubernetes.io/projected/6e057ced-6312-4433-826b-3db68f422df1-kube-api-access-pm8hf\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.331430 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/21607923-3f1e-41c5-86da-43a333ac050c-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.331441 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21607923-3f1e-41c5-86da-43a333ac050c-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.331450 4558 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-g2p6n\" (UniqueName: \"kubernetes.io/projected/21607923-3f1e-41c5-86da-43a333ac050c-kube-api-access-g2p6n\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.331459 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21607923-3f1e-41c5-86da-43a333ac050c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.512609 4558 generic.go:334] "Generic (PLEG): container finished" podID="21607923-3f1e-41c5-86da-43a333ac050c" containerID="a0cdde937fc7d9c08b9a44eccbcf6dd2913c7e2c784cb09a9c39e414a254e024" exitCode=0 Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.512643 4558 generic.go:334] "Generic (PLEG): container finished" podID="21607923-3f1e-41c5-86da-43a333ac050c" containerID="70ca80861b8f90aed6f9f75f21e3b9fd92e85edd4b224c3ab9a29e62e7191a18" exitCode=143 Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.512693 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"21607923-3f1e-41c5-86da-43a333ac050c","Type":"ContainerDied","Data":"a0cdde937fc7d9c08b9a44eccbcf6dd2913c7e2c784cb09a9c39e414a254e024"} Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.512724 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"21607923-3f1e-41c5-86da-43a333ac050c","Type":"ContainerDied","Data":"70ca80861b8f90aed6f9f75f21e3b9fd92e85edd4b224c3ab9a29e62e7191a18"} Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.512735 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"21607923-3f1e-41c5-86da-43a333ac050c","Type":"ContainerDied","Data":"1b5ff821fc3b6c4d9a66b17b9b2efa207c59f4acdc4c53e4c94be46b20fe5b14"} Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.512750 4558 scope.go:117] "RemoveContainer" containerID="a0cdde937fc7d9c08b9a44eccbcf6dd2913c7e2c784cb09a9c39e414a254e024" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.513590 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.514601 4558 generic.go:334] "Generic (PLEG): container finished" podID="6e057ced-6312-4433-826b-3db68f422df1" containerID="e6a4e63c96e557ad9d0f24db6e5510a1ca114d2430e3a70bc7b3cd19368426bb" exitCode=0 Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.514621 4558 generic.go:334] "Generic (PLEG): container finished" podID="6e057ced-6312-4433-826b-3db68f422df1" containerID="08abbee09c12cf3a64d6c392bdb49747bc0b49670d76c631df62291574b4b57b" exitCode=143 Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.514631 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"6e057ced-6312-4433-826b-3db68f422df1","Type":"ContainerDied","Data":"e6a4e63c96e557ad9d0f24db6e5510a1ca114d2430e3a70bc7b3cd19368426bb"} Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.514673 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"6e057ced-6312-4433-826b-3db68f422df1","Type":"ContainerDied","Data":"08abbee09c12cf3a64d6c392bdb49747bc0b49670d76c631df62291574b4b57b"} Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.514687 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"6e057ced-6312-4433-826b-3db68f422df1","Type":"ContainerDied","Data":"02d52b89f63dbcec559e9bd8d3439fbfe41f5ed281c2ec0439bc916951aea92c"} Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.518548 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.540314 4558 scope.go:117] "RemoveContainer" containerID="70ca80861b8f90aed6f9f75f21e3b9fd92e85edd4b224c3ab9a29e62e7191a18" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.559833 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.574248 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.584359 4558 scope.go:117] "RemoveContainer" containerID="a0cdde937fc7d9c08b9a44eccbcf6dd2913c7e2c784cb09a9c39e414a254e024" Jan 20 17:03:51 crc kubenswrapper[4558]: E0120 17:03:51.584907 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a0cdde937fc7d9c08b9a44eccbcf6dd2913c7e2c784cb09a9c39e414a254e024\": container with ID starting with a0cdde937fc7d9c08b9a44eccbcf6dd2913c7e2c784cb09a9c39e414a254e024 not found: ID does not exist" containerID="a0cdde937fc7d9c08b9a44eccbcf6dd2913c7e2c784cb09a9c39e414a254e024" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.584971 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a0cdde937fc7d9c08b9a44eccbcf6dd2913c7e2c784cb09a9c39e414a254e024"} err="failed to get container status \"a0cdde937fc7d9c08b9a44eccbcf6dd2913c7e2c784cb09a9c39e414a254e024\": rpc error: code = NotFound desc = could not find container \"a0cdde937fc7d9c08b9a44eccbcf6dd2913c7e2c784cb09a9c39e414a254e024\": container with ID starting with a0cdde937fc7d9c08b9a44eccbcf6dd2913c7e2c784cb09a9c39e414a254e024 not found: ID does not exist" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.585003 4558 scope.go:117] "RemoveContainer" 
containerID="70ca80861b8f90aed6f9f75f21e3b9fd92e85edd4b224c3ab9a29e62e7191a18" Jan 20 17:03:51 crc kubenswrapper[4558]: E0120 17:03:51.585407 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"70ca80861b8f90aed6f9f75f21e3b9fd92e85edd4b224c3ab9a29e62e7191a18\": container with ID starting with 70ca80861b8f90aed6f9f75f21e3b9fd92e85edd4b224c3ab9a29e62e7191a18 not found: ID does not exist" containerID="70ca80861b8f90aed6f9f75f21e3b9fd92e85edd4b224c3ab9a29e62e7191a18" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.585428 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"70ca80861b8f90aed6f9f75f21e3b9fd92e85edd4b224c3ab9a29e62e7191a18"} err="failed to get container status \"70ca80861b8f90aed6f9f75f21e3b9fd92e85edd4b224c3ab9a29e62e7191a18\": rpc error: code = NotFound desc = could not find container \"70ca80861b8f90aed6f9f75f21e3b9fd92e85edd4b224c3ab9a29e62e7191a18\": container with ID starting with 70ca80861b8f90aed6f9f75f21e3b9fd92e85edd4b224c3ab9a29e62e7191a18 not found: ID does not exist" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.585444 4558 scope.go:117] "RemoveContainer" containerID="a0cdde937fc7d9c08b9a44eccbcf6dd2913c7e2c784cb09a9c39e414a254e024" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.585653 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a0cdde937fc7d9c08b9a44eccbcf6dd2913c7e2c784cb09a9c39e414a254e024"} err="failed to get container status \"a0cdde937fc7d9c08b9a44eccbcf6dd2913c7e2c784cb09a9c39e414a254e024\": rpc error: code = NotFound desc = could not find container \"a0cdde937fc7d9c08b9a44eccbcf6dd2913c7e2c784cb09a9c39e414a254e024\": container with ID starting with a0cdde937fc7d9c08b9a44eccbcf6dd2913c7e2c784cb09a9c39e414a254e024 not found: ID does not exist" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.585672 4558 scope.go:117] "RemoveContainer" containerID="70ca80861b8f90aed6f9f75f21e3b9fd92e85edd4b224c3ab9a29e62e7191a18" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.585999 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"70ca80861b8f90aed6f9f75f21e3b9fd92e85edd4b224c3ab9a29e62e7191a18"} err="failed to get container status \"70ca80861b8f90aed6f9f75f21e3b9fd92e85edd4b224c3ab9a29e62e7191a18\": rpc error: code = NotFound desc = could not find container \"70ca80861b8f90aed6f9f75f21e3b9fd92e85edd4b224c3ab9a29e62e7191a18\": container with ID starting with 70ca80861b8f90aed6f9f75f21e3b9fd92e85edd4b224c3ab9a29e62e7191a18 not found: ID does not exist" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.586036 4558 scope.go:117] "RemoveContainer" containerID="e6a4e63c96e557ad9d0f24db6e5510a1ca114d2430e3a70bc7b3cd19368426bb" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.597496 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.606403 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.610427 4558 scope.go:117] "RemoveContainer" containerID="08abbee09c12cf3a64d6c392bdb49747bc0b49670d76c631df62291574b4b57b" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.614293 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:03:51 crc 
kubenswrapper[4558]: E0120 17:03:51.614965 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="507235e3-a321-4617-aba7-eec9b5ca1cf0" containerName="nova-manage" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.614999 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="507235e3-a321-4617-aba7-eec9b5ca1cf0" containerName="nova-manage" Jan 20 17:03:51 crc kubenswrapper[4558]: E0120 17:03:51.615039 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="21607923-3f1e-41c5-86da-43a333ac050c" containerName="nova-api-log" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.615047 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="21607923-3f1e-41c5-86da-43a333ac050c" containerName="nova-api-log" Jan 20 17:03:51 crc kubenswrapper[4558]: E0120 17:03:51.615067 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e057ced-6312-4433-826b-3db68f422df1" containerName="nova-metadata-metadata" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.615079 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e057ced-6312-4433-826b-3db68f422df1" containerName="nova-metadata-metadata" Jan 20 17:03:51 crc kubenswrapper[4558]: E0120 17:03:51.615090 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e057ced-6312-4433-826b-3db68f422df1" containerName="nova-metadata-log" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.615096 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e057ced-6312-4433-826b-3db68f422df1" containerName="nova-metadata-log" Jan 20 17:03:51 crc kubenswrapper[4558]: E0120 17:03:51.615106 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="21607923-3f1e-41c5-86da-43a333ac050c" containerName="nova-api-api" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.615113 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="21607923-3f1e-41c5-86da-43a333ac050c" containerName="nova-api-api" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.615452 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e057ced-6312-4433-826b-3db68f422df1" containerName="nova-metadata-metadata" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.615484 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="21607923-3f1e-41c5-86da-43a333ac050c" containerName="nova-api-log" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.615501 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e057ced-6312-4433-826b-3db68f422df1" containerName="nova-metadata-log" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.615514 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="507235e3-a321-4617-aba7-eec9b5ca1cf0" containerName="nova-manage" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.615522 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="21607923-3f1e-41c5-86da-43a333ac050c" containerName="nova-api-api" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.617055 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.619436 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-api-config-data" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.623374 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.626239 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.628215 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-metadata-config-data" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.628463 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-metadata-internal-svc" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.642802 4558 scope.go:117] "RemoveContainer" containerID="e6a4e63c96e557ad9d0f24db6e5510a1ca114d2430e3a70bc7b3cd19368426bb" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.643030 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b67b016-0bfa-42ec-8c8a-8ea7afb366ac-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"8b67b016-0bfa-42ec-8c8a-8ea7afb366ac\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.643119 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8b67b016-0bfa-42ec-8c8a-8ea7afb366ac-config-data\") pod \"nova-metadata-0\" (UID: \"8b67b016-0bfa-42ec-8c8a-8ea7afb366ac\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.643236 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8b67b016-0bfa-42ec-8c8a-8ea7afb366ac-logs\") pod \"nova-metadata-0\" (UID: \"8b67b016-0bfa-42ec-8c8a-8ea7afb366ac\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.643590 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b67b016-0bfa-42ec-8c8a-8ea7afb366ac-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"8b67b016-0bfa-42ec-8c8a-8ea7afb366ac\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:03:51 crc kubenswrapper[4558]: E0120 17:03:51.643674 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e6a4e63c96e557ad9d0f24db6e5510a1ca114d2430e3a70bc7b3cd19368426bb\": container with ID starting with e6a4e63c96e557ad9d0f24db6e5510a1ca114d2430e3a70bc7b3cd19368426bb not found: ID does not exist" containerID="e6a4e63c96e557ad9d0f24db6e5510a1ca114d2430e3a70bc7b3cd19368426bb" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.643721 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/64da18fe-7d5d-4ab0-a28b-9edc20799711-logs\") pod \"nova-api-0\" (UID: \"64da18fe-7d5d-4ab0-a28b-9edc20799711\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:03:51 crc 
kubenswrapper[4558]: I0120 17:03:51.643716 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e6a4e63c96e557ad9d0f24db6e5510a1ca114d2430e3a70bc7b3cd19368426bb"} err="failed to get container status \"e6a4e63c96e557ad9d0f24db6e5510a1ca114d2430e3a70bc7b3cd19368426bb\": rpc error: code = NotFound desc = could not find container \"e6a4e63c96e557ad9d0f24db6e5510a1ca114d2430e3a70bc7b3cd19368426bb\": container with ID starting with e6a4e63c96e557ad9d0f24db6e5510a1ca114d2430e3a70bc7b3cd19368426bb not found: ID does not exist" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.643749 4558 scope.go:117] "RemoveContainer" containerID="08abbee09c12cf3a64d6c392bdb49747bc0b49670d76c631df62291574b4b57b" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.643760 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64da18fe-7d5d-4ab0-a28b-9edc20799711-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"64da18fe-7d5d-4ab0-a28b-9edc20799711\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.643789 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tbxqq\" (UniqueName: \"kubernetes.io/projected/64da18fe-7d5d-4ab0-a28b-9edc20799711-kube-api-access-tbxqq\") pod \"nova-api-0\" (UID: \"64da18fe-7d5d-4ab0-a28b-9edc20799711\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.643811 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64da18fe-7d5d-4ab0-a28b-9edc20799711-config-data\") pod \"nova-api-0\" (UID: \"64da18fe-7d5d-4ab0-a28b-9edc20799711\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.643833 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pwbdx\" (UniqueName: \"kubernetes.io/projected/8b67b016-0bfa-42ec-8c8a-8ea7afb366ac-kube-api-access-pwbdx\") pod \"nova-metadata-0\" (UID: \"8b67b016-0bfa-42ec-8c8a-8ea7afb366ac\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.644112 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:03:51 crc kubenswrapper[4558]: E0120 17:03:51.644155 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"08abbee09c12cf3a64d6c392bdb49747bc0b49670d76c631df62291574b4b57b\": container with ID starting with 08abbee09c12cf3a64d6c392bdb49747bc0b49670d76c631df62291574b4b57b not found: ID does not exist" containerID="08abbee09c12cf3a64d6c392bdb49747bc0b49670d76c631df62291574b4b57b" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.644223 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"08abbee09c12cf3a64d6c392bdb49747bc0b49670d76c631df62291574b4b57b"} err="failed to get container status \"08abbee09c12cf3a64d6c392bdb49747bc0b49670d76c631df62291574b4b57b\": rpc error: code = NotFound desc = could not find container \"08abbee09c12cf3a64d6c392bdb49747bc0b49670d76c631df62291574b4b57b\": container with ID starting with 08abbee09c12cf3a64d6c392bdb49747bc0b49670d76c631df62291574b4b57b not found: ID does not exist" Jan 20 17:03:51 
crc kubenswrapper[4558]: I0120 17:03:51.644252 4558 scope.go:117] "RemoveContainer" containerID="e6a4e63c96e557ad9d0f24db6e5510a1ca114d2430e3a70bc7b3cd19368426bb" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.645349 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e6a4e63c96e557ad9d0f24db6e5510a1ca114d2430e3a70bc7b3cd19368426bb"} err="failed to get container status \"e6a4e63c96e557ad9d0f24db6e5510a1ca114d2430e3a70bc7b3cd19368426bb\": rpc error: code = NotFound desc = could not find container \"e6a4e63c96e557ad9d0f24db6e5510a1ca114d2430e3a70bc7b3cd19368426bb\": container with ID starting with e6a4e63c96e557ad9d0f24db6e5510a1ca114d2430e3a70bc7b3cd19368426bb not found: ID does not exist" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.645452 4558 scope.go:117] "RemoveContainer" containerID="08abbee09c12cf3a64d6c392bdb49747bc0b49670d76c631df62291574b4b57b" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.647528 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"08abbee09c12cf3a64d6c392bdb49747bc0b49670d76c631df62291574b4b57b"} err="failed to get container status \"08abbee09c12cf3a64d6c392bdb49747bc0b49670d76c631df62291574b4b57b\": rpc error: code = NotFound desc = could not find container \"08abbee09c12cf3a64d6c392bdb49747bc0b49670d76c631df62291574b4b57b\": container with ID starting with 08abbee09c12cf3a64d6c392bdb49747bc0b49670d76c631df62291574b4b57b not found: ID does not exist" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.663612 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.747124 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8b67b016-0bfa-42ec-8c8a-8ea7afb366ac-config-data\") pod \"nova-metadata-0\" (UID: \"8b67b016-0bfa-42ec-8c8a-8ea7afb366ac\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.747234 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8b67b016-0bfa-42ec-8c8a-8ea7afb366ac-logs\") pod \"nova-metadata-0\" (UID: \"8b67b016-0bfa-42ec-8c8a-8ea7afb366ac\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.747308 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b67b016-0bfa-42ec-8c8a-8ea7afb366ac-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"8b67b016-0bfa-42ec-8c8a-8ea7afb366ac\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.747442 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/64da18fe-7d5d-4ab0-a28b-9edc20799711-logs\") pod \"nova-api-0\" (UID: \"64da18fe-7d5d-4ab0-a28b-9edc20799711\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.747493 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64da18fe-7d5d-4ab0-a28b-9edc20799711-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"64da18fe-7d5d-4ab0-a28b-9edc20799711\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:03:51 crc 
kubenswrapper[4558]: I0120 17:03:51.747521 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tbxqq\" (UniqueName: \"kubernetes.io/projected/64da18fe-7d5d-4ab0-a28b-9edc20799711-kube-api-access-tbxqq\") pod \"nova-api-0\" (UID: \"64da18fe-7d5d-4ab0-a28b-9edc20799711\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.747541 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64da18fe-7d5d-4ab0-a28b-9edc20799711-config-data\") pod \"nova-api-0\" (UID: \"64da18fe-7d5d-4ab0-a28b-9edc20799711\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.747558 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pwbdx\" (UniqueName: \"kubernetes.io/projected/8b67b016-0bfa-42ec-8c8a-8ea7afb366ac-kube-api-access-pwbdx\") pod \"nova-metadata-0\" (UID: \"8b67b016-0bfa-42ec-8c8a-8ea7afb366ac\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.747586 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b67b016-0bfa-42ec-8c8a-8ea7afb366ac-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"8b67b016-0bfa-42ec-8c8a-8ea7afb366ac\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.748058 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/64da18fe-7d5d-4ab0-a28b-9edc20799711-logs\") pod \"nova-api-0\" (UID: \"64da18fe-7d5d-4ab0-a28b-9edc20799711\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.748335 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8b67b016-0bfa-42ec-8c8a-8ea7afb366ac-logs\") pod \"nova-metadata-0\" (UID: \"8b67b016-0bfa-42ec-8c8a-8ea7afb366ac\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.755087 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b67b016-0bfa-42ec-8c8a-8ea7afb366ac-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"8b67b016-0bfa-42ec-8c8a-8ea7afb366ac\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.755119 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b67b016-0bfa-42ec-8c8a-8ea7afb366ac-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"8b67b016-0bfa-42ec-8c8a-8ea7afb366ac\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.755143 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64da18fe-7d5d-4ab0-a28b-9edc20799711-config-data\") pod \"nova-api-0\" (UID: \"64da18fe-7d5d-4ab0-a28b-9edc20799711\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.755149 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64da18fe-7d5d-4ab0-a28b-9edc20799711-combined-ca-bundle\") pod \"nova-api-0\" (UID: 
\"64da18fe-7d5d-4ab0-a28b-9edc20799711\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.757055 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8b67b016-0bfa-42ec-8c8a-8ea7afb366ac-config-data\") pod \"nova-metadata-0\" (UID: \"8b67b016-0bfa-42ec-8c8a-8ea7afb366ac\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.763657 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pwbdx\" (UniqueName: \"kubernetes.io/projected/8b67b016-0bfa-42ec-8c8a-8ea7afb366ac-kube-api-access-pwbdx\") pod \"nova-metadata-0\" (UID: \"8b67b016-0bfa-42ec-8c8a-8ea7afb366ac\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.764362 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tbxqq\" (UniqueName: \"kubernetes.io/projected/64da18fe-7d5d-4ab0-a28b-9edc20799711-kube-api-access-tbxqq\") pod \"nova-api-0\" (UID: \"64da18fe-7d5d-4ab0-a28b-9edc20799711\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.947379 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:03:51 crc kubenswrapper[4558]: I0120 17:03:51.956957 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:03:52 crc kubenswrapper[4558]: I0120 17:03:52.387704 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:03:52 crc kubenswrapper[4558]: I0120 17:03:52.448829 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:03:52 crc kubenswrapper[4558]: W0120 17:03:52.450155 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8b67b016_0bfa_42ec_8c8a_8ea7afb366ac.slice/crio-eaa202950dcd12b1c87e651ff0733331eb771ec431dee9f29a17f6beafd2829c WatchSource:0}: Error finding container eaa202950dcd12b1c87e651ff0733331eb771ec431dee9f29a17f6beafd2829c: Status 404 returned error can't find the container with id eaa202950dcd12b1c87e651ff0733331eb771ec431dee9f29a17f6beafd2829c Jan 20 17:03:52 crc kubenswrapper[4558]: I0120 17:03:52.527400 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"8b67b016-0bfa-42ec-8c8a-8ea7afb366ac","Type":"ContainerStarted","Data":"eaa202950dcd12b1c87e651ff0733331eb771ec431dee9f29a17f6beafd2829c"} Jan 20 17:03:52 crc kubenswrapper[4558]: I0120 17:03:52.529580 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"64da18fe-7d5d-4ab0-a28b-9edc20799711","Type":"ContainerStarted","Data":"540a46873df86712f782dc0899c18bb3764b1ca19ca5d1b5ccd1f8eaecee2ee8"} Jan 20 17:03:52 crc kubenswrapper[4558]: I0120 17:03:52.577777 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="21607923-3f1e-41c5-86da-43a333ac050c" path="/var/lib/kubelet/pods/21607923-3f1e-41c5-86da-43a333ac050c/volumes" Jan 20 17:03:52 crc kubenswrapper[4558]: I0120 17:03:52.578648 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6e057ced-6312-4433-826b-3db68f422df1" path="/var/lib/kubelet/pods/6e057ced-6312-4433-826b-3db68f422df1/volumes" Jan 20 17:03:53 
crc kubenswrapper[4558]: I0120 17:03:53.541257 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"64da18fe-7d5d-4ab0-a28b-9edc20799711","Type":"ContainerStarted","Data":"a251c18c801ffe160629886a4b6b4210a9566cb9ca6a002cb04dc61b4e70e92e"} Jan 20 17:03:53 crc kubenswrapper[4558]: I0120 17:03:53.543362 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"64da18fe-7d5d-4ab0-a28b-9edc20799711","Type":"ContainerStarted","Data":"23107775542e5733d1e13dad30a984dcfbdd2dbdf2408e2ba49363edba3604e6"} Jan 20 17:03:53 crc kubenswrapper[4558]: I0120 17:03:53.543400 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"8b67b016-0bfa-42ec-8c8a-8ea7afb366ac","Type":"ContainerStarted","Data":"df1010392aaec0a4a83f5a826cb22e3e0ee83e3dd3dcbe213e6b5fa4528c4b32"} Jan 20 17:03:53 crc kubenswrapper[4558]: I0120 17:03:53.543414 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"8b67b016-0bfa-42ec-8c8a-8ea7afb366ac","Type":"ContainerStarted","Data":"6cfda644c89e82e0c471df3db327f1fa97a02c27b7636bf22e85b9d3fe0134fe"} Jan 20 17:03:53 crc kubenswrapper[4558]: I0120 17:03:53.557814 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-api-0" podStartSLOduration=2.557799824 podStartE2EDuration="2.557799824s" podCreationTimestamp="2026-01-20 17:03:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:03:53.557788172 +0000 UTC m=+1327.318126139" watchObservedRunningTime="2026-01-20 17:03:53.557799824 +0000 UTC m=+1327.318137792" Jan 20 17:03:53 crc kubenswrapper[4558]: I0120 17:03:53.577392 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-metadata-0" podStartSLOduration=2.577380913 podStartE2EDuration="2.577380913s" podCreationTimestamp="2026-01-20 17:03:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:03:53.573750376 +0000 UTC m=+1327.334088343" watchObservedRunningTime="2026-01-20 17:03:53.577380913 +0000 UTC m=+1327.337718880" Jan 20 17:03:54 crc kubenswrapper[4558]: I0120 17:03:54.289011 4558 scope.go:117] "RemoveContainer" containerID="f01eee54b0096cee030175949e49bf2c2cae19d2175a23fa1d7f52c350aef666" Jan 20 17:03:54 crc kubenswrapper[4558]: I0120 17:03:54.344180 4558 scope.go:117] "RemoveContainer" containerID="e4a4514a229126f0ce1c13f448b71f47a96fadb42e52108d7c6e75d1a62f839b" Jan 20 17:03:54 crc kubenswrapper[4558]: I0120 17:03:54.367784 4558 scope.go:117] "RemoveContainer" containerID="6b4dcc6e19550c83acc36231d55fd104b76cb31fdde78c00b5a97e00712e1b7b" Jan 20 17:03:54 crc kubenswrapper[4558]: I0120 17:03:54.395181 4558 scope.go:117] "RemoveContainer" containerID="f0cbb47b3b8abf3b84596f44ff9f49275729177b2875c4e8e764e817b8d6eb91" Jan 20 17:03:54 crc kubenswrapper[4558]: I0120 17:03:54.457581 4558 scope.go:117] "RemoveContainer" containerID="132812cda13f3aa921d317d99a570a33e0780093d77639760f5e2e76e1f5cc09" Jan 20 17:03:54 crc kubenswrapper[4558]: I0120 17:03:54.496427 4558 scope.go:117] "RemoveContainer" containerID="7405f2673cf5b03bf250ba407fb04abd2bfacbb3ce9e9f78626cf77ca6e5282f" Jan 20 17:03:54 crc kubenswrapper[4558]: I0120 17:03:54.512595 4558 scope.go:117] "RemoveContainer" 
containerID="218f67d660ed8f9f78b141cd28c9a5dcb77d2f1c59769fc787f836f2d3a884a4" Jan 20 17:03:54 crc kubenswrapper[4558]: I0120 17:03:54.541926 4558 scope.go:117] "RemoveContainer" containerID="2a069dfdf3bb503b2cf66c545612acb06ac462ac0963d7e87cbba6a225f63a2b" Jan 20 17:03:54 crc kubenswrapper[4558]: I0120 17:03:54.557385 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:03:54 crc kubenswrapper[4558]: I0120 17:03:54.567077 4558 generic.go:334] "Generic (PLEG): container finished" podID="b667ea48-0add-41f2-8c52-75d06c1312fd" containerID="534b771198d1dbaf2b6aacfd6763fdbe6efd5ed7600e072171f10f96a99a85d4" exitCode=0 Jan 20 17:03:54 crc kubenswrapper[4558]: I0120 17:03:54.567251 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:03:54 crc kubenswrapper[4558]: I0120 17:03:54.567608 4558 scope.go:117] "RemoveContainer" containerID="329b7d530c48ee6b77abaf4d3f063c42bc1f88503f6401038f6906a9d41fa856" Jan 20 17:03:54 crc kubenswrapper[4558]: I0120 17:03:54.583251 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"b667ea48-0add-41f2-8c52-75d06c1312fd","Type":"ContainerDied","Data":"534b771198d1dbaf2b6aacfd6763fdbe6efd5ed7600e072171f10f96a99a85d4"} Jan 20 17:03:54 crc kubenswrapper[4558]: I0120 17:03:54.583335 4558 scope.go:117] "RemoveContainer" containerID="534b771198d1dbaf2b6aacfd6763fdbe6efd5ed7600e072171f10f96a99a85d4" Jan 20 17:03:54 crc kubenswrapper[4558]: I0120 17:03:54.596515 4558 scope.go:117] "RemoveContainer" containerID="4f8ad4db714d7249bbcc2a75589c40c4641e949e1fac475245dd93d0418a8537" Jan 20 17:03:54 crc kubenswrapper[4558]: I0120 17:03:54.609021 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b667ea48-0add-41f2-8c52-75d06c1312fd-combined-ca-bundle\") pod \"b667ea48-0add-41f2-8c52-75d06c1312fd\" (UID: \"b667ea48-0add-41f2-8c52-75d06c1312fd\") " Jan 20 17:03:54 crc kubenswrapper[4558]: I0120 17:03:54.609290 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tjcvp\" (UniqueName: \"kubernetes.io/projected/b667ea48-0add-41f2-8c52-75d06c1312fd-kube-api-access-tjcvp\") pod \"b667ea48-0add-41f2-8c52-75d06c1312fd\" (UID: \"b667ea48-0add-41f2-8c52-75d06c1312fd\") " Jan 20 17:03:54 crc kubenswrapper[4558]: I0120 17:03:54.609401 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b667ea48-0add-41f2-8c52-75d06c1312fd-config-data\") pod \"b667ea48-0add-41f2-8c52-75d06c1312fd\" (UID: \"b667ea48-0add-41f2-8c52-75d06c1312fd\") " Jan 20 17:03:54 crc kubenswrapper[4558]: I0120 17:03:54.623765 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b667ea48-0add-41f2-8c52-75d06c1312fd-kube-api-access-tjcvp" (OuterVolumeSpecName: "kube-api-access-tjcvp") pod "b667ea48-0add-41f2-8c52-75d06c1312fd" (UID: "b667ea48-0add-41f2-8c52-75d06c1312fd"). InnerVolumeSpecName "kube-api-access-tjcvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:03:54 crc kubenswrapper[4558]: E0120 17:03:54.634348 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b667ea48-0add-41f2-8c52-75d06c1312fd-config-data podName:b667ea48-0add-41f2-8c52-75d06c1312fd nodeName:}" failed. 
No retries permitted until 2026-01-20 17:03:55.134297487 +0000 UTC m=+1328.894635454 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "config-data" (UniqueName: "kubernetes.io/secret/b667ea48-0add-41f2-8c52-75d06c1312fd-config-data") pod "b667ea48-0add-41f2-8c52-75d06c1312fd" (UID: "b667ea48-0add-41f2-8c52-75d06c1312fd") : error deleting /var/lib/kubelet/pods/b667ea48-0add-41f2-8c52-75d06c1312fd/volume-subpaths: remove /var/lib/kubelet/pods/b667ea48-0add-41f2-8c52-75d06c1312fd/volume-subpaths: no such file or directory Jan 20 17:03:54 crc kubenswrapper[4558]: I0120 17:03:54.634833 4558 scope.go:117] "RemoveContainer" containerID="ce1f05df89b8a08d15ce18251031109452f2c5fc73b5986865a48c1a2ee95f36" Jan 20 17:03:54 crc kubenswrapper[4558]: I0120 17:03:54.636992 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b667ea48-0add-41f2-8c52-75d06c1312fd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b667ea48-0add-41f2-8c52-75d06c1312fd" (UID: "b667ea48-0add-41f2-8c52-75d06c1312fd"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:03:54 crc kubenswrapper[4558]: I0120 17:03:54.657297 4558 scope.go:117] "RemoveContainer" containerID="2f2f250179bc5be2a7d3735356c0ccab7139bbcd6422209731717b7c77640b5c" Jan 20 17:03:54 crc kubenswrapper[4558]: I0120 17:03:54.676145 4558 scope.go:117] "RemoveContainer" containerID="0af9671e129d86a8a04ea896394c1f0ccdaf1e564becf0be8550e2230fea5ee9" Jan 20 17:03:54 crc kubenswrapper[4558]: I0120 17:03:54.705500 4558 scope.go:117] "RemoveContainer" containerID="dd12e62385b08aec22ded13c6449e48eeba8060a664335bfc549ee00f3e80b1a" Jan 20 17:03:54 crc kubenswrapper[4558]: I0120 17:03:54.712070 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b667ea48-0add-41f2-8c52-75d06c1312fd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:54 crc kubenswrapper[4558]: I0120 17:03:54.712096 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tjcvp\" (UniqueName: \"kubernetes.io/projected/b667ea48-0add-41f2-8c52-75d06c1312fd-kube-api-access-tjcvp\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:54 crc kubenswrapper[4558]: I0120 17:03:54.722266 4558 scope.go:117] "RemoveContainer" containerID="06a97eded1d2e3329c2e57b788138eac12416ca0b8878a26d14405b5accd7c67" Jan 20 17:03:54 crc kubenswrapper[4558]: I0120 17:03:54.743193 4558 scope.go:117] "RemoveContainer" containerID="3c5d4636841c553689fe99af2f7d7d5a269bad5dc903333adc71ad557501864a" Jan 20 17:03:54 crc kubenswrapper[4558]: I0120 17:03:54.771084 4558 scope.go:117] "RemoveContainer" containerID="ff98c6e0fa3a0b949cb3e04983d372048c98008841cfee160a4116cbb2002809" Jan 20 17:03:55 crc kubenswrapper[4558]: I0120 17:03:55.220371 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b667ea48-0add-41f2-8c52-75d06c1312fd-config-data\") pod \"b667ea48-0add-41f2-8c52-75d06c1312fd\" (UID: \"b667ea48-0add-41f2-8c52-75d06c1312fd\") " Jan 20 17:03:55 crc kubenswrapper[4558]: I0120 17:03:55.224072 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b667ea48-0add-41f2-8c52-75d06c1312fd-config-data" (OuterVolumeSpecName: "config-data") pod "b667ea48-0add-41f2-8c52-75d06c1312fd" (UID: "b667ea48-0add-41f2-8c52-75d06c1312fd"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:03:55 crc kubenswrapper[4558]: I0120 17:03:55.322370 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b667ea48-0add-41f2-8c52-75d06c1312fd-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:03:55 crc kubenswrapper[4558]: I0120 17:03:55.498587 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:03:55 crc kubenswrapper[4558]: I0120 17:03:55.510772 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:03:55 crc kubenswrapper[4558]: I0120 17:03:55.519244 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:03:55 crc kubenswrapper[4558]: E0120 17:03:55.519714 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b667ea48-0add-41f2-8c52-75d06c1312fd" containerName="nova-scheduler-scheduler" Jan 20 17:03:55 crc kubenswrapper[4558]: I0120 17:03:55.519738 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b667ea48-0add-41f2-8c52-75d06c1312fd" containerName="nova-scheduler-scheduler" Jan 20 17:03:55 crc kubenswrapper[4558]: I0120 17:03:55.519955 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b667ea48-0add-41f2-8c52-75d06c1312fd" containerName="nova-scheduler-scheduler" Jan 20 17:03:55 crc kubenswrapper[4558]: I0120 17:03:55.520702 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:03:55 crc kubenswrapper[4558]: I0120 17:03:55.522913 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-scheduler-config-data" Jan 20 17:03:55 crc kubenswrapper[4558]: I0120 17:03:55.531837 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:03:55 crc kubenswrapper[4558]: I0120 17:03:55.626729 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-shzrt\" (UniqueName: \"kubernetes.io/projected/c3bd0148-350f-4351-8a04-a3009cfdeb29-kube-api-access-shzrt\") pod \"nova-scheduler-0\" (UID: \"c3bd0148-350f-4351-8a04-a3009cfdeb29\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:03:55 crc kubenswrapper[4558]: I0120 17:03:55.626791 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c3bd0148-350f-4351-8a04-a3009cfdeb29-config-data\") pod \"nova-scheduler-0\" (UID: \"c3bd0148-350f-4351-8a04-a3009cfdeb29\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:03:55 crc kubenswrapper[4558]: I0120 17:03:55.626949 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3bd0148-350f-4351-8a04-a3009cfdeb29-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"c3bd0148-350f-4351-8a04-a3009cfdeb29\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:03:55 crc kubenswrapper[4558]: I0120 17:03:55.728520 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-shzrt\" (UniqueName: \"kubernetes.io/projected/c3bd0148-350f-4351-8a04-a3009cfdeb29-kube-api-access-shzrt\") pod \"nova-scheduler-0\" (UID: \"c3bd0148-350f-4351-8a04-a3009cfdeb29\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 
17:03:55 crc kubenswrapper[4558]: I0120 17:03:55.728594 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c3bd0148-350f-4351-8a04-a3009cfdeb29-config-data\") pod \"nova-scheduler-0\" (UID: \"c3bd0148-350f-4351-8a04-a3009cfdeb29\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:03:55 crc kubenswrapper[4558]: I0120 17:03:55.728695 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3bd0148-350f-4351-8a04-a3009cfdeb29-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"c3bd0148-350f-4351-8a04-a3009cfdeb29\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:03:55 crc kubenswrapper[4558]: I0120 17:03:55.732473 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3bd0148-350f-4351-8a04-a3009cfdeb29-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"c3bd0148-350f-4351-8a04-a3009cfdeb29\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:03:55 crc kubenswrapper[4558]: I0120 17:03:55.733010 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c3bd0148-350f-4351-8a04-a3009cfdeb29-config-data\") pod \"nova-scheduler-0\" (UID: \"c3bd0148-350f-4351-8a04-a3009cfdeb29\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:03:55 crc kubenswrapper[4558]: I0120 17:03:55.744628 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-shzrt\" (UniqueName: \"kubernetes.io/projected/c3bd0148-350f-4351-8a04-a3009cfdeb29-kube-api-access-shzrt\") pod \"nova-scheduler-0\" (UID: \"c3bd0148-350f-4351-8a04-a3009cfdeb29\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:03:55 crc kubenswrapper[4558]: I0120 17:03:55.839148 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:03:56 crc kubenswrapper[4558]: I0120 17:03:56.238300 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:03:56 crc kubenswrapper[4558]: W0120 17:03:56.245757 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc3bd0148_350f_4351_8a04_a3009cfdeb29.slice/crio-dcc3820138343191288b84a22d14118f1a07aa0ece4aaeabfa81478c0881bcdb WatchSource:0}: Error finding container dcc3820138343191288b84a22d14118f1a07aa0ece4aaeabfa81478c0881bcdb: Status 404 returned error can't find the container with id dcc3820138343191288b84a22d14118f1a07aa0ece4aaeabfa81478c0881bcdb Jan 20 17:03:56 crc kubenswrapper[4558]: I0120 17:03:56.576035 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b667ea48-0add-41f2-8c52-75d06c1312fd" path="/var/lib/kubelet/pods/b667ea48-0add-41f2-8c52-75d06c1312fd/volumes" Jan 20 17:03:56 crc kubenswrapper[4558]: I0120 17:03:56.587110 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"c3bd0148-350f-4351-8a04-a3009cfdeb29","Type":"ContainerStarted","Data":"f94960e9c0eda36da8e549e42b6b9637878691b355b71f154c39b91497ab764e"} Jan 20 17:03:56 crc kubenswrapper[4558]: I0120 17:03:56.587157 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"c3bd0148-350f-4351-8a04-a3009cfdeb29","Type":"ContainerStarted","Data":"dcc3820138343191288b84a22d14118f1a07aa0ece4aaeabfa81478c0881bcdb"} Jan 20 17:03:56 crc kubenswrapper[4558]: I0120 17:03:56.602503 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-scheduler-0" podStartSLOduration=1.602482731 podStartE2EDuration="1.602482731s" podCreationTimestamp="2026-01-20 17:03:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:03:56.599519319 +0000 UTC m=+1330.359857287" watchObservedRunningTime="2026-01-20 17:03:56.602482731 +0000 UTC m=+1330.362820698" Jan 20 17:03:56 crc kubenswrapper[4558]: I0120 17:03:56.957101 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:03:56 crc kubenswrapper[4558]: I0120 17:03:56.957199 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:03:58 crc kubenswrapper[4558]: I0120 17:03:58.917989 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:04:00 crc kubenswrapper[4558]: I0120 17:04:00.839745 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:04:01 crc kubenswrapper[4558]: I0120 17:04:01.948520 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:04:01 crc kubenswrapper[4558]: I0120 17:04:01.948559 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:04:01 crc kubenswrapper[4558]: I0120 17:04:01.957267 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:04:01 crc kubenswrapper[4558]: I0120 17:04:01.957313 4558 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:04:03 crc kubenswrapper[4558]: I0120 17:04:03.041923 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" podUID="64da18fe-7d5d-4ab0-a28b-9edc20799711" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.1.24:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 20 17:04:03 crc kubenswrapper[4558]: I0120 17:04:03.041964 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-metadata-0" podUID="8b67b016-0bfa-42ec-8c8a-8ea7afb366ac" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.1.25:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:04:03 crc kubenswrapper[4558]: I0120 17:04:03.041988 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" podUID="64da18fe-7d5d-4ab0-a28b-9edc20799711" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.1.24:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 20 17:04:03 crc kubenswrapper[4558]: I0120 17:04:03.042003 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-metadata-0" podUID="8b67b016-0bfa-42ec-8c8a-8ea7afb366ac" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.1.25:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:04:04 crc kubenswrapper[4558]: I0120 17:04:04.691732 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:04:05 crc kubenswrapper[4558]: I0120 17:04:05.840315 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:04:05 crc kubenswrapper[4558]: I0120 17:04:05.862350 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:04:06 crc kubenswrapper[4558]: I0120 17:04:06.688628 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:04:11 crc kubenswrapper[4558]: I0120 17:04:11.950931 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:04:11 crc kubenswrapper[4558]: I0120 17:04:11.951551 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:04:11 crc kubenswrapper[4558]: I0120 17:04:11.952631 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:04:11 crc kubenswrapper[4558]: I0120 17:04:11.953639 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:04:11 crc kubenswrapper[4558]: I0120 17:04:11.961868 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:04:11 crc kubenswrapper[4558]: I0120 17:04:11.961908 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:04:11 crc kubenswrapper[4558]: I0120 17:04:11.967219 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="ready" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:04:11 crc kubenswrapper[4558]: I0120 17:04:11.971927 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:04:12 crc kubenswrapper[4558]: I0120 17:04:12.704877 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:04:12 crc kubenswrapper[4558]: I0120 17:04:12.707395 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:04:14 crc kubenswrapper[4558]: I0120 17:04:14.242008 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:04:14 crc kubenswrapper[4558]: I0120 17:04:14.242817 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="ef93decf-78a4-4439-bac3-36f5720d0589" containerName="ceilometer-central-agent" containerID="cri-o://aceb6a1c812a55b8382e720fd667b1bce3e5e8b72e63d66d5d61404d004d5633" gracePeriod=30 Jan 20 17:04:14 crc kubenswrapper[4558]: I0120 17:04:14.242932 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="ef93decf-78a4-4439-bac3-36f5720d0589" containerName="proxy-httpd" containerID="cri-o://05610e58069401e608b04b0fe07c9855dcd26069b41e0a19ce0940290d2c42b4" gracePeriod=30 Jan 20 17:04:14 crc kubenswrapper[4558]: I0120 17:04:14.242972 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="ef93decf-78a4-4439-bac3-36f5720d0589" containerName="sg-core" containerID="cri-o://063770fda370183252496062f7a4063e3aa904aa813a1af436066509d93e1b0d" gracePeriod=30 Jan 20 17:04:14 crc kubenswrapper[4558]: I0120 17:04:14.243003 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="ef93decf-78a4-4439-bac3-36f5720d0589" containerName="ceilometer-notification-agent" containerID="cri-o://d7039d4cfe22846fb1df116f46327e92f5a4fd9f2f28553fea21b2bc819b0c7e" gracePeriod=30 Jan 20 17:04:14 crc kubenswrapper[4558]: I0120 17:04:14.719255 4558 generic.go:334] "Generic (PLEG): container finished" podID="ef93decf-78a4-4439-bac3-36f5720d0589" containerID="05610e58069401e608b04b0fe07c9855dcd26069b41e0a19ce0940290d2c42b4" exitCode=0 Jan 20 17:04:14 crc kubenswrapper[4558]: I0120 17:04:14.719558 4558 generic.go:334] "Generic (PLEG): container finished" podID="ef93decf-78a4-4439-bac3-36f5720d0589" containerID="063770fda370183252496062f7a4063e3aa904aa813a1af436066509d93e1b0d" exitCode=2 Jan 20 17:04:14 crc kubenswrapper[4558]: I0120 17:04:14.719568 4558 generic.go:334] "Generic (PLEG): container finished" podID="ef93decf-78a4-4439-bac3-36f5720d0589" containerID="aceb6a1c812a55b8382e720fd667b1bce3e5e8b72e63d66d5d61404d004d5633" exitCode=0 Jan 20 17:04:14 crc kubenswrapper[4558]: I0120 17:04:14.719334 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"ef93decf-78a4-4439-bac3-36f5720d0589","Type":"ContainerDied","Data":"05610e58069401e608b04b0fe07c9855dcd26069b41e0a19ce0940290d2c42b4"} Jan 20 17:04:14 crc kubenswrapper[4558]: I0120 17:04:14.719655 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" 
event={"ID":"ef93decf-78a4-4439-bac3-36f5720d0589","Type":"ContainerDied","Data":"063770fda370183252496062f7a4063e3aa904aa813a1af436066509d93e1b0d"} Jan 20 17:04:14 crc kubenswrapper[4558]: I0120 17:04:14.719670 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"ef93decf-78a4-4439-bac3-36f5720d0589","Type":"ContainerDied","Data":"aceb6a1c812a55b8382e720fd667b1bce3e5e8b72e63d66d5d61404d004d5633"} Jan 20 17:04:14 crc kubenswrapper[4558]: I0120 17:04:14.721697 4558 generic.go:334] "Generic (PLEG): container finished" podID="133f3509-3c18-4a9d-94fe-8026a3c77b12" containerID="44b948c3133051a2ebbf13f37804f61fcca40240d5de185a3c5506b11479d72f" exitCode=137 Jan 20 17:04:14 crc kubenswrapper[4558]: I0120 17:04:14.721726 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"133f3509-3c18-4a9d-94fe-8026a3c77b12","Type":"ContainerDied","Data":"44b948c3133051a2ebbf13f37804f61fcca40240d5de185a3c5506b11479d72f"} Jan 20 17:04:14 crc kubenswrapper[4558]: I0120 17:04:14.776041 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:04:14 crc kubenswrapper[4558]: I0120 17:04:14.809868 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/133f3509-3c18-4a9d-94fe-8026a3c77b12-combined-ca-bundle\") pod \"133f3509-3c18-4a9d-94fe-8026a3c77b12\" (UID: \"133f3509-3c18-4a9d-94fe-8026a3c77b12\") " Jan 20 17:04:14 crc kubenswrapper[4558]: I0120 17:04:14.809913 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2sr2f\" (UniqueName: \"kubernetes.io/projected/133f3509-3c18-4a9d-94fe-8026a3c77b12-kube-api-access-2sr2f\") pod \"133f3509-3c18-4a9d-94fe-8026a3c77b12\" (UID: \"133f3509-3c18-4a9d-94fe-8026a3c77b12\") " Jan 20 17:04:14 crc kubenswrapper[4558]: I0120 17:04:14.810042 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/133f3509-3c18-4a9d-94fe-8026a3c77b12-config-data\") pod \"133f3509-3c18-4a9d-94fe-8026a3c77b12\" (UID: \"133f3509-3c18-4a9d-94fe-8026a3c77b12\") " Jan 20 17:04:14 crc kubenswrapper[4558]: I0120 17:04:14.816032 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/133f3509-3c18-4a9d-94fe-8026a3c77b12-kube-api-access-2sr2f" (OuterVolumeSpecName: "kube-api-access-2sr2f") pod "133f3509-3c18-4a9d-94fe-8026a3c77b12" (UID: "133f3509-3c18-4a9d-94fe-8026a3c77b12"). InnerVolumeSpecName "kube-api-access-2sr2f". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:04:14 crc kubenswrapper[4558]: I0120 17:04:14.832064 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/133f3509-3c18-4a9d-94fe-8026a3c77b12-config-data" (OuterVolumeSpecName: "config-data") pod "133f3509-3c18-4a9d-94fe-8026a3c77b12" (UID: "133f3509-3c18-4a9d-94fe-8026a3c77b12"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:04:14 crc kubenswrapper[4558]: I0120 17:04:14.833375 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/133f3509-3c18-4a9d-94fe-8026a3c77b12-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "133f3509-3c18-4a9d-94fe-8026a3c77b12" (UID: "133f3509-3c18-4a9d-94fe-8026a3c77b12"). 
InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:04:14 crc kubenswrapper[4558]: I0120 17:04:14.912499 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/133f3509-3c18-4a9d-94fe-8026a3c77b12-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:04:14 crc kubenswrapper[4558]: I0120 17:04:14.912547 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/133f3509-3c18-4a9d-94fe-8026a3c77b12-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:04:14 crc kubenswrapper[4558]: I0120 17:04:14.912563 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2sr2f\" (UniqueName: \"kubernetes.io/projected/133f3509-3c18-4a9d-94fe-8026a3c77b12-kube-api-access-2sr2f\") on node \"crc\" DevicePath \"\"" Jan 20 17:04:15 crc kubenswrapper[4558]: I0120 17:04:15.305105 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:04:15 crc kubenswrapper[4558]: I0120 17:04:15.730483 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"133f3509-3c18-4a9d-94fe-8026a3c77b12","Type":"ContainerDied","Data":"d2d01b1aa874a747e46deafd5ebcb37122861bb7359050fecb76d070250e94d0"} Jan 20 17:04:15 crc kubenswrapper[4558]: I0120 17:04:15.730515 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:04:15 crc kubenswrapper[4558]: I0120 17:04:15.730773 4558 scope.go:117] "RemoveContainer" containerID="44b948c3133051a2ebbf13f37804f61fcca40240d5de185a3c5506b11479d72f" Jan 20 17:04:15 crc kubenswrapper[4558]: I0120 17:04:15.731072 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="64da18fe-7d5d-4ab0-a28b-9edc20799711" containerName="nova-api-api" containerID="cri-o://a251c18c801ffe160629886a4b6b4210a9566cb9ca6a002cb04dc61b4e70e92e" gracePeriod=30 Jan 20 17:04:15 crc kubenswrapper[4558]: I0120 17:04:15.731039 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="64da18fe-7d5d-4ab0-a28b-9edc20799711" containerName="nova-api-log" containerID="cri-o://23107775542e5733d1e13dad30a984dcfbdd2dbdf2408e2ba49363edba3604e6" gracePeriod=30 Jan 20 17:04:15 crc kubenswrapper[4558]: I0120 17:04:15.756230 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:04:15 crc kubenswrapper[4558]: I0120 17:04:15.762907 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:04:15 crc kubenswrapper[4558]: I0120 17:04:15.772248 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:04:15 crc kubenswrapper[4558]: E0120 17:04:15.772616 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="133f3509-3c18-4a9d-94fe-8026a3c77b12" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:04:15 crc kubenswrapper[4558]: I0120 17:04:15.772632 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="133f3509-3c18-4a9d-94fe-8026a3c77b12" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:04:15 crc kubenswrapper[4558]: I0120 17:04:15.772813 4558 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="133f3509-3c18-4a9d-94fe-8026a3c77b12" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:04:15 crc kubenswrapper[4558]: I0120 17:04:15.773343 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:04:15 crc kubenswrapper[4558]: I0120 17:04:15.775700 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-novncproxy-cell1-public-svc" Jan 20 17:04:15 crc kubenswrapper[4558]: I0120 17:04:15.775710 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-novncproxy-config-data" Jan 20 17:04:15 crc kubenswrapper[4558]: I0120 17:04:15.775705 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-novncproxy-cell1-vencrypt" Jan 20 17:04:15 crc kubenswrapper[4558]: I0120 17:04:15.781152 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:04:15 crc kubenswrapper[4558]: I0120 17:04:15.835036 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/1176545e-611b-4fc7-8b03-e91ee7813fd3-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"1176545e-611b-4fc7-8b03-e91ee7813fd3\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:04:15 crc kubenswrapper[4558]: I0120 17:04:15.835085 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1176545e-611b-4fc7-8b03-e91ee7813fd3-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"1176545e-611b-4fc7-8b03-e91ee7813fd3\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:04:15 crc kubenswrapper[4558]: I0120 17:04:15.835189 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9mkqv\" (UniqueName: \"kubernetes.io/projected/1176545e-611b-4fc7-8b03-e91ee7813fd3-kube-api-access-9mkqv\") pod \"nova-cell1-novncproxy-0\" (UID: \"1176545e-611b-4fc7-8b03-e91ee7813fd3\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:04:15 crc kubenswrapper[4558]: I0120 17:04:15.835217 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1176545e-611b-4fc7-8b03-e91ee7813fd3-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"1176545e-611b-4fc7-8b03-e91ee7813fd3\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:04:15 crc kubenswrapper[4558]: I0120 17:04:15.835239 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/1176545e-611b-4fc7-8b03-e91ee7813fd3-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"1176545e-611b-4fc7-8b03-e91ee7813fd3\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:04:15 crc kubenswrapper[4558]: I0120 17:04:15.937144 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/1176545e-611b-4fc7-8b03-e91ee7813fd3-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"1176545e-611b-4fc7-8b03-e91ee7813fd3\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:04:15 crc kubenswrapper[4558]: 
I0120 17:04:15.937214 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1176545e-611b-4fc7-8b03-e91ee7813fd3-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"1176545e-611b-4fc7-8b03-e91ee7813fd3\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:04:15 crc kubenswrapper[4558]: I0120 17:04:15.937428 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9mkqv\" (UniqueName: \"kubernetes.io/projected/1176545e-611b-4fc7-8b03-e91ee7813fd3-kube-api-access-9mkqv\") pod \"nova-cell1-novncproxy-0\" (UID: \"1176545e-611b-4fc7-8b03-e91ee7813fd3\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:04:15 crc kubenswrapper[4558]: I0120 17:04:15.937472 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1176545e-611b-4fc7-8b03-e91ee7813fd3-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"1176545e-611b-4fc7-8b03-e91ee7813fd3\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:04:15 crc kubenswrapper[4558]: I0120 17:04:15.937496 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/1176545e-611b-4fc7-8b03-e91ee7813fd3-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"1176545e-611b-4fc7-8b03-e91ee7813fd3\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:04:15 crc kubenswrapper[4558]: I0120 17:04:15.950813 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1176545e-611b-4fc7-8b03-e91ee7813fd3-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"1176545e-611b-4fc7-8b03-e91ee7813fd3\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:04:15 crc kubenswrapper[4558]: I0120 17:04:15.951275 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/1176545e-611b-4fc7-8b03-e91ee7813fd3-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"1176545e-611b-4fc7-8b03-e91ee7813fd3\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:04:15 crc kubenswrapper[4558]: I0120 17:04:15.953517 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/1176545e-611b-4fc7-8b03-e91ee7813fd3-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"1176545e-611b-4fc7-8b03-e91ee7813fd3\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:04:15 crc kubenswrapper[4558]: I0120 17:04:15.955533 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1176545e-611b-4fc7-8b03-e91ee7813fd3-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"1176545e-611b-4fc7-8b03-e91ee7813fd3\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:04:15 crc kubenswrapper[4558]: I0120 17:04:15.969446 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9mkqv\" (UniqueName: \"kubernetes.io/projected/1176545e-611b-4fc7-8b03-e91ee7813fd3-kube-api-access-9mkqv\") pod \"nova-cell1-novncproxy-0\" (UID: \"1176545e-611b-4fc7-8b03-e91ee7813fd3\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:04:16 crc kubenswrapper[4558]: I0120 
17:04:16.086389 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:04:16 crc kubenswrapper[4558]: I0120 17:04:16.510457 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:04:16 crc kubenswrapper[4558]: I0120 17:04:16.587656 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="133f3509-3c18-4a9d-94fe-8026a3c77b12" path="/var/lib/kubelet/pods/133f3509-3c18-4a9d-94fe-8026a3c77b12/volumes" Jan 20 17:04:16 crc kubenswrapper[4558]: I0120 17:04:16.739895 4558 generic.go:334] "Generic (PLEG): container finished" podID="64da18fe-7d5d-4ab0-a28b-9edc20799711" containerID="23107775542e5733d1e13dad30a984dcfbdd2dbdf2408e2ba49363edba3604e6" exitCode=143 Jan 20 17:04:16 crc kubenswrapper[4558]: I0120 17:04:16.739968 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"64da18fe-7d5d-4ab0-a28b-9edc20799711","Type":"ContainerDied","Data":"23107775542e5733d1e13dad30a984dcfbdd2dbdf2408e2ba49363edba3604e6"} Jan 20 17:04:16 crc kubenswrapper[4558]: I0120 17:04:16.742235 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"1176545e-611b-4fc7-8b03-e91ee7813fd3","Type":"ContainerStarted","Data":"6b8fce6bafaad0a422a208a3cb35d584fc27bf9b3ba41bf28947f865c272fe06"} Jan 20 17:04:16 crc kubenswrapper[4558]: I0120 17:04:16.742304 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"1176545e-611b-4fc7-8b03-e91ee7813fd3","Type":"ContainerStarted","Data":"b7a110a5acf1795f5d71c5768e80b2ceb59f8ca89b967a6f5e9cb5c240b37f68"} Jan 20 17:04:16 crc kubenswrapper[4558]: I0120 17:04:16.757151 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" podStartSLOduration=1.757118325 podStartE2EDuration="1.757118325s" podCreationTimestamp="2026-01-20 17:04:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:04:16.756871701 +0000 UTC m=+1350.517209668" watchObservedRunningTime="2026-01-20 17:04:16.757118325 +0000 UTC m=+1350.517456292" Jan 20 17:04:19 crc kubenswrapper[4558]: I0120 17:04:19.313845 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:04:19 crc kubenswrapper[4558]: I0120 17:04:19.396855 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64da18fe-7d5d-4ab0-a28b-9edc20799711-config-data\") pod \"64da18fe-7d5d-4ab0-a28b-9edc20799711\" (UID: \"64da18fe-7d5d-4ab0-a28b-9edc20799711\") " Jan 20 17:04:19 crc kubenswrapper[4558]: I0120 17:04:19.396917 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tbxqq\" (UniqueName: \"kubernetes.io/projected/64da18fe-7d5d-4ab0-a28b-9edc20799711-kube-api-access-tbxqq\") pod \"64da18fe-7d5d-4ab0-a28b-9edc20799711\" (UID: \"64da18fe-7d5d-4ab0-a28b-9edc20799711\") " Jan 20 17:04:19 crc kubenswrapper[4558]: I0120 17:04:19.397033 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/64da18fe-7d5d-4ab0-a28b-9edc20799711-logs\") pod \"64da18fe-7d5d-4ab0-a28b-9edc20799711\" (UID: \"64da18fe-7d5d-4ab0-a28b-9edc20799711\") " Jan 20 17:04:19 crc kubenswrapper[4558]: I0120 17:04:19.397238 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64da18fe-7d5d-4ab0-a28b-9edc20799711-combined-ca-bundle\") pod \"64da18fe-7d5d-4ab0-a28b-9edc20799711\" (UID: \"64da18fe-7d5d-4ab0-a28b-9edc20799711\") " Jan 20 17:04:19 crc kubenswrapper[4558]: I0120 17:04:19.397546 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/64da18fe-7d5d-4ab0-a28b-9edc20799711-logs" (OuterVolumeSpecName: "logs") pod "64da18fe-7d5d-4ab0-a28b-9edc20799711" (UID: "64da18fe-7d5d-4ab0-a28b-9edc20799711"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:04:19 crc kubenswrapper[4558]: I0120 17:04:19.397815 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/64da18fe-7d5d-4ab0-a28b-9edc20799711-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:04:19 crc kubenswrapper[4558]: I0120 17:04:19.404687 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/64da18fe-7d5d-4ab0-a28b-9edc20799711-kube-api-access-tbxqq" (OuterVolumeSpecName: "kube-api-access-tbxqq") pod "64da18fe-7d5d-4ab0-a28b-9edc20799711" (UID: "64da18fe-7d5d-4ab0-a28b-9edc20799711"). InnerVolumeSpecName "kube-api-access-tbxqq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:04:19 crc kubenswrapper[4558]: I0120 17:04:19.417744 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64da18fe-7d5d-4ab0-a28b-9edc20799711-config-data" (OuterVolumeSpecName: "config-data") pod "64da18fe-7d5d-4ab0-a28b-9edc20799711" (UID: "64da18fe-7d5d-4ab0-a28b-9edc20799711"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:04:19 crc kubenswrapper[4558]: I0120 17:04:19.418952 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64da18fe-7d5d-4ab0-a28b-9edc20799711-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "64da18fe-7d5d-4ab0-a28b-9edc20799711" (UID: "64da18fe-7d5d-4ab0-a28b-9edc20799711"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:04:19 crc kubenswrapper[4558]: I0120 17:04:19.500473 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64da18fe-7d5d-4ab0-a28b-9edc20799711-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:04:19 crc kubenswrapper[4558]: I0120 17:04:19.500505 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64da18fe-7d5d-4ab0-a28b-9edc20799711-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:04:19 crc kubenswrapper[4558]: I0120 17:04:19.500515 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tbxqq\" (UniqueName: \"kubernetes.io/projected/64da18fe-7d5d-4ab0-a28b-9edc20799711-kube-api-access-tbxqq\") on node \"crc\" DevicePath \"\"" Jan 20 17:04:19 crc kubenswrapper[4558]: I0120 17:04:19.764579 4558 generic.go:334] "Generic (PLEG): container finished" podID="64da18fe-7d5d-4ab0-a28b-9edc20799711" containerID="a251c18c801ffe160629886a4b6b4210a9566cb9ca6a002cb04dc61b4e70e92e" exitCode=0 Jan 20 17:04:19 crc kubenswrapper[4558]: I0120 17:04:19.764626 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:04:19 crc kubenswrapper[4558]: I0120 17:04:19.764643 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"64da18fe-7d5d-4ab0-a28b-9edc20799711","Type":"ContainerDied","Data":"a251c18c801ffe160629886a4b6b4210a9566cb9ca6a002cb04dc61b4e70e92e"} Jan 20 17:04:19 crc kubenswrapper[4558]: I0120 17:04:19.764947 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"64da18fe-7d5d-4ab0-a28b-9edc20799711","Type":"ContainerDied","Data":"540a46873df86712f782dc0899c18bb3764b1ca19ca5d1b5ccd1f8eaecee2ee8"} Jan 20 17:04:19 crc kubenswrapper[4558]: I0120 17:04:19.764968 4558 scope.go:117] "RemoveContainer" containerID="a251c18c801ffe160629886a4b6b4210a9566cb9ca6a002cb04dc61b4e70e92e" Jan 20 17:04:19 crc kubenswrapper[4558]: I0120 17:04:19.788018 4558 scope.go:117] "RemoveContainer" containerID="23107775542e5733d1e13dad30a984dcfbdd2dbdf2408e2ba49363edba3604e6" Jan 20 17:04:19 crc kubenswrapper[4558]: I0120 17:04:19.788992 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:04:19 crc kubenswrapper[4558]: I0120 17:04:19.795722 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:04:19 crc kubenswrapper[4558]: I0120 17:04:19.815318 4558 scope.go:117] "RemoveContainer" containerID="a251c18c801ffe160629886a4b6b4210a9566cb9ca6a002cb04dc61b4e70e92e" Jan 20 17:04:19 crc kubenswrapper[4558]: E0120 17:04:19.817499 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a251c18c801ffe160629886a4b6b4210a9566cb9ca6a002cb04dc61b4e70e92e\": container with ID starting with a251c18c801ffe160629886a4b6b4210a9566cb9ca6a002cb04dc61b4e70e92e not found: ID does not exist" containerID="a251c18c801ffe160629886a4b6b4210a9566cb9ca6a002cb04dc61b4e70e92e" Jan 20 17:04:19 crc kubenswrapper[4558]: I0120 17:04:19.817551 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a251c18c801ffe160629886a4b6b4210a9566cb9ca6a002cb04dc61b4e70e92e"} err="failed to get container status 
\"a251c18c801ffe160629886a4b6b4210a9566cb9ca6a002cb04dc61b4e70e92e\": rpc error: code = NotFound desc = could not find container \"a251c18c801ffe160629886a4b6b4210a9566cb9ca6a002cb04dc61b4e70e92e\": container with ID starting with a251c18c801ffe160629886a4b6b4210a9566cb9ca6a002cb04dc61b4e70e92e not found: ID does not exist" Jan 20 17:04:19 crc kubenswrapper[4558]: I0120 17:04:19.817579 4558 scope.go:117] "RemoveContainer" containerID="23107775542e5733d1e13dad30a984dcfbdd2dbdf2408e2ba49363edba3604e6" Jan 20 17:04:19 crc kubenswrapper[4558]: E0120 17:04:19.817945 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"23107775542e5733d1e13dad30a984dcfbdd2dbdf2408e2ba49363edba3604e6\": container with ID starting with 23107775542e5733d1e13dad30a984dcfbdd2dbdf2408e2ba49363edba3604e6 not found: ID does not exist" containerID="23107775542e5733d1e13dad30a984dcfbdd2dbdf2408e2ba49363edba3604e6" Jan 20 17:04:19 crc kubenswrapper[4558]: I0120 17:04:19.817966 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"23107775542e5733d1e13dad30a984dcfbdd2dbdf2408e2ba49363edba3604e6"} err="failed to get container status \"23107775542e5733d1e13dad30a984dcfbdd2dbdf2408e2ba49363edba3604e6\": rpc error: code = NotFound desc = could not find container \"23107775542e5733d1e13dad30a984dcfbdd2dbdf2408e2ba49363edba3604e6\": container with ID starting with 23107775542e5733d1e13dad30a984dcfbdd2dbdf2408e2ba49363edba3604e6 not found: ID does not exist" Jan 20 17:04:19 crc kubenswrapper[4558]: I0120 17:04:19.824182 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:04:19 crc kubenswrapper[4558]: E0120 17:04:19.824749 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64da18fe-7d5d-4ab0-a28b-9edc20799711" containerName="nova-api-log" Jan 20 17:04:19 crc kubenswrapper[4558]: I0120 17:04:19.824770 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="64da18fe-7d5d-4ab0-a28b-9edc20799711" containerName="nova-api-log" Jan 20 17:04:19 crc kubenswrapper[4558]: E0120 17:04:19.824787 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64da18fe-7d5d-4ab0-a28b-9edc20799711" containerName="nova-api-api" Jan 20 17:04:19 crc kubenswrapper[4558]: I0120 17:04:19.824795 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="64da18fe-7d5d-4ab0-a28b-9edc20799711" containerName="nova-api-api" Jan 20 17:04:19 crc kubenswrapper[4558]: I0120 17:04:19.825130 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="64da18fe-7d5d-4ab0-a28b-9edc20799711" containerName="nova-api-log" Jan 20 17:04:19 crc kubenswrapper[4558]: I0120 17:04:19.825151 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="64da18fe-7d5d-4ab0-a28b-9edc20799711" containerName="nova-api-api" Jan 20 17:04:19 crc kubenswrapper[4558]: I0120 17:04:19.826997 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:04:19 crc kubenswrapper[4558]: I0120 17:04:19.828154 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:04:19 crc kubenswrapper[4558]: I0120 17:04:19.829131 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-api-config-data" Jan 20 17:04:19 crc kubenswrapper[4558]: I0120 17:04:19.830994 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-internal-svc" Jan 20 17:04:19 crc kubenswrapper[4558]: I0120 17:04:19.831265 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-public-svc" Jan 20 17:04:19 crc kubenswrapper[4558]: I0120 17:04:19.926906 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3acae54e-bbd3-4f43-a95f-3b1442bac970-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"3acae54e-bbd3-4f43-a95f-3b1442bac970\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:04:19 crc kubenswrapper[4558]: I0120 17:04:19.927042 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3acae54e-bbd3-4f43-a95f-3b1442bac970-internal-tls-certs\") pod \"nova-api-0\" (UID: \"3acae54e-bbd3-4f43-a95f-3b1442bac970\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:04:19 crc kubenswrapper[4558]: I0120 17:04:19.927102 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3acae54e-bbd3-4f43-a95f-3b1442bac970-logs\") pod \"nova-api-0\" (UID: \"3acae54e-bbd3-4f43-a95f-3b1442bac970\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:04:19 crc kubenswrapper[4558]: I0120 17:04:19.927179 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3acae54e-bbd3-4f43-a95f-3b1442bac970-config-data\") pod \"nova-api-0\" (UID: \"3acae54e-bbd3-4f43-a95f-3b1442bac970\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:04:19 crc kubenswrapper[4558]: I0120 17:04:19.927377 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3acae54e-bbd3-4f43-a95f-3b1442bac970-public-tls-certs\") pod \"nova-api-0\" (UID: \"3acae54e-bbd3-4f43-a95f-3b1442bac970\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:04:19 crc kubenswrapper[4558]: I0120 17:04:19.927470 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dd6rq\" (UniqueName: \"kubernetes.io/projected/3acae54e-bbd3-4f43-a95f-3b1442bac970-kube-api-access-dd6rq\") pod \"nova-api-0\" (UID: \"3acae54e-bbd3-4f43-a95f-3b1442bac970\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:04:20 crc kubenswrapper[4558]: I0120 17:04:20.029929 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3acae54e-bbd3-4f43-a95f-3b1442bac970-public-tls-certs\") pod \"nova-api-0\" (UID: \"3acae54e-bbd3-4f43-a95f-3b1442bac970\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:04:20 crc kubenswrapper[4558]: I0120 17:04:20.030000 4558 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"kube-api-access-dd6rq\" (UniqueName: \"kubernetes.io/projected/3acae54e-bbd3-4f43-a95f-3b1442bac970-kube-api-access-dd6rq\") pod \"nova-api-0\" (UID: \"3acae54e-bbd3-4f43-a95f-3b1442bac970\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:04:20 crc kubenswrapper[4558]: I0120 17:04:20.030110 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3acae54e-bbd3-4f43-a95f-3b1442bac970-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"3acae54e-bbd3-4f43-a95f-3b1442bac970\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:04:20 crc kubenswrapper[4558]: I0120 17:04:20.030218 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3acae54e-bbd3-4f43-a95f-3b1442bac970-internal-tls-certs\") pod \"nova-api-0\" (UID: \"3acae54e-bbd3-4f43-a95f-3b1442bac970\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:04:20 crc kubenswrapper[4558]: I0120 17:04:20.030277 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3acae54e-bbd3-4f43-a95f-3b1442bac970-logs\") pod \"nova-api-0\" (UID: \"3acae54e-bbd3-4f43-a95f-3b1442bac970\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:04:20 crc kubenswrapper[4558]: I0120 17:04:20.030322 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3acae54e-bbd3-4f43-a95f-3b1442bac970-config-data\") pod \"nova-api-0\" (UID: \"3acae54e-bbd3-4f43-a95f-3b1442bac970\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:04:20 crc kubenswrapper[4558]: I0120 17:04:20.030866 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3acae54e-bbd3-4f43-a95f-3b1442bac970-logs\") pod \"nova-api-0\" (UID: \"3acae54e-bbd3-4f43-a95f-3b1442bac970\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:04:20 crc kubenswrapper[4558]: I0120 17:04:20.035850 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3acae54e-bbd3-4f43-a95f-3b1442bac970-internal-tls-certs\") pod \"nova-api-0\" (UID: \"3acae54e-bbd3-4f43-a95f-3b1442bac970\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:04:20 crc kubenswrapper[4558]: I0120 17:04:20.035950 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3acae54e-bbd3-4f43-a95f-3b1442bac970-config-data\") pod \"nova-api-0\" (UID: \"3acae54e-bbd3-4f43-a95f-3b1442bac970\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:04:20 crc kubenswrapper[4558]: I0120 17:04:20.036339 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3acae54e-bbd3-4f43-a95f-3b1442bac970-public-tls-certs\") pod \"nova-api-0\" (UID: \"3acae54e-bbd3-4f43-a95f-3b1442bac970\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:04:20 crc kubenswrapper[4558]: I0120 17:04:20.036447 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3acae54e-bbd3-4f43-a95f-3b1442bac970-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"3acae54e-bbd3-4f43-a95f-3b1442bac970\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:04:20 crc kubenswrapper[4558]: I0120 17:04:20.044752 4558 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-dd6rq\" (UniqueName: \"kubernetes.io/projected/3acae54e-bbd3-4f43-a95f-3b1442bac970-kube-api-access-dd6rq\") pod \"nova-api-0\" (UID: \"3acae54e-bbd3-4f43-a95f-3b1442bac970\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:04:20 crc kubenswrapper[4558]: I0120 17:04:20.148708 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:04:20 crc kubenswrapper[4558]: I0120 17:04:20.530803 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:04:20 crc kubenswrapper[4558]: W0120 17:04:20.536033 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3acae54e_bbd3_4f43_a95f_3b1442bac970.slice/crio-f16b2f39668040ed8ca1503d01970b63e9cbd299181451454261bd6975ffeb74 WatchSource:0}: Error finding container f16b2f39668040ed8ca1503d01970b63e9cbd299181451454261bd6975ffeb74: Status 404 returned error can't find the container with id f16b2f39668040ed8ca1503d01970b63e9cbd299181451454261bd6975ffeb74 Jan 20 17:04:20 crc kubenswrapper[4558]: I0120 17:04:20.573364 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="64da18fe-7d5d-4ab0-a28b-9edc20799711" path="/var/lib/kubelet/pods/64da18fe-7d5d-4ab0-a28b-9edc20799711/volumes" Jan 20 17:04:20 crc kubenswrapper[4558]: I0120 17:04:20.777213 4558 generic.go:334] "Generic (PLEG): container finished" podID="ef93decf-78a4-4439-bac3-36f5720d0589" containerID="d7039d4cfe22846fb1df116f46327e92f5a4fd9f2f28553fea21b2bc819b0c7e" exitCode=0 Jan 20 17:04:20 crc kubenswrapper[4558]: I0120 17:04:20.777276 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"ef93decf-78a4-4439-bac3-36f5720d0589","Type":"ContainerDied","Data":"d7039d4cfe22846fb1df116f46327e92f5a4fd9f2f28553fea21b2bc819b0c7e"} Jan 20 17:04:20 crc kubenswrapper[4558]: I0120 17:04:20.780032 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"3acae54e-bbd3-4f43-a95f-3b1442bac970","Type":"ContainerStarted","Data":"344ade4fe227710f2214c70ce7b21795e9e2804e0d89472fbf05cf4a8eb1bb79"} Jan 20 17:04:20 crc kubenswrapper[4558]: I0120 17:04:20.780075 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"3acae54e-bbd3-4f43-a95f-3b1442bac970","Type":"ContainerStarted","Data":"f16b2f39668040ed8ca1503d01970b63e9cbd299181451454261bd6975ffeb74"} Jan 20 17:04:20 crc kubenswrapper[4558]: I0120 17:04:20.884407 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:04:20 crc kubenswrapper[4558]: I0120 17:04:20.944504 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sq64x\" (UniqueName: \"kubernetes.io/projected/ef93decf-78a4-4439-bac3-36f5720d0589-kube-api-access-sq64x\") pod \"ef93decf-78a4-4439-bac3-36f5720d0589\" (UID: \"ef93decf-78a4-4439-bac3-36f5720d0589\") " Jan 20 17:04:20 crc kubenswrapper[4558]: I0120 17:04:20.944609 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ef93decf-78a4-4439-bac3-36f5720d0589-run-httpd\") pod \"ef93decf-78a4-4439-bac3-36f5720d0589\" (UID: \"ef93decf-78a4-4439-bac3-36f5720d0589\") " Jan 20 17:04:20 crc kubenswrapper[4558]: I0120 17:04:20.944649 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef93decf-78a4-4439-bac3-36f5720d0589-config-data\") pod \"ef93decf-78a4-4439-bac3-36f5720d0589\" (UID: \"ef93decf-78a4-4439-bac3-36f5720d0589\") " Jan 20 17:04:20 crc kubenswrapper[4558]: I0120 17:04:20.944668 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ef93decf-78a4-4439-bac3-36f5720d0589-scripts\") pod \"ef93decf-78a4-4439-bac3-36f5720d0589\" (UID: \"ef93decf-78a4-4439-bac3-36f5720d0589\") " Jan 20 17:04:20 crc kubenswrapper[4558]: I0120 17:04:20.944730 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ef93decf-78a4-4439-bac3-36f5720d0589-sg-core-conf-yaml\") pod \"ef93decf-78a4-4439-bac3-36f5720d0589\" (UID: \"ef93decf-78a4-4439-bac3-36f5720d0589\") " Jan 20 17:04:20 crc kubenswrapper[4558]: I0120 17:04:20.944796 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/ef93decf-78a4-4439-bac3-36f5720d0589-ceilometer-tls-certs\") pod \"ef93decf-78a4-4439-bac3-36f5720d0589\" (UID: \"ef93decf-78a4-4439-bac3-36f5720d0589\") " Jan 20 17:04:20 crc kubenswrapper[4558]: I0120 17:04:20.944850 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef93decf-78a4-4439-bac3-36f5720d0589-combined-ca-bundle\") pod \"ef93decf-78a4-4439-bac3-36f5720d0589\" (UID: \"ef93decf-78a4-4439-bac3-36f5720d0589\") " Jan 20 17:04:20 crc kubenswrapper[4558]: I0120 17:04:20.944899 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ef93decf-78a4-4439-bac3-36f5720d0589-log-httpd\") pod \"ef93decf-78a4-4439-bac3-36f5720d0589\" (UID: \"ef93decf-78a4-4439-bac3-36f5720d0589\") " Jan 20 17:04:20 crc kubenswrapper[4558]: I0120 17:04:20.945536 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ef93decf-78a4-4439-bac3-36f5720d0589-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "ef93decf-78a4-4439-bac3-36f5720d0589" (UID: "ef93decf-78a4-4439-bac3-36f5720d0589"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:04:20 crc kubenswrapper[4558]: I0120 17:04:20.945839 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ef93decf-78a4-4439-bac3-36f5720d0589-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "ef93decf-78a4-4439-bac3-36f5720d0589" (UID: "ef93decf-78a4-4439-bac3-36f5720d0589"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:04:20 crc kubenswrapper[4558]: I0120 17:04:20.949099 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef93decf-78a4-4439-bac3-36f5720d0589-scripts" (OuterVolumeSpecName: "scripts") pod "ef93decf-78a4-4439-bac3-36f5720d0589" (UID: "ef93decf-78a4-4439-bac3-36f5720d0589"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:04:20 crc kubenswrapper[4558]: I0120 17:04:20.949346 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ef93decf-78a4-4439-bac3-36f5720d0589-kube-api-access-sq64x" (OuterVolumeSpecName: "kube-api-access-sq64x") pod "ef93decf-78a4-4439-bac3-36f5720d0589" (UID: "ef93decf-78a4-4439-bac3-36f5720d0589"). InnerVolumeSpecName "kube-api-access-sq64x". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:04:20 crc kubenswrapper[4558]: I0120 17:04:20.966094 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef93decf-78a4-4439-bac3-36f5720d0589-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "ef93decf-78a4-4439-bac3-36f5720d0589" (UID: "ef93decf-78a4-4439-bac3-36f5720d0589"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:04:20 crc kubenswrapper[4558]: I0120 17:04:20.980721 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef93decf-78a4-4439-bac3-36f5720d0589-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "ef93decf-78a4-4439-bac3-36f5720d0589" (UID: "ef93decf-78a4-4439-bac3-36f5720d0589"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:04:20 crc kubenswrapper[4558]: I0120 17:04:20.995319 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef93decf-78a4-4439-bac3-36f5720d0589-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ef93decf-78a4-4439-bac3-36f5720d0589" (UID: "ef93decf-78a4-4439-bac3-36f5720d0589"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:04:21 crc kubenswrapper[4558]: I0120 17:04:21.013583 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef93decf-78a4-4439-bac3-36f5720d0589-config-data" (OuterVolumeSpecName: "config-data") pod "ef93decf-78a4-4439-bac3-36f5720d0589" (UID: "ef93decf-78a4-4439-bac3-36f5720d0589"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:04:21 crc kubenswrapper[4558]: I0120 17:04:21.050453 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef93decf-78a4-4439-bac3-36f5720d0589-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:04:21 crc kubenswrapper[4558]: I0120 17:04:21.050483 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ef93decf-78a4-4439-bac3-36f5720d0589-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:04:21 crc kubenswrapper[4558]: I0120 17:04:21.050492 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ef93decf-78a4-4439-bac3-36f5720d0589-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:04:21 crc kubenswrapper[4558]: I0120 17:04:21.050503 4558 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/ef93decf-78a4-4439-bac3-36f5720d0589-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:04:21 crc kubenswrapper[4558]: I0120 17:04:21.050512 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef93decf-78a4-4439-bac3-36f5720d0589-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:04:21 crc kubenswrapper[4558]: I0120 17:04:21.050520 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ef93decf-78a4-4439-bac3-36f5720d0589-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:04:21 crc kubenswrapper[4558]: I0120 17:04:21.050530 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sq64x\" (UniqueName: \"kubernetes.io/projected/ef93decf-78a4-4439-bac3-36f5720d0589-kube-api-access-sq64x\") on node \"crc\" DevicePath \"\"" Jan 20 17:04:21 crc kubenswrapper[4558]: I0120 17:04:21.050538 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ef93decf-78a4-4439-bac3-36f5720d0589-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:04:21 crc kubenswrapper[4558]: I0120 17:04:21.088123 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:04:21 crc kubenswrapper[4558]: I0120 17:04:21.790910 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"3acae54e-bbd3-4f43-a95f-3b1442bac970","Type":"ContainerStarted","Data":"509826e2026006c0644f79e43d9cd18e5b35820d25605554f35cc4ef5a824a71"} Jan 20 17:04:21 crc kubenswrapper[4558]: I0120 17:04:21.793879 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"ef93decf-78a4-4439-bac3-36f5720d0589","Type":"ContainerDied","Data":"985488fa54dbc7d629dc86759801222b1f8b940a3095fdd87504369d6b143582"} Jan 20 17:04:21 crc kubenswrapper[4558]: I0120 17:04:21.793917 4558 scope.go:117] "RemoveContainer" containerID="05610e58069401e608b04b0fe07c9855dcd26069b41e0a19ce0940290d2c42b4" Jan 20 17:04:21 crc kubenswrapper[4558]: I0120 17:04:21.794033 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:04:21 crc kubenswrapper[4558]: I0120 17:04:21.812741 4558 scope.go:117] "RemoveContainer" containerID="063770fda370183252496062f7a4063e3aa904aa813a1af436066509d93e1b0d" Jan 20 17:04:21 crc kubenswrapper[4558]: I0120 17:04:21.827862 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-api-0" podStartSLOduration=2.827828287 podStartE2EDuration="2.827828287s" podCreationTimestamp="2026-01-20 17:04:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:04:21.809857667 +0000 UTC m=+1355.570195634" watchObservedRunningTime="2026-01-20 17:04:21.827828287 +0000 UTC m=+1355.588166255" Jan 20 17:04:21 crc kubenswrapper[4558]: I0120 17:04:21.831574 4558 scope.go:117] "RemoveContainer" containerID="d7039d4cfe22846fb1df116f46327e92f5a4fd9f2f28553fea21b2bc819b0c7e" Jan 20 17:04:21 crc kubenswrapper[4558]: I0120 17:04:21.836117 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:04:21 crc kubenswrapper[4558]: I0120 17:04:21.846132 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:04:21 crc kubenswrapper[4558]: I0120 17:04:21.853373 4558 scope.go:117] "RemoveContainer" containerID="aceb6a1c812a55b8382e720fd667b1bce3e5e8b72e63d66d5d61404d004d5633" Jan 20 17:04:21 crc kubenswrapper[4558]: I0120 17:04:21.856238 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:04:21 crc kubenswrapper[4558]: E0120 17:04:21.856746 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef93decf-78a4-4439-bac3-36f5720d0589" containerName="ceilometer-central-agent" Jan 20 17:04:21 crc kubenswrapper[4558]: I0120 17:04:21.856767 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef93decf-78a4-4439-bac3-36f5720d0589" containerName="ceilometer-central-agent" Jan 20 17:04:21 crc kubenswrapper[4558]: E0120 17:04:21.856779 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef93decf-78a4-4439-bac3-36f5720d0589" containerName="sg-core" Jan 20 17:04:21 crc kubenswrapper[4558]: I0120 17:04:21.856784 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef93decf-78a4-4439-bac3-36f5720d0589" containerName="sg-core" Jan 20 17:04:21 crc kubenswrapper[4558]: E0120 17:04:21.856793 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef93decf-78a4-4439-bac3-36f5720d0589" containerName="ceilometer-notification-agent" Jan 20 17:04:21 crc kubenswrapper[4558]: I0120 17:04:21.856798 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef93decf-78a4-4439-bac3-36f5720d0589" containerName="ceilometer-notification-agent" Jan 20 17:04:21 crc kubenswrapper[4558]: E0120 17:04:21.856806 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef93decf-78a4-4439-bac3-36f5720d0589" containerName="proxy-httpd" Jan 20 17:04:21 crc kubenswrapper[4558]: I0120 17:04:21.856812 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef93decf-78a4-4439-bac3-36f5720d0589" containerName="proxy-httpd" Jan 20 17:04:21 crc kubenswrapper[4558]: I0120 17:04:21.856962 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef93decf-78a4-4439-bac3-36f5720d0589" containerName="sg-core" Jan 20 17:04:21 crc kubenswrapper[4558]: I0120 17:04:21.856992 4558 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="ef93decf-78a4-4439-bac3-36f5720d0589" containerName="ceilometer-notification-agent" Jan 20 17:04:21 crc kubenswrapper[4558]: I0120 17:04:21.857004 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef93decf-78a4-4439-bac3-36f5720d0589" containerName="proxy-httpd" Jan 20 17:04:21 crc kubenswrapper[4558]: I0120 17:04:21.857015 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef93decf-78a4-4439-bac3-36f5720d0589" containerName="ceilometer-central-agent" Jan 20 17:04:21 crc kubenswrapper[4558]: I0120 17:04:21.858524 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:04:21 crc kubenswrapper[4558]: I0120 17:04:21.861874 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:04:21 crc kubenswrapper[4558]: I0120 17:04:21.862145 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:04:21 crc kubenswrapper[4558]: I0120 17:04:21.862203 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ceilometer-internal-svc" Jan 20 17:04:21 crc kubenswrapper[4558]: I0120 17:04:21.865267 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:04:21 crc kubenswrapper[4558]: I0120 17:04:21.970956 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c7187d14-f25b-4344-bf36-7d56d8e1b79c-log-httpd\") pod \"ceilometer-0\" (UID: \"c7187d14-f25b-4344-bf36-7d56d8e1b79c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:04:21 crc kubenswrapper[4558]: I0120 17:04:21.971097 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c7187d14-f25b-4344-bf36-7d56d8e1b79c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c7187d14-f25b-4344-bf36-7d56d8e1b79c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:04:21 crc kubenswrapper[4558]: I0120 17:04:21.971132 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7187d14-f25b-4344-bf36-7d56d8e1b79c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c7187d14-f25b-4344-bf36-7d56d8e1b79c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:04:21 crc kubenswrapper[4558]: I0120 17:04:21.971301 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d8wkp\" (UniqueName: \"kubernetes.io/projected/c7187d14-f25b-4344-bf36-7d56d8e1b79c-kube-api-access-d8wkp\") pod \"ceilometer-0\" (UID: \"c7187d14-f25b-4344-bf36-7d56d8e1b79c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:04:21 crc kubenswrapper[4558]: I0120 17:04:21.971366 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/c7187d14-f25b-4344-bf36-7d56d8e1b79c-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"c7187d14-f25b-4344-bf36-7d56d8e1b79c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:04:21 crc kubenswrapper[4558]: I0120 17:04:21.971418 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" 
(UniqueName: \"kubernetes.io/secret/c7187d14-f25b-4344-bf36-7d56d8e1b79c-scripts\") pod \"ceilometer-0\" (UID: \"c7187d14-f25b-4344-bf36-7d56d8e1b79c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:04:21 crc kubenswrapper[4558]: I0120 17:04:21.971479 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c7187d14-f25b-4344-bf36-7d56d8e1b79c-run-httpd\") pod \"ceilometer-0\" (UID: \"c7187d14-f25b-4344-bf36-7d56d8e1b79c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:04:21 crc kubenswrapper[4558]: I0120 17:04:21.971551 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7187d14-f25b-4344-bf36-7d56d8e1b79c-config-data\") pod \"ceilometer-0\" (UID: \"c7187d14-f25b-4344-bf36-7d56d8e1b79c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:04:22 crc kubenswrapper[4558]: I0120 17:04:22.073462 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d8wkp\" (UniqueName: \"kubernetes.io/projected/c7187d14-f25b-4344-bf36-7d56d8e1b79c-kube-api-access-d8wkp\") pod \"ceilometer-0\" (UID: \"c7187d14-f25b-4344-bf36-7d56d8e1b79c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:04:22 crc kubenswrapper[4558]: I0120 17:04:22.073627 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/c7187d14-f25b-4344-bf36-7d56d8e1b79c-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"c7187d14-f25b-4344-bf36-7d56d8e1b79c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:04:22 crc kubenswrapper[4558]: I0120 17:04:22.073718 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c7187d14-f25b-4344-bf36-7d56d8e1b79c-scripts\") pod \"ceilometer-0\" (UID: \"c7187d14-f25b-4344-bf36-7d56d8e1b79c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:04:22 crc kubenswrapper[4558]: I0120 17:04:22.073843 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c7187d14-f25b-4344-bf36-7d56d8e1b79c-run-httpd\") pod \"ceilometer-0\" (UID: \"c7187d14-f25b-4344-bf36-7d56d8e1b79c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:04:22 crc kubenswrapper[4558]: I0120 17:04:22.073927 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7187d14-f25b-4344-bf36-7d56d8e1b79c-config-data\") pod \"ceilometer-0\" (UID: \"c7187d14-f25b-4344-bf36-7d56d8e1b79c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:04:22 crc kubenswrapper[4558]: I0120 17:04:22.074106 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c7187d14-f25b-4344-bf36-7d56d8e1b79c-log-httpd\") pod \"ceilometer-0\" (UID: \"c7187d14-f25b-4344-bf36-7d56d8e1b79c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:04:22 crc kubenswrapper[4558]: I0120 17:04:22.074233 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c7187d14-f25b-4344-bf36-7d56d8e1b79c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c7187d14-f25b-4344-bf36-7d56d8e1b79c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:04:22 crc 
kubenswrapper[4558]: I0120 17:04:22.074325 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7187d14-f25b-4344-bf36-7d56d8e1b79c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c7187d14-f25b-4344-bf36-7d56d8e1b79c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:04:22 crc kubenswrapper[4558]: I0120 17:04:22.074233 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c7187d14-f25b-4344-bf36-7d56d8e1b79c-run-httpd\") pod \"ceilometer-0\" (UID: \"c7187d14-f25b-4344-bf36-7d56d8e1b79c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:04:22 crc kubenswrapper[4558]: I0120 17:04:22.074468 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c7187d14-f25b-4344-bf36-7d56d8e1b79c-log-httpd\") pod \"ceilometer-0\" (UID: \"c7187d14-f25b-4344-bf36-7d56d8e1b79c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:04:22 crc kubenswrapper[4558]: I0120 17:04:22.078860 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c7187d14-f25b-4344-bf36-7d56d8e1b79c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c7187d14-f25b-4344-bf36-7d56d8e1b79c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:04:22 crc kubenswrapper[4558]: I0120 17:04:22.079083 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/c7187d14-f25b-4344-bf36-7d56d8e1b79c-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"c7187d14-f25b-4344-bf36-7d56d8e1b79c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:04:22 crc kubenswrapper[4558]: I0120 17:04:22.079434 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7187d14-f25b-4344-bf36-7d56d8e1b79c-config-data\") pod \"ceilometer-0\" (UID: \"c7187d14-f25b-4344-bf36-7d56d8e1b79c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:04:22 crc kubenswrapper[4558]: I0120 17:04:22.079634 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c7187d14-f25b-4344-bf36-7d56d8e1b79c-scripts\") pod \"ceilometer-0\" (UID: \"c7187d14-f25b-4344-bf36-7d56d8e1b79c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:04:22 crc kubenswrapper[4558]: I0120 17:04:22.079992 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7187d14-f25b-4344-bf36-7d56d8e1b79c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c7187d14-f25b-4344-bf36-7d56d8e1b79c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:04:22 crc kubenswrapper[4558]: I0120 17:04:22.087851 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d8wkp\" (UniqueName: \"kubernetes.io/projected/c7187d14-f25b-4344-bf36-7d56d8e1b79c-kube-api-access-d8wkp\") pod \"ceilometer-0\" (UID: \"c7187d14-f25b-4344-bf36-7d56d8e1b79c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:04:22 crc kubenswrapper[4558]: I0120 17:04:22.187765 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:04:22 crc kubenswrapper[4558]: I0120 17:04:22.574123 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ef93decf-78a4-4439-bac3-36f5720d0589" path="/var/lib/kubelet/pods/ef93decf-78a4-4439-bac3-36f5720d0589/volumes" Jan 20 17:04:22 crc kubenswrapper[4558]: I0120 17:04:22.576121 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:04:22 crc kubenswrapper[4558]: W0120 17:04:22.577687 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc7187d14_f25b_4344_bf36_7d56d8e1b79c.slice/crio-21ff66014e3f5bcbb545b0096dc7040b680c353d52452cb43f0529712310ca7b WatchSource:0}: Error finding container 21ff66014e3f5bcbb545b0096dc7040b680c353d52452cb43f0529712310ca7b: Status 404 returned error can't find the container with id 21ff66014e3f5bcbb545b0096dc7040b680c353d52452cb43f0529712310ca7b Jan 20 17:04:22 crc kubenswrapper[4558]: I0120 17:04:22.804619 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"c7187d14-f25b-4344-bf36-7d56d8e1b79c","Type":"ContainerStarted","Data":"21ff66014e3f5bcbb545b0096dc7040b680c353d52452cb43f0529712310ca7b"} Jan 20 17:04:23 crc kubenswrapper[4558]: I0120 17:04:23.814299 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"c7187d14-f25b-4344-bf36-7d56d8e1b79c","Type":"ContainerStarted","Data":"97f14c8dcf0f7e744564168d9a5f79f08919d2ff3285eef66b5001b20e3c69d7"} Jan 20 17:04:24 crc kubenswrapper[4558]: I0120 17:04:24.824494 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"c7187d14-f25b-4344-bf36-7d56d8e1b79c","Type":"ContainerStarted","Data":"469eb7fe8ee3af85b606966788c6c91603f071d07ac40bc36607962b279dc510"} Jan 20 17:04:25 crc kubenswrapper[4558]: I0120 17:04:25.832227 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"c7187d14-f25b-4344-bf36-7d56d8e1b79c","Type":"ContainerStarted","Data":"2af5466604135693231e3743b389224d86d75c0a2d399c8a0f6df2491983b42f"} Jan 20 17:04:26 crc kubenswrapper[4558]: I0120 17:04:26.088435 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:04:26 crc kubenswrapper[4558]: I0120 17:04:26.105405 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:04:26 crc kubenswrapper[4558]: I0120 17:04:26.843398 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"c7187d14-f25b-4344-bf36-7d56d8e1b79c","Type":"ContainerStarted","Data":"c6607b53445fad5655dbb034a5a76d6a87a2e8a96bd8d11ed9d596a7971d2d55"} Jan 20 17:04:26 crc kubenswrapper[4558]: I0120 17:04:26.844086 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:04:26 crc kubenswrapper[4558]: I0120 17:04:26.864482 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=2.027963387 podStartE2EDuration="5.86446463s" podCreationTimestamp="2026-01-20 17:04:21 +0000 UTC" firstStartedPulling="2026-01-20 17:04:22.579732743 +0000 UTC m=+1356.340070710" lastFinishedPulling="2026-01-20 17:04:26.416233987 
+0000 UTC m=+1360.176571953" observedRunningTime="2026-01-20 17:04:26.857116232 +0000 UTC m=+1360.617454200" watchObservedRunningTime="2026-01-20 17:04:26.86446463 +0000 UTC m=+1360.624802597" Jan 20 17:04:26 crc kubenswrapper[4558]: I0120 17:04:26.866392 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:04:27 crc kubenswrapper[4558]: I0120 17:04:27.016448 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-cell-mapping-9v2w5"] Jan 20 17:04:27 crc kubenswrapper[4558]: I0120 17:04:27.017434 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-9v2w5" Jan 20 17:04:27 crc kubenswrapper[4558]: I0120 17:04:27.018977 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-manage-scripts" Jan 20 17:04:27 crc kubenswrapper[4558]: I0120 17:04:27.021521 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-manage-config-data" Jan 20 17:04:27 crc kubenswrapper[4558]: I0120 17:04:27.023780 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-cell-mapping-9v2w5"] Jan 20 17:04:27 crc kubenswrapper[4558]: I0120 17:04:27.076655 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/506aa756-70ad-4884-8ea6-b356ee840e38-scripts\") pod \"nova-cell1-cell-mapping-9v2w5\" (UID: \"506aa756-70ad-4884-8ea6-b356ee840e38\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-9v2w5" Jan 20 17:04:27 crc kubenswrapper[4558]: I0120 17:04:27.076718 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/506aa756-70ad-4884-8ea6-b356ee840e38-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-9v2w5\" (UID: \"506aa756-70ad-4884-8ea6-b356ee840e38\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-9v2w5" Jan 20 17:04:27 crc kubenswrapper[4558]: I0120 17:04:27.076828 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dgnzg\" (UniqueName: \"kubernetes.io/projected/506aa756-70ad-4884-8ea6-b356ee840e38-kube-api-access-dgnzg\") pod \"nova-cell1-cell-mapping-9v2w5\" (UID: \"506aa756-70ad-4884-8ea6-b356ee840e38\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-9v2w5" Jan 20 17:04:27 crc kubenswrapper[4558]: I0120 17:04:27.076858 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/506aa756-70ad-4884-8ea6-b356ee840e38-config-data\") pod \"nova-cell1-cell-mapping-9v2w5\" (UID: \"506aa756-70ad-4884-8ea6-b356ee840e38\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-9v2w5" Jan 20 17:04:27 crc kubenswrapper[4558]: I0120 17:04:27.179125 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/506aa756-70ad-4884-8ea6-b356ee840e38-scripts\") pod \"nova-cell1-cell-mapping-9v2w5\" (UID: \"506aa756-70ad-4884-8ea6-b356ee840e38\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-9v2w5" Jan 20 17:04:27 crc kubenswrapper[4558]: I0120 17:04:27.179219 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/506aa756-70ad-4884-8ea6-b356ee840e38-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-9v2w5\" (UID: \"506aa756-70ad-4884-8ea6-b356ee840e38\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-9v2w5" Jan 20 17:04:27 crc kubenswrapper[4558]: I0120 17:04:27.179280 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dgnzg\" (UniqueName: \"kubernetes.io/projected/506aa756-70ad-4884-8ea6-b356ee840e38-kube-api-access-dgnzg\") pod \"nova-cell1-cell-mapping-9v2w5\" (UID: \"506aa756-70ad-4884-8ea6-b356ee840e38\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-9v2w5" Jan 20 17:04:27 crc kubenswrapper[4558]: I0120 17:04:27.179314 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/506aa756-70ad-4884-8ea6-b356ee840e38-config-data\") pod \"nova-cell1-cell-mapping-9v2w5\" (UID: \"506aa756-70ad-4884-8ea6-b356ee840e38\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-9v2w5" Jan 20 17:04:27 crc kubenswrapper[4558]: I0120 17:04:27.185383 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/506aa756-70ad-4884-8ea6-b356ee840e38-scripts\") pod \"nova-cell1-cell-mapping-9v2w5\" (UID: \"506aa756-70ad-4884-8ea6-b356ee840e38\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-9v2w5" Jan 20 17:04:27 crc kubenswrapper[4558]: I0120 17:04:27.185952 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/506aa756-70ad-4884-8ea6-b356ee840e38-config-data\") pod \"nova-cell1-cell-mapping-9v2w5\" (UID: \"506aa756-70ad-4884-8ea6-b356ee840e38\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-9v2w5" Jan 20 17:04:27 crc kubenswrapper[4558]: I0120 17:04:27.193026 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/506aa756-70ad-4884-8ea6-b356ee840e38-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-9v2w5\" (UID: \"506aa756-70ad-4884-8ea6-b356ee840e38\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-9v2w5" Jan 20 17:04:27 crc kubenswrapper[4558]: I0120 17:04:27.193337 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dgnzg\" (UniqueName: \"kubernetes.io/projected/506aa756-70ad-4884-8ea6-b356ee840e38-kube-api-access-dgnzg\") pod \"nova-cell1-cell-mapping-9v2w5\" (UID: \"506aa756-70ad-4884-8ea6-b356ee840e38\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-9v2w5" Jan 20 17:04:27 crc kubenswrapper[4558]: I0120 17:04:27.335730 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-9v2w5" Jan 20 17:04:27 crc kubenswrapper[4558]: W0120 17:04:27.723475 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod506aa756_70ad_4884_8ea6_b356ee840e38.slice/crio-cc906b637b7d71b5346d5115b94eacde724f85f0a3c32adaa14fe671c9594d5f WatchSource:0}: Error finding container cc906b637b7d71b5346d5115b94eacde724f85f0a3c32adaa14fe671c9594d5f: Status 404 returned error can't find the container with id cc906b637b7d71b5346d5115b94eacde724f85f0a3c32adaa14fe671c9594d5f Jan 20 17:04:27 crc kubenswrapper[4558]: I0120 17:04:27.733115 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-cell-mapping-9v2w5"] Jan 20 17:04:27 crc kubenswrapper[4558]: I0120 17:04:27.850066 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-9v2w5" event={"ID":"506aa756-70ad-4884-8ea6-b356ee840e38","Type":"ContainerStarted","Data":"cc906b637b7d71b5346d5115b94eacde724f85f0a3c32adaa14fe671c9594d5f"} Jan 20 17:04:28 crc kubenswrapper[4558]: I0120 17:04:28.859529 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-9v2w5" event={"ID":"506aa756-70ad-4884-8ea6-b356ee840e38","Type":"ContainerStarted","Data":"f820bd8add1ea95d24868db10ab96d6351761090b1af2d6f03fc4e29de25ecac"} Jan 20 17:04:28 crc kubenswrapper[4558]: I0120 17:04:28.874397 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-9v2w5" podStartSLOduration=1.8743835770000001 podStartE2EDuration="1.874383577s" podCreationTimestamp="2026-01-20 17:04:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:04:28.868409453 +0000 UTC m=+1362.628747421" watchObservedRunningTime="2026-01-20 17:04:28.874383577 +0000 UTC m=+1362.634721544" Jan 20 17:04:30 crc kubenswrapper[4558]: I0120 17:04:30.148885 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:04:30 crc kubenswrapper[4558]: I0120 17:04:30.148929 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:04:31 crc kubenswrapper[4558]: I0120 17:04:31.163274 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" podUID="3acae54e-bbd3-4f43-a95f-3b1442bac970" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.1.28:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:04:31 crc kubenswrapper[4558]: I0120 17:04:31.163465 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" podUID="3acae54e-bbd3-4f43-a95f-3b1442bac970" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.1.28:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:04:31 crc kubenswrapper[4558]: I0120 17:04:31.880066 4558 generic.go:334] "Generic (PLEG): container finished" podID="506aa756-70ad-4884-8ea6-b356ee840e38" containerID="f820bd8add1ea95d24868db10ab96d6351761090b1af2d6f03fc4e29de25ecac" exitCode=0 Jan 20 17:04:31 crc kubenswrapper[4558]: I0120 17:04:31.880105 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-kuttl-tests/nova-cell1-cell-mapping-9v2w5" event={"ID":"506aa756-70ad-4884-8ea6-b356ee840e38","Type":"ContainerDied","Data":"f820bd8add1ea95d24868db10ab96d6351761090b1af2d6f03fc4e29de25ecac"} Jan 20 17:04:33 crc kubenswrapper[4558]: I0120 17:04:33.149056 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-9v2w5" Jan 20 17:04:33 crc kubenswrapper[4558]: I0120 17:04:33.267110 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/506aa756-70ad-4884-8ea6-b356ee840e38-config-data\") pod \"506aa756-70ad-4884-8ea6-b356ee840e38\" (UID: \"506aa756-70ad-4884-8ea6-b356ee840e38\") " Jan 20 17:04:33 crc kubenswrapper[4558]: I0120 17:04:33.267150 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/506aa756-70ad-4884-8ea6-b356ee840e38-scripts\") pod \"506aa756-70ad-4884-8ea6-b356ee840e38\" (UID: \"506aa756-70ad-4884-8ea6-b356ee840e38\") " Jan 20 17:04:33 crc kubenswrapper[4558]: I0120 17:04:33.267270 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dgnzg\" (UniqueName: \"kubernetes.io/projected/506aa756-70ad-4884-8ea6-b356ee840e38-kube-api-access-dgnzg\") pod \"506aa756-70ad-4884-8ea6-b356ee840e38\" (UID: \"506aa756-70ad-4884-8ea6-b356ee840e38\") " Jan 20 17:04:33 crc kubenswrapper[4558]: I0120 17:04:33.267315 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/506aa756-70ad-4884-8ea6-b356ee840e38-combined-ca-bundle\") pod \"506aa756-70ad-4884-8ea6-b356ee840e38\" (UID: \"506aa756-70ad-4884-8ea6-b356ee840e38\") " Jan 20 17:04:33 crc kubenswrapper[4558]: I0120 17:04:33.271694 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/506aa756-70ad-4884-8ea6-b356ee840e38-scripts" (OuterVolumeSpecName: "scripts") pod "506aa756-70ad-4884-8ea6-b356ee840e38" (UID: "506aa756-70ad-4884-8ea6-b356ee840e38"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:04:33 crc kubenswrapper[4558]: I0120 17:04:33.271939 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/506aa756-70ad-4884-8ea6-b356ee840e38-kube-api-access-dgnzg" (OuterVolumeSpecName: "kube-api-access-dgnzg") pod "506aa756-70ad-4884-8ea6-b356ee840e38" (UID: "506aa756-70ad-4884-8ea6-b356ee840e38"). InnerVolumeSpecName "kube-api-access-dgnzg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:04:33 crc kubenswrapper[4558]: I0120 17:04:33.287770 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/506aa756-70ad-4884-8ea6-b356ee840e38-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "506aa756-70ad-4884-8ea6-b356ee840e38" (UID: "506aa756-70ad-4884-8ea6-b356ee840e38"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:04:33 crc kubenswrapper[4558]: I0120 17:04:33.289847 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/506aa756-70ad-4884-8ea6-b356ee840e38-config-data" (OuterVolumeSpecName: "config-data") pod "506aa756-70ad-4884-8ea6-b356ee840e38" (UID: "506aa756-70ad-4884-8ea6-b356ee840e38"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:04:33 crc kubenswrapper[4558]: I0120 17:04:33.369473 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/506aa756-70ad-4884-8ea6-b356ee840e38-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:04:33 crc kubenswrapper[4558]: I0120 17:04:33.369497 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/506aa756-70ad-4884-8ea6-b356ee840e38-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:04:33 crc kubenswrapper[4558]: I0120 17:04:33.369508 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dgnzg\" (UniqueName: \"kubernetes.io/projected/506aa756-70ad-4884-8ea6-b356ee840e38-kube-api-access-dgnzg\") on node \"crc\" DevicePath \"\"" Jan 20 17:04:33 crc kubenswrapper[4558]: I0120 17:04:33.369519 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/506aa756-70ad-4884-8ea6-b356ee840e38-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:04:33 crc kubenswrapper[4558]: I0120 17:04:33.894063 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-9v2w5" event={"ID":"506aa756-70ad-4884-8ea6-b356ee840e38","Type":"ContainerDied","Data":"cc906b637b7d71b5346d5115b94eacde724f85f0a3c32adaa14fe671c9594d5f"} Jan 20 17:04:33 crc kubenswrapper[4558]: I0120 17:04:33.894239 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cc906b637b7d71b5346d5115b94eacde724f85f0a3c32adaa14fe671c9594d5f" Jan 20 17:04:33 crc kubenswrapper[4558]: I0120 17:04:33.894311 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-9v2w5" Jan 20 17:04:34 crc kubenswrapper[4558]: I0120 17:04:34.052006 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:04:34 crc kubenswrapper[4558]: I0120 17:04:34.052262 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="c3bd0148-350f-4351-8a04-a3009cfdeb29" containerName="nova-scheduler-scheduler" containerID="cri-o://f94960e9c0eda36da8e549e42b6b9637878691b355b71f154c39b91497ab764e" gracePeriod=30 Jan 20 17:04:34 crc kubenswrapper[4558]: I0120 17:04:34.058421 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:04:34 crc kubenswrapper[4558]: I0120 17:04:34.058635 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="3acae54e-bbd3-4f43-a95f-3b1442bac970" containerName="nova-api-log" containerID="cri-o://344ade4fe227710f2214c70ce7b21795e9e2804e0d89472fbf05cf4a8eb1bb79" gracePeriod=30 Jan 20 17:04:34 crc kubenswrapper[4558]: I0120 17:04:34.058699 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="3acae54e-bbd3-4f43-a95f-3b1442bac970" containerName="nova-api-api" containerID="cri-o://509826e2026006c0644f79e43d9cd18e5b35820d25605554f35cc4ef5a824a71" gracePeriod=30 Jan 20 17:04:34 crc kubenswrapper[4558]: I0120 17:04:34.064563 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:04:34 crc kubenswrapper[4558]: I0120 17:04:34.064748 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="8b67b016-0bfa-42ec-8c8a-8ea7afb366ac" containerName="nova-metadata-log" containerID="cri-o://6cfda644c89e82e0c471df3db327f1fa97a02c27b7636bf22e85b9d3fe0134fe" gracePeriod=30 Jan 20 17:04:34 crc kubenswrapper[4558]: I0120 17:04:34.064803 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="8b67b016-0bfa-42ec-8c8a-8ea7afb366ac" containerName="nova-metadata-metadata" containerID="cri-o://df1010392aaec0a4a83f5a826cb22e3e0ee83e3dd3dcbe213e6b5fa4528c4b32" gracePeriod=30 Jan 20 17:04:34 crc kubenswrapper[4558]: I0120 17:04:34.902281 4558 generic.go:334] "Generic (PLEG): container finished" podID="3acae54e-bbd3-4f43-a95f-3b1442bac970" containerID="344ade4fe227710f2214c70ce7b21795e9e2804e0d89472fbf05cf4a8eb1bb79" exitCode=143 Jan 20 17:04:34 crc kubenswrapper[4558]: I0120 17:04:34.902350 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"3acae54e-bbd3-4f43-a95f-3b1442bac970","Type":"ContainerDied","Data":"344ade4fe227710f2214c70ce7b21795e9e2804e0d89472fbf05cf4a8eb1bb79"} Jan 20 17:04:34 crc kubenswrapper[4558]: I0120 17:04:34.903738 4558 generic.go:334] "Generic (PLEG): container finished" podID="8b67b016-0bfa-42ec-8c8a-8ea7afb366ac" containerID="6cfda644c89e82e0c471df3db327f1fa97a02c27b7636bf22e85b9d3fe0134fe" exitCode=143 Jan 20 17:04:34 crc kubenswrapper[4558]: I0120 17:04:34.903767 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"8b67b016-0bfa-42ec-8c8a-8ea7afb366ac","Type":"ContainerDied","Data":"6cfda644c89e82e0c471df3db327f1fa97a02c27b7636bf22e85b9d3fe0134fe"} Jan 20 17:04:35 crc kubenswrapper[4558]: 
E0120 17:04:35.841859 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="f94960e9c0eda36da8e549e42b6b9637878691b355b71f154c39b91497ab764e" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:04:35 crc kubenswrapper[4558]: E0120 17:04:35.843308 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="f94960e9c0eda36da8e549e42b6b9637878691b355b71f154c39b91497ab764e" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:04:35 crc kubenswrapper[4558]: E0120 17:04:35.844268 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="f94960e9c0eda36da8e549e42b6b9637878691b355b71f154c39b91497ab764e" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:04:35 crc kubenswrapper[4558]: E0120 17:04:35.844324 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="c3bd0148-350f-4351-8a04-a3009cfdeb29" containerName="nova-scheduler-scheduler" Jan 20 17:04:37 crc kubenswrapper[4558]: I0120 17:04:37.179727 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/nova-metadata-0" podUID="8b67b016-0bfa-42ec-8c8a-8ea7afb366ac" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.1.25:8775/\": read tcp 10.217.0.2:45996->10.217.1.25:8775: read: connection reset by peer" Jan 20 17:04:37 crc kubenswrapper[4558]: I0120 17:04:37.179735 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/nova-metadata-0" podUID="8b67b016-0bfa-42ec-8c8a-8ea7afb366ac" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.1.25:8775/\": read tcp 10.217.0.2:45998->10.217.1.25:8775: read: connection reset by peer" Jan 20 17:04:37 crc kubenswrapper[4558]: I0120 17:04:37.593050 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:04:37 crc kubenswrapper[4558]: I0120 17:04:37.597189 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:04:37 crc kubenswrapper[4558]: I0120 17:04:37.730505 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8b67b016-0bfa-42ec-8c8a-8ea7afb366ac-logs\") pod \"8b67b016-0bfa-42ec-8c8a-8ea7afb366ac\" (UID: \"8b67b016-0bfa-42ec-8c8a-8ea7afb366ac\") " Jan 20 17:04:37 crc kubenswrapper[4558]: I0120 17:04:37.730760 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3acae54e-bbd3-4f43-a95f-3b1442bac970-public-tls-certs\") pod \"3acae54e-bbd3-4f43-a95f-3b1442bac970\" (UID: \"3acae54e-bbd3-4f43-a95f-3b1442bac970\") " Jan 20 17:04:37 crc kubenswrapper[4558]: I0120 17:04:37.730801 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pwbdx\" (UniqueName: \"kubernetes.io/projected/8b67b016-0bfa-42ec-8c8a-8ea7afb366ac-kube-api-access-pwbdx\") pod \"8b67b016-0bfa-42ec-8c8a-8ea7afb366ac\" (UID: \"8b67b016-0bfa-42ec-8c8a-8ea7afb366ac\") " Jan 20 17:04:37 crc kubenswrapper[4558]: I0120 17:04:37.730817 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3acae54e-bbd3-4f43-a95f-3b1442bac970-logs\") pod \"3acae54e-bbd3-4f43-a95f-3b1442bac970\" (UID: \"3acae54e-bbd3-4f43-a95f-3b1442bac970\") " Jan 20 17:04:37 crc kubenswrapper[4558]: I0120 17:04:37.730853 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8b67b016-0bfa-42ec-8c8a-8ea7afb366ac-logs" (OuterVolumeSpecName: "logs") pod "8b67b016-0bfa-42ec-8c8a-8ea7afb366ac" (UID: "8b67b016-0bfa-42ec-8c8a-8ea7afb366ac"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:04:37 crc kubenswrapper[4558]: I0120 17:04:37.730865 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b67b016-0bfa-42ec-8c8a-8ea7afb366ac-nova-metadata-tls-certs\") pod \"8b67b016-0bfa-42ec-8c8a-8ea7afb366ac\" (UID: \"8b67b016-0bfa-42ec-8c8a-8ea7afb366ac\") " Jan 20 17:04:37 crc kubenswrapper[4558]: I0120 17:04:37.730888 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8b67b016-0bfa-42ec-8c8a-8ea7afb366ac-config-data\") pod \"8b67b016-0bfa-42ec-8c8a-8ea7afb366ac\" (UID: \"8b67b016-0bfa-42ec-8c8a-8ea7afb366ac\") " Jan 20 17:04:37 crc kubenswrapper[4558]: I0120 17:04:37.730952 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b67b016-0bfa-42ec-8c8a-8ea7afb366ac-combined-ca-bundle\") pod \"8b67b016-0bfa-42ec-8c8a-8ea7afb366ac\" (UID: \"8b67b016-0bfa-42ec-8c8a-8ea7afb366ac\") " Jan 20 17:04:37 crc kubenswrapper[4558]: I0120 17:04:37.730989 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3acae54e-bbd3-4f43-a95f-3b1442bac970-combined-ca-bundle\") pod \"3acae54e-bbd3-4f43-a95f-3b1442bac970\" (UID: \"3acae54e-bbd3-4f43-a95f-3b1442bac970\") " Jan 20 17:04:37 crc kubenswrapper[4558]: I0120 17:04:37.731033 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3acae54e-bbd3-4f43-a95f-3b1442bac970-internal-tls-certs\") pod \"3acae54e-bbd3-4f43-a95f-3b1442bac970\" (UID: \"3acae54e-bbd3-4f43-a95f-3b1442bac970\") " Jan 20 17:04:37 crc kubenswrapper[4558]: I0120 17:04:37.731079 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dd6rq\" (UniqueName: \"kubernetes.io/projected/3acae54e-bbd3-4f43-a95f-3b1442bac970-kube-api-access-dd6rq\") pod \"3acae54e-bbd3-4f43-a95f-3b1442bac970\" (UID: \"3acae54e-bbd3-4f43-a95f-3b1442bac970\") " Jan 20 17:04:37 crc kubenswrapper[4558]: I0120 17:04:37.731090 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3acae54e-bbd3-4f43-a95f-3b1442bac970-logs" (OuterVolumeSpecName: "logs") pod "3acae54e-bbd3-4f43-a95f-3b1442bac970" (UID: "3acae54e-bbd3-4f43-a95f-3b1442bac970"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:04:37 crc kubenswrapper[4558]: I0120 17:04:37.731093 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3acae54e-bbd3-4f43-a95f-3b1442bac970-config-data\") pod \"3acae54e-bbd3-4f43-a95f-3b1442bac970\" (UID: \"3acae54e-bbd3-4f43-a95f-3b1442bac970\") " Jan 20 17:04:37 crc kubenswrapper[4558]: I0120 17:04:37.731903 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8b67b016-0bfa-42ec-8c8a-8ea7afb366ac-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:04:37 crc kubenswrapper[4558]: I0120 17:04:37.731923 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3acae54e-bbd3-4f43-a95f-3b1442bac970-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:04:37 crc kubenswrapper[4558]: I0120 17:04:37.736381 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3acae54e-bbd3-4f43-a95f-3b1442bac970-kube-api-access-dd6rq" (OuterVolumeSpecName: "kube-api-access-dd6rq") pod "3acae54e-bbd3-4f43-a95f-3b1442bac970" (UID: "3acae54e-bbd3-4f43-a95f-3b1442bac970"). InnerVolumeSpecName "kube-api-access-dd6rq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:04:37 crc kubenswrapper[4558]: I0120 17:04:37.736428 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8b67b016-0bfa-42ec-8c8a-8ea7afb366ac-kube-api-access-pwbdx" (OuterVolumeSpecName: "kube-api-access-pwbdx") pod "8b67b016-0bfa-42ec-8c8a-8ea7afb366ac" (UID: "8b67b016-0bfa-42ec-8c8a-8ea7afb366ac"). InnerVolumeSpecName "kube-api-access-pwbdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:04:37 crc kubenswrapper[4558]: I0120 17:04:37.752226 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3acae54e-bbd3-4f43-a95f-3b1442bac970-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3acae54e-bbd3-4f43-a95f-3b1442bac970" (UID: "3acae54e-bbd3-4f43-a95f-3b1442bac970"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:04:37 crc kubenswrapper[4558]: I0120 17:04:37.752631 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8b67b016-0bfa-42ec-8c8a-8ea7afb366ac-config-data" (OuterVolumeSpecName: "config-data") pod "8b67b016-0bfa-42ec-8c8a-8ea7afb366ac" (UID: "8b67b016-0bfa-42ec-8c8a-8ea7afb366ac"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:04:37 crc kubenswrapper[4558]: I0120 17:04:37.753486 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3acae54e-bbd3-4f43-a95f-3b1442bac970-config-data" (OuterVolumeSpecName: "config-data") pod "3acae54e-bbd3-4f43-a95f-3b1442bac970" (UID: "3acae54e-bbd3-4f43-a95f-3b1442bac970"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:04:37 crc kubenswrapper[4558]: I0120 17:04:37.753634 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8b67b016-0bfa-42ec-8c8a-8ea7afb366ac-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8b67b016-0bfa-42ec-8c8a-8ea7afb366ac" (UID: "8b67b016-0bfa-42ec-8c8a-8ea7afb366ac"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:04:37 crc kubenswrapper[4558]: I0120 17:04:37.766420 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3acae54e-bbd3-4f43-a95f-3b1442bac970-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "3acae54e-bbd3-4f43-a95f-3b1442bac970" (UID: "3acae54e-bbd3-4f43-a95f-3b1442bac970"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:04:37 crc kubenswrapper[4558]: I0120 17:04:37.769706 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8b67b016-0bfa-42ec-8c8a-8ea7afb366ac-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "8b67b016-0bfa-42ec-8c8a-8ea7afb366ac" (UID: "8b67b016-0bfa-42ec-8c8a-8ea7afb366ac"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:04:37 crc kubenswrapper[4558]: I0120 17:04:37.777327 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3acae54e-bbd3-4f43-a95f-3b1442bac970-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "3acae54e-bbd3-4f43-a95f-3b1442bac970" (UID: "3acae54e-bbd3-4f43-a95f-3b1442bac970"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:04:37 crc kubenswrapper[4558]: I0120 17:04:37.834184 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3acae54e-bbd3-4f43-a95f-3b1442bac970-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:04:37 crc kubenswrapper[4558]: I0120 17:04:37.834208 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pwbdx\" (UniqueName: \"kubernetes.io/projected/8b67b016-0bfa-42ec-8c8a-8ea7afb366ac-kube-api-access-pwbdx\") on node \"crc\" DevicePath \"\"" Jan 20 17:04:37 crc kubenswrapper[4558]: I0120 17:04:37.834219 4558 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b67b016-0bfa-42ec-8c8a-8ea7afb366ac-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:04:37 crc kubenswrapper[4558]: I0120 17:04:37.834229 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8b67b016-0bfa-42ec-8c8a-8ea7afb366ac-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:04:37 crc kubenswrapper[4558]: I0120 17:04:37.834237 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b67b016-0bfa-42ec-8c8a-8ea7afb366ac-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:04:37 crc kubenswrapper[4558]: I0120 17:04:37.834245 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3acae54e-bbd3-4f43-a95f-3b1442bac970-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:04:37 crc kubenswrapper[4558]: I0120 17:04:37.834253 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3acae54e-bbd3-4f43-a95f-3b1442bac970-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:04:37 crc kubenswrapper[4558]: I0120 17:04:37.834260 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dd6rq\" (UniqueName: 
\"kubernetes.io/projected/3acae54e-bbd3-4f43-a95f-3b1442bac970-kube-api-access-dd6rq\") on node \"crc\" DevicePath \"\"" Jan 20 17:04:37 crc kubenswrapper[4558]: I0120 17:04:37.834268 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3acae54e-bbd3-4f43-a95f-3b1442bac970-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:04:37 crc kubenswrapper[4558]: I0120 17:04:37.926600 4558 generic.go:334] "Generic (PLEG): container finished" podID="3acae54e-bbd3-4f43-a95f-3b1442bac970" containerID="509826e2026006c0644f79e43d9cd18e5b35820d25605554f35cc4ef5a824a71" exitCode=0 Jan 20 17:04:37 crc kubenswrapper[4558]: I0120 17:04:37.926645 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:04:37 crc kubenswrapper[4558]: I0120 17:04:37.926691 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"3acae54e-bbd3-4f43-a95f-3b1442bac970","Type":"ContainerDied","Data":"509826e2026006c0644f79e43d9cd18e5b35820d25605554f35cc4ef5a824a71"} Jan 20 17:04:37 crc kubenswrapper[4558]: I0120 17:04:37.926957 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"3acae54e-bbd3-4f43-a95f-3b1442bac970","Type":"ContainerDied","Data":"f16b2f39668040ed8ca1503d01970b63e9cbd299181451454261bd6975ffeb74"} Jan 20 17:04:37 crc kubenswrapper[4558]: I0120 17:04:37.926985 4558 scope.go:117] "RemoveContainer" containerID="509826e2026006c0644f79e43d9cd18e5b35820d25605554f35cc4ef5a824a71" Jan 20 17:04:37 crc kubenswrapper[4558]: I0120 17:04:37.928914 4558 generic.go:334] "Generic (PLEG): container finished" podID="8b67b016-0bfa-42ec-8c8a-8ea7afb366ac" containerID="df1010392aaec0a4a83f5a826cb22e3e0ee83e3dd3dcbe213e6b5fa4528c4b32" exitCode=0 Jan 20 17:04:37 crc kubenswrapper[4558]: I0120 17:04:37.928937 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:04:37 crc kubenswrapper[4558]: I0120 17:04:37.928954 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"8b67b016-0bfa-42ec-8c8a-8ea7afb366ac","Type":"ContainerDied","Data":"df1010392aaec0a4a83f5a826cb22e3e0ee83e3dd3dcbe213e6b5fa4528c4b32"} Jan 20 17:04:37 crc kubenswrapper[4558]: I0120 17:04:37.929015 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"8b67b016-0bfa-42ec-8c8a-8ea7afb366ac","Type":"ContainerDied","Data":"eaa202950dcd12b1c87e651ff0733331eb771ec431dee9f29a17f6beafd2829c"} Jan 20 17:04:37 crc kubenswrapper[4558]: I0120 17:04:37.953133 4558 scope.go:117] "RemoveContainer" containerID="344ade4fe227710f2214c70ce7b21795e9e2804e0d89472fbf05cf4a8eb1bb79" Jan 20 17:04:37 crc kubenswrapper[4558]: I0120 17:04:37.966781 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:04:37 crc kubenswrapper[4558]: I0120 17:04:37.976141 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:04:37 crc kubenswrapper[4558]: I0120 17:04:37.988554 4558 scope.go:117] "RemoveContainer" containerID="509826e2026006c0644f79e43d9cd18e5b35820d25605554f35cc4ef5a824a71" Jan 20 17:04:37 crc kubenswrapper[4558]: E0120 17:04:37.989028 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"509826e2026006c0644f79e43d9cd18e5b35820d25605554f35cc4ef5a824a71\": container with ID starting with 509826e2026006c0644f79e43d9cd18e5b35820d25605554f35cc4ef5a824a71 not found: ID does not exist" containerID="509826e2026006c0644f79e43d9cd18e5b35820d25605554f35cc4ef5a824a71" Jan 20 17:04:37 crc kubenswrapper[4558]: I0120 17:04:37.989054 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"509826e2026006c0644f79e43d9cd18e5b35820d25605554f35cc4ef5a824a71"} err="failed to get container status \"509826e2026006c0644f79e43d9cd18e5b35820d25605554f35cc4ef5a824a71\": rpc error: code = NotFound desc = could not find container \"509826e2026006c0644f79e43d9cd18e5b35820d25605554f35cc4ef5a824a71\": container with ID starting with 509826e2026006c0644f79e43d9cd18e5b35820d25605554f35cc4ef5a824a71 not found: ID does not exist" Jan 20 17:04:37 crc kubenswrapper[4558]: I0120 17:04:37.989074 4558 scope.go:117] "RemoveContainer" containerID="344ade4fe227710f2214c70ce7b21795e9e2804e0d89472fbf05cf4a8eb1bb79" Jan 20 17:04:37 crc kubenswrapper[4558]: I0120 17:04:37.989125 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:04:37 crc kubenswrapper[4558]: E0120 17:04:37.989374 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"344ade4fe227710f2214c70ce7b21795e9e2804e0d89472fbf05cf4a8eb1bb79\": container with ID starting with 344ade4fe227710f2214c70ce7b21795e9e2804e0d89472fbf05cf4a8eb1bb79 not found: ID does not exist" containerID="344ade4fe227710f2214c70ce7b21795e9e2804e0d89472fbf05cf4a8eb1bb79" Jan 20 17:04:37 crc kubenswrapper[4558]: I0120 17:04:37.989398 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"344ade4fe227710f2214c70ce7b21795e9e2804e0d89472fbf05cf4a8eb1bb79"} err="failed to get container status 
\"344ade4fe227710f2214c70ce7b21795e9e2804e0d89472fbf05cf4a8eb1bb79\": rpc error: code = NotFound desc = could not find container \"344ade4fe227710f2214c70ce7b21795e9e2804e0d89472fbf05cf4a8eb1bb79\": container with ID starting with 344ade4fe227710f2214c70ce7b21795e9e2804e0d89472fbf05cf4a8eb1bb79 not found: ID does not exist" Jan 20 17:04:37 crc kubenswrapper[4558]: I0120 17:04:37.989412 4558 scope.go:117] "RemoveContainer" containerID="df1010392aaec0a4a83f5a826cb22e3e0ee83e3dd3dcbe213e6b5fa4528c4b32" Jan 20 17:04:37 crc kubenswrapper[4558]: I0120 17:04:37.995944 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:04:38 crc kubenswrapper[4558]: I0120 17:04:38.005755 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:04:38 crc kubenswrapper[4558]: E0120 17:04:38.006114 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3acae54e-bbd3-4f43-a95f-3b1442bac970" containerName="nova-api-log" Jan 20 17:04:38 crc kubenswrapper[4558]: I0120 17:04:38.006134 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="3acae54e-bbd3-4f43-a95f-3b1442bac970" containerName="nova-api-log" Jan 20 17:04:38 crc kubenswrapper[4558]: E0120 17:04:38.006150 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="506aa756-70ad-4884-8ea6-b356ee840e38" containerName="nova-manage" Jan 20 17:04:38 crc kubenswrapper[4558]: I0120 17:04:38.006155 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="506aa756-70ad-4884-8ea6-b356ee840e38" containerName="nova-manage" Jan 20 17:04:38 crc kubenswrapper[4558]: E0120 17:04:38.006200 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3acae54e-bbd3-4f43-a95f-3b1442bac970" containerName="nova-api-api" Jan 20 17:04:38 crc kubenswrapper[4558]: I0120 17:04:38.006206 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="3acae54e-bbd3-4f43-a95f-3b1442bac970" containerName="nova-api-api" Jan 20 17:04:38 crc kubenswrapper[4558]: E0120 17:04:38.006220 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b67b016-0bfa-42ec-8c8a-8ea7afb366ac" containerName="nova-metadata-log" Jan 20 17:04:38 crc kubenswrapper[4558]: I0120 17:04:38.006225 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b67b016-0bfa-42ec-8c8a-8ea7afb366ac" containerName="nova-metadata-log" Jan 20 17:04:38 crc kubenswrapper[4558]: E0120 17:04:38.006236 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b67b016-0bfa-42ec-8c8a-8ea7afb366ac" containerName="nova-metadata-metadata" Jan 20 17:04:38 crc kubenswrapper[4558]: I0120 17:04:38.006242 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b67b016-0bfa-42ec-8c8a-8ea7afb366ac" containerName="nova-metadata-metadata" Jan 20 17:04:38 crc kubenswrapper[4558]: I0120 17:04:38.006422 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8b67b016-0bfa-42ec-8c8a-8ea7afb366ac" containerName="nova-metadata-metadata" Jan 20 17:04:38 crc kubenswrapper[4558]: I0120 17:04:38.006436 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="3acae54e-bbd3-4f43-a95f-3b1442bac970" containerName="nova-api-api" Jan 20 17:04:38 crc kubenswrapper[4558]: I0120 17:04:38.006444 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="506aa756-70ad-4884-8ea6-b356ee840e38" containerName="nova-manage" Jan 20 17:04:38 crc kubenswrapper[4558]: I0120 17:04:38.006455 4558 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="3acae54e-bbd3-4f43-a95f-3b1442bac970" containerName="nova-api-log" Jan 20 17:04:38 crc kubenswrapper[4558]: I0120 17:04:38.006467 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8b67b016-0bfa-42ec-8c8a-8ea7afb366ac" containerName="nova-metadata-log" Jan 20 17:04:38 crc kubenswrapper[4558]: I0120 17:04:38.007319 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:04:38 crc kubenswrapper[4558]: I0120 17:04:38.010254 4558 scope.go:117] "RemoveContainer" containerID="6cfda644c89e82e0c471df3db327f1fa97a02c27b7636bf22e85b9d3fe0134fe" Jan 20 17:04:38 crc kubenswrapper[4558]: I0120 17:04:38.010703 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-api-config-data" Jan 20 17:04:38 crc kubenswrapper[4558]: I0120 17:04:38.010784 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-public-svc" Jan 20 17:04:38 crc kubenswrapper[4558]: I0120 17:04:38.011092 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-internal-svc" Jan 20 17:04:38 crc kubenswrapper[4558]: I0120 17:04:38.014709 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:04:38 crc kubenswrapper[4558]: I0120 17:04:38.023231 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:04:38 crc kubenswrapper[4558]: I0120 17:04:38.024520 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:04:38 crc kubenswrapper[4558]: I0120 17:04:38.027433 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-metadata-config-data" Jan 20 17:04:38 crc kubenswrapper[4558]: I0120 17:04:38.027518 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-metadata-internal-svc" Jan 20 17:04:38 crc kubenswrapper[4558]: I0120 17:04:38.032362 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:04:38 crc kubenswrapper[4558]: I0120 17:04:38.033264 4558 scope.go:117] "RemoveContainer" containerID="df1010392aaec0a4a83f5a826cb22e3e0ee83e3dd3dcbe213e6b5fa4528c4b32" Jan 20 17:04:38 crc kubenswrapper[4558]: E0120 17:04:38.033572 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"df1010392aaec0a4a83f5a826cb22e3e0ee83e3dd3dcbe213e6b5fa4528c4b32\": container with ID starting with df1010392aaec0a4a83f5a826cb22e3e0ee83e3dd3dcbe213e6b5fa4528c4b32 not found: ID does not exist" containerID="df1010392aaec0a4a83f5a826cb22e3e0ee83e3dd3dcbe213e6b5fa4528c4b32" Jan 20 17:04:38 crc kubenswrapper[4558]: I0120 17:04:38.033591 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"df1010392aaec0a4a83f5a826cb22e3e0ee83e3dd3dcbe213e6b5fa4528c4b32"} err="failed to get container status \"df1010392aaec0a4a83f5a826cb22e3e0ee83e3dd3dcbe213e6b5fa4528c4b32\": rpc error: code = NotFound desc = could not find container \"df1010392aaec0a4a83f5a826cb22e3e0ee83e3dd3dcbe213e6b5fa4528c4b32\": container with ID starting with df1010392aaec0a4a83f5a826cb22e3e0ee83e3dd3dcbe213e6b5fa4528c4b32 not found: ID does not exist" Jan 20 17:04:38 crc kubenswrapper[4558]: I0120 17:04:38.033605 4558 scope.go:117] "RemoveContainer" 
containerID="6cfda644c89e82e0c471df3db327f1fa97a02c27b7636bf22e85b9d3fe0134fe" Jan 20 17:04:38 crc kubenswrapper[4558]: E0120 17:04:38.033786 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6cfda644c89e82e0c471df3db327f1fa97a02c27b7636bf22e85b9d3fe0134fe\": container with ID starting with 6cfda644c89e82e0c471df3db327f1fa97a02c27b7636bf22e85b9d3fe0134fe not found: ID does not exist" containerID="6cfda644c89e82e0c471df3db327f1fa97a02c27b7636bf22e85b9d3fe0134fe" Jan 20 17:04:38 crc kubenswrapper[4558]: I0120 17:04:38.033801 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6cfda644c89e82e0c471df3db327f1fa97a02c27b7636bf22e85b9d3fe0134fe"} err="failed to get container status \"6cfda644c89e82e0c471df3db327f1fa97a02c27b7636bf22e85b9d3fe0134fe\": rpc error: code = NotFound desc = could not find container \"6cfda644c89e82e0c471df3db327f1fa97a02c27b7636bf22e85b9d3fe0134fe\": container with ID starting with 6cfda644c89e82e0c471df3db327f1fa97a02c27b7636bf22e85b9d3fe0134fe not found: ID does not exist" Jan 20 17:04:38 crc kubenswrapper[4558]: I0120 17:04:38.138331 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-knpz9\" (UniqueName: \"kubernetes.io/projected/51e263b2-d42e-46fa-99e1-e0c5aa23bcf5-kube-api-access-knpz9\") pod \"nova-api-0\" (UID: \"51e263b2-d42e-46fa-99e1-e0c5aa23bcf5\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:04:38 crc kubenswrapper[4558]: I0120 17:04:38.138390 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1fccb241-c75e-4cec-b3c6-3855bd6c1161-config-data\") pod \"nova-metadata-0\" (UID: \"1fccb241-c75e-4cec-b3c6-3855bd6c1161\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:04:38 crc kubenswrapper[4558]: I0120 17:04:38.138410 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/1fccb241-c75e-4cec-b3c6-3855bd6c1161-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"1fccb241-c75e-4cec-b3c6-3855bd6c1161\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:04:38 crc kubenswrapper[4558]: I0120 17:04:38.138661 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/51e263b2-d42e-46fa-99e1-e0c5aa23bcf5-internal-tls-certs\") pod \"nova-api-0\" (UID: \"51e263b2-d42e-46fa-99e1-e0c5aa23bcf5\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:04:38 crc kubenswrapper[4558]: I0120 17:04:38.138694 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1fccb241-c75e-4cec-b3c6-3855bd6c1161-logs\") pod \"nova-metadata-0\" (UID: \"1fccb241-c75e-4cec-b3c6-3855bd6c1161\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:04:38 crc kubenswrapper[4558]: I0120 17:04:38.138728 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51e263b2-d42e-46fa-99e1-e0c5aa23bcf5-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"51e263b2-d42e-46fa-99e1-e0c5aa23bcf5\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:04:38 crc kubenswrapper[4558]: I0120 17:04:38.138797 4558 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/51e263b2-d42e-46fa-99e1-e0c5aa23bcf5-config-data\") pod \"nova-api-0\" (UID: \"51e263b2-d42e-46fa-99e1-e0c5aa23bcf5\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:04:38 crc kubenswrapper[4558]: I0120 17:04:38.138830 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/51e263b2-d42e-46fa-99e1-e0c5aa23bcf5-public-tls-certs\") pod \"nova-api-0\" (UID: \"51e263b2-d42e-46fa-99e1-e0c5aa23bcf5\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:04:38 crc kubenswrapper[4558]: I0120 17:04:38.138847 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fccb241-c75e-4cec-b3c6-3855bd6c1161-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"1fccb241-c75e-4cec-b3c6-3855bd6c1161\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:04:38 crc kubenswrapper[4558]: I0120 17:04:38.138863 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-95wrc\" (UniqueName: \"kubernetes.io/projected/1fccb241-c75e-4cec-b3c6-3855bd6c1161-kube-api-access-95wrc\") pod \"nova-metadata-0\" (UID: \"1fccb241-c75e-4cec-b3c6-3855bd6c1161\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:04:38 crc kubenswrapper[4558]: I0120 17:04:38.138900 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/51e263b2-d42e-46fa-99e1-e0c5aa23bcf5-logs\") pod \"nova-api-0\" (UID: \"51e263b2-d42e-46fa-99e1-e0c5aa23bcf5\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:04:38 crc kubenswrapper[4558]: I0120 17:04:38.240499 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/51e263b2-d42e-46fa-99e1-e0c5aa23bcf5-internal-tls-certs\") pod \"nova-api-0\" (UID: \"51e263b2-d42e-46fa-99e1-e0c5aa23bcf5\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:04:38 crc kubenswrapper[4558]: I0120 17:04:38.240526 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1fccb241-c75e-4cec-b3c6-3855bd6c1161-logs\") pod \"nova-metadata-0\" (UID: \"1fccb241-c75e-4cec-b3c6-3855bd6c1161\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:04:38 crc kubenswrapper[4558]: I0120 17:04:38.240554 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51e263b2-d42e-46fa-99e1-e0c5aa23bcf5-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"51e263b2-d42e-46fa-99e1-e0c5aa23bcf5\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:04:38 crc kubenswrapper[4558]: I0120 17:04:38.240574 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/51e263b2-d42e-46fa-99e1-e0c5aa23bcf5-config-data\") pod \"nova-api-0\" (UID: \"51e263b2-d42e-46fa-99e1-e0c5aa23bcf5\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:04:38 crc kubenswrapper[4558]: I0120 17:04:38.240588 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/51e263b2-d42e-46fa-99e1-e0c5aa23bcf5-public-tls-certs\") pod \"nova-api-0\" (UID: \"51e263b2-d42e-46fa-99e1-e0c5aa23bcf5\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:04:38 crc kubenswrapper[4558]: I0120 17:04:38.240600 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fccb241-c75e-4cec-b3c6-3855bd6c1161-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"1fccb241-c75e-4cec-b3c6-3855bd6c1161\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:04:38 crc kubenswrapper[4558]: I0120 17:04:38.240626 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-95wrc\" (UniqueName: \"kubernetes.io/projected/1fccb241-c75e-4cec-b3c6-3855bd6c1161-kube-api-access-95wrc\") pod \"nova-metadata-0\" (UID: \"1fccb241-c75e-4cec-b3c6-3855bd6c1161\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:04:38 crc kubenswrapper[4558]: I0120 17:04:38.240657 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/51e263b2-d42e-46fa-99e1-e0c5aa23bcf5-logs\") pod \"nova-api-0\" (UID: \"51e263b2-d42e-46fa-99e1-e0c5aa23bcf5\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:04:38 crc kubenswrapper[4558]: I0120 17:04:38.240681 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-knpz9\" (UniqueName: \"kubernetes.io/projected/51e263b2-d42e-46fa-99e1-e0c5aa23bcf5-kube-api-access-knpz9\") pod \"nova-api-0\" (UID: \"51e263b2-d42e-46fa-99e1-e0c5aa23bcf5\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:04:38 crc kubenswrapper[4558]: I0120 17:04:38.240699 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1fccb241-c75e-4cec-b3c6-3855bd6c1161-config-data\") pod \"nova-metadata-0\" (UID: \"1fccb241-c75e-4cec-b3c6-3855bd6c1161\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:04:38 crc kubenswrapper[4558]: I0120 17:04:38.240716 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/1fccb241-c75e-4cec-b3c6-3855bd6c1161-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"1fccb241-c75e-4cec-b3c6-3855bd6c1161\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:04:38 crc kubenswrapper[4558]: I0120 17:04:38.240900 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1fccb241-c75e-4cec-b3c6-3855bd6c1161-logs\") pod \"nova-metadata-0\" (UID: \"1fccb241-c75e-4cec-b3c6-3855bd6c1161\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:04:38 crc kubenswrapper[4558]: I0120 17:04:38.241331 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/51e263b2-d42e-46fa-99e1-e0c5aa23bcf5-logs\") pod \"nova-api-0\" (UID: \"51e263b2-d42e-46fa-99e1-e0c5aa23bcf5\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:04:38 crc kubenswrapper[4558]: I0120 17:04:38.243560 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/51e263b2-d42e-46fa-99e1-e0c5aa23bcf5-config-data\") pod \"nova-api-0\" (UID: \"51e263b2-d42e-46fa-99e1-e0c5aa23bcf5\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:04:38 crc kubenswrapper[4558]: I0120 17:04:38.243599 4558 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fccb241-c75e-4cec-b3c6-3855bd6c1161-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"1fccb241-c75e-4cec-b3c6-3855bd6c1161\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:04:38 crc kubenswrapper[4558]: I0120 17:04:38.243701 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1fccb241-c75e-4cec-b3c6-3855bd6c1161-config-data\") pod \"nova-metadata-0\" (UID: \"1fccb241-c75e-4cec-b3c6-3855bd6c1161\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:04:38 crc kubenswrapper[4558]: I0120 17:04:38.244003 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51e263b2-d42e-46fa-99e1-e0c5aa23bcf5-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"51e263b2-d42e-46fa-99e1-e0c5aa23bcf5\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:04:38 crc kubenswrapper[4558]: I0120 17:04:38.244027 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/51e263b2-d42e-46fa-99e1-e0c5aa23bcf5-public-tls-certs\") pod \"nova-api-0\" (UID: \"51e263b2-d42e-46fa-99e1-e0c5aa23bcf5\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:04:38 crc kubenswrapper[4558]: I0120 17:04:38.244124 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/1fccb241-c75e-4cec-b3c6-3855bd6c1161-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"1fccb241-c75e-4cec-b3c6-3855bd6c1161\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:04:38 crc kubenswrapper[4558]: I0120 17:04:38.245420 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/51e263b2-d42e-46fa-99e1-e0c5aa23bcf5-internal-tls-certs\") pod \"nova-api-0\" (UID: \"51e263b2-d42e-46fa-99e1-e0c5aa23bcf5\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:04:38 crc kubenswrapper[4558]: I0120 17:04:38.253398 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-knpz9\" (UniqueName: \"kubernetes.io/projected/51e263b2-d42e-46fa-99e1-e0c5aa23bcf5-kube-api-access-knpz9\") pod \"nova-api-0\" (UID: \"51e263b2-d42e-46fa-99e1-e0c5aa23bcf5\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:04:38 crc kubenswrapper[4558]: I0120 17:04:38.253875 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-95wrc\" (UniqueName: \"kubernetes.io/projected/1fccb241-c75e-4cec-b3c6-3855bd6c1161-kube-api-access-95wrc\") pod \"nova-metadata-0\" (UID: \"1fccb241-c75e-4cec-b3c6-3855bd6c1161\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:04:38 crc kubenswrapper[4558]: I0120 17:04:38.334897 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:04:38 crc kubenswrapper[4558]: I0120 17:04:38.342556 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:04:38 crc kubenswrapper[4558]: I0120 17:04:38.575592 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3acae54e-bbd3-4f43-a95f-3b1442bac970" path="/var/lib/kubelet/pods/3acae54e-bbd3-4f43-a95f-3b1442bac970/volumes" Jan 20 17:04:38 crc kubenswrapper[4558]: I0120 17:04:38.576314 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8b67b016-0bfa-42ec-8c8a-8ea7afb366ac" path="/var/lib/kubelet/pods/8b67b016-0bfa-42ec-8c8a-8ea7afb366ac/volumes" Jan 20 17:04:38 crc kubenswrapper[4558]: I0120 17:04:38.710786 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:04:38 crc kubenswrapper[4558]: W0120 17:04:38.748475 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod51e263b2_d42e_46fa_99e1_e0c5aa23bcf5.slice/crio-b261479d8214b4c91b06791120f4707885ae512c223f9d9f954e0a00e2256c0c WatchSource:0}: Error finding container b261479d8214b4c91b06791120f4707885ae512c223f9d9f954e0a00e2256c0c: Status 404 returned error can't find the container with id b261479d8214b4c91b06791120f4707885ae512c223f9d9f954e0a00e2256c0c Jan 20 17:04:38 crc kubenswrapper[4558]: I0120 17:04:38.767862 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:04:38 crc kubenswrapper[4558]: W0120 17:04:38.779676 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1fccb241_c75e_4cec_b3c6_3855bd6c1161.slice/crio-cdf7d182a2c2cc94e19bcd44382c4ae7cad216f7aaf562ecfb18164494c47997 WatchSource:0}: Error finding container cdf7d182a2c2cc94e19bcd44382c4ae7cad216f7aaf562ecfb18164494c47997: Status 404 returned error can't find the container with id cdf7d182a2c2cc94e19bcd44382c4ae7cad216f7aaf562ecfb18164494c47997 Jan 20 17:04:38 crc kubenswrapper[4558]: I0120 17:04:38.936592 4558 generic.go:334] "Generic (PLEG): container finished" podID="c3bd0148-350f-4351-8a04-a3009cfdeb29" containerID="f94960e9c0eda36da8e549e42b6b9637878691b355b71f154c39b91497ab764e" exitCode=0 Jan 20 17:04:38 crc kubenswrapper[4558]: I0120 17:04:38.936670 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"c3bd0148-350f-4351-8a04-a3009cfdeb29","Type":"ContainerDied","Data":"f94960e9c0eda36da8e549e42b6b9637878691b355b71f154c39b91497ab764e"} Jan 20 17:04:38 crc kubenswrapper[4558]: I0120 17:04:38.938686 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"1fccb241-c75e-4cec-b3c6-3855bd6c1161","Type":"ContainerStarted","Data":"e6a99988097b2873c0b2ced77f6588f66ac0fb5bb22b3ef8e1e7a52ca42e4820"} Jan 20 17:04:38 crc kubenswrapper[4558]: I0120 17:04:38.938729 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"1fccb241-c75e-4cec-b3c6-3855bd6c1161","Type":"ContainerStarted","Data":"cdf7d182a2c2cc94e19bcd44382c4ae7cad216f7aaf562ecfb18164494c47997"} Jan 20 17:04:38 crc kubenswrapper[4558]: I0120 17:04:38.939697 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"51e263b2-d42e-46fa-99e1-e0c5aa23bcf5","Type":"ContainerStarted","Data":"762c3b53bff8276c53ff1ba0f6fcbbc8ea3579b91262bc868d821bf5b7740f32"} Jan 20 17:04:38 crc kubenswrapper[4558]: I0120 17:04:38.939723 4558 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"51e263b2-d42e-46fa-99e1-e0c5aa23bcf5","Type":"ContainerStarted","Data":"b261479d8214b4c91b06791120f4707885ae512c223f9d9f954e0a00e2256c0c"} Jan 20 17:04:38 crc kubenswrapper[4558]: I0120 17:04:38.985012 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:04:39 crc kubenswrapper[4558]: I0120 17:04:39.053335 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3bd0148-350f-4351-8a04-a3009cfdeb29-combined-ca-bundle\") pod \"c3bd0148-350f-4351-8a04-a3009cfdeb29\" (UID: \"c3bd0148-350f-4351-8a04-a3009cfdeb29\") " Jan 20 17:04:39 crc kubenswrapper[4558]: I0120 17:04:39.053378 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c3bd0148-350f-4351-8a04-a3009cfdeb29-config-data\") pod \"c3bd0148-350f-4351-8a04-a3009cfdeb29\" (UID: \"c3bd0148-350f-4351-8a04-a3009cfdeb29\") " Jan 20 17:04:39 crc kubenswrapper[4558]: I0120 17:04:39.053418 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-shzrt\" (UniqueName: \"kubernetes.io/projected/c3bd0148-350f-4351-8a04-a3009cfdeb29-kube-api-access-shzrt\") pod \"c3bd0148-350f-4351-8a04-a3009cfdeb29\" (UID: \"c3bd0148-350f-4351-8a04-a3009cfdeb29\") " Jan 20 17:04:39 crc kubenswrapper[4558]: I0120 17:04:39.056018 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c3bd0148-350f-4351-8a04-a3009cfdeb29-kube-api-access-shzrt" (OuterVolumeSpecName: "kube-api-access-shzrt") pod "c3bd0148-350f-4351-8a04-a3009cfdeb29" (UID: "c3bd0148-350f-4351-8a04-a3009cfdeb29"). InnerVolumeSpecName "kube-api-access-shzrt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:04:39 crc kubenswrapper[4558]: I0120 17:04:39.071659 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c3bd0148-350f-4351-8a04-a3009cfdeb29-config-data" (OuterVolumeSpecName: "config-data") pod "c3bd0148-350f-4351-8a04-a3009cfdeb29" (UID: "c3bd0148-350f-4351-8a04-a3009cfdeb29"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:04:39 crc kubenswrapper[4558]: I0120 17:04:39.072300 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c3bd0148-350f-4351-8a04-a3009cfdeb29-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c3bd0148-350f-4351-8a04-a3009cfdeb29" (UID: "c3bd0148-350f-4351-8a04-a3009cfdeb29"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:04:39 crc kubenswrapper[4558]: I0120 17:04:39.155983 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3bd0148-350f-4351-8a04-a3009cfdeb29-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:04:39 crc kubenswrapper[4558]: I0120 17:04:39.156017 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c3bd0148-350f-4351-8a04-a3009cfdeb29-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:04:39 crc kubenswrapper[4558]: I0120 17:04:39.156029 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-shzrt\" (UniqueName: \"kubernetes.io/projected/c3bd0148-350f-4351-8a04-a3009cfdeb29-kube-api-access-shzrt\") on node \"crc\" DevicePath \"\"" Jan 20 17:04:39 crc kubenswrapper[4558]: I0120 17:04:39.948491 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"51e263b2-d42e-46fa-99e1-e0c5aa23bcf5","Type":"ContainerStarted","Data":"5b5134f4161398c21fc398c89b30ad1744c35cb876a3e5afd52d7adcc9031cd4"} Jan 20 17:04:39 crc kubenswrapper[4558]: I0120 17:04:39.950069 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"c3bd0148-350f-4351-8a04-a3009cfdeb29","Type":"ContainerDied","Data":"dcc3820138343191288b84a22d14118f1a07aa0ece4aaeabfa81478c0881bcdb"} Jan 20 17:04:39 crc kubenswrapper[4558]: I0120 17:04:39.950096 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:04:39 crc kubenswrapper[4558]: I0120 17:04:39.950118 4558 scope.go:117] "RemoveContainer" containerID="f94960e9c0eda36da8e549e42b6b9637878691b355b71f154c39b91497ab764e" Jan 20 17:04:39 crc kubenswrapper[4558]: I0120 17:04:39.952414 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"1fccb241-c75e-4cec-b3c6-3855bd6c1161","Type":"ContainerStarted","Data":"1e838eb49cb5fab8df3c83e562edc9dac0c018daeb2274638eb55d7398246708"} Jan 20 17:04:39 crc kubenswrapper[4558]: I0120 17:04:39.978544 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-api-0" podStartSLOduration=2.978527984 podStartE2EDuration="2.978527984s" podCreationTimestamp="2026-01-20 17:04:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:04:39.969467578 +0000 UTC m=+1373.729805565" watchObservedRunningTime="2026-01-20 17:04:39.978527984 +0000 UTC m=+1373.738865951" Jan 20 17:04:39 crc kubenswrapper[4558]: I0120 17:04:39.986654 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-metadata-0" podStartSLOduration=2.986641099 podStartE2EDuration="2.986641099s" podCreationTimestamp="2026-01-20 17:04:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:04:39.982325525 +0000 UTC m=+1373.742663492" watchObservedRunningTime="2026-01-20 17:04:39.986641099 +0000 UTC m=+1373.746979066" Jan 20 17:04:40 crc kubenswrapper[4558]: I0120 17:04:40.004101 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:04:40 crc kubenswrapper[4558]: I0120 17:04:40.012601 4558 kubelet.go:2431] 
"SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:04:40 crc kubenswrapper[4558]: I0120 17:04:40.022083 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:04:40 crc kubenswrapper[4558]: E0120 17:04:40.022506 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c3bd0148-350f-4351-8a04-a3009cfdeb29" containerName="nova-scheduler-scheduler" Jan 20 17:04:40 crc kubenswrapper[4558]: I0120 17:04:40.022523 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c3bd0148-350f-4351-8a04-a3009cfdeb29" containerName="nova-scheduler-scheduler" Jan 20 17:04:40 crc kubenswrapper[4558]: I0120 17:04:40.022773 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c3bd0148-350f-4351-8a04-a3009cfdeb29" containerName="nova-scheduler-scheduler" Jan 20 17:04:40 crc kubenswrapper[4558]: I0120 17:04:40.023357 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:04:40 crc kubenswrapper[4558]: I0120 17:04:40.024941 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-scheduler-config-data" Jan 20 17:04:40 crc kubenswrapper[4558]: I0120 17:04:40.031316 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:04:40 crc kubenswrapper[4558]: I0120 17:04:40.172836 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/38786f1c-754c-488d-8a13-aad7001ad778-config-data\") pod \"nova-scheduler-0\" (UID: \"38786f1c-754c-488d-8a13-aad7001ad778\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:04:40 crc kubenswrapper[4558]: I0120 17:04:40.172915 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-scwwb\" (UniqueName: \"kubernetes.io/projected/38786f1c-754c-488d-8a13-aad7001ad778-kube-api-access-scwwb\") pod \"nova-scheduler-0\" (UID: \"38786f1c-754c-488d-8a13-aad7001ad778\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:04:40 crc kubenswrapper[4558]: I0120 17:04:40.173079 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38786f1c-754c-488d-8a13-aad7001ad778-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"38786f1c-754c-488d-8a13-aad7001ad778\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:04:40 crc kubenswrapper[4558]: I0120 17:04:40.274718 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38786f1c-754c-488d-8a13-aad7001ad778-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"38786f1c-754c-488d-8a13-aad7001ad778\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:04:40 crc kubenswrapper[4558]: I0120 17:04:40.274780 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/38786f1c-754c-488d-8a13-aad7001ad778-config-data\") pod \"nova-scheduler-0\" (UID: \"38786f1c-754c-488d-8a13-aad7001ad778\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:04:40 crc kubenswrapper[4558]: I0120 17:04:40.274835 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-scwwb\" (UniqueName: 
\"kubernetes.io/projected/38786f1c-754c-488d-8a13-aad7001ad778-kube-api-access-scwwb\") pod \"nova-scheduler-0\" (UID: \"38786f1c-754c-488d-8a13-aad7001ad778\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:04:40 crc kubenswrapper[4558]: I0120 17:04:40.278596 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/38786f1c-754c-488d-8a13-aad7001ad778-config-data\") pod \"nova-scheduler-0\" (UID: \"38786f1c-754c-488d-8a13-aad7001ad778\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:04:40 crc kubenswrapper[4558]: I0120 17:04:40.279201 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38786f1c-754c-488d-8a13-aad7001ad778-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"38786f1c-754c-488d-8a13-aad7001ad778\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:04:40 crc kubenswrapper[4558]: I0120 17:04:40.288062 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-scwwb\" (UniqueName: \"kubernetes.io/projected/38786f1c-754c-488d-8a13-aad7001ad778-kube-api-access-scwwb\") pod \"nova-scheduler-0\" (UID: \"38786f1c-754c-488d-8a13-aad7001ad778\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:04:40 crc kubenswrapper[4558]: I0120 17:04:40.342338 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:04:40 crc kubenswrapper[4558]: I0120 17:04:40.573686 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c3bd0148-350f-4351-8a04-a3009cfdeb29" path="/var/lib/kubelet/pods/c3bd0148-350f-4351-8a04-a3009cfdeb29/volumes" Jan 20 17:04:40 crc kubenswrapper[4558]: I0120 17:04:40.699912 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:04:40 crc kubenswrapper[4558]: W0120 17:04:40.702388 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod38786f1c_754c_488d_8a13_aad7001ad778.slice/crio-b0201f422520b19ef548df4a913ff33872222b88fc9de492698fde77bdab6397 WatchSource:0}: Error finding container b0201f422520b19ef548df4a913ff33872222b88fc9de492698fde77bdab6397: Status 404 returned error can't find the container with id b0201f422520b19ef548df4a913ff33872222b88fc9de492698fde77bdab6397 Jan 20 17:04:40 crc kubenswrapper[4558]: I0120 17:04:40.959092 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"38786f1c-754c-488d-8a13-aad7001ad778","Type":"ContainerStarted","Data":"95f19d7a9e9522b1398a79cebf0602dd58a2e2f0ffdd88865efd947c83bbb895"} Jan 20 17:04:40 crc kubenswrapper[4558]: I0120 17:04:40.959134 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"38786f1c-754c-488d-8a13-aad7001ad778","Type":"ContainerStarted","Data":"b0201f422520b19ef548df4a913ff33872222b88fc9de492698fde77bdab6397"} Jan 20 17:04:40 crc kubenswrapper[4558]: I0120 17:04:40.974913 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-scheduler-0" podStartSLOduration=1.97490221 podStartE2EDuration="1.97490221s" podCreationTimestamp="2026-01-20 17:04:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:04:40.970111562 +0000 
UTC m=+1374.730449529" watchObservedRunningTime="2026-01-20 17:04:40.97490221 +0000 UTC m=+1374.735240177" Jan 20 17:04:43 crc kubenswrapper[4558]: I0120 17:04:43.343377 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:04:43 crc kubenswrapper[4558]: I0120 17:04:43.343698 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:04:45 crc kubenswrapper[4558]: I0120 17:04:45.342702 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:04:48 crc kubenswrapper[4558]: I0120 17:04:48.335225 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:04:48 crc kubenswrapper[4558]: I0120 17:04:48.335559 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:04:48 crc kubenswrapper[4558]: I0120 17:04:48.342900 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:04:48 crc kubenswrapper[4558]: I0120 17:04:48.342971 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:04:49 crc kubenswrapper[4558]: I0120 17:04:49.348303 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" podUID="51e263b2-d42e-46fa-99e1-e0c5aa23bcf5" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.1.31:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:04:49 crc kubenswrapper[4558]: I0120 17:04:49.348334 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" podUID="51e263b2-d42e-46fa-99e1-e0c5aa23bcf5" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.1.31:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 20 17:04:49 crc kubenswrapper[4558]: I0120 17:04:49.358280 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-metadata-0" podUID="1fccb241-c75e-4cec-b3c6-3855bd6c1161" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.1.32:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:04:49 crc kubenswrapper[4558]: I0120 17:04:49.358306 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-metadata-0" podUID="1fccb241-c75e-4cec-b3c6-3855bd6c1161" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.1.32:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:04:50 crc kubenswrapper[4558]: I0120 17:04:50.343501 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:04:50 crc kubenswrapper[4558]: I0120 17:04:50.365149 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:04:51 crc kubenswrapper[4558]: I0120 17:04:51.066609 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:04:52 crc kubenswrapper[4558]: I0120 17:04:52.194785 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="ready" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:04:55 crc kubenswrapper[4558]: I0120 17:04:55.215825 4558 scope.go:117] "RemoveContainer" containerID="4a04ce46269bb3a821a0882c3df0c15680043afbc05a402c642893802ec047fb" Jan 20 17:04:55 crc kubenswrapper[4558]: I0120 17:04:55.241063 4558 scope.go:117] "RemoveContainer" containerID="8b7b47b4782e3e9aacef0f5eea2d8d7e660530d49bda8b1aa64c6b0f0a557740" Jan 20 17:04:55 crc kubenswrapper[4558]: I0120 17:04:55.259284 4558 scope.go:117] "RemoveContainer" containerID="696014d7096181a1aaf1489ef7979589908b3b72428090b1c90f5210f07074f6" Jan 20 17:04:55 crc kubenswrapper[4558]: I0120 17:04:55.298552 4558 scope.go:117] "RemoveContainer" containerID="9dc6d188111caa16be7e42f9dc05193b4a08598807aae37d3d8b8dc08bb5b17e" Jan 20 17:04:55 crc kubenswrapper[4558]: I0120 17:04:55.325584 4558 scope.go:117] "RemoveContainer" containerID="03b367715211c190150b7a3c5d23027458fc3dfb546230b77e8b7b4f4e3cbe86" Jan 20 17:04:55 crc kubenswrapper[4558]: I0120 17:04:55.351502 4558 scope.go:117] "RemoveContainer" containerID="8645a2a68bc998fad70edddaf73505c46e74320243525ed7f7c2c0c00d201e22" Jan 20 17:04:55 crc kubenswrapper[4558]: I0120 17:04:55.371544 4558 scope.go:117] "RemoveContainer" containerID="3f5e26b3d7dc3e18ff91d7a5ac659587be205d5cf45b39eccf8b2cb509ff08d2" Jan 20 17:04:55 crc kubenswrapper[4558]: I0120 17:04:55.400069 4558 scope.go:117] "RemoveContainer" containerID="591c362bc682cbd8e0d3cbd8295b50cde5fa85ed1fb56d6f747d6009d445c395" Jan 20 17:04:55 crc kubenswrapper[4558]: I0120 17:04:55.426489 4558 scope.go:117] "RemoveContainer" containerID="b64bb5dba514acffdebd80ad0a9174f80f0ddc9282c335ff220ed3779508fb8b" Jan 20 17:04:55 crc kubenswrapper[4558]: I0120 17:04:55.453746 4558 scope.go:117] "RemoveContainer" containerID="d61066676614c4ff8053fc80cbca3d6e61c8c8994020479c786429e5a7609638" Jan 20 17:04:55 crc kubenswrapper[4558]: I0120 17:04:55.470619 4558 scope.go:117] "RemoveContainer" containerID="243e6138e05c0c7fcce3ba9f609d21d90a0fef60ee11f1b373592271c6c011c0" Jan 20 17:04:58 crc kubenswrapper[4558]: I0120 17:04:58.341557 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:04:58 crc kubenswrapper[4558]: I0120 17:04:58.342200 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:04:58 crc kubenswrapper[4558]: I0120 17:04:58.342590 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:04:58 crc kubenswrapper[4558]: I0120 17:04:58.346395 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:04:58 crc kubenswrapper[4558]: I0120 17:04:58.346685 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:04:58 crc kubenswrapper[4558]: I0120 17:04:58.349316 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:04:58 crc kubenswrapper[4558]: I0120 17:04:58.351101 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:04:59 crc kubenswrapper[4558]: I0120 17:04:59.111250 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:04:59 crc kubenswrapper[4558]: I0120 17:04:59.115956 4558 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:04:59 crc kubenswrapper[4558]: I0120 17:04:59.117437 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:05:01 crc kubenswrapper[4558]: I0120 17:05:01.543059 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ovn-controller-87pjw"] Jan 20 17:05:01 crc kubenswrapper[4558]: I0120 17:05:01.545456 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovn-controller-87pjw" Jan 20 17:05:01 crc kubenswrapper[4558]: I0120 17:05:01.551045 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovncontroller-scripts" Jan 20 17:05:01 crc kubenswrapper[4558]: I0120 17:05:01.551081 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ovncontroller-ovndbs" Jan 20 17:05:01 crc kubenswrapper[4558]: I0120 17:05:01.551108 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ovn-controller-ovs-ccvgc"] Jan 20 17:05:01 crc kubenswrapper[4558]: I0120 17:05:01.551343 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ovncontroller-ovncontroller-dockercfg-2xg9k" Jan 20 17:05:01 crc kubenswrapper[4558]: I0120 17:05:01.553092 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovn-controller-ovs-ccvgc" Jan 20 17:05:01 crc kubenswrapper[4558]: I0120 17:05:01.557382 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovn-controller-87pjw"] Jan 20 17:05:01 crc kubenswrapper[4558]: I0120 17:05:01.579837 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovn-controller-ovs-ccvgc"] Jan 20 17:05:01 crc kubenswrapper[4558]: I0120 17:05:01.634570 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ovn-controller-metrics-x6ptv"] Jan 20 17:05:01 crc kubenswrapper[4558]: I0120 17:05:01.635725 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovn-controller-metrics-x6ptv" Jan 20 17:05:01 crc kubenswrapper[4558]: I0120 17:05:01.640748 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovncontroller-metrics-config" Jan 20 17:05:01 crc kubenswrapper[4558]: I0120 17:05:01.659211 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovn-controller-metrics-x6ptv"] Jan 20 17:05:01 crc kubenswrapper[4558]: I0120 17:05:01.673218 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/2606dddf-c407-4594-a587-c4eaaa573634-var-log\") pod \"ovn-controller-ovs-ccvgc\" (UID: \"2606dddf-c407-4594-a587-c4eaaa573634\") " pod="openstack-kuttl-tests/ovn-controller-ovs-ccvgc" Jan 20 17:05:01 crc kubenswrapper[4558]: I0120 17:05:01.673383 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/1cde556c-b9b1-484a-b512-ca44c5f47353-var-run\") pod \"ovn-controller-87pjw\" (UID: \"1cde556c-b9b1-484a-b512-ca44c5f47353\") " pod="openstack-kuttl-tests/ovn-controller-87pjw" Jan 20 17:05:01 crc kubenswrapper[4558]: I0120 17:05:01.673441 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/2606dddf-c407-4594-a587-c4eaaa573634-var-lib\") pod \"ovn-controller-ovs-ccvgc\" (UID: \"2606dddf-c407-4594-a587-c4eaaa573634\") " pod="openstack-kuttl-tests/ovn-controller-ovs-ccvgc" Jan 20 17:05:01 crc kubenswrapper[4558]: I0120 17:05:01.673471 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/1cde556c-b9b1-484a-b512-ca44c5f47353-var-run-ovn\") pod \"ovn-controller-87pjw\" (UID: \"1cde556c-b9b1-484a-b512-ca44c5f47353\") " pod="openstack-kuttl-tests/ovn-controller-87pjw" Jan 20 17:05:01 crc kubenswrapper[4558]: I0120 17:05:01.673490 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/1cde556c-b9b1-484a-b512-ca44c5f47353-ovn-controller-tls-certs\") pod \"ovn-controller-87pjw\" (UID: \"1cde556c-b9b1-484a-b512-ca44c5f47353\") " pod="openstack-kuttl-tests/ovn-controller-87pjw" Jan 20 17:05:01 crc kubenswrapper[4558]: I0120 17:05:01.673523 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1cde556c-b9b1-484a-b512-ca44c5f47353-scripts\") pod \"ovn-controller-87pjw\" (UID: \"1cde556c-b9b1-484a-b512-ca44c5f47353\") " pod="openstack-kuttl-tests/ovn-controller-87pjw" Jan 20 17:05:01 crc kubenswrapper[4558]: I0120 17:05:01.673539 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2606dddf-c407-4594-a587-c4eaaa573634-scripts\") pod \"ovn-controller-ovs-ccvgc\" (UID: \"2606dddf-c407-4594-a587-c4eaaa573634\") " pod="openstack-kuttl-tests/ovn-controller-ovs-ccvgc" Jan 20 17:05:01 crc kubenswrapper[4558]: I0120 17:05:01.673603 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gjftp\" (UniqueName: \"kubernetes.io/projected/2606dddf-c407-4594-a587-c4eaaa573634-kube-api-access-gjftp\") pod 
\"ovn-controller-ovs-ccvgc\" (UID: \"2606dddf-c407-4594-a587-c4eaaa573634\") " pod="openstack-kuttl-tests/ovn-controller-ovs-ccvgc" Jan 20 17:05:01 crc kubenswrapper[4558]: I0120 17:05:01.673653 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h7m4z\" (UniqueName: \"kubernetes.io/projected/1cde556c-b9b1-484a-b512-ca44c5f47353-kube-api-access-h7m4z\") pod \"ovn-controller-87pjw\" (UID: \"1cde556c-b9b1-484a-b512-ca44c5f47353\") " pod="openstack-kuttl-tests/ovn-controller-87pjw" Jan 20 17:05:01 crc kubenswrapper[4558]: I0120 17:05:01.673714 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/1cde556c-b9b1-484a-b512-ca44c5f47353-var-log-ovn\") pod \"ovn-controller-87pjw\" (UID: \"1cde556c-b9b1-484a-b512-ca44c5f47353\") " pod="openstack-kuttl-tests/ovn-controller-87pjw" Jan 20 17:05:01 crc kubenswrapper[4558]: I0120 17:05:01.673769 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1cde556c-b9b1-484a-b512-ca44c5f47353-combined-ca-bundle\") pod \"ovn-controller-87pjw\" (UID: \"1cde556c-b9b1-484a-b512-ca44c5f47353\") " pod="openstack-kuttl-tests/ovn-controller-87pjw" Jan 20 17:05:01 crc kubenswrapper[4558]: I0120 17:05:01.673832 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/2606dddf-c407-4594-a587-c4eaaa573634-var-run\") pod \"ovn-controller-ovs-ccvgc\" (UID: \"2606dddf-c407-4594-a587-c4eaaa573634\") " pod="openstack-kuttl-tests/ovn-controller-ovs-ccvgc" Jan 20 17:05:01 crc kubenswrapper[4558]: I0120 17:05:01.673867 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/2606dddf-c407-4594-a587-c4eaaa573634-etc-ovs\") pod \"ovn-controller-ovs-ccvgc\" (UID: \"2606dddf-c407-4594-a587-c4eaaa573634\") " pod="openstack-kuttl-tests/ovn-controller-ovs-ccvgc" Jan 20 17:05:01 crc kubenswrapper[4558]: I0120 17:05:01.776913 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/bb2560f9-fe1c-4251-89ae-0b242a9243bc-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-x6ptv\" (UID: \"bb2560f9-fe1c-4251-89ae-0b242a9243bc\") " pod="openstack-kuttl-tests/ovn-controller-metrics-x6ptv" Jan 20 17:05:01 crc kubenswrapper[4558]: I0120 17:05:01.776967 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1cde556c-b9b1-484a-b512-ca44c5f47353-combined-ca-bundle\") pod \"ovn-controller-87pjw\" (UID: \"1cde556c-b9b1-484a-b512-ca44c5f47353\") " pod="openstack-kuttl-tests/ovn-controller-87pjw" Jan 20 17:05:01 crc kubenswrapper[4558]: I0120 17:05:01.777095 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/2606dddf-c407-4594-a587-c4eaaa573634-var-run\") pod \"ovn-controller-ovs-ccvgc\" (UID: \"2606dddf-c407-4594-a587-c4eaaa573634\") " pod="openstack-kuttl-tests/ovn-controller-ovs-ccvgc" Jan 20 17:05:01 crc kubenswrapper[4558]: I0120 17:05:01.777175 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: 
\"kubernetes.io/host-path/2606dddf-c407-4594-a587-c4eaaa573634-etc-ovs\") pod \"ovn-controller-ovs-ccvgc\" (UID: \"2606dddf-c407-4594-a587-c4eaaa573634\") " pod="openstack-kuttl-tests/ovn-controller-ovs-ccvgc" Jan 20 17:05:01 crc kubenswrapper[4558]: I0120 17:05:01.777237 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/bb2560f9-fe1c-4251-89ae-0b242a9243bc-ovn-rundir\") pod \"ovn-controller-metrics-x6ptv\" (UID: \"bb2560f9-fe1c-4251-89ae-0b242a9243bc\") " pod="openstack-kuttl-tests/ovn-controller-metrics-x6ptv" Jan 20 17:05:01 crc kubenswrapper[4558]: I0120 17:05:01.777275 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/2606dddf-c407-4594-a587-c4eaaa573634-var-log\") pod \"ovn-controller-ovs-ccvgc\" (UID: \"2606dddf-c407-4594-a587-c4eaaa573634\") " pod="openstack-kuttl-tests/ovn-controller-ovs-ccvgc" Jan 20 17:05:01 crc kubenswrapper[4558]: I0120 17:05:01.777320 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bb2560f9-fe1c-4251-89ae-0b242a9243bc-combined-ca-bundle\") pod \"ovn-controller-metrics-x6ptv\" (UID: \"bb2560f9-fe1c-4251-89ae-0b242a9243bc\") " pod="openstack-kuttl-tests/ovn-controller-metrics-x6ptv" Jan 20 17:05:01 crc kubenswrapper[4558]: I0120 17:05:01.777365 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4mprp\" (UniqueName: \"kubernetes.io/projected/bb2560f9-fe1c-4251-89ae-0b242a9243bc-kube-api-access-4mprp\") pod \"ovn-controller-metrics-x6ptv\" (UID: \"bb2560f9-fe1c-4251-89ae-0b242a9243bc\") " pod="openstack-kuttl-tests/ovn-controller-metrics-x6ptv" Jan 20 17:05:01 crc kubenswrapper[4558]: I0120 17:05:01.777422 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bb2560f9-fe1c-4251-89ae-0b242a9243bc-config\") pod \"ovn-controller-metrics-x6ptv\" (UID: \"bb2560f9-fe1c-4251-89ae-0b242a9243bc\") " pod="openstack-kuttl-tests/ovn-controller-metrics-x6ptv" Jan 20 17:05:01 crc kubenswrapper[4558]: I0120 17:05:01.777537 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/1cde556c-b9b1-484a-b512-ca44c5f47353-var-run\") pod \"ovn-controller-87pjw\" (UID: \"1cde556c-b9b1-484a-b512-ca44c5f47353\") " pod="openstack-kuttl-tests/ovn-controller-87pjw" Jan 20 17:05:01 crc kubenswrapper[4558]: I0120 17:05:01.777622 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/2606dddf-c407-4594-a587-c4eaaa573634-var-lib\") pod \"ovn-controller-ovs-ccvgc\" (UID: \"2606dddf-c407-4594-a587-c4eaaa573634\") " pod="openstack-kuttl-tests/ovn-controller-ovs-ccvgc" Jan 20 17:05:01 crc kubenswrapper[4558]: I0120 17:05:01.777653 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/2606dddf-c407-4594-a587-c4eaaa573634-etc-ovs\") pod \"ovn-controller-ovs-ccvgc\" (UID: \"2606dddf-c407-4594-a587-c4eaaa573634\") " pod="openstack-kuttl-tests/ovn-controller-ovs-ccvgc" Jan 20 17:05:01 crc kubenswrapper[4558]: I0120 17:05:01.777669 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" 
(UniqueName: \"kubernetes.io/host-path/1cde556c-b9b1-484a-b512-ca44c5f47353-var-run-ovn\") pod \"ovn-controller-87pjw\" (UID: \"1cde556c-b9b1-484a-b512-ca44c5f47353\") " pod="openstack-kuttl-tests/ovn-controller-87pjw" Jan 20 17:05:01 crc kubenswrapper[4558]: I0120 17:05:01.777689 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/1cde556c-b9b1-484a-b512-ca44c5f47353-ovn-controller-tls-certs\") pod \"ovn-controller-87pjw\" (UID: \"1cde556c-b9b1-484a-b512-ca44c5f47353\") " pod="openstack-kuttl-tests/ovn-controller-87pjw" Jan 20 17:05:01 crc kubenswrapper[4558]: I0120 17:05:01.777725 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/2606dddf-c407-4594-a587-c4eaaa573634-var-run\") pod \"ovn-controller-ovs-ccvgc\" (UID: \"2606dddf-c407-4594-a587-c4eaaa573634\") " pod="openstack-kuttl-tests/ovn-controller-ovs-ccvgc" Jan 20 17:05:01 crc kubenswrapper[4558]: I0120 17:05:01.777823 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/1cde556c-b9b1-484a-b512-ca44c5f47353-var-run-ovn\") pod \"ovn-controller-87pjw\" (UID: \"1cde556c-b9b1-484a-b512-ca44c5f47353\") " pod="openstack-kuttl-tests/ovn-controller-87pjw" Jan 20 17:05:01 crc kubenswrapper[4558]: I0120 17:05:01.777826 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/2606dddf-c407-4594-a587-c4eaaa573634-var-log\") pod \"ovn-controller-ovs-ccvgc\" (UID: \"2606dddf-c407-4594-a587-c4eaaa573634\") " pod="openstack-kuttl-tests/ovn-controller-ovs-ccvgc" Jan 20 17:05:01 crc kubenswrapper[4558]: I0120 17:05:01.777857 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/2606dddf-c407-4594-a587-c4eaaa573634-var-lib\") pod \"ovn-controller-ovs-ccvgc\" (UID: \"2606dddf-c407-4594-a587-c4eaaa573634\") " pod="openstack-kuttl-tests/ovn-controller-ovs-ccvgc" Jan 20 17:05:01 crc kubenswrapper[4558]: I0120 17:05:01.777898 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/1cde556c-b9b1-484a-b512-ca44c5f47353-var-run\") pod \"ovn-controller-87pjw\" (UID: \"1cde556c-b9b1-484a-b512-ca44c5f47353\") " pod="openstack-kuttl-tests/ovn-controller-87pjw" Jan 20 17:05:01 crc kubenswrapper[4558]: I0120 17:05:01.777941 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1cde556c-b9b1-484a-b512-ca44c5f47353-scripts\") pod \"ovn-controller-87pjw\" (UID: \"1cde556c-b9b1-484a-b512-ca44c5f47353\") " pod="openstack-kuttl-tests/ovn-controller-87pjw" Jan 20 17:05:01 crc kubenswrapper[4558]: I0120 17:05:01.777967 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2606dddf-c407-4594-a587-c4eaaa573634-scripts\") pod \"ovn-controller-ovs-ccvgc\" (UID: \"2606dddf-c407-4594-a587-c4eaaa573634\") " pod="openstack-kuttl-tests/ovn-controller-ovs-ccvgc" Jan 20 17:05:01 crc kubenswrapper[4558]: I0120 17:05:01.778059 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gjftp\" (UniqueName: \"kubernetes.io/projected/2606dddf-c407-4594-a587-c4eaaa573634-kube-api-access-gjftp\") pod \"ovn-controller-ovs-ccvgc\" (UID: 
\"2606dddf-c407-4594-a587-c4eaaa573634\") " pod="openstack-kuttl-tests/ovn-controller-ovs-ccvgc" Jan 20 17:05:01 crc kubenswrapper[4558]: I0120 17:05:01.778131 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h7m4z\" (UniqueName: \"kubernetes.io/projected/1cde556c-b9b1-484a-b512-ca44c5f47353-kube-api-access-h7m4z\") pod \"ovn-controller-87pjw\" (UID: \"1cde556c-b9b1-484a-b512-ca44c5f47353\") " pod="openstack-kuttl-tests/ovn-controller-87pjw" Jan 20 17:05:01 crc kubenswrapper[4558]: I0120 17:05:01.778220 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/bb2560f9-fe1c-4251-89ae-0b242a9243bc-ovs-rundir\") pod \"ovn-controller-metrics-x6ptv\" (UID: \"bb2560f9-fe1c-4251-89ae-0b242a9243bc\") " pod="openstack-kuttl-tests/ovn-controller-metrics-x6ptv" Jan 20 17:05:01 crc kubenswrapper[4558]: I0120 17:05:01.778246 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/1cde556c-b9b1-484a-b512-ca44c5f47353-var-log-ovn\") pod \"ovn-controller-87pjw\" (UID: \"1cde556c-b9b1-484a-b512-ca44c5f47353\") " pod="openstack-kuttl-tests/ovn-controller-87pjw" Jan 20 17:05:01 crc kubenswrapper[4558]: I0120 17:05:01.778473 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/1cde556c-b9b1-484a-b512-ca44c5f47353-var-log-ovn\") pod \"ovn-controller-87pjw\" (UID: \"1cde556c-b9b1-484a-b512-ca44c5f47353\") " pod="openstack-kuttl-tests/ovn-controller-87pjw" Jan 20 17:05:01 crc kubenswrapper[4558]: I0120 17:05:01.779811 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1cde556c-b9b1-484a-b512-ca44c5f47353-scripts\") pod \"ovn-controller-87pjw\" (UID: \"1cde556c-b9b1-484a-b512-ca44c5f47353\") " pod="openstack-kuttl-tests/ovn-controller-87pjw" Jan 20 17:05:01 crc kubenswrapper[4558]: I0120 17:05:01.779816 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2606dddf-c407-4594-a587-c4eaaa573634-scripts\") pod \"ovn-controller-ovs-ccvgc\" (UID: \"2606dddf-c407-4594-a587-c4eaaa573634\") " pod="openstack-kuttl-tests/ovn-controller-ovs-ccvgc" Jan 20 17:05:01 crc kubenswrapper[4558]: I0120 17:05:01.783037 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/1cde556c-b9b1-484a-b512-ca44c5f47353-ovn-controller-tls-certs\") pod \"ovn-controller-87pjw\" (UID: \"1cde556c-b9b1-484a-b512-ca44c5f47353\") " pod="openstack-kuttl-tests/ovn-controller-87pjw" Jan 20 17:05:01 crc kubenswrapper[4558]: I0120 17:05:01.783076 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1cde556c-b9b1-484a-b512-ca44c5f47353-combined-ca-bundle\") pod \"ovn-controller-87pjw\" (UID: \"1cde556c-b9b1-484a-b512-ca44c5f47353\") " pod="openstack-kuttl-tests/ovn-controller-87pjw" Jan 20 17:05:01 crc kubenswrapper[4558]: I0120 17:05:01.793513 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h7m4z\" (UniqueName: \"kubernetes.io/projected/1cde556c-b9b1-484a-b512-ca44c5f47353-kube-api-access-h7m4z\") pod \"ovn-controller-87pjw\" (UID: \"1cde556c-b9b1-484a-b512-ca44c5f47353\") " 
pod="openstack-kuttl-tests/ovn-controller-87pjw" Jan 20 17:05:01 crc kubenswrapper[4558]: I0120 17:05:01.795633 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gjftp\" (UniqueName: \"kubernetes.io/projected/2606dddf-c407-4594-a587-c4eaaa573634-kube-api-access-gjftp\") pod \"ovn-controller-ovs-ccvgc\" (UID: \"2606dddf-c407-4594-a587-c4eaaa573634\") " pod="openstack-kuttl-tests/ovn-controller-ovs-ccvgc" Jan 20 17:05:01 crc kubenswrapper[4558]: I0120 17:05:01.879769 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovn-controller-87pjw" Jan 20 17:05:01 crc kubenswrapper[4558]: I0120 17:05:01.881458 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bb2560f9-fe1c-4251-89ae-0b242a9243bc-config\") pod \"ovn-controller-metrics-x6ptv\" (UID: \"bb2560f9-fe1c-4251-89ae-0b242a9243bc\") " pod="openstack-kuttl-tests/ovn-controller-metrics-x6ptv" Jan 20 17:05:01 crc kubenswrapper[4558]: I0120 17:05:01.881666 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/bb2560f9-fe1c-4251-89ae-0b242a9243bc-ovs-rundir\") pod \"ovn-controller-metrics-x6ptv\" (UID: \"bb2560f9-fe1c-4251-89ae-0b242a9243bc\") " pod="openstack-kuttl-tests/ovn-controller-metrics-x6ptv" Jan 20 17:05:01 crc kubenswrapper[4558]: I0120 17:05:01.881720 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/bb2560f9-fe1c-4251-89ae-0b242a9243bc-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-x6ptv\" (UID: \"bb2560f9-fe1c-4251-89ae-0b242a9243bc\") " pod="openstack-kuttl-tests/ovn-controller-metrics-x6ptv" Jan 20 17:05:01 crc kubenswrapper[4558]: I0120 17:05:01.881768 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/bb2560f9-fe1c-4251-89ae-0b242a9243bc-ovn-rundir\") pod \"ovn-controller-metrics-x6ptv\" (UID: \"bb2560f9-fe1c-4251-89ae-0b242a9243bc\") " pod="openstack-kuttl-tests/ovn-controller-metrics-x6ptv" Jan 20 17:05:01 crc kubenswrapper[4558]: I0120 17:05:01.881802 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bb2560f9-fe1c-4251-89ae-0b242a9243bc-combined-ca-bundle\") pod \"ovn-controller-metrics-x6ptv\" (UID: \"bb2560f9-fe1c-4251-89ae-0b242a9243bc\") " pod="openstack-kuttl-tests/ovn-controller-metrics-x6ptv" Jan 20 17:05:01 crc kubenswrapper[4558]: I0120 17:05:01.881835 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4mprp\" (UniqueName: \"kubernetes.io/projected/bb2560f9-fe1c-4251-89ae-0b242a9243bc-kube-api-access-4mprp\") pod \"ovn-controller-metrics-x6ptv\" (UID: \"bb2560f9-fe1c-4251-89ae-0b242a9243bc\") " pod="openstack-kuttl-tests/ovn-controller-metrics-x6ptv" Jan 20 17:05:01 crc kubenswrapper[4558]: I0120 17:05:01.882053 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/bb2560f9-fe1c-4251-89ae-0b242a9243bc-ovn-rundir\") pod \"ovn-controller-metrics-x6ptv\" (UID: \"bb2560f9-fe1c-4251-89ae-0b242a9243bc\") " pod="openstack-kuttl-tests/ovn-controller-metrics-x6ptv" Jan 20 17:05:01 crc kubenswrapper[4558]: I0120 17:05:01.882083 4558 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/bb2560f9-fe1c-4251-89ae-0b242a9243bc-ovs-rundir\") pod \"ovn-controller-metrics-x6ptv\" (UID: \"bb2560f9-fe1c-4251-89ae-0b242a9243bc\") " pod="openstack-kuttl-tests/ovn-controller-metrics-x6ptv" Jan 20 17:05:01 crc kubenswrapper[4558]: I0120 17:05:01.884074 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bb2560f9-fe1c-4251-89ae-0b242a9243bc-config\") pod \"ovn-controller-metrics-x6ptv\" (UID: \"bb2560f9-fe1c-4251-89ae-0b242a9243bc\") " pod="openstack-kuttl-tests/ovn-controller-metrics-x6ptv" Jan 20 17:05:01 crc kubenswrapper[4558]: I0120 17:05:01.885644 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/bb2560f9-fe1c-4251-89ae-0b242a9243bc-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-x6ptv\" (UID: \"bb2560f9-fe1c-4251-89ae-0b242a9243bc\") " pod="openstack-kuttl-tests/ovn-controller-metrics-x6ptv" Jan 20 17:05:01 crc kubenswrapper[4558]: I0120 17:05:01.885832 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bb2560f9-fe1c-4251-89ae-0b242a9243bc-combined-ca-bundle\") pod \"ovn-controller-metrics-x6ptv\" (UID: \"bb2560f9-fe1c-4251-89ae-0b242a9243bc\") " pod="openstack-kuttl-tests/ovn-controller-metrics-x6ptv" Jan 20 17:05:01 crc kubenswrapper[4558]: I0120 17:05:01.886635 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovn-controller-ovs-ccvgc" Jan 20 17:05:01 crc kubenswrapper[4558]: I0120 17:05:01.896275 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4mprp\" (UniqueName: \"kubernetes.io/projected/bb2560f9-fe1c-4251-89ae-0b242a9243bc-kube-api-access-4mprp\") pod \"ovn-controller-metrics-x6ptv\" (UID: \"bb2560f9-fe1c-4251-89ae-0b242a9243bc\") " pod="openstack-kuttl-tests/ovn-controller-metrics-x6ptv" Jan 20 17:05:01 crc kubenswrapper[4558]: I0120 17:05:01.959310 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovn-controller-metrics-x6ptv" Jan 20 17:05:02 crc kubenswrapper[4558]: I0120 17:05:02.288359 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovn-controller-metrics-x6ptv"] Jan 20 17:05:02 crc kubenswrapper[4558]: W0120 17:05:02.295930 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbb2560f9_fe1c_4251_89ae_0b242a9243bc.slice/crio-e25e6ff539df9d5181e8397891b47acd55270bb236dcb20a819dcc5cc486a830 WatchSource:0}: Error finding container e25e6ff539df9d5181e8397891b47acd55270bb236dcb20a819dcc5cc486a830: Status 404 returned error can't find the container with id e25e6ff539df9d5181e8397891b47acd55270bb236dcb20a819dcc5cc486a830 Jan 20 17:05:02 crc kubenswrapper[4558]: I0120 17:05:02.339181 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovn-controller-87pjw"] Jan 20 17:05:02 crc kubenswrapper[4558]: W0120 17:05:02.347285 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1cde556c_b9b1_484a_b512_ca44c5f47353.slice/crio-e0f45a42920d496bf05650e985dc05cb2835b918d786165c4acd8208dce67e32 WatchSource:0}: Error finding container e0f45a42920d496bf05650e985dc05cb2835b918d786165c4acd8208dce67e32: Status 404 returned error can't find the container with id e0f45a42920d496bf05650e985dc05cb2835b918d786165c4acd8208dce67e32 Jan 20 17:05:02 crc kubenswrapper[4558]: I0120 17:05:02.486949 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovn-controller-ovs-ccvgc"] Jan 20 17:05:02 crc kubenswrapper[4558]: W0120 17:05:02.491301 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2606dddf_c407_4594_a587_c4eaaa573634.slice/crio-6b656c176a87b422fbf11c01a7fa880d76638190ebd6699e7732e755b8a75b68 WatchSource:0}: Error finding container 6b656c176a87b422fbf11c01a7fa880d76638190ebd6699e7732e755b8a75b68: Status 404 returned error can't find the container with id 6b656c176a87b422fbf11c01a7fa880d76638190ebd6699e7732e755b8a75b68 Jan 20 17:05:03 crc kubenswrapper[4558]: I0120 17:05:03.151576 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-controller-metrics-x6ptv" event={"ID":"bb2560f9-fe1c-4251-89ae-0b242a9243bc","Type":"ContainerStarted","Data":"f8289d46cef13048b1da599d8f884a8ca8552346ce31e55069458d4dcbbf3e26"} Jan 20 17:05:03 crc kubenswrapper[4558]: I0120 17:05:03.151862 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-controller-metrics-x6ptv" event={"ID":"bb2560f9-fe1c-4251-89ae-0b242a9243bc","Type":"ContainerStarted","Data":"e25e6ff539df9d5181e8397891b47acd55270bb236dcb20a819dcc5cc486a830"} Jan 20 17:05:03 crc kubenswrapper[4558]: I0120 17:05:03.153157 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-controller-ovs-ccvgc" event={"ID":"2606dddf-c407-4594-a587-c4eaaa573634","Type":"ContainerStarted","Data":"6b656c176a87b422fbf11c01a7fa880d76638190ebd6699e7732e755b8a75b68"} Jan 20 17:05:03 crc kubenswrapper[4558]: I0120 17:05:03.154235 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-controller-87pjw" event={"ID":"1cde556c-b9b1-484a-b512-ca44c5f47353","Type":"ContainerStarted","Data":"e0f45a42920d496bf05650e985dc05cb2835b918d786165c4acd8208dce67e32"} Jan 20 17:05:03 crc 
kubenswrapper[4558]: I0120 17:05:03.195857 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ovn-controller-metrics-x6ptv" podStartSLOduration=2.195831261 podStartE2EDuration="2.195831261s" podCreationTimestamp="2026-01-20 17:05:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:05:03.194287889 +0000 UTC m=+1396.954625856" watchObservedRunningTime="2026-01-20 17:05:03.195831261 +0000 UTC m=+1396.956169227" Jan 20 17:05:04 crc kubenswrapper[4558]: I0120 17:05:04.166152 4558 generic.go:334] "Generic (PLEG): container finished" podID="2606dddf-c407-4594-a587-c4eaaa573634" containerID="9ac149b2100f0c79088616e525733e35778cad9b57e72c217002d1cbea3e83b8" exitCode=0 Jan 20 17:05:04 crc kubenswrapper[4558]: I0120 17:05:04.166312 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-controller-ovs-ccvgc" event={"ID":"2606dddf-c407-4594-a587-c4eaaa573634","Type":"ContainerDied","Data":"9ac149b2100f0c79088616e525733e35778cad9b57e72c217002d1cbea3e83b8"} Jan 20 17:05:05 crc kubenswrapper[4558]: I0120 17:05:05.175882 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-controller-ovs-ccvgc" event={"ID":"2606dddf-c407-4594-a587-c4eaaa573634","Type":"ContainerStarted","Data":"cf17a65eef2baaf7212d90a4dcef30684c696bbbb91387878866000f91b1add6"} Jan 20 17:05:05 crc kubenswrapper[4558]: I0120 17:05:05.176194 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ovn-controller-ovs-ccvgc" Jan 20 17:05:05 crc kubenswrapper[4558]: I0120 17:05:05.176206 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-controller-ovs-ccvgc" event={"ID":"2606dddf-c407-4594-a587-c4eaaa573634","Type":"ContainerStarted","Data":"f06e0862613d1bd692c6d6b3f79313ebad2c7126dc824b5a7038f08b4a349268"} Jan 20 17:05:05 crc kubenswrapper[4558]: I0120 17:05:05.192572 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ovn-controller-ovs-ccvgc" podStartSLOduration=3.398636861 podStartE2EDuration="4.192547531s" podCreationTimestamp="2026-01-20 17:05:01 +0000 UTC" firstStartedPulling="2026-01-20 17:05:02.497733147 +0000 UTC m=+1396.258071115" lastFinishedPulling="2026-01-20 17:05:03.291643818 +0000 UTC m=+1397.051981785" observedRunningTime="2026-01-20 17:05:05.189369285 +0000 UTC m=+1398.949707252" watchObservedRunningTime="2026-01-20 17:05:05.192547531 +0000 UTC m=+1398.952885498" Jan 20 17:05:06 crc kubenswrapper[4558]: I0120 17:05:06.186455 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ovn-controller-ovs-ccvgc" Jan 20 17:05:09 crc kubenswrapper[4558]: I0120 17:05:09.219031 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-controller-87pjw" event={"ID":"1cde556c-b9b1-484a-b512-ca44c5f47353","Type":"ContainerStarted","Data":"77dc8ef651195b6de90f94beb1d611b853eb6e2a29f2f0daf81b7f5003f08520"} Jan 20 17:05:09 crc kubenswrapper[4558]: I0120 17:05:09.219446 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ovn-controller-87pjw" Jan 20 17:05:09 crc kubenswrapper[4558]: I0120 17:05:09.238213 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ovn-controller-87pjw" podStartSLOduration=2.5178659469999998 podStartE2EDuration="8.23819748s" 
podCreationTimestamp="2026-01-20 17:05:01 +0000 UTC" firstStartedPulling="2026-01-20 17:05:02.350151418 +0000 UTC m=+1396.110489385" lastFinishedPulling="2026-01-20 17:05:08.070482951 +0000 UTC m=+1401.830820918" observedRunningTime="2026-01-20 17:05:09.232553407 +0000 UTC m=+1402.992891374" watchObservedRunningTime="2026-01-20 17:05:09.23819748 +0000 UTC m=+1402.998535446" Jan 20 17:05:09 crc kubenswrapper[4558]: I0120 17:05:09.870031 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovn-controller-metrics-x6ptv"] Jan 20 17:05:09 crc kubenswrapper[4558]: I0120 17:05:09.870537 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovn-controller-metrics-x6ptv" podUID="bb2560f9-fe1c-4251-89ae-0b242a9243bc" containerName="openstack-network-exporter" containerID="cri-o://f8289d46cef13048b1da599d8f884a8ca8552346ce31e55069458d4dcbbf3e26" gracePeriod=30 Jan 20 17:05:09 crc kubenswrapper[4558]: I0120 17:05:09.879980 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovn-controller-ovs-ccvgc"] Jan 20 17:05:09 crc kubenswrapper[4558]: I0120 17:05:09.886742 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovn-controller-87pjw"] Jan 20 17:05:10 crc kubenswrapper[4558]: I0120 17:05:10.156757 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovn-controller-ovs-ccvgc" podUID="2606dddf-c407-4594-a587-c4eaaa573634" containerName="ovs-vswitchd" containerID="cri-o://cf17a65eef2baaf7212d90a4dcef30684c696bbbb91387878866000f91b1add6" gracePeriod=30 Jan 20 17:05:10 crc kubenswrapper[4558]: I0120 17:05:10.229598 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovn-controller-metrics-x6ptv_bb2560f9-fe1c-4251-89ae-0b242a9243bc/openstack-network-exporter/0.log" Jan 20 17:05:10 crc kubenswrapper[4558]: I0120 17:05:10.229953 4558 generic.go:334] "Generic (PLEG): container finished" podID="bb2560f9-fe1c-4251-89ae-0b242a9243bc" containerID="f8289d46cef13048b1da599d8f884a8ca8552346ce31e55069458d4dcbbf3e26" exitCode=2 Jan 20 17:05:10 crc kubenswrapper[4558]: I0120 17:05:10.231093 4558 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." 
pod="openstack-kuttl-tests/ovn-controller-87pjw" secret="" err="secret \"ovncontroller-ovncontroller-dockercfg-2xg9k\" not found" Jan 20 17:05:10 crc kubenswrapper[4558]: I0120 17:05:10.231357 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-controller-metrics-x6ptv" event={"ID":"bb2560f9-fe1c-4251-89ae-0b242a9243bc","Type":"ContainerDied","Data":"f8289d46cef13048b1da599d8f884a8ca8552346ce31e55069458d4dcbbf3e26"} Jan 20 17:05:10 crc kubenswrapper[4558]: I0120 17:05:10.231397 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-controller-metrics-x6ptv" event={"ID":"bb2560f9-fe1c-4251-89ae-0b242a9243bc","Type":"ContainerDied","Data":"e25e6ff539df9d5181e8397891b47acd55270bb236dcb20a819dcc5cc486a830"} Jan 20 17:05:10 crc kubenswrapper[4558]: I0120 17:05:10.231414 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e25e6ff539df9d5181e8397891b47acd55270bb236dcb20a819dcc5cc486a830" Jan 20 17:05:10 crc kubenswrapper[4558]: I0120 17:05:10.249061 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovn-controller-metrics-x6ptv_bb2560f9-fe1c-4251-89ae-0b242a9243bc/openstack-network-exporter/0.log" Jan 20 17:05:10 crc kubenswrapper[4558]: I0120 17:05:10.249279 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovn-controller-metrics-x6ptv" Jan 20 17:05:10 crc kubenswrapper[4558]: E0120 17:05:10.278321 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/ovncontroller-scripts: configmap "ovncontroller-scripts" not found Jan 20 17:05:10 crc kubenswrapper[4558]: E0120 17:05:10.278400 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/1cde556c-b9b1-484a-b512-ca44c5f47353-scripts podName:1cde556c-b9b1-484a-b512-ca44c5f47353 nodeName:}" failed. No retries permitted until 2026-01-20 17:05:10.778380439 +0000 UTC m=+1404.538718406 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "scripts" (UniqueName: "kubernetes.io/configmap/1cde556c-b9b1-484a-b512-ca44c5f47353-scripts") pod "ovn-controller-87pjw" (UID: "1cde556c-b9b1-484a-b512-ca44c5f47353") : configmap "ovncontroller-scripts" not found Jan 20 17:05:10 crc kubenswrapper[4558]: I0120 17:05:10.378853 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/bb2560f9-fe1c-4251-89ae-0b242a9243bc-metrics-certs-tls-certs\") pod \"bb2560f9-fe1c-4251-89ae-0b242a9243bc\" (UID: \"bb2560f9-fe1c-4251-89ae-0b242a9243bc\") " Jan 20 17:05:10 crc kubenswrapper[4558]: I0120 17:05:10.378905 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/bb2560f9-fe1c-4251-89ae-0b242a9243bc-ovn-rundir\") pod \"bb2560f9-fe1c-4251-89ae-0b242a9243bc\" (UID: \"bb2560f9-fe1c-4251-89ae-0b242a9243bc\") " Jan 20 17:05:10 crc kubenswrapper[4558]: I0120 17:05:10.378928 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4mprp\" (UniqueName: \"kubernetes.io/projected/bb2560f9-fe1c-4251-89ae-0b242a9243bc-kube-api-access-4mprp\") pod \"bb2560f9-fe1c-4251-89ae-0b242a9243bc\" (UID: \"bb2560f9-fe1c-4251-89ae-0b242a9243bc\") " Jan 20 17:05:10 crc kubenswrapper[4558]: I0120 17:05:10.378959 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bb2560f9-fe1c-4251-89ae-0b242a9243bc-combined-ca-bundle\") pod \"bb2560f9-fe1c-4251-89ae-0b242a9243bc\" (UID: \"bb2560f9-fe1c-4251-89ae-0b242a9243bc\") " Jan 20 17:05:10 crc kubenswrapper[4558]: I0120 17:05:10.379030 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/bb2560f9-fe1c-4251-89ae-0b242a9243bc-ovs-rundir\") pod \"bb2560f9-fe1c-4251-89ae-0b242a9243bc\" (UID: \"bb2560f9-fe1c-4251-89ae-0b242a9243bc\") " Jan 20 17:05:10 crc kubenswrapper[4558]: I0120 17:05:10.379219 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bb2560f9-fe1c-4251-89ae-0b242a9243bc-config\") pod \"bb2560f9-fe1c-4251-89ae-0b242a9243bc\" (UID: \"bb2560f9-fe1c-4251-89ae-0b242a9243bc\") " Jan 20 17:05:10 crc kubenswrapper[4558]: I0120 17:05:10.379269 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bb2560f9-fe1c-4251-89ae-0b242a9243bc-ovn-rundir" (OuterVolumeSpecName: "ovn-rundir") pod "bb2560f9-fe1c-4251-89ae-0b242a9243bc" (UID: "bb2560f9-fe1c-4251-89ae-0b242a9243bc"). InnerVolumeSpecName "ovn-rundir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:05:10 crc kubenswrapper[4558]: I0120 17:05:10.379413 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bb2560f9-fe1c-4251-89ae-0b242a9243bc-ovs-rundir" (OuterVolumeSpecName: "ovs-rundir") pod "bb2560f9-fe1c-4251-89ae-0b242a9243bc" (UID: "bb2560f9-fe1c-4251-89ae-0b242a9243bc"). InnerVolumeSpecName "ovs-rundir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:05:10 crc kubenswrapper[4558]: I0120 17:05:10.380005 4558 reconciler_common.go:293] "Volume detached for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/bb2560f9-fe1c-4251-89ae-0b242a9243bc-ovn-rundir\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:10 crc kubenswrapper[4558]: I0120 17:05:10.380024 4558 reconciler_common.go:293] "Volume detached for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/bb2560f9-fe1c-4251-89ae-0b242a9243bc-ovs-rundir\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:10 crc kubenswrapper[4558]: I0120 17:05:10.380148 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bb2560f9-fe1c-4251-89ae-0b242a9243bc-config" (OuterVolumeSpecName: "config") pod "bb2560f9-fe1c-4251-89ae-0b242a9243bc" (UID: "bb2560f9-fe1c-4251-89ae-0b242a9243bc"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:05:10 crc kubenswrapper[4558]: I0120 17:05:10.387005 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bb2560f9-fe1c-4251-89ae-0b242a9243bc-kube-api-access-4mprp" (OuterVolumeSpecName: "kube-api-access-4mprp") pod "bb2560f9-fe1c-4251-89ae-0b242a9243bc" (UID: "bb2560f9-fe1c-4251-89ae-0b242a9243bc"). InnerVolumeSpecName "kube-api-access-4mprp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:05:10 crc kubenswrapper[4558]: I0120 17:05:10.402489 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bb2560f9-fe1c-4251-89ae-0b242a9243bc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bb2560f9-fe1c-4251-89ae-0b242a9243bc" (UID: "bb2560f9-fe1c-4251-89ae-0b242a9243bc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:10 crc kubenswrapper[4558]: I0120 17:05:10.432813 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bb2560f9-fe1c-4251-89ae-0b242a9243bc-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "bb2560f9-fe1c-4251-89ae-0b242a9243bc" (UID: "bb2560f9-fe1c-4251-89ae-0b242a9243bc"). InnerVolumeSpecName "metrics-certs-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:10 crc kubenswrapper[4558]: E0120 17:05:10.462470 4558 handlers.go:78] "Exec lifecycle hook for Container in Pod failed" err=< Jan 20 17:05:10 crc kubenswrapper[4558]: command '/usr/local/bin/container-scripts/stop-ovsdb-server.sh' exited with 137: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh Jan 20 17:05:10 crc kubenswrapper[4558]: + source /usr/local/bin/container-scripts/functions Jan 20 17:05:10 crc kubenswrapper[4558]: ++ OVNBridge=br-int Jan 20 17:05:10 crc kubenswrapper[4558]: ++ OVNRemote=tcp:localhost:6642 Jan 20 17:05:10 crc kubenswrapper[4558]: ++ OVNEncapType=geneve Jan 20 17:05:10 crc kubenswrapper[4558]: ++ OVNAvailabilityZones= Jan 20 17:05:10 crc kubenswrapper[4558]: ++ EnableChassisAsGateway=true Jan 20 17:05:10 crc kubenswrapper[4558]: ++ PhysicalNetworks= Jan 20 17:05:10 crc kubenswrapper[4558]: ++ OVNHostName= Jan 20 17:05:10 crc kubenswrapper[4558]: ++ DB_FILE=/etc/openvswitch/conf.db Jan 20 17:05:10 crc kubenswrapper[4558]: ++ ovs_dir=/var/lib/openvswitch Jan 20 17:05:10 crc kubenswrapper[4558]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script Jan 20 17:05:10 crc kubenswrapper[4558]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows Jan 20 17:05:10 crc kubenswrapper[4558]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server Jan 20 17:05:10 crc kubenswrapper[4558]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Jan 20 17:05:10 crc kubenswrapper[4558]: + sleep 0.5 Jan 20 17:05:10 crc kubenswrapper[4558]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Jan 20 17:05:10 crc kubenswrapper[4558]: + cleanup_ovsdb_server_semaphore Jan 20 17:05:10 crc kubenswrapper[4558]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server Jan 20 17:05:10 crc kubenswrapper[4558]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd Jan 20 17:05:10 crc kubenswrapper[4558]: > execCommand=["/usr/local/bin/container-scripts/stop-ovsdb-server.sh"] containerName="ovsdb-server" pod="openstack-kuttl-tests/ovn-controller-ovs-ccvgc" message=< Jan 20 17:05:10 crc kubenswrapper[4558]: Exiting ovsdb-server (5) [ OK ] Jan 20 17:05:10 crc kubenswrapper[4558]: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh Jan 20 17:05:10 crc kubenswrapper[4558]: + source /usr/local/bin/container-scripts/functions Jan 20 17:05:10 crc kubenswrapper[4558]: ++ OVNBridge=br-int Jan 20 17:05:10 crc kubenswrapper[4558]: ++ OVNRemote=tcp:localhost:6642 Jan 20 17:05:10 crc kubenswrapper[4558]: ++ OVNEncapType=geneve Jan 20 17:05:10 crc kubenswrapper[4558]: ++ OVNAvailabilityZones= Jan 20 17:05:10 crc kubenswrapper[4558]: ++ EnableChassisAsGateway=true Jan 20 17:05:10 crc kubenswrapper[4558]: ++ PhysicalNetworks= Jan 20 17:05:10 crc kubenswrapper[4558]: ++ OVNHostName= Jan 20 17:05:10 crc kubenswrapper[4558]: ++ DB_FILE=/etc/openvswitch/conf.db Jan 20 17:05:10 crc kubenswrapper[4558]: ++ ovs_dir=/var/lib/openvswitch Jan 20 17:05:10 crc kubenswrapper[4558]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script Jan 20 17:05:10 crc kubenswrapper[4558]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows Jan 20 17:05:10 crc kubenswrapper[4558]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server Jan 20 17:05:10 crc kubenswrapper[4558]: + '[' '!' 
-f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Jan 20 17:05:10 crc kubenswrapper[4558]: + sleep 0.5 Jan 20 17:05:10 crc kubenswrapper[4558]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Jan 20 17:05:10 crc kubenswrapper[4558]: + cleanup_ovsdb_server_semaphore Jan 20 17:05:10 crc kubenswrapper[4558]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server Jan 20 17:05:10 crc kubenswrapper[4558]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd Jan 20 17:05:10 crc kubenswrapper[4558]: > Jan 20 17:05:10 crc kubenswrapper[4558]: E0120 17:05:10.462521 4558 kuberuntime_container.go:691] "PreStop hook failed" err=< Jan 20 17:05:10 crc kubenswrapper[4558]: command '/usr/local/bin/container-scripts/stop-ovsdb-server.sh' exited with 137: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh Jan 20 17:05:10 crc kubenswrapper[4558]: + source /usr/local/bin/container-scripts/functions Jan 20 17:05:10 crc kubenswrapper[4558]: ++ OVNBridge=br-int Jan 20 17:05:10 crc kubenswrapper[4558]: ++ OVNRemote=tcp:localhost:6642 Jan 20 17:05:10 crc kubenswrapper[4558]: ++ OVNEncapType=geneve Jan 20 17:05:10 crc kubenswrapper[4558]: ++ OVNAvailabilityZones= Jan 20 17:05:10 crc kubenswrapper[4558]: ++ EnableChassisAsGateway=true Jan 20 17:05:10 crc kubenswrapper[4558]: ++ PhysicalNetworks= Jan 20 17:05:10 crc kubenswrapper[4558]: ++ OVNHostName= Jan 20 17:05:10 crc kubenswrapper[4558]: ++ DB_FILE=/etc/openvswitch/conf.db Jan 20 17:05:10 crc kubenswrapper[4558]: ++ ovs_dir=/var/lib/openvswitch Jan 20 17:05:10 crc kubenswrapper[4558]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script Jan 20 17:05:10 crc kubenswrapper[4558]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows Jan 20 17:05:10 crc kubenswrapper[4558]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server Jan 20 17:05:10 crc kubenswrapper[4558]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Jan 20 17:05:10 crc kubenswrapper[4558]: + sleep 0.5 Jan 20 17:05:10 crc kubenswrapper[4558]: + '[' '!' 
-f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Jan 20 17:05:10 crc kubenswrapper[4558]: + cleanup_ovsdb_server_semaphore Jan 20 17:05:10 crc kubenswrapper[4558]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server Jan 20 17:05:10 crc kubenswrapper[4558]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd Jan 20 17:05:10 crc kubenswrapper[4558]: > pod="openstack-kuttl-tests/ovn-controller-ovs-ccvgc" podUID="2606dddf-c407-4594-a587-c4eaaa573634" containerName="ovsdb-server" containerID="cri-o://f06e0862613d1bd692c6d6b3f79313ebad2c7126dc824b5a7038f08b4a349268" Jan 20 17:05:10 crc kubenswrapper[4558]: I0120 17:05:10.462562 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovn-controller-ovs-ccvgc" podUID="2606dddf-c407-4594-a587-c4eaaa573634" containerName="ovsdb-server" containerID="cri-o://f06e0862613d1bd692c6d6b3f79313ebad2c7126dc824b5a7038f08b4a349268" gracePeriod=30 Jan 20 17:05:10 crc kubenswrapper[4558]: I0120 17:05:10.481841 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bb2560f9-fe1c-4251-89ae-0b242a9243bc-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:10 crc kubenswrapper[4558]: I0120 17:05:10.481880 4558 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/bb2560f9-fe1c-4251-89ae-0b242a9243bc-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:10 crc kubenswrapper[4558]: I0120 17:05:10.481892 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4mprp\" (UniqueName: \"kubernetes.io/projected/bb2560f9-fe1c-4251-89ae-0b242a9243bc-kube-api-access-4mprp\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:10 crc kubenswrapper[4558]: I0120 17:05:10.481901 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bb2560f9-fe1c-4251-89ae-0b242a9243bc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:10 crc kubenswrapper[4558]: E0120 17:05:10.788550 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/ovncontroller-scripts: configmap "ovncontroller-scripts" not found Jan 20 17:05:10 crc kubenswrapper[4558]: E0120 17:05:10.788625 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/1cde556c-b9b1-484a-b512-ca44c5f47353-scripts podName:1cde556c-b9b1-484a-b512-ca44c5f47353 nodeName:}" failed. No retries permitted until 2026-01-20 17:05:11.788610151 +0000 UTC m=+1405.548948118 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "scripts" (UniqueName: "kubernetes.io/configmap/1cde556c-b9b1-484a-b512-ca44c5f47353-scripts") pod "ovn-controller-87pjw" (UID: "1cde556c-b9b1-484a-b512-ca44c5f47353") : configmap "ovncontroller-scripts" not found Jan 20 17:05:11 crc kubenswrapper[4558]: I0120 17:05:11.239848 4558 generic.go:334] "Generic (PLEG): container finished" podID="2606dddf-c407-4594-a587-c4eaaa573634" containerID="f06e0862613d1bd692c6d6b3f79313ebad2c7126dc824b5a7038f08b4a349268" exitCode=0 Jan 20 17:05:11 crc kubenswrapper[4558]: I0120 17:05:11.239920 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-controller-ovs-ccvgc" event={"ID":"2606dddf-c407-4594-a587-c4eaaa573634","Type":"ContainerDied","Data":"f06e0862613d1bd692c6d6b3f79313ebad2c7126dc824b5a7038f08b4a349268"} Jan 20 17:05:11 crc kubenswrapper[4558]: I0120 17:05:11.240217 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovn-controller-metrics-x6ptv" Jan 20 17:05:11 crc kubenswrapper[4558]: I0120 17:05:11.260846 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovn-controller-metrics-x6ptv"] Jan 20 17:05:11 crc kubenswrapper[4558]: I0120 17:05:11.267443 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ovn-controller-metrics-x6ptv"] Jan 20 17:05:11 crc kubenswrapper[4558]: E0120 17:05:11.305701 4558 handlers.go:78] "Exec lifecycle hook for Container in Pod failed" err="command '/usr/share/ovn/scripts/ovn-ctl stop_controller' exited with 137: " execCommand=["/usr/share/ovn/scripts/ovn-ctl","stop_controller"] containerName="ovn-controller" pod="openstack-kuttl-tests/ovn-controller-87pjw" message=< Jan 20 17:05:11 crc kubenswrapper[4558]: Exiting ovn-controller (1) [ OK ] Jan 20 17:05:11 crc kubenswrapper[4558]: > Jan 20 17:05:11 crc kubenswrapper[4558]: E0120 17:05:11.305760 4558 kuberuntime_container.go:691] "PreStop hook failed" err="command '/usr/share/ovn/scripts/ovn-ctl stop_controller' exited with 137: " pod="openstack-kuttl-tests/ovn-controller-87pjw" podUID="1cde556c-b9b1-484a-b512-ca44c5f47353" containerName="ovn-controller" containerID="cri-o://77dc8ef651195b6de90f94beb1d611b853eb6e2a29f2f0daf81b7f5003f08520" Jan 20 17:05:11 crc kubenswrapper[4558]: I0120 17:05:11.305803 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovn-controller-87pjw" podUID="1cde556c-b9b1-484a-b512-ca44c5f47353" containerName="ovn-controller" containerID="cri-o://77dc8ef651195b6de90f94beb1d611b853eb6e2a29f2f0daf81b7f5003f08520" gracePeriod=30 Jan 20 17:05:11 crc kubenswrapper[4558]: I0120 17:05:11.593586 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovn-controller-87pjw" Jan 20 17:05:11 crc kubenswrapper[4558]: I0120 17:05:11.703994 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1cde556c-b9b1-484a-b512-ca44c5f47353-combined-ca-bundle\") pod \"1cde556c-b9b1-484a-b512-ca44c5f47353\" (UID: \"1cde556c-b9b1-484a-b512-ca44c5f47353\") " Jan 20 17:05:11 crc kubenswrapper[4558]: I0120 17:05:11.704079 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/1cde556c-b9b1-484a-b512-ca44c5f47353-var-log-ovn\") pod \"1cde556c-b9b1-484a-b512-ca44c5f47353\" (UID: \"1cde556c-b9b1-484a-b512-ca44c5f47353\") " Jan 20 17:05:11 crc kubenswrapper[4558]: I0120 17:05:11.704127 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/1cde556c-b9b1-484a-b512-ca44c5f47353-var-run\") pod \"1cde556c-b9b1-484a-b512-ca44c5f47353\" (UID: \"1cde556c-b9b1-484a-b512-ca44c5f47353\") " Jan 20 17:05:11 crc kubenswrapper[4558]: I0120 17:05:11.704306 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/1cde556c-b9b1-484a-b512-ca44c5f47353-var-run-ovn\") pod \"1cde556c-b9b1-484a-b512-ca44c5f47353\" (UID: \"1cde556c-b9b1-484a-b512-ca44c5f47353\") " Jan 20 17:05:11 crc kubenswrapper[4558]: I0120 17:05:11.704358 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/1cde556c-b9b1-484a-b512-ca44c5f47353-ovn-controller-tls-certs\") pod \"1cde556c-b9b1-484a-b512-ca44c5f47353\" (UID: \"1cde556c-b9b1-484a-b512-ca44c5f47353\") " Jan 20 17:05:11 crc kubenswrapper[4558]: I0120 17:05:11.704498 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h7m4z\" (UniqueName: \"kubernetes.io/projected/1cde556c-b9b1-484a-b512-ca44c5f47353-kube-api-access-h7m4z\") pod \"1cde556c-b9b1-484a-b512-ca44c5f47353\" (UID: \"1cde556c-b9b1-484a-b512-ca44c5f47353\") " Jan 20 17:05:11 crc kubenswrapper[4558]: I0120 17:05:11.704544 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1cde556c-b9b1-484a-b512-ca44c5f47353-scripts\") pod \"1cde556c-b9b1-484a-b512-ca44c5f47353\" (UID: \"1cde556c-b9b1-484a-b512-ca44c5f47353\") " Jan 20 17:05:11 crc kubenswrapper[4558]: I0120 17:05:11.704554 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1cde556c-b9b1-484a-b512-ca44c5f47353-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "1cde556c-b9b1-484a-b512-ca44c5f47353" (UID: "1cde556c-b9b1-484a-b512-ca44c5f47353"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:05:11 crc kubenswrapper[4558]: I0120 17:05:11.704598 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1cde556c-b9b1-484a-b512-ca44c5f47353-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "1cde556c-b9b1-484a-b512-ca44c5f47353" (UID: "1cde556c-b9b1-484a-b512-ca44c5f47353"). InnerVolumeSpecName "var-run-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:05:11 crc kubenswrapper[4558]: I0120 17:05:11.704674 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1cde556c-b9b1-484a-b512-ca44c5f47353-var-run" (OuterVolumeSpecName: "var-run") pod "1cde556c-b9b1-484a-b512-ca44c5f47353" (UID: "1cde556c-b9b1-484a-b512-ca44c5f47353"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:05:11 crc kubenswrapper[4558]: I0120 17:05:11.705715 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1cde556c-b9b1-484a-b512-ca44c5f47353-scripts" (OuterVolumeSpecName: "scripts") pod "1cde556c-b9b1-484a-b512-ca44c5f47353" (UID: "1cde556c-b9b1-484a-b512-ca44c5f47353"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:05:11 crc kubenswrapper[4558]: I0120 17:05:11.706450 4558 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/1cde556c-b9b1-484a-b512-ca44c5f47353-var-log-ovn\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:11 crc kubenswrapper[4558]: I0120 17:05:11.706480 4558 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/1cde556c-b9b1-484a-b512-ca44c5f47353-var-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:11 crc kubenswrapper[4558]: I0120 17:05:11.706490 4558 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/1cde556c-b9b1-484a-b512-ca44c5f47353-var-run-ovn\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:11 crc kubenswrapper[4558]: I0120 17:05:11.706500 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1cde556c-b9b1-484a-b512-ca44c5f47353-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:11 crc kubenswrapper[4558]: I0120 17:05:11.709260 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1cde556c-b9b1-484a-b512-ca44c5f47353-kube-api-access-h7m4z" (OuterVolumeSpecName: "kube-api-access-h7m4z") pod "1cde556c-b9b1-484a-b512-ca44c5f47353" (UID: "1cde556c-b9b1-484a-b512-ca44c5f47353"). InnerVolumeSpecName "kube-api-access-h7m4z". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:05:11 crc kubenswrapper[4558]: I0120 17:05:11.726123 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1cde556c-b9b1-484a-b512-ca44c5f47353-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1cde556c-b9b1-484a-b512-ca44c5f47353" (UID: "1cde556c-b9b1-484a-b512-ca44c5f47353"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:11 crc kubenswrapper[4558]: I0120 17:05:11.761003 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1cde556c-b9b1-484a-b512-ca44c5f47353-ovn-controller-tls-certs" (OuterVolumeSpecName: "ovn-controller-tls-certs") pod "1cde556c-b9b1-484a-b512-ca44c5f47353" (UID: "1cde556c-b9b1-484a-b512-ca44c5f47353"). InnerVolumeSpecName "ovn-controller-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:11 crc kubenswrapper[4558]: I0120 17:05:11.808769 4558 reconciler_common.go:293] "Volume detached for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/1cde556c-b9b1-484a-b512-ca44c5f47353-ovn-controller-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:11 crc kubenswrapper[4558]: I0120 17:05:11.808795 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h7m4z\" (UniqueName: \"kubernetes.io/projected/1cde556c-b9b1-484a-b512-ca44c5f47353-kube-api-access-h7m4z\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:11 crc kubenswrapper[4558]: I0120 17:05:11.808807 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1cde556c-b9b1-484a-b512-ca44c5f47353-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:12 crc kubenswrapper[4558]: I0120 17:05:12.248116 4558 generic.go:334] "Generic (PLEG): container finished" podID="1cde556c-b9b1-484a-b512-ca44c5f47353" containerID="77dc8ef651195b6de90f94beb1d611b853eb6e2a29f2f0daf81b7f5003f08520" exitCode=0 Jan 20 17:05:12 crc kubenswrapper[4558]: I0120 17:05:12.248149 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-controller-87pjw" event={"ID":"1cde556c-b9b1-484a-b512-ca44c5f47353","Type":"ContainerDied","Data":"77dc8ef651195b6de90f94beb1d611b853eb6e2a29f2f0daf81b7f5003f08520"} Jan 20 17:05:12 crc kubenswrapper[4558]: I0120 17:05:12.249044 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-controller-87pjw" event={"ID":"1cde556c-b9b1-484a-b512-ca44c5f47353","Type":"ContainerDied","Data":"e0f45a42920d496bf05650e985dc05cb2835b918d786165c4acd8208dce67e32"} Jan 20 17:05:12 crc kubenswrapper[4558]: I0120 17:05:12.249108 4558 scope.go:117] "RemoveContainer" containerID="77dc8ef651195b6de90f94beb1d611b853eb6e2a29f2f0daf81b7f5003f08520" Jan 20 17:05:12 crc kubenswrapper[4558]: I0120 17:05:12.248174 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovn-controller-87pjw" Jan 20 17:05:12 crc kubenswrapper[4558]: I0120 17:05:12.271393 4558 scope.go:117] "RemoveContainer" containerID="77dc8ef651195b6de90f94beb1d611b853eb6e2a29f2f0daf81b7f5003f08520" Jan 20 17:05:12 crc kubenswrapper[4558]: E0120 17:05:12.271685 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"77dc8ef651195b6de90f94beb1d611b853eb6e2a29f2f0daf81b7f5003f08520\": container with ID starting with 77dc8ef651195b6de90f94beb1d611b853eb6e2a29f2f0daf81b7f5003f08520 not found: ID does not exist" containerID="77dc8ef651195b6de90f94beb1d611b853eb6e2a29f2f0daf81b7f5003f08520" Jan 20 17:05:12 crc kubenswrapper[4558]: I0120 17:05:12.271712 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"77dc8ef651195b6de90f94beb1d611b853eb6e2a29f2f0daf81b7f5003f08520"} err="failed to get container status \"77dc8ef651195b6de90f94beb1d611b853eb6e2a29f2f0daf81b7f5003f08520\": rpc error: code = NotFound desc = could not find container \"77dc8ef651195b6de90f94beb1d611b853eb6e2a29f2f0daf81b7f5003f08520\": container with ID starting with 77dc8ef651195b6de90f94beb1d611b853eb6e2a29f2f0daf81b7f5003f08520 not found: ID does not exist" Jan 20 17:05:12 crc kubenswrapper[4558]: I0120 17:05:12.272126 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovn-controller-87pjw"] Jan 20 17:05:12 crc kubenswrapper[4558]: I0120 17:05:12.278823 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ovn-controller-87pjw"] Jan 20 17:05:12 crc kubenswrapper[4558]: I0120 17:05:12.574185 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1cde556c-b9b1-484a-b512-ca44c5f47353" path="/var/lib/kubelet/pods/1cde556c-b9b1-484a-b512-ca44c5f47353/volumes" Jan 20 17:05:12 crc kubenswrapper[4558]: I0120 17:05:12.574750 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bb2560f9-fe1c-4251-89ae-0b242a9243bc" path="/var/lib/kubelet/pods/bb2560f9-fe1c-4251-89ae-0b242a9243bc/volumes" Jan 20 17:05:27 crc kubenswrapper[4558]: I0120 17:05:27.330102 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 17:05:27 crc kubenswrapper[4558]: I0120 17:05:27.330483 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 17:05:36 crc kubenswrapper[4558]: E0120 17:05:36.887221 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f06e0862613d1bd692c6d6b3f79313ebad2c7126dc824b5a7038f08b4a349268 is running failed: container process not found" containerID="f06e0862613d1bd692c6d6b3f79313ebad2c7126dc824b5a7038f08b4a349268" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 20 17:05:36 crc kubenswrapper[4558]: E0120 17:05:36.887876 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not 
created or running: checking if PID of f06e0862613d1bd692c6d6b3f79313ebad2c7126dc824b5a7038f08b4a349268 is running failed: container process not found" containerID="f06e0862613d1bd692c6d6b3f79313ebad2c7126dc824b5a7038f08b4a349268" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 20 17:05:36 crc kubenswrapper[4558]: E0120 17:05:36.888159 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="cf17a65eef2baaf7212d90a4dcef30684c696bbbb91387878866000f91b1add6" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 20 17:05:36 crc kubenswrapper[4558]: E0120 17:05:36.888344 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f06e0862613d1bd692c6d6b3f79313ebad2c7126dc824b5a7038f08b4a349268 is running failed: container process not found" containerID="f06e0862613d1bd692c6d6b3f79313ebad2c7126dc824b5a7038f08b4a349268" cmd=["/usr/local/bin/container-scripts/ovsdb_server_readiness.sh"] Jan 20 17:05:36 crc kubenswrapper[4558]: E0120 17:05:36.888372 4558 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f06e0862613d1bd692c6d6b3f79313ebad2c7126dc824b5a7038f08b4a349268 is running failed: container process not found" probeType="Readiness" pod="openstack-kuttl-tests/ovn-controller-ovs-ccvgc" podUID="2606dddf-c407-4594-a587-c4eaaa573634" containerName="ovsdb-server" Jan 20 17:05:36 crc kubenswrapper[4558]: E0120 17:05:36.889262 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="cf17a65eef2baaf7212d90a4dcef30684c696bbbb91387878866000f91b1add6" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 20 17:05:36 crc kubenswrapper[4558]: E0120 17:05:36.890364 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="cf17a65eef2baaf7212d90a4dcef30684c696bbbb91387878866000f91b1add6" cmd=["/usr/local/bin/container-scripts/vswitchd_readiness.sh"] Jan 20 17:05:36 crc kubenswrapper[4558]: E0120 17:05:36.890397 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/ovn-controller-ovs-ccvgc" podUID="2606dddf-c407-4594-a587-c4eaaa573634" containerName="ovs-vswitchd" Jan 20 17:05:40 crc kubenswrapper[4558]: I0120 17:05:40.447982 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovn-controller-ovs-ccvgc_2606dddf-c407-4594-a587-c4eaaa573634/ovs-vswitchd/0.log" Jan 20 17:05:40 crc kubenswrapper[4558]: I0120 17:05:40.448889 4558 generic.go:334] "Generic (PLEG): container finished" podID="2606dddf-c407-4594-a587-c4eaaa573634" containerID="cf17a65eef2baaf7212d90a4dcef30684c696bbbb91387878866000f91b1add6" exitCode=137 Jan 20 17:05:40 crc kubenswrapper[4558]: I0120 17:05:40.448918 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-controller-ovs-ccvgc" 
event={"ID":"2606dddf-c407-4594-a587-c4eaaa573634","Type":"ContainerDied","Data":"cf17a65eef2baaf7212d90a4dcef30684c696bbbb91387878866000f91b1add6"} Jan 20 17:05:40 crc kubenswrapper[4558]: I0120 17:05:40.509668 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovn-controller-ovs-ccvgc_2606dddf-c407-4594-a587-c4eaaa573634/ovs-vswitchd/0.log" Jan 20 17:05:40 crc kubenswrapper[4558]: I0120 17:05:40.510731 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovn-controller-ovs-ccvgc" Jan 20 17:05:40 crc kubenswrapper[4558]: I0120 17:05:40.626803 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2606dddf-c407-4594-a587-c4eaaa573634-scripts\") pod \"2606dddf-c407-4594-a587-c4eaaa573634\" (UID: \"2606dddf-c407-4594-a587-c4eaaa573634\") " Jan 20 17:05:40 crc kubenswrapper[4558]: I0120 17:05:40.626874 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gjftp\" (UniqueName: \"kubernetes.io/projected/2606dddf-c407-4594-a587-c4eaaa573634-kube-api-access-gjftp\") pod \"2606dddf-c407-4594-a587-c4eaaa573634\" (UID: \"2606dddf-c407-4594-a587-c4eaaa573634\") " Jan 20 17:05:40 crc kubenswrapper[4558]: I0120 17:05:40.626985 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/2606dddf-c407-4594-a587-c4eaaa573634-var-log\") pod \"2606dddf-c407-4594-a587-c4eaaa573634\" (UID: \"2606dddf-c407-4594-a587-c4eaaa573634\") " Jan 20 17:05:40 crc kubenswrapper[4558]: I0120 17:05:40.627001 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/2606dddf-c407-4594-a587-c4eaaa573634-var-lib\") pod \"2606dddf-c407-4594-a587-c4eaaa573634\" (UID: \"2606dddf-c407-4594-a587-c4eaaa573634\") " Jan 20 17:05:40 crc kubenswrapper[4558]: I0120 17:05:40.627038 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/2606dddf-c407-4594-a587-c4eaaa573634-etc-ovs\") pod \"2606dddf-c407-4594-a587-c4eaaa573634\" (UID: \"2606dddf-c407-4594-a587-c4eaaa573634\") " Jan 20 17:05:40 crc kubenswrapper[4558]: I0120 17:05:40.627046 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2606dddf-c407-4594-a587-c4eaaa573634-var-lib" (OuterVolumeSpecName: "var-lib") pod "2606dddf-c407-4594-a587-c4eaaa573634" (UID: "2606dddf-c407-4594-a587-c4eaaa573634"). InnerVolumeSpecName "var-lib". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:05:40 crc kubenswrapper[4558]: I0120 17:05:40.627079 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/2606dddf-c407-4594-a587-c4eaaa573634-var-run\") pod \"2606dddf-c407-4594-a587-c4eaaa573634\" (UID: \"2606dddf-c407-4594-a587-c4eaaa573634\") " Jan 20 17:05:40 crc kubenswrapper[4558]: I0120 17:05:40.627094 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2606dddf-c407-4594-a587-c4eaaa573634-var-log" (OuterVolumeSpecName: "var-log") pod "2606dddf-c407-4594-a587-c4eaaa573634" (UID: "2606dddf-c407-4594-a587-c4eaaa573634"). InnerVolumeSpecName "var-log". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:05:40 crc kubenswrapper[4558]: I0120 17:05:40.627113 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2606dddf-c407-4594-a587-c4eaaa573634-etc-ovs" (OuterVolumeSpecName: "etc-ovs") pod "2606dddf-c407-4594-a587-c4eaaa573634" (UID: "2606dddf-c407-4594-a587-c4eaaa573634"). InnerVolumeSpecName "etc-ovs". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:05:40 crc kubenswrapper[4558]: I0120 17:05:40.627195 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2606dddf-c407-4594-a587-c4eaaa573634-var-run" (OuterVolumeSpecName: "var-run") pod "2606dddf-c407-4594-a587-c4eaaa573634" (UID: "2606dddf-c407-4594-a587-c4eaaa573634"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:05:40 crc kubenswrapper[4558]: I0120 17:05:40.627668 4558 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/2606dddf-c407-4594-a587-c4eaaa573634-var-log\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:40 crc kubenswrapper[4558]: I0120 17:05:40.627688 4558 reconciler_common.go:293] "Volume detached for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/2606dddf-c407-4594-a587-c4eaaa573634-var-lib\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:40 crc kubenswrapper[4558]: I0120 17:05:40.627696 4558 reconciler_common.go:293] "Volume detached for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/2606dddf-c407-4594-a587-c4eaaa573634-etc-ovs\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:40 crc kubenswrapper[4558]: I0120 17:05:40.627705 4558 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/2606dddf-c407-4594-a587-c4eaaa573634-var-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:40 crc kubenswrapper[4558]: I0120 17:05:40.628118 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2606dddf-c407-4594-a587-c4eaaa573634-scripts" (OuterVolumeSpecName: "scripts") pod "2606dddf-c407-4594-a587-c4eaaa573634" (UID: "2606dddf-c407-4594-a587-c4eaaa573634"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:05:40 crc kubenswrapper[4558]: I0120 17:05:40.631899 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2606dddf-c407-4594-a587-c4eaaa573634-kube-api-access-gjftp" (OuterVolumeSpecName: "kube-api-access-gjftp") pod "2606dddf-c407-4594-a587-c4eaaa573634" (UID: "2606dddf-c407-4594-a587-c4eaaa573634"). InnerVolumeSpecName "kube-api-access-gjftp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:05:40 crc kubenswrapper[4558]: I0120 17:05:40.729917 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2606dddf-c407-4594-a587-c4eaaa573634-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:40 crc kubenswrapper[4558]: I0120 17:05:40.730113 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gjftp\" (UniqueName: \"kubernetes.io/projected/2606dddf-c407-4594-a587-c4eaaa573634-kube-api-access-gjftp\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:41 crc kubenswrapper[4558]: I0120 17:05:41.458472 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovn-controller-ovs-ccvgc_2606dddf-c407-4594-a587-c4eaaa573634/ovs-vswitchd/0.log" Jan 20 17:05:41 crc kubenswrapper[4558]: I0120 17:05:41.459205 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-controller-ovs-ccvgc" event={"ID":"2606dddf-c407-4594-a587-c4eaaa573634","Type":"ContainerDied","Data":"6b656c176a87b422fbf11c01a7fa880d76638190ebd6699e7732e755b8a75b68"} Jan 20 17:05:41 crc kubenswrapper[4558]: I0120 17:05:41.459262 4558 scope.go:117] "RemoveContainer" containerID="cf17a65eef2baaf7212d90a4dcef30684c696bbbb91387878866000f91b1add6" Jan 20 17:05:41 crc kubenswrapper[4558]: I0120 17:05:41.459263 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovn-controller-ovs-ccvgc" Jan 20 17:05:41 crc kubenswrapper[4558]: I0120 17:05:41.478818 4558 scope.go:117] "RemoveContainer" containerID="f06e0862613d1bd692c6d6b3f79313ebad2c7126dc824b5a7038f08b4a349268" Jan 20 17:05:41 crc kubenswrapper[4558]: I0120 17:05:41.484585 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovn-controller-ovs-ccvgc"] Jan 20 17:05:41 crc kubenswrapper[4558]: I0120 17:05:41.491309 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ovn-controller-ovs-ccvgc"] Jan 20 17:05:41 crc kubenswrapper[4558]: I0120 17:05:41.493656 4558 scope.go:117] "RemoveContainer" containerID="9ac149b2100f0c79088616e525733e35778cad9b57e72c217002d1cbea3e83b8" Jan 20 17:05:42 crc kubenswrapper[4558]: I0120 17:05:42.574208 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2606dddf-c407-4594-a587-c4eaaa573634" path="/var/lib/kubelet/pods/2606dddf-c407-4594-a587-c4eaaa573634/volumes" Jan 20 17:05:42 crc kubenswrapper[4558]: I0120 17:05:42.993330 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/root-account-create-update-vcf64"] Jan 20 17:05:42 crc kubenswrapper[4558]: E0120 17:05:42.993649 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb2560f9-fe1c-4251-89ae-0b242a9243bc" containerName="openstack-network-exporter" Jan 20 17:05:42 crc kubenswrapper[4558]: I0120 17:05:42.993666 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb2560f9-fe1c-4251-89ae-0b242a9243bc" containerName="openstack-network-exporter" Jan 20 17:05:42 crc kubenswrapper[4558]: E0120 17:05:42.993684 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2606dddf-c407-4594-a587-c4eaaa573634" containerName="ovsdb-server" Jan 20 17:05:42 crc kubenswrapper[4558]: I0120 17:05:42.993690 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="2606dddf-c407-4594-a587-c4eaaa573634" containerName="ovsdb-server" Jan 20 17:05:42 crc kubenswrapper[4558]: E0120 17:05:42.993700 4558 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="2606dddf-c407-4594-a587-c4eaaa573634" containerName="ovs-vswitchd" Jan 20 17:05:42 crc kubenswrapper[4558]: I0120 17:05:42.993707 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="2606dddf-c407-4594-a587-c4eaaa573634" containerName="ovs-vswitchd" Jan 20 17:05:42 crc kubenswrapper[4558]: E0120 17:05:42.993727 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1cde556c-b9b1-484a-b512-ca44c5f47353" containerName="ovn-controller" Jan 20 17:05:42 crc kubenswrapper[4558]: I0120 17:05:42.993740 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1cde556c-b9b1-484a-b512-ca44c5f47353" containerName="ovn-controller" Jan 20 17:05:42 crc kubenswrapper[4558]: E0120 17:05:42.993752 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2606dddf-c407-4594-a587-c4eaaa573634" containerName="ovsdb-server-init" Jan 20 17:05:42 crc kubenswrapper[4558]: I0120 17:05:42.993757 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="2606dddf-c407-4594-a587-c4eaaa573634" containerName="ovsdb-server-init" Jan 20 17:05:42 crc kubenswrapper[4558]: I0120 17:05:42.993907 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="bb2560f9-fe1c-4251-89ae-0b242a9243bc" containerName="openstack-network-exporter" Jan 20 17:05:42 crc kubenswrapper[4558]: I0120 17:05:42.993923 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="2606dddf-c407-4594-a587-c4eaaa573634" containerName="ovsdb-server" Jan 20 17:05:42 crc kubenswrapper[4558]: I0120 17:05:42.993931 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="1cde556c-b9b1-484a-b512-ca44c5f47353" containerName="ovn-controller" Jan 20 17:05:42 crc kubenswrapper[4558]: I0120 17:05:42.993940 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="2606dddf-c407-4594-a587-c4eaaa573634" containerName="ovs-vswitchd" Jan 20 17:05:42 crc kubenswrapper[4558]: I0120 17:05:42.994476 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-vcf64" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.001332 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"openstack-mariadb-root-db-secret" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.008189 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-vcf64"] Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.060222 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/19189ead-bcd7-4806-be88-43cc27d5f202-operator-scripts\") pod \"root-account-create-update-vcf64\" (UID: \"19189ead-bcd7-4806-be88-43cc27d5f202\") " pod="openstack-kuttl-tests/root-account-create-update-vcf64" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.060360 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gzzbd\" (UniqueName: \"kubernetes.io/projected/19189ead-bcd7-4806-be88-43cc27d5f202-kube-api-access-gzzbd\") pod \"root-account-create-update-vcf64\" (UID: \"19189ead-bcd7-4806-be88-43cc27d5f202\") " pod="openstack-kuttl-tests/root-account-create-update-vcf64" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.095445 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-1b9c-account-create-update-9w5vq"] Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.096515 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-1b9c-account-create-update-9w5vq" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.101990 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-1b9c-account-create-update-dh8jf"] Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.108760 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-1b9c-account-create-update-dh8jf"] Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.120241 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-1b9c-account-create-update-9w5vq"] Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.120987 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-api-db-secret" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.166527 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3ae31408-2eb5-445e-8f66-f0f0a6a7f9f2-operator-scripts\") pod \"nova-api-1b9c-account-create-update-9w5vq\" (UID: \"3ae31408-2eb5-445e-8f66-f0f0a6a7f9f2\") " pod="openstack-kuttl-tests/nova-api-1b9c-account-create-update-9w5vq" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.166583 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qhbl4\" (UniqueName: \"kubernetes.io/projected/3ae31408-2eb5-445e-8f66-f0f0a6a7f9f2-kube-api-access-qhbl4\") pod \"nova-api-1b9c-account-create-update-9w5vq\" (UID: \"3ae31408-2eb5-445e-8f66-f0f0a6a7f9f2\") " pod="openstack-kuttl-tests/nova-api-1b9c-account-create-update-9w5vq" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.166640 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/19189ead-bcd7-4806-be88-43cc27d5f202-operator-scripts\") pod \"root-account-create-update-vcf64\" (UID: \"19189ead-bcd7-4806-be88-43cc27d5f202\") " pod="openstack-kuttl-tests/root-account-create-update-vcf64" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.166714 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gzzbd\" (UniqueName: \"kubernetes.io/projected/19189ead-bcd7-4806-be88-43cc27d5f202-kube-api-access-gzzbd\") pod \"root-account-create-update-vcf64\" (UID: \"19189ead-bcd7-4806-be88-43cc27d5f202\") " pod="openstack-kuttl-tests/root-account-create-update-vcf64" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.167652 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/19189ead-bcd7-4806-be88-43cc27d5f202-operator-scripts\") pod \"root-account-create-update-vcf64\" (UID: \"19189ead-bcd7-4806-be88-43cc27d5f202\") " pod="openstack-kuttl-tests/root-account-create-update-vcf64" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.170578 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-bfd5-account-create-update-rvnw4"] Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.171551 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-bfd5-account-create-update-rvnw4" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.173262 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-db-secret" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.210155 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-767d5d98bd-zk227"] Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.211480 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-767d5d98bd-zk227" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.221422 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gzzbd\" (UniqueName: \"kubernetes.io/projected/19189ead-bcd7-4806-be88-43cc27d5f202-kube-api-access-gzzbd\") pod \"root-account-create-update-vcf64\" (UID: \"19189ead-bcd7-4806-be88-43cc27d5f202\") " pod="openstack-kuttl-tests/root-account-create-update-vcf64" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.233513 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-6d9jk"] Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.245899 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-103a-account-create-update-whkzm"] Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.246934 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-103a-account-create-update-whkzm" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.251643 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-db-secret" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.267947 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-worker-57fd8b7745-hswtl"] Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.269519 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-57fd8b7745-hswtl" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.269550 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ef319bad-004a-4140-b9a3-e34b376460da-config-data-custom\") pod \"barbican-keystone-listener-767d5d98bd-zk227\" (UID: \"ef319bad-004a-4140-b9a3-e34b376460da\") " pod="openstack-kuttl-tests/barbican-keystone-listener-767d5d98bd-zk227" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.269603 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3ae31408-2eb5-445e-8f66-f0f0a6a7f9f2-operator-scripts\") pod \"nova-api-1b9c-account-create-update-9w5vq\" (UID: \"3ae31408-2eb5-445e-8f66-f0f0a6a7f9f2\") " pod="openstack-kuttl-tests/nova-api-1b9c-account-create-update-9w5vq" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.269627 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qhbl4\" (UniqueName: \"kubernetes.io/projected/3ae31408-2eb5-445e-8f66-f0f0a6a7f9f2-kube-api-access-qhbl4\") pod \"nova-api-1b9c-account-create-update-9w5vq\" (UID: \"3ae31408-2eb5-445e-8f66-f0f0a6a7f9f2\") " pod="openstack-kuttl-tests/nova-api-1b9c-account-create-update-9w5vq" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.269649 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bnwf5\" (UniqueName: \"kubernetes.io/projected/ef319bad-004a-4140-b9a3-e34b376460da-kube-api-access-bnwf5\") pod \"barbican-keystone-listener-767d5d98bd-zk227\" (UID: \"ef319bad-004a-4140-b9a3-e34b376460da\") " pod="openstack-kuttl-tests/barbican-keystone-listener-767d5d98bd-zk227" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.269699 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef319bad-004a-4140-b9a3-e34b376460da-combined-ca-bundle\") pod \"barbican-keystone-listener-767d5d98bd-zk227\" (UID: \"ef319bad-004a-4140-b9a3-e34b376460da\") " pod="openstack-kuttl-tests/barbican-keystone-listener-767d5d98bd-zk227" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.269716 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/79c1324d-aa7f-4add-a2c3-12f6d4c6216f-operator-scripts\") pod \"cinder-bfd5-account-create-update-rvnw4\" (UID: \"79c1324d-aa7f-4add-a2c3-12f6d4c6216f\") " pod="openstack-kuttl-tests/cinder-bfd5-account-create-update-rvnw4" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.269749 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-45lgx\" (UniqueName: \"kubernetes.io/projected/f6a8779d-bb69-43c5-96d1-a7669b5dd9ea-kube-api-access-45lgx\") pod \"glance-103a-account-create-update-whkzm\" (UID: \"f6a8779d-bb69-43c5-96d1-a7669b5dd9ea\") " pod="openstack-kuttl-tests/glance-103a-account-create-update-whkzm" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.269768 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mcn45\" (UniqueName: \"kubernetes.io/projected/79c1324d-aa7f-4add-a2c3-12f6d4c6216f-kube-api-access-mcn45\") pod 
\"cinder-bfd5-account-create-update-rvnw4\" (UID: \"79c1324d-aa7f-4add-a2c3-12f6d4c6216f\") " pod="openstack-kuttl-tests/cinder-bfd5-account-create-update-rvnw4" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.269783 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ef319bad-004a-4140-b9a3-e34b376460da-logs\") pod \"barbican-keystone-listener-767d5d98bd-zk227\" (UID: \"ef319bad-004a-4140-b9a3-e34b376460da\") " pod="openstack-kuttl-tests/barbican-keystone-listener-767d5d98bd-zk227" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.269834 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef319bad-004a-4140-b9a3-e34b376460da-config-data\") pod \"barbican-keystone-listener-767d5d98bd-zk227\" (UID: \"ef319bad-004a-4140-b9a3-e34b376460da\") " pod="openstack-kuttl-tests/barbican-keystone-listener-767d5d98bd-zk227" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.269853 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f6a8779d-bb69-43c5-96d1-a7669b5dd9ea-operator-scripts\") pod \"glance-103a-account-create-update-whkzm\" (UID: \"f6a8779d-bb69-43c5-96d1-a7669b5dd9ea\") " pod="openstack-kuttl-tests/glance-103a-account-create-update-whkzm" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.270468 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3ae31408-2eb5-445e-8f66-f0f0a6a7f9f2-operator-scripts\") pod \"nova-api-1b9c-account-create-update-9w5vq\" (UID: \"3ae31408-2eb5-445e-8f66-f0f0a6a7f9f2\") " pod="openstack-kuttl-tests/nova-api-1b9c-account-create-update-9w5vq" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.284250 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-6d9jk"] Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.293362 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-767d5d98bd-zk227"] Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.307438 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-vcf64" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.308956 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-0"] Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.335580 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qhbl4\" (UniqueName: \"kubernetes.io/projected/3ae31408-2eb5-445e-8f66-f0f0a6a7f9f2-kube-api-access-qhbl4\") pod \"nova-api-1b9c-account-create-update-9w5vq\" (UID: \"3ae31408-2eb5-445e-8f66-f0f0a6a7f9f2\") " pod="openstack-kuttl-tests/nova-api-1b9c-account-create-update-9w5vq" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.375286 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-8d7e-account-create-update-jzmzm"] Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.377030 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bnwf5\" (UniqueName: \"kubernetes.io/projected/ef319bad-004a-4140-b9a3-e34b376460da-kube-api-access-bnwf5\") pod \"barbican-keystone-listener-767d5d98bd-zk227\" (UID: \"ef319bad-004a-4140-b9a3-e34b376460da\") " pod="openstack-kuttl-tests/barbican-keystone-listener-767d5d98bd-zk227" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.377112 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-8d7e-account-create-update-jzmzm" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.377140 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef319bad-004a-4140-b9a3-e34b376460da-combined-ca-bundle\") pod \"barbican-keystone-listener-767d5d98bd-zk227\" (UID: \"ef319bad-004a-4140-b9a3-e34b376460da\") " pod="openstack-kuttl-tests/barbican-keystone-listener-767d5d98bd-zk227" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.377182 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/79c1324d-aa7f-4add-a2c3-12f6d4c6216f-operator-scripts\") pod \"cinder-bfd5-account-create-update-rvnw4\" (UID: \"79c1324d-aa7f-4add-a2c3-12f6d4c6216f\") " pod="openstack-kuttl-tests/cinder-bfd5-account-create-update-rvnw4" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.377237 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-45lgx\" (UniqueName: \"kubernetes.io/projected/f6a8779d-bb69-43c5-96d1-a7669b5dd9ea-kube-api-access-45lgx\") pod \"glance-103a-account-create-update-whkzm\" (UID: \"f6a8779d-bb69-43c5-96d1-a7669b5dd9ea\") " pod="openstack-kuttl-tests/glance-103a-account-create-update-whkzm" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.377269 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mcn45\" (UniqueName: \"kubernetes.io/projected/79c1324d-aa7f-4add-a2c3-12f6d4c6216f-kube-api-access-mcn45\") pod \"cinder-bfd5-account-create-update-rvnw4\" (UID: \"79c1324d-aa7f-4add-a2c3-12f6d4c6216f\") " pod="openstack-kuttl-tests/cinder-bfd5-account-create-update-rvnw4" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.377286 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ef319bad-004a-4140-b9a3-e34b376460da-logs\") pod 
\"barbican-keystone-listener-767d5d98bd-zk227\" (UID: \"ef319bad-004a-4140-b9a3-e34b376460da\") " pod="openstack-kuttl-tests/barbican-keystone-listener-767d5d98bd-zk227" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.377387 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef319bad-004a-4140-b9a3-e34b376460da-config-data\") pod \"barbican-keystone-listener-767d5d98bd-zk227\" (UID: \"ef319bad-004a-4140-b9a3-e34b376460da\") " pod="openstack-kuttl-tests/barbican-keystone-listener-767d5d98bd-zk227" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.377421 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f6a8779d-bb69-43c5-96d1-a7669b5dd9ea-operator-scripts\") pod \"glance-103a-account-create-update-whkzm\" (UID: \"f6a8779d-bb69-43c5-96d1-a7669b5dd9ea\") " pod="openstack-kuttl-tests/glance-103a-account-create-update-whkzm" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.377451 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0fcabe47-75cd-4bff-ba51-8a65a23b3f1f-logs\") pod \"barbican-worker-57fd8b7745-hswtl\" (UID: \"0fcabe47-75cd-4bff-ba51-8a65a23b3f1f\") " pod="openstack-kuttl-tests/barbican-worker-57fd8b7745-hswtl" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.377483 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0fcabe47-75cd-4bff-ba51-8a65a23b3f1f-config-data-custom\") pod \"barbican-worker-57fd8b7745-hswtl\" (UID: \"0fcabe47-75cd-4bff-ba51-8a65a23b3f1f\") " pod="openstack-kuttl-tests/barbican-worker-57fd8b7745-hswtl" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.377503 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g2dlb\" (UniqueName: \"kubernetes.io/projected/0fcabe47-75cd-4bff-ba51-8a65a23b3f1f-kube-api-access-g2dlb\") pod \"barbican-worker-57fd8b7745-hswtl\" (UID: \"0fcabe47-75cd-4bff-ba51-8a65a23b3f1f\") " pod="openstack-kuttl-tests/barbican-worker-57fd8b7745-hswtl" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.377526 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ef319bad-004a-4140-b9a3-e34b376460da-config-data-custom\") pod \"barbican-keystone-listener-767d5d98bd-zk227\" (UID: \"ef319bad-004a-4140-b9a3-e34b376460da\") " pod="openstack-kuttl-tests/barbican-keystone-listener-767d5d98bd-zk227" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.377550 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0fcabe47-75cd-4bff-ba51-8a65a23b3f1f-combined-ca-bundle\") pod \"barbican-worker-57fd8b7745-hswtl\" (UID: \"0fcabe47-75cd-4bff-ba51-8a65a23b3f1f\") " pod="openstack-kuttl-tests/barbican-worker-57fd8b7745-hswtl" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.377615 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0fcabe47-75cd-4bff-ba51-8a65a23b3f1f-config-data\") pod \"barbican-worker-57fd8b7745-hswtl\" (UID: \"0fcabe47-75cd-4bff-ba51-8a65a23b3f1f\") " 
pod="openstack-kuttl-tests/barbican-worker-57fd8b7745-hswtl" Jan 20 17:05:43 crc kubenswrapper[4558]: E0120 17:05:43.377900 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:05:43 crc kubenswrapper[4558]: E0120 17:05:43.377947 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ef319bad-004a-4140-b9a3-e34b376460da-combined-ca-bundle podName:ef319bad-004a-4140-b9a3-e34b376460da nodeName:}" failed. No retries permitted until 2026-01-20 17:05:43.877934255 +0000 UTC m=+1437.638272222 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/ef319bad-004a-4140-b9a3-e34b376460da-combined-ca-bundle") pod "barbican-keystone-listener-767d5d98bd-zk227" (UID: "ef319bad-004a-4140-b9a3-e34b376460da") : secret "combined-ca-bundle" not found Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.378189 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ef319bad-004a-4140-b9a3-e34b376460da-logs\") pod \"barbican-keystone-listener-767d5d98bd-zk227\" (UID: \"ef319bad-004a-4140-b9a3-e34b376460da\") " pod="openstack-kuttl-tests/barbican-keystone-listener-767d5d98bd-zk227" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.378636 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/79c1324d-aa7f-4add-a2c3-12f6d4c6216f-operator-scripts\") pod \"cinder-bfd5-account-create-update-rvnw4\" (UID: \"79c1324d-aa7f-4add-a2c3-12f6d4c6216f\") " pod="openstack-kuttl-tests/cinder-bfd5-account-create-update-rvnw4" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.378789 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f6a8779d-bb69-43c5-96d1-a7669b5dd9ea-operator-scripts\") pod \"glance-103a-account-create-update-whkzm\" (UID: \"f6a8779d-bb69-43c5-96d1-a7669b5dd9ea\") " pod="openstack-kuttl-tests/glance-103a-account-create-update-whkzm" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.384515 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef319bad-004a-4140-b9a3-e34b376460da-config-data\") pod \"barbican-keystone-listener-767d5d98bd-zk227\" (UID: \"ef319bad-004a-4140-b9a3-e34b376460da\") " pod="openstack-kuttl-tests/barbican-keystone-listener-767d5d98bd-zk227" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.385886 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ef319bad-004a-4140-b9a3-e34b376460da-config-data-custom\") pod \"barbican-keystone-listener-767d5d98bd-zk227\" (UID: \"ef319bad-004a-4140-b9a3-e34b376460da\") " pod="openstack-kuttl-tests/barbican-keystone-listener-767d5d98bd-zk227" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.404233 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.404464 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/openstackclient" podUID="2e9a9b09-70ea-4d56-87ac-5a2c3461d2df" containerName="openstackclient" containerID="cri-o://14b34f3f512d1c97cde26d0f645f8f50baa327877aab8009b5d6e09f075e96b7" gracePeriod=2 Jan 20 17:05:43 crc 
kubenswrapper[4558]: I0120 17:05:43.419632 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-db-secret" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.420200 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-1b9c-account-create-update-9w5vq" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.427214 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-worker-57fd8b7745-hswtl"] Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.437750 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.448622 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-8d7e-account-create-update-jzmzm"] Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.477489 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-bfd5-account-create-update-rvnw4"] Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.491133 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-bfd5-account-create-update-vhkrw"] Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.495880 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0fcabe47-75cd-4bff-ba51-8a65a23b3f1f-logs\") pod \"barbican-worker-57fd8b7745-hswtl\" (UID: \"0fcabe47-75cd-4bff-ba51-8a65a23b3f1f\") " pod="openstack-kuttl-tests/barbican-worker-57fd8b7745-hswtl" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.495926 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0fcabe47-75cd-4bff-ba51-8a65a23b3f1f-config-data-custom\") pod \"barbican-worker-57fd8b7745-hswtl\" (UID: \"0fcabe47-75cd-4bff-ba51-8a65a23b3f1f\") " pod="openstack-kuttl-tests/barbican-worker-57fd8b7745-hswtl" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.495947 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g2dlb\" (UniqueName: \"kubernetes.io/projected/0fcabe47-75cd-4bff-ba51-8a65a23b3f1f-kube-api-access-g2dlb\") pod \"barbican-worker-57fd8b7745-hswtl\" (UID: \"0fcabe47-75cd-4bff-ba51-8a65a23b3f1f\") " pod="openstack-kuttl-tests/barbican-worker-57fd8b7745-hswtl" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.495988 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0fcabe47-75cd-4bff-ba51-8a65a23b3f1f-combined-ca-bundle\") pod \"barbican-worker-57fd8b7745-hswtl\" (UID: \"0fcabe47-75cd-4bff-ba51-8a65a23b3f1f\") " pod="openstack-kuttl-tests/barbican-worker-57fd8b7745-hswtl" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.496030 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0fcabe47-75cd-4bff-ba51-8a65a23b3f1f-config-data\") pod \"barbican-worker-57fd8b7745-hswtl\" (UID: \"0fcabe47-75cd-4bff-ba51-8a65a23b3f1f\") " pod="openstack-kuttl-tests/barbican-worker-57fd8b7745-hswtl" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.497671 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-103a-account-create-update-xfb6j"] Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 
17:05:43.502134 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-bfd5-account-create-update-vhkrw"] Jan 20 17:05:43 crc kubenswrapper[4558]: E0120 17:05:43.502640 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:05:43 crc kubenswrapper[4558]: E0120 17:05:43.502694 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0fcabe47-75cd-4bff-ba51-8a65a23b3f1f-combined-ca-bundle podName:0fcabe47-75cd-4bff-ba51-8a65a23b3f1f nodeName:}" failed. No retries permitted until 2026-01-20 17:05:44.002677667 +0000 UTC m=+1437.763015623 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/0fcabe47-75cd-4bff-ba51-8a65a23b3f1f-combined-ca-bundle") pod "barbican-worker-57fd8b7745-hswtl" (UID: "0fcabe47-75cd-4bff-ba51-8a65a23b3f1f") : secret "combined-ca-bundle" not found Jan 20 17:05:43 crc kubenswrapper[4558]: E0120 17:05:43.502735 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Jan 20 17:05:43 crc kubenswrapper[4558]: E0120 17:05:43.502756 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/0bfc3458-cc0f-4bea-9794-52c5e81fe055-config-data podName:0bfc3458-cc0f-4bea-9794-52c5e81fe055 nodeName:}" failed. No retries permitted until 2026-01-20 17:05:44.002750193 +0000 UTC m=+1437.763088160 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/0bfc3458-cc0f-4bea-9794-52c5e81fe055-config-data") pod "rabbitmq-cell1-server-0" (UID: "0bfc3458-cc0f-4bea-9794-52c5e81fe055") : configmap "rabbitmq-cell1-config-data" not found Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.502798 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0fcabe47-75cd-4bff-ba51-8a65a23b3f1f-logs\") pod \"barbican-worker-57fd8b7745-hswtl\" (UID: \"0fcabe47-75cd-4bff-ba51-8a65a23b3f1f\") " pod="openstack-kuttl-tests/barbican-worker-57fd8b7745-hswtl" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.525535 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-103a-account-create-update-xfb6j"] Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.527998 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0fcabe47-75cd-4bff-ba51-8a65a23b3f1f-config-data\") pod \"barbican-worker-57fd8b7745-hswtl\" (UID: \"0fcabe47-75cd-4bff-ba51-8a65a23b3f1f\") " pod="openstack-kuttl-tests/barbican-worker-57fd8b7745-hswtl" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.528643 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-45lgx\" (UniqueName: \"kubernetes.io/projected/f6a8779d-bb69-43c5-96d1-a7669b5dd9ea-kube-api-access-45lgx\") pod \"glance-103a-account-create-update-whkzm\" (UID: \"f6a8779d-bb69-43c5-96d1-a7669b5dd9ea\") " pod="openstack-kuttl-tests/glance-103a-account-create-update-whkzm" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.538718 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mcn45\" (UniqueName: \"kubernetes.io/projected/79c1324d-aa7f-4add-a2c3-12f6d4c6216f-kube-api-access-mcn45\") pod \"cinder-bfd5-account-create-update-rvnw4\" (UID: 
\"79c1324d-aa7f-4add-a2c3-12f6d4c6216f\") " pod="openstack-kuttl-tests/cinder-bfd5-account-create-update-rvnw4" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.542490 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bnwf5\" (UniqueName: \"kubernetes.io/projected/ef319bad-004a-4140-b9a3-e34b376460da-kube-api-access-bnwf5\") pod \"barbican-keystone-listener-767d5d98bd-zk227\" (UID: \"ef319bad-004a-4140-b9a3-e34b376460da\") " pod="openstack-kuttl-tests/barbican-keystone-listener-767d5d98bd-zk227" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.558576 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-103a-account-create-update-whkzm"] Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.563792 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0fcabe47-75cd-4bff-ba51-8a65a23b3f1f-config-data-custom\") pod \"barbican-worker-57fd8b7745-hswtl\" (UID: \"0fcabe47-75cd-4bff-ba51-8a65a23b3f1f\") " pod="openstack-kuttl-tests/barbican-worker-57fd8b7745-hswtl" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.568581 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-103a-account-create-update-whkzm" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.568916 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g2dlb\" (UniqueName: \"kubernetes.io/projected/0fcabe47-75cd-4bff-ba51-8a65a23b3f1f-kube-api-access-g2dlb\") pod \"barbican-worker-57fd8b7745-hswtl\" (UID: \"0fcabe47-75cd-4bff-ba51-8a65a23b3f1f\") " pod="openstack-kuttl-tests/barbican-worker-57fd8b7745-hswtl" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.574323 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell0-aebd-account-create-update-z79l4"] Jan 20 17:05:43 crc kubenswrapper[4558]: E0120 17:05:43.574713 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e9a9b09-70ea-4d56-87ac-5a2c3461d2df" containerName="openstackclient" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.574725 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e9a9b09-70ea-4d56-87ac-5a2c3461d2df" containerName="openstackclient" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.574912 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="2e9a9b09-70ea-4d56-87ac-5a2c3461d2df" containerName="openstackclient" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.581198 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-aebd-account-create-update-z79l4" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.593260 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-db-secret" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.598064 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fkfw5\" (UniqueName: \"kubernetes.io/projected/43e20233-57f1-4b80-8717-1668dee2a884-kube-api-access-fkfw5\") pod \"nova-cell1-8d7e-account-create-update-jzmzm\" (UID: \"43e20233-57f1-4b80-8717-1668dee2a884\") " pod="openstack-kuttl-tests/nova-cell1-8d7e-account-create-update-jzmzm" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.598226 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/43e20233-57f1-4b80-8717-1668dee2a884-operator-scripts\") pod \"nova-cell1-8d7e-account-create-update-jzmzm\" (UID: \"43e20233-57f1-4b80-8717-1668dee2a884\") " pod="openstack-kuttl-tests/nova-cell1-8d7e-account-create-update-jzmzm" Jan 20 17:05:43 crc kubenswrapper[4558]: E0120 17:05:43.598450 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:05:43 crc kubenswrapper[4558]: E0120 17:05:43.598488 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b9c2a3b1-71ed-4612-8cd0-22e396cd622c-combined-ca-bundle podName:b9c2a3b1-71ed-4612-8cd0-22e396cd622c nodeName:}" failed. No retries permitted until 2026-01-20 17:05:44.098476712 +0000 UTC m=+1437.858814679 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/b9c2a3b1-71ed-4612-8cd0-22e396cd622c-combined-ca-bundle") pod "kube-state-metrics-0" (UID: "b9c2a3b1-71ed-4612-8cd0-22e396cd622c") : secret "combined-ca-bundle" not found Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.621686 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-aebd-account-create-update-z79l4"] Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.649228 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-7436-account-create-update-hpxxl"] Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.650419 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-7436-account-create-update-hpxxl" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.660654 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-db-secret" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.670095 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-7436-account-create-update-hpxxl"] Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.686103 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-8d7e-account-create-update-zl2rq"] Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.704231 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-db-sync-bdqwq"] Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.705271 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qlbzm\" (UniqueName: \"kubernetes.io/projected/1fbb53e2-e2a2-4d62-a392-027b1f3f3232-kube-api-access-qlbzm\") pod \"neutron-7436-account-create-update-hpxxl\" (UID: \"1fbb53e2-e2a2-4d62-a392-027b1f3f3232\") " pod="openstack-kuttl-tests/neutron-7436-account-create-update-hpxxl" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.705336 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/43e20233-57f1-4b80-8717-1668dee2a884-operator-scripts\") pod \"nova-cell1-8d7e-account-create-update-jzmzm\" (UID: \"43e20233-57f1-4b80-8717-1668dee2a884\") " pod="openstack-kuttl-tests/nova-cell1-8d7e-account-create-update-jzmzm" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.705541 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fkfw5\" (UniqueName: \"kubernetes.io/projected/43e20233-57f1-4b80-8717-1668dee2a884-kube-api-access-fkfw5\") pod \"nova-cell1-8d7e-account-create-update-jzmzm\" (UID: \"43e20233-57f1-4b80-8717-1668dee2a884\") " pod="openstack-kuttl-tests/nova-cell1-8d7e-account-create-update-jzmzm" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.705582 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1fbb53e2-e2a2-4d62-a392-027b1f3f3232-operator-scripts\") pod \"neutron-7436-account-create-update-hpxxl\" (UID: \"1fbb53e2-e2a2-4d62-a392-027b1f3f3232\") " pod="openstack-kuttl-tests/neutron-7436-account-create-update-hpxxl" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.705621 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mf2t2\" (UniqueName: \"kubernetes.io/projected/3e05451a-8619-4559-9648-cc40e9ea5bb0-kube-api-access-mf2t2\") pod \"nova-cell0-aebd-account-create-update-z79l4\" (UID: \"3e05451a-8619-4559-9648-cc40e9ea5bb0\") " pod="openstack-kuttl-tests/nova-cell0-aebd-account-create-update-z79l4" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.705700 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3e05451a-8619-4559-9648-cc40e9ea5bb0-operator-scripts\") pod \"nova-cell0-aebd-account-create-update-z79l4\" (UID: \"3e05451a-8619-4559-9648-cc40e9ea5bb0\") " pod="openstack-kuttl-tests/nova-cell0-aebd-account-create-update-z79l4" Jan 20 17:05:43 crc 
kubenswrapper[4558]: I0120 17:05:43.706326 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/43e20233-57f1-4b80-8717-1668dee2a884-operator-scripts\") pod \"nova-cell1-8d7e-account-create-update-jzmzm\" (UID: \"43e20233-57f1-4b80-8717-1668dee2a884\") " pod="openstack-kuttl-tests/nova-cell1-8d7e-account-create-update-jzmzm" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.724477 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/placement-db-sync-bdqwq"] Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.742455 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fkfw5\" (UniqueName: \"kubernetes.io/projected/43e20233-57f1-4b80-8717-1668dee2a884-kube-api-access-fkfw5\") pod \"nova-cell1-8d7e-account-create-update-jzmzm\" (UID: \"43e20233-57f1-4b80-8717-1668dee2a884\") " pod="openstack-kuttl-tests/nova-cell1-8d7e-account-create-update-jzmzm" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.746357 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-8d7e-account-create-update-zl2rq"] Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.772233 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-aebd-account-create-update-mnvvb"] Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.790436 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-bfd5-account-create-update-rvnw4" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.801432 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell0-aebd-account-create-update-mnvvb"] Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.807606 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.807817 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovn-northd-0" podUID="33be1904-bd58-48cc-806a-af1dc751717c" containerName="ovn-northd" containerID="cri-o://811380372fc9996d1d29a2c8b3863bd8fa15b182a9a97db33dd3fd3b535d9455" gracePeriod=30 Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.808408 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovn-northd-0" podUID="33be1904-bd58-48cc-806a-af1dc751717c" containerName="openstack-network-exporter" containerID="cri-o://eb1f0784a2f2122587c3f2a6924c91413be937ed5c1f85ceae798d13f078873b" gracePeriod=30 Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.809257 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1fbb53e2-e2a2-4d62-a392-027b1f3f3232-operator-scripts\") pod \"neutron-7436-account-create-update-hpxxl\" (UID: \"1fbb53e2-e2a2-4d62-a392-027b1f3f3232\") " pod="openstack-kuttl-tests/neutron-7436-account-create-update-hpxxl" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.809342 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mf2t2\" (UniqueName: \"kubernetes.io/projected/3e05451a-8619-4559-9648-cc40e9ea5bb0-kube-api-access-mf2t2\") pod \"nova-cell0-aebd-account-create-update-z79l4\" (UID: \"3e05451a-8619-4559-9648-cc40e9ea5bb0\") " pod="openstack-kuttl-tests/nova-cell0-aebd-account-create-update-z79l4" 
Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.809427 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3e05451a-8619-4559-9648-cc40e9ea5bb0-operator-scripts\") pod \"nova-cell0-aebd-account-create-update-z79l4\" (UID: \"3e05451a-8619-4559-9648-cc40e9ea5bb0\") " pod="openstack-kuttl-tests/nova-cell0-aebd-account-create-update-z79l4" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.809470 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qlbzm\" (UniqueName: \"kubernetes.io/projected/1fbb53e2-e2a2-4d62-a392-027b1f3f3232-kube-api-access-qlbzm\") pod \"neutron-7436-account-create-update-hpxxl\" (UID: \"1fbb53e2-e2a2-4d62-a392-027b1f3f3232\") " pod="openstack-kuttl-tests/neutron-7436-account-create-update-hpxxl" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.810590 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1fbb53e2-e2a2-4d62-a392-027b1f3f3232-operator-scripts\") pod \"neutron-7436-account-create-update-hpxxl\" (UID: \"1fbb53e2-e2a2-4d62-a392-027b1f3f3232\") " pod="openstack-kuttl-tests/neutron-7436-account-create-update-hpxxl" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.811233 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3e05451a-8619-4559-9648-cc40e9ea5bb0-operator-scripts\") pod \"nova-cell0-aebd-account-create-update-z79l4\" (UID: \"3e05451a-8619-4559-9648-cc40e9ea5bb0\") " pod="openstack-kuttl-tests/nova-cell0-aebd-account-create-update-z79l4" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.829338 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-8d7e-account-create-update-jzmzm" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.834912 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-7436-account-create-update-hmcdh"] Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.837935 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-7436-account-create-update-hmcdh"] Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.848689 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mf2t2\" (UniqueName: \"kubernetes.io/projected/3e05451a-8619-4559-9648-cc40e9ea5bb0-kube-api-access-mf2t2\") pod \"nova-cell0-aebd-account-create-update-z79l4\" (UID: \"3e05451a-8619-4559-9648-cc40e9ea5bb0\") " pod="openstack-kuttl-tests/nova-cell0-aebd-account-create-update-z79l4" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.859823 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qlbzm\" (UniqueName: \"kubernetes.io/projected/1fbb53e2-e2a2-4d62-a392-027b1f3f3232-kube-api-access-qlbzm\") pod \"neutron-7436-account-create-update-hpxxl\" (UID: \"1fbb53e2-e2a2-4d62-a392-027b1f3f3232\") " pod="openstack-kuttl-tests/neutron-7436-account-create-update-hpxxl" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.872374 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.872960 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovsdbserver-nb-0" podUID="2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf" containerName="openstack-network-exporter" containerID="cri-o://6b320eb22df0758cdab9680050c01fd3a7007ed6383b4dc60f038863e7bdaedc" gracePeriod=300 Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.890147 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-db-sync-qchhz"] Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.903850 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-bb2e-account-create-update-gqjcg"] Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.905007 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-bb2e-account-create-update-gqjcg" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.911799 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-db-secret" Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.914386 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef319bad-004a-4140-b9a3-e34b376460da-combined-ca-bundle\") pod \"barbican-keystone-listener-767d5d98bd-zk227\" (UID: \"ef319bad-004a-4140-b9a3-e34b376460da\") " pod="openstack-kuttl-tests/barbican-keystone-listener-767d5d98bd-zk227" Jan 20 17:05:43 crc kubenswrapper[4558]: E0120 17:05:43.914639 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:05:43 crc kubenswrapper[4558]: E0120 17:05:43.914674 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ef319bad-004a-4140-b9a3-e34b376460da-combined-ca-bundle podName:ef319bad-004a-4140-b9a3-e34b376460da nodeName:}" failed. 
No retries permitted until 2026-01-20 17:05:44.914662714 +0000 UTC m=+1438.675000682 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/ef319bad-004a-4140-b9a3-e34b376460da-combined-ca-bundle") pod "barbican-keystone-listener-767d5d98bd-zk227" (UID: "ef319bad-004a-4140-b9a3-e34b376460da") : secret "combined-ca-bundle" not found Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.926494 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-db-sync-qchhz"] Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.942688 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-bb2e-account-create-update-gqjcg"] Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.955770 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.957467 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovsdbserver-sb-0" podUID="37e84078-defe-4c61-ac89-878165827bba" containerName="openstack-network-exporter" containerID="cri-o://79b080f284ed7a9adbc1915885043c35d4710d77244162383b4e53aa16558542" gracePeriod=300 Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.986836 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-bb2e-account-create-update-5hchq"] Jan 20 17:05:43 crc kubenswrapper[4558]: I0120 17:05:43.988640 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-aebd-account-create-update-z79l4" Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.013228 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-7436-account-create-update-hpxxl" Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.018131 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d4457629-a457-4a4c-b285-eab441f8d319-operator-scripts\") pod \"barbican-bb2e-account-create-update-gqjcg\" (UID: \"d4457629-a457-4a4c-b285-eab441f8d319\") " pod="openstack-kuttl-tests/barbican-bb2e-account-create-update-gqjcg" Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.018321 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0fcabe47-75cd-4bff-ba51-8a65a23b3f1f-combined-ca-bundle\") pod \"barbican-worker-57fd8b7745-hswtl\" (UID: \"0fcabe47-75cd-4bff-ba51-8a65a23b3f1f\") " pod="openstack-kuttl-tests/barbican-worker-57fd8b7745-hswtl" Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.018359 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-97jgv\" (UniqueName: \"kubernetes.io/projected/d4457629-a457-4a4c-b285-eab441f8d319-kube-api-access-97jgv\") pod \"barbican-bb2e-account-create-update-gqjcg\" (UID: \"d4457629-a457-4a4c-b285-eab441f8d319\") " pod="openstack-kuttl-tests/barbican-bb2e-account-create-update-gqjcg" Jan 20 17:05:44 crc kubenswrapper[4558]: E0120 17:05:44.018561 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Jan 20 17:05:44 crc kubenswrapper[4558]: E0120 17:05:44.018605 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/0bfc3458-cc0f-4bea-9794-52c5e81fe055-config-data podName:0bfc3458-cc0f-4bea-9794-52c5e81fe055 nodeName:}" failed. No retries permitted until 2026-01-20 17:05:45.018590736 +0000 UTC m=+1438.778928703 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/0bfc3458-cc0f-4bea-9794-52c5e81fe055-config-data") pod "rabbitmq-cell1-server-0" (UID: "0bfc3458-cc0f-4bea-9794-52c5e81fe055") : configmap "rabbitmq-cell1-config-data" not found Jan 20 17:05:44 crc kubenswrapper[4558]: E0120 17:05:44.019679 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:05:44 crc kubenswrapper[4558]: E0120 17:05:44.019751 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0fcabe47-75cd-4bff-ba51-8a65a23b3f1f-combined-ca-bundle podName:0fcabe47-75cd-4bff-ba51-8a65a23b3f1f nodeName:}" failed. No retries permitted until 2026-01-20 17:05:45.019726762 +0000 UTC m=+1438.780064729 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/0fcabe47-75cd-4bff-ba51-8a65a23b3f1f-combined-ca-bundle") pod "barbican-worker-57fd8b7745-hswtl" (UID: "0fcabe47-75cd-4bff-ba51-8a65a23b3f1f") : secret "combined-ca-bundle" not found Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.061728 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-db-sync-hsdcl"] Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.078506 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-db-sync-hsdcl"] Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.086277 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovsdbserver-nb-0" podUID="2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf" containerName="ovsdbserver-nb" containerID="cri-o://a7862609eb045b31d1cf0e6477c1e8599625e319c33baf93c6ef05261a3fad12" gracePeriod=300 Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.096210 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-bb2e-account-create-update-5hchq"] Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.114870 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-cell-mapping-9v2w5"] Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.120992 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-97jgv\" (UniqueName: \"kubernetes.io/projected/d4457629-a457-4a4c-b285-eab441f8d319-kube-api-access-97jgv\") pod \"barbican-bb2e-account-create-update-gqjcg\" (UID: \"d4457629-a457-4a4c-b285-eab441f8d319\") " pod="openstack-kuttl-tests/barbican-bb2e-account-create-update-gqjcg" Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.121115 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d4457629-a457-4a4c-b285-eab441f8d319-operator-scripts\") pod \"barbican-bb2e-account-create-update-gqjcg\" (UID: \"d4457629-a457-4a4c-b285-eab441f8d319\") " pod="openstack-kuttl-tests/barbican-bb2e-account-create-update-gqjcg" Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.122210 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d4457629-a457-4a4c-b285-eab441f8d319-operator-scripts\") pod \"barbican-bb2e-account-create-update-gqjcg\" (UID: \"d4457629-a457-4a4c-b285-eab441f8d319\") " pod="openstack-kuttl-tests/barbican-bb2e-account-create-update-gqjcg" Jan 20 17:05:44 crc kubenswrapper[4558]: E0120 17:05:44.122313 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:05:44 crc kubenswrapper[4558]: E0120 17:05:44.122396 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b9c2a3b1-71ed-4612-8cd0-22e396cd622c-combined-ca-bundle podName:b9c2a3b1-71ed-4612-8cd0-22e396cd622c nodeName:}" failed. No retries permitted until 2026-01-20 17:05:45.122384345 +0000 UTC m=+1438.882722312 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/b9c2a3b1-71ed-4612-8cd0-22e396cd622c-combined-ca-bundle") pod "kube-state-metrics-0" (UID: "b9c2a3b1-71ed-4612-8cd0-22e396cd622c") : secret "combined-ca-bundle" not found Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.126202 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-cell-mapping-9v2w5"] Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.136326 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovsdbserver-sb-0" podUID="37e84078-defe-4c61-ac89-878165827bba" containerName="ovsdbserver-sb" containerID="cri-o://b7a6ed7425377191f27273a9edf53dbd7c7ed2de00fbc9623792e4bfc9ffed21" gracePeriod=300 Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.136352 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-cell-mapping-6ssx9"] Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.144622 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-97jgv\" (UniqueName: \"kubernetes.io/projected/d4457629-a457-4a4c-b285-eab441f8d319-kube-api-access-97jgv\") pod \"barbican-bb2e-account-create-update-gqjcg\" (UID: \"d4457629-a457-4a4c-b285-eab441f8d319\") " pod="openstack-kuttl-tests/barbican-bb2e-account-create-update-gqjcg" Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.144678 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell0-cell-mapping-6ssx9"] Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.163371 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.175325 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-db-sync-x2xk2"] Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.182401 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-db-sync-x2xk2"] Jan 20 17:05:44 crc kubenswrapper[4558]: E0120 17:05:44.237796 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 20 17:05:44 crc kubenswrapper[4558]: E0120 17:05:44.237833 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/ac55b716-d8fd-4628-8627-f94b5a4e7c78-config-data podName:ac55b716-d8fd-4628-8627-f94b5a4e7c78 nodeName:}" failed. No retries permitted until 2026-01-20 17:05:44.737822039 +0000 UTC m=+1438.498160006 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/ac55b716-d8fd-4628-8627-f94b5a4e7c78-config-data") pod "rabbitmq-server-0" (UID: "ac55b716-d8fd-4628-8627-f94b5a4e7c78") : configmap "rabbitmq-config-data" not found Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.296231 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-vcf64"] Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.344205 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-v4kx6"] Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.352688 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-v4kx6"] Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.364794 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-fbd5dff98-mmhdt"] Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.365001 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/placement-fbd5dff98-mmhdt" podUID="a98c5227-c8a8-4cbc-8039-461dd14fbd5b" containerName="placement-log" containerID="cri-o://bdd46cdb21b22eb84d6fd244e570bc9831757537e8b49ef22aaf635a1aa83768" gracePeriod=30 Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.365118 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/placement-fbd5dff98-mmhdt" podUID="a98c5227-c8a8-4cbc-8039-461dd14fbd5b" containerName="placement-api" containerID="cri-o://bd40ce3a39fbdd59afdf9e62cf2e6f662f5e7a7d8647d8f283efcb8f7b52ebc2" gracePeriod=30 Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.382963 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-storage-0"] Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.383370 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="10edb733-8fab-4543-ad29-3568e3de5aea" containerName="account-server" containerID="cri-o://87f7b0a910a29230a9846bbbde9e7c30fd5ead2357d9f18d957764288be50fe1" gracePeriod=30 Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.383710 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="10edb733-8fab-4543-ad29-3568e3de5aea" containerName="swift-recon-cron" containerID="cri-o://2063896701b2ecd29a95f5f7c1e566bc1a196a26a0392d6e32c7bb743ea5dbde" gracePeriod=30 Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.383751 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="10edb733-8fab-4543-ad29-3568e3de5aea" containerName="rsync" containerID="cri-o://944fc0bf7c16ccd30559b80cdc699372748ca2f14ed832fbb7c9cbf4c5083aed" gracePeriod=30 Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.383779 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="10edb733-8fab-4543-ad29-3568e3de5aea" containerName="object-expirer" containerID="cri-o://8e36100266a8d13e7ab5ec75f4795cb2bfafd04327c6000ba4236e64554b079e" gracePeriod=30 Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.383822 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="10edb733-8fab-4543-ad29-3568e3de5aea" 
containerName="object-updater" containerID="cri-o://064c39cc0ed32246609bc4e80f5c679dc77d71ae4e0e7325c8b80942e4b6cd46" gracePeriod=30 Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.383852 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="10edb733-8fab-4543-ad29-3568e3de5aea" containerName="object-auditor" containerID="cri-o://a00e2bc91063a2dabb4c2ce5cfdc3e96683c6b04440f6eeeadc69adf7e0e8a23" gracePeriod=30 Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.383877 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="10edb733-8fab-4543-ad29-3568e3de5aea" containerName="object-replicator" containerID="cri-o://1ad6198d74b43bff1f98c2de1da53f4e18e93c848f58a003ec586b574d66eb1f" gracePeriod=30 Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.383905 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="10edb733-8fab-4543-ad29-3568e3de5aea" containerName="object-server" containerID="cri-o://7da72c8a88027dd1e413e53fd692c99fc03f3e37634323ae66f7e9f55370719e" gracePeriod=30 Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.383931 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="10edb733-8fab-4543-ad29-3568e3de5aea" containerName="container-updater" containerID="cri-o://8f45926ec3655a6641edb5a009493f355b1f6533cdddced2af79c34663d3397c" gracePeriod=30 Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.383957 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="10edb733-8fab-4543-ad29-3568e3de5aea" containerName="container-auditor" containerID="cri-o://576e4656d99cd9370e77241bb14cd66a4de3e94deaef7d304253b1816ec67dd5" gracePeriod=30 Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.383989 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="10edb733-8fab-4543-ad29-3568e3de5aea" containerName="container-replicator" containerID="cri-o://f2f9c003229a8c4bff074beb609ad6fba9bf0167b989fcddbe4ee674bb6a73bd" gracePeriod=30 Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.384044 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="10edb733-8fab-4543-ad29-3568e3de5aea" containerName="container-server" containerID="cri-o://6e25c21c1436e8d3c70552c967000697e095b890d75c613e94acbebcf601e385" gracePeriod=30 Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.384078 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="10edb733-8fab-4543-ad29-3568e3de5aea" containerName="account-reaper" containerID="cri-o://fc9635ec26b842f65146e97cb678f89fadc8730a998b6cc02a5c959a0601c4bf" gracePeriod=30 Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.384102 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="10edb733-8fab-4543-ad29-3568e3de5aea" containerName="account-auditor" containerID="cri-o://103cf0e411f4211e757684612d3274497733c55f59ba7dc63dd3ad2bfec99d18" gracePeriod=30 Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.384124 4558 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack-kuttl-tests/swift-storage-0" podUID="10edb733-8fab-4543-ad29-3568e3de5aea" containerName="account-replicator" containerID="cri-o://6d71dae893fc6a1962df504002d772c4cfe40ee0a96ad60259292b2cd410de30" gracePeriod=30 Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.468861 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-bb2e-account-create-update-gqjcg" Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.485075 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.488459 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-scheduler-0" podUID="1f9ceb49-977a-47b3-a1f3-10d68c96ab0f" containerName="cinder-scheduler" containerID="cri-o://ea575d2847a6cae8660fd45625a0be98895c2c6f684014b389a8ca1cc29ab736" gracePeriod=30 Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.488601 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-scheduler-0" podUID="1f9ceb49-977a-47b3-a1f3-10d68c96ab0f" containerName="probe" containerID="cri-o://26bbc4f2aaf805eb72b1daca0bda5b9b7dad475314c7f9cbfdfce43f6fb9bfbc" gracePeriod=30 Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.516364 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-kwtcx"] Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.517909 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-kwtcx" Jan 20 17:05:44 crc kubenswrapper[4558]: E0120 17:05:44.535401 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of a7862609eb045b31d1cf0e6477c1e8599625e319c33baf93c6ef05261a3fad12 is running failed: container process not found" containerID="a7862609eb045b31d1cf0e6477c1e8599625e319c33baf93c6ef05261a3fad12" cmd=["/usr/bin/pidof","ovsdb-server"] Jan 20 17:05:44 crc kubenswrapper[4558]: E0120 17:05:44.535652 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of a7862609eb045b31d1cf0e6477c1e8599625e319c33baf93c6ef05261a3fad12 is running failed: container process not found" containerID="a7862609eb045b31d1cf0e6477c1e8599625e319c33baf93c6ef05261a3fad12" cmd=["/usr/bin/pidof","ovsdb-server"] Jan 20 17:05:44 crc kubenswrapper[4558]: E0120 17:05:44.535872 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of a7862609eb045b31d1cf0e6477c1e8599625e319c33baf93c6ef05261a3fad12 is running failed: container process not found" containerID="a7862609eb045b31d1cf0e6477c1e8599625e319c33baf93c6ef05261a3fad12" cmd=["/usr/bin/pidof","ovsdb-server"] Jan 20 17:05:44 crc kubenswrapper[4558]: E0120 17:05:44.535908 4558 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of a7862609eb045b31d1cf0e6477c1e8599625e319c33baf93c6ef05261a3fad12 is running failed: container process not found" probeType="Readiness" pod="openstack-kuttl-tests/ovsdbserver-nb-0" podUID="2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf" containerName="ovsdbserver-nb" Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.536546 
4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-nb-0_2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf/ovsdbserver-nb/0.log" Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.536591 4558 generic.go:334] "Generic (PLEG): container finished" podID="2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf" containerID="6b320eb22df0758cdab9680050c01fd3a7007ed6383b4dc60f038863e7bdaedc" exitCode=2 Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.536607 4558 generic.go:334] "Generic (PLEG): container finished" podID="2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf" containerID="a7862609eb045b31d1cf0e6477c1e8599625e319c33baf93c6ef05261a3fad12" exitCode=143 Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.536670 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf","Type":"ContainerDied","Data":"6b320eb22df0758cdab9680050c01fd3a7007ed6383b4dc60f038863e7bdaedc"} Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.536694 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf","Type":"ContainerDied","Data":"a7862609eb045b31d1cf0e6477c1e8599625e319c33baf93c6ef05261a3fad12"} Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.542895 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-vcf64" event={"ID":"19189ead-bcd7-4806-be88-43cc27d5f202","Type":"ContainerStarted","Data":"5b8f3b8f32f06168109d738f3af28d5697b6014269d9fe52d5fa889ecc6735ad"} Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.548394 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-kwtcx"] Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.564444 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-sb-0_37e84078-defe-4c61-ac89-878165827bba/ovsdbserver-sb/0.log" Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.564483 4558 generic.go:334] "Generic (PLEG): container finished" podID="37e84078-defe-4c61-ac89-878165827bba" containerID="79b080f284ed7a9adbc1915885043c35d4710d77244162383b4e53aa16558542" exitCode=2 Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.564519 4558 generic.go:334] "Generic (PLEG): container finished" podID="37e84078-defe-4c61-ac89-878165827bba" containerID="b7a6ed7425377191f27273a9edf53dbd7c7ed2de00fbc9623792e4bfc9ffed21" exitCode=143 Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.564599 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"37e84078-defe-4c61-ac89-878165827bba","Type":"ContainerDied","Data":"79b080f284ed7a9adbc1915885043c35d4710d77244162383b4e53aa16558542"} Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.564635 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"37e84078-defe-4c61-ac89-878165827bba","Type":"ContainerDied","Data":"b7a6ed7425377191f27273a9edf53dbd7c7ed2de00fbc9623792e4bfc9ffed21"} Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.578912 4558 generic.go:334] "Generic (PLEG): container finished" podID="33be1904-bd58-48cc-806a-af1dc751717c" containerID="eb1f0784a2f2122587c3f2a6924c91413be937ed5c1f85ceae798d13f078873b" exitCode=2 Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.582117 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes 
dir" podUID="07a241d6-001d-4677-a411-58e84c8f21ba" path="/var/lib/kubelet/pods/07a241d6-001d-4677-a411-58e84c8f21ba/volumes" Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.584235 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1f7c84b8-e07d-4268-b6e3-112cc6b2558e" path="/var/lib/kubelet/pods/1f7c84b8-e07d-4268-b6e3-112cc6b2558e/volumes" Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.587643 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1f9b2eba-d2f3-4391-9420-3d797982db8a" path="/var/lib/kubelet/pods/1f9b2eba-d2f3-4391-9420-3d797982db8a/volumes" Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.591968 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4c82394b-0dc4-4a8a-ae40-dc46e41ddc68" path="/var/lib/kubelet/pods/4c82394b-0dc4-4a8a-ae40-dc46e41ddc68/volumes" Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.592725 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="506aa756-70ad-4884-8ea6-b356ee840e38" path="/var/lib/kubelet/pods/506aa756-70ad-4884-8ea6-b356ee840e38/volumes" Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.593221 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="507235e3-a321-4617-aba7-eec9b5ca1cf0" path="/var/lib/kubelet/pods/507235e3-a321-4617-aba7-eec9b5ca1cf0/volumes" Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.594419 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="52c6bb4f-39bf-4834-a35c-c7d3234e6799" path="/var/lib/kubelet/pods/52c6bb4f-39bf-4834-a35c-c7d3234e6799/volumes" Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.596040 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ef75305-05ce-4939-bed4-16aaecedff78" path="/var/lib/kubelet/pods/6ef75305-05ce-4939-bed4-16aaecedff78/volumes" Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.596883 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7644e6dc-6bdb-4e65-b6c3-be904f9bbf90" path="/var/lib/kubelet/pods/7644e6dc-6bdb-4e65-b6c3-be904f9bbf90/volumes" Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.598472 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9a51157e-3cf1-473a-b0e5-d9a8d4fd5f84" path="/var/lib/kubelet/pods/9a51157e-3cf1-473a-b0e5-d9a8d4fd5f84/volumes" Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.600982 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="be13d82f-4865-44dc-8a5a-4ecc7f2cabd9" path="/var/lib/kubelet/pods/be13d82f-4865-44dc-8a5a-4ecc7f2cabd9/volumes" Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.605320 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c1ca9fc0-7d35-467b-8b84-da1e4d6bfa49" path="/var/lib/kubelet/pods/c1ca9fc0-7d35-467b-8b84-da1e4d6bfa49/volumes" Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.607180 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c5521aa3-d4e7-4339-9c59-749808bacb09" path="/var/lib/kubelet/pods/c5521aa3-d4e7-4339-9c59-749808bacb09/volumes" Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.607693 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e2597e53-47a7-4b6a-9fc1-06d6561cd69f" path="/var/lib/kubelet/pods/e2597e53-47a7-4b6a-9fc1-06d6561cd69f/volumes" Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.614821 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e574e9d4-62db-4b0d-8dd3-caa44f7b4534" 
path="/var/lib/kubelet/pods/e574e9d4-62db-4b0d-8dd3-caa44f7b4534/volumes" Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.615806 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.615836 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"33be1904-bd58-48cc-806a-af1dc751717c","Type":"ContainerDied","Data":"eb1f0784a2f2122587c3f2a6924c91413be937ed5c1f85ceae798d13f078873b"} Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.615878 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-6cbc-account-create-update-n6t9t"] Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.615894 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/placement-6cbc-account-create-update-n6t9t"] Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.615907 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-db-create-x7dh9"] Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.615918 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/placement-db-create-x7dh9"] Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.616158 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-api-0" podUID="2ba42f6a-1ade-45ae-bdc4-117ad2fa866b" containerName="cinder-api-log" containerID="cri-o://a818a123ac4745b3c52a6e67fb8633fbe730b34a827cef46353c35fb329810d7" gracePeriod=30 Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.616330 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-api-0" podUID="2ba42f6a-1ade-45ae-bdc4-117ad2fa866b" containerName="cinder-api" containerID="cri-o://e91b0886ad44950f7bbfb790fe06f0f90a6fc3aaa9d7fe9e510d991406a8e51c" gracePeriod=30 Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.620366 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.620538 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="20ba561a-58e7-459f-ba28-ed0b68cdab9b" containerName="glance-log" containerID="cri-o://225fabc2e274bc055279016bab31443db7f376181931205b6688497f431e61fd" gracePeriod=30 Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.620628 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="20ba561a-58e7-459f-ba28-ed0b68cdab9b" containerName="glance-httpd" containerID="cri-o://148dd9de98eda3e5b9625476910ce41f12b5496fe3cc4cb92bbf3686a771a8e5" gracePeriod=30 Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.647315 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-db-create-k8g6l"] Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.665229 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-db-create-k8g6l"] Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.665426 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/25a5f425-926a-40eb-8f77-fcc3cdd9880c-config\") pod \"dnsmasq-dnsmasq-84b9f45d47-kwtcx\" (UID: 
\"25a5f425-926a-40eb-8f77-fcc3cdd9880c\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-kwtcx" Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.672002 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/25a5f425-926a-40eb-8f77-fcc3cdd9880c-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-84b9f45d47-kwtcx\" (UID: \"25a5f425-926a-40eb-8f77-fcc3cdd9880c\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-kwtcx" Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.672309 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pfk8t\" (UniqueName: \"kubernetes.io/projected/25a5f425-926a-40eb-8f77-fcc3cdd9880c-kube-api-access-pfk8t\") pod \"dnsmasq-dnsmasq-84b9f45d47-kwtcx\" (UID: \"25a5f425-926a-40eb-8f77-fcc3cdd9880c\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-kwtcx" Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.740198 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-db-sync-tvtwc"] Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.773811 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-db-sync-tvtwc"] Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.775763 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/25a5f425-926a-40eb-8f77-fcc3cdd9880c-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-84b9f45d47-kwtcx\" (UID: \"25a5f425-926a-40eb-8f77-fcc3cdd9880c\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-kwtcx" Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.776611 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/25a5f425-926a-40eb-8f77-fcc3cdd9880c-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-84b9f45d47-kwtcx\" (UID: \"25a5f425-926a-40eb-8f77-fcc3cdd9880c\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-kwtcx" Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.782078 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pfk8t\" (UniqueName: \"kubernetes.io/projected/25a5f425-926a-40eb-8f77-fcc3cdd9880c-kube-api-access-pfk8t\") pod \"dnsmasq-dnsmasq-84b9f45d47-kwtcx\" (UID: \"25a5f425-926a-40eb-8f77-fcc3cdd9880c\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-kwtcx" Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.782138 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/25a5f425-926a-40eb-8f77-fcc3cdd9880c-config\") pod \"dnsmasq-dnsmasq-84b9f45d47-kwtcx\" (UID: \"25a5f425-926a-40eb-8f77-fcc3cdd9880c\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-kwtcx" Jan 20 17:05:44 crc kubenswrapper[4558]: E0120 17:05:44.782665 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 20 17:05:44 crc kubenswrapper[4558]: E0120 17:05:44.782780 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/ac55b716-d8fd-4628-8627-f94b5a4e7c78-config-data podName:ac55b716-d8fd-4628-8627-f94b5a4e7c78 nodeName:}" failed. No retries permitted until 2026-01-20 17:05:45.782759925 +0000 UTC m=+1439.543097892 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/ac55b716-d8fd-4628-8627-f94b5a4e7c78-config-data") pod "rabbitmq-server-0" (UID: "ac55b716-d8fd-4628-8627-f94b5a4e7c78") : configmap "rabbitmq-config-data" not found Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.802091 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/25a5f425-926a-40eb-8f77-fcc3cdd9880c-config\") pod \"dnsmasq-dnsmasq-84b9f45d47-kwtcx\" (UID: \"25a5f425-926a-40eb-8f77-fcc3cdd9880c\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-kwtcx" Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.827598 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pfk8t\" (UniqueName: \"kubernetes.io/projected/25a5f425-926a-40eb-8f77-fcc3cdd9880c-kube-api-access-pfk8t\") pod \"dnsmasq-dnsmasq-84b9f45d47-kwtcx\" (UID: \"25a5f425-926a-40eb-8f77-fcc3cdd9880c\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-kwtcx" Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.862618 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-bfd5-account-create-update-rvnw4"] Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.878249 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.878670 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="aa387033-dc08-48f4-bf56-06a7f316423c" containerName="glance-log" containerID="cri-o://cb8be596c569251416c1e084cf32b390ad320911bfbd93edd669cf0bc7d40234" gracePeriod=30 Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.879098 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="aa387033-dc08-48f4-bf56-06a7f316423c" containerName="glance-httpd" containerID="cri-o://32b6c39544892ca39cef55614e1cf5d22b40b7f421e0e33ba0fd15c7434cf00e" gracePeriod=30 Jan 20 17:05:44 crc kubenswrapper[4558]: W0120 17:05:44.912288 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3ae31408_2eb5_445e_8f66_f0f0a6a7f9f2.slice/crio-5c77e694bb7361cf18c1aff6a8f9c39181215afe810852c20b37cfeef865d9fe WatchSource:0}: Error finding container 5c77e694bb7361cf18c1aff6a8f9c39181215afe810852c20b37cfeef865d9fe: Status 404 returned error can't find the container with id 5c77e694bb7361cf18c1aff6a8f9c39181215afe810852c20b37cfeef865d9fe Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.919861 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-1b9c-account-create-update-9w5vq"] Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.920113 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-kwtcx" Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.947264 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.966543 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.967191 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="38786f1c-754c-488d-8a13-aad7001ad778" containerName="nova-scheduler-scheduler" containerID="cri-o://95f19d7a9e9522b1398a79cebf0602dd58a2e2f0ffdd88865efd947c83bbb895" gracePeriod=30 Jan 20 17:05:44 crc kubenswrapper[4558]: E0120 17:05:44.972225 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:05:44 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:05:44 crc kubenswrapper[4558]: Jan 20 17:05:44 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:05:44 crc kubenswrapper[4558]: Jan 20 17:05:44 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:05:44 crc kubenswrapper[4558]: Jan 20 17:05:44 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:05:44 crc kubenswrapper[4558]: Jan 20 17:05:44 crc kubenswrapper[4558]: if [ -n "nova_api" ]; then Jan 20 17:05:44 crc kubenswrapper[4558]: GRANT_DATABASE="nova_api" Jan 20 17:05:44 crc kubenswrapper[4558]: else Jan 20 17:05:44 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:05:44 crc kubenswrapper[4558]: fi Jan 20 17:05:44 crc kubenswrapper[4558]: Jan 20 17:05:44 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:05:44 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:05:44 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:05:44 crc kubenswrapper[4558]: # 3. 
create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:05:44 crc kubenswrapper[4558]: # support updates Jan 20 17:05:44 crc kubenswrapper[4558]: Jan 20 17:05:44 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.980222 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-103a-account-create-update-whkzm"] Jan 20 17:05:44 crc kubenswrapper[4558]: E0120 17:05:44.980597 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"nova-api-db-secret\\\" not found\"" pod="openstack-kuttl-tests/nova-api-1b9c-account-create-update-9w5vq" podUID="3ae31408-2eb5-445e-8f66-f0f0a6a7f9f2" Jan 20 17:05:44 crc kubenswrapper[4558]: E0120 17:05:44.984836 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:05:44 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:05:44 crc kubenswrapper[4558]: Jan 20 17:05:44 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:05:44 crc kubenswrapper[4558]: Jan 20 17:05:44 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:05:44 crc kubenswrapper[4558]: Jan 20 17:05:44 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:05:44 crc kubenswrapper[4558]: Jan 20 17:05:44 crc kubenswrapper[4558]: if [ -n "glance" ]; then Jan 20 17:05:44 crc kubenswrapper[4558]: GRANT_DATABASE="glance" Jan 20 17:05:44 crc kubenswrapper[4558]: else Jan 20 17:05:44 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:05:44 crc kubenswrapper[4558]: fi Jan 20 17:05:44 crc kubenswrapper[4558]: Jan 20 17:05:44 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:05:44 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:05:44 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:05:44 crc kubenswrapper[4558]: # 3. 
create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:05:44 crc kubenswrapper[4558]: # support updates Jan 20 17:05:44 crc kubenswrapper[4558]: Jan 20 17:05:44 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:05:44 crc kubenswrapper[4558]: E0120 17:05:44.987894 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"glance-db-secret\\\" not found\"" pod="openstack-kuttl-tests/glance-103a-account-create-update-whkzm" podUID="f6a8779d-bb69-43c5-96d1-a7669b5dd9ea" Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.994638 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-7fdccbb69d-fng8p"] Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.994929 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-7fdccbb69d-fng8p" podUID="fa011f19-1245-4ae8-ae2b-8773036498b3" containerName="neutron-api" containerID="cri-o://edc8f84d18297fb81c87a4195f78806a1f8712a6680f4253cb7972ff04e84213" gracePeriod=30 Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.995863 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef319bad-004a-4140-b9a3-e34b376460da-combined-ca-bundle\") pod \"barbican-keystone-listener-767d5d98bd-zk227\" (UID: \"ef319bad-004a-4140-b9a3-e34b376460da\") " pod="openstack-kuttl-tests/barbican-keystone-listener-767d5d98bd-zk227" Jan 20 17:05:44 crc kubenswrapper[4558]: E0120 17:05:44.996019 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:05:44 crc kubenswrapper[4558]: E0120 17:05:44.996062 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ef319bad-004a-4140-b9a3-e34b376460da-combined-ca-bundle podName:ef319bad-004a-4140-b9a3-e34b376460da nodeName:}" failed. No retries permitted until 2026-01-20 17:05:46.996049445 +0000 UTC m=+1440.756387412 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/ef319bad-004a-4140-b9a3-e34b376460da-combined-ca-bundle") pod "barbican-keystone-listener-767d5d98bd-zk227" (UID: "ef319bad-004a-4140-b9a3-e34b376460da") : secret "combined-ca-bundle" not found Jan 20 17:05:44 crc kubenswrapper[4558]: I0120 17:05:44.996247 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-7fdccbb69d-fng8p" podUID="fa011f19-1245-4ae8-ae2b-8773036498b3" containerName="neutron-httpd" containerID="cri-o://a508351390349a4d085a4b101af8917b6baf005aac6d69c3b56e8bb73832583f" gracePeriod=30 Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.001379 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-db-create-bj62z"] Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.015512 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-sb-0_37e84078-defe-4c61-ac89-878165827bba/ovsdbserver-sb/0.log" Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.015581 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.038240 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-db-create-bj62z"] Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.051508 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.051715 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="1fccb241-c75e-4cec-b3c6-3855bd6c1161" containerName="nova-metadata-log" containerID="cri-o://e6a99988097b2873c0b2ced77f6588f66ac0fb5bb22b3ef8e1e7a52ca42e4820" gracePeriod=30 Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.052066 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="1fccb241-c75e-4cec-b3c6-3855bd6c1161" containerName="nova-metadata-metadata" containerID="cri-o://1e838eb49cb5fab8df3c83e562edc9dac0c018daeb2274638eb55d7398246708" gracePeriod=30 Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.060338 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-proxy-595b48fd98-kbjv6"] Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.060524 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-proxy-595b48fd98-kbjv6" podUID="32a3bcd7-25d6-45f5-8ce6-66949357504c" containerName="proxy-httpd" containerID="cri-o://1e67af77bc964f499418d3c854f0b6163e2f81ff3e6521a66f14aa8ee4590e1c" gracePeriod=30 Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.060625 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-proxy-595b48fd98-kbjv6" podUID="32a3bcd7-25d6-45f5-8ce6-66949357504c" containerName="proxy-server" containerID="cri-o://b2c58a9c1693c20653debec2694a95f53b8b1e5e7675f997990f41753ef39a46" gracePeriod=30 Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.085336 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-103a-account-create-update-whkzm"] Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.106271 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mqlxw\" (UniqueName: \"kubernetes.io/projected/37e84078-defe-4c61-ac89-878165827bba-kube-api-access-mqlxw\") pod \"37e84078-defe-4c61-ac89-878165827bba\" (UID: \"37e84078-defe-4c61-ac89-878165827bba\") " Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.106321 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/37e84078-defe-4c61-ac89-878165827bba-metrics-certs-tls-certs\") pod \"37e84078-defe-4c61-ac89-878165827bba\" (UID: \"37e84078-defe-4c61-ac89-878165827bba\") " Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.106341 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/37e84078-defe-4c61-ac89-878165827bba-ovsdb-rundir\") pod \"37e84078-defe-4c61-ac89-878165827bba\" (UID: \"37e84078-defe-4c61-ac89-878165827bba\") " Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.106359 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/37e84078-defe-4c61-ac89-878165827bba-combined-ca-bundle\") pod \"37e84078-defe-4c61-ac89-878165827bba\" (UID: \"37e84078-defe-4c61-ac89-878165827bba\") " Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.106393 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/37e84078-defe-4c61-ac89-878165827bba-ovsdbserver-sb-tls-certs\") pod \"37e84078-defe-4c61-ac89-878165827bba\" (UID: \"37e84078-defe-4c61-ac89-878165827bba\") " Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.106419 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/37e84078-defe-4c61-ac89-878165827bba-scripts\") pod \"37e84078-defe-4c61-ac89-878165827bba\" (UID: \"37e84078-defe-4c61-ac89-878165827bba\") " Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.106443 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/37e84078-defe-4c61-ac89-878165827bba-config\") pod \"37e84078-defe-4c61-ac89-878165827bba\" (UID: \"37e84078-defe-4c61-ac89-878165827bba\") " Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.106469 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-sb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage19-crc\") pod \"37e84078-defe-4c61-ac89-878165827bba\" (UID: \"37e84078-defe-4c61-ac89-878165827bba\") " Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.106765 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0fcabe47-75cd-4bff-ba51-8a65a23b3f1f-combined-ca-bundle\") pod \"barbican-worker-57fd8b7745-hswtl\" (UID: \"0fcabe47-75cd-4bff-ba51-8a65a23b3f1f\") " pod="openstack-kuttl-tests/barbican-worker-57fd8b7745-hswtl" Jan 20 17:05:45 crc kubenswrapper[4558]: E0120 17:05:45.106919 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:05:45 crc kubenswrapper[4558]: E0120 17:05:45.106959 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0fcabe47-75cd-4bff-ba51-8a65a23b3f1f-combined-ca-bundle podName:0fcabe47-75cd-4bff-ba51-8a65a23b3f1f nodeName:}" failed. No retries permitted until 2026-01-20 17:05:47.106946881 +0000 UTC m=+1440.867284848 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/0fcabe47-75cd-4bff-ba51-8a65a23b3f1f-combined-ca-bundle") pod "barbican-worker-57fd8b7745-hswtl" (UID: "0fcabe47-75cd-4bff-ba51-8a65a23b3f1f") : secret "combined-ca-bundle" not found Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.110714 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/37e84078-defe-4c61-ac89-878165827bba-scripts" (OuterVolumeSpecName: "scripts") pod "37e84078-defe-4c61-ac89-878165827bba" (UID: "37e84078-defe-4c61-ac89-878165827bba"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:05:45 crc kubenswrapper[4558]: E0120 17:05:45.111829 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Jan 20 17:05:45 crc kubenswrapper[4558]: E0120 17:05:45.111891 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/0bfc3458-cc0f-4bea-9794-52c5e81fe055-config-data podName:0bfc3458-cc0f-4bea-9794-52c5e81fe055 nodeName:}" failed. No retries permitted until 2026-01-20 17:05:47.111874617 +0000 UTC m=+1440.872212584 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/0bfc3458-cc0f-4bea-9794-52c5e81fe055-config-data") pod "rabbitmq-cell1-server-0" (UID: "0bfc3458-cc0f-4bea-9794-52c5e81fe055") : configmap "rabbitmq-cell1-config-data" not found Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.113276 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/37e84078-defe-4c61-ac89-878165827bba-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "37e84078-defe-4c61-ac89-878165827bba" (UID: "37e84078-defe-4c61-ac89-878165827bba"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.115760 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/37e84078-defe-4c61-ac89-878165827bba-config" (OuterVolumeSpecName: "config") pod "37e84078-defe-4c61-ac89-878165827bba" (UID: "37e84078-defe-4c61-ac89-878165827bba"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.117473 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/37e84078-defe-4c61-ac89-878165827bba-kube-api-access-mqlxw" (OuterVolumeSpecName: "kube-api-access-mqlxw") pod "37e84078-defe-4c61-ac89-878165827bba" (UID: "37e84078-defe-4c61-ac89-878165827bba"). InnerVolumeSpecName "kube-api-access-mqlxw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.123404 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.123628 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="c7187d14-f25b-4344-bf36-7d56d8e1b79c" containerName="ceilometer-central-agent" containerID="cri-o://97f14c8dcf0f7e744564168d9a5f79f08919d2ff3285eef66b5001b20e3c69d7" gracePeriod=30 Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.123909 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="c7187d14-f25b-4344-bf36-7d56d8e1b79c" containerName="proxy-httpd" containerID="cri-o://c6607b53445fad5655dbb034a5a76d6a87a2e8a96bd8d11ed9d596a7971d2d55" gracePeriod=30 Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.123948 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="c7187d14-f25b-4344-bf36-7d56d8e1b79c" containerName="sg-core" containerID="cri-o://2af5466604135693231e3743b389224d86d75c0a2d399c8a0f6df2491983b42f" gracePeriod=30 Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.123980 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="c7187d14-f25b-4344-bf36-7d56d8e1b79c" containerName="ceilometer-notification-agent" containerID="cri-o://469eb7fe8ee3af85b606966788c6c91603f071d07ac40bc36607962b279dc510" gracePeriod=30 Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.126829 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.127023 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="51e263b2-d42e-46fa-99e1-e0c5aa23bcf5" containerName="nova-api-log" containerID="cri-o://762c3b53bff8276c53ff1ba0f6fcbbc8ea3579b91262bc868d821bf5b7740f32" gracePeriod=30 Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.127150 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="51e263b2-d42e-46fa-99e1-e0c5aa23bcf5" containerName="nova-api-api" containerID="cri-o://5b5134f4161398c21fc398c89b30ad1744c35cb876a3e5afd52d7adcc9031cd4" gracePeriod=30 Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.178395 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage19-crc" (OuterVolumeSpecName: "ovndbcluster-sb-etc-ovn") pod "37e84078-defe-4c61-ac89-878165827bba" (UID: "37e84078-defe-4c61-ac89-878165827bba"). InnerVolumeSpecName "local-storage19-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.213515 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/openstack-cell1-galera-0" podUID="6df6ca0e-78e9-4248-8cbe-b9934e0ad090" containerName="galera" containerID="cri-o://3ce6116ef9c85330a51ece86b35504a1ac4b48ba623b6c363ad2b015e33fec35" gracePeriod=30 Jan 20 17:05:45 crc kubenswrapper[4558]: E0120 17:05:45.213765 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:05:45 crc kubenswrapper[4558]: E0120 17:05:45.213816 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b9c2a3b1-71ed-4612-8cd0-22e396cd622c-combined-ca-bundle podName:b9c2a3b1-71ed-4612-8cd0-22e396cd622c nodeName:}" failed. No retries permitted until 2026-01-20 17:05:47.213800925 +0000 UTC m=+1440.974138892 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/b9c2a3b1-71ed-4612-8cd0-22e396cd622c-combined-ca-bundle") pod "kube-state-metrics-0" (UID: "b9c2a3b1-71ed-4612-8cd0-22e396cd622c") : secret "combined-ca-bundle" not found Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.217536 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage19-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage19-crc\") on node \"crc\" " Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.217594 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mqlxw\" (UniqueName: \"kubernetes.io/projected/37e84078-defe-4c61-ac89-878165827bba-kube-api-access-mqlxw\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.217607 4558 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/37e84078-defe-4c61-ac89-878165827bba-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.217616 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/37e84078-defe-4c61-ac89-878165827bba-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.217625 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/37e84078-defe-4c61-ac89-878165827bba-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.263694 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/37e84078-defe-4c61-ac89-878165827bba-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "37e84078-defe-4c61-ac89-878165827bba" (UID: "37e84078-defe-4c61-ac89-878165827bba"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.277634 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage19-crc" (UniqueName: "kubernetes.io/local-volume/local-storage19-crc") on node "crc" Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.289539 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-aebd-account-create-update-z79l4"] Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.318254 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/37e84078-defe-4c61-ac89-878165827bba-ovsdbserver-sb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-sb-tls-certs") pod "37e84078-defe-4c61-ac89-878165827bba" (UID: "37e84078-defe-4c61-ac89-878165827bba"). InnerVolumeSpecName "ovsdbserver-sb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.322728 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37e84078-defe-4c61-ac89-878165827bba-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.322768 4558 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/37e84078-defe-4c61-ac89-878165827bba-ovsdbserver-sb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.322787 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage19-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage19-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:45 crc kubenswrapper[4558]: E0120 17:05:45.329154 4558 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfa011f19_1245_4ae8_ae2b_8773036498b3.slice/crio-a508351390349a4d085a4b101af8917b6baf005aac6d69c3b56e8bb73832583f.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod51e263b2_d42e_46fa_99e1_e0c5aa23bcf5.slice/crio-762c3b53bff8276c53ff1ba0f6fcbbc8ea3579b91262bc868d821bf5b7740f32.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc7187d14_f25b_4344_bf36_7d56d8e1b79c.slice/crio-2af5466604135693231e3743b389224d86d75c0a2d399c8a0f6df2491983b42f.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1f9ceb49_977a_47b3_a1f3_10d68c96ab0f.slice/crio-26bbc4f2aaf805eb72b1daca0bda5b9b7dad475314c7f9cbfdfce43f6fb9bfbc.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1f9ceb49_977a_47b3_a1f3_10d68c96ab0f.slice/crio-conmon-26bbc4f2aaf805eb72b1daca0bda5b9b7dad475314c7f9cbfdfce43f6fb9bfbc.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod32a3bcd7_25d6_45f5_8ce6_66949357504c.slice/crio-1e67af77bc964f499418d3c854f0b6163e2f81ff3e6521a66f14aa8ee4590e1c.scope\": RecentStats: unable to find data in memory cache], 
[\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc7187d14_f25b_4344_bf36_7d56d8e1b79c.slice/crio-c6607b53445fad5655dbb034a5a76d6a87a2e8a96bd8d11ed9d596a7971d2d55.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod32a3bcd7_25d6_45f5_8ce6_66949357504c.slice/crio-conmon-1e67af77bc964f499418d3c854f0b6163e2f81ff3e6521a66f14aa8ee4590e1c.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfa011f19_1245_4ae8_ae2b_8773036498b3.slice/crio-conmon-a508351390349a4d085a4b101af8917b6baf005aac6d69c3b56e8bb73832583f.scope\": RecentStats: unable to find data in memory cache]" Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.337918 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-db-create-fckx4"] Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.348867 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell0-db-create-fckx4"] Jan 20 17:05:45 crc kubenswrapper[4558]: E0120 17:05:45.350060 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="95f19d7a9e9522b1398a79cebf0602dd58a2e2f0ffdd88865efd947c83bbb895" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.356598 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-db-create-7ns56"] Jan 20 17:05:45 crc kubenswrapper[4558]: E0120 17:05:45.362554 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="95f19d7a9e9522b1398a79cebf0602dd58a2e2f0ffdd88865efd947c83bbb895" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:05:45 crc kubenswrapper[4558]: E0120 17:05:45.376342 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="95f19d7a9e9522b1398a79cebf0602dd58a2e2f0ffdd88865efd947c83bbb895" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:05:45 crc kubenswrapper[4558]: E0120 17:05:45.376475 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="38786f1c-754c-488d-8a13-aad7001ad778" containerName="nova-scheduler-scheduler" Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.383499 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-db-create-7ns56"] Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.383681 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/37e84078-defe-4c61-ac89-878165827bba-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "37e84078-defe-4c61-ac89-878165827bba" (UID: "37e84078-defe-4c61-ac89-878165827bba"). InnerVolumeSpecName "metrics-certs-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.392663 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-8d7e-account-create-update-jzmzm"] Jan 20 17:05:45 crc kubenswrapper[4558]: E0120 17:05:45.420235 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:05:45 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:05:45 crc kubenswrapper[4558]: Jan 20 17:05:45 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:05:45 crc kubenswrapper[4558]: Jan 20 17:05:45 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:05:45 crc kubenswrapper[4558]: Jan 20 17:05:45 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:05:45 crc kubenswrapper[4558]: Jan 20 17:05:45 crc kubenswrapper[4558]: if [ -n "cinder" ]; then Jan 20 17:05:45 crc kubenswrapper[4558]: GRANT_DATABASE="cinder" Jan 20 17:05:45 crc kubenswrapper[4558]: else Jan 20 17:05:45 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:05:45 crc kubenswrapper[4558]: fi Jan 20 17:05:45 crc kubenswrapper[4558]: Jan 20 17:05:45 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:05:45 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:05:45 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:05:45 crc kubenswrapper[4558]: # 3. create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:05:45 crc kubenswrapper[4558]: # support updates Jan 20 17:05:45 crc kubenswrapper[4558]: Jan 20 17:05:45 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:05:45 crc kubenswrapper[4558]: E0120 17:05:45.420696 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:05:45 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:05:45 crc kubenswrapper[4558]: Jan 20 17:05:45 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:05:45 crc kubenswrapper[4558]: Jan 20 17:05:45 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:05:45 crc kubenswrapper[4558]: Jan 20 17:05:45 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:05:45 crc kubenswrapper[4558]: Jan 20 17:05:45 crc kubenswrapper[4558]: if [ -n "nova_cell1" ]; then Jan 20 17:05:45 crc kubenswrapper[4558]: GRANT_DATABASE="nova_cell1" Jan 20 17:05:45 crc kubenswrapper[4558]: else Jan 20 17:05:45 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:05:45 crc kubenswrapper[4558]: fi Jan 20 17:05:45 crc kubenswrapper[4558]: Jan 20 17:05:45 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:05:45 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:05:45 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:05:45 crc kubenswrapper[4558]: # 3. 
create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:05:45 crc kubenswrapper[4558]: # support updates Jan 20 17:05:45 crc kubenswrapper[4558]: Jan 20 17:05:45 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:05:45 crc kubenswrapper[4558]: E0120 17:05:45.421386 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:05:45 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:05:45 crc kubenswrapper[4558]: Jan 20 17:05:45 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:05:45 crc kubenswrapper[4558]: Jan 20 17:05:45 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:05:45 crc kubenswrapper[4558]: Jan 20 17:05:45 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:05:45 crc kubenswrapper[4558]: Jan 20 17:05:45 crc kubenswrapper[4558]: if [ -n "neutron" ]; then Jan 20 17:05:45 crc kubenswrapper[4558]: GRANT_DATABASE="neutron" Jan 20 17:05:45 crc kubenswrapper[4558]: else Jan 20 17:05:45 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:05:45 crc kubenswrapper[4558]: fi Jan 20 17:05:45 crc kubenswrapper[4558]: Jan 20 17:05:45 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:05:45 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:05:45 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:05:45 crc kubenswrapper[4558]: # 3. create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:05:45 crc kubenswrapper[4558]: # support updates Jan 20 17:05:45 crc kubenswrapper[4558]: Jan 20 17:05:45 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:05:45 crc kubenswrapper[4558]: E0120 17:05:45.421456 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"cinder-db-secret\\\" not found\"" pod="openstack-kuttl-tests/cinder-bfd5-account-create-update-rvnw4" podUID="79c1324d-aa7f-4add-a2c3-12f6d4c6216f" Jan 20 17:05:45 crc kubenswrapper[4558]: E0120 17:05:45.421646 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:05:45 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:05:45 crc kubenswrapper[4558]: Jan 20 17:05:45 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:05:45 crc kubenswrapper[4558]: Jan 20 17:05:45 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:05:45 crc kubenswrapper[4558]: Jan 20 17:05:45 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:05:45 crc kubenswrapper[4558]: Jan 20 17:05:45 crc kubenswrapper[4558]: if [ -n "nova_cell0" ]; then Jan 20 17:05:45 crc kubenswrapper[4558]: GRANT_DATABASE="nova_cell0" Jan 20 17:05:45 crc kubenswrapper[4558]: else Jan 20 17:05:45 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:05:45 crc 
kubenswrapper[4558]: fi Jan 20 17:05:45 crc kubenswrapper[4558]: Jan 20 17:05:45 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:05:45 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:05:45 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:05:45 crc kubenswrapper[4558]: # 3. create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:05:45 crc kubenswrapper[4558]: # support updates Jan 20 17:05:45 crc kubenswrapper[4558]: Jan 20 17:05:45 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:05:45 crc kubenswrapper[4558]: E0120 17:05:45.422068 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"nova-cell1-db-secret\\\" not found\"" pod="openstack-kuttl-tests/nova-cell1-8d7e-account-create-update-jzmzm" podUID="43e20233-57f1-4b80-8717-1668dee2a884" Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.423177 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-db-create-bp498"] Jan 20 17:05:45 crc kubenswrapper[4558]: E0120 17:05:45.423250 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"neutron-db-secret\\\" not found\"" pod="openstack-kuttl-tests/neutron-7436-account-create-update-hpxxl" podUID="1fbb53e2-e2a2-4d62-a392-027b1f3f3232" Jan 20 17:05:45 crc kubenswrapper[4558]: E0120 17:05:45.423971 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"nova-cell0-db-secret\\\" not found\"" pod="openstack-kuttl-tests/nova-cell0-aebd-account-create-update-z79l4" podUID="3e05451a-8619-4559-9648-cc40e9ea5bb0" Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.424529 4558 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/37e84078-defe-4c61-ac89-878165827bba-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.453748 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-db-create-bp498"] Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.495349 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-7436-account-create-update-hpxxl"] Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.505285 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-db-create-vzcfm"] Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.515905 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-db-create-vzcfm"] Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.523252 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-1b9c-account-create-update-9w5vq"] Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.554903 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.555199 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" 
podUID="1176545e-611b-4fc7-8b03-e91ee7813fd3" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://6b8fce6bafaad0a422a208a3cb35d584fc27bf9b3ba41bf28947f865c272fe06" gracePeriod=30 Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.562462 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-0"] Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.577317 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-db-create-gwczq"] Jan 20 17:05:45 crc kubenswrapper[4558]: E0120 17:05:45.589536 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:05:45 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:05:45 crc kubenswrapper[4558]: Jan 20 17:05:45 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:05:45 crc kubenswrapper[4558]: Jan 20 17:05:45 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:05:45 crc kubenswrapper[4558]: Jan 20 17:05:45 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:05:45 crc kubenswrapper[4558]: Jan 20 17:05:45 crc kubenswrapper[4558]: if [ -n "barbican" ]; then Jan 20 17:05:45 crc kubenswrapper[4558]: GRANT_DATABASE="barbican" Jan 20 17:05:45 crc kubenswrapper[4558]: else Jan 20 17:05:45 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:05:45 crc kubenswrapper[4558]: fi Jan 20 17:05:45 crc kubenswrapper[4558]: Jan 20 17:05:45 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:05:45 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:05:45 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:05:45 crc kubenswrapper[4558]: # 3. create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:05:45 crc kubenswrapper[4558]: # support updates Jan 20 17:05:45 crc kubenswrapper[4558]: Jan 20 17:05:45 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:05:45 crc kubenswrapper[4558]: E0120 17:05:45.591995 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"barbican-db-secret\\\" not found\"" pod="openstack-kuttl-tests/barbican-bb2e-account-create-update-gqjcg" podUID="d4457629-a457-4a4c-b285-eab441f8d319" Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.593348 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-db-create-gwczq"] Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.613286 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-nb-0_2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf/ovsdbserver-nb/0.log" Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.613365 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.614221 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-aebd-account-create-update-z79l4" event={"ID":"3e05451a-8619-4559-9648-cc40e9ea5bb0","Type":"ContainerStarted","Data":"007eec0dd27bcdbed48c82b64f44f510ea69933870ac097d391773eeee037bf6"} Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.617255 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-bb2e-account-create-update-gqjcg"] Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.633109 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.633426 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/kube-state-metrics-0" podUID="b9c2a3b1-71ed-4612-8cd0-22e396cd622c" containerName="kube-state-metrics" containerID="cri-o://d79bd7c00910cb1ea357df6713d24c89de9eda30118ccacbc0177383e7169dd9" gracePeriod=30 Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.649416 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" podUID="0bfc3458-cc0f-4bea-9794-52c5e81fe055" containerName="rabbitmq" containerID="cri-o://604aea968b2534ec5bcdc7b53dbf247a7fe69e766dfd7c01c1c3821d16d1ac11" gracePeriod=604800 Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.654499 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-bfd5-account-create-update-rvnw4"] Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.660785 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-8d7e-account-create-update-jzmzm"] Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.663015 4558 generic.go:334] "Generic (PLEG): container finished" podID="10edb733-8fab-4543-ad29-3568e3de5aea" containerID="944fc0bf7c16ccd30559b80cdc699372748ca2f14ed832fbb7c9cbf4c5083aed" exitCode=0 Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.663033 4558 generic.go:334] "Generic (PLEG): container finished" podID="10edb733-8fab-4543-ad29-3568e3de5aea" containerID="8e36100266a8d13e7ab5ec75f4795cb2bfafd04327c6000ba4236e64554b079e" exitCode=0 Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.663040 4558 generic.go:334] "Generic (PLEG): container finished" podID="10edb733-8fab-4543-ad29-3568e3de5aea" containerID="064c39cc0ed32246609bc4e80f5c679dc77d71ae4e0e7325c8b80942e4b6cd46" exitCode=0 Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.663048 4558 generic.go:334] "Generic (PLEG): container finished" podID="10edb733-8fab-4543-ad29-3568e3de5aea" containerID="a00e2bc91063a2dabb4c2ce5cfdc3e96683c6b04440f6eeeadc69adf7e0e8a23" exitCode=0 Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.663054 4558 generic.go:334] "Generic (PLEG): container finished" podID="10edb733-8fab-4543-ad29-3568e3de5aea" containerID="1ad6198d74b43bff1f98c2de1da53f4e18e93c848f58a003ec586b574d66eb1f" exitCode=0 Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.663060 4558 generic.go:334] "Generic (PLEG): container finished" podID="10edb733-8fab-4543-ad29-3568e3de5aea" containerID="7da72c8a88027dd1e413e53fd692c99fc03f3e37634323ae66f7e9f55370719e" exitCode=0 Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.663066 4558 generic.go:334] "Generic (PLEG): container finished" 
podID="10edb733-8fab-4543-ad29-3568e3de5aea" containerID="8f45926ec3655a6641edb5a009493f355b1f6533cdddced2af79c34663d3397c" exitCode=0 Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.663072 4558 generic.go:334] "Generic (PLEG): container finished" podID="10edb733-8fab-4543-ad29-3568e3de5aea" containerID="576e4656d99cd9370e77241bb14cd66a4de3e94deaef7d304253b1816ec67dd5" exitCode=0 Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.663077 4558 generic.go:334] "Generic (PLEG): container finished" podID="10edb733-8fab-4543-ad29-3568e3de5aea" containerID="f2f9c003229a8c4bff074beb609ad6fba9bf0167b989fcddbe4ee674bb6a73bd" exitCode=0 Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.663082 4558 generic.go:334] "Generic (PLEG): container finished" podID="10edb733-8fab-4543-ad29-3568e3de5aea" containerID="6e25c21c1436e8d3c70552c967000697e095b890d75c613e94acbebcf601e385" exitCode=0 Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.663088 4558 generic.go:334] "Generic (PLEG): container finished" podID="10edb733-8fab-4543-ad29-3568e3de5aea" containerID="fc9635ec26b842f65146e97cb678f89fadc8730a998b6cc02a5c959a0601c4bf" exitCode=0 Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.663093 4558 generic.go:334] "Generic (PLEG): container finished" podID="10edb733-8fab-4543-ad29-3568e3de5aea" containerID="103cf0e411f4211e757684612d3274497733c55f59ba7dc63dd3ad2bfec99d18" exitCode=0 Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.663099 4558 generic.go:334] "Generic (PLEG): container finished" podID="10edb733-8fab-4543-ad29-3568e3de5aea" containerID="6d71dae893fc6a1962df504002d772c4cfe40ee0a96ad60259292b2cd410de30" exitCode=0 Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.663104 4558 generic.go:334] "Generic (PLEG): container finished" podID="10edb733-8fab-4543-ad29-3568e3de5aea" containerID="87f7b0a910a29230a9846bbbde9e7c30fd5ead2357d9f18d957764288be50fe1" exitCode=0 Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.663136 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"10edb733-8fab-4543-ad29-3568e3de5aea","Type":"ContainerDied","Data":"944fc0bf7c16ccd30559b80cdc699372748ca2f14ed832fbb7c9cbf4c5083aed"} Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.663154 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"10edb733-8fab-4543-ad29-3568e3de5aea","Type":"ContainerDied","Data":"8e36100266a8d13e7ab5ec75f4795cb2bfafd04327c6000ba4236e64554b079e"} Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.663184 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"10edb733-8fab-4543-ad29-3568e3de5aea","Type":"ContainerDied","Data":"064c39cc0ed32246609bc4e80f5c679dc77d71ae4e0e7325c8b80942e4b6cd46"} Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.663194 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"10edb733-8fab-4543-ad29-3568e3de5aea","Type":"ContainerDied","Data":"a00e2bc91063a2dabb4c2ce5cfdc3e96683c6b04440f6eeeadc69adf7e0e8a23"} Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.663201 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"10edb733-8fab-4543-ad29-3568e3de5aea","Type":"ContainerDied","Data":"1ad6198d74b43bff1f98c2de1da53f4e18e93c848f58a003ec586b574d66eb1f"} Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.663208 4558 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"10edb733-8fab-4543-ad29-3568e3de5aea","Type":"ContainerDied","Data":"7da72c8a88027dd1e413e53fd692c99fc03f3e37634323ae66f7e9f55370719e"} Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.663216 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"10edb733-8fab-4543-ad29-3568e3de5aea","Type":"ContainerDied","Data":"8f45926ec3655a6641edb5a009493f355b1f6533cdddced2af79c34663d3397c"} Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.663223 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"10edb733-8fab-4543-ad29-3568e3de5aea","Type":"ContainerDied","Data":"576e4656d99cd9370e77241bb14cd66a4de3e94deaef7d304253b1816ec67dd5"} Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.663230 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"10edb733-8fab-4543-ad29-3568e3de5aea","Type":"ContainerDied","Data":"f2f9c003229a8c4bff074beb609ad6fba9bf0167b989fcddbe4ee674bb6a73bd"} Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.663238 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"10edb733-8fab-4543-ad29-3568e3de5aea","Type":"ContainerDied","Data":"6e25c21c1436e8d3c70552c967000697e095b890d75c613e94acbebcf601e385"} Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.663245 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"10edb733-8fab-4543-ad29-3568e3de5aea","Type":"ContainerDied","Data":"fc9635ec26b842f65146e97cb678f89fadc8730a998b6cc02a5c959a0601c4bf"} Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.663253 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"10edb733-8fab-4543-ad29-3568e3de5aea","Type":"ContainerDied","Data":"103cf0e411f4211e757684612d3274497733c55f59ba7dc63dd3ad2bfec99d18"} Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.663262 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"10edb733-8fab-4543-ad29-3568e3de5aea","Type":"ContainerDied","Data":"6d71dae893fc6a1962df504002d772c4cfe40ee0a96ad60259292b2cd410de30"} Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.663270 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"10edb733-8fab-4543-ad29-3568e3de5aea","Type":"ContainerDied","Data":"87f7b0a910a29230a9846bbbde9e7c30fd5ead2357d9f18d957764288be50fe1"} Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.670347 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-aebd-account-create-update-z79l4"] Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.674479 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-7436-account-create-update-hpxxl"] Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.678094 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.685789 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-6218-account-create-update-gkxlw"] Jan 20 17:05:45 crc kubenswrapper[4558]: E0120 17:05:45.686123 4558 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf" containerName="ovsdbserver-nb" Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.686135 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf" containerName="ovsdbserver-nb" Jan 20 17:05:45 crc kubenswrapper[4558]: E0120 17:05:45.686146 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf" containerName="openstack-network-exporter" Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.686153 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf" containerName="openstack-network-exporter" Jan 20 17:05:45 crc kubenswrapper[4558]: E0120 17:05:45.686185 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37e84078-defe-4c61-ac89-878165827bba" containerName="openstack-network-exporter" Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.686192 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="37e84078-defe-4c61-ac89-878165827bba" containerName="openstack-network-exporter" Jan 20 17:05:45 crc kubenswrapper[4558]: E0120 17:05:45.686220 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37e84078-defe-4c61-ac89-878165827bba" containerName="ovsdbserver-sb" Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.686225 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="37e84078-defe-4c61-ac89-878165827bba" containerName="ovsdbserver-sb" Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.686386 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf" containerName="openstack-network-exporter" Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.686406 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf" containerName="ovsdbserver-nb" Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.686412 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="37e84078-defe-4c61-ac89-878165827bba" containerName="openstack-network-exporter" Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.686420 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="37e84078-defe-4c61-ac89-878165827bba" containerName="ovsdbserver-sb" Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.691146 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-6218-account-create-update-gkxlw"] Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.691233 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-6218-account-create-update-gkxlw" Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.697894 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-worker-599d6c8df7-lcnrd"] Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.698081 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-worker-599d6c8df7-lcnrd" podUID="bfbf270c-1853-4113-b0e4-6d192abb5c5d" containerName="barbican-worker-log" containerID="cri-o://880f39ca241fd9f400e76ce2cfd51b2dad6e3d3b73759064be5dea602507b366" gracePeriod=30 Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.698472 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-worker-599d6c8df7-lcnrd" podUID="bfbf270c-1853-4113-b0e4-6d192abb5c5d" containerName="barbican-worker" containerID="cri-o://1656c9f8a5d976f0a32142d5398076a41236be2ec81b6e074cfc899864057916" gracePeriod=30 Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.700097 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-db-secret" Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.703837 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-kwtcx"] Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.720282 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-sb-0_37e84078-defe-4c61-ac89-878165827bba/ovsdbserver-sb/0.log" Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.720499 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.720522 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"37e84078-defe-4c61-ac89-878165827bba","Type":"ContainerDied","Data":"27b39e26ce833ccaa98b7846021157d8c65814486b55b98a3db00426ce6a14c4"} Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.720599 4558 scope.go:117] "RemoveContainer" containerID="79b080f284ed7a9adbc1915885043c35d4710d77244162383b4e53aa16558542" Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.733154 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-worker-57fd8b7745-hswtl"] Jan 20 17:05:45 crc kubenswrapper[4558]: E0120 17:05:45.735561 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[combined-ca-bundle], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack-kuttl-tests/barbican-worker-57fd8b7745-hswtl" podUID="0fcabe47-75cd-4bff-ba51-8a65a23b3f1f" Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.737007 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-1b9c-account-create-update-9w5vq" event={"ID":"3ae31408-2eb5-445e-8f66-f0f0a6a7f9f2","Type":"ContainerStarted","Data":"5c77e694bb7361cf18c1aff6a8f9c39181215afe810852c20b37cfeef865d9fe"} Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.746444 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-65f6b4bfbb-llhdw"] Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.746624 4558 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack-kuttl-tests/barbican-keystone-listener-65f6b4bfbb-llhdw" podUID="bc2c7d29-d967-414d-8bb9-1bf70bcacdcd" containerName="barbican-keystone-listener-log" containerID="cri-o://ca91ff4227eff077e8448a3f7e4d144f18ec13ff9ee590460efb5d52dcae5abe" gracePeriod=30 Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.746719 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-keystone-listener-65f6b4bfbb-llhdw" podUID="bc2c7d29-d967-414d-8bb9-1bf70bcacdcd" containerName="barbican-keystone-listener" containerID="cri-o://3a9e47d6638b5d013f9248c5040a1853dbc404d976176e3415eea7e361d8e47b" gracePeriod=30 Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.748948 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf-ovsdb-rundir\") pod \"2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf\" (UID: \"2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf\") " Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.749033 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-nb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf\" (UID: \"2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf\") " Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.749136 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf-scripts\") pod \"2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf\" (UID: \"2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf\") " Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.749208 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf-ovsdbserver-nb-tls-certs\") pod \"2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf\" (UID: \"2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf\") " Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.749338 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf-combined-ca-bundle\") pod \"2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf\" (UID: \"2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf\") " Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.749406 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf-config\") pod \"2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf\" (UID: \"2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf\") " Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.752179 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf-metrics-certs-tls-certs\") pod \"2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf\" (UID: \"2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf\") " Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.752256 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w5d8\" (UniqueName: \"kubernetes.io/projected/2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf-kube-api-access-2w5d8\") pod \"2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf\" (UID: \"2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf\") " Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 
17:05:45.759241 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf-kube-api-access-2w5d8" (OuterVolumeSpecName: "kube-api-access-2w5d8") pod "2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf" (UID: "2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf"). InnerVolumeSpecName "kube-api-access-2w5d8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.768767 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf" (UID: "2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.774345 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/rabbitmq-server-0" podUID="ac55b716-d8fd-4628-8627-f94b5a4e7c78" containerName="rabbitmq" containerID="cri-o://04bd1c8749941bda61752f5a94063fae9cbc9fde43b1430193379fd01f69c3ff" gracePeriod=604800 Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.774702 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "ovndbcluster-nb-etc-ovn") pod "2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf" (UID: "2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf"). InnerVolumeSpecName "local-storage08-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.775194 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf-config" (OuterVolumeSpecName: "config") pod "2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf" (UID: "2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.775703 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf-scripts" (OuterVolumeSpecName: "scripts") pod "2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf" (UID: "2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.780187 4558 generic.go:334] "Generic (PLEG): container finished" podID="1f9ceb49-977a-47b3-a1f3-10d68c96ab0f" containerID="26bbc4f2aaf805eb72b1daca0bda5b9b7dad475314c7f9cbfdfce43f6fb9bfbc" exitCode=0 Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.780235 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"1f9ceb49-977a-47b3-a1f3-10d68c96ab0f","Type":"ContainerDied","Data":"26bbc4f2aaf805eb72b1daca0bda5b9b7dad475314c7f9cbfdfce43f6fb9bfbc"} Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.786977 4558 generic.go:334] "Generic (PLEG): container finished" podID="a98c5227-c8a8-4cbc-8039-461dd14fbd5b" containerID="bdd46cdb21b22eb84d6fd244e570bc9831757537e8b49ef22aaf635a1aa83768" exitCode=143 Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.787028 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-fbd5dff98-mmhdt" event={"ID":"a98c5227-c8a8-4cbc-8039-461dd14fbd5b","Type":"ContainerDied","Data":"bdd46cdb21b22eb84d6fd244e570bc9831757537e8b49ef22aaf635a1aa83768"} Jan 20 17:05:45 crc kubenswrapper[4558]: E0120 17:05:45.791845 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:05:45 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:05:45 crc kubenswrapper[4558]: Jan 20 17:05:45 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:05:45 crc kubenswrapper[4558]: Jan 20 17:05:45 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:05:45 crc kubenswrapper[4558]: Jan 20 17:05:45 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:05:45 crc kubenswrapper[4558]: Jan 20 17:05:45 crc kubenswrapper[4558]: if [ -n "nova_api" ]; then Jan 20 17:05:45 crc kubenswrapper[4558]: GRANT_DATABASE="nova_api" Jan 20 17:05:45 crc kubenswrapper[4558]: else Jan 20 17:05:45 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:05:45 crc kubenswrapper[4558]: fi Jan 20 17:05:45 crc kubenswrapper[4558]: Jan 20 17:05:45 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:05:45 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:05:45 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:05:45 crc kubenswrapper[4558]: # 3. 
create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:05:45 crc kubenswrapper[4558]: # support updates Jan 20 17:05:45 crc kubenswrapper[4558]: Jan 20 17:05:45 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.792182 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-api-869df765b4-9hk5q"] Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.792351 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-api-869df765b4-9hk5q" podUID="6c75ee5c-beea-4b1e-b429-91e83a472529" containerName="barbican-api-log" containerID="cri-o://aa3653d2225383bf934da0f655ed12217fcafbbb93b425d7a7cf38609e8256e9" gracePeriod=30 Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.792597 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-api-869df765b4-9hk5q" podUID="6c75ee5c-beea-4b1e-b429-91e83a472529" containerName="barbican-api" containerID="cri-o://be2a0248e5982920b07bb828a104cefcb46f9233f457cf95302aceef0c8d8b6f" gracePeriod=30 Jan 20 17:05:45 crc kubenswrapper[4558]: E0120 17:05:45.792961 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"nova-api-db-secret\\\" not found\"" pod="openstack-kuttl-tests/nova-api-1b9c-account-create-update-9w5vq" podUID="3ae31408-2eb5-445e-8f66-f0f0a6a7f9f2" Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.801224 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-bb2e-account-create-update-gqjcg"] Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.827606 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-767d5d98bd-zk227"] Jan 20 17:05:45 crc kubenswrapper[4558]: E0120 17:05:45.828933 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[combined-ca-bundle], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack-kuttl-tests/barbican-keystone-listener-767d5d98bd-zk227" podUID="ef319bad-004a-4140-b9a3-e34b376460da" Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.830066 4558 generic.go:334] "Generic (PLEG): container finished" podID="2e9a9b09-70ea-4d56-87ac-5a2c3461d2df" containerID="14b34f3f512d1c97cde26d0f645f8f50baa327877aab8009b5d6e09f075e96b7" exitCode=137 Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.839652 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf" (UID: "2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.843594 4558 generic.go:334] "Generic (PLEG): container finished" podID="1fccb241-c75e-4cec-b3c6-3855bd6c1161" containerID="e6a99988097b2873c0b2ced77f6588f66ac0fb5bb22b3ef8e1e7a52ca42e4820" exitCode=143 Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.843655 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"1fccb241-c75e-4cec-b3c6-3855bd6c1161","Type":"ContainerDied","Data":"e6a99988097b2873c0b2ced77f6588f66ac0fb5bb22b3ef8e1e7a52ca42e4820"} Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.850914 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-7436-account-create-update-hpxxl" event={"ID":"1fbb53e2-e2a2-4d62-a392-027b1f3f3232","Type":"ContainerStarted","Data":"c17bc22e927e22aa6bd3a92db42892dbac2eeb0a0b9e08b80efa5aea443f1e36"} Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.853837 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-42zjp\" (UniqueName: \"kubernetes.io/projected/f6dee0f3-6849-442d-b8fc-417c8540c9f9-kube-api-access-42zjp\") pod \"keystone-6218-account-create-update-gkxlw\" (UID: \"f6dee0f3-6849-442d-b8fc-417c8540c9f9\") " pod="openstack-kuttl-tests/keystone-6218-account-create-update-gkxlw" Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.853893 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f6dee0f3-6849-442d-b8fc-417c8540c9f9-operator-scripts\") pod \"keystone-6218-account-create-update-gkxlw\" (UID: \"f6dee0f3-6849-442d-b8fc-417c8540c9f9\") " pod="openstack-kuttl-tests/keystone-6218-account-create-update-gkxlw" Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.853932 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.853942 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.853959 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w5d8\" (UniqueName: \"kubernetes.io/projected/2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf-kube-api-access-2w5d8\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.853970 4558 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.853986 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.853994 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:45 crc kubenswrapper[4558]: E0120 17:05:45.854840 4558 configmap.go:193] Couldn't get 
configMap openstack-kuttl-tests/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 20 17:05:45 crc kubenswrapper[4558]: E0120 17:05:45.854885 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/ac55b716-d8fd-4628-8627-f94b5a4e7c78-config-data podName:ac55b716-d8fd-4628-8627-f94b5a4e7c78 nodeName:}" failed. No retries permitted until 2026-01-20 17:05:47.854871945 +0000 UTC m=+1441.615209912 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/ac55b716-d8fd-4628-8627-f94b5a4e7c78-config-data") pod "rabbitmq-server-0" (UID: "ac55b716-d8fd-4628-8627-f94b5a4e7c78") : configmap "rabbitmq-config-data" not found Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.863558 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstackclient" Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.878922 4558 scope.go:117] "RemoveContainer" containerID="b7a6ed7425377191f27273a9edf53dbd7c7ed2de00fbc9623792e4bfc9ffed21" Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.883370 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-6218-account-create-update-gbcj6"] Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.888115 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc" Jan 20 17:05:45 crc kubenswrapper[4558]: E0120 17:05:45.894674 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:05:45 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:05:45 crc kubenswrapper[4558]: Jan 20 17:05:45 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:05:45 crc kubenswrapper[4558]: Jan 20 17:05:45 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:05:45 crc kubenswrapper[4558]: Jan 20 17:05:45 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:05:45 crc kubenswrapper[4558]: Jan 20 17:05:45 crc kubenswrapper[4558]: if [ -n "neutron" ]; then Jan 20 17:05:45 crc kubenswrapper[4558]: GRANT_DATABASE="neutron" Jan 20 17:05:45 crc kubenswrapper[4558]: else Jan 20 17:05:45 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:05:45 crc kubenswrapper[4558]: fi Jan 20 17:05:45 crc kubenswrapper[4558]: Jan 20 17:05:45 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:05:45 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:05:45 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:05:45 crc kubenswrapper[4558]: # 3. 
create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:05:45 crc kubenswrapper[4558]: # support updates Jan 20 17:05:45 crc kubenswrapper[4558]: Jan 20 17:05:45 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.895291 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-db-sync-7d6hd"] Jan 20 17:05:45 crc kubenswrapper[4558]: E0120 17:05:45.898188 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"neutron-db-secret\\\" not found\"" pod="openstack-kuttl-tests/neutron-7436-account-create-update-hpxxl" podUID="1fbb53e2-e2a2-4d62-a392-027b1f3f3232" Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.898736 4558 generic.go:334] "Generic (PLEG): container finished" podID="2ba42f6a-1ade-45ae-bdc4-117ad2fa866b" containerID="a818a123ac4745b3c52a6e67fb8633fbe730b34a827cef46353c35fb329810d7" exitCode=143 Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.898820 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"2ba42f6a-1ade-45ae-bdc4-117ad2fa866b","Type":"ContainerDied","Data":"a818a123ac4745b3c52a6e67fb8633fbe730b34a827cef46353c35fb329810d7"} Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.924977 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-bb2e-account-create-update-gqjcg" event={"ID":"d4457629-a457-4a4c-b285-eab441f8d319","Type":"ContainerStarted","Data":"2a36753532c302eafe94010be58608974d3d3bc8a21d5cee36ed28552862db4d"} Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.929250 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-6218-account-create-update-gbcj6"] Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.942575 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-kwtcx" event={"ID":"25a5f425-926a-40eb-8f77-fcc3cdd9880c","Type":"ContainerStarted","Data":"a4a807d9647e363790dd8d22220e3e3eba75c497a5707d099399fbee0d254fe8"} Jan 20 17:05:45 crc kubenswrapper[4558]: E0120 17:05:45.943870 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:05:45 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:05:45 crc kubenswrapper[4558]: Jan 20 17:05:45 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:05:45 crc kubenswrapper[4558]: Jan 20 17:05:45 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:05:45 crc kubenswrapper[4558]: Jan 20 17:05:45 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:05:45 crc kubenswrapper[4558]: Jan 20 17:05:45 crc kubenswrapper[4558]: if [ -n "barbican" ]; then Jan 20 17:05:45 crc kubenswrapper[4558]: GRANT_DATABASE="barbican" Jan 20 17:05:45 crc kubenswrapper[4558]: else Jan 20 17:05:45 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:05:45 crc kubenswrapper[4558]: fi Jan 20 17:05:45 crc kubenswrapper[4558]: Jan 20 17:05:45 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:05:45 crc 
kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:05:45 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:05:45 crc kubenswrapper[4558]: # 3. create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:05:45 crc kubenswrapper[4558]: # support updates Jan 20 17:05:45 crc kubenswrapper[4558]: Jan 20 17:05:45 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.947073 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-db-sync-7d6hd"] Jan 20 17:05:45 crc kubenswrapper[4558]: E0120 17:05:45.947582 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"barbican-db-secret\\\" not found\"" pod="openstack-kuttl-tests/barbican-bb2e-account-create-update-gqjcg" podUID="d4457629-a457-4a4c-b285-eab441f8d319" Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.963531 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/2e9a9b09-70ea-4d56-87ac-5a2c3461d2df-openstack-config\") pod \"2e9a9b09-70ea-4d56-87ac-5a2c3461d2df\" (UID: \"2e9a9b09-70ea-4d56-87ac-5a2c3461d2df\") " Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.963609 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-67zld\" (UniqueName: \"kubernetes.io/projected/2e9a9b09-70ea-4d56-87ac-5a2c3461d2df-kube-api-access-67zld\") pod \"2e9a9b09-70ea-4d56-87ac-5a2c3461d2df\" (UID: \"2e9a9b09-70ea-4d56-87ac-5a2c3461d2df\") " Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.963735 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/2e9a9b09-70ea-4d56-87ac-5a2c3461d2df-openstack-config-secret\") pod \"2e9a9b09-70ea-4d56-87ac-5a2c3461d2df\" (UID: \"2e9a9b09-70ea-4d56-87ac-5a2c3461d2df\") " Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.963853 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e9a9b09-70ea-4d56-87ac-5a2c3461d2df-combined-ca-bundle\") pod \"2e9a9b09-70ea-4d56-87ac-5a2c3461d2df\" (UID: \"2e9a9b09-70ea-4d56-87ac-5a2c3461d2df\") " Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.964071 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f6dee0f3-6849-442d-b8fc-417c8540c9f9-operator-scripts\") pod \"keystone-6218-account-create-update-gkxlw\" (UID: \"f6dee0f3-6849-442d-b8fc-417c8540c9f9\") " pod="openstack-kuttl-tests/keystone-6218-account-create-update-gkxlw" Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.964252 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-42zjp\" (UniqueName: \"kubernetes.io/projected/f6dee0f3-6849-442d-b8fc-417c8540c9f9-kube-api-access-42zjp\") pod \"keystone-6218-account-create-update-gkxlw\" (UID: \"f6dee0f3-6849-442d-b8fc-417c8540c9f9\") " pod="openstack-kuttl-tests/keystone-6218-account-create-update-gkxlw" Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.964313 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:45 crc kubenswrapper[4558]: E0120 17:05:45.967452 4558 projected.go:194] Error preparing data for projected volume kube-api-access-42zjp for pod openstack-kuttl-tests/keystone-6218-account-create-update-gkxlw: failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 17:05:45 crc kubenswrapper[4558]: E0120 17:05:45.967505 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/f6dee0f3-6849-442d-b8fc-417c8540c9f9-kube-api-access-42zjp podName:f6dee0f3-6849-442d-b8fc-417c8540c9f9 nodeName:}" failed. No retries permitted until 2026-01-20 17:05:46.467489026 +0000 UTC m=+1440.227826992 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-42zjp" (UniqueName: "kubernetes.io/projected/f6dee0f3-6849-442d-b8fc-417c8540c9f9-kube-api-access-42zjp") pod "keystone-6218-account-create-update-gkxlw" (UID: "f6dee0f3-6849-442d-b8fc-417c8540c9f9") : failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.967772 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-8d7e-account-create-update-jzmzm" event={"ID":"43e20233-57f1-4b80-8717-1668dee2a884","Type":"ContainerStarted","Data":"4ab5b7132c0a8b17725a8ac7b1101d96ed6ee666eac819a6a5f4700277179686"} Jan 20 17:05:45 crc kubenswrapper[4558]: E0120 17:05:45.968139 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 17:05:45 crc kubenswrapper[4558]: E0120 17:05:45.968204 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/f6dee0f3-6849-442d-b8fc-417c8540c9f9-operator-scripts podName:f6dee0f3-6849-442d-b8fc-417c8540c9f9 nodeName:}" failed. No retries permitted until 2026-01-20 17:05:46.46815608 +0000 UTC m=+1440.228494046 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/f6dee0f3-6849-442d-b8fc-417c8540c9f9-operator-scripts") pod "keystone-6218-account-create-update-gkxlw" (UID: "f6dee0f3-6849-442d-b8fc-417c8540c9f9") : configmap "openstack-scripts" not found Jan 20 17:05:45 crc kubenswrapper[4558]: E0120 17:05:45.968734 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 17:05:45 crc kubenswrapper[4558]: E0120 17:05:45.968757 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/1fbb53e2-e2a2-4d62-a392-027b1f3f3232-operator-scripts podName:1fbb53e2-e2a2-4d62-a392-027b1f3f3232 nodeName:}" failed. No retries permitted until 2026-01-20 17:05:46.468750016 +0000 UTC m=+1440.229087973 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/1fbb53e2-e2a2-4d62-a392-027b1f3f3232-operator-scripts") pod "neutron-7436-account-create-update-hpxxl" (UID: "1fbb53e2-e2a2-4d62-a392-027b1f3f3232") : configmap "openstack-scripts" not found Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.978772 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2e9a9b09-70ea-4d56-87ac-5a2c3461d2df-kube-api-access-67zld" (OuterVolumeSpecName: "kube-api-access-67zld") pod "2e9a9b09-70ea-4d56-87ac-5a2c3461d2df" (UID: "2e9a9b09-70ea-4d56-87ac-5a2c3461d2df"). 
InnerVolumeSpecName "kube-api-access-67zld". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.981993 4558 generic.go:334] "Generic (PLEG): container finished" podID="19189ead-bcd7-4806-be88-43cc27d5f202" containerID="e7f17901adbdb0ade1bb599a686da509a4f272513ee6686f67a6c29495fa2a19" exitCode=1 Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.982042 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-vcf64" event={"ID":"19189ead-bcd7-4806-be88-43cc27d5f202","Type":"ContainerDied","Data":"e7f17901adbdb0ade1bb599a686da509a4f272513ee6686f67a6c29495fa2a19"} Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.982517 4558 scope.go:117] "RemoveContainer" containerID="e7f17901adbdb0ade1bb599a686da509a4f272513ee6686f67a6c29495fa2a19" Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.994460 4558 generic.go:334] "Generic (PLEG): container finished" podID="fa011f19-1245-4ae8-ae2b-8773036498b3" containerID="a508351390349a4d085a4b101af8917b6baf005aac6d69c3b56e8bb73832583f" exitCode=0 Jan 20 17:05:45 crc kubenswrapper[4558]: I0120 17:05:45.994509 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-7fdccbb69d-fng8p" event={"ID":"fa011f19-1245-4ae8-ae2b-8773036498b3","Type":"ContainerDied","Data":"a508351390349a4d085a4b101af8917b6baf005aac6d69c3b56e8bb73832583f"} Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.007011 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.007208 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podUID="084a5ce8-2844-42f1-92a9-973b78505050" containerName="nova-cell1-conductor-conductor" containerID="cri-o://16d11c7e8c68131f8eb72146637f28a05ad570c65546debfb217fa0b00e58dae" gracePeriod=30 Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.007373 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf-ovsdbserver-nb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-nb-tls-certs") pod "2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf" (UID: "2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf"). InnerVolumeSpecName "ovsdbserver-nb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.007478 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2e9a9b09-70ea-4d56-87ac-5a2c3461d2df-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "2e9a9b09-70ea-4d56-87ac-5a2c3461d2df" (UID: "2e9a9b09-70ea-4d56-87ac-5a2c3461d2df"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.011987 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf" (UID: "2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf"). InnerVolumeSpecName "metrics-certs-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.013850 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-db-sync-xwwl5"] Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.016763 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-103a-account-create-update-whkzm" event={"ID":"f6a8779d-bb69-43c5-96d1-a7669b5dd9ea","Type":"ContainerStarted","Data":"8a63e3123e7318fb99c1d664774b7d9155d07515aaac65a7c132ef2027029ad8"} Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.017318 4558 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openstack-kuttl-tests/glance-103a-account-create-update-whkzm" secret="" err="secret \"galera-openstack-dockercfg-xxw26\" not found" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.017709 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-db-sync-xwwl5"] Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.027405 4558 generic.go:334] "Generic (PLEG): container finished" podID="20ba561a-58e7-459f-ba28-ed0b68cdab9b" containerID="225fabc2e274bc055279016bab31443db7f376181931205b6688497f431e61fd" exitCode=143 Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.027478 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.027501 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"20ba561a-58e7-459f-ba28-ed0b68cdab9b","Type":"ContainerDied","Data":"225fabc2e274bc055279016bab31443db7f376181931205b6688497f431e61fd"} Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.027625 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="55d5d50d-edc1-4a99-9540-72a7f5f0c622" containerName="nova-cell0-conductor-conductor" containerID="cri-o://e3a0dcbc1a9d518fbc9c91fd7af2a3a13be64f9278f06d3e4672d909735d4773" gracePeriod=30 Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.039487 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.040023 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2e9a9b09-70ea-4d56-87ac-5a2c3461d2df-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2e9a9b09-70ea-4d56-87ac-5a2c3461d2df" (UID: "2e9a9b09-70ea-4d56-87ac-5a2c3461d2df"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:46 crc kubenswrapper[4558]: E0120 17:05:46.042859 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:05:46 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:05:46 crc kubenswrapper[4558]: Jan 20 17:05:46 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:05:46 crc kubenswrapper[4558]: Jan 20 17:05:46 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:05:46 crc kubenswrapper[4558]: Jan 20 17:05:46 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:05:46 crc kubenswrapper[4558]: Jan 20 17:05:46 crc kubenswrapper[4558]: if [ -n "glance" ]; then Jan 20 17:05:46 crc kubenswrapper[4558]: GRANT_DATABASE="glance" Jan 20 17:05:46 crc kubenswrapper[4558]: else Jan 20 17:05:46 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:05:46 crc kubenswrapper[4558]: fi Jan 20 17:05:46 crc kubenswrapper[4558]: Jan 20 17:05:46 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:05:46 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:05:46 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:05:46 crc kubenswrapper[4558]: # 3. create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:05:46 crc kubenswrapper[4558]: # support updates Jan 20 17:05:46 crc kubenswrapper[4558]: Jan 20 17:05:46 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:05:46 crc kubenswrapper[4558]: E0120 17:05:46.043981 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"glance-db-secret\\\" not found\"" pod="openstack-kuttl-tests/glance-103a-account-create-update-whkzm" podUID="f6a8779d-bb69-43c5-96d1-a7669b5dd9ea" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.046364 4558 generic.go:334] "Generic (PLEG): container finished" podID="32a3bcd7-25d6-45f5-8ce6-66949357504c" containerID="b2c58a9c1693c20653debec2694a95f53b8b1e5e7675f997990f41753ef39a46" exitCode=0 Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.046388 4558 generic.go:334] "Generic (PLEG): container finished" podID="32a3bcd7-25d6-45f5-8ce6-66949357504c" containerID="1e67af77bc964f499418d3c854f0b6163e2f81ff3e6521a66f14aa8ee4590e1c" exitCode=0 Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.046424 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-595b48fd98-kbjv6" event={"ID":"32a3bcd7-25d6-45f5-8ce6-66949357504c","Type":"ContainerDied","Data":"b2c58a9c1693c20653debec2694a95f53b8b1e5e7675f997990f41753ef39a46"} Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.046444 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-595b48fd98-kbjv6" event={"ID":"32a3bcd7-25d6-45f5-8ce6-66949357504c","Type":"ContainerDied","Data":"1e67af77bc964f499418d3c854f0b6163e2f81ff3e6521a66f14aa8ee4590e1c"} Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.050522 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:05:46 crc 
kubenswrapper[4558]: I0120 17:05:46.050667 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/memcached-0" podUID="aed5856e-9412-48a1-ac94-cc2f4bd05633" containerName="memcached" containerID="cri-o://8029c61f17dcdbc8fe0413af1d186a7d097951d5e9f7472abca626983580e5f9" gracePeriod=30 Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.057586 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.059611 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2e9a9b09-70ea-4d56-87ac-5a2c3461d2df-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "2e9a9b09-70ea-4d56-87ac-5a2c3461d2df" (UID: "2e9a9b09-70ea-4d56-87ac-5a2c3461d2df"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.062707 4558 generic.go:334] "Generic (PLEG): container finished" podID="c7187d14-f25b-4344-bf36-7d56d8e1b79c" containerID="c6607b53445fad5655dbb034a5a76d6a87a2e8a96bd8d11ed9d596a7971d2d55" exitCode=0 Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.062730 4558 generic.go:334] "Generic (PLEG): container finished" podID="c7187d14-f25b-4344-bf36-7d56d8e1b79c" containerID="2af5466604135693231e3743b389224d86d75c0a2d399c8a0f6df2491983b42f" exitCode=2 Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.062739 4558 generic.go:334] "Generic (PLEG): container finished" podID="c7187d14-f25b-4344-bf36-7d56d8e1b79c" containerID="97f14c8dcf0f7e744564168d9a5f79f08919d2ff3285eef66b5001b20e3c69d7" exitCode=0 Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.062777 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"c7187d14-f25b-4344-bf36-7d56d8e1b79c","Type":"ContainerDied","Data":"c6607b53445fad5655dbb034a5a76d6a87a2e8a96bd8d11ed9d596a7971d2d55"} Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.062798 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"c7187d14-f25b-4344-bf36-7d56d8e1b79c","Type":"ContainerDied","Data":"2af5466604135693231e3743b389224d86d75c0a2d399c8a0f6df2491983b42f"} Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.062806 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"c7187d14-f25b-4344-bf36-7d56d8e1b79c","Type":"ContainerDied","Data":"97f14c8dcf0f7e744564168d9a5f79f08919d2ff3285eef66b5001b20e3c69d7"} Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.065759 4558 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf-ovsdbserver-nb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.065782 4558 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/2e9a9b09-70ea-4d56-87ac-5a2c3461d2df-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.065791 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e9a9b09-70ea-4d56-87ac-5a2c3461d2df-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.065802 
4558 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.065810 4558 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/2e9a9b09-70ea-4d56-87ac-5a2c3461d2df-openstack-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.065819 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-67zld\" (UniqueName: \"kubernetes.io/projected/2e9a9b09-70ea-4d56-87ac-5a2c3461d2df-kube-api-access-67zld\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:46 crc kubenswrapper[4558]: E0120 17:05:46.065887 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 17:05:46 crc kubenswrapper[4558]: E0120 17:05:46.065920 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/d4457629-a457-4a4c-b285-eab441f8d319-operator-scripts podName:d4457629-a457-4a4c-b285-eab441f8d319 nodeName:}" failed. No retries permitted until 2026-01-20 17:05:46.565907486 +0000 UTC m=+1440.326245453 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/d4457629-a457-4a4c-b285-eab441f8d319-operator-scripts") pod "barbican-bb2e-account-create-update-gqjcg" (UID: "d4457629-a457-4a4c-b285-eab441f8d319") : configmap "openstack-scripts" not found Jan 20 17:05:46 crc kubenswrapper[4558]: E0120 17:05:46.066072 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 17:05:46 crc kubenswrapper[4558]: E0120 17:05:46.066100 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/19189ead-bcd7-4806-be88-43cc27d5f202-operator-scripts podName:19189ead-bcd7-4806-be88-43cc27d5f202 nodeName:}" failed. No retries permitted until 2026-01-20 17:05:46.566092815 +0000 UTC m=+1440.326430782 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/19189ead-bcd7-4806-be88-43cc27d5f202-operator-scripts") pod "root-account-create-update-vcf64" (UID: "19189ead-bcd7-4806-be88-43cc27d5f202") : configmap "openstack-scripts" not found Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.072114 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-bfd5-account-create-update-rvnw4" event={"ID":"79c1324d-aa7f-4add-a2c3-12f6d4c6216f","Type":"ContainerStarted","Data":"2dc7324ebd2373169b2bc30a14b997917f8e31a9070dd2b5e660a188293bcf87"} Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.074880 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-xq579"] Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.079213 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-xq579"] Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.082607 4558 generic.go:334] "Generic (PLEG): container finished" podID="51e263b2-d42e-46fa-99e1-e0c5aa23bcf5" containerID="762c3b53bff8276c53ff1ba0f6fcbbc8ea3579b91262bc868d821bf5b7740f32" exitCode=143 Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.082664 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"51e263b2-d42e-46fa-99e1-e0c5aa23bcf5","Type":"ContainerDied","Data":"762c3b53bff8276c53ff1ba0f6fcbbc8ea3579b91262bc868d821bf5b7740f32"} Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.088032 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-db-sync-9pt6z"] Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.091338 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-db-sync-9pt6z"] Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.091775 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" podUID="1176545e-611b-4fc7-8b03-e91ee7813fd3" containerName="nova-cell1-novncproxy-novncproxy" probeResult="failure" output="Get \"https://10.217.1.27:6080/vnc_lite.html\": dial tcp 10.217.1.27:6080: connect: connection refused" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.097954 4558 generic.go:334] "Generic (PLEG): container finished" podID="aa387033-dc08-48f4-bf56-06a7f316423c" containerID="cb8be596c569251416c1e084cf32b390ad320911bfbd93edd669cf0bc7d40234" exitCode=143 Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.098046 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"aa387033-dc08-48f4-bf56-06a7f316423c","Type":"ContainerDied","Data":"cb8be596c569251416c1e084cf32b390ad320911bfbd93edd669cf0bc7d40234"} Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.099224 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-6218-account-create-update-gkxlw"] Jan 20 17:05:46 crc kubenswrapper[4558]: E0120 17:05:46.099893 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[kube-api-access-42zjp operator-scripts], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack-kuttl-tests/keystone-6218-account-create-update-gkxlw" podUID="f6dee0f3-6849-442d-b8fc-417c8540c9f9" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.119396 4558 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-nb-0_2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf/ovsdbserver-nb/0.log" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.119560 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf","Type":"ContainerDied","Data":"0cde45e90d60dd6a806a5f8b43101fdb431dbc690d894e7c9e5ad65d02015fdc"} Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.119701 4558 scope.go:117] "RemoveContainer" containerID="6b320eb22df0758cdab9680050c01fd3a7007ed6383b4dc60f038863e7bdaedc" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.120311 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.121734 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-proxy-595b48fd98-kbjv6" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.150199 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.166428 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-db-create-pxkff"] Jan 20 17:05:46 crc kubenswrapper[4558]: E0120 17:05:46.168248 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 17:05:46 crc kubenswrapper[4558]: E0120 17:05:46.168323 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/f6a8779d-bb69-43c5-96d1-a7669b5dd9ea-operator-scripts podName:f6a8779d-bb69-43c5-96d1-a7669b5dd9ea nodeName:}" failed. No retries permitted until 2026-01-20 17:05:46.668293018 +0000 UTC m=+1440.428630985 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/f6a8779d-bb69-43c5-96d1-a7669b5dd9ea-operator-scripts") pod "glance-103a-account-create-update-whkzm" (UID: "f6a8779d-bb69-43c5-96d1-a7669b5dd9ea") : configmap "openstack-scripts" not found Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.176078 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-db-create-pxkff"] Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.183900 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-65d75d5fbb-nkccr"] Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.184115 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/keystone-65d75d5fbb-nkccr" podUID="8b9f5a8b-ee37-483c-9a86-38cc24dcb388" containerName="keystone-api" containerID="cri-o://4b73519a38610ac22d4ce96ba40226baebd8e6660f77f9013e57a5c57815f3a9" gracePeriod=30 Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.201817 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-vcf64"] Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.208260 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/rabbitmq-server-0" podUID="ac55b716-d8fd-4628-8627-f94b5a4e7c78" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.202:5671: connect: connection refused" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.252503 4558 scope.go:117] "RemoveContainer" containerID="a7862609eb045b31d1cf0e6477c1e8599625e319c33baf93c6ef05261a3fad12" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.271223 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/32a3bcd7-25d6-45f5-8ce6-66949357504c-run-httpd\") pod \"32a3bcd7-25d6-45f5-8ce6-66949357504c\" (UID: \"32a3bcd7-25d6-45f5-8ce6-66949357504c\") " Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.271291 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/32a3bcd7-25d6-45f5-8ce6-66949357504c-log-httpd\") pod \"32a3bcd7-25d6-45f5-8ce6-66949357504c\" (UID: \"32a3bcd7-25d6-45f5-8ce6-66949357504c\") " Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.271501 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wbsp4\" (UniqueName: \"kubernetes.io/projected/32a3bcd7-25d6-45f5-8ce6-66949357504c-kube-api-access-wbsp4\") pod \"32a3bcd7-25d6-45f5-8ce6-66949357504c\" (UID: \"32a3bcd7-25d6-45f5-8ce6-66949357504c\") " Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.271537 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32a3bcd7-25d6-45f5-8ce6-66949357504c-config-data\") pod \"32a3bcd7-25d6-45f5-8ce6-66949357504c\" (UID: \"32a3bcd7-25d6-45f5-8ce6-66949357504c\") " Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.271574 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/32a3bcd7-25d6-45f5-8ce6-66949357504c-internal-tls-certs\") pod \"32a3bcd7-25d6-45f5-8ce6-66949357504c\" (UID: \"32a3bcd7-25d6-45f5-8ce6-66949357504c\") " Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.271601 4558 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/32a3bcd7-25d6-45f5-8ce6-66949357504c-etc-swift\") pod \"32a3bcd7-25d6-45f5-8ce6-66949357504c\" (UID: \"32a3bcd7-25d6-45f5-8ce6-66949357504c\") " Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.271677 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32a3bcd7-25d6-45f5-8ce6-66949357504c-combined-ca-bundle\") pod \"32a3bcd7-25d6-45f5-8ce6-66949357504c\" (UID: \"32a3bcd7-25d6-45f5-8ce6-66949357504c\") " Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.271744 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/32a3bcd7-25d6-45f5-8ce6-66949357504c-public-tls-certs\") pod \"32a3bcd7-25d6-45f5-8ce6-66949357504c\" (UID: \"32a3bcd7-25d6-45f5-8ce6-66949357504c\") " Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.274654 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/32a3bcd7-25d6-45f5-8ce6-66949357504c-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "32a3bcd7-25d6-45f5-8ce6-66949357504c" (UID: "32a3bcd7-25d6-45f5-8ce6-66949357504c"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.285688 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/32a3bcd7-25d6-45f5-8ce6-66949357504c-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "32a3bcd7-25d6-45f5-8ce6-66949357504c" (UID: "32a3bcd7-25d6-45f5-8ce6-66949357504c"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.308699 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/32a3bcd7-25d6-45f5-8ce6-66949357504c-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "32a3bcd7-25d6-45f5-8ce6-66949357504c" (UID: "32a3bcd7-25d6-45f5-8ce6-66949357504c"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.326533 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/32a3bcd7-25d6-45f5-8ce6-66949357504c-kube-api-access-wbsp4" (OuterVolumeSpecName: "kube-api-access-wbsp4") pod "32a3bcd7-25d6-45f5-8ce6-66949357504c" (UID: "32a3bcd7-25d6-45f5-8ce6-66949357504c"). InnerVolumeSpecName "kube-api-access-wbsp4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.375968 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/32a3bcd7-25d6-45f5-8ce6-66949357504c-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.375987 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/32a3bcd7-25d6-45f5-8ce6-66949357504c-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.375996 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wbsp4\" (UniqueName: \"kubernetes.io/projected/32a3bcd7-25d6-45f5-8ce6-66949357504c-kube-api-access-wbsp4\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.376006 4558 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/32a3bcd7-25d6-45f5-8ce6-66949357504c-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.392537 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/openstack-galera-0" podUID="ad58cc97-cbed-48c8-ab51-ebb920a1454c" containerName="galera" containerID="cri-o://5f0d33357c659b175318f67f48f0766deb32c17bae3e330e4f6f14c73b8bc529" gracePeriod=30 Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.409247 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32a3bcd7-25d6-45f5-8ce6-66949357504c-config-data" (OuterVolumeSpecName: "config-data") pod "32a3bcd7-25d6-45f5-8ce6-66949357504c" (UID: "32a3bcd7-25d6-45f5-8ce6-66949357504c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.411173 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32a3bcd7-25d6-45f5-8ce6-66949357504c-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "32a3bcd7-25d6-45f5-8ce6-66949357504c" (UID: "32a3bcd7-25d6-45f5-8ce6-66949357504c"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.430260 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32a3bcd7-25d6-45f5-8ce6-66949357504c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "32a3bcd7-25d6-45f5-8ce6-66949357504c" (UID: "32a3bcd7-25d6-45f5-8ce6-66949357504c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.433824 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32a3bcd7-25d6-45f5-8ce6-66949357504c-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "32a3bcd7-25d6-45f5-8ce6-66949357504c" (UID: "32a3bcd7-25d6-45f5-8ce6-66949357504c"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.490757 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-42zjp\" (UniqueName: \"kubernetes.io/projected/f6dee0f3-6849-442d-b8fc-417c8540c9f9-kube-api-access-42zjp\") pod \"keystone-6218-account-create-update-gkxlw\" (UID: \"f6dee0f3-6849-442d-b8fc-417c8540c9f9\") " pod="openstack-kuttl-tests/keystone-6218-account-create-update-gkxlw" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.490878 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f6dee0f3-6849-442d-b8fc-417c8540c9f9-operator-scripts\") pod \"keystone-6218-account-create-update-gkxlw\" (UID: \"f6dee0f3-6849-442d-b8fc-417c8540c9f9\") " pod="openstack-kuttl-tests/keystone-6218-account-create-update-gkxlw" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.491087 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32a3bcd7-25d6-45f5-8ce6-66949357504c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.491104 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/32a3bcd7-25d6-45f5-8ce6-66949357504c-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.491114 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32a3bcd7-25d6-45f5-8ce6-66949357504c-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.491124 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/32a3bcd7-25d6-45f5-8ce6-66949357504c-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:46 crc kubenswrapper[4558]: E0120 17:05:46.491200 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 17:05:46 crc kubenswrapper[4558]: E0120 17:05:46.491240 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/1fbb53e2-e2a2-4d62-a392-027b1f3f3232-operator-scripts podName:1fbb53e2-e2a2-4d62-a392-027b1f3f3232 nodeName:}" failed. No retries permitted until 2026-01-20 17:05:47.491226307 +0000 UTC m=+1441.251564274 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/1fbb53e2-e2a2-4d62-a392-027b1f3f3232-operator-scripts") pod "neutron-7436-account-create-update-hpxxl" (UID: "1fbb53e2-e2a2-4d62-a392-027b1f3f3232") : configmap "openstack-scripts" not found Jan 20 17:05:46 crc kubenswrapper[4558]: E0120 17:05:46.491657 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 17:05:46 crc kubenswrapper[4558]: E0120 17:05:46.491687 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/f6dee0f3-6849-442d-b8fc-417c8540c9f9-operator-scripts podName:f6dee0f3-6849-442d-b8fc-417c8540c9f9 nodeName:}" failed. No retries permitted until 2026-01-20 17:05:47.49167995 +0000 UTC m=+1441.252017917 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/f6dee0f3-6849-442d-b8fc-417c8540c9f9-operator-scripts") pod "keystone-6218-account-create-update-gkxlw" (UID: "f6dee0f3-6849-442d-b8fc-417c8540c9f9") : configmap "openstack-scripts" not found Jan 20 17:05:46 crc kubenswrapper[4558]: E0120 17:05:46.496789 4558 projected.go:194] Error preparing data for projected volume kube-api-access-42zjp for pod openstack-kuttl-tests/keystone-6218-account-create-update-gkxlw: failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 17:05:46 crc kubenswrapper[4558]: E0120 17:05:46.496908 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/f6dee0f3-6849-442d-b8fc-417c8540c9f9-kube-api-access-42zjp podName:f6dee0f3-6849-442d-b8fc-417c8540c9f9 nodeName:}" failed. No retries permitted until 2026-01-20 17:05:47.49689198 +0000 UTC m=+1441.257229947 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-42zjp" (UniqueName: "kubernetes.io/projected/f6dee0f3-6849-442d-b8fc-417c8540c9f9-kube-api-access-42zjp") pod "keystone-6218-account-create-update-gkxlw" (UID: "f6dee0f3-6849-442d-b8fc-417c8540c9f9") : failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.580112 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0ccba453-b5b1-4a1c-9e37-1c4c991cfb93" path="/var/lib/kubelet/pods/0ccba453-b5b1-4a1c-9e37-1c4c991cfb93/volumes" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.581749 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0d6ee4cd-ee0d-45e6-9f55-92a5ffa7b7d7" path="/var/lib/kubelet/pods/0d6ee4cd-ee0d-45e6-9f55-92a5ffa7b7d7/volumes" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.584180 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2e9a9b09-70ea-4d56-87ac-5a2c3461d2df" path="/var/lib/kubelet/pods/2e9a9b09-70ea-4d56-87ac-5a2c3461d2df/volumes" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.585139 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="37e84078-defe-4c61-ac89-878165827bba" path="/var/lib/kubelet/pods/37e84078-defe-4c61-ac89-878165827bba/volumes" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.585750 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49eb0897-5e0c-4075-8814-aa484a886e33" path="/var/lib/kubelet/pods/49eb0897-5e0c-4075-8814-aa484a886e33/volumes" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.587525 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="66dc4df8-a7a7-452b-a8b2-b639d84ad42f" path="/var/lib/kubelet/pods/66dc4df8-a7a7-452b-a8b2-b639d84ad42f/volumes" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.588075 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="769881ca-45e6-4943-9f14-51b0f78eea8c" path="/var/lib/kubelet/pods/769881ca-45e6-4943-9f14-51b0f78eea8c/volumes" Jan 20 17:05:46 crc kubenswrapper[4558]: E0120 17:05:46.595885 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 17:05:46 crc kubenswrapper[4558]: E0120 17:05:46.596115 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/d4457629-a457-4a4c-b285-eab441f8d319-operator-scripts podName:d4457629-a457-4a4c-b285-eab441f8d319 nodeName:}" failed. 
No retries permitted until 2026-01-20 17:05:47.596098634 +0000 UTC m=+1441.356436600 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/d4457629-a457-4a4c-b285-eab441f8d319-operator-scripts") pod "barbican-bb2e-account-create-update-gqjcg" (UID: "d4457629-a457-4a4c-b285-eab441f8d319") : configmap "openstack-scripts" not found Jan 20 17:05:46 crc kubenswrapper[4558]: E0120 17:05:46.597226 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 17:05:46 crc kubenswrapper[4558]: E0120 17:05:46.597349 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/19189ead-bcd7-4806-be88-43cc27d5f202-operator-scripts podName:19189ead-bcd7-4806-be88-43cc27d5f202 nodeName:}" failed. No retries permitted until 2026-01-20 17:05:47.597293791 +0000 UTC m=+1441.357631758 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/19189ead-bcd7-4806-be88-43cc27d5f202-operator-scripts") pod "root-account-create-update-vcf64" (UID: "19189ead-bcd7-4806-be88-43cc27d5f202") : configmap "openstack-scripts" not found Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.597572 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="79ceb3e8-184f-4c1d-bceb-89c6f69ee670" path="/var/lib/kubelet/pods/79ceb3e8-184f-4c1d-bceb-89c6f69ee670/volumes" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.599961 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8505b3de-7094-43aa-a66b-13ba40bd4c13" path="/var/lib/kubelet/pods/8505b3de-7094-43aa-a66b-13ba40bd4c13/volumes" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.600497 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8b64ebf3-99cf-495a-a82d-0a27123f5d7a" path="/var/lib/kubelet/pods/8b64ebf3-99cf-495a-a82d-0a27123f5d7a/volumes" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.600974 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8efaa8ce-5cd9-4688-8cef-0e3fb667105d" path="/var/lib/kubelet/pods/8efaa8ce-5cd9-4688-8cef-0e3fb667105d/volumes" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.602040 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="98aac22a-672a-4029-8d83-be51df654f6e" path="/var/lib/kubelet/pods/98aac22a-672a-4029-8d83-be51df654f6e/volumes" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.602539 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0a56a29-2029-4a64-bb0c-539c45a09175" path="/var/lib/kubelet/pods/a0a56a29-2029-4a64-bb0c-539c45a09175/volumes" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.603010 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="affe1aaa-7cef-46d2-82f8-1205ce2ff96b" path="/var/lib/kubelet/pods/affe1aaa-7cef-46d2-82f8-1205ce2ff96b/volumes" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.604028 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf8f2ffa-65ef-49d0-ba6a-3bc80f0313ac" path="/var/lib/kubelet/pods/bf8f2ffa-65ef-49d0-ba6a-3bc80f0313ac/volumes" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.604546 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d1fde627-f808-4f60-82f9-3a9d13491932" path="/var/lib/kubelet/pods/d1fde627-f808-4f60-82f9-3a9d13491932/volumes" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 
17:05:46.605011 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d97707d1-0217-458b-88ea-5589b040f499" path="/var/lib/kubelet/pods/d97707d1-0217-458b-88ea-5589b040f499/volumes" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.605513 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f2a9bc2f-a177-4c7e-9036-c52825816ffe" path="/var/lib/kubelet/pods/f2a9bc2f-a177-4c7e-9036-c52825816ffe/volumes" Jan 20 17:05:46 crc kubenswrapper[4558]: E0120 17:05:46.630198 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="e3a0dcbc1a9d518fbc9c91fd7af2a3a13be64f9278f06d3e4672d909735d4773" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:05:46 crc kubenswrapper[4558]: E0120 17:05:46.632027 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="e3a0dcbc1a9d518fbc9c91fd7af2a3a13be64f9278f06d3e4672d909735d4773" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:05:46 crc kubenswrapper[4558]: E0120 17:05:46.633214 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="e3a0dcbc1a9d518fbc9c91fd7af2a3a13be64f9278f06d3e4672d909735d4773" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:05:46 crc kubenswrapper[4558]: E0120 17:05:46.633326 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="55d5d50d-edc1-4a99-9540-72a7f5f0c622" containerName="nova-cell0-conductor-conductor" Jan 20 17:05:46 crc kubenswrapper[4558]: E0120 17:05:46.644486 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="5f0d33357c659b175318f67f48f0766deb32c17bae3e330e4f6f14c73b8bc529" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Jan 20 17:05:46 crc kubenswrapper[4558]: E0120 17:05:46.646036 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="5f0d33357c659b175318f67f48f0766deb32c17bae3e330e4f6f14c73b8bc529" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Jan 20 17:05:46 crc kubenswrapper[4558]: E0120 17:05:46.655332 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="5f0d33357c659b175318f67f48f0766deb32c17bae3e330e4f6f14c73b8bc529" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Jan 20 17:05:46 crc kubenswrapper[4558]: E0120 17:05:46.655404 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" 
pod="openstack-kuttl-tests/openstack-galera-0" podUID="ad58cc97-cbed-48c8-ab51-ebb920a1454c" containerName="galera" Jan 20 17:05:46 crc kubenswrapper[4558]: E0120 17:05:46.697402 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 17:05:46 crc kubenswrapper[4558]: E0120 17:05:46.697476 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/f6a8779d-bb69-43c5-96d1-a7669b5dd9ea-operator-scripts podName:f6a8779d-bb69-43c5-96d1-a7669b5dd9ea nodeName:}" failed. No retries permitted until 2026-01-20 17:05:47.697459268 +0000 UTC m=+1441.457797235 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/f6a8779d-bb69-43c5-96d1-a7669b5dd9ea-operator-scripts") pod "glance-103a-account-create-update-whkzm" (UID: "f6a8779d-bb69-43c5-96d1-a7669b5dd9ea") : configmap "openstack-scripts" not found Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.739623 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-aebd-account-create-update-z79l4" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.741629 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.744181 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.753207 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.754413 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-8d7e-account-create-update-jzmzm" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.763452 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.769826 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.772975 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-bfd5-account-create-update-rvnw4" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.779237 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.905860 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage17-crc\") pod \"6df6ca0e-78e9-4248-8cbe-b9934e0ad090\" (UID: \"6df6ca0e-78e9-4248-8cbe-b9934e0ad090\") " Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.905917 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/6df6ca0e-78e9-4248-8cbe-b9934e0ad090-galera-tls-certs\") pod \"6df6ca0e-78e9-4248-8cbe-b9934e0ad090\" (UID: \"6df6ca0e-78e9-4248-8cbe-b9934e0ad090\") " Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.905942 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9mkqv\" (UniqueName: \"kubernetes.io/projected/1176545e-611b-4fc7-8b03-e91ee7813fd3-kube-api-access-9mkqv\") pod \"1176545e-611b-4fc7-8b03-e91ee7813fd3\" (UID: \"1176545e-611b-4fc7-8b03-e91ee7813fd3\") " Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.906013 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/b9c2a3b1-71ed-4612-8cd0-22e396cd622c-kube-state-metrics-tls-config\") pod \"b9c2a3b1-71ed-4612-8cd0-22e396cd622c\" (UID: \"b9c2a3b1-71ed-4612-8cd0-22e396cd622c\") " Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.906044 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/6df6ca0e-78e9-4248-8cbe-b9934e0ad090-config-data-default\") pod \"6df6ca0e-78e9-4248-8cbe-b9934e0ad090\" (UID: \"6df6ca0e-78e9-4248-8cbe-b9934e0ad090\") " Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.906064 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/38786f1c-754c-488d-8a13-aad7001ad778-config-data\") pod \"38786f1c-754c-488d-8a13-aad7001ad778\" (UID: \"38786f1c-754c-488d-8a13-aad7001ad778\") " Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.906086 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bbllh\" (UniqueName: \"kubernetes.io/projected/6df6ca0e-78e9-4248-8cbe-b9934e0ad090-kube-api-access-bbllh\") pod \"6df6ca0e-78e9-4248-8cbe-b9934e0ad090\" (UID: \"6df6ca0e-78e9-4248-8cbe-b9934e0ad090\") " Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.906101 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fkfw5\" (UniqueName: \"kubernetes.io/projected/43e20233-57f1-4b80-8717-1668dee2a884-kube-api-access-fkfw5\") pod \"43e20233-57f1-4b80-8717-1668dee2a884\" (UID: \"43e20233-57f1-4b80-8717-1668dee2a884\") " Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.906122 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1176545e-611b-4fc7-8b03-e91ee7813fd3-combined-ca-bundle\") pod \"1176545e-611b-4fc7-8b03-e91ee7813fd3\" (UID: \"1176545e-611b-4fc7-8b03-e91ee7813fd3\") " Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.906172 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/1176545e-611b-4fc7-8b03-e91ee7813fd3-vencrypt-tls-certs\") pod \"1176545e-611b-4fc7-8b03-e91ee7813fd3\" (UID: \"1176545e-611b-4fc7-8b03-e91ee7813fd3\") " Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.906628 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/b9c2a3b1-71ed-4612-8cd0-22e396cd622c-kube-state-metrics-tls-certs\") pod \"b9c2a3b1-71ed-4612-8cd0-22e396cd622c\" (UID: \"b9c2a3b1-71ed-4612-8cd0-22e396cd622c\") " Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.906654 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/1176545e-611b-4fc7-8b03-e91ee7813fd3-nova-novncproxy-tls-certs\") pod \"1176545e-611b-4fc7-8b03-e91ee7813fd3\" (UID: \"1176545e-611b-4fc7-8b03-e91ee7813fd3\") " Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.906678 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6df6ca0e-78e9-4248-8cbe-b9934e0ad090-operator-scripts\") pod \"6df6ca0e-78e9-4248-8cbe-b9934e0ad090\" (UID: \"6df6ca0e-78e9-4248-8cbe-b9934e0ad090\") " Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.906712 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6df6ca0e-78e9-4248-8cbe-b9934e0ad090-combined-ca-bundle\") pod \"6df6ca0e-78e9-4248-8cbe-b9934e0ad090\" (UID: \"6df6ca0e-78e9-4248-8cbe-b9934e0ad090\") " Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.906746 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/43e20233-57f1-4b80-8717-1668dee2a884-operator-scripts\") pod \"43e20233-57f1-4b80-8717-1668dee2a884\" (UID: \"43e20233-57f1-4b80-8717-1668dee2a884\") " Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.906768 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/6df6ca0e-78e9-4248-8cbe-b9934e0ad090-kolla-config\") pod \"6df6ca0e-78e9-4248-8cbe-b9934e0ad090\" (UID: \"6df6ca0e-78e9-4248-8cbe-b9934e0ad090\") " Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.906791 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mcn45\" (UniqueName: \"kubernetes.io/projected/79c1324d-aa7f-4add-a2c3-12f6d4c6216f-kube-api-access-mcn45\") pod \"79c1324d-aa7f-4add-a2c3-12f6d4c6216f\" (UID: \"79c1324d-aa7f-4add-a2c3-12f6d4c6216f\") " Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.906815 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/6df6ca0e-78e9-4248-8cbe-b9934e0ad090-config-data-generated\") pod \"6df6ca0e-78e9-4248-8cbe-b9934e0ad090\" (UID: \"6df6ca0e-78e9-4248-8cbe-b9934e0ad090\") " Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.906900 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3e05451a-8619-4559-9648-cc40e9ea5bb0-operator-scripts\") pod \"3e05451a-8619-4559-9648-cc40e9ea5bb0\" (UID: \"3e05451a-8619-4559-9648-cc40e9ea5bb0\") " Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.906921 4558 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/79c1324d-aa7f-4add-a2c3-12f6d4c6216f-operator-scripts\") pod \"79c1324d-aa7f-4add-a2c3-12f6d4c6216f\" (UID: \"79c1324d-aa7f-4add-a2c3-12f6d4c6216f\") " Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.906957 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1176545e-611b-4fc7-8b03-e91ee7813fd3-config-data\") pod \"1176545e-611b-4fc7-8b03-e91ee7813fd3\" (UID: \"1176545e-611b-4fc7-8b03-e91ee7813fd3\") " Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.906990 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38786f1c-754c-488d-8a13-aad7001ad778-combined-ca-bundle\") pod \"38786f1c-754c-488d-8a13-aad7001ad778\" (UID: \"38786f1c-754c-488d-8a13-aad7001ad778\") " Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.907006 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-scwwb\" (UniqueName: \"kubernetes.io/projected/38786f1c-754c-488d-8a13-aad7001ad778-kube-api-access-scwwb\") pod \"38786f1c-754c-488d-8a13-aad7001ad778\" (UID: \"38786f1c-754c-488d-8a13-aad7001ad778\") " Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.907046 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nbdpg\" (UniqueName: \"kubernetes.io/projected/b9c2a3b1-71ed-4612-8cd0-22e396cd622c-kube-api-access-nbdpg\") pod \"b9c2a3b1-71ed-4612-8cd0-22e396cd622c\" (UID: \"b9c2a3b1-71ed-4612-8cd0-22e396cd622c\") " Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.907074 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mf2t2\" (UniqueName: \"kubernetes.io/projected/3e05451a-8619-4559-9648-cc40e9ea5bb0-kube-api-access-mf2t2\") pod \"3e05451a-8619-4559-9648-cc40e9ea5bb0\" (UID: \"3e05451a-8619-4559-9648-cc40e9ea5bb0\") " Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.907155 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9c2a3b1-71ed-4612-8cd0-22e396cd622c-combined-ca-bundle\") pod \"b9c2a3b1-71ed-4612-8cd0-22e396cd622c\" (UID: \"b9c2a3b1-71ed-4612-8cd0-22e396cd622c\") " Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.907803 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43e20233-57f1-4b80-8717-1668dee2a884-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "43e20233-57f1-4b80-8717-1668dee2a884" (UID: "43e20233-57f1-4b80-8717-1668dee2a884"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.908652 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6df6ca0e-78e9-4248-8cbe-b9934e0ad090-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "6df6ca0e-78e9-4248-8cbe-b9934e0ad090" (UID: "6df6ca0e-78e9-4248-8cbe-b9934e0ad090"). InnerVolumeSpecName "config-data-default". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.909241 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3e05451a-8619-4559-9648-cc40e9ea5bb0-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "3e05451a-8619-4559-9648-cc40e9ea5bb0" (UID: "3e05451a-8619-4559-9648-cc40e9ea5bb0"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.910078 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6df6ca0e-78e9-4248-8cbe-b9934e0ad090-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "6df6ca0e-78e9-4248-8cbe-b9934e0ad090" (UID: "6df6ca0e-78e9-4248-8cbe-b9934e0ad090"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.910863 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6df6ca0e-78e9-4248-8cbe-b9934e0ad090-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "6df6ca0e-78e9-4248-8cbe-b9934e0ad090" (UID: "6df6ca0e-78e9-4248-8cbe-b9934e0ad090"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.911249 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/79c1324d-aa7f-4add-a2c3-12f6d4c6216f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "79c1324d-aa7f-4add-a2c3-12f6d4c6216f" (UID: "79c1324d-aa7f-4add-a2c3-12f6d4c6216f"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.917755 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e05451a-8619-4559-9648-cc40e9ea5bb0-kube-api-access-mf2t2" (OuterVolumeSpecName: "kube-api-access-mf2t2") pod "3e05451a-8619-4559-9648-cc40e9ea5bb0" (UID: "3e05451a-8619-4559-9648-cc40e9ea5bb0"). InnerVolumeSpecName "kube-api-access-mf2t2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.917840 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1176545e-611b-4fc7-8b03-e91ee7813fd3-kube-api-access-9mkqv" (OuterVolumeSpecName: "kube-api-access-9mkqv") pod "1176545e-611b-4fc7-8b03-e91ee7813fd3" (UID: "1176545e-611b-4fc7-8b03-e91ee7813fd3"). InnerVolumeSpecName "kube-api-access-9mkqv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.917892 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43e20233-57f1-4b80-8717-1668dee2a884-kube-api-access-fkfw5" (OuterVolumeSpecName: "kube-api-access-fkfw5") pod "43e20233-57f1-4b80-8717-1668dee2a884" (UID: "43e20233-57f1-4b80-8717-1668dee2a884"). InnerVolumeSpecName "kube-api-access-fkfw5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.928416 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6df6ca0e-78e9-4248-8cbe-b9934e0ad090-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "6df6ca0e-78e9-4248-8cbe-b9934e0ad090" (UID: "6df6ca0e-78e9-4248-8cbe-b9934e0ad090"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.929740 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.932383 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b9c2a3b1-71ed-4612-8cd0-22e396cd622c-kube-api-access-nbdpg" (OuterVolumeSpecName: "kube-api-access-nbdpg") pod "b9c2a3b1-71ed-4612-8cd0-22e396cd622c" (UID: "b9c2a3b1-71ed-4612-8cd0-22e396cd622c"). InnerVolumeSpecName "kube-api-access-nbdpg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.932559 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6df6ca0e-78e9-4248-8cbe-b9934e0ad090-kube-api-access-bbllh" (OuterVolumeSpecName: "kube-api-access-bbllh") pod "6df6ca0e-78e9-4248-8cbe-b9934e0ad090" (UID: "6df6ca0e-78e9-4248-8cbe-b9934e0ad090"). InnerVolumeSpecName "kube-api-access-bbllh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.939587 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/79c1324d-aa7f-4add-a2c3-12f6d4c6216f-kube-api-access-mcn45" (OuterVolumeSpecName: "kube-api-access-mcn45") pod "79c1324d-aa7f-4add-a2c3-12f6d4c6216f" (UID: "79c1324d-aa7f-4add-a2c3-12f6d4c6216f"). InnerVolumeSpecName "kube-api-access-mcn45". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.946511 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/38786f1c-754c-488d-8a13-aad7001ad778-kube-api-access-scwwb" (OuterVolumeSpecName: "kube-api-access-scwwb") pod "38786f1c-754c-488d-8a13-aad7001ad778" (UID: "38786f1c-754c-488d-8a13-aad7001ad778"). InnerVolumeSpecName "kube-api-access-scwwb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.956193 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage17-crc" (OuterVolumeSpecName: "mysql-db") pod "6df6ca0e-78e9-4248-8cbe-b9934e0ad090" (UID: "6df6ca0e-78e9-4248-8cbe-b9934e0ad090"). InnerVolumeSpecName "local-storage17-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.963465 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9c2a3b1-71ed-4612-8cd0-22e396cd622c-kube-state-metrics-tls-config" (OuterVolumeSpecName: "kube-state-metrics-tls-config") pod "b9c2a3b1-71ed-4612-8cd0-22e396cd622c" (UID: "b9c2a3b1-71ed-4612-8cd0-22e396cd622c"). InnerVolumeSpecName "kube-state-metrics-tls-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:46 crc kubenswrapper[4558]: I0120 17:05:46.984845 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/38786f1c-754c-488d-8a13-aad7001ad778-config-data" (OuterVolumeSpecName: "config-data") pod "38786f1c-754c-488d-8a13-aad7001ad778" (UID: "38786f1c-754c-488d-8a13-aad7001ad778"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.005071 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.010441 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef319bad-004a-4140-b9a3-e34b376460da-combined-ca-bundle\") pod \"barbican-keystone-listener-767d5d98bd-zk227\" (UID: \"ef319bad-004a-4140-b9a3-e34b376460da\") " pod="openstack-kuttl-tests/barbican-keystone-listener-767d5d98bd-zk227" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.010951 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage17-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage17-crc\") on node \"crc\" " Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.010973 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9mkqv\" (UniqueName: \"kubernetes.io/projected/1176545e-611b-4fc7-8b03-e91ee7813fd3-kube-api-access-9mkqv\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.010984 4558 reconciler_common.go:293] "Volume detached for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/b9c2a3b1-71ed-4612-8cd0-22e396cd622c-kube-state-metrics-tls-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.010993 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/38786f1c-754c-488d-8a13-aad7001ad778-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.011002 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/6df6ca0e-78e9-4248-8cbe-b9934e0ad090-config-data-default\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.011010 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bbllh\" (UniqueName: \"kubernetes.io/projected/6df6ca0e-78e9-4248-8cbe-b9934e0ad090-kube-api-access-bbllh\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.011039 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fkfw5\" (UniqueName: \"kubernetes.io/projected/43e20233-57f1-4b80-8717-1668dee2a884-kube-api-access-fkfw5\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.011047 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6df6ca0e-78e9-4248-8cbe-b9934e0ad090-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.011055 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/43e20233-57f1-4b80-8717-1668dee2a884-operator-scripts\") on 
node \"crc\" DevicePath \"\"" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.011063 4558 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/6df6ca0e-78e9-4248-8cbe-b9934e0ad090-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.011075 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mcn45\" (UniqueName: \"kubernetes.io/projected/79c1324d-aa7f-4add-a2c3-12f6d4c6216f-kube-api-access-mcn45\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.011083 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/6df6ca0e-78e9-4248-8cbe-b9934e0ad090-config-data-generated\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.011143 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3e05451a-8619-4559-9648-cc40e9ea5bb0-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.011155 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/79c1324d-aa7f-4add-a2c3-12f6d4c6216f-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.011187 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-scwwb\" (UniqueName: \"kubernetes.io/projected/38786f1c-754c-488d-8a13-aad7001ad778-kube-api-access-scwwb\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.011197 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nbdpg\" (UniqueName: \"kubernetes.io/projected/b9c2a3b1-71ed-4612-8cd0-22e396cd622c-kube-api-access-nbdpg\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.011205 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mf2t2\" (UniqueName: \"kubernetes.io/projected/3e05451a-8619-4559-9648-cc40e9ea5bb0-kube-api-access-mf2t2\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:47 crc kubenswrapper[4558]: E0120 17:05:47.011788 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.012011 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9c2a3b1-71ed-4612-8cd0-22e396cd622c-kube-state-metrics-tls-certs" (OuterVolumeSpecName: "kube-state-metrics-tls-certs") pod "b9c2a3b1-71ed-4612-8cd0-22e396cd622c" (UID: "b9c2a3b1-71ed-4612-8cd0-22e396cd622c"). InnerVolumeSpecName "kube-state-metrics-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:47 crc kubenswrapper[4558]: E0120 17:05:47.012402 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ef319bad-004a-4140-b9a3-e34b376460da-combined-ca-bundle podName:ef319bad-004a-4140-b9a3-e34b376460da nodeName:}" failed. No retries permitted until 2026-01-20 17:05:51.012382405 +0000 UTC m=+1444.772720372 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/ef319bad-004a-4140-b9a3-e34b376460da-combined-ca-bundle") pod "barbican-keystone-listener-767d5d98bd-zk227" (UID: "ef319bad-004a-4140-b9a3-e34b376460da") : secret "combined-ca-bundle" not found Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.013577 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1176545e-611b-4fc7-8b03-e91ee7813fd3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1176545e-611b-4fc7-8b03-e91ee7813fd3" (UID: "1176545e-611b-4fc7-8b03-e91ee7813fd3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.023400 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1176545e-611b-4fc7-8b03-e91ee7813fd3-nova-novncproxy-tls-certs" (OuterVolumeSpecName: "nova-novncproxy-tls-certs") pod "1176545e-611b-4fc7-8b03-e91ee7813fd3" (UID: "1176545e-611b-4fc7-8b03-e91ee7813fd3"). InnerVolumeSpecName "nova-novncproxy-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.029645 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage17-crc" (UniqueName: "kubernetes.io/local-volume/local-storage17-crc") on node "crc" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.032626 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1176545e-611b-4fc7-8b03-e91ee7813fd3-config-data" (OuterVolumeSpecName: "config-data") pod "1176545e-611b-4fc7-8b03-e91ee7813fd3" (UID: "1176545e-611b-4fc7-8b03-e91ee7813fd3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:47 crc kubenswrapper[4558]: E0120 17:05:47.047867 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6df6ca0e-78e9-4248-8cbe-b9934e0ad090-galera-tls-certs podName:6df6ca0e-78e9-4248-8cbe-b9934e0ad090 nodeName:}" failed. No retries permitted until 2026-01-20 17:05:47.547842382 +0000 UTC m=+1441.308180349 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "galera-tls-certs" (UniqueName: "kubernetes.io/secret/6df6ca0e-78e9-4248-8cbe-b9934e0ad090-galera-tls-certs") pod "6df6ca0e-78e9-4248-8cbe-b9934e0ad090" (UID: "6df6ca0e-78e9-4248-8cbe-b9934e0ad090") : error deleting /var/lib/kubelet/pods/6df6ca0e-78e9-4248-8cbe-b9934e0ad090/volume-subpaths: remove /var/lib/kubelet/pods/6df6ca0e-78e9-4248-8cbe-b9934e0ad090/volume-subpaths: no such file or directory Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.051553 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6df6ca0e-78e9-4248-8cbe-b9934e0ad090-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6df6ca0e-78e9-4248-8cbe-b9934e0ad090" (UID: "6df6ca0e-78e9-4248-8cbe-b9934e0ad090"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.051904 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/38786f1c-754c-488d-8a13-aad7001ad778-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "38786f1c-754c-488d-8a13-aad7001ad778" (UID: "38786f1c-754c-488d-8a13-aad7001ad778"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.052717 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9c2a3b1-71ed-4612-8cd0-22e396cd622c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b9c2a3b1-71ed-4612-8cd0-22e396cd622c" (UID: "b9c2a3b1-71ed-4612-8cd0-22e396cd622c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.066609 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1176545e-611b-4fc7-8b03-e91ee7813fd3-vencrypt-tls-certs" (OuterVolumeSpecName: "vencrypt-tls-certs") pod "1176545e-611b-4fc7-8b03-e91ee7813fd3" (UID: "1176545e-611b-4fc7-8b03-e91ee7813fd3"). InnerVolumeSpecName "vencrypt-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.112252 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aed5856e-9412-48a1-ac94-cc2f4bd05633-combined-ca-bundle\") pod \"aed5856e-9412-48a1-ac94-cc2f4bd05633\" (UID: \"aed5856e-9412-48a1-ac94-cc2f4bd05633\") " Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.112337 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1f9ceb49-977a-47b3-a1f3-10d68c96ab0f-config-data\") pod \"1f9ceb49-977a-47b3-a1f3-10d68c96ab0f\" (UID: \"1f9ceb49-977a-47b3-a1f3-10d68c96ab0f\") " Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.112363 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1f9ceb49-977a-47b3-a1f3-10d68c96ab0f-config-data-custom\") pod \"1f9ceb49-977a-47b3-a1f3-10d68c96ab0f\" (UID: \"1f9ceb49-977a-47b3-a1f3-10d68c96ab0f\") " Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.112396 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/aed5856e-9412-48a1-ac94-cc2f4bd05633-kolla-config\") pod \"aed5856e-9412-48a1-ac94-cc2f4bd05633\" (UID: \"aed5856e-9412-48a1-ac94-cc2f4bd05633\") " Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.112419 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1f9ceb49-977a-47b3-a1f3-10d68c96ab0f-scripts\") pod \"1f9ceb49-977a-47b3-a1f3-10d68c96ab0f\" (UID: \"1f9ceb49-977a-47b3-a1f3-10d68c96ab0f\") " Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.112447 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/aed5856e-9412-48a1-ac94-cc2f4bd05633-memcached-tls-certs\") pod \"aed5856e-9412-48a1-ac94-cc2f4bd05633\" (UID: \"aed5856e-9412-48a1-ac94-cc2f4bd05633\") " Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.112884 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aed5856e-9412-48a1-ac94-cc2f4bd05633-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "aed5856e-9412-48a1-ac94-cc2f4bd05633" (UID: "aed5856e-9412-48a1-ac94-cc2f4bd05633"). InnerVolumeSpecName "kolla-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.114461 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/aed5856e-9412-48a1-ac94-cc2f4bd05633-config-data\") pod \"aed5856e-9412-48a1-ac94-cc2f4bd05633\" (UID: \"aed5856e-9412-48a1-ac94-cc2f4bd05633\") " Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.114553 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hh87b\" (UniqueName: \"kubernetes.io/projected/1f9ceb49-977a-47b3-a1f3-10d68c96ab0f-kube-api-access-hh87b\") pod \"1f9ceb49-977a-47b3-a1f3-10d68c96ab0f\" (UID: \"1f9ceb49-977a-47b3-a1f3-10d68c96ab0f\") " Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.114636 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1f9ceb49-977a-47b3-a1f3-10d68c96ab0f-etc-machine-id\") pod \"1f9ceb49-977a-47b3-a1f3-10d68c96ab0f\" (UID: \"1f9ceb49-977a-47b3-a1f3-10d68c96ab0f\") " Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.114698 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f9ceb49-977a-47b3-a1f3-10d68c96ab0f-combined-ca-bundle\") pod \"1f9ceb49-977a-47b3-a1f3-10d68c96ab0f\" (UID: \"1f9ceb49-977a-47b3-a1f3-10d68c96ab0f\") " Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.115352 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1f9ceb49-977a-47b3-a1f3-10d68c96ab0f-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "1f9ceb49-977a-47b3-a1f3-10d68c96ab0f" (UID: "1f9ceb49-977a-47b3-a1f3-10d68c96ab0f"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.115687 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aed5856e-9412-48a1-ac94-cc2f4bd05633-config-data" (OuterVolumeSpecName: "config-data") pod "aed5856e-9412-48a1-ac94-cc2f4bd05633" (UID: "aed5856e-9412-48a1-ac94-cc2f4bd05633"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.117214 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jfg2s\" (UniqueName: \"kubernetes.io/projected/aed5856e-9412-48a1-ac94-cc2f4bd05633-kube-api-access-jfg2s\") pod \"aed5856e-9412-48a1-ac94-cc2f4bd05633\" (UID: \"aed5856e-9412-48a1-ac94-cc2f4bd05633\") " Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.117837 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0fcabe47-75cd-4bff-ba51-8a65a23b3f1f-combined-ca-bundle\") pod \"barbican-worker-57fd8b7745-hswtl\" (UID: \"0fcabe47-75cd-4bff-ba51-8a65a23b3f1f\") " pod="openstack-kuttl-tests/barbican-worker-57fd8b7745-hswtl" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.117976 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38786f1c-754c-488d-8a13-aad7001ad778-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.117990 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1f9ceb49-977a-47b3-a1f3-10d68c96ab0f-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.118001 4558 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/aed5856e-9412-48a1-ac94-cc2f4bd05633-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.118011 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9c2a3b1-71ed-4612-8cd0-22e396cd622c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.118022 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage17-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage17-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.118031 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/aed5856e-9412-48a1-ac94-cc2f4bd05633-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.118040 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1176545e-611b-4fc7-8b03-e91ee7813fd3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.118049 4558 reconciler_common.go:293] "Volume detached for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/1176545e-611b-4fc7-8b03-e91ee7813fd3-vencrypt-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.118058 4558 reconciler_common.go:293] "Volume detached for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/b9c2a3b1-71ed-4612-8cd0-22e396cd622c-kube-state-metrics-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.118068 4558 reconciler_common.go:293] "Volume detached for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/1176545e-611b-4fc7-8b03-e91ee7813fd3-nova-novncproxy-tls-certs\") on node \"crc\" DevicePath \"\"" 
Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.118077 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6df6ca0e-78e9-4248-8cbe-b9934e0ad090-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.118086 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1176545e-611b-4fc7-8b03-e91ee7813fd3-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:47 crc kubenswrapper[4558]: E0120 17:05:47.118206 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:05:47 crc kubenswrapper[4558]: E0120 17:05:47.118251 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0fcabe47-75cd-4bff-ba51-8a65a23b3f1f-combined-ca-bundle podName:0fcabe47-75cd-4bff-ba51-8a65a23b3f1f nodeName:}" failed. No retries permitted until 2026-01-20 17:05:51.11823766 +0000 UTC m=+1444.878575627 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/0fcabe47-75cd-4bff-ba51-8a65a23b3f1f-combined-ca-bundle") pod "barbican-worker-57fd8b7745-hswtl" (UID: "0fcabe47-75cd-4bff-ba51-8a65a23b3f1f") : secret "combined-ca-bundle" not found Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.119257 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1f9ceb49-977a-47b3-a1f3-10d68c96ab0f-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "1f9ceb49-977a-47b3-a1f3-10d68c96ab0f" (UID: "1f9ceb49-977a-47b3-a1f3-10d68c96ab0f"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:05:47 crc kubenswrapper[4558]: E0120 17:05:47.120843 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Jan 20 17:05:47 crc kubenswrapper[4558]: E0120 17:05:47.121207 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/0bfc3458-cc0f-4bea-9794-52c5e81fe055-config-data podName:0bfc3458-cc0f-4bea-9794-52c5e81fe055 nodeName:}" failed. No retries permitted until 2026-01-20 17:05:51.121158351 +0000 UTC m=+1444.881496319 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/0bfc3458-cc0f-4bea-9794-52c5e81fe055-config-data") pod "rabbitmq-cell1-server-0" (UID: "0bfc3458-cc0f-4bea-9794-52c5e81fe055") : configmap "rabbitmq-cell1-config-data" not found Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.125218 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1f9ceb49-977a-47b3-a1f3-10d68c96ab0f-scripts" (OuterVolumeSpecName: "scripts") pod "1f9ceb49-977a-47b3-a1f3-10d68c96ab0f" (UID: "1f9ceb49-977a-47b3-a1f3-10d68c96ab0f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.125954 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1f9ceb49-977a-47b3-a1f3-10d68c96ab0f-kube-api-access-hh87b" (OuterVolumeSpecName: "kube-api-access-hh87b") pod "1f9ceb49-977a-47b3-a1f3-10d68c96ab0f" (UID: "1f9ceb49-977a-47b3-a1f3-10d68c96ab0f"). InnerVolumeSpecName "kube-api-access-hh87b". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.129110 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aed5856e-9412-48a1-ac94-cc2f4bd05633-kube-api-access-jfg2s" (OuterVolumeSpecName: "kube-api-access-jfg2s") pod "aed5856e-9412-48a1-ac94-cc2f4bd05633" (UID: "aed5856e-9412-48a1-ac94-cc2f4bd05633"). InnerVolumeSpecName "kube-api-access-jfg2s". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.136710 4558 generic.go:334] "Generic (PLEG): container finished" podID="1f9ceb49-977a-47b3-a1f3-10d68c96ab0f" containerID="ea575d2847a6cae8660fd45625a0be98895c2c6f684014b389a8ca1cc29ab736" exitCode=0 Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.136822 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"1f9ceb49-977a-47b3-a1f3-10d68c96ab0f","Type":"ContainerDied","Data":"ea575d2847a6cae8660fd45625a0be98895c2c6f684014b389a8ca1cc29ab736"} Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.136886 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"1f9ceb49-977a-47b3-a1f3-10d68c96ab0f","Type":"ContainerDied","Data":"6e9fb4ffddd82ffd7a6eb8aed3420075d665cc16aa8a41ff92116bdd4299c6a4"} Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.136916 4558 scope.go:117] "RemoveContainer" containerID="26bbc4f2aaf805eb72b1daca0bda5b9b7dad475314c7f9cbfdfce43f6fb9bfbc" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.137129 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.143749 4558 generic.go:334] "Generic (PLEG): container finished" podID="6df6ca0e-78e9-4248-8cbe-b9934e0ad090" containerID="3ce6116ef9c85330a51ece86b35504a1ac4b48ba623b6c363ad2b015e33fec35" exitCode=0 Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.143811 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"6df6ca0e-78e9-4248-8cbe-b9934e0ad090","Type":"ContainerDied","Data":"3ce6116ef9c85330a51ece86b35504a1ac4b48ba623b6c363ad2b015e33fec35"} Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.143841 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"6df6ca0e-78e9-4248-8cbe-b9934e0ad090","Type":"ContainerDied","Data":"bb9a7be9d730d7a53cc3193f2bf9b7844d3c2fc3d1f33cd0fe0c6034fbf0a2b0"} Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.143904 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.145266 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aed5856e-9412-48a1-ac94-cc2f4bd05633-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "aed5856e-9412-48a1-ac94-cc2f4bd05633" (UID: "aed5856e-9412-48a1-ac94-cc2f4bd05633"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.153346 4558 generic.go:334] "Generic (PLEG): container finished" podID="1176545e-611b-4fc7-8b03-e91ee7813fd3" containerID="6b8fce6bafaad0a422a208a3cb35d584fc27bf9b3ba41bf28947f865c272fe06" exitCode=0 Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.153409 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"1176545e-611b-4fc7-8b03-e91ee7813fd3","Type":"ContainerDied","Data":"6b8fce6bafaad0a422a208a3cb35d584fc27bf9b3ba41bf28947f865c272fe06"} Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.153436 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"1176545e-611b-4fc7-8b03-e91ee7813fd3","Type":"ContainerDied","Data":"b7a110a5acf1795f5d71c5768e80b2ceb59f8ca89b967a6f5e9cb5c240b37f68"} Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.153490 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.155358 4558 generic.go:334] "Generic (PLEG): container finished" podID="6c75ee5c-beea-4b1e-b429-91e83a472529" containerID="aa3653d2225383bf934da0f655ed12217fcafbbb93b425d7a7cf38609e8256e9" exitCode=143 Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.155446 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-869df765b4-9hk5q" event={"ID":"6c75ee5c-beea-4b1e-b429-91e83a472529","Type":"ContainerDied","Data":"aa3653d2225383bf934da0f655ed12217fcafbbb93b425d7a7cf38609e8256e9"} Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.157410 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-aebd-account-create-update-z79l4" event={"ID":"3e05451a-8619-4559-9648-cc40e9ea5bb0","Type":"ContainerDied","Data":"007eec0dd27bcdbed48c82b64f44f510ea69933870ac097d391773eeee037bf6"} Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.157475 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-aebd-account-create-update-z79l4" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.161045 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-595b48fd98-kbjv6" event={"ID":"32a3bcd7-25d6-45f5-8ce6-66949357504c","Type":"ContainerDied","Data":"aca90fe826962a0425f1b8f940f88055423c5f619ddea8b4007cc84ac876fd8a"} Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.161138 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-proxy-595b48fd98-kbjv6" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.163655 4558 generic.go:334] "Generic (PLEG): container finished" podID="aed5856e-9412-48a1-ac94-cc2f4bd05633" containerID="8029c61f17dcdbc8fe0413af1d186a7d097951d5e9f7472abca626983580e5f9" exitCode=0 Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.163686 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.163726 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"aed5856e-9412-48a1-ac94-cc2f4bd05633","Type":"ContainerDied","Data":"8029c61f17dcdbc8fe0413af1d186a7d097951d5e9f7472abca626983580e5f9"} Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.163748 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"aed5856e-9412-48a1-ac94-cc2f4bd05633","Type":"ContainerDied","Data":"0f730c94c3c1e790432bf0de8379ee4742303bb73bc2fffb2de4fb18a9670da2"} Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.167541 4558 scope.go:117] "RemoveContainer" containerID="ea575d2847a6cae8660fd45625a0be98895c2c6f684014b389a8ca1cc29ab736" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.171636 4558 generic.go:334] "Generic (PLEG): container finished" podID="ad58cc97-cbed-48c8-ab51-ebb920a1454c" containerID="5f0d33357c659b175318f67f48f0766deb32c17bae3e330e4f6f14c73b8bc529" exitCode=0 Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.171715 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"ad58cc97-cbed-48c8-ab51-ebb920a1454c","Type":"ContainerDied","Data":"5f0d33357c659b175318f67f48f0766deb32c17bae3e330e4f6f14c73b8bc529"} Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.173631 4558 generic.go:334] "Generic (PLEG): container finished" podID="25a5f425-926a-40eb-8f77-fcc3cdd9880c" containerID="1780af7f16c0ecd4cd006248610583e6b3a6c467aa3d8978ecf2993a1d76cdc8" exitCode=0 Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.173684 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-kwtcx" event={"ID":"25a5f425-926a-40eb-8f77-fcc3cdd9880c","Type":"ContainerDied","Data":"1780af7f16c0ecd4cd006248610583e6b3a6c467aa3d8978ecf2993a1d76cdc8"} Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.174083 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aed5856e-9412-48a1-ac94-cc2f4bd05633-memcached-tls-certs" (OuterVolumeSpecName: "memcached-tls-certs") pod "aed5856e-9412-48a1-ac94-cc2f4bd05633" (UID: "aed5856e-9412-48a1-ac94-cc2f4bd05633"). InnerVolumeSpecName "memcached-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.176874 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-8d7e-account-create-update-jzmzm" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.177284 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-8d7e-account-create-update-jzmzm" event={"ID":"43e20233-57f1-4b80-8717-1668dee2a884","Type":"ContainerDied","Data":"4ab5b7132c0a8b17725a8ac7b1101d96ed6ee666eac819a6a5f4700277179686"} Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.185436 4558 generic.go:334] "Generic (PLEG): container finished" podID="bfbf270c-1853-4113-b0e4-6d192abb5c5d" containerID="880f39ca241fd9f400e76ce2cfd51b2dad6e3d3b73759064be5dea602507b366" exitCode=143 Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.187252 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-599d6c8df7-lcnrd" event={"ID":"bfbf270c-1853-4113-b0e4-6d192abb5c5d","Type":"ContainerDied","Data":"880f39ca241fd9f400e76ce2cfd51b2dad6e3d3b73759064be5dea602507b366"} Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.191952 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.193203 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.193316 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-bfd5-account-create-update-rvnw4" event={"ID":"79c1324d-aa7f-4add-a2c3-12f6d4c6216f","Type":"ContainerDied","Data":"2dc7324ebd2373169b2bc30a14b997917f8e31a9070dd2b5e660a188293bcf87"} Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.193361 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-bfd5-account-create-update-rvnw4" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.196819 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1f9ceb49-977a-47b3-a1f3-10d68c96ab0f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1f9ceb49-977a-47b3-a1f3-10d68c96ab0f" (UID: "1f9ceb49-977a-47b3-a1f3-10d68c96ab0f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.209812 4558 scope.go:117] "RemoveContainer" containerID="26bbc4f2aaf805eb72b1daca0bda5b9b7dad475314c7f9cbfdfce43f6fb9bfbc" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.210057 4558 generic.go:334] "Generic (PLEG): container finished" podID="b9c2a3b1-71ed-4612-8cd0-22e396cd622c" containerID="d79bd7c00910cb1ea357df6713d24c89de9eda30118ccacbc0177383e7169dd9" exitCode=2 Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.210129 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"b9c2a3b1-71ed-4612-8cd0-22e396cd622c","Type":"ContainerDied","Data":"d79bd7c00910cb1ea357df6713d24c89de9eda30118ccacbc0177383e7169dd9"} Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.210180 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"b9c2a3b1-71ed-4612-8cd0-22e396cd622c","Type":"ContainerDied","Data":"24d2ad8b347da988c262c7a6dbc7c0779cc855440fe9d2a82930189a3c6b9e16"} Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.210270 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:05:47 crc kubenswrapper[4558]: E0120 17:05:47.213034 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"26bbc4f2aaf805eb72b1daca0bda5b9b7dad475314c7f9cbfdfce43f6fb9bfbc\": container with ID starting with 26bbc4f2aaf805eb72b1daca0bda5b9b7dad475314c7f9cbfdfce43f6fb9bfbc not found: ID does not exist" containerID="26bbc4f2aaf805eb72b1daca0bda5b9b7dad475314c7f9cbfdfce43f6fb9bfbc" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.213239 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"26bbc4f2aaf805eb72b1daca0bda5b9b7dad475314c7f9cbfdfce43f6fb9bfbc"} err="failed to get container status \"26bbc4f2aaf805eb72b1daca0bda5b9b7dad475314c7f9cbfdfce43f6fb9bfbc\": rpc error: code = NotFound desc = could not find container \"26bbc4f2aaf805eb72b1daca0bda5b9b7dad475314c7f9cbfdfce43f6fb9bfbc\": container with ID starting with 26bbc4f2aaf805eb72b1daca0bda5b9b7dad475314c7f9cbfdfce43f6fb9bfbc not found: ID does not exist" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.213278 4558 scope.go:117] "RemoveContainer" containerID="ea575d2847a6cae8660fd45625a0be98895c2c6f684014b389a8ca1cc29ab736" Jan 20 17:05:47 crc kubenswrapper[4558]: E0120 17:05:47.213625 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ea575d2847a6cae8660fd45625a0be98895c2c6f684014b389a8ca1cc29ab736\": container with ID starting with ea575d2847a6cae8660fd45625a0be98895c2c6f684014b389a8ca1cc29ab736 not found: ID does not exist" containerID="ea575d2847a6cae8660fd45625a0be98895c2c6f684014b389a8ca1cc29ab736" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.213672 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ea575d2847a6cae8660fd45625a0be98895c2c6f684014b389a8ca1cc29ab736"} err="failed to get container status \"ea575d2847a6cae8660fd45625a0be98895c2c6f684014b389a8ca1cc29ab736\": rpc error: code = NotFound desc = could not find container \"ea575d2847a6cae8660fd45625a0be98895c2c6f684014b389a8ca1cc29ab736\": container with ID starting with ea575d2847a6cae8660fd45625a0be98895c2c6f684014b389a8ca1cc29ab736 not found: ID does not exist" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.213702 4558 scope.go:117] "RemoveContainer" containerID="3ce6116ef9c85330a51ece86b35504a1ac4b48ba623b6c363ad2b015e33fec35" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.216571 4558 generic.go:334] "Generic (PLEG): container finished" podID="19189ead-bcd7-4806-be88-43cc27d5f202" containerID="80a7fbf9ebd999f9e255d17b6edb57d7cd1a880d930872a51bd1d1c2a19b20a5" exitCode=1 Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.216644 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-vcf64" event={"ID":"19189ead-bcd7-4806-be88-43cc27d5f202","Type":"ContainerDied","Data":"80a7fbf9ebd999f9e255d17b6edb57d7cd1a880d930872a51bd1d1c2a19b20a5"} Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.221247 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1f9ceb49-977a-47b3-a1f3-10d68c96ab0f-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.221332 4558 reconciler_common.go:293] "Volume detached for volume \"memcached-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/aed5856e-9412-48a1-ac94-cc2f4bd05633-memcached-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.221370 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hh87b\" (UniqueName: \"kubernetes.io/projected/1f9ceb49-977a-47b3-a1f3-10d68c96ab0f-kube-api-access-hh87b\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.221381 4558 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1f9ceb49-977a-47b3-a1f3-10d68c96ab0f-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.221390 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f9ceb49-977a-47b3-a1f3-10d68c96ab0f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.221399 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jfg2s\" (UniqueName: \"kubernetes.io/projected/aed5856e-9412-48a1-ac94-cc2f4bd05633-kube-api-access-jfg2s\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.221412 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aed5856e-9412-48a1-ac94-cc2f4bd05633-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.229151 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-proxy-595b48fd98-kbjv6"] Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.233142 4558 generic.go:334] "Generic (PLEG): container finished" podID="38786f1c-754c-488d-8a13-aad7001ad778" containerID="95f19d7a9e9522b1398a79cebf0602dd58a2e2f0ffdd88865efd947c83bbb895" exitCode=0 Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.233381 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.233406 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"38786f1c-754c-488d-8a13-aad7001ad778","Type":"ContainerDied","Data":"95f19d7a9e9522b1398a79cebf0602dd58a2e2f0ffdd88865efd947c83bbb895"} Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.233733 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"38786f1c-754c-488d-8a13-aad7001ad778","Type":"ContainerDied","Data":"b0201f422520b19ef548df4a913ff33872222b88fc9de492698fde77bdab6397"} Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.237977 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstackclient" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.242468 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.243653 4558 generic.go:334] "Generic (PLEG): container finished" podID="bc2c7d29-d967-414d-8bb9-1bf70bcacdcd" containerID="ca91ff4227eff077e8448a3f7e4d144f18ec13ff9ee590460efb5d52dcae5abe" exitCode=143 Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.243838 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-65f6b4bfbb-llhdw" event={"ID":"bc2c7d29-d967-414d-8bb9-1bf70bcacdcd","Type":"ContainerDied","Data":"ca91ff4227eff077e8448a3f7e4d144f18ec13ff9ee590460efb5d52dcae5abe"} Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.243929 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-6218-account-create-update-gkxlw" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.246636 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-57fd8b7745-hswtl" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.246652 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-767d5d98bd-zk227" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.258905 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1f9ceb49-977a-47b3-a1f3-10d68c96ab0f-config-data" (OuterVolumeSpecName: "config-data") pod "1f9ceb49-977a-47b3-a1f3-10d68c96ab0f" (UID: "1f9ceb49-977a-47b3-a1f3-10d68c96ab0f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.259497 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/swift-proxy-595b48fd98-kbjv6"] Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.292227 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-aebd-account-create-update-z79l4"] Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.296665 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell0-aebd-account-create-update-z79l4"] Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.330068 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1f9ceb49-977a-47b3-a1f3-10d68c96ab0f-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.333138 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-8d7e-account-create-update-jzmzm"] Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.363936 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-8d7e-account-create-update-jzmzm"] Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.411281 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-6218-account-create-update-gkxlw" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.412881 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-57fd8b7745-hswtl" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.431482 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k7ntl\" (UniqueName: \"kubernetes.io/projected/ad58cc97-cbed-48c8-ab51-ebb920a1454c-kube-api-access-k7ntl\") pod \"ad58cc97-cbed-48c8-ab51-ebb920a1454c\" (UID: \"ad58cc97-cbed-48c8-ab51-ebb920a1454c\") " Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.431535 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/ad58cc97-cbed-48c8-ab51-ebb920a1454c-config-data-default\") pod \"ad58cc97-cbed-48c8-ab51-ebb920a1454c\" (UID: \"ad58cc97-cbed-48c8-ab51-ebb920a1454c\") " Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.432825 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/ad58cc97-cbed-48c8-ab51-ebb920a1454c-config-data-generated\") pod \"ad58cc97-cbed-48c8-ab51-ebb920a1454c\" (UID: \"ad58cc97-cbed-48c8-ab51-ebb920a1454c\") " Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.432979 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/ad58cc97-cbed-48c8-ab51-ebb920a1454c-kolla-config\") pod \"ad58cc97-cbed-48c8-ab51-ebb920a1454c\" (UID: \"ad58cc97-cbed-48c8-ab51-ebb920a1454c\") " Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.433008 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/ad58cc97-cbed-48c8-ab51-ebb920a1454c-galera-tls-certs\") pod \"ad58cc97-cbed-48c8-ab51-ebb920a1454c\" (UID: \"ad58cc97-cbed-48c8-ab51-ebb920a1454c\") " Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.433081 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad58cc97-cbed-48c8-ab51-ebb920a1454c-combined-ca-bundle\") pod \"ad58cc97-cbed-48c8-ab51-ebb920a1454c\" (UID: \"ad58cc97-cbed-48c8-ab51-ebb920a1454c\") " Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.433107 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"ad58cc97-cbed-48c8-ab51-ebb920a1454c\" (UID: \"ad58cc97-cbed-48c8-ab51-ebb920a1454c\") " Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.433136 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ad58cc97-cbed-48c8-ab51-ebb920a1454c-operator-scripts\") pod \"ad58cc97-cbed-48c8-ab51-ebb920a1454c\" (UID: \"ad58cc97-cbed-48c8-ab51-ebb920a1454c\") " Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.433608 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ad58cc97-cbed-48c8-ab51-ebb920a1454c-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "ad58cc97-cbed-48c8-ab51-ebb920a1454c" (UID: "ad58cc97-cbed-48c8-ab51-ebb920a1454c"). InnerVolumeSpecName "config-data-default". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.434177 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ad58cc97-cbed-48c8-ab51-ebb920a1454c-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "ad58cc97-cbed-48c8-ab51-ebb920a1454c" (UID: "ad58cc97-cbed-48c8-ab51-ebb920a1454c"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.434749 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ad58cc97-cbed-48c8-ab51-ebb920a1454c-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "ad58cc97-cbed-48c8-ab51-ebb920a1454c" (UID: "ad58cc97-cbed-48c8-ab51-ebb920a1454c"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.435254 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ad58cc97-cbed-48c8-ab51-ebb920a1454c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ad58cc97-cbed-48c8-ab51-ebb920a1454c" (UID: "ad58cc97-cbed-48c8-ab51-ebb920a1454c"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.441323 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ad58cc97-cbed-48c8-ab51-ebb920a1454c-kube-api-access-k7ntl" (OuterVolumeSpecName: "kube-api-access-k7ntl") pod "ad58cc97-cbed-48c8-ab51-ebb920a1454c" (UID: "ad58cc97-cbed-48c8-ab51-ebb920a1454c"). InnerVolumeSpecName "kube-api-access-k7ntl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.441424 4558 scope.go:117] "RemoveContainer" containerID="e2b2159ece9fe35f6232ad479a5441bc80baee8db96eab6040c5486631ebcd3b" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.442031 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-bfd5-account-create-update-rvnw4"] Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.444790 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-bfd5-account-create-update-rvnw4"] Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.447477 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-767d5d98bd-zk227" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.458633 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage13-crc" (OuterVolumeSpecName: "mysql-db") pod "ad58cc97-cbed-48c8-ab51-ebb920a1454c" (UID: "ad58cc97-cbed-48c8-ab51-ebb920a1454c"). InnerVolumeSpecName "local-storage13-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.479765 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:05:47 crc kubenswrapper[4558]: E0120 17:05:47.481018 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="811380372fc9996d1d29a2c8b3863bd8fa15b182a9a97db33dd3fd3b535d9455" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Jan 20 17:05:47 crc kubenswrapper[4558]: E0120 17:05:47.483541 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="811380372fc9996d1d29a2c8b3863bd8fa15b182a9a97db33dd3fd3b535d9455" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.486840 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ad58cc97-cbed-48c8-ab51-ebb920a1454c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ad58cc97-cbed-48c8-ab51-ebb920a1454c" (UID: "ad58cc97-cbed-48c8-ab51-ebb920a1454c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.489306 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ad58cc97-cbed-48c8-ab51-ebb920a1454c-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "ad58cc97-cbed-48c8-ab51-ebb920a1454c" (UID: "ad58cc97-cbed-48c8-ab51-ebb920a1454c"). InnerVolumeSpecName "galera-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:47 crc kubenswrapper[4558]: E0120 17:05:47.489388 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="811380372fc9996d1d29a2c8b3863bd8fa15b182a9a97db33dd3fd3b535d9455" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Jan 20 17:05:47 crc kubenswrapper[4558]: E0120 17:05:47.489430 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/ovn-northd-0" podUID="33be1904-bd58-48cc-806a-af1dc751717c" containerName="ovn-northd" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.493481 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.500041 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.504784 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.535259 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0fcabe47-75cd-4bff-ba51-8a65a23b3f1f-logs\") pod \"0fcabe47-75cd-4bff-ba51-8a65a23b3f1f\" (UID: \"0fcabe47-75cd-4bff-ba51-8a65a23b3f1f\") " Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.535330 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef319bad-004a-4140-b9a3-e34b376460da-config-data\") pod \"ef319bad-004a-4140-b9a3-e34b376460da\" (UID: \"ef319bad-004a-4140-b9a3-e34b376460da\") " Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.535390 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ef319bad-004a-4140-b9a3-e34b376460da-logs\") pod \"ef319bad-004a-4140-b9a3-e34b376460da\" (UID: \"ef319bad-004a-4140-b9a3-e34b376460da\") " Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.535553 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0fcabe47-75cd-4bff-ba51-8a65a23b3f1f-config-data\") pod \"0fcabe47-75cd-4bff-ba51-8a65a23b3f1f\" (UID: \"0fcabe47-75cd-4bff-ba51-8a65a23b3f1f\") " Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.535651 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g2dlb\" (UniqueName: \"kubernetes.io/projected/0fcabe47-75cd-4bff-ba51-8a65a23b3f1f-kube-api-access-g2dlb\") pod \"0fcabe47-75cd-4bff-ba51-8a65a23b3f1f\" (UID: \"0fcabe47-75cd-4bff-ba51-8a65a23b3f1f\") " Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.535696 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0fcabe47-75cd-4bff-ba51-8a65a23b3f1f-config-data-custom\") pod \"0fcabe47-75cd-4bff-ba51-8a65a23b3f1f\" (UID: \"0fcabe47-75cd-4bff-ba51-8a65a23b3f1f\") " Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.535736 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ef319bad-004a-4140-b9a3-e34b376460da-config-data-custom\") pod \"ef319bad-004a-4140-b9a3-e34b376460da\" (UID: \"ef319bad-004a-4140-b9a3-e34b376460da\") " Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.535775 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bnwf5\" (UniqueName: \"kubernetes.io/projected/ef319bad-004a-4140-b9a3-e34b376460da-kube-api-access-bnwf5\") pod \"ef319bad-004a-4140-b9a3-e34b376460da\" (UID: \"ef319bad-004a-4140-b9a3-e34b376460da\") " Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.536069 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-42zjp\" (UniqueName: \"kubernetes.io/projected/f6dee0f3-6849-442d-b8fc-417c8540c9f9-kube-api-access-42zjp\") pod \"keystone-6218-account-create-update-gkxlw\" (UID: \"f6dee0f3-6849-442d-b8fc-417c8540c9f9\") " pod="openstack-kuttl-tests/keystone-6218-account-create-update-gkxlw" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.536214 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f6dee0f3-6849-442d-b8fc-417c8540c9f9-operator-scripts\") pod \"keystone-6218-account-create-update-gkxlw\" (UID: \"f6dee0f3-6849-442d-b8fc-417c8540c9f9\") " pod="openstack-kuttl-tests/keystone-6218-account-create-update-gkxlw" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.536356 4558 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/ad58cc97-cbed-48c8-ab51-ebb920a1454c-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.536377 4558 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/ad58cc97-cbed-48c8-ab51-ebb920a1454c-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.536387 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad58cc97-cbed-48c8-ab51-ebb920a1454c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.536412 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") on node \"crc\" " Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.536422 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ad58cc97-cbed-48c8-ab51-ebb920a1454c-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.536433 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k7ntl\" (UniqueName: \"kubernetes.io/projected/ad58cc97-cbed-48c8-ab51-ebb920a1454c-kube-api-access-k7ntl\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.536444 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/ad58cc97-cbed-48c8-ab51-ebb920a1454c-config-data-default\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.536453 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: 
\"kubernetes.io/empty-dir/ad58cc97-cbed-48c8-ab51-ebb920a1454c-config-data-generated\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.539833 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0fcabe47-75cd-4bff-ba51-8a65a23b3f1f-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "0fcabe47-75cd-4bff-ba51-8a65a23b3f1f" (UID: "0fcabe47-75cd-4bff-ba51-8a65a23b3f1f"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.540034 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0fcabe47-75cd-4bff-ba51-8a65a23b3f1f-logs" (OuterVolumeSpecName: "logs") pod "0fcabe47-75cd-4bff-ba51-8a65a23b3f1f" (UID: "0fcabe47-75cd-4bff-ba51-8a65a23b3f1f"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.545534 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef319bad-004a-4140-b9a3-e34b376460da-config-data" (OuterVolumeSpecName: "config-data") pod "ef319bad-004a-4140-b9a3-e34b376460da" (UID: "ef319bad-004a-4140-b9a3-e34b376460da"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.545733 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ef319bad-004a-4140-b9a3-e34b376460da-logs" (OuterVolumeSpecName: "logs") pod "ef319bad-004a-4140-b9a3-e34b376460da" (UID: "ef319bad-004a-4140-b9a3-e34b376460da"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.551484 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage13-crc" (UniqueName: "kubernetes.io/local-volume/local-storage13-crc") on node "crc" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.551624 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0fcabe47-75cd-4bff-ba51-8a65a23b3f1f-config-data" (OuterVolumeSpecName: "config-data") pod "0fcabe47-75cd-4bff-ba51-8a65a23b3f1f" (UID: "0fcabe47-75cd-4bff-ba51-8a65a23b3f1f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:47 crc kubenswrapper[4558]: E0120 17:05:47.553367 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 17:05:47 crc kubenswrapper[4558]: E0120 17:05:47.553427 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/f6dee0f3-6849-442d-b8fc-417c8540c9f9-operator-scripts podName:f6dee0f3-6849-442d-b8fc-417c8540c9f9 nodeName:}" failed. No retries permitted until 2026-01-20 17:05:49.553410228 +0000 UTC m=+1443.313748196 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/f6dee0f3-6849-442d-b8fc-417c8540c9f9-operator-scripts") pod "keystone-6218-account-create-update-gkxlw" (UID: "f6dee0f3-6849-442d-b8fc-417c8540c9f9") : configmap "openstack-scripts" not found Jan 20 17:05:47 crc kubenswrapper[4558]: E0120 17:05:47.553580 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 17:05:47 crc kubenswrapper[4558]: E0120 17:05:47.553654 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/1fbb53e2-e2a2-4d62-a392-027b1f3f3232-operator-scripts podName:1fbb53e2-e2a2-4d62-a392-027b1f3f3232 nodeName:}" failed. No retries permitted until 2026-01-20 17:05:49.553636654 +0000 UTC m=+1443.313974621 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/1fbb53e2-e2a2-4d62-a392-027b1f3f3232-operator-scripts") pod "neutron-7436-account-create-update-hpxxl" (UID: "1fbb53e2-e2a2-4d62-a392-027b1f3f3232") : configmap "openstack-scripts" not found Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.558995 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0fcabe47-75cd-4bff-ba51-8a65a23b3f1f-kube-api-access-g2dlb" (OuterVolumeSpecName: "kube-api-access-g2dlb") pod "0fcabe47-75cd-4bff-ba51-8a65a23b3f1f" (UID: "0fcabe47-75cd-4bff-ba51-8a65a23b3f1f"). InnerVolumeSpecName "kube-api-access-g2dlb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:05:47 crc kubenswrapper[4558]: E0120 17:05:47.559261 4558 projected.go:194] Error preparing data for projected volume kube-api-access-42zjp for pod openstack-kuttl-tests/keystone-6218-account-create-update-gkxlw: failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 17:05:47 crc kubenswrapper[4558]: E0120 17:05:47.559313 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/f6dee0f3-6849-442d-b8fc-417c8540c9f9-kube-api-access-42zjp podName:f6dee0f3-6849-442d-b8fc-417c8540c9f9 nodeName:}" failed. No retries permitted until 2026-01-20 17:05:49.55930316 +0000 UTC m=+1443.319641126 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-42zjp" (UniqueName: "kubernetes.io/projected/f6dee0f3-6849-442d-b8fc-417c8540c9f9-kube-api-access-42zjp") pod "keystone-6218-account-create-update-gkxlw" (UID: "f6dee0f3-6849-442d-b8fc-417c8540c9f9") : failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.559585 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef319bad-004a-4140-b9a3-e34b376460da-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "ef319bad-004a-4140-b9a3-e34b376460da" (UID: "ef319bad-004a-4140-b9a3-e34b376460da"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.565283 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ef319bad-004a-4140-b9a3-e34b376460da-kube-api-access-bnwf5" (OuterVolumeSpecName: "kube-api-access-bnwf5") pod "ef319bad-004a-4140-b9a3-e34b376460da" (UID: "ef319bad-004a-4140-b9a3-e34b376460da"). InnerVolumeSpecName "kube-api-access-bnwf5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.638094 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/6df6ca0e-78e9-4248-8cbe-b9934e0ad090-galera-tls-certs\") pod \"6df6ca0e-78e9-4248-8cbe-b9934e0ad090\" (UID: \"6df6ca0e-78e9-4248-8cbe-b9934e0ad090\") " Jan 20 17:05:47 crc kubenswrapper[4558]: E0120 17:05:47.639229 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 17:05:47 crc kubenswrapper[4558]: E0120 17:05:47.639308 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/d4457629-a457-4a4c-b285-eab441f8d319-operator-scripts podName:d4457629-a457-4a4c-b285-eab441f8d319 nodeName:}" failed. No retries permitted until 2026-01-20 17:05:49.639281817 +0000 UTC m=+1443.399619785 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/d4457629-a457-4a4c-b285-eab441f8d319-operator-scripts") pod "barbican-bb2e-account-create-update-gqjcg" (UID: "d4457629-a457-4a4c-b285-eab441f8d319") : configmap "openstack-scripts" not found Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.640247 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ef319bad-004a-4140-b9a3-e34b376460da-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.640283 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bnwf5\" (UniqueName: \"kubernetes.io/projected/ef319bad-004a-4140-b9a3-e34b376460da-kube-api-access-bnwf5\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.640310 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0fcabe47-75cd-4bff-ba51-8a65a23b3f1f-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.640321 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef319bad-004a-4140-b9a3-e34b376460da-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.640331 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ef319bad-004a-4140-b9a3-e34b376460da-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.640342 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0fcabe47-75cd-4bff-ba51-8a65a23b3f1f-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.640352 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g2dlb\" (UniqueName: \"kubernetes.io/projected/0fcabe47-75cd-4bff-ba51-8a65a23b3f1f-kube-api-access-g2dlb\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.640364 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.640376 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/0fcabe47-75cd-4bff-ba51-8a65a23b3f1f-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:47 crc kubenswrapper[4558]: E0120 17:05:47.640373 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 17:05:47 crc kubenswrapper[4558]: E0120 17:05:47.640428 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/19189ead-bcd7-4806-be88-43cc27d5f202-operator-scripts podName:19189ead-bcd7-4806-be88-43cc27d5f202 nodeName:}" failed. No retries permitted until 2026-01-20 17:05:49.64042114 +0000 UTC m=+1443.400759107 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/19189ead-bcd7-4806-be88-43cc27d5f202-operator-scripts") pod "root-account-create-update-vcf64" (UID: "19189ead-bcd7-4806-be88-43cc27d5f202") : configmap "openstack-scripts" not found Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.642058 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6df6ca0e-78e9-4248-8cbe-b9934e0ad090-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "6df6ca0e-78e9-4248-8cbe-b9934e0ad090" (UID: "6df6ca0e-78e9-4248-8cbe-b9934e0ad090"). InnerVolumeSpecName "galera-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.700245 4558 scope.go:117] "RemoveContainer" containerID="3ce6116ef9c85330a51ece86b35504a1ac4b48ba623b6c363ad2b015e33fec35" Jan 20 17:05:47 crc kubenswrapper[4558]: E0120 17:05:47.700877 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3ce6116ef9c85330a51ece86b35504a1ac4b48ba623b6c363ad2b015e33fec35\": container with ID starting with 3ce6116ef9c85330a51ece86b35504a1ac4b48ba623b6c363ad2b015e33fec35 not found: ID does not exist" containerID="3ce6116ef9c85330a51ece86b35504a1ac4b48ba623b6c363ad2b015e33fec35" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.700929 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3ce6116ef9c85330a51ece86b35504a1ac4b48ba623b6c363ad2b015e33fec35"} err="failed to get container status \"3ce6116ef9c85330a51ece86b35504a1ac4b48ba623b6c363ad2b015e33fec35\": rpc error: code = NotFound desc = could not find container \"3ce6116ef9c85330a51ece86b35504a1ac4b48ba623b6c363ad2b015e33fec35\": container with ID starting with 3ce6116ef9c85330a51ece86b35504a1ac4b48ba623b6c363ad2b015e33fec35 not found: ID does not exist" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.700962 4558 scope.go:117] "RemoveContainer" containerID="e2b2159ece9fe35f6232ad479a5441bc80baee8db96eab6040c5486631ebcd3b" Jan 20 17:05:47 crc kubenswrapper[4558]: E0120 17:05:47.701413 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e2b2159ece9fe35f6232ad479a5441bc80baee8db96eab6040c5486631ebcd3b\": container with ID starting with e2b2159ece9fe35f6232ad479a5441bc80baee8db96eab6040c5486631ebcd3b not found: ID does not exist" containerID="e2b2159ece9fe35f6232ad479a5441bc80baee8db96eab6040c5486631ebcd3b" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.701452 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e2b2159ece9fe35f6232ad479a5441bc80baee8db96eab6040c5486631ebcd3b"} err="failed to get container 
status \"e2b2159ece9fe35f6232ad479a5441bc80baee8db96eab6040c5486631ebcd3b\": rpc error: code = NotFound desc = could not find container \"e2b2159ece9fe35f6232ad479a5441bc80baee8db96eab6040c5486631ebcd3b\": container with ID starting with e2b2159ece9fe35f6232ad479a5441bc80baee8db96eab6040c5486631ebcd3b not found: ID does not exist" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.701475 4558 scope.go:117] "RemoveContainer" containerID="6b8fce6bafaad0a422a208a3cb35d584fc27bf9b3ba41bf28947f865c272fe06" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.727745 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-bb2e-account-create-update-gqjcg" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.743445 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:05:47 crc kubenswrapper[4558]: E0120 17:05:47.744406 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 17:05:47 crc kubenswrapper[4558]: E0120 17:05:47.744511 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/f6a8779d-bb69-43c5-96d1-a7669b5dd9ea-operator-scripts podName:f6a8779d-bb69-43c5-96d1-a7669b5dd9ea nodeName:}" failed. No retries permitted until 2026-01-20 17:05:49.744497681 +0000 UTC m=+1443.504835648 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/f6a8779d-bb69-43c5-96d1-a7669b5dd9ea-operator-scripts") pod "glance-103a-account-create-update-whkzm" (UID: "f6a8779d-bb69-43c5-96d1-a7669b5dd9ea") : configmap "openstack-scripts" not found Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.744853 4558 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/6df6ca0e-78e9-4248-8cbe-b9934e0ad090-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.756016 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.759151 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.761988 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.772447 4558 scope.go:117] "RemoveContainer" containerID="6b8fce6bafaad0a422a208a3cb35d584fc27bf9b3ba41bf28947f865c272fe06" Jan 20 17:05:47 crc kubenswrapper[4558]: E0120 17:05:47.773864 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6b8fce6bafaad0a422a208a3cb35d584fc27bf9b3ba41bf28947f865c272fe06\": container with ID starting with 6b8fce6bafaad0a422a208a3cb35d584fc27bf9b3ba41bf28947f865c272fe06 not found: ID does not exist" containerID="6b8fce6bafaad0a422a208a3cb35d584fc27bf9b3ba41bf28947f865c272fe06" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.773903 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6b8fce6bafaad0a422a208a3cb35d584fc27bf9b3ba41bf28947f865c272fe06"} err="failed to get container status \"6b8fce6bafaad0a422a208a3cb35d584fc27bf9b3ba41bf28947f865c272fe06\": rpc error: code = NotFound desc = could not find container 
\"6b8fce6bafaad0a422a208a3cb35d584fc27bf9b3ba41bf28947f865c272fe06\": container with ID starting with 6b8fce6bafaad0a422a208a3cb35d584fc27bf9b3ba41bf28947f865c272fe06 not found: ID does not exist" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.773928 4558 scope.go:117] "RemoveContainer" containerID="b2c58a9c1693c20653debec2694a95f53b8b1e5e7675f997990f41753ef39a46" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.809445 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/cinder-api-0" podUID="2ba42f6a-1ade-45ae-bdc4-117ad2fa866b" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.0.252:8776/healthcheck\": read tcp 10.217.0.2:47472->10.217.0.252:8776: read: connection reset by peer" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.809754 4558 scope.go:117] "RemoveContainer" containerID="1e67af77bc964f499418d3c854f0b6163e2f81ff3e6521a66f14aa8ee4590e1c" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.813289 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.823496 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.857667 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d4457629-a457-4a4c-b285-eab441f8d319-operator-scripts\") pod \"d4457629-a457-4a4c-b285-eab441f8d319\" (UID: \"d4457629-a457-4a4c-b285-eab441f8d319\") " Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.857806 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-97jgv\" (UniqueName: \"kubernetes.io/projected/d4457629-a457-4a4c-b285-eab441f8d319-kube-api-access-97jgv\") pod \"d4457629-a457-4a4c-b285-eab441f8d319\" (UID: \"d4457629-a457-4a4c-b285-eab441f8d319\") " Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.858204 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d4457629-a457-4a4c-b285-eab441f8d319-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d4457629-a457-4a4c-b285-eab441f8d319" (UID: "d4457629-a457-4a4c-b285-eab441f8d319"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:05:47 crc kubenswrapper[4558]: E0120 17:05:47.858348 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 20 17:05:47 crc kubenswrapper[4558]: E0120 17:05:47.858397 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/ac55b716-d8fd-4628-8627-f94b5a4e7c78-config-data podName:ac55b716-d8fd-4628-8627-f94b5a4e7c78 nodeName:}" failed. No retries permitted until 2026-01-20 17:05:51.858384018 +0000 UTC m=+1445.618721984 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/ac55b716-d8fd-4628-8627-f94b5a4e7c78-config-data") pod "rabbitmq-server-0" (UID: "ac55b716-d8fd-4628-8627-f94b5a4e7c78") : configmap "rabbitmq-config-data" not found Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.858940 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d4457629-a457-4a4c-b285-eab441f8d319-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.863756 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d4457629-a457-4a4c-b285-eab441f8d319-kube-api-access-97jgv" (OuterVolumeSpecName: "kube-api-access-97jgv") pod "d4457629-a457-4a4c-b285-eab441f8d319" (UID: "d4457629-a457-4a4c-b285-eab441f8d319"). InnerVolumeSpecName "kube-api-access-97jgv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.953191 4558 scope.go:117] "RemoveContainer" containerID="8029c61f17dcdbc8fe0413af1d186a7d097951d5e9f7472abca626983580e5f9" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.955058 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-7436-account-create-update-hpxxl" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.961637 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-97jgv\" (UniqueName: \"kubernetes.io/projected/d4457629-a457-4a4c-b285-eab441f8d319-kube-api-access-97jgv\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.963601 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-103a-account-create-update-whkzm" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.971545 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-1b9c-account-create-update-9w5vq" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.986267 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-vcf64" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.989737 4558 scope.go:117] "RemoveContainer" containerID="8029c61f17dcdbc8fe0413af1d186a7d097951d5e9f7472abca626983580e5f9" Jan 20 17:05:47 crc kubenswrapper[4558]: E0120 17:05:47.990406 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8029c61f17dcdbc8fe0413af1d186a7d097951d5e9f7472abca626983580e5f9\": container with ID starting with 8029c61f17dcdbc8fe0413af1d186a7d097951d5e9f7472abca626983580e5f9 not found: ID does not exist" containerID="8029c61f17dcdbc8fe0413af1d186a7d097951d5e9f7472abca626983580e5f9" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.990430 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8029c61f17dcdbc8fe0413af1d186a7d097951d5e9f7472abca626983580e5f9"} err="failed to get container status \"8029c61f17dcdbc8fe0413af1d186a7d097951d5e9f7472abca626983580e5f9\": rpc error: code = NotFound desc = could not find container \"8029c61f17dcdbc8fe0413af1d186a7d097951d5e9f7472abca626983580e5f9\": container with ID starting with 8029c61f17dcdbc8fe0413af1d186a7d097951d5e9f7472abca626983580e5f9 not found: ID does not exist" Jan 20 17:05:47 crc kubenswrapper[4558]: I0120 17:05:47.990452 4558 scope.go:117] "RemoveContainer" containerID="d79bd7c00910cb1ea357df6713d24c89de9eda30118ccacbc0177383e7169dd9" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.033919 4558 scope.go:117] "RemoveContainer" containerID="d79bd7c00910cb1ea357df6713d24c89de9eda30118ccacbc0177383e7169dd9" Jan 20 17:05:48 crc kubenswrapper[4558]: E0120 17:05:48.034289 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d79bd7c00910cb1ea357df6713d24c89de9eda30118ccacbc0177383e7169dd9\": container with ID starting with d79bd7c00910cb1ea357df6713d24c89de9eda30118ccacbc0177383e7169dd9 not found: ID does not exist" containerID="d79bd7c00910cb1ea357df6713d24c89de9eda30118ccacbc0177383e7169dd9" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.034325 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d79bd7c00910cb1ea357df6713d24c89de9eda30118ccacbc0177383e7169dd9"} err="failed to get container status \"d79bd7c00910cb1ea357df6713d24c89de9eda30118ccacbc0177383e7169dd9\": rpc error: code = NotFound desc = could not find container \"d79bd7c00910cb1ea357df6713d24c89de9eda30118ccacbc0177383e7169dd9\": container with ID starting with d79bd7c00910cb1ea357df6713d24c89de9eda30118ccacbc0177383e7169dd9 not found: ID does not exist" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.034346 4558 scope.go:117] "RemoveContainer" containerID="e7f17901adbdb0ade1bb599a686da509a4f272513ee6686f67a6c29495fa2a19" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.057942 4558 scope.go:117] "RemoveContainer" containerID="95f19d7a9e9522b1398a79cebf0602dd58a2e2f0ffdd88865efd947c83bbb895" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.062756 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qhbl4\" (UniqueName: \"kubernetes.io/projected/3ae31408-2eb5-445e-8f66-f0f0a6a7f9f2-kube-api-access-qhbl4\") pod \"3ae31408-2eb5-445e-8f66-f0f0a6a7f9f2\" (UID: \"3ae31408-2eb5-445e-8f66-f0f0a6a7f9f2\") " Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.062844 4558 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gzzbd\" (UniqueName: \"kubernetes.io/projected/19189ead-bcd7-4806-be88-43cc27d5f202-kube-api-access-gzzbd\") pod \"19189ead-bcd7-4806-be88-43cc27d5f202\" (UID: \"19189ead-bcd7-4806-be88-43cc27d5f202\") " Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.062908 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qlbzm\" (UniqueName: \"kubernetes.io/projected/1fbb53e2-e2a2-4d62-a392-027b1f3f3232-kube-api-access-qlbzm\") pod \"1fbb53e2-e2a2-4d62-a392-027b1f3f3232\" (UID: \"1fbb53e2-e2a2-4d62-a392-027b1f3f3232\") " Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.062927 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-45lgx\" (UniqueName: \"kubernetes.io/projected/f6a8779d-bb69-43c5-96d1-a7669b5dd9ea-kube-api-access-45lgx\") pod \"f6a8779d-bb69-43c5-96d1-a7669b5dd9ea\" (UID: \"f6a8779d-bb69-43c5-96d1-a7669b5dd9ea\") " Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.062962 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f6a8779d-bb69-43c5-96d1-a7669b5dd9ea-operator-scripts\") pod \"f6a8779d-bb69-43c5-96d1-a7669b5dd9ea\" (UID: \"f6a8779d-bb69-43c5-96d1-a7669b5dd9ea\") " Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.063035 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/19189ead-bcd7-4806-be88-43cc27d5f202-operator-scripts\") pod \"19189ead-bcd7-4806-be88-43cc27d5f202\" (UID: \"19189ead-bcd7-4806-be88-43cc27d5f202\") " Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.063083 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1fbb53e2-e2a2-4d62-a392-027b1f3f3232-operator-scripts\") pod \"1fbb53e2-e2a2-4d62-a392-027b1f3f3232\" (UID: \"1fbb53e2-e2a2-4d62-a392-027b1f3f3232\") " Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.063150 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3ae31408-2eb5-445e-8f66-f0f0a6a7f9f2-operator-scripts\") pod \"3ae31408-2eb5-445e-8f66-f0f0a6a7f9f2\" (UID: \"3ae31408-2eb5-445e-8f66-f0f0a6a7f9f2\") " Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.063961 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f6a8779d-bb69-43c5-96d1-a7669b5dd9ea-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f6a8779d-bb69-43c5-96d1-a7669b5dd9ea" (UID: "f6a8779d-bb69-43c5-96d1-a7669b5dd9ea"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.063967 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3ae31408-2eb5-445e-8f66-f0f0a6a7f9f2-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "3ae31408-2eb5-445e-8f66-f0f0a6a7f9f2" (UID: "3ae31408-2eb5-445e-8f66-f0f0a6a7f9f2"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.064529 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1fbb53e2-e2a2-4d62-a392-027b1f3f3232-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "1fbb53e2-e2a2-4d62-a392-027b1f3f3232" (UID: "1fbb53e2-e2a2-4d62-a392-027b1f3f3232"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.064588 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/19189ead-bcd7-4806-be88-43cc27d5f202-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "19189ead-bcd7-4806-be88-43cc27d5f202" (UID: "19189ead-bcd7-4806-be88-43cc27d5f202"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.066916 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ae31408-2eb5-445e-8f66-f0f0a6a7f9f2-kube-api-access-qhbl4" (OuterVolumeSpecName: "kube-api-access-qhbl4") pod "3ae31408-2eb5-445e-8f66-f0f0a6a7f9f2" (UID: "3ae31408-2eb5-445e-8f66-f0f0a6a7f9f2"). InnerVolumeSpecName "kube-api-access-qhbl4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.067451 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/19189ead-bcd7-4806-be88-43cc27d5f202-kube-api-access-gzzbd" (OuterVolumeSpecName: "kube-api-access-gzzbd") pod "19189ead-bcd7-4806-be88-43cc27d5f202" (UID: "19189ead-bcd7-4806-be88-43cc27d5f202"). InnerVolumeSpecName "kube-api-access-gzzbd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.067963 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1fbb53e2-e2a2-4d62-a392-027b1f3f3232-kube-api-access-qlbzm" (OuterVolumeSpecName: "kube-api-access-qlbzm") pod "1fbb53e2-e2a2-4d62-a392-027b1f3f3232" (UID: "1fbb53e2-e2a2-4d62-a392-027b1f3f3232"). InnerVolumeSpecName "kube-api-access-qlbzm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.068044 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f6a8779d-bb69-43c5-96d1-a7669b5dd9ea-kube-api-access-45lgx" (OuterVolumeSpecName: "kube-api-access-45lgx") pod "f6a8779d-bb69-43c5-96d1-a7669b5dd9ea" (UID: "f6a8779d-bb69-43c5-96d1-a7669b5dd9ea"). InnerVolumeSpecName "kube-api-access-45lgx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.121207 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-fbd5dff98-mmhdt" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.166102 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qlbzm\" (UniqueName: \"kubernetes.io/projected/1fbb53e2-e2a2-4d62-a392-027b1f3f3232-kube-api-access-qlbzm\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.166158 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-45lgx\" (UniqueName: \"kubernetes.io/projected/f6a8779d-bb69-43c5-96d1-a7669b5dd9ea-kube-api-access-45lgx\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.166202 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f6a8779d-bb69-43c5-96d1-a7669b5dd9ea-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.166213 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/19189ead-bcd7-4806-be88-43cc27d5f202-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.166224 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1fbb53e2-e2a2-4d62-a392-027b1f3f3232-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.166235 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3ae31408-2eb5-445e-8f66-f0f0a6a7f9f2-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.166244 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qhbl4\" (UniqueName: \"kubernetes.io/projected/3ae31408-2eb5-445e-8f66-f0f0a6a7f9f2-kube-api-access-qhbl4\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.166277 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gzzbd\" (UniqueName: \"kubernetes.io/projected/19189ead-bcd7-4806-be88-43cc27d5f202-kube-api-access-gzzbd\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.181753 4558 scope.go:117] "RemoveContainer" containerID="95f19d7a9e9522b1398a79cebf0602dd58a2e2f0ffdd88865efd947c83bbb895" Jan 20 17:05:48 crc kubenswrapper[4558]: E0120 17:05:48.183551 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"95f19d7a9e9522b1398a79cebf0602dd58a2e2f0ffdd88865efd947c83bbb895\": container with ID starting with 95f19d7a9e9522b1398a79cebf0602dd58a2e2f0ffdd88865efd947c83bbb895 not found: ID does not exist" containerID="95f19d7a9e9522b1398a79cebf0602dd58a2e2f0ffdd88865efd947c83bbb895" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.183613 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"95f19d7a9e9522b1398a79cebf0602dd58a2e2f0ffdd88865efd947c83bbb895"} err="failed to get container status \"95f19d7a9e9522b1398a79cebf0602dd58a2e2f0ffdd88865efd947c83bbb895\": rpc error: code = NotFound desc = could not find container \"95f19d7a9e9522b1398a79cebf0602dd58a2e2f0ffdd88865efd947c83bbb895\": container with ID starting with 95f19d7a9e9522b1398a79cebf0602dd58a2e2f0ffdd88865efd947c83bbb895 not found: ID does not exist" Jan 20 17:05:48 crc 
kubenswrapper[4558]: I0120 17:05:48.183659 4558 scope.go:117] "RemoveContainer" containerID="14b34f3f512d1c97cde26d0f645f8f50baa327877aab8009b5d6e09f075e96b7" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.253971 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-kwtcx" event={"ID":"25a5f425-926a-40eb-8f77-fcc3cdd9880c","Type":"ContainerStarted","Data":"00a971f299806aa024b2084c6e7df03107799f9177171fc10a718d8c3f3f3860"} Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.254194 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-kwtcx" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.258570 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-103a-account-create-update-whkzm" event={"ID":"f6a8779d-bb69-43c5-96d1-a7669b5dd9ea","Type":"ContainerDied","Data":"8a63e3123e7318fb99c1d664774b7d9155d07515aaac65a7c132ef2027029ad8"} Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.258657 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-103a-account-create-update-whkzm" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.274703 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dlqw5\" (UniqueName: \"kubernetes.io/projected/a98c5227-c8a8-4cbc-8039-461dd14fbd5b-kube-api-access-dlqw5\") pod \"a98c5227-c8a8-4cbc-8039-461dd14fbd5b\" (UID: \"a98c5227-c8a8-4cbc-8039-461dd14fbd5b\") " Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.275527 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a98c5227-c8a8-4cbc-8039-461dd14fbd5b-internal-tls-certs\") pod \"a98c5227-c8a8-4cbc-8039-461dd14fbd5b\" (UID: \"a98c5227-c8a8-4cbc-8039-461dd14fbd5b\") " Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.275727 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a98c5227-c8a8-4cbc-8039-461dd14fbd5b-public-tls-certs\") pod \"a98c5227-c8a8-4cbc-8039-461dd14fbd5b\" (UID: \"a98c5227-c8a8-4cbc-8039-461dd14fbd5b\") " Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.275764 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a98c5227-c8a8-4cbc-8039-461dd14fbd5b-config-data\") pod \"a98c5227-c8a8-4cbc-8039-461dd14fbd5b\" (UID: \"a98c5227-c8a8-4cbc-8039-461dd14fbd5b\") " Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.275788 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a98c5227-c8a8-4cbc-8039-461dd14fbd5b-combined-ca-bundle\") pod \"a98c5227-c8a8-4cbc-8039-461dd14fbd5b\" (UID: \"a98c5227-c8a8-4cbc-8039-461dd14fbd5b\") " Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.275842 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a98c5227-c8a8-4cbc-8039-461dd14fbd5b-logs\") pod \"a98c5227-c8a8-4cbc-8039-461dd14fbd5b\" (UID: \"a98c5227-c8a8-4cbc-8039-461dd14fbd5b\") " Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.276002 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/a98c5227-c8a8-4cbc-8039-461dd14fbd5b-scripts\") pod \"a98c5227-c8a8-4cbc-8039-461dd14fbd5b\" (UID: \"a98c5227-c8a8-4cbc-8039-461dd14fbd5b\") " Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.276721 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a98c5227-c8a8-4cbc-8039-461dd14fbd5b-logs" (OuterVolumeSpecName: "logs") pod "a98c5227-c8a8-4cbc-8039-461dd14fbd5b" (UID: "a98c5227-c8a8-4cbc-8039-461dd14fbd5b"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.288869 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a98c5227-c8a8-4cbc-8039-461dd14fbd5b-kube-api-access-dlqw5" (OuterVolumeSpecName: "kube-api-access-dlqw5") pod "a98c5227-c8a8-4cbc-8039-461dd14fbd5b" (UID: "a98c5227-c8a8-4cbc-8039-461dd14fbd5b"). InnerVolumeSpecName "kube-api-access-dlqw5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.291483 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-kwtcx" podStartSLOduration=4.29146858 podStartE2EDuration="4.29146858s" podCreationTimestamp="2026-01-20 17:05:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:05:48.27698215 +0000 UTC m=+1442.037320118" watchObservedRunningTime="2026-01-20 17:05:48.29146858 +0000 UTC m=+1442.051806547" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.301367 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-7436-account-create-update-hpxxl" event={"ID":"1fbb53e2-e2a2-4d62-a392-027b1f3f3232","Type":"ContainerDied","Data":"c17bc22e927e22aa6bd3a92db42892dbac2eeb0a0b9e08b80efa5aea443f1e36"} Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.301495 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-7436-account-create-update-hpxxl" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.343870 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a98c5227-c8a8-4cbc-8039-461dd14fbd5b-scripts" (OuterVolumeSpecName: "scripts") pod "a98c5227-c8a8-4cbc-8039-461dd14fbd5b" (UID: "a98c5227-c8a8-4cbc-8039-461dd14fbd5b"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.343943 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/nova-metadata-0" podUID="1fccb241-c75e-4cec-b3c6-3855bd6c1161" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.1.32:8775/\": dial tcp 10.217.1.32:8775: connect: connection refused" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.344179 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/nova-metadata-0" podUID="1fccb241-c75e-4cec-b3c6-3855bd6c1161" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.1.32:8775/\": dial tcp 10.217.1.32:8775: connect: connection refused" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.346189 4558 generic.go:334] "Generic (PLEG): container finished" podID="20ba561a-58e7-459f-ba28-ed0b68cdab9b" containerID="148dd9de98eda3e5b9625476910ce41f12b5496fe3cc4cb92bbf3686a771a8e5" exitCode=0 Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.346275 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"20ba561a-58e7-459f-ba28-ed0b68cdab9b","Type":"ContainerDied","Data":"148dd9de98eda3e5b9625476910ce41f12b5496fe3cc4cb92bbf3686a771a8e5"} Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.346576 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"20ba561a-58e7-459f-ba28-ed0b68cdab9b","Type":"ContainerDied","Data":"2e3974a4783643c1db14827b3c03ac543eee73ccc3cffba0f3755b5b3aee4b20"} Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.347485 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2e3974a4783643c1db14827b3c03ac543eee73ccc3cffba0f3755b5b3aee4b20" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.351953 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-bb2e-account-create-update-gqjcg" event={"ID":"d4457629-a457-4a4c-b285-eab441f8d319","Type":"ContainerDied","Data":"2a36753532c302eafe94010be58608974d3d3bc8a21d5cee36ed28552862db4d"} Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.352054 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-bb2e-account-create-update-gqjcg" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.365668 4558 generic.go:334] "Generic (PLEG): container finished" podID="a98c5227-c8a8-4cbc-8039-461dd14fbd5b" containerID="bd40ce3a39fbdd59afdf9e62cf2e6f662f5e7a7d8647d8f283efcb8f7b52ebc2" exitCode=0 Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.365727 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-fbd5dff98-mmhdt" event={"ID":"a98c5227-c8a8-4cbc-8039-461dd14fbd5b","Type":"ContainerDied","Data":"bd40ce3a39fbdd59afdf9e62cf2e6f662f5e7a7d8647d8f283efcb8f7b52ebc2"} Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.365754 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-fbd5dff98-mmhdt" event={"ID":"a98c5227-c8a8-4cbc-8039-461dd14fbd5b","Type":"ContainerDied","Data":"008e17f898f6bf0143ec5e258597f523d7316f6923096087b15b4e69dec9c782"} Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.365772 4558 scope.go:117] "RemoveContainer" containerID="bd40ce3a39fbdd59afdf9e62cf2e6f662f5e7a7d8647d8f283efcb8f7b52ebc2" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.365931 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-fbd5dff98-mmhdt" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.371517 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-vcf64" event={"ID":"19189ead-bcd7-4806-be88-43cc27d5f202","Type":"ContainerDied","Data":"5b8f3b8f32f06168109d738f3af28d5697b6014269d9fe52d5fa889ecc6735ad"} Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.371595 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-vcf64" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.378433 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dlqw5\" (UniqueName: \"kubernetes.io/projected/a98c5227-c8a8-4cbc-8039-461dd14fbd5b-kube-api-access-dlqw5\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.378462 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a98c5227-c8a8-4cbc-8039-461dd14fbd5b-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.378473 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a98c5227-c8a8-4cbc-8039-461dd14fbd5b-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.386305 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a98c5227-c8a8-4cbc-8039-461dd14fbd5b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a98c5227-c8a8-4cbc-8039-461dd14fbd5b" (UID: "a98c5227-c8a8-4cbc-8039-461dd14fbd5b"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.386349 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-1b9c-account-create-update-9w5vq" event={"ID":"3ae31408-2eb5-445e-8f66-f0f0a6a7f9f2","Type":"ContainerDied","Data":"5c77e694bb7361cf18c1aff6a8f9c39181215afe810852c20b37cfeef865d9fe"} Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.386432 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-1b9c-account-create-update-9w5vq" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.388981 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"ad58cc97-cbed-48c8-ab51-ebb920a1454c","Type":"ContainerDied","Data":"0f5b5bb20c4f4a6ee245c0c6e38b3e7165ab520d961a3c86674c0add57eb1395"} Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.389015 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.390232 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a98c5227-c8a8-4cbc-8039-461dd14fbd5b-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "a98c5227-c8a8-4cbc-8039-461dd14fbd5b" (UID: "a98c5227-c8a8-4cbc-8039-461dd14fbd5b"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.390933 4558 generic.go:334] "Generic (PLEG): container finished" podID="aa387033-dc08-48f4-bf56-06a7f316423c" containerID="32b6c39544892ca39cef55614e1cf5d22b40b7f421e0e33ba0fd15c7434cf00e" exitCode=0 Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.390987 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"aa387033-dc08-48f4-bf56-06a7f316423c","Type":"ContainerDied","Data":"32b6c39544892ca39cef55614e1cf5d22b40b7f421e0e33ba0fd15c7434cf00e"} Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.393686 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a98c5227-c8a8-4cbc-8039-461dd14fbd5b-config-data" (OuterVolumeSpecName: "config-data") pod "a98c5227-c8a8-4cbc-8039-461dd14fbd5b" (UID: "a98c5227-c8a8-4cbc-8039-461dd14fbd5b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.396184 4558 generic.go:334] "Generic (PLEG): container finished" podID="2ba42f6a-1ade-45ae-bdc4-117ad2fa866b" containerID="e91b0886ad44950f7bbfb790fe06f0f90a6fc3aaa9d7fe9e510d991406a8e51c" exitCode=0 Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.396279 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-6218-account-create-update-gkxlw" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.399676 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-767d5d98bd-zk227" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.401566 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"2ba42f6a-1ade-45ae-bdc4-117ad2fa866b","Type":"ContainerDied","Data":"e91b0886ad44950f7bbfb790fe06f0f90a6fc3aaa9d7fe9e510d991406a8e51c"} Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.401673 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-57fd8b7745-hswtl" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.439354 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a98c5227-c8a8-4cbc-8039-461dd14fbd5b-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "a98c5227-c8a8-4cbc-8039-461dd14fbd5b" (UID: "a98c5227-c8a8-4cbc-8039-461dd14fbd5b"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.479858 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a98c5227-c8a8-4cbc-8039-461dd14fbd5b-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.479887 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a98c5227-c8a8-4cbc-8039-461dd14fbd5b-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.479898 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a98c5227-c8a8-4cbc-8039-461dd14fbd5b-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.479907 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a98c5227-c8a8-4cbc-8039-461dd14fbd5b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.582216 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.585402 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1176545e-611b-4fc7-8b03-e91ee7813fd3" path="/var/lib/kubelet/pods/1176545e-611b-4fc7-8b03-e91ee7813fd3/volumes" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.586353 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1f9ceb49-977a-47b3-a1f3-10d68c96ab0f" path="/var/lib/kubelet/pods/1f9ceb49-977a-47b3-a1f3-10d68c96ab0f/volumes" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.587399 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf" path="/var/lib/kubelet/pods/2290f4e8-39b3-49d2-8ef6-8fa94c9e3cdf/volumes" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.588689 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="32a3bcd7-25d6-45f5-8ce6-66949357504c" path="/var/lib/kubelet/pods/32a3bcd7-25d6-45f5-8ce6-66949357504c/volumes" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.589436 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="38786f1c-754c-488d-8a13-aad7001ad778" path="/var/lib/kubelet/pods/38786f1c-754c-488d-8a13-aad7001ad778/volumes" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.590008 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3e05451a-8619-4559-9648-cc40e9ea5bb0" path="/var/lib/kubelet/pods/3e05451a-8619-4559-9648-cc40e9ea5bb0/volumes" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.590809 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43e20233-57f1-4b80-8717-1668dee2a884" path="/var/lib/kubelet/pods/43e20233-57f1-4b80-8717-1668dee2a884/volumes" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.591257 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6df6ca0e-78e9-4248-8cbe-b9934e0ad090" path="/var/lib/kubelet/pods/6df6ca0e-78e9-4248-8cbe-b9934e0ad090/volumes" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.591955 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="79c1324d-aa7f-4add-a2c3-12f6d4c6216f" path="/var/lib/kubelet/pods/79c1324d-aa7f-4add-a2c3-12f6d4c6216f/volumes" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.592405 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aed5856e-9412-48a1-ac94-cc2f4bd05633" path="/var/lib/kubelet/pods/aed5856e-9412-48a1-ac94-cc2f4bd05633/volumes" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.593289 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b9c2a3b1-71ed-4612-8cd0-22e396cd622c" path="/var/lib/kubelet/pods/b9c2a3b1-71ed-4612-8cd0-22e396cd622c/volumes" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.600406 4558 scope.go:117] "RemoveContainer" containerID="bdd46cdb21b22eb84d6fd244e570bc9831757537e8b49ef22aaf635a1aa83768" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.634487 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.642897 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.654476 4558 scope.go:117] "RemoveContainer" containerID="bd40ce3a39fbdd59afdf9e62cf2e6f662f5e7a7d8647d8f283efcb8f7b52ebc2" Jan 20 17:05:48 crc kubenswrapper[4558]: E0120 17:05:48.656258 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bd40ce3a39fbdd59afdf9e62cf2e6f662f5e7a7d8647d8f283efcb8f7b52ebc2\": container with ID starting with bd40ce3a39fbdd59afdf9e62cf2e6f662f5e7a7d8647d8f283efcb8f7b52ebc2 not found: ID does not exist" containerID="bd40ce3a39fbdd59afdf9e62cf2e6f662f5e7a7d8647d8f283efcb8f7b52ebc2" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.656308 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bd40ce3a39fbdd59afdf9e62cf2e6f662f5e7a7d8647d8f283efcb8f7b52ebc2"} err="failed to get container status \"bd40ce3a39fbdd59afdf9e62cf2e6f662f5e7a7d8647d8f283efcb8f7b52ebc2\": rpc error: code = NotFound desc = could not find container \"bd40ce3a39fbdd59afdf9e62cf2e6f662f5e7a7d8647d8f283efcb8f7b52ebc2\": container with ID starting with bd40ce3a39fbdd59afdf9e62cf2e6f662f5e7a7d8647d8f283efcb8f7b52ebc2 not found: ID does not exist" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.656333 4558 scope.go:117] "RemoveContainer" containerID="bdd46cdb21b22eb84d6fd244e570bc9831757537e8b49ef22aaf635a1aa83768" Jan 20 17:05:48 crc kubenswrapper[4558]: E0120 17:05:48.656923 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bdd46cdb21b22eb84d6fd244e570bc9831757537e8b49ef22aaf635a1aa83768\": container with ID starting with bdd46cdb21b22eb84d6fd244e570bc9831757537e8b49ef22aaf635a1aa83768 not found: ID does not exist" containerID="bdd46cdb21b22eb84d6fd244e570bc9831757537e8b49ef22aaf635a1aa83768" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.656984 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bdd46cdb21b22eb84d6fd244e570bc9831757537e8b49ef22aaf635a1aa83768"} err="failed to get container status \"bdd46cdb21b22eb84d6fd244e570bc9831757537e8b49ef22aaf635a1aa83768\": rpc error: code = NotFound desc = could not find container \"bdd46cdb21b22eb84d6fd244e570bc9831757537e8b49ef22aaf635a1aa83768\": container with ID starting with bdd46cdb21b22eb84d6fd244e570bc9831757537e8b49ef22aaf635a1aa83768 not found: ID does not exist" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.657017 4558 scope.go:117] "RemoveContainer" containerID="80a7fbf9ebd999f9e255d17b6edb57d7cd1a880d930872a51bd1d1c2a19b20a5" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.657586 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.684042 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"20ba561a-58e7-459f-ba28-ed0b68cdab9b\" (UID: \"20ba561a-58e7-459f-ba28-ed0b68cdab9b\") " Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.685130 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20ba561a-58e7-459f-ba28-ed0b68cdab9b-combined-ca-bundle\") pod \"20ba561a-58e7-459f-ba28-ed0b68cdab9b\" (UID: \"20ba561a-58e7-459f-ba28-ed0b68cdab9b\") " Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.685293 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/20ba561a-58e7-459f-ba28-ed0b68cdab9b-scripts\") pod \"20ba561a-58e7-459f-ba28-ed0b68cdab9b\" (UID: \"20ba561a-58e7-459f-ba28-ed0b68cdab9b\") " Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.686035 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d84w8\" (UniqueName: \"kubernetes.io/projected/20ba561a-58e7-459f-ba28-ed0b68cdab9b-kube-api-access-d84w8\") pod \"20ba561a-58e7-459f-ba28-ed0b68cdab9b\" (UID: \"20ba561a-58e7-459f-ba28-ed0b68cdab9b\") " Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.686152 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/20ba561a-58e7-459f-ba28-ed0b68cdab9b-config-data\") pod \"20ba561a-58e7-459f-ba28-ed0b68cdab9b\" (UID: \"20ba561a-58e7-459f-ba28-ed0b68cdab9b\") " Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.686271 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/20ba561a-58e7-459f-ba28-ed0b68cdab9b-logs\") pod \"20ba561a-58e7-459f-ba28-ed0b68cdab9b\" (UID: \"20ba561a-58e7-459f-ba28-ed0b68cdab9b\") " Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.686370 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/20ba561a-58e7-459f-ba28-ed0b68cdab9b-public-tls-certs\") pod \"20ba561a-58e7-459f-ba28-ed0b68cdab9b\" (UID: \"20ba561a-58e7-459f-ba28-ed0b68cdab9b\") " Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.686461 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/20ba561a-58e7-459f-ba28-ed0b68cdab9b-httpd-run\") pod \"20ba561a-58e7-459f-ba28-ed0b68cdab9b\" (UID: \"20ba561a-58e7-459f-ba28-ed0b68cdab9b\") " Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.687862 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/20ba561a-58e7-459f-ba28-ed0b68cdab9b-logs" (OuterVolumeSpecName: "logs") pod "20ba561a-58e7-459f-ba28-ed0b68cdab9b" (UID: "20ba561a-58e7-459f-ba28-ed0b68cdab9b"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.688018 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/20ba561a-58e7-459f-ba28-ed0b68cdab9b-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "20ba561a-58e7-459f-ba28-ed0b68cdab9b" (UID: "20ba561a-58e7-459f-ba28-ed0b68cdab9b"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.693094 4558 scope.go:117] "RemoveContainer" containerID="5f0d33357c659b175318f67f48f0766deb32c17bae3e330e4f6f14c73b8bc529" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.693781 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-7436-account-create-update-hpxxl"] Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.698249 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20ba561a-58e7-459f-ba28-ed0b68cdab9b-scripts" (OuterVolumeSpecName: "scripts") pod "20ba561a-58e7-459f-ba28-ed0b68cdab9b" (UID: "20ba561a-58e7-459f-ba28-ed0b68cdab9b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.698361 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "glance") pod "20ba561a-58e7-459f-ba28-ed0b68cdab9b" (UID: "20ba561a-58e7-459f-ba28-ed0b68cdab9b"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.700836 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20ba561a-58e7-459f-ba28-ed0b68cdab9b-kube-api-access-d84w8" (OuterVolumeSpecName: "kube-api-access-d84w8") pod "20ba561a-58e7-459f-ba28-ed0b68cdab9b" (UID: "20ba561a-58e7-459f-ba28-ed0b68cdab9b"). InnerVolumeSpecName "kube-api-access-d84w8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.702444 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-7436-account-create-update-hpxxl"] Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.703781 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovn-northd-0_33be1904-bd58-48cc-806a-af1dc751717c/ovn-northd/0.log" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.703836 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.711988 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20ba561a-58e7-459f-ba28-ed0b68cdab9b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "20ba561a-58e7-459f-ba28-ed0b68cdab9b" (UID: "20ba561a-58e7-459f-ba28-ed0b68cdab9b"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.726346 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-767d5d98bd-zk227"] Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.732601 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-767d5d98bd-zk227"] Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.747323 4558 scope.go:117] "RemoveContainer" containerID="6b30ba291754c6b147355d2af5037f3a797be5fec9ed22ddfa4282645c1aabd8" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.749468 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20ba561a-58e7-459f-ba28-ed0b68cdab9b-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "20ba561a-58e7-459f-ba28-ed0b68cdab9b" (UID: "20ba561a-58e7-459f-ba28-ed0b68cdab9b"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.757796 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-1b9c-account-create-update-9w5vq"] Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.760275 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20ba561a-58e7-459f-ba28-ed0b68cdab9b-config-data" (OuterVolumeSpecName: "config-data") pod "20ba561a-58e7-459f-ba28-ed0b68cdab9b" (UID: "20ba561a-58e7-459f-ba28-ed0b68cdab9b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.767424 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-1b9c-account-create-update-9w5vq"] Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.774021 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-vcf64"] Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.779817 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-vcf64"] Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.789236 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1fccb241-c75e-4cec-b3c6-3855bd6c1161-logs\") pod \"1fccb241-c75e-4cec-b3c6-3855bd6c1161\" (UID: \"1fccb241-c75e-4cec-b3c6-3855bd6c1161\") " Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.789344 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fccb241-c75e-4cec-b3c6-3855bd6c1161-combined-ca-bundle\") pod \"1fccb241-c75e-4cec-b3c6-3855bd6c1161\" (UID: \"1fccb241-c75e-4cec-b3c6-3855bd6c1161\") " Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.789431 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aa387033-dc08-48f4-bf56-06a7f316423c-logs\") pod \"aa387033-dc08-48f4-bf56-06a7f316423c\" (UID: \"aa387033-dc08-48f4-bf56-06a7f316423c\") " Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.789526 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2ba42f6a-1ade-45ae-bdc4-117ad2fa866b-public-tls-certs\") pod \"2ba42f6a-1ade-45ae-bdc4-117ad2fa866b\" 
(UID: \"2ba42f6a-1ade-45ae-bdc4-117ad2fa866b\") " Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.789592 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"aa387033-dc08-48f4-bf56-06a7f316423c\" (UID: \"aa387033-dc08-48f4-bf56-06a7f316423c\") " Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.789679 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/33be1904-bd58-48cc-806a-af1dc751717c-scripts\") pod \"33be1904-bd58-48cc-806a-af1dc751717c\" (UID: \"33be1904-bd58-48cc-806a-af1dc751717c\") " Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.789747 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2ba42f6a-1ade-45ae-bdc4-117ad2fa866b-config-data-custom\") pod \"2ba42f6a-1ade-45ae-bdc4-117ad2fa866b\" (UID: \"2ba42f6a-1ade-45ae-bdc4-117ad2fa866b\") " Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.789808 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2ba42f6a-1ade-45ae-bdc4-117ad2fa866b-scripts\") pod \"2ba42f6a-1ade-45ae-bdc4-117ad2fa866b\" (UID: \"2ba42f6a-1ade-45ae-bdc4-117ad2fa866b\") " Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.789893 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/33be1904-bd58-48cc-806a-af1dc751717c-metrics-certs-tls-certs\") pod \"33be1904-bd58-48cc-806a-af1dc751717c\" (UID: \"33be1904-bd58-48cc-806a-af1dc751717c\") " Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.789953 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2ba42f6a-1ade-45ae-bdc4-117ad2fa866b-logs\") pod \"2ba42f6a-1ade-45ae-bdc4-117ad2fa866b\" (UID: \"2ba42f6a-1ade-45ae-bdc4-117ad2fa866b\") " Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.790030 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/aa387033-dc08-48f4-bf56-06a7f316423c-httpd-run\") pod \"aa387033-dc08-48f4-bf56-06a7f316423c\" (UID: \"aa387033-dc08-48f4-bf56-06a7f316423c\") " Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.790091 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/33be1904-bd58-48cc-806a-af1dc751717c-ovn-northd-tls-certs\") pod \"33be1904-bd58-48cc-806a-af1dc751717c\" (UID: \"33be1904-bd58-48cc-806a-af1dc751717c\") " Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.790148 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1fccb241-c75e-4cec-b3c6-3855bd6c1161-config-data\") pod \"1fccb241-c75e-4cec-b3c6-3855bd6c1161\" (UID: \"1fccb241-c75e-4cec-b3c6-3855bd6c1161\") " Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.790238 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa387033-dc08-48f4-bf56-06a7f316423c-combined-ca-bundle\") pod \"aa387033-dc08-48f4-bf56-06a7f316423c\" (UID: \"aa387033-dc08-48f4-bf56-06a7f316423c\") " Jan 20 17:05:48 crc 
kubenswrapper[4558]: I0120 17:05:48.790331 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2ba42f6a-1ade-45ae-bdc4-117ad2fa866b-etc-machine-id\") pod \"2ba42f6a-1ade-45ae-bdc4-117ad2fa866b\" (UID: \"2ba42f6a-1ade-45ae-bdc4-117ad2fa866b\") " Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.790473 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-95wrc\" (UniqueName: \"kubernetes.io/projected/1fccb241-c75e-4cec-b3c6-3855bd6c1161-kube-api-access-95wrc\") pod \"1fccb241-c75e-4cec-b3c6-3855bd6c1161\" (UID: \"1fccb241-c75e-4cec-b3c6-3855bd6c1161\") " Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.790574 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r95cx\" (UniqueName: \"kubernetes.io/projected/33be1904-bd58-48cc-806a-af1dc751717c-kube-api-access-r95cx\") pod \"33be1904-bd58-48cc-806a-af1dc751717c\" (UID: \"33be1904-bd58-48cc-806a-af1dc751717c\") " Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.790655 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa387033-dc08-48f4-bf56-06a7f316423c-scripts\") pod \"aa387033-dc08-48f4-bf56-06a7f316423c\" (UID: \"aa387033-dc08-48f4-bf56-06a7f316423c\") " Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.790875 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/1fccb241-c75e-4cec-b3c6-3855bd6c1161-nova-metadata-tls-certs\") pod \"1fccb241-c75e-4cec-b3c6-3855bd6c1161\" (UID: \"1fccb241-c75e-4cec-b3c6-3855bd6c1161\") " Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.790991 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rs4zr\" (UniqueName: \"kubernetes.io/projected/aa387033-dc08-48f4-bf56-06a7f316423c-kube-api-access-rs4zr\") pod \"aa387033-dc08-48f4-bf56-06a7f316423c\" (UID: \"aa387033-dc08-48f4-bf56-06a7f316423c\") " Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.791078 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/33be1904-bd58-48cc-806a-af1dc751717c-ovn-rundir\") pod \"33be1904-bd58-48cc-806a-af1dc751717c\" (UID: \"33be1904-bd58-48cc-806a-af1dc751717c\") " Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.791138 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tlnbb\" (UniqueName: \"kubernetes.io/projected/2ba42f6a-1ade-45ae-bdc4-117ad2fa866b-kube-api-access-tlnbb\") pod \"2ba42f6a-1ade-45ae-bdc4-117ad2fa866b\" (UID: \"2ba42f6a-1ade-45ae-bdc4-117ad2fa866b\") " Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.791221 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33be1904-bd58-48cc-806a-af1dc751717c-combined-ca-bundle\") pod \"33be1904-bd58-48cc-806a-af1dc751717c\" (UID: \"33be1904-bd58-48cc-806a-af1dc751717c\") " Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.791305 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa387033-dc08-48f4-bf56-06a7f316423c-internal-tls-certs\") pod \"aa387033-dc08-48f4-bf56-06a7f316423c\" (UID: 
\"aa387033-dc08-48f4-bf56-06a7f316423c\") " Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.791367 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ba42f6a-1ade-45ae-bdc4-117ad2fa866b-config-data\") pod \"2ba42f6a-1ade-45ae-bdc4-117ad2fa866b\" (UID: \"2ba42f6a-1ade-45ae-bdc4-117ad2fa866b\") " Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.791425 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ba42f6a-1ade-45ae-bdc4-117ad2fa866b-combined-ca-bundle\") pod \"2ba42f6a-1ade-45ae-bdc4-117ad2fa866b\" (UID: \"2ba42f6a-1ade-45ae-bdc4-117ad2fa866b\") " Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.791483 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa387033-dc08-48f4-bf56-06a7f316423c-config-data\") pod \"aa387033-dc08-48f4-bf56-06a7f316423c\" (UID: \"aa387033-dc08-48f4-bf56-06a7f316423c\") " Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.791569 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/33be1904-bd58-48cc-806a-af1dc751717c-config\") pod \"33be1904-bd58-48cc-806a-af1dc751717c\" (UID: \"33be1904-bd58-48cc-806a-af1dc751717c\") " Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.791657 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2ba42f6a-1ade-45ae-bdc4-117ad2fa866b-internal-tls-certs\") pod \"2ba42f6a-1ade-45ae-bdc4-117ad2fa866b\" (UID: \"2ba42f6a-1ade-45ae-bdc4-117ad2fa866b\") " Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.792025 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d84w8\" (UniqueName: \"kubernetes.io/projected/20ba561a-58e7-459f-ba28-ed0b68cdab9b-kube-api-access-d84w8\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.792349 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/20ba561a-58e7-459f-ba28-ed0b68cdab9b-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.792426 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/20ba561a-58e7-459f-ba28-ed0b68cdab9b-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.792479 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/20ba561a-58e7-459f-ba28-ed0b68cdab9b-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.792528 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/20ba561a-58e7-459f-ba28-ed0b68cdab9b-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.792597 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.792648 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/20ba561a-58e7-459f-ba28-ed0b68cdab9b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.792698 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/20ba561a-58e7-459f-ba28-ed0b68cdab9b-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.789909 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1fccb241-c75e-4cec-b3c6-3855bd6c1161-logs" (OuterVolumeSpecName: "logs") pod "1fccb241-c75e-4cec-b3c6-3855bd6c1161" (UID: "1fccb241-c75e-4cec-b3c6-3855bd6c1161"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.790112 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aa387033-dc08-48f4-bf56-06a7f316423c-logs" (OuterVolumeSpecName: "logs") pod "aa387033-dc08-48f4-bf56-06a7f316423c" (UID: "aa387033-dc08-48f4-bf56-06a7f316423c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.791271 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2ba42f6a-1ade-45ae-bdc4-117ad2fa866b-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "2ba42f6a-1ade-45ae-bdc4-117ad2fa866b" (UID: "2ba42f6a-1ade-45ae-bdc4-117ad2fa866b"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.791747 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aa387033-dc08-48f4-bf56-06a7f316423c-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "aa387033-dc08-48f4-bf56-06a7f316423c" (UID: "aa387033-dc08-48f4-bf56-06a7f316423c"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.792435 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/33be1904-bd58-48cc-806a-af1dc751717c-scripts" (OuterVolumeSpecName: "scripts") pod "33be1904-bd58-48cc-806a-af1dc751717c" (UID: "33be1904-bd58-48cc-806a-af1dc751717c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.796491 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "glance") pod "aa387033-dc08-48f4-bf56-06a7f316423c" (UID: "aa387033-dc08-48f4-bf56-06a7f316423c"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.798010 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ba42f6a-1ade-45ae-bdc4-117ad2fa866b-scripts" (OuterVolumeSpecName: "scripts") pod "2ba42f6a-1ade-45ae-bdc4-117ad2fa866b" (UID: "2ba42f6a-1ade-45ae-bdc4-117ad2fa866b"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.809960 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-103a-account-create-update-whkzm"] Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.810141 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/33be1904-bd58-48cc-806a-af1dc751717c-ovn-rundir" (OuterVolumeSpecName: "ovn-rundir") pod "33be1904-bd58-48cc-806a-af1dc751717c" (UID: "33be1904-bd58-48cc-806a-af1dc751717c"). InnerVolumeSpecName "ovn-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.810355 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/33be1904-bd58-48cc-806a-af1dc751717c-config" (OuterVolumeSpecName: "config") pod "33be1904-bd58-48cc-806a-af1dc751717c" (UID: "33be1904-bd58-48cc-806a-af1dc751717c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.811444 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2ba42f6a-1ade-45ae-bdc4-117ad2fa866b-logs" (OuterVolumeSpecName: "logs") pod "2ba42f6a-1ade-45ae-bdc4-117ad2fa866b" (UID: "2ba42f6a-1ade-45ae-bdc4-117ad2fa866b"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.811806 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.811971 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ba42f6a-1ade-45ae-bdc4-117ad2fa866b-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "2ba42f6a-1ade-45ae-bdc4-117ad2fa866b" (UID: "2ba42f6a-1ade-45ae-bdc4-117ad2fa866b"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.813378 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-103a-account-create-update-whkzm"] Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.817890 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aa387033-dc08-48f4-bf56-06a7f316423c-kube-api-access-rs4zr" (OuterVolumeSpecName: "kube-api-access-rs4zr") pod "aa387033-dc08-48f4-bf56-06a7f316423c" (UID: "aa387033-dc08-48f4-bf56-06a7f316423c"). InnerVolumeSpecName "kube-api-access-rs4zr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.818111 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2ba42f6a-1ade-45ae-bdc4-117ad2fa866b-kube-api-access-tlnbb" (OuterVolumeSpecName: "kube-api-access-tlnbb") pod "2ba42f6a-1ade-45ae-bdc4-117ad2fa866b" (UID: "2ba42f6a-1ade-45ae-bdc4-117ad2fa866b"). InnerVolumeSpecName "kube-api-access-tlnbb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.826415 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/33be1904-bd58-48cc-806a-af1dc751717c-kube-api-access-r95cx" (OuterVolumeSpecName: "kube-api-access-r95cx") pod "33be1904-bd58-48cc-806a-af1dc751717c" (UID: "33be1904-bd58-48cc-806a-af1dc751717c"). InnerVolumeSpecName "kube-api-access-r95cx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.832608 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-6218-account-create-update-gkxlw"] Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.833738 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa387033-dc08-48f4-bf56-06a7f316423c-scripts" (OuterVolumeSpecName: "scripts") pod "aa387033-dc08-48f4-bf56-06a7f316423c" (UID: "aa387033-dc08-48f4-bf56-06a7f316423c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.834673 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1fccb241-c75e-4cec-b3c6-3855bd6c1161-kube-api-access-95wrc" (OuterVolumeSpecName: "kube-api-access-95wrc") pod "1fccb241-c75e-4cec-b3c6-3855bd6c1161" (UID: "1fccb241-c75e-4cec-b3c6-3855bd6c1161"). InnerVolumeSpecName "kube-api-access-95wrc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.842856 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-6218-account-create-update-gkxlw"] Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.848561 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-bb2e-account-create-update-gqjcg"] Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.864506 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-bb2e-account-create-update-gqjcg"] Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.868577 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.872387 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.878030 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1fccb241-c75e-4cec-b3c6-3855bd6c1161-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1fccb241-c75e-4cec-b3c6-3855bd6c1161" (UID: "1fccb241-c75e-4cec-b3c6-3855bd6c1161"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.878673 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/33be1904-bd58-48cc-806a-af1dc751717c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "33be1904-bd58-48cc-806a-af1dc751717c" (UID: "33be1904-bd58-48cc-806a-af1dc751717c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.894786 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-fbd5dff98-mmhdt"] Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.897128 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1fccb241-c75e-4cec-b3c6-3855bd6c1161-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.897149 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fccb241-c75e-4cec-b3c6-3855bd6c1161-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.897174 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aa387033-dc08-48f4-bf56-06a7f316423c-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.897185 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef319bad-004a-4140-b9a3-e34b376460da-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.897211 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.897221 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/33be1904-bd58-48cc-806a-af1dc751717c-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.897230 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2ba42f6a-1ade-45ae-bdc4-117ad2fa866b-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.897244 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2ba42f6a-1ade-45ae-bdc4-117ad2fa866b-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.897253 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2ba42f6a-1ade-45ae-bdc4-117ad2fa866b-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.897262 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/aa387033-dc08-48f4-bf56-06a7f316423c-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.897273 4558 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2ba42f6a-1ade-45ae-bdc4-117ad2fa866b-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.897284 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-95wrc\" (UniqueName: \"kubernetes.io/projected/1fccb241-c75e-4cec-b3c6-3855bd6c1161-kube-api-access-95wrc\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.897293 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.897308 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa387033-dc08-48f4-bf56-06a7f316423c-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.897320 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r95cx\" (UniqueName: \"kubernetes.io/projected/33be1904-bd58-48cc-806a-af1dc751717c-kube-api-access-r95cx\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.897329 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rs4zr\" (UniqueName: \"kubernetes.io/projected/aa387033-dc08-48f4-bf56-06a7f316423c-kube-api-access-rs4zr\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.897338 4558 reconciler_common.go:293] "Volume detached for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/33be1904-bd58-48cc-806a-af1dc751717c-ovn-rundir\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.897346 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tlnbb\" (UniqueName: \"kubernetes.io/projected/2ba42f6a-1ade-45ae-bdc4-117ad2fa866b-kube-api-access-tlnbb\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.897358 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33be1904-bd58-48cc-806a-af1dc751717c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.897367 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/33be1904-bd58-48cc-806a-af1dc751717c-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:48 crc kubenswrapper[4558]: E0120 17:05:48.899753 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="16d11c7e8c68131f8eb72146637f28a05ad570c65546debfb217fa0b00e58dae" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:05:48 crc kubenswrapper[4558]: E0120 17:05:48.901855 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="16d11c7e8c68131f8eb72146637f28a05ad570c65546debfb217fa0b00e58dae" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.901930 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/placement-fbd5dff98-mmhdt"] Jan 20 17:05:48 crc kubenswrapper[4558]: E0120 17:05:48.903206 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="16d11c7e8c68131f8eb72146637f28a05ad570c65546debfb217fa0b00e58dae" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:05:48 crc kubenswrapper[4558]: E0120 17:05:48.903340 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" 
probeType="Readiness" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podUID="084a5ce8-2844-42f1-92a9-973b78505050" containerName="nova-cell1-conductor-conductor" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.915059 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ba42f6a-1ade-45ae-bdc4-117ad2fa866b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2ba42f6a-1ade-45ae-bdc4-117ad2fa866b" (UID: "2ba42f6a-1ade-45ae-bdc4-117ad2fa866b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.915482 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa387033-dc08-48f4-bf56-06a7f316423c-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "aa387033-dc08-48f4-bf56-06a7f316423c" (UID: "aa387033-dc08-48f4-bf56-06a7f316423c"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.915534 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-worker-57fd8b7745-hswtl"] Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.918574 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ba42f6a-1ade-45ae-bdc4-117ad2fa866b-config-data" (OuterVolumeSpecName: "config-data") pod "2ba42f6a-1ade-45ae-bdc4-117ad2fa866b" (UID: "2ba42f6a-1ade-45ae-bdc4-117ad2fa866b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.920427 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-worker-57fd8b7745-hswtl"] Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.925858 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa387033-dc08-48f4-bf56-06a7f316423c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "aa387033-dc08-48f4-bf56-06a7f316423c" (UID: "aa387033-dc08-48f4-bf56-06a7f316423c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.926467 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1fccb241-c75e-4cec-b3c6-3855bd6c1161-config-data" (OuterVolumeSpecName: "config-data") pod "1fccb241-c75e-4cec-b3c6-3855bd6c1161" (UID: "1fccb241-c75e-4cec-b3c6-3855bd6c1161"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.929430 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa387033-dc08-48f4-bf56-06a7f316423c-config-data" (OuterVolumeSpecName: "config-data") pod "aa387033-dc08-48f4-bf56-06a7f316423c" (UID: "aa387033-dc08-48f4-bf56-06a7f316423c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.929959 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.930077 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ba42f6a-1ade-45ae-bdc4-117ad2fa866b-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "2ba42f6a-1ade-45ae-bdc4-117ad2fa866b" (UID: "2ba42f6a-1ade-45ae-bdc4-117ad2fa866b"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.938850 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1fccb241-c75e-4cec-b3c6-3855bd6c1161-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "1fccb241-c75e-4cec-b3c6-3855bd6c1161" (UID: "1fccb241-c75e-4cec-b3c6-3855bd6c1161"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.939413 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/33be1904-bd58-48cc-806a-af1dc751717c-ovn-northd-tls-certs" (OuterVolumeSpecName: "ovn-northd-tls-certs") pod "33be1904-bd58-48cc-806a-af1dc751717c" (UID: "33be1904-bd58-48cc-806a-af1dc751717c"). InnerVolumeSpecName "ovn-northd-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.943857 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ba42f6a-1ade-45ae-bdc4-117ad2fa866b-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "2ba42f6a-1ade-45ae-bdc4-117ad2fa866b" (UID: "2ba42f6a-1ade-45ae-bdc4-117ad2fa866b"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.949193 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/33be1904-bd58-48cc-806a-af1dc751717c-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "33be1904-bd58-48cc-806a-af1dc751717c" (UID: "33be1904-bd58-48cc-806a-af1dc751717c"). InnerVolumeSpecName "metrics-certs-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.983025 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/barbican-api-869df765b4-9hk5q" podUID="6c75ee5c-beea-4b1e-b429-91e83a472529" containerName="barbican-api" probeResult="failure" output="Get \"https://10.217.0.254:9311/healthcheck\": read tcp 10.217.0.2:58354->10.217.0.254:9311: read: connection reset by peer" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.983071 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/barbican-api-869df765b4-9hk5q" podUID="6c75ee5c-beea-4b1e-b429-91e83a472529" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.0.254:9311/healthcheck\": read tcp 10.217.0.2:58352->10.217.0.254:9311: read: connection reset by peer" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.999029 4558 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/1fccb241-c75e-4cec-b3c6-3855bd6c1161-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.999065 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f6dee0f3-6849-442d-b8fc-417c8540c9f9-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.999076 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa387033-dc08-48f4-bf56-06a7f316423c-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.999085 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ba42f6a-1ade-45ae-bdc4-117ad2fa866b-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.999095 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ba42f6a-1ade-45ae-bdc4-117ad2fa866b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.999103 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa387033-dc08-48f4-bf56-06a7f316423c-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.999113 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2ba42f6a-1ade-45ae-bdc4-117ad2fa866b-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.999123 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-42zjp\" (UniqueName: \"kubernetes.io/projected/f6dee0f3-6849-442d-b8fc-417c8540c9f9-kube-api-access-42zjp\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.999131 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2ba42f6a-1ade-45ae-bdc4-117ad2fa866b-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.999139 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath 
\"\"" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.999148 4558 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/33be1904-bd58-48cc-806a-af1dc751717c-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.999156 4558 reconciler_common.go:293] "Volume detached for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/33be1904-bd58-48cc-806a-af1dc751717c-ovn-northd-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.999176 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1fccb241-c75e-4cec-b3c6-3855bd6c1161-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:48 crc kubenswrapper[4558]: I0120 17:05:48.999185 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa387033-dc08-48f4-bf56-06a7f316423c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.100996 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0fcabe47-75cd-4bff-ba51-8a65a23b3f1f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.414205 4558 generic.go:334] "Generic (PLEG): container finished" podID="1fccb241-c75e-4cec-b3c6-3855bd6c1161" containerID="1e838eb49cb5fab8df3c83e562edc9dac0c018daeb2274638eb55d7398246708" exitCode=0 Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.414371 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"1fccb241-c75e-4cec-b3c6-3855bd6c1161","Type":"ContainerDied","Data":"1e838eb49cb5fab8df3c83e562edc9dac0c018daeb2274638eb55d7398246708"} Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.414693 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"1fccb241-c75e-4cec-b3c6-3855bd6c1161","Type":"ContainerDied","Data":"cdf7d182a2c2cc94e19bcd44382c4ae7cad216f7aaf562ecfb18164494c47997"} Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.414737 4558 scope.go:117] "RemoveContainer" containerID="1e838eb49cb5fab8df3c83e562edc9dac0c018daeb2274638eb55d7398246708" Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.414487 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.420933 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"aa387033-dc08-48f4-bf56-06a7f316423c","Type":"ContainerDied","Data":"0896c30840f3c0d1f1d38416ad91e0d100b15726ec459c525d4c14686bd6c63c"} Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.421026 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.425842 4558 generic.go:334] "Generic (PLEG): container finished" podID="8b9f5a8b-ee37-483c-9a86-38cc24dcb388" containerID="4b73519a38610ac22d4ce96ba40226baebd8e6660f77f9013e57a5c57815f3a9" exitCode=0 Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.425927 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-65d75d5fbb-nkccr" event={"ID":"8b9f5a8b-ee37-483c-9a86-38cc24dcb388","Type":"ContainerDied","Data":"4b73519a38610ac22d4ce96ba40226baebd8e6660f77f9013e57a5c57815f3a9"} Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.435910 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"2ba42f6a-1ade-45ae-bdc4-117ad2fa866b","Type":"ContainerDied","Data":"e3ed7eb8d0b3d38d0512c2f86d418adeb35f23fda8f7c085ab363db73fe8ad6e"} Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.435994 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.439540 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovn-northd-0_33be1904-bd58-48cc-806a-af1dc751717c/ovn-northd/0.log" Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.439595 4558 generic.go:334] "Generic (PLEG): container finished" podID="33be1904-bd58-48cc-806a-af1dc751717c" containerID="811380372fc9996d1d29a2c8b3863bd8fa15b182a9a97db33dd3fd3b535d9455" exitCode=139 Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.439623 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"33be1904-bd58-48cc-806a-af1dc751717c","Type":"ContainerDied","Data":"811380372fc9996d1d29a2c8b3863bd8fa15b182a9a97db33dd3fd3b535d9455"} Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.439649 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"33be1904-bd58-48cc-806a-af1dc751717c","Type":"ContainerDied","Data":"ddc37301ab3aa36d3ab1a40427fee1461327dafd15990353b26f1b5e385a5fff"} Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.439694 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.444645 4558 generic.go:334] "Generic (PLEG): container finished" podID="6c75ee5c-beea-4b1e-b429-91e83a472529" containerID="be2a0248e5982920b07bb828a104cefcb46f9233f457cf95302aceef0c8d8b6f" exitCode=0 Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.444702 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-869df765b4-9hk5q" event={"ID":"6c75ee5c-beea-4b1e-b429-91e83a472529","Type":"ContainerDied","Data":"be2a0248e5982920b07bb828a104cefcb46f9233f457cf95302aceef0c8d8b6f"} Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.444720 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-869df765b4-9hk5q" event={"ID":"6c75ee5c-beea-4b1e-b429-91e83a472529","Type":"ContainerDied","Data":"cda8462cafc6b4135f29e794c8f02dc33b9c301c7a707bb88a79a986d70f78e6"} Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.444733 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cda8462cafc6b4135f29e794c8f02dc33b9c301c7a707bb88a79a986d70f78e6" Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.457642 4558 generic.go:334] "Generic (PLEG): container finished" podID="084a5ce8-2844-42f1-92a9-973b78505050" containerID="16d11c7e8c68131f8eb72146637f28a05ad570c65546debfb217fa0b00e58dae" exitCode=0 Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.458436 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"084a5ce8-2844-42f1-92a9-973b78505050","Type":"ContainerDied","Data":"16d11c7e8c68131f8eb72146637f28a05ad570c65546debfb217fa0b00e58dae"} Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.458467 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"084a5ce8-2844-42f1-92a9-973b78505050","Type":"ContainerDied","Data":"180d138c277666dc70017cc01b02accfa78bd539075052a5a8ffc82c79420eca"} Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.458480 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="180d138c277666dc70017cc01b02accfa78bd539075052a5a8ffc82c79420eca" Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.458537 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.469062 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-api-869df765b4-9hk5q" Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.480935 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.485530 4558 scope.go:117] "RemoveContainer" containerID="e6a99988097b2873c0b2ced77f6588f66ac0fb5bb22b3ef8e1e7a52ca42e4820" Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.517244 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.518349 4558 scope.go:117] "RemoveContainer" containerID="1e838eb49cb5fab8df3c83e562edc9dac0c018daeb2274638eb55d7398246708" Jan 20 17:05:49 crc kubenswrapper[4558]: E0120 17:05:49.518700 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1e838eb49cb5fab8df3c83e562edc9dac0c018daeb2274638eb55d7398246708\": container with ID starting with 1e838eb49cb5fab8df3c83e562edc9dac0c018daeb2274638eb55d7398246708 not found: ID does not exist" containerID="1e838eb49cb5fab8df3c83e562edc9dac0c018daeb2274638eb55d7398246708" Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.518730 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1e838eb49cb5fab8df3c83e562edc9dac0c018daeb2274638eb55d7398246708"} err="failed to get container status \"1e838eb49cb5fab8df3c83e562edc9dac0c018daeb2274638eb55d7398246708\": rpc error: code = NotFound desc = could not find container \"1e838eb49cb5fab8df3c83e562edc9dac0c018daeb2274638eb55d7398246708\": container with ID starting with 1e838eb49cb5fab8df3c83e562edc9dac0c018daeb2274638eb55d7398246708 not found: ID does not exist" Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.518752 4558 scope.go:117] "RemoveContainer" containerID="e6a99988097b2873c0b2ced77f6588f66ac0fb5bb22b3ef8e1e7a52ca42e4820" Jan 20 17:05:49 crc kubenswrapper[4558]: E0120 17:05:49.525871 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e6a99988097b2873c0b2ced77f6588f66ac0fb5bb22b3ef8e1e7a52ca42e4820\": container with ID starting with e6a99988097b2873c0b2ced77f6588f66ac0fb5bb22b3ef8e1e7a52ca42e4820 not found: ID does not exist" containerID="e6a99988097b2873c0b2ced77f6588f66ac0fb5bb22b3ef8e1e7a52ca42e4820" Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.525895 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e6a99988097b2873c0b2ced77f6588f66ac0fb5bb22b3ef8e1e7a52ca42e4820"} err="failed to get container status \"e6a99988097b2873c0b2ced77f6588f66ac0fb5bb22b3ef8e1e7a52ca42e4820\": rpc error: code = NotFound desc = could not find container \"e6a99988097b2873c0b2ced77f6588f66ac0fb5bb22b3ef8e1e7a52ca42e4820\": container with ID starting with e6a99988097b2873c0b2ced77f6588f66ac0fb5bb22b3ef8e1e7a52ca42e4820 not found: ID does not exist" Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.525911 4558 scope.go:117] "RemoveContainer" containerID="32b6c39544892ca39cef55614e1cf5d22b40b7f421e0e33ba0fd15c7434cf00e" Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.528543 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.537784 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.541922 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] 
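
The RemoveContainer / "ContainerStatus from runtime service failed" / "DeleteContainer returned error" sequence above is the usual teardown race: by the time the kubelet asks CRI-O about a container it is deleting, the container has already been removed, so the NotFound reply is typically benign. When triaging a log like this one, a small helper can separate these expected NotFound messages from real failures. The sketch below is illustrative only, assuming nothing beyond the kubenswrapper message format seen above; the script and its names are hypothetical and are not part of the kuttl, Zuul, or kubelet tooling.

#!/usr/bin/env python3
# Illustrative triage helper (hypothetical, not part of the CI job above):
# counts how often the kubelet hits a NotFound error while removing a
# container, i.e. the cleanup race visible in the surrounding log entries.
import re
import sys
from collections import Counter

# Pattern strings copied from the kubenswrapper messages seen in this log.
NOT_FOUND = re.compile(r'"ContainerStatus from runtime service failed".*code = NotFound')
DELETE_ERR = re.compile(r'"DeleteContainer returned error".*not found: ID does not exist')
REMOVE = re.compile(r'"RemoveContainer" containerID="([0-9a-f]+)"')

def summarize(path: str) -> Counter:
    """Scan a plain-text kubelet log and tally removal-related messages."""
    counts: Counter = Counter()
    with open(path, errors="replace") as fh:
        for line in fh:
            if NOT_FOUND.search(line):
                counts["container_status_not_found"] += 1
            if DELETE_ERR.search(line):
                counts["delete_container_not_found"] += 1
            if REMOVE.search(line):
                counts["remove_container_requests"] += 1
    return counts

if __name__ == "__main__":
    for name, value in summarize(sys.argv[1]).most_common():
        print(f"{name}: {value}")

Run against the decompressed kubelet.log, this gives a quick count of cleanup-race errors before digging into individual pods; the log resumes below.
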
Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.547669 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.550806 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.554959 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.560122 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.568072 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.571737 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.609212 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6c75ee5c-beea-4b1e-b429-91e83a472529-internal-tls-certs\") pod \"6c75ee5c-beea-4b1e-b429-91e83a472529\" (UID: \"6c75ee5c-beea-4b1e-b429-91e83a472529\") " Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.609269 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/084a5ce8-2844-42f1-92a9-973b78505050-combined-ca-bundle\") pod \"084a5ce8-2844-42f1-92a9-973b78505050\" (UID: \"084a5ce8-2844-42f1-92a9-973b78505050\") " Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.609327 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6c75ee5c-beea-4b1e-b429-91e83a472529-logs\") pod \"6c75ee5c-beea-4b1e-b429-91e83a472529\" (UID: \"6c75ee5c-beea-4b1e-b429-91e83a472529\") " Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.609365 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzvrq\" (UniqueName: \"kubernetes.io/projected/084a5ce8-2844-42f1-92a9-973b78505050-kube-api-access-nzvrq\") pod \"084a5ce8-2844-42f1-92a9-973b78505050\" (UID: \"084a5ce8-2844-42f1-92a9-973b78505050\") " Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.609468 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c75ee5c-beea-4b1e-b429-91e83a472529-combined-ca-bundle\") pod \"6c75ee5c-beea-4b1e-b429-91e83a472529\" (UID: \"6c75ee5c-beea-4b1e-b429-91e83a472529\") " Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.609524 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/084a5ce8-2844-42f1-92a9-973b78505050-config-data\") pod \"084a5ce8-2844-42f1-92a9-973b78505050\" (UID: \"084a5ce8-2844-42f1-92a9-973b78505050\") " Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.609615 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6c75ee5c-beea-4b1e-b429-91e83a472529-public-tls-certs\") pod \"6c75ee5c-beea-4b1e-b429-91e83a472529\" (UID: \"6c75ee5c-beea-4b1e-b429-91e83a472529\") " Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 
17:05:49.609639 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-86cwc\" (UniqueName: \"kubernetes.io/projected/6c75ee5c-beea-4b1e-b429-91e83a472529-kube-api-access-86cwc\") pod \"6c75ee5c-beea-4b1e-b429-91e83a472529\" (UID: \"6c75ee5c-beea-4b1e-b429-91e83a472529\") " Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.609707 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6c75ee5c-beea-4b1e-b429-91e83a472529-config-data-custom\") pod \"6c75ee5c-beea-4b1e-b429-91e83a472529\" (UID: \"6c75ee5c-beea-4b1e-b429-91e83a472529\") " Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.609763 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c75ee5c-beea-4b1e-b429-91e83a472529-config-data\") pod \"6c75ee5c-beea-4b1e-b429-91e83a472529\" (UID: \"6c75ee5c-beea-4b1e-b429-91e83a472529\") " Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.614611 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c75ee5c-beea-4b1e-b429-91e83a472529-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "6c75ee5c-beea-4b1e-b429-91e83a472529" (UID: "6c75ee5c-beea-4b1e-b429-91e83a472529"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.614794 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6c75ee5c-beea-4b1e-b429-91e83a472529-logs" (OuterVolumeSpecName: "logs") pod "6c75ee5c-beea-4b1e-b429-91e83a472529" (UID: "6c75ee5c-beea-4b1e-b429-91e83a472529"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.615094 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6c75ee5c-beea-4b1e-b429-91e83a472529-kube-api-access-86cwc" (OuterVolumeSpecName: "kube-api-access-86cwc") pod "6c75ee5c-beea-4b1e-b429-91e83a472529" (UID: "6c75ee5c-beea-4b1e-b429-91e83a472529"). InnerVolumeSpecName "kube-api-access-86cwc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.619063 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/084a5ce8-2844-42f1-92a9-973b78505050-kube-api-access-nzvrq" (OuterVolumeSpecName: "kube-api-access-nzvrq") pod "084a5ce8-2844-42f1-92a9-973b78505050" (UID: "084a5ce8-2844-42f1-92a9-973b78505050"). InnerVolumeSpecName "kube-api-access-nzvrq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.643350 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-65d75d5fbb-nkccr" Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.644174 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/084a5ce8-2844-42f1-92a9-973b78505050-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "084a5ce8-2844-42f1-92a9-973b78505050" (UID: "084a5ce8-2844-42f1-92a9-973b78505050"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.645618 4558 scope.go:117] "RemoveContainer" containerID="cb8be596c569251416c1e084cf32b390ad320911bfbd93edd669cf0bc7d40234" Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.649535 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c75ee5c-beea-4b1e-b429-91e83a472529-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "6c75ee5c-beea-4b1e-b429-91e83a472529" (UID: "6c75ee5c-beea-4b1e-b429-91e83a472529"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.651209 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c75ee5c-beea-4b1e-b429-91e83a472529-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "6c75ee5c-beea-4b1e-b429-91e83a472529" (UID: "6c75ee5c-beea-4b1e-b429-91e83a472529"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.660140 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/084a5ce8-2844-42f1-92a9-973b78505050-config-data" (OuterVolumeSpecName: "config-data") pod "084a5ce8-2844-42f1-92a9-973b78505050" (UID: "084a5ce8-2844-42f1-92a9-973b78505050"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.667952 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c75ee5c-beea-4b1e-b429-91e83a472529-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6c75ee5c-beea-4b1e-b429-91e83a472529" (UID: "6c75ee5c-beea-4b1e-b429-91e83a472529"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.669098 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c75ee5c-beea-4b1e-b429-91e83a472529-config-data" (OuterVolumeSpecName: "config-data") pod "6c75ee5c-beea-4b1e-b429-91e83a472529" (UID: "6c75ee5c-beea-4b1e-b429-91e83a472529"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.670934 4558 scope.go:117] "RemoveContainer" containerID="e91b0886ad44950f7bbfb790fe06f0f90a6fc3aaa9d7fe9e510d991406a8e51c" Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.704483 4558 scope.go:117] "RemoveContainer" containerID="a818a123ac4745b3c52a6e67fb8633fbe730b34a827cef46353c35fb329810d7" Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.712588 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c75ee5c-beea-4b1e-b429-91e83a472529-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.712622 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6c75ee5c-beea-4b1e-b429-91e83a472529-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.712640 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/084a5ce8-2844-42f1-92a9-973b78505050-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.712660 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6c75ee5c-beea-4b1e-b429-91e83a472529-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.712671 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzvrq\" (UniqueName: \"kubernetes.io/projected/084a5ce8-2844-42f1-92a9-973b78505050-kube-api-access-nzvrq\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.712726 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c75ee5c-beea-4b1e-b429-91e83a472529-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.712743 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/084a5ce8-2844-42f1-92a9-973b78505050-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.712754 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6c75ee5c-beea-4b1e-b429-91e83a472529-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.712766 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-86cwc\" (UniqueName: \"kubernetes.io/projected/6c75ee5c-beea-4b1e-b429-91e83a472529-kube-api-access-86cwc\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.712778 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6c75ee5c-beea-4b1e-b429-91e83a472529-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.725605 4558 scope.go:117] "RemoveContainer" containerID="eb1f0784a2f2122587c3f2a6924c91413be937ed5c1f85ceae798d13f078873b" Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.745432 4558 scope.go:117] "RemoveContainer" containerID="811380372fc9996d1d29a2c8b3863bd8fa15b182a9a97db33dd3fd3b535d9455" Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.759663 4558 scope.go:117] 
"RemoveContainer" containerID="eb1f0784a2f2122587c3f2a6924c91413be937ed5c1f85ceae798d13f078873b" Jan 20 17:05:49 crc kubenswrapper[4558]: E0120 17:05:49.760178 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eb1f0784a2f2122587c3f2a6924c91413be937ed5c1f85ceae798d13f078873b\": container with ID starting with eb1f0784a2f2122587c3f2a6924c91413be937ed5c1f85ceae798d13f078873b not found: ID does not exist" containerID="eb1f0784a2f2122587c3f2a6924c91413be937ed5c1f85ceae798d13f078873b" Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.760240 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eb1f0784a2f2122587c3f2a6924c91413be937ed5c1f85ceae798d13f078873b"} err="failed to get container status \"eb1f0784a2f2122587c3f2a6924c91413be937ed5c1f85ceae798d13f078873b\": rpc error: code = NotFound desc = could not find container \"eb1f0784a2f2122587c3f2a6924c91413be937ed5c1f85ceae798d13f078873b\": container with ID starting with eb1f0784a2f2122587c3f2a6924c91413be937ed5c1f85ceae798d13f078873b not found: ID does not exist" Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.760280 4558 scope.go:117] "RemoveContainer" containerID="811380372fc9996d1d29a2c8b3863bd8fa15b182a9a97db33dd3fd3b535d9455" Jan 20 17:05:49 crc kubenswrapper[4558]: E0120 17:05:49.760855 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"811380372fc9996d1d29a2c8b3863bd8fa15b182a9a97db33dd3fd3b535d9455\": container with ID starting with 811380372fc9996d1d29a2c8b3863bd8fa15b182a9a97db33dd3fd3b535d9455 not found: ID does not exist" containerID="811380372fc9996d1d29a2c8b3863bd8fa15b182a9a97db33dd3fd3b535d9455" Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.760897 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"811380372fc9996d1d29a2c8b3863bd8fa15b182a9a97db33dd3fd3b535d9455"} err="failed to get container status \"811380372fc9996d1d29a2c8b3863bd8fa15b182a9a97db33dd3fd3b535d9455\": rpc error: code = NotFound desc = could not find container \"811380372fc9996d1d29a2c8b3863bd8fa15b182a9a97db33dd3fd3b535d9455\": container with ID starting with 811380372fc9996d1d29a2c8b3863bd8fa15b182a9a97db33dd3fd3b535d9455 not found: ID does not exist" Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.813969 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8b9f5a8b-ee37-483c-9a86-38cc24dcb388-scripts\") pod \"8b9f5a8b-ee37-483c-9a86-38cc24dcb388\" (UID: \"8b9f5a8b-ee37-483c-9a86-38cc24dcb388\") " Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.814028 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8b9f5a8b-ee37-483c-9a86-38cc24dcb388-credential-keys\") pod \"8b9f5a8b-ee37-483c-9a86-38cc24dcb388\" (UID: \"8b9f5a8b-ee37-483c-9a86-38cc24dcb388\") " Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.814054 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b9f5a8b-ee37-483c-9a86-38cc24dcb388-combined-ca-bundle\") pod \"8b9f5a8b-ee37-483c-9a86-38cc24dcb388\" (UID: \"8b9f5a8b-ee37-483c-9a86-38cc24dcb388\") " Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.814106 4558 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b9f5a8b-ee37-483c-9a86-38cc24dcb388-public-tls-certs\") pod \"8b9f5a8b-ee37-483c-9a86-38cc24dcb388\" (UID: \"8b9f5a8b-ee37-483c-9a86-38cc24dcb388\") " Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.814551 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b9f5a8b-ee37-483c-9a86-38cc24dcb388-internal-tls-certs\") pod \"8b9f5a8b-ee37-483c-9a86-38cc24dcb388\" (UID: \"8b9f5a8b-ee37-483c-9a86-38cc24dcb388\") " Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.814604 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8b9f5a8b-ee37-483c-9a86-38cc24dcb388-fernet-keys\") pod \"8b9f5a8b-ee37-483c-9a86-38cc24dcb388\" (UID: \"8b9f5a8b-ee37-483c-9a86-38cc24dcb388\") " Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.814624 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8b9f5a8b-ee37-483c-9a86-38cc24dcb388-config-data\") pod \"8b9f5a8b-ee37-483c-9a86-38cc24dcb388\" (UID: \"8b9f5a8b-ee37-483c-9a86-38cc24dcb388\") " Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.814652 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xdsrw\" (UniqueName: \"kubernetes.io/projected/8b9f5a8b-ee37-483c-9a86-38cc24dcb388-kube-api-access-xdsrw\") pod \"8b9f5a8b-ee37-483c-9a86-38cc24dcb388\" (UID: \"8b9f5a8b-ee37-483c-9a86-38cc24dcb388\") " Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.817923 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8b9f5a8b-ee37-483c-9a86-38cc24dcb388-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "8b9f5a8b-ee37-483c-9a86-38cc24dcb388" (UID: "8b9f5a8b-ee37-483c-9a86-38cc24dcb388"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.818260 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8b9f5a8b-ee37-483c-9a86-38cc24dcb388-scripts" (OuterVolumeSpecName: "scripts") pod "8b9f5a8b-ee37-483c-9a86-38cc24dcb388" (UID: "8b9f5a8b-ee37-483c-9a86-38cc24dcb388"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.818325 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8b9f5a8b-ee37-483c-9a86-38cc24dcb388-kube-api-access-xdsrw" (OuterVolumeSpecName: "kube-api-access-xdsrw") pod "8b9f5a8b-ee37-483c-9a86-38cc24dcb388" (UID: "8b9f5a8b-ee37-483c-9a86-38cc24dcb388"). InnerVolumeSpecName "kube-api-access-xdsrw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.818831 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8b9f5a8b-ee37-483c-9a86-38cc24dcb388-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "8b9f5a8b-ee37-483c-9a86-38cc24dcb388" (UID: "8b9f5a8b-ee37-483c-9a86-38cc24dcb388"). InnerVolumeSpecName "fernet-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.833798 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8b9f5a8b-ee37-483c-9a86-38cc24dcb388-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8b9f5a8b-ee37-483c-9a86-38cc24dcb388" (UID: "8b9f5a8b-ee37-483c-9a86-38cc24dcb388"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.835687 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8b9f5a8b-ee37-483c-9a86-38cc24dcb388-config-data" (OuterVolumeSpecName: "config-data") pod "8b9f5a8b-ee37-483c-9a86-38cc24dcb388" (UID: "8b9f5a8b-ee37-483c-9a86-38cc24dcb388"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.845077 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8b9f5a8b-ee37-483c-9a86-38cc24dcb388-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "8b9f5a8b-ee37-483c-9a86-38cc24dcb388" (UID: "8b9f5a8b-ee37-483c-9a86-38cc24dcb388"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.845549 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8b9f5a8b-ee37-483c-9a86-38cc24dcb388-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "8b9f5a8b-ee37-483c-9a86-38cc24dcb388" (UID: "8b9f5a8b-ee37-483c-9a86-38cc24dcb388"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.862247 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.917667 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b9f5a8b-ee37-483c-9a86-38cc24dcb388-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.917773 4558 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8b9f5a8b-ee37-483c-9a86-38cc24dcb388-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.917838 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8b9f5a8b-ee37-483c-9a86-38cc24dcb388-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.917890 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xdsrw\" (UniqueName: \"kubernetes.io/projected/8b9f5a8b-ee37-483c-9a86-38cc24dcb388-kube-api-access-xdsrw\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.917937 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8b9f5a8b-ee37-483c-9a86-38cc24dcb388-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.917982 4558 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8b9f5a8b-ee37-483c-9a86-38cc24dcb388-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.918034 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b9f5a8b-ee37-483c-9a86-38cc24dcb388-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:49 crc kubenswrapper[4558]: I0120 17:05:49.918082 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b9f5a8b-ee37-483c-9a86-38cc24dcb388-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:50 crc kubenswrapper[4558]: I0120 17:05:50.020707 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/51e263b2-d42e-46fa-99e1-e0c5aa23bcf5-logs\") pod \"51e263b2-d42e-46fa-99e1-e0c5aa23bcf5\" (UID: \"51e263b2-d42e-46fa-99e1-e0c5aa23bcf5\") " Jan 20 17:05:50 crc kubenswrapper[4558]: I0120 17:05:50.020801 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51e263b2-d42e-46fa-99e1-e0c5aa23bcf5-combined-ca-bundle\") pod \"51e263b2-d42e-46fa-99e1-e0c5aa23bcf5\" (UID: \"51e263b2-d42e-46fa-99e1-e0c5aa23bcf5\") " Jan 20 17:05:50 crc kubenswrapper[4558]: I0120 17:05:50.020842 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/51e263b2-d42e-46fa-99e1-e0c5aa23bcf5-internal-tls-certs\") pod \"51e263b2-d42e-46fa-99e1-e0c5aa23bcf5\" (UID: \"51e263b2-d42e-46fa-99e1-e0c5aa23bcf5\") " Jan 20 17:05:50 crc kubenswrapper[4558]: I0120 17:05:50.020862 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-knpz9\" (UniqueName: \"kubernetes.io/projected/51e263b2-d42e-46fa-99e1-e0c5aa23bcf5-kube-api-access-knpz9\") pod 
\"51e263b2-d42e-46fa-99e1-e0c5aa23bcf5\" (UID: \"51e263b2-d42e-46fa-99e1-e0c5aa23bcf5\") " Jan 20 17:05:50 crc kubenswrapper[4558]: I0120 17:05:50.020956 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/51e263b2-d42e-46fa-99e1-e0c5aa23bcf5-config-data\") pod \"51e263b2-d42e-46fa-99e1-e0c5aa23bcf5\" (UID: \"51e263b2-d42e-46fa-99e1-e0c5aa23bcf5\") " Jan 20 17:05:50 crc kubenswrapper[4558]: I0120 17:05:50.020990 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/51e263b2-d42e-46fa-99e1-e0c5aa23bcf5-public-tls-certs\") pod \"51e263b2-d42e-46fa-99e1-e0c5aa23bcf5\" (UID: \"51e263b2-d42e-46fa-99e1-e0c5aa23bcf5\") " Jan 20 17:05:50 crc kubenswrapper[4558]: I0120 17:05:50.021266 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/51e263b2-d42e-46fa-99e1-e0c5aa23bcf5-logs" (OuterVolumeSpecName: "logs") pod "51e263b2-d42e-46fa-99e1-e0c5aa23bcf5" (UID: "51e263b2-d42e-46fa-99e1-e0c5aa23bcf5"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:05:50 crc kubenswrapper[4558]: I0120 17:05:50.022380 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/51e263b2-d42e-46fa-99e1-e0c5aa23bcf5-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:50 crc kubenswrapper[4558]: I0120 17:05:50.025612 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/51e263b2-d42e-46fa-99e1-e0c5aa23bcf5-kube-api-access-knpz9" (OuterVolumeSpecName: "kube-api-access-knpz9") pod "51e263b2-d42e-46fa-99e1-e0c5aa23bcf5" (UID: "51e263b2-d42e-46fa-99e1-e0c5aa23bcf5"). InnerVolumeSpecName "kube-api-access-knpz9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:05:50 crc kubenswrapper[4558]: I0120 17:05:50.038389 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/51e263b2-d42e-46fa-99e1-e0c5aa23bcf5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "51e263b2-d42e-46fa-99e1-e0c5aa23bcf5" (UID: "51e263b2-d42e-46fa-99e1-e0c5aa23bcf5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:50 crc kubenswrapper[4558]: I0120 17:05:50.038692 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/51e263b2-d42e-46fa-99e1-e0c5aa23bcf5-config-data" (OuterVolumeSpecName: "config-data") pod "51e263b2-d42e-46fa-99e1-e0c5aa23bcf5" (UID: "51e263b2-d42e-46fa-99e1-e0c5aa23bcf5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:50 crc kubenswrapper[4558]: I0120 17:05:50.050094 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/51e263b2-d42e-46fa-99e1-e0c5aa23bcf5-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "51e263b2-d42e-46fa-99e1-e0c5aa23bcf5" (UID: "51e263b2-d42e-46fa-99e1-e0c5aa23bcf5"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:50 crc kubenswrapper[4558]: I0120 17:05:50.052582 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/51e263b2-d42e-46fa-99e1-e0c5aa23bcf5-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "51e263b2-d42e-46fa-99e1-e0c5aa23bcf5" (UID: "51e263b2-d42e-46fa-99e1-e0c5aa23bcf5"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:50 crc kubenswrapper[4558]: I0120 17:05:50.127353 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/51e263b2-d42e-46fa-99e1-e0c5aa23bcf5-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:50 crc kubenswrapper[4558]: I0120 17:05:50.127462 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/51e263b2-d42e-46fa-99e1-e0c5aa23bcf5-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:50 crc kubenswrapper[4558]: I0120 17:05:50.127537 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51e263b2-d42e-46fa-99e1-e0c5aa23bcf5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:50 crc kubenswrapper[4558]: I0120 17:05:50.127601 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/51e263b2-d42e-46fa-99e1-e0c5aa23bcf5-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:50 crc kubenswrapper[4558]: I0120 17:05:50.127665 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-knpz9\" (UniqueName: \"kubernetes.io/projected/51e263b2-d42e-46fa-99e1-e0c5aa23bcf5-kube-api-access-knpz9\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:50 crc kubenswrapper[4558]: I0120 17:05:50.472137 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-65d75d5fbb-nkccr" event={"ID":"8b9f5a8b-ee37-483c-9a86-38cc24dcb388","Type":"ContainerDied","Data":"fad79c24b80f092567d316ecb4bfbc3ae5aebc926661fa3565dfa02fbfee3931"} Jan 20 17:05:50 crc kubenswrapper[4558]: I0120 17:05:50.472521 4558 scope.go:117] "RemoveContainer" containerID="4b73519a38610ac22d4ce96ba40226baebd8e6660f77f9013e57a5c57815f3a9" Jan 20 17:05:50 crc kubenswrapper[4558]: I0120 17:05:50.472197 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-65d75d5fbb-nkccr" Jan 20 17:05:50 crc kubenswrapper[4558]: I0120 17:05:50.477832 4558 generic.go:334] "Generic (PLEG): container finished" podID="51e263b2-d42e-46fa-99e1-e0c5aa23bcf5" containerID="5b5134f4161398c21fc398c89b30ad1744c35cb876a3e5afd52d7adcc9031cd4" exitCode=0 Jan 20 17:05:50 crc kubenswrapper[4558]: I0120 17:05:50.477878 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:05:50 crc kubenswrapper[4558]: I0120 17:05:50.477898 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"51e263b2-d42e-46fa-99e1-e0c5aa23bcf5","Type":"ContainerDied","Data":"5b5134f4161398c21fc398c89b30ad1744c35cb876a3e5afd52d7adcc9031cd4"} Jan 20 17:05:50 crc kubenswrapper[4558]: I0120 17:05:50.478096 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"51e263b2-d42e-46fa-99e1-e0c5aa23bcf5","Type":"ContainerDied","Data":"b261479d8214b4c91b06791120f4707885ae512c223f9d9f954e0a00e2256c0c"} Jan 20 17:05:50 crc kubenswrapper[4558]: I0120 17:05:50.478150 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:05:50 crc kubenswrapper[4558]: I0120 17:05:50.478273 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-api-869df765b4-9hk5q" Jan 20 17:05:50 crc kubenswrapper[4558]: I0120 17:05:50.510419 4558 scope.go:117] "RemoveContainer" containerID="5b5134f4161398c21fc398c89b30ad1744c35cb876a3e5afd52d7adcc9031cd4" Jan 20 17:05:50 crc kubenswrapper[4558]: I0120 17:05:50.512459 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-65d75d5fbb-nkccr"] Jan 20 17:05:50 crc kubenswrapper[4558]: I0120 17:05:50.516279 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-65d75d5fbb-nkccr"] Jan 20 17:05:50 crc kubenswrapper[4558]: I0120 17:05:50.531888 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:05:50 crc kubenswrapper[4558]: I0120 17:05:50.532955 4558 scope.go:117] "RemoveContainer" containerID="762c3b53bff8276c53ff1ba0f6fcbbc8ea3579b91262bc868d821bf5b7740f32" Jan 20 17:05:50 crc kubenswrapper[4558]: I0120 17:05:50.539979 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:05:50 crc kubenswrapper[4558]: I0120 17:05:50.547933 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:05:50 crc kubenswrapper[4558]: I0120 17:05:50.554510 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:05:50 crc kubenswrapper[4558]: I0120 17:05:50.554591 4558 scope.go:117] "RemoveContainer" containerID="5b5134f4161398c21fc398c89b30ad1744c35cb876a3e5afd52d7adcc9031cd4" Jan 20 17:05:50 crc kubenswrapper[4558]: E0120 17:05:50.555589 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5b5134f4161398c21fc398c89b30ad1744c35cb876a3e5afd52d7adcc9031cd4\": container with ID starting with 5b5134f4161398c21fc398c89b30ad1744c35cb876a3e5afd52d7adcc9031cd4 not found: ID does not exist" containerID="5b5134f4161398c21fc398c89b30ad1744c35cb876a3e5afd52d7adcc9031cd4" Jan 20 17:05:50 crc kubenswrapper[4558]: I0120 17:05:50.555643 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5b5134f4161398c21fc398c89b30ad1744c35cb876a3e5afd52d7adcc9031cd4"} err="failed to get container status \"5b5134f4161398c21fc398c89b30ad1744c35cb876a3e5afd52d7adcc9031cd4\": rpc error: code = NotFound desc = could not find container \"5b5134f4161398c21fc398c89b30ad1744c35cb876a3e5afd52d7adcc9031cd4\": container 
with ID starting with 5b5134f4161398c21fc398c89b30ad1744c35cb876a3e5afd52d7adcc9031cd4 not found: ID does not exist" Jan 20 17:05:50 crc kubenswrapper[4558]: I0120 17:05:50.555677 4558 scope.go:117] "RemoveContainer" containerID="762c3b53bff8276c53ff1ba0f6fcbbc8ea3579b91262bc868d821bf5b7740f32" Jan 20 17:05:50 crc kubenswrapper[4558]: E0120 17:05:50.559452 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"762c3b53bff8276c53ff1ba0f6fcbbc8ea3579b91262bc868d821bf5b7740f32\": container with ID starting with 762c3b53bff8276c53ff1ba0f6fcbbc8ea3579b91262bc868d821bf5b7740f32 not found: ID does not exist" containerID="762c3b53bff8276c53ff1ba0f6fcbbc8ea3579b91262bc868d821bf5b7740f32" Jan 20 17:05:50 crc kubenswrapper[4558]: I0120 17:05:50.559499 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"762c3b53bff8276c53ff1ba0f6fcbbc8ea3579b91262bc868d821bf5b7740f32"} err="failed to get container status \"762c3b53bff8276c53ff1ba0f6fcbbc8ea3579b91262bc868d821bf5b7740f32\": rpc error: code = NotFound desc = could not find container \"762c3b53bff8276c53ff1ba0f6fcbbc8ea3579b91262bc868d821bf5b7740f32\": container with ID starting with 762c3b53bff8276c53ff1ba0f6fcbbc8ea3579b91262bc868d821bf5b7740f32 not found: ID does not exist" Jan 20 17:05:50 crc kubenswrapper[4558]: I0120 17:05:50.562269 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-api-869df765b4-9hk5q"] Jan 20 17:05:50 crc kubenswrapper[4558]: I0120 17:05:50.575636 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="084a5ce8-2844-42f1-92a9-973b78505050" path="/var/lib/kubelet/pods/084a5ce8-2844-42f1-92a9-973b78505050/volumes" Jan 20 17:05:50 crc kubenswrapper[4558]: I0120 17:05:50.576220 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0fcabe47-75cd-4bff-ba51-8a65a23b3f1f" path="/var/lib/kubelet/pods/0fcabe47-75cd-4bff-ba51-8a65a23b3f1f/volumes" Jan 20 17:05:50 crc kubenswrapper[4558]: I0120 17:05:50.576586 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="19189ead-bcd7-4806-be88-43cc27d5f202" path="/var/lib/kubelet/pods/19189ead-bcd7-4806-be88-43cc27d5f202/volumes" Jan 20 17:05:50 crc kubenswrapper[4558]: I0120 17:05:50.577061 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1fbb53e2-e2a2-4d62-a392-027b1f3f3232" path="/var/lib/kubelet/pods/1fbb53e2-e2a2-4d62-a392-027b1f3f3232/volumes" Jan 20 17:05:50 crc kubenswrapper[4558]: I0120 17:05:50.577969 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1fccb241-c75e-4cec-b3c6-3855bd6c1161" path="/var/lib/kubelet/pods/1fccb241-c75e-4cec-b3c6-3855bd6c1161/volumes" Jan 20 17:05:50 crc kubenswrapper[4558]: I0120 17:05:50.578598 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20ba561a-58e7-459f-ba28-ed0b68cdab9b" path="/var/lib/kubelet/pods/20ba561a-58e7-459f-ba28-ed0b68cdab9b/volumes" Jan 20 17:05:50 crc kubenswrapper[4558]: I0120 17:05:50.579383 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2ba42f6a-1ade-45ae-bdc4-117ad2fa866b" path="/var/lib/kubelet/pods/2ba42f6a-1ade-45ae-bdc4-117ad2fa866b/volumes" Jan 20 17:05:50 crc kubenswrapper[4558]: I0120 17:05:50.580745 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="33be1904-bd58-48cc-806a-af1dc751717c" path="/var/lib/kubelet/pods/33be1904-bd58-48cc-806a-af1dc751717c/volumes" Jan 20 17:05:50 crc 
kubenswrapper[4558]: I0120 17:05:50.581284 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ae31408-2eb5-445e-8f66-f0f0a6a7f9f2" path="/var/lib/kubelet/pods/3ae31408-2eb5-445e-8f66-f0f0a6a7f9f2/volumes" Jan 20 17:05:50 crc kubenswrapper[4558]: I0120 17:05:50.581658 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="51e263b2-d42e-46fa-99e1-e0c5aa23bcf5" path="/var/lib/kubelet/pods/51e263b2-d42e-46fa-99e1-e0c5aa23bcf5/volumes" Jan 20 17:05:50 crc kubenswrapper[4558]: I0120 17:05:50.582677 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8b9f5a8b-ee37-483c-9a86-38cc24dcb388" path="/var/lib/kubelet/pods/8b9f5a8b-ee37-483c-9a86-38cc24dcb388/volumes" Jan 20 17:05:50 crc kubenswrapper[4558]: I0120 17:05:50.583182 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a98c5227-c8a8-4cbc-8039-461dd14fbd5b" path="/var/lib/kubelet/pods/a98c5227-c8a8-4cbc-8039-461dd14fbd5b/volumes" Jan 20 17:05:50 crc kubenswrapper[4558]: I0120 17:05:50.583775 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aa387033-dc08-48f4-bf56-06a7f316423c" path="/var/lib/kubelet/pods/aa387033-dc08-48f4-bf56-06a7f316423c/volumes" Jan 20 17:05:50 crc kubenswrapper[4558]: I0120 17:05:50.584871 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ad58cc97-cbed-48c8-ab51-ebb920a1454c" path="/var/lib/kubelet/pods/ad58cc97-cbed-48c8-ab51-ebb920a1454c/volumes" Jan 20 17:05:50 crc kubenswrapper[4558]: I0120 17:05:50.585538 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d4457629-a457-4a4c-b285-eab441f8d319" path="/var/lib/kubelet/pods/d4457629-a457-4a4c-b285-eab441f8d319/volumes" Jan 20 17:05:50 crc kubenswrapper[4558]: I0120 17:05:50.585893 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ef319bad-004a-4140-b9a3-e34b376460da" path="/var/lib/kubelet/pods/ef319bad-004a-4140-b9a3-e34b376460da/volumes" Jan 20 17:05:50 crc kubenswrapper[4558]: I0120 17:05:50.586910 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f6a8779d-bb69-43c5-96d1-a7669b5dd9ea" path="/var/lib/kubelet/pods/f6a8779d-bb69-43c5-96d1-a7669b5dd9ea/volumes" Jan 20 17:05:50 crc kubenswrapper[4558]: I0120 17:05:50.587131 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f6dee0f3-6849-442d-b8fc-417c8540c9f9" path="/var/lib/kubelet/pods/f6dee0f3-6849-442d-b8fc-417c8540c9f9/volumes" Jan 20 17:05:50 crc kubenswrapper[4558]: I0120 17:05:50.587426 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-api-869df765b4-9hk5q"] Jan 20 17:05:51 crc kubenswrapper[4558]: E0120 17:05:51.144671 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Jan 20 17:05:51 crc kubenswrapper[4558]: E0120 17:05:51.144775 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/0bfc3458-cc0f-4bea-9794-52c5e81fe055-config-data podName:0bfc3458-cc0f-4bea-9794-52c5e81fe055 nodeName:}" failed. No retries permitted until 2026-01-20 17:05:59.144751181 +0000 UTC m=+1452.905089148 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/0bfc3458-cc0f-4bea-9794-52c5e81fe055-config-data") pod "rabbitmq-cell1-server-0" (UID: "0bfc3458-cc0f-4bea-9794-52c5e81fe055") : configmap "rabbitmq-cell1-config-data" not found Jan 20 17:05:51 crc kubenswrapper[4558]: E0120 17:05:51.628378 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="e3a0dcbc1a9d518fbc9c91fd7af2a3a13be64f9278f06d3e4672d909735d4773" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:05:51 crc kubenswrapper[4558]: E0120 17:05:51.631835 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="e3a0dcbc1a9d518fbc9c91fd7af2a3a13be64f9278f06d3e4672d909735d4773" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:05:51 crc kubenswrapper[4558]: E0120 17:05:51.633645 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="e3a0dcbc1a9d518fbc9c91fd7af2a3a13be64f9278f06d3e4672d909735d4773" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:05:51 crc kubenswrapper[4558]: E0120 17:05:51.633682 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="55d5d50d-edc1-4a99-9540-72a7f5f0c622" containerName="nova-cell0-conductor-conductor" Jan 20 17:05:51 crc kubenswrapper[4558]: E0120 17:05:51.864279 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 20 17:05:51 crc kubenswrapper[4558]: E0120 17:05:51.864359 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/ac55b716-d8fd-4628-8627-f94b5a4e7c78-config-data podName:ac55b716-d8fd-4628-8627-f94b5a4e7c78 nodeName:}" failed. No retries permitted until 2026-01-20 17:05:59.86434332 +0000 UTC m=+1453.624681286 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/ac55b716-d8fd-4628-8627-f94b5a4e7c78-config-data") pod "rabbitmq-server-0" (UID: "ac55b716-d8fd-4628-8627-f94b5a4e7c78") : configmap "rabbitmq-config-data" not found Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.035654 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.168898 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0bfc3458-cc0f-4bea-9794-52c5e81fe055-plugins-conf\") pod \"0bfc3458-cc0f-4bea-9794-52c5e81fe055\" (UID: \"0bfc3458-cc0f-4bea-9794-52c5e81fe055\") " Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.168943 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m6dlz\" (UniqueName: \"kubernetes.io/projected/0bfc3458-cc0f-4bea-9794-52c5e81fe055-kube-api-access-m6dlz\") pod \"0bfc3458-cc0f-4bea-9794-52c5e81fe055\" (UID: \"0bfc3458-cc0f-4bea-9794-52c5e81fe055\") " Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.168964 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0bfc3458-cc0f-4bea-9794-52c5e81fe055-pod-info\") pod \"0bfc3458-cc0f-4bea-9794-52c5e81fe055\" (UID: \"0bfc3458-cc0f-4bea-9794-52c5e81fe055\") " Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.168988 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"0bfc3458-cc0f-4bea-9794-52c5e81fe055\" (UID: \"0bfc3458-cc0f-4bea-9794-52c5e81fe055\") " Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.169036 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0bfc3458-cc0f-4bea-9794-52c5e81fe055-rabbitmq-erlang-cookie\") pod \"0bfc3458-cc0f-4bea-9794-52c5e81fe055\" (UID: \"0bfc3458-cc0f-4bea-9794-52c5e81fe055\") " Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.169061 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/0bfc3458-cc0f-4bea-9794-52c5e81fe055-rabbitmq-tls\") pod \"0bfc3458-cc0f-4bea-9794-52c5e81fe055\" (UID: \"0bfc3458-cc0f-4bea-9794-52c5e81fe055\") " Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.169099 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0bfc3458-cc0f-4bea-9794-52c5e81fe055-config-data\") pod \"0bfc3458-cc0f-4bea-9794-52c5e81fe055\" (UID: \"0bfc3458-cc0f-4bea-9794-52c5e81fe055\") " Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.169133 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0bfc3458-cc0f-4bea-9794-52c5e81fe055-rabbitmq-confd\") pod \"0bfc3458-cc0f-4bea-9794-52c5e81fe055\" (UID: \"0bfc3458-cc0f-4bea-9794-52c5e81fe055\") " Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.169211 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0bfc3458-cc0f-4bea-9794-52c5e81fe055-erlang-cookie-secret\") pod \"0bfc3458-cc0f-4bea-9794-52c5e81fe055\" (UID: \"0bfc3458-cc0f-4bea-9794-52c5e81fe055\") " Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.169226 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/0bfc3458-cc0f-4bea-9794-52c5e81fe055-server-conf\") pod \"0bfc3458-cc0f-4bea-9794-52c5e81fe055\" (UID: 
\"0bfc3458-cc0f-4bea-9794-52c5e81fe055\") " Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.169252 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0bfc3458-cc0f-4bea-9794-52c5e81fe055-rabbitmq-plugins\") pod \"0bfc3458-cc0f-4bea-9794-52c5e81fe055\" (UID: \"0bfc3458-cc0f-4bea-9794-52c5e81fe055\") " Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.170013 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0bfc3458-cc0f-4bea-9794-52c5e81fe055-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "0bfc3458-cc0f-4bea-9794-52c5e81fe055" (UID: "0bfc3458-cc0f-4bea-9794-52c5e81fe055"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.170053 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0bfc3458-cc0f-4bea-9794-52c5e81fe055-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "0bfc3458-cc0f-4bea-9794-52c5e81fe055" (UID: "0bfc3458-cc0f-4bea-9794-52c5e81fe055"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.171470 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0bfc3458-cc0f-4bea-9794-52c5e81fe055-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "0bfc3458-cc0f-4bea-9794-52c5e81fe055" (UID: "0bfc3458-cc0f-4bea-9794-52c5e81fe055"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.175439 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0bfc3458-cc0f-4bea-9794-52c5e81fe055-kube-api-access-m6dlz" (OuterVolumeSpecName: "kube-api-access-m6dlz") pod "0bfc3458-cc0f-4bea-9794-52c5e81fe055" (UID: "0bfc3458-cc0f-4bea-9794-52c5e81fe055"). InnerVolumeSpecName "kube-api-access-m6dlz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.180332 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0bfc3458-cc0f-4bea-9794-52c5e81fe055-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "0bfc3458-cc0f-4bea-9794-52c5e81fe055" (UID: "0bfc3458-cc0f-4bea-9794-52c5e81fe055"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.180338 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/0bfc3458-cc0f-4bea-9794-52c5e81fe055-pod-info" (OuterVolumeSpecName: "pod-info") pod "0bfc3458-cc0f-4bea-9794-52c5e81fe055" (UID: "0bfc3458-cc0f-4bea-9794-52c5e81fe055"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.189913 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "persistence") pod "0bfc3458-cc0f-4bea-9794-52c5e81fe055" (UID: "0bfc3458-cc0f-4bea-9794-52c5e81fe055"). InnerVolumeSpecName "local-storage10-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.190982 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/ceilometer-0" podUID="c7187d14-f25b-4344-bf36-7d56d8e1b79c" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.1.29:3000/\": dial tcp 10.217.1.29:3000: connect: connection refused" Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.197528 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0bfc3458-cc0f-4bea-9794-52c5e81fe055-config-data" (OuterVolumeSpecName: "config-data") pod "0bfc3458-cc0f-4bea-9794-52c5e81fe055" (UID: "0bfc3458-cc0f-4bea-9794-52c5e81fe055"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.204941 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0bfc3458-cc0f-4bea-9794-52c5e81fe055-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "0bfc3458-cc0f-4bea-9794-52c5e81fe055" (UID: "0bfc3458-cc0f-4bea-9794-52c5e81fe055"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.221664 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0bfc3458-cc0f-4bea-9794-52c5e81fe055-server-conf" (OuterVolumeSpecName: "server-conf") pod "0bfc3458-cc0f-4bea-9794-52c5e81fe055" (UID: "0bfc3458-cc0f-4bea-9794-52c5e81fe055"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.235532 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0bfc3458-cc0f-4bea-9794-52c5e81fe055-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "0bfc3458-cc0f-4bea-9794-52c5e81fe055" (UID: "0bfc3458-cc0f-4bea-9794-52c5e81fe055"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.239742 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.272371 4558 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0bfc3458-cc0f-4bea-9794-52c5e81fe055-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.272406 4558 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/0bfc3458-cc0f-4bea-9794-52c5e81fe055-server-conf\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.272419 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0bfc3458-cc0f-4bea-9794-52c5e81fe055-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.272437 4558 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0bfc3458-cc0f-4bea-9794-52c5e81fe055-plugins-conf\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.272447 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m6dlz\" (UniqueName: \"kubernetes.io/projected/0bfc3458-cc0f-4bea-9794-52c5e81fe055-kube-api-access-m6dlz\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.272461 4558 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0bfc3458-cc0f-4bea-9794-52c5e81fe055-pod-info\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.272492 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" " Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.272514 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0bfc3458-cc0f-4bea-9794-52c5e81fe055-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.272528 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/0bfc3458-cc0f-4bea-9794-52c5e81fe055-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.272540 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0bfc3458-cc0f-4bea-9794-52c5e81fe055-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.272550 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0bfc3458-cc0f-4bea-9794-52c5e81fe055-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.291977 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc" Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.373623 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ac55b716-d8fd-4628-8627-f94b5a4e7c78-config-data\") pod \"ac55b716-d8fd-4628-8627-f94b5a4e7c78\" (UID: 
\"ac55b716-d8fd-4628-8627-f94b5a4e7c78\") " Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.373677 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ac55b716-d8fd-4628-8627-f94b5a4e7c78\" (UID: \"ac55b716-d8fd-4628-8627-f94b5a4e7c78\") " Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.373706 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/ac55b716-d8fd-4628-8627-f94b5a4e7c78-rabbitmq-tls\") pod \"ac55b716-d8fd-4628-8627-f94b5a4e7c78\" (UID: \"ac55b716-d8fd-4628-8627-f94b5a4e7c78\") " Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.373733 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/ac55b716-d8fd-4628-8627-f94b5a4e7c78-rabbitmq-plugins\") pod \"ac55b716-d8fd-4628-8627-f94b5a4e7c78\" (UID: \"ac55b716-d8fd-4628-8627-f94b5a4e7c78\") " Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.373778 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/ac55b716-d8fd-4628-8627-f94b5a4e7c78-pod-info\") pod \"ac55b716-d8fd-4628-8627-f94b5a4e7c78\" (UID: \"ac55b716-d8fd-4628-8627-f94b5a4e7c78\") " Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.373795 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/ac55b716-d8fd-4628-8627-f94b5a4e7c78-plugins-conf\") pod \"ac55b716-d8fd-4628-8627-f94b5a4e7c78\" (UID: \"ac55b716-d8fd-4628-8627-f94b5a4e7c78\") " Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.373882 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/ac55b716-d8fd-4628-8627-f94b5a4e7c78-server-conf\") pod \"ac55b716-d8fd-4628-8627-f94b5a4e7c78\" (UID: \"ac55b716-d8fd-4628-8627-f94b5a4e7c78\") " Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.373914 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/ac55b716-d8fd-4628-8627-f94b5a4e7c78-rabbitmq-confd\") pod \"ac55b716-d8fd-4628-8627-f94b5a4e7c78\" (UID: \"ac55b716-d8fd-4628-8627-f94b5a4e7c78\") " Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.373935 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/ac55b716-d8fd-4628-8627-f94b5a4e7c78-rabbitmq-erlang-cookie\") pod \"ac55b716-d8fd-4628-8627-f94b5a4e7c78\" (UID: \"ac55b716-d8fd-4628-8627-f94b5a4e7c78\") " Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.373958 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/ac55b716-d8fd-4628-8627-f94b5a4e7c78-erlang-cookie-secret\") pod \"ac55b716-d8fd-4628-8627-f94b5a4e7c78\" (UID: \"ac55b716-d8fd-4628-8627-f94b5a4e7c78\") " Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.373980 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pmzzv\" (UniqueName: \"kubernetes.io/projected/ac55b716-d8fd-4628-8627-f94b5a4e7c78-kube-api-access-pmzzv\") pod \"ac55b716-d8fd-4628-8627-f94b5a4e7c78\" (UID: 
\"ac55b716-d8fd-4628-8627-f94b5a4e7c78\") " Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.374339 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ac55b716-d8fd-4628-8627-f94b5a4e7c78-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "ac55b716-d8fd-4628-8627-f94b5a4e7c78" (UID: "ac55b716-d8fd-4628-8627-f94b5a4e7c78"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.374356 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.374848 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ac55b716-d8fd-4628-8627-f94b5a4e7c78-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "ac55b716-d8fd-4628-8627-f94b5a4e7c78" (UID: "ac55b716-d8fd-4628-8627-f94b5a4e7c78"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.374983 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ac55b716-d8fd-4628-8627-f94b5a4e7c78-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "ac55b716-d8fd-4628-8627-f94b5a4e7c78" (UID: "ac55b716-d8fd-4628-8627-f94b5a4e7c78"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.377124 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/ac55b716-d8fd-4628-8627-f94b5a4e7c78-pod-info" (OuterVolumeSpecName: "pod-info") pod "ac55b716-d8fd-4628-8627-f94b5a4e7c78" (UID: "ac55b716-d8fd-4628-8627-f94b5a4e7c78"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.377200 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ac55b716-d8fd-4628-8627-f94b5a4e7c78-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "ac55b716-d8fd-4628-8627-f94b5a4e7c78" (UID: "ac55b716-d8fd-4628-8627-f94b5a4e7c78"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.377513 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ac55b716-d8fd-4628-8627-f94b5a4e7c78-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "ac55b716-d8fd-4628-8627-f94b5a4e7c78" (UID: "ac55b716-d8fd-4628-8627-f94b5a4e7c78"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.377963 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ac55b716-d8fd-4628-8627-f94b5a4e7c78-kube-api-access-pmzzv" (OuterVolumeSpecName: "kube-api-access-pmzzv") pod "ac55b716-d8fd-4628-8627-f94b5a4e7c78" (UID: "ac55b716-d8fd-4628-8627-f94b5a4e7c78"). InnerVolumeSpecName "kube-api-access-pmzzv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.378505 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "persistence") pod "ac55b716-d8fd-4628-8627-f94b5a4e7c78" (UID: "ac55b716-d8fd-4628-8627-f94b5a4e7c78"). InnerVolumeSpecName "local-storage03-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.388757 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ac55b716-d8fd-4628-8627-f94b5a4e7c78-config-data" (OuterVolumeSpecName: "config-data") pod "ac55b716-d8fd-4628-8627-f94b5a4e7c78" (UID: "ac55b716-d8fd-4628-8627-f94b5a4e7c78"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.402059 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ac55b716-d8fd-4628-8627-f94b5a4e7c78-server-conf" (OuterVolumeSpecName: "server-conf") pod "ac55b716-d8fd-4628-8627-f94b5a4e7c78" (UID: "ac55b716-d8fd-4628-8627-f94b5a4e7c78"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.427041 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ac55b716-d8fd-4628-8627-f94b5a4e7c78-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "ac55b716-d8fd-4628-8627-f94b5a4e7c78" (UID: "ac55b716-d8fd-4628-8627-f94b5a4e7c78"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.475773 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ac55b716-d8fd-4628-8627-f94b5a4e7c78-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.475821 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" " Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.475832 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/ac55b716-d8fd-4628-8627-f94b5a4e7c78-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.475846 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/ac55b716-d8fd-4628-8627-f94b5a4e7c78-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.475856 4558 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/ac55b716-d8fd-4628-8627-f94b5a4e7c78-pod-info\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.475864 4558 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/ac55b716-d8fd-4628-8627-f94b5a4e7c78-plugins-conf\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.475872 4558 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: 
\"kubernetes.io/configmap/ac55b716-d8fd-4628-8627-f94b5a4e7c78-server-conf\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.475880 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/ac55b716-d8fd-4628-8627-f94b5a4e7c78-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.475890 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/ac55b716-d8fd-4628-8627-f94b5a4e7c78-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.475899 4558 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/ac55b716-d8fd-4628-8627-f94b5a4e7c78-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.475912 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pmzzv\" (UniqueName: \"kubernetes.io/projected/ac55b716-d8fd-4628-8627-f94b5a4e7c78-kube-api-access-pmzzv\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.487918 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc" Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.497680 4558 generic.go:334] "Generic (PLEG): container finished" podID="0bfc3458-cc0f-4bea-9794-52c5e81fe055" containerID="604aea968b2534ec5bcdc7b53dbf247a7fe69e766dfd7c01c1c3821d16d1ac11" exitCode=0 Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.497758 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" event={"ID":"0bfc3458-cc0f-4bea-9794-52c5e81fe055","Type":"ContainerDied","Data":"604aea968b2534ec5bcdc7b53dbf247a7fe69e766dfd7c01c1c3821d16d1ac11"} Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.497796 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" event={"ID":"0bfc3458-cc0f-4bea-9794-52c5e81fe055","Type":"ContainerDied","Data":"3c0f4928b3e51534677c8f6e406cffa9ff0f5b03b6f0a82165ab1b09f880329d"} Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.497772 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.497813 4558 scope.go:117] "RemoveContainer" containerID="604aea968b2534ec5bcdc7b53dbf247a7fe69e766dfd7c01c1c3821d16d1ac11" Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.501093 4558 generic.go:334] "Generic (PLEG): container finished" podID="ac55b716-d8fd-4628-8627-f94b5a4e7c78" containerID="04bd1c8749941bda61752f5a94063fae9cbc9fde43b1430193379fd01f69c3ff" exitCode=0 Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.501134 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-0" event={"ID":"ac55b716-d8fd-4628-8627-f94b5a4e7c78","Type":"ContainerDied","Data":"04bd1c8749941bda61752f5a94063fae9cbc9fde43b1430193379fd01f69c3ff"} Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.501189 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-0" event={"ID":"ac55b716-d8fd-4628-8627-f94b5a4e7c78","Type":"ContainerDied","Data":"95121bc78a0c3e8ae6bd3053e586139acb7bcbd2005925044af78716bc5d3507"} Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.501252 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.515568 4558 scope.go:117] "RemoveContainer" containerID="211ea3122594cd690108a40fb561073e8fbaa0a0b8f082460cd412375c3bbcfa" Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.531111 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-0"] Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.542637 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-0"] Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.547886 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.552670 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.565069 4558 scope.go:117] "RemoveContainer" containerID="604aea968b2534ec5bcdc7b53dbf247a7fe69e766dfd7c01c1c3821d16d1ac11" Jan 20 17:05:52 crc kubenswrapper[4558]: E0120 17:05:52.565447 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"604aea968b2534ec5bcdc7b53dbf247a7fe69e766dfd7c01c1c3821d16d1ac11\": container with ID starting with 604aea968b2534ec5bcdc7b53dbf247a7fe69e766dfd7c01c1c3821d16d1ac11 not found: ID does not exist" containerID="604aea968b2534ec5bcdc7b53dbf247a7fe69e766dfd7c01c1c3821d16d1ac11" Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.565480 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"604aea968b2534ec5bcdc7b53dbf247a7fe69e766dfd7c01c1c3821d16d1ac11"} err="failed to get container status \"604aea968b2534ec5bcdc7b53dbf247a7fe69e766dfd7c01c1c3821d16d1ac11\": rpc error: code = NotFound desc = could not find container \"604aea968b2534ec5bcdc7b53dbf247a7fe69e766dfd7c01c1c3821d16d1ac11\": container with ID starting with 604aea968b2534ec5bcdc7b53dbf247a7fe69e766dfd7c01c1c3821d16d1ac11 not found: ID does not exist" Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.565503 4558 scope.go:117] "RemoveContainer" 
containerID="211ea3122594cd690108a40fb561073e8fbaa0a0b8f082460cd412375c3bbcfa" Jan 20 17:05:52 crc kubenswrapper[4558]: E0120 17:05:52.565695 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"211ea3122594cd690108a40fb561073e8fbaa0a0b8f082460cd412375c3bbcfa\": container with ID starting with 211ea3122594cd690108a40fb561073e8fbaa0a0b8f082460cd412375c3bbcfa not found: ID does not exist" containerID="211ea3122594cd690108a40fb561073e8fbaa0a0b8f082460cd412375c3bbcfa" Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.565718 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"211ea3122594cd690108a40fb561073e8fbaa0a0b8f082460cd412375c3bbcfa"} err="failed to get container status \"211ea3122594cd690108a40fb561073e8fbaa0a0b8f082460cd412375c3bbcfa\": rpc error: code = NotFound desc = could not find container \"211ea3122594cd690108a40fb561073e8fbaa0a0b8f082460cd412375c3bbcfa\": container with ID starting with 211ea3122594cd690108a40fb561073e8fbaa0a0b8f082460cd412375c3bbcfa not found: ID does not exist" Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.565732 4558 scope.go:117] "RemoveContainer" containerID="04bd1c8749941bda61752f5a94063fae9cbc9fde43b1430193379fd01f69c3ff" Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.576941 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.577625 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0bfc3458-cc0f-4bea-9794-52c5e81fe055" path="/var/lib/kubelet/pods/0bfc3458-cc0f-4bea-9794-52c5e81fe055/volumes" Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.578219 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6c75ee5c-beea-4b1e-b429-91e83a472529" path="/var/lib/kubelet/pods/6c75ee5c-beea-4b1e-b429-91e83a472529/volumes" Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.579475 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ac55b716-d8fd-4628-8627-f94b5a4e7c78" path="/var/lib/kubelet/pods/ac55b716-d8fd-4628-8627-f94b5a4e7c78/volumes" Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.585702 4558 scope.go:117] "RemoveContainer" containerID="763e019dc0645475db7f3291e11eaa6439d09116fc1541ec554386e84ad53b91" Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.599568 4558 scope.go:117] "RemoveContainer" containerID="04bd1c8749941bda61752f5a94063fae9cbc9fde43b1430193379fd01f69c3ff" Jan 20 17:05:52 crc kubenswrapper[4558]: E0120 17:05:52.599896 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"04bd1c8749941bda61752f5a94063fae9cbc9fde43b1430193379fd01f69c3ff\": container with ID starting with 04bd1c8749941bda61752f5a94063fae9cbc9fde43b1430193379fd01f69c3ff not found: ID does not exist" containerID="04bd1c8749941bda61752f5a94063fae9cbc9fde43b1430193379fd01f69c3ff" Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.599926 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"04bd1c8749941bda61752f5a94063fae9cbc9fde43b1430193379fd01f69c3ff"} err="failed to get container status \"04bd1c8749941bda61752f5a94063fae9cbc9fde43b1430193379fd01f69c3ff\": rpc error: code = NotFound desc = could not find container 
\"04bd1c8749941bda61752f5a94063fae9cbc9fde43b1430193379fd01f69c3ff\": container with ID starting with 04bd1c8749941bda61752f5a94063fae9cbc9fde43b1430193379fd01f69c3ff not found: ID does not exist" Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.599948 4558 scope.go:117] "RemoveContainer" containerID="763e019dc0645475db7f3291e11eaa6439d09116fc1541ec554386e84ad53b91" Jan 20 17:05:52 crc kubenswrapper[4558]: E0120 17:05:52.600213 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"763e019dc0645475db7f3291e11eaa6439d09116fc1541ec554386e84ad53b91\": container with ID starting with 763e019dc0645475db7f3291e11eaa6439d09116fc1541ec554386e84ad53b91 not found: ID does not exist" containerID="763e019dc0645475db7f3291e11eaa6439d09116fc1541ec554386e84ad53b91" Jan 20 17:05:52 crc kubenswrapper[4558]: I0120 17:05:52.600240 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"763e019dc0645475db7f3291e11eaa6439d09116fc1541ec554386e84ad53b91"} err="failed to get container status \"763e019dc0645475db7f3291e11eaa6439d09116fc1541ec554386e84ad53b91\": rpc error: code = NotFound desc = could not find container \"763e019dc0645475db7f3291e11eaa6439d09116fc1541ec554386e84ad53b91\": container with ID starting with 763e019dc0645475db7f3291e11eaa6439d09116fc1541ec554386e84ad53b91 not found: ID does not exist" Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.287464 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-599d6c8df7-lcnrd" Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.374511 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.391658 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bfbf270c-1853-4113-b0e4-6d192abb5c5d-config-data-custom\") pod \"bfbf270c-1853-4113-b0e4-6d192abb5c5d\" (UID: \"bfbf270c-1853-4113-b0e4-6d192abb5c5d\") " Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.391937 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bfbf270c-1853-4113-b0e4-6d192abb5c5d-logs\") pod \"bfbf270c-1853-4113-b0e4-6d192abb5c5d\" (UID: \"bfbf270c-1853-4113-b0e4-6d192abb5c5d\") " Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.392113 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bfbf270c-1853-4113-b0e4-6d192abb5c5d-config-data\") pod \"bfbf270c-1853-4113-b0e4-6d192abb5c5d\" (UID: \"bfbf270c-1853-4113-b0e4-6d192abb5c5d\") " Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.392138 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z66v6\" (UniqueName: \"kubernetes.io/projected/bfbf270c-1853-4113-b0e4-6d192abb5c5d-kube-api-access-z66v6\") pod \"bfbf270c-1853-4113-b0e4-6d192abb5c5d\" (UID: \"bfbf270c-1853-4113-b0e4-6d192abb5c5d\") " Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.392217 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bfbf270c-1853-4113-b0e4-6d192abb5c5d-combined-ca-bundle\") pod \"bfbf270c-1853-4113-b0e4-6d192abb5c5d\" (UID: 
\"bfbf270c-1853-4113-b0e4-6d192abb5c5d\") " Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.396850 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bfbf270c-1853-4113-b0e4-6d192abb5c5d-logs" (OuterVolumeSpecName: "logs") pod "bfbf270c-1853-4113-b0e4-6d192abb5c5d" (UID: "bfbf270c-1853-4113-b0e4-6d192abb5c5d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.399960 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bfbf270c-1853-4113-b0e4-6d192abb5c5d-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "bfbf270c-1853-4113-b0e4-6d192abb5c5d" (UID: "bfbf270c-1853-4113-b0e4-6d192abb5c5d"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.400381 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bfbf270c-1853-4113-b0e4-6d192abb5c5d-kube-api-access-z66v6" (OuterVolumeSpecName: "kube-api-access-z66v6") pod "bfbf270c-1853-4113-b0e4-6d192abb5c5d" (UID: "bfbf270c-1853-4113-b0e4-6d192abb5c5d"). InnerVolumeSpecName "kube-api-access-z66v6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.407992 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-65f6b4bfbb-llhdw" Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.414607 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bfbf270c-1853-4113-b0e4-6d192abb5c5d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bfbf270c-1853-4113-b0e4-6d192abb5c5d" (UID: "bfbf270c-1853-4113-b0e4-6d192abb5c5d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.437938 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bfbf270c-1853-4113-b0e4-6d192abb5c5d-config-data" (OuterVolumeSpecName: "config-data") pod "bfbf270c-1853-4113-b0e4-6d192abb5c5d" (UID: "bfbf270c-1853-4113-b0e4-6d192abb5c5d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.495960 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c7187d14-f25b-4344-bf36-7d56d8e1b79c-log-httpd\") pod \"c7187d14-f25b-4344-bf36-7d56d8e1b79c\" (UID: \"c7187d14-f25b-4344-bf36-7d56d8e1b79c\") " Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.496012 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7187d14-f25b-4344-bf36-7d56d8e1b79c-combined-ca-bundle\") pod \"c7187d14-f25b-4344-bf36-7d56d8e1b79c\" (UID: \"c7187d14-f25b-4344-bf36-7d56d8e1b79c\") " Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.496157 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7187d14-f25b-4344-bf36-7d56d8e1b79c-config-data\") pod \"c7187d14-f25b-4344-bf36-7d56d8e1b79c\" (UID: \"c7187d14-f25b-4344-bf36-7d56d8e1b79c\") " Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.496391 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c7187d14-f25b-4344-bf36-7d56d8e1b79c-sg-core-conf-yaml\") pod \"c7187d14-f25b-4344-bf36-7d56d8e1b79c\" (UID: \"c7187d14-f25b-4344-bf36-7d56d8e1b79c\") " Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.496439 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d8wkp\" (UniqueName: \"kubernetes.io/projected/c7187d14-f25b-4344-bf36-7d56d8e1b79c-kube-api-access-d8wkp\") pod \"c7187d14-f25b-4344-bf36-7d56d8e1b79c\" (UID: \"c7187d14-f25b-4344-bf36-7d56d8e1b79c\") " Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.496459 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c7187d14-f25b-4344-bf36-7d56d8e1b79c-scripts\") pod \"c7187d14-f25b-4344-bf36-7d56d8e1b79c\" (UID: \"c7187d14-f25b-4344-bf36-7d56d8e1b79c\") " Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.496482 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c7187d14-f25b-4344-bf36-7d56d8e1b79c-run-httpd\") pod \"c7187d14-f25b-4344-bf36-7d56d8e1b79c\" (UID: \"c7187d14-f25b-4344-bf36-7d56d8e1b79c\") " Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.496759 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/c7187d14-f25b-4344-bf36-7d56d8e1b79c-ceilometer-tls-certs\") pod \"c7187d14-f25b-4344-bf36-7d56d8e1b79c\" (UID: \"c7187d14-f25b-4344-bf36-7d56d8e1b79c\") " Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.497075 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bfbf270c-1853-4113-b0e4-6d192abb5c5d-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.497330 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z66v6\" (UniqueName: \"kubernetes.io/projected/bfbf270c-1853-4113-b0e4-6d192abb5c5d-kube-api-access-z66v6\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.497350 4558 reconciler_common.go:293] "Volume detached for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bfbf270c-1853-4113-b0e4-6d192abb5c5d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.497360 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bfbf270c-1853-4113-b0e4-6d192abb5c5d-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.497371 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bfbf270c-1853-4113-b0e4-6d192abb5c5d-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.496670 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c7187d14-f25b-4344-bf36-7d56d8e1b79c-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "c7187d14-f25b-4344-bf36-7d56d8e1b79c" (UID: "c7187d14-f25b-4344-bf36-7d56d8e1b79c"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.498125 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c7187d14-f25b-4344-bf36-7d56d8e1b79c-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "c7187d14-f25b-4344-bf36-7d56d8e1b79c" (UID: "c7187d14-f25b-4344-bf36-7d56d8e1b79c"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.500838 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c7187d14-f25b-4344-bf36-7d56d8e1b79c-scripts" (OuterVolumeSpecName: "scripts") pod "c7187d14-f25b-4344-bf36-7d56d8e1b79c" (UID: "c7187d14-f25b-4344-bf36-7d56d8e1b79c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.501501 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c7187d14-f25b-4344-bf36-7d56d8e1b79c-kube-api-access-d8wkp" (OuterVolumeSpecName: "kube-api-access-d8wkp") pod "c7187d14-f25b-4344-bf36-7d56d8e1b79c" (UID: "c7187d14-f25b-4344-bf36-7d56d8e1b79c"). InnerVolumeSpecName "kube-api-access-d8wkp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.509980 4558 generic.go:334] "Generic (PLEG): container finished" podID="55d5d50d-edc1-4a99-9540-72a7f5f0c622" containerID="e3a0dcbc1a9d518fbc9c91fd7af2a3a13be64f9278f06d3e4672d909735d4773" exitCode=0 Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.510065 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"55d5d50d-edc1-4a99-9540-72a7f5f0c622","Type":"ContainerDied","Data":"e3a0dcbc1a9d518fbc9c91fd7af2a3a13be64f9278f06d3e4672d909735d4773"} Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.512471 4558 generic.go:334] "Generic (PLEG): container finished" podID="bc2c7d29-d967-414d-8bb9-1bf70bcacdcd" containerID="3a9e47d6638b5d013f9248c5040a1853dbc404d976176e3415eea7e361d8e47b" exitCode=0 Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.512593 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-65f6b4bfbb-llhdw" Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.513063 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-65f6b4bfbb-llhdw" event={"ID":"bc2c7d29-d967-414d-8bb9-1bf70bcacdcd","Type":"ContainerDied","Data":"3a9e47d6638b5d013f9248c5040a1853dbc404d976176e3415eea7e361d8e47b"} Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.513103 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-65f6b4bfbb-llhdw" event={"ID":"bc2c7d29-d967-414d-8bb9-1bf70bcacdcd","Type":"ContainerDied","Data":"41eb8df402352603fb269b2c30ccda60ab2499f18d874b03fb79c1eee7bd7993"} Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.513125 4558 scope.go:117] "RemoveContainer" containerID="3a9e47d6638b5d013f9248c5040a1853dbc404d976176e3415eea7e361d8e47b" Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.516525 4558 generic.go:334] "Generic (PLEG): container finished" podID="c7187d14-f25b-4344-bf36-7d56d8e1b79c" containerID="469eb7fe8ee3af85b606966788c6c91603f071d07ac40bc36607962b279dc510" exitCode=0 Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.516601 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"c7187d14-f25b-4344-bf36-7d56d8e1b79c","Type":"ContainerDied","Data":"469eb7fe8ee3af85b606966788c6c91603f071d07ac40bc36607962b279dc510"} Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.516625 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"c7187d14-f25b-4344-bf36-7d56d8e1b79c","Type":"ContainerDied","Data":"21ff66014e3f5bcbb545b0096dc7040b680c353d52452cb43f0529712310ca7b"} Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.516645 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.518615 4558 generic.go:334] "Generic (PLEG): container finished" podID="bfbf270c-1853-4113-b0e4-6d192abb5c5d" containerID="1656c9f8a5d976f0a32142d5398076a41236be2ec81b6e074cfc899864057916" exitCode=0 Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.518671 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-599d6c8df7-lcnrd" event={"ID":"bfbf270c-1853-4113-b0e4-6d192abb5c5d","Type":"ContainerDied","Data":"1656c9f8a5d976f0a32142d5398076a41236be2ec81b6e074cfc899864057916"} Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.518689 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-599d6c8df7-lcnrd" event={"ID":"bfbf270c-1853-4113-b0e4-6d192abb5c5d","Type":"ContainerDied","Data":"f2bb3dc6999859bba49c421c21bcbd591d4b55a8331c1a1a17376de1eee20a20"} Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.518759 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-599d6c8df7-lcnrd" Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.518782 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c7187d14-f25b-4344-bf36-7d56d8e1b79c-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "c7187d14-f25b-4344-bf36-7d56d8e1b79c" (UID: "c7187d14-f25b-4344-bf36-7d56d8e1b79c"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.530644 4558 scope.go:117] "RemoveContainer" containerID="ca91ff4227eff077e8448a3f7e4d144f18ec13ff9ee590460efb5d52dcae5abe" Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.535479 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c7187d14-f25b-4344-bf36-7d56d8e1b79c-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "c7187d14-f25b-4344-bf36-7d56d8e1b79c" (UID: "c7187d14-f25b-4344-bf36-7d56d8e1b79c"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.552231 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c7187d14-f25b-4344-bf36-7d56d8e1b79c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c7187d14-f25b-4344-bf36-7d56d8e1b79c" (UID: "c7187d14-f25b-4344-bf36-7d56d8e1b79c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.552517 4558 scope.go:117] "RemoveContainer" containerID="3a9e47d6638b5d013f9248c5040a1853dbc404d976176e3415eea7e361d8e47b" Jan 20 17:05:53 crc kubenswrapper[4558]: E0120 17:05:53.552926 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3a9e47d6638b5d013f9248c5040a1853dbc404d976176e3415eea7e361d8e47b\": container with ID starting with 3a9e47d6638b5d013f9248c5040a1853dbc404d976176e3415eea7e361d8e47b not found: ID does not exist" containerID="3a9e47d6638b5d013f9248c5040a1853dbc404d976176e3415eea7e361d8e47b" Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.552974 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3a9e47d6638b5d013f9248c5040a1853dbc404d976176e3415eea7e361d8e47b"} err="failed to get container status \"3a9e47d6638b5d013f9248c5040a1853dbc404d976176e3415eea7e361d8e47b\": rpc error: code = NotFound desc = could not find container \"3a9e47d6638b5d013f9248c5040a1853dbc404d976176e3415eea7e361d8e47b\": container with ID starting with 3a9e47d6638b5d013f9248c5040a1853dbc404d976176e3415eea7e361d8e47b not found: ID does not exist" Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.553007 4558 scope.go:117] "RemoveContainer" containerID="ca91ff4227eff077e8448a3f7e4d144f18ec13ff9ee590460efb5d52dcae5abe" Jan 20 17:05:53 crc kubenswrapper[4558]: E0120 17:05:53.553408 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ca91ff4227eff077e8448a3f7e4d144f18ec13ff9ee590460efb5d52dcae5abe\": container with ID starting with ca91ff4227eff077e8448a3f7e4d144f18ec13ff9ee590460efb5d52dcae5abe not found: ID does not exist" containerID="ca91ff4227eff077e8448a3f7e4d144f18ec13ff9ee590460efb5d52dcae5abe" Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.553431 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ca91ff4227eff077e8448a3f7e4d144f18ec13ff9ee590460efb5d52dcae5abe"} err="failed to get container status \"ca91ff4227eff077e8448a3f7e4d144f18ec13ff9ee590460efb5d52dcae5abe\": rpc error: code = NotFound desc = could not find container \"ca91ff4227eff077e8448a3f7e4d144f18ec13ff9ee590460efb5d52dcae5abe\": container with ID starting with 
ca91ff4227eff077e8448a3f7e4d144f18ec13ff9ee590460efb5d52dcae5abe not found: ID does not exist" Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.553447 4558 scope.go:117] "RemoveContainer" containerID="c6607b53445fad5655dbb034a5a76d6a87a2e8a96bd8d11ed9d596a7971d2d55" Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.562716 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-worker-599d6c8df7-lcnrd"] Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.565662 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.568906 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-worker-599d6c8df7-lcnrd"] Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.570916 4558 scope.go:117] "RemoveContainer" containerID="2af5466604135693231e3743b389224d86d75c0a2d399c8a0f6df2491983b42f" Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.575377 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c7187d14-f25b-4344-bf36-7d56d8e1b79c-config-data" (OuterVolumeSpecName: "config-data") pod "c7187d14-f25b-4344-bf36-7d56d8e1b79c" (UID: "c7187d14-f25b-4344-bf36-7d56d8e1b79c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.588153 4558 scope.go:117] "RemoveContainer" containerID="469eb7fe8ee3af85b606966788c6c91603f071d07ac40bc36607962b279dc510" Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.598618 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bc2c7d29-d967-414d-8bb9-1bf70bcacdcd-logs\") pod \"bc2c7d29-d967-414d-8bb9-1bf70bcacdcd\" (UID: \"bc2c7d29-d967-414d-8bb9-1bf70bcacdcd\") " Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.598698 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bc2c7d29-d967-414d-8bb9-1bf70bcacdcd-config-data\") pod \"bc2c7d29-d967-414d-8bb9-1bf70bcacdcd\" (UID: \"bc2c7d29-d967-414d-8bb9-1bf70bcacdcd\") " Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.598741 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bc2c7d29-d967-414d-8bb9-1bf70bcacdcd-config-data-custom\") pod \"bc2c7d29-d967-414d-8bb9-1bf70bcacdcd\" (UID: \"bc2c7d29-d967-414d-8bb9-1bf70bcacdcd\") " Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.598822 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qr7sr\" (UniqueName: \"kubernetes.io/projected/bc2c7d29-d967-414d-8bb9-1bf70bcacdcd-kube-api-access-qr7sr\") pod \"bc2c7d29-d967-414d-8bb9-1bf70bcacdcd\" (UID: \"bc2c7d29-d967-414d-8bb9-1bf70bcacdcd\") " Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.598917 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc2c7d29-d967-414d-8bb9-1bf70bcacdcd-combined-ca-bundle\") pod \"bc2c7d29-d967-414d-8bb9-1bf70bcacdcd\" (UID: \"bc2c7d29-d967-414d-8bb9-1bf70bcacdcd\") " Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.599446 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/c7187d14-f25b-4344-bf36-7d56d8e1b79c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.599468 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7187d14-f25b-4344-bf36-7d56d8e1b79c-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.599477 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c7187d14-f25b-4344-bf36-7d56d8e1b79c-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.599488 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d8wkp\" (UniqueName: \"kubernetes.io/projected/c7187d14-f25b-4344-bf36-7d56d8e1b79c-kube-api-access-d8wkp\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.599498 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c7187d14-f25b-4344-bf36-7d56d8e1b79c-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.599508 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c7187d14-f25b-4344-bf36-7d56d8e1b79c-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.599516 4558 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/c7187d14-f25b-4344-bf36-7d56d8e1b79c-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.599524 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c7187d14-f25b-4344-bf36-7d56d8e1b79c-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.607338 4558 scope.go:117] "RemoveContainer" containerID="97f14c8dcf0f7e744564168d9a5f79f08919d2ff3285eef66b5001b20e3c69d7" Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.607747 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc2c7d29-d967-414d-8bb9-1bf70bcacdcd-logs" (OuterVolumeSpecName: "logs") pod "bc2c7d29-d967-414d-8bb9-1bf70bcacdcd" (UID: "bc2c7d29-d967-414d-8bb9-1bf70bcacdcd"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.610098 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc2c7d29-d967-414d-8bb9-1bf70bcacdcd-kube-api-access-qr7sr" (OuterVolumeSpecName: "kube-api-access-qr7sr") pod "bc2c7d29-d967-414d-8bb9-1bf70bcacdcd" (UID: "bc2c7d29-d967-414d-8bb9-1bf70bcacdcd"). InnerVolumeSpecName "kube-api-access-qr7sr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.610466 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc2c7d29-d967-414d-8bb9-1bf70bcacdcd-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "bc2c7d29-d967-414d-8bb9-1bf70bcacdcd" (UID: "bc2c7d29-d967-414d-8bb9-1bf70bcacdcd"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.615655 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc2c7d29-d967-414d-8bb9-1bf70bcacdcd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bc2c7d29-d967-414d-8bb9-1bf70bcacdcd" (UID: "bc2c7d29-d967-414d-8bb9-1bf70bcacdcd"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.624584 4558 scope.go:117] "RemoveContainer" containerID="c6607b53445fad5655dbb034a5a76d6a87a2e8a96bd8d11ed9d596a7971d2d55" Jan 20 17:05:53 crc kubenswrapper[4558]: E0120 17:05:53.624982 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c6607b53445fad5655dbb034a5a76d6a87a2e8a96bd8d11ed9d596a7971d2d55\": container with ID starting with c6607b53445fad5655dbb034a5a76d6a87a2e8a96bd8d11ed9d596a7971d2d55 not found: ID does not exist" containerID="c6607b53445fad5655dbb034a5a76d6a87a2e8a96bd8d11ed9d596a7971d2d55" Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.625019 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c6607b53445fad5655dbb034a5a76d6a87a2e8a96bd8d11ed9d596a7971d2d55"} err="failed to get container status \"c6607b53445fad5655dbb034a5a76d6a87a2e8a96bd8d11ed9d596a7971d2d55\": rpc error: code = NotFound desc = could not find container \"c6607b53445fad5655dbb034a5a76d6a87a2e8a96bd8d11ed9d596a7971d2d55\": container with ID starting with c6607b53445fad5655dbb034a5a76d6a87a2e8a96bd8d11ed9d596a7971d2d55 not found: ID does not exist" Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.625052 4558 scope.go:117] "RemoveContainer" containerID="2af5466604135693231e3743b389224d86d75c0a2d399c8a0f6df2491983b42f" Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.625383 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc2c7d29-d967-414d-8bb9-1bf70bcacdcd-config-data" (OuterVolumeSpecName: "config-data") pod "bc2c7d29-d967-414d-8bb9-1bf70bcacdcd" (UID: "bc2c7d29-d967-414d-8bb9-1bf70bcacdcd"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:53 crc kubenswrapper[4558]: E0120 17:05:53.625438 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2af5466604135693231e3743b389224d86d75c0a2d399c8a0f6df2491983b42f\": container with ID starting with 2af5466604135693231e3743b389224d86d75c0a2d399c8a0f6df2491983b42f not found: ID does not exist" containerID="2af5466604135693231e3743b389224d86d75c0a2d399c8a0f6df2491983b42f" Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.625469 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2af5466604135693231e3743b389224d86d75c0a2d399c8a0f6df2491983b42f"} err="failed to get container status \"2af5466604135693231e3743b389224d86d75c0a2d399c8a0f6df2491983b42f\": rpc error: code = NotFound desc = could not find container \"2af5466604135693231e3743b389224d86d75c0a2d399c8a0f6df2491983b42f\": container with ID starting with 2af5466604135693231e3743b389224d86d75c0a2d399c8a0f6df2491983b42f not found: ID does not exist" Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.625508 4558 scope.go:117] "RemoveContainer" containerID="469eb7fe8ee3af85b606966788c6c91603f071d07ac40bc36607962b279dc510" Jan 20 17:05:53 crc kubenswrapper[4558]: E0120 17:05:53.625810 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"469eb7fe8ee3af85b606966788c6c91603f071d07ac40bc36607962b279dc510\": container with ID starting with 469eb7fe8ee3af85b606966788c6c91603f071d07ac40bc36607962b279dc510 not found: ID does not exist" containerID="469eb7fe8ee3af85b606966788c6c91603f071d07ac40bc36607962b279dc510" Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.625868 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"469eb7fe8ee3af85b606966788c6c91603f071d07ac40bc36607962b279dc510"} err="failed to get container status \"469eb7fe8ee3af85b606966788c6c91603f071d07ac40bc36607962b279dc510\": rpc error: code = NotFound desc = could not find container \"469eb7fe8ee3af85b606966788c6c91603f071d07ac40bc36607962b279dc510\": container with ID starting with 469eb7fe8ee3af85b606966788c6c91603f071d07ac40bc36607962b279dc510 not found: ID does not exist" Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.625888 4558 scope.go:117] "RemoveContainer" containerID="97f14c8dcf0f7e744564168d9a5f79f08919d2ff3285eef66b5001b20e3c69d7" Jan 20 17:05:53 crc kubenswrapper[4558]: E0120 17:05:53.626184 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"97f14c8dcf0f7e744564168d9a5f79f08919d2ff3285eef66b5001b20e3c69d7\": container with ID starting with 97f14c8dcf0f7e744564168d9a5f79f08919d2ff3285eef66b5001b20e3c69d7 not found: ID does not exist" containerID="97f14c8dcf0f7e744564168d9a5f79f08919d2ff3285eef66b5001b20e3c69d7" Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.626210 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"97f14c8dcf0f7e744564168d9a5f79f08919d2ff3285eef66b5001b20e3c69d7"} err="failed to get container status \"97f14c8dcf0f7e744564168d9a5f79f08919d2ff3285eef66b5001b20e3c69d7\": rpc error: code = NotFound desc = could not find container \"97f14c8dcf0f7e744564168d9a5f79f08919d2ff3285eef66b5001b20e3c69d7\": container with ID starting with 97f14c8dcf0f7e744564168d9a5f79f08919d2ff3285eef66b5001b20e3c69d7 
not found: ID does not exist" Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.626228 4558 scope.go:117] "RemoveContainer" containerID="1656c9f8a5d976f0a32142d5398076a41236be2ec81b6e074cfc899864057916" Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.643963 4558 scope.go:117] "RemoveContainer" containerID="880f39ca241fd9f400e76ce2cfd51b2dad6e3d3b73759064be5dea602507b366" Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.657689 4558 scope.go:117] "RemoveContainer" containerID="1656c9f8a5d976f0a32142d5398076a41236be2ec81b6e074cfc899864057916" Jan 20 17:05:53 crc kubenswrapper[4558]: E0120 17:05:53.657989 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1656c9f8a5d976f0a32142d5398076a41236be2ec81b6e074cfc899864057916\": container with ID starting with 1656c9f8a5d976f0a32142d5398076a41236be2ec81b6e074cfc899864057916 not found: ID does not exist" containerID="1656c9f8a5d976f0a32142d5398076a41236be2ec81b6e074cfc899864057916" Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.658016 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1656c9f8a5d976f0a32142d5398076a41236be2ec81b6e074cfc899864057916"} err="failed to get container status \"1656c9f8a5d976f0a32142d5398076a41236be2ec81b6e074cfc899864057916\": rpc error: code = NotFound desc = could not find container \"1656c9f8a5d976f0a32142d5398076a41236be2ec81b6e074cfc899864057916\": container with ID starting with 1656c9f8a5d976f0a32142d5398076a41236be2ec81b6e074cfc899864057916 not found: ID does not exist" Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.658032 4558 scope.go:117] "RemoveContainer" containerID="880f39ca241fd9f400e76ce2cfd51b2dad6e3d3b73759064be5dea602507b366" Jan 20 17:05:53 crc kubenswrapper[4558]: E0120 17:05:53.658352 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"880f39ca241fd9f400e76ce2cfd51b2dad6e3d3b73759064be5dea602507b366\": container with ID starting with 880f39ca241fd9f400e76ce2cfd51b2dad6e3d3b73759064be5dea602507b366 not found: ID does not exist" containerID="880f39ca241fd9f400e76ce2cfd51b2dad6e3d3b73759064be5dea602507b366" Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.658372 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"880f39ca241fd9f400e76ce2cfd51b2dad6e3d3b73759064be5dea602507b366"} err="failed to get container status \"880f39ca241fd9f400e76ce2cfd51b2dad6e3d3b73759064be5dea602507b366\": rpc error: code = NotFound desc = could not find container \"880f39ca241fd9f400e76ce2cfd51b2dad6e3d3b73759064be5dea602507b366\": container with ID starting with 880f39ca241fd9f400e76ce2cfd51b2dad6e3d3b73759064be5dea602507b366 not found: ID does not exist" Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.700900 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55d5d50d-edc1-4a99-9540-72a7f5f0c622-combined-ca-bundle\") pod \"55d5d50d-edc1-4a99-9540-72a7f5f0c622\" (UID: \"55d5d50d-edc1-4a99-9540-72a7f5f0c622\") " Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.701541 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9glw8\" (UniqueName: \"kubernetes.io/projected/55d5d50d-edc1-4a99-9540-72a7f5f0c622-kube-api-access-9glw8\") pod \"55d5d50d-edc1-4a99-9540-72a7f5f0c622\" (UID: 
\"55d5d50d-edc1-4a99-9540-72a7f5f0c622\") " Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.701636 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/55d5d50d-edc1-4a99-9540-72a7f5f0c622-config-data\") pod \"55d5d50d-edc1-4a99-9540-72a7f5f0c622\" (UID: \"55d5d50d-edc1-4a99-9540-72a7f5f0c622\") " Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.702859 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc2c7d29-d967-414d-8bb9-1bf70bcacdcd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.702883 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bc2c7d29-d967-414d-8bb9-1bf70bcacdcd-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.702896 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bc2c7d29-d967-414d-8bb9-1bf70bcacdcd-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.702908 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bc2c7d29-d967-414d-8bb9-1bf70bcacdcd-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.702917 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qr7sr\" (UniqueName: \"kubernetes.io/projected/bc2c7d29-d967-414d-8bb9-1bf70bcacdcd-kube-api-access-qr7sr\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.705396 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/55d5d50d-edc1-4a99-9540-72a7f5f0c622-kube-api-access-9glw8" (OuterVolumeSpecName: "kube-api-access-9glw8") pod "55d5d50d-edc1-4a99-9540-72a7f5f0c622" (UID: "55d5d50d-edc1-4a99-9540-72a7f5f0c622"). InnerVolumeSpecName "kube-api-access-9glw8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.717314 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/55d5d50d-edc1-4a99-9540-72a7f5f0c622-config-data" (OuterVolumeSpecName: "config-data") pod "55d5d50d-edc1-4a99-9540-72a7f5f0c622" (UID: "55d5d50d-edc1-4a99-9540-72a7f5f0c622"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.717654 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/55d5d50d-edc1-4a99-9540-72a7f5f0c622-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "55d5d50d-edc1-4a99-9540-72a7f5f0c622" (UID: "55d5d50d-edc1-4a99-9540-72a7f5f0c622"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.804322 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9glw8\" (UniqueName: \"kubernetes.io/projected/55d5d50d-edc1-4a99-9540-72a7f5f0c622-kube-api-access-9glw8\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.804360 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/55d5d50d-edc1-4a99-9540-72a7f5f0c622-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.804374 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55d5d50d-edc1-4a99-9540-72a7f5f0c622-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.858142 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.868010 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.874395 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-65f6b4bfbb-llhdw"] Jan 20 17:05:53 crc kubenswrapper[4558]: I0120 17:05:53.878700 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-65f6b4bfbb-llhdw"] Jan 20 17:05:54 crc kubenswrapper[4558]: I0120 17:05:54.531367 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:05:54 crc kubenswrapper[4558]: I0120 17:05:54.531397 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"55d5d50d-edc1-4a99-9540-72a7f5f0c622","Type":"ContainerDied","Data":"7dad1523c815b9ba558a68733253bdd758de2b9cb63d7ef00e262899b20e6c03"} Jan 20 17:05:54 crc kubenswrapper[4558]: I0120 17:05:54.531453 4558 scope.go:117] "RemoveContainer" containerID="e3a0dcbc1a9d518fbc9c91fd7af2a3a13be64f9278f06d3e4672d909735d4773" Jan 20 17:05:54 crc kubenswrapper[4558]: I0120 17:05:54.564276 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:05:54 crc kubenswrapper[4558]: I0120 17:05:54.576448 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc2c7d29-d967-414d-8bb9-1bf70bcacdcd" path="/var/lib/kubelet/pods/bc2c7d29-d967-414d-8bb9-1bf70bcacdcd/volumes" Jan 20 17:05:54 crc kubenswrapper[4558]: I0120 17:05:54.577059 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bfbf270c-1853-4113-b0e4-6d192abb5c5d" path="/var/lib/kubelet/pods/bfbf270c-1853-4113-b0e4-6d192abb5c5d/volumes" Jan 20 17:05:54 crc kubenswrapper[4558]: I0120 17:05:54.577633 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c7187d14-f25b-4344-bf36-7d56d8e1b79c" path="/var/lib/kubelet/pods/c7187d14-f25b-4344-bf36-7d56d8e1b79c/volumes" Jan 20 17:05:54 crc kubenswrapper[4558]: I0120 17:05:54.578830 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:05:54 crc kubenswrapper[4558]: I0120 17:05:54.921649 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-kwtcx" Jan 
20 17:05:54 crc kubenswrapper[4558]: I0120 17:05:54.968477 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-6db984dd9-47b5d"] Jan 20 17:05:54 crc kubenswrapper[4558]: I0120 17:05:54.968707 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6db984dd9-47b5d" podUID="6846508c-01bc-49e8-b98b-5df9ec57f030" containerName="dnsmasq-dns" containerID="cri-o://b49dd264bdbda0a5b2edd1aecfbbd56731980f938da69a90df913ea26b0db35c" gracePeriod=10 Jan 20 17:05:55 crc kubenswrapper[4558]: I0120 17:05:55.394806 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6db984dd9-47b5d" Jan 20 17:05:55 crc kubenswrapper[4558]: I0120 17:05:55.526265 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6846508c-01bc-49e8-b98b-5df9ec57f030-dns-swift-storage-0\") pod \"6846508c-01bc-49e8-b98b-5df9ec57f030\" (UID: \"6846508c-01bc-49e8-b98b-5df9ec57f030\") " Jan 20 17:05:55 crc kubenswrapper[4558]: I0120 17:05:55.526394 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/6846508c-01bc-49e8-b98b-5df9ec57f030-dnsmasq-svc\") pod \"6846508c-01bc-49e8-b98b-5df9ec57f030\" (UID: \"6846508c-01bc-49e8-b98b-5df9ec57f030\") " Jan 20 17:05:55 crc kubenswrapper[4558]: I0120 17:05:55.526555 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6846508c-01bc-49e8-b98b-5df9ec57f030-config\") pod \"6846508c-01bc-49e8-b98b-5df9ec57f030\" (UID: \"6846508c-01bc-49e8-b98b-5df9ec57f030\") " Jan 20 17:05:55 crc kubenswrapper[4558]: I0120 17:05:55.526601 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-55qxz\" (UniqueName: \"kubernetes.io/projected/6846508c-01bc-49e8-b98b-5df9ec57f030-kube-api-access-55qxz\") pod \"6846508c-01bc-49e8-b98b-5df9ec57f030\" (UID: \"6846508c-01bc-49e8-b98b-5df9ec57f030\") " Jan 20 17:05:55 crc kubenswrapper[4558]: E0120 17:05:55.529718 4558 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod19189ead_bcd7_4806_be88_43cc27d5f202.slice/crio-80a7fbf9ebd999f9e255d17b6edb57d7cd1a880d930872a51bd1d1c2a19b20a5.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod19189ead_bcd7_4806_be88_43cc27d5f202.slice/crio-conmon-80a7fbf9ebd999f9e255d17b6edb57d7cd1a880d930872a51bd1d1c2a19b20a5.scope\": RecentStats: unable to find data in memory cache]" Jan 20 17:05:55 crc kubenswrapper[4558]: I0120 17:05:55.537712 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6846508c-01bc-49e8-b98b-5df9ec57f030-kube-api-access-55qxz" (OuterVolumeSpecName: "kube-api-access-55qxz") pod "6846508c-01bc-49e8-b98b-5df9ec57f030" (UID: "6846508c-01bc-49e8-b98b-5df9ec57f030"). InnerVolumeSpecName "kube-api-access-55qxz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:05:55 crc kubenswrapper[4558]: I0120 17:05:55.564267 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6846508c-01bc-49e8-b98b-5df9ec57f030-dnsmasq-svc" (OuterVolumeSpecName: "dnsmasq-svc") pod "6846508c-01bc-49e8-b98b-5df9ec57f030" (UID: "6846508c-01bc-49e8-b98b-5df9ec57f030"). InnerVolumeSpecName "dnsmasq-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:05:55 crc kubenswrapper[4558]: I0120 17:05:55.564540 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6846508c-01bc-49e8-b98b-5df9ec57f030-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "6846508c-01bc-49e8-b98b-5df9ec57f030" (UID: "6846508c-01bc-49e8-b98b-5df9ec57f030"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:05:55 crc kubenswrapper[4558]: I0120 17:05:55.565935 4558 generic.go:334] "Generic (PLEG): container finished" podID="6846508c-01bc-49e8-b98b-5df9ec57f030" containerID="b49dd264bdbda0a5b2edd1aecfbbd56731980f938da69a90df913ea26b0db35c" exitCode=0 Jan 20 17:05:55 crc kubenswrapper[4558]: I0120 17:05:55.565972 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6db984dd9-47b5d" event={"ID":"6846508c-01bc-49e8-b98b-5df9ec57f030","Type":"ContainerDied","Data":"b49dd264bdbda0a5b2edd1aecfbbd56731980f938da69a90df913ea26b0db35c"} Jan 20 17:05:55 crc kubenswrapper[4558]: I0120 17:05:55.565979 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6db984dd9-47b5d" Jan 20 17:05:55 crc kubenswrapper[4558]: I0120 17:05:55.565998 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6db984dd9-47b5d" event={"ID":"6846508c-01bc-49e8-b98b-5df9ec57f030","Type":"ContainerDied","Data":"146767f56896346d3b4b1ec97e73ee66cabf2384ba4ee7402dcc81c81a3b0f8a"} Jan 20 17:05:55 crc kubenswrapper[4558]: I0120 17:05:55.566014 4558 scope.go:117] "RemoveContainer" containerID="b49dd264bdbda0a5b2edd1aecfbbd56731980f938da69a90df913ea26b0db35c" Jan 20 17:05:55 crc kubenswrapper[4558]: I0120 17:05:55.566251 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6846508c-01bc-49e8-b98b-5df9ec57f030-config" (OuterVolumeSpecName: "config") pod "6846508c-01bc-49e8-b98b-5df9ec57f030" (UID: "6846508c-01bc-49e8-b98b-5df9ec57f030"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:05:55 crc kubenswrapper[4558]: I0120 17:05:55.587055 4558 scope.go:117] "RemoveContainer" containerID="741ed849a76d51e10ae4f6bfc13c4ea0462a1f19837b48799ff426dd12db2262" Jan 20 17:05:55 crc kubenswrapper[4558]: I0120 17:05:55.596009 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-6db984dd9-47b5d"] Jan 20 17:05:55 crc kubenswrapper[4558]: I0120 17:05:55.599754 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-6db984dd9-47b5d"] Jan 20 17:05:55 crc kubenswrapper[4558]: I0120 17:05:55.606678 4558 scope.go:117] "RemoveContainer" containerID="b49dd264bdbda0a5b2edd1aecfbbd56731980f938da69a90df913ea26b0db35c" Jan 20 17:05:55 crc kubenswrapper[4558]: E0120 17:05:55.607032 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b49dd264bdbda0a5b2edd1aecfbbd56731980f938da69a90df913ea26b0db35c\": container with ID starting with b49dd264bdbda0a5b2edd1aecfbbd56731980f938da69a90df913ea26b0db35c not found: ID does not exist" containerID="b49dd264bdbda0a5b2edd1aecfbbd56731980f938da69a90df913ea26b0db35c" Jan 20 17:05:55 crc kubenswrapper[4558]: I0120 17:05:55.607060 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b49dd264bdbda0a5b2edd1aecfbbd56731980f938da69a90df913ea26b0db35c"} err="failed to get container status \"b49dd264bdbda0a5b2edd1aecfbbd56731980f938da69a90df913ea26b0db35c\": rpc error: code = NotFound desc = could not find container \"b49dd264bdbda0a5b2edd1aecfbbd56731980f938da69a90df913ea26b0db35c\": container with ID starting with b49dd264bdbda0a5b2edd1aecfbbd56731980f938da69a90df913ea26b0db35c not found: ID does not exist" Jan 20 17:05:55 crc kubenswrapper[4558]: I0120 17:05:55.607079 4558 scope.go:117] "RemoveContainer" containerID="741ed849a76d51e10ae4f6bfc13c4ea0462a1f19837b48799ff426dd12db2262" Jan 20 17:05:55 crc kubenswrapper[4558]: E0120 17:05:55.607489 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"741ed849a76d51e10ae4f6bfc13c4ea0462a1f19837b48799ff426dd12db2262\": container with ID starting with 741ed849a76d51e10ae4f6bfc13c4ea0462a1f19837b48799ff426dd12db2262 not found: ID does not exist" containerID="741ed849a76d51e10ae4f6bfc13c4ea0462a1f19837b48799ff426dd12db2262" Jan 20 17:05:55 crc kubenswrapper[4558]: I0120 17:05:55.607517 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"741ed849a76d51e10ae4f6bfc13c4ea0462a1f19837b48799ff426dd12db2262"} err="failed to get container status \"741ed849a76d51e10ae4f6bfc13c4ea0462a1f19837b48799ff426dd12db2262\": rpc error: code = NotFound desc = could not find container \"741ed849a76d51e10ae4f6bfc13c4ea0462a1f19837b48799ff426dd12db2262\": container with ID starting with 741ed849a76d51e10ae4f6bfc13c4ea0462a1f19837b48799ff426dd12db2262 not found: ID does not exist" Jan 20 17:05:55 crc kubenswrapper[4558]: I0120 17:05:55.628311 4558 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6846508c-01bc-49e8-b98b-5df9ec57f030-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:55 crc kubenswrapper[4558]: I0120 17:05:55.628335 4558 reconciler_common.go:293] "Volume detached for volume \"dnsmasq-svc\" (UniqueName: 
\"kubernetes.io/configmap/6846508c-01bc-49e8-b98b-5df9ec57f030-dnsmasq-svc\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:55 crc kubenswrapper[4558]: I0120 17:05:55.628345 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6846508c-01bc-49e8-b98b-5df9ec57f030-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:55 crc kubenswrapper[4558]: I0120 17:05:55.628353 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-55qxz\" (UniqueName: \"kubernetes.io/projected/6846508c-01bc-49e8-b98b-5df9ec57f030-kube-api-access-55qxz\") on node \"crc\" DevicePath \"\"" Jan 20 17:05:55 crc kubenswrapper[4558]: I0120 17:05:55.695335 4558 scope.go:117] "RemoveContainer" containerID="43e92682c057f8b00344d5b760cd89b42d21a4dde25d4648333ff3c54e65f150" Jan 20 17:05:55 crc kubenswrapper[4558]: I0120 17:05:55.723941 4558 scope.go:117] "RemoveContainer" containerID="363d8978a2d2487cb2e7b6e16d0c176bad4492f0362547057829d38e6bcf10f8" Jan 20 17:05:56 crc kubenswrapper[4558]: I0120 17:05:56.574849 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="55d5d50d-edc1-4a99-9540-72a7f5f0c622" path="/var/lib/kubelet/pods/55d5d50d-edc1-4a99-9540-72a7f5f0c622/volumes" Jan 20 17:05:56 crc kubenswrapper[4558]: I0120 17:05:56.575823 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6846508c-01bc-49e8-b98b-5df9ec57f030" path="/var/lib/kubelet/pods/6846508c-01bc-49e8-b98b-5df9ec57f030/volumes" Jan 20 17:05:57 crc kubenswrapper[4558]: I0120 17:05:57.330032 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 17:05:57 crc kubenswrapper[4558]: I0120 17:05:57.330119 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 17:06:00 crc kubenswrapper[4558]: I0120 17:06:00.253704 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-7fdccbb69d-fng8p" Jan 20 17:06:00 crc kubenswrapper[4558]: I0120 17:06:00.400847 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/fa011f19-1245-4ae8-ae2b-8773036498b3-config\") pod \"fa011f19-1245-4ae8-ae2b-8773036498b3\" (UID: \"fa011f19-1245-4ae8-ae2b-8773036498b3\") " Jan 20 17:06:00 crc kubenswrapper[4558]: I0120 17:06:00.400913 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/fa011f19-1245-4ae8-ae2b-8773036498b3-internal-tls-certs\") pod \"fa011f19-1245-4ae8-ae2b-8773036498b3\" (UID: \"fa011f19-1245-4ae8-ae2b-8773036498b3\") " Jan 20 17:06:00 crc kubenswrapper[4558]: I0120 17:06:00.400985 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jfjmw\" (UniqueName: \"kubernetes.io/projected/fa011f19-1245-4ae8-ae2b-8773036498b3-kube-api-access-jfjmw\") pod \"fa011f19-1245-4ae8-ae2b-8773036498b3\" (UID: \"fa011f19-1245-4ae8-ae2b-8773036498b3\") " Jan 20 17:06:00 crc kubenswrapper[4558]: I0120 17:06:00.401082 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/fa011f19-1245-4ae8-ae2b-8773036498b3-public-tls-certs\") pod \"fa011f19-1245-4ae8-ae2b-8773036498b3\" (UID: \"fa011f19-1245-4ae8-ae2b-8773036498b3\") " Jan 20 17:06:00 crc kubenswrapper[4558]: I0120 17:06:00.401242 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/fa011f19-1245-4ae8-ae2b-8773036498b3-ovndb-tls-certs\") pod \"fa011f19-1245-4ae8-ae2b-8773036498b3\" (UID: \"fa011f19-1245-4ae8-ae2b-8773036498b3\") " Jan 20 17:06:00 crc kubenswrapper[4558]: I0120 17:06:00.401283 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa011f19-1245-4ae8-ae2b-8773036498b3-combined-ca-bundle\") pod \"fa011f19-1245-4ae8-ae2b-8773036498b3\" (UID: \"fa011f19-1245-4ae8-ae2b-8773036498b3\") " Jan 20 17:06:00 crc kubenswrapper[4558]: I0120 17:06:00.401322 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/fa011f19-1245-4ae8-ae2b-8773036498b3-httpd-config\") pod \"fa011f19-1245-4ae8-ae2b-8773036498b3\" (UID: \"fa011f19-1245-4ae8-ae2b-8773036498b3\") " Jan 20 17:06:00 crc kubenswrapper[4558]: I0120 17:06:00.407331 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fa011f19-1245-4ae8-ae2b-8773036498b3-kube-api-access-jfjmw" (OuterVolumeSpecName: "kube-api-access-jfjmw") pod "fa011f19-1245-4ae8-ae2b-8773036498b3" (UID: "fa011f19-1245-4ae8-ae2b-8773036498b3"). InnerVolumeSpecName "kube-api-access-jfjmw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:06:00 crc kubenswrapper[4558]: I0120 17:06:00.407656 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa011f19-1245-4ae8-ae2b-8773036498b3-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "fa011f19-1245-4ae8-ae2b-8773036498b3" (UID: "fa011f19-1245-4ae8-ae2b-8773036498b3"). InnerVolumeSpecName "httpd-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:06:00 crc kubenswrapper[4558]: I0120 17:06:00.429907 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa011f19-1245-4ae8-ae2b-8773036498b3-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "fa011f19-1245-4ae8-ae2b-8773036498b3" (UID: "fa011f19-1245-4ae8-ae2b-8773036498b3"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:06:00 crc kubenswrapper[4558]: I0120 17:06:00.432197 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa011f19-1245-4ae8-ae2b-8773036498b3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fa011f19-1245-4ae8-ae2b-8773036498b3" (UID: "fa011f19-1245-4ae8-ae2b-8773036498b3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:06:00 crc kubenswrapper[4558]: I0120 17:06:00.434053 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa011f19-1245-4ae8-ae2b-8773036498b3-config" (OuterVolumeSpecName: "config") pod "fa011f19-1245-4ae8-ae2b-8773036498b3" (UID: "fa011f19-1245-4ae8-ae2b-8773036498b3"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:06:00 crc kubenswrapper[4558]: I0120 17:06:00.434242 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa011f19-1245-4ae8-ae2b-8773036498b3-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "fa011f19-1245-4ae8-ae2b-8773036498b3" (UID: "fa011f19-1245-4ae8-ae2b-8773036498b3"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:06:00 crc kubenswrapper[4558]: I0120 17:06:00.452451 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa011f19-1245-4ae8-ae2b-8773036498b3-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "fa011f19-1245-4ae8-ae2b-8773036498b3" (UID: "fa011f19-1245-4ae8-ae2b-8773036498b3"). InnerVolumeSpecName "ovndb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:06:00 crc kubenswrapper[4558]: I0120 17:06:00.503272 4558 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/fa011f19-1245-4ae8-ae2b-8773036498b3-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:06:00 crc kubenswrapper[4558]: I0120 17:06:00.503303 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa011f19-1245-4ae8-ae2b-8773036498b3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:06:00 crc kubenswrapper[4558]: I0120 17:06:00.503314 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/fa011f19-1245-4ae8-ae2b-8773036498b3-httpd-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:06:00 crc kubenswrapper[4558]: I0120 17:06:00.503323 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/fa011f19-1245-4ae8-ae2b-8773036498b3-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:06:00 crc kubenswrapper[4558]: I0120 17:06:00.503349 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/fa011f19-1245-4ae8-ae2b-8773036498b3-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:06:00 crc kubenswrapper[4558]: I0120 17:06:00.503358 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jfjmw\" (UniqueName: \"kubernetes.io/projected/fa011f19-1245-4ae8-ae2b-8773036498b3-kube-api-access-jfjmw\") on node \"crc\" DevicePath \"\"" Jan 20 17:06:00 crc kubenswrapper[4558]: I0120 17:06:00.503369 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/fa011f19-1245-4ae8-ae2b-8773036498b3-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:06:00 crc kubenswrapper[4558]: I0120 17:06:00.606459 4558 generic.go:334] "Generic (PLEG): container finished" podID="fa011f19-1245-4ae8-ae2b-8773036498b3" containerID="edc8f84d18297fb81c87a4195f78806a1f8712a6680f4253cb7972ff04e84213" exitCode=0 Jan 20 17:06:00 crc kubenswrapper[4558]: I0120 17:06:00.606498 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-7fdccbb69d-fng8p" event={"ID":"fa011f19-1245-4ae8-ae2b-8773036498b3","Type":"ContainerDied","Data":"edc8f84d18297fb81c87a4195f78806a1f8712a6680f4253cb7972ff04e84213"} Jan 20 17:06:00 crc kubenswrapper[4558]: I0120 17:06:00.606523 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-7fdccbb69d-fng8p" event={"ID":"fa011f19-1245-4ae8-ae2b-8773036498b3","Type":"ContainerDied","Data":"3911201e3273f3fa00ea218fb0ebd9dc9eae236976ff9415310c50b8be0289ef"} Jan 20 17:06:00 crc kubenswrapper[4558]: I0120 17:06:00.606540 4558 scope.go:117] "RemoveContainer" containerID="a508351390349a4d085a4b101af8917b6baf005aac6d69c3b56e8bb73832583f" Jan 20 17:06:00 crc kubenswrapper[4558]: I0120 17:06:00.606905 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-7fdccbb69d-fng8p" Jan 20 17:06:00 crc kubenswrapper[4558]: I0120 17:06:00.625289 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-7fdccbb69d-fng8p"] Jan 20 17:06:00 crc kubenswrapper[4558]: I0120 17:06:00.631231 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-7fdccbb69d-fng8p"] Jan 20 17:06:00 crc kubenswrapper[4558]: I0120 17:06:00.635408 4558 scope.go:117] "RemoveContainer" containerID="edc8f84d18297fb81c87a4195f78806a1f8712a6680f4253cb7972ff04e84213" Jan 20 17:06:00 crc kubenswrapper[4558]: I0120 17:06:00.652725 4558 scope.go:117] "RemoveContainer" containerID="a508351390349a4d085a4b101af8917b6baf005aac6d69c3b56e8bb73832583f" Jan 20 17:06:00 crc kubenswrapper[4558]: E0120 17:06:00.653141 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a508351390349a4d085a4b101af8917b6baf005aac6d69c3b56e8bb73832583f\": container with ID starting with a508351390349a4d085a4b101af8917b6baf005aac6d69c3b56e8bb73832583f not found: ID does not exist" containerID="a508351390349a4d085a4b101af8917b6baf005aac6d69c3b56e8bb73832583f" Jan 20 17:06:00 crc kubenswrapper[4558]: I0120 17:06:00.653213 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a508351390349a4d085a4b101af8917b6baf005aac6d69c3b56e8bb73832583f"} err="failed to get container status \"a508351390349a4d085a4b101af8917b6baf005aac6d69c3b56e8bb73832583f\": rpc error: code = NotFound desc = could not find container \"a508351390349a4d085a4b101af8917b6baf005aac6d69c3b56e8bb73832583f\": container with ID starting with a508351390349a4d085a4b101af8917b6baf005aac6d69c3b56e8bb73832583f not found: ID does not exist" Jan 20 17:06:00 crc kubenswrapper[4558]: I0120 17:06:00.653245 4558 scope.go:117] "RemoveContainer" containerID="edc8f84d18297fb81c87a4195f78806a1f8712a6680f4253cb7972ff04e84213" Jan 20 17:06:00 crc kubenswrapper[4558]: E0120 17:06:00.653651 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"edc8f84d18297fb81c87a4195f78806a1f8712a6680f4253cb7972ff04e84213\": container with ID starting with edc8f84d18297fb81c87a4195f78806a1f8712a6680f4253cb7972ff04e84213 not found: ID does not exist" containerID="edc8f84d18297fb81c87a4195f78806a1f8712a6680f4253cb7972ff04e84213" Jan 20 17:06:00 crc kubenswrapper[4558]: I0120 17:06:00.653696 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"edc8f84d18297fb81c87a4195f78806a1f8712a6680f4253cb7972ff04e84213"} err="failed to get container status \"edc8f84d18297fb81c87a4195f78806a1f8712a6680f4253cb7972ff04e84213\": rpc error: code = NotFound desc = could not find container \"edc8f84d18297fb81c87a4195f78806a1f8712a6680f4253cb7972ff04e84213\": container with ID starting with edc8f84d18297fb81c87a4195f78806a1f8712a6680f4253cb7972ff04e84213 not found: ID does not exist" Jan 20 17:06:02 crc kubenswrapper[4558]: I0120 17:06:02.576759 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fa011f19-1245-4ae8-ae2b-8773036498b3" path="/var/lib/kubelet/pods/fa011f19-1245-4ae8-ae2b-8773036498b3/volumes" Jan 20 17:06:05 crc kubenswrapper[4558]: E0120 17:06:05.668001 4558 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: 
[\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod19189ead_bcd7_4806_be88_43cc27d5f202.slice/crio-conmon-80a7fbf9ebd999f9e255d17b6edb57d7cd1a880d930872a51bd1d1c2a19b20a5.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod19189ead_bcd7_4806_be88_43cc27d5f202.slice/crio-80a7fbf9ebd999f9e255d17b6edb57d7cd1a880d930872a51bd1d1c2a19b20a5.scope\": RecentStats: unable to find data in memory cache]" Jan 20 17:06:14 crc kubenswrapper[4558]: I0120 17:06:14.682906 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:06:14 crc kubenswrapper[4558]: I0120 17:06:14.718639 4558 generic.go:334] "Generic (PLEG): container finished" podID="10edb733-8fab-4543-ad29-3568e3de5aea" containerID="2063896701b2ecd29a95f5f7c1e566bc1a196a26a0392d6e32c7bb743ea5dbde" exitCode=137 Jan 20 17:06:14 crc kubenswrapper[4558]: I0120 17:06:14.718681 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"10edb733-8fab-4543-ad29-3568e3de5aea","Type":"ContainerDied","Data":"2063896701b2ecd29a95f5f7c1e566bc1a196a26a0392d6e32c7bb743ea5dbde"} Jan 20 17:06:14 crc kubenswrapper[4558]: I0120 17:06:14.718707 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"10edb733-8fab-4543-ad29-3568e3de5aea","Type":"ContainerDied","Data":"48ec79c7d990680327d80e2f93a0b1b8dc3dbcc046f63622f6eacf24bab8908c"} Jan 20 17:06:14 crc kubenswrapper[4558]: I0120 17:06:14.718724 4558 scope.go:117] "RemoveContainer" containerID="2063896701b2ecd29a95f5f7c1e566bc1a196a26a0392d6e32c7bb743ea5dbde" Jan 20 17:06:14 crc kubenswrapper[4558]: I0120 17:06:14.718874 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:06:14 crc kubenswrapper[4558]: I0120 17:06:14.734817 4558 scope.go:117] "RemoveContainer" containerID="944fc0bf7c16ccd30559b80cdc699372748ca2f14ed832fbb7c9cbf4c5083aed" Jan 20 17:06:14 crc kubenswrapper[4558]: I0120 17:06:14.750735 4558 scope.go:117] "RemoveContainer" containerID="8e36100266a8d13e7ab5ec75f4795cb2bfafd04327c6000ba4236e64554b079e" Jan 20 17:06:14 crc kubenswrapper[4558]: I0120 17:06:14.764083 4558 scope.go:117] "RemoveContainer" containerID="064c39cc0ed32246609bc4e80f5c679dc77d71ae4e0e7325c8b80942e4b6cd46" Jan 20 17:06:14 crc kubenswrapper[4558]: I0120 17:06:14.776379 4558 scope.go:117] "RemoveContainer" containerID="a00e2bc91063a2dabb4c2ce5cfdc3e96683c6b04440f6eeeadc69adf7e0e8a23" Jan 20 17:06:14 crc kubenswrapper[4558]: I0120 17:06:14.792693 4558 scope.go:117] "RemoveContainer" containerID="1ad6198d74b43bff1f98c2de1da53f4e18e93c848f58a003ec586b574d66eb1f" Jan 20 17:06:14 crc kubenswrapper[4558]: I0120 17:06:14.809503 4558 scope.go:117] "RemoveContainer" containerID="7da72c8a88027dd1e413e53fd692c99fc03f3e37634323ae66f7e9f55370719e" Jan 20 17:06:14 crc kubenswrapper[4558]: I0120 17:06:14.825423 4558 scope.go:117] "RemoveContainer" containerID="8f45926ec3655a6641edb5a009493f355b1f6533cdddced2af79c34663d3397c" Jan 20 17:06:14 crc kubenswrapper[4558]: I0120 17:06:14.839144 4558 scope.go:117] "RemoveContainer" containerID="576e4656d99cd9370e77241bb14cd66a4de3e94deaef7d304253b1816ec67dd5" Jan 20 17:06:14 crc kubenswrapper[4558]: I0120 17:06:14.857045 4558 scope.go:117] "RemoveContainer" containerID="f2f9c003229a8c4bff074beb609ad6fba9bf0167b989fcddbe4ee674bb6a73bd" Jan 20 17:06:14 crc kubenswrapper[4558]: I0120 17:06:14.872038 4558 scope.go:117] "RemoveContainer" containerID="6e25c21c1436e8d3c70552c967000697e095b890d75c613e94acbebcf601e385" Jan 20 17:06:14 crc kubenswrapper[4558]: I0120 17:06:14.879494 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/10edb733-8fab-4543-ad29-3568e3de5aea-etc-swift\") pod \"10edb733-8fab-4543-ad29-3568e3de5aea\" (UID: \"10edb733-8fab-4543-ad29-3568e3de5aea\") " Jan 20 17:06:14 crc kubenswrapper[4558]: I0120 17:06:14.879549 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/10edb733-8fab-4543-ad29-3568e3de5aea-cache\") pod \"10edb733-8fab-4543-ad29-3568e3de5aea\" (UID: \"10edb733-8fab-4543-ad29-3568e3de5aea\") " Jan 20 17:06:14 crc kubenswrapper[4558]: I0120 17:06:14.879657 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swift\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"10edb733-8fab-4543-ad29-3568e3de5aea\" (UID: \"10edb733-8fab-4543-ad29-3568e3de5aea\") " Jan 20 17:06:14 crc kubenswrapper[4558]: I0120 17:06:14.879691 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/10edb733-8fab-4543-ad29-3568e3de5aea-lock\") pod \"10edb733-8fab-4543-ad29-3568e3de5aea\" (UID: \"10edb733-8fab-4543-ad29-3568e3de5aea\") " Jan 20 17:06:14 crc kubenswrapper[4558]: I0120 17:06:14.879733 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xkq7g\" (UniqueName: \"kubernetes.io/projected/10edb733-8fab-4543-ad29-3568e3de5aea-kube-api-access-xkq7g\") pod \"10edb733-8fab-4543-ad29-3568e3de5aea\" (UID: \"10edb733-8fab-4543-ad29-3568e3de5aea\") 
" Jan 20 17:06:14 crc kubenswrapper[4558]: I0120 17:06:14.880226 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/10edb733-8fab-4543-ad29-3568e3de5aea-cache" (OuterVolumeSpecName: "cache") pod "10edb733-8fab-4543-ad29-3568e3de5aea" (UID: "10edb733-8fab-4543-ad29-3568e3de5aea"). InnerVolumeSpecName "cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:06:14 crc kubenswrapper[4558]: I0120 17:06:14.880252 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/10edb733-8fab-4543-ad29-3568e3de5aea-lock" (OuterVolumeSpecName: "lock") pod "10edb733-8fab-4543-ad29-3568e3de5aea" (UID: "10edb733-8fab-4543-ad29-3568e3de5aea"). InnerVolumeSpecName "lock". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:06:14 crc kubenswrapper[4558]: I0120 17:06:14.887322 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "swift") pod "10edb733-8fab-4543-ad29-3568e3de5aea" (UID: "10edb733-8fab-4543-ad29-3568e3de5aea"). InnerVolumeSpecName "local-storage05-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:06:14 crc kubenswrapper[4558]: I0120 17:06:14.887347 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/10edb733-8fab-4543-ad29-3568e3de5aea-kube-api-access-xkq7g" (OuterVolumeSpecName: "kube-api-access-xkq7g") pod "10edb733-8fab-4543-ad29-3568e3de5aea" (UID: "10edb733-8fab-4543-ad29-3568e3de5aea"). InnerVolumeSpecName "kube-api-access-xkq7g". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:06:14 crc kubenswrapper[4558]: I0120 17:06:14.887376 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/10edb733-8fab-4543-ad29-3568e3de5aea-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "10edb733-8fab-4543-ad29-3568e3de5aea" (UID: "10edb733-8fab-4543-ad29-3568e3de5aea"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:06:14 crc kubenswrapper[4558]: I0120 17:06:14.889238 4558 scope.go:117] "RemoveContainer" containerID="fc9635ec26b842f65146e97cb678f89fadc8730a998b6cc02a5c959a0601c4bf" Jan 20 17:06:14 crc kubenswrapper[4558]: I0120 17:06:14.929364 4558 scope.go:117] "RemoveContainer" containerID="103cf0e411f4211e757684612d3274497733c55f59ba7dc63dd3ad2bfec99d18" Jan 20 17:06:14 crc kubenswrapper[4558]: I0120 17:06:14.943732 4558 scope.go:117] "RemoveContainer" containerID="6d71dae893fc6a1962df504002d772c4cfe40ee0a96ad60259292b2cd410de30" Jan 20 17:06:14 crc kubenswrapper[4558]: I0120 17:06:14.957474 4558 scope.go:117] "RemoveContainer" containerID="87f7b0a910a29230a9846bbbde9e7c30fd5ead2357d9f18d957764288be50fe1" Jan 20 17:06:14 crc kubenswrapper[4558]: I0120 17:06:14.970842 4558 scope.go:117] "RemoveContainer" containerID="2063896701b2ecd29a95f5f7c1e566bc1a196a26a0392d6e32c7bb743ea5dbde" Jan 20 17:06:14 crc kubenswrapper[4558]: E0120 17:06:14.971230 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2063896701b2ecd29a95f5f7c1e566bc1a196a26a0392d6e32c7bb743ea5dbde\": container with ID starting with 2063896701b2ecd29a95f5f7c1e566bc1a196a26a0392d6e32c7bb743ea5dbde not found: ID does not exist" containerID="2063896701b2ecd29a95f5f7c1e566bc1a196a26a0392d6e32c7bb743ea5dbde" Jan 20 17:06:14 crc kubenswrapper[4558]: I0120 17:06:14.971272 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2063896701b2ecd29a95f5f7c1e566bc1a196a26a0392d6e32c7bb743ea5dbde"} err="failed to get container status \"2063896701b2ecd29a95f5f7c1e566bc1a196a26a0392d6e32c7bb743ea5dbde\": rpc error: code = NotFound desc = could not find container \"2063896701b2ecd29a95f5f7c1e566bc1a196a26a0392d6e32c7bb743ea5dbde\": container with ID starting with 2063896701b2ecd29a95f5f7c1e566bc1a196a26a0392d6e32c7bb743ea5dbde not found: ID does not exist" Jan 20 17:06:14 crc kubenswrapper[4558]: I0120 17:06:14.971299 4558 scope.go:117] "RemoveContainer" containerID="944fc0bf7c16ccd30559b80cdc699372748ca2f14ed832fbb7c9cbf4c5083aed" Jan 20 17:06:14 crc kubenswrapper[4558]: E0120 17:06:14.971591 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"944fc0bf7c16ccd30559b80cdc699372748ca2f14ed832fbb7c9cbf4c5083aed\": container with ID starting with 944fc0bf7c16ccd30559b80cdc699372748ca2f14ed832fbb7c9cbf4c5083aed not found: ID does not exist" containerID="944fc0bf7c16ccd30559b80cdc699372748ca2f14ed832fbb7c9cbf4c5083aed" Jan 20 17:06:14 crc kubenswrapper[4558]: I0120 17:06:14.971616 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"944fc0bf7c16ccd30559b80cdc699372748ca2f14ed832fbb7c9cbf4c5083aed"} err="failed to get container status \"944fc0bf7c16ccd30559b80cdc699372748ca2f14ed832fbb7c9cbf4c5083aed\": rpc error: code = NotFound desc = could not find container \"944fc0bf7c16ccd30559b80cdc699372748ca2f14ed832fbb7c9cbf4c5083aed\": container with ID starting with 944fc0bf7c16ccd30559b80cdc699372748ca2f14ed832fbb7c9cbf4c5083aed not found: ID does not exist" Jan 20 17:06:14 crc kubenswrapper[4558]: I0120 17:06:14.971632 4558 scope.go:117] "RemoveContainer" containerID="8e36100266a8d13e7ab5ec75f4795cb2bfafd04327c6000ba4236e64554b079e" Jan 20 17:06:14 crc kubenswrapper[4558]: E0120 17:06:14.971922 4558 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"8e36100266a8d13e7ab5ec75f4795cb2bfafd04327c6000ba4236e64554b079e\": container with ID starting with 8e36100266a8d13e7ab5ec75f4795cb2bfafd04327c6000ba4236e64554b079e not found: ID does not exist" containerID="8e36100266a8d13e7ab5ec75f4795cb2bfafd04327c6000ba4236e64554b079e" Jan 20 17:06:14 crc kubenswrapper[4558]: I0120 17:06:14.971951 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8e36100266a8d13e7ab5ec75f4795cb2bfafd04327c6000ba4236e64554b079e"} err="failed to get container status \"8e36100266a8d13e7ab5ec75f4795cb2bfafd04327c6000ba4236e64554b079e\": rpc error: code = NotFound desc = could not find container \"8e36100266a8d13e7ab5ec75f4795cb2bfafd04327c6000ba4236e64554b079e\": container with ID starting with 8e36100266a8d13e7ab5ec75f4795cb2bfafd04327c6000ba4236e64554b079e not found: ID does not exist" Jan 20 17:06:14 crc kubenswrapper[4558]: I0120 17:06:14.971971 4558 scope.go:117] "RemoveContainer" containerID="064c39cc0ed32246609bc4e80f5c679dc77d71ae4e0e7325c8b80942e4b6cd46" Jan 20 17:06:14 crc kubenswrapper[4558]: E0120 17:06:14.972356 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"064c39cc0ed32246609bc4e80f5c679dc77d71ae4e0e7325c8b80942e4b6cd46\": container with ID starting with 064c39cc0ed32246609bc4e80f5c679dc77d71ae4e0e7325c8b80942e4b6cd46 not found: ID does not exist" containerID="064c39cc0ed32246609bc4e80f5c679dc77d71ae4e0e7325c8b80942e4b6cd46" Jan 20 17:06:14 crc kubenswrapper[4558]: I0120 17:06:14.972382 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"064c39cc0ed32246609bc4e80f5c679dc77d71ae4e0e7325c8b80942e4b6cd46"} err="failed to get container status \"064c39cc0ed32246609bc4e80f5c679dc77d71ae4e0e7325c8b80942e4b6cd46\": rpc error: code = NotFound desc = could not find container \"064c39cc0ed32246609bc4e80f5c679dc77d71ae4e0e7325c8b80942e4b6cd46\": container with ID starting with 064c39cc0ed32246609bc4e80f5c679dc77d71ae4e0e7325c8b80942e4b6cd46 not found: ID does not exist" Jan 20 17:06:14 crc kubenswrapper[4558]: I0120 17:06:14.972396 4558 scope.go:117] "RemoveContainer" containerID="a00e2bc91063a2dabb4c2ce5cfdc3e96683c6b04440f6eeeadc69adf7e0e8a23" Jan 20 17:06:14 crc kubenswrapper[4558]: E0120 17:06:14.972589 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a00e2bc91063a2dabb4c2ce5cfdc3e96683c6b04440f6eeeadc69adf7e0e8a23\": container with ID starting with a00e2bc91063a2dabb4c2ce5cfdc3e96683c6b04440f6eeeadc69adf7e0e8a23 not found: ID does not exist" containerID="a00e2bc91063a2dabb4c2ce5cfdc3e96683c6b04440f6eeeadc69adf7e0e8a23" Jan 20 17:06:14 crc kubenswrapper[4558]: I0120 17:06:14.972610 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a00e2bc91063a2dabb4c2ce5cfdc3e96683c6b04440f6eeeadc69adf7e0e8a23"} err="failed to get container status \"a00e2bc91063a2dabb4c2ce5cfdc3e96683c6b04440f6eeeadc69adf7e0e8a23\": rpc error: code = NotFound desc = could not find container \"a00e2bc91063a2dabb4c2ce5cfdc3e96683c6b04440f6eeeadc69adf7e0e8a23\": container with ID starting with a00e2bc91063a2dabb4c2ce5cfdc3e96683c6b04440f6eeeadc69adf7e0e8a23 not found: ID does not exist" Jan 20 17:06:14 crc kubenswrapper[4558]: I0120 17:06:14.972623 4558 scope.go:117] "RemoveContainer" 
containerID="1ad6198d74b43bff1f98c2de1da53f4e18e93c848f58a003ec586b574d66eb1f" Jan 20 17:06:14 crc kubenswrapper[4558]: E0120 17:06:14.972813 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1ad6198d74b43bff1f98c2de1da53f4e18e93c848f58a003ec586b574d66eb1f\": container with ID starting with 1ad6198d74b43bff1f98c2de1da53f4e18e93c848f58a003ec586b574d66eb1f not found: ID does not exist" containerID="1ad6198d74b43bff1f98c2de1da53f4e18e93c848f58a003ec586b574d66eb1f" Jan 20 17:06:14 crc kubenswrapper[4558]: I0120 17:06:14.972833 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1ad6198d74b43bff1f98c2de1da53f4e18e93c848f58a003ec586b574d66eb1f"} err="failed to get container status \"1ad6198d74b43bff1f98c2de1da53f4e18e93c848f58a003ec586b574d66eb1f\": rpc error: code = NotFound desc = could not find container \"1ad6198d74b43bff1f98c2de1da53f4e18e93c848f58a003ec586b574d66eb1f\": container with ID starting with 1ad6198d74b43bff1f98c2de1da53f4e18e93c848f58a003ec586b574d66eb1f not found: ID does not exist" Jan 20 17:06:14 crc kubenswrapper[4558]: I0120 17:06:14.972845 4558 scope.go:117] "RemoveContainer" containerID="7da72c8a88027dd1e413e53fd692c99fc03f3e37634323ae66f7e9f55370719e" Jan 20 17:06:14 crc kubenswrapper[4558]: E0120 17:06:14.973017 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7da72c8a88027dd1e413e53fd692c99fc03f3e37634323ae66f7e9f55370719e\": container with ID starting with 7da72c8a88027dd1e413e53fd692c99fc03f3e37634323ae66f7e9f55370719e not found: ID does not exist" containerID="7da72c8a88027dd1e413e53fd692c99fc03f3e37634323ae66f7e9f55370719e" Jan 20 17:06:14 crc kubenswrapper[4558]: I0120 17:06:14.973041 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7da72c8a88027dd1e413e53fd692c99fc03f3e37634323ae66f7e9f55370719e"} err="failed to get container status \"7da72c8a88027dd1e413e53fd692c99fc03f3e37634323ae66f7e9f55370719e\": rpc error: code = NotFound desc = could not find container \"7da72c8a88027dd1e413e53fd692c99fc03f3e37634323ae66f7e9f55370719e\": container with ID starting with 7da72c8a88027dd1e413e53fd692c99fc03f3e37634323ae66f7e9f55370719e not found: ID does not exist" Jan 20 17:06:14 crc kubenswrapper[4558]: I0120 17:06:14.973056 4558 scope.go:117] "RemoveContainer" containerID="8f45926ec3655a6641edb5a009493f355b1f6533cdddced2af79c34663d3397c" Jan 20 17:06:14 crc kubenswrapper[4558]: E0120 17:06:14.973245 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8f45926ec3655a6641edb5a009493f355b1f6533cdddced2af79c34663d3397c\": container with ID starting with 8f45926ec3655a6641edb5a009493f355b1f6533cdddced2af79c34663d3397c not found: ID does not exist" containerID="8f45926ec3655a6641edb5a009493f355b1f6533cdddced2af79c34663d3397c" Jan 20 17:06:14 crc kubenswrapper[4558]: I0120 17:06:14.973265 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8f45926ec3655a6641edb5a009493f355b1f6533cdddced2af79c34663d3397c"} err="failed to get container status \"8f45926ec3655a6641edb5a009493f355b1f6533cdddced2af79c34663d3397c\": rpc error: code = NotFound desc = could not find container \"8f45926ec3655a6641edb5a009493f355b1f6533cdddced2af79c34663d3397c\": container with ID starting with 
8f45926ec3655a6641edb5a009493f355b1f6533cdddced2af79c34663d3397c not found: ID does not exist" Jan 20 17:06:14 crc kubenswrapper[4558]: I0120 17:06:14.973281 4558 scope.go:117] "RemoveContainer" containerID="576e4656d99cd9370e77241bb14cd66a4de3e94deaef7d304253b1816ec67dd5" Jan 20 17:06:14 crc kubenswrapper[4558]: E0120 17:06:14.973502 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"576e4656d99cd9370e77241bb14cd66a4de3e94deaef7d304253b1816ec67dd5\": container with ID starting with 576e4656d99cd9370e77241bb14cd66a4de3e94deaef7d304253b1816ec67dd5 not found: ID does not exist" containerID="576e4656d99cd9370e77241bb14cd66a4de3e94deaef7d304253b1816ec67dd5" Jan 20 17:06:14 crc kubenswrapper[4558]: I0120 17:06:14.973531 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"576e4656d99cd9370e77241bb14cd66a4de3e94deaef7d304253b1816ec67dd5"} err="failed to get container status \"576e4656d99cd9370e77241bb14cd66a4de3e94deaef7d304253b1816ec67dd5\": rpc error: code = NotFound desc = could not find container \"576e4656d99cd9370e77241bb14cd66a4de3e94deaef7d304253b1816ec67dd5\": container with ID starting with 576e4656d99cd9370e77241bb14cd66a4de3e94deaef7d304253b1816ec67dd5 not found: ID does not exist" Jan 20 17:06:14 crc kubenswrapper[4558]: I0120 17:06:14.973544 4558 scope.go:117] "RemoveContainer" containerID="f2f9c003229a8c4bff074beb609ad6fba9bf0167b989fcddbe4ee674bb6a73bd" Jan 20 17:06:14 crc kubenswrapper[4558]: E0120 17:06:14.973741 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f2f9c003229a8c4bff074beb609ad6fba9bf0167b989fcddbe4ee674bb6a73bd\": container with ID starting with f2f9c003229a8c4bff074beb609ad6fba9bf0167b989fcddbe4ee674bb6a73bd not found: ID does not exist" containerID="f2f9c003229a8c4bff074beb609ad6fba9bf0167b989fcddbe4ee674bb6a73bd" Jan 20 17:06:14 crc kubenswrapper[4558]: I0120 17:06:14.973764 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f2f9c003229a8c4bff074beb609ad6fba9bf0167b989fcddbe4ee674bb6a73bd"} err="failed to get container status \"f2f9c003229a8c4bff074beb609ad6fba9bf0167b989fcddbe4ee674bb6a73bd\": rpc error: code = NotFound desc = could not find container \"f2f9c003229a8c4bff074beb609ad6fba9bf0167b989fcddbe4ee674bb6a73bd\": container with ID starting with f2f9c003229a8c4bff074beb609ad6fba9bf0167b989fcddbe4ee674bb6a73bd not found: ID does not exist" Jan 20 17:06:14 crc kubenswrapper[4558]: I0120 17:06:14.973778 4558 scope.go:117] "RemoveContainer" containerID="6e25c21c1436e8d3c70552c967000697e095b890d75c613e94acbebcf601e385" Jan 20 17:06:14 crc kubenswrapper[4558]: E0120 17:06:14.974004 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6e25c21c1436e8d3c70552c967000697e095b890d75c613e94acbebcf601e385\": container with ID starting with 6e25c21c1436e8d3c70552c967000697e095b890d75c613e94acbebcf601e385 not found: ID does not exist" containerID="6e25c21c1436e8d3c70552c967000697e095b890d75c613e94acbebcf601e385" Jan 20 17:06:14 crc kubenswrapper[4558]: I0120 17:06:14.974051 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6e25c21c1436e8d3c70552c967000697e095b890d75c613e94acbebcf601e385"} err="failed to get container status \"6e25c21c1436e8d3c70552c967000697e095b890d75c613e94acbebcf601e385\": rpc 
error: code = NotFound desc = could not find container \"6e25c21c1436e8d3c70552c967000697e095b890d75c613e94acbebcf601e385\": container with ID starting with 6e25c21c1436e8d3c70552c967000697e095b890d75c613e94acbebcf601e385 not found: ID does not exist" Jan 20 17:06:14 crc kubenswrapper[4558]: I0120 17:06:14.974092 4558 scope.go:117] "RemoveContainer" containerID="fc9635ec26b842f65146e97cb678f89fadc8730a998b6cc02a5c959a0601c4bf" Jan 20 17:06:14 crc kubenswrapper[4558]: E0120 17:06:14.974375 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fc9635ec26b842f65146e97cb678f89fadc8730a998b6cc02a5c959a0601c4bf\": container with ID starting with fc9635ec26b842f65146e97cb678f89fadc8730a998b6cc02a5c959a0601c4bf not found: ID does not exist" containerID="fc9635ec26b842f65146e97cb678f89fadc8730a998b6cc02a5c959a0601c4bf" Jan 20 17:06:14 crc kubenswrapper[4558]: I0120 17:06:14.974407 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fc9635ec26b842f65146e97cb678f89fadc8730a998b6cc02a5c959a0601c4bf"} err="failed to get container status \"fc9635ec26b842f65146e97cb678f89fadc8730a998b6cc02a5c959a0601c4bf\": rpc error: code = NotFound desc = could not find container \"fc9635ec26b842f65146e97cb678f89fadc8730a998b6cc02a5c959a0601c4bf\": container with ID starting with fc9635ec26b842f65146e97cb678f89fadc8730a998b6cc02a5c959a0601c4bf not found: ID does not exist" Jan 20 17:06:14 crc kubenswrapper[4558]: I0120 17:06:14.974423 4558 scope.go:117] "RemoveContainer" containerID="103cf0e411f4211e757684612d3274497733c55f59ba7dc63dd3ad2bfec99d18" Jan 20 17:06:14 crc kubenswrapper[4558]: E0120 17:06:14.974645 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"103cf0e411f4211e757684612d3274497733c55f59ba7dc63dd3ad2bfec99d18\": container with ID starting with 103cf0e411f4211e757684612d3274497733c55f59ba7dc63dd3ad2bfec99d18 not found: ID does not exist" containerID="103cf0e411f4211e757684612d3274497733c55f59ba7dc63dd3ad2bfec99d18" Jan 20 17:06:14 crc kubenswrapper[4558]: I0120 17:06:14.974678 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"103cf0e411f4211e757684612d3274497733c55f59ba7dc63dd3ad2bfec99d18"} err="failed to get container status \"103cf0e411f4211e757684612d3274497733c55f59ba7dc63dd3ad2bfec99d18\": rpc error: code = NotFound desc = could not find container \"103cf0e411f4211e757684612d3274497733c55f59ba7dc63dd3ad2bfec99d18\": container with ID starting with 103cf0e411f4211e757684612d3274497733c55f59ba7dc63dd3ad2bfec99d18 not found: ID does not exist" Jan 20 17:06:14 crc kubenswrapper[4558]: I0120 17:06:14.974704 4558 scope.go:117] "RemoveContainer" containerID="6d71dae893fc6a1962df504002d772c4cfe40ee0a96ad60259292b2cd410de30" Jan 20 17:06:14 crc kubenswrapper[4558]: E0120 17:06:14.974935 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6d71dae893fc6a1962df504002d772c4cfe40ee0a96ad60259292b2cd410de30\": container with ID starting with 6d71dae893fc6a1962df504002d772c4cfe40ee0a96ad60259292b2cd410de30 not found: ID does not exist" containerID="6d71dae893fc6a1962df504002d772c4cfe40ee0a96ad60259292b2cd410de30" Jan 20 17:06:14 crc kubenswrapper[4558]: I0120 17:06:14.974963 4558 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"6d71dae893fc6a1962df504002d772c4cfe40ee0a96ad60259292b2cd410de30"} err="failed to get container status \"6d71dae893fc6a1962df504002d772c4cfe40ee0a96ad60259292b2cd410de30\": rpc error: code = NotFound desc = could not find container \"6d71dae893fc6a1962df504002d772c4cfe40ee0a96ad60259292b2cd410de30\": container with ID starting with 6d71dae893fc6a1962df504002d772c4cfe40ee0a96ad60259292b2cd410de30 not found: ID does not exist" Jan 20 17:06:14 crc kubenswrapper[4558]: I0120 17:06:14.974980 4558 scope.go:117] "RemoveContainer" containerID="87f7b0a910a29230a9846bbbde9e7c30fd5ead2357d9f18d957764288be50fe1" Jan 20 17:06:14 crc kubenswrapper[4558]: E0120 17:06:14.975228 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"87f7b0a910a29230a9846bbbde9e7c30fd5ead2357d9f18d957764288be50fe1\": container with ID starting with 87f7b0a910a29230a9846bbbde9e7c30fd5ead2357d9f18d957764288be50fe1 not found: ID does not exist" containerID="87f7b0a910a29230a9846bbbde9e7c30fd5ead2357d9f18d957764288be50fe1" Jan 20 17:06:14 crc kubenswrapper[4558]: I0120 17:06:14.975263 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"87f7b0a910a29230a9846bbbde9e7c30fd5ead2357d9f18d957764288be50fe1"} err="failed to get container status \"87f7b0a910a29230a9846bbbde9e7c30fd5ead2357d9f18d957764288be50fe1\": rpc error: code = NotFound desc = could not find container \"87f7b0a910a29230a9846bbbde9e7c30fd5ead2357d9f18d957764288be50fe1\": container with ID starting with 87f7b0a910a29230a9846bbbde9e7c30fd5ead2357d9f18d957764288be50fe1 not found: ID does not exist" Jan 20 17:06:14 crc kubenswrapper[4558]: I0120 17:06:14.981485 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" " Jan 20 17:06:14 crc kubenswrapper[4558]: I0120 17:06:14.981510 4558 reconciler_common.go:293] "Volume detached for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/10edb733-8fab-4543-ad29-3568e3de5aea-lock\") on node \"crc\" DevicePath \"\"" Jan 20 17:06:14 crc kubenswrapper[4558]: I0120 17:06:14.981522 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xkq7g\" (UniqueName: \"kubernetes.io/projected/10edb733-8fab-4543-ad29-3568e3de5aea-kube-api-access-xkq7g\") on node \"crc\" DevicePath \"\"" Jan 20 17:06:14 crc kubenswrapper[4558]: I0120 17:06:14.981532 4558 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/10edb733-8fab-4543-ad29-3568e3de5aea-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 20 17:06:14 crc kubenswrapper[4558]: I0120 17:06:14.981544 4558 reconciler_common.go:293] "Volume detached for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/10edb733-8fab-4543-ad29-3568e3de5aea-cache\") on node \"crc\" DevicePath \"\"" Jan 20 17:06:14 crc kubenswrapper[4558]: I0120 17:06:14.992046 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc" Jan 20 17:06:15 crc kubenswrapper[4558]: I0120 17:06:15.051845 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-storage-0"] Jan 20 17:06:15 crc kubenswrapper[4558]: I0120 17:06:15.071439 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/swift-storage-0"] Jan 20 
17:06:15 crc kubenswrapper[4558]: I0120 17:06:15.083399 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:06:15 crc kubenswrapper[4558]: E0120 17:06:15.824489 4558 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod19189ead_bcd7_4806_be88_43cc27d5f202.slice/crio-conmon-80a7fbf9ebd999f9e255d17b6edb57d7cd1a880d930872a51bd1d1c2a19b20a5.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod19189ead_bcd7_4806_be88_43cc27d5f202.slice/crio-80a7fbf9ebd999f9e255d17b6edb57d7cd1a880d930872a51bd1d1c2a19b20a5.scope\": RecentStats: unable to find data in memory cache]" Jan 20 17:06:16 crc kubenswrapper[4558]: I0120 17:06:16.573614 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="10edb733-8fab-4543-ad29-3568e3de5aea" path="/var/lib/kubelet/pods/10edb733-8fab-4543-ad29-3568e3de5aea/volumes" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.892953 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-fnm2v"] Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.897287 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-fnm2v"] Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.992899 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-2ztxn"] Jan 20 17:06:23 crc kubenswrapper[4558]: E0120 17:06:23.993197 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32a3bcd7-25d6-45f5-8ce6-66949357504c" containerName="proxy-httpd" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.993216 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="32a3bcd7-25d6-45f5-8ce6-66949357504c" containerName="proxy-httpd" Jan 20 17:06:23 crc kubenswrapper[4558]: E0120 17:06:23.993234 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="084a5ce8-2844-42f1-92a9-973b78505050" containerName="nova-cell1-conductor-conductor" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.993241 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="084a5ce8-2844-42f1-92a9-973b78505050" containerName="nova-cell1-conductor-conductor" Jan 20 17:06:23 crc kubenswrapper[4558]: E0120 17:06:23.993250 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10edb733-8fab-4543-ad29-3568e3de5aea" containerName="container-updater" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.993257 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="10edb733-8fab-4543-ad29-3568e3de5aea" containerName="container-updater" Jan 20 17:06:23 crc kubenswrapper[4558]: E0120 17:06:23.993264 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa387033-dc08-48f4-bf56-06a7f316423c" containerName="glance-httpd" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.993268 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa387033-dc08-48f4-bf56-06a7f316423c" containerName="glance-httpd" Jan 20 17:06:23 crc kubenswrapper[4558]: E0120 17:06:23.993276 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc2c7d29-d967-414d-8bb9-1bf70bcacdcd" containerName="barbican-keystone-listener-log" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.993281 4558 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="bc2c7d29-d967-414d-8bb9-1bf70bcacdcd" containerName="barbican-keystone-listener-log" Jan 20 17:06:23 crc kubenswrapper[4558]: E0120 17:06:23.993290 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c75ee5c-beea-4b1e-b429-91e83a472529" containerName="barbican-api" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.993295 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c75ee5c-beea-4b1e-b429-91e83a472529" containerName="barbican-api" Jan 20 17:06:23 crc kubenswrapper[4558]: E0120 17:06:23.993317 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="51e263b2-d42e-46fa-99e1-e0c5aa23bcf5" containerName="nova-api-api" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.993323 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="51e263b2-d42e-46fa-99e1-e0c5aa23bcf5" containerName="nova-api-api" Jan 20 17:06:23 crc kubenswrapper[4558]: E0120 17:06:23.993334 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c7187d14-f25b-4344-bf36-7d56d8e1b79c" containerName="ceilometer-central-agent" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.993340 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c7187d14-f25b-4344-bf36-7d56d8e1b79c" containerName="ceilometer-central-agent" Jan 20 17:06:23 crc kubenswrapper[4558]: E0120 17:06:23.993349 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1fccb241-c75e-4cec-b3c6-3855bd6c1161" containerName="nova-metadata-log" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.993354 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1fccb241-c75e-4cec-b3c6-3855bd6c1161" containerName="nova-metadata-log" Jan 20 17:06:23 crc kubenswrapper[4558]: E0120 17:06:23.993360 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19189ead-bcd7-4806-be88-43cc27d5f202" containerName="mariadb-account-create-update" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.993365 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="19189ead-bcd7-4806-be88-43cc27d5f202" containerName="mariadb-account-create-update" Jan 20 17:06:23 crc kubenswrapper[4558]: E0120 17:06:23.993372 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1fccb241-c75e-4cec-b3c6-3855bd6c1161" containerName="nova-metadata-metadata" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.993377 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1fccb241-c75e-4cec-b3c6-3855bd6c1161" containerName="nova-metadata-metadata" Jan 20 17:06:23 crc kubenswrapper[4558]: E0120 17:06:23.993385 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac55b716-d8fd-4628-8627-f94b5a4e7c78" containerName="rabbitmq" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.993390 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac55b716-d8fd-4628-8627-f94b5a4e7c78" containerName="rabbitmq" Jan 20 17:06:23 crc kubenswrapper[4558]: E0120 17:06:23.993399 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="51e263b2-d42e-46fa-99e1-e0c5aa23bcf5" containerName="nova-api-log" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.993404 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="51e263b2-d42e-46fa-99e1-e0c5aa23bcf5" containerName="nova-api-log" Jan 20 17:06:23 crc kubenswrapper[4558]: E0120 17:06:23.993413 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="33be1904-bd58-48cc-806a-af1dc751717c" containerName="ovn-northd" Jan 20 17:06:23 crc kubenswrapper[4558]: 
I0120 17:06:23.993419 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="33be1904-bd58-48cc-806a-af1dc751717c" containerName="ovn-northd" Jan 20 17:06:23 crc kubenswrapper[4558]: E0120 17:06:23.993424 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="33be1904-bd58-48cc-806a-af1dc751717c" containerName="openstack-network-exporter" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.993430 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="33be1904-bd58-48cc-806a-af1dc751717c" containerName="openstack-network-exporter" Jan 20 17:06:23 crc kubenswrapper[4558]: E0120 17:06:23.993435 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c7187d14-f25b-4344-bf36-7d56d8e1b79c" containerName="proxy-httpd" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.993440 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c7187d14-f25b-4344-bf36-7d56d8e1b79c" containerName="proxy-httpd" Jan 20 17:06:23 crc kubenswrapper[4558]: E0120 17:06:23.993447 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10edb733-8fab-4543-ad29-3568e3de5aea" containerName="rsync" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.993452 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="10edb733-8fab-4543-ad29-3568e3de5aea" containerName="rsync" Jan 20 17:06:23 crc kubenswrapper[4558]: E0120 17:06:23.993460 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bfbf270c-1853-4113-b0e4-6d192abb5c5d" containerName="barbican-worker-log" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.993465 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="bfbf270c-1853-4113-b0e4-6d192abb5c5d" containerName="barbican-worker-log" Jan 20 17:06:23 crc kubenswrapper[4558]: E0120 17:06:23.993471 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c7187d14-f25b-4344-bf36-7d56d8e1b79c" containerName="ceilometer-notification-agent" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.993476 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c7187d14-f25b-4344-bf36-7d56d8e1b79c" containerName="ceilometer-notification-agent" Jan 20 17:06:23 crc kubenswrapper[4558]: E0120 17:06:23.993484 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa011f19-1245-4ae8-ae2b-8773036498b3" containerName="neutron-httpd" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.993488 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa011f19-1245-4ae8-ae2b-8773036498b3" containerName="neutron-httpd" Jan 20 17:06:23 crc kubenswrapper[4558]: E0120 17:06:23.993497 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10edb733-8fab-4543-ad29-3568e3de5aea" containerName="object-expirer" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.993503 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="10edb733-8fab-4543-ad29-3568e3de5aea" containerName="object-expirer" Jan 20 17:06:23 crc kubenswrapper[4558]: E0120 17:06:23.993511 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc2c7d29-d967-414d-8bb9-1bf70bcacdcd" containerName="barbican-keystone-listener" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.993516 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc2c7d29-d967-414d-8bb9-1bf70bcacdcd" containerName="barbican-keystone-listener" Jan 20 17:06:23 crc kubenswrapper[4558]: E0120 17:06:23.993524 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa387033-dc08-48f4-bf56-06a7f316423c" 
containerName="glance-log" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.993529 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa387033-dc08-48f4-bf56-06a7f316423c" containerName="glance-log" Jan 20 17:06:23 crc kubenswrapper[4558]: E0120 17:06:23.993540 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20ba561a-58e7-459f-ba28-ed0b68cdab9b" containerName="glance-httpd" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.993546 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="20ba561a-58e7-459f-ba28-ed0b68cdab9b" containerName="glance-httpd" Jan 20 17:06:23 crc kubenswrapper[4558]: E0120 17:06:23.993553 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a98c5227-c8a8-4cbc-8039-461dd14fbd5b" containerName="placement-api" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.993558 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a98c5227-c8a8-4cbc-8039-461dd14fbd5b" containerName="placement-api" Jan 20 17:06:23 crc kubenswrapper[4558]: E0120 17:06:23.993565 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32a3bcd7-25d6-45f5-8ce6-66949357504c" containerName="proxy-server" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.993570 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="32a3bcd7-25d6-45f5-8ce6-66949357504c" containerName="proxy-server" Jan 20 17:06:23 crc kubenswrapper[4558]: E0120 17:06:23.993577 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10edb733-8fab-4543-ad29-3568e3de5aea" containerName="account-replicator" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.993582 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="10edb733-8fab-4543-ad29-3568e3de5aea" containerName="account-replicator" Jan 20 17:06:23 crc kubenswrapper[4558]: E0120 17:06:23.993587 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10edb733-8fab-4543-ad29-3568e3de5aea" containerName="container-replicator" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.993592 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="10edb733-8fab-4543-ad29-3568e3de5aea" containerName="container-replicator" Jan 20 17:06:23 crc kubenswrapper[4558]: E0120 17:06:23.993598 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20ba561a-58e7-459f-ba28-ed0b68cdab9b" containerName="glance-log" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.993603 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="20ba561a-58e7-459f-ba28-ed0b68cdab9b" containerName="glance-log" Jan 20 17:06:23 crc kubenswrapper[4558]: E0120 17:06:23.993610 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad58cc97-cbed-48c8-ab51-ebb920a1454c" containerName="galera" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.993615 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad58cc97-cbed-48c8-ab51-ebb920a1454c" containerName="galera" Jan 20 17:06:23 crc kubenswrapper[4558]: E0120 17:06:23.993621 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6df6ca0e-78e9-4248-8cbe-b9934e0ad090" containerName="galera" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.993625 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6df6ca0e-78e9-4248-8cbe-b9934e0ad090" containerName="galera" Jan 20 17:06:23 crc kubenswrapper[4558]: E0120 17:06:23.993630 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c7187d14-f25b-4344-bf36-7d56d8e1b79c" containerName="sg-core" Jan 20 
17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.993635 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c7187d14-f25b-4344-bf36-7d56d8e1b79c" containerName="sg-core" Jan 20 17:06:23 crc kubenswrapper[4558]: E0120 17:06:23.993641 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38786f1c-754c-488d-8a13-aad7001ad778" containerName="nova-scheduler-scheduler" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.993646 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="38786f1c-754c-488d-8a13-aad7001ad778" containerName="nova-scheduler-scheduler" Jan 20 17:06:23 crc kubenswrapper[4558]: E0120 17:06:23.993652 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6df6ca0e-78e9-4248-8cbe-b9934e0ad090" containerName="mysql-bootstrap" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.993658 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6df6ca0e-78e9-4248-8cbe-b9934e0ad090" containerName="mysql-bootstrap" Jan 20 17:06:23 crc kubenswrapper[4558]: E0120 17:06:23.993665 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10edb733-8fab-4543-ad29-3568e3de5aea" containerName="object-replicator" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.993671 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="10edb733-8fab-4543-ad29-3568e3de5aea" containerName="object-replicator" Jan 20 17:06:23 crc kubenswrapper[4558]: E0120 17:06:23.993679 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad58cc97-cbed-48c8-ab51-ebb920a1454c" containerName="mysql-bootstrap" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.993684 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad58cc97-cbed-48c8-ab51-ebb920a1454c" containerName="mysql-bootstrap" Jan 20 17:06:23 crc kubenswrapper[4558]: E0120 17:06:23.993690 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10edb733-8fab-4543-ad29-3568e3de5aea" containerName="account-server" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.993696 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="10edb733-8fab-4543-ad29-3568e3de5aea" containerName="account-server" Jan 20 17:06:23 crc kubenswrapper[4558]: E0120 17:06:23.993702 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9c2a3b1-71ed-4612-8cd0-22e396cd622c" containerName="kube-state-metrics" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.993709 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9c2a3b1-71ed-4612-8cd0-22e396cd622c" containerName="kube-state-metrics" Jan 20 17:06:23 crc kubenswrapper[4558]: E0120 17:06:23.993716 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aed5856e-9412-48a1-ac94-cc2f4bd05633" containerName="memcached" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.993721 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="aed5856e-9412-48a1-ac94-cc2f4bd05633" containerName="memcached" Jan 20 17:06:23 crc kubenswrapper[4558]: E0120 17:06:23.993729 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10edb733-8fab-4543-ad29-3568e3de5aea" containerName="account-auditor" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.993734 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="10edb733-8fab-4543-ad29-3568e3de5aea" containerName="account-auditor" Jan 20 17:06:23 crc kubenswrapper[4558]: E0120 17:06:23.993741 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c75ee5c-beea-4b1e-b429-91e83a472529" 
containerName="barbican-api-log" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.993747 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c75ee5c-beea-4b1e-b429-91e83a472529" containerName="barbican-api-log" Jan 20 17:06:23 crc kubenswrapper[4558]: E0120 17:06:23.993752 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6846508c-01bc-49e8-b98b-5df9ec57f030" containerName="init" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.993757 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6846508c-01bc-49e8-b98b-5df9ec57f030" containerName="init" Jan 20 17:06:23 crc kubenswrapper[4558]: E0120 17:06:23.993764 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10edb733-8fab-4543-ad29-3568e3de5aea" containerName="container-server" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.993769 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="10edb733-8fab-4543-ad29-3568e3de5aea" containerName="container-server" Jan 20 17:06:23 crc kubenswrapper[4558]: E0120 17:06:23.993775 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bfbf270c-1853-4113-b0e4-6d192abb5c5d" containerName="barbican-worker" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.993780 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="bfbf270c-1853-4113-b0e4-6d192abb5c5d" containerName="barbican-worker" Jan 20 17:06:23 crc kubenswrapper[4558]: E0120 17:06:23.993787 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a98c5227-c8a8-4cbc-8039-461dd14fbd5b" containerName="placement-log" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.993791 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a98c5227-c8a8-4cbc-8039-461dd14fbd5b" containerName="placement-log" Jan 20 17:06:23 crc kubenswrapper[4558]: E0120 17:06:23.993798 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10edb733-8fab-4543-ad29-3568e3de5aea" containerName="account-reaper" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.993802 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="10edb733-8fab-4543-ad29-3568e3de5aea" containerName="account-reaper" Jan 20 17:06:23 crc kubenswrapper[4558]: E0120 17:06:23.993808 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10edb733-8fab-4543-ad29-3568e3de5aea" containerName="object-updater" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.993813 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="10edb733-8fab-4543-ad29-3568e3de5aea" containerName="object-updater" Jan 20 17:06:23 crc kubenswrapper[4558]: E0120 17:06:23.993821 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f9ceb49-977a-47b3-a1f3-10d68c96ab0f" containerName="probe" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.993826 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f9ceb49-977a-47b3-a1f3-10d68c96ab0f" containerName="probe" Jan 20 17:06:23 crc kubenswrapper[4558]: E0120 17:06:23.993832 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac55b716-d8fd-4628-8627-f94b5a4e7c78" containerName="setup-container" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.993836 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac55b716-d8fd-4628-8627-f94b5a4e7c78" containerName="setup-container" Jan 20 17:06:23 crc kubenswrapper[4558]: E0120 17:06:23.993844 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10edb733-8fab-4543-ad29-3568e3de5aea" 
containerName="object-server" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.993849 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="10edb733-8fab-4543-ad29-3568e3de5aea" containerName="object-server" Jan 20 17:06:23 crc kubenswrapper[4558]: E0120 17:06:23.993856 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55d5d50d-edc1-4a99-9540-72a7f5f0c622" containerName="nova-cell0-conductor-conductor" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.993861 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="55d5d50d-edc1-4a99-9540-72a7f5f0c622" containerName="nova-cell0-conductor-conductor" Jan 20 17:06:23 crc kubenswrapper[4558]: E0120 17:06:23.993867 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0bfc3458-cc0f-4bea-9794-52c5e81fe055" containerName="rabbitmq" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.993872 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0bfc3458-cc0f-4bea-9794-52c5e81fe055" containerName="rabbitmq" Jan 20 17:06:23 crc kubenswrapper[4558]: E0120 17:06:23.993880 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa011f19-1245-4ae8-ae2b-8773036498b3" containerName="neutron-api" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.993884 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa011f19-1245-4ae8-ae2b-8773036498b3" containerName="neutron-api" Jan 20 17:06:23 crc kubenswrapper[4558]: E0120 17:06:23.993892 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0bfc3458-cc0f-4bea-9794-52c5e81fe055" containerName="setup-container" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.993897 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0bfc3458-cc0f-4bea-9794-52c5e81fe055" containerName="setup-container" Jan 20 17:06:23 crc kubenswrapper[4558]: E0120 17:06:23.993903 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10edb733-8fab-4543-ad29-3568e3de5aea" containerName="object-auditor" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.993907 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="10edb733-8fab-4543-ad29-3568e3de5aea" containerName="object-auditor" Jan 20 17:06:23 crc kubenswrapper[4558]: E0120 17:06:23.993913 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10edb733-8fab-4543-ad29-3568e3de5aea" containerName="swift-recon-cron" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.993918 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="10edb733-8fab-4543-ad29-3568e3de5aea" containerName="swift-recon-cron" Jan 20 17:06:23 crc kubenswrapper[4558]: E0120 17:06:23.993924 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ba42f6a-1ade-45ae-bdc4-117ad2fa866b" containerName="cinder-api" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.993930 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ba42f6a-1ade-45ae-bdc4-117ad2fa866b" containerName="cinder-api" Jan 20 17:06:23 crc kubenswrapper[4558]: E0120 17:06:23.993937 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b9f5a8b-ee37-483c-9a86-38cc24dcb388" containerName="keystone-api" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.993943 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b9f5a8b-ee37-483c-9a86-38cc24dcb388" containerName="keystone-api" Jan 20 17:06:23 crc kubenswrapper[4558]: E0120 17:06:23.993951 4558 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="6846508c-01bc-49e8-b98b-5df9ec57f030" containerName="dnsmasq-dns" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.993956 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6846508c-01bc-49e8-b98b-5df9ec57f030" containerName="dnsmasq-dns" Jan 20 17:06:23 crc kubenswrapper[4558]: E0120 17:06:23.993962 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ba42f6a-1ade-45ae-bdc4-117ad2fa866b" containerName="cinder-api-log" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.993967 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ba42f6a-1ade-45ae-bdc4-117ad2fa866b" containerName="cinder-api-log" Jan 20 17:06:23 crc kubenswrapper[4558]: E0120 17:06:23.993974 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f9ceb49-977a-47b3-a1f3-10d68c96ab0f" containerName="cinder-scheduler" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.993979 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f9ceb49-977a-47b3-a1f3-10d68c96ab0f" containerName="cinder-scheduler" Jan 20 17:06:23 crc kubenswrapper[4558]: E0120 17:06:23.993986 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1176545e-611b-4fc7-8b03-e91ee7813fd3" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.993991 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1176545e-611b-4fc7-8b03-e91ee7813fd3" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:06:23 crc kubenswrapper[4558]: E0120 17:06:23.993998 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10edb733-8fab-4543-ad29-3568e3de5aea" containerName="container-auditor" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.994002 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="10edb733-8fab-4543-ad29-3568e3de5aea" containerName="container-auditor" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.994130 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="10edb733-8fab-4543-ad29-3568e3de5aea" containerName="account-server" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.994141 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="bc2c7d29-d967-414d-8bb9-1bf70bcacdcd" containerName="barbican-keystone-listener" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.994151 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="bc2c7d29-d967-414d-8bb9-1bf70bcacdcd" containerName="barbican-keystone-listener-log" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.994157 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="33be1904-bd58-48cc-806a-af1dc751717c" containerName="openstack-network-exporter" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.994176 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a98c5227-c8a8-4cbc-8039-461dd14fbd5b" containerName="placement-api" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.994184 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9c2a3b1-71ed-4612-8cd0-22e396cd622c" containerName="kube-state-metrics" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.994191 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa011f19-1245-4ae8-ae2b-8773036498b3" containerName="neutron-api" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.994199 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="19189ead-bcd7-4806-be88-43cc27d5f202" 
containerName="mariadb-account-create-update" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.994206 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac55b716-d8fd-4628-8627-f94b5a4e7c78" containerName="rabbitmq" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.994212 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="2ba42f6a-1ade-45ae-bdc4-117ad2fa866b" containerName="cinder-api-log" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.994219 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="1fccb241-c75e-4cec-b3c6-3855bd6c1161" containerName="nova-metadata-metadata" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.994226 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="33be1904-bd58-48cc-806a-af1dc751717c" containerName="ovn-northd" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.994235 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="32a3bcd7-25d6-45f5-8ce6-66949357504c" containerName="proxy-httpd" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.994240 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c7187d14-f25b-4344-bf36-7d56d8e1b79c" containerName="ceilometer-notification-agent" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.994247 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="10edb733-8fab-4543-ad29-3568e3de5aea" containerName="container-updater" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.994254 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="10edb733-8fab-4543-ad29-3568e3de5aea" containerName="swift-recon-cron" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.994261 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8b9f5a8b-ee37-483c-9a86-38cc24dcb388" containerName="keystone-api" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.994268 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="0bfc3458-cc0f-4bea-9794-52c5e81fe055" containerName="rabbitmq" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.994276 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa387033-dc08-48f4-bf56-06a7f316423c" containerName="glance-httpd" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.994283 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="1f9ceb49-977a-47b3-a1f3-10d68c96ab0f" containerName="probe" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.994289 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa011f19-1245-4ae8-ae2b-8773036498b3" containerName="neutron-httpd" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.994296 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="084a5ce8-2844-42f1-92a9-973b78505050" containerName="nova-cell1-conductor-conductor" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.994315 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="2ba42f6a-1ade-45ae-bdc4-117ad2fa866b" containerName="cinder-api" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.994322 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c7187d14-f25b-4344-bf36-7d56d8e1b79c" containerName="ceilometer-central-agent" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.994328 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="10edb733-8fab-4543-ad29-3568e3de5aea" containerName="object-replicator" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 
17:06:23.994335 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="10edb733-8fab-4543-ad29-3568e3de5aea" containerName="container-server" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.994342 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="10edb733-8fab-4543-ad29-3568e3de5aea" containerName="object-expirer" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.994347 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="bfbf270c-1853-4113-b0e4-6d192abb5c5d" containerName="barbican-worker" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.994353 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="1176545e-611b-4fc7-8b03-e91ee7813fd3" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.994360 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="10edb733-8fab-4543-ad29-3568e3de5aea" containerName="container-replicator" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.994368 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="55d5d50d-edc1-4a99-9540-72a7f5f0c622" containerName="nova-cell0-conductor-conductor" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.994373 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad58cc97-cbed-48c8-ab51-ebb920a1454c" containerName="galera" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.994380 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="aed5856e-9412-48a1-ac94-cc2f4bd05633" containerName="memcached" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.994387 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="10edb733-8fab-4543-ad29-3568e3de5aea" containerName="object-updater" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.994394 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="10edb733-8fab-4543-ad29-3568e3de5aea" containerName="rsync" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.994403 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c7187d14-f25b-4344-bf36-7d56d8e1b79c" containerName="sg-core" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.994408 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="10edb733-8fab-4543-ad29-3568e3de5aea" containerName="account-replicator" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.994416 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="10edb733-8fab-4543-ad29-3568e3de5aea" containerName="account-reaper" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.994421 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="32a3bcd7-25d6-45f5-8ce6-66949357504c" containerName="proxy-server" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.994428 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="10edb733-8fab-4543-ad29-3568e3de5aea" containerName="object-auditor" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.994433 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="20ba561a-58e7-459f-ba28-ed0b68cdab9b" containerName="glance-httpd" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.994440 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c7187d14-f25b-4344-bf36-7d56d8e1b79c" containerName="proxy-httpd" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.994447 4558 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="51e263b2-d42e-46fa-99e1-e0c5aa23bcf5" containerName="nova-api-api" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.994456 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="6df6ca0e-78e9-4248-8cbe-b9934e0ad090" containerName="galera" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.994463 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="1f9ceb49-977a-47b3-a1f3-10d68c96ab0f" containerName="cinder-scheduler" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.994469 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c75ee5c-beea-4b1e-b429-91e83a472529" containerName="barbican-api-log" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.994475 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="38786f1c-754c-488d-8a13-aad7001ad778" containerName="nova-scheduler-scheduler" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.994482 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="10edb733-8fab-4543-ad29-3568e3de5aea" containerName="account-auditor" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.994490 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="10edb733-8fab-4543-ad29-3568e3de5aea" containerName="container-auditor" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.994498 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a98c5227-c8a8-4cbc-8039-461dd14fbd5b" containerName="placement-log" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.994504 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="20ba561a-58e7-459f-ba28-ed0b68cdab9b" containerName="glance-log" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.994510 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c75ee5c-beea-4b1e-b429-91e83a472529" containerName="barbican-api" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.994517 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="bfbf270c-1853-4113-b0e4-6d192abb5c5d" containerName="barbican-worker-log" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.994524 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa387033-dc08-48f4-bf56-06a7f316423c" containerName="glance-log" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.994531 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="10edb733-8fab-4543-ad29-3568e3de5aea" containerName="object-server" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.994536 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="1fccb241-c75e-4cec-b3c6-3855bd6c1161" containerName="nova-metadata-log" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.994541 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="51e263b2-d42e-46fa-99e1-e0c5aa23bcf5" containerName="nova-api-log" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.994548 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="6846508c-01bc-49e8-b98b-5df9ec57f030" containerName="dnsmasq-dns" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.994968 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-2ztxn" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.996529 4558 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-k9ht8" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.996734 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.997013 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.997899 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Jan 20 17:06:23 crc kubenswrapper[4558]: I0120 17:06:23.999684 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-2ztxn"] Jan 20 17:06:24 crc kubenswrapper[4558]: I0120 17:06:24.007245 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/c61df239-e671-4c01-8e3e-e66a37a3f4c7-crc-storage\") pod \"crc-storage-crc-2ztxn\" (UID: \"c61df239-e671-4c01-8e3e-e66a37a3f4c7\") " pod="crc-storage/crc-storage-crc-2ztxn" Jan 20 17:06:24 crc kubenswrapper[4558]: I0120 17:06:24.007291 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t9xh7\" (UniqueName: \"kubernetes.io/projected/c61df239-e671-4c01-8e3e-e66a37a3f4c7-kube-api-access-t9xh7\") pod \"crc-storage-crc-2ztxn\" (UID: \"c61df239-e671-4c01-8e3e-e66a37a3f4c7\") " pod="crc-storage/crc-storage-crc-2ztxn" Jan 20 17:06:24 crc kubenswrapper[4558]: I0120 17:06:24.007551 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/c61df239-e671-4c01-8e3e-e66a37a3f4c7-node-mnt\") pod \"crc-storage-crc-2ztxn\" (UID: \"c61df239-e671-4c01-8e3e-e66a37a3f4c7\") " pod="crc-storage/crc-storage-crc-2ztxn" Jan 20 17:06:24 crc kubenswrapper[4558]: I0120 17:06:24.108394 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/c61df239-e671-4c01-8e3e-e66a37a3f4c7-crc-storage\") pod \"crc-storage-crc-2ztxn\" (UID: \"c61df239-e671-4c01-8e3e-e66a37a3f4c7\") " pod="crc-storage/crc-storage-crc-2ztxn" Jan 20 17:06:24 crc kubenswrapper[4558]: I0120 17:06:24.108493 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t9xh7\" (UniqueName: \"kubernetes.io/projected/c61df239-e671-4c01-8e3e-e66a37a3f4c7-kube-api-access-t9xh7\") pod \"crc-storage-crc-2ztxn\" (UID: \"c61df239-e671-4c01-8e3e-e66a37a3f4c7\") " pod="crc-storage/crc-storage-crc-2ztxn" Jan 20 17:06:24 crc kubenswrapper[4558]: I0120 17:06:24.108658 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/c61df239-e671-4c01-8e3e-e66a37a3f4c7-node-mnt\") pod \"crc-storage-crc-2ztxn\" (UID: \"c61df239-e671-4c01-8e3e-e66a37a3f4c7\") " pod="crc-storage/crc-storage-crc-2ztxn" Jan 20 17:06:24 crc kubenswrapper[4558]: I0120 17:06:24.108878 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/c61df239-e671-4c01-8e3e-e66a37a3f4c7-node-mnt\") pod \"crc-storage-crc-2ztxn\" (UID: \"c61df239-e671-4c01-8e3e-e66a37a3f4c7\") " 
pod="crc-storage/crc-storage-crc-2ztxn" Jan 20 17:06:24 crc kubenswrapper[4558]: I0120 17:06:24.109074 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/c61df239-e671-4c01-8e3e-e66a37a3f4c7-crc-storage\") pod \"crc-storage-crc-2ztxn\" (UID: \"c61df239-e671-4c01-8e3e-e66a37a3f4c7\") " pod="crc-storage/crc-storage-crc-2ztxn" Jan 20 17:06:24 crc kubenswrapper[4558]: I0120 17:06:24.123637 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t9xh7\" (UniqueName: \"kubernetes.io/projected/c61df239-e671-4c01-8e3e-e66a37a3f4c7-kube-api-access-t9xh7\") pod \"crc-storage-crc-2ztxn\" (UID: \"c61df239-e671-4c01-8e3e-e66a37a3f4c7\") " pod="crc-storage/crc-storage-crc-2ztxn" Jan 20 17:06:24 crc kubenswrapper[4558]: I0120 17:06:24.309272 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-2ztxn" Jan 20 17:06:24 crc kubenswrapper[4558]: I0120 17:06:24.573136 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d6232781-485e-49dc-8a38-f5943e6c88e1" path="/var/lib/kubelet/pods/d6232781-485e-49dc-8a38-f5943e6c88e1/volumes" Jan 20 17:06:24 crc kubenswrapper[4558]: I0120 17:06:24.658561 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-2ztxn"] Jan 20 17:06:24 crc kubenswrapper[4558]: I0120 17:06:24.790635 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-2ztxn" event={"ID":"c61df239-e671-4c01-8e3e-e66a37a3f4c7","Type":"ContainerStarted","Data":"0a0eb9420adb9b47c83a19793c6c31023f34bddad7cc0dece74a8710bd2ede3e"} Jan 20 17:06:25 crc kubenswrapper[4558]: I0120 17:06:25.799115 4558 generic.go:334] "Generic (PLEG): container finished" podID="c61df239-e671-4c01-8e3e-e66a37a3f4c7" containerID="d9787b048d328e82e6bcbf1c8980b56892abf588f759d3c88cfa03d604a497d8" exitCode=0 Jan 20 17:06:25 crc kubenswrapper[4558]: I0120 17:06:25.799214 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-2ztxn" event={"ID":"c61df239-e671-4c01-8e3e-e66a37a3f4c7","Type":"ContainerDied","Data":"d9787b048d328e82e6bcbf1c8980b56892abf588f759d3c88cfa03d604a497d8"} Jan 20 17:06:25 crc kubenswrapper[4558]: E0120 17:06:25.972107 4558 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod19189ead_bcd7_4806_be88_43cc27d5f202.slice/crio-80a7fbf9ebd999f9e255d17b6edb57d7cd1a880d930872a51bd1d1c2a19b20a5.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod19189ead_bcd7_4806_be88_43cc27d5f202.slice/crio-conmon-80a7fbf9ebd999f9e255d17b6edb57d7cd1a880d930872a51bd1d1c2a19b20a5.scope\": RecentStats: unable to find data in memory cache]" Jan 20 17:06:27 crc kubenswrapper[4558]: I0120 17:06:27.012519 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-2ztxn" Jan 20 17:06:27 crc kubenswrapper[4558]: I0120 17:06:27.145574 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/c61df239-e671-4c01-8e3e-e66a37a3f4c7-crc-storage\") pod \"c61df239-e671-4c01-8e3e-e66a37a3f4c7\" (UID: \"c61df239-e671-4c01-8e3e-e66a37a3f4c7\") " Jan 20 17:06:27 crc kubenswrapper[4558]: I0120 17:06:27.145753 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t9xh7\" (UniqueName: \"kubernetes.io/projected/c61df239-e671-4c01-8e3e-e66a37a3f4c7-kube-api-access-t9xh7\") pod \"c61df239-e671-4c01-8e3e-e66a37a3f4c7\" (UID: \"c61df239-e671-4c01-8e3e-e66a37a3f4c7\") " Jan 20 17:06:27 crc kubenswrapper[4558]: I0120 17:06:27.145773 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/c61df239-e671-4c01-8e3e-e66a37a3f4c7-node-mnt\") pod \"c61df239-e671-4c01-8e3e-e66a37a3f4c7\" (UID: \"c61df239-e671-4c01-8e3e-e66a37a3f4c7\") " Jan 20 17:06:27 crc kubenswrapper[4558]: I0120 17:06:27.146007 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c61df239-e671-4c01-8e3e-e66a37a3f4c7-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "c61df239-e671-4c01-8e3e-e66a37a3f4c7" (UID: "c61df239-e671-4c01-8e3e-e66a37a3f4c7"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:06:27 crc kubenswrapper[4558]: I0120 17:06:27.149855 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c61df239-e671-4c01-8e3e-e66a37a3f4c7-kube-api-access-t9xh7" (OuterVolumeSpecName: "kube-api-access-t9xh7") pod "c61df239-e671-4c01-8e3e-e66a37a3f4c7" (UID: "c61df239-e671-4c01-8e3e-e66a37a3f4c7"). InnerVolumeSpecName "kube-api-access-t9xh7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:06:27 crc kubenswrapper[4558]: I0120 17:06:27.159944 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c61df239-e671-4c01-8e3e-e66a37a3f4c7-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "c61df239-e671-4c01-8e3e-e66a37a3f4c7" (UID: "c61df239-e671-4c01-8e3e-e66a37a3f4c7"). InnerVolumeSpecName "crc-storage". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:06:27 crc kubenswrapper[4558]: I0120 17:06:27.247022 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t9xh7\" (UniqueName: \"kubernetes.io/projected/c61df239-e671-4c01-8e3e-e66a37a3f4c7-kube-api-access-t9xh7\") on node \"crc\" DevicePath \"\"" Jan 20 17:06:27 crc kubenswrapper[4558]: I0120 17:06:27.247050 4558 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/c61df239-e671-4c01-8e3e-e66a37a3f4c7-node-mnt\") on node \"crc\" DevicePath \"\"" Jan 20 17:06:27 crc kubenswrapper[4558]: I0120 17:06:27.247061 4558 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/c61df239-e671-4c01-8e3e-e66a37a3f4c7-crc-storage\") on node \"crc\" DevicePath \"\"" Jan 20 17:06:27 crc kubenswrapper[4558]: I0120 17:06:27.329725 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 17:06:27 crc kubenswrapper[4558]: I0120 17:06:27.329775 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 17:06:27 crc kubenswrapper[4558]: I0120 17:06:27.329810 4558 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" Jan 20 17:06:27 crc kubenswrapper[4558]: I0120 17:06:27.330384 4558 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a711a286282347590079d2447ef37fa9ed0f11085d6d7a65f85e65c1c2df608f"} pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 20 17:06:27 crc kubenswrapper[4558]: I0120 17:06:27.330440 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" containerID="cri-o://a711a286282347590079d2447ef37fa9ed0f11085d6d7a65f85e65c1c2df608f" gracePeriod=600 Jan 20 17:06:27 crc kubenswrapper[4558]: E0120 17:06:27.445458 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:06:27 crc kubenswrapper[4558]: I0120 17:06:27.811903 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-2ztxn" Jan 20 17:06:27 crc kubenswrapper[4558]: I0120 17:06:27.812222 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-2ztxn" event={"ID":"c61df239-e671-4c01-8e3e-e66a37a3f4c7","Type":"ContainerDied","Data":"0a0eb9420adb9b47c83a19793c6c31023f34bddad7cc0dece74a8710bd2ede3e"} Jan 20 17:06:27 crc kubenswrapper[4558]: I0120 17:06:27.812259 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0a0eb9420adb9b47c83a19793c6c31023f34bddad7cc0dece74a8710bd2ede3e" Jan 20 17:06:27 crc kubenswrapper[4558]: I0120 17:06:27.814056 4558 generic.go:334] "Generic (PLEG): container finished" podID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerID="a711a286282347590079d2447ef37fa9ed0f11085d6d7a65f85e65c1c2df608f" exitCode=0 Jan 20 17:06:27 crc kubenswrapper[4558]: I0120 17:06:27.814091 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerDied","Data":"a711a286282347590079d2447ef37fa9ed0f11085d6d7a65f85e65c1c2df608f"} Jan 20 17:06:27 crc kubenswrapper[4558]: I0120 17:06:27.814121 4558 scope.go:117] "RemoveContainer" containerID="27436b5757afe01efb5672d1056f7069457346353ae2e6eed76a9879c2ed4ed6" Jan 20 17:06:27 crc kubenswrapper[4558]: I0120 17:06:27.814596 4558 scope.go:117] "RemoveContainer" containerID="a711a286282347590079d2447ef37fa9ed0f11085d6d7a65f85e65c1c2df608f" Jan 20 17:06:27 crc kubenswrapper[4558]: E0120 17:06:27.814829 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:06:29 crc kubenswrapper[4558]: I0120 17:06:29.723329 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-2ztxn"] Jan 20 17:06:29 crc kubenswrapper[4558]: I0120 17:06:29.727117 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-2ztxn"] Jan 20 17:06:29 crc kubenswrapper[4558]: I0120 17:06:29.818911 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-p2tx9"] Jan 20 17:06:29 crc kubenswrapper[4558]: E0120 17:06:29.819189 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19189ead-bcd7-4806-be88-43cc27d5f202" containerName="mariadb-account-create-update" Jan 20 17:06:29 crc kubenswrapper[4558]: I0120 17:06:29.819207 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="19189ead-bcd7-4806-be88-43cc27d5f202" containerName="mariadb-account-create-update" Jan 20 17:06:29 crc kubenswrapper[4558]: E0120 17:06:29.819228 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c61df239-e671-4c01-8e3e-e66a37a3f4c7" containerName="storage" Jan 20 17:06:29 crc kubenswrapper[4558]: I0120 17:06:29.819234 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c61df239-e671-4c01-8e3e-e66a37a3f4c7" containerName="storage" Jan 20 17:06:29 crc kubenswrapper[4558]: I0120 17:06:29.819364 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c61df239-e671-4c01-8e3e-e66a37a3f4c7" containerName="storage" Jan 20 17:06:29 crc kubenswrapper[4558]: I0120 
17:06:29.819379 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="19189ead-bcd7-4806-be88-43cc27d5f202" containerName="mariadb-account-create-update" Jan 20 17:06:29 crc kubenswrapper[4558]: I0120 17:06:29.819778 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-p2tx9" Jan 20 17:06:29 crc kubenswrapper[4558]: I0120 17:06:29.821082 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Jan 20 17:06:29 crc kubenswrapper[4558]: I0120 17:06:29.821112 4558 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-k9ht8" Jan 20 17:06:29 crc kubenswrapper[4558]: I0120 17:06:29.821261 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Jan 20 17:06:29 crc kubenswrapper[4558]: I0120 17:06:29.823398 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Jan 20 17:06:29 crc kubenswrapper[4558]: I0120 17:06:29.826131 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-p2tx9"] Jan 20 17:06:29 crc kubenswrapper[4558]: I0120 17:06:29.978334 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/06232454-6033-4d5a-98e9-10552b195792-node-mnt\") pod \"crc-storage-crc-p2tx9\" (UID: \"06232454-6033-4d5a-98e9-10552b195792\") " pod="crc-storage/crc-storage-crc-p2tx9" Jan 20 17:06:29 crc kubenswrapper[4558]: I0120 17:06:29.978390 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x2t94\" (UniqueName: \"kubernetes.io/projected/06232454-6033-4d5a-98e9-10552b195792-kube-api-access-x2t94\") pod \"crc-storage-crc-p2tx9\" (UID: \"06232454-6033-4d5a-98e9-10552b195792\") " pod="crc-storage/crc-storage-crc-p2tx9" Jan 20 17:06:29 crc kubenswrapper[4558]: I0120 17:06:29.978422 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/06232454-6033-4d5a-98e9-10552b195792-crc-storage\") pod \"crc-storage-crc-p2tx9\" (UID: \"06232454-6033-4d5a-98e9-10552b195792\") " pod="crc-storage/crc-storage-crc-p2tx9" Jan 20 17:06:30 crc kubenswrapper[4558]: I0120 17:06:30.079415 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/06232454-6033-4d5a-98e9-10552b195792-node-mnt\") pod \"crc-storage-crc-p2tx9\" (UID: \"06232454-6033-4d5a-98e9-10552b195792\") " pod="crc-storage/crc-storage-crc-p2tx9" Jan 20 17:06:30 crc kubenswrapper[4558]: I0120 17:06:30.079461 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x2t94\" (UniqueName: \"kubernetes.io/projected/06232454-6033-4d5a-98e9-10552b195792-kube-api-access-x2t94\") pod \"crc-storage-crc-p2tx9\" (UID: \"06232454-6033-4d5a-98e9-10552b195792\") " pod="crc-storage/crc-storage-crc-p2tx9" Jan 20 17:06:30 crc kubenswrapper[4558]: I0120 17:06:30.079485 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/06232454-6033-4d5a-98e9-10552b195792-crc-storage\") pod \"crc-storage-crc-p2tx9\" (UID: \"06232454-6033-4d5a-98e9-10552b195792\") " pod="crc-storage/crc-storage-crc-p2tx9" Jan 20 17:06:30 crc kubenswrapper[4558]: I0120 
17:06:30.079596 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/06232454-6033-4d5a-98e9-10552b195792-node-mnt\") pod \"crc-storage-crc-p2tx9\" (UID: \"06232454-6033-4d5a-98e9-10552b195792\") " pod="crc-storage/crc-storage-crc-p2tx9" Jan 20 17:06:30 crc kubenswrapper[4558]: I0120 17:06:30.080054 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/06232454-6033-4d5a-98e9-10552b195792-crc-storage\") pod \"crc-storage-crc-p2tx9\" (UID: \"06232454-6033-4d5a-98e9-10552b195792\") " pod="crc-storage/crc-storage-crc-p2tx9" Jan 20 17:06:30 crc kubenswrapper[4558]: I0120 17:06:30.093717 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x2t94\" (UniqueName: \"kubernetes.io/projected/06232454-6033-4d5a-98e9-10552b195792-kube-api-access-x2t94\") pod \"crc-storage-crc-p2tx9\" (UID: \"06232454-6033-4d5a-98e9-10552b195792\") " pod="crc-storage/crc-storage-crc-p2tx9" Jan 20 17:06:30 crc kubenswrapper[4558]: I0120 17:06:30.134552 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-p2tx9" Jan 20 17:06:30 crc kubenswrapper[4558]: I0120 17:06:30.500428 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-p2tx9"] Jan 20 17:06:30 crc kubenswrapper[4558]: I0120 17:06:30.572430 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c61df239-e671-4c01-8e3e-e66a37a3f4c7" path="/var/lib/kubelet/pods/c61df239-e671-4c01-8e3e-e66a37a3f4c7/volumes" Jan 20 17:06:30 crc kubenswrapper[4558]: I0120 17:06:30.836860 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-p2tx9" event={"ID":"06232454-6033-4d5a-98e9-10552b195792","Type":"ContainerStarted","Data":"3804639b55b1ff221e04911079d444dcfb9e859686ee504abc7049f8ced197ca"} Jan 20 17:06:31 crc kubenswrapper[4558]: I0120 17:06:31.843715 4558 generic.go:334] "Generic (PLEG): container finished" podID="06232454-6033-4d5a-98e9-10552b195792" containerID="ad35d349271ee789d2afcaa6751a4b0d4ee214cd86c3bf00b2033a0275b6e913" exitCode=0 Jan 20 17:06:31 crc kubenswrapper[4558]: I0120 17:06:31.843919 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-p2tx9" event={"ID":"06232454-6033-4d5a-98e9-10552b195792","Type":"ContainerDied","Data":"ad35d349271ee789d2afcaa6751a4b0d4ee214cd86c3bf00b2033a0275b6e913"} Jan 20 17:06:33 crc kubenswrapper[4558]: I0120 17:06:33.048006 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-p2tx9" Jan 20 17:06:33 crc kubenswrapper[4558]: I0120 17:06:33.210969 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/06232454-6033-4d5a-98e9-10552b195792-node-mnt\") pod \"06232454-6033-4d5a-98e9-10552b195792\" (UID: \"06232454-6033-4d5a-98e9-10552b195792\") " Jan 20 17:06:33 crc kubenswrapper[4558]: I0120 17:06:33.211037 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2t94\" (UniqueName: \"kubernetes.io/projected/06232454-6033-4d5a-98e9-10552b195792-kube-api-access-x2t94\") pod \"06232454-6033-4d5a-98e9-10552b195792\" (UID: \"06232454-6033-4d5a-98e9-10552b195792\") " Jan 20 17:06:33 crc kubenswrapper[4558]: I0120 17:06:33.211076 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/06232454-6033-4d5a-98e9-10552b195792-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "06232454-6033-4d5a-98e9-10552b195792" (UID: "06232454-6033-4d5a-98e9-10552b195792"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:06:33 crc kubenswrapper[4558]: I0120 17:06:33.211106 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/06232454-6033-4d5a-98e9-10552b195792-crc-storage\") pod \"06232454-6033-4d5a-98e9-10552b195792\" (UID: \"06232454-6033-4d5a-98e9-10552b195792\") " Jan 20 17:06:33 crc kubenswrapper[4558]: I0120 17:06:33.211709 4558 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/06232454-6033-4d5a-98e9-10552b195792-node-mnt\") on node \"crc\" DevicePath \"\"" Jan 20 17:06:33 crc kubenswrapper[4558]: I0120 17:06:33.214559 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/06232454-6033-4d5a-98e9-10552b195792-kube-api-access-x2t94" (OuterVolumeSpecName: "kube-api-access-x2t94") pod "06232454-6033-4d5a-98e9-10552b195792" (UID: "06232454-6033-4d5a-98e9-10552b195792"). InnerVolumeSpecName "kube-api-access-x2t94". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:06:33 crc kubenswrapper[4558]: I0120 17:06:33.223856 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/06232454-6033-4d5a-98e9-10552b195792-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "06232454-6033-4d5a-98e9-10552b195792" (UID: "06232454-6033-4d5a-98e9-10552b195792"). InnerVolumeSpecName "crc-storage". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:06:33 crc kubenswrapper[4558]: I0120 17:06:33.312914 4558 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/06232454-6033-4d5a-98e9-10552b195792-crc-storage\") on node \"crc\" DevicePath \"\"" Jan 20 17:06:33 crc kubenswrapper[4558]: I0120 17:06:33.312942 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2t94\" (UniqueName: \"kubernetes.io/projected/06232454-6033-4d5a-98e9-10552b195792-kube-api-access-x2t94\") on node \"crc\" DevicePath \"\"" Jan 20 17:06:33 crc kubenswrapper[4558]: I0120 17:06:33.855563 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-p2tx9" event={"ID":"06232454-6033-4d5a-98e9-10552b195792","Type":"ContainerDied","Data":"3804639b55b1ff221e04911079d444dcfb9e859686ee504abc7049f8ced197ca"} Jan 20 17:06:33 crc kubenswrapper[4558]: I0120 17:06:33.855603 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3804639b55b1ff221e04911079d444dcfb9e859686ee504abc7049f8ced197ca" Jan 20 17:06:33 crc kubenswrapper[4558]: I0120 17:06:33.855664 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-p2tx9" Jan 20 17:06:36 crc kubenswrapper[4558]: E0120 17:06:36.109543 4558 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod19189ead_bcd7_4806_be88_43cc27d5f202.slice/crio-80a7fbf9ebd999f9e255d17b6edb57d7cd1a880d930872a51bd1d1c2a19b20a5.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod19189ead_bcd7_4806_be88_43cc27d5f202.slice/crio-conmon-80a7fbf9ebd999f9e255d17b6edb57d7cd1a880d930872a51bd1d1c2a19b20a5.scope\": RecentStats: unable to find data in memory cache]" Jan 20 17:06:41 crc kubenswrapper[4558]: I0120 17:06:41.566206 4558 scope.go:117] "RemoveContainer" containerID="a711a286282347590079d2447ef37fa9ed0f11085d6d7a65f85e65c1c2df608f" Jan 20 17:06:41 crc kubenswrapper[4558]: E0120 17:06:41.566617 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:06:44 crc kubenswrapper[4558]: I0120 17:06:44.794536 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:06:44 crc kubenswrapper[4558]: E0120 17:06:44.795540 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06232454-6033-4d5a-98e9-10552b195792" containerName="storage" Jan 20 17:06:44 crc kubenswrapper[4558]: I0120 17:06:44.795616 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="06232454-6033-4d5a-98e9-10552b195792" containerName="storage" Jan 20 17:06:44 crc kubenswrapper[4558]: I0120 17:06:44.795804 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="06232454-6033-4d5a-98e9-10552b195792" containerName="storage" Jan 20 17:06:44 crc kubenswrapper[4558]: I0120 17:06:44.796540 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:06:44 crc kubenswrapper[4558]: I0120 17:06:44.798297 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"rabbitmq-erlang-cookie" Jan 20 17:06:44 crc kubenswrapper[4558]: I0120 17:06:44.798845 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"rabbitmq-default-user" Jan 20 17:06:44 crc kubenswrapper[4558]: I0120 17:06:44.798983 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"rabbitmq-server-conf" Jan 20 17:06:44 crc kubenswrapper[4558]: I0120 17:06:44.799132 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"rabbitmq-config-data" Jan 20 17:06:44 crc kubenswrapper[4558]: I0120 17:06:44.799393 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-rabbitmq-svc" Jan 20 17:06:44 crc kubenswrapper[4558]: I0120 17:06:44.799442 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"rabbitmq-server-dockercfg-qv45t" Jan 20 17:06:44 crc kubenswrapper[4558]: I0120 17:06:44.799633 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"rabbitmq-plugins-conf" Jan 20 17:06:44 crc kubenswrapper[4558]: I0120 17:06:44.810806 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:06:44 crc kubenswrapper[4558]: I0120 17:06:44.946798 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fspbv\" (UniqueName: \"kubernetes.io/projected/07fab146-67be-42ba-b263-ee19fe95720b-kube-api-access-fspbv\") pod \"rabbitmq-server-0\" (UID: \"07fab146-67be-42ba-b263-ee19fe95720b\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:06:44 crc kubenswrapper[4558]: I0120 17:06:44.946849 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/07fab146-67be-42ba-b263-ee19fe95720b-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"07fab146-67be-42ba-b263-ee19fe95720b\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:06:44 crc kubenswrapper[4558]: I0120 17:06:44.946882 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/07fab146-67be-42ba-b263-ee19fe95720b-config-data\") pod \"rabbitmq-server-0\" (UID: \"07fab146-67be-42ba-b263-ee19fe95720b\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:06:44 crc kubenswrapper[4558]: I0120 17:06:44.946899 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/07fab146-67be-42ba-b263-ee19fe95720b-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"07fab146-67be-42ba-b263-ee19fe95720b\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:06:44 crc kubenswrapper[4558]: I0120 17:06:44.946946 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/07fab146-67be-42ba-b263-ee19fe95720b-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"07fab146-67be-42ba-b263-ee19fe95720b\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:06:44 crc kubenswrapper[4558]: I0120 
17:06:44.946978 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/07fab146-67be-42ba-b263-ee19fe95720b-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"07fab146-67be-42ba-b263-ee19fe95720b\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:06:44 crc kubenswrapper[4558]: I0120 17:06:44.947013 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/07fab146-67be-42ba-b263-ee19fe95720b-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"07fab146-67be-42ba-b263-ee19fe95720b\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:06:44 crc kubenswrapper[4558]: I0120 17:06:44.947034 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/07fab146-67be-42ba-b263-ee19fe95720b-pod-info\") pod \"rabbitmq-server-0\" (UID: \"07fab146-67be-42ba-b263-ee19fe95720b\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:06:44 crc kubenswrapper[4558]: I0120 17:06:44.947063 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-server-0\" (UID: \"07fab146-67be-42ba-b263-ee19fe95720b\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:06:44 crc kubenswrapper[4558]: I0120 17:06:44.947080 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/07fab146-67be-42ba-b263-ee19fe95720b-server-conf\") pod \"rabbitmq-server-0\" (UID: \"07fab146-67be-42ba-b263-ee19fe95720b\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:06:44 crc kubenswrapper[4558]: I0120 17:06:44.947096 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/07fab146-67be-42ba-b263-ee19fe95720b-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"07fab146-67be-42ba-b263-ee19fe95720b\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:06:44 crc kubenswrapper[4558]: I0120 17:06:44.991480 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-0"] Jan 20 17:06:44 crc kubenswrapper[4558]: I0120 17:06:44.992488 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:06:44 crc kubenswrapper[4558]: I0120 17:06:44.994007 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"rabbitmq-cell1-config-data" Jan 20 17:06:44 crc kubenswrapper[4558]: I0120 17:06:44.994252 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-rabbitmq-cell1-svc" Jan 20 17:06:44 crc kubenswrapper[4558]: I0120 17:06:44.994442 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"rabbitmq-cell1-server-dockercfg-w99zs" Jan 20 17:06:44 crc kubenswrapper[4558]: I0120 17:06:44.994559 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"rabbitmq-cell1-erlang-cookie" Jan 20 17:06:44 crc kubenswrapper[4558]: I0120 17:06:44.994809 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"rabbitmq-cell1-default-user" Jan 20 17:06:44 crc kubenswrapper[4558]: I0120 17:06:44.994943 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"rabbitmq-cell1-server-conf" Jan 20 17:06:44 crc kubenswrapper[4558]: I0120 17:06:44.995062 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"rabbitmq-cell1-plugins-conf" Jan 20 17:06:45 crc kubenswrapper[4558]: I0120 17:06:45.000157 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-0"] Jan 20 17:06:45 crc kubenswrapper[4558]: I0120 17:06:45.048361 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fspbv\" (UniqueName: \"kubernetes.io/projected/07fab146-67be-42ba-b263-ee19fe95720b-kube-api-access-fspbv\") pod \"rabbitmq-server-0\" (UID: \"07fab146-67be-42ba-b263-ee19fe95720b\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:06:45 crc kubenswrapper[4558]: I0120 17:06:45.048406 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/07fab146-67be-42ba-b263-ee19fe95720b-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"07fab146-67be-42ba-b263-ee19fe95720b\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:06:45 crc kubenswrapper[4558]: I0120 17:06:45.048449 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/07fab146-67be-42ba-b263-ee19fe95720b-config-data\") pod \"rabbitmq-server-0\" (UID: \"07fab146-67be-42ba-b263-ee19fe95720b\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:06:45 crc kubenswrapper[4558]: I0120 17:06:45.048466 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/07fab146-67be-42ba-b263-ee19fe95720b-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"07fab146-67be-42ba-b263-ee19fe95720b\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:06:45 crc kubenswrapper[4558]: I0120 17:06:45.048486 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/07fab146-67be-42ba-b263-ee19fe95720b-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"07fab146-67be-42ba-b263-ee19fe95720b\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:06:45 crc kubenswrapper[4558]: I0120 17:06:45.048517 4558 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/07fab146-67be-42ba-b263-ee19fe95720b-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"07fab146-67be-42ba-b263-ee19fe95720b\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:06:45 crc kubenswrapper[4558]: I0120 17:06:45.048557 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/07fab146-67be-42ba-b263-ee19fe95720b-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"07fab146-67be-42ba-b263-ee19fe95720b\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:06:45 crc kubenswrapper[4558]: I0120 17:06:45.048578 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/07fab146-67be-42ba-b263-ee19fe95720b-pod-info\") pod \"rabbitmq-server-0\" (UID: \"07fab146-67be-42ba-b263-ee19fe95720b\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:06:45 crc kubenswrapper[4558]: I0120 17:06:45.048620 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-server-0\" (UID: \"07fab146-67be-42ba-b263-ee19fe95720b\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:06:45 crc kubenswrapper[4558]: I0120 17:06:45.048637 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/07fab146-67be-42ba-b263-ee19fe95720b-server-conf\") pod \"rabbitmq-server-0\" (UID: \"07fab146-67be-42ba-b263-ee19fe95720b\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:06:45 crc kubenswrapper[4558]: I0120 17:06:45.048654 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/07fab146-67be-42ba-b263-ee19fe95720b-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"07fab146-67be-42ba-b263-ee19fe95720b\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:06:45 crc kubenswrapper[4558]: I0120 17:06:45.048918 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/07fab146-67be-42ba-b263-ee19fe95720b-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"07fab146-67be-42ba-b263-ee19fe95720b\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:06:45 crc kubenswrapper[4558]: I0120 17:06:45.049590 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/07fab146-67be-42ba-b263-ee19fe95720b-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"07fab146-67be-42ba-b263-ee19fe95720b\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:06:45 crc kubenswrapper[4558]: I0120 17:06:45.049715 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/07fab146-67be-42ba-b263-ee19fe95720b-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"07fab146-67be-42ba-b263-ee19fe95720b\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:06:45 crc kubenswrapper[4558]: I0120 17:06:45.049744 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/07fab146-67be-42ba-b263-ee19fe95720b-config-data\") pod 
\"rabbitmq-server-0\" (UID: \"07fab146-67be-42ba-b263-ee19fe95720b\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:06:45 crc kubenswrapper[4558]: I0120 17:06:45.049982 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/07fab146-67be-42ba-b263-ee19fe95720b-server-conf\") pod \"rabbitmq-server-0\" (UID: \"07fab146-67be-42ba-b263-ee19fe95720b\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:06:45 crc kubenswrapper[4558]: I0120 17:06:45.050255 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-server-0\" (UID: \"07fab146-67be-42ba-b263-ee19fe95720b\") device mount path \"/mnt/openstack/pv11\"" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:06:45 crc kubenswrapper[4558]: I0120 17:06:45.053791 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/07fab146-67be-42ba-b263-ee19fe95720b-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"07fab146-67be-42ba-b263-ee19fe95720b\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:06:45 crc kubenswrapper[4558]: I0120 17:06:45.053936 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/07fab146-67be-42ba-b263-ee19fe95720b-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"07fab146-67be-42ba-b263-ee19fe95720b\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:06:45 crc kubenswrapper[4558]: I0120 17:06:45.054996 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/07fab146-67be-42ba-b263-ee19fe95720b-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"07fab146-67be-42ba-b263-ee19fe95720b\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:06:45 crc kubenswrapper[4558]: I0120 17:06:45.055849 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/07fab146-67be-42ba-b263-ee19fe95720b-pod-info\") pod \"rabbitmq-server-0\" (UID: \"07fab146-67be-42ba-b263-ee19fe95720b\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:06:45 crc kubenswrapper[4558]: I0120 17:06:45.062061 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fspbv\" (UniqueName: \"kubernetes.io/projected/07fab146-67be-42ba-b263-ee19fe95720b-kube-api-access-fspbv\") pod \"rabbitmq-server-0\" (UID: \"07fab146-67be-42ba-b263-ee19fe95720b\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:06:45 crc kubenswrapper[4558]: I0120 17:06:45.075294 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"rabbitmq-server-0\" (UID: \"07fab146-67be-42ba-b263-ee19fe95720b\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:06:45 crc kubenswrapper[4558]: I0120 17:06:45.111391 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:06:45 crc kubenswrapper[4558]: I0120 17:06:45.150105 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/39403277-bf62-47c1-8e86-cdec59f2da7b-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"39403277-bf62-47c1-8e86-cdec59f2da7b\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:06:45 crc kubenswrapper[4558]: I0120 17:06:45.150192 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"39403277-bf62-47c1-8e86-cdec59f2da7b\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:06:45 crc kubenswrapper[4558]: I0120 17:06:45.150223 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/39403277-bf62-47c1-8e86-cdec59f2da7b-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"39403277-bf62-47c1-8e86-cdec59f2da7b\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:06:45 crc kubenswrapper[4558]: I0120 17:06:45.150248 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gs7c8\" (UniqueName: \"kubernetes.io/projected/39403277-bf62-47c1-8e86-cdec59f2da7b-kube-api-access-gs7c8\") pod \"rabbitmq-cell1-server-0\" (UID: \"39403277-bf62-47c1-8e86-cdec59f2da7b\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:06:45 crc kubenswrapper[4558]: I0120 17:06:45.150268 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/39403277-bf62-47c1-8e86-cdec59f2da7b-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"39403277-bf62-47c1-8e86-cdec59f2da7b\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:06:45 crc kubenswrapper[4558]: I0120 17:06:45.150283 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/39403277-bf62-47c1-8e86-cdec59f2da7b-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"39403277-bf62-47c1-8e86-cdec59f2da7b\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:06:45 crc kubenswrapper[4558]: I0120 17:06:45.150298 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/39403277-bf62-47c1-8e86-cdec59f2da7b-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"39403277-bf62-47c1-8e86-cdec59f2da7b\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:06:45 crc kubenswrapper[4558]: I0120 17:06:45.150330 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/39403277-bf62-47c1-8e86-cdec59f2da7b-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"39403277-bf62-47c1-8e86-cdec59f2da7b\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:06:45 crc kubenswrapper[4558]: I0120 17:06:45.150358 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: 
\"kubernetes.io/projected/39403277-bf62-47c1-8e86-cdec59f2da7b-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"39403277-bf62-47c1-8e86-cdec59f2da7b\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:06:45 crc kubenswrapper[4558]: I0120 17:06:45.150396 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/39403277-bf62-47c1-8e86-cdec59f2da7b-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"39403277-bf62-47c1-8e86-cdec59f2da7b\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:06:45 crc kubenswrapper[4558]: I0120 17:06:45.150428 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/39403277-bf62-47c1-8e86-cdec59f2da7b-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"39403277-bf62-47c1-8e86-cdec59f2da7b\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:06:45 crc kubenswrapper[4558]: I0120 17:06:45.251480 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/39403277-bf62-47c1-8e86-cdec59f2da7b-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"39403277-bf62-47c1-8e86-cdec59f2da7b\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:06:45 crc kubenswrapper[4558]: I0120 17:06:45.251521 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gs7c8\" (UniqueName: \"kubernetes.io/projected/39403277-bf62-47c1-8e86-cdec59f2da7b-kube-api-access-gs7c8\") pod \"rabbitmq-cell1-server-0\" (UID: \"39403277-bf62-47c1-8e86-cdec59f2da7b\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:06:45 crc kubenswrapper[4558]: I0120 17:06:45.251545 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/39403277-bf62-47c1-8e86-cdec59f2da7b-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"39403277-bf62-47c1-8e86-cdec59f2da7b\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:06:45 crc kubenswrapper[4558]: I0120 17:06:45.251588 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/39403277-bf62-47c1-8e86-cdec59f2da7b-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"39403277-bf62-47c1-8e86-cdec59f2da7b\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:06:45 crc kubenswrapper[4558]: I0120 17:06:45.251605 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/39403277-bf62-47c1-8e86-cdec59f2da7b-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"39403277-bf62-47c1-8e86-cdec59f2da7b\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:06:45 crc kubenswrapper[4558]: I0120 17:06:45.251625 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/39403277-bf62-47c1-8e86-cdec59f2da7b-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"39403277-bf62-47c1-8e86-cdec59f2da7b\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:06:45 crc kubenswrapper[4558]: I0120 17:06:45.251639 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"pod-info\" (UniqueName: \"kubernetes.io/downward-api/39403277-bf62-47c1-8e86-cdec59f2da7b-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"39403277-bf62-47c1-8e86-cdec59f2da7b\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:06:45 crc kubenswrapper[4558]: I0120 17:06:45.251656 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/39403277-bf62-47c1-8e86-cdec59f2da7b-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"39403277-bf62-47c1-8e86-cdec59f2da7b\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:06:45 crc kubenswrapper[4558]: I0120 17:06:45.251673 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/39403277-bf62-47c1-8e86-cdec59f2da7b-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"39403277-bf62-47c1-8e86-cdec59f2da7b\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:06:45 crc kubenswrapper[4558]: I0120 17:06:45.251733 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/39403277-bf62-47c1-8e86-cdec59f2da7b-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"39403277-bf62-47c1-8e86-cdec59f2da7b\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:06:45 crc kubenswrapper[4558]: I0120 17:06:45.251772 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"39403277-bf62-47c1-8e86-cdec59f2da7b\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:06:45 crc kubenswrapper[4558]: I0120 17:06:45.251940 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"39403277-bf62-47c1-8e86-cdec59f2da7b\") device mount path \"/mnt/openstack/pv07\"" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:06:45 crc kubenswrapper[4558]: I0120 17:06:45.252291 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/39403277-bf62-47c1-8e86-cdec59f2da7b-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"39403277-bf62-47c1-8e86-cdec59f2da7b\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:06:45 crc kubenswrapper[4558]: I0120 17:06:45.252867 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/39403277-bf62-47c1-8e86-cdec59f2da7b-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"39403277-bf62-47c1-8e86-cdec59f2da7b\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:06:45 crc kubenswrapper[4558]: I0120 17:06:45.253012 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/39403277-bf62-47c1-8e86-cdec59f2da7b-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"39403277-bf62-47c1-8e86-cdec59f2da7b\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:06:45 crc kubenswrapper[4558]: I0120 17:06:45.253366 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/configmap/39403277-bf62-47c1-8e86-cdec59f2da7b-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"39403277-bf62-47c1-8e86-cdec59f2da7b\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:06:45 crc kubenswrapper[4558]: I0120 17:06:45.253942 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/39403277-bf62-47c1-8e86-cdec59f2da7b-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"39403277-bf62-47c1-8e86-cdec59f2da7b\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:06:45 crc kubenswrapper[4558]: I0120 17:06:45.255504 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/39403277-bf62-47c1-8e86-cdec59f2da7b-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"39403277-bf62-47c1-8e86-cdec59f2da7b\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:06:45 crc kubenswrapper[4558]: I0120 17:06:45.255729 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/39403277-bf62-47c1-8e86-cdec59f2da7b-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"39403277-bf62-47c1-8e86-cdec59f2da7b\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:06:45 crc kubenswrapper[4558]: I0120 17:06:45.255888 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/39403277-bf62-47c1-8e86-cdec59f2da7b-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"39403277-bf62-47c1-8e86-cdec59f2da7b\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:06:45 crc kubenswrapper[4558]: I0120 17:06:45.256881 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/39403277-bf62-47c1-8e86-cdec59f2da7b-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"39403277-bf62-47c1-8e86-cdec59f2da7b\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:06:45 crc kubenswrapper[4558]: I0120 17:06:45.265404 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gs7c8\" (UniqueName: \"kubernetes.io/projected/39403277-bf62-47c1-8e86-cdec59f2da7b-kube-api-access-gs7c8\") pod \"rabbitmq-cell1-server-0\" (UID: \"39403277-bf62-47c1-8e86-cdec59f2da7b\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:06:45 crc kubenswrapper[4558]: I0120 17:06:45.270547 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"39403277-bf62-47c1-8e86-cdec59f2da7b\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:06:45 crc kubenswrapper[4558]: I0120 17:06:45.310924 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:06:45 crc kubenswrapper[4558]: I0120 17:06:45.486425 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:06:45 crc kubenswrapper[4558]: W0120 17:06:45.499440 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod07fab146_67be_42ba_b263_ee19fe95720b.slice/crio-f851d4db758ec400cd2a5783e8b809166ecd88cabc95b5ae3feccf69f99f49b2 WatchSource:0}: Error finding container f851d4db758ec400cd2a5783e8b809166ecd88cabc95b5ae3feccf69f99f49b2: Status 404 returned error can't find the container with id f851d4db758ec400cd2a5783e8b809166ecd88cabc95b5ae3feccf69f99f49b2 Jan 20 17:06:45 crc kubenswrapper[4558]: I0120 17:06:45.699078 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-0"] Jan 20 17:06:45 crc kubenswrapper[4558]: W0120 17:06:45.701344 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod39403277_bf62_47c1_8e86_cdec59f2da7b.slice/crio-115b9114314899b77d2e49048148e18468424f9c063b59a8eda0ecc0a2b4bdb7 WatchSource:0}: Error finding container 115b9114314899b77d2e49048148e18468424f9c063b59a8eda0ecc0a2b4bdb7: Status 404 returned error can't find the container with id 115b9114314899b77d2e49048148e18468424f9c063b59a8eda0ecc0a2b4bdb7 Jan 20 17:06:45 crc kubenswrapper[4558]: I0120 17:06:45.931247 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" event={"ID":"39403277-bf62-47c1-8e86-cdec59f2da7b","Type":"ContainerStarted","Data":"115b9114314899b77d2e49048148e18468424f9c063b59a8eda0ecc0a2b4bdb7"} Jan 20 17:06:45 crc kubenswrapper[4558]: I0120 17:06:45.932221 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-0" event={"ID":"07fab146-67be-42ba-b263-ee19fe95720b","Type":"ContainerStarted","Data":"f851d4db758ec400cd2a5783e8b809166ecd88cabc95b5ae3feccf69f99f49b2"} Jan 20 17:06:46 crc kubenswrapper[4558]: E0120 17:06:46.255321 4558 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod19189ead_bcd7_4806_be88_43cc27d5f202.slice/crio-80a7fbf9ebd999f9e255d17b6edb57d7cd1a880d930872a51bd1d1c2a19b20a5.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod19189ead_bcd7_4806_be88_43cc27d5f202.slice/crio-conmon-80a7fbf9ebd999f9e255d17b6edb57d7cd1a880d930872a51bd1d1c2a19b20a5.scope\": RecentStats: unable to find data in memory cache]" Jan 20 17:06:46 crc kubenswrapper[4558]: I0120 17:06:46.573720 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:06:46 crc kubenswrapper[4558]: I0120 17:06:46.574727 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:06:46 crc kubenswrapper[4558]: I0120 17:06:46.574811 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:06:46 crc kubenswrapper[4558]: I0120 17:06:46.578585 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-galera-openstack-svc" Jan 20 17:06:46 crc kubenswrapper[4558]: I0120 17:06:46.578781 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"galera-openstack-dockercfg-8mnqk" Jan 20 17:06:46 crc kubenswrapper[4558]: I0120 17:06:46.579018 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-scripts" Jan 20 17:06:46 crc kubenswrapper[4558]: I0120 17:06:46.579198 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-config-data" Jan 20 17:06:46 crc kubenswrapper[4558]: I0120 17:06:46.583836 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"combined-ca-bundle" Jan 20 17:06:46 crc kubenswrapper[4558]: I0120 17:06:46.668729 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5558aeea-2a7f-4654-a261-a2902c9434e0-kolla-config\") pod \"openstack-galera-0\" (UID: \"5558aeea-2a7f-4654-a261-a2902c9434e0\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:06:46 crc kubenswrapper[4558]: I0120 17:06:46.668810 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/5558aeea-2a7f-4654-a261-a2902c9434e0-config-data-default\") pod \"openstack-galera-0\" (UID: \"5558aeea-2a7f-4654-a261-a2902c9434e0\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:06:46 crc kubenswrapper[4558]: I0120 17:06:46.668831 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5558aeea-2a7f-4654-a261-a2902c9434e0-operator-scripts\") pod \"openstack-galera-0\" (UID: \"5558aeea-2a7f-4654-a261-a2902c9434e0\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:06:46 crc kubenswrapper[4558]: I0120 17:06:46.668856 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5558aeea-2a7f-4654-a261-a2902c9434e0-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"5558aeea-2a7f-4654-a261-a2902c9434e0\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:06:46 crc kubenswrapper[4558]: I0120 17:06:46.668895 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4gxxw\" (UniqueName: \"kubernetes.io/projected/5558aeea-2a7f-4654-a261-a2902c9434e0-kube-api-access-4gxxw\") pod \"openstack-galera-0\" (UID: \"5558aeea-2a7f-4654-a261-a2902c9434e0\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:06:46 crc kubenswrapper[4558]: I0120 17:06:46.668933 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/5558aeea-2a7f-4654-a261-a2902c9434e0-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"5558aeea-2a7f-4654-a261-a2902c9434e0\") " 
pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:06:46 crc kubenswrapper[4558]: I0120 17:06:46.669005 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/5558aeea-2a7f-4654-a261-a2902c9434e0-config-data-generated\") pod \"openstack-galera-0\" (UID: \"5558aeea-2a7f-4654-a261-a2902c9434e0\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:06:46 crc kubenswrapper[4558]: I0120 17:06:46.669083 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage16-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage16-crc\") pod \"openstack-galera-0\" (UID: \"5558aeea-2a7f-4654-a261-a2902c9434e0\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:06:46 crc kubenswrapper[4558]: I0120 17:06:46.770924 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/5558aeea-2a7f-4654-a261-a2902c9434e0-config-data-default\") pod \"openstack-galera-0\" (UID: \"5558aeea-2a7f-4654-a261-a2902c9434e0\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:06:46 crc kubenswrapper[4558]: I0120 17:06:46.770959 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5558aeea-2a7f-4654-a261-a2902c9434e0-operator-scripts\") pod \"openstack-galera-0\" (UID: \"5558aeea-2a7f-4654-a261-a2902c9434e0\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:06:46 crc kubenswrapper[4558]: I0120 17:06:46.770993 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5558aeea-2a7f-4654-a261-a2902c9434e0-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"5558aeea-2a7f-4654-a261-a2902c9434e0\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:06:46 crc kubenswrapper[4558]: I0120 17:06:46.771060 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4gxxw\" (UniqueName: \"kubernetes.io/projected/5558aeea-2a7f-4654-a261-a2902c9434e0-kube-api-access-4gxxw\") pod \"openstack-galera-0\" (UID: \"5558aeea-2a7f-4654-a261-a2902c9434e0\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:06:46 crc kubenswrapper[4558]: I0120 17:06:46.771098 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/5558aeea-2a7f-4654-a261-a2902c9434e0-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"5558aeea-2a7f-4654-a261-a2902c9434e0\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:06:46 crc kubenswrapper[4558]: I0120 17:06:46.771146 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/5558aeea-2a7f-4654-a261-a2902c9434e0-config-data-generated\") pod \"openstack-galera-0\" (UID: \"5558aeea-2a7f-4654-a261-a2902c9434e0\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:06:46 crc kubenswrapper[4558]: I0120 17:06:46.771236 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage16-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage16-crc\") pod \"openstack-galera-0\" (UID: \"5558aeea-2a7f-4654-a261-a2902c9434e0\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:06:46 crc 
kubenswrapper[4558]: I0120 17:06:46.771281 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5558aeea-2a7f-4654-a261-a2902c9434e0-kolla-config\") pod \"openstack-galera-0\" (UID: \"5558aeea-2a7f-4654-a261-a2902c9434e0\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:06:46 crc kubenswrapper[4558]: I0120 17:06:46.771531 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage16-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage16-crc\") pod \"openstack-galera-0\" (UID: \"5558aeea-2a7f-4654-a261-a2902c9434e0\") device mount path \"/mnt/openstack/pv16\"" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:06:46 crc kubenswrapper[4558]: I0120 17:06:46.771578 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/5558aeea-2a7f-4654-a261-a2902c9434e0-config-data-generated\") pod \"openstack-galera-0\" (UID: \"5558aeea-2a7f-4654-a261-a2902c9434e0\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:06:46 crc kubenswrapper[4558]: I0120 17:06:46.771871 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/5558aeea-2a7f-4654-a261-a2902c9434e0-config-data-default\") pod \"openstack-galera-0\" (UID: \"5558aeea-2a7f-4654-a261-a2902c9434e0\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:06:46 crc kubenswrapper[4558]: I0120 17:06:46.771897 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5558aeea-2a7f-4654-a261-a2902c9434e0-kolla-config\") pod \"openstack-galera-0\" (UID: \"5558aeea-2a7f-4654-a261-a2902c9434e0\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:06:46 crc kubenswrapper[4558]: I0120 17:06:46.772470 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5558aeea-2a7f-4654-a261-a2902c9434e0-operator-scripts\") pod \"openstack-galera-0\" (UID: \"5558aeea-2a7f-4654-a261-a2902c9434e0\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:06:46 crc kubenswrapper[4558]: I0120 17:06:46.781837 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/5558aeea-2a7f-4654-a261-a2902c9434e0-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"5558aeea-2a7f-4654-a261-a2902c9434e0\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:06:46 crc kubenswrapper[4558]: I0120 17:06:46.782696 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5558aeea-2a7f-4654-a261-a2902c9434e0-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"5558aeea-2a7f-4654-a261-a2902c9434e0\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:06:46 crc kubenswrapper[4558]: I0120 17:06:46.784034 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4gxxw\" (UniqueName: \"kubernetes.io/projected/5558aeea-2a7f-4654-a261-a2902c9434e0-kube-api-access-4gxxw\") pod \"openstack-galera-0\" (UID: \"5558aeea-2a7f-4654-a261-a2902c9434e0\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:06:46 crc kubenswrapper[4558]: I0120 17:06:46.789110 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"local-storage16-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage16-crc\") pod \"openstack-galera-0\" (UID: \"5558aeea-2a7f-4654-a261-a2902c9434e0\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:06:46 crc kubenswrapper[4558]: I0120 17:06:46.903849 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:06:46 crc kubenswrapper[4558]: I0120 17:06:46.969489 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" event={"ID":"39403277-bf62-47c1-8e86-cdec59f2da7b","Type":"ContainerStarted","Data":"2a031a75cc169d112132928bc181cdf5e2a9498a2ceea6c4d92265e6e5d5799c"} Jan 20 17:06:46 crc kubenswrapper[4558]: I0120 17:06:46.987440 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-0" event={"ID":"07fab146-67be-42ba-b263-ee19fe95720b","Type":"ContainerStarted","Data":"89eca221d9e593e3a9aecdfa6ea1b10c75f176fb44073dcdb0a8d8482121ba81"} Jan 20 17:06:47 crc kubenswrapper[4558]: I0120 17:06:47.294982 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:06:47 crc kubenswrapper[4558]: W0120 17:06:47.300739 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5558aeea_2a7f_4654_a261_a2902c9434e0.slice/crio-34b1ad77eceb5d064cd6b97ce02e8b533b8f06fddeef342d20a8e7595ec71ac6 WatchSource:0}: Error finding container 34b1ad77eceb5d064cd6b97ce02e8b533b8f06fddeef342d20a8e7595ec71ac6: Status 404 returned error can't find the container with id 34b1ad77eceb5d064cd6b97ce02e8b533b8f06fddeef342d20a8e7595ec71ac6 Jan 20 17:06:47 crc kubenswrapper[4558]: I0120 17:06:47.941328 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:06:47 crc kubenswrapper[4558]: I0120 17:06:47.942490 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:06:47 crc kubenswrapper[4558]: I0120 17:06:47.945204 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-cell1-config-data" Jan 20 17:06:47 crc kubenswrapper[4558]: I0120 17:06:47.945664 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-galera-openstack-cell1-svc" Jan 20 17:06:47 crc kubenswrapper[4558]: I0120 17:06:47.946703 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"galera-openstack-cell1-dockercfg-8pjjh" Jan 20 17:06:47 crc kubenswrapper[4558]: I0120 17:06:47.946802 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-cell1-scripts" Jan 20 17:06:47 crc kubenswrapper[4558]: I0120 17:06:47.953000 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:06:47 crc kubenswrapper[4558]: I0120 17:06:47.995087 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"5558aeea-2a7f-4654-a261-a2902c9434e0","Type":"ContainerStarted","Data":"b0e23acf14cdde4592922d77c67da28178ab391aa9f227444148a567d640e55d"} Jan 20 17:06:47 crc kubenswrapper[4558]: I0120 17:06:47.995129 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"5558aeea-2a7f-4654-a261-a2902c9434e0","Type":"ContainerStarted","Data":"34b1ad77eceb5d064cd6b97ce02e8b533b8f06fddeef342d20a8e7595ec71ac6"} Jan 20 17:06:48 crc kubenswrapper[4558]: I0120 17:06:48.090914 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01d65c2e-73b2-4d9e-a659-c384a3a3e2fe-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"01d65c2e-73b2-4d9e-a659-c384a3a3e2fe\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:06:48 crc kubenswrapper[4558]: I0120 17:06:48.091104 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"openstack-cell1-galera-0\" (UID: \"01d65c2e-73b2-4d9e-a659-c384a3a3e2fe\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:06:48 crc kubenswrapper[4558]: I0120 17:06:48.091143 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/01d65c2e-73b2-4d9e-a659-c384a3a3e2fe-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"01d65c2e-73b2-4d9e-a659-c384a3a3e2fe\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:06:48 crc kubenswrapper[4558]: I0120 17:06:48.091386 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/01d65c2e-73b2-4d9e-a659-c384a3a3e2fe-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"01d65c2e-73b2-4d9e-a659-c384a3a3e2fe\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:06:48 crc kubenswrapper[4558]: I0120 17:06:48.091421 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: 
\"kubernetes.io/configmap/01d65c2e-73b2-4d9e-a659-c384a3a3e2fe-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"01d65c2e-73b2-4d9e-a659-c384a3a3e2fe\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:06:48 crc kubenswrapper[4558]: I0120 17:06:48.091448 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hqqmd\" (UniqueName: \"kubernetes.io/projected/01d65c2e-73b2-4d9e-a659-c384a3a3e2fe-kube-api-access-hqqmd\") pod \"openstack-cell1-galera-0\" (UID: \"01d65c2e-73b2-4d9e-a659-c384a3a3e2fe\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:06:48 crc kubenswrapper[4558]: I0120 17:06:48.091461 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/01d65c2e-73b2-4d9e-a659-c384a3a3e2fe-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"01d65c2e-73b2-4d9e-a659-c384a3a3e2fe\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:06:48 crc kubenswrapper[4558]: I0120 17:06:48.091508 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/01d65c2e-73b2-4d9e-a659-c384a3a3e2fe-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"01d65c2e-73b2-4d9e-a659-c384a3a3e2fe\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:06:48 crc kubenswrapper[4558]: I0120 17:06:48.193188 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01d65c2e-73b2-4d9e-a659-c384a3a3e2fe-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"01d65c2e-73b2-4d9e-a659-c384a3a3e2fe\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:06:48 crc kubenswrapper[4558]: I0120 17:06:48.193257 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"openstack-cell1-galera-0\" (UID: \"01d65c2e-73b2-4d9e-a659-c384a3a3e2fe\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:06:48 crc kubenswrapper[4558]: I0120 17:06:48.193301 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/01d65c2e-73b2-4d9e-a659-c384a3a3e2fe-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"01d65c2e-73b2-4d9e-a659-c384a3a3e2fe\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:06:48 crc kubenswrapper[4558]: I0120 17:06:48.193541 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"openstack-cell1-galera-0\" (UID: \"01d65c2e-73b2-4d9e-a659-c384a3a3e2fe\") device mount path \"/mnt/openstack/pv13\"" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:06:48 crc kubenswrapper[4558]: I0120 17:06:48.193694 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/01d65c2e-73b2-4d9e-a659-c384a3a3e2fe-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"01d65c2e-73b2-4d9e-a659-c384a3a3e2fe\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:06:48 crc kubenswrapper[4558]: I0120 17:06:48.193722 4558 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/01d65c2e-73b2-4d9e-a659-c384a3a3e2fe-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"01d65c2e-73b2-4d9e-a659-c384a3a3e2fe\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:06:48 crc kubenswrapper[4558]: I0120 17:06:48.193742 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hqqmd\" (UniqueName: \"kubernetes.io/projected/01d65c2e-73b2-4d9e-a659-c384a3a3e2fe-kube-api-access-hqqmd\") pod \"openstack-cell1-galera-0\" (UID: \"01d65c2e-73b2-4d9e-a659-c384a3a3e2fe\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:06:48 crc kubenswrapper[4558]: I0120 17:06:48.193755 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/01d65c2e-73b2-4d9e-a659-c384a3a3e2fe-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"01d65c2e-73b2-4d9e-a659-c384a3a3e2fe\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:06:48 crc kubenswrapper[4558]: I0120 17:06:48.193759 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/01d65c2e-73b2-4d9e-a659-c384a3a3e2fe-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"01d65c2e-73b2-4d9e-a659-c384a3a3e2fe\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:06:48 crc kubenswrapper[4558]: I0120 17:06:48.193814 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/01d65c2e-73b2-4d9e-a659-c384a3a3e2fe-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"01d65c2e-73b2-4d9e-a659-c384a3a3e2fe\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:06:48 crc kubenswrapper[4558]: I0120 17:06:48.194493 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/01d65c2e-73b2-4d9e-a659-c384a3a3e2fe-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"01d65c2e-73b2-4d9e-a659-c384a3a3e2fe\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:06:48 crc kubenswrapper[4558]: I0120 17:06:48.194583 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/01d65c2e-73b2-4d9e-a659-c384a3a3e2fe-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"01d65c2e-73b2-4d9e-a659-c384a3a3e2fe\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:06:48 crc kubenswrapper[4558]: I0120 17:06:48.194992 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/01d65c2e-73b2-4d9e-a659-c384a3a3e2fe-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"01d65c2e-73b2-4d9e-a659-c384a3a3e2fe\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:06:48 crc kubenswrapper[4558]: I0120 17:06:48.196785 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01d65c2e-73b2-4d9e-a659-c384a3a3e2fe-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"01d65c2e-73b2-4d9e-a659-c384a3a3e2fe\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:06:48 crc kubenswrapper[4558]: I0120 17:06:48.198693 4558 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/01d65c2e-73b2-4d9e-a659-c384a3a3e2fe-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"01d65c2e-73b2-4d9e-a659-c384a3a3e2fe\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:06:48 crc kubenswrapper[4558]: I0120 17:06:48.210606 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"openstack-cell1-galera-0\" (UID: \"01d65c2e-73b2-4d9e-a659-c384a3a3e2fe\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:06:48 crc kubenswrapper[4558]: I0120 17:06:48.216640 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hqqmd\" (UniqueName: \"kubernetes.io/projected/01d65c2e-73b2-4d9e-a659-c384a3a3e2fe-kube-api-access-hqqmd\") pod \"openstack-cell1-galera-0\" (UID: \"01d65c2e-73b2-4d9e-a659-c384a3a3e2fe\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:06:48 crc kubenswrapper[4558]: I0120 17:06:48.220267 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:06:48 crc kubenswrapper[4558]: I0120 17:06:48.221023 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:06:48 crc kubenswrapper[4558]: I0120 17:06:48.223023 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-memcached-svc" Jan 20 17:06:48 crc kubenswrapper[4558]: I0120 17:06:48.223075 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"memcached-config-data" Jan 20 17:06:48 crc kubenswrapper[4558]: I0120 17:06:48.226234 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"memcached-memcached-dockercfg-7jwkl" Jan 20 17:06:48 crc kubenswrapper[4558]: I0120 17:06:48.231725 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:06:48 crc kubenswrapper[4558]: I0120 17:06:48.256021 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:06:48 crc kubenswrapper[4558]: I0120 17:06:48.295324 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4406b36c-adb2-4042-bc92-9efed5a43942-combined-ca-bundle\") pod \"memcached-0\" (UID: \"4406b36c-adb2-4042-bc92-9efed5a43942\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:06:48 crc kubenswrapper[4558]: I0120 17:06:48.295559 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/4406b36c-adb2-4042-bc92-9efed5a43942-kolla-config\") pod \"memcached-0\" (UID: \"4406b36c-adb2-4042-bc92-9efed5a43942\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:06:48 crc kubenswrapper[4558]: I0120 17:06:48.295604 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4406b36c-adb2-4042-bc92-9efed5a43942-config-data\") pod \"memcached-0\" (UID: \"4406b36c-adb2-4042-bc92-9efed5a43942\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:06:48 crc kubenswrapper[4558]: I0120 17:06:48.295668 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/4406b36c-adb2-4042-bc92-9efed5a43942-memcached-tls-certs\") pod \"memcached-0\" (UID: \"4406b36c-adb2-4042-bc92-9efed5a43942\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:06:48 crc kubenswrapper[4558]: I0120 17:06:48.295770 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m7rcs\" (UniqueName: \"kubernetes.io/projected/4406b36c-adb2-4042-bc92-9efed5a43942-kube-api-access-m7rcs\") pod \"memcached-0\" (UID: \"4406b36c-adb2-4042-bc92-9efed5a43942\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:06:48 crc kubenswrapper[4558]: I0120 17:06:48.397074 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4406b36c-adb2-4042-bc92-9efed5a43942-combined-ca-bundle\") pod \"memcached-0\" (UID: \"4406b36c-adb2-4042-bc92-9efed5a43942\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:06:48 crc kubenswrapper[4558]: I0120 17:06:48.397114 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/4406b36c-adb2-4042-bc92-9efed5a43942-kolla-config\") pod \"memcached-0\" (UID: \"4406b36c-adb2-4042-bc92-9efed5a43942\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:06:48 crc kubenswrapper[4558]: I0120 17:06:48.397144 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4406b36c-adb2-4042-bc92-9efed5a43942-config-data\") pod \"memcached-0\" (UID: \"4406b36c-adb2-4042-bc92-9efed5a43942\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:06:48 crc kubenswrapper[4558]: I0120 17:06:48.397190 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/4406b36c-adb2-4042-bc92-9efed5a43942-memcached-tls-certs\") pod \"memcached-0\" (UID: \"4406b36c-adb2-4042-bc92-9efed5a43942\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:06:48 crc kubenswrapper[4558]: I0120 
17:06:48.397240 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m7rcs\" (UniqueName: \"kubernetes.io/projected/4406b36c-adb2-4042-bc92-9efed5a43942-kube-api-access-m7rcs\") pod \"memcached-0\" (UID: \"4406b36c-adb2-4042-bc92-9efed5a43942\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:06:48 crc kubenswrapper[4558]: I0120 17:06:48.398614 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4406b36c-adb2-4042-bc92-9efed5a43942-config-data\") pod \"memcached-0\" (UID: \"4406b36c-adb2-4042-bc92-9efed5a43942\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:06:48 crc kubenswrapper[4558]: I0120 17:06:48.398941 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/4406b36c-adb2-4042-bc92-9efed5a43942-kolla-config\") pod \"memcached-0\" (UID: \"4406b36c-adb2-4042-bc92-9efed5a43942\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:06:48 crc kubenswrapper[4558]: I0120 17:06:48.404802 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4406b36c-adb2-4042-bc92-9efed5a43942-combined-ca-bundle\") pod \"memcached-0\" (UID: \"4406b36c-adb2-4042-bc92-9efed5a43942\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:06:48 crc kubenswrapper[4558]: I0120 17:06:48.404820 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/4406b36c-adb2-4042-bc92-9efed5a43942-memcached-tls-certs\") pod \"memcached-0\" (UID: \"4406b36c-adb2-4042-bc92-9efed5a43942\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:06:48 crc kubenswrapper[4558]: I0120 17:06:48.417213 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m7rcs\" (UniqueName: \"kubernetes.io/projected/4406b36c-adb2-4042-bc92-9efed5a43942-kube-api-access-m7rcs\") pod \"memcached-0\" (UID: \"4406b36c-adb2-4042-bc92-9efed5a43942\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:06:48 crc kubenswrapper[4558]: I0120 17:06:48.555206 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:06:48 crc kubenswrapper[4558]: I0120 17:06:48.637409 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:06:48 crc kubenswrapper[4558]: W0120 17:06:48.645465 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod01d65c2e_73b2_4d9e_a659_c384a3a3e2fe.slice/crio-89b649c708086702888eb623a2f0f213c0d91f2609ba354b60a11cf2432bbb05 WatchSource:0}: Error finding container 89b649c708086702888eb623a2f0f213c0d91f2609ba354b60a11cf2432bbb05: Status 404 returned error can't find the container with id 89b649c708086702888eb623a2f0f213c0d91f2609ba354b60a11cf2432bbb05 Jan 20 17:06:48 crc kubenswrapper[4558]: I0120 17:06:48.916001 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:06:48 crc kubenswrapper[4558]: W0120 17:06:48.918366 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4406b36c_adb2_4042_bc92_9efed5a43942.slice/crio-6e0726e59512b0c7bc84ee5bbc07cb0daf6bafc6946d1f94340a122d3ddae6aa WatchSource:0}: Error finding container 6e0726e59512b0c7bc84ee5bbc07cb0daf6bafc6946d1f94340a122d3ddae6aa: Status 404 returned error can't find the container with id 6e0726e59512b0c7bc84ee5bbc07cb0daf6bafc6946d1f94340a122d3ddae6aa Jan 20 17:06:49 crc kubenswrapper[4558]: I0120 17:06:49.006775 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"4406b36c-adb2-4042-bc92-9efed5a43942","Type":"ContainerStarted","Data":"6e0726e59512b0c7bc84ee5bbc07cb0daf6bafc6946d1f94340a122d3ddae6aa"} Jan 20 17:06:49 crc kubenswrapper[4558]: I0120 17:06:49.010223 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"01d65c2e-73b2-4d9e-a659-c384a3a3e2fe","Type":"ContainerStarted","Data":"f02cae86071c5890d7fc01b166be5c563da48c0b99d44dd72907dfa48eedd578"} Jan 20 17:06:49 crc kubenswrapper[4558]: I0120 17:06:49.010266 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"01d65c2e-73b2-4d9e-a659-c384a3a3e2fe","Type":"ContainerStarted","Data":"89b649c708086702888eb623a2f0f213c0d91f2609ba354b60a11cf2432bbb05"} Jan 20 17:06:49 crc kubenswrapper[4558]: I0120 17:06:49.960927 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:06:49 crc kubenswrapper[4558]: I0120 17:06:49.962083 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:06:49 crc kubenswrapper[4558]: I0120 17:06:49.965467 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"telemetry-ceilometer-dockercfg-9f8dn" Jan 20 17:06:49 crc kubenswrapper[4558]: I0120 17:06:49.969029 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:06:50 crc kubenswrapper[4558]: I0120 17:06:50.015595 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"4406b36c-adb2-4042-bc92-9efed5a43942","Type":"ContainerStarted","Data":"133261d6818bfb9dc626eb1b737b442170f15e8780d606e9ddad15e359c16063"} Jan 20 17:06:50 crc kubenswrapper[4558]: I0120 17:06:50.015770 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:06:50 crc kubenswrapper[4558]: I0120 17:06:50.019172 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-57djt\" (UniqueName: \"kubernetes.io/projected/09945b97-d2ef-4ce6-b8af-9612a4dd3482-kube-api-access-57djt\") pod \"kube-state-metrics-0\" (UID: \"09945b97-d2ef-4ce6-b8af-9612a4dd3482\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:06:50 crc kubenswrapper[4558]: I0120 17:06:50.027623 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/memcached-0" podStartSLOduration=2.027612204 podStartE2EDuration="2.027612204s" podCreationTimestamp="2026-01-20 17:06:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:06:50.027475597 +0000 UTC m=+1503.787813564" watchObservedRunningTime="2026-01-20 17:06:50.027612204 +0000 UTC m=+1503.787950172" Jan 20 17:06:50 crc kubenswrapper[4558]: I0120 17:06:50.120866 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-57djt\" (UniqueName: \"kubernetes.io/projected/09945b97-d2ef-4ce6-b8af-9612a4dd3482-kube-api-access-57djt\") pod \"kube-state-metrics-0\" (UID: \"09945b97-d2ef-4ce6-b8af-9612a4dd3482\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:06:50 crc kubenswrapper[4558]: I0120 17:06:50.135239 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-57djt\" (UniqueName: \"kubernetes.io/projected/09945b97-d2ef-4ce6-b8af-9612a4dd3482-kube-api-access-57djt\") pod \"kube-state-metrics-0\" (UID: \"09945b97-d2ef-4ce6-b8af-9612a4dd3482\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:06:50 crc kubenswrapper[4558]: I0120 17:06:50.285910 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:06:50 crc kubenswrapper[4558]: I0120 17:06:50.645611 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:06:50 crc kubenswrapper[4558]: W0120 17:06:50.648370 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod09945b97_d2ef_4ce6_b8af_9612a4dd3482.slice/crio-98883ad4db19024fe00d11fc9a21c52ea1d1ebdab4ef91c668c16a5f4b3f420c WatchSource:0}: Error finding container 98883ad4db19024fe00d11fc9a21c52ea1d1ebdab4ef91c668c16a5f4b3f420c: Status 404 returned error can't find the container with id 98883ad4db19024fe00d11fc9a21c52ea1d1ebdab4ef91c668c16a5f4b3f420c Jan 20 17:06:50 crc kubenswrapper[4558]: I0120 17:06:50.651293 4558 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 20 17:06:51 crc kubenswrapper[4558]: I0120 17:06:51.022057 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"09945b97-d2ef-4ce6-b8af-9612a4dd3482","Type":"ContainerStarted","Data":"98883ad4db19024fe00d11fc9a21c52ea1d1ebdab4ef91c668c16a5f4b3f420c"} Jan 20 17:06:51 crc kubenswrapper[4558]: I0120 17:06:51.023432 4558 generic.go:334] "Generic (PLEG): container finished" podID="5558aeea-2a7f-4654-a261-a2902c9434e0" containerID="b0e23acf14cdde4592922d77c67da28178ab391aa9f227444148a567d640e55d" exitCode=0 Jan 20 17:06:51 crc kubenswrapper[4558]: I0120 17:06:51.023512 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"5558aeea-2a7f-4654-a261-a2902c9434e0","Type":"ContainerDied","Data":"b0e23acf14cdde4592922d77c67da28178ab391aa9f227444148a567d640e55d"} Jan 20 17:06:52 crc kubenswrapper[4558]: I0120 17:06:52.030093 4558 generic.go:334] "Generic (PLEG): container finished" podID="01d65c2e-73b2-4d9e-a659-c384a3a3e2fe" containerID="f02cae86071c5890d7fc01b166be5c563da48c0b99d44dd72907dfa48eedd578" exitCode=0 Jan 20 17:06:52 crc kubenswrapper[4558]: I0120 17:06:52.030325 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"01d65c2e-73b2-4d9e-a659-c384a3a3e2fe","Type":"ContainerDied","Data":"f02cae86071c5890d7fc01b166be5c563da48c0b99d44dd72907dfa48eedd578"} Jan 20 17:06:52 crc kubenswrapper[4558]: I0120 17:06:52.032012 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"09945b97-d2ef-4ce6-b8af-9612a4dd3482","Type":"ContainerStarted","Data":"7c92f532e9da8f4dcfbeda22a4be95c8aaf993e53dc631d60e9def3a4379b4ae"} Jan 20 17:06:52 crc kubenswrapper[4558]: I0120 17:06:52.032130 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:06:52 crc kubenswrapper[4558]: I0120 17:06:52.034338 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"5558aeea-2a7f-4654-a261-a2902c9434e0","Type":"ContainerStarted","Data":"0334d5f9b2258c2ff98366283a9bd0e77fed9de54416a50e2a8bd36057e911d2"} Jan 20 17:06:52 crc kubenswrapper[4558]: I0120 17:06:52.055449 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/kube-state-metrics-0" podStartSLOduration=2.763974128 podStartE2EDuration="3.05543587s" podCreationTimestamp="2026-01-20 17:06:49 +0000 UTC" 
firstStartedPulling="2026-01-20 17:06:50.651048074 +0000 UTC m=+1504.411386041" lastFinishedPulling="2026-01-20 17:06:50.942509815 +0000 UTC m=+1504.702847783" observedRunningTime="2026-01-20 17:06:52.055272364 +0000 UTC m=+1505.815610331" watchObservedRunningTime="2026-01-20 17:06:52.05543587 +0000 UTC m=+1505.815773837" Jan 20 17:06:52 crc kubenswrapper[4558]: I0120 17:06:52.074985 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/openstack-galera-0" podStartSLOduration=7.074969811 podStartE2EDuration="7.074969811s" podCreationTimestamp="2026-01-20 17:06:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:06:52.070282788 +0000 UTC m=+1505.830620756" watchObservedRunningTime="2026-01-20 17:06:52.074969811 +0000 UTC m=+1505.835307778" Jan 20 17:06:53 crc kubenswrapper[4558]: I0120 17:06:53.042300 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"01d65c2e-73b2-4d9e-a659-c384a3a3e2fe","Type":"ContainerStarted","Data":"7f4378e692fe617442cc1cc02c71c9e5beb567c1bbfc8f7fa7801b7b523d4e8b"} Jan 20 17:06:53 crc kubenswrapper[4558]: I0120 17:06:53.071917 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/openstack-cell1-galera-0" podStartSLOduration=7.071902107 podStartE2EDuration="7.071902107s" podCreationTimestamp="2026-01-20 17:06:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:06:53.061977335 +0000 UTC m=+1506.822315302" watchObservedRunningTime="2026-01-20 17:06:53.071902107 +0000 UTC m=+1506.832240074" Jan 20 17:06:54 crc kubenswrapper[4558]: I0120 17:06:54.079441 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:06:54 crc kubenswrapper[4558]: I0120 17:06:54.080787 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:06:54 crc kubenswrapper[4558]: I0120 17:06:54.082290 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ovncluster-ovndbcluster-nb-dockercfg-96rsj" Jan 20 17:06:54 crc kubenswrapper[4558]: I0120 17:06:54.082629 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovndbcluster-nb-scripts" Jan 20 17:06:54 crc kubenswrapper[4558]: I0120 17:06:54.082792 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovndbcluster-nb-config" Jan 20 17:06:54 crc kubenswrapper[4558]: I0120 17:06:54.082932 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ovndbcluster-nb-ovndbs" Jan 20 17:06:54 crc kubenswrapper[4558]: I0120 17:06:54.083041 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ovn-metrics" Jan 20 17:06:54 crc kubenswrapper[4558]: I0120 17:06:54.095913 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:06:54 crc kubenswrapper[4558]: I0120 17:06:54.173072 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/f98dbc6f-bd60-4064-b0d8-947d4080b9ec-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"f98dbc6f-bd60-4064-b0d8-947d4080b9ec\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:06:54 crc kubenswrapper[4558]: I0120 17:06:54.173141 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/f98dbc6f-bd60-4064-b0d8-947d4080b9ec-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"f98dbc6f-bd60-4064-b0d8-947d4080b9ec\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:06:54 crc kubenswrapper[4558]: I0120 17:06:54.173211 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f98dbc6f-bd60-4064-b0d8-947d4080b9ec-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"f98dbc6f-bd60-4064-b0d8-947d4080b9ec\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:06:54 crc kubenswrapper[4558]: I0120 17:06:54.173283 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-nb-0\" (UID: \"f98dbc6f-bd60-4064-b0d8-947d4080b9ec\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:06:54 crc kubenswrapper[4558]: I0120 17:06:54.173303 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zk4dr\" (UniqueName: \"kubernetes.io/projected/f98dbc6f-bd60-4064-b0d8-947d4080b9ec-kube-api-access-zk4dr\") pod \"ovsdbserver-nb-0\" (UID: \"f98dbc6f-bd60-4064-b0d8-947d4080b9ec\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:06:54 crc kubenswrapper[4558]: I0120 17:06:54.173344 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f98dbc6f-bd60-4064-b0d8-947d4080b9ec-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"f98dbc6f-bd60-4064-b0d8-947d4080b9ec\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 
17:06:54 crc kubenswrapper[4558]: I0120 17:06:54.173378 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f98dbc6f-bd60-4064-b0d8-947d4080b9ec-config\") pod \"ovsdbserver-nb-0\" (UID: \"f98dbc6f-bd60-4064-b0d8-947d4080b9ec\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:06:54 crc kubenswrapper[4558]: I0120 17:06:54.173410 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/f98dbc6f-bd60-4064-b0d8-947d4080b9ec-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"f98dbc6f-bd60-4064-b0d8-947d4080b9ec\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:06:54 crc kubenswrapper[4558]: I0120 17:06:54.274256 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-nb-0\" (UID: \"f98dbc6f-bd60-4064-b0d8-947d4080b9ec\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:06:54 crc kubenswrapper[4558]: I0120 17:06:54.274294 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zk4dr\" (UniqueName: \"kubernetes.io/projected/f98dbc6f-bd60-4064-b0d8-947d4080b9ec-kube-api-access-zk4dr\") pod \"ovsdbserver-nb-0\" (UID: \"f98dbc6f-bd60-4064-b0d8-947d4080b9ec\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:06:54 crc kubenswrapper[4558]: I0120 17:06:54.274343 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f98dbc6f-bd60-4064-b0d8-947d4080b9ec-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"f98dbc6f-bd60-4064-b0d8-947d4080b9ec\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:06:54 crc kubenswrapper[4558]: I0120 17:06:54.274381 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f98dbc6f-bd60-4064-b0d8-947d4080b9ec-config\") pod \"ovsdbserver-nb-0\" (UID: \"f98dbc6f-bd60-4064-b0d8-947d4080b9ec\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:06:54 crc kubenswrapper[4558]: I0120 17:06:54.274397 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/f98dbc6f-bd60-4064-b0d8-947d4080b9ec-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"f98dbc6f-bd60-4064-b0d8-947d4080b9ec\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:06:54 crc kubenswrapper[4558]: I0120 17:06:54.274437 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/f98dbc6f-bd60-4064-b0d8-947d4080b9ec-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"f98dbc6f-bd60-4064-b0d8-947d4080b9ec\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:06:54 crc kubenswrapper[4558]: I0120 17:06:54.274480 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/f98dbc6f-bd60-4064-b0d8-947d4080b9ec-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"f98dbc6f-bd60-4064-b0d8-947d4080b9ec\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:06:54 crc kubenswrapper[4558]: I0120 17:06:54.274527 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"scripts\" (UniqueName: \"kubernetes.io/configmap/f98dbc6f-bd60-4064-b0d8-947d4080b9ec-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"f98dbc6f-bd60-4064-b0d8-947d4080b9ec\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:06:54 crc kubenswrapper[4558]: I0120 17:06:54.274862 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-nb-0\" (UID: \"f98dbc6f-bd60-4064-b0d8-947d4080b9ec\") device mount path \"/mnt/openstack/pv09\"" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:06:54 crc kubenswrapper[4558]: I0120 17:06:54.275018 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/f98dbc6f-bd60-4064-b0d8-947d4080b9ec-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"f98dbc6f-bd60-4064-b0d8-947d4080b9ec\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:06:54 crc kubenswrapper[4558]: I0120 17:06:54.275300 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f98dbc6f-bd60-4064-b0d8-947d4080b9ec-config\") pod \"ovsdbserver-nb-0\" (UID: \"f98dbc6f-bd60-4064-b0d8-947d4080b9ec\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:06:54 crc kubenswrapper[4558]: I0120 17:06:54.275531 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f98dbc6f-bd60-4064-b0d8-947d4080b9ec-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"f98dbc6f-bd60-4064-b0d8-947d4080b9ec\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:06:54 crc kubenswrapper[4558]: I0120 17:06:54.279700 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/f98dbc6f-bd60-4064-b0d8-947d4080b9ec-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"f98dbc6f-bd60-4064-b0d8-947d4080b9ec\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:06:54 crc kubenswrapper[4558]: I0120 17:06:54.280007 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/f98dbc6f-bd60-4064-b0d8-947d4080b9ec-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"f98dbc6f-bd60-4064-b0d8-947d4080b9ec\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:06:54 crc kubenswrapper[4558]: I0120 17:06:54.280158 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f98dbc6f-bd60-4064-b0d8-947d4080b9ec-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"f98dbc6f-bd60-4064-b0d8-947d4080b9ec\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:06:54 crc kubenswrapper[4558]: I0120 17:06:54.286442 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zk4dr\" (UniqueName: \"kubernetes.io/projected/f98dbc6f-bd60-4064-b0d8-947d4080b9ec-kube-api-access-zk4dr\") pod \"ovsdbserver-nb-0\" (UID: \"f98dbc6f-bd60-4064-b0d8-947d4080b9ec\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:06:54 crc kubenswrapper[4558]: I0120 17:06:54.291461 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-nb-0\" (UID: \"f98dbc6f-bd60-4064-b0d8-947d4080b9ec\") " 
pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:06:54 crc kubenswrapper[4558]: I0120 17:06:54.400468 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:06:54 crc kubenswrapper[4558]: I0120 17:06:54.568264 4558 scope.go:117] "RemoveContainer" containerID="a711a286282347590079d2447ef37fa9ed0f11085d6d7a65f85e65c1c2df608f" Jan 20 17:06:54 crc kubenswrapper[4558]: E0120 17:06:54.568581 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:06:54 crc kubenswrapper[4558]: I0120 17:06:54.758709 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:06:54 crc kubenswrapper[4558]: W0120 17:06:54.763682 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf98dbc6f_bd60_4064_b0d8_947d4080b9ec.slice/crio-19366b51047dcffb431ed74f700176cc3bdbee5cc52bf32768fe8be71c1e1092 WatchSource:0}: Error finding container 19366b51047dcffb431ed74f700176cc3bdbee5cc52bf32768fe8be71c1e1092: Status 404 returned error can't find the container with id 19366b51047dcffb431ed74f700176cc3bdbee5cc52bf32768fe8be71c1e1092 Jan 20 17:06:55 crc kubenswrapper[4558]: I0120 17:06:55.058815 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"f98dbc6f-bd60-4064-b0d8-947d4080b9ec","Type":"ContainerStarted","Data":"7cbff1daf53c24e61424f2057794575c2b66d928a2e2c492b88b52dfa56a5ee3"} Jan 20 17:06:55 crc kubenswrapper[4558]: I0120 17:06:55.058854 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"f98dbc6f-bd60-4064-b0d8-947d4080b9ec","Type":"ContainerStarted","Data":"ec0223d973099ec19b7266b05971f33d632f61aa218c28a0e77f251b638c66e8"} Jan 20 17:06:55 crc kubenswrapper[4558]: I0120 17:06:55.058865 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"f98dbc6f-bd60-4064-b0d8-947d4080b9ec","Type":"ContainerStarted","Data":"19366b51047dcffb431ed74f700176cc3bdbee5cc52bf32768fe8be71c1e1092"} Jan 20 17:06:55 crc kubenswrapper[4558]: I0120 17:06:55.074833 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ovsdbserver-nb-0" podStartSLOduration=2.07481856 podStartE2EDuration="2.07481856s" podCreationTimestamp="2026-01-20 17:06:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:06:55.070653868 +0000 UTC m=+1508.830991835" watchObservedRunningTime="2026-01-20 17:06:55.07481856 +0000 UTC m=+1508.835156526" Jan 20 17:06:56 crc kubenswrapper[4558]: I0120 17:06:56.243741 4558 scope.go:117] "RemoveContainer" containerID="39af34e7d1a9f016585ee97297ecd80fd7ecc02479b2a62eee07c83b322cb2a8" Jan 20 17:06:56 crc kubenswrapper[4558]: I0120 17:06:56.275980 4558 scope.go:117] "RemoveContainer" containerID="16451790b38caf12a201fb0b698455f8add58eb4ef0a84fc3e537dd73d5257ea" Jan 20 17:06:56 crc kubenswrapper[4558]: I0120 17:06:56.904858 4558 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:06:56 crc kubenswrapper[4558]: I0120 17:06:56.904915 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:06:56 crc kubenswrapper[4558]: I0120 17:06:56.951318 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:06:56 crc kubenswrapper[4558]: I0120 17:06:56.955713 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:06:56 crc kubenswrapper[4558]: I0120 17:06:56.960191 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ovncluster-ovndbcluster-sb-dockercfg-6vzxr" Jan 20 17:06:56 crc kubenswrapper[4558]: I0120 17:06:56.962280 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovndbcluster-sb-scripts" Jan 20 17:06:56 crc kubenswrapper[4558]: I0120 17:06:56.962786 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ovndbcluster-sb-ovndbs" Jan 20 17:06:56 crc kubenswrapper[4558]: I0120 17:06:56.964059 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovndbcluster-sb-config" Jan 20 17:06:56 crc kubenswrapper[4558]: I0120 17:06:56.976993 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:06:57 crc kubenswrapper[4558]: I0120 17:06:57.012060 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/6f1a8225-d143-4ae0-9301-8025f1b639e5-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"6f1a8225-d143-4ae0-9301-8025f1b639e5\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:06:57 crc kubenswrapper[4558]: I0120 17:06:57.012138 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6f1a8225-d143-4ae0-9301-8025f1b639e5-config\") pod \"ovsdbserver-sb-0\" (UID: \"6f1a8225-d143-4ae0-9301-8025f1b639e5\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:06:57 crc kubenswrapper[4558]: I0120 17:06:57.012353 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"ovsdbserver-sb-0\" (UID: \"6f1a8225-d143-4ae0-9301-8025f1b639e5\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:06:57 crc kubenswrapper[4558]: I0120 17:06:57.012389 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6f1a8225-d143-4ae0-9301-8025f1b639e5-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"6f1a8225-d143-4ae0-9301-8025f1b639e5\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:06:57 crc kubenswrapper[4558]: I0120 17:06:57.012423 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/6f1a8225-d143-4ae0-9301-8025f1b639e5-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"6f1a8225-d143-4ae0-9301-8025f1b639e5\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:06:57 crc kubenswrapper[4558]: I0120 
17:06:57.012582 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f1a8225-d143-4ae0-9301-8025f1b639e5-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"6f1a8225-d143-4ae0-9301-8025f1b639e5\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:06:57 crc kubenswrapper[4558]: I0120 17:06:57.012697 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q4kpc\" (UniqueName: \"kubernetes.io/projected/6f1a8225-d143-4ae0-9301-8025f1b639e5-kube-api-access-q4kpc\") pod \"ovsdbserver-sb-0\" (UID: \"6f1a8225-d143-4ae0-9301-8025f1b639e5\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:06:57 crc kubenswrapper[4558]: I0120 17:06:57.012795 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/6f1a8225-d143-4ae0-9301-8025f1b639e5-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"6f1a8225-d143-4ae0-9301-8025f1b639e5\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:06:57 crc kubenswrapper[4558]: I0120 17:06:57.114654 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6f1a8225-d143-4ae0-9301-8025f1b639e5-config\") pod \"ovsdbserver-sb-0\" (UID: \"6f1a8225-d143-4ae0-9301-8025f1b639e5\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:06:57 crc kubenswrapper[4558]: I0120 17:06:57.114704 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"ovsdbserver-sb-0\" (UID: \"6f1a8225-d143-4ae0-9301-8025f1b639e5\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:06:57 crc kubenswrapper[4558]: I0120 17:06:57.114722 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6f1a8225-d143-4ae0-9301-8025f1b639e5-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"6f1a8225-d143-4ae0-9301-8025f1b639e5\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:06:57 crc kubenswrapper[4558]: I0120 17:06:57.114753 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/6f1a8225-d143-4ae0-9301-8025f1b639e5-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"6f1a8225-d143-4ae0-9301-8025f1b639e5\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:06:57 crc kubenswrapper[4558]: I0120 17:06:57.114838 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f1a8225-d143-4ae0-9301-8025f1b639e5-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"6f1a8225-d143-4ae0-9301-8025f1b639e5\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:06:57 crc kubenswrapper[4558]: I0120 17:06:57.114880 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q4kpc\" (UniqueName: \"kubernetes.io/projected/6f1a8225-d143-4ae0-9301-8025f1b639e5-kube-api-access-q4kpc\") pod \"ovsdbserver-sb-0\" (UID: \"6f1a8225-d143-4ae0-9301-8025f1b639e5\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:06:57 crc kubenswrapper[4558]: I0120 17:06:57.114914 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/6f1a8225-d143-4ae0-9301-8025f1b639e5-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"6f1a8225-d143-4ae0-9301-8025f1b639e5\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:06:57 crc kubenswrapper[4558]: I0120 17:06:57.114958 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/6f1a8225-d143-4ae0-9301-8025f1b639e5-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"6f1a8225-d143-4ae0-9301-8025f1b639e5\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:06:57 crc kubenswrapper[4558]: I0120 17:06:57.115555 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"ovsdbserver-sb-0\" (UID: \"6f1a8225-d143-4ae0-9301-8025f1b639e5\") device mount path \"/mnt/openstack/pv01\"" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:06:57 crc kubenswrapper[4558]: I0120 17:06:57.116304 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6f1a8225-d143-4ae0-9301-8025f1b639e5-config\") pod \"ovsdbserver-sb-0\" (UID: \"6f1a8225-d143-4ae0-9301-8025f1b639e5\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:06:57 crc kubenswrapper[4558]: I0120 17:06:57.117451 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6f1a8225-d143-4ae0-9301-8025f1b639e5-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"6f1a8225-d143-4ae0-9301-8025f1b639e5\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:06:57 crc kubenswrapper[4558]: I0120 17:06:57.117458 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/6f1a8225-d143-4ae0-9301-8025f1b639e5-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"6f1a8225-d143-4ae0-9301-8025f1b639e5\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:06:57 crc kubenswrapper[4558]: I0120 17:06:57.121182 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/6f1a8225-d143-4ae0-9301-8025f1b639e5-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"6f1a8225-d143-4ae0-9301-8025f1b639e5\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:06:57 crc kubenswrapper[4558]: I0120 17:06:57.121758 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/6f1a8225-d143-4ae0-9301-8025f1b639e5-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"6f1a8225-d143-4ae0-9301-8025f1b639e5\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:06:57 crc kubenswrapper[4558]: I0120 17:06:57.122608 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f1a8225-d143-4ae0-9301-8025f1b639e5-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"6f1a8225-d143-4ae0-9301-8025f1b639e5\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:06:57 crc kubenswrapper[4558]: I0120 17:06:57.132863 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q4kpc\" (UniqueName: \"kubernetes.io/projected/6f1a8225-d143-4ae0-9301-8025f1b639e5-kube-api-access-q4kpc\") pod \"ovsdbserver-sb-0\" (UID: 
\"6f1a8225-d143-4ae0-9301-8025f1b639e5\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:06:57 crc kubenswrapper[4558]: I0120 17:06:57.138579 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"ovsdbserver-sb-0\" (UID: \"6f1a8225-d143-4ae0-9301-8025f1b639e5\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:06:57 crc kubenswrapper[4558]: I0120 17:06:57.271809 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:06:57 crc kubenswrapper[4558]: I0120 17:06:57.404037 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:06:57 crc kubenswrapper[4558]: I0120 17:06:57.433433 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:06:57 crc kubenswrapper[4558]: I0120 17:06:57.645707 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:06:58 crc kubenswrapper[4558]: I0120 17:06:58.078388 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"6f1a8225-d143-4ae0-9301-8025f1b639e5","Type":"ContainerStarted","Data":"27f4034104e6baa2769f9fc2dec19cbc295799af2b5e24f6d5ea225a19e1124b"} Jan 20 17:06:58 crc kubenswrapper[4558]: I0120 17:06:58.078428 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"6f1a8225-d143-4ae0-9301-8025f1b639e5","Type":"ContainerStarted","Data":"0ed7f1c17ddf0fc44bcd37dd9e0ff1ad788674cd606ea41336b8a5c6132d841d"} Jan 20 17:06:58 crc kubenswrapper[4558]: I0120 17:06:58.078451 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"6f1a8225-d143-4ae0-9301-8025f1b639e5","Type":"ContainerStarted","Data":"5cbdc6f3d74248287ce88a82613cee08d93bdaf72455ee389246f8d62b9c7405"} Jan 20 17:06:58 crc kubenswrapper[4558]: I0120 17:06:58.078594 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:06:58 crc kubenswrapper[4558]: I0120 17:06:58.093093 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ovsdbserver-sb-0" podStartSLOduration=3.093083507 podStartE2EDuration="3.093083507s" podCreationTimestamp="2026-01-20 17:06:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:06:58.091461546 +0000 UTC m=+1511.851799504" watchObservedRunningTime="2026-01-20 17:06:58.093083507 +0000 UTC m=+1511.853421474" Jan 20 17:06:58 crc kubenswrapper[4558]: I0120 17:06:58.256945 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:06:58 crc kubenswrapper[4558]: I0120 17:06:58.256996 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:06:58 crc kubenswrapper[4558]: I0120 17:06:58.556654 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:06:58 crc kubenswrapper[4558]: I0120 17:06:58.961328 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:06:59 crc kubenswrapper[4558]: I0120 17:06:59.113335 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:06:59 crc kubenswrapper[4558]: I0120 17:06:59.132481 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:06:59 crc kubenswrapper[4558]: I0120 17:06:59.151148 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:06:59 crc kubenswrapper[4558]: I0120 17:06:59.208550 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:07:00 crc kubenswrapper[4558]: I0120 17:07:00.272663 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:07:00 crc kubenswrapper[4558]: I0120 17:07:00.290248 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:07:00 crc kubenswrapper[4558]: I0120 17:07:00.304385 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:07:01 crc kubenswrapper[4558]: I0120 17:07:01.095689 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:07:01 crc kubenswrapper[4558]: I0120 17:07:01.390106 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/swift-storage-0"] Jan 20 17:07:01 crc kubenswrapper[4558]: I0120 17:07:01.394100 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:07:01 crc kubenswrapper[4558]: I0120 17:07:01.395484 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"swift-conf" Jan 20 17:07:01 crc kubenswrapper[4558]: I0120 17:07:01.395722 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"swift-swift-dockercfg-48r6k" Jan 20 17:07:01 crc kubenswrapper[4558]: I0120 17:07:01.395762 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"swift-ring-files" Jan 20 17:07:01 crc kubenswrapper[4558]: I0120 17:07:01.395773 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"swift-storage-config-data" Jan 20 17:07:01 crc kubenswrapper[4558]: I0120 17:07:01.405665 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-storage-0"] Jan 20 17:07:01 crc kubenswrapper[4558]: I0120 17:07:01.474718 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rhtdc\" (UniqueName: \"kubernetes.io/projected/fc023815-924a-4a04-bad2-5fc862ef20ed-kube-api-access-rhtdc\") pod \"swift-storage-0\" (UID: \"fc023815-924a-4a04-bad2-5fc862ef20ed\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:07:01 crc kubenswrapper[4558]: I0120 17:07:01.474782 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"swift-storage-0\" (UID: \"fc023815-924a-4a04-bad2-5fc862ef20ed\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:07:01 crc kubenswrapper[4558]: I0120 17:07:01.474812 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/fc023815-924a-4a04-bad2-5fc862ef20ed-etc-swift\") pod \"swift-storage-0\" (UID: \"fc023815-924a-4a04-bad2-5fc862ef20ed\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:07:01 crc kubenswrapper[4558]: I0120 17:07:01.474883 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/fc023815-924a-4a04-bad2-5fc862ef20ed-cache\") pod \"swift-storage-0\" (UID: \"fc023815-924a-4a04-bad2-5fc862ef20ed\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:07:01 crc kubenswrapper[4558]: I0120 17:07:01.474932 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/fc023815-924a-4a04-bad2-5fc862ef20ed-lock\") pod \"swift-storage-0\" (UID: \"fc023815-924a-4a04-bad2-5fc862ef20ed\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:07:01 crc kubenswrapper[4558]: I0120 17:07:01.576049 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"swift-storage-0\" (UID: \"fc023815-924a-4a04-bad2-5fc862ef20ed\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:07:01 crc kubenswrapper[4558]: I0120 17:07:01.576110 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/fc023815-924a-4a04-bad2-5fc862ef20ed-etc-swift\") pod \"swift-storage-0\" (UID: \"fc023815-924a-4a04-bad2-5fc862ef20ed\") " 
pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:07:01 crc kubenswrapper[4558]: I0120 17:07:01.576229 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/fc023815-924a-4a04-bad2-5fc862ef20ed-cache\") pod \"swift-storage-0\" (UID: \"fc023815-924a-4a04-bad2-5fc862ef20ed\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:07:01 crc kubenswrapper[4558]: I0120 17:07:01.576295 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/fc023815-924a-4a04-bad2-5fc862ef20ed-lock\") pod \"swift-storage-0\" (UID: \"fc023815-924a-4a04-bad2-5fc862ef20ed\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:07:01 crc kubenswrapper[4558]: I0120 17:07:01.576344 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rhtdc\" (UniqueName: \"kubernetes.io/projected/fc023815-924a-4a04-bad2-5fc862ef20ed-kube-api-access-rhtdc\") pod \"swift-storage-0\" (UID: \"fc023815-924a-4a04-bad2-5fc862ef20ed\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:07:01 crc kubenswrapper[4558]: I0120 17:07:01.576371 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"swift-storage-0\" (UID: \"fc023815-924a-4a04-bad2-5fc862ef20ed\") device mount path \"/mnt/openstack/pv02\"" pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:07:01 crc kubenswrapper[4558]: E0120 17:07:01.576292 4558 projected.go:288] Couldn't get configMap openstack-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 20 17:07:01 crc kubenswrapper[4558]: E0120 17:07:01.576401 4558 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Jan 20 17:07:01 crc kubenswrapper[4558]: E0120 17:07:01.576443 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/fc023815-924a-4a04-bad2-5fc862ef20ed-etc-swift podName:fc023815-924a-4a04-bad2-5fc862ef20ed nodeName:}" failed. No retries permitted until 2026-01-20 17:07:02.076427486 +0000 UTC m=+1515.836765453 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/fc023815-924a-4a04-bad2-5fc862ef20ed-etc-swift") pod "swift-storage-0" (UID: "fc023815-924a-4a04-bad2-5fc862ef20ed") : configmap "swift-ring-files" not found Jan 20 17:07:01 crc kubenswrapper[4558]: I0120 17:07:01.576666 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/fc023815-924a-4a04-bad2-5fc862ef20ed-lock\") pod \"swift-storage-0\" (UID: \"fc023815-924a-4a04-bad2-5fc862ef20ed\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:07:01 crc kubenswrapper[4558]: I0120 17:07:01.576717 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/fc023815-924a-4a04-bad2-5fc862ef20ed-cache\") pod \"swift-storage-0\" (UID: \"fc023815-924a-4a04-bad2-5fc862ef20ed\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:07:01 crc kubenswrapper[4558]: I0120 17:07:01.593299 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rhtdc\" (UniqueName: \"kubernetes.io/projected/fc023815-924a-4a04-bad2-5fc862ef20ed-kube-api-access-rhtdc\") pod \"swift-storage-0\" (UID: \"fc023815-924a-4a04-bad2-5fc862ef20ed\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:07:01 crc kubenswrapper[4558]: I0120 17:07:01.595704 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"swift-storage-0\" (UID: \"fc023815-924a-4a04-bad2-5fc862ef20ed\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:07:01 crc kubenswrapper[4558]: I0120 17:07:01.777857 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-nnjf9"] Jan 20 17:07:01 crc kubenswrapper[4558]: I0120 17:07:01.778826 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-ring-rebalance-nnjf9" Jan 20 17:07:01 crc kubenswrapper[4558]: I0120 17:07:01.782508 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"swift-ring-config-data" Jan 20 17:07:01 crc kubenswrapper[4558]: I0120 17:07:01.783270 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"swift-ring-scripts" Jan 20 17:07:01 crc kubenswrapper[4558]: I0120 17:07:01.791825 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"swift-proxy-config-data" Jan 20 17:07:01 crc kubenswrapper[4558]: I0120 17:07:01.808485 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-6hv7n"] Jan 20 17:07:01 crc kubenswrapper[4558]: I0120 17:07:01.809677 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/swift-ring-rebalance-6hv7n" Jan 20 17:07:01 crc kubenswrapper[4558]: I0120 17:07:01.845872 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-nnjf9"] Jan 20 17:07:01 crc kubenswrapper[4558]: E0120 17:07:01.846536 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[combined-ca-bundle dispersionconf etc-swift kube-api-access-pms59 ring-data-devices scripts swiftconf], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack-kuttl-tests/swift-ring-rebalance-nnjf9" podUID="82a17e5c-992c-4152-99c5-7f7d9b7e707f" Jan 20 17:07:01 crc kubenswrapper[4558]: I0120 17:07:01.850558 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-6hv7n"] Jan 20 17:07:01 crc kubenswrapper[4558]: I0120 17:07:01.858042 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-nnjf9"] Jan 20 17:07:01 crc kubenswrapper[4558]: I0120 17:07:01.882265 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/82a17e5c-992c-4152-99c5-7f7d9b7e707f-dispersionconf\") pod \"swift-ring-rebalance-nnjf9\" (UID: \"82a17e5c-992c-4152-99c5-7f7d9b7e707f\") " pod="openstack-kuttl-tests/swift-ring-rebalance-nnjf9" Jan 20 17:07:01 crc kubenswrapper[4558]: I0120 17:07:01.882348 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/07a478af-b5f6-4daa-8940-f40b9fb00e2f-dispersionconf\") pod \"swift-ring-rebalance-6hv7n\" (UID: \"07a478af-b5f6-4daa-8940-f40b9fb00e2f\") " pod="openstack-kuttl-tests/swift-ring-rebalance-6hv7n" Jan 20 17:07:01 crc kubenswrapper[4558]: I0120 17:07:01.882384 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/07a478af-b5f6-4daa-8940-f40b9fb00e2f-combined-ca-bundle\") pod \"swift-ring-rebalance-6hv7n\" (UID: \"07a478af-b5f6-4daa-8940-f40b9fb00e2f\") " pod="openstack-kuttl-tests/swift-ring-rebalance-6hv7n" Jan 20 17:07:01 crc kubenswrapper[4558]: I0120 17:07:01.882438 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/82a17e5c-992c-4152-99c5-7f7d9b7e707f-etc-swift\") pod \"swift-ring-rebalance-nnjf9\" (UID: \"82a17e5c-992c-4152-99c5-7f7d9b7e707f\") " pod="openstack-kuttl-tests/swift-ring-rebalance-nnjf9" Jan 20 17:07:01 crc kubenswrapper[4558]: I0120 17:07:01.882462 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pms59\" (UniqueName: \"kubernetes.io/projected/82a17e5c-992c-4152-99c5-7f7d9b7e707f-kube-api-access-pms59\") pod \"swift-ring-rebalance-nnjf9\" (UID: \"82a17e5c-992c-4152-99c5-7f7d9b7e707f\") " pod="openstack-kuttl-tests/swift-ring-rebalance-nnjf9" Jan 20 17:07:01 crc kubenswrapper[4558]: I0120 17:07:01.882650 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/82a17e5c-992c-4152-99c5-7f7d9b7e707f-swiftconf\") pod \"swift-ring-rebalance-nnjf9\" (UID: \"82a17e5c-992c-4152-99c5-7f7d9b7e707f\") " pod="openstack-kuttl-tests/swift-ring-rebalance-nnjf9" Jan 20 17:07:01 crc kubenswrapper[4558]: I0120 
17:07:01.882717 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/82a17e5c-992c-4152-99c5-7f7d9b7e707f-ring-data-devices\") pod \"swift-ring-rebalance-nnjf9\" (UID: \"82a17e5c-992c-4152-99c5-7f7d9b7e707f\") " pod="openstack-kuttl-tests/swift-ring-rebalance-nnjf9" Jan 20 17:07:01 crc kubenswrapper[4558]: I0120 17:07:01.882771 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nl9zr\" (UniqueName: \"kubernetes.io/projected/07a478af-b5f6-4daa-8940-f40b9fb00e2f-kube-api-access-nl9zr\") pod \"swift-ring-rebalance-6hv7n\" (UID: \"07a478af-b5f6-4daa-8940-f40b9fb00e2f\") " pod="openstack-kuttl-tests/swift-ring-rebalance-6hv7n" Jan 20 17:07:01 crc kubenswrapper[4558]: I0120 17:07:01.882860 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/07a478af-b5f6-4daa-8940-f40b9fb00e2f-scripts\") pod \"swift-ring-rebalance-6hv7n\" (UID: \"07a478af-b5f6-4daa-8940-f40b9fb00e2f\") " pod="openstack-kuttl-tests/swift-ring-rebalance-6hv7n" Jan 20 17:07:01 crc kubenswrapper[4558]: I0120 17:07:01.882881 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/07a478af-b5f6-4daa-8940-f40b9fb00e2f-etc-swift\") pod \"swift-ring-rebalance-6hv7n\" (UID: \"07a478af-b5f6-4daa-8940-f40b9fb00e2f\") " pod="openstack-kuttl-tests/swift-ring-rebalance-6hv7n" Jan 20 17:07:01 crc kubenswrapper[4558]: I0120 17:07:01.882996 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/07a478af-b5f6-4daa-8940-f40b9fb00e2f-swiftconf\") pod \"swift-ring-rebalance-6hv7n\" (UID: \"07a478af-b5f6-4daa-8940-f40b9fb00e2f\") " pod="openstack-kuttl-tests/swift-ring-rebalance-6hv7n" Jan 20 17:07:01 crc kubenswrapper[4558]: I0120 17:07:01.883078 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/82a17e5c-992c-4152-99c5-7f7d9b7e707f-combined-ca-bundle\") pod \"swift-ring-rebalance-nnjf9\" (UID: \"82a17e5c-992c-4152-99c5-7f7d9b7e707f\") " pod="openstack-kuttl-tests/swift-ring-rebalance-nnjf9" Jan 20 17:07:01 crc kubenswrapper[4558]: I0120 17:07:01.883119 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/07a478af-b5f6-4daa-8940-f40b9fb00e2f-ring-data-devices\") pod \"swift-ring-rebalance-6hv7n\" (UID: \"07a478af-b5f6-4daa-8940-f40b9fb00e2f\") " pod="openstack-kuttl-tests/swift-ring-rebalance-6hv7n" Jan 20 17:07:01 crc kubenswrapper[4558]: I0120 17:07:01.883223 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/82a17e5c-992c-4152-99c5-7f7d9b7e707f-scripts\") pod \"swift-ring-rebalance-nnjf9\" (UID: \"82a17e5c-992c-4152-99c5-7f7d9b7e707f\") " pod="openstack-kuttl-tests/swift-ring-rebalance-nnjf9" Jan 20 17:07:01 crc kubenswrapper[4558]: I0120 17:07:01.984761 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/82a17e5c-992c-4152-99c5-7f7d9b7e707f-etc-swift\") pod \"swift-ring-rebalance-nnjf9\" (UID: 
\"82a17e5c-992c-4152-99c5-7f7d9b7e707f\") " pod="openstack-kuttl-tests/swift-ring-rebalance-nnjf9" Jan 20 17:07:01 crc kubenswrapper[4558]: I0120 17:07:01.984802 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pms59\" (UniqueName: \"kubernetes.io/projected/82a17e5c-992c-4152-99c5-7f7d9b7e707f-kube-api-access-pms59\") pod \"swift-ring-rebalance-nnjf9\" (UID: \"82a17e5c-992c-4152-99c5-7f7d9b7e707f\") " pod="openstack-kuttl-tests/swift-ring-rebalance-nnjf9" Jan 20 17:07:01 crc kubenswrapper[4558]: I0120 17:07:01.984828 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/82a17e5c-992c-4152-99c5-7f7d9b7e707f-swiftconf\") pod \"swift-ring-rebalance-nnjf9\" (UID: \"82a17e5c-992c-4152-99c5-7f7d9b7e707f\") " pod="openstack-kuttl-tests/swift-ring-rebalance-nnjf9" Jan 20 17:07:01 crc kubenswrapper[4558]: I0120 17:07:01.984850 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/82a17e5c-992c-4152-99c5-7f7d9b7e707f-ring-data-devices\") pod \"swift-ring-rebalance-nnjf9\" (UID: \"82a17e5c-992c-4152-99c5-7f7d9b7e707f\") " pod="openstack-kuttl-tests/swift-ring-rebalance-nnjf9" Jan 20 17:07:01 crc kubenswrapper[4558]: I0120 17:07:01.984872 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nl9zr\" (UniqueName: \"kubernetes.io/projected/07a478af-b5f6-4daa-8940-f40b9fb00e2f-kube-api-access-nl9zr\") pod \"swift-ring-rebalance-6hv7n\" (UID: \"07a478af-b5f6-4daa-8940-f40b9fb00e2f\") " pod="openstack-kuttl-tests/swift-ring-rebalance-6hv7n" Jan 20 17:07:01 crc kubenswrapper[4558]: I0120 17:07:01.984912 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/07a478af-b5f6-4daa-8940-f40b9fb00e2f-scripts\") pod \"swift-ring-rebalance-6hv7n\" (UID: \"07a478af-b5f6-4daa-8940-f40b9fb00e2f\") " pod="openstack-kuttl-tests/swift-ring-rebalance-6hv7n" Jan 20 17:07:01 crc kubenswrapper[4558]: I0120 17:07:01.984928 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/07a478af-b5f6-4daa-8940-f40b9fb00e2f-etc-swift\") pod \"swift-ring-rebalance-6hv7n\" (UID: \"07a478af-b5f6-4daa-8940-f40b9fb00e2f\") " pod="openstack-kuttl-tests/swift-ring-rebalance-6hv7n" Jan 20 17:07:01 crc kubenswrapper[4558]: I0120 17:07:01.984957 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/07a478af-b5f6-4daa-8940-f40b9fb00e2f-swiftconf\") pod \"swift-ring-rebalance-6hv7n\" (UID: \"07a478af-b5f6-4daa-8940-f40b9fb00e2f\") " pod="openstack-kuttl-tests/swift-ring-rebalance-6hv7n" Jan 20 17:07:01 crc kubenswrapper[4558]: I0120 17:07:01.984989 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/82a17e5c-992c-4152-99c5-7f7d9b7e707f-combined-ca-bundle\") pod \"swift-ring-rebalance-nnjf9\" (UID: \"82a17e5c-992c-4152-99c5-7f7d9b7e707f\") " pod="openstack-kuttl-tests/swift-ring-rebalance-nnjf9" Jan 20 17:07:01 crc kubenswrapper[4558]: I0120 17:07:01.985011 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/07a478af-b5f6-4daa-8940-f40b9fb00e2f-ring-data-devices\") pod 
\"swift-ring-rebalance-6hv7n\" (UID: \"07a478af-b5f6-4daa-8940-f40b9fb00e2f\") " pod="openstack-kuttl-tests/swift-ring-rebalance-6hv7n" Jan 20 17:07:01 crc kubenswrapper[4558]: I0120 17:07:01.985046 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/82a17e5c-992c-4152-99c5-7f7d9b7e707f-scripts\") pod \"swift-ring-rebalance-nnjf9\" (UID: \"82a17e5c-992c-4152-99c5-7f7d9b7e707f\") " pod="openstack-kuttl-tests/swift-ring-rebalance-nnjf9" Jan 20 17:07:01 crc kubenswrapper[4558]: I0120 17:07:01.985070 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/82a17e5c-992c-4152-99c5-7f7d9b7e707f-dispersionconf\") pod \"swift-ring-rebalance-nnjf9\" (UID: \"82a17e5c-992c-4152-99c5-7f7d9b7e707f\") " pod="openstack-kuttl-tests/swift-ring-rebalance-nnjf9" Jan 20 17:07:01 crc kubenswrapper[4558]: I0120 17:07:01.985089 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/07a478af-b5f6-4daa-8940-f40b9fb00e2f-dispersionconf\") pod \"swift-ring-rebalance-6hv7n\" (UID: \"07a478af-b5f6-4daa-8940-f40b9fb00e2f\") " pod="openstack-kuttl-tests/swift-ring-rebalance-6hv7n" Jan 20 17:07:01 crc kubenswrapper[4558]: I0120 17:07:01.985106 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/07a478af-b5f6-4daa-8940-f40b9fb00e2f-combined-ca-bundle\") pod \"swift-ring-rebalance-6hv7n\" (UID: \"07a478af-b5f6-4daa-8940-f40b9fb00e2f\") " pod="openstack-kuttl-tests/swift-ring-rebalance-6hv7n" Jan 20 17:07:01 crc kubenswrapper[4558]: I0120 17:07:01.985200 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/82a17e5c-992c-4152-99c5-7f7d9b7e707f-etc-swift\") pod \"swift-ring-rebalance-nnjf9\" (UID: \"82a17e5c-992c-4152-99c5-7f7d9b7e707f\") " pod="openstack-kuttl-tests/swift-ring-rebalance-nnjf9" Jan 20 17:07:01 crc kubenswrapper[4558]: I0120 17:07:01.985365 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/07a478af-b5f6-4daa-8940-f40b9fb00e2f-etc-swift\") pod \"swift-ring-rebalance-6hv7n\" (UID: \"07a478af-b5f6-4daa-8940-f40b9fb00e2f\") " pod="openstack-kuttl-tests/swift-ring-rebalance-6hv7n" Jan 20 17:07:01 crc kubenswrapper[4558]: I0120 17:07:01.985954 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/82a17e5c-992c-4152-99c5-7f7d9b7e707f-scripts\") pod \"swift-ring-rebalance-nnjf9\" (UID: \"82a17e5c-992c-4152-99c5-7f7d9b7e707f\") " pod="openstack-kuttl-tests/swift-ring-rebalance-nnjf9" Jan 20 17:07:01 crc kubenswrapper[4558]: I0120 17:07:01.985976 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/07a478af-b5f6-4daa-8940-f40b9fb00e2f-ring-data-devices\") pod \"swift-ring-rebalance-6hv7n\" (UID: \"07a478af-b5f6-4daa-8940-f40b9fb00e2f\") " pod="openstack-kuttl-tests/swift-ring-rebalance-6hv7n" Jan 20 17:07:01 crc kubenswrapper[4558]: I0120 17:07:01.985995 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/07a478af-b5f6-4daa-8940-f40b9fb00e2f-scripts\") pod \"swift-ring-rebalance-6hv7n\" (UID: \"07a478af-b5f6-4daa-8940-f40b9fb00e2f\") 
" pod="openstack-kuttl-tests/swift-ring-rebalance-6hv7n" Jan 20 17:07:01 crc kubenswrapper[4558]: I0120 17:07:01.986003 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/82a17e5c-992c-4152-99c5-7f7d9b7e707f-ring-data-devices\") pod \"swift-ring-rebalance-nnjf9\" (UID: \"82a17e5c-992c-4152-99c5-7f7d9b7e707f\") " pod="openstack-kuttl-tests/swift-ring-rebalance-nnjf9" Jan 20 17:07:01 crc kubenswrapper[4558]: I0120 17:07:01.987822 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/82a17e5c-992c-4152-99c5-7f7d9b7e707f-swiftconf\") pod \"swift-ring-rebalance-nnjf9\" (UID: \"82a17e5c-992c-4152-99c5-7f7d9b7e707f\") " pod="openstack-kuttl-tests/swift-ring-rebalance-nnjf9" Jan 20 17:07:01 crc kubenswrapper[4558]: I0120 17:07:01.987853 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/07a478af-b5f6-4daa-8940-f40b9fb00e2f-swiftconf\") pod \"swift-ring-rebalance-6hv7n\" (UID: \"07a478af-b5f6-4daa-8940-f40b9fb00e2f\") " pod="openstack-kuttl-tests/swift-ring-rebalance-6hv7n" Jan 20 17:07:01 crc kubenswrapper[4558]: I0120 17:07:01.988563 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/82a17e5c-992c-4152-99c5-7f7d9b7e707f-combined-ca-bundle\") pod \"swift-ring-rebalance-nnjf9\" (UID: \"82a17e5c-992c-4152-99c5-7f7d9b7e707f\") " pod="openstack-kuttl-tests/swift-ring-rebalance-nnjf9" Jan 20 17:07:01 crc kubenswrapper[4558]: I0120 17:07:01.989142 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/07a478af-b5f6-4daa-8940-f40b9fb00e2f-combined-ca-bundle\") pod \"swift-ring-rebalance-6hv7n\" (UID: \"07a478af-b5f6-4daa-8940-f40b9fb00e2f\") " pod="openstack-kuttl-tests/swift-ring-rebalance-6hv7n" Jan 20 17:07:01 crc kubenswrapper[4558]: I0120 17:07:01.989219 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/82a17e5c-992c-4152-99c5-7f7d9b7e707f-dispersionconf\") pod \"swift-ring-rebalance-nnjf9\" (UID: \"82a17e5c-992c-4152-99c5-7f7d9b7e707f\") " pod="openstack-kuttl-tests/swift-ring-rebalance-nnjf9" Jan 20 17:07:01 crc kubenswrapper[4558]: I0120 17:07:01.989945 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/07a478af-b5f6-4daa-8940-f40b9fb00e2f-dispersionconf\") pod \"swift-ring-rebalance-6hv7n\" (UID: \"07a478af-b5f6-4daa-8940-f40b9fb00e2f\") " pod="openstack-kuttl-tests/swift-ring-rebalance-6hv7n" Jan 20 17:07:01 crc kubenswrapper[4558]: I0120 17:07:01.999069 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pms59\" (UniqueName: \"kubernetes.io/projected/82a17e5c-992c-4152-99c5-7f7d9b7e707f-kube-api-access-pms59\") pod \"swift-ring-rebalance-nnjf9\" (UID: \"82a17e5c-992c-4152-99c5-7f7d9b7e707f\") " pod="openstack-kuttl-tests/swift-ring-rebalance-nnjf9" Jan 20 17:07:02 crc kubenswrapper[4558]: I0120 17:07:02.000570 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nl9zr\" (UniqueName: \"kubernetes.io/projected/07a478af-b5f6-4daa-8940-f40b9fb00e2f-kube-api-access-nl9zr\") pod \"swift-ring-rebalance-6hv7n\" (UID: \"07a478af-b5f6-4daa-8940-f40b9fb00e2f\") " 
pod="openstack-kuttl-tests/swift-ring-rebalance-6hv7n" Jan 20 17:07:02 crc kubenswrapper[4558]: I0120 17:07:02.086554 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/fc023815-924a-4a04-bad2-5fc862ef20ed-etc-swift\") pod \"swift-storage-0\" (UID: \"fc023815-924a-4a04-bad2-5fc862ef20ed\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:07:02 crc kubenswrapper[4558]: E0120 17:07:02.086691 4558 projected.go:288] Couldn't get configMap openstack-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 20 17:07:02 crc kubenswrapper[4558]: E0120 17:07:02.086712 4558 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Jan 20 17:07:02 crc kubenswrapper[4558]: E0120 17:07:02.086766 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/fc023815-924a-4a04-bad2-5fc862ef20ed-etc-swift podName:fc023815-924a-4a04-bad2-5fc862ef20ed nodeName:}" failed. No retries permitted until 2026-01-20 17:07:03.086748391 +0000 UTC m=+1516.847086357 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/fc023815-924a-4a04-bad2-5fc862ef20ed-etc-swift") pod "swift-storage-0" (UID: "fc023815-924a-4a04-bad2-5fc862ef20ed") : configmap "swift-ring-files" not found Jan 20 17:07:02 crc kubenswrapper[4558]: I0120 17:07:02.100741 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-ring-rebalance-nnjf9" Jan 20 17:07:02 crc kubenswrapper[4558]: I0120 17:07:02.110391 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-ring-rebalance-nnjf9" Jan 20 17:07:02 crc kubenswrapper[4558]: I0120 17:07:02.123488 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/swift-ring-rebalance-6hv7n" Jan 20 17:07:02 crc kubenswrapper[4558]: I0120 17:07:02.129844 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:07:02 crc kubenswrapper[4558]: I0120 17:07:02.187764 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/82a17e5c-992c-4152-99c5-7f7d9b7e707f-scripts\") pod \"82a17e5c-992c-4152-99c5-7f7d9b7e707f\" (UID: \"82a17e5c-992c-4152-99c5-7f7d9b7e707f\") " Jan 20 17:07:02 crc kubenswrapper[4558]: I0120 17:07:02.188050 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/82a17e5c-992c-4152-99c5-7f7d9b7e707f-combined-ca-bundle\") pod \"82a17e5c-992c-4152-99c5-7f7d9b7e707f\" (UID: \"82a17e5c-992c-4152-99c5-7f7d9b7e707f\") " Jan 20 17:07:02 crc kubenswrapper[4558]: I0120 17:07:02.188258 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/82a17e5c-992c-4152-99c5-7f7d9b7e707f-swiftconf\") pod \"82a17e5c-992c-4152-99c5-7f7d9b7e707f\" (UID: \"82a17e5c-992c-4152-99c5-7f7d9b7e707f\") " Jan 20 17:07:02 crc kubenswrapper[4558]: I0120 17:07:02.188300 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/82a17e5c-992c-4152-99c5-7f7d9b7e707f-ring-data-devices\") pod \"82a17e5c-992c-4152-99c5-7f7d9b7e707f\" (UID: \"82a17e5c-992c-4152-99c5-7f7d9b7e707f\") " Jan 20 17:07:02 crc kubenswrapper[4558]: I0120 17:07:02.188329 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/82a17e5c-992c-4152-99c5-7f7d9b7e707f-etc-swift\") pod \"82a17e5c-992c-4152-99c5-7f7d9b7e707f\" (UID: \"82a17e5c-992c-4152-99c5-7f7d9b7e707f\") " Jan 20 17:07:02 crc kubenswrapper[4558]: I0120 17:07:02.188327 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/82a17e5c-992c-4152-99c5-7f7d9b7e707f-scripts" (OuterVolumeSpecName: "scripts") pod "82a17e5c-992c-4152-99c5-7f7d9b7e707f" (UID: "82a17e5c-992c-4152-99c5-7f7d9b7e707f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:07:02 crc kubenswrapper[4558]: I0120 17:07:02.188363 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/82a17e5c-992c-4152-99c5-7f7d9b7e707f-dispersionconf\") pod \"82a17e5c-992c-4152-99c5-7f7d9b7e707f\" (UID: \"82a17e5c-992c-4152-99c5-7f7d9b7e707f\") " Jan 20 17:07:02 crc kubenswrapper[4558]: I0120 17:07:02.188409 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pms59\" (UniqueName: \"kubernetes.io/projected/82a17e5c-992c-4152-99c5-7f7d9b7e707f-kube-api-access-pms59\") pod \"82a17e5c-992c-4152-99c5-7f7d9b7e707f\" (UID: \"82a17e5c-992c-4152-99c5-7f7d9b7e707f\") " Jan 20 17:07:02 crc kubenswrapper[4558]: I0120 17:07:02.188566 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/82a17e5c-992c-4152-99c5-7f7d9b7e707f-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "82a17e5c-992c-4152-99c5-7f7d9b7e707f" (UID: "82a17e5c-992c-4152-99c5-7f7d9b7e707f"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:07:02 crc kubenswrapper[4558]: I0120 17:07:02.188598 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/82a17e5c-992c-4152-99c5-7f7d9b7e707f-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "82a17e5c-992c-4152-99c5-7f7d9b7e707f" (UID: "82a17e5c-992c-4152-99c5-7f7d9b7e707f"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:07:02 crc kubenswrapper[4558]: I0120 17:07:02.189322 4558 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/82a17e5c-992c-4152-99c5-7f7d9b7e707f-ring-data-devices\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:02 crc kubenswrapper[4558]: I0120 17:07:02.189338 4558 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/82a17e5c-992c-4152-99c5-7f7d9b7e707f-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:02 crc kubenswrapper[4558]: I0120 17:07:02.189347 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/82a17e5c-992c-4152-99c5-7f7d9b7e707f-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:02 crc kubenswrapper[4558]: I0120 17:07:02.190951 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/82a17e5c-992c-4152-99c5-7f7d9b7e707f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "82a17e5c-992c-4152-99c5-7f7d9b7e707f" (UID: "82a17e5c-992c-4152-99c5-7f7d9b7e707f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:07:02 crc kubenswrapper[4558]: I0120 17:07:02.191013 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/82a17e5c-992c-4152-99c5-7f7d9b7e707f-kube-api-access-pms59" (OuterVolumeSpecName: "kube-api-access-pms59") pod "82a17e5c-992c-4152-99c5-7f7d9b7e707f" (UID: "82a17e5c-992c-4152-99c5-7f7d9b7e707f"). InnerVolumeSpecName "kube-api-access-pms59". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:07:02 crc kubenswrapper[4558]: I0120 17:07:02.191051 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/82a17e5c-992c-4152-99c5-7f7d9b7e707f-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "82a17e5c-992c-4152-99c5-7f7d9b7e707f" (UID: "82a17e5c-992c-4152-99c5-7f7d9b7e707f"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:07:02 crc kubenswrapper[4558]: I0120 17:07:02.191644 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/82a17e5c-992c-4152-99c5-7f7d9b7e707f-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "82a17e5c-992c-4152-99c5-7f7d9b7e707f" (UID: "82a17e5c-992c-4152-99c5-7f7d9b7e707f"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:07:02 crc kubenswrapper[4558]: I0120 17:07:02.258607 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:07:02 crc kubenswrapper[4558]: I0120 17:07:02.259959 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:07:02 crc kubenswrapper[4558]: I0120 17:07:02.262733 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ovnnorthd-ovnnorthd-dockercfg-dt9hf" Jan 20 17:07:02 crc kubenswrapper[4558]: I0120 17:07:02.262922 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovnnorthd-scripts" Jan 20 17:07:02 crc kubenswrapper[4558]: I0120 17:07:02.263045 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovnnorthd-config" Jan 20 17:07:02 crc kubenswrapper[4558]: I0120 17:07:02.263610 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ovnnorthd-ovndbs" Jan 20 17:07:02 crc kubenswrapper[4558]: I0120 17:07:02.263880 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:07:02 crc kubenswrapper[4558]: I0120 17:07:02.293892 4558 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/82a17e5c-992c-4152-99c5-7f7d9b7e707f-swiftconf\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:02 crc kubenswrapper[4558]: I0120 17:07:02.293924 4558 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/82a17e5c-992c-4152-99c5-7f7d9b7e707f-dispersionconf\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:02 crc kubenswrapper[4558]: I0120 17:07:02.293941 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pms59\" (UniqueName: \"kubernetes.io/projected/82a17e5c-992c-4152-99c5-7f7d9b7e707f-kube-api-access-pms59\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:02 crc kubenswrapper[4558]: I0120 17:07:02.293954 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/82a17e5c-992c-4152-99c5-7f7d9b7e707f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:02 crc kubenswrapper[4558]: I0120 17:07:02.395512 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c5zts\" (UniqueName: \"kubernetes.io/projected/c6a5b342-eaf2-408c-828e-9bc0bf10d09e-kube-api-access-c5zts\") pod \"ovn-northd-0\" (UID: \"c6a5b342-eaf2-408c-828e-9bc0bf10d09e\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:07:02 crc kubenswrapper[4558]: I0120 17:07:02.395579 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c6a5b342-eaf2-408c-828e-9bc0bf10d09e-scripts\") pod \"ovn-northd-0\" (UID: \"c6a5b342-eaf2-408c-828e-9bc0bf10d09e\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:07:02 crc kubenswrapper[4558]: I0120 17:07:02.395741 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/c6a5b342-eaf2-408c-828e-9bc0bf10d09e-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"c6a5b342-eaf2-408c-828e-9bc0bf10d09e\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:07:02 crc kubenswrapper[4558]: I0120 17:07:02.395842 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/c6a5b342-eaf2-408c-828e-9bc0bf10d09e-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"c6a5b342-eaf2-408c-828e-9bc0bf10d09e\") " 
pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:07:02 crc kubenswrapper[4558]: I0120 17:07:02.395919 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6a5b342-eaf2-408c-828e-9bc0bf10d09e-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"c6a5b342-eaf2-408c-828e-9bc0bf10d09e\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:07:02 crc kubenswrapper[4558]: I0120 17:07:02.395943 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c6a5b342-eaf2-408c-828e-9bc0bf10d09e-config\") pod \"ovn-northd-0\" (UID: \"c6a5b342-eaf2-408c-828e-9bc0bf10d09e\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:07:02 crc kubenswrapper[4558]: I0120 17:07:02.395961 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/c6a5b342-eaf2-408c-828e-9bc0bf10d09e-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"c6a5b342-eaf2-408c-828e-9bc0bf10d09e\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:07:02 crc kubenswrapper[4558]: I0120 17:07:02.497387 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/c6a5b342-eaf2-408c-828e-9bc0bf10d09e-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"c6a5b342-eaf2-408c-828e-9bc0bf10d09e\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:07:02 crc kubenswrapper[4558]: I0120 17:07:02.497438 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/c6a5b342-eaf2-408c-828e-9bc0bf10d09e-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"c6a5b342-eaf2-408c-828e-9bc0bf10d09e\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:07:02 crc kubenswrapper[4558]: I0120 17:07:02.497489 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6a5b342-eaf2-408c-828e-9bc0bf10d09e-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"c6a5b342-eaf2-408c-828e-9bc0bf10d09e\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:07:02 crc kubenswrapper[4558]: I0120 17:07:02.497509 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c6a5b342-eaf2-408c-828e-9bc0bf10d09e-config\") pod \"ovn-northd-0\" (UID: \"c6a5b342-eaf2-408c-828e-9bc0bf10d09e\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:07:02 crc kubenswrapper[4558]: I0120 17:07:02.497527 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/c6a5b342-eaf2-408c-828e-9bc0bf10d09e-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"c6a5b342-eaf2-408c-828e-9bc0bf10d09e\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:07:02 crc kubenswrapper[4558]: I0120 17:07:02.497558 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c5zts\" (UniqueName: \"kubernetes.io/projected/c6a5b342-eaf2-408c-828e-9bc0bf10d09e-kube-api-access-c5zts\") pod \"ovn-northd-0\" (UID: \"c6a5b342-eaf2-408c-828e-9bc0bf10d09e\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:07:02 crc kubenswrapper[4558]: I0120 17:07:02.497580 4558 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c6a5b342-eaf2-408c-828e-9bc0bf10d09e-scripts\") pod \"ovn-northd-0\" (UID: \"c6a5b342-eaf2-408c-828e-9bc0bf10d09e\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:07:02 crc kubenswrapper[4558]: I0120 17:07:02.498283 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c6a5b342-eaf2-408c-828e-9bc0bf10d09e-scripts\") pod \"ovn-northd-0\" (UID: \"c6a5b342-eaf2-408c-828e-9bc0bf10d09e\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:07:02 crc kubenswrapper[4558]: I0120 17:07:02.499334 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/c6a5b342-eaf2-408c-828e-9bc0bf10d09e-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"c6a5b342-eaf2-408c-828e-9bc0bf10d09e\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:07:02 crc kubenswrapper[4558]: I0120 17:07:02.499336 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c6a5b342-eaf2-408c-828e-9bc0bf10d09e-config\") pod \"ovn-northd-0\" (UID: \"c6a5b342-eaf2-408c-828e-9bc0bf10d09e\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:07:02 crc kubenswrapper[4558]: I0120 17:07:02.502816 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/c6a5b342-eaf2-408c-828e-9bc0bf10d09e-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"c6a5b342-eaf2-408c-828e-9bc0bf10d09e\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:07:02 crc kubenswrapper[4558]: I0120 17:07:02.502829 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/c6a5b342-eaf2-408c-828e-9bc0bf10d09e-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"c6a5b342-eaf2-408c-828e-9bc0bf10d09e\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:07:02 crc kubenswrapper[4558]: I0120 17:07:02.503686 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6a5b342-eaf2-408c-828e-9bc0bf10d09e-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"c6a5b342-eaf2-408c-828e-9bc0bf10d09e\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:07:02 crc kubenswrapper[4558]: I0120 17:07:02.518151 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-6hv7n"] Jan 20 17:07:02 crc kubenswrapper[4558]: I0120 17:07:02.518944 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c5zts\" (UniqueName: \"kubernetes.io/projected/c6a5b342-eaf2-408c-828e-9bc0bf10d09e-kube-api-access-c5zts\") pod \"ovn-northd-0\" (UID: \"c6a5b342-eaf2-408c-828e-9bc0bf10d09e\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:07:02 crc kubenswrapper[4558]: I0120 17:07:02.575833 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:07:02 crc kubenswrapper[4558]: I0120 17:07:02.975800 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:07:03 crc kubenswrapper[4558]: I0120 17:07:03.114790 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-ring-rebalance-6hv7n" event={"ID":"07a478af-b5f6-4daa-8940-f40b9fb00e2f","Type":"ContainerStarted","Data":"6b1947119d80650089ad705d253d7d5004d6ff8894f34d16dafbb8da8a02235d"} Jan 20 17:07:03 crc kubenswrapper[4558]: I0120 17:07:03.114837 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-ring-rebalance-6hv7n" event={"ID":"07a478af-b5f6-4daa-8940-f40b9fb00e2f","Type":"ContainerStarted","Data":"2255d0ce22a5af41c9467f6b07e1375f8b33217a2e8fb25daa7754509171c7b1"} Jan 20 17:07:03 crc kubenswrapper[4558]: I0120 17:07:03.117879 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/fc023815-924a-4a04-bad2-5fc862ef20ed-etc-swift\") pod \"swift-storage-0\" (UID: \"fc023815-924a-4a04-bad2-5fc862ef20ed\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:07:03 crc kubenswrapper[4558]: E0120 17:07:03.118028 4558 projected.go:288] Couldn't get configMap openstack-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 20 17:07:03 crc kubenswrapper[4558]: E0120 17:07:03.118048 4558 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Jan 20 17:07:03 crc kubenswrapper[4558]: E0120 17:07:03.118092 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/fc023815-924a-4a04-bad2-5fc862ef20ed-etc-swift podName:fc023815-924a-4a04-bad2-5fc862ef20ed nodeName:}" failed. No retries permitted until 2026-01-20 17:07:05.11807465 +0000 UTC m=+1518.878412616 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/fc023815-924a-4a04-bad2-5fc862ef20ed-etc-swift") pod "swift-storage-0" (UID: "fc023815-924a-4a04-bad2-5fc862ef20ed") : configmap "swift-ring-files" not found Jan 20 17:07:03 crc kubenswrapper[4558]: I0120 17:07:03.118190 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"c6a5b342-eaf2-408c-828e-9bc0bf10d09e","Type":"ContainerStarted","Data":"f7cd21ca9b08cd59a47ba406223b128ca14eff70ac8265c04176b9f42593f90c"} Jan 20 17:07:03 crc kubenswrapper[4558]: I0120 17:07:03.118214 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/swift-ring-rebalance-nnjf9" Jan 20 17:07:03 crc kubenswrapper[4558]: I0120 17:07:03.134042 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/swift-ring-rebalance-6hv7n" podStartSLOduration=2.134027386 podStartE2EDuration="2.134027386s" podCreationTimestamp="2026-01-20 17:07:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:07:03.132065226 +0000 UTC m=+1516.892403193" watchObservedRunningTime="2026-01-20 17:07:03.134027386 +0000 UTC m=+1516.894365353" Jan 20 17:07:03 crc kubenswrapper[4558]: I0120 17:07:03.167257 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-nnjf9"] Jan 20 17:07:03 crc kubenswrapper[4558]: I0120 17:07:03.172179 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-nnjf9"] Jan 20 17:07:04 crc kubenswrapper[4558]: I0120 17:07:04.127504 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"c6a5b342-eaf2-408c-828e-9bc0bf10d09e","Type":"ContainerStarted","Data":"26c2170a07662d397db2cd598e3f8c54887440864fb9753f04f3060866b77ade"} Jan 20 17:07:04 crc kubenswrapper[4558]: I0120 17:07:04.127928 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"c6a5b342-eaf2-408c-828e-9bc0bf10d09e","Type":"ContainerStarted","Data":"14a9e266c3e7805021dc147f925c01b6b1d5dec0c4a69dce9222790ed28a80b1"} Jan 20 17:07:04 crc kubenswrapper[4558]: I0120 17:07:04.151655 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ovn-northd-0" podStartSLOduration=2.151636962 podStartE2EDuration="2.151636962s" podCreationTimestamp="2026-01-20 17:07:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:07:04.148137081 +0000 UTC m=+1517.908475048" watchObservedRunningTime="2026-01-20 17:07:04.151636962 +0000 UTC m=+1517.911974929" Jan 20 17:07:04 crc kubenswrapper[4558]: I0120 17:07:04.572540 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="82a17e5c-992c-4152-99c5-7f7d9b7e707f" path="/var/lib/kubelet/pods/82a17e5c-992c-4152-99c5-7f7d9b7e707f/volumes" Jan 20 17:07:05 crc kubenswrapper[4558]: I0120 17:07:05.133785 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:07:05 crc kubenswrapper[4558]: I0120 17:07:05.146784 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/fc023815-924a-4a04-bad2-5fc862ef20ed-etc-swift\") pod \"swift-storage-0\" (UID: \"fc023815-924a-4a04-bad2-5fc862ef20ed\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:07:05 crc kubenswrapper[4558]: E0120 17:07:05.146953 4558 projected.go:288] Couldn't get configMap openstack-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 20 17:07:05 crc kubenswrapper[4558]: E0120 17:07:05.146969 4558 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Jan 20 17:07:05 crc kubenswrapper[4558]: E0120 17:07:05.147003 4558 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/projected/fc023815-924a-4a04-bad2-5fc862ef20ed-etc-swift podName:fc023815-924a-4a04-bad2-5fc862ef20ed nodeName:}" failed. No retries permitted until 2026-01-20 17:07:09.1469914 +0000 UTC m=+1522.907329368 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/fc023815-924a-4a04-bad2-5fc862ef20ed-etc-swift") pod "swift-storage-0" (UID: "fc023815-924a-4a04-bad2-5fc862ef20ed") : configmap "swift-ring-files" not found Jan 20 17:07:05 crc kubenswrapper[4558]: I0120 17:07:05.531817 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/root-account-create-update-pq9r8"] Jan 20 17:07:05 crc kubenswrapper[4558]: I0120 17:07:05.532894 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-pq9r8" Jan 20 17:07:05 crc kubenswrapper[4558]: I0120 17:07:05.534367 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"openstack-mariadb-root-db-secret" Jan 20 17:07:05 crc kubenswrapper[4558]: I0120 17:07:05.539390 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-pq9r8"] Jan 20 17:07:05 crc kubenswrapper[4558]: I0120 17:07:05.652787 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ddd4d7ed-8a4b-40de-88b9-b4d7e90e9f33-operator-scripts\") pod \"root-account-create-update-pq9r8\" (UID: \"ddd4d7ed-8a4b-40de-88b9-b4d7e90e9f33\") " pod="openstack-kuttl-tests/root-account-create-update-pq9r8" Jan 20 17:07:05 crc kubenswrapper[4558]: I0120 17:07:05.652826 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qnc5b\" (UniqueName: \"kubernetes.io/projected/ddd4d7ed-8a4b-40de-88b9-b4d7e90e9f33-kube-api-access-qnc5b\") pod \"root-account-create-update-pq9r8\" (UID: \"ddd4d7ed-8a4b-40de-88b9-b4d7e90e9f33\") " pod="openstack-kuttl-tests/root-account-create-update-pq9r8" Jan 20 17:07:05 crc kubenswrapper[4558]: I0120 17:07:05.755643 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ddd4d7ed-8a4b-40de-88b9-b4d7e90e9f33-operator-scripts\") pod \"root-account-create-update-pq9r8\" (UID: \"ddd4d7ed-8a4b-40de-88b9-b4d7e90e9f33\") " pod="openstack-kuttl-tests/root-account-create-update-pq9r8" Jan 20 17:07:05 crc kubenswrapper[4558]: I0120 17:07:05.756444 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qnc5b\" (UniqueName: \"kubernetes.io/projected/ddd4d7ed-8a4b-40de-88b9-b4d7e90e9f33-kube-api-access-qnc5b\") pod \"root-account-create-update-pq9r8\" (UID: \"ddd4d7ed-8a4b-40de-88b9-b4d7e90e9f33\") " pod="openstack-kuttl-tests/root-account-create-update-pq9r8" Jan 20 17:07:05 crc kubenswrapper[4558]: I0120 17:07:05.756858 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ddd4d7ed-8a4b-40de-88b9-b4d7e90e9f33-operator-scripts\") pod \"root-account-create-update-pq9r8\" (UID: \"ddd4d7ed-8a4b-40de-88b9-b4d7e90e9f33\") " pod="openstack-kuttl-tests/root-account-create-update-pq9r8" Jan 20 17:07:05 crc kubenswrapper[4558]: I0120 17:07:05.780685 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qnc5b\" (UniqueName: 
\"kubernetes.io/projected/ddd4d7ed-8a4b-40de-88b9-b4d7e90e9f33-kube-api-access-qnc5b\") pod \"root-account-create-update-pq9r8\" (UID: \"ddd4d7ed-8a4b-40de-88b9-b4d7e90e9f33\") " pod="openstack-kuttl-tests/root-account-create-update-pq9r8" Jan 20 17:07:05 crc kubenswrapper[4558]: I0120 17:07:05.850648 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-pq9r8" Jan 20 17:07:06 crc kubenswrapper[4558]: I0120 17:07:06.214009 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-pq9r8"] Jan 20 17:07:06 crc kubenswrapper[4558]: W0120 17:07:06.217995 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podddd4d7ed_8a4b_40de_88b9_b4d7e90e9f33.slice/crio-9a0301ddea2b6f7f0532c78dbc7c438f6e1070ae8bd613cd17b6e3598dc06f40 WatchSource:0}: Error finding container 9a0301ddea2b6f7f0532c78dbc7c438f6e1070ae8bd613cd17b6e3598dc06f40: Status 404 returned error can't find the container with id 9a0301ddea2b6f7f0532c78dbc7c438f6e1070ae8bd613cd17b6e3598dc06f40 Jan 20 17:07:06 crc kubenswrapper[4558]: E0120 17:07:06.568157 4558 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podddd4d7ed_8a4b_40de_88b9_b4d7e90e9f33.slice/crio-2401ca3f0a9fa22defdde8ce4f9940679e94f9352ac4844cdba2183eaa8ea52b.scope\": RecentStats: unable to find data in memory cache]" Jan 20 17:07:07 crc kubenswrapper[4558]: I0120 17:07:07.170025 4558 generic.go:334] "Generic (PLEG): container finished" podID="ddd4d7ed-8a4b-40de-88b9-b4d7e90e9f33" containerID="2401ca3f0a9fa22defdde8ce4f9940679e94f9352ac4844cdba2183eaa8ea52b" exitCode=0 Jan 20 17:07:07 crc kubenswrapper[4558]: I0120 17:07:07.170136 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-pq9r8" event={"ID":"ddd4d7ed-8a4b-40de-88b9-b4d7e90e9f33","Type":"ContainerDied","Data":"2401ca3f0a9fa22defdde8ce4f9940679e94f9352ac4844cdba2183eaa8ea52b"} Jan 20 17:07:07 crc kubenswrapper[4558]: I0120 17:07:07.170372 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-pq9r8" event={"ID":"ddd4d7ed-8a4b-40de-88b9-b4d7e90e9f33","Type":"ContainerStarted","Data":"9a0301ddea2b6f7f0532c78dbc7c438f6e1070ae8bd613cd17b6e3598dc06f40"} Jan 20 17:07:08 crc kubenswrapper[4558]: I0120 17:07:08.118433 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-db-create-mjhrj"] Jan 20 17:07:08 crc kubenswrapper[4558]: I0120 17:07:08.119624 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-db-create-mjhrj" Jan 20 17:07:08 crc kubenswrapper[4558]: I0120 17:07:08.126516 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-db-create-mjhrj"] Jan 20 17:07:08 crc kubenswrapper[4558]: I0120 17:07:08.182643 4558 generic.go:334] "Generic (PLEG): container finished" podID="07a478af-b5f6-4daa-8940-f40b9fb00e2f" containerID="6b1947119d80650089ad705d253d7d5004d6ff8894f34d16dafbb8da8a02235d" exitCode=0 Jan 20 17:07:08 crc kubenswrapper[4558]: I0120 17:07:08.182694 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-ring-rebalance-6hv7n" event={"ID":"07a478af-b5f6-4daa-8940-f40b9fb00e2f","Type":"ContainerDied","Data":"6b1947119d80650089ad705d253d7d5004d6ff8894f34d16dafbb8da8a02235d"} Jan 20 17:07:08 crc kubenswrapper[4558]: I0120 17:07:08.229474 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-c88a-account-create-update-vglhd"] Jan 20 17:07:08 crc kubenswrapper[4558]: I0120 17:07:08.230501 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-c88a-account-create-update-vglhd" Jan 20 17:07:08 crc kubenswrapper[4558]: I0120 17:07:08.231986 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-db-secret" Jan 20 17:07:08 crc kubenswrapper[4558]: I0120 17:07:08.247027 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-c88a-account-create-update-vglhd"] Jan 20 17:07:08 crc kubenswrapper[4558]: I0120 17:07:08.311873 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2ba3bb38-522d-444e-bfe4-510b9fa17227-operator-scripts\") pod \"keystone-db-create-mjhrj\" (UID: \"2ba3bb38-522d-444e-bfe4-510b9fa17227\") " pod="openstack-kuttl-tests/keystone-db-create-mjhrj" Jan 20 17:07:08 crc kubenswrapper[4558]: I0120 17:07:08.312190 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z9gf2\" (UniqueName: \"kubernetes.io/projected/2ba3bb38-522d-444e-bfe4-510b9fa17227-kube-api-access-z9gf2\") pod \"keystone-db-create-mjhrj\" (UID: \"2ba3bb38-522d-444e-bfe4-510b9fa17227\") " pod="openstack-kuttl-tests/keystone-db-create-mjhrj" Jan 20 17:07:08 crc kubenswrapper[4558]: I0120 17:07:08.413864 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z9gf2\" (UniqueName: \"kubernetes.io/projected/2ba3bb38-522d-444e-bfe4-510b9fa17227-kube-api-access-z9gf2\") pod \"keystone-db-create-mjhrj\" (UID: \"2ba3bb38-522d-444e-bfe4-510b9fa17227\") " pod="openstack-kuttl-tests/keystone-db-create-mjhrj" Jan 20 17:07:08 crc kubenswrapper[4558]: I0120 17:07:08.413978 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2ba3bb38-522d-444e-bfe4-510b9fa17227-operator-scripts\") pod \"keystone-db-create-mjhrj\" (UID: \"2ba3bb38-522d-444e-bfe4-510b9fa17227\") " pod="openstack-kuttl-tests/keystone-db-create-mjhrj" Jan 20 17:07:08 crc kubenswrapper[4558]: I0120 17:07:08.414015 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f6e1d86c-7bad-427d-8b89-f1df1213ccc5-operator-scripts\") pod 
\"keystone-c88a-account-create-update-vglhd\" (UID: \"f6e1d86c-7bad-427d-8b89-f1df1213ccc5\") " pod="openstack-kuttl-tests/keystone-c88a-account-create-update-vglhd" Jan 20 17:07:08 crc kubenswrapper[4558]: I0120 17:07:08.414060 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zb2hc\" (UniqueName: \"kubernetes.io/projected/f6e1d86c-7bad-427d-8b89-f1df1213ccc5-kube-api-access-zb2hc\") pod \"keystone-c88a-account-create-update-vglhd\" (UID: \"f6e1d86c-7bad-427d-8b89-f1df1213ccc5\") " pod="openstack-kuttl-tests/keystone-c88a-account-create-update-vglhd" Jan 20 17:07:08 crc kubenswrapper[4558]: I0120 17:07:08.415082 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2ba3bb38-522d-444e-bfe4-510b9fa17227-operator-scripts\") pod \"keystone-db-create-mjhrj\" (UID: \"2ba3bb38-522d-444e-bfe4-510b9fa17227\") " pod="openstack-kuttl-tests/keystone-db-create-mjhrj" Jan 20 17:07:08 crc kubenswrapper[4558]: I0120 17:07:08.434709 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z9gf2\" (UniqueName: \"kubernetes.io/projected/2ba3bb38-522d-444e-bfe4-510b9fa17227-kube-api-access-z9gf2\") pod \"keystone-db-create-mjhrj\" (UID: \"2ba3bb38-522d-444e-bfe4-510b9fa17227\") " pod="openstack-kuttl-tests/keystone-db-create-mjhrj" Jan 20 17:07:08 crc kubenswrapper[4558]: I0120 17:07:08.446212 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-db-create-mjhrj" Jan 20 17:07:08 crc kubenswrapper[4558]: I0120 17:07:08.450006 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/placement-db-create-mcpzb"] Jan 20 17:07:08 crc kubenswrapper[4558]: I0120 17:07:08.451104 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-db-create-mcpzb" Jan 20 17:07:08 crc kubenswrapper[4558]: I0120 17:07:08.461395 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-db-create-mcpzb"] Jan 20 17:07:08 crc kubenswrapper[4558]: I0120 17:07:08.504367 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-pq9r8" Jan 20 17:07:08 crc kubenswrapper[4558]: I0120 17:07:08.515420 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f6e1d86c-7bad-427d-8b89-f1df1213ccc5-operator-scripts\") pod \"keystone-c88a-account-create-update-vglhd\" (UID: \"f6e1d86c-7bad-427d-8b89-f1df1213ccc5\") " pod="openstack-kuttl-tests/keystone-c88a-account-create-update-vglhd" Jan 20 17:07:08 crc kubenswrapper[4558]: I0120 17:07:08.515478 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zb2hc\" (UniqueName: \"kubernetes.io/projected/f6e1d86c-7bad-427d-8b89-f1df1213ccc5-kube-api-access-zb2hc\") pod \"keystone-c88a-account-create-update-vglhd\" (UID: \"f6e1d86c-7bad-427d-8b89-f1df1213ccc5\") " pod="openstack-kuttl-tests/keystone-c88a-account-create-update-vglhd" Jan 20 17:07:08 crc kubenswrapper[4558]: I0120 17:07:08.516107 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f6e1d86c-7bad-427d-8b89-f1df1213ccc5-operator-scripts\") pod \"keystone-c88a-account-create-update-vglhd\" (UID: \"f6e1d86c-7bad-427d-8b89-f1df1213ccc5\") " pod="openstack-kuttl-tests/keystone-c88a-account-create-update-vglhd" Jan 20 17:07:08 crc kubenswrapper[4558]: I0120 17:07:08.532497 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zb2hc\" (UniqueName: \"kubernetes.io/projected/f6e1d86c-7bad-427d-8b89-f1df1213ccc5-kube-api-access-zb2hc\") pod \"keystone-c88a-account-create-update-vglhd\" (UID: \"f6e1d86c-7bad-427d-8b89-f1df1213ccc5\") " pod="openstack-kuttl-tests/keystone-c88a-account-create-update-vglhd" Jan 20 17:07:08 crc kubenswrapper[4558]: I0120 17:07:08.559081 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-c88a-account-create-update-vglhd" Jan 20 17:07:08 crc kubenswrapper[4558]: I0120 17:07:08.563046 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/placement-64d3-account-create-update-gx7gt"] Jan 20 17:07:08 crc kubenswrapper[4558]: E0120 17:07:08.563546 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ddd4d7ed-8a4b-40de-88b9-b4d7e90e9f33" containerName="mariadb-account-create-update" Jan 20 17:07:08 crc kubenswrapper[4558]: I0120 17:07:08.563567 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ddd4d7ed-8a4b-40de-88b9-b4d7e90e9f33" containerName="mariadb-account-create-update" Jan 20 17:07:08 crc kubenswrapper[4558]: I0120 17:07:08.563767 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ddd4d7ed-8a4b-40de-88b9-b4d7e90e9f33" containerName="mariadb-account-create-update" Jan 20 17:07:08 crc kubenswrapper[4558]: I0120 17:07:08.564569 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-64d3-account-create-update-gx7gt" Jan 20 17:07:08 crc kubenswrapper[4558]: I0120 17:07:08.566084 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"placement-db-secret" Jan 20 17:07:08 crc kubenswrapper[4558]: I0120 17:07:08.598330 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-64d3-account-create-update-gx7gt"] Jan 20 17:07:08 crc kubenswrapper[4558]: I0120 17:07:08.617023 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qnc5b\" (UniqueName: \"kubernetes.io/projected/ddd4d7ed-8a4b-40de-88b9-b4d7e90e9f33-kube-api-access-qnc5b\") pod \"ddd4d7ed-8a4b-40de-88b9-b4d7e90e9f33\" (UID: \"ddd4d7ed-8a4b-40de-88b9-b4d7e90e9f33\") " Jan 20 17:07:08 crc kubenswrapper[4558]: I0120 17:07:08.617236 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ddd4d7ed-8a4b-40de-88b9-b4d7e90e9f33-operator-scripts\") pod \"ddd4d7ed-8a4b-40de-88b9-b4d7e90e9f33\" (UID: \"ddd4d7ed-8a4b-40de-88b9-b4d7e90e9f33\") " Jan 20 17:07:08 crc kubenswrapper[4558]: I0120 17:07:08.617660 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-98jg6\" (UniqueName: \"kubernetes.io/projected/69d7d5b9-efa3-4a24-88ca-e3f6932188d0-kube-api-access-98jg6\") pod \"placement-db-create-mcpzb\" (UID: \"69d7d5b9-efa3-4a24-88ca-e3f6932188d0\") " pod="openstack-kuttl-tests/placement-db-create-mcpzb" Jan 20 17:07:08 crc kubenswrapper[4558]: I0120 17:07:08.617745 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/69d7d5b9-efa3-4a24-88ca-e3f6932188d0-operator-scripts\") pod \"placement-db-create-mcpzb\" (UID: \"69d7d5b9-efa3-4a24-88ca-e3f6932188d0\") " pod="openstack-kuttl-tests/placement-db-create-mcpzb" Jan 20 17:07:08 crc kubenswrapper[4558]: I0120 17:07:08.618478 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ddd4d7ed-8a4b-40de-88b9-b4d7e90e9f33-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ddd4d7ed-8a4b-40de-88b9-b4d7e90e9f33" (UID: "ddd4d7ed-8a4b-40de-88b9-b4d7e90e9f33"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:07:08 crc kubenswrapper[4558]: I0120 17:07:08.621286 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ddd4d7ed-8a4b-40de-88b9-b4d7e90e9f33-kube-api-access-qnc5b" (OuterVolumeSpecName: "kube-api-access-qnc5b") pod "ddd4d7ed-8a4b-40de-88b9-b4d7e90e9f33" (UID: "ddd4d7ed-8a4b-40de-88b9-b4d7e90e9f33"). InnerVolumeSpecName "kube-api-access-qnc5b". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:07:08 crc kubenswrapper[4558]: I0120 17:07:08.668014 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-db-create-nq6nj"] Jan 20 17:07:08 crc kubenswrapper[4558]: I0120 17:07:08.669073 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-db-create-nq6nj" Jan 20 17:07:08 crc kubenswrapper[4558]: I0120 17:07:08.674793 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-db-create-nq6nj"] Jan 20 17:07:08 crc kubenswrapper[4558]: I0120 17:07:08.720126 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/69d7d5b9-efa3-4a24-88ca-e3f6932188d0-operator-scripts\") pod \"placement-db-create-mcpzb\" (UID: \"69d7d5b9-efa3-4a24-88ca-e3f6932188d0\") " pod="openstack-kuttl-tests/placement-db-create-mcpzb" Jan 20 17:07:08 crc kubenswrapper[4558]: I0120 17:07:08.720335 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/05bee7dd-6a89-4bbd-a097-2f2c66d038e7-operator-scripts\") pod \"placement-64d3-account-create-update-gx7gt\" (UID: \"05bee7dd-6a89-4bbd-a097-2f2c66d038e7\") " pod="openstack-kuttl-tests/placement-64d3-account-create-update-gx7gt" Jan 20 17:07:08 crc kubenswrapper[4558]: I0120 17:07:08.720365 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n5v28\" (UniqueName: \"kubernetes.io/projected/05bee7dd-6a89-4bbd-a097-2f2c66d038e7-kube-api-access-n5v28\") pod \"placement-64d3-account-create-update-gx7gt\" (UID: \"05bee7dd-6a89-4bbd-a097-2f2c66d038e7\") " pod="openstack-kuttl-tests/placement-64d3-account-create-update-gx7gt" Jan 20 17:07:08 crc kubenswrapper[4558]: I0120 17:07:08.720397 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-98jg6\" (UniqueName: \"kubernetes.io/projected/69d7d5b9-efa3-4a24-88ca-e3f6932188d0-kube-api-access-98jg6\") pod \"placement-db-create-mcpzb\" (UID: \"69d7d5b9-efa3-4a24-88ca-e3f6932188d0\") " pod="openstack-kuttl-tests/placement-db-create-mcpzb" Jan 20 17:07:08 crc kubenswrapper[4558]: I0120 17:07:08.720734 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/69d7d5b9-efa3-4a24-88ca-e3f6932188d0-operator-scripts\") pod \"placement-db-create-mcpzb\" (UID: \"69d7d5b9-efa3-4a24-88ca-e3f6932188d0\") " pod="openstack-kuttl-tests/placement-db-create-mcpzb" Jan 20 17:07:08 crc kubenswrapper[4558]: I0120 17:07:08.720981 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qnc5b\" (UniqueName: \"kubernetes.io/projected/ddd4d7ed-8a4b-40de-88b9-b4d7e90e9f33-kube-api-access-qnc5b\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:08 crc kubenswrapper[4558]: I0120 17:07:08.721005 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ddd4d7ed-8a4b-40de-88b9-b4d7e90e9f33-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:08 crc kubenswrapper[4558]: I0120 17:07:08.734667 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-98jg6\" (UniqueName: \"kubernetes.io/projected/69d7d5b9-efa3-4a24-88ca-e3f6932188d0-kube-api-access-98jg6\") pod \"placement-db-create-mcpzb\" (UID: \"69d7d5b9-efa3-4a24-88ca-e3f6932188d0\") " pod="openstack-kuttl-tests/placement-db-create-mcpzb" Jan 20 17:07:08 crc kubenswrapper[4558]: I0120 17:07:08.767641 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-cba5-account-create-update-wsmw9"] Jan 20 17:07:08 crc kubenswrapper[4558]: I0120 
17:07:08.769263 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-cba5-account-create-update-wsmw9" Jan 20 17:07:08 crc kubenswrapper[4558]: I0120 17:07:08.771713 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-db-secret" Jan 20 17:07:08 crc kubenswrapper[4558]: I0120 17:07:08.777575 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-cba5-account-create-update-wsmw9"] Jan 20 17:07:08 crc kubenswrapper[4558]: I0120 17:07:08.818120 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-db-create-mcpzb" Jan 20 17:07:08 crc kubenswrapper[4558]: I0120 17:07:08.822535 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/81c02319-4baf-44d1-b0b2-ae946049fb60-operator-scripts\") pod \"glance-db-create-nq6nj\" (UID: \"81c02319-4baf-44d1-b0b2-ae946049fb60\") " pod="openstack-kuttl-tests/glance-db-create-nq6nj" Jan 20 17:07:08 crc kubenswrapper[4558]: I0120 17:07:08.822611 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/05bee7dd-6a89-4bbd-a097-2f2c66d038e7-operator-scripts\") pod \"placement-64d3-account-create-update-gx7gt\" (UID: \"05bee7dd-6a89-4bbd-a097-2f2c66d038e7\") " pod="openstack-kuttl-tests/placement-64d3-account-create-update-gx7gt" Jan 20 17:07:08 crc kubenswrapper[4558]: I0120 17:07:08.822675 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n5v28\" (UniqueName: \"kubernetes.io/projected/05bee7dd-6a89-4bbd-a097-2f2c66d038e7-kube-api-access-n5v28\") pod \"placement-64d3-account-create-update-gx7gt\" (UID: \"05bee7dd-6a89-4bbd-a097-2f2c66d038e7\") " pod="openstack-kuttl-tests/placement-64d3-account-create-update-gx7gt" Jan 20 17:07:08 crc kubenswrapper[4558]: I0120 17:07:08.822749 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bfd5m\" (UniqueName: \"kubernetes.io/projected/81c02319-4baf-44d1-b0b2-ae946049fb60-kube-api-access-bfd5m\") pod \"glance-db-create-nq6nj\" (UID: \"81c02319-4baf-44d1-b0b2-ae946049fb60\") " pod="openstack-kuttl-tests/glance-db-create-nq6nj" Jan 20 17:07:08 crc kubenswrapper[4558]: I0120 17:07:08.823334 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/05bee7dd-6a89-4bbd-a097-2f2c66d038e7-operator-scripts\") pod \"placement-64d3-account-create-update-gx7gt\" (UID: \"05bee7dd-6a89-4bbd-a097-2f2c66d038e7\") " pod="openstack-kuttl-tests/placement-64d3-account-create-update-gx7gt" Jan 20 17:07:08 crc kubenswrapper[4558]: I0120 17:07:08.837421 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n5v28\" (UniqueName: \"kubernetes.io/projected/05bee7dd-6a89-4bbd-a097-2f2c66d038e7-kube-api-access-n5v28\") pod \"placement-64d3-account-create-update-gx7gt\" (UID: \"05bee7dd-6a89-4bbd-a097-2f2c66d038e7\") " pod="openstack-kuttl-tests/placement-64d3-account-create-update-gx7gt" Jan 20 17:07:08 crc kubenswrapper[4558]: I0120 17:07:08.855908 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-db-create-mjhrj"] Jan 20 17:07:08 crc kubenswrapper[4558]: I0120 17:07:08.904375 4558 util.go:30] "No sandbox for pod 
can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-64d3-account-create-update-gx7gt" Jan 20 17:07:08 crc kubenswrapper[4558]: I0120 17:07:08.923995 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/81c02319-4baf-44d1-b0b2-ae946049fb60-operator-scripts\") pod \"glance-db-create-nq6nj\" (UID: \"81c02319-4baf-44d1-b0b2-ae946049fb60\") " pod="openstack-kuttl-tests/glance-db-create-nq6nj" Jan 20 17:07:08 crc kubenswrapper[4558]: I0120 17:07:08.924072 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7fadd286-85f0-45eb-a8fc-c408074c178c-operator-scripts\") pod \"glance-cba5-account-create-update-wsmw9\" (UID: \"7fadd286-85f0-45eb-a8fc-c408074c178c\") " pod="openstack-kuttl-tests/glance-cba5-account-create-update-wsmw9" Jan 20 17:07:08 crc kubenswrapper[4558]: I0120 17:07:08.924200 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dnc9x\" (UniqueName: \"kubernetes.io/projected/7fadd286-85f0-45eb-a8fc-c408074c178c-kube-api-access-dnc9x\") pod \"glance-cba5-account-create-update-wsmw9\" (UID: \"7fadd286-85f0-45eb-a8fc-c408074c178c\") " pod="openstack-kuttl-tests/glance-cba5-account-create-update-wsmw9" Jan 20 17:07:08 crc kubenswrapper[4558]: I0120 17:07:08.924238 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bfd5m\" (UniqueName: \"kubernetes.io/projected/81c02319-4baf-44d1-b0b2-ae946049fb60-kube-api-access-bfd5m\") pod \"glance-db-create-nq6nj\" (UID: \"81c02319-4baf-44d1-b0b2-ae946049fb60\") " pod="openstack-kuttl-tests/glance-db-create-nq6nj" Jan 20 17:07:08 crc kubenswrapper[4558]: I0120 17:07:08.926443 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/81c02319-4baf-44d1-b0b2-ae946049fb60-operator-scripts\") pod \"glance-db-create-nq6nj\" (UID: \"81c02319-4baf-44d1-b0b2-ae946049fb60\") " pod="openstack-kuttl-tests/glance-db-create-nq6nj" Jan 20 17:07:08 crc kubenswrapper[4558]: I0120 17:07:08.939942 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bfd5m\" (UniqueName: \"kubernetes.io/projected/81c02319-4baf-44d1-b0b2-ae946049fb60-kube-api-access-bfd5m\") pod \"glance-db-create-nq6nj\" (UID: \"81c02319-4baf-44d1-b0b2-ae946049fb60\") " pod="openstack-kuttl-tests/glance-db-create-nq6nj" Jan 20 17:07:08 crc kubenswrapper[4558]: I0120 17:07:08.974103 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-c88a-account-create-update-vglhd"] Jan 20 17:07:08 crc kubenswrapper[4558]: I0120 17:07:08.984711 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-db-create-nq6nj" Jan 20 17:07:09 crc kubenswrapper[4558]: I0120 17:07:09.025522 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dnc9x\" (UniqueName: \"kubernetes.io/projected/7fadd286-85f0-45eb-a8fc-c408074c178c-kube-api-access-dnc9x\") pod \"glance-cba5-account-create-update-wsmw9\" (UID: \"7fadd286-85f0-45eb-a8fc-c408074c178c\") " pod="openstack-kuttl-tests/glance-cba5-account-create-update-wsmw9" Jan 20 17:07:09 crc kubenswrapper[4558]: I0120 17:07:09.025730 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7fadd286-85f0-45eb-a8fc-c408074c178c-operator-scripts\") pod \"glance-cba5-account-create-update-wsmw9\" (UID: \"7fadd286-85f0-45eb-a8fc-c408074c178c\") " pod="openstack-kuttl-tests/glance-cba5-account-create-update-wsmw9" Jan 20 17:07:09 crc kubenswrapper[4558]: I0120 17:07:09.026499 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7fadd286-85f0-45eb-a8fc-c408074c178c-operator-scripts\") pod \"glance-cba5-account-create-update-wsmw9\" (UID: \"7fadd286-85f0-45eb-a8fc-c408074c178c\") " pod="openstack-kuttl-tests/glance-cba5-account-create-update-wsmw9" Jan 20 17:07:09 crc kubenswrapper[4558]: I0120 17:07:09.041611 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dnc9x\" (UniqueName: \"kubernetes.io/projected/7fadd286-85f0-45eb-a8fc-c408074c178c-kube-api-access-dnc9x\") pod \"glance-cba5-account-create-update-wsmw9\" (UID: \"7fadd286-85f0-45eb-a8fc-c408074c178c\") " pod="openstack-kuttl-tests/glance-cba5-account-create-update-wsmw9" Jan 20 17:07:09 crc kubenswrapper[4558]: I0120 17:07:09.086468 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-cba5-account-create-update-wsmw9" Jan 20 17:07:09 crc kubenswrapper[4558]: I0120 17:07:09.192597 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-pq9r8" event={"ID":"ddd4d7ed-8a4b-40de-88b9-b4d7e90e9f33","Type":"ContainerDied","Data":"9a0301ddea2b6f7f0532c78dbc7c438f6e1070ae8bd613cd17b6e3598dc06f40"} Jan 20 17:07:09 crc kubenswrapper[4558]: I0120 17:07:09.192628 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-pq9r8" Jan 20 17:07:09 crc kubenswrapper[4558]: I0120 17:07:09.192639 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9a0301ddea2b6f7f0532c78dbc7c438f6e1070ae8bd613cd17b6e3598dc06f40" Jan 20 17:07:09 crc kubenswrapper[4558]: I0120 17:07:09.205201 4558 generic.go:334] "Generic (PLEG): container finished" podID="2ba3bb38-522d-444e-bfe4-510b9fa17227" containerID="16d31d54d596530bd1cc93b119b9b9cc979a47d6a7dd2c11504f7b06e4e859d5" exitCode=0 Jan 20 17:07:09 crc kubenswrapper[4558]: I0120 17:07:09.205277 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-create-mjhrj" event={"ID":"2ba3bb38-522d-444e-bfe4-510b9fa17227","Type":"ContainerDied","Data":"16d31d54d596530bd1cc93b119b9b9cc979a47d6a7dd2c11504f7b06e4e859d5"} Jan 20 17:07:09 crc kubenswrapper[4558]: I0120 17:07:09.205307 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-create-mjhrj" event={"ID":"2ba3bb38-522d-444e-bfe4-510b9fa17227","Type":"ContainerStarted","Data":"da99040c19b874398cc49166ea1dd8329b5a2bc79db6a1008663b6c933d31a77"} Jan 20 17:07:09 crc kubenswrapper[4558]: I0120 17:07:09.207459 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-c88a-account-create-update-vglhd" event={"ID":"f6e1d86c-7bad-427d-8b89-f1df1213ccc5","Type":"ContainerStarted","Data":"29cc24c38e89d551c6feb9ca7e8c3ccf2dba9bb46a4cf839f1e0d5911675a5fb"} Jan 20 17:07:09 crc kubenswrapper[4558]: I0120 17:07:09.207486 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-c88a-account-create-update-vglhd" event={"ID":"f6e1d86c-7bad-427d-8b89-f1df1213ccc5","Type":"ContainerStarted","Data":"a37b192c775c0d31c9594dd83a91686446a94c17a84ecdf6c5a9851cb76caa8b"} Jan 20 17:07:09 crc kubenswrapper[4558]: I0120 17:07:09.229401 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/fc023815-924a-4a04-bad2-5fc862ef20ed-etc-swift\") pod \"swift-storage-0\" (UID: \"fc023815-924a-4a04-bad2-5fc862ef20ed\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:07:09 crc kubenswrapper[4558]: I0120 17:07:09.234182 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/fc023815-924a-4a04-bad2-5fc862ef20ed-etc-swift\") pod \"swift-storage-0\" (UID: \"fc023815-924a-4a04-bad2-5fc862ef20ed\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:07:09 crc kubenswrapper[4558]: I0120 17:07:09.234762 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-db-create-mcpzb"] Jan 20 17:07:09 crc kubenswrapper[4558]: I0120 17:07:09.254238 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/keystone-c88a-account-create-update-vglhd" podStartSLOduration=1.254216529 podStartE2EDuration="1.254216529s" podCreationTimestamp="2026-01-20 17:07:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:07:09.245774956 +0000 UTC m=+1523.006112923" watchObservedRunningTime="2026-01-20 17:07:09.254216529 +0000 UTC m=+1523.014554495" Jan 20 17:07:09 crc kubenswrapper[4558]: I0120 17:07:09.327520 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack-kuttl-tests/placement-64d3-account-create-update-gx7gt"] Jan 20 17:07:09 crc kubenswrapper[4558]: I0120 17:07:09.417150 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-db-create-nq6nj"] Jan 20 17:07:09 crc kubenswrapper[4558]: W0120 17:07:09.421730 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod05bee7dd_6a89_4bbd_a097_2f2c66d038e7.slice/crio-78069bdb88957b9aedc41f5be24256aca1636c2b32205fdc96bebaf580c4dd32 WatchSource:0}: Error finding container 78069bdb88957b9aedc41f5be24256aca1636c2b32205fdc96bebaf580c4dd32: Status 404 returned error can't find the container with id 78069bdb88957b9aedc41f5be24256aca1636c2b32205fdc96bebaf580c4dd32 Jan 20 17:07:09 crc kubenswrapper[4558]: I0120 17:07:09.513419 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:07:09 crc kubenswrapper[4558]: I0120 17:07:09.562625 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-cba5-account-create-update-wsmw9"] Jan 20 17:07:09 crc kubenswrapper[4558]: I0120 17:07:09.566621 4558 scope.go:117] "RemoveContainer" containerID="a711a286282347590079d2447ef37fa9ed0f11085d6d7a65f85e65c1c2df608f" Jan 20 17:07:09 crc kubenswrapper[4558]: E0120 17:07:09.566848 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:07:09 crc kubenswrapper[4558]: W0120 17:07:09.642106 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7fadd286_85f0_45eb_a8fc_c408074c178c.slice/crio-7fc76b8c42aa9c1ce65bf99f1f0ec76f7160bef03acd22cdfb9524fcfc82ba25 WatchSource:0}: Error finding container 7fc76b8c42aa9c1ce65bf99f1f0ec76f7160bef03acd22cdfb9524fcfc82ba25: Status 404 returned error can't find the container with id 7fc76b8c42aa9c1ce65bf99f1f0ec76f7160bef03acd22cdfb9524fcfc82ba25 Jan 20 17:07:09 crc kubenswrapper[4558]: I0120 17:07:09.644566 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/swift-ring-rebalance-6hv7n" Jan 20 17:07:09 crc kubenswrapper[4558]: I0120 17:07:09.847685 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/07a478af-b5f6-4daa-8940-f40b9fb00e2f-combined-ca-bundle\") pod \"07a478af-b5f6-4daa-8940-f40b9fb00e2f\" (UID: \"07a478af-b5f6-4daa-8940-f40b9fb00e2f\") " Jan 20 17:07:09 crc kubenswrapper[4558]: I0120 17:07:09.847738 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/07a478af-b5f6-4daa-8940-f40b9fb00e2f-dispersionconf\") pod \"07a478af-b5f6-4daa-8940-f40b9fb00e2f\" (UID: \"07a478af-b5f6-4daa-8940-f40b9fb00e2f\") " Jan 20 17:07:09 crc kubenswrapper[4558]: I0120 17:07:09.847761 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/07a478af-b5f6-4daa-8940-f40b9fb00e2f-etc-swift\") pod \"07a478af-b5f6-4daa-8940-f40b9fb00e2f\" (UID: \"07a478af-b5f6-4daa-8940-f40b9fb00e2f\") " Jan 20 17:07:09 crc kubenswrapper[4558]: I0120 17:07:09.847937 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/07a478af-b5f6-4daa-8940-f40b9fb00e2f-ring-data-devices\") pod \"07a478af-b5f6-4daa-8940-f40b9fb00e2f\" (UID: \"07a478af-b5f6-4daa-8940-f40b9fb00e2f\") " Jan 20 17:07:09 crc kubenswrapper[4558]: I0120 17:07:09.847964 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/07a478af-b5f6-4daa-8940-f40b9fb00e2f-swiftconf\") pod \"07a478af-b5f6-4daa-8940-f40b9fb00e2f\" (UID: \"07a478af-b5f6-4daa-8940-f40b9fb00e2f\") " Jan 20 17:07:09 crc kubenswrapper[4558]: I0120 17:07:09.847988 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nl9zr\" (UniqueName: \"kubernetes.io/projected/07a478af-b5f6-4daa-8940-f40b9fb00e2f-kube-api-access-nl9zr\") pod \"07a478af-b5f6-4daa-8940-f40b9fb00e2f\" (UID: \"07a478af-b5f6-4daa-8940-f40b9fb00e2f\") " Jan 20 17:07:09 crc kubenswrapper[4558]: I0120 17:07:09.848007 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/07a478af-b5f6-4daa-8940-f40b9fb00e2f-scripts\") pod \"07a478af-b5f6-4daa-8940-f40b9fb00e2f\" (UID: \"07a478af-b5f6-4daa-8940-f40b9fb00e2f\") " Jan 20 17:07:09 crc kubenswrapper[4558]: I0120 17:07:09.848615 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/07a478af-b5f6-4daa-8940-f40b9fb00e2f-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "07a478af-b5f6-4daa-8940-f40b9fb00e2f" (UID: "07a478af-b5f6-4daa-8940-f40b9fb00e2f"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:07:09 crc kubenswrapper[4558]: I0120 17:07:09.849401 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/07a478af-b5f6-4daa-8940-f40b9fb00e2f-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "07a478af-b5f6-4daa-8940-f40b9fb00e2f" (UID: "07a478af-b5f6-4daa-8940-f40b9fb00e2f"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:07:09 crc kubenswrapper[4558]: I0120 17:07:09.853325 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/07a478af-b5f6-4daa-8940-f40b9fb00e2f-kube-api-access-nl9zr" (OuterVolumeSpecName: "kube-api-access-nl9zr") pod "07a478af-b5f6-4daa-8940-f40b9fb00e2f" (UID: "07a478af-b5f6-4daa-8940-f40b9fb00e2f"). InnerVolumeSpecName "kube-api-access-nl9zr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:07:09 crc kubenswrapper[4558]: I0120 17:07:09.855666 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/07a478af-b5f6-4daa-8940-f40b9fb00e2f-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "07a478af-b5f6-4daa-8940-f40b9fb00e2f" (UID: "07a478af-b5f6-4daa-8940-f40b9fb00e2f"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:07:09 crc kubenswrapper[4558]: I0120 17:07:09.866716 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/07a478af-b5f6-4daa-8940-f40b9fb00e2f-scripts" (OuterVolumeSpecName: "scripts") pod "07a478af-b5f6-4daa-8940-f40b9fb00e2f" (UID: "07a478af-b5f6-4daa-8940-f40b9fb00e2f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:07:09 crc kubenswrapper[4558]: I0120 17:07:09.868025 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/07a478af-b5f6-4daa-8940-f40b9fb00e2f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "07a478af-b5f6-4daa-8940-f40b9fb00e2f" (UID: "07a478af-b5f6-4daa-8940-f40b9fb00e2f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:07:09 crc kubenswrapper[4558]: I0120 17:07:09.868943 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/07a478af-b5f6-4daa-8940-f40b9fb00e2f-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "07a478af-b5f6-4daa-8940-f40b9fb00e2f" (UID: "07a478af-b5f6-4daa-8940-f40b9fb00e2f"). InnerVolumeSpecName "swiftconf". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:07:09 crc kubenswrapper[4558]: I0120 17:07:09.907972 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-storage-0"] Jan 20 17:07:09 crc kubenswrapper[4558]: I0120 17:07:09.951382 4558 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/07a478af-b5f6-4daa-8940-f40b9fb00e2f-ring-data-devices\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:09 crc kubenswrapper[4558]: I0120 17:07:09.951421 4558 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/07a478af-b5f6-4daa-8940-f40b9fb00e2f-swiftconf\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:09 crc kubenswrapper[4558]: I0120 17:07:09.951435 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nl9zr\" (UniqueName: \"kubernetes.io/projected/07a478af-b5f6-4daa-8940-f40b9fb00e2f-kube-api-access-nl9zr\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:09 crc kubenswrapper[4558]: I0120 17:07:09.951446 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/07a478af-b5f6-4daa-8940-f40b9fb00e2f-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:09 crc kubenswrapper[4558]: I0120 17:07:09.951460 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/07a478af-b5f6-4daa-8940-f40b9fb00e2f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:09 crc kubenswrapper[4558]: I0120 17:07:09.951468 4558 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/07a478af-b5f6-4daa-8940-f40b9fb00e2f-dispersionconf\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:09 crc kubenswrapper[4558]: I0120 17:07:09.951477 4558 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/07a478af-b5f6-4daa-8940-f40b9fb00e2f-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:10 crc kubenswrapper[4558]: W0120 17:07:10.012935 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfc023815_924a_4a04_bad2_5fc862ef20ed.slice/crio-c1f8a2c586c1976b6cb7f85d1a6a5b6b148309e4fecdd10861bb4a6fb7dea098 WatchSource:0}: Error finding container c1f8a2c586c1976b6cb7f85d1a6a5b6b148309e4fecdd10861bb4a6fb7dea098: Status 404 returned error can't find the container with id c1f8a2c586c1976b6cb7f85d1a6a5b6b148309e4fecdd10861bb4a6fb7dea098 Jan 20 17:07:10 crc kubenswrapper[4558]: I0120 17:07:10.221847 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-ring-rebalance-6hv7n" event={"ID":"07a478af-b5f6-4daa-8940-f40b9fb00e2f","Type":"ContainerDied","Data":"2255d0ce22a5af41c9467f6b07e1375f8b33217a2e8fb25daa7754509171c7b1"} Jan 20 17:07:10 crc kubenswrapper[4558]: I0120 17:07:10.222227 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2255d0ce22a5af41c9467f6b07e1375f8b33217a2e8fb25daa7754509171c7b1" Jan 20 17:07:10 crc kubenswrapper[4558]: I0120 17:07:10.222102 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/swift-ring-rebalance-6hv7n" Jan 20 17:07:10 crc kubenswrapper[4558]: I0120 17:07:10.224322 4558 generic.go:334] "Generic (PLEG): container finished" podID="69d7d5b9-efa3-4a24-88ca-e3f6932188d0" containerID="32427dea9b72d979ea2f99684179f56d767ac269c899e70e736d6c6ed049f52c" exitCode=0 Jan 20 17:07:10 crc kubenswrapper[4558]: I0120 17:07:10.224389 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-create-mcpzb" event={"ID":"69d7d5b9-efa3-4a24-88ca-e3f6932188d0","Type":"ContainerDied","Data":"32427dea9b72d979ea2f99684179f56d767ac269c899e70e736d6c6ed049f52c"} Jan 20 17:07:10 crc kubenswrapper[4558]: I0120 17:07:10.224418 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-create-mcpzb" event={"ID":"69d7d5b9-efa3-4a24-88ca-e3f6932188d0","Type":"ContainerStarted","Data":"e21e10f069e962c325da7a7cdf95ed527fde92669ec032cc883865f397ecdc2f"} Jan 20 17:07:10 crc kubenswrapper[4558]: I0120 17:07:10.225618 4558 generic.go:334] "Generic (PLEG): container finished" podID="7fadd286-85f0-45eb-a8fc-c408074c178c" containerID="b5598fdef3344f2fd0700ea5b28128a663e40fc58f479d51a5d3dd26322c1fa5" exitCode=0 Jan 20 17:07:10 crc kubenswrapper[4558]: I0120 17:07:10.225718 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-cba5-account-create-update-wsmw9" event={"ID":"7fadd286-85f0-45eb-a8fc-c408074c178c","Type":"ContainerDied","Data":"b5598fdef3344f2fd0700ea5b28128a663e40fc58f479d51a5d3dd26322c1fa5"} Jan 20 17:07:10 crc kubenswrapper[4558]: I0120 17:07:10.225740 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-cba5-account-create-update-wsmw9" event={"ID":"7fadd286-85f0-45eb-a8fc-c408074c178c","Type":"ContainerStarted","Data":"7fc76b8c42aa9c1ce65bf99f1f0ec76f7160bef03acd22cdfb9524fcfc82ba25"} Jan 20 17:07:10 crc kubenswrapper[4558]: I0120 17:07:10.236156 4558 generic.go:334] "Generic (PLEG): container finished" podID="f6e1d86c-7bad-427d-8b89-f1df1213ccc5" containerID="29cc24c38e89d551c6feb9ca7e8c3ccf2dba9bb46a4cf839f1e0d5911675a5fb" exitCode=0 Jan 20 17:07:10 crc kubenswrapper[4558]: I0120 17:07:10.236264 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-c88a-account-create-update-vglhd" event={"ID":"f6e1d86c-7bad-427d-8b89-f1df1213ccc5","Type":"ContainerDied","Data":"29cc24c38e89d551c6feb9ca7e8c3ccf2dba9bb46a4cf839f1e0d5911675a5fb"} Jan 20 17:07:10 crc kubenswrapper[4558]: I0120 17:07:10.238467 4558 generic.go:334] "Generic (PLEG): container finished" podID="05bee7dd-6a89-4bbd-a097-2f2c66d038e7" containerID="6d2fb7f79a476206ea9a340648fc10811998e0e677155a9b0fcf13275927b0db" exitCode=0 Jan 20 17:07:10 crc kubenswrapper[4558]: I0120 17:07:10.238587 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-64d3-account-create-update-gx7gt" event={"ID":"05bee7dd-6a89-4bbd-a097-2f2c66d038e7","Type":"ContainerDied","Data":"6d2fb7f79a476206ea9a340648fc10811998e0e677155a9b0fcf13275927b0db"} Jan 20 17:07:10 crc kubenswrapper[4558]: I0120 17:07:10.238615 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-64d3-account-create-update-gx7gt" event={"ID":"05bee7dd-6a89-4bbd-a097-2f2c66d038e7","Type":"ContainerStarted","Data":"78069bdb88957b9aedc41f5be24256aca1636c2b32205fdc96bebaf580c4dd32"} Jan 20 17:07:10 crc kubenswrapper[4558]: I0120 17:07:10.244269 4558 generic.go:334] "Generic (PLEG): container 
finished" podID="81c02319-4baf-44d1-b0b2-ae946049fb60" containerID="59a7305eb2948167d417b047936332c60a3a27b73ade2362490b6ee4221588fc" exitCode=0 Jan 20 17:07:10 crc kubenswrapper[4558]: I0120 17:07:10.244364 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-create-nq6nj" event={"ID":"81c02319-4baf-44d1-b0b2-ae946049fb60","Type":"ContainerDied","Data":"59a7305eb2948167d417b047936332c60a3a27b73ade2362490b6ee4221588fc"} Jan 20 17:07:10 crc kubenswrapper[4558]: I0120 17:07:10.244400 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-create-nq6nj" event={"ID":"81c02319-4baf-44d1-b0b2-ae946049fb60","Type":"ContainerStarted","Data":"19090cd5de59728b510750a0f11a5f23cc2b26f186ad468b041b109919e960e5"} Jan 20 17:07:10 crc kubenswrapper[4558]: I0120 17:07:10.249857 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fc023815-924a-4a04-bad2-5fc862ef20ed","Type":"ContainerStarted","Data":"a8fa945168b61b5421cd9a0b28d23083787cacb8489bf80416f7599e933bb729"} Jan 20 17:07:10 crc kubenswrapper[4558]: I0120 17:07:10.250197 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fc023815-924a-4a04-bad2-5fc862ef20ed","Type":"ContainerStarted","Data":"c1f8a2c586c1976b6cb7f85d1a6a5b6b148309e4fecdd10861bb4a6fb7dea098"} Jan 20 17:07:10 crc kubenswrapper[4558]: I0120 17:07:10.579945 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-db-create-mjhrj" Jan 20 17:07:10 crc kubenswrapper[4558]: I0120 17:07:10.772235 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2ba3bb38-522d-444e-bfe4-510b9fa17227-operator-scripts\") pod \"2ba3bb38-522d-444e-bfe4-510b9fa17227\" (UID: \"2ba3bb38-522d-444e-bfe4-510b9fa17227\") " Jan 20 17:07:10 crc kubenswrapper[4558]: I0120 17:07:10.773155 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2ba3bb38-522d-444e-bfe4-510b9fa17227-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "2ba3bb38-522d-444e-bfe4-510b9fa17227" (UID: "2ba3bb38-522d-444e-bfe4-510b9fa17227"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:07:10 crc kubenswrapper[4558]: I0120 17:07:10.774526 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z9gf2\" (UniqueName: \"kubernetes.io/projected/2ba3bb38-522d-444e-bfe4-510b9fa17227-kube-api-access-z9gf2\") pod \"2ba3bb38-522d-444e-bfe4-510b9fa17227\" (UID: \"2ba3bb38-522d-444e-bfe4-510b9fa17227\") " Jan 20 17:07:10 crc kubenswrapper[4558]: I0120 17:07:10.775797 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2ba3bb38-522d-444e-bfe4-510b9fa17227-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:10 crc kubenswrapper[4558]: I0120 17:07:10.785533 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2ba3bb38-522d-444e-bfe4-510b9fa17227-kube-api-access-z9gf2" (OuterVolumeSpecName: "kube-api-access-z9gf2") pod "2ba3bb38-522d-444e-bfe4-510b9fa17227" (UID: "2ba3bb38-522d-444e-bfe4-510b9fa17227"). InnerVolumeSpecName "kube-api-access-z9gf2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:07:10 crc kubenswrapper[4558]: I0120 17:07:10.877055 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z9gf2\" (UniqueName: \"kubernetes.io/projected/2ba3bb38-522d-444e-bfe4-510b9fa17227-kube-api-access-z9gf2\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:11 crc kubenswrapper[4558]: I0120 17:07:11.260752 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-db-create-mjhrj" Jan 20 17:07:11 crc kubenswrapper[4558]: I0120 17:07:11.260771 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-create-mjhrj" event={"ID":"2ba3bb38-522d-444e-bfe4-510b9fa17227","Type":"ContainerDied","Data":"da99040c19b874398cc49166ea1dd8329b5a2bc79db6a1008663b6c933d31a77"} Jan 20 17:07:11 crc kubenswrapper[4558]: I0120 17:07:11.260832 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="da99040c19b874398cc49166ea1dd8329b5a2bc79db6a1008663b6c933d31a77" Jan 20 17:07:11 crc kubenswrapper[4558]: I0120 17:07:11.265879 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fc023815-924a-4a04-bad2-5fc862ef20ed","Type":"ContainerStarted","Data":"e39b88b3c77bb6d2be10abb480b241402fde95d8a3a44afc3741d81cd38c9f4f"} Jan 20 17:07:11 crc kubenswrapper[4558]: I0120 17:07:11.265931 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fc023815-924a-4a04-bad2-5fc862ef20ed","Type":"ContainerStarted","Data":"3517eea5a519d4ed5dbc9c6b97d0c39a1746f6138a5b79baf3fd8a2d54e4293f"} Jan 20 17:07:11 crc kubenswrapper[4558]: I0120 17:07:11.265947 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fc023815-924a-4a04-bad2-5fc862ef20ed","Type":"ContainerStarted","Data":"9f3a700b66f96079e1d010bea1fc7bf937b11f9fea1048aaff6419fd900af7c5"} Jan 20 17:07:11 crc kubenswrapper[4558]: I0120 17:07:11.265959 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fc023815-924a-4a04-bad2-5fc862ef20ed","Type":"ContainerStarted","Data":"8c2d36ea5a305d3a1da0f10e70884099eb310895225cdbcdf413bf2c005effc4"} Jan 20 17:07:11 crc kubenswrapper[4558]: I0120 17:07:11.265969 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fc023815-924a-4a04-bad2-5fc862ef20ed","Type":"ContainerStarted","Data":"1656f891b22791ec6d9f940bf33e0630746d220bfffb0046c75f48d2afa2d782"} Jan 20 17:07:11 crc kubenswrapper[4558]: I0120 17:07:11.265982 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fc023815-924a-4a04-bad2-5fc862ef20ed","Type":"ContainerStarted","Data":"d2f143dbca4d3796805d8445782c84acc22a2c02590f44e1ad34b26b6934656f"} Jan 20 17:07:11 crc kubenswrapper[4558]: I0120 17:07:11.265992 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fc023815-924a-4a04-bad2-5fc862ef20ed","Type":"ContainerStarted","Data":"101d312b0bf3e3b73451053b69e1450a946df99e2bf194fa8970254181c3653e"} Jan 20 17:07:11 crc kubenswrapper[4558]: I0120 17:07:11.266002 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" 
event={"ID":"fc023815-924a-4a04-bad2-5fc862ef20ed","Type":"ContainerStarted","Data":"0b1bf5c7f6bc3d5fc7206be9bc6c12aadefeeeb27348b35c50c441224ffe435b"} Jan 20 17:07:11 crc kubenswrapper[4558]: I0120 17:07:11.644102 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-c88a-account-create-update-vglhd" Jan 20 17:07:11 crc kubenswrapper[4558]: I0120 17:07:11.697712 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zb2hc\" (UniqueName: \"kubernetes.io/projected/f6e1d86c-7bad-427d-8b89-f1df1213ccc5-kube-api-access-zb2hc\") pod \"f6e1d86c-7bad-427d-8b89-f1df1213ccc5\" (UID: \"f6e1d86c-7bad-427d-8b89-f1df1213ccc5\") " Jan 20 17:07:11 crc kubenswrapper[4558]: I0120 17:07:11.697856 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f6e1d86c-7bad-427d-8b89-f1df1213ccc5-operator-scripts\") pod \"f6e1d86c-7bad-427d-8b89-f1df1213ccc5\" (UID: \"f6e1d86c-7bad-427d-8b89-f1df1213ccc5\") " Jan 20 17:07:11 crc kubenswrapper[4558]: I0120 17:07:11.699365 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f6e1d86c-7bad-427d-8b89-f1df1213ccc5-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f6e1d86c-7bad-427d-8b89-f1df1213ccc5" (UID: "f6e1d86c-7bad-427d-8b89-f1df1213ccc5"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:07:11 crc kubenswrapper[4558]: I0120 17:07:11.703042 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f6e1d86c-7bad-427d-8b89-f1df1213ccc5-kube-api-access-zb2hc" (OuterVolumeSpecName: "kube-api-access-zb2hc") pod "f6e1d86c-7bad-427d-8b89-f1df1213ccc5" (UID: "f6e1d86c-7bad-427d-8b89-f1df1213ccc5"). InnerVolumeSpecName "kube-api-access-zb2hc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:07:11 crc kubenswrapper[4558]: I0120 17:07:11.800037 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zb2hc\" (UniqueName: \"kubernetes.io/projected/f6e1d86c-7bad-427d-8b89-f1df1213ccc5-kube-api-access-zb2hc\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:11 crc kubenswrapper[4558]: I0120 17:07:11.800189 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f6e1d86c-7bad-427d-8b89-f1df1213ccc5-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:11 crc kubenswrapper[4558]: I0120 17:07:11.829403 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-db-create-nq6nj" Jan 20 17:07:11 crc kubenswrapper[4558]: I0120 17:07:11.836135 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-db-create-mcpzb" Jan 20 17:07:11 crc kubenswrapper[4558]: I0120 17:07:11.845647 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-64d3-account-create-update-gx7gt" Jan 20 17:07:11 crc kubenswrapper[4558]: I0120 17:07:11.854416 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-cba5-account-create-update-wsmw9" Jan 20 17:07:11 crc kubenswrapper[4558]: I0120 17:07:11.917586 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-pq9r8"] Jan 20 17:07:11 crc kubenswrapper[4558]: I0120 17:07:11.922339 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-pq9r8"] Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.002082 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-98jg6\" (UniqueName: \"kubernetes.io/projected/69d7d5b9-efa3-4a24-88ca-e3f6932188d0-kube-api-access-98jg6\") pod \"69d7d5b9-efa3-4a24-88ca-e3f6932188d0\" (UID: \"69d7d5b9-efa3-4a24-88ca-e3f6932188d0\") " Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.002143 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/69d7d5b9-efa3-4a24-88ca-e3f6932188d0-operator-scripts\") pod \"69d7d5b9-efa3-4a24-88ca-e3f6932188d0\" (UID: \"69d7d5b9-efa3-4a24-88ca-e3f6932188d0\") " Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.002254 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7fadd286-85f0-45eb-a8fc-c408074c178c-operator-scripts\") pod \"7fadd286-85f0-45eb-a8fc-c408074c178c\" (UID: \"7fadd286-85f0-45eb-a8fc-c408074c178c\") " Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.002301 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dnc9x\" (UniqueName: \"kubernetes.io/projected/7fadd286-85f0-45eb-a8fc-c408074c178c-kube-api-access-dnc9x\") pod \"7fadd286-85f0-45eb-a8fc-c408074c178c\" (UID: \"7fadd286-85f0-45eb-a8fc-c408074c178c\") " Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.002385 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n5v28\" (UniqueName: \"kubernetes.io/projected/05bee7dd-6a89-4bbd-a097-2f2c66d038e7-kube-api-access-n5v28\") pod \"05bee7dd-6a89-4bbd-a097-2f2c66d038e7\" (UID: \"05bee7dd-6a89-4bbd-a097-2f2c66d038e7\") " Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.002429 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/81c02319-4baf-44d1-b0b2-ae946049fb60-operator-scripts\") pod \"81c02319-4baf-44d1-b0b2-ae946049fb60\" (UID: \"81c02319-4baf-44d1-b0b2-ae946049fb60\") " Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.002460 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/05bee7dd-6a89-4bbd-a097-2f2c66d038e7-operator-scripts\") pod \"05bee7dd-6a89-4bbd-a097-2f2c66d038e7\" (UID: \"05bee7dd-6a89-4bbd-a097-2f2c66d038e7\") " Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.002508 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bfd5m\" (UniqueName: \"kubernetes.io/projected/81c02319-4baf-44d1-b0b2-ae946049fb60-kube-api-access-bfd5m\") pod \"81c02319-4baf-44d1-b0b2-ae946049fb60\" (UID: \"81c02319-4baf-44d1-b0b2-ae946049fb60\") " Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.002649 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/configmap/7fadd286-85f0-45eb-a8fc-c408074c178c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7fadd286-85f0-45eb-a8fc-c408074c178c" (UID: "7fadd286-85f0-45eb-a8fc-c408074c178c"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.002927 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7fadd286-85f0-45eb-a8fc-c408074c178c-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.003103 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/81c02319-4baf-44d1-b0b2-ae946049fb60-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "81c02319-4baf-44d1-b0b2-ae946049fb60" (UID: "81c02319-4baf-44d1-b0b2-ae946049fb60"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.003186 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/69d7d5b9-efa3-4a24-88ca-e3f6932188d0-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "69d7d5b9-efa3-4a24-88ca-e3f6932188d0" (UID: "69d7d5b9-efa3-4a24-88ca-e3f6932188d0"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.003255 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/05bee7dd-6a89-4bbd-a097-2f2c66d038e7-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "05bee7dd-6a89-4bbd-a097-2f2c66d038e7" (UID: "05bee7dd-6a89-4bbd-a097-2f2c66d038e7"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.007553 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/81c02319-4baf-44d1-b0b2-ae946049fb60-kube-api-access-bfd5m" (OuterVolumeSpecName: "kube-api-access-bfd5m") pod "81c02319-4baf-44d1-b0b2-ae946049fb60" (UID: "81c02319-4baf-44d1-b0b2-ae946049fb60"). InnerVolumeSpecName "kube-api-access-bfd5m". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.007662 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/05bee7dd-6a89-4bbd-a097-2f2c66d038e7-kube-api-access-n5v28" (OuterVolumeSpecName: "kube-api-access-n5v28") pod "05bee7dd-6a89-4bbd-a097-2f2c66d038e7" (UID: "05bee7dd-6a89-4bbd-a097-2f2c66d038e7"). InnerVolumeSpecName "kube-api-access-n5v28". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.007709 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/69d7d5b9-efa3-4a24-88ca-e3f6932188d0-kube-api-access-98jg6" (OuterVolumeSpecName: "kube-api-access-98jg6") pod "69d7d5b9-efa3-4a24-88ca-e3f6932188d0" (UID: "69d7d5b9-efa3-4a24-88ca-e3f6932188d0"). InnerVolumeSpecName "kube-api-access-98jg6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.007727 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7fadd286-85f0-45eb-a8fc-c408074c178c-kube-api-access-dnc9x" (OuterVolumeSpecName: "kube-api-access-dnc9x") pod "7fadd286-85f0-45eb-a8fc-c408074c178c" (UID: "7fadd286-85f0-45eb-a8fc-c408074c178c"). InnerVolumeSpecName "kube-api-access-dnc9x". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.103713 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-98jg6\" (UniqueName: \"kubernetes.io/projected/69d7d5b9-efa3-4a24-88ca-e3f6932188d0-kube-api-access-98jg6\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.103738 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/69d7d5b9-efa3-4a24-88ca-e3f6932188d0-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.103749 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dnc9x\" (UniqueName: \"kubernetes.io/projected/7fadd286-85f0-45eb-a8fc-c408074c178c-kube-api-access-dnc9x\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.103759 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n5v28\" (UniqueName: \"kubernetes.io/projected/05bee7dd-6a89-4bbd-a097-2f2c66d038e7-kube-api-access-n5v28\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.103767 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/81c02319-4baf-44d1-b0b2-ae946049fb60-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.103776 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/05bee7dd-6a89-4bbd-a097-2f2c66d038e7-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.103785 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bfd5m\" (UniqueName: \"kubernetes.io/projected/81c02319-4baf-44d1-b0b2-ae946049fb60-kube-api-access-bfd5m\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.279644 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-c88a-account-create-update-vglhd" event={"ID":"f6e1d86c-7bad-427d-8b89-f1df1213ccc5","Type":"ContainerDied","Data":"a37b192c775c0d31c9594dd83a91686446a94c17a84ecdf6c5a9851cb76caa8b"} Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.279671 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-c88a-account-create-update-vglhd" Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.279697 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a37b192c775c0d31c9594dd83a91686446a94c17a84ecdf6c5a9851cb76caa8b" Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.281341 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-64d3-account-create-update-gx7gt" Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.281331 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-64d3-account-create-update-gx7gt" event={"ID":"05bee7dd-6a89-4bbd-a097-2f2c66d038e7","Type":"ContainerDied","Data":"78069bdb88957b9aedc41f5be24256aca1636c2b32205fdc96bebaf580c4dd32"} Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.281448 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="78069bdb88957b9aedc41f5be24256aca1636c2b32205fdc96bebaf580c4dd32" Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.282473 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-create-nq6nj" event={"ID":"81c02319-4baf-44d1-b0b2-ae946049fb60","Type":"ContainerDied","Data":"19090cd5de59728b510750a0f11a5f23cc2b26f186ad468b041b109919e960e5"} Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.282517 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="19090cd5de59728b510750a0f11a5f23cc2b26f186ad468b041b109919e960e5" Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.282591 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-db-create-nq6nj" Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.300865 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fc023815-924a-4a04-bad2-5fc862ef20ed","Type":"ContainerStarted","Data":"e1ee27aa87a720002e5126efcfb765b302b81489973de33a9ecf856e1da6cf32"} Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.300914 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fc023815-924a-4a04-bad2-5fc862ef20ed","Type":"ContainerStarted","Data":"9410708314b2bfc60099a915bc2c1d7c39d1320c24755c9f69a3d55337b01e9b"} Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.300927 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fc023815-924a-4a04-bad2-5fc862ef20ed","Type":"ContainerStarted","Data":"9528266b3b97d400ebef502af6d8130d7c0559f6df040c83585a2f746292ef2d"} Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.300939 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fc023815-924a-4a04-bad2-5fc862ef20ed","Type":"ContainerStarted","Data":"7b43c5f22df4aacb3c09f2db534e5fb1e65e53019530050eed242dc948ad8a62"} Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.300950 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fc023815-924a-4a04-bad2-5fc862ef20ed","Type":"ContainerStarted","Data":"375019cf206eb74d303642688e63e85c3a2faecea6a7d087bd3a333841cd82c8"} Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.300960 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fc023815-924a-4a04-bad2-5fc862ef20ed","Type":"ContainerStarted","Data":"18ccc604f3f78f7668fb5ba7e36fe96edce4c2dcfa1c6e7165a65ee3388df062"} Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.303262 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-create-mcpzb" 
event={"ID":"69d7d5b9-efa3-4a24-88ca-e3f6932188d0","Type":"ContainerDied","Data":"e21e10f069e962c325da7a7cdf95ed527fde92669ec032cc883865f397ecdc2f"} Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.303392 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e21e10f069e962c325da7a7cdf95ed527fde92669ec032cc883865f397ecdc2f" Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.303272 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-db-create-mcpzb" Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.307237 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-cba5-account-create-update-wsmw9" event={"ID":"7fadd286-85f0-45eb-a8fc-c408074c178c","Type":"ContainerDied","Data":"7fc76b8c42aa9c1ce65bf99f1f0ec76f7160bef03acd22cdfb9524fcfc82ba25"} Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.307295 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7fc76b8c42aa9c1ce65bf99f1f0ec76f7160bef03acd22cdfb9524fcfc82ba25" Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.307402 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-cba5-account-create-update-wsmw9" Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.336244 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/swift-storage-0" podStartSLOduration=12.336227531 podStartE2EDuration="12.336227531s" podCreationTimestamp="2026-01-20 17:07:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:07:12.331664721 +0000 UTC m=+1526.092002688" watchObservedRunningTime="2026-01-20 17:07:12.336227531 +0000 UTC m=+1526.096565498" Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.461773 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-85d5c6dbcc-qhtkq"] Jan 20 17:07:12 crc kubenswrapper[4558]: E0120 17:07:12.462481 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69d7d5b9-efa3-4a24-88ca-e3f6932188d0" containerName="mariadb-database-create" Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.462500 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="69d7d5b9-efa3-4a24-88ca-e3f6932188d0" containerName="mariadb-database-create" Jan 20 17:07:12 crc kubenswrapper[4558]: E0120 17:07:12.462518 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ba3bb38-522d-444e-bfe4-510b9fa17227" containerName="mariadb-database-create" Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.462524 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ba3bb38-522d-444e-bfe4-510b9fa17227" containerName="mariadb-database-create" Jan 20 17:07:12 crc kubenswrapper[4558]: E0120 17:07:12.462534 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05bee7dd-6a89-4bbd-a097-2f2c66d038e7" containerName="mariadb-account-create-update" Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.462542 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="05bee7dd-6a89-4bbd-a097-2f2c66d038e7" containerName="mariadb-account-create-update" Jan 20 17:07:12 crc kubenswrapper[4558]: E0120 17:07:12.462548 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7fadd286-85f0-45eb-a8fc-c408074c178c" containerName="mariadb-account-create-update" Jan 
20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.462555 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7fadd286-85f0-45eb-a8fc-c408074c178c" containerName="mariadb-account-create-update" Jan 20 17:07:12 crc kubenswrapper[4558]: E0120 17:07:12.462568 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6e1d86c-7bad-427d-8b89-f1df1213ccc5" containerName="mariadb-account-create-update" Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.462575 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6e1d86c-7bad-427d-8b89-f1df1213ccc5" containerName="mariadb-account-create-update" Jan 20 17:07:12 crc kubenswrapper[4558]: E0120 17:07:12.462587 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81c02319-4baf-44d1-b0b2-ae946049fb60" containerName="mariadb-database-create" Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.462592 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="81c02319-4baf-44d1-b0b2-ae946049fb60" containerName="mariadb-database-create" Jan 20 17:07:12 crc kubenswrapper[4558]: E0120 17:07:12.462606 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="07a478af-b5f6-4daa-8940-f40b9fb00e2f" containerName="swift-ring-rebalance" Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.462613 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="07a478af-b5f6-4daa-8940-f40b9fb00e2f" containerName="swift-ring-rebalance" Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.463497 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f6e1d86c-7bad-427d-8b89-f1df1213ccc5" containerName="mariadb-account-create-update" Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.463520 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="2ba3bb38-522d-444e-bfe4-510b9fa17227" containerName="mariadb-database-create" Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.463531 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="05bee7dd-6a89-4bbd-a097-2f2c66d038e7" containerName="mariadb-account-create-update" Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.463541 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="81c02319-4baf-44d1-b0b2-ae946049fb60" containerName="mariadb-database-create" Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.463550 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="7fadd286-85f0-45eb-a8fc-c408074c178c" containerName="mariadb-account-create-update" Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.463558 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="69d7d5b9-efa3-4a24-88ca-e3f6932188d0" containerName="mariadb-database-create" Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.463568 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="07a478af-b5f6-4daa-8940-f40b9fb00e2f" containerName="swift-ring-rebalance" Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.464502 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-85d5c6dbcc-qhtkq" Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.467426 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"dns-swift-storage-0" Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.499442 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-85d5c6dbcc-qhtkq"] Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.575822 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ddd4d7ed-8a4b-40de-88b9-b4d7e90e9f33" path="/var/lib/kubelet/pods/ddd4d7ed-8a4b-40de-88b9-b4d7e90e9f33/volumes" Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.610569 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fgt4x\" (UniqueName: \"kubernetes.io/projected/d3959a6a-ea1a-489e-a441-3645144becc9-kube-api-access-fgt4x\") pod \"dnsmasq-dnsmasq-85d5c6dbcc-qhtkq\" (UID: \"d3959a6a-ea1a-489e-a441-3645144becc9\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-85d5c6dbcc-qhtkq" Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.610684 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d3959a6a-ea1a-489e-a441-3645144becc9-config\") pod \"dnsmasq-dnsmasq-85d5c6dbcc-qhtkq\" (UID: \"d3959a6a-ea1a-489e-a441-3645144becc9\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-85d5c6dbcc-qhtkq" Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.610718 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d3959a6a-ea1a-489e-a441-3645144becc9-dns-swift-storage-0\") pod \"dnsmasq-dnsmasq-85d5c6dbcc-qhtkq\" (UID: \"d3959a6a-ea1a-489e-a441-3645144becc9\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-85d5c6dbcc-qhtkq" Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.610752 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/d3959a6a-ea1a-489e-a441-3645144becc9-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-85d5c6dbcc-qhtkq\" (UID: \"d3959a6a-ea1a-489e-a441-3645144becc9\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-85d5c6dbcc-qhtkq" Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.712064 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d3959a6a-ea1a-489e-a441-3645144becc9-config\") pod \"dnsmasq-dnsmasq-85d5c6dbcc-qhtkq\" (UID: \"d3959a6a-ea1a-489e-a441-3645144becc9\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-85d5c6dbcc-qhtkq" Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.712115 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d3959a6a-ea1a-489e-a441-3645144becc9-dns-swift-storage-0\") pod \"dnsmasq-dnsmasq-85d5c6dbcc-qhtkq\" (UID: \"d3959a6a-ea1a-489e-a441-3645144becc9\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-85d5c6dbcc-qhtkq" Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.712155 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/d3959a6a-ea1a-489e-a441-3645144becc9-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-85d5c6dbcc-qhtkq\" (UID: 
\"d3959a6a-ea1a-489e-a441-3645144becc9\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-85d5c6dbcc-qhtkq" Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.712344 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fgt4x\" (UniqueName: \"kubernetes.io/projected/d3959a6a-ea1a-489e-a441-3645144becc9-kube-api-access-fgt4x\") pod \"dnsmasq-dnsmasq-85d5c6dbcc-qhtkq\" (UID: \"d3959a6a-ea1a-489e-a441-3645144becc9\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-85d5c6dbcc-qhtkq" Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.713225 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d3959a6a-ea1a-489e-a441-3645144becc9-dns-swift-storage-0\") pod \"dnsmasq-dnsmasq-85d5c6dbcc-qhtkq\" (UID: \"d3959a6a-ea1a-489e-a441-3645144becc9\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-85d5c6dbcc-qhtkq" Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.715900 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d3959a6a-ea1a-489e-a441-3645144becc9-config\") pod \"dnsmasq-dnsmasq-85d5c6dbcc-qhtkq\" (UID: \"d3959a6a-ea1a-489e-a441-3645144becc9\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-85d5c6dbcc-qhtkq" Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.716079 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/d3959a6a-ea1a-489e-a441-3645144becc9-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-85d5c6dbcc-qhtkq\" (UID: \"d3959a6a-ea1a-489e-a441-3645144becc9\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-85d5c6dbcc-qhtkq" Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.732110 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fgt4x\" (UniqueName: \"kubernetes.io/projected/d3959a6a-ea1a-489e-a441-3645144becc9-kube-api-access-fgt4x\") pod \"dnsmasq-dnsmasq-85d5c6dbcc-qhtkq\" (UID: \"d3959a6a-ea1a-489e-a441-3645144becc9\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-85d5c6dbcc-qhtkq" Jan 20 17:07:12 crc kubenswrapper[4558]: I0120 17:07:12.816388 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-85d5c6dbcc-qhtkq" Jan 20 17:07:13 crc kubenswrapper[4558]: I0120 17:07:13.210456 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-85d5c6dbcc-qhtkq"] Jan 20 17:07:13 crc kubenswrapper[4558]: I0120 17:07:13.325253 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-85d5c6dbcc-qhtkq" event={"ID":"d3959a6a-ea1a-489e-a441-3645144becc9","Type":"ContainerStarted","Data":"c8b7f616c8c91d42aff417ce11a01c56bb1eaab75ef988ddada50e8ab04205c6"} Jan 20 17:07:13 crc kubenswrapper[4558]: I0120 17:07:13.986615 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-db-sync-r7zgt"] Jan 20 17:07:13 crc kubenswrapper[4558]: I0120 17:07:13.987991 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-db-sync-r7zgt" Jan 20 17:07:13 crc kubenswrapper[4558]: I0120 17:07:13.990719 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-config-data" Jan 20 17:07:13 crc kubenswrapper[4558]: I0120 17:07:13.990780 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-glance-dockercfg-9qc5r" Jan 20 17:07:14 crc kubenswrapper[4558]: I0120 17:07:14.000785 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-db-sync-r7zgt"] Jan 20 17:07:14 crc kubenswrapper[4558]: I0120 17:07:14.138979 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/274045d4-1273-418b-9fd4-ba6886f98af6-combined-ca-bundle\") pod \"glance-db-sync-r7zgt\" (UID: \"274045d4-1273-418b-9fd4-ba6886f98af6\") " pod="openstack-kuttl-tests/glance-db-sync-r7zgt" Jan 20 17:07:14 crc kubenswrapper[4558]: I0120 17:07:14.139097 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qch9q\" (UniqueName: \"kubernetes.io/projected/274045d4-1273-418b-9fd4-ba6886f98af6-kube-api-access-qch9q\") pod \"glance-db-sync-r7zgt\" (UID: \"274045d4-1273-418b-9fd4-ba6886f98af6\") " pod="openstack-kuttl-tests/glance-db-sync-r7zgt" Jan 20 17:07:14 crc kubenswrapper[4558]: I0120 17:07:14.139135 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/274045d4-1273-418b-9fd4-ba6886f98af6-db-sync-config-data\") pod \"glance-db-sync-r7zgt\" (UID: \"274045d4-1273-418b-9fd4-ba6886f98af6\") " pod="openstack-kuttl-tests/glance-db-sync-r7zgt" Jan 20 17:07:14 crc kubenswrapper[4558]: I0120 17:07:14.139249 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/274045d4-1273-418b-9fd4-ba6886f98af6-config-data\") pod \"glance-db-sync-r7zgt\" (UID: \"274045d4-1273-418b-9fd4-ba6886f98af6\") " pod="openstack-kuttl-tests/glance-db-sync-r7zgt" Jan 20 17:07:14 crc kubenswrapper[4558]: I0120 17:07:14.240488 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qch9q\" (UniqueName: \"kubernetes.io/projected/274045d4-1273-418b-9fd4-ba6886f98af6-kube-api-access-qch9q\") pod \"glance-db-sync-r7zgt\" (UID: \"274045d4-1273-418b-9fd4-ba6886f98af6\") " pod="openstack-kuttl-tests/glance-db-sync-r7zgt" Jan 20 17:07:14 crc kubenswrapper[4558]: I0120 17:07:14.240531 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/274045d4-1273-418b-9fd4-ba6886f98af6-db-sync-config-data\") pod \"glance-db-sync-r7zgt\" (UID: \"274045d4-1273-418b-9fd4-ba6886f98af6\") " pod="openstack-kuttl-tests/glance-db-sync-r7zgt" Jan 20 17:07:14 crc kubenswrapper[4558]: I0120 17:07:14.240602 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/274045d4-1273-418b-9fd4-ba6886f98af6-config-data\") pod \"glance-db-sync-r7zgt\" (UID: \"274045d4-1273-418b-9fd4-ba6886f98af6\") " pod="openstack-kuttl-tests/glance-db-sync-r7zgt" Jan 20 17:07:14 crc kubenswrapper[4558]: I0120 17:07:14.240650 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/274045d4-1273-418b-9fd4-ba6886f98af6-combined-ca-bundle\") pod \"glance-db-sync-r7zgt\" (UID: \"274045d4-1273-418b-9fd4-ba6886f98af6\") " pod="openstack-kuttl-tests/glance-db-sync-r7zgt" Jan 20 17:07:14 crc kubenswrapper[4558]: I0120 17:07:14.245923 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/274045d4-1273-418b-9fd4-ba6886f98af6-config-data\") pod \"glance-db-sync-r7zgt\" (UID: \"274045d4-1273-418b-9fd4-ba6886f98af6\") " pod="openstack-kuttl-tests/glance-db-sync-r7zgt" Jan 20 17:07:14 crc kubenswrapper[4558]: I0120 17:07:14.246236 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/274045d4-1273-418b-9fd4-ba6886f98af6-combined-ca-bundle\") pod \"glance-db-sync-r7zgt\" (UID: \"274045d4-1273-418b-9fd4-ba6886f98af6\") " pod="openstack-kuttl-tests/glance-db-sync-r7zgt" Jan 20 17:07:14 crc kubenswrapper[4558]: I0120 17:07:14.246508 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/274045d4-1273-418b-9fd4-ba6886f98af6-db-sync-config-data\") pod \"glance-db-sync-r7zgt\" (UID: \"274045d4-1273-418b-9fd4-ba6886f98af6\") " pod="openstack-kuttl-tests/glance-db-sync-r7zgt" Jan 20 17:07:14 crc kubenswrapper[4558]: I0120 17:07:14.254070 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qch9q\" (UniqueName: \"kubernetes.io/projected/274045d4-1273-418b-9fd4-ba6886f98af6-kube-api-access-qch9q\") pod \"glance-db-sync-r7zgt\" (UID: \"274045d4-1273-418b-9fd4-ba6886f98af6\") " pod="openstack-kuttl-tests/glance-db-sync-r7zgt" Jan 20 17:07:14 crc kubenswrapper[4558]: I0120 17:07:14.309868 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-db-sync-r7zgt" Jan 20 17:07:14 crc kubenswrapper[4558]: I0120 17:07:14.337464 4558 generic.go:334] "Generic (PLEG): container finished" podID="d3959a6a-ea1a-489e-a441-3645144becc9" containerID="cfa88af8b8d3146e3428eb9c96d266f627f3f53714a121693b6baa9ba5c6c511" exitCode=0 Jan 20 17:07:14 crc kubenswrapper[4558]: I0120 17:07:14.337514 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-85d5c6dbcc-qhtkq" event={"ID":"d3959a6a-ea1a-489e-a441-3645144becc9","Type":"ContainerDied","Data":"cfa88af8b8d3146e3428eb9c96d266f627f3f53714a121693b6baa9ba5c6c511"} Jan 20 17:07:14 crc kubenswrapper[4558]: I0120 17:07:14.714741 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-db-sync-r7zgt"] Jan 20 17:07:15 crc kubenswrapper[4558]: I0120 17:07:15.347712 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-sync-r7zgt" event={"ID":"274045d4-1273-418b-9fd4-ba6886f98af6","Type":"ContainerStarted","Data":"22729bdda56e8d2c20ee63a4a6fb57799ef450e9e4e5a8468b37e26fe04a78b5"} Jan 20 17:07:15 crc kubenswrapper[4558]: I0120 17:07:15.348803 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-sync-r7zgt" event={"ID":"274045d4-1273-418b-9fd4-ba6886f98af6","Type":"ContainerStarted","Data":"b75f072e23ded169eefac7bdaa87d04e07685a71919df3e5693cd1a0d81e317f"} Jan 20 17:07:15 crc kubenswrapper[4558]: I0120 17:07:15.350386 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-85d5c6dbcc-qhtkq" event={"ID":"d3959a6a-ea1a-489e-a441-3645144becc9","Type":"ContainerStarted","Data":"178a4671de8370a23a24bda120d247c615e3cd49322b5534733e470d52a6d3ef"} Jan 20 17:07:15 crc kubenswrapper[4558]: I0120 17:07:15.366075 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-db-sync-r7zgt" podStartSLOduration=2.366057694 podStartE2EDuration="2.366057694s" podCreationTimestamp="2026-01-20 17:07:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:07:15.361264151 +0000 UTC m=+1529.121602118" watchObservedRunningTime="2026-01-20 17:07:15.366057694 +0000 UTC m=+1529.126395651" Jan 20 17:07:15 crc kubenswrapper[4558]: I0120 17:07:15.390654 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-85d5c6dbcc-qhtkq" podStartSLOduration=3.390638765 podStartE2EDuration="3.390638765s" podCreationTimestamp="2026-01-20 17:07:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:07:15.382877672 +0000 UTC m=+1529.143215639" watchObservedRunningTime="2026-01-20 17:07:15.390638765 +0000 UTC m=+1529.150976732" Jan 20 17:07:16 crc kubenswrapper[4558]: I0120 17:07:16.360471 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-85d5c6dbcc-qhtkq" Jan 20 17:07:16 crc kubenswrapper[4558]: I0120 17:07:16.935933 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/root-account-create-update-q5wjq"] Jan 20 17:07:16 crc kubenswrapper[4558]: I0120 17:07:16.938193 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-q5wjq" Jan 20 17:07:16 crc kubenswrapper[4558]: I0120 17:07:16.939909 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"openstack-cell1-mariadb-root-db-secret" Jan 20 17:07:16 crc kubenswrapper[4558]: I0120 17:07:16.944501 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-q5wjq"] Jan 20 17:07:17 crc kubenswrapper[4558]: I0120 17:07:17.090721 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/500a1e2f-ae7f-41bf-9b31-c9c69386c774-operator-scripts\") pod \"root-account-create-update-q5wjq\" (UID: \"500a1e2f-ae7f-41bf-9b31-c9c69386c774\") " pod="openstack-kuttl-tests/root-account-create-update-q5wjq" Jan 20 17:07:17 crc kubenswrapper[4558]: I0120 17:07:17.091182 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xs5c8\" (UniqueName: \"kubernetes.io/projected/500a1e2f-ae7f-41bf-9b31-c9c69386c774-kube-api-access-xs5c8\") pod \"root-account-create-update-q5wjq\" (UID: \"500a1e2f-ae7f-41bf-9b31-c9c69386c774\") " pod="openstack-kuttl-tests/root-account-create-update-q5wjq" Jan 20 17:07:17 crc kubenswrapper[4558]: I0120 17:07:17.192751 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/500a1e2f-ae7f-41bf-9b31-c9c69386c774-operator-scripts\") pod \"root-account-create-update-q5wjq\" (UID: \"500a1e2f-ae7f-41bf-9b31-c9c69386c774\") " pod="openstack-kuttl-tests/root-account-create-update-q5wjq" Jan 20 17:07:17 crc kubenswrapper[4558]: I0120 17:07:17.192849 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xs5c8\" (UniqueName: \"kubernetes.io/projected/500a1e2f-ae7f-41bf-9b31-c9c69386c774-kube-api-access-xs5c8\") pod \"root-account-create-update-q5wjq\" (UID: \"500a1e2f-ae7f-41bf-9b31-c9c69386c774\") " pod="openstack-kuttl-tests/root-account-create-update-q5wjq" Jan 20 17:07:17 crc kubenswrapper[4558]: I0120 17:07:17.193810 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/500a1e2f-ae7f-41bf-9b31-c9c69386c774-operator-scripts\") pod \"root-account-create-update-q5wjq\" (UID: \"500a1e2f-ae7f-41bf-9b31-c9c69386c774\") " pod="openstack-kuttl-tests/root-account-create-update-q5wjq" Jan 20 17:07:17 crc kubenswrapper[4558]: I0120 17:07:17.224666 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xs5c8\" (UniqueName: \"kubernetes.io/projected/500a1e2f-ae7f-41bf-9b31-c9c69386c774-kube-api-access-xs5c8\") pod \"root-account-create-update-q5wjq\" (UID: \"500a1e2f-ae7f-41bf-9b31-c9c69386c774\") " pod="openstack-kuttl-tests/root-account-create-update-q5wjq" Jan 20 17:07:17 crc kubenswrapper[4558]: I0120 17:07:17.258399 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-q5wjq" Jan 20 17:07:17 crc kubenswrapper[4558]: I0120 17:07:17.635275 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:07:17 crc kubenswrapper[4558]: W0120 17:07:17.671473 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod500a1e2f_ae7f_41bf_9b31_c9c69386c774.slice/crio-d30bbdc1162f1be8b961ac4a1b97eb92d82d370290d9ebe105d716980317aa06 WatchSource:0}: Error finding container d30bbdc1162f1be8b961ac4a1b97eb92d82d370290d9ebe105d716980317aa06: Status 404 returned error can't find the container with id d30bbdc1162f1be8b961ac4a1b97eb92d82d370290d9ebe105d716980317aa06 Jan 20 17:07:17 crc kubenswrapper[4558]: I0120 17:07:17.671636 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-q5wjq"] Jan 20 17:07:18 crc kubenswrapper[4558]: I0120 17:07:18.379120 4558 generic.go:334] "Generic (PLEG): container finished" podID="39403277-bf62-47c1-8e86-cdec59f2da7b" containerID="2a031a75cc169d112132928bc181cdf5e2a9498a2ceea6c4d92265e6e5d5799c" exitCode=0 Jan 20 17:07:18 crc kubenswrapper[4558]: I0120 17:07:18.379219 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" event={"ID":"39403277-bf62-47c1-8e86-cdec59f2da7b","Type":"ContainerDied","Data":"2a031a75cc169d112132928bc181cdf5e2a9498a2ceea6c4d92265e6e5d5799c"} Jan 20 17:07:18 crc kubenswrapper[4558]: I0120 17:07:18.386733 4558 generic.go:334] "Generic (PLEG): container finished" podID="07fab146-67be-42ba-b263-ee19fe95720b" containerID="89eca221d9e593e3a9aecdfa6ea1b10c75f176fb44073dcdb0a8d8482121ba81" exitCode=0 Jan 20 17:07:18 crc kubenswrapper[4558]: I0120 17:07:18.386815 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-0" event={"ID":"07fab146-67be-42ba-b263-ee19fe95720b","Type":"ContainerDied","Data":"89eca221d9e593e3a9aecdfa6ea1b10c75f176fb44073dcdb0a8d8482121ba81"} Jan 20 17:07:18 crc kubenswrapper[4558]: I0120 17:07:18.391691 4558 generic.go:334] "Generic (PLEG): container finished" podID="500a1e2f-ae7f-41bf-9b31-c9c69386c774" containerID="d08a36e722fef6d092b220311cda4f15ef2817b2b5440e696ca33c3ddaa6fac8" exitCode=0 Jan 20 17:07:18 crc kubenswrapper[4558]: I0120 17:07:18.391782 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-q5wjq" event={"ID":"500a1e2f-ae7f-41bf-9b31-c9c69386c774","Type":"ContainerDied","Data":"d08a36e722fef6d092b220311cda4f15ef2817b2b5440e696ca33c3ddaa6fac8"} Jan 20 17:07:18 crc kubenswrapper[4558]: I0120 17:07:18.391871 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-q5wjq" event={"ID":"500a1e2f-ae7f-41bf-9b31-c9c69386c774","Type":"ContainerStarted","Data":"d30bbdc1162f1be8b961ac4a1b97eb92d82d370290d9ebe105d716980317aa06"} Jan 20 17:07:19 crc kubenswrapper[4558]: I0120 17:07:19.401878 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" event={"ID":"39403277-bf62-47c1-8e86-cdec59f2da7b","Type":"ContainerStarted","Data":"564f1f778e4e883e5ac1e2a892f2ceadcb69333aff26325a33259bf543a29685"} Jan 20 17:07:19 crc kubenswrapper[4558]: I0120 17:07:19.402396 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 
20 17:07:19 crc kubenswrapper[4558]: I0120 17:07:19.404083 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-0" event={"ID":"07fab146-67be-42ba-b263-ee19fe95720b","Type":"ContainerStarted","Data":"ed0d0cf0a577509bde5ed7f30ee7d1552be82db06a4009c9c9826bf9daa8477a"} Jan 20 17:07:19 crc kubenswrapper[4558]: I0120 17:07:19.404336 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:07:19 crc kubenswrapper[4558]: I0120 17:07:19.405747 4558 generic.go:334] "Generic (PLEG): container finished" podID="274045d4-1273-418b-9fd4-ba6886f98af6" containerID="22729bdda56e8d2c20ee63a4a6fb57799ef450e9e4e5a8468b37e26fe04a78b5" exitCode=0 Jan 20 17:07:19 crc kubenswrapper[4558]: I0120 17:07:19.405926 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-sync-r7zgt" event={"ID":"274045d4-1273-418b-9fd4-ba6886f98af6","Type":"ContainerDied","Data":"22729bdda56e8d2c20ee63a4a6fb57799ef450e9e4e5a8468b37e26fe04a78b5"} Jan 20 17:07:19 crc kubenswrapper[4558]: I0120 17:07:19.421954 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" podStartSLOduration=36.421944468 podStartE2EDuration="36.421944468s" podCreationTimestamp="2026-01-20 17:06:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:07:19.417004489 +0000 UTC m=+1533.177342455" watchObservedRunningTime="2026-01-20 17:07:19.421944468 +0000 UTC m=+1533.182282435" Jan 20 17:07:19 crc kubenswrapper[4558]: I0120 17:07:19.442657 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/rabbitmq-server-0" podStartSLOduration=36.442645563 podStartE2EDuration="36.442645563s" podCreationTimestamp="2026-01-20 17:06:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:07:19.435773381 +0000 UTC m=+1533.196111358" watchObservedRunningTime="2026-01-20 17:07:19.442645563 +0000 UTC m=+1533.202983530" Jan 20 17:07:19 crc kubenswrapper[4558]: I0120 17:07:19.673765 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-q5wjq" Jan 20 17:07:19 crc kubenswrapper[4558]: I0120 17:07:19.846726 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/500a1e2f-ae7f-41bf-9b31-c9c69386c774-operator-scripts\") pod \"500a1e2f-ae7f-41bf-9b31-c9c69386c774\" (UID: \"500a1e2f-ae7f-41bf-9b31-c9c69386c774\") " Jan 20 17:07:19 crc kubenswrapper[4558]: I0120 17:07:19.846796 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xs5c8\" (UniqueName: \"kubernetes.io/projected/500a1e2f-ae7f-41bf-9b31-c9c69386c774-kube-api-access-xs5c8\") pod \"500a1e2f-ae7f-41bf-9b31-c9c69386c774\" (UID: \"500a1e2f-ae7f-41bf-9b31-c9c69386c774\") " Jan 20 17:07:19 crc kubenswrapper[4558]: I0120 17:07:19.847149 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/500a1e2f-ae7f-41bf-9b31-c9c69386c774-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "500a1e2f-ae7f-41bf-9b31-c9c69386c774" (UID: "500a1e2f-ae7f-41bf-9b31-c9c69386c774"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:07:19 crc kubenswrapper[4558]: I0120 17:07:19.852422 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/500a1e2f-ae7f-41bf-9b31-c9c69386c774-kube-api-access-xs5c8" (OuterVolumeSpecName: "kube-api-access-xs5c8") pod "500a1e2f-ae7f-41bf-9b31-c9c69386c774" (UID: "500a1e2f-ae7f-41bf-9b31-c9c69386c774"). InnerVolumeSpecName "kube-api-access-xs5c8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:07:19 crc kubenswrapper[4558]: I0120 17:07:19.948098 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/500a1e2f-ae7f-41bf-9b31-c9c69386c774-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:19 crc kubenswrapper[4558]: I0120 17:07:19.948136 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xs5c8\" (UniqueName: \"kubernetes.io/projected/500a1e2f-ae7f-41bf-9b31-c9c69386c774-kube-api-access-xs5c8\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:20 crc kubenswrapper[4558]: I0120 17:07:20.416689 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-q5wjq" event={"ID":"500a1e2f-ae7f-41bf-9b31-c9c69386c774","Type":"ContainerDied","Data":"d30bbdc1162f1be8b961ac4a1b97eb92d82d370290d9ebe105d716980317aa06"} Jan 20 17:07:20 crc kubenswrapper[4558]: I0120 17:07:20.416803 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d30bbdc1162f1be8b961ac4a1b97eb92d82d370290d9ebe105d716980317aa06" Jan 20 17:07:20 crc kubenswrapper[4558]: I0120 17:07:20.416866 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-q5wjq" Jan 20 17:07:20 crc kubenswrapper[4558]: I0120 17:07:20.667392 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-db-sync-r7zgt" Jan 20 17:07:20 crc kubenswrapper[4558]: I0120 17:07:20.762409 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qch9q\" (UniqueName: \"kubernetes.io/projected/274045d4-1273-418b-9fd4-ba6886f98af6-kube-api-access-qch9q\") pod \"274045d4-1273-418b-9fd4-ba6886f98af6\" (UID: \"274045d4-1273-418b-9fd4-ba6886f98af6\") " Jan 20 17:07:20 crc kubenswrapper[4558]: I0120 17:07:20.762495 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/274045d4-1273-418b-9fd4-ba6886f98af6-combined-ca-bundle\") pod \"274045d4-1273-418b-9fd4-ba6886f98af6\" (UID: \"274045d4-1273-418b-9fd4-ba6886f98af6\") " Jan 20 17:07:20 crc kubenswrapper[4558]: I0120 17:07:20.762520 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/274045d4-1273-418b-9fd4-ba6886f98af6-config-data\") pod \"274045d4-1273-418b-9fd4-ba6886f98af6\" (UID: \"274045d4-1273-418b-9fd4-ba6886f98af6\") " Jan 20 17:07:20 crc kubenswrapper[4558]: I0120 17:07:20.762670 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/274045d4-1273-418b-9fd4-ba6886f98af6-db-sync-config-data\") pod \"274045d4-1273-418b-9fd4-ba6886f98af6\" (UID: \"274045d4-1273-418b-9fd4-ba6886f98af6\") " Jan 20 17:07:20 crc kubenswrapper[4558]: I0120 17:07:20.766811 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/274045d4-1273-418b-9fd4-ba6886f98af6-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "274045d4-1273-418b-9fd4-ba6886f98af6" (UID: "274045d4-1273-418b-9fd4-ba6886f98af6"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:07:20 crc kubenswrapper[4558]: I0120 17:07:20.767514 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/274045d4-1273-418b-9fd4-ba6886f98af6-kube-api-access-qch9q" (OuterVolumeSpecName: "kube-api-access-qch9q") pod "274045d4-1273-418b-9fd4-ba6886f98af6" (UID: "274045d4-1273-418b-9fd4-ba6886f98af6"). InnerVolumeSpecName "kube-api-access-qch9q". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:07:20 crc kubenswrapper[4558]: I0120 17:07:20.782488 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/274045d4-1273-418b-9fd4-ba6886f98af6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "274045d4-1273-418b-9fd4-ba6886f98af6" (UID: "274045d4-1273-418b-9fd4-ba6886f98af6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:07:20 crc kubenswrapper[4558]: I0120 17:07:20.795410 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/274045d4-1273-418b-9fd4-ba6886f98af6-config-data" (OuterVolumeSpecName: "config-data") pod "274045d4-1273-418b-9fd4-ba6886f98af6" (UID: "274045d4-1273-418b-9fd4-ba6886f98af6"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:07:20 crc kubenswrapper[4558]: I0120 17:07:20.865470 4558 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/274045d4-1273-418b-9fd4-ba6886f98af6-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:20 crc kubenswrapper[4558]: I0120 17:07:20.865507 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qch9q\" (UniqueName: \"kubernetes.io/projected/274045d4-1273-418b-9fd4-ba6886f98af6-kube-api-access-qch9q\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:20 crc kubenswrapper[4558]: I0120 17:07:20.865519 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/274045d4-1273-418b-9fd4-ba6886f98af6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:20 crc kubenswrapper[4558]: I0120 17:07:20.865530 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/274045d4-1273-418b-9fd4-ba6886f98af6-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:21 crc kubenswrapper[4558]: I0120 17:07:21.430774 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-sync-r7zgt" event={"ID":"274045d4-1273-418b-9fd4-ba6886f98af6","Type":"ContainerDied","Data":"b75f072e23ded169eefac7bdaa87d04e07685a71919df3e5693cd1a0d81e317f"} Jan 20 17:07:21 crc kubenswrapper[4558]: I0120 17:07:21.430831 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b75f072e23ded169eefac7bdaa87d04e07685a71919df3e5693cd1a0d81e317f" Jan 20 17:07:21 crc kubenswrapper[4558]: I0120 17:07:21.431067 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-db-sync-r7zgt" Jan 20 17:07:21 crc kubenswrapper[4558]: I0120 17:07:21.566275 4558 scope.go:117] "RemoveContainer" containerID="a711a286282347590079d2447ef37fa9ed0f11085d6d7a65f85e65c1c2df608f" Jan 20 17:07:21 crc kubenswrapper[4558]: E0120 17:07:21.566692 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:07:22 crc kubenswrapper[4558]: I0120 17:07:22.818310 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-85d5c6dbcc-qhtkq" Jan 20 17:07:22 crc kubenswrapper[4558]: I0120 17:07:22.873380 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-kwtcx"] Jan 20 17:07:22 crc kubenswrapper[4558]: I0120 17:07:22.873583 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-kwtcx" podUID="25a5f425-926a-40eb-8f77-fcc3cdd9880c" containerName="dnsmasq-dns" containerID="cri-o://00a971f299806aa024b2084c6e7df03107799f9177171fc10a718d8c3f3f3860" gracePeriod=10 Jan 20 17:07:23 crc kubenswrapper[4558]: I0120 17:07:23.316999 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-kwtcx" Jan 20 17:07:23 crc kubenswrapper[4558]: I0120 17:07:23.404912 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pfk8t\" (UniqueName: \"kubernetes.io/projected/25a5f425-926a-40eb-8f77-fcc3cdd9880c-kube-api-access-pfk8t\") pod \"25a5f425-926a-40eb-8f77-fcc3cdd9880c\" (UID: \"25a5f425-926a-40eb-8f77-fcc3cdd9880c\") " Jan 20 17:07:23 crc kubenswrapper[4558]: I0120 17:07:23.405026 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/25a5f425-926a-40eb-8f77-fcc3cdd9880c-dnsmasq-svc\") pod \"25a5f425-926a-40eb-8f77-fcc3cdd9880c\" (UID: \"25a5f425-926a-40eb-8f77-fcc3cdd9880c\") " Jan 20 17:07:23 crc kubenswrapper[4558]: I0120 17:07:23.405055 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/25a5f425-926a-40eb-8f77-fcc3cdd9880c-config\") pod \"25a5f425-926a-40eb-8f77-fcc3cdd9880c\" (UID: \"25a5f425-926a-40eb-8f77-fcc3cdd9880c\") " Jan 20 17:07:23 crc kubenswrapper[4558]: I0120 17:07:23.415425 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25a5f425-926a-40eb-8f77-fcc3cdd9880c-kube-api-access-pfk8t" (OuterVolumeSpecName: "kube-api-access-pfk8t") pod "25a5f425-926a-40eb-8f77-fcc3cdd9880c" (UID: "25a5f425-926a-40eb-8f77-fcc3cdd9880c"). InnerVolumeSpecName "kube-api-access-pfk8t". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:07:23 crc kubenswrapper[4558]: I0120 17:07:23.434531 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25a5f425-926a-40eb-8f77-fcc3cdd9880c-dnsmasq-svc" (OuterVolumeSpecName: "dnsmasq-svc") pod "25a5f425-926a-40eb-8f77-fcc3cdd9880c" (UID: "25a5f425-926a-40eb-8f77-fcc3cdd9880c"). InnerVolumeSpecName "dnsmasq-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:07:23 crc kubenswrapper[4558]: I0120 17:07:23.439018 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25a5f425-926a-40eb-8f77-fcc3cdd9880c-config" (OuterVolumeSpecName: "config") pod "25a5f425-926a-40eb-8f77-fcc3cdd9880c" (UID: "25a5f425-926a-40eb-8f77-fcc3cdd9880c"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:07:23 crc kubenswrapper[4558]: I0120 17:07:23.450605 4558 generic.go:334] "Generic (PLEG): container finished" podID="25a5f425-926a-40eb-8f77-fcc3cdd9880c" containerID="00a971f299806aa024b2084c6e7df03107799f9177171fc10a718d8c3f3f3860" exitCode=0 Jan 20 17:07:23 crc kubenswrapper[4558]: I0120 17:07:23.450652 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-kwtcx" event={"ID":"25a5f425-926a-40eb-8f77-fcc3cdd9880c","Type":"ContainerDied","Data":"00a971f299806aa024b2084c6e7df03107799f9177171fc10a718d8c3f3f3860"} Jan 20 17:07:23 crc kubenswrapper[4558]: I0120 17:07:23.450685 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-kwtcx" event={"ID":"25a5f425-926a-40eb-8f77-fcc3cdd9880c","Type":"ContainerDied","Data":"a4a807d9647e363790dd8d22220e3e3eba75c497a5707d099399fbee0d254fe8"} Jan 20 17:07:23 crc kubenswrapper[4558]: I0120 17:07:23.450704 4558 scope.go:117] "RemoveContainer" containerID="00a971f299806aa024b2084c6e7df03107799f9177171fc10a718d8c3f3f3860" Jan 20 17:07:23 crc kubenswrapper[4558]: I0120 17:07:23.450866 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-kwtcx" Jan 20 17:07:23 crc kubenswrapper[4558]: I0120 17:07:23.472977 4558 scope.go:117] "RemoveContainer" containerID="1780af7f16c0ecd4cd006248610583e6b3a6c467aa3d8978ecf2993a1d76cdc8" Jan 20 17:07:23 crc kubenswrapper[4558]: I0120 17:07:23.486293 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-kwtcx"] Jan 20 17:07:23 crc kubenswrapper[4558]: I0120 17:07:23.489972 4558 scope.go:117] "RemoveContainer" containerID="00a971f299806aa024b2084c6e7df03107799f9177171fc10a718d8c3f3f3860" Jan 20 17:07:23 crc kubenswrapper[4558]: E0120 17:07:23.490429 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"00a971f299806aa024b2084c6e7df03107799f9177171fc10a718d8c3f3f3860\": container with ID starting with 00a971f299806aa024b2084c6e7df03107799f9177171fc10a718d8c3f3f3860 not found: ID does not exist" containerID="00a971f299806aa024b2084c6e7df03107799f9177171fc10a718d8c3f3f3860" Jan 20 17:07:23 crc kubenswrapper[4558]: I0120 17:07:23.490465 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"00a971f299806aa024b2084c6e7df03107799f9177171fc10a718d8c3f3f3860"} err="failed to get container status \"00a971f299806aa024b2084c6e7df03107799f9177171fc10a718d8c3f3f3860\": rpc error: code = NotFound desc = could not find container \"00a971f299806aa024b2084c6e7df03107799f9177171fc10a718d8c3f3f3860\": container with ID starting with 00a971f299806aa024b2084c6e7df03107799f9177171fc10a718d8c3f3f3860 not found: ID does not exist" Jan 20 17:07:23 crc kubenswrapper[4558]: I0120 17:07:23.490485 4558 scope.go:117] "RemoveContainer" containerID="1780af7f16c0ecd4cd006248610583e6b3a6c467aa3d8978ecf2993a1d76cdc8" Jan 20 17:07:23 crc kubenswrapper[4558]: E0120 17:07:23.490787 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1780af7f16c0ecd4cd006248610583e6b3a6c467aa3d8978ecf2993a1d76cdc8\": container with ID starting with 1780af7f16c0ecd4cd006248610583e6b3a6c467aa3d8978ecf2993a1d76cdc8 not found: ID does not exist" 
containerID="1780af7f16c0ecd4cd006248610583e6b3a6c467aa3d8978ecf2993a1d76cdc8" Jan 20 17:07:23 crc kubenswrapper[4558]: I0120 17:07:23.490823 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1780af7f16c0ecd4cd006248610583e6b3a6c467aa3d8978ecf2993a1d76cdc8"} err="failed to get container status \"1780af7f16c0ecd4cd006248610583e6b3a6c467aa3d8978ecf2993a1d76cdc8\": rpc error: code = NotFound desc = could not find container \"1780af7f16c0ecd4cd006248610583e6b3a6c467aa3d8978ecf2993a1d76cdc8\": container with ID starting with 1780af7f16c0ecd4cd006248610583e6b3a6c467aa3d8978ecf2993a1d76cdc8 not found: ID does not exist" Jan 20 17:07:23 crc kubenswrapper[4558]: I0120 17:07:23.492217 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-kwtcx"] Jan 20 17:07:23 crc kubenswrapper[4558]: I0120 17:07:23.506876 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pfk8t\" (UniqueName: \"kubernetes.io/projected/25a5f425-926a-40eb-8f77-fcc3cdd9880c-kube-api-access-pfk8t\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:23 crc kubenswrapper[4558]: I0120 17:07:23.506901 4558 reconciler_common.go:293] "Volume detached for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/25a5f425-926a-40eb-8f77-fcc3cdd9880c-dnsmasq-svc\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:23 crc kubenswrapper[4558]: I0120 17:07:23.506913 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/25a5f425-926a-40eb-8f77-fcc3cdd9880c-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:24 crc kubenswrapper[4558]: I0120 17:07:24.573906 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25a5f425-926a-40eb-8f77-fcc3cdd9880c" path="/var/lib/kubelet/pods/25a5f425-926a-40eb-8f77-fcc3cdd9880c/volumes" Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.116463 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.313390 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.447121 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-db-create-vsd8c"] Jan 20 17:07:35 crc kubenswrapper[4558]: E0120 17:07:35.447401 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25a5f425-926a-40eb-8f77-fcc3cdd9880c" containerName="dnsmasq-dns" Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.447418 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="25a5f425-926a-40eb-8f77-fcc3cdd9880c" containerName="dnsmasq-dns" Jan 20 17:07:35 crc kubenswrapper[4558]: E0120 17:07:35.447428 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="500a1e2f-ae7f-41bf-9b31-c9c69386c774" containerName="mariadb-account-create-update" Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.447434 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="500a1e2f-ae7f-41bf-9b31-c9c69386c774" containerName="mariadb-account-create-update" Jan 20 17:07:35 crc kubenswrapper[4558]: E0120 17:07:35.447446 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25a5f425-926a-40eb-8f77-fcc3cdd9880c" containerName="init" Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.447452 4558 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="25a5f425-926a-40eb-8f77-fcc3cdd9880c" containerName="init" Jan 20 17:07:35 crc kubenswrapper[4558]: E0120 17:07:35.447463 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="274045d4-1273-418b-9fd4-ba6886f98af6" containerName="glance-db-sync" Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.447468 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="274045d4-1273-418b-9fd4-ba6886f98af6" containerName="glance-db-sync" Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.463677 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="500a1e2f-ae7f-41bf-9b31-c9c69386c774" containerName="mariadb-account-create-update" Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.463720 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="274045d4-1273-418b-9fd4-ba6886f98af6" containerName="glance-db-sync" Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.463749 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="25a5f425-926a-40eb-8f77-fcc3cdd9880c" containerName="dnsmasq-dns" Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.464493 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-6a24-account-create-update-zn7sv"] Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.466488 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-db-create-vsd8c" Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.470673 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-6a24-account-create-update-zn7sv"] Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.472699 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-6a24-account-create-update-zn7sv" Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.474355 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-db-secret" Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.485558 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-db-create-vsd8c"] Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.553643 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-db-create-4cht4"] Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.554918 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-db-create-4cht4" Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.557292 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-3e15-account-create-update-v7rcr"] Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.558433 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-3e15-account-create-update-v7rcr" Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.561488 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-db-create-4cht4"] Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.562047 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-db-secret" Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.565843 4558 scope.go:117] "RemoveContainer" containerID="a711a286282347590079d2447ef37fa9ed0f11085d6d7a65f85e65c1c2df608f" Jan 20 17:07:35 crc kubenswrapper[4558]: E0120 17:07:35.566083 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.566115 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-3e15-account-create-update-v7rcr"] Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.592572 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1b1bf8d2-dad2-40e2-bf27-1a536f7a2ce5-operator-scripts\") pod \"cinder-6a24-account-create-update-zn7sv\" (UID: \"1b1bf8d2-dad2-40e2-bf27-1a536f7a2ce5\") " pod="openstack-kuttl-tests/cinder-6a24-account-create-update-zn7sv" Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.592672 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wgf65\" (UniqueName: \"kubernetes.io/projected/906a9a7a-653d-44c7-ba0a-ec3b65ea7098-kube-api-access-wgf65\") pod \"cinder-db-create-vsd8c\" (UID: \"906a9a7a-653d-44c7-ba0a-ec3b65ea7098\") " pod="openstack-kuttl-tests/cinder-db-create-vsd8c" Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.592702 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/906a9a7a-653d-44c7-ba0a-ec3b65ea7098-operator-scripts\") pod \"cinder-db-create-vsd8c\" (UID: \"906a9a7a-653d-44c7-ba0a-ec3b65ea7098\") " pod="openstack-kuttl-tests/cinder-db-create-vsd8c" Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.592719 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qmdh5\" (UniqueName: \"kubernetes.io/projected/1b1bf8d2-dad2-40e2-bf27-1a536f7a2ce5-kube-api-access-qmdh5\") pod \"cinder-6a24-account-create-update-zn7sv\" (UID: \"1b1bf8d2-dad2-40e2-bf27-1a536f7a2ce5\") " pod="openstack-kuttl-tests/cinder-6a24-account-create-update-zn7sv" Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.645646 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-db-create-8q2dt"] Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.647511 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-db-create-8q2dt" Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.653817 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-456e-account-create-update-6vj76"] Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.655039 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-456e-account-create-update-6vj76" Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.656914 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-db-secret" Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.662790 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-db-create-8q2dt"] Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.667395 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-456e-account-create-update-6vj76"] Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.694847 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tns78\" (UniqueName: \"kubernetes.io/projected/33161661-cb8b-4f8b-b3da-c6c8bc9a0cd8-kube-api-access-tns78\") pod \"barbican-3e15-account-create-update-v7rcr\" (UID: \"33161661-cb8b-4f8b-b3da-c6c8bc9a0cd8\") " pod="openstack-kuttl-tests/barbican-3e15-account-create-update-v7rcr" Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.695077 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1b1bf8d2-dad2-40e2-bf27-1a536f7a2ce5-operator-scripts\") pod \"cinder-6a24-account-create-update-zn7sv\" (UID: \"1b1bf8d2-dad2-40e2-bf27-1a536f7a2ce5\") " pod="openstack-kuttl-tests/cinder-6a24-account-create-update-zn7sv" Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.695126 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/42aec55d-662f-4390-8b64-d12da6a53863-operator-scripts\") pod \"barbican-db-create-4cht4\" (UID: \"42aec55d-662f-4390-8b64-d12da6a53863\") " pod="openstack-kuttl-tests/barbican-db-create-4cht4" Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.695193 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wgf65\" (UniqueName: \"kubernetes.io/projected/906a9a7a-653d-44c7-ba0a-ec3b65ea7098-kube-api-access-wgf65\") pod \"cinder-db-create-vsd8c\" (UID: \"906a9a7a-653d-44c7-ba0a-ec3b65ea7098\") " pod="openstack-kuttl-tests/cinder-db-create-vsd8c" Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.695218 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/906a9a7a-653d-44c7-ba0a-ec3b65ea7098-operator-scripts\") pod \"cinder-db-create-vsd8c\" (UID: \"906a9a7a-653d-44c7-ba0a-ec3b65ea7098\") " pod="openstack-kuttl-tests/cinder-db-create-vsd8c" Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.695236 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qmdh5\" (UniqueName: \"kubernetes.io/projected/1b1bf8d2-dad2-40e2-bf27-1a536f7a2ce5-kube-api-access-qmdh5\") pod \"cinder-6a24-account-create-update-zn7sv\" (UID: \"1b1bf8d2-dad2-40e2-bf27-1a536f7a2ce5\") " pod="openstack-kuttl-tests/cinder-6a24-account-create-update-zn7sv" Jan 20 17:07:35 
crc kubenswrapper[4558]: I0120 17:07:35.695303 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/33161661-cb8b-4f8b-b3da-c6c8bc9a0cd8-operator-scripts\") pod \"barbican-3e15-account-create-update-v7rcr\" (UID: \"33161661-cb8b-4f8b-b3da-c6c8bc9a0cd8\") " pod="openstack-kuttl-tests/barbican-3e15-account-create-update-v7rcr" Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.695340 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dlcbz\" (UniqueName: \"kubernetes.io/projected/42aec55d-662f-4390-8b64-d12da6a53863-kube-api-access-dlcbz\") pod \"barbican-db-create-4cht4\" (UID: \"42aec55d-662f-4390-8b64-d12da6a53863\") " pod="openstack-kuttl-tests/barbican-db-create-4cht4" Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.696393 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/906a9a7a-653d-44c7-ba0a-ec3b65ea7098-operator-scripts\") pod \"cinder-db-create-vsd8c\" (UID: \"906a9a7a-653d-44c7-ba0a-ec3b65ea7098\") " pod="openstack-kuttl-tests/cinder-db-create-vsd8c" Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.697194 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1b1bf8d2-dad2-40e2-bf27-1a536f7a2ce5-operator-scripts\") pod \"cinder-6a24-account-create-update-zn7sv\" (UID: \"1b1bf8d2-dad2-40e2-bf27-1a536f7a2ce5\") " pod="openstack-kuttl-tests/cinder-6a24-account-create-update-zn7sv" Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.700223 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-db-sync-nrcvr"] Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.702022 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-db-sync-nrcvr" Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.703780 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-scripts" Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.703913 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-keystone-dockercfg-lcqb5" Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.703923 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone" Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.704230 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-config-data" Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.705748 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-db-sync-nrcvr"] Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.718851 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qmdh5\" (UniqueName: \"kubernetes.io/projected/1b1bf8d2-dad2-40e2-bf27-1a536f7a2ce5-kube-api-access-qmdh5\") pod \"cinder-6a24-account-create-update-zn7sv\" (UID: \"1b1bf8d2-dad2-40e2-bf27-1a536f7a2ce5\") " pod="openstack-kuttl-tests/cinder-6a24-account-create-update-zn7sv" Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.721782 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wgf65\" (UniqueName: \"kubernetes.io/projected/906a9a7a-653d-44c7-ba0a-ec3b65ea7098-kube-api-access-wgf65\") pod \"cinder-db-create-vsd8c\" (UID: \"906a9a7a-653d-44c7-ba0a-ec3b65ea7098\") " pod="openstack-kuttl-tests/cinder-db-create-vsd8c" Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.788100 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-db-create-vsd8c" Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.796593 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9vw8d\" (UniqueName: \"kubernetes.io/projected/32e12252-5dee-4b9d-9c75-fc273c840a11-kube-api-access-9vw8d\") pod \"neutron-db-create-8q2dt\" (UID: \"32e12252-5dee-4b9d-9c75-fc273c840a11\") " pod="openstack-kuttl-tests/neutron-db-create-8q2dt" Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.796636 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/32e12252-5dee-4b9d-9c75-fc273c840a11-operator-scripts\") pod \"neutron-db-create-8q2dt\" (UID: \"32e12252-5dee-4b9d-9c75-fc273c840a11\") " pod="openstack-kuttl-tests/neutron-db-create-8q2dt" Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.796676 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f1e655f-1cb1-483c-9dd4-cc58c0d72c45-combined-ca-bundle\") pod \"keystone-db-sync-nrcvr\" (UID: \"4f1e655f-1cb1-483c-9dd4-cc58c0d72c45\") " pod="openstack-kuttl-tests/keystone-db-sync-nrcvr" Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.796718 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/42aec55d-662f-4390-8b64-d12da6a53863-operator-scripts\") pod \"barbican-db-create-4cht4\" (UID: \"42aec55d-662f-4390-8b64-d12da6a53863\") " pod="openstack-kuttl-tests/barbican-db-create-4cht4" Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.796759 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jrw4b\" (UniqueName: \"kubernetes.io/projected/296f0b5e-f4bb-4d1d-a920-432cc483e950-kube-api-access-jrw4b\") pod \"neutron-456e-account-create-update-6vj76\" (UID: \"296f0b5e-f4bb-4d1d-a920-432cc483e950\") " pod="openstack-kuttl-tests/neutron-456e-account-create-update-6vj76" Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.796781 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f1e655f-1cb1-483c-9dd4-cc58c0d72c45-config-data\") pod \"keystone-db-sync-nrcvr\" (UID: \"4f1e655f-1cb1-483c-9dd4-cc58c0d72c45\") " pod="openstack-kuttl-tests/keystone-db-sync-nrcvr" Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.796822 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p6r7w\" (UniqueName: \"kubernetes.io/projected/4f1e655f-1cb1-483c-9dd4-cc58c0d72c45-kube-api-access-p6r7w\") pod \"keystone-db-sync-nrcvr\" (UID: \"4f1e655f-1cb1-483c-9dd4-cc58c0d72c45\") " pod="openstack-kuttl-tests/keystone-db-sync-nrcvr" Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.796856 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/296f0b5e-f4bb-4d1d-a920-432cc483e950-operator-scripts\") pod \"neutron-456e-account-create-update-6vj76\" (UID: \"296f0b5e-f4bb-4d1d-a920-432cc483e950\") " pod="openstack-kuttl-tests/neutron-456e-account-create-update-6vj76" Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.796954 4558 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/33161661-cb8b-4f8b-b3da-c6c8bc9a0cd8-operator-scripts\") pod \"barbican-3e15-account-create-update-v7rcr\" (UID: \"33161661-cb8b-4f8b-b3da-c6c8bc9a0cd8\") " pod="openstack-kuttl-tests/barbican-3e15-account-create-update-v7rcr" Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.796994 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dlcbz\" (UniqueName: \"kubernetes.io/projected/42aec55d-662f-4390-8b64-d12da6a53863-kube-api-access-dlcbz\") pod \"barbican-db-create-4cht4\" (UID: \"42aec55d-662f-4390-8b64-d12da6a53863\") " pod="openstack-kuttl-tests/barbican-db-create-4cht4" Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.797030 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tns78\" (UniqueName: \"kubernetes.io/projected/33161661-cb8b-4f8b-b3da-c6c8bc9a0cd8-kube-api-access-tns78\") pod \"barbican-3e15-account-create-update-v7rcr\" (UID: \"33161661-cb8b-4f8b-b3da-c6c8bc9a0cd8\") " pod="openstack-kuttl-tests/barbican-3e15-account-create-update-v7rcr" Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.798197 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/42aec55d-662f-4390-8b64-d12da6a53863-operator-scripts\") pod \"barbican-db-create-4cht4\" (UID: \"42aec55d-662f-4390-8b64-d12da6a53863\") " pod="openstack-kuttl-tests/barbican-db-create-4cht4" Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.798730 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/33161661-cb8b-4f8b-b3da-c6c8bc9a0cd8-operator-scripts\") pod \"barbican-3e15-account-create-update-v7rcr\" (UID: \"33161661-cb8b-4f8b-b3da-c6c8bc9a0cd8\") " pod="openstack-kuttl-tests/barbican-3e15-account-create-update-v7rcr" Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.804914 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-6a24-account-create-update-zn7sv" Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.812390 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dlcbz\" (UniqueName: \"kubernetes.io/projected/42aec55d-662f-4390-8b64-d12da6a53863-kube-api-access-dlcbz\") pod \"barbican-db-create-4cht4\" (UID: \"42aec55d-662f-4390-8b64-d12da6a53863\") " pod="openstack-kuttl-tests/barbican-db-create-4cht4" Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.819669 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tns78\" (UniqueName: \"kubernetes.io/projected/33161661-cb8b-4f8b-b3da-c6c8bc9a0cd8-kube-api-access-tns78\") pod \"barbican-3e15-account-create-update-v7rcr\" (UID: \"33161661-cb8b-4f8b-b3da-c6c8bc9a0cd8\") " pod="openstack-kuttl-tests/barbican-3e15-account-create-update-v7rcr" Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.871267 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-db-create-4cht4" Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.879611 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-3e15-account-create-update-v7rcr" Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.898845 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9vw8d\" (UniqueName: \"kubernetes.io/projected/32e12252-5dee-4b9d-9c75-fc273c840a11-kube-api-access-9vw8d\") pod \"neutron-db-create-8q2dt\" (UID: \"32e12252-5dee-4b9d-9c75-fc273c840a11\") " pod="openstack-kuttl-tests/neutron-db-create-8q2dt" Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.898897 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/32e12252-5dee-4b9d-9c75-fc273c840a11-operator-scripts\") pod \"neutron-db-create-8q2dt\" (UID: \"32e12252-5dee-4b9d-9c75-fc273c840a11\") " pod="openstack-kuttl-tests/neutron-db-create-8q2dt" Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.898929 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f1e655f-1cb1-483c-9dd4-cc58c0d72c45-combined-ca-bundle\") pod \"keystone-db-sync-nrcvr\" (UID: \"4f1e655f-1cb1-483c-9dd4-cc58c0d72c45\") " pod="openstack-kuttl-tests/keystone-db-sync-nrcvr" Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.898955 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f1e655f-1cb1-483c-9dd4-cc58c0d72c45-config-data\") pod \"keystone-db-sync-nrcvr\" (UID: \"4f1e655f-1cb1-483c-9dd4-cc58c0d72c45\") " pod="openstack-kuttl-tests/keystone-db-sync-nrcvr" Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.898976 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jrw4b\" (UniqueName: \"kubernetes.io/projected/296f0b5e-f4bb-4d1d-a920-432cc483e950-kube-api-access-jrw4b\") pod \"neutron-456e-account-create-update-6vj76\" (UID: \"296f0b5e-f4bb-4d1d-a920-432cc483e950\") " pod="openstack-kuttl-tests/neutron-456e-account-create-update-6vj76" Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.899006 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p6r7w\" (UniqueName: \"kubernetes.io/projected/4f1e655f-1cb1-483c-9dd4-cc58c0d72c45-kube-api-access-p6r7w\") pod \"keystone-db-sync-nrcvr\" (UID: \"4f1e655f-1cb1-483c-9dd4-cc58c0d72c45\") " pod="openstack-kuttl-tests/keystone-db-sync-nrcvr" Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.899031 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/296f0b5e-f4bb-4d1d-a920-432cc483e950-operator-scripts\") pod \"neutron-456e-account-create-update-6vj76\" (UID: \"296f0b5e-f4bb-4d1d-a920-432cc483e950\") " pod="openstack-kuttl-tests/neutron-456e-account-create-update-6vj76" Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.900055 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/296f0b5e-f4bb-4d1d-a920-432cc483e950-operator-scripts\") pod \"neutron-456e-account-create-update-6vj76\" (UID: \"296f0b5e-f4bb-4d1d-a920-432cc483e950\") " pod="openstack-kuttl-tests/neutron-456e-account-create-update-6vj76" Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.901363 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/32e12252-5dee-4b9d-9c75-fc273c840a11-operator-scripts\") pod \"neutron-db-create-8q2dt\" (UID: \"32e12252-5dee-4b9d-9c75-fc273c840a11\") " pod="openstack-kuttl-tests/neutron-db-create-8q2dt" Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.909962 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f1e655f-1cb1-483c-9dd4-cc58c0d72c45-combined-ca-bundle\") pod \"keystone-db-sync-nrcvr\" (UID: \"4f1e655f-1cb1-483c-9dd4-cc58c0d72c45\") " pod="openstack-kuttl-tests/keystone-db-sync-nrcvr" Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.910413 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f1e655f-1cb1-483c-9dd4-cc58c0d72c45-config-data\") pod \"keystone-db-sync-nrcvr\" (UID: \"4f1e655f-1cb1-483c-9dd4-cc58c0d72c45\") " pod="openstack-kuttl-tests/keystone-db-sync-nrcvr" Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.920021 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jrw4b\" (UniqueName: \"kubernetes.io/projected/296f0b5e-f4bb-4d1d-a920-432cc483e950-kube-api-access-jrw4b\") pod \"neutron-456e-account-create-update-6vj76\" (UID: \"296f0b5e-f4bb-4d1d-a920-432cc483e950\") " pod="openstack-kuttl-tests/neutron-456e-account-create-update-6vj76" Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.923096 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p6r7w\" (UniqueName: \"kubernetes.io/projected/4f1e655f-1cb1-483c-9dd4-cc58c0d72c45-kube-api-access-p6r7w\") pod \"keystone-db-sync-nrcvr\" (UID: \"4f1e655f-1cb1-483c-9dd4-cc58c0d72c45\") " pod="openstack-kuttl-tests/keystone-db-sync-nrcvr" Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.923692 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9vw8d\" (UniqueName: \"kubernetes.io/projected/32e12252-5dee-4b9d-9c75-fc273c840a11-kube-api-access-9vw8d\") pod \"neutron-db-create-8q2dt\" (UID: \"32e12252-5dee-4b9d-9c75-fc273c840a11\") " pod="openstack-kuttl-tests/neutron-db-create-8q2dt" Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.969313 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-db-create-8q2dt" Jan 20 17:07:35 crc kubenswrapper[4558]: I0120 17:07:35.980032 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-456e-account-create-update-6vj76" Jan 20 17:07:36 crc kubenswrapper[4558]: I0120 17:07:36.018135 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-db-sync-nrcvr" Jan 20 17:07:36 crc kubenswrapper[4558]: I0120 17:07:36.299309 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-db-create-vsd8c"] Jan 20 17:07:36 crc kubenswrapper[4558]: I0120 17:07:36.311924 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-6a24-account-create-update-zn7sv"] Jan 20 17:07:36 crc kubenswrapper[4558]: W0120 17:07:36.317426 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod906a9a7a_653d_44c7_ba0a_ec3b65ea7098.slice/crio-9538d049276b02279e49495a21755700a275b71c7fcf75886eeee709451cc610 WatchSource:0}: Error finding container 9538d049276b02279e49495a21755700a275b71c7fcf75886eeee709451cc610: Status 404 returned error can't find the container with id 9538d049276b02279e49495a21755700a275b71c7fcf75886eeee709451cc610 Jan 20 17:07:36 crc kubenswrapper[4558]: W0120 17:07:36.320968 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1b1bf8d2_dad2_40e2_bf27_1a536f7a2ce5.slice/crio-0bf3c1fa6192b9e51f84e5a41737589271b0dc34d6cabf3187b677bc2db9bcf1 WatchSource:0}: Error finding container 0bf3c1fa6192b9e51f84e5a41737589271b0dc34d6cabf3187b677bc2db9bcf1: Status 404 returned error can't find the container with id 0bf3c1fa6192b9e51f84e5a41737589271b0dc34d6cabf3187b677bc2db9bcf1 Jan 20 17:07:36 crc kubenswrapper[4558]: I0120 17:07:36.438505 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-db-sync-nrcvr"] Jan 20 17:07:36 crc kubenswrapper[4558]: I0120 17:07:36.467763 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-db-create-8q2dt"] Jan 20 17:07:36 crc kubenswrapper[4558]: I0120 17:07:36.482355 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-3e15-account-create-update-v7rcr"] Jan 20 17:07:36 crc kubenswrapper[4558]: I0120 17:07:36.488918 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-456e-account-create-update-6vj76"] Jan 20 17:07:36 crc kubenswrapper[4558]: I0120 17:07:36.506965 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-db-create-4cht4"] Jan 20 17:07:36 crc kubenswrapper[4558]: W0120 17:07:36.533857 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod42aec55d_662f_4390_8b64_d12da6a53863.slice/crio-3cfb051f3d249ad304464763866793cfb9ff6c1e98da52d221a262d1747c1321 WatchSource:0}: Error finding container 3cfb051f3d249ad304464763866793cfb9ff6c1e98da52d221a262d1747c1321: Status 404 returned error can't find the container with id 3cfb051f3d249ad304464763866793cfb9ff6c1e98da52d221a262d1747c1321 Jan 20 17:07:36 crc kubenswrapper[4558]: I0120 17:07:36.584406 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-456e-account-create-update-6vj76" event={"ID":"296f0b5e-f4bb-4d1d-a920-432cc483e950","Type":"ContainerStarted","Data":"dc95a395c3958ddfad0f1fdfb12f1c581424e4461de4c429cf3dc7b5f3b790f6"} Jan 20 17:07:36 crc kubenswrapper[4558]: I0120 17:07:36.584436 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-create-8q2dt" 
event={"ID":"32e12252-5dee-4b9d-9c75-fc273c840a11","Type":"ContainerStarted","Data":"8d4e090c2bc1bcb3b23663a7979dac1ba8d70b6707046dde57f8fe61a8bf46c6"} Jan 20 17:07:36 crc kubenswrapper[4558]: I0120 17:07:36.584449 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-6a24-account-create-update-zn7sv" event={"ID":"1b1bf8d2-dad2-40e2-bf27-1a536f7a2ce5","Type":"ContainerStarted","Data":"a1feb2942ef8ec9035f46ccb949b28abf9ee6144ba754bbfc58cb16aa1930cc0"} Jan 20 17:07:36 crc kubenswrapper[4558]: I0120 17:07:36.584459 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-6a24-account-create-update-zn7sv" event={"ID":"1b1bf8d2-dad2-40e2-bf27-1a536f7a2ce5","Type":"ContainerStarted","Data":"0bf3c1fa6192b9e51f84e5a41737589271b0dc34d6cabf3187b677bc2db9bcf1"} Jan 20 17:07:36 crc kubenswrapper[4558]: I0120 17:07:36.584544 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-3e15-account-create-update-v7rcr" event={"ID":"33161661-cb8b-4f8b-b3da-c6c8bc9a0cd8","Type":"ContainerStarted","Data":"1a9a4f0186a3cb94877192d70a625e26092284040bb7b89dedc0282be2e61bb6"} Jan 20 17:07:36 crc kubenswrapper[4558]: I0120 17:07:36.586519 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-create-4cht4" event={"ID":"42aec55d-662f-4390-8b64-d12da6a53863","Type":"ContainerStarted","Data":"3cfb051f3d249ad304464763866793cfb9ff6c1e98da52d221a262d1747c1321"} Jan 20 17:07:36 crc kubenswrapper[4558]: I0120 17:07:36.601434 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-create-vsd8c" event={"ID":"906a9a7a-653d-44c7-ba0a-ec3b65ea7098","Type":"ContainerStarted","Data":"d11163804592e684099fa46ac620c517657e03a9172c6a38e1115514a3db22fa"} Jan 20 17:07:36 crc kubenswrapper[4558]: I0120 17:07:36.601500 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-create-vsd8c" event={"ID":"906a9a7a-653d-44c7-ba0a-ec3b65ea7098","Type":"ContainerStarted","Data":"9538d049276b02279e49495a21755700a275b71c7fcf75886eeee709451cc610"} Jan 20 17:07:36 crc kubenswrapper[4558]: I0120 17:07:36.604975 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-sync-nrcvr" event={"ID":"4f1e655f-1cb1-483c-9dd4-cc58c0d72c45","Type":"ContainerStarted","Data":"117773d431c22f0a7570f52e691ef6e82d4ba988075c032f3be76a88adae182a"} Jan 20 17:07:36 crc kubenswrapper[4558]: I0120 17:07:36.752527 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/cinder-db-create-vsd8c" podStartSLOduration=1.752503296 podStartE2EDuration="1.752503296s" podCreationTimestamp="2026-01-20 17:07:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:07:36.731099689 +0000 UTC m=+1550.491437657" watchObservedRunningTime="2026-01-20 17:07:36.752503296 +0000 UTC m=+1550.512841263" Jan 20 17:07:36 crc kubenswrapper[4558]: I0120 17:07:36.755224 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/cinder-6a24-account-create-update-zn7sv" podStartSLOduration=1.7552202449999998 podStartE2EDuration="1.755220245s" podCreationTimestamp="2026-01-20 17:07:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:07:36.744750738 +0000 UTC m=+1550.505088705" 
watchObservedRunningTime="2026-01-20 17:07:36.755220245 +0000 UTC m=+1550.515558212" Jan 20 17:07:37 crc kubenswrapper[4558]: E0120 17:07:37.102678 4558 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod33161661_cb8b_4f8b_b3da_c6c8bc9a0cd8.slice/crio-b00c43b649e5e692c940930fe70e4fd392d52b887827633cf0858cc366463066.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod33161661_cb8b_4f8b_b3da_c6c8bc9a0cd8.slice/crio-conmon-b00c43b649e5e692c940930fe70e4fd392d52b887827633cf0858cc366463066.scope\": RecentStats: unable to find data in memory cache]" Jan 20 17:07:37 crc kubenswrapper[4558]: I0120 17:07:37.618516 4558 generic.go:334] "Generic (PLEG): container finished" podID="296f0b5e-f4bb-4d1d-a920-432cc483e950" containerID="397dc1ad776ba3a8529bc116085603412a742263395cc5fe26f66ac424ae8a4f" exitCode=0 Jan 20 17:07:37 crc kubenswrapper[4558]: I0120 17:07:37.618597 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-456e-account-create-update-6vj76" event={"ID":"296f0b5e-f4bb-4d1d-a920-432cc483e950","Type":"ContainerDied","Data":"397dc1ad776ba3a8529bc116085603412a742263395cc5fe26f66ac424ae8a4f"} Jan 20 17:07:37 crc kubenswrapper[4558]: I0120 17:07:37.622421 4558 generic.go:334] "Generic (PLEG): container finished" podID="32e12252-5dee-4b9d-9c75-fc273c840a11" containerID="cf2885dbcf415dfa13b8eb9cb926c8cf1c4dc26a15550166d7005e724bc40041" exitCode=0 Jan 20 17:07:37 crc kubenswrapper[4558]: I0120 17:07:37.622472 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-create-8q2dt" event={"ID":"32e12252-5dee-4b9d-9c75-fc273c840a11","Type":"ContainerDied","Data":"cf2885dbcf415dfa13b8eb9cb926c8cf1c4dc26a15550166d7005e724bc40041"} Jan 20 17:07:37 crc kubenswrapper[4558]: I0120 17:07:37.628334 4558 generic.go:334] "Generic (PLEG): container finished" podID="1b1bf8d2-dad2-40e2-bf27-1a536f7a2ce5" containerID="a1feb2942ef8ec9035f46ccb949b28abf9ee6144ba754bbfc58cb16aa1930cc0" exitCode=0 Jan 20 17:07:37 crc kubenswrapper[4558]: I0120 17:07:37.628376 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-6a24-account-create-update-zn7sv" event={"ID":"1b1bf8d2-dad2-40e2-bf27-1a536f7a2ce5","Type":"ContainerDied","Data":"a1feb2942ef8ec9035f46ccb949b28abf9ee6144ba754bbfc58cb16aa1930cc0"} Jan 20 17:07:37 crc kubenswrapper[4558]: I0120 17:07:37.630400 4558 generic.go:334] "Generic (PLEG): container finished" podID="33161661-cb8b-4f8b-b3da-c6c8bc9a0cd8" containerID="b00c43b649e5e692c940930fe70e4fd392d52b887827633cf0858cc366463066" exitCode=0 Jan 20 17:07:37 crc kubenswrapper[4558]: I0120 17:07:37.630443 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-3e15-account-create-update-v7rcr" event={"ID":"33161661-cb8b-4f8b-b3da-c6c8bc9a0cd8","Type":"ContainerDied","Data":"b00c43b649e5e692c940930fe70e4fd392d52b887827633cf0858cc366463066"} Jan 20 17:07:37 crc kubenswrapper[4558]: I0120 17:07:37.631559 4558 generic.go:334] "Generic (PLEG): container finished" podID="42aec55d-662f-4390-8b64-d12da6a53863" containerID="2d30f855d2909e2fba03e13362c8b595118ff00963aa9a93f120e33156d5f97a" exitCode=0 Jan 20 17:07:37 crc kubenswrapper[4558]: I0120 17:07:37.631598 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-create-4cht4" 
event={"ID":"42aec55d-662f-4390-8b64-d12da6a53863","Type":"ContainerDied","Data":"2d30f855d2909e2fba03e13362c8b595118ff00963aa9a93f120e33156d5f97a"} Jan 20 17:07:37 crc kubenswrapper[4558]: I0120 17:07:37.632558 4558 generic.go:334] "Generic (PLEG): container finished" podID="906a9a7a-653d-44c7-ba0a-ec3b65ea7098" containerID="d11163804592e684099fa46ac620c517657e03a9172c6a38e1115514a3db22fa" exitCode=0 Jan 20 17:07:37 crc kubenswrapper[4558]: I0120 17:07:37.632597 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-create-vsd8c" event={"ID":"906a9a7a-653d-44c7-ba0a-ec3b65ea7098","Type":"ContainerDied","Data":"d11163804592e684099fa46ac620c517657e03a9172c6a38e1115514a3db22fa"} Jan 20 17:07:37 crc kubenswrapper[4558]: I0120 17:07:37.633687 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-sync-nrcvr" event={"ID":"4f1e655f-1cb1-483c-9dd4-cc58c0d72c45","Type":"ContainerStarted","Data":"f4eb3aa9fec9fe4e1edca473d9d52013d4b4d121058769c4a99430648da5242c"} Jan 20 17:07:37 crc kubenswrapper[4558]: I0120 17:07:37.659604 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/keystone-db-sync-nrcvr" podStartSLOduration=2.659590862 podStartE2EDuration="2.659590862s" podCreationTimestamp="2026-01-20 17:07:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:07:37.655115726 +0000 UTC m=+1551.415453683" watchObservedRunningTime="2026-01-20 17:07:37.659590862 +0000 UTC m=+1551.419928818" Jan 20 17:07:38 crc kubenswrapper[4558]: I0120 17:07:38.642640 4558 generic.go:334] "Generic (PLEG): container finished" podID="4f1e655f-1cb1-483c-9dd4-cc58c0d72c45" containerID="f4eb3aa9fec9fe4e1edca473d9d52013d4b4d121058769c4a99430648da5242c" exitCode=0 Jan 20 17:07:38 crc kubenswrapper[4558]: I0120 17:07:38.642691 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-sync-nrcvr" event={"ID":"4f1e655f-1cb1-483c-9dd4-cc58c0d72c45","Type":"ContainerDied","Data":"f4eb3aa9fec9fe4e1edca473d9d52013d4b4d121058769c4a99430648da5242c"} Jan 20 17:07:38 crc kubenswrapper[4558]: I0120 17:07:38.934663 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-456e-account-create-update-6vj76" Jan 20 17:07:39 crc kubenswrapper[4558]: I0120 17:07:39.055072 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-db-create-vsd8c" Jan 20 17:07:39 crc kubenswrapper[4558]: I0120 17:07:39.074043 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jrw4b\" (UniqueName: \"kubernetes.io/projected/296f0b5e-f4bb-4d1d-a920-432cc483e950-kube-api-access-jrw4b\") pod \"296f0b5e-f4bb-4d1d-a920-432cc483e950\" (UID: \"296f0b5e-f4bb-4d1d-a920-432cc483e950\") " Jan 20 17:07:39 crc kubenswrapper[4558]: I0120 17:07:39.074147 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/296f0b5e-f4bb-4d1d-a920-432cc483e950-operator-scripts\") pod \"296f0b5e-f4bb-4d1d-a920-432cc483e950\" (UID: \"296f0b5e-f4bb-4d1d-a920-432cc483e950\") " Jan 20 17:07:39 crc kubenswrapper[4558]: I0120 17:07:39.076289 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/296f0b5e-f4bb-4d1d-a920-432cc483e950-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "296f0b5e-f4bb-4d1d-a920-432cc483e950" (UID: "296f0b5e-f4bb-4d1d-a920-432cc483e950"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:07:39 crc kubenswrapper[4558]: I0120 17:07:39.081739 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/296f0b5e-f4bb-4d1d-a920-432cc483e950-kube-api-access-jrw4b" (OuterVolumeSpecName: "kube-api-access-jrw4b") pod "296f0b5e-f4bb-4d1d-a920-432cc483e950" (UID: "296f0b5e-f4bb-4d1d-a920-432cc483e950"). InnerVolumeSpecName "kube-api-access-jrw4b". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:07:39 crc kubenswrapper[4558]: I0120 17:07:39.175712 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wgf65\" (UniqueName: \"kubernetes.io/projected/906a9a7a-653d-44c7-ba0a-ec3b65ea7098-kube-api-access-wgf65\") pod \"906a9a7a-653d-44c7-ba0a-ec3b65ea7098\" (UID: \"906a9a7a-653d-44c7-ba0a-ec3b65ea7098\") " Jan 20 17:07:39 crc kubenswrapper[4558]: I0120 17:07:39.175834 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/906a9a7a-653d-44c7-ba0a-ec3b65ea7098-operator-scripts\") pod \"906a9a7a-653d-44c7-ba0a-ec3b65ea7098\" (UID: \"906a9a7a-653d-44c7-ba0a-ec3b65ea7098\") " Jan 20 17:07:39 crc kubenswrapper[4558]: I0120 17:07:39.176386 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/906a9a7a-653d-44c7-ba0a-ec3b65ea7098-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "906a9a7a-653d-44c7-ba0a-ec3b65ea7098" (UID: "906a9a7a-653d-44c7-ba0a-ec3b65ea7098"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:07:39 crc kubenswrapper[4558]: I0120 17:07:39.176699 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jrw4b\" (UniqueName: \"kubernetes.io/projected/296f0b5e-f4bb-4d1d-a920-432cc483e950-kube-api-access-jrw4b\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:39 crc kubenswrapper[4558]: I0120 17:07:39.176750 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/296f0b5e-f4bb-4d1d-a920-432cc483e950-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:39 crc kubenswrapper[4558]: I0120 17:07:39.176760 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/906a9a7a-653d-44c7-ba0a-ec3b65ea7098-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:39 crc kubenswrapper[4558]: I0120 17:07:39.177680 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-3e15-account-create-update-v7rcr" Jan 20 17:07:39 crc kubenswrapper[4558]: I0120 17:07:39.178757 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/906a9a7a-653d-44c7-ba0a-ec3b65ea7098-kube-api-access-wgf65" (OuterVolumeSpecName: "kube-api-access-wgf65") pod "906a9a7a-653d-44c7-ba0a-ec3b65ea7098" (UID: "906a9a7a-653d-44c7-ba0a-ec3b65ea7098"). InnerVolumeSpecName "kube-api-access-wgf65". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:07:39 crc kubenswrapper[4558]: I0120 17:07:39.190239 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-6a24-account-create-update-zn7sv" Jan 20 17:07:39 crc kubenswrapper[4558]: I0120 17:07:39.220303 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-db-create-4cht4" Jan 20 17:07:39 crc kubenswrapper[4558]: I0120 17:07:39.232789 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-db-create-8q2dt" Jan 20 17:07:39 crc kubenswrapper[4558]: I0120 17:07:39.278210 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tns78\" (UniqueName: \"kubernetes.io/projected/33161661-cb8b-4f8b-b3da-c6c8bc9a0cd8-kube-api-access-tns78\") pod \"33161661-cb8b-4f8b-b3da-c6c8bc9a0cd8\" (UID: \"33161661-cb8b-4f8b-b3da-c6c8bc9a0cd8\") " Jan 20 17:07:39 crc kubenswrapper[4558]: I0120 17:07:39.278296 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qmdh5\" (UniqueName: \"kubernetes.io/projected/1b1bf8d2-dad2-40e2-bf27-1a536f7a2ce5-kube-api-access-qmdh5\") pod \"1b1bf8d2-dad2-40e2-bf27-1a536f7a2ce5\" (UID: \"1b1bf8d2-dad2-40e2-bf27-1a536f7a2ce5\") " Jan 20 17:07:39 crc kubenswrapper[4558]: I0120 17:07:39.278375 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1b1bf8d2-dad2-40e2-bf27-1a536f7a2ce5-operator-scripts\") pod \"1b1bf8d2-dad2-40e2-bf27-1a536f7a2ce5\" (UID: \"1b1bf8d2-dad2-40e2-bf27-1a536f7a2ce5\") " Jan 20 17:07:39 crc kubenswrapper[4558]: I0120 17:07:39.278395 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/33161661-cb8b-4f8b-b3da-c6c8bc9a0cd8-operator-scripts\") pod \"33161661-cb8b-4f8b-b3da-c6c8bc9a0cd8\" (UID: \"33161661-cb8b-4f8b-b3da-c6c8bc9a0cd8\") " Jan 20 17:07:39 crc kubenswrapper[4558]: I0120 17:07:39.278804 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1b1bf8d2-dad2-40e2-bf27-1a536f7a2ce5-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "1b1bf8d2-dad2-40e2-bf27-1a536f7a2ce5" (UID: "1b1bf8d2-dad2-40e2-bf27-1a536f7a2ce5"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:07:39 crc kubenswrapper[4558]: I0120 17:07:39.278817 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wgf65\" (UniqueName: \"kubernetes.io/projected/906a9a7a-653d-44c7-ba0a-ec3b65ea7098-kube-api-access-wgf65\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:39 crc kubenswrapper[4558]: I0120 17:07:39.279105 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/33161661-cb8b-4f8b-b3da-c6c8bc9a0cd8-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "33161661-cb8b-4f8b-b3da-c6c8bc9a0cd8" (UID: "33161661-cb8b-4f8b-b3da-c6c8bc9a0cd8"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:07:39 crc kubenswrapper[4558]: I0120 17:07:39.280951 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1b1bf8d2-dad2-40e2-bf27-1a536f7a2ce5-kube-api-access-qmdh5" (OuterVolumeSpecName: "kube-api-access-qmdh5") pod "1b1bf8d2-dad2-40e2-bf27-1a536f7a2ce5" (UID: "1b1bf8d2-dad2-40e2-bf27-1a536f7a2ce5"). InnerVolumeSpecName "kube-api-access-qmdh5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:07:39 crc kubenswrapper[4558]: I0120 17:07:39.280994 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/33161661-cb8b-4f8b-b3da-c6c8bc9a0cd8-kube-api-access-tns78" (OuterVolumeSpecName: "kube-api-access-tns78") pod "33161661-cb8b-4f8b-b3da-c6c8bc9a0cd8" (UID: "33161661-cb8b-4f8b-b3da-c6c8bc9a0cd8"). 
InnerVolumeSpecName "kube-api-access-tns78". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:07:39 crc kubenswrapper[4558]: I0120 17:07:39.380433 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dlcbz\" (UniqueName: \"kubernetes.io/projected/42aec55d-662f-4390-8b64-d12da6a53863-kube-api-access-dlcbz\") pod \"42aec55d-662f-4390-8b64-d12da6a53863\" (UID: \"42aec55d-662f-4390-8b64-d12da6a53863\") " Jan 20 17:07:39 crc kubenswrapper[4558]: I0120 17:07:39.380664 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/42aec55d-662f-4390-8b64-d12da6a53863-operator-scripts\") pod \"42aec55d-662f-4390-8b64-d12da6a53863\" (UID: \"42aec55d-662f-4390-8b64-d12da6a53863\") " Jan 20 17:07:39 crc kubenswrapper[4558]: I0120 17:07:39.380711 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/32e12252-5dee-4b9d-9c75-fc273c840a11-operator-scripts\") pod \"32e12252-5dee-4b9d-9c75-fc273c840a11\" (UID: \"32e12252-5dee-4b9d-9c75-fc273c840a11\") " Jan 20 17:07:39 crc kubenswrapper[4558]: I0120 17:07:39.380765 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9vw8d\" (UniqueName: \"kubernetes.io/projected/32e12252-5dee-4b9d-9c75-fc273c840a11-kube-api-access-9vw8d\") pod \"32e12252-5dee-4b9d-9c75-fc273c840a11\" (UID: \"32e12252-5dee-4b9d-9c75-fc273c840a11\") " Jan 20 17:07:39 crc kubenswrapper[4558]: I0120 17:07:39.381108 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/42aec55d-662f-4390-8b64-d12da6a53863-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "42aec55d-662f-4390-8b64-d12da6a53863" (UID: "42aec55d-662f-4390-8b64-d12da6a53863"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:07:39 crc kubenswrapper[4558]: I0120 17:07:39.381216 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/32e12252-5dee-4b9d-9c75-fc273c840a11-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "32e12252-5dee-4b9d-9c75-fc273c840a11" (UID: "32e12252-5dee-4b9d-9c75-fc273c840a11"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:07:39 crc kubenswrapper[4558]: I0120 17:07:39.381713 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qmdh5\" (UniqueName: \"kubernetes.io/projected/1b1bf8d2-dad2-40e2-bf27-1a536f7a2ce5-kube-api-access-qmdh5\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:39 crc kubenswrapper[4558]: I0120 17:07:39.381743 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1b1bf8d2-dad2-40e2-bf27-1a536f7a2ce5-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:39 crc kubenswrapper[4558]: I0120 17:07:39.381756 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/33161661-cb8b-4f8b-b3da-c6c8bc9a0cd8-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:39 crc kubenswrapper[4558]: I0120 17:07:39.381769 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/42aec55d-662f-4390-8b64-d12da6a53863-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:39 crc kubenswrapper[4558]: I0120 17:07:39.381778 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/32e12252-5dee-4b9d-9c75-fc273c840a11-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:39 crc kubenswrapper[4558]: I0120 17:07:39.381789 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tns78\" (UniqueName: \"kubernetes.io/projected/33161661-cb8b-4f8b-b3da-c6c8bc9a0cd8-kube-api-access-tns78\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:39 crc kubenswrapper[4558]: I0120 17:07:39.384223 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/42aec55d-662f-4390-8b64-d12da6a53863-kube-api-access-dlcbz" (OuterVolumeSpecName: "kube-api-access-dlcbz") pod "42aec55d-662f-4390-8b64-d12da6a53863" (UID: "42aec55d-662f-4390-8b64-d12da6a53863"). InnerVolumeSpecName "kube-api-access-dlcbz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:07:39 crc kubenswrapper[4558]: I0120 17:07:39.384788 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/32e12252-5dee-4b9d-9c75-fc273c840a11-kube-api-access-9vw8d" (OuterVolumeSpecName: "kube-api-access-9vw8d") pod "32e12252-5dee-4b9d-9c75-fc273c840a11" (UID: "32e12252-5dee-4b9d-9c75-fc273c840a11"). InnerVolumeSpecName "kube-api-access-9vw8d". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:07:39 crc kubenswrapper[4558]: I0120 17:07:39.483923 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9vw8d\" (UniqueName: \"kubernetes.io/projected/32e12252-5dee-4b9d-9c75-fc273c840a11-kube-api-access-9vw8d\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:39 crc kubenswrapper[4558]: I0120 17:07:39.483959 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dlcbz\" (UniqueName: \"kubernetes.io/projected/42aec55d-662f-4390-8b64-d12da6a53863-kube-api-access-dlcbz\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:39 crc kubenswrapper[4558]: I0120 17:07:39.652453 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-db-create-4cht4" Jan 20 17:07:39 crc kubenswrapper[4558]: I0120 17:07:39.653278 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-create-4cht4" event={"ID":"42aec55d-662f-4390-8b64-d12da6a53863","Type":"ContainerDied","Data":"3cfb051f3d249ad304464763866793cfb9ff6c1e98da52d221a262d1747c1321"} Jan 20 17:07:39 crc kubenswrapper[4558]: I0120 17:07:39.653346 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3cfb051f3d249ad304464763866793cfb9ff6c1e98da52d221a262d1747c1321" Jan 20 17:07:39 crc kubenswrapper[4558]: I0120 17:07:39.654076 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-create-vsd8c" event={"ID":"906a9a7a-653d-44c7-ba0a-ec3b65ea7098","Type":"ContainerDied","Data":"9538d049276b02279e49495a21755700a275b71c7fcf75886eeee709451cc610"} Jan 20 17:07:39 crc kubenswrapper[4558]: I0120 17:07:39.654098 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-db-create-vsd8c" Jan 20 17:07:39 crc kubenswrapper[4558]: I0120 17:07:39.654106 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9538d049276b02279e49495a21755700a275b71c7fcf75886eeee709451cc610" Jan 20 17:07:39 crc kubenswrapper[4558]: I0120 17:07:39.655538 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-456e-account-create-update-6vj76" event={"ID":"296f0b5e-f4bb-4d1d-a920-432cc483e950","Type":"ContainerDied","Data":"dc95a395c3958ddfad0f1fdfb12f1c581424e4461de4c429cf3dc7b5f3b790f6"} Jan 20 17:07:39 crc kubenswrapper[4558]: I0120 17:07:39.655564 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dc95a395c3958ddfad0f1fdfb12f1c581424e4461de4c429cf3dc7b5f3b790f6" Jan 20 17:07:39 crc kubenswrapper[4558]: I0120 17:07:39.655617 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-456e-account-create-update-6vj76" Jan 20 17:07:39 crc kubenswrapper[4558]: I0120 17:07:39.660154 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-create-8q2dt" event={"ID":"32e12252-5dee-4b9d-9c75-fc273c840a11","Type":"ContainerDied","Data":"8d4e090c2bc1bcb3b23663a7979dac1ba8d70b6707046dde57f8fe61a8bf46c6"} Jan 20 17:07:39 crc kubenswrapper[4558]: I0120 17:07:39.660210 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8d4e090c2bc1bcb3b23663a7979dac1ba8d70b6707046dde57f8fe61a8bf46c6" Jan 20 17:07:39 crc kubenswrapper[4558]: I0120 17:07:39.660263 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-db-create-8q2dt" Jan 20 17:07:39 crc kubenswrapper[4558]: I0120 17:07:39.661800 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-6a24-account-create-update-zn7sv" event={"ID":"1b1bf8d2-dad2-40e2-bf27-1a536f7a2ce5","Type":"ContainerDied","Data":"0bf3c1fa6192b9e51f84e5a41737589271b0dc34d6cabf3187b677bc2db9bcf1"} Jan 20 17:07:39 crc kubenswrapper[4558]: I0120 17:07:39.661854 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0bf3c1fa6192b9e51f84e5a41737589271b0dc34d6cabf3187b677bc2db9bcf1" Jan 20 17:07:39 crc kubenswrapper[4558]: I0120 17:07:39.661924 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-6a24-account-create-update-zn7sv" Jan 20 17:07:39 crc kubenswrapper[4558]: I0120 17:07:39.666362 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-3e15-account-create-update-v7rcr" Jan 20 17:07:39 crc kubenswrapper[4558]: I0120 17:07:39.666348 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-3e15-account-create-update-v7rcr" event={"ID":"33161661-cb8b-4f8b-b3da-c6c8bc9a0cd8","Type":"ContainerDied","Data":"1a9a4f0186a3cb94877192d70a625e26092284040bb7b89dedc0282be2e61bb6"} Jan 20 17:07:39 crc kubenswrapper[4558]: I0120 17:07:39.666479 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1a9a4f0186a3cb94877192d70a625e26092284040bb7b89dedc0282be2e61bb6" Jan 20 17:07:39 crc kubenswrapper[4558]: I0120 17:07:39.944534 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-db-sync-nrcvr" Jan 20 17:07:40 crc kubenswrapper[4558]: I0120 17:07:40.094728 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f1e655f-1cb1-483c-9dd4-cc58c0d72c45-config-data\") pod \"4f1e655f-1cb1-483c-9dd4-cc58c0d72c45\" (UID: \"4f1e655f-1cb1-483c-9dd4-cc58c0d72c45\") " Jan 20 17:07:40 crc kubenswrapper[4558]: I0120 17:07:40.095053 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f1e655f-1cb1-483c-9dd4-cc58c0d72c45-combined-ca-bundle\") pod \"4f1e655f-1cb1-483c-9dd4-cc58c0d72c45\" (UID: \"4f1e655f-1cb1-483c-9dd4-cc58c0d72c45\") " Jan 20 17:07:40 crc kubenswrapper[4558]: I0120 17:07:40.095153 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p6r7w\" (UniqueName: \"kubernetes.io/projected/4f1e655f-1cb1-483c-9dd4-cc58c0d72c45-kube-api-access-p6r7w\") pod \"4f1e655f-1cb1-483c-9dd4-cc58c0d72c45\" (UID: \"4f1e655f-1cb1-483c-9dd4-cc58c0d72c45\") " Jan 20 17:07:40 crc kubenswrapper[4558]: I0120 17:07:40.099716 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4f1e655f-1cb1-483c-9dd4-cc58c0d72c45-kube-api-access-p6r7w" (OuterVolumeSpecName: "kube-api-access-p6r7w") pod "4f1e655f-1cb1-483c-9dd4-cc58c0d72c45" (UID: "4f1e655f-1cb1-483c-9dd4-cc58c0d72c45"). InnerVolumeSpecName "kube-api-access-p6r7w". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:07:40 crc kubenswrapper[4558]: I0120 17:07:40.116190 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f1e655f-1cb1-483c-9dd4-cc58c0d72c45-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4f1e655f-1cb1-483c-9dd4-cc58c0d72c45" (UID: "4f1e655f-1cb1-483c-9dd4-cc58c0d72c45"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:07:40 crc kubenswrapper[4558]: I0120 17:07:40.129823 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f1e655f-1cb1-483c-9dd4-cc58c0d72c45-config-data" (OuterVolumeSpecName: "config-data") pod "4f1e655f-1cb1-483c-9dd4-cc58c0d72c45" (UID: "4f1e655f-1cb1-483c-9dd4-cc58c0d72c45"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:07:40 crc kubenswrapper[4558]: I0120 17:07:40.198367 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f1e655f-1cb1-483c-9dd4-cc58c0d72c45-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:40 crc kubenswrapper[4558]: I0120 17:07:40.198414 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f1e655f-1cb1-483c-9dd4-cc58c0d72c45-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:40 crc kubenswrapper[4558]: I0120 17:07:40.198434 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p6r7w\" (UniqueName: \"kubernetes.io/projected/4f1e655f-1cb1-483c-9dd4-cc58c0d72c45-kube-api-access-p6r7w\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:40 crc kubenswrapper[4558]: I0120 17:07:40.675469 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-sync-nrcvr" event={"ID":"4f1e655f-1cb1-483c-9dd4-cc58c0d72c45","Type":"ContainerDied","Data":"117773d431c22f0a7570f52e691ef6e82d4ba988075c032f3be76a88adae182a"} Jan 20 17:07:40 crc kubenswrapper[4558]: I0120 17:07:40.675529 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="117773d431c22f0a7570f52e691ef6e82d4ba988075c032f3be76a88adae182a" Jan 20 17:07:40 crc kubenswrapper[4558]: I0120 17:07:40.675592 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-db-sync-nrcvr" Jan 20 17:07:40 crc kubenswrapper[4558]: I0120 17:07:40.833049 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-rmnb4"] Jan 20 17:07:40 crc kubenswrapper[4558]: E0120 17:07:40.833700 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="42aec55d-662f-4390-8b64-d12da6a53863" containerName="mariadb-database-create" Jan 20 17:07:40 crc kubenswrapper[4558]: I0120 17:07:40.833733 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="42aec55d-662f-4390-8b64-d12da6a53863" containerName="mariadb-database-create" Jan 20 17:07:40 crc kubenswrapper[4558]: E0120 17:07:40.833750 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b1bf8d2-dad2-40e2-bf27-1a536f7a2ce5" containerName="mariadb-account-create-update" Jan 20 17:07:40 crc kubenswrapper[4558]: I0120 17:07:40.833762 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b1bf8d2-dad2-40e2-bf27-1a536f7a2ce5" containerName="mariadb-account-create-update" Jan 20 17:07:40 crc kubenswrapper[4558]: E0120 17:07:40.833774 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="296f0b5e-f4bb-4d1d-a920-432cc483e950" containerName="mariadb-account-create-update" Jan 20 17:07:40 crc kubenswrapper[4558]: I0120 17:07:40.833780 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="296f0b5e-f4bb-4d1d-a920-432cc483e950" containerName="mariadb-account-create-update" Jan 20 17:07:40 crc kubenswrapper[4558]: E0120 17:07:40.833791 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="33161661-cb8b-4f8b-b3da-c6c8bc9a0cd8" containerName="mariadb-account-create-update" Jan 20 17:07:40 crc kubenswrapper[4558]: I0120 17:07:40.833798 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="33161661-cb8b-4f8b-b3da-c6c8bc9a0cd8" containerName="mariadb-account-create-update" Jan 20 17:07:40 crc kubenswrapper[4558]: E0120 17:07:40.833812 4558 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="32e12252-5dee-4b9d-9c75-fc273c840a11" containerName="mariadb-database-create" Jan 20 17:07:40 crc kubenswrapper[4558]: I0120 17:07:40.833817 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="32e12252-5dee-4b9d-9c75-fc273c840a11" containerName="mariadb-database-create" Jan 20 17:07:40 crc kubenswrapper[4558]: E0120 17:07:40.833829 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="906a9a7a-653d-44c7-ba0a-ec3b65ea7098" containerName="mariadb-database-create" Jan 20 17:07:40 crc kubenswrapper[4558]: I0120 17:07:40.833837 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="906a9a7a-653d-44c7-ba0a-ec3b65ea7098" containerName="mariadb-database-create" Jan 20 17:07:40 crc kubenswrapper[4558]: E0120 17:07:40.833855 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f1e655f-1cb1-483c-9dd4-cc58c0d72c45" containerName="keystone-db-sync" Jan 20 17:07:40 crc kubenswrapper[4558]: I0120 17:07:40.833861 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f1e655f-1cb1-483c-9dd4-cc58c0d72c45" containerName="keystone-db-sync" Jan 20 17:07:40 crc kubenswrapper[4558]: I0120 17:07:40.834128 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="42aec55d-662f-4390-8b64-d12da6a53863" containerName="mariadb-database-create" Jan 20 17:07:40 crc kubenswrapper[4558]: I0120 17:07:40.834145 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="296f0b5e-f4bb-4d1d-a920-432cc483e950" containerName="mariadb-account-create-update" Jan 20 17:07:40 crc kubenswrapper[4558]: I0120 17:07:40.834185 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="33161661-cb8b-4f8b-b3da-c6c8bc9a0cd8" containerName="mariadb-account-create-update" Jan 20 17:07:40 crc kubenswrapper[4558]: I0120 17:07:40.834200 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="906a9a7a-653d-44c7-ba0a-ec3b65ea7098" containerName="mariadb-database-create" Jan 20 17:07:40 crc kubenswrapper[4558]: I0120 17:07:40.834209 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="32e12252-5dee-4b9d-9c75-fc273c840a11" containerName="mariadb-database-create" Jan 20 17:07:40 crc kubenswrapper[4558]: I0120 17:07:40.834216 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f1e655f-1cb1-483c-9dd4-cc58c0d72c45" containerName="keystone-db-sync" Jan 20 17:07:40 crc kubenswrapper[4558]: I0120 17:07:40.834225 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="1b1bf8d2-dad2-40e2-bf27-1a536f7a2ce5" containerName="mariadb-account-create-update" Jan 20 17:07:40 crc kubenswrapper[4558]: I0120 17:07:40.836816 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-rmnb4" Jan 20 17:07:40 crc kubenswrapper[4558]: I0120 17:07:40.844525 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-rmnb4"] Jan 20 17:07:40 crc kubenswrapper[4558]: I0120 17:07:40.844744 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-keystone-dockercfg-lcqb5" Jan 20 17:07:40 crc kubenswrapper[4558]: I0120 17:07:40.845734 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-scripts" Jan 20 17:07:40 crc kubenswrapper[4558]: I0120 17:07:40.846734 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone" Jan 20 17:07:40 crc kubenswrapper[4558]: I0120 17:07:40.851899 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"osp-secret" Jan 20 17:07:40 crc kubenswrapper[4558]: I0120 17:07:40.855469 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-config-data" Jan 20 17:07:40 crc kubenswrapper[4558]: I0120 17:07:40.909375 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2-combined-ca-bundle\") pod \"keystone-bootstrap-rmnb4\" (UID: \"c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2\") " pod="openstack-kuttl-tests/keystone-bootstrap-rmnb4" Jan 20 17:07:40 crc kubenswrapper[4558]: I0120 17:07:40.909430 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2-scripts\") pod \"keystone-bootstrap-rmnb4\" (UID: \"c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2\") " pod="openstack-kuttl-tests/keystone-bootstrap-rmnb4" Jan 20 17:07:40 crc kubenswrapper[4558]: I0120 17:07:40.909477 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2-credential-keys\") pod \"keystone-bootstrap-rmnb4\" (UID: \"c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2\") " pod="openstack-kuttl-tests/keystone-bootstrap-rmnb4" Jan 20 17:07:40 crc kubenswrapper[4558]: I0120 17:07:40.909509 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2-config-data\") pod \"keystone-bootstrap-rmnb4\" (UID: \"c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2\") " pod="openstack-kuttl-tests/keystone-bootstrap-rmnb4" Jan 20 17:07:40 crc kubenswrapper[4558]: I0120 17:07:40.909556 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7rsc9\" (UniqueName: \"kubernetes.io/projected/c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2-kube-api-access-7rsc9\") pod \"keystone-bootstrap-rmnb4\" (UID: \"c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2\") " pod="openstack-kuttl-tests/keystone-bootstrap-rmnb4" Jan 20 17:07:40 crc kubenswrapper[4558]: I0120 17:07:40.909575 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2-fernet-keys\") pod \"keystone-bootstrap-rmnb4\" (UID: \"c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2\") " 
pod="openstack-kuttl-tests/keystone-bootstrap-rmnb4" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.011309 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2-credential-keys\") pod \"keystone-bootstrap-rmnb4\" (UID: \"c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2\") " pod="openstack-kuttl-tests/keystone-bootstrap-rmnb4" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.011376 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2-config-data\") pod \"keystone-bootstrap-rmnb4\" (UID: \"c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2\") " pod="openstack-kuttl-tests/keystone-bootstrap-rmnb4" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.011430 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7rsc9\" (UniqueName: \"kubernetes.io/projected/c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2-kube-api-access-7rsc9\") pod \"keystone-bootstrap-rmnb4\" (UID: \"c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2\") " pod="openstack-kuttl-tests/keystone-bootstrap-rmnb4" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.011465 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2-fernet-keys\") pod \"keystone-bootstrap-rmnb4\" (UID: \"c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2\") " pod="openstack-kuttl-tests/keystone-bootstrap-rmnb4" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.011498 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2-combined-ca-bundle\") pod \"keystone-bootstrap-rmnb4\" (UID: \"c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2\") " pod="openstack-kuttl-tests/keystone-bootstrap-rmnb4" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.011527 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2-scripts\") pod \"keystone-bootstrap-rmnb4\" (UID: \"c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2\") " pod="openstack-kuttl-tests/keystone-bootstrap-rmnb4" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.017947 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2-credential-keys\") pod \"keystone-bootstrap-rmnb4\" (UID: \"c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2\") " pod="openstack-kuttl-tests/keystone-bootstrap-rmnb4" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.018269 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2-combined-ca-bundle\") pod \"keystone-bootstrap-rmnb4\" (UID: \"c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2\") " pod="openstack-kuttl-tests/keystone-bootstrap-rmnb4" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.022681 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2-fernet-keys\") pod \"keystone-bootstrap-rmnb4\" (UID: \"c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2\") " pod="openstack-kuttl-tests/keystone-bootstrap-rmnb4" Jan 20 
17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.025446 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2-scripts\") pod \"keystone-bootstrap-rmnb4\" (UID: \"c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2\") " pod="openstack-kuttl-tests/keystone-bootstrap-rmnb4" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.040847 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7rsc9\" (UniqueName: \"kubernetes.io/projected/c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2-kube-api-access-7rsc9\") pod \"keystone-bootstrap-rmnb4\" (UID: \"c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2\") " pod="openstack-kuttl-tests/keystone-bootstrap-rmnb4" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.041674 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2-config-data\") pod \"keystone-bootstrap-rmnb4\" (UID: \"c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2\") " pod="openstack-kuttl-tests/keystone-bootstrap-rmnb4" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.089475 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.101525 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.117341 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.132650 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.132861 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.133694 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-db-sync-5rtqc"] Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.134639 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-db-sync-5rtqc" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.140304 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-config-data" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.140456 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-barbican-dockercfg-zpzhc" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.155214 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-db-sync-5rtqc"] Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.158639 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-rmnb4" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.178780 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-db-sync-blmnq"] Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.180117 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-db-sync-blmnq" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.190079 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-cinder-dockercfg-q8875" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.190323 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-scripts" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.198105 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-db-sync-blmnq"] Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.209413 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-config-data" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.209567 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-db-sync-fs4x6"] Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.210649 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-db-sync-fs4x6" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.215325 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e5918aaf-a79a-4f8e-af24-0a538b61af22-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e5918aaf-a79a-4f8e-af24-0a538b61af22\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.215358 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4fb0c521-d465-47b9-b859-199f53143dca-db-sync-config-data\") pod \"barbican-db-sync-5rtqc\" (UID: \"4fb0c521-d465-47b9-b859-199f53143dca\") " pod="openstack-kuttl-tests/barbican-db-sync-5rtqc" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.215385 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e5918aaf-a79a-4f8e-af24-0a538b61af22-log-httpd\") pod \"ceilometer-0\" (UID: \"e5918aaf-a79a-4f8e-af24-0a538b61af22\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.215407 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e5918aaf-a79a-4f8e-af24-0a538b61af22-config-data\") pod \"ceilometer-0\" (UID: \"e5918aaf-a79a-4f8e-af24-0a538b61af22\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.215443 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hgvtf\" (UniqueName: \"kubernetes.io/projected/e5918aaf-a79a-4f8e-af24-0a538b61af22-kube-api-access-hgvtf\") pod \"ceilometer-0\" (UID: \"e5918aaf-a79a-4f8e-af24-0a538b61af22\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.215462 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e5918aaf-a79a-4f8e-af24-0a538b61af22-scripts\") pod \"ceilometer-0\" (UID: \"e5918aaf-a79a-4f8e-af24-0a538b61af22\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.215564 4558 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e5918aaf-a79a-4f8e-af24-0a538b61af22-run-httpd\") pod \"ceilometer-0\" (UID: \"e5918aaf-a79a-4f8e-af24-0a538b61af22\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.215604 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fb0c521-d465-47b9-b859-199f53143dca-combined-ca-bundle\") pod \"barbican-db-sync-5rtqc\" (UID: \"4fb0c521-d465-47b9-b859-199f53143dca\") " pod="openstack-kuttl-tests/barbican-db-sync-5rtqc" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.215676 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5918aaf-a79a-4f8e-af24-0a538b61af22-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e5918aaf-a79a-4f8e-af24-0a538b61af22\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.215827 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bhc2r\" (UniqueName: \"kubernetes.io/projected/4fb0c521-d465-47b9-b859-199f53143dca-kube-api-access-bhc2r\") pod \"barbican-db-sync-5rtqc\" (UID: \"4fb0c521-d465-47b9-b859-199f53143dca\") " pod="openstack-kuttl-tests/barbican-db-sync-5rtqc" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.219476 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-httpd-config" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.219808 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-config" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.219916 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-neutron-dockercfg-tp65d" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.234630 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/placement-db-sync-zgncw"] Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.235836 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-db-sync-zgncw" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.242578 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-db-sync-fs4x6"] Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.243482 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"placement-scripts" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.243635 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"placement-placement-dockercfg-6fxmw" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.243969 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"placement-config-data" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.261014 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-db-sync-zgncw"] Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.318024 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4c9f9871-91f4-4670-a6af-0c493f3ce85b-etc-machine-id\") pod \"cinder-db-sync-blmnq\" (UID: \"4c9f9871-91f4-4670-a6af-0c493f3ce85b\") " pod="openstack-kuttl-tests/cinder-db-sync-blmnq" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.318082 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5918aaf-a79a-4f8e-af24-0a538b61af22-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e5918aaf-a79a-4f8e-af24-0a538b61af22\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.318119 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e152238-833a-4e1c-b854-0584daae3826-combined-ca-bundle\") pod \"placement-db-sync-zgncw\" (UID: \"7e152238-833a-4e1c-b854-0584daae3826\") " pod="openstack-kuttl-tests/placement-db-sync-zgncw" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.318146 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4c9f9871-91f4-4670-a6af-0c493f3ce85b-db-sync-config-data\") pod \"cinder-db-sync-blmnq\" (UID: \"4c9f9871-91f4-4670-a6af-0c493f3ce85b\") " pod="openstack-kuttl-tests/cinder-db-sync-blmnq" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.318190 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4c9f9871-91f4-4670-a6af-0c493f3ce85b-scripts\") pod \"cinder-db-sync-blmnq\" (UID: \"4c9f9871-91f4-4670-a6af-0c493f3ce85b\") " pod="openstack-kuttl-tests/cinder-db-sync-blmnq" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.318214 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sbvgv\" (UniqueName: \"kubernetes.io/projected/4c9f9871-91f4-4670-a6af-0c493f3ce85b-kube-api-access-sbvgv\") pod \"cinder-db-sync-blmnq\" (UID: \"4c9f9871-91f4-4670-a6af-0c493f3ce85b\") " pod="openstack-kuttl-tests/cinder-db-sync-blmnq" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.318262 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" 
(UniqueName: \"kubernetes.io/secret/7e152238-833a-4e1c-b854-0584daae3826-config-data\") pod \"placement-db-sync-zgncw\" (UID: \"7e152238-833a-4e1c-b854-0584daae3826\") " pod="openstack-kuttl-tests/placement-db-sync-zgncw" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.318302 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bhc2r\" (UniqueName: \"kubernetes.io/projected/4fb0c521-d465-47b9-b859-199f53143dca-kube-api-access-bhc2r\") pod \"barbican-db-sync-5rtqc\" (UID: \"4fb0c521-d465-47b9-b859-199f53143dca\") " pod="openstack-kuttl-tests/barbican-db-sync-5rtqc" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.318382 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c9f9871-91f4-4670-a6af-0c493f3ce85b-combined-ca-bundle\") pod \"cinder-db-sync-blmnq\" (UID: \"4c9f9871-91f4-4670-a6af-0c493f3ce85b\") " pod="openstack-kuttl-tests/cinder-db-sync-blmnq" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.318403 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mvlqj\" (UniqueName: \"kubernetes.io/projected/8ea04114-b82b-4609-8f7f-a9ff17cb3b57-kube-api-access-mvlqj\") pod \"neutron-db-sync-fs4x6\" (UID: \"8ea04114-b82b-4609-8f7f-a9ff17cb3b57\") " pod="openstack-kuttl-tests/neutron-db-sync-fs4x6" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.318429 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e5918aaf-a79a-4f8e-af24-0a538b61af22-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e5918aaf-a79a-4f8e-af24-0a538b61af22\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.318456 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4fb0c521-d465-47b9-b859-199f53143dca-db-sync-config-data\") pod \"barbican-db-sync-5rtqc\" (UID: \"4fb0c521-d465-47b9-b859-199f53143dca\") " pod="openstack-kuttl-tests/barbican-db-sync-5rtqc" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.318479 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mkst2\" (UniqueName: \"kubernetes.io/projected/7e152238-833a-4e1c-b854-0584daae3826-kube-api-access-mkst2\") pod \"placement-db-sync-zgncw\" (UID: \"7e152238-833a-4e1c-b854-0584daae3826\") " pod="openstack-kuttl-tests/placement-db-sync-zgncw" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.318504 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7e152238-833a-4e1c-b854-0584daae3826-scripts\") pod \"placement-db-sync-zgncw\" (UID: \"7e152238-833a-4e1c-b854-0584daae3826\") " pod="openstack-kuttl-tests/placement-db-sync-zgncw" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.318528 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e5918aaf-a79a-4f8e-af24-0a538b61af22-log-httpd\") pod \"ceilometer-0\" (UID: \"e5918aaf-a79a-4f8e-af24-0a538b61af22\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.318566 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/e5918aaf-a79a-4f8e-af24-0a538b61af22-config-data\") pod \"ceilometer-0\" (UID: \"e5918aaf-a79a-4f8e-af24-0a538b61af22\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.318592 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7e152238-833a-4e1c-b854-0584daae3826-logs\") pod \"placement-db-sync-zgncw\" (UID: \"7e152238-833a-4e1c-b854-0584daae3826\") " pod="openstack-kuttl-tests/placement-db-sync-zgncw" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.318611 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4c9f9871-91f4-4670-a6af-0c493f3ce85b-config-data\") pod \"cinder-db-sync-blmnq\" (UID: \"4c9f9871-91f4-4670-a6af-0c493f3ce85b\") " pod="openstack-kuttl-tests/cinder-db-sync-blmnq" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.318652 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hgvtf\" (UniqueName: \"kubernetes.io/projected/e5918aaf-a79a-4f8e-af24-0a538b61af22-kube-api-access-hgvtf\") pod \"ceilometer-0\" (UID: \"e5918aaf-a79a-4f8e-af24-0a538b61af22\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.318674 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e5918aaf-a79a-4f8e-af24-0a538b61af22-scripts\") pod \"ceilometer-0\" (UID: \"e5918aaf-a79a-4f8e-af24-0a538b61af22\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.318704 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e5918aaf-a79a-4f8e-af24-0a538b61af22-run-httpd\") pod \"ceilometer-0\" (UID: \"e5918aaf-a79a-4f8e-af24-0a538b61af22\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.318724 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/8ea04114-b82b-4609-8f7f-a9ff17cb3b57-config\") pod \"neutron-db-sync-fs4x6\" (UID: \"8ea04114-b82b-4609-8f7f-a9ff17cb3b57\") " pod="openstack-kuttl-tests/neutron-db-sync-fs4x6" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.318744 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ea04114-b82b-4609-8f7f-a9ff17cb3b57-combined-ca-bundle\") pod \"neutron-db-sync-fs4x6\" (UID: \"8ea04114-b82b-4609-8f7f-a9ff17cb3b57\") " pod="openstack-kuttl-tests/neutron-db-sync-fs4x6" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.318773 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fb0c521-d465-47b9-b859-199f53143dca-combined-ca-bundle\") pod \"barbican-db-sync-5rtqc\" (UID: \"4fb0c521-d465-47b9-b859-199f53143dca\") " pod="openstack-kuttl-tests/barbican-db-sync-5rtqc" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.325875 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e5918aaf-a79a-4f8e-af24-0a538b61af22-log-httpd\") pod \"ceilometer-0\" (UID: 
\"e5918aaf-a79a-4f8e-af24-0a538b61af22\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.326012 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5918aaf-a79a-4f8e-af24-0a538b61af22-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e5918aaf-a79a-4f8e-af24-0a538b61af22\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.326559 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4fb0c521-d465-47b9-b859-199f53143dca-db-sync-config-data\") pod \"barbican-db-sync-5rtqc\" (UID: \"4fb0c521-d465-47b9-b859-199f53143dca\") " pod="openstack-kuttl-tests/barbican-db-sync-5rtqc" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.326618 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e5918aaf-a79a-4f8e-af24-0a538b61af22-run-httpd\") pod \"ceilometer-0\" (UID: \"e5918aaf-a79a-4f8e-af24-0a538b61af22\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.333217 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e5918aaf-a79a-4f8e-af24-0a538b61af22-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e5918aaf-a79a-4f8e-af24-0a538b61af22\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.338101 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e5918aaf-a79a-4f8e-af24-0a538b61af22-config-data\") pod \"ceilometer-0\" (UID: \"e5918aaf-a79a-4f8e-af24-0a538b61af22\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.346104 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e5918aaf-a79a-4f8e-af24-0a538b61af22-scripts\") pod \"ceilometer-0\" (UID: \"e5918aaf-a79a-4f8e-af24-0a538b61af22\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.346905 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fb0c521-d465-47b9-b859-199f53143dca-combined-ca-bundle\") pod \"barbican-db-sync-5rtqc\" (UID: \"4fb0c521-d465-47b9-b859-199f53143dca\") " pod="openstack-kuttl-tests/barbican-db-sync-5rtqc" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.351782 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bhc2r\" (UniqueName: \"kubernetes.io/projected/4fb0c521-d465-47b9-b859-199f53143dca-kube-api-access-bhc2r\") pod \"barbican-db-sync-5rtqc\" (UID: \"4fb0c521-d465-47b9-b859-199f53143dca\") " pod="openstack-kuttl-tests/barbican-db-sync-5rtqc" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.354241 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hgvtf\" (UniqueName: \"kubernetes.io/projected/e5918aaf-a79a-4f8e-af24-0a538b61af22-kube-api-access-hgvtf\") pod \"ceilometer-0\" (UID: \"e5918aaf-a79a-4f8e-af24-0a538b61af22\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.421108 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"logs\" (UniqueName: \"kubernetes.io/empty-dir/7e152238-833a-4e1c-b854-0584daae3826-logs\") pod \"placement-db-sync-zgncw\" (UID: \"7e152238-833a-4e1c-b854-0584daae3826\") " pod="openstack-kuttl-tests/placement-db-sync-zgncw" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.421149 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4c9f9871-91f4-4670-a6af-0c493f3ce85b-config-data\") pod \"cinder-db-sync-blmnq\" (UID: \"4c9f9871-91f4-4670-a6af-0c493f3ce85b\") " pod="openstack-kuttl-tests/cinder-db-sync-blmnq" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.421231 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/8ea04114-b82b-4609-8f7f-a9ff17cb3b57-config\") pod \"neutron-db-sync-fs4x6\" (UID: \"8ea04114-b82b-4609-8f7f-a9ff17cb3b57\") " pod="openstack-kuttl-tests/neutron-db-sync-fs4x6" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.421248 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ea04114-b82b-4609-8f7f-a9ff17cb3b57-combined-ca-bundle\") pod \"neutron-db-sync-fs4x6\" (UID: \"8ea04114-b82b-4609-8f7f-a9ff17cb3b57\") " pod="openstack-kuttl-tests/neutron-db-sync-fs4x6" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.421288 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4c9f9871-91f4-4670-a6af-0c493f3ce85b-etc-machine-id\") pod \"cinder-db-sync-blmnq\" (UID: \"4c9f9871-91f4-4670-a6af-0c493f3ce85b\") " pod="openstack-kuttl-tests/cinder-db-sync-blmnq" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.421337 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e152238-833a-4e1c-b854-0584daae3826-combined-ca-bundle\") pod \"placement-db-sync-zgncw\" (UID: \"7e152238-833a-4e1c-b854-0584daae3826\") " pod="openstack-kuttl-tests/placement-db-sync-zgncw" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.421360 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4c9f9871-91f4-4670-a6af-0c493f3ce85b-db-sync-config-data\") pod \"cinder-db-sync-blmnq\" (UID: \"4c9f9871-91f4-4670-a6af-0c493f3ce85b\") " pod="openstack-kuttl-tests/cinder-db-sync-blmnq" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.421387 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4c9f9871-91f4-4670-a6af-0c493f3ce85b-scripts\") pod \"cinder-db-sync-blmnq\" (UID: \"4c9f9871-91f4-4670-a6af-0c493f3ce85b\") " pod="openstack-kuttl-tests/cinder-db-sync-blmnq" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.421411 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sbvgv\" (UniqueName: \"kubernetes.io/projected/4c9f9871-91f4-4670-a6af-0c493f3ce85b-kube-api-access-sbvgv\") pod \"cinder-db-sync-blmnq\" (UID: \"4c9f9871-91f4-4670-a6af-0c493f3ce85b\") " pod="openstack-kuttl-tests/cinder-db-sync-blmnq" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.421443 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e152238-833a-4e1c-b854-0584daae3826-config-data\") pod 
\"placement-db-sync-zgncw\" (UID: \"7e152238-833a-4e1c-b854-0584daae3826\") " pod="openstack-kuttl-tests/placement-db-sync-zgncw" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.421514 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7e152238-833a-4e1c-b854-0584daae3826-logs\") pod \"placement-db-sync-zgncw\" (UID: \"7e152238-833a-4e1c-b854-0584daae3826\") " pod="openstack-kuttl-tests/placement-db-sync-zgncw" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.421528 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c9f9871-91f4-4670-a6af-0c493f3ce85b-combined-ca-bundle\") pod \"cinder-db-sync-blmnq\" (UID: \"4c9f9871-91f4-4670-a6af-0c493f3ce85b\") " pod="openstack-kuttl-tests/cinder-db-sync-blmnq" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.421585 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mvlqj\" (UniqueName: \"kubernetes.io/projected/8ea04114-b82b-4609-8f7f-a9ff17cb3b57-kube-api-access-mvlqj\") pod \"neutron-db-sync-fs4x6\" (UID: \"8ea04114-b82b-4609-8f7f-a9ff17cb3b57\") " pod="openstack-kuttl-tests/neutron-db-sync-fs4x6" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.421639 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mkst2\" (UniqueName: \"kubernetes.io/projected/7e152238-833a-4e1c-b854-0584daae3826-kube-api-access-mkst2\") pod \"placement-db-sync-zgncw\" (UID: \"7e152238-833a-4e1c-b854-0584daae3826\") " pod="openstack-kuttl-tests/placement-db-sync-zgncw" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.421673 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7e152238-833a-4e1c-b854-0584daae3826-scripts\") pod \"placement-db-sync-zgncw\" (UID: \"7e152238-833a-4e1c-b854-0584daae3826\") " pod="openstack-kuttl-tests/placement-db-sync-zgncw" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.424666 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7e152238-833a-4e1c-b854-0584daae3826-scripts\") pod \"placement-db-sync-zgncw\" (UID: \"7e152238-833a-4e1c-b854-0584daae3826\") " pod="openstack-kuttl-tests/placement-db-sync-zgncw" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.424971 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c9f9871-91f4-4670-a6af-0c493f3ce85b-combined-ca-bundle\") pod \"cinder-db-sync-blmnq\" (UID: \"4c9f9871-91f4-4670-a6af-0c493f3ce85b\") " pod="openstack-kuttl-tests/cinder-db-sync-blmnq" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.436874 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4c9f9871-91f4-4670-a6af-0c493f3ce85b-etc-machine-id\") pod \"cinder-db-sync-blmnq\" (UID: \"4c9f9871-91f4-4670-a6af-0c493f3ce85b\") " pod="openstack-kuttl-tests/cinder-db-sync-blmnq" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.437838 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.437871 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e152238-833a-4e1c-b854-0584daae3826-combined-ca-bundle\") pod \"placement-db-sync-zgncw\" (UID: \"7e152238-833a-4e1c-b854-0584daae3826\") " pod="openstack-kuttl-tests/placement-db-sync-zgncw" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.440009 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e152238-833a-4e1c-b854-0584daae3826-config-data\") pod \"placement-db-sync-zgncw\" (UID: \"7e152238-833a-4e1c-b854-0584daae3826\") " pod="openstack-kuttl-tests/placement-db-sync-zgncw" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.443199 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mkst2\" (UniqueName: \"kubernetes.io/projected/7e152238-833a-4e1c-b854-0584daae3826-kube-api-access-mkst2\") pod \"placement-db-sync-zgncw\" (UID: \"7e152238-833a-4e1c-b854-0584daae3826\") " pod="openstack-kuttl-tests/placement-db-sync-zgncw" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.444615 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4c9f9871-91f4-4670-a6af-0c493f3ce85b-scripts\") pod \"cinder-db-sync-blmnq\" (UID: \"4c9f9871-91f4-4670-a6af-0c493f3ce85b\") " pod="openstack-kuttl-tests/cinder-db-sync-blmnq" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.445453 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4c9f9871-91f4-4670-a6af-0c493f3ce85b-config-data\") pod \"cinder-db-sync-blmnq\" (UID: \"4c9f9871-91f4-4670-a6af-0c493f3ce85b\") " pod="openstack-kuttl-tests/cinder-db-sync-blmnq" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.445902 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mvlqj\" (UniqueName: \"kubernetes.io/projected/8ea04114-b82b-4609-8f7f-a9ff17cb3b57-kube-api-access-mvlqj\") pod \"neutron-db-sync-fs4x6\" (UID: \"8ea04114-b82b-4609-8f7f-a9ff17cb3b57\") " pod="openstack-kuttl-tests/neutron-db-sync-fs4x6" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.446857 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sbvgv\" (UniqueName: \"kubernetes.io/projected/4c9f9871-91f4-4670-a6af-0c493f3ce85b-kube-api-access-sbvgv\") pod \"cinder-db-sync-blmnq\" (UID: \"4c9f9871-91f4-4670-a6af-0c493f3ce85b\") " pod="openstack-kuttl-tests/cinder-db-sync-blmnq" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.447312 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4c9f9871-91f4-4670-a6af-0c493f3ce85b-db-sync-config-data\") pod \"cinder-db-sync-blmnq\" (UID: \"4c9f9871-91f4-4670-a6af-0c493f3ce85b\") " pod="openstack-kuttl-tests/cinder-db-sync-blmnq" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.451514 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-db-sync-5rtqc" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.451774 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/8ea04114-b82b-4609-8f7f-a9ff17cb3b57-config\") pod \"neutron-db-sync-fs4x6\" (UID: \"8ea04114-b82b-4609-8f7f-a9ff17cb3b57\") " pod="openstack-kuttl-tests/neutron-db-sync-fs4x6" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.456589 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ea04114-b82b-4609-8f7f-a9ff17cb3b57-combined-ca-bundle\") pod \"neutron-db-sync-fs4x6\" (UID: \"8ea04114-b82b-4609-8f7f-a9ff17cb3b57\") " pod="openstack-kuttl-tests/neutron-db-sync-fs4x6" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.562694 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-db-sync-blmnq" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.572379 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-db-sync-fs4x6" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.580788 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-db-sync-zgncw" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.706444 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-rmnb4"] Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.861672 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:07:41 crc kubenswrapper[4558]: W0120 17:07:41.877501 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode5918aaf_a79a_4f8e_af24_0a538b61af22.slice/crio-45f60300a94bb431bf81c5b13486a19da286eb30558820f3b3af26729c13c5de WatchSource:0}: Error finding container 45f60300a94bb431bf81c5b13486a19da286eb30558820f3b3af26729c13c5de: Status 404 returned error can't find the container with id 45f60300a94bb431bf81c5b13486a19da286eb30558820f3b3af26729c13c5de Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.968957 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.970233 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.973916 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-public-svc" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.973943 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-external-config-data" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.974062 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-glance-dockercfg-9qc5r" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.974253 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-scripts" Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.976442 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:07:41 crc kubenswrapper[4558]: W0120 17:07:41.979781 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4fb0c521_d465_47b9_b859_199f53143dca.slice/crio-720cf8a66b7cad8587f8ef26f04899153239b9d5f060876b68844c1340f9e7ba WatchSource:0}: Error finding container 720cf8a66b7cad8587f8ef26f04899153239b9d5f060876b68844c1340f9e7ba: Status 404 returned error can't find the container with id 720cf8a66b7cad8587f8ef26f04899153239b9d5f060876b68844c1340f9e7ba Jan 20 17:07:41 crc kubenswrapper[4558]: I0120 17:07:41.982282 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-db-sync-5rtqc"] Jan 20 17:07:42 crc kubenswrapper[4558]: I0120 17:07:42.033367 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb-scripts\") pod \"glance-default-external-api-0\" (UID: \"8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:07:42 crc kubenswrapper[4558]: I0120 17:07:42.033434 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-external-api-0\" (UID: \"8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:07:42 crc kubenswrapper[4558]: I0120 17:07:42.033486 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb-logs\") pod \"glance-default-external-api-0\" (UID: \"8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:07:42 crc kubenswrapper[4558]: I0120 17:07:42.033525 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qwh45\" (UniqueName: \"kubernetes.io/projected/8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb-kube-api-access-qwh45\") pod \"glance-default-external-api-0\" (UID: \"8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:07:42 crc kubenswrapper[4558]: I0120 17:07:42.033558 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb-config-data\") pod \"glance-default-external-api-0\" (UID: \"8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:07:42 crc kubenswrapper[4558]: I0120 17:07:42.033599 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:07:42 crc kubenswrapper[4558]: I0120 17:07:42.033642 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:07:42 crc kubenswrapper[4558]: I0120 17:07:42.033700 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:07:42 crc kubenswrapper[4558]: I0120 17:07:42.058471 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-db-sync-blmnq"] Jan 20 17:07:42 crc kubenswrapper[4558]: I0120 17:07:42.071507 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-db-sync-fs4x6"] Jan 20 17:07:42 crc kubenswrapper[4558]: I0120 17:07:42.105214 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:07:42 crc kubenswrapper[4558]: I0120 17:07:42.106829 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:07:42 crc kubenswrapper[4558]: I0120 17:07:42.108663 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-internal-config-data" Jan 20 17:07:42 crc kubenswrapper[4558]: I0120 17:07:42.108837 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-internal-svc" Jan 20 17:07:42 crc kubenswrapper[4558]: I0120 17:07:42.123527 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:07:42 crc kubenswrapper[4558]: I0120 17:07:42.141386 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:07:42 crc kubenswrapper[4558]: I0120 17:07:42.141478 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:07:42 crc kubenswrapper[4558]: I0120 17:07:42.141529 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb-scripts\") pod \"glance-default-external-api-0\" (UID: \"8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:07:42 crc kubenswrapper[4558]: I0120 17:07:42.141577 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-external-api-0\" (UID: \"8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:07:42 crc kubenswrapper[4558]: I0120 17:07:42.141634 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb-logs\") pod \"glance-default-external-api-0\" (UID: \"8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:07:42 crc kubenswrapper[4558]: I0120 17:07:42.141681 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qwh45\" (UniqueName: \"kubernetes.io/projected/8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb-kube-api-access-qwh45\") pod \"glance-default-external-api-0\" (UID: \"8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:07:42 crc kubenswrapper[4558]: I0120 17:07:42.141711 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb-config-data\") pod \"glance-default-external-api-0\" (UID: \"8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:07:42 crc kubenswrapper[4558]: I0120 17:07:42.141759 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:07:42 crc kubenswrapper[4558]: I0120 17:07:42.141776 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:07:42 crc kubenswrapper[4558]: I0120 17:07:42.142005 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-external-api-0\" (UID: \"8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb\") device mount path \"/mnt/openstack/pv03\"" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:07:42 crc kubenswrapper[4558]: I0120 17:07:42.142241 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb-logs\") pod \"glance-default-external-api-0\" (UID: \"8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:07:42 crc kubenswrapper[4558]: I0120 17:07:42.155107 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb-config-data\") pod \"glance-default-external-api-0\" (UID: \"8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:07:42 crc kubenswrapper[4558]: I0120 17:07:42.156469 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:07:42 crc kubenswrapper[4558]: I0120 17:07:42.160930 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-db-sync-zgncw"] Jan 20 17:07:42 crc kubenswrapper[4558]: I0120 17:07:42.162451 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb-scripts\") pod \"glance-default-external-api-0\" (UID: \"8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:07:42 crc kubenswrapper[4558]: I0120 17:07:42.184981 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:07:42 crc kubenswrapper[4558]: I0120 17:07:42.189737 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qwh45\" (UniqueName: \"kubernetes.io/projected/8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb-kube-api-access-qwh45\") pod \"glance-default-external-api-0\" (UID: \"8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb\") " 
pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:07:42 crc kubenswrapper[4558]: I0120 17:07:42.228405 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-external-api-0\" (UID: \"8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:07:42 crc kubenswrapper[4558]: I0120 17:07:42.243380 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8fd459ca-89a8-4eff-a178-a1cbef557923-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"8fd459ca-89a8-4eff-a178-a1cbef557923\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:07:42 crc kubenswrapper[4558]: I0120 17:07:42.243418 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8fd459ca-89a8-4eff-a178-a1cbef557923-config-data\") pod \"glance-default-internal-api-0\" (UID: \"8fd459ca-89a8-4eff-a178-a1cbef557923\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:07:42 crc kubenswrapper[4558]: I0120 17:07:42.243445 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hpwxc\" (UniqueName: \"kubernetes.io/projected/8fd459ca-89a8-4eff-a178-a1cbef557923-kube-api-access-hpwxc\") pod \"glance-default-internal-api-0\" (UID: \"8fd459ca-89a8-4eff-a178-a1cbef557923\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:07:42 crc kubenswrapper[4558]: I0120 17:07:42.243469 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8fd459ca-89a8-4eff-a178-a1cbef557923-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"8fd459ca-89a8-4eff-a178-a1cbef557923\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:07:42 crc kubenswrapper[4558]: I0120 17:07:42.243485 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-internal-api-0\" (UID: \"8fd459ca-89a8-4eff-a178-a1cbef557923\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:07:42 crc kubenswrapper[4558]: I0120 17:07:42.243572 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8fd459ca-89a8-4eff-a178-a1cbef557923-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"8fd459ca-89a8-4eff-a178-a1cbef557923\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:07:42 crc kubenswrapper[4558]: I0120 17:07:42.243590 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8fd459ca-89a8-4eff-a178-a1cbef557923-logs\") pod \"glance-default-internal-api-0\" (UID: \"8fd459ca-89a8-4eff-a178-a1cbef557923\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:07:42 crc kubenswrapper[4558]: I0120 17:07:42.243608 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/8fd459ca-89a8-4eff-a178-a1cbef557923-scripts\") pod \"glance-default-internal-api-0\" (UID: \"8fd459ca-89a8-4eff-a178-a1cbef557923\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:07:42 crc kubenswrapper[4558]: I0120 17:07:42.307139 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:07:42 crc kubenswrapper[4558]: I0120 17:07:42.345239 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8fd459ca-89a8-4eff-a178-a1cbef557923-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"8fd459ca-89a8-4eff-a178-a1cbef557923\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:07:42 crc kubenswrapper[4558]: I0120 17:07:42.346052 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8fd459ca-89a8-4eff-a178-a1cbef557923-logs\") pod \"glance-default-internal-api-0\" (UID: \"8fd459ca-89a8-4eff-a178-a1cbef557923\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:07:42 crc kubenswrapper[4558]: I0120 17:07:42.345301 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8fd459ca-89a8-4eff-a178-a1cbef557923-logs\") pod \"glance-default-internal-api-0\" (UID: \"8fd459ca-89a8-4eff-a178-a1cbef557923\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:07:42 crc kubenswrapper[4558]: I0120 17:07:42.346205 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8fd459ca-89a8-4eff-a178-a1cbef557923-scripts\") pod \"glance-default-internal-api-0\" (UID: \"8fd459ca-89a8-4eff-a178-a1cbef557923\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:07:42 crc kubenswrapper[4558]: I0120 17:07:42.346263 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8fd459ca-89a8-4eff-a178-a1cbef557923-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"8fd459ca-89a8-4eff-a178-a1cbef557923\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:07:42 crc kubenswrapper[4558]: I0120 17:07:42.346679 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8fd459ca-89a8-4eff-a178-a1cbef557923-config-data\") pod \"glance-default-internal-api-0\" (UID: \"8fd459ca-89a8-4eff-a178-a1cbef557923\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:07:42 crc kubenswrapper[4558]: I0120 17:07:42.346769 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hpwxc\" (UniqueName: \"kubernetes.io/projected/8fd459ca-89a8-4eff-a178-a1cbef557923-kube-api-access-hpwxc\") pod \"glance-default-internal-api-0\" (UID: \"8fd459ca-89a8-4eff-a178-a1cbef557923\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:07:42 crc kubenswrapper[4558]: I0120 17:07:42.346824 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8fd459ca-89a8-4eff-a178-a1cbef557923-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"8fd459ca-89a8-4eff-a178-a1cbef557923\") " 
pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:07:42 crc kubenswrapper[4558]: I0120 17:07:42.346843 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-internal-api-0\" (UID: \"8fd459ca-89a8-4eff-a178-a1cbef557923\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:07:42 crc kubenswrapper[4558]: I0120 17:07:42.347015 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-internal-api-0\" (UID: \"8fd459ca-89a8-4eff-a178-a1cbef557923\") device mount path \"/mnt/openstack/pv06\"" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:07:42 crc kubenswrapper[4558]: I0120 17:07:42.347205 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8fd459ca-89a8-4eff-a178-a1cbef557923-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"8fd459ca-89a8-4eff-a178-a1cbef557923\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:07:42 crc kubenswrapper[4558]: I0120 17:07:42.349725 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8fd459ca-89a8-4eff-a178-a1cbef557923-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"8fd459ca-89a8-4eff-a178-a1cbef557923\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:07:42 crc kubenswrapper[4558]: I0120 17:07:42.351123 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8fd459ca-89a8-4eff-a178-a1cbef557923-config-data\") pod \"glance-default-internal-api-0\" (UID: \"8fd459ca-89a8-4eff-a178-a1cbef557923\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:07:42 crc kubenswrapper[4558]: I0120 17:07:42.353955 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8fd459ca-89a8-4eff-a178-a1cbef557923-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"8fd459ca-89a8-4eff-a178-a1cbef557923\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:07:42 crc kubenswrapper[4558]: I0120 17:07:42.356812 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8fd459ca-89a8-4eff-a178-a1cbef557923-scripts\") pod \"glance-default-internal-api-0\" (UID: \"8fd459ca-89a8-4eff-a178-a1cbef557923\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:07:42 crc kubenswrapper[4558]: I0120 17:07:42.365564 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hpwxc\" (UniqueName: \"kubernetes.io/projected/8fd459ca-89a8-4eff-a178-a1cbef557923-kube-api-access-hpwxc\") pod \"glance-default-internal-api-0\" (UID: \"8fd459ca-89a8-4eff-a178-a1cbef557923\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:07:42 crc kubenswrapper[4558]: I0120 17:07:42.373722 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-internal-api-0\" (UID: \"8fd459ca-89a8-4eff-a178-a1cbef557923\") " 
pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:07:42 crc kubenswrapper[4558]: I0120 17:07:42.562201 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:07:42 crc kubenswrapper[4558]: I0120 17:07:42.722128 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-sync-5rtqc" event={"ID":"4fb0c521-d465-47b9-b859-199f53143dca","Type":"ContainerStarted","Data":"be8c70d94caaadd4e739ca16f688e2a0b03fc39521354a36341ffe6bf26f22c9"} Jan 20 17:07:42 crc kubenswrapper[4558]: I0120 17:07:42.722673 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-sync-5rtqc" event={"ID":"4fb0c521-d465-47b9-b859-199f53143dca","Type":"ContainerStarted","Data":"720cf8a66b7cad8587f8ef26f04899153239b9d5f060876b68844c1340f9e7ba"} Jan 20 17:07:42 crc kubenswrapper[4558]: I0120 17:07:42.740862 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-sync-fs4x6" event={"ID":"8ea04114-b82b-4609-8f7f-a9ff17cb3b57","Type":"ContainerStarted","Data":"0a6a7ebe77f730e7953cf0ae10557aacb2780f78536cbdeb6ab122242e514827"} Jan 20 17:07:42 crc kubenswrapper[4558]: I0120 17:07:42.740892 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-sync-fs4x6" event={"ID":"8ea04114-b82b-4609-8f7f-a9ff17cb3b57","Type":"ContainerStarted","Data":"fa6108a7bbddfa2dedb798ffc1a4fabbdba705651c03d73d73a34373cf69e639"} Jan 20 17:07:42 crc kubenswrapper[4558]: I0120 17:07:42.742312 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-db-sync-5rtqc" podStartSLOduration=1.742287889 podStartE2EDuration="1.742287889s" podCreationTimestamp="2026-01-20 17:07:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:07:42.741375735 +0000 UTC m=+1556.501713702" watchObservedRunningTime="2026-01-20 17:07:42.742287889 +0000 UTC m=+1556.502625856" Jan 20 17:07:42 crc kubenswrapper[4558]: I0120 17:07:42.745934 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-rmnb4" event={"ID":"c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2","Type":"ContainerStarted","Data":"9a3c955d5b11e9518eab345cc1f4a24a125317c2b69b6e6b2070a5417c63906f"} Jan 20 17:07:42 crc kubenswrapper[4558]: I0120 17:07:42.745974 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-rmnb4" event={"ID":"c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2","Type":"ContainerStarted","Data":"a0907ff1318b0dd034d73b6c958017896882282ad7a0ee88e1684cc4ee2dffb2"} Jan 20 17:07:42 crc kubenswrapper[4558]: I0120 17:07:42.753999 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-sync-blmnq" event={"ID":"4c9f9871-91f4-4670-a6af-0c493f3ce85b","Type":"ContainerStarted","Data":"793413453fcd2492f837a460150f33af8f30eafd397c6efbdc01ad525b8e1075"} Jan 20 17:07:42 crc kubenswrapper[4558]: I0120 17:07:42.760895 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/neutron-db-sync-fs4x6" podStartSLOduration=1.7608715350000002 podStartE2EDuration="1.760871535s" podCreationTimestamp="2026-01-20 17:07:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:07:42.754552102 +0000 
UTC m=+1556.514890068" watchObservedRunningTime="2026-01-20 17:07:42.760871535 +0000 UTC m=+1556.521209502" Jan 20 17:07:42 crc kubenswrapper[4558]: I0120 17:07:42.761111 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-sync-zgncw" event={"ID":"7e152238-833a-4e1c-b854-0584daae3826","Type":"ContainerStarted","Data":"941423064fa6ba0069e6771da6e9512b3e450564237394653ec2888a52642246"} Jan 20 17:07:42 crc kubenswrapper[4558]: I0120 17:07:42.761151 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-sync-zgncw" event={"ID":"7e152238-833a-4e1c-b854-0584daae3826","Type":"ContainerStarted","Data":"836df95f864cda22f81a38f41a6b0725bffa233d05c349e006515ab663e1adf2"} Jan 20 17:07:42 crc kubenswrapper[4558]: I0120 17:07:42.769490 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"e5918aaf-a79a-4f8e-af24-0a538b61af22","Type":"ContainerStarted","Data":"45f60300a94bb431bf81c5b13486a19da286eb30558820f3b3af26729c13c5de"} Jan 20 17:07:42 crc kubenswrapper[4558]: I0120 17:07:42.777106 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/keystone-bootstrap-rmnb4" podStartSLOduration=2.77708864 podStartE2EDuration="2.77708864s" podCreationTimestamp="2026-01-20 17:07:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:07:42.769487106 +0000 UTC m=+1556.529825073" watchObservedRunningTime="2026-01-20 17:07:42.77708864 +0000 UTC m=+1556.537426606" Jan 20 17:07:42 crc kubenswrapper[4558]: I0120 17:07:42.813354 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:07:42 crc kubenswrapper[4558]: I0120 17:07:42.815150 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/placement-db-sync-zgncw" podStartSLOduration=1.815134011 podStartE2EDuration="1.815134011s" podCreationTimestamp="2026-01-20 17:07:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:07:42.790554932 +0000 UTC m=+1556.550892899" watchObservedRunningTime="2026-01-20 17:07:42.815134011 +0000 UTC m=+1556.575471978" Jan 20 17:07:43 crc kubenswrapper[4558]: I0120 17:07:43.094404 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:07:43 crc kubenswrapper[4558]: I0120 17:07:43.124217 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:07:43 crc kubenswrapper[4558]: I0120 17:07:43.142396 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:07:43 crc kubenswrapper[4558]: I0120 17:07:43.175994 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:07:43 crc kubenswrapper[4558]: I0120 17:07:43.804465 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"8fd459ca-89a8-4eff-a178-a1cbef557923","Type":"ContainerStarted","Data":"c9f3137378ead48e88edb9721edbb2710a35ce62cd986156bcee4e170b6e6ec8"} Jan 20 17:07:43 crc kubenswrapper[4558]: I0120 17:07:43.813421 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-kuttl-tests/cinder-db-sync-blmnq" event={"ID":"4c9f9871-91f4-4670-a6af-0c493f3ce85b","Type":"ContainerStarted","Data":"742e670ce01715ecca8fb6a5ea852d6067112215a2e930c09ce883d2269c14b7"} Jan 20 17:07:43 crc kubenswrapper[4558]: I0120 17:07:43.819563 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"e5918aaf-a79a-4f8e-af24-0a538b61af22","Type":"ContainerStarted","Data":"c6d6179083d1da62058e87e75412018bf0cc96f7046edc5c269f6ba9f837fdca"} Jan 20 17:07:43 crc kubenswrapper[4558]: I0120 17:07:43.826676 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb","Type":"ContainerStarted","Data":"4324e2fce825da0de70111b8f7453e7392462ec09dad91158fed69d7d155e907"} Jan 20 17:07:43 crc kubenswrapper[4558]: I0120 17:07:43.826706 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb","Type":"ContainerStarted","Data":"32c05056d4cedf6055a7a99ee957fec8b0f11131fe9ea7e59b4d6a0ab073fa09"} Jan 20 17:07:43 crc kubenswrapper[4558]: I0120 17:07:43.833349 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/cinder-db-sync-blmnq" podStartSLOduration=2.8333314019999998 podStartE2EDuration="2.833331402s" podCreationTimestamp="2026-01-20 17:07:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:07:43.827386142 +0000 UTC m=+1557.587724109" watchObservedRunningTime="2026-01-20 17:07:43.833331402 +0000 UTC m=+1557.593669369" Jan 20 17:07:44 crc kubenswrapper[4558]: I0120 17:07:44.836702 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"8fd459ca-89a8-4eff-a178-a1cbef557923","Type":"ContainerStarted","Data":"979257cef910a5ce95066d2eddf8f8e11618f4acca7c04677ad4df5af1471e15"} Jan 20 17:07:44 crc kubenswrapper[4558]: I0120 17:07:44.837242 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"8fd459ca-89a8-4eff-a178-a1cbef557923","Type":"ContainerStarted","Data":"8bd898136be8319cc7e5ac8ab8fc67c0edb9c34b838f062d015f6f9e322ab2ea"} Jan 20 17:07:44 crc kubenswrapper[4558]: I0120 17:07:44.837270 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="8fd459ca-89a8-4eff-a178-a1cbef557923" containerName="glance-log" containerID="cri-o://8bd898136be8319cc7e5ac8ab8fc67c0edb9c34b838f062d015f6f9e322ab2ea" gracePeriod=30 Jan 20 17:07:44 crc kubenswrapper[4558]: I0120 17:07:44.837327 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="8fd459ca-89a8-4eff-a178-a1cbef557923" containerName="glance-httpd" containerID="cri-o://979257cef910a5ce95066d2eddf8f8e11618f4acca7c04677ad4df5af1471e15" gracePeriod=30 Jan 20 17:07:44 crc kubenswrapper[4558]: I0120 17:07:44.839133 4558 generic.go:334] "Generic (PLEG): container finished" podID="7e152238-833a-4e1c-b854-0584daae3826" containerID="941423064fa6ba0069e6771da6e9512b3e450564237394653ec2888a52642246" exitCode=0 Jan 20 17:07:44 crc kubenswrapper[4558]: I0120 17:07:44.839210 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-kuttl-tests/placement-db-sync-zgncw" event={"ID":"7e152238-833a-4e1c-b854-0584daae3826","Type":"ContainerDied","Data":"941423064fa6ba0069e6771da6e9512b3e450564237394653ec2888a52642246"} Jan 20 17:07:44 crc kubenswrapper[4558]: I0120 17:07:44.842802 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"e5918aaf-a79a-4f8e-af24-0a538b61af22","Type":"ContainerStarted","Data":"daf89e9a5edbcd4dc5626bac0a6afd5dd0377257a844e9b1cbee689ca493d073"} Jan 20 17:07:44 crc kubenswrapper[4558]: I0120 17:07:44.844759 4558 generic.go:334] "Generic (PLEG): container finished" podID="4fb0c521-d465-47b9-b859-199f53143dca" containerID="be8c70d94caaadd4e739ca16f688e2a0b03fc39521354a36341ffe6bf26f22c9" exitCode=0 Jan 20 17:07:44 crc kubenswrapper[4558]: I0120 17:07:44.844815 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-sync-5rtqc" event={"ID":"4fb0c521-d465-47b9-b859-199f53143dca","Type":"ContainerDied","Data":"be8c70d94caaadd4e739ca16f688e2a0b03fc39521354a36341ffe6bf26f22c9"} Jan 20 17:07:44 crc kubenswrapper[4558]: I0120 17:07:44.847509 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb" containerName="glance-log" containerID="cri-o://4324e2fce825da0de70111b8f7453e7392462ec09dad91158fed69d7d155e907" gracePeriod=30 Jan 20 17:07:44 crc kubenswrapper[4558]: I0120 17:07:44.847684 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb","Type":"ContainerStarted","Data":"126e2e87e98ce1c0e15495659b9271a8a4ebe4ab8abefe2bdb3e8c4d1bd96de3"} Jan 20 17:07:44 crc kubenswrapper[4558]: I0120 17:07:44.847741 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb" containerName="glance-httpd" containerID="cri-o://126e2e87e98ce1c0e15495659b9271a8a4ebe4ab8abefe2bdb3e8c4d1bd96de3" gracePeriod=30 Jan 20 17:07:44 crc kubenswrapper[4558]: I0120 17:07:44.881236 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-external-api-0" podStartSLOduration=4.8812249130000005 podStartE2EDuration="4.881224913s" podCreationTimestamp="2026-01-20 17:07:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:07:44.880227328 +0000 UTC m=+1558.640565294" watchObservedRunningTime="2026-01-20 17:07:44.881224913 +0000 UTC m=+1558.641562880" Jan 20 17:07:44 crc kubenswrapper[4558]: I0120 17:07:44.884832 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-internal-api-0" podStartSLOduration=3.884824552 podStartE2EDuration="3.884824552s" podCreationTimestamp="2026-01-20 17:07:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:07:44.861293003 +0000 UTC m=+1558.621630970" watchObservedRunningTime="2026-01-20 17:07:44.884824552 +0000 UTC m=+1558.645162519" Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.318270 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.383691 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.450984 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8fd459ca-89a8-4eff-a178-a1cbef557923-scripts\") pod \"8fd459ca-89a8-4eff-a178-a1cbef557923\" (UID: \"8fd459ca-89a8-4eff-a178-a1cbef557923\") " Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.451085 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8fd459ca-89a8-4eff-a178-a1cbef557923-combined-ca-bundle\") pod \"8fd459ca-89a8-4eff-a178-a1cbef557923\" (UID: \"8fd459ca-89a8-4eff-a178-a1cbef557923\") " Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.451117 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb-combined-ca-bundle\") pod \"8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb\" (UID: \"8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb\") " Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.451156 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8fd459ca-89a8-4eff-a178-a1cbef557923-logs\") pod \"8fd459ca-89a8-4eff-a178-a1cbef557923\" (UID: \"8fd459ca-89a8-4eff-a178-a1cbef557923\") " Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.451201 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8fd459ca-89a8-4eff-a178-a1cbef557923-internal-tls-certs\") pod \"8fd459ca-89a8-4eff-a178-a1cbef557923\" (UID: \"8fd459ca-89a8-4eff-a178-a1cbef557923\") " Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.451271 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb-httpd-run\") pod \"8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb\" (UID: \"8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb\") " Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.451332 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb-public-tls-certs\") pod \"8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb\" (UID: \"8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb\") " Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.451354 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8fd459ca-89a8-4eff-a178-a1cbef557923-httpd-run\") pod \"8fd459ca-89a8-4eff-a178-a1cbef557923\" (UID: \"8fd459ca-89a8-4eff-a178-a1cbef557923\") " Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.451375 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb\" (UID: \"8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb\") " Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.451392 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/8fd459ca-89a8-4eff-a178-a1cbef557923-config-data\") pod \"8fd459ca-89a8-4eff-a178-a1cbef557923\" (UID: \"8fd459ca-89a8-4eff-a178-a1cbef557923\") " Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.451415 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb-logs\") pod \"8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb\" (UID: \"8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb\") " Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.451481 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb-scripts\") pod \"8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb\" (UID: \"8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb\") " Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.451496 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb-config-data\") pod \"8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb\" (UID: \"8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb\") " Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.451532 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hpwxc\" (UniqueName: \"kubernetes.io/projected/8fd459ca-89a8-4eff-a178-a1cbef557923-kube-api-access-hpwxc\") pod \"8fd459ca-89a8-4eff-a178-a1cbef557923\" (UID: \"8fd459ca-89a8-4eff-a178-a1cbef557923\") " Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.451548 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"8fd459ca-89a8-4eff-a178-a1cbef557923\" (UID: \"8fd459ca-89a8-4eff-a178-a1cbef557923\") " Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.451567 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qwh45\" (UniqueName: \"kubernetes.io/projected/8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb-kube-api-access-qwh45\") pod \"8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb\" (UID: \"8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb\") " Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.451761 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8fd459ca-89a8-4eff-a178-a1cbef557923-logs" (OuterVolumeSpecName: "logs") pod "8fd459ca-89a8-4eff-a178-a1cbef557923" (UID: "8fd459ca-89a8-4eff-a178-a1cbef557923"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.451941 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8fd459ca-89a8-4eff-a178-a1cbef557923-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.452290 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb-logs" (OuterVolumeSpecName: "logs") pod "8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb" (UID: "8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.452689 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8fd459ca-89a8-4eff-a178-a1cbef557923-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "8fd459ca-89a8-4eff-a178-a1cbef557923" (UID: "8fd459ca-89a8-4eff-a178-a1cbef557923"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.454068 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb" (UID: "8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.456556 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8fd459ca-89a8-4eff-a178-a1cbef557923-scripts" (OuterVolumeSpecName: "scripts") pod "8fd459ca-89a8-4eff-a178-a1cbef557923" (UID: "8fd459ca-89a8-4eff-a178-a1cbef557923"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.457391 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage06-crc" (OuterVolumeSpecName: "glance") pod "8fd459ca-89a8-4eff-a178-a1cbef557923" (UID: "8fd459ca-89a8-4eff-a178-a1cbef557923"). InnerVolumeSpecName "local-storage06-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.458600 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "glance") pod "8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb" (UID: "8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb"). InnerVolumeSpecName "local-storage03-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.458719 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8fd459ca-89a8-4eff-a178-a1cbef557923-kube-api-access-hpwxc" (OuterVolumeSpecName: "kube-api-access-hpwxc") pod "8fd459ca-89a8-4eff-a178-a1cbef557923" (UID: "8fd459ca-89a8-4eff-a178-a1cbef557923"). InnerVolumeSpecName "kube-api-access-hpwxc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.462387 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb-kube-api-access-qwh45" (OuterVolumeSpecName: "kube-api-access-qwh45") pod "8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb" (UID: "8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb"). InnerVolumeSpecName "kube-api-access-qwh45". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.467107 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb-scripts" (OuterVolumeSpecName: "scripts") pod "8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb" (UID: "8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.485627 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8fd459ca-89a8-4eff-a178-a1cbef557923-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8fd459ca-89a8-4eff-a178-a1cbef557923" (UID: "8fd459ca-89a8-4eff-a178-a1cbef557923"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.486500 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb" (UID: "8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.497653 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb" (UID: "8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.501298 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8fd459ca-89a8-4eff-a178-a1cbef557923-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "8fd459ca-89a8-4eff-a178-a1cbef557923" (UID: "8fd459ca-89a8-4eff-a178-a1cbef557923"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.504182 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8fd459ca-89a8-4eff-a178-a1cbef557923-config-data" (OuterVolumeSpecName: "config-data") pod "8fd459ca-89a8-4eff-a178-a1cbef557923" (UID: "8fd459ca-89a8-4eff-a178-a1cbef557923"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.509802 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb-config-data" (OuterVolumeSpecName: "config-data") pod "8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb" (UID: "8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.554283 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8fd459ca-89a8-4eff-a178-a1cbef557923-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.554325 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.554337 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8fd459ca-89a8-4eff-a178-a1cbef557923-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.554350 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.554361 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.554370 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8fd459ca-89a8-4eff-a178-a1cbef557923-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.554410 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" " Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.554422 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8fd459ca-89a8-4eff-a178-a1cbef557923-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.554431 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.554442 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.554451 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.554461 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hpwxc\" (UniqueName: \"kubernetes.io/projected/8fd459ca-89a8-4eff-a178-a1cbef557923-kube-api-access-hpwxc\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.554478 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" " Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.554488 4558 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qwh45\" (UniqueName: \"kubernetes.io/projected/8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb-kube-api-access-qwh45\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.554496 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8fd459ca-89a8-4eff-a178-a1cbef557923-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.566808 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc" Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.568262 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage06-crc" (UniqueName: "kubernetes.io/local-volume/local-storage06-crc") on node "crc" Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.656802 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.656826 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.855698 4558 generic.go:334] "Generic (PLEG): container finished" podID="c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2" containerID="9a3c955d5b11e9518eab345cc1f4a24a125317c2b69b6e6b2070a5417c63906f" exitCode=0 Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.855785 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-rmnb4" event={"ID":"c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2","Type":"ContainerDied","Data":"9a3c955d5b11e9518eab345cc1f4a24a125317c2b69b6e6b2070a5417c63906f"} Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.857721 4558 generic.go:334] "Generic (PLEG): container finished" podID="8fd459ca-89a8-4eff-a178-a1cbef557923" containerID="979257cef910a5ce95066d2eddf8f8e11618f4acca7c04677ad4df5af1471e15" exitCode=143 Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.857750 4558 generic.go:334] "Generic (PLEG): container finished" podID="8fd459ca-89a8-4eff-a178-a1cbef557923" containerID="8bd898136be8319cc7e5ac8ab8fc67c0edb9c34b838f062d015f6f9e322ab2ea" exitCode=143 Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.857807 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.857829 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"8fd459ca-89a8-4eff-a178-a1cbef557923","Type":"ContainerDied","Data":"979257cef910a5ce95066d2eddf8f8e11618f4acca7c04677ad4df5af1471e15"} Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.857888 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"8fd459ca-89a8-4eff-a178-a1cbef557923","Type":"ContainerDied","Data":"8bd898136be8319cc7e5ac8ab8fc67c0edb9c34b838f062d015f6f9e322ab2ea"} Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.857905 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"8fd459ca-89a8-4eff-a178-a1cbef557923","Type":"ContainerDied","Data":"c9f3137378ead48e88edb9721edbb2710a35ce62cd986156bcee4e170b6e6ec8"} Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.857926 4558 scope.go:117] "RemoveContainer" containerID="979257cef910a5ce95066d2eddf8f8e11618f4acca7c04677ad4df5af1471e15" Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.860438 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"e5918aaf-a79a-4f8e-af24-0a538b61af22","Type":"ContainerStarted","Data":"f5e4f86823050fc5341884e7c3457363544d7680acfe65dbce1e28afaa63f176"} Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.863040 4558 generic.go:334] "Generic (PLEG): container finished" podID="8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb" containerID="126e2e87e98ce1c0e15495659b9271a8a4ebe4ab8abefe2bdb3e8c4d1bd96de3" exitCode=0 Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.863077 4558 generic.go:334] "Generic (PLEG): container finished" podID="8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb" containerID="4324e2fce825da0de70111b8f7453e7392462ec09dad91158fed69d7d155e907" exitCode=143 Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.863303 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.865259 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb","Type":"ContainerDied","Data":"126e2e87e98ce1c0e15495659b9271a8a4ebe4ab8abefe2bdb3e8c4d1bd96de3"} Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.865331 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb","Type":"ContainerDied","Data":"4324e2fce825da0de70111b8f7453e7392462ec09dad91158fed69d7d155e907"} Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.865344 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb","Type":"ContainerDied","Data":"32c05056d4cedf6055a7a99ee957fec8b0f11131fe9ea7e59b4d6a0ab073fa09"} Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.884420 4558 scope.go:117] "RemoveContainer" containerID="8bd898136be8319cc7e5ac8ab8fc67c0edb9c34b838f062d015f6f9e322ab2ea" Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.898606 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.913029 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.925698 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.937219 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.951845 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:07:45 crc kubenswrapper[4558]: E0120 17:07:45.952255 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8fd459ca-89a8-4eff-a178-a1cbef557923" containerName="glance-log" Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.952277 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8fd459ca-89a8-4eff-a178-a1cbef557923" containerName="glance-log" Jan 20 17:07:45 crc kubenswrapper[4558]: E0120 17:07:45.952289 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8fd459ca-89a8-4eff-a178-a1cbef557923" containerName="glance-httpd" Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.952295 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8fd459ca-89a8-4eff-a178-a1cbef557923" containerName="glance-httpd" Jan 20 17:07:45 crc kubenswrapper[4558]: E0120 17:07:45.952305 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb" containerName="glance-log" Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.952312 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb" containerName="glance-log" Jan 20 17:07:45 crc kubenswrapper[4558]: E0120 17:07:45.952331 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb" containerName="glance-httpd" Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.952337 4558 
state_mem.go:107] "Deleted CPUSet assignment" podUID="8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb" containerName="glance-httpd" Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.952484 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb" containerName="glance-httpd" Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.952493 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8fd459ca-89a8-4eff-a178-a1cbef557923" containerName="glance-httpd" Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.952503 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8fd459ca-89a8-4eff-a178-a1cbef557923" containerName="glance-log" Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.952518 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb" containerName="glance-log" Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.960565 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.962690 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.963143 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.968930 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-glance-dockercfg-9qc5r" Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.969367 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-internal-svc" Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.969584 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-public-svc" Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.969694 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-internal-config-data" Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.969802 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-external-config-data" Jan 20 17:07:45 crc kubenswrapper[4558]: I0120 17:07:45.969890 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-scripts" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.007301 4558 scope.go:117] "RemoveContainer" containerID="979257cef910a5ce95066d2eddf8f8e11618f4acca7c04677ad4df5af1471e15" Jan 20 17:07:46 crc kubenswrapper[4558]: E0120 17:07:46.008392 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"979257cef910a5ce95066d2eddf8f8e11618f4acca7c04677ad4df5af1471e15\": container with ID starting with 979257cef910a5ce95066d2eddf8f8e11618f4acca7c04677ad4df5af1471e15 not found: ID does not exist" containerID="979257cef910a5ce95066d2eddf8f8e11618f4acca7c04677ad4df5af1471e15" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.008432 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"979257cef910a5ce95066d2eddf8f8e11618f4acca7c04677ad4df5af1471e15"} err="failed to get container status 
\"979257cef910a5ce95066d2eddf8f8e11618f4acca7c04677ad4df5af1471e15\": rpc error: code = NotFound desc = could not find container \"979257cef910a5ce95066d2eddf8f8e11618f4acca7c04677ad4df5af1471e15\": container with ID starting with 979257cef910a5ce95066d2eddf8f8e11618f4acca7c04677ad4df5af1471e15 not found: ID does not exist" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.008457 4558 scope.go:117] "RemoveContainer" containerID="8bd898136be8319cc7e5ac8ab8fc67c0edb9c34b838f062d015f6f9e322ab2ea" Jan 20 17:07:46 crc kubenswrapper[4558]: E0120 17:07:46.012270 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8bd898136be8319cc7e5ac8ab8fc67c0edb9c34b838f062d015f6f9e322ab2ea\": container with ID starting with 8bd898136be8319cc7e5ac8ab8fc67c0edb9c34b838f062d015f6f9e322ab2ea not found: ID does not exist" containerID="8bd898136be8319cc7e5ac8ab8fc67c0edb9c34b838f062d015f6f9e322ab2ea" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.012351 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8bd898136be8319cc7e5ac8ab8fc67c0edb9c34b838f062d015f6f9e322ab2ea"} err="failed to get container status \"8bd898136be8319cc7e5ac8ab8fc67c0edb9c34b838f062d015f6f9e322ab2ea\": rpc error: code = NotFound desc = could not find container \"8bd898136be8319cc7e5ac8ab8fc67c0edb9c34b838f062d015f6f9e322ab2ea\": container with ID starting with 8bd898136be8319cc7e5ac8ab8fc67c0edb9c34b838f062d015f6f9e322ab2ea not found: ID does not exist" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.012372 4558 scope.go:117] "RemoveContainer" containerID="979257cef910a5ce95066d2eddf8f8e11618f4acca7c04677ad4df5af1471e15" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.014298 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"979257cef910a5ce95066d2eddf8f8e11618f4acca7c04677ad4df5af1471e15"} err="failed to get container status \"979257cef910a5ce95066d2eddf8f8e11618f4acca7c04677ad4df5af1471e15\": rpc error: code = NotFound desc = could not find container \"979257cef910a5ce95066d2eddf8f8e11618f4acca7c04677ad4df5af1471e15\": container with ID starting with 979257cef910a5ce95066d2eddf8f8e11618f4acca7c04677ad4df5af1471e15 not found: ID does not exist" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.014370 4558 scope.go:117] "RemoveContainer" containerID="8bd898136be8319cc7e5ac8ab8fc67c0edb9c34b838f062d015f6f9e322ab2ea" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.015134 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8bd898136be8319cc7e5ac8ab8fc67c0edb9c34b838f062d015f6f9e322ab2ea"} err="failed to get container status \"8bd898136be8319cc7e5ac8ab8fc67c0edb9c34b838f062d015f6f9e322ab2ea\": rpc error: code = NotFound desc = could not find container \"8bd898136be8319cc7e5ac8ab8fc67c0edb9c34b838f062d015f6f9e322ab2ea\": container with ID starting with 8bd898136be8319cc7e5ac8ab8fc67c0edb9c34b838f062d015f6f9e322ab2ea not found: ID does not exist" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.015155 4558 scope.go:117] "RemoveContainer" containerID="126e2e87e98ce1c0e15495659b9271a8a4ebe4ab8abefe2bdb3e8c4d1bd96de3" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.024530 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.049825 4558 kubelet.go:2428] 
"SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.067852 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/91e1a579-d120-4f5d-9117-f5e48646a8eb-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"91e1a579-d120-4f5d-9117-f5e48646a8eb\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.067897 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91e1a579-d120-4f5d-9117-f5e48646a8eb-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"91e1a579-d120-4f5d-9117-f5e48646a8eb\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.067935 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/53103fed-90a2-4307-873a-7aa8e9a0f858-logs\") pod \"glance-default-internal-api-0\" (UID: \"53103fed-90a2-4307-873a-7aa8e9a0f858\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.068010 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/91e1a579-d120-4f5d-9117-f5e48646a8eb-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"91e1a579-d120-4f5d-9117-f5e48646a8eb\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.068070 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-external-api-0\" (UID: \"91e1a579-d120-4f5d-9117-f5e48646a8eb\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.068122 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-97xp7\" (UniqueName: \"kubernetes.io/projected/53103fed-90a2-4307-873a-7aa8e9a0f858-kube-api-access-97xp7\") pod \"glance-default-internal-api-0\" (UID: \"53103fed-90a2-4307-873a-7aa8e9a0f858\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.068179 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/53103fed-90a2-4307-873a-7aa8e9a0f858-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"53103fed-90a2-4307-873a-7aa8e9a0f858\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.068219 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/53103fed-90a2-4307-873a-7aa8e9a0f858-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"53103fed-90a2-4307-873a-7aa8e9a0f858\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.068371 4558 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fr9z2\" (UniqueName: \"kubernetes.io/projected/91e1a579-d120-4f5d-9117-f5e48646a8eb-kube-api-access-fr9z2\") pod \"glance-default-external-api-0\" (UID: \"91e1a579-d120-4f5d-9117-f5e48646a8eb\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.068406 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/91e1a579-d120-4f5d-9117-f5e48646a8eb-logs\") pod \"glance-default-external-api-0\" (UID: \"91e1a579-d120-4f5d-9117-f5e48646a8eb\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.068445 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53103fed-90a2-4307-873a-7aa8e9a0f858-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"53103fed-90a2-4307-873a-7aa8e9a0f858\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.068520 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/53103fed-90a2-4307-873a-7aa8e9a0f858-scripts\") pod \"glance-default-internal-api-0\" (UID: \"53103fed-90a2-4307-873a-7aa8e9a0f858\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.068575 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/91e1a579-d120-4f5d-9117-f5e48646a8eb-scripts\") pod \"glance-default-external-api-0\" (UID: \"91e1a579-d120-4f5d-9117-f5e48646a8eb\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.068605 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/53103fed-90a2-4307-873a-7aa8e9a0f858-config-data\") pod \"glance-default-internal-api-0\" (UID: \"53103fed-90a2-4307-873a-7aa8e9a0f858\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.068642 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-internal-api-0\" (UID: \"53103fed-90a2-4307-873a-7aa8e9a0f858\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.068688 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/91e1a579-d120-4f5d-9117-f5e48646a8eb-config-data\") pod \"glance-default-external-api-0\" (UID: \"91e1a579-d120-4f5d-9117-f5e48646a8eb\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.094075 4558 scope.go:117] "RemoveContainer" containerID="4324e2fce825da0de70111b8f7453e7392462ec09dad91158fed69d7d155e907" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.118352 4558 scope.go:117] "RemoveContainer" 
containerID="126e2e87e98ce1c0e15495659b9271a8a4ebe4ab8abefe2bdb3e8c4d1bd96de3" Jan 20 17:07:46 crc kubenswrapper[4558]: E0120 17:07:46.127585 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"126e2e87e98ce1c0e15495659b9271a8a4ebe4ab8abefe2bdb3e8c4d1bd96de3\": container with ID starting with 126e2e87e98ce1c0e15495659b9271a8a4ebe4ab8abefe2bdb3e8c4d1bd96de3 not found: ID does not exist" containerID="126e2e87e98ce1c0e15495659b9271a8a4ebe4ab8abefe2bdb3e8c4d1bd96de3" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.127615 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"126e2e87e98ce1c0e15495659b9271a8a4ebe4ab8abefe2bdb3e8c4d1bd96de3"} err="failed to get container status \"126e2e87e98ce1c0e15495659b9271a8a4ebe4ab8abefe2bdb3e8c4d1bd96de3\": rpc error: code = NotFound desc = could not find container \"126e2e87e98ce1c0e15495659b9271a8a4ebe4ab8abefe2bdb3e8c4d1bd96de3\": container with ID starting with 126e2e87e98ce1c0e15495659b9271a8a4ebe4ab8abefe2bdb3e8c4d1bd96de3 not found: ID does not exist" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.127639 4558 scope.go:117] "RemoveContainer" containerID="4324e2fce825da0de70111b8f7453e7392462ec09dad91158fed69d7d155e907" Jan 20 17:07:46 crc kubenswrapper[4558]: E0120 17:07:46.136284 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4324e2fce825da0de70111b8f7453e7392462ec09dad91158fed69d7d155e907\": container with ID starting with 4324e2fce825da0de70111b8f7453e7392462ec09dad91158fed69d7d155e907 not found: ID does not exist" containerID="4324e2fce825da0de70111b8f7453e7392462ec09dad91158fed69d7d155e907" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.136346 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4324e2fce825da0de70111b8f7453e7392462ec09dad91158fed69d7d155e907"} err="failed to get container status \"4324e2fce825da0de70111b8f7453e7392462ec09dad91158fed69d7d155e907\": rpc error: code = NotFound desc = could not find container \"4324e2fce825da0de70111b8f7453e7392462ec09dad91158fed69d7d155e907\": container with ID starting with 4324e2fce825da0de70111b8f7453e7392462ec09dad91158fed69d7d155e907 not found: ID does not exist" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.136364 4558 scope.go:117] "RemoveContainer" containerID="126e2e87e98ce1c0e15495659b9271a8a4ebe4ab8abefe2bdb3e8c4d1bd96de3" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.136608 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"126e2e87e98ce1c0e15495659b9271a8a4ebe4ab8abefe2bdb3e8c4d1bd96de3"} err="failed to get container status \"126e2e87e98ce1c0e15495659b9271a8a4ebe4ab8abefe2bdb3e8c4d1bd96de3\": rpc error: code = NotFound desc = could not find container \"126e2e87e98ce1c0e15495659b9271a8a4ebe4ab8abefe2bdb3e8c4d1bd96de3\": container with ID starting with 126e2e87e98ce1c0e15495659b9271a8a4ebe4ab8abefe2bdb3e8c4d1bd96de3 not found: ID does not exist" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.136630 4558 scope.go:117] "RemoveContainer" containerID="4324e2fce825da0de70111b8f7453e7392462ec09dad91158fed69d7d155e907" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.136803 4558 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"4324e2fce825da0de70111b8f7453e7392462ec09dad91158fed69d7d155e907"} err="failed to get container status \"4324e2fce825da0de70111b8f7453e7392462ec09dad91158fed69d7d155e907\": rpc error: code = NotFound desc = could not find container \"4324e2fce825da0de70111b8f7453e7392462ec09dad91158fed69d7d155e907\": container with ID starting with 4324e2fce825da0de70111b8f7453e7392462ec09dad91158fed69d7d155e907 not found: ID does not exist" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.170111 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-internal-api-0\" (UID: \"53103fed-90a2-4307-873a-7aa8e9a0f858\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.170184 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/91e1a579-d120-4f5d-9117-f5e48646a8eb-config-data\") pod \"glance-default-external-api-0\" (UID: \"91e1a579-d120-4f5d-9117-f5e48646a8eb\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.170213 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/91e1a579-d120-4f5d-9117-f5e48646a8eb-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"91e1a579-d120-4f5d-9117-f5e48646a8eb\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.170261 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91e1a579-d120-4f5d-9117-f5e48646a8eb-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"91e1a579-d120-4f5d-9117-f5e48646a8eb\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.170295 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/53103fed-90a2-4307-873a-7aa8e9a0f858-logs\") pod \"glance-default-internal-api-0\" (UID: \"53103fed-90a2-4307-873a-7aa8e9a0f858\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.170329 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/91e1a579-d120-4f5d-9117-f5e48646a8eb-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"91e1a579-d120-4f5d-9117-f5e48646a8eb\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.170353 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-external-api-0\" (UID: \"91e1a579-d120-4f5d-9117-f5e48646a8eb\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.170378 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-97xp7\" (UniqueName: \"kubernetes.io/projected/53103fed-90a2-4307-873a-7aa8e9a0f858-kube-api-access-97xp7\") pod \"glance-default-internal-api-0\" (UID: 
\"53103fed-90a2-4307-873a-7aa8e9a0f858\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.170399 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/53103fed-90a2-4307-873a-7aa8e9a0f858-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"53103fed-90a2-4307-873a-7aa8e9a0f858\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.170419 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/53103fed-90a2-4307-873a-7aa8e9a0f858-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"53103fed-90a2-4307-873a-7aa8e9a0f858\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.170452 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fr9z2\" (UniqueName: \"kubernetes.io/projected/91e1a579-d120-4f5d-9117-f5e48646a8eb-kube-api-access-fr9z2\") pod \"glance-default-external-api-0\" (UID: \"91e1a579-d120-4f5d-9117-f5e48646a8eb\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.170468 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/91e1a579-d120-4f5d-9117-f5e48646a8eb-logs\") pod \"glance-default-external-api-0\" (UID: \"91e1a579-d120-4f5d-9117-f5e48646a8eb\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.170489 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53103fed-90a2-4307-873a-7aa8e9a0f858-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"53103fed-90a2-4307-873a-7aa8e9a0f858\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.170520 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/53103fed-90a2-4307-873a-7aa8e9a0f858-scripts\") pod \"glance-default-internal-api-0\" (UID: \"53103fed-90a2-4307-873a-7aa8e9a0f858\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.170543 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/91e1a579-d120-4f5d-9117-f5e48646a8eb-scripts\") pod \"glance-default-external-api-0\" (UID: \"91e1a579-d120-4f5d-9117-f5e48646a8eb\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.170562 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/53103fed-90a2-4307-873a-7aa8e9a0f858-config-data\") pod \"glance-default-internal-api-0\" (UID: \"53103fed-90a2-4307-873a-7aa8e9a0f858\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.170620 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-internal-api-0\" (UID: 
\"53103fed-90a2-4307-873a-7aa8e9a0f858\") device mount path \"/mnt/openstack/pv06\"" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.171241 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/53103fed-90a2-4307-873a-7aa8e9a0f858-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"53103fed-90a2-4307-873a-7aa8e9a0f858\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.171508 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/53103fed-90a2-4307-873a-7aa8e9a0f858-logs\") pod \"glance-default-internal-api-0\" (UID: \"53103fed-90a2-4307-873a-7aa8e9a0f858\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.175680 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/53103fed-90a2-4307-873a-7aa8e9a0f858-config-data\") pod \"glance-default-internal-api-0\" (UID: \"53103fed-90a2-4307-873a-7aa8e9a0f858\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.177293 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/91e1a579-d120-4f5d-9117-f5e48646a8eb-logs\") pod \"glance-default-external-api-0\" (UID: \"91e1a579-d120-4f5d-9117-f5e48646a8eb\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.180402 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/91e1a579-d120-4f5d-9117-f5e48646a8eb-config-data\") pod \"glance-default-external-api-0\" (UID: \"91e1a579-d120-4f5d-9117-f5e48646a8eb\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.180780 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/91e1a579-d120-4f5d-9117-f5e48646a8eb-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"91e1a579-d120-4f5d-9117-f5e48646a8eb\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.185890 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/91e1a579-d120-4f5d-9117-f5e48646a8eb-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"91e1a579-d120-4f5d-9117-f5e48646a8eb\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.186683 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/53103fed-90a2-4307-873a-7aa8e9a0f858-scripts\") pod \"glance-default-internal-api-0\" (UID: \"53103fed-90a2-4307-873a-7aa8e9a0f858\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.189868 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91e1a579-d120-4f5d-9117-f5e48646a8eb-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"91e1a579-d120-4f5d-9117-f5e48646a8eb\") " 
pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.190789 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/91e1a579-d120-4f5d-9117-f5e48646a8eb-scripts\") pod \"glance-default-external-api-0\" (UID: \"91e1a579-d120-4f5d-9117-f5e48646a8eb\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.192561 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/53103fed-90a2-4307-873a-7aa8e9a0f858-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"53103fed-90a2-4307-873a-7aa8e9a0f858\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.193311 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-external-api-0\" (UID: \"91e1a579-d120-4f5d-9117-f5e48646a8eb\") device mount path \"/mnt/openstack/pv03\"" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.195073 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fr9z2\" (UniqueName: \"kubernetes.io/projected/91e1a579-d120-4f5d-9117-f5e48646a8eb-kube-api-access-fr9z2\") pod \"glance-default-external-api-0\" (UID: \"91e1a579-d120-4f5d-9117-f5e48646a8eb\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.198030 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-97xp7\" (UniqueName: \"kubernetes.io/projected/53103fed-90a2-4307-873a-7aa8e9a0f858-kube-api-access-97xp7\") pod \"glance-default-internal-api-0\" (UID: \"53103fed-90a2-4307-873a-7aa8e9a0f858\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.204571 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53103fed-90a2-4307-873a-7aa8e9a0f858-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"53103fed-90a2-4307-873a-7aa8e9a0f858\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.237383 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-external-api-0\" (UID: \"91e1a579-d120-4f5d-9117-f5e48646a8eb\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.256765 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-internal-api-0\" (UID: \"53103fed-90a2-4307-873a-7aa8e9a0f858\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.295226 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.308423 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.334659 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-db-sync-5rtqc" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.339922 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-db-sync-zgncw" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.375367 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4fb0c521-d465-47b9-b859-199f53143dca-db-sync-config-data\") pod \"4fb0c521-d465-47b9-b859-199f53143dca\" (UID: \"4fb0c521-d465-47b9-b859-199f53143dca\") " Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.375556 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bhc2r\" (UniqueName: \"kubernetes.io/projected/4fb0c521-d465-47b9-b859-199f53143dca-kube-api-access-bhc2r\") pod \"4fb0c521-d465-47b9-b859-199f53143dca\" (UID: \"4fb0c521-d465-47b9-b859-199f53143dca\") " Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.375633 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mkst2\" (UniqueName: \"kubernetes.io/projected/7e152238-833a-4e1c-b854-0584daae3826-kube-api-access-mkst2\") pod \"7e152238-833a-4e1c-b854-0584daae3826\" (UID: \"7e152238-833a-4e1c-b854-0584daae3826\") " Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.375682 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fb0c521-d465-47b9-b859-199f53143dca-combined-ca-bundle\") pod \"4fb0c521-d465-47b9-b859-199f53143dca\" (UID: \"4fb0c521-d465-47b9-b859-199f53143dca\") " Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.375718 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e152238-833a-4e1c-b854-0584daae3826-combined-ca-bundle\") pod \"7e152238-833a-4e1c-b854-0584daae3826\" (UID: \"7e152238-833a-4e1c-b854-0584daae3826\") " Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.375740 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7e152238-833a-4e1c-b854-0584daae3826-logs\") pod \"7e152238-833a-4e1c-b854-0584daae3826\" (UID: \"7e152238-833a-4e1c-b854-0584daae3826\") " Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.375760 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7e152238-833a-4e1c-b854-0584daae3826-scripts\") pod \"7e152238-833a-4e1c-b854-0584daae3826\" (UID: \"7e152238-833a-4e1c-b854-0584daae3826\") " Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.375876 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e152238-833a-4e1c-b854-0584daae3826-config-data\") pod \"7e152238-833a-4e1c-b854-0584daae3826\" (UID: \"7e152238-833a-4e1c-b854-0584daae3826\") " Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.379653 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7e152238-833a-4e1c-b854-0584daae3826-logs" (OuterVolumeSpecName: "logs") pod 
"7e152238-833a-4e1c-b854-0584daae3826" (UID: "7e152238-833a-4e1c-b854-0584daae3826"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.385768 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4fb0c521-d465-47b9-b859-199f53143dca-kube-api-access-bhc2r" (OuterVolumeSpecName: "kube-api-access-bhc2r") pod "4fb0c521-d465-47b9-b859-199f53143dca" (UID: "4fb0c521-d465-47b9-b859-199f53143dca"). InnerVolumeSpecName "kube-api-access-bhc2r". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.387227 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e152238-833a-4e1c-b854-0584daae3826-scripts" (OuterVolumeSpecName: "scripts") pod "7e152238-833a-4e1c-b854-0584daae3826" (UID: "7e152238-833a-4e1c-b854-0584daae3826"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.389982 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4fb0c521-d465-47b9-b859-199f53143dca-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "4fb0c521-d465-47b9-b859-199f53143dca" (UID: "4fb0c521-d465-47b9-b859-199f53143dca"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.406276 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7e152238-833a-4e1c-b854-0584daae3826-kube-api-access-mkst2" (OuterVolumeSpecName: "kube-api-access-mkst2") pod "7e152238-833a-4e1c-b854-0584daae3826" (UID: "7e152238-833a-4e1c-b854-0584daae3826"). InnerVolumeSpecName "kube-api-access-mkst2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.412719 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e152238-833a-4e1c-b854-0584daae3826-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7e152238-833a-4e1c-b854-0584daae3826" (UID: "7e152238-833a-4e1c-b854-0584daae3826"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.416960 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4fb0c521-d465-47b9-b859-199f53143dca-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4fb0c521-d465-47b9-b859-199f53143dca" (UID: "4fb0c521-d465-47b9-b859-199f53143dca"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.417037 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e152238-833a-4e1c-b854-0584daae3826-config-data" (OuterVolumeSpecName: "config-data") pod "7e152238-833a-4e1c-b854-0584daae3826" (UID: "7e152238-833a-4e1c-b854-0584daae3826"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.478548 4558 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4fb0c521-d465-47b9-b859-199f53143dca-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.478568 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bhc2r\" (UniqueName: \"kubernetes.io/projected/4fb0c521-d465-47b9-b859-199f53143dca-kube-api-access-bhc2r\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.478580 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mkst2\" (UniqueName: \"kubernetes.io/projected/7e152238-833a-4e1c-b854-0584daae3826-kube-api-access-mkst2\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.478590 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fb0c521-d465-47b9-b859-199f53143dca-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.478601 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e152238-833a-4e1c-b854-0584daae3826-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.478611 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7e152238-833a-4e1c-b854-0584daae3826-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.478621 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7e152238-833a-4e1c-b854-0584daae3826-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.478630 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e152238-833a-4e1c-b854-0584daae3826-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.581177 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb" path="/var/lib/kubelet/pods/8b97c4d4-ebbc-44e8-bd58-ae0fffc623cb/volumes" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.582921 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8fd459ca-89a8-4eff-a178-a1cbef557923" path="/var/lib/kubelet/pods/8fd459ca-89a8-4eff-a178-a1cbef557923/volumes" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.748327 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.844878 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:07:46 crc kubenswrapper[4558]: W0120 17:07:46.851595 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod91e1a579_d120_4f5d_9117_f5e48646a8eb.slice/crio-2b0643d8f4dd5e01e53550162a55cadaf752694bbff4f13711b326c93f685aa4 WatchSource:0}: Error finding container 2b0643d8f4dd5e01e53550162a55cadaf752694bbff4f13711b326c93f685aa4: Status 404 returned error can't find the container with id 
2b0643d8f4dd5e01e53550162a55cadaf752694bbff4f13711b326c93f685aa4 Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.881551 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"91e1a579-d120-4f5d-9117-f5e48646a8eb","Type":"ContainerStarted","Data":"2b0643d8f4dd5e01e53550162a55cadaf752694bbff4f13711b326c93f685aa4"} Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.886041 4558 generic.go:334] "Generic (PLEG): container finished" podID="4c9f9871-91f4-4670-a6af-0c493f3ce85b" containerID="742e670ce01715ecca8fb6a5ea852d6067112215a2e930c09ce883d2269c14b7" exitCode=0 Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.886104 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-sync-blmnq" event={"ID":"4c9f9871-91f4-4670-a6af-0c493f3ce85b","Type":"ContainerDied","Data":"742e670ce01715ecca8fb6a5ea852d6067112215a2e930c09ce883d2269c14b7"} Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.890552 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-sync-zgncw" event={"ID":"7e152238-833a-4e1c-b854-0584daae3826","Type":"ContainerDied","Data":"836df95f864cda22f81a38f41a6b0725bffa233d05c349e006515ab663e1adf2"} Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.890585 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="836df95f864cda22f81a38f41a6b0725bffa233d05c349e006515ab663e1adf2" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.890641 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-db-sync-zgncw" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.894199 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"53103fed-90a2-4307-873a-7aa8e9a0f858","Type":"ContainerStarted","Data":"7605e11b45ce155d20eaa8d8b26428ebf2f5b410e35ca872cd9b5349e845b6cd"} Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.898538 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-db-sync-5rtqc" Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.899375 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-sync-5rtqc" event={"ID":"4fb0c521-d465-47b9-b859-199f53143dca","Type":"ContainerDied","Data":"720cf8a66b7cad8587f8ef26f04899153239b9d5f060876b68844c1340f9e7ba"} Jan 20 17:07:46 crc kubenswrapper[4558]: I0120 17:07:46.899408 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="720cf8a66b7cad8587f8ef26f04899153239b9d5f060876b68844c1340f9e7ba" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.032388 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/placement-b8dd4f99b-9gg5b"] Jan 20 17:07:47 crc kubenswrapper[4558]: E0120 17:07:47.032946 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e152238-833a-4e1c-b854-0584daae3826" containerName="placement-db-sync" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.032959 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e152238-833a-4e1c-b854-0584daae3826" containerName="placement-db-sync" Jan 20 17:07:47 crc kubenswrapper[4558]: E0120 17:07:47.032977 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4fb0c521-d465-47b9-b859-199f53143dca" containerName="barbican-db-sync" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.032983 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4fb0c521-d465-47b9-b859-199f53143dca" containerName="barbican-db-sync" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.033133 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="7e152238-833a-4e1c-b854-0584daae3826" containerName="placement-db-sync" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.033142 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="4fb0c521-d465-47b9-b859-199f53143dca" containerName="barbican-db-sync" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.039564 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-b8dd4f99b-9gg5b" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.043537 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"placement-scripts" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.043745 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"placement-placement-dockercfg-6fxmw" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.043886 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-placement-internal-svc" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.047242 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-placement-public-svc" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.067920 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"placement-config-data" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.090643 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/043476ec-5392-41af-970c-89d20b6b30a5-config-data\") pod \"placement-b8dd4f99b-9gg5b\" (UID: \"043476ec-5392-41af-970c-89d20b6b30a5\") " pod="openstack-kuttl-tests/placement-b8dd4f99b-9gg5b" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.090675 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tp4jm\" (UniqueName: \"kubernetes.io/projected/043476ec-5392-41af-970c-89d20b6b30a5-kube-api-access-tp4jm\") pod \"placement-b8dd4f99b-9gg5b\" (UID: \"043476ec-5392-41af-970c-89d20b6b30a5\") " pod="openstack-kuttl-tests/placement-b8dd4f99b-9gg5b" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.090703 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/043476ec-5392-41af-970c-89d20b6b30a5-public-tls-certs\") pod \"placement-b8dd4f99b-9gg5b\" (UID: \"043476ec-5392-41af-970c-89d20b6b30a5\") " pod="openstack-kuttl-tests/placement-b8dd4f99b-9gg5b" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.090722 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/043476ec-5392-41af-970c-89d20b6b30a5-combined-ca-bundle\") pod \"placement-b8dd4f99b-9gg5b\" (UID: \"043476ec-5392-41af-970c-89d20b6b30a5\") " pod="openstack-kuttl-tests/placement-b8dd4f99b-9gg5b" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.090738 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/043476ec-5392-41af-970c-89d20b6b30a5-logs\") pod \"placement-b8dd4f99b-9gg5b\" (UID: \"043476ec-5392-41af-970c-89d20b6b30a5\") " pod="openstack-kuttl-tests/placement-b8dd4f99b-9gg5b" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.090805 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/043476ec-5392-41af-970c-89d20b6b30a5-scripts\") pod \"placement-b8dd4f99b-9gg5b\" (UID: \"043476ec-5392-41af-970c-89d20b6b30a5\") " pod="openstack-kuttl-tests/placement-b8dd4f99b-9gg5b" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.090828 4558 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/043476ec-5392-41af-970c-89d20b6b30a5-internal-tls-certs\") pod \"placement-b8dd4f99b-9gg5b\" (UID: \"043476ec-5392-41af-970c-89d20b6b30a5\") " pod="openstack-kuttl-tests/placement-b8dd4f99b-9gg5b" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.092780 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-b8dd4f99b-9gg5b"] Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.195463 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/043476ec-5392-41af-970c-89d20b6b30a5-config-data\") pod \"placement-b8dd4f99b-9gg5b\" (UID: \"043476ec-5392-41af-970c-89d20b6b30a5\") " pod="openstack-kuttl-tests/placement-b8dd4f99b-9gg5b" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.195497 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tp4jm\" (UniqueName: \"kubernetes.io/projected/043476ec-5392-41af-970c-89d20b6b30a5-kube-api-access-tp4jm\") pod \"placement-b8dd4f99b-9gg5b\" (UID: \"043476ec-5392-41af-970c-89d20b6b30a5\") " pod="openstack-kuttl-tests/placement-b8dd4f99b-9gg5b" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.195526 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/043476ec-5392-41af-970c-89d20b6b30a5-public-tls-certs\") pod \"placement-b8dd4f99b-9gg5b\" (UID: \"043476ec-5392-41af-970c-89d20b6b30a5\") " pod="openstack-kuttl-tests/placement-b8dd4f99b-9gg5b" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.195546 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/043476ec-5392-41af-970c-89d20b6b30a5-combined-ca-bundle\") pod \"placement-b8dd4f99b-9gg5b\" (UID: \"043476ec-5392-41af-970c-89d20b6b30a5\") " pod="openstack-kuttl-tests/placement-b8dd4f99b-9gg5b" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.195564 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/043476ec-5392-41af-970c-89d20b6b30a5-logs\") pod \"placement-b8dd4f99b-9gg5b\" (UID: \"043476ec-5392-41af-970c-89d20b6b30a5\") " pod="openstack-kuttl-tests/placement-b8dd4f99b-9gg5b" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.195632 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/043476ec-5392-41af-970c-89d20b6b30a5-scripts\") pod \"placement-b8dd4f99b-9gg5b\" (UID: \"043476ec-5392-41af-970c-89d20b6b30a5\") " pod="openstack-kuttl-tests/placement-b8dd4f99b-9gg5b" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.195657 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/043476ec-5392-41af-970c-89d20b6b30a5-internal-tls-certs\") pod \"placement-b8dd4f99b-9gg5b\" (UID: \"043476ec-5392-41af-970c-89d20b6b30a5\") " pod="openstack-kuttl-tests/placement-b8dd4f99b-9gg5b" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.216153 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-worker-7bcc85c7d9-jm8qh"] Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.217994 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-7bcc85c7d9-jm8qh" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.220351 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-config-data" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.220530 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-barbican-dockercfg-zpzhc" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.220803 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-worker-config-data" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.221637 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/043476ec-5392-41af-970c-89d20b6b30a5-public-tls-certs\") pod \"placement-b8dd4f99b-9gg5b\" (UID: \"043476ec-5392-41af-970c-89d20b6b30a5\") " pod="openstack-kuttl-tests/placement-b8dd4f99b-9gg5b" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.221755 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/043476ec-5392-41af-970c-89d20b6b30a5-logs\") pod \"placement-b8dd4f99b-9gg5b\" (UID: \"043476ec-5392-41af-970c-89d20b6b30a5\") " pod="openstack-kuttl-tests/placement-b8dd4f99b-9gg5b" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.231581 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/043476ec-5392-41af-970c-89d20b6b30a5-config-data\") pod \"placement-b8dd4f99b-9gg5b\" (UID: \"043476ec-5392-41af-970c-89d20b6b30a5\") " pod="openstack-kuttl-tests/placement-b8dd4f99b-9gg5b" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.238514 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/043476ec-5392-41af-970c-89d20b6b30a5-scripts\") pod \"placement-b8dd4f99b-9gg5b\" (UID: \"043476ec-5392-41af-970c-89d20b6b30a5\") " pod="openstack-kuttl-tests/placement-b8dd4f99b-9gg5b" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.242630 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/043476ec-5392-41af-970c-89d20b6b30a5-internal-tls-certs\") pod \"placement-b8dd4f99b-9gg5b\" (UID: \"043476ec-5392-41af-970c-89d20b6b30a5\") " pod="openstack-kuttl-tests/placement-b8dd4f99b-9gg5b" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.247676 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tp4jm\" (UniqueName: \"kubernetes.io/projected/043476ec-5392-41af-970c-89d20b6b30a5-kube-api-access-tp4jm\") pod \"placement-b8dd4f99b-9gg5b\" (UID: \"043476ec-5392-41af-970c-89d20b6b30a5\") " pod="openstack-kuttl-tests/placement-b8dd4f99b-9gg5b" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.278775 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/043476ec-5392-41af-970c-89d20b6b30a5-combined-ca-bundle\") pod \"placement-b8dd4f99b-9gg5b\" (UID: \"043476ec-5392-41af-970c-89d20b6b30a5\") " pod="openstack-kuttl-tests/placement-b8dd4f99b-9gg5b" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.303357 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/faade961-fce0-4ad6-a039-4ba83a95dd68-combined-ca-bundle\") pod \"barbican-worker-7bcc85c7d9-jm8qh\" (UID: \"faade961-fce0-4ad6-a039-4ba83a95dd68\") " pod="openstack-kuttl-tests/barbican-worker-7bcc85c7d9-jm8qh" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.303445 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r7std\" (UniqueName: \"kubernetes.io/projected/faade961-fce0-4ad6-a039-4ba83a95dd68-kube-api-access-r7std\") pod \"barbican-worker-7bcc85c7d9-jm8qh\" (UID: \"faade961-fce0-4ad6-a039-4ba83a95dd68\") " pod="openstack-kuttl-tests/barbican-worker-7bcc85c7d9-jm8qh" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.303549 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/faade961-fce0-4ad6-a039-4ba83a95dd68-config-data\") pod \"barbican-worker-7bcc85c7d9-jm8qh\" (UID: \"faade961-fce0-4ad6-a039-4ba83a95dd68\") " pod="openstack-kuttl-tests/barbican-worker-7bcc85c7d9-jm8qh" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.303585 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/faade961-fce0-4ad6-a039-4ba83a95dd68-config-data-custom\") pod \"barbican-worker-7bcc85c7d9-jm8qh\" (UID: \"faade961-fce0-4ad6-a039-4ba83a95dd68\") " pod="openstack-kuttl-tests/barbican-worker-7bcc85c7d9-jm8qh" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.303626 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/faade961-fce0-4ad6-a039-4ba83a95dd68-logs\") pod \"barbican-worker-7bcc85c7d9-jm8qh\" (UID: \"faade961-fce0-4ad6-a039-4ba83a95dd68\") " pod="openstack-kuttl-tests/barbican-worker-7bcc85c7d9-jm8qh" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.306732 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-worker-7bcc85c7d9-jm8qh"] Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.340230 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-86d9c4d694-78ch5"] Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.341635 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-86d9c4d694-78ch5" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.346973 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-keystone-listener-config-data" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.368365 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-86d9c4d694-78ch5"] Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.430474 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/faade961-fce0-4ad6-a039-4ba83a95dd68-config-data\") pod \"barbican-worker-7bcc85c7d9-jm8qh\" (UID: \"faade961-fce0-4ad6-a039-4ba83a95dd68\") " pod="openstack-kuttl-tests/barbican-worker-7bcc85c7d9-jm8qh" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.430525 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/faade961-fce0-4ad6-a039-4ba83a95dd68-config-data-custom\") pod \"barbican-worker-7bcc85c7d9-jm8qh\" (UID: \"faade961-fce0-4ad6-a039-4ba83a95dd68\") " pod="openstack-kuttl-tests/barbican-worker-7bcc85c7d9-jm8qh" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.430559 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/209ec6bc-170f-4c8a-ad7e-e441ace95d1b-config-data-custom\") pod \"barbican-keystone-listener-86d9c4d694-78ch5\" (UID: \"209ec6bc-170f-4c8a-ad7e-e441ace95d1b\") " pod="openstack-kuttl-tests/barbican-keystone-listener-86d9c4d694-78ch5" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.430588 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/209ec6bc-170f-4c8a-ad7e-e441ace95d1b-combined-ca-bundle\") pod \"barbican-keystone-listener-86d9c4d694-78ch5\" (UID: \"209ec6bc-170f-4c8a-ad7e-e441ace95d1b\") " pod="openstack-kuttl-tests/barbican-keystone-listener-86d9c4d694-78ch5" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.430614 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/faade961-fce0-4ad6-a039-4ba83a95dd68-logs\") pod \"barbican-worker-7bcc85c7d9-jm8qh\" (UID: \"faade961-fce0-4ad6-a039-4ba83a95dd68\") " pod="openstack-kuttl-tests/barbican-worker-7bcc85c7d9-jm8qh" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.430655 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/209ec6bc-170f-4c8a-ad7e-e441ace95d1b-logs\") pod \"barbican-keystone-listener-86d9c4d694-78ch5\" (UID: \"209ec6bc-170f-4c8a-ad7e-e441ace95d1b\") " pod="openstack-kuttl-tests/barbican-keystone-listener-86d9c4d694-78ch5" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.430676 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/209ec6bc-170f-4c8a-ad7e-e441ace95d1b-config-data\") pod \"barbican-keystone-listener-86d9c4d694-78ch5\" (UID: \"209ec6bc-170f-4c8a-ad7e-e441ace95d1b\") " pod="openstack-kuttl-tests/barbican-keystone-listener-86d9c4d694-78ch5" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.430706 4558 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/faade961-fce0-4ad6-a039-4ba83a95dd68-combined-ca-bundle\") pod \"barbican-worker-7bcc85c7d9-jm8qh\" (UID: \"faade961-fce0-4ad6-a039-4ba83a95dd68\") " pod="openstack-kuttl-tests/barbican-worker-7bcc85c7d9-jm8qh" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.430735 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lmmrc\" (UniqueName: \"kubernetes.io/projected/209ec6bc-170f-4c8a-ad7e-e441ace95d1b-kube-api-access-lmmrc\") pod \"barbican-keystone-listener-86d9c4d694-78ch5\" (UID: \"209ec6bc-170f-4c8a-ad7e-e441ace95d1b\") " pod="openstack-kuttl-tests/barbican-keystone-listener-86d9c4d694-78ch5" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.430773 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r7std\" (UniqueName: \"kubernetes.io/projected/faade961-fce0-4ad6-a039-4ba83a95dd68-kube-api-access-r7std\") pod \"barbican-worker-7bcc85c7d9-jm8qh\" (UID: \"faade961-fce0-4ad6-a039-4ba83a95dd68\") " pod="openstack-kuttl-tests/barbican-worker-7bcc85c7d9-jm8qh" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.435725 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/faade961-fce0-4ad6-a039-4ba83a95dd68-logs\") pod \"barbican-worker-7bcc85c7d9-jm8qh\" (UID: \"faade961-fce0-4ad6-a039-4ba83a95dd68\") " pod="openstack-kuttl-tests/barbican-worker-7bcc85c7d9-jm8qh" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.436805 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-b8dd4f99b-9gg5b" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.453392 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r7std\" (UniqueName: \"kubernetes.io/projected/faade961-fce0-4ad6-a039-4ba83a95dd68-kube-api-access-r7std\") pod \"barbican-worker-7bcc85c7d9-jm8qh\" (UID: \"faade961-fce0-4ad6-a039-4ba83a95dd68\") " pod="openstack-kuttl-tests/barbican-worker-7bcc85c7d9-jm8qh" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.459800 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/faade961-fce0-4ad6-a039-4ba83a95dd68-combined-ca-bundle\") pod \"barbican-worker-7bcc85c7d9-jm8qh\" (UID: \"faade961-fce0-4ad6-a039-4ba83a95dd68\") " pod="openstack-kuttl-tests/barbican-worker-7bcc85c7d9-jm8qh" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.508249 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/faade961-fce0-4ad6-a039-4ba83a95dd68-config-data-custom\") pod \"barbican-worker-7bcc85c7d9-jm8qh\" (UID: \"faade961-fce0-4ad6-a039-4ba83a95dd68\") " pod="openstack-kuttl-tests/barbican-worker-7bcc85c7d9-jm8qh" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.510298 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/faade961-fce0-4ad6-a039-4ba83a95dd68-config-data\") pod \"barbican-worker-7bcc85c7d9-jm8qh\" (UID: \"faade961-fce0-4ad6-a039-4ba83a95dd68\") " pod="openstack-kuttl-tests/barbican-worker-7bcc85c7d9-jm8qh" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.522150 4558 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack-kuttl-tests/barbican-api-5ffbfb44-sbdwx"] Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.525659 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-api-5ffbfb44-sbdwx" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.532426 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-api-config-data" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.534672 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lmmrc\" (UniqueName: \"kubernetes.io/projected/209ec6bc-170f-4c8a-ad7e-e441ace95d1b-kube-api-access-lmmrc\") pod \"barbican-keystone-listener-86d9c4d694-78ch5\" (UID: \"209ec6bc-170f-4c8a-ad7e-e441ace95d1b\") " pod="openstack-kuttl-tests/barbican-keystone-listener-86d9c4d694-78ch5" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.534912 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/209ec6bc-170f-4c8a-ad7e-e441ace95d1b-config-data-custom\") pod \"barbican-keystone-listener-86d9c4d694-78ch5\" (UID: \"209ec6bc-170f-4c8a-ad7e-e441ace95d1b\") " pod="openstack-kuttl-tests/barbican-keystone-listener-86d9c4d694-78ch5" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.534952 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/209ec6bc-170f-4c8a-ad7e-e441ace95d1b-combined-ca-bundle\") pod \"barbican-keystone-listener-86d9c4d694-78ch5\" (UID: \"209ec6bc-170f-4c8a-ad7e-e441ace95d1b\") " pod="openstack-kuttl-tests/barbican-keystone-listener-86d9c4d694-78ch5" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.535030 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/209ec6bc-170f-4c8a-ad7e-e441ace95d1b-logs\") pod \"barbican-keystone-listener-86d9c4d694-78ch5\" (UID: \"209ec6bc-170f-4c8a-ad7e-e441ace95d1b\") " pod="openstack-kuttl-tests/barbican-keystone-listener-86d9c4d694-78ch5" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.535057 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/209ec6bc-170f-4c8a-ad7e-e441ace95d1b-config-data\") pod \"barbican-keystone-listener-86d9c4d694-78ch5\" (UID: \"209ec6bc-170f-4c8a-ad7e-e441ace95d1b\") " pod="openstack-kuttl-tests/barbican-keystone-listener-86d9c4d694-78ch5" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.538494 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/209ec6bc-170f-4c8a-ad7e-e441ace95d1b-logs\") pod \"barbican-keystone-listener-86d9c4d694-78ch5\" (UID: \"209ec6bc-170f-4c8a-ad7e-e441ace95d1b\") " pod="openstack-kuttl-tests/barbican-keystone-listener-86d9c4d694-78ch5" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.542470 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/209ec6bc-170f-4c8a-ad7e-e441ace95d1b-config-data-custom\") pod \"barbican-keystone-listener-86d9c4d694-78ch5\" (UID: \"209ec6bc-170f-4c8a-ad7e-e441ace95d1b\") " pod="openstack-kuttl-tests/barbican-keystone-listener-86d9c4d694-78ch5" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.543878 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/209ec6bc-170f-4c8a-ad7e-e441ace95d1b-combined-ca-bundle\") pod \"barbican-keystone-listener-86d9c4d694-78ch5\" (UID: \"209ec6bc-170f-4c8a-ad7e-e441ace95d1b\") " pod="openstack-kuttl-tests/barbican-keystone-listener-86d9c4d694-78ch5" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.545063 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/209ec6bc-170f-4c8a-ad7e-e441ace95d1b-config-data\") pod \"barbican-keystone-listener-86d9c4d694-78ch5\" (UID: \"209ec6bc-170f-4c8a-ad7e-e441ace95d1b\") " pod="openstack-kuttl-tests/barbican-keystone-listener-86d9c4d694-78ch5" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.557991 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lmmrc\" (UniqueName: \"kubernetes.io/projected/209ec6bc-170f-4c8a-ad7e-e441ace95d1b-kube-api-access-lmmrc\") pod \"barbican-keystone-listener-86d9c4d694-78ch5\" (UID: \"209ec6bc-170f-4c8a-ad7e-e441ace95d1b\") " pod="openstack-kuttl-tests/barbican-keystone-listener-86d9c4d694-78ch5" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.558108 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-api-5ffbfb44-sbdwx"] Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.638185 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd459d10-b71b-45e5-9af4-042acd8bd024-config-data\") pod \"barbican-api-5ffbfb44-sbdwx\" (UID: \"cd459d10-b71b-45e5-9af4-042acd8bd024\") " pod="openstack-kuttl-tests/barbican-api-5ffbfb44-sbdwx" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.638305 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cd459d10-b71b-45e5-9af4-042acd8bd024-logs\") pod \"barbican-api-5ffbfb44-sbdwx\" (UID: \"cd459d10-b71b-45e5-9af4-042acd8bd024\") " pod="openstack-kuttl-tests/barbican-api-5ffbfb44-sbdwx" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.638347 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cd459d10-b71b-45e5-9af4-042acd8bd024-config-data-custom\") pod \"barbican-api-5ffbfb44-sbdwx\" (UID: \"cd459d10-b71b-45e5-9af4-042acd8bd024\") " pod="openstack-kuttl-tests/barbican-api-5ffbfb44-sbdwx" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.638378 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-njbz4\" (UniqueName: \"kubernetes.io/projected/cd459d10-b71b-45e5-9af4-042acd8bd024-kube-api-access-njbz4\") pod \"barbican-api-5ffbfb44-sbdwx\" (UID: \"cd459d10-b71b-45e5-9af4-042acd8bd024\") " pod="openstack-kuttl-tests/barbican-api-5ffbfb44-sbdwx" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.638431 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd459d10-b71b-45e5-9af4-042acd8bd024-combined-ca-bundle\") pod \"barbican-api-5ffbfb44-sbdwx\" (UID: \"cd459d10-b71b-45e5-9af4-042acd8bd024\") " pod="openstack-kuttl-tests/barbican-api-5ffbfb44-sbdwx" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.656056 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-7bcc85c7d9-jm8qh" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.685480 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-rmnb4" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.741484 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2-config-data\") pod \"c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2\" (UID: \"c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2\") " Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.741555 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7rsc9\" (UniqueName: \"kubernetes.io/projected/c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2-kube-api-access-7rsc9\") pod \"c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2\" (UID: \"c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2\") " Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.741601 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2-combined-ca-bundle\") pod \"c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2\" (UID: \"c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2\") " Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.741641 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2-credential-keys\") pod \"c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2\" (UID: \"c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2\") " Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.742228 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2-scripts\") pod \"c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2\" (UID: \"c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2\") " Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.743664 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2-fernet-keys\") pod \"c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2\" (UID: \"c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2\") " Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.744828 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd459d10-b71b-45e5-9af4-042acd8bd024-config-data\") pod \"barbican-api-5ffbfb44-sbdwx\" (UID: \"cd459d10-b71b-45e5-9af4-042acd8bd024\") " pod="openstack-kuttl-tests/barbican-api-5ffbfb44-sbdwx" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.745056 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cd459d10-b71b-45e5-9af4-042acd8bd024-logs\") pod \"barbican-api-5ffbfb44-sbdwx\" (UID: \"cd459d10-b71b-45e5-9af4-042acd8bd024\") " pod="openstack-kuttl-tests/barbican-api-5ffbfb44-sbdwx" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.745116 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cd459d10-b71b-45e5-9af4-042acd8bd024-config-data-custom\") pod \"barbican-api-5ffbfb44-sbdwx\" (UID: \"cd459d10-b71b-45e5-9af4-042acd8bd024\") " 
pod="openstack-kuttl-tests/barbican-api-5ffbfb44-sbdwx" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.745341 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-njbz4\" (UniqueName: \"kubernetes.io/projected/cd459d10-b71b-45e5-9af4-042acd8bd024-kube-api-access-njbz4\") pod \"barbican-api-5ffbfb44-sbdwx\" (UID: \"cd459d10-b71b-45e5-9af4-042acd8bd024\") " pod="openstack-kuttl-tests/barbican-api-5ffbfb44-sbdwx" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.745451 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd459d10-b71b-45e5-9af4-042acd8bd024-combined-ca-bundle\") pod \"barbican-api-5ffbfb44-sbdwx\" (UID: \"cd459d10-b71b-45e5-9af4-042acd8bd024\") " pod="openstack-kuttl-tests/barbican-api-5ffbfb44-sbdwx" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.746823 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cd459d10-b71b-45e5-9af4-042acd8bd024-logs\") pod \"barbican-api-5ffbfb44-sbdwx\" (UID: \"cd459d10-b71b-45e5-9af4-042acd8bd024\") " pod="openstack-kuttl-tests/barbican-api-5ffbfb44-sbdwx" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.749287 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2-scripts" (OuterVolumeSpecName: "scripts") pod "c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2" (UID: "c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.749702 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-86d9c4d694-78ch5" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.750820 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2-kube-api-access-7rsc9" (OuterVolumeSpecName: "kube-api-access-7rsc9") pod "c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2" (UID: "c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2"). InnerVolumeSpecName "kube-api-access-7rsc9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.764374 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2" (UID: "c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2"). InnerVolumeSpecName "fernet-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.766878 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd459d10-b71b-45e5-9af4-042acd8bd024-combined-ca-bundle\") pod \"barbican-api-5ffbfb44-sbdwx\" (UID: \"cd459d10-b71b-45e5-9af4-042acd8bd024\") " pod="openstack-kuttl-tests/barbican-api-5ffbfb44-sbdwx" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.767313 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cd459d10-b71b-45e5-9af4-042acd8bd024-config-data-custom\") pod \"barbican-api-5ffbfb44-sbdwx\" (UID: \"cd459d10-b71b-45e5-9af4-042acd8bd024\") " pod="openstack-kuttl-tests/barbican-api-5ffbfb44-sbdwx" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.769562 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-njbz4\" (UniqueName: \"kubernetes.io/projected/cd459d10-b71b-45e5-9af4-042acd8bd024-kube-api-access-njbz4\") pod \"barbican-api-5ffbfb44-sbdwx\" (UID: \"cd459d10-b71b-45e5-9af4-042acd8bd024\") " pod="openstack-kuttl-tests/barbican-api-5ffbfb44-sbdwx" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.770555 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2" (UID: "c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.775109 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd459d10-b71b-45e5-9af4-042acd8bd024-config-data\") pod \"barbican-api-5ffbfb44-sbdwx\" (UID: \"cd459d10-b71b-45e5-9af4-042acd8bd024\") " pod="openstack-kuttl-tests/barbican-api-5ffbfb44-sbdwx" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.815504 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2" (UID: "c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.816763 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2-config-data" (OuterVolumeSpecName: "config-data") pod "c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2" (UID: "c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.851895 4558 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.852122 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.852134 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7rsc9\" (UniqueName: \"kubernetes.io/projected/c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2-kube-api-access-7rsc9\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.852148 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.852176 4558 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.852186 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.917923 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"53103fed-90a2-4307-873a-7aa8e9a0f858","Type":"ContainerStarted","Data":"35ded3b25d0948e3d8b0f37bb581221a03212fb78cb9c59c0ecf8e53e679187e"} Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.923835 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"e5918aaf-a79a-4f8e-af24-0a538b61af22","Type":"ContainerStarted","Data":"3d3b3bad3315e7a5d2bf5acdd694484083370e47a7e9c70b7d81c8e0322b5a1c"} Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.927117 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="e5918aaf-a79a-4f8e-af24-0a538b61af22" containerName="ceilometer-central-agent" containerID="cri-o://c6d6179083d1da62058e87e75412018bf0cc96f7046edc5c269f6ba9f837fdca" gracePeriod=30 Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.927243 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.927371 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="e5918aaf-a79a-4f8e-af24-0a538b61af22" containerName="proxy-httpd" containerID="cri-o://3d3b3bad3315e7a5d2bf5acdd694484083370e47a7e9c70b7d81c8e0322b5a1c" gracePeriod=30 Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.927517 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="e5918aaf-a79a-4f8e-af24-0a538b61af22" containerName="ceilometer-notification-agent" containerID="cri-o://daf89e9a5edbcd4dc5626bac0a6afd5dd0377257a844e9b1cbee689ca493d073" gracePeriod=30 
Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.927564 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="e5918aaf-a79a-4f8e-af24-0a538b61af22" containerName="sg-core" containerID="cri-o://f5e4f86823050fc5341884e7c3457363544d7680acfe65dbce1e28afaa63f176" gracePeriod=30 Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.930632 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-rmnb4" event={"ID":"c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2","Type":"ContainerDied","Data":"a0907ff1318b0dd034d73b6c958017896882282ad7a0ee88e1684cc4ee2dffb2"} Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.930714 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a0907ff1318b0dd034d73b6c958017896882282ad7a0ee88e1684cc4ee2dffb2" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.930819 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-rmnb4" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.954146 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=1.657308899 podStartE2EDuration="6.954123037s" podCreationTimestamp="2026-01-20 17:07:41 +0000 UTC" firstStartedPulling="2026-01-20 17:07:41.89847776 +0000 UTC m=+1555.658815727" lastFinishedPulling="2026-01-20 17:07:47.195291897 +0000 UTC m=+1560.955629865" observedRunningTime="2026-01-20 17:07:47.944923739 +0000 UTC m=+1561.705261706" watchObservedRunningTime="2026-01-20 17:07:47.954123037 +0000 UTC m=+1561.714461004" Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.964915 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-rmnb4"] Jan 20 17:07:47 crc kubenswrapper[4558]: I0120 17:07:47.978340 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-rmnb4"] Jan 20 17:07:48 crc kubenswrapper[4558]: E0120 17:07:47.997656 4558 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4fb0c521_d465_47b9_b859_199f53143dca.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4fb0c521_d465_47b9_b859_199f53143dca.slice/crio-720cf8a66b7cad8587f8ef26f04899153239b9d5f060876b68844c1340f9e7ba\": RecentStats: unable to find data in memory cache]" Jan 20 17:07:48 crc kubenswrapper[4558]: I0120 17:07:48.038868 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-api-5ffbfb44-sbdwx" Jan 20 17:07:48 crc kubenswrapper[4558]: I0120 17:07:48.082710 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-v2krf"] Jan 20 17:07:48 crc kubenswrapper[4558]: E0120 17:07:48.083216 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2" containerName="keystone-bootstrap" Jan 20 17:07:48 crc kubenswrapper[4558]: I0120 17:07:48.083236 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2" containerName="keystone-bootstrap" Jan 20 17:07:48 crc kubenswrapper[4558]: I0120 17:07:48.083413 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2" containerName="keystone-bootstrap" Jan 20 17:07:48 crc kubenswrapper[4558]: I0120 17:07:48.084065 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-v2krf" Jan 20 17:07:48 crc kubenswrapper[4558]: I0120 17:07:48.092977 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone" Jan 20 17:07:48 crc kubenswrapper[4558]: I0120 17:07:48.093138 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-config-data" Jan 20 17:07:48 crc kubenswrapper[4558]: I0120 17:07:48.094076 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-scripts" Jan 20 17:07:48 crc kubenswrapper[4558]: I0120 17:07:48.094253 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-keystone-dockercfg-lcqb5" Jan 20 17:07:48 crc kubenswrapper[4558]: I0120 17:07:48.094412 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"osp-secret" Jan 20 17:07:48 crc kubenswrapper[4558]: I0120 17:07:48.119258 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-v2krf"] Jan 20 17:07:48 crc kubenswrapper[4558]: I0120 17:07:48.136972 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-b8dd4f99b-9gg5b"] Jan 20 17:07:48 crc kubenswrapper[4558]: W0120 17:07:48.141276 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod043476ec_5392_41af_970c_89d20b6b30a5.slice/crio-4c84428a80cbfe98f47d97ac5e8c97645f3b7939e2b685b5e8bf9d6bc5ffd8f9 WatchSource:0}: Error finding container 4c84428a80cbfe98f47d97ac5e8c97645f3b7939e2b685b5e8bf9d6bc5ffd8f9: Status 404 returned error can't find the container with id 4c84428a80cbfe98f47d97ac5e8c97645f3b7939e2b685b5e8bf9d6bc5ffd8f9 Jan 20 17:07:48 crc kubenswrapper[4558]: I0120 17:07:48.175985 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/23f3fc6a-3d4e-4e04-87c7-c819069b4a89-scripts\") pod \"keystone-bootstrap-v2krf\" (UID: \"23f3fc6a-3d4e-4e04-87c7-c819069b4a89\") " pod="openstack-kuttl-tests/keystone-bootstrap-v2krf" Jan 20 17:07:48 crc kubenswrapper[4558]: I0120 17:07:48.176369 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23f3fc6a-3d4e-4e04-87c7-c819069b4a89-combined-ca-bundle\") pod \"keystone-bootstrap-v2krf\" (UID: 
\"23f3fc6a-3d4e-4e04-87c7-c819069b4a89\") " pod="openstack-kuttl-tests/keystone-bootstrap-v2krf" Jan 20 17:07:48 crc kubenswrapper[4558]: I0120 17:07:48.176462 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4bq2x\" (UniqueName: \"kubernetes.io/projected/23f3fc6a-3d4e-4e04-87c7-c819069b4a89-kube-api-access-4bq2x\") pod \"keystone-bootstrap-v2krf\" (UID: \"23f3fc6a-3d4e-4e04-87c7-c819069b4a89\") " pod="openstack-kuttl-tests/keystone-bootstrap-v2krf" Jan 20 17:07:48 crc kubenswrapper[4558]: I0120 17:07:48.176588 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23f3fc6a-3d4e-4e04-87c7-c819069b4a89-config-data\") pod \"keystone-bootstrap-v2krf\" (UID: \"23f3fc6a-3d4e-4e04-87c7-c819069b4a89\") " pod="openstack-kuttl-tests/keystone-bootstrap-v2krf" Jan 20 17:07:48 crc kubenswrapper[4558]: I0120 17:07:48.176719 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/23f3fc6a-3d4e-4e04-87c7-c819069b4a89-credential-keys\") pod \"keystone-bootstrap-v2krf\" (UID: \"23f3fc6a-3d4e-4e04-87c7-c819069b4a89\") " pod="openstack-kuttl-tests/keystone-bootstrap-v2krf" Jan 20 17:07:48 crc kubenswrapper[4558]: I0120 17:07:48.177635 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/23f3fc6a-3d4e-4e04-87c7-c819069b4a89-fernet-keys\") pod \"keystone-bootstrap-v2krf\" (UID: \"23f3fc6a-3d4e-4e04-87c7-c819069b4a89\") " pod="openstack-kuttl-tests/keystone-bootstrap-v2krf" Jan 20 17:07:48 crc kubenswrapper[4558]: I0120 17:07:48.280703 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/23f3fc6a-3d4e-4e04-87c7-c819069b4a89-credential-keys\") pod \"keystone-bootstrap-v2krf\" (UID: \"23f3fc6a-3d4e-4e04-87c7-c819069b4a89\") " pod="openstack-kuttl-tests/keystone-bootstrap-v2krf" Jan 20 17:07:48 crc kubenswrapper[4558]: I0120 17:07:48.280855 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/23f3fc6a-3d4e-4e04-87c7-c819069b4a89-fernet-keys\") pod \"keystone-bootstrap-v2krf\" (UID: \"23f3fc6a-3d4e-4e04-87c7-c819069b4a89\") " pod="openstack-kuttl-tests/keystone-bootstrap-v2krf" Jan 20 17:07:48 crc kubenswrapper[4558]: I0120 17:07:48.281017 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/23f3fc6a-3d4e-4e04-87c7-c819069b4a89-scripts\") pod \"keystone-bootstrap-v2krf\" (UID: \"23f3fc6a-3d4e-4e04-87c7-c819069b4a89\") " pod="openstack-kuttl-tests/keystone-bootstrap-v2krf" Jan 20 17:07:48 crc kubenswrapper[4558]: I0120 17:07:48.281107 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23f3fc6a-3d4e-4e04-87c7-c819069b4a89-combined-ca-bundle\") pod \"keystone-bootstrap-v2krf\" (UID: \"23f3fc6a-3d4e-4e04-87c7-c819069b4a89\") " pod="openstack-kuttl-tests/keystone-bootstrap-v2krf" Jan 20 17:07:48 crc kubenswrapper[4558]: I0120 17:07:48.281218 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4bq2x\" (UniqueName: 
\"kubernetes.io/projected/23f3fc6a-3d4e-4e04-87c7-c819069b4a89-kube-api-access-4bq2x\") pod \"keystone-bootstrap-v2krf\" (UID: \"23f3fc6a-3d4e-4e04-87c7-c819069b4a89\") " pod="openstack-kuttl-tests/keystone-bootstrap-v2krf" Jan 20 17:07:48 crc kubenswrapper[4558]: I0120 17:07:48.281342 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23f3fc6a-3d4e-4e04-87c7-c819069b4a89-config-data\") pod \"keystone-bootstrap-v2krf\" (UID: \"23f3fc6a-3d4e-4e04-87c7-c819069b4a89\") " pod="openstack-kuttl-tests/keystone-bootstrap-v2krf" Jan 20 17:07:48 crc kubenswrapper[4558]: I0120 17:07:48.286404 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/23f3fc6a-3d4e-4e04-87c7-c819069b4a89-credential-keys\") pod \"keystone-bootstrap-v2krf\" (UID: \"23f3fc6a-3d4e-4e04-87c7-c819069b4a89\") " pod="openstack-kuttl-tests/keystone-bootstrap-v2krf" Jan 20 17:07:48 crc kubenswrapper[4558]: I0120 17:07:48.288687 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/23f3fc6a-3d4e-4e04-87c7-c819069b4a89-fernet-keys\") pod \"keystone-bootstrap-v2krf\" (UID: \"23f3fc6a-3d4e-4e04-87c7-c819069b4a89\") " pod="openstack-kuttl-tests/keystone-bootstrap-v2krf" Jan 20 17:07:48 crc kubenswrapper[4558]: I0120 17:07:48.288717 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23f3fc6a-3d4e-4e04-87c7-c819069b4a89-config-data\") pod \"keystone-bootstrap-v2krf\" (UID: \"23f3fc6a-3d4e-4e04-87c7-c819069b4a89\") " pod="openstack-kuttl-tests/keystone-bootstrap-v2krf" Jan 20 17:07:48 crc kubenswrapper[4558]: I0120 17:07:48.293152 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/23f3fc6a-3d4e-4e04-87c7-c819069b4a89-scripts\") pod \"keystone-bootstrap-v2krf\" (UID: \"23f3fc6a-3d4e-4e04-87c7-c819069b4a89\") " pod="openstack-kuttl-tests/keystone-bootstrap-v2krf" Jan 20 17:07:48 crc kubenswrapper[4558]: I0120 17:07:48.302441 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4bq2x\" (UniqueName: \"kubernetes.io/projected/23f3fc6a-3d4e-4e04-87c7-c819069b4a89-kube-api-access-4bq2x\") pod \"keystone-bootstrap-v2krf\" (UID: \"23f3fc6a-3d4e-4e04-87c7-c819069b4a89\") " pod="openstack-kuttl-tests/keystone-bootstrap-v2krf" Jan 20 17:07:48 crc kubenswrapper[4558]: I0120 17:07:48.305801 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23f3fc6a-3d4e-4e04-87c7-c819069b4a89-combined-ca-bundle\") pod \"keystone-bootstrap-v2krf\" (UID: \"23f3fc6a-3d4e-4e04-87c7-c819069b4a89\") " pod="openstack-kuttl-tests/keystone-bootstrap-v2krf" Jan 20 17:07:48 crc kubenswrapper[4558]: I0120 17:07:48.375979 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-worker-7bcc85c7d9-jm8qh"] Jan 20 17:07:48 crc kubenswrapper[4558]: W0120 17:07:48.444641 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfaade961_fce0_4ad6_a039_4ba83a95dd68.slice/crio-f97ce9d190b163d24d32a36261ccedb84e9d2308cd6fb8b7d48d7eb4331e510a WatchSource:0}: Error finding container f97ce9d190b163d24d32a36261ccedb84e9d2308cd6fb8b7d48d7eb4331e510a: Status 404 returned error can't find the container with 
id f97ce9d190b163d24d32a36261ccedb84e9d2308cd6fb8b7d48d7eb4331e510a Jan 20 17:07:48 crc kubenswrapper[4558]: I0120 17:07:48.463860 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-db-sync-blmnq" Jan 20 17:07:48 crc kubenswrapper[4558]: I0120 17:07:48.536191 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-v2krf" Jan 20 17:07:48 crc kubenswrapper[4558]: I0120 17:07:48.553308 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-86d9c4d694-78ch5"] Jan 20 17:07:48 crc kubenswrapper[4558]: I0120 17:07:48.566409 4558 scope.go:117] "RemoveContainer" containerID="a711a286282347590079d2447ef37fa9ed0f11085d6d7a65f85e65c1c2df608f" Jan 20 17:07:48 crc kubenswrapper[4558]: E0120 17:07:48.566790 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:07:48 crc kubenswrapper[4558]: I0120 17:07:48.576258 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2" path="/var/lib/kubelet/pods/c39fd5ad-a9fd-4240-a3f2-e7ad7c6bdee2/volumes" Jan 20 17:07:48 crc kubenswrapper[4558]: I0120 17:07:48.595702 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4c9f9871-91f4-4670-a6af-0c493f3ce85b-config-data\") pod \"4c9f9871-91f4-4670-a6af-0c493f3ce85b\" (UID: \"4c9f9871-91f4-4670-a6af-0c493f3ce85b\") " Jan 20 17:07:48 crc kubenswrapper[4558]: I0120 17:07:48.595768 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sbvgv\" (UniqueName: \"kubernetes.io/projected/4c9f9871-91f4-4670-a6af-0c493f3ce85b-kube-api-access-sbvgv\") pod \"4c9f9871-91f4-4670-a6af-0c493f3ce85b\" (UID: \"4c9f9871-91f4-4670-a6af-0c493f3ce85b\") " Jan 20 17:07:48 crc kubenswrapper[4558]: I0120 17:07:48.595790 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4c9f9871-91f4-4670-a6af-0c493f3ce85b-db-sync-config-data\") pod \"4c9f9871-91f4-4670-a6af-0c493f3ce85b\" (UID: \"4c9f9871-91f4-4670-a6af-0c493f3ce85b\") " Jan 20 17:07:48 crc kubenswrapper[4558]: I0120 17:07:48.595811 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4c9f9871-91f4-4670-a6af-0c493f3ce85b-etc-machine-id\") pod \"4c9f9871-91f4-4670-a6af-0c493f3ce85b\" (UID: \"4c9f9871-91f4-4670-a6af-0c493f3ce85b\") " Jan 20 17:07:48 crc kubenswrapper[4558]: I0120 17:07:48.595847 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4c9f9871-91f4-4670-a6af-0c493f3ce85b-scripts\") pod \"4c9f9871-91f4-4670-a6af-0c493f3ce85b\" (UID: \"4c9f9871-91f4-4670-a6af-0c493f3ce85b\") " Jan 20 17:07:48 crc kubenswrapper[4558]: I0120 17:07:48.595935 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/4c9f9871-91f4-4670-a6af-0c493f3ce85b-combined-ca-bundle\") pod \"4c9f9871-91f4-4670-a6af-0c493f3ce85b\" (UID: \"4c9f9871-91f4-4670-a6af-0c493f3ce85b\") " Jan 20 17:07:48 crc kubenswrapper[4558]: I0120 17:07:48.598203 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4c9f9871-91f4-4670-a6af-0c493f3ce85b-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "4c9f9871-91f4-4670-a6af-0c493f3ce85b" (UID: "4c9f9871-91f4-4670-a6af-0c493f3ce85b"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:07:48 crc kubenswrapper[4558]: I0120 17:07:48.602947 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c9f9871-91f4-4670-a6af-0c493f3ce85b-scripts" (OuterVolumeSpecName: "scripts") pod "4c9f9871-91f4-4670-a6af-0c493f3ce85b" (UID: "4c9f9871-91f4-4670-a6af-0c493f3ce85b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:07:48 crc kubenswrapper[4558]: I0120 17:07:48.603058 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4c9f9871-91f4-4670-a6af-0c493f3ce85b-kube-api-access-sbvgv" (OuterVolumeSpecName: "kube-api-access-sbvgv") pod "4c9f9871-91f4-4670-a6af-0c493f3ce85b" (UID: "4c9f9871-91f4-4670-a6af-0c493f3ce85b"). InnerVolumeSpecName "kube-api-access-sbvgv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:07:48 crc kubenswrapper[4558]: I0120 17:07:48.607950 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c9f9871-91f4-4670-a6af-0c493f3ce85b-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "4c9f9871-91f4-4670-a6af-0c493f3ce85b" (UID: "4c9f9871-91f4-4670-a6af-0c493f3ce85b"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:07:48 crc kubenswrapper[4558]: I0120 17:07:48.629088 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c9f9871-91f4-4670-a6af-0c493f3ce85b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4c9f9871-91f4-4670-a6af-0c493f3ce85b" (UID: "4c9f9871-91f4-4670-a6af-0c493f3ce85b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:07:48 crc kubenswrapper[4558]: I0120 17:07:48.664355 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c9f9871-91f4-4670-a6af-0c493f3ce85b-config-data" (OuterVolumeSpecName: "config-data") pod "4c9f9871-91f4-4670-a6af-0c493f3ce85b" (UID: "4c9f9871-91f4-4670-a6af-0c493f3ce85b"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:07:48 crc kubenswrapper[4558]: I0120 17:07:48.699025 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sbvgv\" (UniqueName: \"kubernetes.io/projected/4c9f9871-91f4-4670-a6af-0c493f3ce85b-kube-api-access-sbvgv\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:48 crc kubenswrapper[4558]: I0120 17:07:48.699052 4558 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4c9f9871-91f4-4670-a6af-0c493f3ce85b-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:48 crc kubenswrapper[4558]: I0120 17:07:48.699062 4558 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4c9f9871-91f4-4670-a6af-0c493f3ce85b-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:48 crc kubenswrapper[4558]: I0120 17:07:48.699070 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4c9f9871-91f4-4670-a6af-0c493f3ce85b-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:48 crc kubenswrapper[4558]: I0120 17:07:48.699081 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c9f9871-91f4-4670-a6af-0c493f3ce85b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:48 crc kubenswrapper[4558]: I0120 17:07:48.699091 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4c9f9871-91f4-4670-a6af-0c493f3ce85b-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:48 crc kubenswrapper[4558]: I0120 17:07:48.766525 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-api-5ffbfb44-sbdwx"] Jan 20 17:07:48 crc kubenswrapper[4558]: I0120 17:07:48.817731 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:07:48 crc kubenswrapper[4558]: I0120 17:07:48.905207 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e5918aaf-a79a-4f8e-af24-0a538b61af22-run-httpd\") pod \"e5918aaf-a79a-4f8e-af24-0a538b61af22\" (UID: \"e5918aaf-a79a-4f8e-af24-0a538b61af22\") " Jan 20 17:07:48 crc kubenswrapper[4558]: I0120 17:07:48.905290 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hgvtf\" (UniqueName: \"kubernetes.io/projected/e5918aaf-a79a-4f8e-af24-0a538b61af22-kube-api-access-hgvtf\") pod \"e5918aaf-a79a-4f8e-af24-0a538b61af22\" (UID: \"e5918aaf-a79a-4f8e-af24-0a538b61af22\") " Jan 20 17:07:48 crc kubenswrapper[4558]: I0120 17:07:48.905417 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5918aaf-a79a-4f8e-af24-0a538b61af22-combined-ca-bundle\") pod \"e5918aaf-a79a-4f8e-af24-0a538b61af22\" (UID: \"e5918aaf-a79a-4f8e-af24-0a538b61af22\") " Jan 20 17:07:48 crc kubenswrapper[4558]: I0120 17:07:48.905566 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e5918aaf-a79a-4f8e-af24-0a538b61af22-sg-core-conf-yaml\") pod \"e5918aaf-a79a-4f8e-af24-0a538b61af22\" (UID: \"e5918aaf-a79a-4f8e-af24-0a538b61af22\") " Jan 20 17:07:48 crc kubenswrapper[4558]: I0120 17:07:48.905592 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e5918aaf-a79a-4f8e-af24-0a538b61af22-config-data\") pod \"e5918aaf-a79a-4f8e-af24-0a538b61af22\" (UID: \"e5918aaf-a79a-4f8e-af24-0a538b61af22\") " Jan 20 17:07:48 crc kubenswrapper[4558]: I0120 17:07:48.905609 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e5918aaf-a79a-4f8e-af24-0a538b61af22-log-httpd\") pod \"e5918aaf-a79a-4f8e-af24-0a538b61af22\" (UID: \"e5918aaf-a79a-4f8e-af24-0a538b61af22\") " Jan 20 17:07:48 crc kubenswrapper[4558]: I0120 17:07:48.905627 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e5918aaf-a79a-4f8e-af24-0a538b61af22-scripts\") pod \"e5918aaf-a79a-4f8e-af24-0a538b61af22\" (UID: \"e5918aaf-a79a-4f8e-af24-0a538b61af22\") " Jan 20 17:07:48 crc kubenswrapper[4558]: I0120 17:07:48.906362 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e5918aaf-a79a-4f8e-af24-0a538b61af22-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "e5918aaf-a79a-4f8e-af24-0a538b61af22" (UID: "e5918aaf-a79a-4f8e-af24-0a538b61af22"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:07:48 crc kubenswrapper[4558]: I0120 17:07:48.906634 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e5918aaf-a79a-4f8e-af24-0a538b61af22-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "e5918aaf-a79a-4f8e-af24-0a538b61af22" (UID: "e5918aaf-a79a-4f8e-af24-0a538b61af22"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:07:48 crc kubenswrapper[4558]: I0120 17:07:48.922483 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e5918aaf-a79a-4f8e-af24-0a538b61af22-scripts" (OuterVolumeSpecName: "scripts") pod "e5918aaf-a79a-4f8e-af24-0a538b61af22" (UID: "e5918aaf-a79a-4f8e-af24-0a538b61af22"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:07:48 crc kubenswrapper[4558]: I0120 17:07:48.922823 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e5918aaf-a79a-4f8e-af24-0a538b61af22-kube-api-access-hgvtf" (OuterVolumeSpecName: "kube-api-access-hgvtf") pod "e5918aaf-a79a-4f8e-af24-0a538b61af22" (UID: "e5918aaf-a79a-4f8e-af24-0a538b61af22"). InnerVolumeSpecName "kube-api-access-hgvtf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:07:48 crc kubenswrapper[4558]: I0120 17:07:48.968697 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e5918aaf-a79a-4f8e-af24-0a538b61af22-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "e5918aaf-a79a-4f8e-af24-0a538b61af22" (UID: "e5918aaf-a79a-4f8e-af24-0a538b61af22"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:07:48 crc kubenswrapper[4558]: I0120 17:07:48.990912 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-86d9c4d694-78ch5" event={"ID":"209ec6bc-170f-4c8a-ad7e-e441ace95d1b","Type":"ContainerStarted","Data":"f36f1742d8b3097d9223becd7320632e27297558f65c436f5ad091f8b5074cbd"} Jan 20 17:07:48 crc kubenswrapper[4558]: I0120 17:07:48.990980 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-86d9c4d694-78ch5" event={"ID":"209ec6bc-170f-4c8a-ad7e-e441ace95d1b","Type":"ContainerStarted","Data":"08de7bab4d88a30bfae5bb75c0e24a01f445b709d48ab81d3db8a6f2fc510505"} Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.005384 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"53103fed-90a2-4307-873a-7aa8e9a0f858","Type":"ContainerStarted","Data":"22accb0253cb0db6d14c25355240088e9f546a439c16b31aa2b65ef38ba02d2b"} Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.008720 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e5918aaf-a79a-4f8e-af24-0a538b61af22-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.008745 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e5918aaf-a79a-4f8e-af24-0a538b61af22-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.008754 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e5918aaf-a79a-4f8e-af24-0a538b61af22-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.008767 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e5918aaf-a79a-4f8e-af24-0a538b61af22-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.008776 4558 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-hgvtf\" (UniqueName: \"kubernetes.io/projected/e5918aaf-a79a-4f8e-af24-0a538b61af22-kube-api-access-hgvtf\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.032284 4558 generic.go:334] "Generic (PLEG): container finished" podID="e5918aaf-a79a-4f8e-af24-0a538b61af22" containerID="3d3b3bad3315e7a5d2bf5acdd694484083370e47a7e9c70b7d81c8e0322b5a1c" exitCode=0 Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.032315 4558 generic.go:334] "Generic (PLEG): container finished" podID="e5918aaf-a79a-4f8e-af24-0a538b61af22" containerID="f5e4f86823050fc5341884e7c3457363544d7680acfe65dbce1e28afaa63f176" exitCode=2 Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.032336 4558 generic.go:334] "Generic (PLEG): container finished" podID="e5918aaf-a79a-4f8e-af24-0a538b61af22" containerID="daf89e9a5edbcd4dc5626bac0a6afd5dd0377257a844e9b1cbee689ca493d073" exitCode=0 Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.032344 4558 generic.go:334] "Generic (PLEG): container finished" podID="e5918aaf-a79a-4f8e-af24-0a538b61af22" containerID="c6d6179083d1da62058e87e75412018bf0cc96f7046edc5c269f6ba9f837fdca" exitCode=0 Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.032399 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.032398 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"e5918aaf-a79a-4f8e-af24-0a538b61af22","Type":"ContainerDied","Data":"3d3b3bad3315e7a5d2bf5acdd694484083370e47a7e9c70b7d81c8e0322b5a1c"} Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.032988 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"e5918aaf-a79a-4f8e-af24-0a538b61af22","Type":"ContainerDied","Data":"f5e4f86823050fc5341884e7c3457363544d7680acfe65dbce1e28afaa63f176"} Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.033007 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"e5918aaf-a79a-4f8e-af24-0a538b61af22","Type":"ContainerDied","Data":"daf89e9a5edbcd4dc5626bac0a6afd5dd0377257a844e9b1cbee689ca493d073"} Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.033019 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"e5918aaf-a79a-4f8e-af24-0a538b61af22","Type":"ContainerDied","Data":"c6d6179083d1da62058e87e75412018bf0cc96f7046edc5c269f6ba9f837fdca"} Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.033030 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"e5918aaf-a79a-4f8e-af24-0a538b61af22","Type":"ContainerDied","Data":"45f60300a94bb431bf81c5b13486a19da286eb30558820f3b3af26729c13c5de"} Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.033049 4558 scope.go:117] "RemoveContainer" containerID="3d3b3bad3315e7a5d2bf5acdd694484083370e47a7e9c70b7d81c8e0322b5a1c" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.046138 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-internal-api-0" podStartSLOduration=4.046117879 podStartE2EDuration="4.046117879s" podCreationTimestamp="2026-01-20 17:07:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 
17:07:49.039647592 +0000 UTC m=+1562.799985558" watchObservedRunningTime="2026-01-20 17:07:49.046117879 +0000 UTC m=+1562.806455846" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.056401 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e5918aaf-a79a-4f8e-af24-0a538b61af22-config-data" (OuterVolumeSpecName: "config-data") pod "e5918aaf-a79a-4f8e-af24-0a538b61af22" (UID: "e5918aaf-a79a-4f8e-af24-0a538b61af22"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.084771 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e5918aaf-a79a-4f8e-af24-0a538b61af22-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e5918aaf-a79a-4f8e-af24-0a538b61af22" (UID: "e5918aaf-a79a-4f8e-af24-0a538b61af22"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.088432 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-5ffbfb44-sbdwx" event={"ID":"cd459d10-b71b-45e5-9af4-042acd8bd024","Type":"ContainerStarted","Data":"fbdaa6a00aab65fd715c1c09893e7d76edc2e170868300fe479d50fb4b6a8b7d"} Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.106481 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-7bcc85c7d9-jm8qh" event={"ID":"faade961-fce0-4ad6-a039-4ba83a95dd68","Type":"ContainerStarted","Data":"987117f4a057251c5482c4e15328842920fdcea58544c68a18a70dee0c0bd727"} Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.106525 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-7bcc85c7d9-jm8qh" event={"ID":"faade961-fce0-4ad6-a039-4ba83a95dd68","Type":"ContainerStarted","Data":"8bdce8800380698b3525e8fafce4e103f8ffe07d9a3881dbd922429612c13082"} Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.106537 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-7bcc85c7d9-jm8qh" event={"ID":"faade961-fce0-4ad6-a039-4ba83a95dd68","Type":"ContainerStarted","Data":"f97ce9d190b163d24d32a36261ccedb84e9d2308cd6fb8b7d48d7eb4331e510a"} Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.116547 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-v2krf"] Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.120012 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5918aaf-a79a-4f8e-af24-0a538b61af22-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.121279 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e5918aaf-a79a-4f8e-af24-0a538b61af22-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.136027 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-b8dd4f99b-9gg5b" event={"ID":"043476ec-5392-41af-970c-89d20b6b30a5","Type":"ContainerStarted","Data":"d1ba5118e075c9076247767e8ee5b5f7ddae3519d2f382217f34408cb902f7ec"} Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.136060 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-b8dd4f99b-9gg5b" 
event={"ID":"043476ec-5392-41af-970c-89d20b6b30a5","Type":"ContainerStarted","Data":"a18b5b718a98c1d90904f2a89cfdb392cf52b6b547739f0cef3c92632d95c1e4"} Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.136070 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-b8dd4f99b-9gg5b" event={"ID":"043476ec-5392-41af-970c-89d20b6b30a5","Type":"ContainerStarted","Data":"4c84428a80cbfe98f47d97ac5e8c97645f3b7939e2b685b5e8bf9d6bc5ffd8f9"} Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.139202 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/placement-b8dd4f99b-9gg5b" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.139260 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/placement-b8dd4f99b-9gg5b" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.172480 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-worker-7bcc85c7d9-jm8qh" podStartSLOduration=2.172464506 podStartE2EDuration="2.172464506s" podCreationTimestamp="2026-01-20 17:07:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:07:49.15860399 +0000 UTC m=+1562.918941958" watchObservedRunningTime="2026-01-20 17:07:49.172464506 +0000 UTC m=+1562.932802472" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.188482 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"91e1a579-d120-4f5d-9117-f5e48646a8eb","Type":"ContainerStarted","Data":"f4af7a607838a829488c78ebe28735c0640218c8a4d67ceed1f74cc4aa21ff78"} Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.188547 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"91e1a579-d120-4f5d-9117-f5e48646a8eb","Type":"ContainerStarted","Data":"db1a38fc6024093565d9088a46937eff818680f06d9ee97e07e63e11dde8ebfb"} Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.195856 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:07:49 crc kubenswrapper[4558]: E0120 17:07:49.196330 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5918aaf-a79a-4f8e-af24-0a538b61af22" containerName="ceilometer-central-agent" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.196347 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5918aaf-a79a-4f8e-af24-0a538b61af22" containerName="ceilometer-central-agent" Jan 20 17:07:49 crc kubenswrapper[4558]: E0120 17:07:49.196363 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5918aaf-a79a-4f8e-af24-0a538b61af22" containerName="ceilometer-notification-agent" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.196369 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5918aaf-a79a-4f8e-af24-0a538b61af22" containerName="ceilometer-notification-agent" Jan 20 17:07:49 crc kubenswrapper[4558]: E0120 17:07:49.196378 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5918aaf-a79a-4f8e-af24-0a538b61af22" containerName="sg-core" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.196384 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5918aaf-a79a-4f8e-af24-0a538b61af22" containerName="sg-core" Jan 20 17:07:49 crc kubenswrapper[4558]: E0120 17:07:49.196394 4558 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c9f9871-91f4-4670-a6af-0c493f3ce85b" containerName="cinder-db-sync" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.196400 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c9f9871-91f4-4670-a6af-0c493f3ce85b" containerName="cinder-db-sync" Jan 20 17:07:49 crc kubenswrapper[4558]: E0120 17:07:49.196412 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5918aaf-a79a-4f8e-af24-0a538b61af22" containerName="proxy-httpd" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.196417 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5918aaf-a79a-4f8e-af24-0a538b61af22" containerName="proxy-httpd" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.196609 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e5918aaf-a79a-4f8e-af24-0a538b61af22" containerName="ceilometer-central-agent" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.196628 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e5918aaf-a79a-4f8e-af24-0a538b61af22" containerName="proxy-httpd" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.196635 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="4c9f9871-91f4-4670-a6af-0c493f3ce85b" containerName="cinder-db-sync" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.196644 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e5918aaf-a79a-4f8e-af24-0a538b61af22" containerName="sg-core" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.196656 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e5918aaf-a79a-4f8e-af24-0a538b61af22" containerName="ceilometer-notification-agent" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.197599 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.202892 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.206587 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-scheduler-config-data" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.208682 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/placement-b8dd4f99b-9gg5b" podStartSLOduration=3.208668774 podStartE2EDuration="3.208668774s" podCreationTimestamp="2026-01-20 17:07:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:07:49.208062645 +0000 UTC m=+1562.968400612" watchObservedRunningTime="2026-01-20 17:07:49.208668774 +0000 UTC m=+1562.969006742" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.208713 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-sync-blmnq" event={"ID":"4c9f9871-91f4-4670-a6af-0c493f3ce85b","Type":"ContainerDied","Data":"793413453fcd2492f837a460150f33af8f30eafd397c6efbdc01ad525b8e1075"} Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.208743 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="793413453fcd2492f837a460150f33af8f30eafd397c6efbdc01ad525b8e1075" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.208904 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-db-sync-blmnq" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.223328 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c6c600f3-c24a-4a8b-872d-679316f01e55-scripts\") pod \"cinder-scheduler-0\" (UID: \"c6c600f3-c24a-4a8b-872d-679316f01e55\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.223418 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c6c600f3-c24a-4a8b-872d-679316f01e55-config-data\") pod \"cinder-scheduler-0\" (UID: \"c6c600f3-c24a-4a8b-872d-679316f01e55\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.223476 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c6c600f3-c24a-4a8b-872d-679316f01e55-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"c6c600f3-c24a-4a8b-872d-679316f01e55\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.223555 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c6c600f3-c24a-4a8b-872d-679316f01e55-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"c6c600f3-c24a-4a8b-872d-679316f01e55\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.223581 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6c600f3-c24a-4a8b-872d-679316f01e55-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"c6c600f3-c24a-4a8b-872d-679316f01e55\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.223724 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xswnr\" (UniqueName: \"kubernetes.io/projected/c6c600f3-c24a-4a8b-872d-679316f01e55-kube-api-access-xswnr\") pod \"cinder-scheduler-0\" (UID: \"c6c600f3-c24a-4a8b-872d-679316f01e55\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.267516 4558 scope.go:117] "RemoveContainer" containerID="f5e4f86823050fc5341884e7c3457363544d7680acfe65dbce1e28afaa63f176" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.277364 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-external-api-0" podStartSLOduration=4.2773285340000005 podStartE2EDuration="4.277328534s" podCreationTimestamp="2026-01-20 17:07:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:07:49.261230263 +0000 UTC m=+1563.021568229" watchObservedRunningTime="2026-01-20 17:07:49.277328534 +0000 UTC m=+1563.037666501" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.308691 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.310464 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.310571 4558 scope.go:117] "RemoveContainer" containerID="daf89e9a5edbcd4dc5626bac0a6afd5dd0377257a844e9b1cbee689ca493d073" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.319891 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-api-config-data" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.326003 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9399ecb4-d974-4ebd-b695-4690fd4148b2-config-data-custom\") pod \"cinder-api-0\" (UID: \"9399ecb4-d974-4ebd-b695-4690fd4148b2\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.326037 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9399ecb4-d974-4ebd-b695-4690fd4148b2-logs\") pod \"cinder-api-0\" (UID: \"9399ecb4-d974-4ebd-b695-4690fd4148b2\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.326074 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xswnr\" (UniqueName: \"kubernetes.io/projected/c6c600f3-c24a-4a8b-872d-679316f01e55-kube-api-access-xswnr\") pod \"cinder-scheduler-0\" (UID: \"c6c600f3-c24a-4a8b-872d-679316f01e55\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.326110 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qsn5h\" (UniqueName: \"kubernetes.io/projected/9399ecb4-d974-4ebd-b695-4690fd4148b2-kube-api-access-qsn5h\") pod \"cinder-api-0\" (UID: \"9399ecb4-d974-4ebd-b695-4690fd4148b2\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.326150 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c6c600f3-c24a-4a8b-872d-679316f01e55-scripts\") pod \"cinder-scheduler-0\" (UID: \"c6c600f3-c24a-4a8b-872d-679316f01e55\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.326185 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9399ecb4-d974-4ebd-b695-4690fd4148b2-etc-machine-id\") pod \"cinder-api-0\" (UID: \"9399ecb4-d974-4ebd-b695-4690fd4148b2\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.326212 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c6c600f3-c24a-4a8b-872d-679316f01e55-config-data\") pod \"cinder-scheduler-0\" (UID: \"c6c600f3-c24a-4a8b-872d-679316f01e55\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.326234 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9399ecb4-d974-4ebd-b695-4690fd4148b2-config-data\") pod \"cinder-api-0\" (UID: \"9399ecb4-d974-4ebd-b695-4690fd4148b2\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.326254 4558 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c6c600f3-c24a-4a8b-872d-679316f01e55-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"c6c600f3-c24a-4a8b-872d-679316f01e55\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.326275 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9399ecb4-d974-4ebd-b695-4690fd4148b2-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"9399ecb4-d974-4ebd-b695-4690fd4148b2\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.326306 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c6c600f3-c24a-4a8b-872d-679316f01e55-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"c6c600f3-c24a-4a8b-872d-679316f01e55\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.326334 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6c600f3-c24a-4a8b-872d-679316f01e55-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"c6c600f3-c24a-4a8b-872d-679316f01e55\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.326348 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9399ecb4-d974-4ebd-b695-4690fd4148b2-scripts\") pod \"cinder-api-0\" (UID: \"9399ecb4-d974-4ebd-b695-4690fd4148b2\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.327446 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c6c600f3-c24a-4a8b-872d-679316f01e55-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"c6c600f3-c24a-4a8b-872d-679316f01e55\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.333880 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c6c600f3-c24a-4a8b-872d-679316f01e55-scripts\") pod \"cinder-scheduler-0\" (UID: \"c6c600f3-c24a-4a8b-872d-679316f01e55\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.338114 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6c600f3-c24a-4a8b-872d-679316f01e55-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"c6c600f3-c24a-4a8b-872d-679316f01e55\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.345475 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c6c600f3-c24a-4a8b-872d-679316f01e55-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"c6c600f3-c24a-4a8b-872d-679316f01e55\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.345638 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/c6c600f3-c24a-4a8b-872d-679316f01e55-config-data\") pod \"cinder-scheduler-0\" (UID: \"c6c600f3-c24a-4a8b-872d-679316f01e55\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.348961 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.350071 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xswnr\" (UniqueName: \"kubernetes.io/projected/c6c600f3-c24a-4a8b-872d-679316f01e55-kube-api-access-xswnr\") pod \"cinder-scheduler-0\" (UID: \"c6c600f3-c24a-4a8b-872d-679316f01e55\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.370050 4558 scope.go:117] "RemoveContainer" containerID="c6d6179083d1da62058e87e75412018bf0cc96f7046edc5c269f6ba9f837fdca" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.372357 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.376840 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.409802 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.411988 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.424797 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.425309 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.425566 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.428815 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9399ecb4-d974-4ebd-b695-4690fd4148b2-etc-machine-id\") pod \"cinder-api-0\" (UID: \"9399ecb4-d974-4ebd-b695-4690fd4148b2\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.428891 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90d455f2-55fa-4028-85a0-346675b2194d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"90d455f2-55fa-4028-85a0-346675b2194d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.429018 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9399ecb4-d974-4ebd-b695-4690fd4148b2-config-data\") pod \"cinder-api-0\" (UID: \"9399ecb4-d974-4ebd-b695-4690fd4148b2\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.429033 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9399ecb4-d974-4ebd-b695-4690fd4148b2-etc-machine-id\") pod \"cinder-api-0\" (UID: \"9399ecb4-d974-4ebd-b695-4690fd4148b2\") " 
pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.429048 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8b6pg\" (UniqueName: \"kubernetes.io/projected/90d455f2-55fa-4028-85a0-346675b2194d-kube-api-access-8b6pg\") pod \"ceilometer-0\" (UID: \"90d455f2-55fa-4028-85a0-346675b2194d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.429122 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9399ecb4-d974-4ebd-b695-4690fd4148b2-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"9399ecb4-d974-4ebd-b695-4690fd4148b2\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.431643 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9399ecb4-d974-4ebd-b695-4690fd4148b2-scripts\") pod \"cinder-api-0\" (UID: \"9399ecb4-d974-4ebd-b695-4690fd4148b2\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.431704 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/90d455f2-55fa-4028-85a0-346675b2194d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"90d455f2-55fa-4028-85a0-346675b2194d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.431816 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9399ecb4-d974-4ebd-b695-4690fd4148b2-config-data-custom\") pod \"cinder-api-0\" (UID: \"9399ecb4-d974-4ebd-b695-4690fd4148b2\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.431843 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9399ecb4-d974-4ebd-b695-4690fd4148b2-logs\") pod \"cinder-api-0\" (UID: \"9399ecb4-d974-4ebd-b695-4690fd4148b2\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.431863 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/90d455f2-55fa-4028-85a0-346675b2194d-run-httpd\") pod \"ceilometer-0\" (UID: \"90d455f2-55fa-4028-85a0-346675b2194d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.431891 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/90d455f2-55fa-4028-85a0-346675b2194d-config-data\") pod \"ceilometer-0\" (UID: \"90d455f2-55fa-4028-85a0-346675b2194d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.431959 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/90d455f2-55fa-4028-85a0-346675b2194d-log-httpd\") pod \"ceilometer-0\" (UID: \"90d455f2-55fa-4028-85a0-346675b2194d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.432013 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"kube-api-access-qsn5h\" (UniqueName: \"kubernetes.io/projected/9399ecb4-d974-4ebd-b695-4690fd4148b2-kube-api-access-qsn5h\") pod \"cinder-api-0\" (UID: \"9399ecb4-d974-4ebd-b695-4690fd4148b2\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.432066 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/90d455f2-55fa-4028-85a0-346675b2194d-scripts\") pod \"ceilometer-0\" (UID: \"90d455f2-55fa-4028-85a0-346675b2194d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.434146 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9399ecb4-d974-4ebd-b695-4690fd4148b2-logs\") pod \"cinder-api-0\" (UID: \"9399ecb4-d974-4ebd-b695-4690fd4148b2\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.434367 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9399ecb4-d974-4ebd-b695-4690fd4148b2-config-data\") pod \"cinder-api-0\" (UID: \"9399ecb4-d974-4ebd-b695-4690fd4148b2\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.436473 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9399ecb4-d974-4ebd-b695-4690fd4148b2-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"9399ecb4-d974-4ebd-b695-4690fd4148b2\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.438036 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9399ecb4-d974-4ebd-b695-4690fd4148b2-config-data-custom\") pod \"cinder-api-0\" (UID: \"9399ecb4-d974-4ebd-b695-4690fd4148b2\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.438639 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9399ecb4-d974-4ebd-b695-4690fd4148b2-scripts\") pod \"cinder-api-0\" (UID: \"9399ecb4-d974-4ebd-b695-4690fd4148b2\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.456623 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qsn5h\" (UniqueName: \"kubernetes.io/projected/9399ecb4-d974-4ebd-b695-4690fd4148b2-kube-api-access-qsn5h\") pod \"cinder-api-0\" (UID: \"9399ecb4-d974-4ebd-b695-4690fd4148b2\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.461236 4558 scope.go:117] "RemoveContainer" containerID="3d3b3bad3315e7a5d2bf5acdd694484083370e47a7e9c70b7d81c8e0322b5a1c" Jan 20 17:07:49 crc kubenswrapper[4558]: E0120 17:07:49.462496 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3d3b3bad3315e7a5d2bf5acdd694484083370e47a7e9c70b7d81c8e0322b5a1c\": container with ID starting with 3d3b3bad3315e7a5d2bf5acdd694484083370e47a7e9c70b7d81c8e0322b5a1c not found: ID does not exist" containerID="3d3b3bad3315e7a5d2bf5acdd694484083370e47a7e9c70b7d81c8e0322b5a1c" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.462545 4558 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"3d3b3bad3315e7a5d2bf5acdd694484083370e47a7e9c70b7d81c8e0322b5a1c"} err="failed to get container status \"3d3b3bad3315e7a5d2bf5acdd694484083370e47a7e9c70b7d81c8e0322b5a1c\": rpc error: code = NotFound desc = could not find container \"3d3b3bad3315e7a5d2bf5acdd694484083370e47a7e9c70b7d81c8e0322b5a1c\": container with ID starting with 3d3b3bad3315e7a5d2bf5acdd694484083370e47a7e9c70b7d81c8e0322b5a1c not found: ID does not exist" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.462567 4558 scope.go:117] "RemoveContainer" containerID="f5e4f86823050fc5341884e7c3457363544d7680acfe65dbce1e28afaa63f176" Jan 20 17:07:49 crc kubenswrapper[4558]: E0120 17:07:49.464637 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f5e4f86823050fc5341884e7c3457363544d7680acfe65dbce1e28afaa63f176\": container with ID starting with f5e4f86823050fc5341884e7c3457363544d7680acfe65dbce1e28afaa63f176 not found: ID does not exist" containerID="f5e4f86823050fc5341884e7c3457363544d7680acfe65dbce1e28afaa63f176" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.464681 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f5e4f86823050fc5341884e7c3457363544d7680acfe65dbce1e28afaa63f176"} err="failed to get container status \"f5e4f86823050fc5341884e7c3457363544d7680acfe65dbce1e28afaa63f176\": rpc error: code = NotFound desc = could not find container \"f5e4f86823050fc5341884e7c3457363544d7680acfe65dbce1e28afaa63f176\": container with ID starting with f5e4f86823050fc5341884e7c3457363544d7680acfe65dbce1e28afaa63f176 not found: ID does not exist" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.464713 4558 scope.go:117] "RemoveContainer" containerID="daf89e9a5edbcd4dc5626bac0a6afd5dd0377257a844e9b1cbee689ca493d073" Jan 20 17:07:49 crc kubenswrapper[4558]: E0120 17:07:49.465192 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"daf89e9a5edbcd4dc5626bac0a6afd5dd0377257a844e9b1cbee689ca493d073\": container with ID starting with daf89e9a5edbcd4dc5626bac0a6afd5dd0377257a844e9b1cbee689ca493d073 not found: ID does not exist" containerID="daf89e9a5edbcd4dc5626bac0a6afd5dd0377257a844e9b1cbee689ca493d073" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.465215 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"daf89e9a5edbcd4dc5626bac0a6afd5dd0377257a844e9b1cbee689ca493d073"} err="failed to get container status \"daf89e9a5edbcd4dc5626bac0a6afd5dd0377257a844e9b1cbee689ca493d073\": rpc error: code = NotFound desc = could not find container \"daf89e9a5edbcd4dc5626bac0a6afd5dd0377257a844e9b1cbee689ca493d073\": container with ID starting with daf89e9a5edbcd4dc5626bac0a6afd5dd0377257a844e9b1cbee689ca493d073 not found: ID does not exist" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.465232 4558 scope.go:117] "RemoveContainer" containerID="c6d6179083d1da62058e87e75412018bf0cc96f7046edc5c269f6ba9f837fdca" Jan 20 17:07:49 crc kubenswrapper[4558]: E0120 17:07:49.471015 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c6d6179083d1da62058e87e75412018bf0cc96f7046edc5c269f6ba9f837fdca\": container with ID starting with c6d6179083d1da62058e87e75412018bf0cc96f7046edc5c269f6ba9f837fdca not found: ID does not exist" 
containerID="c6d6179083d1da62058e87e75412018bf0cc96f7046edc5c269f6ba9f837fdca" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.471038 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c6d6179083d1da62058e87e75412018bf0cc96f7046edc5c269f6ba9f837fdca"} err="failed to get container status \"c6d6179083d1da62058e87e75412018bf0cc96f7046edc5c269f6ba9f837fdca\": rpc error: code = NotFound desc = could not find container \"c6d6179083d1da62058e87e75412018bf0cc96f7046edc5c269f6ba9f837fdca\": container with ID starting with c6d6179083d1da62058e87e75412018bf0cc96f7046edc5c269f6ba9f837fdca not found: ID does not exist" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.471053 4558 scope.go:117] "RemoveContainer" containerID="3d3b3bad3315e7a5d2bf5acdd694484083370e47a7e9c70b7d81c8e0322b5a1c" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.471577 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3d3b3bad3315e7a5d2bf5acdd694484083370e47a7e9c70b7d81c8e0322b5a1c"} err="failed to get container status \"3d3b3bad3315e7a5d2bf5acdd694484083370e47a7e9c70b7d81c8e0322b5a1c\": rpc error: code = NotFound desc = could not find container \"3d3b3bad3315e7a5d2bf5acdd694484083370e47a7e9c70b7d81c8e0322b5a1c\": container with ID starting with 3d3b3bad3315e7a5d2bf5acdd694484083370e47a7e9c70b7d81c8e0322b5a1c not found: ID does not exist" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.471632 4558 scope.go:117] "RemoveContainer" containerID="f5e4f86823050fc5341884e7c3457363544d7680acfe65dbce1e28afaa63f176" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.471989 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f5e4f86823050fc5341884e7c3457363544d7680acfe65dbce1e28afaa63f176"} err="failed to get container status \"f5e4f86823050fc5341884e7c3457363544d7680acfe65dbce1e28afaa63f176\": rpc error: code = NotFound desc = could not find container \"f5e4f86823050fc5341884e7c3457363544d7680acfe65dbce1e28afaa63f176\": container with ID starting with f5e4f86823050fc5341884e7c3457363544d7680acfe65dbce1e28afaa63f176 not found: ID does not exist" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.472042 4558 scope.go:117] "RemoveContainer" containerID="daf89e9a5edbcd4dc5626bac0a6afd5dd0377257a844e9b1cbee689ca493d073" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.472546 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"daf89e9a5edbcd4dc5626bac0a6afd5dd0377257a844e9b1cbee689ca493d073"} err="failed to get container status \"daf89e9a5edbcd4dc5626bac0a6afd5dd0377257a844e9b1cbee689ca493d073\": rpc error: code = NotFound desc = could not find container \"daf89e9a5edbcd4dc5626bac0a6afd5dd0377257a844e9b1cbee689ca493d073\": container with ID starting with daf89e9a5edbcd4dc5626bac0a6afd5dd0377257a844e9b1cbee689ca493d073 not found: ID does not exist" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.472586 4558 scope.go:117] "RemoveContainer" containerID="c6d6179083d1da62058e87e75412018bf0cc96f7046edc5c269f6ba9f837fdca" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.473482 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c6d6179083d1da62058e87e75412018bf0cc96f7046edc5c269f6ba9f837fdca"} err="failed to get container status \"c6d6179083d1da62058e87e75412018bf0cc96f7046edc5c269f6ba9f837fdca\": rpc error: code = NotFound desc = could not find 
container \"c6d6179083d1da62058e87e75412018bf0cc96f7046edc5c269f6ba9f837fdca\": container with ID starting with c6d6179083d1da62058e87e75412018bf0cc96f7046edc5c269f6ba9f837fdca not found: ID does not exist" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.473516 4558 scope.go:117] "RemoveContainer" containerID="3d3b3bad3315e7a5d2bf5acdd694484083370e47a7e9c70b7d81c8e0322b5a1c" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.474562 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3d3b3bad3315e7a5d2bf5acdd694484083370e47a7e9c70b7d81c8e0322b5a1c"} err="failed to get container status \"3d3b3bad3315e7a5d2bf5acdd694484083370e47a7e9c70b7d81c8e0322b5a1c\": rpc error: code = NotFound desc = could not find container \"3d3b3bad3315e7a5d2bf5acdd694484083370e47a7e9c70b7d81c8e0322b5a1c\": container with ID starting with 3d3b3bad3315e7a5d2bf5acdd694484083370e47a7e9c70b7d81c8e0322b5a1c not found: ID does not exist" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.474593 4558 scope.go:117] "RemoveContainer" containerID="f5e4f86823050fc5341884e7c3457363544d7680acfe65dbce1e28afaa63f176" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.474806 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f5e4f86823050fc5341884e7c3457363544d7680acfe65dbce1e28afaa63f176"} err="failed to get container status \"f5e4f86823050fc5341884e7c3457363544d7680acfe65dbce1e28afaa63f176\": rpc error: code = NotFound desc = could not find container \"f5e4f86823050fc5341884e7c3457363544d7680acfe65dbce1e28afaa63f176\": container with ID starting with f5e4f86823050fc5341884e7c3457363544d7680acfe65dbce1e28afaa63f176 not found: ID does not exist" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.474821 4558 scope.go:117] "RemoveContainer" containerID="daf89e9a5edbcd4dc5626bac0a6afd5dd0377257a844e9b1cbee689ca493d073" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.475102 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"daf89e9a5edbcd4dc5626bac0a6afd5dd0377257a844e9b1cbee689ca493d073"} err="failed to get container status \"daf89e9a5edbcd4dc5626bac0a6afd5dd0377257a844e9b1cbee689ca493d073\": rpc error: code = NotFound desc = could not find container \"daf89e9a5edbcd4dc5626bac0a6afd5dd0377257a844e9b1cbee689ca493d073\": container with ID starting with daf89e9a5edbcd4dc5626bac0a6afd5dd0377257a844e9b1cbee689ca493d073 not found: ID does not exist" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.475116 4558 scope.go:117] "RemoveContainer" containerID="c6d6179083d1da62058e87e75412018bf0cc96f7046edc5c269f6ba9f837fdca" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.475571 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c6d6179083d1da62058e87e75412018bf0cc96f7046edc5c269f6ba9f837fdca"} err="failed to get container status \"c6d6179083d1da62058e87e75412018bf0cc96f7046edc5c269f6ba9f837fdca\": rpc error: code = NotFound desc = could not find container \"c6d6179083d1da62058e87e75412018bf0cc96f7046edc5c269f6ba9f837fdca\": container with ID starting with c6d6179083d1da62058e87e75412018bf0cc96f7046edc5c269f6ba9f837fdca not found: ID does not exist" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.475593 4558 scope.go:117] "RemoveContainer" containerID="3d3b3bad3315e7a5d2bf5acdd694484083370e47a7e9c70b7d81c8e0322b5a1c" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.475827 4558 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3d3b3bad3315e7a5d2bf5acdd694484083370e47a7e9c70b7d81c8e0322b5a1c"} err="failed to get container status \"3d3b3bad3315e7a5d2bf5acdd694484083370e47a7e9c70b7d81c8e0322b5a1c\": rpc error: code = NotFound desc = could not find container \"3d3b3bad3315e7a5d2bf5acdd694484083370e47a7e9c70b7d81c8e0322b5a1c\": container with ID starting with 3d3b3bad3315e7a5d2bf5acdd694484083370e47a7e9c70b7d81c8e0322b5a1c not found: ID does not exist" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.475845 4558 scope.go:117] "RemoveContainer" containerID="f5e4f86823050fc5341884e7c3457363544d7680acfe65dbce1e28afaa63f176" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.476643 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f5e4f86823050fc5341884e7c3457363544d7680acfe65dbce1e28afaa63f176"} err="failed to get container status \"f5e4f86823050fc5341884e7c3457363544d7680acfe65dbce1e28afaa63f176\": rpc error: code = NotFound desc = could not find container \"f5e4f86823050fc5341884e7c3457363544d7680acfe65dbce1e28afaa63f176\": container with ID starting with f5e4f86823050fc5341884e7c3457363544d7680acfe65dbce1e28afaa63f176 not found: ID does not exist" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.476680 4558 scope.go:117] "RemoveContainer" containerID="daf89e9a5edbcd4dc5626bac0a6afd5dd0377257a844e9b1cbee689ca493d073" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.479659 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"daf89e9a5edbcd4dc5626bac0a6afd5dd0377257a844e9b1cbee689ca493d073"} err="failed to get container status \"daf89e9a5edbcd4dc5626bac0a6afd5dd0377257a844e9b1cbee689ca493d073\": rpc error: code = NotFound desc = could not find container \"daf89e9a5edbcd4dc5626bac0a6afd5dd0377257a844e9b1cbee689ca493d073\": container with ID starting with daf89e9a5edbcd4dc5626bac0a6afd5dd0377257a844e9b1cbee689ca493d073 not found: ID does not exist" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.479687 4558 scope.go:117] "RemoveContainer" containerID="c6d6179083d1da62058e87e75412018bf0cc96f7046edc5c269f6ba9f837fdca" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.481620 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c6d6179083d1da62058e87e75412018bf0cc96f7046edc5c269f6ba9f837fdca"} err="failed to get container status \"c6d6179083d1da62058e87e75412018bf0cc96f7046edc5c269f6ba9f837fdca\": rpc error: code = NotFound desc = could not find container \"c6d6179083d1da62058e87e75412018bf0cc96f7046edc5c269f6ba9f837fdca\": container with ID starting with c6d6179083d1da62058e87e75412018bf0cc96f7046edc5c269f6ba9f837fdca not found: ID does not exist" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.533559 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/90d455f2-55fa-4028-85a0-346675b2194d-run-httpd\") pod \"ceilometer-0\" (UID: \"90d455f2-55fa-4028-85a0-346675b2194d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.533623 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/90d455f2-55fa-4028-85a0-346675b2194d-config-data\") pod \"ceilometer-0\" (UID: \"90d455f2-55fa-4028-85a0-346675b2194d\") " 
pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.533678 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/90d455f2-55fa-4028-85a0-346675b2194d-log-httpd\") pod \"ceilometer-0\" (UID: \"90d455f2-55fa-4028-85a0-346675b2194d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.533739 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/90d455f2-55fa-4028-85a0-346675b2194d-scripts\") pod \"ceilometer-0\" (UID: \"90d455f2-55fa-4028-85a0-346675b2194d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.533810 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90d455f2-55fa-4028-85a0-346675b2194d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"90d455f2-55fa-4028-85a0-346675b2194d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.533869 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8b6pg\" (UniqueName: \"kubernetes.io/projected/90d455f2-55fa-4028-85a0-346675b2194d-kube-api-access-8b6pg\") pod \"ceilometer-0\" (UID: \"90d455f2-55fa-4028-85a0-346675b2194d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.533956 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/90d455f2-55fa-4028-85a0-346675b2194d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"90d455f2-55fa-4028-85a0-346675b2194d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.534790 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/90d455f2-55fa-4028-85a0-346675b2194d-run-httpd\") pod \"ceilometer-0\" (UID: \"90d455f2-55fa-4028-85a0-346675b2194d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.535184 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.535559 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/90d455f2-55fa-4028-85a0-346675b2194d-log-httpd\") pod \"ceilometer-0\" (UID: \"90d455f2-55fa-4028-85a0-346675b2194d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.538712 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90d455f2-55fa-4028-85a0-346675b2194d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"90d455f2-55fa-4028-85a0-346675b2194d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.538723 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/90d455f2-55fa-4028-85a0-346675b2194d-config-data\") pod \"ceilometer-0\" (UID: \"90d455f2-55fa-4028-85a0-346675b2194d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.538860 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/90d455f2-55fa-4028-85a0-346675b2194d-scripts\") pod \"ceilometer-0\" (UID: \"90d455f2-55fa-4028-85a0-346675b2194d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.546477 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/90d455f2-55fa-4028-85a0-346675b2194d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"90d455f2-55fa-4028-85a0-346675b2194d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.549781 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8b6pg\" (UniqueName: \"kubernetes.io/projected/90d455f2-55fa-4028-85a0-346675b2194d-kube-api-access-8b6pg\") pod \"ceilometer-0\" (UID: \"90d455f2-55fa-4028-85a0-346675b2194d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.647030 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:07:49 crc kubenswrapper[4558]: I0120 17:07:49.734292 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:07:50 crc kubenswrapper[4558]: I0120 17:07:50.005712 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:07:50 crc kubenswrapper[4558]: I0120 17:07:50.137197 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:07:50 crc kubenswrapper[4558]: I0120 17:07:50.224639 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:07:50 crc kubenswrapper[4558]: W0120 17:07:50.227522 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod90d455f2_55fa_4028_85a0_346675b2194d.slice/crio-275d48aaffa9e56938daffb561b32f7f37dfe6673cd13d571a8763291224f6ea WatchSource:0}: Error finding container 275d48aaffa9e56938daffb561b32f7f37dfe6673cd13d571a8763291224f6ea: Status 404 returned error can't find the container with id 275d48aaffa9e56938daffb561b32f7f37dfe6673cd13d571a8763291224f6ea Jan 20 17:07:50 crc kubenswrapper[4558]: I0120 17:07:50.227965 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-86d9c4d694-78ch5" event={"ID":"209ec6bc-170f-4c8a-ad7e-e441ace95d1b","Type":"ContainerStarted","Data":"63f037fefd6327562c7c2f8bf9caa685258bcc48a23627261cd9883239f176f0"} Jan 20 17:07:50 crc kubenswrapper[4558]: I0120 17:07:50.237644 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-5ffbfb44-sbdwx" event={"ID":"cd459d10-b71b-45e5-9af4-042acd8bd024","Type":"ContainerStarted","Data":"96e36a345e076d95ab8b9b5325771dea828755c4ed25a1e273a730a00f809fc3"} Jan 20 17:07:50 crc kubenswrapper[4558]: I0120 17:07:50.237678 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-5ffbfb44-sbdwx" event={"ID":"cd459d10-b71b-45e5-9af4-042acd8bd024","Type":"ContainerStarted","Data":"55a7a9bf0c591ba10d44b7ae55412d2ae7401940ae82cc0cc371573754636419"} Jan 20 17:07:50 crc kubenswrapper[4558]: I0120 17:07:50.237781 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/barbican-api-5ffbfb44-sbdwx" Jan 20 17:07:50 crc kubenswrapper[4558]: I0120 17:07:50.239695 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"9399ecb4-d974-4ebd-b695-4690fd4148b2","Type":"ContainerStarted","Data":"f56592ac5c257c3c00c4b138cc71818cb6ac3adda2b0e2263d32abf09218740e"} Jan 20 17:07:50 crc kubenswrapper[4558]: I0120 17:07:50.240783 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"c6c600f3-c24a-4a8b-872d-679316f01e55","Type":"ContainerStarted","Data":"de429302d8ea2b548235b2fdadaee86bf6e719d8fdf08a361aa6c245db9bcf83"} Jan 20 17:07:50 crc kubenswrapper[4558]: I0120 17:07:50.243033 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-v2krf" event={"ID":"23f3fc6a-3d4e-4e04-87c7-c819069b4a89","Type":"ContainerStarted","Data":"f9b4e33892cfb054ac4d9d36cf2ff5895e08ea9f1492b277bd8903e77f17643b"} Jan 20 17:07:50 crc kubenswrapper[4558]: I0120 17:07:50.243056 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-v2krf" event={"ID":"23f3fc6a-3d4e-4e04-87c7-c819069b4a89","Type":"ContainerStarted","Data":"31a7c56eed1c1d1ea44742f66032094b20676034a762e42d50458576ab81873a"} Jan 
20 17:07:50 crc kubenswrapper[4558]: I0120 17:07:50.306415 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-keystone-listener-86d9c4d694-78ch5" podStartSLOduration=3.306385355 podStartE2EDuration="3.306385355s" podCreationTimestamp="2026-01-20 17:07:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:07:50.265865491 +0000 UTC m=+1564.026203457" watchObservedRunningTime="2026-01-20 17:07:50.306385355 +0000 UTC m=+1564.066723312" Jan 20 17:07:50 crc kubenswrapper[4558]: I0120 17:07:50.315068 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/keystone-bootstrap-v2krf" podStartSLOduration=2.315044538 podStartE2EDuration="2.315044538s" podCreationTimestamp="2026-01-20 17:07:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:07:50.281114405 +0000 UTC m=+1564.041452371" watchObservedRunningTime="2026-01-20 17:07:50.315044538 +0000 UTC m=+1564.075382505" Jan 20 17:07:50 crc kubenswrapper[4558]: I0120 17:07:50.331389 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-api-5ffbfb44-sbdwx" podStartSLOduration=3.331373113 podStartE2EDuration="3.331373113s" podCreationTimestamp="2026-01-20 17:07:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:07:50.295093622 +0000 UTC m=+1564.055431589" watchObservedRunningTime="2026-01-20 17:07:50.331373113 +0000 UTC m=+1564.091711079" Jan 20 17:07:50 crc kubenswrapper[4558]: I0120 17:07:50.586718 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e5918aaf-a79a-4f8e-af24-0a538b61af22" path="/var/lib/kubelet/pods/e5918aaf-a79a-4f8e-af24-0a538b61af22/volumes" Jan 20 17:07:51 crc kubenswrapper[4558]: I0120 17:07:51.264108 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"9399ecb4-d974-4ebd-b695-4690fd4148b2","Type":"ContainerStarted","Data":"fac678fd674af0bfa9f5c02dc46fd986a3d2785451706905e31ca835bcb7242a"} Jan 20 17:07:51 crc kubenswrapper[4558]: I0120 17:07:51.269536 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"90d455f2-55fa-4028-85a0-346675b2194d","Type":"ContainerStarted","Data":"a705863ebb9d52a220fb61755f22b86b64de0f93f5cff893ab6d746f51d915da"} Jan 20 17:07:51 crc kubenswrapper[4558]: I0120 17:07:51.269580 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"90d455f2-55fa-4028-85a0-346675b2194d","Type":"ContainerStarted","Data":"275d48aaffa9e56938daffb561b32f7f37dfe6673cd13d571a8763291224f6ea"} Jan 20 17:07:51 crc kubenswrapper[4558]: I0120 17:07:51.272958 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"c6c600f3-c24a-4a8b-872d-679316f01e55","Type":"ContainerStarted","Data":"01cbe3fb135e1ae6155a40c536acc9c77c3d28806d529f69929510c415648798"} Jan 20 17:07:51 crc kubenswrapper[4558]: I0120 17:07:51.274642 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/barbican-api-5ffbfb44-sbdwx" Jan 20 17:07:52 crc kubenswrapper[4558]: I0120 17:07:52.288499 4558 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"9399ecb4-d974-4ebd-b695-4690fd4148b2","Type":"ContainerStarted","Data":"aa9d678e952e4cc0fb869c1b13463323d23601460d6a1e76b9034eddee08933b"} Jan 20 17:07:52 crc kubenswrapper[4558]: I0120 17:07:52.288800 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:07:52 crc kubenswrapper[4558]: I0120 17:07:52.290782 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"90d455f2-55fa-4028-85a0-346675b2194d","Type":"ContainerStarted","Data":"ff1eb8c91c9fbadd1d6273753e9b3efb911664d2db412ea5ed8946aecd1b1d70"} Jan 20 17:07:52 crc kubenswrapper[4558]: I0120 17:07:52.292630 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"c6c600f3-c24a-4a8b-872d-679316f01e55","Type":"ContainerStarted","Data":"26df03d917e9ae72a0576aea96b12fedd027b6583792494940fdbec87b664c67"} Jan 20 17:07:52 crc kubenswrapper[4558]: I0120 17:07:52.313840 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/cinder-api-0" podStartSLOduration=3.313817253 podStartE2EDuration="3.313817253s" podCreationTimestamp="2026-01-20 17:07:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:07:52.309886221 +0000 UTC m=+1566.070224188" watchObservedRunningTime="2026-01-20 17:07:52.313817253 +0000 UTC m=+1566.074155221" Jan 20 17:07:52 crc kubenswrapper[4558]: I0120 17:07:52.338307 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/cinder-scheduler-0" podStartSLOduration=3.338286154 podStartE2EDuration="3.338286154s" podCreationTimestamp="2026-01-20 17:07:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:07:52.333671558 +0000 UTC m=+1566.094009525" watchObservedRunningTime="2026-01-20 17:07:52.338286154 +0000 UTC m=+1566.098624122" Jan 20 17:07:53 crc kubenswrapper[4558]: I0120 17:07:53.303652 4558 generic.go:334] "Generic (PLEG): container finished" podID="23f3fc6a-3d4e-4e04-87c7-c819069b4a89" containerID="f9b4e33892cfb054ac4d9d36cf2ff5895e08ea9f1492b277bd8903e77f17643b" exitCode=0 Jan 20 17:07:53 crc kubenswrapper[4558]: I0120 17:07:53.303737 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-v2krf" event={"ID":"23f3fc6a-3d4e-4e04-87c7-c819069b4a89","Type":"ContainerDied","Data":"f9b4e33892cfb054ac4d9d36cf2ff5895e08ea9f1492b277bd8903e77f17643b"} Jan 20 17:07:53 crc kubenswrapper[4558]: I0120 17:07:53.306372 4558 generic.go:334] "Generic (PLEG): container finished" podID="8ea04114-b82b-4609-8f7f-a9ff17cb3b57" containerID="0a6a7ebe77f730e7953cf0ae10557aacb2780f78536cbdeb6ab122242e514827" exitCode=0 Jan 20 17:07:53 crc kubenswrapper[4558]: I0120 17:07:53.306399 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-sync-fs4x6" event={"ID":"8ea04114-b82b-4609-8f7f-a9ff17cb3b57","Type":"ContainerDied","Data":"0a6a7ebe77f730e7953cf0ae10557aacb2780f78536cbdeb6ab122242e514827"} Jan 20 17:07:53 crc kubenswrapper[4558]: I0120 17:07:53.667086 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:07:54 crc kubenswrapper[4558]: I0120 17:07:54.316316 4558 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"90d455f2-55fa-4028-85a0-346675b2194d","Type":"ContainerStarted","Data":"b44c56e2a3bfa6f791c1435d62c65b4284e66eac297f83e08fcdbb309a19f3f6"} Jan 20 17:07:54 crc kubenswrapper[4558]: I0120 17:07:54.316587 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-api-0" podUID="9399ecb4-d974-4ebd-b695-4690fd4148b2" containerName="cinder-api-log" containerID="cri-o://fac678fd674af0bfa9f5c02dc46fd986a3d2785451706905e31ca835bcb7242a" gracePeriod=30 Jan 20 17:07:54 crc kubenswrapper[4558]: I0120 17:07:54.316622 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-api-0" podUID="9399ecb4-d974-4ebd-b695-4690fd4148b2" containerName="cinder-api" containerID="cri-o://aa9d678e952e4cc0fb869c1b13463323d23601460d6a1e76b9034eddee08933b" gracePeriod=30 Jan 20 17:07:54 crc kubenswrapper[4558]: I0120 17:07:54.431478 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/barbican-api-5ffbfb44-sbdwx" Jan 20 17:07:54 crc kubenswrapper[4558]: I0120 17:07:54.535790 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:07:54 crc kubenswrapper[4558]: I0120 17:07:54.692223 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-db-sync-fs4x6" Jan 20 17:07:54 crc kubenswrapper[4558]: I0120 17:07:54.720954 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-v2krf" Jan 20 17:07:54 crc kubenswrapper[4558]: I0120 17:07:54.840957 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23f3fc6a-3d4e-4e04-87c7-c819069b4a89-config-data\") pod \"23f3fc6a-3d4e-4e04-87c7-c819069b4a89\" (UID: \"23f3fc6a-3d4e-4e04-87c7-c819069b4a89\") " Jan 20 17:07:54 crc kubenswrapper[4558]: I0120 17:07:54.841017 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/23f3fc6a-3d4e-4e04-87c7-c819069b4a89-scripts\") pod \"23f3fc6a-3d4e-4e04-87c7-c819069b4a89\" (UID: \"23f3fc6a-3d4e-4e04-87c7-c819069b4a89\") " Jan 20 17:07:54 crc kubenswrapper[4558]: I0120 17:07:54.841217 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mvlqj\" (UniqueName: \"kubernetes.io/projected/8ea04114-b82b-4609-8f7f-a9ff17cb3b57-kube-api-access-mvlqj\") pod \"8ea04114-b82b-4609-8f7f-a9ff17cb3b57\" (UID: \"8ea04114-b82b-4609-8f7f-a9ff17cb3b57\") " Jan 20 17:07:54 crc kubenswrapper[4558]: I0120 17:07:54.841265 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/8ea04114-b82b-4609-8f7f-a9ff17cb3b57-config\") pod \"8ea04114-b82b-4609-8f7f-a9ff17cb3b57\" (UID: \"8ea04114-b82b-4609-8f7f-a9ff17cb3b57\") " Jan 20 17:07:54 crc kubenswrapper[4558]: I0120 17:07:54.841338 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/23f3fc6a-3d4e-4e04-87c7-c819069b4a89-fernet-keys\") pod \"23f3fc6a-3d4e-4e04-87c7-c819069b4a89\" (UID: \"23f3fc6a-3d4e-4e04-87c7-c819069b4a89\") " Jan 20 17:07:54 crc kubenswrapper[4558]: I0120 17:07:54.841374 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ea04114-b82b-4609-8f7f-a9ff17cb3b57-combined-ca-bundle\") pod \"8ea04114-b82b-4609-8f7f-a9ff17cb3b57\" (UID: \"8ea04114-b82b-4609-8f7f-a9ff17cb3b57\") " Jan 20 17:07:54 crc kubenswrapper[4558]: I0120 17:07:54.841405 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23f3fc6a-3d4e-4e04-87c7-c819069b4a89-combined-ca-bundle\") pod \"23f3fc6a-3d4e-4e04-87c7-c819069b4a89\" (UID: \"23f3fc6a-3d4e-4e04-87c7-c819069b4a89\") " Jan 20 17:07:54 crc kubenswrapper[4558]: I0120 17:07:54.841427 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4bq2x\" (UniqueName: \"kubernetes.io/projected/23f3fc6a-3d4e-4e04-87c7-c819069b4a89-kube-api-access-4bq2x\") pod \"23f3fc6a-3d4e-4e04-87c7-c819069b4a89\" (UID: \"23f3fc6a-3d4e-4e04-87c7-c819069b4a89\") " Jan 20 17:07:54 crc kubenswrapper[4558]: I0120 17:07:54.841483 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/23f3fc6a-3d4e-4e04-87c7-c819069b4a89-credential-keys\") pod \"23f3fc6a-3d4e-4e04-87c7-c819069b4a89\" (UID: \"23f3fc6a-3d4e-4e04-87c7-c819069b4a89\") " Jan 20 17:07:54 crc kubenswrapper[4558]: I0120 17:07:54.855378 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23f3fc6a-3d4e-4e04-87c7-c819069b4a89-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "23f3fc6a-3d4e-4e04-87c7-c819069b4a89" (UID: "23f3fc6a-3d4e-4e04-87c7-c819069b4a89"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:07:54 crc kubenswrapper[4558]: I0120 17:07:54.859419 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8ea04114-b82b-4609-8f7f-a9ff17cb3b57-kube-api-access-mvlqj" (OuterVolumeSpecName: "kube-api-access-mvlqj") pod "8ea04114-b82b-4609-8f7f-a9ff17cb3b57" (UID: "8ea04114-b82b-4609-8f7f-a9ff17cb3b57"). InnerVolumeSpecName "kube-api-access-mvlqj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:07:54 crc kubenswrapper[4558]: I0120 17:07:54.859913 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23f3fc6a-3d4e-4e04-87c7-c819069b4a89-scripts" (OuterVolumeSpecName: "scripts") pod "23f3fc6a-3d4e-4e04-87c7-c819069b4a89" (UID: "23f3fc6a-3d4e-4e04-87c7-c819069b4a89"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:07:54 crc kubenswrapper[4558]: I0120 17:07:54.890746 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23f3fc6a-3d4e-4e04-87c7-c819069b4a89-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "23f3fc6a-3d4e-4e04-87c7-c819069b4a89" (UID: "23f3fc6a-3d4e-4e04-87c7-c819069b4a89"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:07:54 crc kubenswrapper[4558]: I0120 17:07:54.899846 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/23f3fc6a-3d4e-4e04-87c7-c819069b4a89-kube-api-access-4bq2x" (OuterVolumeSpecName: "kube-api-access-4bq2x") pod "23f3fc6a-3d4e-4e04-87c7-c819069b4a89" (UID: "23f3fc6a-3d4e-4e04-87c7-c819069b4a89"). InnerVolumeSpecName "kube-api-access-4bq2x". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:07:54 crc kubenswrapper[4558]: I0120 17:07:54.912234 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ea04114-b82b-4609-8f7f-a9ff17cb3b57-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8ea04114-b82b-4609-8f7f-a9ff17cb3b57" (UID: "8ea04114-b82b-4609-8f7f-a9ff17cb3b57"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:07:54 crc kubenswrapper[4558]: I0120 17:07:54.915122 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23f3fc6a-3d4e-4e04-87c7-c819069b4a89-config-data" (OuterVolumeSpecName: "config-data") pod "23f3fc6a-3d4e-4e04-87c7-c819069b4a89" (UID: "23f3fc6a-3d4e-4e04-87c7-c819069b4a89"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:07:54 crc kubenswrapper[4558]: I0120 17:07:54.939683 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-api-757fb55df8-9qzm2"] Jan 20 17:07:54 crc kubenswrapper[4558]: E0120 17:07:54.940011 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ea04114-b82b-4609-8f7f-a9ff17cb3b57" containerName="neutron-db-sync" Jan 20 17:07:54 crc kubenswrapper[4558]: I0120 17:07:54.940023 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ea04114-b82b-4609-8f7f-a9ff17cb3b57" containerName="neutron-db-sync" Jan 20 17:07:54 crc kubenswrapper[4558]: E0120 17:07:54.940048 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23f3fc6a-3d4e-4e04-87c7-c819069b4a89" containerName="keystone-bootstrap" Jan 20 17:07:54 crc kubenswrapper[4558]: I0120 17:07:54.940054 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="23f3fc6a-3d4e-4e04-87c7-c819069b4a89" containerName="keystone-bootstrap" Jan 20 17:07:54 crc kubenswrapper[4558]: I0120 17:07:54.940222 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ea04114-b82b-4609-8f7f-a9ff17cb3b57" containerName="neutron-db-sync" Jan 20 17:07:54 crc kubenswrapper[4558]: I0120 17:07:54.940236 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="23f3fc6a-3d4e-4e04-87c7-c819069b4a89" containerName="keystone-bootstrap" Jan 20 17:07:54 crc kubenswrapper[4558]: I0120 17:07:54.940993 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-api-757fb55df8-9qzm2" Jan 20 17:07:54 crc kubenswrapper[4558]: I0120 17:07:54.943783 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23f3fc6a-3d4e-4e04-87c7-c819069b4a89-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "23f3fc6a-3d4e-4e04-87c7-c819069b4a89" (UID: "23f3fc6a-3d4e-4e04-87c7-c819069b4a89"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:07:54 crc kubenswrapper[4558]: I0120 17:07:54.944417 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-barbican-internal-svc" Jan 20 17:07:54 crc kubenswrapper[4558]: I0120 17:07:54.944673 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-barbican-public-svc" Jan 20 17:07:54 crc kubenswrapper[4558]: I0120 17:07:54.946012 4558 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/23f3fc6a-3d4e-4e04-87c7-c819069b4a89-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:54 crc kubenswrapper[4558]: I0120 17:07:54.946035 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ea04114-b82b-4609-8f7f-a9ff17cb3b57-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:54 crc kubenswrapper[4558]: I0120 17:07:54.946047 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23f3fc6a-3d4e-4e04-87c7-c819069b4a89-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:54 crc kubenswrapper[4558]: I0120 17:07:54.946055 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4bq2x\" (UniqueName: \"kubernetes.io/projected/23f3fc6a-3d4e-4e04-87c7-c819069b4a89-kube-api-access-4bq2x\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:54 crc kubenswrapper[4558]: I0120 17:07:54.946064 4558 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/23f3fc6a-3d4e-4e04-87c7-c819069b4a89-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:54 crc kubenswrapper[4558]: I0120 17:07:54.946072 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/23f3fc6a-3d4e-4e04-87c7-c819069b4a89-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:54 crc kubenswrapper[4558]: I0120 17:07:54.946080 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/23f3fc6a-3d4e-4e04-87c7-c819069b4a89-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:54 crc kubenswrapper[4558]: I0120 17:07:54.946088 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mvlqj\" (UniqueName: \"kubernetes.io/projected/8ea04114-b82b-4609-8f7f-a9ff17cb3b57-kube-api-access-mvlqj\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:54 crc kubenswrapper[4558]: I0120 17:07:54.968011 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-api-757fb55df8-9qzm2"] Jan 20 17:07:54 crc kubenswrapper[4558]: I0120 17:07:54.988620 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ea04114-b82b-4609-8f7f-a9ff17cb3b57-config" (OuterVolumeSpecName: "config") pod "8ea04114-b82b-4609-8f7f-a9ff17cb3b57" (UID: "8ea04114-b82b-4609-8f7f-a9ff17cb3b57"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.047443 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/87001341-5a74-4796-a332-6d57b2cf11c9-internal-tls-certs\") pod \"barbican-api-757fb55df8-9qzm2\" (UID: \"87001341-5a74-4796-a332-6d57b2cf11c9\") " pod="openstack-kuttl-tests/barbican-api-757fb55df8-9qzm2" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.047522 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fpt4j\" (UniqueName: \"kubernetes.io/projected/87001341-5a74-4796-a332-6d57b2cf11c9-kube-api-access-fpt4j\") pod \"barbican-api-757fb55df8-9qzm2\" (UID: \"87001341-5a74-4796-a332-6d57b2cf11c9\") " pod="openstack-kuttl-tests/barbican-api-757fb55df8-9qzm2" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.047653 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/87001341-5a74-4796-a332-6d57b2cf11c9-logs\") pod \"barbican-api-757fb55df8-9qzm2\" (UID: \"87001341-5a74-4796-a332-6d57b2cf11c9\") " pod="openstack-kuttl-tests/barbican-api-757fb55df8-9qzm2" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.047695 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/87001341-5a74-4796-a332-6d57b2cf11c9-config-data\") pod \"barbican-api-757fb55df8-9qzm2\" (UID: \"87001341-5a74-4796-a332-6d57b2cf11c9\") " pod="openstack-kuttl-tests/barbican-api-757fb55df8-9qzm2" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.047999 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/87001341-5a74-4796-a332-6d57b2cf11c9-public-tls-certs\") pod \"barbican-api-757fb55df8-9qzm2\" (UID: \"87001341-5a74-4796-a332-6d57b2cf11c9\") " pod="openstack-kuttl-tests/barbican-api-757fb55df8-9qzm2" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.048141 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/87001341-5a74-4796-a332-6d57b2cf11c9-config-data-custom\") pod \"barbican-api-757fb55df8-9qzm2\" (UID: \"87001341-5a74-4796-a332-6d57b2cf11c9\") " pod="openstack-kuttl-tests/barbican-api-757fb55df8-9qzm2" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.048351 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/87001341-5a74-4796-a332-6d57b2cf11c9-combined-ca-bundle\") pod \"barbican-api-757fb55df8-9qzm2\" (UID: \"87001341-5a74-4796-a332-6d57b2cf11c9\") " pod="openstack-kuttl-tests/barbican-api-757fb55df8-9qzm2" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.048462 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/8ea04114-b82b-4609-8f7f-a9ff17cb3b57-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.058137 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.150440 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9399ecb4-d974-4ebd-b695-4690fd4148b2-etc-machine-id\") pod \"9399ecb4-d974-4ebd-b695-4690fd4148b2\" (UID: \"9399ecb4-d974-4ebd-b695-4690fd4148b2\") " Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.150640 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9399ecb4-d974-4ebd-b695-4690fd4148b2-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "9399ecb4-d974-4ebd-b695-4690fd4148b2" (UID: "9399ecb4-d974-4ebd-b695-4690fd4148b2"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.150677 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9399ecb4-d974-4ebd-b695-4690fd4148b2-logs\") pod \"9399ecb4-d974-4ebd-b695-4690fd4148b2\" (UID: \"9399ecb4-d974-4ebd-b695-4690fd4148b2\") " Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.150794 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9399ecb4-d974-4ebd-b695-4690fd4148b2-combined-ca-bundle\") pod \"9399ecb4-d974-4ebd-b695-4690fd4148b2\" (UID: \"9399ecb4-d974-4ebd-b695-4690fd4148b2\") " Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.150945 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qsn5h\" (UniqueName: \"kubernetes.io/projected/9399ecb4-d974-4ebd-b695-4690fd4148b2-kube-api-access-qsn5h\") pod \"9399ecb4-d974-4ebd-b695-4690fd4148b2\" (UID: \"9399ecb4-d974-4ebd-b695-4690fd4148b2\") " Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.151047 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9399ecb4-d974-4ebd-b695-4690fd4148b2-logs" (OuterVolumeSpecName: "logs") pod "9399ecb4-d974-4ebd-b695-4690fd4148b2" (UID: "9399ecb4-d974-4ebd-b695-4690fd4148b2"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.151061 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9399ecb4-d974-4ebd-b695-4690fd4148b2-config-data\") pod \"9399ecb4-d974-4ebd-b695-4690fd4148b2\" (UID: \"9399ecb4-d974-4ebd-b695-4690fd4148b2\") " Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.151291 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9399ecb4-d974-4ebd-b695-4690fd4148b2-config-data-custom\") pod \"9399ecb4-d974-4ebd-b695-4690fd4148b2\" (UID: \"9399ecb4-d974-4ebd-b695-4690fd4148b2\") " Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.151447 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9399ecb4-d974-4ebd-b695-4690fd4148b2-scripts\") pod \"9399ecb4-d974-4ebd-b695-4690fd4148b2\" (UID: \"9399ecb4-d974-4ebd-b695-4690fd4148b2\") " Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.152364 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/87001341-5a74-4796-a332-6d57b2cf11c9-logs\") pod \"barbican-api-757fb55df8-9qzm2\" (UID: \"87001341-5a74-4796-a332-6d57b2cf11c9\") " pod="openstack-kuttl-tests/barbican-api-757fb55df8-9qzm2" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.152433 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/87001341-5a74-4796-a332-6d57b2cf11c9-config-data\") pod \"barbican-api-757fb55df8-9qzm2\" (UID: \"87001341-5a74-4796-a332-6d57b2cf11c9\") " pod="openstack-kuttl-tests/barbican-api-757fb55df8-9qzm2" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.152670 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/87001341-5a74-4796-a332-6d57b2cf11c9-public-tls-certs\") pod \"barbican-api-757fb55df8-9qzm2\" (UID: \"87001341-5a74-4796-a332-6d57b2cf11c9\") " pod="openstack-kuttl-tests/barbican-api-757fb55df8-9qzm2" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.152771 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/87001341-5a74-4796-a332-6d57b2cf11c9-config-data-custom\") pod \"barbican-api-757fb55df8-9qzm2\" (UID: \"87001341-5a74-4796-a332-6d57b2cf11c9\") " pod="openstack-kuttl-tests/barbican-api-757fb55df8-9qzm2" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.152890 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/87001341-5a74-4796-a332-6d57b2cf11c9-combined-ca-bundle\") pod \"barbican-api-757fb55df8-9qzm2\" (UID: \"87001341-5a74-4796-a332-6d57b2cf11c9\") " pod="openstack-kuttl-tests/barbican-api-757fb55df8-9qzm2" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.152961 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/87001341-5a74-4796-a332-6d57b2cf11c9-internal-tls-certs\") pod \"barbican-api-757fb55df8-9qzm2\" (UID: \"87001341-5a74-4796-a332-6d57b2cf11c9\") " pod="openstack-kuttl-tests/barbican-api-757fb55df8-9qzm2" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 
17:07:55.153021 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fpt4j\" (UniqueName: \"kubernetes.io/projected/87001341-5a74-4796-a332-6d57b2cf11c9-kube-api-access-fpt4j\") pod \"barbican-api-757fb55df8-9qzm2\" (UID: \"87001341-5a74-4796-a332-6d57b2cf11c9\") " pod="openstack-kuttl-tests/barbican-api-757fb55df8-9qzm2" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.153152 4558 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9399ecb4-d974-4ebd-b695-4690fd4148b2-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.153220 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9399ecb4-d974-4ebd-b695-4690fd4148b2-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.153456 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/87001341-5a74-4796-a332-6d57b2cf11c9-logs\") pod \"barbican-api-757fb55df8-9qzm2\" (UID: \"87001341-5a74-4796-a332-6d57b2cf11c9\") " pod="openstack-kuttl-tests/barbican-api-757fb55df8-9qzm2" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.156971 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9399ecb4-d974-4ebd-b695-4690fd4148b2-scripts" (OuterVolumeSpecName: "scripts") pod "9399ecb4-d974-4ebd-b695-4690fd4148b2" (UID: "9399ecb4-d974-4ebd-b695-4690fd4148b2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.159125 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/87001341-5a74-4796-a332-6d57b2cf11c9-public-tls-certs\") pod \"barbican-api-757fb55df8-9qzm2\" (UID: \"87001341-5a74-4796-a332-6d57b2cf11c9\") " pod="openstack-kuttl-tests/barbican-api-757fb55df8-9qzm2" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.160239 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/87001341-5a74-4796-a332-6d57b2cf11c9-config-data\") pod \"barbican-api-757fb55df8-9qzm2\" (UID: \"87001341-5a74-4796-a332-6d57b2cf11c9\") " pod="openstack-kuttl-tests/barbican-api-757fb55df8-9qzm2" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.161269 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9399ecb4-d974-4ebd-b695-4690fd4148b2-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "9399ecb4-d974-4ebd-b695-4690fd4148b2" (UID: "9399ecb4-d974-4ebd-b695-4690fd4148b2"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.161348 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9399ecb4-d974-4ebd-b695-4690fd4148b2-kube-api-access-qsn5h" (OuterVolumeSpecName: "kube-api-access-qsn5h") pod "9399ecb4-d974-4ebd-b695-4690fd4148b2" (UID: "9399ecb4-d974-4ebd-b695-4690fd4148b2"). InnerVolumeSpecName "kube-api-access-qsn5h". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.162498 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/87001341-5a74-4796-a332-6d57b2cf11c9-internal-tls-certs\") pod \"barbican-api-757fb55df8-9qzm2\" (UID: \"87001341-5a74-4796-a332-6d57b2cf11c9\") " pod="openstack-kuttl-tests/barbican-api-757fb55df8-9qzm2" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.163983 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/87001341-5a74-4796-a332-6d57b2cf11c9-config-data-custom\") pod \"barbican-api-757fb55df8-9qzm2\" (UID: \"87001341-5a74-4796-a332-6d57b2cf11c9\") " pod="openstack-kuttl-tests/barbican-api-757fb55df8-9qzm2" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.169591 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fpt4j\" (UniqueName: \"kubernetes.io/projected/87001341-5a74-4796-a332-6d57b2cf11c9-kube-api-access-fpt4j\") pod \"barbican-api-757fb55df8-9qzm2\" (UID: \"87001341-5a74-4796-a332-6d57b2cf11c9\") " pod="openstack-kuttl-tests/barbican-api-757fb55df8-9qzm2" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.175744 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/87001341-5a74-4796-a332-6d57b2cf11c9-combined-ca-bundle\") pod \"barbican-api-757fb55df8-9qzm2\" (UID: \"87001341-5a74-4796-a332-6d57b2cf11c9\") " pod="openstack-kuttl-tests/barbican-api-757fb55df8-9qzm2" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.183944 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9399ecb4-d974-4ebd-b695-4690fd4148b2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9399ecb4-d974-4ebd-b695-4690fd4148b2" (UID: "9399ecb4-d974-4ebd-b695-4690fd4148b2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.201475 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9399ecb4-d974-4ebd-b695-4690fd4148b2-config-data" (OuterVolumeSpecName: "config-data") pod "9399ecb4-d974-4ebd-b695-4690fd4148b2" (UID: "9399ecb4-d974-4ebd-b695-4690fd4148b2"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.256152 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9399ecb4-d974-4ebd-b695-4690fd4148b2-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.256209 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9399ecb4-d974-4ebd-b695-4690fd4148b2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.256225 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qsn5h\" (UniqueName: \"kubernetes.io/projected/9399ecb4-d974-4ebd-b695-4690fd4148b2-kube-api-access-qsn5h\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.256237 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9399ecb4-d974-4ebd-b695-4690fd4148b2-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.256248 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9399ecb4-d974-4ebd-b695-4690fd4148b2-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.312646 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-api-757fb55df8-9qzm2" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.327755 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-db-sync-fs4x6" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.331773 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-sync-fs4x6" event={"ID":"8ea04114-b82b-4609-8f7f-a9ff17cb3b57","Type":"ContainerDied","Data":"fa6108a7bbddfa2dedb798ffc1a4fabbdba705651c03d73d73a34373cf69e639"} Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.331827 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fa6108a7bbddfa2dedb798ffc1a4fabbdba705651c03d73d73a34373cf69e639" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.334195 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-v2krf" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.334200 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-v2krf" event={"ID":"23f3fc6a-3d4e-4e04-87c7-c819069b4a89","Type":"ContainerDied","Data":"31a7c56eed1c1d1ea44742f66032094b20676034a762e42d50458576ab81873a"} Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.334297 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="31a7c56eed1c1d1ea44742f66032094b20676034a762e42d50458576ab81873a" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.339051 4558 generic.go:334] "Generic (PLEG): container finished" podID="9399ecb4-d974-4ebd-b695-4690fd4148b2" containerID="aa9d678e952e4cc0fb869c1b13463323d23601460d6a1e76b9034eddee08933b" exitCode=0 Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.339099 4558 generic.go:334] "Generic (PLEG): container finished" podID="9399ecb4-d974-4ebd-b695-4690fd4148b2" containerID="fac678fd674af0bfa9f5c02dc46fd986a3d2785451706905e31ca835bcb7242a" exitCode=143 Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.339133 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"9399ecb4-d974-4ebd-b695-4690fd4148b2","Type":"ContainerDied","Data":"aa9d678e952e4cc0fb869c1b13463323d23601460d6a1e76b9034eddee08933b"} Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.339202 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"9399ecb4-d974-4ebd-b695-4690fd4148b2","Type":"ContainerDied","Data":"fac678fd674af0bfa9f5c02dc46fd986a3d2785451706905e31ca835bcb7242a"} Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.339217 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"9399ecb4-d974-4ebd-b695-4690fd4148b2","Type":"ContainerDied","Data":"f56592ac5c257c3c00c4b138cc71818cb6ac3adda2b0e2263d32abf09218740e"} Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.339239 4558 scope.go:117] "RemoveContainer" containerID="aa9d678e952e4cc0fb869c1b13463323d23601460d6a1e76b9034eddee08933b" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.339483 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.437212 4558 scope.go:117] "RemoveContainer" containerID="fac678fd674af0bfa9f5c02dc46fd986a3d2785451706905e31ca835bcb7242a" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.446560 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.466776 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.473899 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:07:55 crc kubenswrapper[4558]: E0120 17:07:55.474332 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9399ecb4-d974-4ebd-b695-4690fd4148b2" containerName="cinder-api" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.474351 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9399ecb4-d974-4ebd-b695-4690fd4148b2" containerName="cinder-api" Jan 20 17:07:55 crc kubenswrapper[4558]: E0120 17:07:55.474365 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9399ecb4-d974-4ebd-b695-4690fd4148b2" containerName="cinder-api-log" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.474370 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9399ecb4-d974-4ebd-b695-4690fd4148b2" containerName="cinder-api-log" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.474545 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="9399ecb4-d974-4ebd-b695-4690fd4148b2" containerName="cinder-api-log" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.474568 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="9399ecb4-d974-4ebd-b695-4690fd4148b2" containerName="cinder-api" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.475431 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.486664 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-cinder-internal-svc" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.486924 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-api-config-data" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.487032 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-cinder-public-svc" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.487601 4558 scope.go:117] "RemoveContainer" containerID="aa9d678e952e4cc0fb869c1b13463323d23601460d6a1e76b9034eddee08933b" Jan 20 17:07:55 crc kubenswrapper[4558]: E0120 17:07:55.489868 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aa9d678e952e4cc0fb869c1b13463323d23601460d6a1e76b9034eddee08933b\": container with ID starting with aa9d678e952e4cc0fb869c1b13463323d23601460d6a1e76b9034eddee08933b not found: ID does not exist" containerID="aa9d678e952e4cc0fb869c1b13463323d23601460d6a1e76b9034eddee08933b" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.489902 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aa9d678e952e4cc0fb869c1b13463323d23601460d6a1e76b9034eddee08933b"} err="failed to get container status \"aa9d678e952e4cc0fb869c1b13463323d23601460d6a1e76b9034eddee08933b\": rpc error: code = NotFound desc = could not find container \"aa9d678e952e4cc0fb869c1b13463323d23601460d6a1e76b9034eddee08933b\": container with ID starting with aa9d678e952e4cc0fb869c1b13463323d23601460d6a1e76b9034eddee08933b not found: ID does not exist" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.489929 4558 scope.go:117] "RemoveContainer" containerID="fac678fd674af0bfa9f5c02dc46fd986a3d2785451706905e31ca835bcb7242a" Jan 20 17:07:55 crc kubenswrapper[4558]: E0120 17:07:55.490889 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fac678fd674af0bfa9f5c02dc46fd986a3d2785451706905e31ca835bcb7242a\": container with ID starting with fac678fd674af0bfa9f5c02dc46fd986a3d2785451706905e31ca835bcb7242a not found: ID does not exist" containerID="fac678fd674af0bfa9f5c02dc46fd986a3d2785451706905e31ca835bcb7242a" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.490918 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fac678fd674af0bfa9f5c02dc46fd986a3d2785451706905e31ca835bcb7242a"} err="failed to get container status \"fac678fd674af0bfa9f5c02dc46fd986a3d2785451706905e31ca835bcb7242a\": rpc error: code = NotFound desc = could not find container \"fac678fd674af0bfa9f5c02dc46fd986a3d2785451706905e31ca835bcb7242a\": container with ID starting with fac678fd674af0bfa9f5c02dc46fd986a3d2785451706905e31ca835bcb7242a not found: ID does not exist" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.490934 4558 scope.go:117] "RemoveContainer" containerID="aa9d678e952e4cc0fb869c1b13463323d23601460d6a1e76b9034eddee08933b" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.491115 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aa9d678e952e4cc0fb869c1b13463323d23601460d6a1e76b9034eddee08933b"} err="failed to get container status 
\"aa9d678e952e4cc0fb869c1b13463323d23601460d6a1e76b9034eddee08933b\": rpc error: code = NotFound desc = could not find container \"aa9d678e952e4cc0fb869c1b13463323d23601460d6a1e76b9034eddee08933b\": container with ID starting with aa9d678e952e4cc0fb869c1b13463323d23601460d6a1e76b9034eddee08933b not found: ID does not exist" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.491129 4558 scope.go:117] "RemoveContainer" containerID="fac678fd674af0bfa9f5c02dc46fd986a3d2785451706905e31ca835bcb7242a" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.491444 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fac678fd674af0bfa9f5c02dc46fd986a3d2785451706905e31ca835bcb7242a"} err="failed to get container status \"fac678fd674af0bfa9f5c02dc46fd986a3d2785451706905e31ca835bcb7242a\": rpc error: code = NotFound desc = could not find container \"fac678fd674af0bfa9f5c02dc46fd986a3d2785451706905e31ca835bcb7242a\": container with ID starting with fac678fd674af0bfa9f5c02dc46fd986a3d2785451706905e31ca835bcb7242a not found: ID does not exist" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.509533 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.537332 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-679bbddb99-dh2h2"] Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.538476 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-679bbddb99-dh2h2" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.542284 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.542520 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-keystone-dockercfg-lcqb5" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.542642 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-scripts" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.542760 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-config-data" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.542910 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-keystone-internal-svc" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.550766 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-keystone-public-svc" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.553600 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-679bbddb99-dh2h2"] Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.596237 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-57bd5796c7-h85wn"] Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.598013 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-57bd5796c7-h85wn" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.603500 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-config" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.606112 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-httpd-config" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.606288 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-neutron-ovndbs" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.606424 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-neutron-dockercfg-tp65d" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.675539 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ee147895-0bad-4dc1-a348-1be3348a7180-logs\") pod \"cinder-api-0\" (UID: \"ee147895-0bad-4dc1-a348-1be3348a7180\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.675685 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ee147895-0bad-4dc1-a348-1be3348a7180-etc-machine-id\") pod \"cinder-api-0\" (UID: \"ee147895-0bad-4dc1-a348-1be3348a7180\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.675734 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xxj9h\" (UniqueName: \"kubernetes.io/projected/c3c7801f-4a15-44ce-8732-4f93b819a7c5-kube-api-access-xxj9h\") pod \"keystone-679bbddb99-dh2h2\" (UID: \"c3c7801f-4a15-44ce-8732-4f93b819a7c5\") " pod="openstack-kuttl-tests/keystone-679bbddb99-dh2h2" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.675783 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c3c7801f-4a15-44ce-8732-4f93b819a7c5-config-data\") pod \"keystone-679bbddb99-dh2h2\" (UID: \"c3c7801f-4a15-44ce-8732-4f93b819a7c5\") " pod="openstack-kuttl-tests/keystone-679bbddb99-dh2h2" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.675824 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ee147895-0bad-4dc1-a348-1be3348a7180-config-data-custom\") pod \"cinder-api-0\" (UID: \"ee147895-0bad-4dc1-a348-1be3348a7180\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.675891 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c3c7801f-4a15-44ce-8732-4f93b819a7c5-public-tls-certs\") pod \"keystone-679bbddb99-dh2h2\" (UID: \"c3c7801f-4a15-44ce-8732-4f93b819a7c5\") " pod="openstack-kuttl-tests/keystone-679bbddb99-dh2h2" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.675925 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ee147895-0bad-4dc1-a348-1be3348a7180-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"ee147895-0bad-4dc1-a348-1be3348a7180\") " 
pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.677158 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee147895-0bad-4dc1-a348-1be3348a7180-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"ee147895-0bad-4dc1-a348-1be3348a7180\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.677604 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/c3c7801f-4a15-44ce-8732-4f93b819a7c5-fernet-keys\") pod \"keystone-679bbddb99-dh2h2\" (UID: \"c3c7801f-4a15-44ce-8732-4f93b819a7c5\") " pod="openstack-kuttl-tests/keystone-679bbddb99-dh2h2" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.677625 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ee147895-0bad-4dc1-a348-1be3348a7180-config-data\") pod \"cinder-api-0\" (UID: \"ee147895-0bad-4dc1-a348-1be3348a7180\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.677686 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c3c7801f-4a15-44ce-8732-4f93b819a7c5-internal-tls-certs\") pod \"keystone-679bbddb99-dh2h2\" (UID: \"c3c7801f-4a15-44ce-8732-4f93b819a7c5\") " pod="openstack-kuttl-tests/keystone-679bbddb99-dh2h2" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.677721 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ee147895-0bad-4dc1-a348-1be3348a7180-public-tls-certs\") pod \"cinder-api-0\" (UID: \"ee147895-0bad-4dc1-a348-1be3348a7180\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.677764 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ee147895-0bad-4dc1-a348-1be3348a7180-scripts\") pod \"cinder-api-0\" (UID: \"ee147895-0bad-4dc1-a348-1be3348a7180\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.677871 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3c7801f-4a15-44ce-8732-4f93b819a7c5-combined-ca-bundle\") pod \"keystone-679bbddb99-dh2h2\" (UID: \"c3c7801f-4a15-44ce-8732-4f93b819a7c5\") " pod="openstack-kuttl-tests/keystone-679bbddb99-dh2h2" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.677944 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c3c7801f-4a15-44ce-8732-4f93b819a7c5-scripts\") pod \"keystone-679bbddb99-dh2h2\" (UID: \"c3c7801f-4a15-44ce-8732-4f93b819a7c5\") " pod="openstack-kuttl-tests/keystone-679bbddb99-dh2h2" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.677979 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dpg58\" (UniqueName: \"kubernetes.io/projected/ee147895-0bad-4dc1-a348-1be3348a7180-kube-api-access-dpg58\") pod \"cinder-api-0\" (UID: 
\"ee147895-0bad-4dc1-a348-1be3348a7180\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.678073 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/c3c7801f-4a15-44ce-8732-4f93b819a7c5-credential-keys\") pod \"keystone-679bbddb99-dh2h2\" (UID: \"c3c7801f-4a15-44ce-8732-4f93b819a7c5\") " pod="openstack-kuttl-tests/keystone-679bbddb99-dh2h2" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.682465 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-57bd5796c7-h85wn"] Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.779918 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee147895-0bad-4dc1-a348-1be3348a7180-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"ee147895-0bad-4dc1-a348-1be3348a7180\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.779970 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/e341749a-5099-4cd4-9c62-49231106d844-config\") pod \"neutron-57bd5796c7-h85wn\" (UID: \"e341749a-5099-4cd4-9c62-49231106d844\") " pod="openstack-kuttl-tests/neutron-57bd5796c7-h85wn" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.779993 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/c3c7801f-4a15-44ce-8732-4f93b819a7c5-fernet-keys\") pod \"keystone-679bbddb99-dh2h2\" (UID: \"c3c7801f-4a15-44ce-8732-4f93b819a7c5\") " pod="openstack-kuttl-tests/keystone-679bbddb99-dh2h2" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.780012 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ee147895-0bad-4dc1-a348-1be3348a7180-config-data\") pod \"cinder-api-0\" (UID: \"ee147895-0bad-4dc1-a348-1be3348a7180\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.780037 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c3c7801f-4a15-44ce-8732-4f93b819a7c5-internal-tls-certs\") pod \"keystone-679bbddb99-dh2h2\" (UID: \"c3c7801f-4a15-44ce-8732-4f93b819a7c5\") " pod="openstack-kuttl-tests/keystone-679bbddb99-dh2h2" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.780058 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ee147895-0bad-4dc1-a348-1be3348a7180-public-tls-certs\") pod \"cinder-api-0\" (UID: \"ee147895-0bad-4dc1-a348-1be3348a7180\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.780076 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e341749a-5099-4cd4-9c62-49231106d844-combined-ca-bundle\") pod \"neutron-57bd5796c7-h85wn\" (UID: \"e341749a-5099-4cd4-9c62-49231106d844\") " pod="openstack-kuttl-tests/neutron-57bd5796c7-h85wn" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.780095 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/ee147895-0bad-4dc1-a348-1be3348a7180-scripts\") pod \"cinder-api-0\" (UID: \"ee147895-0bad-4dc1-a348-1be3348a7180\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.780116 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/e341749a-5099-4cd4-9c62-49231106d844-httpd-config\") pod \"neutron-57bd5796c7-h85wn\" (UID: \"e341749a-5099-4cd4-9c62-49231106d844\") " pod="openstack-kuttl-tests/neutron-57bd5796c7-h85wn" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.780144 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3c7801f-4a15-44ce-8732-4f93b819a7c5-combined-ca-bundle\") pod \"keystone-679bbddb99-dh2h2\" (UID: \"c3c7801f-4a15-44ce-8732-4f93b819a7c5\") " pod="openstack-kuttl-tests/keystone-679bbddb99-dh2h2" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.780198 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c3c7801f-4a15-44ce-8732-4f93b819a7c5-scripts\") pod \"keystone-679bbddb99-dh2h2\" (UID: \"c3c7801f-4a15-44ce-8732-4f93b819a7c5\") " pod="openstack-kuttl-tests/keystone-679bbddb99-dh2h2" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.780219 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dpg58\" (UniqueName: \"kubernetes.io/projected/ee147895-0bad-4dc1-a348-1be3348a7180-kube-api-access-dpg58\") pod \"cinder-api-0\" (UID: \"ee147895-0bad-4dc1-a348-1be3348a7180\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.780248 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/c3c7801f-4a15-44ce-8732-4f93b819a7c5-credential-keys\") pod \"keystone-679bbddb99-dh2h2\" (UID: \"c3c7801f-4a15-44ce-8732-4f93b819a7c5\") " pod="openstack-kuttl-tests/keystone-679bbddb99-dh2h2" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.780274 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ee147895-0bad-4dc1-a348-1be3348a7180-logs\") pod \"cinder-api-0\" (UID: \"ee147895-0bad-4dc1-a348-1be3348a7180\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.780301 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ee147895-0bad-4dc1-a348-1be3348a7180-etc-machine-id\") pod \"cinder-api-0\" (UID: \"ee147895-0bad-4dc1-a348-1be3348a7180\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.780340 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xxj9h\" (UniqueName: \"kubernetes.io/projected/c3c7801f-4a15-44ce-8732-4f93b819a7c5-kube-api-access-xxj9h\") pod \"keystone-679bbddb99-dh2h2\" (UID: \"c3c7801f-4a15-44ce-8732-4f93b819a7c5\") " pod="openstack-kuttl-tests/keystone-679bbddb99-dh2h2" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.780361 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c3c7801f-4a15-44ce-8732-4f93b819a7c5-config-data\") pod \"keystone-679bbddb99-dh2h2\" 
(UID: \"c3c7801f-4a15-44ce-8732-4f93b819a7c5\") " pod="openstack-kuttl-tests/keystone-679bbddb99-dh2h2" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.780381 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ee147895-0bad-4dc1-a348-1be3348a7180-config-data-custom\") pod \"cinder-api-0\" (UID: \"ee147895-0bad-4dc1-a348-1be3348a7180\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.780400 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k5bn6\" (UniqueName: \"kubernetes.io/projected/e341749a-5099-4cd4-9c62-49231106d844-kube-api-access-k5bn6\") pod \"neutron-57bd5796c7-h85wn\" (UID: \"e341749a-5099-4cd4-9c62-49231106d844\") " pod="openstack-kuttl-tests/neutron-57bd5796c7-h85wn" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.780420 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/e341749a-5099-4cd4-9c62-49231106d844-ovndb-tls-certs\") pod \"neutron-57bd5796c7-h85wn\" (UID: \"e341749a-5099-4cd4-9c62-49231106d844\") " pod="openstack-kuttl-tests/neutron-57bd5796c7-h85wn" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.780443 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c3c7801f-4a15-44ce-8732-4f93b819a7c5-public-tls-certs\") pod \"keystone-679bbddb99-dh2h2\" (UID: \"c3c7801f-4a15-44ce-8732-4f93b819a7c5\") " pod="openstack-kuttl-tests/keystone-679bbddb99-dh2h2" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.780464 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ee147895-0bad-4dc1-a348-1be3348a7180-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"ee147895-0bad-4dc1-a348-1be3348a7180\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.782879 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ee147895-0bad-4dc1-a348-1be3348a7180-logs\") pod \"cinder-api-0\" (UID: \"ee147895-0bad-4dc1-a348-1be3348a7180\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.782949 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ee147895-0bad-4dc1-a348-1be3348a7180-etc-machine-id\") pod \"cinder-api-0\" (UID: \"ee147895-0bad-4dc1-a348-1be3348a7180\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.795983 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c3c7801f-4a15-44ce-8732-4f93b819a7c5-internal-tls-certs\") pod \"keystone-679bbddb99-dh2h2\" (UID: \"c3c7801f-4a15-44ce-8732-4f93b819a7c5\") " pod="openstack-kuttl-tests/keystone-679bbddb99-dh2h2" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.803697 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c3c7801f-4a15-44ce-8732-4f93b819a7c5-config-data\") pod \"keystone-679bbddb99-dh2h2\" (UID: \"c3c7801f-4a15-44ce-8732-4f93b819a7c5\") " 
pod="openstack-kuttl-tests/keystone-679bbddb99-dh2h2" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.804092 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/c3c7801f-4a15-44ce-8732-4f93b819a7c5-fernet-keys\") pod \"keystone-679bbddb99-dh2h2\" (UID: \"c3c7801f-4a15-44ce-8732-4f93b819a7c5\") " pod="openstack-kuttl-tests/keystone-679bbddb99-dh2h2" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.804312 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/c3c7801f-4a15-44ce-8732-4f93b819a7c5-credential-keys\") pod \"keystone-679bbddb99-dh2h2\" (UID: \"c3c7801f-4a15-44ce-8732-4f93b819a7c5\") " pod="openstack-kuttl-tests/keystone-679bbddb99-dh2h2" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.804428 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ee147895-0bad-4dc1-a348-1be3348a7180-scripts\") pod \"cinder-api-0\" (UID: \"ee147895-0bad-4dc1-a348-1be3348a7180\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.804477 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ee147895-0bad-4dc1-a348-1be3348a7180-config-data\") pod \"cinder-api-0\" (UID: \"ee147895-0bad-4dc1-a348-1be3348a7180\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.805626 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3c7801f-4a15-44ce-8732-4f93b819a7c5-combined-ca-bundle\") pod \"keystone-679bbddb99-dh2h2\" (UID: \"c3c7801f-4a15-44ce-8732-4f93b819a7c5\") " pod="openstack-kuttl-tests/keystone-679bbddb99-dh2h2" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.805699 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ee147895-0bad-4dc1-a348-1be3348a7180-config-data-custom\") pod \"cinder-api-0\" (UID: \"ee147895-0bad-4dc1-a348-1be3348a7180\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.805771 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee147895-0bad-4dc1-a348-1be3348a7180-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"ee147895-0bad-4dc1-a348-1be3348a7180\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.806101 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xxj9h\" (UniqueName: \"kubernetes.io/projected/c3c7801f-4a15-44ce-8732-4f93b819a7c5-kube-api-access-xxj9h\") pod \"keystone-679bbddb99-dh2h2\" (UID: \"c3c7801f-4a15-44ce-8732-4f93b819a7c5\") " pod="openstack-kuttl-tests/keystone-679bbddb99-dh2h2" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.806934 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ee147895-0bad-4dc1-a348-1be3348a7180-public-tls-certs\") pod \"cinder-api-0\" (UID: \"ee147895-0bad-4dc1-a348-1be3348a7180\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.807470 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c3c7801f-4a15-44ce-8732-4f93b819a7c5-public-tls-certs\") pod \"keystone-679bbddb99-dh2h2\" (UID: \"c3c7801f-4a15-44ce-8732-4f93b819a7c5\") " pod="openstack-kuttl-tests/keystone-679bbddb99-dh2h2" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.808083 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ee147895-0bad-4dc1-a348-1be3348a7180-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"ee147895-0bad-4dc1-a348-1be3348a7180\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.808513 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c3c7801f-4a15-44ce-8732-4f93b819a7c5-scripts\") pod \"keystone-679bbddb99-dh2h2\" (UID: \"c3c7801f-4a15-44ce-8732-4f93b819a7c5\") " pod="openstack-kuttl-tests/keystone-679bbddb99-dh2h2" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.812338 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dpg58\" (UniqueName: \"kubernetes.io/projected/ee147895-0bad-4dc1-a348-1be3348a7180-kube-api-access-dpg58\") pod \"cinder-api-0\" (UID: \"ee147895-0bad-4dc1-a348-1be3348a7180\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.837282 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.887054 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e341749a-5099-4cd4-9c62-49231106d844-combined-ca-bundle\") pod \"neutron-57bd5796c7-h85wn\" (UID: \"e341749a-5099-4cd4-9c62-49231106d844\") " pod="openstack-kuttl-tests/neutron-57bd5796c7-h85wn" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.887115 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/e341749a-5099-4cd4-9c62-49231106d844-httpd-config\") pod \"neutron-57bd5796c7-h85wn\" (UID: \"e341749a-5099-4cd4-9c62-49231106d844\") " pod="openstack-kuttl-tests/neutron-57bd5796c7-h85wn" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.887279 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k5bn6\" (UniqueName: \"kubernetes.io/projected/e341749a-5099-4cd4-9c62-49231106d844-kube-api-access-k5bn6\") pod \"neutron-57bd5796c7-h85wn\" (UID: \"e341749a-5099-4cd4-9c62-49231106d844\") " pod="openstack-kuttl-tests/neutron-57bd5796c7-h85wn" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.887308 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/e341749a-5099-4cd4-9c62-49231106d844-ovndb-tls-certs\") pod \"neutron-57bd5796c7-h85wn\" (UID: \"e341749a-5099-4cd4-9c62-49231106d844\") " pod="openstack-kuttl-tests/neutron-57bd5796c7-h85wn" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.887386 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/e341749a-5099-4cd4-9c62-49231106d844-config\") pod \"neutron-57bd5796c7-h85wn\" (UID: \"e341749a-5099-4cd4-9c62-49231106d844\") " pod="openstack-kuttl-tests/neutron-57bd5796c7-h85wn" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 
17:07:55.890845 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e341749a-5099-4cd4-9c62-49231106d844-combined-ca-bundle\") pod \"neutron-57bd5796c7-h85wn\" (UID: \"e341749a-5099-4cd4-9c62-49231106d844\") " pod="openstack-kuttl-tests/neutron-57bd5796c7-h85wn" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.891437 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/e341749a-5099-4cd4-9c62-49231106d844-httpd-config\") pod \"neutron-57bd5796c7-h85wn\" (UID: \"e341749a-5099-4cd4-9c62-49231106d844\") " pod="openstack-kuttl-tests/neutron-57bd5796c7-h85wn" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.894041 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/e341749a-5099-4cd4-9c62-49231106d844-ovndb-tls-certs\") pod \"neutron-57bd5796c7-h85wn\" (UID: \"e341749a-5099-4cd4-9c62-49231106d844\") " pod="openstack-kuttl-tests/neutron-57bd5796c7-h85wn" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.894751 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/e341749a-5099-4cd4-9c62-49231106d844-config\") pod \"neutron-57bd5796c7-h85wn\" (UID: \"e341749a-5099-4cd4-9c62-49231106d844\") " pod="openstack-kuttl-tests/neutron-57bd5796c7-h85wn" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.910980 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k5bn6\" (UniqueName: \"kubernetes.io/projected/e341749a-5099-4cd4-9c62-49231106d844-kube-api-access-k5bn6\") pod \"neutron-57bd5796c7-h85wn\" (UID: \"e341749a-5099-4cd4-9c62-49231106d844\") " pod="openstack-kuttl-tests/neutron-57bd5796c7-h85wn" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.920423 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-679bbddb99-dh2h2" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.951635 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-57bd5796c7-h85wn" Jan 20 17:07:55 crc kubenswrapper[4558]: I0120 17:07:55.968146 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-api-757fb55df8-9qzm2"] Jan 20 17:07:56 crc kubenswrapper[4558]: I0120 17:07:56.246600 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:07:56 crc kubenswrapper[4558]: W0120 17:07:56.248909 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podee147895_0bad_4dc1_a348_1be3348a7180.slice/crio-c8c446e659096745b9b7275e3a8ca45d6fa69e352b0452f3110e64f6ec71e609 WatchSource:0}: Error finding container c8c446e659096745b9b7275e3a8ca45d6fa69e352b0452f3110e64f6ec71e609: Status 404 returned error can't find the container with id c8c446e659096745b9b7275e3a8ca45d6fa69e352b0452f3110e64f6ec71e609 Jan 20 17:07:56 crc kubenswrapper[4558]: I0120 17:07:56.297315 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:07:56 crc kubenswrapper[4558]: I0120 17:07:56.298108 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:07:56 crc kubenswrapper[4558]: I0120 17:07:56.308009 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/barbican-api-5ffbfb44-sbdwx" Jan 20 17:07:56 crc kubenswrapper[4558]: I0120 17:07:56.309093 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:07:56 crc kubenswrapper[4558]: I0120 17:07:56.309130 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:07:56 crc kubenswrapper[4558]: I0120 17:07:56.339712 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:07:56 crc kubenswrapper[4558]: I0120 17:07:56.340127 4558 scope.go:117] "RemoveContainer" containerID="30e334d21be63f799a062bbdac433a435c365279315262bd4f809fb8f1cd4b61" Jan 20 17:07:56 crc kubenswrapper[4558]: I0120 17:07:56.356694 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:07:56 crc kubenswrapper[4558]: I0120 17:07:56.357746 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:07:56 crc kubenswrapper[4558]: I0120 17:07:56.366312 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:07:56 crc kubenswrapper[4558]: I0120 17:07:56.384737 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-757fb55df8-9qzm2" event={"ID":"87001341-5a74-4796-a332-6d57b2cf11c9","Type":"ContainerStarted","Data":"c38ffae2f71dd677618391bc8fe1b2be1a66a0d46ff78c2fe5bdd127277f6054"} Jan 20 17:07:56 crc kubenswrapper[4558]: I0120 17:07:56.384776 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-757fb55df8-9qzm2" event={"ID":"87001341-5a74-4796-a332-6d57b2cf11c9","Type":"ContainerStarted","Data":"b6736c7fbe940026d7050860f63ad8321552cca00db656609ca3e73cc1f72a4f"} 
Jan 20 17:07:56 crc kubenswrapper[4558]: I0120 17:07:56.409338 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"90d455f2-55fa-4028-85a0-346675b2194d","Type":"ContainerStarted","Data":"8083743d7d6d37e83f1bfb8607cb7050961576fa4bb93c383d46b5f5d4700062"} Jan 20 17:07:56 crc kubenswrapper[4558]: I0120 17:07:56.410240 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:07:56 crc kubenswrapper[4558]: I0120 17:07:56.421280 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"ee147895-0bad-4dc1-a348-1be3348a7180","Type":"ContainerStarted","Data":"c8c446e659096745b9b7275e3a8ca45d6fa69e352b0452f3110e64f6ec71e609"} Jan 20 17:07:56 crc kubenswrapper[4558]: I0120 17:07:56.421310 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:07:56 crc kubenswrapper[4558]: I0120 17:07:56.421330 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:07:56 crc kubenswrapper[4558]: I0120 17:07:56.421338 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:07:56 crc kubenswrapper[4558]: I0120 17:07:56.421520 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:07:56 crc kubenswrapper[4558]: I0120 17:07:56.430418 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-57bd5796c7-h85wn"] Jan 20 17:07:56 crc kubenswrapper[4558]: I0120 17:07:56.445498 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=1.63356785 podStartE2EDuration="7.445476595s" podCreationTimestamp="2026-01-20 17:07:49 +0000 UTC" firstStartedPulling="2026-01-20 17:07:50.23339458 +0000 UTC m=+1563.993732548" lastFinishedPulling="2026-01-20 17:07:56.045303326 +0000 UTC m=+1569.805641293" observedRunningTime="2026-01-20 17:07:56.437785172 +0000 UTC m=+1570.198123139" watchObservedRunningTime="2026-01-20 17:07:56.445476595 +0000 UTC m=+1570.205814562" Jan 20 17:07:56 crc kubenswrapper[4558]: I0120 17:07:56.448298 4558 scope.go:117] "RemoveContainer" containerID="90086f287523470f86b4f198e93742684b994c6b5d64645ae662556bcec04dd6" Jan 20 17:07:56 crc kubenswrapper[4558]: W0120 17:07:56.456335 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode341749a_5099_4cd4_9c62_49231106d844.slice/crio-c48d0404d532b820ea07e23cff8cfff5365950283202a6aed24cd27369498a3e WatchSource:0}: Error finding container c48d0404d532b820ea07e23cff8cfff5365950283202a6aed24cd27369498a3e: Status 404 returned error can't find the container with id c48d0404d532b820ea07e23cff8cfff5365950283202a6aed24cd27369498a3e Jan 20 17:07:56 crc kubenswrapper[4558]: I0120 17:07:56.484225 4558 scope.go:117] "RemoveContainer" containerID="185a881c6adc45360908ee6b237afe1d3e54a3888c07e7e92e0b666fe25a7f54" Jan 20 17:07:56 crc kubenswrapper[4558]: I0120 17:07:56.489134 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-679bbddb99-dh2h2"] Jan 20 17:07:56 crc kubenswrapper[4558]: I0120 17:07:56.508449 4558 scope.go:117] "RemoveContainer" 
containerID="590af9e8b321dd601e68895bca62499efbd67321a304b402ec4829504f95989f" Jan 20 17:07:56 crc kubenswrapper[4558]: I0120 17:07:56.582203 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9399ecb4-d974-4ebd-b695-4690fd4148b2" path="/var/lib/kubelet/pods/9399ecb4-d974-4ebd-b695-4690fd4148b2/volumes" Jan 20 17:07:56 crc kubenswrapper[4558]: I0120 17:07:56.638366 4558 scope.go:117] "RemoveContainer" containerID="c114c7e9df385088d0043b3929229f461356f434bf7a638790b4aa8b5de9a942" Jan 20 17:07:56 crc kubenswrapper[4558]: I0120 17:07:56.731630 4558 scope.go:117] "RemoveContainer" containerID="945d6cf4d7e0ae9b1db59237517dbc0df3d7e1a5de00383bf5d52e1df143d4d4" Jan 20 17:07:56 crc kubenswrapper[4558]: I0120 17:07:56.972059 4558 scope.go:117] "RemoveContainer" containerID="d5c274045b643e68cde6efde01b4bc5498d5fc7ca7cc737f8b037b3d7603c41b" Jan 20 17:07:57 crc kubenswrapper[4558]: I0120 17:07:57.109352 4558 scope.go:117] "RemoveContainer" containerID="f6c131a484e6463785eddbec476157d62fb901a314f15a2feebc53547803030e" Jan 20 17:07:57 crc kubenswrapper[4558]: I0120 17:07:57.193057 4558 scope.go:117] "RemoveContainer" containerID="4f9b85a4d21c95ef631ed6bcbf55d246c6b1aa708349dc4c85de6b2d2a17fd09" Jan 20 17:07:57 crc kubenswrapper[4558]: I0120 17:07:57.440661 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-57bd5796c7-h85wn" event={"ID":"e341749a-5099-4cd4-9c62-49231106d844","Type":"ContainerStarted","Data":"b2a69304487e5bb82b84e284bd744a49a17feb7082f8f7029ee3e0786bd5f530"} Jan 20 17:07:57 crc kubenswrapper[4558]: I0120 17:07:57.440708 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-57bd5796c7-h85wn" event={"ID":"e341749a-5099-4cd4-9c62-49231106d844","Type":"ContainerStarted","Data":"69e5cfa78cd1d6c40c39fac464ccf3b24fb3e23f3a0c1d4ed3d888269761249d"} Jan 20 17:07:57 crc kubenswrapper[4558]: I0120 17:07:57.440721 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-57bd5796c7-h85wn" event={"ID":"e341749a-5099-4cd4-9c62-49231106d844","Type":"ContainerStarted","Data":"c48d0404d532b820ea07e23cff8cfff5365950283202a6aed24cd27369498a3e"} Jan 20 17:07:57 crc kubenswrapper[4558]: I0120 17:07:57.441257 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/neutron-57bd5796c7-h85wn" Jan 20 17:07:57 crc kubenswrapper[4558]: I0120 17:07:57.478963 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/neutron-57bd5796c7-h85wn" podStartSLOduration=2.478942556 podStartE2EDuration="2.478942556s" podCreationTimestamp="2026-01-20 17:07:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:07:57.477863146 +0000 UTC m=+1571.238201103" watchObservedRunningTime="2026-01-20 17:07:57.478942556 +0000 UTC m=+1571.239280524" Jan 20 17:07:57 crc kubenswrapper[4558]: I0120 17:07:57.492001 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-679bbddb99-dh2h2" event={"ID":"c3c7801f-4a15-44ce-8732-4f93b819a7c5","Type":"ContainerStarted","Data":"f9f39e4d701bf54b38ab7a0e52db29f8a7cc6443a7b4a92c71351d6317512b7a"} Jan 20 17:07:57 crc kubenswrapper[4558]: I0120 17:07:57.492054 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-679bbddb99-dh2h2" 
event={"ID":"c3c7801f-4a15-44ce-8732-4f93b819a7c5","Type":"ContainerStarted","Data":"d3db55ecc908dffebec3940bd1f575c620640dab9324b5169d97c783cc0e4108"} Jan 20 17:07:57 crc kubenswrapper[4558]: I0120 17:07:57.501204 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/keystone-679bbddb99-dh2h2" Jan 20 17:07:57 crc kubenswrapper[4558]: I0120 17:07:57.508452 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"ee147895-0bad-4dc1-a348-1be3348a7180","Type":"ContainerStarted","Data":"0376c421f7190fd015b8795150f665866c227b4ec1aa2af3ff3ba419848ca268"} Jan 20 17:07:57 crc kubenswrapper[4558]: I0120 17:07:57.517091 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-757fb55df8-9qzm2" event={"ID":"87001341-5a74-4796-a332-6d57b2cf11c9","Type":"ContainerStarted","Data":"67a4a2b743f3eaffa244dd8f012af0a3fdc18477dc3e1252ee8aced329b34fda"} Jan 20 17:07:57 crc kubenswrapper[4558]: I0120 17:07:57.518343 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/barbican-api-757fb55df8-9qzm2" Jan 20 17:07:57 crc kubenswrapper[4558]: I0120 17:07:57.518379 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/barbican-api-757fb55df8-9qzm2" Jan 20 17:07:57 crc kubenswrapper[4558]: I0120 17:07:57.563188 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/keystone-679bbddb99-dh2h2" podStartSLOduration=2.562147858 podStartE2EDuration="2.562147858s" podCreationTimestamp="2026-01-20 17:07:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:07:57.516713813 +0000 UTC m=+1571.277051780" watchObservedRunningTime="2026-01-20 17:07:57.562147858 +0000 UTC m=+1571.322485825" Jan 20 17:07:57 crc kubenswrapper[4558]: I0120 17:07:57.581410 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-api-757fb55df8-9qzm2" podStartSLOduration=3.581393158 podStartE2EDuration="3.581393158s" podCreationTimestamp="2026-01-20 17:07:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:07:57.54190871 +0000 UTC m=+1571.302246676" watchObservedRunningTime="2026-01-20 17:07:57.581393158 +0000 UTC m=+1571.341731124" Jan 20 17:07:58 crc kubenswrapper[4558]: E0120 17:07:58.217186 4558 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4fb0c521_d465_47b9_b859_199f53143dca.slice/crio-720cf8a66b7cad8587f8ef26f04899153239b9d5f060876b68844c1340f9e7ba\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4fb0c521_d465_47b9_b859_199f53143dca.slice\": RecentStats: unable to find data in memory cache]" Jan 20 17:07:58 crc kubenswrapper[4558]: I0120 17:07:58.534274 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"ee147895-0bad-4dc1-a348-1be3348a7180","Type":"ContainerStarted","Data":"8465d6a9372429cd9148909a838d7378903121f9ff9f0fb73700c98d620d85b1"} Jan 20 17:07:58 crc kubenswrapper[4558]: I0120 17:07:58.534668 4558 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 20 
17:07:58 crc kubenswrapper[4558]: I0120 17:07:58.534694 4558 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 20 17:07:58 crc kubenswrapper[4558]: I0120 17:07:58.552266 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/cinder-api-0" podStartSLOduration=3.552249536 podStartE2EDuration="3.552249536s" podCreationTimestamp="2026-01-20 17:07:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:07:58.549750377 +0000 UTC m=+1572.310088345" watchObservedRunningTime="2026-01-20 17:07:58.552249536 +0000 UTC m=+1572.312587503" Jan 20 17:07:58 crc kubenswrapper[4558]: I0120 17:07:58.556894 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:07:58 crc kubenswrapper[4558]: I0120 17:07:58.564559 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:07:58 crc kubenswrapper[4558]: I0120 17:07:58.796916 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:07:58 crc kubenswrapper[4558]: I0120 17:07:58.797060 4558 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 20 17:07:58 crc kubenswrapper[4558]: I0120 17:07:58.879659 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:07:58 crc kubenswrapper[4558]: I0120 17:07:58.990690 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-859ff747c8-5c47j"] Jan 20 17:07:58 crc kubenswrapper[4558]: I0120 17:07:58.992127 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-859ff747c8-5c47j" Jan 20 17:07:58 crc kubenswrapper[4558]: I0120 17:07:58.998458 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-neutron-internal-svc" Jan 20 17:07:58 crc kubenswrapper[4558]: I0120 17:07:58.998661 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-neutron-public-svc" Jan 20 17:07:59 crc kubenswrapper[4558]: I0120 17:07:59.014270 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-859ff747c8-5c47j"] Jan 20 17:07:59 crc kubenswrapper[4558]: I0120 17:07:59.073365 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f5d75d5a-4202-477d-97a5-8d92bcdd13b9-internal-tls-certs\") pod \"neutron-859ff747c8-5c47j\" (UID: \"f5d75d5a-4202-477d-97a5-8d92bcdd13b9\") " pod="openstack-kuttl-tests/neutron-859ff747c8-5c47j" Jan 20 17:07:59 crc kubenswrapper[4558]: I0120 17:07:59.073420 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fr6vd\" (UniqueName: \"kubernetes.io/projected/f5d75d5a-4202-477d-97a5-8d92bcdd13b9-kube-api-access-fr6vd\") pod \"neutron-859ff747c8-5c47j\" (UID: \"f5d75d5a-4202-477d-97a5-8d92bcdd13b9\") " pod="openstack-kuttl-tests/neutron-859ff747c8-5c47j" Jan 20 17:07:59 crc kubenswrapper[4558]: I0120 17:07:59.073443 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/f5d75d5a-4202-477d-97a5-8d92bcdd13b9-config\") pod \"neutron-859ff747c8-5c47j\" (UID: \"f5d75d5a-4202-477d-97a5-8d92bcdd13b9\") " pod="openstack-kuttl-tests/neutron-859ff747c8-5c47j" Jan 20 17:07:59 crc kubenswrapper[4558]: I0120 17:07:59.073483 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/f5d75d5a-4202-477d-97a5-8d92bcdd13b9-ovndb-tls-certs\") pod \"neutron-859ff747c8-5c47j\" (UID: \"f5d75d5a-4202-477d-97a5-8d92bcdd13b9\") " pod="openstack-kuttl-tests/neutron-859ff747c8-5c47j" Jan 20 17:07:59 crc kubenswrapper[4558]: I0120 17:07:59.073500 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5d75d5a-4202-477d-97a5-8d92bcdd13b9-combined-ca-bundle\") pod \"neutron-859ff747c8-5c47j\" (UID: \"f5d75d5a-4202-477d-97a5-8d92bcdd13b9\") " pod="openstack-kuttl-tests/neutron-859ff747c8-5c47j" Jan 20 17:07:59 crc kubenswrapper[4558]: I0120 17:07:59.073529 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f5d75d5a-4202-477d-97a5-8d92bcdd13b9-public-tls-certs\") pod \"neutron-859ff747c8-5c47j\" (UID: \"f5d75d5a-4202-477d-97a5-8d92bcdd13b9\") " pod="openstack-kuttl-tests/neutron-859ff747c8-5c47j" Jan 20 17:07:59 crc kubenswrapper[4558]: I0120 17:07:59.073556 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/f5d75d5a-4202-477d-97a5-8d92bcdd13b9-httpd-config\") pod \"neutron-859ff747c8-5c47j\" (UID: \"f5d75d5a-4202-477d-97a5-8d92bcdd13b9\") " pod="openstack-kuttl-tests/neutron-859ff747c8-5c47j" Jan 20 17:07:59 crc kubenswrapper[4558]: I0120 
17:07:59.175067 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f5d75d5a-4202-477d-97a5-8d92bcdd13b9-internal-tls-certs\") pod \"neutron-859ff747c8-5c47j\" (UID: \"f5d75d5a-4202-477d-97a5-8d92bcdd13b9\") " pod="openstack-kuttl-tests/neutron-859ff747c8-5c47j" Jan 20 17:07:59 crc kubenswrapper[4558]: I0120 17:07:59.175965 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fr6vd\" (UniqueName: \"kubernetes.io/projected/f5d75d5a-4202-477d-97a5-8d92bcdd13b9-kube-api-access-fr6vd\") pod \"neutron-859ff747c8-5c47j\" (UID: \"f5d75d5a-4202-477d-97a5-8d92bcdd13b9\") " pod="openstack-kuttl-tests/neutron-859ff747c8-5c47j" Jan 20 17:07:59 crc kubenswrapper[4558]: I0120 17:07:59.175997 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/f5d75d5a-4202-477d-97a5-8d92bcdd13b9-config\") pod \"neutron-859ff747c8-5c47j\" (UID: \"f5d75d5a-4202-477d-97a5-8d92bcdd13b9\") " pod="openstack-kuttl-tests/neutron-859ff747c8-5c47j" Jan 20 17:07:59 crc kubenswrapper[4558]: I0120 17:07:59.176040 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/f5d75d5a-4202-477d-97a5-8d92bcdd13b9-ovndb-tls-certs\") pod \"neutron-859ff747c8-5c47j\" (UID: \"f5d75d5a-4202-477d-97a5-8d92bcdd13b9\") " pod="openstack-kuttl-tests/neutron-859ff747c8-5c47j" Jan 20 17:07:59 crc kubenswrapper[4558]: I0120 17:07:59.176056 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5d75d5a-4202-477d-97a5-8d92bcdd13b9-combined-ca-bundle\") pod \"neutron-859ff747c8-5c47j\" (UID: \"f5d75d5a-4202-477d-97a5-8d92bcdd13b9\") " pod="openstack-kuttl-tests/neutron-859ff747c8-5c47j" Jan 20 17:07:59 crc kubenswrapper[4558]: I0120 17:07:59.176083 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f5d75d5a-4202-477d-97a5-8d92bcdd13b9-public-tls-certs\") pod \"neutron-859ff747c8-5c47j\" (UID: \"f5d75d5a-4202-477d-97a5-8d92bcdd13b9\") " pod="openstack-kuttl-tests/neutron-859ff747c8-5c47j" Jan 20 17:07:59 crc kubenswrapper[4558]: I0120 17:07:59.176110 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/f5d75d5a-4202-477d-97a5-8d92bcdd13b9-httpd-config\") pod \"neutron-859ff747c8-5c47j\" (UID: \"f5d75d5a-4202-477d-97a5-8d92bcdd13b9\") " pod="openstack-kuttl-tests/neutron-859ff747c8-5c47j" Jan 20 17:07:59 crc kubenswrapper[4558]: I0120 17:07:59.183112 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/f5d75d5a-4202-477d-97a5-8d92bcdd13b9-ovndb-tls-certs\") pod \"neutron-859ff747c8-5c47j\" (UID: \"f5d75d5a-4202-477d-97a5-8d92bcdd13b9\") " pod="openstack-kuttl-tests/neutron-859ff747c8-5c47j" Jan 20 17:07:59 crc kubenswrapper[4558]: I0120 17:07:59.183464 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f5d75d5a-4202-477d-97a5-8d92bcdd13b9-internal-tls-certs\") pod \"neutron-859ff747c8-5c47j\" (UID: \"f5d75d5a-4202-477d-97a5-8d92bcdd13b9\") " pod="openstack-kuttl-tests/neutron-859ff747c8-5c47j" Jan 20 17:07:59 crc kubenswrapper[4558]: I0120 17:07:59.184535 4558 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/f5d75d5a-4202-477d-97a5-8d92bcdd13b9-httpd-config\") pod \"neutron-859ff747c8-5c47j\" (UID: \"f5d75d5a-4202-477d-97a5-8d92bcdd13b9\") " pod="openstack-kuttl-tests/neutron-859ff747c8-5c47j" Jan 20 17:07:59 crc kubenswrapper[4558]: I0120 17:07:59.185215 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/f5d75d5a-4202-477d-97a5-8d92bcdd13b9-config\") pod \"neutron-859ff747c8-5c47j\" (UID: \"f5d75d5a-4202-477d-97a5-8d92bcdd13b9\") " pod="openstack-kuttl-tests/neutron-859ff747c8-5c47j" Jan 20 17:07:59 crc kubenswrapper[4558]: I0120 17:07:59.185278 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5d75d5a-4202-477d-97a5-8d92bcdd13b9-combined-ca-bundle\") pod \"neutron-859ff747c8-5c47j\" (UID: \"f5d75d5a-4202-477d-97a5-8d92bcdd13b9\") " pod="openstack-kuttl-tests/neutron-859ff747c8-5c47j" Jan 20 17:07:59 crc kubenswrapper[4558]: I0120 17:07:59.186604 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f5d75d5a-4202-477d-97a5-8d92bcdd13b9-public-tls-certs\") pod \"neutron-859ff747c8-5c47j\" (UID: \"f5d75d5a-4202-477d-97a5-8d92bcdd13b9\") " pod="openstack-kuttl-tests/neutron-859ff747c8-5c47j" Jan 20 17:07:59 crc kubenswrapper[4558]: I0120 17:07:59.190261 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fr6vd\" (UniqueName: \"kubernetes.io/projected/f5d75d5a-4202-477d-97a5-8d92bcdd13b9-kube-api-access-fr6vd\") pod \"neutron-859ff747c8-5c47j\" (UID: \"f5d75d5a-4202-477d-97a5-8d92bcdd13b9\") " pod="openstack-kuttl-tests/neutron-859ff747c8-5c47j" Jan 20 17:07:59 crc kubenswrapper[4558]: I0120 17:07:59.315582 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-859ff747c8-5c47j" Jan 20 17:07:59 crc kubenswrapper[4558]: I0120 17:07:59.545408 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:07:59 crc kubenswrapper[4558]: I0120 17:07:59.748718 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-859ff747c8-5c47j"] Jan 20 17:07:59 crc kubenswrapper[4558]: W0120 17:07:59.756711 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf5d75d5a_4202_477d_97a5_8d92bcdd13b9.slice/crio-5a8efb770421b4479b6c2977354d0273d8f5514be6b15091228236e78e95c87b WatchSource:0}: Error finding container 5a8efb770421b4479b6c2977354d0273d8f5514be6b15091228236e78e95c87b: Status 404 returned error can't find the container with id 5a8efb770421b4479b6c2977354d0273d8f5514be6b15091228236e78e95c87b Jan 20 17:07:59 crc kubenswrapper[4558]: I0120 17:07:59.791373 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:07:59 crc kubenswrapper[4558]: I0120 17:07:59.829221 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:08:00 crc kubenswrapper[4558]: I0120 17:08:00.555255 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-859ff747c8-5c47j" event={"ID":"f5d75d5a-4202-477d-97a5-8d92bcdd13b9","Type":"ContainerStarted","Data":"a2dd29b89f91cc3800497edcfbd3636296ddb90d87379145a79cd11ac30957af"} Jan 20 17:08:00 crc kubenswrapper[4558]: I0120 17:08:00.555536 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-859ff747c8-5c47j" event={"ID":"f5d75d5a-4202-477d-97a5-8d92bcdd13b9","Type":"ContainerStarted","Data":"774b082f2f30d066f7547f9bb1d923817b7873ec854074ba60f13876f9205004"} Jan 20 17:08:00 crc kubenswrapper[4558]: I0120 17:08:00.555549 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-859ff747c8-5c47j" event={"ID":"f5d75d5a-4202-477d-97a5-8d92bcdd13b9","Type":"ContainerStarted","Data":"5a8efb770421b4479b6c2977354d0273d8f5514be6b15091228236e78e95c87b"} Jan 20 17:08:00 crc kubenswrapper[4558]: I0120 17:08:00.555616 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/neutron-859ff747c8-5c47j" Jan 20 17:08:00 crc kubenswrapper[4558]: I0120 17:08:00.556317 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-scheduler-0" podUID="c6c600f3-c24a-4a8b-872d-679316f01e55" containerName="cinder-scheduler" containerID="cri-o://01cbe3fb135e1ae6155a40c536acc9c77c3d28806d529f69929510c415648798" gracePeriod=30 Jan 20 17:08:00 crc kubenswrapper[4558]: I0120 17:08:00.556485 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-scheduler-0" podUID="c6c600f3-c24a-4a8b-872d-679316f01e55" containerName="probe" containerID="cri-o://26df03d917e9ae72a0576aea96b12fedd027b6583792494940fdbec87b664c67" gracePeriod=30 Jan 20 17:08:00 crc kubenswrapper[4558]: I0120 17:08:00.565922 4558 scope.go:117] "RemoveContainer" containerID="a711a286282347590079d2447ef37fa9ed0f11085d6d7a65f85e65c1c2df608f" Jan 20 17:08:00 crc kubenswrapper[4558]: E0120 17:08:00.566120 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" 
with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:08:00 crc kubenswrapper[4558]: I0120 17:08:00.581143 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/neutron-859ff747c8-5c47j" podStartSLOduration=2.5811314899999998 podStartE2EDuration="2.58113149s" podCreationTimestamp="2026-01-20 17:07:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:08:00.578580151 +0000 UTC m=+1574.338918118" watchObservedRunningTime="2026-01-20 17:08:00.58113149 +0000 UTC m=+1574.341469457" Jan 20 17:08:01 crc kubenswrapper[4558]: I0120 17:08:01.570472 4558 generic.go:334] "Generic (PLEG): container finished" podID="c6c600f3-c24a-4a8b-872d-679316f01e55" containerID="26df03d917e9ae72a0576aea96b12fedd027b6583792494940fdbec87b664c67" exitCode=0 Jan 20 17:08:01 crc kubenswrapper[4558]: I0120 17:08:01.570641 4558 generic.go:334] "Generic (PLEG): container finished" podID="c6c600f3-c24a-4a8b-872d-679316f01e55" containerID="01cbe3fb135e1ae6155a40c536acc9c77c3d28806d529f69929510c415648798" exitCode=0 Jan 20 17:08:01 crc kubenswrapper[4558]: I0120 17:08:01.571107 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"c6c600f3-c24a-4a8b-872d-679316f01e55","Type":"ContainerDied","Data":"26df03d917e9ae72a0576aea96b12fedd027b6583792494940fdbec87b664c67"} Jan 20 17:08:01 crc kubenswrapper[4558]: I0120 17:08:01.571132 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"c6c600f3-c24a-4a8b-872d-679316f01e55","Type":"ContainerDied","Data":"01cbe3fb135e1ae6155a40c536acc9c77c3d28806d529f69929510c415648798"} Jan 20 17:08:01 crc kubenswrapper[4558]: I0120 17:08:01.749714 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:08:01 crc kubenswrapper[4558]: I0120 17:08:01.929289 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c6c600f3-c24a-4a8b-872d-679316f01e55-etc-machine-id\") pod \"c6c600f3-c24a-4a8b-872d-679316f01e55\" (UID: \"c6c600f3-c24a-4a8b-872d-679316f01e55\") " Jan 20 17:08:01 crc kubenswrapper[4558]: I0120 17:08:01.929389 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c6c600f3-c24a-4a8b-872d-679316f01e55-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "c6c600f3-c24a-4a8b-872d-679316f01e55" (UID: "c6c600f3-c24a-4a8b-872d-679316f01e55"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:08:01 crc kubenswrapper[4558]: I0120 17:08:01.929463 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6c600f3-c24a-4a8b-872d-679316f01e55-combined-ca-bundle\") pod \"c6c600f3-c24a-4a8b-872d-679316f01e55\" (UID: \"c6c600f3-c24a-4a8b-872d-679316f01e55\") " Jan 20 17:08:01 crc kubenswrapper[4558]: I0120 17:08:01.930015 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c6c600f3-c24a-4a8b-872d-679316f01e55-scripts\") pod \"c6c600f3-c24a-4a8b-872d-679316f01e55\" (UID: \"c6c600f3-c24a-4a8b-872d-679316f01e55\") " Jan 20 17:08:01 crc kubenswrapper[4558]: I0120 17:08:01.930045 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xswnr\" (UniqueName: \"kubernetes.io/projected/c6c600f3-c24a-4a8b-872d-679316f01e55-kube-api-access-xswnr\") pod \"c6c600f3-c24a-4a8b-872d-679316f01e55\" (UID: \"c6c600f3-c24a-4a8b-872d-679316f01e55\") " Jan 20 17:08:01 crc kubenswrapper[4558]: I0120 17:08:01.930083 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c6c600f3-c24a-4a8b-872d-679316f01e55-config-data\") pod \"c6c600f3-c24a-4a8b-872d-679316f01e55\" (UID: \"c6c600f3-c24a-4a8b-872d-679316f01e55\") " Jan 20 17:08:01 crc kubenswrapper[4558]: I0120 17:08:01.930116 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c6c600f3-c24a-4a8b-872d-679316f01e55-config-data-custom\") pod \"c6c600f3-c24a-4a8b-872d-679316f01e55\" (UID: \"c6c600f3-c24a-4a8b-872d-679316f01e55\") " Jan 20 17:08:01 crc kubenswrapper[4558]: I0120 17:08:01.930594 4558 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c6c600f3-c24a-4a8b-872d-679316f01e55-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:01 crc kubenswrapper[4558]: I0120 17:08:01.936254 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6c600f3-c24a-4a8b-872d-679316f01e55-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "c6c600f3-c24a-4a8b-872d-679316f01e55" (UID: "c6c600f3-c24a-4a8b-872d-679316f01e55"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:08:01 crc kubenswrapper[4558]: I0120 17:08:01.945280 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c6c600f3-c24a-4a8b-872d-679316f01e55-kube-api-access-xswnr" (OuterVolumeSpecName: "kube-api-access-xswnr") pod "c6c600f3-c24a-4a8b-872d-679316f01e55" (UID: "c6c600f3-c24a-4a8b-872d-679316f01e55"). InnerVolumeSpecName "kube-api-access-xswnr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:08:01 crc kubenswrapper[4558]: I0120 17:08:01.951272 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6c600f3-c24a-4a8b-872d-679316f01e55-scripts" (OuterVolumeSpecName: "scripts") pod "c6c600f3-c24a-4a8b-872d-679316f01e55" (UID: "c6c600f3-c24a-4a8b-872d-679316f01e55"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:08:01 crc kubenswrapper[4558]: I0120 17:08:01.975856 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6c600f3-c24a-4a8b-872d-679316f01e55-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c6c600f3-c24a-4a8b-872d-679316f01e55" (UID: "c6c600f3-c24a-4a8b-872d-679316f01e55"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:08:02 crc kubenswrapper[4558]: I0120 17:08:02.003140 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6c600f3-c24a-4a8b-872d-679316f01e55-config-data" (OuterVolumeSpecName: "config-data") pod "c6c600f3-c24a-4a8b-872d-679316f01e55" (UID: "c6c600f3-c24a-4a8b-872d-679316f01e55"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:08:02 crc kubenswrapper[4558]: I0120 17:08:02.033003 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6c600f3-c24a-4a8b-872d-679316f01e55-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:02 crc kubenswrapper[4558]: I0120 17:08:02.033031 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c6c600f3-c24a-4a8b-872d-679316f01e55-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:02 crc kubenswrapper[4558]: I0120 17:08:02.033042 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xswnr\" (UniqueName: \"kubernetes.io/projected/c6c600f3-c24a-4a8b-872d-679316f01e55-kube-api-access-xswnr\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:02 crc kubenswrapper[4558]: I0120 17:08:02.033052 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c6c600f3-c24a-4a8b-872d-679316f01e55-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:02 crc kubenswrapper[4558]: I0120 17:08:02.033062 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c6c600f3-c24a-4a8b-872d-679316f01e55-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:02 crc kubenswrapper[4558]: I0120 17:08:02.584799 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"c6c600f3-c24a-4a8b-872d-679316f01e55","Type":"ContainerDied","Data":"de429302d8ea2b548235b2fdadaee86bf6e719d8fdf08a361aa6c245db9bcf83"} Jan 20 17:08:02 crc kubenswrapper[4558]: I0120 17:08:02.584826 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:08:02 crc kubenswrapper[4558]: I0120 17:08:02.585119 4558 scope.go:117] "RemoveContainer" containerID="26df03d917e9ae72a0576aea96b12fedd027b6583792494940fdbec87b664c67" Jan 20 17:08:02 crc kubenswrapper[4558]: I0120 17:08:02.618443 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:08:02 crc kubenswrapper[4558]: I0120 17:08:02.619439 4558 scope.go:117] "RemoveContainer" containerID="01cbe3fb135e1ae6155a40c536acc9c77c3d28806d529f69929510c415648798" Jan 20 17:08:02 crc kubenswrapper[4558]: I0120 17:08:02.642266 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:08:02 crc kubenswrapper[4558]: I0120 17:08:02.670676 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:08:02 crc kubenswrapper[4558]: E0120 17:08:02.672393 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6c600f3-c24a-4a8b-872d-679316f01e55" containerName="cinder-scheduler" Jan 20 17:08:02 crc kubenswrapper[4558]: I0120 17:08:02.672430 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6c600f3-c24a-4a8b-872d-679316f01e55" containerName="cinder-scheduler" Jan 20 17:08:02 crc kubenswrapper[4558]: E0120 17:08:02.672449 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6c600f3-c24a-4a8b-872d-679316f01e55" containerName="probe" Jan 20 17:08:02 crc kubenswrapper[4558]: I0120 17:08:02.672456 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6c600f3-c24a-4a8b-872d-679316f01e55" containerName="probe" Jan 20 17:08:02 crc kubenswrapper[4558]: I0120 17:08:02.672936 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c6c600f3-c24a-4a8b-872d-679316f01e55" containerName="cinder-scheduler" Jan 20 17:08:02 crc kubenswrapper[4558]: I0120 17:08:02.672963 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c6c600f3-c24a-4a8b-872d-679316f01e55" containerName="probe" Jan 20 17:08:02 crc kubenswrapper[4558]: I0120 17:08:02.676858 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:08:02 crc kubenswrapper[4558]: I0120 17:08:02.679504 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-scheduler-config-data" Jan 20 17:08:02 crc kubenswrapper[4558]: I0120 17:08:02.684112 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:08:02 crc kubenswrapper[4558]: I0120 17:08:02.848093 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ec5ef72-7c69-4be3-974d-c020ddfea4f7-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"1ec5ef72-7c69-4be3-974d-c020ddfea4f7\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:08:02 crc kubenswrapper[4558]: I0120 17:08:02.848143 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1ec5ef72-7c69-4be3-974d-c020ddfea4f7-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"1ec5ef72-7c69-4be3-974d-c020ddfea4f7\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:08:02 crc kubenswrapper[4558]: I0120 17:08:02.848335 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1ec5ef72-7c69-4be3-974d-c020ddfea4f7-config-data\") pod \"cinder-scheduler-0\" (UID: \"1ec5ef72-7c69-4be3-974d-c020ddfea4f7\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:08:02 crc kubenswrapper[4558]: I0120 17:08:02.848388 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mxvx7\" (UniqueName: \"kubernetes.io/projected/1ec5ef72-7c69-4be3-974d-c020ddfea4f7-kube-api-access-mxvx7\") pod \"cinder-scheduler-0\" (UID: \"1ec5ef72-7c69-4be3-974d-c020ddfea4f7\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:08:02 crc kubenswrapper[4558]: I0120 17:08:02.848501 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1ec5ef72-7c69-4be3-974d-c020ddfea4f7-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"1ec5ef72-7c69-4be3-974d-c020ddfea4f7\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:08:02 crc kubenswrapper[4558]: I0120 17:08:02.848546 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1ec5ef72-7c69-4be3-974d-c020ddfea4f7-scripts\") pod \"cinder-scheduler-0\" (UID: \"1ec5ef72-7c69-4be3-974d-c020ddfea4f7\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:08:02 crc kubenswrapper[4558]: I0120 17:08:02.949525 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1ec5ef72-7c69-4be3-974d-c020ddfea4f7-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"1ec5ef72-7c69-4be3-974d-c020ddfea4f7\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:08:02 crc kubenswrapper[4558]: I0120 17:08:02.949568 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1ec5ef72-7c69-4be3-974d-c020ddfea4f7-scripts\") pod \"cinder-scheduler-0\" (UID: \"1ec5ef72-7c69-4be3-974d-c020ddfea4f7\") " 
pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:08:02 crc kubenswrapper[4558]: I0120 17:08:02.949656 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ec5ef72-7c69-4be3-974d-c020ddfea4f7-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"1ec5ef72-7c69-4be3-974d-c020ddfea4f7\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:08:02 crc kubenswrapper[4558]: I0120 17:08:02.950486 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1ec5ef72-7c69-4be3-974d-c020ddfea4f7-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"1ec5ef72-7c69-4be3-974d-c020ddfea4f7\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:08:02 crc kubenswrapper[4558]: I0120 17:08:02.950544 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1ec5ef72-7c69-4be3-974d-c020ddfea4f7-config-data\") pod \"cinder-scheduler-0\" (UID: \"1ec5ef72-7c69-4be3-974d-c020ddfea4f7\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:08:02 crc kubenswrapper[4558]: I0120 17:08:02.950571 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mxvx7\" (UniqueName: \"kubernetes.io/projected/1ec5ef72-7c69-4be3-974d-c020ddfea4f7-kube-api-access-mxvx7\") pod \"cinder-scheduler-0\" (UID: \"1ec5ef72-7c69-4be3-974d-c020ddfea4f7\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:08:02 crc kubenswrapper[4558]: I0120 17:08:02.949682 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1ec5ef72-7c69-4be3-974d-c020ddfea4f7-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"1ec5ef72-7c69-4be3-974d-c020ddfea4f7\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:08:02 crc kubenswrapper[4558]: I0120 17:08:02.956119 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1ec5ef72-7c69-4be3-974d-c020ddfea4f7-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"1ec5ef72-7c69-4be3-974d-c020ddfea4f7\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:08:02 crc kubenswrapper[4558]: I0120 17:08:02.968670 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1ec5ef72-7c69-4be3-974d-c020ddfea4f7-scripts\") pod \"cinder-scheduler-0\" (UID: \"1ec5ef72-7c69-4be3-974d-c020ddfea4f7\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:08:02 crc kubenswrapper[4558]: I0120 17:08:02.968764 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ec5ef72-7c69-4be3-974d-c020ddfea4f7-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"1ec5ef72-7c69-4be3-974d-c020ddfea4f7\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:08:02 crc kubenswrapper[4558]: I0120 17:08:02.969284 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1ec5ef72-7c69-4be3-974d-c020ddfea4f7-config-data\") pod \"cinder-scheduler-0\" (UID: \"1ec5ef72-7c69-4be3-974d-c020ddfea4f7\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:08:02 crc kubenswrapper[4558]: I0120 17:08:02.971022 4558 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-mxvx7\" (UniqueName: \"kubernetes.io/projected/1ec5ef72-7c69-4be3-974d-c020ddfea4f7-kube-api-access-mxvx7\") pod \"cinder-scheduler-0\" (UID: \"1ec5ef72-7c69-4be3-974d-c020ddfea4f7\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:08:02 crc kubenswrapper[4558]: I0120 17:08:02.993227 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:08:03 crc kubenswrapper[4558]: I0120 17:08:03.392574 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:08:03 crc kubenswrapper[4558]: W0120 17:08:03.400318 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1ec5ef72_7c69_4be3_974d_c020ddfea4f7.slice/crio-f3fac0c16d1cc19744ffbcb36a1f835b7e57b07e263a1f9175592eed6e112976 WatchSource:0}: Error finding container f3fac0c16d1cc19744ffbcb36a1f835b7e57b07e263a1f9175592eed6e112976: Status 404 returned error can't find the container with id f3fac0c16d1cc19744ffbcb36a1f835b7e57b07e263a1f9175592eed6e112976 Jan 20 17:08:03 crc kubenswrapper[4558]: I0120 17:08:03.598637 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"1ec5ef72-7c69-4be3-974d-c020ddfea4f7","Type":"ContainerStarted","Data":"f3fac0c16d1cc19744ffbcb36a1f835b7e57b07e263a1f9175592eed6e112976"} Jan 20 17:08:04 crc kubenswrapper[4558]: I0120 17:08:04.580109 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c6c600f3-c24a-4a8b-872d-679316f01e55" path="/var/lib/kubelet/pods/c6c600f3-c24a-4a8b-872d-679316f01e55/volumes" Jan 20 17:08:04 crc kubenswrapper[4558]: I0120 17:08:04.613899 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"1ec5ef72-7c69-4be3-974d-c020ddfea4f7","Type":"ContainerStarted","Data":"2ad54bc63fc4167f1a6668ac58b0ad56798eb8ff76a988eb75b07766de4d421c"} Jan 20 17:08:04 crc kubenswrapper[4558]: I0120 17:08:04.614005 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"1ec5ef72-7c69-4be3-974d-c020ddfea4f7","Type":"ContainerStarted","Data":"f1e7bbc89f40090d398e96e1f8922de73e690cb868b93535a373edcd14aedc39"} Jan 20 17:08:04 crc kubenswrapper[4558]: I0120 17:08:04.635778 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/cinder-scheduler-0" podStartSLOduration=2.635754189 podStartE2EDuration="2.635754189s" podCreationTimestamp="2026-01-20 17:08:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:08:04.631631436 +0000 UTC m=+1578.391969404" watchObservedRunningTime="2026-01-20 17:08:04.635754189 +0000 UTC m=+1578.396092156" Jan 20 17:08:05 crc kubenswrapper[4558]: I0120 17:08:05.667393 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-rv86l"] Jan 20 17:08:05 crc kubenswrapper[4558]: I0120 17:08:05.669518 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-rv86l" Jan 20 17:08:05 crc kubenswrapper[4558]: I0120 17:08:05.686132 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rv86l"] Jan 20 17:08:05 crc kubenswrapper[4558]: I0120 17:08:05.816495 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a48821c1-0266-44fd-b411-deb98533c7fd-catalog-content\") pod \"redhat-operators-rv86l\" (UID: \"a48821c1-0266-44fd-b411-deb98533c7fd\") " pod="openshift-marketplace/redhat-operators-rv86l" Jan 20 17:08:05 crc kubenswrapper[4558]: I0120 17:08:05.816969 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a48821c1-0266-44fd-b411-deb98533c7fd-utilities\") pod \"redhat-operators-rv86l\" (UID: \"a48821c1-0266-44fd-b411-deb98533c7fd\") " pod="openshift-marketplace/redhat-operators-rv86l" Jan 20 17:08:05 crc kubenswrapper[4558]: I0120 17:08:05.817010 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p42l6\" (UniqueName: \"kubernetes.io/projected/a48821c1-0266-44fd-b411-deb98533c7fd-kube-api-access-p42l6\") pod \"redhat-operators-rv86l\" (UID: \"a48821c1-0266-44fd-b411-deb98533c7fd\") " pod="openshift-marketplace/redhat-operators-rv86l" Jan 20 17:08:05 crc kubenswrapper[4558]: I0120 17:08:05.919132 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a48821c1-0266-44fd-b411-deb98533c7fd-utilities\") pod \"redhat-operators-rv86l\" (UID: \"a48821c1-0266-44fd-b411-deb98533c7fd\") " pod="openshift-marketplace/redhat-operators-rv86l" Jan 20 17:08:05 crc kubenswrapper[4558]: I0120 17:08:05.919432 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p42l6\" (UniqueName: \"kubernetes.io/projected/a48821c1-0266-44fd-b411-deb98533c7fd-kube-api-access-p42l6\") pod \"redhat-operators-rv86l\" (UID: \"a48821c1-0266-44fd-b411-deb98533c7fd\") " pod="openshift-marketplace/redhat-operators-rv86l" Jan 20 17:08:05 crc kubenswrapper[4558]: I0120 17:08:05.919535 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a48821c1-0266-44fd-b411-deb98533c7fd-catalog-content\") pod \"redhat-operators-rv86l\" (UID: \"a48821c1-0266-44fd-b411-deb98533c7fd\") " pod="openshift-marketplace/redhat-operators-rv86l" Jan 20 17:08:05 crc kubenswrapper[4558]: I0120 17:08:05.919585 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a48821c1-0266-44fd-b411-deb98533c7fd-utilities\") pod \"redhat-operators-rv86l\" (UID: \"a48821c1-0266-44fd-b411-deb98533c7fd\") " pod="openshift-marketplace/redhat-operators-rv86l" Jan 20 17:08:05 crc kubenswrapper[4558]: I0120 17:08:05.919892 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a48821c1-0266-44fd-b411-deb98533c7fd-catalog-content\") pod \"redhat-operators-rv86l\" (UID: \"a48821c1-0266-44fd-b411-deb98533c7fd\") " pod="openshift-marketplace/redhat-operators-rv86l" Jan 20 17:08:05 crc kubenswrapper[4558]: I0120 17:08:05.937381 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-p42l6\" (UniqueName: \"kubernetes.io/projected/a48821c1-0266-44fd-b411-deb98533c7fd-kube-api-access-p42l6\") pod \"redhat-operators-rv86l\" (UID: \"a48821c1-0266-44fd-b411-deb98533c7fd\") " pod="openshift-marketplace/redhat-operators-rv86l" Jan 20 17:08:05 crc kubenswrapper[4558]: I0120 17:08:05.985888 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rv86l" Jan 20 17:08:06 crc kubenswrapper[4558]: I0120 17:08:06.445914 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rv86l"] Jan 20 17:08:06 crc kubenswrapper[4558]: W0120 17:08:06.454339 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda48821c1_0266_44fd_b411_deb98533c7fd.slice/crio-7ad6a77ff3cc90289d2f0e9be9064a4bb240c2344f535d677a5379fa19da9d62 WatchSource:0}: Error finding container 7ad6a77ff3cc90289d2f0e9be9064a4bb240c2344f535d677a5379fa19da9d62: Status 404 returned error can't find the container with id 7ad6a77ff3cc90289d2f0e9be9064a4bb240c2344f535d677a5379fa19da9d62 Jan 20 17:08:06 crc kubenswrapper[4558]: I0120 17:08:06.643819 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rv86l" event={"ID":"a48821c1-0266-44fd-b411-deb98533c7fd","Type":"ContainerStarted","Data":"7ad6a77ff3cc90289d2f0e9be9064a4bb240c2344f535d677a5379fa19da9d62"} Jan 20 17:08:06 crc kubenswrapper[4558]: I0120 17:08:06.793718 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/barbican-api-757fb55df8-9qzm2" Jan 20 17:08:06 crc kubenswrapper[4558]: I0120 17:08:06.870857 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/barbican-api-757fb55df8-9qzm2" Jan 20 17:08:06 crc kubenswrapper[4558]: I0120 17:08:06.932446 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-api-5ffbfb44-sbdwx"] Jan 20 17:08:06 crc kubenswrapper[4558]: I0120 17:08:06.932702 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-api-5ffbfb44-sbdwx" podUID="cd459d10-b71b-45e5-9af4-042acd8bd024" containerName="barbican-api-log" containerID="cri-o://55a7a9bf0c591ba10d44b7ae55412d2ae7401940ae82cc0cc371573754636419" gracePeriod=30 Jan 20 17:08:06 crc kubenswrapper[4558]: I0120 17:08:06.932849 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-api-5ffbfb44-sbdwx" podUID="cd459d10-b71b-45e5-9af4-042acd8bd024" containerName="barbican-api" containerID="cri-o://96e36a345e076d95ab8b9b5325771dea828755c4ed25a1e273a730a00f809fc3" gracePeriod=30 Jan 20 17:08:07 crc kubenswrapper[4558]: I0120 17:08:07.652060 4558 generic.go:334] "Generic (PLEG): container finished" podID="cd459d10-b71b-45e5-9af4-042acd8bd024" containerID="55a7a9bf0c591ba10d44b7ae55412d2ae7401940ae82cc0cc371573754636419" exitCode=143 Jan 20 17:08:07 crc kubenswrapper[4558]: I0120 17:08:07.652144 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-5ffbfb44-sbdwx" event={"ID":"cd459d10-b71b-45e5-9af4-042acd8bd024","Type":"ContainerDied","Data":"55a7a9bf0c591ba10d44b7ae55412d2ae7401940ae82cc0cc371573754636419"} Jan 20 17:08:07 crc kubenswrapper[4558]: I0120 17:08:07.654309 4558 generic.go:334] "Generic (PLEG): container finished" podID="a48821c1-0266-44fd-b411-deb98533c7fd" 
containerID="cbc7e2a0f425c84798b36890852c5db9475ba5f8371ac737713a16c3e433d6e7" exitCode=0 Jan 20 17:08:07 crc kubenswrapper[4558]: I0120 17:08:07.654405 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rv86l" event={"ID":"a48821c1-0266-44fd-b411-deb98533c7fd","Type":"ContainerDied","Data":"cbc7e2a0f425c84798b36890852c5db9475ba5f8371ac737713a16c3e433d6e7"} Jan 20 17:08:07 crc kubenswrapper[4558]: I0120 17:08:07.876509 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:08:07 crc kubenswrapper[4558]: I0120 17:08:07.993549 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:08:08 crc kubenswrapper[4558]: E0120 17:08:08.422205 4558 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4fb0c521_d465_47b9_b859_199f53143dca.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4fb0c521_d465_47b9_b859_199f53143dca.slice/crio-720cf8a66b7cad8587f8ef26f04899153239b9d5f060876b68844c1340f9e7ba\": RecentStats: unable to find data in memory cache]" Jan 20 17:08:09 crc kubenswrapper[4558]: I0120 17:08:09.672442 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rv86l" event={"ID":"a48821c1-0266-44fd-b411-deb98533c7fd","Type":"ContainerStarted","Data":"a2c8de5cf376f04d4ec555ec3f89d8183c554a60e63ae583df7efab61c9f6922"} Jan 20 17:08:10 crc kubenswrapper[4558]: I0120 17:08:10.104755 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/barbican-api-5ffbfb44-sbdwx" podUID="cd459d10-b71b-45e5-9af4-042acd8bd024" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.1.94:9311/healthcheck\": read tcp 10.217.0.2:47922->10.217.1.94:9311: read: connection reset by peer" Jan 20 17:08:10 crc kubenswrapper[4558]: I0120 17:08:10.104755 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/barbican-api-5ffbfb44-sbdwx" podUID="cd459d10-b71b-45e5-9af4-042acd8bd024" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.1.94:9311/healthcheck\": read tcp 10.217.0.2:47914->10.217.1.94:9311: read: connection reset by peer" Jan 20 17:08:10 crc kubenswrapper[4558]: I0120 17:08:10.496409 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-api-5ffbfb44-sbdwx" Jan 20 17:08:10 crc kubenswrapper[4558]: I0120 17:08:10.523252 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd459d10-b71b-45e5-9af4-042acd8bd024-combined-ca-bundle\") pod \"cd459d10-b71b-45e5-9af4-042acd8bd024\" (UID: \"cd459d10-b71b-45e5-9af4-042acd8bd024\") " Jan 20 17:08:10 crc kubenswrapper[4558]: I0120 17:08:10.523960 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cd459d10-b71b-45e5-9af4-042acd8bd024-logs\") pod \"cd459d10-b71b-45e5-9af4-042acd8bd024\" (UID: \"cd459d10-b71b-45e5-9af4-042acd8bd024\") " Jan 20 17:08:10 crc kubenswrapper[4558]: I0120 17:08:10.524002 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd459d10-b71b-45e5-9af4-042acd8bd024-config-data\") pod \"cd459d10-b71b-45e5-9af4-042acd8bd024\" (UID: \"cd459d10-b71b-45e5-9af4-042acd8bd024\") " Jan 20 17:08:10 crc kubenswrapper[4558]: I0120 17:08:10.524074 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-njbz4\" (UniqueName: \"kubernetes.io/projected/cd459d10-b71b-45e5-9af4-042acd8bd024-kube-api-access-njbz4\") pod \"cd459d10-b71b-45e5-9af4-042acd8bd024\" (UID: \"cd459d10-b71b-45e5-9af4-042acd8bd024\") " Jan 20 17:08:10 crc kubenswrapper[4558]: I0120 17:08:10.524114 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cd459d10-b71b-45e5-9af4-042acd8bd024-config-data-custom\") pod \"cd459d10-b71b-45e5-9af4-042acd8bd024\" (UID: \"cd459d10-b71b-45e5-9af4-042acd8bd024\") " Jan 20 17:08:10 crc kubenswrapper[4558]: I0120 17:08:10.524919 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cd459d10-b71b-45e5-9af4-042acd8bd024-logs" (OuterVolumeSpecName: "logs") pod "cd459d10-b71b-45e5-9af4-042acd8bd024" (UID: "cd459d10-b71b-45e5-9af4-042acd8bd024"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:08:10 crc kubenswrapper[4558]: I0120 17:08:10.529024 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd459d10-b71b-45e5-9af4-042acd8bd024-kube-api-access-njbz4" (OuterVolumeSpecName: "kube-api-access-njbz4") pod "cd459d10-b71b-45e5-9af4-042acd8bd024" (UID: "cd459d10-b71b-45e5-9af4-042acd8bd024"). InnerVolumeSpecName "kube-api-access-njbz4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:08:10 crc kubenswrapper[4558]: I0120 17:08:10.532916 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd459d10-b71b-45e5-9af4-042acd8bd024-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "cd459d10-b71b-45e5-9af4-042acd8bd024" (UID: "cd459d10-b71b-45e5-9af4-042acd8bd024"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:08:10 crc kubenswrapper[4558]: I0120 17:08:10.552379 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd459d10-b71b-45e5-9af4-042acd8bd024-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cd459d10-b71b-45e5-9af4-042acd8bd024" (UID: "cd459d10-b71b-45e5-9af4-042acd8bd024"). 
InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:08:10 crc kubenswrapper[4558]: I0120 17:08:10.563850 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd459d10-b71b-45e5-9af4-042acd8bd024-config-data" (OuterVolumeSpecName: "config-data") pod "cd459d10-b71b-45e5-9af4-042acd8bd024" (UID: "cd459d10-b71b-45e5-9af4-042acd8bd024"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:08:10 crc kubenswrapper[4558]: I0120 17:08:10.626429 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-njbz4\" (UniqueName: \"kubernetes.io/projected/cd459d10-b71b-45e5-9af4-042acd8bd024-kube-api-access-njbz4\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:10 crc kubenswrapper[4558]: I0120 17:08:10.626484 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cd459d10-b71b-45e5-9af4-042acd8bd024-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:10 crc kubenswrapper[4558]: I0120 17:08:10.626495 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd459d10-b71b-45e5-9af4-042acd8bd024-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:10 crc kubenswrapper[4558]: I0120 17:08:10.626505 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cd459d10-b71b-45e5-9af4-042acd8bd024-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:10 crc kubenswrapper[4558]: I0120 17:08:10.626517 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd459d10-b71b-45e5-9af4-042acd8bd024-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:10 crc kubenswrapper[4558]: I0120 17:08:10.681458 4558 generic.go:334] "Generic (PLEG): container finished" podID="cd459d10-b71b-45e5-9af4-042acd8bd024" containerID="96e36a345e076d95ab8b9b5325771dea828755c4ed25a1e273a730a00f809fc3" exitCode=0 Jan 20 17:08:10 crc kubenswrapper[4558]: I0120 17:08:10.681495 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-5ffbfb44-sbdwx" event={"ID":"cd459d10-b71b-45e5-9af4-042acd8bd024","Type":"ContainerDied","Data":"96e36a345e076d95ab8b9b5325771dea828755c4ed25a1e273a730a00f809fc3"} Jan 20 17:08:10 crc kubenswrapper[4558]: I0120 17:08:10.681514 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-api-5ffbfb44-sbdwx" Jan 20 17:08:10 crc kubenswrapper[4558]: I0120 17:08:10.681534 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-5ffbfb44-sbdwx" event={"ID":"cd459d10-b71b-45e5-9af4-042acd8bd024","Type":"ContainerDied","Data":"fbdaa6a00aab65fd715c1c09893e7d76edc2e170868300fe479d50fb4b6a8b7d"} Jan 20 17:08:10 crc kubenswrapper[4558]: I0120 17:08:10.681575 4558 scope.go:117] "RemoveContainer" containerID="96e36a345e076d95ab8b9b5325771dea828755c4ed25a1e273a730a00f809fc3" Jan 20 17:08:10 crc kubenswrapper[4558]: I0120 17:08:10.685346 4558 generic.go:334] "Generic (PLEG): container finished" podID="a48821c1-0266-44fd-b411-deb98533c7fd" containerID="a2c8de5cf376f04d4ec555ec3f89d8183c554a60e63ae583df7efab61c9f6922" exitCode=0 Jan 20 17:08:10 crc kubenswrapper[4558]: I0120 17:08:10.685390 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rv86l" event={"ID":"a48821c1-0266-44fd-b411-deb98533c7fd","Type":"ContainerDied","Data":"a2c8de5cf376f04d4ec555ec3f89d8183c554a60e63ae583df7efab61c9f6922"} Jan 20 17:08:10 crc kubenswrapper[4558]: I0120 17:08:10.707246 4558 scope.go:117] "RemoveContainer" containerID="55a7a9bf0c591ba10d44b7ae55412d2ae7401940ae82cc0cc371573754636419" Jan 20 17:08:10 crc kubenswrapper[4558]: I0120 17:08:10.721803 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-api-5ffbfb44-sbdwx"] Jan 20 17:08:10 crc kubenswrapper[4558]: I0120 17:08:10.726405 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-api-5ffbfb44-sbdwx"] Jan 20 17:08:10 crc kubenswrapper[4558]: I0120 17:08:10.731994 4558 scope.go:117] "RemoveContainer" containerID="96e36a345e076d95ab8b9b5325771dea828755c4ed25a1e273a730a00f809fc3" Jan 20 17:08:10 crc kubenswrapper[4558]: E0120 17:08:10.732540 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"96e36a345e076d95ab8b9b5325771dea828755c4ed25a1e273a730a00f809fc3\": container with ID starting with 96e36a345e076d95ab8b9b5325771dea828755c4ed25a1e273a730a00f809fc3 not found: ID does not exist" containerID="96e36a345e076d95ab8b9b5325771dea828755c4ed25a1e273a730a00f809fc3" Jan 20 17:08:10 crc kubenswrapper[4558]: I0120 17:08:10.732578 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"96e36a345e076d95ab8b9b5325771dea828755c4ed25a1e273a730a00f809fc3"} err="failed to get container status \"96e36a345e076d95ab8b9b5325771dea828755c4ed25a1e273a730a00f809fc3\": rpc error: code = NotFound desc = could not find container \"96e36a345e076d95ab8b9b5325771dea828755c4ed25a1e273a730a00f809fc3\": container with ID starting with 96e36a345e076d95ab8b9b5325771dea828755c4ed25a1e273a730a00f809fc3 not found: ID does not exist" Jan 20 17:08:10 crc kubenswrapper[4558]: I0120 17:08:10.732603 4558 scope.go:117] "RemoveContainer" containerID="55a7a9bf0c591ba10d44b7ae55412d2ae7401940ae82cc0cc371573754636419" Jan 20 17:08:10 crc kubenswrapper[4558]: E0120 17:08:10.732863 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"55a7a9bf0c591ba10d44b7ae55412d2ae7401940ae82cc0cc371573754636419\": container with ID starting with 55a7a9bf0c591ba10d44b7ae55412d2ae7401940ae82cc0cc371573754636419 not found: ID does not exist" 
containerID="55a7a9bf0c591ba10d44b7ae55412d2ae7401940ae82cc0cc371573754636419" Jan 20 17:08:10 crc kubenswrapper[4558]: I0120 17:08:10.732892 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"55a7a9bf0c591ba10d44b7ae55412d2ae7401940ae82cc0cc371573754636419"} err="failed to get container status \"55a7a9bf0c591ba10d44b7ae55412d2ae7401940ae82cc0cc371573754636419\": rpc error: code = NotFound desc = could not find container \"55a7a9bf0c591ba10d44b7ae55412d2ae7401940ae82cc0cc371573754636419\": container with ID starting with 55a7a9bf0c591ba10d44b7ae55412d2ae7401940ae82cc0cc371573754636419 not found: ID does not exist" Jan 20 17:08:11 crc kubenswrapper[4558]: I0120 17:08:11.694713 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rv86l" event={"ID":"a48821c1-0266-44fd-b411-deb98533c7fd","Type":"ContainerStarted","Data":"338e52b1f6e39162aa9f0d266698bce010d60e6392b39c78cbc191be0eca4835"} Jan 20 17:08:11 crc kubenswrapper[4558]: I0120 17:08:11.715615 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-rv86l" podStartSLOduration=3.170291904 podStartE2EDuration="6.715601346s" podCreationTimestamp="2026-01-20 17:08:05 +0000 UTC" firstStartedPulling="2026-01-20 17:08:07.657000646 +0000 UTC m=+1581.417338612" lastFinishedPulling="2026-01-20 17:08:11.202310087 +0000 UTC m=+1584.962648054" observedRunningTime="2026-01-20 17:08:11.713011405 +0000 UTC m=+1585.473349372" watchObservedRunningTime="2026-01-20 17:08:11.715601346 +0000 UTC m=+1585.475939313" Jan 20 17:08:12 crc kubenswrapper[4558]: I0120 17:08:12.581032 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd459d10-b71b-45e5-9af4-042acd8bd024" path="/var/lib/kubelet/pods/cd459d10-b71b-45e5-9af4-042acd8bd024/volumes" Jan 20 17:08:13 crc kubenswrapper[4558]: I0120 17:08:13.176874 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:08:13 crc kubenswrapper[4558]: I0120 17:08:13.567150 4558 scope.go:117] "RemoveContainer" containerID="a711a286282347590079d2447ef37fa9ed0f11085d6d7a65f85e65c1c2df608f" Jan 20 17:08:13 crc kubenswrapper[4558]: E0120 17:08:13.567646 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:08:15 crc kubenswrapper[4558]: I0120 17:08:15.986613 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-rv86l" Jan 20 17:08:15 crc kubenswrapper[4558]: I0120 17:08:15.987233 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-rv86l" Jan 20 17:08:17 crc kubenswrapper[4558]: I0120 17:08:17.020934 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-rv86l" podUID="a48821c1-0266-44fd-b411-deb98533c7fd" containerName="registry-server" probeResult="failure" output=< Jan 20 17:08:17 crc kubenswrapper[4558]: timeout: failed to connect service ":50051" within 1s Jan 20 17:08:17 crc kubenswrapper[4558]: > Jan 20 17:08:17 crc 
kubenswrapper[4558]: I0120 17:08:17.936083 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-qnbvz"] Jan 20 17:08:17 crc kubenswrapper[4558]: E0120 17:08:17.936625 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd459d10-b71b-45e5-9af4-042acd8bd024" containerName="barbican-api" Jan 20 17:08:17 crc kubenswrapper[4558]: I0120 17:08:17.936661 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd459d10-b71b-45e5-9af4-042acd8bd024" containerName="barbican-api" Jan 20 17:08:17 crc kubenswrapper[4558]: E0120 17:08:17.936694 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd459d10-b71b-45e5-9af4-042acd8bd024" containerName="barbican-api-log" Jan 20 17:08:17 crc kubenswrapper[4558]: I0120 17:08:17.936702 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd459d10-b71b-45e5-9af4-042acd8bd024" containerName="barbican-api-log" Jan 20 17:08:17 crc kubenswrapper[4558]: I0120 17:08:17.936868 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd459d10-b71b-45e5-9af4-042acd8bd024" containerName="barbican-api-log" Jan 20 17:08:17 crc kubenswrapper[4558]: I0120 17:08:17.936903 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd459d10-b71b-45e5-9af4-042acd8bd024" containerName="barbican-api" Jan 20 17:08:17 crc kubenswrapper[4558]: I0120 17:08:17.938426 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qnbvz" Jan 20 17:08:17 crc kubenswrapper[4558]: I0120 17:08:17.972533 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/01123c84-b8ac-47f1-bf92-d3618c099c7e-utilities\") pod \"certified-operators-qnbvz\" (UID: \"01123c84-b8ac-47f1-bf92-d3618c099c7e\") " pod="openshift-marketplace/certified-operators-qnbvz" Jan 20 17:08:17 crc kubenswrapper[4558]: I0120 17:08:17.972978 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/01123c84-b8ac-47f1-bf92-d3618c099c7e-catalog-content\") pod \"certified-operators-qnbvz\" (UID: \"01123c84-b8ac-47f1-bf92-d3618c099c7e\") " pod="openshift-marketplace/certified-operators-qnbvz" Jan 20 17:08:17 crc kubenswrapper[4558]: I0120 17:08:17.973024 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rgwg6\" (UniqueName: \"kubernetes.io/projected/01123c84-b8ac-47f1-bf92-d3618c099c7e-kube-api-access-rgwg6\") pod \"certified-operators-qnbvz\" (UID: \"01123c84-b8ac-47f1-bf92-d3618c099c7e\") " pod="openshift-marketplace/certified-operators-qnbvz" Jan 20 17:08:17 crc kubenswrapper[4558]: I0120 17:08:17.981659 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qnbvz"] Jan 20 17:08:18 crc kubenswrapper[4558]: I0120 17:08:18.075694 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/01123c84-b8ac-47f1-bf92-d3618c099c7e-utilities\") pod \"certified-operators-qnbvz\" (UID: \"01123c84-b8ac-47f1-bf92-d3618c099c7e\") " pod="openshift-marketplace/certified-operators-qnbvz" Jan 20 17:08:18 crc kubenswrapper[4558]: I0120 17:08:18.075765 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/01123c84-b8ac-47f1-bf92-d3618c099c7e-catalog-content\") pod \"certified-operators-qnbvz\" (UID: \"01123c84-b8ac-47f1-bf92-d3618c099c7e\") " pod="openshift-marketplace/certified-operators-qnbvz" Jan 20 17:08:18 crc kubenswrapper[4558]: I0120 17:08:18.075812 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rgwg6\" (UniqueName: \"kubernetes.io/projected/01123c84-b8ac-47f1-bf92-d3618c099c7e-kube-api-access-rgwg6\") pod \"certified-operators-qnbvz\" (UID: \"01123c84-b8ac-47f1-bf92-d3618c099c7e\") " pod="openshift-marketplace/certified-operators-qnbvz" Jan 20 17:08:18 crc kubenswrapper[4558]: I0120 17:08:18.076308 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/01123c84-b8ac-47f1-bf92-d3618c099c7e-utilities\") pod \"certified-operators-qnbvz\" (UID: \"01123c84-b8ac-47f1-bf92-d3618c099c7e\") " pod="openshift-marketplace/certified-operators-qnbvz" Jan 20 17:08:18 crc kubenswrapper[4558]: I0120 17:08:18.076373 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/01123c84-b8ac-47f1-bf92-d3618c099c7e-catalog-content\") pod \"certified-operators-qnbvz\" (UID: \"01123c84-b8ac-47f1-bf92-d3618c099c7e\") " pod="openshift-marketplace/certified-operators-qnbvz" Jan 20 17:08:18 crc kubenswrapper[4558]: I0120 17:08:18.094982 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rgwg6\" (UniqueName: \"kubernetes.io/projected/01123c84-b8ac-47f1-bf92-d3618c099c7e-kube-api-access-rgwg6\") pod \"certified-operators-qnbvz\" (UID: \"01123c84-b8ac-47f1-bf92-d3618c099c7e\") " pod="openshift-marketplace/certified-operators-qnbvz" Jan 20 17:08:18 crc kubenswrapper[4558]: I0120 17:08:18.256741 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-qnbvz" Jan 20 17:08:18 crc kubenswrapper[4558]: I0120 17:08:18.413220 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/placement-b8dd4f99b-9gg5b" Jan 20 17:08:18 crc kubenswrapper[4558]: I0120 17:08:18.478860 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/placement-b8dd4f99b-9gg5b" Jan 20 17:08:18 crc kubenswrapper[4558]: E0120 17:08:18.651139 4558 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4fb0c521_d465_47b9_b859_199f53143dca.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4fb0c521_d465_47b9_b859_199f53143dca.slice/crio-720cf8a66b7cad8587f8ef26f04899153239b9d5f060876b68844c1340f9e7ba\": RecentStats: unable to find data in memory cache]" Jan 20 17:08:18 crc kubenswrapper[4558]: I0120 17:08:18.743666 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qnbvz"] Jan 20 17:08:19 crc kubenswrapper[4558]: I0120 17:08:19.742907 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:19 crc kubenswrapper[4558]: I0120 17:08:19.798851 4558 generic.go:334] "Generic (PLEG): container finished" podID="01123c84-b8ac-47f1-bf92-d3618c099c7e" containerID="28a5b2699e9d116eed0137c9dfeadaf4b2da6876c5215e27725d1e49a19b70d4" exitCode=0 Jan 20 17:08:19 crc kubenswrapper[4558]: I0120 17:08:19.798908 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qnbvz" event={"ID":"01123c84-b8ac-47f1-bf92-d3618c099c7e","Type":"ContainerDied","Data":"28a5b2699e9d116eed0137c9dfeadaf4b2da6876c5215e27725d1e49a19b70d4"} Jan 20 17:08:19 crc kubenswrapper[4558]: I0120 17:08:19.798961 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qnbvz" event={"ID":"01123c84-b8ac-47f1-bf92-d3618c099c7e","Type":"ContainerStarted","Data":"a9a64797dc837ed64b392702e083f13f53a31d7e82b3fb26795d9a74322bb31a"} Jan 20 17:08:20 crc kubenswrapper[4558]: I0120 17:08:20.812746 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qnbvz" event={"ID":"01123c84-b8ac-47f1-bf92-d3618c099c7e","Type":"ContainerStarted","Data":"7b1d87fa270dcbcac81feb109be31e372f330b78ed80625fb307a5eb1cc1087a"} Jan 20 17:08:21 crc kubenswrapper[4558]: I0120 17:08:21.824757 4558 generic.go:334] "Generic (PLEG): container finished" podID="01123c84-b8ac-47f1-bf92-d3618c099c7e" containerID="7b1d87fa270dcbcac81feb109be31e372f330b78ed80625fb307a5eb1cc1087a" exitCode=0 Jan 20 17:08:21 crc kubenswrapper[4558]: I0120 17:08:21.824835 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qnbvz" event={"ID":"01123c84-b8ac-47f1-bf92-d3618c099c7e","Type":"ContainerDied","Data":"7b1d87fa270dcbcac81feb109be31e372f330b78ed80625fb307a5eb1cc1087a"} Jan 20 17:08:22 crc kubenswrapper[4558]: I0120 17:08:22.843771 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qnbvz" event={"ID":"01123c84-b8ac-47f1-bf92-d3618c099c7e","Type":"ContainerStarted","Data":"473db2312f15d2e87ca8e362d6fd1f294f7a7c878491c2c1ec5d0012d988db43"} Jan 20 17:08:22 crc kubenswrapper[4558]: 
I0120 17:08:22.863987 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-qnbvz" podStartSLOduration=3.320400538 podStartE2EDuration="5.863970286s" podCreationTimestamp="2026-01-20 17:08:17 +0000 UTC" firstStartedPulling="2026-01-20 17:08:19.805740315 +0000 UTC m=+1593.566078282" lastFinishedPulling="2026-01-20 17:08:22.349310063 +0000 UTC m=+1596.109648030" observedRunningTime="2026-01-20 17:08:22.861925721 +0000 UTC m=+1596.622263688" watchObservedRunningTime="2026-01-20 17:08:22.863970286 +0000 UTC m=+1596.624308253" Jan 20 17:08:24 crc kubenswrapper[4558]: I0120 17:08:24.510068 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-b56m6"] Jan 20 17:08:24 crc kubenswrapper[4558]: I0120 17:08:24.512922 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-b56m6" Jan 20 17:08:24 crc kubenswrapper[4558]: I0120 17:08:24.519994 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-b56m6"] Jan 20 17:08:24 crc kubenswrapper[4558]: I0120 17:08:24.613610 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fbh6x\" (UniqueName: \"kubernetes.io/projected/cb6e616b-5e0c-47d8-a11b-2434eb5ec410-kube-api-access-fbh6x\") pod \"redhat-marketplace-b56m6\" (UID: \"cb6e616b-5e0c-47d8-a11b-2434eb5ec410\") " pod="openshift-marketplace/redhat-marketplace-b56m6" Jan 20 17:08:24 crc kubenswrapper[4558]: I0120 17:08:24.613697 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb6e616b-5e0c-47d8-a11b-2434eb5ec410-catalog-content\") pod \"redhat-marketplace-b56m6\" (UID: \"cb6e616b-5e0c-47d8-a11b-2434eb5ec410\") " pod="openshift-marketplace/redhat-marketplace-b56m6" Jan 20 17:08:24 crc kubenswrapper[4558]: I0120 17:08:24.613786 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb6e616b-5e0c-47d8-a11b-2434eb5ec410-utilities\") pod \"redhat-marketplace-b56m6\" (UID: \"cb6e616b-5e0c-47d8-a11b-2434eb5ec410\") " pod="openshift-marketplace/redhat-marketplace-b56m6" Jan 20 17:08:24 crc kubenswrapper[4558]: I0120 17:08:24.715540 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fbh6x\" (UniqueName: \"kubernetes.io/projected/cb6e616b-5e0c-47d8-a11b-2434eb5ec410-kube-api-access-fbh6x\") pod \"redhat-marketplace-b56m6\" (UID: \"cb6e616b-5e0c-47d8-a11b-2434eb5ec410\") " pod="openshift-marketplace/redhat-marketplace-b56m6" Jan 20 17:08:24 crc kubenswrapper[4558]: I0120 17:08:24.715611 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb6e616b-5e0c-47d8-a11b-2434eb5ec410-catalog-content\") pod \"redhat-marketplace-b56m6\" (UID: \"cb6e616b-5e0c-47d8-a11b-2434eb5ec410\") " pod="openshift-marketplace/redhat-marketplace-b56m6" Jan 20 17:08:24 crc kubenswrapper[4558]: I0120 17:08:24.715685 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb6e616b-5e0c-47d8-a11b-2434eb5ec410-utilities\") pod \"redhat-marketplace-b56m6\" (UID: \"cb6e616b-5e0c-47d8-a11b-2434eb5ec410\") " pod="openshift-marketplace/redhat-marketplace-b56m6" 
Jan 20 17:08:24 crc kubenswrapper[4558]: I0120 17:08:24.716206 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb6e616b-5e0c-47d8-a11b-2434eb5ec410-catalog-content\") pod \"redhat-marketplace-b56m6\" (UID: \"cb6e616b-5e0c-47d8-a11b-2434eb5ec410\") " pod="openshift-marketplace/redhat-marketplace-b56m6" Jan 20 17:08:24 crc kubenswrapper[4558]: I0120 17:08:24.716274 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb6e616b-5e0c-47d8-a11b-2434eb5ec410-utilities\") pod \"redhat-marketplace-b56m6\" (UID: \"cb6e616b-5e0c-47d8-a11b-2434eb5ec410\") " pod="openshift-marketplace/redhat-marketplace-b56m6" Jan 20 17:08:24 crc kubenswrapper[4558]: I0120 17:08:24.737486 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fbh6x\" (UniqueName: \"kubernetes.io/projected/cb6e616b-5e0c-47d8-a11b-2434eb5ec410-kube-api-access-fbh6x\") pod \"redhat-marketplace-b56m6\" (UID: \"cb6e616b-5e0c-47d8-a11b-2434eb5ec410\") " pod="openshift-marketplace/redhat-marketplace-b56m6" Jan 20 17:08:24 crc kubenswrapper[4558]: I0120 17:08:24.829236 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-b56m6" Jan 20 17:08:25 crc kubenswrapper[4558]: I0120 17:08:25.237153 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-b56m6"] Jan 20 17:08:25 crc kubenswrapper[4558]: I0120 17:08:25.870603 4558 generic.go:334] "Generic (PLEG): container finished" podID="cb6e616b-5e0c-47d8-a11b-2434eb5ec410" containerID="a62ccd599fd4641719cc9815bdd137447105a274a2138b5fcc0bf39e5d0ffac2" exitCode=0 Jan 20 17:08:25 crc kubenswrapper[4558]: I0120 17:08:25.870673 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b56m6" event={"ID":"cb6e616b-5e0c-47d8-a11b-2434eb5ec410","Type":"ContainerDied","Data":"a62ccd599fd4641719cc9815bdd137447105a274a2138b5fcc0bf39e5d0ffac2"} Jan 20 17:08:25 crc kubenswrapper[4558]: I0120 17:08:25.870720 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b56m6" event={"ID":"cb6e616b-5e0c-47d8-a11b-2434eb5ec410","Type":"ContainerStarted","Data":"c50a0451fc27febd11e9d8e0f4919b3b83ba371d0b639cc29af4171197a45fe6"} Jan 20 17:08:25 crc kubenswrapper[4558]: I0120 17:08:25.960521 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/neutron-57bd5796c7-h85wn" Jan 20 17:08:26 crc kubenswrapper[4558]: I0120 17:08:26.029155 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-rv86l" Jan 20 17:08:26 crc kubenswrapper[4558]: I0120 17:08:26.073419 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-rv86l" Jan 20 17:08:26 crc kubenswrapper[4558]: I0120 17:08:26.572538 4558 scope.go:117] "RemoveContainer" containerID="a711a286282347590079d2447ef37fa9ed0f11085d6d7a65f85e65c1c2df608f" Jan 20 17:08:26 crc kubenswrapper[4558]: E0120 17:08:26.572850 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:08:26 crc kubenswrapper[4558]: I0120 17:08:26.883449 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b56m6" event={"ID":"cb6e616b-5e0c-47d8-a11b-2434eb5ec410","Type":"ContainerStarted","Data":"5108e8608988ff4f797e72eb2166c18268cdcdf43011cda95c997aaade334dbf"} Jan 20 17:08:27 crc kubenswrapper[4558]: I0120 17:08:27.273337 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/keystone-679bbddb99-dh2h2" Jan 20 17:08:27 crc kubenswrapper[4558]: I0120 17:08:27.895880 4558 generic.go:334] "Generic (PLEG): container finished" podID="cb6e616b-5e0c-47d8-a11b-2434eb5ec410" containerID="5108e8608988ff4f797e72eb2166c18268cdcdf43011cda95c997aaade334dbf" exitCode=0 Jan 20 17:08:27 crc kubenswrapper[4558]: I0120 17:08:27.895925 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b56m6" event={"ID":"cb6e616b-5e0c-47d8-a11b-2434eb5ec410","Type":"ContainerDied","Data":"5108e8608988ff4f797e72eb2166c18268cdcdf43011cda95c997aaade334dbf"} Jan 20 17:08:28 crc kubenswrapper[4558]: I0120 17:08:28.258203 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-qnbvz" Jan 20 17:08:28 crc kubenswrapper[4558]: I0120 17:08:28.258245 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-qnbvz" Jan 20 17:08:28 crc kubenswrapper[4558]: I0120 17:08:28.307898 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rv86l"] Jan 20 17:08:28 crc kubenswrapper[4558]: I0120 17:08:28.308075 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-rv86l" podUID="a48821c1-0266-44fd-b411-deb98533c7fd" containerName="registry-server" containerID="cri-o://338e52b1f6e39162aa9f0d266698bce010d60e6392b39c78cbc191be0eca4835" gracePeriod=2 Jan 20 17:08:28 crc kubenswrapper[4558]: I0120 17:08:28.341878 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-qnbvz" Jan 20 17:08:28 crc kubenswrapper[4558]: I0120 17:08:28.778714 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-rv86l" Jan 20 17:08:28 crc kubenswrapper[4558]: E0120 17:08:28.859097 4558 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4fb0c521_d465_47b9_b859_199f53143dca.slice/crio-720cf8a66b7cad8587f8ef26f04899153239b9d5f060876b68844c1340f9e7ba\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4fb0c521_d465_47b9_b859_199f53143dca.slice\": RecentStats: unable to find data in memory cache]" Jan 20 17:08:28 crc kubenswrapper[4558]: I0120 17:08:28.905544 4558 generic.go:334] "Generic (PLEG): container finished" podID="a48821c1-0266-44fd-b411-deb98533c7fd" containerID="338e52b1f6e39162aa9f0d266698bce010d60e6392b39c78cbc191be0eca4835" exitCode=0 Jan 20 17:08:28 crc kubenswrapper[4558]: I0120 17:08:28.905601 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rv86l" event={"ID":"a48821c1-0266-44fd-b411-deb98533c7fd","Type":"ContainerDied","Data":"338e52b1f6e39162aa9f0d266698bce010d60e6392b39c78cbc191be0eca4835"} Jan 20 17:08:28 crc kubenswrapper[4558]: I0120 17:08:28.905627 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rv86l" event={"ID":"a48821c1-0266-44fd-b411-deb98533c7fd","Type":"ContainerDied","Data":"7ad6a77ff3cc90289d2f0e9be9064a4bb240c2344f535d677a5379fa19da9d62"} Jan 20 17:08:28 crc kubenswrapper[4558]: I0120 17:08:28.905643 4558 scope.go:117] "RemoveContainer" containerID="338e52b1f6e39162aa9f0d266698bce010d60e6392b39c78cbc191be0eca4835" Jan 20 17:08:28 crc kubenswrapper[4558]: I0120 17:08:28.905735 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rv86l" Jan 20 17:08:28 crc kubenswrapper[4558]: I0120 17:08:28.912267 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a48821c1-0266-44fd-b411-deb98533c7fd-utilities\") pod \"a48821c1-0266-44fd-b411-deb98533c7fd\" (UID: \"a48821c1-0266-44fd-b411-deb98533c7fd\") " Jan 20 17:08:28 crc kubenswrapper[4558]: I0120 17:08:28.913029 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b56m6" event={"ID":"cb6e616b-5e0c-47d8-a11b-2434eb5ec410","Type":"ContainerStarted","Data":"2ffc570d7d24e20bfa3862de6474073c278b4dc1198b512280dae9fe8a40c9e4"} Jan 20 17:08:28 crc kubenswrapper[4558]: I0120 17:08:28.913108 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a48821c1-0266-44fd-b411-deb98533c7fd-utilities" (OuterVolumeSpecName: "utilities") pod "a48821c1-0266-44fd-b411-deb98533c7fd" (UID: "a48821c1-0266-44fd-b411-deb98533c7fd"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:08:28 crc kubenswrapper[4558]: I0120 17:08:28.913427 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p42l6\" (UniqueName: \"kubernetes.io/projected/a48821c1-0266-44fd-b411-deb98533c7fd-kube-api-access-p42l6\") pod \"a48821c1-0266-44fd-b411-deb98533c7fd\" (UID: \"a48821c1-0266-44fd-b411-deb98533c7fd\") " Jan 20 17:08:28 crc kubenswrapper[4558]: I0120 17:08:28.913836 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a48821c1-0266-44fd-b411-deb98533c7fd-catalog-content\") pod \"a48821c1-0266-44fd-b411-deb98533c7fd\" (UID: \"a48821c1-0266-44fd-b411-deb98533c7fd\") " Jan 20 17:08:28 crc kubenswrapper[4558]: I0120 17:08:28.915209 4558 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a48821c1-0266-44fd-b411-deb98533c7fd-utilities\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:28 crc kubenswrapper[4558]: I0120 17:08:28.932383 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-b56m6" podStartSLOduration=2.177027601 podStartE2EDuration="4.932371065s" podCreationTimestamp="2026-01-20 17:08:24 +0000 UTC" firstStartedPulling="2026-01-20 17:08:25.873026349 +0000 UTC m=+1599.633364315" lastFinishedPulling="2026-01-20 17:08:28.628369812 +0000 UTC m=+1602.388707779" observedRunningTime="2026-01-20 17:08:28.929696707 +0000 UTC m=+1602.690034704" watchObservedRunningTime="2026-01-20 17:08:28.932371065 +0000 UTC m=+1602.692709033" Jan 20 17:08:28 crc kubenswrapper[4558]: I0120 17:08:28.936832 4558 scope.go:117] "RemoveContainer" containerID="a2c8de5cf376f04d4ec555ec3f89d8183c554a60e63ae583df7efab61c9f6922" Jan 20 17:08:28 crc kubenswrapper[4558]: I0120 17:08:28.938030 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a48821c1-0266-44fd-b411-deb98533c7fd-kube-api-access-p42l6" (OuterVolumeSpecName: "kube-api-access-p42l6") pod "a48821c1-0266-44fd-b411-deb98533c7fd" (UID: "a48821c1-0266-44fd-b411-deb98533c7fd"). InnerVolumeSpecName "kube-api-access-p42l6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:08:28 crc kubenswrapper[4558]: I0120 17:08:28.963444 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-qnbvz" Jan 20 17:08:28 crc kubenswrapper[4558]: I0120 17:08:28.981901 4558 scope.go:117] "RemoveContainer" containerID="cbc7e2a0f425c84798b36890852c5db9475ba5f8371ac737713a16c3e433d6e7" Jan 20 17:08:29 crc kubenswrapper[4558]: I0120 17:08:29.025564 4558 scope.go:117] "RemoveContainer" containerID="338e52b1f6e39162aa9f0d266698bce010d60e6392b39c78cbc191be0eca4835" Jan 20 17:08:29 crc kubenswrapper[4558]: E0120 17:08:29.028629 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"338e52b1f6e39162aa9f0d266698bce010d60e6392b39c78cbc191be0eca4835\": container with ID starting with 338e52b1f6e39162aa9f0d266698bce010d60e6392b39c78cbc191be0eca4835 not found: ID does not exist" containerID="338e52b1f6e39162aa9f0d266698bce010d60e6392b39c78cbc191be0eca4835" Jan 20 17:08:29 crc kubenswrapper[4558]: I0120 17:08:29.028733 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"338e52b1f6e39162aa9f0d266698bce010d60e6392b39c78cbc191be0eca4835"} err="failed to get container status \"338e52b1f6e39162aa9f0d266698bce010d60e6392b39c78cbc191be0eca4835\": rpc error: code = NotFound desc = could not find container \"338e52b1f6e39162aa9f0d266698bce010d60e6392b39c78cbc191be0eca4835\": container with ID starting with 338e52b1f6e39162aa9f0d266698bce010d60e6392b39c78cbc191be0eca4835 not found: ID does not exist" Jan 20 17:08:29 crc kubenswrapper[4558]: I0120 17:08:29.028778 4558 scope.go:117] "RemoveContainer" containerID="a2c8de5cf376f04d4ec555ec3f89d8183c554a60e63ae583df7efab61c9f6922" Jan 20 17:08:29 crc kubenswrapper[4558]: E0120 17:08:29.030792 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a2c8de5cf376f04d4ec555ec3f89d8183c554a60e63ae583df7efab61c9f6922\": container with ID starting with a2c8de5cf376f04d4ec555ec3f89d8183c554a60e63ae583df7efab61c9f6922 not found: ID does not exist" containerID="a2c8de5cf376f04d4ec555ec3f89d8183c554a60e63ae583df7efab61c9f6922" Jan 20 17:08:29 crc kubenswrapper[4558]: I0120 17:08:29.030857 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a2c8de5cf376f04d4ec555ec3f89d8183c554a60e63ae583df7efab61c9f6922"} err="failed to get container status \"a2c8de5cf376f04d4ec555ec3f89d8183c554a60e63ae583df7efab61c9f6922\": rpc error: code = NotFound desc = could not find container \"a2c8de5cf376f04d4ec555ec3f89d8183c554a60e63ae583df7efab61c9f6922\": container with ID starting with a2c8de5cf376f04d4ec555ec3f89d8183c554a60e63ae583df7efab61c9f6922 not found: ID does not exist" Jan 20 17:08:29 crc kubenswrapper[4558]: I0120 17:08:29.030900 4558 scope.go:117] "RemoveContainer" containerID="cbc7e2a0f425c84798b36890852c5db9475ba5f8371ac737713a16c3e433d6e7" Jan 20 17:08:29 crc kubenswrapper[4558]: E0120 17:08:29.031444 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cbc7e2a0f425c84798b36890852c5db9475ba5f8371ac737713a16c3e433d6e7\": container with ID starting with cbc7e2a0f425c84798b36890852c5db9475ba5f8371ac737713a16c3e433d6e7 not found: ID does not exist" containerID="cbc7e2a0f425c84798b36890852c5db9475ba5f8371ac737713a16c3e433d6e7" Jan 20 
17:08:29 crc kubenswrapper[4558]: I0120 17:08:29.031484 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cbc7e2a0f425c84798b36890852c5db9475ba5f8371ac737713a16c3e433d6e7"} err="failed to get container status \"cbc7e2a0f425c84798b36890852c5db9475ba5f8371ac737713a16c3e433d6e7\": rpc error: code = NotFound desc = could not find container \"cbc7e2a0f425c84798b36890852c5db9475ba5f8371ac737713a16c3e433d6e7\": container with ID starting with cbc7e2a0f425c84798b36890852c5db9475ba5f8371ac737713a16c3e433d6e7 not found: ID does not exist" Jan 20 17:08:29 crc kubenswrapper[4558]: I0120 17:08:29.035553 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p42l6\" (UniqueName: \"kubernetes.io/projected/a48821c1-0266-44fd-b411-deb98533c7fd-kube-api-access-p42l6\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:29 crc kubenswrapper[4558]: I0120 17:08:29.035978 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a48821c1-0266-44fd-b411-deb98533c7fd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a48821c1-0266-44fd-b411-deb98533c7fd" (UID: "a48821c1-0266-44fd-b411-deb98533c7fd"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:08:29 crc kubenswrapper[4558]: I0120 17:08:29.137030 4558 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a48821c1-0266-44fd-b411-deb98533c7fd-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:29 crc kubenswrapper[4558]: I0120 17:08:29.245319 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rv86l"] Jan 20 17:08:29 crc kubenswrapper[4558]: I0120 17:08:29.251227 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-rv86l"] Jan 20 17:08:29 crc kubenswrapper[4558]: I0120 17:08:29.338376 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/neutron-859ff747c8-5c47j" Jan 20 17:08:29 crc kubenswrapper[4558]: I0120 17:08:29.387605 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-57bd5796c7-h85wn"] Jan 20 17:08:29 crc kubenswrapper[4558]: I0120 17:08:29.387879 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-57bd5796c7-h85wn" podUID="e341749a-5099-4cd4-9c62-49231106d844" containerName="neutron-api" containerID="cri-o://69e5cfa78cd1d6c40c39fac464ccf3b24fb3e23f3a0c1d4ed3d888269761249d" gracePeriod=30 Jan 20 17:08:29 crc kubenswrapper[4558]: I0120 17:08:29.388051 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-57bd5796c7-h85wn" podUID="e341749a-5099-4cd4-9c62-49231106d844" containerName="neutron-httpd" containerID="cri-o://b2a69304487e5bb82b84e284bd744a49a17feb7082f8f7029ee3e0786bd5f530" gracePeriod=30 Jan 20 17:08:29 crc kubenswrapper[4558]: I0120 17:08:29.649371 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:08:29 crc kubenswrapper[4558]: E0120 17:08:29.650131 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a48821c1-0266-44fd-b411-deb98533c7fd" containerName="registry-server" Jan 20 17:08:29 crc kubenswrapper[4558]: I0120 17:08:29.650226 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a48821c1-0266-44fd-b411-deb98533c7fd" 
containerName="registry-server" Jan 20 17:08:29 crc kubenswrapper[4558]: E0120 17:08:29.650307 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a48821c1-0266-44fd-b411-deb98533c7fd" containerName="extract-utilities" Jan 20 17:08:29 crc kubenswrapper[4558]: I0120 17:08:29.650365 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a48821c1-0266-44fd-b411-deb98533c7fd" containerName="extract-utilities" Jan 20 17:08:29 crc kubenswrapper[4558]: E0120 17:08:29.650422 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a48821c1-0266-44fd-b411-deb98533c7fd" containerName="extract-content" Jan 20 17:08:29 crc kubenswrapper[4558]: I0120 17:08:29.650469 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a48821c1-0266-44fd-b411-deb98533c7fd" containerName="extract-content" Jan 20 17:08:29 crc kubenswrapper[4558]: I0120 17:08:29.650765 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a48821c1-0266-44fd-b411-deb98533c7fd" containerName="registry-server" Jan 20 17:08:29 crc kubenswrapper[4558]: I0120 17:08:29.652801 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstackclient" Jan 20 17:08:29 crc kubenswrapper[4558]: I0120 17:08:29.655421 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-config" Jan 20 17:08:29 crc kubenswrapper[4558]: I0120 17:08:29.655518 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"openstackclient-openstackclient-dockercfg-99bqm" Jan 20 17:08:29 crc kubenswrapper[4558]: I0120 17:08:29.657861 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"openstack-config-secret" Jan 20 17:08:29 crc kubenswrapper[4558]: I0120 17:08:29.675963 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:08:29 crc kubenswrapper[4558]: I0120 17:08:29.848927 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/6d6dfb4c-9a8d-405e-8ffd-288e446998cf-openstack-config-secret\") pod \"openstackclient\" (UID: \"6d6dfb4c-9a8d-405e-8ffd-288e446998cf\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:08:29 crc kubenswrapper[4558]: I0120 17:08:29.849286 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/6d6dfb4c-9a8d-405e-8ffd-288e446998cf-openstack-config\") pod \"openstackclient\" (UID: \"6d6dfb4c-9a8d-405e-8ffd-288e446998cf\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:08:29 crc kubenswrapper[4558]: I0120 17:08:29.849552 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d6dfb4c-9a8d-405e-8ffd-288e446998cf-combined-ca-bundle\") pod \"openstackclient\" (UID: \"6d6dfb4c-9a8d-405e-8ffd-288e446998cf\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:08:29 crc kubenswrapper[4558]: I0120 17:08:29.849943 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2wwp6\" (UniqueName: \"kubernetes.io/projected/6d6dfb4c-9a8d-405e-8ffd-288e446998cf-kube-api-access-2wwp6\") pod \"openstackclient\" (UID: \"6d6dfb4c-9a8d-405e-8ffd-288e446998cf\") " pod="openstack-kuttl-tests/openstackclient" 
Jan 20 17:08:29 crc kubenswrapper[4558]: I0120 17:08:29.926625 4558 generic.go:334] "Generic (PLEG): container finished" podID="e341749a-5099-4cd4-9c62-49231106d844" containerID="b2a69304487e5bb82b84e284bd744a49a17feb7082f8f7029ee3e0786bd5f530" exitCode=0 Jan 20 17:08:29 crc kubenswrapper[4558]: I0120 17:08:29.926726 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-57bd5796c7-h85wn" event={"ID":"e341749a-5099-4cd4-9c62-49231106d844","Type":"ContainerDied","Data":"b2a69304487e5bb82b84e284bd744a49a17feb7082f8f7029ee3e0786bd5f530"} Jan 20 17:08:29 crc kubenswrapper[4558]: I0120 17:08:29.952116 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d6dfb4c-9a8d-405e-8ffd-288e446998cf-combined-ca-bundle\") pod \"openstackclient\" (UID: \"6d6dfb4c-9a8d-405e-8ffd-288e446998cf\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:08:29 crc kubenswrapper[4558]: I0120 17:08:29.952211 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2wwp6\" (UniqueName: \"kubernetes.io/projected/6d6dfb4c-9a8d-405e-8ffd-288e446998cf-kube-api-access-2wwp6\") pod \"openstackclient\" (UID: \"6d6dfb4c-9a8d-405e-8ffd-288e446998cf\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:08:29 crc kubenswrapper[4558]: I0120 17:08:29.952284 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/6d6dfb4c-9a8d-405e-8ffd-288e446998cf-openstack-config-secret\") pod \"openstackclient\" (UID: \"6d6dfb4c-9a8d-405e-8ffd-288e446998cf\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:08:29 crc kubenswrapper[4558]: I0120 17:08:29.952306 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/6d6dfb4c-9a8d-405e-8ffd-288e446998cf-openstack-config\") pod \"openstackclient\" (UID: \"6d6dfb4c-9a8d-405e-8ffd-288e446998cf\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:08:29 crc kubenswrapper[4558]: I0120 17:08:29.953400 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/6d6dfb4c-9a8d-405e-8ffd-288e446998cf-openstack-config\") pod \"openstackclient\" (UID: \"6d6dfb4c-9a8d-405e-8ffd-288e446998cf\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:08:29 crc kubenswrapper[4558]: I0120 17:08:29.957362 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/6d6dfb4c-9a8d-405e-8ffd-288e446998cf-openstack-config-secret\") pod \"openstackclient\" (UID: \"6d6dfb4c-9a8d-405e-8ffd-288e446998cf\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:08:29 crc kubenswrapper[4558]: I0120 17:08:29.959297 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d6dfb4c-9a8d-405e-8ffd-288e446998cf-combined-ca-bundle\") pod \"openstackclient\" (UID: \"6d6dfb4c-9a8d-405e-8ffd-288e446998cf\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:08:29 crc kubenswrapper[4558]: I0120 17:08:29.969541 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2wwp6\" (UniqueName: \"kubernetes.io/projected/6d6dfb4c-9a8d-405e-8ffd-288e446998cf-kube-api-access-2wwp6\") pod \"openstackclient\" (UID: 
\"6d6dfb4c-9a8d-405e-8ffd-288e446998cf\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:08:29 crc kubenswrapper[4558]: I0120 17:08:29.977861 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstackclient" Jan 20 17:08:30 crc kubenswrapper[4558]: I0120 17:08:30.440430 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:08:30 crc kubenswrapper[4558]: I0120 17:08:30.574635 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a48821c1-0266-44fd-b411-deb98533c7fd" path="/var/lib/kubelet/pods/a48821c1-0266-44fd-b411-deb98533c7fd/volumes" Jan 20 17:08:30 crc kubenswrapper[4558]: I0120 17:08:30.905202 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-qnbvz"] Jan 20 17:08:30 crc kubenswrapper[4558]: I0120 17:08:30.942608 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-qnbvz" podUID="01123c84-b8ac-47f1-bf92-d3618c099c7e" containerName="registry-server" containerID="cri-o://473db2312f15d2e87ca8e362d6fd1f294f7a7c878491c2c1ec5d0012d988db43" gracePeriod=2 Jan 20 17:08:30 crc kubenswrapper[4558]: I0120 17:08:30.943079 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstackclient" event={"ID":"6d6dfb4c-9a8d-405e-8ffd-288e446998cf","Type":"ContainerStarted","Data":"be9c2c4b147215f87ea040240daeaf63995b7b21012666e0eb6d43c0ceb36adc"} Jan 20 17:08:30 crc kubenswrapper[4558]: I0120 17:08:30.943143 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstackclient" event={"ID":"6d6dfb4c-9a8d-405e-8ffd-288e446998cf","Type":"ContainerStarted","Data":"5e8932fdd09345e120902448d981a60d35d50169327dc6b1ff21a3165deb0559"} Jan 20 17:08:30 crc kubenswrapper[4558]: I0120 17:08:30.977180 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/openstackclient" podStartSLOduration=1.97713294 podStartE2EDuration="1.97713294s" podCreationTimestamp="2026-01-20 17:08:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:08:30.960783446 +0000 UTC m=+1604.721121403" watchObservedRunningTime="2026-01-20 17:08:30.97713294 +0000 UTC m=+1604.737470897" Jan 20 17:08:31 crc kubenswrapper[4558]: I0120 17:08:31.290340 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-qnbvz" Jan 20 17:08:31 crc kubenswrapper[4558]: I0120 17:08:31.385603 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/01123c84-b8ac-47f1-bf92-d3618c099c7e-catalog-content\") pod \"01123c84-b8ac-47f1-bf92-d3618c099c7e\" (UID: \"01123c84-b8ac-47f1-bf92-d3618c099c7e\") " Jan 20 17:08:31 crc kubenswrapper[4558]: I0120 17:08:31.385712 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/01123c84-b8ac-47f1-bf92-d3618c099c7e-utilities\") pod \"01123c84-b8ac-47f1-bf92-d3618c099c7e\" (UID: \"01123c84-b8ac-47f1-bf92-d3618c099c7e\") " Jan 20 17:08:31 crc kubenswrapper[4558]: I0120 17:08:31.385856 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rgwg6\" (UniqueName: \"kubernetes.io/projected/01123c84-b8ac-47f1-bf92-d3618c099c7e-kube-api-access-rgwg6\") pod \"01123c84-b8ac-47f1-bf92-d3618c099c7e\" (UID: \"01123c84-b8ac-47f1-bf92-d3618c099c7e\") " Jan 20 17:08:31 crc kubenswrapper[4558]: I0120 17:08:31.386730 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/01123c84-b8ac-47f1-bf92-d3618c099c7e-utilities" (OuterVolumeSpecName: "utilities") pod "01123c84-b8ac-47f1-bf92-d3618c099c7e" (UID: "01123c84-b8ac-47f1-bf92-d3618c099c7e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:08:31 crc kubenswrapper[4558]: I0120 17:08:31.391802 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01123c84-b8ac-47f1-bf92-d3618c099c7e-kube-api-access-rgwg6" (OuterVolumeSpecName: "kube-api-access-rgwg6") pod "01123c84-b8ac-47f1-bf92-d3618c099c7e" (UID: "01123c84-b8ac-47f1-bf92-d3618c099c7e"). InnerVolumeSpecName "kube-api-access-rgwg6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:08:31 crc kubenswrapper[4558]: I0120 17:08:31.427735 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/01123c84-b8ac-47f1-bf92-d3618c099c7e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "01123c84-b8ac-47f1-bf92-d3618c099c7e" (UID: "01123c84-b8ac-47f1-bf92-d3618c099c7e"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:08:31 crc kubenswrapper[4558]: I0120 17:08:31.487946 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rgwg6\" (UniqueName: \"kubernetes.io/projected/01123c84-b8ac-47f1-bf92-d3618c099c7e-kube-api-access-rgwg6\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:31 crc kubenswrapper[4558]: I0120 17:08:31.487987 4558 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/01123c84-b8ac-47f1-bf92-d3618c099c7e-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:31 crc kubenswrapper[4558]: I0120 17:08:31.488000 4558 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/01123c84-b8ac-47f1-bf92-d3618c099c7e-utilities\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:31 crc kubenswrapper[4558]: I0120 17:08:31.954047 4558 generic.go:334] "Generic (PLEG): container finished" podID="01123c84-b8ac-47f1-bf92-d3618c099c7e" containerID="473db2312f15d2e87ca8e362d6fd1f294f7a7c878491c2c1ec5d0012d988db43" exitCode=0 Jan 20 17:08:31 crc kubenswrapper[4558]: I0120 17:08:31.954123 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qnbvz" Jan 20 17:08:31 crc kubenswrapper[4558]: I0120 17:08:31.954112 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qnbvz" event={"ID":"01123c84-b8ac-47f1-bf92-d3618c099c7e","Type":"ContainerDied","Data":"473db2312f15d2e87ca8e362d6fd1f294f7a7c878491c2c1ec5d0012d988db43"} Jan 20 17:08:31 crc kubenswrapper[4558]: I0120 17:08:31.954554 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qnbvz" event={"ID":"01123c84-b8ac-47f1-bf92-d3618c099c7e","Type":"ContainerDied","Data":"a9a64797dc837ed64b392702e083f13f53a31d7e82b3fb26795d9a74322bb31a"} Jan 20 17:08:31 crc kubenswrapper[4558]: I0120 17:08:31.954578 4558 scope.go:117] "RemoveContainer" containerID="473db2312f15d2e87ca8e362d6fd1f294f7a7c878491c2c1ec5d0012d988db43" Jan 20 17:08:32 crc kubenswrapper[4558]: I0120 17:08:32.071372 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-qnbvz"] Jan 20 17:08:32 crc kubenswrapper[4558]: I0120 17:08:32.076704 4558 scope.go:117] "RemoveContainer" containerID="7b1d87fa270dcbcac81feb109be31e372f330b78ed80625fb307a5eb1cc1087a" Jan 20 17:08:32 crc kubenswrapper[4558]: I0120 17:08:32.081322 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-qnbvz"] Jan 20 17:08:32 crc kubenswrapper[4558]: I0120 17:08:32.104093 4558 scope.go:117] "RemoveContainer" containerID="28a5b2699e9d116eed0137c9dfeadaf4b2da6876c5215e27725d1e49a19b70d4" Jan 20 17:08:32 crc kubenswrapper[4558]: I0120 17:08:32.125079 4558 scope.go:117] "RemoveContainer" containerID="473db2312f15d2e87ca8e362d6fd1f294f7a7c878491c2c1ec5d0012d988db43" Jan 20 17:08:32 crc kubenswrapper[4558]: E0120 17:08:32.135287 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"473db2312f15d2e87ca8e362d6fd1f294f7a7c878491c2c1ec5d0012d988db43\": container with ID starting with 473db2312f15d2e87ca8e362d6fd1f294f7a7c878491c2c1ec5d0012d988db43 not found: ID does not exist" containerID="473db2312f15d2e87ca8e362d6fd1f294f7a7c878491c2c1ec5d0012d988db43" Jan 20 17:08:32 crc kubenswrapper[4558]: I0120 17:08:32.135320 
4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"473db2312f15d2e87ca8e362d6fd1f294f7a7c878491c2c1ec5d0012d988db43"} err="failed to get container status \"473db2312f15d2e87ca8e362d6fd1f294f7a7c878491c2c1ec5d0012d988db43\": rpc error: code = NotFound desc = could not find container \"473db2312f15d2e87ca8e362d6fd1f294f7a7c878491c2c1ec5d0012d988db43\": container with ID starting with 473db2312f15d2e87ca8e362d6fd1f294f7a7c878491c2c1ec5d0012d988db43 not found: ID does not exist" Jan 20 17:08:32 crc kubenswrapper[4558]: I0120 17:08:32.135350 4558 scope.go:117] "RemoveContainer" containerID="7b1d87fa270dcbcac81feb109be31e372f330b78ed80625fb307a5eb1cc1087a" Jan 20 17:08:32 crc kubenswrapper[4558]: E0120 17:08:32.136900 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7b1d87fa270dcbcac81feb109be31e372f330b78ed80625fb307a5eb1cc1087a\": container with ID starting with 7b1d87fa270dcbcac81feb109be31e372f330b78ed80625fb307a5eb1cc1087a not found: ID does not exist" containerID="7b1d87fa270dcbcac81feb109be31e372f330b78ed80625fb307a5eb1cc1087a" Jan 20 17:08:32 crc kubenswrapper[4558]: I0120 17:08:32.136964 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7b1d87fa270dcbcac81feb109be31e372f330b78ed80625fb307a5eb1cc1087a"} err="failed to get container status \"7b1d87fa270dcbcac81feb109be31e372f330b78ed80625fb307a5eb1cc1087a\": rpc error: code = NotFound desc = could not find container \"7b1d87fa270dcbcac81feb109be31e372f330b78ed80625fb307a5eb1cc1087a\": container with ID starting with 7b1d87fa270dcbcac81feb109be31e372f330b78ed80625fb307a5eb1cc1087a not found: ID does not exist" Jan 20 17:08:32 crc kubenswrapper[4558]: I0120 17:08:32.136982 4558 scope.go:117] "RemoveContainer" containerID="28a5b2699e9d116eed0137c9dfeadaf4b2da6876c5215e27725d1e49a19b70d4" Jan 20 17:08:32 crc kubenswrapper[4558]: E0120 17:08:32.137240 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"28a5b2699e9d116eed0137c9dfeadaf4b2da6876c5215e27725d1e49a19b70d4\": container with ID starting with 28a5b2699e9d116eed0137c9dfeadaf4b2da6876c5215e27725d1e49a19b70d4 not found: ID does not exist" containerID="28a5b2699e9d116eed0137c9dfeadaf4b2da6876c5215e27725d1e49a19b70d4" Jan 20 17:08:32 crc kubenswrapper[4558]: I0120 17:08:32.137282 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"28a5b2699e9d116eed0137c9dfeadaf4b2da6876c5215e27725d1e49a19b70d4"} err="failed to get container status \"28a5b2699e9d116eed0137c9dfeadaf4b2da6876c5215e27725d1e49a19b70d4\": rpc error: code = NotFound desc = could not find container \"28a5b2699e9d116eed0137c9dfeadaf4b2da6876c5215e27725d1e49a19b70d4\": container with ID starting with 28a5b2699e9d116eed0137c9dfeadaf4b2da6876c5215e27725d1e49a19b70d4 not found: ID does not exist" Jan 20 17:08:32 crc kubenswrapper[4558]: I0120 17:08:32.332517 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-57bd5796c7-h85wn" Jan 20 17:08:32 crc kubenswrapper[4558]: I0120 17:08:32.416686 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e341749a-5099-4cd4-9c62-49231106d844-combined-ca-bundle\") pod \"e341749a-5099-4cd4-9c62-49231106d844\" (UID: \"e341749a-5099-4cd4-9c62-49231106d844\") " Jan 20 17:08:32 crc kubenswrapper[4558]: I0120 17:08:32.416969 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k5bn6\" (UniqueName: \"kubernetes.io/projected/e341749a-5099-4cd4-9c62-49231106d844-kube-api-access-k5bn6\") pod \"e341749a-5099-4cd4-9c62-49231106d844\" (UID: \"e341749a-5099-4cd4-9c62-49231106d844\") " Jan 20 17:08:32 crc kubenswrapper[4558]: I0120 17:08:32.417134 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/e341749a-5099-4cd4-9c62-49231106d844-httpd-config\") pod \"e341749a-5099-4cd4-9c62-49231106d844\" (UID: \"e341749a-5099-4cd4-9c62-49231106d844\") " Jan 20 17:08:32 crc kubenswrapper[4558]: I0120 17:08:32.417234 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/e341749a-5099-4cd4-9c62-49231106d844-ovndb-tls-certs\") pod \"e341749a-5099-4cd4-9c62-49231106d844\" (UID: \"e341749a-5099-4cd4-9c62-49231106d844\") " Jan 20 17:08:32 crc kubenswrapper[4558]: I0120 17:08:32.417335 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/e341749a-5099-4cd4-9c62-49231106d844-config\") pod \"e341749a-5099-4cd4-9c62-49231106d844\" (UID: \"e341749a-5099-4cd4-9c62-49231106d844\") " Jan 20 17:08:32 crc kubenswrapper[4558]: I0120 17:08:32.422233 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e341749a-5099-4cd4-9c62-49231106d844-kube-api-access-k5bn6" (OuterVolumeSpecName: "kube-api-access-k5bn6") pod "e341749a-5099-4cd4-9c62-49231106d844" (UID: "e341749a-5099-4cd4-9c62-49231106d844"). InnerVolumeSpecName "kube-api-access-k5bn6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:08:32 crc kubenswrapper[4558]: I0120 17:08:32.422339 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e341749a-5099-4cd4-9c62-49231106d844-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "e341749a-5099-4cd4-9c62-49231106d844" (UID: "e341749a-5099-4cd4-9c62-49231106d844"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:08:32 crc kubenswrapper[4558]: I0120 17:08:32.459372 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e341749a-5099-4cd4-9c62-49231106d844-config" (OuterVolumeSpecName: "config") pod "e341749a-5099-4cd4-9c62-49231106d844" (UID: "e341749a-5099-4cd4-9c62-49231106d844"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:08:32 crc kubenswrapper[4558]: I0120 17:08:32.487112 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e341749a-5099-4cd4-9c62-49231106d844-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e341749a-5099-4cd4-9c62-49231106d844" (UID: "e341749a-5099-4cd4-9c62-49231106d844"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:08:32 crc kubenswrapper[4558]: I0120 17:08:32.501046 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e341749a-5099-4cd4-9c62-49231106d844-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "e341749a-5099-4cd4-9c62-49231106d844" (UID: "e341749a-5099-4cd4-9c62-49231106d844"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:08:32 crc kubenswrapper[4558]: I0120 17:08:32.518427 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e341749a-5099-4cd4-9c62-49231106d844-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:32 crc kubenswrapper[4558]: I0120 17:08:32.518460 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k5bn6\" (UniqueName: \"kubernetes.io/projected/e341749a-5099-4cd4-9c62-49231106d844-kube-api-access-k5bn6\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:32 crc kubenswrapper[4558]: I0120 17:08:32.518474 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/e341749a-5099-4cd4-9c62-49231106d844-httpd-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:32 crc kubenswrapper[4558]: I0120 17:08:32.518484 4558 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/e341749a-5099-4cd4-9c62-49231106d844-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:32 crc kubenswrapper[4558]: I0120 17:08:32.518494 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/e341749a-5099-4cd4-9c62-49231106d844-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:32 crc kubenswrapper[4558]: I0120 17:08:32.574813 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01123c84-b8ac-47f1-bf92-d3618c099c7e" path="/var/lib/kubelet/pods/01123c84-b8ac-47f1-bf92-d3618c099c7e/volumes" Jan 20 17:08:32 crc kubenswrapper[4558]: I0120 17:08:32.968197 4558 generic.go:334] "Generic (PLEG): container finished" podID="e341749a-5099-4cd4-9c62-49231106d844" containerID="69e5cfa78cd1d6c40c39fac464ccf3b24fb3e23f3a0c1d4ed3d888269761249d" exitCode=0 Jan 20 17:08:32 crc kubenswrapper[4558]: I0120 17:08:32.968256 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-57bd5796c7-h85wn" event={"ID":"e341749a-5099-4cd4-9c62-49231106d844","Type":"ContainerDied","Data":"69e5cfa78cd1d6c40c39fac464ccf3b24fb3e23f3a0c1d4ed3d888269761249d"} Jan 20 17:08:32 crc kubenswrapper[4558]: I0120 17:08:32.968292 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-57bd5796c7-h85wn" event={"ID":"e341749a-5099-4cd4-9c62-49231106d844","Type":"ContainerDied","Data":"c48d0404d532b820ea07e23cff8cfff5365950283202a6aed24cd27369498a3e"} Jan 20 17:08:32 crc kubenswrapper[4558]: I0120 17:08:32.968316 4558 scope.go:117] "RemoveContainer" containerID="b2a69304487e5bb82b84e284bd744a49a17feb7082f8f7029ee3e0786bd5f530" Jan 20 17:08:32 crc kubenswrapper[4558]: I0120 17:08:32.968501 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-57bd5796c7-h85wn" Jan 20 17:08:32 crc kubenswrapper[4558]: I0120 17:08:32.994770 4558 scope.go:117] "RemoveContainer" containerID="69e5cfa78cd1d6c40c39fac464ccf3b24fb3e23f3a0c1d4ed3d888269761249d" Jan 20 17:08:32 crc kubenswrapper[4558]: I0120 17:08:32.995418 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-57bd5796c7-h85wn"] Jan 20 17:08:33 crc kubenswrapper[4558]: I0120 17:08:33.021964 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-57bd5796c7-h85wn"] Jan 20 17:08:33 crc kubenswrapper[4558]: I0120 17:08:33.043565 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/swift-proxy-69ddc9f468-cx7lb"] Jan 20 17:08:33 crc kubenswrapper[4558]: E0120 17:08:33.044055 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01123c84-b8ac-47f1-bf92-d3618c099c7e" containerName="extract-utilities" Jan 20 17:08:33 crc kubenswrapper[4558]: I0120 17:08:33.044078 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="01123c84-b8ac-47f1-bf92-d3618c099c7e" containerName="extract-utilities" Jan 20 17:08:33 crc kubenswrapper[4558]: E0120 17:08:33.044104 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e341749a-5099-4cd4-9c62-49231106d844" containerName="neutron-httpd" Jan 20 17:08:33 crc kubenswrapper[4558]: I0120 17:08:33.044110 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e341749a-5099-4cd4-9c62-49231106d844" containerName="neutron-httpd" Jan 20 17:08:33 crc kubenswrapper[4558]: E0120 17:08:33.044136 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01123c84-b8ac-47f1-bf92-d3618c099c7e" containerName="extract-content" Jan 20 17:08:33 crc kubenswrapper[4558]: I0120 17:08:33.044142 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="01123c84-b8ac-47f1-bf92-d3618c099c7e" containerName="extract-content" Jan 20 17:08:33 crc kubenswrapper[4558]: E0120 17:08:33.044178 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01123c84-b8ac-47f1-bf92-d3618c099c7e" containerName="registry-server" Jan 20 17:08:33 crc kubenswrapper[4558]: I0120 17:08:33.044185 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="01123c84-b8ac-47f1-bf92-d3618c099c7e" containerName="registry-server" Jan 20 17:08:33 crc kubenswrapper[4558]: E0120 17:08:33.044200 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e341749a-5099-4cd4-9c62-49231106d844" containerName="neutron-api" Jan 20 17:08:33 crc kubenswrapper[4558]: I0120 17:08:33.044205 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e341749a-5099-4cd4-9c62-49231106d844" containerName="neutron-api" Jan 20 17:08:33 crc kubenswrapper[4558]: I0120 17:08:33.044444 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e341749a-5099-4cd4-9c62-49231106d844" containerName="neutron-api" Jan 20 17:08:33 crc kubenswrapper[4558]: I0120 17:08:33.044465 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="01123c84-b8ac-47f1-bf92-d3618c099c7e" containerName="registry-server" Jan 20 17:08:33 crc kubenswrapper[4558]: I0120 17:08:33.044474 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e341749a-5099-4cd4-9c62-49231106d844" containerName="neutron-httpd" Jan 20 17:08:33 crc kubenswrapper[4558]: I0120 17:08:33.045519 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/swift-proxy-69ddc9f468-cx7lb" Jan 20 17:08:33 crc kubenswrapper[4558]: I0120 17:08:33.050782 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"swift-proxy-config-data" Jan 20 17:08:33 crc kubenswrapper[4558]: I0120 17:08:33.050880 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-swift-public-svc" Jan 20 17:08:33 crc kubenswrapper[4558]: I0120 17:08:33.051022 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-proxy-69ddc9f468-cx7lb"] Jan 20 17:08:33 crc kubenswrapper[4558]: I0120 17:08:33.054229 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-swift-internal-svc" Jan 20 17:08:33 crc kubenswrapper[4558]: I0120 17:08:33.055945 4558 scope.go:117] "RemoveContainer" containerID="b2a69304487e5bb82b84e284bd744a49a17feb7082f8f7029ee3e0786bd5f530" Jan 20 17:08:33 crc kubenswrapper[4558]: E0120 17:08:33.059369 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b2a69304487e5bb82b84e284bd744a49a17feb7082f8f7029ee3e0786bd5f530\": container with ID starting with b2a69304487e5bb82b84e284bd744a49a17feb7082f8f7029ee3e0786bd5f530 not found: ID does not exist" containerID="b2a69304487e5bb82b84e284bd744a49a17feb7082f8f7029ee3e0786bd5f530" Jan 20 17:08:33 crc kubenswrapper[4558]: I0120 17:08:33.059407 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b2a69304487e5bb82b84e284bd744a49a17feb7082f8f7029ee3e0786bd5f530"} err="failed to get container status \"b2a69304487e5bb82b84e284bd744a49a17feb7082f8f7029ee3e0786bd5f530\": rpc error: code = NotFound desc = could not find container \"b2a69304487e5bb82b84e284bd744a49a17feb7082f8f7029ee3e0786bd5f530\": container with ID starting with b2a69304487e5bb82b84e284bd744a49a17feb7082f8f7029ee3e0786bd5f530 not found: ID does not exist" Jan 20 17:08:33 crc kubenswrapper[4558]: I0120 17:08:33.059432 4558 scope.go:117] "RemoveContainer" containerID="69e5cfa78cd1d6c40c39fac464ccf3b24fb3e23f3a0c1d4ed3d888269761249d" Jan 20 17:08:33 crc kubenswrapper[4558]: E0120 17:08:33.064716 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"69e5cfa78cd1d6c40c39fac464ccf3b24fb3e23f3a0c1d4ed3d888269761249d\": container with ID starting with 69e5cfa78cd1d6c40c39fac464ccf3b24fb3e23f3a0c1d4ed3d888269761249d not found: ID does not exist" containerID="69e5cfa78cd1d6c40c39fac464ccf3b24fb3e23f3a0c1d4ed3d888269761249d" Jan 20 17:08:33 crc kubenswrapper[4558]: I0120 17:08:33.064816 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"69e5cfa78cd1d6c40c39fac464ccf3b24fb3e23f3a0c1d4ed3d888269761249d"} err="failed to get container status \"69e5cfa78cd1d6c40c39fac464ccf3b24fb3e23f3a0c1d4ed3d888269761249d\": rpc error: code = NotFound desc = could not find container \"69e5cfa78cd1d6c40c39fac464ccf3b24fb3e23f3a0c1d4ed3d888269761249d\": container with ID starting with 69e5cfa78cd1d6c40c39fac464ccf3b24fb3e23f3a0c1d4ed3d888269761249d not found: ID does not exist" Jan 20 17:08:33 crc kubenswrapper[4558]: I0120 17:08:33.136056 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lqfqz\" (UniqueName: 
\"kubernetes.io/projected/b33d5d09-77a2-4bc5-aa13-898a97b01b69-kube-api-access-lqfqz\") pod \"swift-proxy-69ddc9f468-cx7lb\" (UID: \"b33d5d09-77a2-4bc5-aa13-898a97b01b69\") " pod="openstack-kuttl-tests/swift-proxy-69ddc9f468-cx7lb" Jan 20 17:08:33 crc kubenswrapper[4558]: I0120 17:08:33.136128 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b33d5d09-77a2-4bc5-aa13-898a97b01b69-log-httpd\") pod \"swift-proxy-69ddc9f468-cx7lb\" (UID: \"b33d5d09-77a2-4bc5-aa13-898a97b01b69\") " pod="openstack-kuttl-tests/swift-proxy-69ddc9f468-cx7lb" Jan 20 17:08:33 crc kubenswrapper[4558]: I0120 17:08:33.136158 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b33d5d09-77a2-4bc5-aa13-898a97b01b69-config-data\") pod \"swift-proxy-69ddc9f468-cx7lb\" (UID: \"b33d5d09-77a2-4bc5-aa13-898a97b01b69\") " pod="openstack-kuttl-tests/swift-proxy-69ddc9f468-cx7lb" Jan 20 17:08:33 crc kubenswrapper[4558]: I0120 17:08:33.136265 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b33d5d09-77a2-4bc5-aa13-898a97b01b69-combined-ca-bundle\") pod \"swift-proxy-69ddc9f468-cx7lb\" (UID: \"b33d5d09-77a2-4bc5-aa13-898a97b01b69\") " pod="openstack-kuttl-tests/swift-proxy-69ddc9f468-cx7lb" Jan 20 17:08:33 crc kubenswrapper[4558]: I0120 17:08:33.136321 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b33d5d09-77a2-4bc5-aa13-898a97b01b69-internal-tls-certs\") pod \"swift-proxy-69ddc9f468-cx7lb\" (UID: \"b33d5d09-77a2-4bc5-aa13-898a97b01b69\") " pod="openstack-kuttl-tests/swift-proxy-69ddc9f468-cx7lb" Jan 20 17:08:33 crc kubenswrapper[4558]: I0120 17:08:33.136386 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b33d5d09-77a2-4bc5-aa13-898a97b01b69-etc-swift\") pod \"swift-proxy-69ddc9f468-cx7lb\" (UID: \"b33d5d09-77a2-4bc5-aa13-898a97b01b69\") " pod="openstack-kuttl-tests/swift-proxy-69ddc9f468-cx7lb" Jan 20 17:08:33 crc kubenswrapper[4558]: I0120 17:08:33.136452 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b33d5d09-77a2-4bc5-aa13-898a97b01b69-public-tls-certs\") pod \"swift-proxy-69ddc9f468-cx7lb\" (UID: \"b33d5d09-77a2-4bc5-aa13-898a97b01b69\") " pod="openstack-kuttl-tests/swift-proxy-69ddc9f468-cx7lb" Jan 20 17:08:33 crc kubenswrapper[4558]: I0120 17:08:33.136577 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b33d5d09-77a2-4bc5-aa13-898a97b01b69-run-httpd\") pod \"swift-proxy-69ddc9f468-cx7lb\" (UID: \"b33d5d09-77a2-4bc5-aa13-898a97b01b69\") " pod="openstack-kuttl-tests/swift-proxy-69ddc9f468-cx7lb" Jan 20 17:08:33 crc kubenswrapper[4558]: I0120 17:08:33.238224 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b33d5d09-77a2-4bc5-aa13-898a97b01b69-etc-swift\") pod \"swift-proxy-69ddc9f468-cx7lb\" (UID: \"b33d5d09-77a2-4bc5-aa13-898a97b01b69\") " pod="openstack-kuttl-tests/swift-proxy-69ddc9f468-cx7lb" Jan 20 17:08:33 crc 
kubenswrapper[4558]: I0120 17:08:33.238531 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b33d5d09-77a2-4bc5-aa13-898a97b01b69-public-tls-certs\") pod \"swift-proxy-69ddc9f468-cx7lb\" (UID: \"b33d5d09-77a2-4bc5-aa13-898a97b01b69\") " pod="openstack-kuttl-tests/swift-proxy-69ddc9f468-cx7lb" Jan 20 17:08:33 crc kubenswrapper[4558]: I0120 17:08:33.239102 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b33d5d09-77a2-4bc5-aa13-898a97b01b69-run-httpd\") pod \"swift-proxy-69ddc9f468-cx7lb\" (UID: \"b33d5d09-77a2-4bc5-aa13-898a97b01b69\") " pod="openstack-kuttl-tests/swift-proxy-69ddc9f468-cx7lb" Jan 20 17:08:33 crc kubenswrapper[4558]: I0120 17:08:33.239372 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lqfqz\" (UniqueName: \"kubernetes.io/projected/b33d5d09-77a2-4bc5-aa13-898a97b01b69-kube-api-access-lqfqz\") pod \"swift-proxy-69ddc9f468-cx7lb\" (UID: \"b33d5d09-77a2-4bc5-aa13-898a97b01b69\") " pod="openstack-kuttl-tests/swift-proxy-69ddc9f468-cx7lb" Jan 20 17:08:33 crc kubenswrapper[4558]: I0120 17:08:33.239460 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b33d5d09-77a2-4bc5-aa13-898a97b01b69-log-httpd\") pod \"swift-proxy-69ddc9f468-cx7lb\" (UID: \"b33d5d09-77a2-4bc5-aa13-898a97b01b69\") " pod="openstack-kuttl-tests/swift-proxy-69ddc9f468-cx7lb" Jan 20 17:08:33 crc kubenswrapper[4558]: I0120 17:08:33.239526 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b33d5d09-77a2-4bc5-aa13-898a97b01b69-config-data\") pod \"swift-proxy-69ddc9f468-cx7lb\" (UID: \"b33d5d09-77a2-4bc5-aa13-898a97b01b69\") " pod="openstack-kuttl-tests/swift-proxy-69ddc9f468-cx7lb" Jan 20 17:08:33 crc kubenswrapper[4558]: I0120 17:08:33.239639 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b33d5d09-77a2-4bc5-aa13-898a97b01b69-combined-ca-bundle\") pod \"swift-proxy-69ddc9f468-cx7lb\" (UID: \"b33d5d09-77a2-4bc5-aa13-898a97b01b69\") " pod="openstack-kuttl-tests/swift-proxy-69ddc9f468-cx7lb" Jan 20 17:08:33 crc kubenswrapper[4558]: I0120 17:08:33.239740 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b33d5d09-77a2-4bc5-aa13-898a97b01b69-internal-tls-certs\") pod \"swift-proxy-69ddc9f468-cx7lb\" (UID: \"b33d5d09-77a2-4bc5-aa13-898a97b01b69\") " pod="openstack-kuttl-tests/swift-proxy-69ddc9f468-cx7lb" Jan 20 17:08:33 crc kubenswrapper[4558]: I0120 17:08:33.239647 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b33d5d09-77a2-4bc5-aa13-898a97b01b69-run-httpd\") pod \"swift-proxy-69ddc9f468-cx7lb\" (UID: \"b33d5d09-77a2-4bc5-aa13-898a97b01b69\") " pod="openstack-kuttl-tests/swift-proxy-69ddc9f468-cx7lb" Jan 20 17:08:33 crc kubenswrapper[4558]: I0120 17:08:33.240153 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b33d5d09-77a2-4bc5-aa13-898a97b01b69-log-httpd\") pod \"swift-proxy-69ddc9f468-cx7lb\" (UID: \"b33d5d09-77a2-4bc5-aa13-898a97b01b69\") " pod="openstack-kuttl-tests/swift-proxy-69ddc9f468-cx7lb" Jan 
20 17:08:33 crc kubenswrapper[4558]: I0120 17:08:33.243323 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b33d5d09-77a2-4bc5-aa13-898a97b01b69-etc-swift\") pod \"swift-proxy-69ddc9f468-cx7lb\" (UID: \"b33d5d09-77a2-4bc5-aa13-898a97b01b69\") " pod="openstack-kuttl-tests/swift-proxy-69ddc9f468-cx7lb" Jan 20 17:08:33 crc kubenswrapper[4558]: I0120 17:08:33.243632 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b33d5d09-77a2-4bc5-aa13-898a97b01b69-public-tls-certs\") pod \"swift-proxy-69ddc9f468-cx7lb\" (UID: \"b33d5d09-77a2-4bc5-aa13-898a97b01b69\") " pod="openstack-kuttl-tests/swift-proxy-69ddc9f468-cx7lb" Jan 20 17:08:33 crc kubenswrapper[4558]: I0120 17:08:33.243729 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b33d5d09-77a2-4bc5-aa13-898a97b01b69-combined-ca-bundle\") pod \"swift-proxy-69ddc9f468-cx7lb\" (UID: \"b33d5d09-77a2-4bc5-aa13-898a97b01b69\") " pod="openstack-kuttl-tests/swift-proxy-69ddc9f468-cx7lb" Jan 20 17:08:33 crc kubenswrapper[4558]: I0120 17:08:33.244149 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b33d5d09-77a2-4bc5-aa13-898a97b01b69-config-data\") pod \"swift-proxy-69ddc9f468-cx7lb\" (UID: \"b33d5d09-77a2-4bc5-aa13-898a97b01b69\") " pod="openstack-kuttl-tests/swift-proxy-69ddc9f468-cx7lb" Jan 20 17:08:33 crc kubenswrapper[4558]: I0120 17:08:33.256724 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b33d5d09-77a2-4bc5-aa13-898a97b01b69-internal-tls-certs\") pod \"swift-proxy-69ddc9f468-cx7lb\" (UID: \"b33d5d09-77a2-4bc5-aa13-898a97b01b69\") " pod="openstack-kuttl-tests/swift-proxy-69ddc9f468-cx7lb" Jan 20 17:08:33 crc kubenswrapper[4558]: I0120 17:08:33.258698 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lqfqz\" (UniqueName: \"kubernetes.io/projected/b33d5d09-77a2-4bc5-aa13-898a97b01b69-kube-api-access-lqfqz\") pod \"swift-proxy-69ddc9f468-cx7lb\" (UID: \"b33d5d09-77a2-4bc5-aa13-898a97b01b69\") " pod="openstack-kuttl-tests/swift-proxy-69ddc9f468-cx7lb" Jan 20 17:08:33 crc kubenswrapper[4558]: I0120 17:08:33.362622 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/swift-proxy-69ddc9f468-cx7lb" Jan 20 17:08:33 crc kubenswrapper[4558]: I0120 17:08:33.776777 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:08:33 crc kubenswrapper[4558]: I0120 17:08:33.777573 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="90d455f2-55fa-4028-85a0-346675b2194d" containerName="proxy-httpd" containerID="cri-o://8083743d7d6d37e83f1bfb8607cb7050961576fa4bb93c383d46b5f5d4700062" gracePeriod=30 Jan 20 17:08:33 crc kubenswrapper[4558]: I0120 17:08:33.777642 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="90d455f2-55fa-4028-85a0-346675b2194d" containerName="sg-core" containerID="cri-o://b44c56e2a3bfa6f791c1435d62c65b4284e66eac297f83e08fcdbb309a19f3f6" gracePeriod=30 Jan 20 17:08:33 crc kubenswrapper[4558]: I0120 17:08:33.777813 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="90d455f2-55fa-4028-85a0-346675b2194d" containerName="ceilometer-notification-agent" containerID="cri-o://ff1eb8c91c9fbadd1d6273753e9b3efb911664d2db412ea5ed8946aecd1b1d70" gracePeriod=30 Jan 20 17:08:33 crc kubenswrapper[4558]: I0120 17:08:33.777865 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="90d455f2-55fa-4028-85a0-346675b2194d" containerName="ceilometer-central-agent" containerID="cri-o://a705863ebb9d52a220fb61755f22b86b64de0f93f5cff893ab6d746f51d915da" gracePeriod=30 Jan 20 17:08:33 crc kubenswrapper[4558]: I0120 17:08:33.822621 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-proxy-69ddc9f468-cx7lb"] Jan 20 17:08:33 crc kubenswrapper[4558]: W0120 17:08:33.835328 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb33d5d09_77a2_4bc5_aa13_898a97b01b69.slice/crio-c01ea335d1161cefaa9a0eb97ec5b14a014013a8001bcaeb2c3ff8dca2420c0f WatchSource:0}: Error finding container c01ea335d1161cefaa9a0eb97ec5b14a014013a8001bcaeb2c3ff8dca2420c0f: Status 404 returned error can't find the container with id c01ea335d1161cefaa9a0eb97ec5b14a014013a8001bcaeb2c3ff8dca2420c0f Jan 20 17:08:33 crc kubenswrapper[4558]: I0120 17:08:33.976053 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-69ddc9f468-cx7lb" event={"ID":"b33d5d09-77a2-4bc5-aa13-898a97b01b69","Type":"ContainerStarted","Data":"c01ea335d1161cefaa9a0eb97ec5b14a014013a8001bcaeb2c3ff8dca2420c0f"} Jan 20 17:08:33 crc kubenswrapper[4558]: I0120 17:08:33.978126 4558 generic.go:334] "Generic (PLEG): container finished" podID="90d455f2-55fa-4028-85a0-346675b2194d" containerID="b44c56e2a3bfa6f791c1435d62c65b4284e66eac297f83e08fcdbb309a19f3f6" exitCode=2 Jan 20 17:08:33 crc kubenswrapper[4558]: I0120 17:08:33.978185 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"90d455f2-55fa-4028-85a0-346675b2194d","Type":"ContainerDied","Data":"b44c56e2a3bfa6f791c1435d62c65b4284e66eac297f83e08fcdbb309a19f3f6"} Jan 20 17:08:34 crc kubenswrapper[4558]: I0120 17:08:34.578476 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e341749a-5099-4cd4-9c62-49231106d844" path="/var/lib/kubelet/pods/e341749a-5099-4cd4-9c62-49231106d844/volumes" Jan 20 
17:08:34 crc kubenswrapper[4558]: I0120 17:08:34.725731 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:34 crc kubenswrapper[4558]: I0120 17:08:34.773588 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/90d455f2-55fa-4028-85a0-346675b2194d-config-data\") pod \"90d455f2-55fa-4028-85a0-346675b2194d\" (UID: \"90d455f2-55fa-4028-85a0-346675b2194d\") " Jan 20 17:08:34 crc kubenswrapper[4558]: I0120 17:08:34.773635 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/90d455f2-55fa-4028-85a0-346675b2194d-run-httpd\") pod \"90d455f2-55fa-4028-85a0-346675b2194d\" (UID: \"90d455f2-55fa-4028-85a0-346675b2194d\") " Jan 20 17:08:34 crc kubenswrapper[4558]: I0120 17:08:34.773729 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/90d455f2-55fa-4028-85a0-346675b2194d-log-httpd\") pod \"90d455f2-55fa-4028-85a0-346675b2194d\" (UID: \"90d455f2-55fa-4028-85a0-346675b2194d\") " Jan 20 17:08:34 crc kubenswrapper[4558]: I0120 17:08:34.773808 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/90d455f2-55fa-4028-85a0-346675b2194d-sg-core-conf-yaml\") pod \"90d455f2-55fa-4028-85a0-346675b2194d\" (UID: \"90d455f2-55fa-4028-85a0-346675b2194d\") " Jan 20 17:08:34 crc kubenswrapper[4558]: I0120 17:08:34.773836 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8b6pg\" (UniqueName: \"kubernetes.io/projected/90d455f2-55fa-4028-85a0-346675b2194d-kube-api-access-8b6pg\") pod \"90d455f2-55fa-4028-85a0-346675b2194d\" (UID: \"90d455f2-55fa-4028-85a0-346675b2194d\") " Jan 20 17:08:34 crc kubenswrapper[4558]: I0120 17:08:34.773855 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/90d455f2-55fa-4028-85a0-346675b2194d-scripts\") pod \"90d455f2-55fa-4028-85a0-346675b2194d\" (UID: \"90d455f2-55fa-4028-85a0-346675b2194d\") " Jan 20 17:08:34 crc kubenswrapper[4558]: I0120 17:08:34.773916 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90d455f2-55fa-4028-85a0-346675b2194d-combined-ca-bundle\") pod \"90d455f2-55fa-4028-85a0-346675b2194d\" (UID: \"90d455f2-55fa-4028-85a0-346675b2194d\") " Jan 20 17:08:34 crc kubenswrapper[4558]: I0120 17:08:34.774466 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/90d455f2-55fa-4028-85a0-346675b2194d-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "90d455f2-55fa-4028-85a0-346675b2194d" (UID: "90d455f2-55fa-4028-85a0-346675b2194d"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:08:34 crc kubenswrapper[4558]: I0120 17:08:34.774615 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/90d455f2-55fa-4028-85a0-346675b2194d-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "90d455f2-55fa-4028-85a0-346675b2194d" (UID: "90d455f2-55fa-4028-85a0-346675b2194d"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:08:34 crc kubenswrapper[4558]: I0120 17:08:34.775025 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/90d455f2-55fa-4028-85a0-346675b2194d-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:34 crc kubenswrapper[4558]: I0120 17:08:34.775049 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/90d455f2-55fa-4028-85a0-346675b2194d-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:34 crc kubenswrapper[4558]: I0120 17:08:34.778040 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/90d455f2-55fa-4028-85a0-346675b2194d-scripts" (OuterVolumeSpecName: "scripts") pod "90d455f2-55fa-4028-85a0-346675b2194d" (UID: "90d455f2-55fa-4028-85a0-346675b2194d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:08:34 crc kubenswrapper[4558]: I0120 17:08:34.806354 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/90d455f2-55fa-4028-85a0-346675b2194d-kube-api-access-8b6pg" (OuterVolumeSpecName: "kube-api-access-8b6pg") pod "90d455f2-55fa-4028-85a0-346675b2194d" (UID: "90d455f2-55fa-4028-85a0-346675b2194d"). InnerVolumeSpecName "kube-api-access-8b6pg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:08:34 crc kubenswrapper[4558]: I0120 17:08:34.811305 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/90d455f2-55fa-4028-85a0-346675b2194d-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "90d455f2-55fa-4028-85a0-346675b2194d" (UID: "90d455f2-55fa-4028-85a0-346675b2194d"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:08:34 crc kubenswrapper[4558]: I0120 17:08:34.830705 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-b56m6" Jan 20 17:08:34 crc kubenswrapper[4558]: I0120 17:08:34.831026 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-b56m6" Jan 20 17:08:34 crc kubenswrapper[4558]: I0120 17:08:34.839228 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/90d455f2-55fa-4028-85a0-346675b2194d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "90d455f2-55fa-4028-85a0-346675b2194d" (UID: "90d455f2-55fa-4028-85a0-346675b2194d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:08:34 crc kubenswrapper[4558]: I0120 17:08:34.861727 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/90d455f2-55fa-4028-85a0-346675b2194d-config-data" (OuterVolumeSpecName: "config-data") pod "90d455f2-55fa-4028-85a0-346675b2194d" (UID: "90d455f2-55fa-4028-85a0-346675b2194d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:08:34 crc kubenswrapper[4558]: I0120 17:08:34.866721 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-b56m6" Jan 20 17:08:34 crc kubenswrapper[4558]: I0120 17:08:34.876803 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/90d455f2-55fa-4028-85a0-346675b2194d-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:34 crc kubenswrapper[4558]: I0120 17:08:34.876937 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/90d455f2-55fa-4028-85a0-346675b2194d-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:34 crc kubenswrapper[4558]: I0120 17:08:34.877016 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/90d455f2-55fa-4028-85a0-346675b2194d-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:34 crc kubenswrapper[4558]: I0120 17:08:34.877080 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8b6pg\" (UniqueName: \"kubernetes.io/projected/90d455f2-55fa-4028-85a0-346675b2194d-kube-api-access-8b6pg\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:34 crc kubenswrapper[4558]: I0120 17:08:34.877139 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90d455f2-55fa-4028-85a0-346675b2194d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:34.999726 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-69ddc9f468-cx7lb" event={"ID":"b33d5d09-77a2-4bc5-aa13-898a97b01b69","Type":"ContainerStarted","Data":"71bbd70a1c3421db19c774ca5977ad33d9b756863ccd7cb6a69d77b0a5690c71"} Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.000417 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/swift-proxy-69ddc9f468-cx7lb" Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.000436 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-69ddc9f468-cx7lb" event={"ID":"b33d5d09-77a2-4bc5-aa13-898a97b01b69","Type":"ContainerStarted","Data":"65dc4b617123648766af244b88586c5391acaca630b06a308276272002cf2765"} Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.003517 4558 generic.go:334] "Generic (PLEG): container finished" podID="90d455f2-55fa-4028-85a0-346675b2194d" containerID="8083743d7d6d37e83f1bfb8607cb7050961576fa4bb93c383d46b5f5d4700062" exitCode=0 Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.003546 4558 generic.go:334] "Generic (PLEG): container finished" podID="90d455f2-55fa-4028-85a0-346675b2194d" containerID="ff1eb8c91c9fbadd1d6273753e9b3efb911664d2db412ea5ed8946aecd1b1d70" exitCode=0 Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.003556 4558 generic.go:334] "Generic (PLEG): container finished" podID="90d455f2-55fa-4028-85a0-346675b2194d" containerID="a705863ebb9d52a220fb61755f22b86b64de0f93f5cff893ab6d746f51d915da" exitCode=0 Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.003555 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"90d455f2-55fa-4028-85a0-346675b2194d","Type":"ContainerDied","Data":"8083743d7d6d37e83f1bfb8607cb7050961576fa4bb93c383d46b5f5d4700062"} Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 
17:08:35.003598 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"90d455f2-55fa-4028-85a0-346675b2194d","Type":"ContainerDied","Data":"ff1eb8c91c9fbadd1d6273753e9b3efb911664d2db412ea5ed8946aecd1b1d70"} Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.003612 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"90d455f2-55fa-4028-85a0-346675b2194d","Type":"ContainerDied","Data":"a705863ebb9d52a220fb61755f22b86b64de0f93f5cff893ab6d746f51d915da"} Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.003623 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.003640 4558 scope.go:117] "RemoveContainer" containerID="8083743d7d6d37e83f1bfb8607cb7050961576fa4bb93c383d46b5f5d4700062" Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.003626 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"90d455f2-55fa-4028-85a0-346675b2194d","Type":"ContainerDied","Data":"275d48aaffa9e56938daffb561b32f7f37dfe6673cd13d571a8763291224f6ea"} Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.030576 4558 scope.go:117] "RemoveContainer" containerID="b44c56e2a3bfa6f791c1435d62c65b4284e66eac297f83e08fcdbb309a19f3f6" Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.032966 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/swift-proxy-69ddc9f468-cx7lb" podStartSLOduration=2.032944084 podStartE2EDuration="2.032944084s" podCreationTimestamp="2026-01-20 17:08:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:08:35.02043931 +0000 UTC m=+1608.780777276" watchObservedRunningTime="2026-01-20 17:08:35.032944084 +0000 UTC m=+1608.793282051" Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.068702 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-b56m6" Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.086101 4558 scope.go:117] "RemoveContainer" containerID="ff1eb8c91c9fbadd1d6273753e9b3efb911664d2db412ea5ed8946aecd1b1d70" Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.089119 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.096284 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.103451 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:08:35 crc kubenswrapper[4558]: E0120 17:08:35.103890 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="90d455f2-55fa-4028-85a0-346675b2194d" containerName="proxy-httpd" Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.103903 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="90d455f2-55fa-4028-85a0-346675b2194d" containerName="proxy-httpd" Jan 20 17:08:35 crc kubenswrapper[4558]: E0120 17:08:35.103923 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="90d455f2-55fa-4028-85a0-346675b2194d" containerName="sg-core" Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.103928 4558 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="90d455f2-55fa-4028-85a0-346675b2194d" containerName="sg-core" Jan 20 17:08:35 crc kubenswrapper[4558]: E0120 17:08:35.103941 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="90d455f2-55fa-4028-85a0-346675b2194d" containerName="ceilometer-notification-agent" Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.103949 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="90d455f2-55fa-4028-85a0-346675b2194d" containerName="ceilometer-notification-agent" Jan 20 17:08:35 crc kubenswrapper[4558]: E0120 17:08:35.103966 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="90d455f2-55fa-4028-85a0-346675b2194d" containerName="ceilometer-central-agent" Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.103972 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="90d455f2-55fa-4028-85a0-346675b2194d" containerName="ceilometer-central-agent" Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.104101 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="90d455f2-55fa-4028-85a0-346675b2194d" containerName="proxy-httpd" Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.104111 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="90d455f2-55fa-4028-85a0-346675b2194d" containerName="ceilometer-central-agent" Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.104119 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="90d455f2-55fa-4028-85a0-346675b2194d" containerName="sg-core" Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.104140 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="90d455f2-55fa-4028-85a0-346675b2194d" containerName="ceilometer-notification-agent" Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.107096 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.111022 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.121826 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.135272 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.223822 4558 scope.go:117] "RemoveContainer" containerID="a705863ebb9d52a220fb61755f22b86b64de0f93f5cff893ab6d746f51d915da" Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.289959 4558 scope.go:117] "RemoveContainer" containerID="8083743d7d6d37e83f1bfb8607cb7050961576fa4bb93c383d46b5f5d4700062" Jan 20 17:08:35 crc kubenswrapper[4558]: E0120 17:08:35.297254 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8083743d7d6d37e83f1bfb8607cb7050961576fa4bb93c383d46b5f5d4700062\": container with ID starting with 8083743d7d6d37e83f1bfb8607cb7050961576fa4bb93c383d46b5f5d4700062 not found: ID does not exist" containerID="8083743d7d6d37e83f1bfb8607cb7050961576fa4bb93c383d46b5f5d4700062" Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.297286 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8083743d7d6d37e83f1bfb8607cb7050961576fa4bb93c383d46b5f5d4700062"} err="failed to get container status \"8083743d7d6d37e83f1bfb8607cb7050961576fa4bb93c383d46b5f5d4700062\": rpc error: code = NotFound desc = could not find container \"8083743d7d6d37e83f1bfb8607cb7050961576fa4bb93c383d46b5f5d4700062\": container with ID starting with 8083743d7d6d37e83f1bfb8607cb7050961576fa4bb93c383d46b5f5d4700062 not found: ID does not exist" Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.297316 4558 scope.go:117] "RemoveContainer" containerID="b44c56e2a3bfa6f791c1435d62c65b4284e66eac297f83e08fcdbb309a19f3f6" Jan 20 17:08:35 crc kubenswrapper[4558]: E0120 17:08:35.308235 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b44c56e2a3bfa6f791c1435d62c65b4284e66eac297f83e08fcdbb309a19f3f6\": container with ID starting with b44c56e2a3bfa6f791c1435d62c65b4284e66eac297f83e08fcdbb309a19f3f6 not found: ID does not exist" containerID="b44c56e2a3bfa6f791c1435d62c65b4284e66eac297f83e08fcdbb309a19f3f6" Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.308259 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b44c56e2a3bfa6f791c1435d62c65b4284e66eac297f83e08fcdbb309a19f3f6"} err="failed to get container status \"b44c56e2a3bfa6f791c1435d62c65b4284e66eac297f83e08fcdbb309a19f3f6\": rpc error: code = NotFound desc = could not find container \"b44c56e2a3bfa6f791c1435d62c65b4284e66eac297f83e08fcdbb309a19f3f6\": container with ID starting with b44c56e2a3bfa6f791c1435d62c65b4284e66eac297f83e08fcdbb309a19f3f6 not found: ID does not exist" Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.308277 4558 scope.go:117] "RemoveContainer" containerID="ff1eb8c91c9fbadd1d6273753e9b3efb911664d2db412ea5ed8946aecd1b1d70" Jan 20 17:08:35 crc kubenswrapper[4558]: E0120 17:08:35.312237 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: 
code = NotFound desc = could not find container \"ff1eb8c91c9fbadd1d6273753e9b3efb911664d2db412ea5ed8946aecd1b1d70\": container with ID starting with ff1eb8c91c9fbadd1d6273753e9b3efb911664d2db412ea5ed8946aecd1b1d70 not found: ID does not exist" containerID="ff1eb8c91c9fbadd1d6273753e9b3efb911664d2db412ea5ed8946aecd1b1d70" Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.312260 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ff1eb8c91c9fbadd1d6273753e9b3efb911664d2db412ea5ed8946aecd1b1d70"} err="failed to get container status \"ff1eb8c91c9fbadd1d6273753e9b3efb911664d2db412ea5ed8946aecd1b1d70\": rpc error: code = NotFound desc = could not find container \"ff1eb8c91c9fbadd1d6273753e9b3efb911664d2db412ea5ed8946aecd1b1d70\": container with ID starting with ff1eb8c91c9fbadd1d6273753e9b3efb911664d2db412ea5ed8946aecd1b1d70 not found: ID does not exist" Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.312278 4558 scope.go:117] "RemoveContainer" containerID="a705863ebb9d52a220fb61755f22b86b64de0f93f5cff893ab6d746f51d915da" Jan 20 17:08:35 crc kubenswrapper[4558]: E0120 17:08:35.318243 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a705863ebb9d52a220fb61755f22b86b64de0f93f5cff893ab6d746f51d915da\": container with ID starting with a705863ebb9d52a220fb61755f22b86b64de0f93f5cff893ab6d746f51d915da not found: ID does not exist" containerID="a705863ebb9d52a220fb61755f22b86b64de0f93f5cff893ab6d746f51d915da" Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.318276 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a705863ebb9d52a220fb61755f22b86b64de0f93f5cff893ab6d746f51d915da"} err="failed to get container status \"a705863ebb9d52a220fb61755f22b86b64de0f93f5cff893ab6d746f51d915da\": rpc error: code = NotFound desc = could not find container \"a705863ebb9d52a220fb61755f22b86b64de0f93f5cff893ab6d746f51d915da\": container with ID starting with a705863ebb9d52a220fb61755f22b86b64de0f93f5cff893ab6d746f51d915da not found: ID does not exist" Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.318292 4558 scope.go:117] "RemoveContainer" containerID="8083743d7d6d37e83f1bfb8607cb7050961576fa4bb93c383d46b5f5d4700062" Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.320155 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/897179bd-8f95-47e8-b874-e1f7c2db06c7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"897179bd-8f95-47e8-b874-e1f7c2db06c7\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.320234 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/897179bd-8f95-47e8-b874-e1f7c2db06c7-scripts\") pod \"ceilometer-0\" (UID: \"897179bd-8f95-47e8-b874-e1f7c2db06c7\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.320269 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r77db\" (UniqueName: \"kubernetes.io/projected/897179bd-8f95-47e8-b874-e1f7c2db06c7-kube-api-access-r77db\") pod \"ceilometer-0\" (UID: \"897179bd-8f95-47e8-b874-e1f7c2db06c7\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 
17:08:35.320298 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/897179bd-8f95-47e8-b874-e1f7c2db06c7-config-data\") pod \"ceilometer-0\" (UID: \"897179bd-8f95-47e8-b874-e1f7c2db06c7\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.320349 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/897179bd-8f95-47e8-b874-e1f7c2db06c7-log-httpd\") pod \"ceilometer-0\" (UID: \"897179bd-8f95-47e8-b874-e1f7c2db06c7\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.320399 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/897179bd-8f95-47e8-b874-e1f7c2db06c7-run-httpd\") pod \"ceilometer-0\" (UID: \"897179bd-8f95-47e8-b874-e1f7c2db06c7\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.320441 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/897179bd-8f95-47e8-b874-e1f7c2db06c7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"897179bd-8f95-47e8-b874-e1f7c2db06c7\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.328310 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8083743d7d6d37e83f1bfb8607cb7050961576fa4bb93c383d46b5f5d4700062"} err="failed to get container status \"8083743d7d6d37e83f1bfb8607cb7050961576fa4bb93c383d46b5f5d4700062\": rpc error: code = NotFound desc = could not find container \"8083743d7d6d37e83f1bfb8607cb7050961576fa4bb93c383d46b5f5d4700062\": container with ID starting with 8083743d7d6d37e83f1bfb8607cb7050961576fa4bb93c383d46b5f5d4700062 not found: ID does not exist" Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.328363 4558 scope.go:117] "RemoveContainer" containerID="b44c56e2a3bfa6f791c1435d62c65b4284e66eac297f83e08fcdbb309a19f3f6" Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.339396 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b44c56e2a3bfa6f791c1435d62c65b4284e66eac297f83e08fcdbb309a19f3f6"} err="failed to get container status \"b44c56e2a3bfa6f791c1435d62c65b4284e66eac297f83e08fcdbb309a19f3f6\": rpc error: code = NotFound desc = could not find container \"b44c56e2a3bfa6f791c1435d62c65b4284e66eac297f83e08fcdbb309a19f3f6\": container with ID starting with b44c56e2a3bfa6f791c1435d62c65b4284e66eac297f83e08fcdbb309a19f3f6 not found: ID does not exist" Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.339461 4558 scope.go:117] "RemoveContainer" containerID="ff1eb8c91c9fbadd1d6273753e9b3efb911664d2db412ea5ed8946aecd1b1d70" Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.343236 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ff1eb8c91c9fbadd1d6273753e9b3efb911664d2db412ea5ed8946aecd1b1d70"} err="failed to get container status \"ff1eb8c91c9fbadd1d6273753e9b3efb911664d2db412ea5ed8946aecd1b1d70\": rpc error: code = NotFound desc = could not find container \"ff1eb8c91c9fbadd1d6273753e9b3efb911664d2db412ea5ed8946aecd1b1d70\": container with ID starting with 
ff1eb8c91c9fbadd1d6273753e9b3efb911664d2db412ea5ed8946aecd1b1d70 not found: ID does not exist" Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.343261 4558 scope.go:117] "RemoveContainer" containerID="a705863ebb9d52a220fb61755f22b86b64de0f93f5cff893ab6d746f51d915da" Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.348227 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a705863ebb9d52a220fb61755f22b86b64de0f93f5cff893ab6d746f51d915da"} err="failed to get container status \"a705863ebb9d52a220fb61755f22b86b64de0f93f5cff893ab6d746f51d915da\": rpc error: code = NotFound desc = could not find container \"a705863ebb9d52a220fb61755f22b86b64de0f93f5cff893ab6d746f51d915da\": container with ID starting with a705863ebb9d52a220fb61755f22b86b64de0f93f5cff893ab6d746f51d915da not found: ID does not exist" Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.348253 4558 scope.go:117] "RemoveContainer" containerID="8083743d7d6d37e83f1bfb8607cb7050961576fa4bb93c383d46b5f5d4700062" Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.352252 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8083743d7d6d37e83f1bfb8607cb7050961576fa4bb93c383d46b5f5d4700062"} err="failed to get container status \"8083743d7d6d37e83f1bfb8607cb7050961576fa4bb93c383d46b5f5d4700062\": rpc error: code = NotFound desc = could not find container \"8083743d7d6d37e83f1bfb8607cb7050961576fa4bb93c383d46b5f5d4700062\": container with ID starting with 8083743d7d6d37e83f1bfb8607cb7050961576fa4bb93c383d46b5f5d4700062 not found: ID does not exist" Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.352277 4558 scope.go:117] "RemoveContainer" containerID="b44c56e2a3bfa6f791c1435d62c65b4284e66eac297f83e08fcdbb309a19f3f6" Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.356236 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b44c56e2a3bfa6f791c1435d62c65b4284e66eac297f83e08fcdbb309a19f3f6"} err="failed to get container status \"b44c56e2a3bfa6f791c1435d62c65b4284e66eac297f83e08fcdbb309a19f3f6\": rpc error: code = NotFound desc = could not find container \"b44c56e2a3bfa6f791c1435d62c65b4284e66eac297f83e08fcdbb309a19f3f6\": container with ID starting with b44c56e2a3bfa6f791c1435d62c65b4284e66eac297f83e08fcdbb309a19f3f6 not found: ID does not exist" Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.356263 4558 scope.go:117] "RemoveContainer" containerID="ff1eb8c91c9fbadd1d6273753e9b3efb911664d2db412ea5ed8946aecd1b1d70" Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.363256 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ff1eb8c91c9fbadd1d6273753e9b3efb911664d2db412ea5ed8946aecd1b1d70"} err="failed to get container status \"ff1eb8c91c9fbadd1d6273753e9b3efb911664d2db412ea5ed8946aecd1b1d70\": rpc error: code = NotFound desc = could not find container \"ff1eb8c91c9fbadd1d6273753e9b3efb911664d2db412ea5ed8946aecd1b1d70\": container with ID starting with ff1eb8c91c9fbadd1d6273753e9b3efb911664d2db412ea5ed8946aecd1b1d70 not found: ID does not exist" Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.363289 4558 scope.go:117] "RemoveContainer" containerID="a705863ebb9d52a220fb61755f22b86b64de0f93f5cff893ab6d746f51d915da" Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.367234 4558 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"a705863ebb9d52a220fb61755f22b86b64de0f93f5cff893ab6d746f51d915da"} err="failed to get container status \"a705863ebb9d52a220fb61755f22b86b64de0f93f5cff893ab6d746f51d915da\": rpc error: code = NotFound desc = could not find container \"a705863ebb9d52a220fb61755f22b86b64de0f93f5cff893ab6d746f51d915da\": container with ID starting with a705863ebb9d52a220fb61755f22b86b64de0f93f5cff893ab6d746f51d915da not found: ID does not exist" Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.426576 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/897179bd-8f95-47e8-b874-e1f7c2db06c7-config-data\") pod \"ceilometer-0\" (UID: \"897179bd-8f95-47e8-b874-e1f7c2db06c7\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.426726 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/897179bd-8f95-47e8-b874-e1f7c2db06c7-log-httpd\") pod \"ceilometer-0\" (UID: \"897179bd-8f95-47e8-b874-e1f7c2db06c7\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.426869 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/897179bd-8f95-47e8-b874-e1f7c2db06c7-run-httpd\") pod \"ceilometer-0\" (UID: \"897179bd-8f95-47e8-b874-e1f7c2db06c7\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.426981 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/897179bd-8f95-47e8-b874-e1f7c2db06c7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"897179bd-8f95-47e8-b874-e1f7c2db06c7\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.427059 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/897179bd-8f95-47e8-b874-e1f7c2db06c7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"897179bd-8f95-47e8-b874-e1f7c2db06c7\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.427147 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/897179bd-8f95-47e8-b874-e1f7c2db06c7-scripts\") pod \"ceilometer-0\" (UID: \"897179bd-8f95-47e8-b874-e1f7c2db06c7\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.427200 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r77db\" (UniqueName: \"kubernetes.io/projected/897179bd-8f95-47e8-b874-e1f7c2db06c7-kube-api-access-r77db\") pod \"ceilometer-0\" (UID: \"897179bd-8f95-47e8-b874-e1f7c2db06c7\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.429850 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/897179bd-8f95-47e8-b874-e1f7c2db06c7-log-httpd\") pod \"ceilometer-0\" (UID: \"897179bd-8f95-47e8-b874-e1f7c2db06c7\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.430270 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/897179bd-8f95-47e8-b874-e1f7c2db06c7-run-httpd\") pod \"ceilometer-0\" (UID: \"897179bd-8f95-47e8-b874-e1f7c2db06c7\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.431892 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/897179bd-8f95-47e8-b874-e1f7c2db06c7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"897179bd-8f95-47e8-b874-e1f7c2db06c7\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.434237 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/897179bd-8f95-47e8-b874-e1f7c2db06c7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"897179bd-8f95-47e8-b874-e1f7c2db06c7\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.434732 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/897179bd-8f95-47e8-b874-e1f7c2db06c7-scripts\") pod \"ceilometer-0\" (UID: \"897179bd-8f95-47e8-b874-e1f7c2db06c7\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.435260 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/897179bd-8f95-47e8-b874-e1f7c2db06c7-config-data\") pod \"ceilometer-0\" (UID: \"897179bd-8f95-47e8-b874-e1f7c2db06c7\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.454102 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r77db\" (UniqueName: \"kubernetes.io/projected/897179bd-8f95-47e8-b874-e1f7c2db06c7-kube-api-access-r77db\") pod \"ceilometer-0\" (UID: \"897179bd-8f95-47e8-b874-e1f7c2db06c7\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.473987 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:35 crc kubenswrapper[4558]: I0120 17:08:35.898051 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:08:36 crc kubenswrapper[4558]: I0120 17:08:36.016235 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"897179bd-8f95-47e8-b874-e1f7c2db06c7","Type":"ContainerStarted","Data":"61c6c2095ae0eee7f059d64cc950e39c1871a29dd6e94931edcffddd0968781c"} Jan 20 17:08:36 crc kubenswrapper[4558]: I0120 17:08:36.019065 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/swift-proxy-69ddc9f468-cx7lb" Jan 20 17:08:36 crc kubenswrapper[4558]: I0120 17:08:36.108624 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-b56m6"] Jan 20 17:08:36 crc kubenswrapper[4558]: I0120 17:08:36.574291 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="90d455f2-55fa-4028-85a0-346675b2194d" path="/var/lib/kubelet/pods/90d455f2-55fa-4028-85a0-346675b2194d/volumes" Jan 20 17:08:37 crc kubenswrapper[4558]: I0120 17:08:37.026428 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"897179bd-8f95-47e8-b874-e1f7c2db06c7","Type":"ContainerStarted","Data":"86e5485cef63bc63ca0bd588b40d483bcc1d6c20fafa116fbbac46ac2602e0cb"} Jan 20 17:08:37 crc kubenswrapper[4558]: I0120 17:08:37.026587 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-b56m6" podUID="cb6e616b-5e0c-47d8-a11b-2434eb5ec410" containerName="registry-server" containerID="cri-o://2ffc570d7d24e20bfa3862de6474073c278b4dc1198b512280dae9fe8a40c9e4" gracePeriod=2 Jan 20 17:08:37 crc kubenswrapper[4558]: I0120 17:08:37.411541 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-b56m6" Jan 20 17:08:37 crc kubenswrapper[4558]: I0120 17:08:37.468786 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb6e616b-5e0c-47d8-a11b-2434eb5ec410-utilities\") pod \"cb6e616b-5e0c-47d8-a11b-2434eb5ec410\" (UID: \"cb6e616b-5e0c-47d8-a11b-2434eb5ec410\") " Jan 20 17:08:37 crc kubenswrapper[4558]: I0120 17:08:37.468850 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fbh6x\" (UniqueName: \"kubernetes.io/projected/cb6e616b-5e0c-47d8-a11b-2434eb5ec410-kube-api-access-fbh6x\") pod \"cb6e616b-5e0c-47d8-a11b-2434eb5ec410\" (UID: \"cb6e616b-5e0c-47d8-a11b-2434eb5ec410\") " Jan 20 17:08:37 crc kubenswrapper[4558]: I0120 17:08:37.468875 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb6e616b-5e0c-47d8-a11b-2434eb5ec410-catalog-content\") pod \"cb6e616b-5e0c-47d8-a11b-2434eb5ec410\" (UID: \"cb6e616b-5e0c-47d8-a11b-2434eb5ec410\") " Jan 20 17:08:37 crc kubenswrapper[4558]: I0120 17:08:37.470095 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cb6e616b-5e0c-47d8-a11b-2434eb5ec410-utilities" (OuterVolumeSpecName: "utilities") pod "cb6e616b-5e0c-47d8-a11b-2434eb5ec410" (UID: "cb6e616b-5e0c-47d8-a11b-2434eb5ec410"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:08:37 crc kubenswrapper[4558]: I0120 17:08:37.478043 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cb6e616b-5e0c-47d8-a11b-2434eb5ec410-kube-api-access-fbh6x" (OuterVolumeSpecName: "kube-api-access-fbh6x") pod "cb6e616b-5e0c-47d8-a11b-2434eb5ec410" (UID: "cb6e616b-5e0c-47d8-a11b-2434eb5ec410"). InnerVolumeSpecName "kube-api-access-fbh6x". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:08:37 crc kubenswrapper[4558]: I0120 17:08:37.486576 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cb6e616b-5e0c-47d8-a11b-2434eb5ec410-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "cb6e616b-5e0c-47d8-a11b-2434eb5ec410" (UID: "cb6e616b-5e0c-47d8-a11b-2434eb5ec410"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:08:37 crc kubenswrapper[4558]: I0120 17:08:37.566595 4558 scope.go:117] "RemoveContainer" containerID="a711a286282347590079d2447ef37fa9ed0f11085d6d7a65f85e65c1c2df608f" Jan 20 17:08:37 crc kubenswrapper[4558]: E0120 17:08:37.566953 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:08:37 crc kubenswrapper[4558]: I0120 17:08:37.570529 4558 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cb6e616b-5e0c-47d8-a11b-2434eb5ec410-utilities\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:37 crc kubenswrapper[4558]: I0120 17:08:37.570558 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fbh6x\" (UniqueName: \"kubernetes.io/projected/cb6e616b-5e0c-47d8-a11b-2434eb5ec410-kube-api-access-fbh6x\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:37 crc kubenswrapper[4558]: I0120 17:08:37.570570 4558 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cb6e616b-5e0c-47d8-a11b-2434eb5ec410-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:38 crc kubenswrapper[4558]: I0120 17:08:38.038833 4558 generic.go:334] "Generic (PLEG): container finished" podID="cb6e616b-5e0c-47d8-a11b-2434eb5ec410" containerID="2ffc570d7d24e20bfa3862de6474073c278b4dc1198b512280dae9fe8a40c9e4" exitCode=0 Jan 20 17:08:38 crc kubenswrapper[4558]: I0120 17:08:38.039237 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-b56m6" Jan 20 17:08:38 crc kubenswrapper[4558]: I0120 17:08:38.039047 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b56m6" event={"ID":"cb6e616b-5e0c-47d8-a11b-2434eb5ec410","Type":"ContainerDied","Data":"2ffc570d7d24e20bfa3862de6474073c278b4dc1198b512280dae9fe8a40c9e4"} Jan 20 17:08:38 crc kubenswrapper[4558]: I0120 17:08:38.039308 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b56m6" event={"ID":"cb6e616b-5e0c-47d8-a11b-2434eb5ec410","Type":"ContainerDied","Data":"c50a0451fc27febd11e9d8e0f4919b3b83ba371d0b639cc29af4171197a45fe6"} Jan 20 17:08:38 crc kubenswrapper[4558]: I0120 17:08:38.039350 4558 scope.go:117] "RemoveContainer" containerID="2ffc570d7d24e20bfa3862de6474073c278b4dc1198b512280dae9fe8a40c9e4" Jan 20 17:08:38 crc kubenswrapper[4558]: I0120 17:08:38.043612 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"897179bd-8f95-47e8-b874-e1f7c2db06c7","Type":"ContainerStarted","Data":"5839aa5e498cee14d343ff26b5edddf5cf98b1bb4356f594d515cd7aca5d1741"} Jan 20 17:08:38 crc kubenswrapper[4558]: I0120 17:08:38.069007 4558 scope.go:117] "RemoveContainer" containerID="5108e8608988ff4f797e72eb2166c18268cdcdf43011cda95c997aaade334dbf" Jan 20 17:08:38 crc kubenswrapper[4558]: I0120 17:08:38.077621 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-b56m6"] Jan 20 17:08:38 crc kubenswrapper[4558]: I0120 17:08:38.084597 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-b56m6"] Jan 20 17:08:38 crc kubenswrapper[4558]: I0120 17:08:38.089671 4558 scope.go:117] "RemoveContainer" containerID="a62ccd599fd4641719cc9815bdd137447105a274a2138b5fcc0bf39e5d0ffac2" Jan 20 17:08:38 crc kubenswrapper[4558]: I0120 17:08:38.123327 4558 scope.go:117] "RemoveContainer" containerID="2ffc570d7d24e20bfa3862de6474073c278b4dc1198b512280dae9fe8a40c9e4" Jan 20 17:08:38 crc kubenswrapper[4558]: E0120 17:08:38.123699 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2ffc570d7d24e20bfa3862de6474073c278b4dc1198b512280dae9fe8a40c9e4\": container with ID starting with 2ffc570d7d24e20bfa3862de6474073c278b4dc1198b512280dae9fe8a40c9e4 not found: ID does not exist" containerID="2ffc570d7d24e20bfa3862de6474073c278b4dc1198b512280dae9fe8a40c9e4" Jan 20 17:08:38 crc kubenswrapper[4558]: I0120 17:08:38.123733 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ffc570d7d24e20bfa3862de6474073c278b4dc1198b512280dae9fe8a40c9e4"} err="failed to get container status \"2ffc570d7d24e20bfa3862de6474073c278b4dc1198b512280dae9fe8a40c9e4\": rpc error: code = NotFound desc = could not find container \"2ffc570d7d24e20bfa3862de6474073c278b4dc1198b512280dae9fe8a40c9e4\": container with ID starting with 2ffc570d7d24e20bfa3862de6474073c278b4dc1198b512280dae9fe8a40c9e4 not found: ID does not exist" Jan 20 17:08:38 crc kubenswrapper[4558]: I0120 17:08:38.123754 4558 scope.go:117] "RemoveContainer" containerID="5108e8608988ff4f797e72eb2166c18268cdcdf43011cda95c997aaade334dbf" Jan 20 17:08:38 crc kubenswrapper[4558]: E0120 17:08:38.124186 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"5108e8608988ff4f797e72eb2166c18268cdcdf43011cda95c997aaade334dbf\": container with ID starting with 5108e8608988ff4f797e72eb2166c18268cdcdf43011cda95c997aaade334dbf not found: ID does not exist" containerID="5108e8608988ff4f797e72eb2166c18268cdcdf43011cda95c997aaade334dbf" Jan 20 17:08:38 crc kubenswrapper[4558]: I0120 17:08:38.124219 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5108e8608988ff4f797e72eb2166c18268cdcdf43011cda95c997aaade334dbf"} err="failed to get container status \"5108e8608988ff4f797e72eb2166c18268cdcdf43011cda95c997aaade334dbf\": rpc error: code = NotFound desc = could not find container \"5108e8608988ff4f797e72eb2166c18268cdcdf43011cda95c997aaade334dbf\": container with ID starting with 5108e8608988ff4f797e72eb2166c18268cdcdf43011cda95c997aaade334dbf not found: ID does not exist" Jan 20 17:08:38 crc kubenswrapper[4558]: I0120 17:08:38.124241 4558 scope.go:117] "RemoveContainer" containerID="a62ccd599fd4641719cc9815bdd137447105a274a2138b5fcc0bf39e5d0ffac2" Jan 20 17:08:38 crc kubenswrapper[4558]: E0120 17:08:38.124704 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a62ccd599fd4641719cc9815bdd137447105a274a2138b5fcc0bf39e5d0ffac2\": container with ID starting with a62ccd599fd4641719cc9815bdd137447105a274a2138b5fcc0bf39e5d0ffac2 not found: ID does not exist" containerID="a62ccd599fd4641719cc9815bdd137447105a274a2138b5fcc0bf39e5d0ffac2" Jan 20 17:08:38 crc kubenswrapper[4558]: I0120 17:08:38.124732 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a62ccd599fd4641719cc9815bdd137447105a274a2138b5fcc0bf39e5d0ffac2"} err="failed to get container status \"a62ccd599fd4641719cc9815bdd137447105a274a2138b5fcc0bf39e5d0ffac2\": rpc error: code = NotFound desc = could not find container \"a62ccd599fd4641719cc9815bdd137447105a274a2138b5fcc0bf39e5d0ffac2\": container with ID starting with a62ccd599fd4641719cc9815bdd137447105a274a2138b5fcc0bf39e5d0ffac2 not found: ID does not exist" Jan 20 17:08:38 crc kubenswrapper[4558]: I0120 17:08:38.578139 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cb6e616b-5e0c-47d8-a11b-2434eb5ec410" path="/var/lib/kubelet/pods/cb6e616b-5e0c-47d8-a11b-2434eb5ec410/volumes" Jan 20 17:08:39 crc kubenswrapper[4558]: I0120 17:08:39.056519 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"897179bd-8f95-47e8-b874-e1f7c2db06c7","Type":"ContainerStarted","Data":"da5479b2bc6f69158381f6b4f545f431790e8d4a301ad50da1d5f808b6102bf5"} Jan 20 17:08:39 crc kubenswrapper[4558]: E0120 17:08:39.056545 4558 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4fb0c521_d465_47b9_b859_199f53143dca.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4fb0c521_d465_47b9_b859_199f53143dca.slice/crio-720cf8a66b7cad8587f8ef26f04899153239b9d5f060876b68844c1340f9e7ba\": RecentStats: unable to find data in memory cache]" Jan 20 17:08:40 crc kubenswrapper[4558]: I0120 17:08:40.089063 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"897179bd-8f95-47e8-b874-e1f7c2db06c7","Type":"ContainerStarted","Data":"8db2ebc45ac71ce4fa8e3f4838455b8765bb2111869eaa2656b137d300bf7aa1"} 
Jan 20 17:08:40 crc kubenswrapper[4558]: I0120 17:08:40.089632 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:40 crc kubenswrapper[4558]: I0120 17:08:40.111576 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=1.324085478 podStartE2EDuration="5.111556496s" podCreationTimestamp="2026-01-20 17:08:35 +0000 UTC" firstStartedPulling="2026-01-20 17:08:35.904132565 +0000 UTC m=+1609.664470532" lastFinishedPulling="2026-01-20 17:08:39.691603583 +0000 UTC m=+1613.451941550" observedRunningTime="2026-01-20 17:08:40.105800022 +0000 UTC m=+1613.866137989" watchObservedRunningTime="2026-01-20 17:08:40.111556496 +0000 UTC m=+1613.871894463" Jan 20 17:08:40 crc kubenswrapper[4558]: I0120 17:08:40.199013 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-db-create-hs5v4"] Jan 20 17:08:40 crc kubenswrapper[4558]: E0120 17:08:40.199385 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb6e616b-5e0c-47d8-a11b-2434eb5ec410" containerName="extract-utilities" Jan 20 17:08:40 crc kubenswrapper[4558]: I0120 17:08:40.199406 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb6e616b-5e0c-47d8-a11b-2434eb5ec410" containerName="extract-utilities" Jan 20 17:08:40 crc kubenswrapper[4558]: E0120 17:08:40.199423 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb6e616b-5e0c-47d8-a11b-2434eb5ec410" containerName="registry-server" Jan 20 17:08:40 crc kubenswrapper[4558]: I0120 17:08:40.199430 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb6e616b-5e0c-47d8-a11b-2434eb5ec410" containerName="registry-server" Jan 20 17:08:40 crc kubenswrapper[4558]: E0120 17:08:40.199442 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb6e616b-5e0c-47d8-a11b-2434eb5ec410" containerName="extract-content" Jan 20 17:08:40 crc kubenswrapper[4558]: I0120 17:08:40.199450 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb6e616b-5e0c-47d8-a11b-2434eb5ec410" containerName="extract-content" Jan 20 17:08:40 crc kubenswrapper[4558]: I0120 17:08:40.199620 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="cb6e616b-5e0c-47d8-a11b-2434eb5ec410" containerName="registry-server" Jan 20 17:08:40 crc kubenswrapper[4558]: I0120 17:08:40.200199 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-db-create-hs5v4" Jan 20 17:08:40 crc kubenswrapper[4558]: I0120 17:08:40.214866 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-db-create-hs5v4"] Jan 20 17:08:40 crc kubenswrapper[4558]: I0120 17:08:40.220444 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8gq7f\" (UniqueName: \"kubernetes.io/projected/ce2696d5-1fd9-496a-a751-68912b14248c-kube-api-access-8gq7f\") pod \"nova-api-db-create-hs5v4\" (UID: \"ce2696d5-1fd9-496a-a751-68912b14248c\") " pod="openstack-kuttl-tests/nova-api-db-create-hs5v4" Jan 20 17:08:40 crc kubenswrapper[4558]: I0120 17:08:40.220604 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ce2696d5-1fd9-496a-a751-68912b14248c-operator-scripts\") pod \"nova-api-db-create-hs5v4\" (UID: \"ce2696d5-1fd9-496a-a751-68912b14248c\") " pod="openstack-kuttl-tests/nova-api-db-create-hs5v4" Jan 20 17:08:40 crc kubenswrapper[4558]: I0120 17:08:40.322087 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8gq7f\" (UniqueName: \"kubernetes.io/projected/ce2696d5-1fd9-496a-a751-68912b14248c-kube-api-access-8gq7f\") pod \"nova-api-db-create-hs5v4\" (UID: \"ce2696d5-1fd9-496a-a751-68912b14248c\") " pod="openstack-kuttl-tests/nova-api-db-create-hs5v4" Jan 20 17:08:40 crc kubenswrapper[4558]: I0120 17:08:40.322249 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ce2696d5-1fd9-496a-a751-68912b14248c-operator-scripts\") pod \"nova-api-db-create-hs5v4\" (UID: \"ce2696d5-1fd9-496a-a751-68912b14248c\") " pod="openstack-kuttl-tests/nova-api-db-create-hs5v4" Jan 20 17:08:40 crc kubenswrapper[4558]: I0120 17:08:40.323004 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ce2696d5-1fd9-496a-a751-68912b14248c-operator-scripts\") pod \"nova-api-db-create-hs5v4\" (UID: \"ce2696d5-1fd9-496a-a751-68912b14248c\") " pod="openstack-kuttl-tests/nova-api-db-create-hs5v4" Jan 20 17:08:40 crc kubenswrapper[4558]: I0120 17:08:40.324940 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell0-db-create-k57tt"] Jan 20 17:08:40 crc kubenswrapper[4558]: I0120 17:08:40.326148 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-db-create-k57tt" Jan 20 17:08:40 crc kubenswrapper[4558]: I0120 17:08:40.331299 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-b308-account-create-update-mhczn"] Jan 20 17:08:40 crc kubenswrapper[4558]: I0120 17:08:40.332725 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-b308-account-create-update-mhczn" Jan 20 17:08:40 crc kubenswrapper[4558]: I0120 17:08:40.334107 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-api-db-secret" Jan 20 17:08:40 crc kubenswrapper[4558]: I0120 17:08:40.336587 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-b308-account-create-update-mhczn"] Jan 20 17:08:40 crc kubenswrapper[4558]: I0120 17:08:40.346866 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-db-create-k57tt"] Jan 20 17:08:40 crc kubenswrapper[4558]: I0120 17:08:40.358459 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8gq7f\" (UniqueName: \"kubernetes.io/projected/ce2696d5-1fd9-496a-a751-68912b14248c-kube-api-access-8gq7f\") pod \"nova-api-db-create-hs5v4\" (UID: \"ce2696d5-1fd9-496a-a751-68912b14248c\") " pod="openstack-kuttl-tests/nova-api-db-create-hs5v4" Jan 20 17:08:40 crc kubenswrapper[4558]: I0120 17:08:40.424529 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j9l5k\" (UniqueName: \"kubernetes.io/projected/b8906a01-1578-45d7-bfa0-4bbb4dfe6123-kube-api-access-j9l5k\") pod \"nova-cell0-db-create-k57tt\" (UID: \"b8906a01-1578-45d7-bfa0-4bbb4dfe6123\") " pod="openstack-kuttl-tests/nova-cell0-db-create-k57tt" Jan 20 17:08:40 crc kubenswrapper[4558]: I0120 17:08:40.424684 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ndvv8\" (UniqueName: \"kubernetes.io/projected/f44e4f71-b12b-41da-bb06-0495080715ea-kube-api-access-ndvv8\") pod \"nova-api-b308-account-create-update-mhczn\" (UID: \"f44e4f71-b12b-41da-bb06-0495080715ea\") " pod="openstack-kuttl-tests/nova-api-b308-account-create-update-mhczn" Jan 20 17:08:40 crc kubenswrapper[4558]: I0120 17:08:40.424779 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b8906a01-1578-45d7-bfa0-4bbb4dfe6123-operator-scripts\") pod \"nova-cell0-db-create-k57tt\" (UID: \"b8906a01-1578-45d7-bfa0-4bbb4dfe6123\") " pod="openstack-kuttl-tests/nova-cell0-db-create-k57tt" Jan 20 17:08:40 crc kubenswrapper[4558]: I0120 17:08:40.425082 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f44e4f71-b12b-41da-bb06-0495080715ea-operator-scripts\") pod \"nova-api-b308-account-create-update-mhczn\" (UID: \"f44e4f71-b12b-41da-bb06-0495080715ea\") " pod="openstack-kuttl-tests/nova-api-b308-account-create-update-mhczn" Jan 20 17:08:40 crc kubenswrapper[4558]: I0120 17:08:40.517200 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-db-create-gkhg5"] Jan 20 17:08:40 crc kubenswrapper[4558]: I0120 17:08:40.519148 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-db-create-gkhg5" Jan 20 17:08:40 crc kubenswrapper[4558]: I0120 17:08:40.520076 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-db-create-hs5v4" Jan 20 17:08:40 crc kubenswrapper[4558]: I0120 17:08:40.527436 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell0-77c1-account-create-update-9lp88"] Jan 20 17:08:40 crc kubenswrapper[4558]: I0120 17:08:40.527766 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b8906a01-1578-45d7-bfa0-4bbb4dfe6123-operator-scripts\") pod \"nova-cell0-db-create-k57tt\" (UID: \"b8906a01-1578-45d7-bfa0-4bbb4dfe6123\") " pod="openstack-kuttl-tests/nova-cell0-db-create-k57tt" Jan 20 17:08:40 crc kubenswrapper[4558]: I0120 17:08:40.527924 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f44e4f71-b12b-41da-bb06-0495080715ea-operator-scripts\") pod \"nova-api-b308-account-create-update-mhczn\" (UID: \"f44e4f71-b12b-41da-bb06-0495080715ea\") " pod="openstack-kuttl-tests/nova-api-b308-account-create-update-mhczn" Jan 20 17:08:40 crc kubenswrapper[4558]: I0120 17:08:40.527960 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j9l5k\" (UniqueName: \"kubernetes.io/projected/b8906a01-1578-45d7-bfa0-4bbb4dfe6123-kube-api-access-j9l5k\") pod \"nova-cell0-db-create-k57tt\" (UID: \"b8906a01-1578-45d7-bfa0-4bbb4dfe6123\") " pod="openstack-kuttl-tests/nova-cell0-db-create-k57tt" Jan 20 17:08:40 crc kubenswrapper[4558]: I0120 17:08:40.528045 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ndvv8\" (UniqueName: \"kubernetes.io/projected/f44e4f71-b12b-41da-bb06-0495080715ea-kube-api-access-ndvv8\") pod \"nova-api-b308-account-create-update-mhczn\" (UID: \"f44e4f71-b12b-41da-bb06-0495080715ea\") " pod="openstack-kuttl-tests/nova-api-b308-account-create-update-mhczn" Jan 20 17:08:40 crc kubenswrapper[4558]: I0120 17:08:40.528573 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-77c1-account-create-update-9lp88" Jan 20 17:08:40 crc kubenswrapper[4558]: I0120 17:08:40.529347 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b8906a01-1578-45d7-bfa0-4bbb4dfe6123-operator-scripts\") pod \"nova-cell0-db-create-k57tt\" (UID: \"b8906a01-1578-45d7-bfa0-4bbb4dfe6123\") " pod="openstack-kuttl-tests/nova-cell0-db-create-k57tt" Jan 20 17:08:40 crc kubenswrapper[4558]: I0120 17:08:40.529459 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f44e4f71-b12b-41da-bb06-0495080715ea-operator-scripts\") pod \"nova-api-b308-account-create-update-mhczn\" (UID: \"f44e4f71-b12b-41da-bb06-0495080715ea\") " pod="openstack-kuttl-tests/nova-api-b308-account-create-update-mhczn" Jan 20 17:08:40 crc kubenswrapper[4558]: I0120 17:08:40.530705 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-db-secret" Jan 20 17:08:40 crc kubenswrapper[4558]: I0120 17:08:40.546592 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-db-create-gkhg5"] Jan 20 17:08:40 crc kubenswrapper[4558]: I0120 17:08:40.549075 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ndvv8\" (UniqueName: \"kubernetes.io/projected/f44e4f71-b12b-41da-bb06-0495080715ea-kube-api-access-ndvv8\") pod \"nova-api-b308-account-create-update-mhczn\" (UID: \"f44e4f71-b12b-41da-bb06-0495080715ea\") " pod="openstack-kuttl-tests/nova-api-b308-account-create-update-mhczn" Jan 20 17:08:40 crc kubenswrapper[4558]: I0120 17:08:40.552600 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j9l5k\" (UniqueName: \"kubernetes.io/projected/b8906a01-1578-45d7-bfa0-4bbb4dfe6123-kube-api-access-j9l5k\") pod \"nova-cell0-db-create-k57tt\" (UID: \"b8906a01-1578-45d7-bfa0-4bbb4dfe6123\") " pod="openstack-kuttl-tests/nova-cell0-db-create-k57tt" Jan 20 17:08:40 crc kubenswrapper[4558]: I0120 17:08:40.552646 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-77c1-account-create-update-9lp88"] Jan 20 17:08:40 crc kubenswrapper[4558]: I0120 17:08:40.630888 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rgpbv\" (UniqueName: \"kubernetes.io/projected/038dd72b-1a11-42d9-98bb-5f681a2fbc38-kube-api-access-rgpbv\") pod \"nova-cell1-db-create-gkhg5\" (UID: \"038dd72b-1a11-42d9-98bb-5f681a2fbc38\") " pod="openstack-kuttl-tests/nova-cell1-db-create-gkhg5" Jan 20 17:08:40 crc kubenswrapper[4558]: I0120 17:08:40.631489 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9k2vq\" (UniqueName: \"kubernetes.io/projected/60dcbf76-e430-47bc-afe1-832961122cc9-kube-api-access-9k2vq\") pod \"nova-cell0-77c1-account-create-update-9lp88\" (UID: \"60dcbf76-e430-47bc-afe1-832961122cc9\") " pod="openstack-kuttl-tests/nova-cell0-77c1-account-create-update-9lp88" Jan 20 17:08:40 crc kubenswrapper[4558]: I0120 17:08:40.631550 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/60dcbf76-e430-47bc-afe1-832961122cc9-operator-scripts\") pod \"nova-cell0-77c1-account-create-update-9lp88\" (UID: 
\"60dcbf76-e430-47bc-afe1-832961122cc9\") " pod="openstack-kuttl-tests/nova-cell0-77c1-account-create-update-9lp88" Jan 20 17:08:40 crc kubenswrapper[4558]: I0120 17:08:40.631588 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/038dd72b-1a11-42d9-98bb-5f681a2fbc38-operator-scripts\") pod \"nova-cell1-db-create-gkhg5\" (UID: \"038dd72b-1a11-42d9-98bb-5f681a2fbc38\") " pod="openstack-kuttl-tests/nova-cell1-db-create-gkhg5" Jan 20 17:08:40 crc kubenswrapper[4558]: I0120 17:08:40.684451 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-b308-account-create-update-mhczn" Jan 20 17:08:40 crc kubenswrapper[4558]: I0120 17:08:40.686212 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-db-create-k57tt" Jan 20 17:08:40 crc kubenswrapper[4558]: I0120 17:08:40.730296 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-0eec-account-create-update-j75x4"] Jan 20 17:08:40 crc kubenswrapper[4558]: I0120 17:08:40.732238 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-0eec-account-create-update-j75x4"] Jan 20 17:08:40 crc kubenswrapper[4558]: I0120 17:08:40.732380 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-0eec-account-create-update-j75x4" Jan 20 17:08:40 crc kubenswrapper[4558]: I0120 17:08:40.734306 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/038dd72b-1a11-42d9-98bb-5f681a2fbc38-operator-scripts\") pod \"nova-cell1-db-create-gkhg5\" (UID: \"038dd72b-1a11-42d9-98bb-5f681a2fbc38\") " pod="openstack-kuttl-tests/nova-cell1-db-create-gkhg5" Jan 20 17:08:40 crc kubenswrapper[4558]: I0120 17:08:40.734406 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7eea1d5d-bf4b-4104-9c44-8dc4846f8c76-operator-scripts\") pod \"nova-cell1-0eec-account-create-update-j75x4\" (UID: \"7eea1d5d-bf4b-4104-9c44-8dc4846f8c76\") " pod="openstack-kuttl-tests/nova-cell1-0eec-account-create-update-j75x4" Jan 20 17:08:40 crc kubenswrapper[4558]: I0120 17:08:40.734493 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rgpbv\" (UniqueName: \"kubernetes.io/projected/038dd72b-1a11-42d9-98bb-5f681a2fbc38-kube-api-access-rgpbv\") pod \"nova-cell1-db-create-gkhg5\" (UID: \"038dd72b-1a11-42d9-98bb-5f681a2fbc38\") " pod="openstack-kuttl-tests/nova-cell1-db-create-gkhg5" Jan 20 17:08:40 crc kubenswrapper[4558]: I0120 17:08:40.734635 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kqld8\" (UniqueName: \"kubernetes.io/projected/7eea1d5d-bf4b-4104-9c44-8dc4846f8c76-kube-api-access-kqld8\") pod \"nova-cell1-0eec-account-create-update-j75x4\" (UID: \"7eea1d5d-bf4b-4104-9c44-8dc4846f8c76\") " pod="openstack-kuttl-tests/nova-cell1-0eec-account-create-update-j75x4" Jan 20 17:08:40 crc kubenswrapper[4558]: I0120 17:08:40.734689 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9k2vq\" (UniqueName: \"kubernetes.io/projected/60dcbf76-e430-47bc-afe1-832961122cc9-kube-api-access-9k2vq\") pod 
\"nova-cell0-77c1-account-create-update-9lp88\" (UID: \"60dcbf76-e430-47bc-afe1-832961122cc9\") " pod="openstack-kuttl-tests/nova-cell0-77c1-account-create-update-9lp88" Jan 20 17:08:40 crc kubenswrapper[4558]: I0120 17:08:40.734725 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/60dcbf76-e430-47bc-afe1-832961122cc9-operator-scripts\") pod \"nova-cell0-77c1-account-create-update-9lp88\" (UID: \"60dcbf76-e430-47bc-afe1-832961122cc9\") " pod="openstack-kuttl-tests/nova-cell0-77c1-account-create-update-9lp88" Jan 20 17:08:40 crc kubenswrapper[4558]: I0120 17:08:40.735553 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-db-secret" Jan 20 17:08:40 crc kubenswrapper[4558]: I0120 17:08:40.736280 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/038dd72b-1a11-42d9-98bb-5f681a2fbc38-operator-scripts\") pod \"nova-cell1-db-create-gkhg5\" (UID: \"038dd72b-1a11-42d9-98bb-5f681a2fbc38\") " pod="openstack-kuttl-tests/nova-cell1-db-create-gkhg5" Jan 20 17:08:40 crc kubenswrapper[4558]: I0120 17:08:40.737222 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/60dcbf76-e430-47bc-afe1-832961122cc9-operator-scripts\") pod \"nova-cell0-77c1-account-create-update-9lp88\" (UID: \"60dcbf76-e430-47bc-afe1-832961122cc9\") " pod="openstack-kuttl-tests/nova-cell0-77c1-account-create-update-9lp88" Jan 20 17:08:40 crc kubenswrapper[4558]: I0120 17:08:40.758040 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9k2vq\" (UniqueName: \"kubernetes.io/projected/60dcbf76-e430-47bc-afe1-832961122cc9-kube-api-access-9k2vq\") pod \"nova-cell0-77c1-account-create-update-9lp88\" (UID: \"60dcbf76-e430-47bc-afe1-832961122cc9\") " pod="openstack-kuttl-tests/nova-cell0-77c1-account-create-update-9lp88" Jan 20 17:08:40 crc kubenswrapper[4558]: I0120 17:08:40.761694 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rgpbv\" (UniqueName: \"kubernetes.io/projected/038dd72b-1a11-42d9-98bb-5f681a2fbc38-kube-api-access-rgpbv\") pod \"nova-cell1-db-create-gkhg5\" (UID: \"038dd72b-1a11-42d9-98bb-5f681a2fbc38\") " pod="openstack-kuttl-tests/nova-cell1-db-create-gkhg5" Jan 20 17:08:40 crc kubenswrapper[4558]: I0120 17:08:40.852095 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-db-create-gkhg5" Jan 20 17:08:40 crc kubenswrapper[4558]: I0120 17:08:40.887152 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7eea1d5d-bf4b-4104-9c44-8dc4846f8c76-operator-scripts\") pod \"nova-cell1-0eec-account-create-update-j75x4\" (UID: \"7eea1d5d-bf4b-4104-9c44-8dc4846f8c76\") " pod="openstack-kuttl-tests/nova-cell1-0eec-account-create-update-j75x4" Jan 20 17:08:40 crc kubenswrapper[4558]: I0120 17:08:40.887470 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kqld8\" (UniqueName: \"kubernetes.io/projected/7eea1d5d-bf4b-4104-9c44-8dc4846f8c76-kube-api-access-kqld8\") pod \"nova-cell1-0eec-account-create-update-j75x4\" (UID: \"7eea1d5d-bf4b-4104-9c44-8dc4846f8c76\") " pod="openstack-kuttl-tests/nova-cell1-0eec-account-create-update-j75x4" Jan 20 17:08:40 crc kubenswrapper[4558]: I0120 17:08:40.895321 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-77c1-account-create-update-9lp88" Jan 20 17:08:40 crc kubenswrapper[4558]: I0120 17:08:40.896534 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7eea1d5d-bf4b-4104-9c44-8dc4846f8c76-operator-scripts\") pod \"nova-cell1-0eec-account-create-update-j75x4\" (UID: \"7eea1d5d-bf4b-4104-9c44-8dc4846f8c76\") " pod="openstack-kuttl-tests/nova-cell1-0eec-account-create-update-j75x4" Jan 20 17:08:40 crc kubenswrapper[4558]: I0120 17:08:40.910893 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kqld8\" (UniqueName: \"kubernetes.io/projected/7eea1d5d-bf4b-4104-9c44-8dc4846f8c76-kube-api-access-kqld8\") pod \"nova-cell1-0eec-account-create-update-j75x4\" (UID: \"7eea1d5d-bf4b-4104-9c44-8dc4846f8c76\") " pod="openstack-kuttl-tests/nova-cell1-0eec-account-create-update-j75x4" Jan 20 17:08:41 crc kubenswrapper[4558]: W0120 17:08:41.071193 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podce2696d5_1fd9_496a_a751_68912b14248c.slice/crio-e69c819ba37bbaf9fc122a7891ff169ab8397b00152243ba2ac090b7f3dd7ba3 WatchSource:0}: Error finding container e69c819ba37bbaf9fc122a7891ff169ab8397b00152243ba2ac090b7f3dd7ba3: Status 404 returned error can't find the container with id e69c819ba37bbaf9fc122a7891ff169ab8397b00152243ba2ac090b7f3dd7ba3 Jan 20 17:08:41 crc kubenswrapper[4558]: I0120 17:08:41.073955 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-db-create-hs5v4"] Jan 20 17:08:41 crc kubenswrapper[4558]: I0120 17:08:41.125637 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-0eec-account-create-update-j75x4" Jan 20 17:08:41 crc kubenswrapper[4558]: I0120 17:08:41.135782 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-db-create-hs5v4" event={"ID":"ce2696d5-1fd9-496a-a751-68912b14248c","Type":"ContainerStarted","Data":"e69c819ba37bbaf9fc122a7891ff169ab8397b00152243ba2ac090b7f3dd7ba3"} Jan 20 17:08:41 crc kubenswrapper[4558]: I0120 17:08:41.200214 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-db-create-k57tt"] Jan 20 17:08:41 crc kubenswrapper[4558]: I0120 17:08:41.219180 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-b308-account-create-update-mhczn"] Jan 20 17:08:41 crc kubenswrapper[4558]: I0120 17:08:41.380173 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-77c1-account-create-update-9lp88"] Jan 20 17:08:41 crc kubenswrapper[4558]: I0120 17:08:41.470103 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-db-create-gkhg5"] Jan 20 17:08:41 crc kubenswrapper[4558]: W0120 17:08:41.494426 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod038dd72b_1a11_42d9_98bb_5f681a2fbc38.slice/crio-f3a869ab66d396bac15dc13d86e81a39480c3b7f86be03c00c260b299b7638ef WatchSource:0}: Error finding container f3a869ab66d396bac15dc13d86e81a39480c3b7f86be03c00c260b299b7638ef: Status 404 returned error can't find the container with id f3a869ab66d396bac15dc13d86e81a39480c3b7f86be03c00c260b299b7638ef Jan 20 17:08:41 crc kubenswrapper[4558]: I0120 17:08:41.573360 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-0eec-account-create-update-j75x4"] Jan 20 17:08:41 crc kubenswrapper[4558]: W0120 17:08:41.594959 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7eea1d5d_bf4b_4104_9c44_8dc4846f8c76.slice/crio-e8df26a7d4ed4702d211c5491b97465a620ec8094049cbcd55f67c49fa108acb WatchSource:0}: Error finding container e8df26a7d4ed4702d211c5491b97465a620ec8094049cbcd55f67c49fa108acb: Status 404 returned error can't find the container with id e8df26a7d4ed4702d211c5491b97465a620ec8094049cbcd55f67c49fa108acb Jan 20 17:08:41 crc kubenswrapper[4558]: I0120 17:08:41.759137 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:08:42 crc kubenswrapper[4558]: I0120 17:08:42.150713 4558 generic.go:334] "Generic (PLEG): container finished" podID="b8906a01-1578-45d7-bfa0-4bbb4dfe6123" containerID="6b61207025457ad07d1c5aba4ae82e5f70fd7063c9e90607164afad832616b9e" exitCode=0 Jan 20 17:08:42 crc kubenswrapper[4558]: I0120 17:08:42.150844 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-db-create-k57tt" event={"ID":"b8906a01-1578-45d7-bfa0-4bbb4dfe6123","Type":"ContainerDied","Data":"6b61207025457ad07d1c5aba4ae82e5f70fd7063c9e90607164afad832616b9e"} Jan 20 17:08:42 crc kubenswrapper[4558]: I0120 17:08:42.152013 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-db-create-k57tt" event={"ID":"b8906a01-1578-45d7-bfa0-4bbb4dfe6123","Type":"ContainerStarted","Data":"118af64dcb204441177eca5cd02873a970d3f4f43cdce9262e39a0f8ae9152e1"} Jan 20 17:08:42 crc kubenswrapper[4558]: I0120 17:08:42.153440 4558 
generic.go:334] "Generic (PLEG): container finished" podID="7eea1d5d-bf4b-4104-9c44-8dc4846f8c76" containerID="afedf4808c1da5494a40e8abc086d0c1477f592d1ddd8d03ca1bec56382acdc7" exitCode=0 Jan 20 17:08:42 crc kubenswrapper[4558]: I0120 17:08:42.153497 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-0eec-account-create-update-j75x4" event={"ID":"7eea1d5d-bf4b-4104-9c44-8dc4846f8c76","Type":"ContainerDied","Data":"afedf4808c1da5494a40e8abc086d0c1477f592d1ddd8d03ca1bec56382acdc7"} Jan 20 17:08:42 crc kubenswrapper[4558]: I0120 17:08:42.153513 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-0eec-account-create-update-j75x4" event={"ID":"7eea1d5d-bf4b-4104-9c44-8dc4846f8c76","Type":"ContainerStarted","Data":"e8df26a7d4ed4702d211c5491b97465a620ec8094049cbcd55f67c49fa108acb"} Jan 20 17:08:42 crc kubenswrapper[4558]: I0120 17:08:42.155099 4558 generic.go:334] "Generic (PLEG): container finished" podID="038dd72b-1a11-42d9-98bb-5f681a2fbc38" containerID="0919b57f07a307b47bd20d28cd7d906c293aab7392a04d683e1852a092fb578d" exitCode=0 Jan 20 17:08:42 crc kubenswrapper[4558]: I0120 17:08:42.155137 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-db-create-gkhg5" event={"ID":"038dd72b-1a11-42d9-98bb-5f681a2fbc38","Type":"ContainerDied","Data":"0919b57f07a307b47bd20d28cd7d906c293aab7392a04d683e1852a092fb578d"} Jan 20 17:08:42 crc kubenswrapper[4558]: I0120 17:08:42.155379 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-db-create-gkhg5" event={"ID":"038dd72b-1a11-42d9-98bb-5f681a2fbc38","Type":"ContainerStarted","Data":"f3a869ab66d396bac15dc13d86e81a39480c3b7f86be03c00c260b299b7638ef"} Jan 20 17:08:42 crc kubenswrapper[4558]: I0120 17:08:42.157109 4558 generic.go:334] "Generic (PLEG): container finished" podID="60dcbf76-e430-47bc-afe1-832961122cc9" containerID="de69b0acdf13d9e2cff3bfba3e51a4b8eabd7d2214a17cc35a6defefc441f40e" exitCode=0 Jan 20 17:08:42 crc kubenswrapper[4558]: I0120 17:08:42.157185 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-77c1-account-create-update-9lp88" event={"ID":"60dcbf76-e430-47bc-afe1-832961122cc9","Type":"ContainerDied","Data":"de69b0acdf13d9e2cff3bfba3e51a4b8eabd7d2214a17cc35a6defefc441f40e"} Jan 20 17:08:42 crc kubenswrapper[4558]: I0120 17:08:42.157206 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-77c1-account-create-update-9lp88" event={"ID":"60dcbf76-e430-47bc-afe1-832961122cc9","Type":"ContainerStarted","Data":"98d1a0894abf4164255f76393ecf87ffdfd7c24dbb91954f5ae5d0b3c97d7608"} Jan 20 17:08:42 crc kubenswrapper[4558]: I0120 17:08:42.159464 4558 generic.go:334] "Generic (PLEG): container finished" podID="f44e4f71-b12b-41da-bb06-0495080715ea" containerID="39a1bec43e337cd8f6f2ae1d32fb36f33f86238fbb2652fd1c16b14026a92e2f" exitCode=0 Jan 20 17:08:42 crc kubenswrapper[4558]: I0120 17:08:42.159510 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-b308-account-create-update-mhczn" event={"ID":"f44e4f71-b12b-41da-bb06-0495080715ea","Type":"ContainerDied","Data":"39a1bec43e337cd8f6f2ae1d32fb36f33f86238fbb2652fd1c16b14026a92e2f"} Jan 20 17:08:42 crc kubenswrapper[4558]: I0120 17:08:42.159526 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-b308-account-create-update-mhczn" 
event={"ID":"f44e4f71-b12b-41da-bb06-0495080715ea","Type":"ContainerStarted","Data":"af893b1f622959e0479e8a49c85d50619d11ca2ca12a41ddb714508bbde034d0"} Jan 20 17:08:42 crc kubenswrapper[4558]: I0120 17:08:42.161731 4558 generic.go:334] "Generic (PLEG): container finished" podID="ce2696d5-1fd9-496a-a751-68912b14248c" containerID="5f7af9fba2acb372e7c8a94d678b4fd70c8bec57360ab8d0714e1148d95ff99d" exitCode=0 Jan 20 17:08:42 crc kubenswrapper[4558]: I0120 17:08:42.161920 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="897179bd-8f95-47e8-b874-e1f7c2db06c7" containerName="ceilometer-central-agent" containerID="cri-o://86e5485cef63bc63ca0bd588b40d483bcc1d6c20fafa116fbbac46ac2602e0cb" gracePeriod=30 Jan 20 17:08:42 crc kubenswrapper[4558]: I0120 17:08:42.162195 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-db-create-hs5v4" event={"ID":"ce2696d5-1fd9-496a-a751-68912b14248c","Type":"ContainerDied","Data":"5f7af9fba2acb372e7c8a94d678b4fd70c8bec57360ab8d0714e1148d95ff99d"} Jan 20 17:08:42 crc kubenswrapper[4558]: I0120 17:08:42.166593 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="897179bd-8f95-47e8-b874-e1f7c2db06c7" containerName="proxy-httpd" containerID="cri-o://8db2ebc45ac71ce4fa8e3f4838455b8765bb2111869eaa2656b137d300bf7aa1" gracePeriod=30 Jan 20 17:08:42 crc kubenswrapper[4558]: I0120 17:08:42.166661 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="897179bd-8f95-47e8-b874-e1f7c2db06c7" containerName="sg-core" containerID="cri-o://da5479b2bc6f69158381f6b4f545f431790e8d4a301ad50da1d5f808b6102bf5" gracePeriod=30 Jan 20 17:08:42 crc kubenswrapper[4558]: I0120 17:08:42.166703 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="897179bd-8f95-47e8-b874-e1f7c2db06c7" containerName="ceilometer-notification-agent" containerID="cri-o://5839aa5e498cee14d343ff26b5edddf5cf98b1bb4356f594d515cd7aca5d1741" gracePeriod=30 Jan 20 17:08:42 crc kubenswrapper[4558]: I0120 17:08:42.796048 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:42 crc kubenswrapper[4558]: I0120 17:08:42.954356 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/897179bd-8f95-47e8-b874-e1f7c2db06c7-sg-core-conf-yaml\") pod \"897179bd-8f95-47e8-b874-e1f7c2db06c7\" (UID: \"897179bd-8f95-47e8-b874-e1f7c2db06c7\") " Jan 20 17:08:42 crc kubenswrapper[4558]: I0120 17:08:42.954449 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/897179bd-8f95-47e8-b874-e1f7c2db06c7-config-data\") pod \"897179bd-8f95-47e8-b874-e1f7c2db06c7\" (UID: \"897179bd-8f95-47e8-b874-e1f7c2db06c7\") " Jan 20 17:08:42 crc kubenswrapper[4558]: I0120 17:08:42.954537 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/897179bd-8f95-47e8-b874-e1f7c2db06c7-combined-ca-bundle\") pod \"897179bd-8f95-47e8-b874-e1f7c2db06c7\" (UID: \"897179bd-8f95-47e8-b874-e1f7c2db06c7\") " Jan 20 17:08:42 crc kubenswrapper[4558]: I0120 17:08:42.954573 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/897179bd-8f95-47e8-b874-e1f7c2db06c7-run-httpd\") pod \"897179bd-8f95-47e8-b874-e1f7c2db06c7\" (UID: \"897179bd-8f95-47e8-b874-e1f7c2db06c7\") " Jan 20 17:08:42 crc kubenswrapper[4558]: I0120 17:08:42.954602 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/897179bd-8f95-47e8-b874-e1f7c2db06c7-log-httpd\") pod \"897179bd-8f95-47e8-b874-e1f7c2db06c7\" (UID: \"897179bd-8f95-47e8-b874-e1f7c2db06c7\") " Jan 20 17:08:42 crc kubenswrapper[4558]: I0120 17:08:42.954627 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/897179bd-8f95-47e8-b874-e1f7c2db06c7-scripts\") pod \"897179bd-8f95-47e8-b874-e1f7c2db06c7\" (UID: \"897179bd-8f95-47e8-b874-e1f7c2db06c7\") " Jan 20 17:08:42 crc kubenswrapper[4558]: I0120 17:08:42.954856 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r77db\" (UniqueName: \"kubernetes.io/projected/897179bd-8f95-47e8-b874-e1f7c2db06c7-kube-api-access-r77db\") pod \"897179bd-8f95-47e8-b874-e1f7c2db06c7\" (UID: \"897179bd-8f95-47e8-b874-e1f7c2db06c7\") " Jan 20 17:08:42 crc kubenswrapper[4558]: I0120 17:08:42.958228 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/897179bd-8f95-47e8-b874-e1f7c2db06c7-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "897179bd-8f95-47e8-b874-e1f7c2db06c7" (UID: "897179bd-8f95-47e8-b874-e1f7c2db06c7"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:08:42 crc kubenswrapper[4558]: I0120 17:08:42.960606 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/897179bd-8f95-47e8-b874-e1f7c2db06c7-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "897179bd-8f95-47e8-b874-e1f7c2db06c7" (UID: "897179bd-8f95-47e8-b874-e1f7c2db06c7"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:08:42 crc kubenswrapper[4558]: I0120 17:08:42.978360 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/897179bd-8f95-47e8-b874-e1f7c2db06c7-kube-api-access-r77db" (OuterVolumeSpecName: "kube-api-access-r77db") pod "897179bd-8f95-47e8-b874-e1f7c2db06c7" (UID: "897179bd-8f95-47e8-b874-e1f7c2db06c7"). InnerVolumeSpecName "kube-api-access-r77db". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:08:42 crc kubenswrapper[4558]: I0120 17:08:42.978568 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/897179bd-8f95-47e8-b874-e1f7c2db06c7-scripts" (OuterVolumeSpecName: "scripts") pod "897179bd-8f95-47e8-b874-e1f7c2db06c7" (UID: "897179bd-8f95-47e8-b874-e1f7c2db06c7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:08:42 crc kubenswrapper[4558]: I0120 17:08:42.988614 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/897179bd-8f95-47e8-b874-e1f7c2db06c7-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "897179bd-8f95-47e8-b874-e1f7c2db06c7" (UID: "897179bd-8f95-47e8-b874-e1f7c2db06c7"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.024903 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/897179bd-8f95-47e8-b874-e1f7c2db06c7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "897179bd-8f95-47e8-b874-e1f7c2db06c7" (UID: "897179bd-8f95-47e8-b874-e1f7c2db06c7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.033230 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/897179bd-8f95-47e8-b874-e1f7c2db06c7-config-data" (OuterVolumeSpecName: "config-data") pod "897179bd-8f95-47e8-b874-e1f7c2db06c7" (UID: "897179bd-8f95-47e8-b874-e1f7c2db06c7"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.057824 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r77db\" (UniqueName: \"kubernetes.io/projected/897179bd-8f95-47e8-b874-e1f7c2db06c7-kube-api-access-r77db\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.057851 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/897179bd-8f95-47e8-b874-e1f7c2db06c7-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.057863 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/897179bd-8f95-47e8-b874-e1f7c2db06c7-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.057876 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/897179bd-8f95-47e8-b874-e1f7c2db06c7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.057888 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/897179bd-8f95-47e8-b874-e1f7c2db06c7-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.057898 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/897179bd-8f95-47e8-b874-e1f7c2db06c7-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.057907 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/897179bd-8f95-47e8-b874-e1f7c2db06c7-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.170886 4558 generic.go:334] "Generic (PLEG): container finished" podID="897179bd-8f95-47e8-b874-e1f7c2db06c7" containerID="8db2ebc45ac71ce4fa8e3f4838455b8765bb2111869eaa2656b137d300bf7aa1" exitCode=0 Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.170917 4558 generic.go:334] "Generic (PLEG): container finished" podID="897179bd-8f95-47e8-b874-e1f7c2db06c7" containerID="da5479b2bc6f69158381f6b4f545f431790e8d4a301ad50da1d5f808b6102bf5" exitCode=2 Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.170924 4558 generic.go:334] "Generic (PLEG): container finished" podID="897179bd-8f95-47e8-b874-e1f7c2db06c7" containerID="5839aa5e498cee14d343ff26b5edddf5cf98b1bb4356f594d515cd7aca5d1741" exitCode=0 Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.170931 4558 generic.go:334] "Generic (PLEG): container finished" podID="897179bd-8f95-47e8-b874-e1f7c2db06c7" containerID="86e5485cef63bc63ca0bd588b40d483bcc1d6c20fafa116fbbac46ac2602e0cb" exitCode=0 Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.171112 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.176102 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"897179bd-8f95-47e8-b874-e1f7c2db06c7","Type":"ContainerDied","Data":"8db2ebc45ac71ce4fa8e3f4838455b8765bb2111869eaa2656b137d300bf7aa1"} Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.176203 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"897179bd-8f95-47e8-b874-e1f7c2db06c7","Type":"ContainerDied","Data":"da5479b2bc6f69158381f6b4f545f431790e8d4a301ad50da1d5f808b6102bf5"} Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.176222 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"897179bd-8f95-47e8-b874-e1f7c2db06c7","Type":"ContainerDied","Data":"5839aa5e498cee14d343ff26b5edddf5cf98b1bb4356f594d515cd7aca5d1741"} Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.176232 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"897179bd-8f95-47e8-b874-e1f7c2db06c7","Type":"ContainerDied","Data":"86e5485cef63bc63ca0bd588b40d483bcc1d6c20fafa116fbbac46ac2602e0cb"} Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.176242 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"897179bd-8f95-47e8-b874-e1f7c2db06c7","Type":"ContainerDied","Data":"61c6c2095ae0eee7f059d64cc950e39c1871a29dd6e94931edcffddd0968781c"} Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.176260 4558 scope.go:117] "RemoveContainer" containerID="8db2ebc45ac71ce4fa8e3f4838455b8765bb2111869eaa2656b137d300bf7aa1" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.200647 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.210297 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.225685 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:08:43 crc kubenswrapper[4558]: E0120 17:08:43.226712 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="897179bd-8f95-47e8-b874-e1f7c2db06c7" containerName="ceilometer-central-agent" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.226792 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="897179bd-8f95-47e8-b874-e1f7c2db06c7" containerName="ceilometer-central-agent" Jan 20 17:08:43 crc kubenswrapper[4558]: E0120 17:08:43.226858 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="897179bd-8f95-47e8-b874-e1f7c2db06c7" containerName="sg-core" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.226908 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="897179bd-8f95-47e8-b874-e1f7c2db06c7" containerName="sg-core" Jan 20 17:08:43 crc kubenswrapper[4558]: E0120 17:08:43.226968 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="897179bd-8f95-47e8-b874-e1f7c2db06c7" containerName="ceilometer-notification-agent" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.227015 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="897179bd-8f95-47e8-b874-e1f7c2db06c7" containerName="ceilometer-notification-agent" Jan 20 17:08:43 crc kubenswrapper[4558]: E0120 17:08:43.227077 4558 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="897179bd-8f95-47e8-b874-e1f7c2db06c7" containerName="proxy-httpd" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.227123 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="897179bd-8f95-47e8-b874-e1f7c2db06c7" containerName="proxy-httpd" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.227413 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="897179bd-8f95-47e8-b874-e1f7c2db06c7" containerName="ceilometer-central-agent" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.227484 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="897179bd-8f95-47e8-b874-e1f7c2db06c7" containerName="proxy-httpd" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.227532 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="897179bd-8f95-47e8-b874-e1f7c2db06c7" containerName="ceilometer-notification-agent" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.227583 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="897179bd-8f95-47e8-b874-e1f7c2db06c7" containerName="sg-core" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.229132 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.234935 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.235147 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.256454 4558 scope.go:117] "RemoveContainer" containerID="da5479b2bc6f69158381f6b4f545f431790e8d4a301ad50da1d5f808b6102bf5" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.272217 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.334017 4558 scope.go:117] "RemoveContainer" containerID="5839aa5e498cee14d343ff26b5edddf5cf98b1bb4356f594d515cd7aca5d1741" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.361623 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:08:43 crc kubenswrapper[4558]: E0120 17:08:43.362737 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[combined-ca-bundle config-data kube-api-access-8tqck log-httpd run-httpd scripts sg-core-conf-yaml], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack-kuttl-tests/ceilometer-0" podUID="09c4077a-74b1-4da7-9376-51ca3f968c68" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.363858 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/09c4077a-74b1-4da7-9376-51ca3f968c68-run-httpd\") pod \"ceilometer-0\" (UID: \"09c4077a-74b1-4da7-9376-51ca3f968c68\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.363952 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/09c4077a-74b1-4da7-9376-51ca3f968c68-log-httpd\") pod \"ceilometer-0\" (UID: \"09c4077a-74b1-4da7-9376-51ca3f968c68\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:43 crc kubenswrapper[4558]: 
I0120 17:08:43.364026 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/09c4077a-74b1-4da7-9376-51ca3f968c68-scripts\") pod \"ceilometer-0\" (UID: \"09c4077a-74b1-4da7-9376-51ca3f968c68\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.364096 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/09c4077a-74b1-4da7-9376-51ca3f968c68-config-data\") pod \"ceilometer-0\" (UID: \"09c4077a-74b1-4da7-9376-51ca3f968c68\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.364208 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/09c4077a-74b1-4da7-9376-51ca3f968c68-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"09c4077a-74b1-4da7-9376-51ca3f968c68\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.364305 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8tqck\" (UniqueName: \"kubernetes.io/projected/09c4077a-74b1-4da7-9376-51ca3f968c68-kube-api-access-8tqck\") pod \"ceilometer-0\" (UID: \"09c4077a-74b1-4da7-9376-51ca3f968c68\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.364391 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09c4077a-74b1-4da7-9376-51ca3f968c68-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"09c4077a-74b1-4da7-9376-51ca3f968c68\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.364927 4558 scope.go:117] "RemoveContainer" containerID="86e5485cef63bc63ca0bd588b40d483bcc1d6c20fafa116fbbac46ac2602e0cb" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.373035 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/swift-proxy-69ddc9f468-cx7lb" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.373582 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/swift-proxy-69ddc9f468-cx7lb" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.397277 4558 scope.go:117] "RemoveContainer" containerID="8db2ebc45ac71ce4fa8e3f4838455b8765bb2111869eaa2656b137d300bf7aa1" Jan 20 17:08:43 crc kubenswrapper[4558]: E0120 17:08:43.398281 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8db2ebc45ac71ce4fa8e3f4838455b8765bb2111869eaa2656b137d300bf7aa1\": container with ID starting with 8db2ebc45ac71ce4fa8e3f4838455b8765bb2111869eaa2656b137d300bf7aa1 not found: ID does not exist" containerID="8db2ebc45ac71ce4fa8e3f4838455b8765bb2111869eaa2656b137d300bf7aa1" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.398396 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8db2ebc45ac71ce4fa8e3f4838455b8765bb2111869eaa2656b137d300bf7aa1"} err="failed to get container status \"8db2ebc45ac71ce4fa8e3f4838455b8765bb2111869eaa2656b137d300bf7aa1\": rpc error: code = NotFound desc = could not find container 
\"8db2ebc45ac71ce4fa8e3f4838455b8765bb2111869eaa2656b137d300bf7aa1\": container with ID starting with 8db2ebc45ac71ce4fa8e3f4838455b8765bb2111869eaa2656b137d300bf7aa1 not found: ID does not exist" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.398492 4558 scope.go:117] "RemoveContainer" containerID="da5479b2bc6f69158381f6b4f545f431790e8d4a301ad50da1d5f808b6102bf5" Jan 20 17:08:43 crc kubenswrapper[4558]: E0120 17:08:43.400077 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"da5479b2bc6f69158381f6b4f545f431790e8d4a301ad50da1d5f808b6102bf5\": container with ID starting with da5479b2bc6f69158381f6b4f545f431790e8d4a301ad50da1d5f808b6102bf5 not found: ID does not exist" containerID="da5479b2bc6f69158381f6b4f545f431790e8d4a301ad50da1d5f808b6102bf5" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.400245 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"da5479b2bc6f69158381f6b4f545f431790e8d4a301ad50da1d5f808b6102bf5"} err="failed to get container status \"da5479b2bc6f69158381f6b4f545f431790e8d4a301ad50da1d5f808b6102bf5\": rpc error: code = NotFound desc = could not find container \"da5479b2bc6f69158381f6b4f545f431790e8d4a301ad50da1d5f808b6102bf5\": container with ID starting with da5479b2bc6f69158381f6b4f545f431790e8d4a301ad50da1d5f808b6102bf5 not found: ID does not exist" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.400370 4558 scope.go:117] "RemoveContainer" containerID="5839aa5e498cee14d343ff26b5edddf5cf98b1bb4356f594d515cd7aca5d1741" Jan 20 17:08:43 crc kubenswrapper[4558]: E0120 17:08:43.400999 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5839aa5e498cee14d343ff26b5edddf5cf98b1bb4356f594d515cd7aca5d1741\": container with ID starting with 5839aa5e498cee14d343ff26b5edddf5cf98b1bb4356f594d515cd7aca5d1741 not found: ID does not exist" containerID="5839aa5e498cee14d343ff26b5edddf5cf98b1bb4356f594d515cd7aca5d1741" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.401041 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5839aa5e498cee14d343ff26b5edddf5cf98b1bb4356f594d515cd7aca5d1741"} err="failed to get container status \"5839aa5e498cee14d343ff26b5edddf5cf98b1bb4356f594d515cd7aca5d1741\": rpc error: code = NotFound desc = could not find container \"5839aa5e498cee14d343ff26b5edddf5cf98b1bb4356f594d515cd7aca5d1741\": container with ID starting with 5839aa5e498cee14d343ff26b5edddf5cf98b1bb4356f594d515cd7aca5d1741 not found: ID does not exist" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.401070 4558 scope.go:117] "RemoveContainer" containerID="86e5485cef63bc63ca0bd588b40d483bcc1d6c20fafa116fbbac46ac2602e0cb" Jan 20 17:08:43 crc kubenswrapper[4558]: E0120 17:08:43.401429 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"86e5485cef63bc63ca0bd588b40d483bcc1d6c20fafa116fbbac46ac2602e0cb\": container with ID starting with 86e5485cef63bc63ca0bd588b40d483bcc1d6c20fafa116fbbac46ac2602e0cb not found: ID does not exist" containerID="86e5485cef63bc63ca0bd588b40d483bcc1d6c20fafa116fbbac46ac2602e0cb" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.401535 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"86e5485cef63bc63ca0bd588b40d483bcc1d6c20fafa116fbbac46ac2602e0cb"} 
err="failed to get container status \"86e5485cef63bc63ca0bd588b40d483bcc1d6c20fafa116fbbac46ac2602e0cb\": rpc error: code = NotFound desc = could not find container \"86e5485cef63bc63ca0bd588b40d483bcc1d6c20fafa116fbbac46ac2602e0cb\": container with ID starting with 86e5485cef63bc63ca0bd588b40d483bcc1d6c20fafa116fbbac46ac2602e0cb not found: ID does not exist" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.401632 4558 scope.go:117] "RemoveContainer" containerID="8db2ebc45ac71ce4fa8e3f4838455b8765bb2111869eaa2656b137d300bf7aa1" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.402010 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8db2ebc45ac71ce4fa8e3f4838455b8765bb2111869eaa2656b137d300bf7aa1"} err="failed to get container status \"8db2ebc45ac71ce4fa8e3f4838455b8765bb2111869eaa2656b137d300bf7aa1\": rpc error: code = NotFound desc = could not find container \"8db2ebc45ac71ce4fa8e3f4838455b8765bb2111869eaa2656b137d300bf7aa1\": container with ID starting with 8db2ebc45ac71ce4fa8e3f4838455b8765bb2111869eaa2656b137d300bf7aa1 not found: ID does not exist" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.402036 4558 scope.go:117] "RemoveContainer" containerID="da5479b2bc6f69158381f6b4f545f431790e8d4a301ad50da1d5f808b6102bf5" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.402487 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"da5479b2bc6f69158381f6b4f545f431790e8d4a301ad50da1d5f808b6102bf5"} err="failed to get container status \"da5479b2bc6f69158381f6b4f545f431790e8d4a301ad50da1d5f808b6102bf5\": rpc error: code = NotFound desc = could not find container \"da5479b2bc6f69158381f6b4f545f431790e8d4a301ad50da1d5f808b6102bf5\": container with ID starting with da5479b2bc6f69158381f6b4f545f431790e8d4a301ad50da1d5f808b6102bf5 not found: ID does not exist" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.402519 4558 scope.go:117] "RemoveContainer" containerID="5839aa5e498cee14d343ff26b5edddf5cf98b1bb4356f594d515cd7aca5d1741" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.402873 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5839aa5e498cee14d343ff26b5edddf5cf98b1bb4356f594d515cd7aca5d1741"} err="failed to get container status \"5839aa5e498cee14d343ff26b5edddf5cf98b1bb4356f594d515cd7aca5d1741\": rpc error: code = NotFound desc = could not find container \"5839aa5e498cee14d343ff26b5edddf5cf98b1bb4356f594d515cd7aca5d1741\": container with ID starting with 5839aa5e498cee14d343ff26b5edddf5cf98b1bb4356f594d515cd7aca5d1741 not found: ID does not exist" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.402893 4558 scope.go:117] "RemoveContainer" containerID="86e5485cef63bc63ca0bd588b40d483bcc1d6c20fafa116fbbac46ac2602e0cb" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.403587 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"86e5485cef63bc63ca0bd588b40d483bcc1d6c20fafa116fbbac46ac2602e0cb"} err="failed to get container status \"86e5485cef63bc63ca0bd588b40d483bcc1d6c20fafa116fbbac46ac2602e0cb\": rpc error: code = NotFound desc = could not find container \"86e5485cef63bc63ca0bd588b40d483bcc1d6c20fafa116fbbac46ac2602e0cb\": container with ID starting with 86e5485cef63bc63ca0bd588b40d483bcc1d6c20fafa116fbbac46ac2602e0cb not found: ID does not exist" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.403684 4558 scope.go:117] "RemoveContainer" 
containerID="8db2ebc45ac71ce4fa8e3f4838455b8765bb2111869eaa2656b137d300bf7aa1" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.404065 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8db2ebc45ac71ce4fa8e3f4838455b8765bb2111869eaa2656b137d300bf7aa1"} err="failed to get container status \"8db2ebc45ac71ce4fa8e3f4838455b8765bb2111869eaa2656b137d300bf7aa1\": rpc error: code = NotFound desc = could not find container \"8db2ebc45ac71ce4fa8e3f4838455b8765bb2111869eaa2656b137d300bf7aa1\": container with ID starting with 8db2ebc45ac71ce4fa8e3f4838455b8765bb2111869eaa2656b137d300bf7aa1 not found: ID does not exist" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.404181 4558 scope.go:117] "RemoveContainer" containerID="da5479b2bc6f69158381f6b4f545f431790e8d4a301ad50da1d5f808b6102bf5" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.405793 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"da5479b2bc6f69158381f6b4f545f431790e8d4a301ad50da1d5f808b6102bf5"} err="failed to get container status \"da5479b2bc6f69158381f6b4f545f431790e8d4a301ad50da1d5f808b6102bf5\": rpc error: code = NotFound desc = could not find container \"da5479b2bc6f69158381f6b4f545f431790e8d4a301ad50da1d5f808b6102bf5\": container with ID starting with da5479b2bc6f69158381f6b4f545f431790e8d4a301ad50da1d5f808b6102bf5 not found: ID does not exist" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.405820 4558 scope.go:117] "RemoveContainer" containerID="5839aa5e498cee14d343ff26b5edddf5cf98b1bb4356f594d515cd7aca5d1741" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.407529 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5839aa5e498cee14d343ff26b5edddf5cf98b1bb4356f594d515cd7aca5d1741"} err="failed to get container status \"5839aa5e498cee14d343ff26b5edddf5cf98b1bb4356f594d515cd7aca5d1741\": rpc error: code = NotFound desc = could not find container \"5839aa5e498cee14d343ff26b5edddf5cf98b1bb4356f594d515cd7aca5d1741\": container with ID starting with 5839aa5e498cee14d343ff26b5edddf5cf98b1bb4356f594d515cd7aca5d1741 not found: ID does not exist" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.407624 4558 scope.go:117] "RemoveContainer" containerID="86e5485cef63bc63ca0bd588b40d483bcc1d6c20fafa116fbbac46ac2602e0cb" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.408064 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"86e5485cef63bc63ca0bd588b40d483bcc1d6c20fafa116fbbac46ac2602e0cb"} err="failed to get container status \"86e5485cef63bc63ca0bd588b40d483bcc1d6c20fafa116fbbac46ac2602e0cb\": rpc error: code = NotFound desc = could not find container \"86e5485cef63bc63ca0bd588b40d483bcc1d6c20fafa116fbbac46ac2602e0cb\": container with ID starting with 86e5485cef63bc63ca0bd588b40d483bcc1d6c20fafa116fbbac46ac2602e0cb not found: ID does not exist" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.408208 4558 scope.go:117] "RemoveContainer" containerID="8db2ebc45ac71ce4fa8e3f4838455b8765bb2111869eaa2656b137d300bf7aa1" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.408544 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8db2ebc45ac71ce4fa8e3f4838455b8765bb2111869eaa2656b137d300bf7aa1"} err="failed to get container status \"8db2ebc45ac71ce4fa8e3f4838455b8765bb2111869eaa2656b137d300bf7aa1\": rpc error: code = NotFound desc = could not find 
container \"8db2ebc45ac71ce4fa8e3f4838455b8765bb2111869eaa2656b137d300bf7aa1\": container with ID starting with 8db2ebc45ac71ce4fa8e3f4838455b8765bb2111869eaa2656b137d300bf7aa1 not found: ID does not exist" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.408707 4558 scope.go:117] "RemoveContainer" containerID="da5479b2bc6f69158381f6b4f545f431790e8d4a301ad50da1d5f808b6102bf5" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.408958 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"da5479b2bc6f69158381f6b4f545f431790e8d4a301ad50da1d5f808b6102bf5"} err="failed to get container status \"da5479b2bc6f69158381f6b4f545f431790e8d4a301ad50da1d5f808b6102bf5\": rpc error: code = NotFound desc = could not find container \"da5479b2bc6f69158381f6b4f545f431790e8d4a301ad50da1d5f808b6102bf5\": container with ID starting with da5479b2bc6f69158381f6b4f545f431790e8d4a301ad50da1d5f808b6102bf5 not found: ID does not exist" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.409046 4558 scope.go:117] "RemoveContainer" containerID="5839aa5e498cee14d343ff26b5edddf5cf98b1bb4356f594d515cd7aca5d1741" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.409319 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5839aa5e498cee14d343ff26b5edddf5cf98b1bb4356f594d515cd7aca5d1741"} err="failed to get container status \"5839aa5e498cee14d343ff26b5edddf5cf98b1bb4356f594d515cd7aca5d1741\": rpc error: code = NotFound desc = could not find container \"5839aa5e498cee14d343ff26b5edddf5cf98b1bb4356f594d515cd7aca5d1741\": container with ID starting with 5839aa5e498cee14d343ff26b5edddf5cf98b1bb4356f594d515cd7aca5d1741 not found: ID does not exist" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.409469 4558 scope.go:117] "RemoveContainer" containerID="86e5485cef63bc63ca0bd588b40d483bcc1d6c20fafa116fbbac46ac2602e0cb" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.409777 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"86e5485cef63bc63ca0bd588b40d483bcc1d6c20fafa116fbbac46ac2602e0cb"} err="failed to get container status \"86e5485cef63bc63ca0bd588b40d483bcc1d6c20fafa116fbbac46ac2602e0cb\": rpc error: code = NotFound desc = could not find container \"86e5485cef63bc63ca0bd588b40d483bcc1d6c20fafa116fbbac46ac2602e0cb\": container with ID starting with 86e5485cef63bc63ca0bd588b40d483bcc1d6c20fafa116fbbac46ac2602e0cb not found: ID does not exist" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.467078 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/09c4077a-74b1-4da7-9376-51ca3f968c68-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"09c4077a-74b1-4da7-9376-51ca3f968c68\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.467225 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8tqck\" (UniqueName: \"kubernetes.io/projected/09c4077a-74b1-4da7-9376-51ca3f968c68-kube-api-access-8tqck\") pod \"ceilometer-0\" (UID: \"09c4077a-74b1-4da7-9376-51ca3f968c68\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.467292 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09c4077a-74b1-4da7-9376-51ca3f968c68-combined-ca-bundle\") pod 
\"ceilometer-0\" (UID: \"09c4077a-74b1-4da7-9376-51ca3f968c68\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.467436 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/09c4077a-74b1-4da7-9376-51ca3f968c68-run-httpd\") pod \"ceilometer-0\" (UID: \"09c4077a-74b1-4da7-9376-51ca3f968c68\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.467482 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/09c4077a-74b1-4da7-9376-51ca3f968c68-log-httpd\") pod \"ceilometer-0\" (UID: \"09c4077a-74b1-4da7-9376-51ca3f968c68\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.467514 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/09c4077a-74b1-4da7-9376-51ca3f968c68-scripts\") pod \"ceilometer-0\" (UID: \"09c4077a-74b1-4da7-9376-51ca3f968c68\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.467544 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/09c4077a-74b1-4da7-9376-51ca3f968c68-config-data\") pod \"ceilometer-0\" (UID: \"09c4077a-74b1-4da7-9376-51ca3f968c68\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.471392 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/09c4077a-74b1-4da7-9376-51ca3f968c68-run-httpd\") pod \"ceilometer-0\" (UID: \"09c4077a-74b1-4da7-9376-51ca3f968c68\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.471595 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/09c4077a-74b1-4da7-9376-51ca3f968c68-log-httpd\") pod \"ceilometer-0\" (UID: \"09c4077a-74b1-4da7-9376-51ca3f968c68\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.473427 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09c4077a-74b1-4da7-9376-51ca3f968c68-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"09c4077a-74b1-4da7-9376-51ca3f968c68\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.476054 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/09c4077a-74b1-4da7-9376-51ca3f968c68-scripts\") pod \"ceilometer-0\" (UID: \"09c4077a-74b1-4da7-9376-51ca3f968c68\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.476532 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/09c4077a-74b1-4da7-9376-51ca3f968c68-config-data\") pod \"ceilometer-0\" (UID: \"09c4077a-74b1-4da7-9376-51ca3f968c68\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.482180 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/09c4077a-74b1-4da7-9376-51ca3f968c68-sg-core-conf-yaml\") 
pod \"ceilometer-0\" (UID: \"09c4077a-74b1-4da7-9376-51ca3f968c68\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.484015 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8tqck\" (UniqueName: \"kubernetes.io/projected/09c4077a-74b1-4da7-9376-51ca3f968c68-kube-api-access-8tqck\") pod \"ceilometer-0\" (UID: \"09c4077a-74b1-4da7-9376-51ca3f968c68\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.546885 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-db-create-k57tt" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.671759 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b8906a01-1578-45d7-bfa0-4bbb4dfe6123-operator-scripts\") pod \"b8906a01-1578-45d7-bfa0-4bbb4dfe6123\" (UID: \"b8906a01-1578-45d7-bfa0-4bbb4dfe6123\") " Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.671923 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j9l5k\" (UniqueName: \"kubernetes.io/projected/b8906a01-1578-45d7-bfa0-4bbb4dfe6123-kube-api-access-j9l5k\") pod \"b8906a01-1578-45d7-bfa0-4bbb4dfe6123\" (UID: \"b8906a01-1578-45d7-bfa0-4bbb4dfe6123\") " Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.672414 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b8906a01-1578-45d7-bfa0-4bbb4dfe6123-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b8906a01-1578-45d7-bfa0-4bbb4dfe6123" (UID: "b8906a01-1578-45d7-bfa0-4bbb4dfe6123"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.677098 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b8906a01-1578-45d7-bfa0-4bbb4dfe6123-kube-api-access-j9l5k" (OuterVolumeSpecName: "kube-api-access-j9l5k") pod "b8906a01-1578-45d7-bfa0-4bbb4dfe6123" (UID: "b8906a01-1578-45d7-bfa0-4bbb4dfe6123"). InnerVolumeSpecName "kube-api-access-j9l5k". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.691853 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-77c1-account-create-update-9lp88" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.774409 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b8906a01-1578-45d7-bfa0-4bbb4dfe6123-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.774441 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j9l5k\" (UniqueName: \"kubernetes.io/projected/b8906a01-1578-45d7-bfa0-4bbb4dfe6123-kube-api-access-j9l5k\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.829621 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-db-create-hs5v4" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.836235 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-b308-account-create-update-mhczn" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.841261 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-db-create-gkhg5" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.846001 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-0eec-account-create-update-j75x4" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.877892 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/60dcbf76-e430-47bc-afe1-832961122cc9-operator-scripts\") pod \"60dcbf76-e430-47bc-afe1-832961122cc9\" (UID: \"60dcbf76-e430-47bc-afe1-832961122cc9\") " Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.877965 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9k2vq\" (UniqueName: \"kubernetes.io/projected/60dcbf76-e430-47bc-afe1-832961122cc9-kube-api-access-9k2vq\") pod \"60dcbf76-e430-47bc-afe1-832961122cc9\" (UID: \"60dcbf76-e430-47bc-afe1-832961122cc9\") " Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.879674 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/60dcbf76-e430-47bc-afe1-832961122cc9-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "60dcbf76-e430-47bc-afe1-832961122cc9" (UID: "60dcbf76-e430-47bc-afe1-832961122cc9"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.890067 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/60dcbf76-e430-47bc-afe1-832961122cc9-kube-api-access-9k2vq" (OuterVolumeSpecName: "kube-api-access-9k2vq") pod "60dcbf76-e430-47bc-afe1-832961122cc9" (UID: "60dcbf76-e430-47bc-afe1-832961122cc9"). InnerVolumeSpecName "kube-api-access-9k2vq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.980057 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/038dd72b-1a11-42d9-98bb-5f681a2fbc38-operator-scripts\") pod \"038dd72b-1a11-42d9-98bb-5f681a2fbc38\" (UID: \"038dd72b-1a11-42d9-98bb-5f681a2fbc38\") " Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.980513 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f44e4f71-b12b-41da-bb06-0495080715ea-operator-scripts\") pod \"f44e4f71-b12b-41da-bb06-0495080715ea\" (UID: \"f44e4f71-b12b-41da-bb06-0495080715ea\") " Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.980588 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rgpbv\" (UniqueName: \"kubernetes.io/projected/038dd72b-1a11-42d9-98bb-5f681a2fbc38-kube-api-access-rgpbv\") pod \"038dd72b-1a11-42d9-98bb-5f681a2fbc38\" (UID: \"038dd72b-1a11-42d9-98bb-5f681a2fbc38\") " Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.980687 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7eea1d5d-bf4b-4104-9c44-8dc4846f8c76-operator-scripts\") pod \"7eea1d5d-bf4b-4104-9c44-8dc4846f8c76\" (UID: \"7eea1d5d-bf4b-4104-9c44-8dc4846f8c76\") " Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.980728 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8gq7f\" (UniqueName: \"kubernetes.io/projected/ce2696d5-1fd9-496a-a751-68912b14248c-kube-api-access-8gq7f\") pod \"ce2696d5-1fd9-496a-a751-68912b14248c\" (UID: \"ce2696d5-1fd9-496a-a751-68912b14248c\") " Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.980790 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f44e4f71-b12b-41da-bb06-0495080715ea-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f44e4f71-b12b-41da-bb06-0495080715ea" (UID: "f44e4f71-b12b-41da-bb06-0495080715ea"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.980808 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ndvv8\" (UniqueName: \"kubernetes.io/projected/f44e4f71-b12b-41da-bb06-0495080715ea-kube-api-access-ndvv8\") pod \"f44e4f71-b12b-41da-bb06-0495080715ea\" (UID: \"f44e4f71-b12b-41da-bb06-0495080715ea\") " Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.980935 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ce2696d5-1fd9-496a-a751-68912b14248c-operator-scripts\") pod \"ce2696d5-1fd9-496a-a751-68912b14248c\" (UID: \"ce2696d5-1fd9-496a-a751-68912b14248c\") " Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.981112 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kqld8\" (UniqueName: \"kubernetes.io/projected/7eea1d5d-bf4b-4104-9c44-8dc4846f8c76-kube-api-access-kqld8\") pod \"7eea1d5d-bf4b-4104-9c44-8dc4846f8c76\" (UID: \"7eea1d5d-bf4b-4104-9c44-8dc4846f8c76\") " Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.981922 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7eea1d5d-bf4b-4104-9c44-8dc4846f8c76-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7eea1d5d-bf4b-4104-9c44-8dc4846f8c76" (UID: "7eea1d5d-bf4b-4104-9c44-8dc4846f8c76"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.982036 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ce2696d5-1fd9-496a-a751-68912b14248c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ce2696d5-1fd9-496a-a751-68912b14248c" (UID: "ce2696d5-1fd9-496a-a751-68912b14248c"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.982194 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/038dd72b-1a11-42d9-98bb-5f681a2fbc38-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "038dd72b-1a11-42d9-98bb-5f681a2fbc38" (UID: "038dd72b-1a11-42d9-98bb-5f681a2fbc38"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.982861 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ce2696d5-1fd9-496a-a751-68912b14248c-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.982944 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/038dd72b-1a11-42d9-98bb-5f681a2fbc38-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.983010 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f44e4f71-b12b-41da-bb06-0495080715ea-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.983064 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/60dcbf76-e430-47bc-afe1-832961122cc9-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.983113 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9k2vq\" (UniqueName: \"kubernetes.io/projected/60dcbf76-e430-47bc-afe1-832961122cc9-kube-api-access-9k2vq\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.983352 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7eea1d5d-bf4b-4104-9c44-8dc4846f8c76-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.985520 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/038dd72b-1a11-42d9-98bb-5f681a2fbc38-kube-api-access-rgpbv" (OuterVolumeSpecName: "kube-api-access-rgpbv") pod "038dd72b-1a11-42d9-98bb-5f681a2fbc38" (UID: "038dd72b-1a11-42d9-98bb-5f681a2fbc38"). InnerVolumeSpecName "kube-api-access-rgpbv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.985584 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f44e4f71-b12b-41da-bb06-0495080715ea-kube-api-access-ndvv8" (OuterVolumeSpecName: "kube-api-access-ndvv8") pod "f44e4f71-b12b-41da-bb06-0495080715ea" (UID: "f44e4f71-b12b-41da-bb06-0495080715ea"). InnerVolumeSpecName "kube-api-access-ndvv8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.985779 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ce2696d5-1fd9-496a-a751-68912b14248c-kube-api-access-8gq7f" (OuterVolumeSpecName: "kube-api-access-8gq7f") pod "ce2696d5-1fd9-496a-a751-68912b14248c" (UID: "ce2696d5-1fd9-496a-a751-68912b14248c"). InnerVolumeSpecName "kube-api-access-8gq7f". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:08:43 crc kubenswrapper[4558]: I0120 17:08:43.987345 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7eea1d5d-bf4b-4104-9c44-8dc4846f8c76-kube-api-access-kqld8" (OuterVolumeSpecName: "kube-api-access-kqld8") pod "7eea1d5d-bf4b-4104-9c44-8dc4846f8c76" (UID: "7eea1d5d-bf4b-4104-9c44-8dc4846f8c76"). InnerVolumeSpecName "kube-api-access-kqld8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:08:44 crc kubenswrapper[4558]: I0120 17:08:44.088256 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rgpbv\" (UniqueName: \"kubernetes.io/projected/038dd72b-1a11-42d9-98bb-5f681a2fbc38-kube-api-access-rgpbv\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:44 crc kubenswrapper[4558]: I0120 17:08:44.088286 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8gq7f\" (UniqueName: \"kubernetes.io/projected/ce2696d5-1fd9-496a-a751-68912b14248c-kube-api-access-8gq7f\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:44 crc kubenswrapper[4558]: I0120 17:08:44.088303 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ndvv8\" (UniqueName: \"kubernetes.io/projected/f44e4f71-b12b-41da-bb06-0495080715ea-kube-api-access-ndvv8\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:44 crc kubenswrapper[4558]: I0120 17:08:44.088315 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kqld8\" (UniqueName: \"kubernetes.io/projected/7eea1d5d-bf4b-4104-9c44-8dc4846f8c76-kube-api-access-kqld8\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:44 crc kubenswrapper[4558]: I0120 17:08:44.183983 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-db-create-hs5v4" event={"ID":"ce2696d5-1fd9-496a-a751-68912b14248c","Type":"ContainerDied","Data":"e69c819ba37bbaf9fc122a7891ff169ab8397b00152243ba2ac090b7f3dd7ba3"} Jan 20 17:08:44 crc kubenswrapper[4558]: I0120 17:08:44.184804 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e69c819ba37bbaf9fc122a7891ff169ab8397b00152243ba2ac090b7f3dd7ba3" Jan 20 17:08:44 crc kubenswrapper[4558]: I0120 17:08:44.184975 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-db-create-hs5v4" Jan 20 17:08:44 crc kubenswrapper[4558]: I0120 17:08:44.191059 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-db-create-k57tt" event={"ID":"b8906a01-1578-45d7-bfa0-4bbb4dfe6123","Type":"ContainerDied","Data":"118af64dcb204441177eca5cd02873a970d3f4f43cdce9262e39a0f8ae9152e1"} Jan 20 17:08:44 crc kubenswrapper[4558]: I0120 17:08:44.191120 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="118af64dcb204441177eca5cd02873a970d3f4f43cdce9262e39a0f8ae9152e1" Jan 20 17:08:44 crc kubenswrapper[4558]: I0120 17:08:44.191214 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-db-create-k57tt" Jan 20 17:08:44 crc kubenswrapper[4558]: I0120 17:08:44.196206 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-0eec-account-create-update-j75x4" Jan 20 17:08:44 crc kubenswrapper[4558]: I0120 17:08:44.196218 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-0eec-account-create-update-j75x4" event={"ID":"7eea1d5d-bf4b-4104-9c44-8dc4846f8c76","Type":"ContainerDied","Data":"e8df26a7d4ed4702d211c5491b97465a620ec8094049cbcd55f67c49fa108acb"} Jan 20 17:08:44 crc kubenswrapper[4558]: I0120 17:08:44.196429 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e8df26a7d4ed4702d211c5491b97465a620ec8094049cbcd55f67c49fa108acb" Jan 20 17:08:44 crc kubenswrapper[4558]: I0120 17:08:44.198205 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-db-create-gkhg5" event={"ID":"038dd72b-1a11-42d9-98bb-5f681a2fbc38","Type":"ContainerDied","Data":"f3a869ab66d396bac15dc13d86e81a39480c3b7f86be03c00c260b299b7638ef"} Jan 20 17:08:44 crc kubenswrapper[4558]: I0120 17:08:44.198264 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f3a869ab66d396bac15dc13d86e81a39480c3b7f86be03c00c260b299b7638ef" Jan 20 17:08:44 crc kubenswrapper[4558]: I0120 17:08:44.198343 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-db-create-gkhg5" Jan 20 17:08:44 crc kubenswrapper[4558]: I0120 17:08:44.203179 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-77c1-account-create-update-9lp88" event={"ID":"60dcbf76-e430-47bc-afe1-832961122cc9","Type":"ContainerDied","Data":"98d1a0894abf4164255f76393ecf87ffdfd7c24dbb91954f5ae5d0b3c97d7608"} Jan 20 17:08:44 crc kubenswrapper[4558]: I0120 17:08:44.203241 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="98d1a0894abf4164255f76393ecf87ffdfd7c24dbb91954f5ae5d0b3c97d7608" Jan 20 17:08:44 crc kubenswrapper[4558]: I0120 17:08:44.203324 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-77c1-account-create-update-9lp88" Jan 20 17:08:44 crc kubenswrapper[4558]: I0120 17:08:44.209663 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-b308-account-create-update-mhczn" event={"ID":"f44e4f71-b12b-41da-bb06-0495080715ea","Type":"ContainerDied","Data":"af893b1f622959e0479e8a49c85d50619d11ca2ca12a41ddb714508bbde034d0"} Jan 20 17:08:44 crc kubenswrapper[4558]: I0120 17:08:44.209710 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:44 crc kubenswrapper[4558]: I0120 17:08:44.209736 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="af893b1f622959e0479e8a49c85d50619d11ca2ca12a41ddb714508bbde034d0" Jan 20 17:08:44 crc kubenswrapper[4558]: I0120 17:08:44.209742 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-b308-account-create-update-mhczn" Jan 20 17:08:44 crc kubenswrapper[4558]: I0120 17:08:44.224044 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:44 crc kubenswrapper[4558]: I0120 17:08:44.391968 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/09c4077a-74b1-4da7-9376-51ca3f968c68-scripts\") pod \"09c4077a-74b1-4da7-9376-51ca3f968c68\" (UID: \"09c4077a-74b1-4da7-9376-51ca3f968c68\") " Jan 20 17:08:44 crc kubenswrapper[4558]: I0120 17:08:44.392022 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/09c4077a-74b1-4da7-9376-51ca3f968c68-sg-core-conf-yaml\") pod \"09c4077a-74b1-4da7-9376-51ca3f968c68\" (UID: \"09c4077a-74b1-4da7-9376-51ca3f968c68\") " Jan 20 17:08:44 crc kubenswrapper[4558]: I0120 17:08:44.392061 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/09c4077a-74b1-4da7-9376-51ca3f968c68-config-data\") pod \"09c4077a-74b1-4da7-9376-51ca3f968c68\" (UID: \"09c4077a-74b1-4da7-9376-51ca3f968c68\") " Jan 20 17:08:44 crc kubenswrapper[4558]: I0120 17:08:44.392088 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09c4077a-74b1-4da7-9376-51ca3f968c68-combined-ca-bundle\") pod \"09c4077a-74b1-4da7-9376-51ca3f968c68\" (UID: \"09c4077a-74b1-4da7-9376-51ca3f968c68\") " Jan 20 17:08:44 crc kubenswrapper[4558]: I0120 17:08:44.392128 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/09c4077a-74b1-4da7-9376-51ca3f968c68-run-httpd\") pod \"09c4077a-74b1-4da7-9376-51ca3f968c68\" (UID: \"09c4077a-74b1-4da7-9376-51ca3f968c68\") " Jan 20 17:08:44 crc kubenswrapper[4558]: I0120 17:08:44.392153 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tqck\" (UniqueName: \"kubernetes.io/projected/09c4077a-74b1-4da7-9376-51ca3f968c68-kube-api-access-8tqck\") pod \"09c4077a-74b1-4da7-9376-51ca3f968c68\" (UID: \"09c4077a-74b1-4da7-9376-51ca3f968c68\") " Jan 20 17:08:44 crc kubenswrapper[4558]: I0120 17:08:44.392233 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/09c4077a-74b1-4da7-9376-51ca3f968c68-log-httpd\") pod \"09c4077a-74b1-4da7-9376-51ca3f968c68\" (UID: \"09c4077a-74b1-4da7-9376-51ca3f968c68\") " Jan 20 17:08:44 crc kubenswrapper[4558]: I0120 17:08:44.394013 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/09c4077a-74b1-4da7-9376-51ca3f968c68-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "09c4077a-74b1-4da7-9376-51ca3f968c68" (UID: "09c4077a-74b1-4da7-9376-51ca3f968c68"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:08:44 crc kubenswrapper[4558]: I0120 17:08:44.394918 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/09c4077a-74b1-4da7-9376-51ca3f968c68-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "09c4077a-74b1-4da7-9376-51ca3f968c68" (UID: "09c4077a-74b1-4da7-9376-51ca3f968c68"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:08:44 crc kubenswrapper[4558]: I0120 17:08:44.402539 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09c4077a-74b1-4da7-9376-51ca3f968c68-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "09c4077a-74b1-4da7-9376-51ca3f968c68" (UID: "09c4077a-74b1-4da7-9376-51ca3f968c68"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:08:44 crc kubenswrapper[4558]: I0120 17:08:44.402626 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09c4077a-74b1-4da7-9376-51ca3f968c68-config-data" (OuterVolumeSpecName: "config-data") pod "09c4077a-74b1-4da7-9376-51ca3f968c68" (UID: "09c4077a-74b1-4da7-9376-51ca3f968c68"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:08:44 crc kubenswrapper[4558]: I0120 17:08:44.402688 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09c4077a-74b1-4da7-9376-51ca3f968c68-scripts" (OuterVolumeSpecName: "scripts") pod "09c4077a-74b1-4da7-9376-51ca3f968c68" (UID: "09c4077a-74b1-4da7-9376-51ca3f968c68"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:08:44 crc kubenswrapper[4558]: I0120 17:08:44.406303 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09c4077a-74b1-4da7-9376-51ca3f968c68-kube-api-access-8tqck" (OuterVolumeSpecName: "kube-api-access-8tqck") pod "09c4077a-74b1-4da7-9376-51ca3f968c68" (UID: "09c4077a-74b1-4da7-9376-51ca3f968c68"). InnerVolumeSpecName "kube-api-access-8tqck". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:08:44 crc kubenswrapper[4558]: I0120 17:08:44.406767 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09c4077a-74b1-4da7-9376-51ca3f968c68-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "09c4077a-74b1-4da7-9376-51ca3f968c68" (UID: "09c4077a-74b1-4da7-9376-51ca3f968c68"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:08:44 crc kubenswrapper[4558]: I0120 17:08:44.494655 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/09c4077a-74b1-4da7-9376-51ca3f968c68-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:44 crc kubenswrapper[4558]: I0120 17:08:44.494689 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/09c4077a-74b1-4da7-9376-51ca3f968c68-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:44 crc kubenswrapper[4558]: I0120 17:08:44.494700 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/09c4077a-74b1-4da7-9376-51ca3f968c68-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:44 crc kubenswrapper[4558]: I0120 17:08:44.494710 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09c4077a-74b1-4da7-9376-51ca3f968c68-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:44 crc kubenswrapper[4558]: I0120 17:08:44.494722 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/09c4077a-74b1-4da7-9376-51ca3f968c68-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:44 crc kubenswrapper[4558]: I0120 17:08:44.494731 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tqck\" (UniqueName: \"kubernetes.io/projected/09c4077a-74b1-4da7-9376-51ca3f968c68-kube-api-access-8tqck\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:44 crc kubenswrapper[4558]: I0120 17:08:44.494740 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/09c4077a-74b1-4da7-9376-51ca3f968c68-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:44 crc kubenswrapper[4558]: I0120 17:08:44.576814 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="897179bd-8f95-47e8-b874-e1f7c2db06c7" path="/var/lib/kubelet/pods/897179bd-8f95-47e8-b874-e1f7c2db06c7/volumes" Jan 20 17:08:45 crc kubenswrapper[4558]: I0120 17:08:45.217695 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:45 crc kubenswrapper[4558]: I0120 17:08:45.252312 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:08:45 crc kubenswrapper[4558]: I0120 17:08:45.264272 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:08:45 crc kubenswrapper[4558]: I0120 17:08:45.282912 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:08:45 crc kubenswrapper[4558]: E0120 17:08:45.283407 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="038dd72b-1a11-42d9-98bb-5f681a2fbc38" containerName="mariadb-database-create" Jan 20 17:08:45 crc kubenswrapper[4558]: I0120 17:08:45.283427 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="038dd72b-1a11-42d9-98bb-5f681a2fbc38" containerName="mariadb-database-create" Jan 20 17:08:45 crc kubenswrapper[4558]: E0120 17:08:45.283452 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce2696d5-1fd9-496a-a751-68912b14248c" containerName="mariadb-database-create" Jan 20 17:08:45 crc kubenswrapper[4558]: I0120 17:08:45.283458 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce2696d5-1fd9-496a-a751-68912b14248c" containerName="mariadb-database-create" Jan 20 17:08:45 crc kubenswrapper[4558]: E0120 17:08:45.283475 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8906a01-1578-45d7-bfa0-4bbb4dfe6123" containerName="mariadb-database-create" Jan 20 17:08:45 crc kubenswrapper[4558]: I0120 17:08:45.283481 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8906a01-1578-45d7-bfa0-4bbb4dfe6123" containerName="mariadb-database-create" Jan 20 17:08:45 crc kubenswrapper[4558]: E0120 17:08:45.283489 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60dcbf76-e430-47bc-afe1-832961122cc9" containerName="mariadb-account-create-update" Jan 20 17:08:45 crc kubenswrapper[4558]: I0120 17:08:45.283496 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="60dcbf76-e430-47bc-afe1-832961122cc9" containerName="mariadb-account-create-update" Jan 20 17:08:45 crc kubenswrapper[4558]: E0120 17:08:45.283513 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f44e4f71-b12b-41da-bb06-0495080715ea" containerName="mariadb-account-create-update" Jan 20 17:08:45 crc kubenswrapper[4558]: I0120 17:08:45.283521 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f44e4f71-b12b-41da-bb06-0495080715ea" containerName="mariadb-account-create-update" Jan 20 17:08:45 crc kubenswrapper[4558]: E0120 17:08:45.283534 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7eea1d5d-bf4b-4104-9c44-8dc4846f8c76" containerName="mariadb-account-create-update" Jan 20 17:08:45 crc kubenswrapper[4558]: I0120 17:08:45.283540 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7eea1d5d-bf4b-4104-9c44-8dc4846f8c76" containerName="mariadb-account-create-update" Jan 20 17:08:45 crc kubenswrapper[4558]: I0120 17:08:45.283697 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="7eea1d5d-bf4b-4104-9c44-8dc4846f8c76" containerName="mariadb-account-create-update" Jan 20 17:08:45 crc kubenswrapper[4558]: I0120 17:08:45.283707 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b8906a01-1578-45d7-bfa0-4bbb4dfe6123" containerName="mariadb-database-create" Jan 20 17:08:45 crc kubenswrapper[4558]: I0120 17:08:45.283723 4558 
memory_manager.go:354] "RemoveStaleState removing state" podUID="f44e4f71-b12b-41da-bb06-0495080715ea" containerName="mariadb-account-create-update" Jan 20 17:08:45 crc kubenswrapper[4558]: I0120 17:08:45.283732 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="038dd72b-1a11-42d9-98bb-5f681a2fbc38" containerName="mariadb-database-create" Jan 20 17:08:45 crc kubenswrapper[4558]: I0120 17:08:45.283741 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="60dcbf76-e430-47bc-afe1-832961122cc9" containerName="mariadb-account-create-update" Jan 20 17:08:45 crc kubenswrapper[4558]: I0120 17:08:45.283752 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ce2696d5-1fd9-496a-a751-68912b14248c" containerName="mariadb-database-create" Jan 20 17:08:45 crc kubenswrapper[4558]: I0120 17:08:45.285350 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:45 crc kubenswrapper[4558]: I0120 17:08:45.294713 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:08:45 crc kubenswrapper[4558]: I0120 17:08:45.294926 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:08:45 crc kubenswrapper[4558]: I0120 17:08:45.305032 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:08:45 crc kubenswrapper[4558]: I0120 17:08:45.415954 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r6qc7\" (UniqueName: \"kubernetes.io/projected/14a845ef-e0c4-44aa-ba62-f8b688dfac3a-kube-api-access-r6qc7\") pod \"ceilometer-0\" (UID: \"14a845ef-e0c4-44aa-ba62-f8b688dfac3a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:45 crc kubenswrapper[4558]: I0120 17:08:45.416237 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14a845ef-e0c4-44aa-ba62-f8b688dfac3a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"14a845ef-e0c4-44aa-ba62-f8b688dfac3a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:45 crc kubenswrapper[4558]: I0120 17:08:45.416276 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/14a845ef-e0c4-44aa-ba62-f8b688dfac3a-run-httpd\") pod \"ceilometer-0\" (UID: \"14a845ef-e0c4-44aa-ba62-f8b688dfac3a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:45 crc kubenswrapper[4558]: I0120 17:08:45.416316 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/14a845ef-e0c4-44aa-ba62-f8b688dfac3a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"14a845ef-e0c4-44aa-ba62-f8b688dfac3a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:45 crc kubenswrapper[4558]: I0120 17:08:45.416364 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14a845ef-e0c4-44aa-ba62-f8b688dfac3a-config-data\") pod \"ceilometer-0\" (UID: \"14a845ef-e0c4-44aa-ba62-f8b688dfac3a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:45 crc kubenswrapper[4558]: I0120 17:08:45.416456 4558 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/14a845ef-e0c4-44aa-ba62-f8b688dfac3a-scripts\") pod \"ceilometer-0\" (UID: \"14a845ef-e0c4-44aa-ba62-f8b688dfac3a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:45 crc kubenswrapper[4558]: I0120 17:08:45.416698 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/14a845ef-e0c4-44aa-ba62-f8b688dfac3a-log-httpd\") pod \"ceilometer-0\" (UID: \"14a845ef-e0c4-44aa-ba62-f8b688dfac3a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:45 crc kubenswrapper[4558]: I0120 17:08:45.521541 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14a845ef-e0c4-44aa-ba62-f8b688dfac3a-config-data\") pod \"ceilometer-0\" (UID: \"14a845ef-e0c4-44aa-ba62-f8b688dfac3a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:45 crc kubenswrapper[4558]: I0120 17:08:45.521744 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/14a845ef-e0c4-44aa-ba62-f8b688dfac3a-scripts\") pod \"ceilometer-0\" (UID: \"14a845ef-e0c4-44aa-ba62-f8b688dfac3a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:45 crc kubenswrapper[4558]: I0120 17:08:45.521811 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/14a845ef-e0c4-44aa-ba62-f8b688dfac3a-log-httpd\") pod \"ceilometer-0\" (UID: \"14a845ef-e0c4-44aa-ba62-f8b688dfac3a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:45 crc kubenswrapper[4558]: I0120 17:08:45.522062 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r6qc7\" (UniqueName: \"kubernetes.io/projected/14a845ef-e0c4-44aa-ba62-f8b688dfac3a-kube-api-access-r6qc7\") pod \"ceilometer-0\" (UID: \"14a845ef-e0c4-44aa-ba62-f8b688dfac3a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:45 crc kubenswrapper[4558]: I0120 17:08:45.522281 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14a845ef-e0c4-44aa-ba62-f8b688dfac3a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"14a845ef-e0c4-44aa-ba62-f8b688dfac3a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:45 crc kubenswrapper[4558]: I0120 17:08:45.522312 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/14a845ef-e0c4-44aa-ba62-f8b688dfac3a-run-httpd\") pod \"ceilometer-0\" (UID: \"14a845ef-e0c4-44aa-ba62-f8b688dfac3a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:45 crc kubenswrapper[4558]: I0120 17:08:45.522550 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/14a845ef-e0c4-44aa-ba62-f8b688dfac3a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"14a845ef-e0c4-44aa-ba62-f8b688dfac3a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:45 crc kubenswrapper[4558]: I0120 17:08:45.523956 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/14a845ef-e0c4-44aa-ba62-f8b688dfac3a-log-httpd\") pod \"ceilometer-0\" (UID: \"14a845ef-e0c4-44aa-ba62-f8b688dfac3a\") " 
pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:45 crc kubenswrapper[4558]: I0120 17:08:45.524904 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/14a845ef-e0c4-44aa-ba62-f8b688dfac3a-run-httpd\") pod \"ceilometer-0\" (UID: \"14a845ef-e0c4-44aa-ba62-f8b688dfac3a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:45 crc kubenswrapper[4558]: I0120 17:08:45.529932 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/14a845ef-e0c4-44aa-ba62-f8b688dfac3a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"14a845ef-e0c4-44aa-ba62-f8b688dfac3a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:45 crc kubenswrapper[4558]: I0120 17:08:45.535879 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14a845ef-e0c4-44aa-ba62-f8b688dfac3a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"14a845ef-e0c4-44aa-ba62-f8b688dfac3a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:45 crc kubenswrapper[4558]: I0120 17:08:45.536910 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14a845ef-e0c4-44aa-ba62-f8b688dfac3a-config-data\") pod \"ceilometer-0\" (UID: \"14a845ef-e0c4-44aa-ba62-f8b688dfac3a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:45 crc kubenswrapper[4558]: I0120 17:08:45.542041 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/14a845ef-e0c4-44aa-ba62-f8b688dfac3a-scripts\") pod \"ceilometer-0\" (UID: \"14a845ef-e0c4-44aa-ba62-f8b688dfac3a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:45 crc kubenswrapper[4558]: I0120 17:08:45.545314 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r6qc7\" (UniqueName: \"kubernetes.io/projected/14a845ef-e0c4-44aa-ba62-f8b688dfac3a-kube-api-access-r6qc7\") pod \"ceilometer-0\" (UID: \"14a845ef-e0c4-44aa-ba62-f8b688dfac3a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:45 crc kubenswrapper[4558]: I0120 17:08:45.605844 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:45 crc kubenswrapper[4558]: I0120 17:08:45.619226 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:08:45 crc kubenswrapper[4558]: I0120 17:08:45.619564 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="91e1a579-d120-4f5d-9117-f5e48646a8eb" containerName="glance-log" containerID="cri-o://f4af7a607838a829488c78ebe28735c0640218c8a4d67ceed1f74cc4aa21ff78" gracePeriod=30 Jan 20 17:08:45 crc kubenswrapper[4558]: I0120 17:08:45.619891 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="91e1a579-d120-4f5d-9117-f5e48646a8eb" containerName="glance-httpd" containerID="cri-o://db1a38fc6024093565d9088a46937eff818680f06d9ee97e07e63e11dde8ebfb" gracePeriod=30 Jan 20 17:08:45 crc kubenswrapper[4558]: I0120 17:08:45.776820 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:08:45 crc kubenswrapper[4558]: I0120 17:08:45.808230 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-db-sync-prb5g"] Jan 20 17:08:45 crc kubenswrapper[4558]: I0120 17:08:45.809607 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-prb5g" Jan 20 17:08:45 crc kubenswrapper[4558]: I0120 17:08:45.812285 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-conductor-config-data" Jan 20 17:08:45 crc kubenswrapper[4558]: I0120 17:08:45.812606 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-conductor-scripts" Jan 20 17:08:45 crc kubenswrapper[4558]: I0120 17:08:45.816807 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-nova-dockercfg-zrgwt" Jan 20 17:08:45 crc kubenswrapper[4558]: I0120 17:08:45.829266 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-db-sync-prb5g"] Jan 20 17:08:45 crc kubenswrapper[4558]: I0120 17:08:45.836887 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ecc6d472-eb61-4b6a-ba13-fd3bd4a897cc-config-data\") pod \"nova-cell0-conductor-db-sync-prb5g\" (UID: \"ecc6d472-eb61-4b6a-ba13-fd3bd4a897cc\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-prb5g" Jan 20 17:08:45 crc kubenswrapper[4558]: I0120 17:08:45.836929 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ecc6d472-eb61-4b6a-ba13-fd3bd4a897cc-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-prb5g\" (UID: \"ecc6d472-eb61-4b6a-ba13-fd3bd4a897cc\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-prb5g" Jan 20 17:08:45 crc kubenswrapper[4558]: I0120 17:08:45.836968 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bb9kn\" (UniqueName: \"kubernetes.io/projected/ecc6d472-eb61-4b6a-ba13-fd3bd4a897cc-kube-api-access-bb9kn\") pod \"nova-cell0-conductor-db-sync-prb5g\" (UID: \"ecc6d472-eb61-4b6a-ba13-fd3bd4a897cc\") " 
pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-prb5g" Jan 20 17:08:45 crc kubenswrapper[4558]: I0120 17:08:45.837040 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ecc6d472-eb61-4b6a-ba13-fd3bd4a897cc-scripts\") pod \"nova-cell0-conductor-db-sync-prb5g\" (UID: \"ecc6d472-eb61-4b6a-ba13-fd3bd4a897cc\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-prb5g" Jan 20 17:08:45 crc kubenswrapper[4558]: I0120 17:08:45.938286 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ecc6d472-eb61-4b6a-ba13-fd3bd4a897cc-scripts\") pod \"nova-cell0-conductor-db-sync-prb5g\" (UID: \"ecc6d472-eb61-4b6a-ba13-fd3bd4a897cc\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-prb5g" Jan 20 17:08:45 crc kubenswrapper[4558]: I0120 17:08:45.938386 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ecc6d472-eb61-4b6a-ba13-fd3bd4a897cc-config-data\") pod \"nova-cell0-conductor-db-sync-prb5g\" (UID: \"ecc6d472-eb61-4b6a-ba13-fd3bd4a897cc\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-prb5g" Jan 20 17:08:45 crc kubenswrapper[4558]: I0120 17:08:45.938425 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ecc6d472-eb61-4b6a-ba13-fd3bd4a897cc-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-prb5g\" (UID: \"ecc6d472-eb61-4b6a-ba13-fd3bd4a897cc\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-prb5g" Jan 20 17:08:45 crc kubenswrapper[4558]: I0120 17:08:45.938476 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bb9kn\" (UniqueName: \"kubernetes.io/projected/ecc6d472-eb61-4b6a-ba13-fd3bd4a897cc-kube-api-access-bb9kn\") pod \"nova-cell0-conductor-db-sync-prb5g\" (UID: \"ecc6d472-eb61-4b6a-ba13-fd3bd4a897cc\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-prb5g" Jan 20 17:08:45 crc kubenswrapper[4558]: I0120 17:08:45.945689 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ecc6d472-eb61-4b6a-ba13-fd3bd4a897cc-config-data\") pod \"nova-cell0-conductor-db-sync-prb5g\" (UID: \"ecc6d472-eb61-4b6a-ba13-fd3bd4a897cc\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-prb5g" Jan 20 17:08:45 crc kubenswrapper[4558]: I0120 17:08:45.945780 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ecc6d472-eb61-4b6a-ba13-fd3bd4a897cc-scripts\") pod \"nova-cell0-conductor-db-sync-prb5g\" (UID: \"ecc6d472-eb61-4b6a-ba13-fd3bd4a897cc\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-prb5g" Jan 20 17:08:45 crc kubenswrapper[4558]: I0120 17:08:45.945800 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ecc6d472-eb61-4b6a-ba13-fd3bd4a897cc-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-prb5g\" (UID: \"ecc6d472-eb61-4b6a-ba13-fd3bd4a897cc\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-prb5g" Jan 20 17:08:45 crc kubenswrapper[4558]: I0120 17:08:45.955035 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bb9kn\" (UniqueName: 
\"kubernetes.io/projected/ecc6d472-eb61-4b6a-ba13-fd3bd4a897cc-kube-api-access-bb9kn\") pod \"nova-cell0-conductor-db-sync-prb5g\" (UID: \"ecc6d472-eb61-4b6a-ba13-fd3bd4a897cc\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-prb5g" Jan 20 17:08:46 crc kubenswrapper[4558]: I0120 17:08:46.092536 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:08:46 crc kubenswrapper[4558]: I0120 17:08:46.131586 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-prb5g" Jan 20 17:08:46 crc kubenswrapper[4558]: I0120 17:08:46.235071 4558 generic.go:334] "Generic (PLEG): container finished" podID="91e1a579-d120-4f5d-9117-f5e48646a8eb" containerID="f4af7a607838a829488c78ebe28735c0640218c8a4d67ceed1f74cc4aa21ff78" exitCode=143 Jan 20 17:08:46 crc kubenswrapper[4558]: I0120 17:08:46.235144 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"91e1a579-d120-4f5d-9117-f5e48646a8eb","Type":"ContainerDied","Data":"f4af7a607838a829488c78ebe28735c0640218c8a4d67ceed1f74cc4aa21ff78"} Jan 20 17:08:46 crc kubenswrapper[4558]: I0120 17:08:46.236662 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"14a845ef-e0c4-44aa-ba62-f8b688dfac3a","Type":"ContainerStarted","Data":"d6d5c7a58a05b3307114a7c63c4cce5c86aad5d605ab6814eb76c857ecd292a6"} Jan 20 17:08:46 crc kubenswrapper[4558]: I0120 17:08:46.582328 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09c4077a-74b1-4da7-9376-51ca3f968c68" path="/var/lib/kubelet/pods/09c4077a-74b1-4da7-9376-51ca3f968c68/volumes" Jan 20 17:08:46 crc kubenswrapper[4558]: I0120 17:08:46.621938 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-db-sync-prb5g"] Jan 20 17:08:47 crc kubenswrapper[4558]: I0120 17:08:47.247863 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-prb5g" event={"ID":"ecc6d472-eb61-4b6a-ba13-fd3bd4a897cc","Type":"ContainerStarted","Data":"a7da9ff56865b4a8a4aa6ba635fdc5e7c3e7c9c58b59113258f123b080d7a960"} Jan 20 17:08:47 crc kubenswrapper[4558]: I0120 17:08:47.248377 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-prb5g" event={"ID":"ecc6d472-eb61-4b6a-ba13-fd3bd4a897cc","Type":"ContainerStarted","Data":"67e77bfdcbb5574a6fbde73f713951aa15fcc9dad4f306df2d151b5bb815f5e7"} Jan 20 17:08:47 crc kubenswrapper[4558]: I0120 17:08:47.251038 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"14a845ef-e0c4-44aa-ba62-f8b688dfac3a","Type":"ContainerStarted","Data":"3e50577707c62080dface51f2d261b1cc7f9ebb85c3fd67af795b4997e0f5984"} Jan 20 17:08:47 crc kubenswrapper[4558]: I0120 17:08:47.265814 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-prb5g" podStartSLOduration=2.265796493 podStartE2EDuration="2.265796493s" podCreationTimestamp="2026-01-20 17:08:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:08:47.262379357 +0000 UTC m=+1621.022717324" watchObservedRunningTime="2026-01-20 17:08:47.265796493 +0000 UTC m=+1621.026134461" Jan 20 17:08:48 crc kubenswrapper[4558]: I0120 
17:08:48.261136 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"14a845ef-e0c4-44aa-ba62-f8b688dfac3a","Type":"ContainerStarted","Data":"bdb7226aa9c3677a40649efd73072c91ed565734d744dfc5997b428f565dbc25"} Jan 20 17:08:48 crc kubenswrapper[4558]: I0120 17:08:48.567023 4558 scope.go:117] "RemoveContainer" containerID="a711a286282347590079d2447ef37fa9ed0f11085d6d7a65f85e65c1c2df608f" Jan 20 17:08:48 crc kubenswrapper[4558]: E0120 17:08:48.570776 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:08:48 crc kubenswrapper[4558]: I0120 17:08:48.774893 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="91e1a579-d120-4f5d-9117-f5e48646a8eb" containerName="glance-httpd" probeResult="failure" output="Get \"https://10.217.1.90:9292/healthcheck\": read tcp 10.217.0.2:42732->10.217.1.90:9292: read: connection reset by peer" Jan 20 17:08:48 crc kubenswrapper[4558]: I0120 17:08:48.774925 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="91e1a579-d120-4f5d-9117-f5e48646a8eb" containerName="glance-log" probeResult="failure" output="Get \"https://10.217.1.90:9292/healthcheck\": read tcp 10.217.0.2:42726->10.217.1.90:9292: read: connection reset by peer" Jan 20 17:08:48 crc kubenswrapper[4558]: I0120 17:08:48.954689 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:08:48 crc kubenswrapper[4558]: I0120 17:08:48.954955 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="53103fed-90a2-4307-873a-7aa8e9a0f858" containerName="glance-log" containerID="cri-o://35ded3b25d0948e3d8b0f37bb581221a03212fb78cb9c59c0ecf8e53e679187e" gracePeriod=30 Jan 20 17:08:48 crc kubenswrapper[4558]: I0120 17:08:48.955023 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="53103fed-90a2-4307-873a-7aa8e9a0f858" containerName="glance-httpd" containerID="cri-o://22accb0253cb0db6d14c25355240088e9f546a439c16b31aa2b65ef38ba02d2b" gracePeriod=30 Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.214491 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.273845 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"14a845ef-e0c4-44aa-ba62-f8b688dfac3a","Type":"ContainerStarted","Data":"761d78e6a34c9fcd418ebb733922ca7e3ab737cb7174e299141399adee475e27"} Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.276878 4558 generic.go:334] "Generic (PLEG): container finished" podID="53103fed-90a2-4307-873a-7aa8e9a0f858" containerID="35ded3b25d0948e3d8b0f37bb581221a03212fb78cb9c59c0ecf8e53e679187e" exitCode=143 Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.276923 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"53103fed-90a2-4307-873a-7aa8e9a0f858","Type":"ContainerDied","Data":"35ded3b25d0948e3d8b0f37bb581221a03212fb78cb9c59c0ecf8e53e679187e"} Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.284987 4558 generic.go:334] "Generic (PLEG): container finished" podID="91e1a579-d120-4f5d-9117-f5e48646a8eb" containerID="db1a38fc6024093565d9088a46937eff818680f06d9ee97e07e63e11dde8ebfb" exitCode=0 Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.285039 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"91e1a579-d120-4f5d-9117-f5e48646a8eb","Type":"ContainerDied","Data":"db1a38fc6024093565d9088a46937eff818680f06d9ee97e07e63e11dde8ebfb"} Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.285075 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"91e1a579-d120-4f5d-9117-f5e48646a8eb","Type":"ContainerDied","Data":"2b0643d8f4dd5e01e53550162a55cadaf752694bbff4f13711b326c93f685aa4"} Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.285117 4558 scope.go:117] "RemoveContainer" containerID="db1a38fc6024093565d9088a46937eff818680f06d9ee97e07e63e11dde8ebfb" Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.285341 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.313235 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/91e1a579-d120-4f5d-9117-f5e48646a8eb-httpd-run\") pod \"91e1a579-d120-4f5d-9117-f5e48646a8eb\" (UID: \"91e1a579-d120-4f5d-9117-f5e48646a8eb\") " Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.313535 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/91e1a579-d120-4f5d-9117-f5e48646a8eb-scripts\") pod \"91e1a579-d120-4f5d-9117-f5e48646a8eb\" (UID: \"91e1a579-d120-4f5d-9117-f5e48646a8eb\") " Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.313635 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/91e1a579-d120-4f5d-9117-f5e48646a8eb-logs\") pod \"91e1a579-d120-4f5d-9117-f5e48646a8eb\" (UID: \"91e1a579-d120-4f5d-9117-f5e48646a8eb\") " Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.313741 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/91e1a579-d120-4f5d-9117-f5e48646a8eb-public-tls-certs\") pod \"91e1a579-d120-4f5d-9117-f5e48646a8eb\" (UID: \"91e1a579-d120-4f5d-9117-f5e48646a8eb\") " Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.313805 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91e1a579-d120-4f5d-9117-f5e48646a8eb-combined-ca-bundle\") pod \"91e1a579-d120-4f5d-9117-f5e48646a8eb\" (UID: \"91e1a579-d120-4f5d-9117-f5e48646a8eb\") " Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.313929 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/91e1a579-d120-4f5d-9117-f5e48646a8eb-config-data\") pod \"91e1a579-d120-4f5d-9117-f5e48646a8eb\" (UID: \"91e1a579-d120-4f5d-9117-f5e48646a8eb\") " Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.314041 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"91e1a579-d120-4f5d-9117-f5e48646a8eb\" (UID: \"91e1a579-d120-4f5d-9117-f5e48646a8eb\") " Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.314177 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fr9z2\" (UniqueName: \"kubernetes.io/projected/91e1a579-d120-4f5d-9117-f5e48646a8eb-kube-api-access-fr9z2\") pod \"91e1a579-d120-4f5d-9117-f5e48646a8eb\" (UID: \"91e1a579-d120-4f5d-9117-f5e48646a8eb\") " Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.313942 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/91e1a579-d120-4f5d-9117-f5e48646a8eb-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "91e1a579-d120-4f5d-9117-f5e48646a8eb" (UID: "91e1a579-d120-4f5d-9117-f5e48646a8eb"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.314250 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/91e1a579-d120-4f5d-9117-f5e48646a8eb-logs" (OuterVolumeSpecName: "logs") pod "91e1a579-d120-4f5d-9117-f5e48646a8eb" (UID: "91e1a579-d120-4f5d-9117-f5e48646a8eb"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.330649 4558 scope.go:117] "RemoveContainer" containerID="f4af7a607838a829488c78ebe28735c0640218c8a4d67ceed1f74cc4aa21ff78" Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.339147 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/91e1a579-d120-4f5d-9117-f5e48646a8eb-kube-api-access-fr9z2" (OuterVolumeSpecName: "kube-api-access-fr9z2") pod "91e1a579-d120-4f5d-9117-f5e48646a8eb" (UID: "91e1a579-d120-4f5d-9117-f5e48646a8eb"). InnerVolumeSpecName "kube-api-access-fr9z2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.339678 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91e1a579-d120-4f5d-9117-f5e48646a8eb-scripts" (OuterVolumeSpecName: "scripts") pod "91e1a579-d120-4f5d-9117-f5e48646a8eb" (UID: "91e1a579-d120-4f5d-9117-f5e48646a8eb"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.343279 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "glance") pod "91e1a579-d120-4f5d-9117-f5e48646a8eb" (UID: "91e1a579-d120-4f5d-9117-f5e48646a8eb"). InnerVolumeSpecName "local-storage03-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.347158 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91e1a579-d120-4f5d-9117-f5e48646a8eb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "91e1a579-d120-4f5d-9117-f5e48646a8eb" (UID: "91e1a579-d120-4f5d-9117-f5e48646a8eb"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.372860 4558 scope.go:117] "RemoveContainer" containerID="db1a38fc6024093565d9088a46937eff818680f06d9ee97e07e63e11dde8ebfb" Jan 20 17:08:49 crc kubenswrapper[4558]: E0120 17:08:49.373366 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"db1a38fc6024093565d9088a46937eff818680f06d9ee97e07e63e11dde8ebfb\": container with ID starting with db1a38fc6024093565d9088a46937eff818680f06d9ee97e07e63e11dde8ebfb not found: ID does not exist" containerID="db1a38fc6024093565d9088a46937eff818680f06d9ee97e07e63e11dde8ebfb" Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.373414 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"db1a38fc6024093565d9088a46937eff818680f06d9ee97e07e63e11dde8ebfb"} err="failed to get container status \"db1a38fc6024093565d9088a46937eff818680f06d9ee97e07e63e11dde8ebfb\": rpc error: code = NotFound desc = could not find container \"db1a38fc6024093565d9088a46937eff818680f06d9ee97e07e63e11dde8ebfb\": container with ID starting with db1a38fc6024093565d9088a46937eff818680f06d9ee97e07e63e11dde8ebfb not found: ID does not exist" Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.373443 4558 scope.go:117] "RemoveContainer" containerID="f4af7a607838a829488c78ebe28735c0640218c8a4d67ceed1f74cc4aa21ff78" Jan 20 17:08:49 crc kubenswrapper[4558]: E0120 17:08:49.373683 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f4af7a607838a829488c78ebe28735c0640218c8a4d67ceed1f74cc4aa21ff78\": container with ID starting with f4af7a607838a829488c78ebe28735c0640218c8a4d67ceed1f74cc4aa21ff78 not found: ID does not exist" containerID="f4af7a607838a829488c78ebe28735c0640218c8a4d67ceed1f74cc4aa21ff78" Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.373706 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f4af7a607838a829488c78ebe28735c0640218c8a4d67ceed1f74cc4aa21ff78"} err="failed to get container status \"f4af7a607838a829488c78ebe28735c0640218c8a4d67ceed1f74cc4aa21ff78\": rpc error: code = NotFound desc = could not find container \"f4af7a607838a829488c78ebe28735c0640218c8a4d67ceed1f74cc4aa21ff78\": container with ID starting with f4af7a607838a829488c78ebe28735c0640218c8a4d67ceed1f74cc4aa21ff78 not found: ID does not exist" Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.375316 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91e1a579-d120-4f5d-9117-f5e48646a8eb-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "91e1a579-d120-4f5d-9117-f5e48646a8eb" (UID: "91e1a579-d120-4f5d-9117-f5e48646a8eb"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.379328 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91e1a579-d120-4f5d-9117-f5e48646a8eb-config-data" (OuterVolumeSpecName: "config-data") pod "91e1a579-d120-4f5d-9117-f5e48646a8eb" (UID: "91e1a579-d120-4f5d-9117-f5e48646a8eb"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.418582 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/91e1a579-d120-4f5d-9117-f5e48646a8eb-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.418609 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/91e1a579-d120-4f5d-9117-f5e48646a8eb-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.418623 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91e1a579-d120-4f5d-9117-f5e48646a8eb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.418651 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/91e1a579-d120-4f5d-9117-f5e48646a8eb-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.418659 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/91e1a579-d120-4f5d-9117-f5e48646a8eb-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.418687 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" " Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.418697 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fr9z2\" (UniqueName: \"kubernetes.io/projected/91e1a579-d120-4f5d-9117-f5e48646a8eb-kube-api-access-fr9z2\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.418706 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/91e1a579-d120-4f5d-9117-f5e48646a8eb-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.436325 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc" Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.521281 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.616346 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.626796 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.645100 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:08:49 crc kubenswrapper[4558]: E0120 17:08:49.645593 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91e1a579-d120-4f5d-9117-f5e48646a8eb" containerName="glance-httpd" Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.645612 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="91e1a579-d120-4f5d-9117-f5e48646a8eb" containerName="glance-httpd" Jan 20 17:08:49 
crc kubenswrapper[4558]: E0120 17:08:49.645642 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91e1a579-d120-4f5d-9117-f5e48646a8eb" containerName="glance-log" Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.645648 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="91e1a579-d120-4f5d-9117-f5e48646a8eb" containerName="glance-log" Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.645816 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="91e1a579-d120-4f5d-9117-f5e48646a8eb" containerName="glance-httpd" Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.645829 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="91e1a579-d120-4f5d-9117-f5e48646a8eb" containerName="glance-log" Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.646821 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.649039 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-public-svc" Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.649095 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-external-config-data" Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.670693 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.827512 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d-config-data\") pod \"glance-default-external-api-0\" (UID: \"a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.827572 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-external-api-0\" (UID: \"a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.827646 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.827673 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.827697 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-js5hh\" (UniqueName: \"kubernetes.io/projected/a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d-kube-api-access-js5hh\") pod \"glance-default-external-api-0\" (UID: 
\"a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.827715 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.827956 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d-logs\") pod \"glance-default-external-api-0\" (UID: \"a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.828076 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d-scripts\") pod \"glance-default-external-api-0\" (UID: \"a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.930768 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.930817 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.930853 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-js5hh\" (UniqueName: \"kubernetes.io/projected/a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d-kube-api-access-js5hh\") pod \"glance-default-external-api-0\" (UID: \"a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.930876 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.930973 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d-logs\") pod \"glance-default-external-api-0\" (UID: \"a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.931018 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d-scripts\") pod \"glance-default-external-api-0\" (UID: \"a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.931221 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d-config-data\") pod \"glance-default-external-api-0\" (UID: \"a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.931266 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-external-api-0\" (UID: \"a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.931559 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.931687 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d-logs\") pod \"glance-default-external-api-0\" (UID: \"a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.931786 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-external-api-0\" (UID: \"a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d\") device mount path \"/mnt/openstack/pv03\"" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.936346 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d-config-data\") pod \"glance-default-external-api-0\" (UID: \"a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.937624 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.937696 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d-scripts\") pod \"glance-default-external-api-0\" (UID: \"a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.939950 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.948920 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-js5hh\" (UniqueName: \"kubernetes.io/projected/a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d-kube-api-access-js5hh\") pod \"glance-default-external-api-0\" (UID: \"a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:08:49 crc kubenswrapper[4558]: I0120 17:08:49.969451 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"glance-default-external-api-0\" (UID: \"a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:08:50 crc kubenswrapper[4558]: I0120 17:08:50.260753 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:08:50 crc kubenswrapper[4558]: I0120 17:08:50.305330 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"14a845ef-e0c4-44aa-ba62-f8b688dfac3a","Type":"ContainerStarted","Data":"3c15d8a18834d6f524266595b8c7261edda3271c447e55ea049ee6069eac5015"} Jan 20 17:08:50 crc kubenswrapper[4558]: I0120 17:08:50.305528 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="14a845ef-e0c4-44aa-ba62-f8b688dfac3a" containerName="ceilometer-central-agent" containerID="cri-o://3e50577707c62080dface51f2d261b1cc7f9ebb85c3fd67af795b4997e0f5984" gracePeriod=30 Jan 20 17:08:50 crc kubenswrapper[4558]: I0120 17:08:50.305684 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="14a845ef-e0c4-44aa-ba62-f8b688dfac3a" containerName="proxy-httpd" containerID="cri-o://3c15d8a18834d6f524266595b8c7261edda3271c447e55ea049ee6069eac5015" gracePeriod=30 Jan 20 17:08:50 crc kubenswrapper[4558]: I0120 17:08:50.305724 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="14a845ef-e0c4-44aa-ba62-f8b688dfac3a" containerName="sg-core" containerID="cri-o://761d78e6a34c9fcd418ebb733922ca7e3ab737cb7174e299141399adee475e27" gracePeriod=30 Jan 20 17:08:50 crc kubenswrapper[4558]: I0120 17:08:50.305780 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="14a845ef-e0c4-44aa-ba62-f8b688dfac3a" containerName="ceilometer-notification-agent" containerID="cri-o://bdb7226aa9c3677a40649efd73072c91ed565734d744dfc5997b428f565dbc25" gracePeriod=30 Jan 20 17:08:50 crc kubenswrapper[4558]: I0120 17:08:50.305831 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:50 crc kubenswrapper[4558]: I0120 17:08:50.329735 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=1.481712597 podStartE2EDuration="5.329718018s" podCreationTimestamp="2026-01-20 17:08:45 +0000 UTC" firstStartedPulling="2026-01-20 17:08:46.098197934 +0000 UTC m=+1619.858535901" lastFinishedPulling="2026-01-20 
17:08:49.946203355 +0000 UTC m=+1623.706541322" observedRunningTime="2026-01-20 17:08:50.32743283 +0000 UTC m=+1624.087770797" watchObservedRunningTime="2026-01-20 17:08:50.329718018 +0000 UTC m=+1624.090055985" Jan 20 17:08:50 crc kubenswrapper[4558]: I0120 17:08:50.577122 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="91e1a579-d120-4f5d-9117-f5e48646a8eb" path="/var/lib/kubelet/pods/91e1a579-d120-4f5d-9117-f5e48646a8eb/volumes" Jan 20 17:08:50 crc kubenswrapper[4558]: I0120 17:08:50.690783 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:08:50 crc kubenswrapper[4558]: W0120 17:08:50.691501 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda77ca0d2_5ba2_49a1_aa97_c0ee74a5ca0d.slice/crio-35496bb2794ee8c33d201fa94c332fd85c6e0347dc7d4b07b255d69022103dff WatchSource:0}: Error finding container 35496bb2794ee8c33d201fa94c332fd85c6e0347dc7d4b07b255d69022103dff: Status 404 returned error can't find the container with id 35496bb2794ee8c33d201fa94c332fd85c6e0347dc7d4b07b255d69022103dff Jan 20 17:08:51 crc kubenswrapper[4558]: I0120 17:08:51.325547 4558 generic.go:334] "Generic (PLEG): container finished" podID="14a845ef-e0c4-44aa-ba62-f8b688dfac3a" containerID="3c15d8a18834d6f524266595b8c7261edda3271c447e55ea049ee6069eac5015" exitCode=0 Jan 20 17:08:51 crc kubenswrapper[4558]: I0120 17:08:51.325974 4558 generic.go:334] "Generic (PLEG): container finished" podID="14a845ef-e0c4-44aa-ba62-f8b688dfac3a" containerID="761d78e6a34c9fcd418ebb733922ca7e3ab737cb7174e299141399adee475e27" exitCode=2 Jan 20 17:08:51 crc kubenswrapper[4558]: I0120 17:08:51.325988 4558 generic.go:334] "Generic (PLEG): container finished" podID="14a845ef-e0c4-44aa-ba62-f8b688dfac3a" containerID="bdb7226aa9c3677a40649efd73072c91ed565734d744dfc5997b428f565dbc25" exitCode=0 Jan 20 17:08:51 crc kubenswrapper[4558]: I0120 17:08:51.325728 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"14a845ef-e0c4-44aa-ba62-f8b688dfac3a","Type":"ContainerDied","Data":"3c15d8a18834d6f524266595b8c7261edda3271c447e55ea049ee6069eac5015"} Jan 20 17:08:51 crc kubenswrapper[4558]: I0120 17:08:51.326057 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"14a845ef-e0c4-44aa-ba62-f8b688dfac3a","Type":"ContainerDied","Data":"761d78e6a34c9fcd418ebb733922ca7e3ab737cb7174e299141399adee475e27"} Jan 20 17:08:51 crc kubenswrapper[4558]: I0120 17:08:51.326071 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"14a845ef-e0c4-44aa-ba62-f8b688dfac3a","Type":"ContainerDied","Data":"bdb7226aa9c3677a40649efd73072c91ed565734d744dfc5997b428f565dbc25"} Jan 20 17:08:51 crc kubenswrapper[4558]: I0120 17:08:51.328604 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d","Type":"ContainerStarted","Data":"3e0d89a98f1d6af7b5d9794c3b45f1a7d02d2b5e21f231ecf13088af12d9aed7"} Jan 20 17:08:51 crc kubenswrapper[4558]: I0120 17:08:51.328630 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d","Type":"ContainerStarted","Data":"35496bb2794ee8c33d201fa94c332fd85c6e0347dc7d4b07b255d69022103dff"} Jan 20 17:08:52 crc 
kubenswrapper[4558]: I0120 17:08:52.348137 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d","Type":"ContainerStarted","Data":"fcae9d3478f16c4ae798638653ce51dee0f0bdc70fd0867d5014ba53378f69ef"} Jan 20 17:08:52 crc kubenswrapper[4558]: I0120 17:08:52.352413 4558 generic.go:334] "Generic (PLEG): container finished" podID="53103fed-90a2-4307-873a-7aa8e9a0f858" containerID="22accb0253cb0db6d14c25355240088e9f546a439c16b31aa2b65ef38ba02d2b" exitCode=0 Jan 20 17:08:52 crc kubenswrapper[4558]: I0120 17:08:52.352467 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"53103fed-90a2-4307-873a-7aa8e9a0f858","Type":"ContainerDied","Data":"22accb0253cb0db6d14c25355240088e9f546a439c16b31aa2b65ef38ba02d2b"} Jan 20 17:08:52 crc kubenswrapper[4558]: I0120 17:08:52.377039 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-external-api-0" podStartSLOduration=3.377019587 podStartE2EDuration="3.377019587s" podCreationTimestamp="2026-01-20 17:08:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:08:52.376374775 +0000 UTC m=+1626.136712742" watchObservedRunningTime="2026-01-20 17:08:52.377019587 +0000 UTC m=+1626.137357554" Jan 20 17:08:52 crc kubenswrapper[4558]: I0120 17:08:52.528299 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:08:52 crc kubenswrapper[4558]: I0120 17:08:52.709113 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"53103fed-90a2-4307-873a-7aa8e9a0f858\" (UID: \"53103fed-90a2-4307-873a-7aa8e9a0f858\") " Jan 20 17:08:52 crc kubenswrapper[4558]: I0120 17:08:52.709241 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/53103fed-90a2-4307-873a-7aa8e9a0f858-config-data\") pod \"53103fed-90a2-4307-873a-7aa8e9a0f858\" (UID: \"53103fed-90a2-4307-873a-7aa8e9a0f858\") " Jan 20 17:08:52 crc kubenswrapper[4558]: I0120 17:08:52.709386 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/53103fed-90a2-4307-873a-7aa8e9a0f858-internal-tls-certs\") pod \"53103fed-90a2-4307-873a-7aa8e9a0f858\" (UID: \"53103fed-90a2-4307-873a-7aa8e9a0f858\") " Jan 20 17:08:52 crc kubenswrapper[4558]: I0120 17:08:52.709468 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53103fed-90a2-4307-873a-7aa8e9a0f858-combined-ca-bundle\") pod \"53103fed-90a2-4307-873a-7aa8e9a0f858\" (UID: \"53103fed-90a2-4307-873a-7aa8e9a0f858\") " Jan 20 17:08:52 crc kubenswrapper[4558]: I0120 17:08:52.709685 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/53103fed-90a2-4307-873a-7aa8e9a0f858-logs\") pod \"53103fed-90a2-4307-873a-7aa8e9a0f858\" (UID: \"53103fed-90a2-4307-873a-7aa8e9a0f858\") " Jan 20 17:08:52 crc kubenswrapper[4558]: I0120 17:08:52.709799 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" 
(UniqueName: \"kubernetes.io/secret/53103fed-90a2-4307-873a-7aa8e9a0f858-scripts\") pod \"53103fed-90a2-4307-873a-7aa8e9a0f858\" (UID: \"53103fed-90a2-4307-873a-7aa8e9a0f858\") " Jan 20 17:08:52 crc kubenswrapper[4558]: I0120 17:08:52.709843 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/53103fed-90a2-4307-873a-7aa8e9a0f858-httpd-run\") pod \"53103fed-90a2-4307-873a-7aa8e9a0f858\" (UID: \"53103fed-90a2-4307-873a-7aa8e9a0f858\") " Jan 20 17:08:52 crc kubenswrapper[4558]: I0120 17:08:52.709927 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-97xp7\" (UniqueName: \"kubernetes.io/projected/53103fed-90a2-4307-873a-7aa8e9a0f858-kube-api-access-97xp7\") pod \"53103fed-90a2-4307-873a-7aa8e9a0f858\" (UID: \"53103fed-90a2-4307-873a-7aa8e9a0f858\") " Jan 20 17:08:52 crc kubenswrapper[4558]: I0120 17:08:52.710257 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/53103fed-90a2-4307-873a-7aa8e9a0f858-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "53103fed-90a2-4307-873a-7aa8e9a0f858" (UID: "53103fed-90a2-4307-873a-7aa8e9a0f858"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:08:52 crc kubenswrapper[4558]: I0120 17:08:52.710714 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/53103fed-90a2-4307-873a-7aa8e9a0f858-logs" (OuterVolumeSpecName: "logs") pod "53103fed-90a2-4307-873a-7aa8e9a0f858" (UID: "53103fed-90a2-4307-873a-7aa8e9a0f858"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:08:52 crc kubenswrapper[4558]: I0120 17:08:52.711942 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/53103fed-90a2-4307-873a-7aa8e9a0f858-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:52 crc kubenswrapper[4558]: I0120 17:08:52.711995 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/53103fed-90a2-4307-873a-7aa8e9a0f858-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:52 crc kubenswrapper[4558]: I0120 17:08:52.716601 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage06-crc" (OuterVolumeSpecName: "glance") pod "53103fed-90a2-4307-873a-7aa8e9a0f858" (UID: "53103fed-90a2-4307-873a-7aa8e9a0f858"). InnerVolumeSpecName "local-storage06-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:08:52 crc kubenswrapper[4558]: I0120 17:08:52.717806 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/53103fed-90a2-4307-873a-7aa8e9a0f858-kube-api-access-97xp7" (OuterVolumeSpecName: "kube-api-access-97xp7") pod "53103fed-90a2-4307-873a-7aa8e9a0f858" (UID: "53103fed-90a2-4307-873a-7aa8e9a0f858"). InnerVolumeSpecName "kube-api-access-97xp7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:08:52 crc kubenswrapper[4558]: I0120 17:08:52.720383 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/53103fed-90a2-4307-873a-7aa8e9a0f858-scripts" (OuterVolumeSpecName: "scripts") pod "53103fed-90a2-4307-873a-7aa8e9a0f858" (UID: "53103fed-90a2-4307-873a-7aa8e9a0f858"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:08:52 crc kubenswrapper[4558]: I0120 17:08:52.738688 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/53103fed-90a2-4307-873a-7aa8e9a0f858-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "53103fed-90a2-4307-873a-7aa8e9a0f858" (UID: "53103fed-90a2-4307-873a-7aa8e9a0f858"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:08:52 crc kubenswrapper[4558]: I0120 17:08:52.756292 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/53103fed-90a2-4307-873a-7aa8e9a0f858-config-data" (OuterVolumeSpecName: "config-data") pod "53103fed-90a2-4307-873a-7aa8e9a0f858" (UID: "53103fed-90a2-4307-873a-7aa8e9a0f858"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:08:52 crc kubenswrapper[4558]: I0120 17:08:52.757293 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/53103fed-90a2-4307-873a-7aa8e9a0f858-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "53103fed-90a2-4307-873a-7aa8e9a0f858" (UID: "53103fed-90a2-4307-873a-7aa8e9a0f858"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:08:52 crc kubenswrapper[4558]: I0120 17:08:52.814249 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-97xp7\" (UniqueName: \"kubernetes.io/projected/53103fed-90a2-4307-873a-7aa8e9a0f858-kube-api-access-97xp7\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:52 crc kubenswrapper[4558]: I0120 17:08:52.814301 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" " Jan 20 17:08:52 crc kubenswrapper[4558]: I0120 17:08:52.814312 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/53103fed-90a2-4307-873a-7aa8e9a0f858-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:52 crc kubenswrapper[4558]: I0120 17:08:52.814323 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/53103fed-90a2-4307-873a-7aa8e9a0f858-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:52 crc kubenswrapper[4558]: I0120 17:08:52.814332 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53103fed-90a2-4307-873a-7aa8e9a0f858-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:52 crc kubenswrapper[4558]: I0120 17:08:52.814347 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/53103fed-90a2-4307-873a-7aa8e9a0f858-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:52 crc kubenswrapper[4558]: I0120 17:08:52.831224 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage06-crc" (UniqueName: "kubernetes.io/local-volume/local-storage06-crc") on node "crc" Jan 20 17:08:52 crc kubenswrapper[4558]: I0120 17:08:52.915222 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:53 crc kubenswrapper[4558]: I0120 17:08:53.367051 4558 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"53103fed-90a2-4307-873a-7aa8e9a0f858","Type":"ContainerDied","Data":"7605e11b45ce155d20eaa8d8b26428ebf2f5b410e35ca872cd9b5349e845b6cd"} Jan 20 17:08:53 crc kubenswrapper[4558]: I0120 17:08:53.367457 4558 scope.go:117] "RemoveContainer" containerID="22accb0253cb0db6d14c25355240088e9f546a439c16b31aa2b65ef38ba02d2b" Jan 20 17:08:53 crc kubenswrapper[4558]: I0120 17:08:53.367132 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:08:53 crc kubenswrapper[4558]: I0120 17:08:53.370488 4558 generic.go:334] "Generic (PLEG): container finished" podID="ecc6d472-eb61-4b6a-ba13-fd3bd4a897cc" containerID="a7da9ff56865b4a8a4aa6ba635fdc5e7c3e7c9c58b59113258f123b080d7a960" exitCode=0 Jan 20 17:08:53 crc kubenswrapper[4558]: I0120 17:08:53.370573 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-prb5g" event={"ID":"ecc6d472-eb61-4b6a-ba13-fd3bd4a897cc","Type":"ContainerDied","Data":"a7da9ff56865b4a8a4aa6ba635fdc5e7c3e7c9c58b59113258f123b080d7a960"} Jan 20 17:08:53 crc kubenswrapper[4558]: I0120 17:08:53.402107 4558 scope.go:117] "RemoveContainer" containerID="35ded3b25d0948e3d8b0f37bb581221a03212fb78cb9c59c0ecf8e53e679187e" Jan 20 17:08:53 crc kubenswrapper[4558]: I0120 17:08:53.421397 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:08:53 crc kubenswrapper[4558]: I0120 17:08:53.425752 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:08:53 crc kubenswrapper[4558]: I0120 17:08:53.439591 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:08:53 crc kubenswrapper[4558]: E0120 17:08:53.440065 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53103fed-90a2-4307-873a-7aa8e9a0f858" containerName="glance-httpd" Jan 20 17:08:53 crc kubenswrapper[4558]: I0120 17:08:53.440085 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="53103fed-90a2-4307-873a-7aa8e9a0f858" containerName="glance-httpd" Jan 20 17:08:53 crc kubenswrapper[4558]: E0120 17:08:53.440119 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53103fed-90a2-4307-873a-7aa8e9a0f858" containerName="glance-log" Jan 20 17:08:53 crc kubenswrapper[4558]: I0120 17:08:53.440127 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="53103fed-90a2-4307-873a-7aa8e9a0f858" containerName="glance-log" Jan 20 17:08:53 crc kubenswrapper[4558]: I0120 17:08:53.440342 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="53103fed-90a2-4307-873a-7aa8e9a0f858" containerName="glance-httpd" Jan 20 17:08:53 crc kubenswrapper[4558]: I0120 17:08:53.440358 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="53103fed-90a2-4307-873a-7aa8e9a0f858" containerName="glance-log" Jan 20 17:08:53 crc kubenswrapper[4558]: I0120 17:08:53.441323 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:08:53 crc kubenswrapper[4558]: I0120 17:08:53.443397 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-internal-svc" Jan 20 17:08:53 crc kubenswrapper[4558]: I0120 17:08:53.443414 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-internal-config-data" Jan 20 17:08:53 crc kubenswrapper[4558]: I0120 17:08:53.447249 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:08:53 crc kubenswrapper[4558]: I0120 17:08:53.628647 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-internal-api-0\" (UID: \"25737ece-fb84-4543-8c3a-94ffa7b8f095\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:08:53 crc kubenswrapper[4558]: I0120 17:08:53.628719 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/25737ece-fb84-4543-8c3a-94ffa7b8f095-logs\") pod \"glance-default-internal-api-0\" (UID: \"25737ece-fb84-4543-8c3a-94ffa7b8f095\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:08:53 crc kubenswrapper[4558]: I0120 17:08:53.628990 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5sbvc\" (UniqueName: \"kubernetes.io/projected/25737ece-fb84-4543-8c3a-94ffa7b8f095-kube-api-access-5sbvc\") pod \"glance-default-internal-api-0\" (UID: \"25737ece-fb84-4543-8c3a-94ffa7b8f095\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:08:53 crc kubenswrapper[4558]: I0120 17:08:53.629313 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25737ece-fb84-4543-8c3a-94ffa7b8f095-config-data\") pod \"glance-default-internal-api-0\" (UID: \"25737ece-fb84-4543-8c3a-94ffa7b8f095\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:08:53 crc kubenswrapper[4558]: I0120 17:08:53.629440 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/25737ece-fb84-4543-8c3a-94ffa7b8f095-scripts\") pod \"glance-default-internal-api-0\" (UID: \"25737ece-fb84-4543-8c3a-94ffa7b8f095\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:08:53 crc kubenswrapper[4558]: I0120 17:08:53.629519 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/25737ece-fb84-4543-8c3a-94ffa7b8f095-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"25737ece-fb84-4543-8c3a-94ffa7b8f095\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:08:53 crc kubenswrapper[4558]: I0120 17:08:53.629747 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25737ece-fb84-4543-8c3a-94ffa7b8f095-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"25737ece-fb84-4543-8c3a-94ffa7b8f095\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:08:53 crc 
kubenswrapper[4558]: I0120 17:08:53.629799 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/25737ece-fb84-4543-8c3a-94ffa7b8f095-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"25737ece-fb84-4543-8c3a-94ffa7b8f095\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:08:53 crc kubenswrapper[4558]: I0120 17:08:53.731709 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25737ece-fb84-4543-8c3a-94ffa7b8f095-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"25737ece-fb84-4543-8c3a-94ffa7b8f095\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:08:53 crc kubenswrapper[4558]: I0120 17:08:53.731755 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/25737ece-fb84-4543-8c3a-94ffa7b8f095-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"25737ece-fb84-4543-8c3a-94ffa7b8f095\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:08:53 crc kubenswrapper[4558]: I0120 17:08:53.731852 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-internal-api-0\" (UID: \"25737ece-fb84-4543-8c3a-94ffa7b8f095\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:08:53 crc kubenswrapper[4558]: I0120 17:08:53.731895 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/25737ece-fb84-4543-8c3a-94ffa7b8f095-logs\") pod \"glance-default-internal-api-0\" (UID: \"25737ece-fb84-4543-8c3a-94ffa7b8f095\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:08:53 crc kubenswrapper[4558]: I0120 17:08:53.732031 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5sbvc\" (UniqueName: \"kubernetes.io/projected/25737ece-fb84-4543-8c3a-94ffa7b8f095-kube-api-access-5sbvc\") pod \"glance-default-internal-api-0\" (UID: \"25737ece-fb84-4543-8c3a-94ffa7b8f095\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:08:53 crc kubenswrapper[4558]: I0120 17:08:53.732080 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25737ece-fb84-4543-8c3a-94ffa7b8f095-config-data\") pod \"glance-default-internal-api-0\" (UID: \"25737ece-fb84-4543-8c3a-94ffa7b8f095\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:08:53 crc kubenswrapper[4558]: I0120 17:08:53.732147 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/25737ece-fb84-4543-8c3a-94ffa7b8f095-scripts\") pod \"glance-default-internal-api-0\" (UID: \"25737ece-fb84-4543-8c3a-94ffa7b8f095\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:08:53 crc kubenswrapper[4558]: I0120 17:08:53.732197 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-internal-api-0\" (UID: \"25737ece-fb84-4543-8c3a-94ffa7b8f095\") device mount path \"/mnt/openstack/pv06\"" 
pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:08:53 crc kubenswrapper[4558]: I0120 17:08:53.732208 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/25737ece-fb84-4543-8c3a-94ffa7b8f095-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"25737ece-fb84-4543-8c3a-94ffa7b8f095\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:08:53 crc kubenswrapper[4558]: I0120 17:08:53.732658 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/25737ece-fb84-4543-8c3a-94ffa7b8f095-logs\") pod \"glance-default-internal-api-0\" (UID: \"25737ece-fb84-4543-8c3a-94ffa7b8f095\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:08:53 crc kubenswrapper[4558]: I0120 17:08:53.732879 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/25737ece-fb84-4543-8c3a-94ffa7b8f095-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"25737ece-fb84-4543-8c3a-94ffa7b8f095\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:08:53 crc kubenswrapper[4558]: I0120 17:08:53.738378 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/25737ece-fb84-4543-8c3a-94ffa7b8f095-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"25737ece-fb84-4543-8c3a-94ffa7b8f095\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:08:53 crc kubenswrapper[4558]: I0120 17:08:53.738419 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25737ece-fb84-4543-8c3a-94ffa7b8f095-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"25737ece-fb84-4543-8c3a-94ffa7b8f095\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:08:53 crc kubenswrapper[4558]: I0120 17:08:53.739313 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25737ece-fb84-4543-8c3a-94ffa7b8f095-config-data\") pod \"glance-default-internal-api-0\" (UID: \"25737ece-fb84-4543-8c3a-94ffa7b8f095\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:08:53 crc kubenswrapper[4558]: I0120 17:08:53.744688 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/25737ece-fb84-4543-8c3a-94ffa7b8f095-scripts\") pod \"glance-default-internal-api-0\" (UID: \"25737ece-fb84-4543-8c3a-94ffa7b8f095\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:08:53 crc kubenswrapper[4558]: I0120 17:08:53.748996 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5sbvc\" (UniqueName: \"kubernetes.io/projected/25737ece-fb84-4543-8c3a-94ffa7b8f095-kube-api-access-5sbvc\") pod \"glance-default-internal-api-0\" (UID: \"25737ece-fb84-4543-8c3a-94ffa7b8f095\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:08:53 crc kubenswrapper[4558]: I0120 17:08:53.757534 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"glance-default-internal-api-0\" (UID: \"25737ece-fb84-4543-8c3a-94ffa7b8f095\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" 
Jan 20 17:08:53 crc kubenswrapper[4558]: I0120 17:08:53.766930 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:08:54 crc kubenswrapper[4558]: I0120 17:08:54.220557 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:08:54 crc kubenswrapper[4558]: I0120 17:08:54.389510 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"25737ece-fb84-4543-8c3a-94ffa7b8f095","Type":"ContainerStarted","Data":"f4f0991223cc312b00c334450c5683d70634552d654d8ae89e0441116bad5ab5"} Jan 20 17:08:54 crc kubenswrapper[4558]: I0120 17:08:54.578219 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="53103fed-90a2-4307-873a-7aa8e9a0f858" path="/var/lib/kubelet/pods/53103fed-90a2-4307-873a-7aa8e9a0f858/volumes" Jan 20 17:08:54 crc kubenswrapper[4558]: I0120 17:08:54.732917 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-prb5g" Jan 20 17:08:54 crc kubenswrapper[4558]: I0120 17:08:54.857438 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ecc6d472-eb61-4b6a-ba13-fd3bd4a897cc-scripts\") pod \"ecc6d472-eb61-4b6a-ba13-fd3bd4a897cc\" (UID: \"ecc6d472-eb61-4b6a-ba13-fd3bd4a897cc\") " Jan 20 17:08:54 crc kubenswrapper[4558]: I0120 17:08:54.857497 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bb9kn\" (UniqueName: \"kubernetes.io/projected/ecc6d472-eb61-4b6a-ba13-fd3bd4a897cc-kube-api-access-bb9kn\") pod \"ecc6d472-eb61-4b6a-ba13-fd3bd4a897cc\" (UID: \"ecc6d472-eb61-4b6a-ba13-fd3bd4a897cc\") " Jan 20 17:08:54 crc kubenswrapper[4558]: I0120 17:08:54.857859 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ecc6d472-eb61-4b6a-ba13-fd3bd4a897cc-config-data\") pod \"ecc6d472-eb61-4b6a-ba13-fd3bd4a897cc\" (UID: \"ecc6d472-eb61-4b6a-ba13-fd3bd4a897cc\") " Jan 20 17:08:54 crc kubenswrapper[4558]: I0120 17:08:54.858062 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ecc6d472-eb61-4b6a-ba13-fd3bd4a897cc-combined-ca-bundle\") pod \"ecc6d472-eb61-4b6a-ba13-fd3bd4a897cc\" (UID: \"ecc6d472-eb61-4b6a-ba13-fd3bd4a897cc\") " Jan 20 17:08:54 crc kubenswrapper[4558]: I0120 17:08:54.863127 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ecc6d472-eb61-4b6a-ba13-fd3bd4a897cc-scripts" (OuterVolumeSpecName: "scripts") pod "ecc6d472-eb61-4b6a-ba13-fd3bd4a897cc" (UID: "ecc6d472-eb61-4b6a-ba13-fd3bd4a897cc"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:08:54 crc kubenswrapper[4558]: I0120 17:08:54.870467 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ecc6d472-eb61-4b6a-ba13-fd3bd4a897cc-kube-api-access-bb9kn" (OuterVolumeSpecName: "kube-api-access-bb9kn") pod "ecc6d472-eb61-4b6a-ba13-fd3bd4a897cc" (UID: "ecc6d472-eb61-4b6a-ba13-fd3bd4a897cc"). InnerVolumeSpecName "kube-api-access-bb9kn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:08:54 crc kubenswrapper[4558]: I0120 17:08:54.881496 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ecc6d472-eb61-4b6a-ba13-fd3bd4a897cc-config-data" (OuterVolumeSpecName: "config-data") pod "ecc6d472-eb61-4b6a-ba13-fd3bd4a897cc" (UID: "ecc6d472-eb61-4b6a-ba13-fd3bd4a897cc"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:08:54 crc kubenswrapper[4558]: I0120 17:08:54.883545 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ecc6d472-eb61-4b6a-ba13-fd3bd4a897cc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ecc6d472-eb61-4b6a-ba13-fd3bd4a897cc" (UID: "ecc6d472-eb61-4b6a-ba13-fd3bd4a897cc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:08:54 crc kubenswrapper[4558]: I0120 17:08:54.961970 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ecc6d472-eb61-4b6a-ba13-fd3bd4a897cc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:54 crc kubenswrapper[4558]: I0120 17:08:54.962002 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ecc6d472-eb61-4b6a-ba13-fd3bd4a897cc-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:54 crc kubenswrapper[4558]: I0120 17:08:54.962014 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bb9kn\" (UniqueName: \"kubernetes.io/projected/ecc6d472-eb61-4b6a-ba13-fd3bd4a897cc-kube-api-access-bb9kn\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:54 crc kubenswrapper[4558]: I0120 17:08:54.962028 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ecc6d472-eb61-4b6a-ba13-fd3bd4a897cc-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:55 crc kubenswrapper[4558]: I0120 17:08:55.403692 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-prb5g" event={"ID":"ecc6d472-eb61-4b6a-ba13-fd3bd4a897cc","Type":"ContainerDied","Data":"67e77bfdcbb5574a6fbde73f713951aa15fcc9dad4f306df2d151b5bb815f5e7"} Jan 20 17:08:55 crc kubenswrapper[4558]: I0120 17:08:55.403755 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="67e77bfdcbb5574a6fbde73f713951aa15fcc9dad4f306df2d151b5bb815f5e7" Jan 20 17:08:55 crc kubenswrapper[4558]: I0120 17:08:55.403767 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-prb5g" Jan 20 17:08:55 crc kubenswrapper[4558]: I0120 17:08:55.405734 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"25737ece-fb84-4543-8c3a-94ffa7b8f095","Type":"ContainerStarted","Data":"611dd2ec139015a415381aaa856813e6abfb158b3d9c4ce578651f256b7bef8b"} Jan 20 17:08:55 crc kubenswrapper[4558]: I0120 17:08:55.405767 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"25737ece-fb84-4543-8c3a-94ffa7b8f095","Type":"ContainerStarted","Data":"166e1e0d14b2cb48e695c5ce11cca54d963c6250ddce1f9f936a358c8a5b3580"} Jan 20 17:08:55 crc kubenswrapper[4558]: I0120 17:08:55.431994 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-internal-api-0" podStartSLOduration=2.431965383 podStartE2EDuration="2.431965383s" podCreationTimestamp="2026-01-20 17:08:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:08:55.425660386 +0000 UTC m=+1629.185998344" watchObservedRunningTime="2026-01-20 17:08:55.431965383 +0000 UTC m=+1629.192303350" Jan 20 17:08:55 crc kubenswrapper[4558]: I0120 17:08:55.486305 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:08:55 crc kubenswrapper[4558]: E0120 17:08:55.486871 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ecc6d472-eb61-4b6a-ba13-fd3bd4a897cc" containerName="nova-cell0-conductor-db-sync" Jan 20 17:08:55 crc kubenswrapper[4558]: I0120 17:08:55.486892 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ecc6d472-eb61-4b6a-ba13-fd3bd4a897cc" containerName="nova-cell0-conductor-db-sync" Jan 20 17:08:55 crc kubenswrapper[4558]: I0120 17:08:55.487157 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ecc6d472-eb61-4b6a-ba13-fd3bd4a897cc" containerName="nova-cell0-conductor-db-sync" Jan 20 17:08:55 crc kubenswrapper[4558]: I0120 17:08:55.487950 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:08:55 crc kubenswrapper[4558]: I0120 17:08:55.493360 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-conductor-config-data" Jan 20 17:08:55 crc kubenswrapper[4558]: I0120 17:08:55.493370 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-nova-dockercfg-zrgwt" Jan 20 17:08:55 crc kubenswrapper[4558]: I0120 17:08:55.501003 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:08:55 crc kubenswrapper[4558]: I0120 17:08:55.572862 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e257a965-096b-4687-811b-b27348b6960b-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"e257a965-096b-4687-811b-b27348b6960b\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:08:55 crc kubenswrapper[4558]: I0120 17:08:55.572991 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e257a965-096b-4687-811b-b27348b6960b-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"e257a965-096b-4687-811b-b27348b6960b\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:08:55 crc kubenswrapper[4558]: I0120 17:08:55.573467 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-59lj2\" (UniqueName: \"kubernetes.io/projected/e257a965-096b-4687-811b-b27348b6960b-kube-api-access-59lj2\") pod \"nova-cell0-conductor-0\" (UID: \"e257a965-096b-4687-811b-b27348b6960b\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:08:55 crc kubenswrapper[4558]: I0120 17:08:55.675394 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-59lj2\" (UniqueName: \"kubernetes.io/projected/e257a965-096b-4687-811b-b27348b6960b-kube-api-access-59lj2\") pod \"nova-cell0-conductor-0\" (UID: \"e257a965-096b-4687-811b-b27348b6960b\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:08:55 crc kubenswrapper[4558]: I0120 17:08:55.675503 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e257a965-096b-4687-811b-b27348b6960b-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"e257a965-096b-4687-811b-b27348b6960b\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:08:55 crc kubenswrapper[4558]: I0120 17:08:55.676108 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e257a965-096b-4687-811b-b27348b6960b-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"e257a965-096b-4687-811b-b27348b6960b\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:08:55 crc kubenswrapper[4558]: I0120 17:08:55.682016 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e257a965-096b-4687-811b-b27348b6960b-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"e257a965-096b-4687-811b-b27348b6960b\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:08:55 crc kubenswrapper[4558]: I0120 17:08:55.683191 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e257a965-096b-4687-811b-b27348b6960b-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"e257a965-096b-4687-811b-b27348b6960b\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:08:55 crc kubenswrapper[4558]: I0120 17:08:55.695026 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-59lj2\" (UniqueName: \"kubernetes.io/projected/e257a965-096b-4687-811b-b27348b6960b-kube-api-access-59lj2\") pod \"nova-cell0-conductor-0\" (UID: \"e257a965-096b-4687-811b-b27348b6960b\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:08:55 crc kubenswrapper[4558]: I0120 17:08:55.802919 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:08:56 crc kubenswrapper[4558]: I0120 17:08:56.278038 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:08:56 crc kubenswrapper[4558]: I0120 17:08:56.417824 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"e257a965-096b-4687-811b-b27348b6960b","Type":"ContainerStarted","Data":"60ee7afbfd5773827eb34e775f2737c9ed6cfd9a17640e178adf29345277e7f0"} Jan 20 17:08:56 crc kubenswrapper[4558]: I0120 17:08:56.419805 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:56 crc kubenswrapper[4558]: I0120 17:08:56.423891 4558 generic.go:334] "Generic (PLEG): container finished" podID="14a845ef-e0c4-44aa-ba62-f8b688dfac3a" containerID="3e50577707c62080dface51f2d261b1cc7f9ebb85c3fd67af795b4997e0f5984" exitCode=0 Jan 20 17:08:56 crc kubenswrapper[4558]: I0120 17:08:56.424067 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"14a845ef-e0c4-44aa-ba62-f8b688dfac3a","Type":"ContainerDied","Data":"3e50577707c62080dface51f2d261b1cc7f9ebb85c3fd67af795b4997e0f5984"} Jan 20 17:08:56 crc kubenswrapper[4558]: I0120 17:08:56.424144 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"14a845ef-e0c4-44aa-ba62-f8b688dfac3a","Type":"ContainerDied","Data":"d6d5c7a58a05b3307114a7c63c4cce5c86aad5d605ab6814eb76c857ecd292a6"} Jan 20 17:08:56 crc kubenswrapper[4558]: I0120 17:08:56.424187 4558 scope.go:117] "RemoveContainer" containerID="3c15d8a18834d6f524266595b8c7261edda3271c447e55ea049ee6069eac5015" Jan 20 17:08:56 crc kubenswrapper[4558]: I0120 17:08:56.472313 4558 scope.go:117] "RemoveContainer" containerID="761d78e6a34c9fcd418ebb733922ca7e3ab737cb7174e299141399adee475e27" Jan 20 17:08:56 crc kubenswrapper[4558]: I0120 17:08:56.497207 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/14a845ef-e0c4-44aa-ba62-f8b688dfac3a-scripts\") pod \"14a845ef-e0c4-44aa-ba62-f8b688dfac3a\" (UID: \"14a845ef-e0c4-44aa-ba62-f8b688dfac3a\") " Jan 20 17:08:56 crc kubenswrapper[4558]: I0120 17:08:56.497375 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/14a845ef-e0c4-44aa-ba62-f8b688dfac3a-sg-core-conf-yaml\") pod \"14a845ef-e0c4-44aa-ba62-f8b688dfac3a\" (UID: \"14a845ef-e0c4-44aa-ba62-f8b688dfac3a\") " Jan 20 17:08:56 crc kubenswrapper[4558]: I0120 17:08:56.497436 4558 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/14a845ef-e0c4-44aa-ba62-f8b688dfac3a-log-httpd\") pod \"14a845ef-e0c4-44aa-ba62-f8b688dfac3a\" (UID: \"14a845ef-e0c4-44aa-ba62-f8b688dfac3a\") " Jan 20 17:08:56 crc kubenswrapper[4558]: I0120 17:08:56.497548 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14a845ef-e0c4-44aa-ba62-f8b688dfac3a-combined-ca-bundle\") pod \"14a845ef-e0c4-44aa-ba62-f8b688dfac3a\" (UID: \"14a845ef-e0c4-44aa-ba62-f8b688dfac3a\") " Jan 20 17:08:56 crc kubenswrapper[4558]: I0120 17:08:56.497580 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14a845ef-e0c4-44aa-ba62-f8b688dfac3a-config-data\") pod \"14a845ef-e0c4-44aa-ba62-f8b688dfac3a\" (UID: \"14a845ef-e0c4-44aa-ba62-f8b688dfac3a\") " Jan 20 17:08:56 crc kubenswrapper[4558]: I0120 17:08:56.497628 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r6qc7\" (UniqueName: \"kubernetes.io/projected/14a845ef-e0c4-44aa-ba62-f8b688dfac3a-kube-api-access-r6qc7\") pod \"14a845ef-e0c4-44aa-ba62-f8b688dfac3a\" (UID: \"14a845ef-e0c4-44aa-ba62-f8b688dfac3a\") " Jan 20 17:08:56 crc kubenswrapper[4558]: I0120 17:08:56.497745 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/14a845ef-e0c4-44aa-ba62-f8b688dfac3a-run-httpd\") pod \"14a845ef-e0c4-44aa-ba62-f8b688dfac3a\" (UID: \"14a845ef-e0c4-44aa-ba62-f8b688dfac3a\") " Jan 20 17:08:56 crc kubenswrapper[4558]: I0120 17:08:56.497872 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/14a845ef-e0c4-44aa-ba62-f8b688dfac3a-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "14a845ef-e0c4-44aa-ba62-f8b688dfac3a" (UID: "14a845ef-e0c4-44aa-ba62-f8b688dfac3a"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:08:56 crc kubenswrapper[4558]: I0120 17:08:56.498216 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/14a845ef-e0c4-44aa-ba62-f8b688dfac3a-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:56 crc kubenswrapper[4558]: I0120 17:08:56.498516 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/14a845ef-e0c4-44aa-ba62-f8b688dfac3a-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "14a845ef-e0c4-44aa-ba62-f8b688dfac3a" (UID: "14a845ef-e0c4-44aa-ba62-f8b688dfac3a"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:08:56 crc kubenswrapper[4558]: I0120 17:08:56.498871 4558 scope.go:117] "RemoveContainer" containerID="bdb7226aa9c3677a40649efd73072c91ed565734d744dfc5997b428f565dbc25" Jan 20 17:08:56 crc kubenswrapper[4558]: I0120 17:08:56.500807 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14a845ef-e0c4-44aa-ba62-f8b688dfac3a-scripts" (OuterVolumeSpecName: "scripts") pod "14a845ef-e0c4-44aa-ba62-f8b688dfac3a" (UID: "14a845ef-e0c4-44aa-ba62-f8b688dfac3a"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:08:56 crc kubenswrapper[4558]: I0120 17:08:56.504113 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/14a845ef-e0c4-44aa-ba62-f8b688dfac3a-kube-api-access-r6qc7" (OuterVolumeSpecName: "kube-api-access-r6qc7") pod "14a845ef-e0c4-44aa-ba62-f8b688dfac3a" (UID: "14a845ef-e0c4-44aa-ba62-f8b688dfac3a"). InnerVolumeSpecName "kube-api-access-r6qc7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:08:56 crc kubenswrapper[4558]: I0120 17:08:56.516666 4558 scope.go:117] "RemoveContainer" containerID="3e50577707c62080dface51f2d261b1cc7f9ebb85c3fd67af795b4997e0f5984" Jan 20 17:08:56 crc kubenswrapper[4558]: I0120 17:08:56.518711 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14a845ef-e0c4-44aa-ba62-f8b688dfac3a-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "14a845ef-e0c4-44aa-ba62-f8b688dfac3a" (UID: "14a845ef-e0c4-44aa-ba62-f8b688dfac3a"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:08:56 crc kubenswrapper[4558]: I0120 17:08:56.532978 4558 scope.go:117] "RemoveContainer" containerID="3c15d8a18834d6f524266595b8c7261edda3271c447e55ea049ee6069eac5015" Jan 20 17:08:56 crc kubenswrapper[4558]: E0120 17:08:56.533292 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3c15d8a18834d6f524266595b8c7261edda3271c447e55ea049ee6069eac5015\": container with ID starting with 3c15d8a18834d6f524266595b8c7261edda3271c447e55ea049ee6069eac5015 not found: ID does not exist" containerID="3c15d8a18834d6f524266595b8c7261edda3271c447e55ea049ee6069eac5015" Jan 20 17:08:56 crc kubenswrapper[4558]: I0120 17:08:56.533326 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3c15d8a18834d6f524266595b8c7261edda3271c447e55ea049ee6069eac5015"} err="failed to get container status \"3c15d8a18834d6f524266595b8c7261edda3271c447e55ea049ee6069eac5015\": rpc error: code = NotFound desc = could not find container \"3c15d8a18834d6f524266595b8c7261edda3271c447e55ea049ee6069eac5015\": container with ID starting with 3c15d8a18834d6f524266595b8c7261edda3271c447e55ea049ee6069eac5015 not found: ID does not exist" Jan 20 17:08:56 crc kubenswrapper[4558]: I0120 17:08:56.533354 4558 scope.go:117] "RemoveContainer" containerID="761d78e6a34c9fcd418ebb733922ca7e3ab737cb7174e299141399adee475e27" Jan 20 17:08:56 crc kubenswrapper[4558]: E0120 17:08:56.533595 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"761d78e6a34c9fcd418ebb733922ca7e3ab737cb7174e299141399adee475e27\": container with ID starting with 761d78e6a34c9fcd418ebb733922ca7e3ab737cb7174e299141399adee475e27 not found: ID does not exist" containerID="761d78e6a34c9fcd418ebb733922ca7e3ab737cb7174e299141399adee475e27" Jan 20 17:08:56 crc kubenswrapper[4558]: I0120 17:08:56.533632 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"761d78e6a34c9fcd418ebb733922ca7e3ab737cb7174e299141399adee475e27"} err="failed to get container status \"761d78e6a34c9fcd418ebb733922ca7e3ab737cb7174e299141399adee475e27\": rpc error: code = NotFound desc = could not find container \"761d78e6a34c9fcd418ebb733922ca7e3ab737cb7174e299141399adee475e27\": container with ID starting with 
761d78e6a34c9fcd418ebb733922ca7e3ab737cb7174e299141399adee475e27 not found: ID does not exist" Jan 20 17:08:56 crc kubenswrapper[4558]: I0120 17:08:56.533660 4558 scope.go:117] "RemoveContainer" containerID="bdb7226aa9c3677a40649efd73072c91ed565734d744dfc5997b428f565dbc25" Jan 20 17:08:56 crc kubenswrapper[4558]: E0120 17:08:56.533856 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bdb7226aa9c3677a40649efd73072c91ed565734d744dfc5997b428f565dbc25\": container with ID starting with bdb7226aa9c3677a40649efd73072c91ed565734d744dfc5997b428f565dbc25 not found: ID does not exist" containerID="bdb7226aa9c3677a40649efd73072c91ed565734d744dfc5997b428f565dbc25" Jan 20 17:08:56 crc kubenswrapper[4558]: I0120 17:08:56.533889 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bdb7226aa9c3677a40649efd73072c91ed565734d744dfc5997b428f565dbc25"} err="failed to get container status \"bdb7226aa9c3677a40649efd73072c91ed565734d744dfc5997b428f565dbc25\": rpc error: code = NotFound desc = could not find container \"bdb7226aa9c3677a40649efd73072c91ed565734d744dfc5997b428f565dbc25\": container with ID starting with bdb7226aa9c3677a40649efd73072c91ed565734d744dfc5997b428f565dbc25 not found: ID does not exist" Jan 20 17:08:56 crc kubenswrapper[4558]: I0120 17:08:56.533903 4558 scope.go:117] "RemoveContainer" containerID="3e50577707c62080dface51f2d261b1cc7f9ebb85c3fd67af795b4997e0f5984" Jan 20 17:08:56 crc kubenswrapper[4558]: E0120 17:08:56.534087 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3e50577707c62080dface51f2d261b1cc7f9ebb85c3fd67af795b4997e0f5984\": container with ID starting with 3e50577707c62080dface51f2d261b1cc7f9ebb85c3fd67af795b4997e0f5984 not found: ID does not exist" containerID="3e50577707c62080dface51f2d261b1cc7f9ebb85c3fd67af795b4997e0f5984" Jan 20 17:08:56 crc kubenswrapper[4558]: I0120 17:08:56.534112 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3e50577707c62080dface51f2d261b1cc7f9ebb85c3fd67af795b4997e0f5984"} err="failed to get container status \"3e50577707c62080dface51f2d261b1cc7f9ebb85c3fd67af795b4997e0f5984\": rpc error: code = NotFound desc = could not find container \"3e50577707c62080dface51f2d261b1cc7f9ebb85c3fd67af795b4997e0f5984\": container with ID starting with 3e50577707c62080dface51f2d261b1cc7f9ebb85c3fd67af795b4997e0f5984 not found: ID does not exist" Jan 20 17:08:56 crc kubenswrapper[4558]: I0120 17:08:56.556325 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14a845ef-e0c4-44aa-ba62-f8b688dfac3a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "14a845ef-e0c4-44aa-ba62-f8b688dfac3a" (UID: "14a845ef-e0c4-44aa-ba62-f8b688dfac3a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:08:56 crc kubenswrapper[4558]: I0120 17:08:56.564249 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14a845ef-e0c4-44aa-ba62-f8b688dfac3a-config-data" (OuterVolumeSpecName: "config-data") pod "14a845ef-e0c4-44aa-ba62-f8b688dfac3a" (UID: "14a845ef-e0c4-44aa-ba62-f8b688dfac3a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:08:56 crc kubenswrapper[4558]: I0120 17:08:56.600648 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/14a845ef-e0c4-44aa-ba62-f8b688dfac3a-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:56 crc kubenswrapper[4558]: I0120 17:08:56.600681 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14a845ef-e0c4-44aa-ba62-f8b688dfac3a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:56 crc kubenswrapper[4558]: I0120 17:08:56.600693 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14a845ef-e0c4-44aa-ba62-f8b688dfac3a-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:56 crc kubenswrapper[4558]: I0120 17:08:56.600705 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r6qc7\" (UniqueName: \"kubernetes.io/projected/14a845ef-e0c4-44aa-ba62-f8b688dfac3a-kube-api-access-r6qc7\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:56 crc kubenswrapper[4558]: I0120 17:08:56.600719 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/14a845ef-e0c4-44aa-ba62-f8b688dfac3a-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:56 crc kubenswrapper[4558]: I0120 17:08:56.600727 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/14a845ef-e0c4-44aa-ba62-f8b688dfac3a-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:08:57 crc kubenswrapper[4558]: I0120 17:08:57.437995 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"e257a965-096b-4687-811b-b27348b6960b","Type":"ContainerStarted","Data":"06874b7ffc658aefa292ba327a53635b1d027548feace3f818a84e97053573fc"} Jan 20 17:08:57 crc kubenswrapper[4558]: I0120 17:08:57.438541 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:08:57 crc kubenswrapper[4558]: I0120 17:08:57.440226 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:57 crc kubenswrapper[4558]: I0120 17:08:57.457831 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podStartSLOduration=2.45781796 podStartE2EDuration="2.45781796s" podCreationTimestamp="2026-01-20 17:08:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:08:57.454652537 +0000 UTC m=+1631.214990494" watchObservedRunningTime="2026-01-20 17:08:57.45781796 +0000 UTC m=+1631.218155927" Jan 20 17:08:57 crc kubenswrapper[4558]: I0120 17:08:57.476378 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:08:57 crc kubenswrapper[4558]: I0120 17:08:57.484052 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:08:57 crc kubenswrapper[4558]: I0120 17:08:57.490420 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:08:57 crc kubenswrapper[4558]: E0120 17:08:57.490888 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14a845ef-e0c4-44aa-ba62-f8b688dfac3a" containerName="sg-core" Jan 20 17:08:57 crc kubenswrapper[4558]: I0120 17:08:57.490910 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="14a845ef-e0c4-44aa-ba62-f8b688dfac3a" containerName="sg-core" Jan 20 17:08:57 crc kubenswrapper[4558]: E0120 17:08:57.490927 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14a845ef-e0c4-44aa-ba62-f8b688dfac3a" containerName="proxy-httpd" Jan 20 17:08:57 crc kubenswrapper[4558]: I0120 17:08:57.490934 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="14a845ef-e0c4-44aa-ba62-f8b688dfac3a" containerName="proxy-httpd" Jan 20 17:08:57 crc kubenswrapper[4558]: E0120 17:08:57.490955 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14a845ef-e0c4-44aa-ba62-f8b688dfac3a" containerName="ceilometer-notification-agent" Jan 20 17:08:57 crc kubenswrapper[4558]: I0120 17:08:57.490961 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="14a845ef-e0c4-44aa-ba62-f8b688dfac3a" containerName="ceilometer-notification-agent" Jan 20 17:08:57 crc kubenswrapper[4558]: E0120 17:08:57.490989 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14a845ef-e0c4-44aa-ba62-f8b688dfac3a" containerName="ceilometer-central-agent" Jan 20 17:08:57 crc kubenswrapper[4558]: I0120 17:08:57.490996 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="14a845ef-e0c4-44aa-ba62-f8b688dfac3a" containerName="ceilometer-central-agent" Jan 20 17:08:57 crc kubenswrapper[4558]: I0120 17:08:57.491212 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="14a845ef-e0c4-44aa-ba62-f8b688dfac3a" containerName="sg-core" Jan 20 17:08:57 crc kubenswrapper[4558]: I0120 17:08:57.491239 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="14a845ef-e0c4-44aa-ba62-f8b688dfac3a" containerName="ceilometer-notification-agent" Jan 20 17:08:57 crc kubenswrapper[4558]: I0120 17:08:57.491252 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="14a845ef-e0c4-44aa-ba62-f8b688dfac3a" containerName="ceilometer-central-agent" Jan 20 17:08:57 crc kubenswrapper[4558]: I0120 17:08:57.491260 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="14a845ef-e0c4-44aa-ba62-f8b688dfac3a" containerName="proxy-httpd" Jan 
20 17:08:57 crc kubenswrapper[4558]: I0120 17:08:57.493136 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:57 crc kubenswrapper[4558]: I0120 17:08:57.497531 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:08:57 crc kubenswrapper[4558]: I0120 17:08:57.504591 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:08:57 crc kubenswrapper[4558]: I0120 17:08:57.504807 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:08:57 crc kubenswrapper[4558]: I0120 17:08:57.623996 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d05d78a5-0552-43f3-92ea-42cdf1ada68b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d05d78a5-0552-43f3-92ea-42cdf1ada68b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:57 crc kubenswrapper[4558]: I0120 17:08:57.624058 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d05d78a5-0552-43f3-92ea-42cdf1ada68b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d05d78a5-0552-43f3-92ea-42cdf1ada68b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:57 crc kubenswrapper[4558]: I0120 17:08:57.624085 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d05d78a5-0552-43f3-92ea-42cdf1ada68b-config-data\") pod \"ceilometer-0\" (UID: \"d05d78a5-0552-43f3-92ea-42cdf1ada68b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:57 crc kubenswrapper[4558]: I0120 17:08:57.624251 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d05d78a5-0552-43f3-92ea-42cdf1ada68b-scripts\") pod \"ceilometer-0\" (UID: \"d05d78a5-0552-43f3-92ea-42cdf1ada68b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:57 crc kubenswrapper[4558]: I0120 17:08:57.624305 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d05d78a5-0552-43f3-92ea-42cdf1ada68b-run-httpd\") pod \"ceilometer-0\" (UID: \"d05d78a5-0552-43f3-92ea-42cdf1ada68b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:57 crc kubenswrapper[4558]: I0120 17:08:57.624366 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d05d78a5-0552-43f3-92ea-42cdf1ada68b-log-httpd\") pod \"ceilometer-0\" (UID: \"d05d78a5-0552-43f3-92ea-42cdf1ada68b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:57 crc kubenswrapper[4558]: I0120 17:08:57.624427 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4fpjw\" (UniqueName: \"kubernetes.io/projected/d05d78a5-0552-43f3-92ea-42cdf1ada68b-kube-api-access-4fpjw\") pod \"ceilometer-0\" (UID: \"d05d78a5-0552-43f3-92ea-42cdf1ada68b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:57 crc kubenswrapper[4558]: I0120 17:08:57.685945 4558 scope.go:117] "RemoveContainer" 
containerID="0418065a826dccc72f46275e64ebe555b3a206061ecb094ddae0dd94701f192f" Jan 20 17:08:57 crc kubenswrapper[4558]: I0120 17:08:57.718760 4558 scope.go:117] "RemoveContainer" containerID="49b1d953e107342a6b4a2ead13c7460b3febff9178ceee056591b0e1435dc0d9" Jan 20 17:08:57 crc kubenswrapper[4558]: I0120 17:08:57.726686 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d05d78a5-0552-43f3-92ea-42cdf1ada68b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d05d78a5-0552-43f3-92ea-42cdf1ada68b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:57 crc kubenswrapper[4558]: I0120 17:08:57.726760 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d05d78a5-0552-43f3-92ea-42cdf1ada68b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d05d78a5-0552-43f3-92ea-42cdf1ada68b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:57 crc kubenswrapper[4558]: I0120 17:08:57.726800 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d05d78a5-0552-43f3-92ea-42cdf1ada68b-config-data\") pod \"ceilometer-0\" (UID: \"d05d78a5-0552-43f3-92ea-42cdf1ada68b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:57 crc kubenswrapper[4558]: I0120 17:08:57.726837 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d05d78a5-0552-43f3-92ea-42cdf1ada68b-scripts\") pod \"ceilometer-0\" (UID: \"d05d78a5-0552-43f3-92ea-42cdf1ada68b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:57 crc kubenswrapper[4558]: I0120 17:08:57.726875 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d05d78a5-0552-43f3-92ea-42cdf1ada68b-run-httpd\") pod \"ceilometer-0\" (UID: \"d05d78a5-0552-43f3-92ea-42cdf1ada68b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:57 crc kubenswrapper[4558]: I0120 17:08:57.726923 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d05d78a5-0552-43f3-92ea-42cdf1ada68b-log-httpd\") pod \"ceilometer-0\" (UID: \"d05d78a5-0552-43f3-92ea-42cdf1ada68b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:57 crc kubenswrapper[4558]: I0120 17:08:57.726973 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4fpjw\" (UniqueName: \"kubernetes.io/projected/d05d78a5-0552-43f3-92ea-42cdf1ada68b-kube-api-access-4fpjw\") pod \"ceilometer-0\" (UID: \"d05d78a5-0552-43f3-92ea-42cdf1ada68b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:57 crc kubenswrapper[4558]: I0120 17:08:57.728103 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d05d78a5-0552-43f3-92ea-42cdf1ada68b-run-httpd\") pod \"ceilometer-0\" (UID: \"d05d78a5-0552-43f3-92ea-42cdf1ada68b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:57 crc kubenswrapper[4558]: I0120 17:08:57.728672 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d05d78a5-0552-43f3-92ea-42cdf1ada68b-log-httpd\") pod \"ceilometer-0\" (UID: \"d05d78a5-0552-43f3-92ea-42cdf1ada68b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:57 crc 
kubenswrapper[4558]: I0120 17:08:57.732782 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d05d78a5-0552-43f3-92ea-42cdf1ada68b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d05d78a5-0552-43f3-92ea-42cdf1ada68b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:57 crc kubenswrapper[4558]: I0120 17:08:57.733913 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d05d78a5-0552-43f3-92ea-42cdf1ada68b-config-data\") pod \"ceilometer-0\" (UID: \"d05d78a5-0552-43f3-92ea-42cdf1ada68b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:57 crc kubenswrapper[4558]: I0120 17:08:57.734245 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d05d78a5-0552-43f3-92ea-42cdf1ada68b-scripts\") pod \"ceilometer-0\" (UID: \"d05d78a5-0552-43f3-92ea-42cdf1ada68b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:57 crc kubenswrapper[4558]: I0120 17:08:57.735120 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d05d78a5-0552-43f3-92ea-42cdf1ada68b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d05d78a5-0552-43f3-92ea-42cdf1ada68b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:57 crc kubenswrapper[4558]: I0120 17:08:57.742987 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4fpjw\" (UniqueName: \"kubernetes.io/projected/d05d78a5-0552-43f3-92ea-42cdf1ada68b-kube-api-access-4fpjw\") pod \"ceilometer-0\" (UID: \"d05d78a5-0552-43f3-92ea-42cdf1ada68b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:57 crc kubenswrapper[4558]: I0120 17:08:57.815051 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:08:57 crc kubenswrapper[4558]: I0120 17:08:57.828000 4558 scope.go:117] "RemoveContainer" containerID="849d022aa7e01656bcda911e10553d7e4a4b77905285c453f857bea7c5853f1c" Jan 20 17:08:57 crc kubenswrapper[4558]: I0120 17:08:57.861265 4558 scope.go:117] "RemoveContainer" containerID="e73316f86d69e1990f728687e78cbc371595994b43c68ca7476b2c35486e8305" Jan 20 17:08:57 crc kubenswrapper[4558]: I0120 17:08:57.886633 4558 scope.go:117] "RemoveContainer" containerID="6cedc115d4e6af60550ede3121827d9dbbc1313246a8ea36817021a7ace12bae" Jan 20 17:08:57 crc kubenswrapper[4558]: I0120 17:08:57.922370 4558 scope.go:117] "RemoveContainer" containerID="6f75a2e587b8aeda7d220c1e548eaebda4e03638ed963e8d38e9e47793f84d9e" Jan 20 17:08:57 crc kubenswrapper[4558]: I0120 17:08:57.948413 4558 scope.go:117] "RemoveContainer" containerID="61908f05aa1a1c1d9c5f59500dcb21ba49538140328cfaf4bca4fda3098e6ad6" Jan 20 17:08:57 crc kubenswrapper[4558]: I0120 17:08:57.994376 4558 scope.go:117] "RemoveContainer" containerID="188c3e342f37b5a0e2973b3d0a9ce309da3207e014b9fd3f20fdbe5d6d224911" Jan 20 17:08:58 crc kubenswrapper[4558]: I0120 17:08:58.025805 4558 scope.go:117] "RemoveContainer" containerID="82423a979fb5acca56c55fc07937fea00be152ca157837a8e089a8d66ed5523d" Jan 20 17:08:58 crc kubenswrapper[4558]: I0120 17:08:58.068060 4558 scope.go:117] "RemoveContainer" containerID="d09c1f5cb99b9abbcc9bafcadac3092d4b77d95efd6cfae624db258c91598c5b" Jan 20 17:08:58 crc kubenswrapper[4558]: I0120 17:08:58.097388 4558 scope.go:117] "RemoveContainer" containerID="be2a0248e5982920b07bb828a104cefcb46f9233f457cf95302aceef0c8d8b6f" Jan 20 17:08:58 crc kubenswrapper[4558]: I0120 17:08:58.139236 4558 scope.go:117] "RemoveContainer" containerID="4f36c092e43707a5550308954378b43c241d5129d5701e92f63c5d91f23f9e25" Jan 20 17:08:58 crc kubenswrapper[4558]: I0120 17:08:58.172233 4558 scope.go:117] "RemoveContainer" containerID="9990e29de8eb44da080f3921fca3c78f78c2476ef82affd2a61ff9d7a9c4550b" Jan 20 17:08:58 crc kubenswrapper[4558]: I0120 17:08:58.192289 4558 scope.go:117] "RemoveContainer" containerID="aa3653d2225383bf934da0f655ed12217fcafbbb93b425d7a7cf38609e8256e9" Jan 20 17:08:58 crc kubenswrapper[4558]: I0120 17:08:58.206405 4558 scope.go:117] "RemoveContainer" containerID="62550ed538832efcdb4e52bf31aa3d0f46c4b167fffd93e4a2f4e812472afffe" Jan 20 17:08:58 crc kubenswrapper[4558]: I0120 17:08:58.233747 4558 scope.go:117] "RemoveContainer" containerID="7a5c7c5046f9233504ec28fc00ba05146028e8398f93bed66b2944a461db8e6b" Jan 20 17:08:58 crc kubenswrapper[4558]: I0120 17:08:58.272716 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:08:58 crc kubenswrapper[4558]: W0120 17:08:58.284524 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd05d78a5_0552_43f3_92ea_42cdf1ada68b.slice/crio-c66be163d29e3a263ebdc33ba20a26d838ec67c94a1b386d93fdff08291099dd WatchSource:0}: Error finding container c66be163d29e3a263ebdc33ba20a26d838ec67c94a1b386d93fdff08291099dd: Status 404 returned error can't find the container with id c66be163d29e3a263ebdc33ba20a26d838ec67c94a1b386d93fdff08291099dd Jan 20 17:08:58 crc kubenswrapper[4558]: I0120 17:08:58.364567 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:08:58 crc kubenswrapper[4558]: I0120 17:08:58.461203 4558 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"d05d78a5-0552-43f3-92ea-42cdf1ada68b","Type":"ContainerStarted","Data":"c66be163d29e3a263ebdc33ba20a26d838ec67c94a1b386d93fdff08291099dd"} Jan 20 17:08:58 crc kubenswrapper[4558]: I0120 17:08:58.580545 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="14a845ef-e0c4-44aa-ba62-f8b688dfac3a" path="/var/lib/kubelet/pods/14a845ef-e0c4-44aa-ba62-f8b688dfac3a/volumes" Jan 20 17:08:59 crc kubenswrapper[4558]: I0120 17:08:59.127926 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:08:59 crc kubenswrapper[4558]: I0120 17:08:59.473939 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"d05d78a5-0552-43f3-92ea-42cdf1ada68b","Type":"ContainerStarted","Data":"b15bbf50615d51f30e8f1fc1234a2ab3d117af8204a8fe96f52f573f00f39a70"} Jan 20 17:08:59 crc kubenswrapper[4558]: I0120 17:08:59.474149 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="e257a965-096b-4687-811b-b27348b6960b" containerName="nova-cell0-conductor-conductor" containerID="cri-o://06874b7ffc658aefa292ba327a53635b1d027548feace3f818a84e97053573fc" gracePeriod=30 Jan 20 17:09:00 crc kubenswrapper[4558]: I0120 17:09:00.261386 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:09:00 crc kubenswrapper[4558]: I0120 17:09:00.261689 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:09:00 crc kubenswrapper[4558]: I0120 17:09:00.290075 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:09:00 crc kubenswrapper[4558]: I0120 17:09:00.306399 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:09:00 crc kubenswrapper[4558]: I0120 17:09:00.487385 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"d05d78a5-0552-43f3-92ea-42cdf1ada68b","Type":"ContainerStarted","Data":"eabd1645a8c5904d952f7074ae5bb78c2c29c4bd6d9f4e82b38424add4274dd0"} Jan 20 17:09:00 crc kubenswrapper[4558]: I0120 17:09:00.490224 4558 generic.go:334] "Generic (PLEG): container finished" podID="e257a965-096b-4687-811b-b27348b6960b" containerID="06874b7ffc658aefa292ba327a53635b1d027548feace3f818a84e97053573fc" exitCode=0 Jan 20 17:09:00 crc kubenswrapper[4558]: I0120 17:09:00.490300 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"e257a965-096b-4687-811b-b27348b6960b","Type":"ContainerDied","Data":"06874b7ffc658aefa292ba327a53635b1d027548feace3f818a84e97053573fc"} Jan 20 17:09:00 crc kubenswrapper[4558]: I0120 17:09:00.490658 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:09:00 crc kubenswrapper[4558]: I0120 17:09:00.490708 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:09:00 crc kubenswrapper[4558]: I0120 17:09:00.628948 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:09:00 crc kubenswrapper[4558]: I0120 17:09:00.799236 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-59lj2\" (UniqueName: \"kubernetes.io/projected/e257a965-096b-4687-811b-b27348b6960b-kube-api-access-59lj2\") pod \"e257a965-096b-4687-811b-b27348b6960b\" (UID: \"e257a965-096b-4687-811b-b27348b6960b\") " Jan 20 17:09:00 crc kubenswrapper[4558]: I0120 17:09:00.799358 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e257a965-096b-4687-811b-b27348b6960b-config-data\") pod \"e257a965-096b-4687-811b-b27348b6960b\" (UID: \"e257a965-096b-4687-811b-b27348b6960b\") " Jan 20 17:09:00 crc kubenswrapper[4558]: I0120 17:09:00.799575 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e257a965-096b-4687-811b-b27348b6960b-combined-ca-bundle\") pod \"e257a965-096b-4687-811b-b27348b6960b\" (UID: \"e257a965-096b-4687-811b-b27348b6960b\") " Jan 20 17:09:00 crc kubenswrapper[4558]: I0120 17:09:00.813493 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e257a965-096b-4687-811b-b27348b6960b-kube-api-access-59lj2" (OuterVolumeSpecName: "kube-api-access-59lj2") pod "e257a965-096b-4687-811b-b27348b6960b" (UID: "e257a965-096b-4687-811b-b27348b6960b"). InnerVolumeSpecName "kube-api-access-59lj2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:09:00 crc kubenswrapper[4558]: I0120 17:09:00.824159 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e257a965-096b-4687-811b-b27348b6960b-config-data" (OuterVolumeSpecName: "config-data") pod "e257a965-096b-4687-811b-b27348b6960b" (UID: "e257a965-096b-4687-811b-b27348b6960b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:09:00 crc kubenswrapper[4558]: I0120 17:09:00.827534 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e257a965-096b-4687-811b-b27348b6960b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e257a965-096b-4687-811b-b27348b6960b" (UID: "e257a965-096b-4687-811b-b27348b6960b"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:09:00 crc kubenswrapper[4558]: I0120 17:09:00.902228 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-59lj2\" (UniqueName: \"kubernetes.io/projected/e257a965-096b-4687-811b-b27348b6960b-kube-api-access-59lj2\") on node \"crc\" DevicePath \"\"" Jan 20 17:09:00 crc kubenswrapper[4558]: I0120 17:09:00.902262 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e257a965-096b-4687-811b-b27348b6960b-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:09:00 crc kubenswrapper[4558]: I0120 17:09:00.902274 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e257a965-096b-4687-811b-b27348b6960b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:09:01 crc kubenswrapper[4558]: I0120 17:09:01.506837 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"d05d78a5-0552-43f3-92ea-42cdf1ada68b","Type":"ContainerStarted","Data":"c9ef26618f8125edfa262c1bab7d474a4407427e9856b55f0403387387c12061"} Jan 20 17:09:01 crc kubenswrapper[4558]: I0120 17:09:01.509303 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"e257a965-096b-4687-811b-b27348b6960b","Type":"ContainerDied","Data":"60ee7afbfd5773827eb34e775f2737c9ed6cfd9a17640e178adf29345277e7f0"} Jan 20 17:09:01 crc kubenswrapper[4558]: I0120 17:09:01.509408 4558 scope.go:117] "RemoveContainer" containerID="06874b7ffc658aefa292ba327a53635b1d027548feace3f818a84e97053573fc" Jan 20 17:09:01 crc kubenswrapper[4558]: I0120 17:09:01.509332 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:09:01 crc kubenswrapper[4558]: I0120 17:09:01.558029 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:09:01 crc kubenswrapper[4558]: I0120 17:09:01.565947 4558 scope.go:117] "RemoveContainer" containerID="a711a286282347590079d2447ef37fa9ed0f11085d6d7a65f85e65c1c2df608f" Jan 20 17:09:01 crc kubenswrapper[4558]: E0120 17:09:01.566180 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:09:01 crc kubenswrapper[4558]: I0120 17:09:01.566744 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:09:01 crc kubenswrapper[4558]: I0120 17:09:01.578718 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:09:01 crc kubenswrapper[4558]: E0120 17:09:01.579185 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e257a965-096b-4687-811b-b27348b6960b" containerName="nova-cell0-conductor-conductor" Jan 20 17:09:01 crc kubenswrapper[4558]: I0120 17:09:01.579205 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e257a965-096b-4687-811b-b27348b6960b" containerName="nova-cell0-conductor-conductor" Jan 20 17:09:01 crc kubenswrapper[4558]: I0120 17:09:01.579594 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e257a965-096b-4687-811b-b27348b6960b" containerName="nova-cell0-conductor-conductor" Jan 20 17:09:01 crc kubenswrapper[4558]: I0120 17:09:01.580251 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:09:01 crc kubenswrapper[4558]: I0120 17:09:01.582057 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-conductor-config-data" Jan 20 17:09:01 crc kubenswrapper[4558]: I0120 17:09:01.582361 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-nova-dockercfg-zrgwt" Jan 20 17:09:01 crc kubenswrapper[4558]: I0120 17:09:01.585673 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:09:01 crc kubenswrapper[4558]: I0120 17:09:01.718910 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83e95b9c-608a-40f8-8d11-1295a1477130-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"83e95b9c-608a-40f8-8d11-1295a1477130\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:09:01 crc kubenswrapper[4558]: I0120 17:09:01.719035 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/83e95b9c-608a-40f8-8d11-1295a1477130-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"83e95b9c-608a-40f8-8d11-1295a1477130\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:09:01 crc kubenswrapper[4558]: I0120 17:09:01.719146 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7v4sb\" (UniqueName: \"kubernetes.io/projected/83e95b9c-608a-40f8-8d11-1295a1477130-kube-api-access-7v4sb\") pod \"nova-cell0-conductor-0\" (UID: \"83e95b9c-608a-40f8-8d11-1295a1477130\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:09:01 crc kubenswrapper[4558]: I0120 17:09:01.821881 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83e95b9c-608a-40f8-8d11-1295a1477130-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"83e95b9c-608a-40f8-8d11-1295a1477130\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:09:01 crc kubenswrapper[4558]: I0120 17:09:01.822009 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/83e95b9c-608a-40f8-8d11-1295a1477130-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"83e95b9c-608a-40f8-8d11-1295a1477130\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:09:01 crc kubenswrapper[4558]: I0120 17:09:01.822104 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7v4sb\" (UniqueName: \"kubernetes.io/projected/83e95b9c-608a-40f8-8d11-1295a1477130-kube-api-access-7v4sb\") pod \"nova-cell0-conductor-0\" (UID: \"83e95b9c-608a-40f8-8d11-1295a1477130\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:09:01 crc kubenswrapper[4558]: I0120 17:09:01.830463 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/83e95b9c-608a-40f8-8d11-1295a1477130-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"83e95b9c-608a-40f8-8d11-1295a1477130\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:09:01 crc kubenswrapper[4558]: I0120 17:09:01.832401 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83e95b9c-608a-40f8-8d11-1295a1477130-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"83e95b9c-608a-40f8-8d11-1295a1477130\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:09:01 crc kubenswrapper[4558]: I0120 17:09:01.837500 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7v4sb\" (UniqueName: \"kubernetes.io/projected/83e95b9c-608a-40f8-8d11-1295a1477130-kube-api-access-7v4sb\") pod \"nova-cell0-conductor-0\" (UID: \"83e95b9c-608a-40f8-8d11-1295a1477130\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:09:01 crc kubenswrapper[4558]: I0120 17:09:01.900993 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:09:02 crc kubenswrapper[4558]: I0120 17:09:02.174796 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:09:02 crc kubenswrapper[4558]: I0120 17:09:02.271293 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:09:02 crc kubenswrapper[4558]: I0120 17:09:02.357098 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:09:02 crc kubenswrapper[4558]: W0120 17:09:02.362520 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod83e95b9c_608a_40f8_8d11_1295a1477130.slice/crio-af71c6b04cedc5931fa595d1ca171992a8016dec62e03c6a86dc1004e9778821 WatchSource:0}: Error finding container af71c6b04cedc5931fa595d1ca171992a8016dec62e03c6a86dc1004e9778821: Status 404 returned error can't find the container with id af71c6b04cedc5931fa595d1ca171992a8016dec62e03c6a86dc1004e9778821 Jan 20 17:09:02 crc kubenswrapper[4558]: I0120 17:09:02.518459 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"83e95b9c-608a-40f8-8d11-1295a1477130","Type":"ContainerStarted","Data":"de349ebe41ded06fcbd0f827d2f80b48e666805321fc0fb9a57e81131ec6ac1e"} Jan 20 17:09:02 crc kubenswrapper[4558]: I0120 17:09:02.518677 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"83e95b9c-608a-40f8-8d11-1295a1477130","Type":"ContainerStarted","Data":"af71c6b04cedc5931fa595d1ca171992a8016dec62e03c6a86dc1004e9778821"} Jan 20 17:09:02 crc kubenswrapper[4558]: I0120 17:09:02.518732 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:09:02 crc kubenswrapper[4558]: I0120 17:09:02.531599 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podStartSLOduration=1.5315827359999998 podStartE2EDuration="1.531582736s" podCreationTimestamp="2026-01-20 17:09:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:09:02.528486183 +0000 UTC m=+1636.288824150" watchObservedRunningTime="2026-01-20 17:09:02.531582736 +0000 UTC m=+1636.291920703" Jan 20 17:09:02 crc kubenswrapper[4558]: I0120 17:09:02.604323 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e257a965-096b-4687-811b-b27348b6960b" 
path="/var/lib/kubelet/pods/e257a965-096b-4687-811b-b27348b6960b/volumes" Jan 20 17:09:03 crc kubenswrapper[4558]: I0120 17:09:03.555893 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="d05d78a5-0552-43f3-92ea-42cdf1ada68b" containerName="ceilometer-central-agent" containerID="cri-o://b15bbf50615d51f30e8f1fc1234a2ab3d117af8204a8fe96f52f573f00f39a70" gracePeriod=30 Jan 20 17:09:03 crc kubenswrapper[4558]: I0120 17:09:03.556330 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"d05d78a5-0552-43f3-92ea-42cdf1ada68b","Type":"ContainerStarted","Data":"2919c6702e0599cc3961a4abb6a86885d78bd6fa56cb58f102e657d5a4cf1fca"} Jan 20 17:09:03 crc kubenswrapper[4558]: I0120 17:09:03.557695 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:03 crc kubenswrapper[4558]: I0120 17:09:03.557121 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="d05d78a5-0552-43f3-92ea-42cdf1ada68b" containerName="ceilometer-notification-agent" containerID="cri-o://eabd1645a8c5904d952f7074ae5bb78c2c29c4bd6d9f4e82b38424add4274dd0" gracePeriod=30 Jan 20 17:09:03 crc kubenswrapper[4558]: I0120 17:09:03.557215 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="d05d78a5-0552-43f3-92ea-42cdf1ada68b" containerName="proxy-httpd" containerID="cri-o://2919c6702e0599cc3961a4abb6a86885d78bd6fa56cb58f102e657d5a4cf1fca" gracePeriod=30 Jan 20 17:09:03 crc kubenswrapper[4558]: I0120 17:09:03.557095 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="d05d78a5-0552-43f3-92ea-42cdf1ada68b" containerName="sg-core" containerID="cri-o://c9ef26618f8125edfa262c1bab7d474a4407427e9856b55f0403387387c12061" gracePeriod=30 Jan 20 17:09:03 crc kubenswrapper[4558]: I0120 17:09:03.581622 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=2.3157077839999998 podStartE2EDuration="6.581606193s" podCreationTimestamp="2026-01-20 17:08:57 +0000 UTC" firstStartedPulling="2026-01-20 17:08:58.287665591 +0000 UTC m=+1632.048003558" lastFinishedPulling="2026-01-20 17:09:02.553564 +0000 UTC m=+1636.313901967" observedRunningTime="2026-01-20 17:09:03.574862662 +0000 UTC m=+1637.335200629" watchObservedRunningTime="2026-01-20 17:09:03.581606193 +0000 UTC m=+1637.341944160" Jan 20 17:09:03 crc kubenswrapper[4558]: I0120 17:09:03.767294 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:09:03 crc kubenswrapper[4558]: I0120 17:09:03.767349 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:09:03 crc kubenswrapper[4558]: I0120 17:09:03.800096 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:09:03 crc kubenswrapper[4558]: I0120 17:09:03.815768 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.374518 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.481479 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d05d78a5-0552-43f3-92ea-42cdf1ada68b-combined-ca-bundle\") pod \"d05d78a5-0552-43f3-92ea-42cdf1ada68b\" (UID: \"d05d78a5-0552-43f3-92ea-42cdf1ada68b\") " Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.481730 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d05d78a5-0552-43f3-92ea-42cdf1ada68b-log-httpd\") pod \"d05d78a5-0552-43f3-92ea-42cdf1ada68b\" (UID: \"d05d78a5-0552-43f3-92ea-42cdf1ada68b\") " Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.481844 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d05d78a5-0552-43f3-92ea-42cdf1ada68b-run-httpd\") pod \"d05d78a5-0552-43f3-92ea-42cdf1ada68b\" (UID: \"d05d78a5-0552-43f3-92ea-42cdf1ada68b\") " Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.481958 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d05d78a5-0552-43f3-92ea-42cdf1ada68b-sg-core-conf-yaml\") pod \"d05d78a5-0552-43f3-92ea-42cdf1ada68b\" (UID: \"d05d78a5-0552-43f3-92ea-42cdf1ada68b\") " Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.482000 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4fpjw\" (UniqueName: \"kubernetes.io/projected/d05d78a5-0552-43f3-92ea-42cdf1ada68b-kube-api-access-4fpjw\") pod \"d05d78a5-0552-43f3-92ea-42cdf1ada68b\" (UID: \"d05d78a5-0552-43f3-92ea-42cdf1ada68b\") " Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.482083 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d05d78a5-0552-43f3-92ea-42cdf1ada68b-scripts\") pod \"d05d78a5-0552-43f3-92ea-42cdf1ada68b\" (UID: \"d05d78a5-0552-43f3-92ea-42cdf1ada68b\") " Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.482124 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d05d78a5-0552-43f3-92ea-42cdf1ada68b-config-data\") pod \"d05d78a5-0552-43f3-92ea-42cdf1ada68b\" (UID: \"d05d78a5-0552-43f3-92ea-42cdf1ada68b\") " Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.482138 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d05d78a5-0552-43f3-92ea-42cdf1ada68b-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "d05d78a5-0552-43f3-92ea-42cdf1ada68b" (UID: "d05d78a5-0552-43f3-92ea-42cdf1ada68b"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.482220 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d05d78a5-0552-43f3-92ea-42cdf1ada68b-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "d05d78a5-0552-43f3-92ea-42cdf1ada68b" (UID: "d05d78a5-0552-43f3-92ea-42cdf1ada68b"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.482544 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d05d78a5-0552-43f3-92ea-42cdf1ada68b-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.482564 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d05d78a5-0552-43f3-92ea-42cdf1ada68b-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.486706 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d05d78a5-0552-43f3-92ea-42cdf1ada68b-kube-api-access-4fpjw" (OuterVolumeSpecName: "kube-api-access-4fpjw") pod "d05d78a5-0552-43f3-92ea-42cdf1ada68b" (UID: "d05d78a5-0552-43f3-92ea-42cdf1ada68b"). InnerVolumeSpecName "kube-api-access-4fpjw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.486687 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d05d78a5-0552-43f3-92ea-42cdf1ada68b-scripts" (OuterVolumeSpecName: "scripts") pod "d05d78a5-0552-43f3-92ea-42cdf1ada68b" (UID: "d05d78a5-0552-43f3-92ea-42cdf1ada68b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.506909 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d05d78a5-0552-43f3-92ea-42cdf1ada68b-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "d05d78a5-0552-43f3-92ea-42cdf1ada68b" (UID: "d05d78a5-0552-43f3-92ea-42cdf1ada68b"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.532604 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d05d78a5-0552-43f3-92ea-42cdf1ada68b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d05d78a5-0552-43f3-92ea-42cdf1ada68b" (UID: "d05d78a5-0552-43f3-92ea-42cdf1ada68b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.557316 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d05d78a5-0552-43f3-92ea-42cdf1ada68b-config-data" (OuterVolumeSpecName: "config-data") pod "d05d78a5-0552-43f3-92ea-42cdf1ada68b" (UID: "d05d78a5-0552-43f3-92ea-42cdf1ada68b"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.567867 4558 generic.go:334] "Generic (PLEG): container finished" podID="d05d78a5-0552-43f3-92ea-42cdf1ada68b" containerID="2919c6702e0599cc3961a4abb6a86885d78bd6fa56cb58f102e657d5a4cf1fca" exitCode=0 Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.567913 4558 generic.go:334] "Generic (PLEG): container finished" podID="d05d78a5-0552-43f3-92ea-42cdf1ada68b" containerID="c9ef26618f8125edfa262c1bab7d474a4407427e9856b55f0403387387c12061" exitCode=2 Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.567923 4558 generic.go:334] "Generic (PLEG): container finished" podID="d05d78a5-0552-43f3-92ea-42cdf1ada68b" containerID="eabd1645a8c5904d952f7074ae5bb78c2c29c4bd6d9f4e82b38424add4274dd0" exitCode=0 Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.567933 4558 generic.go:334] "Generic (PLEG): container finished" podID="d05d78a5-0552-43f3-92ea-42cdf1ada68b" containerID="b15bbf50615d51f30e8f1fc1234a2ab3d117af8204a8fe96f52f573f00f39a70" exitCode=0 Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.569030 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.584233 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d05d78a5-0552-43f3-92ea-42cdf1ada68b-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.584265 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4fpjw\" (UniqueName: \"kubernetes.io/projected/d05d78a5-0552-43f3-92ea-42cdf1ada68b-kube-api-access-4fpjw\") on node \"crc\" DevicePath \"\"" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.584280 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d05d78a5-0552-43f3-92ea-42cdf1ada68b-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.584293 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d05d78a5-0552-43f3-92ea-42cdf1ada68b-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.584305 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d05d78a5-0552-43f3-92ea-42cdf1ada68b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.589027 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.589056 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"d05d78a5-0552-43f3-92ea-42cdf1ada68b","Type":"ContainerDied","Data":"2919c6702e0599cc3961a4abb6a86885d78bd6fa56cb58f102e657d5a4cf1fca"} Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.589078 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"d05d78a5-0552-43f3-92ea-42cdf1ada68b","Type":"ContainerDied","Data":"c9ef26618f8125edfa262c1bab7d474a4407427e9856b55f0403387387c12061"} Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.589092 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.589103 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"d05d78a5-0552-43f3-92ea-42cdf1ada68b","Type":"ContainerDied","Data":"eabd1645a8c5904d952f7074ae5bb78c2c29c4bd6d9f4e82b38424add4274dd0"} Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.589112 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"d05d78a5-0552-43f3-92ea-42cdf1ada68b","Type":"ContainerDied","Data":"b15bbf50615d51f30e8f1fc1234a2ab3d117af8204a8fe96f52f573f00f39a70"} Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.589122 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"d05d78a5-0552-43f3-92ea-42cdf1ada68b","Type":"ContainerDied","Data":"c66be163d29e3a263ebdc33ba20a26d838ec67c94a1b386d93fdff08291099dd"} Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.589372 4558 scope.go:117] "RemoveContainer" containerID="2919c6702e0599cc3961a4abb6a86885d78bd6fa56cb58f102e657d5a4cf1fca" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.611085 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.621532 4558 scope.go:117] "RemoveContainer" containerID="c9ef26618f8125edfa262c1bab7d474a4407427e9856b55f0403387387c12061" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.622784 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.637222 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:09:04 crc kubenswrapper[4558]: E0120 17:09:04.637706 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d05d78a5-0552-43f3-92ea-42cdf1ada68b" containerName="proxy-httpd" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.637721 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d05d78a5-0552-43f3-92ea-42cdf1ada68b" containerName="proxy-httpd" Jan 20 17:09:04 crc kubenswrapper[4558]: E0120 17:09:04.637736 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d05d78a5-0552-43f3-92ea-42cdf1ada68b" containerName="ceilometer-notification-agent" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.637742 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d05d78a5-0552-43f3-92ea-42cdf1ada68b" containerName="ceilometer-notification-agent" Jan 20 17:09:04 crc kubenswrapper[4558]: E0120 17:09:04.637773 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d05d78a5-0552-43f3-92ea-42cdf1ada68b" containerName="sg-core" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.637779 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d05d78a5-0552-43f3-92ea-42cdf1ada68b" containerName="sg-core" Jan 20 17:09:04 crc kubenswrapper[4558]: E0120 17:09:04.637794 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d05d78a5-0552-43f3-92ea-42cdf1ada68b" containerName="ceilometer-central-agent" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.637800 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d05d78a5-0552-43f3-92ea-42cdf1ada68b" containerName="ceilometer-central-agent" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.637965 4558 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="d05d78a5-0552-43f3-92ea-42cdf1ada68b" containerName="proxy-httpd" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.637983 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d05d78a5-0552-43f3-92ea-42cdf1ada68b" containerName="ceilometer-notification-agent" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.637992 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d05d78a5-0552-43f3-92ea-42cdf1ada68b" containerName="ceilometer-central-agent" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.638000 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d05d78a5-0552-43f3-92ea-42cdf1ada68b" containerName="sg-core" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.639689 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.642268 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.642491 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.646223 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.659476 4558 scope.go:117] "RemoveContainer" containerID="eabd1645a8c5904d952f7074ae5bb78c2c29c4bd6d9f4e82b38424add4274dd0" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.679522 4558 scope.go:117] "RemoveContainer" containerID="b15bbf50615d51f30e8f1fc1234a2ab3d117af8204a8fe96f52f573f00f39a70" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.686068 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d27d1164-be6c-49db-a7dc-56a74af12c32-config-data\") pod \"ceilometer-0\" (UID: \"d27d1164-be6c-49db-a7dc-56a74af12c32\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.686119 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d27d1164-be6c-49db-a7dc-56a74af12c32-scripts\") pod \"ceilometer-0\" (UID: \"d27d1164-be6c-49db-a7dc-56a74af12c32\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.686153 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d27d1164-be6c-49db-a7dc-56a74af12c32-run-httpd\") pod \"ceilometer-0\" (UID: \"d27d1164-be6c-49db-a7dc-56a74af12c32\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.686279 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d27d1164-be6c-49db-a7dc-56a74af12c32-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d27d1164-be6c-49db-a7dc-56a74af12c32\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.686306 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d27d1164-be6c-49db-a7dc-56a74af12c32-combined-ca-bundle\") pod 
\"ceilometer-0\" (UID: \"d27d1164-be6c-49db-a7dc-56a74af12c32\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.686397 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g6q6b\" (UniqueName: \"kubernetes.io/projected/d27d1164-be6c-49db-a7dc-56a74af12c32-kube-api-access-g6q6b\") pod \"ceilometer-0\" (UID: \"d27d1164-be6c-49db-a7dc-56a74af12c32\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.686421 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d27d1164-be6c-49db-a7dc-56a74af12c32-log-httpd\") pod \"ceilometer-0\" (UID: \"d27d1164-be6c-49db-a7dc-56a74af12c32\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.697479 4558 scope.go:117] "RemoveContainer" containerID="2919c6702e0599cc3961a4abb6a86885d78bd6fa56cb58f102e657d5a4cf1fca" Jan 20 17:09:04 crc kubenswrapper[4558]: E0120 17:09:04.697844 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2919c6702e0599cc3961a4abb6a86885d78bd6fa56cb58f102e657d5a4cf1fca\": container with ID starting with 2919c6702e0599cc3961a4abb6a86885d78bd6fa56cb58f102e657d5a4cf1fca not found: ID does not exist" containerID="2919c6702e0599cc3961a4abb6a86885d78bd6fa56cb58f102e657d5a4cf1fca" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.697890 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2919c6702e0599cc3961a4abb6a86885d78bd6fa56cb58f102e657d5a4cf1fca"} err="failed to get container status \"2919c6702e0599cc3961a4abb6a86885d78bd6fa56cb58f102e657d5a4cf1fca\": rpc error: code = NotFound desc = could not find container \"2919c6702e0599cc3961a4abb6a86885d78bd6fa56cb58f102e657d5a4cf1fca\": container with ID starting with 2919c6702e0599cc3961a4abb6a86885d78bd6fa56cb58f102e657d5a4cf1fca not found: ID does not exist" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.697920 4558 scope.go:117] "RemoveContainer" containerID="c9ef26618f8125edfa262c1bab7d474a4407427e9856b55f0403387387c12061" Jan 20 17:09:04 crc kubenswrapper[4558]: E0120 17:09:04.698201 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c9ef26618f8125edfa262c1bab7d474a4407427e9856b55f0403387387c12061\": container with ID starting with c9ef26618f8125edfa262c1bab7d474a4407427e9856b55f0403387387c12061 not found: ID does not exist" containerID="c9ef26618f8125edfa262c1bab7d474a4407427e9856b55f0403387387c12061" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.698222 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c9ef26618f8125edfa262c1bab7d474a4407427e9856b55f0403387387c12061"} err="failed to get container status \"c9ef26618f8125edfa262c1bab7d474a4407427e9856b55f0403387387c12061\": rpc error: code = NotFound desc = could not find container \"c9ef26618f8125edfa262c1bab7d474a4407427e9856b55f0403387387c12061\": container with ID starting with c9ef26618f8125edfa262c1bab7d474a4407427e9856b55f0403387387c12061 not found: ID does not exist" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.698237 4558 scope.go:117] "RemoveContainer" containerID="eabd1645a8c5904d952f7074ae5bb78c2c29c4bd6d9f4e82b38424add4274dd0" Jan 20 
17:09:04 crc kubenswrapper[4558]: E0120 17:09:04.699416 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eabd1645a8c5904d952f7074ae5bb78c2c29c4bd6d9f4e82b38424add4274dd0\": container with ID starting with eabd1645a8c5904d952f7074ae5bb78c2c29c4bd6d9f4e82b38424add4274dd0 not found: ID does not exist" containerID="eabd1645a8c5904d952f7074ae5bb78c2c29c4bd6d9f4e82b38424add4274dd0" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.699469 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eabd1645a8c5904d952f7074ae5bb78c2c29c4bd6d9f4e82b38424add4274dd0"} err="failed to get container status \"eabd1645a8c5904d952f7074ae5bb78c2c29c4bd6d9f4e82b38424add4274dd0\": rpc error: code = NotFound desc = could not find container \"eabd1645a8c5904d952f7074ae5bb78c2c29c4bd6d9f4e82b38424add4274dd0\": container with ID starting with eabd1645a8c5904d952f7074ae5bb78c2c29c4bd6d9f4e82b38424add4274dd0 not found: ID does not exist" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.699501 4558 scope.go:117] "RemoveContainer" containerID="b15bbf50615d51f30e8f1fc1234a2ab3d117af8204a8fe96f52f573f00f39a70" Jan 20 17:09:04 crc kubenswrapper[4558]: E0120 17:09:04.699886 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b15bbf50615d51f30e8f1fc1234a2ab3d117af8204a8fe96f52f573f00f39a70\": container with ID starting with b15bbf50615d51f30e8f1fc1234a2ab3d117af8204a8fe96f52f573f00f39a70 not found: ID does not exist" containerID="b15bbf50615d51f30e8f1fc1234a2ab3d117af8204a8fe96f52f573f00f39a70" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.699968 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b15bbf50615d51f30e8f1fc1234a2ab3d117af8204a8fe96f52f573f00f39a70"} err="failed to get container status \"b15bbf50615d51f30e8f1fc1234a2ab3d117af8204a8fe96f52f573f00f39a70\": rpc error: code = NotFound desc = could not find container \"b15bbf50615d51f30e8f1fc1234a2ab3d117af8204a8fe96f52f573f00f39a70\": container with ID starting with b15bbf50615d51f30e8f1fc1234a2ab3d117af8204a8fe96f52f573f00f39a70 not found: ID does not exist" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.700036 4558 scope.go:117] "RemoveContainer" containerID="2919c6702e0599cc3961a4abb6a86885d78bd6fa56cb58f102e657d5a4cf1fca" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.700445 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2919c6702e0599cc3961a4abb6a86885d78bd6fa56cb58f102e657d5a4cf1fca"} err="failed to get container status \"2919c6702e0599cc3961a4abb6a86885d78bd6fa56cb58f102e657d5a4cf1fca\": rpc error: code = NotFound desc = could not find container \"2919c6702e0599cc3961a4abb6a86885d78bd6fa56cb58f102e657d5a4cf1fca\": container with ID starting with 2919c6702e0599cc3961a4abb6a86885d78bd6fa56cb58f102e657d5a4cf1fca not found: ID does not exist" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.700473 4558 scope.go:117] "RemoveContainer" containerID="c9ef26618f8125edfa262c1bab7d474a4407427e9856b55f0403387387c12061" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.700744 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c9ef26618f8125edfa262c1bab7d474a4407427e9856b55f0403387387c12061"} err="failed to get container status 
\"c9ef26618f8125edfa262c1bab7d474a4407427e9856b55f0403387387c12061\": rpc error: code = NotFound desc = could not find container \"c9ef26618f8125edfa262c1bab7d474a4407427e9856b55f0403387387c12061\": container with ID starting with c9ef26618f8125edfa262c1bab7d474a4407427e9856b55f0403387387c12061 not found: ID does not exist" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.700817 4558 scope.go:117] "RemoveContainer" containerID="eabd1645a8c5904d952f7074ae5bb78c2c29c4bd6d9f4e82b38424add4274dd0" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.701146 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eabd1645a8c5904d952f7074ae5bb78c2c29c4bd6d9f4e82b38424add4274dd0"} err="failed to get container status \"eabd1645a8c5904d952f7074ae5bb78c2c29c4bd6d9f4e82b38424add4274dd0\": rpc error: code = NotFound desc = could not find container \"eabd1645a8c5904d952f7074ae5bb78c2c29c4bd6d9f4e82b38424add4274dd0\": container with ID starting with eabd1645a8c5904d952f7074ae5bb78c2c29c4bd6d9f4e82b38424add4274dd0 not found: ID does not exist" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.701211 4558 scope.go:117] "RemoveContainer" containerID="b15bbf50615d51f30e8f1fc1234a2ab3d117af8204a8fe96f52f573f00f39a70" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.701533 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b15bbf50615d51f30e8f1fc1234a2ab3d117af8204a8fe96f52f573f00f39a70"} err="failed to get container status \"b15bbf50615d51f30e8f1fc1234a2ab3d117af8204a8fe96f52f573f00f39a70\": rpc error: code = NotFound desc = could not find container \"b15bbf50615d51f30e8f1fc1234a2ab3d117af8204a8fe96f52f573f00f39a70\": container with ID starting with b15bbf50615d51f30e8f1fc1234a2ab3d117af8204a8fe96f52f573f00f39a70 not found: ID does not exist" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.701554 4558 scope.go:117] "RemoveContainer" containerID="2919c6702e0599cc3961a4abb6a86885d78bd6fa56cb58f102e657d5a4cf1fca" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.701875 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2919c6702e0599cc3961a4abb6a86885d78bd6fa56cb58f102e657d5a4cf1fca"} err="failed to get container status \"2919c6702e0599cc3961a4abb6a86885d78bd6fa56cb58f102e657d5a4cf1fca\": rpc error: code = NotFound desc = could not find container \"2919c6702e0599cc3961a4abb6a86885d78bd6fa56cb58f102e657d5a4cf1fca\": container with ID starting with 2919c6702e0599cc3961a4abb6a86885d78bd6fa56cb58f102e657d5a4cf1fca not found: ID does not exist" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.701897 4558 scope.go:117] "RemoveContainer" containerID="c9ef26618f8125edfa262c1bab7d474a4407427e9856b55f0403387387c12061" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.702131 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c9ef26618f8125edfa262c1bab7d474a4407427e9856b55f0403387387c12061"} err="failed to get container status \"c9ef26618f8125edfa262c1bab7d474a4407427e9856b55f0403387387c12061\": rpc error: code = NotFound desc = could not find container \"c9ef26618f8125edfa262c1bab7d474a4407427e9856b55f0403387387c12061\": container with ID starting with c9ef26618f8125edfa262c1bab7d474a4407427e9856b55f0403387387c12061 not found: ID does not exist" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.702251 4558 scope.go:117] "RemoveContainer" 
containerID="eabd1645a8c5904d952f7074ae5bb78c2c29c4bd6d9f4e82b38424add4274dd0" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.702570 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eabd1645a8c5904d952f7074ae5bb78c2c29c4bd6d9f4e82b38424add4274dd0"} err="failed to get container status \"eabd1645a8c5904d952f7074ae5bb78c2c29c4bd6d9f4e82b38424add4274dd0\": rpc error: code = NotFound desc = could not find container \"eabd1645a8c5904d952f7074ae5bb78c2c29c4bd6d9f4e82b38424add4274dd0\": container with ID starting with eabd1645a8c5904d952f7074ae5bb78c2c29c4bd6d9f4e82b38424add4274dd0 not found: ID does not exist" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.702590 4558 scope.go:117] "RemoveContainer" containerID="b15bbf50615d51f30e8f1fc1234a2ab3d117af8204a8fe96f52f573f00f39a70" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.702846 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b15bbf50615d51f30e8f1fc1234a2ab3d117af8204a8fe96f52f573f00f39a70"} err="failed to get container status \"b15bbf50615d51f30e8f1fc1234a2ab3d117af8204a8fe96f52f573f00f39a70\": rpc error: code = NotFound desc = could not find container \"b15bbf50615d51f30e8f1fc1234a2ab3d117af8204a8fe96f52f573f00f39a70\": container with ID starting with b15bbf50615d51f30e8f1fc1234a2ab3d117af8204a8fe96f52f573f00f39a70 not found: ID does not exist" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.702868 4558 scope.go:117] "RemoveContainer" containerID="2919c6702e0599cc3961a4abb6a86885d78bd6fa56cb58f102e657d5a4cf1fca" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.703137 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2919c6702e0599cc3961a4abb6a86885d78bd6fa56cb58f102e657d5a4cf1fca"} err="failed to get container status \"2919c6702e0599cc3961a4abb6a86885d78bd6fa56cb58f102e657d5a4cf1fca\": rpc error: code = NotFound desc = could not find container \"2919c6702e0599cc3961a4abb6a86885d78bd6fa56cb58f102e657d5a4cf1fca\": container with ID starting with 2919c6702e0599cc3961a4abb6a86885d78bd6fa56cb58f102e657d5a4cf1fca not found: ID does not exist" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.703232 4558 scope.go:117] "RemoveContainer" containerID="c9ef26618f8125edfa262c1bab7d474a4407427e9856b55f0403387387c12061" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.703557 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c9ef26618f8125edfa262c1bab7d474a4407427e9856b55f0403387387c12061"} err="failed to get container status \"c9ef26618f8125edfa262c1bab7d474a4407427e9856b55f0403387387c12061\": rpc error: code = NotFound desc = could not find container \"c9ef26618f8125edfa262c1bab7d474a4407427e9856b55f0403387387c12061\": container with ID starting with c9ef26618f8125edfa262c1bab7d474a4407427e9856b55f0403387387c12061 not found: ID does not exist" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.703633 4558 scope.go:117] "RemoveContainer" containerID="eabd1645a8c5904d952f7074ae5bb78c2c29c4bd6d9f4e82b38424add4274dd0" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.703939 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eabd1645a8c5904d952f7074ae5bb78c2c29c4bd6d9f4e82b38424add4274dd0"} err="failed to get container status \"eabd1645a8c5904d952f7074ae5bb78c2c29c4bd6d9f4e82b38424add4274dd0\": rpc error: code = NotFound desc = could not find 
container \"eabd1645a8c5904d952f7074ae5bb78c2c29c4bd6d9f4e82b38424add4274dd0\": container with ID starting with eabd1645a8c5904d952f7074ae5bb78c2c29c4bd6d9f4e82b38424add4274dd0 not found: ID does not exist" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.703961 4558 scope.go:117] "RemoveContainer" containerID="b15bbf50615d51f30e8f1fc1234a2ab3d117af8204a8fe96f52f573f00f39a70" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.704229 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b15bbf50615d51f30e8f1fc1234a2ab3d117af8204a8fe96f52f573f00f39a70"} err="failed to get container status \"b15bbf50615d51f30e8f1fc1234a2ab3d117af8204a8fe96f52f573f00f39a70\": rpc error: code = NotFound desc = could not find container \"b15bbf50615d51f30e8f1fc1234a2ab3d117af8204a8fe96f52f573f00f39a70\": container with ID starting with b15bbf50615d51f30e8f1fc1234a2ab3d117af8204a8fe96f52f573f00f39a70 not found: ID does not exist" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.787580 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d27d1164-be6c-49db-a7dc-56a74af12c32-config-data\") pod \"ceilometer-0\" (UID: \"d27d1164-be6c-49db-a7dc-56a74af12c32\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.787625 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d27d1164-be6c-49db-a7dc-56a74af12c32-scripts\") pod \"ceilometer-0\" (UID: \"d27d1164-be6c-49db-a7dc-56a74af12c32\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.787660 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d27d1164-be6c-49db-a7dc-56a74af12c32-run-httpd\") pod \"ceilometer-0\" (UID: \"d27d1164-be6c-49db-a7dc-56a74af12c32\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.787703 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d27d1164-be6c-49db-a7dc-56a74af12c32-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d27d1164-be6c-49db-a7dc-56a74af12c32\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.787722 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d27d1164-be6c-49db-a7dc-56a74af12c32-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d27d1164-be6c-49db-a7dc-56a74af12c32\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.787768 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g6q6b\" (UniqueName: \"kubernetes.io/projected/d27d1164-be6c-49db-a7dc-56a74af12c32-kube-api-access-g6q6b\") pod \"ceilometer-0\" (UID: \"d27d1164-be6c-49db-a7dc-56a74af12c32\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.787788 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d27d1164-be6c-49db-a7dc-56a74af12c32-log-httpd\") pod \"ceilometer-0\" (UID: \"d27d1164-be6c-49db-a7dc-56a74af12c32\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:04 
crc kubenswrapper[4558]: I0120 17:09:04.788497 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d27d1164-be6c-49db-a7dc-56a74af12c32-run-httpd\") pod \"ceilometer-0\" (UID: \"d27d1164-be6c-49db-a7dc-56a74af12c32\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.788562 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d27d1164-be6c-49db-a7dc-56a74af12c32-log-httpd\") pod \"ceilometer-0\" (UID: \"d27d1164-be6c-49db-a7dc-56a74af12c32\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.792573 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d27d1164-be6c-49db-a7dc-56a74af12c32-scripts\") pod \"ceilometer-0\" (UID: \"d27d1164-be6c-49db-a7dc-56a74af12c32\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.792578 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d27d1164-be6c-49db-a7dc-56a74af12c32-config-data\") pod \"ceilometer-0\" (UID: \"d27d1164-be6c-49db-a7dc-56a74af12c32\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.793803 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d27d1164-be6c-49db-a7dc-56a74af12c32-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d27d1164-be6c-49db-a7dc-56a74af12c32\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.794311 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d27d1164-be6c-49db-a7dc-56a74af12c32-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d27d1164-be6c-49db-a7dc-56a74af12c32\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.801373 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g6q6b\" (UniqueName: \"kubernetes.io/projected/d27d1164-be6c-49db-a7dc-56a74af12c32-kube-api-access-g6q6b\") pod \"ceilometer-0\" (UID: \"d27d1164-be6c-49db-a7dc-56a74af12c32\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:04 crc kubenswrapper[4558]: I0120 17:09:04.962291 4558 util.go:30] "No sandbox for pod can be found. 
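The burst of "ContainerStatus from runtime service failed ... NotFound" and "DeleteContainer returned error" records a little above is the kubelet re-issuing RemoveContainer for container IDs that cri-o has already forgotten; a NotFound answer at that point simply means the cleanup goal is already met. Below is a minimal sketch of that "treat NotFound as already deleted" idiom over a gRPC-style error. The helper and the fake runtime call are invented for illustration; only the google.golang.org/grpc status/codes packages are real, and this is the general pattern rather than the kubelet's exact code.

    package main

    import (
        "fmt"

        "google.golang.org/grpc/codes"
        "google.golang.org/grpc/status"
    )

    // removeContainer wraps a CRI-style delete call: a NotFound reply means the
    // container is already gone, so it is treated as success instead of an error.
    func removeContainer(remove func(id string) error, id string) error {
        err := remove(id)
        if err == nil || status.Code(err) == codes.NotFound {
            return nil // already deleted: nothing left to do
        }
        return err
    }

    func main() {
        // A stand-in for the real runtime call, always answering NotFound,
        // like the repeated "ID does not exist" replies in the records above.
        fake := func(id string) error {
            return status.Errorf(codes.NotFound, "could not find container %q", id)
        }
        fmt.Println(removeContainer(fake, "2919c6702e05")) // prints <nil>
    }
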
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:05 crc kubenswrapper[4558]: I0120 17:09:05.370255 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:09:05 crc kubenswrapper[4558]: W0120 17:09:05.373100 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd27d1164_be6c_49db_a7dc_56a74af12c32.slice/crio-067fc0310ed1a4c04e1d931ec82c2d2dac63120cd52700b0d425c8fd43a6a9e2 WatchSource:0}: Error finding container 067fc0310ed1a4c04e1d931ec82c2d2dac63120cd52700b0d425c8fd43a6a9e2: Status 404 returned error can't find the container with id 067fc0310ed1a4c04e1d931ec82c2d2dac63120cd52700b0d425c8fd43a6a9e2 Jan 20 17:09:05 crc kubenswrapper[4558]: I0120 17:09:05.597883 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"d27d1164-be6c-49db-a7dc-56a74af12c32","Type":"ContainerStarted","Data":"067fc0310ed1a4c04e1d931ec82c2d2dac63120cd52700b0d425c8fd43a6a9e2"} Jan 20 17:09:06 crc kubenswrapper[4558]: I0120 17:09:06.243217 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:09:06 crc kubenswrapper[4558]: I0120 17:09:06.243440 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="83e95b9c-608a-40f8-8d11-1295a1477130" containerName="nova-cell0-conductor-conductor" containerID="cri-o://de349ebe41ded06fcbd0f827d2f80b48e666805321fc0fb9a57e81131ec6ac1e" gracePeriod=30 Jan 20 17:09:06 crc kubenswrapper[4558]: I0120 17:09:06.248654 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:09:06 crc kubenswrapper[4558]: I0120 17:09:06.401694 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:09:06 crc kubenswrapper[4558]: I0120 17:09:06.574761 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d05d78a5-0552-43f3-92ea-42cdf1ada68b" path="/var/lib/kubelet/pods/d05d78a5-0552-43f3-92ea-42cdf1ada68b/volumes" Jan 20 17:09:06 crc kubenswrapper[4558]: I0120 17:09:06.602968 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:09:06 crc kubenswrapper[4558]: I0120 17:09:06.608849 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"d27d1164-be6c-49db-a7dc-56a74af12c32","Type":"ContainerStarted","Data":"31849b24749f11c992cc77007199bf8bbbdb90dcbcb43ee7a0007d17faecc6e4"} Jan 20 17:09:07 crc kubenswrapper[4558]: I0120 17:09:07.523314 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:09:07 crc kubenswrapper[4558]: I0120 17:09:07.622121 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"d27d1164-be6c-49db-a7dc-56a74af12c32","Type":"ContainerStarted","Data":"c6af4b5bc09c1b45c7198223b309b4b990177d845ec9c86ae19ba014a2f34f00"} Jan 20 17:09:07 crc kubenswrapper[4558]: I0120 17:09:07.623629 4558 generic.go:334] "Generic (PLEG): container finished" podID="83e95b9c-608a-40f8-8d11-1295a1477130" containerID="de349ebe41ded06fcbd0f827d2f80b48e666805321fc0fb9a57e81131ec6ac1e" exitCode=0 Jan 20 17:09:07 crc kubenswrapper[4558]: I0120 17:09:07.623662 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:09:07 crc kubenswrapper[4558]: I0120 17:09:07.623709 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"83e95b9c-608a-40f8-8d11-1295a1477130","Type":"ContainerDied","Data":"de349ebe41ded06fcbd0f827d2f80b48e666805321fc0fb9a57e81131ec6ac1e"} Jan 20 17:09:07 crc kubenswrapper[4558]: I0120 17:09:07.623752 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"83e95b9c-608a-40f8-8d11-1295a1477130","Type":"ContainerDied","Data":"af71c6b04cedc5931fa595d1ca171992a8016dec62e03c6a86dc1004e9778821"} Jan 20 17:09:07 crc kubenswrapper[4558]: I0120 17:09:07.623774 4558 scope.go:117] "RemoveContainer" containerID="de349ebe41ded06fcbd0f827d2f80b48e666805321fc0fb9a57e81131ec6ac1e" Jan 20 17:09:07 crc kubenswrapper[4558]: I0120 17:09:07.650772 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83e95b9c-608a-40f8-8d11-1295a1477130-combined-ca-bundle\") pod \"83e95b9c-608a-40f8-8d11-1295a1477130\" (UID: \"83e95b9c-608a-40f8-8d11-1295a1477130\") " Jan 20 17:09:07 crc kubenswrapper[4558]: I0120 17:09:07.651215 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7v4sb\" (UniqueName: \"kubernetes.io/projected/83e95b9c-608a-40f8-8d11-1295a1477130-kube-api-access-7v4sb\") pod \"83e95b9c-608a-40f8-8d11-1295a1477130\" (UID: \"83e95b9c-608a-40f8-8d11-1295a1477130\") " Jan 20 17:09:07 crc kubenswrapper[4558]: I0120 17:09:07.651370 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/83e95b9c-608a-40f8-8d11-1295a1477130-config-data\") pod \"83e95b9c-608a-40f8-8d11-1295a1477130\" (UID: \"83e95b9c-608a-40f8-8d11-1295a1477130\") " Jan 20 17:09:07 crc kubenswrapper[4558]: I0120 17:09:07.655414 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/83e95b9c-608a-40f8-8d11-1295a1477130-kube-api-access-7v4sb" (OuterVolumeSpecName: "kube-api-access-7v4sb") pod "83e95b9c-608a-40f8-8d11-1295a1477130" (UID: "83e95b9c-608a-40f8-8d11-1295a1477130"). InnerVolumeSpecName "kube-api-access-7v4sb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:09:07 crc kubenswrapper[4558]: I0120 17:09:07.658820 4558 scope.go:117] "RemoveContainer" containerID="de349ebe41ded06fcbd0f827d2f80b48e666805321fc0fb9a57e81131ec6ac1e" Jan 20 17:09:07 crc kubenswrapper[4558]: E0120 17:09:07.659307 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"de349ebe41ded06fcbd0f827d2f80b48e666805321fc0fb9a57e81131ec6ac1e\": container with ID starting with de349ebe41ded06fcbd0f827d2f80b48e666805321fc0fb9a57e81131ec6ac1e not found: ID does not exist" containerID="de349ebe41ded06fcbd0f827d2f80b48e666805321fc0fb9a57e81131ec6ac1e" Jan 20 17:09:07 crc kubenswrapper[4558]: I0120 17:09:07.659414 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"de349ebe41ded06fcbd0f827d2f80b48e666805321fc0fb9a57e81131ec6ac1e"} err="failed to get container status \"de349ebe41ded06fcbd0f827d2f80b48e666805321fc0fb9a57e81131ec6ac1e\": rpc error: code = NotFound desc = could not find container \"de349ebe41ded06fcbd0f827d2f80b48e666805321fc0fb9a57e81131ec6ac1e\": container with ID starting with de349ebe41ded06fcbd0f827d2f80b48e666805321fc0fb9a57e81131ec6ac1e not found: ID does not exist" Jan 20 17:09:07 crc kubenswrapper[4558]: I0120 17:09:07.682242 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/83e95b9c-608a-40f8-8d11-1295a1477130-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "83e95b9c-608a-40f8-8d11-1295a1477130" (UID: "83e95b9c-608a-40f8-8d11-1295a1477130"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:09:07 crc kubenswrapper[4558]: I0120 17:09:07.683010 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/83e95b9c-608a-40f8-8d11-1295a1477130-config-data" (OuterVolumeSpecName: "config-data") pod "83e95b9c-608a-40f8-8d11-1295a1477130" (UID: "83e95b9c-608a-40f8-8d11-1295a1477130"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:09:07 crc kubenswrapper[4558]: I0120 17:09:07.754244 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7v4sb\" (UniqueName: \"kubernetes.io/projected/83e95b9c-608a-40f8-8d11-1295a1477130-kube-api-access-7v4sb\") on node \"crc\" DevicePath \"\"" Jan 20 17:09:07 crc kubenswrapper[4558]: I0120 17:09:07.754356 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/83e95b9c-608a-40f8-8d11-1295a1477130-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:09:07 crc kubenswrapper[4558]: I0120 17:09:07.754501 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83e95b9c-608a-40f8-8d11-1295a1477130-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:09:07 crc kubenswrapper[4558]: I0120 17:09:07.949479 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:09:07 crc kubenswrapper[4558]: I0120 17:09:07.957122 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:09:07 crc kubenswrapper[4558]: I0120 17:09:07.974530 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:09:07 crc kubenswrapper[4558]: E0120 17:09:07.974940 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83e95b9c-608a-40f8-8d11-1295a1477130" containerName="nova-cell0-conductor-conductor" Jan 20 17:09:07 crc kubenswrapper[4558]: I0120 17:09:07.974963 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="83e95b9c-608a-40f8-8d11-1295a1477130" containerName="nova-cell0-conductor-conductor" Jan 20 17:09:07 crc kubenswrapper[4558]: I0120 17:09:07.975188 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="83e95b9c-608a-40f8-8d11-1295a1477130" containerName="nova-cell0-conductor-conductor" Jan 20 17:09:07 crc kubenswrapper[4558]: I0120 17:09:07.975851 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:09:07 crc kubenswrapper[4558]: I0120 17:09:07.977545 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-nova-dockercfg-zrgwt" Jan 20 17:09:07 crc kubenswrapper[4558]: I0120 17:09:07.977622 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-conductor-config-data" Jan 20 17:09:07 crc kubenswrapper[4558]: I0120 17:09:07.988430 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:09:08 crc kubenswrapper[4558]: I0120 17:09:08.163660 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf954b15-e163-4f99-8c1b-5e04d06666bc-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"cf954b15-e163-4f99-8c1b-5e04d06666bc\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:09:08 crc kubenswrapper[4558]: I0120 17:09:08.163728 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf954b15-e163-4f99-8c1b-5e04d06666bc-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"cf954b15-e163-4f99-8c1b-5e04d06666bc\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:09:08 crc kubenswrapper[4558]: I0120 17:09:08.164307 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2vzrf\" (UniqueName: \"kubernetes.io/projected/cf954b15-e163-4f99-8c1b-5e04d06666bc-kube-api-access-2vzrf\") pod \"nova-cell0-conductor-0\" (UID: \"cf954b15-e163-4f99-8c1b-5e04d06666bc\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:09:08 crc kubenswrapper[4558]: I0120 17:09:08.266638 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2vzrf\" (UniqueName: \"kubernetes.io/projected/cf954b15-e163-4f99-8c1b-5e04d06666bc-kube-api-access-2vzrf\") pod \"nova-cell0-conductor-0\" (UID: \"cf954b15-e163-4f99-8c1b-5e04d06666bc\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:09:08 crc kubenswrapper[4558]: I0120 17:09:08.266842 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf954b15-e163-4f99-8c1b-5e04d06666bc-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"cf954b15-e163-4f99-8c1b-5e04d06666bc\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:09:08 crc kubenswrapper[4558]: I0120 17:09:08.266932 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf954b15-e163-4f99-8c1b-5e04d06666bc-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"cf954b15-e163-4f99-8c1b-5e04d06666bc\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:09:08 crc kubenswrapper[4558]: I0120 17:09:08.273527 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf954b15-e163-4f99-8c1b-5e04d06666bc-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"cf954b15-e163-4f99-8c1b-5e04d06666bc\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:09:08 crc kubenswrapper[4558]: I0120 17:09:08.274346 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/cf954b15-e163-4f99-8c1b-5e04d06666bc-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"cf954b15-e163-4f99-8c1b-5e04d06666bc\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:09:08 crc kubenswrapper[4558]: I0120 17:09:08.284961 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2vzrf\" (UniqueName: \"kubernetes.io/projected/cf954b15-e163-4f99-8c1b-5e04d06666bc-kube-api-access-2vzrf\") pod \"nova-cell0-conductor-0\" (UID: \"cf954b15-e163-4f99-8c1b-5e04d06666bc\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:09:08 crc kubenswrapper[4558]: I0120 17:09:08.321694 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:09:08 crc kubenswrapper[4558]: I0120 17:09:08.577215 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="83e95b9c-608a-40f8-8d11-1295a1477130" path="/var/lib/kubelet/pods/83e95b9c-608a-40f8-8d11-1295a1477130/volumes" Jan 20 17:09:08 crc kubenswrapper[4558]: I0120 17:09:08.637911 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"d27d1164-be6c-49db-a7dc-56a74af12c32","Type":"ContainerStarted","Data":"ee56317e16be231d1efe3d77c889580e5893f52e928fa0e2d6f9b6e73674a627"} Jan 20 17:09:08 crc kubenswrapper[4558]: I0120 17:09:08.770031 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:09:09 crc kubenswrapper[4558]: I0120 17:09:09.651226 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"d27d1164-be6c-49db-a7dc-56a74af12c32","Type":"ContainerStarted","Data":"4d2c3cb38019daa5f1141a52333f2c8a7b8fff5b5d7d4a134d4be9a4fd78ad94"} Jan 20 17:09:09 crc kubenswrapper[4558]: I0120 17:09:09.651382 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="d27d1164-be6c-49db-a7dc-56a74af12c32" containerName="ceilometer-central-agent" containerID="cri-o://31849b24749f11c992cc77007199bf8bbbdb90dcbcb43ee7a0007d17faecc6e4" gracePeriod=30 Jan 20 17:09:09 crc kubenswrapper[4558]: I0120 17:09:09.651410 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:09 crc kubenswrapper[4558]: I0120 17:09:09.651461 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="d27d1164-be6c-49db-a7dc-56a74af12c32" containerName="sg-core" containerID="cri-o://ee56317e16be231d1efe3d77c889580e5893f52e928fa0e2d6f9b6e73674a627" gracePeriod=30 Jan 20 17:09:09 crc kubenswrapper[4558]: I0120 17:09:09.651505 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="d27d1164-be6c-49db-a7dc-56a74af12c32" containerName="ceilometer-notification-agent" containerID="cri-o://c6af4b5bc09c1b45c7198223b309b4b990177d845ec9c86ae19ba014a2f34f00" gracePeriod=30 Jan 20 17:09:09 crc kubenswrapper[4558]: I0120 17:09:09.651598 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="d27d1164-be6c-49db-a7dc-56a74af12c32" containerName="proxy-httpd" containerID="cri-o://4d2c3cb38019daa5f1141a52333f2c8a7b8fff5b5d7d4a134d4be9a4fd78ad94" gracePeriod=30 Jan 20 17:09:09 crc kubenswrapper[4558]: I0120 17:09:09.658293 4558 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"cf954b15-e163-4f99-8c1b-5e04d06666bc","Type":"ContainerStarted","Data":"d149c52b2c9aabf55ad4004fb586734f8d22f15451b6f01163da779feaa3c048"} Jan 20 17:09:09 crc kubenswrapper[4558]: I0120 17:09:09.658328 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"cf954b15-e163-4f99-8c1b-5e04d06666bc","Type":"ContainerStarted","Data":"7d46177eacff73e145f6daa61019cd59b81967045d98118ff19d2492dc4f0cf9"} Jan 20 17:09:09 crc kubenswrapper[4558]: I0120 17:09:09.658936 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:09:09 crc kubenswrapper[4558]: I0120 17:09:09.679079 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=1.8647232200000001 podStartE2EDuration="5.679055098s" podCreationTimestamp="2026-01-20 17:09:04 +0000 UTC" firstStartedPulling="2026-01-20 17:09:05.376071492 +0000 UTC m=+1639.136409459" lastFinishedPulling="2026-01-20 17:09:09.19040337 +0000 UTC m=+1642.950741337" observedRunningTime="2026-01-20 17:09:09.669291517 +0000 UTC m=+1643.429629484" watchObservedRunningTime="2026-01-20 17:09:09.679055098 +0000 UTC m=+1643.439393064" Jan 20 17:09:09 crc kubenswrapper[4558]: I0120 17:09:09.694725 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podStartSLOduration=2.6946955580000003 podStartE2EDuration="2.694695558s" podCreationTimestamp="2026-01-20 17:09:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:09:09.688626596 +0000 UTC m=+1643.448964563" watchObservedRunningTime="2026-01-20 17:09:09.694695558 +0000 UTC m=+1643.455033525" Jan 20 17:09:09 crc kubenswrapper[4558]: E0120 17:09:09.715022 4558 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd27d1164_be6c_49db_a7dc_56a74af12c32.slice/crio-ee56317e16be231d1efe3d77c889580e5893f52e928fa0e2d6f9b6e73674a627.scope\": RecentStats: unable to find data in memory cache]" Jan 20 17:09:10 crc kubenswrapper[4558]: I0120 17:09:10.673025 4558 generic.go:334] "Generic (PLEG): container finished" podID="d27d1164-be6c-49db-a7dc-56a74af12c32" containerID="4d2c3cb38019daa5f1141a52333f2c8a7b8fff5b5d7d4a134d4be9a4fd78ad94" exitCode=0 Jan 20 17:09:10 crc kubenswrapper[4558]: I0120 17:09:10.673424 4558 generic.go:334] "Generic (PLEG): container finished" podID="d27d1164-be6c-49db-a7dc-56a74af12c32" containerID="ee56317e16be231d1efe3d77c889580e5893f52e928fa0e2d6f9b6e73674a627" exitCode=2 Jan 20 17:09:10 crc kubenswrapper[4558]: I0120 17:09:10.673433 4558 generic.go:334] "Generic (PLEG): container finished" podID="d27d1164-be6c-49db-a7dc-56a74af12c32" containerID="c6af4b5bc09c1b45c7198223b309b4b990177d845ec9c86ae19ba014a2f34f00" exitCode=0 Jan 20 17:09:10 crc kubenswrapper[4558]: I0120 17:09:10.673070 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"d27d1164-be6c-49db-a7dc-56a74af12c32","Type":"ContainerDied","Data":"4d2c3cb38019daa5f1141a52333f2c8a7b8fff5b5d7d4a134d4be9a4fd78ad94"} Jan 20 17:09:10 crc kubenswrapper[4558]: I0120 17:09:10.673544 4558 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"d27d1164-be6c-49db-a7dc-56a74af12c32","Type":"ContainerDied","Data":"ee56317e16be231d1efe3d77c889580e5893f52e928fa0e2d6f9b6e73674a627"} Jan 20 17:09:10 crc kubenswrapper[4558]: I0120 17:09:10.673558 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"d27d1164-be6c-49db-a7dc-56a74af12c32","Type":"ContainerDied","Data":"c6af4b5bc09c1b45c7198223b309b4b990177d845ec9c86ae19ba014a2f34f00"} Jan 20 17:09:12 crc kubenswrapper[4558]: I0120 17:09:12.472702 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:12 crc kubenswrapper[4558]: I0120 17:09:12.567643 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d27d1164-be6c-49db-a7dc-56a74af12c32-config-data\") pod \"d27d1164-be6c-49db-a7dc-56a74af12c32\" (UID: \"d27d1164-be6c-49db-a7dc-56a74af12c32\") " Jan 20 17:09:12 crc kubenswrapper[4558]: I0120 17:09:12.567870 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d27d1164-be6c-49db-a7dc-56a74af12c32-log-httpd\") pod \"d27d1164-be6c-49db-a7dc-56a74af12c32\" (UID: \"d27d1164-be6c-49db-a7dc-56a74af12c32\") " Jan 20 17:09:12 crc kubenswrapper[4558]: I0120 17:09:12.567968 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d27d1164-be6c-49db-a7dc-56a74af12c32-scripts\") pod \"d27d1164-be6c-49db-a7dc-56a74af12c32\" (UID: \"d27d1164-be6c-49db-a7dc-56a74af12c32\") " Jan 20 17:09:12 crc kubenswrapper[4558]: I0120 17:09:12.568129 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d27d1164-be6c-49db-a7dc-56a74af12c32-combined-ca-bundle\") pod \"d27d1164-be6c-49db-a7dc-56a74af12c32\" (UID: \"d27d1164-be6c-49db-a7dc-56a74af12c32\") " Jan 20 17:09:12 crc kubenswrapper[4558]: I0120 17:09:12.568281 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g6q6b\" (UniqueName: \"kubernetes.io/projected/d27d1164-be6c-49db-a7dc-56a74af12c32-kube-api-access-g6q6b\") pod \"d27d1164-be6c-49db-a7dc-56a74af12c32\" (UID: \"d27d1164-be6c-49db-a7dc-56a74af12c32\") " Jan 20 17:09:12 crc kubenswrapper[4558]: I0120 17:09:12.568316 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d27d1164-be6c-49db-a7dc-56a74af12c32-sg-core-conf-yaml\") pod \"d27d1164-be6c-49db-a7dc-56a74af12c32\" (UID: \"d27d1164-be6c-49db-a7dc-56a74af12c32\") " Jan 20 17:09:12 crc kubenswrapper[4558]: I0120 17:09:12.568393 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d27d1164-be6c-49db-a7dc-56a74af12c32-run-httpd\") pod \"d27d1164-be6c-49db-a7dc-56a74af12c32\" (UID: \"d27d1164-be6c-49db-a7dc-56a74af12c32\") " Jan 20 17:09:12 crc kubenswrapper[4558]: I0120 17:09:12.568696 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d27d1164-be6c-49db-a7dc-56a74af12c32-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "d27d1164-be6c-49db-a7dc-56a74af12c32" (UID: "d27d1164-be6c-49db-a7dc-56a74af12c32"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:09:12 crc kubenswrapper[4558]: I0120 17:09:12.568906 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d27d1164-be6c-49db-a7dc-56a74af12c32-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "d27d1164-be6c-49db-a7dc-56a74af12c32" (UID: "d27d1164-be6c-49db-a7dc-56a74af12c32"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:09:12 crc kubenswrapper[4558]: I0120 17:09:12.569180 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d27d1164-be6c-49db-a7dc-56a74af12c32-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:09:12 crc kubenswrapper[4558]: I0120 17:09:12.569202 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d27d1164-be6c-49db-a7dc-56a74af12c32-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:09:12 crc kubenswrapper[4558]: I0120 17:09:12.575075 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d27d1164-be6c-49db-a7dc-56a74af12c32-kube-api-access-g6q6b" (OuterVolumeSpecName: "kube-api-access-g6q6b") pod "d27d1164-be6c-49db-a7dc-56a74af12c32" (UID: "d27d1164-be6c-49db-a7dc-56a74af12c32"). InnerVolumeSpecName "kube-api-access-g6q6b". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:09:12 crc kubenswrapper[4558]: I0120 17:09:12.575435 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d27d1164-be6c-49db-a7dc-56a74af12c32-scripts" (OuterVolumeSpecName: "scripts") pod "d27d1164-be6c-49db-a7dc-56a74af12c32" (UID: "d27d1164-be6c-49db-a7dc-56a74af12c32"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:09:12 crc kubenswrapper[4558]: I0120 17:09:12.596285 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d27d1164-be6c-49db-a7dc-56a74af12c32-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "d27d1164-be6c-49db-a7dc-56a74af12c32" (UID: "d27d1164-be6c-49db-a7dc-56a74af12c32"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:09:12 crc kubenswrapper[4558]: I0120 17:09:12.630444 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d27d1164-be6c-49db-a7dc-56a74af12c32-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d27d1164-be6c-49db-a7dc-56a74af12c32" (UID: "d27d1164-be6c-49db-a7dc-56a74af12c32"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:09:12 crc kubenswrapper[4558]: I0120 17:09:12.644609 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d27d1164-be6c-49db-a7dc-56a74af12c32-config-data" (OuterVolumeSpecName: "config-data") pod "d27d1164-be6c-49db-a7dc-56a74af12c32" (UID: "d27d1164-be6c-49db-a7dc-56a74af12c32"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:09:12 crc kubenswrapper[4558]: I0120 17:09:12.672040 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g6q6b\" (UniqueName: \"kubernetes.io/projected/d27d1164-be6c-49db-a7dc-56a74af12c32-kube-api-access-g6q6b\") on node \"crc\" DevicePath \"\"" Jan 20 17:09:12 crc kubenswrapper[4558]: I0120 17:09:12.672076 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d27d1164-be6c-49db-a7dc-56a74af12c32-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:09:12 crc kubenswrapper[4558]: I0120 17:09:12.672086 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d27d1164-be6c-49db-a7dc-56a74af12c32-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:09:12 crc kubenswrapper[4558]: I0120 17:09:12.672099 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d27d1164-be6c-49db-a7dc-56a74af12c32-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:09:12 crc kubenswrapper[4558]: I0120 17:09:12.672110 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d27d1164-be6c-49db-a7dc-56a74af12c32-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:09:12 crc kubenswrapper[4558]: I0120 17:09:12.696798 4558 generic.go:334] "Generic (PLEG): container finished" podID="d27d1164-be6c-49db-a7dc-56a74af12c32" containerID="31849b24749f11c992cc77007199bf8bbbdb90dcbcb43ee7a0007d17faecc6e4" exitCode=0 Jan 20 17:09:12 crc kubenswrapper[4558]: I0120 17:09:12.696897 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:12 crc kubenswrapper[4558]: I0120 17:09:12.696859 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"d27d1164-be6c-49db-a7dc-56a74af12c32","Type":"ContainerDied","Data":"31849b24749f11c992cc77007199bf8bbbdb90dcbcb43ee7a0007d17faecc6e4"} Jan 20 17:09:12 crc kubenswrapper[4558]: I0120 17:09:12.697069 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"d27d1164-be6c-49db-a7dc-56a74af12c32","Type":"ContainerDied","Data":"067fc0310ed1a4c04e1d931ec82c2d2dac63120cd52700b0d425c8fd43a6a9e2"} Jan 20 17:09:12 crc kubenswrapper[4558]: I0120 17:09:12.697158 4558 scope.go:117] "RemoveContainer" containerID="4d2c3cb38019daa5f1141a52333f2c8a7b8fff5b5d7d4a134d4be9a4fd78ad94" Jan 20 17:09:12 crc kubenswrapper[4558]: I0120 17:09:12.716529 4558 scope.go:117] "RemoveContainer" containerID="ee56317e16be231d1efe3d77c889580e5893f52e928fa0e2d6f9b6e73674a627" Jan 20 17:09:12 crc kubenswrapper[4558]: I0120 17:09:12.729673 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:09:12 crc kubenswrapper[4558]: I0120 17:09:12.735925 4558 scope.go:117] "RemoveContainer" containerID="c6af4b5bc09c1b45c7198223b309b4b990177d845ec9c86ae19ba014a2f34f00" Jan 20 17:09:12 crc kubenswrapper[4558]: I0120 17:09:12.738355 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:09:12 crc kubenswrapper[4558]: I0120 17:09:12.745707 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:09:12 crc kubenswrapper[4558]: E0120 17:09:12.746070 4558 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="d27d1164-be6c-49db-a7dc-56a74af12c32" containerName="ceilometer-notification-agent" Jan 20 17:09:12 crc kubenswrapper[4558]: I0120 17:09:12.746089 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d27d1164-be6c-49db-a7dc-56a74af12c32" containerName="ceilometer-notification-agent" Jan 20 17:09:12 crc kubenswrapper[4558]: E0120 17:09:12.746118 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d27d1164-be6c-49db-a7dc-56a74af12c32" containerName="sg-core" Jan 20 17:09:12 crc kubenswrapper[4558]: I0120 17:09:12.746125 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d27d1164-be6c-49db-a7dc-56a74af12c32" containerName="sg-core" Jan 20 17:09:12 crc kubenswrapper[4558]: E0120 17:09:12.746143 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d27d1164-be6c-49db-a7dc-56a74af12c32" containerName="ceilometer-central-agent" Jan 20 17:09:12 crc kubenswrapper[4558]: I0120 17:09:12.746148 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d27d1164-be6c-49db-a7dc-56a74af12c32" containerName="ceilometer-central-agent" Jan 20 17:09:12 crc kubenswrapper[4558]: E0120 17:09:12.746156 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d27d1164-be6c-49db-a7dc-56a74af12c32" containerName="proxy-httpd" Jan 20 17:09:12 crc kubenswrapper[4558]: I0120 17:09:12.746175 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d27d1164-be6c-49db-a7dc-56a74af12c32" containerName="proxy-httpd" Jan 20 17:09:12 crc kubenswrapper[4558]: I0120 17:09:12.746323 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d27d1164-be6c-49db-a7dc-56a74af12c32" containerName="ceilometer-notification-agent" Jan 20 17:09:12 crc kubenswrapper[4558]: I0120 17:09:12.746360 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d27d1164-be6c-49db-a7dc-56a74af12c32" containerName="proxy-httpd" Jan 20 17:09:12 crc kubenswrapper[4558]: I0120 17:09:12.746370 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d27d1164-be6c-49db-a7dc-56a74af12c32" containerName="ceilometer-central-agent" Jan 20 17:09:12 crc kubenswrapper[4558]: I0120 17:09:12.746376 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d27d1164-be6c-49db-a7dc-56a74af12c32" containerName="sg-core" Jan 20 17:09:12 crc kubenswrapper[4558]: I0120 17:09:12.747957 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:12 crc kubenswrapper[4558]: I0120 17:09:12.749521 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:09:12 crc kubenswrapper[4558]: I0120 17:09:12.749954 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:09:12 crc kubenswrapper[4558]: I0120 17:09:12.770418 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:09:12 crc kubenswrapper[4558]: I0120 17:09:12.772599 4558 scope.go:117] "RemoveContainer" containerID="31849b24749f11c992cc77007199bf8bbbdb90dcbcb43ee7a0007d17faecc6e4" Jan 20 17:09:12 crc kubenswrapper[4558]: I0120 17:09:12.798935 4558 scope.go:117] "RemoveContainer" containerID="4d2c3cb38019daa5f1141a52333f2c8a7b8fff5b5d7d4a134d4be9a4fd78ad94" Jan 20 17:09:12 crc kubenswrapper[4558]: E0120 17:09:12.799963 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4d2c3cb38019daa5f1141a52333f2c8a7b8fff5b5d7d4a134d4be9a4fd78ad94\": container with ID starting with 4d2c3cb38019daa5f1141a52333f2c8a7b8fff5b5d7d4a134d4be9a4fd78ad94 not found: ID does not exist" containerID="4d2c3cb38019daa5f1141a52333f2c8a7b8fff5b5d7d4a134d4be9a4fd78ad94" Jan 20 17:09:12 crc kubenswrapper[4558]: I0120 17:09:12.800030 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4d2c3cb38019daa5f1141a52333f2c8a7b8fff5b5d7d4a134d4be9a4fd78ad94"} err="failed to get container status \"4d2c3cb38019daa5f1141a52333f2c8a7b8fff5b5d7d4a134d4be9a4fd78ad94\": rpc error: code = NotFound desc = could not find container \"4d2c3cb38019daa5f1141a52333f2c8a7b8fff5b5d7d4a134d4be9a4fd78ad94\": container with ID starting with 4d2c3cb38019daa5f1141a52333f2c8a7b8fff5b5d7d4a134d4be9a4fd78ad94 not found: ID does not exist" Jan 20 17:09:12 crc kubenswrapper[4558]: I0120 17:09:12.800059 4558 scope.go:117] "RemoveContainer" containerID="ee56317e16be231d1efe3d77c889580e5893f52e928fa0e2d6f9b6e73674a627" Jan 20 17:09:12 crc kubenswrapper[4558]: E0120 17:09:12.800552 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ee56317e16be231d1efe3d77c889580e5893f52e928fa0e2d6f9b6e73674a627\": container with ID starting with ee56317e16be231d1efe3d77c889580e5893f52e928fa0e2d6f9b6e73674a627 not found: ID does not exist" containerID="ee56317e16be231d1efe3d77c889580e5893f52e928fa0e2d6f9b6e73674a627" Jan 20 17:09:12 crc kubenswrapper[4558]: I0120 17:09:12.800608 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ee56317e16be231d1efe3d77c889580e5893f52e928fa0e2d6f9b6e73674a627"} err="failed to get container status \"ee56317e16be231d1efe3d77c889580e5893f52e928fa0e2d6f9b6e73674a627\": rpc error: code = NotFound desc = could not find container \"ee56317e16be231d1efe3d77c889580e5893f52e928fa0e2d6f9b6e73674a627\": container with ID starting with ee56317e16be231d1efe3d77c889580e5893f52e928fa0e2d6f9b6e73674a627 not found: ID does not exist" Jan 20 17:09:12 crc kubenswrapper[4558]: I0120 17:09:12.800621 4558 scope.go:117] "RemoveContainer" containerID="c6af4b5bc09c1b45c7198223b309b4b990177d845ec9c86ae19ba014a2f34f00" Jan 20 17:09:12 crc kubenswrapper[4558]: E0120 17:09:12.800892 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: 
code = NotFound desc = could not find container \"c6af4b5bc09c1b45c7198223b309b4b990177d845ec9c86ae19ba014a2f34f00\": container with ID starting with c6af4b5bc09c1b45c7198223b309b4b990177d845ec9c86ae19ba014a2f34f00 not found: ID does not exist" containerID="c6af4b5bc09c1b45c7198223b309b4b990177d845ec9c86ae19ba014a2f34f00" Jan 20 17:09:12 crc kubenswrapper[4558]: I0120 17:09:12.800918 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c6af4b5bc09c1b45c7198223b309b4b990177d845ec9c86ae19ba014a2f34f00"} err="failed to get container status \"c6af4b5bc09c1b45c7198223b309b4b990177d845ec9c86ae19ba014a2f34f00\": rpc error: code = NotFound desc = could not find container \"c6af4b5bc09c1b45c7198223b309b4b990177d845ec9c86ae19ba014a2f34f00\": container with ID starting with c6af4b5bc09c1b45c7198223b309b4b990177d845ec9c86ae19ba014a2f34f00 not found: ID does not exist" Jan 20 17:09:12 crc kubenswrapper[4558]: I0120 17:09:12.800931 4558 scope.go:117] "RemoveContainer" containerID="31849b24749f11c992cc77007199bf8bbbdb90dcbcb43ee7a0007d17faecc6e4" Jan 20 17:09:12 crc kubenswrapper[4558]: E0120 17:09:12.801280 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"31849b24749f11c992cc77007199bf8bbbdb90dcbcb43ee7a0007d17faecc6e4\": container with ID starting with 31849b24749f11c992cc77007199bf8bbbdb90dcbcb43ee7a0007d17faecc6e4 not found: ID does not exist" containerID="31849b24749f11c992cc77007199bf8bbbdb90dcbcb43ee7a0007d17faecc6e4" Jan 20 17:09:12 crc kubenswrapper[4558]: I0120 17:09:12.801299 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"31849b24749f11c992cc77007199bf8bbbdb90dcbcb43ee7a0007d17faecc6e4"} err="failed to get container status \"31849b24749f11c992cc77007199bf8bbbdb90dcbcb43ee7a0007d17faecc6e4\": rpc error: code = NotFound desc = could not find container \"31849b24749f11c992cc77007199bf8bbbdb90dcbcb43ee7a0007d17faecc6e4\": container with ID starting with 31849b24749f11c992cc77007199bf8bbbdb90dcbcb43ee7a0007d17faecc6e4 not found: ID does not exist" Jan 20 17:09:12 crc kubenswrapper[4558]: I0120 17:09:12.878214 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8149079b-aaf9-4993-b7b3-62c379dbc919-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8149079b-aaf9-4993-b7b3-62c379dbc919\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:12 crc kubenswrapper[4558]: I0120 17:09:12.878447 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8149079b-aaf9-4993-b7b3-62c379dbc919-run-httpd\") pod \"ceilometer-0\" (UID: \"8149079b-aaf9-4993-b7b3-62c379dbc919\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:12 crc kubenswrapper[4558]: I0120 17:09:12.878661 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qmlqq\" (UniqueName: \"kubernetes.io/projected/8149079b-aaf9-4993-b7b3-62c379dbc919-kube-api-access-qmlqq\") pod \"ceilometer-0\" (UID: \"8149079b-aaf9-4993-b7b3-62c379dbc919\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:12 crc kubenswrapper[4558]: I0120 17:09:12.878791 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/8149079b-aaf9-4993-b7b3-62c379dbc919-scripts\") pod \"ceilometer-0\" (UID: \"8149079b-aaf9-4993-b7b3-62c379dbc919\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:12 crc kubenswrapper[4558]: I0120 17:09:12.878867 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8149079b-aaf9-4993-b7b3-62c379dbc919-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8149079b-aaf9-4993-b7b3-62c379dbc919\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:12 crc kubenswrapper[4558]: I0120 17:09:12.879050 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8149079b-aaf9-4993-b7b3-62c379dbc919-log-httpd\") pod \"ceilometer-0\" (UID: \"8149079b-aaf9-4993-b7b3-62c379dbc919\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:12 crc kubenswrapper[4558]: I0120 17:09:12.879195 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8149079b-aaf9-4993-b7b3-62c379dbc919-config-data\") pod \"ceilometer-0\" (UID: \"8149079b-aaf9-4993-b7b3-62c379dbc919\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:12 crc kubenswrapper[4558]: I0120 17:09:12.981700 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8149079b-aaf9-4993-b7b3-62c379dbc919-scripts\") pod \"ceilometer-0\" (UID: \"8149079b-aaf9-4993-b7b3-62c379dbc919\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:12 crc kubenswrapper[4558]: I0120 17:09:12.981798 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8149079b-aaf9-4993-b7b3-62c379dbc919-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8149079b-aaf9-4993-b7b3-62c379dbc919\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:12 crc kubenswrapper[4558]: I0120 17:09:12.981885 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8149079b-aaf9-4993-b7b3-62c379dbc919-log-httpd\") pod \"ceilometer-0\" (UID: \"8149079b-aaf9-4993-b7b3-62c379dbc919\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:12 crc kubenswrapper[4558]: I0120 17:09:12.981950 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8149079b-aaf9-4993-b7b3-62c379dbc919-config-data\") pod \"ceilometer-0\" (UID: \"8149079b-aaf9-4993-b7b3-62c379dbc919\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:12 crc kubenswrapper[4558]: I0120 17:09:12.982037 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8149079b-aaf9-4993-b7b3-62c379dbc919-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8149079b-aaf9-4993-b7b3-62c379dbc919\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:12 crc kubenswrapper[4558]: I0120 17:09:12.982682 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8149079b-aaf9-4993-b7b3-62c379dbc919-run-httpd\") pod \"ceilometer-0\" (UID: \"8149079b-aaf9-4993-b7b3-62c379dbc919\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:12 crc kubenswrapper[4558]: I0120 
17:09:12.982810 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qmlqq\" (UniqueName: \"kubernetes.io/projected/8149079b-aaf9-4993-b7b3-62c379dbc919-kube-api-access-qmlqq\") pod \"ceilometer-0\" (UID: \"8149079b-aaf9-4993-b7b3-62c379dbc919\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:12 crc kubenswrapper[4558]: I0120 17:09:12.982914 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8149079b-aaf9-4993-b7b3-62c379dbc919-run-httpd\") pod \"ceilometer-0\" (UID: \"8149079b-aaf9-4993-b7b3-62c379dbc919\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:12 crc kubenswrapper[4558]: I0120 17:09:12.982684 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8149079b-aaf9-4993-b7b3-62c379dbc919-log-httpd\") pod \"ceilometer-0\" (UID: \"8149079b-aaf9-4993-b7b3-62c379dbc919\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:12 crc kubenswrapper[4558]: I0120 17:09:12.986631 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8149079b-aaf9-4993-b7b3-62c379dbc919-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8149079b-aaf9-4993-b7b3-62c379dbc919\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:12 crc kubenswrapper[4558]: I0120 17:09:12.986693 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8149079b-aaf9-4993-b7b3-62c379dbc919-scripts\") pod \"ceilometer-0\" (UID: \"8149079b-aaf9-4993-b7b3-62c379dbc919\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:12 crc kubenswrapper[4558]: I0120 17:09:12.987697 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8149079b-aaf9-4993-b7b3-62c379dbc919-config-data\") pod \"ceilometer-0\" (UID: \"8149079b-aaf9-4993-b7b3-62c379dbc919\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:12 crc kubenswrapper[4558]: I0120 17:09:12.987729 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8149079b-aaf9-4993-b7b3-62c379dbc919-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8149079b-aaf9-4993-b7b3-62c379dbc919\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:12 crc kubenswrapper[4558]: I0120 17:09:12.997197 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qmlqq\" (UniqueName: \"kubernetes.io/projected/8149079b-aaf9-4993-b7b3-62c379dbc919-kube-api-access-qmlqq\") pod \"ceilometer-0\" (UID: \"8149079b-aaf9-4993-b7b3-62c379dbc919\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:13 crc kubenswrapper[4558]: I0120 17:09:13.074197 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:13 crc kubenswrapper[4558]: I0120 17:09:13.350285 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:09:13 crc kubenswrapper[4558]: I0120 17:09:13.495147 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:09:13 crc kubenswrapper[4558]: W0120 17:09:13.498700 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8149079b_aaf9_4993_b7b3_62c379dbc919.slice/crio-1d082ba7967af3832a810efa0b3d087422bcb75cc144d69b39058fa5f06c95ec WatchSource:0}: Error finding container 1d082ba7967af3832a810efa0b3d087422bcb75cc144d69b39058fa5f06c95ec: Status 404 returned error can't find the container with id 1d082ba7967af3832a810efa0b3d087422bcb75cc144d69b39058fa5f06c95ec Jan 20 17:09:13 crc kubenswrapper[4558]: I0120 17:09:13.709142 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"8149079b-aaf9-4993-b7b3-62c379dbc919","Type":"ContainerStarted","Data":"1d082ba7967af3832a810efa0b3d087422bcb75cc144d69b39058fa5f06c95ec"} Jan 20 17:09:13 crc kubenswrapper[4558]: I0120 17:09:13.780025 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell0-cell-mapping-kn965"] Jan 20 17:09:13 crc kubenswrapper[4558]: I0120 17:09:13.781216 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-kn965" Jan 20 17:09:13 crc kubenswrapper[4558]: I0120 17:09:13.784039 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-manage-config-data" Jan 20 17:09:13 crc kubenswrapper[4558]: I0120 17:09:13.788531 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-manage-scripts" Jan 20 17:09:13 crc kubenswrapper[4558]: I0120 17:09:13.794419 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-cell-mapping-kn965"] Jan 20 17:09:13 crc kubenswrapper[4558]: I0120 17:09:13.904512 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d0bb0682-179e-4316-98d1-55c1912cc154-scripts\") pod \"nova-cell0-cell-mapping-kn965\" (UID: \"d0bb0682-179e-4316-98d1-55c1912cc154\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-kn965" Jan 20 17:09:13 crc kubenswrapper[4558]: I0120 17:09:13.904567 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cwdqd\" (UniqueName: \"kubernetes.io/projected/d0bb0682-179e-4316-98d1-55c1912cc154-kube-api-access-cwdqd\") pod \"nova-cell0-cell-mapping-kn965\" (UID: \"d0bb0682-179e-4316-98d1-55c1912cc154\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-kn965" Jan 20 17:09:13 crc kubenswrapper[4558]: I0120 17:09:13.904606 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0bb0682-179e-4316-98d1-55c1912cc154-config-data\") pod \"nova-cell0-cell-mapping-kn965\" (UID: \"d0bb0682-179e-4316-98d1-55c1912cc154\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-kn965" Jan 20 17:09:13 crc kubenswrapper[4558]: I0120 17:09:13.904625 4558 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0bb0682-179e-4316-98d1-55c1912cc154-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-kn965\" (UID: \"d0bb0682-179e-4316-98d1-55c1912cc154\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-kn965" Jan 20 17:09:13 crc kubenswrapper[4558]: I0120 17:09:13.931360 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:09:13 crc kubenswrapper[4558]: I0120 17:09:13.932757 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:09:13 crc kubenswrapper[4558]: I0120 17:09:13.943629 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:09:13 crc kubenswrapper[4558]: I0120 17:09:13.947525 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-api-config-data" Jan 20 17:09:13 crc kubenswrapper[4558]: I0120 17:09:13.961483 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:09:13 crc kubenswrapper[4558]: I0120 17:09:13.962654 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:09:13 crc kubenswrapper[4558]: I0120 17:09:13.963961 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-scheduler-config-data" Jan 20 17:09:13 crc kubenswrapper[4558]: I0120 17:09:13.986156 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:09:14 crc kubenswrapper[4558]: I0120 17:09:14.013578 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d0bb0682-179e-4316-98d1-55c1912cc154-scripts\") pod \"nova-cell0-cell-mapping-kn965\" (UID: \"d0bb0682-179e-4316-98d1-55c1912cc154\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-kn965" Jan 20 17:09:14 crc kubenswrapper[4558]: I0120 17:09:14.013684 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cwdqd\" (UniqueName: \"kubernetes.io/projected/d0bb0682-179e-4316-98d1-55c1912cc154-kube-api-access-cwdqd\") pod \"nova-cell0-cell-mapping-kn965\" (UID: \"d0bb0682-179e-4316-98d1-55c1912cc154\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-kn965" Jan 20 17:09:14 crc kubenswrapper[4558]: I0120 17:09:14.013769 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0bb0682-179e-4316-98d1-55c1912cc154-config-data\") pod \"nova-cell0-cell-mapping-kn965\" (UID: \"d0bb0682-179e-4316-98d1-55c1912cc154\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-kn965" Jan 20 17:09:14 crc kubenswrapper[4558]: I0120 17:09:14.013796 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0bb0682-179e-4316-98d1-55c1912cc154-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-kn965\" (UID: \"d0bb0682-179e-4316-98d1-55c1912cc154\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-kn965" Jan 20 17:09:14 crc kubenswrapper[4558]: I0120 17:09:14.046751 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0bb0682-179e-4316-98d1-55c1912cc154-config-data\") pod 
\"nova-cell0-cell-mapping-kn965\" (UID: \"d0bb0682-179e-4316-98d1-55c1912cc154\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-kn965" Jan 20 17:09:14 crc kubenswrapper[4558]: I0120 17:09:14.047875 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d0bb0682-179e-4316-98d1-55c1912cc154-scripts\") pod \"nova-cell0-cell-mapping-kn965\" (UID: \"d0bb0682-179e-4316-98d1-55c1912cc154\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-kn965" Jan 20 17:09:14 crc kubenswrapper[4558]: I0120 17:09:14.052671 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cwdqd\" (UniqueName: \"kubernetes.io/projected/d0bb0682-179e-4316-98d1-55c1912cc154-kube-api-access-cwdqd\") pod \"nova-cell0-cell-mapping-kn965\" (UID: \"d0bb0682-179e-4316-98d1-55c1912cc154\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-kn965" Jan 20 17:09:14 crc kubenswrapper[4558]: I0120 17:09:14.068258 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0bb0682-179e-4316-98d1-55c1912cc154-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-kn965\" (UID: \"d0bb0682-179e-4316-98d1-55c1912cc154\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-kn965" Jan 20 17:09:14 crc kubenswrapper[4558]: I0120 17:09:14.088533 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:09:14 crc kubenswrapper[4558]: I0120 17:09:14.090873 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:14 crc kubenswrapper[4558]: I0120 17:09:14.093729 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-metadata-config-data" Jan 20 17:09:14 crc kubenswrapper[4558]: I0120 17:09:14.112260 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:09:14 crc kubenswrapper[4558]: I0120 17:09:14.113048 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-kn965" Jan 20 17:09:14 crc kubenswrapper[4558]: I0120 17:09:14.115617 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f640bcab-f4ac-417c-86e6-af44f26a0ca9-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"f640bcab-f4ac-417c-86e6-af44f26a0ca9\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:09:14 crc kubenswrapper[4558]: I0120 17:09:14.115699 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p4l5f\" (UniqueName: \"kubernetes.io/projected/660d7b2b-b480-41c1-8e10-d2216f28c591-kube-api-access-p4l5f\") pod \"nova-api-0\" (UID: \"660d7b2b-b480-41c1-8e10-d2216f28c591\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:09:14 crc kubenswrapper[4558]: I0120 17:09:14.115743 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/660d7b2b-b480-41c1-8e10-d2216f28c591-config-data\") pod \"nova-api-0\" (UID: \"660d7b2b-b480-41c1-8e10-d2216f28c591\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:09:14 crc kubenswrapper[4558]: I0120 17:09:14.115779 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/660d7b2b-b480-41c1-8e10-d2216f28c591-logs\") pod \"nova-api-0\" (UID: \"660d7b2b-b480-41c1-8e10-d2216f28c591\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:09:14 crc kubenswrapper[4558]: I0120 17:09:14.115806 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f640bcab-f4ac-417c-86e6-af44f26a0ca9-config-data\") pod \"nova-scheduler-0\" (UID: \"f640bcab-f4ac-417c-86e6-af44f26a0ca9\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:09:14 crc kubenswrapper[4558]: I0120 17:09:14.115835 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/660d7b2b-b480-41c1-8e10-d2216f28c591-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"660d7b2b-b480-41c1-8e10-d2216f28c591\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:09:14 crc kubenswrapper[4558]: I0120 17:09:14.115873 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sr5l5\" (UniqueName: \"kubernetes.io/projected/f640bcab-f4ac-417c-86e6-af44f26a0ca9-kube-api-access-sr5l5\") pod \"nova-scheduler-0\" (UID: \"f640bcab-f4ac-417c-86e6-af44f26a0ca9\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:09:14 crc kubenswrapper[4558]: I0120 17:09:14.153816 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:09:14 crc kubenswrapper[4558]: I0120 17:09:14.157567 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:09:14 crc kubenswrapper[4558]: I0120 17:09:14.160466 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-novncproxy-config-data" Jan 20 17:09:14 crc kubenswrapper[4558]: I0120 17:09:14.170960 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:09:14 crc kubenswrapper[4558]: I0120 17:09:14.217997 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b7737aa-e87a-4ddc-b056-d6a822b7ed03-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"8b7737aa-e87a-4ddc-b056-d6a822b7ed03\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:14 crc kubenswrapper[4558]: I0120 17:09:14.218047 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f640bcab-f4ac-417c-86e6-af44f26a0ca9-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"f640bcab-f4ac-417c-86e6-af44f26a0ca9\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:09:14 crc kubenswrapper[4558]: I0120 17:09:14.218096 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p4l5f\" (UniqueName: \"kubernetes.io/projected/660d7b2b-b480-41c1-8e10-d2216f28c591-kube-api-access-p4l5f\") pod \"nova-api-0\" (UID: \"660d7b2b-b480-41c1-8e10-d2216f28c591\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:09:14 crc kubenswrapper[4558]: I0120 17:09:14.218231 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/660d7b2b-b480-41c1-8e10-d2216f28c591-config-data\") pod \"nova-api-0\" (UID: \"660d7b2b-b480-41c1-8e10-d2216f28c591\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:09:14 crc kubenswrapper[4558]: I0120 17:09:14.218257 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bvn8n\" (UniqueName: \"kubernetes.io/projected/8b7737aa-e87a-4ddc-b056-d6a822b7ed03-kube-api-access-bvn8n\") pod \"nova-metadata-0\" (UID: \"8b7737aa-e87a-4ddc-b056-d6a822b7ed03\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:14 crc kubenswrapper[4558]: I0120 17:09:14.218302 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/660d7b2b-b480-41c1-8e10-d2216f28c591-logs\") pod \"nova-api-0\" (UID: \"660d7b2b-b480-41c1-8e10-d2216f28c591\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:09:14 crc kubenswrapper[4558]: I0120 17:09:14.218354 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f640bcab-f4ac-417c-86e6-af44f26a0ca9-config-data\") pod \"nova-scheduler-0\" (UID: \"f640bcab-f4ac-417c-86e6-af44f26a0ca9\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:09:14 crc kubenswrapper[4558]: I0120 17:09:14.218405 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8b7737aa-e87a-4ddc-b056-d6a822b7ed03-config-data\") pod \"nova-metadata-0\" (UID: \"8b7737aa-e87a-4ddc-b056-d6a822b7ed03\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:14 crc kubenswrapper[4558]: I0120 17:09:14.218425 4558 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/660d7b2b-b480-41c1-8e10-d2216f28c591-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"660d7b2b-b480-41c1-8e10-d2216f28c591\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:09:14 crc kubenswrapper[4558]: I0120 17:09:14.218457 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8b7737aa-e87a-4ddc-b056-d6a822b7ed03-logs\") pod \"nova-metadata-0\" (UID: \"8b7737aa-e87a-4ddc-b056-d6a822b7ed03\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:14 crc kubenswrapper[4558]: I0120 17:09:14.218514 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sr5l5\" (UniqueName: \"kubernetes.io/projected/f640bcab-f4ac-417c-86e6-af44f26a0ca9-kube-api-access-sr5l5\") pod \"nova-scheduler-0\" (UID: \"f640bcab-f4ac-417c-86e6-af44f26a0ca9\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:09:14 crc kubenswrapper[4558]: I0120 17:09:14.222146 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/660d7b2b-b480-41c1-8e10-d2216f28c591-logs\") pod \"nova-api-0\" (UID: \"660d7b2b-b480-41c1-8e10-d2216f28c591\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:09:14 crc kubenswrapper[4558]: I0120 17:09:14.225473 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f640bcab-f4ac-417c-86e6-af44f26a0ca9-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"f640bcab-f4ac-417c-86e6-af44f26a0ca9\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:09:14 crc kubenswrapper[4558]: I0120 17:09:14.225925 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f640bcab-f4ac-417c-86e6-af44f26a0ca9-config-data\") pod \"nova-scheduler-0\" (UID: \"f640bcab-f4ac-417c-86e6-af44f26a0ca9\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:09:14 crc kubenswrapper[4558]: I0120 17:09:14.227707 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/660d7b2b-b480-41c1-8e10-d2216f28c591-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"660d7b2b-b480-41c1-8e10-d2216f28c591\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:09:14 crc kubenswrapper[4558]: I0120 17:09:14.234448 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sr5l5\" (UniqueName: \"kubernetes.io/projected/f640bcab-f4ac-417c-86e6-af44f26a0ca9-kube-api-access-sr5l5\") pod \"nova-scheduler-0\" (UID: \"f640bcab-f4ac-417c-86e6-af44f26a0ca9\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:09:14 crc kubenswrapper[4558]: I0120 17:09:14.238667 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p4l5f\" (UniqueName: \"kubernetes.io/projected/660d7b2b-b480-41c1-8e10-d2216f28c591-kube-api-access-p4l5f\") pod \"nova-api-0\" (UID: \"660d7b2b-b480-41c1-8e10-d2216f28c591\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:09:14 crc kubenswrapper[4558]: I0120 17:09:14.248325 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/660d7b2b-b480-41c1-8e10-d2216f28c591-config-data\") pod \"nova-api-0\" (UID: \"660d7b2b-b480-41c1-8e10-d2216f28c591\") " 
pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:09:14 crc kubenswrapper[4558]: I0120 17:09:14.250114 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:09:14 crc kubenswrapper[4558]: I0120 17:09:14.313923 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:09:14 crc kubenswrapper[4558]: I0120 17:09:14.323855 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qk6sl\" (UniqueName: \"kubernetes.io/projected/85104385-0429-4a08-a55d-1e863e8fe32a-kube-api-access-qk6sl\") pod \"nova-cell1-novncproxy-0\" (UID: \"85104385-0429-4a08-a55d-1e863e8fe32a\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:09:14 crc kubenswrapper[4558]: I0120 17:09:14.324535 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85104385-0429-4a08-a55d-1e863e8fe32a-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"85104385-0429-4a08-a55d-1e863e8fe32a\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:09:14 crc kubenswrapper[4558]: I0120 17:09:14.324605 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b7737aa-e87a-4ddc-b056-d6a822b7ed03-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"8b7737aa-e87a-4ddc-b056-d6a822b7ed03\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:14 crc kubenswrapper[4558]: I0120 17:09:14.324763 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bvn8n\" (UniqueName: \"kubernetes.io/projected/8b7737aa-e87a-4ddc-b056-d6a822b7ed03-kube-api-access-bvn8n\") pod \"nova-metadata-0\" (UID: \"8b7737aa-e87a-4ddc-b056-d6a822b7ed03\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:14 crc kubenswrapper[4558]: I0120 17:09:14.324797 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85104385-0429-4a08-a55d-1e863e8fe32a-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"85104385-0429-4a08-a55d-1e863e8fe32a\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:09:14 crc kubenswrapper[4558]: I0120 17:09:14.324934 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8b7737aa-e87a-4ddc-b056-d6a822b7ed03-config-data\") pod \"nova-metadata-0\" (UID: \"8b7737aa-e87a-4ddc-b056-d6a822b7ed03\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:14 crc kubenswrapper[4558]: I0120 17:09:14.324988 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8b7737aa-e87a-4ddc-b056-d6a822b7ed03-logs\") pod \"nova-metadata-0\" (UID: \"8b7737aa-e87a-4ddc-b056-d6a822b7ed03\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:14 crc kubenswrapper[4558]: I0120 17:09:14.329683 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8b7737aa-e87a-4ddc-b056-d6a822b7ed03-logs\") pod \"nova-metadata-0\" (UID: \"8b7737aa-e87a-4ddc-b056-d6a822b7ed03\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:14 crc kubenswrapper[4558]: I0120 17:09:14.330087 4558 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8b7737aa-e87a-4ddc-b056-d6a822b7ed03-config-data\") pod \"nova-metadata-0\" (UID: \"8b7737aa-e87a-4ddc-b056-d6a822b7ed03\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:14 crc kubenswrapper[4558]: I0120 17:09:14.332609 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b7737aa-e87a-4ddc-b056-d6a822b7ed03-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"8b7737aa-e87a-4ddc-b056-d6a822b7ed03\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:14 crc kubenswrapper[4558]: I0120 17:09:14.347279 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bvn8n\" (UniqueName: \"kubernetes.io/projected/8b7737aa-e87a-4ddc-b056-d6a822b7ed03-kube-api-access-bvn8n\") pod \"nova-metadata-0\" (UID: \"8b7737aa-e87a-4ddc-b056-d6a822b7ed03\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:14 crc kubenswrapper[4558]: I0120 17:09:14.426656 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qk6sl\" (UniqueName: \"kubernetes.io/projected/85104385-0429-4a08-a55d-1e863e8fe32a-kube-api-access-qk6sl\") pod \"nova-cell1-novncproxy-0\" (UID: \"85104385-0429-4a08-a55d-1e863e8fe32a\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:09:14 crc kubenswrapper[4558]: I0120 17:09:14.426701 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85104385-0429-4a08-a55d-1e863e8fe32a-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"85104385-0429-4a08-a55d-1e863e8fe32a\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:09:14 crc kubenswrapper[4558]: I0120 17:09:14.426770 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85104385-0429-4a08-a55d-1e863e8fe32a-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"85104385-0429-4a08-a55d-1e863e8fe32a\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:09:14 crc kubenswrapper[4558]: I0120 17:09:14.434108 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85104385-0429-4a08-a55d-1e863e8fe32a-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"85104385-0429-4a08-a55d-1e863e8fe32a\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:09:14 crc kubenswrapper[4558]: I0120 17:09:14.439377 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:14 crc kubenswrapper[4558]: I0120 17:09:14.441751 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85104385-0429-4a08-a55d-1e863e8fe32a-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"85104385-0429-4a08-a55d-1e863e8fe32a\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:09:14 crc kubenswrapper[4558]: I0120 17:09:14.450256 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qk6sl\" (UniqueName: \"kubernetes.io/projected/85104385-0429-4a08-a55d-1e863e8fe32a-kube-api-access-qk6sl\") pod \"nova-cell1-novncproxy-0\" (UID: \"85104385-0429-4a08-a55d-1e863e8fe32a\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:09:14 crc kubenswrapper[4558]: I0120 17:09:14.519438 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:09:14 crc kubenswrapper[4558]: I0120 17:09:14.586109 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d27d1164-be6c-49db-a7dc-56a74af12c32" path="/var/lib/kubelet/pods/d27d1164-be6c-49db-a7dc-56a74af12c32/volumes" Jan 20 17:09:14 crc kubenswrapper[4558]: I0120 17:09:14.587011 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-cell-mapping-kn965"] Jan 20 17:09:14 crc kubenswrapper[4558]: I0120 17:09:14.727128 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-kn965" event={"ID":"d0bb0682-179e-4316-98d1-55c1912cc154","Type":"ContainerStarted","Data":"ace38f7a3bbcb96e064278e6d251883adf8718863525236ed7f914f19ee89e97"} Jan 20 17:09:14 crc kubenswrapper[4558]: I0120 17:09:14.741885 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:09:14 crc kubenswrapper[4558]: I0120 17:09:14.742426 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"8149079b-aaf9-4993-b7b3-62c379dbc919","Type":"ContainerStarted","Data":"bcf08c42c09beee2b414cfe87eb9584080e4a7bc1b0ffc6999ff8ed107f5c12e"} Jan 20 17:09:14 crc kubenswrapper[4558]: I0120 17:09:14.820110 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:09:14 crc kubenswrapper[4558]: I0120 17:09:14.892893 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-db-sync-7l69k"] Jan 20 17:09:14 crc kubenswrapper[4558]: I0120 17:09:14.894036 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-7l69k" Jan 20 17:09:14 crc kubenswrapper[4558]: I0120 17:09:14.896863 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-conductor-scripts" Jan 20 17:09:14 crc kubenswrapper[4558]: I0120 17:09:14.897072 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-conductor-config-data" Jan 20 17:09:14 crc kubenswrapper[4558]: I0120 17:09:14.901655 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-db-sync-7l69k"] Jan 20 17:09:14 crc kubenswrapper[4558]: I0120 17:09:14.934215 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:09:15 crc kubenswrapper[4558]: I0120 17:09:15.035335 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:09:15 crc kubenswrapper[4558]: I0120 17:09:15.041503 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/11c55353-18fe-4039-b437-e642c5ff122e-config-data\") pod \"nova-cell1-conductor-db-sync-7l69k\" (UID: \"11c55353-18fe-4039-b437-e642c5ff122e\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-7l69k" Jan 20 17:09:15 crc kubenswrapper[4558]: I0120 17:09:15.041601 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bjxx9\" (UniqueName: \"kubernetes.io/projected/11c55353-18fe-4039-b437-e642c5ff122e-kube-api-access-bjxx9\") pod \"nova-cell1-conductor-db-sync-7l69k\" (UID: \"11c55353-18fe-4039-b437-e642c5ff122e\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-7l69k" Jan 20 17:09:15 crc kubenswrapper[4558]: I0120 17:09:15.041914 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11c55353-18fe-4039-b437-e642c5ff122e-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-7l69k\" (UID: \"11c55353-18fe-4039-b437-e642c5ff122e\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-7l69k" Jan 20 17:09:15 crc kubenswrapper[4558]: I0120 17:09:15.041945 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/11c55353-18fe-4039-b437-e642c5ff122e-scripts\") pod \"nova-cell1-conductor-db-sync-7l69k\" (UID: \"11c55353-18fe-4039-b437-e642c5ff122e\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-7l69k" Jan 20 17:09:15 crc kubenswrapper[4558]: I0120 17:09:15.144334 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11c55353-18fe-4039-b437-e642c5ff122e-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-7l69k\" (UID: \"11c55353-18fe-4039-b437-e642c5ff122e\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-7l69k" Jan 20 17:09:15 crc kubenswrapper[4558]: I0120 17:09:15.144383 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/11c55353-18fe-4039-b437-e642c5ff122e-scripts\") pod \"nova-cell1-conductor-db-sync-7l69k\" (UID: \"11c55353-18fe-4039-b437-e642c5ff122e\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-7l69k" Jan 20 17:09:15 crc kubenswrapper[4558]: 
I0120 17:09:15.144492 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/11c55353-18fe-4039-b437-e642c5ff122e-config-data\") pod \"nova-cell1-conductor-db-sync-7l69k\" (UID: \"11c55353-18fe-4039-b437-e642c5ff122e\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-7l69k" Jan 20 17:09:15 crc kubenswrapper[4558]: I0120 17:09:15.144547 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bjxx9\" (UniqueName: \"kubernetes.io/projected/11c55353-18fe-4039-b437-e642c5ff122e-kube-api-access-bjxx9\") pod \"nova-cell1-conductor-db-sync-7l69k\" (UID: \"11c55353-18fe-4039-b437-e642c5ff122e\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-7l69k" Jan 20 17:09:15 crc kubenswrapper[4558]: I0120 17:09:15.148034 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11c55353-18fe-4039-b437-e642c5ff122e-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-7l69k\" (UID: \"11c55353-18fe-4039-b437-e642c5ff122e\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-7l69k" Jan 20 17:09:15 crc kubenswrapper[4558]: I0120 17:09:15.152301 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/11c55353-18fe-4039-b437-e642c5ff122e-scripts\") pod \"nova-cell1-conductor-db-sync-7l69k\" (UID: \"11c55353-18fe-4039-b437-e642c5ff122e\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-7l69k" Jan 20 17:09:15 crc kubenswrapper[4558]: I0120 17:09:15.161042 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bjxx9\" (UniqueName: \"kubernetes.io/projected/11c55353-18fe-4039-b437-e642c5ff122e-kube-api-access-bjxx9\") pod \"nova-cell1-conductor-db-sync-7l69k\" (UID: \"11c55353-18fe-4039-b437-e642c5ff122e\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-7l69k" Jan 20 17:09:15 crc kubenswrapper[4558]: I0120 17:09:15.164252 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/11c55353-18fe-4039-b437-e642c5ff122e-config-data\") pod \"nova-cell1-conductor-db-sync-7l69k\" (UID: \"11c55353-18fe-4039-b437-e642c5ff122e\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-7l69k" Jan 20 17:09:15 crc kubenswrapper[4558]: I0120 17:09:15.225802 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-7l69k" Jan 20 17:09:15 crc kubenswrapper[4558]: I0120 17:09:15.712913 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-db-sync-7l69k"] Jan 20 17:09:15 crc kubenswrapper[4558]: I0120 17:09:15.760888 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"f640bcab-f4ac-417c-86e6-af44f26a0ca9","Type":"ContainerStarted","Data":"df65964e41193f502824f8b897729832e4e3dd70022cdd614c0000b9be9cf213"} Jan 20 17:09:15 crc kubenswrapper[4558]: I0120 17:09:15.760924 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"f640bcab-f4ac-417c-86e6-af44f26a0ca9","Type":"ContainerStarted","Data":"00f862df32f86bb023d0e3bd9e9b04f6266ab1cdd61813e8b14bf474e0509a32"} Jan 20 17:09:15 crc kubenswrapper[4558]: I0120 17:09:15.768597 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"8149079b-aaf9-4993-b7b3-62c379dbc919","Type":"ContainerStarted","Data":"ce253fe8c8588e6acd46ebabc10336d56ff71dbf40702384572f4a2298fb0916"} Jan 20 17:09:15 crc kubenswrapper[4558]: I0120 17:09:15.777525 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"8b7737aa-e87a-4ddc-b056-d6a822b7ed03","Type":"ContainerStarted","Data":"45ae7f3768271e59277b9b48f984fa8761929d51c4d3f3fb549c86125c60e69c"} Jan 20 17:09:15 crc kubenswrapper[4558]: I0120 17:09:15.777572 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"8b7737aa-e87a-4ddc-b056-d6a822b7ed03","Type":"ContainerStarted","Data":"85ca12a8000ffa1a6ba4458dc2a9b0550acf77360b933d97cfbf8f222e903611"} Jan 20 17:09:15 crc kubenswrapper[4558]: I0120 17:09:15.777589 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"8b7737aa-e87a-4ddc-b056-d6a822b7ed03","Type":"ContainerStarted","Data":"f26cc9213c40b065f6bd06dc82f1e5ad087fd0cb16476f5b1b0f03474cfb1b67"} Jan 20 17:09:15 crc kubenswrapper[4558]: I0120 17:09:15.784461 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-scheduler-0" podStartSLOduration=2.784447888 podStartE2EDuration="2.784447888s" podCreationTimestamp="2026-01-20 17:09:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:09:15.775375739 +0000 UTC m=+1649.535713707" watchObservedRunningTime="2026-01-20 17:09:15.784447888 +0000 UTC m=+1649.544785856" Jan 20 17:09:15 crc kubenswrapper[4558]: I0120 17:09:15.786447 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"85104385-0429-4a08-a55d-1e863e8fe32a","Type":"ContainerStarted","Data":"b369b0ff7e6827c44421f58cf5cd33c472d20c6d76b0c62d43db5e0a7c1f9d2a"} Jan 20 17:09:15 crc kubenswrapper[4558]: I0120 17:09:15.786481 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"85104385-0429-4a08-a55d-1e863e8fe32a","Type":"ContainerStarted","Data":"b3f86ef0cc35adc843b935d5699240e51beb598cc251a460954dd1e4212cc549"} Jan 20 17:09:15 crc kubenswrapper[4558]: I0120 17:09:15.794993 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-7l69k" 
event={"ID":"11c55353-18fe-4039-b437-e642c5ff122e","Type":"ContainerStarted","Data":"e208a722fc2fe4ffeabd0b2d7083d3c2af97c4ac991ea1ecaf7242f293485fbd"} Jan 20 17:09:15 crc kubenswrapper[4558]: I0120 17:09:15.795129 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-metadata-0" podStartSLOduration=1.7950991269999999 podStartE2EDuration="1.795099127s" podCreationTimestamp="2026-01-20 17:09:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:09:15.790586823 +0000 UTC m=+1649.550924790" watchObservedRunningTime="2026-01-20 17:09:15.795099127 +0000 UTC m=+1649.555437094" Jan 20 17:09:15 crc kubenswrapper[4558]: I0120 17:09:15.799454 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-kn965" event={"ID":"d0bb0682-179e-4316-98d1-55c1912cc154","Type":"ContainerStarted","Data":"05aa1119c41b498e17a30471e75a8154754e7a84508cb65cee9fc6ef9d057e44"} Jan 20 17:09:15 crc kubenswrapper[4558]: I0120 17:09:15.816508 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"660d7b2b-b480-41c1-8e10-d2216f28c591","Type":"ContainerStarted","Data":"d9c47772c06d9f094ea0cce3a6bf4fbc7f43d6366d44936501ab735811412dbf"} Jan 20 17:09:15 crc kubenswrapper[4558]: I0120 17:09:15.816537 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"660d7b2b-b480-41c1-8e10-d2216f28c591","Type":"ContainerStarted","Data":"e20361edf85efa6d348a94ffef199107af85dd94257c9b4b7f0a6aaab545e2d2"} Jan 20 17:09:15 crc kubenswrapper[4558]: I0120 17:09:15.816549 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"660d7b2b-b480-41c1-8e10-d2216f28c591","Type":"ContainerStarted","Data":"4406990f3bdc3b75d7df8b4f94217f405513540b1758b9a6762f8f796b319451"} Jan 20 17:09:15 crc kubenswrapper[4558]: I0120 17:09:15.832302 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" podStartSLOduration=1.8322862359999998 podStartE2EDuration="1.832286236s" podCreationTimestamp="2026-01-20 17:09:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:09:15.809461155 +0000 UTC m=+1649.569799122" watchObservedRunningTime="2026-01-20 17:09:15.832286236 +0000 UTC m=+1649.592624203" Jan 20 17:09:15 crc kubenswrapper[4558]: I0120 17:09:15.840780 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-kn965" podStartSLOduration=2.840759739 podStartE2EDuration="2.840759739s" podCreationTimestamp="2026-01-20 17:09:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:09:15.821454066 +0000 UTC m=+1649.581792034" watchObservedRunningTime="2026-01-20 17:09:15.840759739 +0000 UTC m=+1649.601097706" Jan 20 17:09:15 crc kubenswrapper[4558]: I0120 17:09:15.855549 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-api-0" podStartSLOduration=2.855534252 podStartE2EDuration="2.855534252s" podCreationTimestamp="2026-01-20 17:09:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2026-01-20 17:09:15.833926871 +0000 UTC m=+1649.594264838" watchObservedRunningTime="2026-01-20 17:09:15.855534252 +0000 UTC m=+1649.615872219" Jan 20 17:09:16 crc kubenswrapper[4558]: I0120 17:09:16.575191 4558 scope.go:117] "RemoveContainer" containerID="a711a286282347590079d2447ef37fa9ed0f11085d6d7a65f85e65c1c2df608f" Jan 20 17:09:16 crc kubenswrapper[4558]: E0120 17:09:16.575920 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:09:16 crc kubenswrapper[4558]: I0120 17:09:16.690967 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:09:16 crc kubenswrapper[4558]: I0120 17:09:16.696658 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:09:16 crc kubenswrapper[4558]: I0120 17:09:16.828382 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"8149079b-aaf9-4993-b7b3-62c379dbc919","Type":"ContainerStarted","Data":"c39e33e6d6b2482550c133b107a28667770de5c51ec6fdd129bfe9594a2dc923"} Jan 20 17:09:16 crc kubenswrapper[4558]: I0120 17:09:16.831476 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-7l69k" event={"ID":"11c55353-18fe-4039-b437-e642c5ff122e","Type":"ContainerStarted","Data":"b47901b596b370703e4ec143a0305eb1f5aa2874468f01a805b913daadf8f137"} Jan 20 17:09:16 crc kubenswrapper[4558]: I0120 17:09:16.851879 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-7l69k" podStartSLOduration=2.851859944 podStartE2EDuration="2.851859944s" podCreationTimestamp="2026-01-20 17:09:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:09:16.843103378 +0000 UTC m=+1650.603441345" watchObservedRunningTime="2026-01-20 17:09:16.851859944 +0000 UTC m=+1650.612197911" Jan 20 17:09:17 crc kubenswrapper[4558]: I0120 17:09:17.857753 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" podUID="85104385-0429-4a08-a55d-1e863e8fe32a" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://b369b0ff7e6827c44421f58cf5cd33c472d20c6d76b0c62d43db5e0a7c1f9d2a" gracePeriod=30 Jan 20 17:09:17 crc kubenswrapper[4558]: I0120 17:09:17.858307 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="8b7737aa-e87a-4ddc-b056-d6a822b7ed03" containerName="nova-metadata-log" containerID="cri-o://85ca12a8000ffa1a6ba4458dc2a9b0550acf77360b933d97cfbf8f222e903611" gracePeriod=30 Jan 20 17:09:17 crc kubenswrapper[4558]: I0120 17:09:17.858613 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="8b7737aa-e87a-4ddc-b056-d6a822b7ed03" containerName="nova-metadata-metadata" containerID="cri-o://45ae7f3768271e59277b9b48f984fa8761929d51c4d3f3fb549c86125c60e69c" gracePeriod=30 Jan 20 17:09:18 crc 
kubenswrapper[4558]: I0120 17:09:18.491780 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:18 crc kubenswrapper[4558]: I0120 17:09:18.612449 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:09:18 crc kubenswrapper[4558]: I0120 17:09:18.641533 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b7737aa-e87a-4ddc-b056-d6a822b7ed03-combined-ca-bundle\") pod \"8b7737aa-e87a-4ddc-b056-d6a822b7ed03\" (UID: \"8b7737aa-e87a-4ddc-b056-d6a822b7ed03\") " Jan 20 17:09:18 crc kubenswrapper[4558]: I0120 17:09:18.641639 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8b7737aa-e87a-4ddc-b056-d6a822b7ed03-config-data\") pod \"8b7737aa-e87a-4ddc-b056-d6a822b7ed03\" (UID: \"8b7737aa-e87a-4ddc-b056-d6a822b7ed03\") " Jan 20 17:09:18 crc kubenswrapper[4558]: I0120 17:09:18.642083 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8b7737aa-e87a-4ddc-b056-d6a822b7ed03-logs\") pod \"8b7737aa-e87a-4ddc-b056-d6a822b7ed03\" (UID: \"8b7737aa-e87a-4ddc-b056-d6a822b7ed03\") " Jan 20 17:09:18 crc kubenswrapper[4558]: I0120 17:09:18.642393 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bvn8n\" (UniqueName: \"kubernetes.io/projected/8b7737aa-e87a-4ddc-b056-d6a822b7ed03-kube-api-access-bvn8n\") pod \"8b7737aa-e87a-4ddc-b056-d6a822b7ed03\" (UID: \"8b7737aa-e87a-4ddc-b056-d6a822b7ed03\") " Jan 20 17:09:18 crc kubenswrapper[4558]: I0120 17:09:18.642532 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8b7737aa-e87a-4ddc-b056-d6a822b7ed03-logs" (OuterVolumeSpecName: "logs") pod "8b7737aa-e87a-4ddc-b056-d6a822b7ed03" (UID: "8b7737aa-e87a-4ddc-b056-d6a822b7ed03"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:09:18 crc kubenswrapper[4558]: I0120 17:09:18.643409 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8b7737aa-e87a-4ddc-b056-d6a822b7ed03-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:09:18 crc kubenswrapper[4558]: I0120 17:09:18.648335 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8b7737aa-e87a-4ddc-b056-d6a822b7ed03-kube-api-access-bvn8n" (OuterVolumeSpecName: "kube-api-access-bvn8n") pod "8b7737aa-e87a-4ddc-b056-d6a822b7ed03" (UID: "8b7737aa-e87a-4ddc-b056-d6a822b7ed03"). InnerVolumeSpecName "kube-api-access-bvn8n". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:09:18 crc kubenswrapper[4558]: I0120 17:09:18.671943 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8b7737aa-e87a-4ddc-b056-d6a822b7ed03-config-data" (OuterVolumeSpecName: "config-data") pod "8b7737aa-e87a-4ddc-b056-d6a822b7ed03" (UID: "8b7737aa-e87a-4ddc-b056-d6a822b7ed03"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:09:18 crc kubenswrapper[4558]: I0120 17:09:18.684517 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8b7737aa-e87a-4ddc-b056-d6a822b7ed03-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8b7737aa-e87a-4ddc-b056-d6a822b7ed03" (UID: "8b7737aa-e87a-4ddc-b056-d6a822b7ed03"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:09:18 crc kubenswrapper[4558]: I0120 17:09:18.745004 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qk6sl\" (UniqueName: \"kubernetes.io/projected/85104385-0429-4a08-a55d-1e863e8fe32a-kube-api-access-qk6sl\") pod \"85104385-0429-4a08-a55d-1e863e8fe32a\" (UID: \"85104385-0429-4a08-a55d-1e863e8fe32a\") " Jan 20 17:09:18 crc kubenswrapper[4558]: I0120 17:09:18.745108 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85104385-0429-4a08-a55d-1e863e8fe32a-config-data\") pod \"85104385-0429-4a08-a55d-1e863e8fe32a\" (UID: \"85104385-0429-4a08-a55d-1e863e8fe32a\") " Jan 20 17:09:18 crc kubenswrapper[4558]: I0120 17:09:18.745151 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85104385-0429-4a08-a55d-1e863e8fe32a-combined-ca-bundle\") pod \"85104385-0429-4a08-a55d-1e863e8fe32a\" (UID: \"85104385-0429-4a08-a55d-1e863e8fe32a\") " Jan 20 17:09:18 crc kubenswrapper[4558]: I0120 17:09:18.745869 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8b7737aa-e87a-4ddc-b056-d6a822b7ed03-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:09:18 crc kubenswrapper[4558]: I0120 17:09:18.745892 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bvn8n\" (UniqueName: \"kubernetes.io/projected/8b7737aa-e87a-4ddc-b056-d6a822b7ed03-kube-api-access-bvn8n\") on node \"crc\" DevicePath \"\"" Jan 20 17:09:18 crc kubenswrapper[4558]: I0120 17:09:18.745904 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b7737aa-e87a-4ddc-b056-d6a822b7ed03-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:09:18 crc kubenswrapper[4558]: I0120 17:09:18.749401 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/85104385-0429-4a08-a55d-1e863e8fe32a-kube-api-access-qk6sl" (OuterVolumeSpecName: "kube-api-access-qk6sl") pod "85104385-0429-4a08-a55d-1e863e8fe32a" (UID: "85104385-0429-4a08-a55d-1e863e8fe32a"). InnerVolumeSpecName "kube-api-access-qk6sl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:09:18 crc kubenswrapper[4558]: I0120 17:09:18.766524 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/85104385-0429-4a08-a55d-1e863e8fe32a-config-data" (OuterVolumeSpecName: "config-data") pod "85104385-0429-4a08-a55d-1e863e8fe32a" (UID: "85104385-0429-4a08-a55d-1e863e8fe32a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:09:18 crc kubenswrapper[4558]: I0120 17:09:18.768844 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/85104385-0429-4a08-a55d-1e863e8fe32a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "85104385-0429-4a08-a55d-1e863e8fe32a" (UID: "85104385-0429-4a08-a55d-1e863e8fe32a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:09:18 crc kubenswrapper[4558]: I0120 17:09:18.848427 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85104385-0429-4a08-a55d-1e863e8fe32a-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:09:18 crc kubenswrapper[4558]: I0120 17:09:18.848704 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85104385-0429-4a08-a55d-1e863e8fe32a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:09:18 crc kubenswrapper[4558]: I0120 17:09:18.848720 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qk6sl\" (UniqueName: \"kubernetes.io/projected/85104385-0429-4a08-a55d-1e863e8fe32a-kube-api-access-qk6sl\") on node \"crc\" DevicePath \"\"" Jan 20 17:09:18 crc kubenswrapper[4558]: I0120 17:09:18.893037 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"8149079b-aaf9-4993-b7b3-62c379dbc919","Type":"ContainerStarted","Data":"5775e82892c438c20a28ac8ee95d014102563fb0a2da04cb631f3c4dac7a8946"} Jan 20 17:09:18 crc kubenswrapper[4558]: I0120 17:09:18.893394 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:18 crc kubenswrapper[4558]: I0120 17:09:18.896874 4558 generic.go:334] "Generic (PLEG): container finished" podID="8b7737aa-e87a-4ddc-b056-d6a822b7ed03" containerID="45ae7f3768271e59277b9b48f984fa8761929d51c4d3f3fb549c86125c60e69c" exitCode=0 Jan 20 17:09:18 crc kubenswrapper[4558]: I0120 17:09:18.896913 4558 generic.go:334] "Generic (PLEG): container finished" podID="8b7737aa-e87a-4ddc-b056-d6a822b7ed03" containerID="85ca12a8000ffa1a6ba4458dc2a9b0550acf77360b933d97cfbf8f222e903611" exitCode=143 Jan 20 17:09:18 crc kubenswrapper[4558]: I0120 17:09:18.896995 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"8b7737aa-e87a-4ddc-b056-d6a822b7ed03","Type":"ContainerDied","Data":"45ae7f3768271e59277b9b48f984fa8761929d51c4d3f3fb549c86125c60e69c"} Jan 20 17:09:18 crc kubenswrapper[4558]: I0120 17:09:18.897034 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"8b7737aa-e87a-4ddc-b056-d6a822b7ed03","Type":"ContainerDied","Data":"85ca12a8000ffa1a6ba4458dc2a9b0550acf77360b933d97cfbf8f222e903611"} Jan 20 17:09:18 crc kubenswrapper[4558]: I0120 17:09:18.897058 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"8b7737aa-e87a-4ddc-b056-d6a822b7ed03","Type":"ContainerDied","Data":"f26cc9213c40b065f6bd06dc82f1e5ad087fd0cb16476f5b1b0f03474cfb1b67"} Jan 20 17:09:18 crc kubenswrapper[4558]: I0120 17:09:18.897078 4558 scope.go:117] "RemoveContainer" containerID="45ae7f3768271e59277b9b48f984fa8761929d51c4d3f3fb549c86125c60e69c" Jan 20 17:09:18 crc kubenswrapper[4558]: I0120 17:09:18.897535 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:18 crc kubenswrapper[4558]: I0120 17:09:18.899558 4558 generic.go:334] "Generic (PLEG): container finished" podID="85104385-0429-4a08-a55d-1e863e8fe32a" containerID="b369b0ff7e6827c44421f58cf5cd33c472d20c6d76b0c62d43db5e0a7c1f9d2a" exitCode=0 Jan 20 17:09:18 crc kubenswrapper[4558]: I0120 17:09:18.899608 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"85104385-0429-4a08-a55d-1e863e8fe32a","Type":"ContainerDied","Data":"b369b0ff7e6827c44421f58cf5cd33c472d20c6d76b0c62d43db5e0a7c1f9d2a"} Jan 20 17:09:18 crc kubenswrapper[4558]: I0120 17:09:18.899638 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"85104385-0429-4a08-a55d-1e863e8fe32a","Type":"ContainerDied","Data":"b3f86ef0cc35adc843b935d5699240e51beb598cc251a460954dd1e4212cc549"} Jan 20 17:09:18 crc kubenswrapper[4558]: I0120 17:09:18.899644 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:09:18 crc kubenswrapper[4558]: I0120 17:09:18.917119 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=2.629348415 podStartE2EDuration="6.917100457s" podCreationTimestamp="2026-01-20 17:09:12 +0000 UTC" firstStartedPulling="2026-01-20 17:09:13.501229734 +0000 UTC m=+1647.261567701" lastFinishedPulling="2026-01-20 17:09:17.788981777 +0000 UTC m=+1651.549319743" observedRunningTime="2026-01-20 17:09:18.915534623 +0000 UTC m=+1652.675872589" watchObservedRunningTime="2026-01-20 17:09:18.917100457 +0000 UTC m=+1652.677438424" Jan 20 17:09:18 crc kubenswrapper[4558]: I0120 17:09:18.926107 4558 scope.go:117] "RemoveContainer" containerID="85ca12a8000ffa1a6ba4458dc2a9b0550acf77360b933d97cfbf8f222e903611" Jan 20 17:09:18 crc kubenswrapper[4558]: I0120 17:09:18.960880 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:09:18 crc kubenswrapper[4558]: I0120 17:09:18.970509 4558 scope.go:117] "RemoveContainer" containerID="45ae7f3768271e59277b9b48f984fa8761929d51c4d3f3fb549c86125c60e69c" Jan 20 17:09:18 crc kubenswrapper[4558]: E0120 17:09:18.975269 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"45ae7f3768271e59277b9b48f984fa8761929d51c4d3f3fb549c86125c60e69c\": container with ID starting with 45ae7f3768271e59277b9b48f984fa8761929d51c4d3f3fb549c86125c60e69c not found: ID does not exist" containerID="45ae7f3768271e59277b9b48f984fa8761929d51c4d3f3fb549c86125c60e69c" Jan 20 17:09:18 crc kubenswrapper[4558]: I0120 17:09:18.975310 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"45ae7f3768271e59277b9b48f984fa8761929d51c4d3f3fb549c86125c60e69c"} err="failed to get container status \"45ae7f3768271e59277b9b48f984fa8761929d51c4d3f3fb549c86125c60e69c\": rpc error: code = NotFound desc = could not find container \"45ae7f3768271e59277b9b48f984fa8761929d51c4d3f3fb549c86125c60e69c\": container with ID starting with 45ae7f3768271e59277b9b48f984fa8761929d51c4d3f3fb549c86125c60e69c not found: ID does not exist" Jan 20 17:09:18 crc kubenswrapper[4558]: I0120 17:09:18.975339 4558 scope.go:117] "RemoveContainer" containerID="85ca12a8000ffa1a6ba4458dc2a9b0550acf77360b933d97cfbf8f222e903611" Jan 20 
17:09:18 crc kubenswrapper[4558]: E0120 17:09:18.975806 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"85ca12a8000ffa1a6ba4458dc2a9b0550acf77360b933d97cfbf8f222e903611\": container with ID starting with 85ca12a8000ffa1a6ba4458dc2a9b0550acf77360b933d97cfbf8f222e903611 not found: ID does not exist" containerID="85ca12a8000ffa1a6ba4458dc2a9b0550acf77360b933d97cfbf8f222e903611" Jan 20 17:09:18 crc kubenswrapper[4558]: I0120 17:09:18.975847 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"85ca12a8000ffa1a6ba4458dc2a9b0550acf77360b933d97cfbf8f222e903611"} err="failed to get container status \"85ca12a8000ffa1a6ba4458dc2a9b0550acf77360b933d97cfbf8f222e903611\": rpc error: code = NotFound desc = could not find container \"85ca12a8000ffa1a6ba4458dc2a9b0550acf77360b933d97cfbf8f222e903611\": container with ID starting with 85ca12a8000ffa1a6ba4458dc2a9b0550acf77360b933d97cfbf8f222e903611 not found: ID does not exist" Jan 20 17:09:18 crc kubenswrapper[4558]: I0120 17:09:18.975873 4558 scope.go:117] "RemoveContainer" containerID="45ae7f3768271e59277b9b48f984fa8761929d51c4d3f3fb549c86125c60e69c" Jan 20 17:09:18 crc kubenswrapper[4558]: I0120 17:09:18.976192 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"45ae7f3768271e59277b9b48f984fa8761929d51c4d3f3fb549c86125c60e69c"} err="failed to get container status \"45ae7f3768271e59277b9b48f984fa8761929d51c4d3f3fb549c86125c60e69c\": rpc error: code = NotFound desc = could not find container \"45ae7f3768271e59277b9b48f984fa8761929d51c4d3f3fb549c86125c60e69c\": container with ID starting with 45ae7f3768271e59277b9b48f984fa8761929d51c4d3f3fb549c86125c60e69c not found: ID does not exist" Jan 20 17:09:18 crc kubenswrapper[4558]: I0120 17:09:18.976212 4558 scope.go:117] "RemoveContainer" containerID="85ca12a8000ffa1a6ba4458dc2a9b0550acf77360b933d97cfbf8f222e903611" Jan 20 17:09:18 crc kubenswrapper[4558]: I0120 17:09:18.976424 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"85ca12a8000ffa1a6ba4458dc2a9b0550acf77360b933d97cfbf8f222e903611"} err="failed to get container status \"85ca12a8000ffa1a6ba4458dc2a9b0550acf77360b933d97cfbf8f222e903611\": rpc error: code = NotFound desc = could not find container \"85ca12a8000ffa1a6ba4458dc2a9b0550acf77360b933d97cfbf8f222e903611\": container with ID starting with 85ca12a8000ffa1a6ba4458dc2a9b0550acf77360b933d97cfbf8f222e903611 not found: ID does not exist" Jan 20 17:09:18 crc kubenswrapper[4558]: I0120 17:09:18.976444 4558 scope.go:117] "RemoveContainer" containerID="b369b0ff7e6827c44421f58cf5cd33c472d20c6d76b0c62d43db5e0a7c1f9d2a" Jan 20 17:09:18 crc kubenswrapper[4558]: I0120 17:09:18.993365 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:09:19 crc kubenswrapper[4558]: I0120 17:09:19.001789 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:09:19 crc kubenswrapper[4558]: I0120 17:09:19.010205 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:09:19 crc kubenswrapper[4558]: I0120 17:09:19.012155 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:09:19 crc kubenswrapper[4558]: I0120 17:09:19.014588 4558 scope.go:117] "RemoveContainer" 
containerID="b369b0ff7e6827c44421f58cf5cd33c472d20c6d76b0c62d43db5e0a7c1f9d2a" Jan 20 17:09:19 crc kubenswrapper[4558]: E0120 17:09:19.014613 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b7737aa-e87a-4ddc-b056-d6a822b7ed03" containerName="nova-metadata-log" Jan 20 17:09:19 crc kubenswrapper[4558]: I0120 17:09:19.014631 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b7737aa-e87a-4ddc-b056-d6a822b7ed03" containerName="nova-metadata-log" Jan 20 17:09:19 crc kubenswrapper[4558]: E0120 17:09:19.014672 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85104385-0429-4a08-a55d-1e863e8fe32a" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:09:19 crc kubenswrapper[4558]: I0120 17:09:19.014680 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="85104385-0429-4a08-a55d-1e863e8fe32a" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:09:19 crc kubenswrapper[4558]: E0120 17:09:19.014688 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b7737aa-e87a-4ddc-b056-d6a822b7ed03" containerName="nova-metadata-metadata" Jan 20 17:09:19 crc kubenswrapper[4558]: I0120 17:09:19.014695 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b7737aa-e87a-4ddc-b056-d6a822b7ed03" containerName="nova-metadata-metadata" Jan 20 17:09:19 crc kubenswrapper[4558]: I0120 17:09:19.015121 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8b7737aa-e87a-4ddc-b056-d6a822b7ed03" containerName="nova-metadata-metadata" Jan 20 17:09:19 crc kubenswrapper[4558]: I0120 17:09:19.015143 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8b7737aa-e87a-4ddc-b056-d6a822b7ed03" containerName="nova-metadata-log" Jan 20 17:09:19 crc kubenswrapper[4558]: I0120 17:09:19.015154 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="85104385-0429-4a08-a55d-1e863e8fe32a" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:09:19 crc kubenswrapper[4558]: I0120 17:09:19.015961 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:09:19 crc kubenswrapper[4558]: E0120 17:09:19.016833 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b369b0ff7e6827c44421f58cf5cd33c472d20c6d76b0c62d43db5e0a7c1f9d2a\": container with ID starting with b369b0ff7e6827c44421f58cf5cd33c472d20c6d76b0c62d43db5e0a7c1f9d2a not found: ID does not exist" containerID="b369b0ff7e6827c44421f58cf5cd33c472d20c6d76b0c62d43db5e0a7c1f9d2a" Jan 20 17:09:19 crc kubenswrapper[4558]: I0120 17:09:19.016870 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b369b0ff7e6827c44421f58cf5cd33c472d20c6d76b0c62d43db5e0a7c1f9d2a"} err="failed to get container status \"b369b0ff7e6827c44421f58cf5cd33c472d20c6d76b0c62d43db5e0a7c1f9d2a\": rpc error: code = NotFound desc = could not find container \"b369b0ff7e6827c44421f58cf5cd33c472d20c6d76b0c62d43db5e0a7c1f9d2a\": container with ID starting with b369b0ff7e6827c44421f58cf5cd33c472d20c6d76b0c62d43db5e0a7c1f9d2a not found: ID does not exist" Jan 20 17:09:19 crc kubenswrapper[4558]: I0120 17:09:19.017574 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-novncproxy-config-data" Jan 20 17:09:19 crc kubenswrapper[4558]: I0120 17:09:19.017751 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-novncproxy-cell1-public-svc" Jan 20 17:09:19 crc kubenswrapper[4558]: I0120 17:09:19.017880 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-novncproxy-cell1-vencrypt" Jan 20 17:09:19 crc kubenswrapper[4558]: I0120 17:09:19.020222 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:09:19 crc kubenswrapper[4558]: I0120 17:09:19.021872 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:19 crc kubenswrapper[4558]: I0120 17:09:19.024622 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-metadata-config-data" Jan 20 17:09:19 crc kubenswrapper[4558]: I0120 17:09:19.024677 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-metadata-internal-svc" Jan 20 17:09:19 crc kubenswrapper[4558]: I0120 17:09:19.037301 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:09:19 crc kubenswrapper[4558]: I0120 17:09:19.052688 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:09:19 crc kubenswrapper[4558]: I0120 17:09:19.160259 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/1927a92c-7b85-47f6-b401-4032daabe0a3-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"1927a92c-7b85-47f6-b401-4032daabe0a3\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:19 crc kubenswrapper[4558]: I0120 17:09:19.160315 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1927a92c-7b85-47f6-b401-4032daabe0a3-logs\") pod \"nova-metadata-0\" (UID: \"1927a92c-7b85-47f6-b401-4032daabe0a3\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:19 crc kubenswrapper[4558]: I0120 17:09:19.160365 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e30a943a-f753-41fc-adc8-03822aa712c3-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"e30a943a-f753-41fc-adc8-03822aa712c3\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:09:19 crc kubenswrapper[4558]: I0120 17:09:19.160557 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/e30a943a-f753-41fc-adc8-03822aa712c3-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"e30a943a-f753-41fc-adc8-03822aa712c3\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:09:19 crc kubenswrapper[4558]: I0120 17:09:19.160606 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t6q9w\" (UniqueName: \"kubernetes.io/projected/1927a92c-7b85-47f6-b401-4032daabe0a3-kube-api-access-t6q9w\") pod \"nova-metadata-0\" (UID: \"1927a92c-7b85-47f6-b401-4032daabe0a3\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:19 crc kubenswrapper[4558]: I0120 17:09:19.160718 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e30a943a-f753-41fc-adc8-03822aa712c3-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"e30a943a-f753-41fc-adc8-03822aa712c3\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:09:19 crc kubenswrapper[4558]: I0120 17:09:19.160791 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1927a92c-7b85-47f6-b401-4032daabe0a3-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"1927a92c-7b85-47f6-b401-4032daabe0a3\") " 
pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:19 crc kubenswrapper[4558]: I0120 17:09:19.160919 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w698k\" (UniqueName: \"kubernetes.io/projected/e30a943a-f753-41fc-adc8-03822aa712c3-kube-api-access-w698k\") pod \"nova-cell1-novncproxy-0\" (UID: \"e30a943a-f753-41fc-adc8-03822aa712c3\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:09:19 crc kubenswrapper[4558]: I0120 17:09:19.161055 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1927a92c-7b85-47f6-b401-4032daabe0a3-config-data\") pod \"nova-metadata-0\" (UID: \"1927a92c-7b85-47f6-b401-4032daabe0a3\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:19 crc kubenswrapper[4558]: I0120 17:09:19.161111 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/e30a943a-f753-41fc-adc8-03822aa712c3-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"e30a943a-f753-41fc-adc8-03822aa712c3\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:09:19 crc kubenswrapper[4558]: I0120 17:09:19.263097 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1927a92c-7b85-47f6-b401-4032daabe0a3-config-data\") pod \"nova-metadata-0\" (UID: \"1927a92c-7b85-47f6-b401-4032daabe0a3\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:19 crc kubenswrapper[4558]: I0120 17:09:19.263156 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/e30a943a-f753-41fc-adc8-03822aa712c3-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"e30a943a-f753-41fc-adc8-03822aa712c3\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:09:19 crc kubenswrapper[4558]: I0120 17:09:19.263212 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/1927a92c-7b85-47f6-b401-4032daabe0a3-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"1927a92c-7b85-47f6-b401-4032daabe0a3\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:19 crc kubenswrapper[4558]: I0120 17:09:19.263236 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1927a92c-7b85-47f6-b401-4032daabe0a3-logs\") pod \"nova-metadata-0\" (UID: \"1927a92c-7b85-47f6-b401-4032daabe0a3\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:19 crc kubenswrapper[4558]: I0120 17:09:19.263258 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e30a943a-f753-41fc-adc8-03822aa712c3-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"e30a943a-f753-41fc-adc8-03822aa712c3\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:09:19 crc kubenswrapper[4558]: I0120 17:09:19.263294 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/e30a943a-f753-41fc-adc8-03822aa712c3-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"e30a943a-f753-41fc-adc8-03822aa712c3\") " 
pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:09:19 crc kubenswrapper[4558]: I0120 17:09:19.263311 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t6q9w\" (UniqueName: \"kubernetes.io/projected/1927a92c-7b85-47f6-b401-4032daabe0a3-kube-api-access-t6q9w\") pod \"nova-metadata-0\" (UID: \"1927a92c-7b85-47f6-b401-4032daabe0a3\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:19 crc kubenswrapper[4558]: I0120 17:09:19.263358 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e30a943a-f753-41fc-adc8-03822aa712c3-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"e30a943a-f753-41fc-adc8-03822aa712c3\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:09:19 crc kubenswrapper[4558]: I0120 17:09:19.263385 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1927a92c-7b85-47f6-b401-4032daabe0a3-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"1927a92c-7b85-47f6-b401-4032daabe0a3\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:19 crc kubenswrapper[4558]: I0120 17:09:19.263415 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w698k\" (UniqueName: \"kubernetes.io/projected/e30a943a-f753-41fc-adc8-03822aa712c3-kube-api-access-w698k\") pod \"nova-cell1-novncproxy-0\" (UID: \"e30a943a-f753-41fc-adc8-03822aa712c3\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:09:19 crc kubenswrapper[4558]: I0120 17:09:19.263835 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1927a92c-7b85-47f6-b401-4032daabe0a3-logs\") pod \"nova-metadata-0\" (UID: \"1927a92c-7b85-47f6-b401-4032daabe0a3\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:19 crc kubenswrapper[4558]: I0120 17:09:19.268076 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/e30a943a-f753-41fc-adc8-03822aa712c3-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"e30a943a-f753-41fc-adc8-03822aa712c3\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:09:19 crc kubenswrapper[4558]: I0120 17:09:19.268508 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1927a92c-7b85-47f6-b401-4032daabe0a3-config-data\") pod \"nova-metadata-0\" (UID: \"1927a92c-7b85-47f6-b401-4032daabe0a3\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:19 crc kubenswrapper[4558]: I0120 17:09:19.270100 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e30a943a-f753-41fc-adc8-03822aa712c3-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"e30a943a-f753-41fc-adc8-03822aa712c3\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:09:19 crc kubenswrapper[4558]: I0120 17:09:19.270315 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e30a943a-f753-41fc-adc8-03822aa712c3-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"e30a943a-f753-41fc-adc8-03822aa712c3\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:09:19 crc kubenswrapper[4558]: I0120 17:09:19.272101 4558 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1927a92c-7b85-47f6-b401-4032daabe0a3-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"1927a92c-7b85-47f6-b401-4032daabe0a3\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:19 crc kubenswrapper[4558]: I0120 17:09:19.277559 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/1927a92c-7b85-47f6-b401-4032daabe0a3-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"1927a92c-7b85-47f6-b401-4032daabe0a3\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:19 crc kubenswrapper[4558]: I0120 17:09:19.278843 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/e30a943a-f753-41fc-adc8-03822aa712c3-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"e30a943a-f753-41fc-adc8-03822aa712c3\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:09:19 crc kubenswrapper[4558]: I0120 17:09:19.279686 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t6q9w\" (UniqueName: \"kubernetes.io/projected/1927a92c-7b85-47f6-b401-4032daabe0a3-kube-api-access-t6q9w\") pod \"nova-metadata-0\" (UID: \"1927a92c-7b85-47f6-b401-4032daabe0a3\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:19 crc kubenswrapper[4558]: I0120 17:09:19.281049 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w698k\" (UniqueName: \"kubernetes.io/projected/e30a943a-f753-41fc-adc8-03822aa712c3-kube-api-access-w698k\") pod \"nova-cell1-novncproxy-0\" (UID: \"e30a943a-f753-41fc-adc8-03822aa712c3\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:09:19 crc kubenswrapper[4558]: I0120 17:09:19.316098 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:09:19 crc kubenswrapper[4558]: I0120 17:09:19.341215 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:09:19 crc kubenswrapper[4558]: I0120 17:09:19.345962 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:19 crc kubenswrapper[4558]: I0120 17:09:19.793070 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:09:19 crc kubenswrapper[4558]: W0120 17:09:19.796370 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode30a943a_f753_41fc_adc8_03822aa712c3.slice/crio-f29ed6548455a86b575a5ffb74017f4988c6a7e20546f6a3c1688ccaa4b7fecd WatchSource:0}: Error finding container f29ed6548455a86b575a5ffb74017f4988c6a7e20546f6a3c1688ccaa4b7fecd: Status 404 returned error can't find the container with id f29ed6548455a86b575a5ffb74017f4988c6a7e20546f6a3c1688ccaa4b7fecd Jan 20 17:09:19 crc kubenswrapper[4558]: I0120 17:09:19.845759 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:09:19 crc kubenswrapper[4558]: I0120 17:09:19.911025 4558 generic.go:334] "Generic (PLEG): container finished" podID="11c55353-18fe-4039-b437-e642c5ff122e" containerID="b47901b596b370703e4ec143a0305eb1f5aa2874468f01a805b913daadf8f137" exitCode=0 Jan 20 17:09:19 crc kubenswrapper[4558]: I0120 17:09:19.911401 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-7l69k" event={"ID":"11c55353-18fe-4039-b437-e642c5ff122e","Type":"ContainerDied","Data":"b47901b596b370703e4ec143a0305eb1f5aa2874468f01a805b913daadf8f137"} Jan 20 17:09:19 crc kubenswrapper[4558]: I0120 17:09:19.913606 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"e30a943a-f753-41fc-adc8-03822aa712c3","Type":"ContainerStarted","Data":"f29ed6548455a86b575a5ffb74017f4988c6a7e20546f6a3c1688ccaa4b7fecd"} Jan 20 17:09:19 crc kubenswrapper[4558]: I0120 17:09:19.921504 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"1927a92c-7b85-47f6-b401-4032daabe0a3","Type":"ContainerStarted","Data":"8d7937ce2d9b74b07d582f7f1f76bec9293645b6cb2a357d1da0e33e95cb61ec"} Jan 20 17:09:20 crc kubenswrapper[4558]: I0120 17:09:20.577470 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="85104385-0429-4a08-a55d-1e863e8fe32a" path="/var/lib/kubelet/pods/85104385-0429-4a08-a55d-1e863e8fe32a/volumes" Jan 20 17:09:20 crc kubenswrapper[4558]: I0120 17:09:20.578277 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8b7737aa-e87a-4ddc-b056-d6a822b7ed03" path="/var/lib/kubelet/pods/8b7737aa-e87a-4ddc-b056-d6a822b7ed03/volumes" Jan 20 17:09:20 crc kubenswrapper[4558]: I0120 17:09:20.943855 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"1927a92c-7b85-47f6-b401-4032daabe0a3","Type":"ContainerStarted","Data":"51dbcfcc01818883ec45c4c1c6a0e37e606d6de400d6565d031eb8ec2eeab71e"} Jan 20 17:09:20 crc kubenswrapper[4558]: I0120 17:09:20.943918 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"1927a92c-7b85-47f6-b401-4032daabe0a3","Type":"ContainerStarted","Data":"69d85ebc012c0971a3ab6f07a4d07dbfbf3982312e288549fe4c398a0062158b"} Jan 20 17:09:20 crc kubenswrapper[4558]: I0120 17:09:20.945874 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" 
event={"ID":"e30a943a-f753-41fc-adc8-03822aa712c3","Type":"ContainerStarted","Data":"4009b12e702e655b643aa02ca67d095804a9ddf483a6488a64d2205988f9a03a"} Jan 20 17:09:20 crc kubenswrapper[4558]: I0120 17:09:20.962898 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-metadata-0" podStartSLOduration=2.962880406 podStartE2EDuration="2.962880406s" podCreationTimestamp="2026-01-20 17:09:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:09:20.95971846 +0000 UTC m=+1654.720056427" watchObservedRunningTime="2026-01-20 17:09:20.962880406 +0000 UTC m=+1654.723218373" Jan 20 17:09:21 crc kubenswrapper[4558]: I0120 17:09:21.291465 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-7l69k" Jan 20 17:09:21 crc kubenswrapper[4558]: I0120 17:09:21.310447 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" podStartSLOduration=3.310428966 podStartE2EDuration="3.310428966s" podCreationTimestamp="2026-01-20 17:09:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:09:20.981701537 +0000 UTC m=+1654.742039504" watchObservedRunningTime="2026-01-20 17:09:21.310428966 +0000 UTC m=+1655.070766933" Jan 20 17:09:21 crc kubenswrapper[4558]: I0120 17:09:21.400777 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bjxx9\" (UniqueName: \"kubernetes.io/projected/11c55353-18fe-4039-b437-e642c5ff122e-kube-api-access-bjxx9\") pod \"11c55353-18fe-4039-b437-e642c5ff122e\" (UID: \"11c55353-18fe-4039-b437-e642c5ff122e\") " Jan 20 17:09:21 crc kubenswrapper[4558]: I0120 17:09:21.400865 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/11c55353-18fe-4039-b437-e642c5ff122e-scripts\") pod \"11c55353-18fe-4039-b437-e642c5ff122e\" (UID: \"11c55353-18fe-4039-b437-e642c5ff122e\") " Jan 20 17:09:21 crc kubenswrapper[4558]: I0120 17:09:21.401089 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11c55353-18fe-4039-b437-e642c5ff122e-combined-ca-bundle\") pod \"11c55353-18fe-4039-b437-e642c5ff122e\" (UID: \"11c55353-18fe-4039-b437-e642c5ff122e\") " Jan 20 17:09:21 crc kubenswrapper[4558]: I0120 17:09:21.401221 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/11c55353-18fe-4039-b437-e642c5ff122e-config-data\") pod \"11c55353-18fe-4039-b437-e642c5ff122e\" (UID: \"11c55353-18fe-4039-b437-e642c5ff122e\") " Jan 20 17:09:21 crc kubenswrapper[4558]: I0120 17:09:21.406508 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/11c55353-18fe-4039-b437-e642c5ff122e-kube-api-access-bjxx9" (OuterVolumeSpecName: "kube-api-access-bjxx9") pod "11c55353-18fe-4039-b437-e642c5ff122e" (UID: "11c55353-18fe-4039-b437-e642c5ff122e"). InnerVolumeSpecName "kube-api-access-bjxx9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:09:21 crc kubenswrapper[4558]: I0120 17:09:21.406925 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/11c55353-18fe-4039-b437-e642c5ff122e-scripts" (OuterVolumeSpecName: "scripts") pod "11c55353-18fe-4039-b437-e642c5ff122e" (UID: "11c55353-18fe-4039-b437-e642c5ff122e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:09:21 crc kubenswrapper[4558]: I0120 17:09:21.427141 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/11c55353-18fe-4039-b437-e642c5ff122e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "11c55353-18fe-4039-b437-e642c5ff122e" (UID: "11c55353-18fe-4039-b437-e642c5ff122e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:09:21 crc kubenswrapper[4558]: I0120 17:09:21.436009 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/11c55353-18fe-4039-b437-e642c5ff122e-config-data" (OuterVolumeSpecName: "config-data") pod "11c55353-18fe-4039-b437-e642c5ff122e" (UID: "11c55353-18fe-4039-b437-e642c5ff122e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:09:21 crc kubenswrapper[4558]: I0120 17:09:21.504628 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/11c55353-18fe-4039-b437-e642c5ff122e-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:09:21 crc kubenswrapper[4558]: I0120 17:09:21.504671 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11c55353-18fe-4039-b437-e642c5ff122e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:09:21 crc kubenswrapper[4558]: I0120 17:09:21.504684 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/11c55353-18fe-4039-b437-e642c5ff122e-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:09:21 crc kubenswrapper[4558]: I0120 17:09:21.504697 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bjxx9\" (UniqueName: \"kubernetes.io/projected/11c55353-18fe-4039-b437-e642c5ff122e-kube-api-access-bjxx9\") on node \"crc\" DevicePath \"\"" Jan 20 17:09:21 crc kubenswrapper[4558]: I0120 17:09:21.958033 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-7l69k" Jan 20 17:09:21 crc kubenswrapper[4558]: I0120 17:09:21.958016 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-7l69k" event={"ID":"11c55353-18fe-4039-b437-e642c5ff122e","Type":"ContainerDied","Data":"e208a722fc2fe4ffeabd0b2d7083d3c2af97c4ac991ea1ecaf7242f293485fbd"} Jan 20 17:09:21 crc kubenswrapper[4558]: I0120 17:09:21.958879 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e208a722fc2fe4ffeabd0b2d7083d3c2af97c4ac991ea1ecaf7242f293485fbd" Jan 20 17:09:21 crc kubenswrapper[4558]: I0120 17:09:21.960308 4558 generic.go:334] "Generic (PLEG): container finished" podID="d0bb0682-179e-4316-98d1-55c1912cc154" containerID="05aa1119c41b498e17a30471e75a8154754e7a84508cb65cee9fc6ef9d057e44" exitCode=0 Jan 20 17:09:21 crc kubenswrapper[4558]: I0120 17:09:21.960363 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-kn965" event={"ID":"d0bb0682-179e-4316-98d1-55c1912cc154","Type":"ContainerDied","Data":"05aa1119c41b498e17a30471e75a8154754e7a84508cb65cee9fc6ef9d057e44"} Jan 20 17:09:21 crc kubenswrapper[4558]: I0120 17:09:21.993195 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:09:21 crc kubenswrapper[4558]: E0120 17:09:21.993666 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11c55353-18fe-4039-b437-e642c5ff122e" containerName="nova-cell1-conductor-db-sync" Jan 20 17:09:21 crc kubenswrapper[4558]: I0120 17:09:21.993687 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="11c55353-18fe-4039-b437-e642c5ff122e" containerName="nova-cell1-conductor-db-sync" Jan 20 17:09:21 crc kubenswrapper[4558]: I0120 17:09:21.993868 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="11c55353-18fe-4039-b437-e642c5ff122e" containerName="nova-cell1-conductor-db-sync" Jan 20 17:09:21 crc kubenswrapper[4558]: I0120 17:09:21.994574 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:09:21 crc kubenswrapper[4558]: I0120 17:09:21.996461 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-conductor-config-data" Jan 20 17:09:21 crc kubenswrapper[4558]: I0120 17:09:21.998444 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:09:22 crc kubenswrapper[4558]: I0120 17:09:22.013071 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/937396e0-a574-4597-bafc-bf1d8a909d3c-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"937396e0-a574-4597-bafc-bf1d8a909d3c\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:09:22 crc kubenswrapper[4558]: I0120 17:09:22.013201 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nt869\" (UniqueName: \"kubernetes.io/projected/937396e0-a574-4597-bafc-bf1d8a909d3c-kube-api-access-nt869\") pod \"nova-cell1-conductor-0\" (UID: \"937396e0-a574-4597-bafc-bf1d8a909d3c\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:09:22 crc kubenswrapper[4558]: I0120 17:09:22.013262 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/937396e0-a574-4597-bafc-bf1d8a909d3c-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"937396e0-a574-4597-bafc-bf1d8a909d3c\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:09:22 crc kubenswrapper[4558]: I0120 17:09:22.115436 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/937396e0-a574-4597-bafc-bf1d8a909d3c-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"937396e0-a574-4597-bafc-bf1d8a909d3c\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:09:22 crc kubenswrapper[4558]: I0120 17:09:22.115642 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nt869\" (UniqueName: \"kubernetes.io/projected/937396e0-a574-4597-bafc-bf1d8a909d3c-kube-api-access-nt869\") pod \"nova-cell1-conductor-0\" (UID: \"937396e0-a574-4597-bafc-bf1d8a909d3c\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:09:22 crc kubenswrapper[4558]: I0120 17:09:22.115745 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/937396e0-a574-4597-bafc-bf1d8a909d3c-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"937396e0-a574-4597-bafc-bf1d8a909d3c\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:09:22 crc kubenswrapper[4558]: I0120 17:09:22.122110 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/937396e0-a574-4597-bafc-bf1d8a909d3c-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"937396e0-a574-4597-bafc-bf1d8a909d3c\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:09:22 crc kubenswrapper[4558]: I0120 17:09:22.122908 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/937396e0-a574-4597-bafc-bf1d8a909d3c-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"937396e0-a574-4597-bafc-bf1d8a909d3c\") " 
pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:09:22 crc kubenswrapper[4558]: I0120 17:09:22.131097 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nt869\" (UniqueName: \"kubernetes.io/projected/937396e0-a574-4597-bafc-bf1d8a909d3c-kube-api-access-nt869\") pod \"nova-cell1-conductor-0\" (UID: \"937396e0-a574-4597-bafc-bf1d8a909d3c\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:09:22 crc kubenswrapper[4558]: I0120 17:09:22.309495 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:09:22 crc kubenswrapper[4558]: I0120 17:09:22.719465 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:09:22 crc kubenswrapper[4558]: W0120 17:09:22.722729 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod937396e0_a574_4597_bafc_bf1d8a909d3c.slice/crio-81a3a14e5559a9514153dd85cad2368d4cf799ec843e60f45201b7c583751eeb WatchSource:0}: Error finding container 81a3a14e5559a9514153dd85cad2368d4cf799ec843e60f45201b7c583751eeb: Status 404 returned error can't find the container with id 81a3a14e5559a9514153dd85cad2368d4cf799ec843e60f45201b7c583751eeb Jan 20 17:09:22 crc kubenswrapper[4558]: I0120 17:09:22.974254 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"937396e0-a574-4597-bafc-bf1d8a909d3c","Type":"ContainerStarted","Data":"51dda5ae1f21ef092b384f0127be32effe565acdb2160b9b7bd5bc4ac8bc0278"} Jan 20 17:09:22 crc kubenswrapper[4558]: I0120 17:09:22.974632 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"937396e0-a574-4597-bafc-bf1d8a909d3c","Type":"ContainerStarted","Data":"81a3a14e5559a9514153dd85cad2368d4cf799ec843e60f45201b7c583751eeb"} Jan 20 17:09:22 crc kubenswrapper[4558]: I0120 17:09:22.994782 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podStartSLOduration=1.99476243 podStartE2EDuration="1.99476243s" podCreationTimestamp="2026-01-20 17:09:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:09:22.993618608 +0000 UTC m=+1656.753956566" watchObservedRunningTime="2026-01-20 17:09:22.99476243 +0000 UTC m=+1656.755100388" Jan 20 17:09:23 crc kubenswrapper[4558]: I0120 17:09:23.287760 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-kn965" Jan 20 17:09:23 crc kubenswrapper[4558]: I0120 17:09:23.343988 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cwdqd\" (UniqueName: \"kubernetes.io/projected/d0bb0682-179e-4316-98d1-55c1912cc154-kube-api-access-cwdqd\") pod \"d0bb0682-179e-4316-98d1-55c1912cc154\" (UID: \"d0bb0682-179e-4316-98d1-55c1912cc154\") " Jan 20 17:09:23 crc kubenswrapper[4558]: I0120 17:09:23.344047 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d0bb0682-179e-4316-98d1-55c1912cc154-scripts\") pod \"d0bb0682-179e-4316-98d1-55c1912cc154\" (UID: \"d0bb0682-179e-4316-98d1-55c1912cc154\") " Jan 20 17:09:23 crc kubenswrapper[4558]: I0120 17:09:23.344305 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0bb0682-179e-4316-98d1-55c1912cc154-config-data\") pod \"d0bb0682-179e-4316-98d1-55c1912cc154\" (UID: \"d0bb0682-179e-4316-98d1-55c1912cc154\") " Jan 20 17:09:23 crc kubenswrapper[4558]: I0120 17:09:23.344333 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0bb0682-179e-4316-98d1-55c1912cc154-combined-ca-bundle\") pod \"d0bb0682-179e-4316-98d1-55c1912cc154\" (UID: \"d0bb0682-179e-4316-98d1-55c1912cc154\") " Jan 20 17:09:23 crc kubenswrapper[4558]: I0120 17:09:23.353410 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d0bb0682-179e-4316-98d1-55c1912cc154-kube-api-access-cwdqd" (OuterVolumeSpecName: "kube-api-access-cwdqd") pod "d0bb0682-179e-4316-98d1-55c1912cc154" (UID: "d0bb0682-179e-4316-98d1-55c1912cc154"). InnerVolumeSpecName "kube-api-access-cwdqd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:09:23 crc kubenswrapper[4558]: I0120 17:09:23.357652 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d0bb0682-179e-4316-98d1-55c1912cc154-scripts" (OuterVolumeSpecName: "scripts") pod "d0bb0682-179e-4316-98d1-55c1912cc154" (UID: "d0bb0682-179e-4316-98d1-55c1912cc154"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:09:23 crc kubenswrapper[4558]: I0120 17:09:23.367656 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d0bb0682-179e-4316-98d1-55c1912cc154-config-data" (OuterVolumeSpecName: "config-data") pod "d0bb0682-179e-4316-98d1-55c1912cc154" (UID: "d0bb0682-179e-4316-98d1-55c1912cc154"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:09:23 crc kubenswrapper[4558]: I0120 17:09:23.369336 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d0bb0682-179e-4316-98d1-55c1912cc154-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d0bb0682-179e-4316-98d1-55c1912cc154" (UID: "d0bb0682-179e-4316-98d1-55c1912cc154"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:09:23 crc kubenswrapper[4558]: I0120 17:09:23.447103 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cwdqd\" (UniqueName: \"kubernetes.io/projected/d0bb0682-179e-4316-98d1-55c1912cc154-kube-api-access-cwdqd\") on node \"crc\" DevicePath \"\"" Jan 20 17:09:23 crc kubenswrapper[4558]: I0120 17:09:23.447135 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d0bb0682-179e-4316-98d1-55c1912cc154-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:09:23 crc kubenswrapper[4558]: I0120 17:09:23.447147 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0bb0682-179e-4316-98d1-55c1912cc154-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:09:23 crc kubenswrapper[4558]: I0120 17:09:23.447187 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0bb0682-179e-4316-98d1-55c1912cc154-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:09:23 crc kubenswrapper[4558]: I0120 17:09:23.988057 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-kn965" Jan 20 17:09:23 crc kubenswrapper[4558]: I0120 17:09:23.988649 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-kn965" event={"ID":"d0bb0682-179e-4316-98d1-55c1912cc154","Type":"ContainerDied","Data":"ace38f7a3bbcb96e064278e6d251883adf8718863525236ed7f914f19ee89e97"} Jan 20 17:09:23 crc kubenswrapper[4558]: I0120 17:09:23.988686 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ace38f7a3bbcb96e064278e6d251883adf8718863525236ed7f914f19ee89e97" Jan 20 17:09:23 crc kubenswrapper[4558]: I0120 17:09:23.988707 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:09:24 crc kubenswrapper[4558]: I0120 17:09:24.151717 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:09:24 crc kubenswrapper[4558]: I0120 17:09:24.151963 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="660d7b2b-b480-41c1-8e10-d2216f28c591" containerName="nova-api-log" containerID="cri-o://e20361edf85efa6d348a94ffef199107af85dd94257c9b4b7f0a6aaab545e2d2" gracePeriod=30 Jan 20 17:09:24 crc kubenswrapper[4558]: I0120 17:09:24.152388 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="660d7b2b-b480-41c1-8e10-d2216f28c591" containerName="nova-api-api" containerID="cri-o://d9c47772c06d9f094ea0cce3a6bf4fbc7f43d6366d44936501ab735811412dbf" gracePeriod=30 Jan 20 17:09:24 crc kubenswrapper[4558]: I0120 17:09:24.159762 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:09:24 crc kubenswrapper[4558]: I0120 17:09:24.159984 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="f640bcab-f4ac-417c-86e6-af44f26a0ca9" containerName="nova-scheduler-scheduler" containerID="cri-o://df65964e41193f502824f8b897729832e4e3dd70022cdd614c0000b9be9cf213" gracePeriod=30 Jan 20 17:09:24 crc kubenswrapper[4558]: I0120 17:09:24.178872 4558 kubelet.go:2437] "SyncLoop DELETE" 
source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:09:24 crc kubenswrapper[4558]: I0120 17:09:24.179143 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="1927a92c-7b85-47f6-b401-4032daabe0a3" containerName="nova-metadata-log" containerID="cri-o://69d85ebc012c0971a3ab6f07a4d07dbfbf3982312e288549fe4c398a0062158b" gracePeriod=30 Jan 20 17:09:24 crc kubenswrapper[4558]: I0120 17:09:24.179353 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="1927a92c-7b85-47f6-b401-4032daabe0a3" containerName="nova-metadata-metadata" containerID="cri-o://51dbcfcc01818883ec45c4c1c6a0e37e606d6de400d6565d031eb8ec2eeab71e" gracePeriod=30 Jan 20 17:09:24 crc kubenswrapper[4558]: I0120 17:09:24.342238 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:09:24 crc kubenswrapper[4558]: I0120 17:09:24.347021 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:24 crc kubenswrapper[4558]: I0120 17:09:24.347899 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:24 crc kubenswrapper[4558]: I0120 17:09:24.777056 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:09:24 crc kubenswrapper[4558]: I0120 17:09:24.784207 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:24 crc kubenswrapper[4558]: I0120 17:09:24.973824 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t6q9w\" (UniqueName: \"kubernetes.io/projected/1927a92c-7b85-47f6-b401-4032daabe0a3-kube-api-access-t6q9w\") pod \"1927a92c-7b85-47f6-b401-4032daabe0a3\" (UID: \"1927a92c-7b85-47f6-b401-4032daabe0a3\") " Jan 20 17:09:24 crc kubenswrapper[4558]: I0120 17:09:24.973886 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/660d7b2b-b480-41c1-8e10-d2216f28c591-combined-ca-bundle\") pod \"660d7b2b-b480-41c1-8e10-d2216f28c591\" (UID: \"660d7b2b-b480-41c1-8e10-d2216f28c591\") " Jan 20 17:09:24 crc kubenswrapper[4558]: I0120 17:09:24.973965 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p4l5f\" (UniqueName: \"kubernetes.io/projected/660d7b2b-b480-41c1-8e10-d2216f28c591-kube-api-access-p4l5f\") pod \"660d7b2b-b480-41c1-8e10-d2216f28c591\" (UID: \"660d7b2b-b480-41c1-8e10-d2216f28c591\") " Jan 20 17:09:24 crc kubenswrapper[4558]: I0120 17:09:24.973996 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1927a92c-7b85-47f6-b401-4032daabe0a3-combined-ca-bundle\") pod \"1927a92c-7b85-47f6-b401-4032daabe0a3\" (UID: \"1927a92c-7b85-47f6-b401-4032daabe0a3\") " Jan 20 17:09:24 crc kubenswrapper[4558]: I0120 17:09:24.974026 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/660d7b2b-b480-41c1-8e10-d2216f28c591-logs\") pod \"660d7b2b-b480-41c1-8e10-d2216f28c591\" (UID: \"660d7b2b-b480-41c1-8e10-d2216f28c591\") " Jan 20 17:09:24 crc kubenswrapper[4558]: I0120 17:09:24.974046 
4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/660d7b2b-b480-41c1-8e10-d2216f28c591-config-data\") pod \"660d7b2b-b480-41c1-8e10-d2216f28c591\" (UID: \"660d7b2b-b480-41c1-8e10-d2216f28c591\") " Jan 20 17:09:24 crc kubenswrapper[4558]: I0120 17:09:24.974065 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1927a92c-7b85-47f6-b401-4032daabe0a3-logs\") pod \"1927a92c-7b85-47f6-b401-4032daabe0a3\" (UID: \"1927a92c-7b85-47f6-b401-4032daabe0a3\") " Jan 20 17:09:24 crc kubenswrapper[4558]: I0120 17:09:24.974086 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/1927a92c-7b85-47f6-b401-4032daabe0a3-nova-metadata-tls-certs\") pod \"1927a92c-7b85-47f6-b401-4032daabe0a3\" (UID: \"1927a92c-7b85-47f6-b401-4032daabe0a3\") " Jan 20 17:09:24 crc kubenswrapper[4558]: I0120 17:09:24.974105 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1927a92c-7b85-47f6-b401-4032daabe0a3-config-data\") pod \"1927a92c-7b85-47f6-b401-4032daabe0a3\" (UID: \"1927a92c-7b85-47f6-b401-4032daabe0a3\") " Jan 20 17:09:24 crc kubenswrapper[4558]: I0120 17:09:24.975427 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1927a92c-7b85-47f6-b401-4032daabe0a3-logs" (OuterVolumeSpecName: "logs") pod "1927a92c-7b85-47f6-b401-4032daabe0a3" (UID: "1927a92c-7b85-47f6-b401-4032daabe0a3"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:09:24 crc kubenswrapper[4558]: I0120 17:09:24.975582 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/660d7b2b-b480-41c1-8e10-d2216f28c591-logs" (OuterVolumeSpecName: "logs") pod "660d7b2b-b480-41c1-8e10-d2216f28c591" (UID: "660d7b2b-b480-41c1-8e10-d2216f28c591"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:09:24 crc kubenswrapper[4558]: I0120 17:09:24.980976 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1927a92c-7b85-47f6-b401-4032daabe0a3-kube-api-access-t6q9w" (OuterVolumeSpecName: "kube-api-access-t6q9w") pod "1927a92c-7b85-47f6-b401-4032daabe0a3" (UID: "1927a92c-7b85-47f6-b401-4032daabe0a3"). InnerVolumeSpecName "kube-api-access-t6q9w". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:09:24 crc kubenswrapper[4558]: I0120 17:09:24.982017 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/660d7b2b-b480-41c1-8e10-d2216f28c591-kube-api-access-p4l5f" (OuterVolumeSpecName: "kube-api-access-p4l5f") pod "660d7b2b-b480-41c1-8e10-d2216f28c591" (UID: "660d7b2b-b480-41c1-8e10-d2216f28c591"). InnerVolumeSpecName "kube-api-access-p4l5f". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.006334 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1927a92c-7b85-47f6-b401-4032daabe0a3-config-data" (OuterVolumeSpecName: "config-data") pod "1927a92c-7b85-47f6-b401-4032daabe0a3" (UID: "1927a92c-7b85-47f6-b401-4032daabe0a3"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.009496 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/660d7b2b-b480-41c1-8e10-d2216f28c591-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "660d7b2b-b480-41c1-8e10-d2216f28c591" (UID: "660d7b2b-b480-41c1-8e10-d2216f28c591"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.009918 4558 generic.go:334] "Generic (PLEG): container finished" podID="1927a92c-7b85-47f6-b401-4032daabe0a3" containerID="51dbcfcc01818883ec45c4c1c6a0e37e606d6de400d6565d031eb8ec2eeab71e" exitCode=0 Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.009950 4558 generic.go:334] "Generic (PLEG): container finished" podID="1927a92c-7b85-47f6-b401-4032daabe0a3" containerID="69d85ebc012c0971a3ab6f07a4d07dbfbf3982312e288549fe4c398a0062158b" exitCode=143 Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.010008 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"1927a92c-7b85-47f6-b401-4032daabe0a3","Type":"ContainerDied","Data":"51dbcfcc01818883ec45c4c1c6a0e37e606d6de400d6565d031eb8ec2eeab71e"} Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.010049 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"1927a92c-7b85-47f6-b401-4032daabe0a3","Type":"ContainerDied","Data":"69d85ebc012c0971a3ab6f07a4d07dbfbf3982312e288549fe4c398a0062158b"} Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.010061 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"1927a92c-7b85-47f6-b401-4032daabe0a3","Type":"ContainerDied","Data":"8d7937ce2d9b74b07d582f7f1f76bec9293645b6cb2a357d1da0e33e95cb61ec"} Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.010084 4558 scope.go:117] "RemoveContainer" containerID="51dbcfcc01818883ec45c4c1c6a0e37e606d6de400d6565d031eb8ec2eeab71e" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.010274 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.014356 4558 generic.go:334] "Generic (PLEG): container finished" podID="660d7b2b-b480-41c1-8e10-d2216f28c591" containerID="d9c47772c06d9f094ea0cce3a6bf4fbc7f43d6366d44936501ab735811412dbf" exitCode=0 Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.014388 4558 generic.go:334] "Generic (PLEG): container finished" podID="660d7b2b-b480-41c1-8e10-d2216f28c591" containerID="e20361edf85efa6d348a94ffef199107af85dd94257c9b4b7f0a6aaab545e2d2" exitCode=143 Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.014432 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"660d7b2b-b480-41c1-8e10-d2216f28c591","Type":"ContainerDied","Data":"d9c47772c06d9f094ea0cce3a6bf4fbc7f43d6366d44936501ab735811412dbf"} Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.014520 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"660d7b2b-b480-41c1-8e10-d2216f28c591","Type":"ContainerDied","Data":"e20361edf85efa6d348a94ffef199107af85dd94257c9b4b7f0a6aaab545e2d2"} Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.014517 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.014535 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"660d7b2b-b480-41c1-8e10-d2216f28c591","Type":"ContainerDied","Data":"4406990f3bdc3b75d7df8b4f94217f405513540b1758b9a6762f8f796b319451"} Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.020713 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/660d7b2b-b480-41c1-8e10-d2216f28c591-config-data" (OuterVolumeSpecName: "config-data") pod "660d7b2b-b480-41c1-8e10-d2216f28c591" (UID: "660d7b2b-b480-41c1-8e10-d2216f28c591"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.023624 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1927a92c-7b85-47f6-b401-4032daabe0a3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1927a92c-7b85-47f6-b401-4032daabe0a3" (UID: "1927a92c-7b85-47f6-b401-4032daabe0a3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.034674 4558 scope.go:117] "RemoveContainer" containerID="69d85ebc012c0971a3ab6f07a4d07dbfbf3982312e288549fe4c398a0062158b" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.034903 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1927a92c-7b85-47f6-b401-4032daabe0a3-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "1927a92c-7b85-47f6-b401-4032daabe0a3" (UID: "1927a92c-7b85-47f6-b401-4032daabe0a3"). InnerVolumeSpecName "nova-metadata-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.053700 4558 scope.go:117] "RemoveContainer" containerID="51dbcfcc01818883ec45c4c1c6a0e37e606d6de400d6565d031eb8ec2eeab71e" Jan 20 17:09:25 crc kubenswrapper[4558]: E0120 17:09:25.054360 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"51dbcfcc01818883ec45c4c1c6a0e37e606d6de400d6565d031eb8ec2eeab71e\": container with ID starting with 51dbcfcc01818883ec45c4c1c6a0e37e606d6de400d6565d031eb8ec2eeab71e not found: ID does not exist" containerID="51dbcfcc01818883ec45c4c1c6a0e37e606d6de400d6565d031eb8ec2eeab71e" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.054405 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"51dbcfcc01818883ec45c4c1c6a0e37e606d6de400d6565d031eb8ec2eeab71e"} err="failed to get container status \"51dbcfcc01818883ec45c4c1c6a0e37e606d6de400d6565d031eb8ec2eeab71e\": rpc error: code = NotFound desc = could not find container \"51dbcfcc01818883ec45c4c1c6a0e37e606d6de400d6565d031eb8ec2eeab71e\": container with ID starting with 51dbcfcc01818883ec45c4c1c6a0e37e606d6de400d6565d031eb8ec2eeab71e not found: ID does not exist" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.054434 4558 scope.go:117] "RemoveContainer" containerID="69d85ebc012c0971a3ab6f07a4d07dbfbf3982312e288549fe4c398a0062158b" Jan 20 17:09:25 crc kubenswrapper[4558]: E0120 17:09:25.054759 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"69d85ebc012c0971a3ab6f07a4d07dbfbf3982312e288549fe4c398a0062158b\": container with ID starting with 69d85ebc012c0971a3ab6f07a4d07dbfbf3982312e288549fe4c398a0062158b not found: ID does not exist" containerID="69d85ebc012c0971a3ab6f07a4d07dbfbf3982312e288549fe4c398a0062158b" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.054793 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"69d85ebc012c0971a3ab6f07a4d07dbfbf3982312e288549fe4c398a0062158b"} err="failed to get container status \"69d85ebc012c0971a3ab6f07a4d07dbfbf3982312e288549fe4c398a0062158b\": rpc error: code = NotFound desc = could not find container \"69d85ebc012c0971a3ab6f07a4d07dbfbf3982312e288549fe4c398a0062158b\": container with ID starting with 69d85ebc012c0971a3ab6f07a4d07dbfbf3982312e288549fe4c398a0062158b not found: ID does not exist" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.054819 4558 scope.go:117] "RemoveContainer" containerID="51dbcfcc01818883ec45c4c1c6a0e37e606d6de400d6565d031eb8ec2eeab71e" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.055154 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"51dbcfcc01818883ec45c4c1c6a0e37e606d6de400d6565d031eb8ec2eeab71e"} err="failed to get container status \"51dbcfcc01818883ec45c4c1c6a0e37e606d6de400d6565d031eb8ec2eeab71e\": rpc error: code = NotFound desc = could not find container \"51dbcfcc01818883ec45c4c1c6a0e37e606d6de400d6565d031eb8ec2eeab71e\": container with ID starting with 51dbcfcc01818883ec45c4c1c6a0e37e606d6de400d6565d031eb8ec2eeab71e not found: ID does not exist" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.055230 4558 scope.go:117] "RemoveContainer" containerID="69d85ebc012c0971a3ab6f07a4d07dbfbf3982312e288549fe4c398a0062158b" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.055481 4558 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"69d85ebc012c0971a3ab6f07a4d07dbfbf3982312e288549fe4c398a0062158b"} err="failed to get container status \"69d85ebc012c0971a3ab6f07a4d07dbfbf3982312e288549fe4c398a0062158b\": rpc error: code = NotFound desc = could not find container \"69d85ebc012c0971a3ab6f07a4d07dbfbf3982312e288549fe4c398a0062158b\": container with ID starting with 69d85ebc012c0971a3ab6f07a4d07dbfbf3982312e288549fe4c398a0062158b not found: ID does not exist" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.055507 4558 scope.go:117] "RemoveContainer" containerID="d9c47772c06d9f094ea0cce3a6bf4fbc7f43d6366d44936501ab735811412dbf" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.071222 4558 scope.go:117] "RemoveContainer" containerID="e20361edf85efa6d348a94ffef199107af85dd94257c9b4b7f0a6aaab545e2d2" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.075667 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p4l5f\" (UniqueName: \"kubernetes.io/projected/660d7b2b-b480-41c1-8e10-d2216f28c591-kube-api-access-p4l5f\") on node \"crc\" DevicePath \"\"" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.075692 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1927a92c-7b85-47f6-b401-4032daabe0a3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.075703 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/660d7b2b-b480-41c1-8e10-d2216f28c591-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.075713 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/660d7b2b-b480-41c1-8e10-d2216f28c591-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.075723 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1927a92c-7b85-47f6-b401-4032daabe0a3-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.075732 4558 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/1927a92c-7b85-47f6-b401-4032daabe0a3-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.075742 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1927a92c-7b85-47f6-b401-4032daabe0a3-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.075752 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t6q9w\" (UniqueName: \"kubernetes.io/projected/1927a92c-7b85-47f6-b401-4032daabe0a3-kube-api-access-t6q9w\") on node \"crc\" DevicePath \"\"" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.075760 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/660d7b2b-b480-41c1-8e10-d2216f28c591-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.090056 4558 scope.go:117] "RemoveContainer" containerID="d9c47772c06d9f094ea0cce3a6bf4fbc7f43d6366d44936501ab735811412dbf" Jan 20 17:09:25 crc kubenswrapper[4558]: E0120 17:09:25.090413 4558 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d9c47772c06d9f094ea0cce3a6bf4fbc7f43d6366d44936501ab735811412dbf\": container with ID starting with d9c47772c06d9f094ea0cce3a6bf4fbc7f43d6366d44936501ab735811412dbf not found: ID does not exist" containerID="d9c47772c06d9f094ea0cce3a6bf4fbc7f43d6366d44936501ab735811412dbf" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.090490 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d9c47772c06d9f094ea0cce3a6bf4fbc7f43d6366d44936501ab735811412dbf"} err="failed to get container status \"d9c47772c06d9f094ea0cce3a6bf4fbc7f43d6366d44936501ab735811412dbf\": rpc error: code = NotFound desc = could not find container \"d9c47772c06d9f094ea0cce3a6bf4fbc7f43d6366d44936501ab735811412dbf\": container with ID starting with d9c47772c06d9f094ea0cce3a6bf4fbc7f43d6366d44936501ab735811412dbf not found: ID does not exist" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.090537 4558 scope.go:117] "RemoveContainer" containerID="e20361edf85efa6d348a94ffef199107af85dd94257c9b4b7f0a6aaab545e2d2" Jan 20 17:09:25 crc kubenswrapper[4558]: E0120 17:09:25.090958 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e20361edf85efa6d348a94ffef199107af85dd94257c9b4b7f0a6aaab545e2d2\": container with ID starting with e20361edf85efa6d348a94ffef199107af85dd94257c9b4b7f0a6aaab545e2d2 not found: ID does not exist" containerID="e20361edf85efa6d348a94ffef199107af85dd94257c9b4b7f0a6aaab545e2d2" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.090995 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e20361edf85efa6d348a94ffef199107af85dd94257c9b4b7f0a6aaab545e2d2"} err="failed to get container status \"e20361edf85efa6d348a94ffef199107af85dd94257c9b4b7f0a6aaab545e2d2\": rpc error: code = NotFound desc = could not find container \"e20361edf85efa6d348a94ffef199107af85dd94257c9b4b7f0a6aaab545e2d2\": container with ID starting with e20361edf85efa6d348a94ffef199107af85dd94257c9b4b7f0a6aaab545e2d2 not found: ID does not exist" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.091023 4558 scope.go:117] "RemoveContainer" containerID="d9c47772c06d9f094ea0cce3a6bf4fbc7f43d6366d44936501ab735811412dbf" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.091309 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d9c47772c06d9f094ea0cce3a6bf4fbc7f43d6366d44936501ab735811412dbf"} err="failed to get container status \"d9c47772c06d9f094ea0cce3a6bf4fbc7f43d6366d44936501ab735811412dbf\": rpc error: code = NotFound desc = could not find container \"d9c47772c06d9f094ea0cce3a6bf4fbc7f43d6366d44936501ab735811412dbf\": container with ID starting with d9c47772c06d9f094ea0cce3a6bf4fbc7f43d6366d44936501ab735811412dbf not found: ID does not exist" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.091332 4558 scope.go:117] "RemoveContainer" containerID="e20361edf85efa6d348a94ffef199107af85dd94257c9b4b7f0a6aaab545e2d2" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.094565 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e20361edf85efa6d348a94ffef199107af85dd94257c9b4b7f0a6aaab545e2d2"} err="failed to get container status \"e20361edf85efa6d348a94ffef199107af85dd94257c9b4b7f0a6aaab545e2d2\": rpc error: code = NotFound desc = could not find 
container \"e20361edf85efa6d348a94ffef199107af85dd94257c9b4b7f0a6aaab545e2d2\": container with ID starting with e20361edf85efa6d348a94ffef199107af85dd94257c9b4b7f0a6aaab545e2d2 not found: ID does not exist" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.350447 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.362149 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.369523 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.377221 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.382505 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:09:25 crc kubenswrapper[4558]: E0120 17:09:25.383015 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="660d7b2b-b480-41c1-8e10-d2216f28c591" containerName="nova-api-log" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.383036 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="660d7b2b-b480-41c1-8e10-d2216f28c591" containerName="nova-api-log" Jan 20 17:09:25 crc kubenswrapper[4558]: E0120 17:09:25.383062 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="660d7b2b-b480-41c1-8e10-d2216f28c591" containerName="nova-api-api" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.383069 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="660d7b2b-b480-41c1-8e10-d2216f28c591" containerName="nova-api-api" Jan 20 17:09:25 crc kubenswrapper[4558]: E0120 17:09:25.383081 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d0bb0682-179e-4316-98d1-55c1912cc154" containerName="nova-manage" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.383087 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0bb0682-179e-4316-98d1-55c1912cc154" containerName="nova-manage" Jan 20 17:09:25 crc kubenswrapper[4558]: E0120 17:09:25.383100 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1927a92c-7b85-47f6-b401-4032daabe0a3" containerName="nova-metadata-log" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.383107 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1927a92c-7b85-47f6-b401-4032daabe0a3" containerName="nova-metadata-log" Jan 20 17:09:25 crc kubenswrapper[4558]: E0120 17:09:25.383117 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1927a92c-7b85-47f6-b401-4032daabe0a3" containerName="nova-metadata-metadata" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.383124 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1927a92c-7b85-47f6-b401-4032daabe0a3" containerName="nova-metadata-metadata" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.383369 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="1927a92c-7b85-47f6-b401-4032daabe0a3" containerName="nova-metadata-metadata" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.383409 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="1927a92c-7b85-47f6-b401-4032daabe0a3" containerName="nova-metadata-log" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.383422 4558 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="660d7b2b-b480-41c1-8e10-d2216f28c591" containerName="nova-api-api" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.383430 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d0bb0682-179e-4316-98d1-55c1912cc154" containerName="nova-manage" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.383445 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="660d7b2b-b480-41c1-8e10-d2216f28c591" containerName="nova-api-log" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.384570 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.392761 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.393252 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-metadata-internal-svc" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.394208 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.394300 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.394544 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-metadata-config-data" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.399040 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-api-config-data" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.407420 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.585990 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0aba62a2-39a5-44ba-952b-68d05f05ba3d-config-data\") pod \"nova-api-0\" (UID: \"0aba62a2-39a5-44ba-952b-68d05f05ba3d\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.586251 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6d5q8\" (UniqueName: \"kubernetes.io/projected/0aba62a2-39a5-44ba-952b-68d05f05ba3d-kube-api-access-6d5q8\") pod \"nova-api-0\" (UID: \"0aba62a2-39a5-44ba-952b-68d05f05ba3d\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.586388 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d46a03de-3bdb-4932-bd49-da4fecd91907-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d46a03de-3bdb-4932-bd49-da4fecd91907\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.586476 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d46a03de-3bdb-4932-bd49-da4fecd91907-config-data\") pod \"nova-metadata-0\" (UID: \"d46a03de-3bdb-4932-bd49-da4fecd91907\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.586560 4558 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0aba62a2-39a5-44ba-952b-68d05f05ba3d-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"0aba62a2-39a5-44ba-952b-68d05f05ba3d\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.586696 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lxkkc\" (UniqueName: \"kubernetes.io/projected/d46a03de-3bdb-4932-bd49-da4fecd91907-kube-api-access-lxkkc\") pod \"nova-metadata-0\" (UID: \"d46a03de-3bdb-4932-bd49-da4fecd91907\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.586821 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/d46a03de-3bdb-4932-bd49-da4fecd91907-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"d46a03de-3bdb-4932-bd49-da4fecd91907\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.586943 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d46a03de-3bdb-4932-bd49-da4fecd91907-logs\") pod \"nova-metadata-0\" (UID: \"d46a03de-3bdb-4932-bd49-da4fecd91907\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.587066 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0aba62a2-39a5-44ba-952b-68d05f05ba3d-logs\") pod \"nova-api-0\" (UID: \"0aba62a2-39a5-44ba-952b-68d05f05ba3d\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.689237 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0aba62a2-39a5-44ba-952b-68d05f05ba3d-config-data\") pod \"nova-api-0\" (UID: \"0aba62a2-39a5-44ba-952b-68d05f05ba3d\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.689409 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6d5q8\" (UniqueName: \"kubernetes.io/projected/0aba62a2-39a5-44ba-952b-68d05f05ba3d-kube-api-access-6d5q8\") pod \"nova-api-0\" (UID: \"0aba62a2-39a5-44ba-952b-68d05f05ba3d\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.689442 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d46a03de-3bdb-4932-bd49-da4fecd91907-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d46a03de-3bdb-4932-bd49-da4fecd91907\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.689460 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d46a03de-3bdb-4932-bd49-da4fecd91907-config-data\") pod \"nova-metadata-0\" (UID: \"d46a03de-3bdb-4932-bd49-da4fecd91907\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.689485 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/0aba62a2-39a5-44ba-952b-68d05f05ba3d-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"0aba62a2-39a5-44ba-952b-68d05f05ba3d\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.689546 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lxkkc\" (UniqueName: \"kubernetes.io/projected/d46a03de-3bdb-4932-bd49-da4fecd91907-kube-api-access-lxkkc\") pod \"nova-metadata-0\" (UID: \"d46a03de-3bdb-4932-bd49-da4fecd91907\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.689608 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/d46a03de-3bdb-4932-bd49-da4fecd91907-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"d46a03de-3bdb-4932-bd49-da4fecd91907\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.689657 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d46a03de-3bdb-4932-bd49-da4fecd91907-logs\") pod \"nova-metadata-0\" (UID: \"d46a03de-3bdb-4932-bd49-da4fecd91907\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.689715 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0aba62a2-39a5-44ba-952b-68d05f05ba3d-logs\") pod \"nova-api-0\" (UID: \"0aba62a2-39a5-44ba-952b-68d05f05ba3d\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.690128 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0aba62a2-39a5-44ba-952b-68d05f05ba3d-logs\") pod \"nova-api-0\" (UID: \"0aba62a2-39a5-44ba-952b-68d05f05ba3d\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.691614 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d46a03de-3bdb-4932-bd49-da4fecd91907-logs\") pod \"nova-metadata-0\" (UID: \"d46a03de-3bdb-4932-bd49-da4fecd91907\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.695094 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/d46a03de-3bdb-4932-bd49-da4fecd91907-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"d46a03de-3bdb-4932-bd49-da4fecd91907\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.695158 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d46a03de-3bdb-4932-bd49-da4fecd91907-config-data\") pod \"nova-metadata-0\" (UID: \"d46a03de-3bdb-4932-bd49-da4fecd91907\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.695732 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0aba62a2-39a5-44ba-952b-68d05f05ba3d-config-data\") pod \"nova-api-0\" (UID: \"0aba62a2-39a5-44ba-952b-68d05f05ba3d\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.696135 4558 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0aba62a2-39a5-44ba-952b-68d05f05ba3d-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"0aba62a2-39a5-44ba-952b-68d05f05ba3d\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.698304 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d46a03de-3bdb-4932-bd49-da4fecd91907-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"d46a03de-3bdb-4932-bd49-da4fecd91907\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.706408 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lxkkc\" (UniqueName: \"kubernetes.io/projected/d46a03de-3bdb-4932-bd49-da4fecd91907-kube-api-access-lxkkc\") pod \"nova-metadata-0\" (UID: \"d46a03de-3bdb-4932-bd49-da4fecd91907\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.708641 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6d5q8\" (UniqueName: \"kubernetes.io/projected/0aba62a2-39a5-44ba-952b-68d05f05ba3d-kube-api-access-6d5q8\") pod \"nova-api-0\" (UID: \"0aba62a2-39a5-44ba-952b-68d05f05ba3d\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.710600 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:25 crc kubenswrapper[4558]: I0120 17:09:25.717887 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:09:26 crc kubenswrapper[4558]: I0120 17:09:26.156590 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:09:26 crc kubenswrapper[4558]: I0120 17:09:26.228113 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:09:26 crc kubenswrapper[4558]: W0120 17:09:26.246958 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0aba62a2_39a5_44ba_952b_68d05f05ba3d.slice/crio-f0452e67edd05af644b70a290dcfdd7068bd084cf45a61561eff60d401aba6a7 WatchSource:0}: Error finding container f0452e67edd05af644b70a290dcfdd7068bd084cf45a61561eff60d401aba6a7: Status 404 returned error can't find the container with id f0452e67edd05af644b70a290dcfdd7068bd084cf45a61561eff60d401aba6a7 Jan 20 17:09:26 crc kubenswrapper[4558]: I0120 17:09:26.590472 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1927a92c-7b85-47f6-b401-4032daabe0a3" path="/var/lib/kubelet/pods/1927a92c-7b85-47f6-b401-4032daabe0a3/volumes" Jan 20 17:09:26 crc kubenswrapper[4558]: I0120 17:09:26.591483 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="660d7b2b-b480-41c1-8e10-d2216f28c591" path="/var/lib/kubelet/pods/660d7b2b-b480-41c1-8e10-d2216f28c591/volumes" Jan 20 17:09:26 crc kubenswrapper[4558]: I0120 17:09:26.740881 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:09:26 crc kubenswrapper[4558]: I0120 17:09:26.819080 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f640bcab-f4ac-417c-86e6-af44f26a0ca9-combined-ca-bundle\") pod \"f640bcab-f4ac-417c-86e6-af44f26a0ca9\" (UID: \"f640bcab-f4ac-417c-86e6-af44f26a0ca9\") " Jan 20 17:09:26 crc kubenswrapper[4558]: I0120 17:09:26.819291 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f640bcab-f4ac-417c-86e6-af44f26a0ca9-config-data\") pod \"f640bcab-f4ac-417c-86e6-af44f26a0ca9\" (UID: \"f640bcab-f4ac-417c-86e6-af44f26a0ca9\") " Jan 20 17:09:26 crc kubenswrapper[4558]: I0120 17:09:26.819401 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sr5l5\" (UniqueName: \"kubernetes.io/projected/f640bcab-f4ac-417c-86e6-af44f26a0ca9-kube-api-access-sr5l5\") pod \"f640bcab-f4ac-417c-86e6-af44f26a0ca9\" (UID: \"f640bcab-f4ac-417c-86e6-af44f26a0ca9\") " Jan 20 17:09:26 crc kubenswrapper[4558]: I0120 17:09:26.828472 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f640bcab-f4ac-417c-86e6-af44f26a0ca9-kube-api-access-sr5l5" (OuterVolumeSpecName: "kube-api-access-sr5l5") pod "f640bcab-f4ac-417c-86e6-af44f26a0ca9" (UID: "f640bcab-f4ac-417c-86e6-af44f26a0ca9"). InnerVolumeSpecName "kube-api-access-sr5l5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:09:26 crc kubenswrapper[4558]: I0120 17:09:26.849959 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f640bcab-f4ac-417c-86e6-af44f26a0ca9-config-data" (OuterVolumeSpecName: "config-data") pod "f640bcab-f4ac-417c-86e6-af44f26a0ca9" (UID: "f640bcab-f4ac-417c-86e6-af44f26a0ca9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:09:26 crc kubenswrapper[4558]: I0120 17:09:26.853561 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f640bcab-f4ac-417c-86e6-af44f26a0ca9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f640bcab-f4ac-417c-86e6-af44f26a0ca9" (UID: "f640bcab-f4ac-417c-86e6-af44f26a0ca9"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:09:26 crc kubenswrapper[4558]: I0120 17:09:26.922109 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f640bcab-f4ac-417c-86e6-af44f26a0ca9-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:09:26 crc kubenswrapper[4558]: I0120 17:09:26.922142 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sr5l5\" (UniqueName: \"kubernetes.io/projected/f640bcab-f4ac-417c-86e6-af44f26a0ca9-kube-api-access-sr5l5\") on node \"crc\" DevicePath \"\"" Jan 20 17:09:26 crc kubenswrapper[4558]: I0120 17:09:26.922158 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f640bcab-f4ac-417c-86e6-af44f26a0ca9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:09:27 crc kubenswrapper[4558]: I0120 17:09:27.051227 4558 generic.go:334] "Generic (PLEG): container finished" podID="f640bcab-f4ac-417c-86e6-af44f26a0ca9" containerID="df65964e41193f502824f8b897729832e4e3dd70022cdd614c0000b9be9cf213" exitCode=0 Jan 20 17:09:27 crc kubenswrapper[4558]: I0120 17:09:27.051322 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"f640bcab-f4ac-417c-86e6-af44f26a0ca9","Type":"ContainerDied","Data":"df65964e41193f502824f8b897729832e4e3dd70022cdd614c0000b9be9cf213"} Jan 20 17:09:27 crc kubenswrapper[4558]: I0120 17:09:27.051394 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"f640bcab-f4ac-417c-86e6-af44f26a0ca9","Type":"ContainerDied","Data":"00f862df32f86bb023d0e3bd9e9b04f6266ab1cdd61813e8b14bf474e0509a32"} Jan 20 17:09:27 crc kubenswrapper[4558]: I0120 17:09:27.051420 4558 scope.go:117] "RemoveContainer" containerID="df65964e41193f502824f8b897729832e4e3dd70022cdd614c0000b9be9cf213" Jan 20 17:09:27 crc kubenswrapper[4558]: I0120 17:09:27.051640 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:09:27 crc kubenswrapper[4558]: I0120 17:09:27.053411 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"0aba62a2-39a5-44ba-952b-68d05f05ba3d","Type":"ContainerStarted","Data":"24ba9021a6d7b15c6de341be07bead07b860fa618692fdcc0f9d21e6893ae9ea"} Jan 20 17:09:27 crc kubenswrapper[4558]: I0120 17:09:27.053456 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"0aba62a2-39a5-44ba-952b-68d05f05ba3d","Type":"ContainerStarted","Data":"a432d4af917fe8731860e9db28949c193d839b884d400249de58aedacd69560e"} Jan 20 17:09:27 crc kubenswrapper[4558]: I0120 17:09:27.053472 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"0aba62a2-39a5-44ba-952b-68d05f05ba3d","Type":"ContainerStarted","Data":"f0452e67edd05af644b70a290dcfdd7068bd084cf45a61561eff60d401aba6a7"} Jan 20 17:09:27 crc kubenswrapper[4558]: I0120 17:09:27.056520 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"d46a03de-3bdb-4932-bd49-da4fecd91907","Type":"ContainerStarted","Data":"366eeb0f21b82fded41d4ec3fe46359cd74195c6c393c5080bb24b221fbf0fd0"} Jan 20 17:09:27 crc kubenswrapper[4558]: I0120 17:09:27.056557 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"d46a03de-3bdb-4932-bd49-da4fecd91907","Type":"ContainerStarted","Data":"7c31e2893dd9cac6872008d8ef48b64f3102f3ebfec35cce0281a9b91ba96cb3"} Jan 20 17:09:27 crc kubenswrapper[4558]: I0120 17:09:27.056573 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"d46a03de-3bdb-4932-bd49-da4fecd91907","Type":"ContainerStarted","Data":"dee4754d0a4f45831cd7b7d7183cc1ecf64ae33e72b0d43a0b1701b3ff3b64db"} Jan 20 17:09:27 crc kubenswrapper[4558]: I0120 17:09:27.080371 4558 scope.go:117] "RemoveContainer" containerID="df65964e41193f502824f8b897729832e4e3dd70022cdd614c0000b9be9cf213" Jan 20 17:09:27 crc kubenswrapper[4558]: E0120 17:09:27.080686 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"df65964e41193f502824f8b897729832e4e3dd70022cdd614c0000b9be9cf213\": container with ID starting with df65964e41193f502824f8b897729832e4e3dd70022cdd614c0000b9be9cf213 not found: ID does not exist" containerID="df65964e41193f502824f8b897729832e4e3dd70022cdd614c0000b9be9cf213" Jan 20 17:09:27 crc kubenswrapper[4558]: I0120 17:09:27.080722 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"df65964e41193f502824f8b897729832e4e3dd70022cdd614c0000b9be9cf213"} err="failed to get container status \"df65964e41193f502824f8b897729832e4e3dd70022cdd614c0000b9be9cf213\": rpc error: code = NotFound desc = could not find container \"df65964e41193f502824f8b897729832e4e3dd70022cdd614c0000b9be9cf213\": container with ID starting with df65964e41193f502824f8b897729832e4e3dd70022cdd614c0000b9be9cf213 not found: ID does not exist" Jan 20 17:09:27 crc kubenswrapper[4558]: I0120 17:09:27.087506 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-api-0" podStartSLOduration=2.087491827 podStartE2EDuration="2.087491827s" podCreationTimestamp="2026-01-20 17:09:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 
00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:09:27.077696737 +0000 UTC m=+1660.838034704" watchObservedRunningTime="2026-01-20 17:09:27.087491827 +0000 UTC m=+1660.847829793" Jan 20 17:09:27 crc kubenswrapper[4558]: I0120 17:09:27.092647 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:09:27 crc kubenswrapper[4558]: I0120 17:09:27.100529 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:09:27 crc kubenswrapper[4558]: I0120 17:09:27.115682 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:09:27 crc kubenswrapper[4558]: E0120 17:09:27.116148 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f640bcab-f4ac-417c-86e6-af44f26a0ca9" containerName="nova-scheduler-scheduler" Jan 20 17:09:27 crc kubenswrapper[4558]: I0120 17:09:27.116182 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f640bcab-f4ac-417c-86e6-af44f26a0ca9" containerName="nova-scheduler-scheduler" Jan 20 17:09:27 crc kubenswrapper[4558]: I0120 17:09:27.116381 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f640bcab-f4ac-417c-86e6-af44f26a0ca9" containerName="nova-scheduler-scheduler" Jan 20 17:09:27 crc kubenswrapper[4558]: I0120 17:09:27.116850 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-metadata-0" podStartSLOduration=2.116825356 podStartE2EDuration="2.116825356s" podCreationTimestamp="2026-01-20 17:09:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:09:27.10762738 +0000 UTC m=+1660.867965347" watchObservedRunningTime="2026-01-20 17:09:27.116825356 +0000 UTC m=+1660.877163323" Jan 20 17:09:27 crc kubenswrapper[4558]: I0120 17:09:27.117071 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:09:27 crc kubenswrapper[4558]: I0120 17:09:27.118892 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-scheduler-config-data" Jan 20 17:09:27 crc kubenswrapper[4558]: I0120 17:09:27.126788 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kg6dq\" (UniqueName: \"kubernetes.io/projected/9cf7d5a4-78ef-4331-b28a-4ef3f8f06b35-kube-api-access-kg6dq\") pod \"nova-scheduler-0\" (UID: \"9cf7d5a4-78ef-4331-b28a-4ef3f8f06b35\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:09:27 crc kubenswrapper[4558]: I0120 17:09:27.126898 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9cf7d5a4-78ef-4331-b28a-4ef3f8f06b35-config-data\") pod \"nova-scheduler-0\" (UID: \"9cf7d5a4-78ef-4331-b28a-4ef3f8f06b35\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:09:27 crc kubenswrapper[4558]: I0120 17:09:27.126978 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9cf7d5a4-78ef-4331-b28a-4ef3f8f06b35-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"9cf7d5a4-78ef-4331-b28a-4ef3f8f06b35\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:09:27 crc kubenswrapper[4558]: I0120 17:09:27.135639 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:09:27 crc kubenswrapper[4558]: I0120 17:09:27.229120 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kg6dq\" (UniqueName: \"kubernetes.io/projected/9cf7d5a4-78ef-4331-b28a-4ef3f8f06b35-kube-api-access-kg6dq\") pod \"nova-scheduler-0\" (UID: \"9cf7d5a4-78ef-4331-b28a-4ef3f8f06b35\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:09:27 crc kubenswrapper[4558]: I0120 17:09:27.229222 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9cf7d5a4-78ef-4331-b28a-4ef3f8f06b35-config-data\") pod \"nova-scheduler-0\" (UID: \"9cf7d5a4-78ef-4331-b28a-4ef3f8f06b35\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:09:27 crc kubenswrapper[4558]: I0120 17:09:27.229278 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9cf7d5a4-78ef-4331-b28a-4ef3f8f06b35-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"9cf7d5a4-78ef-4331-b28a-4ef3f8f06b35\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:09:27 crc kubenswrapper[4558]: I0120 17:09:27.234105 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9cf7d5a4-78ef-4331-b28a-4ef3f8f06b35-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"9cf7d5a4-78ef-4331-b28a-4ef3f8f06b35\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:09:27 crc kubenswrapper[4558]: I0120 17:09:27.234820 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9cf7d5a4-78ef-4331-b28a-4ef3f8f06b35-config-data\") pod \"nova-scheduler-0\" (UID: \"9cf7d5a4-78ef-4331-b28a-4ef3f8f06b35\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:09:27 crc kubenswrapper[4558]: I0120 17:09:27.244052 
4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kg6dq\" (UniqueName: \"kubernetes.io/projected/9cf7d5a4-78ef-4331-b28a-4ef3f8f06b35-kube-api-access-kg6dq\") pod \"nova-scheduler-0\" (UID: \"9cf7d5a4-78ef-4331-b28a-4ef3f8f06b35\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:09:27 crc kubenswrapper[4558]: I0120 17:09:27.331848 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:09:27 crc kubenswrapper[4558]: I0120 17:09:27.435806 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:09:27 crc kubenswrapper[4558]: I0120 17:09:27.842520 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:09:28 crc kubenswrapper[4558]: I0120 17:09:28.078831 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"9cf7d5a4-78ef-4331-b28a-4ef3f8f06b35","Type":"ContainerStarted","Data":"3465a9415f40dc71ee80e7a40e1d193a38dbe6748e350f8cabf08597b3055836"} Jan 20 17:09:28 crc kubenswrapper[4558]: I0120 17:09:28.078882 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"9cf7d5a4-78ef-4331-b28a-4ef3f8f06b35","Type":"ContainerStarted","Data":"2664c0e0fd94d254732ddab42625d014c1cc982ee00882133439035e83f10aaf"} Jan 20 17:09:28 crc kubenswrapper[4558]: I0120 17:09:28.099430 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-scheduler-0" podStartSLOduration=1.099411171 podStartE2EDuration="1.099411171s" podCreationTimestamp="2026-01-20 17:09:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:09:28.092525413 +0000 UTC m=+1661.852863380" watchObservedRunningTime="2026-01-20 17:09:28.099411171 +0000 UTC m=+1661.859749138" Jan 20 17:09:28 crc kubenswrapper[4558]: I0120 17:09:28.578090 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f640bcab-f4ac-417c-86e6-af44f26a0ca9" path="/var/lib/kubelet/pods/f640bcab-f4ac-417c-86e6-af44f26a0ca9/volumes" Jan 20 17:09:29 crc kubenswrapper[4558]: I0120 17:09:29.341743 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:09:29 crc kubenswrapper[4558]: I0120 17:09:29.362085 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:09:30 crc kubenswrapper[4558]: I0120 17:09:30.121322 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:09:30 crc kubenswrapper[4558]: I0120 17:09:30.246040 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-cell-mapping-5lrhm"] Jan 20 17:09:30 crc kubenswrapper[4558]: I0120 17:09:30.247379 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-5lrhm" Jan 20 17:09:30 crc kubenswrapper[4558]: I0120 17:09:30.257884 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-manage-scripts" Jan 20 17:09:30 crc kubenswrapper[4558]: I0120 17:09:30.261643 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-manage-config-data" Jan 20 17:09:30 crc kubenswrapper[4558]: I0120 17:09:30.264926 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-cell-mapping-5lrhm"] Jan 20 17:09:30 crc kubenswrapper[4558]: I0120 17:09:30.394336 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mg7kj\" (UniqueName: \"kubernetes.io/projected/7e920ead-f95a-434d-9aa0-47342d337eae-kube-api-access-mg7kj\") pod \"nova-cell1-cell-mapping-5lrhm\" (UID: \"7e920ead-f95a-434d-9aa0-47342d337eae\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-5lrhm" Jan 20 17:09:30 crc kubenswrapper[4558]: I0120 17:09:30.394426 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e920ead-f95a-434d-9aa0-47342d337eae-config-data\") pod \"nova-cell1-cell-mapping-5lrhm\" (UID: \"7e920ead-f95a-434d-9aa0-47342d337eae\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-5lrhm" Jan 20 17:09:30 crc kubenswrapper[4558]: I0120 17:09:30.394464 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7e920ead-f95a-434d-9aa0-47342d337eae-scripts\") pod \"nova-cell1-cell-mapping-5lrhm\" (UID: \"7e920ead-f95a-434d-9aa0-47342d337eae\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-5lrhm" Jan 20 17:09:30 crc kubenswrapper[4558]: I0120 17:09:30.394511 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e920ead-f95a-434d-9aa0-47342d337eae-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-5lrhm\" (UID: \"7e920ead-f95a-434d-9aa0-47342d337eae\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-5lrhm" Jan 20 17:09:30 crc kubenswrapper[4558]: I0120 17:09:30.497417 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mg7kj\" (UniqueName: \"kubernetes.io/projected/7e920ead-f95a-434d-9aa0-47342d337eae-kube-api-access-mg7kj\") pod \"nova-cell1-cell-mapping-5lrhm\" (UID: \"7e920ead-f95a-434d-9aa0-47342d337eae\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-5lrhm" Jan 20 17:09:30 crc kubenswrapper[4558]: I0120 17:09:30.497563 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e920ead-f95a-434d-9aa0-47342d337eae-config-data\") pod \"nova-cell1-cell-mapping-5lrhm\" (UID: \"7e920ead-f95a-434d-9aa0-47342d337eae\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-5lrhm" Jan 20 17:09:30 crc kubenswrapper[4558]: I0120 17:09:30.497618 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7e920ead-f95a-434d-9aa0-47342d337eae-scripts\") pod \"nova-cell1-cell-mapping-5lrhm\" (UID: \"7e920ead-f95a-434d-9aa0-47342d337eae\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-5lrhm" Jan 20 17:09:30 crc kubenswrapper[4558]: I0120 
17:09:30.497705 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e920ead-f95a-434d-9aa0-47342d337eae-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-5lrhm\" (UID: \"7e920ead-f95a-434d-9aa0-47342d337eae\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-5lrhm" Jan 20 17:09:30 crc kubenswrapper[4558]: I0120 17:09:30.506208 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e920ead-f95a-434d-9aa0-47342d337eae-config-data\") pod \"nova-cell1-cell-mapping-5lrhm\" (UID: \"7e920ead-f95a-434d-9aa0-47342d337eae\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-5lrhm" Jan 20 17:09:30 crc kubenswrapper[4558]: I0120 17:09:30.506466 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7e920ead-f95a-434d-9aa0-47342d337eae-scripts\") pod \"nova-cell1-cell-mapping-5lrhm\" (UID: \"7e920ead-f95a-434d-9aa0-47342d337eae\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-5lrhm" Jan 20 17:09:30 crc kubenswrapper[4558]: I0120 17:09:30.506473 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e920ead-f95a-434d-9aa0-47342d337eae-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-5lrhm\" (UID: \"7e920ead-f95a-434d-9aa0-47342d337eae\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-5lrhm" Jan 20 17:09:30 crc kubenswrapper[4558]: I0120 17:09:30.512824 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mg7kj\" (UniqueName: \"kubernetes.io/projected/7e920ead-f95a-434d-9aa0-47342d337eae-kube-api-access-mg7kj\") pod \"nova-cell1-cell-mapping-5lrhm\" (UID: \"7e920ead-f95a-434d-9aa0-47342d337eae\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-5lrhm" Jan 20 17:09:30 crc kubenswrapper[4558]: I0120 17:09:30.567399 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-5lrhm" Jan 20 17:09:30 crc kubenswrapper[4558]: I0120 17:09:30.569103 4558 scope.go:117] "RemoveContainer" containerID="a711a286282347590079d2447ef37fa9ed0f11085d6d7a65f85e65c1c2df608f" Jan 20 17:09:30 crc kubenswrapper[4558]: E0120 17:09:30.569303 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:09:30 crc kubenswrapper[4558]: I0120 17:09:30.711452 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:30 crc kubenswrapper[4558]: I0120 17:09:30.711558 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:30 crc kubenswrapper[4558]: W0120 17:09:30.985330 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7e920ead_f95a_434d_9aa0_47342d337eae.slice/crio-24b03733c471425658e67a1be0b3130fc786b460de666fe9d6f170f923fe691e WatchSource:0}: Error finding container 24b03733c471425658e67a1be0b3130fc786b460de666fe9d6f170f923fe691e: Status 404 returned error can't find the container with id 24b03733c471425658e67a1be0b3130fc786b460de666fe9d6f170f923fe691e Jan 20 17:09:30 crc kubenswrapper[4558]: I0120 17:09:30.986997 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-cell-mapping-5lrhm"] Jan 20 17:09:31 crc kubenswrapper[4558]: I0120 17:09:31.117546 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-5lrhm" event={"ID":"7e920ead-f95a-434d-9aa0-47342d337eae","Type":"ContainerStarted","Data":"24b03733c471425658e67a1be0b3130fc786b460de666fe9d6f170f923fe691e"} Jan 20 17:09:32 crc kubenswrapper[4558]: I0120 17:09:32.139468 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-5lrhm" event={"ID":"7e920ead-f95a-434d-9aa0-47342d337eae","Type":"ContainerStarted","Data":"3d9d12fe62d20a0f517381a88d1e4c4ebb688df85f473c30c5913811a68df201"} Jan 20 17:09:32 crc kubenswrapper[4558]: I0120 17:09:32.163737 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-5lrhm" podStartSLOduration=2.163712911 podStartE2EDuration="2.163712911s" podCreationTimestamp="2026-01-20 17:09:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:09:32.155186328 +0000 UTC m=+1665.915524295" watchObservedRunningTime="2026-01-20 17:09:32.163712911 +0000 UTC m=+1665.924050878" Jan 20 17:09:32 crc kubenswrapper[4558]: I0120 17:09:32.436228 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:09:35 crc kubenswrapper[4558]: I0120 17:09:35.711497 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:35 crc kubenswrapper[4558]: I0120 17:09:35.712010 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:35 crc kubenswrapper[4558]: I0120 17:09:35.718650 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:09:35 crc kubenswrapper[4558]: I0120 17:09:35.718699 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:09:36 crc kubenswrapper[4558]: I0120 17:09:36.186137 4558 generic.go:334] "Generic (PLEG): container finished" podID="7e920ead-f95a-434d-9aa0-47342d337eae" containerID="3d9d12fe62d20a0f517381a88d1e4c4ebb688df85f473c30c5913811a68df201" exitCode=0 Jan 20 17:09:36 crc kubenswrapper[4558]: I0120 17:09:36.186195 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-5lrhm" event={"ID":"7e920ead-f95a-434d-9aa0-47342d337eae","Type":"ContainerDied","Data":"3d9d12fe62d20a0f517381a88d1e4c4ebb688df85f473c30c5913811a68df201"} Jan 20 17:09:36 crc kubenswrapper[4558]: I0120 17:09:36.731317 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-metadata-0" podUID="d46a03de-3bdb-4932-bd49-da4fecd91907" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.1.137:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:09:36 crc kubenswrapper[4558]: I0120 17:09:36.731371 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-metadata-0" podUID="d46a03de-3bdb-4932-bd49-da4fecd91907" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.1.137:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:09:36 crc kubenswrapper[4558]: I0120 17:09:36.773396 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" podUID="0aba62a2-39a5-44ba-952b-68d05f05ba3d" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.1.138:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 20 17:09:36 crc kubenswrapper[4558]: I0120 17:09:36.814371 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" podUID="0aba62a2-39a5-44ba-952b-68d05f05ba3d" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.1.138:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 20 17:09:37 crc kubenswrapper[4558]: I0120 17:09:37.436600 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:09:37 crc kubenswrapper[4558]: I0120 17:09:37.462880 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:09:37 crc kubenswrapper[4558]: I0120 17:09:37.524023 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-5lrhm" Jan 20 17:09:37 crc kubenswrapper[4558]: I0120 17:09:37.655388 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg7kj\" (UniqueName: \"kubernetes.io/projected/7e920ead-f95a-434d-9aa0-47342d337eae-kube-api-access-mg7kj\") pod \"7e920ead-f95a-434d-9aa0-47342d337eae\" (UID: \"7e920ead-f95a-434d-9aa0-47342d337eae\") " Jan 20 17:09:37 crc kubenswrapper[4558]: I0120 17:09:37.655516 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e920ead-f95a-434d-9aa0-47342d337eae-config-data\") pod \"7e920ead-f95a-434d-9aa0-47342d337eae\" (UID: \"7e920ead-f95a-434d-9aa0-47342d337eae\") " Jan 20 17:09:37 crc kubenswrapper[4558]: I0120 17:09:37.655672 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e920ead-f95a-434d-9aa0-47342d337eae-combined-ca-bundle\") pod \"7e920ead-f95a-434d-9aa0-47342d337eae\" (UID: \"7e920ead-f95a-434d-9aa0-47342d337eae\") " Jan 20 17:09:37 crc kubenswrapper[4558]: I0120 17:09:37.655884 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7e920ead-f95a-434d-9aa0-47342d337eae-scripts\") pod \"7e920ead-f95a-434d-9aa0-47342d337eae\" (UID: \"7e920ead-f95a-434d-9aa0-47342d337eae\") " Jan 20 17:09:37 crc kubenswrapper[4558]: I0120 17:09:37.663735 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e920ead-f95a-434d-9aa0-47342d337eae-scripts" (OuterVolumeSpecName: "scripts") pod "7e920ead-f95a-434d-9aa0-47342d337eae" (UID: "7e920ead-f95a-434d-9aa0-47342d337eae"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:09:37 crc kubenswrapper[4558]: I0120 17:09:37.677246 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7e920ead-f95a-434d-9aa0-47342d337eae-kube-api-access-mg7kj" (OuterVolumeSpecName: "kube-api-access-mg7kj") pod "7e920ead-f95a-434d-9aa0-47342d337eae" (UID: "7e920ead-f95a-434d-9aa0-47342d337eae"). InnerVolumeSpecName "kube-api-access-mg7kj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:09:37 crc kubenswrapper[4558]: I0120 17:09:37.684181 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e920ead-f95a-434d-9aa0-47342d337eae-config-data" (OuterVolumeSpecName: "config-data") pod "7e920ead-f95a-434d-9aa0-47342d337eae" (UID: "7e920ead-f95a-434d-9aa0-47342d337eae"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:09:37 crc kubenswrapper[4558]: I0120 17:09:37.684275 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e920ead-f95a-434d-9aa0-47342d337eae-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7e920ead-f95a-434d-9aa0-47342d337eae" (UID: "7e920ead-f95a-434d-9aa0-47342d337eae"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:09:37 crc kubenswrapper[4558]: I0120 17:09:37.757983 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7e920ead-f95a-434d-9aa0-47342d337eae-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:09:37 crc kubenswrapper[4558]: I0120 17:09:37.758016 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg7kj\" (UniqueName: \"kubernetes.io/projected/7e920ead-f95a-434d-9aa0-47342d337eae-kube-api-access-mg7kj\") on node \"crc\" DevicePath \"\"" Jan 20 17:09:37 crc kubenswrapper[4558]: I0120 17:09:37.758031 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e920ead-f95a-434d-9aa0-47342d337eae-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:09:37 crc kubenswrapper[4558]: I0120 17:09:37.758045 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e920ead-f95a-434d-9aa0-47342d337eae-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:09:38 crc kubenswrapper[4558]: I0120 17:09:38.205146 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-5lrhm" event={"ID":"7e920ead-f95a-434d-9aa0-47342d337eae","Type":"ContainerDied","Data":"24b03733c471425658e67a1be0b3130fc786b460de666fe9d6f170f923fe691e"} Jan 20 17:09:38 crc kubenswrapper[4558]: I0120 17:09:38.205236 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="24b03733c471425658e67a1be0b3130fc786b460de666fe9d6f170f923fe691e" Jan 20 17:09:38 crc kubenswrapper[4558]: I0120 17:09:38.205372 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-5lrhm" Jan 20 17:09:38 crc kubenswrapper[4558]: I0120 17:09:38.237826 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:09:38 crc kubenswrapper[4558]: I0120 17:09:38.369375 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:09:38 crc kubenswrapper[4558]: I0120 17:09:38.369733 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="0aba62a2-39a5-44ba-952b-68d05f05ba3d" containerName="nova-api-log" containerID="cri-o://a432d4af917fe8731860e9db28949c193d839b884d400249de58aedacd69560e" gracePeriod=30 Jan 20 17:09:38 crc kubenswrapper[4558]: I0120 17:09:38.369791 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="0aba62a2-39a5-44ba-952b-68d05f05ba3d" containerName="nova-api-api" containerID="cri-o://24ba9021a6d7b15c6de341be07bead07b860fa618692fdcc0f9d21e6893ae9ea" gracePeriod=30 Jan 20 17:09:38 crc kubenswrapper[4558]: I0120 17:09:38.423295 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:09:38 crc kubenswrapper[4558]: I0120 17:09:38.423532 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="d46a03de-3bdb-4932-bd49-da4fecd91907" containerName="nova-metadata-log" containerID="cri-o://7c31e2893dd9cac6872008d8ef48b64f3102f3ebfec35cce0281a9b91ba96cb3" gracePeriod=30 Jan 20 17:09:38 crc kubenswrapper[4558]: I0120 17:09:38.423606 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="d46a03de-3bdb-4932-bd49-da4fecd91907" containerName="nova-metadata-metadata" containerID="cri-o://366eeb0f21b82fded41d4ec3fe46359cd74195c6c393c5080bb24b221fbf0fd0" gracePeriod=30 Jan 20 17:09:38 crc kubenswrapper[4558]: I0120 17:09:38.649118 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:09:39 crc kubenswrapper[4558]: I0120 17:09:39.216508 4558 generic.go:334] "Generic (PLEG): container finished" podID="d46a03de-3bdb-4932-bd49-da4fecd91907" containerID="7c31e2893dd9cac6872008d8ef48b64f3102f3ebfec35cce0281a9b91ba96cb3" exitCode=143 Jan 20 17:09:39 crc kubenswrapper[4558]: I0120 17:09:39.216620 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"d46a03de-3bdb-4932-bd49-da4fecd91907","Type":"ContainerDied","Data":"7c31e2893dd9cac6872008d8ef48b64f3102f3ebfec35cce0281a9b91ba96cb3"} Jan 20 17:09:39 crc kubenswrapper[4558]: I0120 17:09:39.219447 4558 generic.go:334] "Generic (PLEG): container finished" podID="0aba62a2-39a5-44ba-952b-68d05f05ba3d" containerID="a432d4af917fe8731860e9db28949c193d839b884d400249de58aedacd69560e" exitCode=143 Jan 20 17:09:39 crc kubenswrapper[4558]: I0120 17:09:39.219533 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"0aba62a2-39a5-44ba-952b-68d05f05ba3d","Type":"ContainerDied","Data":"a432d4af917fe8731860e9db28949c193d839b884d400249de58aedacd69560e"} Jan 20 17:09:40 crc kubenswrapper[4558]: I0120 17:09:40.227822 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="9cf7d5a4-78ef-4331-b28a-4ef3f8f06b35" 
containerName="nova-scheduler-scheduler" containerID="cri-o://3465a9415f40dc71ee80e7a40e1d193a38dbe6748e350f8cabf08597b3055836" gracePeriod=30 Jan 20 17:09:41 crc kubenswrapper[4558]: I0120 17:09:41.933573 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:09:41 crc kubenswrapper[4558]: I0120 17:09:41.941640 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.039810 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0aba62a2-39a5-44ba-952b-68d05f05ba3d-logs\") pod \"0aba62a2-39a5-44ba-952b-68d05f05ba3d\" (UID: \"0aba62a2-39a5-44ba-952b-68d05f05ba3d\") " Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.039857 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6d5q8\" (UniqueName: \"kubernetes.io/projected/0aba62a2-39a5-44ba-952b-68d05f05ba3d-kube-api-access-6d5q8\") pod \"0aba62a2-39a5-44ba-952b-68d05f05ba3d\" (UID: \"0aba62a2-39a5-44ba-952b-68d05f05ba3d\") " Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.039894 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0aba62a2-39a5-44ba-952b-68d05f05ba3d-combined-ca-bundle\") pod \"0aba62a2-39a5-44ba-952b-68d05f05ba3d\" (UID: \"0aba62a2-39a5-44ba-952b-68d05f05ba3d\") " Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.040009 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0aba62a2-39a5-44ba-952b-68d05f05ba3d-config-data\") pod \"0aba62a2-39a5-44ba-952b-68d05f05ba3d\" (UID: \"0aba62a2-39a5-44ba-952b-68d05f05ba3d\") " Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.040400 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0aba62a2-39a5-44ba-952b-68d05f05ba3d-logs" (OuterVolumeSpecName: "logs") pod "0aba62a2-39a5-44ba-952b-68d05f05ba3d" (UID: "0aba62a2-39a5-44ba-952b-68d05f05ba3d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.041212 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0aba62a2-39a5-44ba-952b-68d05f05ba3d-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.047494 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0aba62a2-39a5-44ba-952b-68d05f05ba3d-kube-api-access-6d5q8" (OuterVolumeSpecName: "kube-api-access-6d5q8") pod "0aba62a2-39a5-44ba-952b-68d05f05ba3d" (UID: "0aba62a2-39a5-44ba-952b-68d05f05ba3d"). InnerVolumeSpecName "kube-api-access-6d5q8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.066586 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0aba62a2-39a5-44ba-952b-68d05f05ba3d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0aba62a2-39a5-44ba-952b-68d05f05ba3d" (UID: "0aba62a2-39a5-44ba-952b-68d05f05ba3d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.066965 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0aba62a2-39a5-44ba-952b-68d05f05ba3d-config-data" (OuterVolumeSpecName: "config-data") pod "0aba62a2-39a5-44ba-952b-68d05f05ba3d" (UID: "0aba62a2-39a5-44ba-952b-68d05f05ba3d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.142244 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d46a03de-3bdb-4932-bd49-da4fecd91907-config-data\") pod \"d46a03de-3bdb-4932-bd49-da4fecd91907\" (UID: \"d46a03de-3bdb-4932-bd49-da4fecd91907\") " Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.142440 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lxkkc\" (UniqueName: \"kubernetes.io/projected/d46a03de-3bdb-4932-bd49-da4fecd91907-kube-api-access-lxkkc\") pod \"d46a03de-3bdb-4932-bd49-da4fecd91907\" (UID: \"d46a03de-3bdb-4932-bd49-da4fecd91907\") " Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.142707 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d46a03de-3bdb-4932-bd49-da4fecd91907-combined-ca-bundle\") pod \"d46a03de-3bdb-4932-bd49-da4fecd91907\" (UID: \"d46a03de-3bdb-4932-bd49-da4fecd91907\") " Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.143026 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d46a03de-3bdb-4932-bd49-da4fecd91907-logs\") pod \"d46a03de-3bdb-4932-bd49-da4fecd91907\" (UID: \"d46a03de-3bdb-4932-bd49-da4fecd91907\") " Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.143324 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/d46a03de-3bdb-4932-bd49-da4fecd91907-nova-metadata-tls-certs\") pod \"d46a03de-3bdb-4932-bd49-da4fecd91907\" (UID: \"d46a03de-3bdb-4932-bd49-da4fecd91907\") " Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.143598 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d46a03de-3bdb-4932-bd49-da4fecd91907-logs" (OuterVolumeSpecName: "logs") pod "d46a03de-3bdb-4932-bd49-da4fecd91907" (UID: "d46a03de-3bdb-4932-bd49-da4fecd91907"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.144026 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6d5q8\" (UniqueName: \"kubernetes.io/projected/0aba62a2-39a5-44ba-952b-68d05f05ba3d-kube-api-access-6d5q8\") on node \"crc\" DevicePath \"\"" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.144126 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0aba62a2-39a5-44ba-952b-68d05f05ba3d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.144217 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d46a03de-3bdb-4932-bd49-da4fecd91907-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.144284 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0aba62a2-39a5-44ba-952b-68d05f05ba3d-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.145938 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d46a03de-3bdb-4932-bd49-da4fecd91907-kube-api-access-lxkkc" (OuterVolumeSpecName: "kube-api-access-lxkkc") pod "d46a03de-3bdb-4932-bd49-da4fecd91907" (UID: "d46a03de-3bdb-4932-bd49-da4fecd91907"). InnerVolumeSpecName "kube-api-access-lxkkc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.161917 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d46a03de-3bdb-4932-bd49-da4fecd91907-config-data" (OuterVolumeSpecName: "config-data") pod "d46a03de-3bdb-4932-bd49-da4fecd91907" (UID: "d46a03de-3bdb-4932-bd49-da4fecd91907"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.165339 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d46a03de-3bdb-4932-bd49-da4fecd91907-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d46a03de-3bdb-4932-bd49-da4fecd91907" (UID: "d46a03de-3bdb-4932-bd49-da4fecd91907"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.180031 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d46a03de-3bdb-4932-bd49-da4fecd91907-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "d46a03de-3bdb-4932-bd49-da4fecd91907" (UID: "d46a03de-3bdb-4932-bd49-da4fecd91907"). InnerVolumeSpecName "nova-metadata-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.246366 4558 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/d46a03de-3bdb-4932-bd49-da4fecd91907-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.246521 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d46a03de-3bdb-4932-bd49-da4fecd91907-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.246787 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lxkkc\" (UniqueName: \"kubernetes.io/projected/d46a03de-3bdb-4932-bd49-da4fecd91907-kube-api-access-lxkkc\") on node \"crc\" DevicePath \"\"" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.246820 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d46a03de-3bdb-4932-bd49-da4fecd91907-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.248175 4558 generic.go:334] "Generic (PLEG): container finished" podID="0aba62a2-39a5-44ba-952b-68d05f05ba3d" containerID="24ba9021a6d7b15c6de341be07bead07b860fa618692fdcc0f9d21e6893ae9ea" exitCode=0 Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.248252 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.248258 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"0aba62a2-39a5-44ba-952b-68d05f05ba3d","Type":"ContainerDied","Data":"24ba9021a6d7b15c6de341be07bead07b860fa618692fdcc0f9d21e6893ae9ea"} Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.248305 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"0aba62a2-39a5-44ba-952b-68d05f05ba3d","Type":"ContainerDied","Data":"f0452e67edd05af644b70a290dcfdd7068bd084cf45a61561eff60d401aba6a7"} Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.248326 4558 scope.go:117] "RemoveContainer" containerID="24ba9021a6d7b15c6de341be07bead07b860fa618692fdcc0f9d21e6893ae9ea" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.251620 4558 generic.go:334] "Generic (PLEG): container finished" podID="d46a03de-3bdb-4932-bd49-da4fecd91907" containerID="366eeb0f21b82fded41d4ec3fe46359cd74195c6c393c5080bb24b221fbf0fd0" exitCode=0 Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.251655 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"d46a03de-3bdb-4932-bd49-da4fecd91907","Type":"ContainerDied","Data":"366eeb0f21b82fded41d4ec3fe46359cd74195c6c393c5080bb24b221fbf0fd0"} Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.251671 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"d46a03de-3bdb-4932-bd49-da4fecd91907","Type":"ContainerDied","Data":"dee4754d0a4f45831cd7b7d7183cc1ecf64ae33e72b0d43a0b1701b3ff3b64db"} Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.251715 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.272114 4558 scope.go:117] "RemoveContainer" containerID="a432d4af917fe8731860e9db28949c193d839b884d400249de58aedacd69560e" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.309088 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.320417 4558 scope.go:117] "RemoveContainer" containerID="24ba9021a6d7b15c6de341be07bead07b860fa618692fdcc0f9d21e6893ae9ea" Jan 20 17:09:42 crc kubenswrapper[4558]: E0120 17:09:42.320917 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"24ba9021a6d7b15c6de341be07bead07b860fa618692fdcc0f9d21e6893ae9ea\": container with ID starting with 24ba9021a6d7b15c6de341be07bead07b860fa618692fdcc0f9d21e6893ae9ea not found: ID does not exist" containerID="24ba9021a6d7b15c6de341be07bead07b860fa618692fdcc0f9d21e6893ae9ea" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.321021 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"24ba9021a6d7b15c6de341be07bead07b860fa618692fdcc0f9d21e6893ae9ea"} err="failed to get container status \"24ba9021a6d7b15c6de341be07bead07b860fa618692fdcc0f9d21e6893ae9ea\": rpc error: code = NotFound desc = could not find container \"24ba9021a6d7b15c6de341be07bead07b860fa618692fdcc0f9d21e6893ae9ea\": container with ID starting with 24ba9021a6d7b15c6de341be07bead07b860fa618692fdcc0f9d21e6893ae9ea not found: ID does not exist" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.321094 4558 scope.go:117] "RemoveContainer" containerID="a432d4af917fe8731860e9db28949c193d839b884d400249de58aedacd69560e" Jan 20 17:09:42 crc kubenswrapper[4558]: E0120 17:09:42.321376 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a432d4af917fe8731860e9db28949c193d839b884d400249de58aedacd69560e\": container with ID starting with a432d4af917fe8731860e9db28949c193d839b884d400249de58aedacd69560e not found: ID does not exist" containerID="a432d4af917fe8731860e9db28949c193d839b884d400249de58aedacd69560e" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.321475 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a432d4af917fe8731860e9db28949c193d839b884d400249de58aedacd69560e"} err="failed to get container status \"a432d4af917fe8731860e9db28949c193d839b884d400249de58aedacd69560e\": rpc error: code = NotFound desc = could not find container \"a432d4af917fe8731860e9db28949c193d839b884d400249de58aedacd69560e\": container with ID starting with a432d4af917fe8731860e9db28949c193d839b884d400249de58aedacd69560e not found: ID does not exist" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.321544 4558 scope.go:117] "RemoveContainer" containerID="366eeb0f21b82fded41d4ec3fe46359cd74195c6c393c5080bb24b221fbf0fd0" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.345824 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.354616 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.354921 4558 scope.go:117] "RemoveContainer" 
containerID="7c31e2893dd9cac6872008d8ef48b64f3102f3ebfec35cce0281a9b91ba96cb3" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.361721 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.366847 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:09:42 crc kubenswrapper[4558]: E0120 17:09:42.367305 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d46a03de-3bdb-4932-bd49-da4fecd91907" containerName="nova-metadata-log" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.367329 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d46a03de-3bdb-4932-bd49-da4fecd91907" containerName="nova-metadata-log" Jan 20 17:09:42 crc kubenswrapper[4558]: E0120 17:09:42.367343 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e920ead-f95a-434d-9aa0-47342d337eae" containerName="nova-manage" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.367357 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e920ead-f95a-434d-9aa0-47342d337eae" containerName="nova-manage" Jan 20 17:09:42 crc kubenswrapper[4558]: E0120 17:09:42.367433 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0aba62a2-39a5-44ba-952b-68d05f05ba3d" containerName="nova-api-log" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.367446 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0aba62a2-39a5-44ba-952b-68d05f05ba3d" containerName="nova-api-log" Jan 20 17:09:42 crc kubenswrapper[4558]: E0120 17:09:42.367462 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0aba62a2-39a5-44ba-952b-68d05f05ba3d" containerName="nova-api-api" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.367468 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0aba62a2-39a5-44ba-952b-68d05f05ba3d" containerName="nova-api-api" Jan 20 17:09:42 crc kubenswrapper[4558]: E0120 17:09:42.367486 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d46a03de-3bdb-4932-bd49-da4fecd91907" containerName="nova-metadata-metadata" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.367493 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d46a03de-3bdb-4932-bd49-da4fecd91907" containerName="nova-metadata-metadata" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.367672 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d46a03de-3bdb-4932-bd49-da4fecd91907" containerName="nova-metadata-log" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.367689 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="7e920ead-f95a-434d-9aa0-47342d337eae" containerName="nova-manage" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.367704 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="0aba62a2-39a5-44ba-952b-68d05f05ba3d" containerName="nova-api-api" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.367716 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="0aba62a2-39a5-44ba-952b-68d05f05ba3d" containerName="nova-api-log" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.367733 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d46a03de-3bdb-4932-bd49-da4fecd91907" containerName="nova-metadata-metadata" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.368930 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.370528 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-metadata-internal-svc" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.371921 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-metadata-config-data" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.372687 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.373864 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.375415 4558 scope.go:117] "RemoveContainer" containerID="366eeb0f21b82fded41d4ec3fe46359cd74195c6c393c5080bb24b221fbf0fd0" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.375446 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-api-config-data" Jan 20 17:09:42 crc kubenswrapper[4558]: E0120 17:09:42.375944 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"366eeb0f21b82fded41d4ec3fe46359cd74195c6c393c5080bb24b221fbf0fd0\": container with ID starting with 366eeb0f21b82fded41d4ec3fe46359cd74195c6c393c5080bb24b221fbf0fd0 not found: ID does not exist" containerID="366eeb0f21b82fded41d4ec3fe46359cd74195c6c393c5080bb24b221fbf0fd0" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.375982 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"366eeb0f21b82fded41d4ec3fe46359cd74195c6c393c5080bb24b221fbf0fd0"} err="failed to get container status \"366eeb0f21b82fded41d4ec3fe46359cd74195c6c393c5080bb24b221fbf0fd0\": rpc error: code = NotFound desc = could not find container \"366eeb0f21b82fded41d4ec3fe46359cd74195c6c393c5080bb24b221fbf0fd0\": container with ID starting with 366eeb0f21b82fded41d4ec3fe46359cd74195c6c393c5080bb24b221fbf0fd0 not found: ID does not exist" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.376008 4558 scope.go:117] "RemoveContainer" containerID="7c31e2893dd9cac6872008d8ef48b64f3102f3ebfec35cce0281a9b91ba96cb3" Jan 20 17:09:42 crc kubenswrapper[4558]: E0120 17:09:42.376396 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7c31e2893dd9cac6872008d8ef48b64f3102f3ebfec35cce0281a9b91ba96cb3\": container with ID starting with 7c31e2893dd9cac6872008d8ef48b64f3102f3ebfec35cce0281a9b91ba96cb3 not found: ID does not exist" containerID="7c31e2893dd9cac6872008d8ef48b64f3102f3ebfec35cce0281a9b91ba96cb3" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.376420 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7c31e2893dd9cac6872008d8ef48b64f3102f3ebfec35cce0281a9b91ba96cb3"} err="failed to get container status \"7c31e2893dd9cac6872008d8ef48b64f3102f3ebfec35cce0281a9b91ba96cb3\": rpc error: code = NotFound desc = could not find container \"7c31e2893dd9cac6872008d8ef48b64f3102f3ebfec35cce0281a9b91ba96cb3\": container with ID starting with 7c31e2893dd9cac6872008d8ef48b64f3102f3ebfec35cce0281a9b91ba96cb3 not found: ID does not exist" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.378470 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.384396 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:09:42 crc kubenswrapper[4558]: E0120 17:09:42.438301 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="3465a9415f40dc71ee80e7a40e1d193a38dbe6748e350f8cabf08597b3055836" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:09:42 crc kubenswrapper[4558]: E0120 17:09:42.439374 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="3465a9415f40dc71ee80e7a40e1d193a38dbe6748e350f8cabf08597b3055836" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:09:42 crc kubenswrapper[4558]: E0120 17:09:42.440421 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="3465a9415f40dc71ee80e7a40e1d193a38dbe6748e350f8cabf08597b3055836" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:09:42 crc kubenswrapper[4558]: E0120 17:09:42.440450 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="9cf7d5a4-78ef-4331-b28a-4ef3f8f06b35" containerName="nova-scheduler-scheduler" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.453696 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-brfq8\" (UniqueName: \"kubernetes.io/projected/0b0b02aa-9ba9-4207-b77c-8157659e4ba6-kube-api-access-brfq8\") pod \"nova-api-0\" (UID: \"0b0b02aa-9ba9-4207-b77c-8157659e4ba6\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.453761 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5560f6a4-fc05-4d97-8496-b87804dfab99-logs\") pod \"nova-metadata-0\" (UID: \"5560f6a4-fc05-4d97-8496-b87804dfab99\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.453815 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b0b02aa-9ba9-4207-b77c-8157659e4ba6-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"0b0b02aa-9ba9-4207-b77c-8157659e4ba6\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.453832 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0b0b02aa-9ba9-4207-b77c-8157659e4ba6-logs\") pod \"nova-api-0\" (UID: \"0b0b02aa-9ba9-4207-b77c-8157659e4ba6\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.453881 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/5560f6a4-fc05-4d97-8496-b87804dfab99-config-data\") pod \"nova-metadata-0\" (UID: \"5560f6a4-fc05-4d97-8496-b87804dfab99\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.453906 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5560f6a4-fc05-4d97-8496-b87804dfab99-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"5560f6a4-fc05-4d97-8496-b87804dfab99\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.453924 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zr25m\" (UniqueName: \"kubernetes.io/projected/5560f6a4-fc05-4d97-8496-b87804dfab99-kube-api-access-zr25m\") pod \"nova-metadata-0\" (UID: \"5560f6a4-fc05-4d97-8496-b87804dfab99\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.453960 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/5560f6a4-fc05-4d97-8496-b87804dfab99-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"5560f6a4-fc05-4d97-8496-b87804dfab99\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.453978 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0b0b02aa-9ba9-4207-b77c-8157659e4ba6-config-data\") pod \"nova-api-0\" (UID: \"0b0b02aa-9ba9-4207-b77c-8157659e4ba6\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.556022 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-brfq8\" (UniqueName: \"kubernetes.io/projected/0b0b02aa-9ba9-4207-b77c-8157659e4ba6-kube-api-access-brfq8\") pod \"nova-api-0\" (UID: \"0b0b02aa-9ba9-4207-b77c-8157659e4ba6\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.556142 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5560f6a4-fc05-4d97-8496-b87804dfab99-logs\") pod \"nova-metadata-0\" (UID: \"5560f6a4-fc05-4d97-8496-b87804dfab99\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.556257 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b0b02aa-9ba9-4207-b77c-8157659e4ba6-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"0b0b02aa-9ba9-4207-b77c-8157659e4ba6\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.556294 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0b0b02aa-9ba9-4207-b77c-8157659e4ba6-logs\") pod \"nova-api-0\" (UID: \"0b0b02aa-9ba9-4207-b77c-8157659e4ba6\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.556406 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5560f6a4-fc05-4d97-8496-b87804dfab99-config-data\") pod \"nova-metadata-0\" (UID: \"5560f6a4-fc05-4d97-8496-b87804dfab99\") " 
pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.556478 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5560f6a4-fc05-4d97-8496-b87804dfab99-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"5560f6a4-fc05-4d97-8496-b87804dfab99\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.556516 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zr25m\" (UniqueName: \"kubernetes.io/projected/5560f6a4-fc05-4d97-8496-b87804dfab99-kube-api-access-zr25m\") pod \"nova-metadata-0\" (UID: \"5560f6a4-fc05-4d97-8496-b87804dfab99\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.556559 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/5560f6a4-fc05-4d97-8496-b87804dfab99-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"5560f6a4-fc05-4d97-8496-b87804dfab99\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.556596 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0b0b02aa-9ba9-4207-b77c-8157659e4ba6-config-data\") pod \"nova-api-0\" (UID: \"0b0b02aa-9ba9-4207-b77c-8157659e4ba6\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.556714 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5560f6a4-fc05-4d97-8496-b87804dfab99-logs\") pod \"nova-metadata-0\" (UID: \"5560f6a4-fc05-4d97-8496-b87804dfab99\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.556831 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0b0b02aa-9ba9-4207-b77c-8157659e4ba6-logs\") pod \"nova-api-0\" (UID: \"0b0b02aa-9ba9-4207-b77c-8157659e4ba6\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.561755 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5560f6a4-fc05-4d97-8496-b87804dfab99-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"5560f6a4-fc05-4d97-8496-b87804dfab99\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.562242 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/5560f6a4-fc05-4d97-8496-b87804dfab99-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"5560f6a4-fc05-4d97-8496-b87804dfab99\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.562407 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b0b02aa-9ba9-4207-b77c-8157659e4ba6-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"0b0b02aa-9ba9-4207-b77c-8157659e4ba6\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.562701 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/5560f6a4-fc05-4d97-8496-b87804dfab99-config-data\") pod \"nova-metadata-0\" (UID: \"5560f6a4-fc05-4d97-8496-b87804dfab99\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.562935 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0b0b02aa-9ba9-4207-b77c-8157659e4ba6-config-data\") pod \"nova-api-0\" (UID: \"0b0b02aa-9ba9-4207-b77c-8157659e4ba6\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.566678 4558 scope.go:117] "RemoveContainer" containerID="a711a286282347590079d2447ef37fa9ed0f11085d6d7a65f85e65c1c2df608f" Jan 20 17:09:42 crc kubenswrapper[4558]: E0120 17:09:42.566968 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.571239 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zr25m\" (UniqueName: \"kubernetes.io/projected/5560f6a4-fc05-4d97-8496-b87804dfab99-kube-api-access-zr25m\") pod \"nova-metadata-0\" (UID: \"5560f6a4-fc05-4d97-8496-b87804dfab99\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.571261 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-brfq8\" (UniqueName: \"kubernetes.io/projected/0b0b02aa-9ba9-4207-b77c-8157659e4ba6-kube-api-access-brfq8\") pod \"nova-api-0\" (UID: \"0b0b02aa-9ba9-4207-b77c-8157659e4ba6\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.576757 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0aba62a2-39a5-44ba-952b-68d05f05ba3d" path="/var/lib/kubelet/pods/0aba62a2-39a5-44ba-952b-68d05f05ba3d/volumes" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.577741 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d46a03de-3bdb-4932-bd49-da4fecd91907" path="/var/lib/kubelet/pods/d46a03de-3bdb-4932-bd49-da4fecd91907/volumes" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.683197 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:42 crc kubenswrapper[4558]: I0120 17:09:42.694854 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:09:43 crc kubenswrapper[4558]: I0120 17:09:43.085057 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:43 crc kubenswrapper[4558]: I0120 17:09:43.093330 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:09:43 crc kubenswrapper[4558]: I0120 17:09:43.166263 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:09:43 crc kubenswrapper[4558]: I0120 17:09:43.271013 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"0b0b02aa-9ba9-4207-b77c-8157659e4ba6","Type":"ContainerStarted","Data":"2c69c43b59b63ce2f85e2277018dc5c5a49ec02929892a7ab944cbbb36ee4c10"} Jan 20 17:09:43 crc kubenswrapper[4558]: I0120 17:09:43.278217 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"5560f6a4-fc05-4d97-8496-b87804dfab99","Type":"ContainerStarted","Data":"a93c5f1f38eeb134451e56fbc3900bca1dc90fbb9204fedc6662faec2d98373f"} Jan 20 17:09:43 crc kubenswrapper[4558]: I0120 17:09:43.938648 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:09:44 crc kubenswrapper[4558]: I0120 17:09:44.096141 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9cf7d5a4-78ef-4331-b28a-4ef3f8f06b35-combined-ca-bundle\") pod \"9cf7d5a4-78ef-4331-b28a-4ef3f8f06b35\" (UID: \"9cf7d5a4-78ef-4331-b28a-4ef3f8f06b35\") " Jan 20 17:09:44 crc kubenswrapper[4558]: I0120 17:09:44.097373 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9cf7d5a4-78ef-4331-b28a-4ef3f8f06b35-config-data\") pod \"9cf7d5a4-78ef-4331-b28a-4ef3f8f06b35\" (UID: \"9cf7d5a4-78ef-4331-b28a-4ef3f8f06b35\") " Jan 20 17:09:44 crc kubenswrapper[4558]: I0120 17:09:44.097948 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kg6dq\" (UniqueName: \"kubernetes.io/projected/9cf7d5a4-78ef-4331-b28a-4ef3f8f06b35-kube-api-access-kg6dq\") pod \"9cf7d5a4-78ef-4331-b28a-4ef3f8f06b35\" (UID: \"9cf7d5a4-78ef-4331-b28a-4ef3f8f06b35\") " Jan 20 17:09:44 crc kubenswrapper[4558]: I0120 17:09:44.103661 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9cf7d5a4-78ef-4331-b28a-4ef3f8f06b35-kube-api-access-kg6dq" (OuterVolumeSpecName: "kube-api-access-kg6dq") pod "9cf7d5a4-78ef-4331-b28a-4ef3f8f06b35" (UID: "9cf7d5a4-78ef-4331-b28a-4ef3f8f06b35"). InnerVolumeSpecName "kube-api-access-kg6dq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:09:44 crc kubenswrapper[4558]: I0120 17:09:44.121672 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9cf7d5a4-78ef-4331-b28a-4ef3f8f06b35-config-data" (OuterVolumeSpecName: "config-data") pod "9cf7d5a4-78ef-4331-b28a-4ef3f8f06b35" (UID: "9cf7d5a4-78ef-4331-b28a-4ef3f8f06b35"). InnerVolumeSpecName "config-data". 
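Each kubelet entry above carries the standard klog header after the journald prefix ("Jan 20 ... crc kubenswrapper[4558]: "): a severity letter plus MMDD, the wall-clock time, the PID, the emitting source file and line, and then the message. When correlating the mount/unmount churn in this stretch of the log, it can help to pull those fields out programmatically; below is a minimal, stdlib-only Go sketch. The regular expression and field handling are illustrative only, not kubelet code.

package main

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
)

// klogHeader matches the header embedded in lines such as:
//   I0120 17:09:44.103661    4558 operation_generator.go:803] UnmountVolume.TearDown succeeded ...
// Groups: severity, MMDD, time, pid, source file:line, message.
// The pattern is searched (not anchored) because these lines also carry a
// journald prefix before the klog header.
var klogHeader = regexp.MustCompile(
	`([IWEF])(\d{4}) (\d{2}:\d{2}:\d{2}\.\d+)\s+(\d+)\s+([\w./-]+:\d+)\]\s*(.*)`)

func main() {
	sc := bufio.NewScanner(os.Stdin)
	sc.Buffer(make([]byte, 0, 1024*1024), 1024*1024) // kubelet lines can be long
	for sc.Scan() {
		m := klogHeader.FindStringSubmatch(sc.Text())
		if m == nil {
			continue // not a klog-formatted line
		}
		fmt.Printf("sev=%s date=%s time=%s pid=%s src=%s msg=%.60s\n",
			m[1], m[2], m[3], m[4], m[5], m[6])
	}
}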
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:09:44 crc kubenswrapper[4558]: I0120 17:09:44.128936 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9cf7d5a4-78ef-4331-b28a-4ef3f8f06b35-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9cf7d5a4-78ef-4331-b28a-4ef3f8f06b35" (UID: "9cf7d5a4-78ef-4331-b28a-4ef3f8f06b35"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:09:44 crc kubenswrapper[4558]: I0120 17:09:44.202843 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9cf7d5a4-78ef-4331-b28a-4ef3f8f06b35-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:09:44 crc kubenswrapper[4558]: I0120 17:09:44.202881 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kg6dq\" (UniqueName: \"kubernetes.io/projected/9cf7d5a4-78ef-4331-b28a-4ef3f8f06b35-kube-api-access-kg6dq\") on node \"crc\" DevicePath \"\"" Jan 20 17:09:44 crc kubenswrapper[4558]: I0120 17:09:44.202896 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9cf7d5a4-78ef-4331-b28a-4ef3f8f06b35-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:09:44 crc kubenswrapper[4558]: I0120 17:09:44.292992 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"0b0b02aa-9ba9-4207-b77c-8157659e4ba6","Type":"ContainerStarted","Data":"0da1ffbd18a6d46e2e13c3f5dc89867501bd1410f00da6ac8d8a3630eba71d59"} Jan 20 17:09:44 crc kubenswrapper[4558]: I0120 17:09:44.293289 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"0b0b02aa-9ba9-4207-b77c-8157659e4ba6","Type":"ContainerStarted","Data":"6c2b79ff5237f93288a809fb0566043521294554bd47c3ed1a939b33fa9bee10"} Jan 20 17:09:44 crc kubenswrapper[4558]: I0120 17:09:44.295228 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"5560f6a4-fc05-4d97-8496-b87804dfab99","Type":"ContainerStarted","Data":"c1a2c6fb29683dd75096376810ff0f53fdd799813b29ba97b56d0f8d5b5eac42"} Jan 20 17:09:44 crc kubenswrapper[4558]: I0120 17:09:44.295261 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"5560f6a4-fc05-4d97-8496-b87804dfab99","Type":"ContainerStarted","Data":"737bdc74f2c709ec376fc9204c898d8cba1280d8792f62f6a65923c927c2ba2f"} Jan 20 17:09:44 crc kubenswrapper[4558]: I0120 17:09:44.297962 4558 generic.go:334] "Generic (PLEG): container finished" podID="9cf7d5a4-78ef-4331-b28a-4ef3f8f06b35" containerID="3465a9415f40dc71ee80e7a40e1d193a38dbe6748e350f8cabf08597b3055836" exitCode=0 Jan 20 17:09:44 crc kubenswrapper[4558]: I0120 17:09:44.297994 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"9cf7d5a4-78ef-4331-b28a-4ef3f8f06b35","Type":"ContainerDied","Data":"3465a9415f40dc71ee80e7a40e1d193a38dbe6748e350f8cabf08597b3055836"} Jan 20 17:09:44 crc kubenswrapper[4558]: I0120 17:09:44.298010 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"9cf7d5a4-78ef-4331-b28a-4ef3f8f06b35","Type":"ContainerDied","Data":"2664c0e0fd94d254732ddab42625d014c1cc982ee00882133439035e83f10aaf"} Jan 20 17:09:44 crc kubenswrapper[4558]: I0120 17:09:44.298026 4558 scope.go:117] "RemoveContainer" 
containerID="3465a9415f40dc71ee80e7a40e1d193a38dbe6748e350f8cabf08597b3055836" Jan 20 17:09:44 crc kubenswrapper[4558]: I0120 17:09:44.298109 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:09:44 crc kubenswrapper[4558]: I0120 17:09:44.316760 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-api-0" podStartSLOduration=2.31674825 podStartE2EDuration="2.31674825s" podCreationTimestamp="2026-01-20 17:09:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:09:44.310047269 +0000 UTC m=+1678.070385237" watchObservedRunningTime="2026-01-20 17:09:44.31674825 +0000 UTC m=+1678.077086217" Jan 20 17:09:44 crc kubenswrapper[4558]: I0120 17:09:44.324426 4558 scope.go:117] "RemoveContainer" containerID="3465a9415f40dc71ee80e7a40e1d193a38dbe6748e350f8cabf08597b3055836" Jan 20 17:09:44 crc kubenswrapper[4558]: E0120 17:09:44.325850 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3465a9415f40dc71ee80e7a40e1d193a38dbe6748e350f8cabf08597b3055836\": container with ID starting with 3465a9415f40dc71ee80e7a40e1d193a38dbe6748e350f8cabf08597b3055836 not found: ID does not exist" containerID="3465a9415f40dc71ee80e7a40e1d193a38dbe6748e350f8cabf08597b3055836" Jan 20 17:09:44 crc kubenswrapper[4558]: I0120 17:09:44.325883 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3465a9415f40dc71ee80e7a40e1d193a38dbe6748e350f8cabf08597b3055836"} err="failed to get container status \"3465a9415f40dc71ee80e7a40e1d193a38dbe6748e350f8cabf08597b3055836\": rpc error: code = NotFound desc = could not find container \"3465a9415f40dc71ee80e7a40e1d193a38dbe6748e350f8cabf08597b3055836\": container with ID starting with 3465a9415f40dc71ee80e7a40e1d193a38dbe6748e350f8cabf08597b3055836 not found: ID does not exist" Jan 20 17:09:44 crc kubenswrapper[4558]: I0120 17:09:44.330943 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-metadata-0" podStartSLOduration=2.330926281 podStartE2EDuration="2.330926281s" podCreationTimestamp="2026-01-20 17:09:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:09:44.329338685 +0000 UTC m=+1678.089676652" watchObservedRunningTime="2026-01-20 17:09:44.330926281 +0000 UTC m=+1678.091264247" Jan 20 17:09:44 crc kubenswrapper[4558]: I0120 17:09:44.357971 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:09:44 crc kubenswrapper[4558]: I0120 17:09:44.368218 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:09:44 crc kubenswrapper[4558]: I0120 17:09:44.374657 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:09:44 crc kubenswrapper[4558]: E0120 17:09:44.375140 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9cf7d5a4-78ef-4331-b28a-4ef3f8f06b35" containerName="nova-scheduler-scheduler" Jan 20 17:09:44 crc kubenswrapper[4558]: I0120 17:09:44.375188 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9cf7d5a4-78ef-4331-b28a-4ef3f8f06b35" containerName="nova-scheduler-scheduler" Jan 20 
17:09:44 crc kubenswrapper[4558]: I0120 17:09:44.375414 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="9cf7d5a4-78ef-4331-b28a-4ef3f8f06b35" containerName="nova-scheduler-scheduler" Jan 20 17:09:44 crc kubenswrapper[4558]: I0120 17:09:44.376071 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:09:44 crc kubenswrapper[4558]: I0120 17:09:44.377970 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-scheduler-config-data" Jan 20 17:09:44 crc kubenswrapper[4558]: I0120 17:09:44.379714 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:09:44 crc kubenswrapper[4558]: I0120 17:09:44.409244 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04194274-834f-49e3-ac2f-c28998193181-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"04194274-834f-49e3-ac2f-c28998193181\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:09:44 crc kubenswrapper[4558]: I0120 17:09:44.409308 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04194274-834f-49e3-ac2f-c28998193181-config-data\") pod \"nova-scheduler-0\" (UID: \"04194274-834f-49e3-ac2f-c28998193181\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:09:44 crc kubenswrapper[4558]: I0120 17:09:44.409388 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jjl4r\" (UniqueName: \"kubernetes.io/projected/04194274-834f-49e3-ac2f-c28998193181-kube-api-access-jjl4r\") pod \"nova-scheduler-0\" (UID: \"04194274-834f-49e3-ac2f-c28998193181\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:09:44 crc kubenswrapper[4558]: I0120 17:09:44.510152 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jjl4r\" (UniqueName: \"kubernetes.io/projected/04194274-834f-49e3-ac2f-c28998193181-kube-api-access-jjl4r\") pod \"nova-scheduler-0\" (UID: \"04194274-834f-49e3-ac2f-c28998193181\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:09:44 crc kubenswrapper[4558]: I0120 17:09:44.510300 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04194274-834f-49e3-ac2f-c28998193181-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"04194274-834f-49e3-ac2f-c28998193181\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:09:44 crc kubenswrapper[4558]: I0120 17:09:44.510322 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04194274-834f-49e3-ac2f-c28998193181-config-data\") pod \"nova-scheduler-0\" (UID: \"04194274-834f-49e3-ac2f-c28998193181\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:09:44 crc kubenswrapper[4558]: I0120 17:09:44.513825 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04194274-834f-49e3-ac2f-c28998193181-config-data\") pod \"nova-scheduler-0\" (UID: \"04194274-834f-49e3-ac2f-c28998193181\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:09:44 crc kubenswrapper[4558]: I0120 17:09:44.513850 4558 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04194274-834f-49e3-ac2f-c28998193181-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"04194274-834f-49e3-ac2f-c28998193181\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:09:44 crc kubenswrapper[4558]: I0120 17:09:44.523545 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jjl4r\" (UniqueName: \"kubernetes.io/projected/04194274-834f-49e3-ac2f-c28998193181-kube-api-access-jjl4r\") pod \"nova-scheduler-0\" (UID: \"04194274-834f-49e3-ac2f-c28998193181\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:09:44 crc kubenswrapper[4558]: I0120 17:09:44.578803 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9cf7d5a4-78ef-4331-b28a-4ef3f8f06b35" path="/var/lib/kubelet/pods/9cf7d5a4-78ef-4331-b28a-4ef3f8f06b35/volumes" Jan 20 17:09:44 crc kubenswrapper[4558]: I0120 17:09:44.692121 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:09:45 crc kubenswrapper[4558]: I0120 17:09:45.096700 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:09:45 crc kubenswrapper[4558]: I0120 17:09:45.308828 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"04194274-834f-49e3-ac2f-c28998193181","Type":"ContainerStarted","Data":"9827c6a452c5455a00296263b978afc4645bb1e30af3fb06ef2be62752a0e311"} Jan 20 17:09:45 crc kubenswrapper[4558]: I0120 17:09:45.309091 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"04194274-834f-49e3-ac2f-c28998193181","Type":"ContainerStarted","Data":"9112388e6768e1285e740f89d3fb74ff9c129402b557edac1264c28abf281672"} Jan 20 17:09:45 crc kubenswrapper[4558]: I0120 17:09:45.325735 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-scheduler-0" podStartSLOduration=1.3257179479999999 podStartE2EDuration="1.325717948s" podCreationTimestamp="2026-01-20 17:09:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:09:45.31957683 +0000 UTC m=+1679.079914797" watchObservedRunningTime="2026-01-20 17:09:45.325717948 +0000 UTC m=+1679.086055915" Jan 20 17:09:46 crc kubenswrapper[4558]: I0120 17:09:46.772261 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:09:46 crc kubenswrapper[4558]: I0120 17:09:46.772710 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/kube-state-metrics-0" podUID="09945b97-d2ef-4ce6-b8af-9612a4dd3482" containerName="kube-state-metrics" containerID="cri-o://7c92f532e9da8f4dcfbeda22a4be95c8aaf993e53dc631d60e9def3a4379b4ae" gracePeriod=30 Jan 20 17:09:47 crc kubenswrapper[4558]: I0120 17:09:47.204479 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:09:47 crc kubenswrapper[4558]: I0120 17:09:47.336948 4558 generic.go:334] "Generic (PLEG): container finished" podID="09945b97-d2ef-4ce6-b8af-9612a4dd3482" containerID="7c92f532e9da8f4dcfbeda22a4be95c8aaf993e53dc631d60e9def3a4379b4ae" exitCode=2 Jan 20 17:09:47 crc kubenswrapper[4558]: I0120 17:09:47.337004 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"09945b97-d2ef-4ce6-b8af-9612a4dd3482","Type":"ContainerDied","Data":"7c92f532e9da8f4dcfbeda22a4be95c8aaf993e53dc631d60e9def3a4379b4ae"} Jan 20 17:09:47 crc kubenswrapper[4558]: I0120 17:09:47.337043 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"09945b97-d2ef-4ce6-b8af-9612a4dd3482","Type":"ContainerDied","Data":"98883ad4db19024fe00d11fc9a21c52ea1d1ebdab4ef91c668c16a5f4b3f420c"} Jan 20 17:09:47 crc kubenswrapper[4558]: I0120 17:09:47.337063 4558 scope.go:117] "RemoveContainer" containerID="7c92f532e9da8f4dcfbeda22a4be95c8aaf993e53dc631d60e9def3a4379b4ae" Jan 20 17:09:47 crc kubenswrapper[4558]: I0120 17:09:47.337207 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:09:47 crc kubenswrapper[4558]: I0120 17:09:47.368534 4558 scope.go:117] "RemoveContainer" containerID="7c92f532e9da8f4dcfbeda22a4be95c8aaf993e53dc631d60e9def3a4379b4ae" Jan 20 17:09:47 crc kubenswrapper[4558]: E0120 17:09:47.368951 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7c92f532e9da8f4dcfbeda22a4be95c8aaf993e53dc631d60e9def3a4379b4ae\": container with ID starting with 7c92f532e9da8f4dcfbeda22a4be95c8aaf993e53dc631d60e9def3a4379b4ae not found: ID does not exist" containerID="7c92f532e9da8f4dcfbeda22a4be95c8aaf993e53dc631d60e9def3a4379b4ae" Jan 20 17:09:47 crc kubenswrapper[4558]: I0120 17:09:47.368983 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7c92f532e9da8f4dcfbeda22a4be95c8aaf993e53dc631d60e9def3a4379b4ae"} err="failed to get container status \"7c92f532e9da8f4dcfbeda22a4be95c8aaf993e53dc631d60e9def3a4379b4ae\": rpc error: code = NotFound desc = could not find container \"7c92f532e9da8f4dcfbeda22a4be95c8aaf993e53dc631d60e9def3a4379b4ae\": container with ID starting with 7c92f532e9da8f4dcfbeda22a4be95c8aaf993e53dc631d60e9def3a4379b4ae not found: ID does not exist" Jan 20 17:09:47 crc kubenswrapper[4558]: I0120 17:09:47.376808 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-57djt\" (UniqueName: \"kubernetes.io/projected/09945b97-d2ef-4ce6-b8af-9612a4dd3482-kube-api-access-57djt\") pod \"09945b97-d2ef-4ce6-b8af-9612a4dd3482\" (UID: \"09945b97-d2ef-4ce6-b8af-9612a4dd3482\") " Jan 20 17:09:47 crc kubenswrapper[4558]: I0120 17:09:47.383480 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09945b97-d2ef-4ce6-b8af-9612a4dd3482-kube-api-access-57djt" (OuterVolumeSpecName: "kube-api-access-57djt") pod "09945b97-d2ef-4ce6-b8af-9612a4dd3482" (UID: "09945b97-d2ef-4ce6-b8af-9612a4dd3482"). InnerVolumeSpecName "kube-api-access-57djt". 
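The "ContainerStatus from runtime service failed ... code = NotFound" and "DeleteContainer returned error" pair above appears to be the usual post-removal race: the kube-state-metrics container had already been removed when the follow-up status lookup reached CRI-O, so the error is logged and then ignored. For tooling that talks to a CRI endpoint in Go, recognising that condition comes down to the gRPC status code; the sketch below assumes the google.golang.org/grpc module is available, and the helper name is mine, not a CRI API.

package main

import (
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// isNotFound reports whether err carries the gRPC NotFound code, which is what
// the runtime returns for a container ID that has already been deleted.
func isNotFound(err error) bool {
	return status.Code(err) == codes.NotFound
}

func main() {
	// Fabricate an error shaped like the one in the log above (ID abbreviated).
	err := status.Errorf(codes.NotFound,
		"could not find container %q: ID does not exist", "7c92f532e9da...")
	fmt.Println("treat as already deleted:", isNotFound(err))
}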
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:09:47 crc kubenswrapper[4558]: I0120 17:09:47.479097 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-57djt\" (UniqueName: \"kubernetes.io/projected/09945b97-d2ef-4ce6-b8af-9612a4dd3482-kube-api-access-57djt\") on node \"crc\" DevicePath \"\"" Jan 20 17:09:47 crc kubenswrapper[4558]: I0120 17:09:47.683857 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:47 crc kubenswrapper[4558]: I0120 17:09:47.683964 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:47 crc kubenswrapper[4558]: I0120 17:09:47.688060 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:09:47 crc kubenswrapper[4558]: I0120 17:09:47.701202 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:09:47 crc kubenswrapper[4558]: I0120 17:09:47.711825 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:09:47 crc kubenswrapper[4558]: E0120 17:09:47.712253 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09945b97-d2ef-4ce6-b8af-9612a4dd3482" containerName="kube-state-metrics" Jan 20 17:09:47 crc kubenswrapper[4558]: I0120 17:09:47.712268 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="09945b97-d2ef-4ce6-b8af-9612a4dd3482" containerName="kube-state-metrics" Jan 20 17:09:47 crc kubenswrapper[4558]: I0120 17:09:47.712537 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="09945b97-d2ef-4ce6-b8af-9612a4dd3482" containerName="kube-state-metrics" Jan 20 17:09:47 crc kubenswrapper[4558]: I0120 17:09:47.713753 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:09:47 crc kubenswrapper[4558]: I0120 17:09:47.715506 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"kube-state-metrics-tls-config" Jan 20 17:09:47 crc kubenswrapper[4558]: I0120 17:09:47.715731 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-kube-state-metrics-svc" Jan 20 17:09:47 crc kubenswrapper[4558]: I0120 17:09:47.720296 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:09:47 crc kubenswrapper[4558]: I0120 17:09:47.891447 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jtwsp\" (UniqueName: \"kubernetes.io/projected/4f4387d7-1e8c-48a5-9176-c8e683469eb0-kube-api-access-jtwsp\") pod \"kube-state-metrics-0\" (UID: \"4f4387d7-1e8c-48a5-9176-c8e683469eb0\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:09:47 crc kubenswrapper[4558]: I0120 17:09:47.891613 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/4f4387d7-1e8c-48a5-9176-c8e683469eb0-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"4f4387d7-1e8c-48a5-9176-c8e683469eb0\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:09:47 crc kubenswrapper[4558]: I0120 17:09:47.891794 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/4f4387d7-1e8c-48a5-9176-c8e683469eb0-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"4f4387d7-1e8c-48a5-9176-c8e683469eb0\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:09:47 crc kubenswrapper[4558]: I0120 17:09:47.891841 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f4387d7-1e8c-48a5-9176-c8e683469eb0-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"4f4387d7-1e8c-48a5-9176-c8e683469eb0\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:09:47 crc kubenswrapper[4558]: I0120 17:09:47.992913 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jtwsp\" (UniqueName: \"kubernetes.io/projected/4f4387d7-1e8c-48a5-9176-c8e683469eb0-kube-api-access-jtwsp\") pod \"kube-state-metrics-0\" (UID: \"4f4387d7-1e8c-48a5-9176-c8e683469eb0\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:09:47 crc kubenswrapper[4558]: I0120 17:09:47.992968 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/4f4387d7-1e8c-48a5-9176-c8e683469eb0-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"4f4387d7-1e8c-48a5-9176-c8e683469eb0\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:09:47 crc kubenswrapper[4558]: I0120 17:09:47.993033 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/4f4387d7-1e8c-48a5-9176-c8e683469eb0-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"4f4387d7-1e8c-48a5-9176-c8e683469eb0\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:09:47 crc 
kubenswrapper[4558]: I0120 17:09:47.993073 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f4387d7-1e8c-48a5-9176-c8e683469eb0-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"4f4387d7-1e8c-48a5-9176-c8e683469eb0\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:09:48 crc kubenswrapper[4558]: I0120 17:09:47.999605 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/4f4387d7-1e8c-48a5-9176-c8e683469eb0-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"4f4387d7-1e8c-48a5-9176-c8e683469eb0\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:09:48 crc kubenswrapper[4558]: I0120 17:09:48.000285 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f4387d7-1e8c-48a5-9176-c8e683469eb0-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"4f4387d7-1e8c-48a5-9176-c8e683469eb0\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:09:48 crc kubenswrapper[4558]: I0120 17:09:48.000659 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/4f4387d7-1e8c-48a5-9176-c8e683469eb0-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"4f4387d7-1e8c-48a5-9176-c8e683469eb0\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:09:48 crc kubenswrapper[4558]: I0120 17:09:48.017610 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jtwsp\" (UniqueName: \"kubernetes.io/projected/4f4387d7-1e8c-48a5-9176-c8e683469eb0-kube-api-access-jtwsp\") pod \"kube-state-metrics-0\" (UID: \"4f4387d7-1e8c-48a5-9176-c8e683469eb0\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:09:48 crc kubenswrapper[4558]: I0120 17:09:48.030303 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:09:48 crc kubenswrapper[4558]: I0120 17:09:48.359412 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:09:48 crc kubenswrapper[4558]: I0120 17:09:48.359992 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="8149079b-aaf9-4993-b7b3-62c379dbc919" containerName="ceilometer-central-agent" containerID="cri-o://bcf08c42c09beee2b414cfe87eb9584080e4a7bc1b0ffc6999ff8ed107f5c12e" gracePeriod=30 Jan 20 17:09:48 crc kubenswrapper[4558]: I0120 17:09:48.360496 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="8149079b-aaf9-4993-b7b3-62c379dbc919" containerName="proxy-httpd" containerID="cri-o://5775e82892c438c20a28ac8ee95d014102563fb0a2da04cb631f3c4dac7a8946" gracePeriod=30 Jan 20 17:09:48 crc kubenswrapper[4558]: I0120 17:09:48.360560 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="8149079b-aaf9-4993-b7b3-62c379dbc919" containerName="sg-core" containerID="cri-o://c39e33e6d6b2482550c133b107a28667770de5c51ec6fdd129bfe9594a2dc923" gracePeriod=30 Jan 20 17:09:48 crc kubenswrapper[4558]: I0120 17:09:48.360604 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="8149079b-aaf9-4993-b7b3-62c379dbc919" containerName="ceilometer-notification-agent" containerID="cri-o://ce253fe8c8588e6acd46ebabc10336d56ff71dbf40702384572f4a2298fb0916" gracePeriod=30 Jan 20 17:09:48 crc kubenswrapper[4558]: W0120 17:09:48.434476 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4f4387d7_1e8c_48a5_9176_c8e683469eb0.slice/crio-d5c38e0a5b6888725592c57432e7cf01c5c8b9b0b4df4bf9a2bf7693c93692e8 WatchSource:0}: Error finding container d5c38e0a5b6888725592c57432e7cf01c5c8b9b0b4df4bf9a2bf7693c93692e8: Status 404 returned error can't find the container with id d5c38e0a5b6888725592c57432e7cf01c5c8b9b0b4df4bf9a2bf7693c93692e8 Jan 20 17:09:48 crc kubenswrapper[4558]: I0120 17:09:48.436835 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:09:48 crc kubenswrapper[4558]: I0120 17:09:48.578517 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09945b97-d2ef-4ce6-b8af-9612a4dd3482" path="/var/lib/kubelet/pods/09945b97-d2ef-4ce6-b8af-9612a4dd3482/volumes" Jan 20 17:09:49 crc kubenswrapper[4558]: I0120 17:09:49.372490 4558 generic.go:334] "Generic (PLEG): container finished" podID="8149079b-aaf9-4993-b7b3-62c379dbc919" containerID="5775e82892c438c20a28ac8ee95d014102563fb0a2da04cb631f3c4dac7a8946" exitCode=0 Jan 20 17:09:49 crc kubenswrapper[4558]: I0120 17:09:49.372824 4558 generic.go:334] "Generic (PLEG): container finished" podID="8149079b-aaf9-4993-b7b3-62c379dbc919" containerID="c39e33e6d6b2482550c133b107a28667770de5c51ec6fdd129bfe9594a2dc923" exitCode=2 Jan 20 17:09:49 crc kubenswrapper[4558]: I0120 17:09:49.372834 4558 generic.go:334] "Generic (PLEG): container finished" podID="8149079b-aaf9-4993-b7b3-62c379dbc919" containerID="bcf08c42c09beee2b414cfe87eb9584080e4a7bc1b0ffc6999ff8ed107f5c12e" exitCode=0 Jan 20 17:09:49 crc kubenswrapper[4558]: I0120 17:09:49.372559 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"8149079b-aaf9-4993-b7b3-62c379dbc919","Type":"ContainerDied","Data":"5775e82892c438c20a28ac8ee95d014102563fb0a2da04cb631f3c4dac7a8946"} Jan 20 17:09:49 crc kubenswrapper[4558]: I0120 17:09:49.372920 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"8149079b-aaf9-4993-b7b3-62c379dbc919","Type":"ContainerDied","Data":"c39e33e6d6b2482550c133b107a28667770de5c51ec6fdd129bfe9594a2dc923"} Jan 20 17:09:49 crc kubenswrapper[4558]: I0120 17:09:49.372937 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"8149079b-aaf9-4993-b7b3-62c379dbc919","Type":"ContainerDied","Data":"bcf08c42c09beee2b414cfe87eb9584080e4a7bc1b0ffc6999ff8ed107f5c12e"} Jan 20 17:09:49 crc kubenswrapper[4558]: I0120 17:09:49.374369 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"4f4387d7-1e8c-48a5-9176-c8e683469eb0","Type":"ContainerStarted","Data":"38bf3ef8cd80bbc8c77c61c653d79ffe5710ff36ee6b494a5fb1186c8b5147a9"} Jan 20 17:09:49 crc kubenswrapper[4558]: I0120 17:09:49.374401 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"4f4387d7-1e8c-48a5-9176-c8e683469eb0","Type":"ContainerStarted","Data":"d5c38e0a5b6888725592c57432e7cf01c5c8b9b0b4df4bf9a2bf7693c93692e8"} Jan 20 17:09:49 crc kubenswrapper[4558]: I0120 17:09:49.374560 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:09:49 crc kubenswrapper[4558]: I0120 17:09:49.400273 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/kube-state-metrics-0" podStartSLOduration=2.12791054 podStartE2EDuration="2.400251238s" podCreationTimestamp="2026-01-20 17:09:47 +0000 UTC" firstStartedPulling="2026-01-20 17:09:48.437213993 +0000 UTC m=+1682.197551960" lastFinishedPulling="2026-01-20 17:09:48.709554691 +0000 UTC m=+1682.469892658" observedRunningTime="2026-01-20 17:09:49.388426392 +0000 UTC m=+1683.148764359" watchObservedRunningTime="2026-01-20 17:09:49.400251238 +0000 UTC m=+1683.160589205" Jan 20 17:09:49 crc kubenswrapper[4558]: I0120 17:09:49.693106 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:09:50 crc kubenswrapper[4558]: I0120 17:09:50.399922 4558 generic.go:334] "Generic (PLEG): container finished" podID="8149079b-aaf9-4993-b7b3-62c379dbc919" containerID="ce253fe8c8588e6acd46ebabc10336d56ff71dbf40702384572f4a2298fb0916" exitCode=0 Jan 20 17:09:50 crc kubenswrapper[4558]: I0120 17:09:50.400067 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"8149079b-aaf9-4993-b7b3-62c379dbc919","Type":"ContainerDied","Data":"ce253fe8c8588e6acd46ebabc10336d56ff71dbf40702384572f4a2298fb0916"} Jan 20 17:09:50 crc kubenswrapper[4558]: I0120 17:09:50.705422 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:50 crc kubenswrapper[4558]: I0120 17:09:50.762777 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8149079b-aaf9-4993-b7b3-62c379dbc919-combined-ca-bundle\") pod \"8149079b-aaf9-4993-b7b3-62c379dbc919\" (UID: \"8149079b-aaf9-4993-b7b3-62c379dbc919\") " Jan 20 17:09:50 crc kubenswrapper[4558]: I0120 17:09:50.762829 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8149079b-aaf9-4993-b7b3-62c379dbc919-log-httpd\") pod \"8149079b-aaf9-4993-b7b3-62c379dbc919\" (UID: \"8149079b-aaf9-4993-b7b3-62c379dbc919\") " Jan 20 17:09:50 crc kubenswrapper[4558]: I0120 17:09:50.762856 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8149079b-aaf9-4993-b7b3-62c379dbc919-scripts\") pod \"8149079b-aaf9-4993-b7b3-62c379dbc919\" (UID: \"8149079b-aaf9-4993-b7b3-62c379dbc919\") " Jan 20 17:09:50 crc kubenswrapper[4558]: I0120 17:09:50.762907 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8149079b-aaf9-4993-b7b3-62c379dbc919-run-httpd\") pod \"8149079b-aaf9-4993-b7b3-62c379dbc919\" (UID: \"8149079b-aaf9-4993-b7b3-62c379dbc919\") " Jan 20 17:09:50 crc kubenswrapper[4558]: I0120 17:09:50.762931 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8149079b-aaf9-4993-b7b3-62c379dbc919-sg-core-conf-yaml\") pod \"8149079b-aaf9-4993-b7b3-62c379dbc919\" (UID: \"8149079b-aaf9-4993-b7b3-62c379dbc919\") " Jan 20 17:09:50 crc kubenswrapper[4558]: I0120 17:09:50.763082 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qmlqq\" (UniqueName: \"kubernetes.io/projected/8149079b-aaf9-4993-b7b3-62c379dbc919-kube-api-access-qmlqq\") pod \"8149079b-aaf9-4993-b7b3-62c379dbc919\" (UID: \"8149079b-aaf9-4993-b7b3-62c379dbc919\") " Jan 20 17:09:50 crc kubenswrapper[4558]: I0120 17:09:50.763116 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8149079b-aaf9-4993-b7b3-62c379dbc919-config-data\") pod \"8149079b-aaf9-4993-b7b3-62c379dbc919\" (UID: \"8149079b-aaf9-4993-b7b3-62c379dbc919\") " Jan 20 17:09:50 crc kubenswrapper[4558]: I0120 17:09:50.763330 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8149079b-aaf9-4993-b7b3-62c379dbc919-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "8149079b-aaf9-4993-b7b3-62c379dbc919" (UID: "8149079b-aaf9-4993-b7b3-62c379dbc919"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:09:50 crc kubenswrapper[4558]: I0120 17:09:50.763391 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8149079b-aaf9-4993-b7b3-62c379dbc919-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "8149079b-aaf9-4993-b7b3-62c379dbc919" (UID: "8149079b-aaf9-4993-b7b3-62c379dbc919"). InnerVolumeSpecName "log-httpd". 
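The pod_startup_latency_tracker entries in this excerpt expose their own arithmetic: podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp, and podStartSLOduration additionally subtracts the image-pull window (lastFinishedPulling minus firstStartedPulling). For kube-state-metrics-0 above that is 2.400251238s - 0.272340698s = 2.12791054s, matching the logged values; for the nova pods, which pulled nothing, the two durations are equal. A stdlib-only Go check of the same numbers, with the timestamps copied from the log:

package main

import (
	"fmt"
	"time"
)

// mustParse parses timestamps in the format time.Time.String() emits,
// which is the format used in the startup-latency log entries above.
func mustParse(s string) time.Time {
	t, err := time.Parse("2006-01-02 15:04:05.999999999 -0700 MST", s)
	if err != nil {
		panic(err)
	}
	return t
}

func main() {
	// Values copied from the kube-state-metrics-0 startup-latency entry.
	created := mustParse("2026-01-20 17:09:47 +0000 UTC")
	firstPull := mustParse("2026-01-20 17:09:48.437213993 +0000 UTC")
	lastPull := mustParse("2026-01-20 17:09:48.709554691 +0000 UTC")
	observed := mustParse("2026-01-20 17:09:49.400251238 +0000 UTC")

	e2e := observed.Sub(created)         // podStartE2EDuration
	slo := e2e - lastPull.Sub(firstPull) // podStartSLOduration: E2E minus image-pull window
	fmt.Println(e2e, slo)                // prints: 2.400251238s 2.12791054s
}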
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:09:50 crc kubenswrapper[4558]: I0120 17:09:50.768936 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8149079b-aaf9-4993-b7b3-62c379dbc919-scripts" (OuterVolumeSpecName: "scripts") pod "8149079b-aaf9-4993-b7b3-62c379dbc919" (UID: "8149079b-aaf9-4993-b7b3-62c379dbc919"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:09:50 crc kubenswrapper[4558]: I0120 17:09:50.768964 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8149079b-aaf9-4993-b7b3-62c379dbc919-kube-api-access-qmlqq" (OuterVolumeSpecName: "kube-api-access-qmlqq") pod "8149079b-aaf9-4993-b7b3-62c379dbc919" (UID: "8149079b-aaf9-4993-b7b3-62c379dbc919"). InnerVolumeSpecName "kube-api-access-qmlqq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:09:50 crc kubenswrapper[4558]: I0120 17:09:50.785409 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8149079b-aaf9-4993-b7b3-62c379dbc919-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "8149079b-aaf9-4993-b7b3-62c379dbc919" (UID: "8149079b-aaf9-4993-b7b3-62c379dbc919"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:09:50 crc kubenswrapper[4558]: I0120 17:09:50.816983 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8149079b-aaf9-4993-b7b3-62c379dbc919-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8149079b-aaf9-4993-b7b3-62c379dbc919" (UID: "8149079b-aaf9-4993-b7b3-62c379dbc919"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:09:50 crc kubenswrapper[4558]: I0120 17:09:50.832431 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8149079b-aaf9-4993-b7b3-62c379dbc919-config-data" (OuterVolumeSpecName: "config-data") pod "8149079b-aaf9-4993-b7b3-62c379dbc919" (UID: "8149079b-aaf9-4993-b7b3-62c379dbc919"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:09:50 crc kubenswrapper[4558]: I0120 17:09:50.865911 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qmlqq\" (UniqueName: \"kubernetes.io/projected/8149079b-aaf9-4993-b7b3-62c379dbc919-kube-api-access-qmlqq\") on node \"crc\" DevicePath \"\"" Jan 20 17:09:50 crc kubenswrapper[4558]: I0120 17:09:50.865940 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8149079b-aaf9-4993-b7b3-62c379dbc919-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:09:50 crc kubenswrapper[4558]: I0120 17:09:50.865954 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8149079b-aaf9-4993-b7b3-62c379dbc919-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:09:50 crc kubenswrapper[4558]: I0120 17:09:50.865963 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8149079b-aaf9-4993-b7b3-62c379dbc919-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:09:50 crc kubenswrapper[4558]: I0120 17:09:50.865972 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8149079b-aaf9-4993-b7b3-62c379dbc919-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:09:50 crc kubenswrapper[4558]: I0120 17:09:50.865981 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8149079b-aaf9-4993-b7b3-62c379dbc919-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:09:50 crc kubenswrapper[4558]: I0120 17:09:50.865993 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8149079b-aaf9-4993-b7b3-62c379dbc919-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:09:51 crc kubenswrapper[4558]: I0120 17:09:51.425275 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"8149079b-aaf9-4993-b7b3-62c379dbc919","Type":"ContainerDied","Data":"1d082ba7967af3832a810efa0b3d087422bcb75cc144d69b39058fa5f06c95ec"} Jan 20 17:09:51 crc kubenswrapper[4558]: I0120 17:09:51.425336 4558 scope.go:117] "RemoveContainer" containerID="5775e82892c438c20a28ac8ee95d014102563fb0a2da04cb631f3c4dac7a8946" Jan 20 17:09:51 crc kubenswrapper[4558]: I0120 17:09:51.425485 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:51 crc kubenswrapper[4558]: I0120 17:09:51.456219 4558 scope.go:117] "RemoveContainer" containerID="c39e33e6d6b2482550c133b107a28667770de5c51ec6fdd129bfe9594a2dc923" Jan 20 17:09:51 crc kubenswrapper[4558]: I0120 17:09:51.473697 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:09:51 crc kubenswrapper[4558]: I0120 17:09:51.476541 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:09:51 crc kubenswrapper[4558]: I0120 17:09:51.481055 4558 scope.go:117] "RemoveContainer" containerID="ce253fe8c8588e6acd46ebabc10336d56ff71dbf40702384572f4a2298fb0916" Jan 20 17:09:51 crc kubenswrapper[4558]: I0120 17:09:51.482367 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:09:51 crc kubenswrapper[4558]: E0120 17:09:51.482797 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8149079b-aaf9-4993-b7b3-62c379dbc919" containerName="ceilometer-central-agent" Jan 20 17:09:51 crc kubenswrapper[4558]: I0120 17:09:51.482817 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8149079b-aaf9-4993-b7b3-62c379dbc919" containerName="ceilometer-central-agent" Jan 20 17:09:51 crc kubenswrapper[4558]: E0120 17:09:51.482832 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8149079b-aaf9-4993-b7b3-62c379dbc919" containerName="proxy-httpd" Jan 20 17:09:51 crc kubenswrapper[4558]: I0120 17:09:51.482840 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8149079b-aaf9-4993-b7b3-62c379dbc919" containerName="proxy-httpd" Jan 20 17:09:51 crc kubenswrapper[4558]: E0120 17:09:51.482850 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8149079b-aaf9-4993-b7b3-62c379dbc919" containerName="ceilometer-notification-agent" Jan 20 17:09:51 crc kubenswrapper[4558]: I0120 17:09:51.482856 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8149079b-aaf9-4993-b7b3-62c379dbc919" containerName="ceilometer-notification-agent" Jan 20 17:09:51 crc kubenswrapper[4558]: E0120 17:09:51.482886 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8149079b-aaf9-4993-b7b3-62c379dbc919" containerName="sg-core" Jan 20 17:09:51 crc kubenswrapper[4558]: I0120 17:09:51.482892 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8149079b-aaf9-4993-b7b3-62c379dbc919" containerName="sg-core" Jan 20 17:09:51 crc kubenswrapper[4558]: I0120 17:09:51.483096 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8149079b-aaf9-4993-b7b3-62c379dbc919" containerName="sg-core" Jan 20 17:09:51 crc kubenswrapper[4558]: I0120 17:09:51.483112 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8149079b-aaf9-4993-b7b3-62c379dbc919" containerName="ceilometer-notification-agent" Jan 20 17:09:51 crc kubenswrapper[4558]: I0120 17:09:51.483125 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8149079b-aaf9-4993-b7b3-62c379dbc919" containerName="proxy-httpd" Jan 20 17:09:51 crc kubenswrapper[4558]: I0120 17:09:51.483133 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8149079b-aaf9-4993-b7b3-62c379dbc919" containerName="ceilometer-central-agent" Jan 20 17:09:51 crc kubenswrapper[4558]: I0120 17:09:51.485622 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:51 crc kubenswrapper[4558]: I0120 17:09:51.488212 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:09:51 crc kubenswrapper[4558]: I0120 17:09:51.489101 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:09:51 crc kubenswrapper[4558]: I0120 17:09:51.489377 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:09:51 crc kubenswrapper[4558]: I0120 17:09:51.489560 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ceilometer-internal-svc" Jan 20 17:09:51 crc kubenswrapper[4558]: I0120 17:09:51.516660 4558 scope.go:117] "RemoveContainer" containerID="bcf08c42c09beee2b414cfe87eb9584080e4a7bc1b0ffc6999ff8ed107f5c12e" Jan 20 17:09:51 crc kubenswrapper[4558]: I0120 17:09:51.577374 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7z525\" (UniqueName: \"kubernetes.io/projected/89e1323a-3d22-4eb8-9b4d-ec6a18658a07-kube-api-access-7z525\") pod \"ceilometer-0\" (UID: \"89e1323a-3d22-4eb8-9b4d-ec6a18658a07\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:51 crc kubenswrapper[4558]: I0120 17:09:51.577409 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/89e1323a-3d22-4eb8-9b4d-ec6a18658a07-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"89e1323a-3d22-4eb8-9b4d-ec6a18658a07\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:51 crc kubenswrapper[4558]: I0120 17:09:51.577523 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/89e1323a-3d22-4eb8-9b4d-ec6a18658a07-config-data\") pod \"ceilometer-0\" (UID: \"89e1323a-3d22-4eb8-9b4d-ec6a18658a07\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:51 crc kubenswrapper[4558]: I0120 17:09:51.577659 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/89e1323a-3d22-4eb8-9b4d-ec6a18658a07-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"89e1323a-3d22-4eb8-9b4d-ec6a18658a07\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:51 crc kubenswrapper[4558]: I0120 17:09:51.578251 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/89e1323a-3d22-4eb8-9b4d-ec6a18658a07-run-httpd\") pod \"ceilometer-0\" (UID: \"89e1323a-3d22-4eb8-9b4d-ec6a18658a07\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:51 crc kubenswrapper[4558]: I0120 17:09:51.578291 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89e1323a-3d22-4eb8-9b4d-ec6a18658a07-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"89e1323a-3d22-4eb8-9b4d-ec6a18658a07\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:51 crc kubenswrapper[4558]: I0120 17:09:51.578329 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/89e1323a-3d22-4eb8-9b4d-ec6a18658a07-scripts\") pod 
\"ceilometer-0\" (UID: \"89e1323a-3d22-4eb8-9b4d-ec6a18658a07\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:51 crc kubenswrapper[4558]: I0120 17:09:51.578432 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/89e1323a-3d22-4eb8-9b4d-ec6a18658a07-log-httpd\") pod \"ceilometer-0\" (UID: \"89e1323a-3d22-4eb8-9b4d-ec6a18658a07\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:51 crc kubenswrapper[4558]: I0120 17:09:51.680706 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7z525\" (UniqueName: \"kubernetes.io/projected/89e1323a-3d22-4eb8-9b4d-ec6a18658a07-kube-api-access-7z525\") pod \"ceilometer-0\" (UID: \"89e1323a-3d22-4eb8-9b4d-ec6a18658a07\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:51 crc kubenswrapper[4558]: I0120 17:09:51.680759 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/89e1323a-3d22-4eb8-9b4d-ec6a18658a07-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"89e1323a-3d22-4eb8-9b4d-ec6a18658a07\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:51 crc kubenswrapper[4558]: I0120 17:09:51.680808 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/89e1323a-3d22-4eb8-9b4d-ec6a18658a07-config-data\") pod \"ceilometer-0\" (UID: \"89e1323a-3d22-4eb8-9b4d-ec6a18658a07\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:51 crc kubenswrapper[4558]: I0120 17:09:51.680878 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/89e1323a-3d22-4eb8-9b4d-ec6a18658a07-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"89e1323a-3d22-4eb8-9b4d-ec6a18658a07\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:51 crc kubenswrapper[4558]: I0120 17:09:51.680999 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/89e1323a-3d22-4eb8-9b4d-ec6a18658a07-run-httpd\") pod \"ceilometer-0\" (UID: \"89e1323a-3d22-4eb8-9b4d-ec6a18658a07\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:51 crc kubenswrapper[4558]: I0120 17:09:51.681032 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89e1323a-3d22-4eb8-9b4d-ec6a18658a07-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"89e1323a-3d22-4eb8-9b4d-ec6a18658a07\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:51 crc kubenswrapper[4558]: I0120 17:09:51.681073 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/89e1323a-3d22-4eb8-9b4d-ec6a18658a07-scripts\") pod \"ceilometer-0\" (UID: \"89e1323a-3d22-4eb8-9b4d-ec6a18658a07\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:51 crc kubenswrapper[4558]: I0120 17:09:51.681189 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/89e1323a-3d22-4eb8-9b4d-ec6a18658a07-log-httpd\") pod \"ceilometer-0\" (UID: \"89e1323a-3d22-4eb8-9b4d-ec6a18658a07\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:51 crc kubenswrapper[4558]: I0120 17:09:51.683659 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/89e1323a-3d22-4eb8-9b4d-ec6a18658a07-run-httpd\") pod \"ceilometer-0\" (UID: \"89e1323a-3d22-4eb8-9b4d-ec6a18658a07\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:51 crc kubenswrapper[4558]: I0120 17:09:51.683963 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/89e1323a-3d22-4eb8-9b4d-ec6a18658a07-log-httpd\") pod \"ceilometer-0\" (UID: \"89e1323a-3d22-4eb8-9b4d-ec6a18658a07\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:51 crc kubenswrapper[4558]: I0120 17:09:51.686930 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/89e1323a-3d22-4eb8-9b4d-ec6a18658a07-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"89e1323a-3d22-4eb8-9b4d-ec6a18658a07\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:51 crc kubenswrapper[4558]: I0120 17:09:51.687449 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/89e1323a-3d22-4eb8-9b4d-ec6a18658a07-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"89e1323a-3d22-4eb8-9b4d-ec6a18658a07\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:51 crc kubenswrapper[4558]: I0120 17:09:51.688271 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/89e1323a-3d22-4eb8-9b4d-ec6a18658a07-config-data\") pod \"ceilometer-0\" (UID: \"89e1323a-3d22-4eb8-9b4d-ec6a18658a07\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:51 crc kubenswrapper[4558]: I0120 17:09:51.688290 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89e1323a-3d22-4eb8-9b4d-ec6a18658a07-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"89e1323a-3d22-4eb8-9b4d-ec6a18658a07\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:51 crc kubenswrapper[4558]: I0120 17:09:51.697450 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7z525\" (UniqueName: \"kubernetes.io/projected/89e1323a-3d22-4eb8-9b4d-ec6a18658a07-kube-api-access-7z525\") pod \"ceilometer-0\" (UID: \"89e1323a-3d22-4eb8-9b4d-ec6a18658a07\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:51 crc kubenswrapper[4558]: I0120 17:09:51.697650 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/89e1323a-3d22-4eb8-9b4d-ec6a18658a07-scripts\") pod \"ceilometer-0\" (UID: \"89e1323a-3d22-4eb8-9b4d-ec6a18658a07\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:51 crc kubenswrapper[4558]: I0120 17:09:51.815947 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:52 crc kubenswrapper[4558]: I0120 17:09:52.223478 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:09:52 crc kubenswrapper[4558]: W0120 17:09:52.225648 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod89e1323a_3d22_4eb8_9b4d_ec6a18658a07.slice/crio-0d5074d60a2579bba0f1b7464ef9770a1ec87dbb15380110002a23580f967255 WatchSource:0}: Error finding container 0d5074d60a2579bba0f1b7464ef9770a1ec87dbb15380110002a23580f967255: Status 404 returned error can't find the container with id 0d5074d60a2579bba0f1b7464ef9770a1ec87dbb15380110002a23580f967255 Jan 20 17:09:52 crc kubenswrapper[4558]: I0120 17:09:52.433455 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"89e1323a-3d22-4eb8-9b4d-ec6a18658a07","Type":"ContainerStarted","Data":"0d5074d60a2579bba0f1b7464ef9770a1ec87dbb15380110002a23580f967255"} Jan 20 17:09:52 crc kubenswrapper[4558]: I0120 17:09:52.575582 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8149079b-aaf9-4993-b7b3-62c379dbc919" path="/var/lib/kubelet/pods/8149079b-aaf9-4993-b7b3-62c379dbc919/volumes" Jan 20 17:09:52 crc kubenswrapper[4558]: I0120 17:09:52.684023 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:52 crc kubenswrapper[4558]: I0120 17:09:52.684086 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:09:52 crc kubenswrapper[4558]: I0120 17:09:52.695412 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:09:52 crc kubenswrapper[4558]: I0120 17:09:52.695462 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:09:53 crc kubenswrapper[4558]: I0120 17:09:53.455248 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"89e1323a-3d22-4eb8-9b4d-ec6a18658a07","Type":"ContainerStarted","Data":"bf8e58ac5483bbd8b4b20deac013037c3720b4d82d3d752be60bddcf6786ec9c"} Jan 20 17:09:53 crc kubenswrapper[4558]: I0120 17:09:53.699313 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-metadata-0" podUID="5560f6a4-fc05-4d97-8496-b87804dfab99" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.1.141:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:09:53 crc kubenswrapper[4558]: I0120 17:09:53.699319 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-metadata-0" podUID="5560f6a4-fc05-4d97-8496-b87804dfab99" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.1.141:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:09:53 crc kubenswrapper[4558]: I0120 17:09:53.781848 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" podUID="0b0b02aa-9ba9-4207-b77c-8157659e4ba6" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.1.142:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 20 17:09:53 crc kubenswrapper[4558]: 
I0120 17:09:53.781871 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" podUID="0b0b02aa-9ba9-4207-b77c-8157659e4ba6" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.1.142:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 20 17:09:54 crc kubenswrapper[4558]: I0120 17:09:54.478682 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"89e1323a-3d22-4eb8-9b4d-ec6a18658a07","Type":"ContainerStarted","Data":"1a7711d2fd4845229c64cceee2b019f546a95bd205d9e18867f21e0bfd5f0619"} Jan 20 17:09:54 crc kubenswrapper[4558]: I0120 17:09:54.693620 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:09:54 crc kubenswrapper[4558]: I0120 17:09:54.715348 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:09:55 crc kubenswrapper[4558]: I0120 17:09:55.491801 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"89e1323a-3d22-4eb8-9b4d-ec6a18658a07","Type":"ContainerStarted","Data":"9c3597a304e198ec373869c8b52c1a042f11482f15c4fd19d23add134bcc9917"} Jan 20 17:09:55 crc kubenswrapper[4558]: I0120 17:09:55.520505 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:09:55 crc kubenswrapper[4558]: I0120 17:09:55.566752 4558 scope.go:117] "RemoveContainer" containerID="a711a286282347590079d2447ef37fa9ed0f11085d6d7a65f85e65c1c2df608f" Jan 20 17:09:55 crc kubenswrapper[4558]: E0120 17:09:55.567037 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:09:56 crc kubenswrapper[4558]: I0120 17:09:56.509885 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"89e1323a-3d22-4eb8-9b4d-ec6a18658a07","Type":"ContainerStarted","Data":"9920a3f7bb3e25b6b2a9836dcdab2898c5a9e4c3abc1d0ec169b4c920e1b090c"} Jan 20 17:09:56 crc kubenswrapper[4558]: I0120 17:09:56.531127 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=1.990879769 podStartE2EDuration="5.531115211s" podCreationTimestamp="2026-01-20 17:09:51 +0000 UTC" firstStartedPulling="2026-01-20 17:09:52.227981363 +0000 UTC m=+1685.988319331" lastFinishedPulling="2026-01-20 17:09:55.768216806 +0000 UTC m=+1689.528554773" observedRunningTime="2026-01-20 17:09:56.524552812 +0000 UTC m=+1690.284890768" watchObservedRunningTime="2026-01-20 17:09:56.531115211 +0000 UTC m=+1690.291453178" Jan 20 17:09:57 crc kubenswrapper[4558]: I0120 17:09:57.537684 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:09:58 crc kubenswrapper[4558]: I0120 17:09:58.040404 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:09:58 crc kubenswrapper[4558]: I0120 17:09:58.769139 4558 scope.go:117] 
"RemoveContainer" containerID="535ff1923649723165cb5389ef0ba724b32d3a882103118bddd27f257b9974e3" Jan 20 17:09:58 crc kubenswrapper[4558]: I0120 17:09:58.813422 4558 scope.go:117] "RemoveContainer" containerID="2f7dd16ad3f45538283e03f6272b991aea2504752f702631ee8b13f86d5e231b" Jan 20 17:09:58 crc kubenswrapper[4558]: I0120 17:09:58.835777 4558 scope.go:117] "RemoveContainer" containerID="2c83f13eea2134840701f05851db3d14cc87fed3be70ecde02d1c824d2f80a0d" Jan 20 17:09:58 crc kubenswrapper[4558]: I0120 17:09:58.902803 4558 scope.go:117] "RemoveContainer" containerID="92f915a76088a8ba412c82688f4d1506c1c3c06ae16505ce91c0d9ea9dfe22aa" Jan 20 17:09:58 crc kubenswrapper[4558]: I0120 17:09:58.924181 4558 scope.go:117] "RemoveContainer" containerID="225fabc2e274bc055279016bab31443db7f376181931205b6688497f431e61fd" Jan 20 17:09:58 crc kubenswrapper[4558]: I0120 17:09:58.965596 4558 scope.go:117] "RemoveContainer" containerID="148dd9de98eda3e5b9625476910ce41f12b5496fe3cc4cb92bbf3686a771a8e5" Jan 20 17:09:58 crc kubenswrapper[4558]: I0120 17:09:58.984269 4558 scope.go:117] "RemoveContainer" containerID="92b0792fd25f209b5d17ce804362725cc41d26b7bb428b94617376e79223ee79" Jan 20 17:09:59 crc kubenswrapper[4558]: I0120 17:09:59.001465 4558 scope.go:117] "RemoveContainer" containerID="fe128214e7f772bc7935abe6b4072bef9cbf2631f9f205c7d0e3f2ffd9ff1572" Jan 20 17:09:59 crc kubenswrapper[4558]: I0120 17:09:59.030133 4558 scope.go:117] "RemoveContainer" containerID="c02c16b021c1478fe3fa6419ce9bea842560b0d70281d36ac4dc50c497b670fb" Jan 20 17:09:59 crc kubenswrapper[4558]: I0120 17:09:59.078495 4558 scope.go:117] "RemoveContainer" containerID="7b9317fe62c5265ebcee0b8b4c582e9d2dbd8115c5f719f11d257204794edfb2" Jan 20 17:09:59 crc kubenswrapper[4558]: I0120 17:09:59.100501 4558 scope.go:117] "RemoveContainer" containerID="16d11c7e8c68131f8eb72146637f28a05ad570c65546debfb217fa0b00e58dae" Jan 20 17:09:59 crc kubenswrapper[4558]: I0120 17:09:59.119135 4558 scope.go:117] "RemoveContainer" containerID="6877fd17a18c33efd011fb22bc699b71b1f95e440e3bb32835f147ed97daf4a4" Jan 20 17:10:02 crc kubenswrapper[4558]: I0120 17:10:02.690479 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:10:02 crc kubenswrapper[4558]: I0120 17:10:02.691710 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:10:02 crc kubenswrapper[4558]: I0120 17:10:02.696884 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:10:02 crc kubenswrapper[4558]: I0120 17:10:02.697475 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:10:02 crc kubenswrapper[4558]: I0120 17:10:02.699441 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:10:02 crc kubenswrapper[4558]: I0120 17:10:02.699706 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:10:02 crc kubenswrapper[4558]: I0120 17:10:02.699752 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:10:02 crc kubenswrapper[4558]: I0120 17:10:02.701741 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:10:03 crc kubenswrapper[4558]: I0120 17:10:03.606176 
4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:10:03 crc kubenswrapper[4558]: I0120 17:10:03.609289 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:10:05 crc kubenswrapper[4558]: I0120 17:10:05.328714 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:10:05 crc kubenswrapper[4558]: I0120 17:10:05.328956 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="89e1323a-3d22-4eb8-9b4d-ec6a18658a07" containerName="ceilometer-central-agent" containerID="cri-o://bf8e58ac5483bbd8b4b20deac013037c3720b4d82d3d752be60bddcf6786ec9c" gracePeriod=30 Jan 20 17:10:05 crc kubenswrapper[4558]: I0120 17:10:05.329751 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="89e1323a-3d22-4eb8-9b4d-ec6a18658a07" containerName="sg-core" containerID="cri-o://9c3597a304e198ec373869c8b52c1a042f11482f15c4fd19d23add134bcc9917" gracePeriod=30 Jan 20 17:10:05 crc kubenswrapper[4558]: I0120 17:10:05.329975 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="89e1323a-3d22-4eb8-9b4d-ec6a18658a07" containerName="proxy-httpd" containerID="cri-o://9920a3f7bb3e25b6b2a9836dcdab2898c5a9e4c3abc1d0ec169b4c920e1b090c" gracePeriod=30 Jan 20 17:10:05 crc kubenswrapper[4558]: I0120 17:10:05.329958 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="89e1323a-3d22-4eb8-9b4d-ec6a18658a07" containerName="ceilometer-notification-agent" containerID="cri-o://1a7711d2fd4845229c64cceee2b019f546a95bd205d9e18867f21e0bfd5f0619" gracePeriod=30 Jan 20 17:10:05 crc kubenswrapper[4558]: I0120 17:10:05.340368 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/ceilometer-0" podUID="89e1323a-3d22-4eb8-9b4d-ec6a18658a07" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.1.145:3000/\": read tcp 10.217.0.2:36600->10.217.1.145:3000: read: connection reset by peer" Jan 20 17:10:05 crc kubenswrapper[4558]: I0120 17:10:05.627387 4558 generic.go:334] "Generic (PLEG): container finished" podID="89e1323a-3d22-4eb8-9b4d-ec6a18658a07" containerID="9920a3f7bb3e25b6b2a9836dcdab2898c5a9e4c3abc1d0ec169b4c920e1b090c" exitCode=0 Jan 20 17:10:05 crc kubenswrapper[4558]: I0120 17:10:05.627757 4558 generic.go:334] "Generic (PLEG): container finished" podID="89e1323a-3d22-4eb8-9b4d-ec6a18658a07" containerID="9c3597a304e198ec373869c8b52c1a042f11482f15c4fd19d23add134bcc9917" exitCode=2 Jan 20 17:10:05 crc kubenswrapper[4558]: I0120 17:10:05.627558 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"89e1323a-3d22-4eb8-9b4d-ec6a18658a07","Type":"ContainerDied","Data":"9920a3f7bb3e25b6b2a9836dcdab2898c5a9e4c3abc1d0ec169b4c920e1b090c"} Jan 20 17:10:05 crc kubenswrapper[4558]: I0120 17:10:05.628149 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"89e1323a-3d22-4eb8-9b4d-ec6a18658a07","Type":"ContainerDied","Data":"9c3597a304e198ec373869c8b52c1a042f11482f15c4fd19d23add134bcc9917"} Jan 20 17:10:06 crc kubenswrapper[4558]: I0120 17:10:06.556034 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:10:06 crc kubenswrapper[4558]: I0120 17:10:06.641903 4558 generic.go:334] "Generic (PLEG): container finished" podID="89e1323a-3d22-4eb8-9b4d-ec6a18658a07" containerID="bf8e58ac5483bbd8b4b20deac013037c3720b4d82d3d752be60bddcf6786ec9c" exitCode=0 Jan 20 17:10:06 crc kubenswrapper[4558]: I0120 17:10:06.642000 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"89e1323a-3d22-4eb8-9b4d-ec6a18658a07","Type":"ContainerDied","Data":"bf8e58ac5483bbd8b4b20deac013037c3720b4d82d3d752be60bddcf6786ec9c"} Jan 20 17:10:06 crc kubenswrapper[4558]: I0120 17:10:06.642832 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="0b0b02aa-9ba9-4207-b77c-8157659e4ba6" containerName="nova-api-log" containerID="cri-o://6c2b79ff5237f93288a809fb0566043521294554bd47c3ed1a939b33fa9bee10" gracePeriod=30 Jan 20 17:10:06 crc kubenswrapper[4558]: I0120 17:10:06.642925 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="0b0b02aa-9ba9-4207-b77c-8157659e4ba6" containerName="nova-api-api" containerID="cri-o://0da1ffbd18a6d46e2e13c3f5dc89867501bd1410f00da6ac8d8a3630eba71d59" gracePeriod=30 Jan 20 17:10:07 crc kubenswrapper[4558]: I0120 17:10:07.651186 4558 generic.go:334] "Generic (PLEG): container finished" podID="0b0b02aa-9ba9-4207-b77c-8157659e4ba6" containerID="6c2b79ff5237f93288a809fb0566043521294554bd47c3ed1a939b33fa9bee10" exitCode=143 Jan 20 17:10:07 crc kubenswrapper[4558]: I0120 17:10:07.651196 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"0b0b02aa-9ba9-4207-b77c-8157659e4ba6","Type":"ContainerDied","Data":"6c2b79ff5237f93288a809fb0566043521294554bd47c3ed1a939b33fa9bee10"} Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.452074 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.537624 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7z525\" (UniqueName: \"kubernetes.io/projected/89e1323a-3d22-4eb8-9b4d-ec6a18658a07-kube-api-access-7z525\") pod \"89e1323a-3d22-4eb8-9b4d-ec6a18658a07\" (UID: \"89e1323a-3d22-4eb8-9b4d-ec6a18658a07\") " Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.537721 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/89e1323a-3d22-4eb8-9b4d-ec6a18658a07-config-data\") pod \"89e1323a-3d22-4eb8-9b4d-ec6a18658a07\" (UID: \"89e1323a-3d22-4eb8-9b4d-ec6a18658a07\") " Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.537757 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/89e1323a-3d22-4eb8-9b4d-ec6a18658a07-scripts\") pod \"89e1323a-3d22-4eb8-9b4d-ec6a18658a07\" (UID: \"89e1323a-3d22-4eb8-9b4d-ec6a18658a07\") " Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.537825 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/89e1323a-3d22-4eb8-9b4d-ec6a18658a07-log-httpd\") pod \"89e1323a-3d22-4eb8-9b4d-ec6a18658a07\" (UID: \"89e1323a-3d22-4eb8-9b4d-ec6a18658a07\") " Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.537897 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/89e1323a-3d22-4eb8-9b4d-ec6a18658a07-ceilometer-tls-certs\") pod \"89e1323a-3d22-4eb8-9b4d-ec6a18658a07\" (UID: \"89e1323a-3d22-4eb8-9b4d-ec6a18658a07\") " Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.537954 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89e1323a-3d22-4eb8-9b4d-ec6a18658a07-combined-ca-bundle\") pod \"89e1323a-3d22-4eb8-9b4d-ec6a18658a07\" (UID: \"89e1323a-3d22-4eb8-9b4d-ec6a18658a07\") " Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.537998 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/89e1323a-3d22-4eb8-9b4d-ec6a18658a07-sg-core-conf-yaml\") pod \"89e1323a-3d22-4eb8-9b4d-ec6a18658a07\" (UID: \"89e1323a-3d22-4eb8-9b4d-ec6a18658a07\") " Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.538034 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/89e1323a-3d22-4eb8-9b4d-ec6a18658a07-run-httpd\") pod \"89e1323a-3d22-4eb8-9b4d-ec6a18658a07\" (UID: \"89e1323a-3d22-4eb8-9b4d-ec6a18658a07\") " Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.539011 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/89e1323a-3d22-4eb8-9b4d-ec6a18658a07-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "89e1323a-3d22-4eb8-9b4d-ec6a18658a07" (UID: "89e1323a-3d22-4eb8-9b4d-ec6a18658a07"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.539303 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/89e1323a-3d22-4eb8-9b4d-ec6a18658a07-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "89e1323a-3d22-4eb8-9b4d-ec6a18658a07" (UID: "89e1323a-3d22-4eb8-9b4d-ec6a18658a07"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.545748 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/89e1323a-3d22-4eb8-9b4d-ec6a18658a07-scripts" (OuterVolumeSpecName: "scripts") pod "89e1323a-3d22-4eb8-9b4d-ec6a18658a07" (UID: "89e1323a-3d22-4eb8-9b4d-ec6a18658a07"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.548806 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/89e1323a-3d22-4eb8-9b4d-ec6a18658a07-kube-api-access-7z525" (OuterVolumeSpecName: "kube-api-access-7z525") pod "89e1323a-3d22-4eb8-9b4d-ec6a18658a07" (UID: "89e1323a-3d22-4eb8-9b4d-ec6a18658a07"). InnerVolumeSpecName "kube-api-access-7z525". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.563066 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/89e1323a-3d22-4eb8-9b4d-ec6a18658a07-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "89e1323a-3d22-4eb8-9b4d-ec6a18658a07" (UID: "89e1323a-3d22-4eb8-9b4d-ec6a18658a07"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.582299 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/89e1323a-3d22-4eb8-9b4d-ec6a18658a07-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "89e1323a-3d22-4eb8-9b4d-ec6a18658a07" (UID: "89e1323a-3d22-4eb8-9b4d-ec6a18658a07"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.605189 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/89e1323a-3d22-4eb8-9b4d-ec6a18658a07-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "89e1323a-3d22-4eb8-9b4d-ec6a18658a07" (UID: "89e1323a-3d22-4eb8-9b4d-ec6a18658a07"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.620660 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/89e1323a-3d22-4eb8-9b4d-ec6a18658a07-config-data" (OuterVolumeSpecName: "config-data") pod "89e1323a-3d22-4eb8-9b4d-ec6a18658a07" (UID: "89e1323a-3d22-4eb8-9b4d-ec6a18658a07"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.641823 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/89e1323a-3d22-4eb8-9b4d-ec6a18658a07-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.641877 4558 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/89e1323a-3d22-4eb8-9b4d-ec6a18658a07-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.641894 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89e1323a-3d22-4eb8-9b4d-ec6a18658a07-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.641908 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/89e1323a-3d22-4eb8-9b4d-ec6a18658a07-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.641918 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/89e1323a-3d22-4eb8-9b4d-ec6a18658a07-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.641947 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7z525\" (UniqueName: \"kubernetes.io/projected/89e1323a-3d22-4eb8-9b4d-ec6a18658a07-kube-api-access-7z525\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.641958 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/89e1323a-3d22-4eb8-9b4d-ec6a18658a07-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.641969 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/89e1323a-3d22-4eb8-9b4d-ec6a18658a07-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.663258 4558 generic.go:334] "Generic (PLEG): container finished" podID="89e1323a-3d22-4eb8-9b4d-ec6a18658a07" containerID="1a7711d2fd4845229c64cceee2b019f546a95bd205d9e18867f21e0bfd5f0619" exitCode=0 Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.663305 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"89e1323a-3d22-4eb8-9b4d-ec6a18658a07","Type":"ContainerDied","Data":"1a7711d2fd4845229c64cceee2b019f546a95bd205d9e18867f21e0bfd5f0619"} Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.663343 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"89e1323a-3d22-4eb8-9b4d-ec6a18658a07","Type":"ContainerDied","Data":"0d5074d60a2579bba0f1b7464ef9770a1ec87dbb15380110002a23580f967255"} Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.663373 4558 scope.go:117] "RemoveContainer" containerID="9920a3f7bb3e25b6b2a9836dcdab2898c5a9e4c3abc1d0ec169b4c920e1b090c" Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.663374 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.700772 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.705506 4558 scope.go:117] "RemoveContainer" containerID="9c3597a304e198ec373869c8b52c1a042f11482f15c4fd19d23add134bcc9917" Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.706416 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.725492 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:10:08 crc kubenswrapper[4558]: E0120 17:10:08.725901 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89e1323a-3d22-4eb8-9b4d-ec6a18658a07" containerName="ceilometer-notification-agent" Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.725920 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="89e1323a-3d22-4eb8-9b4d-ec6a18658a07" containerName="ceilometer-notification-agent" Jan 20 17:10:08 crc kubenswrapper[4558]: E0120 17:10:08.725940 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89e1323a-3d22-4eb8-9b4d-ec6a18658a07" containerName="ceilometer-central-agent" Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.725947 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="89e1323a-3d22-4eb8-9b4d-ec6a18658a07" containerName="ceilometer-central-agent" Jan 20 17:10:08 crc kubenswrapper[4558]: E0120 17:10:08.725961 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89e1323a-3d22-4eb8-9b4d-ec6a18658a07" containerName="proxy-httpd" Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.725969 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="89e1323a-3d22-4eb8-9b4d-ec6a18658a07" containerName="proxy-httpd" Jan 20 17:10:08 crc kubenswrapper[4558]: E0120 17:10:08.725979 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89e1323a-3d22-4eb8-9b4d-ec6a18658a07" containerName="sg-core" Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.725985 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="89e1323a-3d22-4eb8-9b4d-ec6a18658a07" containerName="sg-core" Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.726174 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="89e1323a-3d22-4eb8-9b4d-ec6a18658a07" containerName="ceilometer-notification-agent" Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.726195 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="89e1323a-3d22-4eb8-9b4d-ec6a18658a07" containerName="proxy-httpd" Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.726209 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="89e1323a-3d22-4eb8-9b4d-ec6a18658a07" containerName="sg-core" Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.726225 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="89e1323a-3d22-4eb8-9b4d-ec6a18658a07" containerName="ceilometer-central-agent" Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.728293 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.729342 4558 scope.go:117] "RemoveContainer" containerID="1a7711d2fd4845229c64cceee2b019f546a95bd205d9e18867f21e0bfd5f0619" Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.730322 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ceilometer-internal-svc" Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.730481 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.736457 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.737021 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.749920 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/95cf677f-e744-4dad-acfb-507100d2ea14-config-data\") pod \"ceilometer-0\" (UID: \"95cf677f-e744-4dad-acfb-507100d2ea14\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.749970 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/95cf677f-e744-4dad-acfb-507100d2ea14-scripts\") pod \"ceilometer-0\" (UID: \"95cf677f-e744-4dad-acfb-507100d2ea14\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.749997 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/95cf677f-e744-4dad-acfb-507100d2ea14-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"95cf677f-e744-4dad-acfb-507100d2ea14\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.750057 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/95cf677f-e744-4dad-acfb-507100d2ea14-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"95cf677f-e744-4dad-acfb-507100d2ea14\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.750087 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/95cf677f-e744-4dad-acfb-507100d2ea14-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"95cf677f-e744-4dad-acfb-507100d2ea14\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.750106 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/95cf677f-e744-4dad-acfb-507100d2ea14-log-httpd\") pod \"ceilometer-0\" (UID: \"95cf677f-e744-4dad-acfb-507100d2ea14\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.750123 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/95cf677f-e744-4dad-acfb-507100d2ea14-run-httpd\") pod \"ceilometer-0\" (UID: 
\"95cf677f-e744-4dad-acfb-507100d2ea14\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.750173 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4w5vq\" (UniqueName: \"kubernetes.io/projected/95cf677f-e744-4dad-acfb-507100d2ea14-kube-api-access-4w5vq\") pod \"ceilometer-0\" (UID: \"95cf677f-e744-4dad-acfb-507100d2ea14\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.758810 4558 scope.go:117] "RemoveContainer" containerID="bf8e58ac5483bbd8b4b20deac013037c3720b4d82d3d752be60bddcf6786ec9c" Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.780777 4558 scope.go:117] "RemoveContainer" containerID="9920a3f7bb3e25b6b2a9836dcdab2898c5a9e4c3abc1d0ec169b4c920e1b090c" Jan 20 17:10:08 crc kubenswrapper[4558]: E0120 17:10:08.781138 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9920a3f7bb3e25b6b2a9836dcdab2898c5a9e4c3abc1d0ec169b4c920e1b090c\": container with ID starting with 9920a3f7bb3e25b6b2a9836dcdab2898c5a9e4c3abc1d0ec169b4c920e1b090c not found: ID does not exist" containerID="9920a3f7bb3e25b6b2a9836dcdab2898c5a9e4c3abc1d0ec169b4c920e1b090c" Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.781182 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9920a3f7bb3e25b6b2a9836dcdab2898c5a9e4c3abc1d0ec169b4c920e1b090c"} err="failed to get container status \"9920a3f7bb3e25b6b2a9836dcdab2898c5a9e4c3abc1d0ec169b4c920e1b090c\": rpc error: code = NotFound desc = could not find container \"9920a3f7bb3e25b6b2a9836dcdab2898c5a9e4c3abc1d0ec169b4c920e1b090c\": container with ID starting with 9920a3f7bb3e25b6b2a9836dcdab2898c5a9e4c3abc1d0ec169b4c920e1b090c not found: ID does not exist" Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.781202 4558 scope.go:117] "RemoveContainer" containerID="9c3597a304e198ec373869c8b52c1a042f11482f15c4fd19d23add134bcc9917" Jan 20 17:10:08 crc kubenswrapper[4558]: E0120 17:10:08.781518 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9c3597a304e198ec373869c8b52c1a042f11482f15c4fd19d23add134bcc9917\": container with ID starting with 9c3597a304e198ec373869c8b52c1a042f11482f15c4fd19d23add134bcc9917 not found: ID does not exist" containerID="9c3597a304e198ec373869c8b52c1a042f11482f15c4fd19d23add134bcc9917" Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.781559 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9c3597a304e198ec373869c8b52c1a042f11482f15c4fd19d23add134bcc9917"} err="failed to get container status \"9c3597a304e198ec373869c8b52c1a042f11482f15c4fd19d23add134bcc9917\": rpc error: code = NotFound desc = could not find container \"9c3597a304e198ec373869c8b52c1a042f11482f15c4fd19d23add134bcc9917\": container with ID starting with 9c3597a304e198ec373869c8b52c1a042f11482f15c4fd19d23add134bcc9917 not found: ID does not exist" Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.781586 4558 scope.go:117] "RemoveContainer" containerID="1a7711d2fd4845229c64cceee2b019f546a95bd205d9e18867f21e0bfd5f0619" Jan 20 17:10:08 crc kubenswrapper[4558]: E0120 17:10:08.781852 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"1a7711d2fd4845229c64cceee2b019f546a95bd205d9e18867f21e0bfd5f0619\": container with ID starting with 1a7711d2fd4845229c64cceee2b019f546a95bd205d9e18867f21e0bfd5f0619 not found: ID does not exist" containerID="1a7711d2fd4845229c64cceee2b019f546a95bd205d9e18867f21e0bfd5f0619" Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.781883 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1a7711d2fd4845229c64cceee2b019f546a95bd205d9e18867f21e0bfd5f0619"} err="failed to get container status \"1a7711d2fd4845229c64cceee2b019f546a95bd205d9e18867f21e0bfd5f0619\": rpc error: code = NotFound desc = could not find container \"1a7711d2fd4845229c64cceee2b019f546a95bd205d9e18867f21e0bfd5f0619\": container with ID starting with 1a7711d2fd4845229c64cceee2b019f546a95bd205d9e18867f21e0bfd5f0619 not found: ID does not exist" Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.781899 4558 scope.go:117] "RemoveContainer" containerID="bf8e58ac5483bbd8b4b20deac013037c3720b4d82d3d752be60bddcf6786ec9c" Jan 20 17:10:08 crc kubenswrapper[4558]: E0120 17:10:08.782114 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bf8e58ac5483bbd8b4b20deac013037c3720b4d82d3d752be60bddcf6786ec9c\": container with ID starting with bf8e58ac5483bbd8b4b20deac013037c3720b4d82d3d752be60bddcf6786ec9c not found: ID does not exist" containerID="bf8e58ac5483bbd8b4b20deac013037c3720b4d82d3d752be60bddcf6786ec9c" Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.782139 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bf8e58ac5483bbd8b4b20deac013037c3720b4d82d3d752be60bddcf6786ec9c"} err="failed to get container status \"bf8e58ac5483bbd8b4b20deac013037c3720b4d82d3d752be60bddcf6786ec9c\": rpc error: code = NotFound desc = could not find container \"bf8e58ac5483bbd8b4b20deac013037c3720b4d82d3d752be60bddcf6786ec9c\": container with ID starting with bf8e58ac5483bbd8b4b20deac013037c3720b4d82d3d752be60bddcf6786ec9c not found: ID does not exist" Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.852415 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/95cf677f-e744-4dad-acfb-507100d2ea14-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"95cf677f-e744-4dad-acfb-507100d2ea14\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.852487 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/95cf677f-e744-4dad-acfb-507100d2ea14-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"95cf677f-e744-4dad-acfb-507100d2ea14\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.852515 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/95cf677f-e744-4dad-acfb-507100d2ea14-log-httpd\") pod \"ceilometer-0\" (UID: \"95cf677f-e744-4dad-acfb-507100d2ea14\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.852545 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/95cf677f-e744-4dad-acfb-507100d2ea14-run-httpd\") pod \"ceilometer-0\" (UID: \"95cf677f-e744-4dad-acfb-507100d2ea14\") " 
pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.852615 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4w5vq\" (UniqueName: \"kubernetes.io/projected/95cf677f-e744-4dad-acfb-507100d2ea14-kube-api-access-4w5vq\") pod \"ceilometer-0\" (UID: \"95cf677f-e744-4dad-acfb-507100d2ea14\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.852728 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/95cf677f-e744-4dad-acfb-507100d2ea14-config-data\") pod \"ceilometer-0\" (UID: \"95cf677f-e744-4dad-acfb-507100d2ea14\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.852780 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/95cf677f-e744-4dad-acfb-507100d2ea14-scripts\") pod \"ceilometer-0\" (UID: \"95cf677f-e744-4dad-acfb-507100d2ea14\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.852813 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/95cf677f-e744-4dad-acfb-507100d2ea14-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"95cf677f-e744-4dad-acfb-507100d2ea14\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.853032 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/95cf677f-e744-4dad-acfb-507100d2ea14-log-httpd\") pod \"ceilometer-0\" (UID: \"95cf677f-e744-4dad-acfb-507100d2ea14\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.853456 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/95cf677f-e744-4dad-acfb-507100d2ea14-run-httpd\") pod \"ceilometer-0\" (UID: \"95cf677f-e744-4dad-acfb-507100d2ea14\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.857081 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/95cf677f-e744-4dad-acfb-507100d2ea14-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"95cf677f-e744-4dad-acfb-507100d2ea14\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.857194 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/95cf677f-e744-4dad-acfb-507100d2ea14-scripts\") pod \"ceilometer-0\" (UID: \"95cf677f-e744-4dad-acfb-507100d2ea14\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.858438 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/95cf677f-e744-4dad-acfb-507100d2ea14-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"95cf677f-e744-4dad-acfb-507100d2ea14\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.858788 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/95cf677f-e744-4dad-acfb-507100d2ea14-combined-ca-bundle\") pod 
\"ceilometer-0\" (UID: \"95cf677f-e744-4dad-acfb-507100d2ea14\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.859882 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/95cf677f-e744-4dad-acfb-507100d2ea14-config-data\") pod \"ceilometer-0\" (UID: \"95cf677f-e744-4dad-acfb-507100d2ea14\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:10:08 crc kubenswrapper[4558]: I0120 17:10:08.866790 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4w5vq\" (UniqueName: \"kubernetes.io/projected/95cf677f-e744-4dad-acfb-507100d2ea14-kube-api-access-4w5vq\") pod \"ceilometer-0\" (UID: \"95cf677f-e744-4dad-acfb-507100d2ea14\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:10:09 crc kubenswrapper[4558]: I0120 17:10:09.054523 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:10:09 crc kubenswrapper[4558]: I0120 17:10:09.468389 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:10:09 crc kubenswrapper[4558]: W0120 17:10:09.469640 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod95cf677f_e744_4dad_acfb_507100d2ea14.slice/crio-78f0cfb41c13b877e4a3bfd328ace655b0b9adbff758b82b1578a7102bbe3717 WatchSource:0}: Error finding container 78f0cfb41c13b877e4a3bfd328ace655b0b9adbff758b82b1578a7102bbe3717: Status 404 returned error can't find the container with id 78f0cfb41c13b877e4a3bfd328ace655b0b9adbff758b82b1578a7102bbe3717 Jan 20 17:10:09 crc kubenswrapper[4558]: I0120 17:10:09.566321 4558 scope.go:117] "RemoveContainer" containerID="a711a286282347590079d2447ef37fa9ed0f11085d6d7a65f85e65c1c2df608f" Jan 20 17:10:09 crc kubenswrapper[4558]: E0120 17:10:09.566632 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:10:09 crc kubenswrapper[4558]: I0120 17:10:09.675206 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"95cf677f-e744-4dad-acfb-507100d2ea14","Type":"ContainerStarted","Data":"78f0cfb41c13b877e4a3bfd328ace655b0b9adbff758b82b1578a7102bbe3717"} Jan 20 17:10:10 crc kubenswrapper[4558]: I0120 17:10:10.099445 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:10:10 crc kubenswrapper[4558]: I0120 17:10:10.285277 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0b0b02aa-9ba9-4207-b77c-8157659e4ba6-config-data\") pod \"0b0b02aa-9ba9-4207-b77c-8157659e4ba6\" (UID: \"0b0b02aa-9ba9-4207-b77c-8157659e4ba6\") " Jan 20 17:10:10 crc kubenswrapper[4558]: I0120 17:10:10.285505 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-brfq8\" (UniqueName: \"kubernetes.io/projected/0b0b02aa-9ba9-4207-b77c-8157659e4ba6-kube-api-access-brfq8\") pod \"0b0b02aa-9ba9-4207-b77c-8157659e4ba6\" (UID: \"0b0b02aa-9ba9-4207-b77c-8157659e4ba6\") " Jan 20 17:10:10 crc kubenswrapper[4558]: I0120 17:10:10.285718 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b0b02aa-9ba9-4207-b77c-8157659e4ba6-combined-ca-bundle\") pod \"0b0b02aa-9ba9-4207-b77c-8157659e4ba6\" (UID: \"0b0b02aa-9ba9-4207-b77c-8157659e4ba6\") " Jan 20 17:10:10 crc kubenswrapper[4558]: I0120 17:10:10.285823 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0b0b02aa-9ba9-4207-b77c-8157659e4ba6-logs\") pod \"0b0b02aa-9ba9-4207-b77c-8157659e4ba6\" (UID: \"0b0b02aa-9ba9-4207-b77c-8157659e4ba6\") " Jan 20 17:10:10 crc kubenswrapper[4558]: I0120 17:10:10.286325 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0b0b02aa-9ba9-4207-b77c-8157659e4ba6-logs" (OuterVolumeSpecName: "logs") pod "0b0b02aa-9ba9-4207-b77c-8157659e4ba6" (UID: "0b0b02aa-9ba9-4207-b77c-8157659e4ba6"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:10:10 crc kubenswrapper[4558]: I0120 17:10:10.287561 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0b0b02aa-9ba9-4207-b77c-8157659e4ba6-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:10 crc kubenswrapper[4558]: I0120 17:10:10.290821 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b0b02aa-9ba9-4207-b77c-8157659e4ba6-kube-api-access-brfq8" (OuterVolumeSpecName: "kube-api-access-brfq8") pod "0b0b02aa-9ba9-4207-b77c-8157659e4ba6" (UID: "0b0b02aa-9ba9-4207-b77c-8157659e4ba6"). InnerVolumeSpecName "kube-api-access-brfq8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:10:10 crc kubenswrapper[4558]: I0120 17:10:10.308235 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b0b02aa-9ba9-4207-b77c-8157659e4ba6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0b0b02aa-9ba9-4207-b77c-8157659e4ba6" (UID: "0b0b02aa-9ba9-4207-b77c-8157659e4ba6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:10 crc kubenswrapper[4558]: I0120 17:10:10.310332 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b0b02aa-9ba9-4207-b77c-8157659e4ba6-config-data" (OuterVolumeSpecName: "config-data") pod "0b0b02aa-9ba9-4207-b77c-8157659e4ba6" (UID: "0b0b02aa-9ba9-4207-b77c-8157659e4ba6"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:10 crc kubenswrapper[4558]: I0120 17:10:10.389759 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b0b02aa-9ba9-4207-b77c-8157659e4ba6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:10 crc kubenswrapper[4558]: I0120 17:10:10.389790 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0b0b02aa-9ba9-4207-b77c-8157659e4ba6-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:10 crc kubenswrapper[4558]: I0120 17:10:10.389806 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-brfq8\" (UniqueName: \"kubernetes.io/projected/0b0b02aa-9ba9-4207-b77c-8157659e4ba6-kube-api-access-brfq8\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:10 crc kubenswrapper[4558]: I0120 17:10:10.575939 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="89e1323a-3d22-4eb8-9b4d-ec6a18658a07" path="/var/lib/kubelet/pods/89e1323a-3d22-4eb8-9b4d-ec6a18658a07/volumes" Jan 20 17:10:10 crc kubenswrapper[4558]: I0120 17:10:10.685306 4558 generic.go:334] "Generic (PLEG): container finished" podID="0b0b02aa-9ba9-4207-b77c-8157659e4ba6" containerID="0da1ffbd18a6d46e2e13c3f5dc89867501bd1410f00da6ac8d8a3630eba71d59" exitCode=0 Jan 20 17:10:10 crc kubenswrapper[4558]: I0120 17:10:10.685369 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"0b0b02aa-9ba9-4207-b77c-8157659e4ba6","Type":"ContainerDied","Data":"0da1ffbd18a6d46e2e13c3f5dc89867501bd1410f00da6ac8d8a3630eba71d59"} Jan 20 17:10:10 crc kubenswrapper[4558]: I0120 17:10:10.685393 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"0b0b02aa-9ba9-4207-b77c-8157659e4ba6","Type":"ContainerDied","Data":"2c69c43b59b63ce2f85e2277018dc5c5a49ec02929892a7ab944cbbb36ee4c10"} Jan 20 17:10:10 crc kubenswrapper[4558]: I0120 17:10:10.685414 4558 scope.go:117] "RemoveContainer" containerID="0da1ffbd18a6d46e2e13c3f5dc89867501bd1410f00da6ac8d8a3630eba71d59" Jan 20 17:10:10 crc kubenswrapper[4558]: I0120 17:10:10.685517 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:10:10 crc kubenswrapper[4558]: I0120 17:10:10.688205 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"95cf677f-e744-4dad-acfb-507100d2ea14","Type":"ContainerStarted","Data":"6c367435c2502d2379a050e4069609ac5d1de29fbcbd65e5756a526e79921a06"} Jan 20 17:10:10 crc kubenswrapper[4558]: I0120 17:10:10.711190 4558 scope.go:117] "RemoveContainer" containerID="6c2b79ff5237f93288a809fb0566043521294554bd47c3ed1a939b33fa9bee10" Jan 20 17:10:10 crc kubenswrapper[4558]: I0120 17:10:10.714716 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:10:10 crc kubenswrapper[4558]: I0120 17:10:10.722069 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:10:10 crc kubenswrapper[4558]: I0120 17:10:10.737341 4558 scope.go:117] "RemoveContainer" containerID="0da1ffbd18a6d46e2e13c3f5dc89867501bd1410f00da6ac8d8a3630eba71d59" Jan 20 17:10:10 crc kubenswrapper[4558]: E0120 17:10:10.737842 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0da1ffbd18a6d46e2e13c3f5dc89867501bd1410f00da6ac8d8a3630eba71d59\": container with ID starting with 0da1ffbd18a6d46e2e13c3f5dc89867501bd1410f00da6ac8d8a3630eba71d59 not found: ID does not exist" containerID="0da1ffbd18a6d46e2e13c3f5dc89867501bd1410f00da6ac8d8a3630eba71d59" Jan 20 17:10:10 crc kubenswrapper[4558]: I0120 17:10:10.737935 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0da1ffbd18a6d46e2e13c3f5dc89867501bd1410f00da6ac8d8a3630eba71d59"} err="failed to get container status \"0da1ffbd18a6d46e2e13c3f5dc89867501bd1410f00da6ac8d8a3630eba71d59\": rpc error: code = NotFound desc = could not find container \"0da1ffbd18a6d46e2e13c3f5dc89867501bd1410f00da6ac8d8a3630eba71d59\": container with ID starting with 0da1ffbd18a6d46e2e13c3f5dc89867501bd1410f00da6ac8d8a3630eba71d59 not found: ID does not exist" Jan 20 17:10:10 crc kubenswrapper[4558]: I0120 17:10:10.738022 4558 scope.go:117] "RemoveContainer" containerID="6c2b79ff5237f93288a809fb0566043521294554bd47c3ed1a939b33fa9bee10" Jan 20 17:10:10 crc kubenswrapper[4558]: I0120 17:10:10.739118 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:10:10 crc kubenswrapper[4558]: E0120 17:10:10.739553 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b0b02aa-9ba9-4207-b77c-8157659e4ba6" containerName="nova-api-api" Jan 20 17:10:10 crc kubenswrapper[4558]: I0120 17:10:10.739571 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b0b02aa-9ba9-4207-b77c-8157659e4ba6" containerName="nova-api-api" Jan 20 17:10:10 crc kubenswrapper[4558]: E0120 17:10:10.739611 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b0b02aa-9ba9-4207-b77c-8157659e4ba6" containerName="nova-api-log" Jan 20 17:10:10 crc kubenswrapper[4558]: I0120 17:10:10.739616 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b0b02aa-9ba9-4207-b77c-8157659e4ba6" containerName="nova-api-log" Jan 20 17:10:10 crc kubenswrapper[4558]: I0120 17:10:10.739756 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="0b0b02aa-9ba9-4207-b77c-8157659e4ba6" containerName="nova-api-log" Jan 20 17:10:10 crc kubenswrapper[4558]: I0120 17:10:10.739770 4558 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="0b0b02aa-9ba9-4207-b77c-8157659e4ba6" containerName="nova-api-api" Jan 20 17:10:10 crc kubenswrapper[4558]: I0120 17:10:10.740791 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:10:10 crc kubenswrapper[4558]: E0120 17:10:10.740938 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6c2b79ff5237f93288a809fb0566043521294554bd47c3ed1a939b33fa9bee10\": container with ID starting with 6c2b79ff5237f93288a809fb0566043521294554bd47c3ed1a939b33fa9bee10 not found: ID does not exist" containerID="6c2b79ff5237f93288a809fb0566043521294554bd47c3ed1a939b33fa9bee10" Jan 20 17:10:10 crc kubenswrapper[4558]: I0120 17:10:10.741011 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6c2b79ff5237f93288a809fb0566043521294554bd47c3ed1a939b33fa9bee10"} err="failed to get container status \"6c2b79ff5237f93288a809fb0566043521294554bd47c3ed1a939b33fa9bee10\": rpc error: code = NotFound desc = could not find container \"6c2b79ff5237f93288a809fb0566043521294554bd47c3ed1a939b33fa9bee10\": container with ID starting with 6c2b79ff5237f93288a809fb0566043521294554bd47c3ed1a939b33fa9bee10 not found: ID does not exist" Jan 20 17:10:10 crc kubenswrapper[4558]: I0120 17:10:10.745124 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-internal-svc" Jan 20 17:10:10 crc kubenswrapper[4558]: I0120 17:10:10.745328 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-public-svc" Jan 20 17:10:10 crc kubenswrapper[4558]: I0120 17:10:10.745525 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-api-config-data" Jan 20 17:10:10 crc kubenswrapper[4558]: I0120 17:10:10.770294 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:10:10 crc kubenswrapper[4558]: I0120 17:10:10.898297 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8gx99\" (UniqueName: \"kubernetes.io/projected/ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1-kube-api-access-8gx99\") pod \"nova-api-0\" (UID: \"ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:10:10 crc kubenswrapper[4558]: I0120 17:10:10.898401 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1-logs\") pod \"nova-api-0\" (UID: \"ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:10:10 crc kubenswrapper[4558]: I0120 17:10:10.898463 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1-internal-tls-certs\") pod \"nova-api-0\" (UID: \"ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:10:10 crc kubenswrapper[4558]: I0120 17:10:10.898524 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 
17:10:10 crc kubenswrapper[4558]: I0120 17:10:10.898594 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1-config-data\") pod \"nova-api-0\" (UID: \"ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:10:10 crc kubenswrapper[4558]: I0120 17:10:10.898657 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1-public-tls-certs\") pod \"nova-api-0\" (UID: \"ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:10:10 crc kubenswrapper[4558]: I0120 17:10:10.999808 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1-internal-tls-certs\") pod \"nova-api-0\" (UID: \"ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:10:10 crc kubenswrapper[4558]: I0120 17:10:10.999878 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:10:11 crc kubenswrapper[4558]: I0120 17:10:10.999932 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1-config-data\") pod \"nova-api-0\" (UID: \"ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:10:11 crc kubenswrapper[4558]: I0120 17:10:10.999964 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1-public-tls-certs\") pod \"nova-api-0\" (UID: \"ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:10:11 crc kubenswrapper[4558]: I0120 17:10:11.000026 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8gx99\" (UniqueName: \"kubernetes.io/projected/ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1-kube-api-access-8gx99\") pod \"nova-api-0\" (UID: \"ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:10:11 crc kubenswrapper[4558]: I0120 17:10:11.000066 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1-logs\") pod \"nova-api-0\" (UID: \"ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:10:11 crc kubenswrapper[4558]: I0120 17:10:11.000843 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1-logs\") pod \"nova-api-0\" (UID: \"ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:10:11 crc kubenswrapper[4558]: I0120 17:10:11.004368 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1-internal-tls-certs\") pod \"nova-api-0\" (UID: 
\"ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:10:11 crc kubenswrapper[4558]: I0120 17:10:11.004502 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1-config-data\") pod \"nova-api-0\" (UID: \"ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:10:11 crc kubenswrapper[4558]: I0120 17:10:11.005033 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:10:11 crc kubenswrapper[4558]: I0120 17:10:11.005077 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1-public-tls-certs\") pod \"nova-api-0\" (UID: \"ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:10:11 crc kubenswrapper[4558]: I0120 17:10:11.018468 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8gx99\" (UniqueName: \"kubernetes.io/projected/ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1-kube-api-access-8gx99\") pod \"nova-api-0\" (UID: \"ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:10:11 crc kubenswrapper[4558]: I0120 17:10:11.056496 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:10:11 crc kubenswrapper[4558]: I0120 17:10:11.469852 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:10:11 crc kubenswrapper[4558]: W0120 17:10:11.475805 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podecb70be9_510b_4ec7_ae5b_20f2ae8dd6a1.slice/crio-9f1781e6ce864ca3b756a3b2c6cb6554180b80ae4d98e30a5b24012cde991745 WatchSource:0}: Error finding container 9f1781e6ce864ca3b756a3b2c6cb6554180b80ae4d98e30a5b24012cde991745: Status 404 returned error can't find the container with id 9f1781e6ce864ca3b756a3b2c6cb6554180b80ae4d98e30a5b24012cde991745 Jan 20 17:10:11 crc kubenswrapper[4558]: I0120 17:10:11.715296 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"95cf677f-e744-4dad-acfb-507100d2ea14","Type":"ContainerStarted","Data":"9e42a0ec21415521693070869352b9127abae2be472aef5468b15f95c32a8545"} Jan 20 17:10:11 crc kubenswrapper[4558]: I0120 17:10:11.715343 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"95cf677f-e744-4dad-acfb-507100d2ea14","Type":"ContainerStarted","Data":"b8d7b4e681364253444a1c0248fab2e3349b056b8048a2f9443b427211b7dd9d"} Jan 20 17:10:11 crc kubenswrapper[4558]: I0120 17:10:11.717587 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1","Type":"ContainerStarted","Data":"3961204b11e978cd9995166f21ade05036318ca560f84a52206089ba8789ed9d"} Jan 20 17:10:11 crc kubenswrapper[4558]: I0120 17:10:11.717647 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" 
event={"ID":"ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1","Type":"ContainerStarted","Data":"9f1781e6ce864ca3b756a3b2c6cb6554180b80ae4d98e30a5b24012cde991745"} Jan 20 17:10:12 crc kubenswrapper[4558]: I0120 17:10:12.575702 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b0b02aa-9ba9-4207-b77c-8157659e4ba6" path="/var/lib/kubelet/pods/0b0b02aa-9ba9-4207-b77c-8157659e4ba6/volumes" Jan 20 17:10:12 crc kubenswrapper[4558]: I0120 17:10:12.727458 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1","Type":"ContainerStarted","Data":"013d2b8016fc21c333aa7e40dd675884b8161eccaa5a0b7a7ee016c3b3fef132"} Jan 20 17:10:12 crc kubenswrapper[4558]: I0120 17:10:12.760871 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-api-0" podStartSLOduration=2.760851785 podStartE2EDuration="2.760851785s" podCreationTimestamp="2026-01-20 17:10:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:10:12.753985484 +0000 UTC m=+1706.514323551" watchObservedRunningTime="2026-01-20 17:10:12.760851785 +0000 UTC m=+1706.521189752" Jan 20 17:10:13 crc kubenswrapper[4558]: I0120 17:10:13.740819 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"95cf677f-e744-4dad-acfb-507100d2ea14","Type":"ContainerStarted","Data":"41ac2c9d6515d906d6c94b74bae2f46a5cb6aead6aa685ee10b8acf27caf202f"} Jan 20 17:10:13 crc kubenswrapper[4558]: I0120 17:10:13.773573 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=2.293790698 podStartE2EDuration="5.773550579s" podCreationTimestamp="2026-01-20 17:10:08 +0000 UTC" firstStartedPulling="2026-01-20 17:10:09.472308158 +0000 UTC m=+1703.232646125" lastFinishedPulling="2026-01-20 17:10:12.952068039 +0000 UTC m=+1706.712406006" observedRunningTime="2026-01-20 17:10:13.77010444 +0000 UTC m=+1707.530442406" watchObservedRunningTime="2026-01-20 17:10:13.773550579 +0000 UTC m=+1707.533888546" Jan 20 17:10:14 crc kubenswrapper[4558]: I0120 17:10:14.755625 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:10:21 crc kubenswrapper[4558]: I0120 17:10:21.057825 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:10:21 crc kubenswrapper[4558]: I0120 17:10:21.058650 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:10:22 crc kubenswrapper[4558]: I0120 17:10:22.073295 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" podUID="ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.1.147:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:10:22 crc kubenswrapper[4558]: I0120 17:10:22.073709 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" podUID="ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.1.147:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:10:22 crc kubenswrapper[4558]: I0120 17:10:22.565817 4558 
scope.go:117] "RemoveContainer" containerID="a711a286282347590079d2447ef37fa9ed0f11085d6d7a65f85e65c1c2df608f" Jan 20 17:10:22 crc kubenswrapper[4558]: E0120 17:10:22.566083 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:10:31 crc kubenswrapper[4558]: I0120 17:10:31.063661 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:10:31 crc kubenswrapper[4558]: I0120 17:10:31.064389 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:10:31 crc kubenswrapper[4558]: I0120 17:10:31.065285 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:10:31 crc kubenswrapper[4558]: I0120 17:10:31.070641 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:10:31 crc kubenswrapper[4558]: I0120 17:10:31.917880 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:10:31 crc kubenswrapper[4558]: I0120 17:10:31.924477 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:10:35 crc kubenswrapper[4558]: I0120 17:10:35.566888 4558 scope.go:117] "RemoveContainer" containerID="a711a286282347590079d2447ef37fa9ed0f11085d6d7a65f85e65c1c2df608f" Jan 20 17:10:35 crc kubenswrapper[4558]: E0120 17:10:35.567519 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:10:39 crc kubenswrapper[4558]: I0120 17:10:39.062745 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.274326 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ovn-controller-76z65"] Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.284636 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovn-controller-76z65" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.297481 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ovncontroller-ovndbs" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.298538 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovncontroller-scripts" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.298803 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ovncontroller-ovncontroller-dockercfg-lpc6x" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.310210 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ovn-controller-ovs-jgq6k"] Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.315801 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovn-controller-ovs-jgq6k" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.340482 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovn-controller-76z65"] Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.353988 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovn-controller-ovs-jgq6k"] Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.365524 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ovn-controller-metrics-hq9q2"] Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.367024 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovn-controller-metrics-hq9q2" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.369127 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovncontroller-metrics-config" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.391952 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovn-controller-metrics-hq9q2"] Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.408838 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/7e6f6562-3dc9-4cdb-8679-1342459d299f-ovs-rundir\") pod \"ovn-controller-metrics-hq9q2\" (UID: \"7e6f6562-3dc9-4cdb-8679-1342459d299f\") " pod="openstack-kuttl-tests/ovn-controller-metrics-hq9q2" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.408913 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/4e9f22e4-d3e1-4e67-93c2-4774c61c66bf-var-run\") pod \"ovn-controller-ovs-jgq6k\" (UID: \"4e9f22e4-d3e1-4e67-93c2-4774c61c66bf\") " pod="openstack-kuttl-tests/ovn-controller-ovs-jgq6k" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.408957 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e6f6562-3dc9-4cdb-8679-1342459d299f-combined-ca-bundle\") pod \"ovn-controller-metrics-hq9q2\" (UID: \"7e6f6562-3dc9-4cdb-8679-1342459d299f\") " pod="openstack-kuttl-tests/ovn-controller-metrics-hq9q2" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.408980 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vnf2s\" (UniqueName: 
\"kubernetes.io/projected/7e6f6562-3dc9-4cdb-8679-1342459d299f-kube-api-access-vnf2s\") pod \"ovn-controller-metrics-hq9q2\" (UID: \"7e6f6562-3dc9-4cdb-8679-1342459d299f\") " pod="openstack-kuttl-tests/ovn-controller-metrics-hq9q2" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.409001 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/4e9f22e4-d3e1-4e67-93c2-4774c61c66bf-var-log\") pod \"ovn-controller-ovs-jgq6k\" (UID: \"4e9f22e4-d3e1-4e67-93c2-4774c61c66bf\") " pod="openstack-kuttl-tests/ovn-controller-ovs-jgq6k" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.409038 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/cc4ac09f-8f5e-474d-8669-a3a2927a37aa-var-run\") pod \"ovn-controller-76z65\" (UID: \"cc4ac09f-8f5e-474d-8669-a3a2927a37aa\") " pod="openstack-kuttl-tests/ovn-controller-76z65" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.409070 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gbcfw\" (UniqueName: \"kubernetes.io/projected/4e9f22e4-d3e1-4e67-93c2-4774c61c66bf-kube-api-access-gbcfw\") pod \"ovn-controller-ovs-jgq6k\" (UID: \"4e9f22e4-d3e1-4e67-93c2-4774c61c66bf\") " pod="openstack-kuttl-tests/ovn-controller-ovs-jgq6k" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.409110 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/cc4ac09f-8f5e-474d-8669-a3a2927a37aa-var-run-ovn\") pod \"ovn-controller-76z65\" (UID: \"cc4ac09f-8f5e-474d-8669-a3a2927a37aa\") " pod="openstack-kuttl-tests/ovn-controller-76z65" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.409260 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/4e9f22e4-d3e1-4e67-93c2-4774c61c66bf-var-lib\") pod \"ovn-controller-ovs-jgq6k\" (UID: \"4e9f22e4-d3e1-4e67-93c2-4774c61c66bf\") " pod="openstack-kuttl-tests/ovn-controller-ovs-jgq6k" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.409305 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/7e6f6562-3dc9-4cdb-8679-1342459d299f-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-hq9q2\" (UID: \"7e6f6562-3dc9-4cdb-8679-1342459d299f\") " pod="openstack-kuttl-tests/ovn-controller-metrics-hq9q2" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.409341 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc4ac09f-8f5e-474d-8669-a3a2927a37aa-ovn-controller-tls-certs\") pod \"ovn-controller-76z65\" (UID: \"cc4ac09f-8f5e-474d-8669-a3a2927a37aa\") " pod="openstack-kuttl-tests/ovn-controller-76z65" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.409435 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7e6f6562-3dc9-4cdb-8679-1342459d299f-config\") pod \"ovn-controller-metrics-hq9q2\" (UID: \"7e6f6562-3dc9-4cdb-8679-1342459d299f\") " pod="openstack-kuttl-tests/ovn-controller-metrics-hq9q2" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 
17:10:43.409472 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/cc4ac09f-8f5e-474d-8669-a3a2927a37aa-scripts\") pod \"ovn-controller-76z65\" (UID: \"cc4ac09f-8f5e-474d-8669-a3a2927a37aa\") " pod="openstack-kuttl-tests/ovn-controller-76z65" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.409525 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc4ac09f-8f5e-474d-8669-a3a2927a37aa-combined-ca-bundle\") pod \"ovn-controller-76z65\" (UID: \"cc4ac09f-8f5e-474d-8669-a3a2927a37aa\") " pod="openstack-kuttl-tests/ovn-controller-76z65" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.409556 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/cc4ac09f-8f5e-474d-8669-a3a2927a37aa-var-log-ovn\") pod \"ovn-controller-76z65\" (UID: \"cc4ac09f-8f5e-474d-8669-a3a2927a37aa\") " pod="openstack-kuttl-tests/ovn-controller-76z65" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.409590 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4e9f22e4-d3e1-4e67-93c2-4774c61c66bf-scripts\") pod \"ovn-controller-ovs-jgq6k\" (UID: \"4e9f22e4-d3e1-4e67-93c2-4774c61c66bf\") " pod="openstack-kuttl-tests/ovn-controller-ovs-jgq6k" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.409668 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5xq97\" (UniqueName: \"kubernetes.io/projected/cc4ac09f-8f5e-474d-8669-a3a2927a37aa-kube-api-access-5xq97\") pod \"ovn-controller-76z65\" (UID: \"cc4ac09f-8f5e-474d-8669-a3a2927a37aa\") " pod="openstack-kuttl-tests/ovn-controller-76z65" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.409694 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/7e6f6562-3dc9-4cdb-8679-1342459d299f-ovn-rundir\") pod \"ovn-controller-metrics-hq9q2\" (UID: \"7e6f6562-3dc9-4cdb-8679-1342459d299f\") " pod="openstack-kuttl-tests/ovn-controller-metrics-hq9q2" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.409736 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/4e9f22e4-d3e1-4e67-93c2-4774c61c66bf-etc-ovs\") pod \"ovn-controller-ovs-jgq6k\" (UID: \"4e9f22e4-d3e1-4e67-93c2-4774c61c66bf\") " pod="openstack-kuttl-tests/ovn-controller-ovs-jgq6k" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.412768 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ovn-controller-76z65-config-2ljgt"] Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.413983 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovn-controller-76z65-config-2ljgt" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.417479 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovncontroller-extra-scripts" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.417779 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovn-controller-76z65-config-2ljgt"] Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.511676 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/d298dad1-1064-4368-aaf0-0bd0b215ee60-var-run-ovn\") pod \"ovn-controller-76z65-config-2ljgt\" (UID: \"d298dad1-1064-4368-aaf0-0bd0b215ee60\") " pod="openstack-kuttl-tests/ovn-controller-76z65-config-2ljgt" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.511775 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5xq97\" (UniqueName: \"kubernetes.io/projected/cc4ac09f-8f5e-474d-8669-a3a2927a37aa-kube-api-access-5xq97\") pod \"ovn-controller-76z65\" (UID: \"cc4ac09f-8f5e-474d-8669-a3a2927a37aa\") " pod="openstack-kuttl-tests/ovn-controller-76z65" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.511813 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/7e6f6562-3dc9-4cdb-8679-1342459d299f-ovn-rundir\") pod \"ovn-controller-metrics-hq9q2\" (UID: \"7e6f6562-3dc9-4cdb-8679-1342459d299f\") " pod="openstack-kuttl-tests/ovn-controller-metrics-hq9q2" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.511862 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/4e9f22e4-d3e1-4e67-93c2-4774c61c66bf-etc-ovs\") pod \"ovn-controller-ovs-jgq6k\" (UID: \"4e9f22e4-d3e1-4e67-93c2-4774c61c66bf\") " pod="openstack-kuttl-tests/ovn-controller-ovs-jgq6k" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.511902 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/d298dad1-1064-4368-aaf0-0bd0b215ee60-var-run\") pod \"ovn-controller-76z65-config-2ljgt\" (UID: \"d298dad1-1064-4368-aaf0-0bd0b215ee60\") " pod="openstack-kuttl-tests/ovn-controller-76z65-config-2ljgt" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.511944 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/7e6f6562-3dc9-4cdb-8679-1342459d299f-ovs-rundir\") pod \"ovn-controller-metrics-hq9q2\" (UID: \"7e6f6562-3dc9-4cdb-8679-1342459d299f\") " pod="openstack-kuttl-tests/ovn-controller-metrics-hq9q2" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.511980 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hgbqx\" (UniqueName: \"kubernetes.io/projected/d298dad1-1064-4368-aaf0-0bd0b215ee60-kube-api-access-hgbqx\") pod \"ovn-controller-76z65-config-2ljgt\" (UID: \"d298dad1-1064-4368-aaf0-0bd0b215ee60\") " pod="openstack-kuttl-tests/ovn-controller-76z65-config-2ljgt" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.512001 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: 
\"kubernetes.io/host-path/d298dad1-1064-4368-aaf0-0bd0b215ee60-var-log-ovn\") pod \"ovn-controller-76z65-config-2ljgt\" (UID: \"d298dad1-1064-4368-aaf0-0bd0b215ee60\") " pod="openstack-kuttl-tests/ovn-controller-76z65-config-2ljgt" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.512039 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/4e9f22e4-d3e1-4e67-93c2-4774c61c66bf-var-run\") pod \"ovn-controller-ovs-jgq6k\" (UID: \"4e9f22e4-d3e1-4e67-93c2-4774c61c66bf\") " pod="openstack-kuttl-tests/ovn-controller-ovs-jgq6k" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.512072 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e6f6562-3dc9-4cdb-8679-1342459d299f-combined-ca-bundle\") pod \"ovn-controller-metrics-hq9q2\" (UID: \"7e6f6562-3dc9-4cdb-8679-1342459d299f\") " pod="openstack-kuttl-tests/ovn-controller-metrics-hq9q2" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.512091 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vnf2s\" (UniqueName: \"kubernetes.io/projected/7e6f6562-3dc9-4cdb-8679-1342459d299f-kube-api-access-vnf2s\") pod \"ovn-controller-metrics-hq9q2\" (UID: \"7e6f6562-3dc9-4cdb-8679-1342459d299f\") " pod="openstack-kuttl-tests/ovn-controller-metrics-hq9q2" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.512112 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/4e9f22e4-d3e1-4e67-93c2-4774c61c66bf-var-log\") pod \"ovn-controller-ovs-jgq6k\" (UID: \"4e9f22e4-d3e1-4e67-93c2-4774c61c66bf\") " pod="openstack-kuttl-tests/ovn-controller-ovs-jgq6k" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.512144 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/cc4ac09f-8f5e-474d-8669-a3a2927a37aa-var-run\") pod \"ovn-controller-76z65\" (UID: \"cc4ac09f-8f5e-474d-8669-a3a2927a37aa\") " pod="openstack-kuttl-tests/ovn-controller-76z65" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.512208 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gbcfw\" (UniqueName: \"kubernetes.io/projected/4e9f22e4-d3e1-4e67-93c2-4774c61c66bf-kube-api-access-gbcfw\") pod \"ovn-controller-ovs-jgq6k\" (UID: \"4e9f22e4-d3e1-4e67-93c2-4774c61c66bf\") " pod="openstack-kuttl-tests/ovn-controller-ovs-jgq6k" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.512255 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/cc4ac09f-8f5e-474d-8669-a3a2927a37aa-var-run-ovn\") pod \"ovn-controller-76z65\" (UID: \"cc4ac09f-8f5e-474d-8669-a3a2927a37aa\") " pod="openstack-kuttl-tests/ovn-controller-76z65" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.512311 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/4e9f22e4-d3e1-4e67-93c2-4774c61c66bf-var-lib\") pod \"ovn-controller-ovs-jgq6k\" (UID: \"4e9f22e4-d3e1-4e67-93c2-4774c61c66bf\") " pod="openstack-kuttl-tests/ovn-controller-ovs-jgq6k" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.512349 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/7e6f6562-3dc9-4cdb-8679-1342459d299f-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-hq9q2\" (UID: \"7e6f6562-3dc9-4cdb-8679-1342459d299f\") " pod="openstack-kuttl-tests/ovn-controller-metrics-hq9q2" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.512393 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc4ac09f-8f5e-474d-8669-a3a2927a37aa-ovn-controller-tls-certs\") pod \"ovn-controller-76z65\" (UID: \"cc4ac09f-8f5e-474d-8669-a3a2927a37aa\") " pod="openstack-kuttl-tests/ovn-controller-76z65" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.512432 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d298dad1-1064-4368-aaf0-0bd0b215ee60-scripts\") pod \"ovn-controller-76z65-config-2ljgt\" (UID: \"d298dad1-1064-4368-aaf0-0bd0b215ee60\") " pod="openstack-kuttl-tests/ovn-controller-76z65-config-2ljgt" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.512496 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7e6f6562-3dc9-4cdb-8679-1342459d299f-config\") pod \"ovn-controller-metrics-hq9q2\" (UID: \"7e6f6562-3dc9-4cdb-8679-1342459d299f\") " pod="openstack-kuttl-tests/ovn-controller-metrics-hq9q2" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.512523 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/cc4ac09f-8f5e-474d-8669-a3a2927a37aa-scripts\") pod \"ovn-controller-76z65\" (UID: \"cc4ac09f-8f5e-474d-8669-a3a2927a37aa\") " pod="openstack-kuttl-tests/ovn-controller-76z65" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.512568 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/d298dad1-1064-4368-aaf0-0bd0b215ee60-additional-scripts\") pod \"ovn-controller-76z65-config-2ljgt\" (UID: \"d298dad1-1064-4368-aaf0-0bd0b215ee60\") " pod="openstack-kuttl-tests/ovn-controller-76z65-config-2ljgt" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.512572 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/7e6f6562-3dc9-4cdb-8679-1342459d299f-ovn-rundir\") pod \"ovn-controller-metrics-hq9q2\" (UID: \"7e6f6562-3dc9-4cdb-8679-1342459d299f\") " pod="openstack-kuttl-tests/ovn-controller-metrics-hq9q2" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.512595 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc4ac09f-8f5e-474d-8669-a3a2927a37aa-combined-ca-bundle\") pod \"ovn-controller-76z65\" (UID: \"cc4ac09f-8f5e-474d-8669-a3a2927a37aa\") " pod="openstack-kuttl-tests/ovn-controller-76z65" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.512693 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/cc4ac09f-8f5e-474d-8669-a3a2927a37aa-var-log-ovn\") pod \"ovn-controller-76z65\" (UID: \"cc4ac09f-8f5e-474d-8669-a3a2927a37aa\") " pod="openstack-kuttl-tests/ovn-controller-76z65" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.512747 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" 
(UniqueName: \"kubernetes.io/configmap/4e9f22e4-d3e1-4e67-93c2-4774c61c66bf-scripts\") pod \"ovn-controller-ovs-jgq6k\" (UID: \"4e9f22e4-d3e1-4e67-93c2-4774c61c66bf\") " pod="openstack-kuttl-tests/ovn-controller-ovs-jgq6k" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.513688 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/7e6f6562-3dc9-4cdb-8679-1342459d299f-ovs-rundir\") pod \"ovn-controller-metrics-hq9q2\" (UID: \"7e6f6562-3dc9-4cdb-8679-1342459d299f\") " pod="openstack-kuttl-tests/ovn-controller-metrics-hq9q2" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.513788 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/4e9f22e4-d3e1-4e67-93c2-4774c61c66bf-etc-ovs\") pod \"ovn-controller-ovs-jgq6k\" (UID: \"4e9f22e4-d3e1-4e67-93c2-4774c61c66bf\") " pod="openstack-kuttl-tests/ovn-controller-ovs-jgq6k" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.513850 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/cc4ac09f-8f5e-474d-8669-a3a2927a37aa-var-run-ovn\") pod \"ovn-controller-76z65\" (UID: \"cc4ac09f-8f5e-474d-8669-a3a2927a37aa\") " pod="openstack-kuttl-tests/ovn-controller-76z65" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.513901 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/4e9f22e4-d3e1-4e67-93c2-4774c61c66bf-var-run\") pod \"ovn-controller-ovs-jgq6k\" (UID: \"4e9f22e4-d3e1-4e67-93c2-4774c61c66bf\") " pod="openstack-kuttl-tests/ovn-controller-ovs-jgq6k" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.514995 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4e9f22e4-d3e1-4e67-93c2-4774c61c66bf-scripts\") pod \"ovn-controller-ovs-jgq6k\" (UID: \"4e9f22e4-d3e1-4e67-93c2-4774c61c66bf\") " pod="openstack-kuttl-tests/ovn-controller-ovs-jgq6k" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.515084 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/cc4ac09f-8f5e-474d-8669-a3a2927a37aa-var-log-ovn\") pod \"ovn-controller-76z65\" (UID: \"cc4ac09f-8f5e-474d-8669-a3a2927a37aa\") " pod="openstack-kuttl-tests/ovn-controller-76z65" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.515129 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/4e9f22e4-d3e1-4e67-93c2-4774c61c66bf-var-log\") pod \"ovn-controller-ovs-jgq6k\" (UID: \"4e9f22e4-d3e1-4e67-93c2-4774c61c66bf\") " pod="openstack-kuttl-tests/ovn-controller-ovs-jgq6k" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.515408 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/cc4ac09f-8f5e-474d-8669-a3a2927a37aa-var-run\") pod \"ovn-controller-76z65\" (UID: \"cc4ac09f-8f5e-474d-8669-a3a2927a37aa\") " pod="openstack-kuttl-tests/ovn-controller-76z65" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.515592 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/4e9f22e4-d3e1-4e67-93c2-4774c61c66bf-var-lib\") pod \"ovn-controller-ovs-jgq6k\" (UID: \"4e9f22e4-d3e1-4e67-93c2-4774c61c66bf\") " 
pod="openstack-kuttl-tests/ovn-controller-ovs-jgq6k" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.516521 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7e6f6562-3dc9-4cdb-8679-1342459d299f-config\") pod \"ovn-controller-metrics-hq9q2\" (UID: \"7e6f6562-3dc9-4cdb-8679-1342459d299f\") " pod="openstack-kuttl-tests/ovn-controller-metrics-hq9q2" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.520274 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/cc4ac09f-8f5e-474d-8669-a3a2927a37aa-scripts\") pod \"ovn-controller-76z65\" (UID: \"cc4ac09f-8f5e-474d-8669-a3a2927a37aa\") " pod="openstack-kuttl-tests/ovn-controller-76z65" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.521139 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/7e6f6562-3dc9-4cdb-8679-1342459d299f-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-hq9q2\" (UID: \"7e6f6562-3dc9-4cdb-8679-1342459d299f\") " pod="openstack-kuttl-tests/ovn-controller-metrics-hq9q2" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.521245 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e6f6562-3dc9-4cdb-8679-1342459d299f-combined-ca-bundle\") pod \"ovn-controller-metrics-hq9q2\" (UID: \"7e6f6562-3dc9-4cdb-8679-1342459d299f\") " pod="openstack-kuttl-tests/ovn-controller-metrics-hq9q2" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.521781 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc4ac09f-8f5e-474d-8669-a3a2927a37aa-combined-ca-bundle\") pod \"ovn-controller-76z65\" (UID: \"cc4ac09f-8f5e-474d-8669-a3a2927a37aa\") " pod="openstack-kuttl-tests/ovn-controller-76z65" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.522443 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc4ac09f-8f5e-474d-8669-a3a2927a37aa-ovn-controller-tls-certs\") pod \"ovn-controller-76z65\" (UID: \"cc4ac09f-8f5e-474d-8669-a3a2927a37aa\") " pod="openstack-kuttl-tests/ovn-controller-76z65" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.528794 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5xq97\" (UniqueName: \"kubernetes.io/projected/cc4ac09f-8f5e-474d-8669-a3a2927a37aa-kube-api-access-5xq97\") pod \"ovn-controller-76z65\" (UID: \"cc4ac09f-8f5e-474d-8669-a3a2927a37aa\") " pod="openstack-kuttl-tests/ovn-controller-76z65" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.531632 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gbcfw\" (UniqueName: \"kubernetes.io/projected/4e9f22e4-d3e1-4e67-93c2-4774c61c66bf-kube-api-access-gbcfw\") pod \"ovn-controller-ovs-jgq6k\" (UID: \"4e9f22e4-d3e1-4e67-93c2-4774c61c66bf\") " pod="openstack-kuttl-tests/ovn-controller-ovs-jgq6k" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.540383 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vnf2s\" (UniqueName: \"kubernetes.io/projected/7e6f6562-3dc9-4cdb-8679-1342459d299f-kube-api-access-vnf2s\") pod \"ovn-controller-metrics-hq9q2\" (UID: \"7e6f6562-3dc9-4cdb-8679-1342459d299f\") " 
pod="openstack-kuttl-tests/ovn-controller-metrics-hq9q2" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.615098 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d298dad1-1064-4368-aaf0-0bd0b215ee60-scripts\") pod \"ovn-controller-76z65-config-2ljgt\" (UID: \"d298dad1-1064-4368-aaf0-0bd0b215ee60\") " pod="openstack-kuttl-tests/ovn-controller-76z65-config-2ljgt" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.615217 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/d298dad1-1064-4368-aaf0-0bd0b215ee60-additional-scripts\") pod \"ovn-controller-76z65-config-2ljgt\" (UID: \"d298dad1-1064-4368-aaf0-0bd0b215ee60\") " pod="openstack-kuttl-tests/ovn-controller-76z65-config-2ljgt" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.615281 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/d298dad1-1064-4368-aaf0-0bd0b215ee60-var-run-ovn\") pod \"ovn-controller-76z65-config-2ljgt\" (UID: \"d298dad1-1064-4368-aaf0-0bd0b215ee60\") " pod="openstack-kuttl-tests/ovn-controller-76z65-config-2ljgt" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.615378 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/d298dad1-1064-4368-aaf0-0bd0b215ee60-var-run\") pod \"ovn-controller-76z65-config-2ljgt\" (UID: \"d298dad1-1064-4368-aaf0-0bd0b215ee60\") " pod="openstack-kuttl-tests/ovn-controller-76z65-config-2ljgt" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.615432 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hgbqx\" (UniqueName: \"kubernetes.io/projected/d298dad1-1064-4368-aaf0-0bd0b215ee60-kube-api-access-hgbqx\") pod \"ovn-controller-76z65-config-2ljgt\" (UID: \"d298dad1-1064-4368-aaf0-0bd0b215ee60\") " pod="openstack-kuttl-tests/ovn-controller-76z65-config-2ljgt" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.615463 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/d298dad1-1064-4368-aaf0-0bd0b215ee60-var-log-ovn\") pod \"ovn-controller-76z65-config-2ljgt\" (UID: \"d298dad1-1064-4368-aaf0-0bd0b215ee60\") " pod="openstack-kuttl-tests/ovn-controller-76z65-config-2ljgt" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.615584 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/d298dad1-1064-4368-aaf0-0bd0b215ee60-var-run\") pod \"ovn-controller-76z65-config-2ljgt\" (UID: \"d298dad1-1064-4368-aaf0-0bd0b215ee60\") " pod="openstack-kuttl-tests/ovn-controller-76z65-config-2ljgt" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.615607 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/d298dad1-1064-4368-aaf0-0bd0b215ee60-var-log-ovn\") pod \"ovn-controller-76z65-config-2ljgt\" (UID: \"d298dad1-1064-4368-aaf0-0bd0b215ee60\") " pod="openstack-kuttl-tests/ovn-controller-76z65-config-2ljgt" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.615682 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/d298dad1-1064-4368-aaf0-0bd0b215ee60-var-run-ovn\") pod 
\"ovn-controller-76z65-config-2ljgt\" (UID: \"d298dad1-1064-4368-aaf0-0bd0b215ee60\") " pod="openstack-kuttl-tests/ovn-controller-76z65-config-2ljgt" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.616508 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/d298dad1-1064-4368-aaf0-0bd0b215ee60-additional-scripts\") pod \"ovn-controller-76z65-config-2ljgt\" (UID: \"d298dad1-1064-4368-aaf0-0bd0b215ee60\") " pod="openstack-kuttl-tests/ovn-controller-76z65-config-2ljgt" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.617360 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d298dad1-1064-4368-aaf0-0bd0b215ee60-scripts\") pod \"ovn-controller-76z65-config-2ljgt\" (UID: \"d298dad1-1064-4368-aaf0-0bd0b215ee60\") " pod="openstack-kuttl-tests/ovn-controller-76z65-config-2ljgt" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.617707 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovn-controller-76z65" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.633335 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hgbqx\" (UniqueName: \"kubernetes.io/projected/d298dad1-1064-4368-aaf0-0bd0b215ee60-kube-api-access-hgbqx\") pod \"ovn-controller-76z65-config-2ljgt\" (UID: \"d298dad1-1064-4368-aaf0-0bd0b215ee60\") " pod="openstack-kuttl-tests/ovn-controller-76z65-config-2ljgt" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.645692 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovn-controller-ovs-jgq6k" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.696478 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovn-controller-metrics-hq9q2" Jan 20 17:10:43 crc kubenswrapper[4558]: I0120 17:10:43.732081 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovn-controller-76z65-config-2ljgt" Jan 20 17:10:44 crc kubenswrapper[4558]: I0120 17:10:44.054469 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovn-controller-76z65"] Jan 20 17:10:44 crc kubenswrapper[4558]: I0120 17:10:44.143478 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovn-controller-ovs-jgq6k"] Jan 20 17:10:44 crc kubenswrapper[4558]: W0120 17:10:44.143876 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4e9f22e4_d3e1_4e67_93c2_4774c61c66bf.slice/crio-01ad088341bcbd8a3065cb7befa9425bef5648a324a58cec68bd0cac01d484a7 WatchSource:0}: Error finding container 01ad088341bcbd8a3065cb7befa9425bef5648a324a58cec68bd0cac01d484a7: Status 404 returned error can't find the container with id 01ad088341bcbd8a3065cb7befa9425bef5648a324a58cec68bd0cac01d484a7 Jan 20 17:10:44 crc kubenswrapper[4558]: W0120 17:10:44.201567 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7e6f6562_3dc9_4cdb_8679_1342459d299f.slice/crio-2558107edbc436ff0cc59de113b963be0e443f1be192a0cd7d793a0368fc9903 WatchSource:0}: Error finding container 2558107edbc436ff0cc59de113b963be0e443f1be192a0cd7d793a0368fc9903: Status 404 returned error can't find the container with id 2558107edbc436ff0cc59de113b963be0e443f1be192a0cd7d793a0368fc9903 Jan 20 17:10:44 crc kubenswrapper[4558]: I0120 17:10:44.202424 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovn-controller-metrics-hq9q2"] Jan 20 17:10:44 crc kubenswrapper[4558]: I0120 17:10:44.246827 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovn-controller-76z65-config-2ljgt"] Jan 20 17:10:45 crc kubenswrapper[4558]: I0120 17:10:45.040093 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-controller-76z65" event={"ID":"cc4ac09f-8f5e-474d-8669-a3a2927a37aa","Type":"ContainerStarted","Data":"901cf54c320d55e1c1655fb84307ad5367d8cb5e216fc551badcbd60f487bbcf"} Jan 20 17:10:45 crc kubenswrapper[4558]: I0120 17:10:45.040651 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ovn-controller-76z65" Jan 20 17:10:45 crc kubenswrapper[4558]: I0120 17:10:45.040674 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-controller-76z65" event={"ID":"cc4ac09f-8f5e-474d-8669-a3a2927a37aa","Type":"ContainerStarted","Data":"ff22ab3ae0a3eb2241357f8e13dec89999702b34410e4f2783597f229953b310"} Jan 20 17:10:45 crc kubenswrapper[4558]: I0120 17:10:45.041894 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-controller-76z65-config-2ljgt" event={"ID":"d298dad1-1064-4368-aaf0-0bd0b215ee60","Type":"ContainerStarted","Data":"8e3efb638bbc1064f16cefab4a99ea7a1094f7ef51dacc45ead8e5b6008f26f4"} Jan 20 17:10:45 crc kubenswrapper[4558]: I0120 17:10:45.041922 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-controller-76z65-config-2ljgt" event={"ID":"d298dad1-1064-4368-aaf0-0bd0b215ee60","Type":"ContainerStarted","Data":"cc46c41e4fe6d4d79db46df29d2cad7939fe18b6ffd72dc63b9f79519906671e"} Jan 20 17:10:45 crc kubenswrapper[4558]: I0120 17:10:45.043671 4558 generic.go:334] "Generic (PLEG): container finished" podID="4e9f22e4-d3e1-4e67-93c2-4774c61c66bf" 
containerID="34c19f16fa96fda159f5a09a39d736bc3656c84c962957aba7eacf2ef3a38ee0" exitCode=0 Jan 20 17:10:45 crc kubenswrapper[4558]: I0120 17:10:45.043749 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-controller-ovs-jgq6k" event={"ID":"4e9f22e4-d3e1-4e67-93c2-4774c61c66bf","Type":"ContainerDied","Data":"34c19f16fa96fda159f5a09a39d736bc3656c84c962957aba7eacf2ef3a38ee0"} Jan 20 17:10:45 crc kubenswrapper[4558]: I0120 17:10:45.043782 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-controller-ovs-jgq6k" event={"ID":"4e9f22e4-d3e1-4e67-93c2-4774c61c66bf","Type":"ContainerStarted","Data":"01ad088341bcbd8a3065cb7befa9425bef5648a324a58cec68bd0cac01d484a7"} Jan 20 17:10:45 crc kubenswrapper[4558]: I0120 17:10:45.046210 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-controller-metrics-hq9q2" event={"ID":"7e6f6562-3dc9-4cdb-8679-1342459d299f","Type":"ContainerStarted","Data":"12971f559b3b533b9c1d7ba73504b1b6cf3d6f393ac7e397073d25b2d8e8847c"} Jan 20 17:10:45 crc kubenswrapper[4558]: I0120 17:10:45.046235 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-controller-metrics-hq9q2" event={"ID":"7e6f6562-3dc9-4cdb-8679-1342459d299f","Type":"ContainerStarted","Data":"2558107edbc436ff0cc59de113b963be0e443f1be192a0cd7d793a0368fc9903"} Jan 20 17:10:45 crc kubenswrapper[4558]: I0120 17:10:45.062301 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ovn-controller-76z65" podStartSLOduration=2.062282722 podStartE2EDuration="2.062282722s" podCreationTimestamp="2026-01-20 17:10:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:10:45.056210764 +0000 UTC m=+1738.816548731" watchObservedRunningTime="2026-01-20 17:10:45.062282722 +0000 UTC m=+1738.822620688" Jan 20 17:10:45 crc kubenswrapper[4558]: I0120 17:10:45.086610 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ovn-controller-76z65-config-2ljgt" podStartSLOduration=2.086586965 podStartE2EDuration="2.086586965s" podCreationTimestamp="2026-01-20 17:10:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:10:45.070953708 +0000 UTC m=+1738.831291674" watchObservedRunningTime="2026-01-20 17:10:45.086586965 +0000 UTC m=+1738.846924932" Jan 20 17:10:45 crc kubenswrapper[4558]: I0120 17:10:45.087730 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ovn-controller-metrics-hq9q2" podStartSLOduration=2.08772259 podStartE2EDuration="2.08772259s" podCreationTimestamp="2026-01-20 17:10:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:10:45.084340981 +0000 UTC m=+1738.844678949" watchObservedRunningTime="2026-01-20 17:10:45.08772259 +0000 UTC m=+1738.848060557" Jan 20 17:10:46 crc kubenswrapper[4558]: I0120 17:10:46.056965 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-controller-ovs-jgq6k" event={"ID":"4e9f22e4-d3e1-4e67-93c2-4774c61c66bf","Type":"ContainerStarted","Data":"9c27c6b27ef7131cb9e16bde6b7dd1e00481e16fcc5615909f60bfad298cfc8a"} Jan 20 17:10:46 crc kubenswrapper[4558]: I0120 17:10:46.058192 4558 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openstack-kuttl-tests/ovn-controller-ovs-jgq6k" event={"ID":"4e9f22e4-d3e1-4e67-93c2-4774c61c66bf","Type":"ContainerStarted","Data":"f93f94d4c8b0fcf0d9961eab49e9fd5086f604818a4a1ad930503c4ada356fe6"} Jan 20 17:10:46 crc kubenswrapper[4558]: I0120 17:10:46.078563 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ovn-controller-ovs-jgq6k" podStartSLOduration=3.078545226 podStartE2EDuration="3.078545226s" podCreationTimestamp="2026-01-20 17:10:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:10:46.071351911 +0000 UTC m=+1739.831689877" watchObservedRunningTime="2026-01-20 17:10:46.078545226 +0000 UTC m=+1739.838883194" Jan 20 17:10:46 crc kubenswrapper[4558]: I0120 17:10:46.572468 4558 scope.go:117] "RemoveContainer" containerID="a711a286282347590079d2447ef37fa9ed0f11085d6d7a65f85e65c1c2df608f" Jan 20 17:10:46 crc kubenswrapper[4558]: E0120 17:10:46.573206 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:10:47 crc kubenswrapper[4558]: I0120 17:10:47.072471 4558 generic.go:334] "Generic (PLEG): container finished" podID="d298dad1-1064-4368-aaf0-0bd0b215ee60" containerID="8e3efb638bbc1064f16cefab4a99ea7a1094f7ef51dacc45ead8e5b6008f26f4" exitCode=0 Jan 20 17:10:47 crc kubenswrapper[4558]: I0120 17:10:47.072574 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-controller-76z65-config-2ljgt" event={"ID":"d298dad1-1064-4368-aaf0-0bd0b215ee60","Type":"ContainerDied","Data":"8e3efb638bbc1064f16cefab4a99ea7a1094f7ef51dacc45ead8e5b6008f26f4"} Jan 20 17:10:47 crc kubenswrapper[4558]: I0120 17:10:47.073771 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ovn-controller-ovs-jgq6k" Jan 20 17:10:47 crc kubenswrapper[4558]: I0120 17:10:47.073824 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ovn-controller-ovs-jgq6k" Jan 20 17:10:47 crc kubenswrapper[4558]: I0120 17:10:47.494912 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovn-controller-76z65-config-2ljgt"] Jan 20 17:10:47 crc kubenswrapper[4558]: I0120 17:10:47.502794 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovn-controller-76z65"] Jan 20 17:10:47 crc kubenswrapper[4558]: I0120 17:10:47.517868 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovn-controller-metrics-hq9q2"] Jan 20 17:10:47 crc kubenswrapper[4558]: I0120 17:10:47.518131 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovn-controller-metrics-hq9q2" podUID="7e6f6562-3dc9-4cdb-8679-1342459d299f" containerName="openstack-network-exporter" containerID="cri-o://12971f559b3b533b9c1d7ba73504b1b6cf3d6f393ac7e397073d25b2d8e8847c" gracePeriod=30 Jan 20 17:10:47 crc kubenswrapper[4558]: I0120 17:10:47.542291 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovn-controller-ovs-jgq6k"] Jan 20 17:10:47 crc kubenswrapper[4558]: I0120 
17:10:47.936751 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovn-controller-metrics-hq9q2_7e6f6562-3dc9-4cdb-8679-1342459d299f/openstack-network-exporter/0.log" Jan 20 17:10:47 crc kubenswrapper[4558]: I0120 17:10:47.936865 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovn-controller-metrics-hq9q2" Jan 20 17:10:48 crc kubenswrapper[4558]: I0120 17:10:48.034459 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e6f6562-3dc9-4cdb-8679-1342459d299f-combined-ca-bundle\") pod \"7e6f6562-3dc9-4cdb-8679-1342459d299f\" (UID: \"7e6f6562-3dc9-4cdb-8679-1342459d299f\") " Jan 20 17:10:48 crc kubenswrapper[4558]: I0120 17:10:48.034509 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vnf2s\" (UniqueName: \"kubernetes.io/projected/7e6f6562-3dc9-4cdb-8679-1342459d299f-kube-api-access-vnf2s\") pod \"7e6f6562-3dc9-4cdb-8679-1342459d299f\" (UID: \"7e6f6562-3dc9-4cdb-8679-1342459d299f\") " Jan 20 17:10:48 crc kubenswrapper[4558]: I0120 17:10:48.034549 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7e6f6562-3dc9-4cdb-8679-1342459d299f-config\") pod \"7e6f6562-3dc9-4cdb-8679-1342459d299f\" (UID: \"7e6f6562-3dc9-4cdb-8679-1342459d299f\") " Jan 20 17:10:48 crc kubenswrapper[4558]: I0120 17:10:48.034591 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/7e6f6562-3dc9-4cdb-8679-1342459d299f-ovs-rundir\") pod \"7e6f6562-3dc9-4cdb-8679-1342459d299f\" (UID: \"7e6f6562-3dc9-4cdb-8679-1342459d299f\") " Jan 20 17:10:48 crc kubenswrapper[4558]: I0120 17:10:48.034616 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/7e6f6562-3dc9-4cdb-8679-1342459d299f-ovn-rundir\") pod \"7e6f6562-3dc9-4cdb-8679-1342459d299f\" (UID: \"7e6f6562-3dc9-4cdb-8679-1342459d299f\") " Jan 20 17:10:48 crc kubenswrapper[4558]: I0120 17:10:48.034668 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/7e6f6562-3dc9-4cdb-8679-1342459d299f-metrics-certs-tls-certs\") pod \"7e6f6562-3dc9-4cdb-8679-1342459d299f\" (UID: \"7e6f6562-3dc9-4cdb-8679-1342459d299f\") " Jan 20 17:10:48 crc kubenswrapper[4558]: I0120 17:10:48.034709 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7e6f6562-3dc9-4cdb-8679-1342459d299f-ovs-rundir" (OuterVolumeSpecName: "ovs-rundir") pod "7e6f6562-3dc9-4cdb-8679-1342459d299f" (UID: "7e6f6562-3dc9-4cdb-8679-1342459d299f"). InnerVolumeSpecName "ovs-rundir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:10:48 crc kubenswrapper[4558]: I0120 17:10:48.034788 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7e6f6562-3dc9-4cdb-8679-1342459d299f-ovn-rundir" (OuterVolumeSpecName: "ovn-rundir") pod "7e6f6562-3dc9-4cdb-8679-1342459d299f" (UID: "7e6f6562-3dc9-4cdb-8679-1342459d299f"). InnerVolumeSpecName "ovn-rundir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:10:48 crc kubenswrapper[4558]: I0120 17:10:48.035153 4558 reconciler_common.go:293] "Volume detached for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/7e6f6562-3dc9-4cdb-8679-1342459d299f-ovs-rundir\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:48 crc kubenswrapper[4558]: I0120 17:10:48.035189 4558 reconciler_common.go:293] "Volume detached for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/7e6f6562-3dc9-4cdb-8679-1342459d299f-ovn-rundir\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:48 crc kubenswrapper[4558]: I0120 17:10:48.035198 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7e6f6562-3dc9-4cdb-8679-1342459d299f-config" (OuterVolumeSpecName: "config") pod "7e6f6562-3dc9-4cdb-8679-1342459d299f" (UID: "7e6f6562-3dc9-4cdb-8679-1342459d299f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:10:48 crc kubenswrapper[4558]: I0120 17:10:48.040272 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7e6f6562-3dc9-4cdb-8679-1342459d299f-kube-api-access-vnf2s" (OuterVolumeSpecName: "kube-api-access-vnf2s") pod "7e6f6562-3dc9-4cdb-8679-1342459d299f" (UID: "7e6f6562-3dc9-4cdb-8679-1342459d299f"). InnerVolumeSpecName "kube-api-access-vnf2s". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:10:48 crc kubenswrapper[4558]: I0120 17:10:48.058958 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e6f6562-3dc9-4cdb-8679-1342459d299f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7e6f6562-3dc9-4cdb-8679-1342459d299f" (UID: "7e6f6562-3dc9-4cdb-8679-1342459d299f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:48 crc kubenswrapper[4558]: I0120 17:10:48.108690 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovn-controller-metrics-hq9q2_7e6f6562-3dc9-4cdb-8679-1342459d299f/openstack-network-exporter/0.log" Jan 20 17:10:48 crc kubenswrapper[4558]: I0120 17:10:48.108737 4558 generic.go:334] "Generic (PLEG): container finished" podID="7e6f6562-3dc9-4cdb-8679-1342459d299f" containerID="12971f559b3b533b9c1d7ba73504b1b6cf3d6f393ac7e397073d25b2d8e8847c" exitCode=2 Jan 20 17:10:48 crc kubenswrapper[4558]: I0120 17:10:48.108830 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-controller-metrics-hq9q2" event={"ID":"7e6f6562-3dc9-4cdb-8679-1342459d299f","Type":"ContainerDied","Data":"12971f559b3b533b9c1d7ba73504b1b6cf3d6f393ac7e397073d25b2d8e8847c"} Jan 20 17:10:48 crc kubenswrapper[4558]: I0120 17:10:48.108915 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-controller-metrics-hq9q2" event={"ID":"7e6f6562-3dc9-4cdb-8679-1342459d299f","Type":"ContainerDied","Data":"2558107edbc436ff0cc59de113b963be0e443f1be192a0cd7d793a0368fc9903"} Jan 20 17:10:48 crc kubenswrapper[4558]: I0120 17:10:48.108942 4558 scope.go:117] "RemoveContainer" containerID="12971f559b3b533b9c1d7ba73504b1b6cf3d6f393ac7e397073d25b2d8e8847c" Jan 20 17:10:48 crc kubenswrapper[4558]: I0120 17:10:48.109770 4558 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." 
pod="openstack-kuttl-tests/ovn-controller-ovs-jgq6k" secret="" err="secret \"ovncontroller-ovncontroller-dockercfg-lpc6x\" not found" Jan 20 17:10:48 crc kubenswrapper[4558]: I0120 17:10:48.110254 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovn-controller-metrics-hq9q2" Jan 20 17:10:48 crc kubenswrapper[4558]: I0120 17:10:48.125232 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e6f6562-3dc9-4cdb-8679-1342459d299f-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "7e6f6562-3dc9-4cdb-8679-1342459d299f" (UID: "7e6f6562-3dc9-4cdb-8679-1342459d299f"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:48 crc kubenswrapper[4558]: I0120 17:10:48.136791 4558 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/7e6f6562-3dc9-4cdb-8679-1342459d299f-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:48 crc kubenswrapper[4558]: I0120 17:10:48.136918 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e6f6562-3dc9-4cdb-8679-1342459d299f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:48 crc kubenswrapper[4558]: I0120 17:10:48.136979 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vnf2s\" (UniqueName: \"kubernetes.io/projected/7e6f6562-3dc9-4cdb-8679-1342459d299f-kube-api-access-vnf2s\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:48 crc kubenswrapper[4558]: I0120 17:10:48.137033 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7e6f6562-3dc9-4cdb-8679-1342459d299f-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:48 crc kubenswrapper[4558]: E0120 17:10:48.136856 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/ovncontroller-scripts: configmap "ovncontroller-scripts" not found Jan 20 17:10:48 crc kubenswrapper[4558]: E0120 17:10:48.137291 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/4e9f22e4-d3e1-4e67-93c2-4774c61c66bf-scripts podName:4e9f22e4-d3e1-4e67-93c2-4774c61c66bf nodeName:}" failed. No retries permitted until 2026-01-20 17:10:48.637269444 +0000 UTC m=+1742.397607412 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "scripts" (UniqueName: "kubernetes.io/configmap/4e9f22e4-d3e1-4e67-93c2-4774c61c66bf-scripts") pod "ovn-controller-ovs-jgq6k" (UID: "4e9f22e4-d3e1-4e67-93c2-4774c61c66bf") : configmap "ovncontroller-scripts" not found Jan 20 17:10:48 crc kubenswrapper[4558]: I0120 17:10:48.138781 4558 scope.go:117] "RemoveContainer" containerID="12971f559b3b533b9c1d7ba73504b1b6cf3d6f393ac7e397073d25b2d8e8847c" Jan 20 17:10:48 crc kubenswrapper[4558]: E0120 17:10:48.139156 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"12971f559b3b533b9c1d7ba73504b1b6cf3d6f393ac7e397073d25b2d8e8847c\": container with ID starting with 12971f559b3b533b9c1d7ba73504b1b6cf3d6f393ac7e397073d25b2d8e8847c not found: ID does not exist" containerID="12971f559b3b533b9c1d7ba73504b1b6cf3d6f393ac7e397073d25b2d8e8847c" Jan 20 17:10:48 crc kubenswrapper[4558]: I0120 17:10:48.139204 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"12971f559b3b533b9c1d7ba73504b1b6cf3d6f393ac7e397073d25b2d8e8847c"} err="failed to get container status \"12971f559b3b533b9c1d7ba73504b1b6cf3d6f393ac7e397073d25b2d8e8847c\": rpc error: code = NotFound desc = could not find container \"12971f559b3b533b9c1d7ba73504b1b6cf3d6f393ac7e397073d25b2d8e8847c\": container with ID starting with 12971f559b3b533b9c1d7ba73504b1b6cf3d6f393ac7e397073d25b2d8e8847c not found: ID does not exist" Jan 20 17:10:48 crc kubenswrapper[4558]: E0120 17:10:48.196505 4558 handlers.go:78] "Exec lifecycle hook for Container in Pod failed" err="command '/usr/share/ovn/scripts/ovn-ctl stop_controller' exited with 137: " execCommand=["/usr/share/ovn/scripts/ovn-ctl","stop_controller"] containerName="ovn-controller" pod="openstack-kuttl-tests/ovn-controller-76z65" message=< Jan 20 17:10:48 crc kubenswrapper[4558]: Exiting ovn-controller (1) [ OK ] Jan 20 17:10:48 crc kubenswrapper[4558]: > Jan 20 17:10:48 crc kubenswrapper[4558]: E0120 17:10:48.196544 4558 kuberuntime_container.go:691] "PreStop hook failed" err="command '/usr/share/ovn/scripts/ovn-ctl stop_controller' exited with 137: " pod="openstack-kuttl-tests/ovn-controller-76z65" podUID="cc4ac09f-8f5e-474d-8669-a3a2927a37aa" containerName="ovn-controller" containerID="cri-o://901cf54c320d55e1c1655fb84307ad5367d8cb5e216fc551badcbd60f487bbcf" Jan 20 17:10:48 crc kubenswrapper[4558]: I0120 17:10:48.196582 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovn-controller-76z65" podUID="cc4ac09f-8f5e-474d-8669-a3a2927a37aa" containerName="ovn-controller" containerID="cri-o://901cf54c320d55e1c1655fb84307ad5367d8cb5e216fc551badcbd60f487bbcf" gracePeriod=30 Jan 20 17:10:48 crc kubenswrapper[4558]: I0120 17:10:48.376821 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovn-controller-76z65-config-2ljgt" Jan 20 17:10:48 crc kubenswrapper[4558]: I0120 17:10:48.443831 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hgbqx\" (UniqueName: \"kubernetes.io/projected/d298dad1-1064-4368-aaf0-0bd0b215ee60-kube-api-access-hgbqx\") pod \"d298dad1-1064-4368-aaf0-0bd0b215ee60\" (UID: \"d298dad1-1064-4368-aaf0-0bd0b215ee60\") " Jan 20 17:10:48 crc kubenswrapper[4558]: I0120 17:10:48.444237 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d298dad1-1064-4368-aaf0-0bd0b215ee60-scripts\") pod \"d298dad1-1064-4368-aaf0-0bd0b215ee60\" (UID: \"d298dad1-1064-4368-aaf0-0bd0b215ee60\") " Jan 20 17:10:48 crc kubenswrapper[4558]: I0120 17:10:48.444331 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/d298dad1-1064-4368-aaf0-0bd0b215ee60-var-run-ovn\") pod \"d298dad1-1064-4368-aaf0-0bd0b215ee60\" (UID: \"d298dad1-1064-4368-aaf0-0bd0b215ee60\") " Jan 20 17:10:48 crc kubenswrapper[4558]: I0120 17:10:48.444429 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/d298dad1-1064-4368-aaf0-0bd0b215ee60-additional-scripts\") pod \"d298dad1-1064-4368-aaf0-0bd0b215ee60\" (UID: \"d298dad1-1064-4368-aaf0-0bd0b215ee60\") " Jan 20 17:10:48 crc kubenswrapper[4558]: I0120 17:10:48.444447 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d298dad1-1064-4368-aaf0-0bd0b215ee60-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "d298dad1-1064-4368-aaf0-0bd0b215ee60" (UID: "d298dad1-1064-4368-aaf0-0bd0b215ee60"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:10:48 crc kubenswrapper[4558]: I0120 17:10:48.444482 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/d298dad1-1064-4368-aaf0-0bd0b215ee60-var-run\") pod \"d298dad1-1064-4368-aaf0-0bd0b215ee60\" (UID: \"d298dad1-1064-4368-aaf0-0bd0b215ee60\") " Jan 20 17:10:48 crc kubenswrapper[4558]: I0120 17:10:48.444550 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/d298dad1-1064-4368-aaf0-0bd0b215ee60-var-log-ovn\") pod \"d298dad1-1064-4368-aaf0-0bd0b215ee60\" (UID: \"d298dad1-1064-4368-aaf0-0bd0b215ee60\") " Jan 20 17:10:48 crc kubenswrapper[4558]: I0120 17:10:48.444768 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d298dad1-1064-4368-aaf0-0bd0b215ee60-var-run" (OuterVolumeSpecName: "var-run") pod "d298dad1-1064-4368-aaf0-0bd0b215ee60" (UID: "d298dad1-1064-4368-aaf0-0bd0b215ee60"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:10:48 crc kubenswrapper[4558]: I0120 17:10:48.444902 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d298dad1-1064-4368-aaf0-0bd0b215ee60-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "d298dad1-1064-4368-aaf0-0bd0b215ee60" (UID: "d298dad1-1064-4368-aaf0-0bd0b215ee60"). InnerVolumeSpecName "var-log-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:10:48 crc kubenswrapper[4558]: I0120 17:10:48.444956 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d298dad1-1064-4368-aaf0-0bd0b215ee60-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "d298dad1-1064-4368-aaf0-0bd0b215ee60" (UID: "d298dad1-1064-4368-aaf0-0bd0b215ee60"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:10:48 crc kubenswrapper[4558]: I0120 17:10:48.445210 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d298dad1-1064-4368-aaf0-0bd0b215ee60-scripts" (OuterVolumeSpecName: "scripts") pod "d298dad1-1064-4368-aaf0-0bd0b215ee60" (UID: "d298dad1-1064-4368-aaf0-0bd0b215ee60"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:10:48 crc kubenswrapper[4558]: I0120 17:10:48.445472 4558 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/d298dad1-1064-4368-aaf0-0bd0b215ee60-var-run-ovn\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:48 crc kubenswrapper[4558]: I0120 17:10:48.445489 4558 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/d298dad1-1064-4368-aaf0-0bd0b215ee60-additional-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:48 crc kubenswrapper[4558]: I0120 17:10:48.445501 4558 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/d298dad1-1064-4368-aaf0-0bd0b215ee60-var-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:48 crc kubenswrapper[4558]: I0120 17:10:48.445511 4558 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/d298dad1-1064-4368-aaf0-0bd0b215ee60-var-log-ovn\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:48 crc kubenswrapper[4558]: I0120 17:10:48.445519 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d298dad1-1064-4368-aaf0-0bd0b215ee60-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:48 crc kubenswrapper[4558]: I0120 17:10:48.446954 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovn-controller-metrics-hq9q2"] Jan 20 17:10:48 crc kubenswrapper[4558]: I0120 17:10:48.448636 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d298dad1-1064-4368-aaf0-0bd0b215ee60-kube-api-access-hgbqx" (OuterVolumeSpecName: "kube-api-access-hgbqx") pod "d298dad1-1064-4368-aaf0-0bd0b215ee60" (UID: "d298dad1-1064-4368-aaf0-0bd0b215ee60"). InnerVolumeSpecName "kube-api-access-hgbqx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:10:48 crc kubenswrapper[4558]: I0120 17:10:48.453986 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ovn-controller-metrics-hq9q2"] Jan 20 17:10:48 crc kubenswrapper[4558]: I0120 17:10:48.491235 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovn-controller-76z65" Jan 20 17:10:48 crc kubenswrapper[4558]: I0120 17:10:48.546883 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5xq97\" (UniqueName: \"kubernetes.io/projected/cc4ac09f-8f5e-474d-8669-a3a2927a37aa-kube-api-access-5xq97\") pod \"cc4ac09f-8f5e-474d-8669-a3a2927a37aa\" (UID: \"cc4ac09f-8f5e-474d-8669-a3a2927a37aa\") " Jan 20 17:10:48 crc kubenswrapper[4558]: I0120 17:10:48.547042 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc4ac09f-8f5e-474d-8669-a3a2927a37aa-ovn-controller-tls-certs\") pod \"cc4ac09f-8f5e-474d-8669-a3a2927a37aa\" (UID: \"cc4ac09f-8f5e-474d-8669-a3a2927a37aa\") " Jan 20 17:10:48 crc kubenswrapper[4558]: I0120 17:10:48.547125 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/cc4ac09f-8f5e-474d-8669-a3a2927a37aa-var-run\") pod \"cc4ac09f-8f5e-474d-8669-a3a2927a37aa\" (UID: \"cc4ac09f-8f5e-474d-8669-a3a2927a37aa\") " Jan 20 17:10:48 crc kubenswrapper[4558]: I0120 17:10:48.547264 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/cc4ac09f-8f5e-474d-8669-a3a2927a37aa-scripts\") pod \"cc4ac09f-8f5e-474d-8669-a3a2927a37aa\" (UID: \"cc4ac09f-8f5e-474d-8669-a3a2927a37aa\") " Jan 20 17:10:48 crc kubenswrapper[4558]: I0120 17:10:48.547337 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/cc4ac09f-8f5e-474d-8669-a3a2927a37aa-var-run" (OuterVolumeSpecName: "var-run") pod "cc4ac09f-8f5e-474d-8669-a3a2927a37aa" (UID: "cc4ac09f-8f5e-474d-8669-a3a2927a37aa"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:10:48 crc kubenswrapper[4558]: I0120 17:10:48.547422 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/cc4ac09f-8f5e-474d-8669-a3a2927a37aa-var-run-ovn\") pod \"cc4ac09f-8f5e-474d-8669-a3a2927a37aa\" (UID: \"cc4ac09f-8f5e-474d-8669-a3a2927a37aa\") " Jan 20 17:10:48 crc kubenswrapper[4558]: I0120 17:10:48.547467 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc4ac09f-8f5e-474d-8669-a3a2927a37aa-combined-ca-bundle\") pod \"cc4ac09f-8f5e-474d-8669-a3a2927a37aa\" (UID: \"cc4ac09f-8f5e-474d-8669-a3a2927a37aa\") " Jan 20 17:10:48 crc kubenswrapper[4558]: I0120 17:10:48.547484 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/cc4ac09f-8f5e-474d-8669-a3a2927a37aa-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "cc4ac09f-8f5e-474d-8669-a3a2927a37aa" (UID: "cc4ac09f-8f5e-474d-8669-a3a2927a37aa"). InnerVolumeSpecName "var-run-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:10:48 crc kubenswrapper[4558]: I0120 17:10:48.547677 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/cc4ac09f-8f5e-474d-8669-a3a2927a37aa-var-log-ovn\") pod \"cc4ac09f-8f5e-474d-8669-a3a2927a37aa\" (UID: \"cc4ac09f-8f5e-474d-8669-a3a2927a37aa\") " Jan 20 17:10:48 crc kubenswrapper[4558]: I0120 17:10:48.547803 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/cc4ac09f-8f5e-474d-8669-a3a2927a37aa-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "cc4ac09f-8f5e-474d-8669-a3a2927a37aa" (UID: "cc4ac09f-8f5e-474d-8669-a3a2927a37aa"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:10:48 crc kubenswrapper[4558]: I0120 17:10:48.548539 4558 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/cc4ac09f-8f5e-474d-8669-a3a2927a37aa-var-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:48 crc kubenswrapper[4558]: I0120 17:10:48.548566 4558 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/cc4ac09f-8f5e-474d-8669-a3a2927a37aa-var-run-ovn\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:48 crc kubenswrapper[4558]: I0120 17:10:48.548583 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hgbqx\" (UniqueName: \"kubernetes.io/projected/d298dad1-1064-4368-aaf0-0bd0b215ee60-kube-api-access-hgbqx\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:48 crc kubenswrapper[4558]: I0120 17:10:48.548594 4558 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/cc4ac09f-8f5e-474d-8669-a3a2927a37aa-var-log-ovn\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:48 crc kubenswrapper[4558]: I0120 17:10:48.549762 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cc4ac09f-8f5e-474d-8669-a3a2927a37aa-scripts" (OuterVolumeSpecName: "scripts") pod "cc4ac09f-8f5e-474d-8669-a3a2927a37aa" (UID: "cc4ac09f-8f5e-474d-8669-a3a2927a37aa"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:10:48 crc kubenswrapper[4558]: I0120 17:10:48.551114 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cc4ac09f-8f5e-474d-8669-a3a2927a37aa-kube-api-access-5xq97" (OuterVolumeSpecName: "kube-api-access-5xq97") pod "cc4ac09f-8f5e-474d-8669-a3a2927a37aa" (UID: "cc4ac09f-8f5e-474d-8669-a3a2927a37aa"). InnerVolumeSpecName "kube-api-access-5xq97". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:10:48 crc kubenswrapper[4558]: I0120 17:10:48.571744 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc4ac09f-8f5e-474d-8669-a3a2927a37aa-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cc4ac09f-8f5e-474d-8669-a3a2927a37aa" (UID: "cc4ac09f-8f5e-474d-8669-a3a2927a37aa"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:48 crc kubenswrapper[4558]: I0120 17:10:48.581132 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7e6f6562-3dc9-4cdb-8679-1342459d299f" path="/var/lib/kubelet/pods/7e6f6562-3dc9-4cdb-8679-1342459d299f/volumes" Jan 20 17:10:48 crc kubenswrapper[4558]: I0120 17:10:48.611845 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc4ac09f-8f5e-474d-8669-a3a2927a37aa-ovn-controller-tls-certs" (OuterVolumeSpecName: "ovn-controller-tls-certs") pod "cc4ac09f-8f5e-474d-8669-a3a2927a37aa" (UID: "cc4ac09f-8f5e-474d-8669-a3a2927a37aa"). InnerVolumeSpecName "ovn-controller-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:48 crc kubenswrapper[4558]: I0120 17:10:48.649797 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/cc4ac09f-8f5e-474d-8669-a3a2927a37aa-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:48 crc kubenswrapper[4558]: I0120 17:10:48.649826 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc4ac09f-8f5e-474d-8669-a3a2927a37aa-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:48 crc kubenswrapper[4558]: I0120 17:10:48.649839 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5xq97\" (UniqueName: \"kubernetes.io/projected/cc4ac09f-8f5e-474d-8669-a3a2927a37aa-kube-api-access-5xq97\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:48 crc kubenswrapper[4558]: I0120 17:10:48.649849 4558 reconciler_common.go:293] "Volume detached for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc4ac09f-8f5e-474d-8669-a3a2927a37aa-ovn-controller-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:48 crc kubenswrapper[4558]: E0120 17:10:48.650246 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/ovncontroller-scripts: configmap "ovncontroller-scripts" not found Jan 20 17:10:48 crc kubenswrapper[4558]: E0120 17:10:48.650396 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/4e9f22e4-d3e1-4e67-93c2-4774c61c66bf-scripts podName:4e9f22e4-d3e1-4e67-93c2-4774c61c66bf nodeName:}" failed. No retries permitted until 2026-01-20 17:10:49.650377532 +0000 UTC m=+1743.410715498 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "scripts" (UniqueName: "kubernetes.io/configmap/4e9f22e4-d3e1-4e67-93c2-4774c61c66bf-scripts") pod "ovn-controller-ovs-jgq6k" (UID: "4e9f22e4-d3e1-4e67-93c2-4774c61c66bf") : configmap "ovncontroller-scripts" not found Jan 20 17:10:49 crc kubenswrapper[4558]: I0120 17:10:49.120925 4558 generic.go:334] "Generic (PLEG): container finished" podID="cc4ac09f-8f5e-474d-8669-a3a2927a37aa" containerID="901cf54c320d55e1c1655fb84307ad5367d8cb5e216fc551badcbd60f487bbcf" exitCode=0 Jan 20 17:10:49 crc kubenswrapper[4558]: I0120 17:10:49.121002 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovn-controller-76z65" Jan 20 17:10:49 crc kubenswrapper[4558]: I0120 17:10:49.121040 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-controller-76z65" event={"ID":"cc4ac09f-8f5e-474d-8669-a3a2927a37aa","Type":"ContainerDied","Data":"901cf54c320d55e1c1655fb84307ad5367d8cb5e216fc551badcbd60f487bbcf"} Jan 20 17:10:49 crc kubenswrapper[4558]: I0120 17:10:49.121113 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-controller-76z65" event={"ID":"cc4ac09f-8f5e-474d-8669-a3a2927a37aa","Type":"ContainerDied","Data":"ff22ab3ae0a3eb2241357f8e13dec89999702b34410e4f2783597f229953b310"} Jan 20 17:10:49 crc kubenswrapper[4558]: I0120 17:10:49.121139 4558 scope.go:117] "RemoveContainer" containerID="901cf54c320d55e1c1655fb84307ad5367d8cb5e216fc551badcbd60f487bbcf" Jan 20 17:10:49 crc kubenswrapper[4558]: I0120 17:10:49.124232 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-controller-76z65-config-2ljgt" event={"ID":"d298dad1-1064-4368-aaf0-0bd0b215ee60","Type":"ContainerDied","Data":"cc46c41e4fe6d4d79db46df29d2cad7939fe18b6ffd72dc63b9f79519906671e"} Jan 20 17:10:49 crc kubenswrapper[4558]: I0120 17:10:49.124269 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cc46c41e4fe6d4d79db46df29d2cad7939fe18b6ffd72dc63b9f79519906671e" Jan 20 17:10:49 crc kubenswrapper[4558]: I0120 17:10:49.124282 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovn-controller-76z65-config-2ljgt" Jan 20 17:10:49 crc kubenswrapper[4558]: I0120 17:10:49.158304 4558 scope.go:117] "RemoveContainer" containerID="901cf54c320d55e1c1655fb84307ad5367d8cb5e216fc551badcbd60f487bbcf" Jan 20 17:10:49 crc kubenswrapper[4558]: E0120 17:10:49.158879 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"901cf54c320d55e1c1655fb84307ad5367d8cb5e216fc551badcbd60f487bbcf\": container with ID starting with 901cf54c320d55e1c1655fb84307ad5367d8cb5e216fc551badcbd60f487bbcf not found: ID does not exist" containerID="901cf54c320d55e1c1655fb84307ad5367d8cb5e216fc551badcbd60f487bbcf" Jan 20 17:10:49 crc kubenswrapper[4558]: I0120 17:10:49.158916 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"901cf54c320d55e1c1655fb84307ad5367d8cb5e216fc551badcbd60f487bbcf"} err="failed to get container status \"901cf54c320d55e1c1655fb84307ad5367d8cb5e216fc551badcbd60f487bbcf\": rpc error: code = NotFound desc = could not find container \"901cf54c320d55e1c1655fb84307ad5367d8cb5e216fc551badcbd60f487bbcf\": container with ID starting with 901cf54c320d55e1c1655fb84307ad5367d8cb5e216fc551badcbd60f487bbcf not found: ID does not exist" Jan 20 17:10:49 crc kubenswrapper[4558]: I0120 17:10:49.170541 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovn-controller-76z65-config-2ljgt"] Jan 20 17:10:49 crc kubenswrapper[4558]: I0120 17:10:49.178511 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ovn-controller-76z65-config-2ljgt"] Jan 20 17:10:49 crc kubenswrapper[4558]: I0120 17:10:49.184695 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovn-controller-76z65"] Jan 20 17:10:49 crc kubenswrapper[4558]: I0120 17:10:49.191357 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack-kuttl-tests/ovn-controller-76z65"] Jan 20 17:10:49 crc kubenswrapper[4558]: I0120 17:10:49.654869 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovn-controller-ovs-jgq6k" podUID="4e9f22e4-d3e1-4e67-93c2-4774c61c66bf" containerName="ovs-vswitchd" containerID="cri-o://9c27c6b27ef7131cb9e16bde6b7dd1e00481e16fcc5615909f60bfad298cfc8a" gracePeriod=30 Jan 20 17:10:49 crc kubenswrapper[4558]: E0120 17:10:49.667448 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/ovncontroller-scripts: configmap "ovncontroller-scripts" not found Jan 20 17:10:49 crc kubenswrapper[4558]: E0120 17:10:49.667983 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/4e9f22e4-d3e1-4e67-93c2-4774c61c66bf-scripts podName:4e9f22e4-d3e1-4e67-93c2-4774c61c66bf nodeName:}" failed. No retries permitted until 2026-01-20 17:10:51.667930213 +0000 UTC m=+1745.428268180 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "scripts" (UniqueName: "kubernetes.io/configmap/4e9f22e4-d3e1-4e67-93c2-4774c61c66bf-scripts") pod "ovn-controller-ovs-jgq6k" (UID: "4e9f22e4-d3e1-4e67-93c2-4774c61c66bf") : configmap "ovncontroller-scripts" not found Jan 20 17:10:49 crc kubenswrapper[4558]: E0120 17:10:49.741019 4558 handlers.go:78] "Exec lifecycle hook for Container in Pod failed" err=< Jan 20 17:10:49 crc kubenswrapper[4558]: command '/usr/local/bin/container-scripts/stop-ovsdb-server.sh' exited with 137: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh Jan 20 17:10:49 crc kubenswrapper[4558]: + source /usr/local/bin/container-scripts/functions Jan 20 17:10:49 crc kubenswrapper[4558]: ++ OVNBridge=br-int Jan 20 17:10:49 crc kubenswrapper[4558]: ++ OVNRemote=tcp:localhost:6642 Jan 20 17:10:49 crc kubenswrapper[4558]: ++ OVNEncapType=geneve Jan 20 17:10:49 crc kubenswrapper[4558]: ++ OVNAvailabilityZones= Jan 20 17:10:49 crc kubenswrapper[4558]: ++ EnableChassisAsGateway=true Jan 20 17:10:49 crc kubenswrapper[4558]: ++ PhysicalNetworks= Jan 20 17:10:49 crc kubenswrapper[4558]: ++ OVNHostName= Jan 20 17:10:49 crc kubenswrapper[4558]: ++ DB_FILE=/etc/openvswitch/conf.db Jan 20 17:10:49 crc kubenswrapper[4558]: ++ ovs_dir=/var/lib/openvswitch Jan 20 17:10:49 crc kubenswrapper[4558]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script Jan 20 17:10:49 crc kubenswrapper[4558]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows Jan 20 17:10:49 crc kubenswrapper[4558]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server Jan 20 17:10:49 crc kubenswrapper[4558]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Jan 20 17:10:49 crc kubenswrapper[4558]: + sleep 0.5 Jan 20 17:10:49 crc kubenswrapper[4558]: + '[' '!' 
-f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Jan 20 17:10:49 crc kubenswrapper[4558]: + cleanup_ovsdb_server_semaphore Jan 20 17:10:49 crc kubenswrapper[4558]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server Jan 20 17:10:49 crc kubenswrapper[4558]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd Jan 20 17:10:49 crc kubenswrapper[4558]: > execCommand=["/usr/local/bin/container-scripts/stop-ovsdb-server.sh"] containerName="ovsdb-server" pod="openstack-kuttl-tests/ovn-controller-ovs-jgq6k" message=< Jan 20 17:10:49 crc kubenswrapper[4558]: Exiting ovsdb-server (5) [ OK ] Jan 20 17:10:49 crc kubenswrapper[4558]: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh Jan 20 17:10:49 crc kubenswrapper[4558]: + source /usr/local/bin/container-scripts/functions Jan 20 17:10:49 crc kubenswrapper[4558]: ++ OVNBridge=br-int Jan 20 17:10:49 crc kubenswrapper[4558]: ++ OVNRemote=tcp:localhost:6642 Jan 20 17:10:49 crc kubenswrapper[4558]: ++ OVNEncapType=geneve Jan 20 17:10:49 crc kubenswrapper[4558]: ++ OVNAvailabilityZones= Jan 20 17:10:49 crc kubenswrapper[4558]: ++ EnableChassisAsGateway=true Jan 20 17:10:49 crc kubenswrapper[4558]: ++ PhysicalNetworks= Jan 20 17:10:49 crc kubenswrapper[4558]: ++ OVNHostName= Jan 20 17:10:49 crc kubenswrapper[4558]: ++ DB_FILE=/etc/openvswitch/conf.db Jan 20 17:10:49 crc kubenswrapper[4558]: ++ ovs_dir=/var/lib/openvswitch Jan 20 17:10:49 crc kubenswrapper[4558]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script Jan 20 17:10:49 crc kubenswrapper[4558]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows Jan 20 17:10:49 crc kubenswrapper[4558]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server Jan 20 17:10:49 crc kubenswrapper[4558]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Jan 20 17:10:49 crc kubenswrapper[4558]: + sleep 0.5 Jan 20 17:10:49 crc kubenswrapper[4558]: + '[' '!' 
-f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Jan 20 17:10:49 crc kubenswrapper[4558]: + cleanup_ovsdb_server_semaphore Jan 20 17:10:49 crc kubenswrapper[4558]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server Jan 20 17:10:49 crc kubenswrapper[4558]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd Jan 20 17:10:49 crc kubenswrapper[4558]: > Jan 20 17:10:49 crc kubenswrapper[4558]: E0120 17:10:49.741063 4558 kuberuntime_container.go:691] "PreStop hook failed" err=< Jan 20 17:10:49 crc kubenswrapper[4558]: command '/usr/local/bin/container-scripts/stop-ovsdb-server.sh' exited with 137: ++ dirname /usr/local/bin/container-scripts/stop-ovsdb-server.sh Jan 20 17:10:49 crc kubenswrapper[4558]: + source /usr/local/bin/container-scripts/functions Jan 20 17:10:49 crc kubenswrapper[4558]: ++ OVNBridge=br-int Jan 20 17:10:49 crc kubenswrapper[4558]: ++ OVNRemote=tcp:localhost:6642 Jan 20 17:10:49 crc kubenswrapper[4558]: ++ OVNEncapType=geneve Jan 20 17:10:49 crc kubenswrapper[4558]: ++ OVNAvailabilityZones= Jan 20 17:10:49 crc kubenswrapper[4558]: ++ EnableChassisAsGateway=true Jan 20 17:10:49 crc kubenswrapper[4558]: ++ PhysicalNetworks= Jan 20 17:10:49 crc kubenswrapper[4558]: ++ OVNHostName= Jan 20 17:10:49 crc kubenswrapper[4558]: ++ DB_FILE=/etc/openvswitch/conf.db Jan 20 17:10:49 crc kubenswrapper[4558]: ++ ovs_dir=/var/lib/openvswitch Jan 20 17:10:49 crc kubenswrapper[4558]: ++ FLOWS_RESTORE_SCRIPT=/var/lib/openvswitch/flows-script Jan 20 17:10:49 crc kubenswrapper[4558]: ++ FLOWS_RESTORE_DIR=/var/lib/openvswitch/saved-flows Jan 20 17:10:49 crc kubenswrapper[4558]: ++ SAFE_TO_STOP_OVSDB_SERVER_SEMAPHORE=/var/lib/openvswitch/is_safe_to_stop_ovsdb_server Jan 20 17:10:49 crc kubenswrapper[4558]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Jan 20 17:10:49 crc kubenswrapper[4558]: + sleep 0.5 Jan 20 17:10:49 crc kubenswrapper[4558]: + '[' '!' -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server ']' Jan 20 17:10:49 crc kubenswrapper[4558]: + cleanup_ovsdb_server_semaphore Jan 20 17:10:49 crc kubenswrapper[4558]: + rm -f /var/lib/openvswitch/is_safe_to_stop_ovsdb_server Jan 20 17:10:49 crc kubenswrapper[4558]: + /usr/share/openvswitch/scripts/ovs-ctl stop --no-ovs-vswitchd Jan 20 17:10:49 crc kubenswrapper[4558]: > pod="openstack-kuttl-tests/ovn-controller-ovs-jgq6k" podUID="4e9f22e4-d3e1-4e67-93c2-4774c61c66bf" containerName="ovsdb-server" containerID="cri-o://f93f94d4c8b0fcf0d9961eab49e9fd5086f604818a4a1ad930503c4ada356fe6" Jan 20 17:10:49 crc kubenswrapper[4558]: I0120 17:10:49.741119 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovn-controller-ovs-jgq6k" podUID="4e9f22e4-d3e1-4e67-93c2-4774c61c66bf" containerName="ovsdb-server" containerID="cri-o://f93f94d4c8b0fcf0d9961eab49e9fd5086f604818a4a1ad930503c4ada356fe6" gracePeriod=30 Jan 20 17:10:50 crc kubenswrapper[4558]: I0120 17:10:50.105347 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovn-controller-ovs-jgq6k_4e9f22e4-d3e1-4e67-93c2-4774c61c66bf/ovs-vswitchd/0.log" Jan 20 17:10:50 crc kubenswrapper[4558]: I0120 17:10:50.106141 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovn-controller-ovs-jgq6k" Jan 20 17:10:50 crc kubenswrapper[4558]: I0120 17:10:50.135569 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovn-controller-ovs-jgq6k_4e9f22e4-d3e1-4e67-93c2-4774c61c66bf/ovs-vswitchd/0.log" Jan 20 17:10:50 crc kubenswrapper[4558]: I0120 17:10:50.136525 4558 generic.go:334] "Generic (PLEG): container finished" podID="4e9f22e4-d3e1-4e67-93c2-4774c61c66bf" containerID="9c27c6b27ef7131cb9e16bde6b7dd1e00481e16fcc5615909f60bfad298cfc8a" exitCode=143 Jan 20 17:10:50 crc kubenswrapper[4558]: I0120 17:10:50.136569 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovn-controller-ovs-jgq6k" Jan 20 17:10:50 crc kubenswrapper[4558]: I0120 17:10:50.136587 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-controller-ovs-jgq6k" event={"ID":"4e9f22e4-d3e1-4e67-93c2-4774c61c66bf","Type":"ContainerDied","Data":"9c27c6b27ef7131cb9e16bde6b7dd1e00481e16fcc5615909f60bfad298cfc8a"} Jan 20 17:10:50 crc kubenswrapper[4558]: I0120 17:10:50.136647 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-controller-ovs-jgq6k" event={"ID":"4e9f22e4-d3e1-4e67-93c2-4774c61c66bf","Type":"ContainerDied","Data":"f93f94d4c8b0fcf0d9961eab49e9fd5086f604818a4a1ad930503c4ada356fe6"} Jan 20 17:10:50 crc kubenswrapper[4558]: I0120 17:10:50.136569 4558 generic.go:334] "Generic (PLEG): container finished" podID="4e9f22e4-d3e1-4e67-93c2-4774c61c66bf" containerID="f93f94d4c8b0fcf0d9961eab49e9fd5086f604818a4a1ad930503c4ada356fe6" exitCode=0 Jan 20 17:10:50 crc kubenswrapper[4558]: I0120 17:10:50.136699 4558 scope.go:117] "RemoveContainer" containerID="9c27c6b27ef7131cb9e16bde6b7dd1e00481e16fcc5615909f60bfad298cfc8a" Jan 20 17:10:50 crc kubenswrapper[4558]: I0120 17:10:50.136766 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-controller-ovs-jgq6k" event={"ID":"4e9f22e4-d3e1-4e67-93c2-4774c61c66bf","Type":"ContainerDied","Data":"01ad088341bcbd8a3065cb7befa9425bef5648a324a58cec68bd0cac01d484a7"} Jan 20 17:10:50 crc kubenswrapper[4558]: I0120 17:10:50.156296 4558 scope.go:117] "RemoveContainer" containerID="f93f94d4c8b0fcf0d9961eab49e9fd5086f604818a4a1ad930503c4ada356fe6" Jan 20 17:10:50 crc kubenswrapper[4558]: I0120 17:10:50.172555 4558 scope.go:117] "RemoveContainer" containerID="34c19f16fa96fda159f5a09a39d736bc3656c84c962957aba7eacf2ef3a38ee0" Jan 20 17:10:50 crc kubenswrapper[4558]: I0120 17:10:50.176771 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/4e9f22e4-d3e1-4e67-93c2-4774c61c66bf-etc-ovs\") pod \"4e9f22e4-d3e1-4e67-93c2-4774c61c66bf\" (UID: \"4e9f22e4-d3e1-4e67-93c2-4774c61c66bf\") " Jan 20 17:10:50 crc kubenswrapper[4558]: I0120 17:10:50.176831 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4e9f22e4-d3e1-4e67-93c2-4774c61c66bf-scripts\") pod \"4e9f22e4-d3e1-4e67-93c2-4774c61c66bf\" (UID: \"4e9f22e4-d3e1-4e67-93c2-4774c61c66bf\") " Jan 20 17:10:50 crc kubenswrapper[4558]: I0120 17:10:50.176877 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4e9f22e4-d3e1-4e67-93c2-4774c61c66bf-etc-ovs" (OuterVolumeSpecName: "etc-ovs") pod "4e9f22e4-d3e1-4e67-93c2-4774c61c66bf" (UID: "4e9f22e4-d3e1-4e67-93c2-4774c61c66bf"). 
InnerVolumeSpecName "etc-ovs". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:10:50 crc kubenswrapper[4558]: I0120 17:10:50.176859 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/4e9f22e4-d3e1-4e67-93c2-4774c61c66bf-var-lib\") pod \"4e9f22e4-d3e1-4e67-93c2-4774c61c66bf\" (UID: \"4e9f22e4-d3e1-4e67-93c2-4774c61c66bf\") " Jan 20 17:10:50 crc kubenswrapper[4558]: I0120 17:10:50.177046 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4e9f22e4-d3e1-4e67-93c2-4774c61c66bf-var-lib" (OuterVolumeSpecName: "var-lib") pod "4e9f22e4-d3e1-4e67-93c2-4774c61c66bf" (UID: "4e9f22e4-d3e1-4e67-93c2-4774c61c66bf"). InnerVolumeSpecName "var-lib". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:10:50 crc kubenswrapper[4558]: I0120 17:10:50.177071 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/4e9f22e4-d3e1-4e67-93c2-4774c61c66bf-var-log\") pod \"4e9f22e4-d3e1-4e67-93c2-4774c61c66bf\" (UID: \"4e9f22e4-d3e1-4e67-93c2-4774c61c66bf\") " Jan 20 17:10:50 crc kubenswrapper[4558]: I0120 17:10:50.177161 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4e9f22e4-d3e1-4e67-93c2-4774c61c66bf-var-log" (OuterVolumeSpecName: "var-log") pod "4e9f22e4-d3e1-4e67-93c2-4774c61c66bf" (UID: "4e9f22e4-d3e1-4e67-93c2-4774c61c66bf"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:10:50 crc kubenswrapper[4558]: I0120 17:10:50.177203 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gbcfw\" (UniqueName: \"kubernetes.io/projected/4e9f22e4-d3e1-4e67-93c2-4774c61c66bf-kube-api-access-gbcfw\") pod \"4e9f22e4-d3e1-4e67-93c2-4774c61c66bf\" (UID: \"4e9f22e4-d3e1-4e67-93c2-4774c61c66bf\") " Jan 20 17:10:50 crc kubenswrapper[4558]: I0120 17:10:50.177234 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/4e9f22e4-d3e1-4e67-93c2-4774c61c66bf-var-run\") pod \"4e9f22e4-d3e1-4e67-93c2-4774c61c66bf\" (UID: \"4e9f22e4-d3e1-4e67-93c2-4774c61c66bf\") " Jan 20 17:10:50 crc kubenswrapper[4558]: I0120 17:10:50.177337 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4e9f22e4-d3e1-4e67-93c2-4774c61c66bf-var-run" (OuterVolumeSpecName: "var-run") pod "4e9f22e4-d3e1-4e67-93c2-4774c61c66bf" (UID: "4e9f22e4-d3e1-4e67-93c2-4774c61c66bf"). InnerVolumeSpecName "var-run". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:10:50 crc kubenswrapper[4558]: I0120 17:10:50.177935 4558 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/4e9f22e4-d3e1-4e67-93c2-4774c61c66bf-var-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:50 crc kubenswrapper[4558]: I0120 17:10:50.177957 4558 reconciler_common.go:293] "Volume detached for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/4e9f22e4-d3e1-4e67-93c2-4774c61c66bf-etc-ovs\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:50 crc kubenswrapper[4558]: I0120 17:10:50.177955 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4e9f22e4-d3e1-4e67-93c2-4774c61c66bf-scripts" (OuterVolumeSpecName: "scripts") pod "4e9f22e4-d3e1-4e67-93c2-4774c61c66bf" (UID: "4e9f22e4-d3e1-4e67-93c2-4774c61c66bf"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:10:50 crc kubenswrapper[4558]: I0120 17:10:50.177968 4558 reconciler_common.go:293] "Volume detached for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/4e9f22e4-d3e1-4e67-93c2-4774c61c66bf-var-lib\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:50 crc kubenswrapper[4558]: I0120 17:10:50.178027 4558 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/4e9f22e4-d3e1-4e67-93c2-4774c61c66bf-var-log\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:50 crc kubenswrapper[4558]: I0120 17:10:50.181899 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e9f22e4-d3e1-4e67-93c2-4774c61c66bf-kube-api-access-gbcfw" (OuterVolumeSpecName: "kube-api-access-gbcfw") pod "4e9f22e4-d3e1-4e67-93c2-4774c61c66bf" (UID: "4e9f22e4-d3e1-4e67-93c2-4774c61c66bf"). InnerVolumeSpecName "kube-api-access-gbcfw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:10:50 crc kubenswrapper[4558]: I0120 17:10:50.195802 4558 scope.go:117] "RemoveContainer" containerID="9c27c6b27ef7131cb9e16bde6b7dd1e00481e16fcc5615909f60bfad298cfc8a" Jan 20 17:10:50 crc kubenswrapper[4558]: E0120 17:10:50.196510 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9c27c6b27ef7131cb9e16bde6b7dd1e00481e16fcc5615909f60bfad298cfc8a\": container with ID starting with 9c27c6b27ef7131cb9e16bde6b7dd1e00481e16fcc5615909f60bfad298cfc8a not found: ID does not exist" containerID="9c27c6b27ef7131cb9e16bde6b7dd1e00481e16fcc5615909f60bfad298cfc8a" Jan 20 17:10:50 crc kubenswrapper[4558]: I0120 17:10:50.196753 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9c27c6b27ef7131cb9e16bde6b7dd1e00481e16fcc5615909f60bfad298cfc8a"} err="failed to get container status \"9c27c6b27ef7131cb9e16bde6b7dd1e00481e16fcc5615909f60bfad298cfc8a\": rpc error: code = NotFound desc = could not find container \"9c27c6b27ef7131cb9e16bde6b7dd1e00481e16fcc5615909f60bfad298cfc8a\": container with ID starting with 9c27c6b27ef7131cb9e16bde6b7dd1e00481e16fcc5615909f60bfad298cfc8a not found: ID does not exist" Jan 20 17:10:50 crc kubenswrapper[4558]: I0120 17:10:50.196791 4558 scope.go:117] "RemoveContainer" containerID="f93f94d4c8b0fcf0d9961eab49e9fd5086f604818a4a1ad930503c4ada356fe6" Jan 20 17:10:50 crc kubenswrapper[4558]: E0120 17:10:50.197259 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f93f94d4c8b0fcf0d9961eab49e9fd5086f604818a4a1ad930503c4ada356fe6\": container with ID starting with f93f94d4c8b0fcf0d9961eab49e9fd5086f604818a4a1ad930503c4ada356fe6 not found: ID does not exist" containerID="f93f94d4c8b0fcf0d9961eab49e9fd5086f604818a4a1ad930503c4ada356fe6" Jan 20 17:10:50 crc kubenswrapper[4558]: I0120 17:10:50.197291 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f93f94d4c8b0fcf0d9961eab49e9fd5086f604818a4a1ad930503c4ada356fe6"} err="failed to get container status \"f93f94d4c8b0fcf0d9961eab49e9fd5086f604818a4a1ad930503c4ada356fe6\": rpc error: code = NotFound desc = could not find container \"f93f94d4c8b0fcf0d9961eab49e9fd5086f604818a4a1ad930503c4ada356fe6\": container with ID starting with f93f94d4c8b0fcf0d9961eab49e9fd5086f604818a4a1ad930503c4ada356fe6 not found: ID does not exist" Jan 20 17:10:50 crc kubenswrapper[4558]: I0120 17:10:50.197310 4558 scope.go:117] "RemoveContainer" containerID="34c19f16fa96fda159f5a09a39d736bc3656c84c962957aba7eacf2ef3a38ee0" Jan 20 17:10:50 crc kubenswrapper[4558]: E0120 17:10:50.197686 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"34c19f16fa96fda159f5a09a39d736bc3656c84c962957aba7eacf2ef3a38ee0\": container with ID starting with 34c19f16fa96fda159f5a09a39d736bc3656c84c962957aba7eacf2ef3a38ee0 not found: ID does not exist" containerID="34c19f16fa96fda159f5a09a39d736bc3656c84c962957aba7eacf2ef3a38ee0" Jan 20 17:10:50 crc kubenswrapper[4558]: I0120 17:10:50.197741 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"34c19f16fa96fda159f5a09a39d736bc3656c84c962957aba7eacf2ef3a38ee0"} err="failed to get container status \"34c19f16fa96fda159f5a09a39d736bc3656c84c962957aba7eacf2ef3a38ee0\": rpc error: code = NotFound desc = could not 
find container \"34c19f16fa96fda159f5a09a39d736bc3656c84c962957aba7eacf2ef3a38ee0\": container with ID starting with 34c19f16fa96fda159f5a09a39d736bc3656c84c962957aba7eacf2ef3a38ee0 not found: ID does not exist" Jan 20 17:10:50 crc kubenswrapper[4558]: I0120 17:10:50.197784 4558 scope.go:117] "RemoveContainer" containerID="9c27c6b27ef7131cb9e16bde6b7dd1e00481e16fcc5615909f60bfad298cfc8a" Jan 20 17:10:50 crc kubenswrapper[4558]: I0120 17:10:50.198147 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9c27c6b27ef7131cb9e16bde6b7dd1e00481e16fcc5615909f60bfad298cfc8a"} err="failed to get container status \"9c27c6b27ef7131cb9e16bde6b7dd1e00481e16fcc5615909f60bfad298cfc8a\": rpc error: code = NotFound desc = could not find container \"9c27c6b27ef7131cb9e16bde6b7dd1e00481e16fcc5615909f60bfad298cfc8a\": container with ID starting with 9c27c6b27ef7131cb9e16bde6b7dd1e00481e16fcc5615909f60bfad298cfc8a not found: ID does not exist" Jan 20 17:10:50 crc kubenswrapper[4558]: I0120 17:10:50.198210 4558 scope.go:117] "RemoveContainer" containerID="f93f94d4c8b0fcf0d9961eab49e9fd5086f604818a4a1ad930503c4ada356fe6" Jan 20 17:10:50 crc kubenswrapper[4558]: I0120 17:10:50.198744 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f93f94d4c8b0fcf0d9961eab49e9fd5086f604818a4a1ad930503c4ada356fe6"} err="failed to get container status \"f93f94d4c8b0fcf0d9961eab49e9fd5086f604818a4a1ad930503c4ada356fe6\": rpc error: code = NotFound desc = could not find container \"f93f94d4c8b0fcf0d9961eab49e9fd5086f604818a4a1ad930503c4ada356fe6\": container with ID starting with f93f94d4c8b0fcf0d9961eab49e9fd5086f604818a4a1ad930503c4ada356fe6 not found: ID does not exist" Jan 20 17:10:50 crc kubenswrapper[4558]: I0120 17:10:50.198775 4558 scope.go:117] "RemoveContainer" containerID="34c19f16fa96fda159f5a09a39d736bc3656c84c962957aba7eacf2ef3a38ee0" Jan 20 17:10:50 crc kubenswrapper[4558]: I0120 17:10:50.199060 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"34c19f16fa96fda159f5a09a39d736bc3656c84c962957aba7eacf2ef3a38ee0"} err="failed to get container status \"34c19f16fa96fda159f5a09a39d736bc3656c84c962957aba7eacf2ef3a38ee0\": rpc error: code = NotFound desc = could not find container \"34c19f16fa96fda159f5a09a39d736bc3656c84c962957aba7eacf2ef3a38ee0\": container with ID starting with 34c19f16fa96fda159f5a09a39d736bc3656c84c962957aba7eacf2ef3a38ee0 not found: ID does not exist" Jan 20 17:10:50 crc kubenswrapper[4558]: I0120 17:10:50.279901 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4e9f22e4-d3e1-4e67-93c2-4774c61c66bf-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:50 crc kubenswrapper[4558]: I0120 17:10:50.279929 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gbcfw\" (UniqueName: \"kubernetes.io/projected/4e9f22e4-d3e1-4e67-93c2-4774c61c66bf-kube-api-access-gbcfw\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:50 crc kubenswrapper[4558]: I0120 17:10:50.469111 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovn-controller-ovs-jgq6k"] Jan 20 17:10:50 crc kubenswrapper[4558]: I0120 17:10:50.476629 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ovn-controller-ovs-jgq6k"] Jan 20 17:10:50 crc kubenswrapper[4558]: I0120 17:10:50.578460 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="4e9f22e4-d3e1-4e67-93c2-4774c61c66bf" path="/var/lib/kubelet/pods/4e9f22e4-d3e1-4e67-93c2-4774c61c66bf/volumes" Jan 20 17:10:50 crc kubenswrapper[4558]: I0120 17:10:50.579379 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cc4ac09f-8f5e-474d-8669-a3a2927a37aa" path="/var/lib/kubelet/pods/cc4ac09f-8f5e-474d-8669-a3a2927a37aa/volumes" Jan 20 17:10:50 crc kubenswrapper[4558]: I0120 17:10:50.579970 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d298dad1-1064-4368-aaf0-0bd0b215ee60" path="/var/lib/kubelet/pods/d298dad1-1064-4368-aaf0-0bd0b215ee60/volumes" Jan 20 17:10:51 crc kubenswrapper[4558]: I0120 17:10:51.710931 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/root-account-create-update-f6ldh"] Jan 20 17:10:51 crc kubenswrapper[4558]: E0120 17:10:51.712523 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e6f6562-3dc9-4cdb-8679-1342459d299f" containerName="openstack-network-exporter" Jan 20 17:10:51 crc kubenswrapper[4558]: I0120 17:10:51.712605 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e6f6562-3dc9-4cdb-8679-1342459d299f" containerName="openstack-network-exporter" Jan 20 17:10:51 crc kubenswrapper[4558]: E0120 17:10:51.712679 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc4ac09f-8f5e-474d-8669-a3a2927a37aa" containerName="ovn-controller" Jan 20 17:10:51 crc kubenswrapper[4558]: I0120 17:10:51.712743 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc4ac09f-8f5e-474d-8669-a3a2927a37aa" containerName="ovn-controller" Jan 20 17:10:51 crc kubenswrapper[4558]: E0120 17:10:51.712796 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e9f22e4-d3e1-4e67-93c2-4774c61c66bf" containerName="ovs-vswitchd" Jan 20 17:10:51 crc kubenswrapper[4558]: I0120 17:10:51.712849 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e9f22e4-d3e1-4e67-93c2-4774c61c66bf" containerName="ovs-vswitchd" Jan 20 17:10:51 crc kubenswrapper[4558]: E0120 17:10:51.712916 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d298dad1-1064-4368-aaf0-0bd0b215ee60" containerName="ovn-config" Jan 20 17:10:51 crc kubenswrapper[4558]: I0120 17:10:51.712961 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d298dad1-1064-4368-aaf0-0bd0b215ee60" containerName="ovn-config" Jan 20 17:10:51 crc kubenswrapper[4558]: E0120 17:10:51.713020 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e9f22e4-d3e1-4e67-93c2-4774c61c66bf" containerName="ovsdb-server" Jan 20 17:10:51 crc kubenswrapper[4558]: I0120 17:10:51.713066 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e9f22e4-d3e1-4e67-93c2-4774c61c66bf" containerName="ovsdb-server" Jan 20 17:10:51 crc kubenswrapper[4558]: E0120 17:10:51.713128 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e9f22e4-d3e1-4e67-93c2-4774c61c66bf" containerName="ovsdb-server-init" Jan 20 17:10:51 crc kubenswrapper[4558]: I0120 17:10:51.713197 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e9f22e4-d3e1-4e67-93c2-4774c61c66bf" containerName="ovsdb-server-init" Jan 20 17:10:51 crc kubenswrapper[4558]: I0120 17:10:51.713451 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e9f22e4-d3e1-4e67-93c2-4774c61c66bf" containerName="ovs-vswitchd" Jan 20 17:10:51 crc kubenswrapper[4558]: I0120 17:10:51.713528 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d298dad1-1064-4368-aaf0-0bd0b215ee60" 
containerName="ovn-config" Jan 20 17:10:51 crc kubenswrapper[4558]: I0120 17:10:51.713584 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="7e6f6562-3dc9-4cdb-8679-1342459d299f" containerName="openstack-network-exporter" Jan 20 17:10:51 crc kubenswrapper[4558]: I0120 17:10:51.713634 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e9f22e4-d3e1-4e67-93c2-4774c61c66bf" containerName="ovsdb-server" Jan 20 17:10:51 crc kubenswrapper[4558]: I0120 17:10:51.713690 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="cc4ac09f-8f5e-474d-8669-a3a2927a37aa" containerName="ovn-controller" Jan 20 17:10:51 crc kubenswrapper[4558]: I0120 17:10:51.714471 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-f6ldh" Jan 20 17:10:51 crc kubenswrapper[4558]: E0120 17:10:51.726479 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:10:51 crc kubenswrapper[4558]: E0120 17:10:51.726538 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f98dbc6f-bd60-4064-b0d8-947d4080b9ec-combined-ca-bundle podName:f98dbc6f-bd60-4064-b0d8-947d4080b9ec nodeName:}" failed. No retries permitted until 2026-01-20 17:10:52.226520239 +0000 UTC m=+1745.986858205 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/f98dbc6f-bd60-4064-b0d8-947d4080b9ec-combined-ca-bundle") pod "ovsdbserver-nb-0" (UID: "f98dbc6f-bd60-4064-b0d8-947d4080b9ec") : secret "combined-ca-bundle" not found Jan 20 17:10:51 crc kubenswrapper[4558]: I0120 17:10:51.734623 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"openstack-mariadb-root-db-secret" Jan 20 17:10:51 crc kubenswrapper[4558]: I0120 17:10:51.739771 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-f6ldh"] Jan 20 17:10:51 crc kubenswrapper[4558]: I0120 17:10:51.789143 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:10:51 crc kubenswrapper[4558]: I0120 17:10:51.832654 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-cba5-account-create-update-4mwc6"] Jan 20 17:10:51 crc kubenswrapper[4558]: I0120 17:10:51.834017 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-cba5-account-create-update-4mwc6" Jan 20 17:10:51 crc kubenswrapper[4558]: I0120 17:10:51.836272 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-db-secret" Jan 20 17:10:51 crc kubenswrapper[4558]: I0120 17:10:51.838605 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/931d9a7a-aa04-42c3-a276-e71388a8b9d3-operator-scripts\") pod \"root-account-create-update-f6ldh\" (UID: \"931d9a7a-aa04-42c3-a276-e71388a8b9d3\") " pod="openstack-kuttl-tests/root-account-create-update-f6ldh" Jan 20 17:10:51 crc kubenswrapper[4558]: I0120 17:10:51.838896 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rd4km\" (UniqueName: \"kubernetes.io/projected/931d9a7a-aa04-42c3-a276-e71388a8b9d3-kube-api-access-rd4km\") pod \"root-account-create-update-f6ldh\" (UID: \"931d9a7a-aa04-42c3-a276-e71388a8b9d3\") " pod="openstack-kuttl-tests/root-account-create-update-f6ldh" Jan 20 17:10:51 crc kubenswrapper[4558]: I0120 17:10:51.883143 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/placement-64d3-account-create-update-h72jc"] Jan 20 17:10:51 crc kubenswrapper[4558]: I0120 17:10:51.884785 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-64d3-account-create-update-h72jc" Jan 20 17:10:51 crc kubenswrapper[4558]: I0120 17:10:51.890343 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"placement-db-secret" Jan 20 17:10:51 crc kubenswrapper[4558]: I0120 17:10:51.933824 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:10:51 crc kubenswrapper[4558]: I0120 17:10:51.934211 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/openstackclient" podUID="6d6dfb4c-9a8d-405e-8ffd-288e446998cf" containerName="openstackclient" containerID="cri-o://be9c2c4b147215f87ea040240daeaf63995b7b21012666e0eb6d43c0ceb36adc" gracePeriod=2 Jan 20 17:10:51 crc kubenswrapper[4558]: I0120 17:10:51.943521 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a01fa32b-fc5c-49ba-b102-b4751878380a-operator-scripts\") pod \"glance-cba5-account-create-update-4mwc6\" (UID: \"a01fa32b-fc5c-49ba-b102-b4751878380a\") " pod="openstack-kuttl-tests/glance-cba5-account-create-update-4mwc6" Jan 20 17:10:51 crc kubenswrapper[4558]: I0120 17:10:51.943580 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/931d9a7a-aa04-42c3-a276-e71388a8b9d3-operator-scripts\") pod \"root-account-create-update-f6ldh\" (UID: \"931d9a7a-aa04-42c3-a276-e71388a8b9d3\") " pod="openstack-kuttl-tests/root-account-create-update-f6ldh" Jan 20 17:10:51 crc kubenswrapper[4558]: I0120 17:10:51.943672 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2vswq\" (UniqueName: \"kubernetes.io/projected/a01fa32b-fc5c-49ba-b102-b4751878380a-kube-api-access-2vswq\") pod \"glance-cba5-account-create-update-4mwc6\" (UID: \"a01fa32b-fc5c-49ba-b102-b4751878380a\") " pod="openstack-kuttl-tests/glance-cba5-account-create-update-4mwc6" 
Jan 20 17:10:51 crc kubenswrapper[4558]: I0120 17:10:51.943837 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rd4km\" (UniqueName: \"kubernetes.io/projected/931d9a7a-aa04-42c3-a276-e71388a8b9d3-kube-api-access-rd4km\") pod \"root-account-create-update-f6ldh\" (UID: \"931d9a7a-aa04-42c3-a276-e71388a8b9d3\") " pod="openstack-kuttl-tests/root-account-create-update-f6ldh" Jan 20 17:10:51 crc kubenswrapper[4558]: E0120 17:10:51.944713 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 20 17:10:51 crc kubenswrapper[4558]: E0120 17:10:51.944762 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/07fab146-67be-42ba-b263-ee19fe95720b-config-data podName:07fab146-67be-42ba-b263-ee19fe95720b nodeName:}" failed. No retries permitted until 2026-01-20 17:10:52.444747765 +0000 UTC m=+1746.205085722 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/07fab146-67be-42ba-b263-ee19fe95720b-config-data") pod "rabbitmq-server-0" (UID: "07fab146-67be-42ba-b263-ee19fe95720b") : configmap "rabbitmq-config-data" not found Jan 20 17:10:51 crc kubenswrapper[4558]: I0120 17:10:51.945152 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/931d9a7a-aa04-42c3-a276-e71388a8b9d3-operator-scripts\") pod \"root-account-create-update-f6ldh\" (UID: \"931d9a7a-aa04-42c3-a276-e71388a8b9d3\") " pod="openstack-kuttl-tests/root-account-create-update-f6ldh" Jan 20 17:10:51 crc kubenswrapper[4558]: I0120 17:10:51.978001 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rd4km\" (UniqueName: \"kubernetes.io/projected/931d9a7a-aa04-42c3-a276-e71388a8b9d3-kube-api-access-rd4km\") pod \"root-account-create-update-f6ldh\" (UID: \"931d9a7a-aa04-42c3-a276-e71388a8b9d3\") " pod="openstack-kuttl-tests/root-account-create-update-f6ldh" Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.000954 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.033721 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-cba5-account-create-update-4mwc6"] Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.041916 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-f6ldh" Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.045985 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a01fa32b-fc5c-49ba-b102-b4751878380a-operator-scripts\") pod \"glance-cba5-account-create-update-4mwc6\" (UID: \"a01fa32b-fc5c-49ba-b102-b4751878380a\") " pod="openstack-kuttl-tests/glance-cba5-account-create-update-4mwc6" Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.046031 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2vswq\" (UniqueName: \"kubernetes.io/projected/a01fa32b-fc5c-49ba-b102-b4751878380a-kube-api-access-2vswq\") pod \"glance-cba5-account-create-update-4mwc6\" (UID: \"a01fa32b-fc5c-49ba-b102-b4751878380a\") " pod="openstack-kuttl-tests/glance-cba5-account-create-update-4mwc6" Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.046112 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/32721dbf-d5cf-43d4-9ddc-ad1664d17869-operator-scripts\") pod \"placement-64d3-account-create-update-h72jc\" (UID: \"32721dbf-d5cf-43d4-9ddc-ad1664d17869\") " pod="openstack-kuttl-tests/placement-64d3-account-create-update-h72jc" Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.046131 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j84wj\" (UniqueName: \"kubernetes.io/projected/32721dbf-d5cf-43d4-9ddc-ad1664d17869-kube-api-access-j84wj\") pod \"placement-64d3-account-create-update-h72jc\" (UID: \"32721dbf-d5cf-43d4-9ddc-ad1664d17869\") " pod="openstack-kuttl-tests/placement-64d3-account-create-update-h72jc" Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.046697 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a01fa32b-fc5c-49ba-b102-b4751878380a-operator-scripts\") pod \"glance-cba5-account-create-update-4mwc6\" (UID: \"a01fa32b-fc5c-49ba-b102-b4751878380a\") " pod="openstack-kuttl-tests/glance-cba5-account-create-update-4mwc6" Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.091222 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-64d3-account-create-update-h72jc"] Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.112814 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2vswq\" (UniqueName: \"kubernetes.io/projected/a01fa32b-fc5c-49ba-b102-b4751878380a-kube-api-access-2vswq\") pod \"glance-cba5-account-create-update-4mwc6\" (UID: \"a01fa32b-fc5c-49ba-b102-b4751878380a\") " pod="openstack-kuttl-tests/glance-cba5-account-create-update-4mwc6" Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.128319 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.128603 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-scheduler-0" podUID="1ec5ef72-7c69-4be3-974d-c020ddfea4f7" containerName="cinder-scheduler" containerID="cri-o://f1e7bbc89f40090d398e96e1f8922de73e690cb868b93535a373edcd14aedc39" gracePeriod=30 Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.129053 4558 kuberuntime_container.go:808] "Killing container 
with a grace period" pod="openstack-kuttl-tests/cinder-scheduler-0" podUID="1ec5ef72-7c69-4be3-974d-c020ddfea4f7" containerName="probe" containerID="cri-o://2ad54bc63fc4167f1a6668ac58b0ad56798eb8ff76a988eb75b07766de4d421c" gracePeriod=30 Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.150589 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/32721dbf-d5cf-43d4-9ddc-ad1664d17869-operator-scripts\") pod \"placement-64d3-account-create-update-h72jc\" (UID: \"32721dbf-d5cf-43d4-9ddc-ad1664d17869\") " pod="openstack-kuttl-tests/placement-64d3-account-create-update-h72jc" Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.150633 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j84wj\" (UniqueName: \"kubernetes.io/projected/32721dbf-d5cf-43d4-9ddc-ad1664d17869-kube-api-access-j84wj\") pod \"placement-64d3-account-create-update-h72jc\" (UID: \"32721dbf-d5cf-43d4-9ddc-ad1664d17869\") " pod="openstack-kuttl-tests/placement-64d3-account-create-update-h72jc" Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.151559 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/32721dbf-d5cf-43d4-9ddc-ad1664d17869-operator-scripts\") pod \"placement-64d3-account-create-update-h72jc\" (UID: \"32721dbf-d5cf-43d4-9ddc-ad1664d17869\") " pod="openstack-kuttl-tests/placement-64d3-account-create-update-h72jc" Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.190096 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-q5wjq"] Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.190754 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-cba5-account-create-update-4mwc6" Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.194328 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j84wj\" (UniqueName: \"kubernetes.io/projected/32721dbf-d5cf-43d4-9ddc-ad1664d17869-kube-api-access-j84wj\") pod \"placement-64d3-account-create-update-h72jc\" (UID: \"32721dbf-d5cf-43d4-9ddc-ad1664d17869\") " pod="openstack-kuttl-tests/placement-64d3-account-create-update-h72jc" Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.209230 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-q5wjq"] Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.218822 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-64d3-account-create-update-h72jc" Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.224861 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.225101 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-api-0" podUID="ee147895-0bad-4dc1-a348-1be3348a7180" containerName="cinder-api-log" containerID="cri-o://0376c421f7190fd015b8795150f665866c227b4ec1aa2af3ff3ba419848ca268" gracePeriod=30 Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.225529 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-api-0" podUID="ee147895-0bad-4dc1-a348-1be3348a7180" containerName="cinder-api" containerID="cri-o://8465d6a9372429cd9148909a838d7378903121f9ff9f0fb73700c98d620d85b1" gracePeriod=30 Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.232899 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-64d3-account-create-update-gx7gt"] Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.249825 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-456e-account-create-update-fvc6f"] Jan 20 17:10:52 crc kubenswrapper[4558]: E0120 17:10:52.250519 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d6dfb4c-9a8d-405e-8ffd-288e446998cf" containerName="openstackclient" Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.250536 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d6dfb4c-9a8d-405e-8ffd-288e446998cf" containerName="openstackclient" Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.250727 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="6d6dfb4c-9a8d-405e-8ffd-288e446998cf" containerName="openstackclient" Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.251688 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-456e-account-create-update-fvc6f" Jan 20 17:10:52 crc kubenswrapper[4558]: E0120 17:10:52.253058 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:10:52 crc kubenswrapper[4558]: E0120 17:10:52.253110 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f98dbc6f-bd60-4064-b0d8-947d4080b9ec-combined-ca-bundle podName:f98dbc6f-bd60-4064-b0d8-947d4080b9ec nodeName:}" failed. No retries permitted until 2026-01-20 17:10:53.253095409 +0000 UTC m=+1747.013433376 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/f98dbc6f-bd60-4064-b0d8-947d4080b9ec-combined-ca-bundle") pod "ovsdbserver-nb-0" (UID: "f98dbc6f-bd60-4064-b0d8-947d4080b9ec") : secret "combined-ca-bundle" not found Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.280727 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-db-secret" Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.306018 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/placement-64d3-account-create-update-gx7gt"] Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.398071 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qvz5m\" (UniqueName: \"kubernetes.io/projected/8218833e-265b-448d-bc76-550d3c1d213d-kube-api-access-qvz5m\") pod \"neutron-456e-account-create-update-fvc6f\" (UID: \"8218833e-265b-448d-bc76-550d3c1d213d\") " pod="openstack-kuttl-tests/neutron-456e-account-create-update-fvc6f" Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.398442 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8218833e-265b-448d-bc76-550d3c1d213d-operator-scripts\") pod \"neutron-456e-account-create-update-fvc6f\" (UID: \"8218833e-265b-448d-bc76-550d3c1d213d\") " pod="openstack-kuttl-tests/neutron-456e-account-create-update-fvc6f" Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.501912 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-456e-account-create-update-fvc6f"] Jan 20 17:10:52 crc kubenswrapper[4558]: E0120 17:10:52.519804 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 20 17:10:52 crc kubenswrapper[4558]: E0120 17:10:52.519882 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/07fab146-67be-42ba-b263-ee19fe95720b-config-data podName:07fab146-67be-42ba-b263-ee19fe95720b nodeName:}" failed. No retries permitted until 2026-01-20 17:10:53.519856601 +0000 UTC m=+1747.280194568 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/07fab146-67be-42ba-b263-ee19fe95720b-config-data") pod "rabbitmq-server-0" (UID: "07fab146-67be-42ba-b263-ee19fe95720b") : configmap "rabbitmq-config-data" not found Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.525066 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8218833e-265b-448d-bc76-550d3c1d213d-operator-scripts\") pod \"neutron-456e-account-create-update-fvc6f\" (UID: \"8218833e-265b-448d-bc76-550d3c1d213d\") " pod="openstack-kuttl-tests/neutron-456e-account-create-update-fvc6f" Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.525205 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qvz5m\" (UniqueName: \"kubernetes.io/projected/8218833e-265b-448d-bc76-550d3c1d213d-kube-api-access-qvz5m\") pod \"neutron-456e-account-create-update-fvc6f\" (UID: \"8218833e-265b-448d-bc76-550d3c1d213d\") " pod="openstack-kuttl-tests/neutron-456e-account-create-update-fvc6f" Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.526836 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8218833e-265b-448d-bc76-550d3c1d213d-operator-scripts\") pod \"neutron-456e-account-create-update-fvc6f\" (UID: \"8218833e-265b-448d-bc76-550d3c1d213d\") " pod="openstack-kuttl-tests/neutron-456e-account-create-update-fvc6f" Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.562298 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-cba5-account-create-update-wsmw9"] Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.669311 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="05bee7dd-6a89-4bbd-a097-2f2c66d038e7" path="/var/lib/kubelet/pods/05bee7dd-6a89-4bbd-a097-2f2c66d038e7/volumes" Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.669967 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="500a1e2f-ae7f-41bf-9b31-c9c69386c774" path="/var/lib/kubelet/pods/500a1e2f-ae7f-41bf-9b31-c9c69386c774/volumes" Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.670564 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-cba5-account-create-update-wsmw9"] Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.687788 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qvz5m\" (UniqueName: \"kubernetes.io/projected/8218833e-265b-448d-bc76-550d3c1d213d-kube-api-access-qvz5m\") pod \"neutron-456e-account-create-update-fvc6f\" (UID: \"8218833e-265b-448d-bc76-550d3c1d213d\") " pod="openstack-kuttl-tests/neutron-456e-account-create-update-fvc6f" Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.717144 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-6a24-account-create-update-cvtmm"] Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.718561 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-6a24-account-create-update-cvtmm" Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.748397 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-db-secret" Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.749058 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-b308-account-create-update-wlh8l"] Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.760639 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9b8664b1-e050-479b-b965-aed1a70e4bd1-operator-scripts\") pod \"cinder-6a24-account-create-update-cvtmm\" (UID: \"9b8664b1-e050-479b-b965-aed1a70e4bd1\") " pod="openstack-kuttl-tests/cinder-6a24-account-create-update-cvtmm" Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.760807 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h4mh6\" (UniqueName: \"kubernetes.io/projected/9b8664b1-e050-479b-b965-aed1a70e4bd1-kube-api-access-h4mh6\") pod \"cinder-6a24-account-create-update-cvtmm\" (UID: \"9b8664b1-e050-479b-b965-aed1a70e4bd1\") " pod="openstack-kuttl-tests/cinder-6a24-account-create-update-cvtmm" Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.769819 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-b308-account-create-update-wlh8l" Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.779495 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-0eec-account-create-update-2btsb"] Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.780801 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-0eec-account-create-update-2btsb" Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.784494 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-db-secret" Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.806625 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-api-db-secret" Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.831320 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-6a24-account-create-update-cvtmm"] Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.848829 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-0eec-account-create-update-2btsb"] Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.862652 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9b8664b1-e050-479b-b965-aed1a70e4bd1-operator-scripts\") pod \"cinder-6a24-account-create-update-cvtmm\" (UID: \"9b8664b1-e050-479b-b965-aed1a70e4bd1\") " pod="openstack-kuttl-tests/cinder-6a24-account-create-update-cvtmm" Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.862776 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gs6pf\" (UniqueName: \"kubernetes.io/projected/99812c8d-c18f-48d5-a063-deb9462a904b-kube-api-access-gs6pf\") pod \"nova-cell1-0eec-account-create-update-2btsb\" (UID: \"99812c8d-c18f-48d5-a063-deb9462a904b\") " pod="openstack-kuttl-tests/nova-cell1-0eec-account-create-update-2btsb" Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.862831 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4v2x5\" (UniqueName: \"kubernetes.io/projected/12dec63c-ec5c-4327-b1ec-aa9ee39acf82-kube-api-access-4v2x5\") pod \"nova-api-b308-account-create-update-wlh8l\" (UID: \"12dec63c-ec5c-4327-b1ec-aa9ee39acf82\") " pod="openstack-kuttl-tests/nova-api-b308-account-create-update-wlh8l" Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.862918 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/12dec63c-ec5c-4327-b1ec-aa9ee39acf82-operator-scripts\") pod \"nova-api-b308-account-create-update-wlh8l\" (UID: \"12dec63c-ec5c-4327-b1ec-aa9ee39acf82\") " pod="openstack-kuttl-tests/nova-api-b308-account-create-update-wlh8l" Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.862943 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h4mh6\" (UniqueName: \"kubernetes.io/projected/9b8664b1-e050-479b-b965-aed1a70e4bd1-kube-api-access-h4mh6\") pod \"cinder-6a24-account-create-update-cvtmm\" (UID: \"9b8664b1-e050-479b-b965-aed1a70e4bd1\") " pod="openstack-kuttl-tests/cinder-6a24-account-create-update-cvtmm" Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.862970 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/99812c8d-c18f-48d5-a063-deb9462a904b-operator-scripts\") pod \"nova-cell1-0eec-account-create-update-2btsb\" (UID: \"99812c8d-c18f-48d5-a063-deb9462a904b\") " pod="openstack-kuttl-tests/nova-cell1-0eec-account-create-update-2btsb" Jan 20 17:10:52 crc 
kubenswrapper[4558]: I0120 17:10:52.863721 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9b8664b1-e050-479b-b965-aed1a70e4bd1-operator-scripts\") pod \"cinder-6a24-account-create-update-cvtmm\" (UID: \"9b8664b1-e050-479b-b965-aed1a70e4bd1\") " pod="openstack-kuttl-tests/cinder-6a24-account-create-update-cvtmm" Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.866386 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-b308-account-create-update-wlh8l"] Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.875732 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-456e-account-create-update-6vj76"] Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.895016 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-456e-account-create-update-fvc6f" Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.909389 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-456e-account-create-update-6vj76"] Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.927586 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.927967 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovsdbserver-sb-0" podUID="6f1a8225-d143-4ae0-9301-8025f1b639e5" containerName="openstack-network-exporter" containerID="cri-o://27f4034104e6baa2769f9fc2dec19cbc295799af2b5e24f6d5ea225a19e1124b" gracePeriod=300 Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.952915 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.953507 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovn-northd-0" podUID="c6a5b342-eaf2-408c-828e-9bc0bf10d09e" containerName="ovn-northd" containerID="cri-o://14a9e266c3e7805021dc147f925c01b6b1d5dec0c4a69dce9222790ed28a80b1" gracePeriod=30 Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.953646 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovn-northd-0" podUID="c6a5b342-eaf2-408c-828e-9bc0bf10d09e" containerName="openstack-network-exporter" containerID="cri-o://26c2170a07662d397db2cd598e3f8c54887440864fb9753f04f3060866b77ade" gracePeriod=30 Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.965357 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gs6pf\" (UniqueName: \"kubernetes.io/projected/99812c8d-c18f-48d5-a063-deb9462a904b-kube-api-access-gs6pf\") pod \"nova-cell1-0eec-account-create-update-2btsb\" (UID: \"99812c8d-c18f-48d5-a063-deb9462a904b\") " pod="openstack-kuttl-tests/nova-cell1-0eec-account-create-update-2btsb" Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.965422 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4v2x5\" (UniqueName: \"kubernetes.io/projected/12dec63c-ec5c-4327-b1ec-aa9ee39acf82-kube-api-access-4v2x5\") pod \"nova-api-b308-account-create-update-wlh8l\" (UID: \"12dec63c-ec5c-4327-b1ec-aa9ee39acf82\") " pod="openstack-kuttl-tests/nova-api-b308-account-create-update-wlh8l" Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.965491 4558 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/12dec63c-ec5c-4327-b1ec-aa9ee39acf82-operator-scripts\") pod \"nova-api-b308-account-create-update-wlh8l\" (UID: \"12dec63c-ec5c-4327-b1ec-aa9ee39acf82\") " pod="openstack-kuttl-tests/nova-api-b308-account-create-update-wlh8l" Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.965521 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/99812c8d-c18f-48d5-a063-deb9462a904b-operator-scripts\") pod \"nova-cell1-0eec-account-create-update-2btsb\" (UID: \"99812c8d-c18f-48d5-a063-deb9462a904b\") " pod="openstack-kuttl-tests/nova-cell1-0eec-account-create-update-2btsb" Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.966282 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/99812c8d-c18f-48d5-a063-deb9462a904b-operator-scripts\") pod \"nova-cell1-0eec-account-create-update-2btsb\" (UID: \"99812c8d-c18f-48d5-a063-deb9462a904b\") " pod="openstack-kuttl-tests/nova-cell1-0eec-account-create-update-2btsb" Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.967140 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/12dec63c-ec5c-4327-b1ec-aa9ee39acf82-operator-scripts\") pod \"nova-api-b308-account-create-update-wlh8l\" (UID: \"12dec63c-ec5c-4327-b1ec-aa9ee39acf82\") " pod="openstack-kuttl-tests/nova-api-b308-account-create-update-wlh8l" Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.968826 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h4mh6\" (UniqueName: \"kubernetes.io/projected/9b8664b1-e050-479b-b965-aed1a70e4bd1-kube-api-access-h4mh6\") pod \"cinder-6a24-account-create-update-cvtmm\" (UID: \"9b8664b1-e050-479b-b965-aed1a70e4bd1\") " pod="openstack-kuttl-tests/cinder-6a24-account-create-update-cvtmm" Jan 20 17:10:52 crc kubenswrapper[4558]: I0120 17:10:52.985233 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-6a24-account-create-update-zn7sv"] Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.003387 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-6hv7n"] Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.039012 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-b308-account-create-update-mhczn"] Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.042580 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-b308-account-create-update-mhczn"] Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.056740 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4v2x5\" (UniqueName: \"kubernetes.io/projected/12dec63c-ec5c-4327-b1ec-aa9ee39acf82-kube-api-access-4v2x5\") pod \"nova-api-b308-account-create-update-wlh8l\" (UID: \"12dec63c-ec5c-4327-b1ec-aa9ee39acf82\") " pod="openstack-kuttl-tests/nova-api-b308-account-create-update-wlh8l" Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.069641 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gs6pf\" (UniqueName: \"kubernetes.io/projected/99812c8d-c18f-48d5-a063-deb9462a904b-kube-api-access-gs6pf\") pod 
\"nova-cell1-0eec-account-create-update-2btsb\" (UID: \"99812c8d-c18f-48d5-a063-deb9462a904b\") " pod="openstack-kuttl-tests/nova-cell1-0eec-account-create-update-2btsb" Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.070003 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-6hv7n"] Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.090861 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-6a24-account-create-update-zn7sv"] Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.092696 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-6a24-account-create-update-cvtmm" Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.113946 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-0"] Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.116398 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/ovn-northd-0" podUID="c6a5b342-eaf2-408c-828e-9bc0bf10d09e" containerName="ovn-northd" probeResult="failure" output=< Jan 20 17:10:53 crc kubenswrapper[4558]: 2026-01-20T17:10:53Z|00001|unixctl|WARN|failed to connect to /tmp/ovn-northd.1.ctl Jan 20 17:10:53 crc kubenswrapper[4558]: ovn-appctl: cannot connect to "/tmp/ovn-northd.1.ctl" (No such file or directory) Jan 20 17:10:53 crc kubenswrapper[4558]: > Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.135333 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.135955 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovsdbserver-nb-0" podUID="f98dbc6f-bd60-4064-b0d8-947d4080b9ec" containerName="openstack-network-exporter" containerID="cri-o://7cbff1daf53c24e61424f2057794575c2b66d928a2e2c492b88b52dfa56a5ee3" gracePeriod=300 Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.137936 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-b308-account-create-update-wlh8l" Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.146492 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-db-sync-r7zgt"] Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.163849 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-0eec-account-create-update-2btsb" Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.164638 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-db-sync-r7zgt"] Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.176455 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-77c1-account-create-update-9lp88"] Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.225395 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-0eec-account-create-update-j75x4"] Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.247154 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-f6ldh" event={"ID":"931d9a7a-aa04-42c3-a276-e71388a8b9d3","Type":"ContainerStarted","Data":"8360fe6b2d64fd248c22e3565b6e32c9b0d66b218b8a35e48394563fc743ecf5"} Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.275292 4558 generic.go:334] "Generic (PLEG): container finished" podID="ee147895-0bad-4dc1-a348-1be3348a7180" containerID="0376c421f7190fd015b8795150f665866c227b4ec1aa2af3ff3ba419848ca268" exitCode=143 Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.275393 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"ee147895-0bad-4dc1-a348-1be3348a7180","Type":"ContainerDied","Data":"0376c421f7190fd015b8795150f665866c227b4ec1aa2af3ff3ba419848ca268"} Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.281716 4558 generic.go:334] "Generic (PLEG): container finished" podID="c6a5b342-eaf2-408c-828e-9bc0bf10d09e" containerID="26c2170a07662d397db2cd598e3f8c54887440864fb9753f04f3060866b77ade" exitCode=2 Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.281957 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"c6a5b342-eaf2-408c-828e-9bc0bf10d09e","Type":"ContainerDied","Data":"26c2170a07662d397db2cd598e3f8c54887440864fb9753f04f3060866b77ade"} Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.303072 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell0-77c1-account-create-update-9lp88"] Jan 20 17:10:53 crc kubenswrapper[4558]: E0120 17:10:53.314049 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:10:53 crc kubenswrapper[4558]: E0120 17:10:53.314147 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f98dbc6f-bd60-4064-b0d8-947d4080b9ec-combined-ca-bundle podName:f98dbc6f-bd60-4064-b0d8-947d4080b9ec nodeName:}" failed. No retries permitted until 2026-01-20 17:10:55.314120649 +0000 UTC m=+1749.074458615 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/f98dbc6f-bd60-4064-b0d8-947d4080b9ec-combined-ca-bundle") pod "ovsdbserver-nb-0" (UID: "f98dbc6f-bd60-4064-b0d8-947d4080b9ec") : secret "combined-ca-bundle" not found Jan 20 17:10:53 crc kubenswrapper[4558]: E0120 17:10:53.317598 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Jan 20 17:10:53 crc kubenswrapper[4558]: E0120 17:10:53.317714 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/39403277-bf62-47c1-8e86-cdec59f2da7b-config-data podName:39403277-bf62-47c1-8e86-cdec59f2da7b nodeName:}" failed. No retries permitted until 2026-01-20 17:10:53.81769544 +0000 UTC m=+1747.578033408 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/39403277-bf62-47c1-8e86-cdec59f2da7b-config-data") pod "rabbitmq-cell1-server-0" (UID: "39403277-bf62-47c1-8e86-cdec59f2da7b") : configmap "rabbitmq-cell1-config-data" not found Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.329138 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-f6ldh"] Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.363266 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-0eec-account-create-update-j75x4"] Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.379346 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-db-sync-5rtqc"] Jan 20 17:10:53 crc kubenswrapper[4558]: E0120 17:10:53.406450 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:10:53 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:10:53 crc kubenswrapper[4558]: Jan 20 17:10:53 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:10:53 crc kubenswrapper[4558]: Jan 20 17:10:53 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:10:53 crc kubenswrapper[4558]: Jan 20 17:10:53 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:10:53 crc kubenswrapper[4558]: Jan 20 17:10:53 crc kubenswrapper[4558]: if [ -n "placement" ]; then Jan 20 17:10:53 crc kubenswrapper[4558]: GRANT_DATABASE="placement" Jan 20 17:10:53 crc kubenswrapper[4558]: else Jan 20 17:10:53 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:10:53 crc kubenswrapper[4558]: fi Jan 20 17:10:53 crc kubenswrapper[4558]: Jan 20 17:10:53 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:10:53 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:10:53 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:10:53 crc kubenswrapper[4558]: # 3. 
create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:10:53 crc kubenswrapper[4558]: # support updates Jan 20 17:10:53 crc kubenswrapper[4558]: Jan 20 17:10:53 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.406529 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-db-sync-5rtqc"] Jan 20 17:10:53 crc kubenswrapper[4558]: E0120 17:10:53.411275 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"placement-db-secret\\\" not found\"" pod="openstack-kuttl-tests/placement-64d3-account-create-update-h72jc" podUID="32721dbf-d5cf-43d4-9ddc-ad1664d17869" Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.421286 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell0-77c1-account-create-update-kgxm8"] Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.431876 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-77c1-account-create-update-kgxm8" Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.436682 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-db-secret" Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.458229 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-77c1-account-create-update-kgxm8"] Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.527363 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-txl8j\" (UniqueName: \"kubernetes.io/projected/5d4337cb-4c83-4c75-b4e4-3c7f16373356-kube-api-access-txl8j\") pod \"nova-cell0-77c1-account-create-update-kgxm8\" (UID: \"5d4337cb-4c83-4c75-b4e4-3c7f16373356\") " pod="openstack-kuttl-tests/nova-cell0-77c1-account-create-update-kgxm8" Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.527516 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5d4337cb-4c83-4c75-b4e4-3c7f16373356-operator-scripts\") pod \"nova-cell0-77c1-account-create-update-kgxm8\" (UID: \"5d4337cb-4c83-4c75-b4e4-3c7f16373356\") " pod="openstack-kuttl-tests/nova-cell0-77c1-account-create-update-kgxm8" Jan 20 17:10:53 crc kubenswrapper[4558]: E0120 17:10:53.527794 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 20 17:10:53 crc kubenswrapper[4558]: E0120 17:10:53.527843 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/07fab146-67be-42ba-b263-ee19fe95720b-config-data podName:07fab146-67be-42ba-b263-ee19fe95720b nodeName:}" failed. No retries permitted until 2026-01-20 17:10:55.52782483 +0000 UTC m=+1749.288162796 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/07fab146-67be-42ba-b263-ee19fe95720b-config-data") pod "rabbitmq-server-0" (UID: "07fab146-67be-42ba-b263-ee19fe95720b") : configmap "rabbitmq-config-data" not found Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.630872 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5d4337cb-4c83-4c75-b4e4-3c7f16373356-operator-scripts\") pod \"nova-cell0-77c1-account-create-update-kgxm8\" (UID: \"5d4337cb-4c83-4c75-b4e4-3c7f16373356\") " pod="openstack-kuttl-tests/nova-cell0-77c1-account-create-update-kgxm8" Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.631340 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-txl8j\" (UniqueName: \"kubernetes.io/projected/5d4337cb-4c83-4c75-b4e4-3c7f16373356-kube-api-access-txl8j\") pod \"nova-cell0-77c1-account-create-update-kgxm8\" (UID: \"5d4337cb-4c83-4c75-b4e4-3c7f16373356\") " pod="openstack-kuttl-tests/nova-cell0-77c1-account-create-update-kgxm8" Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.631860 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5d4337cb-4c83-4c75-b4e4-3c7f16373356-operator-scripts\") pod \"nova-cell0-77c1-account-create-update-kgxm8\" (UID: \"5d4337cb-4c83-4c75-b4e4-3c7f16373356\") " pod="openstack-kuttl-tests/nova-cell0-77c1-account-create-update-kgxm8" Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.643401 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-db-sync-zgncw"] Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.648603 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/placement-db-sync-zgncw"] Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.651744 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-txl8j\" (UniqueName: \"kubernetes.io/projected/5d4337cb-4c83-4c75-b4e4-3c7f16373356-kube-api-access-txl8j\") pod \"nova-cell0-77c1-account-create-update-kgxm8\" (UID: \"5d4337cb-4c83-4c75-b4e4-3c7f16373356\") " pod="openstack-kuttl-tests/nova-cell0-77c1-account-create-update-kgxm8" Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.667890 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-db-sync-fs4x6"] Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.701307 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-db-sync-fs4x6"] Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.724267 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-storage-0"] Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.724763 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="fc023815-924a-4a04-bad2-5fc862ef20ed" containerName="account-server" containerID="cri-o://a8fa945168b61b5421cd9a0b28d23083787cacb8489bf80416f7599e933bb729" gracePeriod=30 Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.725127 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="fc023815-924a-4a04-bad2-5fc862ef20ed" containerName="swift-recon-cron" 
containerID="cri-o://e1ee27aa87a720002e5126efcfb765b302b81489973de33a9ecf856e1da6cf32" gracePeriod=30 Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.725196 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="fc023815-924a-4a04-bad2-5fc862ef20ed" containerName="rsync" containerID="cri-o://9410708314b2bfc60099a915bc2c1d7c39d1320c24755c9f69a3d55337b01e9b" gracePeriod=30 Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.725233 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="fc023815-924a-4a04-bad2-5fc862ef20ed" containerName="object-expirer" containerID="cri-o://9528266b3b97d400ebef502af6d8130d7c0559f6df040c83585a2f746292ef2d" gracePeriod=30 Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.725266 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="fc023815-924a-4a04-bad2-5fc862ef20ed" containerName="object-updater" containerID="cri-o://7b43c5f22df4aacb3c09f2db534e5fb1e65e53019530050eed242dc948ad8a62" gracePeriod=30 Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.725295 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="fc023815-924a-4a04-bad2-5fc862ef20ed" containerName="object-auditor" containerID="cri-o://375019cf206eb74d303642688e63e85c3a2faecea6a7d087bd3a333841cd82c8" gracePeriod=30 Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.725326 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="fc023815-924a-4a04-bad2-5fc862ef20ed" containerName="object-replicator" containerID="cri-o://18ccc604f3f78f7668fb5ba7e36fe96edce4c2dcfa1c6e7165a65ee3388df062" gracePeriod=30 Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.725356 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="fc023815-924a-4a04-bad2-5fc862ef20ed" containerName="object-server" containerID="cri-o://3517eea5a519d4ed5dbc9c6b97d0c39a1746f6138a5b79baf3fd8a2d54e4293f" gracePeriod=30 Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.725402 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="fc023815-924a-4a04-bad2-5fc862ef20ed" containerName="container-updater" containerID="cri-o://9f3a700b66f96079e1d010bea1fc7bf937b11f9fea1048aaff6419fd900af7c5" gracePeriod=30 Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.725439 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="fc023815-924a-4a04-bad2-5fc862ef20ed" containerName="container-auditor" containerID="cri-o://8c2d36ea5a305d3a1da0f10e70884099eb310895225cdbcdf413bf2c005effc4" gracePeriod=30 Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.725475 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="fc023815-924a-4a04-bad2-5fc862ef20ed" containerName="container-replicator" containerID="cri-o://1656f891b22791ec6d9f940bf33e0630746d220bfffb0046c75f48d2afa2d782" gracePeriod=30 Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.725505 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" 
podUID="fc023815-924a-4a04-bad2-5fc862ef20ed" containerName="container-server" containerID="cri-o://d2f143dbca4d3796805d8445782c84acc22a2c02590f44e1ad34b26b6934656f" gracePeriod=30 Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.725531 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="fc023815-924a-4a04-bad2-5fc862ef20ed" containerName="account-reaper" containerID="cri-o://101d312b0bf3e3b73451053b69e1450a946df99e2bf194fa8970254181c3653e" gracePeriod=30 Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.725561 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="fc023815-924a-4a04-bad2-5fc862ef20ed" containerName="account-auditor" containerID="cri-o://0b1bf5c7f6bc3d5fc7206be9bc6c12aadefeeeb27348b35c50c441224ffe435b" gracePeriod=30 Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.725588 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="fc023815-924a-4a04-bad2-5fc862ef20ed" containerName="account-replicator" containerID="cri-o://e39b88b3c77bb6d2be10abb480b241402fde95d8a3a44afc3741d81cd38c9f4f" gracePeriod=30 Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.744031 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-db-sync-blmnq"] Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.761274 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-db-sync-blmnq"] Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.772798 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-cell-mapping-5lrhm"] Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.784261 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-cell-mapping-5lrhm"] Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.791575 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-cell-mapping-kn965"] Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.798110 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell0-cell-mapping-kn965"] Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.807803 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-64d3-account-create-update-h72jc"] Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.811796 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovsdbserver-sb-0" podUID="6f1a8225-d143-4ae0-9301-8025f1b639e5" containerName="ovsdbserver-sb" containerID="cri-o://0ed7f1c17ddf0fc44bcd37dd9e0ff1ad788674cd606ea41336b8a5c6132d841d" gracePeriod=300 Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.815796 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovsdbserver-nb-0" podUID="f98dbc6f-bd60-4064-b0d8-947d4080b9ec" containerName="ovsdbserver-nb" containerID="cri-o://ec0223d973099ec19b7266b05971f33d632f61aa218c28a0e77f251b638c66e8" gracePeriod=300 Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.822980 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.823203 4558 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="25737ece-fb84-4543-8c3a-94ffa7b8f095" containerName="glance-log" containerID="cri-o://166e1e0d14b2cb48e695c5ce11cca54d963c6250ddce1f9f936a358c8a5b3580" gracePeriod=30 Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.823328 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="25737ece-fb84-4543-8c3a-94ffa7b8f095" containerName="glance-httpd" containerID="cri-o://611dd2ec139015a415381aaa856813e6abfb158b3d9c4ce578651f256b7bef8b" gracePeriod=30 Jan 20 17:10:53 crc kubenswrapper[4558]: E0120 17:10:53.838052 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Jan 20 17:10:53 crc kubenswrapper[4558]: E0120 17:10:53.838099 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/39403277-bf62-47c1-8e86-cdec59f2da7b-config-data podName:39403277-bf62-47c1-8e86-cdec59f2da7b nodeName:}" failed. No retries permitted until 2026-01-20 17:10:54.83808506 +0000 UTC m=+1748.598423027 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/39403277-bf62-47c1-8e86-cdec59f2da7b-config-data") pod "rabbitmq-cell1-server-0" (UID: "39403277-bf62-47c1-8e86-cdec59f2da7b") : configmap "rabbitmq-cell1-config-data" not found Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.863845 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.864055 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d" containerName="glance-log" containerID="cri-o://3e0d89a98f1d6af7b5d9794c3b45f1a7d02d2b5e21f231ecf13088af12d9aed7" gracePeriod=30 Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.864379 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d" containerName="glance-httpd" containerID="cri-o://fcae9d3478f16c4ae798638653ce51dee0f0bdc70fd0867d5014ba53378f69ef" gracePeriod=30 Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.879980 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-b8dd4f99b-9gg5b"] Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.889233 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/placement-b8dd4f99b-9gg5b" podUID="043476ec-5392-41af-970c-89d20b6b30a5" containerName="placement-log" containerID="cri-o://a18b5b718a98c1d90904f2a89cfdb392cf52b6b547739f0cef3c92632d95c1e4" gracePeriod=30 Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.890471 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/placement-b8dd4f99b-9gg5b" podUID="043476ec-5392-41af-970c-89d20b6b30a5" containerName="placement-api" containerID="cri-o://d1ba5118e075c9076247767e8ee5b5f7ddae3519d2f382217f34408cb902f7ec" gracePeriod=30 Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.912202 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-cba5-account-create-update-4mwc6"] Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.932597 4558 
kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-859ff747c8-5c47j"] Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.933310 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-859ff747c8-5c47j" podUID="f5d75d5a-4202-477d-97a5-8d92bcdd13b9" containerName="neutron-api" containerID="cri-o://774b082f2f30d066f7547f9bb1d923817b7873ec854074ba60f13876f9205004" gracePeriod=30 Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.933766 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-859ff747c8-5c47j" podUID="f5d75d5a-4202-477d-97a5-8d92bcdd13b9" containerName="neutron-httpd" containerID="cri-o://a2dd29b89f91cc3800497edcfbd3636296ddb90d87379145a79cd11ac30957af" gracePeriod=30 Jan 20 17:10:53 crc kubenswrapper[4558]: E0120 17:10:53.943532 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:10:53 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:10:53 crc kubenswrapper[4558]: Jan 20 17:10:53 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:10:53 crc kubenswrapper[4558]: Jan 20 17:10:53 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:10:53 crc kubenswrapper[4558]: Jan 20 17:10:53 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:10:53 crc kubenswrapper[4558]: Jan 20 17:10:53 crc kubenswrapper[4558]: if [ -n "glance" ]; then Jan 20 17:10:53 crc kubenswrapper[4558]: GRANT_DATABASE="glance" Jan 20 17:10:53 crc kubenswrapper[4558]: else Jan 20 17:10:53 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:10:53 crc kubenswrapper[4558]: fi Jan 20 17:10:53 crc kubenswrapper[4558]: Jan 20 17:10:53 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:10:53 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:10:53 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:10:53 crc kubenswrapper[4558]: # 3. 
create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:10:53 crc kubenswrapper[4558]: # support updates Jan 20 17:10:53 crc kubenswrapper[4558]: Jan 20 17:10:53 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.945056 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-8xt6f"] Jan 20 17:10:53 crc kubenswrapper[4558]: E0120 17:10:53.946448 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"glance-db-secret\\\" not found\"" pod="openstack-kuttl-tests/glance-cba5-account-create-update-4mwc6" podUID="a01fa32b-fc5c-49ba-b102-b4751878380a" Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.996577 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-db-create-4cht4"] Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.996802 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-8xt6f"] Jan 20 17:10:53 crc kubenswrapper[4558]: I0120 17:10:53.996875 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-8xt6f" Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.024664 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-db-create-4cht4"] Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.042210 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-db-create-mcpzb"] Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.047152 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zzlnz\" (UniqueName: \"kubernetes.io/projected/b730bb71-b18e-4b97-8d6b-d4ac799d08fd-kube-api-access-zzlnz\") pod \"dnsmasq-dnsmasq-84b9f45d47-8xt6f\" (UID: \"b730bb71-b18e-4b97-8d6b-d4ac799d08fd\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-8xt6f" Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.047305 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b730bb71-b18e-4b97-8d6b-d4ac799d08fd-config\") pod \"dnsmasq-dnsmasq-84b9f45d47-8xt6f\" (UID: \"b730bb71-b18e-4b97-8d6b-d4ac799d08fd\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-8xt6f" Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.047584 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/b730bb71-b18e-4b97-8d6b-d4ac799d08fd-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-84b9f45d47-8xt6f\" (UID: \"b730bb71-b18e-4b97-8d6b-d4ac799d08fd\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-8xt6f" Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.054618 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/placement-db-create-mcpzb"] Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.066992 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-db-create-nq6nj"] Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.071599 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-db-create-nq6nj"] Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 
17:10:54.077331 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-3e15-account-create-update-v7rcr"] Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.087201 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-3e15-account-create-update-v7rcr"] Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.090887 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.105440 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-64d3-account-create-update-h72jc"] Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.110233 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-456e-account-create-update-fvc6f"] Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.114385 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.124924 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-db-create-8q2dt"] Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.129207 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-db-create-8q2dt"] Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.159177 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zzlnz\" (UniqueName: \"kubernetes.io/projected/b730bb71-b18e-4b97-8d6b-d4ac799d08fd-kube-api-access-zzlnz\") pod \"dnsmasq-dnsmasq-84b9f45d47-8xt6f\" (UID: \"b730bb71-b18e-4b97-8d6b-d4ac799d08fd\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-8xt6f" Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.159299 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b730bb71-b18e-4b97-8d6b-d4ac799d08fd-config\") pod \"dnsmasq-dnsmasq-84b9f45d47-8xt6f\" (UID: \"b730bb71-b18e-4b97-8d6b-d4ac799d08fd\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-8xt6f" Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.159361 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/b730bb71-b18e-4b97-8d6b-d4ac799d08fd-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-84b9f45d47-8xt6f\" (UID: \"b730bb71-b18e-4b97-8d6b-d4ac799d08fd\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-8xt6f" Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.161145 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b730bb71-b18e-4b97-8d6b-d4ac799d08fd-config\") pod \"dnsmasq-dnsmasq-84b9f45d47-8xt6f\" (UID: \"b730bb71-b18e-4b97-8d6b-d4ac799d08fd\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-8xt6f" Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.178769 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/b730bb71-b18e-4b97-8d6b-d4ac799d08fd-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-84b9f45d47-8xt6f\" (UID: \"b730bb71-b18e-4b97-8d6b-d4ac799d08fd\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-8xt6f" Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.186113 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack-kuttl-tests/barbican-api-757fb55df8-9qzm2"] Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.186394 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-api-757fb55df8-9qzm2" podUID="87001341-5a74-4796-a332-6d57b2cf11c9" containerName="barbican-api-log" containerID="cri-o://c38ffae2f71dd677618391bc8fe1b2be1a66a0d46ff78c2fe5bdd127277f6054" gracePeriod=30 Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.186727 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-api-757fb55df8-9qzm2" podUID="87001341-5a74-4796-a332-6d57b2cf11c9" containerName="barbican-api" containerID="cri-o://67a4a2b743f3eaffa244dd8f012af0a3fdc18477dc3e1252ee8aced329b34fda" gracePeriod=30 Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.198201 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zzlnz\" (UniqueName: \"kubernetes.io/projected/b730bb71-b18e-4b97-8d6b-d4ac799d08fd-kube-api-access-zzlnz\") pod \"dnsmasq-dnsmasq-84b9f45d47-8xt6f\" (UID: \"b730bb71-b18e-4b97-8d6b-d4ac799d08fd\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-8xt6f" Jan 20 17:10:54 crc kubenswrapper[4558]: E0120 17:10:54.206032 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:10:54 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:10:54 crc kubenswrapper[4558]: Jan 20 17:10:54 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:10:54 crc kubenswrapper[4558]: Jan 20 17:10:54 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:10:54 crc kubenswrapper[4558]: Jan 20 17:10:54 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:10:54 crc kubenswrapper[4558]: Jan 20 17:10:54 crc kubenswrapper[4558]: if [ -n "neutron" ]; then Jan 20 17:10:54 crc kubenswrapper[4558]: GRANT_DATABASE="neutron" Jan 20 17:10:54 crc kubenswrapper[4558]: else Jan 20 17:10:54 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:10:54 crc kubenswrapper[4558]: fi Jan 20 17:10:54 crc kubenswrapper[4558]: Jan 20 17:10:54 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:10:54 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:10:54 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:10:54 crc kubenswrapper[4558]: # 3. 
create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:10:54 crc kubenswrapper[4558]: # support updates Jan 20 17:10:54 crc kubenswrapper[4558]: Jan 20 17:10:54 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:10:54 crc kubenswrapper[4558]: E0120 17:10:54.207632 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"neutron-db-secret\\\" not found\"" pod="openstack-kuttl-tests/neutron-456e-account-create-update-fvc6f" podUID="8218833e-265b-448d-bc76-550d3c1d213d" Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.217571 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/rabbitmq-server-0" podUID="07fab146-67be-42ba-b263-ee19fe95720b" containerName="rabbitmq" containerID="cri-o://ed0d0cf0a577509bde5ed7f30ee7d1552be82db06a4009c9c9826bf9daa8477a" gracePeriod=604800 Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.234234 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-cba5-account-create-update-4mwc6"] Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.257998 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.258395 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1" containerName="nova-api-log" containerID="cri-o://3961204b11e978cd9995166f21ade05036318ca560f84a52206089ba8789ed9d" gracePeriod=30 Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.258699 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1" containerName="nova-api-api" containerID="cri-o://013d2b8016fc21c333aa7e40dd675884b8161eccaa5a0b7a7ee016c3b3fef132" gracePeriod=30 Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.314490 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-db-create-vsd8c"] Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.347565 4558 generic.go:334] "Generic (PLEG): container finished" podID="6f1a8225-d143-4ae0-9301-8025f1b639e5" containerID="27f4034104e6baa2769f9fc2dec19cbc295799af2b5e24f6d5ea225a19e1124b" exitCode=2 Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.347597 4558 generic.go:334] "Generic (PLEG): container finished" podID="6f1a8225-d143-4ae0-9301-8025f1b639e5" containerID="0ed7f1c17ddf0fc44bcd37dd9e0ff1ad788674cd606ea41336b8a5c6132d841d" exitCode=0 Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.347665 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"6f1a8225-d143-4ae0-9301-8025f1b639e5","Type":"ContainerDied","Data":"27f4034104e6baa2769f9fc2dec19cbc295799af2b5e24f6d5ea225a19e1124b"} Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.347743 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"6f1a8225-d143-4ae0-9301-8025f1b639e5","Type":"ContainerDied","Data":"0ed7f1c17ddf0fc44bcd37dd9e0ff1ad788674cd606ea41336b8a5c6132d841d"} Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.349991 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-db-create-vsd8c"] Jan 20 17:10:54 crc 
kubenswrapper[4558]: I0120 17:10:54.359150 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-86d9c4d694-78ch5"] Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.359415 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-keystone-listener-86d9c4d694-78ch5" podUID="209ec6bc-170f-4c8a-ad7e-e441ace95d1b" containerName="barbican-keystone-listener-log" containerID="cri-o://f36f1742d8b3097d9223becd7320632e27297558f65c436f5ad091f8b5074cbd" gracePeriod=30 Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.359803 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-keystone-listener-86d9c4d694-78ch5" podUID="209ec6bc-170f-4c8a-ad7e-e441ace95d1b" containerName="barbican-keystone-listener" containerID="cri-o://63f037fefd6327562c7c2f8bf9caa685258bcc48a23627261cd9883239f176f0" gracePeriod=30 Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.384603 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.384885 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="5560f6a4-fc05-4d97-8496-b87804dfab99" containerName="nova-metadata-log" containerID="cri-o://737bdc74f2c709ec376fc9204c898d8cba1280d8792f62f6a65923c927c2ba2f" gracePeriod=30 Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.385411 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="5560f6a4-fc05-4d97-8496-b87804dfab99" containerName="nova-metadata-metadata" containerID="cri-o://c1a2c6fb29683dd75096376810ff0f53fdd799813b29ba97b56d0f8d5b5eac42" gracePeriod=30 Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.395022 4558 generic.go:334] "Generic (PLEG): container finished" podID="f98dbc6f-bd60-4064-b0d8-947d4080b9ec" containerID="7cbff1daf53c24e61424f2057794575c2b66d928a2e2c492b88b52dfa56a5ee3" exitCode=2 Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.395055 4558 generic.go:334] "Generic (PLEG): container finished" podID="f98dbc6f-bd60-4064-b0d8-947d4080b9ec" containerID="ec0223d973099ec19b7266b05971f33d632f61aa218c28a0e77f251b638c66e8" exitCode=0 Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.395133 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"f98dbc6f-bd60-4064-b0d8-947d4080b9ec","Type":"ContainerDied","Data":"7cbff1daf53c24e61424f2057794575c2b66d928a2e2c492b88b52dfa56a5ee3"} Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.395184 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"f98dbc6f-bd60-4064-b0d8-947d4080b9ec","Type":"ContainerDied","Data":"ec0223d973099ec19b7266b05971f33d632f61aa218c28a0e77f251b638c66e8"} Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.402119 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-db-create-gkhg5"] Jan 20 17:10:54 crc kubenswrapper[4558]: E0120 17:10:54.404382 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ec0223d973099ec19b7266b05971f33d632f61aa218c28a0e77f251b638c66e8 is running failed: container process not found" 
containerID="ec0223d973099ec19b7266b05971f33d632f61aa218c28a0e77f251b638c66e8" cmd=["/usr/bin/pidof","ovsdb-server"] Jan 20 17:10:54 crc kubenswrapper[4558]: E0120 17:10:54.407274 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ec0223d973099ec19b7266b05971f33d632f61aa218c28a0e77f251b638c66e8 is running failed: container process not found" containerID="ec0223d973099ec19b7266b05971f33d632f61aa218c28a0e77f251b638c66e8" cmd=["/usr/bin/pidof","ovsdb-server"] Jan 20 17:10:54 crc kubenswrapper[4558]: E0120 17:10:54.408287 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ec0223d973099ec19b7266b05971f33d632f61aa218c28a0e77f251b638c66e8 is running failed: container process not found" containerID="ec0223d973099ec19b7266b05971f33d632f61aa218c28a0e77f251b638c66e8" cmd=["/usr/bin/pidof","ovsdb-server"] Jan 20 17:10:54 crc kubenswrapper[4558]: E0120 17:10:54.408320 4558 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ec0223d973099ec19b7266b05971f33d632f61aa218c28a0e77f251b638c66e8 is running failed: container process not found" probeType="Readiness" pod="openstack-kuttl-tests/ovsdbserver-nb-0" podUID="f98dbc6f-bd60-4064-b0d8-947d4080b9ec" containerName="ovsdbserver-nb" Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.417359 4558 generic.go:334] "Generic (PLEG): container finished" podID="f5d75d5a-4202-477d-97a5-8d92bcdd13b9" containerID="a2dd29b89f91cc3800497edcfbd3636296ddb90d87379145a79cd11ac30957af" exitCode=0 Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.417426 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-859ff747c8-5c47j" event={"ID":"f5d75d5a-4202-477d-97a5-8d92bcdd13b9","Type":"ContainerDied","Data":"a2dd29b89f91cc3800497edcfbd3636296ddb90d87379145a79cd11ac30957af"} Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.418960 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-6a24-account-create-update-cvtmm"] Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.424419 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-db-create-gkhg5"] Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.432249 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-worker-7bcc85c7d9-jm8qh"] Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.432521 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-worker-7bcc85c7d9-jm8qh" podUID="faade961-fce0-4ad6-a039-4ba83a95dd68" containerName="barbican-worker-log" containerID="cri-o://8bdce8800380698b3525e8fafce4e103f8ffe07d9a3881dbd922429612c13082" gracePeriod=30 Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.432906 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-worker-7bcc85c7d9-jm8qh" podUID="faade961-fce0-4ad6-a039-4ba83a95dd68" containerName="barbican-worker" containerID="cri-o://987117f4a057251c5482c4e15328842920fdcea58544c68a18a70dee0c0bd727" gracePeriod=30 Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.437569 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-0eec-account-create-update-2btsb"] Jan 20 17:10:54 crc 
kubenswrapper[4558]: I0120 17:10:54.450229 4558 generic.go:334] "Generic (PLEG): container finished" podID="6d6dfb4c-9a8d-405e-8ffd-288e446998cf" containerID="be9c2c4b147215f87ea040240daeaf63995b7b21012666e0eb6d43c0ceb36adc" exitCode=137 Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.450501 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.450944 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" podUID="e30a943a-f753-41fc-adc8-03822aa712c3" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://4009b12e702e655b643aa02ca67d095804a9ddf483a6488a64d2205988f9a03a" gracePeriod=30 Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.457605 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-db-create-hs5v4"] Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.461552 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/openstack-cell1-galera-0" podUID="01d65c2e-73b2-4d9e-a659-c384a3a3e2fe" containerName="galera" containerID="cri-o://7f4378e692fe617442cc1cc02c71c9e5beb567c1bbfc8f7fa7801b7b523d4e8b" gracePeriod=30 Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.462868 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-77c1-account-create-update-kgxm8" Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.480789 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-8xt6f" Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.483200 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-db-create-hs5v4"] Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.491382 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-db-create-k57tt"] Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.497245 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-b308-account-create-update-wlh8l"] Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.497437 4558 generic.go:334] "Generic (PLEG): container finished" podID="043476ec-5392-41af-970c-89d20b6b30a5" containerID="a18b5b718a98c1d90904f2a89cfdb392cf52b6b547739f0cef3c92632d95c1e4" exitCode=143 Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.497551 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-b8dd4f99b-9gg5b" event={"ID":"043476ec-5392-41af-970c-89d20b6b30a5","Type":"ContainerDied","Data":"a18b5b718a98c1d90904f2a89cfdb392cf52b6b547739f0cef3c92632d95c1e4"} Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.502413 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell0-db-create-k57tt"] Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.511957 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-77c1-account-create-update-kgxm8"] Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.522457 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-0"] Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.542881 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack-kuttl-tests/nova-cell1-conductor-db-sync-7l69k"] Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.552724 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.552891 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podUID="937396e0-a574-4597-bafc-bf1d8a909d3c" containerName="nova-cell1-conductor-conductor" containerID="cri-o://51dda5ae1f21ef092b384f0127be32effe565acdb2160b9b7bd5bc4ac8bc0278" gracePeriod=30 Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.562282 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.562659 4558 generic.go:334] "Generic (PLEG): container finished" podID="fc023815-924a-4a04-bad2-5fc862ef20ed" containerID="9410708314b2bfc60099a915bc2c1d7c39d1320c24755c9f69a3d55337b01e9b" exitCode=0 Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.562681 4558 generic.go:334] "Generic (PLEG): container finished" podID="fc023815-924a-4a04-bad2-5fc862ef20ed" containerID="9528266b3b97d400ebef502af6d8130d7c0559f6df040c83585a2f746292ef2d" exitCode=0 Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.562690 4558 generic.go:334] "Generic (PLEG): container finished" podID="fc023815-924a-4a04-bad2-5fc862ef20ed" containerID="7b43c5f22df4aacb3c09f2db534e5fb1e65e53019530050eed242dc948ad8a62" exitCode=0 Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.562698 4558 generic.go:334] "Generic (PLEG): container finished" podID="fc023815-924a-4a04-bad2-5fc862ef20ed" containerID="375019cf206eb74d303642688e63e85c3a2faecea6a7d087bd3a333841cd82c8" exitCode=0 Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.562721 4558 generic.go:334] "Generic (PLEG): container finished" podID="fc023815-924a-4a04-bad2-5fc862ef20ed" containerID="18ccc604f3f78f7668fb5ba7e36fe96edce4c2dcfa1c6e7165a65ee3388df062" exitCode=0 Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.562729 4558 generic.go:334] "Generic (PLEG): container finished" podID="fc023815-924a-4a04-bad2-5fc862ef20ed" containerID="9f3a700b66f96079e1d010bea1fc7bf937b11f9fea1048aaff6419fd900af7c5" exitCode=0 Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.562735 4558 generic.go:334] "Generic (PLEG): container finished" podID="fc023815-924a-4a04-bad2-5fc862ef20ed" containerID="8c2d36ea5a305d3a1da0f10e70884099eb310895225cdbcdf413bf2c005effc4" exitCode=0 Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.562741 4558 generic.go:334] "Generic (PLEG): container finished" podID="fc023815-924a-4a04-bad2-5fc862ef20ed" containerID="1656f891b22791ec6d9f940bf33e0630746d220bfffb0046c75f48d2afa2d782" exitCode=0 Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.562748 4558 generic.go:334] "Generic (PLEG): container finished" podID="fc023815-924a-4a04-bad2-5fc862ef20ed" containerID="d2f143dbca4d3796805d8445782c84acc22a2c02590f44e1ad34b26b6934656f" exitCode=0 Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.562753 4558 generic.go:334] "Generic (PLEG): container finished" podID="fc023815-924a-4a04-bad2-5fc862ef20ed" containerID="101d312b0bf3e3b73451053b69e1450a946df99e2bf194fa8970254181c3653e" exitCode=0 Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.562760 4558 generic.go:334] "Generic (PLEG): container finished" podID="fc023815-924a-4a04-bad2-5fc862ef20ed" 
containerID="0b1bf5c7f6bc3d5fc7206be9bc6c12aadefeeeb27348b35c50c441224ffe435b" exitCode=0 Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.562768 4558 generic.go:334] "Generic (PLEG): container finished" podID="fc023815-924a-4a04-bad2-5fc862ef20ed" containerID="e39b88b3c77bb6d2be10abb480b241402fde95d8a3a44afc3741d81cd38c9f4f" exitCode=0 Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.562820 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fc023815-924a-4a04-bad2-5fc862ef20ed","Type":"ContainerDied","Data":"9410708314b2bfc60099a915bc2c1d7c39d1320c24755c9f69a3d55337b01e9b"} Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.562838 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fc023815-924a-4a04-bad2-5fc862ef20ed","Type":"ContainerDied","Data":"9528266b3b97d400ebef502af6d8130d7c0559f6df040c83585a2f746292ef2d"} Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.562848 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fc023815-924a-4a04-bad2-5fc862ef20ed","Type":"ContainerDied","Data":"7b43c5f22df4aacb3c09f2db534e5fb1e65e53019530050eed242dc948ad8a62"} Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.562856 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fc023815-924a-4a04-bad2-5fc862ef20ed","Type":"ContainerDied","Data":"375019cf206eb74d303642688e63e85c3a2faecea6a7d087bd3a333841cd82c8"} Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.562883 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fc023815-924a-4a04-bad2-5fc862ef20ed","Type":"ContainerDied","Data":"18ccc604f3f78f7668fb5ba7e36fe96edce4c2dcfa1c6e7165a65ee3388df062"} Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.562893 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fc023815-924a-4a04-bad2-5fc862ef20ed","Type":"ContainerDied","Data":"9f3a700b66f96079e1d010bea1fc7bf937b11f9fea1048aaff6419fd900af7c5"} Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.562902 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fc023815-924a-4a04-bad2-5fc862ef20ed","Type":"ContainerDied","Data":"8c2d36ea5a305d3a1da0f10e70884099eb310895225cdbcdf413bf2c005effc4"} Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.562912 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fc023815-924a-4a04-bad2-5fc862ef20ed","Type":"ContainerDied","Data":"1656f891b22791ec6d9f940bf33e0630746d220bfffb0046c75f48d2afa2d782"} Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.562920 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fc023815-924a-4a04-bad2-5fc862ef20ed","Type":"ContainerDied","Data":"d2f143dbca4d3796805d8445782c84acc22a2c02590f44e1ad34b26b6934656f"} Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.562927 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fc023815-924a-4a04-bad2-5fc862ef20ed","Type":"ContainerDied","Data":"101d312b0bf3e3b73451053b69e1450a946df99e2bf194fa8970254181c3653e"} Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.562935 4558 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fc023815-924a-4a04-bad2-5fc862ef20ed","Type":"ContainerDied","Data":"0b1bf5c7f6bc3d5fc7206be9bc6c12aadefeeeb27348b35c50c441224ffe435b"} Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.562960 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fc023815-924a-4a04-bad2-5fc862ef20ed","Type":"ContainerDied","Data":"e39b88b3c77bb6d2be10abb480b241402fde95d8a3a44afc3741d81cd38c9f4f"} Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.582321 4558 generic.go:334] "Generic (PLEG): container finished" podID="a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d" containerID="3e0d89a98f1d6af7b5d9794c3b45f1a7d02d2b5e21f231ecf13088af12d9aed7" exitCode=143 Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.610300 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="038dd72b-1a11-42d9-98bb-5f681a2fbc38" path="/var/lib/kubelet/pods/038dd72b-1a11-42d9-98bb-5f681a2fbc38/volumes" Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.611584 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="07a478af-b5f6-4daa-8940-f40b9fb00e2f" path="/var/lib/kubelet/pods/07a478af-b5f6-4daa-8940-f40b9fb00e2f/volumes" Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.612671 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1b1bf8d2-dad2-40e2-bf27-1a536f7a2ce5" path="/var/lib/kubelet/pods/1b1bf8d2-dad2-40e2-bf27-1a536f7a2ce5/volumes" Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.613837 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="274045d4-1273-418b-9fd4-ba6886f98af6" path="/var/lib/kubelet/pods/274045d4-1273-418b-9fd4-ba6886f98af6/volumes" Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.614878 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="296f0b5e-f4bb-4d1d-a920-432cc483e950" path="/var/lib/kubelet/pods/296f0b5e-f4bb-4d1d-a920-432cc483e950/volumes" Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.615402 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="32e12252-5dee-4b9d-9c75-fc273c840a11" path="/var/lib/kubelet/pods/32e12252-5dee-4b9d-9c75-fc273c840a11/volumes" Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.615908 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="33161661-cb8b-4f8b-b3da-c6c8bc9a0cd8" path="/var/lib/kubelet/pods/33161661-cb8b-4f8b-b3da-c6c8bc9a0cd8/volumes" Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.616451 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="42aec55d-662f-4390-8b64-d12da6a53863" path="/var/lib/kubelet/pods/42aec55d-662f-4390-8b64-d12da6a53863/volumes" Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.617489 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4c9f9871-91f4-4670-a6af-0c493f3ce85b" path="/var/lib/kubelet/pods/4c9f9871-91f4-4670-a6af-0c493f3ce85b/volumes" Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.618011 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4fb0c521-d465-47b9-b859-199f53143dca" path="/var/lib/kubelet/pods/4fb0c521-d465-47b9-b859-199f53143dca/volumes" Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.618572 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="60dcbf76-e430-47bc-afe1-832961122cc9" path="/var/lib/kubelet/pods/60dcbf76-e430-47bc-afe1-832961122cc9/volumes" Jan 20 17:10:54 crc 
kubenswrapper[4558]: I0120 17:10:54.619884 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="69d7d5b9-efa3-4a24-88ca-e3f6932188d0" path="/var/lib/kubelet/pods/69d7d5b9-efa3-4a24-88ca-e3f6932188d0/volumes" Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.620424 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7e152238-833a-4e1c-b854-0584daae3826" path="/var/lib/kubelet/pods/7e152238-833a-4e1c-b854-0584daae3826/volumes" Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.621591 4558 generic.go:334] "Generic (PLEG): container finished" podID="25737ece-fb84-4543-8c3a-94ffa7b8f095" containerID="166e1e0d14b2cb48e695c5ce11cca54d963c6250ddce1f9f936a358c8a5b3580" exitCode=143 Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.622533 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7e920ead-f95a-434d-9aa0-47342d337eae" path="/var/lib/kubelet/pods/7e920ead-f95a-434d-9aa0-47342d337eae/volumes" Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.623022 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7eea1d5d-bf4b-4104-9c44-8dc4846f8c76" path="/var/lib/kubelet/pods/7eea1d5d-bf4b-4104-9c44-8dc4846f8c76/volumes" Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.625918 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7fadd286-85f0-45eb-a8fc-c408074c178c" path="/var/lib/kubelet/pods/7fadd286-85f0-45eb-a8fc-c408074c178c/volumes" Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.626482 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="81c02319-4baf-44d1-b0b2-ae946049fb60" path="/var/lib/kubelet/pods/81c02319-4baf-44d1-b0b2-ae946049fb60/volumes" Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.626983 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8ea04114-b82b-4609-8f7f-a9ff17cb3b57" path="/var/lib/kubelet/pods/8ea04114-b82b-4609-8f7f-a9ff17cb3b57/volumes" Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.627518 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="906a9a7a-653d-44c7-ba0a-ec3b65ea7098" path="/var/lib/kubelet/pods/906a9a7a-653d-44c7-ba0a-ec3b65ea7098/volumes" Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.628563 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b8906a01-1578-45d7-bfa0-4bbb4dfe6123" path="/var/lib/kubelet/pods/b8906a01-1578-45d7-bfa0-4bbb4dfe6123/volumes" Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.629034 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ce2696d5-1fd9-496a-a751-68912b14248c" path="/var/lib/kubelet/pods/ce2696d5-1fd9-496a-a751-68912b14248c/volumes" Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.629517 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d0bb0682-179e-4316-98d1-55c1912cc154" path="/var/lib/kubelet/pods/d0bb0682-179e-4316-98d1-55c1912cc154/volumes" Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.645226 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f44e4f71-b12b-41da-bb06-0495080715ea" path="/var/lib/kubelet/pods/f44e4f71-b12b-41da-bb06-0495080715ea/volumes" Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.652899 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-db-sync-7l69k"] Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.652995 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d","Type":"ContainerDied","Data":"3e0d89a98f1d6af7b5d9794c3b45f1a7d02d2b5e21f231ecf13088af12d9aed7"} Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.653068 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-456e-account-create-update-fvc6f"] Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.653143 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-456e-account-create-update-fvc6f" event={"ID":"8218833e-265b-448d-bc76-550d3c1d213d","Type":"ContainerStarted","Data":"9a483c8e462a575d93e04256f83312e24643355e38c21061584acd09658cc0b7"} Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.653230 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"25737ece-fb84-4543-8c3a-94ffa7b8f095","Type":"ContainerDied","Data":"166e1e0d14b2cb48e695c5ce11cca54d963c6250ddce1f9f936a358c8a5b3580"} Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.653291 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-db-sync-prb5g"] Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.653343 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.653418 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-cba5-account-create-update-4mwc6" event={"ID":"a01fa32b-fc5c-49ba-b102-b4751878380a","Type":"ContainerStarted","Data":"212794007c322648f5f83a3c8ba92e12f62ec81a971cf1d5e52e9bdc9796ed6e"} Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.653630 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="cf954b15-e163-4f99-8c1b-5e04d06666bc" containerName="nova-cell0-conductor-conductor" containerID="cri-o://d149c52b2c9aabf55ad4004fb586734f8d22f15451b6f01163da779feaa3c048" gracePeriod=30 Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.656977 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-db-sync-prb5g"] Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.661626 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.661759 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="04194274-834f-49e3-ac2f-c28998193181" containerName="nova-scheduler-scheduler" containerID="cri-o://9827c6a452c5455a00296263b978afc4645bb1e30af3fb06ef2be62752a0e311" gracePeriod=30 Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.668253 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.668477 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="95cf677f-e744-4dad-acfb-507100d2ea14" containerName="ceilometer-central-agent" containerID="cri-o://6c367435c2502d2379a050e4069609ac5d1de29fbcbd65e5756a526e79921a06" gracePeriod=30 Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.668562 4558 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack-kuttl-tests/ceilometer-0" podUID="95cf677f-e744-4dad-acfb-507100d2ea14" containerName="proxy-httpd" containerID="cri-o://41ac2c9d6515d906d6c94b74bae2f46a5cb6aead6aa685ee10b8acf27caf202f" gracePeriod=30 Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.668602 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="95cf677f-e744-4dad-acfb-507100d2ea14" containerName="sg-core" containerID="cri-o://9e42a0ec21415521693070869352b9127abae2be472aef5468b15f95c32a8545" gracePeriod=30 Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.668639 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="95cf677f-e744-4dad-acfb-507100d2ea14" containerName="ceilometer-notification-agent" containerID="cri-o://b8d7b4e681364253444a1c0248fab2e3349b056b8048a2f9443b427211b7dd9d" gracePeriod=30 Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.675144 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.675398 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/kube-state-metrics-0" podUID="4f4387d7-1e8c-48a5-9176-c8e683469eb0" containerName="kube-state-metrics" containerID="cri-o://38bf3ef8cd80bbc8c77c61c653d79ffe5710ff36ee6b494a5fb1186c8b5147a9" gracePeriod=30 Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.680556 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-b308-account-create-update-wlh8l"] Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.684852 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-6a24-account-create-update-cvtmm"] Jan 20 17:10:54 crc kubenswrapper[4558]: E0120 17:10:54.691065 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:10:54 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:10:54 crc kubenswrapper[4558]: Jan 20 17:10:54 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:10:54 crc kubenswrapper[4558]: Jan 20 17:10:54 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:10:54 crc kubenswrapper[4558]: Jan 20 17:10:54 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:10:54 crc kubenswrapper[4558]: Jan 20 17:10:54 crc kubenswrapper[4558]: if [ -n "cinder" ]; then Jan 20 17:10:54 crc kubenswrapper[4558]: GRANT_DATABASE="cinder" Jan 20 17:10:54 crc kubenswrapper[4558]: else Jan 20 17:10:54 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:10:54 crc kubenswrapper[4558]: fi Jan 20 17:10:54 crc kubenswrapper[4558]: Jan 20 17:10:54 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:10:54 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:10:54 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:10:54 crc kubenswrapper[4558]: # 3. 
create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:10:54 crc kubenswrapper[4558]: # support updates Jan 20 17:10:54 crc kubenswrapper[4558]: Jan 20 17:10:54 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:10:54 crc kubenswrapper[4558]: E0120 17:10:54.692749 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"cinder-db-secret\\\" not found\"" pod="openstack-kuttl-tests/cinder-6a24-account-create-update-cvtmm" podUID="9b8664b1-e050-479b-b965-aed1a70e4bd1" Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.694879 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-0eec-account-create-update-2btsb"] Jan 20 17:10:54 crc kubenswrapper[4558]: E0120 17:10:54.696659 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:10:54 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:10:54 crc kubenswrapper[4558]: Jan 20 17:10:54 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:10:54 crc kubenswrapper[4558]: Jan 20 17:10:54 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:10:54 crc kubenswrapper[4558]: Jan 20 17:10:54 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:10:54 crc kubenswrapper[4558]: Jan 20 17:10:54 crc kubenswrapper[4558]: if [ -n "nova_api" ]; then Jan 20 17:10:54 crc kubenswrapper[4558]: GRANT_DATABASE="nova_api" Jan 20 17:10:54 crc kubenswrapper[4558]: else Jan 20 17:10:54 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:10:54 crc kubenswrapper[4558]: fi Jan 20 17:10:54 crc kubenswrapper[4558]: Jan 20 17:10:54 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:10:54 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:10:54 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:10:54 crc kubenswrapper[4558]: # 3. 
create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:10:54 crc kubenswrapper[4558]: # support updates Jan 20 17:10:54 crc kubenswrapper[4558]: Jan 20 17:10:54 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:10:54 crc kubenswrapper[4558]: E0120 17:10:54.697864 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"nova-api-db-secret\\\" not found\"" pod="openstack-kuttl-tests/nova-api-b308-account-create-update-wlh8l" podUID="12dec63c-ec5c-4327-b1ec-aa9ee39acf82" Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.703994 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-64d3-account-create-update-h72jc" event={"ID":"32721dbf-d5cf-43d4-9ddc-ad1664d17869","Type":"ContainerStarted","Data":"5faed37875136e2ea4cc33be20edaf2f31b9c5255b277e2a41fe3f19325d2706"} Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.705002 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" podUID="39403277-bf62-47c1-8e86-cdec59f2da7b" containerName="rabbitmq" containerID="cri-o://564f1f778e4e883e5ac1e2a892f2ceadcb69333aff26325a33259bf543a29685" gracePeriod=604800 Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.721174 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f98dbc6f-bd60-4064-b0d8-947d4080b9ec-config\") pod \"f98dbc6f-bd60-4064-b0d8-947d4080b9ec\" (UID: \"f98dbc6f-bd60-4064-b0d8-947d4080b9ec\") " Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.721412 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f98dbc6f-bd60-4064-b0d8-947d4080b9ec-scripts\") pod \"f98dbc6f-bd60-4064-b0d8-947d4080b9ec\" (UID: \"f98dbc6f-bd60-4064-b0d8-947d4080b9ec\") " Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.721435 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/f98dbc6f-bd60-4064-b0d8-947d4080b9ec-metrics-certs-tls-certs\") pod \"f98dbc6f-bd60-4064-b0d8-947d4080b9ec\" (UID: \"f98dbc6f-bd60-4064-b0d8-947d4080b9ec\") " Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.721459 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-nb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"f98dbc6f-bd60-4064-b0d8-947d4080b9ec\" (UID: \"f98dbc6f-bd60-4064-b0d8-947d4080b9ec\") " Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.721491 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/f98dbc6f-bd60-4064-b0d8-947d4080b9ec-ovsdb-rundir\") pod \"f98dbc6f-bd60-4064-b0d8-947d4080b9ec\" (UID: \"f98dbc6f-bd60-4064-b0d8-947d4080b9ec\") " Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.721520 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f98dbc6f-bd60-4064-b0d8-947d4080b9ec-combined-ca-bundle\") pod \"f98dbc6f-bd60-4064-b0d8-947d4080b9ec\" (UID: \"f98dbc6f-bd60-4064-b0d8-947d4080b9ec\") " Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.721548 4558 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/f98dbc6f-bd60-4064-b0d8-947d4080b9ec-ovsdbserver-nb-tls-certs\") pod \"f98dbc6f-bd60-4064-b0d8-947d4080b9ec\" (UID: \"f98dbc6f-bd60-4064-b0d8-947d4080b9ec\") " Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.721588 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zk4dr\" (UniqueName: \"kubernetes.io/projected/f98dbc6f-bd60-4064-b0d8-947d4080b9ec-kube-api-access-zk4dr\") pod \"f98dbc6f-bd60-4064-b0d8-947d4080b9ec\" (UID: \"f98dbc6f-bd60-4064-b0d8-947d4080b9ec\") " Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.723793 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f98dbc6f-bd60-4064-b0d8-947d4080b9ec-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "f98dbc6f-bd60-4064-b0d8-947d4080b9ec" (UID: "f98dbc6f-bd60-4064-b0d8-947d4080b9ec"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.724364 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f98dbc6f-bd60-4064-b0d8-947d4080b9ec-scripts" (OuterVolumeSpecName: "scripts") pod "f98dbc6f-bd60-4064-b0d8-947d4080b9ec" (UID: "f98dbc6f-bd60-4064-b0d8-947d4080b9ec"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:10:54 crc kubenswrapper[4558]: E0120 17:10:54.725115 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="9827c6a452c5455a00296263b978afc4645bb1e30af3fb06ef2be62752a0e311" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.726000 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.730187 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f98dbc6f-bd60-4064-b0d8-947d4080b9ec-config" (OuterVolumeSpecName: "config") pod "f98dbc6f-bd60-4064-b0d8-947d4080b9ec" (UID: "f98dbc6f-bd60-4064-b0d8-947d4080b9ec"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:10:54 crc kubenswrapper[4558]: E0120 17:10:54.736701 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="9827c6a452c5455a00296263b978afc4645bb1e30af3fb06ef2be62752a0e311" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.736891 4558 generic.go:334] "Generic (PLEG): container finished" podID="1ec5ef72-7c69-4be3-974d-c020ddfea4f7" containerID="2ad54bc63fc4167f1a6668ac58b0ad56798eb8ff76a988eb75b07766de4d421c" exitCode=0 Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.736983 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"1ec5ef72-7c69-4be3-974d-c020ddfea4f7","Type":"ContainerDied","Data":"2ad54bc63fc4167f1a6668ac58b0ad56798eb8ff76a988eb75b07766de4d421c"} Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.748402 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "ovndbcluster-nb-etc-ovn") pod "f98dbc6f-bd60-4064-b0d8-947d4080b9ec" (UID: "f98dbc6f-bd60-4064-b0d8-947d4080b9ec"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:10:54 crc kubenswrapper[4558]: E0120 17:10:54.752699 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:10:54 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:10:54 crc kubenswrapper[4558]: Jan 20 17:10:54 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:10:54 crc kubenswrapper[4558]: Jan 20 17:10:54 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:10:54 crc kubenswrapper[4558]: Jan 20 17:10:54 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:10:54 crc kubenswrapper[4558]: Jan 20 17:10:54 crc kubenswrapper[4558]: if [ -n "placement" ]; then Jan 20 17:10:54 crc kubenswrapper[4558]: GRANT_DATABASE="placement" Jan 20 17:10:54 crc kubenswrapper[4558]: else Jan 20 17:10:54 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:10:54 crc kubenswrapper[4558]: fi Jan 20 17:10:54 crc kubenswrapper[4558]: Jan 20 17:10:54 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:10:54 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:10:54 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:10:54 crc kubenswrapper[4558]: # 3. 
create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:10:54 crc kubenswrapper[4558]: # support updates Jan 20 17:10:54 crc kubenswrapper[4558]: Jan 20 17:10:54 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:10:54 crc kubenswrapper[4558]: E0120 17:10:54.754033 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"placement-db-secret\\\" not found\"" pod="openstack-kuttl-tests/placement-64d3-account-create-update-h72jc" podUID="32721dbf-d5cf-43d4-9ddc-ad1664d17869" Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.756506 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f98dbc6f-bd60-4064-b0d8-947d4080b9ec-kube-api-access-zk4dr" (OuterVolumeSpecName: "kube-api-access-zk4dr") pod "f98dbc6f-bd60-4064-b0d8-947d4080b9ec" (UID: "f98dbc6f-bd60-4064-b0d8-947d4080b9ec"). InnerVolumeSpecName "kube-api-access-zk4dr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:10:54 crc kubenswrapper[4558]: E0120 17:10:54.764812 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="9827c6a452c5455a00296263b978afc4645bb1e30af3fb06ef2be62752a0e311" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:10:54 crc kubenswrapper[4558]: E0120 17:10:54.764898 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="04194274-834f-49e3-ac2f-c28998193181" containerName="nova-scheduler-scheduler" Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.768898 4558 generic.go:334] "Generic (PLEG): container finished" podID="931d9a7a-aa04-42c3-a276-e71388a8b9d3" containerID="d966e46c5366cede7ed705aeae414b6587268470408797878320598c6aa92cbd" exitCode=1 Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.768994 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-f6ldh" event={"ID":"931d9a7a-aa04-42c3-a276-e71388a8b9d3","Type":"ContainerDied","Data":"d966e46c5366cede7ed705aeae414b6587268470408797878320598c6aa92cbd"} Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.779002 4558 scope.go:117] "RemoveContainer" containerID="d966e46c5366cede7ed705aeae414b6587268470408797878320598c6aa92cbd" Jan 20 17:10:54 crc kubenswrapper[4558]: E0120 17:10:54.785225 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:10:54 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:10:54 crc kubenswrapper[4558]: Jan 20 17:10:54 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:10:54 crc kubenswrapper[4558]: Jan 20 17:10:54 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:10:54 crc kubenswrapper[4558]: Jan 20 17:10:54 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:10:54 crc kubenswrapper[4558]: Jan 20 17:10:54 crc 
kubenswrapper[4558]: if [ -n "nova_cell1" ]; then Jan 20 17:10:54 crc kubenswrapper[4558]: GRANT_DATABASE="nova_cell1" Jan 20 17:10:54 crc kubenswrapper[4558]: else Jan 20 17:10:54 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:10:54 crc kubenswrapper[4558]: fi Jan 20 17:10:54 crc kubenswrapper[4558]: Jan 20 17:10:54 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:10:54 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:10:54 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:10:54 crc kubenswrapper[4558]: # 3. create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:10:54 crc kubenswrapper[4558]: # support updates Jan 20 17:10:54 crc kubenswrapper[4558]: Jan 20 17:10:54 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:10:54 crc kubenswrapper[4558]: E0120 17:10:54.786613 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"nova-cell1-db-secret\\\" not found\"" pod="openstack-kuttl-tests/nova-cell1-0eec-account-create-update-2btsb" podUID="99812c8d-c18f-48d5-a063-deb9462a904b" Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.823347 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f1a8225-d143-4ae0-9301-8025f1b639e5-combined-ca-bundle\") pod \"6f1a8225-d143-4ae0-9301-8025f1b639e5\" (UID: \"6f1a8225-d143-4ae0-9301-8025f1b639e5\") " Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.824127 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.824143 4558 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/f98dbc6f-bd60-4064-b0d8-947d4080b9ec-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.824153 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zk4dr\" (UniqueName: \"kubernetes.io/projected/f98dbc6f-bd60-4064-b0d8-947d4080b9ec-kube-api-access-zk4dr\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.824175 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f98dbc6f-bd60-4064-b0d8-947d4080b9ec-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.824183 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f98dbc6f-bd60-4064-b0d8-947d4080b9ec-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.849347 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f98dbc6f-bd60-4064-b0d8-947d4080b9ec-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f98dbc6f-bd60-4064-b0d8-947d4080b9ec" (UID: "f98dbc6f-bd60-4064-b0d8-947d4080b9ec"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.860892 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstackclient" Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.909640 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f1a8225-d143-4ae0-9301-8025f1b639e5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6f1a8225-d143-4ae0-9301-8025f1b639e5" (UID: "6f1a8225-d143-4ae0-9301-8025f1b639e5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.920806 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.926303 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6f1a8225-d143-4ae0-9301-8025f1b639e5-scripts\") pod \"6f1a8225-d143-4ae0-9301-8025f1b639e5\" (UID: \"6f1a8225-d143-4ae0-9301-8025f1b639e5\") " Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.926347 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/6f1a8225-d143-4ae0-9301-8025f1b639e5-metrics-certs-tls-certs\") pod \"6f1a8225-d143-4ae0-9301-8025f1b639e5\" (UID: \"6f1a8225-d143-4ae0-9301-8025f1b639e5\") " Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.926376 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/6f1a8225-d143-4ae0-9301-8025f1b639e5-ovsdbserver-sb-tls-certs\") pod \"6f1a8225-d143-4ae0-9301-8025f1b639e5\" (UID: \"6f1a8225-d143-4ae0-9301-8025f1b639e5\") " Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.926521 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q4kpc\" (UniqueName: \"kubernetes.io/projected/6f1a8225-d143-4ae0-9301-8025f1b639e5-kube-api-access-q4kpc\") pod \"6f1a8225-d143-4ae0-9301-8025f1b639e5\" (UID: \"6f1a8225-d143-4ae0-9301-8025f1b639e5\") " Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.926646 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/6f1a8225-d143-4ae0-9301-8025f1b639e5-ovsdb-rundir\") pod \"6f1a8225-d143-4ae0-9301-8025f1b639e5\" (UID: \"6f1a8225-d143-4ae0-9301-8025f1b639e5\") " Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.926714 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-sb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"6f1a8225-d143-4ae0-9301-8025f1b639e5\" (UID: \"6f1a8225-d143-4ae0-9301-8025f1b639e5\") " Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.926737 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6f1a8225-d143-4ae0-9301-8025f1b639e5-config\") pod \"6f1a8225-d143-4ae0-9301-8025f1b639e5\" (UID: \"6f1a8225-d143-4ae0-9301-8025f1b639e5\") " Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.927401 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.927420 4558 reconciler_common.go:293] 
"Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f98dbc6f-bd60-4064-b0d8-947d4080b9ec-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.927430 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f1a8225-d143-4ae0-9301-8025f1b639e5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.929177 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6f1a8225-d143-4ae0-9301-8025f1b639e5-scripts" (OuterVolumeSpecName: "scripts") pod "6f1a8225-d143-4ae0-9301-8025f1b639e5" (UID: "6f1a8225-d143-4ae0-9301-8025f1b639e5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.929596 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6f1a8225-d143-4ae0-9301-8025f1b639e5-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "6f1a8225-d143-4ae0-9301-8025f1b639e5" (UID: "6f1a8225-d143-4ae0-9301-8025f1b639e5"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.930353 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6f1a8225-d143-4ae0-9301-8025f1b639e5-config" (OuterVolumeSpecName: "config") pod "6f1a8225-d143-4ae0-9301-8025f1b639e5" (UID: "6f1a8225-d143-4ae0-9301-8025f1b639e5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:10:54 crc kubenswrapper[4558]: E0120 17:10:54.930478 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Jan 20 17:10:54 crc kubenswrapper[4558]: E0120 17:10:54.930534 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/39403277-bf62-47c1-8e86-cdec59f2da7b-config-data podName:39403277-bf62-47c1-8e86-cdec59f2da7b nodeName:}" failed. No retries permitted until 2026-01-20 17:10:56.930515747 +0000 UTC m=+1750.690853714 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/39403277-bf62-47c1-8e86-cdec59f2da7b-config-data") pod "rabbitmq-cell1-server-0" (UID: "39403277-bf62-47c1-8e86-cdec59f2da7b") : configmap "rabbitmq-cell1-config-data" not found Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.942837 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6f1a8225-d143-4ae0-9301-8025f1b639e5-kube-api-access-q4kpc" (OuterVolumeSpecName: "kube-api-access-q4kpc") pod "6f1a8225-d143-4ae0-9301-8025f1b639e5" (UID: "6f1a8225-d143-4ae0-9301-8025f1b639e5"). InnerVolumeSpecName "kube-api-access-q4kpc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:10:54 crc kubenswrapper[4558]: I0120 17:10:54.965508 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "ovndbcluster-sb-etc-ovn") pod "6f1a8225-d143-4ae0-9301-8025f1b639e5" (UID: "6f1a8225-d143-4ae0-9301-8025f1b639e5"). InnerVolumeSpecName "local-storage01-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.030945 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/6d6dfb4c-9a8d-405e-8ffd-288e446998cf-openstack-config-secret\") pod \"6d6dfb4c-9a8d-405e-8ffd-288e446998cf\" (UID: \"6d6dfb4c-9a8d-405e-8ffd-288e446998cf\") " Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.031151 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/6d6dfb4c-9a8d-405e-8ffd-288e446998cf-openstack-config\") pod \"6d6dfb4c-9a8d-405e-8ffd-288e446998cf\" (UID: \"6d6dfb4c-9a8d-405e-8ffd-288e446998cf\") " Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.031232 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2wwp6\" (UniqueName: \"kubernetes.io/projected/6d6dfb4c-9a8d-405e-8ffd-288e446998cf-kube-api-access-2wwp6\") pod \"6d6dfb4c-9a8d-405e-8ffd-288e446998cf\" (UID: \"6d6dfb4c-9a8d-405e-8ffd-288e446998cf\") " Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.031294 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d6dfb4c-9a8d-405e-8ffd-288e446998cf-combined-ca-bundle\") pod \"6d6dfb4c-9a8d-405e-8ffd-288e446998cf\" (UID: \"6d6dfb4c-9a8d-405e-8ffd-288e446998cf\") " Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.031856 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6f1a8225-d143-4ae0-9301-8025f1b639e5-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.031875 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q4kpc\" (UniqueName: \"kubernetes.io/projected/6f1a8225-d143-4ae0-9301-8025f1b639e5-kube-api-access-q4kpc\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.031885 4558 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/6f1a8225-d143-4ae0-9301-8025f1b639e5-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.031904 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.031913 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6f1a8225-d143-4ae0-9301-8025f1b639e5-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.110331 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f98dbc6f-bd60-4064-b0d8-947d4080b9ec-ovsdbserver-nb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-nb-tls-certs") pod "f98dbc6f-bd60-4064-b0d8-947d4080b9ec" (UID: "f98dbc6f-bd60-4064-b0d8-947d4080b9ec"). InnerVolumeSpecName "ovsdbserver-nb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.117488 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-proxy-69ddc9f468-cx7lb"] Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.117490 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f98dbc6f-bd60-4064-b0d8-947d4080b9ec-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "f98dbc6f-bd60-4064-b0d8-947d4080b9ec" (UID: "f98dbc6f-bd60-4064-b0d8-947d4080b9ec"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.117704 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-proxy-69ddc9f468-cx7lb" podUID="b33d5d09-77a2-4bc5-aa13-898a97b01b69" containerName="proxy-httpd" containerID="cri-o://65dc4b617123648766af244b88586c5391acaca630b06a308276272002cf2765" gracePeriod=30 Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.117924 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/rabbitmq-server-0" podUID="07fab146-67be-42ba-b263-ee19fe95720b" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.1.51:5671: connect: connection refused" Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.118022 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-proxy-69ddc9f468-cx7lb" podUID="b33d5d09-77a2-4bc5-aa13-898a97b01b69" containerName="proxy-server" containerID="cri-o://71bbd70a1c3421db19c774ca5977ad33d9b756863ccd7cb6a69d77b0a5690c71" gracePeriod=30 Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.195928 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6d6dfb4c-9a8d-405e-8ffd-288e446998cf-kube-api-access-2wwp6" (OuterVolumeSpecName: "kube-api-access-2wwp6") pod "6d6dfb4c-9a8d-405e-8ffd-288e446998cf" (UID: "6d6dfb4c-9a8d-405e-8ffd-288e446998cf"). InnerVolumeSpecName "kube-api-access-2wwp6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.209017 4558 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/f98dbc6f-bd60-4064-b0d8-947d4080b9ec-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.257875 4558 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/f98dbc6f-bd60-4064-b0d8-947d4080b9ec-ovsdbserver-nb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.258194 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6d6dfb4c-9a8d-405e-8ffd-288e446998cf-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "6d6dfb4c-9a8d-405e-8ffd-288e446998cf" (UID: "6d6dfb4c-9a8d-405e-8ffd-288e446998cf"). InnerVolumeSpecName "openstack-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.263984 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f1a8225-d143-4ae0-9301-8025f1b639e5-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "6f1a8225-d143-4ae0-9301-8025f1b639e5" (UID: "6f1a8225-d143-4ae0-9301-8025f1b639e5"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.264238 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d6dfb4c-9a8d-405e-8ffd-288e446998cf-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6d6dfb4c-9a8d-405e-8ffd-288e446998cf" (UID: "6d6dfb4c-9a8d-405e-8ffd-288e446998cf"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.324567 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" podUID="39403277-bf62-47c1-8e86-cdec59f2da7b" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.1.52:5671: connect: connection refused" Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.352799 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc" Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.360034 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.360066 4558 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/6d6dfb4c-9a8d-405e-8ffd-288e446998cf-openstack-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.360080 4558 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/6f1a8225-d143-4ae0-9301-8025f1b639e5-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.360090 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2wwp6\" (UniqueName: \"kubernetes.io/projected/6d6dfb4c-9a8d-405e-8ffd-288e446998cf-kube-api-access-2wwp6\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.360098 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6d6dfb4c-9a8d-405e-8ffd-288e446998cf-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.408246 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f1a8225-d143-4ae0-9301-8025f1b639e5-ovsdbserver-sb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-sb-tls-certs") pod "6f1a8225-d143-4ae0-9301-8025f1b639e5" (UID: "6f1a8225-d143-4ae0-9301-8025f1b639e5"). InnerVolumeSpecName "ovsdbserver-sb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.418024 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d6dfb4c-9a8d-405e-8ffd-288e446998cf-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "6d6dfb4c-9a8d-405e-8ffd-288e446998cf" (UID: "6d6dfb4c-9a8d-405e-8ffd-288e446998cf"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.464622 4558 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/6f1a8225-d143-4ae0-9301-8025f1b639e5-ovsdbserver-sb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.464667 4558 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/6d6dfb4c-9a8d-405e-8ffd-288e446998cf-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:55 crc kubenswrapper[4558]: E0120 17:10:55.567814 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 20 17:10:55 crc kubenswrapper[4558]: E0120 17:10:55.567907 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/07fab146-67be-42ba-b263-ee19fe95720b-config-data podName:07fab146-67be-42ba-b263-ee19fe95720b nodeName:}" failed. No retries permitted until 2026-01-20 17:10:59.567889739 +0000 UTC m=+1753.328227707 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/07fab146-67be-42ba-b263-ee19fe95720b-config-data") pod "rabbitmq-server-0" (UID: "07fab146-67be-42ba-b263-ee19fe95720b") : configmap "rabbitmq-config-data" not found Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.574554 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-cba5-account-create-update-4mwc6" Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.605912 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-456e-account-create-update-fvc6f" Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.668849 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a01fa32b-fc5c-49ba-b102-b4751878380a-operator-scripts\") pod \"a01fa32b-fc5c-49ba-b102-b4751878380a\" (UID: \"a01fa32b-fc5c-49ba-b102-b4751878380a\") " Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.669550 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2vswq\" (UniqueName: \"kubernetes.io/projected/a01fa32b-fc5c-49ba-b102-b4751878380a-kube-api-access-2vswq\") pod \"a01fa32b-fc5c-49ba-b102-b4751878380a\" (UID: \"a01fa32b-fc5c-49ba-b102-b4751878380a\") " Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.669678 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a01fa32b-fc5c-49ba-b102-b4751878380a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a01fa32b-fc5c-49ba-b102-b4751878380a" (UID: "a01fa32b-fc5c-49ba-b102-b4751878380a"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.671624 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a01fa32b-fc5c-49ba-b102-b4751878380a-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.675530 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a01fa32b-fc5c-49ba-b102-b4751878380a-kube-api-access-2vswq" (OuterVolumeSpecName: "kube-api-access-2vswq") pod "a01fa32b-fc5c-49ba-b102-b4751878380a" (UID: "a01fa32b-fc5c-49ba-b102-b4751878380a"). InnerVolumeSpecName "kube-api-access-2vswq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.773828 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qvz5m\" (UniqueName: \"kubernetes.io/projected/8218833e-265b-448d-bc76-550d3c1d213d-kube-api-access-qvz5m\") pod \"8218833e-265b-448d-bc76-550d3c1d213d\" (UID: \"8218833e-265b-448d-bc76-550d3c1d213d\") " Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.773919 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8218833e-265b-448d-bc76-550d3c1d213d-operator-scripts\") pod \"8218833e-265b-448d-bc76-550d3c1d213d\" (UID: \"8218833e-265b-448d-bc76-550d3c1d213d\") " Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.774648 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8218833e-265b-448d-bc76-550d3c1d213d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "8218833e-265b-448d-bc76-550d3c1d213d" (UID: "8218833e-265b-448d-bc76-550d3c1d213d"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.774725 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2vswq\" (UniqueName: \"kubernetes.io/projected/a01fa32b-fc5c-49ba-b102-b4751878380a-kube-api-access-2vswq\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.778290 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8218833e-265b-448d-bc76-550d3c1d213d-kube-api-access-qvz5m" (OuterVolumeSpecName: "kube-api-access-qvz5m") pod "8218833e-265b-448d-bc76-550d3c1d213d" (UID: "8218833e-265b-448d-bc76-550d3c1d213d"). InnerVolumeSpecName "kube-api-access-qvz5m". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.798021 4558 generic.go:334] "Generic (PLEG): container finished" podID="4f4387d7-1e8c-48a5-9176-c8e683469eb0" containerID="38bf3ef8cd80bbc8c77c61c653d79ffe5710ff36ee6b494a5fb1186c8b5147a9" exitCode=2 Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.798083 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"4f4387d7-1e8c-48a5-9176-c8e683469eb0","Type":"ContainerDied","Data":"38bf3ef8cd80bbc8c77c61c653d79ffe5710ff36ee6b494a5fb1186c8b5147a9"} Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.804677 4558 generic.go:334] "Generic (PLEG): container finished" podID="01d65c2e-73b2-4d9e-a659-c384a3a3e2fe" containerID="7f4378e692fe617442cc1cc02c71c9e5beb567c1bbfc8f7fa7801b7b523d4e8b" exitCode=0 Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.804743 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"01d65c2e-73b2-4d9e-a659-c384a3a3e2fe","Type":"ContainerDied","Data":"7f4378e692fe617442cc1cc02c71c9e5beb567c1bbfc8f7fa7801b7b523d4e8b"} Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.806363 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-456e-account-create-update-fvc6f" event={"ID":"8218833e-265b-448d-bc76-550d3c1d213d","Type":"ContainerDied","Data":"9a483c8e462a575d93e04256f83312e24643355e38c21061584acd09658cc0b7"} Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.806444 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-456e-account-create-update-fvc6f" Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.822376 4558 generic.go:334] "Generic (PLEG): container finished" podID="faade961-fce0-4ad6-a039-4ba83a95dd68" containerID="8bdce8800380698b3525e8fafce4e103f8ffe07d9a3881dbd922429612c13082" exitCode=143 Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.822444 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-7bcc85c7d9-jm8qh" event={"ID":"faade961-fce0-4ad6-a039-4ba83a95dd68","Type":"ContainerDied","Data":"8bdce8800380698b3525e8fafce4e103f8ffe07d9a3881dbd922429612c13082"} Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.825307 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-6a24-account-create-update-cvtmm" event={"ID":"9b8664b1-e050-479b-b965-aed1a70e4bd1","Type":"ContainerStarted","Data":"e18be31df081cf061af134c7daa18656b9fd77f54f32e06363c00698770f7db8"} Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.829888 4558 scope.go:117] "RemoveContainer" containerID="be9c2c4b147215f87ea040240daeaf63995b7b21012666e0eb6d43c0ceb36adc" Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.830014 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstackclient" Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.847303 4558 generic.go:334] "Generic (PLEG): container finished" podID="ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1" containerID="3961204b11e978cd9995166f21ade05036318ca560f84a52206089ba8789ed9d" exitCode=143 Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.847377 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1","Type":"ContainerDied","Data":"3961204b11e978cd9995166f21ade05036318ca560f84a52206089ba8789ed9d"} Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.848701 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-cba5-account-create-update-4mwc6" event={"ID":"a01fa32b-fc5c-49ba-b102-b4751878380a","Type":"ContainerDied","Data":"212794007c322648f5f83a3c8ba92e12f62ec81a971cf1d5e52e9bdc9796ed6e"} Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.848778 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-cba5-account-create-update-4mwc6" Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.857637 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-456e-account-create-update-fvc6f"] Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.870106 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-456e-account-create-update-fvc6f"] Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.877071 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8218833e-265b-448d-bc76-550d3c1d213d-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.877098 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qvz5m\" (UniqueName: \"kubernetes.io/projected/8218833e-265b-448d-bc76-550d3c1d213d-kube-api-access-qvz5m\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.878055 4558 generic.go:334] "Generic (PLEG): container finished" podID="5560f6a4-fc05-4d97-8496-b87804dfab99" containerID="737bdc74f2c709ec376fc9204c898d8cba1280d8792f62f6a65923c927c2ba2f" exitCode=143 Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.878128 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"5560f6a4-fc05-4d97-8496-b87804dfab99","Type":"ContainerDied","Data":"737bdc74f2c709ec376fc9204c898d8cba1280d8792f62f6a65923c927c2ba2f"} Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.884725 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.885268 4558 generic.go:334] "Generic (PLEG): container finished" podID="931d9a7a-aa04-42c3-a276-e71388a8b9d3" containerID="30050fc936fb62cb7339a9a80b961e2d364426635e1598a842e2fb3d470652c2" exitCode=1 Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.885311 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-f6ldh" event={"ID":"931d9a7a-aa04-42c3-a276-e71388a8b9d3","Type":"ContainerDied","Data":"30050fc936fb62cb7339a9a80b961e2d364426635e1598a842e2fb3d470652c2"} Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.885338 4558 scope.go:117] "RemoveContainer" containerID="d966e46c5366cede7ed705aeae414b6587268470408797878320598c6aa92cbd" Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.885958 4558 scope.go:117] "RemoveContainer" containerID="30050fc936fb62cb7339a9a80b961e2d364426635e1598a842e2fb3d470652c2" Jan 20 17:10:55 crc kubenswrapper[4558]: E0120 17:10:55.886270 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CrashLoopBackOff: \"back-off 10s restarting failed container=mariadb-account-create-update pod=root-account-create-update-f6ldh_openstack-kuttl-tests(931d9a7a-aa04-42c3-a276-e71388a8b9d3)\"" pod="openstack-kuttl-tests/root-account-create-update-f6ldh" podUID="931d9a7a-aa04-42c3-a276-e71388a8b9d3" Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.924288 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-cba5-account-create-update-4mwc6"] Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.928120 4558 generic.go:334] "Generic (PLEG): container finished" podID="fc023815-924a-4a04-bad2-5fc862ef20ed" containerID="3517eea5a519d4ed5dbc9c6b97d0c39a1746f6138a5b79baf3fd8a2d54e4293f" exitCode=0 Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.928153 4558 generic.go:334] "Generic (PLEG): container finished" podID="fc023815-924a-4a04-bad2-5fc862ef20ed" containerID="a8fa945168b61b5421cd9a0b28d23083787cacb8489bf80416f7599e933bb729" exitCode=0 Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.928228 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fc023815-924a-4a04-bad2-5fc862ef20ed","Type":"ContainerDied","Data":"3517eea5a519d4ed5dbc9c6b97d0c39a1746f6138a5b79baf3fd8a2d54e4293f"} Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.928262 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fc023815-924a-4a04-bad2-5fc862ef20ed","Type":"ContainerDied","Data":"a8fa945168b61b5421cd9a0b28d23083787cacb8489bf80416f7599e933bb729"} Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.931156 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-b308-account-create-update-wlh8l" event={"ID":"12dec63c-ec5c-4327-b1ec-aa9ee39acf82","Type":"ContainerStarted","Data":"b45f70a1d5c73fc8cb6894309f08fede043b7cd27440dce23ca6f0fc7454635b"} Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.943486 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-cba5-account-create-update-4mwc6"] Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.971690 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" 
event={"ID":"f98dbc6f-bd60-4064-b0d8-947d4080b9ec","Type":"ContainerDied","Data":"19366b51047dcffb431ed74f700176cc3bdbee5cc52bf32768fe8be71c1e1092"} Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.971801 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.979293 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jtwsp\" (UniqueName: \"kubernetes.io/projected/4f4387d7-1e8c-48a5-9176-c8e683469eb0-kube-api-access-jtwsp\") pod \"4f4387d7-1e8c-48a5-9176-c8e683469eb0\" (UID: \"4f4387d7-1e8c-48a5-9176-c8e683469eb0\") " Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.980223 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/4f4387d7-1e8c-48a5-9176-c8e683469eb0-kube-state-metrics-tls-config\") pod \"4f4387d7-1e8c-48a5-9176-c8e683469eb0\" (UID: \"4f4387d7-1e8c-48a5-9176-c8e683469eb0\") " Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.980429 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/4f4387d7-1e8c-48a5-9176-c8e683469eb0-kube-state-metrics-tls-certs\") pod \"4f4387d7-1e8c-48a5-9176-c8e683469eb0\" (UID: \"4f4387d7-1e8c-48a5-9176-c8e683469eb0\") " Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.981992 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f4387d7-1e8c-48a5-9176-c8e683469eb0-combined-ca-bundle\") pod \"4f4387d7-1e8c-48a5-9176-c8e683469eb0\" (UID: \"4f4387d7-1e8c-48a5-9176-c8e683469eb0\") " Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.983308 4558 scope.go:117] "RemoveContainer" containerID="7cbff1daf53c24e61424f2057794575c2b66d928a2e2c492b88b52dfa56a5ee3" Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.990481 4558 generic.go:334] "Generic (PLEG): container finished" podID="e30a943a-f753-41fc-adc8-03822aa712c3" containerID="4009b12e702e655b643aa02ca67d095804a9ddf483a6488a64d2205988f9a03a" exitCode=0 Jan 20 17:10:55 crc kubenswrapper[4558]: I0120 17:10:55.990619 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"e30a943a-f753-41fc-adc8-03822aa712c3","Type":"ContainerDied","Data":"4009b12e702e655b643aa02ca67d095804a9ddf483a6488a64d2205988f9a03a"} Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.002509 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4f4387d7-1e8c-48a5-9176-c8e683469eb0-kube-api-access-jtwsp" (OuterVolumeSpecName: "kube-api-access-jtwsp") pod "4f4387d7-1e8c-48a5-9176-c8e683469eb0" (UID: "4f4387d7-1e8c-48a5-9176-c8e683469eb0"). InnerVolumeSpecName "kube-api-access-jtwsp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.005680 4558 generic.go:334] "Generic (PLEG): container finished" podID="b33d5d09-77a2-4bc5-aa13-898a97b01b69" containerID="71bbd70a1c3421db19c774ca5977ad33d9b756863ccd7cb6a69d77b0a5690c71" exitCode=0 Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.005707 4558 generic.go:334] "Generic (PLEG): container finished" podID="b33d5d09-77a2-4bc5-aa13-898a97b01b69" containerID="65dc4b617123648766af244b88586c5391acaca630b06a308276272002cf2765" exitCode=0 Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.005781 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-69ddc9f468-cx7lb" event={"ID":"b33d5d09-77a2-4bc5-aa13-898a97b01b69","Type":"ContainerDied","Data":"71bbd70a1c3421db19c774ca5977ad33d9b756863ccd7cb6a69d77b0a5690c71"} Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.005807 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-69ddc9f468-cx7lb" event={"ID":"b33d5d09-77a2-4bc5-aa13-898a97b01b69","Type":"ContainerDied","Data":"65dc4b617123648766af244b88586c5391acaca630b06a308276272002cf2765"} Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.013878 4558 generic.go:334] "Generic (PLEG): container finished" podID="95cf677f-e744-4dad-acfb-507100d2ea14" containerID="41ac2c9d6515d906d6c94b74bae2f46a5cb6aead6aa685ee10b8acf27caf202f" exitCode=0 Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.013909 4558 generic.go:334] "Generic (PLEG): container finished" podID="95cf677f-e744-4dad-acfb-507100d2ea14" containerID="9e42a0ec21415521693070869352b9127abae2be472aef5468b15f95c32a8545" exitCode=2 Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.013917 4558 generic.go:334] "Generic (PLEG): container finished" podID="95cf677f-e744-4dad-acfb-507100d2ea14" containerID="6c367435c2502d2379a050e4069609ac5d1de29fbcbd65e5756a526e79921a06" exitCode=0 Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.014055 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"95cf677f-e744-4dad-acfb-507100d2ea14","Type":"ContainerDied","Data":"41ac2c9d6515d906d6c94b74bae2f46a5cb6aead6aa685ee10b8acf27caf202f"} Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.014092 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"95cf677f-e744-4dad-acfb-507100d2ea14","Type":"ContainerDied","Data":"9e42a0ec21415521693070869352b9127abae2be472aef5468b15f95c32a8545"} Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.014144 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"95cf677f-e744-4dad-acfb-507100d2ea14","Type":"ContainerDied","Data":"6c367435c2502d2379a050e4069609ac5d1de29fbcbd65e5756a526e79921a06"} Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.021906 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"6f1a8225-d143-4ae0-9301-8025f1b639e5","Type":"ContainerDied","Data":"5cbdc6f3d74248287ce88a82613cee08d93bdaf72455ee389246f8d62b9c7405"} Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.022003 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.023793 4558 generic.go:334] "Generic (PLEG): container finished" podID="209ec6bc-170f-4c8a-ad7e-e441ace95d1b" containerID="f36f1742d8b3097d9223becd7320632e27297558f65c436f5ad091f8b5074cbd" exitCode=143 Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.023850 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-86d9c4d694-78ch5" event={"ID":"209ec6bc-170f-4c8a-ad7e-e441ace95d1b","Type":"ContainerDied","Data":"f36f1742d8b3097d9223becd7320632e27297558f65c436f5ad091f8b5074cbd"} Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.025312 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-0eec-account-create-update-2btsb" event={"ID":"99812c8d-c18f-48d5-a063-deb9462a904b","Type":"ContainerStarted","Data":"117bde14f91f654a5ebb943394d43661da0ffe640fd4cbb549b06da5dd1009d4"} Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.034936 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f4387d7-1e8c-48a5-9176-c8e683469eb0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4f4387d7-1e8c-48a5-9176-c8e683469eb0" (UID: "4f4387d7-1e8c-48a5-9176-c8e683469eb0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.050329 4558 generic.go:334] "Generic (PLEG): container finished" podID="87001341-5a74-4796-a332-6d57b2cf11c9" containerID="c38ffae2f71dd677618391bc8fe1b2be1a66a0d46ff78c2fe5bdd127277f6054" exitCode=143 Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.050962 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-757fb55df8-9qzm2" event={"ID":"87001341-5a74-4796-a332-6d57b2cf11c9","Type":"ContainerDied","Data":"c38ffae2f71dd677618391bc8fe1b2be1a66a0d46ff78c2fe5bdd127277f6054"} Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.076596 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f4387d7-1e8c-48a5-9176-c8e683469eb0-kube-state-metrics-tls-certs" (OuterVolumeSpecName: "kube-state-metrics-tls-certs") pod "4f4387d7-1e8c-48a5-9176-c8e683469eb0" (UID: "4f4387d7-1e8c-48a5-9176-c8e683469eb0"). InnerVolumeSpecName "kube-state-metrics-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.084804 4558 reconciler_common.go:293] "Volume detached for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/4f4387d7-1e8c-48a5-9176-c8e683469eb0-kube-state-metrics-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.084837 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f4387d7-1e8c-48a5-9176-c8e683469eb0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.084847 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jtwsp\" (UniqueName: \"kubernetes.io/projected/4f4387d7-1e8c-48a5-9176-c8e683469eb0-kube-api-access-jtwsp\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.109134 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f4387d7-1e8c-48a5-9176-c8e683469eb0-kube-state-metrics-tls-config" (OuterVolumeSpecName: "kube-state-metrics-tls-config") pod "4f4387d7-1e8c-48a5-9176-c8e683469eb0" (UID: "4f4387d7-1e8c-48a5-9176-c8e683469eb0"). InnerVolumeSpecName "kube-state-metrics-tls-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.132411 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.135791 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-77c1-account-create-update-kgxm8"] Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.168977 4558 scope.go:117] "RemoveContainer" containerID="ec0223d973099ec19b7266b05971f33d632f61aa218c28a0e77f251b638c66e8" Jan 20 17:10:56 crc kubenswrapper[4558]: E0120 17:10:56.176938 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:10:56 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:10:56 crc kubenswrapper[4558]: Jan 20 17:10:56 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:10:56 crc kubenswrapper[4558]: Jan 20 17:10:56 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:10:56 crc kubenswrapper[4558]: Jan 20 17:10:56 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:10:56 crc kubenswrapper[4558]: Jan 20 17:10:56 crc kubenswrapper[4558]: if [ -n "nova_cell0" ]; then Jan 20 17:10:56 crc kubenswrapper[4558]: GRANT_DATABASE="nova_cell0" Jan 20 17:10:56 crc kubenswrapper[4558]: else Jan 20 17:10:56 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:10:56 crc kubenswrapper[4558]: fi Jan 20 17:10:56 crc kubenswrapper[4558]: Jan 20 17:10:56 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:10:56 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:10:56 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:10:56 crc kubenswrapper[4558]: # 3. 
create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:10:56 crc kubenswrapper[4558]: # support updates Jan 20 17:10:56 crc kubenswrapper[4558]: Jan 20 17:10:56 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.177776 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-8xt6f"] Jan 20 17:10:56 crc kubenswrapper[4558]: E0120 17:10:56.178648 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"nova-cell0-db-secret\\\" not found\"" pod="openstack-kuttl-tests/nova-cell0-77c1-account-create-update-kgxm8" podUID="5d4337cb-4c83-4c75-b4e4-3c7f16373356" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.187537 4558 reconciler_common.go:293] "Volume detached for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/4f4387d7-1e8c-48a5-9176-c8e683469eb0-kube-state-metrics-tls-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.204150 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.211020 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.227310 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.229008 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.231533 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.241065 4558 scope.go:117] "RemoveContainer" containerID="27f4034104e6baa2769f9fc2dec19cbc295799af2b5e24f6d5ea225a19e1124b" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.290399 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e30a943a-f753-41fc-adc8-03822aa712c3-combined-ca-bundle\") pod \"e30a943a-f753-41fc-adc8-03822aa712c3\" (UID: \"e30a943a-f753-41fc-adc8-03822aa712c3\") " Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.290451 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e30a943a-f753-41fc-adc8-03822aa712c3-config-data\") pod \"e30a943a-f753-41fc-adc8-03822aa712c3\" (UID: \"e30a943a-f753-41fc-adc8-03822aa712c3\") " Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.290518 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/e30a943a-f753-41fc-adc8-03822aa712c3-nova-novncproxy-tls-certs\") pod \"e30a943a-f753-41fc-adc8-03822aa712c3\" (UID: \"e30a943a-f753-41fc-adc8-03822aa712c3\") " Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.291187 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/e30a943a-f753-41fc-adc8-03822aa712c3-vencrypt-tls-certs\") pod \"e30a943a-f753-41fc-adc8-03822aa712c3\" (UID: 
\"e30a943a-f753-41fc-adc8-03822aa712c3\") " Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.291296 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w698k\" (UniqueName: \"kubernetes.io/projected/e30a943a-f753-41fc-adc8-03822aa712c3-kube-api-access-w698k\") pod \"e30a943a-f753-41fc-adc8-03822aa712c3\" (UID: \"e30a943a-f753-41fc-adc8-03822aa712c3\") " Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.298913 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e30a943a-f753-41fc-adc8-03822aa712c3-kube-api-access-w698k" (OuterVolumeSpecName: "kube-api-access-w698k") pod "e30a943a-f753-41fc-adc8-03822aa712c3" (UID: "e30a943a-f753-41fc-adc8-03822aa712c3"). InnerVolumeSpecName "kube-api-access-w698k". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.300620 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-proxy-69ddc9f468-cx7lb" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.307971 4558 scope.go:117] "RemoveContainer" containerID="0ed7f1c17ddf0fc44bcd37dd9e0ff1ad788674cd606ea41336b8a5c6132d841d" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.330277 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e30a943a-f753-41fc-adc8-03822aa712c3-config-data" (OuterVolumeSpecName: "config-data") pod "e30a943a-f753-41fc-adc8-03822aa712c3" (UID: "e30a943a-f753-41fc-adc8-03822aa712c3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.345527 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e30a943a-f753-41fc-adc8-03822aa712c3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e30a943a-f753-41fc-adc8-03822aa712c3" (UID: "e30a943a-f753-41fc-adc8-03822aa712c3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.353288 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e30a943a-f753-41fc-adc8-03822aa712c3-nova-novncproxy-tls-certs" (OuterVolumeSpecName: "nova-novncproxy-tls-certs") pod "e30a943a-f753-41fc-adc8-03822aa712c3" (UID: "e30a943a-f753-41fc-adc8-03822aa712c3"). InnerVolumeSpecName "nova-novncproxy-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.384342 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-6a24-account-create-update-cvtmm" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.390484 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e30a943a-f753-41fc-adc8-03822aa712c3-vencrypt-tls-certs" (OuterVolumeSpecName: "vencrypt-tls-certs") pod "e30a943a-f753-41fc-adc8-03822aa712c3" (UID: "e30a943a-f753-41fc-adc8-03822aa712c3"). InnerVolumeSpecName "vencrypt-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.393127 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b33d5d09-77a2-4bc5-aa13-898a97b01b69-run-httpd\") pod \"b33d5d09-77a2-4bc5-aa13-898a97b01b69\" (UID: \"b33d5d09-77a2-4bc5-aa13-898a97b01b69\") " Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.393330 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b33d5d09-77a2-4bc5-aa13-898a97b01b69-config-data\") pod \"b33d5d09-77a2-4bc5-aa13-898a97b01b69\" (UID: \"b33d5d09-77a2-4bc5-aa13-898a97b01b69\") " Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.393430 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01d65c2e-73b2-4d9e-a659-c384a3a3e2fe-combined-ca-bundle\") pod \"01d65c2e-73b2-4d9e-a659-c384a3a3e2fe\" (UID: \"01d65c2e-73b2-4d9e-a659-c384a3a3e2fe\") " Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.393515 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b33d5d09-77a2-4bc5-aa13-898a97b01b69-public-tls-certs\") pod \"b33d5d09-77a2-4bc5-aa13-898a97b01b69\" (UID: \"b33d5d09-77a2-4bc5-aa13-898a97b01b69\") " Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.393652 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b33d5d09-77a2-4bc5-aa13-898a97b01b69-log-httpd\") pod \"b33d5d09-77a2-4bc5-aa13-898a97b01b69\" (UID: \"b33d5d09-77a2-4bc5-aa13-898a97b01b69\") " Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.393762 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b33d5d09-77a2-4bc5-aa13-898a97b01b69-internal-tls-certs\") pod \"b33d5d09-77a2-4bc5-aa13-898a97b01b69\" (UID: \"b33d5d09-77a2-4bc5-aa13-898a97b01b69\") " Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.393832 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b33d5d09-77a2-4bc5-aa13-898a97b01b69-etc-swift\") pod \"b33d5d09-77a2-4bc5-aa13-898a97b01b69\" (UID: \"b33d5d09-77a2-4bc5-aa13-898a97b01b69\") " Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.393539 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b33d5d09-77a2-4bc5-aa13-898a97b01b69-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "b33d5d09-77a2-4bc5-aa13-898a97b01b69" (UID: "b33d5d09-77a2-4bc5-aa13-898a97b01b69"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.393915 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/01d65c2e-73b2-4d9e-a659-c384a3a3e2fe-config-data-generated\") pod \"01d65c2e-73b2-4d9e-a659-c384a3a3e2fe\" (UID: \"01d65c2e-73b2-4d9e-a659-c384a3a3e2fe\") " Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.394064 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lqfqz\" (UniqueName: \"kubernetes.io/projected/b33d5d09-77a2-4bc5-aa13-898a97b01b69-kube-api-access-lqfqz\") pod \"b33d5d09-77a2-4bc5-aa13-898a97b01b69\" (UID: \"b33d5d09-77a2-4bc5-aa13-898a97b01b69\") " Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.394160 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"01d65c2e-73b2-4d9e-a659-c384a3a3e2fe\" (UID: \"01d65c2e-73b2-4d9e-a659-c384a3a3e2fe\") " Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.394216 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hqqmd\" (UniqueName: \"kubernetes.io/projected/01d65c2e-73b2-4d9e-a659-c384a3a3e2fe-kube-api-access-hqqmd\") pod \"01d65c2e-73b2-4d9e-a659-c384a3a3e2fe\" (UID: \"01d65c2e-73b2-4d9e-a659-c384a3a3e2fe\") " Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.394250 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/01d65c2e-73b2-4d9e-a659-c384a3a3e2fe-config-data-default\") pod \"01d65c2e-73b2-4d9e-a659-c384a3a3e2fe\" (UID: \"01d65c2e-73b2-4d9e-a659-c384a3a3e2fe\") " Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.394274 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/01d65c2e-73b2-4d9e-a659-c384a3a3e2fe-operator-scripts\") pod \"01d65c2e-73b2-4d9e-a659-c384a3a3e2fe\" (UID: \"01d65c2e-73b2-4d9e-a659-c384a3a3e2fe\") " Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.394295 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/01d65c2e-73b2-4d9e-a659-c384a3a3e2fe-kolla-config\") pod \"01d65c2e-73b2-4d9e-a659-c384a3a3e2fe\" (UID: \"01d65c2e-73b2-4d9e-a659-c384a3a3e2fe\") " Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.394346 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b33d5d09-77a2-4bc5-aa13-898a97b01b69-combined-ca-bundle\") pod \"b33d5d09-77a2-4bc5-aa13-898a97b01b69\" (UID: \"b33d5d09-77a2-4bc5-aa13-898a97b01b69\") " Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.394392 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/01d65c2e-73b2-4d9e-a659-c384a3a3e2fe-galera-tls-certs\") pod \"01d65c2e-73b2-4d9e-a659-c384a3a3e2fe\" (UID: \"01d65c2e-73b2-4d9e-a659-c384a3a3e2fe\") " Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.395712 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b33d5d09-77a2-4bc5-aa13-898a97b01b69-log-httpd" (OuterVolumeSpecName: "log-httpd") pod 
"b33d5d09-77a2-4bc5-aa13-898a97b01b69" (UID: "b33d5d09-77a2-4bc5-aa13-898a97b01b69"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.398942 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/01d65c2e-73b2-4d9e-a659-c384a3a3e2fe-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "01d65c2e-73b2-4d9e-a659-c384a3a3e2fe" (UID: "01d65c2e-73b2-4d9e-a659-c384a3a3e2fe"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.399614 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01d65c2e-73b2-4d9e-a659-c384a3a3e2fe-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "01d65c2e-73b2-4d9e-a659-c384a3a3e2fe" (UID: "01d65c2e-73b2-4d9e-a659-c384a3a3e2fe"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.400485 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01d65c2e-73b2-4d9e-a659-c384a3a3e2fe-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "01d65c2e-73b2-4d9e-a659-c384a3a3e2fe" (UID: "01d65c2e-73b2-4d9e-a659-c384a3a3e2fe"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.400797 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01d65c2e-73b2-4d9e-a659-c384a3a3e2fe-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "01d65c2e-73b2-4d9e-a659-c384a3a3e2fe" (UID: "01d65c2e-73b2-4d9e-a659-c384a3a3e2fe"). InnerVolumeSpecName "kolla-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.407797 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w698k\" (UniqueName: \"kubernetes.io/projected/e30a943a-f753-41fc-adc8-03822aa712c3-kube-api-access-w698k\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.407873 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b33d5d09-77a2-4bc5-aa13-898a97b01b69-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.407926 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e30a943a-f753-41fc-adc8-03822aa712c3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.407937 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e30a943a-f753-41fc-adc8-03822aa712c3-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.407950 4558 reconciler_common.go:293] "Volume detached for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/e30a943a-f753-41fc-adc8-03822aa712c3-nova-novncproxy-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.407967 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b33d5d09-77a2-4bc5-aa13-898a97b01b69-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.407978 4558 reconciler_common.go:293] "Volume detached for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/e30a943a-f753-41fc-adc8-03822aa712c3-vencrypt-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.408593 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01d65c2e-73b2-4d9e-a659-c384a3a3e2fe-kube-api-access-hqqmd" (OuterVolumeSpecName: "kube-api-access-hqqmd") pod "01d65c2e-73b2-4d9e-a659-c384a3a3e2fe" (UID: "01d65c2e-73b2-4d9e-a659-c384a3a3e2fe"). InnerVolumeSpecName "kube-api-access-hqqmd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.424959 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b33d5d09-77a2-4bc5-aa13-898a97b01b69-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "b33d5d09-77a2-4bc5-aa13-898a97b01b69" (UID: "b33d5d09-77a2-4bc5-aa13-898a97b01b69"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.425032 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b33d5d09-77a2-4bc5-aa13-898a97b01b69-kube-api-access-lqfqz" (OuterVolumeSpecName: "kube-api-access-lqfqz") pod "b33d5d09-77a2-4bc5-aa13-898a97b01b69" (UID: "b33d5d09-77a2-4bc5-aa13-898a97b01b69"). InnerVolumeSpecName "kube-api-access-lqfqz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.435880 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage13-crc" (OuterVolumeSpecName: "mysql-db") pod "01d65c2e-73b2-4d9e-a659-c384a3a3e2fe" (UID: "01d65c2e-73b2-4d9e-a659-c384a3a3e2fe"). InnerVolumeSpecName "local-storage13-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.454691 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01d65c2e-73b2-4d9e-a659-c384a3a3e2fe-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "01d65c2e-73b2-4d9e-a659-c384a3a3e2fe" (UID: "01d65c2e-73b2-4d9e-a659-c384a3a3e2fe"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.456242 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b33d5d09-77a2-4bc5-aa13-898a97b01b69-config-data" (OuterVolumeSpecName: "config-data") pod "b33d5d09-77a2-4bc5-aa13-898a97b01b69" (UID: "b33d5d09-77a2-4bc5-aa13-898a97b01b69"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.460243 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b33d5d09-77a2-4bc5-aa13-898a97b01b69-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "b33d5d09-77a2-4bc5-aa13-898a97b01b69" (UID: "b33d5d09-77a2-4bc5-aa13-898a97b01b69"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.463304 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01d65c2e-73b2-4d9e-a659-c384a3a3e2fe-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "01d65c2e-73b2-4d9e-a659-c384a3a3e2fe" (UID: "01d65c2e-73b2-4d9e-a659-c384a3a3e2fe"). InnerVolumeSpecName "galera-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.475771 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b33d5d09-77a2-4bc5-aa13-898a97b01b69-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "b33d5d09-77a2-4bc5-aa13-898a97b01b69" (UID: "b33d5d09-77a2-4bc5-aa13-898a97b01b69"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.508906 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h4mh6\" (UniqueName: \"kubernetes.io/projected/9b8664b1-e050-479b-b965-aed1a70e4bd1-kube-api-access-h4mh6\") pod \"9b8664b1-e050-479b-b965-aed1a70e4bd1\" (UID: \"9b8664b1-e050-479b-b965-aed1a70e4bd1\") " Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.509083 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9b8664b1-e050-479b-b965-aed1a70e4bd1-operator-scripts\") pod \"9b8664b1-e050-479b-b965-aed1a70e4bd1\" (UID: \"9b8664b1-e050-479b-b965-aed1a70e4bd1\") " Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.509570 4558 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/01d65c2e-73b2-4d9e-a659-c384a3a3e2fe-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.509582 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b33d5d09-77a2-4bc5-aa13-898a97b01b69-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.509593 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01d65c2e-73b2-4d9e-a659-c384a3a3e2fe-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.509602 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b33d5d09-77a2-4bc5-aa13-898a97b01b69-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.509610 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b33d5d09-77a2-4bc5-aa13-898a97b01b69-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.509621 4558 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b33d5d09-77a2-4bc5-aa13-898a97b01b69-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.509630 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lqfqz\" (UniqueName: \"kubernetes.io/projected/b33d5d09-77a2-4bc5-aa13-898a97b01b69-kube-api-access-lqfqz\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.509639 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/01d65c2e-73b2-4d9e-a659-c384a3a3e2fe-config-data-generated\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.509659 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") on node \"crc\" " Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.509669 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hqqmd\" (UniqueName: \"kubernetes.io/projected/01d65c2e-73b2-4d9e-a659-c384a3a3e2fe-kube-api-access-hqqmd\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.509678 4558 
reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/01d65c2e-73b2-4d9e-a659-c384a3a3e2fe-config-data-default\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.509687 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/01d65c2e-73b2-4d9e-a659-c384a3a3e2fe-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.509695 4558 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/01d65c2e-73b2-4d9e-a659-c384a3a3e2fe-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.513593 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9b8664b1-e050-479b-b965-aed1a70e4bd1-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "9b8664b1-e050-479b-b965-aed1a70e4bd1" (UID: "9b8664b1-e050-479b-b965-aed1a70e4bd1"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.520318 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9b8664b1-e050-479b-b965-aed1a70e4bd1-kube-api-access-h4mh6" (OuterVolumeSpecName: "kube-api-access-h4mh6") pod "9b8664b1-e050-479b-b965-aed1a70e4bd1" (UID: "9b8664b1-e050-479b-b965-aed1a70e4bd1"). InnerVolumeSpecName "kube-api-access-h4mh6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.538089 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage13-crc" (UniqueName: "kubernetes.io/local-volume/local-storage13-crc") on node "crc" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.554412 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b33d5d09-77a2-4bc5-aa13-898a97b01b69-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b33d5d09-77a2-4bc5-aa13-898a97b01b69" (UID: "b33d5d09-77a2-4bc5-aa13-898a97b01b69"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.586829 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="11c55353-18fe-4039-b437-e642c5ff122e" path="/var/lib/kubelet/pods/11c55353-18fe-4039-b437-e642c5ff122e/volumes" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.587825 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6d6dfb4c-9a8d-405e-8ffd-288e446998cf" path="/var/lib/kubelet/pods/6d6dfb4c-9a8d-405e-8ffd-288e446998cf/volumes" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.588526 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6f1a8225-d143-4ae0-9301-8025f1b639e5" path="/var/lib/kubelet/pods/6f1a8225-d143-4ae0-9301-8025f1b639e5/volumes" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.589488 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-b308-account-create-update-wlh8l" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.590754 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8218833e-265b-448d-bc76-550d3c1d213d" path="/var/lib/kubelet/pods/8218833e-265b-448d-bc76-550d3c1d213d/volumes" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.591558 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a01fa32b-fc5c-49ba-b102-b4751878380a" path="/var/lib/kubelet/pods/a01fa32b-fc5c-49ba-b102-b4751878380a/volumes" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.592081 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ecc6d472-eb61-4b6a-ba13-fd3bd4a897cc" path="/var/lib/kubelet/pods/ecc6d472-eb61-4b6a-ba13-fd3bd4a897cc/volumes" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.593064 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f98dbc6f-bd60-4064-b0d8-947d4080b9ec" path="/var/lib/kubelet/pods/f98dbc6f-bd60-4064-b0d8-947d4080b9ec/volumes" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.595719 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-0eec-account-create-update-2btsb" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.601997 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-64d3-account-create-update-h72jc" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.614641 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h4mh6\" (UniqueName: \"kubernetes.io/projected/9b8664b1-e050-479b-b965-aed1a70e4bd1-kube-api-access-h4mh6\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.614669 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.614686 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9b8664b1-e050-479b-b965-aed1a70e4bd1-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.614697 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b33d5d09-77a2-4bc5-aa13-898a97b01b69-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.716324 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/12dec63c-ec5c-4327-b1ec-aa9ee39acf82-operator-scripts\") pod \"12dec63c-ec5c-4327-b1ec-aa9ee39acf82\" (UID: \"12dec63c-ec5c-4327-b1ec-aa9ee39acf82\") " Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.716428 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/99812c8d-c18f-48d5-a063-deb9462a904b-operator-scripts\") pod \"99812c8d-c18f-48d5-a063-deb9462a904b\" (UID: \"99812c8d-c18f-48d5-a063-deb9462a904b\") " Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.716511 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/32721dbf-d5cf-43d4-9ddc-ad1664d17869-operator-scripts\") pod \"32721dbf-d5cf-43d4-9ddc-ad1664d17869\" (UID: \"32721dbf-d5cf-43d4-9ddc-ad1664d17869\") " Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.716624 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4v2x5\" (UniqueName: \"kubernetes.io/projected/12dec63c-ec5c-4327-b1ec-aa9ee39acf82-kube-api-access-4v2x5\") pod \"12dec63c-ec5c-4327-b1ec-aa9ee39acf82\" (UID: \"12dec63c-ec5c-4327-b1ec-aa9ee39acf82\") " Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.716650 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j84wj\" (UniqueName: \"kubernetes.io/projected/32721dbf-d5cf-43d4-9ddc-ad1664d17869-kube-api-access-j84wj\") pod \"32721dbf-d5cf-43d4-9ddc-ad1664d17869\" (UID: \"32721dbf-d5cf-43d4-9ddc-ad1664d17869\") " Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.716671 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gs6pf\" (UniqueName: \"kubernetes.io/projected/99812c8d-c18f-48d5-a063-deb9462a904b-kube-api-access-gs6pf\") pod \"99812c8d-c18f-48d5-a063-deb9462a904b\" (UID: \"99812c8d-c18f-48d5-a063-deb9462a904b\") " Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.722239 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/12dec63c-ec5c-4327-b1ec-aa9ee39acf82-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "12dec63c-ec5c-4327-b1ec-aa9ee39acf82" (UID: "12dec63c-ec5c-4327-b1ec-aa9ee39acf82"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.722560 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/99812c8d-c18f-48d5-a063-deb9462a904b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "99812c8d-c18f-48d5-a063-deb9462a904b" (UID: "99812c8d-c18f-48d5-a063-deb9462a904b"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.722855 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/32721dbf-d5cf-43d4-9ddc-ad1664d17869-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "32721dbf-d5cf-43d4-9ddc-ad1664d17869" (UID: "32721dbf-d5cf-43d4-9ddc-ad1664d17869"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.726014 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/12dec63c-ec5c-4327-b1ec-aa9ee39acf82-kube-api-access-4v2x5" (OuterVolumeSpecName: "kube-api-access-4v2x5") pod "12dec63c-ec5c-4327-b1ec-aa9ee39acf82" (UID: "12dec63c-ec5c-4327-b1ec-aa9ee39acf82"). InnerVolumeSpecName "kube-api-access-4v2x5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.729563 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/32721dbf-d5cf-43d4-9ddc-ad1664d17869-kube-api-access-j84wj" (OuterVolumeSpecName: "kube-api-access-j84wj") pod "32721dbf-d5cf-43d4-9ddc-ad1664d17869" (UID: "32721dbf-d5cf-43d4-9ddc-ad1664d17869"). InnerVolumeSpecName "kube-api-access-j84wj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:10:56 crc kubenswrapper[4558]: E0120 17:10:56.730098 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-memcached-svc: secret "cert-memcached-svc" not found Jan 20 17:10:56 crc kubenswrapper[4558]: E0120 17:10:56.730154 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4406b36c-adb2-4042-bc92-9efed5a43942-memcached-tls-certs podName:4406b36c-adb2-4042-bc92-9efed5a43942 nodeName:}" failed. No retries permitted until 2026-01-20 17:10:57.23013586 +0000 UTC m=+1750.990473828 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memcached-tls-certs" (UniqueName: "kubernetes.io/secret/4406b36c-adb2-4042-bc92-9efed5a43942-memcached-tls-certs") pod "memcached-0" (UID: "4406b36c-adb2-4042-bc92-9efed5a43942") : secret "cert-memcached-svc" not found Jan 20 17:10:56 crc kubenswrapper[4558]: E0120 17:10:56.730214 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:10:56 crc kubenswrapper[4558]: E0120 17:10:56.730298 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4406b36c-adb2-4042-bc92-9efed5a43942-combined-ca-bundle podName:4406b36c-adb2-4042-bc92-9efed5a43942 nodeName:}" failed. No retries permitted until 2026-01-20 17:10:57.230276595 +0000 UTC m=+1750.990614562 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/4406b36c-adb2-4042-bc92-9efed5a43942-combined-ca-bundle") pod "memcached-0" (UID: "4406b36c-adb2-4042-bc92-9efed5a43942") : secret "combined-ca-bundle" not found Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.733862 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/99812c8d-c18f-48d5-a063-deb9462a904b-kube-api-access-gs6pf" (OuterVolumeSpecName: "kube-api-access-gs6pf") pod "99812c8d-c18f-48d5-a063-deb9462a904b" (UID: "99812c8d-c18f-48d5-a063-deb9462a904b"). InnerVolumeSpecName "kube-api-access-gs6pf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.768393 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.827190 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/32721dbf-d5cf-43d4-9ddc-ad1664d17869-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.827222 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4v2x5\" (UniqueName: \"kubernetes.io/projected/12dec63c-ec5c-4327-b1ec-aa9ee39acf82-kube-api-access-4v2x5\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.827233 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j84wj\" (UniqueName: \"kubernetes.io/projected/32721dbf-d5cf-43d4-9ddc-ad1664d17869-kube-api-access-j84wj\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.827242 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gs6pf\" (UniqueName: \"kubernetes.io/projected/99812c8d-c18f-48d5-a063-deb9462a904b-kube-api-access-gs6pf\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.827253 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/12dec63c-ec5c-4327-b1ec-aa9ee39acf82-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.827261 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/99812c8d-c18f-48d5-a063-deb9462a904b-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.876926 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-c88a-account-create-update-vglhd"] Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.892211 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.904323 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-c88a-account-create-update-vglhd"] Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.925275 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-c88a-account-create-update-7d8q7"] Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.929951 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/937396e0-a574-4597-bafc-bf1d8a909d3c-config-data\") pod \"937396e0-a574-4597-bafc-bf1d8a909d3c\" (UID: \"937396e0-a574-4597-bafc-bf1d8a909d3c\") " Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.930202 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nt869\" (UniqueName: \"kubernetes.io/projected/937396e0-a574-4597-bafc-bf1d8a909d3c-kube-api-access-nt869\") pod \"937396e0-a574-4597-bafc-bf1d8a909d3c\" (UID: \"937396e0-a574-4597-bafc-bf1d8a909d3c\") " Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.930245 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/937396e0-a574-4597-bafc-bf1d8a909d3c-combined-ca-bundle\") pod \"937396e0-a574-4597-bafc-bf1d8a909d3c\" (UID: \"937396e0-a574-4597-bafc-bf1d8a909d3c\") " Jan 20 17:10:56 crc 
kubenswrapper[4558]: E0120 17:10:56.931027 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Jan 20 17:10:56 crc kubenswrapper[4558]: E0120 17:10:56.931079 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/39403277-bf62-47c1-8e86-cdec59f2da7b-config-data podName:39403277-bf62-47c1-8e86-cdec59f2da7b nodeName:}" failed. No retries permitted until 2026-01-20 17:11:00.931063756 +0000 UTC m=+1754.691401723 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/39403277-bf62-47c1-8e86-cdec59f2da7b-config-data") pod "rabbitmq-cell1-server-0" (UID: "39403277-bf62-47c1-8e86-cdec59f2da7b") : configmap "rabbitmq-cell1-config-data" not found Jan 20 17:10:56 crc kubenswrapper[4558]: E0120 17:10:56.949412 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f1a8225-d143-4ae0-9301-8025f1b639e5" containerName="openstack-network-exporter" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.949454 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f1a8225-d143-4ae0-9301-8025f1b639e5" containerName="openstack-network-exporter" Jan 20 17:10:56 crc kubenswrapper[4558]: E0120 17:10:56.949472 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01d65c2e-73b2-4d9e-a659-c384a3a3e2fe" containerName="mysql-bootstrap" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.949478 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="01d65c2e-73b2-4d9e-a659-c384a3a3e2fe" containerName="mysql-bootstrap" Jan 20 17:10:56 crc kubenswrapper[4558]: E0120 17:10:56.949485 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f98dbc6f-bd60-4064-b0d8-947d4080b9ec" containerName="ovsdbserver-nb" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.949489 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f98dbc6f-bd60-4064-b0d8-947d4080b9ec" containerName="ovsdbserver-nb" Jan 20 17:10:56 crc kubenswrapper[4558]: E0120 17:10:56.949504 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e30a943a-f753-41fc-adc8-03822aa712c3" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.949510 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e30a943a-f753-41fc-adc8-03822aa712c3" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:10:56 crc kubenswrapper[4558]: E0120 17:10:56.949524 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f98dbc6f-bd60-4064-b0d8-947d4080b9ec" containerName="openstack-network-exporter" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.949530 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f98dbc6f-bd60-4064-b0d8-947d4080b9ec" containerName="openstack-network-exporter" Jan 20 17:10:56 crc kubenswrapper[4558]: E0120 17:10:56.949547 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f4387d7-1e8c-48a5-9176-c8e683469eb0" containerName="kube-state-metrics" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.949554 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f4387d7-1e8c-48a5-9176-c8e683469eb0" containerName="kube-state-metrics" Jan 20 17:10:56 crc kubenswrapper[4558]: E0120 17:10:56.949567 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="937396e0-a574-4597-bafc-bf1d8a909d3c" containerName="nova-cell1-conductor-conductor" Jan 20 17:10:56 crc 
kubenswrapper[4558]: I0120 17:10:56.949571 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="937396e0-a574-4597-bafc-bf1d8a909d3c" containerName="nova-cell1-conductor-conductor" Jan 20 17:10:56 crc kubenswrapper[4558]: E0120 17:10:56.949579 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f1a8225-d143-4ae0-9301-8025f1b639e5" containerName="ovsdbserver-sb" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.949584 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f1a8225-d143-4ae0-9301-8025f1b639e5" containerName="ovsdbserver-sb" Jan 20 17:10:56 crc kubenswrapper[4558]: E0120 17:10:56.949595 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b33d5d09-77a2-4bc5-aa13-898a97b01b69" containerName="proxy-httpd" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.949600 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b33d5d09-77a2-4bc5-aa13-898a97b01b69" containerName="proxy-httpd" Jan 20 17:10:56 crc kubenswrapper[4558]: E0120 17:10:56.949612 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b33d5d09-77a2-4bc5-aa13-898a97b01b69" containerName="proxy-server" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.949617 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b33d5d09-77a2-4bc5-aa13-898a97b01b69" containerName="proxy-server" Jan 20 17:10:56 crc kubenswrapper[4558]: E0120 17:10:56.949625 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01d65c2e-73b2-4d9e-a659-c384a3a3e2fe" containerName="galera" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.949630 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="01d65c2e-73b2-4d9e-a659-c384a3a3e2fe" containerName="galera" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.949778 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b33d5d09-77a2-4bc5-aa13-898a97b01b69" containerName="proxy-server" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.949791 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="6f1a8225-d143-4ae0-9301-8025f1b639e5" containerName="ovsdbserver-sb" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.949797 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f98dbc6f-bd60-4064-b0d8-947d4080b9ec" containerName="ovsdbserver-nb" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.949804 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="937396e0-a574-4597-bafc-bf1d8a909d3c" containerName="nova-cell1-conductor-conductor" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.949812 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f98dbc6f-bd60-4064-b0d8-947d4080b9ec" containerName="openstack-network-exporter" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.949820 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b33d5d09-77a2-4bc5-aa13-898a97b01b69" containerName="proxy-httpd" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.949829 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="01d65c2e-73b2-4d9e-a659-c384a3a3e2fe" containerName="galera" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.949835 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="6f1a8225-d143-4ae0-9301-8025f1b639e5" containerName="openstack-network-exporter" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.949845 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f4387d7-1e8c-48a5-9176-c8e683469eb0" 
containerName="kube-state-metrics" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.949852 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e30a943a-f753-41fc-adc8-03822aa712c3" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.955661 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/937396e0-a574-4597-bafc-bf1d8a909d3c-kube-api-access-nt869" (OuterVolumeSpecName: "kube-api-access-nt869") pod "937396e0-a574-4597-bafc-bf1d8a909d3c" (UID: "937396e0-a574-4597-bafc-bf1d8a909d3c"). InnerVolumeSpecName "kube-api-access-nt869". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.968424 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-v2krf"] Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.968509 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-c88a-account-create-update-7d8q7" Jan 20 17:10:56 crc kubenswrapper[4558]: I0120 17:10:56.990544 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-v2krf"] Jan 20 17:10:57 crc kubenswrapper[4558]: I0120 17:10:57.000890 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-db-secret" Jan 20 17:10:57 crc kubenswrapper[4558]: I0120 17:10:57.014093 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/937396e0-a574-4597-bafc-bf1d8a909d3c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "937396e0-a574-4597-bafc-bf1d8a909d3c" (UID: "937396e0-a574-4597-bafc-bf1d8a909d3c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:57 crc kubenswrapper[4558]: I0120 17:10:57.029974 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-db-sync-nrcvr"] Jan 20 17:10:57 crc kubenswrapper[4558]: I0120 17:10:57.033103 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nt869\" (UniqueName: \"kubernetes.io/projected/937396e0-a574-4597-bafc-bf1d8a909d3c-kube-api-access-nt869\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:57 crc kubenswrapper[4558]: I0120 17:10:57.033134 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/937396e0-a574-4597-bafc-bf1d8a909d3c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:57 crc kubenswrapper[4558]: I0120 17:10:57.040422 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/937396e0-a574-4597-bafc-bf1d8a909d3c-config-data" (OuterVolumeSpecName: "config-data") pod "937396e0-a574-4597-bafc-bf1d8a909d3c" (UID: "937396e0-a574-4597-bafc-bf1d8a909d3c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:57 crc kubenswrapper[4558]: I0120 17:10:57.069665 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-c88a-account-create-update-7d8q7"] Jan 20 17:10:57 crc kubenswrapper[4558]: I0120 17:10:57.092575 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-db-sync-nrcvr"] Jan 20 17:10:57 crc kubenswrapper[4558]: I0120 17:10:57.103608 4558 generic.go:334] "Generic (PLEG): container finished" podID="b730bb71-b18e-4b97-8d6b-d4ac799d08fd" containerID="5e6cb306bc07e01d6e9ddaee7760a28a523c717659bf64df27406b349acbd1c2" exitCode=0 Jan 20 17:10:57 crc kubenswrapper[4558]: I0120 17:10:57.103676 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-8xt6f" event={"ID":"b730bb71-b18e-4b97-8d6b-d4ac799d08fd","Type":"ContainerDied","Data":"5e6cb306bc07e01d6e9ddaee7760a28a523c717659bf64df27406b349acbd1c2"} Jan 20 17:10:57 crc kubenswrapper[4558]: I0120 17:10:57.103706 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-8xt6f" event={"ID":"b730bb71-b18e-4b97-8d6b-d4ac799d08fd","Type":"ContainerStarted","Data":"f27657a2906f22bd8d283a7350bf78fd6eb7c8e04bd6211cf52429d4b13a3a6c"} Jan 20 17:10:57 crc kubenswrapper[4558]: I0120 17:10:57.105119 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-679bbddb99-dh2h2"] Jan 20 17:10:57 crc kubenswrapper[4558]: I0120 17:10:57.105326 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/keystone-679bbddb99-dh2h2" podUID="c3c7801f-4a15-44ce-8732-4f93b819a7c5" containerName="keystone-api" containerID="cri-o://f9f39e4d701bf54b38ab7a0e52db29f8a7cc6443a7b4a92c71351d6317512b7a" gracePeriod=30 Jan 20 17:10:57 crc kubenswrapper[4558]: I0120 17:10:57.110344 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/cinder-api-0" podUID="ee147895-0bad-4dc1-a348-1be3348a7180" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.1.100:8776/healthcheck\": read tcp 10.217.0.2:36920->10.217.1.100:8776: read: connection reset by peer" Jan 20 17:10:57 crc kubenswrapper[4558]: I0120 17:10:57.135024 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-trdbf\" (UniqueName: \"kubernetes.io/projected/90b46572-b60a-4ef8-ad8c-1e67f51d9df4-kube-api-access-trdbf\") pod \"keystone-c88a-account-create-update-7d8q7\" (UID: \"90b46572-b60a-4ef8-ad8c-1e67f51d9df4\") " pod="openstack-kuttl-tests/keystone-c88a-account-create-update-7d8q7" Jan 20 17:10:57 crc kubenswrapper[4558]: I0120 17:10:57.135087 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/90b46572-b60a-4ef8-ad8c-1e67f51d9df4-operator-scripts\") pod \"keystone-c88a-account-create-update-7d8q7\" (UID: \"90b46572-b60a-4ef8-ad8c-1e67f51d9df4\") " pod="openstack-kuttl-tests/keystone-c88a-account-create-update-7d8q7" Jan 20 17:10:57 crc kubenswrapper[4558]: I0120 17:10:57.135314 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/937396e0-a574-4597-bafc-bf1d8a909d3c-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:57 crc kubenswrapper[4558]: I0120 17:10:57.151201 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-kuttl-tests/swift-proxy-69ddc9f468-cx7lb" event={"ID":"b33d5d09-77a2-4bc5-aa13-898a97b01b69","Type":"ContainerDied","Data":"c01ea335d1161cefaa9a0eb97ec5b14a014013a8001bcaeb2c3ff8dca2420c0f"} Jan 20 17:10:57 crc kubenswrapper[4558]: I0120 17:10:57.151276 4558 scope.go:117] "RemoveContainer" containerID="71bbd70a1c3421db19c774ca5977ad33d9b756863ccd7cb6a69d77b0a5690c71" Jan 20 17:10:57 crc kubenswrapper[4558]: I0120 17:10:57.151475 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-proxy-69ddc9f468-cx7lb" Jan 20 17:10:57 crc kubenswrapper[4558]: I0120 17:10:57.152770 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:10:57 crc kubenswrapper[4558]: I0120 17:10:57.160904 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-db-create-mjhrj"] Jan 20 17:10:57 crc kubenswrapper[4558]: I0120 17:10:57.161839 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-77c1-account-create-update-kgxm8" event={"ID":"5d4337cb-4c83-4c75-b4e4-3c7f16373356","Type":"ContainerStarted","Data":"a422c2655b9f94a5e11c62e74b3109c74608ba1d98a84dc2af758d56fbc5e610"} Jan 20 17:10:57 crc kubenswrapper[4558]: I0120 17:10:57.166066 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-db-create-mjhrj"] Jan 20 17:10:57 crc kubenswrapper[4558]: I0120 17:10:57.179631 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-64d3-account-create-update-h72jc" Jan 20 17:10:57 crc kubenswrapper[4558]: I0120 17:10:57.179922 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-c88a-account-create-update-7d8q7"] Jan 20 17:10:57 crc kubenswrapper[4558]: I0120 17:10:57.180018 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-64d3-account-create-update-h72jc" event={"ID":"32721dbf-d5cf-43d4-9ddc-ad1664d17869","Type":"ContainerDied","Data":"5faed37875136e2ea4cc33be20edaf2f31b9c5255b277e2a41fe3f19325d2706"} Jan 20 17:10:57 crc kubenswrapper[4558]: I0120 17:10:57.183808 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-f6ldh"] Jan 20 17:10:57 crc kubenswrapper[4558]: I0120 17:10:57.185078 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-b308-account-create-update-wlh8l" event={"ID":"12dec63c-ec5c-4327-b1ec-aa9ee39acf82","Type":"ContainerDied","Data":"b45f70a1d5c73fc8cb6894309f08fede043b7cd27440dce23ca6f0fc7454635b"} Jan 20 17:10:57 crc kubenswrapper[4558]: I0120 17:10:57.185127 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-b308-account-create-update-wlh8l" Jan 20 17:10:57 crc kubenswrapper[4558]: I0120 17:10:57.198424 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-0eec-account-create-update-2btsb" event={"ID":"99812c8d-c18f-48d5-a063-deb9462a904b","Type":"ContainerDied","Data":"117bde14f91f654a5ebb943394d43661da0ffe640fd4cbb549b06da5dd1009d4"} Jan 20 17:10:57 crc kubenswrapper[4558]: I0120 17:10:57.198507 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-0eec-account-create-update-2btsb" Jan 20 17:10:57 crc kubenswrapper[4558]: I0120 17:10:57.219047 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-6a24-account-create-update-cvtmm" event={"ID":"9b8664b1-e050-479b-b965-aed1a70e4bd1","Type":"ContainerDied","Data":"e18be31df081cf061af134c7daa18656b9fd77f54f32e06363c00698770f7db8"} Jan 20 17:10:57 crc kubenswrapper[4558]: I0120 17:10:57.220196 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-6a24-account-create-update-cvtmm" Jan 20 17:10:57 crc kubenswrapper[4558]: I0120 17:10:57.237617 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"4f4387d7-1e8c-48a5-9176-c8e683469eb0","Type":"ContainerDied","Data":"d5c38e0a5b6888725592c57432e7cf01c5c8b9b0b4df4bf9a2bf7693c93692e8"} Jan 20 17:10:57 crc kubenswrapper[4558]: I0120 17:10:57.237714 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:10:57 crc kubenswrapper[4558]: I0120 17:10:57.241735 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-trdbf\" (UniqueName: \"kubernetes.io/projected/90b46572-b60a-4ef8-ad8c-1e67f51d9df4-kube-api-access-trdbf\") pod \"keystone-c88a-account-create-update-7d8q7\" (UID: \"90b46572-b60a-4ef8-ad8c-1e67f51d9df4\") " pod="openstack-kuttl-tests/keystone-c88a-account-create-update-7d8q7" Jan 20 17:10:57 crc kubenswrapper[4558]: I0120 17:10:57.241773 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/90b46572-b60a-4ef8-ad8c-1e67f51d9df4-operator-scripts\") pod \"keystone-c88a-account-create-update-7d8q7\" (UID: \"90b46572-b60a-4ef8-ad8c-1e67f51d9df4\") " pod="openstack-kuttl-tests/keystone-c88a-account-create-update-7d8q7" Jan 20 17:10:57 crc kubenswrapper[4558]: E0120 17:10:57.241890 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 17:10:57 crc kubenswrapper[4558]: E0120 17:10:57.241932 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/90b46572-b60a-4ef8-ad8c-1e67f51d9df4-operator-scripts podName:90b46572-b60a-4ef8-ad8c-1e67f51d9df4 nodeName:}" failed. No retries permitted until 2026-01-20 17:10:57.741918674 +0000 UTC m=+1751.502256642 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/90b46572-b60a-4ef8-ad8c-1e67f51d9df4-operator-scripts") pod "keystone-c88a-account-create-update-7d8q7" (UID: "90b46572-b60a-4ef8-ad8c-1e67f51d9df4") : configmap "openstack-scripts" not found Jan 20 17:10:57 crc kubenswrapper[4558]: E0120 17:10:57.242005 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-memcached-svc: secret "cert-memcached-svc" not found Jan 20 17:10:57 crc kubenswrapper[4558]: E0120 17:10:57.242060 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4406b36c-adb2-4042-bc92-9efed5a43942-memcached-tls-certs podName:4406b36c-adb2-4042-bc92-9efed5a43942 nodeName:}" failed. No retries permitted until 2026-01-20 17:10:58.242042297 +0000 UTC m=+1752.002380265 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "memcached-tls-certs" (UniqueName: "kubernetes.io/secret/4406b36c-adb2-4042-bc92-9efed5a43942-memcached-tls-certs") pod "memcached-0" (UID: "4406b36c-adb2-4042-bc92-9efed5a43942") : secret "cert-memcached-svc" not found Jan 20 17:10:57 crc kubenswrapper[4558]: E0120 17:10:57.242091 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:10:57 crc kubenswrapper[4558]: E0120 17:10:57.242112 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4406b36c-adb2-4042-bc92-9efed5a43942-combined-ca-bundle podName:4406b36c-adb2-4042-bc92-9efed5a43942 nodeName:}" failed. No retries permitted until 2026-01-20 17:10:58.242104905 +0000 UTC m=+1752.002442862 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/4406b36c-adb2-4042-bc92-9efed5a43942-combined-ca-bundle") pod "memcached-0" (UID: "4406b36c-adb2-4042-bc92-9efed5a43942") : secret "combined-ca-bundle" not found Jan 20 17:10:57 crc kubenswrapper[4558]: I0120 17:10:57.245227 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"01d65c2e-73b2-4d9e-a659-c384a3a3e2fe","Type":"ContainerDied","Data":"89b649c708086702888eb623a2f0f213c0d91f2609ba354b60a11cf2432bbb05"} Jan 20 17:10:57 crc kubenswrapper[4558]: I0120 17:10:57.245300 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:10:57 crc kubenswrapper[4558]: E0120 17:10:57.247416 4558 projected.go:194] Error preparing data for projected volume kube-api-access-trdbf for pod openstack-kuttl-tests/keystone-c88a-account-create-update-7d8q7: failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 17:10:57 crc kubenswrapper[4558]: E0120 17:10:57.247502 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/90b46572-b60a-4ef8-ad8c-1e67f51d9df4-kube-api-access-trdbf podName:90b46572-b60a-4ef8-ad8c-1e67f51d9df4 nodeName:}" failed. No retries permitted until 2026-01-20 17:10:57.747481085 +0000 UTC m=+1751.507819052 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-trdbf" (UniqueName: "kubernetes.io/projected/90b46572-b60a-4ef8-ad8c-1e67f51d9df4-kube-api-access-trdbf") pod "keystone-c88a-account-create-update-7d8q7" (UID: "90b46572-b60a-4ef8-ad8c-1e67f51d9df4") : failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 17:10:57 crc kubenswrapper[4558]: I0120 17:10:57.253748 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"e30a943a-f753-41fc-adc8-03822aa712c3","Type":"ContainerDied","Data":"f29ed6548455a86b575a5ffb74017f4988c6a7e20546f6a3c1688ccaa4b7fecd"} Jan 20 17:10:57 crc kubenswrapper[4558]: I0120 17:10:57.253848 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:10:57 crc kubenswrapper[4558]: I0120 17:10:57.264780 4558 generic.go:334] "Generic (PLEG): container finished" podID="937396e0-a574-4597-bafc-bf1d8a909d3c" containerID="51dda5ae1f21ef092b384f0127be32effe565acdb2160b9b7bd5bc4ac8bc0278" exitCode=0 Jan 20 17:10:57 crc kubenswrapper[4558]: I0120 17:10:57.264850 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"937396e0-a574-4597-bafc-bf1d8a909d3c","Type":"ContainerDied","Data":"51dda5ae1f21ef092b384f0127be32effe565acdb2160b9b7bd5bc4ac8bc0278"} Jan 20 17:10:57 crc kubenswrapper[4558]: I0120 17:10:57.264876 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"937396e0-a574-4597-bafc-bf1d8a909d3c","Type":"ContainerDied","Data":"81a3a14e5559a9514153dd85cad2368d4cf799ec843e60f45201b7c583751eeb"} Jan 20 17:10:57 crc kubenswrapper[4558]: I0120 17:10:57.267458 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:10:57 crc kubenswrapper[4558]: I0120 17:10:57.269150 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/memcached-0" podUID="4406b36c-adb2-4042-bc92-9efed5a43942" containerName="memcached" containerID="cri-o://133261d6818bfb9dc626eb1b737b442170f15e8780d606e9ddad15e359c16063" gracePeriod=30 Jan 20 17:10:57 crc kubenswrapper[4558]: I0120 17:10:57.270155 4558 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openstack-kuttl-tests/root-account-create-update-f6ldh" secret="" err="secret \"galera-openstack-dockercfg-8mnqk\" not found" Jan 20 17:10:57 crc kubenswrapper[4558]: I0120 17:10:57.270203 4558 scope.go:117] "RemoveContainer" containerID="30050fc936fb62cb7339a9a80b961e2d364426635e1598a842e2fb3d470652c2" Jan 20 17:10:57 crc kubenswrapper[4558]: E0120 17:10:57.271971 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CrashLoopBackOff: \"back-off 10s restarting failed container=mariadb-account-create-update pod=root-account-create-update-f6ldh_openstack-kuttl-tests(931d9a7a-aa04-42c3-a276-e71388a8b9d3)\"" pod="openstack-kuttl-tests/root-account-create-update-f6ldh" podUID="931d9a7a-aa04-42c3-a276-e71388a8b9d3" Jan 20 17:10:57 crc kubenswrapper[4558]: I0120 17:10:57.338610 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/openstack-galera-0" podUID="5558aeea-2a7f-4654-a261-a2902c9434e0" containerName="galera" containerID="cri-o://0334d5f9b2258c2ff98366283a9bd0e77fed9de54416a50e2a8bd36057e911d2" gracePeriod=30 Jan 20 17:10:57 crc kubenswrapper[4558]: E0120 17:10:57.446940 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 17:10:57 crc kubenswrapper[4558]: E0120 17:10:57.447005 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/931d9a7a-aa04-42c3-a276-e71388a8b9d3-operator-scripts podName:931d9a7a-aa04-42c3-a276-e71388a8b9d3 nodeName:}" failed. No retries permitted until 2026-01-20 17:10:57.946989591 +0000 UTC m=+1751.707327558 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/931d9a7a-aa04-42c3-a276-e71388a8b9d3-operator-scripts") pod "root-account-create-update-f6ldh" (UID: "931d9a7a-aa04-42c3-a276-e71388a8b9d3") : configmap "openstack-scripts" not found Jan 20 17:10:57 crc kubenswrapper[4558]: I0120 17:10:57.465317 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/barbican-api-757fb55df8-9qzm2" podUID="87001341-5a74-4796-a332-6d57b2cf11c9" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.1.99:9311/healthcheck\": read tcp 10.217.0.2:49960->10.217.1.99:9311: read: connection reset by peer" Jan 20 17:10:57 crc kubenswrapper[4558]: I0120 17:10:57.465674 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/barbican-api-757fb55df8-9qzm2" podUID="87001341-5a74-4796-a332-6d57b2cf11c9" containerName="barbican-api" probeResult="failure" output="Get \"https://10.217.1.99:9311/healthcheck\": read tcp 10.217.0.2:49948->10.217.1.99:9311: read: connection reset by peer" Jan 20 17:10:57 crc kubenswrapper[4558]: E0120 17:10:57.577852 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 14a9e266c3e7805021dc147f925c01b6b1d5dec0c4a69dce9222790ed28a80b1 is running failed: container process not found" containerID="14a9e266c3e7805021dc147f925c01b6b1d5dec0c4a69dce9222790ed28a80b1" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Jan 20 17:10:57 crc kubenswrapper[4558]: E0120 17:10:57.579212 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 14a9e266c3e7805021dc147f925c01b6b1d5dec0c4a69dce9222790ed28a80b1 is running failed: container process not found" containerID="14a9e266c3e7805021dc147f925c01b6b1d5dec0c4a69dce9222790ed28a80b1" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Jan 20 17:10:57 crc kubenswrapper[4558]: E0120 17:10:57.579481 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 14a9e266c3e7805021dc147f925c01b6b1d5dec0c4a69dce9222790ed28a80b1 is running failed: container process not found" containerID="14a9e266c3e7805021dc147f925c01b6b1d5dec0c4a69dce9222790ed28a80b1" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Jan 20 17:10:57 crc kubenswrapper[4558]: E0120 17:10:57.579505 4558 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 14a9e266c3e7805021dc147f925c01b6b1d5dec0c4a69dce9222790ed28a80b1 is running failed: container process not found" probeType="Readiness" pod="openstack-kuttl-tests/ovn-northd-0" podUID="c6a5b342-eaf2-408c-828e-9bc0bf10d09e" containerName="ovn-northd" Jan 20 17:10:57 crc kubenswrapper[4558]: I0120 17:10:57.651600 4558 scope.go:117] "RemoveContainer" containerID="65dc4b617123648766af244b88586c5391acaca630b06a308276272002cf2765" Jan 20 17:10:57 crc kubenswrapper[4558]: I0120 17:10:57.723385 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-77c1-account-create-update-kgxm8" Jan 20 17:10:57 crc kubenswrapper[4558]: E0120 17:10:57.754416 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[kube-api-access-trdbf operator-scripts], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack-kuttl-tests/keystone-c88a-account-create-update-7d8q7" podUID="90b46572-b60a-4ef8-ad8c-1e67f51d9df4" Jan 20 17:10:57 crc kubenswrapper[4558]: I0120 17:10:57.756793 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-trdbf\" (UniqueName: \"kubernetes.io/projected/90b46572-b60a-4ef8-ad8c-1e67f51d9df4-kube-api-access-trdbf\") pod \"keystone-c88a-account-create-update-7d8q7\" (UID: \"90b46572-b60a-4ef8-ad8c-1e67f51d9df4\") " pod="openstack-kuttl-tests/keystone-c88a-account-create-update-7d8q7" Jan 20 17:10:57 crc kubenswrapper[4558]: I0120 17:10:57.756841 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/90b46572-b60a-4ef8-ad8c-1e67f51d9df4-operator-scripts\") pod \"keystone-c88a-account-create-update-7d8q7\" (UID: \"90b46572-b60a-4ef8-ad8c-1e67f51d9df4\") " pod="openstack-kuttl-tests/keystone-c88a-account-create-update-7d8q7" Jan 20 17:10:57 crc kubenswrapper[4558]: E0120 17:10:57.757080 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 17:10:57 crc kubenswrapper[4558]: E0120 17:10:57.757130 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/90b46572-b60a-4ef8-ad8c-1e67f51d9df4-operator-scripts podName:90b46572-b60a-4ef8-ad8c-1e67f51d9df4 nodeName:}" failed. No retries permitted until 2026-01-20 17:10:58.757114627 +0000 UTC m=+1752.517452595 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/90b46572-b60a-4ef8-ad8c-1e67f51d9df4-operator-scripts") pod "keystone-c88a-account-create-update-7d8q7" (UID: "90b46572-b60a-4ef8-ad8c-1e67f51d9df4") : configmap "openstack-scripts" not found Jan 20 17:10:57 crc kubenswrapper[4558]: E0120 17:10:57.771930 4558 projected.go:194] Error preparing data for projected volume kube-api-access-trdbf for pod openstack-kuttl-tests/keystone-c88a-account-create-update-7d8q7: failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 17:10:57 crc kubenswrapper[4558]: E0120 17:10:57.771982 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/90b46572-b60a-4ef8-ad8c-1e67f51d9df4-kube-api-access-trdbf podName:90b46572-b60a-4ef8-ad8c-1e67f51d9df4 nodeName:}" failed. No retries permitted until 2026-01-20 17:10:58.771967398 +0000 UTC m=+1752.532305365 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-trdbf" (UniqueName: "kubernetes.io/projected/90b46572-b60a-4ef8-ad8c-1e67f51d9df4-kube-api-access-trdbf") pod "keystone-c88a-account-create-update-7d8q7" (UID: "90b46572-b60a-4ef8-ad8c-1e67f51d9df4") : failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 17:10:57 crc kubenswrapper[4558]: I0120 17:10:57.826220 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:10:57 crc kubenswrapper[4558]: I0120 17:10:57.845239 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:10:57 crc kubenswrapper[4558]: I0120 17:10:57.858329 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-txl8j\" (UniqueName: \"kubernetes.io/projected/5d4337cb-4c83-4c75-b4e4-3c7f16373356-kube-api-access-txl8j\") pod \"5d4337cb-4c83-4c75-b4e4-3c7f16373356\" (UID: \"5d4337cb-4c83-4c75-b4e4-3c7f16373356\") " Jan 20 17:10:57 crc kubenswrapper[4558]: I0120 17:10:57.858419 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5d4337cb-4c83-4c75-b4e4-3c7f16373356-operator-scripts\") pod \"5d4337cb-4c83-4c75-b4e4-3c7f16373356\" (UID: \"5d4337cb-4c83-4c75-b4e4-3c7f16373356\") " Jan 20 17:10:57 crc kubenswrapper[4558]: I0120 17:10:57.863407 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5d4337cb-4c83-4c75-b4e4-3c7f16373356-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "5d4337cb-4c83-4c75-b4e4-3c7f16373356" (UID: "5d4337cb-4c83-4c75-b4e4-3c7f16373356"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:10:57 crc kubenswrapper[4558]: I0120 17:10:57.871380 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-b308-account-create-update-wlh8l"] Jan 20 17:10:57 crc kubenswrapper[4558]: I0120 17:10:57.880127 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5d4337cb-4c83-4c75-b4e4-3c7f16373356-kube-api-access-txl8j" (OuterVolumeSpecName: "kube-api-access-txl8j") pod "5d4337cb-4c83-4c75-b4e4-3c7f16373356" (UID: "5d4337cb-4c83-4c75-b4e4-3c7f16373356"). InnerVolumeSpecName "kube-api-access-txl8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:10:57 crc kubenswrapper[4558]: I0120 17:10:57.880614 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-b308-account-create-update-wlh8l"] Jan 20 17:10:57 crc kubenswrapper[4558]: I0120 17:10:57.913758 4558 scope.go:117] "RemoveContainer" containerID="38bf3ef8cd80bbc8c77c61c653d79ffe5710ff36ee6b494a5fb1186c8b5147a9" Jan 20 17:10:57 crc kubenswrapper[4558]: I0120 17:10:57.954324 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:10:57 crc kubenswrapper[4558]: E0120 17:10:57.976061 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 17:10:57 crc kubenswrapper[4558]: E0120 17:10:57.976192 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/931d9a7a-aa04-42c3-a276-e71388a8b9d3-operator-scripts podName:931d9a7a-aa04-42c3-a276-e71388a8b9d3 nodeName:}" failed. 
No retries permitted until 2026-01-20 17:10:58.97613237 +0000 UTC m=+1752.736470338 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/931d9a7a-aa04-42c3-a276-e71388a8b9d3-operator-scripts") pod "root-account-create-update-f6ldh" (UID: "931d9a7a-aa04-42c3-a276-e71388a8b9d3") : configmap "openstack-scripts" not found Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.009518 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-txl8j\" (UniqueName: \"kubernetes.io/projected/5d4337cb-4c83-4c75-b4e4-3c7f16373356-kube-api-access-txl8j\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.009570 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5d4337cb-4c83-4c75-b4e4-3c7f16373356-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.014471 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/nova-metadata-0" podUID="5560f6a4-fc05-4d97-8496-b87804dfab99" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.1.141:8775/\": read tcp 10.217.0.2:50958->10.217.1.141:8775: read: connection reset by peer" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.015187 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/nova-metadata-0" podUID="5560f6a4-fc05-4d97-8496-b87804dfab99" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.1.141:8775/\": read tcp 10.217.0.2:50946->10.217.1.141:8775: read: connection reset by peer" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.017894 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.025284 4558 scope.go:117] "RemoveContainer" containerID="7f4378e692fe617442cc1cc02c71c9e5beb567c1bbfc8f7fa7801b7b523d4e8b" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.031268 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.037250 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.075876 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-proxy-69ddc9f468-cx7lb"] Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.086705 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/swift-proxy-69ddc9f468-cx7lb"] Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.094749 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-64d3-account-create-update-h72jc"] Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.101200 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/placement-64d3-account-create-update-h72jc"] Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.124645 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-0eec-account-create-update-2btsb"] Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.138258 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-0eec-account-create-update-2btsb"] Jan 20 17:10:58 
crc kubenswrapper[4558]: I0120 17:10:58.146506 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-6a24-account-create-update-cvtmm"] Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.154233 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-6a24-account-create-update-cvtmm"] Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.157382 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.162781 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.223016 4558 scope.go:117] "RemoveContainer" containerID="f02cae86071c5890d7fc01b166be5c563da48c0b99d44dd72907dfa48eedd578" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.285905 4558 generic.go:334] "Generic (PLEG): container finished" podID="ee147895-0bad-4dc1-a348-1be3348a7180" containerID="8465d6a9372429cd9148909a838d7378903121f9ff9f0fb73700c98d620d85b1" exitCode=0 Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.286003 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"ee147895-0bad-4dc1-a348-1be3348a7180","Type":"ContainerDied","Data":"8465d6a9372429cd9148909a838d7378903121f9ff9f0fb73700c98d620d85b1"} Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.286048 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"ee147895-0bad-4dc1-a348-1be3348a7180","Type":"ContainerDied","Data":"c8c446e659096745b9b7275e3a8ca45d6fa69e352b0452f3110e64f6ec71e609"} Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.286068 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c8c446e659096745b9b7275e3a8ca45d6fa69e352b0452f3110e64f6ec71e609" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.288465 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovn-northd-0_c6a5b342-eaf2-408c-828e-9bc0bf10d09e/ovn-northd/0.log" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.288509 4558 generic.go:334] "Generic (PLEG): container finished" podID="c6a5b342-eaf2-408c-828e-9bc0bf10d09e" containerID="14a9e266c3e7805021dc147f925c01b6b1d5dec0c4a69dce9222790ed28a80b1" exitCode=139 Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.288546 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"c6a5b342-eaf2-408c-828e-9bc0bf10d09e","Type":"ContainerDied","Data":"14a9e266c3e7805021dc147f925c01b6b1d5dec0c4a69dce9222790ed28a80b1"} Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.290231 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-77c1-account-create-update-kgxm8" event={"ID":"5d4337cb-4c83-4c75-b4e4-3c7f16373356","Type":"ContainerDied","Data":"a422c2655b9f94a5e11c62e74b3109c74608ba1d98a84dc2af758d56fbc5e610"} Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.290620 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-77c1-account-create-update-kgxm8" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.301240 4558 generic.go:334] "Generic (PLEG): container finished" podID="25737ece-fb84-4543-8c3a-94ffa7b8f095" containerID="611dd2ec139015a415381aaa856813e6abfb158b3d9c4ce578651f256b7bef8b" exitCode=0 Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.301305 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"25737ece-fb84-4543-8c3a-94ffa7b8f095","Type":"ContainerDied","Data":"611dd2ec139015a415381aaa856813e6abfb158b3d9c4ce578651f256b7bef8b"} Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.309821 4558 generic.go:334] "Generic (PLEG): container finished" podID="1ec5ef72-7c69-4be3-974d-c020ddfea4f7" containerID="f1e7bbc89f40090d398e96e1f8922de73e690cb868b93535a373edcd14aedc39" exitCode=0 Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.309890 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"1ec5ef72-7c69-4be3-974d-c020ddfea4f7","Type":"ContainerDied","Data":"f1e7bbc89f40090d398e96e1f8922de73e690cb868b93535a373edcd14aedc39"} Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.314303 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-b8dd4f99b-9gg5b" Jan 20 17:10:58 crc kubenswrapper[4558]: E0120 17:10:58.316025 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-memcached-svc: secret "cert-memcached-svc" not found Jan 20 17:10:58 crc kubenswrapper[4558]: E0120 17:10:58.316105 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4406b36c-adb2-4042-bc92-9efed5a43942-memcached-tls-certs podName:4406b36c-adb2-4042-bc92-9efed5a43942 nodeName:}" failed. No retries permitted until 2026-01-20 17:11:00.316082792 +0000 UTC m=+1754.076420759 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "memcached-tls-certs" (UniqueName: "kubernetes.io/secret/4406b36c-adb2-4042-bc92-9efed5a43942-memcached-tls-certs") pod "memcached-0" (UID: "4406b36c-adb2-4042-bc92-9efed5a43942") : secret "cert-memcached-svc" not found Jan 20 17:10:58 crc kubenswrapper[4558]: E0120 17:10:58.316197 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.316197 4558 generic.go:334] "Generic (PLEG): container finished" podID="87001341-5a74-4796-a332-6d57b2cf11c9" containerID="67a4a2b743f3eaffa244dd8f012af0a3fdc18477dc3e1252ee8aced329b34fda" exitCode=0 Jan 20 17:10:58 crc kubenswrapper[4558]: E0120 17:10:58.316224 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4406b36c-adb2-4042-bc92-9efed5a43942-combined-ca-bundle podName:4406b36c-adb2-4042-bc92-9efed5a43942 nodeName:}" failed. No retries permitted until 2026-01-20 17:11:00.316217495 +0000 UTC m=+1754.076555462 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/4406b36c-adb2-4042-bc92-9efed5a43942-combined-ca-bundle") pod "memcached-0" (UID: "4406b36c-adb2-4042-bc92-9efed5a43942") : secret "combined-ca-bundle" not found Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.316308 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-757fb55df8-9qzm2" event={"ID":"87001341-5a74-4796-a332-6d57b2cf11c9","Type":"ContainerDied","Data":"67a4a2b743f3eaffa244dd8f012af0a3fdc18477dc3e1252ee8aced329b34fda"} Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.325295 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:10:58 crc kubenswrapper[4558]: E0120 17:10:58.326076 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d149c52b2c9aabf55ad4004fb586734f8d22f15451b6f01163da779feaa3c048 is running failed: container process not found" containerID="d149c52b2c9aabf55ad4004fb586734f8d22f15451b6f01163da779feaa3c048" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:10:58 crc kubenswrapper[4558]: E0120 17:10:58.326446 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d149c52b2c9aabf55ad4004fb586734f8d22f15451b6f01163da779feaa3c048 is running failed: container process not found" containerID="d149c52b2c9aabf55ad4004fb586734f8d22f15451b6f01163da779feaa3c048" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.326651 4558 generic.go:334] "Generic (PLEG): container finished" podID="043476ec-5392-41af-970c-89d20b6b30a5" containerID="d1ba5118e075c9076247767e8ee5b5f7ddae3519d2f382217f34408cb902f7ec" exitCode=0 Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.326711 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-b8dd4f99b-9gg5b" event={"ID":"043476ec-5392-41af-970c-89d20b6b30a5","Type":"ContainerDied","Data":"d1ba5118e075c9076247767e8ee5b5f7ddae3519d2f382217f34408cb902f7ec"} Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.326729 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-b8dd4f99b-9gg5b" event={"ID":"043476ec-5392-41af-970c-89d20b6b30a5","Type":"ContainerDied","Data":"4c84428a80cbfe98f47d97ac5e8c97645f3b7939e2b685b5e8bf9d6bc5ffd8f9"} Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.326799 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-b8dd4f99b-9gg5b" Jan 20 17:10:58 crc kubenswrapper[4558]: E0120 17:10:58.327760 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d149c52b2c9aabf55ad4004fb586734f8d22f15451b6f01163da779feaa3c048 is running failed: container process not found" containerID="d149c52b2c9aabf55ad4004fb586734f8d22f15451b6f01163da779feaa3c048" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:10:58 crc kubenswrapper[4558]: E0120 17:10:58.327781 4558 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of d149c52b2c9aabf55ad4004fb586734f8d22f15451b6f01163da779feaa3c048 is running failed: container process not found" probeType="Readiness" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="cf954b15-e163-4f99-8c1b-5e04d06666bc" containerName="nova-cell0-conductor-conductor" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.329448 4558 generic.go:334] "Generic (PLEG): container finished" podID="cf954b15-e163-4f99-8c1b-5e04d06666bc" containerID="d149c52b2c9aabf55ad4004fb586734f8d22f15451b6f01163da779feaa3c048" exitCode=0 Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.329520 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"cf954b15-e163-4f99-8c1b-5e04d06666bc","Type":"ContainerDied","Data":"d149c52b2c9aabf55ad4004fb586734f8d22f15451b6f01163da779feaa3c048"} Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.339030 4558 generic.go:334] "Generic (PLEG): container finished" podID="5560f6a4-fc05-4d97-8496-b87804dfab99" containerID="c1a2c6fb29683dd75096376810ff0f53fdd799813b29ba97b56d0f8d5b5eac42" exitCode=0 Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.339192 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"5560f6a4-fc05-4d97-8496-b87804dfab99","Type":"ContainerDied","Data":"c1a2c6fb29683dd75096376810ff0f53fdd799813b29ba97b56d0f8d5b5eac42"} Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.348622 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-8xt6f" event={"ID":"b730bb71-b18e-4b97-8d6b-d4ac799d08fd","Type":"ContainerStarted","Data":"ce47b8c8a1a63f47017e4025110e0953002e31faa417215a7f430725acf9a8eb"} Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.348932 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-8xt6f" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.353898 4558 scope.go:117] "RemoveContainer" containerID="4009b12e702e655b643aa02ca67d095804a9ddf483a6488a64d2205988f9a03a" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.356740 4558 generic.go:334] "Generic (PLEG): container finished" podID="95cf677f-e744-4dad-acfb-507100d2ea14" containerID="b8d7b4e681364253444a1c0248fab2e3349b056b8048a2f9443b427211b7dd9d" exitCode=0 Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.356797 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"95cf677f-e744-4dad-acfb-507100d2ea14","Type":"ContainerDied","Data":"b8d7b4e681364253444a1c0248fab2e3349b056b8048a2f9443b427211b7dd9d"} Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.358056 4558 generic.go:334] "Generic (PLEG): container finished" 
podID="ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1" containerID="013d2b8016fc21c333aa7e40dd675884b8161eccaa5a0b7a7ee016c3b3fef132" exitCode=0 Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.358102 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1","Type":"ContainerDied","Data":"013d2b8016fc21c333aa7e40dd675884b8161eccaa5a0b7a7ee016c3b3fef132"} Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.360409 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-77c1-account-create-update-kgxm8"] Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.360944 4558 generic.go:334] "Generic (PLEG): container finished" podID="a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d" containerID="fcae9d3478f16c4ae798638653ce51dee0f0bdc70fd0867d5014ba53378f69ef" exitCode=0 Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.360985 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d","Type":"ContainerDied","Data":"fcae9d3478f16c4ae798638653ce51dee0f0bdc70fd0867d5014ba53378f69ef"} Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.362876 4558 generic.go:334] "Generic (PLEG): container finished" podID="209ec6bc-170f-4c8a-ad7e-e441ace95d1b" containerID="63f037fefd6327562c7c2f8bf9caa685258bcc48a23627261cd9883239f176f0" exitCode=0 Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.362917 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-86d9c4d694-78ch5" event={"ID":"209ec6bc-170f-4c8a-ad7e-e441ace95d1b","Type":"ContainerDied","Data":"63f037fefd6327562c7c2f8bf9caa685258bcc48a23627261cd9883239f176f0"} Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.363998 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-c88a-account-create-update-7d8q7" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.364072 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell0-77c1-account-create-update-kgxm8"] Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.394504 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-c88a-account-create-update-7d8q7" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.416391 4558 scope.go:117] "RemoveContainer" containerID="51dda5ae1f21ef092b384f0127be32effe565acdb2160b9b7bd5bc4ac8bc0278" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.417104 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ee147895-0bad-4dc1-a348-1be3348a7180-logs\") pod \"ee147895-0bad-4dc1-a348-1be3348a7180\" (UID: \"ee147895-0bad-4dc1-a348-1be3348a7180\") " Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.417142 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee147895-0bad-4dc1-a348-1be3348a7180-combined-ca-bundle\") pod \"ee147895-0bad-4dc1-a348-1be3348a7180\" (UID: \"ee147895-0bad-4dc1-a348-1be3348a7180\") " Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.417243 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/043476ec-5392-41af-970c-89d20b6b30a5-logs\") pod \"043476ec-5392-41af-970c-89d20b6b30a5\" (UID: \"043476ec-5392-41af-970c-89d20b6b30a5\") " Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.417309 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dpg58\" (UniqueName: \"kubernetes.io/projected/ee147895-0bad-4dc1-a348-1be3348a7180-kube-api-access-dpg58\") pod \"ee147895-0bad-4dc1-a348-1be3348a7180\" (UID: \"ee147895-0bad-4dc1-a348-1be3348a7180\") " Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.417335 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/043476ec-5392-41af-970c-89d20b6b30a5-config-data\") pod \"043476ec-5392-41af-970c-89d20b6b30a5\" (UID: \"043476ec-5392-41af-970c-89d20b6b30a5\") " Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.417358 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/043476ec-5392-41af-970c-89d20b6b30a5-scripts\") pod \"043476ec-5392-41af-970c-89d20b6b30a5\" (UID: \"043476ec-5392-41af-970c-89d20b6b30a5\") " Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.417394 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ee147895-0bad-4dc1-a348-1be3348a7180-public-tls-certs\") pod \"ee147895-0bad-4dc1-a348-1be3348a7180\" (UID: \"ee147895-0bad-4dc1-a348-1be3348a7180\") " Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.417431 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/043476ec-5392-41af-970c-89d20b6b30a5-combined-ca-bundle\") pod \"043476ec-5392-41af-970c-89d20b6b30a5\" (UID: \"043476ec-5392-41af-970c-89d20b6b30a5\") " Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.417454 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/043476ec-5392-41af-970c-89d20b6b30a5-internal-tls-certs\") pod \"043476ec-5392-41af-970c-89d20b6b30a5\" (UID: \"043476ec-5392-41af-970c-89d20b6b30a5\") " Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.417512 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ee147895-0bad-4dc1-a348-1be3348a7180-scripts\") pod \"ee147895-0bad-4dc1-a348-1be3348a7180\" (UID: \"ee147895-0bad-4dc1-a348-1be3348a7180\") " Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.417553 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ee147895-0bad-4dc1-a348-1be3348a7180-config-data\") pod \"ee147895-0bad-4dc1-a348-1be3348a7180\" (UID: \"ee147895-0bad-4dc1-a348-1be3348a7180\") " Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.417594 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tp4jm\" (UniqueName: \"kubernetes.io/projected/043476ec-5392-41af-970c-89d20b6b30a5-kube-api-access-tp4jm\") pod \"043476ec-5392-41af-970c-89d20b6b30a5\" (UID: \"043476ec-5392-41af-970c-89d20b6b30a5\") " Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.417633 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ee147895-0bad-4dc1-a348-1be3348a7180-config-data-custom\") pod \"ee147895-0bad-4dc1-a348-1be3348a7180\" (UID: \"ee147895-0bad-4dc1-a348-1be3348a7180\") " Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.417669 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/043476ec-5392-41af-970c-89d20b6b30a5-public-tls-certs\") pod \"043476ec-5392-41af-970c-89d20b6b30a5\" (UID: \"043476ec-5392-41af-970c-89d20b6b30a5\") " Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.417689 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ee147895-0bad-4dc1-a348-1be3348a7180-internal-tls-certs\") pod \"ee147895-0bad-4dc1-a348-1be3348a7180\" (UID: \"ee147895-0bad-4dc1-a348-1be3348a7180\") " Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.417728 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ee147895-0bad-4dc1-a348-1be3348a7180-etc-machine-id\") pod \"ee147895-0bad-4dc1-a348-1be3348a7180\" (UID: \"ee147895-0bad-4dc1-a348-1be3348a7180\") " Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.418235 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ee147895-0bad-4dc1-a348-1be3348a7180-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "ee147895-0bad-4dc1-a348-1be3348a7180" (UID: "ee147895-0bad-4dc1-a348-1be3348a7180"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.418672 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ee147895-0bad-4dc1-a348-1be3348a7180-logs" (OuterVolumeSpecName: "logs") pod "ee147895-0bad-4dc1-a348-1be3348a7180" (UID: "ee147895-0bad-4dc1-a348-1be3348a7180"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.429570 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee147895-0bad-4dc1-a348-1be3348a7180-scripts" (OuterVolumeSpecName: "scripts") pod "ee147895-0bad-4dc1-a348-1be3348a7180" (UID: "ee147895-0bad-4dc1-a348-1be3348a7180"). 
InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.435042 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/043476ec-5392-41af-970c-89d20b6b30a5-kube-api-access-tp4jm" (OuterVolumeSpecName: "kube-api-access-tp4jm") pod "043476ec-5392-41af-970c-89d20b6b30a5" (UID: "043476ec-5392-41af-970c-89d20b6b30a5"). InnerVolumeSpecName "kube-api-access-tp4jm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.438410 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/043476ec-5392-41af-970c-89d20b6b30a5-logs" (OuterVolumeSpecName: "logs") pod "043476ec-5392-41af-970c-89d20b6b30a5" (UID: "043476ec-5392-41af-970c-89d20b6b30a5"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.469972 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee147895-0bad-4dc1-a348-1be3348a7180-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "ee147895-0bad-4dc1-a348-1be3348a7180" (UID: "ee147895-0bad-4dc1-a348-1be3348a7180"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.473530 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-8xt6f" podStartSLOduration=5.47350664 podStartE2EDuration="5.47350664s" podCreationTimestamp="2026-01-20 17:10:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:10:58.450646603 +0000 UTC m=+1752.210984570" watchObservedRunningTime="2026-01-20 17:10:58.47350664 +0000 UTC m=+1752.233844607" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.490192 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/043476ec-5392-41af-970c-89d20b6b30a5-scripts" (OuterVolumeSpecName: "scripts") pod "043476ec-5392-41af-970c-89d20b6b30a5" (UID: "043476ec-5392-41af-970c-89d20b6b30a5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.494436 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ee147895-0bad-4dc1-a348-1be3348a7180-kube-api-access-dpg58" (OuterVolumeSpecName: "kube-api-access-dpg58") pod "ee147895-0bad-4dc1-a348-1be3348a7180" (UID: "ee147895-0bad-4dc1-a348-1be3348a7180"). InnerVolumeSpecName "kube-api-access-dpg58". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.499377 4558 scope.go:117] "RemoveContainer" containerID="51dda5ae1f21ef092b384f0127be32effe565acdb2160b9b7bd5bc4ac8bc0278" Jan 20 17:10:58 crc kubenswrapper[4558]: E0120 17:10:58.502941 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"51dda5ae1f21ef092b384f0127be32effe565acdb2160b9b7bd5bc4ac8bc0278\": container with ID starting with 51dda5ae1f21ef092b384f0127be32effe565acdb2160b9b7bd5bc4ac8bc0278 not found: ID does not exist" containerID="51dda5ae1f21ef092b384f0127be32effe565acdb2160b9b7bd5bc4ac8bc0278" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.502993 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"51dda5ae1f21ef092b384f0127be32effe565acdb2160b9b7bd5bc4ac8bc0278"} err="failed to get container status \"51dda5ae1f21ef092b384f0127be32effe565acdb2160b9b7bd5bc4ac8bc0278\": rpc error: code = NotFound desc = could not find container \"51dda5ae1f21ef092b384f0127be32effe565acdb2160b9b7bd5bc4ac8bc0278\": container with ID starting with 51dda5ae1f21ef092b384f0127be32effe565acdb2160b9b7bd5bc4ac8bc0278 not found: ID does not exist" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.503032 4558 scope.go:117] "RemoveContainer" containerID="d1ba5118e075c9076247767e8ee5b5f7ddae3519d2f382217f34408cb902f7ec" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.512549 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee147895-0bad-4dc1-a348-1be3348a7180-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ee147895-0bad-4dc1-a348-1be3348a7180" (UID: "ee147895-0bad-4dc1-a348-1be3348a7180"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.520392 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dpg58\" (UniqueName: \"kubernetes.io/projected/ee147895-0bad-4dc1-a348-1be3348a7180-kube-api-access-dpg58\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.520419 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/043476ec-5392-41af-970c-89d20b6b30a5-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.520431 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ee147895-0bad-4dc1-a348-1be3348a7180-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.520439 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tp4jm\" (UniqueName: \"kubernetes.io/projected/043476ec-5392-41af-970c-89d20b6b30a5-kube-api-access-tp4jm\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.520447 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ee147895-0bad-4dc1-a348-1be3348a7180-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.520455 4558 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ee147895-0bad-4dc1-a348-1be3348a7180-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.520462 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ee147895-0bad-4dc1-a348-1be3348a7180-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.520471 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee147895-0bad-4dc1-a348-1be3348a7180-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.520479 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/043476ec-5392-41af-970c-89d20b6b30a5-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.528601 4558 scope.go:117] "RemoveContainer" containerID="a18b5b718a98c1d90904f2a89cfdb392cf52b6b547739f0cef3c92632d95c1e4" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.575103 4558 scope.go:117] "RemoveContainer" containerID="d1ba5118e075c9076247767e8ee5b5f7ddae3519d2f382217f34408cb902f7ec" Jan 20 17:10:58 crc kubenswrapper[4558]: E0120 17:10:58.576681 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d1ba5118e075c9076247767e8ee5b5f7ddae3519d2f382217f34408cb902f7ec\": container with ID starting with d1ba5118e075c9076247767e8ee5b5f7ddae3519d2f382217f34408cb902f7ec not found: ID does not exist" containerID="d1ba5118e075c9076247767e8ee5b5f7ddae3519d2f382217f34408cb902f7ec" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.576716 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d1ba5118e075c9076247767e8ee5b5f7ddae3519d2f382217f34408cb902f7ec"} err="failed to get container status 
\"d1ba5118e075c9076247767e8ee5b5f7ddae3519d2f382217f34408cb902f7ec\": rpc error: code = NotFound desc = could not find container \"d1ba5118e075c9076247767e8ee5b5f7ddae3519d2f382217f34408cb902f7ec\": container with ID starting with d1ba5118e075c9076247767e8ee5b5f7ddae3519d2f382217f34408cb902f7ec not found: ID does not exist" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.576736 4558 scope.go:117] "RemoveContainer" containerID="a18b5b718a98c1d90904f2a89cfdb392cf52b6b547739f0cef3c92632d95c1e4" Jan 20 17:10:58 crc kubenswrapper[4558]: E0120 17:10:58.578024 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a18b5b718a98c1d90904f2a89cfdb392cf52b6b547739f0cef3c92632d95c1e4\": container with ID starting with a18b5b718a98c1d90904f2a89cfdb392cf52b6b547739f0cef3c92632d95c1e4 not found: ID does not exist" containerID="a18b5b718a98c1d90904f2a89cfdb392cf52b6b547739f0cef3c92632d95c1e4" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.578050 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a18b5b718a98c1d90904f2a89cfdb392cf52b6b547739f0cef3c92632d95c1e4"} err="failed to get container status \"a18b5b718a98c1d90904f2a89cfdb392cf52b6b547739f0cef3c92632d95c1e4\": rpc error: code = NotFound desc = could not find container \"a18b5b718a98c1d90904f2a89cfdb392cf52b6b547739f0cef3c92632d95c1e4\": container with ID starting with a18b5b718a98c1d90904f2a89cfdb392cf52b6b547739f0cef3c92632d95c1e4 not found: ID does not exist" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.580391 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01d65c2e-73b2-4d9e-a659-c384a3a3e2fe" path="/var/lib/kubelet/pods/01d65c2e-73b2-4d9e-a659-c384a3a3e2fe/volumes" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.581312 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="12dec63c-ec5c-4327-b1ec-aa9ee39acf82" path="/var/lib/kubelet/pods/12dec63c-ec5c-4327-b1ec-aa9ee39acf82/volumes" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.581762 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="23f3fc6a-3d4e-4e04-87c7-c819069b4a89" path="/var/lib/kubelet/pods/23f3fc6a-3d4e-4e04-87c7-c819069b4a89/volumes" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.582093 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.582874 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2ba3bb38-522d-444e-bfe4-510b9fa17227" path="/var/lib/kubelet/pods/2ba3bb38-522d-444e-bfe4-510b9fa17227/volumes" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.583640 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="32721dbf-d5cf-43d4-9ddc-ad1664d17869" path="/var/lib/kubelet/pods/32721dbf-d5cf-43d4-9ddc-ad1664d17869/volumes" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.584184 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4f1e655f-1cb1-483c-9dd4-cc58c0d72c45" path="/var/lib/kubelet/pods/4f1e655f-1cb1-483c-9dd4-cc58c0d72c45/volumes" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.584934 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4f4387d7-1e8c-48a5-9176-c8e683469eb0" path="/var/lib/kubelet/pods/4f4387d7-1e8c-48a5-9176-c8e683469eb0/volumes" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.586184 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5d4337cb-4c83-4c75-b4e4-3c7f16373356" path="/var/lib/kubelet/pods/5d4337cb-4c83-4c75-b4e4-3c7f16373356/volumes" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.586591 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="937396e0-a574-4597-bafc-bf1d8a909d3c" path="/var/lib/kubelet/pods/937396e0-a574-4597-bafc-bf1d8a909d3c/volumes" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.587110 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="99812c8d-c18f-48d5-a063-deb9462a904b" path="/var/lib/kubelet/pods/99812c8d-c18f-48d5-a063-deb9462a904b/volumes" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.587529 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9b8664b1-e050-479b-b965-aed1a70e4bd1" path="/var/lib/kubelet/pods/9b8664b1-e050-479b-b965-aed1a70e4bd1/volumes" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.587736 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovn-northd-0_c6a5b342-eaf2-408c-828e-9bc0bf10d09e/ovn-northd/0.log" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.588838 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.589338 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b33d5d09-77a2-4bc5-aa13-898a97b01b69" path="/var/lib/kubelet/pods/b33d5d09-77a2-4bc5-aa13-898a97b01b69/volumes" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.590970 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e30a943a-f753-41fc-adc8-03822aa712c3" path="/var/lib/kubelet/pods/e30a943a-f753-41fc-adc8-03822aa712c3/volumes" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.593054 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/043476ec-5392-41af-970c-89d20b6b30a5-config-data" (OuterVolumeSpecName: "config-data") pod "043476ec-5392-41af-970c-89d20b6b30a5" (UID: "043476ec-5392-41af-970c-89d20b6b30a5"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.593569 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f6e1d86c-7bad-427d-8b89-f1df1213ccc5" path="/var/lib/kubelet/pods/f6e1d86c-7bad-427d-8b89-f1df1213ccc5/volumes" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.624339 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/043476ec-5392-41af-970c-89d20b6b30a5-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.624925 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.628993 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.630189 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-api-757fb55df8-9qzm2" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.648597 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-86d9c4d694-78ch5" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.650951 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee147895-0bad-4dc1-a348-1be3348a7180-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "ee147895-0bad-4dc1-a348-1be3348a7180" (UID: "ee147895-0bad-4dc1-a348-1be3348a7180"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.661508 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.680685 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/043476ec-5392-41af-970c-89d20b6b30a5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "043476ec-5392-41af-970c-89d20b6b30a5" (UID: "043476ec-5392-41af-970c-89d20b6b30a5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.684115 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee147895-0bad-4dc1-a348-1be3348a7180-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "ee147895-0bad-4dc1-a348-1be3348a7180" (UID: "ee147895-0bad-4dc1-a348-1be3348a7180"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.705867 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee147895-0bad-4dc1-a348-1be3348a7180-config-data" (OuterVolumeSpecName: "config-data") pod "ee147895-0bad-4dc1-a348-1be3348a7180" (UID: "ee147895-0bad-4dc1-a348-1be3348a7180"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.725498 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d-combined-ca-bundle\") pod \"a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d\" (UID: \"a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d\") " Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.725547 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c6a5b342-eaf2-408c-828e-9bc0bf10d09e-scripts\") pod \"c6a5b342-eaf2-408c-828e-9bc0bf10d09e\" (UID: \"c6a5b342-eaf2-408c-828e-9bc0bf10d09e\") " Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.725570 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/c6a5b342-eaf2-408c-828e-9bc0bf10d09e-ovn-northd-tls-certs\") pod \"c6a5b342-eaf2-408c-828e-9bc0bf10d09e\" (UID: \"c6a5b342-eaf2-408c-828e-9bc0bf10d09e\") " Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.725598 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/95cf677f-e744-4dad-acfb-507100d2ea14-sg-core-conf-yaml\") pod \"95cf677f-e744-4dad-acfb-507100d2ea14\" (UID: \"95cf677f-e744-4dad-acfb-507100d2ea14\") " Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.725657 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5sbvc\" (UniqueName: \"kubernetes.io/projected/25737ece-fb84-4543-8c3a-94ffa7b8f095-kube-api-access-5sbvc\") pod \"25737ece-fb84-4543-8c3a-94ffa7b8f095\" (UID: \"25737ece-fb84-4543-8c3a-94ffa7b8f095\") " Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.725690 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c6a5b342-eaf2-408c-828e-9bc0bf10d09e-config\") pod \"c6a5b342-eaf2-408c-828e-9bc0bf10d09e\" (UID: \"c6a5b342-eaf2-408c-828e-9bc0bf10d09e\") " Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.725754 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/87001341-5a74-4796-a332-6d57b2cf11c9-logs\") pod \"87001341-5a74-4796-a332-6d57b2cf11c9\" (UID: \"87001341-5a74-4796-a332-6d57b2cf11c9\") " Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.725772 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d-public-tls-certs\") pod \"a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d\" (UID: \"a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d\") " Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.725790 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/c6a5b342-eaf2-408c-828e-9bc0bf10d09e-ovn-rundir\") pod \"c6a5b342-eaf2-408c-828e-9bc0bf10d09e\" (UID: \"c6a5b342-eaf2-408c-828e-9bc0bf10d09e\") " Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.725808 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/c6a5b342-eaf2-408c-828e-9bc0bf10d09e-metrics-certs-tls-certs\") pod \"c6a5b342-eaf2-408c-828e-9bc0bf10d09e\" (UID: 
\"c6a5b342-eaf2-408c-828e-9bc0bf10d09e\") " Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.725828 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d-scripts\") pod \"a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d\" (UID: \"a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d\") " Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.725861 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/95cf677f-e744-4dad-acfb-507100d2ea14-config-data\") pod \"95cf677f-e744-4dad-acfb-507100d2ea14\" (UID: \"95cf677f-e744-4dad-acfb-507100d2ea14\") " Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.725887 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d-config-data\") pod \"a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d\" (UID: \"a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d\") " Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.725946 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lmmrc\" (UniqueName: \"kubernetes.io/projected/209ec6bc-170f-4c8a-ad7e-e441ace95d1b-kube-api-access-lmmrc\") pod \"209ec6bc-170f-4c8a-ad7e-e441ace95d1b\" (UID: \"209ec6bc-170f-4c8a-ad7e-e441ace95d1b\") " Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.725985 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/209ec6bc-170f-4c8a-ad7e-e441ace95d1b-combined-ca-bundle\") pod \"209ec6bc-170f-4c8a-ad7e-e441ace95d1b\" (UID: \"209ec6bc-170f-4c8a-ad7e-e441ace95d1b\") " Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.726014 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/25737ece-fb84-4543-8c3a-94ffa7b8f095-logs\") pod \"25737ece-fb84-4543-8c3a-94ffa7b8f095\" (UID: \"25737ece-fb84-4543-8c3a-94ffa7b8f095\") " Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.726030 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d-httpd-run\") pod \"a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d\" (UID: \"a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d\") " Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.726055 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/95cf677f-e744-4dad-acfb-507100d2ea14-combined-ca-bundle\") pod \"95cf677f-e744-4dad-acfb-507100d2ea14\" (UID: \"95cf677f-e744-4dad-acfb-507100d2ea14\") " Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.726081 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/95cf677f-e744-4dad-acfb-507100d2ea14-log-httpd\") pod \"95cf677f-e744-4dad-acfb-507100d2ea14\" (UID: \"95cf677f-e744-4dad-acfb-507100d2ea14\") " Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.726099 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25737ece-fb84-4543-8c3a-94ffa7b8f095-config-data\") pod \"25737ece-fb84-4543-8c3a-94ffa7b8f095\" (UID: \"25737ece-fb84-4543-8c3a-94ffa7b8f095\") " Jan 20 17:10:58 crc 
kubenswrapper[4558]: I0120 17:10:58.726120 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/209ec6bc-170f-4c8a-ad7e-e441ace95d1b-logs\") pod \"209ec6bc-170f-4c8a-ad7e-e441ace95d1b\" (UID: \"209ec6bc-170f-4c8a-ad7e-e441ace95d1b\") " Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.726146 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/209ec6bc-170f-4c8a-ad7e-e441ace95d1b-config-data\") pod \"209ec6bc-170f-4c8a-ad7e-e441ace95d1b\" (UID: \"209ec6bc-170f-4c8a-ad7e-e441ace95d1b\") " Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.726204 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/25737ece-fb84-4543-8c3a-94ffa7b8f095-scripts\") pod \"25737ece-fb84-4543-8c3a-94ffa7b8f095\" (UID: \"25737ece-fb84-4543-8c3a-94ffa7b8f095\") " Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.726224 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d-logs\") pod \"a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d\" (UID: \"a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d\") " Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.726252 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-js5hh\" (UniqueName: \"kubernetes.io/projected/a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d-kube-api-access-js5hh\") pod \"a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d\" (UID: \"a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d\") " Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.726267 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/95cf677f-e744-4dad-acfb-507100d2ea14-run-httpd\") pod \"95cf677f-e744-4dad-acfb-507100d2ea14\" (UID: \"95cf677f-e744-4dad-acfb-507100d2ea14\") " Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.726287 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/87001341-5a74-4796-a332-6d57b2cf11c9-combined-ca-bundle\") pod \"87001341-5a74-4796-a332-6d57b2cf11c9\" (UID: \"87001341-5a74-4796-a332-6d57b2cf11c9\") " Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.726305 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/25737ece-fb84-4543-8c3a-94ffa7b8f095-internal-tls-certs\") pod \"25737ece-fb84-4543-8c3a-94ffa7b8f095\" (UID: \"25737ece-fb84-4543-8c3a-94ffa7b8f095\") " Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.726325 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d\" (UID: \"a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d\") " Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.726341 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4w5vq\" (UniqueName: \"kubernetes.io/projected/95cf677f-e744-4dad-acfb-507100d2ea14-kube-api-access-4w5vq\") pod \"95cf677f-e744-4dad-acfb-507100d2ea14\" (UID: \"95cf677f-e744-4dad-acfb-507100d2ea14\") " Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.726359 4558 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/87001341-5a74-4796-a332-6d57b2cf11c9-config-data\") pod \"87001341-5a74-4796-a332-6d57b2cf11c9\" (UID: \"87001341-5a74-4796-a332-6d57b2cf11c9\") " Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.726396 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fpt4j\" (UniqueName: \"kubernetes.io/projected/87001341-5a74-4796-a332-6d57b2cf11c9-kube-api-access-fpt4j\") pod \"87001341-5a74-4796-a332-6d57b2cf11c9\" (UID: \"87001341-5a74-4796-a332-6d57b2cf11c9\") " Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.726418 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25737ece-fb84-4543-8c3a-94ffa7b8f095-combined-ca-bundle\") pod \"25737ece-fb84-4543-8c3a-94ffa7b8f095\" (UID: \"25737ece-fb84-4543-8c3a-94ffa7b8f095\") " Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.726438 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/95cf677f-e744-4dad-acfb-507100d2ea14-ceilometer-tls-certs\") pod \"95cf677f-e744-4dad-acfb-507100d2ea14\" (UID: \"95cf677f-e744-4dad-acfb-507100d2ea14\") " Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.726458 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/95cf677f-e744-4dad-acfb-507100d2ea14-scripts\") pod \"95cf677f-e744-4dad-acfb-507100d2ea14\" (UID: \"95cf677f-e744-4dad-acfb-507100d2ea14\") " Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.726488 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/87001341-5a74-4796-a332-6d57b2cf11c9-internal-tls-certs\") pod \"87001341-5a74-4796-a332-6d57b2cf11c9\" (UID: \"87001341-5a74-4796-a332-6d57b2cf11c9\") " Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.726504 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/25737ece-fb84-4543-8c3a-94ffa7b8f095-httpd-run\") pod \"25737ece-fb84-4543-8c3a-94ffa7b8f095\" (UID: \"25737ece-fb84-4543-8c3a-94ffa7b8f095\") " Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.726535 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/209ec6bc-170f-4c8a-ad7e-e441ace95d1b-config-data-custom\") pod \"209ec6bc-170f-4c8a-ad7e-e441ace95d1b\" (UID: \"209ec6bc-170f-4c8a-ad7e-e441ace95d1b\") " Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.726551 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/87001341-5a74-4796-a332-6d57b2cf11c9-public-tls-certs\") pod \"87001341-5a74-4796-a332-6d57b2cf11c9\" (UID: \"87001341-5a74-4796-a332-6d57b2cf11c9\") " Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.726569 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/87001341-5a74-4796-a332-6d57b2cf11c9-config-data-custom\") pod \"87001341-5a74-4796-a332-6d57b2cf11c9\" (UID: \"87001341-5a74-4796-a332-6d57b2cf11c9\") " Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.726590 4558 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-c5zts\" (UniqueName: \"kubernetes.io/projected/c6a5b342-eaf2-408c-828e-9bc0bf10d09e-kube-api-access-c5zts\") pod \"c6a5b342-eaf2-408c-828e-9bc0bf10d09e\" (UID: \"c6a5b342-eaf2-408c-828e-9bc0bf10d09e\") " Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.726634 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6a5b342-eaf2-408c-828e-9bc0bf10d09e-combined-ca-bundle\") pod \"c6a5b342-eaf2-408c-828e-9bc0bf10d09e\" (UID: \"c6a5b342-eaf2-408c-828e-9bc0bf10d09e\") " Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.726658 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"25737ece-fb84-4543-8c3a-94ffa7b8f095\" (UID: \"25737ece-fb84-4543-8c3a-94ffa7b8f095\") " Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.726946 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d" (UID: "a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.728096 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ee147895-0bad-4dc1-a348-1be3348a7180-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.728115 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ee147895-0bad-4dc1-a348-1be3348a7180-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.728130 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.728140 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ee147895-0bad-4dc1-a348-1be3348a7180-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.728150 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/043476ec-5392-41af-970c-89d20b6b30a5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.729347 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c6a5b342-eaf2-408c-828e-9bc0bf10d09e-ovn-rundir" (OuterVolumeSpecName: "ovn-rundir") pod "c6a5b342-eaf2-408c-828e-9bc0bf10d09e" (UID: "c6a5b342-eaf2-408c-828e-9bc0bf10d09e"). InnerVolumeSpecName "ovn-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.732327 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c6a5b342-eaf2-408c-828e-9bc0bf10d09e-scripts" (OuterVolumeSpecName: "scripts") pod "c6a5b342-eaf2-408c-828e-9bc0bf10d09e" (UID: "c6a5b342-eaf2-408c-828e-9bc0bf10d09e"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.733494 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/25737ece-fb84-4543-8c3a-94ffa7b8f095-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "25737ece-fb84-4543-8c3a-94ffa7b8f095" (UID: "25737ece-fb84-4543-8c3a-94ffa7b8f095"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.746570 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/25737ece-fb84-4543-8c3a-94ffa7b8f095-logs" (OuterVolumeSpecName: "logs") pod "25737ece-fb84-4543-8c3a-94ffa7b8f095" (UID: "25737ece-fb84-4543-8c3a-94ffa7b8f095"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.748808 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/209ec6bc-170f-4c8a-ad7e-e441ace95d1b-kube-api-access-lmmrc" (OuterVolumeSpecName: "kube-api-access-lmmrc") pod "209ec6bc-170f-4c8a-ad7e-e441ace95d1b" (UID: "209ec6bc-170f-4c8a-ad7e-e441ace95d1b"). InnerVolumeSpecName "kube-api-access-lmmrc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.758115 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/209ec6bc-170f-4c8a-ad7e-e441ace95d1b-logs" (OuterVolumeSpecName: "logs") pod "209ec6bc-170f-4c8a-ad7e-e441ace95d1b" (UID: "209ec6bc-170f-4c8a-ad7e-e441ace95d1b"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.766720 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/95cf677f-e744-4dad-acfb-507100d2ea14-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "95cf677f-e744-4dad-acfb-507100d2ea14" (UID: "95cf677f-e744-4dad-acfb-507100d2ea14"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.767219 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c6a5b342-eaf2-408c-828e-9bc0bf10d09e-config" (OuterVolumeSpecName: "config") pod "c6a5b342-eaf2-408c-828e-9bc0bf10d09e" (UID: "c6a5b342-eaf2-408c-828e-9bc0bf10d09e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.768052 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87001341-5a74-4796-a332-6d57b2cf11c9-kube-api-access-fpt4j" (OuterVolumeSpecName: "kube-api-access-fpt4j") pod "87001341-5a74-4796-a332-6d57b2cf11c9" (UID: "87001341-5a74-4796-a332-6d57b2cf11c9"). InnerVolumeSpecName "kube-api-access-fpt4j". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.768180 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/87001341-5a74-4796-a332-6d57b2cf11c9-logs" (OuterVolumeSpecName: "logs") pod "87001341-5a74-4796-a332-6d57b2cf11c9" (UID: "87001341-5a74-4796-a332-6d57b2cf11c9"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.768899 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d-logs" (OuterVolumeSpecName: "logs") pod "a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d" (UID: "a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.769143 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/95cf677f-e744-4dad-acfb-507100d2ea14-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "95cf677f-e744-4dad-acfb-507100d2ea14" (UID: "95cf677f-e744-4dad-acfb-507100d2ea14"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.773179 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25737ece-fb84-4543-8c3a-94ffa7b8f095-kube-api-access-5sbvc" (OuterVolumeSpecName: "kube-api-access-5sbvc") pod "25737ece-fb84-4543-8c3a-94ffa7b8f095" (UID: "25737ece-fb84-4543-8c3a-94ffa7b8f095"). InnerVolumeSpecName "kube-api-access-5sbvc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.773674 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/95cf677f-e744-4dad-acfb-507100d2ea14-scripts" (OuterVolumeSpecName: "scripts") pod "95cf677f-e744-4dad-acfb-507100d2ea14" (UID: "95cf677f-e744-4dad-acfb-507100d2ea14"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.775291 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c6a5b342-eaf2-408c-828e-9bc0bf10d09e-kube-api-access-c5zts" (OuterVolumeSpecName: "kube-api-access-c5zts") pod "c6a5b342-eaf2-408c-828e-9bc0bf10d09e" (UID: "c6a5b342-eaf2-408c-828e-9bc0bf10d09e"). InnerVolumeSpecName "kube-api-access-c5zts". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.777751 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87001341-5a74-4796-a332-6d57b2cf11c9-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "87001341-5a74-4796-a332-6d57b2cf11c9" (UID: "87001341-5a74-4796-a332-6d57b2cf11c9"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.777796 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d-scripts" (OuterVolumeSpecName: "scripts") pod "a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d" (UID: "a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.782505 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/209ec6bc-170f-4c8a-ad7e-e441ace95d1b-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "209ec6bc-170f-4c8a-ad7e-e441ace95d1b" (UID: "209ec6bc-170f-4c8a-ad7e-e441ace95d1b"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.782548 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25737ece-fb84-4543-8c3a-94ffa7b8f095-scripts" (OuterVolumeSpecName: "scripts") pod "25737ece-fb84-4543-8c3a-94ffa7b8f095" (UID: "25737ece-fb84-4543-8c3a-94ffa7b8f095"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.782564 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage06-crc" (OuterVolumeSpecName: "glance") pod "25737ece-fb84-4543-8c3a-94ffa7b8f095" (UID: "25737ece-fb84-4543-8c3a-94ffa7b8f095"). InnerVolumeSpecName "local-storage06-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.796338 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d-kube-api-access-js5hh" (OuterVolumeSpecName: "kube-api-access-js5hh") pod "a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d" (UID: "a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d"). InnerVolumeSpecName "kube-api-access-js5hh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.796359 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "glance") pod "a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d" (UID: "a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d"). InnerVolumeSpecName "local-storage03-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.802730 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/95cf677f-e744-4dad-acfb-507100d2ea14-kube-api-access-4w5vq" (OuterVolumeSpecName: "kube-api-access-4w5vq") pod "95cf677f-e744-4dad-acfb-507100d2ea14" (UID: "95cf677f-e744-4dad-acfb-507100d2ea14"). InnerVolumeSpecName "kube-api-access-4w5vq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.829412 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d" (UID: "a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.829970 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1-combined-ca-bundle\") pod \"ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1\" (UID: \"ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1\") " Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.830019 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1-config-data\") pod \"ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1\" (UID: \"ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1\") " Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.830097 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1-internal-tls-certs\") pod \"ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1\" (UID: \"ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1\") " Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.830193 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d-combined-ca-bundle\") pod \"a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d\" (UID: \"a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d\") " Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.830243 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1-logs\") pod \"ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1\" (UID: \"ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1\") " Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.830385 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1-public-tls-certs\") pod \"ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1\" (UID: \"ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1\") " Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.830495 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8gx99\" (UniqueName: \"kubernetes.io/projected/ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1-kube-api-access-8gx99\") pod \"ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1\" (UID: \"ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1\") " Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.830685 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/043476ec-5392-41af-970c-89d20b6b30a5-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "043476ec-5392-41af-970c-89d20b6b30a5" (UID: "043476ec-5392-41af-970c-89d20b6b30a5"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:58 crc kubenswrapper[4558]: W0120 17:10:58.830869 4558 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d/volumes/kubernetes.io~secret/combined-ca-bundle Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.830923 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d" (UID: "a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.830883 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-trdbf\" (UniqueName: \"kubernetes.io/projected/90b46572-b60a-4ef8-ad8c-1e67f51d9df4-kube-api-access-trdbf\") pod \"keystone-c88a-account-create-update-7d8q7\" (UID: \"90b46572-b60a-4ef8-ad8c-1e67f51d9df4\") " pod="openstack-kuttl-tests/keystone-c88a-account-create-update-7d8q7" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.831118 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/90b46572-b60a-4ef8-ad8c-1e67f51d9df4-operator-scripts\") pod \"keystone-c88a-account-create-update-7d8q7\" (UID: \"90b46572-b60a-4ef8-ad8c-1e67f51d9df4\") " pod="openstack-kuttl-tests/keystone-c88a-account-create-update-7d8q7" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.831550 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lmmrc\" (UniqueName: \"kubernetes.io/projected/209ec6bc-170f-4c8a-ad7e-e441ace95d1b-kube-api-access-lmmrc\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.831618 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/25737ece-fb84-4543-8c3a-94ffa7b8f095-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.831670 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/95cf677f-e744-4dad-acfb-507100d2ea14-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.831717 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/209ec6bc-170f-4c8a-ad7e-e441ace95d1b-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.833704 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/25737ece-fb84-4543-8c3a-94ffa7b8f095-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.832263 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1-logs" (OuterVolumeSpecName: "logs") pod "ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1" (UID: "ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:10:58 crc kubenswrapper[4558]: E0120 17:10:58.832877 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.833778 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:58 crc kubenswrapper[4558]: E0120 17:10:58.833865 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/90b46572-b60a-4ef8-ad8c-1e67f51d9df4-operator-scripts podName:90b46572-b60a-4ef8-ad8c-1e67f51d9df4 nodeName:}" failed. No retries permitted until 2026-01-20 17:11:00.833842726 +0000 UTC m=+1754.594180683 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/90b46572-b60a-4ef8-ad8c-1e67f51d9df4-operator-scripts") pod "keystone-c88a-account-create-update-7d8q7" (UID: "90b46572-b60a-4ef8-ad8c-1e67f51d9df4") : configmap "openstack-scripts" not found Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.833931 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-js5hh\" (UniqueName: \"kubernetes.io/projected/a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d-kube-api-access-js5hh\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.833957 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/95cf677f-e744-4dad-acfb-507100d2ea14-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.834018 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" " Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.834036 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4w5vq\" (UniqueName: \"kubernetes.io/projected/95cf677f-e744-4dad-acfb-507100d2ea14-kube-api-access-4w5vq\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.834084 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fpt4j\" (UniqueName: \"kubernetes.io/projected/87001341-5a74-4796-a332-6d57b2cf11c9-kube-api-access-fpt4j\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.834097 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/95cf677f-e744-4dad-acfb-507100d2ea14-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.834114 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/25737ece-fb84-4543-8c3a-94ffa7b8f095-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.834127 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/209ec6bc-170f-4c8a-ad7e-e441ace95d1b-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.834273 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/87001341-5a74-4796-a332-6d57b2cf11c9-config-data-custom\") on node 
\"crc\" DevicePath \"\"" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.834337 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c5zts\" (UniqueName: \"kubernetes.io/projected/c6a5b342-eaf2-408c-828e-9bc0bf10d09e-kube-api-access-c5zts\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.834347 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/043476ec-5392-41af-970c-89d20b6b30a5-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.834395 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" " Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.834407 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.834418 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c6a5b342-eaf2-408c-828e-9bc0bf10d09e-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.834428 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5sbvc\" (UniqueName: \"kubernetes.io/projected/25737ece-fb84-4543-8c3a-94ffa7b8f095-kube-api-access-5sbvc\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.834436 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c6a5b342-eaf2-408c-828e-9bc0bf10d09e-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:58 crc kubenswrapper[4558]: E0120 17:10:58.834751 4558 projected.go:194] Error preparing data for projected volume kube-api-access-trdbf for pod openstack-kuttl-tests/keystone-c88a-account-create-update-7d8q7: failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 17:10:58 crc kubenswrapper[4558]: E0120 17:10:58.834817 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/90b46572-b60a-4ef8-ad8c-1e67f51d9df4-kube-api-access-trdbf podName:90b46572-b60a-4ef8-ad8c-1e67f51d9df4 nodeName:}" failed. No retries permitted until 2026-01-20 17:11:00.834802791 +0000 UTC m=+1754.595140758 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-trdbf" (UniqueName: "kubernetes.io/projected/90b46572-b60a-4ef8-ad8c-1e67f51d9df4-kube-api-access-trdbf") pod "keystone-c88a-account-create-update-7d8q7" (UID: "90b46572-b60a-4ef8-ad8c-1e67f51d9df4") : failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.834842 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/87001341-5a74-4796-a332-6d57b2cf11c9-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.834853 4558 reconciler_common.go:293] "Volume detached for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/c6a5b342-eaf2-408c-828e-9bc0bf10d09e-ovn-rundir\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.834971 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.838129 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1-kube-api-access-8gx99" (OuterVolumeSpecName: "kube-api-access-8gx99") pod "ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1" (UID: "ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1"). InnerVolumeSpecName "kube-api-access-8gx99". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.855106 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.877228 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.902535 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.936120 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/95cf677f-e744-4dad-acfb-507100d2ea14-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "95cf677f-e744-4dad-acfb-507100d2ea14" (UID: "95cf677f-e744-4dad-acfb-507100d2ea14"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.937942 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zr25m\" (UniqueName: \"kubernetes.io/projected/5560f6a4-fc05-4d97-8496-b87804dfab99-kube-api-access-zr25m\") pod \"5560f6a4-fc05-4d97-8496-b87804dfab99\" (UID: \"5560f6a4-fc05-4d97-8496-b87804dfab99\") " Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.938137 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5560f6a4-fc05-4d97-8496-b87804dfab99-logs\") pod \"5560f6a4-fc05-4d97-8496-b87804dfab99\" (UID: \"5560f6a4-fc05-4d97-8496-b87804dfab99\") " Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.938400 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/95cf677f-e744-4dad-acfb-507100d2ea14-sg-core-conf-yaml\") pod \"95cf677f-e744-4dad-acfb-507100d2ea14\" (UID: \"95cf677f-e744-4dad-acfb-507100d2ea14\") " Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.938446 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/5560f6a4-fc05-4d97-8496-b87804dfab99-nova-metadata-tls-certs\") pod \"5560f6a4-fc05-4d97-8496-b87804dfab99\" (UID: \"5560f6a4-fc05-4d97-8496-b87804dfab99\") " Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.938485 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5560f6a4-fc05-4d97-8496-b87804dfab99-config-data\") pod \"5560f6a4-fc05-4d97-8496-b87804dfab99\" (UID: \"5560f6a4-fc05-4d97-8496-b87804dfab99\") " Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.938559 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5560f6a4-fc05-4d97-8496-b87804dfab99-combined-ca-bundle\") pod \"5560f6a4-fc05-4d97-8496-b87804dfab99\" (UID: \"5560f6a4-fc05-4d97-8496-b87804dfab99\") " Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.938707 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5560f6a4-fc05-4d97-8496-b87804dfab99-logs" (OuterVolumeSpecName: "logs") pod "5560f6a4-fc05-4d97-8496-b87804dfab99" (UID: "5560f6a4-fc05-4d97-8496-b87804dfab99"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:10:58 crc kubenswrapper[4558]: W0120 17:10:58.941040 4558 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/95cf677f-e744-4dad-acfb-507100d2ea14/volumes/kubernetes.io~secret/sg-core-conf-yaml Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.941267 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/95cf677f-e744-4dad-acfb-507100d2ea14-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "95cf677f-e744-4dad-acfb-507100d2ea14" (UID: "95cf677f-e744-4dad-acfb-507100d2ea14"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.942057 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8gx99\" (UniqueName: \"kubernetes.io/projected/ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1-kube-api-access-8gx99\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.942129 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5560f6a4-fc05-4d97-8496-b87804dfab99-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.942211 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/95cf677f-e744-4dad-acfb-507100d2ea14-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.942264 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.969448 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5560f6a4-fc05-4d97-8496-b87804dfab99-kube-api-access-zr25m" (OuterVolumeSpecName: "kube-api-access-zr25m") pod "5560f6a4-fc05-4d97-8496-b87804dfab99" (UID: "5560f6a4-fc05-4d97-8496-b87804dfab99"). InnerVolumeSpecName "kube-api-access-zr25m". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:10:58 crc kubenswrapper[4558]: I0120 17:10:58.973661 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/043476ec-5392-41af-970c-89d20b6b30a5-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "043476ec-5392-41af-970c-89d20b6b30a5" (UID: "043476ec-5392-41af-970c-89d20b6b30a5"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.001274 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87001341-5a74-4796-a332-6d57b2cf11c9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "87001341-5a74-4796-a332-6d57b2cf11c9" (UID: "87001341-5a74-4796-a332-6d57b2cf11c9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.002419 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/209ec6bc-170f-4c8a-ad7e-e441ace95d1b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "209ec6bc-170f-4c8a-ad7e-e441ace95d1b" (UID: "209ec6bc-170f-4c8a-ad7e-e441ace95d1b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.009292 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1" (UID: "ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.011987 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage06-crc" (UniqueName: "kubernetes.io/local-volume/local-storage06-crc") on node "crc" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.033136 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6a5b342-eaf2-408c-828e-9bc0bf10d09e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c6a5b342-eaf2-408c-828e-9bc0bf10d09e" (UID: "c6a5b342-eaf2-408c-828e-9bc0bf10d09e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.043684 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf954b15-e163-4f99-8c1b-5e04d06666bc-config-data\") pod \"cf954b15-e163-4f99-8c1b-5e04d06666bc\" (UID: \"cf954b15-e163-4f99-8c1b-5e04d06666bc\") " Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.043804 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ec5ef72-7c69-4be3-974d-c020ddfea4f7-combined-ca-bundle\") pod \"1ec5ef72-7c69-4be3-974d-c020ddfea4f7\" (UID: \"1ec5ef72-7c69-4be3-974d-c020ddfea4f7\") " Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.043962 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2vzrf\" (UniqueName: \"kubernetes.io/projected/cf954b15-e163-4f99-8c1b-5e04d06666bc-kube-api-access-2vzrf\") pod \"cf954b15-e163-4f99-8c1b-5e04d06666bc\" (UID: \"cf954b15-e163-4f99-8c1b-5e04d06666bc\") " Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.044032 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1ec5ef72-7c69-4be3-974d-c020ddfea4f7-etc-machine-id\") pod \"1ec5ef72-7c69-4be3-974d-c020ddfea4f7\" (UID: \"1ec5ef72-7c69-4be3-974d-c020ddfea4f7\") " Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.044089 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf954b15-e163-4f99-8c1b-5e04d06666bc-combined-ca-bundle\") pod \"cf954b15-e163-4f99-8c1b-5e04d06666bc\" (UID: \"cf954b15-e163-4f99-8c1b-5e04d06666bc\") " Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.044115 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1ec5ef72-7c69-4be3-974d-c020ddfea4f7-config-data\") pod \"1ec5ef72-7c69-4be3-974d-c020ddfea4f7\" (UID: \"1ec5ef72-7c69-4be3-974d-c020ddfea4f7\") " Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.044174 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1ec5ef72-7c69-4be3-974d-c020ddfea4f7-scripts\") pod \"1ec5ef72-7c69-4be3-974d-c020ddfea4f7\" (UID: \"1ec5ef72-7c69-4be3-974d-c020ddfea4f7\") " Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.044223 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mxvx7\" (UniqueName: \"kubernetes.io/projected/1ec5ef72-7c69-4be3-974d-c020ddfea4f7-kube-api-access-mxvx7\") pod \"1ec5ef72-7c69-4be3-974d-c020ddfea4f7\" (UID: 
\"1ec5ef72-7c69-4be3-974d-c020ddfea4f7\") " Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.044263 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1ec5ef72-7c69-4be3-974d-c020ddfea4f7-config-data-custom\") pod \"1ec5ef72-7c69-4be3-974d-c020ddfea4f7\" (UID: \"1ec5ef72-7c69-4be3-974d-c020ddfea4f7\") " Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.044839 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zr25m\" (UniqueName: \"kubernetes.io/projected/5560f6a4-fc05-4d97-8496-b87804dfab99-kube-api-access-zr25m\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.044852 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/209ec6bc-170f-4c8a-ad7e-e441ace95d1b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.044861 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/87001341-5a74-4796-a332-6d57b2cf11c9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.044870 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.044878 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6a5b342-eaf2-408c-828e-9bc0bf10d09e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.044887 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.044896 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/043476ec-5392-41af-970c-89d20b6b30a5-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:59 crc kubenswrapper[4558]: E0120 17:10:59.044956 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 17:10:59 crc kubenswrapper[4558]: E0120 17:10:59.045000 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/931d9a7a-aa04-42c3-a276-e71388a8b9d3-operator-scripts podName:931d9a7a-aa04-42c3-a276-e71388a8b9d3 nodeName:}" failed. No retries permitted until 2026-01-20 17:11:01.044985641 +0000 UTC m=+1754.805323607 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/931d9a7a-aa04-42c3-a276-e71388a8b9d3-operator-scripts") pod "root-account-create-update-f6ldh" (UID: "931d9a7a-aa04-42c3-a276-e71388a8b9d3") : configmap "openstack-scripts" not found Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.046223 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1ec5ef72-7c69-4be3-974d-c020ddfea4f7-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "1ec5ef72-7c69-4be3-974d-c020ddfea4f7" (UID: "1ec5ef72-7c69-4be3-974d-c020ddfea4f7"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.056521 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1ec5ef72-7c69-4be3-974d-c020ddfea4f7-kube-api-access-mxvx7" (OuterVolumeSpecName: "kube-api-access-mxvx7") pod "1ec5ef72-7c69-4be3-974d-c020ddfea4f7" (UID: "1ec5ef72-7c69-4be3-974d-c020ddfea4f7"). InnerVolumeSpecName "kube-api-access-mxvx7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.057031 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cf954b15-e163-4f99-8c1b-5e04d06666bc-kube-api-access-2vzrf" (OuterVolumeSpecName: "kube-api-access-2vzrf") pod "cf954b15-e163-4f99-8c1b-5e04d06666bc" (UID: "cf954b15-e163-4f99-8c1b-5e04d06666bc"). InnerVolumeSpecName "kube-api-access-2vzrf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.059791 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.072664 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ec5ef72-7c69-4be3-974d-c020ddfea4f7-scripts" (OuterVolumeSpecName: "scripts") pod "1ec5ef72-7c69-4be3-974d-c020ddfea4f7" (UID: "1ec5ef72-7c69-4be3-974d-c020ddfea4f7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.072659 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25737ece-fb84-4543-8c3a-94ffa7b8f095-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "25737ece-fb84-4543-8c3a-94ffa7b8f095" (UID: "25737ece-fb84-4543-8c3a-94ffa7b8f095"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.072722 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ec5ef72-7c69-4be3-974d-c020ddfea4f7-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "1ec5ef72-7c69-4be3-974d-c020ddfea4f7" (UID: "1ec5ef72-7c69-4be3-974d-c020ddfea4f7"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.080711 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf954b15-e163-4f99-8c1b-5e04d06666bc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cf954b15-e163-4f99-8c1b-5e04d06666bc" (UID: "cf954b15-e163-4f99-8c1b-5e04d06666bc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.090294 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/95cf677f-e744-4dad-acfb-507100d2ea14-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "95cf677f-e744-4dad-acfb-507100d2ea14" (UID: "95cf677f-e744-4dad-acfb-507100d2ea14"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.109241 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1-config-data" (OuterVolumeSpecName: "config-data") pod "ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1" (UID: "ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.115227 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5560f6a4-fc05-4d97-8496-b87804dfab99-config-data" (OuterVolumeSpecName: "config-data") pod "5560f6a4-fc05-4d97-8496-b87804dfab99" (UID: "5560f6a4-fc05-4d97-8496-b87804dfab99"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.117615 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/95cf677f-e744-4dad-acfb-507100d2ea14-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "95cf677f-e744-4dad-acfb-507100d2ea14" (UID: "95cf677f-e744-4dad-acfb-507100d2ea14"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.126251 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1" (UID: "ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.149600 4558 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1ec5ef72-7c69-4be3-974d-c020ddfea4f7-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.149622 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf954b15-e163-4f99-8c1b-5e04d06666bc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.149634 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5560f6a4-fc05-4d97-8496-b87804dfab99-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.149642 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1ec5ef72-7c69-4be3-974d-c020ddfea4f7-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.149672 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mxvx7\" (UniqueName: \"kubernetes.io/projected/1ec5ef72-7c69-4be3-974d-c020ddfea4f7-kube-api-access-mxvx7\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.149683 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1ec5ef72-7c69-4be3-974d-c020ddfea4f7-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.149691 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" 
(UniqueName: \"kubernetes.io/secret/ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.149701 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/95cf677f-e744-4dad-acfb-507100d2ea14-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.149710 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.149719 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.149746 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25737ece-fb84-4543-8c3a-94ffa7b8f095-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.149755 4558 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/95cf677f-e744-4dad-acfb-507100d2ea14-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.149766 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2vzrf\" (UniqueName: \"kubernetes.io/projected/cf954b15-e163-4f99-8c1b-5e04d06666bc-kube-api-access-2vzrf\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.161517 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d" (UID: "a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.161818 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d-config-data" (OuterVolumeSpecName: "config-data") pod "a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d" (UID: "a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.164329 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6a5b342-eaf2-408c-828e-9bc0bf10d09e-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "c6a5b342-eaf2-408c-828e-9bc0bf10d09e" (UID: "c6a5b342-eaf2-408c-828e-9bc0bf10d09e"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.179609 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87001341-5a74-4796-a332-6d57b2cf11c9-config-data" (OuterVolumeSpecName: "config-data") pod "87001341-5a74-4796-a332-6d57b2cf11c9" (UID: "87001341-5a74-4796-a332-6d57b2cf11c9"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.185725 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1" (UID: "ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.190350 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf954b15-e163-4f99-8c1b-5e04d06666bc-config-data" (OuterVolumeSpecName: "config-data") pod "cf954b15-e163-4f99-8c1b-5e04d06666bc" (UID: "cf954b15-e163-4f99-8c1b-5e04d06666bc"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.190475 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25737ece-fb84-4543-8c3a-94ffa7b8f095-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "25737ece-fb84-4543-8c3a-94ffa7b8f095" (UID: "25737ece-fb84-4543-8c3a-94ffa7b8f095"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.197299 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87001341-5a74-4796-a332-6d57b2cf11c9-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "87001341-5a74-4796-a332-6d57b2cf11c9" (UID: "87001341-5a74-4796-a332-6d57b2cf11c9"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.200460 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6a5b342-eaf2-408c-828e-9bc0bf10d09e-ovn-northd-tls-certs" (OuterVolumeSpecName: "ovn-northd-tls-certs") pod "c6a5b342-eaf2-408c-828e-9bc0bf10d09e" (UID: "c6a5b342-eaf2-408c-828e-9bc0bf10d09e"). InnerVolumeSpecName "ovn-northd-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.210602 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87001341-5a74-4796-a332-6d57b2cf11c9-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "87001341-5a74-4796-a332-6d57b2cf11c9" (UID: "87001341-5a74-4796-a332-6d57b2cf11c9"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.212885 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5560f6a4-fc05-4d97-8496-b87804dfab99-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5560f6a4-fc05-4d97-8496-b87804dfab99" (UID: "5560f6a4-fc05-4d97-8496-b87804dfab99"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.215245 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25737ece-fb84-4543-8c3a-94ffa7b8f095-config-data" (OuterVolumeSpecName: "config-data") pod "25737ece-fb84-4543-8c3a-94ffa7b8f095" (UID: "25737ece-fb84-4543-8c3a-94ffa7b8f095"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.228018 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ec5ef72-7c69-4be3-974d-c020ddfea4f7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1ec5ef72-7c69-4be3-974d-c020ddfea4f7" (UID: "1ec5ef72-7c69-4be3-974d-c020ddfea4f7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.228023 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5560f6a4-fc05-4d97-8496-b87804dfab99-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "5560f6a4-fc05-4d97-8496-b87804dfab99" (UID: "5560f6a4-fc05-4d97-8496-b87804dfab99"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.228089 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/95cf677f-e744-4dad-acfb-507100d2ea14-config-data" (OuterVolumeSpecName: "config-data") pod "95cf677f-e744-4dad-acfb-507100d2ea14" (UID: "95cf677f-e744-4dad-acfb-507100d2ea14"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.234997 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/209ec6bc-170f-4c8a-ad7e-e441ace95d1b-config-data" (OuterVolumeSpecName: "config-data") pod "209ec6bc-170f-4c8a-ad7e-e441ace95d1b" (UID: "209ec6bc-170f-4c8a-ad7e-e441ace95d1b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.251245 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.251275 4558 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/c6a5b342-eaf2-408c-828e-9bc0bf10d09e-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.251287 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5560f6a4-fc05-4d97-8496-b87804dfab99-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.251297 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/95cf677f-e744-4dad-acfb-507100d2ea14-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.251307 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.251317 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf954b15-e163-4f99-8c1b-5e04d06666bc-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.251326 4558 reconciler_common.go:293] "Volume detached for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/25737ece-fb84-4543-8c3a-94ffa7b8f095-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.251334 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/209ec6bc-170f-4c8a-ad7e-e441ace95d1b-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.251343 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ec5ef72-7c69-4be3-974d-c020ddfea4f7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.251352 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/25737ece-fb84-4543-8c3a-94ffa7b8f095-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.251360 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/87001341-5a74-4796-a332-6d57b2cf11c9-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.251375 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/87001341-5a74-4796-a332-6d57b2cf11c9-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.251384 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.251392 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/87001341-5a74-4796-a332-6d57b2cf11c9-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.251401 4558 reconciler_common.go:293] "Volume detached for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/c6a5b342-eaf2-408c-828e-9bc0bf10d09e-ovn-northd-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.251409 4558 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/5560f6a4-fc05-4d97-8496-b87804dfab99-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.264928 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ec5ef72-7c69-4be3-974d-c020ddfea4f7-config-data" (OuterVolumeSpecName: "config-data") pod "1ec5ef72-7c69-4be3-974d-c020ddfea4f7" (UID: "1ec5ef72-7c69-4be3-974d-c020ddfea4f7"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.318470 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/neutron-859ff747c8-5c47j" podUID="f5d75d5a-4202-477d-97a5-8d92bcdd13b9" containerName="neutron-httpd" probeResult="failure" output="Get \"https://10.217.1.103:9696/\": dial tcp 10.217.1.103:9696: connect: connection refused" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.352987 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1ec5ef72-7c69-4be3-974d-c020ddfea4f7-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.376354 4558 generic.go:334] "Generic (PLEG): container finished" podID="4406b36c-adb2-4042-bc92-9efed5a43942" containerID="133261d6818bfb9dc626eb1b737b442170f15e8780d606e9ddad15e359c16063" exitCode=0 Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.376436 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"4406b36c-adb2-4042-bc92-9efed5a43942","Type":"ContainerDied","Data":"133261d6818bfb9dc626eb1b737b442170f15e8780d606e9ddad15e359c16063"} Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.376506 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"4406b36c-adb2-4042-bc92-9efed5a43942","Type":"ContainerDied","Data":"6e0726e59512b0c7bc84ee5bbc07cb0daf6bafc6946d1f94340a122d3ddae6aa"} Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.376522 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6e0726e59512b0c7bc84ee5bbc07cb0daf6bafc6946d1f94340a122d3ddae6aa" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.378620 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"1ec5ef72-7c69-4be3-974d-c020ddfea4f7","Type":"ContainerDied","Data":"f3fac0c16d1cc19744ffbcb36a1f835b7e57b07e263a1f9175592eed6e112976"} Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.378662 4558 scope.go:117] "RemoveContainer" containerID="2ad54bc63fc4167f1a6668ac58b0ad56798eb8ff76a988eb75b07766de4d421c" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.378883 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.384701 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d","Type":"ContainerDied","Data":"35496bb2794ee8c33d201fa94c332fd85c6e0347dc7d4b07b255d69022103dff"} Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.384861 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.395157 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"95cf677f-e744-4dad-acfb-507100d2ea14","Type":"ContainerDied","Data":"78f0cfb41c13b877e4a3bfd328ace655b0b9adbff758b82b1578a7102bbe3717"} Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.395349 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.402682 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"cf954b15-e163-4f99-8c1b-5e04d06666bc","Type":"ContainerDied","Data":"7d46177eacff73e145f6daa61019cd59b81967045d98118ff19d2492dc4f0cf9"} Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.402799 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.406196 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-f6ldh" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.423419 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-757fb55df8-9qzm2" event={"ID":"87001341-5a74-4796-a332-6d57b2cf11c9","Type":"ContainerDied","Data":"b6736c7fbe940026d7050860f63ad8321552cca00db656609ca3e73cc1f72a4f"} Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.423625 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-api-757fb55df8-9qzm2" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.438527 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1","Type":"ContainerDied","Data":"9f1781e6ce864ca3b756a3b2c6cb6554180b80ae4d98e30a5b24012cde991745"} Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.438580 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.441154 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"25737ece-fb84-4543-8c3a-94ffa7b8f095","Type":"ContainerDied","Data":"f4f0991223cc312b00c334450c5683d70634552d654d8ae89e0441116bad5ab5"} Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.441270 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.446811 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-f6ldh" event={"ID":"931d9a7a-aa04-42c3-a276-e71388a8b9d3","Type":"ContainerDied","Data":"8360fe6b2d64fd248c22e3565b6e32c9b0d66b218b8a35e48394563fc743ecf5"} Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.446836 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-f6ldh" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.449045 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-b8dd4f99b-9gg5b"] Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.450042 4558 scope.go:117] "RemoveContainer" containerID="f1e7bbc89f40090d398e96e1f8922de73e690cb868b93535a373edcd14aedc39" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.454816 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.455025 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"5560f6a4-fc05-4d97-8496-b87804dfab99","Type":"ContainerDied","Data":"a93c5f1f38eeb134451e56fbc3900bca1dc90fbb9204fedc6662faec2d98373f"} Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.464339 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovn-northd-0_c6a5b342-eaf2-408c-828e-9bc0bf10d09e/ovn-northd/0.log" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.464427 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"c6a5b342-eaf2-408c-828e-9bc0bf10d09e","Type":"ContainerDied","Data":"f7cd21ca9b08cd59a47ba406223b128ca14eff70ac8265c04176b9f42593f90c"} Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.464838 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.466114 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.468579 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/placement-b8dd4f99b-9gg5b"] Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.470453 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-86d9c4d694-78ch5" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.470462 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-86d9c4d694-78ch5" event={"ID":"209ec6bc-170f-4c8a-ad7e-e441ace95d1b","Type":"ContainerDied","Data":"08de7bab4d88a30bfae5bb75c0e24a01f445b709d48ab81d3db8a6f2fc510505"} Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.472976 4558 generic.go:334] "Generic (PLEG): container finished" podID="5558aeea-2a7f-4654-a261-a2902c9434e0" containerID="0334d5f9b2258c2ff98366283a9bd0e77fed9de54416a50e2a8bd36057e911d2" exitCode=0 Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.475721 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"5558aeea-2a7f-4654-a261-a2902c9434e0","Type":"ContainerDied","Data":"0334d5f9b2258c2ff98366283a9bd0e77fed9de54416a50e2a8bd36057e911d2"} Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.475852 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.476670 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-c88a-account-create-update-7d8q7" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.482080 4558 scope.go:117] "RemoveContainer" containerID="fcae9d3478f16c4ae798638653ce51dee0f0bdc70fd0867d5014ba53378f69ef" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.493572 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.501262 4558 scope.go:117] "RemoveContainer" containerID="f820bd8add1ea95d24868db10ab96d6351761090b1af2d6f03fc4e29de25ecac" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.517234 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.522568 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.537255 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.555986 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4406b36c-adb2-4042-bc92-9efed5a43942-combined-ca-bundle\") pod \"4406b36c-adb2-4042-bc92-9efed5a43942\" (UID: \"4406b36c-adb2-4042-bc92-9efed5a43942\") " Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.556047 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m7rcs\" (UniqueName: \"kubernetes.io/projected/4406b36c-adb2-4042-bc92-9efed5a43942-kube-api-access-m7rcs\") pod \"4406b36c-adb2-4042-bc92-9efed5a43942\" (UID: \"4406b36c-adb2-4042-bc92-9efed5a43942\") " Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.556133 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/931d9a7a-aa04-42c3-a276-e71388a8b9d3-operator-scripts\") pod \"931d9a7a-aa04-42c3-a276-e71388a8b9d3\" (UID: \"931d9a7a-aa04-42c3-a276-e71388a8b9d3\") " Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.556189 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/4406b36c-adb2-4042-bc92-9efed5a43942-kolla-config\") pod \"4406b36c-adb2-4042-bc92-9efed5a43942\" (UID: \"4406b36c-adb2-4042-bc92-9efed5a43942\") " Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.556220 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/4406b36c-adb2-4042-bc92-9efed5a43942-memcached-tls-certs\") pod \"4406b36c-adb2-4042-bc92-9efed5a43942\" (UID: \"4406b36c-adb2-4042-bc92-9efed5a43942\") " Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.556620 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4406b36c-adb2-4042-bc92-9efed5a43942-config-data\") pod \"4406b36c-adb2-4042-bc92-9efed5a43942\" (UID: \"4406b36c-adb2-4042-bc92-9efed5a43942\") " Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.556682 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rd4km\" (UniqueName: 
\"kubernetes.io/projected/931d9a7a-aa04-42c3-a276-e71388a8b9d3-kube-api-access-rd4km\") pod \"931d9a7a-aa04-42c3-a276-e71388a8b9d3\" (UID: \"931d9a7a-aa04-42c3-a276-e71388a8b9d3\") " Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.558064 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4406b36c-adb2-4042-bc92-9efed5a43942-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "4406b36c-adb2-4042-bc92-9efed5a43942" (UID: "4406b36c-adb2-4042-bc92-9efed5a43942"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.559145 4558 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/4406b36c-adb2-4042-bc92-9efed5a43942-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.560104 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4406b36c-adb2-4042-bc92-9efed5a43942-config-data" (OuterVolumeSpecName: "config-data") pod "4406b36c-adb2-4042-bc92-9efed5a43942" (UID: "4406b36c-adb2-4042-bc92-9efed5a43942"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.560606 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/931d9a7a-aa04-42c3-a276-e71388a8b9d3-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "931d9a7a-aa04-42c3-a276-e71388a8b9d3" (UID: "931d9a7a-aa04-42c3-a276-e71388a8b9d3"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.565836 4558 scope.go:117] "RemoveContainer" containerID="a711a286282347590079d2447ef37fa9ed0f11085d6d7a65f85e65c1c2df608f" Jan 20 17:10:59 crc kubenswrapper[4558]: E0120 17:10:59.566210 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.568351 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/931d9a7a-aa04-42c3-a276-e71388a8b9d3-kube-api-access-rd4km" (OuterVolumeSpecName: "kube-api-access-rd4km") pod "931d9a7a-aa04-42c3-a276-e71388a8b9d3" (UID: "931d9a7a-aa04-42c3-a276-e71388a8b9d3"). InnerVolumeSpecName "kube-api-access-rd4km". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.568414 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4406b36c-adb2-4042-bc92-9efed5a43942-kube-api-access-m7rcs" (OuterVolumeSpecName: "kube-api-access-m7rcs") pod "4406b36c-adb2-4042-bc92-9efed5a43942" (UID: "4406b36c-adb2-4042-bc92-9efed5a43942"). InnerVolumeSpecName "kube-api-access-m7rcs". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.583125 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4406b36c-adb2-4042-bc92-9efed5a43942-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4406b36c-adb2-4042-bc92-9efed5a43942" (UID: "4406b36c-adb2-4042-bc92-9efed5a43942"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.614028 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4406b36c-adb2-4042-bc92-9efed5a43942-memcached-tls-certs" (OuterVolumeSpecName: "memcached-tls-certs") pod "4406b36c-adb2-4042-bc92-9efed5a43942" (UID: "4406b36c-adb2-4042-bc92-9efed5a43942"). InnerVolumeSpecName "memcached-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.624829 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.633780 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.644043 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.667242 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4406b36c-adb2-4042-bc92-9efed5a43942-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.667331 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m7rcs\" (UniqueName: \"kubernetes.io/projected/4406b36c-adb2-4042-bc92-9efed5a43942-kube-api-access-m7rcs\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.667408 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/931d9a7a-aa04-42c3-a276-e71388a8b9d3-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.667462 4558 reconciler_common.go:293] "Volume detached for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/4406b36c-adb2-4042-bc92-9efed5a43942-memcached-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.667510 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4406b36c-adb2-4042-bc92-9efed5a43942-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.667567 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rd4km\" (UniqueName: \"kubernetes.io/projected/931d9a7a-aa04-42c3-a276-e71388a8b9d3-kube-api-access-rd4km\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:59 crc kubenswrapper[4558]: E0120 17:10:59.667721 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 20 17:10:59 crc kubenswrapper[4558]: E0120 17:10:59.667782 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/07fab146-67be-42ba-b263-ee19fe95720b-config-data 
podName:07fab146-67be-42ba-b263-ee19fe95720b nodeName:}" failed. No retries permitted until 2026-01-20 17:11:07.667763075 +0000 UTC m=+1761.428101043 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/07fab146-67be-42ba-b263-ee19fe95720b-config-data") pod "rabbitmq-server-0" (UID: "07fab146-67be-42ba-b263-ee19fe95720b") : configmap "rabbitmq-config-data" not found Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.689202 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-c88a-account-create-update-7d8q7"] Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.698449 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-c88a-account-create-update-7d8q7"] Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.704147 4558 scope.go:117] "RemoveContainer" containerID="3e0d89a98f1d6af7b5d9794c3b45f1a7d02d2b5e21f231ecf13088af12d9aed7" Jan 20 17:10:59 crc kubenswrapper[4558]: E0120 17:10:59.706305 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="9827c6a452c5455a00296263b978afc4645bb1e30af3fb06ef2be62752a0e311" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.709457 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:10:59 crc kubenswrapper[4558]: E0120 17:10:59.711424 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="9827c6a452c5455a00296263b978afc4645bb1e30af3fb06ef2be62752a0e311" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.715127 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:10:59 crc kubenswrapper[4558]: E0120 17:10:59.716785 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="9827c6a452c5455a00296263b978afc4645bb1e30af3fb06ef2be62752a0e311" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:10:59 crc kubenswrapper[4558]: E0120 17:10:59.716818 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="04194274-834f-49e3-ac2f-c28998193181" containerName="nova-scheduler-scheduler" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.720514 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-api-757fb55df8-9qzm2"] Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.727772 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-api-757fb55df8-9qzm2"] Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.741964 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.759302 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.769187 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5558aeea-2a7f-4654-a261-a2902c9434e0-kolla-config\") pod \"5558aeea-2a7f-4654-a261-a2902c9434e0\" (UID: \"5558aeea-2a7f-4654-a261-a2902c9434e0\") " Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.769319 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5558aeea-2a7f-4654-a261-a2902c9434e0-operator-scripts\") pod \"5558aeea-2a7f-4654-a261-a2902c9434e0\" (UID: \"5558aeea-2a7f-4654-a261-a2902c9434e0\") " Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.769359 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/5558aeea-2a7f-4654-a261-a2902c9434e0-config-data-generated\") pod \"5558aeea-2a7f-4654-a261-a2902c9434e0\" (UID: \"5558aeea-2a7f-4654-a261-a2902c9434e0\") " Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.769405 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage16-crc\") pod \"5558aeea-2a7f-4654-a261-a2902c9434e0\" (UID: \"5558aeea-2a7f-4654-a261-a2902c9434e0\") " Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.769435 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4gxxw\" (UniqueName: \"kubernetes.io/projected/5558aeea-2a7f-4654-a261-a2902c9434e0-kube-api-access-4gxxw\") pod \"5558aeea-2a7f-4654-a261-a2902c9434e0\" (UID: \"5558aeea-2a7f-4654-a261-a2902c9434e0\") " Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.769492 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5558aeea-2a7f-4654-a261-a2902c9434e0-combined-ca-bundle\") pod \"5558aeea-2a7f-4654-a261-a2902c9434e0\" (UID: \"5558aeea-2a7f-4654-a261-a2902c9434e0\") " Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.769644 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/5558aeea-2a7f-4654-a261-a2902c9434e0-config-data-default\") pod \"5558aeea-2a7f-4654-a261-a2902c9434e0\" (UID: \"5558aeea-2a7f-4654-a261-a2902c9434e0\") " Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.769685 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/5558aeea-2a7f-4654-a261-a2902c9434e0-galera-tls-certs\") pod \"5558aeea-2a7f-4654-a261-a2902c9434e0\" (UID: \"5558aeea-2a7f-4654-a261-a2902c9434e0\") " Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.770024 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5558aeea-2a7f-4654-a261-a2902c9434e0-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "5558aeea-2a7f-4654-a261-a2902c9434e0" (UID: "5558aeea-2a7f-4654-a261-a2902c9434e0"). InnerVolumeSpecName "config-data-generated". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.770362 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/5558aeea-2a7f-4654-a261-a2902c9434e0-config-data-generated\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.770620 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5558aeea-2a7f-4654-a261-a2902c9434e0-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "5558aeea-2a7f-4654-a261-a2902c9434e0" (UID: "5558aeea-2a7f-4654-a261-a2902c9434e0"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.771040 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5558aeea-2a7f-4654-a261-a2902c9434e0-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "5558aeea-2a7f-4654-a261-a2902c9434e0" (UID: "5558aeea-2a7f-4654-a261-a2902c9434e0"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.771700 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5558aeea-2a7f-4654-a261-a2902c9434e0-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "5558aeea-2a7f-4654-a261-a2902c9434e0" (UID: "5558aeea-2a7f-4654-a261-a2902c9434e0"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.772421 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.773055 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5558aeea-2a7f-4654-a261-a2902c9434e0-kube-api-access-4gxxw" (OuterVolumeSpecName: "kube-api-access-4gxxw") pod "5558aeea-2a7f-4654-a261-a2902c9434e0" (UID: "5558aeea-2a7f-4654-a261-a2902c9434e0"). InnerVolumeSpecName "kube-api-access-4gxxw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.774045 4558 scope.go:117] "RemoveContainer" containerID="41ac2c9d6515d906d6c94b74bae2f46a5cb6aead6aa685ee10b8acf27caf202f" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.787576 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage16-crc" (OuterVolumeSpecName: "mysql-db") pod "5558aeea-2a7f-4654-a261-a2902c9434e0" (UID: "5558aeea-2a7f-4654-a261-a2902c9434e0"). InnerVolumeSpecName "local-storage16-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.791586 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.805015 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.809791 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5558aeea-2a7f-4654-a261-a2902c9434e0-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "5558aeea-2a7f-4654-a261-a2902c9434e0" (UID: "5558aeea-2a7f-4654-a261-a2902c9434e0"). InnerVolumeSpecName "galera-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.809952 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5558aeea-2a7f-4654-a261-a2902c9434e0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5558aeea-2a7f-4654-a261-a2902c9434e0" (UID: "5558aeea-2a7f-4654-a261-a2902c9434e0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.811128 4558 scope.go:117] "RemoveContainer" containerID="9e42a0ec21415521693070869352b9127abae2be472aef5468b15f95c32a8545" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.811499 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.816327 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.820408 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.824084 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.829640 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.831884 4558 scope.go:117] "RemoveContainer" containerID="b8d7b4e681364253444a1c0248fab2e3349b056b8048a2f9443b427211b7dd9d" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.834942 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-86d9c4d694-78ch5"] Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.839405 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-86d9c4d694-78ch5"] Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.843180 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-f6ldh"] Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.846838 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-f6ldh"] Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.872253 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5558aeea-2a7f-4654-a261-a2902c9434e0-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.872295 4558 reconciler_common.go:286] 
"operationExecutor.UnmountDevice started for volume \"local-storage16-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage16-crc\") on node \"crc\" " Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.872309 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4gxxw\" (UniqueName: \"kubernetes.io/projected/5558aeea-2a7f-4654-a261-a2902c9434e0-kube-api-access-4gxxw\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.872320 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/90b46572-b60a-4ef8-ad8c-1e67f51d9df4-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.872331 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5558aeea-2a7f-4654-a261-a2902c9434e0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.872341 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-trdbf\" (UniqueName: \"kubernetes.io/projected/90b46572-b60a-4ef8-ad8c-1e67f51d9df4-kube-api-access-trdbf\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.872350 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/5558aeea-2a7f-4654-a261-a2902c9434e0-config-data-default\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.872359 4558 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/5558aeea-2a7f-4654-a261-a2902c9434e0-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.872373 4558 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5558aeea-2a7f-4654-a261-a2902c9434e0-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.874534 4558 scope.go:117] "RemoveContainer" containerID="6c367435c2502d2379a050e4069609ac5d1de29fbcbd65e5756a526e79921a06" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.885766 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage16-crc" (UniqueName: "kubernetes.io/local-volume/local-storage16-crc") on node "crc" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.898282 4558 scope.go:117] "RemoveContainer" containerID="d149c52b2c9aabf55ad4004fb586734f8d22f15451b6f01163da779feaa3c048" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.925629 4558 scope.go:117] "RemoveContainer" containerID="67a4a2b743f3eaffa244dd8f012af0a3fdc18477dc3e1252ee8aced329b34fda" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.962413 4558 scope.go:117] "RemoveContainer" containerID="c38ffae2f71dd677618391bc8fe1b2be1a66a0d46ff78c2fe5bdd127277f6054" Jan 20 17:10:59 crc kubenswrapper[4558]: I0120 17:10:59.974144 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage16-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage16-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.008872 4558 scope.go:117] "RemoveContainer" containerID="013d2b8016fc21c333aa7e40dd675884b8161eccaa5a0b7a7ee016c3b3fef132" Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.063946 4558 scope.go:117] "RemoveContainer" 
containerID="3961204b11e978cd9995166f21ade05036318ca560f84a52206089ba8789ed9d" Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.084285 4558 scope.go:117] "RemoveContainer" containerID="611dd2ec139015a415381aaa856813e6abfb158b3d9c4ce578651f256b7bef8b" Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.117235 4558 scope.go:117] "RemoveContainer" containerID="166e1e0d14b2cb48e695c5ce11cca54d963c6250ddce1f9f936a358c8a5b3580" Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.143495 4558 scope.go:117] "RemoveContainer" containerID="30050fc936fb62cb7339a9a80b961e2d364426635e1598a842e2fb3d470652c2" Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.177318 4558 scope.go:117] "RemoveContainer" containerID="c1a2c6fb29683dd75096376810ff0f53fdd799813b29ba97b56d0f8d5b5eac42" Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.220731 4558 scope.go:117] "RemoveContainer" containerID="737bdc74f2c709ec376fc9204c898d8cba1280d8792f62f6a65923c927c2ba2f" Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.244313 4558 scope.go:117] "RemoveContainer" containerID="26c2170a07662d397db2cd598e3f8c54887440864fb9753f04f3060866b77ade" Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.275541 4558 scope.go:117] "RemoveContainer" containerID="14a9e266c3e7805021dc147f925c01b6b1d5dec0c4a69dce9222790ed28a80b1" Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.294545 4558 scope.go:117] "RemoveContainer" containerID="63f037fefd6327562c7c2f8bf9caa685258bcc48a23627261cd9883239f176f0" Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.312494 4558 scope.go:117] "RemoveContainer" containerID="f36f1742d8b3097d9223becd7320632e27297558f65c436f5ad091f8b5074cbd" Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.549187 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"5558aeea-2a7f-4654-a261-a2902c9434e0","Type":"ContainerDied","Data":"34b1ad77eceb5d064cd6b97ce02e8b533b8f06fddeef342d20a8e7595ec71ac6"} Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.549247 4558 scope.go:117] "RemoveContainer" containerID="0334d5f9b2258c2ff98366283a9bd0e77fed9de54416a50e2a8bd36057e911d2" Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.549391 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.572968 4558 generic.go:334] "Generic (PLEG): container finished" podID="c3c7801f-4a15-44ce-8732-4f93b819a7c5" containerID="f9f39e4d701bf54b38ab7a0e52db29f8a7cc6443a7b4a92c71351d6317512b7a" exitCode=0 Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.575125 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="043476ec-5392-41af-970c-89d20b6b30a5" path="/var/lib/kubelet/pods/043476ec-5392-41af-970c-89d20b6b30a5/volumes" Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.575691 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1ec5ef72-7c69-4be3-974d-c020ddfea4f7" path="/var/lib/kubelet/pods/1ec5ef72-7c69-4be3-974d-c020ddfea4f7/volumes" Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.576628 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="209ec6bc-170f-4c8a-ad7e-e441ace95d1b" path="/var/lib/kubelet/pods/209ec6bc-170f-4c8a-ad7e-e441ace95d1b/volumes" Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.577872 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25737ece-fb84-4543-8c3a-94ffa7b8f095" path="/var/lib/kubelet/pods/25737ece-fb84-4543-8c3a-94ffa7b8f095/volumes" Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.578551 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5560f6a4-fc05-4d97-8496-b87804dfab99" path="/var/lib/kubelet/pods/5560f6a4-fc05-4d97-8496-b87804dfab99/volumes" Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.579742 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87001341-5a74-4796-a332-6d57b2cf11c9" path="/var/lib/kubelet/pods/87001341-5a74-4796-a332-6d57b2cf11c9/volumes" Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.580305 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="90b46572-b60a-4ef8-ad8c-1e67f51d9df4" path="/var/lib/kubelet/pods/90b46572-b60a-4ef8-ad8c-1e67f51d9df4/volumes" Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.580592 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="931d9a7a-aa04-42c3-a276-e71388a8b9d3" path="/var/lib/kubelet/pods/931d9a7a-aa04-42c3-a276-e71388a8b9d3/volumes" Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.581088 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="95cf677f-e744-4dad-acfb-507100d2ea14" path="/var/lib/kubelet/pods/95cf677f-e744-4dad-acfb-507100d2ea14/volumes" Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.582696 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d" path="/var/lib/kubelet/pods/a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d/volumes" Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.583365 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c6a5b342-eaf2-408c-828e-9bc0bf10d09e" path="/var/lib/kubelet/pods/c6a5b342-eaf2-408c-828e-9bc0bf10d09e/volumes" Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.591598 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cf954b15-e163-4f99-8c1b-5e04d06666bc" path="/var/lib/kubelet/pods/cf954b15-e163-4f99-8c1b-5e04d06666bc/volumes" Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.593897 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1" 
path="/var/lib/kubelet/pods/ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1/volumes" Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.594508 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ee147895-0bad-4dc1-a348-1be3348a7180" path="/var/lib/kubelet/pods/ee147895-0bad-4dc1-a348-1be3348a7180/volumes" Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.594870 4558 generic.go:334] "Generic (PLEG): container finished" podID="07fab146-67be-42ba-b263-ee19fe95720b" containerID="ed0d0cf0a577509bde5ed7f30ee7d1552be82db06a4009c9c9826bf9daa8477a" exitCode=0 Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.595337 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-679bbddb99-dh2h2" event={"ID":"c3c7801f-4a15-44ce-8732-4f93b819a7c5","Type":"ContainerDied","Data":"f9f39e4d701bf54b38ab7a0e52db29f8a7cc6443a7b4a92c71351d6317512b7a"} Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.595403 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.595445 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.595468 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-0" event={"ID":"07fab146-67be-42ba-b263-ee19fe95720b","Type":"ContainerDied","Data":"ed0d0cf0a577509bde5ed7f30ee7d1552be82db06a4009c9c9826bf9daa8477a"} Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.601122 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.614664 4558 scope.go:117] "RemoveContainer" containerID="b0e23acf14cdde4592922d77c67da28178ab391aa9f227444148a567d640e55d" Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.628838 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.633302 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.656436 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-679bbddb99-dh2h2" Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.769018 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.806658 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"07fab146-67be-42ba-b263-ee19fe95720b\" (UID: \"07fab146-67be-42ba-b263-ee19fe95720b\") " Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.806723 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xxj9h\" (UniqueName: \"kubernetes.io/projected/c3c7801f-4a15-44ce-8732-4f93b819a7c5-kube-api-access-xxj9h\") pod \"c3c7801f-4a15-44ce-8732-4f93b819a7c5\" (UID: \"c3c7801f-4a15-44ce-8732-4f93b819a7c5\") " Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.806751 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/c3c7801f-4a15-44ce-8732-4f93b819a7c5-credential-keys\") pod \"c3c7801f-4a15-44ce-8732-4f93b819a7c5\" (UID: \"c3c7801f-4a15-44ce-8732-4f93b819a7c5\") " Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.806795 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c3c7801f-4a15-44ce-8732-4f93b819a7c5-public-tls-certs\") pod \"c3c7801f-4a15-44ce-8732-4f93b819a7c5\" (UID: \"c3c7801f-4a15-44ce-8732-4f93b819a7c5\") " Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.806816 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c3c7801f-4a15-44ce-8732-4f93b819a7c5-internal-tls-certs\") pod \"c3c7801f-4a15-44ce-8732-4f93b819a7c5\" (UID: \"c3c7801f-4a15-44ce-8732-4f93b819a7c5\") " Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.806837 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3c7801f-4a15-44ce-8732-4f93b819a7c5-combined-ca-bundle\") pod \"c3c7801f-4a15-44ce-8732-4f93b819a7c5\" (UID: \"c3c7801f-4a15-44ce-8732-4f93b819a7c5\") " Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.806877 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c3c7801f-4a15-44ce-8732-4f93b819a7c5-config-data\") pod \"c3c7801f-4a15-44ce-8732-4f93b819a7c5\" (UID: \"c3c7801f-4a15-44ce-8732-4f93b819a7c5\") " Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.806906 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/07fab146-67be-42ba-b263-ee19fe95720b-server-conf\") pod \"07fab146-67be-42ba-b263-ee19fe95720b\" (UID: \"07fab146-67be-42ba-b263-ee19fe95720b\") " Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.806943 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/07fab146-67be-42ba-b263-ee19fe95720b-rabbitmq-plugins\") pod \"07fab146-67be-42ba-b263-ee19fe95720b\" (UID: \"07fab146-67be-42ba-b263-ee19fe95720b\") " Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.806963 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c3c7801f-4a15-44ce-8732-4f93b819a7c5-scripts\") pod \"c3c7801f-4a15-44ce-8732-4f93b819a7c5\" (UID: 
\"c3c7801f-4a15-44ce-8732-4f93b819a7c5\") " Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.806980 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/07fab146-67be-42ba-b263-ee19fe95720b-rabbitmq-erlang-cookie\") pod \"07fab146-67be-42ba-b263-ee19fe95720b\" (UID: \"07fab146-67be-42ba-b263-ee19fe95720b\") " Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.807019 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/07fab146-67be-42ba-b263-ee19fe95720b-pod-info\") pod \"07fab146-67be-42ba-b263-ee19fe95720b\" (UID: \"07fab146-67be-42ba-b263-ee19fe95720b\") " Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.807041 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/07fab146-67be-42ba-b263-ee19fe95720b-erlang-cookie-secret\") pod \"07fab146-67be-42ba-b263-ee19fe95720b\" (UID: \"07fab146-67be-42ba-b263-ee19fe95720b\") " Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.807060 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fspbv\" (UniqueName: \"kubernetes.io/projected/07fab146-67be-42ba-b263-ee19fe95720b-kube-api-access-fspbv\") pod \"07fab146-67be-42ba-b263-ee19fe95720b\" (UID: \"07fab146-67be-42ba-b263-ee19fe95720b\") " Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.807095 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/07fab146-67be-42ba-b263-ee19fe95720b-plugins-conf\") pod \"07fab146-67be-42ba-b263-ee19fe95720b\" (UID: \"07fab146-67be-42ba-b263-ee19fe95720b\") " Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.807121 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/07fab146-67be-42ba-b263-ee19fe95720b-config-data\") pod \"07fab146-67be-42ba-b263-ee19fe95720b\" (UID: \"07fab146-67be-42ba-b263-ee19fe95720b\") " Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.807140 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/c3c7801f-4a15-44ce-8732-4f93b819a7c5-fernet-keys\") pod \"c3c7801f-4a15-44ce-8732-4f93b819a7c5\" (UID: \"c3c7801f-4a15-44ce-8732-4f93b819a7c5\") " Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.807204 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/07fab146-67be-42ba-b263-ee19fe95720b-rabbitmq-confd\") pod \"07fab146-67be-42ba-b263-ee19fe95720b\" (UID: \"07fab146-67be-42ba-b263-ee19fe95720b\") " Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.807227 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/07fab146-67be-42ba-b263-ee19fe95720b-rabbitmq-tls\") pod \"07fab146-67be-42ba-b263-ee19fe95720b\" (UID: \"07fab146-67be-42ba-b263-ee19fe95720b\") " Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.810156 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/07fab146-67be-42ba-b263-ee19fe95720b-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "07fab146-67be-42ba-b263-ee19fe95720b" (UID: 
"07fab146-67be-42ba-b263-ee19fe95720b"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.810246 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/07fab146-67be-42ba-b263-ee19fe95720b-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "07fab146-67be-42ba-b263-ee19fe95720b" (UID: "07fab146-67be-42ba-b263-ee19fe95720b"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.812440 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/07fab146-67be-42ba-b263-ee19fe95720b-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "07fab146-67be-42ba-b263-ee19fe95720b" (UID: "07fab146-67be-42ba-b263-ee19fe95720b"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.812791 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/07fab146-67be-42ba-b263-ee19fe95720b-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "07fab146-67be-42ba-b263-ee19fe95720b" (UID: "07fab146-67be-42ba-b263-ee19fe95720b"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.814609 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/07fab146-67be-42ba-b263-ee19fe95720b-kube-api-access-fspbv" (OuterVolumeSpecName: "kube-api-access-fspbv") pod "07fab146-67be-42ba-b263-ee19fe95720b" (UID: "07fab146-67be-42ba-b263-ee19fe95720b"). InnerVolumeSpecName "kube-api-access-fspbv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.815010 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c3c7801f-4a15-44ce-8732-4f93b819a7c5-scripts" (OuterVolumeSpecName: "scripts") pod "c3c7801f-4a15-44ce-8732-4f93b819a7c5" (UID: "c3c7801f-4a15-44ce-8732-4f93b819a7c5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.816056 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c3c7801f-4a15-44ce-8732-4f93b819a7c5-kube-api-access-xxj9h" (OuterVolumeSpecName: "kube-api-access-xxj9h") pod "c3c7801f-4a15-44ce-8732-4f93b819a7c5" (UID: "c3c7801f-4a15-44ce-8732-4f93b819a7c5"). InnerVolumeSpecName "kube-api-access-xxj9h". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.816948 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c3c7801f-4a15-44ce-8732-4f93b819a7c5-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "c3c7801f-4a15-44ce-8732-4f93b819a7c5" (UID: "c3c7801f-4a15-44ce-8732-4f93b819a7c5"). InnerVolumeSpecName "credential-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.818405 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "persistence") pod "07fab146-67be-42ba-b263-ee19fe95720b" (UID: "07fab146-67be-42ba-b263-ee19fe95720b"). InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.826645 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/07fab146-67be-42ba-b263-ee19fe95720b-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "07fab146-67be-42ba-b263-ee19fe95720b" (UID: "07fab146-67be-42ba-b263-ee19fe95720b"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.826885 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c3c7801f-4a15-44ce-8732-4f93b819a7c5-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "c3c7801f-4a15-44ce-8732-4f93b819a7c5" (UID: "c3c7801f-4a15-44ce-8732-4f93b819a7c5"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.828685 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/07fab146-67be-42ba-b263-ee19fe95720b-pod-info" (OuterVolumeSpecName: "pod-info") pod "07fab146-67be-42ba-b263-ee19fe95720b" (UID: "07fab146-67be-42ba-b263-ee19fe95720b"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.843870 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c3c7801f-4a15-44ce-8732-4f93b819a7c5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c3c7801f-4a15-44ce-8732-4f93b819a7c5" (UID: "c3c7801f-4a15-44ce-8732-4f93b819a7c5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.850847 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/07fab146-67be-42ba-b263-ee19fe95720b-config-data" (OuterVolumeSpecName: "config-data") pod "07fab146-67be-42ba-b263-ee19fe95720b" (UID: "07fab146-67be-42ba-b263-ee19fe95720b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.851338 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c3c7801f-4a15-44ce-8732-4f93b819a7c5-config-data" (OuterVolumeSpecName: "config-data") pod "c3c7801f-4a15-44ce-8732-4f93b819a7c5" (UID: "c3c7801f-4a15-44ce-8732-4f93b819a7c5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.854801 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/07fab146-67be-42ba-b263-ee19fe95720b-server-conf" (OuterVolumeSpecName: "server-conf") pod "07fab146-67be-42ba-b263-ee19fe95720b" (UID: "07fab146-67be-42ba-b263-ee19fe95720b"). InnerVolumeSpecName "server-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.866675 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c3c7801f-4a15-44ce-8732-4f93b819a7c5-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "c3c7801f-4a15-44ce-8732-4f93b819a7c5" (UID: "c3c7801f-4a15-44ce-8732-4f93b819a7c5"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.868110 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c3c7801f-4a15-44ce-8732-4f93b819a7c5-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "c3c7801f-4a15-44ce-8732-4f93b819a7c5" (UID: "c3c7801f-4a15-44ce-8732-4f93b819a7c5"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.891676 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/07fab146-67be-42ba-b263-ee19fe95720b-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "07fab146-67be-42ba-b263-ee19fe95720b" (UID: "07fab146-67be-42ba-b263-ee19fe95720b"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.909251 4558 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/07fab146-67be-42ba-b263-ee19fe95720b-server-conf\") on node \"crc\" DevicePath \"\"" Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.909283 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/07fab146-67be-42ba-b263-ee19fe95720b-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.909315 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c3c7801f-4a15-44ce-8732-4f93b819a7c5-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.909331 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/07fab146-67be-42ba-b263-ee19fe95720b-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.909342 4558 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/07fab146-67be-42ba-b263-ee19fe95720b-pod-info\") on node \"crc\" DevicePath \"\"" Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.909352 4558 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/07fab146-67be-42ba-b263-ee19fe95720b-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.909394 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fspbv\" (UniqueName: \"kubernetes.io/projected/07fab146-67be-42ba-b263-ee19fe95720b-kube-api-access-fspbv\") on node \"crc\" DevicePath \"\"" Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.909407 4558 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/07fab146-67be-42ba-b263-ee19fe95720b-plugins-conf\") on node \"crc\" DevicePath \"\"" Jan 20 17:11:00 
crc kubenswrapper[4558]: I0120 17:11:00.909419 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/07fab146-67be-42ba-b263-ee19fe95720b-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.909429 4558 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/c3c7801f-4a15-44ce-8732-4f93b819a7c5-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.909439 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/07fab146-67be-42ba-b263-ee19fe95720b-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.909448 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/07fab146-67be-42ba-b263-ee19fe95720b-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.909558 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.909574 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xxj9h\" (UniqueName: \"kubernetes.io/projected/c3c7801f-4a15-44ce-8732-4f93b819a7c5-kube-api-access-xxj9h\") on node \"crc\" DevicePath \"\"" Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.909588 4558 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/c3c7801f-4a15-44ce-8732-4f93b819a7c5-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.909598 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c3c7801f-4a15-44ce-8732-4f93b819a7c5-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.909611 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c3c7801f-4a15-44ce-8732-4f93b819a7c5-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.909641 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3c7801f-4a15-44ce-8732-4f93b819a7c5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.909651 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c3c7801f-4a15-44ce-8732-4f93b819a7c5-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:11:00 crc kubenswrapper[4558]: I0120 17:11:00.921003 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Jan 20 17:11:01 crc kubenswrapper[4558]: I0120 17:11:01.010230 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:11:01 crc kubenswrapper[4558]: E0120 17:11:01.010326 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-cell1-config-data: 
configmap "rabbitmq-cell1-config-data" not found Jan 20 17:11:01 crc kubenswrapper[4558]: E0120 17:11:01.010392 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/39403277-bf62-47c1-8e86-cdec59f2da7b-config-data podName:39403277-bf62-47c1-8e86-cdec59f2da7b nodeName:}" failed. No retries permitted until 2026-01-20 17:11:09.010365729 +0000 UTC m=+1762.770703695 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/39403277-bf62-47c1-8e86-cdec59f2da7b-config-data") pod "rabbitmq-cell1-server-0" (UID: "39403277-bf62-47c1-8e86-cdec59f2da7b") : configmap "rabbitmq-cell1-config-data" not found Jan 20 17:11:01 crc kubenswrapper[4558]: I0120 17:11:01.335976 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:11:01 crc kubenswrapper[4558]: I0120 17:11:01.518322 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"39403277-bf62-47c1-8e86-cdec59f2da7b\" (UID: \"39403277-bf62-47c1-8e86-cdec59f2da7b\") " Jan 20 17:11:01 crc kubenswrapper[4558]: I0120 17:11:01.518389 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/39403277-bf62-47c1-8e86-cdec59f2da7b-config-data\") pod \"39403277-bf62-47c1-8e86-cdec59f2da7b\" (UID: \"39403277-bf62-47c1-8e86-cdec59f2da7b\") " Jan 20 17:11:01 crc kubenswrapper[4558]: I0120 17:11:01.518426 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/39403277-bf62-47c1-8e86-cdec59f2da7b-rabbitmq-tls\") pod \"39403277-bf62-47c1-8e86-cdec59f2da7b\" (UID: \"39403277-bf62-47c1-8e86-cdec59f2da7b\") " Jan 20 17:11:01 crc kubenswrapper[4558]: I0120 17:11:01.518458 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/39403277-bf62-47c1-8e86-cdec59f2da7b-erlang-cookie-secret\") pod \"39403277-bf62-47c1-8e86-cdec59f2da7b\" (UID: \"39403277-bf62-47c1-8e86-cdec59f2da7b\") " Jan 20 17:11:01 crc kubenswrapper[4558]: I0120 17:11:01.518495 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gs7c8\" (UniqueName: \"kubernetes.io/projected/39403277-bf62-47c1-8e86-cdec59f2da7b-kube-api-access-gs7c8\") pod \"39403277-bf62-47c1-8e86-cdec59f2da7b\" (UID: \"39403277-bf62-47c1-8e86-cdec59f2da7b\") " Jan 20 17:11:01 crc kubenswrapper[4558]: I0120 17:11:01.518524 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/39403277-bf62-47c1-8e86-cdec59f2da7b-pod-info\") pod \"39403277-bf62-47c1-8e86-cdec59f2da7b\" (UID: \"39403277-bf62-47c1-8e86-cdec59f2da7b\") " Jan 20 17:11:01 crc kubenswrapper[4558]: I0120 17:11:01.518550 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/39403277-bf62-47c1-8e86-cdec59f2da7b-rabbitmq-erlang-cookie\") pod \"39403277-bf62-47c1-8e86-cdec59f2da7b\" (UID: \"39403277-bf62-47c1-8e86-cdec59f2da7b\") " Jan 20 17:11:01 crc kubenswrapper[4558]: I0120 17:11:01.518583 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: 
\"kubernetes.io/empty-dir/39403277-bf62-47c1-8e86-cdec59f2da7b-rabbitmq-plugins\") pod \"39403277-bf62-47c1-8e86-cdec59f2da7b\" (UID: \"39403277-bf62-47c1-8e86-cdec59f2da7b\") " Jan 20 17:11:01 crc kubenswrapper[4558]: I0120 17:11:01.518612 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/39403277-bf62-47c1-8e86-cdec59f2da7b-server-conf\") pod \"39403277-bf62-47c1-8e86-cdec59f2da7b\" (UID: \"39403277-bf62-47c1-8e86-cdec59f2da7b\") " Jan 20 17:11:01 crc kubenswrapper[4558]: I0120 17:11:01.518645 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/39403277-bf62-47c1-8e86-cdec59f2da7b-plugins-conf\") pod \"39403277-bf62-47c1-8e86-cdec59f2da7b\" (UID: \"39403277-bf62-47c1-8e86-cdec59f2da7b\") " Jan 20 17:11:01 crc kubenswrapper[4558]: I0120 17:11:01.518668 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/39403277-bf62-47c1-8e86-cdec59f2da7b-rabbitmq-confd\") pod \"39403277-bf62-47c1-8e86-cdec59f2da7b\" (UID: \"39403277-bf62-47c1-8e86-cdec59f2da7b\") " Jan 20 17:11:01 crc kubenswrapper[4558]: I0120 17:11:01.519931 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/39403277-bf62-47c1-8e86-cdec59f2da7b-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "39403277-bf62-47c1-8e86-cdec59f2da7b" (UID: "39403277-bf62-47c1-8e86-cdec59f2da7b"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:11:01 crc kubenswrapper[4558]: I0120 17:11:01.520076 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/39403277-bf62-47c1-8e86-cdec59f2da7b-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "39403277-bf62-47c1-8e86-cdec59f2da7b" (UID: "39403277-bf62-47c1-8e86-cdec59f2da7b"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:11:01 crc kubenswrapper[4558]: I0120 17:11:01.520867 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/39403277-bf62-47c1-8e86-cdec59f2da7b-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "39403277-bf62-47c1-8e86-cdec59f2da7b" (UID: "39403277-bf62-47c1-8e86-cdec59f2da7b"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:11:01 crc kubenswrapper[4558]: I0120 17:11:01.523610 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/39403277-bf62-47c1-8e86-cdec59f2da7b-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "39403277-bf62-47c1-8e86-cdec59f2da7b" (UID: "39403277-bf62-47c1-8e86-cdec59f2da7b"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:11:01 crc kubenswrapper[4558]: I0120 17:11:01.523712 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/39403277-bf62-47c1-8e86-cdec59f2da7b-kube-api-access-gs7c8" (OuterVolumeSpecName: "kube-api-access-gs7c8") pod "39403277-bf62-47c1-8e86-cdec59f2da7b" (UID: "39403277-bf62-47c1-8e86-cdec59f2da7b"). InnerVolumeSpecName "kube-api-access-gs7c8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:11:01 crc kubenswrapper[4558]: I0120 17:11:01.523903 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "persistence") pod "39403277-bf62-47c1-8e86-cdec59f2da7b" (UID: "39403277-bf62-47c1-8e86-cdec59f2da7b"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:11:01 crc kubenswrapper[4558]: I0120 17:11:01.525602 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/39403277-bf62-47c1-8e86-cdec59f2da7b-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "39403277-bf62-47c1-8e86-cdec59f2da7b" (UID: "39403277-bf62-47c1-8e86-cdec59f2da7b"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:11:01 crc kubenswrapper[4558]: I0120 17:11:01.529947 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/39403277-bf62-47c1-8e86-cdec59f2da7b-pod-info" (OuterVolumeSpecName: "pod-info") pod "39403277-bf62-47c1-8e86-cdec59f2da7b" (UID: "39403277-bf62-47c1-8e86-cdec59f2da7b"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Jan 20 17:11:01 crc kubenswrapper[4558]: I0120 17:11:01.538189 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/39403277-bf62-47c1-8e86-cdec59f2da7b-config-data" (OuterVolumeSpecName: "config-data") pod "39403277-bf62-47c1-8e86-cdec59f2da7b" (UID: "39403277-bf62-47c1-8e86-cdec59f2da7b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:11:01 crc kubenswrapper[4558]: I0120 17:11:01.548106 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/39403277-bf62-47c1-8e86-cdec59f2da7b-server-conf" (OuterVolumeSpecName: "server-conf") pod "39403277-bf62-47c1-8e86-cdec59f2da7b" (UID: "39403277-bf62-47c1-8e86-cdec59f2da7b"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:11:01 crc kubenswrapper[4558]: I0120 17:11:01.598191 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/39403277-bf62-47c1-8e86-cdec59f2da7b-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "39403277-bf62-47c1-8e86-cdec59f2da7b" (UID: "39403277-bf62-47c1-8e86-cdec59f2da7b"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:11:01 crc kubenswrapper[4558]: I0120 17:11:01.615209 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-679bbddb99-dh2h2" event={"ID":"c3c7801f-4a15-44ce-8732-4f93b819a7c5","Type":"ContainerDied","Data":"d3db55ecc908dffebec3940bd1f575c620640dab9324b5169d97c783cc0e4108"} Jan 20 17:11:01 crc kubenswrapper[4558]: I0120 17:11:01.615259 4558 scope.go:117] "RemoveContainer" containerID="f9f39e4d701bf54b38ab7a0e52db29f8a7cc6443a7b4a92c71351d6317512b7a" Jan 20 17:11:01 crc kubenswrapper[4558]: I0120 17:11:01.615332 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-679bbddb99-dh2h2" Jan 20 17:11:01 crc kubenswrapper[4558]: I0120 17:11:01.624850 4558 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/39403277-bf62-47c1-8e86-cdec59f2da7b-server-conf\") on node \"crc\" DevicePath \"\"" Jan 20 17:11:01 crc kubenswrapper[4558]: I0120 17:11:01.624866 4558 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/39403277-bf62-47c1-8e86-cdec59f2da7b-plugins-conf\") on node \"crc\" DevicePath \"\"" Jan 20 17:11:01 crc kubenswrapper[4558]: I0120 17:11:01.624875 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/39403277-bf62-47c1-8e86-cdec59f2da7b-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Jan 20 17:11:01 crc kubenswrapper[4558]: I0120 17:11:01.624893 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Jan 20 17:11:01 crc kubenswrapper[4558]: I0120 17:11:01.624902 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/39403277-bf62-47c1-8e86-cdec59f2da7b-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:11:01 crc kubenswrapper[4558]: I0120 17:11:01.624911 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/39403277-bf62-47c1-8e86-cdec59f2da7b-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Jan 20 17:11:01 crc kubenswrapper[4558]: I0120 17:11:01.624919 4558 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/39403277-bf62-47c1-8e86-cdec59f2da7b-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Jan 20 17:11:01 crc kubenswrapper[4558]: I0120 17:11:01.624928 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gs7c8\" (UniqueName: \"kubernetes.io/projected/39403277-bf62-47c1-8e86-cdec59f2da7b-kube-api-access-gs7c8\") on node \"crc\" DevicePath \"\"" Jan 20 17:11:01 crc kubenswrapper[4558]: I0120 17:11:01.624937 4558 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/39403277-bf62-47c1-8e86-cdec59f2da7b-pod-info\") on node \"crc\" DevicePath \"\"" Jan 20 17:11:01 crc kubenswrapper[4558]: I0120 17:11:01.624945 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/39403277-bf62-47c1-8e86-cdec59f2da7b-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Jan 20 17:11:01 crc kubenswrapper[4558]: I0120 17:11:01.624954 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/39403277-bf62-47c1-8e86-cdec59f2da7b-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Jan 20 17:11:01 crc kubenswrapper[4558]: I0120 17:11:01.644505 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-0" event={"ID":"07fab146-67be-42ba-b263-ee19fe95720b","Type":"ContainerDied","Data":"f851d4db758ec400cd2a5783e8b809166ecd88cabc95b5ae3feccf69f99f49b2"} Jan 20 17:11:01 crc kubenswrapper[4558]: I0120 17:11:01.644544 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:11:01 crc kubenswrapper[4558]: I0120 17:11:01.651032 4558 scope.go:117] "RemoveContainer" containerID="ed0d0cf0a577509bde5ed7f30ee7d1552be82db06a4009c9c9826bf9daa8477a" Jan 20 17:11:01 crc kubenswrapper[4558]: I0120 17:11:01.651591 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Jan 20 17:11:01 crc kubenswrapper[4558]: I0120 17:11:01.651748 4558 generic.go:334] "Generic (PLEG): container finished" podID="39403277-bf62-47c1-8e86-cdec59f2da7b" containerID="564f1f778e4e883e5ac1e2a892f2ceadcb69333aff26325a33259bf543a29685" exitCode=0 Jan 20 17:11:01 crc kubenswrapper[4558]: I0120 17:11:01.651797 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" event={"ID":"39403277-bf62-47c1-8e86-cdec59f2da7b","Type":"ContainerDied","Data":"564f1f778e4e883e5ac1e2a892f2ceadcb69333aff26325a33259bf543a29685"} Jan 20 17:11:01 crc kubenswrapper[4558]: I0120 17:11:01.651825 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" event={"ID":"39403277-bf62-47c1-8e86-cdec59f2da7b","Type":"ContainerDied","Data":"115b9114314899b77d2e49048148e18468424f9c063b59a8eda0ecc0a2b4bdb7"} Jan 20 17:11:01 crc kubenswrapper[4558]: I0120 17:11:01.651881 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:11:01 crc kubenswrapper[4558]: I0120 17:11:01.675092 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-679bbddb99-dh2h2"] Jan 20 17:11:01 crc kubenswrapper[4558]: I0120 17:11:01.680597 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-679bbddb99-dh2h2"] Jan 20 17:11:01 crc kubenswrapper[4558]: I0120 17:11:01.700691 4558 scope.go:117] "RemoveContainer" containerID="89eca221d9e593e3a9aecdfa6ea1b10c75f176fb44073dcdb0a8d8482121ba81" Jan 20 17:11:01 crc kubenswrapper[4558]: I0120 17:11:01.731887 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:11:01 crc kubenswrapper[4558]: I0120 17:11:01.743092 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:11:01 crc kubenswrapper[4558]: I0120 17:11:01.745669 4558 scope.go:117] "RemoveContainer" containerID="564f1f778e4e883e5ac1e2a892f2ceadcb69333aff26325a33259bf543a29685" Jan 20 17:11:01 crc kubenswrapper[4558]: I0120 17:11:01.751724 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:11:01 crc kubenswrapper[4558]: I0120 17:11:01.757244 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-0"] Jan 20 17:11:01 crc kubenswrapper[4558]: I0120 17:11:01.764116 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-0"] Jan 20 17:11:01 crc kubenswrapper[4558]: I0120 17:11:01.766820 4558 scope.go:117] "RemoveContainer" containerID="2a031a75cc169d112132928bc181cdf5e2a9498a2ceea6c4d92265e6e5d5799c" Jan 20 17:11:01 crc kubenswrapper[4558]: I0120 17:11:01.791330 4558 scope.go:117] "RemoveContainer" containerID="564f1f778e4e883e5ac1e2a892f2ceadcb69333aff26325a33259bf543a29685" Jan 
20 17:11:01 crc kubenswrapper[4558]: E0120 17:11:01.791900 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"564f1f778e4e883e5ac1e2a892f2ceadcb69333aff26325a33259bf543a29685\": container with ID starting with 564f1f778e4e883e5ac1e2a892f2ceadcb69333aff26325a33259bf543a29685 not found: ID does not exist" containerID="564f1f778e4e883e5ac1e2a892f2ceadcb69333aff26325a33259bf543a29685" Jan 20 17:11:01 crc kubenswrapper[4558]: I0120 17:11:01.791938 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"564f1f778e4e883e5ac1e2a892f2ceadcb69333aff26325a33259bf543a29685"} err="failed to get container status \"564f1f778e4e883e5ac1e2a892f2ceadcb69333aff26325a33259bf543a29685\": rpc error: code = NotFound desc = could not find container \"564f1f778e4e883e5ac1e2a892f2ceadcb69333aff26325a33259bf543a29685\": container with ID starting with 564f1f778e4e883e5ac1e2a892f2ceadcb69333aff26325a33259bf543a29685 not found: ID does not exist" Jan 20 17:11:01 crc kubenswrapper[4558]: I0120 17:11:01.791970 4558 scope.go:117] "RemoveContainer" containerID="2a031a75cc169d112132928bc181cdf5e2a9498a2ceea6c4d92265e6e5d5799c" Jan 20 17:11:01 crc kubenswrapper[4558]: E0120 17:11:01.793553 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2a031a75cc169d112132928bc181cdf5e2a9498a2ceea6c4d92265e6e5d5799c\": container with ID starting with 2a031a75cc169d112132928bc181cdf5e2a9498a2ceea6c4d92265e6e5d5799c not found: ID does not exist" containerID="2a031a75cc169d112132928bc181cdf5e2a9498a2ceea6c4d92265e6e5d5799c" Jan 20 17:11:01 crc kubenswrapper[4558]: I0120 17:11:01.793611 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2a031a75cc169d112132928bc181cdf5e2a9498a2ceea6c4d92265e6e5d5799c"} err="failed to get container status \"2a031a75cc169d112132928bc181cdf5e2a9498a2ceea6c4d92265e6e5d5799c\": rpc error: code = NotFound desc = could not find container \"2a031a75cc169d112132928bc181cdf5e2a9498a2ceea6c4d92265e6e5d5799c\": container with ID starting with 2a031a75cc169d112132928bc181cdf5e2a9498a2ceea6c4d92265e6e5d5799c not found: ID does not exist" Jan 20 17:11:01 crc kubenswrapper[4558]: I0120 17:11:01.940199 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-7bcc85c7d9-jm8qh" Jan 20 17:11:02 crc kubenswrapper[4558]: I0120 17:11:02.034481 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/faade961-fce0-4ad6-a039-4ba83a95dd68-config-data\") pod \"faade961-fce0-4ad6-a039-4ba83a95dd68\" (UID: \"faade961-fce0-4ad6-a039-4ba83a95dd68\") " Jan 20 17:11:02 crc kubenswrapper[4558]: I0120 17:11:02.034543 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/faade961-fce0-4ad6-a039-4ba83a95dd68-logs\") pod \"faade961-fce0-4ad6-a039-4ba83a95dd68\" (UID: \"faade961-fce0-4ad6-a039-4ba83a95dd68\") " Jan 20 17:11:02 crc kubenswrapper[4558]: I0120 17:11:02.034579 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/faade961-fce0-4ad6-a039-4ba83a95dd68-config-data-custom\") pod \"faade961-fce0-4ad6-a039-4ba83a95dd68\" (UID: \"faade961-fce0-4ad6-a039-4ba83a95dd68\") " Jan 20 17:11:02 crc kubenswrapper[4558]: I0120 17:11:02.034640 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/faade961-fce0-4ad6-a039-4ba83a95dd68-combined-ca-bundle\") pod \"faade961-fce0-4ad6-a039-4ba83a95dd68\" (UID: \"faade961-fce0-4ad6-a039-4ba83a95dd68\") " Jan 20 17:11:02 crc kubenswrapper[4558]: I0120 17:11:02.034666 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r7std\" (UniqueName: \"kubernetes.io/projected/faade961-fce0-4ad6-a039-4ba83a95dd68-kube-api-access-r7std\") pod \"faade961-fce0-4ad6-a039-4ba83a95dd68\" (UID: \"faade961-fce0-4ad6-a039-4ba83a95dd68\") " Jan 20 17:11:02 crc kubenswrapper[4558]: I0120 17:11:02.035738 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/faade961-fce0-4ad6-a039-4ba83a95dd68-logs" (OuterVolumeSpecName: "logs") pod "faade961-fce0-4ad6-a039-4ba83a95dd68" (UID: "faade961-fce0-4ad6-a039-4ba83a95dd68"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:11:02 crc kubenswrapper[4558]: I0120 17:11:02.040242 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/faade961-fce0-4ad6-a039-4ba83a95dd68-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "faade961-fce0-4ad6-a039-4ba83a95dd68" (UID: "faade961-fce0-4ad6-a039-4ba83a95dd68"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:11:02 crc kubenswrapper[4558]: I0120 17:11:02.040585 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/faade961-fce0-4ad6-a039-4ba83a95dd68-kube-api-access-r7std" (OuterVolumeSpecName: "kube-api-access-r7std") pod "faade961-fce0-4ad6-a039-4ba83a95dd68" (UID: "faade961-fce0-4ad6-a039-4ba83a95dd68"). InnerVolumeSpecName "kube-api-access-r7std". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:11:02 crc kubenswrapper[4558]: I0120 17:11:02.059271 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/faade961-fce0-4ad6-a039-4ba83a95dd68-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "faade961-fce0-4ad6-a039-4ba83a95dd68" (UID: "faade961-fce0-4ad6-a039-4ba83a95dd68"). 
InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:11:02 crc kubenswrapper[4558]: I0120 17:11:02.073284 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/faade961-fce0-4ad6-a039-4ba83a95dd68-config-data" (OuterVolumeSpecName: "config-data") pod "faade961-fce0-4ad6-a039-4ba83a95dd68" (UID: "faade961-fce0-4ad6-a039-4ba83a95dd68"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:11:02 crc kubenswrapper[4558]: I0120 17:11:02.112751 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:11:02 crc kubenswrapper[4558]: I0120 17:11:02.136015 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jjl4r\" (UniqueName: \"kubernetes.io/projected/04194274-834f-49e3-ac2f-c28998193181-kube-api-access-jjl4r\") pod \"04194274-834f-49e3-ac2f-c28998193181\" (UID: \"04194274-834f-49e3-ac2f-c28998193181\") " Jan 20 17:11:02 crc kubenswrapper[4558]: I0120 17:11:02.136210 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04194274-834f-49e3-ac2f-c28998193181-config-data\") pod \"04194274-834f-49e3-ac2f-c28998193181\" (UID: \"04194274-834f-49e3-ac2f-c28998193181\") " Jan 20 17:11:02 crc kubenswrapper[4558]: I0120 17:11:02.136279 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04194274-834f-49e3-ac2f-c28998193181-combined-ca-bundle\") pod \"04194274-834f-49e3-ac2f-c28998193181\" (UID: \"04194274-834f-49e3-ac2f-c28998193181\") " Jan 20 17:11:02 crc kubenswrapper[4558]: I0120 17:11:02.136911 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/faade961-fce0-4ad6-a039-4ba83a95dd68-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:11:02 crc kubenswrapper[4558]: I0120 17:11:02.136936 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/faade961-fce0-4ad6-a039-4ba83a95dd68-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:11:02 crc kubenswrapper[4558]: I0120 17:11:02.136949 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r7std\" (UniqueName: \"kubernetes.io/projected/faade961-fce0-4ad6-a039-4ba83a95dd68-kube-api-access-r7std\") on node \"crc\" DevicePath \"\"" Jan 20 17:11:02 crc kubenswrapper[4558]: I0120 17:11:02.136962 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/faade961-fce0-4ad6-a039-4ba83a95dd68-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:11:02 crc kubenswrapper[4558]: I0120 17:11:02.136974 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/faade961-fce0-4ad6-a039-4ba83a95dd68-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:11:02 crc kubenswrapper[4558]: I0120 17:11:02.139066 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/04194274-834f-49e3-ac2f-c28998193181-kube-api-access-jjl4r" (OuterVolumeSpecName: "kube-api-access-jjl4r") pod "04194274-834f-49e3-ac2f-c28998193181" (UID: "04194274-834f-49e3-ac2f-c28998193181"). InnerVolumeSpecName "kube-api-access-jjl4r". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:11:02 crc kubenswrapper[4558]: I0120 17:11:02.158098 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04194274-834f-49e3-ac2f-c28998193181-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "04194274-834f-49e3-ac2f-c28998193181" (UID: "04194274-834f-49e3-ac2f-c28998193181"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:11:02 crc kubenswrapper[4558]: I0120 17:11:02.160831 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04194274-834f-49e3-ac2f-c28998193181-config-data" (OuterVolumeSpecName: "config-data") pod "04194274-834f-49e3-ac2f-c28998193181" (UID: "04194274-834f-49e3-ac2f-c28998193181"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:11:02 crc kubenswrapper[4558]: I0120 17:11:02.238202 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jjl4r\" (UniqueName: \"kubernetes.io/projected/04194274-834f-49e3-ac2f-c28998193181-kube-api-access-jjl4r\") on node \"crc\" DevicePath \"\"" Jan 20 17:11:02 crc kubenswrapper[4558]: I0120 17:11:02.238230 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04194274-834f-49e3-ac2f-c28998193181-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:11:02 crc kubenswrapper[4558]: I0120 17:11:02.238243 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04194274-834f-49e3-ac2f-c28998193181-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:11:02 crc kubenswrapper[4558]: I0120 17:11:02.576912 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="07fab146-67be-42ba-b263-ee19fe95720b" path="/var/lib/kubelet/pods/07fab146-67be-42ba-b263-ee19fe95720b/volumes" Jan 20 17:11:02 crc kubenswrapper[4558]: I0120 17:11:02.577986 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="39403277-bf62-47c1-8e86-cdec59f2da7b" path="/var/lib/kubelet/pods/39403277-bf62-47c1-8e86-cdec59f2da7b/volumes" Jan 20 17:11:02 crc kubenswrapper[4558]: I0120 17:11:02.579325 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4406b36c-adb2-4042-bc92-9efed5a43942" path="/var/lib/kubelet/pods/4406b36c-adb2-4042-bc92-9efed5a43942/volumes" Jan 20 17:11:02 crc kubenswrapper[4558]: I0120 17:11:02.580013 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5558aeea-2a7f-4654-a261-a2902c9434e0" path="/var/lib/kubelet/pods/5558aeea-2a7f-4654-a261-a2902c9434e0/volumes" Jan 20 17:11:02 crc kubenswrapper[4558]: I0120 17:11:02.580641 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c3c7801f-4a15-44ce-8732-4f93b819a7c5" path="/var/lib/kubelet/pods/c3c7801f-4a15-44ce-8732-4f93b819a7c5/volumes" Jan 20 17:11:02 crc kubenswrapper[4558]: I0120 17:11:02.662863 4558 generic.go:334] "Generic (PLEG): container finished" podID="faade961-fce0-4ad6-a039-4ba83a95dd68" containerID="987117f4a057251c5482c4e15328842920fdcea58544c68a18a70dee0c0bd727" exitCode=0 Jan 20 17:11:02 crc kubenswrapper[4558]: I0120 17:11:02.662940 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-7bcc85c7d9-jm8qh" 
event={"ID":"faade961-fce0-4ad6-a039-4ba83a95dd68","Type":"ContainerDied","Data":"987117f4a057251c5482c4e15328842920fdcea58544c68a18a70dee0c0bd727"} Jan 20 17:11:02 crc kubenswrapper[4558]: I0120 17:11:02.662974 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-7bcc85c7d9-jm8qh" event={"ID":"faade961-fce0-4ad6-a039-4ba83a95dd68","Type":"ContainerDied","Data":"f97ce9d190b163d24d32a36261ccedb84e9d2308cd6fb8b7d48d7eb4331e510a"} Jan 20 17:11:02 crc kubenswrapper[4558]: I0120 17:11:02.662994 4558 scope.go:117] "RemoveContainer" containerID="987117f4a057251c5482c4e15328842920fdcea58544c68a18a70dee0c0bd727" Jan 20 17:11:02 crc kubenswrapper[4558]: I0120 17:11:02.663105 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-7bcc85c7d9-jm8qh" Jan 20 17:11:02 crc kubenswrapper[4558]: I0120 17:11:02.671017 4558 generic.go:334] "Generic (PLEG): container finished" podID="04194274-834f-49e3-ac2f-c28998193181" containerID="9827c6a452c5455a00296263b978afc4645bb1e30af3fb06ef2be62752a0e311" exitCode=0 Jan 20 17:11:02 crc kubenswrapper[4558]: I0120 17:11:02.671078 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:11:02 crc kubenswrapper[4558]: I0120 17:11:02.671092 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"04194274-834f-49e3-ac2f-c28998193181","Type":"ContainerDied","Data":"9827c6a452c5455a00296263b978afc4645bb1e30af3fb06ef2be62752a0e311"} Jan 20 17:11:02 crc kubenswrapper[4558]: I0120 17:11:02.671115 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"04194274-834f-49e3-ac2f-c28998193181","Type":"ContainerDied","Data":"9112388e6768e1285e740f89d3fb74ff9c129402b557edac1264c28abf281672"} Jan 20 17:11:02 crc kubenswrapper[4558]: I0120 17:11:02.689317 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-worker-7bcc85c7d9-jm8qh"] Jan 20 17:11:02 crc kubenswrapper[4558]: I0120 17:11:02.694285 4558 scope.go:117] "RemoveContainer" containerID="8bdce8800380698b3525e8fafce4e103f8ffe07d9a3881dbd922429612c13082" Jan 20 17:11:02 crc kubenswrapper[4558]: I0120 17:11:02.712637 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-worker-7bcc85c7d9-jm8qh"] Jan 20 17:11:02 crc kubenswrapper[4558]: I0120 17:11:02.717405 4558 scope.go:117] "RemoveContainer" containerID="987117f4a057251c5482c4e15328842920fdcea58544c68a18a70dee0c0bd727" Jan 20 17:11:02 crc kubenswrapper[4558]: E0120 17:11:02.717737 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"987117f4a057251c5482c4e15328842920fdcea58544c68a18a70dee0c0bd727\": container with ID starting with 987117f4a057251c5482c4e15328842920fdcea58544c68a18a70dee0c0bd727 not found: ID does not exist" containerID="987117f4a057251c5482c4e15328842920fdcea58544c68a18a70dee0c0bd727" Jan 20 17:11:02 crc kubenswrapper[4558]: I0120 17:11:02.717774 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"987117f4a057251c5482c4e15328842920fdcea58544c68a18a70dee0c0bd727"} err="failed to get container status \"987117f4a057251c5482c4e15328842920fdcea58544c68a18a70dee0c0bd727\": rpc error: code = NotFound desc = could not find container 
\"987117f4a057251c5482c4e15328842920fdcea58544c68a18a70dee0c0bd727\": container with ID starting with 987117f4a057251c5482c4e15328842920fdcea58544c68a18a70dee0c0bd727 not found: ID does not exist" Jan 20 17:11:02 crc kubenswrapper[4558]: I0120 17:11:02.717799 4558 scope.go:117] "RemoveContainer" containerID="8bdce8800380698b3525e8fafce4e103f8ffe07d9a3881dbd922429612c13082" Jan 20 17:11:02 crc kubenswrapper[4558]: E0120 17:11:02.718069 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8bdce8800380698b3525e8fafce4e103f8ffe07d9a3881dbd922429612c13082\": container with ID starting with 8bdce8800380698b3525e8fafce4e103f8ffe07d9a3881dbd922429612c13082 not found: ID does not exist" containerID="8bdce8800380698b3525e8fafce4e103f8ffe07d9a3881dbd922429612c13082" Jan 20 17:11:02 crc kubenswrapper[4558]: I0120 17:11:02.718100 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8bdce8800380698b3525e8fafce4e103f8ffe07d9a3881dbd922429612c13082"} err="failed to get container status \"8bdce8800380698b3525e8fafce4e103f8ffe07d9a3881dbd922429612c13082\": rpc error: code = NotFound desc = could not find container \"8bdce8800380698b3525e8fafce4e103f8ffe07d9a3881dbd922429612c13082\": container with ID starting with 8bdce8800380698b3525e8fafce4e103f8ffe07d9a3881dbd922429612c13082 not found: ID does not exist" Jan 20 17:11:02 crc kubenswrapper[4558]: I0120 17:11:02.718129 4558 scope.go:117] "RemoveContainer" containerID="9827c6a452c5455a00296263b978afc4645bb1e30af3fb06ef2be62752a0e311" Jan 20 17:11:02 crc kubenswrapper[4558]: I0120 17:11:02.722409 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:11:02 crc kubenswrapper[4558]: I0120 17:11:02.735250 4558 scope.go:117] "RemoveContainer" containerID="9827c6a452c5455a00296263b978afc4645bb1e30af3fb06ef2be62752a0e311" Jan 20 17:11:02 crc kubenswrapper[4558]: E0120 17:11:02.737239 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9827c6a452c5455a00296263b978afc4645bb1e30af3fb06ef2be62752a0e311\": container with ID starting with 9827c6a452c5455a00296263b978afc4645bb1e30af3fb06ef2be62752a0e311 not found: ID does not exist" containerID="9827c6a452c5455a00296263b978afc4645bb1e30af3fb06ef2be62752a0e311" Jan 20 17:11:02 crc kubenswrapper[4558]: I0120 17:11:02.737290 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9827c6a452c5455a00296263b978afc4645bb1e30af3fb06ef2be62752a0e311"} err="failed to get container status \"9827c6a452c5455a00296263b978afc4645bb1e30af3fb06ef2be62752a0e311\": rpc error: code = NotFound desc = could not find container \"9827c6a452c5455a00296263b978afc4645bb1e30af3fb06ef2be62752a0e311\": container with ID starting with 9827c6a452c5455a00296263b978afc4645bb1e30af3fb06ef2be62752a0e311 not found: ID does not exist" Jan 20 17:11:02 crc kubenswrapper[4558]: I0120 17:11:02.740832 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:11:04 crc kubenswrapper[4558]: I0120 17:11:04.485419 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-8xt6f" Jan 20 17:11:04 crc kubenswrapper[4558]: I0120 17:11:04.548334 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-85d5c6dbcc-qhtkq"] Jan 20 17:11:04 crc kubenswrapper[4558]: I0120 17:11:04.548583 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-85d5c6dbcc-qhtkq" podUID="d3959a6a-ea1a-489e-a441-3645144becc9" containerName="dnsmasq-dns" containerID="cri-o://178a4671de8370a23a24bda120d247c615e3cd49322b5534733e470d52a6d3ef" gracePeriod=10 Jan 20 17:11:04 crc kubenswrapper[4558]: I0120 17:11:04.573842 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="04194274-834f-49e3-ac2f-c28998193181" path="/var/lib/kubelet/pods/04194274-834f-49e3-ac2f-c28998193181/volumes" Jan 20 17:11:04 crc kubenswrapper[4558]: I0120 17:11:04.574651 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="faade961-fce0-4ad6-a039-4ba83a95dd68" path="/var/lib/kubelet/pods/faade961-fce0-4ad6-a039-4ba83a95dd68/volumes" Jan 20 17:11:04 crc kubenswrapper[4558]: I0120 17:11:04.700386 4558 generic.go:334] "Generic (PLEG): container finished" podID="d3959a6a-ea1a-489e-a441-3645144becc9" containerID="178a4671de8370a23a24bda120d247c615e3cd49322b5534733e470d52a6d3ef" exitCode=0 Jan 20 17:11:04 crc kubenswrapper[4558]: I0120 17:11:04.700431 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-85d5c6dbcc-qhtkq" event={"ID":"d3959a6a-ea1a-489e-a441-3645144becc9","Type":"ContainerDied","Data":"178a4671de8370a23a24bda120d247c615e3cd49322b5534733e470d52a6d3ef"} Jan 20 17:11:04 crc kubenswrapper[4558]: I0120 17:11:04.949577 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-85d5c6dbcc-qhtkq" Jan 20 17:11:05 crc kubenswrapper[4558]: I0120 17:11:05.073473 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fgt4x\" (UniqueName: \"kubernetes.io/projected/d3959a6a-ea1a-489e-a441-3645144becc9-kube-api-access-fgt4x\") pod \"d3959a6a-ea1a-489e-a441-3645144becc9\" (UID: \"d3959a6a-ea1a-489e-a441-3645144becc9\") " Jan 20 17:11:05 crc kubenswrapper[4558]: I0120 17:11:05.073563 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/d3959a6a-ea1a-489e-a441-3645144becc9-dnsmasq-svc\") pod \"d3959a6a-ea1a-489e-a441-3645144becc9\" (UID: \"d3959a6a-ea1a-489e-a441-3645144becc9\") " Jan 20 17:11:05 crc kubenswrapper[4558]: I0120 17:11:05.073652 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d3959a6a-ea1a-489e-a441-3645144becc9-config\") pod \"d3959a6a-ea1a-489e-a441-3645144becc9\" (UID: \"d3959a6a-ea1a-489e-a441-3645144becc9\") " Jan 20 17:11:05 crc kubenswrapper[4558]: I0120 17:11:05.073704 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d3959a6a-ea1a-489e-a441-3645144becc9-dns-swift-storage-0\") pod \"d3959a6a-ea1a-489e-a441-3645144becc9\" (UID: \"d3959a6a-ea1a-489e-a441-3645144becc9\") " Jan 20 17:11:05 crc kubenswrapper[4558]: I0120 17:11:05.093652 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d3959a6a-ea1a-489e-a441-3645144becc9-kube-api-access-fgt4x" (OuterVolumeSpecName: "kube-api-access-fgt4x") pod "d3959a6a-ea1a-489e-a441-3645144becc9" (UID: "d3959a6a-ea1a-489e-a441-3645144becc9"). InnerVolumeSpecName "kube-api-access-fgt4x". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:11:05 crc kubenswrapper[4558]: I0120 17:11:05.108685 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d3959a6a-ea1a-489e-a441-3645144becc9-config" (OuterVolumeSpecName: "config") pod "d3959a6a-ea1a-489e-a441-3645144becc9" (UID: "d3959a6a-ea1a-489e-a441-3645144becc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:11:05 crc kubenswrapper[4558]: I0120 17:11:05.109532 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d3959a6a-ea1a-489e-a441-3645144becc9-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "d3959a6a-ea1a-489e-a441-3645144becc9" (UID: "d3959a6a-ea1a-489e-a441-3645144becc9"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:11:05 crc kubenswrapper[4558]: I0120 17:11:05.112833 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d3959a6a-ea1a-489e-a441-3645144becc9-dnsmasq-svc" (OuterVolumeSpecName: "dnsmasq-svc") pod "d3959a6a-ea1a-489e-a441-3645144becc9" (UID: "d3959a6a-ea1a-489e-a441-3645144becc9"). InnerVolumeSpecName "dnsmasq-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:11:05 crc kubenswrapper[4558]: I0120 17:11:05.174707 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fgt4x\" (UniqueName: \"kubernetes.io/projected/d3959a6a-ea1a-489e-a441-3645144becc9-kube-api-access-fgt4x\") on node \"crc\" DevicePath \"\"" Jan 20 17:11:05 crc kubenswrapper[4558]: I0120 17:11:05.174736 4558 reconciler_common.go:293] "Volume detached for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/d3959a6a-ea1a-489e-a441-3645144becc9-dnsmasq-svc\") on node \"crc\" DevicePath \"\"" Jan 20 17:11:05 crc kubenswrapper[4558]: I0120 17:11:05.174759 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d3959a6a-ea1a-489e-a441-3645144becc9-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:11:05 crc kubenswrapper[4558]: I0120 17:11:05.174770 4558 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d3959a6a-ea1a-489e-a441-3645144becc9-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 20 17:11:05 crc kubenswrapper[4558]: I0120 17:11:05.714328 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-85d5c6dbcc-qhtkq" event={"ID":"d3959a6a-ea1a-489e-a441-3645144becc9","Type":"ContainerDied","Data":"c8b7f616c8c91d42aff417ce11a01c56bb1eaab75ef988ddada50e8ab04205c6"} Jan 20 17:11:05 crc kubenswrapper[4558]: I0120 17:11:05.714388 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-85d5c6dbcc-qhtkq" Jan 20 17:11:05 crc kubenswrapper[4558]: I0120 17:11:05.714425 4558 scope.go:117] "RemoveContainer" containerID="178a4671de8370a23a24bda120d247c615e3cd49322b5534733e470d52a6d3ef" Jan 20 17:11:05 crc kubenswrapper[4558]: I0120 17:11:05.744229 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-85d5c6dbcc-qhtkq"] Jan 20 17:11:05 crc kubenswrapper[4558]: I0120 17:11:05.749113 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-85d5c6dbcc-qhtkq"] Jan 20 17:11:05 crc kubenswrapper[4558]: I0120 17:11:05.749512 4558 scope.go:117] "RemoveContainer" containerID="cfa88af8b8d3146e3428eb9c96d266f627f3f53714a121693b6baa9ba5c6c511" Jan 20 17:11:06 crc kubenswrapper[4558]: I0120 17:11:06.576687 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d3959a6a-ea1a-489e-a441-3645144becc9" path="/var/lib/kubelet/pods/d3959a6a-ea1a-489e-a441-3645144becc9/volumes" Jan 20 17:11:11 crc kubenswrapper[4558]: I0120 17:11:11.785331 4558 generic.go:334] "Generic (PLEG): container finished" podID="f5d75d5a-4202-477d-97a5-8d92bcdd13b9" containerID="774b082f2f30d066f7547f9bb1d923817b7873ec854074ba60f13876f9205004" exitCode=0 Jan 20 17:11:11 crc kubenswrapper[4558]: I0120 17:11:11.785738 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-859ff747c8-5c47j" event={"ID":"f5d75d5a-4202-477d-97a5-8d92bcdd13b9","Type":"ContainerDied","Data":"774b082f2f30d066f7547f9bb1d923817b7873ec854074ba60f13876f9205004"} Jan 20 17:11:11 crc kubenswrapper[4558]: I0120 17:11:11.981073 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-859ff747c8-5c47j" Jan 20 17:11:12 crc kubenswrapper[4558]: I0120 17:11:12.078705 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/f5d75d5a-4202-477d-97a5-8d92bcdd13b9-config\") pod \"f5d75d5a-4202-477d-97a5-8d92bcdd13b9\" (UID: \"f5d75d5a-4202-477d-97a5-8d92bcdd13b9\") " Jan 20 17:11:12 crc kubenswrapper[4558]: I0120 17:11:12.078764 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f5d75d5a-4202-477d-97a5-8d92bcdd13b9-public-tls-certs\") pod \"f5d75d5a-4202-477d-97a5-8d92bcdd13b9\" (UID: \"f5d75d5a-4202-477d-97a5-8d92bcdd13b9\") " Jan 20 17:11:12 crc kubenswrapper[4558]: I0120 17:11:12.078802 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fr6vd\" (UniqueName: \"kubernetes.io/projected/f5d75d5a-4202-477d-97a5-8d92bcdd13b9-kube-api-access-fr6vd\") pod \"f5d75d5a-4202-477d-97a5-8d92bcdd13b9\" (UID: \"f5d75d5a-4202-477d-97a5-8d92bcdd13b9\") " Jan 20 17:11:12 crc kubenswrapper[4558]: I0120 17:11:12.078865 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/f5d75d5a-4202-477d-97a5-8d92bcdd13b9-ovndb-tls-certs\") pod \"f5d75d5a-4202-477d-97a5-8d92bcdd13b9\" (UID: \"f5d75d5a-4202-477d-97a5-8d92bcdd13b9\") " Jan 20 17:11:12 crc kubenswrapper[4558]: I0120 17:11:12.078900 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f5d75d5a-4202-477d-97a5-8d92bcdd13b9-internal-tls-certs\") pod \"f5d75d5a-4202-477d-97a5-8d92bcdd13b9\" (UID: 
\"f5d75d5a-4202-477d-97a5-8d92bcdd13b9\") " Jan 20 17:11:12 crc kubenswrapper[4558]: I0120 17:11:12.078919 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/f5d75d5a-4202-477d-97a5-8d92bcdd13b9-httpd-config\") pod \"f5d75d5a-4202-477d-97a5-8d92bcdd13b9\" (UID: \"f5d75d5a-4202-477d-97a5-8d92bcdd13b9\") " Jan 20 17:11:12 crc kubenswrapper[4558]: I0120 17:11:12.078946 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5d75d5a-4202-477d-97a5-8d92bcdd13b9-combined-ca-bundle\") pod \"f5d75d5a-4202-477d-97a5-8d92bcdd13b9\" (UID: \"f5d75d5a-4202-477d-97a5-8d92bcdd13b9\") " Jan 20 17:11:12 crc kubenswrapper[4558]: I0120 17:11:12.083818 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5d75d5a-4202-477d-97a5-8d92bcdd13b9-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "f5d75d5a-4202-477d-97a5-8d92bcdd13b9" (UID: "f5d75d5a-4202-477d-97a5-8d92bcdd13b9"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:11:12 crc kubenswrapper[4558]: I0120 17:11:12.083895 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f5d75d5a-4202-477d-97a5-8d92bcdd13b9-kube-api-access-fr6vd" (OuterVolumeSpecName: "kube-api-access-fr6vd") pod "f5d75d5a-4202-477d-97a5-8d92bcdd13b9" (UID: "f5d75d5a-4202-477d-97a5-8d92bcdd13b9"). InnerVolumeSpecName "kube-api-access-fr6vd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:11:12 crc kubenswrapper[4558]: I0120 17:11:12.109637 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5d75d5a-4202-477d-97a5-8d92bcdd13b9-config" (OuterVolumeSpecName: "config") pod "f5d75d5a-4202-477d-97a5-8d92bcdd13b9" (UID: "f5d75d5a-4202-477d-97a5-8d92bcdd13b9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:11:12 crc kubenswrapper[4558]: I0120 17:11:12.111676 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5d75d5a-4202-477d-97a5-8d92bcdd13b9-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "f5d75d5a-4202-477d-97a5-8d92bcdd13b9" (UID: "f5d75d5a-4202-477d-97a5-8d92bcdd13b9"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:11:12 crc kubenswrapper[4558]: I0120 17:11:12.113408 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5d75d5a-4202-477d-97a5-8d92bcdd13b9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f5d75d5a-4202-477d-97a5-8d92bcdd13b9" (UID: "f5d75d5a-4202-477d-97a5-8d92bcdd13b9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:11:12 crc kubenswrapper[4558]: I0120 17:11:12.113407 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5d75d5a-4202-477d-97a5-8d92bcdd13b9-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "f5d75d5a-4202-477d-97a5-8d92bcdd13b9" (UID: "f5d75d5a-4202-477d-97a5-8d92bcdd13b9"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:11:12 crc kubenswrapper[4558]: I0120 17:11:12.121545 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5d75d5a-4202-477d-97a5-8d92bcdd13b9-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "f5d75d5a-4202-477d-97a5-8d92bcdd13b9" (UID: "f5d75d5a-4202-477d-97a5-8d92bcdd13b9"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:11:12 crc kubenswrapper[4558]: I0120 17:11:12.180319 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f5d75d5a-4202-477d-97a5-8d92bcdd13b9-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:11:12 crc kubenswrapper[4558]: I0120 17:11:12.180464 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fr6vd\" (UniqueName: \"kubernetes.io/projected/f5d75d5a-4202-477d-97a5-8d92bcdd13b9-kube-api-access-fr6vd\") on node \"crc\" DevicePath \"\"" Jan 20 17:11:12 crc kubenswrapper[4558]: I0120 17:11:12.180532 4558 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/f5d75d5a-4202-477d-97a5-8d92bcdd13b9-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:11:12 crc kubenswrapper[4558]: I0120 17:11:12.180584 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f5d75d5a-4202-477d-97a5-8d92bcdd13b9-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:11:12 crc kubenswrapper[4558]: I0120 17:11:12.180647 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/f5d75d5a-4202-477d-97a5-8d92bcdd13b9-httpd-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:11:12 crc kubenswrapper[4558]: I0120 17:11:12.180702 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5d75d5a-4202-477d-97a5-8d92bcdd13b9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:11:12 crc kubenswrapper[4558]: I0120 17:11:12.180754 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/f5d75d5a-4202-477d-97a5-8d92bcdd13b9-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:11:12 crc kubenswrapper[4558]: I0120 17:11:12.565768 4558 scope.go:117] "RemoveContainer" containerID="a711a286282347590079d2447ef37fa9ed0f11085d6d7a65f85e65c1c2df608f" Jan 20 17:11:12 crc kubenswrapper[4558]: E0120 17:11:12.566004 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:11:12 crc kubenswrapper[4558]: I0120 17:11:12.797483 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-859ff747c8-5c47j" event={"ID":"f5d75d5a-4202-477d-97a5-8d92bcdd13b9","Type":"ContainerDied","Data":"5a8efb770421b4479b6c2977354d0273d8f5514be6b15091228236e78e95c87b"} Jan 20 17:11:12 crc kubenswrapper[4558]: I0120 17:11:12.797557 4558 scope.go:117] "RemoveContainer" containerID="a2dd29b89f91cc3800497edcfbd3636296ddb90d87379145a79cd11ac30957af" Jan 20 17:11:12 
crc kubenswrapper[4558]: I0120 17:11:12.797567 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-859ff747c8-5c47j" Jan 20 17:11:12 crc kubenswrapper[4558]: I0120 17:11:12.818718 4558 scope.go:117] "RemoveContainer" containerID="774b082f2f30d066f7547f9bb1d923817b7873ec854074ba60f13876f9205004" Jan 20 17:11:12 crc kubenswrapper[4558]: I0120 17:11:12.819210 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-859ff747c8-5c47j"] Jan 20 17:11:12 crc kubenswrapper[4558]: I0120 17:11:12.824300 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-859ff747c8-5c47j"] Jan 20 17:11:14 crc kubenswrapper[4558]: I0120 17:11:14.574493 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f5d75d5a-4202-477d-97a5-8d92bcdd13b9" path="/var/lib/kubelet/pods/f5d75d5a-4202-477d-97a5-8d92bcdd13b9/volumes" Jan 20 17:11:23 crc kubenswrapper[4558]: I0120 17:11:23.565948 4558 scope.go:117] "RemoveContainer" containerID="a711a286282347590079d2447ef37fa9ed0f11085d6d7a65f85e65c1c2df608f" Jan 20 17:11:23 crc kubenswrapper[4558]: E0120 17:11:23.566755 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:11:23 crc kubenswrapper[4558]: I0120 17:11:23.908409 4558 generic.go:334] "Generic (PLEG): container finished" podID="fc023815-924a-4a04-bad2-5fc862ef20ed" containerID="e1ee27aa87a720002e5126efcfb765b302b81489973de33a9ecf856e1da6cf32" exitCode=137 Jan 20 17:11:23 crc kubenswrapper[4558]: I0120 17:11:23.908494 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fc023815-924a-4a04-bad2-5fc862ef20ed","Type":"ContainerDied","Data":"e1ee27aa87a720002e5126efcfb765b302b81489973de33a9ecf856e1da6cf32"} Jan 20 17:11:24 crc kubenswrapper[4558]: I0120 17:11:24.056997 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:11:24 crc kubenswrapper[4558]: I0120 17:11:24.255654 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rhtdc\" (UniqueName: \"kubernetes.io/projected/fc023815-924a-4a04-bad2-5fc862ef20ed-kube-api-access-rhtdc\") pod \"fc023815-924a-4a04-bad2-5fc862ef20ed\" (UID: \"fc023815-924a-4a04-bad2-5fc862ef20ed\") " Jan 20 17:11:24 crc kubenswrapper[4558]: I0120 17:11:24.255705 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/fc023815-924a-4a04-bad2-5fc862ef20ed-etc-swift\") pod \"fc023815-924a-4a04-bad2-5fc862ef20ed\" (UID: \"fc023815-924a-4a04-bad2-5fc862ef20ed\") " Jan 20 17:11:24 crc kubenswrapper[4558]: I0120 17:11:24.255743 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/fc023815-924a-4a04-bad2-5fc862ef20ed-cache\") pod \"fc023815-924a-4a04-bad2-5fc862ef20ed\" (UID: \"fc023815-924a-4a04-bad2-5fc862ef20ed\") " Jan 20 17:11:24 crc kubenswrapper[4558]: I0120 17:11:24.255797 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/fc023815-924a-4a04-bad2-5fc862ef20ed-lock\") pod \"fc023815-924a-4a04-bad2-5fc862ef20ed\" (UID: \"fc023815-924a-4a04-bad2-5fc862ef20ed\") " Jan 20 17:11:24 crc kubenswrapper[4558]: I0120 17:11:24.255833 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swift\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"fc023815-924a-4a04-bad2-5fc862ef20ed\" (UID: \"fc023815-924a-4a04-bad2-5fc862ef20ed\") " Jan 20 17:11:24 crc kubenswrapper[4558]: I0120 17:11:24.256542 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fc023815-924a-4a04-bad2-5fc862ef20ed-lock" (OuterVolumeSpecName: "lock") pod "fc023815-924a-4a04-bad2-5fc862ef20ed" (UID: "fc023815-924a-4a04-bad2-5fc862ef20ed"). InnerVolumeSpecName "lock". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:11:24 crc kubenswrapper[4558]: I0120 17:11:24.256600 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fc023815-924a-4a04-bad2-5fc862ef20ed-cache" (OuterVolumeSpecName: "cache") pod "fc023815-924a-4a04-bad2-5fc862ef20ed" (UID: "fc023815-924a-4a04-bad2-5fc862ef20ed"). InnerVolumeSpecName "cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:11:24 crc kubenswrapper[4558]: I0120 17:11:24.262827 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fc023815-924a-4a04-bad2-5fc862ef20ed-kube-api-access-rhtdc" (OuterVolumeSpecName: "kube-api-access-rhtdc") pod "fc023815-924a-4a04-bad2-5fc862ef20ed" (UID: "fc023815-924a-4a04-bad2-5fc862ef20ed"). InnerVolumeSpecName "kube-api-access-rhtdc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:11:24 crc kubenswrapper[4558]: I0120 17:11:24.262917 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fc023815-924a-4a04-bad2-5fc862ef20ed-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "fc023815-924a-4a04-bad2-5fc862ef20ed" (UID: "fc023815-924a-4a04-bad2-5fc862ef20ed"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:11:24 crc kubenswrapper[4558]: I0120 17:11:24.263856 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage02-crc" (OuterVolumeSpecName: "swift") pod "fc023815-924a-4a04-bad2-5fc862ef20ed" (UID: "fc023815-924a-4a04-bad2-5fc862ef20ed"). InnerVolumeSpecName "local-storage02-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:11:24 crc kubenswrapper[4558]: I0120 17:11:24.357702 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" " Jan 20 17:11:24 crc kubenswrapper[4558]: I0120 17:11:24.357733 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rhtdc\" (UniqueName: \"kubernetes.io/projected/fc023815-924a-4a04-bad2-5fc862ef20ed-kube-api-access-rhtdc\") on node \"crc\" DevicePath \"\"" Jan 20 17:11:24 crc kubenswrapper[4558]: I0120 17:11:24.357748 4558 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/fc023815-924a-4a04-bad2-5fc862ef20ed-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 20 17:11:24 crc kubenswrapper[4558]: I0120 17:11:24.357759 4558 reconciler_common.go:293] "Volume detached for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/fc023815-924a-4a04-bad2-5fc862ef20ed-cache\") on node \"crc\" DevicePath \"\"" Jan 20 17:11:24 crc kubenswrapper[4558]: I0120 17:11:24.357768 4558 reconciler_common.go:293] "Volume detached for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/fc023815-924a-4a04-bad2-5fc862ef20ed-lock\") on node \"crc\" DevicePath \"\"" Jan 20 17:11:24 crc kubenswrapper[4558]: I0120 17:11:24.370299 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage02-crc" (UniqueName: "kubernetes.io/local-volume/local-storage02-crc") on node "crc" Jan 20 17:11:24 crc kubenswrapper[4558]: I0120 17:11:24.459422 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:11:24 crc kubenswrapper[4558]: I0120 17:11:24.926812 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fc023815-924a-4a04-bad2-5fc862ef20ed","Type":"ContainerDied","Data":"c1f8a2c586c1976b6cb7f85d1a6a5b6b148309e4fecdd10861bb4a6fb7dea098"} Jan 20 17:11:24 crc kubenswrapper[4558]: I0120 17:11:24.927239 4558 scope.go:117] "RemoveContainer" containerID="e1ee27aa87a720002e5126efcfb765b302b81489973de33a9ecf856e1da6cf32" Jan 20 17:11:24 crc kubenswrapper[4558]: I0120 17:11:24.926939 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:11:24 crc kubenswrapper[4558]: I0120 17:11:24.955179 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-storage-0"] Jan 20 17:11:24 crc kubenswrapper[4558]: I0120 17:11:24.955156 4558 scope.go:117] "RemoveContainer" containerID="9410708314b2bfc60099a915bc2c1d7c39d1320c24755c9f69a3d55337b01e9b" Jan 20 17:11:24 crc kubenswrapper[4558]: I0120 17:11:24.958927 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/swift-storage-0"] Jan 20 17:11:24 crc kubenswrapper[4558]: I0120 17:11:24.969128 4558 scope.go:117] "RemoveContainer" containerID="9528266b3b97d400ebef502af6d8130d7c0559f6df040c83585a2f746292ef2d" Jan 20 17:11:24 crc kubenswrapper[4558]: I0120 17:11:24.983846 4558 scope.go:117] "RemoveContainer" containerID="7b43c5f22df4aacb3c09f2db534e5fb1e65e53019530050eed242dc948ad8a62" Jan 20 17:11:24 crc kubenswrapper[4558]: I0120 17:11:24.996774 4558 scope.go:117] "RemoveContainer" containerID="375019cf206eb74d303642688e63e85c3a2faecea6a7d087bd3a333841cd82c8" Jan 20 17:11:25 crc kubenswrapper[4558]: I0120 17:11:25.011431 4558 scope.go:117] "RemoveContainer" containerID="18ccc604f3f78f7668fb5ba7e36fe96edce4c2dcfa1c6e7165a65ee3388df062" Jan 20 17:11:25 crc kubenswrapper[4558]: I0120 17:11:25.026801 4558 scope.go:117] "RemoveContainer" containerID="3517eea5a519d4ed5dbc9c6b97d0c39a1746f6138a5b79baf3fd8a2d54e4293f" Jan 20 17:11:25 crc kubenswrapper[4558]: I0120 17:11:25.039265 4558 scope.go:117] "RemoveContainer" containerID="9f3a700b66f96079e1d010bea1fc7bf937b11f9fea1048aaff6419fd900af7c5" Jan 20 17:11:25 crc kubenswrapper[4558]: I0120 17:11:25.054547 4558 scope.go:117] "RemoveContainer" containerID="8c2d36ea5a305d3a1da0f10e70884099eb310895225cdbcdf413bf2c005effc4" Jan 20 17:11:25 crc kubenswrapper[4558]: I0120 17:11:25.071439 4558 scope.go:117] "RemoveContainer" containerID="1656f891b22791ec6d9f940bf33e0630746d220bfffb0046c75f48d2afa2d782" Jan 20 17:11:25 crc kubenswrapper[4558]: I0120 17:11:25.087314 4558 scope.go:117] "RemoveContainer" containerID="d2f143dbca4d3796805d8445782c84acc22a2c02590f44e1ad34b26b6934656f" Jan 20 17:11:25 crc kubenswrapper[4558]: I0120 17:11:25.112303 4558 scope.go:117] "RemoveContainer" containerID="101d312b0bf3e3b73451053b69e1450a946df99e2bf194fa8970254181c3653e" Jan 20 17:11:25 crc kubenswrapper[4558]: I0120 17:11:25.127267 4558 scope.go:117] "RemoveContainer" containerID="0b1bf5c7f6bc3d5fc7206be9bc6c12aadefeeeb27348b35c50c441224ffe435b" Jan 20 17:11:25 crc kubenswrapper[4558]: I0120 17:11:25.140997 4558 scope.go:117] "RemoveContainer" containerID="e39b88b3c77bb6d2be10abb480b241402fde95d8a3a44afc3741d81cd38c9f4f" Jan 20 17:11:25 crc kubenswrapper[4558]: I0120 17:11:25.157112 4558 scope.go:117] "RemoveContainer" containerID="a8fa945168b61b5421cd9a0b28d23083787cacb8489bf80416f7599e933bb729" Jan 20 17:11:26 crc kubenswrapper[4558]: I0120 17:11:26.574708 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fc023815-924a-4a04-bad2-5fc862ef20ed" path="/var/lib/kubelet/pods/fc023815-924a-4a04-bad2-5fc862ef20ed/volumes" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.277025 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-p2tx9"] Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.288065 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-p2tx9"] Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.377063 4558 kubelet.go:2421] 
"SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-w52qk"] Jan 20 17:11:33 crc kubenswrapper[4558]: E0120 17:11:33.377847 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="07fab146-67be-42ba-b263-ee19fe95720b" containerName="setup-container" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.377941 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="07fab146-67be-42ba-b263-ee19fe95720b" containerName="setup-container" Jan 20 17:11:33 crc kubenswrapper[4558]: E0120 17:11:33.377999 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc023815-924a-4a04-bad2-5fc862ef20ed" containerName="rsync" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.378045 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc023815-924a-4a04-bad2-5fc862ef20ed" containerName="rsync" Jan 20 17:11:33 crc kubenswrapper[4558]: E0120 17:11:33.378098 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d" containerName="glance-log" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.378151 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d" containerName="glance-log" Jan 20 17:11:33 crc kubenswrapper[4558]: E0120 17:11:33.378230 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee147895-0bad-4dc1-a348-1be3348a7180" containerName="cinder-api-log" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.378284 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee147895-0bad-4dc1-a348-1be3348a7180" containerName="cinder-api-log" Jan 20 17:11:33 crc kubenswrapper[4558]: E0120 17:11:33.378359 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="95cf677f-e744-4dad-acfb-507100d2ea14" containerName="proxy-httpd" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.378417 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="95cf677f-e744-4dad-acfb-507100d2ea14" containerName="proxy-httpd" Jan 20 17:11:33 crc kubenswrapper[4558]: E0120 17:11:33.378463 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25737ece-fb84-4543-8c3a-94ffa7b8f095" containerName="glance-log" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.378508 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="25737ece-fb84-4543-8c3a-94ffa7b8f095" containerName="glance-log" Jan 20 17:11:33 crc kubenswrapper[4558]: E0120 17:11:33.378553 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc023815-924a-4a04-bad2-5fc862ef20ed" containerName="container-replicator" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.378593 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc023815-924a-4a04-bad2-5fc862ef20ed" containerName="container-replicator" Jan 20 17:11:33 crc kubenswrapper[4558]: E0120 17:11:33.378643 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1" containerName="nova-api-api" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.378688 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1" containerName="nova-api-api" Jan 20 17:11:33 crc kubenswrapper[4558]: E0120 17:11:33.378744 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5d75d5a-4202-477d-97a5-8d92bcdd13b9" containerName="neutron-api" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.378788 4558 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="f5d75d5a-4202-477d-97a5-8d92bcdd13b9" containerName="neutron-api" Jan 20 17:11:33 crc kubenswrapper[4558]: E0120 17:11:33.378841 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5d75d5a-4202-477d-97a5-8d92bcdd13b9" containerName="neutron-httpd" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.378886 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5d75d5a-4202-477d-97a5-8d92bcdd13b9" containerName="neutron-httpd" Jan 20 17:11:33 crc kubenswrapper[4558]: E0120 17:11:33.378935 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5560f6a4-fc05-4d97-8496-b87804dfab99" containerName="nova-metadata-metadata" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.378982 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="5560f6a4-fc05-4d97-8496-b87804dfab99" containerName="nova-metadata-metadata" Jan 20 17:11:33 crc kubenswrapper[4558]: E0120 17:11:33.379042 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc023815-924a-4a04-bad2-5fc862ef20ed" containerName="object-replicator" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.379086 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc023815-924a-4a04-bad2-5fc862ef20ed" containerName="object-replicator" Jan 20 17:11:33 crc kubenswrapper[4558]: E0120 17:11:33.379130 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee147895-0bad-4dc1-a348-1be3348a7180" containerName="cinder-api" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.379195 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee147895-0bad-4dc1-a348-1be3348a7180" containerName="cinder-api" Jan 20 17:11:33 crc kubenswrapper[4558]: E0120 17:11:33.379260 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39403277-bf62-47c1-8e86-cdec59f2da7b" containerName="rabbitmq" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.379306 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="39403277-bf62-47c1-8e86-cdec59f2da7b" containerName="rabbitmq" Jan 20 17:11:33 crc kubenswrapper[4558]: E0120 17:11:33.379349 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25737ece-fb84-4543-8c3a-94ffa7b8f095" containerName="glance-httpd" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.379403 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="25737ece-fb84-4543-8c3a-94ffa7b8f095" containerName="glance-httpd" Jan 20 17:11:33 crc kubenswrapper[4558]: E0120 17:11:33.379453 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1" containerName="nova-api-log" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.379541 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1" containerName="nova-api-log" Jan 20 17:11:33 crc kubenswrapper[4558]: E0120 17:11:33.379597 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5558aeea-2a7f-4654-a261-a2902c9434e0" containerName="galera" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.379809 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="5558aeea-2a7f-4654-a261-a2902c9434e0" containerName="galera" Jan 20 17:11:33 crc kubenswrapper[4558]: E0120 17:11:33.379869 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc023815-924a-4a04-bad2-5fc862ef20ed" containerName="swift-recon-cron" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.379912 4558 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="fc023815-924a-4a04-bad2-5fc862ef20ed" containerName="swift-recon-cron" Jan 20 17:11:33 crc kubenswrapper[4558]: E0120 17:11:33.379989 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc023815-924a-4a04-bad2-5fc862ef20ed" containerName="container-updater" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.380056 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc023815-924a-4a04-bad2-5fc862ef20ed" containerName="container-updater" Jan 20 17:11:33 crc kubenswrapper[4558]: E0120 17:11:33.380118 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="07fab146-67be-42ba-b263-ee19fe95720b" containerName="rabbitmq" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.380198 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="07fab146-67be-42ba-b263-ee19fe95720b" containerName="rabbitmq" Jan 20 17:11:33 crc kubenswrapper[4558]: E0120 17:11:33.380286 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="95cf677f-e744-4dad-acfb-507100d2ea14" containerName="ceilometer-central-agent" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.380337 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="95cf677f-e744-4dad-acfb-507100d2ea14" containerName="ceilometer-central-agent" Jan 20 17:11:33 crc kubenswrapper[4558]: E0120 17:11:33.380419 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="043476ec-5392-41af-970c-89d20b6b30a5" containerName="placement-log" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.380486 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="043476ec-5392-41af-970c-89d20b6b30a5" containerName="placement-log" Jan 20 17:11:33 crc kubenswrapper[4558]: E0120 17:11:33.380550 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc023815-924a-4a04-bad2-5fc862ef20ed" containerName="container-auditor" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.380619 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc023815-924a-4a04-bad2-5fc862ef20ed" containerName="container-auditor" Jan 20 17:11:33 crc kubenswrapper[4558]: E0120 17:11:33.380686 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39403277-bf62-47c1-8e86-cdec59f2da7b" containerName="setup-container" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.380734 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="39403277-bf62-47c1-8e86-cdec59f2da7b" containerName="setup-container" Jan 20 17:11:33 crc kubenswrapper[4558]: E0120 17:11:33.380780 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="931d9a7a-aa04-42c3-a276-e71388a8b9d3" containerName="mariadb-account-create-update" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.380839 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="931d9a7a-aa04-42c3-a276-e71388a8b9d3" containerName="mariadb-account-create-update" Jan 20 17:11:33 crc kubenswrapper[4558]: E0120 17:11:33.380891 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc023815-924a-4a04-bad2-5fc862ef20ed" containerName="account-auditor" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.380938 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc023815-924a-4a04-bad2-5fc862ef20ed" containerName="account-auditor" Jan 20 17:11:33 crc kubenswrapper[4558]: E0120 17:11:33.380983 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c3c7801f-4a15-44ce-8732-4f93b819a7c5" containerName="keystone-api" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 
17:11:33.381025 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c3c7801f-4a15-44ce-8732-4f93b819a7c5" containerName="keystone-api" Jan 20 17:11:33 crc kubenswrapper[4558]: E0120 17:11:33.381127 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="931d9a7a-aa04-42c3-a276-e71388a8b9d3" containerName="mariadb-account-create-update" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.381215 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="931d9a7a-aa04-42c3-a276-e71388a8b9d3" containerName="mariadb-account-create-update" Jan 20 17:11:33 crc kubenswrapper[4558]: E0120 17:11:33.381309 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6a5b342-eaf2-408c-828e-9bc0bf10d09e" containerName="ovn-northd" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.381354 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6a5b342-eaf2-408c-828e-9bc0bf10d09e" containerName="ovn-northd" Jan 20 17:11:33 crc kubenswrapper[4558]: E0120 17:11:33.381418 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ec5ef72-7c69-4be3-974d-c020ddfea4f7" containerName="probe" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.381464 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ec5ef72-7c69-4be3-974d-c020ddfea4f7" containerName="probe" Jan 20 17:11:33 crc kubenswrapper[4558]: E0120 17:11:33.381506 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc023815-924a-4a04-bad2-5fc862ef20ed" containerName="account-replicator" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.381549 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc023815-924a-4a04-bad2-5fc862ef20ed" containerName="account-replicator" Jan 20 17:11:33 crc kubenswrapper[4558]: E0120 17:11:33.381604 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc023815-924a-4a04-bad2-5fc862ef20ed" containerName="object-auditor" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.381648 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc023815-924a-4a04-bad2-5fc862ef20ed" containerName="object-auditor" Jan 20 17:11:33 crc kubenswrapper[4558]: E0120 17:11:33.381701 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="faade961-fce0-4ad6-a039-4ba83a95dd68" containerName="barbican-worker" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.381743 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="faade961-fce0-4ad6-a039-4ba83a95dd68" containerName="barbican-worker" Jan 20 17:11:33 crc kubenswrapper[4558]: E0120 17:11:33.381785 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04194274-834f-49e3-ac2f-c28998193181" containerName="nova-scheduler-scheduler" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.381827 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="04194274-834f-49e3-ac2f-c28998193181" containerName="nova-scheduler-scheduler" Jan 20 17:11:33 crc kubenswrapper[4558]: E0120 17:11:33.381870 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="95cf677f-e744-4dad-acfb-507100d2ea14" containerName="ceilometer-notification-agent" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.381907 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="95cf677f-e744-4dad-acfb-507100d2ea14" containerName="ceilometer-notification-agent" Jan 20 17:11:33 crc kubenswrapper[4558]: E0120 17:11:33.381952 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5560f6a4-fc05-4d97-8496-b87804dfab99" 
containerName="nova-metadata-log" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.381994 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="5560f6a4-fc05-4d97-8496-b87804dfab99" containerName="nova-metadata-log" Jan 20 17:11:33 crc kubenswrapper[4558]: E0120 17:11:33.382041 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="95cf677f-e744-4dad-acfb-507100d2ea14" containerName="sg-core" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.382082 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="95cf677f-e744-4dad-acfb-507100d2ea14" containerName="sg-core" Jan 20 17:11:33 crc kubenswrapper[4558]: E0120 17:11:33.382124 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="87001341-5a74-4796-a332-6d57b2cf11c9" containerName="barbican-api-log" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.382279 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="87001341-5a74-4796-a332-6d57b2cf11c9" containerName="barbican-api-log" Jan 20 17:11:33 crc kubenswrapper[4558]: E0120 17:11:33.382338 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ec5ef72-7c69-4be3-974d-c020ddfea4f7" containerName="cinder-scheduler" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.382397 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ec5ef72-7c69-4be3-974d-c020ddfea4f7" containerName="cinder-scheduler" Jan 20 17:11:33 crc kubenswrapper[4558]: E0120 17:11:33.382446 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="87001341-5a74-4796-a332-6d57b2cf11c9" containerName="barbican-api" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.382491 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="87001341-5a74-4796-a332-6d57b2cf11c9" containerName="barbican-api" Jan 20 17:11:33 crc kubenswrapper[4558]: E0120 17:11:33.382538 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="faade961-fce0-4ad6-a039-4ba83a95dd68" containerName="barbican-worker-log" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.382581 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="faade961-fce0-4ad6-a039-4ba83a95dd68" containerName="barbican-worker-log" Jan 20 17:11:33 crc kubenswrapper[4558]: E0120 17:11:33.382627 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5558aeea-2a7f-4654-a261-a2902c9434e0" containerName="mysql-bootstrap" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.382672 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="5558aeea-2a7f-4654-a261-a2902c9434e0" containerName="mysql-bootstrap" Jan 20 17:11:33 crc kubenswrapper[4558]: E0120 17:11:33.382718 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf954b15-e163-4f99-8c1b-5e04d06666bc" containerName="nova-cell0-conductor-conductor" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.382759 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf954b15-e163-4f99-8c1b-5e04d06666bc" containerName="nova-cell0-conductor-conductor" Jan 20 17:11:33 crc kubenswrapper[4558]: E0120 17:11:33.382802 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc023815-924a-4a04-bad2-5fc862ef20ed" containerName="container-server" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.382885 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc023815-924a-4a04-bad2-5fc862ef20ed" containerName="container-server" Jan 20 17:11:33 crc kubenswrapper[4558]: E0120 17:11:33.382973 4558 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="fc023815-924a-4a04-bad2-5fc862ef20ed" containerName="object-updater" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.383019 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc023815-924a-4a04-bad2-5fc862ef20ed" containerName="object-updater" Jan 20 17:11:33 crc kubenswrapper[4558]: E0120 17:11:33.383063 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc023815-924a-4a04-bad2-5fc862ef20ed" containerName="object-expirer" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.383105 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc023815-924a-4a04-bad2-5fc862ef20ed" containerName="object-expirer" Jan 20 17:11:33 crc kubenswrapper[4558]: E0120 17:11:33.383151 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc023815-924a-4a04-bad2-5fc862ef20ed" containerName="account-server" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.383361 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc023815-924a-4a04-bad2-5fc862ef20ed" containerName="account-server" Jan 20 17:11:33 crc kubenswrapper[4558]: E0120 17:11:33.383534 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3959a6a-ea1a-489e-a441-3645144becc9" containerName="init" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.383871 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3959a6a-ea1a-489e-a441-3645144becc9" containerName="init" Jan 20 17:11:33 crc kubenswrapper[4558]: E0120 17:11:33.383928 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc023815-924a-4a04-bad2-5fc862ef20ed" containerName="object-server" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.383972 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc023815-924a-4a04-bad2-5fc862ef20ed" containerName="object-server" Jan 20 17:11:33 crc kubenswrapper[4558]: E0120 17:11:33.384016 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3959a6a-ea1a-489e-a441-3645144becc9" containerName="dnsmasq-dns" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.384061 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3959a6a-ea1a-489e-a441-3645144becc9" containerName="dnsmasq-dns" Jan 20 17:11:33 crc kubenswrapper[4558]: E0120 17:11:33.384104 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d" containerName="glance-httpd" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.384148 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d" containerName="glance-httpd" Jan 20 17:11:33 crc kubenswrapper[4558]: E0120 17:11:33.384222 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="209ec6bc-170f-4c8a-ad7e-e441ace95d1b" containerName="barbican-keystone-listener" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.384270 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="209ec6bc-170f-4c8a-ad7e-e441ace95d1b" containerName="barbican-keystone-listener" Jan 20 17:11:33 crc kubenswrapper[4558]: E0120 17:11:33.384315 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc023815-924a-4a04-bad2-5fc862ef20ed" containerName="account-reaper" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.384354 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc023815-924a-4a04-bad2-5fc862ef20ed" containerName="account-reaper" Jan 20 17:11:33 crc kubenswrapper[4558]: E0120 17:11:33.384405 4558 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="4406b36c-adb2-4042-bc92-9efed5a43942" containerName="memcached" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.384450 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4406b36c-adb2-4042-bc92-9efed5a43942" containerName="memcached" Jan 20 17:11:33 crc kubenswrapper[4558]: E0120 17:11:33.384495 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="043476ec-5392-41af-970c-89d20b6b30a5" containerName="placement-api" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.384537 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="043476ec-5392-41af-970c-89d20b6b30a5" containerName="placement-api" Jan 20 17:11:33 crc kubenswrapper[4558]: E0120 17:11:33.384576 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6a5b342-eaf2-408c-828e-9bc0bf10d09e" containerName="openstack-network-exporter" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.384617 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6a5b342-eaf2-408c-828e-9bc0bf10d09e" containerName="openstack-network-exporter" Jan 20 17:11:33 crc kubenswrapper[4558]: E0120 17:11:33.384662 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="209ec6bc-170f-4c8a-ad7e-e441ace95d1b" containerName="barbican-keystone-listener-log" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.384704 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="209ec6bc-170f-4c8a-ad7e-e441ace95d1b" containerName="barbican-keystone-listener-log" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.385023 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="95cf677f-e744-4dad-acfb-507100d2ea14" containerName="ceilometer-notification-agent" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.385080 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc023815-924a-4a04-bad2-5fc862ef20ed" containerName="container-updater" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.385128 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="87001341-5a74-4796-a332-6d57b2cf11c9" containerName="barbican-api-log" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.385195 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d3959a6a-ea1a-489e-a441-3645144becc9" containerName="dnsmasq-dns" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.385236 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc023815-924a-4a04-bad2-5fc862ef20ed" containerName="account-reaper" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.385282 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="043476ec-5392-41af-970c-89d20b6b30a5" containerName="placement-api" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.385329 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d" containerName="glance-httpd" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.385374 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc023815-924a-4a04-bad2-5fc862ef20ed" containerName="container-server" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.385434 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="95cf677f-e744-4dad-acfb-507100d2ea14" containerName="proxy-httpd" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.385480 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="25737ece-fb84-4543-8c3a-94ffa7b8f095" containerName="glance-httpd" Jan 20 17:11:33 crc 
kubenswrapper[4558]: I0120 17:11:33.385522 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="5560f6a4-fc05-4d97-8496-b87804dfab99" containerName="nova-metadata-metadata" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.385568 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="07fab146-67be-42ba-b263-ee19fe95720b" containerName="rabbitmq" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.385610 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc023815-924a-4a04-bad2-5fc862ef20ed" containerName="container-auditor" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.385660 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="1ec5ef72-7c69-4be3-974d-c020ddfea4f7" containerName="probe" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.385707 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc023815-924a-4a04-bad2-5fc862ef20ed" containerName="object-server" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.385751 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc023815-924a-4a04-bad2-5fc862ef20ed" containerName="object-auditor" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.385796 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ee147895-0bad-4dc1-a348-1be3348a7180" containerName="cinder-api" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.385841 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="5560f6a4-fc05-4d97-8496-b87804dfab99" containerName="nova-metadata-log" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.387304 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="209ec6bc-170f-4c8a-ad7e-e441ace95d1b" containerName="barbican-keystone-listener-log" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.387375 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="209ec6bc-170f-4c8a-ad7e-e441ace95d1b" containerName="barbican-keystone-listener" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.387411 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="4406b36c-adb2-4042-bc92-9efed5a43942" containerName="memcached" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.387420 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="043476ec-5392-41af-970c-89d20b6b30a5" containerName="placement-log" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.387432 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="faade961-fce0-4ad6-a039-4ba83a95dd68" containerName="barbican-worker-log" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.387447 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="95cf677f-e744-4dad-acfb-507100d2ea14" containerName="ceilometer-central-agent" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.387455 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c6a5b342-eaf2-408c-828e-9bc0bf10d09e" containerName="ovn-northd" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.387470 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1" containerName="nova-api-log" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.387479 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf954b15-e163-4f99-8c1b-5e04d06666bc" containerName="nova-cell0-conductor-conductor" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.387491 4558 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="fc023815-924a-4a04-bad2-5fc862ef20ed" containerName="account-auditor" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.387500 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="87001341-5a74-4796-a332-6d57b2cf11c9" containerName="barbican-api" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.387516 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="39403277-bf62-47c1-8e86-cdec59f2da7b" containerName="rabbitmq" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.387528 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="faade961-fce0-4ad6-a039-4ba83a95dd68" containerName="barbican-worker" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.387539 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="931d9a7a-aa04-42c3-a276-e71388a8b9d3" containerName="mariadb-account-create-update" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.387548 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="25737ece-fb84-4543-8c3a-94ffa7b8f095" containerName="glance-log" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.387555 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="1ec5ef72-7c69-4be3-974d-c020ddfea4f7" containerName="cinder-scheduler" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.387572 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ee147895-0bad-4dc1-a348-1be3348a7180" containerName="cinder-api-log" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.387582 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c6a5b342-eaf2-408c-828e-9bc0bf10d09e" containerName="openstack-network-exporter" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.387591 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="931d9a7a-aa04-42c3-a276-e71388a8b9d3" containerName="mariadb-account-create-update" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.387600 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="5558aeea-2a7f-4654-a261-a2902c9434e0" containerName="galera" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.387608 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="04194274-834f-49e3-ac2f-c28998193181" containerName="nova-scheduler-scheduler" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.387620 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc023815-924a-4a04-bad2-5fc862ef20ed" containerName="account-replicator" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.387635 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ecb70be9-510b-4ec7-ae5b-20f2ae8dd6a1" containerName="nova-api-api" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.387642 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c3c7801f-4a15-44ce-8732-4f93b819a7c5" containerName="keystone-api" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.387654 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc023815-924a-4a04-bad2-5fc862ef20ed" containerName="account-server" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.387661 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a77ca0d2-5ba2-49a1-aa97-c0ee74a5ca0d" containerName="glance-log" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.387669 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc023815-924a-4a04-bad2-5fc862ef20ed" 
containerName="object-updater" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.387675 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5d75d5a-4202-477d-97a5-8d92bcdd13b9" containerName="neutron-api" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.387682 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc023815-924a-4a04-bad2-5fc862ef20ed" containerName="container-replicator" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.387689 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc023815-924a-4a04-bad2-5fc862ef20ed" containerName="swift-recon-cron" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.387696 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="95cf677f-e744-4dad-acfb-507100d2ea14" containerName="sg-core" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.387705 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc023815-924a-4a04-bad2-5fc862ef20ed" containerName="object-expirer" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.387714 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5d75d5a-4202-477d-97a5-8d92bcdd13b9" containerName="neutron-httpd" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.387722 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc023815-924a-4a04-bad2-5fc862ef20ed" containerName="rsync" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.387729 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc023815-924a-4a04-bad2-5fc862ef20ed" containerName="object-replicator" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.388422 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-w52qk"] Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.388520 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-w52qk" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.390838 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.390862 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.392611 4558 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-k9ht8" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.392861 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.482690 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/cf9955a8-a332-4d22-bd01-0a1fa834fe9e-crc-storage\") pod \"crc-storage-crc-w52qk\" (UID: \"cf9955a8-a332-4d22-bd01-0a1fa834fe9e\") " pod="crc-storage/crc-storage-crc-w52qk" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.482915 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/cf9955a8-a332-4d22-bd01-0a1fa834fe9e-node-mnt\") pod \"crc-storage-crc-w52qk\" (UID: \"cf9955a8-a332-4d22-bd01-0a1fa834fe9e\") " pod="crc-storage/crc-storage-crc-w52qk" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.483024 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kfth9\" (UniqueName: \"kubernetes.io/projected/cf9955a8-a332-4d22-bd01-0a1fa834fe9e-kube-api-access-kfth9\") pod \"crc-storage-crc-w52qk\" (UID: \"cf9955a8-a332-4d22-bd01-0a1fa834fe9e\") " pod="crc-storage/crc-storage-crc-w52qk" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.583617 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/cf9955a8-a332-4d22-bd01-0a1fa834fe9e-crc-storage\") pod \"crc-storage-crc-w52qk\" (UID: \"cf9955a8-a332-4d22-bd01-0a1fa834fe9e\") " pod="crc-storage/crc-storage-crc-w52qk" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.583724 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/cf9955a8-a332-4d22-bd01-0a1fa834fe9e-node-mnt\") pod \"crc-storage-crc-w52qk\" (UID: \"cf9955a8-a332-4d22-bd01-0a1fa834fe9e\") " pod="crc-storage/crc-storage-crc-w52qk" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.583780 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kfth9\" (UniqueName: \"kubernetes.io/projected/cf9955a8-a332-4d22-bd01-0a1fa834fe9e-kube-api-access-kfth9\") pod \"crc-storage-crc-w52qk\" (UID: \"cf9955a8-a332-4d22-bd01-0a1fa834fe9e\") " pod="crc-storage/crc-storage-crc-w52qk" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.583949 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/cf9955a8-a332-4d22-bd01-0a1fa834fe9e-node-mnt\") pod \"crc-storage-crc-w52qk\" (UID: \"cf9955a8-a332-4d22-bd01-0a1fa834fe9e\") " pod="crc-storage/crc-storage-crc-w52qk" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.584636 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"crc-storage\" (UniqueName: \"kubernetes.io/configmap/cf9955a8-a332-4d22-bd01-0a1fa834fe9e-crc-storage\") pod \"crc-storage-crc-w52qk\" (UID: \"cf9955a8-a332-4d22-bd01-0a1fa834fe9e\") " pod="crc-storage/crc-storage-crc-w52qk" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.601055 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kfth9\" (UniqueName: \"kubernetes.io/projected/cf9955a8-a332-4d22-bd01-0a1fa834fe9e-kube-api-access-kfth9\") pod \"crc-storage-crc-w52qk\" (UID: \"cf9955a8-a332-4d22-bd01-0a1fa834fe9e\") " pod="crc-storage/crc-storage-crc-w52qk" Jan 20 17:11:33 crc kubenswrapper[4558]: I0120 17:11:33.708289 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-w52qk" Jan 20 17:11:34 crc kubenswrapper[4558]: I0120 17:11:34.097324 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-w52qk"] Jan 20 17:11:34 crc kubenswrapper[4558]: I0120 17:11:34.574818 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="06232454-6033-4d5a-98e9-10552b195792" path="/var/lib/kubelet/pods/06232454-6033-4d5a-98e9-10552b195792/volumes" Jan 20 17:11:35 crc kubenswrapper[4558]: I0120 17:11:35.019029 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-w52qk" event={"ID":"cf9955a8-a332-4d22-bd01-0a1fa834fe9e","Type":"ContainerStarted","Data":"c1a4b84370225b549fc02b79145b4c61e6750fe3d5ea99a5fed3b0d5238da749"} Jan 20 17:11:35 crc kubenswrapper[4558]: I0120 17:11:35.019496 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-w52qk" event={"ID":"cf9955a8-a332-4d22-bd01-0a1fa834fe9e","Type":"ContainerStarted","Data":"ed55667ea1a198b93314f4c299c488756b1a1ff657a1ebdd5cb119d7f635cc7f"} Jan 20 17:11:35 crc kubenswrapper[4558]: I0120 17:11:35.036762 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="crc-storage/crc-storage-crc-w52qk" podStartSLOduration=1.336879917 podStartE2EDuration="2.036737156s" podCreationTimestamp="2026-01-20 17:11:33 +0000 UTC" firstStartedPulling="2026-01-20 17:11:34.106271869 +0000 UTC m=+1787.866609837" lastFinishedPulling="2026-01-20 17:11:34.806129109 +0000 UTC m=+1788.566467076" observedRunningTime="2026-01-20 17:11:35.033421112 +0000 UTC m=+1788.793759079" watchObservedRunningTime="2026-01-20 17:11:35.036737156 +0000 UTC m=+1788.797075114" Jan 20 17:11:36 crc kubenswrapper[4558]: I0120 17:11:36.032437 4558 generic.go:334] "Generic (PLEG): container finished" podID="cf9955a8-a332-4d22-bd01-0a1fa834fe9e" containerID="c1a4b84370225b549fc02b79145b4c61e6750fe3d5ea99a5fed3b0d5238da749" exitCode=0 Jan 20 17:11:36 crc kubenswrapper[4558]: I0120 17:11:36.032563 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-w52qk" event={"ID":"cf9955a8-a332-4d22-bd01-0a1fa834fe9e","Type":"ContainerDied","Data":"c1a4b84370225b549fc02b79145b4c61e6750fe3d5ea99a5fed3b0d5238da749"} Jan 20 17:11:36 crc kubenswrapper[4558]: I0120 17:11:36.570939 4558 scope.go:117] "RemoveContainer" containerID="a711a286282347590079d2447ef37fa9ed0f11085d6d7a65f85e65c1c2df608f" Jan 20 17:11:37 crc kubenswrapper[4558]: I0120 17:11:37.048769 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerStarted","Data":"09cede35de046f760d5c008c35cdd13631d8660c06e04fb988751619769147c6"} Jan 20 17:11:37 crc 
kubenswrapper[4558]: I0120 17:11:37.283525 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-w52qk" Jan 20 17:11:37 crc kubenswrapper[4558]: I0120 17:11:37.338756 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfth9\" (UniqueName: \"kubernetes.io/projected/cf9955a8-a332-4d22-bd01-0a1fa834fe9e-kube-api-access-kfth9\") pod \"cf9955a8-a332-4d22-bd01-0a1fa834fe9e\" (UID: \"cf9955a8-a332-4d22-bd01-0a1fa834fe9e\") " Jan 20 17:11:37 crc kubenswrapper[4558]: I0120 17:11:37.338801 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/cf9955a8-a332-4d22-bd01-0a1fa834fe9e-crc-storage\") pod \"cf9955a8-a332-4d22-bd01-0a1fa834fe9e\" (UID: \"cf9955a8-a332-4d22-bd01-0a1fa834fe9e\") " Jan 20 17:11:37 crc kubenswrapper[4558]: I0120 17:11:37.338832 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/cf9955a8-a332-4d22-bd01-0a1fa834fe9e-node-mnt\") pod \"cf9955a8-a332-4d22-bd01-0a1fa834fe9e\" (UID: \"cf9955a8-a332-4d22-bd01-0a1fa834fe9e\") " Jan 20 17:11:37 crc kubenswrapper[4558]: I0120 17:11:37.339110 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/cf9955a8-a332-4d22-bd01-0a1fa834fe9e-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "cf9955a8-a332-4d22-bd01-0a1fa834fe9e" (UID: "cf9955a8-a332-4d22-bd01-0a1fa834fe9e"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:11:37 crc kubenswrapper[4558]: I0120 17:11:37.343114 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cf9955a8-a332-4d22-bd01-0a1fa834fe9e-kube-api-access-kfth9" (OuterVolumeSpecName: "kube-api-access-kfth9") pod "cf9955a8-a332-4d22-bd01-0a1fa834fe9e" (UID: "cf9955a8-a332-4d22-bd01-0a1fa834fe9e"). InnerVolumeSpecName "kube-api-access-kfth9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:11:37 crc kubenswrapper[4558]: I0120 17:11:37.357427 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cf9955a8-a332-4d22-bd01-0a1fa834fe9e-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "cf9955a8-a332-4d22-bd01-0a1fa834fe9e" (UID: "cf9955a8-a332-4d22-bd01-0a1fa834fe9e"). InnerVolumeSpecName "crc-storage". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:11:37 crc kubenswrapper[4558]: I0120 17:11:37.440519 4558 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/cf9955a8-a332-4d22-bd01-0a1fa834fe9e-node-mnt\") on node \"crc\" DevicePath \"\"" Jan 20 17:11:37 crc kubenswrapper[4558]: I0120 17:11:37.440582 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfth9\" (UniqueName: \"kubernetes.io/projected/cf9955a8-a332-4d22-bd01-0a1fa834fe9e-kube-api-access-kfth9\") on node \"crc\" DevicePath \"\"" Jan 20 17:11:37 crc kubenswrapper[4558]: I0120 17:11:37.440595 4558 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/cf9955a8-a332-4d22-bd01-0a1fa834fe9e-crc-storage\") on node \"crc\" DevicePath \"\"" Jan 20 17:11:38 crc kubenswrapper[4558]: I0120 17:11:38.060568 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-w52qk" event={"ID":"cf9955a8-a332-4d22-bd01-0a1fa834fe9e","Type":"ContainerDied","Data":"ed55667ea1a198b93314f4c299c488756b1a1ff657a1ebdd5cb119d7f635cc7f"} Jan 20 17:11:38 crc kubenswrapper[4558]: I0120 17:11:38.061004 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ed55667ea1a198b93314f4c299c488756b1a1ff657a1ebdd5cb119d7f635cc7f" Jan 20 17:11:38 crc kubenswrapper[4558]: I0120 17:11:38.060643 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-w52qk" Jan 20 17:11:39 crc kubenswrapper[4558]: I0120 17:11:39.952859 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-w52qk"] Jan 20 17:11:39 crc kubenswrapper[4558]: I0120 17:11:39.957388 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-w52qk"] Jan 20 17:11:40 crc kubenswrapper[4558]: I0120 17:11:40.062095 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-x77zr"] Jan 20 17:11:40 crc kubenswrapper[4558]: E0120 17:11:40.062461 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf9955a8-a332-4d22-bd01-0a1fa834fe9e" containerName="storage" Jan 20 17:11:40 crc kubenswrapper[4558]: I0120 17:11:40.062485 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf9955a8-a332-4d22-bd01-0a1fa834fe9e" containerName="storage" Jan 20 17:11:40 crc kubenswrapper[4558]: I0120 17:11:40.062632 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf9955a8-a332-4d22-bd01-0a1fa834fe9e" containerName="storage" Jan 20 17:11:40 crc kubenswrapper[4558]: I0120 17:11:40.063103 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-x77zr" Jan 20 17:11:40 crc kubenswrapper[4558]: I0120 17:11:40.065181 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Jan 20 17:11:40 crc kubenswrapper[4558]: I0120 17:11:40.067019 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Jan 20 17:11:40 crc kubenswrapper[4558]: I0120 17:11:40.067581 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Jan 20 17:11:40 crc kubenswrapper[4558]: I0120 17:11:40.070553 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-x77zr"] Jan 20 17:11:40 crc kubenswrapper[4558]: I0120 17:11:40.071773 4558 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-k9ht8" Jan 20 17:11:40 crc kubenswrapper[4558]: I0120 17:11:40.077977 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/8a298713-e4df-41b7-a492-31833ff89d7e-node-mnt\") pod \"crc-storage-crc-x77zr\" (UID: \"8a298713-e4df-41b7-a492-31833ff89d7e\") " pod="crc-storage/crc-storage-crc-x77zr" Jan 20 17:11:40 crc kubenswrapper[4558]: I0120 17:11:40.078037 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z8bkw\" (UniqueName: \"kubernetes.io/projected/8a298713-e4df-41b7-a492-31833ff89d7e-kube-api-access-z8bkw\") pod \"crc-storage-crc-x77zr\" (UID: \"8a298713-e4df-41b7-a492-31833ff89d7e\") " pod="crc-storage/crc-storage-crc-x77zr" Jan 20 17:11:40 crc kubenswrapper[4558]: I0120 17:11:40.078250 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/8a298713-e4df-41b7-a492-31833ff89d7e-crc-storage\") pod \"crc-storage-crc-x77zr\" (UID: \"8a298713-e4df-41b7-a492-31833ff89d7e\") " pod="crc-storage/crc-storage-crc-x77zr" Jan 20 17:11:40 crc kubenswrapper[4558]: I0120 17:11:40.181115 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/8a298713-e4df-41b7-a492-31833ff89d7e-crc-storage\") pod \"crc-storage-crc-x77zr\" (UID: \"8a298713-e4df-41b7-a492-31833ff89d7e\") " pod="crc-storage/crc-storage-crc-x77zr" Jan 20 17:11:40 crc kubenswrapper[4558]: I0120 17:11:40.181209 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/8a298713-e4df-41b7-a492-31833ff89d7e-node-mnt\") pod \"crc-storage-crc-x77zr\" (UID: \"8a298713-e4df-41b7-a492-31833ff89d7e\") " pod="crc-storage/crc-storage-crc-x77zr" Jan 20 17:11:40 crc kubenswrapper[4558]: I0120 17:11:40.181247 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z8bkw\" (UniqueName: \"kubernetes.io/projected/8a298713-e4df-41b7-a492-31833ff89d7e-kube-api-access-z8bkw\") pod \"crc-storage-crc-x77zr\" (UID: \"8a298713-e4df-41b7-a492-31833ff89d7e\") " pod="crc-storage/crc-storage-crc-x77zr" Jan 20 17:11:40 crc kubenswrapper[4558]: I0120 17:11:40.181800 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/8a298713-e4df-41b7-a492-31833ff89d7e-node-mnt\") pod \"crc-storage-crc-x77zr\" (UID: \"8a298713-e4df-41b7-a492-31833ff89d7e\") " 
pod="crc-storage/crc-storage-crc-x77zr" Jan 20 17:11:40 crc kubenswrapper[4558]: I0120 17:11:40.182026 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/8a298713-e4df-41b7-a492-31833ff89d7e-crc-storage\") pod \"crc-storage-crc-x77zr\" (UID: \"8a298713-e4df-41b7-a492-31833ff89d7e\") " pod="crc-storage/crc-storage-crc-x77zr" Jan 20 17:11:40 crc kubenswrapper[4558]: I0120 17:11:40.200429 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z8bkw\" (UniqueName: \"kubernetes.io/projected/8a298713-e4df-41b7-a492-31833ff89d7e-kube-api-access-z8bkw\") pod \"crc-storage-crc-x77zr\" (UID: \"8a298713-e4df-41b7-a492-31833ff89d7e\") " pod="crc-storage/crc-storage-crc-x77zr" Jan 20 17:11:40 crc kubenswrapper[4558]: I0120 17:11:40.380980 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-x77zr" Jan 20 17:11:40 crc kubenswrapper[4558]: I0120 17:11:40.574331 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cf9955a8-a332-4d22-bd01-0a1fa834fe9e" path="/var/lib/kubelet/pods/cf9955a8-a332-4d22-bd01-0a1fa834fe9e/volumes" Jan 20 17:11:40 crc kubenswrapper[4558]: I0120 17:11:40.777722 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-x77zr"] Jan 20 17:11:40 crc kubenswrapper[4558]: W0120 17:11:40.781570 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8a298713_e4df_41b7_a492_31833ff89d7e.slice/crio-bc81084f497adc7ded59e577df5ee72b966c0363a4cc895de535c5d202da1660 WatchSource:0}: Error finding container bc81084f497adc7ded59e577df5ee72b966c0363a4cc895de535c5d202da1660: Status 404 returned error can't find the container with id bc81084f497adc7ded59e577df5ee72b966c0363a4cc895de535c5d202da1660 Jan 20 17:11:41 crc kubenswrapper[4558]: I0120 17:11:41.082053 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-x77zr" event={"ID":"8a298713-e4df-41b7-a492-31833ff89d7e","Type":"ContainerStarted","Data":"bc81084f497adc7ded59e577df5ee72b966c0363a4cc895de535c5d202da1660"} Jan 20 17:11:42 crc kubenswrapper[4558]: I0120 17:11:42.094685 4558 generic.go:334] "Generic (PLEG): container finished" podID="8a298713-e4df-41b7-a492-31833ff89d7e" containerID="afdc53de64cb2128ddb1ed677df93ded610adda09fa181e5dd671194f1e9cd60" exitCode=0 Jan 20 17:11:42 crc kubenswrapper[4558]: I0120 17:11:42.094756 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-x77zr" event={"ID":"8a298713-e4df-41b7-a492-31833ff89d7e","Type":"ContainerDied","Data":"afdc53de64cb2128ddb1ed677df93ded610adda09fa181e5dd671194f1e9cd60"} Jan 20 17:11:43 crc kubenswrapper[4558]: I0120 17:11:43.343461 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-x77zr" Jan 20 17:11:43 crc kubenswrapper[4558]: I0120 17:11:43.439996 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/8a298713-e4df-41b7-a492-31833ff89d7e-crc-storage\") pod \"8a298713-e4df-41b7-a492-31833ff89d7e\" (UID: \"8a298713-e4df-41b7-a492-31833ff89d7e\") " Jan 20 17:11:43 crc kubenswrapper[4558]: I0120 17:11:43.440390 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z8bkw\" (UniqueName: \"kubernetes.io/projected/8a298713-e4df-41b7-a492-31833ff89d7e-kube-api-access-z8bkw\") pod \"8a298713-e4df-41b7-a492-31833ff89d7e\" (UID: \"8a298713-e4df-41b7-a492-31833ff89d7e\") " Jan 20 17:11:43 crc kubenswrapper[4558]: I0120 17:11:43.440551 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/8a298713-e4df-41b7-a492-31833ff89d7e-node-mnt\") pod \"8a298713-e4df-41b7-a492-31833ff89d7e\" (UID: \"8a298713-e4df-41b7-a492-31833ff89d7e\") " Jan 20 17:11:43 crc kubenswrapper[4558]: I0120 17:11:43.440629 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8a298713-e4df-41b7-a492-31833ff89d7e-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "8a298713-e4df-41b7-a492-31833ff89d7e" (UID: "8a298713-e4df-41b7-a492-31833ff89d7e"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:11:43 crc kubenswrapper[4558]: I0120 17:11:43.441124 4558 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/8a298713-e4df-41b7-a492-31833ff89d7e-node-mnt\") on node \"crc\" DevicePath \"\"" Jan 20 17:11:43 crc kubenswrapper[4558]: I0120 17:11:43.445533 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8a298713-e4df-41b7-a492-31833ff89d7e-kube-api-access-z8bkw" (OuterVolumeSpecName: "kube-api-access-z8bkw") pod "8a298713-e4df-41b7-a492-31833ff89d7e" (UID: "8a298713-e4df-41b7-a492-31833ff89d7e"). InnerVolumeSpecName "kube-api-access-z8bkw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:11:43 crc kubenswrapper[4558]: I0120 17:11:43.458394 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8a298713-e4df-41b7-a492-31833ff89d7e-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "8a298713-e4df-41b7-a492-31833ff89d7e" (UID: "8a298713-e4df-41b7-a492-31833ff89d7e"). InnerVolumeSpecName "crc-storage". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:11:43 crc kubenswrapper[4558]: I0120 17:11:43.542738 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z8bkw\" (UniqueName: \"kubernetes.io/projected/8a298713-e4df-41b7-a492-31833ff89d7e-kube-api-access-z8bkw\") on node \"crc\" DevicePath \"\"" Jan 20 17:11:43 crc kubenswrapper[4558]: I0120 17:11:43.542850 4558 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/8a298713-e4df-41b7-a492-31833ff89d7e-crc-storage\") on node \"crc\" DevicePath \"\"" Jan 20 17:11:44 crc kubenswrapper[4558]: I0120 17:11:44.120695 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-x77zr" event={"ID":"8a298713-e4df-41b7-a492-31833ff89d7e","Type":"ContainerDied","Data":"bc81084f497adc7ded59e577df5ee72b966c0363a4cc895de535c5d202da1660"} Jan 20 17:11:44 crc kubenswrapper[4558]: I0120 17:11:44.121037 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bc81084f497adc7ded59e577df5ee72b966c0363a4cc895de535c5d202da1660" Jan 20 17:11:44 crc kubenswrapper[4558]: I0120 17:11:44.121111 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-x77zr" Jan 20 17:11:48 crc kubenswrapper[4558]: I0120 17:11:48.924064 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08jrb5p"] Jan 20 17:11:48 crc kubenswrapper[4558]: E0120 17:11:48.924851 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a298713-e4df-41b7-a492-31833ff89d7e" containerName="storage" Jan 20 17:11:48 crc kubenswrapper[4558]: I0120 17:11:48.924865 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a298713-e4df-41b7-a492-31833ff89d7e" containerName="storage" Jan 20 17:11:48 crc kubenswrapper[4558]: I0120 17:11:48.925008 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8a298713-e4df-41b7-a492-31833ff89d7e" containerName="storage" Jan 20 17:11:48 crc kubenswrapper[4558]: I0120 17:11:48.925963 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08jrb5p" Jan 20 17:11:48 crc kubenswrapper[4558]: I0120 17:11:48.927682 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Jan 20 17:11:48 crc kubenswrapper[4558]: I0120 17:11:48.931492 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08jrb5p"] Jan 20 17:11:49 crc kubenswrapper[4558]: I0120 17:11:49.015727 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a606311e-7c02-48a9-9f83-46f862549670-util\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08jrb5p\" (UID: \"a606311e-7c02-48a9-9f83-46f862549670\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08jrb5p" Jan 20 17:11:49 crc kubenswrapper[4558]: I0120 17:11:49.015776 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a606311e-7c02-48a9-9f83-46f862549670-bundle\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08jrb5p\" (UID: \"a606311e-7c02-48a9-9f83-46f862549670\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08jrb5p" Jan 20 17:11:49 crc kubenswrapper[4558]: I0120 17:11:49.015822 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fslzh\" (UniqueName: \"kubernetes.io/projected/a606311e-7c02-48a9-9f83-46f862549670-kube-api-access-fslzh\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08jrb5p\" (UID: \"a606311e-7c02-48a9-9f83-46f862549670\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08jrb5p" Jan 20 17:11:49 crc kubenswrapper[4558]: I0120 17:11:49.117726 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a606311e-7c02-48a9-9f83-46f862549670-util\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08jrb5p\" (UID: \"a606311e-7c02-48a9-9f83-46f862549670\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08jrb5p" Jan 20 17:11:49 crc kubenswrapper[4558]: I0120 17:11:49.117784 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a606311e-7c02-48a9-9f83-46f862549670-bundle\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08jrb5p\" (UID: \"a606311e-7c02-48a9-9f83-46f862549670\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08jrb5p" Jan 20 17:11:49 crc kubenswrapper[4558]: I0120 17:11:49.117843 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fslzh\" (UniqueName: \"kubernetes.io/projected/a606311e-7c02-48a9-9f83-46f862549670-kube-api-access-fslzh\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08jrb5p\" (UID: \"a606311e-7c02-48a9-9f83-46f862549670\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08jrb5p" Jan 20 17:11:49 crc kubenswrapper[4558]: I0120 17:11:49.118256 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/a606311e-7c02-48a9-9f83-46f862549670-util\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08jrb5p\" (UID: \"a606311e-7c02-48a9-9f83-46f862549670\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08jrb5p" Jan 20 17:11:49 crc kubenswrapper[4558]: I0120 17:11:49.118366 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a606311e-7c02-48a9-9f83-46f862549670-bundle\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08jrb5p\" (UID: \"a606311e-7c02-48a9-9f83-46f862549670\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08jrb5p" Jan 20 17:11:49 crc kubenswrapper[4558]: I0120 17:11:49.137448 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fslzh\" (UniqueName: \"kubernetes.io/projected/a606311e-7c02-48a9-9f83-46f862549670-kube-api-access-fslzh\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08jrb5p\" (UID: \"a606311e-7c02-48a9-9f83-46f862549670\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08jrb5p" Jan 20 17:11:49 crc kubenswrapper[4558]: I0120 17:11:49.242302 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08jrb5p" Jan 20 17:11:49 crc kubenswrapper[4558]: I0120 17:11:49.630036 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08jrb5p"] Jan 20 17:11:50 crc kubenswrapper[4558]: I0120 17:11:50.179553 4558 generic.go:334] "Generic (PLEG): container finished" podID="a606311e-7c02-48a9-9f83-46f862549670" containerID="7cbf1ecbfd0136248e8be8ba375209cf352702003c5faa39ef8930f0a72f78c6" exitCode=0 Jan 20 17:11:50 crc kubenswrapper[4558]: I0120 17:11:50.179613 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08jrb5p" event={"ID":"a606311e-7c02-48a9-9f83-46f862549670","Type":"ContainerDied","Data":"7cbf1ecbfd0136248e8be8ba375209cf352702003c5faa39ef8930f0a72f78c6"} Jan 20 17:11:50 crc kubenswrapper[4558]: I0120 17:11:50.179651 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08jrb5p" event={"ID":"a606311e-7c02-48a9-9f83-46f862549670","Type":"ContainerStarted","Data":"499b27f88256a6c095d6c2c4669f15b8a21b52e326ec782d1a7b55ca56878c45"} Jan 20 17:11:52 crc kubenswrapper[4558]: I0120 17:11:52.202095 4558 generic.go:334] "Generic (PLEG): container finished" podID="a606311e-7c02-48a9-9f83-46f862549670" containerID="7b9fb638147415f7ab6fb4be1932e024f37dd0e49de688e15bd88149a405bfb7" exitCode=0 Jan 20 17:11:52 crc kubenswrapper[4558]: I0120 17:11:52.202141 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08jrb5p" event={"ID":"a606311e-7c02-48a9-9f83-46f862549670","Type":"ContainerDied","Data":"7b9fb638147415f7ab6fb4be1932e024f37dd0e49de688e15bd88149a405bfb7"} Jan 20 17:11:53 crc kubenswrapper[4558]: I0120 17:11:53.215348 4558 generic.go:334] "Generic (PLEG): container finished" podID="a606311e-7c02-48a9-9f83-46f862549670" containerID="a9930865d3ccfefa62ece9825d4052305d00a69d74a80cec3585fa355fafaa5b" exitCode=0 Jan 20 17:11:53 crc kubenswrapper[4558]: I0120 
17:11:53.215425 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08jrb5p" event={"ID":"a606311e-7c02-48a9-9f83-46f862549670","Type":"ContainerDied","Data":"a9930865d3ccfefa62ece9825d4052305d00a69d74a80cec3585fa355fafaa5b"} Jan 20 17:11:54 crc kubenswrapper[4558]: I0120 17:11:54.459584 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08jrb5p" Jan 20 17:11:54 crc kubenswrapper[4558]: I0120 17:11:54.595412 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a606311e-7c02-48a9-9f83-46f862549670-bundle\") pod \"a606311e-7c02-48a9-9f83-46f862549670\" (UID: \"a606311e-7c02-48a9-9f83-46f862549670\") " Jan 20 17:11:54 crc kubenswrapper[4558]: I0120 17:11:54.595463 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a606311e-7c02-48a9-9f83-46f862549670-util\") pod \"a606311e-7c02-48a9-9f83-46f862549670\" (UID: \"a606311e-7c02-48a9-9f83-46f862549670\") " Jan 20 17:11:54 crc kubenswrapper[4558]: I0120 17:11:54.595914 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fslzh\" (UniqueName: \"kubernetes.io/projected/a606311e-7c02-48a9-9f83-46f862549670-kube-api-access-fslzh\") pod \"a606311e-7c02-48a9-9f83-46f862549670\" (UID: \"a606311e-7c02-48a9-9f83-46f862549670\") " Jan 20 17:11:54 crc kubenswrapper[4558]: I0120 17:11:54.598497 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a606311e-7c02-48a9-9f83-46f862549670-bundle" (OuterVolumeSpecName: "bundle") pod "a606311e-7c02-48a9-9f83-46f862549670" (UID: "a606311e-7c02-48a9-9f83-46f862549670"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:11:54 crc kubenswrapper[4558]: I0120 17:11:54.601465 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a606311e-7c02-48a9-9f83-46f862549670-kube-api-access-fslzh" (OuterVolumeSpecName: "kube-api-access-fslzh") pod "a606311e-7c02-48a9-9f83-46f862549670" (UID: "a606311e-7c02-48a9-9f83-46f862549670"). InnerVolumeSpecName "kube-api-access-fslzh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:11:54 crc kubenswrapper[4558]: I0120 17:11:54.608066 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a606311e-7c02-48a9-9f83-46f862549670-util" (OuterVolumeSpecName: "util") pod "a606311e-7c02-48a9-9f83-46f862549670" (UID: "a606311e-7c02-48a9-9f83-46f862549670"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:11:54 crc kubenswrapper[4558]: I0120 17:11:54.697625 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fslzh\" (UniqueName: \"kubernetes.io/projected/a606311e-7c02-48a9-9f83-46f862549670-kube-api-access-fslzh\") on node \"crc\" DevicePath \"\"" Jan 20 17:11:54 crc kubenswrapper[4558]: I0120 17:11:54.697784 4558 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a606311e-7c02-48a9-9f83-46f862549670-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:11:54 crc kubenswrapper[4558]: I0120 17:11:54.697845 4558 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a606311e-7c02-48a9-9f83-46f862549670-util\") on node \"crc\" DevicePath \"\"" Jan 20 17:11:55 crc kubenswrapper[4558]: I0120 17:11:55.233319 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08jrb5p" event={"ID":"a606311e-7c02-48a9-9f83-46f862549670","Type":"ContainerDied","Data":"499b27f88256a6c095d6c2c4669f15b8a21b52e326ec782d1a7b55ca56878c45"} Jan 20 17:11:55 crc kubenswrapper[4558]: I0120 17:11:55.233567 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="499b27f88256a6c095d6c2c4669f15b8a21b52e326ec782d1a7b55ca56878c45" Jan 20 17:11:55 crc kubenswrapper[4558]: I0120 17:11:55.233411 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08jrb5p" Jan 20 17:12:00 crc kubenswrapper[4558]: I0120 17:12:00.276066 4558 scope.go:117] "RemoveContainer" containerID="f8289d46cef13048b1da599d8f884a8ca8552346ce31e55069458d4dcbbf3e26" Jan 20 17:12:06 crc kubenswrapper[4558]: I0120 17:12:06.132473 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-68bc856cb9-nfzlw"] Jan 20 17:12:06 crc kubenswrapper[4558]: E0120 17:12:06.133257 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a606311e-7c02-48a9-9f83-46f862549670" containerName="util" Jan 20 17:12:06 crc kubenswrapper[4558]: I0120 17:12:06.133272 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a606311e-7c02-48a9-9f83-46f862549670" containerName="util" Jan 20 17:12:06 crc kubenswrapper[4558]: E0120 17:12:06.133290 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a606311e-7c02-48a9-9f83-46f862549670" containerName="extract" Jan 20 17:12:06 crc kubenswrapper[4558]: I0120 17:12:06.133296 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a606311e-7c02-48a9-9f83-46f862549670" containerName="extract" Jan 20 17:12:06 crc kubenswrapper[4558]: E0120 17:12:06.133315 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a606311e-7c02-48a9-9f83-46f862549670" containerName="pull" Jan 20 17:12:06 crc kubenswrapper[4558]: I0120 17:12:06.133320 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a606311e-7c02-48a9-9f83-46f862549670" containerName="pull" Jan 20 17:12:06 crc kubenswrapper[4558]: I0120 17:12:06.133469 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a606311e-7c02-48a9-9f83-46f862549670" containerName="extract" Jan 20 17:12:06 crc kubenswrapper[4558]: I0120 17:12:06.133979 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-nfzlw" Jan 20 17:12:06 crc kubenswrapper[4558]: I0120 17:12:06.136068 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"openshift-service-ca.crt" Jan 20 17:12:06 crc kubenswrapper[4558]: I0120 17:12:06.136348 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"kube-root-ca.crt" Jan 20 17:12:06 crc kubenswrapper[4558]: I0120 17:12:06.137862 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-dockercfg-cz5bx" Jan 20 17:12:06 crc kubenswrapper[4558]: I0120 17:12:06.149927 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-68bc856cb9-nfzlw"] Jan 20 17:12:06 crc kubenswrapper[4558]: I0120 17:12:06.262943 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ktnc8\" (UniqueName: \"kubernetes.io/projected/fef8d546-d905-4701-ac1e-09cd4c4b1ed8-kube-api-access-ktnc8\") pod \"obo-prometheus-operator-68bc856cb9-nfzlw\" (UID: \"fef8d546-d905-4701-ac1e-09cd4c4b1ed8\") " pod="openshift-operators/obo-prometheus-operator-68bc856cb9-nfzlw" Jan 20 17:12:06 crc kubenswrapper[4558]: I0120 17:12:06.328004 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-558658b7b5-hhs5k"] Jan 20 17:12:06 crc kubenswrapper[4558]: I0120 17:12:06.328981 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-558658b7b5-hhs5k" Jan 20 17:12:06 crc kubenswrapper[4558]: I0120 17:12:06.332981 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-service-cert" Jan 20 17:12:06 crc kubenswrapper[4558]: I0120 17:12:06.335406 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-dockercfg-d5x98" Jan 20 17:12:06 crc kubenswrapper[4558]: I0120 17:12:06.339664 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-558658b7b5-cpc8s"] Jan 20 17:12:06 crc kubenswrapper[4558]: I0120 17:12:06.340743 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-558658b7b5-cpc8s" Jan 20 17:12:06 crc kubenswrapper[4558]: I0120 17:12:06.362607 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-558658b7b5-cpc8s"] Jan 20 17:12:06 crc kubenswrapper[4558]: I0120 17:12:06.364440 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ktnc8\" (UniqueName: \"kubernetes.io/projected/fef8d546-d905-4701-ac1e-09cd4c4b1ed8-kube-api-access-ktnc8\") pod \"obo-prometheus-operator-68bc856cb9-nfzlw\" (UID: \"fef8d546-d905-4701-ac1e-09cd4c4b1ed8\") " pod="openshift-operators/obo-prometheus-operator-68bc856cb9-nfzlw" Jan 20 17:12:06 crc kubenswrapper[4558]: I0120 17:12:06.382913 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ktnc8\" (UniqueName: \"kubernetes.io/projected/fef8d546-d905-4701-ac1e-09cd4c4b1ed8-kube-api-access-ktnc8\") pod \"obo-prometheus-operator-68bc856cb9-nfzlw\" (UID: \"fef8d546-d905-4701-ac1e-09cd4c4b1ed8\") " pod="openshift-operators/obo-prometheus-operator-68bc856cb9-nfzlw" Jan 20 17:12:06 crc kubenswrapper[4558]: I0120 17:12:06.389001 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-558658b7b5-hhs5k"] Jan 20 17:12:06 crc kubenswrapper[4558]: I0120 17:12:06.450290 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-nfzlw" Jan 20 17:12:06 crc kubenswrapper[4558]: I0120 17:12:06.468434 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/8c562f2d-0fe1-4a6a-9664-00e2cad8c416-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-558658b7b5-hhs5k\" (UID: \"8c562f2d-0fe1-4a6a-9664-00e2cad8c416\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-558658b7b5-hhs5k" Jan 20 17:12:06 crc kubenswrapper[4558]: I0120 17:12:06.468506 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/42ac1cad-9697-4a87-974d-f1dfe31a3627-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-558658b7b5-cpc8s\" (UID: \"42ac1cad-9697-4a87-974d-f1dfe31a3627\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-558658b7b5-cpc8s" Jan 20 17:12:06 crc kubenswrapper[4558]: I0120 17:12:06.468560 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/42ac1cad-9697-4a87-974d-f1dfe31a3627-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-558658b7b5-cpc8s\" (UID: \"42ac1cad-9697-4a87-974d-f1dfe31a3627\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-558658b7b5-cpc8s" Jan 20 17:12:06 crc kubenswrapper[4558]: I0120 17:12:06.468584 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/8c562f2d-0fe1-4a6a-9664-00e2cad8c416-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-558658b7b5-hhs5k\" (UID: \"8c562f2d-0fe1-4a6a-9664-00e2cad8c416\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-558658b7b5-hhs5k" Jan 20 17:12:06 crc kubenswrapper[4558]: I0120 17:12:06.493722 4558 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/observability-operator-59bdc8b94-rlprt"] Jan 20 17:12:06 crc kubenswrapper[4558]: I0120 17:12:06.494645 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-59bdc8b94-rlprt" Jan 20 17:12:06 crc kubenswrapper[4558]: I0120 17:12:06.501515 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-tls" Jan 20 17:12:06 crc kubenswrapper[4558]: I0120 17:12:06.501567 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-sa-dockercfg-qbf68" Jan 20 17:12:06 crc kubenswrapper[4558]: I0120 17:12:06.509293 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-59bdc8b94-rlprt"] Jan 20 17:12:06 crc kubenswrapper[4558]: I0120 17:12:06.575318 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/8c562f2d-0fe1-4a6a-9664-00e2cad8c416-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-558658b7b5-hhs5k\" (UID: \"8c562f2d-0fe1-4a6a-9664-00e2cad8c416\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-558658b7b5-hhs5k" Jan 20 17:12:06 crc kubenswrapper[4558]: I0120 17:12:06.575363 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/42ac1cad-9697-4a87-974d-f1dfe31a3627-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-558658b7b5-cpc8s\" (UID: \"42ac1cad-9697-4a87-974d-f1dfe31a3627\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-558658b7b5-cpc8s" Jan 20 17:12:06 crc kubenswrapper[4558]: I0120 17:12:06.575414 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fc2vk\" (UniqueName: \"kubernetes.io/projected/0c2f4961-5cb9-4460-b8f2-ff20d5bafc08-kube-api-access-fc2vk\") pod \"observability-operator-59bdc8b94-rlprt\" (UID: \"0c2f4961-5cb9-4460-b8f2-ff20d5bafc08\") " pod="openshift-operators/observability-operator-59bdc8b94-rlprt" Jan 20 17:12:06 crc kubenswrapper[4558]: I0120 17:12:06.575437 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/42ac1cad-9697-4a87-974d-f1dfe31a3627-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-558658b7b5-cpc8s\" (UID: \"42ac1cad-9697-4a87-974d-f1dfe31a3627\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-558658b7b5-cpc8s" Jan 20 17:12:06 crc kubenswrapper[4558]: I0120 17:12:06.575455 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/8c562f2d-0fe1-4a6a-9664-00e2cad8c416-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-558658b7b5-hhs5k\" (UID: \"8c562f2d-0fe1-4a6a-9664-00e2cad8c416\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-558658b7b5-hhs5k" Jan 20 17:12:06 crc kubenswrapper[4558]: I0120 17:12:06.575515 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/0c2f4961-5cb9-4460-b8f2-ff20d5bafc08-observability-operator-tls\") pod \"observability-operator-59bdc8b94-rlprt\" (UID: \"0c2f4961-5cb9-4460-b8f2-ff20d5bafc08\") " 
pod="openshift-operators/observability-operator-59bdc8b94-rlprt" Jan 20 17:12:06 crc kubenswrapper[4558]: I0120 17:12:06.579784 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/8c562f2d-0fe1-4a6a-9664-00e2cad8c416-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-558658b7b5-hhs5k\" (UID: \"8c562f2d-0fe1-4a6a-9664-00e2cad8c416\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-558658b7b5-hhs5k" Jan 20 17:12:06 crc kubenswrapper[4558]: I0120 17:12:06.582595 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/42ac1cad-9697-4a87-974d-f1dfe31a3627-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-558658b7b5-cpc8s\" (UID: \"42ac1cad-9697-4a87-974d-f1dfe31a3627\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-558658b7b5-cpc8s" Jan 20 17:12:06 crc kubenswrapper[4558]: I0120 17:12:06.584901 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/8c562f2d-0fe1-4a6a-9664-00e2cad8c416-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-558658b7b5-hhs5k\" (UID: \"8c562f2d-0fe1-4a6a-9664-00e2cad8c416\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-558658b7b5-hhs5k" Jan 20 17:12:06 crc kubenswrapper[4558]: I0120 17:12:06.587513 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/42ac1cad-9697-4a87-974d-f1dfe31a3627-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-558658b7b5-cpc8s\" (UID: \"42ac1cad-9697-4a87-974d-f1dfe31a3627\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-558658b7b5-cpc8s" Jan 20 17:12:06 crc kubenswrapper[4558]: I0120 17:12:06.644001 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-558658b7b5-hhs5k" Jan 20 17:12:06 crc kubenswrapper[4558]: I0120 17:12:06.657928 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-558658b7b5-cpc8s" Jan 20 17:12:06 crc kubenswrapper[4558]: I0120 17:12:06.678907 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/0c2f4961-5cb9-4460-b8f2-ff20d5bafc08-observability-operator-tls\") pod \"observability-operator-59bdc8b94-rlprt\" (UID: \"0c2f4961-5cb9-4460-b8f2-ff20d5bafc08\") " pod="openshift-operators/observability-operator-59bdc8b94-rlprt" Jan 20 17:12:06 crc kubenswrapper[4558]: I0120 17:12:06.679038 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fc2vk\" (UniqueName: \"kubernetes.io/projected/0c2f4961-5cb9-4460-b8f2-ff20d5bafc08-kube-api-access-fc2vk\") pod \"observability-operator-59bdc8b94-rlprt\" (UID: \"0c2f4961-5cb9-4460-b8f2-ff20d5bafc08\") " pod="openshift-operators/observability-operator-59bdc8b94-rlprt" Jan 20 17:12:06 crc kubenswrapper[4558]: I0120 17:12:06.686422 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/0c2f4961-5cb9-4460-b8f2-ff20d5bafc08-observability-operator-tls\") pod \"observability-operator-59bdc8b94-rlprt\" (UID: \"0c2f4961-5cb9-4460-b8f2-ff20d5bafc08\") " pod="openshift-operators/observability-operator-59bdc8b94-rlprt" Jan 20 17:12:06 crc kubenswrapper[4558]: I0120 17:12:06.703152 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/perses-operator-5bf474d74f-6nh6n"] Jan 20 17:12:06 crc kubenswrapper[4558]: I0120 17:12:06.704114 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5bf474d74f-6nh6n" Jan 20 17:12:06 crc kubenswrapper[4558]: I0120 17:12:06.708027 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"perses-operator-dockercfg-xjr7b" Jan 20 17:12:06 crc kubenswrapper[4558]: I0120 17:12:06.710876 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fc2vk\" (UniqueName: \"kubernetes.io/projected/0c2f4961-5cb9-4460-b8f2-ff20d5bafc08-kube-api-access-fc2vk\") pod \"observability-operator-59bdc8b94-rlprt\" (UID: \"0c2f4961-5cb9-4460-b8f2-ff20d5bafc08\") " pod="openshift-operators/observability-operator-59bdc8b94-rlprt" Jan 20 17:12:06 crc kubenswrapper[4558]: I0120 17:12:06.713504 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5bf474d74f-6nh6n"] Jan 20 17:12:06 crc kubenswrapper[4558]: I0120 17:12:06.779753 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/6f4871c7-a64d-4d49-b830-66b6718d608a-openshift-service-ca\") pod \"perses-operator-5bf474d74f-6nh6n\" (UID: \"6f4871c7-a64d-4d49-b830-66b6718d608a\") " pod="openshift-operators/perses-operator-5bf474d74f-6nh6n" Jan 20 17:12:06 crc kubenswrapper[4558]: I0120 17:12:06.779806 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4q4ds\" (UniqueName: \"kubernetes.io/projected/6f4871c7-a64d-4d49-b830-66b6718d608a-kube-api-access-4q4ds\") pod \"perses-operator-5bf474d74f-6nh6n\" (UID: \"6f4871c7-a64d-4d49-b830-66b6718d608a\") " pod="openshift-operators/perses-operator-5bf474d74f-6nh6n" Jan 20 17:12:06 crc kubenswrapper[4558]: I0120 17:12:06.858468 4558 util.go:30] "No sandbox for pod 
can be found. Need to start a new one" pod="openshift-operators/observability-operator-59bdc8b94-rlprt" Jan 20 17:12:06 crc kubenswrapper[4558]: I0120 17:12:06.881568 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/6f4871c7-a64d-4d49-b830-66b6718d608a-openshift-service-ca\") pod \"perses-operator-5bf474d74f-6nh6n\" (UID: \"6f4871c7-a64d-4d49-b830-66b6718d608a\") " pod="openshift-operators/perses-operator-5bf474d74f-6nh6n" Jan 20 17:12:06 crc kubenswrapper[4558]: I0120 17:12:06.881603 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4q4ds\" (UniqueName: \"kubernetes.io/projected/6f4871c7-a64d-4d49-b830-66b6718d608a-kube-api-access-4q4ds\") pod \"perses-operator-5bf474d74f-6nh6n\" (UID: \"6f4871c7-a64d-4d49-b830-66b6718d608a\") " pod="openshift-operators/perses-operator-5bf474d74f-6nh6n" Jan 20 17:12:06 crc kubenswrapper[4558]: I0120 17:12:06.882648 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/6f4871c7-a64d-4d49-b830-66b6718d608a-openshift-service-ca\") pod \"perses-operator-5bf474d74f-6nh6n\" (UID: \"6f4871c7-a64d-4d49-b830-66b6718d608a\") " pod="openshift-operators/perses-operator-5bf474d74f-6nh6n" Jan 20 17:12:06 crc kubenswrapper[4558]: I0120 17:12:06.900885 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4q4ds\" (UniqueName: \"kubernetes.io/projected/6f4871c7-a64d-4d49-b830-66b6718d608a-kube-api-access-4q4ds\") pod \"perses-operator-5bf474d74f-6nh6n\" (UID: \"6f4871c7-a64d-4d49-b830-66b6718d608a\") " pod="openshift-operators/perses-operator-5bf474d74f-6nh6n" Jan 20 17:12:07 crc kubenswrapper[4558]: I0120 17:12:07.005049 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-68bc856cb9-nfzlw"] Jan 20 17:12:07 crc kubenswrapper[4558]: W0120 17:12:07.020318 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfef8d546_d905_4701_ac1e_09cd4c4b1ed8.slice/crio-5587c6f3a3e28d871fa0c20af38fa8721db40d02b4cf8b3f48203ed3bcd1b526 WatchSource:0}: Error finding container 5587c6f3a3e28d871fa0c20af38fa8721db40d02b4cf8b3f48203ed3bcd1b526: Status 404 returned error can't find the container with id 5587c6f3a3e28d871fa0c20af38fa8721db40d02b4cf8b3f48203ed3bcd1b526 Jan 20 17:12:07 crc kubenswrapper[4558]: I0120 17:12:07.021052 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-558658b7b5-hhs5k"] Jan 20 17:12:07 crc kubenswrapper[4558]: W0120 17:12:07.028474 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8c562f2d_0fe1_4a6a_9664_00e2cad8c416.slice/crio-3b30f84c03c10b8090fcc4bba131811a6d5a50fb7547f5770a7a8bc07b34c703 WatchSource:0}: Error finding container 3b30f84c03c10b8090fcc4bba131811a6d5a50fb7547f5770a7a8bc07b34c703: Status 404 returned error can't find the container with id 3b30f84c03c10b8090fcc4bba131811a6d5a50fb7547f5770a7a8bc07b34c703 Jan 20 17:12:07 crc kubenswrapper[4558]: I0120 17:12:07.031535 4558 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 20 17:12:07 crc kubenswrapper[4558]: I0120 17:12:07.051046 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/perses-operator-5bf474d74f-6nh6n" Jan 20 17:12:07 crc kubenswrapper[4558]: I0120 17:12:07.173216 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-558658b7b5-cpc8s"] Jan 20 17:12:07 crc kubenswrapper[4558]: W0120 17:12:07.181107 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod42ac1cad_9697_4a87_974d_f1dfe31a3627.slice/crio-4642f920e83ceb89a75d2fbc18c362c07d9fb96237187f656cd2eeacddd8cd37 WatchSource:0}: Error finding container 4642f920e83ceb89a75d2fbc18c362c07d9fb96237187f656cd2eeacddd8cd37: Status 404 returned error can't find the container with id 4642f920e83ceb89a75d2fbc18c362c07d9fb96237187f656cd2eeacddd8cd37 Jan 20 17:12:07 crc kubenswrapper[4558]: I0120 17:12:07.303639 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-59bdc8b94-rlprt"] Jan 20 17:12:07 crc kubenswrapper[4558]: W0120 17:12:07.310900 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0c2f4961_5cb9_4460_b8f2_ff20d5bafc08.slice/crio-b1b53711dbfdc35e2d318177908a4811e335484aba4d4f090bf2ba4a4bb3fb24 WatchSource:0}: Error finding container b1b53711dbfdc35e2d318177908a4811e335484aba4d4f090bf2ba4a4bb3fb24: Status 404 returned error can't find the container with id b1b53711dbfdc35e2d318177908a4811e335484aba4d4f090bf2ba4a4bb3fb24 Jan 20 17:12:07 crc kubenswrapper[4558]: I0120 17:12:07.352187 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-558658b7b5-cpc8s" event={"ID":"42ac1cad-9697-4a87-974d-f1dfe31a3627","Type":"ContainerStarted","Data":"4642f920e83ceb89a75d2fbc18c362c07d9fb96237187f656cd2eeacddd8cd37"} Jan 20 17:12:07 crc kubenswrapper[4558]: I0120 17:12:07.353612 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-nfzlw" event={"ID":"fef8d546-d905-4701-ac1e-09cd4c4b1ed8","Type":"ContainerStarted","Data":"5587c6f3a3e28d871fa0c20af38fa8721db40d02b4cf8b3f48203ed3bcd1b526"} Jan 20 17:12:07 crc kubenswrapper[4558]: I0120 17:12:07.356710 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-558658b7b5-hhs5k" event={"ID":"8c562f2d-0fe1-4a6a-9664-00e2cad8c416","Type":"ContainerStarted","Data":"3b30f84c03c10b8090fcc4bba131811a6d5a50fb7547f5770a7a8bc07b34c703"} Jan 20 17:12:07 crc kubenswrapper[4558]: I0120 17:12:07.358791 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-59bdc8b94-rlprt" event={"ID":"0c2f4961-5cb9-4460-b8f2-ff20d5bafc08","Type":"ContainerStarted","Data":"b1b53711dbfdc35e2d318177908a4811e335484aba4d4f090bf2ba4a4bb3fb24"} Jan 20 17:12:07 crc kubenswrapper[4558]: I0120 17:12:07.471142 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5bf474d74f-6nh6n"] Jan 20 17:12:07 crc kubenswrapper[4558]: W0120 17:12:07.481354 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6f4871c7_a64d_4d49_b830_66b6718d608a.slice/crio-20e6dd32018d9821f404788fb6b33f0dd4973bd407191d48bf6451aaf8b3a9d4 WatchSource:0}: Error finding container 20e6dd32018d9821f404788fb6b33f0dd4973bd407191d48bf6451aaf8b3a9d4: Status 404 returned error 
can't find the container with id 20e6dd32018d9821f404788fb6b33f0dd4973bd407191d48bf6451aaf8b3a9d4 Jan 20 17:12:08 crc kubenswrapper[4558]: I0120 17:12:08.439546 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5bf474d74f-6nh6n" event={"ID":"6f4871c7-a64d-4d49-b830-66b6718d608a","Type":"ContainerStarted","Data":"20e6dd32018d9821f404788fb6b33f0dd4973bd407191d48bf6451aaf8b3a9d4"} Jan 20 17:12:09 crc kubenswrapper[4558]: I0120 17:12:09.160089 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:12:09 crc kubenswrapper[4558]: I0120 17:12:09.164755 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:12:09 crc kubenswrapper[4558]: I0120 17:12:09.166641 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"rabbitmq-default-user" Jan 20 17:12:09 crc kubenswrapper[4558]: I0120 17:12:09.167003 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"rabbitmq-plugins-conf" Jan 20 17:12:09 crc kubenswrapper[4558]: I0120 17:12:09.167004 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-rabbitmq-svc" Jan 20 17:12:09 crc kubenswrapper[4558]: I0120 17:12:09.167071 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"rabbitmq-server-dockercfg-dlh9d" Jan 20 17:12:09 crc kubenswrapper[4558]: I0120 17:12:09.167241 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"rabbitmq-erlang-cookie" Jan 20 17:12:09 crc kubenswrapper[4558]: I0120 17:12:09.171007 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:12:09 crc kubenswrapper[4558]: I0120 17:12:09.173200 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"rabbitmq-config-data" Jan 20 17:12:09 crc kubenswrapper[4558]: I0120 17:12:09.173426 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"rabbitmq-server-conf" Jan 20 17:12:09 crc kubenswrapper[4558]: I0120 17:12:09.334467 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/fd2edfa4-790f-49d7-9e32-29571e490aaf-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"fd2edfa4-790f-49d7-9e32-29571e490aaf\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:12:09 crc kubenswrapper[4558]: I0120 17:12:09.334519 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/fd2edfa4-790f-49d7-9e32-29571e490aaf-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"fd2edfa4-790f-49d7-9e32-29571e490aaf\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:12:09 crc kubenswrapper[4558]: I0120 17:12:09.334557 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/fd2edfa4-790f-49d7-9e32-29571e490aaf-pod-info\") pod \"rabbitmq-server-0\" (UID: \"fd2edfa4-790f-49d7-9e32-29571e490aaf\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:12:09 crc kubenswrapper[4558]: I0120 17:12:09.334581 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"rabbitmq-server-0\" (UID: \"fd2edfa4-790f-49d7-9e32-29571e490aaf\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:12:09 crc kubenswrapper[4558]: I0120 17:12:09.334603 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/fd2edfa4-790f-49d7-9e32-29571e490aaf-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"fd2edfa4-790f-49d7-9e32-29571e490aaf\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:12:09 crc kubenswrapper[4558]: I0120 17:12:09.334752 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ftjq8\" (UniqueName: \"kubernetes.io/projected/fd2edfa4-790f-49d7-9e32-29571e490aaf-kube-api-access-ftjq8\") pod \"rabbitmq-server-0\" (UID: \"fd2edfa4-790f-49d7-9e32-29571e490aaf\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:12:09 crc kubenswrapper[4558]: I0120 17:12:09.334790 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/fd2edfa4-790f-49d7-9e32-29571e490aaf-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"fd2edfa4-790f-49d7-9e32-29571e490aaf\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:12:09 crc kubenswrapper[4558]: I0120 17:12:09.334834 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fd2edfa4-790f-49d7-9e32-29571e490aaf-config-data\") pod \"rabbitmq-server-0\" (UID: \"fd2edfa4-790f-49d7-9e32-29571e490aaf\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:12:09 crc kubenswrapper[4558]: I0120 17:12:09.334863 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/fd2edfa4-790f-49d7-9e32-29571e490aaf-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"fd2edfa4-790f-49d7-9e32-29571e490aaf\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:12:09 crc kubenswrapper[4558]: I0120 17:12:09.335090 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/fd2edfa4-790f-49d7-9e32-29571e490aaf-server-conf\") pod \"rabbitmq-server-0\" (UID: \"fd2edfa4-790f-49d7-9e32-29571e490aaf\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:12:09 crc kubenswrapper[4558]: I0120 17:12:09.335223 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/fd2edfa4-790f-49d7-9e32-29571e490aaf-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"fd2edfa4-790f-49d7-9e32-29571e490aaf\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:12:09 crc kubenswrapper[4558]: I0120 17:12:09.437671 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ftjq8\" (UniqueName: \"kubernetes.io/projected/fd2edfa4-790f-49d7-9e32-29571e490aaf-kube-api-access-ftjq8\") pod \"rabbitmq-server-0\" (UID: \"fd2edfa4-790f-49d7-9e32-29571e490aaf\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:12:09 crc kubenswrapper[4558]: I0120 17:12:09.437750 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/fd2edfa4-790f-49d7-9e32-29571e490aaf-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"fd2edfa4-790f-49d7-9e32-29571e490aaf\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:12:09 crc kubenswrapper[4558]: I0120 17:12:09.437809 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fd2edfa4-790f-49d7-9e32-29571e490aaf-config-data\") pod \"rabbitmq-server-0\" (UID: \"fd2edfa4-790f-49d7-9e32-29571e490aaf\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:12:09 crc kubenswrapper[4558]: I0120 17:12:09.437853 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/fd2edfa4-790f-49d7-9e32-29571e490aaf-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"fd2edfa4-790f-49d7-9e32-29571e490aaf\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:12:09 crc kubenswrapper[4558]: I0120 17:12:09.437876 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/fd2edfa4-790f-49d7-9e32-29571e490aaf-server-conf\") pod \"rabbitmq-server-0\" (UID: \"fd2edfa4-790f-49d7-9e32-29571e490aaf\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:12:09 crc kubenswrapper[4558]: I0120 17:12:09.437928 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/fd2edfa4-790f-49d7-9e32-29571e490aaf-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"fd2edfa4-790f-49d7-9e32-29571e490aaf\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:12:09 crc kubenswrapper[4558]: I0120 17:12:09.437964 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/fd2edfa4-790f-49d7-9e32-29571e490aaf-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"fd2edfa4-790f-49d7-9e32-29571e490aaf\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:12:09 crc kubenswrapper[4558]: I0120 17:12:09.437984 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/fd2edfa4-790f-49d7-9e32-29571e490aaf-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"fd2edfa4-790f-49d7-9e32-29571e490aaf\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:12:09 crc kubenswrapper[4558]: I0120 17:12:09.438018 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/fd2edfa4-790f-49d7-9e32-29571e490aaf-pod-info\") pod \"rabbitmq-server-0\" (UID: \"fd2edfa4-790f-49d7-9e32-29571e490aaf\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:12:09 crc kubenswrapper[4558]: I0120 17:12:09.438038 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"rabbitmq-server-0\" (UID: \"fd2edfa4-790f-49d7-9e32-29571e490aaf\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:12:09 crc kubenswrapper[4558]: I0120 17:12:09.438053 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/fd2edfa4-790f-49d7-9e32-29571e490aaf-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: 
\"fd2edfa4-790f-49d7-9e32-29571e490aaf\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:12:09 crc kubenswrapper[4558]: I0120 17:12:09.438542 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/fd2edfa4-790f-49d7-9e32-29571e490aaf-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"fd2edfa4-790f-49d7-9e32-29571e490aaf\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:12:09 crc kubenswrapper[4558]: I0120 17:12:09.438910 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"rabbitmq-server-0\" (UID: \"fd2edfa4-790f-49d7-9e32-29571e490aaf\") device mount path \"/mnt/openstack/pv14\"" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:12:09 crc kubenswrapper[4558]: I0120 17:12:09.439319 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/fd2edfa4-790f-49d7-9e32-29571e490aaf-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"fd2edfa4-790f-49d7-9e32-29571e490aaf\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:12:09 crc kubenswrapper[4558]: I0120 17:12:09.440202 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/fd2edfa4-790f-49d7-9e32-29571e490aaf-server-conf\") pod \"rabbitmq-server-0\" (UID: \"fd2edfa4-790f-49d7-9e32-29571e490aaf\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:12:09 crc kubenswrapper[4558]: I0120 17:12:09.440383 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/fd2edfa4-790f-49d7-9e32-29571e490aaf-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"fd2edfa4-790f-49d7-9e32-29571e490aaf\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:12:09 crc kubenswrapper[4558]: I0120 17:12:09.444325 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fd2edfa4-790f-49d7-9e32-29571e490aaf-config-data\") pod \"rabbitmq-server-0\" (UID: \"fd2edfa4-790f-49d7-9e32-29571e490aaf\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:12:09 crc kubenswrapper[4558]: I0120 17:12:09.453664 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/fd2edfa4-790f-49d7-9e32-29571e490aaf-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"fd2edfa4-790f-49d7-9e32-29571e490aaf\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:12:09 crc kubenswrapper[4558]: I0120 17:12:09.453861 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/fd2edfa4-790f-49d7-9e32-29571e490aaf-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"fd2edfa4-790f-49d7-9e32-29571e490aaf\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:12:09 crc kubenswrapper[4558]: I0120 17:12:09.454034 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/fd2edfa4-790f-49d7-9e32-29571e490aaf-pod-info\") pod \"rabbitmq-server-0\" (UID: \"fd2edfa4-790f-49d7-9e32-29571e490aaf\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:12:09 crc kubenswrapper[4558]: I0120 17:12:09.458728 4558 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-ftjq8\" (UniqueName: \"kubernetes.io/projected/fd2edfa4-790f-49d7-9e32-29571e490aaf-kube-api-access-ftjq8\") pod \"rabbitmq-server-0\" (UID: \"fd2edfa4-790f-49d7-9e32-29571e490aaf\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:12:09 crc kubenswrapper[4558]: I0120 17:12:09.458907 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/fd2edfa4-790f-49d7-9e32-29571e490aaf-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"fd2edfa4-790f-49d7-9e32-29571e490aaf\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:12:09 crc kubenswrapper[4558]: I0120 17:12:09.475263 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"rabbitmq-server-0\" (UID: \"fd2edfa4-790f-49d7-9e32-29571e490aaf\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:12:09 crc kubenswrapper[4558]: I0120 17:12:09.491191 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:12:10 crc kubenswrapper[4558]: I0120 17:12:10.612383 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:12:10 crc kubenswrapper[4558]: I0120 17:12:10.613654 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:12:10 crc kubenswrapper[4558]: I0120 17:12:10.625112 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"galera-openstack-dockercfg-h49tc" Jan 20 17:12:10 crc kubenswrapper[4558]: I0120 17:12:10.626298 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-galera-openstack-svc" Jan 20 17:12:10 crc kubenswrapper[4558]: I0120 17:12:10.626565 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-config-data" Jan 20 17:12:10 crc kubenswrapper[4558]: I0120 17:12:10.626675 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-scripts" Jan 20 17:12:10 crc kubenswrapper[4558]: I0120 17:12:10.630572 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"combined-ca-bundle" Jan 20 17:12:10 crc kubenswrapper[4558]: I0120 17:12:10.632325 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:12:10 crc kubenswrapper[4558]: I0120 17:12:10.772256 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc-kolla-config\") pod \"openstack-galera-0\" (UID: \"eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:12:10 crc kubenswrapper[4558]: I0120 17:12:10.772330 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:12:10 crc kubenswrapper[4558]: I0120 17:12:10.772381 4558 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc-config-data-generated\") pod \"openstack-galera-0\" (UID: \"eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:12:10 crc kubenswrapper[4558]: I0120 17:12:10.772441 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-55bnq\" (UniqueName: \"kubernetes.io/projected/eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc-kube-api-access-55bnq\") pod \"openstack-galera-0\" (UID: \"eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:12:10 crc kubenswrapper[4558]: I0120 17:12:10.772475 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"openstack-galera-0\" (UID: \"eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:12:10 crc kubenswrapper[4558]: I0120 17:12:10.772526 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:12:10 crc kubenswrapper[4558]: I0120 17:12:10.772548 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc-config-data-default\") pod \"openstack-galera-0\" (UID: \"eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:12:10 crc kubenswrapper[4558]: I0120 17:12:10.772617 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc-operator-scripts\") pod \"openstack-galera-0\" (UID: \"eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:12:10 crc kubenswrapper[4558]: I0120 17:12:10.874058 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc-config-data-generated\") pod \"openstack-galera-0\" (UID: \"eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:12:10 crc kubenswrapper[4558]: I0120 17:12:10.874119 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-55bnq\" (UniqueName: \"kubernetes.io/projected/eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc-kube-api-access-55bnq\") pod \"openstack-galera-0\" (UID: \"eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:12:10 crc kubenswrapper[4558]: I0120 17:12:10.874151 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"openstack-galera-0\" (UID: \"eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:12:10 crc kubenswrapper[4558]: I0120 17:12:10.874330 4558 
operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"openstack-galera-0\" (UID: \"eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc\") device mount path \"/mnt/openstack/pv05\"" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:12:10 crc kubenswrapper[4558]: I0120 17:12:10.874656 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:12:10 crc kubenswrapper[4558]: I0120 17:12:10.874711 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc-config-data-default\") pod \"openstack-galera-0\" (UID: \"eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:12:10 crc kubenswrapper[4558]: I0120 17:12:10.874724 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc-config-data-generated\") pod \"openstack-galera-0\" (UID: \"eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:12:10 crc kubenswrapper[4558]: I0120 17:12:10.874820 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc-operator-scripts\") pod \"openstack-galera-0\" (UID: \"eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:12:10 crc kubenswrapper[4558]: I0120 17:12:10.875042 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc-kolla-config\") pod \"openstack-galera-0\" (UID: \"eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:12:10 crc kubenswrapper[4558]: I0120 17:12:10.875081 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:12:10 crc kubenswrapper[4558]: I0120 17:12:10.875553 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc-config-data-default\") pod \"openstack-galera-0\" (UID: \"eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:12:10 crc kubenswrapper[4558]: I0120 17:12:10.875790 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc-kolla-config\") pod \"openstack-galera-0\" (UID: \"eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:12:10 crc kubenswrapper[4558]: I0120 17:12:10.876305 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" 
(UniqueName: \"kubernetes.io/configmap/eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc-operator-scripts\") pod \"openstack-galera-0\" (UID: \"eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:12:10 crc kubenswrapper[4558]: I0120 17:12:10.882124 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:12:10 crc kubenswrapper[4558]: I0120 17:12:10.885609 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:12:10 crc kubenswrapper[4558]: I0120 17:12:10.897760 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-55bnq\" (UniqueName: \"kubernetes.io/projected/eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc-kube-api-access-55bnq\") pod \"openstack-galera-0\" (UID: \"eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:12:10 crc kubenswrapper[4558]: I0120 17:12:10.925266 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"openstack-galera-0\" (UID: \"eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:12:10 crc kubenswrapper[4558]: I0120 17:12:10.940518 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:12:11 crc kubenswrapper[4558]: I0120 17:12:11.036641 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:12:11 crc kubenswrapper[4558]: I0120 17:12:11.037653 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:12:11 crc kubenswrapper[4558]: I0120 17:12:11.039402 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"memcached-config-data" Jan 20 17:12:11 crc kubenswrapper[4558]: I0120 17:12:11.039517 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-memcached-svc" Jan 20 17:12:11 crc kubenswrapper[4558]: I0120 17:12:11.040897 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"memcached-memcached-dockercfg-qr4h5" Jan 20 17:12:11 crc kubenswrapper[4558]: I0120 17:12:11.052490 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:12:11 crc kubenswrapper[4558]: I0120 17:12:11.178878 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/fc9b3fed-9730-4199-b074-3f01cdc1b7ce-memcached-tls-certs\") pod \"memcached-0\" (UID: \"fc9b3fed-9730-4199-b074-3f01cdc1b7ce\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:12:11 crc kubenswrapper[4558]: I0120 17:12:11.179263 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c5mg8\" (UniqueName: \"kubernetes.io/projected/fc9b3fed-9730-4199-b074-3f01cdc1b7ce-kube-api-access-c5mg8\") pod \"memcached-0\" (UID: \"fc9b3fed-9730-4199-b074-3f01cdc1b7ce\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:12:11 crc kubenswrapper[4558]: I0120 17:12:11.179295 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/fc9b3fed-9730-4199-b074-3f01cdc1b7ce-kolla-config\") pod \"memcached-0\" (UID: \"fc9b3fed-9730-4199-b074-3f01cdc1b7ce\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:12:11 crc kubenswrapper[4558]: I0120 17:12:11.179365 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fc9b3fed-9730-4199-b074-3f01cdc1b7ce-config-data\") pod \"memcached-0\" (UID: \"fc9b3fed-9730-4199-b074-3f01cdc1b7ce\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:12:11 crc kubenswrapper[4558]: I0120 17:12:11.179479 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc9b3fed-9730-4199-b074-3f01cdc1b7ce-combined-ca-bundle\") pod \"memcached-0\" (UID: \"fc9b3fed-9730-4199-b074-3f01cdc1b7ce\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:12:11 crc kubenswrapper[4558]: I0120 17:12:11.280609 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc9b3fed-9730-4199-b074-3f01cdc1b7ce-combined-ca-bundle\") pod \"memcached-0\" (UID: \"fc9b3fed-9730-4199-b074-3f01cdc1b7ce\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:12:11 crc kubenswrapper[4558]: I0120 17:12:11.280691 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/fc9b3fed-9730-4199-b074-3f01cdc1b7ce-memcached-tls-certs\") pod \"memcached-0\" (UID: \"fc9b3fed-9730-4199-b074-3f01cdc1b7ce\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:12:11 crc kubenswrapper[4558]: I0120 17:12:11.280736 4558 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c5mg8\" (UniqueName: \"kubernetes.io/projected/fc9b3fed-9730-4199-b074-3f01cdc1b7ce-kube-api-access-c5mg8\") pod \"memcached-0\" (UID: \"fc9b3fed-9730-4199-b074-3f01cdc1b7ce\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:12:11 crc kubenswrapper[4558]: I0120 17:12:11.280755 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/fc9b3fed-9730-4199-b074-3f01cdc1b7ce-kolla-config\") pod \"memcached-0\" (UID: \"fc9b3fed-9730-4199-b074-3f01cdc1b7ce\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:12:11 crc kubenswrapper[4558]: I0120 17:12:11.280769 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fc9b3fed-9730-4199-b074-3f01cdc1b7ce-config-data\") pod \"memcached-0\" (UID: \"fc9b3fed-9730-4199-b074-3f01cdc1b7ce\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:12:11 crc kubenswrapper[4558]: I0120 17:12:11.281909 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fc9b3fed-9730-4199-b074-3f01cdc1b7ce-config-data\") pod \"memcached-0\" (UID: \"fc9b3fed-9730-4199-b074-3f01cdc1b7ce\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:12:11 crc kubenswrapper[4558]: I0120 17:12:11.281943 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/fc9b3fed-9730-4199-b074-3f01cdc1b7ce-kolla-config\") pod \"memcached-0\" (UID: \"fc9b3fed-9730-4199-b074-3f01cdc1b7ce\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:12:11 crc kubenswrapper[4558]: I0120 17:12:11.292568 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/fc9b3fed-9730-4199-b074-3f01cdc1b7ce-memcached-tls-certs\") pod \"memcached-0\" (UID: \"fc9b3fed-9730-4199-b074-3f01cdc1b7ce\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:12:11 crc kubenswrapper[4558]: I0120 17:12:11.292608 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc9b3fed-9730-4199-b074-3f01cdc1b7ce-combined-ca-bundle\") pod \"memcached-0\" (UID: \"fc9b3fed-9730-4199-b074-3f01cdc1b7ce\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:12:11 crc kubenswrapper[4558]: I0120 17:12:11.302357 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c5mg8\" (UniqueName: \"kubernetes.io/projected/fc9b3fed-9730-4199-b074-3f01cdc1b7ce-kube-api-access-c5mg8\") pod \"memcached-0\" (UID: \"fc9b3fed-9730-4199-b074-3f01cdc1b7ce\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:12:11 crc kubenswrapper[4558]: I0120 17:12:11.354677 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:12:12 crc kubenswrapper[4558]: I0120 17:12:12.703128 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:12:12 crc kubenswrapper[4558]: I0120 17:12:12.704088 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:12:12 crc kubenswrapper[4558]: I0120 17:12:12.706527 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"telemetry-ceilometer-dockercfg-dqsfb" Jan 20 17:12:12 crc kubenswrapper[4558]: I0120 17:12:12.718023 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k5w95\" (UniqueName: \"kubernetes.io/projected/9d1e8534-0277-43f6-a3be-2c3985853e6e-kube-api-access-k5w95\") pod \"kube-state-metrics-0\" (UID: \"9d1e8534-0277-43f6-a3be-2c3985853e6e\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:12:12 crc kubenswrapper[4558]: I0120 17:12:12.724802 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:12:12 crc kubenswrapper[4558]: I0120 17:12:12.786774 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:12:12 crc kubenswrapper[4558]: I0120 17:12:12.819241 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k5w95\" (UniqueName: \"kubernetes.io/projected/9d1e8534-0277-43f6-a3be-2c3985853e6e-kube-api-access-k5w95\") pod \"kube-state-metrics-0\" (UID: \"9d1e8534-0277-43f6-a3be-2c3985853e6e\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:12:12 crc kubenswrapper[4558]: I0120 17:12:12.838284 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k5w95\" (UniqueName: \"kubernetes.io/projected/9d1e8534-0277-43f6-a3be-2c3985853e6e-kube-api-access-k5w95\") pod \"kube-state-metrics-0\" (UID: \"9d1e8534-0277-43f6-a3be-2c3985853e6e\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:12:13 crc kubenswrapper[4558]: I0120 17:12:13.025443 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:12:15 crc kubenswrapper[4558]: I0120 17:12:15.533551 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-0" event={"ID":"fd2edfa4-790f-49d7-9e32-29571e490aaf","Type":"ContainerStarted","Data":"aabe31337edbbbf211a17606aae464568db3b6b1f67edf5ee22c95c8f40bfe41"} Jan 20 17:12:15 crc kubenswrapper[4558]: I0120 17:12:15.651084 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:12:15 crc kubenswrapper[4558]: I0120 17:12:15.790632 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:12:15 crc kubenswrapper[4558]: W0120 17:12:15.805867 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podeed05f95_b1a3_44b9_bb2c_e7b01d91d8cc.slice/crio-d37c6a4934e531436a7197ab1418cec7c93e40413cab503ad229a613d4fe2a94 WatchSource:0}: Error finding container d37c6a4934e531436a7197ab1418cec7c93e40413cab503ad229a613d4fe2a94: Status 404 returned error can't find the container with id d37c6a4934e531436a7197ab1418cec7c93e40413cab503ad229a613d4fe2a94 Jan 20 17:12:15 crc kubenswrapper[4558]: I0120 17:12:15.807989 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:12:16 crc kubenswrapper[4558]: I0120 17:12:16.542771 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-59bdc8b94-rlprt" event={"ID":"0c2f4961-5cb9-4460-b8f2-ff20d5bafc08","Type":"ContainerStarted","Data":"3134cd3f26e04744586ac25e4742f6b00629925f99a213976ad98247e86418f8"} Jan 20 17:12:16 crc kubenswrapper[4558]: I0120 17:12:16.543097 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/observability-operator-59bdc8b94-rlprt" Jan 20 17:12:16 crc kubenswrapper[4558]: I0120 17:12:16.545094 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-558658b7b5-cpc8s" event={"ID":"42ac1cad-9697-4a87-974d-f1dfe31a3627","Type":"ContainerStarted","Data":"2eb5568173a2f8aa1d9f3b90655793df16171b1281710873efd4b2c85b96fa00"} Jan 20 17:12:16 crc kubenswrapper[4558]: I0120 17:12:16.546705 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-nfzlw" event={"ID":"fef8d546-d905-4701-ac1e-09cd4c4b1ed8","Type":"ContainerStarted","Data":"0f0c3f7f64f965e6abd21b0e60623ee5819fee82c6bc7049a8f81913ad13a93b"} Jan 20 17:12:16 crc kubenswrapper[4558]: I0120 17:12:16.548364 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"fc9b3fed-9730-4199-b074-3f01cdc1b7ce","Type":"ContainerStarted","Data":"e1569c77aa4164d84391079102afae404d6f206b7e90d3e21dd30d7b3d65062f"} Jan 20 17:12:16 crc kubenswrapper[4558]: I0120 17:12:16.548420 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"fc9b3fed-9730-4199-b074-3f01cdc1b7ce","Type":"ContainerStarted","Data":"cac139f685c54cd4045122f570a333d81828bb041bd0a2b6deaf3f56cd1930fd"} Jan 20 17:12:16 crc kubenswrapper[4558]: I0120 17:12:16.548448 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:12:16 crc kubenswrapper[4558]: I0120 17:12:16.549703 4558 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openstack-kuttl-tests/rabbitmq-server-0" event={"ID":"fd2edfa4-790f-49d7-9e32-29571e490aaf","Type":"ContainerStarted","Data":"428595a29d94d4ba528e047a5b7e52f2fb67f15ff604242487cb42e3df5dbd56"} Jan 20 17:12:16 crc kubenswrapper[4558]: I0120 17:12:16.550938 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-558658b7b5-hhs5k" event={"ID":"8c562f2d-0fe1-4a6a-9664-00e2cad8c416","Type":"ContainerStarted","Data":"b687dc3bbf44c6ff0ab042fedfc77d973e0a60f4407a6c27d524e10c3ffeb053"} Jan 20 17:12:16 crc kubenswrapper[4558]: I0120 17:12:16.552006 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5bf474d74f-6nh6n" event={"ID":"6f4871c7-a64d-4d49-b830-66b6718d608a","Type":"ContainerStarted","Data":"27c94701e857761ffaee7c10fe50dc9aa2265d4c4196bdbbfdaecd3bbc8fcc18"} Jan 20 17:12:16 crc kubenswrapper[4558]: I0120 17:12:16.552125 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/perses-operator-5bf474d74f-6nh6n" Jan 20 17:12:16 crc kubenswrapper[4558]: I0120 17:12:16.553082 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"9d1e8534-0277-43f6-a3be-2c3985853e6e","Type":"ContainerStarted","Data":"dcb0fe6875233f2fdf58a3e6c4e56531163a5e8c5098db78a1bf54da8f0a72d8"} Jan 20 17:12:16 crc kubenswrapper[4558]: I0120 17:12:16.553114 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"9d1e8534-0277-43f6-a3be-2c3985853e6e","Type":"ContainerStarted","Data":"8de19a498ecd504f295b063b1f621f22dacf5039e0bb2f6cef8d786cdb21428b"} Jan 20 17:12:16 crc kubenswrapper[4558]: I0120 17:12:16.553213 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:12:16 crc kubenswrapper[4558]: I0120 17:12:16.554108 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc","Type":"ContainerStarted","Data":"ef4526e531f7d5807e6513289cdeac244b91e66048cb4682c4a8bb251a104b9d"} Jan 20 17:12:16 crc kubenswrapper[4558]: I0120 17:12:16.554134 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc","Type":"ContainerStarted","Data":"d37c6a4934e531436a7197ab1418cec7c93e40413cab503ad229a613d4fe2a94"} Jan 20 17:12:16 crc kubenswrapper[4558]: I0120 17:12:16.585698 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-558658b7b5-cpc8s" podStartSLOduration=2.673061846 podStartE2EDuration="10.585686187s" podCreationTimestamp="2026-01-20 17:12:06 +0000 UTC" firstStartedPulling="2026-01-20 17:12:07.194903728 +0000 UTC m=+1820.955241695" lastFinishedPulling="2026-01-20 17:12:15.107528069 +0000 UTC m=+1828.867866036" observedRunningTime="2026-01-20 17:12:16.582992912 +0000 UTC m=+1830.343330879" watchObservedRunningTime="2026-01-20 17:12:16.585686187 +0000 UTC m=+1830.346024155" Jan 20 17:12:16 crc kubenswrapper[4558]: I0120 17:12:16.585794 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/observability-operator-59bdc8b94-rlprt" podStartSLOduration=2.655534557 podStartE2EDuration="10.585789531s" podCreationTimestamp="2026-01-20 17:12:06 +0000 UTC" firstStartedPulling="2026-01-20 
17:12:07.313081966 +0000 UTC m=+1821.073419933" lastFinishedPulling="2026-01-20 17:12:15.24333694 +0000 UTC m=+1829.003674907" observedRunningTime="2026-01-20 17:12:16.567283702 +0000 UTC m=+1830.327621669" watchObservedRunningTime="2026-01-20 17:12:16.585789531 +0000 UTC m=+1830.346127498" Jan 20 17:12:16 crc kubenswrapper[4558]: I0120 17:12:16.593595 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/observability-operator-59bdc8b94-rlprt" Jan 20 17:12:16 crc kubenswrapper[4558]: I0120 17:12:16.629805 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/memcached-0" podStartSLOduration=5.6297846719999995 podStartE2EDuration="5.629784672s" podCreationTimestamp="2026-01-20 17:12:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:12:16.628700433 +0000 UTC m=+1830.389038400" watchObservedRunningTime="2026-01-20 17:12:16.629784672 +0000 UTC m=+1830.390122639" Jan 20 17:12:16 crc kubenswrapper[4558]: I0120 17:12:16.644705 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-nfzlw" podStartSLOduration=2.486823435 podStartE2EDuration="10.644687987s" podCreationTimestamp="2026-01-20 17:12:06 +0000 UTC" firstStartedPulling="2026-01-20 17:12:07.031057955 +0000 UTC m=+1820.791395922" lastFinishedPulling="2026-01-20 17:12:15.188922507 +0000 UTC m=+1828.949260474" observedRunningTime="2026-01-20 17:12:16.644050307 +0000 UTC m=+1830.404388274" watchObservedRunningTime="2026-01-20 17:12:16.644687987 +0000 UTC m=+1830.405025944" Jan 20 17:12:16 crc kubenswrapper[4558]: I0120 17:12:16.730928 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/perses-operator-5bf474d74f-6nh6n" podStartSLOduration=3.02237987 podStartE2EDuration="10.730909882s" podCreationTimestamp="2026-01-20 17:12:06 +0000 UTC" firstStartedPulling="2026-01-20 17:12:07.483014675 +0000 UTC m=+1821.243352643" lastFinishedPulling="2026-01-20 17:12:15.191544688 +0000 UTC m=+1828.951882655" observedRunningTime="2026-01-20 17:12:16.728340921 +0000 UTC m=+1830.488678888" watchObservedRunningTime="2026-01-20 17:12:16.730909882 +0000 UTC m=+1830.491247849" Jan 20 17:12:16 crc kubenswrapper[4558]: I0120 17:12:16.743503 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/kube-state-metrics-0" podStartSLOduration=4.477724501 podStartE2EDuration="4.743483877s" podCreationTimestamp="2026-01-20 17:12:12 +0000 UTC" firstStartedPulling="2026-01-20 17:12:15.669971244 +0000 UTC m=+1829.430309211" lastFinishedPulling="2026-01-20 17:12:15.93573062 +0000 UTC m=+1829.696068587" observedRunningTime="2026-01-20 17:12:16.739838983 +0000 UTC m=+1830.500176950" watchObservedRunningTime="2026-01-20 17:12:16.743483877 +0000 UTC m=+1830.503821845" Jan 20 17:12:16 crc kubenswrapper[4558]: I0120 17:12:16.758561 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-558658b7b5-hhs5k" podStartSLOduration=2.619811172 podStartE2EDuration="10.75854063s" podCreationTimestamp="2026-01-20 17:12:06 +0000 UTC" firstStartedPulling="2026-01-20 17:12:07.031295562 +0000 UTC m=+1820.791633529" lastFinishedPulling="2026-01-20 17:12:15.170025021 +0000 UTC m=+1828.930362987" observedRunningTime="2026-01-20 17:12:16.753042471 +0000 UTC 
m=+1830.513380439" watchObservedRunningTime="2026-01-20 17:12:16.75854063 +0000 UTC m=+1830.518878587" Jan 20 17:12:16 crc kubenswrapper[4558]: I0120 17:12:16.869947 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:12:16 crc kubenswrapper[4558]: I0120 17:12:16.871341 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:12:16 crc kubenswrapper[4558]: I0120 17:12:16.873576 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ovncluster-ovndbcluster-nb-dockercfg-s4wgn" Jan 20 17:12:16 crc kubenswrapper[4558]: I0120 17:12:16.873611 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ovn-metrics" Jan 20 17:12:16 crc kubenswrapper[4558]: I0120 17:12:16.873618 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovndbcluster-nb-scripts" Jan 20 17:12:16 crc kubenswrapper[4558]: I0120 17:12:16.873737 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovndbcluster-nb-config" Jan 20 17:12:16 crc kubenswrapper[4558]: I0120 17:12:16.876764 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ovndbcluster-nb-ovndbs" Jan 20 17:12:16 crc kubenswrapper[4558]: I0120 17:12:16.882983 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:12:17 crc kubenswrapper[4558]: I0120 17:12:17.008946 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zhx8v\" (UniqueName: \"kubernetes.io/projected/ef3c342e-679b-4a2e-94df-82adf4200290-kube-api-access-zhx8v\") pod \"ovsdbserver-nb-0\" (UID: \"ef3c342e-679b-4a2e-94df-82adf4200290\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:12:17 crc kubenswrapper[4558]: I0120 17:12:17.008989 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ef3c342e-679b-4a2e-94df-82adf4200290-config\") pod \"ovsdbserver-nb-0\" (UID: \"ef3c342e-679b-4a2e-94df-82adf4200290\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:12:17 crc kubenswrapper[4558]: I0120 17:12:17.009010 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-nb-0\" (UID: \"ef3c342e-679b-4a2e-94df-82adf4200290\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:12:17 crc kubenswrapper[4558]: I0120 17:12:17.009040 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ef3c342e-679b-4a2e-94df-82adf4200290-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"ef3c342e-679b-4a2e-94df-82adf4200290\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:12:17 crc kubenswrapper[4558]: I0120 17:12:17.009249 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/ef3c342e-679b-4a2e-94df-82adf4200290-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"ef3c342e-679b-4a2e-94df-82adf4200290\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:12:17 crc kubenswrapper[4558]: I0120 17:12:17.009318 
4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/ef3c342e-679b-4a2e-94df-82adf4200290-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"ef3c342e-679b-4a2e-94df-82adf4200290\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:12:17 crc kubenswrapper[4558]: I0120 17:12:17.009524 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef3c342e-679b-4a2e-94df-82adf4200290-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"ef3c342e-679b-4a2e-94df-82adf4200290\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:12:17 crc kubenswrapper[4558]: I0120 17:12:17.009700 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/ef3c342e-679b-4a2e-94df-82adf4200290-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"ef3c342e-679b-4a2e-94df-82adf4200290\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:12:17 crc kubenswrapper[4558]: I0120 17:12:17.112081 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef3c342e-679b-4a2e-94df-82adf4200290-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"ef3c342e-679b-4a2e-94df-82adf4200290\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:12:17 crc kubenswrapper[4558]: I0120 17:12:17.112209 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/ef3c342e-679b-4a2e-94df-82adf4200290-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"ef3c342e-679b-4a2e-94df-82adf4200290\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:12:17 crc kubenswrapper[4558]: I0120 17:12:17.112307 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zhx8v\" (UniqueName: \"kubernetes.io/projected/ef3c342e-679b-4a2e-94df-82adf4200290-kube-api-access-zhx8v\") pod \"ovsdbserver-nb-0\" (UID: \"ef3c342e-679b-4a2e-94df-82adf4200290\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:12:17 crc kubenswrapper[4558]: I0120 17:12:17.112332 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ef3c342e-679b-4a2e-94df-82adf4200290-config\") pod \"ovsdbserver-nb-0\" (UID: \"ef3c342e-679b-4a2e-94df-82adf4200290\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:12:17 crc kubenswrapper[4558]: I0120 17:12:17.112353 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-nb-0\" (UID: \"ef3c342e-679b-4a2e-94df-82adf4200290\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:12:17 crc kubenswrapper[4558]: I0120 17:12:17.112383 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ef3c342e-679b-4a2e-94df-82adf4200290-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"ef3c342e-679b-4a2e-94df-82adf4200290\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:12:17 crc kubenswrapper[4558]: I0120 17:12:17.112414 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/ef3c342e-679b-4a2e-94df-82adf4200290-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"ef3c342e-679b-4a2e-94df-82adf4200290\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:12:17 crc kubenswrapper[4558]: I0120 17:12:17.112446 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/ef3c342e-679b-4a2e-94df-82adf4200290-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"ef3c342e-679b-4a2e-94df-82adf4200290\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:12:17 crc kubenswrapper[4558]: I0120 17:12:17.122268 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/ef3c342e-679b-4a2e-94df-82adf4200290-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"ef3c342e-679b-4a2e-94df-82adf4200290\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:12:17 crc kubenswrapper[4558]: I0120 17:12:17.124068 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ef3c342e-679b-4a2e-94df-82adf4200290-config\") pod \"ovsdbserver-nb-0\" (UID: \"ef3c342e-679b-4a2e-94df-82adf4200290\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:12:17 crc kubenswrapper[4558]: I0120 17:12:17.124355 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/ef3c342e-679b-4a2e-94df-82adf4200290-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"ef3c342e-679b-4a2e-94df-82adf4200290\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:12:17 crc kubenswrapper[4558]: I0120 17:12:17.125362 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ef3c342e-679b-4a2e-94df-82adf4200290-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"ef3c342e-679b-4a2e-94df-82adf4200290\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:12:17 crc kubenswrapper[4558]: I0120 17:12:17.125586 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-nb-0\" (UID: \"ef3c342e-679b-4a2e-94df-82adf4200290\") device mount path \"/mnt/openstack/pv06\"" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:12:17 crc kubenswrapper[4558]: I0120 17:12:17.126799 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef3c342e-679b-4a2e-94df-82adf4200290-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"ef3c342e-679b-4a2e-94df-82adf4200290\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:12:17 crc kubenswrapper[4558]: I0120 17:12:17.129993 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/ef3c342e-679b-4a2e-94df-82adf4200290-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"ef3c342e-679b-4a2e-94df-82adf4200290\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:12:17 crc kubenswrapper[4558]: I0120 17:12:17.142907 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zhx8v\" (UniqueName: \"kubernetes.io/projected/ef3c342e-679b-4a2e-94df-82adf4200290-kube-api-access-zhx8v\") pod \"ovsdbserver-nb-0\" (UID: \"ef3c342e-679b-4a2e-94df-82adf4200290\") " 
pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:12:17 crc kubenswrapper[4558]: I0120 17:12:17.146735 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-nb-0\" (UID: \"ef3c342e-679b-4a2e-94df-82adf4200290\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:12:17 crc kubenswrapper[4558]: I0120 17:12:17.187104 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:12:17 crc kubenswrapper[4558]: I0120 17:12:17.631264 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:12:17 crc kubenswrapper[4558]: W0120 17:12:17.635409 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podef3c342e_679b_4a2e_94df_82adf4200290.slice/crio-f06caed6fd4ba7ff3885c96d3a4bf10f618cb49633d16f2b3d487a7de7bcfb65 WatchSource:0}: Error finding container f06caed6fd4ba7ff3885c96d3a4bf10f618cb49633d16f2b3d487a7de7bcfb65: Status 404 returned error can't find the container with id f06caed6fd4ba7ff3885c96d3a4bf10f618cb49633d16f2b3d487a7de7bcfb65 Jan 20 17:12:18 crc kubenswrapper[4558]: I0120 17:12:18.576908 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"ef3c342e-679b-4a2e-94df-82adf4200290","Type":"ContainerStarted","Data":"8e3ee6cc7b998c32fac77b9ad7200546c9a3386eaad38097f2d7e10239440726"} Jan 20 17:12:18 crc kubenswrapper[4558]: I0120 17:12:18.577308 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"ef3c342e-679b-4a2e-94df-82adf4200290","Type":"ContainerStarted","Data":"33152c6067fbcddb0b361284c671bb49a76a4cb01da62629d12aee5d3c00f3dc"} Jan 20 17:12:18 crc kubenswrapper[4558]: I0120 17:12:18.577322 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"ef3c342e-679b-4a2e-94df-82adf4200290","Type":"ContainerStarted","Data":"f06caed6fd4ba7ff3885c96d3a4bf10f618cb49633d16f2b3d487a7de7bcfb65"} Jan 20 17:12:18 crc kubenswrapper[4558]: I0120 17:12:18.595412 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ovsdbserver-nb-0" podStartSLOduration=3.595370672 podStartE2EDuration="3.595370672s" podCreationTimestamp="2026-01-20 17:12:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:12:18.590809625 +0000 UTC m=+1832.351147591" watchObservedRunningTime="2026-01-20 17:12:18.595370672 +0000 UTC m=+1832.355708639" Jan 20 17:12:19 crc kubenswrapper[4558]: I0120 17:12:19.046853 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:12:19 crc kubenswrapper[4558]: I0120 17:12:19.048345 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:12:19 crc kubenswrapper[4558]: I0120 17:12:19.052124 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovndbcluster-sb-scripts" Jan 20 17:12:19 crc kubenswrapper[4558]: I0120 17:12:19.053604 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ovncluster-ovndbcluster-sb-dockercfg-g7rnq" Jan 20 17:12:19 crc kubenswrapper[4558]: I0120 17:12:19.053763 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ovndbcluster-sb-ovndbs" Jan 20 17:12:19 crc kubenswrapper[4558]: I0120 17:12:19.053900 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovndbcluster-sb-config" Jan 20 17:12:19 crc kubenswrapper[4558]: I0120 17:12:19.057592 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:12:19 crc kubenswrapper[4558]: I0120 17:12:19.148015 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8635d21f-4563-42ce-a3ab-4c67653d4dee-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"8635d21f-4563-42ce-a3ab-4c67653d4dee\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:12:19 crc kubenswrapper[4558]: I0120 17:12:19.148132 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/8635d21f-4563-42ce-a3ab-4c67653d4dee-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"8635d21f-4563-42ce-a3ab-4c67653d4dee\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:12:19 crc kubenswrapper[4558]: I0120 17:12:19.148245 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8635d21f-4563-42ce-a3ab-4c67653d4dee-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"8635d21f-4563-42ce-a3ab-4c67653d4dee\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:12:19 crc kubenswrapper[4558]: I0120 17:12:19.148376 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8635d21f-4563-42ce-a3ab-4c67653d4dee-config\") pod \"ovsdbserver-sb-0\" (UID: \"8635d21f-4563-42ce-a3ab-4c67653d4dee\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:12:19 crc kubenswrapper[4558]: I0120 17:12:19.148467 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"ovsdbserver-sb-0\" (UID: \"8635d21f-4563-42ce-a3ab-4c67653d4dee\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:12:19 crc kubenswrapper[4558]: I0120 17:12:19.148549 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/8635d21f-4563-42ce-a3ab-4c67653d4dee-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"8635d21f-4563-42ce-a3ab-4c67653d4dee\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:12:19 crc kubenswrapper[4558]: I0120 17:12:19.148586 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/8635d21f-4563-42ce-a3ab-4c67653d4dee-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"8635d21f-4563-42ce-a3ab-4c67653d4dee\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:12:19 crc kubenswrapper[4558]: I0120 17:12:19.148625 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xx4lb\" (UniqueName: \"kubernetes.io/projected/8635d21f-4563-42ce-a3ab-4c67653d4dee-kube-api-access-xx4lb\") pod \"ovsdbserver-sb-0\" (UID: \"8635d21f-4563-42ce-a3ab-4c67653d4dee\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:12:19 crc kubenswrapper[4558]: I0120 17:12:19.249960 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/8635d21f-4563-42ce-a3ab-4c67653d4dee-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"8635d21f-4563-42ce-a3ab-4c67653d4dee\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:12:19 crc kubenswrapper[4558]: I0120 17:12:19.250038 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8635d21f-4563-42ce-a3ab-4c67653d4dee-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"8635d21f-4563-42ce-a3ab-4c67653d4dee\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:12:19 crc kubenswrapper[4558]: I0120 17:12:19.250074 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8635d21f-4563-42ce-a3ab-4c67653d4dee-config\") pod \"ovsdbserver-sb-0\" (UID: \"8635d21f-4563-42ce-a3ab-4c67653d4dee\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:12:19 crc kubenswrapper[4558]: I0120 17:12:19.250105 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"ovsdbserver-sb-0\" (UID: \"8635d21f-4563-42ce-a3ab-4c67653d4dee\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:12:19 crc kubenswrapper[4558]: I0120 17:12:19.250141 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/8635d21f-4563-42ce-a3ab-4c67653d4dee-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"8635d21f-4563-42ce-a3ab-4c67653d4dee\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:12:19 crc kubenswrapper[4558]: I0120 17:12:19.250177 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/8635d21f-4563-42ce-a3ab-4c67653d4dee-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"8635d21f-4563-42ce-a3ab-4c67653d4dee\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:12:19 crc kubenswrapper[4558]: I0120 17:12:19.250206 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xx4lb\" (UniqueName: \"kubernetes.io/projected/8635d21f-4563-42ce-a3ab-4c67653d4dee-kube-api-access-xx4lb\") pod \"ovsdbserver-sb-0\" (UID: \"8635d21f-4563-42ce-a3ab-4c67653d4dee\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:12:19 crc kubenswrapper[4558]: I0120 17:12:19.250237 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8635d21f-4563-42ce-a3ab-4c67653d4dee-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: 
\"8635d21f-4563-42ce-a3ab-4c67653d4dee\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:12:19 crc kubenswrapper[4558]: I0120 17:12:19.251261 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"ovsdbserver-sb-0\" (UID: \"8635d21f-4563-42ce-a3ab-4c67653d4dee\") device mount path \"/mnt/openstack/pv04\"" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:12:19 crc kubenswrapper[4558]: I0120 17:12:19.251306 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/8635d21f-4563-42ce-a3ab-4c67653d4dee-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"8635d21f-4563-42ce-a3ab-4c67653d4dee\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:12:19 crc kubenswrapper[4558]: I0120 17:12:19.251824 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8635d21f-4563-42ce-a3ab-4c67653d4dee-config\") pod \"ovsdbserver-sb-0\" (UID: \"8635d21f-4563-42ce-a3ab-4c67653d4dee\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:12:19 crc kubenswrapper[4558]: I0120 17:12:19.251824 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8635d21f-4563-42ce-a3ab-4c67653d4dee-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"8635d21f-4563-42ce-a3ab-4c67653d4dee\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:12:19 crc kubenswrapper[4558]: I0120 17:12:19.256414 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/8635d21f-4563-42ce-a3ab-4c67653d4dee-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"8635d21f-4563-42ce-a3ab-4c67653d4dee\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:12:19 crc kubenswrapper[4558]: I0120 17:12:19.259135 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8635d21f-4563-42ce-a3ab-4c67653d4dee-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"8635d21f-4563-42ce-a3ab-4c67653d4dee\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:12:19 crc kubenswrapper[4558]: I0120 17:12:19.263963 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/8635d21f-4563-42ce-a3ab-4c67653d4dee-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"8635d21f-4563-42ce-a3ab-4c67653d4dee\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:12:19 crc kubenswrapper[4558]: I0120 17:12:19.270724 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xx4lb\" (UniqueName: \"kubernetes.io/projected/8635d21f-4563-42ce-a3ab-4c67653d4dee-kube-api-access-xx4lb\") pod \"ovsdbserver-sb-0\" (UID: \"8635d21f-4563-42ce-a3ab-4c67653d4dee\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:12:19 crc kubenswrapper[4558]: I0120 17:12:19.275493 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"ovsdbserver-sb-0\" (UID: \"8635d21f-4563-42ce-a3ab-4c67653d4dee\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:12:19 crc kubenswrapper[4558]: I0120 17:12:19.365706 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:12:19 crc kubenswrapper[4558]: I0120 17:12:19.710253 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:12:20 crc kubenswrapper[4558]: I0120 17:12:20.187594 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:12:20 crc kubenswrapper[4558]: I0120 17:12:20.215255 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:12:20 crc kubenswrapper[4558]: I0120 17:12:20.589291 4558 generic.go:334] "Generic (PLEG): container finished" podID="eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc" containerID="ef4526e531f7d5807e6513289cdeac244b91e66048cb4682c4a8bb251a104b9d" exitCode=0 Jan 20 17:12:20 crc kubenswrapper[4558]: I0120 17:12:20.589366 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc","Type":"ContainerDied","Data":"ef4526e531f7d5807e6513289cdeac244b91e66048cb4682c4a8bb251a104b9d"} Jan 20 17:12:20 crc kubenswrapper[4558]: I0120 17:12:20.592847 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"8635d21f-4563-42ce-a3ab-4c67653d4dee","Type":"ContainerStarted","Data":"4d5b12f2c407a5d33bf76ac52ac15d916d0ebbaa277f7ad8812327a1b03f357f"} Jan 20 17:12:20 crc kubenswrapper[4558]: I0120 17:12:20.592928 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"8635d21f-4563-42ce-a3ab-4c67653d4dee","Type":"ContainerStarted","Data":"86bcdd3af3dc36b18b179d5cfb21ad164a541e5d79260e3b531c6ec4c20e63e1"} Jan 20 17:12:20 crc kubenswrapper[4558]: I0120 17:12:20.592942 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"8635d21f-4563-42ce-a3ab-4c67653d4dee","Type":"ContainerStarted","Data":"676c49625641465d1cc9c17ece4e1bac4203c9cf3e2fa79816092cfdc3814fc5"} Jan 20 17:12:20 crc kubenswrapper[4558]: I0120 17:12:20.592959 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:12:20 crc kubenswrapper[4558]: I0120 17:12:20.643685 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ovsdbserver-sb-0" podStartSLOduration=2.643662496 podStartE2EDuration="2.643662496s" podCreationTimestamp="2026-01-20 17:12:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:12:20.640996983 +0000 UTC m=+1834.401334951" watchObservedRunningTime="2026-01-20 17:12:20.643662496 +0000 UTC m=+1834.404000463" Jan 20 17:12:21 crc kubenswrapper[4558]: I0120 17:12:21.355980 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:12:21 crc kubenswrapper[4558]: I0120 17:12:21.600345 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc","Type":"ContainerStarted","Data":"4c99017bce2cd4c2557c9e2d276409ac4a6b6a986805b29e970cc3af2a59f1df"} Jan 20 17:12:22 crc kubenswrapper[4558]: I0120 17:12:22.235879 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:12:22 
crc kubenswrapper[4558]: I0120 17:12:22.253106 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/openstack-galera-0" podStartSLOduration=13.253088736 podStartE2EDuration="13.253088736s" podCreationTimestamp="2026-01-20 17:12:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:12:21.64015679 +0000 UTC m=+1835.400494757" watchObservedRunningTime="2026-01-20 17:12:22.253088736 +0000 UTC m=+1836.013426703" Jan 20 17:12:22 crc kubenswrapper[4558]: I0120 17:12:22.366471 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:12:22 crc kubenswrapper[4558]: I0120 17:12:22.397734 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:12:22 crc kubenswrapper[4558]: I0120 17:12:22.611382 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:12:23 crc kubenswrapper[4558]: I0120 17:12:23.161683 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.323940 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/swift-storage-0"] Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.328404 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.330963 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"swift-swift-dockercfg-kqprh" Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.330983 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"swift-ring-files" Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.330989 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"swift-storage-config-data" Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.331871 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"swift-conf" Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.345596 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-storage-0"] Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.402788 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.450533 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5-etc-swift\") pod \"swift-storage-0\" (UID: \"16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.450602 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5-cache\") pod \"swift-storage-0\" (UID: \"16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.450756 4558 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lkqx2\" (UniqueName: \"kubernetes.io/projected/16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5-kube-api-access-lkqx2\") pod \"swift-storage-0\" (UID: \"16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.450795 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"swift-storage-0\" (UID: \"16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.466951 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5-lock\") pod \"swift-storage-0\" (UID: \"16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.569623 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lkqx2\" (UniqueName: \"kubernetes.io/projected/16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5-kube-api-access-lkqx2\") pod \"swift-storage-0\" (UID: \"16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.569682 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"swift-storage-0\" (UID: \"16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.569730 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5-lock\") pod \"swift-storage-0\" (UID: \"16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.569788 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5-etc-swift\") pod \"swift-storage-0\" (UID: \"16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.569809 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5-cache\") pod \"swift-storage-0\" (UID: \"16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:12:24 crc kubenswrapper[4558]: E0120 17:12:24.570137 4558 projected.go:288] Couldn't get configMap openstack-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 20 17:12:24 crc kubenswrapper[4558]: E0120 17:12:24.570181 4558 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Jan 20 17:12:24 crc kubenswrapper[4558]: E0120 17:12:24.570230 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5-etc-swift podName:16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5 nodeName:}" failed. 
No retries permitted until 2026-01-20 17:12:25.070211062 +0000 UTC m=+1838.830549030 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5-etc-swift") pod "swift-storage-0" (UID: "16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5") : configmap "swift-ring-files" not found Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.570150 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"swift-storage-0\" (UID: \"16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5\") device mount path \"/mnt/openstack/pv08\"" pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.570287 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5-cache\") pod \"swift-storage-0\" (UID: \"16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.570381 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5-lock\") pod \"swift-storage-0\" (UID: \"16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.590999 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lkqx2\" (UniqueName: \"kubernetes.io/projected/16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5-kube-api-access-lkqx2\") pod \"swift-storage-0\" (UID: \"16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.591745 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"swift-storage-0\" (UID: \"16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.706778 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-rgrkz"] Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.712762 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/swift-ring-rebalance-rgrkz" Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.717299 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"swift-ring-config-data" Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.717509 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"swift-proxy-config-data" Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.722500 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"swift-ring-scripts" Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.751262 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-rgrkz"] Jan 20 17:12:24 crc kubenswrapper[4558]: E0120 17:12:24.752046 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[combined-ca-bundle dispersionconf etc-swift kube-api-access-mjm4w ring-data-devices scripts swiftconf], unattached volumes=[], failed to process volumes=[combined-ca-bundle dispersionconf etc-swift kube-api-access-mjm4w ring-data-devices scripts swiftconf]: context canceled" pod="openstack-kuttl-tests/swift-ring-rebalance-rgrkz" podUID="f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd" Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.777252 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-hj5p8"] Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.778548 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-ring-rebalance-hj5p8" Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.785041 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-hj5p8"] Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.788815 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.794104 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.800989 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovnnorthd-scripts" Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.801133 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovnnorthd-config" Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.802652 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ovnnorthd-ovndbs" Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.802861 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ovnnorthd-ovnnorthd-dockercfg-8nsbf" Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.834223 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-rgrkz"] Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.836317 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.878283 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd-etc-swift\") pod \"swift-ring-rebalance-rgrkz\" (UID: \"f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd\") " pod="openstack-kuttl-tests/swift-ring-rebalance-rgrkz" Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.878325 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd-ring-data-devices\") pod \"swift-ring-rebalance-rgrkz\" (UID: \"f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd\") " pod="openstack-kuttl-tests/swift-ring-rebalance-rgrkz" Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.878371 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c7d7d3af-d2d4-404c-ae30-a8802ea8709a-scripts\") pod \"swift-ring-rebalance-hj5p8\" (UID: \"c7d7d3af-d2d4-404c-ae30-a8802ea8709a\") " pod="openstack-kuttl-tests/swift-ring-rebalance-hj5p8" Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.878407 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd-swiftconf\") pod \"swift-ring-rebalance-rgrkz\" (UID: \"f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd\") " pod="openstack-kuttl-tests/swift-ring-rebalance-rgrkz" Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.878430 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7d7d3af-d2d4-404c-ae30-a8802ea8709a-combined-ca-bundle\") pod \"swift-ring-rebalance-hj5p8\" (UID: \"c7d7d3af-d2d4-404c-ae30-a8802ea8709a\") " pod="openstack-kuttl-tests/swift-ring-rebalance-hj5p8" Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.878449 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/c7d7d3af-d2d4-404c-ae30-a8802ea8709a-swiftconf\") pod \"swift-ring-rebalance-hj5p8\" (UID: \"c7d7d3af-d2d4-404c-ae30-a8802ea8709a\") " 
pod="openstack-kuttl-tests/swift-ring-rebalance-hj5p8" Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.878470 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xfpzf\" (UniqueName: \"kubernetes.io/projected/c7d7d3af-d2d4-404c-ae30-a8802ea8709a-kube-api-access-xfpzf\") pod \"swift-ring-rebalance-hj5p8\" (UID: \"c7d7d3af-d2d4-404c-ae30-a8802ea8709a\") " pod="openstack-kuttl-tests/swift-ring-rebalance-hj5p8" Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.878504 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd-dispersionconf\") pod \"swift-ring-rebalance-rgrkz\" (UID: \"f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd\") " pod="openstack-kuttl-tests/swift-ring-rebalance-rgrkz" Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.878533 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/c7d7d3af-d2d4-404c-ae30-a8802ea8709a-etc-swift\") pod \"swift-ring-rebalance-hj5p8\" (UID: \"c7d7d3af-d2d4-404c-ae30-a8802ea8709a\") " pod="openstack-kuttl-tests/swift-ring-rebalance-hj5p8" Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.878552 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mjm4w\" (UniqueName: \"kubernetes.io/projected/f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd-kube-api-access-mjm4w\") pod \"swift-ring-rebalance-rgrkz\" (UID: \"f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd\") " pod="openstack-kuttl-tests/swift-ring-rebalance-rgrkz" Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.878568 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/c7d7d3af-d2d4-404c-ae30-a8802ea8709a-dispersionconf\") pod \"swift-ring-rebalance-hj5p8\" (UID: \"c7d7d3af-d2d4-404c-ae30-a8802ea8709a\") " pod="openstack-kuttl-tests/swift-ring-rebalance-hj5p8" Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.878589 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd-combined-ca-bundle\") pod \"swift-ring-rebalance-rgrkz\" (UID: \"f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd\") " pod="openstack-kuttl-tests/swift-ring-rebalance-rgrkz" Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.878622 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/c7d7d3af-d2d4-404c-ae30-a8802ea8709a-ring-data-devices\") pod \"swift-ring-rebalance-hj5p8\" (UID: \"c7d7d3af-d2d4-404c-ae30-a8802ea8709a\") " pod="openstack-kuttl-tests/swift-ring-rebalance-hj5p8" Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.878642 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd-scripts\") pod \"swift-ring-rebalance-rgrkz\" (UID: \"f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd\") " pod="openstack-kuttl-tests/swift-ring-rebalance-rgrkz" Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.980628 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-xfpzf\" (UniqueName: \"kubernetes.io/projected/c7d7d3af-d2d4-404c-ae30-a8802ea8709a-kube-api-access-xfpzf\") pod \"swift-ring-rebalance-hj5p8\" (UID: \"c7d7d3af-d2d4-404c-ae30-a8802ea8709a\") " pod="openstack-kuttl-tests/swift-ring-rebalance-hj5p8" Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.980698 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9qjg6\" (UniqueName: \"kubernetes.io/projected/e8370668-8857-4be3-bcaa-bae8d6cdd158-kube-api-access-9qjg6\") pod \"ovn-northd-0\" (UID: \"e8370668-8857-4be3-bcaa-bae8d6cdd158\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.980723 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd-dispersionconf\") pod \"swift-ring-rebalance-rgrkz\" (UID: \"f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd\") " pod="openstack-kuttl-tests/swift-ring-rebalance-rgrkz" Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.980749 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e8370668-8857-4be3-bcaa-bae8d6cdd158-scripts\") pod \"ovn-northd-0\" (UID: \"e8370668-8857-4be3-bcaa-bae8d6cdd158\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.980787 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/c7d7d3af-d2d4-404c-ae30-a8802ea8709a-etc-swift\") pod \"swift-ring-rebalance-hj5p8\" (UID: \"c7d7d3af-d2d4-404c-ae30-a8802ea8709a\") " pod="openstack-kuttl-tests/swift-ring-rebalance-hj5p8" Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.980806 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mjm4w\" (UniqueName: \"kubernetes.io/projected/f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd-kube-api-access-mjm4w\") pod \"swift-ring-rebalance-rgrkz\" (UID: \"f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd\") " pod="openstack-kuttl-tests/swift-ring-rebalance-rgrkz" Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.980821 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/c7d7d3af-d2d4-404c-ae30-a8802ea8709a-dispersionconf\") pod \"swift-ring-rebalance-hj5p8\" (UID: \"c7d7d3af-d2d4-404c-ae30-a8802ea8709a\") " pod="openstack-kuttl-tests/swift-ring-rebalance-hj5p8" Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.980850 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd-combined-ca-bundle\") pod \"swift-ring-rebalance-rgrkz\" (UID: \"f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd\") " pod="openstack-kuttl-tests/swift-ring-rebalance-rgrkz" Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.980868 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8370668-8857-4be3-bcaa-bae8d6cdd158-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"e8370668-8857-4be3-bcaa-bae8d6cdd158\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.980901 4558 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e8370668-8857-4be3-bcaa-bae8d6cdd158-config\") pod \"ovn-northd-0\" (UID: \"e8370668-8857-4be3-bcaa-bae8d6cdd158\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.980941 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/c7d7d3af-d2d4-404c-ae30-a8802ea8709a-ring-data-devices\") pod \"swift-ring-rebalance-hj5p8\" (UID: \"c7d7d3af-d2d4-404c-ae30-a8802ea8709a\") " pod="openstack-kuttl-tests/swift-ring-rebalance-hj5p8" Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.980971 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd-scripts\") pod \"swift-ring-rebalance-rgrkz\" (UID: \"f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd\") " pod="openstack-kuttl-tests/swift-ring-rebalance-rgrkz" Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.980993 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/e8370668-8857-4be3-bcaa-bae8d6cdd158-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"e8370668-8857-4be3-bcaa-bae8d6cdd158\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.981058 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd-etc-swift\") pod \"swift-ring-rebalance-rgrkz\" (UID: \"f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd\") " pod="openstack-kuttl-tests/swift-ring-rebalance-rgrkz" Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.981086 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e8370668-8857-4be3-bcaa-bae8d6cdd158-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"e8370668-8857-4be3-bcaa-bae8d6cdd158\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.981105 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd-ring-data-devices\") pod \"swift-ring-rebalance-rgrkz\" (UID: \"f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd\") " pod="openstack-kuttl-tests/swift-ring-rebalance-rgrkz" Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.981157 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c7d7d3af-d2d4-404c-ae30-a8802ea8709a-scripts\") pod \"swift-ring-rebalance-hj5p8\" (UID: \"c7d7d3af-d2d4-404c-ae30-a8802ea8709a\") " pod="openstack-kuttl-tests/swift-ring-rebalance-hj5p8" Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.981207 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/e8370668-8857-4be3-bcaa-bae8d6cdd158-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"e8370668-8857-4be3-bcaa-bae8d6cdd158\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.981229 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd-swiftconf\") pod \"swift-ring-rebalance-rgrkz\" (UID: \"f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd\") " pod="openstack-kuttl-tests/swift-ring-rebalance-rgrkz" Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.981262 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7d7d3af-d2d4-404c-ae30-a8802ea8709a-combined-ca-bundle\") pod \"swift-ring-rebalance-hj5p8\" (UID: \"c7d7d3af-d2d4-404c-ae30-a8802ea8709a\") " pod="openstack-kuttl-tests/swift-ring-rebalance-hj5p8" Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.981282 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/c7d7d3af-d2d4-404c-ae30-a8802ea8709a-swiftconf\") pod \"swift-ring-rebalance-hj5p8\" (UID: \"c7d7d3af-d2d4-404c-ae30-a8802ea8709a\") " pod="openstack-kuttl-tests/swift-ring-rebalance-hj5p8" Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.981947 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/c7d7d3af-d2d4-404c-ae30-a8802ea8709a-etc-swift\") pod \"swift-ring-rebalance-hj5p8\" (UID: \"c7d7d3af-d2d4-404c-ae30-a8802ea8709a\") " pod="openstack-kuttl-tests/swift-ring-rebalance-hj5p8" Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.982235 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd-etc-swift\") pod \"swift-ring-rebalance-rgrkz\" (UID: \"f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd\") " pod="openstack-kuttl-tests/swift-ring-rebalance-rgrkz" Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.982751 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/c7d7d3af-d2d4-404c-ae30-a8802ea8709a-ring-data-devices\") pod \"swift-ring-rebalance-hj5p8\" (UID: \"c7d7d3af-d2d4-404c-ae30-a8802ea8709a\") " pod="openstack-kuttl-tests/swift-ring-rebalance-hj5p8" Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.983184 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd-scripts\") pod \"swift-ring-rebalance-rgrkz\" (UID: \"f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd\") " pod="openstack-kuttl-tests/swift-ring-rebalance-rgrkz" Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.983883 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c7d7d3af-d2d4-404c-ae30-a8802ea8709a-scripts\") pod \"swift-ring-rebalance-hj5p8\" (UID: \"c7d7d3af-d2d4-404c-ae30-a8802ea8709a\") " pod="openstack-kuttl-tests/swift-ring-rebalance-hj5p8" Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.984261 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd-ring-data-devices\") pod \"swift-ring-rebalance-rgrkz\" (UID: \"f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd\") " pod="openstack-kuttl-tests/swift-ring-rebalance-rgrkz" Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.998796 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: 
\"kubernetes.io/secret/c7d7d3af-d2d4-404c-ae30-a8802ea8709a-dispersionconf\") pod \"swift-ring-rebalance-hj5p8\" (UID: \"c7d7d3af-d2d4-404c-ae30-a8802ea8709a\") " pod="openstack-kuttl-tests/swift-ring-rebalance-hj5p8" Jan 20 17:12:24 crc kubenswrapper[4558]: I0120 17:12:24.998984 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd-dispersionconf\") pod \"swift-ring-rebalance-rgrkz\" (UID: \"f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd\") " pod="openstack-kuttl-tests/swift-ring-rebalance-rgrkz" Jan 20 17:12:25 crc kubenswrapper[4558]: I0120 17:12:25.000724 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd-combined-ca-bundle\") pod \"swift-ring-rebalance-rgrkz\" (UID: \"f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd\") " pod="openstack-kuttl-tests/swift-ring-rebalance-rgrkz" Jan 20 17:12:25 crc kubenswrapper[4558]: I0120 17:12:25.001694 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/c7d7d3af-d2d4-404c-ae30-a8802ea8709a-swiftconf\") pod \"swift-ring-rebalance-hj5p8\" (UID: \"c7d7d3af-d2d4-404c-ae30-a8802ea8709a\") " pod="openstack-kuttl-tests/swift-ring-rebalance-hj5p8" Jan 20 17:12:25 crc kubenswrapper[4558]: I0120 17:12:25.002432 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd-swiftconf\") pod \"swift-ring-rebalance-rgrkz\" (UID: \"f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd\") " pod="openstack-kuttl-tests/swift-ring-rebalance-rgrkz" Jan 20 17:12:25 crc kubenswrapper[4558]: I0120 17:12:25.002562 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7d7d3af-d2d4-404c-ae30-a8802ea8709a-combined-ca-bundle\") pod \"swift-ring-rebalance-hj5p8\" (UID: \"c7d7d3af-d2d4-404c-ae30-a8802ea8709a\") " pod="openstack-kuttl-tests/swift-ring-rebalance-hj5p8" Jan 20 17:12:25 crc kubenswrapper[4558]: I0120 17:12:25.006698 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xfpzf\" (UniqueName: \"kubernetes.io/projected/c7d7d3af-d2d4-404c-ae30-a8802ea8709a-kube-api-access-xfpzf\") pod \"swift-ring-rebalance-hj5p8\" (UID: \"c7d7d3af-d2d4-404c-ae30-a8802ea8709a\") " pod="openstack-kuttl-tests/swift-ring-rebalance-hj5p8" Jan 20 17:12:25 crc kubenswrapper[4558]: I0120 17:12:25.025318 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mjm4w\" (UniqueName: \"kubernetes.io/projected/f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd-kube-api-access-mjm4w\") pod \"swift-ring-rebalance-rgrkz\" (UID: \"f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd\") " pod="openstack-kuttl-tests/swift-ring-rebalance-rgrkz" Jan 20 17:12:25 crc kubenswrapper[4558]: I0120 17:12:25.083339 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8370668-8857-4be3-bcaa-bae8d6cdd158-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"e8370668-8857-4be3-bcaa-bae8d6cdd158\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:12:25 crc kubenswrapper[4558]: I0120 17:12:25.083435 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/e8370668-8857-4be3-bcaa-bae8d6cdd158-config\") pod \"ovn-northd-0\" (UID: \"e8370668-8857-4be3-bcaa-bae8d6cdd158\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:12:25 crc kubenswrapper[4558]: I0120 17:12:25.083526 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/e8370668-8857-4be3-bcaa-bae8d6cdd158-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"e8370668-8857-4be3-bcaa-bae8d6cdd158\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:12:25 crc kubenswrapper[4558]: I0120 17:12:25.083596 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5-etc-swift\") pod \"swift-storage-0\" (UID: \"16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:12:25 crc kubenswrapper[4558]: I0120 17:12:25.083646 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e8370668-8857-4be3-bcaa-bae8d6cdd158-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"e8370668-8857-4be3-bcaa-bae8d6cdd158\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:12:25 crc kubenswrapper[4558]: I0120 17:12:25.083738 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/e8370668-8857-4be3-bcaa-bae8d6cdd158-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"e8370668-8857-4be3-bcaa-bae8d6cdd158\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:12:25 crc kubenswrapper[4558]: E0120 17:12:25.083812 4558 projected.go:288] Couldn't get configMap openstack-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 20 17:12:25 crc kubenswrapper[4558]: I0120 17:12:25.083838 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9qjg6\" (UniqueName: \"kubernetes.io/projected/e8370668-8857-4be3-bcaa-bae8d6cdd158-kube-api-access-9qjg6\") pod \"ovn-northd-0\" (UID: \"e8370668-8857-4be3-bcaa-bae8d6cdd158\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:12:25 crc kubenswrapper[4558]: E0120 17:12:25.083846 4558 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Jan 20 17:12:25 crc kubenswrapper[4558]: I0120 17:12:25.083874 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e8370668-8857-4be3-bcaa-bae8d6cdd158-scripts\") pod \"ovn-northd-0\" (UID: \"e8370668-8857-4be3-bcaa-bae8d6cdd158\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:12:25 crc kubenswrapper[4558]: E0120 17:12:25.083905 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5-etc-swift podName:16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5 nodeName:}" failed. No retries permitted until 2026-01-20 17:12:26.083883045 +0000 UTC m=+1839.844221022 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5-etc-swift") pod "swift-storage-0" (UID: "16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5") : configmap "swift-ring-files" not found Jan 20 17:12:25 crc kubenswrapper[4558]: I0120 17:12:25.084044 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/e8370668-8857-4be3-bcaa-bae8d6cdd158-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"e8370668-8857-4be3-bcaa-bae8d6cdd158\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:12:25 crc kubenswrapper[4558]: I0120 17:12:25.084248 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e8370668-8857-4be3-bcaa-bae8d6cdd158-config\") pod \"ovn-northd-0\" (UID: \"e8370668-8857-4be3-bcaa-bae8d6cdd158\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:12:25 crc kubenswrapper[4558]: I0120 17:12:25.084671 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e8370668-8857-4be3-bcaa-bae8d6cdd158-scripts\") pod \"ovn-northd-0\" (UID: \"e8370668-8857-4be3-bcaa-bae8d6cdd158\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:12:25 crc kubenswrapper[4558]: I0120 17:12:25.086532 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e8370668-8857-4be3-bcaa-bae8d6cdd158-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"e8370668-8857-4be3-bcaa-bae8d6cdd158\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:12:25 crc kubenswrapper[4558]: I0120 17:12:25.086685 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8370668-8857-4be3-bcaa-bae8d6cdd158-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"e8370668-8857-4be3-bcaa-bae8d6cdd158\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:12:25 crc kubenswrapper[4558]: I0120 17:12:25.087137 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/e8370668-8857-4be3-bcaa-bae8d6cdd158-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"e8370668-8857-4be3-bcaa-bae8d6cdd158\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:12:25 crc kubenswrapper[4558]: I0120 17:12:25.100449 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-ring-rebalance-hj5p8" Jan 20 17:12:25 crc kubenswrapper[4558]: I0120 17:12:25.100778 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9qjg6\" (UniqueName: \"kubernetes.io/projected/e8370668-8857-4be3-bcaa-bae8d6cdd158-kube-api-access-9qjg6\") pod \"ovn-northd-0\" (UID: \"e8370668-8857-4be3-bcaa-bae8d6cdd158\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:12:25 crc kubenswrapper[4558]: I0120 17:12:25.114662 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:12:25 crc kubenswrapper[4558]: I0120 17:12:25.562262 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-hj5p8"] Jan 20 17:12:25 crc kubenswrapper[4558]: W0120 17:12:25.564948 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc7d7d3af_d2d4_404c_ae30_a8802ea8709a.slice/crio-c83e3b6d2533dffbc6053a2550cb22dbd971139a977e07caeac17ca594414953 WatchSource:0}: Error finding container c83e3b6d2533dffbc6053a2550cb22dbd971139a977e07caeac17ca594414953: Status 404 returned error can't find the container with id c83e3b6d2533dffbc6053a2550cb22dbd971139a977e07caeac17ca594414953 Jan 20 17:12:25 crc kubenswrapper[4558]: I0120 17:12:25.631867 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:12:25 crc kubenswrapper[4558]: I0120 17:12:25.645781 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-ring-rebalance-rgrkz" Jan 20 17:12:25 crc kubenswrapper[4558]: I0120 17:12:25.646300 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-ring-rebalance-hj5p8" event={"ID":"c7d7d3af-d2d4-404c-ae30-a8802ea8709a","Type":"ContainerStarted","Data":"c83e3b6d2533dffbc6053a2550cb22dbd971139a977e07caeac17ca594414953"} Jan 20 17:12:25 crc kubenswrapper[4558]: W0120 17:12:25.647515 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode8370668_8857_4be3_bcaa_bae8d6cdd158.slice/crio-19a79edaa42605a563f764f37b4accff1f775f3b2ba6ff2f66a1ec82094bce5f WatchSource:0}: Error finding container 19a79edaa42605a563f764f37b4accff1f775f3b2ba6ff2f66a1ec82094bce5f: Status 404 returned error can't find the container with id 19a79edaa42605a563f764f37b4accff1f775f3b2ba6ff2f66a1ec82094bce5f Jan 20 17:12:25 crc kubenswrapper[4558]: I0120 17:12:25.792596 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/swift-ring-rebalance-rgrkz" Jan 20 17:12:25 crc kubenswrapper[4558]: I0120 17:12:25.901412 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd-scripts\") pod \"f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd\" (UID: \"f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd\") " Jan 20 17:12:25 crc kubenswrapper[4558]: I0120 17:12:25.901461 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd-dispersionconf\") pod \"f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd\" (UID: \"f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd\") " Jan 20 17:12:25 crc kubenswrapper[4558]: I0120 17:12:25.902595 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd-ring-data-devices\") pod \"f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd\" (UID: \"f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd\") " Jan 20 17:12:25 crc kubenswrapper[4558]: I0120 17:12:25.902631 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd-combined-ca-bundle\") pod \"f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd\" (UID: \"f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd\") " Jan 20 17:12:25 crc kubenswrapper[4558]: I0120 17:12:25.902675 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd-etc-swift\") pod \"f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd\" (UID: \"f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd\") " Jan 20 17:12:25 crc kubenswrapper[4558]: I0120 17:12:25.902792 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd-swiftconf\") pod \"f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd\" (UID: \"f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd\") " Jan 20 17:12:25 crc kubenswrapper[4558]: I0120 17:12:25.902792 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd-scripts" (OuterVolumeSpecName: "scripts") pod "f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd" (UID: "f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:12:25 crc kubenswrapper[4558]: I0120 17:12:25.902856 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mjm4w\" (UniqueName: \"kubernetes.io/projected/f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd-kube-api-access-mjm4w\") pod \"f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd\" (UID: \"f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd\") " Jan 20 17:12:25 crc kubenswrapper[4558]: I0120 17:12:25.903188 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd" (UID: "f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd"). InnerVolumeSpecName "ring-data-devices". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:12:25 crc kubenswrapper[4558]: I0120 17:12:25.903954 4558 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd-ring-data-devices\") on node \"crc\" DevicePath \"\"" Jan 20 17:12:25 crc kubenswrapper[4558]: I0120 17:12:25.903978 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:12:25 crc kubenswrapper[4558]: I0120 17:12:25.904187 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd" (UID: "f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:12:25 crc kubenswrapper[4558]: I0120 17:12:25.906001 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd" (UID: "f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:12:25 crc kubenswrapper[4558]: I0120 17:12:25.906845 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd-kube-api-access-mjm4w" (OuterVolumeSpecName: "kube-api-access-mjm4w") pod "f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd" (UID: "f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd"). InnerVolumeSpecName "kube-api-access-mjm4w". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:12:25 crc kubenswrapper[4558]: I0120 17:12:25.907794 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd" (UID: "f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:12:25 crc kubenswrapper[4558]: I0120 17:12:25.908064 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd" (UID: "f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd"). InnerVolumeSpecName "swiftconf". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:12:26 crc kubenswrapper[4558]: I0120 17:12:26.006331 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:12:26 crc kubenswrapper[4558]: I0120 17:12:26.006441 4558 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 20 17:12:26 crc kubenswrapper[4558]: I0120 17:12:26.006528 4558 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd-swiftconf\") on node \"crc\" DevicePath \"\"" Jan 20 17:12:26 crc kubenswrapper[4558]: I0120 17:12:26.006595 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mjm4w\" (UniqueName: \"kubernetes.io/projected/f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd-kube-api-access-mjm4w\") on node \"crc\" DevicePath \"\"" Jan 20 17:12:26 crc kubenswrapper[4558]: I0120 17:12:26.006657 4558 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd-dispersionconf\") on node \"crc\" DevicePath \"\"" Jan 20 17:12:26 crc kubenswrapper[4558]: I0120 17:12:26.107950 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5-etc-swift\") pod \"swift-storage-0\" (UID: \"16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:12:26 crc kubenswrapper[4558]: E0120 17:12:26.108189 4558 projected.go:288] Couldn't get configMap openstack-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 20 17:12:26 crc kubenswrapper[4558]: E0120 17:12:26.108224 4558 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Jan 20 17:12:26 crc kubenswrapper[4558]: E0120 17:12:26.108287 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5-etc-swift podName:16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5 nodeName:}" failed. No retries permitted until 2026-01-20 17:12:28.108270331 +0000 UTC m=+1841.868608297 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5-etc-swift") pod "swift-storage-0" (UID: "16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5") : configmap "swift-ring-files" not found Jan 20 17:12:26 crc kubenswrapper[4558]: I0120 17:12:26.696328 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"e8370668-8857-4be3-bcaa-bae8d6cdd158","Type":"ContainerStarted","Data":"642cff711dbe4d54cbee09e66959eb32d2d26fed71a8ef14e847e8bed60f8ee8"} Jan 20 17:12:26 crc kubenswrapper[4558]: I0120 17:12:26.696383 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"e8370668-8857-4be3-bcaa-bae8d6cdd158","Type":"ContainerStarted","Data":"a07ebe0308220308f5b0687be7bf7284b0986dfc78e64ccb177087d9a970bb3f"} Jan 20 17:12:26 crc kubenswrapper[4558]: I0120 17:12:26.696400 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"e8370668-8857-4be3-bcaa-bae8d6cdd158","Type":"ContainerStarted","Data":"19a79edaa42605a563f764f37b4accff1f775f3b2ba6ff2f66a1ec82094bce5f"} Jan 20 17:12:26 crc kubenswrapper[4558]: I0120 17:12:26.697550 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:12:26 crc kubenswrapper[4558]: I0120 17:12:26.705877 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-ring-rebalance-rgrkz" Jan 20 17:12:26 crc kubenswrapper[4558]: I0120 17:12:26.706254 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-ring-rebalance-hj5p8" event={"ID":"c7d7d3af-d2d4-404c-ae30-a8802ea8709a","Type":"ContainerStarted","Data":"b91a14b7427c528f3415bf8592c7e627a68ca466d0f73da79aa04ad1ec9ef426"} Jan 20 17:12:26 crc kubenswrapper[4558]: I0120 17:12:26.731729 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ovn-northd-0" podStartSLOduration=2.7317153259999998 podStartE2EDuration="2.731715326s" podCreationTimestamp="2026-01-20 17:12:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:12:26.72849484 +0000 UTC m=+1840.488832808" watchObservedRunningTime="2026-01-20 17:12:26.731715326 +0000 UTC m=+1840.492053293" Jan 20 17:12:26 crc kubenswrapper[4558]: I0120 17:12:26.774027 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-rgrkz"] Jan 20 17:12:26 crc kubenswrapper[4558]: I0120 17:12:26.808800 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-rgrkz"] Jan 20 17:12:27 crc kubenswrapper[4558]: I0120 17:12:27.053926 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/perses-operator-5bf474d74f-6nh6n" Jan 20 17:12:27 crc kubenswrapper[4558]: I0120 17:12:27.074268 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/swift-ring-rebalance-hj5p8" podStartSLOduration=3.074251256 podStartE2EDuration="3.074251256s" podCreationTimestamp="2026-01-20 17:12:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:12:26.822431543 +0000 UTC m=+1840.582769510" watchObservedRunningTime="2026-01-20 17:12:27.074251256 +0000 UTC 
m=+1840.834589224" Jan 20 17:12:27 crc kubenswrapper[4558]: I0120 17:12:27.747768 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/alertmanager-metric-storage-0"] Jan 20 17:12:27 crc kubenswrapper[4558]: I0120 17:12:27.749360 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/alertmanager-metric-storage-0" Jan 20 17:12:27 crc kubenswrapper[4558]: I0120 17:12:27.751276 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"metric-storage-alertmanager-dockercfg-j4tb4" Jan 20 17:12:27 crc kubenswrapper[4558]: I0120 17:12:27.755095 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"alertmanager-metric-storage-tls-assets-0" Jan 20 17:12:27 crc kubenswrapper[4558]: I0120 17:12:27.755788 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"alertmanager-metric-storage-web-config" Jan 20 17:12:27 crc kubenswrapper[4558]: I0120 17:12:27.755904 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"alertmanager-metric-storage-generated" Jan 20 17:12:27 crc kubenswrapper[4558]: I0120 17:12:27.756430 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"alertmanager-metric-storage-cluster-tls-config" Jan 20 17:12:27 crc kubenswrapper[4558]: I0120 17:12:27.780794 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/alertmanager-metric-storage-0"] Jan 20 17:12:27 crc kubenswrapper[4558]: I0120 17:12:27.944749 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/13a0a262-6f56-4cf9-9b0d-85110bcff1e7-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"13a0a262-6f56-4cf9-9b0d-85110bcff1e7\") " pod="openstack-kuttl-tests/alertmanager-metric-storage-0" Jan 20 17:12:27 crc kubenswrapper[4558]: I0120 17:12:27.944838 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/13a0a262-6f56-4cf9-9b0d-85110bcff1e7-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"13a0a262-6f56-4cf9-9b0d-85110bcff1e7\") " pod="openstack-kuttl-tests/alertmanager-metric-storage-0" Jan 20 17:12:27 crc kubenswrapper[4558]: I0120 17:12:27.944918 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/13a0a262-6f56-4cf9-9b0d-85110bcff1e7-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"13a0a262-6f56-4cf9-9b0d-85110bcff1e7\") " pod="openstack-kuttl-tests/alertmanager-metric-storage-0" Jan 20 17:12:27 crc kubenswrapper[4558]: I0120 17:12:27.945009 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/13a0a262-6f56-4cf9-9b0d-85110bcff1e7-cluster-tls-config\") pod \"alertmanager-metric-storage-0\" (UID: \"13a0a262-6f56-4cf9-9b0d-85110bcff1e7\") " pod="openstack-kuttl-tests/alertmanager-metric-storage-0" Jan 20 17:12:27 crc kubenswrapper[4558]: I0120 17:12:27.945038 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/13a0a262-6f56-4cf9-9b0d-85110bcff1e7-web-config\") pod 
\"alertmanager-metric-storage-0\" (UID: \"13a0a262-6f56-4cf9-9b0d-85110bcff1e7\") " pod="openstack-kuttl-tests/alertmanager-metric-storage-0" Jan 20 17:12:27 crc kubenswrapper[4558]: I0120 17:12:27.945058 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/13a0a262-6f56-4cf9-9b0d-85110bcff1e7-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"13a0a262-6f56-4cf9-9b0d-85110bcff1e7\") " pod="openstack-kuttl-tests/alertmanager-metric-storage-0" Jan 20 17:12:27 crc kubenswrapper[4558]: I0120 17:12:27.945096 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p7njw\" (UniqueName: \"kubernetes.io/projected/13a0a262-6f56-4cf9-9b0d-85110bcff1e7-kube-api-access-p7njw\") pod \"alertmanager-metric-storage-0\" (UID: \"13a0a262-6f56-4cf9-9b0d-85110bcff1e7\") " pod="openstack-kuttl-tests/alertmanager-metric-storage-0" Jan 20 17:12:28 crc kubenswrapper[4558]: I0120 17:12:28.047190 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/13a0a262-6f56-4cf9-9b0d-85110bcff1e7-cluster-tls-config\") pod \"alertmanager-metric-storage-0\" (UID: \"13a0a262-6f56-4cf9-9b0d-85110bcff1e7\") " pod="openstack-kuttl-tests/alertmanager-metric-storage-0" Jan 20 17:12:28 crc kubenswrapper[4558]: I0120 17:12:28.047249 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/13a0a262-6f56-4cf9-9b0d-85110bcff1e7-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"13a0a262-6f56-4cf9-9b0d-85110bcff1e7\") " pod="openstack-kuttl-tests/alertmanager-metric-storage-0" Jan 20 17:12:28 crc kubenswrapper[4558]: I0120 17:12:28.047273 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/13a0a262-6f56-4cf9-9b0d-85110bcff1e7-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"13a0a262-6f56-4cf9-9b0d-85110bcff1e7\") " pod="openstack-kuttl-tests/alertmanager-metric-storage-0" Jan 20 17:12:28 crc kubenswrapper[4558]: I0120 17:12:28.047317 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p7njw\" (UniqueName: \"kubernetes.io/projected/13a0a262-6f56-4cf9-9b0d-85110bcff1e7-kube-api-access-p7njw\") pod \"alertmanager-metric-storage-0\" (UID: \"13a0a262-6f56-4cf9-9b0d-85110bcff1e7\") " pod="openstack-kuttl-tests/alertmanager-metric-storage-0" Jan 20 17:12:28 crc kubenswrapper[4558]: I0120 17:12:28.047381 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/13a0a262-6f56-4cf9-9b0d-85110bcff1e7-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"13a0a262-6f56-4cf9-9b0d-85110bcff1e7\") " pod="openstack-kuttl-tests/alertmanager-metric-storage-0" Jan 20 17:12:28 crc kubenswrapper[4558]: I0120 17:12:28.047454 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/13a0a262-6f56-4cf9-9b0d-85110bcff1e7-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"13a0a262-6f56-4cf9-9b0d-85110bcff1e7\") " pod="openstack-kuttl-tests/alertmanager-metric-storage-0" Jan 20 17:12:28 crc kubenswrapper[4558]: I0120 17:12:28.047535 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/13a0a262-6f56-4cf9-9b0d-85110bcff1e7-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"13a0a262-6f56-4cf9-9b0d-85110bcff1e7\") " pod="openstack-kuttl-tests/alertmanager-metric-storage-0" Jan 20 17:12:28 crc kubenswrapper[4558]: I0120 17:12:28.048084 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/13a0a262-6f56-4cf9-9b0d-85110bcff1e7-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"13a0a262-6f56-4cf9-9b0d-85110bcff1e7\") " pod="openstack-kuttl-tests/alertmanager-metric-storage-0" Jan 20 17:12:28 crc kubenswrapper[4558]: I0120 17:12:28.054040 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/13a0a262-6f56-4cf9-9b0d-85110bcff1e7-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"13a0a262-6f56-4cf9-9b0d-85110bcff1e7\") " pod="openstack-kuttl-tests/alertmanager-metric-storage-0" Jan 20 17:12:28 crc kubenswrapper[4558]: I0120 17:12:28.054202 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/13a0a262-6f56-4cf9-9b0d-85110bcff1e7-cluster-tls-config\") pod \"alertmanager-metric-storage-0\" (UID: \"13a0a262-6f56-4cf9-9b0d-85110bcff1e7\") " pod="openstack-kuttl-tests/alertmanager-metric-storage-0" Jan 20 17:12:28 crc kubenswrapper[4558]: I0120 17:12:28.054781 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/13a0a262-6f56-4cf9-9b0d-85110bcff1e7-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"13a0a262-6f56-4cf9-9b0d-85110bcff1e7\") " pod="openstack-kuttl-tests/alertmanager-metric-storage-0" Jan 20 17:12:28 crc kubenswrapper[4558]: I0120 17:12:28.066336 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/13a0a262-6f56-4cf9-9b0d-85110bcff1e7-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"13a0a262-6f56-4cf9-9b0d-85110bcff1e7\") " pod="openstack-kuttl-tests/alertmanager-metric-storage-0" Jan 20 17:12:28 crc kubenswrapper[4558]: I0120 17:12:28.066804 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/13a0a262-6f56-4cf9-9b0d-85110bcff1e7-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"13a0a262-6f56-4cf9-9b0d-85110bcff1e7\") " pod="openstack-kuttl-tests/alertmanager-metric-storage-0" Jan 20 17:12:28 crc kubenswrapper[4558]: I0120 17:12:28.082376 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p7njw\" (UniqueName: \"kubernetes.io/projected/13a0a262-6f56-4cf9-9b0d-85110bcff1e7-kube-api-access-p7njw\") pod \"alertmanager-metric-storage-0\" (UID: \"13a0a262-6f56-4cf9-9b0d-85110bcff1e7\") " pod="openstack-kuttl-tests/alertmanager-metric-storage-0" Jan 20 17:12:28 crc kubenswrapper[4558]: I0120 17:12:28.149108 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5-etc-swift\") pod \"swift-storage-0\" (UID: \"16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:12:28 crc kubenswrapper[4558]: E0120 17:12:28.149386 4558 projected.go:288] Couldn't get configMap 
openstack-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 20 17:12:28 crc kubenswrapper[4558]: E0120 17:12:28.149421 4558 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Jan 20 17:12:28 crc kubenswrapper[4558]: E0120 17:12:28.149472 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5-etc-swift podName:16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5 nodeName:}" failed. No retries permitted until 2026-01-20 17:12:32.149455962 +0000 UTC m=+1845.909793929 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5-etc-swift") pod "swift-storage-0" (UID: "16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5") : configmap "swift-ring-files" not found Jan 20 17:12:28 crc kubenswrapper[4558]: I0120 17:12:28.322013 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/prometheus-metric-storage-0"] Jan 20 17:12:28 crc kubenswrapper[4558]: I0120 17:12:28.323801 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:12:28 crc kubenswrapper[4558]: I0120 17:12:28.325806 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"prometheus-metric-storage-rulefiles-0" Jan 20 17:12:28 crc kubenswrapper[4558]: I0120 17:12:28.325876 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"prometheus-metric-storage-thanos-prometheus-http-client-file" Jan 20 17:12:28 crc kubenswrapper[4558]: I0120 17:12:28.325886 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"prometheus-metric-storage-rulefiles-1" Jan 20 17:12:28 crc kubenswrapper[4558]: I0120 17:12:28.326011 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"prometheus-metric-storage-tls-assets-0" Jan 20 17:12:28 crc kubenswrapper[4558]: I0120 17:12:28.328277 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"prometheus-metric-storage-web-config" Jan 20 17:12:28 crc kubenswrapper[4558]: I0120 17:12:28.328318 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"prometheus-metric-storage-rulefiles-2" Jan 20 17:12:28 crc kubenswrapper[4558]: I0120 17:12:28.328318 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"prometheus-metric-storage" Jan 20 17:12:28 crc kubenswrapper[4558]: I0120 17:12:28.328885 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"metric-storage-prometheus-dockercfg-vf58l" Jan 20 17:12:28 crc kubenswrapper[4558]: I0120 17:12:28.350238 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/prometheus-metric-storage-0"] Jan 20 17:12:28 crc kubenswrapper[4558]: I0120 17:12:28.365418 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/alertmanager-metric-storage-0" Jan 20 17:12:28 crc kubenswrapper[4558]: I0120 17:12:28.455521 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"prometheus-metric-storage-0\" (UID: \"b099ccf1-da7f-4e4d-80cf-629b21cd774c\") " pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:12:28 crc kubenswrapper[4558]: I0120 17:12:28.455575 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/b099ccf1-da7f-4e4d-80cf-629b21cd774c-config\") pod \"prometheus-metric-storage-0\" (UID: \"b099ccf1-da7f-4e4d-80cf-629b21cd774c\") " pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:12:28 crc kubenswrapper[4558]: I0120 17:12:28.455624 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lwdtt\" (UniqueName: \"kubernetes.io/projected/b099ccf1-da7f-4e4d-80cf-629b21cd774c-kube-api-access-lwdtt\") pod \"prometheus-metric-storage-0\" (UID: \"b099ccf1-da7f-4e4d-80cf-629b21cd774c\") " pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:12:28 crc kubenswrapper[4558]: I0120 17:12:28.455657 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/b099ccf1-da7f-4e4d-80cf-629b21cd774c-prometheus-metric-storage-rulefiles-2\") pod \"prometheus-metric-storage-0\" (UID: \"b099ccf1-da7f-4e4d-80cf-629b21cd774c\") " pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:12:28 crc kubenswrapper[4558]: I0120 17:12:28.455740 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/b099ccf1-da7f-4e4d-80cf-629b21cd774c-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"b099ccf1-da7f-4e4d-80cf-629b21cd774c\") " pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:12:28 crc kubenswrapper[4558]: I0120 17:12:28.455759 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/b099ccf1-da7f-4e4d-80cf-629b21cd774c-prometheus-metric-storage-rulefiles-1\") pod \"prometheus-metric-storage-0\" (UID: \"b099ccf1-da7f-4e4d-80cf-629b21cd774c\") " pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:12:28 crc kubenswrapper[4558]: I0120 17:12:28.455877 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/b099ccf1-da7f-4e4d-80cf-629b21cd774c-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"b099ccf1-da7f-4e4d-80cf-629b21cd774c\") " pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:12:28 crc kubenswrapper[4558]: I0120 17:12:28.455933 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/b099ccf1-da7f-4e4d-80cf-629b21cd774c-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"b099ccf1-da7f-4e4d-80cf-629b21cd774c\") " pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:12:28 crc kubenswrapper[4558]: 
I0120 17:12:28.455958 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/b099ccf1-da7f-4e4d-80cf-629b21cd774c-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"b099ccf1-da7f-4e4d-80cf-629b21cd774c\") " pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:12:28 crc kubenswrapper[4558]: I0120 17:12:28.456044 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/b099ccf1-da7f-4e4d-80cf-629b21cd774c-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"b099ccf1-da7f-4e4d-80cf-629b21cd774c\") " pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:12:28 crc kubenswrapper[4558]: I0120 17:12:28.557179 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lwdtt\" (UniqueName: \"kubernetes.io/projected/b099ccf1-da7f-4e4d-80cf-629b21cd774c-kube-api-access-lwdtt\") pod \"prometheus-metric-storage-0\" (UID: \"b099ccf1-da7f-4e4d-80cf-629b21cd774c\") " pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:12:28 crc kubenswrapper[4558]: I0120 17:12:28.557229 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/b099ccf1-da7f-4e4d-80cf-629b21cd774c-prometheus-metric-storage-rulefiles-2\") pod \"prometheus-metric-storage-0\" (UID: \"b099ccf1-da7f-4e4d-80cf-629b21cd774c\") " pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:12:28 crc kubenswrapper[4558]: I0120 17:12:28.557278 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/b099ccf1-da7f-4e4d-80cf-629b21cd774c-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"b099ccf1-da7f-4e4d-80cf-629b21cd774c\") " pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:12:28 crc kubenswrapper[4558]: I0120 17:12:28.557293 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/b099ccf1-da7f-4e4d-80cf-629b21cd774c-prometheus-metric-storage-rulefiles-1\") pod \"prometheus-metric-storage-0\" (UID: \"b099ccf1-da7f-4e4d-80cf-629b21cd774c\") " pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:12:28 crc kubenswrapper[4558]: I0120 17:12:28.557317 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/b099ccf1-da7f-4e4d-80cf-629b21cd774c-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"b099ccf1-da7f-4e4d-80cf-629b21cd774c\") " pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:12:28 crc kubenswrapper[4558]: I0120 17:12:28.557334 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/b099ccf1-da7f-4e4d-80cf-629b21cd774c-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"b099ccf1-da7f-4e4d-80cf-629b21cd774c\") " pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:12:28 crc kubenswrapper[4558]: I0120 17:12:28.557350 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: 
\"kubernetes.io/configmap/b099ccf1-da7f-4e4d-80cf-629b21cd774c-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"b099ccf1-da7f-4e4d-80cf-629b21cd774c\") " pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:12:28 crc kubenswrapper[4558]: I0120 17:12:28.557386 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/b099ccf1-da7f-4e4d-80cf-629b21cd774c-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"b099ccf1-da7f-4e4d-80cf-629b21cd774c\") " pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:12:28 crc kubenswrapper[4558]: I0120 17:12:28.557438 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"prometheus-metric-storage-0\" (UID: \"b099ccf1-da7f-4e4d-80cf-629b21cd774c\") " pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:12:28 crc kubenswrapper[4558]: I0120 17:12:28.557467 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/b099ccf1-da7f-4e4d-80cf-629b21cd774c-config\") pod \"prometheus-metric-storage-0\" (UID: \"b099ccf1-da7f-4e4d-80cf-629b21cd774c\") " pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:12:28 crc kubenswrapper[4558]: I0120 17:12:28.558518 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/b099ccf1-da7f-4e4d-80cf-629b21cd774c-prometheus-metric-storage-rulefiles-2\") pod \"prometheus-metric-storage-0\" (UID: \"b099ccf1-da7f-4e4d-80cf-629b21cd774c\") " pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:12:28 crc kubenswrapper[4558]: I0120 17:12:28.560016 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"prometheus-metric-storage-0\" (UID: \"b099ccf1-da7f-4e4d-80cf-629b21cd774c\") device mount path \"/mnt/openstack/pv12\"" pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:12:28 crc kubenswrapper[4558]: I0120 17:12:28.561068 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/b099ccf1-da7f-4e4d-80cf-629b21cd774c-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"b099ccf1-da7f-4e4d-80cf-629b21cd774c\") " pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:12:28 crc kubenswrapper[4558]: I0120 17:12:28.564967 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/b099ccf1-da7f-4e4d-80cf-629b21cd774c-config\") pod \"prometheus-metric-storage-0\" (UID: \"b099ccf1-da7f-4e4d-80cf-629b21cd774c\") " pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:12:28 crc kubenswrapper[4558]: I0120 17:12:28.565380 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/b099ccf1-da7f-4e4d-80cf-629b21cd774c-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"b099ccf1-da7f-4e4d-80cf-629b21cd774c\") " pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:12:28 crc kubenswrapper[4558]: I0120 17:12:28.567592 4558 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"prometheus-metric-storage-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/b099ccf1-da7f-4e4d-80cf-629b21cd774c-prometheus-metric-storage-rulefiles-1\") pod \"prometheus-metric-storage-0\" (UID: \"b099ccf1-da7f-4e4d-80cf-629b21cd774c\") " pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:12:28 crc kubenswrapper[4558]: I0120 17:12:28.582613 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/b099ccf1-da7f-4e4d-80cf-629b21cd774c-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"b099ccf1-da7f-4e4d-80cf-629b21cd774c\") " pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:12:28 crc kubenswrapper[4558]: I0120 17:12:28.583917 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/b099ccf1-da7f-4e4d-80cf-629b21cd774c-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"b099ccf1-da7f-4e4d-80cf-629b21cd774c\") " pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:12:28 crc kubenswrapper[4558]: I0120 17:12:28.589721 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd" path="/var/lib/kubelet/pods/f3a0a5e2-d38a-47ec-8be7-bbeafd2145bd/volumes" Jan 20 17:12:28 crc kubenswrapper[4558]: I0120 17:12:28.609543 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/b099ccf1-da7f-4e4d-80cf-629b21cd774c-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"b099ccf1-da7f-4e4d-80cf-629b21cd774c\") " pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:12:28 crc kubenswrapper[4558]: I0120 17:12:28.632766 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lwdtt\" (UniqueName: \"kubernetes.io/projected/b099ccf1-da7f-4e4d-80cf-629b21cd774c-kube-api-access-lwdtt\") pod \"prometheus-metric-storage-0\" (UID: \"b099ccf1-da7f-4e4d-80cf-629b21cd774c\") " pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:12:28 crc kubenswrapper[4558]: I0120 17:12:28.666895 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"prometheus-metric-storage-0\" (UID: \"b099ccf1-da7f-4e4d-80cf-629b21cd774c\") " pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:12:28 crc kubenswrapper[4558]: I0120 17:12:28.892759 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/alertmanager-metric-storage-0"] Jan 20 17:12:28 crc kubenswrapper[4558]: I0120 17:12:28.939791 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:12:29 crc kubenswrapper[4558]: I0120 17:12:29.554183 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/prometheus-metric-storage-0"] Jan 20 17:12:29 crc kubenswrapper[4558]: W0120 17:12:29.560320 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb099ccf1_da7f_4e4d_80cf_629b21cd774c.slice/crio-0031c8136e0e76ec9357655101976c9fb0febef0bf6d6bca1f583990b7719c56 WatchSource:0}: Error finding container 0031c8136e0e76ec9357655101976c9fb0febef0bf6d6bca1f583990b7719c56: Status 404 returned error can't find the container with id 0031c8136e0e76ec9357655101976c9fb0febef0bf6d6bca1f583990b7719c56 Jan 20 17:12:29 crc kubenswrapper[4558]: I0120 17:12:29.742813 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/prometheus-metric-storage-0" event={"ID":"b099ccf1-da7f-4e4d-80cf-629b21cd774c","Type":"ContainerStarted","Data":"0031c8136e0e76ec9357655101976c9fb0febef0bf6d6bca1f583990b7719c56"} Jan 20 17:12:29 crc kubenswrapper[4558]: I0120 17:12:29.744284 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/alertmanager-metric-storage-0" event={"ID":"13a0a262-6f56-4cf9-9b0d-85110bcff1e7","Type":"ContainerStarted","Data":"1e7f2a5ae885cc4c28319d54221677567b47d3120f3c178847ab30c414af96bd"} Jan 20 17:12:30 crc kubenswrapper[4558]: I0120 17:12:30.942140 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:12:30 crc kubenswrapper[4558]: I0120 17:12:30.942208 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:12:31 crc kubenswrapper[4558]: I0120 17:12:31.319694 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:12:31 crc kubenswrapper[4558]: I0120 17:12:31.850859 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:12:32 crc kubenswrapper[4558]: I0120 17:12:32.231838 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5-etc-swift\") pod \"swift-storage-0\" (UID: \"16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:12:32 crc kubenswrapper[4558]: E0120 17:12:32.232093 4558 projected.go:288] Couldn't get configMap openstack-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 20 17:12:32 crc kubenswrapper[4558]: E0120 17:12:32.232228 4558 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Jan 20 17:12:32 crc kubenswrapper[4558]: E0120 17:12:32.232311 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5-etc-swift podName:16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5 nodeName:}" failed. No retries permitted until 2026-01-20 17:12:40.232287741 +0000 UTC m=+1853.992625718 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5-etc-swift") pod "swift-storage-0" (UID: "16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5") : configmap "swift-ring-files" not found Jan 20 17:12:32 crc kubenswrapper[4558]: I0120 17:12:32.995986 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/watcher-db-create-4k5sd"] Jan 20 17:12:32 crc kubenswrapper[4558]: I0120 17:12:32.997091 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/watcher-db-create-4k5sd" Jan 20 17:12:33 crc kubenswrapper[4558]: I0120 17:12:33.015742 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/watcher-db-create-4k5sd"] Jan 20 17:12:33 crc kubenswrapper[4558]: I0120 17:12:33.135651 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/watcher-be54-account-create-update-2fkrk"] Jan 20 17:12:33 crc kubenswrapper[4558]: I0120 17:12:33.136737 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/watcher-be54-account-create-update-2fkrk" Jan 20 17:12:33 crc kubenswrapper[4558]: I0120 17:12:33.138794 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"watcher-db-secret" Jan 20 17:12:33 crc kubenswrapper[4558]: I0120 17:12:33.149989 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dc21aa2e-3cb0-43c7-b1d9-650741794867-operator-scripts\") pod \"watcher-db-create-4k5sd\" (UID: \"dc21aa2e-3cb0-43c7-b1d9-650741794867\") " pod="openstack-kuttl-tests/watcher-db-create-4k5sd" Jan 20 17:12:33 crc kubenswrapper[4558]: I0120 17:12:33.150065 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ptxt2\" (UniqueName: \"kubernetes.io/projected/dc21aa2e-3cb0-43c7-b1d9-650741794867-kube-api-access-ptxt2\") pod \"watcher-db-create-4k5sd\" (UID: \"dc21aa2e-3cb0-43c7-b1d9-650741794867\") " pod="openstack-kuttl-tests/watcher-db-create-4k5sd" Jan 20 17:12:33 crc kubenswrapper[4558]: I0120 17:12:33.150482 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/watcher-be54-account-create-update-2fkrk"] Jan 20 17:12:33 crc kubenswrapper[4558]: I0120 17:12:33.253387 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ft5bd\" (UniqueName: \"kubernetes.io/projected/034d0ad2-ef35-4499-97bf-22bb087bda19-kube-api-access-ft5bd\") pod \"watcher-be54-account-create-update-2fkrk\" (UID: \"034d0ad2-ef35-4499-97bf-22bb087bda19\") " pod="openstack-kuttl-tests/watcher-be54-account-create-update-2fkrk" Jan 20 17:12:33 crc kubenswrapper[4558]: I0120 17:12:33.253797 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dc21aa2e-3cb0-43c7-b1d9-650741794867-operator-scripts\") pod \"watcher-db-create-4k5sd\" (UID: \"dc21aa2e-3cb0-43c7-b1d9-650741794867\") " pod="openstack-kuttl-tests/watcher-db-create-4k5sd" Jan 20 17:12:33 crc kubenswrapper[4558]: I0120 17:12:33.253846 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ptxt2\" (UniqueName: \"kubernetes.io/projected/dc21aa2e-3cb0-43c7-b1d9-650741794867-kube-api-access-ptxt2\") pod \"watcher-db-create-4k5sd\" (UID: 
\"dc21aa2e-3cb0-43c7-b1d9-650741794867\") " pod="openstack-kuttl-tests/watcher-db-create-4k5sd" Jan 20 17:12:33 crc kubenswrapper[4558]: I0120 17:12:33.253875 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/034d0ad2-ef35-4499-97bf-22bb087bda19-operator-scripts\") pod \"watcher-be54-account-create-update-2fkrk\" (UID: \"034d0ad2-ef35-4499-97bf-22bb087bda19\") " pod="openstack-kuttl-tests/watcher-be54-account-create-update-2fkrk" Jan 20 17:12:33 crc kubenswrapper[4558]: I0120 17:12:33.254916 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dc21aa2e-3cb0-43c7-b1d9-650741794867-operator-scripts\") pod \"watcher-db-create-4k5sd\" (UID: \"dc21aa2e-3cb0-43c7-b1d9-650741794867\") " pod="openstack-kuttl-tests/watcher-db-create-4k5sd" Jan 20 17:12:33 crc kubenswrapper[4558]: I0120 17:12:33.300862 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ptxt2\" (UniqueName: \"kubernetes.io/projected/dc21aa2e-3cb0-43c7-b1d9-650741794867-kube-api-access-ptxt2\") pod \"watcher-db-create-4k5sd\" (UID: \"dc21aa2e-3cb0-43c7-b1d9-650741794867\") " pod="openstack-kuttl-tests/watcher-db-create-4k5sd" Jan 20 17:12:33 crc kubenswrapper[4558]: I0120 17:12:33.314275 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/watcher-db-create-4k5sd" Jan 20 17:12:33 crc kubenswrapper[4558]: I0120 17:12:33.355991 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/034d0ad2-ef35-4499-97bf-22bb087bda19-operator-scripts\") pod \"watcher-be54-account-create-update-2fkrk\" (UID: \"034d0ad2-ef35-4499-97bf-22bb087bda19\") " pod="openstack-kuttl-tests/watcher-be54-account-create-update-2fkrk" Jan 20 17:12:33 crc kubenswrapper[4558]: I0120 17:12:33.356344 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ft5bd\" (UniqueName: \"kubernetes.io/projected/034d0ad2-ef35-4499-97bf-22bb087bda19-kube-api-access-ft5bd\") pod \"watcher-be54-account-create-update-2fkrk\" (UID: \"034d0ad2-ef35-4499-97bf-22bb087bda19\") " pod="openstack-kuttl-tests/watcher-be54-account-create-update-2fkrk" Jan 20 17:12:33 crc kubenswrapper[4558]: I0120 17:12:33.356665 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/034d0ad2-ef35-4499-97bf-22bb087bda19-operator-scripts\") pod \"watcher-be54-account-create-update-2fkrk\" (UID: \"034d0ad2-ef35-4499-97bf-22bb087bda19\") " pod="openstack-kuttl-tests/watcher-be54-account-create-update-2fkrk" Jan 20 17:12:33 crc kubenswrapper[4558]: I0120 17:12:33.371926 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ft5bd\" (UniqueName: \"kubernetes.io/projected/034d0ad2-ef35-4499-97bf-22bb087bda19-kube-api-access-ft5bd\") pod \"watcher-be54-account-create-update-2fkrk\" (UID: \"034d0ad2-ef35-4499-97bf-22bb087bda19\") " pod="openstack-kuttl-tests/watcher-be54-account-create-update-2fkrk" Jan 20 17:12:33 crc kubenswrapper[4558]: I0120 17:12:33.450751 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/watcher-be54-account-create-update-2fkrk" Jan 20 17:12:33 crc kubenswrapper[4558]: I0120 17:12:33.694268 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/watcher-db-create-4k5sd"] Jan 20 17:12:33 crc kubenswrapper[4558]: W0120 17:12:33.695953 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddc21aa2e_3cb0_43c7_b1d9_650741794867.slice/crio-399bcc6a5f7bb1f040210f804c2c33053680b7b0e770e0cc53e636c288980b1d WatchSource:0}: Error finding container 399bcc6a5f7bb1f040210f804c2c33053680b7b0e770e0cc53e636c288980b1d: Status 404 returned error can't find the container with id 399bcc6a5f7bb1f040210f804c2c33053680b7b0e770e0cc53e636c288980b1d Jan 20 17:12:33 crc kubenswrapper[4558]: I0120 17:12:33.789994 4558 generic.go:334] "Generic (PLEG): container finished" podID="c7d7d3af-d2d4-404c-ae30-a8802ea8709a" containerID="b91a14b7427c528f3415bf8592c7e627a68ca466d0f73da79aa04ad1ec9ef426" exitCode=0 Jan 20 17:12:33 crc kubenswrapper[4558]: I0120 17:12:33.790072 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-ring-rebalance-hj5p8" event={"ID":"c7d7d3af-d2d4-404c-ae30-a8802ea8709a","Type":"ContainerDied","Data":"b91a14b7427c528f3415bf8592c7e627a68ca466d0f73da79aa04ad1ec9ef426"} Jan 20 17:12:33 crc kubenswrapper[4558]: I0120 17:12:33.791367 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/watcher-db-create-4k5sd" event={"ID":"dc21aa2e-3cb0-43c7-b1d9-650741794867","Type":"ContainerStarted","Data":"399bcc6a5f7bb1f040210f804c2c33053680b7b0e770e0cc53e636c288980b1d"} Jan 20 17:12:33 crc kubenswrapper[4558]: I0120 17:12:33.864472 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/watcher-be54-account-create-update-2fkrk"] Jan 20 17:12:33 crc kubenswrapper[4558]: W0120 17:12:33.871596 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod034d0ad2_ef35_4499_97bf_22bb087bda19.slice/crio-cdee0c1d72e0534a129fe7ec54ddee9701c209fc28fbcb14b82cff80ada0ef2f WatchSource:0}: Error finding container cdee0c1d72e0534a129fe7ec54ddee9701c209fc28fbcb14b82cff80ada0ef2f: Status 404 returned error can't find the container with id cdee0c1d72e0534a129fe7ec54ddee9701c209fc28fbcb14b82cff80ada0ef2f Jan 20 17:12:34 crc kubenswrapper[4558]: I0120 17:12:34.804241 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/prometheus-metric-storage-0" event={"ID":"b099ccf1-da7f-4e4d-80cf-629b21cd774c","Type":"ContainerStarted","Data":"daad0853cc1011ae35e95a9f79a7ec7c1017c2cddf3225d7409dec8cb1b97163"} Jan 20 17:12:34 crc kubenswrapper[4558]: I0120 17:12:34.808200 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/alertmanager-metric-storage-0" event={"ID":"13a0a262-6f56-4cf9-9b0d-85110bcff1e7","Type":"ContainerStarted","Data":"f2c8e0e78787bca0b6f637ec67af7122ba24894258eb3e16a2aa600b8bbdc042"} Jan 20 17:12:34 crc kubenswrapper[4558]: I0120 17:12:34.810343 4558 generic.go:334] "Generic (PLEG): container finished" podID="034d0ad2-ef35-4499-97bf-22bb087bda19" containerID="c03edbfea1cb0bae5898092d9ae12c1998d6252afb73ed3b9dd034e275deae47" exitCode=0 Jan 20 17:12:34 crc kubenswrapper[4558]: I0120 17:12:34.810441 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/watcher-be54-account-create-update-2fkrk" 
event={"ID":"034d0ad2-ef35-4499-97bf-22bb087bda19","Type":"ContainerDied","Data":"c03edbfea1cb0bae5898092d9ae12c1998d6252afb73ed3b9dd034e275deae47"} Jan 20 17:12:34 crc kubenswrapper[4558]: I0120 17:12:34.810476 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/watcher-be54-account-create-update-2fkrk" event={"ID":"034d0ad2-ef35-4499-97bf-22bb087bda19","Type":"ContainerStarted","Data":"cdee0c1d72e0534a129fe7ec54ddee9701c209fc28fbcb14b82cff80ada0ef2f"} Jan 20 17:12:34 crc kubenswrapper[4558]: I0120 17:12:34.815063 4558 generic.go:334] "Generic (PLEG): container finished" podID="dc21aa2e-3cb0-43c7-b1d9-650741794867" containerID="1395ab77fe1bb470be25d5faad0eaa0e7b01a32afbd5a14b58917b77c984fae7" exitCode=0 Jan 20 17:12:34 crc kubenswrapper[4558]: I0120 17:12:34.815128 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/watcher-db-create-4k5sd" event={"ID":"dc21aa2e-3cb0-43c7-b1d9-650741794867","Type":"ContainerDied","Data":"1395ab77fe1bb470be25d5faad0eaa0e7b01a32afbd5a14b58917b77c984fae7"} Jan 20 17:12:35 crc kubenswrapper[4558]: I0120 17:12:35.143955 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-ring-rebalance-hj5p8" Jan 20 17:12:35 crc kubenswrapper[4558]: I0120 17:12:35.170848 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:12:35 crc kubenswrapper[4558]: I0120 17:12:35.189279 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/c7d7d3af-d2d4-404c-ae30-a8802ea8709a-dispersionconf\") pod \"c7d7d3af-d2d4-404c-ae30-a8802ea8709a\" (UID: \"c7d7d3af-d2d4-404c-ae30-a8802ea8709a\") " Jan 20 17:12:35 crc kubenswrapper[4558]: I0120 17:12:35.189384 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/c7d7d3af-d2d4-404c-ae30-a8802ea8709a-ring-data-devices\") pod \"c7d7d3af-d2d4-404c-ae30-a8802ea8709a\" (UID: \"c7d7d3af-d2d4-404c-ae30-a8802ea8709a\") " Jan 20 17:12:35 crc kubenswrapper[4558]: I0120 17:12:35.189456 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/c7d7d3af-d2d4-404c-ae30-a8802ea8709a-swiftconf\") pod \"c7d7d3af-d2d4-404c-ae30-a8802ea8709a\" (UID: \"c7d7d3af-d2d4-404c-ae30-a8802ea8709a\") " Jan 20 17:12:35 crc kubenswrapper[4558]: I0120 17:12:35.189499 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7d7d3af-d2d4-404c-ae30-a8802ea8709a-combined-ca-bundle\") pod \"c7d7d3af-d2d4-404c-ae30-a8802ea8709a\" (UID: \"c7d7d3af-d2d4-404c-ae30-a8802ea8709a\") " Jan 20 17:12:35 crc kubenswrapper[4558]: I0120 17:12:35.189520 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c7d7d3af-d2d4-404c-ae30-a8802ea8709a-scripts\") pod \"c7d7d3af-d2d4-404c-ae30-a8802ea8709a\" (UID: \"c7d7d3af-d2d4-404c-ae30-a8802ea8709a\") " Jan 20 17:12:35 crc kubenswrapper[4558]: I0120 17:12:35.189649 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/c7d7d3af-d2d4-404c-ae30-a8802ea8709a-etc-swift\") pod \"c7d7d3af-d2d4-404c-ae30-a8802ea8709a\" (UID: \"c7d7d3af-d2d4-404c-ae30-a8802ea8709a\") " Jan 20 17:12:35 
crc kubenswrapper[4558]: I0120 17:12:35.189837 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xfpzf\" (UniqueName: \"kubernetes.io/projected/c7d7d3af-d2d4-404c-ae30-a8802ea8709a-kube-api-access-xfpzf\") pod \"c7d7d3af-d2d4-404c-ae30-a8802ea8709a\" (UID: \"c7d7d3af-d2d4-404c-ae30-a8802ea8709a\") " Jan 20 17:12:35 crc kubenswrapper[4558]: I0120 17:12:35.194535 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c7d7d3af-d2d4-404c-ae30-a8802ea8709a-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "c7d7d3af-d2d4-404c-ae30-a8802ea8709a" (UID: "c7d7d3af-d2d4-404c-ae30-a8802ea8709a"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:12:35 crc kubenswrapper[4558]: I0120 17:12:35.194844 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c7d7d3af-d2d4-404c-ae30-a8802ea8709a-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "c7d7d3af-d2d4-404c-ae30-a8802ea8709a" (UID: "c7d7d3af-d2d4-404c-ae30-a8802ea8709a"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:12:35 crc kubenswrapper[4558]: I0120 17:12:35.218983 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c7d7d3af-d2d4-404c-ae30-a8802ea8709a-kube-api-access-xfpzf" (OuterVolumeSpecName: "kube-api-access-xfpzf") pod "c7d7d3af-d2d4-404c-ae30-a8802ea8709a" (UID: "c7d7d3af-d2d4-404c-ae30-a8802ea8709a"). InnerVolumeSpecName "kube-api-access-xfpzf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:12:35 crc kubenswrapper[4558]: I0120 17:12:35.222701 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c7d7d3af-d2d4-404c-ae30-a8802ea8709a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c7d7d3af-d2d4-404c-ae30-a8802ea8709a" (UID: "c7d7d3af-d2d4-404c-ae30-a8802ea8709a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:12:35 crc kubenswrapper[4558]: I0120 17:12:35.222891 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c7d7d3af-d2d4-404c-ae30-a8802ea8709a-scripts" (OuterVolumeSpecName: "scripts") pod "c7d7d3af-d2d4-404c-ae30-a8802ea8709a" (UID: "c7d7d3af-d2d4-404c-ae30-a8802ea8709a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:12:35 crc kubenswrapper[4558]: I0120 17:12:35.230154 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c7d7d3af-d2d4-404c-ae30-a8802ea8709a-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "c7d7d3af-d2d4-404c-ae30-a8802ea8709a" (UID: "c7d7d3af-d2d4-404c-ae30-a8802ea8709a"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:12:35 crc kubenswrapper[4558]: I0120 17:12:35.242585 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c7d7d3af-d2d4-404c-ae30-a8802ea8709a-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "c7d7d3af-d2d4-404c-ae30-a8802ea8709a" (UID: "c7d7d3af-d2d4-404c-ae30-a8802ea8709a"). InnerVolumeSpecName "swiftconf". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:12:35 crc kubenswrapper[4558]: I0120 17:12:35.292904 4558 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/c7d7d3af-d2d4-404c-ae30-a8802ea8709a-dispersionconf\") on node \"crc\" DevicePath \"\"" Jan 20 17:12:35 crc kubenswrapper[4558]: I0120 17:12:35.293200 4558 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/c7d7d3af-d2d4-404c-ae30-a8802ea8709a-ring-data-devices\") on node \"crc\" DevicePath \"\"" Jan 20 17:12:35 crc kubenswrapper[4558]: I0120 17:12:35.293480 4558 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/c7d7d3af-d2d4-404c-ae30-a8802ea8709a-swiftconf\") on node \"crc\" DevicePath \"\"" Jan 20 17:12:35 crc kubenswrapper[4558]: I0120 17:12:35.293542 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7d7d3af-d2d4-404c-ae30-a8802ea8709a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:12:35 crc kubenswrapper[4558]: I0120 17:12:35.293617 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c7d7d3af-d2d4-404c-ae30-a8802ea8709a-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:12:35 crc kubenswrapper[4558]: I0120 17:12:35.293680 4558 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/c7d7d3af-d2d4-404c-ae30-a8802ea8709a-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 20 17:12:35 crc kubenswrapper[4558]: I0120 17:12:35.293738 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xfpzf\" (UniqueName: \"kubernetes.io/projected/c7d7d3af-d2d4-404c-ae30-a8802ea8709a-kube-api-access-xfpzf\") on node \"crc\" DevicePath \"\"" Jan 20 17:12:35 crc kubenswrapper[4558]: I0120 17:12:35.828083 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-ring-rebalance-hj5p8" event={"ID":"c7d7d3af-d2d4-404c-ae30-a8802ea8709a","Type":"ContainerDied","Data":"c83e3b6d2533dffbc6053a2550cb22dbd971139a977e07caeac17ca594414953"} Jan 20 17:12:35 crc kubenswrapper[4558]: I0120 17:12:35.828134 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c83e3b6d2533dffbc6053a2550cb22dbd971139a977e07caeac17ca594414953" Jan 20 17:12:35 crc kubenswrapper[4558]: I0120 17:12:35.828097 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-ring-rebalance-hj5p8" Jan 20 17:12:36 crc kubenswrapper[4558]: I0120 17:12:36.171354 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/watcher-db-create-4k5sd" Jan 20 17:12:36 crc kubenswrapper[4558]: I0120 17:12:36.243045 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/watcher-be54-account-create-update-2fkrk" Jan 20 17:12:36 crc kubenswrapper[4558]: I0120 17:12:36.313820 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dc21aa2e-3cb0-43c7-b1d9-650741794867-operator-scripts\") pod \"dc21aa2e-3cb0-43c7-b1d9-650741794867\" (UID: \"dc21aa2e-3cb0-43c7-b1d9-650741794867\") " Jan 20 17:12:36 crc kubenswrapper[4558]: I0120 17:12:36.313908 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ft5bd\" (UniqueName: \"kubernetes.io/projected/034d0ad2-ef35-4499-97bf-22bb087bda19-kube-api-access-ft5bd\") pod \"034d0ad2-ef35-4499-97bf-22bb087bda19\" (UID: \"034d0ad2-ef35-4499-97bf-22bb087bda19\") " Jan 20 17:12:36 crc kubenswrapper[4558]: I0120 17:12:36.314006 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ptxt2\" (UniqueName: \"kubernetes.io/projected/dc21aa2e-3cb0-43c7-b1d9-650741794867-kube-api-access-ptxt2\") pod \"dc21aa2e-3cb0-43c7-b1d9-650741794867\" (UID: \"dc21aa2e-3cb0-43c7-b1d9-650741794867\") " Jan 20 17:12:36 crc kubenswrapper[4558]: I0120 17:12:36.314183 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/034d0ad2-ef35-4499-97bf-22bb087bda19-operator-scripts\") pod \"034d0ad2-ef35-4499-97bf-22bb087bda19\" (UID: \"034d0ad2-ef35-4499-97bf-22bb087bda19\") " Jan 20 17:12:36 crc kubenswrapper[4558]: I0120 17:12:36.314440 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dc21aa2e-3cb0-43c7-b1d9-650741794867-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "dc21aa2e-3cb0-43c7-b1d9-650741794867" (UID: "dc21aa2e-3cb0-43c7-b1d9-650741794867"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:12:36 crc kubenswrapper[4558]: I0120 17:12:36.314761 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/034d0ad2-ef35-4499-97bf-22bb087bda19-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "034d0ad2-ef35-4499-97bf-22bb087bda19" (UID: "034d0ad2-ef35-4499-97bf-22bb087bda19"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:12:36 crc kubenswrapper[4558]: I0120 17:12:36.314970 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dc21aa2e-3cb0-43c7-b1d9-650741794867-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:12:36 crc kubenswrapper[4558]: I0120 17:12:36.314987 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/034d0ad2-ef35-4499-97bf-22bb087bda19-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:12:36 crc kubenswrapper[4558]: I0120 17:12:36.318306 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/034d0ad2-ef35-4499-97bf-22bb087bda19-kube-api-access-ft5bd" (OuterVolumeSpecName: "kube-api-access-ft5bd") pod "034d0ad2-ef35-4499-97bf-22bb087bda19" (UID: "034d0ad2-ef35-4499-97bf-22bb087bda19"). InnerVolumeSpecName "kube-api-access-ft5bd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:12:36 crc kubenswrapper[4558]: I0120 17:12:36.318916 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dc21aa2e-3cb0-43c7-b1d9-650741794867-kube-api-access-ptxt2" (OuterVolumeSpecName: "kube-api-access-ptxt2") pod "dc21aa2e-3cb0-43c7-b1d9-650741794867" (UID: "dc21aa2e-3cb0-43c7-b1d9-650741794867"). InnerVolumeSpecName "kube-api-access-ptxt2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:12:36 crc kubenswrapper[4558]: I0120 17:12:36.416305 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ft5bd\" (UniqueName: \"kubernetes.io/projected/034d0ad2-ef35-4499-97bf-22bb087bda19-kube-api-access-ft5bd\") on node \"crc\" DevicePath \"\"" Jan 20 17:12:36 crc kubenswrapper[4558]: I0120 17:12:36.416336 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ptxt2\" (UniqueName: \"kubernetes.io/projected/dc21aa2e-3cb0-43c7-b1d9-650741794867-kube-api-access-ptxt2\") on node \"crc\" DevicePath \"\"" Jan 20 17:12:36 crc kubenswrapper[4558]: I0120 17:12:36.839465 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/watcher-be54-account-create-update-2fkrk" event={"ID":"034d0ad2-ef35-4499-97bf-22bb087bda19","Type":"ContainerDied","Data":"cdee0c1d72e0534a129fe7ec54ddee9701c209fc28fbcb14b82cff80ada0ef2f"} Jan 20 17:12:36 crc kubenswrapper[4558]: I0120 17:12:36.839494 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/watcher-be54-account-create-update-2fkrk" Jan 20 17:12:36 crc kubenswrapper[4558]: I0120 17:12:36.839516 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cdee0c1d72e0534a129fe7ec54ddee9701c209fc28fbcb14b82cff80ada0ef2f" Jan 20 17:12:36 crc kubenswrapper[4558]: I0120 17:12:36.843658 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/watcher-db-create-4k5sd" event={"ID":"dc21aa2e-3cb0-43c7-b1d9-650741794867","Type":"ContainerDied","Data":"399bcc6a5f7bb1f040210f804c2c33053680b7b0e770e0cc53e636c288980b1d"} Jan 20 17:12:36 crc kubenswrapper[4558]: I0120 17:12:36.843696 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="399bcc6a5f7bb1f040210f804c2c33053680b7b0e770e0cc53e636c288980b1d" Jan 20 17:12:36 crc kubenswrapper[4558]: I0120 17:12:36.843697 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/watcher-db-create-4k5sd" Jan 20 17:12:39 crc kubenswrapper[4558]: I0120 17:12:39.631904 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/root-account-create-update-9jhcc"] Jan 20 17:12:39 crc kubenswrapper[4558]: E0120 17:12:39.632528 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c7d7d3af-d2d4-404c-ae30-a8802ea8709a" containerName="swift-ring-rebalance" Jan 20 17:12:39 crc kubenswrapper[4558]: I0120 17:12:39.632544 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c7d7d3af-d2d4-404c-ae30-a8802ea8709a" containerName="swift-ring-rebalance" Jan 20 17:12:39 crc kubenswrapper[4558]: E0120 17:12:39.632577 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="034d0ad2-ef35-4499-97bf-22bb087bda19" containerName="mariadb-account-create-update" Jan 20 17:12:39 crc kubenswrapper[4558]: I0120 17:12:39.632583 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="034d0ad2-ef35-4499-97bf-22bb087bda19" containerName="mariadb-account-create-update" Jan 20 17:12:39 crc kubenswrapper[4558]: E0120 17:12:39.632604 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc21aa2e-3cb0-43c7-b1d9-650741794867" containerName="mariadb-database-create" Jan 20 17:12:39 crc kubenswrapper[4558]: I0120 17:12:39.632611 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc21aa2e-3cb0-43c7-b1d9-650741794867" containerName="mariadb-database-create" Jan 20 17:12:39 crc kubenswrapper[4558]: I0120 17:12:39.632780 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="034d0ad2-ef35-4499-97bf-22bb087bda19" containerName="mariadb-account-create-update" Jan 20 17:12:39 crc kubenswrapper[4558]: I0120 17:12:39.632791 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc21aa2e-3cb0-43c7-b1d9-650741794867" containerName="mariadb-database-create" Jan 20 17:12:39 crc kubenswrapper[4558]: I0120 17:12:39.632799 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c7d7d3af-d2d4-404c-ae30-a8802ea8709a" containerName="swift-ring-rebalance" Jan 20 17:12:39 crc kubenswrapper[4558]: I0120 17:12:39.633381 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-9jhcc" Jan 20 17:12:39 crc kubenswrapper[4558]: I0120 17:12:39.635246 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"openstack-mariadb-root-db-secret" Jan 20 17:12:39 crc kubenswrapper[4558]: I0120 17:12:39.649340 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-9jhcc"] Jan 20 17:12:39 crc kubenswrapper[4558]: I0120 17:12:39.773650 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3b9f240f-b49c-4184-b5f7-6cc6fa26dc13-operator-scripts\") pod \"root-account-create-update-9jhcc\" (UID: \"3b9f240f-b49c-4184-b5f7-6cc6fa26dc13\") " pod="openstack-kuttl-tests/root-account-create-update-9jhcc" Jan 20 17:12:39 crc kubenswrapper[4558]: I0120 17:12:39.773724 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k4ndf\" (UniqueName: \"kubernetes.io/projected/3b9f240f-b49c-4184-b5f7-6cc6fa26dc13-kube-api-access-k4ndf\") pod \"root-account-create-update-9jhcc\" (UID: \"3b9f240f-b49c-4184-b5f7-6cc6fa26dc13\") " pod="openstack-kuttl-tests/root-account-create-update-9jhcc" Jan 20 17:12:39 crc kubenswrapper[4558]: I0120 17:12:39.876223 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3b9f240f-b49c-4184-b5f7-6cc6fa26dc13-operator-scripts\") pod \"root-account-create-update-9jhcc\" (UID: \"3b9f240f-b49c-4184-b5f7-6cc6fa26dc13\") " pod="openstack-kuttl-tests/root-account-create-update-9jhcc" Jan 20 17:12:39 crc kubenswrapper[4558]: I0120 17:12:39.876320 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k4ndf\" (UniqueName: \"kubernetes.io/projected/3b9f240f-b49c-4184-b5f7-6cc6fa26dc13-kube-api-access-k4ndf\") pod \"root-account-create-update-9jhcc\" (UID: \"3b9f240f-b49c-4184-b5f7-6cc6fa26dc13\") " pod="openstack-kuttl-tests/root-account-create-update-9jhcc" Jan 20 17:12:39 crc kubenswrapper[4558]: I0120 17:12:39.878605 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3b9f240f-b49c-4184-b5f7-6cc6fa26dc13-operator-scripts\") pod \"root-account-create-update-9jhcc\" (UID: \"3b9f240f-b49c-4184-b5f7-6cc6fa26dc13\") " pod="openstack-kuttl-tests/root-account-create-update-9jhcc" Jan 20 17:12:39 crc kubenswrapper[4558]: I0120 17:12:39.894607 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k4ndf\" (UniqueName: \"kubernetes.io/projected/3b9f240f-b49c-4184-b5f7-6cc6fa26dc13-kube-api-access-k4ndf\") pod \"root-account-create-update-9jhcc\" (UID: \"3b9f240f-b49c-4184-b5f7-6cc6fa26dc13\") " pod="openstack-kuttl-tests/root-account-create-update-9jhcc" Jan 20 17:12:39 crc kubenswrapper[4558]: I0120 17:12:39.948552 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-9jhcc" Jan 20 17:12:40 crc kubenswrapper[4558]: I0120 17:12:40.284544 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5-etc-swift\") pod \"swift-storage-0\" (UID: \"16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:12:40 crc kubenswrapper[4558]: I0120 17:12:40.294137 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5-etc-swift\") pod \"swift-storage-0\" (UID: \"16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:12:40 crc kubenswrapper[4558]: I0120 17:12:40.348169 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-9jhcc"] Jan 20 17:12:40 crc kubenswrapper[4558]: I0120 17:12:40.543865 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:12:40 crc kubenswrapper[4558]: I0120 17:12:40.890376 4558 generic.go:334] "Generic (PLEG): container finished" podID="b099ccf1-da7f-4e4d-80cf-629b21cd774c" containerID="daad0853cc1011ae35e95a9f79a7ec7c1017c2cddf3225d7409dec8cb1b97163" exitCode=0 Jan 20 17:12:40 crc kubenswrapper[4558]: I0120 17:12:40.890468 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/prometheus-metric-storage-0" event={"ID":"b099ccf1-da7f-4e4d-80cf-629b21cd774c","Type":"ContainerDied","Data":"daad0853cc1011ae35e95a9f79a7ec7c1017c2cddf3225d7409dec8cb1b97163"} Jan 20 17:12:40 crc kubenswrapper[4558]: I0120 17:12:40.893544 4558 generic.go:334] "Generic (PLEG): container finished" podID="13a0a262-6f56-4cf9-9b0d-85110bcff1e7" containerID="f2c8e0e78787bca0b6f637ec67af7122ba24894258eb3e16a2aa600b8bbdc042" exitCode=0 Jan 20 17:12:40 crc kubenswrapper[4558]: I0120 17:12:40.893581 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/alertmanager-metric-storage-0" event={"ID":"13a0a262-6f56-4cf9-9b0d-85110bcff1e7","Type":"ContainerDied","Data":"f2c8e0e78787bca0b6f637ec67af7122ba24894258eb3e16a2aa600b8bbdc042"} Jan 20 17:12:40 crc kubenswrapper[4558]: I0120 17:12:40.897695 4558 generic.go:334] "Generic (PLEG): container finished" podID="3b9f240f-b49c-4184-b5f7-6cc6fa26dc13" containerID="6de11959c90a5cfbeca69efc88d3c21904e93473ebaeb4c8af59fc4d0cb196b9" exitCode=0 Jan 20 17:12:40 crc kubenswrapper[4558]: I0120 17:12:40.897731 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-9jhcc" event={"ID":"3b9f240f-b49c-4184-b5f7-6cc6fa26dc13","Type":"ContainerDied","Data":"6de11959c90a5cfbeca69efc88d3c21904e93473ebaeb4c8af59fc4d0cb196b9"} Jan 20 17:12:40 crc kubenswrapper[4558]: I0120 17:12:40.897756 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-9jhcc" event={"ID":"3b9f240f-b49c-4184-b5f7-6cc6fa26dc13","Type":"ContainerStarted","Data":"0b08e3bc9ad948aee56fac35a70b321858bc2c5174badde945cdc2fb5d50b8ab"} Jan 20 17:12:40 crc kubenswrapper[4558]: I0120 17:12:40.962382 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-storage-0"] Jan 20 17:12:40 crc kubenswrapper[4558]: I0120 17:12:40.981434 4558 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack-kuttl-tests/keystone-db-create-zcw7r"] Jan 20 17:12:40 crc kubenswrapper[4558]: I0120 17:12:40.982416 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-db-create-zcw7r" Jan 20 17:12:40 crc kubenswrapper[4558]: I0120 17:12:40.994947 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-db-create-zcw7r"] Jan 20 17:12:41 crc kubenswrapper[4558]: I0120 17:12:41.093980 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-3f8b-account-create-update-8lrlw"] Jan 20 17:12:41 crc kubenswrapper[4558]: I0120 17:12:41.095805 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-3f8b-account-create-update-8lrlw" Jan 20 17:12:41 crc kubenswrapper[4558]: I0120 17:12:41.098414 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-db-secret" Jan 20 17:12:41 crc kubenswrapper[4558]: I0120 17:12:41.100128 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-62vkc\" (UniqueName: \"kubernetes.io/projected/19216a3e-4417-45d8-a039-731d0bc938ca-kube-api-access-62vkc\") pod \"keystone-db-create-zcw7r\" (UID: \"19216a3e-4417-45d8-a039-731d0bc938ca\") " pod="openstack-kuttl-tests/keystone-db-create-zcw7r" Jan 20 17:12:41 crc kubenswrapper[4558]: I0120 17:12:41.100224 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/19216a3e-4417-45d8-a039-731d0bc938ca-operator-scripts\") pod \"keystone-db-create-zcw7r\" (UID: \"19216a3e-4417-45d8-a039-731d0bc938ca\") " pod="openstack-kuttl-tests/keystone-db-create-zcw7r" Jan 20 17:12:41 crc kubenswrapper[4558]: I0120 17:12:41.111475 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-3f8b-account-create-update-8lrlw"] Jan 20 17:12:41 crc kubenswrapper[4558]: I0120 17:12:41.205071 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/19216a3e-4417-45d8-a039-731d0bc938ca-operator-scripts\") pod \"keystone-db-create-zcw7r\" (UID: \"19216a3e-4417-45d8-a039-731d0bc938ca\") " pod="openstack-kuttl-tests/keystone-db-create-zcw7r" Jan 20 17:12:41 crc kubenswrapper[4558]: I0120 17:12:41.205433 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kms7g\" (UniqueName: \"kubernetes.io/projected/5cb4c24d-6741-4ee1-a1d5-c7d994499296-kube-api-access-kms7g\") pod \"keystone-3f8b-account-create-update-8lrlw\" (UID: \"5cb4c24d-6741-4ee1-a1d5-c7d994499296\") " pod="openstack-kuttl-tests/keystone-3f8b-account-create-update-8lrlw" Jan 20 17:12:41 crc kubenswrapper[4558]: I0120 17:12:41.205662 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-62vkc\" (UniqueName: \"kubernetes.io/projected/19216a3e-4417-45d8-a039-731d0bc938ca-kube-api-access-62vkc\") pod \"keystone-db-create-zcw7r\" (UID: \"19216a3e-4417-45d8-a039-731d0bc938ca\") " pod="openstack-kuttl-tests/keystone-db-create-zcw7r" Jan 20 17:12:41 crc kubenswrapper[4558]: I0120 17:12:41.205856 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/5cb4c24d-6741-4ee1-a1d5-c7d994499296-operator-scripts\") pod \"keystone-3f8b-account-create-update-8lrlw\" (UID: \"5cb4c24d-6741-4ee1-a1d5-c7d994499296\") " pod="openstack-kuttl-tests/keystone-3f8b-account-create-update-8lrlw" Jan 20 17:12:41 crc kubenswrapper[4558]: I0120 17:12:41.205907 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/19216a3e-4417-45d8-a039-731d0bc938ca-operator-scripts\") pod \"keystone-db-create-zcw7r\" (UID: \"19216a3e-4417-45d8-a039-731d0bc938ca\") " pod="openstack-kuttl-tests/keystone-db-create-zcw7r" Jan 20 17:12:41 crc kubenswrapper[4558]: I0120 17:12:41.228662 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-62vkc\" (UniqueName: \"kubernetes.io/projected/19216a3e-4417-45d8-a039-731d0bc938ca-kube-api-access-62vkc\") pod \"keystone-db-create-zcw7r\" (UID: \"19216a3e-4417-45d8-a039-731d0bc938ca\") " pod="openstack-kuttl-tests/keystone-db-create-zcw7r" Jan 20 17:12:41 crc kubenswrapper[4558]: I0120 17:12:41.295796 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/placement-db-create-8nrk4"] Jan 20 17:12:41 crc kubenswrapper[4558]: I0120 17:12:41.299330 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-db-create-8nrk4" Jan 20 17:12:41 crc kubenswrapper[4558]: I0120 17:12:41.303441 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-db-create-zcw7r" Jan 20 17:12:41 crc kubenswrapper[4558]: I0120 17:12:41.309418 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5cb4c24d-6741-4ee1-a1d5-c7d994499296-operator-scripts\") pod \"keystone-3f8b-account-create-update-8lrlw\" (UID: \"5cb4c24d-6741-4ee1-a1d5-c7d994499296\") " pod="openstack-kuttl-tests/keystone-3f8b-account-create-update-8lrlw" Jan 20 17:12:41 crc kubenswrapper[4558]: I0120 17:12:41.309814 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kms7g\" (UniqueName: \"kubernetes.io/projected/5cb4c24d-6741-4ee1-a1d5-c7d994499296-kube-api-access-kms7g\") pod \"keystone-3f8b-account-create-update-8lrlw\" (UID: \"5cb4c24d-6741-4ee1-a1d5-c7d994499296\") " pod="openstack-kuttl-tests/keystone-3f8b-account-create-update-8lrlw" Jan 20 17:12:41 crc kubenswrapper[4558]: I0120 17:12:41.313416 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5cb4c24d-6741-4ee1-a1d5-c7d994499296-operator-scripts\") pod \"keystone-3f8b-account-create-update-8lrlw\" (UID: \"5cb4c24d-6741-4ee1-a1d5-c7d994499296\") " pod="openstack-kuttl-tests/keystone-3f8b-account-create-update-8lrlw" Jan 20 17:12:41 crc kubenswrapper[4558]: I0120 17:12:41.334112 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-db-create-8nrk4"] Jan 20 17:12:41 crc kubenswrapper[4558]: I0120 17:12:41.349462 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kms7g\" (UniqueName: \"kubernetes.io/projected/5cb4c24d-6741-4ee1-a1d5-c7d994499296-kube-api-access-kms7g\") pod \"keystone-3f8b-account-create-update-8lrlw\" (UID: \"5cb4c24d-6741-4ee1-a1d5-c7d994499296\") " pod="openstack-kuttl-tests/keystone-3f8b-account-create-update-8lrlw" Jan 20 17:12:41 crc kubenswrapper[4558]: I0120 
17:12:41.404176 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/placement-e79a-account-create-update-b7lf6"] Jan 20 17:12:41 crc kubenswrapper[4558]: I0120 17:12:41.405347 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-e79a-account-create-update-b7lf6" Jan 20 17:12:41 crc kubenswrapper[4558]: I0120 17:12:41.409047 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"placement-db-secret" Jan 20 17:12:41 crc kubenswrapper[4558]: I0120 17:12:41.411563 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ndwwz\" (UniqueName: \"kubernetes.io/projected/3ff7ded6-4fd1-42cc-9999-eea7e800be38-kube-api-access-ndwwz\") pod \"placement-db-create-8nrk4\" (UID: \"3ff7ded6-4fd1-42cc-9999-eea7e800be38\") " pod="openstack-kuttl-tests/placement-db-create-8nrk4" Jan 20 17:12:41 crc kubenswrapper[4558]: I0120 17:12:41.411667 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3ff7ded6-4fd1-42cc-9999-eea7e800be38-operator-scripts\") pod \"placement-db-create-8nrk4\" (UID: \"3ff7ded6-4fd1-42cc-9999-eea7e800be38\") " pod="openstack-kuttl-tests/placement-db-create-8nrk4" Jan 20 17:12:41 crc kubenswrapper[4558]: I0120 17:12:41.417976 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-e79a-account-create-update-b7lf6"] Jan 20 17:12:41 crc kubenswrapper[4558]: I0120 17:12:41.430280 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-3f8b-account-create-update-8lrlw" Jan 20 17:12:41 crc kubenswrapper[4558]: I0120 17:12:41.493329 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-db-create-4tdmg"] Jan 20 17:12:41 crc kubenswrapper[4558]: I0120 17:12:41.494392 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-db-create-4tdmg" Jan 20 17:12:41 crc kubenswrapper[4558]: I0120 17:12:41.502000 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-db-create-4tdmg"] Jan 20 17:12:41 crc kubenswrapper[4558]: I0120 17:12:41.513418 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vwpwv\" (UniqueName: \"kubernetes.io/projected/26dff2aa-11c9-494a-bf96-9ed83afd07b6-kube-api-access-vwpwv\") pod \"placement-e79a-account-create-update-b7lf6\" (UID: \"26dff2aa-11c9-494a-bf96-9ed83afd07b6\") " pod="openstack-kuttl-tests/placement-e79a-account-create-update-b7lf6" Jan 20 17:12:41 crc kubenswrapper[4558]: I0120 17:12:41.513542 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/26dff2aa-11c9-494a-bf96-9ed83afd07b6-operator-scripts\") pod \"placement-e79a-account-create-update-b7lf6\" (UID: \"26dff2aa-11c9-494a-bf96-9ed83afd07b6\") " pod="openstack-kuttl-tests/placement-e79a-account-create-update-b7lf6" Jan 20 17:12:41 crc kubenswrapper[4558]: I0120 17:12:41.513600 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ndwwz\" (UniqueName: \"kubernetes.io/projected/3ff7ded6-4fd1-42cc-9999-eea7e800be38-kube-api-access-ndwwz\") pod \"placement-db-create-8nrk4\" (UID: \"3ff7ded6-4fd1-42cc-9999-eea7e800be38\") " pod="openstack-kuttl-tests/placement-db-create-8nrk4" Jan 20 17:12:41 crc kubenswrapper[4558]: I0120 17:12:41.513675 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3ff7ded6-4fd1-42cc-9999-eea7e800be38-operator-scripts\") pod \"placement-db-create-8nrk4\" (UID: \"3ff7ded6-4fd1-42cc-9999-eea7e800be38\") " pod="openstack-kuttl-tests/placement-db-create-8nrk4" Jan 20 17:12:41 crc kubenswrapper[4558]: I0120 17:12:41.514549 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3ff7ded6-4fd1-42cc-9999-eea7e800be38-operator-scripts\") pod \"placement-db-create-8nrk4\" (UID: \"3ff7ded6-4fd1-42cc-9999-eea7e800be38\") " pod="openstack-kuttl-tests/placement-db-create-8nrk4" Jan 20 17:12:41 crc kubenswrapper[4558]: I0120 17:12:41.531368 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ndwwz\" (UniqueName: \"kubernetes.io/projected/3ff7ded6-4fd1-42cc-9999-eea7e800be38-kube-api-access-ndwwz\") pod \"placement-db-create-8nrk4\" (UID: \"3ff7ded6-4fd1-42cc-9999-eea7e800be38\") " pod="openstack-kuttl-tests/placement-db-create-8nrk4" Jan 20 17:12:41 crc kubenswrapper[4558]: I0120 17:12:41.607059 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-d429-account-create-update-4qp4q"] Jan 20 17:12:41 crc kubenswrapper[4558]: I0120 17:12:41.610038 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-d429-account-create-update-4qp4q" Jan 20 17:12:41 crc kubenswrapper[4558]: I0120 17:12:41.611881 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-db-secret" Jan 20 17:12:41 crc kubenswrapper[4558]: I0120 17:12:41.615357 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vwpwv\" (UniqueName: \"kubernetes.io/projected/26dff2aa-11c9-494a-bf96-9ed83afd07b6-kube-api-access-vwpwv\") pod \"placement-e79a-account-create-update-b7lf6\" (UID: \"26dff2aa-11c9-494a-bf96-9ed83afd07b6\") " pod="openstack-kuttl-tests/placement-e79a-account-create-update-b7lf6" Jan 20 17:12:41 crc kubenswrapper[4558]: I0120 17:12:41.615459 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6cgf8\" (UniqueName: \"kubernetes.io/projected/170b963c-2b54-4497-82b5-03ac18e07ffe-kube-api-access-6cgf8\") pod \"glance-db-create-4tdmg\" (UID: \"170b963c-2b54-4497-82b5-03ac18e07ffe\") " pod="openstack-kuttl-tests/glance-db-create-4tdmg" Jan 20 17:12:41 crc kubenswrapper[4558]: I0120 17:12:41.615498 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/170b963c-2b54-4497-82b5-03ac18e07ffe-operator-scripts\") pod \"glance-db-create-4tdmg\" (UID: \"170b963c-2b54-4497-82b5-03ac18e07ffe\") " pod="openstack-kuttl-tests/glance-db-create-4tdmg" Jan 20 17:12:41 crc kubenswrapper[4558]: I0120 17:12:41.615552 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/26dff2aa-11c9-494a-bf96-9ed83afd07b6-operator-scripts\") pod \"placement-e79a-account-create-update-b7lf6\" (UID: \"26dff2aa-11c9-494a-bf96-9ed83afd07b6\") " pod="openstack-kuttl-tests/placement-e79a-account-create-update-b7lf6" Jan 20 17:12:41 crc kubenswrapper[4558]: I0120 17:12:41.616752 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/26dff2aa-11c9-494a-bf96-9ed83afd07b6-operator-scripts\") pod \"placement-e79a-account-create-update-b7lf6\" (UID: \"26dff2aa-11c9-494a-bf96-9ed83afd07b6\") " pod="openstack-kuttl-tests/placement-e79a-account-create-update-b7lf6" Jan 20 17:12:41 crc kubenswrapper[4558]: I0120 17:12:41.617711 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-d429-account-create-update-4qp4q"] Jan 20 17:12:41 crc kubenswrapper[4558]: I0120 17:12:41.631676 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vwpwv\" (UniqueName: \"kubernetes.io/projected/26dff2aa-11c9-494a-bf96-9ed83afd07b6-kube-api-access-vwpwv\") pod \"placement-e79a-account-create-update-b7lf6\" (UID: \"26dff2aa-11c9-494a-bf96-9ed83afd07b6\") " pod="openstack-kuttl-tests/placement-e79a-account-create-update-b7lf6" Jan 20 17:12:41 crc kubenswrapper[4558]: I0120 17:12:41.658563 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-db-create-8nrk4" Jan 20 17:12:41 crc kubenswrapper[4558]: I0120 17:12:41.706073 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-db-create-zcw7r"] Jan 20 17:12:41 crc kubenswrapper[4558]: I0120 17:12:41.717222 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/581b4670-ceb9-4190-be1d-2ca27c49b472-operator-scripts\") pod \"glance-d429-account-create-update-4qp4q\" (UID: \"581b4670-ceb9-4190-be1d-2ca27c49b472\") " pod="openstack-kuttl-tests/glance-d429-account-create-update-4qp4q" Jan 20 17:12:41 crc kubenswrapper[4558]: I0120 17:12:41.717421 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6cgf8\" (UniqueName: \"kubernetes.io/projected/170b963c-2b54-4497-82b5-03ac18e07ffe-kube-api-access-6cgf8\") pod \"glance-db-create-4tdmg\" (UID: \"170b963c-2b54-4497-82b5-03ac18e07ffe\") " pod="openstack-kuttl-tests/glance-db-create-4tdmg" Jan 20 17:12:41 crc kubenswrapper[4558]: I0120 17:12:41.717471 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/170b963c-2b54-4497-82b5-03ac18e07ffe-operator-scripts\") pod \"glance-db-create-4tdmg\" (UID: \"170b963c-2b54-4497-82b5-03ac18e07ffe\") " pod="openstack-kuttl-tests/glance-db-create-4tdmg" Jan 20 17:12:41 crc kubenswrapper[4558]: I0120 17:12:41.717569 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fxdwh\" (UniqueName: \"kubernetes.io/projected/581b4670-ceb9-4190-be1d-2ca27c49b472-kube-api-access-fxdwh\") pod \"glance-d429-account-create-update-4qp4q\" (UID: \"581b4670-ceb9-4190-be1d-2ca27c49b472\") " pod="openstack-kuttl-tests/glance-d429-account-create-update-4qp4q" Jan 20 17:12:41 crc kubenswrapper[4558]: I0120 17:12:41.718508 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/170b963c-2b54-4497-82b5-03ac18e07ffe-operator-scripts\") pod \"glance-db-create-4tdmg\" (UID: \"170b963c-2b54-4497-82b5-03ac18e07ffe\") " pod="openstack-kuttl-tests/glance-db-create-4tdmg" Jan 20 17:12:41 crc kubenswrapper[4558]: I0120 17:12:41.720585 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-e79a-account-create-update-b7lf6" Jan 20 17:12:41 crc kubenswrapper[4558]: W0120 17:12:41.733271 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod19216a3e_4417_45d8_a039_731d0bc938ca.slice/crio-742ca9fd9595273a902d0b476797ada3f6a5b0f41ffd71e77205ee027f232665 WatchSource:0}: Error finding container 742ca9fd9595273a902d0b476797ada3f6a5b0f41ffd71e77205ee027f232665: Status 404 returned error can't find the container with id 742ca9fd9595273a902d0b476797ada3f6a5b0f41ffd71e77205ee027f232665 Jan 20 17:12:41 crc kubenswrapper[4558]: I0120 17:12:41.737812 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6cgf8\" (UniqueName: \"kubernetes.io/projected/170b963c-2b54-4497-82b5-03ac18e07ffe-kube-api-access-6cgf8\") pod \"glance-db-create-4tdmg\" (UID: \"170b963c-2b54-4497-82b5-03ac18e07ffe\") " pod="openstack-kuttl-tests/glance-db-create-4tdmg" Jan 20 17:12:41 crc kubenswrapper[4558]: I0120 17:12:41.825368 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/581b4670-ceb9-4190-be1d-2ca27c49b472-operator-scripts\") pod \"glance-d429-account-create-update-4qp4q\" (UID: \"581b4670-ceb9-4190-be1d-2ca27c49b472\") " pod="openstack-kuttl-tests/glance-d429-account-create-update-4qp4q" Jan 20 17:12:41 crc kubenswrapper[4558]: I0120 17:12:41.825543 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fxdwh\" (UniqueName: \"kubernetes.io/projected/581b4670-ceb9-4190-be1d-2ca27c49b472-kube-api-access-fxdwh\") pod \"glance-d429-account-create-update-4qp4q\" (UID: \"581b4670-ceb9-4190-be1d-2ca27c49b472\") " pod="openstack-kuttl-tests/glance-d429-account-create-update-4qp4q" Jan 20 17:12:41 crc kubenswrapper[4558]: I0120 17:12:41.826526 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/581b4670-ceb9-4190-be1d-2ca27c49b472-operator-scripts\") pod \"glance-d429-account-create-update-4qp4q\" (UID: \"581b4670-ceb9-4190-be1d-2ca27c49b472\") " pod="openstack-kuttl-tests/glance-d429-account-create-update-4qp4q" Jan 20 17:12:41 crc kubenswrapper[4558]: I0120 17:12:41.827658 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-db-create-4tdmg" Jan 20 17:12:41 crc kubenswrapper[4558]: I0120 17:12:41.848298 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fxdwh\" (UniqueName: \"kubernetes.io/projected/581b4670-ceb9-4190-be1d-2ca27c49b472-kube-api-access-fxdwh\") pod \"glance-d429-account-create-update-4qp4q\" (UID: \"581b4670-ceb9-4190-be1d-2ca27c49b472\") " pod="openstack-kuttl-tests/glance-d429-account-create-update-4qp4q" Jan 20 17:12:41 crc kubenswrapper[4558]: I0120 17:12:41.867901 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-3f8b-account-create-update-8lrlw"] Jan 20 17:12:41 crc kubenswrapper[4558]: W0120 17:12:41.894948 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5cb4c24d_6741_4ee1_a1d5_c7d994499296.slice/crio-8353d8ec7a6c83d790e8254f11c91ca14d6f7eb13ca2ca26d41b7eba63730254 WatchSource:0}: Error finding container 8353d8ec7a6c83d790e8254f11c91ca14d6f7eb13ca2ca26d41b7eba63730254: Status 404 returned error can't find the container with id 8353d8ec7a6c83d790e8254f11c91ca14d6f7eb13ca2ca26d41b7eba63730254 Jan 20 17:12:41 crc kubenswrapper[4558]: I0120 17:12:41.932667 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-d429-account-create-update-4qp4q" Jan 20 17:12:41 crc kubenswrapper[4558]: I0120 17:12:41.956406 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-3f8b-account-create-update-8lrlw" event={"ID":"5cb4c24d-6741-4ee1-a1d5-c7d994499296","Type":"ContainerStarted","Data":"8353d8ec7a6c83d790e8254f11c91ca14d6f7eb13ca2ca26d41b7eba63730254"} Jan 20 17:12:41 crc kubenswrapper[4558]: I0120 17:12:41.959103 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-create-zcw7r" event={"ID":"19216a3e-4417-45d8-a039-731d0bc938ca","Type":"ContainerStarted","Data":"742ca9fd9595273a902d0b476797ada3f6a5b0f41ffd71e77205ee027f232665"} Jan 20 17:12:41 crc kubenswrapper[4558]: I0120 17:12:41.961576 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5","Type":"ContainerStarted","Data":"3042c88a8b0acc604598545abaf566b25ac21482df62b0a3a116f13f8566a2af"} Jan 20 17:12:41 crc kubenswrapper[4558]: I0120 17:12:41.961597 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5","Type":"ContainerStarted","Data":"a3d60956ff7af8b2370d1374fe03e956eb5d0ac246c55b421bb4276908799a3f"} Jan 20 17:12:41 crc kubenswrapper[4558]: I0120 17:12:41.961608 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5","Type":"ContainerStarted","Data":"287de52d7a1cbdfd30b38ebc5ce6836c6e7462c638fb7b5cb4514ff2fd994ca5"} Jan 20 17:12:41 crc kubenswrapper[4558]: I0120 17:12:41.961617 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5","Type":"ContainerStarted","Data":"d34ab3fc8aadd821e92e2bf92349870b0e59665705b0c279da24491935dbfa40"} Jan 20 17:12:41 crc kubenswrapper[4558]: I0120 17:12:41.961626 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" 
event={"ID":"16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5","Type":"ContainerStarted","Data":"1cd0a2990c13e48d1af8da127be8da6b524c7be98b42a509147ce0af41f1c26c"} Jan 20 17:12:42 crc kubenswrapper[4558]: I0120 17:12:42.115113 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-db-create-8nrk4"] Jan 20 17:12:42 crc kubenswrapper[4558]: I0120 17:12:42.293663 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-e79a-account-create-update-b7lf6"] Jan 20 17:12:42 crc kubenswrapper[4558]: W0120 17:12:42.309969 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod26dff2aa_11c9_494a_bf96_9ed83afd07b6.slice/crio-d384fb5439712f60b68a9b5c4f4c49cc9769ecf2e12b7f41af032df93e5be467 WatchSource:0}: Error finding container d384fb5439712f60b68a9b5c4f4c49cc9769ecf2e12b7f41af032df93e5be467: Status 404 returned error can't find the container with id d384fb5439712f60b68a9b5c4f4c49cc9769ecf2e12b7f41af032df93e5be467 Jan 20 17:12:42 crc kubenswrapper[4558]: I0120 17:12:42.430193 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-db-create-4tdmg"] Jan 20 17:12:42 crc kubenswrapper[4558]: I0120 17:12:42.444055 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-9jhcc" Jan 20 17:12:42 crc kubenswrapper[4558]: I0120 17:12:42.500577 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-d429-account-create-update-4qp4q"] Jan 20 17:12:42 crc kubenswrapper[4558]: W0120 17:12:42.511267 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod581b4670_ceb9_4190_be1d_2ca27c49b472.slice/crio-885e0d900baf337f658e5953b8af358f2049e4a95d46140ee876dee97a18f979 WatchSource:0}: Error finding container 885e0d900baf337f658e5953b8af358f2049e4a95d46140ee876dee97a18f979: Status 404 returned error can't find the container with id 885e0d900baf337f658e5953b8af358f2049e4a95d46140ee876dee97a18f979 Jan 20 17:12:42 crc kubenswrapper[4558]: I0120 17:12:42.539274 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3b9f240f-b49c-4184-b5f7-6cc6fa26dc13-operator-scripts\") pod \"3b9f240f-b49c-4184-b5f7-6cc6fa26dc13\" (UID: \"3b9f240f-b49c-4184-b5f7-6cc6fa26dc13\") " Jan 20 17:12:42 crc kubenswrapper[4558]: I0120 17:12:42.539447 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k4ndf\" (UniqueName: \"kubernetes.io/projected/3b9f240f-b49c-4184-b5f7-6cc6fa26dc13-kube-api-access-k4ndf\") pod \"3b9f240f-b49c-4184-b5f7-6cc6fa26dc13\" (UID: \"3b9f240f-b49c-4184-b5f7-6cc6fa26dc13\") " Jan 20 17:12:42 crc kubenswrapper[4558]: I0120 17:12:42.540682 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3b9f240f-b49c-4184-b5f7-6cc6fa26dc13-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "3b9f240f-b49c-4184-b5f7-6cc6fa26dc13" (UID: "3b9f240f-b49c-4184-b5f7-6cc6fa26dc13"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:12:42 crc kubenswrapper[4558]: I0120 17:12:42.549265 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3b9f240f-b49c-4184-b5f7-6cc6fa26dc13-kube-api-access-k4ndf" (OuterVolumeSpecName: "kube-api-access-k4ndf") pod "3b9f240f-b49c-4184-b5f7-6cc6fa26dc13" (UID: "3b9f240f-b49c-4184-b5f7-6cc6fa26dc13"). InnerVolumeSpecName "kube-api-access-k4ndf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:12:42 crc kubenswrapper[4558]: I0120 17:12:42.641755 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k4ndf\" (UniqueName: \"kubernetes.io/projected/3b9f240f-b49c-4184-b5f7-6cc6fa26dc13-kube-api-access-k4ndf\") on node \"crc\" DevicePath \"\"" Jan 20 17:12:42 crc kubenswrapper[4558]: I0120 17:12:42.642085 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3b9f240f-b49c-4184-b5f7-6cc6fa26dc13-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:12:42 crc kubenswrapper[4558]: I0120 17:12:42.975190 4558 generic.go:334] "Generic (PLEG): container finished" podID="581b4670-ceb9-4190-be1d-2ca27c49b472" containerID="6be9f1e75267073ffc4217f778b1284a97ccddc602c74c02869b9a557b447c2f" exitCode=0 Jan 20 17:12:42 crc kubenswrapper[4558]: I0120 17:12:42.975315 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-d429-account-create-update-4qp4q" event={"ID":"581b4670-ceb9-4190-be1d-2ca27c49b472","Type":"ContainerDied","Data":"6be9f1e75267073ffc4217f778b1284a97ccddc602c74c02869b9a557b447c2f"} Jan 20 17:12:42 crc kubenswrapper[4558]: I0120 17:12:42.975411 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-d429-account-create-update-4qp4q" event={"ID":"581b4670-ceb9-4190-be1d-2ca27c49b472","Type":"ContainerStarted","Data":"885e0d900baf337f658e5953b8af358f2049e4a95d46140ee876dee97a18f979"} Jan 20 17:12:42 crc kubenswrapper[4558]: I0120 17:12:42.978536 4558 generic.go:334] "Generic (PLEG): container finished" podID="5cb4c24d-6741-4ee1-a1d5-c7d994499296" containerID="e148c477809aecae4b26fafb6fa46c29c5861e22d0ce0d0c52f698f488c73169" exitCode=0 Jan 20 17:12:42 crc kubenswrapper[4558]: I0120 17:12:42.978646 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-3f8b-account-create-update-8lrlw" event={"ID":"5cb4c24d-6741-4ee1-a1d5-c7d994499296","Type":"ContainerDied","Data":"e148c477809aecae4b26fafb6fa46c29c5861e22d0ce0d0c52f698f488c73169"} Jan 20 17:12:42 crc kubenswrapper[4558]: I0120 17:12:42.981986 4558 generic.go:334] "Generic (PLEG): container finished" podID="19216a3e-4417-45d8-a039-731d0bc938ca" containerID="933310e106b177e33387ea977d5513b4b20d06f30928f4803750ca49dab87ab7" exitCode=0 Jan 20 17:12:42 crc kubenswrapper[4558]: I0120 17:12:42.982038 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-create-zcw7r" event={"ID":"19216a3e-4417-45d8-a039-731d0bc938ca","Type":"ContainerDied","Data":"933310e106b177e33387ea977d5513b4b20d06f30928f4803750ca49dab87ab7"} Jan 20 17:12:42 crc kubenswrapper[4558]: I0120 17:12:42.987126 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5","Type":"ContainerStarted","Data":"b448c6813dc21e1609badba9c414f16cfa90d4bc879f828fc03dc5e2d8c5c01d"} Jan 20 17:12:42 crc kubenswrapper[4558]: I0120 17:12:42.987184 4558 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5","Type":"ContainerStarted","Data":"f4843ed0e8d1373bb2683da97ab873974d82c40ca96c66ac0e816208a3870873"} Jan 20 17:12:42 crc kubenswrapper[4558]: I0120 17:12:42.987196 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5","Type":"ContainerStarted","Data":"42f3e6080a5aaa7c6b331d0120550bd3df0ded5cc2d89acdcce42e051c81d082"} Jan 20 17:12:42 crc kubenswrapper[4558]: I0120 17:12:42.987205 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5","Type":"ContainerStarted","Data":"0264294b8782deaf18a7c21e66a3cd60ef8bc25c146077c5f8dd36370f4b9f31"} Jan 20 17:12:42 crc kubenswrapper[4558]: I0120 17:12:42.991352 4558 generic.go:334] "Generic (PLEG): container finished" podID="3ff7ded6-4fd1-42cc-9999-eea7e800be38" containerID="efad400cecbafa3e01864aab70dc1aa8234d0684805bf1a431ddc104e6872701" exitCode=0 Jan 20 17:12:42 crc kubenswrapper[4558]: I0120 17:12:42.991419 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-create-8nrk4" event={"ID":"3ff7ded6-4fd1-42cc-9999-eea7e800be38","Type":"ContainerDied","Data":"efad400cecbafa3e01864aab70dc1aa8234d0684805bf1a431ddc104e6872701"} Jan 20 17:12:42 crc kubenswrapper[4558]: I0120 17:12:42.991479 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-create-8nrk4" event={"ID":"3ff7ded6-4fd1-42cc-9999-eea7e800be38","Type":"ContainerStarted","Data":"cd3f0c187a93e4739593e406500bb408afb05b1120352552975cfd704b7bdaea"} Jan 20 17:12:42 crc kubenswrapper[4558]: I0120 17:12:42.995946 4558 generic.go:334] "Generic (PLEG): container finished" podID="26dff2aa-11c9-494a-bf96-9ed83afd07b6" containerID="bc15a614c0fd478376937dcd3b1eb44701ee9b454a54a11ed2ce7c51605d407a" exitCode=0 Jan 20 17:12:42 crc kubenswrapper[4558]: I0120 17:12:42.996008 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-e79a-account-create-update-b7lf6" event={"ID":"26dff2aa-11c9-494a-bf96-9ed83afd07b6","Type":"ContainerDied","Data":"bc15a614c0fd478376937dcd3b1eb44701ee9b454a54a11ed2ce7c51605d407a"} Jan 20 17:12:42 crc kubenswrapper[4558]: I0120 17:12:42.996027 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-e79a-account-create-update-b7lf6" event={"ID":"26dff2aa-11c9-494a-bf96-9ed83afd07b6","Type":"ContainerStarted","Data":"d384fb5439712f60b68a9b5c4f4c49cc9769ecf2e12b7f41af032df93e5be467"} Jan 20 17:12:42 crc kubenswrapper[4558]: I0120 17:12:42.997833 4558 generic.go:334] "Generic (PLEG): container finished" podID="170b963c-2b54-4497-82b5-03ac18e07ffe" containerID="2df608c682d3185fad4a76b76ebef175530127ff27ccf6e17867915e943b9657" exitCode=0 Jan 20 17:12:42 crc kubenswrapper[4558]: I0120 17:12:42.997993 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-create-4tdmg" event={"ID":"170b963c-2b54-4497-82b5-03ac18e07ffe","Type":"ContainerDied","Data":"2df608c682d3185fad4a76b76ebef175530127ff27ccf6e17867915e943b9657"} Jan 20 17:12:42 crc kubenswrapper[4558]: I0120 17:12:42.998016 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-create-4tdmg" 
event={"ID":"170b963c-2b54-4497-82b5-03ac18e07ffe","Type":"ContainerStarted","Data":"d39093c9d36859b62e5fa40e756e87d913c245bb0d16ad63d9d21629bd08122a"} Jan 20 17:12:43 crc kubenswrapper[4558]: I0120 17:12:43.002507 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-9jhcc" event={"ID":"3b9f240f-b49c-4184-b5f7-6cc6fa26dc13","Type":"ContainerDied","Data":"0b08e3bc9ad948aee56fac35a70b321858bc2c5174badde945cdc2fb5d50b8ab"} Jan 20 17:12:43 crc kubenswrapper[4558]: I0120 17:12:43.002535 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0b08e3bc9ad948aee56fac35a70b321858bc2c5174badde945cdc2fb5d50b8ab" Jan 20 17:12:43 crc kubenswrapper[4558]: I0120 17:12:43.002582 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-9jhcc" Jan 20 17:12:44 crc kubenswrapper[4558]: I0120 17:12:44.022676 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5","Type":"ContainerStarted","Data":"e9011c98afeb74bd61067467d261960e71d5f4ce02439018a751b87779a9a042"} Jan 20 17:12:44 crc kubenswrapper[4558]: I0120 17:12:44.022839 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5","Type":"ContainerStarted","Data":"0ffad205c7e0b9a3e7a3ed7b7494d0c934e33af468fd39ec2025c4bc685f9b2a"} Jan 20 17:12:44 crc kubenswrapper[4558]: I0120 17:12:44.022990 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5","Type":"ContainerStarted","Data":"e8487e2e852bfbcd349f6ab70c8f1c9163850c213b76cd2ed1af8305cfe8bbf6"} Jan 20 17:12:44 crc kubenswrapper[4558]: I0120 17:12:44.355094 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-d429-account-create-update-4qp4q" Jan 20 17:12:44 crc kubenswrapper[4558]: I0120 17:12:44.496087 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fxdwh\" (UniqueName: \"kubernetes.io/projected/581b4670-ceb9-4190-be1d-2ca27c49b472-kube-api-access-fxdwh\") pod \"581b4670-ceb9-4190-be1d-2ca27c49b472\" (UID: \"581b4670-ceb9-4190-be1d-2ca27c49b472\") " Jan 20 17:12:44 crc kubenswrapper[4558]: I0120 17:12:44.496702 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/581b4670-ceb9-4190-be1d-2ca27c49b472-operator-scripts\") pod \"581b4670-ceb9-4190-be1d-2ca27c49b472\" (UID: \"581b4670-ceb9-4190-be1d-2ca27c49b472\") " Jan 20 17:12:44 crc kubenswrapper[4558]: I0120 17:12:44.497234 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/581b4670-ceb9-4190-be1d-2ca27c49b472-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "581b4670-ceb9-4190-be1d-2ca27c49b472" (UID: "581b4670-ceb9-4190-be1d-2ca27c49b472"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:12:44 crc kubenswrapper[4558]: I0120 17:12:44.500941 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/581b4670-ceb9-4190-be1d-2ca27c49b472-kube-api-access-fxdwh" (OuterVolumeSpecName: "kube-api-access-fxdwh") pod "581b4670-ceb9-4190-be1d-2ca27c49b472" (UID: "581b4670-ceb9-4190-be1d-2ca27c49b472"). InnerVolumeSpecName "kube-api-access-fxdwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:12:44 crc kubenswrapper[4558]: I0120 17:12:44.508431 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/581b4670-ceb9-4190-be1d-2ca27c49b472-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:12:44 crc kubenswrapper[4558]: I0120 17:12:44.580494 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-e79a-account-create-update-b7lf6" Jan 20 17:12:44 crc kubenswrapper[4558]: I0120 17:12:44.610699 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fxdwh\" (UniqueName: \"kubernetes.io/projected/581b4670-ceb9-4190-be1d-2ca27c49b472-kube-api-access-fxdwh\") on node \"crc\" DevicePath \"\"" Jan 20 17:12:44 crc kubenswrapper[4558]: I0120 17:12:44.643582 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-3f8b-account-create-update-8lrlw" Jan 20 17:12:44 crc kubenswrapper[4558]: I0120 17:12:44.644281 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-db-create-4tdmg" Jan 20 17:12:44 crc kubenswrapper[4558]: I0120 17:12:44.648855 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-db-create-zcw7r" Jan 20 17:12:44 crc kubenswrapper[4558]: I0120 17:12:44.662519 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-db-create-8nrk4" Jan 20 17:12:44 crc kubenswrapper[4558]: I0120 17:12:44.711822 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/26dff2aa-11c9-494a-bf96-9ed83afd07b6-operator-scripts\") pod \"26dff2aa-11c9-494a-bf96-9ed83afd07b6\" (UID: \"26dff2aa-11c9-494a-bf96-9ed83afd07b6\") " Jan 20 17:12:44 crc kubenswrapper[4558]: I0120 17:12:44.712025 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vwpwv\" (UniqueName: \"kubernetes.io/projected/26dff2aa-11c9-494a-bf96-9ed83afd07b6-kube-api-access-vwpwv\") pod \"26dff2aa-11c9-494a-bf96-9ed83afd07b6\" (UID: \"26dff2aa-11c9-494a-bf96-9ed83afd07b6\") " Jan 20 17:12:44 crc kubenswrapper[4558]: I0120 17:12:44.712455 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/26dff2aa-11c9-494a-bf96-9ed83afd07b6-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "26dff2aa-11c9-494a-bf96-9ed83afd07b6" (UID: "26dff2aa-11c9-494a-bf96-9ed83afd07b6"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:12:44 crc kubenswrapper[4558]: I0120 17:12:44.714310 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/26dff2aa-11c9-494a-bf96-9ed83afd07b6-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:12:44 crc kubenswrapper[4558]: I0120 17:12:44.716124 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/26dff2aa-11c9-494a-bf96-9ed83afd07b6-kube-api-access-vwpwv" (OuterVolumeSpecName: "kube-api-access-vwpwv") pod "26dff2aa-11c9-494a-bf96-9ed83afd07b6" (UID: "26dff2aa-11c9-494a-bf96-9ed83afd07b6"). InnerVolumeSpecName "kube-api-access-vwpwv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:12:44 crc kubenswrapper[4558]: I0120 17:12:44.815033 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-62vkc\" (UniqueName: \"kubernetes.io/projected/19216a3e-4417-45d8-a039-731d0bc938ca-kube-api-access-62vkc\") pod \"19216a3e-4417-45d8-a039-731d0bc938ca\" (UID: \"19216a3e-4417-45d8-a039-731d0bc938ca\") " Jan 20 17:12:44 crc kubenswrapper[4558]: I0120 17:12:44.815119 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/170b963c-2b54-4497-82b5-03ac18e07ffe-operator-scripts\") pod \"170b963c-2b54-4497-82b5-03ac18e07ffe\" (UID: \"170b963c-2b54-4497-82b5-03ac18e07ffe\") " Jan 20 17:12:44 crc kubenswrapper[4558]: I0120 17:12:44.815253 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5cb4c24d-6741-4ee1-a1d5-c7d994499296-operator-scripts\") pod \"5cb4c24d-6741-4ee1-a1d5-c7d994499296\" (UID: \"5cb4c24d-6741-4ee1-a1d5-c7d994499296\") " Jan 20 17:12:44 crc kubenswrapper[4558]: I0120 17:12:44.815276 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ndwwz\" (UniqueName: \"kubernetes.io/projected/3ff7ded6-4fd1-42cc-9999-eea7e800be38-kube-api-access-ndwwz\") pod \"3ff7ded6-4fd1-42cc-9999-eea7e800be38\" (UID: \"3ff7ded6-4fd1-42cc-9999-eea7e800be38\") " Jan 20 17:12:44 crc kubenswrapper[4558]: I0120 17:12:44.815324 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3ff7ded6-4fd1-42cc-9999-eea7e800be38-operator-scripts\") pod \"3ff7ded6-4fd1-42cc-9999-eea7e800be38\" (UID: \"3ff7ded6-4fd1-42cc-9999-eea7e800be38\") " Jan 20 17:12:44 crc kubenswrapper[4558]: I0120 17:12:44.815349 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kms7g\" (UniqueName: \"kubernetes.io/projected/5cb4c24d-6741-4ee1-a1d5-c7d994499296-kube-api-access-kms7g\") pod \"5cb4c24d-6741-4ee1-a1d5-c7d994499296\" (UID: \"5cb4c24d-6741-4ee1-a1d5-c7d994499296\") " Jan 20 17:12:44 crc kubenswrapper[4558]: I0120 17:12:44.815378 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6cgf8\" (UniqueName: \"kubernetes.io/projected/170b963c-2b54-4497-82b5-03ac18e07ffe-kube-api-access-6cgf8\") pod \"170b963c-2b54-4497-82b5-03ac18e07ffe\" (UID: \"170b963c-2b54-4497-82b5-03ac18e07ffe\") " Jan 20 17:12:44 crc kubenswrapper[4558]: I0120 17:12:44.815488 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/19216a3e-4417-45d8-a039-731d0bc938ca-operator-scripts\") pod \"19216a3e-4417-45d8-a039-731d0bc938ca\" (UID: \"19216a3e-4417-45d8-a039-731d0bc938ca\") " Jan 20 17:12:44 crc kubenswrapper[4558]: I0120 17:12:44.815871 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vwpwv\" (UniqueName: \"kubernetes.io/projected/26dff2aa-11c9-494a-bf96-9ed83afd07b6-kube-api-access-vwpwv\") on node \"crc\" DevicePath \"\"" Jan 20 17:12:44 crc kubenswrapper[4558]: I0120 17:12:44.816441 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/19216a3e-4417-45d8-a039-731d0bc938ca-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "19216a3e-4417-45d8-a039-731d0bc938ca" (UID: "19216a3e-4417-45d8-a039-731d0bc938ca"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:12:44 crc kubenswrapper[4558]: I0120 17:12:44.816633 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3ff7ded6-4fd1-42cc-9999-eea7e800be38-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "3ff7ded6-4fd1-42cc-9999-eea7e800be38" (UID: "3ff7ded6-4fd1-42cc-9999-eea7e800be38"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:12:44 crc kubenswrapper[4558]: I0120 17:12:44.817966 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/170b963c-2b54-4497-82b5-03ac18e07ffe-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "170b963c-2b54-4497-82b5-03ac18e07ffe" (UID: "170b963c-2b54-4497-82b5-03ac18e07ffe"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:12:44 crc kubenswrapper[4558]: I0120 17:12:44.818239 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5cb4c24d-6741-4ee1-a1d5-c7d994499296-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "5cb4c24d-6741-4ee1-a1d5-c7d994499296" (UID: "5cb4c24d-6741-4ee1-a1d5-c7d994499296"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:12:44 crc kubenswrapper[4558]: I0120 17:12:44.900326 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/19216a3e-4417-45d8-a039-731d0bc938ca-kube-api-access-62vkc" (OuterVolumeSpecName: "kube-api-access-62vkc") pod "19216a3e-4417-45d8-a039-731d0bc938ca" (UID: "19216a3e-4417-45d8-a039-731d0bc938ca"). InnerVolumeSpecName "kube-api-access-62vkc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:12:44 crc kubenswrapper[4558]: I0120 17:12:44.900391 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ff7ded6-4fd1-42cc-9999-eea7e800be38-kube-api-access-ndwwz" (OuterVolumeSpecName: "kube-api-access-ndwwz") pod "3ff7ded6-4fd1-42cc-9999-eea7e800be38" (UID: "3ff7ded6-4fd1-42cc-9999-eea7e800be38"). InnerVolumeSpecName "kube-api-access-ndwwz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:12:44 crc kubenswrapper[4558]: I0120 17:12:44.900503 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5cb4c24d-6741-4ee1-a1d5-c7d994499296-kube-api-access-kms7g" (OuterVolumeSpecName: "kube-api-access-kms7g") pod "5cb4c24d-6741-4ee1-a1d5-c7d994499296" (UID: "5cb4c24d-6741-4ee1-a1d5-c7d994499296"). 
InnerVolumeSpecName "kube-api-access-kms7g". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:12:44 crc kubenswrapper[4558]: I0120 17:12:44.903320 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/170b963c-2b54-4497-82b5-03ac18e07ffe-kube-api-access-6cgf8" (OuterVolumeSpecName: "kube-api-access-6cgf8") pod "170b963c-2b54-4497-82b5-03ac18e07ffe" (UID: "170b963c-2b54-4497-82b5-03ac18e07ffe"). InnerVolumeSpecName "kube-api-access-6cgf8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:12:44 crc kubenswrapper[4558]: I0120 17:12:44.918555 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5cb4c24d-6741-4ee1-a1d5-c7d994499296-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:12:44 crc kubenswrapper[4558]: I0120 17:12:44.918585 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ndwwz\" (UniqueName: \"kubernetes.io/projected/3ff7ded6-4fd1-42cc-9999-eea7e800be38-kube-api-access-ndwwz\") on node \"crc\" DevicePath \"\"" Jan 20 17:12:44 crc kubenswrapper[4558]: I0120 17:12:44.918598 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3ff7ded6-4fd1-42cc-9999-eea7e800be38-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:12:44 crc kubenswrapper[4558]: I0120 17:12:44.918609 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kms7g\" (UniqueName: \"kubernetes.io/projected/5cb4c24d-6741-4ee1-a1d5-c7d994499296-kube-api-access-kms7g\") on node \"crc\" DevicePath \"\"" Jan 20 17:12:44 crc kubenswrapper[4558]: I0120 17:12:44.918621 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6cgf8\" (UniqueName: \"kubernetes.io/projected/170b963c-2b54-4497-82b5-03ac18e07ffe-kube-api-access-6cgf8\") on node \"crc\" DevicePath \"\"" Jan 20 17:12:44 crc kubenswrapper[4558]: I0120 17:12:44.918630 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/19216a3e-4417-45d8-a039-731d0bc938ca-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:12:44 crc kubenswrapper[4558]: I0120 17:12:44.918639 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-62vkc\" (UniqueName: \"kubernetes.io/projected/19216a3e-4417-45d8-a039-731d0bc938ca-kube-api-access-62vkc\") on node \"crc\" DevicePath \"\"" Jan 20 17:12:44 crc kubenswrapper[4558]: I0120 17:12:44.918647 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/170b963c-2b54-4497-82b5-03ac18e07ffe-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:12:45 crc kubenswrapper[4558]: I0120 17:12:45.031450 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-create-zcw7r" event={"ID":"19216a3e-4417-45d8-a039-731d0bc938ca","Type":"ContainerDied","Data":"742ca9fd9595273a902d0b476797ada3f6a5b0f41ffd71e77205ee027f232665"} Jan 20 17:12:45 crc kubenswrapper[4558]: I0120 17:12:45.031751 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="742ca9fd9595273a902d0b476797ada3f6a5b0f41ffd71e77205ee027f232665" Jan 20 17:12:45 crc kubenswrapper[4558]: I0120 17:12:45.031518 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-db-create-zcw7r" Jan 20 17:12:45 crc kubenswrapper[4558]: I0120 17:12:45.037955 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5","Type":"ContainerStarted","Data":"99c4bacb5b9768a4caf100f0cb1002d9cece8b806e2823da791412ee816ffda8"} Jan 20 17:12:45 crc kubenswrapper[4558]: I0120 17:12:45.038056 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5","Type":"ContainerStarted","Data":"f85ec25adaf01df7a7853545852faa186e28a1806e5769092b47b3631e9f6a39"} Jan 20 17:12:45 crc kubenswrapper[4558]: I0120 17:12:45.038124 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5","Type":"ContainerStarted","Data":"a05f333c41b9d8ef7c02ce57e5a66f44cf679c3dd3e8cbc037a0abf4cb9c1076"} Jan 20 17:12:45 crc kubenswrapper[4558]: I0120 17:12:45.038213 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5","Type":"ContainerStarted","Data":"351e496453cf020b285bf45cc107bc71f86bd776035bd828428d72e1c7062529"} Jan 20 17:12:45 crc kubenswrapper[4558]: I0120 17:12:45.041964 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/alertmanager-metric-storage-0" event={"ID":"13a0a262-6f56-4cf9-9b0d-85110bcff1e7","Type":"ContainerStarted","Data":"626d2d61214153aec0449b76e316d8177bdd3708c258717f7715a91a609543f1"} Jan 20 17:12:45 crc kubenswrapper[4558]: I0120 17:12:45.043686 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-create-8nrk4" event={"ID":"3ff7ded6-4fd1-42cc-9999-eea7e800be38","Type":"ContainerDied","Data":"cd3f0c187a93e4739593e406500bb408afb05b1120352552975cfd704b7bdaea"} Jan 20 17:12:45 crc kubenswrapper[4558]: I0120 17:12:45.043815 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cd3f0c187a93e4739593e406500bb408afb05b1120352552975cfd704b7bdaea" Jan 20 17:12:45 crc kubenswrapper[4558]: I0120 17:12:45.043893 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-db-create-8nrk4" Jan 20 17:12:45 crc kubenswrapper[4558]: I0120 17:12:45.045550 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-e79a-account-create-update-b7lf6" event={"ID":"26dff2aa-11c9-494a-bf96-9ed83afd07b6","Type":"ContainerDied","Data":"d384fb5439712f60b68a9b5c4f4c49cc9769ecf2e12b7f41af032df93e5be467"} Jan 20 17:12:45 crc kubenswrapper[4558]: I0120 17:12:45.045626 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d384fb5439712f60b68a9b5c4f4c49cc9769ecf2e12b7f41af032df93e5be467" Jan 20 17:12:45 crc kubenswrapper[4558]: I0120 17:12:45.045705 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-e79a-account-create-update-b7lf6" Jan 20 17:12:45 crc kubenswrapper[4558]: I0120 17:12:45.053917 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-db-create-4tdmg" Jan 20 17:12:45 crc kubenswrapper[4558]: I0120 17:12:45.054028 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-create-4tdmg" event={"ID":"170b963c-2b54-4497-82b5-03ac18e07ffe","Type":"ContainerDied","Data":"d39093c9d36859b62e5fa40e756e87d913c245bb0d16ad63d9d21629bd08122a"} Jan 20 17:12:45 crc kubenswrapper[4558]: I0120 17:12:45.054117 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d39093c9d36859b62e5fa40e756e87d913c245bb0d16ad63d9d21629bd08122a" Jan 20 17:12:45 crc kubenswrapper[4558]: I0120 17:12:45.064221 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-d429-account-create-update-4qp4q" event={"ID":"581b4670-ceb9-4190-be1d-2ca27c49b472","Type":"ContainerDied","Data":"885e0d900baf337f658e5953b8af358f2049e4a95d46140ee876dee97a18f979"} Jan 20 17:12:45 crc kubenswrapper[4558]: I0120 17:12:45.064254 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="885e0d900baf337f658e5953b8af358f2049e4a95d46140ee876dee97a18f979" Jan 20 17:12:45 crc kubenswrapper[4558]: I0120 17:12:45.064313 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-d429-account-create-update-4qp4q" Jan 20 17:12:45 crc kubenswrapper[4558]: I0120 17:12:45.076859 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-3f8b-account-create-update-8lrlw" event={"ID":"5cb4c24d-6741-4ee1-a1d5-c7d994499296","Type":"ContainerDied","Data":"8353d8ec7a6c83d790e8254f11c91ca14d6f7eb13ca2ca26d41b7eba63730254"} Jan 20 17:12:45 crc kubenswrapper[4558]: I0120 17:12:45.076898 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-3f8b-account-create-update-8lrlw" Jan 20 17:12:45 crc kubenswrapper[4558]: I0120 17:12:45.076936 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8353d8ec7a6c83d790e8254f11c91ca14d6f7eb13ca2ca26d41b7eba63730254" Jan 20 17:12:45 crc kubenswrapper[4558]: I0120 17:12:45.078466 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/swift-storage-0" podStartSLOduration=22.078454376 podStartE2EDuration="22.078454376s" podCreationTimestamp="2026-01-20 17:12:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:12:45.074110066 +0000 UTC m=+1858.834448033" watchObservedRunningTime="2026-01-20 17:12:45.078454376 +0000 UTC m=+1858.838792333" Jan 20 17:12:45 crc kubenswrapper[4558]: I0120 17:12:45.200712 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-996c57c45-7hr9m"] Jan 20 17:12:45 crc kubenswrapper[4558]: E0120 17:12:45.200996 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19216a3e-4417-45d8-a039-731d0bc938ca" containerName="mariadb-database-create" Jan 20 17:12:45 crc kubenswrapper[4558]: I0120 17:12:45.201014 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="19216a3e-4417-45d8-a039-731d0bc938ca" containerName="mariadb-database-create" Jan 20 17:12:45 crc kubenswrapper[4558]: E0120 17:12:45.201033 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="170b963c-2b54-4497-82b5-03ac18e07ffe" containerName="mariadb-database-create" Jan 20 17:12:45 crc kubenswrapper[4558]: I0120 17:12:45.201040 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="170b963c-2b54-4497-82b5-03ac18e07ffe" containerName="mariadb-database-create" Jan 20 17:12:45 crc kubenswrapper[4558]: E0120 17:12:45.201058 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5cb4c24d-6741-4ee1-a1d5-c7d994499296" containerName="mariadb-account-create-update" Jan 20 17:12:45 crc kubenswrapper[4558]: I0120 17:12:45.201064 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="5cb4c24d-6741-4ee1-a1d5-c7d994499296" containerName="mariadb-account-create-update" Jan 20 17:12:45 crc kubenswrapper[4558]: E0120 17:12:45.201076 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26dff2aa-11c9-494a-bf96-9ed83afd07b6" containerName="mariadb-account-create-update" Jan 20 17:12:45 crc kubenswrapper[4558]: I0120 17:12:45.201081 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="26dff2aa-11c9-494a-bf96-9ed83afd07b6" containerName="mariadb-account-create-update" Jan 20 17:12:45 crc kubenswrapper[4558]: E0120 17:12:45.201088 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b9f240f-b49c-4184-b5f7-6cc6fa26dc13" containerName="mariadb-account-create-update" Jan 20 17:12:45 crc kubenswrapper[4558]: I0120 17:12:45.201094 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b9f240f-b49c-4184-b5f7-6cc6fa26dc13" containerName="mariadb-account-create-update" Jan 20 17:12:45 crc kubenswrapper[4558]: E0120 17:12:45.201101 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ff7ded6-4fd1-42cc-9999-eea7e800be38" containerName="mariadb-database-create" Jan 20 17:12:45 crc kubenswrapper[4558]: I0120 17:12:45.201106 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ff7ded6-4fd1-42cc-9999-eea7e800be38" 
containerName="mariadb-database-create" Jan 20 17:12:45 crc kubenswrapper[4558]: E0120 17:12:45.201116 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="581b4670-ceb9-4190-be1d-2ca27c49b472" containerName="mariadb-account-create-update" Jan 20 17:12:45 crc kubenswrapper[4558]: I0120 17:12:45.201121 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="581b4670-ceb9-4190-be1d-2ca27c49b472" containerName="mariadb-account-create-update" Jan 20 17:12:45 crc kubenswrapper[4558]: I0120 17:12:45.201276 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="19216a3e-4417-45d8-a039-731d0bc938ca" containerName="mariadb-database-create" Jan 20 17:12:45 crc kubenswrapper[4558]: I0120 17:12:45.201290 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="5cb4c24d-6741-4ee1-a1d5-c7d994499296" containerName="mariadb-account-create-update" Jan 20 17:12:45 crc kubenswrapper[4558]: I0120 17:12:45.201299 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="170b963c-2b54-4497-82b5-03ac18e07ffe" containerName="mariadb-database-create" Jan 20 17:12:45 crc kubenswrapper[4558]: I0120 17:12:45.201310 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="26dff2aa-11c9-494a-bf96-9ed83afd07b6" containerName="mariadb-account-create-update" Jan 20 17:12:45 crc kubenswrapper[4558]: I0120 17:12:45.201322 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="3b9f240f-b49c-4184-b5f7-6cc6fa26dc13" containerName="mariadb-account-create-update" Jan 20 17:12:45 crc kubenswrapper[4558]: I0120 17:12:45.201330 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="581b4670-ceb9-4190-be1d-2ca27c49b472" containerName="mariadb-account-create-update" Jan 20 17:12:45 crc kubenswrapper[4558]: I0120 17:12:45.201337 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="3ff7ded6-4fd1-42cc-9999-eea7e800be38" containerName="mariadb-database-create" Jan 20 17:12:45 crc kubenswrapper[4558]: I0120 17:12:45.202145 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-996c57c45-7hr9m" Jan 20 17:12:45 crc kubenswrapper[4558]: I0120 17:12:45.204261 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"dns-swift-storage-0" Jan 20 17:12:45 crc kubenswrapper[4558]: I0120 17:12:45.210431 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-996c57c45-7hr9m"] Jan 20 17:12:45 crc kubenswrapper[4558]: I0120 17:12:45.323662 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6e03bea8-761e-4d6d-bcaf-f4397b3ec310-config\") pod \"dnsmasq-dnsmasq-996c57c45-7hr9m\" (UID: \"6e03bea8-761e-4d6d-bcaf-f4397b3ec310\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-996c57c45-7hr9m" Jan 20 17:12:45 crc kubenswrapper[4558]: I0120 17:12:45.323730 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-62v9h\" (UniqueName: \"kubernetes.io/projected/6e03bea8-761e-4d6d-bcaf-f4397b3ec310-kube-api-access-62v9h\") pod \"dnsmasq-dnsmasq-996c57c45-7hr9m\" (UID: \"6e03bea8-761e-4d6d-bcaf-f4397b3ec310\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-996c57c45-7hr9m" Jan 20 17:12:45 crc kubenswrapper[4558]: I0120 17:12:45.323888 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6e03bea8-761e-4d6d-bcaf-f4397b3ec310-dns-swift-storage-0\") pod \"dnsmasq-dnsmasq-996c57c45-7hr9m\" (UID: \"6e03bea8-761e-4d6d-bcaf-f4397b3ec310\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-996c57c45-7hr9m" Jan 20 17:12:45 crc kubenswrapper[4558]: I0120 17:12:45.323922 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/6e03bea8-761e-4d6d-bcaf-f4397b3ec310-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-996c57c45-7hr9m\" (UID: \"6e03bea8-761e-4d6d-bcaf-f4397b3ec310\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-996c57c45-7hr9m" Jan 20 17:12:45 crc kubenswrapper[4558]: I0120 17:12:45.425359 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-62v9h\" (UniqueName: \"kubernetes.io/projected/6e03bea8-761e-4d6d-bcaf-f4397b3ec310-kube-api-access-62v9h\") pod \"dnsmasq-dnsmasq-996c57c45-7hr9m\" (UID: \"6e03bea8-761e-4d6d-bcaf-f4397b3ec310\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-996c57c45-7hr9m" Jan 20 17:12:45 crc kubenswrapper[4558]: I0120 17:12:45.425502 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6e03bea8-761e-4d6d-bcaf-f4397b3ec310-dns-swift-storage-0\") pod \"dnsmasq-dnsmasq-996c57c45-7hr9m\" (UID: \"6e03bea8-761e-4d6d-bcaf-f4397b3ec310\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-996c57c45-7hr9m" Jan 20 17:12:45 crc kubenswrapper[4558]: I0120 17:12:45.425524 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/6e03bea8-761e-4d6d-bcaf-f4397b3ec310-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-996c57c45-7hr9m\" (UID: \"6e03bea8-761e-4d6d-bcaf-f4397b3ec310\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-996c57c45-7hr9m" Jan 20 17:12:45 crc kubenswrapper[4558]: I0120 17:12:45.425567 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" 
(UniqueName: \"kubernetes.io/configmap/6e03bea8-761e-4d6d-bcaf-f4397b3ec310-config\") pod \"dnsmasq-dnsmasq-996c57c45-7hr9m\" (UID: \"6e03bea8-761e-4d6d-bcaf-f4397b3ec310\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-996c57c45-7hr9m" Jan 20 17:12:45 crc kubenswrapper[4558]: I0120 17:12:45.426457 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6e03bea8-761e-4d6d-bcaf-f4397b3ec310-config\") pod \"dnsmasq-dnsmasq-996c57c45-7hr9m\" (UID: \"6e03bea8-761e-4d6d-bcaf-f4397b3ec310\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-996c57c45-7hr9m" Jan 20 17:12:45 crc kubenswrapper[4558]: I0120 17:12:45.426480 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/6e03bea8-761e-4d6d-bcaf-f4397b3ec310-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-996c57c45-7hr9m\" (UID: \"6e03bea8-761e-4d6d-bcaf-f4397b3ec310\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-996c57c45-7hr9m" Jan 20 17:12:45 crc kubenswrapper[4558]: I0120 17:12:45.426555 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6e03bea8-761e-4d6d-bcaf-f4397b3ec310-dns-swift-storage-0\") pod \"dnsmasq-dnsmasq-996c57c45-7hr9m\" (UID: \"6e03bea8-761e-4d6d-bcaf-f4397b3ec310\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-996c57c45-7hr9m" Jan 20 17:12:45 crc kubenswrapper[4558]: I0120 17:12:45.440654 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-62v9h\" (UniqueName: \"kubernetes.io/projected/6e03bea8-761e-4d6d-bcaf-f4397b3ec310-kube-api-access-62v9h\") pod \"dnsmasq-dnsmasq-996c57c45-7hr9m\" (UID: \"6e03bea8-761e-4d6d-bcaf-f4397b3ec310\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-996c57c45-7hr9m" Jan 20 17:12:45 crc kubenswrapper[4558]: I0120 17:12:45.514731 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-996c57c45-7hr9m" Jan 20 17:12:45 crc kubenswrapper[4558]: I0120 17:12:45.968859 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-996c57c45-7hr9m"] Jan 20 17:12:45 crc kubenswrapper[4558]: W0120 17:12:45.983728 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6e03bea8_761e_4d6d_bcaf_f4397b3ec310.slice/crio-b73a4ba20b3eb0fb213ad6d73eec657ba4caec4f0bcac84dbbc9ab20976ab8d7 WatchSource:0}: Error finding container b73a4ba20b3eb0fb213ad6d73eec657ba4caec4f0bcac84dbbc9ab20976ab8d7: Status 404 returned error can't find the container with id b73a4ba20b3eb0fb213ad6d73eec657ba4caec4f0bcac84dbbc9ab20976ab8d7 Jan 20 17:12:46 crc kubenswrapper[4558]: I0120 17:12:46.092188 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-996c57c45-7hr9m" event={"ID":"6e03bea8-761e-4d6d-bcaf-f4397b3ec310","Type":"ContainerStarted","Data":"b73a4ba20b3eb0fb213ad6d73eec657ba4caec4f0bcac84dbbc9ab20976ab8d7"} Jan 20 17:12:46 crc kubenswrapper[4558]: I0120 17:12:46.833988 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-db-sync-6r4hq"] Jan 20 17:12:46 crc kubenswrapper[4558]: I0120 17:12:46.835600 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-db-sync-6r4hq" Jan 20 17:12:46 crc kubenswrapper[4558]: I0120 17:12:46.837438 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-config-data" Jan 20 17:12:46 crc kubenswrapper[4558]: I0120 17:12:46.837514 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-glance-dockercfg-5qnnr" Jan 20 17:12:46 crc kubenswrapper[4558]: I0120 17:12:46.847591 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-db-sync-6r4hq"] Jan 20 17:12:46 crc kubenswrapper[4558]: I0120 17:12:46.959796 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7b45248-f499-4b04-9591-e7268e39ac5c-config-data\") pod \"glance-db-sync-6r4hq\" (UID: \"a7b45248-f499-4b04-9591-e7268e39ac5c\") " pod="openstack-kuttl-tests/glance-db-sync-6r4hq" Jan 20 17:12:46 crc kubenswrapper[4558]: I0120 17:12:46.959935 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7b45248-f499-4b04-9591-e7268e39ac5c-combined-ca-bundle\") pod \"glance-db-sync-6r4hq\" (UID: \"a7b45248-f499-4b04-9591-e7268e39ac5c\") " pod="openstack-kuttl-tests/glance-db-sync-6r4hq" Jan 20 17:12:46 crc kubenswrapper[4558]: I0120 17:12:46.960182 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a7b45248-f499-4b04-9591-e7268e39ac5c-db-sync-config-data\") pod \"glance-db-sync-6r4hq\" (UID: \"a7b45248-f499-4b04-9591-e7268e39ac5c\") " pod="openstack-kuttl-tests/glance-db-sync-6r4hq" Jan 20 17:12:46 crc kubenswrapper[4558]: I0120 17:12:46.960215 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f24qx\" (UniqueName: \"kubernetes.io/projected/a7b45248-f499-4b04-9591-e7268e39ac5c-kube-api-access-f24qx\") pod \"glance-db-sync-6r4hq\" (UID: \"a7b45248-f499-4b04-9591-e7268e39ac5c\") " pod="openstack-kuttl-tests/glance-db-sync-6r4hq" Jan 20 17:12:47 crc kubenswrapper[4558]: I0120 17:12:47.062255 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7b45248-f499-4b04-9591-e7268e39ac5c-config-data\") pod \"glance-db-sync-6r4hq\" (UID: \"a7b45248-f499-4b04-9591-e7268e39ac5c\") " pod="openstack-kuttl-tests/glance-db-sync-6r4hq" Jan 20 17:12:47 crc kubenswrapper[4558]: I0120 17:12:47.063284 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7b45248-f499-4b04-9591-e7268e39ac5c-combined-ca-bundle\") pod \"glance-db-sync-6r4hq\" (UID: \"a7b45248-f499-4b04-9591-e7268e39ac5c\") " pod="openstack-kuttl-tests/glance-db-sync-6r4hq" Jan 20 17:12:47 crc kubenswrapper[4558]: I0120 17:12:47.063515 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a7b45248-f499-4b04-9591-e7268e39ac5c-db-sync-config-data\") pod \"glance-db-sync-6r4hq\" (UID: \"a7b45248-f499-4b04-9591-e7268e39ac5c\") " pod="openstack-kuttl-tests/glance-db-sync-6r4hq" Jan 20 17:12:47 crc kubenswrapper[4558]: I0120 17:12:47.063542 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-f24qx\" (UniqueName: \"kubernetes.io/projected/a7b45248-f499-4b04-9591-e7268e39ac5c-kube-api-access-f24qx\") pod \"glance-db-sync-6r4hq\" (UID: \"a7b45248-f499-4b04-9591-e7268e39ac5c\") " pod="openstack-kuttl-tests/glance-db-sync-6r4hq" Jan 20 17:12:47 crc kubenswrapper[4558]: I0120 17:12:47.067028 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7b45248-f499-4b04-9591-e7268e39ac5c-combined-ca-bundle\") pod \"glance-db-sync-6r4hq\" (UID: \"a7b45248-f499-4b04-9591-e7268e39ac5c\") " pod="openstack-kuttl-tests/glance-db-sync-6r4hq" Jan 20 17:12:47 crc kubenswrapper[4558]: I0120 17:12:47.072660 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7b45248-f499-4b04-9591-e7268e39ac5c-config-data\") pod \"glance-db-sync-6r4hq\" (UID: \"a7b45248-f499-4b04-9591-e7268e39ac5c\") " pod="openstack-kuttl-tests/glance-db-sync-6r4hq" Jan 20 17:12:47 crc kubenswrapper[4558]: I0120 17:12:47.075036 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a7b45248-f499-4b04-9591-e7268e39ac5c-db-sync-config-data\") pod \"glance-db-sync-6r4hq\" (UID: \"a7b45248-f499-4b04-9591-e7268e39ac5c\") " pod="openstack-kuttl-tests/glance-db-sync-6r4hq" Jan 20 17:12:47 crc kubenswrapper[4558]: I0120 17:12:47.077969 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f24qx\" (UniqueName: \"kubernetes.io/projected/a7b45248-f499-4b04-9591-e7268e39ac5c-kube-api-access-f24qx\") pod \"glance-db-sync-6r4hq\" (UID: \"a7b45248-f499-4b04-9591-e7268e39ac5c\") " pod="openstack-kuttl-tests/glance-db-sync-6r4hq" Jan 20 17:12:47 crc kubenswrapper[4558]: I0120 17:12:47.117203 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/alertmanager-metric-storage-0" event={"ID":"13a0a262-6f56-4cf9-9b0d-85110bcff1e7","Type":"ContainerStarted","Data":"abbc56b6e1300066bf6f3c76afd3d47d0ea00a3c7a4a4d427f6e57e82969bee0"} Jan 20 17:12:47 crc kubenswrapper[4558]: I0120 17:12:47.117461 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/alertmanager-metric-storage-0" Jan 20 17:12:47 crc kubenswrapper[4558]: I0120 17:12:47.120888 4558 generic.go:334] "Generic (PLEG): container finished" podID="6e03bea8-761e-4d6d-bcaf-f4397b3ec310" containerID="cd19794387027d0145a1234413d0cd07984694f7a7bd98888d3a026bd9839c77" exitCode=0 Jan 20 17:12:47 crc kubenswrapper[4558]: I0120 17:12:47.120943 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-996c57c45-7hr9m" event={"ID":"6e03bea8-761e-4d6d-bcaf-f4397b3ec310","Type":"ContainerDied","Data":"cd19794387027d0145a1234413d0cd07984694f7a7bd98888d3a026bd9839c77"} Jan 20 17:12:47 crc kubenswrapper[4558]: I0120 17:12:47.124219 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/alertmanager-metric-storage-0" Jan 20 17:12:47 crc kubenswrapper[4558]: I0120 17:12:47.147780 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/alertmanager-metric-storage-0" podStartSLOduration=5.056264569 podStartE2EDuration="20.147765054s" podCreationTimestamp="2026-01-20 17:12:27 +0000 UTC" firstStartedPulling="2026-01-20 17:12:28.918588906 +0000 UTC m=+1842.678926873" lastFinishedPulling="2026-01-20 17:12:44.010089392 +0000 UTC m=+1857.770427358" 
observedRunningTime="2026-01-20 17:12:47.136470325 +0000 UTC m=+1860.896808293" watchObservedRunningTime="2026-01-20 17:12:47.147765054 +0000 UTC m=+1860.908103021" Jan 20 17:12:47 crc kubenswrapper[4558]: I0120 17:12:47.156909 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-db-sync-6r4hq" Jan 20 17:12:48 crc kubenswrapper[4558]: I0120 17:12:48.131050 4558 generic.go:334] "Generic (PLEG): container finished" podID="fd2edfa4-790f-49d7-9e32-29571e490aaf" containerID="428595a29d94d4ba528e047a5b7e52f2fb67f15ff604242487cb42e3df5dbd56" exitCode=0 Jan 20 17:12:48 crc kubenswrapper[4558]: I0120 17:12:48.131136 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-0" event={"ID":"fd2edfa4-790f-49d7-9e32-29571e490aaf","Type":"ContainerDied","Data":"428595a29d94d4ba528e047a5b7e52f2fb67f15ff604242487cb42e3df5dbd56"} Jan 20 17:12:49 crc kubenswrapper[4558]: I0120 17:12:49.393795 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-db-sync-6r4hq"] Jan 20 17:12:50 crc kubenswrapper[4558]: I0120 17:12:50.157435 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-996c57c45-7hr9m" event={"ID":"6e03bea8-761e-4d6d-bcaf-f4397b3ec310","Type":"ContainerStarted","Data":"f95d7322fcfd2ddfaa632b4101e4e708456d08454d53d6519b66c6fc9742857f"} Jan 20 17:12:50 crc kubenswrapper[4558]: I0120 17:12:50.158569 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-996c57c45-7hr9m" Jan 20 17:12:50 crc kubenswrapper[4558]: I0120 17:12:50.160362 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-0" event={"ID":"fd2edfa4-790f-49d7-9e32-29571e490aaf","Type":"ContainerStarted","Data":"2e090c322fd27930306ecae5c98d22a52137ae21234138d9f2df2a9c227f3a1b"} Jan 20 17:12:50 crc kubenswrapper[4558]: I0120 17:12:50.161275 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:12:50 crc kubenswrapper[4558]: I0120 17:12:50.163011 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/prometheus-metric-storage-0" event={"ID":"b099ccf1-da7f-4e4d-80cf-629b21cd774c","Type":"ContainerStarted","Data":"42bef42ec418e9c5fec49130ad8e153b3d3494179d54ac7a788a08bcfe416096"} Jan 20 17:12:50 crc kubenswrapper[4558]: I0120 17:12:50.164898 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-sync-6r4hq" event={"ID":"a7b45248-f499-4b04-9591-e7268e39ac5c","Type":"ContainerStarted","Data":"e33f52cf7ad70d9b08e69aacbd68898760b0b7c600e8b28a20625caa9cbc16eb"} Jan 20 17:12:50 crc kubenswrapper[4558]: I0120 17:12:50.164926 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-sync-6r4hq" event={"ID":"a7b45248-f499-4b04-9591-e7268e39ac5c","Type":"ContainerStarted","Data":"131ed5e728387df05764bc0c16cc857b318ce3c8f8f7ebaef709e4bc65f565be"} Jan 20 17:12:50 crc kubenswrapper[4558]: I0120 17:12:50.185512 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-996c57c45-7hr9m" podStartSLOduration=5.185494934 podStartE2EDuration="5.185494934s" podCreationTimestamp="2026-01-20 17:12:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:12:50.176661223 +0000 UTC 
m=+1863.936999189" watchObservedRunningTime="2026-01-20 17:12:50.185494934 +0000 UTC m=+1863.945832902" Jan 20 17:12:50 crc kubenswrapper[4558]: I0120 17:12:50.195646 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-db-sync-6r4hq" podStartSLOduration=4.195611318 podStartE2EDuration="4.195611318s" podCreationTimestamp="2026-01-20 17:12:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:12:50.195278361 +0000 UTC m=+1863.955616318" watchObservedRunningTime="2026-01-20 17:12:50.195611318 +0000 UTC m=+1863.955949285" Jan 20 17:12:50 crc kubenswrapper[4558]: I0120 17:12:50.216937 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/rabbitmq-server-0" podStartSLOduration=42.216917012 podStartE2EDuration="42.216917012s" podCreationTimestamp="2026-01-20 17:12:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:12:50.215542286 +0000 UTC m=+1863.975880253" watchObservedRunningTime="2026-01-20 17:12:50.216917012 +0000 UTC m=+1863.977254979" Jan 20 17:12:52 crc kubenswrapper[4558]: I0120 17:12:52.209785 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/prometheus-metric-storage-0" event={"ID":"b099ccf1-da7f-4e4d-80cf-629b21cd774c","Type":"ContainerStarted","Data":"5f412156d497432b7604ccd46172afc5d571c254c8c81a3782c29c607e00f7ae"} Jan 20 17:12:54 crc kubenswrapper[4558]: I0120 17:12:54.231235 4558 generic.go:334] "Generic (PLEG): container finished" podID="a7b45248-f499-4b04-9591-e7268e39ac5c" containerID="e33f52cf7ad70d9b08e69aacbd68898760b0b7c600e8b28a20625caa9cbc16eb" exitCode=0 Jan 20 17:12:54 crc kubenswrapper[4558]: I0120 17:12:54.231321 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-sync-6r4hq" event={"ID":"a7b45248-f499-4b04-9591-e7268e39ac5c","Type":"ContainerDied","Data":"e33f52cf7ad70d9b08e69aacbd68898760b0b7c600e8b28a20625caa9cbc16eb"} Jan 20 17:12:55 crc kubenswrapper[4558]: I0120 17:12:55.245071 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/prometheus-metric-storage-0" event={"ID":"b099ccf1-da7f-4e4d-80cf-629b21cd774c","Type":"ContainerStarted","Data":"b4f83144ed64d3c2e00534f5cba38c30b991da847b01e3d1aa7c8ca6ad26c591"} Jan 20 17:12:55 crc kubenswrapper[4558]: I0120 17:12:55.276389 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/prometheus-metric-storage-0" podStartSLOduration=3.307238698 podStartE2EDuration="28.276369093s" podCreationTimestamp="2026-01-20 17:12:27 +0000 UTC" firstStartedPulling="2026-01-20 17:12:29.569788001 +0000 UTC m=+1843.330125968" lastFinishedPulling="2026-01-20 17:12:54.538918396 +0000 UTC m=+1868.299256363" observedRunningTime="2026-01-20 17:12:55.27182612 +0000 UTC m=+1869.032164087" watchObservedRunningTime="2026-01-20 17:12:55.276369093 +0000 UTC m=+1869.036707060" Jan 20 17:12:55 crc kubenswrapper[4558]: I0120 17:12:55.514553 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-db-sync-6r4hq" Jan 20 17:12:55 crc kubenswrapper[4558]: I0120 17:12:55.516336 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-996c57c45-7hr9m" Jan 20 17:12:55 crc kubenswrapper[4558]: I0120 17:12:55.580117 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-8xt6f"] Jan 20 17:12:55 crc kubenswrapper[4558]: I0120 17:12:55.580370 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-8xt6f" podUID="b730bb71-b18e-4b97-8d6b-d4ac799d08fd" containerName="dnsmasq-dns" containerID="cri-o://ce47b8c8a1a63f47017e4025110e0953002e31faa417215a7f430725acf9a8eb" gracePeriod=10 Jan 20 17:12:55 crc kubenswrapper[4558]: I0120 17:12:55.620365 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7b45248-f499-4b04-9591-e7268e39ac5c-config-data\") pod \"a7b45248-f499-4b04-9591-e7268e39ac5c\" (UID: \"a7b45248-f499-4b04-9591-e7268e39ac5c\") " Jan 20 17:12:55 crc kubenswrapper[4558]: I0120 17:12:55.620551 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a7b45248-f499-4b04-9591-e7268e39ac5c-db-sync-config-data\") pod \"a7b45248-f499-4b04-9591-e7268e39ac5c\" (UID: \"a7b45248-f499-4b04-9591-e7268e39ac5c\") " Jan 20 17:12:55 crc kubenswrapper[4558]: I0120 17:12:55.620602 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f24qx\" (UniqueName: \"kubernetes.io/projected/a7b45248-f499-4b04-9591-e7268e39ac5c-kube-api-access-f24qx\") pod \"a7b45248-f499-4b04-9591-e7268e39ac5c\" (UID: \"a7b45248-f499-4b04-9591-e7268e39ac5c\") " Jan 20 17:12:55 crc kubenswrapper[4558]: I0120 17:12:55.620688 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7b45248-f499-4b04-9591-e7268e39ac5c-combined-ca-bundle\") pod \"a7b45248-f499-4b04-9591-e7268e39ac5c\" (UID: \"a7b45248-f499-4b04-9591-e7268e39ac5c\") " Jan 20 17:12:55 crc kubenswrapper[4558]: I0120 17:12:55.626461 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7b45248-f499-4b04-9591-e7268e39ac5c-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "a7b45248-f499-4b04-9591-e7268e39ac5c" (UID: "a7b45248-f499-4b04-9591-e7268e39ac5c"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:12:55 crc kubenswrapper[4558]: I0120 17:12:55.632329 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a7b45248-f499-4b04-9591-e7268e39ac5c-kube-api-access-f24qx" (OuterVolumeSpecName: "kube-api-access-f24qx") pod "a7b45248-f499-4b04-9591-e7268e39ac5c" (UID: "a7b45248-f499-4b04-9591-e7268e39ac5c"). InnerVolumeSpecName "kube-api-access-f24qx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:12:55 crc kubenswrapper[4558]: I0120 17:12:55.640477 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7b45248-f499-4b04-9591-e7268e39ac5c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a7b45248-f499-4b04-9591-e7268e39ac5c" (UID: "a7b45248-f499-4b04-9591-e7268e39ac5c"). 
InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:12:55 crc kubenswrapper[4558]: I0120 17:12:55.655248 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7b45248-f499-4b04-9591-e7268e39ac5c-config-data" (OuterVolumeSpecName: "config-data") pod "a7b45248-f499-4b04-9591-e7268e39ac5c" (UID: "a7b45248-f499-4b04-9591-e7268e39ac5c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:12:55 crc kubenswrapper[4558]: I0120 17:12:55.723344 4558 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a7b45248-f499-4b04-9591-e7268e39ac5c-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:12:55 crc kubenswrapper[4558]: I0120 17:12:55.723378 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f24qx\" (UniqueName: \"kubernetes.io/projected/a7b45248-f499-4b04-9591-e7268e39ac5c-kube-api-access-f24qx\") on node \"crc\" DevicePath \"\"" Jan 20 17:12:55 crc kubenswrapper[4558]: I0120 17:12:55.723393 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7b45248-f499-4b04-9591-e7268e39ac5c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:12:55 crc kubenswrapper[4558]: I0120 17:12:55.723412 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7b45248-f499-4b04-9591-e7268e39ac5c-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:12:55 crc kubenswrapper[4558]: I0120 17:12:55.961838 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-8xt6f" Jan 20 17:12:56 crc kubenswrapper[4558]: I0120 17:12:56.029071 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b730bb71-b18e-4b97-8d6b-d4ac799d08fd-config\") pod \"b730bb71-b18e-4b97-8d6b-d4ac799d08fd\" (UID: \"b730bb71-b18e-4b97-8d6b-d4ac799d08fd\") " Jan 20 17:12:56 crc kubenswrapper[4558]: I0120 17:12:56.029241 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/b730bb71-b18e-4b97-8d6b-d4ac799d08fd-dnsmasq-svc\") pod \"b730bb71-b18e-4b97-8d6b-d4ac799d08fd\" (UID: \"b730bb71-b18e-4b97-8d6b-d4ac799d08fd\") " Jan 20 17:12:56 crc kubenswrapper[4558]: I0120 17:12:56.029279 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zzlnz\" (UniqueName: \"kubernetes.io/projected/b730bb71-b18e-4b97-8d6b-d4ac799d08fd-kube-api-access-zzlnz\") pod \"b730bb71-b18e-4b97-8d6b-d4ac799d08fd\" (UID: \"b730bb71-b18e-4b97-8d6b-d4ac799d08fd\") " Jan 20 17:12:56 crc kubenswrapper[4558]: I0120 17:12:56.033394 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b730bb71-b18e-4b97-8d6b-d4ac799d08fd-kube-api-access-zzlnz" (OuterVolumeSpecName: "kube-api-access-zzlnz") pod "b730bb71-b18e-4b97-8d6b-d4ac799d08fd" (UID: "b730bb71-b18e-4b97-8d6b-d4ac799d08fd"). InnerVolumeSpecName "kube-api-access-zzlnz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:12:56 crc kubenswrapper[4558]: I0120 17:12:56.060449 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b730bb71-b18e-4b97-8d6b-d4ac799d08fd-config" (OuterVolumeSpecName: "config") pod "b730bb71-b18e-4b97-8d6b-d4ac799d08fd" (UID: "b730bb71-b18e-4b97-8d6b-d4ac799d08fd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:12:56 crc kubenswrapper[4558]: I0120 17:12:56.061034 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b730bb71-b18e-4b97-8d6b-d4ac799d08fd-dnsmasq-svc" (OuterVolumeSpecName: "dnsmasq-svc") pod "b730bb71-b18e-4b97-8d6b-d4ac799d08fd" (UID: "b730bb71-b18e-4b97-8d6b-d4ac799d08fd"). InnerVolumeSpecName "dnsmasq-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:12:56 crc kubenswrapper[4558]: I0120 17:12:56.130672 4558 reconciler_common.go:293] "Volume detached for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/b730bb71-b18e-4b97-8d6b-d4ac799d08fd-dnsmasq-svc\") on node \"crc\" DevicePath \"\"" Jan 20 17:12:56 crc kubenswrapper[4558]: I0120 17:12:56.130700 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zzlnz\" (UniqueName: \"kubernetes.io/projected/b730bb71-b18e-4b97-8d6b-d4ac799d08fd-kube-api-access-zzlnz\") on node \"crc\" DevicePath \"\"" Jan 20 17:12:56 crc kubenswrapper[4558]: I0120 17:12:56.130713 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b730bb71-b18e-4b97-8d6b-d4ac799d08fd-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:12:56 crc kubenswrapper[4558]: I0120 17:12:56.258197 4558 generic.go:334] "Generic (PLEG): container finished" podID="b730bb71-b18e-4b97-8d6b-d4ac799d08fd" containerID="ce47b8c8a1a63f47017e4025110e0953002e31faa417215a7f430725acf9a8eb" exitCode=0 Jan 20 17:12:56 crc kubenswrapper[4558]: I0120 17:12:56.258262 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-8xt6f" Jan 20 17:12:56 crc kubenswrapper[4558]: I0120 17:12:56.258297 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-8xt6f" event={"ID":"b730bb71-b18e-4b97-8d6b-d4ac799d08fd","Type":"ContainerDied","Data":"ce47b8c8a1a63f47017e4025110e0953002e31faa417215a7f430725acf9a8eb"} Jan 20 17:12:56 crc kubenswrapper[4558]: I0120 17:12:56.258387 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-8xt6f" event={"ID":"b730bb71-b18e-4b97-8d6b-d4ac799d08fd","Type":"ContainerDied","Data":"f27657a2906f22bd8d283a7350bf78fd6eb7c8e04bd6211cf52429d4b13a3a6c"} Jan 20 17:12:56 crc kubenswrapper[4558]: I0120 17:12:56.258435 4558 scope.go:117] "RemoveContainer" containerID="ce47b8c8a1a63f47017e4025110e0953002e31faa417215a7f430725acf9a8eb" Jan 20 17:12:56 crc kubenswrapper[4558]: I0120 17:12:56.263495 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-sync-6r4hq" event={"ID":"a7b45248-f499-4b04-9591-e7268e39ac5c","Type":"ContainerDied","Data":"131ed5e728387df05764bc0c16cc857b318ce3c8f8f7ebaef709e4bc65f565be"} Jan 20 17:12:56 crc kubenswrapper[4558]: I0120 17:12:56.263532 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="131ed5e728387df05764bc0c16cc857b318ce3c8f8f7ebaef709e4bc65f565be" Jan 20 17:12:56 crc kubenswrapper[4558]: I0120 17:12:56.263663 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-db-sync-6r4hq" Jan 20 17:12:56 crc kubenswrapper[4558]: I0120 17:12:56.280030 4558 scope.go:117] "RemoveContainer" containerID="5e6cb306bc07e01d6e9ddaee7760a28a523c717659bf64df27406b349acbd1c2" Jan 20 17:12:56 crc kubenswrapper[4558]: I0120 17:12:56.306140 4558 scope.go:117] "RemoveContainer" containerID="ce47b8c8a1a63f47017e4025110e0953002e31faa417215a7f430725acf9a8eb" Jan 20 17:12:56 crc kubenswrapper[4558]: E0120 17:12:56.306556 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ce47b8c8a1a63f47017e4025110e0953002e31faa417215a7f430725acf9a8eb\": container with ID starting with ce47b8c8a1a63f47017e4025110e0953002e31faa417215a7f430725acf9a8eb not found: ID does not exist" containerID="ce47b8c8a1a63f47017e4025110e0953002e31faa417215a7f430725acf9a8eb" Jan 20 17:12:56 crc kubenswrapper[4558]: I0120 17:12:56.306591 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ce47b8c8a1a63f47017e4025110e0953002e31faa417215a7f430725acf9a8eb"} err="failed to get container status \"ce47b8c8a1a63f47017e4025110e0953002e31faa417215a7f430725acf9a8eb\": rpc error: code = NotFound desc = could not find container \"ce47b8c8a1a63f47017e4025110e0953002e31faa417215a7f430725acf9a8eb\": container with ID starting with ce47b8c8a1a63f47017e4025110e0953002e31faa417215a7f430725acf9a8eb not found: ID does not exist" Jan 20 17:12:56 crc kubenswrapper[4558]: I0120 17:12:56.306616 4558 scope.go:117] "RemoveContainer" containerID="5e6cb306bc07e01d6e9ddaee7760a28a523c717659bf64df27406b349acbd1c2" Jan 20 17:12:56 crc kubenswrapper[4558]: E0120 17:12:56.306855 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5e6cb306bc07e01d6e9ddaee7760a28a523c717659bf64df27406b349acbd1c2\": container with ID starting with 
5e6cb306bc07e01d6e9ddaee7760a28a523c717659bf64df27406b349acbd1c2 not found: ID does not exist" containerID="5e6cb306bc07e01d6e9ddaee7760a28a523c717659bf64df27406b349acbd1c2" Jan 20 17:12:56 crc kubenswrapper[4558]: I0120 17:12:56.306869 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5e6cb306bc07e01d6e9ddaee7760a28a523c717659bf64df27406b349acbd1c2"} err="failed to get container status \"5e6cb306bc07e01d6e9ddaee7760a28a523c717659bf64df27406b349acbd1c2\": rpc error: code = NotFound desc = could not find container \"5e6cb306bc07e01d6e9ddaee7760a28a523c717659bf64df27406b349acbd1c2\": container with ID starting with 5e6cb306bc07e01d6e9ddaee7760a28a523c717659bf64df27406b349acbd1c2 not found: ID does not exist" Jan 20 17:12:56 crc kubenswrapper[4558]: I0120 17:12:56.309619 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-8xt6f"] Jan 20 17:12:56 crc kubenswrapper[4558]: I0120 17:12:56.314349 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-8xt6f"] Jan 20 17:12:56 crc kubenswrapper[4558]: I0120 17:12:56.577380 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b730bb71-b18e-4b97-8d6b-d4ac799d08fd" path="/var/lib/kubelet/pods/b730bb71-b18e-4b97-8d6b-d4ac799d08fd/volumes" Jan 20 17:12:58 crc kubenswrapper[4558]: I0120 17:12:58.940195 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:12:58 crc kubenswrapper[4558]: I0120 17:12:58.940918 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:12:58 crc kubenswrapper[4558]: I0120 17:12:58.942637 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:12:59 crc kubenswrapper[4558]: I0120 17:12:59.294522 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:12:59 crc kubenswrapper[4558]: I0120 17:12:59.493400 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:12:59 crc kubenswrapper[4558]: I0120 17:12:59.781584 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/watcher-db-sync-z8lmb"] Jan 20 17:12:59 crc kubenswrapper[4558]: E0120 17:12:59.781925 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7b45248-f499-4b04-9591-e7268e39ac5c" containerName="glance-db-sync" Jan 20 17:12:59 crc kubenswrapper[4558]: I0120 17:12:59.781943 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7b45248-f499-4b04-9591-e7268e39ac5c" containerName="glance-db-sync" Jan 20 17:12:59 crc kubenswrapper[4558]: E0120 17:12:59.781953 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b730bb71-b18e-4b97-8d6b-d4ac799d08fd" containerName="init" Jan 20 17:12:59 crc kubenswrapper[4558]: I0120 17:12:59.781958 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b730bb71-b18e-4b97-8d6b-d4ac799d08fd" containerName="init" Jan 20 17:12:59 crc kubenswrapper[4558]: E0120 17:12:59.781978 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b730bb71-b18e-4b97-8d6b-d4ac799d08fd" containerName="dnsmasq-dns" Jan 20 17:12:59 crc kubenswrapper[4558]: I0120 17:12:59.781984 4558 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="b730bb71-b18e-4b97-8d6b-d4ac799d08fd" containerName="dnsmasq-dns" Jan 20 17:12:59 crc kubenswrapper[4558]: I0120 17:12:59.782131 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b730bb71-b18e-4b97-8d6b-d4ac799d08fd" containerName="dnsmasq-dns" Jan 20 17:12:59 crc kubenswrapper[4558]: I0120 17:12:59.782141 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a7b45248-f499-4b04-9591-e7268e39ac5c" containerName="glance-db-sync" Jan 20 17:12:59 crc kubenswrapper[4558]: I0120 17:12:59.782696 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/watcher-db-sync-z8lmb" Jan 20 17:12:59 crc kubenswrapper[4558]: I0120 17:12:59.784185 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"watcher-watcher-dockercfg-r4jqf" Jan 20 17:12:59 crc kubenswrapper[4558]: I0120 17:12:59.787101 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"watcher-config-data" Jan 20 17:12:59 crc kubenswrapper[4558]: I0120 17:12:59.792073 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/watcher-db-sync-z8lmb"] Jan 20 17:12:59 crc kubenswrapper[4558]: I0120 17:12:59.798260 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4f36762c-fdfd-45ac-a5b1-4be2bd718a9b-db-sync-config-data\") pod \"watcher-db-sync-z8lmb\" (UID: \"4f36762c-fdfd-45ac-a5b1-4be2bd718a9b\") " pod="openstack-kuttl-tests/watcher-db-sync-z8lmb" Jan 20 17:12:59 crc kubenswrapper[4558]: I0120 17:12:59.798353 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f36762c-fdfd-45ac-a5b1-4be2bd718a9b-combined-ca-bundle\") pod \"watcher-db-sync-z8lmb\" (UID: \"4f36762c-fdfd-45ac-a5b1-4be2bd718a9b\") " pod="openstack-kuttl-tests/watcher-db-sync-z8lmb" Jan 20 17:12:59 crc kubenswrapper[4558]: I0120 17:12:59.798541 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f36762c-fdfd-45ac-a5b1-4be2bd718a9b-config-data\") pod \"watcher-db-sync-z8lmb\" (UID: \"4f36762c-fdfd-45ac-a5b1-4be2bd718a9b\") " pod="openstack-kuttl-tests/watcher-db-sync-z8lmb" Jan 20 17:12:59 crc kubenswrapper[4558]: I0120 17:12:59.798601 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5nktq\" (UniqueName: \"kubernetes.io/projected/4f36762c-fdfd-45ac-a5b1-4be2bd718a9b-kube-api-access-5nktq\") pod \"watcher-db-sync-z8lmb\" (UID: \"4f36762c-fdfd-45ac-a5b1-4be2bd718a9b\") " pod="openstack-kuttl-tests/watcher-db-sync-z8lmb" Jan 20 17:12:59 crc kubenswrapper[4558]: I0120 17:12:59.841680 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-db-create-2vwsk"] Jan 20 17:12:59 crc kubenswrapper[4558]: I0120 17:12:59.843691 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-db-create-2vwsk" Jan 20 17:12:59 crc kubenswrapper[4558]: I0120 17:12:59.865353 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-db-create-2vwsk"] Jan 20 17:12:59 crc kubenswrapper[4558]: I0120 17:12:59.900523 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4f36762c-fdfd-45ac-a5b1-4be2bd718a9b-db-sync-config-data\") pod \"watcher-db-sync-z8lmb\" (UID: \"4f36762c-fdfd-45ac-a5b1-4be2bd718a9b\") " pod="openstack-kuttl-tests/watcher-db-sync-z8lmb" Jan 20 17:12:59 crc kubenswrapper[4558]: I0120 17:12:59.900603 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f36762c-fdfd-45ac-a5b1-4be2bd718a9b-combined-ca-bundle\") pod \"watcher-db-sync-z8lmb\" (UID: \"4f36762c-fdfd-45ac-a5b1-4be2bd718a9b\") " pod="openstack-kuttl-tests/watcher-db-sync-z8lmb" Jan 20 17:12:59 crc kubenswrapper[4558]: I0120 17:12:59.900742 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kznq8\" (UniqueName: \"kubernetes.io/projected/2d068982-e4de-4519-8842-8ae09682ad42-kube-api-access-kznq8\") pod \"cinder-db-create-2vwsk\" (UID: \"2d068982-e4de-4519-8842-8ae09682ad42\") " pod="openstack-kuttl-tests/cinder-db-create-2vwsk" Jan 20 17:12:59 crc kubenswrapper[4558]: I0120 17:12:59.900770 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2d068982-e4de-4519-8842-8ae09682ad42-operator-scripts\") pod \"cinder-db-create-2vwsk\" (UID: \"2d068982-e4de-4519-8842-8ae09682ad42\") " pod="openstack-kuttl-tests/cinder-db-create-2vwsk" Jan 20 17:12:59 crc kubenswrapper[4558]: I0120 17:12:59.900968 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f36762c-fdfd-45ac-a5b1-4be2bd718a9b-config-data\") pod \"watcher-db-sync-z8lmb\" (UID: \"4f36762c-fdfd-45ac-a5b1-4be2bd718a9b\") " pod="openstack-kuttl-tests/watcher-db-sync-z8lmb" Jan 20 17:12:59 crc kubenswrapper[4558]: I0120 17:12:59.901065 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5nktq\" (UniqueName: \"kubernetes.io/projected/4f36762c-fdfd-45ac-a5b1-4be2bd718a9b-kube-api-access-5nktq\") pod \"watcher-db-sync-z8lmb\" (UID: \"4f36762c-fdfd-45ac-a5b1-4be2bd718a9b\") " pod="openstack-kuttl-tests/watcher-db-sync-z8lmb" Jan 20 17:12:59 crc kubenswrapper[4558]: I0120 17:12:59.909709 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4f36762c-fdfd-45ac-a5b1-4be2bd718a9b-db-sync-config-data\") pod \"watcher-db-sync-z8lmb\" (UID: \"4f36762c-fdfd-45ac-a5b1-4be2bd718a9b\") " pod="openstack-kuttl-tests/watcher-db-sync-z8lmb" Jan 20 17:12:59 crc kubenswrapper[4558]: I0120 17:12:59.909804 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f36762c-fdfd-45ac-a5b1-4be2bd718a9b-combined-ca-bundle\") pod \"watcher-db-sync-z8lmb\" (UID: \"4f36762c-fdfd-45ac-a5b1-4be2bd718a9b\") " pod="openstack-kuttl-tests/watcher-db-sync-z8lmb" Jan 20 17:12:59 crc kubenswrapper[4558]: I0120 17:12:59.910018 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/4f36762c-fdfd-45ac-a5b1-4be2bd718a9b-config-data\") pod \"watcher-db-sync-z8lmb\" (UID: \"4f36762c-fdfd-45ac-a5b1-4be2bd718a9b\") " pod="openstack-kuttl-tests/watcher-db-sync-z8lmb" Jan 20 17:12:59 crc kubenswrapper[4558]: I0120 17:12:59.921649 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-db-create-5fghl"] Jan 20 17:12:59 crc kubenswrapper[4558]: I0120 17:12:59.922918 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-db-create-5fghl" Jan 20 17:12:59 crc kubenswrapper[4558]: I0120 17:12:59.930104 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5nktq\" (UniqueName: \"kubernetes.io/projected/4f36762c-fdfd-45ac-a5b1-4be2bd718a9b-kube-api-access-5nktq\") pod \"watcher-db-sync-z8lmb\" (UID: \"4f36762c-fdfd-45ac-a5b1-4be2bd718a9b\") " pod="openstack-kuttl-tests/watcher-db-sync-z8lmb" Jan 20 17:12:59 crc kubenswrapper[4558]: I0120 17:12:59.938276 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-5773-account-create-update-ztf6s"] Jan 20 17:12:59 crc kubenswrapper[4558]: I0120 17:12:59.939552 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-5773-account-create-update-ztf6s" Jan 20 17:12:59 crc kubenswrapper[4558]: I0120 17:12:59.942229 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-db-secret" Jan 20 17:12:59 crc kubenswrapper[4558]: I0120 17:12:59.959347 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-db-create-5fghl"] Jan 20 17:12:59 crc kubenswrapper[4558]: I0120 17:12:59.977185 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-5773-account-create-update-ztf6s"] Jan 20 17:13:00 crc kubenswrapper[4558]: I0120 17:13:00.003024 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gxjjh\" (UniqueName: \"kubernetes.io/projected/279f250a-2578-4aa2-9ca7-31b35bc0b791-kube-api-access-gxjjh\") pod \"barbican-db-create-5fghl\" (UID: \"279f250a-2578-4aa2-9ca7-31b35bc0b791\") " pod="openstack-kuttl-tests/barbican-db-create-5fghl" Jan 20 17:13:00 crc kubenswrapper[4558]: I0120 17:13:00.003085 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0bfa16c9-a8d3-46f7-8e6a-ba4c0d8426b6-operator-scripts\") pod \"barbican-5773-account-create-update-ztf6s\" (UID: \"0bfa16c9-a8d3-46f7-8e6a-ba4c0d8426b6\") " pod="openstack-kuttl-tests/barbican-5773-account-create-update-ztf6s" Jan 20 17:13:00 crc kubenswrapper[4558]: I0120 17:13:00.003133 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/279f250a-2578-4aa2-9ca7-31b35bc0b791-operator-scripts\") pod \"barbican-db-create-5fghl\" (UID: \"279f250a-2578-4aa2-9ca7-31b35bc0b791\") " pod="openstack-kuttl-tests/barbican-db-create-5fghl" Jan 20 17:13:00 crc kubenswrapper[4558]: I0120 17:13:00.003195 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tskrx\" (UniqueName: \"kubernetes.io/projected/0bfa16c9-a8d3-46f7-8e6a-ba4c0d8426b6-kube-api-access-tskrx\") pod \"barbican-5773-account-create-update-ztf6s\" 
(UID: \"0bfa16c9-a8d3-46f7-8e6a-ba4c0d8426b6\") " pod="openstack-kuttl-tests/barbican-5773-account-create-update-ztf6s" Jan 20 17:13:00 crc kubenswrapper[4558]: I0120 17:13:00.003451 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kznq8\" (UniqueName: \"kubernetes.io/projected/2d068982-e4de-4519-8842-8ae09682ad42-kube-api-access-kznq8\") pod \"cinder-db-create-2vwsk\" (UID: \"2d068982-e4de-4519-8842-8ae09682ad42\") " pod="openstack-kuttl-tests/cinder-db-create-2vwsk" Jan 20 17:13:00 crc kubenswrapper[4558]: I0120 17:13:00.003489 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2d068982-e4de-4519-8842-8ae09682ad42-operator-scripts\") pod \"cinder-db-create-2vwsk\" (UID: \"2d068982-e4de-4519-8842-8ae09682ad42\") " pod="openstack-kuttl-tests/cinder-db-create-2vwsk" Jan 20 17:13:00 crc kubenswrapper[4558]: I0120 17:13:00.004279 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2d068982-e4de-4519-8842-8ae09682ad42-operator-scripts\") pod \"cinder-db-create-2vwsk\" (UID: \"2d068982-e4de-4519-8842-8ae09682ad42\") " pod="openstack-kuttl-tests/cinder-db-create-2vwsk" Jan 20 17:13:00 crc kubenswrapper[4558]: I0120 17:13:00.025414 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-db-create-pz7pt"] Jan 20 17:13:00 crc kubenswrapper[4558]: I0120 17:13:00.026587 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-db-create-pz7pt" Jan 20 17:13:00 crc kubenswrapper[4558]: I0120 17:13:00.032504 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kznq8\" (UniqueName: \"kubernetes.io/projected/2d068982-e4de-4519-8842-8ae09682ad42-kube-api-access-kznq8\") pod \"cinder-db-create-2vwsk\" (UID: \"2d068982-e4de-4519-8842-8ae09682ad42\") " pod="openstack-kuttl-tests/cinder-db-create-2vwsk" Jan 20 17:13:00 crc kubenswrapper[4558]: I0120 17:13:00.037081 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-7601-account-create-update-9h885"] Jan 20 17:13:00 crc kubenswrapper[4558]: I0120 17:13:00.038381 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-7601-account-create-update-9h885" Jan 20 17:13:00 crc kubenswrapper[4558]: I0120 17:13:00.039770 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-db-secret" Jan 20 17:13:00 crc kubenswrapper[4558]: I0120 17:13:00.044557 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-7601-account-create-update-9h885"] Jan 20 17:13:00 crc kubenswrapper[4558]: I0120 17:13:00.050688 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-db-create-pz7pt"] Jan 20 17:13:00 crc kubenswrapper[4558]: I0120 17:13:00.097765 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/watcher-db-sync-z8lmb" Jan 20 17:13:00 crc kubenswrapper[4558]: I0120 17:13:00.104681 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fb9a7708-e8da-47ae-a2c7-a6550d399c92-operator-scripts\") pod \"cinder-7601-account-create-update-9h885\" (UID: \"fb9a7708-e8da-47ae-a2c7-a6550d399c92\") " pod="openstack-kuttl-tests/cinder-7601-account-create-update-9h885" Jan 20 17:13:00 crc kubenswrapper[4558]: I0120 17:13:00.104729 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4qdkh\" (UniqueName: \"kubernetes.io/projected/bfbc710a-9995-4db3-a621-c7dd17624239-kube-api-access-4qdkh\") pod \"neutron-db-create-pz7pt\" (UID: \"bfbc710a-9995-4db3-a621-c7dd17624239\") " pod="openstack-kuttl-tests/neutron-db-create-pz7pt" Jan 20 17:13:00 crc kubenswrapper[4558]: I0120 17:13:00.104790 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tskrx\" (UniqueName: \"kubernetes.io/projected/0bfa16c9-a8d3-46f7-8e6a-ba4c0d8426b6-kube-api-access-tskrx\") pod \"barbican-5773-account-create-update-ztf6s\" (UID: \"0bfa16c9-a8d3-46f7-8e6a-ba4c0d8426b6\") " pod="openstack-kuttl-tests/barbican-5773-account-create-update-ztf6s" Jan 20 17:13:00 crc kubenswrapper[4558]: I0120 17:13:00.104879 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-txtdq\" (UniqueName: \"kubernetes.io/projected/fb9a7708-e8da-47ae-a2c7-a6550d399c92-kube-api-access-txtdq\") pod \"cinder-7601-account-create-update-9h885\" (UID: \"fb9a7708-e8da-47ae-a2c7-a6550d399c92\") " pod="openstack-kuttl-tests/cinder-7601-account-create-update-9h885" Jan 20 17:13:00 crc kubenswrapper[4558]: I0120 17:13:00.104934 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bfbc710a-9995-4db3-a621-c7dd17624239-operator-scripts\") pod \"neutron-db-create-pz7pt\" (UID: \"bfbc710a-9995-4db3-a621-c7dd17624239\") " pod="openstack-kuttl-tests/neutron-db-create-pz7pt" Jan 20 17:13:00 crc kubenswrapper[4558]: I0120 17:13:00.104966 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gxjjh\" (UniqueName: \"kubernetes.io/projected/279f250a-2578-4aa2-9ca7-31b35bc0b791-kube-api-access-gxjjh\") pod \"barbican-db-create-5fghl\" (UID: \"279f250a-2578-4aa2-9ca7-31b35bc0b791\") " pod="openstack-kuttl-tests/barbican-db-create-5fghl" Jan 20 17:13:00 crc kubenswrapper[4558]: I0120 17:13:00.105179 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0bfa16c9-a8d3-46f7-8e6a-ba4c0d8426b6-operator-scripts\") pod \"barbican-5773-account-create-update-ztf6s\" (UID: \"0bfa16c9-a8d3-46f7-8e6a-ba4c0d8426b6\") " pod="openstack-kuttl-tests/barbican-5773-account-create-update-ztf6s" Jan 20 17:13:00 crc kubenswrapper[4558]: I0120 17:13:00.105258 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/279f250a-2578-4aa2-9ca7-31b35bc0b791-operator-scripts\") pod \"barbican-db-create-5fghl\" (UID: \"279f250a-2578-4aa2-9ca7-31b35bc0b791\") " pod="openstack-kuttl-tests/barbican-db-create-5fghl" Jan 20 17:13:00 crc kubenswrapper[4558]: I0120 17:13:00.105997 
4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/279f250a-2578-4aa2-9ca7-31b35bc0b791-operator-scripts\") pod \"barbican-db-create-5fghl\" (UID: \"279f250a-2578-4aa2-9ca7-31b35bc0b791\") " pod="openstack-kuttl-tests/barbican-db-create-5fghl" Jan 20 17:13:00 crc kubenswrapper[4558]: I0120 17:13:00.106153 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0bfa16c9-a8d3-46f7-8e6a-ba4c0d8426b6-operator-scripts\") pod \"barbican-5773-account-create-update-ztf6s\" (UID: \"0bfa16c9-a8d3-46f7-8e6a-ba4c0d8426b6\") " pod="openstack-kuttl-tests/barbican-5773-account-create-update-ztf6s" Jan 20 17:13:00 crc kubenswrapper[4558]: I0120 17:13:00.122002 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tskrx\" (UniqueName: \"kubernetes.io/projected/0bfa16c9-a8d3-46f7-8e6a-ba4c0d8426b6-kube-api-access-tskrx\") pod \"barbican-5773-account-create-update-ztf6s\" (UID: \"0bfa16c9-a8d3-46f7-8e6a-ba4c0d8426b6\") " pod="openstack-kuttl-tests/barbican-5773-account-create-update-ztf6s" Jan 20 17:13:00 crc kubenswrapper[4558]: I0120 17:13:00.125147 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gxjjh\" (UniqueName: \"kubernetes.io/projected/279f250a-2578-4aa2-9ca7-31b35bc0b791-kube-api-access-gxjjh\") pod \"barbican-db-create-5fghl\" (UID: \"279f250a-2578-4aa2-9ca7-31b35bc0b791\") " pod="openstack-kuttl-tests/barbican-db-create-5fghl" Jan 20 17:13:00 crc kubenswrapper[4558]: I0120 17:13:00.163626 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-db-create-2vwsk" Jan 20 17:13:00 crc kubenswrapper[4558]: I0120 17:13:00.200585 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-db-sync-5v4np"] Jan 20 17:13:00 crc kubenswrapper[4558]: I0120 17:13:00.201852 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-db-sync-5v4np" Jan 20 17:13:00 crc kubenswrapper[4558]: I0120 17:13:00.205020 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone" Jan 20 17:13:00 crc kubenswrapper[4558]: I0120 17:13:00.205886 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-scripts" Jan 20 17:13:00 crc kubenswrapper[4558]: I0120 17:13:00.206179 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-keystone-dockercfg-kq484" Jan 20 17:13:00 crc kubenswrapper[4558]: I0120 17:13:00.207458 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-txtdq\" (UniqueName: \"kubernetes.io/projected/fb9a7708-e8da-47ae-a2c7-a6550d399c92-kube-api-access-txtdq\") pod \"cinder-7601-account-create-update-9h885\" (UID: \"fb9a7708-e8da-47ae-a2c7-a6550d399c92\") " pod="openstack-kuttl-tests/cinder-7601-account-create-update-9h885" Jan 20 17:13:00 crc kubenswrapper[4558]: I0120 17:13:00.207573 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bfbc710a-9995-4db3-a621-c7dd17624239-operator-scripts\") pod \"neutron-db-create-pz7pt\" (UID: \"bfbc710a-9995-4db3-a621-c7dd17624239\") " pod="openstack-kuttl-tests/neutron-db-create-pz7pt" Jan 20 17:13:00 crc kubenswrapper[4558]: I0120 17:13:00.207688 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fb9a7708-e8da-47ae-a2c7-a6550d399c92-operator-scripts\") pod \"cinder-7601-account-create-update-9h885\" (UID: \"fb9a7708-e8da-47ae-a2c7-a6550d399c92\") " pod="openstack-kuttl-tests/cinder-7601-account-create-update-9h885" Jan 20 17:13:00 crc kubenswrapper[4558]: I0120 17:13:00.207713 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4qdkh\" (UniqueName: \"kubernetes.io/projected/bfbc710a-9995-4db3-a621-c7dd17624239-kube-api-access-4qdkh\") pod \"neutron-db-create-pz7pt\" (UID: \"bfbc710a-9995-4db3-a621-c7dd17624239\") " pod="openstack-kuttl-tests/neutron-db-create-pz7pt" Jan 20 17:13:00 crc kubenswrapper[4558]: I0120 17:13:00.208823 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bfbc710a-9995-4db3-a621-c7dd17624239-operator-scripts\") pod \"neutron-db-create-pz7pt\" (UID: \"bfbc710a-9995-4db3-a621-c7dd17624239\") " pod="openstack-kuttl-tests/neutron-db-create-pz7pt" Jan 20 17:13:00 crc kubenswrapper[4558]: I0120 17:13:00.208917 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fb9a7708-e8da-47ae-a2c7-a6550d399c92-operator-scripts\") pod \"cinder-7601-account-create-update-9h885\" (UID: \"fb9a7708-e8da-47ae-a2c7-a6550d399c92\") " pod="openstack-kuttl-tests/cinder-7601-account-create-update-9h885" Jan 20 17:13:00 crc kubenswrapper[4558]: I0120 17:13:00.210572 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-config-data" Jan 20 17:13:00 crc kubenswrapper[4558]: I0120 17:13:00.219290 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-db-sync-5v4np"] Jan 20 17:13:00 crc kubenswrapper[4558]: I0120 17:13:00.228728 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"kube-api-access-txtdq\" (UniqueName: \"kubernetes.io/projected/fb9a7708-e8da-47ae-a2c7-a6550d399c92-kube-api-access-txtdq\") pod \"cinder-7601-account-create-update-9h885\" (UID: \"fb9a7708-e8da-47ae-a2c7-a6550d399c92\") " pod="openstack-kuttl-tests/cinder-7601-account-create-update-9h885" Jan 20 17:13:00 crc kubenswrapper[4558]: I0120 17:13:00.234820 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4qdkh\" (UniqueName: \"kubernetes.io/projected/bfbc710a-9995-4db3-a621-c7dd17624239-kube-api-access-4qdkh\") pod \"neutron-db-create-pz7pt\" (UID: \"bfbc710a-9995-4db3-a621-c7dd17624239\") " pod="openstack-kuttl-tests/neutron-db-create-pz7pt" Jan 20 17:13:00 crc kubenswrapper[4558]: I0120 17:13:00.246219 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-d62f-account-create-update-pmq2x"] Jan 20 17:13:00 crc kubenswrapper[4558]: I0120 17:13:00.247644 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-d62f-account-create-update-pmq2x" Jan 20 17:13:00 crc kubenswrapper[4558]: I0120 17:13:00.252750 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-db-secret" Jan 20 17:13:00 crc kubenswrapper[4558]: I0120 17:13:00.279478 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-db-create-5fghl" Jan 20 17:13:00 crc kubenswrapper[4558]: I0120 17:13:00.285495 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-d62f-account-create-update-pmq2x"] Jan 20 17:13:00 crc kubenswrapper[4558]: I0120 17:13:00.295386 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-5773-account-create-update-ztf6s" Jan 20 17:13:00 crc kubenswrapper[4558]: I0120 17:13:00.316814 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8bbfcda-5b58-470f-9f0e-e35cb51e1872-config-data\") pod \"keystone-db-sync-5v4np\" (UID: \"b8bbfcda-5b58-470f-9f0e-e35cb51e1872\") " pod="openstack-kuttl-tests/keystone-db-sync-5v4np" Jan 20 17:13:00 crc kubenswrapper[4558]: I0120 17:13:00.316895 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8bbfcda-5b58-470f-9f0e-e35cb51e1872-combined-ca-bundle\") pod \"keystone-db-sync-5v4np\" (UID: \"b8bbfcda-5b58-470f-9f0e-e35cb51e1872\") " pod="openstack-kuttl-tests/keystone-db-sync-5v4np" Jan 20 17:13:00 crc kubenswrapper[4558]: I0120 17:13:00.316939 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bdbe1471-99ee-4ffa-b596-d8ebb4674c6d-operator-scripts\") pod \"neutron-d62f-account-create-update-pmq2x\" (UID: \"bdbe1471-99ee-4ffa-b596-d8ebb4674c6d\") " pod="openstack-kuttl-tests/neutron-d62f-account-create-update-pmq2x" Jan 20 17:13:00 crc kubenswrapper[4558]: I0120 17:13:00.316995 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fqkc2\" (UniqueName: \"kubernetes.io/projected/b8bbfcda-5b58-470f-9f0e-e35cb51e1872-kube-api-access-fqkc2\") pod \"keystone-db-sync-5v4np\" (UID: \"b8bbfcda-5b58-470f-9f0e-e35cb51e1872\") " pod="openstack-kuttl-tests/keystone-db-sync-5v4np" Jan 20 17:13:00 crc 
kubenswrapper[4558]: I0120 17:13:00.317014 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qhhdf\" (UniqueName: \"kubernetes.io/projected/bdbe1471-99ee-4ffa-b596-d8ebb4674c6d-kube-api-access-qhhdf\") pod \"neutron-d62f-account-create-update-pmq2x\" (UID: \"bdbe1471-99ee-4ffa-b596-d8ebb4674c6d\") " pod="openstack-kuttl-tests/neutron-d62f-account-create-update-pmq2x" Jan 20 17:13:00 crc kubenswrapper[4558]: I0120 17:13:00.373635 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-db-create-pz7pt" Jan 20 17:13:00 crc kubenswrapper[4558]: I0120 17:13:00.375292 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-7601-account-create-update-9h885" Jan 20 17:13:00 crc kubenswrapper[4558]: I0120 17:13:00.418045 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bdbe1471-99ee-4ffa-b596-d8ebb4674c6d-operator-scripts\") pod \"neutron-d62f-account-create-update-pmq2x\" (UID: \"bdbe1471-99ee-4ffa-b596-d8ebb4674c6d\") " pod="openstack-kuttl-tests/neutron-d62f-account-create-update-pmq2x" Jan 20 17:13:00 crc kubenswrapper[4558]: I0120 17:13:00.418196 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fqkc2\" (UniqueName: \"kubernetes.io/projected/b8bbfcda-5b58-470f-9f0e-e35cb51e1872-kube-api-access-fqkc2\") pod \"keystone-db-sync-5v4np\" (UID: \"b8bbfcda-5b58-470f-9f0e-e35cb51e1872\") " pod="openstack-kuttl-tests/keystone-db-sync-5v4np" Jan 20 17:13:00 crc kubenswrapper[4558]: I0120 17:13:00.418218 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qhhdf\" (UniqueName: \"kubernetes.io/projected/bdbe1471-99ee-4ffa-b596-d8ebb4674c6d-kube-api-access-qhhdf\") pod \"neutron-d62f-account-create-update-pmq2x\" (UID: \"bdbe1471-99ee-4ffa-b596-d8ebb4674c6d\") " pod="openstack-kuttl-tests/neutron-d62f-account-create-update-pmq2x" Jan 20 17:13:00 crc kubenswrapper[4558]: I0120 17:13:00.418271 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8bbfcda-5b58-470f-9f0e-e35cb51e1872-config-data\") pod \"keystone-db-sync-5v4np\" (UID: \"b8bbfcda-5b58-470f-9f0e-e35cb51e1872\") " pod="openstack-kuttl-tests/keystone-db-sync-5v4np" Jan 20 17:13:00 crc kubenswrapper[4558]: I0120 17:13:00.418328 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8bbfcda-5b58-470f-9f0e-e35cb51e1872-combined-ca-bundle\") pod \"keystone-db-sync-5v4np\" (UID: \"b8bbfcda-5b58-470f-9f0e-e35cb51e1872\") " pod="openstack-kuttl-tests/keystone-db-sync-5v4np" Jan 20 17:13:00 crc kubenswrapper[4558]: I0120 17:13:00.419586 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bdbe1471-99ee-4ffa-b596-d8ebb4674c6d-operator-scripts\") pod \"neutron-d62f-account-create-update-pmq2x\" (UID: \"bdbe1471-99ee-4ffa-b596-d8ebb4674c6d\") " pod="openstack-kuttl-tests/neutron-d62f-account-create-update-pmq2x" Jan 20 17:13:00 crc kubenswrapper[4558]: I0120 17:13:00.435364 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/b8bbfcda-5b58-470f-9f0e-e35cb51e1872-combined-ca-bundle\") pod \"keystone-db-sync-5v4np\" (UID: \"b8bbfcda-5b58-470f-9f0e-e35cb51e1872\") " pod="openstack-kuttl-tests/keystone-db-sync-5v4np" Jan 20 17:13:00 crc kubenswrapper[4558]: I0120 17:13:00.445745 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fqkc2\" (UniqueName: \"kubernetes.io/projected/b8bbfcda-5b58-470f-9f0e-e35cb51e1872-kube-api-access-fqkc2\") pod \"keystone-db-sync-5v4np\" (UID: \"b8bbfcda-5b58-470f-9f0e-e35cb51e1872\") " pod="openstack-kuttl-tests/keystone-db-sync-5v4np" Jan 20 17:13:00 crc kubenswrapper[4558]: I0120 17:13:00.449077 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qhhdf\" (UniqueName: \"kubernetes.io/projected/bdbe1471-99ee-4ffa-b596-d8ebb4674c6d-kube-api-access-qhhdf\") pod \"neutron-d62f-account-create-update-pmq2x\" (UID: \"bdbe1471-99ee-4ffa-b596-d8ebb4674c6d\") " pod="openstack-kuttl-tests/neutron-d62f-account-create-update-pmq2x" Jan 20 17:13:00 crc kubenswrapper[4558]: I0120 17:13:00.450381 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8bbfcda-5b58-470f-9f0e-e35cb51e1872-config-data\") pod \"keystone-db-sync-5v4np\" (UID: \"b8bbfcda-5b58-470f-9f0e-e35cb51e1872\") " pod="openstack-kuttl-tests/keystone-db-sync-5v4np" Jan 20 17:13:00 crc kubenswrapper[4558]: I0120 17:13:00.463558 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/watcher-db-sync-z8lmb"] Jan 20 17:13:00 crc kubenswrapper[4558]: I0120 17:13:00.521740 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-db-sync-5v4np" Jan 20 17:13:00 crc kubenswrapper[4558]: I0120 17:13:00.587466 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-d62f-account-create-update-pmq2x" Jan 20 17:13:00 crc kubenswrapper[4558]: I0120 17:13:00.670886 4558 scope.go:117] "RemoveContainer" containerID="133261d6818bfb9dc626eb1b737b442170f15e8780d606e9ddad15e359c16063" Jan 20 17:13:00 crc kubenswrapper[4558]: I0120 17:13:00.775607 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-db-create-2vwsk"] Jan 20 17:13:00 crc kubenswrapper[4558]: I0120 17:13:00.782232 4558 scope.go:117] "RemoveContainer" containerID="ad35d349271ee789d2afcaa6751a4b0d4ee214cd86c3bf00b2033a0275b6e913" Jan 20 17:13:00 crc kubenswrapper[4558]: I0120 17:13:00.905559 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-5773-account-create-update-ztf6s"] Jan 20 17:13:00 crc kubenswrapper[4558]: I0120 17:13:00.936584 4558 scope.go:117] "RemoveContainer" containerID="d9787b048d328e82e6bcbf1c8980b56892abf588f759d3c88cfa03d604a497d8" Jan 20 17:13:01 crc kubenswrapper[4558]: I0120 17:13:01.009333 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-7601-account-create-update-9h885"] Jan 20 17:13:01 crc kubenswrapper[4558]: I0120 17:13:01.017752 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-db-create-5fghl"] Jan 20 17:13:01 crc kubenswrapper[4558]: I0120 17:13:01.024126 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-db-create-pz7pt"] Jan 20 17:13:01 crc kubenswrapper[4558]: I0120 17:13:01.286292 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-db-sync-5v4np"] Jan 20 17:13:01 crc kubenswrapper[4558]: I0120 17:13:01.354557 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-sync-5v4np" event={"ID":"b8bbfcda-5b58-470f-9f0e-e35cb51e1872","Type":"ContainerStarted","Data":"c34e4c2490e6017179b12db1a5031fac2423913d2f0a732ae44f9314a575f26c"} Jan 20 17:13:01 crc kubenswrapper[4558]: I0120 17:13:01.365036 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-7601-account-create-update-9h885" event={"ID":"fb9a7708-e8da-47ae-a2c7-a6550d399c92","Type":"ContainerStarted","Data":"5ae967978a075f7c2d6bb7b5491362516aba9ecdf1c98fba3638a9a6a4285cab"} Jan 20 17:13:01 crc kubenswrapper[4558]: I0120 17:13:01.365104 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-7601-account-create-update-9h885" event={"ID":"fb9a7708-e8da-47ae-a2c7-a6550d399c92","Type":"ContainerStarted","Data":"e40c392fcca7f87e00abede5e3b0bd876729e765638c05a84dfbf9f35a6020da"} Jan 20 17:13:01 crc kubenswrapper[4558]: I0120 17:13:01.383152 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/cinder-7601-account-create-update-9h885" podStartSLOduration=1.383134852 podStartE2EDuration="1.383134852s" podCreationTimestamp="2026-01-20 17:13:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:13:01.381569138 +0000 UTC m=+1875.141907104" watchObservedRunningTime="2026-01-20 17:13:01.383134852 +0000 UTC m=+1875.143472820" Jan 20 17:13:01 crc kubenswrapper[4558]: I0120 17:13:01.386749 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-5773-account-create-update-ztf6s" 
event={"ID":"0bfa16c9-a8d3-46f7-8e6a-ba4c0d8426b6","Type":"ContainerStarted","Data":"072e7d885f97c7bad5483c48de51474ef74dc6ad6f70322b9de1ba59a261dcff"} Jan 20 17:13:01 crc kubenswrapper[4558]: I0120 17:13:01.386800 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-5773-account-create-update-ztf6s" event={"ID":"0bfa16c9-a8d3-46f7-8e6a-ba4c0d8426b6","Type":"ContainerStarted","Data":"144814c29b582954fd9e83d19f6d6c3db80759fe2589931c6984373a0ed2275f"} Jan 20 17:13:01 crc kubenswrapper[4558]: I0120 17:13:01.396622 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-create-2vwsk" event={"ID":"2d068982-e4de-4519-8842-8ae09682ad42","Type":"ContainerStarted","Data":"7d286e7f84165b7ba56adec4f49203713926bb3402c7da156ca7d232602d1f8c"} Jan 20 17:13:01 crc kubenswrapper[4558]: I0120 17:13:01.396658 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-create-2vwsk" event={"ID":"2d068982-e4de-4519-8842-8ae09682ad42","Type":"ContainerStarted","Data":"fd51dd9c764cc4407b4240d0e08e48820dc4cb91553852b653cac70266025e98"} Jan 20 17:13:01 crc kubenswrapper[4558]: I0120 17:13:01.405157 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-d62f-account-create-update-pmq2x"] Jan 20 17:13:01 crc kubenswrapper[4558]: I0120 17:13:01.408775 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-5773-account-create-update-ztf6s" podStartSLOduration=2.40876052 podStartE2EDuration="2.40876052s" podCreationTimestamp="2026-01-20 17:12:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:13:01.400459249 +0000 UTC m=+1875.160797216" watchObservedRunningTime="2026-01-20 17:13:01.40876052 +0000 UTC m=+1875.169098487" Jan 20 17:13:01 crc kubenswrapper[4558]: I0120 17:13:01.418005 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-create-pz7pt" event={"ID":"bfbc710a-9995-4db3-a621-c7dd17624239","Type":"ContainerStarted","Data":"51b5b5fb5af49adbe1f8ad6fd74aa519237b6772474430f5d67767dfe8302c67"} Jan 20 17:13:01 crc kubenswrapper[4558]: I0120 17:13:01.418048 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-create-pz7pt" event={"ID":"bfbc710a-9995-4db3-a621-c7dd17624239","Type":"ContainerStarted","Data":"3deb49fcc8784aa19d6c32b59185e4ea92e4570334aa0df2184af92a63a8cb7b"} Jan 20 17:13:01 crc kubenswrapper[4558]: I0120 17:13:01.423525 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/cinder-db-create-2vwsk" podStartSLOduration=2.423515095 podStartE2EDuration="2.423515095s" podCreationTimestamp="2026-01-20 17:12:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:13:01.420840696 +0000 UTC m=+1875.181178663" watchObservedRunningTime="2026-01-20 17:13:01.423515095 +0000 UTC m=+1875.183853062" Jan 20 17:13:01 crc kubenswrapper[4558]: I0120 17:13:01.429748 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-create-5fghl" event={"ID":"279f250a-2578-4aa2-9ca7-31b35bc0b791","Type":"ContainerStarted","Data":"5e32ab1155526e1fa5af89a3aed00623d506307a8c78e5c9ed1d89cf46db001d"} Jan 20 17:13:01 crc kubenswrapper[4558]: I0120 17:13:01.429796 4558 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-create-5fghl" event={"ID":"279f250a-2578-4aa2-9ca7-31b35bc0b791","Type":"ContainerStarted","Data":"aed2d6e5cf85eedd3bf3a754c48377adf5d72d37cb2b3d314ad8ebaf52f6b61a"} Jan 20 17:13:01 crc kubenswrapper[4558]: I0120 17:13:01.438418 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/watcher-db-sync-z8lmb" event={"ID":"4f36762c-fdfd-45ac-a5b1-4be2bd718a9b","Type":"ContainerStarted","Data":"b5fbca86372b9e460a800a9406446aac5e63fffe6398ed6bf0e1b9b07d641403"} Jan 20 17:13:01 crc kubenswrapper[4558]: I0120 17:13:01.458307 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/neutron-db-create-pz7pt" podStartSLOduration=1.458287563 podStartE2EDuration="1.458287563s" podCreationTimestamp="2026-01-20 17:13:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:13:01.43850399 +0000 UTC m=+1875.198841957" watchObservedRunningTime="2026-01-20 17:13:01.458287563 +0000 UTC m=+1875.218625529" Jan 20 17:13:01 crc kubenswrapper[4558]: I0120 17:13:01.468121 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-db-create-5fghl" podStartSLOduration=2.4681068870000002 podStartE2EDuration="2.468106887s" podCreationTimestamp="2026-01-20 17:12:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:13:01.45745124 +0000 UTC m=+1875.217789208" watchObservedRunningTime="2026-01-20 17:13:01.468106887 +0000 UTC m=+1875.228444855" Jan 20 17:13:02 crc kubenswrapper[4558]: I0120 17:13:02.451629 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-sync-5v4np" event={"ID":"b8bbfcda-5b58-470f-9f0e-e35cb51e1872","Type":"ContainerStarted","Data":"c0310ca75de4ab98af768cbc03504523fccea796fa93999b9f55b3a18b67ca3b"} Jan 20 17:13:02 crc kubenswrapper[4558]: I0120 17:13:02.457419 4558 generic.go:334] "Generic (PLEG): container finished" podID="bfbc710a-9995-4db3-a621-c7dd17624239" containerID="51b5b5fb5af49adbe1f8ad6fd74aa519237b6772474430f5d67767dfe8302c67" exitCode=0 Jan 20 17:13:02 crc kubenswrapper[4558]: I0120 17:13:02.457530 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-create-pz7pt" event={"ID":"bfbc710a-9995-4db3-a621-c7dd17624239","Type":"ContainerDied","Data":"51b5b5fb5af49adbe1f8ad6fd74aa519237b6772474430f5d67767dfe8302c67"} Jan 20 17:13:02 crc kubenswrapper[4558]: I0120 17:13:02.470218 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/keystone-db-sync-5v4np" podStartSLOduration=2.470198708 podStartE2EDuration="2.470198708s" podCreationTimestamp="2026-01-20 17:13:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:13:02.462926392 +0000 UTC m=+1876.223264360" watchObservedRunningTime="2026-01-20 17:13:02.470198708 +0000 UTC m=+1876.230536675" Jan 20 17:13:02 crc kubenswrapper[4558]: I0120 17:13:02.470576 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-create-5fghl" event={"ID":"279f250a-2578-4aa2-9ca7-31b35bc0b791","Type":"ContainerDied","Data":"5e32ab1155526e1fa5af89a3aed00623d506307a8c78e5c9ed1d89cf46db001d"} Jan 20 17:13:02 crc kubenswrapper[4558]: I0120 
17:13:02.470431 4558 generic.go:334] "Generic (PLEG): container finished" podID="279f250a-2578-4aa2-9ca7-31b35bc0b791" containerID="5e32ab1155526e1fa5af89a3aed00623d506307a8c78e5c9ed1d89cf46db001d" exitCode=0 Jan 20 17:13:02 crc kubenswrapper[4558]: I0120 17:13:02.476950 4558 generic.go:334] "Generic (PLEG): container finished" podID="fb9a7708-e8da-47ae-a2c7-a6550d399c92" containerID="5ae967978a075f7c2d6bb7b5491362516aba9ecdf1c98fba3638a9a6a4285cab" exitCode=0 Jan 20 17:13:02 crc kubenswrapper[4558]: I0120 17:13:02.477013 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-7601-account-create-update-9h885" event={"ID":"fb9a7708-e8da-47ae-a2c7-a6550d399c92","Type":"ContainerDied","Data":"5ae967978a075f7c2d6bb7b5491362516aba9ecdf1c98fba3638a9a6a4285cab"} Jan 20 17:13:02 crc kubenswrapper[4558]: I0120 17:13:02.484613 4558 generic.go:334] "Generic (PLEG): container finished" podID="0bfa16c9-a8d3-46f7-8e6a-ba4c0d8426b6" containerID="072e7d885f97c7bad5483c48de51474ef74dc6ad6f70322b9de1ba59a261dcff" exitCode=0 Jan 20 17:13:02 crc kubenswrapper[4558]: I0120 17:13:02.484689 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-5773-account-create-update-ztf6s" event={"ID":"0bfa16c9-a8d3-46f7-8e6a-ba4c0d8426b6","Type":"ContainerDied","Data":"072e7d885f97c7bad5483c48de51474ef74dc6ad6f70322b9de1ba59a261dcff"} Jan 20 17:13:02 crc kubenswrapper[4558]: I0120 17:13:02.486450 4558 generic.go:334] "Generic (PLEG): container finished" podID="bdbe1471-99ee-4ffa-b596-d8ebb4674c6d" containerID="547add12d01aa7c468fe2c72184b3e108a17eca0453172299f58c20adaeabe41" exitCode=0 Jan 20 17:13:02 crc kubenswrapper[4558]: I0120 17:13:02.486503 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-d62f-account-create-update-pmq2x" event={"ID":"bdbe1471-99ee-4ffa-b596-d8ebb4674c6d","Type":"ContainerDied","Data":"547add12d01aa7c468fe2c72184b3e108a17eca0453172299f58c20adaeabe41"} Jan 20 17:13:02 crc kubenswrapper[4558]: I0120 17:13:02.486558 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-d62f-account-create-update-pmq2x" event={"ID":"bdbe1471-99ee-4ffa-b596-d8ebb4674c6d","Type":"ContainerStarted","Data":"762c77636e981917ac668536aa6791fb7ec85c3c00a73a1d179753fc13c9db28"} Jan 20 17:13:02 crc kubenswrapper[4558]: I0120 17:13:02.489350 4558 generic.go:334] "Generic (PLEG): container finished" podID="2d068982-e4de-4519-8842-8ae09682ad42" containerID="7d286e7f84165b7ba56adec4f49203713926bb3402c7da156ca7d232602d1f8c" exitCode=0 Jan 20 17:13:02 crc kubenswrapper[4558]: I0120 17:13:02.489399 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-create-2vwsk" event={"ID":"2d068982-e4de-4519-8842-8ae09682ad42","Type":"ContainerDied","Data":"7d286e7f84165b7ba56adec4f49203713926bb3402c7da156ca7d232602d1f8c"} Jan 20 17:13:02 crc kubenswrapper[4558]: I0120 17:13:02.545148 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/prometheus-metric-storage-0"] Jan 20 17:13:02 crc kubenswrapper[4558]: I0120 17:13:02.545441 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/prometheus-metric-storage-0" podUID="b099ccf1-da7f-4e4d-80cf-629b21cd774c" containerName="prometheus" containerID="cri-o://42bef42ec418e9c5fec49130ad8e153b3d3494179d54ac7a788a08bcfe416096" gracePeriod=600 Jan 20 17:13:02 crc kubenswrapper[4558]: I0120 17:13:02.545784 4558 kuberuntime_container.go:808] "Killing 
container with a grace period" pod="openstack-kuttl-tests/prometheus-metric-storage-0" podUID="b099ccf1-da7f-4e4d-80cf-629b21cd774c" containerName="thanos-sidecar" containerID="cri-o://b4f83144ed64d3c2e00534f5cba38c30b991da847b01e3d1aa7c8ca6ad26c591" gracePeriod=600 Jan 20 17:13:02 crc kubenswrapper[4558]: I0120 17:13:02.545838 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/prometheus-metric-storage-0" podUID="b099ccf1-da7f-4e4d-80cf-629b21cd774c" containerName="config-reloader" containerID="cri-o://5f412156d497432b7604ccd46172afc5d571c254c8c81a3782c29c607e00f7ae" gracePeriod=600 Jan 20 17:13:03 crc kubenswrapper[4558]: I0120 17:13:03.516594 4558 generic.go:334] "Generic (PLEG): container finished" podID="b8bbfcda-5b58-470f-9f0e-e35cb51e1872" containerID="c0310ca75de4ab98af768cbc03504523fccea796fa93999b9f55b3a18b67ca3b" exitCode=0 Jan 20 17:13:03 crc kubenswrapper[4558]: I0120 17:13:03.516686 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-sync-5v4np" event={"ID":"b8bbfcda-5b58-470f-9f0e-e35cb51e1872","Type":"ContainerDied","Data":"c0310ca75de4ab98af768cbc03504523fccea796fa93999b9f55b3a18b67ca3b"} Jan 20 17:13:03 crc kubenswrapper[4558]: I0120 17:13:03.523689 4558 generic.go:334] "Generic (PLEG): container finished" podID="b099ccf1-da7f-4e4d-80cf-629b21cd774c" containerID="b4f83144ed64d3c2e00534f5cba38c30b991da847b01e3d1aa7c8ca6ad26c591" exitCode=0 Jan 20 17:13:03 crc kubenswrapper[4558]: I0120 17:13:03.523726 4558 generic.go:334] "Generic (PLEG): container finished" podID="b099ccf1-da7f-4e4d-80cf-629b21cd774c" containerID="5f412156d497432b7604ccd46172afc5d571c254c8c81a3782c29c607e00f7ae" exitCode=0 Jan 20 17:13:03 crc kubenswrapper[4558]: I0120 17:13:03.523735 4558 generic.go:334] "Generic (PLEG): container finished" podID="b099ccf1-da7f-4e4d-80cf-629b21cd774c" containerID="42bef42ec418e9c5fec49130ad8e153b3d3494179d54ac7a788a08bcfe416096" exitCode=0 Jan 20 17:13:03 crc kubenswrapper[4558]: I0120 17:13:03.523822 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/prometheus-metric-storage-0" event={"ID":"b099ccf1-da7f-4e4d-80cf-629b21cd774c","Type":"ContainerDied","Data":"b4f83144ed64d3c2e00534f5cba38c30b991da847b01e3d1aa7c8ca6ad26c591"} Jan 20 17:13:03 crc kubenswrapper[4558]: I0120 17:13:03.523876 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/prometheus-metric-storage-0" event={"ID":"b099ccf1-da7f-4e4d-80cf-629b21cd774c","Type":"ContainerDied","Data":"5f412156d497432b7604ccd46172afc5d571c254c8c81a3782c29c607e00f7ae"} Jan 20 17:13:03 crc kubenswrapper[4558]: I0120 17:13:03.523887 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/prometheus-metric-storage-0" event={"ID":"b099ccf1-da7f-4e4d-80cf-629b21cd774c","Type":"ContainerDied","Data":"42bef42ec418e9c5fec49130ad8e153b3d3494179d54ac7a788a08bcfe416096"} Jan 20 17:13:03 crc kubenswrapper[4558]: I0120 17:13:03.956424 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/prometheus-metric-storage-0" podUID="b099ccf1-da7f-4e4d-80cf-629b21cd774c" containerName="prometheus" probeResult="failure" output="Get \"http://10.217.1.182:9090/-/ready\": dial tcp 10.217.1.182:9090: connect: connection refused" Jan 20 17:13:03 crc kubenswrapper[4558]: I0120 17:13:03.961915 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-db-create-2vwsk" Jan 20 17:13:04 crc kubenswrapper[4558]: I0120 17:13:04.010581 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2d068982-e4de-4519-8842-8ae09682ad42-operator-scripts\") pod \"2d068982-e4de-4519-8842-8ae09682ad42\" (UID: \"2d068982-e4de-4519-8842-8ae09682ad42\") " Jan 20 17:13:04 crc kubenswrapper[4558]: I0120 17:13:04.010866 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kznq8\" (UniqueName: \"kubernetes.io/projected/2d068982-e4de-4519-8842-8ae09682ad42-kube-api-access-kznq8\") pod \"2d068982-e4de-4519-8842-8ae09682ad42\" (UID: \"2d068982-e4de-4519-8842-8ae09682ad42\") " Jan 20 17:13:04 crc kubenswrapper[4558]: I0120 17:13:04.012079 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d068982-e4de-4519-8842-8ae09682ad42-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "2d068982-e4de-4519-8842-8ae09682ad42" (UID: "2d068982-e4de-4519-8842-8ae09682ad42"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:13:04 crc kubenswrapper[4558]: I0120 17:13:04.026037 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2d068982-e4de-4519-8842-8ae09682ad42-kube-api-access-kznq8" (OuterVolumeSpecName: "kube-api-access-kznq8") pod "2d068982-e4de-4519-8842-8ae09682ad42" (UID: "2d068982-e4de-4519-8842-8ae09682ad42"). InnerVolumeSpecName "kube-api-access-kznq8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:13:04 crc kubenswrapper[4558]: I0120 17:13:04.108530 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-7601-account-create-update-9h885" Jan 20 17:13:04 crc kubenswrapper[4558]: I0120 17:13:04.112741 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2d068982-e4de-4519-8842-8ae09682ad42-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:04 crc kubenswrapper[4558]: I0120 17:13:04.112766 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kznq8\" (UniqueName: \"kubernetes.io/projected/2d068982-e4de-4519-8842-8ae09682ad42-kube-api-access-kznq8\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:04 crc kubenswrapper[4558]: I0120 17:13:04.125374 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-db-create-5fghl" Jan 20 17:13:04 crc kubenswrapper[4558]: I0120 17:13:04.217887 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-txtdq\" (UniqueName: \"kubernetes.io/projected/fb9a7708-e8da-47ae-a2c7-a6550d399c92-kube-api-access-txtdq\") pod \"fb9a7708-e8da-47ae-a2c7-a6550d399c92\" (UID: \"fb9a7708-e8da-47ae-a2c7-a6550d399c92\") " Jan 20 17:13:04 crc kubenswrapper[4558]: I0120 17:13:04.218641 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/279f250a-2578-4aa2-9ca7-31b35bc0b791-operator-scripts\") pod \"279f250a-2578-4aa2-9ca7-31b35bc0b791\" (UID: \"279f250a-2578-4aa2-9ca7-31b35bc0b791\") " Jan 20 17:13:04 crc kubenswrapper[4558]: I0120 17:13:04.218879 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fb9a7708-e8da-47ae-a2c7-a6550d399c92-operator-scripts\") pod \"fb9a7708-e8da-47ae-a2c7-a6550d399c92\" (UID: \"fb9a7708-e8da-47ae-a2c7-a6550d399c92\") " Jan 20 17:13:04 crc kubenswrapper[4558]: I0120 17:13:04.218939 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gxjjh\" (UniqueName: \"kubernetes.io/projected/279f250a-2578-4aa2-9ca7-31b35bc0b791-kube-api-access-gxjjh\") pod \"279f250a-2578-4aa2-9ca7-31b35bc0b791\" (UID: \"279f250a-2578-4aa2-9ca7-31b35bc0b791\") " Jan 20 17:13:04 crc kubenswrapper[4558]: I0120 17:13:04.219298 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/279f250a-2578-4aa2-9ca7-31b35bc0b791-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "279f250a-2578-4aa2-9ca7-31b35bc0b791" (UID: "279f250a-2578-4aa2-9ca7-31b35bc0b791"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:13:04 crc kubenswrapper[4558]: I0120 17:13:04.219767 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/279f250a-2578-4aa2-9ca7-31b35bc0b791-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:04 crc kubenswrapper[4558]: I0120 17:13:04.221636 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fb9a7708-e8da-47ae-a2c7-a6550d399c92-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "fb9a7708-e8da-47ae-a2c7-a6550d399c92" (UID: "fb9a7708-e8da-47ae-a2c7-a6550d399c92"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:13:04 crc kubenswrapper[4558]: I0120 17:13:04.230677 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fb9a7708-e8da-47ae-a2c7-a6550d399c92-kube-api-access-txtdq" (OuterVolumeSpecName: "kube-api-access-txtdq") pod "fb9a7708-e8da-47ae-a2c7-a6550d399c92" (UID: "fb9a7708-e8da-47ae-a2c7-a6550d399c92"). InnerVolumeSpecName "kube-api-access-txtdq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:13:04 crc kubenswrapper[4558]: I0120 17:13:04.234966 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/279f250a-2578-4aa2-9ca7-31b35bc0b791-kube-api-access-gxjjh" (OuterVolumeSpecName: "kube-api-access-gxjjh") pod "279f250a-2578-4aa2-9ca7-31b35bc0b791" (UID: "279f250a-2578-4aa2-9ca7-31b35bc0b791"). 
InnerVolumeSpecName "kube-api-access-gxjjh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:13:04 crc kubenswrapper[4558]: I0120 17:13:04.277329 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-db-create-pz7pt" Jan 20 17:13:04 crc kubenswrapper[4558]: I0120 17:13:04.290181 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-d62f-account-create-update-pmq2x" Jan 20 17:13:04 crc kubenswrapper[4558]: I0120 17:13:04.309910 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-5773-account-create-update-ztf6s" Jan 20 17:13:04 crc kubenswrapper[4558]: I0120 17:13:04.320727 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4qdkh\" (UniqueName: \"kubernetes.io/projected/bfbc710a-9995-4db3-a621-c7dd17624239-kube-api-access-4qdkh\") pod \"bfbc710a-9995-4db3-a621-c7dd17624239\" (UID: \"bfbc710a-9995-4db3-a621-c7dd17624239\") " Jan 20 17:13:04 crc kubenswrapper[4558]: I0120 17:13:04.320823 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bfbc710a-9995-4db3-a621-c7dd17624239-operator-scripts\") pod \"bfbc710a-9995-4db3-a621-c7dd17624239\" (UID: \"bfbc710a-9995-4db3-a621-c7dd17624239\") " Jan 20 17:13:04 crc kubenswrapper[4558]: I0120 17:13:04.320849 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qhhdf\" (UniqueName: \"kubernetes.io/projected/bdbe1471-99ee-4ffa-b596-d8ebb4674c6d-kube-api-access-qhhdf\") pod \"bdbe1471-99ee-4ffa-b596-d8ebb4674c6d\" (UID: \"bdbe1471-99ee-4ffa-b596-d8ebb4674c6d\") " Jan 20 17:13:04 crc kubenswrapper[4558]: I0120 17:13:04.320915 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bdbe1471-99ee-4ffa-b596-d8ebb4674c6d-operator-scripts\") pod \"bdbe1471-99ee-4ffa-b596-d8ebb4674c6d\" (UID: \"bdbe1471-99ee-4ffa-b596-d8ebb4674c6d\") " Jan 20 17:13:04 crc kubenswrapper[4558]: I0120 17:13:04.321490 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fb9a7708-e8da-47ae-a2c7-a6550d399c92-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:04 crc kubenswrapper[4558]: I0120 17:13:04.321506 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gxjjh\" (UniqueName: \"kubernetes.io/projected/279f250a-2578-4aa2-9ca7-31b35bc0b791-kube-api-access-gxjjh\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:04 crc kubenswrapper[4558]: I0120 17:13:04.321517 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-txtdq\" (UniqueName: \"kubernetes.io/projected/fb9a7708-e8da-47ae-a2c7-a6550d399c92-kube-api-access-txtdq\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:04 crc kubenswrapper[4558]: I0120 17:13:04.321991 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bdbe1471-99ee-4ffa-b596-d8ebb4674c6d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "bdbe1471-99ee-4ffa-b596-d8ebb4674c6d" (UID: "bdbe1471-99ee-4ffa-b596-d8ebb4674c6d"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:13:04 crc kubenswrapper[4558]: I0120 17:13:04.322325 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bfbc710a-9995-4db3-a621-c7dd17624239-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "bfbc710a-9995-4db3-a621-c7dd17624239" (UID: "bfbc710a-9995-4db3-a621-c7dd17624239"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:13:04 crc kubenswrapper[4558]: I0120 17:13:04.323713 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bfbc710a-9995-4db3-a621-c7dd17624239-kube-api-access-4qdkh" (OuterVolumeSpecName: "kube-api-access-4qdkh") pod "bfbc710a-9995-4db3-a621-c7dd17624239" (UID: "bfbc710a-9995-4db3-a621-c7dd17624239"). InnerVolumeSpecName "kube-api-access-4qdkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:13:04 crc kubenswrapper[4558]: I0120 17:13:04.324157 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bdbe1471-99ee-4ffa-b596-d8ebb4674c6d-kube-api-access-qhhdf" (OuterVolumeSpecName: "kube-api-access-qhhdf") pod "bdbe1471-99ee-4ffa-b596-d8ebb4674c6d" (UID: "bdbe1471-99ee-4ffa-b596-d8ebb4674c6d"). InnerVolumeSpecName "kube-api-access-qhhdf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:13:04 crc kubenswrapper[4558]: I0120 17:13:04.422549 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tskrx\" (UniqueName: \"kubernetes.io/projected/0bfa16c9-a8d3-46f7-8e6a-ba4c0d8426b6-kube-api-access-tskrx\") pod \"0bfa16c9-a8d3-46f7-8e6a-ba4c0d8426b6\" (UID: \"0bfa16c9-a8d3-46f7-8e6a-ba4c0d8426b6\") " Jan 20 17:13:04 crc kubenswrapper[4558]: I0120 17:13:04.422740 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0bfa16c9-a8d3-46f7-8e6a-ba4c0d8426b6-operator-scripts\") pod \"0bfa16c9-a8d3-46f7-8e6a-ba4c0d8426b6\" (UID: \"0bfa16c9-a8d3-46f7-8e6a-ba4c0d8426b6\") " Jan 20 17:13:04 crc kubenswrapper[4558]: I0120 17:13:04.423709 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4qdkh\" (UniqueName: \"kubernetes.io/projected/bfbc710a-9995-4db3-a621-c7dd17624239-kube-api-access-4qdkh\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:04 crc kubenswrapper[4558]: I0120 17:13:04.423737 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bfbc710a-9995-4db3-a621-c7dd17624239-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:04 crc kubenswrapper[4558]: I0120 17:13:04.423746 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qhhdf\" (UniqueName: \"kubernetes.io/projected/bdbe1471-99ee-4ffa-b596-d8ebb4674c6d-kube-api-access-qhhdf\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:04 crc kubenswrapper[4558]: I0120 17:13:04.423757 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bdbe1471-99ee-4ffa-b596-d8ebb4674c6d-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:04 crc kubenswrapper[4558]: I0120 17:13:04.424440 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0bfa16c9-a8d3-46f7-8e6a-ba4c0d8426b6-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod 
"0bfa16c9-a8d3-46f7-8e6a-ba4c0d8426b6" (UID: "0bfa16c9-a8d3-46f7-8e6a-ba4c0d8426b6"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:13:04 crc kubenswrapper[4558]: I0120 17:13:04.426377 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0bfa16c9-a8d3-46f7-8e6a-ba4c0d8426b6-kube-api-access-tskrx" (OuterVolumeSpecName: "kube-api-access-tskrx") pod "0bfa16c9-a8d3-46f7-8e6a-ba4c0d8426b6" (UID: "0bfa16c9-a8d3-46f7-8e6a-ba4c0d8426b6"). InnerVolumeSpecName "kube-api-access-tskrx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:13:04 crc kubenswrapper[4558]: I0120 17:13:04.526084 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0bfa16c9-a8d3-46f7-8e6a-ba4c0d8426b6-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:04 crc kubenswrapper[4558]: I0120 17:13:04.526156 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tskrx\" (UniqueName: \"kubernetes.io/projected/0bfa16c9-a8d3-46f7-8e6a-ba4c0d8426b6-kube-api-access-tskrx\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:04 crc kubenswrapper[4558]: I0120 17:13:04.542318 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-7601-account-create-update-9h885" event={"ID":"fb9a7708-e8da-47ae-a2c7-a6550d399c92","Type":"ContainerDied","Data":"e40c392fcca7f87e00abede5e3b0bd876729e765638c05a84dfbf9f35a6020da"} Jan 20 17:13:04 crc kubenswrapper[4558]: I0120 17:13:04.542360 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e40c392fcca7f87e00abede5e3b0bd876729e765638c05a84dfbf9f35a6020da" Jan 20 17:13:04 crc kubenswrapper[4558]: I0120 17:13:04.542426 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-7601-account-create-update-9h885" Jan 20 17:13:04 crc kubenswrapper[4558]: I0120 17:13:04.547983 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-5773-account-create-update-ztf6s" event={"ID":"0bfa16c9-a8d3-46f7-8e6a-ba4c0d8426b6","Type":"ContainerDied","Data":"144814c29b582954fd9e83d19f6d6c3db80759fe2589931c6984373a0ed2275f"} Jan 20 17:13:04 crc kubenswrapper[4558]: I0120 17:13:04.548013 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-5773-account-create-update-ztf6s" Jan 20 17:13:04 crc kubenswrapper[4558]: I0120 17:13:04.548028 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="144814c29b582954fd9e83d19f6d6c3db80759fe2589931c6984373a0ed2275f" Jan 20 17:13:04 crc kubenswrapper[4558]: I0120 17:13:04.550009 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-d62f-account-create-update-pmq2x" event={"ID":"bdbe1471-99ee-4ffa-b596-d8ebb4674c6d","Type":"ContainerDied","Data":"762c77636e981917ac668536aa6791fb7ec85c3c00a73a1d179753fc13c9db28"} Jan 20 17:13:04 crc kubenswrapper[4558]: I0120 17:13:04.550040 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="762c77636e981917ac668536aa6791fb7ec85c3c00a73a1d179753fc13c9db28" Jan 20 17:13:04 crc kubenswrapper[4558]: I0120 17:13:04.550092 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-d62f-account-create-update-pmq2x" Jan 20 17:13:04 crc kubenswrapper[4558]: I0120 17:13:04.555147 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-db-create-2vwsk" Jan 20 17:13:04 crc kubenswrapper[4558]: I0120 17:13:04.555216 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-create-2vwsk" event={"ID":"2d068982-e4de-4519-8842-8ae09682ad42","Type":"ContainerDied","Data":"fd51dd9c764cc4407b4240d0e08e48820dc4cb91553852b653cac70266025e98"} Jan 20 17:13:04 crc kubenswrapper[4558]: I0120 17:13:04.555263 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fd51dd9c764cc4407b4240d0e08e48820dc4cb91553852b653cac70266025e98" Jan 20 17:13:04 crc kubenswrapper[4558]: I0120 17:13:04.563705 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-db-create-pz7pt" Jan 20 17:13:04 crc kubenswrapper[4558]: I0120 17:13:04.563645 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-create-pz7pt" event={"ID":"bfbc710a-9995-4db3-a621-c7dd17624239","Type":"ContainerDied","Data":"3deb49fcc8784aa19d6c32b59185e4ea92e4570334aa0df2184af92a63a8cb7b"} Jan 20 17:13:04 crc kubenswrapper[4558]: I0120 17:13:04.563843 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3deb49fcc8784aa19d6c32b59185e4ea92e4570334aa0df2184af92a63a8cb7b" Jan 20 17:13:04 crc kubenswrapper[4558]: I0120 17:13:04.566597 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-db-create-5fghl" Jan 20 17:13:04 crc kubenswrapper[4558]: I0120 17:13:04.593647 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-create-5fghl" event={"ID":"279f250a-2578-4aa2-9ca7-31b35bc0b791","Type":"ContainerDied","Data":"aed2d6e5cf85eedd3bf3a754c48377adf5d72d37cb2b3d314ad8ebaf52f6b61a"} Jan 20 17:13:04 crc kubenswrapper[4558]: I0120 17:13:04.593700 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="aed2d6e5cf85eedd3bf3a754c48377adf5d72d37cb2b3d314ad8ebaf52f6b61a" Jan 20 17:13:06 crc kubenswrapper[4558]: I0120 17:13:06.841763 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:13:06 crc kubenswrapper[4558]: I0120 17:13:06.844219 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-db-sync-5v4np" Jan 20 17:13:06 crc kubenswrapper[4558]: I0120 17:13:06.884127 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/b099ccf1-da7f-4e4d-80cf-629b21cd774c-thanos-prometheus-http-client-file\") pod \"b099ccf1-da7f-4e4d-80cf-629b21cd774c\" (UID: \"b099ccf1-da7f-4e4d-80cf-629b21cd774c\") " Jan 20 17:13:06 crc kubenswrapper[4558]: I0120 17:13:06.884209 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"b099ccf1-da7f-4e4d-80cf-629b21cd774c\" (UID: \"b099ccf1-da7f-4e4d-80cf-629b21cd774c\") " Jan 20 17:13:06 crc kubenswrapper[4558]: I0120 17:13:06.884283 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/b099ccf1-da7f-4e4d-80cf-629b21cd774c-prometheus-metric-storage-rulefiles-2\") pod \"b099ccf1-da7f-4e4d-80cf-629b21cd774c\" (UID: \"b099ccf1-da7f-4e4d-80cf-629b21cd774c\") " Jan 20 17:13:06 crc kubenswrapper[4558]: I0120 17:13:06.884331 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/b099ccf1-da7f-4e4d-80cf-629b21cd774c-web-config\") pod \"b099ccf1-da7f-4e4d-80cf-629b21cd774c\" (UID: \"b099ccf1-da7f-4e4d-80cf-629b21cd774c\") " Jan 20 17:13:06 crc kubenswrapper[4558]: I0120 17:13:06.884359 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/b099ccf1-da7f-4e4d-80cf-629b21cd774c-config-out\") pod \"b099ccf1-da7f-4e4d-80cf-629b21cd774c\" (UID: \"b099ccf1-da7f-4e4d-80cf-629b21cd774c\") " Jan 20 17:13:06 crc kubenswrapper[4558]: I0120 17:13:06.884439 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/b099ccf1-da7f-4e4d-80cf-629b21cd774c-tls-assets\") pod \"b099ccf1-da7f-4e4d-80cf-629b21cd774c\" (UID: \"b099ccf1-da7f-4e4d-80cf-629b21cd774c\") " Jan 20 17:13:06 crc kubenswrapper[4558]: I0120 17:13:06.884502 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/b099ccf1-da7f-4e4d-80cf-629b21cd774c-config\") pod \"b099ccf1-da7f-4e4d-80cf-629b21cd774c\" (UID: \"b099ccf1-da7f-4e4d-80cf-629b21cd774c\") " Jan 20 17:13:06 crc kubenswrapper[4558]: I0120 17:13:06.884522 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqkc2\" (UniqueName: \"kubernetes.io/projected/b8bbfcda-5b58-470f-9f0e-e35cb51e1872-kube-api-access-fqkc2\") pod \"b8bbfcda-5b58-470f-9f0e-e35cb51e1872\" (UID: \"b8bbfcda-5b58-470f-9f0e-e35cb51e1872\") " Jan 20 17:13:06 crc kubenswrapper[4558]: I0120 17:13:06.884543 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/b099ccf1-da7f-4e4d-80cf-629b21cd774c-prometheus-metric-storage-rulefiles-0\") pod \"b099ccf1-da7f-4e4d-80cf-629b21cd774c\" (UID: \"b099ccf1-da7f-4e4d-80cf-629b21cd774c\") " Jan 20 17:13:06 crc kubenswrapper[4558]: I0120 17:13:06.884563 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/b8bbfcda-5b58-470f-9f0e-e35cb51e1872-combined-ca-bundle\") pod \"b8bbfcda-5b58-470f-9f0e-e35cb51e1872\" (UID: \"b8bbfcda-5b58-470f-9f0e-e35cb51e1872\") " Jan 20 17:13:06 crc kubenswrapper[4558]: I0120 17:13:06.884589 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8bbfcda-5b58-470f-9f0e-e35cb51e1872-config-data\") pod \"b8bbfcda-5b58-470f-9f0e-e35cb51e1872\" (UID: \"b8bbfcda-5b58-470f-9f0e-e35cb51e1872\") " Jan 20 17:13:06 crc kubenswrapper[4558]: I0120 17:13:06.884623 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lwdtt\" (UniqueName: \"kubernetes.io/projected/b099ccf1-da7f-4e4d-80cf-629b21cd774c-kube-api-access-lwdtt\") pod \"b099ccf1-da7f-4e4d-80cf-629b21cd774c\" (UID: \"b099ccf1-da7f-4e4d-80cf-629b21cd774c\") " Jan 20 17:13:06 crc kubenswrapper[4558]: I0120 17:13:06.884717 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/b099ccf1-da7f-4e4d-80cf-629b21cd774c-prometheus-metric-storage-rulefiles-1\") pod \"b099ccf1-da7f-4e4d-80cf-629b21cd774c\" (UID: \"b099ccf1-da7f-4e4d-80cf-629b21cd774c\") " Jan 20 17:13:06 crc kubenswrapper[4558]: I0120 17:13:06.885586 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b099ccf1-da7f-4e4d-80cf-629b21cd774c-prometheus-metric-storage-rulefiles-1" (OuterVolumeSpecName: "prometheus-metric-storage-rulefiles-1") pod "b099ccf1-da7f-4e4d-80cf-629b21cd774c" (UID: "b099ccf1-da7f-4e4d-80cf-629b21cd774c"). InnerVolumeSpecName "prometheus-metric-storage-rulefiles-1". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:13:06 crc kubenswrapper[4558]: I0120 17:13:06.890445 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b099ccf1-da7f-4e4d-80cf-629b21cd774c-prometheus-metric-storage-rulefiles-0" (OuterVolumeSpecName: "prometheus-metric-storage-rulefiles-0") pod "b099ccf1-da7f-4e4d-80cf-629b21cd774c" (UID: "b099ccf1-da7f-4e4d-80cf-629b21cd774c"). InnerVolumeSpecName "prometheus-metric-storage-rulefiles-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:13:06 crc kubenswrapper[4558]: I0120 17:13:06.890938 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b099ccf1-da7f-4e4d-80cf-629b21cd774c-prometheus-metric-storage-rulefiles-2" (OuterVolumeSpecName: "prometheus-metric-storage-rulefiles-2") pod "b099ccf1-da7f-4e4d-80cf-629b21cd774c" (UID: "b099ccf1-da7f-4e4d-80cf-629b21cd774c"). InnerVolumeSpecName "prometheus-metric-storage-rulefiles-2". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:13:06 crc kubenswrapper[4558]: I0120 17:13:06.892233 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b099ccf1-da7f-4e4d-80cf-629b21cd774c-config" (OuterVolumeSpecName: "config") pod "b099ccf1-da7f-4e4d-80cf-629b21cd774c" (UID: "b099ccf1-da7f-4e4d-80cf-629b21cd774c"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:13:06 crc kubenswrapper[4558]: I0120 17:13:06.896832 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b8bbfcda-5b58-470f-9f0e-e35cb51e1872-kube-api-access-fqkc2" (OuterVolumeSpecName: "kube-api-access-fqkc2") pod "b8bbfcda-5b58-470f-9f0e-e35cb51e1872" (UID: "b8bbfcda-5b58-470f-9f0e-e35cb51e1872"). InnerVolumeSpecName "kube-api-access-fqkc2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:13:06 crc kubenswrapper[4558]: I0120 17:13:06.896906 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b099ccf1-da7f-4e4d-80cf-629b21cd774c-thanos-prometheus-http-client-file" (OuterVolumeSpecName: "thanos-prometheus-http-client-file") pod "b099ccf1-da7f-4e4d-80cf-629b21cd774c" (UID: "b099ccf1-da7f-4e4d-80cf-629b21cd774c"). InnerVolumeSpecName "thanos-prometheus-http-client-file". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:13:06 crc kubenswrapper[4558]: I0120 17:13:06.897548 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b099ccf1-da7f-4e4d-80cf-629b21cd774c-config-out" (OuterVolumeSpecName: "config-out") pod "b099ccf1-da7f-4e4d-80cf-629b21cd774c" (UID: "b099ccf1-da7f-4e4d-80cf-629b21cd774c"). InnerVolumeSpecName "config-out". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:13:06 crc kubenswrapper[4558]: I0120 17:13:06.899196 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b099ccf1-da7f-4e4d-80cf-629b21cd774c-kube-api-access-lwdtt" (OuterVolumeSpecName: "kube-api-access-lwdtt") pod "b099ccf1-da7f-4e4d-80cf-629b21cd774c" (UID: "b099ccf1-da7f-4e4d-80cf-629b21cd774c"). InnerVolumeSpecName "kube-api-access-lwdtt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:13:06 crc kubenswrapper[4558]: I0120 17:13:06.899769 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage12-crc" (OuterVolumeSpecName: "prometheus-metric-storage-db") pod "b099ccf1-da7f-4e4d-80cf-629b21cd774c" (UID: "b099ccf1-da7f-4e4d-80cf-629b21cd774c"). InnerVolumeSpecName "local-storage12-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:13:06 crc kubenswrapper[4558]: I0120 17:13:06.926041 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b099ccf1-da7f-4e4d-80cf-629b21cd774c-tls-assets" (OuterVolumeSpecName: "tls-assets") pod "b099ccf1-da7f-4e4d-80cf-629b21cd774c" (UID: "b099ccf1-da7f-4e4d-80cf-629b21cd774c"). InnerVolumeSpecName "tls-assets". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:13:06 crc kubenswrapper[4558]: I0120 17:13:06.929751 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b099ccf1-da7f-4e4d-80cf-629b21cd774c-web-config" (OuterVolumeSpecName: "web-config") pod "b099ccf1-da7f-4e4d-80cf-629b21cd774c" (UID: "b099ccf1-da7f-4e4d-80cf-629b21cd774c"). InnerVolumeSpecName "web-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:13:06 crc kubenswrapper[4558]: I0120 17:13:06.929960 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8bbfcda-5b58-470f-9f0e-e35cb51e1872-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b8bbfcda-5b58-470f-9f0e-e35cb51e1872" (UID: "b8bbfcda-5b58-470f-9f0e-e35cb51e1872"). 
InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:13:06 crc kubenswrapper[4558]: I0120 17:13:06.957043 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8bbfcda-5b58-470f-9f0e-e35cb51e1872-config-data" (OuterVolumeSpecName: "config-data") pod "b8bbfcda-5b58-470f-9f0e-e35cb51e1872" (UID: "b8bbfcda-5b58-470f-9f0e-e35cb51e1872"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:13:06 crc kubenswrapper[4558]: I0120 17:13:06.986715 4558 reconciler_common.go:293] "Volume detached for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/b099ccf1-da7f-4e4d-80cf-629b21cd774c-tls-assets\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:06 crc kubenswrapper[4558]: I0120 17:13:06.986906 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/b099ccf1-da7f-4e4d-80cf-629b21cd774c-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:06 crc kubenswrapper[4558]: I0120 17:13:06.986976 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqkc2\" (UniqueName: \"kubernetes.io/projected/b8bbfcda-5b58-470f-9f0e-e35cb51e1872-kube-api-access-fqkc2\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:06 crc kubenswrapper[4558]: I0120 17:13:06.987126 4558 reconciler_common.go:293] "Volume detached for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/b099ccf1-da7f-4e4d-80cf-629b21cd774c-prometheus-metric-storage-rulefiles-0\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:06 crc kubenswrapper[4558]: I0120 17:13:06.987251 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8bbfcda-5b58-470f-9f0e-e35cb51e1872-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:06 crc kubenswrapper[4558]: I0120 17:13:06.987318 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8bbfcda-5b58-470f-9f0e-e35cb51e1872-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:06 crc kubenswrapper[4558]: I0120 17:13:06.987375 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lwdtt\" (UniqueName: \"kubernetes.io/projected/b099ccf1-da7f-4e4d-80cf-629b21cd774c-kube-api-access-lwdtt\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:06 crc kubenswrapper[4558]: I0120 17:13:06.987441 4558 reconciler_common.go:293] "Volume detached for volume \"prometheus-metric-storage-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/b099ccf1-da7f-4e4d-80cf-629b21cd774c-prometheus-metric-storage-rulefiles-1\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:06 crc kubenswrapper[4558]: I0120 17:13:06.987497 4558 reconciler_common.go:293] "Volume detached for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/b099ccf1-da7f-4e4d-80cf-629b21cd774c-thanos-prometheus-http-client-file\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:06 crc kubenswrapper[4558]: I0120 17:13:06.987584 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" " Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.000364 4558 reconciler_common.go:293] "Volume detached for volume \"prometheus-metric-storage-rulefiles-2\" (UniqueName: 
\"kubernetes.io/configmap/b099ccf1-da7f-4e4d-80cf-629b21cd774c-prometheus-metric-storage-rulefiles-2\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.000606 4558 reconciler_common.go:293] "Volume detached for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/b099ccf1-da7f-4e4d-80cf-629b21cd774c-web-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.001006 4558 reconciler_common.go:293] "Volume detached for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/b099ccf1-da7f-4e4d-80cf-629b21cd774c-config-out\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.009686 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage12-crc" (UniqueName: "kubernetes.io/local-volume/local-storage12-crc") on node "crc" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.102302 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.610783 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/prometheus-metric-storage-0" event={"ID":"b099ccf1-da7f-4e4d-80cf-629b21cd774c","Type":"ContainerDied","Data":"0031c8136e0e76ec9357655101976c9fb0febef0bf6d6bca1f583990b7719c56"} Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.610859 4558 scope.go:117] "RemoveContainer" containerID="b4f83144ed64d3c2e00534f5cba38c30b991da847b01e3d1aa7c8ca6ad26c591" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.610814 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.612905 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-sync-5v4np" event={"ID":"b8bbfcda-5b58-470f-9f0e-e35cb51e1872","Type":"ContainerDied","Data":"c34e4c2490e6017179b12db1a5031fac2423913d2f0a732ae44f9314a575f26c"} Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.612944 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c34e4c2490e6017179b12db1a5031fac2423913d2f0a732ae44f9314a575f26c" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.612974 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-db-sync-5v4np" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.647834 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/prometheus-metric-storage-0"] Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.661184 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/prometheus-metric-storage-0"] Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.671744 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/prometheus-metric-storage-0"] Jan 20 17:13:07 crc kubenswrapper[4558]: E0120 17:13:07.672087 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="279f250a-2578-4aa2-9ca7-31b35bc0b791" containerName="mariadb-database-create" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.672106 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="279f250a-2578-4aa2-9ca7-31b35bc0b791" containerName="mariadb-database-create" Jan 20 17:13:07 crc kubenswrapper[4558]: E0120 17:13:07.672127 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b099ccf1-da7f-4e4d-80cf-629b21cd774c" containerName="config-reloader" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.672134 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b099ccf1-da7f-4e4d-80cf-629b21cd774c" containerName="config-reloader" Jan 20 17:13:07 crc kubenswrapper[4558]: E0120 17:13:07.672149 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d068982-e4de-4519-8842-8ae09682ad42" containerName="mariadb-database-create" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.672156 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d068982-e4de-4519-8842-8ae09682ad42" containerName="mariadb-database-create" Jan 20 17:13:07 crc kubenswrapper[4558]: E0120 17:13:07.672184 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b099ccf1-da7f-4e4d-80cf-629b21cd774c" containerName="prometheus" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.672190 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b099ccf1-da7f-4e4d-80cf-629b21cd774c" containerName="prometheus" Jan 20 17:13:07 crc kubenswrapper[4558]: E0120 17:13:07.672199 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bfbc710a-9995-4db3-a621-c7dd17624239" containerName="mariadb-database-create" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.672204 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="bfbc710a-9995-4db3-a621-c7dd17624239" containerName="mariadb-database-create" Jan 20 17:13:07 crc kubenswrapper[4558]: E0120 17:13:07.672213 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb9a7708-e8da-47ae-a2c7-a6550d399c92" containerName="mariadb-account-create-update" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.672219 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb9a7708-e8da-47ae-a2c7-a6550d399c92" containerName="mariadb-account-create-update" Jan 20 17:13:07 crc kubenswrapper[4558]: E0120 17:13:07.672232 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8bbfcda-5b58-470f-9f0e-e35cb51e1872" containerName="keystone-db-sync" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.672238 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8bbfcda-5b58-470f-9f0e-e35cb51e1872" containerName="keystone-db-sync" Jan 20 17:13:07 crc kubenswrapper[4558]: E0120 17:13:07.672250 4558 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="b099ccf1-da7f-4e4d-80cf-629b21cd774c" containerName="thanos-sidecar" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.672255 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b099ccf1-da7f-4e4d-80cf-629b21cd774c" containerName="thanos-sidecar" Jan 20 17:13:07 crc kubenswrapper[4558]: E0120 17:13:07.672262 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b099ccf1-da7f-4e4d-80cf-629b21cd774c" containerName="init-config-reloader" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.672267 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b099ccf1-da7f-4e4d-80cf-629b21cd774c" containerName="init-config-reloader" Jan 20 17:13:07 crc kubenswrapper[4558]: E0120 17:13:07.672277 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0bfa16c9-a8d3-46f7-8e6a-ba4c0d8426b6" containerName="mariadb-account-create-update" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.672283 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0bfa16c9-a8d3-46f7-8e6a-ba4c0d8426b6" containerName="mariadb-account-create-update" Jan 20 17:13:07 crc kubenswrapper[4558]: E0120 17:13:07.672292 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bdbe1471-99ee-4ffa-b596-d8ebb4674c6d" containerName="mariadb-account-create-update" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.672298 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="bdbe1471-99ee-4ffa-b596-d8ebb4674c6d" containerName="mariadb-account-create-update" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.672445 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="bfbc710a-9995-4db3-a621-c7dd17624239" containerName="mariadb-database-create" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.672459 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="279f250a-2578-4aa2-9ca7-31b35bc0b791" containerName="mariadb-database-create" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.672472 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d068982-e4de-4519-8842-8ae09682ad42" containerName="mariadb-database-create" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.672481 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b8bbfcda-5b58-470f-9f0e-e35cb51e1872" containerName="keystone-db-sync" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.672494 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="0bfa16c9-a8d3-46f7-8e6a-ba4c0d8426b6" containerName="mariadb-account-create-update" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.672505 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="fb9a7708-e8da-47ae-a2c7-a6550d399c92" containerName="mariadb-account-create-update" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.672515 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="bdbe1471-99ee-4ffa-b596-d8ebb4674c6d" containerName="mariadb-account-create-update" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.672524 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b099ccf1-da7f-4e4d-80cf-629b21cd774c" containerName="config-reloader" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.672532 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b099ccf1-da7f-4e4d-80cf-629b21cd774c" containerName="thanos-sidecar" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.672541 4558 memory_manager.go:354] "RemoveStaleState 
removing state" podUID="b099ccf1-da7f-4e4d-80cf-629b21cd774c" containerName="prometheus" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.673958 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.679074 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"prometheus-metric-storage-rulefiles-0" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.679185 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-metric-storage-prometheus-svc" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.679371 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"prometheus-metric-storage" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.679075 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"prometheus-metric-storage-rulefiles-1" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.679076 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"prometheus-metric-storage-web-config" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.680236 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"prometheus-metric-storage-thanos-prometheus-http-client-file" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.680397 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"prometheus-metric-storage-rulefiles-2" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.680628 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"metric-storage-prometheus-dockercfg-vf58l" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.691974 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"prometheus-metric-storage-tls-assets-0" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.697270 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/prometheus-metric-storage-0"] Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.833429 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/e87a4844-8a90-4965-878f-d95aa09c47bb-prometheus-metric-storage-rulefiles-2\") pod \"prometheus-metric-storage-0\" (UID: \"e87a4844-8a90-4965-878f-d95aa09c47bb\") " pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.833499 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/e87a4844-8a90-4965-878f-d95aa09c47bb-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"e87a4844-8a90-4965-878f-d95aa09c47bb\") " pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.833537 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/e87a4844-8a90-4965-878f-d95aa09c47bb-prometheus-metric-storage-rulefiles-1\") pod \"prometheus-metric-storage-0\" (UID: \"e87a4844-8a90-4965-878f-d95aa09c47bb\") " 
pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.833561 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"prometheus-metric-storage-0\" (UID: \"e87a4844-8a90-4965-878f-d95aa09c47bb\") " pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.833585 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/e87a4844-8a90-4965-878f-d95aa09c47bb-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"e87a4844-8a90-4965-878f-d95aa09c47bb\") " pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.833626 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/e87a4844-8a90-4965-878f-d95aa09c47bb-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"e87a4844-8a90-4965-878f-d95aa09c47bb\") " pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.833654 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/e87a4844-8a90-4965-878f-d95aa09c47bb-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"e87a4844-8a90-4965-878f-d95aa09c47bb\") " pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.833685 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/e87a4844-8a90-4965-878f-d95aa09c47bb-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"e87a4844-8a90-4965-878f-d95aa09c47bb\") " pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.833780 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/e87a4844-8a90-4965-878f-d95aa09c47bb-config\") pod \"prometheus-metric-storage-0\" (UID: \"e87a4844-8a90-4965-878f-d95aa09c47bb\") " pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.833797 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e87a4844-8a90-4965-878f-d95aa09c47bb-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"e87a4844-8a90-4965-878f-d95aa09c47bb\") " pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.833835 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/e87a4844-8a90-4965-878f-d95aa09c47bb-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: 
\"e87a4844-8a90-4965-878f-d95aa09c47bb\") " pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.833850 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v2gjr\" (UniqueName: \"kubernetes.io/projected/e87a4844-8a90-4965-878f-d95aa09c47bb-kube-api-access-v2gjr\") pod \"prometheus-metric-storage-0\" (UID: \"e87a4844-8a90-4965-878f-d95aa09c47bb\") " pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.833889 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/e87a4844-8a90-4965-878f-d95aa09c47bb-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"e87a4844-8a90-4965-878f-d95aa09c47bb\") " pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.936245 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/e87a4844-8a90-4965-878f-d95aa09c47bb-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"e87a4844-8a90-4965-878f-d95aa09c47bb\") " pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.936630 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/e87a4844-8a90-4965-878f-d95aa09c47bb-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"e87a4844-8a90-4965-878f-d95aa09c47bb\") " pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.936717 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/e87a4844-8a90-4965-878f-d95aa09c47bb-config\") pod \"prometheus-metric-storage-0\" (UID: \"e87a4844-8a90-4965-878f-d95aa09c47bb\") " pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.936741 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e87a4844-8a90-4965-878f-d95aa09c47bb-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"e87a4844-8a90-4965-878f-d95aa09c47bb\") " pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.936773 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/e87a4844-8a90-4965-878f-d95aa09c47bb-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"e87a4844-8a90-4965-878f-d95aa09c47bb\") " pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.936796 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v2gjr\" (UniqueName: \"kubernetes.io/projected/e87a4844-8a90-4965-878f-d95aa09c47bb-kube-api-access-v2gjr\") pod \"prometheus-metric-storage-0\" (UID: \"e87a4844-8a90-4965-878f-d95aa09c47bb\") " pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.936829 4558 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/e87a4844-8a90-4965-878f-d95aa09c47bb-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"e87a4844-8a90-4965-878f-d95aa09c47bb\") " pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.936871 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/e87a4844-8a90-4965-878f-d95aa09c47bb-prometheus-metric-storage-rulefiles-2\") pod \"prometheus-metric-storage-0\" (UID: \"e87a4844-8a90-4965-878f-d95aa09c47bb\") " pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.936898 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/e87a4844-8a90-4965-878f-d95aa09c47bb-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"e87a4844-8a90-4965-878f-d95aa09c47bb\") " pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.936924 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/e87a4844-8a90-4965-878f-d95aa09c47bb-prometheus-metric-storage-rulefiles-1\") pod \"prometheus-metric-storage-0\" (UID: \"e87a4844-8a90-4965-878f-d95aa09c47bb\") " pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.936946 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"prometheus-metric-storage-0\" (UID: \"e87a4844-8a90-4965-878f-d95aa09c47bb\") " pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.936970 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/e87a4844-8a90-4965-878f-d95aa09c47bb-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"e87a4844-8a90-4965-878f-d95aa09c47bb\") " pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.937000 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/e87a4844-8a90-4965-878f-d95aa09c47bb-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"e87a4844-8a90-4965-878f-d95aa09c47bb\") " pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.937520 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"prometheus-metric-storage-0\" (UID: \"e87a4844-8a90-4965-878f-d95aa09c47bb\") device mount path \"/mnt/openstack/pv12\"" pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.937859 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-1\" (UniqueName: 
\"kubernetes.io/configmap/e87a4844-8a90-4965-878f-d95aa09c47bb-prometheus-metric-storage-rulefiles-1\") pod \"prometheus-metric-storage-0\" (UID: \"e87a4844-8a90-4965-878f-d95aa09c47bb\") " pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.937920 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/e87a4844-8a90-4965-878f-d95aa09c47bb-prometheus-metric-storage-rulefiles-2\") pod \"prometheus-metric-storage-0\" (UID: \"e87a4844-8a90-4965-878f-d95aa09c47bb\") " pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.938365 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/e87a4844-8a90-4965-878f-d95aa09c47bb-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"e87a4844-8a90-4965-878f-d95aa09c47bb\") " pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.946051 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/e87a4844-8a90-4965-878f-d95aa09c47bb-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"e87a4844-8a90-4965-878f-d95aa09c47bb\") " pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.947804 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/e87a4844-8a90-4965-878f-d95aa09c47bb-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"e87a4844-8a90-4965-878f-d95aa09c47bb\") " pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.949298 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/e87a4844-8a90-4965-878f-d95aa09c47bb-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"e87a4844-8a90-4965-878f-d95aa09c47bb\") " pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.949337 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/e87a4844-8a90-4965-878f-d95aa09c47bb-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"e87a4844-8a90-4965-878f-d95aa09c47bb\") " pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.949865 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e87a4844-8a90-4965-878f-d95aa09c47bb-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"e87a4844-8a90-4965-878f-d95aa09c47bb\") " pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.950971 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/e87a4844-8a90-4965-878f-d95aa09c47bb-config\") pod \"prometheus-metric-storage-0\" (UID: \"e87a4844-8a90-4965-878f-d95aa09c47bb\") " pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:13:07 crc 
kubenswrapper[4558]: I0120 17:13:07.951104 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/e87a4844-8a90-4965-878f-d95aa09c47bb-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"e87a4844-8a90-4965-878f-d95aa09c47bb\") " pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.951362 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/e87a4844-8a90-4965-878f-d95aa09c47bb-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"e87a4844-8a90-4965-878f-d95aa09c47bb\") " pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.954912 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v2gjr\" (UniqueName: \"kubernetes.io/projected/e87a4844-8a90-4965-878f-d95aa09c47bb-kube-api-access-v2gjr\") pod \"prometheus-metric-storage-0\" (UID: \"e87a4844-8a90-4965-878f-d95aa09c47bb\") " pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:13:07 crc kubenswrapper[4558]: I0120 17:13:07.978531 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"prometheus-metric-storage-0\" (UID: \"e87a4844-8a90-4965-878f-d95aa09c47bb\") " pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.028727 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-479z7"] Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.029995 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-479z7" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.032375 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"osp-secret" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.032575 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-scripts" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.032893 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-config-data" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.033107 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.033263 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-keystone-dockercfg-kq484" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.042721 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-479z7"] Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.140792 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gh2wp\" (UniqueName: \"kubernetes.io/projected/d81dc2b3-5e3d-4b54-98f5-938c751dcaed-kube-api-access-gh2wp\") pod \"keystone-bootstrap-479z7\" (UID: \"d81dc2b3-5e3d-4b54-98f5-938c751dcaed\") " pod="openstack-kuttl-tests/keystone-bootstrap-479z7" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.140906 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d81dc2b3-5e3d-4b54-98f5-938c751dcaed-fernet-keys\") pod \"keystone-bootstrap-479z7\" (UID: \"d81dc2b3-5e3d-4b54-98f5-938c751dcaed\") " pod="openstack-kuttl-tests/keystone-bootstrap-479z7" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.140971 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d81dc2b3-5e3d-4b54-98f5-938c751dcaed-credential-keys\") pod \"keystone-bootstrap-479z7\" (UID: \"d81dc2b3-5e3d-4b54-98f5-938c751dcaed\") " pod="openstack-kuttl-tests/keystone-bootstrap-479z7" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.141044 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d81dc2b3-5e3d-4b54-98f5-938c751dcaed-scripts\") pod \"keystone-bootstrap-479z7\" (UID: \"d81dc2b3-5e3d-4b54-98f5-938c751dcaed\") " pod="openstack-kuttl-tests/keystone-bootstrap-479z7" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.141244 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d81dc2b3-5e3d-4b54-98f5-938c751dcaed-config-data\") pod \"keystone-bootstrap-479z7\" (UID: \"d81dc2b3-5e3d-4b54-98f5-938c751dcaed\") " pod="openstack-kuttl-tests/keystone-bootstrap-479z7" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.141474 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d81dc2b3-5e3d-4b54-98f5-938c751dcaed-combined-ca-bundle\") pod \"keystone-bootstrap-479z7\" (UID: \"d81dc2b3-5e3d-4b54-98f5-938c751dcaed\") " 
pod="openstack-kuttl-tests/keystone-bootstrap-479z7" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.148759 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.150546 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.152327 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.152653 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.168888 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.206242 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-db-sync-xmmz7"] Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.207662 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-db-sync-xmmz7" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.213001 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-scripts" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.213089 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-config-data" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.213681 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-cinder-dockercfg-7sxv6" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.225778 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-db-sync-xmmz7"] Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.243017 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d81dc2b3-5e3d-4b54-98f5-938c751dcaed-combined-ca-bundle\") pod \"keystone-bootstrap-479z7\" (UID: \"d81dc2b3-5e3d-4b54-98f5-938c751dcaed\") " pod="openstack-kuttl-tests/keystone-bootstrap-479z7" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.243059 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/52844554-ecdd-40db-ae62-0f0de3d9030d-log-httpd\") pod \"ceilometer-0\" (UID: \"52844554-ecdd-40db-ae62-0f0de3d9030d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.243096 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/52844554-ecdd-40db-ae62-0f0de3d9030d-config-data\") pod \"ceilometer-0\" (UID: \"52844554-ecdd-40db-ae62-0f0de3d9030d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.243117 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w4kzx\" (UniqueName: \"kubernetes.io/projected/52844554-ecdd-40db-ae62-0f0de3d9030d-kube-api-access-w4kzx\") pod \"ceilometer-0\" (UID: \"52844554-ecdd-40db-ae62-0f0de3d9030d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:13:08 crc kubenswrapper[4558]: 
I0120 17:13:08.243139 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gh2wp\" (UniqueName: \"kubernetes.io/projected/d81dc2b3-5e3d-4b54-98f5-938c751dcaed-kube-api-access-gh2wp\") pod \"keystone-bootstrap-479z7\" (UID: \"d81dc2b3-5e3d-4b54-98f5-938c751dcaed\") " pod="openstack-kuttl-tests/keystone-bootstrap-479z7" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.243184 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/52844554-ecdd-40db-ae62-0f0de3d9030d-run-httpd\") pod \"ceilometer-0\" (UID: \"52844554-ecdd-40db-ae62-0f0de3d9030d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.243203 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d81dc2b3-5e3d-4b54-98f5-938c751dcaed-fernet-keys\") pod \"keystone-bootstrap-479z7\" (UID: \"d81dc2b3-5e3d-4b54-98f5-938c751dcaed\") " pod="openstack-kuttl-tests/keystone-bootstrap-479z7" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.243230 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d81dc2b3-5e3d-4b54-98f5-938c751dcaed-credential-keys\") pod \"keystone-bootstrap-479z7\" (UID: \"d81dc2b3-5e3d-4b54-98f5-938c751dcaed\") " pod="openstack-kuttl-tests/keystone-bootstrap-479z7" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.243251 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d81dc2b3-5e3d-4b54-98f5-938c751dcaed-scripts\") pod \"keystone-bootstrap-479z7\" (UID: \"d81dc2b3-5e3d-4b54-98f5-938c751dcaed\") " pod="openstack-kuttl-tests/keystone-bootstrap-479z7" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.243269 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/52844554-ecdd-40db-ae62-0f0de3d9030d-scripts\") pod \"ceilometer-0\" (UID: \"52844554-ecdd-40db-ae62-0f0de3d9030d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.243294 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/52844554-ecdd-40db-ae62-0f0de3d9030d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"52844554-ecdd-40db-ae62-0f0de3d9030d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.243315 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d81dc2b3-5e3d-4b54-98f5-938c751dcaed-config-data\") pod \"keystone-bootstrap-479z7\" (UID: \"d81dc2b3-5e3d-4b54-98f5-938c751dcaed\") " pod="openstack-kuttl-tests/keystone-bootstrap-479z7" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.243336 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52844554-ecdd-40db-ae62-0f0de3d9030d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"52844554-ecdd-40db-ae62-0f0de3d9030d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.249566 4558 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d81dc2b3-5e3d-4b54-98f5-938c751dcaed-credential-keys\") pod \"keystone-bootstrap-479z7\" (UID: \"d81dc2b3-5e3d-4b54-98f5-938c751dcaed\") " pod="openstack-kuttl-tests/keystone-bootstrap-479z7" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.250555 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d81dc2b3-5e3d-4b54-98f5-938c751dcaed-fernet-keys\") pod \"keystone-bootstrap-479z7\" (UID: \"d81dc2b3-5e3d-4b54-98f5-938c751dcaed\") " pod="openstack-kuttl-tests/keystone-bootstrap-479z7" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.255751 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d81dc2b3-5e3d-4b54-98f5-938c751dcaed-scripts\") pod \"keystone-bootstrap-479z7\" (UID: \"d81dc2b3-5e3d-4b54-98f5-938c751dcaed\") " pod="openstack-kuttl-tests/keystone-bootstrap-479z7" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.255828 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d81dc2b3-5e3d-4b54-98f5-938c751dcaed-combined-ca-bundle\") pod \"keystone-bootstrap-479z7\" (UID: \"d81dc2b3-5e3d-4b54-98f5-938c751dcaed\") " pod="openstack-kuttl-tests/keystone-bootstrap-479z7" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.261083 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d81dc2b3-5e3d-4b54-98f5-938c751dcaed-config-data\") pod \"keystone-bootstrap-479z7\" (UID: \"d81dc2b3-5e3d-4b54-98f5-938c751dcaed\") " pod="openstack-kuttl-tests/keystone-bootstrap-479z7" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.263883 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/placement-db-sync-c95c5"] Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.265093 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-db-sync-c95c5" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.268399 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"placement-config-data" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.268575 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"placement-scripts" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.268683 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"placement-placement-dockercfg-fxvdq" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.275648 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gh2wp\" (UniqueName: \"kubernetes.io/projected/d81dc2b3-5e3d-4b54-98f5-938c751dcaed-kube-api-access-gh2wp\") pod \"keystone-bootstrap-479z7\" (UID: \"d81dc2b3-5e3d-4b54-98f5-938c751dcaed\") " pod="openstack-kuttl-tests/keystone-bootstrap-479z7" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.284554 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-db-sync-c95c5"] Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.289971 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.345116 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/52844554-ecdd-40db-ae62-0f0de3d9030d-log-httpd\") pod \"ceilometer-0\" (UID: \"52844554-ecdd-40db-ae62-0f0de3d9030d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.345197 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4e87c463-6ff6-4a56-a207-b6b86578ec36-etc-machine-id\") pod \"cinder-db-sync-xmmz7\" (UID: \"4e87c463-6ff6-4a56-a207-b6b86578ec36\") " pod="openstack-kuttl-tests/cinder-db-sync-xmmz7" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.345273 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/52844554-ecdd-40db-ae62-0f0de3d9030d-config-data\") pod \"ceilometer-0\" (UID: \"52844554-ecdd-40db-ae62-0f0de3d9030d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.345305 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w4kzx\" (UniqueName: \"kubernetes.io/projected/52844554-ecdd-40db-ae62-0f0de3d9030d-kube-api-access-w4kzx\") pod \"ceilometer-0\" (UID: \"52844554-ecdd-40db-ae62-0f0de3d9030d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.345342 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3bca81a-32b0-4c02-90c0-ab10bb1eb835-combined-ca-bundle\") pod \"placement-db-sync-c95c5\" (UID: \"c3bca81a-32b0-4c02-90c0-ab10bb1eb835\") " pod="openstack-kuttl-tests/placement-db-sync-c95c5" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.345377 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e87c463-6ff6-4a56-a207-b6b86578ec36-scripts\") pod \"cinder-db-sync-xmmz7\" (UID: \"4e87c463-6ff6-4a56-a207-b6b86578ec36\") " pod="openstack-kuttl-tests/cinder-db-sync-xmmz7" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.345413 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/52844554-ecdd-40db-ae62-0f0de3d9030d-run-httpd\") pod \"ceilometer-0\" (UID: \"52844554-ecdd-40db-ae62-0f0de3d9030d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.345475 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mt52t\" (UniqueName: \"kubernetes.io/projected/4e87c463-6ff6-4a56-a207-b6b86578ec36-kube-api-access-mt52t\") pod \"cinder-db-sync-xmmz7\" (UID: \"4e87c463-6ff6-4a56-a207-b6b86578ec36\") " pod="openstack-kuttl-tests/cinder-db-sync-xmmz7" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.345506 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/52844554-ecdd-40db-ae62-0f0de3d9030d-scripts\") pod \"ceilometer-0\" (UID: \"52844554-ecdd-40db-ae62-0f0de3d9030d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:13:08 crc 
kubenswrapper[4558]: I0120 17:13:08.345527 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c3bca81a-32b0-4c02-90c0-ab10bb1eb835-logs\") pod \"placement-db-sync-c95c5\" (UID: \"c3bca81a-32b0-4c02-90c0-ab10bb1eb835\") " pod="openstack-kuttl-tests/placement-db-sync-c95c5" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.345551 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4e87c463-6ff6-4a56-a207-b6b86578ec36-db-sync-config-data\") pod \"cinder-db-sync-xmmz7\" (UID: \"4e87c463-6ff6-4a56-a207-b6b86578ec36\") " pod="openstack-kuttl-tests/cinder-db-sync-xmmz7" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.345585 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-92kfm\" (UniqueName: \"kubernetes.io/projected/c3bca81a-32b0-4c02-90c0-ab10bb1eb835-kube-api-access-92kfm\") pod \"placement-db-sync-c95c5\" (UID: \"c3bca81a-32b0-4c02-90c0-ab10bb1eb835\") " pod="openstack-kuttl-tests/placement-db-sync-c95c5" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.345615 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c3bca81a-32b0-4c02-90c0-ab10bb1eb835-config-data\") pod \"placement-db-sync-c95c5\" (UID: \"c3bca81a-32b0-4c02-90c0-ab10bb1eb835\") " pod="openstack-kuttl-tests/placement-db-sync-c95c5" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.345639 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/52844554-ecdd-40db-ae62-0f0de3d9030d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"52844554-ecdd-40db-ae62-0f0de3d9030d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.345686 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e87c463-6ff6-4a56-a207-b6b86578ec36-config-data\") pod \"cinder-db-sync-xmmz7\" (UID: \"4e87c463-6ff6-4a56-a207-b6b86578ec36\") " pod="openstack-kuttl-tests/cinder-db-sync-xmmz7" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.345720 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52844554-ecdd-40db-ae62-0f0de3d9030d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"52844554-ecdd-40db-ae62-0f0de3d9030d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.345772 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e87c463-6ff6-4a56-a207-b6b86578ec36-combined-ca-bundle\") pod \"cinder-db-sync-xmmz7\" (UID: \"4e87c463-6ff6-4a56-a207-b6b86578ec36\") " pod="openstack-kuttl-tests/cinder-db-sync-xmmz7" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.345798 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c3bca81a-32b0-4c02-90c0-ab10bb1eb835-scripts\") pod \"placement-db-sync-c95c5\" (UID: \"c3bca81a-32b0-4c02-90c0-ab10bb1eb835\") " pod="openstack-kuttl-tests/placement-db-sync-c95c5" Jan 
20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.346527 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/52844554-ecdd-40db-ae62-0f0de3d9030d-run-httpd\") pod \"ceilometer-0\" (UID: \"52844554-ecdd-40db-ae62-0f0de3d9030d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.346694 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/52844554-ecdd-40db-ae62-0f0de3d9030d-log-httpd\") pod \"ceilometer-0\" (UID: \"52844554-ecdd-40db-ae62-0f0de3d9030d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.350483 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/52844554-ecdd-40db-ae62-0f0de3d9030d-config-data\") pod \"ceilometer-0\" (UID: \"52844554-ecdd-40db-ae62-0f0de3d9030d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.351043 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52844554-ecdd-40db-ae62-0f0de3d9030d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"52844554-ecdd-40db-ae62-0f0de3d9030d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.351798 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/52844554-ecdd-40db-ae62-0f0de3d9030d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"52844554-ecdd-40db-ae62-0f0de3d9030d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.357077 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/52844554-ecdd-40db-ae62-0f0de3d9030d-scripts\") pod \"ceilometer-0\" (UID: \"52844554-ecdd-40db-ae62-0f0de3d9030d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.361242 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w4kzx\" (UniqueName: \"kubernetes.io/projected/52844554-ecdd-40db-ae62-0f0de3d9030d-kube-api-access-w4kzx\") pod \"ceilometer-0\" (UID: \"52844554-ecdd-40db-ae62-0f0de3d9030d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.363274 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-479z7" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.448099 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e87c463-6ff6-4a56-a207-b6b86578ec36-config-data\") pod \"cinder-db-sync-xmmz7\" (UID: \"4e87c463-6ff6-4a56-a207-b6b86578ec36\") " pod="openstack-kuttl-tests/cinder-db-sync-xmmz7" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.448373 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e87c463-6ff6-4a56-a207-b6b86578ec36-combined-ca-bundle\") pod \"cinder-db-sync-xmmz7\" (UID: \"4e87c463-6ff6-4a56-a207-b6b86578ec36\") " pod="openstack-kuttl-tests/cinder-db-sync-xmmz7" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.448479 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c3bca81a-32b0-4c02-90c0-ab10bb1eb835-scripts\") pod \"placement-db-sync-c95c5\" (UID: \"c3bca81a-32b0-4c02-90c0-ab10bb1eb835\") " pod="openstack-kuttl-tests/placement-db-sync-c95c5" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.448574 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4e87c463-6ff6-4a56-a207-b6b86578ec36-etc-machine-id\") pod \"cinder-db-sync-xmmz7\" (UID: \"4e87c463-6ff6-4a56-a207-b6b86578ec36\") " pod="openstack-kuttl-tests/cinder-db-sync-xmmz7" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.448696 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3bca81a-32b0-4c02-90c0-ab10bb1eb835-combined-ca-bundle\") pod \"placement-db-sync-c95c5\" (UID: \"c3bca81a-32b0-4c02-90c0-ab10bb1eb835\") " pod="openstack-kuttl-tests/placement-db-sync-c95c5" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.448824 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e87c463-6ff6-4a56-a207-b6b86578ec36-scripts\") pod \"cinder-db-sync-xmmz7\" (UID: \"4e87c463-6ff6-4a56-a207-b6b86578ec36\") " pod="openstack-kuttl-tests/cinder-db-sync-xmmz7" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.448959 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mt52t\" (UniqueName: \"kubernetes.io/projected/4e87c463-6ff6-4a56-a207-b6b86578ec36-kube-api-access-mt52t\") pod \"cinder-db-sync-xmmz7\" (UID: \"4e87c463-6ff6-4a56-a207-b6b86578ec36\") " pod="openstack-kuttl-tests/cinder-db-sync-xmmz7" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.449102 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c3bca81a-32b0-4c02-90c0-ab10bb1eb835-logs\") pod \"placement-db-sync-c95c5\" (UID: \"c3bca81a-32b0-4c02-90c0-ab10bb1eb835\") " pod="openstack-kuttl-tests/placement-db-sync-c95c5" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.449210 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4e87c463-6ff6-4a56-a207-b6b86578ec36-db-sync-config-data\") pod \"cinder-db-sync-xmmz7\" (UID: \"4e87c463-6ff6-4a56-a207-b6b86578ec36\") " pod="openstack-kuttl-tests/cinder-db-sync-xmmz7" Jan 20 17:13:08 crc 
kubenswrapper[4558]: I0120 17:13:08.449323 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-92kfm\" (UniqueName: \"kubernetes.io/projected/c3bca81a-32b0-4c02-90c0-ab10bb1eb835-kube-api-access-92kfm\") pod \"placement-db-sync-c95c5\" (UID: \"c3bca81a-32b0-4c02-90c0-ab10bb1eb835\") " pod="openstack-kuttl-tests/placement-db-sync-c95c5" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.448821 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4e87c463-6ff6-4a56-a207-b6b86578ec36-etc-machine-id\") pod \"cinder-db-sync-xmmz7\" (UID: \"4e87c463-6ff6-4a56-a207-b6b86578ec36\") " pod="openstack-kuttl-tests/cinder-db-sync-xmmz7" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.449618 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c3bca81a-32b0-4c02-90c0-ab10bb1eb835-logs\") pod \"placement-db-sync-c95c5\" (UID: \"c3bca81a-32b0-4c02-90c0-ab10bb1eb835\") " pod="openstack-kuttl-tests/placement-db-sync-c95c5" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.449976 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c3bca81a-32b0-4c02-90c0-ab10bb1eb835-config-data\") pod \"placement-db-sync-c95c5\" (UID: \"c3bca81a-32b0-4c02-90c0-ab10bb1eb835\") " pod="openstack-kuttl-tests/placement-db-sync-c95c5" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.453547 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e87c463-6ff6-4a56-a207-b6b86578ec36-scripts\") pod \"cinder-db-sync-xmmz7\" (UID: \"4e87c463-6ff6-4a56-a207-b6b86578ec36\") " pod="openstack-kuttl-tests/cinder-db-sync-xmmz7" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.453902 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c3bca81a-32b0-4c02-90c0-ab10bb1eb835-config-data\") pod \"placement-db-sync-c95c5\" (UID: \"c3bca81a-32b0-4c02-90c0-ab10bb1eb835\") " pod="openstack-kuttl-tests/placement-db-sync-c95c5" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.454980 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4e87c463-6ff6-4a56-a207-b6b86578ec36-db-sync-config-data\") pod \"cinder-db-sync-xmmz7\" (UID: \"4e87c463-6ff6-4a56-a207-b6b86578ec36\") " pod="openstack-kuttl-tests/cinder-db-sync-xmmz7" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.455958 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e87c463-6ff6-4a56-a207-b6b86578ec36-config-data\") pod \"cinder-db-sync-xmmz7\" (UID: \"4e87c463-6ff6-4a56-a207-b6b86578ec36\") " pod="openstack-kuttl-tests/cinder-db-sync-xmmz7" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.456946 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c3bca81a-32b0-4c02-90c0-ab10bb1eb835-scripts\") pod \"placement-db-sync-c95c5\" (UID: \"c3bca81a-32b0-4c02-90c0-ab10bb1eb835\") " pod="openstack-kuttl-tests/placement-db-sync-c95c5" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.457377 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/4e87c463-6ff6-4a56-a207-b6b86578ec36-combined-ca-bundle\") pod \"cinder-db-sync-xmmz7\" (UID: \"4e87c463-6ff6-4a56-a207-b6b86578ec36\") " pod="openstack-kuttl-tests/cinder-db-sync-xmmz7" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.457895 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3bca81a-32b0-4c02-90c0-ab10bb1eb835-combined-ca-bundle\") pod \"placement-db-sync-c95c5\" (UID: \"c3bca81a-32b0-4c02-90c0-ab10bb1eb835\") " pod="openstack-kuttl-tests/placement-db-sync-c95c5" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.465639 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.465957 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-92kfm\" (UniqueName: \"kubernetes.io/projected/c3bca81a-32b0-4c02-90c0-ab10bb1eb835-kube-api-access-92kfm\") pod \"placement-db-sync-c95c5\" (UID: \"c3bca81a-32b0-4c02-90c0-ab10bb1eb835\") " pod="openstack-kuttl-tests/placement-db-sync-c95c5" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.467863 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mt52t\" (UniqueName: \"kubernetes.io/projected/4e87c463-6ff6-4a56-a207-b6b86578ec36-kube-api-access-mt52t\") pod \"cinder-db-sync-xmmz7\" (UID: \"4e87c463-6ff6-4a56-a207-b6b86578ec36\") " pod="openstack-kuttl-tests/cinder-db-sync-xmmz7" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.542707 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-db-sync-xmmz7" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.585016 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b099ccf1-da7f-4e4d-80cf-629b21cd774c" path="/var/lib/kubelet/pods/b099ccf1-da7f-4e4d-80cf-629b21cd774c/volumes" Jan 20 17:13:08 crc kubenswrapper[4558]: I0120 17:13:08.649522 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-db-sync-c95c5" Jan 20 17:13:09 crc kubenswrapper[4558]: I0120 17:13:09.168479 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:13:09 crc kubenswrapper[4558]: I0120 17:13:09.169859 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:13:09 crc kubenswrapper[4558]: I0120 17:13:09.171905 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-internal-svc" Jan 20 17:13:09 crc kubenswrapper[4558]: I0120 17:13:09.171950 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-internal-config-data" Jan 20 17:13:09 crc kubenswrapper[4558]: I0120 17:13:09.173526 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-scripts" Jan 20 17:13:09 crc kubenswrapper[4558]: I0120 17:13:09.177632 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-glance-dockercfg-5qnnr" Jan 20 17:13:09 crc kubenswrapper[4558]: I0120 17:13:09.194931 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:13:09 crc kubenswrapper[4558]: I0120 17:13:09.268463 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/384dff5b-0848-41ff-bcac-ae3bce090fd4-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"384dff5b-0848-41ff-bcac-ae3bce090fd4\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:13:09 crc kubenswrapper[4558]: I0120 17:13:09.268529 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-chhqk\" (UniqueName: \"kubernetes.io/projected/384dff5b-0848-41ff-bcac-ae3bce090fd4-kube-api-access-chhqk\") pod \"glance-default-internal-api-0\" (UID: \"384dff5b-0848-41ff-bcac-ae3bce090fd4\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:13:09 crc kubenswrapper[4558]: I0120 17:13:09.268610 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/384dff5b-0848-41ff-bcac-ae3bce090fd4-logs\") pod \"glance-default-internal-api-0\" (UID: \"384dff5b-0848-41ff-bcac-ae3bce090fd4\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:13:09 crc kubenswrapper[4558]: I0120 17:13:09.268726 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/384dff5b-0848-41ff-bcac-ae3bce090fd4-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"384dff5b-0848-41ff-bcac-ae3bce090fd4\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:13:09 crc kubenswrapper[4558]: I0120 17:13:09.268950 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage17-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage17-crc\") pod \"glance-default-internal-api-0\" (UID: \"384dff5b-0848-41ff-bcac-ae3bce090fd4\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:13:09 crc kubenswrapper[4558]: I0120 17:13:09.269036 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/384dff5b-0848-41ff-bcac-ae3bce090fd4-config-data\") pod \"glance-default-internal-api-0\" (UID: \"384dff5b-0848-41ff-bcac-ae3bce090fd4\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:13:09 crc kubenswrapper[4558]: I0120 17:13:09.269183 4558 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/384dff5b-0848-41ff-bcac-ae3bce090fd4-scripts\") pod \"glance-default-internal-api-0\" (UID: \"384dff5b-0848-41ff-bcac-ae3bce090fd4\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:13:09 crc kubenswrapper[4558]: I0120 17:13:09.269254 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/384dff5b-0848-41ff-bcac-ae3bce090fd4-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"384dff5b-0848-41ff-bcac-ae3bce090fd4\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:13:09 crc kubenswrapper[4558]: I0120 17:13:09.371430 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/384dff5b-0848-41ff-bcac-ae3bce090fd4-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"384dff5b-0848-41ff-bcac-ae3bce090fd4\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:13:09 crc kubenswrapper[4558]: I0120 17:13:09.371785 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-chhqk\" (UniqueName: \"kubernetes.io/projected/384dff5b-0848-41ff-bcac-ae3bce090fd4-kube-api-access-chhqk\") pod \"glance-default-internal-api-0\" (UID: \"384dff5b-0848-41ff-bcac-ae3bce090fd4\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:13:09 crc kubenswrapper[4558]: I0120 17:13:09.371850 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/384dff5b-0848-41ff-bcac-ae3bce090fd4-logs\") pod \"glance-default-internal-api-0\" (UID: \"384dff5b-0848-41ff-bcac-ae3bce090fd4\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:13:09 crc kubenswrapper[4558]: I0120 17:13:09.371890 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/384dff5b-0848-41ff-bcac-ae3bce090fd4-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"384dff5b-0848-41ff-bcac-ae3bce090fd4\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:13:09 crc kubenswrapper[4558]: I0120 17:13:09.371990 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage17-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage17-crc\") pod \"glance-default-internal-api-0\" (UID: \"384dff5b-0848-41ff-bcac-ae3bce090fd4\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:13:09 crc kubenswrapper[4558]: I0120 17:13:09.372044 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/384dff5b-0848-41ff-bcac-ae3bce090fd4-config-data\") pod \"glance-default-internal-api-0\" (UID: \"384dff5b-0848-41ff-bcac-ae3bce090fd4\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:13:09 crc kubenswrapper[4558]: I0120 17:13:09.372114 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/384dff5b-0848-41ff-bcac-ae3bce090fd4-scripts\") pod \"glance-default-internal-api-0\" (UID: \"384dff5b-0848-41ff-bcac-ae3bce090fd4\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:13:09 crc 
kubenswrapper[4558]: I0120 17:13:09.372158 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/384dff5b-0848-41ff-bcac-ae3bce090fd4-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"384dff5b-0848-41ff-bcac-ae3bce090fd4\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:13:09 crc kubenswrapper[4558]: I0120 17:13:09.373075 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage17-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage17-crc\") pod \"glance-default-internal-api-0\" (UID: \"384dff5b-0848-41ff-bcac-ae3bce090fd4\") device mount path \"/mnt/openstack/pv17\"" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:13:09 crc kubenswrapper[4558]: I0120 17:13:09.374262 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/384dff5b-0848-41ff-bcac-ae3bce090fd4-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"384dff5b-0848-41ff-bcac-ae3bce090fd4\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:13:09 crc kubenswrapper[4558]: I0120 17:13:09.374363 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/384dff5b-0848-41ff-bcac-ae3bce090fd4-logs\") pod \"glance-default-internal-api-0\" (UID: \"384dff5b-0848-41ff-bcac-ae3bce090fd4\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:13:09 crc kubenswrapper[4558]: I0120 17:13:09.380346 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/384dff5b-0848-41ff-bcac-ae3bce090fd4-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"384dff5b-0848-41ff-bcac-ae3bce090fd4\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:13:09 crc kubenswrapper[4558]: I0120 17:13:09.380496 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/384dff5b-0848-41ff-bcac-ae3bce090fd4-config-data\") pod \"glance-default-internal-api-0\" (UID: \"384dff5b-0848-41ff-bcac-ae3bce090fd4\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:13:09 crc kubenswrapper[4558]: I0120 17:13:09.380587 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/384dff5b-0848-41ff-bcac-ae3bce090fd4-scripts\") pod \"glance-default-internal-api-0\" (UID: \"384dff5b-0848-41ff-bcac-ae3bce090fd4\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:13:09 crc kubenswrapper[4558]: I0120 17:13:09.380705 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/384dff5b-0848-41ff-bcac-ae3bce090fd4-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"384dff5b-0848-41ff-bcac-ae3bce090fd4\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:13:09 crc kubenswrapper[4558]: I0120 17:13:09.391702 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-chhqk\" (UniqueName: \"kubernetes.io/projected/384dff5b-0848-41ff-bcac-ae3bce090fd4-kube-api-access-chhqk\") pod \"glance-default-internal-api-0\" (UID: \"384dff5b-0848-41ff-bcac-ae3bce090fd4\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:13:09 
crc kubenswrapper[4558]: I0120 17:13:09.410394 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage17-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage17-crc\") pod \"glance-default-internal-api-0\" (UID: \"384dff5b-0848-41ff-bcac-ae3bce090fd4\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:13:09 crc kubenswrapper[4558]: I0120 17:13:09.485293 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.317314 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-db-sync-skf6m"] Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.318632 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-db-sync-skf6m" Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.325154 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-config-data" Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.325189 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-barbican-dockercfg-gwmf6" Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.327176 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-db-sync-skf6m"] Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.395738 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/6906ea2d-0a88-47cd-9c23-3e1c40320286-db-sync-config-data\") pod \"barbican-db-sync-skf6m\" (UID: \"6906ea2d-0a88-47cd-9c23-3e1c40320286\") " pod="openstack-kuttl-tests/barbican-db-sync-skf6m" Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.396267 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6906ea2d-0a88-47cd-9c23-3e1c40320286-combined-ca-bundle\") pod \"barbican-db-sync-skf6m\" (UID: \"6906ea2d-0a88-47cd-9c23-3e1c40320286\") " pod="openstack-kuttl-tests/barbican-db-sync-skf6m" Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.396293 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kx4j7\" (UniqueName: \"kubernetes.io/projected/6906ea2d-0a88-47cd-9c23-3e1c40320286-kube-api-access-kx4j7\") pod \"barbican-db-sync-skf6m\" (UID: \"6906ea2d-0a88-47cd-9c23-3e1c40320286\") " pod="openstack-kuttl-tests/barbican-db-sync-skf6m" Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.496675 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.497876 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/6906ea2d-0a88-47cd-9c23-3e1c40320286-db-sync-config-data\") pod \"barbican-db-sync-skf6m\" (UID: \"6906ea2d-0a88-47cd-9c23-3e1c40320286\") " pod="openstack-kuttl-tests/barbican-db-sync-skf6m" Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.497994 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6906ea2d-0a88-47cd-9c23-3e1c40320286-combined-ca-bundle\") pod 
\"barbican-db-sync-skf6m\" (UID: \"6906ea2d-0a88-47cd-9c23-3e1c40320286\") " pod="openstack-kuttl-tests/barbican-db-sync-skf6m" Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.498021 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kx4j7\" (UniqueName: \"kubernetes.io/projected/6906ea2d-0a88-47cd-9c23-3e1c40320286-kube-api-access-kx4j7\") pod \"barbican-db-sync-skf6m\" (UID: \"6906ea2d-0a88-47cd-9c23-3e1c40320286\") " pod="openstack-kuttl-tests/barbican-db-sync-skf6m" Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.498396 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.503850 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/6906ea2d-0a88-47cd-9c23-3e1c40320286-db-sync-config-data\") pod \"barbican-db-sync-skf6m\" (UID: \"6906ea2d-0a88-47cd-9c23-3e1c40320286\") " pod="openstack-kuttl-tests/barbican-db-sync-skf6m" Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.503874 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6906ea2d-0a88-47cd-9c23-3e1c40320286-combined-ca-bundle\") pod \"barbican-db-sync-skf6m\" (UID: \"6906ea2d-0a88-47cd-9c23-3e1c40320286\") " pod="openstack-kuttl-tests/barbican-db-sync-skf6m" Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.504791 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-external-config-data" Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.505038 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-public-svc" Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.541281 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kx4j7\" (UniqueName: \"kubernetes.io/projected/6906ea2d-0a88-47cd-9c23-3e1c40320286-kube-api-access-kx4j7\") pod \"barbican-db-sync-skf6m\" (UID: \"6906ea2d-0a88-47cd-9c23-3e1c40320286\") " pod="openstack-kuttl-tests/barbican-db-sync-skf6m" Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.560730 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.600793 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5fc79d09-4510-442a-84e3-bcc714eb4a07-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"5fc79d09-4510-442a-84e3-bcc714eb4a07\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.600858 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5fc79d09-4510-442a-84e3-bcc714eb4a07-scripts\") pod \"glance-default-external-api-0\" (UID: \"5fc79d09-4510-442a-84e3-bcc714eb4a07\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.600932 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5fc79d09-4510-442a-84e3-bcc714eb4a07-combined-ca-bundle\") pod 
\"glance-default-external-api-0\" (UID: \"5fc79d09-4510-442a-84e3-bcc714eb4a07\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.600957 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"glance-default-external-api-0\" (UID: \"5fc79d09-4510-442a-84e3-bcc714eb4a07\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.600983 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5fc79d09-4510-442a-84e3-bcc714eb4a07-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"5fc79d09-4510-442a-84e3-bcc714eb4a07\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.601022 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h2782\" (UniqueName: \"kubernetes.io/projected/5fc79d09-4510-442a-84e3-bcc714eb4a07-kube-api-access-h2782\") pod \"glance-default-external-api-0\" (UID: \"5fc79d09-4510-442a-84e3-bcc714eb4a07\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.601044 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5fc79d09-4510-442a-84e3-bcc714eb4a07-logs\") pod \"glance-default-external-api-0\" (UID: \"5fc79d09-4510-442a-84e3-bcc714eb4a07\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.601099 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5fc79d09-4510-442a-84e3-bcc714eb4a07-config-data\") pod \"glance-default-external-api-0\" (UID: \"5fc79d09-4510-442a-84e3-bcc714eb4a07\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.611450 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:13:10 crc kubenswrapper[4558]: E0120 17:13:10.612204 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[combined-ca-bundle config-data glance httpd-run kube-api-access-h2782 logs public-tls-certs scripts], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="5fc79d09-4510-442a-84e3-bcc714eb4a07" Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.641882 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-db-sync-skf6m" Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.652264 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.672797 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-db-sync-5wdp7"] Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.673987 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-db-sync-5wdp7" Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.679340 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.680040 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-config" Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.680322 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-httpd-config" Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.680590 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-neutron-dockercfg-pk75l" Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.692204 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.695906 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-db-sync-5wdp7"] Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.703421 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5fc79d09-4510-442a-84e3-bcc714eb4a07-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"5fc79d09-4510-442a-84e3-bcc714eb4a07\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.703496 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5fc79d09-4510-442a-84e3-bcc714eb4a07-scripts\") pod \"glance-default-external-api-0\" (UID: \"5fc79d09-4510-442a-84e3-bcc714eb4a07\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.703536 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5fc79d09-4510-442a-84e3-bcc714eb4a07-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"5fc79d09-4510-442a-84e3-bcc714eb4a07\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.703564 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"glance-default-external-api-0\" (UID: \"5fc79d09-4510-442a-84e3-bcc714eb4a07\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.703587 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5fc79d09-4510-442a-84e3-bcc714eb4a07-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"5fc79d09-4510-442a-84e3-bcc714eb4a07\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.703616 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h2782\" (UniqueName: \"kubernetes.io/projected/5fc79d09-4510-442a-84e3-bcc714eb4a07-kube-api-access-h2782\") pod \"glance-default-external-api-0\" (UID: \"5fc79d09-4510-442a-84e3-bcc714eb4a07\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:13:10 crc 
kubenswrapper[4558]: I0120 17:13:10.703637 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5fc79d09-4510-442a-84e3-bcc714eb4a07-logs\") pod \"glance-default-external-api-0\" (UID: \"5fc79d09-4510-442a-84e3-bcc714eb4a07\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.703672 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5fc79d09-4510-442a-84e3-bcc714eb4a07-config-data\") pod \"glance-default-external-api-0\" (UID: \"5fc79d09-4510-442a-84e3-bcc714eb4a07\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.704500 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"glance-default-external-api-0\" (UID: \"5fc79d09-4510-442a-84e3-bcc714eb4a07\") device mount path \"/mnt/openstack/pv13\"" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.708705 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5fc79d09-4510-442a-84e3-bcc714eb4a07-logs\") pod \"glance-default-external-api-0\" (UID: \"5fc79d09-4510-442a-84e3-bcc714eb4a07\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.708947 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5fc79d09-4510-442a-84e3-bcc714eb4a07-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"5fc79d09-4510-442a-84e3-bcc714eb4a07\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.711802 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5fc79d09-4510-442a-84e3-bcc714eb4a07-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"5fc79d09-4510-442a-84e3-bcc714eb4a07\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.729087 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5fc79d09-4510-442a-84e3-bcc714eb4a07-scripts\") pod \"glance-default-external-api-0\" (UID: \"5fc79d09-4510-442a-84e3-bcc714eb4a07\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.742682 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5fc79d09-4510-442a-84e3-bcc714eb4a07-config-data\") pod \"glance-default-external-api-0\" (UID: \"5fc79d09-4510-442a-84e3-bcc714eb4a07\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.748342 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5fc79d09-4510-442a-84e3-bcc714eb4a07-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"5fc79d09-4510-442a-84e3-bcc714eb4a07\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 
17:13:10.748870 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h2782\" (UniqueName: \"kubernetes.io/projected/5fc79d09-4510-442a-84e3-bcc714eb4a07-kube-api-access-h2782\") pod \"glance-default-external-api-0\" (UID: \"5fc79d09-4510-442a-84e3-bcc714eb4a07\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.772313 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"glance-default-external-api-0\" (UID: \"5fc79d09-4510-442a-84e3-bcc714eb4a07\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.801471 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.810451 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5fc79d09-4510-442a-84e3-bcc714eb4a07-logs\") pod \"5fc79d09-4510-442a-84e3-bcc714eb4a07\" (UID: \"5fc79d09-4510-442a-84e3-bcc714eb4a07\") " Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.810631 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5fc79d09-4510-442a-84e3-bcc714eb4a07-config-data\") pod \"5fc79d09-4510-442a-84e3-bcc714eb4a07\" (UID: \"5fc79d09-4510-442a-84e3-bcc714eb4a07\") " Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.810705 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5fc79d09-4510-442a-84e3-bcc714eb4a07-public-tls-certs\") pod \"5fc79d09-4510-442a-84e3-bcc714eb4a07\" (UID: \"5fc79d09-4510-442a-84e3-bcc714eb4a07\") " Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.810741 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5fc79d09-4510-442a-84e3-bcc714eb4a07-combined-ca-bundle\") pod \"5fc79d09-4510-442a-84e3-bcc714eb4a07\" (UID: \"5fc79d09-4510-442a-84e3-bcc714eb4a07\") " Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.810814 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h2782\" (UniqueName: \"kubernetes.io/projected/5fc79d09-4510-442a-84e3-bcc714eb4a07-kube-api-access-h2782\") pod \"5fc79d09-4510-442a-84e3-bcc714eb4a07\" (UID: \"5fc79d09-4510-442a-84e3-bcc714eb4a07\") " Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.810883 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5fc79d09-4510-442a-84e3-bcc714eb4a07-httpd-run\") pod \"5fc79d09-4510-442a-84e3-bcc714eb4a07\" (UID: \"5fc79d09-4510-442a-84e3-bcc714eb4a07\") " Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.810956 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5fc79d09-4510-442a-84e3-bcc714eb4a07-scripts\") pod \"5fc79d09-4510-442a-84e3-bcc714eb4a07\" (UID: \"5fc79d09-4510-442a-84e3-bcc714eb4a07\") " Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.811691 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/ddaf057e-7323-49e4-a1c5-0f91ad0aaaaa-combined-ca-bundle\") pod \"neutron-db-sync-5wdp7\" (UID: \"ddaf057e-7323-49e4-a1c5-0f91ad0aaaaa\") " pod="openstack-kuttl-tests/neutron-db-sync-5wdp7" Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.811761 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/ddaf057e-7323-49e4-a1c5-0f91ad0aaaaa-config\") pod \"neutron-db-sync-5wdp7\" (UID: \"ddaf057e-7323-49e4-a1c5-0f91ad0aaaaa\") " pod="openstack-kuttl-tests/neutron-db-sync-5wdp7" Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.811821 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vwrg2\" (UniqueName: \"kubernetes.io/projected/ddaf057e-7323-49e4-a1c5-0f91ad0aaaaa-kube-api-access-vwrg2\") pod \"neutron-db-sync-5wdp7\" (UID: \"ddaf057e-7323-49e4-a1c5-0f91ad0aaaaa\") " pod="openstack-kuttl-tests/neutron-db-sync-5wdp7" Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.812871 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5fc79d09-4510-442a-84e3-bcc714eb4a07-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "5fc79d09-4510-442a-84e3-bcc714eb4a07" (UID: "5fc79d09-4510-442a-84e3-bcc714eb4a07"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.831460 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fc79d09-4510-442a-84e3-bcc714eb4a07-kube-api-access-h2782" (OuterVolumeSpecName: "kube-api-access-h2782") pod "5fc79d09-4510-442a-84e3-bcc714eb4a07" (UID: "5fc79d09-4510-442a-84e3-bcc714eb4a07"). InnerVolumeSpecName "kube-api-access-h2782". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.834381 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fc79d09-4510-442a-84e3-bcc714eb4a07-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5fc79d09-4510-442a-84e3-bcc714eb4a07" (UID: "5fc79d09-4510-442a-84e3-bcc714eb4a07"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.834623 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fc79d09-4510-442a-84e3-bcc714eb4a07-config-data" (OuterVolumeSpecName: "config-data") pod "5fc79d09-4510-442a-84e3-bcc714eb4a07" (UID: "5fc79d09-4510-442a-84e3-bcc714eb4a07"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.844231 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5fc79d09-4510-442a-84e3-bcc714eb4a07-logs" (OuterVolumeSpecName: "logs") pod "5fc79d09-4510-442a-84e3-bcc714eb4a07" (UID: "5fc79d09-4510-442a-84e3-bcc714eb4a07"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.860360 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fc79d09-4510-442a-84e3-bcc714eb4a07-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "5fc79d09-4510-442a-84e3-bcc714eb4a07" (UID: "5fc79d09-4510-442a-84e3-bcc714eb4a07"). 
InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.860388 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fc79d09-4510-442a-84e3-bcc714eb4a07-scripts" (OuterVolumeSpecName: "scripts") pod "5fc79d09-4510-442a-84e3-bcc714eb4a07" (UID: "5fc79d09-4510-442a-84e3-bcc714eb4a07"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.914449 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"5fc79d09-4510-442a-84e3-bcc714eb4a07\" (UID: \"5fc79d09-4510-442a-84e3-bcc714eb4a07\") " Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.915100 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ddaf057e-7323-49e4-a1c5-0f91ad0aaaaa-combined-ca-bundle\") pod \"neutron-db-sync-5wdp7\" (UID: \"ddaf057e-7323-49e4-a1c5-0f91ad0aaaaa\") " pod="openstack-kuttl-tests/neutron-db-sync-5wdp7" Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.915138 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/ddaf057e-7323-49e4-a1c5-0f91ad0aaaaa-config\") pod \"neutron-db-sync-5wdp7\" (UID: \"ddaf057e-7323-49e4-a1c5-0f91ad0aaaaa\") " pod="openstack-kuttl-tests/neutron-db-sync-5wdp7" Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.915179 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vwrg2\" (UniqueName: \"kubernetes.io/projected/ddaf057e-7323-49e4-a1c5-0f91ad0aaaaa-kube-api-access-vwrg2\") pod \"neutron-db-sync-5wdp7\" (UID: \"ddaf057e-7323-49e4-a1c5-0f91ad0aaaaa\") " pod="openstack-kuttl-tests/neutron-db-sync-5wdp7" Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.915251 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h2782\" (UniqueName: \"kubernetes.io/projected/5fc79d09-4510-442a-84e3-bcc714eb4a07-kube-api-access-h2782\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.915263 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5fc79d09-4510-442a-84e3-bcc714eb4a07-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.915272 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5fc79d09-4510-442a-84e3-bcc714eb4a07-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.915281 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5fc79d09-4510-442a-84e3-bcc714eb4a07-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.915290 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5fc79d09-4510-442a-84e3-bcc714eb4a07-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.915309 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5fc79d09-4510-442a-84e3-bcc714eb4a07-public-tls-certs\") on node \"crc\" DevicePath \"\"" 
Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.915318 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5fc79d09-4510-442a-84e3-bcc714eb4a07-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.923845 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/ddaf057e-7323-49e4-a1c5-0f91ad0aaaaa-config\") pod \"neutron-db-sync-5wdp7\" (UID: \"ddaf057e-7323-49e4-a1c5-0f91ad0aaaaa\") " pod="openstack-kuttl-tests/neutron-db-sync-5wdp7" Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.925916 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ddaf057e-7323-49e4-a1c5-0f91ad0aaaaa-combined-ca-bundle\") pod \"neutron-db-sync-5wdp7\" (UID: \"ddaf057e-7323-49e4-a1c5-0f91ad0aaaaa\") " pod="openstack-kuttl-tests/neutron-db-sync-5wdp7" Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.926793 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage13-crc" (OuterVolumeSpecName: "glance") pod "5fc79d09-4510-442a-84e3-bcc714eb4a07" (UID: "5fc79d09-4510-442a-84e3-bcc714eb4a07"). InnerVolumeSpecName "local-storage13-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:13:10 crc kubenswrapper[4558]: I0120 17:13:10.931043 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vwrg2\" (UniqueName: \"kubernetes.io/projected/ddaf057e-7323-49e4-a1c5-0f91ad0aaaaa-kube-api-access-vwrg2\") pod \"neutron-db-sync-5wdp7\" (UID: \"ddaf057e-7323-49e4-a1c5-0f91ad0aaaaa\") " pod="openstack-kuttl-tests/neutron-db-sync-5wdp7" Jan 20 17:13:11 crc kubenswrapper[4558]: I0120 17:13:11.017398 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") on node \"crc\" " Jan 20 17:13:11 crc kubenswrapper[4558]: I0120 17:13:11.031723 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage13-crc" (UniqueName: "kubernetes.io/local-volume/local-storage13-crc") on node "crc" Jan 20 17:13:11 crc kubenswrapper[4558]: I0120 17:13:11.044293 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-db-sync-5wdp7" Jan 20 17:13:11 crc kubenswrapper[4558]: I0120 17:13:11.119521 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:11 crc kubenswrapper[4558]: I0120 17:13:11.263556 4558 scope.go:117] "RemoveContainer" containerID="5f412156d497432b7604ccd46172afc5d571c254c8c81a3782c29c607e00f7ae" Jan 20 17:13:11 crc kubenswrapper[4558]: I0120 17:13:11.663561 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:13:11 crc kubenswrapper[4558]: I0120 17:13:11.720278 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:13:11 crc kubenswrapper[4558]: I0120 17:13:11.735526 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:13:11 crc kubenswrapper[4558]: I0120 17:13:11.743241 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:13:11 crc kubenswrapper[4558]: I0120 17:13:11.744620 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:13:11 crc kubenswrapper[4558]: I0120 17:13:11.746883 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-external-config-data" Jan 20 17:13:11 crc kubenswrapper[4558]: I0120 17:13:11.747078 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-public-svc" Jan 20 17:13:11 crc kubenswrapper[4558]: I0120 17:13:11.760252 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:13:11 crc kubenswrapper[4558]: I0120 17:13:11.834052 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c061507a-294f-453b-be5d-4fdca19c790c-logs\") pod \"glance-default-external-api-0\" (UID: \"c061507a-294f-453b-be5d-4fdca19c790c\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:13:11 crc kubenswrapper[4558]: I0120 17:13:11.834109 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5lcn8\" (UniqueName: \"kubernetes.io/projected/c061507a-294f-453b-be5d-4fdca19c790c-kube-api-access-5lcn8\") pod \"glance-default-external-api-0\" (UID: \"c061507a-294f-453b-be5d-4fdca19c790c\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:13:11 crc kubenswrapper[4558]: I0120 17:13:11.834185 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c061507a-294f-453b-be5d-4fdca19c790c-config-data\") pod \"glance-default-external-api-0\" (UID: \"c061507a-294f-453b-be5d-4fdca19c790c\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:13:11 crc kubenswrapper[4558]: I0120 17:13:11.834213 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"glance-default-external-api-0\" (UID: \"c061507a-294f-453b-be5d-4fdca19c790c\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:13:11 crc kubenswrapper[4558]: I0120 17:13:11.834239 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c061507a-294f-453b-be5d-4fdca19c790c-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"c061507a-294f-453b-be5d-4fdca19c790c\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:13:11 crc kubenswrapper[4558]: I0120 17:13:11.834259 4558 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c061507a-294f-453b-be5d-4fdca19c790c-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"c061507a-294f-453b-be5d-4fdca19c790c\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:13:11 crc kubenswrapper[4558]: I0120 17:13:11.834283 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c061507a-294f-453b-be5d-4fdca19c790c-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"c061507a-294f-453b-be5d-4fdca19c790c\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:13:11 crc kubenswrapper[4558]: I0120 17:13:11.834364 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c061507a-294f-453b-be5d-4fdca19c790c-scripts\") pod \"glance-default-external-api-0\" (UID: \"c061507a-294f-453b-be5d-4fdca19c790c\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:13:11 crc kubenswrapper[4558]: I0120 17:13:11.935516 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c061507a-294f-453b-be5d-4fdca19c790c-logs\") pod \"glance-default-external-api-0\" (UID: \"c061507a-294f-453b-be5d-4fdca19c790c\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:13:11 crc kubenswrapper[4558]: I0120 17:13:11.935586 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5lcn8\" (UniqueName: \"kubernetes.io/projected/c061507a-294f-453b-be5d-4fdca19c790c-kube-api-access-5lcn8\") pod \"glance-default-external-api-0\" (UID: \"c061507a-294f-453b-be5d-4fdca19c790c\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:13:11 crc kubenswrapper[4558]: I0120 17:13:11.935709 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c061507a-294f-453b-be5d-4fdca19c790c-config-data\") pod \"glance-default-external-api-0\" (UID: \"c061507a-294f-453b-be5d-4fdca19c790c\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:13:11 crc kubenswrapper[4558]: I0120 17:13:11.935745 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"glance-default-external-api-0\" (UID: \"c061507a-294f-453b-be5d-4fdca19c790c\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:13:11 crc kubenswrapper[4558]: I0120 17:13:11.935779 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c061507a-294f-453b-be5d-4fdca19c790c-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"c061507a-294f-453b-be5d-4fdca19c790c\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:13:11 crc kubenswrapper[4558]: I0120 17:13:11.935809 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c061507a-294f-453b-be5d-4fdca19c790c-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"c061507a-294f-453b-be5d-4fdca19c790c\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:13:11 crc 
kubenswrapper[4558]: I0120 17:13:11.935847 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c061507a-294f-453b-be5d-4fdca19c790c-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"c061507a-294f-453b-be5d-4fdca19c790c\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:13:11 crc kubenswrapper[4558]: I0120 17:13:11.935897 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c061507a-294f-453b-be5d-4fdca19c790c-scripts\") pod \"glance-default-external-api-0\" (UID: \"c061507a-294f-453b-be5d-4fdca19c790c\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:13:11 crc kubenswrapper[4558]: I0120 17:13:11.935940 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"glance-default-external-api-0\" (UID: \"c061507a-294f-453b-be5d-4fdca19c790c\") device mount path \"/mnt/openstack/pv13\"" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:13:11 crc kubenswrapper[4558]: I0120 17:13:11.936780 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c061507a-294f-453b-be5d-4fdca19c790c-logs\") pod \"glance-default-external-api-0\" (UID: \"c061507a-294f-453b-be5d-4fdca19c790c\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:13:11 crc kubenswrapper[4558]: I0120 17:13:11.937592 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c061507a-294f-453b-be5d-4fdca19c790c-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"c061507a-294f-453b-be5d-4fdca19c790c\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:13:11 crc kubenswrapper[4558]: I0120 17:13:11.941426 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c061507a-294f-453b-be5d-4fdca19c790c-scripts\") pod \"glance-default-external-api-0\" (UID: \"c061507a-294f-453b-be5d-4fdca19c790c\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:13:11 crc kubenswrapper[4558]: I0120 17:13:11.941753 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c061507a-294f-453b-be5d-4fdca19c790c-config-data\") pod \"glance-default-external-api-0\" (UID: \"c061507a-294f-453b-be5d-4fdca19c790c\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:13:11 crc kubenswrapper[4558]: I0120 17:13:11.941993 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c061507a-294f-453b-be5d-4fdca19c790c-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"c061507a-294f-453b-be5d-4fdca19c790c\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:13:11 crc kubenswrapper[4558]: I0120 17:13:11.944010 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c061507a-294f-453b-be5d-4fdca19c790c-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"c061507a-294f-453b-be5d-4fdca19c790c\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:13:11 crc kubenswrapper[4558]: 
I0120 17:13:11.951137 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5lcn8\" (UniqueName: \"kubernetes.io/projected/c061507a-294f-453b-be5d-4fdca19c790c-kube-api-access-5lcn8\") pod \"glance-default-external-api-0\" (UID: \"c061507a-294f-453b-be5d-4fdca19c790c\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:13:11 crc kubenswrapper[4558]: I0120 17:13:11.965037 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"glance-default-external-api-0\" (UID: \"c061507a-294f-453b-be5d-4fdca19c790c\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:13:12 crc kubenswrapper[4558]: I0120 17:13:12.067219 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:13:12 crc kubenswrapper[4558]: I0120 17:13:12.584511 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fc79d09-4510-442a-84e3-bcc714eb4a07" path="/var/lib/kubelet/pods/5fc79d09-4510-442a-84e3-bcc714eb4a07/volumes" Jan 20 17:13:15 crc kubenswrapper[4558]: I0120 17:13:15.472722 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-479z7"] Jan 20 17:13:15 crc kubenswrapper[4558]: I0120 17:13:15.706725 4558 scope.go:117] "RemoveContainer" containerID="42bef42ec418e9c5fec49130ad8e153b3d3494179d54ac7a788a08bcfe416096" Jan 20 17:13:15 crc kubenswrapper[4558]: I0120 17:13:15.849732 4558 scope.go:117] "RemoveContainer" containerID="daad0853cc1011ae35e95a9f79a7ec7c1017c2cddf3225d7409dec8cb1b97163" Jan 20 17:13:16 crc kubenswrapper[4558]: I0120 17:13:16.204354 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-db-sync-c95c5"] Jan 20 17:13:16 crc kubenswrapper[4558]: W0120 17:13:16.227194 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode87a4844_8a90_4965_878f_d95aa09c47bb.slice/crio-53d4a8330d12668e9d869d236edfd30facbf5ef1c9317515c37714583c2d61ec WatchSource:0}: Error finding container 53d4a8330d12668e9d869d236edfd30facbf5ef1c9317515c37714583c2d61ec: Status 404 returned error can't find the container with id 53d4a8330d12668e9d869d236edfd30facbf5ef1c9317515c37714583c2d61ec Jan 20 17:13:16 crc kubenswrapper[4558]: I0120 17:13:16.227267 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/prometheus-metric-storage-0"] Jan 20 17:13:16 crc kubenswrapper[4558]: W0120 17:13:16.233073 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6906ea2d_0a88_47cd_9c23_3e1c40320286.slice/crio-8a31aef2472a4b8d8f98a3e4406ec52bc9216df431012766e9b0020fab44f8e7 WatchSource:0}: Error finding container 8a31aef2472a4b8d8f98a3e4406ec52bc9216df431012766e9b0020fab44f8e7: Status 404 returned error can't find the container with id 8a31aef2472a4b8d8f98a3e4406ec52bc9216df431012766e9b0020fab44f8e7 Jan 20 17:13:16 crc kubenswrapper[4558]: I0120 17:13:16.237691 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-db-sync-skf6m"] Jan 20 17:13:16 crc kubenswrapper[4558]: I0120 17:13:16.409325 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-db-sync-5wdp7"] Jan 20 17:13:16 crc kubenswrapper[4558]: I0120 17:13:16.423802 4558 
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-db-sync-xmmz7"] Jan 20 17:13:16 crc kubenswrapper[4558]: I0120 17:13:16.430261 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:13:16 crc kubenswrapper[4558]: I0120 17:13:16.433491 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:13:16 crc kubenswrapper[4558]: I0120 17:13:16.440298 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:13:16 crc kubenswrapper[4558]: I0120 17:13:16.757948 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-sync-c95c5" event={"ID":"c3bca81a-32b0-4c02-90c0-ab10bb1eb835","Type":"ContainerStarted","Data":"ede71d31814bb504757b712e0cf2f0927853e428f6428a924676c1d65b768351"} Jan 20 17:13:16 crc kubenswrapper[4558]: I0120 17:13:16.758001 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-sync-c95c5" event={"ID":"c3bca81a-32b0-4c02-90c0-ab10bb1eb835","Type":"ContainerStarted","Data":"5478c0409438d311ff48f1c0cdb0750ab3e6246803cf6daf8092bd03f1e06d6c"} Jan 20 17:13:16 crc kubenswrapper[4558]: I0120 17:13:16.765469 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/prometheus-metric-storage-0" event={"ID":"e87a4844-8a90-4965-878f-d95aa09c47bb","Type":"ContainerStarted","Data":"53d4a8330d12668e9d869d236edfd30facbf5ef1c9317515c37714583c2d61ec"} Jan 20 17:13:16 crc kubenswrapper[4558]: I0120 17:13:16.768657 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/watcher-db-sync-z8lmb" event={"ID":"4f36762c-fdfd-45ac-a5b1-4be2bd718a9b","Type":"ContainerStarted","Data":"c0a3f252e721c2ca07be91f94b01bf52c6603bf0f355466ad7d645ee1891d878"} Jan 20 17:13:16 crc kubenswrapper[4558]: I0120 17:13:16.779194 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-sync-skf6m" event={"ID":"6906ea2d-0a88-47cd-9c23-3e1c40320286","Type":"ContainerStarted","Data":"bd20e6e920cac2215a98a5c447c2c55bc432c00d80f0d36813856b38b27308b8"} Jan 20 17:13:16 crc kubenswrapper[4558]: I0120 17:13:16.779218 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-sync-skf6m" event={"ID":"6906ea2d-0a88-47cd-9c23-3e1c40320286","Type":"ContainerStarted","Data":"8a31aef2472a4b8d8f98a3e4406ec52bc9216df431012766e9b0020fab44f8e7"} Jan 20 17:13:16 crc kubenswrapper[4558]: I0120 17:13:16.791359 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/placement-db-sync-c95c5" podStartSLOduration=8.791341938 podStartE2EDuration="8.791341938s" podCreationTimestamp="2026-01-20 17:13:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:13:16.77207636 +0000 UTC m=+1890.532414327" watchObservedRunningTime="2026-01-20 17:13:16.791341938 +0000 UTC m=+1890.551679905" Jan 20 17:13:16 crc kubenswrapper[4558]: I0120 17:13:16.799314 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-sync-5wdp7" event={"ID":"ddaf057e-7323-49e4-a1c5-0f91ad0aaaaa","Type":"ContainerStarted","Data":"8540f5c72d8ae0ebbfdf8ace4baa83976e8887ffaba9374a18c9d79a16cd9d30"} Jan 20 17:13:16 crc kubenswrapper[4558]: I0120 17:13:16.799369 4558 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openstack-kuttl-tests/neutron-db-sync-5wdp7" event={"ID":"ddaf057e-7323-49e4-a1c5-0f91ad0aaaaa","Type":"ContainerStarted","Data":"3d8858e5c49b5ab857b4b2de7ded6058c5738b29048b63692b6c579705a8d102"} Jan 20 17:13:16 crc kubenswrapper[4558]: I0120 17:13:16.802621 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/watcher-db-sync-z8lmb" podStartSLOduration=2.413570414 podStartE2EDuration="17.802602783s" podCreationTimestamp="2026-01-20 17:12:59 +0000 UTC" firstStartedPulling="2026-01-20 17:13:00.472861251 +0000 UTC m=+1874.233199218" lastFinishedPulling="2026-01-20 17:13:15.86189362 +0000 UTC m=+1889.622231587" observedRunningTime="2026-01-20 17:13:16.795273992 +0000 UTC m=+1890.555611958" watchObservedRunningTime="2026-01-20 17:13:16.802602783 +0000 UTC m=+1890.562940750" Jan 20 17:13:16 crc kubenswrapper[4558]: I0120 17:13:16.813592 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"52844554-ecdd-40db-ae62-0f0de3d9030d","Type":"ContainerStarted","Data":"e6c3ae37797a69bb34636f1b18613265125f49fbdbcefbbcfb29be74a7d67a71"} Jan 20 17:13:16 crc kubenswrapper[4558]: I0120 17:13:16.831230 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-db-sync-skf6m" podStartSLOduration=6.831212974 podStartE2EDuration="6.831212974s" podCreationTimestamp="2026-01-20 17:13:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:13:16.810156458 +0000 UTC m=+1890.570494424" watchObservedRunningTime="2026-01-20 17:13:16.831212974 +0000 UTC m=+1890.591550940" Jan 20 17:13:16 crc kubenswrapper[4558]: I0120 17:13:16.837351 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"c061507a-294f-453b-be5d-4fdca19c790c","Type":"ContainerStarted","Data":"fd451b5cc00585c8579efbc7e0be3a1078970c7fd9941b13ddf84c39c0ad04e9"} Jan 20 17:13:16 crc kubenswrapper[4558]: I0120 17:13:16.838414 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/neutron-db-sync-5wdp7" podStartSLOduration=6.83840086 podStartE2EDuration="6.83840086s" podCreationTimestamp="2026-01-20 17:13:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:13:16.825317888 +0000 UTC m=+1890.585655854" watchObservedRunningTime="2026-01-20 17:13:16.83840086 +0000 UTC m=+1890.598738827" Jan 20 17:13:16 crc kubenswrapper[4558]: I0120 17:13:16.846765 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-sync-xmmz7" event={"ID":"4e87c463-6ff6-4a56-a207-b6b86578ec36","Type":"ContainerStarted","Data":"27531b2b5f6f9170a62fb7645bec0cee5541b19a0b1b664f62d55b883f94d981"} Jan 20 17:13:16 crc kubenswrapper[4558]: I0120 17:13:16.854522 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-479z7" event={"ID":"d81dc2b3-5e3d-4b54-98f5-938c751dcaed","Type":"ContainerStarted","Data":"77f2cc9689416d67bb531c240255bcc3dba26ca8b5684419ffa0bcb7727882f2"} Jan 20 17:13:16 crc kubenswrapper[4558]: I0120 17:13:16.854565 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-479z7" 
event={"ID":"d81dc2b3-5e3d-4b54-98f5-938c751dcaed","Type":"ContainerStarted","Data":"6c3b488e6cf0fcd51b8086633850a6a0661c1e397930b452ebb9740f164ab032"} Jan 20 17:13:16 crc kubenswrapper[4558]: I0120 17:13:16.859741 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"384dff5b-0848-41ff-bcac-ae3bce090fd4","Type":"ContainerStarted","Data":"924a32a1cc9b5d281fd0920a23551b1cee39f7d6cb1b36ebd2f03e527044124a"} Jan 20 17:13:16 crc kubenswrapper[4558]: I0120 17:13:16.876560 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/keystone-bootstrap-479z7" podStartSLOduration=9.876536742 podStartE2EDuration="9.876536742s" podCreationTimestamp="2026-01-20 17:13:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:13:16.864796585 +0000 UTC m=+1890.625134552" watchObservedRunningTime="2026-01-20 17:13:16.876536742 +0000 UTC m=+1890.636874709" Jan 20 17:13:17 crc kubenswrapper[4558]: I0120 17:13:17.889581 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-sync-xmmz7" event={"ID":"4e87c463-6ff6-4a56-a207-b6b86578ec36","Type":"ContainerStarted","Data":"59d0c6bafc63cc80658f7568160ae47885a8cf64a674673643c37344ad9aa432"} Jan 20 17:13:17 crc kubenswrapper[4558]: I0120 17:13:17.901303 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"384dff5b-0848-41ff-bcac-ae3bce090fd4","Type":"ContainerStarted","Data":"d7acc2e71e623b0cb40d205dabe996ea5dbae69b82aabc6076726b533f985398"} Jan 20 17:13:17 crc kubenswrapper[4558]: I0120 17:13:17.908924 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"52844554-ecdd-40db-ae62-0f0de3d9030d","Type":"ContainerStarted","Data":"3c009e1d5d872ae0cc303819626a3fcbec01907270a323804faaa0a2fe958dd6"} Jan 20 17:13:17 crc kubenswrapper[4558]: I0120 17:13:17.912789 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"c061507a-294f-453b-be5d-4fdca19c790c","Type":"ContainerStarted","Data":"1a5915eb633ad5a17f2153955cceb387e146bddc311a76f35f78e2b62cdc3b6e"} Jan 20 17:13:17 crc kubenswrapper[4558]: I0120 17:13:17.915117 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/cinder-db-sync-xmmz7" podStartSLOduration=9.915091048 podStartE2EDuration="9.915091048s" podCreationTimestamp="2026-01-20 17:13:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:13:17.908313754 +0000 UTC m=+1891.668651721" watchObservedRunningTime="2026-01-20 17:13:17.915091048 +0000 UTC m=+1891.675429016" Jan 20 17:13:18 crc kubenswrapper[4558]: I0120 17:13:18.924402 4558 generic.go:334] "Generic (PLEG): container finished" podID="c3bca81a-32b0-4c02-90c0-ab10bb1eb835" containerID="ede71d31814bb504757b712e0cf2f0927853e428f6428a924676c1d65b768351" exitCode=0 Jan 20 17:13:18 crc kubenswrapper[4558]: I0120 17:13:18.924565 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-sync-c95c5" event={"ID":"c3bca81a-32b0-4c02-90c0-ab10bb1eb835","Type":"ContainerDied","Data":"ede71d31814bb504757b712e0cf2f0927853e428f6428a924676c1d65b768351"} Jan 20 17:13:18 crc kubenswrapper[4558]: I0120 
17:13:18.927612 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/prometheus-metric-storage-0" event={"ID":"e87a4844-8a90-4965-878f-d95aa09c47bb","Type":"ContainerStarted","Data":"aa1db8c742dbd3178eb1465494b8e471f9616f4072004ba4f8cbbb2be9c8f316"} Jan 20 17:13:18 crc kubenswrapper[4558]: I0120 17:13:18.929617 4558 generic.go:334] "Generic (PLEG): container finished" podID="4f36762c-fdfd-45ac-a5b1-4be2bd718a9b" containerID="c0a3f252e721c2ca07be91f94b01bf52c6603bf0f355466ad7d645ee1891d878" exitCode=0 Jan 20 17:13:18 crc kubenswrapper[4558]: I0120 17:13:18.929730 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/watcher-db-sync-z8lmb" event={"ID":"4f36762c-fdfd-45ac-a5b1-4be2bd718a9b","Type":"ContainerDied","Data":"c0a3f252e721c2ca07be91f94b01bf52c6603bf0f355466ad7d645ee1891d878"} Jan 20 17:13:18 crc kubenswrapper[4558]: I0120 17:13:18.931402 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"52844554-ecdd-40db-ae62-0f0de3d9030d","Type":"ContainerStarted","Data":"217dca2679c63dd7846ef65d423f3eea95f3f3ffc7c680a0e1fc9edda8767cd7"} Jan 20 17:13:18 crc kubenswrapper[4558]: I0120 17:13:18.933017 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"c061507a-294f-453b-be5d-4fdca19c790c","Type":"ContainerStarted","Data":"22a2a926f869e6446bdf888e1ee153d4bcc61395a9b6875ebbb63ec5af2731bc"} Jan 20 17:13:18 crc kubenswrapper[4558]: I0120 17:13:18.935690 4558 generic.go:334] "Generic (PLEG): container finished" podID="d81dc2b3-5e3d-4b54-98f5-938c751dcaed" containerID="77f2cc9689416d67bb531c240255bcc3dba26ca8b5684419ffa0bcb7727882f2" exitCode=0 Jan 20 17:13:18 crc kubenswrapper[4558]: I0120 17:13:18.935746 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-479z7" event={"ID":"d81dc2b3-5e3d-4b54-98f5-938c751dcaed","Type":"ContainerDied","Data":"77f2cc9689416d67bb531c240255bcc3dba26ca8b5684419ffa0bcb7727882f2"} Jan 20 17:13:18 crc kubenswrapper[4558]: I0120 17:13:18.937095 4558 generic.go:334] "Generic (PLEG): container finished" podID="6906ea2d-0a88-47cd-9c23-3e1c40320286" containerID="bd20e6e920cac2215a98a5c447c2c55bc432c00d80f0d36813856b38b27308b8" exitCode=0 Jan 20 17:13:18 crc kubenswrapper[4558]: I0120 17:13:18.937189 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-sync-skf6m" event={"ID":"6906ea2d-0a88-47cd-9c23-3e1c40320286","Type":"ContainerDied","Data":"bd20e6e920cac2215a98a5c447c2c55bc432c00d80f0d36813856b38b27308b8"} Jan 20 17:13:18 crc kubenswrapper[4558]: I0120 17:13:18.938783 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="384dff5b-0848-41ff-bcac-ae3bce090fd4" containerName="glance-log" containerID="cri-o://d7acc2e71e623b0cb40d205dabe996ea5dbae69b82aabc6076726b533f985398" gracePeriod=30 Jan 20 17:13:18 crc kubenswrapper[4558]: I0120 17:13:18.938905 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="384dff5b-0848-41ff-bcac-ae3bce090fd4" containerName="glance-httpd" containerID="cri-o://6886f930b8ebf8fd1785a38aa97bb588070e381c8d811be6116989ade7aef81a" gracePeriod=30 Jan 20 17:13:18 crc kubenswrapper[4558]: I0120 17:13:18.939070 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"384dff5b-0848-41ff-bcac-ae3bce090fd4","Type":"ContainerStarted","Data":"6886f930b8ebf8fd1785a38aa97bb588070e381c8d811be6116989ade7aef81a"} Jan 20 17:13:18 crc kubenswrapper[4558]: I0120 17:13:18.996886 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-external-api-0" podStartSLOduration=7.996873834 podStartE2EDuration="7.996873834s" podCreationTimestamp="2026-01-20 17:13:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:13:18.991918586 +0000 UTC m=+1892.752256553" watchObservedRunningTime="2026-01-20 17:13:18.996873834 +0000 UTC m=+1892.757211801" Jan 20 17:13:19 crc kubenswrapper[4558]: I0120 17:13:19.042470 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-internal-api-0" podStartSLOduration=11.042454896 podStartE2EDuration="11.042454896s" podCreationTimestamp="2026-01-20 17:13:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:13:19.03571907 +0000 UTC m=+1892.796057037" watchObservedRunningTime="2026-01-20 17:13:19.042454896 +0000 UTC m=+1892.802792863" Jan 20 17:13:19 crc kubenswrapper[4558]: I0120 17:13:19.457475 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:13:19 crc kubenswrapper[4558]: I0120 17:13:19.509207 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/384dff5b-0848-41ff-bcac-ae3bce090fd4-internal-tls-certs\") pod \"384dff5b-0848-41ff-bcac-ae3bce090fd4\" (UID: \"384dff5b-0848-41ff-bcac-ae3bce090fd4\") " Jan 20 17:13:19 crc kubenswrapper[4558]: I0120 17:13:19.509346 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/384dff5b-0848-41ff-bcac-ae3bce090fd4-httpd-run\") pod \"384dff5b-0848-41ff-bcac-ae3bce090fd4\" (UID: \"384dff5b-0848-41ff-bcac-ae3bce090fd4\") " Jan 20 17:13:19 crc kubenswrapper[4558]: I0120 17:13:19.509387 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/384dff5b-0848-41ff-bcac-ae3bce090fd4-config-data\") pod \"384dff5b-0848-41ff-bcac-ae3bce090fd4\" (UID: \"384dff5b-0848-41ff-bcac-ae3bce090fd4\") " Jan 20 17:13:19 crc kubenswrapper[4558]: I0120 17:13:19.509434 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage17-crc\") pod \"384dff5b-0848-41ff-bcac-ae3bce090fd4\" (UID: \"384dff5b-0848-41ff-bcac-ae3bce090fd4\") " Jan 20 17:13:19 crc kubenswrapper[4558]: I0120 17:13:19.509545 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-chhqk\" (UniqueName: \"kubernetes.io/projected/384dff5b-0848-41ff-bcac-ae3bce090fd4-kube-api-access-chhqk\") pod \"384dff5b-0848-41ff-bcac-ae3bce090fd4\" (UID: \"384dff5b-0848-41ff-bcac-ae3bce090fd4\") " Jan 20 17:13:19 crc kubenswrapper[4558]: I0120 17:13:19.509640 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/384dff5b-0848-41ff-bcac-ae3bce090fd4-combined-ca-bundle\") pod \"384dff5b-0848-41ff-bcac-ae3bce090fd4\" (UID: \"384dff5b-0848-41ff-bcac-ae3bce090fd4\") " Jan 20 17:13:19 crc kubenswrapper[4558]: I0120 17:13:19.509658 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/384dff5b-0848-41ff-bcac-ae3bce090fd4-logs\") pod \"384dff5b-0848-41ff-bcac-ae3bce090fd4\" (UID: \"384dff5b-0848-41ff-bcac-ae3bce090fd4\") " Jan 20 17:13:19 crc kubenswrapper[4558]: I0120 17:13:19.509682 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/384dff5b-0848-41ff-bcac-ae3bce090fd4-scripts\") pod \"384dff5b-0848-41ff-bcac-ae3bce090fd4\" (UID: \"384dff5b-0848-41ff-bcac-ae3bce090fd4\") " Jan 20 17:13:19 crc kubenswrapper[4558]: I0120 17:13:19.509937 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/384dff5b-0848-41ff-bcac-ae3bce090fd4-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "384dff5b-0848-41ff-bcac-ae3bce090fd4" (UID: "384dff5b-0848-41ff-bcac-ae3bce090fd4"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:13:19 crc kubenswrapper[4558]: I0120 17:13:19.510151 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/384dff5b-0848-41ff-bcac-ae3bce090fd4-logs" (OuterVolumeSpecName: "logs") pod "384dff5b-0848-41ff-bcac-ae3bce090fd4" (UID: "384dff5b-0848-41ff-bcac-ae3bce090fd4"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:13:19 crc kubenswrapper[4558]: I0120 17:13:19.510486 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/384dff5b-0848-41ff-bcac-ae3bce090fd4-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:19 crc kubenswrapper[4558]: I0120 17:13:19.510509 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/384dff5b-0848-41ff-bcac-ae3bce090fd4-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:19 crc kubenswrapper[4558]: I0120 17:13:19.514807 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/384dff5b-0848-41ff-bcac-ae3bce090fd4-kube-api-access-chhqk" (OuterVolumeSpecName: "kube-api-access-chhqk") pod "384dff5b-0848-41ff-bcac-ae3bce090fd4" (UID: "384dff5b-0848-41ff-bcac-ae3bce090fd4"). InnerVolumeSpecName "kube-api-access-chhqk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:13:19 crc kubenswrapper[4558]: I0120 17:13:19.516818 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage17-crc" (OuterVolumeSpecName: "glance") pod "384dff5b-0848-41ff-bcac-ae3bce090fd4" (UID: "384dff5b-0848-41ff-bcac-ae3bce090fd4"). InnerVolumeSpecName "local-storage17-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:13:19 crc kubenswrapper[4558]: I0120 17:13:19.516930 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/384dff5b-0848-41ff-bcac-ae3bce090fd4-scripts" (OuterVolumeSpecName: "scripts") pod "384dff5b-0848-41ff-bcac-ae3bce090fd4" (UID: "384dff5b-0848-41ff-bcac-ae3bce090fd4"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:13:19 crc kubenswrapper[4558]: I0120 17:13:19.534836 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/384dff5b-0848-41ff-bcac-ae3bce090fd4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "384dff5b-0848-41ff-bcac-ae3bce090fd4" (UID: "384dff5b-0848-41ff-bcac-ae3bce090fd4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:13:19 crc kubenswrapper[4558]: I0120 17:13:19.546188 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/384dff5b-0848-41ff-bcac-ae3bce090fd4-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "384dff5b-0848-41ff-bcac-ae3bce090fd4" (UID: "384dff5b-0848-41ff-bcac-ae3bce090fd4"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:13:19 crc kubenswrapper[4558]: I0120 17:13:19.546580 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/384dff5b-0848-41ff-bcac-ae3bce090fd4-config-data" (OuterVolumeSpecName: "config-data") pod "384dff5b-0848-41ff-bcac-ae3bce090fd4" (UID: "384dff5b-0848-41ff-bcac-ae3bce090fd4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:13:19 crc kubenswrapper[4558]: I0120 17:13:19.612241 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-chhqk\" (UniqueName: \"kubernetes.io/projected/384dff5b-0848-41ff-bcac-ae3bce090fd4-kube-api-access-chhqk\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:19 crc kubenswrapper[4558]: I0120 17:13:19.612279 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/384dff5b-0848-41ff-bcac-ae3bce090fd4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:19 crc kubenswrapper[4558]: I0120 17:13:19.612291 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/384dff5b-0848-41ff-bcac-ae3bce090fd4-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:19 crc kubenswrapper[4558]: I0120 17:13:19.612302 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/384dff5b-0848-41ff-bcac-ae3bce090fd4-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:19 crc kubenswrapper[4558]: I0120 17:13:19.612312 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/384dff5b-0848-41ff-bcac-ae3bce090fd4-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:19 crc kubenswrapper[4558]: I0120 17:13:19.612344 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage17-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage17-crc\") on node \"crc\" " Jan 20 17:13:19 crc kubenswrapper[4558]: I0120 17:13:19.633061 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage17-crc" (UniqueName: "kubernetes.io/local-volume/local-storage17-crc") on node "crc" Jan 20 17:13:19 crc kubenswrapper[4558]: I0120 17:13:19.713535 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage17-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage17-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:19 crc kubenswrapper[4558]: I0120 17:13:19.953693 4558 generic.go:334] "Generic (PLEG): container 
finished" podID="384dff5b-0848-41ff-bcac-ae3bce090fd4" containerID="6886f930b8ebf8fd1785a38aa97bb588070e381c8d811be6116989ade7aef81a" exitCode=0 Jan 20 17:13:19 crc kubenswrapper[4558]: I0120 17:13:19.954019 4558 generic.go:334] "Generic (PLEG): container finished" podID="384dff5b-0848-41ff-bcac-ae3bce090fd4" containerID="d7acc2e71e623b0cb40d205dabe996ea5dbae69b82aabc6076726b533f985398" exitCode=143 Jan 20 17:13:19 crc kubenswrapper[4558]: I0120 17:13:19.953788 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:13:19 crc kubenswrapper[4558]: I0120 17:13:19.953769 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"384dff5b-0848-41ff-bcac-ae3bce090fd4","Type":"ContainerDied","Data":"6886f930b8ebf8fd1785a38aa97bb588070e381c8d811be6116989ade7aef81a"} Jan 20 17:13:19 crc kubenswrapper[4558]: I0120 17:13:19.954136 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"384dff5b-0848-41ff-bcac-ae3bce090fd4","Type":"ContainerDied","Data":"d7acc2e71e623b0cb40d205dabe996ea5dbae69b82aabc6076726b533f985398"} Jan 20 17:13:19 crc kubenswrapper[4558]: I0120 17:13:19.954153 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"384dff5b-0848-41ff-bcac-ae3bce090fd4","Type":"ContainerDied","Data":"924a32a1cc9b5d281fd0920a23551b1cee39f7d6cb1b36ebd2f03e527044124a"} Jan 20 17:13:19 crc kubenswrapper[4558]: I0120 17:13:19.954199 4558 scope.go:117] "RemoveContainer" containerID="6886f930b8ebf8fd1785a38aa97bb588070e381c8d811be6116989ade7aef81a" Jan 20 17:13:19 crc kubenswrapper[4558]: I0120 17:13:19.961592 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"52844554-ecdd-40db-ae62-0f0de3d9030d","Type":"ContainerStarted","Data":"6d036b93fa5cf012470faed7d76baa620b68e49cfc21d8f4c3db9b864a1baeed"} Jan 20 17:13:19 crc kubenswrapper[4558]: I0120 17:13:19.997220 4558 scope.go:117] "RemoveContainer" containerID="d7acc2e71e623b0cb40d205dabe996ea5dbae69b82aabc6076726b533f985398" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.003157 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.017218 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.024336 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:13:20 crc kubenswrapper[4558]: E0120 17:13:20.024691 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="384dff5b-0848-41ff-bcac-ae3bce090fd4" containerName="glance-log" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.024710 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="384dff5b-0848-41ff-bcac-ae3bce090fd4" containerName="glance-log" Jan 20 17:13:20 crc kubenswrapper[4558]: E0120 17:13:20.024724 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="384dff5b-0848-41ff-bcac-ae3bce090fd4" containerName="glance-httpd" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.024731 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="384dff5b-0848-41ff-bcac-ae3bce090fd4" 
containerName="glance-httpd" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.024911 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="384dff5b-0848-41ff-bcac-ae3bce090fd4" containerName="glance-httpd" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.024935 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="384dff5b-0848-41ff-bcac-ae3bce090fd4" containerName="glance-log" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.025823 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.030509 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-internal-svc" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.030824 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-internal-config-data" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.044906 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.053095 4558 scope.go:117] "RemoveContainer" containerID="6886f930b8ebf8fd1785a38aa97bb588070e381c8d811be6116989ade7aef81a" Jan 20 17:13:20 crc kubenswrapper[4558]: E0120 17:13:20.056668 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6886f930b8ebf8fd1785a38aa97bb588070e381c8d811be6116989ade7aef81a\": container with ID starting with 6886f930b8ebf8fd1785a38aa97bb588070e381c8d811be6116989ade7aef81a not found: ID does not exist" containerID="6886f930b8ebf8fd1785a38aa97bb588070e381c8d811be6116989ade7aef81a" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.056706 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6886f930b8ebf8fd1785a38aa97bb588070e381c8d811be6116989ade7aef81a"} err="failed to get container status \"6886f930b8ebf8fd1785a38aa97bb588070e381c8d811be6116989ade7aef81a\": rpc error: code = NotFound desc = could not find container \"6886f930b8ebf8fd1785a38aa97bb588070e381c8d811be6116989ade7aef81a\": container with ID starting with 6886f930b8ebf8fd1785a38aa97bb588070e381c8d811be6116989ade7aef81a not found: ID does not exist" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.056732 4558 scope.go:117] "RemoveContainer" containerID="d7acc2e71e623b0cb40d205dabe996ea5dbae69b82aabc6076726b533f985398" Jan 20 17:13:20 crc kubenswrapper[4558]: E0120 17:13:20.060372 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d7acc2e71e623b0cb40d205dabe996ea5dbae69b82aabc6076726b533f985398\": container with ID starting with d7acc2e71e623b0cb40d205dabe996ea5dbae69b82aabc6076726b533f985398 not found: ID does not exist" containerID="d7acc2e71e623b0cb40d205dabe996ea5dbae69b82aabc6076726b533f985398" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.060403 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d7acc2e71e623b0cb40d205dabe996ea5dbae69b82aabc6076726b533f985398"} err="failed to get container status \"d7acc2e71e623b0cb40d205dabe996ea5dbae69b82aabc6076726b533f985398\": rpc error: code = NotFound desc = could not find container \"d7acc2e71e623b0cb40d205dabe996ea5dbae69b82aabc6076726b533f985398\": container with ID 
starting with d7acc2e71e623b0cb40d205dabe996ea5dbae69b82aabc6076726b533f985398 not found: ID does not exist" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.060434 4558 scope.go:117] "RemoveContainer" containerID="6886f930b8ebf8fd1785a38aa97bb588070e381c8d811be6116989ade7aef81a" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.064375 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6886f930b8ebf8fd1785a38aa97bb588070e381c8d811be6116989ade7aef81a"} err="failed to get container status \"6886f930b8ebf8fd1785a38aa97bb588070e381c8d811be6116989ade7aef81a\": rpc error: code = NotFound desc = could not find container \"6886f930b8ebf8fd1785a38aa97bb588070e381c8d811be6116989ade7aef81a\": container with ID starting with 6886f930b8ebf8fd1785a38aa97bb588070e381c8d811be6116989ade7aef81a not found: ID does not exist" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.064400 4558 scope.go:117] "RemoveContainer" containerID="d7acc2e71e623b0cb40d205dabe996ea5dbae69b82aabc6076726b533f985398" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.065073 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d7acc2e71e623b0cb40d205dabe996ea5dbae69b82aabc6076726b533f985398"} err="failed to get container status \"d7acc2e71e623b0cb40d205dabe996ea5dbae69b82aabc6076726b533f985398\": rpc error: code = NotFound desc = could not find container \"d7acc2e71e623b0cb40d205dabe996ea5dbae69b82aabc6076726b533f985398\": container with ID starting with d7acc2e71e623b0cb40d205dabe996ea5dbae69b82aabc6076726b533f985398 not found: ID does not exist" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.121471 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.121512 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.121541 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb-logs\") pod \"glance-default-internal-api-0\" (UID: \"fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.121590 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb-config-data\") pod \"glance-default-internal-api-0\" (UID: \"fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.121645 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zvqh8\" (UniqueName: 
\"kubernetes.io/projected/fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb-kube-api-access-zvqh8\") pod \"glance-default-internal-api-0\" (UID: \"fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.121745 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage17-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage17-crc\") pod \"glance-default-internal-api-0\" (UID: \"fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.121906 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb-scripts\") pod \"glance-default-internal-api-0\" (UID: \"fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.121972 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.224685 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.224749 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.224782 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb-logs\") pod \"glance-default-internal-api-0\" (UID: \"fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.224859 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb-config-data\") pod \"glance-default-internal-api-0\" (UID: \"fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.224943 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zvqh8\" (UniqueName: \"kubernetes.io/projected/fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb-kube-api-access-zvqh8\") pod \"glance-default-internal-api-0\" (UID: \"fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.225082 4558 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"local-storage17-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage17-crc\") pod \"glance-default-internal-api-0\" (UID: \"fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.225216 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb-scripts\") pod \"glance-default-internal-api-0\" (UID: \"fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.225263 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.225386 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.225627 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb-logs\") pod \"glance-default-internal-api-0\" (UID: \"fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.225842 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage17-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage17-crc\") pod \"glance-default-internal-api-0\" (UID: \"fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb\") device mount path \"/mnt/openstack/pv17\"" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.231933 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb-scripts\") pod \"glance-default-internal-api-0\" (UID: \"fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.238090 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.244255 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb-config-data\") pod \"glance-default-internal-api-0\" (UID: \"fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.254866 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"kube-api-access-zvqh8\" (UniqueName: \"kubernetes.io/projected/fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb-kube-api-access-zvqh8\") pod \"glance-default-internal-api-0\" (UID: \"fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.264030 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.292450 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage17-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage17-crc\") pod \"glance-default-internal-api-0\" (UID: \"fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.347334 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.413788 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-479z7" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.474687 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/watcher-db-sync-z8lmb" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.474987 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-db-sync-skf6m" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.481281 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-db-sync-c95c5" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.530818 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-92kfm\" (UniqueName: \"kubernetes.io/projected/c3bca81a-32b0-4c02-90c0-ab10bb1eb835-kube-api-access-92kfm\") pod \"c3bca81a-32b0-4c02-90c0-ab10bb1eb835\" (UID: \"c3bca81a-32b0-4c02-90c0-ab10bb1eb835\") " Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.530860 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f36762c-fdfd-45ac-a5b1-4be2bd718a9b-combined-ca-bundle\") pod \"4f36762c-fdfd-45ac-a5b1-4be2bd718a9b\" (UID: \"4f36762c-fdfd-45ac-a5b1-4be2bd718a9b\") " Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.530881 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d81dc2b3-5e3d-4b54-98f5-938c751dcaed-credential-keys\") pod \"d81dc2b3-5e3d-4b54-98f5-938c751dcaed\" (UID: \"d81dc2b3-5e3d-4b54-98f5-938c751dcaed\") " Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.530902 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kx4j7\" (UniqueName: \"kubernetes.io/projected/6906ea2d-0a88-47cd-9c23-3e1c40320286-kube-api-access-kx4j7\") pod \"6906ea2d-0a88-47cd-9c23-3e1c40320286\" (UID: \"6906ea2d-0a88-47cd-9c23-3e1c40320286\") " Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.530923 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d81dc2b3-5e3d-4b54-98f5-938c751dcaed-combined-ca-bundle\") pod \"d81dc2b3-5e3d-4b54-98f5-938c751dcaed\" (UID: \"d81dc2b3-5e3d-4b54-98f5-938c751dcaed\") " Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.530945 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3bca81a-32b0-4c02-90c0-ab10bb1eb835-combined-ca-bundle\") pod \"c3bca81a-32b0-4c02-90c0-ab10bb1eb835\" (UID: \"c3bca81a-32b0-4c02-90c0-ab10bb1eb835\") " Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.530962 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f36762c-fdfd-45ac-a5b1-4be2bd718a9b-config-data\") pod \"4f36762c-fdfd-45ac-a5b1-4be2bd718a9b\" (UID: \"4f36762c-fdfd-45ac-a5b1-4be2bd718a9b\") " Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.530989 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d81dc2b3-5e3d-4b54-98f5-938c751dcaed-config-data\") pod \"d81dc2b3-5e3d-4b54-98f5-938c751dcaed\" (UID: \"d81dc2b3-5e3d-4b54-98f5-938c751dcaed\") " Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.531020 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c3bca81a-32b0-4c02-90c0-ab10bb1eb835-config-data\") pod \"c3bca81a-32b0-4c02-90c0-ab10bb1eb835\" (UID: \"c3bca81a-32b0-4c02-90c0-ab10bb1eb835\") " Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.531035 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c3bca81a-32b0-4c02-90c0-ab10bb1eb835-scripts\") pod 
\"c3bca81a-32b0-4c02-90c0-ab10bb1eb835\" (UID: \"c3bca81a-32b0-4c02-90c0-ab10bb1eb835\") " Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.531053 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5nktq\" (UniqueName: \"kubernetes.io/projected/4f36762c-fdfd-45ac-a5b1-4be2bd718a9b-kube-api-access-5nktq\") pod \"4f36762c-fdfd-45ac-a5b1-4be2bd718a9b\" (UID: \"4f36762c-fdfd-45ac-a5b1-4be2bd718a9b\") " Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.531096 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gh2wp\" (UniqueName: \"kubernetes.io/projected/d81dc2b3-5e3d-4b54-98f5-938c751dcaed-kube-api-access-gh2wp\") pod \"d81dc2b3-5e3d-4b54-98f5-938c751dcaed\" (UID: \"d81dc2b3-5e3d-4b54-98f5-938c751dcaed\") " Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.531182 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6906ea2d-0a88-47cd-9c23-3e1c40320286-combined-ca-bundle\") pod \"6906ea2d-0a88-47cd-9c23-3e1c40320286\" (UID: \"6906ea2d-0a88-47cd-9c23-3e1c40320286\") " Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.531214 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d81dc2b3-5e3d-4b54-98f5-938c751dcaed-scripts\") pod \"d81dc2b3-5e3d-4b54-98f5-938c751dcaed\" (UID: \"d81dc2b3-5e3d-4b54-98f5-938c751dcaed\") " Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.531251 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4f36762c-fdfd-45ac-a5b1-4be2bd718a9b-db-sync-config-data\") pod \"4f36762c-fdfd-45ac-a5b1-4be2bd718a9b\" (UID: \"4f36762c-fdfd-45ac-a5b1-4be2bd718a9b\") " Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.531276 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/6906ea2d-0a88-47cd-9c23-3e1c40320286-db-sync-config-data\") pod \"6906ea2d-0a88-47cd-9c23-3e1c40320286\" (UID: \"6906ea2d-0a88-47cd-9c23-3e1c40320286\") " Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.531293 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d81dc2b3-5e3d-4b54-98f5-938c751dcaed-fernet-keys\") pod \"d81dc2b3-5e3d-4b54-98f5-938c751dcaed\" (UID: \"d81dc2b3-5e3d-4b54-98f5-938c751dcaed\") " Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.531329 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c3bca81a-32b0-4c02-90c0-ab10bb1eb835-logs\") pod \"c3bca81a-32b0-4c02-90c0-ab10bb1eb835\" (UID: \"c3bca81a-32b0-4c02-90c0-ab10bb1eb835\") " Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.532345 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c3bca81a-32b0-4c02-90c0-ab10bb1eb835-logs" (OuterVolumeSpecName: "logs") pod "c3bca81a-32b0-4c02-90c0-ab10bb1eb835" (UID: "c3bca81a-32b0-4c02-90c0-ab10bb1eb835"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.535493 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6906ea2d-0a88-47cd-9c23-3e1c40320286-kube-api-access-kx4j7" (OuterVolumeSpecName: "kube-api-access-kx4j7") pod "6906ea2d-0a88-47cd-9c23-3e1c40320286" (UID: "6906ea2d-0a88-47cd-9c23-3e1c40320286"). InnerVolumeSpecName "kube-api-access-kx4j7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.535657 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c3bca81a-32b0-4c02-90c0-ab10bb1eb835-scripts" (OuterVolumeSpecName: "scripts") pod "c3bca81a-32b0-4c02-90c0-ab10bb1eb835" (UID: "c3bca81a-32b0-4c02-90c0-ab10bb1eb835"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.537633 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d81dc2b3-5e3d-4b54-98f5-938c751dcaed-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "d81dc2b3-5e3d-4b54-98f5-938c751dcaed" (UID: "d81dc2b3-5e3d-4b54-98f5-938c751dcaed"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.538073 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c3bca81a-32b0-4c02-90c0-ab10bb1eb835-kube-api-access-92kfm" (OuterVolumeSpecName: "kube-api-access-92kfm") pod "c3bca81a-32b0-4c02-90c0-ab10bb1eb835" (UID: "c3bca81a-32b0-4c02-90c0-ab10bb1eb835"). InnerVolumeSpecName "kube-api-access-92kfm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.542151 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d81dc2b3-5e3d-4b54-98f5-938c751dcaed-kube-api-access-gh2wp" (OuterVolumeSpecName: "kube-api-access-gh2wp") pod "d81dc2b3-5e3d-4b54-98f5-938c751dcaed" (UID: "d81dc2b3-5e3d-4b54-98f5-938c751dcaed"). InnerVolumeSpecName "kube-api-access-gh2wp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.544513 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4f36762c-fdfd-45ac-a5b1-4be2bd718a9b-kube-api-access-5nktq" (OuterVolumeSpecName: "kube-api-access-5nktq") pod "4f36762c-fdfd-45ac-a5b1-4be2bd718a9b" (UID: "4f36762c-fdfd-45ac-a5b1-4be2bd718a9b"). InnerVolumeSpecName "kube-api-access-5nktq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.545367 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d81dc2b3-5e3d-4b54-98f5-938c751dcaed-scripts" (OuterVolumeSpecName: "scripts") pod "d81dc2b3-5e3d-4b54-98f5-938c751dcaed" (UID: "d81dc2b3-5e3d-4b54-98f5-938c751dcaed"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.547570 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f36762c-fdfd-45ac-a5b1-4be2bd718a9b-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "4f36762c-fdfd-45ac-a5b1-4be2bd718a9b" (UID: "4f36762c-fdfd-45ac-a5b1-4be2bd718a9b"). 
InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.548140 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6906ea2d-0a88-47cd-9c23-3e1c40320286-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "6906ea2d-0a88-47cd-9c23-3e1c40320286" (UID: "6906ea2d-0a88-47cd-9c23-3e1c40320286"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.555006 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d81dc2b3-5e3d-4b54-98f5-938c751dcaed-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "d81dc2b3-5e3d-4b54-98f5-938c751dcaed" (UID: "d81dc2b3-5e3d-4b54-98f5-938c751dcaed"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.573569 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d81dc2b3-5e3d-4b54-98f5-938c751dcaed-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d81dc2b3-5e3d-4b54-98f5-938c751dcaed" (UID: "d81dc2b3-5e3d-4b54-98f5-938c751dcaed"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.575056 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="384dff5b-0848-41ff-bcac-ae3bce090fd4" path="/var/lib/kubelet/pods/384dff5b-0848-41ff-bcac-ae3bce090fd4/volumes" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.582565 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f36762c-fdfd-45ac-a5b1-4be2bd718a9b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4f36762c-fdfd-45ac-a5b1-4be2bd718a9b" (UID: "4f36762c-fdfd-45ac-a5b1-4be2bd718a9b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.584312 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c3bca81a-32b0-4c02-90c0-ab10bb1eb835-config-data" (OuterVolumeSpecName: "config-data") pod "c3bca81a-32b0-4c02-90c0-ab10bb1eb835" (UID: "c3bca81a-32b0-4c02-90c0-ab10bb1eb835"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.589064 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c3bca81a-32b0-4c02-90c0-ab10bb1eb835-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c3bca81a-32b0-4c02-90c0-ab10bb1eb835" (UID: "c3bca81a-32b0-4c02-90c0-ab10bb1eb835"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.590210 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d81dc2b3-5e3d-4b54-98f5-938c751dcaed-config-data" (OuterVolumeSpecName: "config-data") pod "d81dc2b3-5e3d-4b54-98f5-938c751dcaed" (UID: "d81dc2b3-5e3d-4b54-98f5-938c751dcaed"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.599984 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6906ea2d-0a88-47cd-9c23-3e1c40320286-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6906ea2d-0a88-47cd-9c23-3e1c40320286" (UID: "6906ea2d-0a88-47cd-9c23-3e1c40320286"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.602087 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f36762c-fdfd-45ac-a5b1-4be2bd718a9b-config-data" (OuterVolumeSpecName: "config-data") pod "4f36762c-fdfd-45ac-a5b1-4be2bd718a9b" (UID: "4f36762c-fdfd-45ac-a5b1-4be2bd718a9b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.633941 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-92kfm\" (UniqueName: \"kubernetes.io/projected/c3bca81a-32b0-4c02-90c0-ab10bb1eb835-kube-api-access-92kfm\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.633981 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f36762c-fdfd-45ac-a5b1-4be2bd718a9b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.633994 4558 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d81dc2b3-5e3d-4b54-98f5-938c751dcaed-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.634005 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kx4j7\" (UniqueName: \"kubernetes.io/projected/6906ea2d-0a88-47cd-9c23-3e1c40320286-kube-api-access-kx4j7\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.634016 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d81dc2b3-5e3d-4b54-98f5-938c751dcaed-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.634026 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3bca81a-32b0-4c02-90c0-ab10bb1eb835-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.634036 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f36762c-fdfd-45ac-a5b1-4be2bd718a9b-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.634047 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d81dc2b3-5e3d-4b54-98f5-938c751dcaed-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.634060 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c3bca81a-32b0-4c02-90c0-ab10bb1eb835-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.634071 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/c3bca81a-32b0-4c02-90c0-ab10bb1eb835-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.634085 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5nktq\" (UniqueName: \"kubernetes.io/projected/4f36762c-fdfd-45ac-a5b1-4be2bd718a9b-kube-api-access-5nktq\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.634097 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gh2wp\" (UniqueName: \"kubernetes.io/projected/d81dc2b3-5e3d-4b54-98f5-938c751dcaed-kube-api-access-gh2wp\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.634107 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6906ea2d-0a88-47cd-9c23-3e1c40320286-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.634116 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d81dc2b3-5e3d-4b54-98f5-938c751dcaed-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.634127 4558 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4f36762c-fdfd-45ac-a5b1-4be2bd718a9b-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.634139 4558 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/6906ea2d-0a88-47cd-9c23-3e1c40320286-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.634148 4558 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d81dc2b3-5e3d-4b54-98f5-938c751dcaed-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.634174 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c3bca81a-32b0-4c02-90c0-ab10bb1eb835-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.836913 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:13:20 crc kubenswrapper[4558]: W0120 17:13:20.838977 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfec58b11_5e1a_4bfc_bfcc_fde43e56b8fb.slice/crio-b70bffc6a37840a954dfe5e2fe46eb02c0529f106b9c0a49bb2844f07850c1cc WatchSource:0}: Error finding container b70bffc6a37840a954dfe5e2fe46eb02c0529f106b9c0a49bb2844f07850c1cc: Status 404 returned error can't find the container with id b70bffc6a37840a954dfe5e2fe46eb02c0529f106b9c0a49bb2844f07850c1cc Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.997028 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-db-sync-c95c5" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.997022 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-sync-c95c5" event={"ID":"c3bca81a-32b0-4c02-90c0-ab10bb1eb835","Type":"ContainerDied","Data":"5478c0409438d311ff48f1c0cdb0750ab3e6246803cf6daf8092bd03f1e06d6c"} Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.998194 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5478c0409438d311ff48f1c0cdb0750ab3e6246803cf6daf8092bd03f1e06d6c" Jan 20 17:13:20 crc kubenswrapper[4558]: I0120 17:13:20.998224 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb","Type":"ContainerStarted","Data":"b70bffc6a37840a954dfe5e2fe46eb02c0529f106b9c0a49bb2844f07850c1cc"} Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:20.999883 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/watcher-db-sync-z8lmb" event={"ID":"4f36762c-fdfd-45ac-a5b1-4be2bd718a9b","Type":"ContainerDied","Data":"b5fbca86372b9e460a800a9406446aac5e63fffe6398ed6bf0e1b9b07d641403"} Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:20.999937 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b5fbca86372b9e460a800a9406446aac5e63fffe6398ed6bf0e1b9b07d641403" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:20.999937 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/watcher-db-sync-z8lmb" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.007351 4558 generic.go:334] "Generic (PLEG): container finished" podID="4e87c463-6ff6-4a56-a207-b6b86578ec36" containerID="59d0c6bafc63cc80658f7568160ae47885a8cf64a674673643c37344ad9aa432" exitCode=0 Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.007439 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-sync-xmmz7" event={"ID":"4e87c463-6ff6-4a56-a207-b6b86578ec36","Type":"ContainerDied","Data":"59d0c6bafc63cc80658f7568160ae47885a8cf64a674673643c37344ad9aa432"} Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.009154 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-479z7" event={"ID":"d81dc2b3-5e3d-4b54-98f5-938c751dcaed","Type":"ContainerDied","Data":"6c3b488e6cf0fcd51b8086633850a6a0661c1e397930b452ebb9740f164ab032"} Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.009211 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6c3b488e6cf0fcd51b8086633850a6a0661c1e397930b452ebb9740f164ab032" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.009263 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-479z7" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.043526 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-db-sync-skf6m" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.045536 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-sync-skf6m" event={"ID":"6906ea2d-0a88-47cd-9c23-3e1c40320286","Type":"ContainerDied","Data":"8a31aef2472a4b8d8f98a3e4406ec52bc9216df431012766e9b0020fab44f8e7"} Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.045573 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8a31aef2472a4b8d8f98a3e4406ec52bc9216df431012766e9b0020fab44f8e7" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.130098 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-479z7"] Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.167289 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-479z7"] Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.274548 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/placement-7ddd44bcf8-9tj9c"] Jan 20 17:13:21 crc kubenswrapper[4558]: E0120 17:13:21.274915 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d81dc2b3-5e3d-4b54-98f5-938c751dcaed" containerName="keystone-bootstrap" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.274927 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d81dc2b3-5e3d-4b54-98f5-938c751dcaed" containerName="keystone-bootstrap" Jan 20 17:13:21 crc kubenswrapper[4558]: E0120 17:13:21.274940 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f36762c-fdfd-45ac-a5b1-4be2bd718a9b" containerName="watcher-db-sync" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.274946 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f36762c-fdfd-45ac-a5b1-4be2bd718a9b" containerName="watcher-db-sync" Jan 20 17:13:21 crc kubenswrapper[4558]: E0120 17:13:21.274960 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c3bca81a-32b0-4c02-90c0-ab10bb1eb835" containerName="placement-db-sync" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.274967 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c3bca81a-32b0-4c02-90c0-ab10bb1eb835" containerName="placement-db-sync" Jan 20 17:13:21 crc kubenswrapper[4558]: E0120 17:13:21.274990 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6906ea2d-0a88-47cd-9c23-3e1c40320286" containerName="barbican-db-sync" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.274995 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6906ea2d-0a88-47cd-9c23-3e1c40320286" containerName="barbican-db-sync" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.275494 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c3bca81a-32b0-4c02-90c0-ab10bb1eb835" containerName="placement-db-sync" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.275523 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="6906ea2d-0a88-47cd-9c23-3e1c40320286" containerName="barbican-db-sync" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.275531 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d81dc2b3-5e3d-4b54-98f5-938c751dcaed" containerName="keystone-bootstrap" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.275540 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f36762c-fdfd-45ac-a5b1-4be2bd718a9b" containerName="watcher-db-sync" Jan 20 
17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.276608 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-7ddd44bcf8-9tj9c" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.278436 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-7ddd44bcf8-9tj9c"] Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.282362 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/249a3706-31a6-4fb2-9a3f-94700d9ae30e-config-data\") pod \"placement-7ddd44bcf8-9tj9c\" (UID: \"249a3706-31a6-4fb2-9a3f-94700d9ae30e\") " pod="openstack-kuttl-tests/placement-7ddd44bcf8-9tj9c" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.282427 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/249a3706-31a6-4fb2-9a3f-94700d9ae30e-logs\") pod \"placement-7ddd44bcf8-9tj9c\" (UID: \"249a3706-31a6-4fb2-9a3f-94700d9ae30e\") " pod="openstack-kuttl-tests/placement-7ddd44bcf8-9tj9c" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.282449 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/249a3706-31a6-4fb2-9a3f-94700d9ae30e-public-tls-certs\") pod \"placement-7ddd44bcf8-9tj9c\" (UID: \"249a3706-31a6-4fb2-9a3f-94700d9ae30e\") " pod="openstack-kuttl-tests/placement-7ddd44bcf8-9tj9c" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.282467 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/249a3706-31a6-4fb2-9a3f-94700d9ae30e-scripts\") pod \"placement-7ddd44bcf8-9tj9c\" (UID: \"249a3706-31a6-4fb2-9a3f-94700d9ae30e\") " pod="openstack-kuttl-tests/placement-7ddd44bcf8-9tj9c" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.282497 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xgpbd\" (UniqueName: \"kubernetes.io/projected/249a3706-31a6-4fb2-9a3f-94700d9ae30e-kube-api-access-xgpbd\") pod \"placement-7ddd44bcf8-9tj9c\" (UID: \"249a3706-31a6-4fb2-9a3f-94700d9ae30e\") " pod="openstack-kuttl-tests/placement-7ddd44bcf8-9tj9c" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.282546 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/249a3706-31a6-4fb2-9a3f-94700d9ae30e-internal-tls-certs\") pod \"placement-7ddd44bcf8-9tj9c\" (UID: \"249a3706-31a6-4fb2-9a3f-94700d9ae30e\") " pod="openstack-kuttl-tests/placement-7ddd44bcf8-9tj9c" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.282564 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/249a3706-31a6-4fb2-9a3f-94700d9ae30e-combined-ca-bundle\") pod \"placement-7ddd44bcf8-9tj9c\" (UID: \"249a3706-31a6-4fb2-9a3f-94700d9ae30e\") " pod="openstack-kuttl-tests/placement-7ddd44bcf8-9tj9c" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.291282 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"placement-scripts" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.292816 4558 reflector.go:368] Caches populated for *v1.Secret from 
object-"openstack-kuttl-tests"/"cert-placement-internal-svc" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.292963 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-placement-public-svc" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.293088 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"placement-placement-dockercfg-fxvdq" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.293372 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"placement-config-data" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.309816 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-75484ddb58-9p7fq"] Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.312140 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-75484ddb58-9p7fq" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.330545 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-config-data" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.330773 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-keystone-listener-config-data" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.344114 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-barbican-dockercfg-gwmf6" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.344205 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-75484ddb58-9p7fq"] Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.362213 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-worker-55f98f9877-gm2wm"] Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.363712 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-55f98f9877-gm2wm" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.383951 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/249a3706-31a6-4fb2-9a3f-94700d9ae30e-internal-tls-certs\") pod \"placement-7ddd44bcf8-9tj9c\" (UID: \"249a3706-31a6-4fb2-9a3f-94700d9ae30e\") " pod="openstack-kuttl-tests/placement-7ddd44bcf8-9tj9c" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.383989 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/249a3706-31a6-4fb2-9a3f-94700d9ae30e-combined-ca-bundle\") pod \"placement-7ddd44bcf8-9tj9c\" (UID: \"249a3706-31a6-4fb2-9a3f-94700d9ae30e\") " pod="openstack-kuttl-tests/placement-7ddd44bcf8-9tj9c" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.384040 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/249a3706-31a6-4fb2-9a3f-94700d9ae30e-config-data\") pod \"placement-7ddd44bcf8-9tj9c\" (UID: \"249a3706-31a6-4fb2-9a3f-94700d9ae30e\") " pod="openstack-kuttl-tests/placement-7ddd44bcf8-9tj9c" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.384068 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/249a3706-31a6-4fb2-9a3f-94700d9ae30e-logs\") pod \"placement-7ddd44bcf8-9tj9c\" (UID: \"249a3706-31a6-4fb2-9a3f-94700d9ae30e\") " pod="openstack-kuttl-tests/placement-7ddd44bcf8-9tj9c" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.384094 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/249a3706-31a6-4fb2-9a3f-94700d9ae30e-public-tls-certs\") pod \"placement-7ddd44bcf8-9tj9c\" (UID: \"249a3706-31a6-4fb2-9a3f-94700d9ae30e\") " pod="openstack-kuttl-tests/placement-7ddd44bcf8-9tj9c" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.384112 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/249a3706-31a6-4fb2-9a3f-94700d9ae30e-scripts\") pod \"placement-7ddd44bcf8-9tj9c\" (UID: \"249a3706-31a6-4fb2-9a3f-94700d9ae30e\") " pod="openstack-kuttl-tests/placement-7ddd44bcf8-9tj9c" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.384145 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xgpbd\" (UniqueName: \"kubernetes.io/projected/249a3706-31a6-4fb2-9a3f-94700d9ae30e-kube-api-access-xgpbd\") pod \"placement-7ddd44bcf8-9tj9c\" (UID: \"249a3706-31a6-4fb2-9a3f-94700d9ae30e\") " pod="openstack-kuttl-tests/placement-7ddd44bcf8-9tj9c" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.385788 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/249a3706-31a6-4fb2-9a3f-94700d9ae30e-logs\") pod \"placement-7ddd44bcf8-9tj9c\" (UID: \"249a3706-31a6-4fb2-9a3f-94700d9ae30e\") " pod="openstack-kuttl-tests/placement-7ddd44bcf8-9tj9c" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.389354 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-worker-config-data" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.395663 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/249a3706-31a6-4fb2-9a3f-94700d9ae30e-combined-ca-bundle\") pod \"placement-7ddd44bcf8-9tj9c\" (UID: \"249a3706-31a6-4fb2-9a3f-94700d9ae30e\") " pod="openstack-kuttl-tests/placement-7ddd44bcf8-9tj9c" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.396580 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/249a3706-31a6-4fb2-9a3f-94700d9ae30e-public-tls-certs\") pod \"placement-7ddd44bcf8-9tj9c\" (UID: \"249a3706-31a6-4fb2-9a3f-94700d9ae30e\") " pod="openstack-kuttl-tests/placement-7ddd44bcf8-9tj9c" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.396632 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-wcsxk"] Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.397801 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-wcsxk" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.403775 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/249a3706-31a6-4fb2-9a3f-94700d9ae30e-config-data\") pod \"placement-7ddd44bcf8-9tj9c\" (UID: \"249a3706-31a6-4fb2-9a3f-94700d9ae30e\") " pod="openstack-kuttl-tests/placement-7ddd44bcf8-9tj9c" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.415345 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-worker-55f98f9877-gm2wm"] Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.419020 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-keystone-dockercfg-kq484" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.419245 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.419367 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"osp-secret" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.419493 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-config-data" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.419599 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-scripts" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.419812 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/249a3706-31a6-4fb2-9a3f-94700d9ae30e-internal-tls-certs\") pod \"placement-7ddd44bcf8-9tj9c\" (UID: \"249a3706-31a6-4fb2-9a3f-94700d9ae30e\") " pod="openstack-kuttl-tests/placement-7ddd44bcf8-9tj9c" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.434857 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/249a3706-31a6-4fb2-9a3f-94700d9ae30e-scripts\") pod \"placement-7ddd44bcf8-9tj9c\" (UID: \"249a3706-31a6-4fb2-9a3f-94700d9ae30e\") " pod="openstack-kuttl-tests/placement-7ddd44bcf8-9tj9c" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.451224 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-wcsxk"] Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.485550 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-bpclt\" (UniqueName: \"kubernetes.io/projected/462f58d4-b41a-45b8-aa8a-003bfe30e625-kube-api-access-bpclt\") pod \"barbican-worker-55f98f9877-gm2wm\" (UID: \"462f58d4-b41a-45b8-aa8a-003bfe30e625\") " pod="openstack-kuttl-tests/barbican-worker-55f98f9877-gm2wm" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.485635 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8dfaaf1f-540b-4c8b-b93c-1e3556c37801-config-data\") pod \"barbican-keystone-listener-75484ddb58-9p7fq\" (UID: \"8dfaaf1f-540b-4c8b-b93c-1e3556c37801\") " pod="openstack-kuttl-tests/barbican-keystone-listener-75484ddb58-9p7fq" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.485669 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/462f58d4-b41a-45b8-aa8a-003bfe30e625-config-data-custom\") pod \"barbican-worker-55f98f9877-gm2wm\" (UID: \"462f58d4-b41a-45b8-aa8a-003bfe30e625\") " pod="openstack-kuttl-tests/barbican-worker-55f98f9877-gm2wm" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.485701 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/462f58d4-b41a-45b8-aa8a-003bfe30e625-config-data\") pod \"barbican-worker-55f98f9877-gm2wm\" (UID: \"462f58d4-b41a-45b8-aa8a-003bfe30e625\") " pod="openstack-kuttl-tests/barbican-worker-55f98f9877-gm2wm" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.485729 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fdvsr\" (UniqueName: \"kubernetes.io/projected/8dfaaf1f-540b-4c8b-b93c-1e3556c37801-kube-api-access-fdvsr\") pod \"barbican-keystone-listener-75484ddb58-9p7fq\" (UID: \"8dfaaf1f-540b-4c8b-b93c-1e3556c37801\") " pod="openstack-kuttl-tests/barbican-keystone-listener-75484ddb58-9p7fq" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.485789 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xgpbd\" (UniqueName: \"kubernetes.io/projected/249a3706-31a6-4fb2-9a3f-94700d9ae30e-kube-api-access-xgpbd\") pod \"placement-7ddd44bcf8-9tj9c\" (UID: \"249a3706-31a6-4fb2-9a3f-94700d9ae30e\") " pod="openstack-kuttl-tests/placement-7ddd44bcf8-9tj9c" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.485793 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8dfaaf1f-540b-4c8b-b93c-1e3556c37801-config-data-custom\") pod \"barbican-keystone-listener-75484ddb58-9p7fq\" (UID: \"8dfaaf1f-540b-4c8b-b93c-1e3556c37801\") " pod="openstack-kuttl-tests/barbican-keystone-listener-75484ddb58-9p7fq" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.486826 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/462f58d4-b41a-45b8-aa8a-003bfe30e625-logs\") pod \"barbican-worker-55f98f9877-gm2wm\" (UID: \"462f58d4-b41a-45b8-aa8a-003bfe30e625\") " pod="openstack-kuttl-tests/barbican-worker-55f98f9877-gm2wm" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.486888 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/462f58d4-b41a-45b8-aa8a-003bfe30e625-combined-ca-bundle\") pod \"barbican-worker-55f98f9877-gm2wm\" (UID: \"462f58d4-b41a-45b8-aa8a-003bfe30e625\") " pod="openstack-kuttl-tests/barbican-worker-55f98f9877-gm2wm" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.486924 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8dfaaf1f-540b-4c8b-b93c-1e3556c37801-logs\") pod \"barbican-keystone-listener-75484ddb58-9p7fq\" (UID: \"8dfaaf1f-540b-4c8b-b93c-1e3556c37801\") " pod="openstack-kuttl-tests/barbican-keystone-listener-75484ddb58-9p7fq" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.487028 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8dfaaf1f-540b-4c8b-b93c-1e3556c37801-combined-ca-bundle\") pod \"barbican-keystone-listener-75484ddb58-9p7fq\" (UID: \"8dfaaf1f-540b-4c8b-b93c-1e3556c37801\") " pod="openstack-kuttl-tests/barbican-keystone-listener-75484ddb58-9p7fq" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.502303 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-api-9f6d49bb-klwm8"] Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.503811 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-api-9f6d49bb-klwm8" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.516492 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-api-config-data" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.542723 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-api-9f6d49bb-klwm8"] Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.588952 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bpclt\" (UniqueName: \"kubernetes.io/projected/462f58d4-b41a-45b8-aa8a-003bfe30e625-kube-api-access-bpclt\") pod \"barbican-worker-55f98f9877-gm2wm\" (UID: \"462f58d4-b41a-45b8-aa8a-003bfe30e625\") " pod="openstack-kuttl-tests/barbican-worker-55f98f9877-gm2wm" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.589019 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8dfaaf1f-540b-4c8b-b93c-1e3556c37801-config-data\") pod \"barbican-keystone-listener-75484ddb58-9p7fq\" (UID: \"8dfaaf1f-540b-4c8b-b93c-1e3556c37801\") " pod="openstack-kuttl-tests/barbican-keystone-listener-75484ddb58-9p7fq" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.589046 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4f712673-2484-4dc7-9677-344aa012e994-logs\") pod \"barbican-api-9f6d49bb-klwm8\" (UID: \"4f712673-2484-4dc7-9677-344aa012e994\") " pod="openstack-kuttl-tests/barbican-api-9f6d49bb-klwm8" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.589070 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f712673-2484-4dc7-9677-344aa012e994-combined-ca-bundle\") pod \"barbican-api-9f6d49bb-klwm8\" (UID: \"4f712673-2484-4dc7-9677-344aa012e994\") " pod="openstack-kuttl-tests/barbican-api-9f6d49bb-klwm8" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 
17:13:21.589088 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f712673-2484-4dc7-9677-344aa012e994-config-data\") pod \"barbican-api-9f6d49bb-klwm8\" (UID: \"4f712673-2484-4dc7-9677-344aa012e994\") " pod="openstack-kuttl-tests/barbican-api-9f6d49bb-klwm8" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.589104 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/462f58d4-b41a-45b8-aa8a-003bfe30e625-config-data-custom\") pod \"barbican-worker-55f98f9877-gm2wm\" (UID: \"462f58d4-b41a-45b8-aa8a-003bfe30e625\") " pod="openstack-kuttl-tests/barbican-worker-55f98f9877-gm2wm" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.589119 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/29ba41cf-902a-4ee1-88c9-d1db60f5e9ab-fernet-keys\") pod \"keystone-bootstrap-wcsxk\" (UID: \"29ba41cf-902a-4ee1-88c9-d1db60f5e9ab\") " pod="openstack-kuttl-tests/keystone-bootstrap-wcsxk" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.589141 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/462f58d4-b41a-45b8-aa8a-003bfe30e625-config-data\") pod \"barbican-worker-55f98f9877-gm2wm\" (UID: \"462f58d4-b41a-45b8-aa8a-003bfe30e625\") " pod="openstack-kuttl-tests/barbican-worker-55f98f9877-gm2wm" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.589189 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fdvsr\" (UniqueName: \"kubernetes.io/projected/8dfaaf1f-540b-4c8b-b93c-1e3556c37801-kube-api-access-fdvsr\") pod \"barbican-keystone-listener-75484ddb58-9p7fq\" (UID: \"8dfaaf1f-540b-4c8b-b93c-1e3556c37801\") " pod="openstack-kuttl-tests/barbican-keystone-listener-75484ddb58-9p7fq" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.589216 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4f712673-2484-4dc7-9677-344aa012e994-config-data-custom\") pod \"barbican-api-9f6d49bb-klwm8\" (UID: \"4f712673-2484-4dc7-9677-344aa012e994\") " pod="openstack-kuttl-tests/barbican-api-9f6d49bb-klwm8" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.589241 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29ba41cf-902a-4ee1-88c9-d1db60f5e9ab-config-data\") pod \"keystone-bootstrap-wcsxk\" (UID: \"29ba41cf-902a-4ee1-88c9-d1db60f5e9ab\") " pod="openstack-kuttl-tests/keystone-bootstrap-wcsxk" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.589284 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8dfaaf1f-540b-4c8b-b93c-1e3556c37801-config-data-custom\") pod \"barbican-keystone-listener-75484ddb58-9p7fq\" (UID: \"8dfaaf1f-540b-4c8b-b93c-1e3556c37801\") " pod="openstack-kuttl-tests/barbican-keystone-listener-75484ddb58-9p7fq" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.589318 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/462f58d4-b41a-45b8-aa8a-003bfe30e625-logs\") pod 
\"barbican-worker-55f98f9877-gm2wm\" (UID: \"462f58d4-b41a-45b8-aa8a-003bfe30e625\") " pod="openstack-kuttl-tests/barbican-worker-55f98f9877-gm2wm" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.589352 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/462f58d4-b41a-45b8-aa8a-003bfe30e625-combined-ca-bundle\") pod \"barbican-worker-55f98f9877-gm2wm\" (UID: \"462f58d4-b41a-45b8-aa8a-003bfe30e625\") " pod="openstack-kuttl-tests/barbican-worker-55f98f9877-gm2wm" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.589372 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/29ba41cf-902a-4ee1-88c9-d1db60f5e9ab-credential-keys\") pod \"keystone-bootstrap-wcsxk\" (UID: \"29ba41cf-902a-4ee1-88c9-d1db60f5e9ab\") " pod="openstack-kuttl-tests/keystone-bootstrap-wcsxk" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.589390 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8dfaaf1f-540b-4c8b-b93c-1e3556c37801-logs\") pod \"barbican-keystone-listener-75484ddb58-9p7fq\" (UID: \"8dfaaf1f-540b-4c8b-b93c-1e3556c37801\") " pod="openstack-kuttl-tests/barbican-keystone-listener-75484ddb58-9p7fq" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.589443 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v4qmx\" (UniqueName: \"kubernetes.io/projected/4f712673-2484-4dc7-9677-344aa012e994-kube-api-access-v4qmx\") pod \"barbican-api-9f6d49bb-klwm8\" (UID: \"4f712673-2484-4dc7-9677-344aa012e994\") " pod="openstack-kuttl-tests/barbican-api-9f6d49bb-klwm8" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.589468 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29ba41cf-902a-4ee1-88c9-d1db60f5e9ab-combined-ca-bundle\") pod \"keystone-bootstrap-wcsxk\" (UID: \"29ba41cf-902a-4ee1-88c9-d1db60f5e9ab\") " pod="openstack-kuttl-tests/keystone-bootstrap-wcsxk" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.589491 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bsjrl\" (UniqueName: \"kubernetes.io/projected/29ba41cf-902a-4ee1-88c9-d1db60f5e9ab-kube-api-access-bsjrl\") pod \"keystone-bootstrap-wcsxk\" (UID: \"29ba41cf-902a-4ee1-88c9-d1db60f5e9ab\") " pod="openstack-kuttl-tests/keystone-bootstrap-wcsxk" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.589526 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8dfaaf1f-540b-4c8b-b93c-1e3556c37801-combined-ca-bundle\") pod \"barbican-keystone-listener-75484ddb58-9p7fq\" (UID: \"8dfaaf1f-540b-4c8b-b93c-1e3556c37801\") " pod="openstack-kuttl-tests/barbican-keystone-listener-75484ddb58-9p7fq" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.589576 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/29ba41cf-902a-4ee1-88c9-d1db60f5e9ab-scripts\") pod \"keystone-bootstrap-wcsxk\" (UID: \"29ba41cf-902a-4ee1-88c9-d1db60f5e9ab\") " pod="openstack-kuttl-tests/keystone-bootstrap-wcsxk" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.589928 
4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/462f58d4-b41a-45b8-aa8a-003bfe30e625-logs\") pod \"barbican-worker-55f98f9877-gm2wm\" (UID: \"462f58d4-b41a-45b8-aa8a-003bfe30e625\") " pod="openstack-kuttl-tests/barbican-worker-55f98f9877-gm2wm" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.591770 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8dfaaf1f-540b-4c8b-b93c-1e3556c37801-logs\") pod \"barbican-keystone-listener-75484ddb58-9p7fq\" (UID: \"8dfaaf1f-540b-4c8b-b93c-1e3556c37801\") " pod="openstack-kuttl-tests/barbican-keystone-listener-75484ddb58-9p7fq" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.594485 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8dfaaf1f-540b-4c8b-b93c-1e3556c37801-config-data\") pod \"barbican-keystone-listener-75484ddb58-9p7fq\" (UID: \"8dfaaf1f-540b-4c8b-b93c-1e3556c37801\") " pod="openstack-kuttl-tests/barbican-keystone-listener-75484ddb58-9p7fq" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.600002 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8dfaaf1f-540b-4c8b-b93c-1e3556c37801-combined-ca-bundle\") pod \"barbican-keystone-listener-75484ddb58-9p7fq\" (UID: \"8dfaaf1f-540b-4c8b-b93c-1e3556c37801\") " pod="openstack-kuttl-tests/barbican-keystone-listener-75484ddb58-9p7fq" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.601369 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8dfaaf1f-540b-4c8b-b93c-1e3556c37801-config-data-custom\") pod \"barbican-keystone-listener-75484ddb58-9p7fq\" (UID: \"8dfaaf1f-540b-4c8b-b93c-1e3556c37801\") " pod="openstack-kuttl-tests/barbican-keystone-listener-75484ddb58-9p7fq" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.603008 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/462f58d4-b41a-45b8-aa8a-003bfe30e625-config-data\") pod \"barbican-worker-55f98f9877-gm2wm\" (UID: \"462f58d4-b41a-45b8-aa8a-003bfe30e625\") " pod="openstack-kuttl-tests/barbican-worker-55f98f9877-gm2wm" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.613351 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/462f58d4-b41a-45b8-aa8a-003bfe30e625-combined-ca-bundle\") pod \"barbican-worker-55f98f9877-gm2wm\" (UID: \"462f58d4-b41a-45b8-aa8a-003bfe30e625\") " pod="openstack-kuttl-tests/barbican-worker-55f98f9877-gm2wm" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.613755 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/462f58d4-b41a-45b8-aa8a-003bfe30e625-config-data-custom\") pod \"barbican-worker-55f98f9877-gm2wm\" (UID: \"462f58d4-b41a-45b8-aa8a-003bfe30e625\") " pod="openstack-kuttl-tests/barbican-worker-55f98f9877-gm2wm" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.625311 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bpclt\" (UniqueName: \"kubernetes.io/projected/462f58d4-b41a-45b8-aa8a-003bfe30e625-kube-api-access-bpclt\") pod \"barbican-worker-55f98f9877-gm2wm\" (UID: \"462f58d4-b41a-45b8-aa8a-003bfe30e625\") " 
pod="openstack-kuttl-tests/barbican-worker-55f98f9877-gm2wm" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.629588 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fdvsr\" (UniqueName: \"kubernetes.io/projected/8dfaaf1f-540b-4c8b-b93c-1e3556c37801-kube-api-access-fdvsr\") pod \"barbican-keystone-listener-75484ddb58-9p7fq\" (UID: \"8dfaaf1f-540b-4c8b-b93c-1e3556c37801\") " pod="openstack-kuttl-tests/barbican-keystone-listener-75484ddb58-9p7fq" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.645541 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/watcher-api-0"] Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.646968 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/watcher-api-0" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.652483 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"watcher-watcher-dockercfg-r4jqf" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.657979 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"watcher-api-config-data" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.672225 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/watcher-applier-0"] Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.673423 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/watcher-applier-0" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.681376 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/watcher-api-0"] Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.685624 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"watcher-applier-config-data" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.690993 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4f712673-2484-4dc7-9677-344aa012e994-config-data-custom\") pod \"barbican-api-9f6d49bb-klwm8\" (UID: \"4f712673-2484-4dc7-9677-344aa012e994\") " pod="openstack-kuttl-tests/barbican-api-9f6d49bb-klwm8" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.691027 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30b9f169-eef8-4a30-b543-83dda735da70-combined-ca-bundle\") pod \"watcher-applier-0\" (UID: \"30b9f169-eef8-4a30-b543-83dda735da70\") " pod="openstack-kuttl-tests/watcher-applier-0" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.691046 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29ba41cf-902a-4ee1-88c9-d1db60f5e9ab-config-data\") pod \"keystone-bootstrap-wcsxk\" (UID: \"29ba41cf-902a-4ee1-88c9-d1db60f5e9ab\") " pod="openstack-kuttl-tests/keystone-bootstrap-wcsxk" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.691085 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d818a7f-96eb-499c-bb53-75b259d30282-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"9d818a7f-96eb-499c-bb53-75b259d30282\") " pod="openstack-kuttl-tests/watcher-api-0" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.691112 4558 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/29ba41cf-902a-4ee1-88c9-d1db60f5e9ab-credential-keys\") pod \"keystone-bootstrap-wcsxk\" (UID: \"29ba41cf-902a-4ee1-88c9-d1db60f5e9ab\") " pod="openstack-kuttl-tests/keystone-bootstrap-wcsxk" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.691139 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v4qmx\" (UniqueName: \"kubernetes.io/projected/4f712673-2484-4dc7-9677-344aa012e994-kube-api-access-v4qmx\") pod \"barbican-api-9f6d49bb-klwm8\" (UID: \"4f712673-2484-4dc7-9677-344aa012e994\") " pod="openstack-kuttl-tests/barbican-api-9f6d49bb-klwm8" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.691175 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29ba41cf-902a-4ee1-88c9-d1db60f5e9ab-combined-ca-bundle\") pod \"keystone-bootstrap-wcsxk\" (UID: \"29ba41cf-902a-4ee1-88c9-d1db60f5e9ab\") " pod="openstack-kuttl-tests/keystone-bootstrap-wcsxk" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.691215 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bsjrl\" (UniqueName: \"kubernetes.io/projected/29ba41cf-902a-4ee1-88c9-d1db60f5e9ab-kube-api-access-bsjrl\") pod \"keystone-bootstrap-wcsxk\" (UID: \"29ba41cf-902a-4ee1-88c9-d1db60f5e9ab\") " pod="openstack-kuttl-tests/keystone-bootstrap-wcsxk" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.691240 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d818a7f-96eb-499c-bb53-75b259d30282-config-data\") pod \"watcher-api-0\" (UID: \"9d818a7f-96eb-499c-bb53-75b259d30282\") " pod="openstack-kuttl-tests/watcher-api-0" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.691259 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30b9f169-eef8-4a30-b543-83dda735da70-config-data\") pod \"watcher-applier-0\" (UID: \"30b9f169-eef8-4a30-b543-83dda735da70\") " pod="openstack-kuttl-tests/watcher-applier-0" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.691276 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wwgzq\" (UniqueName: \"kubernetes.io/projected/9d818a7f-96eb-499c-bb53-75b259d30282-kube-api-access-wwgzq\") pod \"watcher-api-0\" (UID: \"9d818a7f-96eb-499c-bb53-75b259d30282\") " pod="openstack-kuttl-tests/watcher-api-0" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.691292 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9d818a7f-96eb-499c-bb53-75b259d30282-logs\") pod \"watcher-api-0\" (UID: \"9d818a7f-96eb-499c-bb53-75b259d30282\") " pod="openstack-kuttl-tests/watcher-api-0" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.691309 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/29ba41cf-902a-4ee1-88c9-d1db60f5e9ab-scripts\") pod \"keystone-bootstrap-wcsxk\" (UID: \"29ba41cf-902a-4ee1-88c9-d1db60f5e9ab\") " pod="openstack-kuttl-tests/keystone-bootstrap-wcsxk" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.691338 4558 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mj5nf\" (UniqueName: \"kubernetes.io/projected/30b9f169-eef8-4a30-b543-83dda735da70-kube-api-access-mj5nf\") pod \"watcher-applier-0\" (UID: \"30b9f169-eef8-4a30-b543-83dda735da70\") " pod="openstack-kuttl-tests/watcher-applier-0" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.691351 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/30b9f169-eef8-4a30-b543-83dda735da70-logs\") pod \"watcher-applier-0\" (UID: \"30b9f169-eef8-4a30-b543-83dda735da70\") " pod="openstack-kuttl-tests/watcher-applier-0" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.691366 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4f712673-2484-4dc7-9677-344aa012e994-logs\") pod \"barbican-api-9f6d49bb-klwm8\" (UID: \"4f712673-2484-4dc7-9677-344aa012e994\") " pod="openstack-kuttl-tests/barbican-api-9f6d49bb-klwm8" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.691380 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f712673-2484-4dc7-9677-344aa012e994-combined-ca-bundle\") pod \"barbican-api-9f6d49bb-klwm8\" (UID: \"4f712673-2484-4dc7-9677-344aa012e994\") " pod="openstack-kuttl-tests/barbican-api-9f6d49bb-klwm8" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.691398 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f712673-2484-4dc7-9677-344aa012e994-config-data\") pod \"barbican-api-9f6d49bb-klwm8\" (UID: \"4f712673-2484-4dc7-9677-344aa012e994\") " pod="openstack-kuttl-tests/barbican-api-9f6d49bb-klwm8" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.691420 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/9d818a7f-96eb-499c-bb53-75b259d30282-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: \"9d818a7f-96eb-499c-bb53-75b259d30282\") " pod="openstack-kuttl-tests/watcher-api-0" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.691435 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/29ba41cf-902a-4ee1-88c9-d1db60f5e9ab-fernet-keys\") pod \"keystone-bootstrap-wcsxk\" (UID: \"29ba41cf-902a-4ee1-88c9-d1db60f5e9ab\") " pod="openstack-kuttl-tests/keystone-bootstrap-wcsxk" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.697369 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/29ba41cf-902a-4ee1-88c9-d1db60f5e9ab-scripts\") pod \"keystone-bootstrap-wcsxk\" (UID: \"29ba41cf-902a-4ee1-88c9-d1db60f5e9ab\") " pod="openstack-kuttl-tests/keystone-bootstrap-wcsxk" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.701873 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/watcher-applier-0"] Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.702930 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4f712673-2484-4dc7-9677-344aa012e994-logs\") pod \"barbican-api-9f6d49bb-klwm8\" (UID: \"4f712673-2484-4dc7-9677-344aa012e994\") " 
pod="openstack-kuttl-tests/barbican-api-9f6d49bb-klwm8" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.712277 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/29ba41cf-902a-4ee1-88c9-d1db60f5e9ab-fernet-keys\") pod \"keystone-bootstrap-wcsxk\" (UID: \"29ba41cf-902a-4ee1-88c9-d1db60f5e9ab\") " pod="openstack-kuttl-tests/keystone-bootstrap-wcsxk" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.713198 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29ba41cf-902a-4ee1-88c9-d1db60f5e9ab-config-data\") pod \"keystone-bootstrap-wcsxk\" (UID: \"29ba41cf-902a-4ee1-88c9-d1db60f5e9ab\") " pod="openstack-kuttl-tests/keystone-bootstrap-wcsxk" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.720201 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/watcher-decision-engine-0"] Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.721228 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/watcher-decision-engine-0" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.731742 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f712673-2484-4dc7-9677-344aa012e994-combined-ca-bundle\") pod \"barbican-api-9f6d49bb-klwm8\" (UID: \"4f712673-2484-4dc7-9677-344aa012e994\") " pod="openstack-kuttl-tests/barbican-api-9f6d49bb-klwm8" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.731807 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/watcher-decision-engine-0"] Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.732308 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29ba41cf-902a-4ee1-88c9-d1db60f5e9ab-combined-ca-bundle\") pod \"keystone-bootstrap-wcsxk\" (UID: \"29ba41cf-902a-4ee1-88c9-d1db60f5e9ab\") " pod="openstack-kuttl-tests/keystone-bootstrap-wcsxk" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.733176 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f712673-2484-4dc7-9677-344aa012e994-config-data\") pod \"barbican-api-9f6d49bb-klwm8\" (UID: \"4f712673-2484-4dc7-9677-344aa012e994\") " pod="openstack-kuttl-tests/barbican-api-9f6d49bb-klwm8" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.736534 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4f712673-2484-4dc7-9677-344aa012e994-config-data-custom\") pod \"barbican-api-9f6d49bb-klwm8\" (UID: \"4f712673-2484-4dc7-9677-344aa012e994\") " pod="openstack-kuttl-tests/barbican-api-9f6d49bb-klwm8" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.736819 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/29ba41cf-902a-4ee1-88c9-d1db60f5e9ab-credential-keys\") pod \"keystone-bootstrap-wcsxk\" (UID: \"29ba41cf-902a-4ee1-88c9-d1db60f5e9ab\") " pod="openstack-kuttl-tests/keystone-bootstrap-wcsxk" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.736826 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"watcher-decision-engine-config-data" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.737897 4558 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-7ddd44bcf8-9tj9c" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.747272 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bsjrl\" (UniqueName: \"kubernetes.io/projected/29ba41cf-902a-4ee1-88c9-d1db60f5e9ab-kube-api-access-bsjrl\") pod \"keystone-bootstrap-wcsxk\" (UID: \"29ba41cf-902a-4ee1-88c9-d1db60f5e9ab\") " pod="openstack-kuttl-tests/keystone-bootstrap-wcsxk" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.750372 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v4qmx\" (UniqueName: \"kubernetes.io/projected/4f712673-2484-4dc7-9677-344aa012e994-kube-api-access-v4qmx\") pod \"barbican-api-9f6d49bb-klwm8\" (UID: \"4f712673-2484-4dc7-9677-344aa012e994\") " pod="openstack-kuttl-tests/barbican-api-9f6d49bb-klwm8" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.792679 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f47cb9e2-2306-494d-94ff-b60393a9413e-combined-ca-bundle\") pod \"watcher-decision-engine-0\" (UID: \"f47cb9e2-2306-494d-94ff-b60393a9413e\") " pod="openstack-kuttl-tests/watcher-decision-engine-0" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.792739 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f47cb9e2-2306-494d-94ff-b60393a9413e-config-data\") pod \"watcher-decision-engine-0\" (UID: \"f47cb9e2-2306-494d-94ff-b60393a9413e\") " pod="openstack-kuttl-tests/watcher-decision-engine-0" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.792772 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d818a7f-96eb-499c-bb53-75b259d30282-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"9d818a7f-96eb-499c-bb53-75b259d30282\") " pod="openstack-kuttl-tests/watcher-api-0" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.792812 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/f47cb9e2-2306-494d-94ff-b60393a9413e-custom-prometheus-ca\") pod \"watcher-decision-engine-0\" (UID: \"f47cb9e2-2306-494d-94ff-b60393a9413e\") " pod="openstack-kuttl-tests/watcher-decision-engine-0" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.792848 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d818a7f-96eb-499c-bb53-75b259d30282-config-data\") pod \"watcher-api-0\" (UID: \"9d818a7f-96eb-499c-bb53-75b259d30282\") " pod="openstack-kuttl-tests/watcher-api-0" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.792865 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30b9f169-eef8-4a30-b543-83dda735da70-config-data\") pod \"watcher-applier-0\" (UID: \"30b9f169-eef8-4a30-b543-83dda735da70\") " pod="openstack-kuttl-tests/watcher-applier-0" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.792885 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wwgzq\" (UniqueName: 
\"kubernetes.io/projected/9d818a7f-96eb-499c-bb53-75b259d30282-kube-api-access-wwgzq\") pod \"watcher-api-0\" (UID: \"9d818a7f-96eb-499c-bb53-75b259d30282\") " pod="openstack-kuttl-tests/watcher-api-0" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.792903 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9d818a7f-96eb-499c-bb53-75b259d30282-logs\") pod \"watcher-api-0\" (UID: \"9d818a7f-96eb-499c-bb53-75b259d30282\") " pod="openstack-kuttl-tests/watcher-api-0" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.792935 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wvkrs\" (UniqueName: \"kubernetes.io/projected/f47cb9e2-2306-494d-94ff-b60393a9413e-kube-api-access-wvkrs\") pod \"watcher-decision-engine-0\" (UID: \"f47cb9e2-2306-494d-94ff-b60393a9413e\") " pod="openstack-kuttl-tests/watcher-decision-engine-0" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.792949 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/30b9f169-eef8-4a30-b543-83dda735da70-logs\") pod \"watcher-applier-0\" (UID: \"30b9f169-eef8-4a30-b543-83dda735da70\") " pod="openstack-kuttl-tests/watcher-applier-0" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.792965 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mj5nf\" (UniqueName: \"kubernetes.io/projected/30b9f169-eef8-4a30-b543-83dda735da70-kube-api-access-mj5nf\") pod \"watcher-applier-0\" (UID: \"30b9f169-eef8-4a30-b543-83dda735da70\") " pod="openstack-kuttl-tests/watcher-applier-0" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.792988 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/9d818a7f-96eb-499c-bb53-75b259d30282-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: \"9d818a7f-96eb-499c-bb53-75b259d30282\") " pod="openstack-kuttl-tests/watcher-api-0" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.793007 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f47cb9e2-2306-494d-94ff-b60393a9413e-logs\") pod \"watcher-decision-engine-0\" (UID: \"f47cb9e2-2306-494d-94ff-b60393a9413e\") " pod="openstack-kuttl-tests/watcher-decision-engine-0" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.793039 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30b9f169-eef8-4a30-b543-83dda735da70-combined-ca-bundle\") pod \"watcher-applier-0\" (UID: \"30b9f169-eef8-4a30-b543-83dda735da70\") " pod="openstack-kuttl-tests/watcher-applier-0" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.799726 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30b9f169-eef8-4a30-b543-83dda735da70-combined-ca-bundle\") pod \"watcher-applier-0\" (UID: \"30b9f169-eef8-4a30-b543-83dda735da70\") " pod="openstack-kuttl-tests/watcher-applier-0" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.799726 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d818a7f-96eb-499c-bb53-75b259d30282-combined-ca-bundle\") pod \"watcher-api-0\" (UID: 
\"9d818a7f-96eb-499c-bb53-75b259d30282\") " pod="openstack-kuttl-tests/watcher-api-0" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.799993 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9d818a7f-96eb-499c-bb53-75b259d30282-logs\") pod \"watcher-api-0\" (UID: \"9d818a7f-96eb-499c-bb53-75b259d30282\") " pod="openstack-kuttl-tests/watcher-api-0" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.800227 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/30b9f169-eef8-4a30-b543-83dda735da70-logs\") pod \"watcher-applier-0\" (UID: \"30b9f169-eef8-4a30-b543-83dda735da70\") " pod="openstack-kuttl-tests/watcher-applier-0" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.803590 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d818a7f-96eb-499c-bb53-75b259d30282-config-data\") pod \"watcher-api-0\" (UID: \"9d818a7f-96eb-499c-bb53-75b259d30282\") " pod="openstack-kuttl-tests/watcher-api-0" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.806138 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30b9f169-eef8-4a30-b543-83dda735da70-config-data\") pod \"watcher-applier-0\" (UID: \"30b9f169-eef8-4a30-b543-83dda735da70\") " pod="openstack-kuttl-tests/watcher-applier-0" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.812015 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/9d818a7f-96eb-499c-bb53-75b259d30282-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: \"9d818a7f-96eb-499c-bb53-75b259d30282\") " pod="openstack-kuttl-tests/watcher-api-0" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.814598 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-75484ddb58-9p7fq" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.825921 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mj5nf\" (UniqueName: \"kubernetes.io/projected/30b9f169-eef8-4a30-b543-83dda735da70-kube-api-access-mj5nf\") pod \"watcher-applier-0\" (UID: \"30b9f169-eef8-4a30-b543-83dda735da70\") " pod="openstack-kuttl-tests/watcher-applier-0" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.826438 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-55f98f9877-gm2wm" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.827150 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wwgzq\" (UniqueName: \"kubernetes.io/projected/9d818a7f-96eb-499c-bb53-75b259d30282-kube-api-access-wwgzq\") pod \"watcher-api-0\" (UID: \"9d818a7f-96eb-499c-bb53-75b259d30282\") " pod="openstack-kuttl-tests/watcher-api-0" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.835682 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-wcsxk" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.850321 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-api-9f6d49bb-klwm8" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.896471 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f47cb9e2-2306-494d-94ff-b60393a9413e-combined-ca-bundle\") pod \"watcher-decision-engine-0\" (UID: \"f47cb9e2-2306-494d-94ff-b60393a9413e\") " pod="openstack-kuttl-tests/watcher-decision-engine-0" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.896890 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f47cb9e2-2306-494d-94ff-b60393a9413e-config-data\") pod \"watcher-decision-engine-0\" (UID: \"f47cb9e2-2306-494d-94ff-b60393a9413e\") " pod="openstack-kuttl-tests/watcher-decision-engine-0" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.897012 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/f47cb9e2-2306-494d-94ff-b60393a9413e-custom-prometheus-ca\") pod \"watcher-decision-engine-0\" (UID: \"f47cb9e2-2306-494d-94ff-b60393a9413e\") " pod="openstack-kuttl-tests/watcher-decision-engine-0" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.897174 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wvkrs\" (UniqueName: \"kubernetes.io/projected/f47cb9e2-2306-494d-94ff-b60393a9413e-kube-api-access-wvkrs\") pod \"watcher-decision-engine-0\" (UID: \"f47cb9e2-2306-494d-94ff-b60393a9413e\") " pod="openstack-kuttl-tests/watcher-decision-engine-0" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.897233 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f47cb9e2-2306-494d-94ff-b60393a9413e-logs\") pod \"watcher-decision-engine-0\" (UID: \"f47cb9e2-2306-494d-94ff-b60393a9413e\") " pod="openstack-kuttl-tests/watcher-decision-engine-0" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.897722 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f47cb9e2-2306-494d-94ff-b60393a9413e-logs\") pod \"watcher-decision-engine-0\" (UID: \"f47cb9e2-2306-494d-94ff-b60393a9413e\") " pod="openstack-kuttl-tests/watcher-decision-engine-0" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.909758 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f47cb9e2-2306-494d-94ff-b60393a9413e-config-data\") pod \"watcher-decision-engine-0\" (UID: \"f47cb9e2-2306-494d-94ff-b60393a9413e\") " pod="openstack-kuttl-tests/watcher-decision-engine-0" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.910593 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/f47cb9e2-2306-494d-94ff-b60393a9413e-custom-prometheus-ca\") pod \"watcher-decision-engine-0\" (UID: \"f47cb9e2-2306-494d-94ff-b60393a9413e\") " pod="openstack-kuttl-tests/watcher-decision-engine-0" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.911692 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f47cb9e2-2306-494d-94ff-b60393a9413e-combined-ca-bundle\") pod \"watcher-decision-engine-0\" (UID: \"f47cb9e2-2306-494d-94ff-b60393a9413e\") " 
pod="openstack-kuttl-tests/watcher-decision-engine-0" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.919646 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wvkrs\" (UniqueName: \"kubernetes.io/projected/f47cb9e2-2306-494d-94ff-b60393a9413e-kube-api-access-wvkrs\") pod \"watcher-decision-engine-0\" (UID: \"f47cb9e2-2306-494d-94ff-b60393a9413e\") " pod="openstack-kuttl-tests/watcher-decision-engine-0" Jan 20 17:13:21 crc kubenswrapper[4558]: I0120 17:13:21.980881 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/watcher-api-0" Jan 20 17:13:22 crc kubenswrapper[4558]: I0120 17:13:22.003790 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/watcher-applier-0" Jan 20 17:13:22 crc kubenswrapper[4558]: I0120 17:13:22.061150 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/watcher-decision-engine-0" Jan 20 17:13:22 crc kubenswrapper[4558]: I0120 17:13:22.069562 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:13:22 crc kubenswrapper[4558]: I0120 17:13:22.069604 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:13:22 crc kubenswrapper[4558]: I0120 17:13:22.130712 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"52844554-ecdd-40db-ae62-0f0de3d9030d","Type":"ContainerStarted","Data":"43b7757fe757d67b5cf9c9c9b7f1b9c7fa72b3c8b9da6c03be8b5de7cdde4efb"} Jan 20 17:13:22 crc kubenswrapper[4558]: I0120 17:13:22.131096 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="52844554-ecdd-40db-ae62-0f0de3d9030d" containerName="ceilometer-central-agent" containerID="cri-o://3c009e1d5d872ae0cc303819626a3fcbec01907270a323804faaa0a2fe958dd6" gracePeriod=30 Jan 20 17:13:22 crc kubenswrapper[4558]: I0120 17:13:22.131364 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:13:22 crc kubenswrapper[4558]: I0120 17:13:22.131614 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="52844554-ecdd-40db-ae62-0f0de3d9030d" containerName="proxy-httpd" containerID="cri-o://43b7757fe757d67b5cf9c9c9b7f1b9c7fa72b3c8b9da6c03be8b5de7cdde4efb" gracePeriod=30 Jan 20 17:13:22 crc kubenswrapper[4558]: I0120 17:13:22.131661 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="52844554-ecdd-40db-ae62-0f0de3d9030d" containerName="sg-core" containerID="cri-o://6d036b93fa5cf012470faed7d76baa620b68e49cfc21d8f4c3db9b864a1baeed" gracePeriod=30 Jan 20 17:13:22 crc kubenswrapper[4558]: I0120 17:13:22.131696 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="52844554-ecdd-40db-ae62-0f0de3d9030d" containerName="ceilometer-notification-agent" containerID="cri-o://217dca2679c63dd7846ef65d423f3eea95f3f3ffc7c680a0e1fc9edda8767cd7" gracePeriod=30 Jan 20 17:13:22 crc kubenswrapper[4558]: I0120 17:13:22.160961 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:13:22 crc 
kubenswrapper[4558]: I0120 17:13:22.162628 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=9.341753046000001 podStartE2EDuration="14.162613409s" podCreationTimestamp="2026-01-20 17:13:08 +0000 UTC" firstStartedPulling="2026-01-20 17:13:16.453534178 +0000 UTC m=+1890.213872135" lastFinishedPulling="2026-01-20 17:13:21.27439453 +0000 UTC m=+1895.034732498" observedRunningTime="2026-01-20 17:13:22.145833966 +0000 UTC m=+1895.906171933" watchObservedRunningTime="2026-01-20 17:13:22.162613409 +0000 UTC m=+1895.922951376" Jan 20 17:13:22 crc kubenswrapper[4558]: I0120 17:13:22.166688 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:13:22 crc kubenswrapper[4558]: I0120 17:13:22.168801 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb","Type":"ContainerStarted","Data":"37c69cdd0c87e5dab78bf80037c40a035a8f38600230f3ea65b93bc33964dc4e"} Jan 20 17:13:22 crc kubenswrapper[4558]: I0120 17:13:22.370855 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-7ddd44bcf8-9tj9c"] Jan 20 17:13:22 crc kubenswrapper[4558]: I0120 17:13:22.579574 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d81dc2b3-5e3d-4b54-98f5-938c751dcaed" path="/var/lib/kubelet/pods/d81dc2b3-5e3d-4b54-98f5-938c751dcaed/volumes" Jan 20 17:13:22 crc kubenswrapper[4558]: I0120 17:13:22.866059 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-db-sync-xmmz7" Jan 20 17:13:22 crc kubenswrapper[4558]: I0120 17:13:22.888957 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-wcsxk"] Jan 20 17:13:22 crc kubenswrapper[4558]: I0120 17:13:22.896426 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-75484ddb58-9p7fq"] Jan 20 17:13:22 crc kubenswrapper[4558]: W0120 17:13:22.896820 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4f712673_2484_4dc7_9677_344aa012e994.slice/crio-742766fa00ccaa0569089ca1a686d86515119aea8175ecee023ecbfff810c290 WatchSource:0}: Error finding container 742766fa00ccaa0569089ca1a686d86515119aea8175ecee023ecbfff810c290: Status 404 returned error can't find the container with id 742766fa00ccaa0569089ca1a686d86515119aea8175ecee023ecbfff810c290 Jan 20 17:13:22 crc kubenswrapper[4558]: I0120 17:13:22.901381 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-api-9f6d49bb-klwm8"] Jan 20 17:13:22 crc kubenswrapper[4558]: I0120 17:13:22.911415 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-worker-55f98f9877-gm2wm"] Jan 20 17:13:22 crc kubenswrapper[4558]: W0120 17:13:22.925059 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod462f58d4_b41a_45b8_aa8a_003bfe30e625.slice/crio-db613104669c89ae6b312be6587dc38f5bd96e683a72780c04ce61c6c65b84f9 WatchSource:0}: Error finding container db613104669c89ae6b312be6587dc38f5bd96e683a72780c04ce61c6c65b84f9: Status 404 returned error can't find the container with id db613104669c89ae6b312be6587dc38f5bd96e683a72780c04ce61c6c65b84f9 Jan 
20 17:13:22 crc kubenswrapper[4558]: I0120 17:13:22.985421 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/watcher-applier-0"] Jan 20 17:13:23 crc kubenswrapper[4558]: W0120 17:13:23.004361 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf47cb9e2_2306_494d_94ff_b60393a9413e.slice/crio-14ba1f05070ba36b446d53c2741a503c6e949900568de158a24d243b2fee06d8 WatchSource:0}: Error finding container 14ba1f05070ba36b446d53c2741a503c6e949900568de158a24d243b2fee06d8: Status 404 returned error can't find the container with id 14ba1f05070ba36b446d53c2741a503c6e949900568de158a24d243b2fee06d8 Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.013524 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/watcher-api-0"] Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.016816 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/watcher-decision-engine-0"] Jan 20 17:13:23 crc kubenswrapper[4558]: W0120 17:13:23.027199 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod30b9f169_eef8_4a30_b543_83dda735da70.slice/crio-9dc1080b55204391c2413ceefa32984f19b36cde05aade1975a34ec4f434e74a WatchSource:0}: Error finding container 9dc1080b55204391c2413ceefa32984f19b36cde05aade1975a34ec4f434e74a: Status 404 returned error can't find the container with id 9dc1080b55204391c2413ceefa32984f19b36cde05aade1975a34ec4f434e74a Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.037243 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4e87c463-6ff6-4a56-a207-b6b86578ec36-db-sync-config-data\") pod \"4e87c463-6ff6-4a56-a207-b6b86578ec36\" (UID: \"4e87c463-6ff6-4a56-a207-b6b86578ec36\") " Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.037284 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e87c463-6ff6-4a56-a207-b6b86578ec36-combined-ca-bundle\") pod \"4e87c463-6ff6-4a56-a207-b6b86578ec36\" (UID: \"4e87c463-6ff6-4a56-a207-b6b86578ec36\") " Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.037309 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e87c463-6ff6-4a56-a207-b6b86578ec36-scripts\") pod \"4e87c463-6ff6-4a56-a207-b6b86578ec36\" (UID: \"4e87c463-6ff6-4a56-a207-b6b86578ec36\") " Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.037326 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4e87c463-6ff6-4a56-a207-b6b86578ec36-etc-machine-id\") pod \"4e87c463-6ff6-4a56-a207-b6b86578ec36\" (UID: \"4e87c463-6ff6-4a56-a207-b6b86578ec36\") " Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.037357 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e87c463-6ff6-4a56-a207-b6b86578ec36-config-data\") pod \"4e87c463-6ff6-4a56-a207-b6b86578ec36\" (UID: \"4e87c463-6ff6-4a56-a207-b6b86578ec36\") " Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.037485 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mt52t\" (UniqueName: 
\"kubernetes.io/projected/4e87c463-6ff6-4a56-a207-b6b86578ec36-kube-api-access-mt52t\") pod \"4e87c463-6ff6-4a56-a207-b6b86578ec36\" (UID: \"4e87c463-6ff6-4a56-a207-b6b86578ec36\") " Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.038357 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4e87c463-6ff6-4a56-a207-b6b86578ec36-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "4e87c463-6ff6-4a56-a207-b6b86578ec36" (UID: "4e87c463-6ff6-4a56-a207-b6b86578ec36"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.045895 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e87c463-6ff6-4a56-a207-b6b86578ec36-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "4e87c463-6ff6-4a56-a207-b6b86578ec36" (UID: "4e87c463-6ff6-4a56-a207-b6b86578ec36"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.048080 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e87c463-6ff6-4a56-a207-b6b86578ec36-scripts" (OuterVolumeSpecName: "scripts") pod "4e87c463-6ff6-4a56-a207-b6b86578ec36" (UID: "4e87c463-6ff6-4a56-a207-b6b86578ec36"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.060957 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e87c463-6ff6-4a56-a207-b6b86578ec36-kube-api-access-mt52t" (OuterVolumeSpecName: "kube-api-access-mt52t") pod "4e87c463-6ff6-4a56-a207-b6b86578ec36" (UID: "4e87c463-6ff6-4a56-a207-b6b86578ec36"). InnerVolumeSpecName "kube-api-access-mt52t". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.110951 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e87c463-6ff6-4a56-a207-b6b86578ec36-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4e87c463-6ff6-4a56-a207-b6b86578ec36" (UID: "4e87c463-6ff6-4a56-a207-b6b86578ec36"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.136971 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e87c463-6ff6-4a56-a207-b6b86578ec36-config-data" (OuterVolumeSpecName: "config-data") pod "4e87c463-6ff6-4a56-a207-b6b86578ec36" (UID: "4e87c463-6ff6-4a56-a207-b6b86578ec36"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.140802 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e87c463-6ff6-4a56-a207-b6b86578ec36-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.140855 4558 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4e87c463-6ff6-4a56-a207-b6b86578ec36-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.140866 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e87c463-6ff6-4a56-a207-b6b86578ec36-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.140877 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mt52t\" (UniqueName: \"kubernetes.io/projected/4e87c463-6ff6-4a56-a207-b6b86578ec36-kube-api-access-mt52t\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.140888 4558 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4e87c463-6ff6-4a56-a207-b6b86578ec36-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.141268 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e87c463-6ff6-4a56-a207-b6b86578ec36-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.197364 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/watcher-decision-engine-0" event={"ID":"f47cb9e2-2306-494d-94ff-b60393a9413e","Type":"ContainerStarted","Data":"14ba1f05070ba36b446d53c2741a503c6e949900568de158a24d243b2fee06d8"} Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.210347 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/watcher-applier-0" event={"ID":"30b9f169-eef8-4a30-b543-83dda735da70","Type":"ContainerStarted","Data":"9dc1080b55204391c2413ceefa32984f19b36cde05aade1975a34ec4f434e74a"} Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.258384 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-7ddd44bcf8-9tj9c" event={"ID":"249a3706-31a6-4fb2-9a3f-94700d9ae30e","Type":"ContainerStarted","Data":"5a1fe60313509d6c2bf1b4b977c2854c74261997efa724134b9eef9555b92ec0"} Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.258442 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-7ddd44bcf8-9tj9c" event={"ID":"249a3706-31a6-4fb2-9a3f-94700d9ae30e","Type":"ContainerStarted","Data":"3195aba3a065ce0b79495382f56692fe1c0dcdeb28d7112bbbc4da678d816321"} Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.270946 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:13:23 crc kubenswrapper[4558]: E0120 17:13:23.271493 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e87c463-6ff6-4a56-a207-b6b86578ec36" containerName="cinder-db-sync" Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.271511 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e87c463-6ff6-4a56-a207-b6b86578ec36" containerName="cinder-db-sync" Jan 20 17:13:23 crc kubenswrapper[4558]: 
I0120 17:13:23.271706 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e87c463-6ff6-4a56-a207-b6b86578ec36" containerName="cinder-db-sync" Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.272724 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.278540 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-scheduler-config-data" Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.280192 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb","Type":"ContainerStarted","Data":"2345422c3ac145fd1d8e65d12769cbcb80d89d096669fb577251421566912341"} Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.284851 4558 generic.go:334] "Generic (PLEG): container finished" podID="52844554-ecdd-40db-ae62-0f0de3d9030d" containerID="6d036b93fa5cf012470faed7d76baa620b68e49cfc21d8f4c3db9b864a1baeed" exitCode=2 Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.284887 4558 generic.go:334] "Generic (PLEG): container finished" podID="52844554-ecdd-40db-ae62-0f0de3d9030d" containerID="217dca2679c63dd7846ef65d423f3eea95f3f3ffc7c680a0e1fc9edda8767cd7" exitCode=0 Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.284896 4558 generic.go:334] "Generic (PLEG): container finished" podID="52844554-ecdd-40db-ae62-0f0de3d9030d" containerID="3c009e1d5d872ae0cc303819626a3fcbec01907270a323804faaa0a2fe958dd6" exitCode=0 Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.284933 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"52844554-ecdd-40db-ae62-0f0de3d9030d","Type":"ContainerDied","Data":"6d036b93fa5cf012470faed7d76baa620b68e49cfc21d8f4c3db9b864a1baeed"} Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.284950 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"52844554-ecdd-40db-ae62-0f0de3d9030d","Type":"ContainerDied","Data":"217dca2679c63dd7846ef65d423f3eea95f3f3ffc7c680a0e1fc9edda8767cd7"} Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.284960 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"52844554-ecdd-40db-ae62-0f0de3d9030d","Type":"ContainerDied","Data":"3c009e1d5d872ae0cc303819626a3fcbec01907270a323804faaa0a2fe958dd6"} Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.286518 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-db-sync-xmmz7" Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.286950 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-sync-xmmz7" event={"ID":"4e87c463-6ff6-4a56-a207-b6b86578ec36","Type":"ContainerDied","Data":"27531b2b5f6f9170a62fb7645bec0cee5541b19a0b1b664f62d55b883f94d981"} Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.286986 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="27531b2b5f6f9170a62fb7645bec0cee5541b19a0b1b664f62d55b883f94d981" Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.294346 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.338340 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-9f6d49bb-klwm8" event={"ID":"4f712673-2484-4dc7-9677-344aa012e994","Type":"ContainerStarted","Data":"15337f336c8fb92e0fbc69b63d63f594d01aa962d1ec2bfd37c990066c99f695"} Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.338399 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-9f6d49bb-klwm8" event={"ID":"4f712673-2484-4dc7-9677-344aa012e994","Type":"ContainerStarted","Data":"742766fa00ccaa0569089ca1a686d86515119aea8175ecee023ecbfff810c290"} Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.341061 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-wcsxk" event={"ID":"29ba41cf-902a-4ee1-88c9-d1db60f5e9ab","Type":"ContainerStarted","Data":"f99b4296bd5b00c49e313c96740dc5b93241fc455baebd491918d5e761d9430d"} Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.341109 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-wcsxk" event={"ID":"29ba41cf-902a-4ee1-88c9-d1db60f5e9ab","Type":"ContainerStarted","Data":"a044263419798af2b990cc6751997e70fb59c7de958d1fe23b64b6fec8924ce8"} Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.349286 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/watcher-api-0" event={"ID":"9d818a7f-96eb-499c-bb53-75b259d30282","Type":"ContainerStarted","Data":"f715178005e09dde7f62ac369f14e46483409f8b24358919c2ba9baed7d61d84"} Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.350734 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc4869f2-03aa-4eb4-838c-5999cf611b1e-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"dc4869f2-03aa-4eb4-838c-5999cf611b1e\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.350776 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j84j5\" (UniqueName: \"kubernetes.io/projected/dc4869f2-03aa-4eb4-838c-5999cf611b1e-kube-api-access-j84j5\") pod \"cinder-scheduler-0\" (UID: \"dc4869f2-03aa-4eb4-838c-5999cf611b1e\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.350876 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/dc4869f2-03aa-4eb4-838c-5999cf611b1e-config-data-custom\") pod \"cinder-scheduler-0\" (UID: 
\"dc4869f2-03aa-4eb4-838c-5999cf611b1e\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.350921 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc4869f2-03aa-4eb4-838c-5999cf611b1e-config-data\") pod \"cinder-scheduler-0\" (UID: \"dc4869f2-03aa-4eb4-838c-5999cf611b1e\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.351028 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/dc4869f2-03aa-4eb4-838c-5999cf611b1e-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"dc4869f2-03aa-4eb4-838c-5999cf611b1e\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.351059 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dc4869f2-03aa-4eb4-838c-5999cf611b1e-scripts\") pod \"cinder-scheduler-0\" (UID: \"dc4869f2-03aa-4eb4-838c-5999cf611b1e\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.354185 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-55f98f9877-gm2wm" event={"ID":"462f58d4-b41a-45b8-aa8a-003bfe30e625","Type":"ContainerStarted","Data":"696c95ac455228cd89e273ea6e2bcb78c3bf7063a8f5ebba19cde538e34d64bb"} Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.354209 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-55f98f9877-gm2wm" event={"ID":"462f58d4-b41a-45b8-aa8a-003bfe30e625","Type":"ContainerStarted","Data":"db613104669c89ae6b312be6587dc38f5bd96e683a72780c04ce61c6c65b84f9"} Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.376671 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-75484ddb58-9p7fq" event={"ID":"8dfaaf1f-540b-4c8b-b93c-1e3556c37801","Type":"ContainerStarted","Data":"bbac2ee5bf98eddd33a60a5edd0a1dac188b6c3de922ce151b7900626fe0fff8"} Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.376716 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-75484ddb58-9p7fq" event={"ID":"8dfaaf1f-540b-4c8b-b93c-1e3556c37801","Type":"ContainerStarted","Data":"04964c7fbc6fe6167016fdbfd0babb05695f0f29b288f1d7c5a026f79e48f86d"} Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.377075 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.377485 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.380711 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-internal-api-0" podStartSLOduration=4.380694936 podStartE2EDuration="4.380694936s" podCreationTimestamp="2026-01-20 17:13:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:13:23.348664525 +0000 UTC m=+1897.109002492" watchObservedRunningTime="2026-01-20 17:13:23.380694936 +0000 UTC 
m=+1897.141032904" Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.381590 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.383853 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.399913 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-api-config-data" Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.411033 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.418621 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/keystone-bootstrap-wcsxk" podStartSLOduration=2.418604283 podStartE2EDuration="2.418604283s" podCreationTimestamp="2026-01-20 17:13:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:13:23.368028027 +0000 UTC m=+1897.128365994" watchObservedRunningTime="2026-01-20 17:13:23.418604283 +0000 UTC m=+1897.178942250" Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.452252 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/dc4869f2-03aa-4eb4-838c-5999cf611b1e-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"dc4869f2-03aa-4eb4-838c-5999cf611b1e\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.452572 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc4869f2-03aa-4eb4-838c-5999cf611b1e-config-data\") pod \"cinder-scheduler-0\" (UID: \"dc4869f2-03aa-4eb4-838c-5999cf611b1e\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.452637 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/dc4869f2-03aa-4eb4-838c-5999cf611b1e-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"dc4869f2-03aa-4eb4-838c-5999cf611b1e\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.452655 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dc4869f2-03aa-4eb4-838c-5999cf611b1e-scripts\") pod \"cinder-scheduler-0\" (UID: \"dc4869f2-03aa-4eb4-838c-5999cf611b1e\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.452735 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc4869f2-03aa-4eb4-838c-5999cf611b1e-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"dc4869f2-03aa-4eb4-838c-5999cf611b1e\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.452757 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j84j5\" (UniqueName: \"kubernetes.io/projected/dc4869f2-03aa-4eb4-838c-5999cf611b1e-kube-api-access-j84j5\") pod \"cinder-scheduler-0\" (UID: \"dc4869f2-03aa-4eb4-838c-5999cf611b1e\") " 
pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.455314 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/dc4869f2-03aa-4eb4-838c-5999cf611b1e-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"dc4869f2-03aa-4eb4-838c-5999cf611b1e\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.459693 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dc4869f2-03aa-4eb4-838c-5999cf611b1e-scripts\") pod \"cinder-scheduler-0\" (UID: \"dc4869f2-03aa-4eb4-838c-5999cf611b1e\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.459997 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/dc4869f2-03aa-4eb4-838c-5999cf611b1e-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"dc4869f2-03aa-4eb4-838c-5999cf611b1e\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.460029 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc4869f2-03aa-4eb4-838c-5999cf611b1e-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"dc4869f2-03aa-4eb4-838c-5999cf611b1e\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.462317 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc4869f2-03aa-4eb4-838c-5999cf611b1e-config-data\") pod \"cinder-scheduler-0\" (UID: \"dc4869f2-03aa-4eb4-838c-5999cf611b1e\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.479649 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j84j5\" (UniqueName: \"kubernetes.io/projected/dc4869f2-03aa-4eb4-838c-5999cf611b1e-kube-api-access-j84j5\") pod \"cinder-scheduler-0\" (UID: \"dc4869f2-03aa-4eb4-838c-5999cf611b1e\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.555111 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/187eeeab-5309-4d74-94fb-3ff6e79caca7-config-data\") pod \"cinder-api-0\" (UID: \"187eeeab-5309-4d74-94fb-3ff6e79caca7\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.555265 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/187eeeab-5309-4d74-94fb-3ff6e79caca7-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"187eeeab-5309-4d74-94fb-3ff6e79caca7\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.555300 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/187eeeab-5309-4d74-94fb-3ff6e79caca7-scripts\") pod \"cinder-api-0\" (UID: \"187eeeab-5309-4d74-94fb-3ff6e79caca7\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.555361 4558 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/187eeeab-5309-4d74-94fb-3ff6e79caca7-etc-machine-id\") pod \"cinder-api-0\" (UID: \"187eeeab-5309-4d74-94fb-3ff6e79caca7\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.555512 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/187eeeab-5309-4d74-94fb-3ff6e79caca7-config-data-custom\") pod \"cinder-api-0\" (UID: \"187eeeab-5309-4d74-94fb-3ff6e79caca7\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.555627 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/187eeeab-5309-4d74-94fb-3ff6e79caca7-logs\") pod \"cinder-api-0\" (UID: \"187eeeab-5309-4d74-94fb-3ff6e79caca7\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.555712 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qndpk\" (UniqueName: \"kubernetes.io/projected/187eeeab-5309-4d74-94fb-3ff6e79caca7-kube-api-access-qndpk\") pod \"cinder-api-0\" (UID: \"187eeeab-5309-4d74-94fb-3ff6e79caca7\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.658155 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/187eeeab-5309-4d74-94fb-3ff6e79caca7-config-data-custom\") pod \"cinder-api-0\" (UID: \"187eeeab-5309-4d74-94fb-3ff6e79caca7\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.658273 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/187eeeab-5309-4d74-94fb-3ff6e79caca7-logs\") pod \"cinder-api-0\" (UID: \"187eeeab-5309-4d74-94fb-3ff6e79caca7\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.658330 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qndpk\" (UniqueName: \"kubernetes.io/projected/187eeeab-5309-4d74-94fb-3ff6e79caca7-kube-api-access-qndpk\") pod \"cinder-api-0\" (UID: \"187eeeab-5309-4d74-94fb-3ff6e79caca7\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.658359 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/187eeeab-5309-4d74-94fb-3ff6e79caca7-config-data\") pod \"cinder-api-0\" (UID: \"187eeeab-5309-4d74-94fb-3ff6e79caca7\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.658973 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/187eeeab-5309-4d74-94fb-3ff6e79caca7-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"187eeeab-5309-4d74-94fb-3ff6e79caca7\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.659016 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/187eeeab-5309-4d74-94fb-3ff6e79caca7-scripts\") pod 
\"cinder-api-0\" (UID: \"187eeeab-5309-4d74-94fb-3ff6e79caca7\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.659106 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/187eeeab-5309-4d74-94fb-3ff6e79caca7-etc-machine-id\") pod \"cinder-api-0\" (UID: \"187eeeab-5309-4d74-94fb-3ff6e79caca7\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.659249 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/187eeeab-5309-4d74-94fb-3ff6e79caca7-etc-machine-id\") pod \"cinder-api-0\" (UID: \"187eeeab-5309-4d74-94fb-3ff6e79caca7\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.659544 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/187eeeab-5309-4d74-94fb-3ff6e79caca7-logs\") pod \"cinder-api-0\" (UID: \"187eeeab-5309-4d74-94fb-3ff6e79caca7\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.664765 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/187eeeab-5309-4d74-94fb-3ff6e79caca7-config-data-custom\") pod \"cinder-api-0\" (UID: \"187eeeab-5309-4d74-94fb-3ff6e79caca7\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.665143 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/187eeeab-5309-4d74-94fb-3ff6e79caca7-scripts\") pod \"cinder-api-0\" (UID: \"187eeeab-5309-4d74-94fb-3ff6e79caca7\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.670480 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/187eeeab-5309-4d74-94fb-3ff6e79caca7-config-data\") pod \"cinder-api-0\" (UID: \"187eeeab-5309-4d74-94fb-3ff6e79caca7\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.671894 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.672985 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/187eeeab-5309-4d74-94fb-3ff6e79caca7-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"187eeeab-5309-4d74-94fb-3ff6e79caca7\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.695243 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qndpk\" (UniqueName: \"kubernetes.io/projected/187eeeab-5309-4d74-94fb-3ff6e79caca7-kube-api-access-qndpk\") pod \"cinder-api-0\" (UID: \"187eeeab-5309-4d74-94fb-3ff6e79caca7\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:13:23 crc kubenswrapper[4558]: I0120 17:13:23.729972 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:13:24 crc kubenswrapper[4558]: I0120 17:13:24.225647 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:13:24 crc kubenswrapper[4558]: I0120 17:13:24.393080 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:13:24 crc kubenswrapper[4558]: I0120 17:13:24.397121 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-75484ddb58-9p7fq" event={"ID":"8dfaaf1f-540b-4c8b-b93c-1e3556c37801","Type":"ContainerStarted","Data":"ac12e2c480e51bbc2f445d7182e6e37c4208d79ce61c920b8055cf83b75b4e84"} Jan 20 17:13:24 crc kubenswrapper[4558]: I0120 17:13:24.412361 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-9f6d49bb-klwm8" event={"ID":"4f712673-2484-4dc7-9677-344aa012e994","Type":"ContainerStarted","Data":"4a4f4bedefc86803f964f405689218285f7aa370ef9d7b89f4551921d9e7ab94"} Jan 20 17:13:24 crc kubenswrapper[4558]: I0120 17:13:24.412416 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/barbican-api-9f6d49bb-klwm8" Jan 20 17:13:24 crc kubenswrapper[4558]: I0120 17:13:24.412451 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/barbican-api-9f6d49bb-klwm8" Jan 20 17:13:24 crc kubenswrapper[4558]: I0120 17:13:24.415006 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-keystone-listener-75484ddb58-9p7fq" podStartSLOduration=3.414992688 podStartE2EDuration="3.414992688s" podCreationTimestamp="2026-01-20 17:13:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:13:24.411676683 +0000 UTC m=+1898.172014650" watchObservedRunningTime="2026-01-20 17:13:24.414992688 +0000 UTC m=+1898.175330656" Jan 20 17:13:24 crc kubenswrapper[4558]: I0120 17:13:24.436626 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/watcher-api-0" event={"ID":"9d818a7f-96eb-499c-bb53-75b259d30282","Type":"ContainerStarted","Data":"4d800f765daaca579849767e5cdf500af707cd7b1a7b9da4dc81822c77f4680d"} Jan 20 17:13:24 crc kubenswrapper[4558]: I0120 17:13:24.436661 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/watcher-api-0" event={"ID":"9d818a7f-96eb-499c-bb53-75b259d30282","Type":"ContainerStarted","Data":"24e4540d7ddf0429e73f6d20be940f8c915072c049401903c2e81ab3d7419eb1"} Jan 20 17:13:24 crc kubenswrapper[4558]: I0120 17:13:24.437443 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/watcher-api-0" Jan 20 17:13:24 crc kubenswrapper[4558]: I0120 17:13:24.451090 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-api-9f6d49bb-klwm8" podStartSLOduration=3.4510440510000002 podStartE2EDuration="3.451044051s" podCreationTimestamp="2026-01-20 17:13:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:13:24.449068856 +0000 UTC m=+1898.209406844" watchObservedRunningTime="2026-01-20 17:13:24.451044051 +0000 UTC m=+1898.211382018" Jan 20 17:13:24 crc kubenswrapper[4558]: I0120 17:13:24.464189 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-kuttl-tests/barbican-worker-55f98f9877-gm2wm" event={"ID":"462f58d4-b41a-45b8-aa8a-003bfe30e625","Type":"ContainerStarted","Data":"0da15b1363218841887085f4193f9590c3888bf6f9f12345ba5c89c89755a3f7"} Jan 20 17:13:24 crc kubenswrapper[4558]: I0120 17:13:24.471651 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/watcher-api-0" podStartSLOduration=3.471631144 podStartE2EDuration="3.471631144s" podCreationTimestamp="2026-01-20 17:13:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:13:24.46764565 +0000 UTC m=+1898.227983617" watchObservedRunningTime="2026-01-20 17:13:24.471631144 +0000 UTC m=+1898.231969111" Jan 20 17:13:24 crc kubenswrapper[4558]: I0120 17:13:24.491992 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-7ddd44bcf8-9tj9c" event={"ID":"249a3706-31a6-4fb2-9a3f-94700d9ae30e","Type":"ContainerStarted","Data":"1d078bc9b7292d0036875780ff82d54713ce56fd9923a103f4645e0f8a09e9ff"} Jan 20 17:13:24 crc kubenswrapper[4558]: I0120 17:13:24.492248 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/placement-7ddd44bcf8-9tj9c" Jan 20 17:13:24 crc kubenswrapper[4558]: I0120 17:13:24.492294 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/placement-7ddd44bcf8-9tj9c" Jan 20 17:13:24 crc kubenswrapper[4558]: I0120 17:13:24.496657 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-worker-55f98f9877-gm2wm" podStartSLOduration=3.496640072 podStartE2EDuration="3.496640072s" podCreationTimestamp="2026-01-20 17:13:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:13:24.491879941 +0000 UTC m=+1898.252217909" watchObservedRunningTime="2026-01-20 17:13:24.496640072 +0000 UTC m=+1898.256978039" Jan 20 17:13:24 crc kubenswrapper[4558]: I0120 17:13:24.514976 4558 generic.go:334] "Generic (PLEG): container finished" podID="e87a4844-8a90-4965-878f-d95aa09c47bb" containerID="aa1db8c742dbd3178eb1465494b8e471f9616f4072004ba4f8cbbb2be9c8f316" exitCode=0 Jan 20 17:13:24 crc kubenswrapper[4558]: I0120 17:13:24.515083 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/prometheus-metric-storage-0" event={"ID":"e87a4844-8a90-4965-878f-d95aa09c47bb","Type":"ContainerDied","Data":"aa1db8c742dbd3178eb1465494b8e471f9616f4072004ba4f8cbbb2be9c8f316"} Jan 20 17:13:24 crc kubenswrapper[4558]: I0120 17:13:24.525271 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"dc4869f2-03aa-4eb4-838c-5999cf611b1e","Type":"ContainerStarted","Data":"85584a435a55ae996a57544225016dbea39899f0efaf4c0ef3109b0a3c73b7a6"} Jan 20 17:13:24 crc kubenswrapper[4558]: I0120 17:13:24.529329 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/placement-7ddd44bcf8-9tj9c" podStartSLOduration=3.52930639 podStartE2EDuration="3.52930639s" podCreationTimestamp="2026-01-20 17:13:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:13:24.519631887 +0000 UTC m=+1898.279969854" watchObservedRunningTime="2026-01-20 17:13:24.52930639 +0000 UTC m=+1898.289644356" Jan 20 17:13:25 
crc kubenswrapper[4558]: I0120 17:13:25.549655 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/prometheus-metric-storage-0" event={"ID":"e87a4844-8a90-4965-878f-d95aa09c47bb","Type":"ContainerStarted","Data":"83ca1736c91920d790e234d6dd7cb02e325a0698ad1ac3ce1a25312d1f3c51a8"} Jan 20 17:13:25 crc kubenswrapper[4558]: I0120 17:13:25.556343 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"187eeeab-5309-4d74-94fb-3ff6e79caca7","Type":"ContainerStarted","Data":"2387b6137564d3dbf1d7b355cdca010ebb8096e22cc205591a7b540682fa85c7"} Jan 20 17:13:25 crc kubenswrapper[4558]: I0120 17:13:25.556470 4558 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 20 17:13:25 crc kubenswrapper[4558]: I0120 17:13:25.556480 4558 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 20 17:13:25 crc kubenswrapper[4558]: I0120 17:13:25.858185 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:13:25 crc kubenswrapper[4558]: I0120 17:13:25.985888 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:13:26 crc kubenswrapper[4558]: I0120 17:13:26.604434 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"dc4869f2-03aa-4eb4-838c-5999cf611b1e","Type":"ContainerStarted","Data":"2082a9f2ff86d3cf8772d78180aa514bb9911f6f9bd1df655da6be3d1954fb32"} Jan 20 17:13:26 crc kubenswrapper[4558]: I0120 17:13:26.632834 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"187eeeab-5309-4d74-94fb-3ff6e79caca7","Type":"ContainerStarted","Data":"0c3878257515377d291ca58d12479b0e039a6ba8b5436223da4b4e79734fa49a"} Jan 20 17:13:26 crc kubenswrapper[4558]: I0120 17:13:26.632899 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"187eeeab-5309-4d74-94fb-3ff6e79caca7","Type":"ContainerStarted","Data":"7cde236b788c6e539d179d45e1a6bd302e864e40ee824e5900d677d5f15ca317"} Jan 20 17:13:26 crc kubenswrapper[4558]: I0120 17:13:26.632897 4558 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 20 17:13:26 crc kubenswrapper[4558]: I0120 17:13:26.676709 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/cinder-api-0" podStartSLOduration=3.6766834189999997 podStartE2EDuration="3.676683419s" podCreationTimestamp="2026-01-20 17:13:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:13:26.66008165 +0000 UTC m=+1900.420419617" watchObservedRunningTime="2026-01-20 17:13:26.676683419 +0000 UTC m=+1900.437021385" Jan 20 17:13:26 crc kubenswrapper[4558]: I0120 17:13:26.679582 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/watcher-api-0" Jan 20 17:13:26 crc kubenswrapper[4558]: I0120 17:13:26.943866 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:13:26 crc kubenswrapper[4558]: I0120 17:13:26.982120 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/watcher-api-0" Jan 20 17:13:27 crc kubenswrapper[4558]: I0120 17:13:27.645150 4558 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/watcher-applier-0" event={"ID":"30b9f169-eef8-4a30-b543-83dda735da70","Type":"ContainerStarted","Data":"146969194ab0a40e402393f07d4594aaad069b32a10f5e882b7b5d9c1bed6628"} Jan 20 17:13:27 crc kubenswrapper[4558]: I0120 17:13:27.649632 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/watcher-decision-engine-0" event={"ID":"f47cb9e2-2306-494d-94ff-b60393a9413e","Type":"ContainerStarted","Data":"2100b7c749ddb30bbb158352b085f32659152dcc98b26a89f4ae8fd0e68b79ce"} Jan 20 17:13:27 crc kubenswrapper[4558]: I0120 17:13:27.652401 4558 generic.go:334] "Generic (PLEG): container finished" podID="29ba41cf-902a-4ee1-88c9-d1db60f5e9ab" containerID="f99b4296bd5b00c49e313c96740dc5b93241fc455baebd491918d5e761d9430d" exitCode=0 Jan 20 17:13:27 crc kubenswrapper[4558]: I0120 17:13:27.652506 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-wcsxk" event={"ID":"29ba41cf-902a-4ee1-88c9-d1db60f5e9ab","Type":"ContainerDied","Data":"f99b4296bd5b00c49e313c96740dc5b93241fc455baebd491918d5e761d9430d"} Jan 20 17:13:27 crc kubenswrapper[4558]: I0120 17:13:27.654913 4558 generic.go:334] "Generic (PLEG): container finished" podID="ddaf057e-7323-49e4-a1c5-0f91ad0aaaaa" containerID="8540f5c72d8ae0ebbfdf8ace4baa83976e8887ffaba9374a18c9d79a16cd9d30" exitCode=0 Jan 20 17:13:27 crc kubenswrapper[4558]: I0120 17:13:27.654984 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-sync-5wdp7" event={"ID":"ddaf057e-7323-49e4-a1c5-0f91ad0aaaaa","Type":"ContainerDied","Data":"8540f5c72d8ae0ebbfdf8ace4baa83976e8887ffaba9374a18c9d79a16cd9d30"} Jan 20 17:13:27 crc kubenswrapper[4558]: I0120 17:13:27.658055 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/prometheus-metric-storage-0" event={"ID":"e87a4844-8a90-4965-878f-d95aa09c47bb","Type":"ContainerStarted","Data":"812f4fcb56b8658b4bbf8cdf87243bd17984898af6dea1b34072f67d46707f0b"} Jan 20 17:13:27 crc kubenswrapper[4558]: I0120 17:13:27.658770 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/prometheus-metric-storage-0" event={"ID":"e87a4844-8a90-4965-878f-d95aa09c47bb","Type":"ContainerStarted","Data":"cee5946fda9135814e788d08c6619a590258ddf4784db12ce17b315442c52074"} Jan 20 17:13:27 crc kubenswrapper[4558]: I0120 17:13:27.661302 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"dc4869f2-03aa-4eb4-838c-5999cf611b1e","Type":"ContainerStarted","Data":"21503c4a228c86329bee9a8b686d4bf7d098cd0eccd98077432b6ac0f0932987"} Jan 20 17:13:27 crc kubenswrapper[4558]: I0120 17:13:27.662236 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:13:27 crc kubenswrapper[4558]: I0120 17:13:27.666150 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/watcher-applier-0" podStartSLOduration=2.641469109 podStartE2EDuration="6.666136001s" podCreationTimestamp="2026-01-20 17:13:21 +0000 UTC" firstStartedPulling="2026-01-20 17:13:23.030105823 +0000 UTC m=+1896.790443789" lastFinishedPulling="2026-01-20 17:13:27.054772715 +0000 UTC m=+1900.815110681" observedRunningTime="2026-01-20 17:13:27.663756616 +0000 UTC m=+1901.424094583" watchObservedRunningTime="2026-01-20 17:13:27.666136001 +0000 UTC m=+1901.426473968" Jan 20 17:13:27 crc kubenswrapper[4558]: I0120 17:13:27.743767 4558 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/prometheus-metric-storage-0" podStartSLOduration=20.743745412 podStartE2EDuration="20.743745412s" podCreationTimestamp="2026-01-20 17:13:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:13:27.735862799 +0000 UTC m=+1901.496200766" watchObservedRunningTime="2026-01-20 17:13:27.743745412 +0000 UTC m=+1901.504083378" Jan 20 17:13:27 crc kubenswrapper[4558]: I0120 17:13:27.749678 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/watcher-decision-engine-0" podStartSLOduration=2.711109193 podStartE2EDuration="6.74966865s" podCreationTimestamp="2026-01-20 17:13:21 +0000 UTC" firstStartedPulling="2026-01-20 17:13:23.006470477 +0000 UTC m=+1896.766808444" lastFinishedPulling="2026-01-20 17:13:27.045029944 +0000 UTC m=+1900.805367901" observedRunningTime="2026-01-20 17:13:27.748694618 +0000 UTC m=+1901.509032585" watchObservedRunningTime="2026-01-20 17:13:27.74966865 +0000 UTC m=+1901.510006617" Jan 20 17:13:27 crc kubenswrapper[4558]: I0120 17:13:27.765138 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/cinder-scheduler-0" podStartSLOduration=4.765118552 podStartE2EDuration="4.765118552s" podCreationTimestamp="2026-01-20 17:13:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:13:27.761446928 +0000 UTC m=+1901.521784895" watchObservedRunningTime="2026-01-20 17:13:27.765118552 +0000 UTC m=+1901.525456519" Jan 20 17:13:28 crc kubenswrapper[4558]: I0120 17:13:28.291730 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:13:28 crc kubenswrapper[4558]: I0120 17:13:28.379395 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-api-6587ff9c6b-8wmpk"] Jan 20 17:13:28 crc kubenswrapper[4558]: I0120 17:13:28.380960 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-api-6587ff9c6b-8wmpk" Jan 20 17:13:28 crc kubenswrapper[4558]: I0120 17:13:28.383780 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-barbican-public-svc" Jan 20 17:13:28 crc kubenswrapper[4558]: I0120 17:13:28.385564 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-barbican-internal-svc" Jan 20 17:13:28 crc kubenswrapper[4558]: I0120 17:13:28.403483 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-api-6587ff9c6b-8wmpk"] Jan 20 17:13:28 crc kubenswrapper[4558]: I0120 17:13:28.494403 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9116eb6c-0630-4022-9c43-8c8793e08922-internal-tls-certs\") pod \"barbican-api-6587ff9c6b-8wmpk\" (UID: \"9116eb6c-0630-4022-9c43-8c8793e08922\") " pod="openstack-kuttl-tests/barbican-api-6587ff9c6b-8wmpk" Jan 20 17:13:28 crc kubenswrapper[4558]: I0120 17:13:28.494471 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9116eb6c-0630-4022-9c43-8c8793e08922-logs\") pod \"barbican-api-6587ff9c6b-8wmpk\" (UID: \"9116eb6c-0630-4022-9c43-8c8793e08922\") " pod="openstack-kuttl-tests/barbican-api-6587ff9c6b-8wmpk" Jan 20 17:13:28 crc kubenswrapper[4558]: I0120 17:13:28.494517 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9116eb6c-0630-4022-9c43-8c8793e08922-config-data\") pod \"barbican-api-6587ff9c6b-8wmpk\" (UID: \"9116eb6c-0630-4022-9c43-8c8793e08922\") " pod="openstack-kuttl-tests/barbican-api-6587ff9c6b-8wmpk" Jan 20 17:13:28 crc kubenswrapper[4558]: I0120 17:13:28.494625 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9116eb6c-0630-4022-9c43-8c8793e08922-config-data-custom\") pod \"barbican-api-6587ff9c6b-8wmpk\" (UID: \"9116eb6c-0630-4022-9c43-8c8793e08922\") " pod="openstack-kuttl-tests/barbican-api-6587ff9c6b-8wmpk" Jan 20 17:13:28 crc kubenswrapper[4558]: I0120 17:13:28.494676 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9116eb6c-0630-4022-9c43-8c8793e08922-combined-ca-bundle\") pod \"barbican-api-6587ff9c6b-8wmpk\" (UID: \"9116eb6c-0630-4022-9c43-8c8793e08922\") " pod="openstack-kuttl-tests/barbican-api-6587ff9c6b-8wmpk" Jan 20 17:13:28 crc kubenswrapper[4558]: I0120 17:13:28.494942 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qkt95\" (UniqueName: \"kubernetes.io/projected/9116eb6c-0630-4022-9c43-8c8793e08922-kube-api-access-qkt95\") pod \"barbican-api-6587ff9c6b-8wmpk\" (UID: \"9116eb6c-0630-4022-9c43-8c8793e08922\") " pod="openstack-kuttl-tests/barbican-api-6587ff9c6b-8wmpk" Jan 20 17:13:28 crc kubenswrapper[4558]: I0120 17:13:28.495133 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9116eb6c-0630-4022-9c43-8c8793e08922-public-tls-certs\") pod \"barbican-api-6587ff9c6b-8wmpk\" (UID: \"9116eb6c-0630-4022-9c43-8c8793e08922\") " 
pod="openstack-kuttl-tests/barbican-api-6587ff9c6b-8wmpk" Jan 20 17:13:28 crc kubenswrapper[4558]: I0120 17:13:28.597357 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9116eb6c-0630-4022-9c43-8c8793e08922-public-tls-certs\") pod \"barbican-api-6587ff9c6b-8wmpk\" (UID: \"9116eb6c-0630-4022-9c43-8c8793e08922\") " pod="openstack-kuttl-tests/barbican-api-6587ff9c6b-8wmpk" Jan 20 17:13:28 crc kubenswrapper[4558]: I0120 17:13:28.597661 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9116eb6c-0630-4022-9c43-8c8793e08922-internal-tls-certs\") pod \"barbican-api-6587ff9c6b-8wmpk\" (UID: \"9116eb6c-0630-4022-9c43-8c8793e08922\") " pod="openstack-kuttl-tests/barbican-api-6587ff9c6b-8wmpk" Jan 20 17:13:28 crc kubenswrapper[4558]: I0120 17:13:28.598261 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9116eb6c-0630-4022-9c43-8c8793e08922-logs\") pod \"barbican-api-6587ff9c6b-8wmpk\" (UID: \"9116eb6c-0630-4022-9c43-8c8793e08922\") " pod="openstack-kuttl-tests/barbican-api-6587ff9c6b-8wmpk" Jan 20 17:13:28 crc kubenswrapper[4558]: I0120 17:13:28.598374 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9116eb6c-0630-4022-9c43-8c8793e08922-config-data\") pod \"barbican-api-6587ff9c6b-8wmpk\" (UID: \"9116eb6c-0630-4022-9c43-8c8793e08922\") " pod="openstack-kuttl-tests/barbican-api-6587ff9c6b-8wmpk" Jan 20 17:13:28 crc kubenswrapper[4558]: I0120 17:13:28.598509 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9116eb6c-0630-4022-9c43-8c8793e08922-config-data-custom\") pod \"barbican-api-6587ff9c6b-8wmpk\" (UID: \"9116eb6c-0630-4022-9c43-8c8793e08922\") " pod="openstack-kuttl-tests/barbican-api-6587ff9c6b-8wmpk" Jan 20 17:13:28 crc kubenswrapper[4558]: I0120 17:13:28.598600 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9116eb6c-0630-4022-9c43-8c8793e08922-combined-ca-bundle\") pod \"barbican-api-6587ff9c6b-8wmpk\" (UID: \"9116eb6c-0630-4022-9c43-8c8793e08922\") " pod="openstack-kuttl-tests/barbican-api-6587ff9c6b-8wmpk" Jan 20 17:13:28 crc kubenswrapper[4558]: I0120 17:13:28.598722 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9116eb6c-0630-4022-9c43-8c8793e08922-logs\") pod \"barbican-api-6587ff9c6b-8wmpk\" (UID: \"9116eb6c-0630-4022-9c43-8c8793e08922\") " pod="openstack-kuttl-tests/barbican-api-6587ff9c6b-8wmpk" Jan 20 17:13:28 crc kubenswrapper[4558]: I0120 17:13:28.598863 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qkt95\" (UniqueName: \"kubernetes.io/projected/9116eb6c-0630-4022-9c43-8c8793e08922-kube-api-access-qkt95\") pod \"barbican-api-6587ff9c6b-8wmpk\" (UID: \"9116eb6c-0630-4022-9c43-8c8793e08922\") " pod="openstack-kuttl-tests/barbican-api-6587ff9c6b-8wmpk" Jan 20 17:13:28 crc kubenswrapper[4558]: I0120 17:13:28.603333 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9116eb6c-0630-4022-9c43-8c8793e08922-config-data-custom\") pod \"barbican-api-6587ff9c6b-8wmpk\" (UID: 
\"9116eb6c-0630-4022-9c43-8c8793e08922\") " pod="openstack-kuttl-tests/barbican-api-6587ff9c6b-8wmpk" Jan 20 17:13:28 crc kubenswrapper[4558]: I0120 17:13:28.604193 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9116eb6c-0630-4022-9c43-8c8793e08922-public-tls-certs\") pod \"barbican-api-6587ff9c6b-8wmpk\" (UID: \"9116eb6c-0630-4022-9c43-8c8793e08922\") " pod="openstack-kuttl-tests/barbican-api-6587ff9c6b-8wmpk" Jan 20 17:13:28 crc kubenswrapper[4558]: I0120 17:13:28.604235 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9116eb6c-0630-4022-9c43-8c8793e08922-internal-tls-certs\") pod \"barbican-api-6587ff9c6b-8wmpk\" (UID: \"9116eb6c-0630-4022-9c43-8c8793e08922\") " pod="openstack-kuttl-tests/barbican-api-6587ff9c6b-8wmpk" Jan 20 17:13:28 crc kubenswrapper[4558]: I0120 17:13:28.607061 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9116eb6c-0630-4022-9c43-8c8793e08922-config-data\") pod \"barbican-api-6587ff9c6b-8wmpk\" (UID: \"9116eb6c-0630-4022-9c43-8c8793e08922\") " pod="openstack-kuttl-tests/barbican-api-6587ff9c6b-8wmpk" Jan 20 17:13:28 crc kubenswrapper[4558]: I0120 17:13:28.607991 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9116eb6c-0630-4022-9c43-8c8793e08922-combined-ca-bundle\") pod \"barbican-api-6587ff9c6b-8wmpk\" (UID: \"9116eb6c-0630-4022-9c43-8c8793e08922\") " pod="openstack-kuttl-tests/barbican-api-6587ff9c6b-8wmpk" Jan 20 17:13:28 crc kubenswrapper[4558]: I0120 17:13:28.616323 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qkt95\" (UniqueName: \"kubernetes.io/projected/9116eb6c-0630-4022-9c43-8c8793e08922-kube-api-access-qkt95\") pod \"barbican-api-6587ff9c6b-8wmpk\" (UID: \"9116eb6c-0630-4022-9c43-8c8793e08922\") " pod="openstack-kuttl-tests/barbican-api-6587ff9c6b-8wmpk" Jan 20 17:13:28 crc kubenswrapper[4558]: I0120 17:13:28.670457 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-api-0" podUID="187eeeab-5309-4d74-94fb-3ff6e79caca7" containerName="cinder-api-log" containerID="cri-o://7cde236b788c6e539d179d45e1a6bd302e864e40ee824e5900d677d5f15ca317" gracePeriod=30 Jan 20 17:13:28 crc kubenswrapper[4558]: I0120 17:13:28.670510 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-api-0" podUID="187eeeab-5309-4d74-94fb-3ff6e79caca7" containerName="cinder-api" containerID="cri-o://0c3878257515377d291ca58d12479b0e039a6ba8b5436223da4b4e79734fa49a" gracePeriod=30 Jan 20 17:13:28 crc kubenswrapper[4558]: I0120 17:13:28.674818 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:13:28 crc kubenswrapper[4558]: I0120 17:13:28.696784 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-api-6587ff9c6b-8wmpk" Jan 20 17:13:29 crc kubenswrapper[4558]: I0120 17:13:29.097326 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-wcsxk" Jan 20 17:13:29 crc kubenswrapper[4558]: I0120 17:13:29.206878 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/29ba41cf-902a-4ee1-88c9-d1db60f5e9ab-scripts\") pod \"29ba41cf-902a-4ee1-88c9-d1db60f5e9ab\" (UID: \"29ba41cf-902a-4ee1-88c9-d1db60f5e9ab\") " Jan 20 17:13:29 crc kubenswrapper[4558]: I0120 17:13:29.206926 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/29ba41cf-902a-4ee1-88c9-d1db60f5e9ab-credential-keys\") pod \"29ba41cf-902a-4ee1-88c9-d1db60f5e9ab\" (UID: \"29ba41cf-902a-4ee1-88c9-d1db60f5e9ab\") " Jan 20 17:13:29 crc kubenswrapper[4558]: I0120 17:13:29.207049 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bsjrl\" (UniqueName: \"kubernetes.io/projected/29ba41cf-902a-4ee1-88c9-d1db60f5e9ab-kube-api-access-bsjrl\") pod \"29ba41cf-902a-4ee1-88c9-d1db60f5e9ab\" (UID: \"29ba41cf-902a-4ee1-88c9-d1db60f5e9ab\") " Jan 20 17:13:29 crc kubenswrapper[4558]: I0120 17:13:29.207096 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29ba41cf-902a-4ee1-88c9-d1db60f5e9ab-config-data\") pod \"29ba41cf-902a-4ee1-88c9-d1db60f5e9ab\" (UID: \"29ba41cf-902a-4ee1-88c9-d1db60f5e9ab\") " Jan 20 17:13:29 crc kubenswrapper[4558]: I0120 17:13:29.207173 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29ba41cf-902a-4ee1-88c9-d1db60f5e9ab-combined-ca-bundle\") pod \"29ba41cf-902a-4ee1-88c9-d1db60f5e9ab\" (UID: \"29ba41cf-902a-4ee1-88c9-d1db60f5e9ab\") " Jan 20 17:13:29 crc kubenswrapper[4558]: I0120 17:13:29.207209 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/29ba41cf-902a-4ee1-88c9-d1db60f5e9ab-fernet-keys\") pod \"29ba41cf-902a-4ee1-88c9-d1db60f5e9ab\" (UID: \"29ba41cf-902a-4ee1-88c9-d1db60f5e9ab\") " Jan 20 17:13:29 crc kubenswrapper[4558]: I0120 17:13:29.218312 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29ba41cf-902a-4ee1-88c9-d1db60f5e9ab-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "29ba41cf-902a-4ee1-88c9-d1db60f5e9ab" (UID: "29ba41cf-902a-4ee1-88c9-d1db60f5e9ab"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:13:29 crc kubenswrapper[4558]: I0120 17:13:29.218334 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29ba41cf-902a-4ee1-88c9-d1db60f5e9ab-scripts" (OuterVolumeSpecName: "scripts") pod "29ba41cf-902a-4ee1-88c9-d1db60f5e9ab" (UID: "29ba41cf-902a-4ee1-88c9-d1db60f5e9ab"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:13:29 crc kubenswrapper[4558]: I0120 17:13:29.218374 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29ba41cf-902a-4ee1-88c9-d1db60f5e9ab-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "29ba41cf-902a-4ee1-88c9-d1db60f5e9ab" (UID: "29ba41cf-902a-4ee1-88c9-d1db60f5e9ab"). InnerVolumeSpecName "credential-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:13:29 crc kubenswrapper[4558]: I0120 17:13:29.218424 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/29ba41cf-902a-4ee1-88c9-d1db60f5e9ab-kube-api-access-bsjrl" (OuterVolumeSpecName: "kube-api-access-bsjrl") pod "29ba41cf-902a-4ee1-88c9-d1db60f5e9ab" (UID: "29ba41cf-902a-4ee1-88c9-d1db60f5e9ab"). InnerVolumeSpecName "kube-api-access-bsjrl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:13:29 crc kubenswrapper[4558]: I0120 17:13:29.228767 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29ba41cf-902a-4ee1-88c9-d1db60f5e9ab-config-data" (OuterVolumeSpecName: "config-data") pod "29ba41cf-902a-4ee1-88c9-d1db60f5e9ab" (UID: "29ba41cf-902a-4ee1-88c9-d1db60f5e9ab"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:13:29 crc kubenswrapper[4558]: I0120 17:13:29.243158 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29ba41cf-902a-4ee1-88c9-d1db60f5e9ab-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "29ba41cf-902a-4ee1-88c9-d1db60f5e9ab" (UID: "29ba41cf-902a-4ee1-88c9-d1db60f5e9ab"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:13:29 crc kubenswrapper[4558]: I0120 17:13:29.243738 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-api-6587ff9c6b-8wmpk"] Jan 20 17:13:29 crc kubenswrapper[4558]: W0120 17:13:29.245070 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9116eb6c_0630_4022_9c43_8c8793e08922.slice/crio-5f7acaada2e998fa3812813955fef087ffa0d3ed68263d891cd46b28367e673f WatchSource:0}: Error finding container 5f7acaada2e998fa3812813955fef087ffa0d3ed68263d891cd46b28367e673f: Status 404 returned error can't find the container with id 5f7acaada2e998fa3812813955fef087ffa0d3ed68263d891cd46b28367e673f Jan 20 17:13:29 crc kubenswrapper[4558]: I0120 17:13:29.309492 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/29ba41cf-902a-4ee1-88c9-d1db60f5e9ab-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:29 crc kubenswrapper[4558]: I0120 17:13:29.309525 4558 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/29ba41cf-902a-4ee1-88c9-d1db60f5e9ab-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:29 crc kubenswrapper[4558]: I0120 17:13:29.309542 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bsjrl\" (UniqueName: \"kubernetes.io/projected/29ba41cf-902a-4ee1-88c9-d1db60f5e9ab-kube-api-access-bsjrl\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:29 crc kubenswrapper[4558]: I0120 17:13:29.309555 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29ba41cf-902a-4ee1-88c9-d1db60f5e9ab-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:29 crc kubenswrapper[4558]: I0120 17:13:29.309567 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29ba41cf-902a-4ee1-88c9-d1db60f5e9ab-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:29 crc kubenswrapper[4558]: I0120 17:13:29.309579 4558 reconciler_common.go:293] "Volume detached 
for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/29ba41cf-902a-4ee1-88c9-d1db60f5e9ab-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:29 crc kubenswrapper[4558]: I0120 17:13:29.679873 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-6587ff9c6b-8wmpk" event={"ID":"9116eb6c-0630-4022-9c43-8c8793e08922","Type":"ContainerStarted","Data":"5f7acaada2e998fa3812813955fef087ffa0d3ed68263d891cd46b28367e673f"} Jan 20 17:13:29 crc kubenswrapper[4558]: I0120 17:13:29.682451 4558 generic.go:334] "Generic (PLEG): container finished" podID="187eeeab-5309-4d74-94fb-3ff6e79caca7" containerID="0c3878257515377d291ca58d12479b0e039a6ba8b5436223da4b4e79734fa49a" exitCode=0 Jan 20 17:13:29 crc kubenswrapper[4558]: I0120 17:13:29.682482 4558 generic.go:334] "Generic (PLEG): container finished" podID="187eeeab-5309-4d74-94fb-3ff6e79caca7" containerID="7cde236b788c6e539d179d45e1a6bd302e864e40ee824e5900d677d5f15ca317" exitCode=143 Jan 20 17:13:29 crc kubenswrapper[4558]: I0120 17:13:29.682534 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"187eeeab-5309-4d74-94fb-3ff6e79caca7","Type":"ContainerDied","Data":"0c3878257515377d291ca58d12479b0e039a6ba8b5436223da4b4e79734fa49a"} Jan 20 17:13:29 crc kubenswrapper[4558]: I0120 17:13:29.682597 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"187eeeab-5309-4d74-94fb-3ff6e79caca7","Type":"ContainerDied","Data":"7cde236b788c6e539d179d45e1a6bd302e864e40ee824e5900d677d5f15ca317"} Jan 20 17:13:29 crc kubenswrapper[4558]: I0120 17:13:29.684556 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-wcsxk" event={"ID":"29ba41cf-902a-4ee1-88c9-d1db60f5e9ab","Type":"ContainerDied","Data":"a044263419798af2b990cc6751997e70fb59c7de958d1fe23b64b6fec8924ce8"} Jan 20 17:13:29 crc kubenswrapper[4558]: I0120 17:13:29.684615 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-wcsxk" Jan 20 17:13:29 crc kubenswrapper[4558]: I0120 17:13:29.684626 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a044263419798af2b990cc6751997e70fb59c7de958d1fe23b64b6fec8924ce8" Jan 20 17:13:29 crc kubenswrapper[4558]: I0120 17:13:29.894833 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-db-sync-5wdp7" Jan 20 17:13:29 crc kubenswrapper[4558]: I0120 17:13:29.899502 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-57f6f664bd-7h7jh"] Jan 20 17:13:29 crc kubenswrapper[4558]: E0120 17:13:29.899922 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29ba41cf-902a-4ee1-88c9-d1db60f5e9ab" containerName="keystone-bootstrap" Jan 20 17:13:29 crc kubenswrapper[4558]: I0120 17:13:29.899944 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="29ba41cf-902a-4ee1-88c9-d1db60f5e9ab" containerName="keystone-bootstrap" Jan 20 17:13:29 crc kubenswrapper[4558]: E0120 17:13:29.899972 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ddaf057e-7323-49e4-a1c5-0f91ad0aaaaa" containerName="neutron-db-sync" Jan 20 17:13:29 crc kubenswrapper[4558]: I0120 17:13:29.899978 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ddaf057e-7323-49e4-a1c5-0f91ad0aaaaa" containerName="neutron-db-sync" Jan 20 17:13:29 crc kubenswrapper[4558]: I0120 17:13:29.900143 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="29ba41cf-902a-4ee1-88c9-d1db60f5e9ab" containerName="keystone-bootstrap" Jan 20 17:13:29 crc kubenswrapper[4558]: I0120 17:13:29.900179 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ddaf057e-7323-49e4-a1c5-0f91ad0aaaaa" containerName="neutron-db-sync" Jan 20 17:13:29 crc kubenswrapper[4558]: I0120 17:13:29.900849 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-57f6f664bd-7h7jh" Jan 20 17:13:29 crc kubenswrapper[4558]: I0120 17:13:29.902446 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone" Jan 20 17:13:29 crc kubenswrapper[4558]: I0120 17:13:29.903466 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-scripts" Jan 20 17:13:29 crc kubenswrapper[4558]: I0120 17:13:29.904284 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-keystone-internal-svc" Jan 20 17:13:29 crc kubenswrapper[4558]: I0120 17:13:29.904319 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-config-data" Jan 20 17:13:29 crc kubenswrapper[4558]: I0120 17:13:29.904801 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-keystone-public-svc" Jan 20 17:13:29 crc kubenswrapper[4558]: I0120 17:13:29.905467 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-keystone-dockercfg-kq484" Jan 20 17:13:29 crc kubenswrapper[4558]: I0120 17:13:29.916393 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-57f6f664bd-7h7jh"] Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.024443 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ddaf057e-7323-49e4-a1c5-0f91ad0aaaaa-combined-ca-bundle\") pod \"ddaf057e-7323-49e4-a1c5-0f91ad0aaaaa\" (UID: \"ddaf057e-7323-49e4-a1c5-0f91ad0aaaaa\") " Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.024552 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/ddaf057e-7323-49e4-a1c5-0f91ad0aaaaa-config\") pod \"ddaf057e-7323-49e4-a1c5-0f91ad0aaaaa\" (UID: 
\"ddaf057e-7323-49e4-a1c5-0f91ad0aaaaa\") " Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.024826 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vwrg2\" (UniqueName: \"kubernetes.io/projected/ddaf057e-7323-49e4-a1c5-0f91ad0aaaaa-kube-api-access-vwrg2\") pod \"ddaf057e-7323-49e4-a1c5-0f91ad0aaaaa\" (UID: \"ddaf057e-7323-49e4-a1c5-0f91ad0aaaaa\") " Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.025440 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5efe2c5d-bf53-462a-beb1-36cb940fc6a0-config-data\") pod \"keystone-57f6f664bd-7h7jh\" (UID: \"5efe2c5d-bf53-462a-beb1-36cb940fc6a0\") " pod="openstack-kuttl-tests/keystone-57f6f664bd-7h7jh" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.025513 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5efe2c5d-bf53-462a-beb1-36cb940fc6a0-scripts\") pod \"keystone-57f6f664bd-7h7jh\" (UID: \"5efe2c5d-bf53-462a-beb1-36cb940fc6a0\") " pod="openstack-kuttl-tests/keystone-57f6f664bd-7h7jh" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.025533 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nx687\" (UniqueName: \"kubernetes.io/projected/5efe2c5d-bf53-462a-beb1-36cb940fc6a0-kube-api-access-nx687\") pod \"keystone-57f6f664bd-7h7jh\" (UID: \"5efe2c5d-bf53-462a-beb1-36cb940fc6a0\") " pod="openstack-kuttl-tests/keystone-57f6f664bd-7h7jh" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.025574 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/5efe2c5d-bf53-462a-beb1-36cb940fc6a0-credential-keys\") pod \"keystone-57f6f664bd-7h7jh\" (UID: \"5efe2c5d-bf53-462a-beb1-36cb940fc6a0\") " pod="openstack-kuttl-tests/keystone-57f6f664bd-7h7jh" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.025693 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/5efe2c5d-bf53-462a-beb1-36cb940fc6a0-fernet-keys\") pod \"keystone-57f6f664bd-7h7jh\" (UID: \"5efe2c5d-bf53-462a-beb1-36cb940fc6a0\") " pod="openstack-kuttl-tests/keystone-57f6f664bd-7h7jh" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.025725 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5efe2c5d-bf53-462a-beb1-36cb940fc6a0-internal-tls-certs\") pod \"keystone-57f6f664bd-7h7jh\" (UID: \"5efe2c5d-bf53-462a-beb1-36cb940fc6a0\") " pod="openstack-kuttl-tests/keystone-57f6f664bd-7h7jh" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.025872 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5efe2c5d-bf53-462a-beb1-36cb940fc6a0-combined-ca-bundle\") pod \"keystone-57f6f664bd-7h7jh\" (UID: \"5efe2c5d-bf53-462a-beb1-36cb940fc6a0\") " pod="openstack-kuttl-tests/keystone-57f6f664bd-7h7jh" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.025950 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/5efe2c5d-bf53-462a-beb1-36cb940fc6a0-public-tls-certs\") pod \"keystone-57f6f664bd-7h7jh\" (UID: \"5efe2c5d-bf53-462a-beb1-36cb940fc6a0\") " pod="openstack-kuttl-tests/keystone-57f6f664bd-7h7jh" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.032044 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ddaf057e-7323-49e4-a1c5-0f91ad0aaaaa-kube-api-access-vwrg2" (OuterVolumeSpecName: "kube-api-access-vwrg2") pod "ddaf057e-7323-49e4-a1c5-0f91ad0aaaaa" (UID: "ddaf057e-7323-49e4-a1c5-0f91ad0aaaaa"). InnerVolumeSpecName "kube-api-access-vwrg2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.057488 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ddaf057e-7323-49e4-a1c5-0f91ad0aaaaa-config" (OuterVolumeSpecName: "config") pod "ddaf057e-7323-49e4-a1c5-0f91ad0aaaaa" (UID: "ddaf057e-7323-49e4-a1c5-0f91ad0aaaaa"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.060671 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ddaf057e-7323-49e4-a1c5-0f91ad0aaaaa-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ddaf057e-7323-49e4-a1c5-0f91ad0aaaaa" (UID: "ddaf057e-7323-49e4-a1c5-0f91ad0aaaaa"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.127179 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5efe2c5d-bf53-462a-beb1-36cb940fc6a0-internal-tls-certs\") pod \"keystone-57f6f664bd-7h7jh\" (UID: \"5efe2c5d-bf53-462a-beb1-36cb940fc6a0\") " pod="openstack-kuttl-tests/keystone-57f6f664bd-7h7jh" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.127262 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5efe2c5d-bf53-462a-beb1-36cb940fc6a0-combined-ca-bundle\") pod \"keystone-57f6f664bd-7h7jh\" (UID: \"5efe2c5d-bf53-462a-beb1-36cb940fc6a0\") " pod="openstack-kuttl-tests/keystone-57f6f664bd-7h7jh" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.127300 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5efe2c5d-bf53-462a-beb1-36cb940fc6a0-public-tls-certs\") pod \"keystone-57f6f664bd-7h7jh\" (UID: \"5efe2c5d-bf53-462a-beb1-36cb940fc6a0\") " pod="openstack-kuttl-tests/keystone-57f6f664bd-7h7jh" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.127338 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5efe2c5d-bf53-462a-beb1-36cb940fc6a0-config-data\") pod \"keystone-57f6f664bd-7h7jh\" (UID: \"5efe2c5d-bf53-462a-beb1-36cb940fc6a0\") " pod="openstack-kuttl-tests/keystone-57f6f664bd-7h7jh" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.127365 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5efe2c5d-bf53-462a-beb1-36cb940fc6a0-scripts\") pod \"keystone-57f6f664bd-7h7jh\" (UID: \"5efe2c5d-bf53-462a-beb1-36cb940fc6a0\") " pod="openstack-kuttl-tests/keystone-57f6f664bd-7h7jh" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 
17:13:30.127382 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nx687\" (UniqueName: \"kubernetes.io/projected/5efe2c5d-bf53-462a-beb1-36cb940fc6a0-kube-api-access-nx687\") pod \"keystone-57f6f664bd-7h7jh\" (UID: \"5efe2c5d-bf53-462a-beb1-36cb940fc6a0\") " pod="openstack-kuttl-tests/keystone-57f6f664bd-7h7jh" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.127399 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/5efe2c5d-bf53-462a-beb1-36cb940fc6a0-credential-keys\") pod \"keystone-57f6f664bd-7h7jh\" (UID: \"5efe2c5d-bf53-462a-beb1-36cb940fc6a0\") " pod="openstack-kuttl-tests/keystone-57f6f664bd-7h7jh" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.127450 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/5efe2c5d-bf53-462a-beb1-36cb940fc6a0-fernet-keys\") pod \"keystone-57f6f664bd-7h7jh\" (UID: \"5efe2c5d-bf53-462a-beb1-36cb940fc6a0\") " pod="openstack-kuttl-tests/keystone-57f6f664bd-7h7jh" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.127493 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vwrg2\" (UniqueName: \"kubernetes.io/projected/ddaf057e-7323-49e4-a1c5-0f91ad0aaaaa-kube-api-access-vwrg2\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.127504 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ddaf057e-7323-49e4-a1c5-0f91ad0aaaaa-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.127514 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/ddaf057e-7323-49e4-a1c5-0f91ad0aaaaa-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.131094 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5efe2c5d-bf53-462a-beb1-36cb940fc6a0-scripts\") pod \"keystone-57f6f664bd-7h7jh\" (UID: \"5efe2c5d-bf53-462a-beb1-36cb940fc6a0\") " pod="openstack-kuttl-tests/keystone-57f6f664bd-7h7jh" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.131768 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5efe2c5d-bf53-462a-beb1-36cb940fc6a0-combined-ca-bundle\") pod \"keystone-57f6f664bd-7h7jh\" (UID: \"5efe2c5d-bf53-462a-beb1-36cb940fc6a0\") " pod="openstack-kuttl-tests/keystone-57f6f664bd-7h7jh" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.132144 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5efe2c5d-bf53-462a-beb1-36cb940fc6a0-public-tls-certs\") pod \"keystone-57f6f664bd-7h7jh\" (UID: \"5efe2c5d-bf53-462a-beb1-36cb940fc6a0\") " pod="openstack-kuttl-tests/keystone-57f6f664bd-7h7jh" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.132001 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/5efe2c5d-bf53-462a-beb1-36cb940fc6a0-fernet-keys\") pod \"keystone-57f6f664bd-7h7jh\" (UID: \"5efe2c5d-bf53-462a-beb1-36cb940fc6a0\") " pod="openstack-kuttl-tests/keystone-57f6f664bd-7h7jh" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.132521 4558 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5efe2c5d-bf53-462a-beb1-36cb940fc6a0-internal-tls-certs\") pod \"keystone-57f6f664bd-7h7jh\" (UID: \"5efe2c5d-bf53-462a-beb1-36cb940fc6a0\") " pod="openstack-kuttl-tests/keystone-57f6f664bd-7h7jh" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.133194 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5efe2c5d-bf53-462a-beb1-36cb940fc6a0-config-data\") pod \"keystone-57f6f664bd-7h7jh\" (UID: \"5efe2c5d-bf53-462a-beb1-36cb940fc6a0\") " pod="openstack-kuttl-tests/keystone-57f6f664bd-7h7jh" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.133537 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/5efe2c5d-bf53-462a-beb1-36cb940fc6a0-credential-keys\") pod \"keystone-57f6f664bd-7h7jh\" (UID: \"5efe2c5d-bf53-462a-beb1-36cb940fc6a0\") " pod="openstack-kuttl-tests/keystone-57f6f664bd-7h7jh" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.143133 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nx687\" (UniqueName: \"kubernetes.io/projected/5efe2c5d-bf53-462a-beb1-36cb940fc6a0-kube-api-access-nx687\") pod \"keystone-57f6f664bd-7h7jh\" (UID: \"5efe2c5d-bf53-462a-beb1-36cb940fc6a0\") " pod="openstack-kuttl-tests/keystone-57f6f664bd-7h7jh" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.214194 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-57f6f664bd-7h7jh" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.292499 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.348048 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.348112 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.386232 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.391324 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.434265 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/187eeeab-5309-4d74-94fb-3ff6e79caca7-combined-ca-bundle\") pod \"187eeeab-5309-4d74-94fb-3ff6e79caca7\" (UID: \"187eeeab-5309-4d74-94fb-3ff6e79caca7\") " Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.434446 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/187eeeab-5309-4d74-94fb-3ff6e79caca7-etc-machine-id\") pod \"187eeeab-5309-4d74-94fb-3ff6e79caca7\" (UID: \"187eeeab-5309-4d74-94fb-3ff6e79caca7\") " Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.434508 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/187eeeab-5309-4d74-94fb-3ff6e79caca7-config-data-custom\") pod \"187eeeab-5309-4d74-94fb-3ff6e79caca7\" (UID: \"187eeeab-5309-4d74-94fb-3ff6e79caca7\") " Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.434549 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/187eeeab-5309-4d74-94fb-3ff6e79caca7-scripts\") pod \"187eeeab-5309-4d74-94fb-3ff6e79caca7\" (UID: \"187eeeab-5309-4d74-94fb-3ff6e79caca7\") " Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.434576 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/187eeeab-5309-4d74-94fb-3ff6e79caca7-config-data\") pod \"187eeeab-5309-4d74-94fb-3ff6e79caca7\" (UID: \"187eeeab-5309-4d74-94fb-3ff6e79caca7\") " Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.434613 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/187eeeab-5309-4d74-94fb-3ff6e79caca7-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "187eeeab-5309-4d74-94fb-3ff6e79caca7" (UID: "187eeeab-5309-4d74-94fb-3ff6e79caca7"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.434629 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qndpk\" (UniqueName: \"kubernetes.io/projected/187eeeab-5309-4d74-94fb-3ff6e79caca7-kube-api-access-qndpk\") pod \"187eeeab-5309-4d74-94fb-3ff6e79caca7\" (UID: \"187eeeab-5309-4d74-94fb-3ff6e79caca7\") " Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.434764 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/187eeeab-5309-4d74-94fb-3ff6e79caca7-logs\") pod \"187eeeab-5309-4d74-94fb-3ff6e79caca7\" (UID: \"187eeeab-5309-4d74-94fb-3ff6e79caca7\") " Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.435670 4558 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/187eeeab-5309-4d74-94fb-3ff6e79caca7-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.435990 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/187eeeab-5309-4d74-94fb-3ff6e79caca7-logs" (OuterVolumeSpecName: "logs") pod "187eeeab-5309-4d74-94fb-3ff6e79caca7" (UID: "187eeeab-5309-4d74-94fb-3ff6e79caca7"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.442305 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/187eeeab-5309-4d74-94fb-3ff6e79caca7-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "187eeeab-5309-4d74-94fb-3ff6e79caca7" (UID: "187eeeab-5309-4d74-94fb-3ff6e79caca7"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.442408 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/187eeeab-5309-4d74-94fb-3ff6e79caca7-scripts" (OuterVolumeSpecName: "scripts") pod "187eeeab-5309-4d74-94fb-3ff6e79caca7" (UID: "187eeeab-5309-4d74-94fb-3ff6e79caca7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.442518 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/187eeeab-5309-4d74-94fb-3ff6e79caca7-kube-api-access-qndpk" (OuterVolumeSpecName: "kube-api-access-qndpk") pod "187eeeab-5309-4d74-94fb-3ff6e79caca7" (UID: "187eeeab-5309-4d74-94fb-3ff6e79caca7"). InnerVolumeSpecName "kube-api-access-qndpk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.472512 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/187eeeab-5309-4d74-94fb-3ff6e79caca7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "187eeeab-5309-4d74-94fb-3ff6e79caca7" (UID: "187eeeab-5309-4d74-94fb-3ff6e79caca7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.509026 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/187eeeab-5309-4d74-94fb-3ff6e79caca7-config-data" (OuterVolumeSpecName: "config-data") pod "187eeeab-5309-4d74-94fb-3ff6e79caca7" (UID: "187eeeab-5309-4d74-94fb-3ff6e79caca7"). 
InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.538434 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/187eeeab-5309-4d74-94fb-3ff6e79caca7-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.538465 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/187eeeab-5309-4d74-94fb-3ff6e79caca7-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.538474 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/187eeeab-5309-4d74-94fb-3ff6e79caca7-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.538484 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qndpk\" (UniqueName: \"kubernetes.io/projected/187eeeab-5309-4d74-94fb-3ff6e79caca7-kube-api-access-qndpk\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.538494 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/187eeeab-5309-4d74-94fb-3ff6e79caca7-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.538504 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/187eeeab-5309-4d74-94fb-3ff6e79caca7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.652597 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-57f6f664bd-7h7jh"] Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.720802 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-sync-5wdp7" event={"ID":"ddaf057e-7323-49e4-a1c5-0f91ad0aaaaa","Type":"ContainerDied","Data":"3d8858e5c49b5ab857b4b2de7ded6058c5738b29048b63692b6c579705a8d102"} Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.720840 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-db-sync-5wdp7" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.720854 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3d8858e5c49b5ab857b4b2de7ded6058c5738b29048b63692b6c579705a8d102" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.727586 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-6587ff9c6b-8wmpk" event={"ID":"9116eb6c-0630-4022-9c43-8c8793e08922","Type":"ContainerStarted","Data":"30ace2ed0b9d212ff18f8956aaa8ea0cdaa3a1c7b26e2fe661e7f78b0f3795a2"} Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.727642 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-6587ff9c6b-8wmpk" event={"ID":"9116eb6c-0630-4022-9c43-8c8793e08922","Type":"ContainerStarted","Data":"6da9a00e2025f693c02a7cb6b25aca4e9c3ecc4fc8e7c3494f4026506bdec6c1"} Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.727919 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/barbican-api-6587ff9c6b-8wmpk" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.727977 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/barbican-api-6587ff9c6b-8wmpk" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.729910 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-57f6f664bd-7h7jh" event={"ID":"5efe2c5d-bf53-462a-beb1-36cb940fc6a0","Type":"ContainerStarted","Data":"c886f8dc54026b98639367c19491fdbfd34c24d6c8fda7aeb5c732e54143bc46"} Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.734623 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"187eeeab-5309-4d74-94fb-3ff6e79caca7","Type":"ContainerDied","Data":"2387b6137564d3dbf1d7b355cdca010ebb8096e22cc205591a7b540682fa85c7"} Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.734696 4558 scope.go:117] "RemoveContainer" containerID="0c3878257515377d291ca58d12479b0e039a6ba8b5436223da4b4e79734fa49a" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.734720 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.736092 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.736128 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.751306 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-api-6587ff9c6b-8wmpk" podStartSLOduration=2.751295169 podStartE2EDuration="2.751295169s" podCreationTimestamp="2026-01-20 17:13:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:13:30.746988341 +0000 UTC m=+1904.507326297" watchObservedRunningTime="2026-01-20 17:13:30.751295169 +0000 UTC m=+1904.511633136" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.783138 4558 scope.go:117] "RemoveContainer" containerID="7cde236b788c6e539d179d45e1a6bd302e864e40ee824e5900d677d5f15ca317" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.787218 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.804790 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.827740 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:13:30 crc kubenswrapper[4558]: E0120 17:13:30.828261 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="187eeeab-5309-4d74-94fb-3ff6e79caca7" containerName="cinder-api" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.828284 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="187eeeab-5309-4d74-94fb-3ff6e79caca7" containerName="cinder-api" Jan 20 17:13:30 crc kubenswrapper[4558]: E0120 17:13:30.828320 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="187eeeab-5309-4d74-94fb-3ff6e79caca7" containerName="cinder-api-log" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.828327 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="187eeeab-5309-4d74-94fb-3ff6e79caca7" containerName="cinder-api-log" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.828543 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="187eeeab-5309-4d74-94fb-3ff6e79caca7" containerName="cinder-api-log" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.828558 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="187eeeab-5309-4d74-94fb-3ff6e79caca7" containerName="cinder-api" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.829683 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.833629 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-cinder-public-svc" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.833667 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-cinder-internal-svc" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.833789 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-api-config-data" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.838074 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.950934 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.951230 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-logs\") pod \"cinder-api-0\" (UID: \"f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.951271 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-public-tls-certs\") pod \"cinder-api-0\" (UID: \"f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.951317 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-etc-machine-id\") pod \"cinder-api-0\" (UID: \"f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.951432 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.951530 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zmfdr\" (UniqueName: \"kubernetes.io/projected/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-kube-api-access-zmfdr\") pod \"cinder-api-0\" (UID: \"f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.951656 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-config-data-custom\") pod \"cinder-api-0\" (UID: \"f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.951693 
4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-config-data\") pod \"cinder-api-0\" (UID: \"f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:13:30 crc kubenswrapper[4558]: I0120 17:13:30.951749 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-scripts\") pod \"cinder-api-0\" (UID: \"f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:13:31 crc kubenswrapper[4558]: I0120 17:13:31.054343 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-scripts\") pod \"cinder-api-0\" (UID: \"f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:13:31 crc kubenswrapper[4558]: I0120 17:13:31.054471 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:13:31 crc kubenswrapper[4558]: I0120 17:13:31.054500 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-logs\") pod \"cinder-api-0\" (UID: \"f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:13:31 crc kubenswrapper[4558]: I0120 17:13:31.054524 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-public-tls-certs\") pod \"cinder-api-0\" (UID: \"f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:13:31 crc kubenswrapper[4558]: I0120 17:13:31.054550 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-etc-machine-id\") pod \"cinder-api-0\" (UID: \"f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:13:31 crc kubenswrapper[4558]: I0120 17:13:31.054621 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:13:31 crc kubenswrapper[4558]: I0120 17:13:31.054643 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zmfdr\" (UniqueName: \"kubernetes.io/projected/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-kube-api-access-zmfdr\") pod \"cinder-api-0\" (UID: \"f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:13:31 crc kubenswrapper[4558]: I0120 17:13:31.054669 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-config-data-custom\") pod \"cinder-api-0\" (UID: 
\"f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:13:31 crc kubenswrapper[4558]: I0120 17:13:31.054689 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-config-data\") pod \"cinder-api-0\" (UID: \"f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:13:31 crc kubenswrapper[4558]: I0120 17:13:31.054847 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-etc-machine-id\") pod \"cinder-api-0\" (UID: \"f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:13:31 crc kubenswrapper[4558]: I0120 17:13:31.055218 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-logs\") pod \"cinder-api-0\" (UID: \"f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:13:31 crc kubenswrapper[4558]: I0120 17:13:31.060585 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:13:31 crc kubenswrapper[4558]: I0120 17:13:31.061070 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-config-data-custom\") pod \"cinder-api-0\" (UID: \"f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:13:31 crc kubenswrapper[4558]: I0120 17:13:31.061662 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:13:31 crc kubenswrapper[4558]: I0120 17:13:31.062044 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-public-tls-certs\") pod \"cinder-api-0\" (UID: \"f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:13:31 crc kubenswrapper[4558]: I0120 17:13:31.070844 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-config-data\") pod \"cinder-api-0\" (UID: \"f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:13:31 crc kubenswrapper[4558]: I0120 17:13:31.071306 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-scripts\") pod \"cinder-api-0\" (UID: \"f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:13:31 crc kubenswrapper[4558]: I0120 17:13:31.078936 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zmfdr\" (UniqueName: 
\"kubernetes.io/projected/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-kube-api-access-zmfdr\") pod \"cinder-api-0\" (UID: \"f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:13:31 crc kubenswrapper[4558]: I0120 17:13:31.117503 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-57ff5684f-lrvnz"] Jan 20 17:13:31 crc kubenswrapper[4558]: I0120 17:13:31.118939 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-57ff5684f-lrvnz" Jan 20 17:13:31 crc kubenswrapper[4558]: I0120 17:13:31.122196 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-config" Jan 20 17:13:31 crc kubenswrapper[4558]: I0120 17:13:31.122257 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-httpd-config" Jan 20 17:13:31 crc kubenswrapper[4558]: I0120 17:13:31.122195 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-neutron-dockercfg-pk75l" Jan 20 17:13:31 crc kubenswrapper[4558]: I0120 17:13:31.122705 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-neutron-ovndbs" Jan 20 17:13:31 crc kubenswrapper[4558]: I0120 17:13:31.140099 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-57ff5684f-lrvnz"] Jan 20 17:13:31 crc kubenswrapper[4558]: I0120 17:13:31.145799 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:13:31 crc kubenswrapper[4558]: I0120 17:13:31.259415 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/74a94aaa-09bf-40a6-939b-14cf47decbfb-ovndb-tls-certs\") pod \"neutron-57ff5684f-lrvnz\" (UID: \"74a94aaa-09bf-40a6-939b-14cf47decbfb\") " pod="openstack-kuttl-tests/neutron-57ff5684f-lrvnz" Jan 20 17:13:31 crc kubenswrapper[4558]: I0120 17:13:31.259527 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/74a94aaa-09bf-40a6-939b-14cf47decbfb-combined-ca-bundle\") pod \"neutron-57ff5684f-lrvnz\" (UID: \"74a94aaa-09bf-40a6-939b-14cf47decbfb\") " pod="openstack-kuttl-tests/neutron-57ff5684f-lrvnz" Jan 20 17:13:31 crc kubenswrapper[4558]: I0120 17:13:31.259556 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/74a94aaa-09bf-40a6-939b-14cf47decbfb-config\") pod \"neutron-57ff5684f-lrvnz\" (UID: \"74a94aaa-09bf-40a6-939b-14cf47decbfb\") " pod="openstack-kuttl-tests/neutron-57ff5684f-lrvnz" Jan 20 17:13:31 crc kubenswrapper[4558]: I0120 17:13:31.259596 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-79cxp\" (UniqueName: \"kubernetes.io/projected/74a94aaa-09bf-40a6-939b-14cf47decbfb-kube-api-access-79cxp\") pod \"neutron-57ff5684f-lrvnz\" (UID: \"74a94aaa-09bf-40a6-939b-14cf47decbfb\") " pod="openstack-kuttl-tests/neutron-57ff5684f-lrvnz" Jan 20 17:13:31 crc kubenswrapper[4558]: I0120 17:13:31.259663 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/74a94aaa-09bf-40a6-939b-14cf47decbfb-httpd-config\") pod 
\"neutron-57ff5684f-lrvnz\" (UID: \"74a94aaa-09bf-40a6-939b-14cf47decbfb\") " pod="openstack-kuttl-tests/neutron-57ff5684f-lrvnz" Jan 20 17:13:31 crc kubenswrapper[4558]: I0120 17:13:31.361294 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/74a94aaa-09bf-40a6-939b-14cf47decbfb-ovndb-tls-certs\") pod \"neutron-57ff5684f-lrvnz\" (UID: \"74a94aaa-09bf-40a6-939b-14cf47decbfb\") " pod="openstack-kuttl-tests/neutron-57ff5684f-lrvnz" Jan 20 17:13:31 crc kubenswrapper[4558]: I0120 17:13:31.361670 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/74a94aaa-09bf-40a6-939b-14cf47decbfb-combined-ca-bundle\") pod \"neutron-57ff5684f-lrvnz\" (UID: \"74a94aaa-09bf-40a6-939b-14cf47decbfb\") " pod="openstack-kuttl-tests/neutron-57ff5684f-lrvnz" Jan 20 17:13:31 crc kubenswrapper[4558]: I0120 17:13:31.361695 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/74a94aaa-09bf-40a6-939b-14cf47decbfb-config\") pod \"neutron-57ff5684f-lrvnz\" (UID: \"74a94aaa-09bf-40a6-939b-14cf47decbfb\") " pod="openstack-kuttl-tests/neutron-57ff5684f-lrvnz" Jan 20 17:13:31 crc kubenswrapper[4558]: I0120 17:13:31.361744 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-79cxp\" (UniqueName: \"kubernetes.io/projected/74a94aaa-09bf-40a6-939b-14cf47decbfb-kube-api-access-79cxp\") pod \"neutron-57ff5684f-lrvnz\" (UID: \"74a94aaa-09bf-40a6-939b-14cf47decbfb\") " pod="openstack-kuttl-tests/neutron-57ff5684f-lrvnz" Jan 20 17:13:31 crc kubenswrapper[4558]: I0120 17:13:31.361828 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/74a94aaa-09bf-40a6-939b-14cf47decbfb-httpd-config\") pod \"neutron-57ff5684f-lrvnz\" (UID: \"74a94aaa-09bf-40a6-939b-14cf47decbfb\") " pod="openstack-kuttl-tests/neutron-57ff5684f-lrvnz" Jan 20 17:13:31 crc kubenswrapper[4558]: I0120 17:13:31.369139 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/74a94aaa-09bf-40a6-939b-14cf47decbfb-combined-ca-bundle\") pod \"neutron-57ff5684f-lrvnz\" (UID: \"74a94aaa-09bf-40a6-939b-14cf47decbfb\") " pod="openstack-kuttl-tests/neutron-57ff5684f-lrvnz" Jan 20 17:13:31 crc kubenswrapper[4558]: I0120 17:13:31.371831 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/74a94aaa-09bf-40a6-939b-14cf47decbfb-config\") pod \"neutron-57ff5684f-lrvnz\" (UID: \"74a94aaa-09bf-40a6-939b-14cf47decbfb\") " pod="openstack-kuttl-tests/neutron-57ff5684f-lrvnz" Jan 20 17:13:31 crc kubenswrapper[4558]: I0120 17:13:31.373424 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/74a94aaa-09bf-40a6-939b-14cf47decbfb-httpd-config\") pod \"neutron-57ff5684f-lrvnz\" (UID: \"74a94aaa-09bf-40a6-939b-14cf47decbfb\") " pod="openstack-kuttl-tests/neutron-57ff5684f-lrvnz" Jan 20 17:13:31 crc kubenswrapper[4558]: I0120 17:13:31.380707 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-79cxp\" (UniqueName: \"kubernetes.io/projected/74a94aaa-09bf-40a6-939b-14cf47decbfb-kube-api-access-79cxp\") pod \"neutron-57ff5684f-lrvnz\" (UID: \"74a94aaa-09bf-40a6-939b-14cf47decbfb\") " 
pod="openstack-kuttl-tests/neutron-57ff5684f-lrvnz" Jan 20 17:13:31 crc kubenswrapper[4558]: I0120 17:13:31.385856 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/74a94aaa-09bf-40a6-939b-14cf47decbfb-ovndb-tls-certs\") pod \"neutron-57ff5684f-lrvnz\" (UID: \"74a94aaa-09bf-40a6-939b-14cf47decbfb\") " pod="openstack-kuttl-tests/neutron-57ff5684f-lrvnz" Jan 20 17:13:31 crc kubenswrapper[4558]: I0120 17:13:31.437749 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-57ff5684f-lrvnz" Jan 20 17:13:31 crc kubenswrapper[4558]: I0120 17:13:31.582025 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:13:31 crc kubenswrapper[4558]: I0120 17:13:31.616311 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-smvr5"] Jan 20 17:13:31 crc kubenswrapper[4558]: I0120 17:13:31.618258 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-smvr5" Jan 20 17:13:31 crc kubenswrapper[4558]: I0120 17:13:31.630187 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-smvr5"] Jan 20 17:13:31 crc kubenswrapper[4558]: I0120 17:13:31.771890 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9946343f-cd2f-4a21-8422-a13633a8aa60-utilities\") pod \"community-operators-smvr5\" (UID: \"9946343f-cd2f-4a21-8422-a13633a8aa60\") " pod="openshift-marketplace/community-operators-smvr5" Jan 20 17:13:31 crc kubenswrapper[4558]: I0120 17:13:31.771965 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jsbcn\" (UniqueName: \"kubernetes.io/projected/9946343f-cd2f-4a21-8422-a13633a8aa60-kube-api-access-jsbcn\") pod \"community-operators-smvr5\" (UID: \"9946343f-cd2f-4a21-8422-a13633a8aa60\") " pod="openshift-marketplace/community-operators-smvr5" Jan 20 17:13:31 crc kubenswrapper[4558]: I0120 17:13:31.772028 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9946343f-cd2f-4a21-8422-a13633a8aa60-catalog-content\") pod \"community-operators-smvr5\" (UID: \"9946343f-cd2f-4a21-8422-a13633a8aa60\") " pod="openshift-marketplace/community-operators-smvr5" Jan 20 17:13:31 crc kubenswrapper[4558]: I0120 17:13:31.803434 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a","Type":"ContainerStarted","Data":"1e8c549655f4738930aa4cb22db8a5096d0978d8d183b8feadb66c5d3224ddeb"} Jan 20 17:13:31 crc kubenswrapper[4558]: I0120 17:13:31.819150 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-57f6f664bd-7h7jh" event={"ID":"5efe2c5d-bf53-462a-beb1-36cb940fc6a0","Type":"ContainerStarted","Data":"9ebec879cfd3dcec199dea1e7738ccfa45af9b5be157ef55c3e4756d96cca6ba"} Jan 20 17:13:31 crc kubenswrapper[4558]: I0120 17:13:31.819721 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/keystone-57f6f664bd-7h7jh" Jan 20 17:13:31 crc kubenswrapper[4558]: I0120 17:13:31.853687 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack-kuttl-tests/keystone-57f6f664bd-7h7jh" podStartSLOduration=2.853668564 podStartE2EDuration="2.853668564s" podCreationTimestamp="2026-01-20 17:13:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:13:31.847701403 +0000 UTC m=+1905.608039369" watchObservedRunningTime="2026-01-20 17:13:31.853668564 +0000 UTC m=+1905.614006531" Jan 20 17:13:31 crc kubenswrapper[4558]: I0120 17:13:31.879478 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9946343f-cd2f-4a21-8422-a13633a8aa60-utilities\") pod \"community-operators-smvr5\" (UID: \"9946343f-cd2f-4a21-8422-a13633a8aa60\") " pod="openshift-marketplace/community-operators-smvr5" Jan 20 17:13:31 crc kubenswrapper[4558]: I0120 17:13:31.879562 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jsbcn\" (UniqueName: \"kubernetes.io/projected/9946343f-cd2f-4a21-8422-a13633a8aa60-kube-api-access-jsbcn\") pod \"community-operators-smvr5\" (UID: \"9946343f-cd2f-4a21-8422-a13633a8aa60\") " pod="openshift-marketplace/community-operators-smvr5" Jan 20 17:13:31 crc kubenswrapper[4558]: I0120 17:13:31.879621 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9946343f-cd2f-4a21-8422-a13633a8aa60-catalog-content\") pod \"community-operators-smvr5\" (UID: \"9946343f-cd2f-4a21-8422-a13633a8aa60\") " pod="openshift-marketplace/community-operators-smvr5" Jan 20 17:13:31 crc kubenswrapper[4558]: I0120 17:13:31.880652 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9946343f-cd2f-4a21-8422-a13633a8aa60-catalog-content\") pod \"community-operators-smvr5\" (UID: \"9946343f-cd2f-4a21-8422-a13633a8aa60\") " pod="openshift-marketplace/community-operators-smvr5" Jan 20 17:13:31 crc kubenswrapper[4558]: I0120 17:13:31.880876 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9946343f-cd2f-4a21-8422-a13633a8aa60-utilities\") pod \"community-operators-smvr5\" (UID: \"9946343f-cd2f-4a21-8422-a13633a8aa60\") " pod="openshift-marketplace/community-operators-smvr5" Jan 20 17:13:31 crc kubenswrapper[4558]: I0120 17:13:31.908920 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jsbcn\" (UniqueName: \"kubernetes.io/projected/9946343f-cd2f-4a21-8422-a13633a8aa60-kube-api-access-jsbcn\") pod \"community-operators-smvr5\" (UID: \"9946343f-cd2f-4a21-8422-a13633a8aa60\") " pod="openshift-marketplace/community-operators-smvr5" Jan 20 17:13:31 crc kubenswrapper[4558]: I0120 17:13:31.985792 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/watcher-api-0" Jan 20 17:13:31 crc kubenswrapper[4558]: I0120 17:13:31.998539 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/watcher-api-0" Jan 20 17:13:32 crc kubenswrapper[4558]: I0120 17:13:32.000655 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-smvr5" Jan 20 17:13:32 crc kubenswrapper[4558]: I0120 17:13:32.006154 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/watcher-applier-0" Jan 20 17:13:32 crc kubenswrapper[4558]: I0120 17:13:32.006204 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/watcher-applier-0" Jan 20 17:13:32 crc kubenswrapper[4558]: I0120 17:13:32.061333 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/watcher-decision-engine-0" Jan 20 17:13:32 crc kubenswrapper[4558]: I0120 17:13:32.086810 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/watcher-applier-0" Jan 20 17:13:32 crc kubenswrapper[4558]: I0120 17:13:32.133493 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/watcher-decision-engine-0" Jan 20 17:13:32 crc kubenswrapper[4558]: I0120 17:13:32.234725 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-57ff5684f-lrvnz"] Jan 20 17:13:32 crc kubenswrapper[4558]: I0120 17:13:32.455924 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-smvr5"] Jan 20 17:13:32 crc kubenswrapper[4558]: W0120 17:13:32.470444 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9946343f_cd2f_4a21_8422_a13633a8aa60.slice/crio-4641d33d1404e0dd6e8a024560ab1fd23960e0c378669ce29ab26ff7f4c2027f WatchSource:0}: Error finding container 4641d33d1404e0dd6e8a024560ab1fd23960e0c378669ce29ab26ff7f4c2027f: Status 404 returned error can't find the container with id 4641d33d1404e0dd6e8a024560ab1fd23960e0c378669ce29ab26ff7f4c2027f Jan 20 17:13:32 crc kubenswrapper[4558]: I0120 17:13:32.597120 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="187eeeab-5309-4d74-94fb-3ff6e79caca7" path="/var/lib/kubelet/pods/187eeeab-5309-4d74-94fb-3ff6e79caca7/volumes" Jan 20 17:13:32 crc kubenswrapper[4558]: I0120 17:13:32.838806 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a","Type":"ContainerStarted","Data":"1e85d135d8cc85e410ab61877d8a2508b9c4604a7dac00b92f5266230868a2eb"} Jan 20 17:13:32 crc kubenswrapper[4558]: I0120 17:13:32.847669 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-57ff5684f-lrvnz" event={"ID":"74a94aaa-09bf-40a6-939b-14cf47decbfb","Type":"ContainerStarted","Data":"b3957ac5e71ef7db311880371641f2fdc7bcab0ed8164d5236e8c8bf10f094c9"} Jan 20 17:13:32 crc kubenswrapper[4558]: I0120 17:13:32.847713 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-57ff5684f-lrvnz" event={"ID":"74a94aaa-09bf-40a6-939b-14cf47decbfb","Type":"ContainerStarted","Data":"12ebfb93d71f9739efd05242bab18ae4e145132aec048fa8002727c1b17ad130"} Jan 20 17:13:32 crc kubenswrapper[4558]: I0120 17:13:32.860408 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-smvr5" event={"ID":"9946343f-cd2f-4a21-8422-a13633a8aa60","Type":"ContainerStarted","Data":"4641d33d1404e0dd6e8a024560ab1fd23960e0c378669ce29ab26ff7f4c2027f"} Jan 20 17:13:32 crc kubenswrapper[4558]: I0120 17:13:32.861978 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack-kuttl-tests/watcher-decision-engine-0" Jan 20 17:13:32 crc kubenswrapper[4558]: I0120 17:13:32.871454 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/watcher-api-0" Jan 20 17:13:32 crc kubenswrapper[4558]: I0120 17:13:32.915793 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/watcher-decision-engine-0" Jan 20 17:13:32 crc kubenswrapper[4558]: I0120 17:13:32.921916 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/watcher-applier-0" Jan 20 17:13:33 crc kubenswrapper[4558]: I0120 17:13:33.259703 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:13:33 crc kubenswrapper[4558]: I0120 17:13:33.259849 4558 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 20 17:13:33 crc kubenswrapper[4558]: I0120 17:13:33.430912 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-857895d98b-n66x9"] Jan 20 17:13:33 crc kubenswrapper[4558]: I0120 17:13:33.432519 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-857895d98b-n66x9" Jan 20 17:13:33 crc kubenswrapper[4558]: I0120 17:13:33.441184 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-neutron-internal-svc" Jan 20 17:13:33 crc kubenswrapper[4558]: I0120 17:13:33.441286 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-neutron-public-svc" Jan 20 17:13:33 crc kubenswrapper[4558]: I0120 17:13:33.442352 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-857895d98b-n66x9"] Jan 20 17:13:33 crc kubenswrapper[4558]: I0120 17:13:33.523803 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32c4dbf1-2dfe-44a7-be21-1415017ca20c-combined-ca-bundle\") pod \"neutron-857895d98b-n66x9\" (UID: \"32c4dbf1-2dfe-44a7-be21-1415017ca20c\") " pod="openstack-kuttl-tests/neutron-857895d98b-n66x9" Jan 20 17:13:33 crc kubenswrapper[4558]: I0120 17:13:33.523863 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/32c4dbf1-2dfe-44a7-be21-1415017ca20c-config\") pod \"neutron-857895d98b-n66x9\" (UID: \"32c4dbf1-2dfe-44a7-be21-1415017ca20c\") " pod="openstack-kuttl-tests/neutron-857895d98b-n66x9" Jan 20 17:13:33 crc kubenswrapper[4558]: I0120 17:13:33.523948 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/32c4dbf1-2dfe-44a7-be21-1415017ca20c-public-tls-certs\") pod \"neutron-857895d98b-n66x9\" (UID: \"32c4dbf1-2dfe-44a7-be21-1415017ca20c\") " pod="openstack-kuttl-tests/neutron-857895d98b-n66x9" Jan 20 17:13:33 crc kubenswrapper[4558]: I0120 17:13:33.523996 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/32c4dbf1-2dfe-44a7-be21-1415017ca20c-internal-tls-certs\") pod \"neutron-857895d98b-n66x9\" (UID: \"32c4dbf1-2dfe-44a7-be21-1415017ca20c\") " pod="openstack-kuttl-tests/neutron-857895d98b-n66x9" Jan 20 17:13:33 crc kubenswrapper[4558]: I0120 17:13:33.524116 4558 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/32c4dbf1-2dfe-44a7-be21-1415017ca20c-ovndb-tls-certs\") pod \"neutron-857895d98b-n66x9\" (UID: \"32c4dbf1-2dfe-44a7-be21-1415017ca20c\") " pod="openstack-kuttl-tests/neutron-857895d98b-n66x9" Jan 20 17:13:33 crc kubenswrapper[4558]: I0120 17:13:33.524217 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/32c4dbf1-2dfe-44a7-be21-1415017ca20c-httpd-config\") pod \"neutron-857895d98b-n66x9\" (UID: \"32c4dbf1-2dfe-44a7-be21-1415017ca20c\") " pod="openstack-kuttl-tests/neutron-857895d98b-n66x9" Jan 20 17:13:33 crc kubenswrapper[4558]: I0120 17:13:33.524248 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rfbbj\" (UniqueName: \"kubernetes.io/projected/32c4dbf1-2dfe-44a7-be21-1415017ca20c-kube-api-access-rfbbj\") pod \"neutron-857895d98b-n66x9\" (UID: \"32c4dbf1-2dfe-44a7-be21-1415017ca20c\") " pod="openstack-kuttl-tests/neutron-857895d98b-n66x9" Jan 20 17:13:33 crc kubenswrapper[4558]: I0120 17:13:33.584527 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:13:33 crc kubenswrapper[4558]: I0120 17:13:33.626575 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/32c4dbf1-2dfe-44a7-be21-1415017ca20c-public-tls-certs\") pod \"neutron-857895d98b-n66x9\" (UID: \"32c4dbf1-2dfe-44a7-be21-1415017ca20c\") " pod="openstack-kuttl-tests/neutron-857895d98b-n66x9" Jan 20 17:13:33 crc kubenswrapper[4558]: I0120 17:13:33.626653 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/32c4dbf1-2dfe-44a7-be21-1415017ca20c-internal-tls-certs\") pod \"neutron-857895d98b-n66x9\" (UID: \"32c4dbf1-2dfe-44a7-be21-1415017ca20c\") " pod="openstack-kuttl-tests/neutron-857895d98b-n66x9" Jan 20 17:13:33 crc kubenswrapper[4558]: I0120 17:13:33.626728 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/32c4dbf1-2dfe-44a7-be21-1415017ca20c-ovndb-tls-certs\") pod \"neutron-857895d98b-n66x9\" (UID: \"32c4dbf1-2dfe-44a7-be21-1415017ca20c\") " pod="openstack-kuttl-tests/neutron-857895d98b-n66x9" Jan 20 17:13:33 crc kubenswrapper[4558]: I0120 17:13:33.626769 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/32c4dbf1-2dfe-44a7-be21-1415017ca20c-httpd-config\") pod \"neutron-857895d98b-n66x9\" (UID: \"32c4dbf1-2dfe-44a7-be21-1415017ca20c\") " pod="openstack-kuttl-tests/neutron-857895d98b-n66x9" Jan 20 17:13:33 crc kubenswrapper[4558]: I0120 17:13:33.626790 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rfbbj\" (UniqueName: \"kubernetes.io/projected/32c4dbf1-2dfe-44a7-be21-1415017ca20c-kube-api-access-rfbbj\") pod \"neutron-857895d98b-n66x9\" (UID: \"32c4dbf1-2dfe-44a7-be21-1415017ca20c\") " pod="openstack-kuttl-tests/neutron-857895d98b-n66x9" Jan 20 17:13:33 crc kubenswrapper[4558]: I0120 17:13:33.626848 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/32c4dbf1-2dfe-44a7-be21-1415017ca20c-combined-ca-bundle\") pod \"neutron-857895d98b-n66x9\" (UID: \"32c4dbf1-2dfe-44a7-be21-1415017ca20c\") " pod="openstack-kuttl-tests/neutron-857895d98b-n66x9" Jan 20 17:13:33 crc kubenswrapper[4558]: I0120 17:13:33.626880 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/32c4dbf1-2dfe-44a7-be21-1415017ca20c-config\") pod \"neutron-857895d98b-n66x9\" (UID: \"32c4dbf1-2dfe-44a7-be21-1415017ca20c\") " pod="openstack-kuttl-tests/neutron-857895d98b-n66x9" Jan 20 17:13:33 crc kubenswrapper[4558]: I0120 17:13:33.632775 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/32c4dbf1-2dfe-44a7-be21-1415017ca20c-httpd-config\") pod \"neutron-857895d98b-n66x9\" (UID: \"32c4dbf1-2dfe-44a7-be21-1415017ca20c\") " pod="openstack-kuttl-tests/neutron-857895d98b-n66x9" Jan 20 17:13:33 crc kubenswrapper[4558]: I0120 17:13:33.635422 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/32c4dbf1-2dfe-44a7-be21-1415017ca20c-public-tls-certs\") pod \"neutron-857895d98b-n66x9\" (UID: \"32c4dbf1-2dfe-44a7-be21-1415017ca20c\") " pod="openstack-kuttl-tests/neutron-857895d98b-n66x9" Jan 20 17:13:33 crc kubenswrapper[4558]: I0120 17:13:33.637822 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/32c4dbf1-2dfe-44a7-be21-1415017ca20c-config\") pod \"neutron-857895d98b-n66x9\" (UID: \"32c4dbf1-2dfe-44a7-be21-1415017ca20c\") " pod="openstack-kuttl-tests/neutron-857895d98b-n66x9" Jan 20 17:13:33 crc kubenswrapper[4558]: I0120 17:13:33.637954 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/32c4dbf1-2dfe-44a7-be21-1415017ca20c-ovndb-tls-certs\") pod \"neutron-857895d98b-n66x9\" (UID: \"32c4dbf1-2dfe-44a7-be21-1415017ca20c\") " pod="openstack-kuttl-tests/neutron-857895d98b-n66x9" Jan 20 17:13:33 crc kubenswrapper[4558]: I0120 17:13:33.638459 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32c4dbf1-2dfe-44a7-be21-1415017ca20c-combined-ca-bundle\") pod \"neutron-857895d98b-n66x9\" (UID: \"32c4dbf1-2dfe-44a7-be21-1415017ca20c\") " pod="openstack-kuttl-tests/neutron-857895d98b-n66x9" Jan 20 17:13:33 crc kubenswrapper[4558]: I0120 17:13:33.644949 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/32c4dbf1-2dfe-44a7-be21-1415017ca20c-internal-tls-certs\") pod \"neutron-857895d98b-n66x9\" (UID: \"32c4dbf1-2dfe-44a7-be21-1415017ca20c\") " pod="openstack-kuttl-tests/neutron-857895d98b-n66x9" Jan 20 17:13:33 crc kubenswrapper[4558]: I0120 17:13:33.648244 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rfbbj\" (UniqueName: \"kubernetes.io/projected/32c4dbf1-2dfe-44a7-be21-1415017ca20c-kube-api-access-rfbbj\") pod \"neutron-857895d98b-n66x9\" (UID: \"32c4dbf1-2dfe-44a7-be21-1415017ca20c\") " pod="openstack-kuttl-tests/neutron-857895d98b-n66x9" Jan 20 17:13:33 crc kubenswrapper[4558]: I0120 17:13:33.694695 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/barbican-api-9f6d49bb-klwm8" Jan 20 17:13:33 crc kubenswrapper[4558]: I0120 17:13:33.750310 4558 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-857895d98b-n66x9" Jan 20 17:13:33 crc kubenswrapper[4558]: I0120 17:13:33.871023 4558 generic.go:334] "Generic (PLEG): container finished" podID="9946343f-cd2f-4a21-8422-a13633a8aa60" containerID="9c47e972aebd64566110e65f6a0d8c4b41eb4130c83a7641e09bdd5b4000c880" exitCode=0 Jan 20 17:13:33 crc kubenswrapper[4558]: I0120 17:13:33.871244 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-smvr5" event={"ID":"9946343f-cd2f-4a21-8422-a13633a8aa60","Type":"ContainerDied","Data":"9c47e972aebd64566110e65f6a0d8c4b41eb4130c83a7641e09bdd5b4000c880"} Jan 20 17:13:33 crc kubenswrapper[4558]: I0120 17:13:33.880750 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a","Type":"ContainerStarted","Data":"46a944f926e2a66f4adf5877139aadf5d8d647d7a08c085ceffe4a3ae8061c0b"} Jan 20 17:13:33 crc kubenswrapper[4558]: I0120 17:13:33.881141 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:13:33 crc kubenswrapper[4558]: I0120 17:13:33.892457 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-57ff5684f-lrvnz" event={"ID":"74a94aaa-09bf-40a6-939b-14cf47decbfb","Type":"ContainerStarted","Data":"3ab945d23aee1932a7b33d3938b543d738b3d8e27e3ce486a815df1a94173535"} Jan 20 17:13:33 crc kubenswrapper[4558]: I0120 17:13:33.892485 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/neutron-57ff5684f-lrvnz" Jan 20 17:13:33 crc kubenswrapper[4558]: I0120 17:13:33.920264 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/cinder-api-0" podStartSLOduration=3.920244851 podStartE2EDuration="3.920244851s" podCreationTimestamp="2026-01-20 17:13:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:13:33.918385495 +0000 UTC m=+1907.678723451" watchObservedRunningTime="2026-01-20 17:13:33.920244851 +0000 UTC m=+1907.680582818" Jan 20 17:13:33 crc kubenswrapper[4558]: I0120 17:13:33.924065 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/barbican-api-9f6d49bb-klwm8" Jan 20 17:13:33 crc kubenswrapper[4558]: I0120 17:13:33.931469 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:13:33 crc kubenswrapper[4558]: I0120 17:13:33.956968 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/neutron-57ff5684f-lrvnz" podStartSLOduration=2.956946457 podStartE2EDuration="2.956946457s" podCreationTimestamp="2026-01-20 17:13:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:13:33.932123949 +0000 UTC m=+1907.692461916" watchObservedRunningTime="2026-01-20 17:13:33.956946457 +0000 UTC m=+1907.717284424" Jan 20 17:13:33 crc kubenswrapper[4558]: I0120 17:13:33.982144 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:13:34 crc kubenswrapper[4558]: I0120 17:13:34.269250 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-857895d98b-n66x9"] Jan 
20 17:13:34 crc kubenswrapper[4558]: I0120 17:13:34.929918 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-857895d98b-n66x9" event={"ID":"32c4dbf1-2dfe-44a7-be21-1415017ca20c","Type":"ContainerStarted","Data":"bfe3f187246a119dce5b9bd953345bf403b08cd980efbe013f04ba0712f25420"} Jan 20 17:13:34 crc kubenswrapper[4558]: I0120 17:13:34.930266 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-857895d98b-n66x9" event={"ID":"32c4dbf1-2dfe-44a7-be21-1415017ca20c","Type":"ContainerStarted","Data":"40fb072bac172dc7253b14ca577f8224ce05af40e481c08769f4d2e454f5196a"} Jan 20 17:13:34 crc kubenswrapper[4558]: I0120 17:13:34.930278 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-857895d98b-n66x9" event={"ID":"32c4dbf1-2dfe-44a7-be21-1415017ca20c","Type":"ContainerStarted","Data":"e846cc85499f6e69b78230561840eb894c958779d3199834bbede89077272e1e"} Jan 20 17:13:34 crc kubenswrapper[4558]: I0120 17:13:34.931195 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-scheduler-0" podUID="dc4869f2-03aa-4eb4-838c-5999cf611b1e" containerName="cinder-scheduler" containerID="cri-o://2082a9f2ff86d3cf8772d78180aa514bb9911f6f9bd1df655da6be3d1954fb32" gracePeriod=30 Jan 20 17:13:34 crc kubenswrapper[4558]: I0120 17:13:34.931308 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-scheduler-0" podUID="dc4869f2-03aa-4eb4-838c-5999cf611b1e" containerName="probe" containerID="cri-o://21503c4a228c86329bee9a8b686d4bf7d098cd0eccd98077432b6ac0f0932987" gracePeriod=30 Jan 20 17:13:34 crc kubenswrapper[4558]: I0120 17:13:34.971976 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/neutron-857895d98b-n66x9" podStartSLOduration=1.971958924 podStartE2EDuration="1.971958924s" podCreationTimestamp="2026-01-20 17:13:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:13:34.963245378 +0000 UTC m=+1908.723583344" watchObservedRunningTime="2026-01-20 17:13:34.971958924 +0000 UTC m=+1908.732296890" Jan 20 17:13:35 crc kubenswrapper[4558]: I0120 17:13:35.945683 4558 generic.go:334] "Generic (PLEG): container finished" podID="9946343f-cd2f-4a21-8422-a13633a8aa60" containerID="121ffaa692f9f1cedab3f39ed7fca18eeee1c049698fd8e9569dd63a7b3371dc" exitCode=0 Jan 20 17:13:35 crc kubenswrapper[4558]: I0120 17:13:35.945779 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-smvr5" event={"ID":"9946343f-cd2f-4a21-8422-a13633a8aa60","Type":"ContainerDied","Data":"121ffaa692f9f1cedab3f39ed7fca18eeee1c049698fd8e9569dd63a7b3371dc"} Jan 20 17:13:35 crc kubenswrapper[4558]: I0120 17:13:35.950238 4558 generic.go:334] "Generic (PLEG): container finished" podID="dc4869f2-03aa-4eb4-838c-5999cf611b1e" containerID="21503c4a228c86329bee9a8b686d4bf7d098cd0eccd98077432b6ac0f0932987" exitCode=0 Jan 20 17:13:35 crc kubenswrapper[4558]: I0120 17:13:35.950288 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"dc4869f2-03aa-4eb4-838c-5999cf611b1e","Type":"ContainerDied","Data":"21503c4a228c86329bee9a8b686d4bf7d098cd0eccd98077432b6ac0f0932987"} Jan 20 17:13:35 crc kubenswrapper[4558]: I0120 17:13:35.950944 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack-kuttl-tests/neutron-857895d98b-n66x9" Jan 20 17:13:36 crc kubenswrapper[4558]: I0120 17:13:36.836463 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:13:36 crc kubenswrapper[4558]: I0120 17:13:36.909968 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc4869f2-03aa-4eb4-838c-5999cf611b1e-config-data\") pod \"dc4869f2-03aa-4eb4-838c-5999cf611b1e\" (UID: \"dc4869f2-03aa-4eb4-838c-5999cf611b1e\") " Jan 20 17:13:36 crc kubenswrapper[4558]: I0120 17:13:36.910008 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dc4869f2-03aa-4eb4-838c-5999cf611b1e-scripts\") pod \"dc4869f2-03aa-4eb4-838c-5999cf611b1e\" (UID: \"dc4869f2-03aa-4eb4-838c-5999cf611b1e\") " Jan 20 17:13:36 crc kubenswrapper[4558]: I0120 17:13:36.910096 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j84j5\" (UniqueName: \"kubernetes.io/projected/dc4869f2-03aa-4eb4-838c-5999cf611b1e-kube-api-access-j84j5\") pod \"dc4869f2-03aa-4eb4-838c-5999cf611b1e\" (UID: \"dc4869f2-03aa-4eb4-838c-5999cf611b1e\") " Jan 20 17:13:36 crc kubenswrapper[4558]: I0120 17:13:36.910150 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/dc4869f2-03aa-4eb4-838c-5999cf611b1e-config-data-custom\") pod \"dc4869f2-03aa-4eb4-838c-5999cf611b1e\" (UID: \"dc4869f2-03aa-4eb4-838c-5999cf611b1e\") " Jan 20 17:13:36 crc kubenswrapper[4558]: I0120 17:13:36.910195 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/dc4869f2-03aa-4eb4-838c-5999cf611b1e-etc-machine-id\") pod \"dc4869f2-03aa-4eb4-838c-5999cf611b1e\" (UID: \"dc4869f2-03aa-4eb4-838c-5999cf611b1e\") " Jan 20 17:13:36 crc kubenswrapper[4558]: I0120 17:13:36.910272 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc4869f2-03aa-4eb4-838c-5999cf611b1e-combined-ca-bundle\") pod \"dc4869f2-03aa-4eb4-838c-5999cf611b1e\" (UID: \"dc4869f2-03aa-4eb4-838c-5999cf611b1e\") " Jan 20 17:13:36 crc kubenswrapper[4558]: I0120 17:13:36.910438 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/dc4869f2-03aa-4eb4-838c-5999cf611b1e-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "dc4869f2-03aa-4eb4-838c-5999cf611b1e" (UID: "dc4869f2-03aa-4eb4-838c-5999cf611b1e"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:13:36 crc kubenswrapper[4558]: I0120 17:13:36.910856 4558 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/dc4869f2-03aa-4eb4-838c-5999cf611b1e-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:36 crc kubenswrapper[4558]: I0120 17:13:36.916046 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc4869f2-03aa-4eb4-838c-5999cf611b1e-scripts" (OuterVolumeSpecName: "scripts") pod "dc4869f2-03aa-4eb4-838c-5999cf611b1e" (UID: "dc4869f2-03aa-4eb4-838c-5999cf611b1e"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:13:36 crc kubenswrapper[4558]: I0120 17:13:36.930474 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dc4869f2-03aa-4eb4-838c-5999cf611b1e-kube-api-access-j84j5" (OuterVolumeSpecName: "kube-api-access-j84j5") pod "dc4869f2-03aa-4eb4-838c-5999cf611b1e" (UID: "dc4869f2-03aa-4eb4-838c-5999cf611b1e"). InnerVolumeSpecName "kube-api-access-j84j5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:13:36 crc kubenswrapper[4558]: I0120 17:13:36.932802 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc4869f2-03aa-4eb4-838c-5999cf611b1e-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "dc4869f2-03aa-4eb4-838c-5999cf611b1e" (UID: "dc4869f2-03aa-4eb4-838c-5999cf611b1e"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:13:36 crc kubenswrapper[4558]: I0120 17:13:36.953004 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc4869f2-03aa-4eb4-838c-5999cf611b1e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dc4869f2-03aa-4eb4-838c-5999cf611b1e" (UID: "dc4869f2-03aa-4eb4-838c-5999cf611b1e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:13:36 crc kubenswrapper[4558]: I0120 17:13:36.961694 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-smvr5" event={"ID":"9946343f-cd2f-4a21-8422-a13633a8aa60","Type":"ContainerStarted","Data":"f0579634518c6d59bd2bf9e349ea4107d6dbb68c173cff7418ff1ddfdf10b1e8"} Jan 20 17:13:36 crc kubenswrapper[4558]: I0120 17:13:36.975987 4558 generic.go:334] "Generic (PLEG): container finished" podID="dc4869f2-03aa-4eb4-838c-5999cf611b1e" containerID="2082a9f2ff86d3cf8772d78180aa514bb9911f6f9bd1df655da6be3d1954fb32" exitCode=0 Jan 20 17:13:36 crc kubenswrapper[4558]: I0120 17:13:36.976286 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"dc4869f2-03aa-4eb4-838c-5999cf611b1e","Type":"ContainerDied","Data":"2082a9f2ff86d3cf8772d78180aa514bb9911f6f9bd1df655da6be3d1954fb32"} Jan 20 17:13:36 crc kubenswrapper[4558]: I0120 17:13:36.976344 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"dc4869f2-03aa-4eb4-838c-5999cf611b1e","Type":"ContainerDied","Data":"85584a435a55ae996a57544225016dbea39899f0efaf4c0ef3109b0a3c73b7a6"} Jan 20 17:13:36 crc kubenswrapper[4558]: I0120 17:13:36.976364 4558 scope.go:117] "RemoveContainer" containerID="21503c4a228c86329bee9a8b686d4bf7d098cd0eccd98077432b6ac0f0932987" Jan 20 17:13:36 crc kubenswrapper[4558]: I0120 17:13:36.976309 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:13:36 crc kubenswrapper[4558]: I0120 17:13:36.986290 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-smvr5" podStartSLOduration=3.369043183 podStartE2EDuration="5.98627612s" podCreationTimestamp="2026-01-20 17:13:31 +0000 UTC" firstStartedPulling="2026-01-20 17:13:33.876707862 +0000 UTC m=+1907.637045829" lastFinishedPulling="2026-01-20 17:13:36.493940799 +0000 UTC m=+1910.254278766" observedRunningTime="2026-01-20 17:13:36.982121658 +0000 UTC m=+1910.742459625" watchObservedRunningTime="2026-01-20 17:13:36.98627612 +0000 UTC m=+1910.746614087" Jan 20 17:13:37 crc kubenswrapper[4558]: I0120 17:13:37.007695 4558 scope.go:117] "RemoveContainer" containerID="2082a9f2ff86d3cf8772d78180aa514bb9911f6f9bd1df655da6be3d1954fb32" Jan 20 17:13:37 crc kubenswrapper[4558]: I0120 17:13:37.010579 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc4869f2-03aa-4eb4-838c-5999cf611b1e-config-data" (OuterVolumeSpecName: "config-data") pod "dc4869f2-03aa-4eb4-838c-5999cf611b1e" (UID: "dc4869f2-03aa-4eb4-838c-5999cf611b1e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:13:37 crc kubenswrapper[4558]: I0120 17:13:37.013095 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j84j5\" (UniqueName: \"kubernetes.io/projected/dc4869f2-03aa-4eb4-838c-5999cf611b1e-kube-api-access-j84j5\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:37 crc kubenswrapper[4558]: I0120 17:13:37.013120 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/dc4869f2-03aa-4eb4-838c-5999cf611b1e-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:37 crc kubenswrapper[4558]: I0120 17:13:37.013131 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc4869f2-03aa-4eb4-838c-5999cf611b1e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:37 crc kubenswrapper[4558]: I0120 17:13:37.013141 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc4869f2-03aa-4eb4-838c-5999cf611b1e-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:37 crc kubenswrapper[4558]: I0120 17:13:37.013150 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dc4869f2-03aa-4eb4-838c-5999cf611b1e-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:37 crc kubenswrapper[4558]: I0120 17:13:37.026877 4558 scope.go:117] "RemoveContainer" containerID="21503c4a228c86329bee9a8b686d4bf7d098cd0eccd98077432b6ac0f0932987" Jan 20 17:13:37 crc kubenswrapper[4558]: E0120 17:13:37.030614 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"21503c4a228c86329bee9a8b686d4bf7d098cd0eccd98077432b6ac0f0932987\": container with ID starting with 21503c4a228c86329bee9a8b686d4bf7d098cd0eccd98077432b6ac0f0932987 not found: ID does not exist" containerID="21503c4a228c86329bee9a8b686d4bf7d098cd0eccd98077432b6ac0f0932987" Jan 20 17:13:37 crc kubenswrapper[4558]: I0120 17:13:37.030648 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"21503c4a228c86329bee9a8b686d4bf7d098cd0eccd98077432b6ac0f0932987"} err="failed to get container 
status \"21503c4a228c86329bee9a8b686d4bf7d098cd0eccd98077432b6ac0f0932987\": rpc error: code = NotFound desc = could not find container \"21503c4a228c86329bee9a8b686d4bf7d098cd0eccd98077432b6ac0f0932987\": container with ID starting with 21503c4a228c86329bee9a8b686d4bf7d098cd0eccd98077432b6ac0f0932987 not found: ID does not exist" Jan 20 17:13:37 crc kubenswrapper[4558]: I0120 17:13:37.030673 4558 scope.go:117] "RemoveContainer" containerID="2082a9f2ff86d3cf8772d78180aa514bb9911f6f9bd1df655da6be3d1954fb32" Jan 20 17:13:37 crc kubenswrapper[4558]: E0120 17:13:37.034530 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2082a9f2ff86d3cf8772d78180aa514bb9911f6f9bd1df655da6be3d1954fb32\": container with ID starting with 2082a9f2ff86d3cf8772d78180aa514bb9911f6f9bd1df655da6be3d1954fb32 not found: ID does not exist" containerID="2082a9f2ff86d3cf8772d78180aa514bb9911f6f9bd1df655da6be3d1954fb32" Jan 20 17:13:37 crc kubenswrapper[4558]: I0120 17:13:37.034573 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2082a9f2ff86d3cf8772d78180aa514bb9911f6f9bd1df655da6be3d1954fb32"} err="failed to get container status \"2082a9f2ff86d3cf8772d78180aa514bb9911f6f9bd1df655da6be3d1954fb32\": rpc error: code = NotFound desc = could not find container \"2082a9f2ff86d3cf8772d78180aa514bb9911f6f9bd1df655da6be3d1954fb32\": container with ID starting with 2082a9f2ff86d3cf8772d78180aa514bb9911f6f9bd1df655da6be3d1954fb32 not found: ID does not exist" Jan 20 17:13:37 crc kubenswrapper[4558]: I0120 17:13:37.330459 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:13:37 crc kubenswrapper[4558]: I0120 17:13:37.338228 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:13:37 crc kubenswrapper[4558]: I0120 17:13:37.356442 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:13:37 crc kubenswrapper[4558]: E0120 17:13:37.356931 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc4869f2-03aa-4eb4-838c-5999cf611b1e" containerName="cinder-scheduler" Jan 20 17:13:37 crc kubenswrapper[4558]: I0120 17:13:37.356951 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc4869f2-03aa-4eb4-838c-5999cf611b1e" containerName="cinder-scheduler" Jan 20 17:13:37 crc kubenswrapper[4558]: E0120 17:13:37.357001 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc4869f2-03aa-4eb4-838c-5999cf611b1e" containerName="probe" Jan 20 17:13:37 crc kubenswrapper[4558]: I0120 17:13:37.357007 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc4869f2-03aa-4eb4-838c-5999cf611b1e" containerName="probe" Jan 20 17:13:37 crc kubenswrapper[4558]: I0120 17:13:37.357215 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc4869f2-03aa-4eb4-838c-5999cf611b1e" containerName="probe" Jan 20 17:13:37 crc kubenswrapper[4558]: I0120 17:13:37.357243 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc4869f2-03aa-4eb4-838c-5999cf611b1e" containerName="cinder-scheduler" Jan 20 17:13:37 crc kubenswrapper[4558]: I0120 17:13:37.358313 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:13:37 crc kubenswrapper[4558]: I0120 17:13:37.363870 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-scheduler-config-data" Jan 20 17:13:37 crc kubenswrapper[4558]: I0120 17:13:37.368226 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:13:37 crc kubenswrapper[4558]: I0120 17:13:37.424659 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6b82g\" (UniqueName: \"kubernetes.io/projected/3072fee5-c73d-407d-80c7-c376628ec545-kube-api-access-6b82g\") pod \"cinder-scheduler-0\" (UID: \"3072fee5-c73d-407d-80c7-c376628ec545\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:13:37 crc kubenswrapper[4558]: I0120 17:13:37.424730 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3072fee5-c73d-407d-80c7-c376628ec545-scripts\") pod \"cinder-scheduler-0\" (UID: \"3072fee5-c73d-407d-80c7-c376628ec545\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:13:37 crc kubenswrapper[4558]: I0120 17:13:37.425149 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3072fee5-c73d-407d-80c7-c376628ec545-config-data\") pod \"cinder-scheduler-0\" (UID: \"3072fee5-c73d-407d-80c7-c376628ec545\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:13:37 crc kubenswrapper[4558]: I0120 17:13:37.425449 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3072fee5-c73d-407d-80c7-c376628ec545-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"3072fee5-c73d-407d-80c7-c376628ec545\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:13:37 crc kubenswrapper[4558]: I0120 17:13:37.425497 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3072fee5-c73d-407d-80c7-c376628ec545-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"3072fee5-c73d-407d-80c7-c376628ec545\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:13:37 crc kubenswrapper[4558]: I0120 17:13:37.425666 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3072fee5-c73d-407d-80c7-c376628ec545-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"3072fee5-c73d-407d-80c7-c376628ec545\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:13:37 crc kubenswrapper[4558]: I0120 17:13:37.526987 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/watcher-api-0"] Jan 20 17:13:37 crc kubenswrapper[4558]: I0120 17:13:37.527394 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/watcher-api-0" podUID="9d818a7f-96eb-499c-bb53-75b259d30282" containerName="watcher-api-log" containerID="cri-o://24e4540d7ddf0429e73f6d20be940f8c915072c049401903c2e81ab3d7419eb1" gracePeriod=30 Jan 20 17:13:37 crc kubenswrapper[4558]: I0120 17:13:37.527526 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/watcher-api-0" 
podUID="9d818a7f-96eb-499c-bb53-75b259d30282" containerName="watcher-api" containerID="cri-o://4d800f765daaca579849767e5cdf500af707cd7b1a7b9da4dc81822c77f4680d" gracePeriod=30 Jan 20 17:13:37 crc kubenswrapper[4558]: I0120 17:13:37.529237 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3072fee5-c73d-407d-80c7-c376628ec545-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"3072fee5-c73d-407d-80c7-c376628ec545\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:13:37 crc kubenswrapper[4558]: I0120 17:13:37.529372 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6b82g\" (UniqueName: \"kubernetes.io/projected/3072fee5-c73d-407d-80c7-c376628ec545-kube-api-access-6b82g\") pod \"cinder-scheduler-0\" (UID: \"3072fee5-c73d-407d-80c7-c376628ec545\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:13:37 crc kubenswrapper[4558]: I0120 17:13:37.529403 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3072fee5-c73d-407d-80c7-c376628ec545-scripts\") pod \"cinder-scheduler-0\" (UID: \"3072fee5-c73d-407d-80c7-c376628ec545\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:13:37 crc kubenswrapper[4558]: I0120 17:13:37.529473 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3072fee5-c73d-407d-80c7-c376628ec545-config-data\") pod \"cinder-scheduler-0\" (UID: \"3072fee5-c73d-407d-80c7-c376628ec545\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:13:37 crc kubenswrapper[4558]: I0120 17:13:37.529514 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3072fee5-c73d-407d-80c7-c376628ec545-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"3072fee5-c73d-407d-80c7-c376628ec545\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:13:37 crc kubenswrapper[4558]: I0120 17:13:37.529536 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3072fee5-c73d-407d-80c7-c376628ec545-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"3072fee5-c73d-407d-80c7-c376628ec545\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:13:37 crc kubenswrapper[4558]: I0120 17:13:37.531348 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3072fee5-c73d-407d-80c7-c376628ec545-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"3072fee5-c73d-407d-80c7-c376628ec545\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:13:37 crc kubenswrapper[4558]: I0120 17:13:37.535181 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3072fee5-c73d-407d-80c7-c376628ec545-scripts\") pod \"cinder-scheduler-0\" (UID: \"3072fee5-c73d-407d-80c7-c376628ec545\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:13:37 crc kubenswrapper[4558]: I0120 17:13:37.535901 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3072fee5-c73d-407d-80c7-c376628ec545-config-data\") pod \"cinder-scheduler-0\" (UID: \"3072fee5-c73d-407d-80c7-c376628ec545\") " pod="openstack-kuttl-tests/cinder-scheduler-0" 
Jan 20 17:13:37 crc kubenswrapper[4558]: I0120 17:13:37.541860 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3072fee5-c73d-407d-80c7-c376628ec545-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"3072fee5-c73d-407d-80c7-c376628ec545\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:13:37 crc kubenswrapper[4558]: I0120 17:13:37.547524 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6b82g\" (UniqueName: \"kubernetes.io/projected/3072fee5-c73d-407d-80c7-c376628ec545-kube-api-access-6b82g\") pod \"cinder-scheduler-0\" (UID: \"3072fee5-c73d-407d-80c7-c376628ec545\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:13:37 crc kubenswrapper[4558]: I0120 17:13:37.548580 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3072fee5-c73d-407d-80c7-c376628ec545-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"3072fee5-c73d-407d-80c7-c376628ec545\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:13:37 crc kubenswrapper[4558]: I0120 17:13:37.729402 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:13:38 crc kubenswrapper[4558]: I0120 17:13:38.003326 4558 generic.go:334] "Generic (PLEG): container finished" podID="9d818a7f-96eb-499c-bb53-75b259d30282" containerID="24e4540d7ddf0429e73f6d20be940f8c915072c049401903c2e81ab3d7419eb1" exitCode=143 Jan 20 17:13:38 crc kubenswrapper[4558]: I0120 17:13:38.003839 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/watcher-api-0" event={"ID":"9d818a7f-96eb-499c-bb53-75b259d30282","Type":"ContainerDied","Data":"24e4540d7ddf0429e73f6d20be940f8c915072c049401903c2e81ab3d7419eb1"} Jan 20 17:13:38 crc kubenswrapper[4558]: I0120 17:13:38.180408 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:13:38 crc kubenswrapper[4558]: I0120 17:13:38.291445 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:13:38 crc kubenswrapper[4558]: I0120 17:13:38.297116 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:13:38 crc kubenswrapper[4558]: I0120 17:13:38.477653 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/ceilometer-0" podUID="52844554-ecdd-40db-ae62-0f0de3d9030d" containerName="proxy-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 503" Jan 20 17:13:38 crc kubenswrapper[4558]: I0120 17:13:38.579813 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dc4869f2-03aa-4eb4-838c-5999cf611b1e" path="/var/lib/kubelet/pods/dc4869f2-03aa-4eb4-838c-5999cf611b1e/volumes" Jan 20 17:13:39 crc kubenswrapper[4558]: I0120 17:13:39.025041 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"3072fee5-c73d-407d-80c7-c376628ec545","Type":"ContainerStarted","Data":"fa635aef0a0203fdb13831ee491f982a0ac51b1bc7f7d5c51955065fd5631a37"} Jan 20 17:13:39 crc kubenswrapper[4558]: I0120 17:13:39.025500 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" 
event={"ID":"3072fee5-c73d-407d-80c7-c376628ec545","Type":"ContainerStarted","Data":"3270877cc1d65f29e106afce6920ca9c56ef00c9c65d7db9adddebed07f1be1b"} Jan 20 17:13:39 crc kubenswrapper[4558]: I0120 17:13:39.034960 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:13:39 crc kubenswrapper[4558]: I0120 17:13:39.997641 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/barbican-api-6587ff9c6b-8wmpk" Jan 20 17:13:40 crc kubenswrapper[4558]: I0120 17:13:40.064738 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"3072fee5-c73d-407d-80c7-c376628ec545","Type":"ContainerStarted","Data":"599766d523b289496126b3e65bad734ac36d67a7eee51faf425f5873eea60397"} Jan 20 17:13:40 crc kubenswrapper[4558]: I0120 17:13:40.087101 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/barbican-api-6587ff9c6b-8wmpk" Jan 20 17:13:40 crc kubenswrapper[4558]: I0120 17:13:40.087231 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/cinder-scheduler-0" podStartSLOduration=3.087210973 podStartE2EDuration="3.087210973s" podCreationTimestamp="2026-01-20 17:13:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:13:40.079678499 +0000 UTC m=+1913.840016466" watchObservedRunningTime="2026-01-20 17:13:40.087210973 +0000 UTC m=+1913.847548941" Jan 20 17:13:40 crc kubenswrapper[4558]: I0120 17:13:40.138174 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-api-9f6d49bb-klwm8"] Jan 20 17:13:40 crc kubenswrapper[4558]: I0120 17:13:40.138373 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-api-9f6d49bb-klwm8" podUID="4f712673-2484-4dc7-9677-344aa012e994" containerName="barbican-api-log" containerID="cri-o://15337f336c8fb92e0fbc69b63d63f594d01aa962d1ec2bfd37c990066c99f695" gracePeriod=30 Jan 20 17:13:40 crc kubenswrapper[4558]: I0120 17:13:40.138771 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-api-9f6d49bb-klwm8" podUID="4f712673-2484-4dc7-9677-344aa012e994" containerName="barbican-api" containerID="cri-o://4a4f4bedefc86803f964f405689218285f7aa370ef9d7b89f4551921d9e7ab94" gracePeriod=30 Jan 20 17:13:41 crc kubenswrapper[4558]: I0120 17:13:41.089721 4558 generic.go:334] "Generic (PLEG): container finished" podID="4f712673-2484-4dc7-9677-344aa012e994" containerID="15337f336c8fb92e0fbc69b63d63f594d01aa962d1ec2bfd37c990066c99f695" exitCode=143 Jan 20 17:13:41 crc kubenswrapper[4558]: I0120 17:13:41.090236 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-9f6d49bb-klwm8" event={"ID":"4f712673-2484-4dc7-9677-344aa012e994","Type":"ContainerDied","Data":"15337f336c8fb92e0fbc69b63d63f594d01aa962d1ec2bfd37c990066c99f695"} Jan 20 17:13:41 crc kubenswrapper[4558]: I0120 17:13:41.094827 4558 generic.go:334] "Generic (PLEG): container finished" podID="9d818a7f-96eb-499c-bb53-75b259d30282" containerID="4d800f765daaca579849767e5cdf500af707cd7b1a7b9da4dc81822c77f4680d" exitCode=0 Jan 20 17:13:41 crc kubenswrapper[4558]: I0120 17:13:41.095871 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/watcher-api-0" 
event={"ID":"9d818a7f-96eb-499c-bb53-75b259d30282","Type":"ContainerDied","Data":"4d800f765daaca579849767e5cdf500af707cd7b1a7b9da4dc81822c77f4680d"} Jan 20 17:13:41 crc kubenswrapper[4558]: I0120 17:13:41.329046 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/watcher-api-0" Jan 20 17:13:41 crc kubenswrapper[4558]: I0120 17:13:41.415620 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/9d818a7f-96eb-499c-bb53-75b259d30282-custom-prometheus-ca\") pod \"9d818a7f-96eb-499c-bb53-75b259d30282\" (UID: \"9d818a7f-96eb-499c-bb53-75b259d30282\") " Jan 20 17:13:41 crc kubenswrapper[4558]: I0120 17:13:41.415964 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9d818a7f-96eb-499c-bb53-75b259d30282-logs\") pod \"9d818a7f-96eb-499c-bb53-75b259d30282\" (UID: \"9d818a7f-96eb-499c-bb53-75b259d30282\") " Jan 20 17:13:41 crc kubenswrapper[4558]: I0120 17:13:41.416345 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9d818a7f-96eb-499c-bb53-75b259d30282-logs" (OuterVolumeSpecName: "logs") pod "9d818a7f-96eb-499c-bb53-75b259d30282" (UID: "9d818a7f-96eb-499c-bb53-75b259d30282"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:13:41 crc kubenswrapper[4558]: I0120 17:13:41.416576 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wwgzq\" (UniqueName: \"kubernetes.io/projected/9d818a7f-96eb-499c-bb53-75b259d30282-kube-api-access-wwgzq\") pod \"9d818a7f-96eb-499c-bb53-75b259d30282\" (UID: \"9d818a7f-96eb-499c-bb53-75b259d30282\") " Jan 20 17:13:41 crc kubenswrapper[4558]: I0120 17:13:41.416958 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d818a7f-96eb-499c-bb53-75b259d30282-combined-ca-bundle\") pod \"9d818a7f-96eb-499c-bb53-75b259d30282\" (UID: \"9d818a7f-96eb-499c-bb53-75b259d30282\") " Jan 20 17:13:41 crc kubenswrapper[4558]: I0120 17:13:41.417110 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d818a7f-96eb-499c-bb53-75b259d30282-config-data\") pod \"9d818a7f-96eb-499c-bb53-75b259d30282\" (UID: \"9d818a7f-96eb-499c-bb53-75b259d30282\") " Jan 20 17:13:41 crc kubenswrapper[4558]: I0120 17:13:41.417722 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9d818a7f-96eb-499c-bb53-75b259d30282-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:41 crc kubenswrapper[4558]: I0120 17:13:41.423776 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d818a7f-96eb-499c-bb53-75b259d30282-kube-api-access-wwgzq" (OuterVolumeSpecName: "kube-api-access-wwgzq") pod "9d818a7f-96eb-499c-bb53-75b259d30282" (UID: "9d818a7f-96eb-499c-bb53-75b259d30282"). InnerVolumeSpecName "kube-api-access-wwgzq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:13:41 crc kubenswrapper[4558]: I0120 17:13:41.437433 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d818a7f-96eb-499c-bb53-75b259d30282-custom-prometheus-ca" (OuterVolumeSpecName: "custom-prometheus-ca") pod "9d818a7f-96eb-499c-bb53-75b259d30282" (UID: "9d818a7f-96eb-499c-bb53-75b259d30282"). InnerVolumeSpecName "custom-prometheus-ca". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:13:41 crc kubenswrapper[4558]: I0120 17:13:41.439145 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d818a7f-96eb-499c-bb53-75b259d30282-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9d818a7f-96eb-499c-bb53-75b259d30282" (UID: "9d818a7f-96eb-499c-bb53-75b259d30282"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:13:41 crc kubenswrapper[4558]: I0120 17:13:41.454424 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d818a7f-96eb-499c-bb53-75b259d30282-config-data" (OuterVolumeSpecName: "config-data") pod "9d818a7f-96eb-499c-bb53-75b259d30282" (UID: "9d818a7f-96eb-499c-bb53-75b259d30282"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:13:41 crc kubenswrapper[4558]: I0120 17:13:41.520310 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wwgzq\" (UniqueName: \"kubernetes.io/projected/9d818a7f-96eb-499c-bb53-75b259d30282-kube-api-access-wwgzq\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:41 crc kubenswrapper[4558]: I0120 17:13:41.520342 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d818a7f-96eb-499c-bb53-75b259d30282-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:41 crc kubenswrapper[4558]: I0120 17:13:41.520353 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d818a7f-96eb-499c-bb53-75b259d30282-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:41 crc kubenswrapper[4558]: I0120 17:13:41.520365 4558 reconciler_common.go:293] "Volume detached for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/9d818a7f-96eb-499c-bb53-75b259d30282-custom-prometheus-ca\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:42 crc kubenswrapper[4558]: I0120 17:13:42.001565 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-smvr5" Jan 20 17:13:42 crc kubenswrapper[4558]: I0120 17:13:42.001623 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-smvr5" Jan 20 17:13:42 crc kubenswrapper[4558]: I0120 17:13:42.042519 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-smvr5" Jan 20 17:13:42 crc kubenswrapper[4558]: I0120 17:13:42.105537 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/watcher-api-0" Jan 20 17:13:42 crc kubenswrapper[4558]: I0120 17:13:42.105595 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/watcher-api-0" event={"ID":"9d818a7f-96eb-499c-bb53-75b259d30282","Type":"ContainerDied","Data":"f715178005e09dde7f62ac369f14e46483409f8b24358919c2ba9baed7d61d84"} Jan 20 17:13:42 crc kubenswrapper[4558]: I0120 17:13:42.105638 4558 scope.go:117] "RemoveContainer" containerID="4d800f765daaca579849767e5cdf500af707cd7b1a7b9da4dc81822c77f4680d" Jan 20 17:13:42 crc kubenswrapper[4558]: I0120 17:13:42.135209 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/watcher-api-0"] Jan 20 17:13:42 crc kubenswrapper[4558]: I0120 17:13:42.135282 4558 scope.go:117] "RemoveContainer" containerID="24e4540d7ddf0429e73f6d20be940f8c915072c049401903c2e81ab3d7419eb1" Jan 20 17:13:42 crc kubenswrapper[4558]: I0120 17:13:42.149715 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/watcher-api-0"] Jan 20 17:13:42 crc kubenswrapper[4558]: I0120 17:13:42.160267 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/watcher-api-0"] Jan 20 17:13:42 crc kubenswrapper[4558]: E0120 17:13:42.160786 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d818a7f-96eb-499c-bb53-75b259d30282" containerName="watcher-api" Jan 20 17:13:42 crc kubenswrapper[4558]: I0120 17:13:42.160799 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d818a7f-96eb-499c-bb53-75b259d30282" containerName="watcher-api" Jan 20 17:13:42 crc kubenswrapper[4558]: E0120 17:13:42.160830 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d818a7f-96eb-499c-bb53-75b259d30282" containerName="watcher-api-log" Jan 20 17:13:42 crc kubenswrapper[4558]: I0120 17:13:42.160837 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d818a7f-96eb-499c-bb53-75b259d30282" containerName="watcher-api-log" Jan 20 17:13:42 crc kubenswrapper[4558]: I0120 17:13:42.161028 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="9d818a7f-96eb-499c-bb53-75b259d30282" containerName="watcher-api" Jan 20 17:13:42 crc kubenswrapper[4558]: I0120 17:13:42.161043 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="9d818a7f-96eb-499c-bb53-75b259d30282" containerName="watcher-api-log" Jan 20 17:13:42 crc kubenswrapper[4558]: I0120 17:13:42.162107 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/watcher-api-0" Jan 20 17:13:42 crc kubenswrapper[4558]: I0120 17:13:42.164300 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-smvr5" Jan 20 17:13:42 crc kubenswrapper[4558]: I0120 17:13:42.164901 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-watcher-internal-svc" Jan 20 17:13:42 crc kubenswrapper[4558]: I0120 17:13:42.165281 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"watcher-api-config-data" Jan 20 17:13:42 crc kubenswrapper[4558]: I0120 17:13:42.165392 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-watcher-public-svc" Jan 20 17:13:42 crc kubenswrapper[4558]: I0120 17:13:42.166374 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/watcher-api-0"] Jan 20 17:13:42 crc kubenswrapper[4558]: I0120 17:13:42.236397 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0a49fae8-fd33-4a50-b967-e8e8cc48731a-logs\") pod \"watcher-api-0\" (UID: \"0a49fae8-fd33-4a50-b967-e8e8cc48731a\") " pod="openstack-kuttl-tests/watcher-api-0" Jan 20 17:13:42 crc kubenswrapper[4558]: I0120 17:13:42.236721 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0a49fae8-fd33-4a50-b967-e8e8cc48731a-internal-tls-certs\") pod \"watcher-api-0\" (UID: \"0a49fae8-fd33-4a50-b967-e8e8cc48731a\") " pod="openstack-kuttl-tests/watcher-api-0" Jan 20 17:13:42 crc kubenswrapper[4558]: I0120 17:13:42.237185 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/0a49fae8-fd33-4a50-b967-e8e8cc48731a-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: \"0a49fae8-fd33-4a50-b967-e8e8cc48731a\") " pod="openstack-kuttl-tests/watcher-api-0" Jan 20 17:13:42 crc kubenswrapper[4558]: I0120 17:13:42.237299 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a49fae8-fd33-4a50-b967-e8e8cc48731a-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"0a49fae8-fd33-4a50-b967-e8e8cc48731a\") " pod="openstack-kuttl-tests/watcher-api-0" Jan 20 17:13:42 crc kubenswrapper[4558]: I0120 17:13:42.237450 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mxm9b\" (UniqueName: \"kubernetes.io/projected/0a49fae8-fd33-4a50-b967-e8e8cc48731a-kube-api-access-mxm9b\") pod \"watcher-api-0\" (UID: \"0a49fae8-fd33-4a50-b967-e8e8cc48731a\") " pod="openstack-kuttl-tests/watcher-api-0" Jan 20 17:13:42 crc kubenswrapper[4558]: I0120 17:13:42.237560 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0a49fae8-fd33-4a50-b967-e8e8cc48731a-config-data\") pod \"watcher-api-0\" (UID: \"0a49fae8-fd33-4a50-b967-e8e8cc48731a\") " pod="openstack-kuttl-tests/watcher-api-0" Jan 20 17:13:42 crc kubenswrapper[4558]: I0120 17:13:42.237623 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/0a49fae8-fd33-4a50-b967-e8e8cc48731a-public-tls-certs\") pod \"watcher-api-0\" (UID: \"0a49fae8-fd33-4a50-b967-e8e8cc48731a\") " pod="openstack-kuttl-tests/watcher-api-0" Jan 20 17:13:42 crc kubenswrapper[4558]: I0120 17:13:42.276243 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-smvr5"] Jan 20 17:13:42 crc kubenswrapper[4558]: I0120 17:13:42.339618 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0a49fae8-fd33-4a50-b967-e8e8cc48731a-logs\") pod \"watcher-api-0\" (UID: \"0a49fae8-fd33-4a50-b967-e8e8cc48731a\") " pod="openstack-kuttl-tests/watcher-api-0" Jan 20 17:13:42 crc kubenswrapper[4558]: I0120 17:13:42.339729 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0a49fae8-fd33-4a50-b967-e8e8cc48731a-internal-tls-certs\") pod \"watcher-api-0\" (UID: \"0a49fae8-fd33-4a50-b967-e8e8cc48731a\") " pod="openstack-kuttl-tests/watcher-api-0" Jan 20 17:13:42 crc kubenswrapper[4558]: I0120 17:13:42.339798 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/0a49fae8-fd33-4a50-b967-e8e8cc48731a-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: \"0a49fae8-fd33-4a50-b967-e8e8cc48731a\") " pod="openstack-kuttl-tests/watcher-api-0" Jan 20 17:13:42 crc kubenswrapper[4558]: I0120 17:13:42.339830 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a49fae8-fd33-4a50-b967-e8e8cc48731a-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"0a49fae8-fd33-4a50-b967-e8e8cc48731a\") " pod="openstack-kuttl-tests/watcher-api-0" Jan 20 17:13:42 crc kubenswrapper[4558]: I0120 17:13:42.339858 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mxm9b\" (UniqueName: \"kubernetes.io/projected/0a49fae8-fd33-4a50-b967-e8e8cc48731a-kube-api-access-mxm9b\") pod \"watcher-api-0\" (UID: \"0a49fae8-fd33-4a50-b967-e8e8cc48731a\") " pod="openstack-kuttl-tests/watcher-api-0" Jan 20 17:13:42 crc kubenswrapper[4558]: I0120 17:13:42.339884 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0a49fae8-fd33-4a50-b967-e8e8cc48731a-config-data\") pod \"watcher-api-0\" (UID: \"0a49fae8-fd33-4a50-b967-e8e8cc48731a\") " pod="openstack-kuttl-tests/watcher-api-0" Jan 20 17:13:42 crc kubenswrapper[4558]: I0120 17:13:42.339905 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0a49fae8-fd33-4a50-b967-e8e8cc48731a-public-tls-certs\") pod \"watcher-api-0\" (UID: \"0a49fae8-fd33-4a50-b967-e8e8cc48731a\") " pod="openstack-kuttl-tests/watcher-api-0" Jan 20 17:13:42 crc kubenswrapper[4558]: I0120 17:13:42.340821 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0a49fae8-fd33-4a50-b967-e8e8cc48731a-logs\") pod \"watcher-api-0\" (UID: \"0a49fae8-fd33-4a50-b967-e8e8cc48731a\") " pod="openstack-kuttl-tests/watcher-api-0" Jan 20 17:13:42 crc kubenswrapper[4558]: I0120 17:13:42.344069 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/0a49fae8-fd33-4a50-b967-e8e8cc48731a-public-tls-certs\") pod \"watcher-api-0\" (UID: \"0a49fae8-fd33-4a50-b967-e8e8cc48731a\") " pod="openstack-kuttl-tests/watcher-api-0" Jan 20 17:13:42 crc kubenswrapper[4558]: I0120 17:13:42.344254 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0a49fae8-fd33-4a50-b967-e8e8cc48731a-config-data\") pod \"watcher-api-0\" (UID: \"0a49fae8-fd33-4a50-b967-e8e8cc48731a\") " pod="openstack-kuttl-tests/watcher-api-0" Jan 20 17:13:42 crc kubenswrapper[4558]: I0120 17:13:42.344726 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a49fae8-fd33-4a50-b967-e8e8cc48731a-combined-ca-bundle\") pod \"watcher-api-0\" (UID: \"0a49fae8-fd33-4a50-b967-e8e8cc48731a\") " pod="openstack-kuttl-tests/watcher-api-0" Jan 20 17:13:42 crc kubenswrapper[4558]: I0120 17:13:42.347582 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/0a49fae8-fd33-4a50-b967-e8e8cc48731a-custom-prometheus-ca\") pod \"watcher-api-0\" (UID: \"0a49fae8-fd33-4a50-b967-e8e8cc48731a\") " pod="openstack-kuttl-tests/watcher-api-0" Jan 20 17:13:42 crc kubenswrapper[4558]: I0120 17:13:42.350087 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0a49fae8-fd33-4a50-b967-e8e8cc48731a-internal-tls-certs\") pod \"watcher-api-0\" (UID: \"0a49fae8-fd33-4a50-b967-e8e8cc48731a\") " pod="openstack-kuttl-tests/watcher-api-0" Jan 20 17:13:42 crc kubenswrapper[4558]: I0120 17:13:42.368069 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mxm9b\" (UniqueName: \"kubernetes.io/projected/0a49fae8-fd33-4a50-b967-e8e8cc48731a-kube-api-access-mxm9b\") pod \"watcher-api-0\" (UID: \"0a49fae8-fd33-4a50-b967-e8e8cc48731a\") " pod="openstack-kuttl-tests/watcher-api-0" Jan 20 17:13:42 crc kubenswrapper[4558]: I0120 17:13:42.489736 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/watcher-api-0" Jan 20 17:13:42 crc kubenswrapper[4558]: I0120 17:13:42.579922 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d818a7f-96eb-499c-bb53-75b259d30282" path="/var/lib/kubelet/pods/9d818a7f-96eb-499c-bb53-75b259d30282/volumes" Jan 20 17:13:42 crc kubenswrapper[4558]: I0120 17:13:42.730125 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:13:42 crc kubenswrapper[4558]: I0120 17:13:42.799247 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:13:42 crc kubenswrapper[4558]: I0120 17:13:42.938047 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/watcher-api-0"] Jan 20 17:13:43 crc kubenswrapper[4558]: I0120 17:13:43.115620 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/watcher-api-0" event={"ID":"0a49fae8-fd33-4a50-b967-e8e8cc48731a","Type":"ContainerStarted","Data":"e74604d2c7d949fd19df578aba3942d98f6666bf0d5ad3062afde5dc44fe6f40"} Jan 20 17:13:43 crc kubenswrapper[4558]: I0120 17:13:43.283648 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/barbican-api-9f6d49bb-klwm8" podUID="4f712673-2484-4dc7-9677-344aa012e994" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.1.217:9311/healthcheck\": read tcp 10.217.0.2:50790->10.217.1.217:9311: read: connection reset by peer" Jan 20 17:13:43 crc kubenswrapper[4558]: I0120 17:13:43.283679 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/barbican-api-9f6d49bb-klwm8" podUID="4f712673-2484-4dc7-9677-344aa012e994" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.1.217:9311/healthcheck\": read tcp 10.217.0.2:50806->10.217.1.217:9311: read: connection reset by peer" Jan 20 17:13:43 crc kubenswrapper[4558]: I0120 17:13:43.671438 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-api-9f6d49bb-klwm8" Jan 20 17:13:43 crc kubenswrapper[4558]: I0120 17:13:43.808700 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f712673-2484-4dc7-9677-344aa012e994-combined-ca-bundle\") pod \"4f712673-2484-4dc7-9677-344aa012e994\" (UID: \"4f712673-2484-4dc7-9677-344aa012e994\") " Jan 20 17:13:43 crc kubenswrapper[4558]: I0120 17:13:43.808990 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v4qmx\" (UniqueName: \"kubernetes.io/projected/4f712673-2484-4dc7-9677-344aa012e994-kube-api-access-v4qmx\") pod \"4f712673-2484-4dc7-9677-344aa012e994\" (UID: \"4f712673-2484-4dc7-9677-344aa012e994\") " Jan 20 17:13:43 crc kubenswrapper[4558]: I0120 17:13:43.809055 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f712673-2484-4dc7-9677-344aa012e994-config-data\") pod \"4f712673-2484-4dc7-9677-344aa012e994\" (UID: \"4f712673-2484-4dc7-9677-344aa012e994\") " Jan 20 17:13:43 crc kubenswrapper[4558]: I0120 17:13:43.809250 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4f712673-2484-4dc7-9677-344aa012e994-logs\") pod \"4f712673-2484-4dc7-9677-344aa012e994\" (UID: \"4f712673-2484-4dc7-9677-344aa012e994\") " Jan 20 17:13:43 crc kubenswrapper[4558]: I0120 17:13:43.809405 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4f712673-2484-4dc7-9677-344aa012e994-config-data-custom\") pod \"4f712673-2484-4dc7-9677-344aa012e994\" (UID: \"4f712673-2484-4dc7-9677-344aa012e994\") " Jan 20 17:13:43 crc kubenswrapper[4558]: I0120 17:13:43.809778 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4f712673-2484-4dc7-9677-344aa012e994-logs" (OuterVolumeSpecName: "logs") pod "4f712673-2484-4dc7-9677-344aa012e994" (UID: "4f712673-2484-4dc7-9677-344aa012e994"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:13:43 crc kubenswrapper[4558]: I0120 17:13:43.810963 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4f712673-2484-4dc7-9677-344aa012e994-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:43 crc kubenswrapper[4558]: I0120 17:13:43.814325 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4f712673-2484-4dc7-9677-344aa012e994-kube-api-access-v4qmx" (OuterVolumeSpecName: "kube-api-access-v4qmx") pod "4f712673-2484-4dc7-9677-344aa012e994" (UID: "4f712673-2484-4dc7-9677-344aa012e994"). InnerVolumeSpecName "kube-api-access-v4qmx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:13:43 crc kubenswrapper[4558]: I0120 17:13:43.814455 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f712673-2484-4dc7-9677-344aa012e994-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "4f712673-2484-4dc7-9677-344aa012e994" (UID: "4f712673-2484-4dc7-9677-344aa012e994"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:13:43 crc kubenswrapper[4558]: I0120 17:13:43.838009 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f712673-2484-4dc7-9677-344aa012e994-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4f712673-2484-4dc7-9677-344aa012e994" (UID: "4f712673-2484-4dc7-9677-344aa012e994"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:13:43 crc kubenswrapper[4558]: I0120 17:13:43.856565 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f712673-2484-4dc7-9677-344aa012e994-config-data" (OuterVolumeSpecName: "config-data") pod "4f712673-2484-4dc7-9677-344aa012e994" (UID: "4f712673-2484-4dc7-9677-344aa012e994"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:13:43 crc kubenswrapper[4558]: I0120 17:13:43.914602 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4f712673-2484-4dc7-9677-344aa012e994-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:43 crc kubenswrapper[4558]: I0120 17:13:43.914646 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f712673-2484-4dc7-9677-344aa012e994-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:43 crc kubenswrapper[4558]: I0120 17:13:43.914658 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v4qmx\" (UniqueName: \"kubernetes.io/projected/4f712673-2484-4dc7-9677-344aa012e994-kube-api-access-v4qmx\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:43 crc kubenswrapper[4558]: I0120 17:13:43.914672 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f712673-2484-4dc7-9677-344aa012e994-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:44 crc kubenswrapper[4558]: I0120 17:13:44.126539 4558 generic.go:334] "Generic (PLEG): container finished" podID="4f712673-2484-4dc7-9677-344aa012e994" containerID="4a4f4bedefc86803f964f405689218285f7aa370ef9d7b89f4551921d9e7ab94" exitCode=0 Jan 20 17:13:44 crc kubenswrapper[4558]: I0120 17:13:44.126840 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-9f6d49bb-klwm8" event={"ID":"4f712673-2484-4dc7-9677-344aa012e994","Type":"ContainerDied","Data":"4a4f4bedefc86803f964f405689218285f7aa370ef9d7b89f4551921d9e7ab94"} Jan 20 17:13:44 crc kubenswrapper[4558]: I0120 17:13:44.126870 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-9f6d49bb-klwm8" event={"ID":"4f712673-2484-4dc7-9677-344aa012e994","Type":"ContainerDied","Data":"742766fa00ccaa0569089ca1a686d86515119aea8175ecee023ecbfff810c290"} Jan 20 17:13:44 crc kubenswrapper[4558]: I0120 17:13:44.126886 4558 scope.go:117] "RemoveContainer" containerID="4a4f4bedefc86803f964f405689218285f7aa370ef9d7b89f4551921d9e7ab94" Jan 20 17:13:44 crc kubenswrapper[4558]: I0120 17:13:44.127012 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-api-9f6d49bb-klwm8" Jan 20 17:13:44 crc kubenswrapper[4558]: I0120 17:13:44.137134 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-smvr5" podUID="9946343f-cd2f-4a21-8422-a13633a8aa60" containerName="registry-server" containerID="cri-o://f0579634518c6d59bd2bf9e349ea4107d6dbb68c173cff7418ff1ddfdf10b1e8" gracePeriod=2 Jan 20 17:13:44 crc kubenswrapper[4558]: I0120 17:13:44.137187 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/watcher-api-0" event={"ID":"0a49fae8-fd33-4a50-b967-e8e8cc48731a","Type":"ContainerStarted","Data":"f92d43e68f181ffa640787e9d41e5040d775ba561a1cacb4acbc03876430f695"} Jan 20 17:13:44 crc kubenswrapper[4558]: I0120 17:13:44.137235 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/watcher-api-0" event={"ID":"0a49fae8-fd33-4a50-b967-e8e8cc48731a","Type":"ContainerStarted","Data":"8d223b6be945d7320575fde68951e2dcf11d7174379c849a669c1180f13b9071"} Jan 20 17:13:44 crc kubenswrapper[4558]: I0120 17:13:44.138043 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/watcher-api-0" Jan 20 17:13:44 crc kubenswrapper[4558]: I0120 17:13:44.161970 4558 scope.go:117] "RemoveContainer" containerID="15337f336c8fb92e0fbc69b63d63f594d01aa962d1ec2bfd37c990066c99f695" Jan 20 17:13:44 crc kubenswrapper[4558]: I0120 17:13:44.187335 4558 scope.go:117] "RemoveContainer" containerID="4a4f4bedefc86803f964f405689218285f7aa370ef9d7b89f4551921d9e7ab94" Jan 20 17:13:44 crc kubenswrapper[4558]: E0120 17:13:44.187667 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4a4f4bedefc86803f964f405689218285f7aa370ef9d7b89f4551921d9e7ab94\": container with ID starting with 4a4f4bedefc86803f964f405689218285f7aa370ef9d7b89f4551921d9e7ab94 not found: ID does not exist" containerID="4a4f4bedefc86803f964f405689218285f7aa370ef9d7b89f4551921d9e7ab94" Jan 20 17:13:44 crc kubenswrapper[4558]: I0120 17:13:44.187701 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4a4f4bedefc86803f964f405689218285f7aa370ef9d7b89f4551921d9e7ab94"} err="failed to get container status \"4a4f4bedefc86803f964f405689218285f7aa370ef9d7b89f4551921d9e7ab94\": rpc error: code = NotFound desc = could not find container \"4a4f4bedefc86803f964f405689218285f7aa370ef9d7b89f4551921d9e7ab94\": container with ID starting with 4a4f4bedefc86803f964f405689218285f7aa370ef9d7b89f4551921d9e7ab94 not found: ID does not exist" Jan 20 17:13:44 crc kubenswrapper[4558]: I0120 17:13:44.187721 4558 scope.go:117] "RemoveContainer" containerID="15337f336c8fb92e0fbc69b63d63f594d01aa962d1ec2bfd37c990066c99f695" Jan 20 17:13:44 crc kubenswrapper[4558]: E0120 17:13:44.188076 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"15337f336c8fb92e0fbc69b63d63f594d01aa962d1ec2bfd37c990066c99f695\": container with ID starting with 15337f336c8fb92e0fbc69b63d63f594d01aa962d1ec2bfd37c990066c99f695 not found: ID does not exist" containerID="15337f336c8fb92e0fbc69b63d63f594d01aa962d1ec2bfd37c990066c99f695" Jan 20 17:13:44 crc kubenswrapper[4558]: I0120 17:13:44.188109 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"15337f336c8fb92e0fbc69b63d63f594d01aa962d1ec2bfd37c990066c99f695"} err="failed 
to get container status \"15337f336c8fb92e0fbc69b63d63f594d01aa962d1ec2bfd37c990066c99f695\": rpc error: code = NotFound desc = could not find container \"15337f336c8fb92e0fbc69b63d63f594d01aa962d1ec2bfd37c990066c99f695\": container with ID starting with 15337f336c8fb92e0fbc69b63d63f594d01aa962d1ec2bfd37c990066c99f695 not found: ID does not exist" Jan 20 17:13:44 crc kubenswrapper[4558]: I0120 17:13:44.188696 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/watcher-api-0" podStartSLOduration=2.188677434 podStartE2EDuration="2.188677434s" podCreationTimestamp="2026-01-20 17:13:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:13:44.174819965 +0000 UTC m=+1917.935157923" watchObservedRunningTime="2026-01-20 17:13:44.188677434 +0000 UTC m=+1917.949015400" Jan 20 17:13:44 crc kubenswrapper[4558]: I0120 17:13:44.199224 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-api-9f6d49bb-klwm8"] Jan 20 17:13:44 crc kubenswrapper[4558]: I0120 17:13:44.205010 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-api-9f6d49bb-klwm8"] Jan 20 17:13:44 crc kubenswrapper[4558]: I0120 17:13:44.562474 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-smvr5" Jan 20 17:13:44 crc kubenswrapper[4558]: I0120 17:13:44.576247 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4f712673-2484-4dc7-9677-344aa012e994" path="/var/lib/kubelet/pods/4f712673-2484-4dc7-9677-344aa012e994/volumes" Jan 20 17:13:44 crc kubenswrapper[4558]: I0120 17:13:44.740957 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jsbcn\" (UniqueName: \"kubernetes.io/projected/9946343f-cd2f-4a21-8422-a13633a8aa60-kube-api-access-jsbcn\") pod \"9946343f-cd2f-4a21-8422-a13633a8aa60\" (UID: \"9946343f-cd2f-4a21-8422-a13633a8aa60\") " Jan 20 17:13:44 crc kubenswrapper[4558]: I0120 17:13:44.741382 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9946343f-cd2f-4a21-8422-a13633a8aa60-utilities\") pod \"9946343f-cd2f-4a21-8422-a13633a8aa60\" (UID: \"9946343f-cd2f-4a21-8422-a13633a8aa60\") " Jan 20 17:13:44 crc kubenswrapper[4558]: I0120 17:13:44.741428 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9946343f-cd2f-4a21-8422-a13633a8aa60-catalog-content\") pod \"9946343f-cd2f-4a21-8422-a13633a8aa60\" (UID: \"9946343f-cd2f-4a21-8422-a13633a8aa60\") " Jan 20 17:13:44 crc kubenswrapper[4558]: I0120 17:13:44.743307 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9946343f-cd2f-4a21-8422-a13633a8aa60-utilities" (OuterVolumeSpecName: "utilities") pod "9946343f-cd2f-4a21-8422-a13633a8aa60" (UID: "9946343f-cd2f-4a21-8422-a13633a8aa60"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:13:44 crc kubenswrapper[4558]: I0120 17:13:44.750030 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9946343f-cd2f-4a21-8422-a13633a8aa60-kube-api-access-jsbcn" (OuterVolumeSpecName: "kube-api-access-jsbcn") pod "9946343f-cd2f-4a21-8422-a13633a8aa60" (UID: "9946343f-cd2f-4a21-8422-a13633a8aa60"). InnerVolumeSpecName "kube-api-access-jsbcn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:13:44 crc kubenswrapper[4558]: I0120 17:13:44.785127 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9946343f-cd2f-4a21-8422-a13633a8aa60-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9946343f-cd2f-4a21-8422-a13633a8aa60" (UID: "9946343f-cd2f-4a21-8422-a13633a8aa60"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:13:44 crc kubenswrapper[4558]: I0120 17:13:44.843409 4558 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9946343f-cd2f-4a21-8422-a13633a8aa60-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:44 crc kubenswrapper[4558]: I0120 17:13:44.843447 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jsbcn\" (UniqueName: \"kubernetes.io/projected/9946343f-cd2f-4a21-8422-a13633a8aa60-kube-api-access-jsbcn\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:44 crc kubenswrapper[4558]: I0120 17:13:44.843460 4558 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9946343f-cd2f-4a21-8422-a13633a8aa60-utilities\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:45 crc kubenswrapper[4558]: I0120 17:13:45.148052 4558 generic.go:334] "Generic (PLEG): container finished" podID="9946343f-cd2f-4a21-8422-a13633a8aa60" containerID="f0579634518c6d59bd2bf9e349ea4107d6dbb68c173cff7418ff1ddfdf10b1e8" exitCode=0 Jan 20 17:13:45 crc kubenswrapper[4558]: I0120 17:13:45.148152 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-smvr5" Jan 20 17:13:45 crc kubenswrapper[4558]: I0120 17:13:45.148141 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-smvr5" event={"ID":"9946343f-cd2f-4a21-8422-a13633a8aa60","Type":"ContainerDied","Data":"f0579634518c6d59bd2bf9e349ea4107d6dbb68c173cff7418ff1ddfdf10b1e8"} Jan 20 17:13:45 crc kubenswrapper[4558]: I0120 17:13:45.148456 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-smvr5" event={"ID":"9946343f-cd2f-4a21-8422-a13633a8aa60","Type":"ContainerDied","Data":"4641d33d1404e0dd6e8a024560ab1fd23960e0c378669ce29ab26ff7f4c2027f"} Jan 20 17:13:45 crc kubenswrapper[4558]: I0120 17:13:45.148491 4558 scope.go:117] "RemoveContainer" containerID="f0579634518c6d59bd2bf9e349ea4107d6dbb68c173cff7418ff1ddfdf10b1e8" Jan 20 17:13:45 crc kubenswrapper[4558]: I0120 17:13:45.183157 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-smvr5"] Jan 20 17:13:45 crc kubenswrapper[4558]: I0120 17:13:45.184635 4558 scope.go:117] "RemoveContainer" containerID="121ffaa692f9f1cedab3f39ed7fca18eeee1c049698fd8e9569dd63a7b3371dc" Jan 20 17:13:45 crc kubenswrapper[4558]: I0120 17:13:45.194974 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-smvr5"] Jan 20 17:13:45 crc kubenswrapper[4558]: I0120 17:13:45.208285 4558 scope.go:117] "RemoveContainer" containerID="9c47e972aebd64566110e65f6a0d8c4b41eb4130c83a7641e09bdd5b4000c880" Jan 20 17:13:45 crc kubenswrapper[4558]: I0120 17:13:45.236739 4558 scope.go:117] "RemoveContainer" containerID="f0579634518c6d59bd2bf9e349ea4107d6dbb68c173cff7418ff1ddfdf10b1e8" Jan 20 17:13:45 crc kubenswrapper[4558]: E0120 17:13:45.237100 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f0579634518c6d59bd2bf9e349ea4107d6dbb68c173cff7418ff1ddfdf10b1e8\": container with ID starting with f0579634518c6d59bd2bf9e349ea4107d6dbb68c173cff7418ff1ddfdf10b1e8 not found: ID does not exist" containerID="f0579634518c6d59bd2bf9e349ea4107d6dbb68c173cff7418ff1ddfdf10b1e8" Jan 20 17:13:45 crc kubenswrapper[4558]: I0120 17:13:45.237139 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f0579634518c6d59bd2bf9e349ea4107d6dbb68c173cff7418ff1ddfdf10b1e8"} err="failed to get container status \"f0579634518c6d59bd2bf9e349ea4107d6dbb68c173cff7418ff1ddfdf10b1e8\": rpc error: code = NotFound desc = could not find container \"f0579634518c6d59bd2bf9e349ea4107d6dbb68c173cff7418ff1ddfdf10b1e8\": container with ID starting with f0579634518c6d59bd2bf9e349ea4107d6dbb68c173cff7418ff1ddfdf10b1e8 not found: ID does not exist" Jan 20 17:13:45 crc kubenswrapper[4558]: I0120 17:13:45.237184 4558 scope.go:117] "RemoveContainer" containerID="121ffaa692f9f1cedab3f39ed7fca18eeee1c049698fd8e9569dd63a7b3371dc" Jan 20 17:13:45 crc kubenswrapper[4558]: E0120 17:13:45.237475 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"121ffaa692f9f1cedab3f39ed7fca18eeee1c049698fd8e9569dd63a7b3371dc\": container with ID starting with 121ffaa692f9f1cedab3f39ed7fca18eeee1c049698fd8e9569dd63a7b3371dc not found: ID does not exist" containerID="121ffaa692f9f1cedab3f39ed7fca18eeee1c049698fd8e9569dd63a7b3371dc" Jan 20 17:13:45 crc kubenswrapper[4558]: I0120 17:13:45.237529 4558 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"121ffaa692f9f1cedab3f39ed7fca18eeee1c049698fd8e9569dd63a7b3371dc"} err="failed to get container status \"121ffaa692f9f1cedab3f39ed7fca18eeee1c049698fd8e9569dd63a7b3371dc\": rpc error: code = NotFound desc = could not find container \"121ffaa692f9f1cedab3f39ed7fca18eeee1c049698fd8e9569dd63a7b3371dc\": container with ID starting with 121ffaa692f9f1cedab3f39ed7fca18eeee1c049698fd8e9569dd63a7b3371dc not found: ID does not exist" Jan 20 17:13:45 crc kubenswrapper[4558]: I0120 17:13:45.237552 4558 scope.go:117] "RemoveContainer" containerID="9c47e972aebd64566110e65f6a0d8c4b41eb4130c83a7641e09bdd5b4000c880" Jan 20 17:13:45 crc kubenswrapper[4558]: E0120 17:13:45.237859 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9c47e972aebd64566110e65f6a0d8c4b41eb4130c83a7641e09bdd5b4000c880\": container with ID starting with 9c47e972aebd64566110e65f6a0d8c4b41eb4130c83a7641e09bdd5b4000c880 not found: ID does not exist" containerID="9c47e972aebd64566110e65f6a0d8c4b41eb4130c83a7641e09bdd5b4000c880" Jan 20 17:13:45 crc kubenswrapper[4558]: I0120 17:13:45.237883 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9c47e972aebd64566110e65f6a0d8c4b41eb4130c83a7641e09bdd5b4000c880"} err="failed to get container status \"9c47e972aebd64566110e65f6a0d8c4b41eb4130c83a7641e09bdd5b4000c880\": rpc error: code = NotFound desc = could not find container \"9c47e972aebd64566110e65f6a0d8c4b41eb4130c83a7641e09bdd5b4000c880\": container with ID starting with 9c47e972aebd64566110e65f6a0d8c4b41eb4130c83a7641e09bdd5b4000c880 not found: ID does not exist" Jan 20 17:13:45 crc kubenswrapper[4558]: I0120 17:13:45.979474 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/watcher-api-0" Jan 20 17:13:46 crc kubenswrapper[4558]: I0120 17:13:46.582840 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9946343f-cd2f-4a21-8422-a13633a8aa60" path="/var/lib/kubelet/pods/9946343f-cd2f-4a21-8422-a13633a8aa60/volumes" Jan 20 17:13:47 crc kubenswrapper[4558]: I0120 17:13:47.490177 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/watcher-api-0" Jan 20 17:13:47 crc kubenswrapper[4558]: I0120 17:13:47.946771 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:13:52 crc kubenswrapper[4558]: I0120 17:13:52.237850 4558 generic.go:334] "Generic (PLEG): container finished" podID="52844554-ecdd-40db-ae62-0f0de3d9030d" containerID="43b7757fe757d67b5cf9c9c9b7f1b9c7fa72b3c8b9da6c03be8b5de7cdde4efb" exitCode=137 Jan 20 17:13:52 crc kubenswrapper[4558]: I0120 17:13:52.237934 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"52844554-ecdd-40db-ae62-0f0de3d9030d","Type":"ContainerDied","Data":"43b7757fe757d67b5cf9c9c9b7f1b9c7fa72b3c8b9da6c03be8b5de7cdde4efb"} Jan 20 17:13:52 crc kubenswrapper[4558]: I0120 17:13:52.490467 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/watcher-api-0" Jan 20 17:13:52 crc kubenswrapper[4558]: I0120 17:13:52.501140 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/watcher-api-0" Jan 20 17:13:52 crc kubenswrapper[4558]: I0120 17:13:52.540706 4558 util.go:48] 
"No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:13:52 crc kubenswrapper[4558]: I0120 17:13:52.621491 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/placement-7ddd44bcf8-9tj9c" Jan 20 17:13:52 crc kubenswrapper[4558]: I0120 17:13:52.622461 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/placement-7ddd44bcf8-9tj9c" Jan 20 17:13:52 crc kubenswrapper[4558]: I0120 17:13:52.704698 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52844554-ecdd-40db-ae62-0f0de3d9030d-combined-ca-bundle\") pod \"52844554-ecdd-40db-ae62-0f0de3d9030d\" (UID: \"52844554-ecdd-40db-ae62-0f0de3d9030d\") " Jan 20 17:13:52 crc kubenswrapper[4558]: I0120 17:13:52.704825 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/52844554-ecdd-40db-ae62-0f0de3d9030d-sg-core-conf-yaml\") pod \"52844554-ecdd-40db-ae62-0f0de3d9030d\" (UID: \"52844554-ecdd-40db-ae62-0f0de3d9030d\") " Jan 20 17:13:52 crc kubenswrapper[4558]: I0120 17:13:52.704944 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/52844554-ecdd-40db-ae62-0f0de3d9030d-run-httpd\") pod \"52844554-ecdd-40db-ae62-0f0de3d9030d\" (UID: \"52844554-ecdd-40db-ae62-0f0de3d9030d\") " Jan 20 17:13:52 crc kubenswrapper[4558]: I0120 17:13:52.704992 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4kzx\" (UniqueName: \"kubernetes.io/projected/52844554-ecdd-40db-ae62-0f0de3d9030d-kube-api-access-w4kzx\") pod \"52844554-ecdd-40db-ae62-0f0de3d9030d\" (UID: \"52844554-ecdd-40db-ae62-0f0de3d9030d\") " Jan 20 17:13:52 crc kubenswrapper[4558]: I0120 17:13:52.705133 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/52844554-ecdd-40db-ae62-0f0de3d9030d-scripts\") pod \"52844554-ecdd-40db-ae62-0f0de3d9030d\" (UID: \"52844554-ecdd-40db-ae62-0f0de3d9030d\") " Jan 20 17:13:52 crc kubenswrapper[4558]: I0120 17:13:52.705273 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/52844554-ecdd-40db-ae62-0f0de3d9030d-log-httpd\") pod \"52844554-ecdd-40db-ae62-0f0de3d9030d\" (UID: \"52844554-ecdd-40db-ae62-0f0de3d9030d\") " Jan 20 17:13:52 crc kubenswrapper[4558]: I0120 17:13:52.705386 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/52844554-ecdd-40db-ae62-0f0de3d9030d-config-data\") pod \"52844554-ecdd-40db-ae62-0f0de3d9030d\" (UID: \"52844554-ecdd-40db-ae62-0f0de3d9030d\") " Jan 20 17:13:52 crc kubenswrapper[4558]: I0120 17:13:52.707123 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/52844554-ecdd-40db-ae62-0f0de3d9030d-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "52844554-ecdd-40db-ae62-0f0de3d9030d" (UID: "52844554-ecdd-40db-ae62-0f0de3d9030d"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:13:52 crc kubenswrapper[4558]: I0120 17:13:52.707707 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/52844554-ecdd-40db-ae62-0f0de3d9030d-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "52844554-ecdd-40db-ae62-0f0de3d9030d" (UID: "52844554-ecdd-40db-ae62-0f0de3d9030d"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:13:52 crc kubenswrapper[4558]: I0120 17:13:52.713185 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/52844554-ecdd-40db-ae62-0f0de3d9030d-scripts" (OuterVolumeSpecName: "scripts") pod "52844554-ecdd-40db-ae62-0f0de3d9030d" (UID: "52844554-ecdd-40db-ae62-0f0de3d9030d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:13:52 crc kubenswrapper[4558]: I0120 17:13:52.714917 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/52844554-ecdd-40db-ae62-0f0de3d9030d-kube-api-access-w4kzx" (OuterVolumeSpecName: "kube-api-access-w4kzx") pod "52844554-ecdd-40db-ae62-0f0de3d9030d" (UID: "52844554-ecdd-40db-ae62-0f0de3d9030d"). InnerVolumeSpecName "kube-api-access-w4kzx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:13:52 crc kubenswrapper[4558]: I0120 17:13:52.745884 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/52844554-ecdd-40db-ae62-0f0de3d9030d-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "52844554-ecdd-40db-ae62-0f0de3d9030d" (UID: "52844554-ecdd-40db-ae62-0f0de3d9030d"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:13:52 crc kubenswrapper[4558]: I0120 17:13:52.789131 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/52844554-ecdd-40db-ae62-0f0de3d9030d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "52844554-ecdd-40db-ae62-0f0de3d9030d" (UID: "52844554-ecdd-40db-ae62-0f0de3d9030d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:13:52 crc kubenswrapper[4558]: I0120 17:13:52.790968 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/52844554-ecdd-40db-ae62-0f0de3d9030d-config-data" (OuterVolumeSpecName: "config-data") pod "52844554-ecdd-40db-ae62-0f0de3d9030d" (UID: "52844554-ecdd-40db-ae62-0f0de3d9030d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:13:52 crc kubenswrapper[4558]: I0120 17:13:52.810349 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52844554-ecdd-40db-ae62-0f0de3d9030d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:52 crc kubenswrapper[4558]: I0120 17:13:52.810385 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/52844554-ecdd-40db-ae62-0f0de3d9030d-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:52 crc kubenswrapper[4558]: I0120 17:13:52.810400 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/52844554-ecdd-40db-ae62-0f0de3d9030d-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:52 crc kubenswrapper[4558]: I0120 17:13:52.810412 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4kzx\" (UniqueName: \"kubernetes.io/projected/52844554-ecdd-40db-ae62-0f0de3d9030d-kube-api-access-w4kzx\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:52 crc kubenswrapper[4558]: I0120 17:13:52.810435 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/52844554-ecdd-40db-ae62-0f0de3d9030d-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:52 crc kubenswrapper[4558]: I0120 17:13:52.810447 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/52844554-ecdd-40db-ae62-0f0de3d9030d-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:52 crc kubenswrapper[4558]: I0120 17:13:52.810457 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/52844554-ecdd-40db-ae62-0f0de3d9030d-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:13:53 crc kubenswrapper[4558]: I0120 17:13:53.251391 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"52844554-ecdd-40db-ae62-0f0de3d9030d","Type":"ContainerDied","Data":"e6c3ae37797a69bb34636f1b18613265125f49fbdbcefbbcfb29be74a7d67a71"} Jan 20 17:13:53 crc kubenswrapper[4558]: I0120 17:13:53.252402 4558 scope.go:117] "RemoveContainer" containerID="43b7757fe757d67b5cf9c9c9b7f1b9c7fa72b3c8b9da6c03be8b5de7cdde4efb" Jan 20 17:13:53 crc kubenswrapper[4558]: I0120 17:13:53.251460 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:13:53 crc kubenswrapper[4558]: I0120 17:13:53.259267 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/watcher-api-0" Jan 20 17:13:53 crc kubenswrapper[4558]: I0120 17:13:53.301527 4558 scope.go:117] "RemoveContainer" containerID="6d036b93fa5cf012470faed7d76baa620b68e49cfc21d8f4c3db9b864a1baeed" Jan 20 17:13:53 crc kubenswrapper[4558]: I0120 17:13:53.325047 4558 scope.go:117] "RemoveContainer" containerID="217dca2679c63dd7846ef65d423f3eea95f3f3ffc7c680a0e1fc9edda8767cd7" Jan 20 17:13:53 crc kubenswrapper[4558]: I0120 17:13:53.335809 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:13:53 crc kubenswrapper[4558]: I0120 17:13:53.357796 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:13:53 crc kubenswrapper[4558]: I0120 17:13:53.358653 4558 scope.go:117] "RemoveContainer" containerID="3c009e1d5d872ae0cc303819626a3fcbec01907270a323804faaa0a2fe958dd6" Jan 20 17:13:53 crc kubenswrapper[4558]: I0120 17:13:53.375706 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:13:53 crc kubenswrapper[4558]: E0120 17:13:53.376233 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f712673-2484-4dc7-9677-344aa012e994" containerName="barbican-api" Jan 20 17:13:53 crc kubenswrapper[4558]: I0120 17:13:53.376251 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f712673-2484-4dc7-9677-344aa012e994" containerName="barbican-api" Jan 20 17:13:53 crc kubenswrapper[4558]: E0120 17:13:53.376261 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52844554-ecdd-40db-ae62-0f0de3d9030d" containerName="proxy-httpd" Jan 20 17:13:53 crc kubenswrapper[4558]: I0120 17:13:53.376266 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="52844554-ecdd-40db-ae62-0f0de3d9030d" containerName="proxy-httpd" Jan 20 17:13:53 crc kubenswrapper[4558]: E0120 17:13:53.376274 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52844554-ecdd-40db-ae62-0f0de3d9030d" containerName="sg-core" Jan 20 17:13:53 crc kubenswrapper[4558]: I0120 17:13:53.376280 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="52844554-ecdd-40db-ae62-0f0de3d9030d" containerName="sg-core" Jan 20 17:13:53 crc kubenswrapper[4558]: E0120 17:13:53.376298 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52844554-ecdd-40db-ae62-0f0de3d9030d" containerName="ceilometer-notification-agent" Jan 20 17:13:53 crc kubenswrapper[4558]: I0120 17:13:53.376303 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="52844554-ecdd-40db-ae62-0f0de3d9030d" containerName="ceilometer-notification-agent" Jan 20 17:13:53 crc kubenswrapper[4558]: E0120 17:13:53.376312 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9946343f-cd2f-4a21-8422-a13633a8aa60" containerName="extract-content" Jan 20 17:13:53 crc kubenswrapper[4558]: I0120 17:13:53.376317 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9946343f-cd2f-4a21-8422-a13633a8aa60" containerName="extract-content" Jan 20 17:13:53 crc kubenswrapper[4558]: E0120 17:13:53.376332 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9946343f-cd2f-4a21-8422-a13633a8aa60" containerName="registry-server" Jan 20 17:13:53 crc kubenswrapper[4558]: I0120 17:13:53.376337 4558 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="9946343f-cd2f-4a21-8422-a13633a8aa60" containerName="registry-server" Jan 20 17:13:53 crc kubenswrapper[4558]: E0120 17:13:53.376351 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9946343f-cd2f-4a21-8422-a13633a8aa60" containerName="extract-utilities" Jan 20 17:13:53 crc kubenswrapper[4558]: I0120 17:13:53.376357 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9946343f-cd2f-4a21-8422-a13633a8aa60" containerName="extract-utilities" Jan 20 17:13:53 crc kubenswrapper[4558]: E0120 17:13:53.376368 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52844554-ecdd-40db-ae62-0f0de3d9030d" containerName="ceilometer-central-agent" Jan 20 17:13:53 crc kubenswrapper[4558]: I0120 17:13:53.376373 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="52844554-ecdd-40db-ae62-0f0de3d9030d" containerName="ceilometer-central-agent" Jan 20 17:13:53 crc kubenswrapper[4558]: E0120 17:13:53.376383 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f712673-2484-4dc7-9677-344aa012e994" containerName="barbican-api-log" Jan 20 17:13:53 crc kubenswrapper[4558]: I0120 17:13:53.376388 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f712673-2484-4dc7-9677-344aa012e994" containerName="barbican-api-log" Jan 20 17:13:53 crc kubenswrapper[4558]: I0120 17:13:53.376578 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="52844554-ecdd-40db-ae62-0f0de3d9030d" containerName="proxy-httpd" Jan 20 17:13:53 crc kubenswrapper[4558]: I0120 17:13:53.376595 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="52844554-ecdd-40db-ae62-0f0de3d9030d" containerName="ceilometer-notification-agent" Jan 20 17:13:53 crc kubenswrapper[4558]: I0120 17:13:53.376602 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f712673-2484-4dc7-9677-344aa012e994" containerName="barbican-api-log" Jan 20 17:13:53 crc kubenswrapper[4558]: I0120 17:13:53.376609 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f712673-2484-4dc7-9677-344aa012e994" containerName="barbican-api" Jan 20 17:13:53 crc kubenswrapper[4558]: I0120 17:13:53.376618 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="9946343f-cd2f-4a21-8422-a13633a8aa60" containerName="registry-server" Jan 20 17:13:53 crc kubenswrapper[4558]: I0120 17:13:53.376625 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="52844554-ecdd-40db-ae62-0f0de3d9030d" containerName="ceilometer-central-agent" Jan 20 17:13:53 crc kubenswrapper[4558]: I0120 17:13:53.376639 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="52844554-ecdd-40db-ae62-0f0de3d9030d" containerName="sg-core" Jan 20 17:13:53 crc kubenswrapper[4558]: I0120 17:13:53.378266 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:13:53 crc kubenswrapper[4558]: I0120 17:13:53.380985 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:13:53 crc kubenswrapper[4558]: I0120 17:13:53.381182 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:13:53 crc kubenswrapper[4558]: I0120 17:13:53.383033 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:13:53 crc kubenswrapper[4558]: I0120 17:13:53.533318 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/85ccfc1a-2f90-48fc-81c7-6a56f816b4ed-scripts\") pod \"ceilometer-0\" (UID: \"85ccfc1a-2f90-48fc-81c7-6a56f816b4ed\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:13:53 crc kubenswrapper[4558]: I0120 17:13:53.533376 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/85ccfc1a-2f90-48fc-81c7-6a56f816b4ed-log-httpd\") pod \"ceilometer-0\" (UID: \"85ccfc1a-2f90-48fc-81c7-6a56f816b4ed\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:13:53 crc kubenswrapper[4558]: I0120 17:13:53.533526 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85ccfc1a-2f90-48fc-81c7-6a56f816b4ed-config-data\") pod \"ceilometer-0\" (UID: \"85ccfc1a-2f90-48fc-81c7-6a56f816b4ed\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:13:53 crc kubenswrapper[4558]: I0120 17:13:53.533573 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/85ccfc1a-2f90-48fc-81c7-6a56f816b4ed-run-httpd\") pod \"ceilometer-0\" (UID: \"85ccfc1a-2f90-48fc-81c7-6a56f816b4ed\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:13:53 crc kubenswrapper[4558]: I0120 17:13:53.533743 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/85ccfc1a-2f90-48fc-81c7-6a56f816b4ed-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"85ccfc1a-2f90-48fc-81c7-6a56f816b4ed\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:13:53 crc kubenswrapper[4558]: I0120 17:13:53.533811 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jmkms\" (UniqueName: \"kubernetes.io/projected/85ccfc1a-2f90-48fc-81c7-6a56f816b4ed-kube-api-access-jmkms\") pod \"ceilometer-0\" (UID: \"85ccfc1a-2f90-48fc-81c7-6a56f816b4ed\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:13:53 crc kubenswrapper[4558]: I0120 17:13:53.533910 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85ccfc1a-2f90-48fc-81c7-6a56f816b4ed-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"85ccfc1a-2f90-48fc-81c7-6a56f816b4ed\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:13:53 crc kubenswrapper[4558]: I0120 17:13:53.635571 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/85ccfc1a-2f90-48fc-81c7-6a56f816b4ed-scripts\") pod \"ceilometer-0\" (UID: 
\"85ccfc1a-2f90-48fc-81c7-6a56f816b4ed\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:13:53 crc kubenswrapper[4558]: I0120 17:13:53.635614 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/85ccfc1a-2f90-48fc-81c7-6a56f816b4ed-log-httpd\") pod \"ceilometer-0\" (UID: \"85ccfc1a-2f90-48fc-81c7-6a56f816b4ed\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:13:53 crc kubenswrapper[4558]: I0120 17:13:53.635651 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85ccfc1a-2f90-48fc-81c7-6a56f816b4ed-config-data\") pod \"ceilometer-0\" (UID: \"85ccfc1a-2f90-48fc-81c7-6a56f816b4ed\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:13:53 crc kubenswrapper[4558]: I0120 17:13:53.635679 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/85ccfc1a-2f90-48fc-81c7-6a56f816b4ed-run-httpd\") pod \"ceilometer-0\" (UID: \"85ccfc1a-2f90-48fc-81c7-6a56f816b4ed\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:13:53 crc kubenswrapper[4558]: I0120 17:13:53.635732 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/85ccfc1a-2f90-48fc-81c7-6a56f816b4ed-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"85ccfc1a-2f90-48fc-81c7-6a56f816b4ed\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:13:53 crc kubenswrapper[4558]: I0120 17:13:53.635765 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jmkms\" (UniqueName: \"kubernetes.io/projected/85ccfc1a-2f90-48fc-81c7-6a56f816b4ed-kube-api-access-jmkms\") pod \"ceilometer-0\" (UID: \"85ccfc1a-2f90-48fc-81c7-6a56f816b4ed\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:13:53 crc kubenswrapper[4558]: I0120 17:13:53.635809 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85ccfc1a-2f90-48fc-81c7-6a56f816b4ed-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"85ccfc1a-2f90-48fc-81c7-6a56f816b4ed\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:13:53 crc kubenswrapper[4558]: I0120 17:13:53.636068 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/85ccfc1a-2f90-48fc-81c7-6a56f816b4ed-log-httpd\") pod \"ceilometer-0\" (UID: \"85ccfc1a-2f90-48fc-81c7-6a56f816b4ed\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:13:53 crc kubenswrapper[4558]: I0120 17:13:53.636155 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/85ccfc1a-2f90-48fc-81c7-6a56f816b4ed-run-httpd\") pod \"ceilometer-0\" (UID: \"85ccfc1a-2f90-48fc-81c7-6a56f816b4ed\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:13:53 crc kubenswrapper[4558]: I0120 17:13:53.642385 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85ccfc1a-2f90-48fc-81c7-6a56f816b4ed-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"85ccfc1a-2f90-48fc-81c7-6a56f816b4ed\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:13:53 crc kubenswrapper[4558]: I0120 17:13:53.642721 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/85ccfc1a-2f90-48fc-81c7-6a56f816b4ed-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"85ccfc1a-2f90-48fc-81c7-6a56f816b4ed\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:13:53 crc kubenswrapper[4558]: I0120 17:13:53.643024 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/85ccfc1a-2f90-48fc-81c7-6a56f816b4ed-scripts\") pod \"ceilometer-0\" (UID: \"85ccfc1a-2f90-48fc-81c7-6a56f816b4ed\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:13:53 crc kubenswrapper[4558]: I0120 17:13:53.643830 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85ccfc1a-2f90-48fc-81c7-6a56f816b4ed-config-data\") pod \"ceilometer-0\" (UID: \"85ccfc1a-2f90-48fc-81c7-6a56f816b4ed\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:13:53 crc kubenswrapper[4558]: I0120 17:13:53.651292 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jmkms\" (UniqueName: \"kubernetes.io/projected/85ccfc1a-2f90-48fc-81c7-6a56f816b4ed-kube-api-access-jmkms\") pod \"ceilometer-0\" (UID: \"85ccfc1a-2f90-48fc-81c7-6a56f816b4ed\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:13:53 crc kubenswrapper[4558]: I0120 17:13:53.705511 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:13:54 crc kubenswrapper[4558]: W0120 17:13:54.112061 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod85ccfc1a_2f90_48fc_81c7_6a56f816b4ed.slice/crio-031df42e2864f3a5e32570e02e5a1a5875f94081db947196a64d3e379b055d0f WatchSource:0}: Error finding container 031df42e2864f3a5e32570e02e5a1a5875f94081db947196a64d3e379b055d0f: Status 404 returned error can't find the container with id 031df42e2864f3a5e32570e02e5a1a5875f94081db947196a64d3e379b055d0f Jan 20 17:13:54 crc kubenswrapper[4558]: I0120 17:13:54.112204 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:13:54 crc kubenswrapper[4558]: I0120 17:13:54.268254 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"85ccfc1a-2f90-48fc-81c7-6a56f816b4ed","Type":"ContainerStarted","Data":"031df42e2864f3a5e32570e02e5a1a5875f94081db947196a64d3e379b055d0f"} Jan 20 17:13:54 crc kubenswrapper[4558]: I0120 17:13:54.578815 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="52844554-ecdd-40db-ae62-0f0de3d9030d" path="/var/lib/kubelet/pods/52844554-ecdd-40db-ae62-0f0de3d9030d/volumes" Jan 20 17:13:55 crc kubenswrapper[4558]: I0120 17:13:55.282327 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"85ccfc1a-2f90-48fc-81c7-6a56f816b4ed","Type":"ContainerStarted","Data":"c9cf0b697ef3a1cc6cbe7003982635af6caafda258ac84df4b337d918d84a131"} Jan 20 17:13:56 crc kubenswrapper[4558]: I0120 17:13:56.296561 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"85ccfc1a-2f90-48fc-81c7-6a56f816b4ed","Type":"ContainerStarted","Data":"af38c59fc30341345b617fc6baa276bd5d6e1b49bd5c59b8b7cce9ea37599d4e"} Jan 20 17:13:57 crc kubenswrapper[4558]: I0120 17:13:57.310155 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" 
event={"ID":"85ccfc1a-2f90-48fc-81c7-6a56f816b4ed","Type":"ContainerStarted","Data":"b643ef2ff0f6cf5f17fa2342a2837f753dfc80721b4480f65cddb7967ebd6322"} Jan 20 17:13:57 crc kubenswrapper[4558]: I0120 17:13:57.329888 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 17:13:57 crc kubenswrapper[4558]: I0120 17:13:57.330279 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 17:13:58 crc kubenswrapper[4558]: I0120 17:13:58.324639 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"85ccfc1a-2f90-48fc-81c7-6a56f816b4ed","Type":"ContainerStarted","Data":"844551855fe12f90386215175a994158aa4a3a5b9052eeb0b66b37721628b055"} Jan 20 17:13:58 crc kubenswrapper[4558]: I0120 17:13:58.324860 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:13:58 crc kubenswrapper[4558]: I0120 17:13:58.351877 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=1.562799891 podStartE2EDuration="5.351857923s" podCreationTimestamp="2026-01-20 17:13:53 +0000 UTC" firstStartedPulling="2026-01-20 17:13:54.114802815 +0000 UTC m=+1927.875140783" lastFinishedPulling="2026-01-20 17:13:57.903860848 +0000 UTC m=+1931.664198815" observedRunningTime="2026-01-20 17:13:58.342956404 +0000 UTC m=+1932.103294370" watchObservedRunningTime="2026-01-20 17:13:58.351857923 +0000 UTC m=+1932.112195890" Jan 20 17:14:01 crc kubenswrapper[4558]: I0120 17:14:01.229709 4558 scope.go:117] "RemoveContainer" containerID="f4eb3aa9fec9fe4e1edca473d9d52013d4b4d121058769c4a99430648da5242c" Jan 20 17:14:01 crc kubenswrapper[4558]: I0120 17:14:01.261872 4558 scope.go:117] "RemoveContainer" containerID="2401ca3f0a9fa22defdde8ce4f9940679e94f9352ac4844cdba2183eaa8ea52b" Jan 20 17:14:01 crc kubenswrapper[4558]: I0120 17:14:01.289906 4558 scope.go:117] "RemoveContainer" containerID="6b1947119d80650089ad705d253d7d5004d6ff8894f34d16dafbb8da8a02235d" Jan 20 17:14:01 crc kubenswrapper[4558]: I0120 17:14:01.336333 4558 scope.go:117] "RemoveContainer" containerID="cf2885dbcf415dfa13b8eb9cb926c8cf1c4dc26a15550166d7005e724bc40041" Jan 20 17:14:01 crc kubenswrapper[4558]: I0120 17:14:01.357896 4558 scope.go:117] "RemoveContainer" containerID="d11163804592e684099fa46ac620c517657e03a9172c6a38e1115514a3db22fa" Jan 20 17:14:01 crc kubenswrapper[4558]: I0120 17:14:01.380714 4558 scope.go:117] "RemoveContainer" containerID="be8c70d94caaadd4e739ca16f688e2a0b03fc39521354a36341ffe6bf26f22c9" Jan 20 17:14:01 crc kubenswrapper[4558]: I0120 17:14:01.415441 4558 scope.go:117] "RemoveContainer" containerID="397dc1ad776ba3a8529bc116085603412a742263395cc5fe26f66ac424ae8a4f" Jan 20 17:14:01 crc kubenswrapper[4558]: I0120 17:14:01.442005 4558 scope.go:117] "RemoveContainer" containerID="0376c421f7190fd015b8795150f665866c227b4ec1aa2af3ff3ba419848ca268" Jan 20 17:14:01 crc kubenswrapper[4558]: I0120 17:14:01.452813 4558 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openstack-kuttl-tests/neutron-57ff5684f-lrvnz" Jan 20 17:14:01 crc kubenswrapper[4558]: I0120 17:14:01.481682 4558 scope.go:117] "RemoveContainer" containerID="22729bdda56e8d2c20ee63a4a6fb57799ef450e9e4e5a8468b37e26fe04a78b5" Jan 20 17:14:01 crc kubenswrapper[4558]: I0120 17:14:01.524807 4558 scope.go:117] "RemoveContainer" containerID="32427dea9b72d979ea2f99684179f56d767ac269c899e70e736d6c6ed049f52c" Jan 20 17:14:01 crc kubenswrapper[4558]: I0120 17:14:01.542959 4558 scope.go:117] "RemoveContainer" containerID="29cc24c38e89d551c6feb9ca7e8c3ccf2dba9bb46a4cf839f1e0d5911675a5fb" Jan 20 17:14:01 crc kubenswrapper[4558]: I0120 17:14:01.555762 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/keystone-57f6f664bd-7h7jh" Jan 20 17:14:01 crc kubenswrapper[4558]: I0120 17:14:01.560459 4558 scope.go:117] "RemoveContainer" containerID="941423064fa6ba0069e6771da6e9512b3e450564237394653ec2888a52642246" Jan 20 17:14:01 crc kubenswrapper[4558]: I0120 17:14:01.596369 4558 scope.go:117] "RemoveContainer" containerID="b5598fdef3344f2fd0700ea5b28128a663e40fc58f479d51a5d3dd26322c1fa5" Jan 20 17:14:01 crc kubenswrapper[4558]: I0120 17:14:01.625141 4558 scope.go:117] "RemoveContainer" containerID="59a7305eb2948167d417b047936332c60a3a27b73ade2362490b6ee4221588fc" Jan 20 17:14:01 crc kubenswrapper[4558]: I0120 17:14:01.641728 4558 scope.go:117] "RemoveContainer" containerID="9a3c955d5b11e9518eab345cc1f4a24a125317c2b69b6e6b2070a5417c63906f" Jan 20 17:14:01 crc kubenswrapper[4558]: I0120 17:14:01.670992 4558 scope.go:117] "RemoveContainer" containerID="0a6a7ebe77f730e7953cf0ae10557aacb2780f78536cbdeb6ab122242e514827" Jan 20 17:14:01 crc kubenswrapper[4558]: I0120 17:14:01.700752 4558 scope.go:117] "RemoveContainer" containerID="a1feb2942ef8ec9035f46ccb949b28abf9ee6144ba754bbfc58cb16aa1930cc0" Jan 20 17:14:01 crc kubenswrapper[4558]: I0120 17:14:01.718814 4558 scope.go:117] "RemoveContainer" containerID="742e670ce01715ecca8fb6a5ea852d6067112215a2e930c09ce883d2269c14b7" Jan 20 17:14:01 crc kubenswrapper[4558]: I0120 17:14:01.746960 4558 scope.go:117] "RemoveContainer" containerID="2d30f855d2909e2fba03e13362c8b595118ff00963aa9a93f120e33156d5f97a" Jan 20 17:14:01 crc kubenswrapper[4558]: I0120 17:14:01.762572 4558 scope.go:117] "RemoveContainer" containerID="b00c43b649e5e692c940930fe70e4fd392d52b887827633cf0858cc366463066" Jan 20 17:14:01 crc kubenswrapper[4558]: I0120 17:14:01.786126 4558 scope.go:117] "RemoveContainer" containerID="8465d6a9372429cd9148909a838d7378903121f9ff9f0fb73700c98d620d85b1" Jan 20 17:14:01 crc kubenswrapper[4558]: I0120 17:14:01.808229 4558 scope.go:117] "RemoveContainer" containerID="6d2fb7f79a476206ea9a340648fc10811998e0e677155a9b0fcf13275927b0db" Jan 20 17:14:01 crc kubenswrapper[4558]: I0120 17:14:01.826481 4558 scope.go:117] "RemoveContainer" containerID="f9b4e33892cfb054ac4d9d36cf2ff5895e08ea9f1492b277bd8903e77f17643b" Jan 20 17:14:01 crc kubenswrapper[4558]: I0120 17:14:01.860279 4558 scope.go:117] "RemoveContainer" containerID="d08a36e722fef6d092b220311cda4f15ef2817b2b5440e696ca33c3ddaa6fac8" Jan 20 17:14:01 crc kubenswrapper[4558]: I0120 17:14:01.876968 4558 scope.go:117] "RemoveContainer" containerID="16d31d54d596530bd1cc93b119b9b9cc979a47d6a7dd2c11504f7b06e4e859d5" Jan 20 17:14:02 crc kubenswrapper[4558]: I0120 17:14:02.529525 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:14:02 crc kubenswrapper[4558]: I0120 17:14:02.531233 4558 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstackclient" Jan 20 17:14:02 crc kubenswrapper[4558]: I0120 17:14:02.534112 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-config" Jan 20 17:14:02 crc kubenswrapper[4558]: I0120 17:14:02.535512 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"openstack-config-secret" Jan 20 17:14:02 crc kubenswrapper[4558]: I0120 17:14:02.535767 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"openstackclient-openstackclient-dockercfg-2xl52" Jan 20 17:14:02 crc kubenswrapper[4558]: I0120 17:14:02.541379 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:14:02 crc kubenswrapper[4558]: I0120 17:14:02.614926 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/bad8aeff-00a6-4456-bbdf-937441ed04c0-openstack-config\") pod \"openstackclient\" (UID: \"bad8aeff-00a6-4456-bbdf-937441ed04c0\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:14:02 crc kubenswrapper[4558]: I0120 17:14:02.615008 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/bad8aeff-00a6-4456-bbdf-937441ed04c0-openstack-config-secret\") pod \"openstackclient\" (UID: \"bad8aeff-00a6-4456-bbdf-937441ed04c0\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:14:02 crc kubenswrapper[4558]: I0120 17:14:02.615110 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bad8aeff-00a6-4456-bbdf-937441ed04c0-combined-ca-bundle\") pod \"openstackclient\" (UID: \"bad8aeff-00a6-4456-bbdf-937441ed04c0\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:14:02 crc kubenswrapper[4558]: I0120 17:14:02.615176 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vr595\" (UniqueName: \"kubernetes.io/projected/bad8aeff-00a6-4456-bbdf-937441ed04c0-kube-api-access-vr595\") pod \"openstackclient\" (UID: \"bad8aeff-00a6-4456-bbdf-937441ed04c0\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:14:02 crc kubenswrapper[4558]: I0120 17:14:02.720481 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/bad8aeff-00a6-4456-bbdf-937441ed04c0-openstack-config\") pod \"openstackclient\" (UID: \"bad8aeff-00a6-4456-bbdf-937441ed04c0\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:14:02 crc kubenswrapper[4558]: I0120 17:14:02.720592 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/bad8aeff-00a6-4456-bbdf-937441ed04c0-openstack-config-secret\") pod \"openstackclient\" (UID: \"bad8aeff-00a6-4456-bbdf-937441ed04c0\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:14:02 crc kubenswrapper[4558]: I0120 17:14:02.720704 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bad8aeff-00a6-4456-bbdf-937441ed04c0-combined-ca-bundle\") pod \"openstackclient\" (UID: \"bad8aeff-00a6-4456-bbdf-937441ed04c0\") 
" pod="openstack-kuttl-tests/openstackclient" Jan 20 17:14:02 crc kubenswrapper[4558]: I0120 17:14:02.720804 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vr595\" (UniqueName: \"kubernetes.io/projected/bad8aeff-00a6-4456-bbdf-937441ed04c0-kube-api-access-vr595\") pod \"openstackclient\" (UID: \"bad8aeff-00a6-4456-bbdf-937441ed04c0\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:14:02 crc kubenswrapper[4558]: I0120 17:14:02.721319 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/bad8aeff-00a6-4456-bbdf-937441ed04c0-openstack-config\") pod \"openstackclient\" (UID: \"bad8aeff-00a6-4456-bbdf-937441ed04c0\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:14:02 crc kubenswrapper[4558]: I0120 17:14:02.726869 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bad8aeff-00a6-4456-bbdf-937441ed04c0-combined-ca-bundle\") pod \"openstackclient\" (UID: \"bad8aeff-00a6-4456-bbdf-937441ed04c0\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:14:02 crc kubenswrapper[4558]: I0120 17:14:02.727463 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/bad8aeff-00a6-4456-bbdf-937441ed04c0-openstack-config-secret\") pod \"openstackclient\" (UID: \"bad8aeff-00a6-4456-bbdf-937441ed04c0\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:14:02 crc kubenswrapper[4558]: I0120 17:14:02.733951 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vr595\" (UniqueName: \"kubernetes.io/projected/bad8aeff-00a6-4456-bbdf-937441ed04c0-kube-api-access-vr595\") pod \"openstackclient\" (UID: \"bad8aeff-00a6-4456-bbdf-937441ed04c0\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:14:02 crc kubenswrapper[4558]: I0120 17:14:02.884790 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstackclient" Jan 20 17:14:03 crc kubenswrapper[4558]: I0120 17:14:03.301841 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:14:03 crc kubenswrapper[4558]: I0120 17:14:03.392757 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstackclient" event={"ID":"bad8aeff-00a6-4456-bbdf-937441ed04c0","Type":"ContainerStarted","Data":"8f300e47e02ddfd6beb6e40680bcbdaeea9c55373220e6de13937b8051558738"} Jan 20 17:14:03 crc kubenswrapper[4558]: I0120 17:14:03.765394 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/neutron-857895d98b-n66x9" Jan 20 17:14:03 crc kubenswrapper[4558]: I0120 17:14:03.823981 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-57ff5684f-lrvnz"] Jan 20 17:14:03 crc kubenswrapper[4558]: I0120 17:14:03.824345 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-57ff5684f-lrvnz" podUID="74a94aaa-09bf-40a6-939b-14cf47decbfb" containerName="neutron-api" containerID="cri-o://b3957ac5e71ef7db311880371641f2fdc7bcab0ed8164d5236e8c8bf10f094c9" gracePeriod=30 Jan 20 17:14:03 crc kubenswrapper[4558]: I0120 17:14:03.824335 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-57ff5684f-lrvnz" podUID="74a94aaa-09bf-40a6-939b-14cf47decbfb" containerName="neutron-httpd" containerID="cri-o://3ab945d23aee1932a7b33d3938b543d738b3d8e27e3ce486a815df1a94173535" gracePeriod=30 Jan 20 17:14:04 crc kubenswrapper[4558]: I0120 17:14:04.404212 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstackclient" event={"ID":"bad8aeff-00a6-4456-bbdf-937441ed04c0","Type":"ContainerStarted","Data":"b209eadd3c0946d42cd2fa2ad03f5f87bda4b804b2952272d092f8c6dbcda131"} Jan 20 17:14:04 crc kubenswrapper[4558]: I0120 17:14:04.407655 4558 generic.go:334] "Generic (PLEG): container finished" podID="74a94aaa-09bf-40a6-939b-14cf47decbfb" containerID="3ab945d23aee1932a7b33d3938b543d738b3d8e27e3ce486a815df1a94173535" exitCode=0 Jan 20 17:14:04 crc kubenswrapper[4558]: I0120 17:14:04.407703 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-57ff5684f-lrvnz" event={"ID":"74a94aaa-09bf-40a6-939b-14cf47decbfb","Type":"ContainerDied","Data":"3ab945d23aee1932a7b33d3938b543d738b3d8e27e3ce486a815df1a94173535"} Jan 20 17:14:04 crc kubenswrapper[4558]: I0120 17:14:04.417441 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/openstackclient" podStartSLOduration=2.417416854 podStartE2EDuration="2.417416854s" podCreationTimestamp="2026-01-20 17:14:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:14:04.416601391 +0000 UTC m=+1938.176939359" watchObservedRunningTime="2026-01-20 17:14:04.417416854 +0000 UTC m=+1938.177754821" Jan 20 17:14:05 crc kubenswrapper[4558]: I0120 17:14:05.778329 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:14:05 crc kubenswrapper[4558]: I0120 17:14:05.778638 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="85ccfc1a-2f90-48fc-81c7-6a56f816b4ed" containerName="ceilometer-central-agent" 
containerID="cri-o://c9cf0b697ef3a1cc6cbe7003982635af6caafda258ac84df4b337d918d84a131" gracePeriod=30 Jan 20 17:14:05 crc kubenswrapper[4558]: I0120 17:14:05.778713 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="85ccfc1a-2f90-48fc-81c7-6a56f816b4ed" containerName="proxy-httpd" containerID="cri-o://844551855fe12f90386215175a994158aa4a3a5b9052eeb0b66b37721628b055" gracePeriod=30 Jan 20 17:14:05 crc kubenswrapper[4558]: I0120 17:14:05.778723 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="85ccfc1a-2f90-48fc-81c7-6a56f816b4ed" containerName="ceilometer-notification-agent" containerID="cri-o://af38c59fc30341345b617fc6baa276bd5d6e1b49bd5c59b8b7cce9ea37599d4e" gracePeriod=30 Jan 20 17:14:05 crc kubenswrapper[4558]: I0120 17:14:05.778751 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="85ccfc1a-2f90-48fc-81c7-6a56f816b4ed" containerName="sg-core" containerID="cri-o://b643ef2ff0f6cf5f17fa2342a2837f753dfc80721b4480f65cddb7967ebd6322" gracePeriod=30 Jan 20 17:14:06 crc kubenswrapper[4558]: I0120 17:14:06.365143 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/swift-proxy-7c48456b-8z4pm"] Jan 20 17:14:06 crc kubenswrapper[4558]: I0120 17:14:06.366734 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-proxy-7c48456b-8z4pm" Jan 20 17:14:06 crc kubenswrapper[4558]: I0120 17:14:06.371226 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"swift-proxy-config-data" Jan 20 17:14:06 crc kubenswrapper[4558]: I0120 17:14:06.371334 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-swift-public-svc" Jan 20 17:14:06 crc kubenswrapper[4558]: I0120 17:14:06.371810 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-swift-internal-svc" Jan 20 17:14:06 crc kubenswrapper[4558]: I0120 17:14:06.379224 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-proxy-7c48456b-8z4pm"] Jan 20 17:14:06 crc kubenswrapper[4558]: I0120 17:14:06.396315 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fe030148-30ef-4bb7-bf94-8835a0707df1-config-data\") pod \"swift-proxy-7c48456b-8z4pm\" (UID: \"fe030148-30ef-4bb7-bf94-8835a0707df1\") " pod="openstack-kuttl-tests/swift-proxy-7c48456b-8z4pm" Jan 20 17:14:06 crc kubenswrapper[4558]: I0120 17:14:06.396367 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h7ptp\" (UniqueName: \"kubernetes.io/projected/fe030148-30ef-4bb7-bf94-8835a0707df1-kube-api-access-h7ptp\") pod \"swift-proxy-7c48456b-8z4pm\" (UID: \"fe030148-30ef-4bb7-bf94-8835a0707df1\") " pod="openstack-kuttl-tests/swift-proxy-7c48456b-8z4pm" Jan 20 17:14:06 crc kubenswrapper[4558]: I0120 17:14:06.396391 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fe030148-30ef-4bb7-bf94-8835a0707df1-log-httpd\") pod \"swift-proxy-7c48456b-8z4pm\" (UID: \"fe030148-30ef-4bb7-bf94-8835a0707df1\") " pod="openstack-kuttl-tests/swift-proxy-7c48456b-8z4pm" Jan 20 17:14:06 crc kubenswrapper[4558]: I0120 
17:14:06.396438 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fe030148-30ef-4bb7-bf94-8835a0707df1-run-httpd\") pod \"swift-proxy-7c48456b-8z4pm\" (UID: \"fe030148-30ef-4bb7-bf94-8835a0707df1\") " pod="openstack-kuttl-tests/swift-proxy-7c48456b-8z4pm" Jan 20 17:14:06 crc kubenswrapper[4558]: I0120 17:14:06.396479 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/fe030148-30ef-4bb7-bf94-8835a0707df1-etc-swift\") pod \"swift-proxy-7c48456b-8z4pm\" (UID: \"fe030148-30ef-4bb7-bf94-8835a0707df1\") " pod="openstack-kuttl-tests/swift-proxy-7c48456b-8z4pm" Jan 20 17:14:06 crc kubenswrapper[4558]: I0120 17:14:06.396506 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/fe030148-30ef-4bb7-bf94-8835a0707df1-public-tls-certs\") pod \"swift-proxy-7c48456b-8z4pm\" (UID: \"fe030148-30ef-4bb7-bf94-8835a0707df1\") " pod="openstack-kuttl-tests/swift-proxy-7c48456b-8z4pm" Jan 20 17:14:06 crc kubenswrapper[4558]: I0120 17:14:06.396561 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe030148-30ef-4bb7-bf94-8835a0707df1-combined-ca-bundle\") pod \"swift-proxy-7c48456b-8z4pm\" (UID: \"fe030148-30ef-4bb7-bf94-8835a0707df1\") " pod="openstack-kuttl-tests/swift-proxy-7c48456b-8z4pm" Jan 20 17:14:06 crc kubenswrapper[4558]: I0120 17:14:06.396581 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/fe030148-30ef-4bb7-bf94-8835a0707df1-internal-tls-certs\") pod \"swift-proxy-7c48456b-8z4pm\" (UID: \"fe030148-30ef-4bb7-bf94-8835a0707df1\") " pod="openstack-kuttl-tests/swift-proxy-7c48456b-8z4pm" Jan 20 17:14:06 crc kubenswrapper[4558]: I0120 17:14:06.431283 4558 generic.go:334] "Generic (PLEG): container finished" podID="85ccfc1a-2f90-48fc-81c7-6a56f816b4ed" containerID="844551855fe12f90386215175a994158aa4a3a5b9052eeb0b66b37721628b055" exitCode=0 Jan 20 17:14:06 crc kubenswrapper[4558]: I0120 17:14:06.431318 4558 generic.go:334] "Generic (PLEG): container finished" podID="85ccfc1a-2f90-48fc-81c7-6a56f816b4ed" containerID="b643ef2ff0f6cf5f17fa2342a2837f753dfc80721b4480f65cddb7967ebd6322" exitCode=2 Jan 20 17:14:06 crc kubenswrapper[4558]: I0120 17:14:06.431328 4558 generic.go:334] "Generic (PLEG): container finished" podID="85ccfc1a-2f90-48fc-81c7-6a56f816b4ed" containerID="af38c59fc30341345b617fc6baa276bd5d6e1b49bd5c59b8b7cce9ea37599d4e" exitCode=0 Jan 20 17:14:06 crc kubenswrapper[4558]: I0120 17:14:06.431337 4558 generic.go:334] "Generic (PLEG): container finished" podID="85ccfc1a-2f90-48fc-81c7-6a56f816b4ed" containerID="c9cf0b697ef3a1cc6cbe7003982635af6caafda258ac84df4b337d918d84a131" exitCode=0 Jan 20 17:14:06 crc kubenswrapper[4558]: I0120 17:14:06.431351 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"85ccfc1a-2f90-48fc-81c7-6a56f816b4ed","Type":"ContainerDied","Data":"844551855fe12f90386215175a994158aa4a3a5b9052eeb0b66b37721628b055"} Jan 20 17:14:06 crc kubenswrapper[4558]: I0120 17:14:06.431437 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" 
event={"ID":"85ccfc1a-2f90-48fc-81c7-6a56f816b4ed","Type":"ContainerDied","Data":"b643ef2ff0f6cf5f17fa2342a2837f753dfc80721b4480f65cddb7967ebd6322"} Jan 20 17:14:06 crc kubenswrapper[4558]: I0120 17:14:06.431453 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"85ccfc1a-2f90-48fc-81c7-6a56f816b4ed","Type":"ContainerDied","Data":"af38c59fc30341345b617fc6baa276bd5d6e1b49bd5c59b8b7cce9ea37599d4e"} Jan 20 17:14:06 crc kubenswrapper[4558]: I0120 17:14:06.431463 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"85ccfc1a-2f90-48fc-81c7-6a56f816b4ed","Type":"ContainerDied","Data":"c9cf0b697ef3a1cc6cbe7003982635af6caafda258ac84df4b337d918d84a131"} Jan 20 17:14:06 crc kubenswrapper[4558]: I0120 17:14:06.498687 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe030148-30ef-4bb7-bf94-8835a0707df1-combined-ca-bundle\") pod \"swift-proxy-7c48456b-8z4pm\" (UID: \"fe030148-30ef-4bb7-bf94-8835a0707df1\") " pod="openstack-kuttl-tests/swift-proxy-7c48456b-8z4pm" Jan 20 17:14:06 crc kubenswrapper[4558]: I0120 17:14:06.498855 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/fe030148-30ef-4bb7-bf94-8835a0707df1-internal-tls-certs\") pod \"swift-proxy-7c48456b-8z4pm\" (UID: \"fe030148-30ef-4bb7-bf94-8835a0707df1\") " pod="openstack-kuttl-tests/swift-proxy-7c48456b-8z4pm" Jan 20 17:14:06 crc kubenswrapper[4558]: I0120 17:14:06.499044 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fe030148-30ef-4bb7-bf94-8835a0707df1-config-data\") pod \"swift-proxy-7c48456b-8z4pm\" (UID: \"fe030148-30ef-4bb7-bf94-8835a0707df1\") " pod="openstack-kuttl-tests/swift-proxy-7c48456b-8z4pm" Jan 20 17:14:06 crc kubenswrapper[4558]: I0120 17:14:06.499128 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h7ptp\" (UniqueName: \"kubernetes.io/projected/fe030148-30ef-4bb7-bf94-8835a0707df1-kube-api-access-h7ptp\") pod \"swift-proxy-7c48456b-8z4pm\" (UID: \"fe030148-30ef-4bb7-bf94-8835a0707df1\") " pod="openstack-kuttl-tests/swift-proxy-7c48456b-8z4pm" Jan 20 17:14:06 crc kubenswrapper[4558]: I0120 17:14:06.499157 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fe030148-30ef-4bb7-bf94-8835a0707df1-log-httpd\") pod \"swift-proxy-7c48456b-8z4pm\" (UID: \"fe030148-30ef-4bb7-bf94-8835a0707df1\") " pod="openstack-kuttl-tests/swift-proxy-7c48456b-8z4pm" Jan 20 17:14:06 crc kubenswrapper[4558]: I0120 17:14:06.499246 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fe030148-30ef-4bb7-bf94-8835a0707df1-run-httpd\") pod \"swift-proxy-7c48456b-8z4pm\" (UID: \"fe030148-30ef-4bb7-bf94-8835a0707df1\") " pod="openstack-kuttl-tests/swift-proxy-7c48456b-8z4pm" Jan 20 17:14:06 crc kubenswrapper[4558]: I0120 17:14:06.499332 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/fe030148-30ef-4bb7-bf94-8835a0707df1-etc-swift\") pod \"swift-proxy-7c48456b-8z4pm\" (UID: \"fe030148-30ef-4bb7-bf94-8835a0707df1\") " pod="openstack-kuttl-tests/swift-proxy-7c48456b-8z4pm" Jan 20 17:14:06 crc 
kubenswrapper[4558]: I0120 17:14:06.499400 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/fe030148-30ef-4bb7-bf94-8835a0707df1-public-tls-certs\") pod \"swift-proxy-7c48456b-8z4pm\" (UID: \"fe030148-30ef-4bb7-bf94-8835a0707df1\") " pod="openstack-kuttl-tests/swift-proxy-7c48456b-8z4pm" Jan 20 17:14:06 crc kubenswrapper[4558]: I0120 17:14:06.500894 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fe030148-30ef-4bb7-bf94-8835a0707df1-log-httpd\") pod \"swift-proxy-7c48456b-8z4pm\" (UID: \"fe030148-30ef-4bb7-bf94-8835a0707df1\") " pod="openstack-kuttl-tests/swift-proxy-7c48456b-8z4pm" Jan 20 17:14:06 crc kubenswrapper[4558]: I0120 17:14:06.500943 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fe030148-30ef-4bb7-bf94-8835a0707df1-run-httpd\") pod \"swift-proxy-7c48456b-8z4pm\" (UID: \"fe030148-30ef-4bb7-bf94-8835a0707df1\") " pod="openstack-kuttl-tests/swift-proxy-7c48456b-8z4pm" Jan 20 17:14:06 crc kubenswrapper[4558]: I0120 17:14:06.507116 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/fe030148-30ef-4bb7-bf94-8835a0707df1-public-tls-certs\") pod \"swift-proxy-7c48456b-8z4pm\" (UID: \"fe030148-30ef-4bb7-bf94-8835a0707df1\") " pod="openstack-kuttl-tests/swift-proxy-7c48456b-8z4pm" Jan 20 17:14:06 crc kubenswrapper[4558]: I0120 17:14:06.507143 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/fe030148-30ef-4bb7-bf94-8835a0707df1-internal-tls-certs\") pod \"swift-proxy-7c48456b-8z4pm\" (UID: \"fe030148-30ef-4bb7-bf94-8835a0707df1\") " pod="openstack-kuttl-tests/swift-proxy-7c48456b-8z4pm" Jan 20 17:14:06 crc kubenswrapper[4558]: I0120 17:14:06.508665 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe030148-30ef-4bb7-bf94-8835a0707df1-combined-ca-bundle\") pod \"swift-proxy-7c48456b-8z4pm\" (UID: \"fe030148-30ef-4bb7-bf94-8835a0707df1\") " pod="openstack-kuttl-tests/swift-proxy-7c48456b-8z4pm" Jan 20 17:14:06 crc kubenswrapper[4558]: I0120 17:14:06.510385 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fe030148-30ef-4bb7-bf94-8835a0707df1-config-data\") pod \"swift-proxy-7c48456b-8z4pm\" (UID: \"fe030148-30ef-4bb7-bf94-8835a0707df1\") " pod="openstack-kuttl-tests/swift-proxy-7c48456b-8z4pm" Jan 20 17:14:06 crc kubenswrapper[4558]: I0120 17:14:06.511711 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/fe030148-30ef-4bb7-bf94-8835a0707df1-etc-swift\") pod \"swift-proxy-7c48456b-8z4pm\" (UID: \"fe030148-30ef-4bb7-bf94-8835a0707df1\") " pod="openstack-kuttl-tests/swift-proxy-7c48456b-8z4pm" Jan 20 17:14:06 crc kubenswrapper[4558]: I0120 17:14:06.520535 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h7ptp\" (UniqueName: \"kubernetes.io/projected/fe030148-30ef-4bb7-bf94-8835a0707df1-kube-api-access-h7ptp\") pod \"swift-proxy-7c48456b-8z4pm\" (UID: \"fe030148-30ef-4bb7-bf94-8835a0707df1\") " pod="openstack-kuttl-tests/swift-proxy-7c48456b-8z4pm" Jan 20 17:14:06 crc kubenswrapper[4558]: I0120 17:14:06.685562 4558 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-proxy-7c48456b-8z4pm" Jan 20 17:14:06 crc kubenswrapper[4558]: I0120 17:14:06.795991 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:06 crc kubenswrapper[4558]: I0120 17:14:06.816598 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/85ccfc1a-2f90-48fc-81c7-6a56f816b4ed-run-httpd\") pod \"85ccfc1a-2f90-48fc-81c7-6a56f816b4ed\" (UID: \"85ccfc1a-2f90-48fc-81c7-6a56f816b4ed\") " Jan 20 17:14:06 crc kubenswrapper[4558]: I0120 17:14:06.816763 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jmkms\" (UniqueName: \"kubernetes.io/projected/85ccfc1a-2f90-48fc-81c7-6a56f816b4ed-kube-api-access-jmkms\") pod \"85ccfc1a-2f90-48fc-81c7-6a56f816b4ed\" (UID: \"85ccfc1a-2f90-48fc-81c7-6a56f816b4ed\") " Jan 20 17:14:06 crc kubenswrapper[4558]: I0120 17:14:06.816991 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85ccfc1a-2f90-48fc-81c7-6a56f816b4ed-combined-ca-bundle\") pod \"85ccfc1a-2f90-48fc-81c7-6a56f816b4ed\" (UID: \"85ccfc1a-2f90-48fc-81c7-6a56f816b4ed\") " Jan 20 17:14:06 crc kubenswrapper[4558]: I0120 17:14:06.817041 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/85ccfc1a-2f90-48fc-81c7-6a56f816b4ed-scripts\") pod \"85ccfc1a-2f90-48fc-81c7-6a56f816b4ed\" (UID: \"85ccfc1a-2f90-48fc-81c7-6a56f816b4ed\") " Jan 20 17:14:06 crc kubenswrapper[4558]: I0120 17:14:06.817126 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/85ccfc1a-2f90-48fc-81c7-6a56f816b4ed-sg-core-conf-yaml\") pod \"85ccfc1a-2f90-48fc-81c7-6a56f816b4ed\" (UID: \"85ccfc1a-2f90-48fc-81c7-6a56f816b4ed\") " Jan 20 17:14:06 crc kubenswrapper[4558]: I0120 17:14:06.817159 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/85ccfc1a-2f90-48fc-81c7-6a56f816b4ed-log-httpd\") pod \"85ccfc1a-2f90-48fc-81c7-6a56f816b4ed\" (UID: \"85ccfc1a-2f90-48fc-81c7-6a56f816b4ed\") " Jan 20 17:14:06 crc kubenswrapper[4558]: I0120 17:14:06.817207 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85ccfc1a-2f90-48fc-81c7-6a56f816b4ed-config-data\") pod \"85ccfc1a-2f90-48fc-81c7-6a56f816b4ed\" (UID: \"85ccfc1a-2f90-48fc-81c7-6a56f816b4ed\") " Jan 20 17:14:06 crc kubenswrapper[4558]: I0120 17:14:06.822515 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/85ccfc1a-2f90-48fc-81c7-6a56f816b4ed-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "85ccfc1a-2f90-48fc-81c7-6a56f816b4ed" (UID: "85ccfc1a-2f90-48fc-81c7-6a56f816b4ed"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:14:06 crc kubenswrapper[4558]: I0120 17:14:06.822984 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/85ccfc1a-2f90-48fc-81c7-6a56f816b4ed-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "85ccfc1a-2f90-48fc-81c7-6a56f816b4ed" (UID: "85ccfc1a-2f90-48fc-81c7-6a56f816b4ed"). 
InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:14:06 crc kubenswrapper[4558]: I0120 17:14:06.828487 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/85ccfc1a-2f90-48fc-81c7-6a56f816b4ed-kube-api-access-jmkms" (OuterVolumeSpecName: "kube-api-access-jmkms") pod "85ccfc1a-2f90-48fc-81c7-6a56f816b4ed" (UID: "85ccfc1a-2f90-48fc-81c7-6a56f816b4ed"). InnerVolumeSpecName "kube-api-access-jmkms". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:14:06 crc kubenswrapper[4558]: I0120 17:14:06.835610 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/85ccfc1a-2f90-48fc-81c7-6a56f816b4ed-scripts" (OuterVolumeSpecName: "scripts") pod "85ccfc1a-2f90-48fc-81c7-6a56f816b4ed" (UID: "85ccfc1a-2f90-48fc-81c7-6a56f816b4ed"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:14:06 crc kubenswrapper[4558]: I0120 17:14:06.854293 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/85ccfc1a-2f90-48fc-81c7-6a56f816b4ed-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "85ccfc1a-2f90-48fc-81c7-6a56f816b4ed" (UID: "85ccfc1a-2f90-48fc-81c7-6a56f816b4ed"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:14:06 crc kubenswrapper[4558]: I0120 17:14:06.887332 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/85ccfc1a-2f90-48fc-81c7-6a56f816b4ed-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "85ccfc1a-2f90-48fc-81c7-6a56f816b4ed" (UID: "85ccfc1a-2f90-48fc-81c7-6a56f816b4ed"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:14:06 crc kubenswrapper[4558]: I0120 17:14:06.919296 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/85ccfc1a-2f90-48fc-81c7-6a56f816b4ed-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:06 crc kubenswrapper[4558]: I0120 17:14:06.919319 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jmkms\" (UniqueName: \"kubernetes.io/projected/85ccfc1a-2f90-48fc-81c7-6a56f816b4ed-kube-api-access-jmkms\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:06 crc kubenswrapper[4558]: I0120 17:14:06.919331 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85ccfc1a-2f90-48fc-81c7-6a56f816b4ed-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:06 crc kubenswrapper[4558]: I0120 17:14:06.919341 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/85ccfc1a-2f90-48fc-81c7-6a56f816b4ed-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:06 crc kubenswrapper[4558]: I0120 17:14:06.919348 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/85ccfc1a-2f90-48fc-81c7-6a56f816b4ed-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:06 crc kubenswrapper[4558]: I0120 17:14:06.919357 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/85ccfc1a-2f90-48fc-81c7-6a56f816b4ed-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:06 crc kubenswrapper[4558]: I0120 17:14:06.939049 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/85ccfc1a-2f90-48fc-81c7-6a56f816b4ed-config-data" (OuterVolumeSpecName: "config-data") pod "85ccfc1a-2f90-48fc-81c7-6a56f816b4ed" (UID: "85ccfc1a-2f90-48fc-81c7-6a56f816b4ed"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:14:07 crc kubenswrapper[4558]: I0120 17:14:07.027899 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85ccfc1a-2f90-48fc-81c7-6a56f816b4ed-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:07 crc kubenswrapper[4558]: I0120 17:14:07.141475 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-proxy-7c48456b-8z4pm"] Jan 20 17:14:07 crc kubenswrapper[4558]: I0120 17:14:07.448978 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"85ccfc1a-2f90-48fc-81c7-6a56f816b4ed","Type":"ContainerDied","Data":"031df42e2864f3a5e32570e02e5a1a5875f94081db947196a64d3e379b055d0f"} Jan 20 17:14:07 crc kubenswrapper[4558]: I0120 17:14:07.449425 4558 scope.go:117] "RemoveContainer" containerID="844551855fe12f90386215175a994158aa4a3a5b9052eeb0b66b37721628b055" Jan 20 17:14:07 crc kubenswrapper[4558]: I0120 17:14:07.449651 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:07 crc kubenswrapper[4558]: I0120 17:14:07.457223 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-7c48456b-8z4pm" event={"ID":"fe030148-30ef-4bb7-bf94-8835a0707df1","Type":"ContainerStarted","Data":"e3f4b05578fbd81c020b9ad602df15fafd661cd2db06f53de10dce624927e791"} Jan 20 17:14:07 crc kubenswrapper[4558]: I0120 17:14:07.457396 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-7c48456b-8z4pm" event={"ID":"fe030148-30ef-4bb7-bf94-8835a0707df1","Type":"ContainerStarted","Data":"e10a8df049a111763b05a5a1434dadcfda33fdd06b015a788e15885a841c4323"} Jan 20 17:14:07 crc kubenswrapper[4558]: I0120 17:14:07.499305 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:14:07 crc kubenswrapper[4558]: I0120 17:14:07.503279 4558 scope.go:117] "RemoveContainer" containerID="b643ef2ff0f6cf5f17fa2342a2837f753dfc80721b4480f65cddb7967ebd6322" Jan 20 17:14:07 crc kubenswrapper[4558]: I0120 17:14:07.514455 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:14:07 crc kubenswrapper[4558]: I0120 17:14:07.518584 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:14:07 crc kubenswrapper[4558]: E0120 17:14:07.518913 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85ccfc1a-2f90-48fc-81c7-6a56f816b4ed" containerName="sg-core" Jan 20 17:14:07 crc kubenswrapper[4558]: I0120 17:14:07.518930 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="85ccfc1a-2f90-48fc-81c7-6a56f816b4ed" containerName="sg-core" Jan 20 17:14:07 crc kubenswrapper[4558]: E0120 17:14:07.518950 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85ccfc1a-2f90-48fc-81c7-6a56f816b4ed" containerName="ceilometer-notification-agent" Jan 20 17:14:07 crc kubenswrapper[4558]: I0120 17:14:07.518956 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="85ccfc1a-2f90-48fc-81c7-6a56f816b4ed" containerName="ceilometer-notification-agent" Jan 20 17:14:07 crc kubenswrapper[4558]: E0120 17:14:07.518973 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85ccfc1a-2f90-48fc-81c7-6a56f816b4ed" containerName="ceilometer-central-agent" Jan 20 17:14:07 crc kubenswrapper[4558]: I0120 17:14:07.518980 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="85ccfc1a-2f90-48fc-81c7-6a56f816b4ed" containerName="ceilometer-central-agent" Jan 20 17:14:07 crc kubenswrapper[4558]: E0120 17:14:07.519005 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85ccfc1a-2f90-48fc-81c7-6a56f816b4ed" containerName="proxy-httpd" Jan 20 17:14:07 crc kubenswrapper[4558]: I0120 17:14:07.519012 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="85ccfc1a-2f90-48fc-81c7-6a56f816b4ed" containerName="proxy-httpd" Jan 20 17:14:07 crc kubenswrapper[4558]: I0120 17:14:07.519178 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="85ccfc1a-2f90-48fc-81c7-6a56f816b4ed" containerName="proxy-httpd" Jan 20 17:14:07 crc kubenswrapper[4558]: I0120 17:14:07.519195 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="85ccfc1a-2f90-48fc-81c7-6a56f816b4ed" containerName="sg-core" Jan 20 17:14:07 crc kubenswrapper[4558]: I0120 17:14:07.519206 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="85ccfc1a-2f90-48fc-81c7-6a56f816b4ed" 
containerName="ceilometer-central-agent" Jan 20 17:14:07 crc kubenswrapper[4558]: I0120 17:14:07.519216 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="85ccfc1a-2f90-48fc-81c7-6a56f816b4ed" containerName="ceilometer-notification-agent" Jan 20 17:14:07 crc kubenswrapper[4558]: I0120 17:14:07.520758 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:07 crc kubenswrapper[4558]: I0120 17:14:07.526326 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:14:07 crc kubenswrapper[4558]: I0120 17:14:07.542503 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:14:07 crc kubenswrapper[4558]: I0120 17:14:07.542722 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:14:07 crc kubenswrapper[4558]: I0120 17:14:07.563232 4558 scope.go:117] "RemoveContainer" containerID="af38c59fc30341345b617fc6baa276bd5d6e1b49bd5c59b8b7cce9ea37599d4e" Jan 20 17:14:07 crc kubenswrapper[4558]: I0120 17:14:07.593412 4558 scope.go:117] "RemoveContainer" containerID="c9cf0b697ef3a1cc6cbe7003982635af6caafda258ac84df4b337d918d84a131" Jan 20 17:14:07 crc kubenswrapper[4558]: I0120 17:14:07.642790 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab-log-httpd\") pod \"ceilometer-0\" (UID: \"d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:07 crc kubenswrapper[4558]: I0120 17:14:07.642841 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab-config-data\") pod \"ceilometer-0\" (UID: \"d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:07 crc kubenswrapper[4558]: I0120 17:14:07.642869 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab-run-httpd\") pod \"ceilometer-0\" (UID: \"d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:07 crc kubenswrapper[4558]: I0120 17:14:07.642932 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab-scripts\") pod \"ceilometer-0\" (UID: \"d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:07 crc kubenswrapper[4558]: I0120 17:14:07.642954 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:07 crc kubenswrapper[4558]: I0120 17:14:07.643009 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: 
\"d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:07 crc kubenswrapper[4558]: I0120 17:14:07.643031 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hntbs\" (UniqueName: \"kubernetes.io/projected/d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab-kube-api-access-hntbs\") pod \"ceilometer-0\" (UID: \"d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:07 crc kubenswrapper[4558]: I0120 17:14:07.745536 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab-config-data\") pod \"ceilometer-0\" (UID: \"d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:07 crc kubenswrapper[4558]: I0120 17:14:07.745578 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab-log-httpd\") pod \"ceilometer-0\" (UID: \"d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:07 crc kubenswrapper[4558]: I0120 17:14:07.745614 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab-run-httpd\") pod \"ceilometer-0\" (UID: \"d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:07 crc kubenswrapper[4558]: I0120 17:14:07.745733 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:07 crc kubenswrapper[4558]: I0120 17:14:07.745752 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab-scripts\") pod \"ceilometer-0\" (UID: \"d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:07 crc kubenswrapper[4558]: I0120 17:14:07.745831 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:07 crc kubenswrapper[4558]: I0120 17:14:07.745861 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hntbs\" (UniqueName: \"kubernetes.io/projected/d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab-kube-api-access-hntbs\") pod \"ceilometer-0\" (UID: \"d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:07 crc kubenswrapper[4558]: I0120 17:14:07.746110 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab-log-httpd\") pod \"ceilometer-0\" (UID: \"d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:07 crc kubenswrapper[4558]: I0120 17:14:07.746301 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" 
(UniqueName: \"kubernetes.io/empty-dir/d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab-run-httpd\") pod \"ceilometer-0\" (UID: \"d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:07 crc kubenswrapper[4558]: I0120 17:14:07.749738 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab-scripts\") pod \"ceilometer-0\" (UID: \"d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:07 crc kubenswrapper[4558]: I0120 17:14:07.749804 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:07 crc kubenswrapper[4558]: I0120 17:14:07.750197 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:07 crc kubenswrapper[4558]: I0120 17:14:07.751768 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab-config-data\") pod \"ceilometer-0\" (UID: \"d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:07 crc kubenswrapper[4558]: I0120 17:14:07.762189 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hntbs\" (UniqueName: \"kubernetes.io/projected/d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab-kube-api-access-hntbs\") pod \"ceilometer-0\" (UID: \"d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:07 crc kubenswrapper[4558]: I0120 17:14:07.842157 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:08 crc kubenswrapper[4558]: I0120 17:14:08.260454 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:14:08 crc kubenswrapper[4558]: I0120 17:14:08.261235 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="c061507a-294f-453b-be5d-4fdca19c790c" containerName="glance-log" containerID="cri-o://1a5915eb633ad5a17f2153955cceb387e146bddc311a76f35f78e2b62cdc3b6e" gracePeriod=30 Jan 20 17:14:08 crc kubenswrapper[4558]: I0120 17:14:08.261582 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="c061507a-294f-453b-be5d-4fdca19c790c" containerName="glance-httpd" containerID="cri-o://22a2a926f869e6446bdf888e1ee153d4bcc61395a9b6875ebbb63ec5af2731bc" gracePeriod=30 Jan 20 17:14:08 crc kubenswrapper[4558]: I0120 17:14:08.303717 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:14:08 crc kubenswrapper[4558]: I0120 17:14:08.345964 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-57ff5684f-lrvnz" Jan 20 17:14:08 crc kubenswrapper[4558]: I0120 17:14:08.464535 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/74a94aaa-09bf-40a6-939b-14cf47decbfb-ovndb-tls-certs\") pod \"74a94aaa-09bf-40a6-939b-14cf47decbfb\" (UID: \"74a94aaa-09bf-40a6-939b-14cf47decbfb\") " Jan 20 17:14:08 crc kubenswrapper[4558]: I0120 17:14:08.464616 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/74a94aaa-09bf-40a6-939b-14cf47decbfb-config\") pod \"74a94aaa-09bf-40a6-939b-14cf47decbfb\" (UID: \"74a94aaa-09bf-40a6-939b-14cf47decbfb\") " Jan 20 17:14:08 crc kubenswrapper[4558]: I0120 17:14:08.464759 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-79cxp\" (UniqueName: \"kubernetes.io/projected/74a94aaa-09bf-40a6-939b-14cf47decbfb-kube-api-access-79cxp\") pod \"74a94aaa-09bf-40a6-939b-14cf47decbfb\" (UID: \"74a94aaa-09bf-40a6-939b-14cf47decbfb\") " Jan 20 17:14:08 crc kubenswrapper[4558]: I0120 17:14:08.464946 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/74a94aaa-09bf-40a6-939b-14cf47decbfb-httpd-config\") pod \"74a94aaa-09bf-40a6-939b-14cf47decbfb\" (UID: \"74a94aaa-09bf-40a6-939b-14cf47decbfb\") " Jan 20 17:14:08 crc kubenswrapper[4558]: I0120 17:14:08.465030 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/74a94aaa-09bf-40a6-939b-14cf47decbfb-combined-ca-bundle\") pod \"74a94aaa-09bf-40a6-939b-14cf47decbfb\" (UID: \"74a94aaa-09bf-40a6-939b-14cf47decbfb\") " Jan 20 17:14:08 crc kubenswrapper[4558]: I0120 17:14:08.470675 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/74a94aaa-09bf-40a6-939b-14cf47decbfb-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "74a94aaa-09bf-40a6-939b-14cf47decbfb" (UID: "74a94aaa-09bf-40a6-939b-14cf47decbfb"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:14:08 crc kubenswrapper[4558]: I0120 17:14:08.471030 4558 generic.go:334] "Generic (PLEG): container finished" podID="c061507a-294f-453b-be5d-4fdca19c790c" containerID="1a5915eb633ad5a17f2153955cceb387e146bddc311a76f35f78e2b62cdc3b6e" exitCode=143 Jan 20 17:14:08 crc kubenswrapper[4558]: I0120 17:14:08.471474 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"c061507a-294f-453b-be5d-4fdca19c790c","Type":"ContainerDied","Data":"1a5915eb633ad5a17f2153955cceb387e146bddc311a76f35f78e2b62cdc3b6e"} Jan 20 17:14:08 crc kubenswrapper[4558]: I0120 17:14:08.471698 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/74a94aaa-09bf-40a6-939b-14cf47decbfb-kube-api-access-79cxp" (OuterVolumeSpecName: "kube-api-access-79cxp") pod "74a94aaa-09bf-40a6-939b-14cf47decbfb" (UID: "74a94aaa-09bf-40a6-939b-14cf47decbfb"). InnerVolumeSpecName "kube-api-access-79cxp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:14:08 crc kubenswrapper[4558]: I0120 17:14:08.475001 4558 generic.go:334] "Generic (PLEG): container finished" podID="74a94aaa-09bf-40a6-939b-14cf47decbfb" containerID="b3957ac5e71ef7db311880371641f2fdc7bcab0ed8164d5236e8c8bf10f094c9" exitCode=0 Jan 20 17:14:08 crc kubenswrapper[4558]: I0120 17:14:08.475081 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-57ff5684f-lrvnz" event={"ID":"74a94aaa-09bf-40a6-939b-14cf47decbfb","Type":"ContainerDied","Data":"b3957ac5e71ef7db311880371641f2fdc7bcab0ed8164d5236e8c8bf10f094c9"} Jan 20 17:14:08 crc kubenswrapper[4558]: I0120 17:14:08.475130 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-57ff5684f-lrvnz" Jan 20 17:14:08 crc kubenswrapper[4558]: I0120 17:14:08.475144 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-57ff5684f-lrvnz" event={"ID":"74a94aaa-09bf-40a6-939b-14cf47decbfb","Type":"ContainerDied","Data":"12ebfb93d71f9739efd05242bab18ae4e145132aec048fa8002727c1b17ad130"} Jan 20 17:14:08 crc kubenswrapper[4558]: I0120 17:14:08.475203 4558 scope.go:117] "RemoveContainer" containerID="3ab945d23aee1932a7b33d3938b543d738b3d8e27e3ce486a815df1a94173535" Jan 20 17:14:08 crc kubenswrapper[4558]: I0120 17:14:08.479897 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-7c48456b-8z4pm" event={"ID":"fe030148-30ef-4bb7-bf94-8835a0707df1","Type":"ContainerStarted","Data":"690711c809e51e7b043bae2985f6866cfd82e22825c519971e1f62af4518288a"} Jan 20 17:14:08 crc kubenswrapper[4558]: I0120 17:14:08.481493 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/swift-proxy-7c48456b-8z4pm" Jan 20 17:14:08 crc kubenswrapper[4558]: I0120 17:14:08.481554 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/swift-proxy-7c48456b-8z4pm" Jan 20 17:14:08 crc kubenswrapper[4558]: I0120 17:14:08.484128 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab","Type":"ContainerStarted","Data":"e1fea547f03a94e234b0198a2f534f2d876d172b6a486d40ed8c94ef0540eb90"} Jan 20 17:14:08 crc kubenswrapper[4558]: I0120 17:14:08.519226 4558 scope.go:117] "RemoveContainer" containerID="b3957ac5e71ef7db311880371641f2fdc7bcab0ed8164d5236e8c8bf10f094c9" Jan 20 17:14:08 crc kubenswrapper[4558]: I0120 17:14:08.521660 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/swift-proxy-7c48456b-8z4pm" podStartSLOduration=2.521259142 podStartE2EDuration="2.521259142s" podCreationTimestamp="2026-01-20 17:14:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:14:08.510007003 +0000 UTC m=+1942.270344990" watchObservedRunningTime="2026-01-20 17:14:08.521259142 +0000 UTC m=+1942.281597110" Jan 20 17:14:08 crc kubenswrapper[4558]: I0120 17:14:08.562319 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/74a94aaa-09bf-40a6-939b-14cf47decbfb-config" (OuterVolumeSpecName: "config") pod "74a94aaa-09bf-40a6-939b-14cf47decbfb" (UID: "74a94aaa-09bf-40a6-939b-14cf47decbfb"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:14:08 crc kubenswrapper[4558]: I0120 17:14:08.562334 4558 scope.go:117] "RemoveContainer" containerID="3ab945d23aee1932a7b33d3938b543d738b3d8e27e3ce486a815df1a94173535" Jan 20 17:14:08 crc kubenswrapper[4558]: E0120 17:14:08.566252 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3ab945d23aee1932a7b33d3938b543d738b3d8e27e3ce486a815df1a94173535\": container with ID starting with 3ab945d23aee1932a7b33d3938b543d738b3d8e27e3ce486a815df1a94173535 not found: ID does not exist" containerID="3ab945d23aee1932a7b33d3938b543d738b3d8e27e3ce486a815df1a94173535" Jan 20 17:14:08 crc kubenswrapper[4558]: I0120 17:14:08.566293 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3ab945d23aee1932a7b33d3938b543d738b3d8e27e3ce486a815df1a94173535"} err="failed to get container status \"3ab945d23aee1932a7b33d3938b543d738b3d8e27e3ce486a815df1a94173535\": rpc error: code = NotFound desc = could not find container \"3ab945d23aee1932a7b33d3938b543d738b3d8e27e3ce486a815df1a94173535\": container with ID starting with 3ab945d23aee1932a7b33d3938b543d738b3d8e27e3ce486a815df1a94173535 not found: ID does not exist" Jan 20 17:14:08 crc kubenswrapper[4558]: I0120 17:14:08.566314 4558 scope.go:117] "RemoveContainer" containerID="b3957ac5e71ef7db311880371641f2fdc7bcab0ed8164d5236e8c8bf10f094c9" Jan 20 17:14:08 crc kubenswrapper[4558]: E0120 17:14:08.567563 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b3957ac5e71ef7db311880371641f2fdc7bcab0ed8164d5236e8c8bf10f094c9\": container with ID starting with b3957ac5e71ef7db311880371641f2fdc7bcab0ed8164d5236e8c8bf10f094c9 not found: ID does not exist" containerID="b3957ac5e71ef7db311880371641f2fdc7bcab0ed8164d5236e8c8bf10f094c9" Jan 20 17:14:08 crc kubenswrapper[4558]: I0120 17:14:08.567585 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b3957ac5e71ef7db311880371641f2fdc7bcab0ed8164d5236e8c8bf10f094c9"} err="failed to get container status \"b3957ac5e71ef7db311880371641f2fdc7bcab0ed8164d5236e8c8bf10f094c9\": rpc error: code = NotFound desc = could not find container \"b3957ac5e71ef7db311880371641f2fdc7bcab0ed8164d5236e8c8bf10f094c9\": container with ID starting with b3957ac5e71ef7db311880371641f2fdc7bcab0ed8164d5236e8c8bf10f094c9 not found: ID does not exist" Jan 20 17:14:08 crc kubenswrapper[4558]: I0120 17:14:08.572911 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/74a94aaa-09bf-40a6-939b-14cf47decbfb-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:08 crc kubenswrapper[4558]: I0120 17:14:08.572939 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-79cxp\" (UniqueName: \"kubernetes.io/projected/74a94aaa-09bf-40a6-939b-14cf47decbfb-kube-api-access-79cxp\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:08 crc kubenswrapper[4558]: I0120 17:14:08.572949 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/74a94aaa-09bf-40a6-939b-14cf47decbfb-httpd-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:08 crc kubenswrapper[4558]: I0120 17:14:08.584005 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/74a94aaa-09bf-40a6-939b-14cf47decbfb-combined-ca-bundle" 
(OuterVolumeSpecName: "combined-ca-bundle") pod "74a94aaa-09bf-40a6-939b-14cf47decbfb" (UID: "74a94aaa-09bf-40a6-939b-14cf47decbfb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:14:08 crc kubenswrapper[4558]: I0120 17:14:08.597026 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="85ccfc1a-2f90-48fc-81c7-6a56f816b4ed" path="/var/lib/kubelet/pods/85ccfc1a-2f90-48fc-81c7-6a56f816b4ed/volumes" Jan 20 17:14:08 crc kubenswrapper[4558]: I0120 17:14:08.614277 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/74a94aaa-09bf-40a6-939b-14cf47decbfb-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "74a94aaa-09bf-40a6-939b-14cf47decbfb" (UID: "74a94aaa-09bf-40a6-939b-14cf47decbfb"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:14:08 crc kubenswrapper[4558]: I0120 17:14:08.675261 4558 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/74a94aaa-09bf-40a6-939b-14cf47decbfb-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:08 crc kubenswrapper[4558]: I0120 17:14:08.675309 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/74a94aaa-09bf-40a6-939b-14cf47decbfb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:08 crc kubenswrapper[4558]: I0120 17:14:08.800453 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-57ff5684f-lrvnz"] Jan 20 17:14:08 crc kubenswrapper[4558]: I0120 17:14:08.806057 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-57ff5684f-lrvnz"] Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.075810 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-db-create-zhtn8"] Jan 20 17:14:09 crc kubenswrapper[4558]: E0120 17:14:09.076399 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74a94aaa-09bf-40a6-939b-14cf47decbfb" containerName="neutron-httpd" Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.076418 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="74a94aaa-09bf-40a6-939b-14cf47decbfb" containerName="neutron-httpd" Jan 20 17:14:09 crc kubenswrapper[4558]: E0120 17:14:09.077180 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74a94aaa-09bf-40a6-939b-14cf47decbfb" containerName="neutron-api" Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.077200 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="74a94aaa-09bf-40a6-939b-14cf47decbfb" containerName="neutron-api" Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.077528 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="74a94aaa-09bf-40a6-939b-14cf47decbfb" containerName="neutron-api" Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.077545 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="74a94aaa-09bf-40a6-939b-14cf47decbfb" containerName="neutron-httpd" Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.079351 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-db-create-zhtn8" Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.096100 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-db-create-zhtn8"] Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.126935 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-542c-account-create-update-szpdm"] Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.128479 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-542c-account-create-update-szpdm" Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.132611 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-api-db-secret" Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.153594 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-542c-account-create-update-szpdm"] Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.197433 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6748d231-22b7-4a78-b3ab-945673683907-operator-scripts\") pod \"nova-api-db-create-zhtn8\" (UID: \"6748d231-22b7-4a78-b3ab-945673683907\") " pod="openstack-kuttl-tests/nova-api-db-create-zhtn8" Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.198556 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fknw6\" (UniqueName: \"kubernetes.io/projected/daf7e77a-752b-41c8-8b3d-ac04bfb5fb0d-kube-api-access-fknw6\") pod \"nova-api-542c-account-create-update-szpdm\" (UID: \"daf7e77a-752b-41c8-8b3d-ac04bfb5fb0d\") " pod="openstack-kuttl-tests/nova-api-542c-account-create-update-szpdm" Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.198750 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m4d2p\" (UniqueName: \"kubernetes.io/projected/6748d231-22b7-4a78-b3ab-945673683907-kube-api-access-m4d2p\") pod \"nova-api-db-create-zhtn8\" (UID: \"6748d231-22b7-4a78-b3ab-945673683907\") " pod="openstack-kuttl-tests/nova-api-db-create-zhtn8" Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.198859 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/daf7e77a-752b-41c8-8b3d-ac04bfb5fb0d-operator-scripts\") pod \"nova-api-542c-account-create-update-szpdm\" (UID: \"daf7e77a-752b-41c8-8b3d-ac04bfb5fb0d\") " pod="openstack-kuttl-tests/nova-api-542c-account-create-update-szpdm" Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.210611 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.210892 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb" containerName="glance-log" containerID="cri-o://37c69cdd0c87e5dab78bf80037c40a035a8f38600230f3ea65b93bc33964dc4e" gracePeriod=30 Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.210963 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb" 
containerName="glance-httpd" containerID="cri-o://2345422c3ac145fd1d8e65d12769cbcb80d89d096669fb577251421566912341" gracePeriod=30 Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.263410 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell0-db-create-f8nhx"] Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.264797 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-db-create-f8nhx" Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.275158 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-db-create-f8nhx"] Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.283553 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell0-531b-account-create-update-994sx"] Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.285114 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-531b-account-create-update-994sx" Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.287812 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-db-secret" Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.301224 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m4d2p\" (UniqueName: \"kubernetes.io/projected/6748d231-22b7-4a78-b3ab-945673683907-kube-api-access-m4d2p\") pod \"nova-api-db-create-zhtn8\" (UID: \"6748d231-22b7-4a78-b3ab-945673683907\") " pod="openstack-kuttl-tests/nova-api-db-create-zhtn8" Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.301347 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/daf7e77a-752b-41c8-8b3d-ac04bfb5fb0d-operator-scripts\") pod \"nova-api-542c-account-create-update-szpdm\" (UID: \"daf7e77a-752b-41c8-8b3d-ac04bfb5fb0d\") " pod="openstack-kuttl-tests/nova-api-542c-account-create-update-szpdm" Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.301450 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6748d231-22b7-4a78-b3ab-945673683907-operator-scripts\") pod \"nova-api-db-create-zhtn8\" (UID: \"6748d231-22b7-4a78-b3ab-945673683907\") " pod="openstack-kuttl-tests/nova-api-db-create-zhtn8" Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.301538 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fknw6\" (UniqueName: \"kubernetes.io/projected/daf7e77a-752b-41c8-8b3d-ac04bfb5fb0d-kube-api-access-fknw6\") pod \"nova-api-542c-account-create-update-szpdm\" (UID: \"daf7e77a-752b-41c8-8b3d-ac04bfb5fb0d\") " pod="openstack-kuttl-tests/nova-api-542c-account-create-update-szpdm" Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.302951 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6748d231-22b7-4a78-b3ab-945673683907-operator-scripts\") pod \"nova-api-db-create-zhtn8\" (UID: \"6748d231-22b7-4a78-b3ab-945673683907\") " pod="openstack-kuttl-tests/nova-api-db-create-zhtn8" Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.303083 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/daf7e77a-752b-41c8-8b3d-ac04bfb5fb0d-operator-scripts\") pod \"nova-api-542c-account-create-update-szpdm\" (UID: \"daf7e77a-752b-41c8-8b3d-ac04bfb5fb0d\") " pod="openstack-kuttl-tests/nova-api-542c-account-create-update-szpdm" Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.314561 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-531b-account-create-update-994sx"] Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.329028 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fknw6\" (UniqueName: \"kubernetes.io/projected/daf7e77a-752b-41c8-8b3d-ac04bfb5fb0d-kube-api-access-fknw6\") pod \"nova-api-542c-account-create-update-szpdm\" (UID: \"daf7e77a-752b-41c8-8b3d-ac04bfb5fb0d\") " pod="openstack-kuttl-tests/nova-api-542c-account-create-update-szpdm" Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.331055 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m4d2p\" (UniqueName: \"kubernetes.io/projected/6748d231-22b7-4a78-b3ab-945673683907-kube-api-access-m4d2p\") pod \"nova-api-db-create-zhtn8\" (UID: \"6748d231-22b7-4a78-b3ab-945673683907\") " pod="openstack-kuttl-tests/nova-api-db-create-zhtn8" Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.373635 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-db-create-5wn2p"] Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.375051 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-db-create-5wn2p" Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.388323 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-db-create-5wn2p"] Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.402256 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-db-create-zhtn8" Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.412850 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e303f939-15b8-4652-919a-ceef96c1c997-operator-scripts\") pod \"nova-cell0-db-create-f8nhx\" (UID: \"e303f939-15b8-4652-919a-ceef96c1c997\") " pod="openstack-kuttl-tests/nova-cell0-db-create-f8nhx" Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.412922 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e6f2b04e-81d0-4840-aa61-97174ccff959-operator-scripts\") pod \"nova-cell0-531b-account-create-update-994sx\" (UID: \"e6f2b04e-81d0-4840-aa61-97174ccff959\") " pod="openstack-kuttl-tests/nova-cell0-531b-account-create-update-994sx" Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.412965 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l9g4s\" (UniqueName: \"kubernetes.io/projected/e6f2b04e-81d0-4840-aa61-97174ccff959-kube-api-access-l9g4s\") pod \"nova-cell0-531b-account-create-update-994sx\" (UID: \"e6f2b04e-81d0-4840-aa61-97174ccff959\") " pod="openstack-kuttl-tests/nova-cell0-531b-account-create-update-994sx" Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.413021 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dkps2\" (UniqueName: \"kubernetes.io/projected/e303f939-15b8-4652-919a-ceef96c1c997-kube-api-access-dkps2\") pod \"nova-cell0-db-create-f8nhx\" (UID: \"e303f939-15b8-4652-919a-ceef96c1c997\") " pod="openstack-kuttl-tests/nova-cell0-db-create-f8nhx" Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.446963 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-542c-account-create-update-szpdm" Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.479265 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-aad3-account-create-update-tbsnm"] Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.480640 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-aad3-account-create-update-tbsnm" Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.483931 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-db-secret" Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.489468 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-aad3-account-create-update-tbsnm"] Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.507721 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab","Type":"ContainerStarted","Data":"6e00a9aa371726359418b84cec4cb52d2eb2a18f6fcb6f310ef1bb67367ef96a"} Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.514043 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l9g4s\" (UniqueName: \"kubernetes.io/projected/e6f2b04e-81d0-4840-aa61-97174ccff959-kube-api-access-l9g4s\") pod \"nova-cell0-531b-account-create-update-994sx\" (UID: \"e6f2b04e-81d0-4840-aa61-97174ccff959\") " pod="openstack-kuttl-tests/nova-cell0-531b-account-create-update-994sx" Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.514123 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dkps2\" (UniqueName: \"kubernetes.io/projected/e303f939-15b8-4652-919a-ceef96c1c997-kube-api-access-dkps2\") pod \"nova-cell0-db-create-f8nhx\" (UID: \"e303f939-15b8-4652-919a-ceef96c1c997\") " pod="openstack-kuttl-tests/nova-cell0-db-create-f8nhx" Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.514228 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/099e720f-99be-44a9-b149-94a07ebe5e02-operator-scripts\") pod \"nova-cell1-db-create-5wn2p\" (UID: \"099e720f-99be-44a9-b149-94a07ebe5e02\") " pod="openstack-kuttl-tests/nova-cell1-db-create-5wn2p" Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.514248 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e303f939-15b8-4652-919a-ceef96c1c997-operator-scripts\") pod \"nova-cell0-db-create-f8nhx\" (UID: \"e303f939-15b8-4652-919a-ceef96c1c997\") " pod="openstack-kuttl-tests/nova-cell0-db-create-f8nhx" Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.514274 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gc7qn\" (UniqueName: \"kubernetes.io/projected/099e720f-99be-44a9-b149-94a07ebe5e02-kube-api-access-gc7qn\") pod \"nova-cell1-db-create-5wn2p\" (UID: \"099e720f-99be-44a9-b149-94a07ebe5e02\") " pod="openstack-kuttl-tests/nova-cell1-db-create-5wn2p" Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.514318 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e6f2b04e-81d0-4840-aa61-97174ccff959-operator-scripts\") pod \"nova-cell0-531b-account-create-update-994sx\" (UID: \"e6f2b04e-81d0-4840-aa61-97174ccff959\") " pod="openstack-kuttl-tests/nova-cell0-531b-account-create-update-994sx" Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.515078 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/e6f2b04e-81d0-4840-aa61-97174ccff959-operator-scripts\") pod \"nova-cell0-531b-account-create-update-994sx\" (UID: \"e6f2b04e-81d0-4840-aa61-97174ccff959\") " pod="openstack-kuttl-tests/nova-cell0-531b-account-create-update-994sx" Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.515189 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e303f939-15b8-4652-919a-ceef96c1c997-operator-scripts\") pod \"nova-cell0-db-create-f8nhx\" (UID: \"e303f939-15b8-4652-919a-ceef96c1c997\") " pod="openstack-kuttl-tests/nova-cell0-db-create-f8nhx" Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.516392 4558 generic.go:334] "Generic (PLEG): container finished" podID="fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb" containerID="37c69cdd0c87e5dab78bf80037c40a035a8f38600230f3ea65b93bc33964dc4e" exitCode=143 Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.517619 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb","Type":"ContainerDied","Data":"37c69cdd0c87e5dab78bf80037c40a035a8f38600230f3ea65b93bc33964dc4e"} Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.528432 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l9g4s\" (UniqueName: \"kubernetes.io/projected/e6f2b04e-81d0-4840-aa61-97174ccff959-kube-api-access-l9g4s\") pod \"nova-cell0-531b-account-create-update-994sx\" (UID: \"e6f2b04e-81d0-4840-aa61-97174ccff959\") " pod="openstack-kuttl-tests/nova-cell0-531b-account-create-update-994sx" Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.530285 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dkps2\" (UniqueName: \"kubernetes.io/projected/e303f939-15b8-4652-919a-ceef96c1c997-kube-api-access-dkps2\") pod \"nova-cell0-db-create-f8nhx\" (UID: \"e303f939-15b8-4652-919a-ceef96c1c997\") " pod="openstack-kuttl-tests/nova-cell0-db-create-f8nhx" Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.592384 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-db-create-f8nhx" Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.601489 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-531b-account-create-update-994sx" Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.616445 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4ecd38e8-120d-4e5b-9649-405b6068e9d1-operator-scripts\") pod \"nova-cell1-aad3-account-create-update-tbsnm\" (UID: \"4ecd38e8-120d-4e5b-9649-405b6068e9d1\") " pod="openstack-kuttl-tests/nova-cell1-aad3-account-create-update-tbsnm" Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.616569 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/099e720f-99be-44a9-b149-94a07ebe5e02-operator-scripts\") pod \"nova-cell1-db-create-5wn2p\" (UID: \"099e720f-99be-44a9-b149-94a07ebe5e02\") " pod="openstack-kuttl-tests/nova-cell1-db-create-5wn2p" Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.616607 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-prq4s\" (UniqueName: \"kubernetes.io/projected/4ecd38e8-120d-4e5b-9649-405b6068e9d1-kube-api-access-prq4s\") pod \"nova-cell1-aad3-account-create-update-tbsnm\" (UID: \"4ecd38e8-120d-4e5b-9649-405b6068e9d1\") " pod="openstack-kuttl-tests/nova-cell1-aad3-account-create-update-tbsnm" Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.616631 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gc7qn\" (UniqueName: \"kubernetes.io/projected/099e720f-99be-44a9-b149-94a07ebe5e02-kube-api-access-gc7qn\") pod \"nova-cell1-db-create-5wn2p\" (UID: \"099e720f-99be-44a9-b149-94a07ebe5e02\") " pod="openstack-kuttl-tests/nova-cell1-db-create-5wn2p" Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.621935 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/099e720f-99be-44a9-b149-94a07ebe5e02-operator-scripts\") pod \"nova-cell1-db-create-5wn2p\" (UID: \"099e720f-99be-44a9-b149-94a07ebe5e02\") " pod="openstack-kuttl-tests/nova-cell1-db-create-5wn2p" Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.636927 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gc7qn\" (UniqueName: \"kubernetes.io/projected/099e720f-99be-44a9-b149-94a07ebe5e02-kube-api-access-gc7qn\") pod \"nova-cell1-db-create-5wn2p\" (UID: \"099e720f-99be-44a9-b149-94a07ebe5e02\") " pod="openstack-kuttl-tests/nova-cell1-db-create-5wn2p" Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.712409 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-db-create-5wn2p" Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.719457 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-prq4s\" (UniqueName: \"kubernetes.io/projected/4ecd38e8-120d-4e5b-9649-405b6068e9d1-kube-api-access-prq4s\") pod \"nova-cell1-aad3-account-create-update-tbsnm\" (UID: \"4ecd38e8-120d-4e5b-9649-405b6068e9d1\") " pod="openstack-kuttl-tests/nova-cell1-aad3-account-create-update-tbsnm" Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.719585 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4ecd38e8-120d-4e5b-9649-405b6068e9d1-operator-scripts\") pod \"nova-cell1-aad3-account-create-update-tbsnm\" (UID: \"4ecd38e8-120d-4e5b-9649-405b6068e9d1\") " pod="openstack-kuttl-tests/nova-cell1-aad3-account-create-update-tbsnm" Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.720435 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4ecd38e8-120d-4e5b-9649-405b6068e9d1-operator-scripts\") pod \"nova-cell1-aad3-account-create-update-tbsnm\" (UID: \"4ecd38e8-120d-4e5b-9649-405b6068e9d1\") " pod="openstack-kuttl-tests/nova-cell1-aad3-account-create-update-tbsnm" Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.748140 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-prq4s\" (UniqueName: \"kubernetes.io/projected/4ecd38e8-120d-4e5b-9649-405b6068e9d1-kube-api-access-prq4s\") pod \"nova-cell1-aad3-account-create-update-tbsnm\" (UID: \"4ecd38e8-120d-4e5b-9649-405b6068e9d1\") " pod="openstack-kuttl-tests/nova-cell1-aad3-account-create-update-tbsnm" Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.755371 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.807812 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-aad3-account-create-update-tbsnm" Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.904987 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-db-create-zhtn8"] Jan 20 17:14:09 crc kubenswrapper[4558]: W0120 17:14:09.915046 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6748d231_22b7_4a78_b3ab_945673683907.slice/crio-a82bbdb6d994b0d44c68089988406a53be91030b61293ac116343ce3e357f24c WatchSource:0}: Error finding container a82bbdb6d994b0d44c68089988406a53be91030b61293ac116343ce3e357f24c: Status 404 returned error can't find the container with id a82bbdb6d994b0d44c68089988406a53be91030b61293ac116343ce3e357f24c Jan 20 17:14:09 crc kubenswrapper[4558]: I0120 17:14:09.986543 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-542c-account-create-update-szpdm"] Jan 20 17:14:10 crc kubenswrapper[4558]: I0120 17:14:10.081947 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-db-create-f8nhx"] Jan 20 17:14:10 crc kubenswrapper[4558]: I0120 17:14:10.173058 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-531b-account-create-update-994sx"] Jan 20 17:14:10 crc kubenswrapper[4558]: I0120 17:14:10.237478 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-db-create-5wn2p"] Jan 20 17:14:10 crc kubenswrapper[4558]: W0120 17:14:10.248827 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod099e720f_99be_44a9_b149_94a07ebe5e02.slice/crio-7d38110691b9d6a4a879ce2acb4aa9b8402d3805ec65b47555184332e8ab8e5e WatchSource:0}: Error finding container 7d38110691b9d6a4a879ce2acb4aa9b8402d3805ec65b47555184332e8ab8e5e: Status 404 returned error can't find the container with id 7d38110691b9d6a4a879ce2acb4aa9b8402d3805ec65b47555184332e8ab8e5e Jan 20 17:14:10 crc kubenswrapper[4558]: I0120 17:14:10.344647 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-aad3-account-create-update-tbsnm"] Jan 20 17:14:10 crc kubenswrapper[4558]: W0120 17:14:10.350362 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4ecd38e8_120d_4e5b_9649_405b6068e9d1.slice/crio-eb9ed236e92b77ffb593e8b6ebe3b9a8017b8a5b9b8f183262d118db1f3cfc3f WatchSource:0}: Error finding container eb9ed236e92b77ffb593e8b6ebe3b9a8017b8a5b9b8f183262d118db1f3cfc3f: Status 404 returned error can't find the container with id eb9ed236e92b77ffb593e8b6ebe3b9a8017b8a5b9b8f183262d118db1f3cfc3f Jan 20 17:14:10 crc kubenswrapper[4558]: I0120 17:14:10.527138 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-db-create-5wn2p" event={"ID":"099e720f-99be-44a9-b149-94a07ebe5e02","Type":"ContainerStarted","Data":"883394b0eba68b6b185ae8ea2cf0e2b62ea0e3012167ae18ee5ac40175a88877"} Jan 20 17:14:10 crc kubenswrapper[4558]: I0120 17:14:10.527217 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-db-create-5wn2p" event={"ID":"099e720f-99be-44a9-b149-94a07ebe5e02","Type":"ContainerStarted","Data":"7d38110691b9d6a4a879ce2acb4aa9b8402d3805ec65b47555184332e8ab8e5e"} Jan 20 17:14:10 crc kubenswrapper[4558]: I0120 17:14:10.533975 4558 generic.go:334] "Generic 
(PLEG): container finished" podID="6748d231-22b7-4a78-b3ab-945673683907" containerID="2b7ec2d7ccc9dca2793c497e669041069a2d75e8c81cc8deccc7613b50550351" exitCode=0 Jan 20 17:14:10 crc kubenswrapper[4558]: I0120 17:14:10.534046 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-db-create-zhtn8" event={"ID":"6748d231-22b7-4a78-b3ab-945673683907","Type":"ContainerDied","Data":"2b7ec2d7ccc9dca2793c497e669041069a2d75e8c81cc8deccc7613b50550351"} Jan 20 17:14:10 crc kubenswrapper[4558]: I0120 17:14:10.534072 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-db-create-zhtn8" event={"ID":"6748d231-22b7-4a78-b3ab-945673683907","Type":"ContainerStarted","Data":"a82bbdb6d994b0d44c68089988406a53be91030b61293ac116343ce3e357f24c"} Jan 20 17:14:10 crc kubenswrapper[4558]: I0120 17:14:10.541057 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab","Type":"ContainerStarted","Data":"cdb59cc41909e93106e64cb221195718916752f4f20e16104eb862f1be73922f"} Jan 20 17:14:10 crc kubenswrapper[4558]: I0120 17:14:10.543256 4558 generic.go:334] "Generic (PLEG): container finished" podID="e303f939-15b8-4652-919a-ceef96c1c997" containerID="f993c3851cad4942bd43255e4b46e389ed11dd15b246b851874c551543ffd036" exitCode=0 Jan 20 17:14:10 crc kubenswrapper[4558]: I0120 17:14:10.543321 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-db-create-f8nhx" event={"ID":"e303f939-15b8-4652-919a-ceef96c1c997","Type":"ContainerDied","Data":"f993c3851cad4942bd43255e4b46e389ed11dd15b246b851874c551543ffd036"} Jan 20 17:14:10 crc kubenswrapper[4558]: I0120 17:14:10.543339 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-db-create-f8nhx" event={"ID":"e303f939-15b8-4652-919a-ceef96c1c997","Type":"ContainerStarted","Data":"93cab0985b074490d7b8fa8c5040239090ae0d9b034840df7568eb7e157bcb2b"} Jan 20 17:14:10 crc kubenswrapper[4558]: I0120 17:14:10.544703 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-542c-account-create-update-szpdm" event={"ID":"daf7e77a-752b-41c8-8b3d-ac04bfb5fb0d","Type":"ContainerStarted","Data":"9b22e669bf253aa92c22917982ce64b58f8825c76606560a2e22b75e74428371"} Jan 20 17:14:10 crc kubenswrapper[4558]: I0120 17:14:10.544727 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-542c-account-create-update-szpdm" event={"ID":"daf7e77a-752b-41c8-8b3d-ac04bfb5fb0d","Type":"ContainerStarted","Data":"d426c2ffa8dfe7f408c44ece330a11506f314edea720e4fc810824ab982822f6"} Jan 20 17:14:10 crc kubenswrapper[4558]: I0120 17:14:10.546410 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-531b-account-create-update-994sx" event={"ID":"e6f2b04e-81d0-4840-aa61-97174ccff959","Type":"ContainerStarted","Data":"a5660a8652ca704957d7bced65ab7d2377b377fe46ad27b55d4e4ae2ccdc42e9"} Jan 20 17:14:10 crc kubenswrapper[4558]: I0120 17:14:10.546448 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-531b-account-create-update-994sx" event={"ID":"e6f2b04e-81d0-4840-aa61-97174ccff959","Type":"ContainerStarted","Data":"103d7f521e9ea9477bd9c220e4abc07a17d0a9f566acdbd673cc9f52280798d0"} Jan 20 17:14:10 crc kubenswrapper[4558]: I0120 17:14:10.548366 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-kuttl-tests/nova-cell1-aad3-account-create-update-tbsnm" event={"ID":"4ecd38e8-120d-4e5b-9649-405b6068e9d1","Type":"ContainerStarted","Data":"eb9ed236e92b77ffb593e8b6ebe3b9a8017b8a5b9b8f183262d118db1f3cfc3f"} Jan 20 17:14:10 crc kubenswrapper[4558]: I0120 17:14:10.548920 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-db-create-5wn2p" podStartSLOduration=1.5489017760000001 podStartE2EDuration="1.548901776s" podCreationTimestamp="2026-01-20 17:14:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:14:10.544917284 +0000 UTC m=+1944.305255251" watchObservedRunningTime="2026-01-20 17:14:10.548901776 +0000 UTC m=+1944.309239744" Jan 20 17:14:10 crc kubenswrapper[4558]: I0120 17:14:10.579313 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="74a94aaa-09bf-40a6-939b-14cf47decbfb" path="/var/lib/kubelet/pods/74a94aaa-09bf-40a6-939b-14cf47decbfb/volumes" Jan 20 17:14:10 crc kubenswrapper[4558]: I0120 17:14:10.603132 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-aad3-account-create-update-tbsnm" podStartSLOduration=1.6031148210000001 podStartE2EDuration="1.603114821s" podCreationTimestamp="2026-01-20 17:14:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:14:10.599006866 +0000 UTC m=+1944.359344834" watchObservedRunningTime="2026-01-20 17:14:10.603114821 +0000 UTC m=+1944.363452788" Jan 20 17:14:11 crc kubenswrapper[4558]: I0120 17:14:11.565024 4558 generic.go:334] "Generic (PLEG): container finished" podID="099e720f-99be-44a9-b149-94a07ebe5e02" containerID="883394b0eba68b6b185ae8ea2cf0e2b62ea0e3012167ae18ee5ac40175a88877" exitCode=0 Jan 20 17:14:11 crc kubenswrapper[4558]: I0120 17:14:11.565137 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-db-create-5wn2p" event={"ID":"099e720f-99be-44a9-b149-94a07ebe5e02","Type":"ContainerDied","Data":"883394b0eba68b6b185ae8ea2cf0e2b62ea0e3012167ae18ee5ac40175a88877"} Jan 20 17:14:11 crc kubenswrapper[4558]: I0120 17:14:11.569285 4558 generic.go:334] "Generic (PLEG): container finished" podID="4ecd38e8-120d-4e5b-9649-405b6068e9d1" containerID="c509ef0942ef4f61cb8995d82ebb71f9cf485dd51e17ff30d688714ae3d81de1" exitCode=0 Jan 20 17:14:11 crc kubenswrapper[4558]: I0120 17:14:11.569544 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-aad3-account-create-update-tbsnm" event={"ID":"4ecd38e8-120d-4e5b-9649-405b6068e9d1","Type":"ContainerDied","Data":"c509ef0942ef4f61cb8995d82ebb71f9cf485dd51e17ff30d688714ae3d81de1"} Jan 20 17:14:11 crc kubenswrapper[4558]: I0120 17:14:11.571517 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab","Type":"ContainerStarted","Data":"f12820ceb4c08944499274ef4eac4230ee15bce8eae024111d501a4102ebfb9c"} Jan 20 17:14:11 crc kubenswrapper[4558]: I0120 17:14:11.573630 4558 generic.go:334] "Generic (PLEG): container finished" podID="c061507a-294f-453b-be5d-4fdca19c790c" containerID="22a2a926f869e6446bdf888e1ee153d4bcc61395a9b6875ebbb63ec5af2731bc" exitCode=0 Jan 20 17:14:11 crc kubenswrapper[4558]: I0120 17:14:11.573704 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"c061507a-294f-453b-be5d-4fdca19c790c","Type":"ContainerDied","Data":"22a2a926f869e6446bdf888e1ee153d4bcc61395a9b6875ebbb63ec5af2731bc"} Jan 20 17:14:11 crc kubenswrapper[4558]: I0120 17:14:11.574693 4558 generic.go:334] "Generic (PLEG): container finished" podID="daf7e77a-752b-41c8-8b3d-ac04bfb5fb0d" containerID="9b22e669bf253aa92c22917982ce64b58f8825c76606560a2e22b75e74428371" exitCode=0 Jan 20 17:14:11 crc kubenswrapper[4558]: I0120 17:14:11.574731 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-542c-account-create-update-szpdm" event={"ID":"daf7e77a-752b-41c8-8b3d-ac04bfb5fb0d","Type":"ContainerDied","Data":"9b22e669bf253aa92c22917982ce64b58f8825c76606560a2e22b75e74428371"} Jan 20 17:14:11 crc kubenswrapper[4558]: I0120 17:14:11.576561 4558 generic.go:334] "Generic (PLEG): container finished" podID="e6f2b04e-81d0-4840-aa61-97174ccff959" containerID="a5660a8652ca704957d7bced65ab7d2377b377fe46ad27b55d4e4ae2ccdc42e9" exitCode=0 Jan 20 17:14:11 crc kubenswrapper[4558]: I0120 17:14:11.577184 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-531b-account-create-update-994sx" event={"ID":"e6f2b04e-81d0-4840-aa61-97174ccff959","Type":"ContainerDied","Data":"a5660a8652ca704957d7bced65ab7d2377b377fe46ad27b55d4e4ae2ccdc42e9"} Jan 20 17:14:11 crc kubenswrapper[4558]: I0120 17:14:11.588107 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell0-531b-account-create-update-994sx" podStartSLOduration=2.588093391 podStartE2EDuration="2.588093391s" podCreationTimestamp="2026-01-20 17:14:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:14:10.622813053 +0000 UTC m=+1944.383151020" watchObservedRunningTime="2026-01-20 17:14:11.588093391 +0000 UTC m=+1945.348431359" Jan 20 17:14:11 crc kubenswrapper[4558]: I0120 17:14:11.840273 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:14:11 crc kubenswrapper[4558]: I0120 17:14:11.977747 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-db-create-f8nhx" Jan 20 17:14:11 crc kubenswrapper[4558]: I0120 17:14:11.985676 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c061507a-294f-453b-be5d-4fdca19c790c-logs\") pod \"c061507a-294f-453b-be5d-4fdca19c790c\" (UID: \"c061507a-294f-453b-be5d-4fdca19c790c\") " Jan 20 17:14:11 crc kubenswrapper[4558]: I0120 17:14:11.985758 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"c061507a-294f-453b-be5d-4fdca19c790c\" (UID: \"c061507a-294f-453b-be5d-4fdca19c790c\") " Jan 20 17:14:11 crc kubenswrapper[4558]: I0120 17:14:11.985810 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c061507a-294f-453b-be5d-4fdca19c790c-scripts\") pod \"c061507a-294f-453b-be5d-4fdca19c790c\" (UID: \"c061507a-294f-453b-be5d-4fdca19c790c\") " Jan 20 17:14:11 crc kubenswrapper[4558]: I0120 17:14:11.985975 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c061507a-294f-453b-be5d-4fdca19c790c-combined-ca-bundle\") pod \"c061507a-294f-453b-be5d-4fdca19c790c\" (UID: \"c061507a-294f-453b-be5d-4fdca19c790c\") " Jan 20 17:14:11 crc kubenswrapper[4558]: I0120 17:14:11.986043 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5lcn8\" (UniqueName: \"kubernetes.io/projected/c061507a-294f-453b-be5d-4fdca19c790c-kube-api-access-5lcn8\") pod \"c061507a-294f-453b-be5d-4fdca19c790c\" (UID: \"c061507a-294f-453b-be5d-4fdca19c790c\") " Jan 20 17:14:11 crc kubenswrapper[4558]: I0120 17:14:11.986090 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c061507a-294f-453b-be5d-4fdca19c790c-public-tls-certs\") pod \"c061507a-294f-453b-be5d-4fdca19c790c\" (UID: \"c061507a-294f-453b-be5d-4fdca19c790c\") " Jan 20 17:14:11 crc kubenswrapper[4558]: I0120 17:14:11.986128 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c061507a-294f-453b-be5d-4fdca19c790c-httpd-run\") pod \"c061507a-294f-453b-be5d-4fdca19c790c\" (UID: \"c061507a-294f-453b-be5d-4fdca19c790c\") " Jan 20 17:14:11 crc kubenswrapper[4558]: I0120 17:14:11.986157 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c061507a-294f-453b-be5d-4fdca19c790c-config-data\") pod \"c061507a-294f-453b-be5d-4fdca19c790c\" (UID: \"c061507a-294f-453b-be5d-4fdca19c790c\") " Jan 20 17:14:11 crc kubenswrapper[4558]: I0120 17:14:11.987847 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c061507a-294f-453b-be5d-4fdca19c790c-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "c061507a-294f-453b-be5d-4fdca19c790c" (UID: "c061507a-294f-453b-be5d-4fdca19c790c"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:11.995415 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c061507a-294f-453b-be5d-4fdca19c790c-logs" (OuterVolumeSpecName: "logs") pod "c061507a-294f-453b-be5d-4fdca19c790c" (UID: "c061507a-294f-453b-be5d-4fdca19c790c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:11.995497 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c061507a-294f-453b-be5d-4fdca19c790c-scripts" (OuterVolumeSpecName: "scripts") pod "c061507a-294f-453b-be5d-4fdca19c790c" (UID: "c061507a-294f-453b-be5d-4fdca19c790c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.005957 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c061507a-294f-453b-be5d-4fdca19c790c-kube-api-access-5lcn8" (OuterVolumeSpecName: "kube-api-access-5lcn8") pod "c061507a-294f-453b-be5d-4fdca19c790c" (UID: "c061507a-294f-453b-be5d-4fdca19c790c"). InnerVolumeSpecName "kube-api-access-5lcn8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.007346 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage13-crc" (OuterVolumeSpecName: "glance") pod "c061507a-294f-453b-be5d-4fdca19c790c" (UID: "c061507a-294f-453b-be5d-4fdca19c790c"). InnerVolumeSpecName "local-storage13-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.035272 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c061507a-294f-453b-be5d-4fdca19c790c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c061507a-294f-453b-be5d-4fdca19c790c" (UID: "c061507a-294f-453b-be5d-4fdca19c790c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.088221 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e303f939-15b8-4652-919a-ceef96c1c997-operator-scripts\") pod \"e303f939-15b8-4652-919a-ceef96c1c997\" (UID: \"e303f939-15b8-4652-919a-ceef96c1c997\") " Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.088400 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dkps2\" (UniqueName: \"kubernetes.io/projected/e303f939-15b8-4652-919a-ceef96c1c997-kube-api-access-dkps2\") pod \"e303f939-15b8-4652-919a-ceef96c1c997\" (UID: \"e303f939-15b8-4652-919a-ceef96c1c997\") " Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.088808 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c061507a-294f-453b-be5d-4fdca19c790c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.088827 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5lcn8\" (UniqueName: \"kubernetes.io/projected/c061507a-294f-453b-be5d-4fdca19c790c-kube-api-access-5lcn8\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.088839 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c061507a-294f-453b-be5d-4fdca19c790c-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.088847 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c061507a-294f-453b-be5d-4fdca19c790c-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.088866 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") on node \"crc\" " Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.088874 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c061507a-294f-453b-be5d-4fdca19c790c-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.089279 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e303f939-15b8-4652-919a-ceef96c1c997-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e303f939-15b8-4652-919a-ceef96c1c997" (UID: "e303f939-15b8-4652-919a-ceef96c1c997"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.091752 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c061507a-294f-453b-be5d-4fdca19c790c-config-data" (OuterVolumeSpecName: "config-data") pod "c061507a-294f-453b-be5d-4fdca19c790c" (UID: "c061507a-294f-453b-be5d-4fdca19c790c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.095218 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e303f939-15b8-4652-919a-ceef96c1c997-kube-api-access-dkps2" (OuterVolumeSpecName: "kube-api-access-dkps2") pod "e303f939-15b8-4652-919a-ceef96c1c997" (UID: "e303f939-15b8-4652-919a-ceef96c1c997"). InnerVolumeSpecName "kube-api-access-dkps2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.107237 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage13-crc" (UniqueName: "kubernetes.io/local-volume/local-storage13-crc") on node "crc" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.111273 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c061507a-294f-453b-be5d-4fdca19c790c-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "c061507a-294f-453b-be5d-4fdca19c790c" (UID: "c061507a-294f-453b-be5d-4fdca19c790c"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.154206 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-db-create-zhtn8" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.164910 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-542c-account-create-update-szpdm" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.190476 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e303f939-15b8-4652-919a-ceef96c1c997-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.190505 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.190516 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dkps2\" (UniqueName: \"kubernetes.io/projected/e303f939-15b8-4652-919a-ceef96c1c997-kube-api-access-dkps2\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.190527 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c061507a-294f-453b-be5d-4fdca19c790c-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.190537 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c061507a-294f-453b-be5d-4fdca19c790c-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.291591 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6748d231-22b7-4a78-b3ab-945673683907-operator-scripts\") pod \"6748d231-22b7-4a78-b3ab-945673683907\" (UID: \"6748d231-22b7-4a78-b3ab-945673683907\") " Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.291655 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fknw6\" (UniqueName: 
\"kubernetes.io/projected/daf7e77a-752b-41c8-8b3d-ac04bfb5fb0d-kube-api-access-fknw6\") pod \"daf7e77a-752b-41c8-8b3d-ac04bfb5fb0d\" (UID: \"daf7e77a-752b-41c8-8b3d-ac04bfb5fb0d\") " Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.291725 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m4d2p\" (UniqueName: \"kubernetes.io/projected/6748d231-22b7-4a78-b3ab-945673683907-kube-api-access-m4d2p\") pod \"6748d231-22b7-4a78-b3ab-945673683907\" (UID: \"6748d231-22b7-4a78-b3ab-945673683907\") " Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.291752 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/daf7e77a-752b-41c8-8b3d-ac04bfb5fb0d-operator-scripts\") pod \"daf7e77a-752b-41c8-8b3d-ac04bfb5fb0d\" (UID: \"daf7e77a-752b-41c8-8b3d-ac04bfb5fb0d\") " Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.292052 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6748d231-22b7-4a78-b3ab-945673683907-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "6748d231-22b7-4a78-b3ab-945673683907" (UID: "6748d231-22b7-4a78-b3ab-945673683907"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.292476 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/daf7e77a-752b-41c8-8b3d-ac04bfb5fb0d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "daf7e77a-752b-41c8-8b3d-ac04bfb5fb0d" (UID: "daf7e77a-752b-41c8-8b3d-ac04bfb5fb0d"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.292903 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6748d231-22b7-4a78-b3ab-945673683907-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.292933 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/daf7e77a-752b-41c8-8b3d-ac04bfb5fb0d-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.294103 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/daf7e77a-752b-41c8-8b3d-ac04bfb5fb0d-kube-api-access-fknw6" (OuterVolumeSpecName: "kube-api-access-fknw6") pod "daf7e77a-752b-41c8-8b3d-ac04bfb5fb0d" (UID: "daf7e77a-752b-41c8-8b3d-ac04bfb5fb0d"). InnerVolumeSpecName "kube-api-access-fknw6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.294832 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6748d231-22b7-4a78-b3ab-945673683907-kube-api-access-m4d2p" (OuterVolumeSpecName: "kube-api-access-m4d2p") pod "6748d231-22b7-4a78-b3ab-945673683907" (UID: "6748d231-22b7-4a78-b3ab-945673683907"). InnerVolumeSpecName "kube-api-access-m4d2p". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.395673 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fknw6\" (UniqueName: \"kubernetes.io/projected/daf7e77a-752b-41c8-8b3d-ac04bfb5fb0d-kube-api-access-fknw6\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.396046 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m4d2p\" (UniqueName: \"kubernetes.io/projected/6748d231-22b7-4a78-b3ab-945673683907-kube-api-access-m4d2p\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.633867 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-542c-account-create-update-szpdm" event={"ID":"daf7e77a-752b-41c8-8b3d-ac04bfb5fb0d","Type":"ContainerDied","Data":"d426c2ffa8dfe7f408c44ece330a11506f314edea720e4fc810824ab982822f6"} Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.633919 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d426c2ffa8dfe7f408c44ece330a11506f314edea720e4fc810824ab982822f6" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.633989 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-542c-account-create-update-szpdm" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.654718 4558 generic.go:334] "Generic (PLEG): container finished" podID="fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb" containerID="2345422c3ac145fd1d8e65d12769cbcb80d89d096669fb577251421566912341" exitCode=0 Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.654827 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb","Type":"ContainerDied","Data":"2345422c3ac145fd1d8e65d12769cbcb80d89d096669fb577251421566912341"} Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.663348 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-db-create-zhtn8" event={"ID":"6748d231-22b7-4a78-b3ab-945673683907","Type":"ContainerDied","Data":"a82bbdb6d994b0d44c68089988406a53be91030b61293ac116343ce3e357f24c"} Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.663373 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a82bbdb6d994b0d44c68089988406a53be91030b61293ac116343ce3e357f24c" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.663443 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-db-create-zhtn8" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.666504 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.667134 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"c061507a-294f-453b-be5d-4fdca19c790c","Type":"ContainerDied","Data":"fd451b5cc00585c8579efbc7e0be3a1078970c7fd9941b13ddf84c39c0ad04e9"} Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.667208 4558 scope.go:117] "RemoveContainer" containerID="22a2a926f869e6446bdf888e1ee153d4bcc61395a9b6875ebbb63ec5af2731bc" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.672344 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-db-create-f8nhx" event={"ID":"e303f939-15b8-4652-919a-ceef96c1c997","Type":"ContainerDied","Data":"93cab0985b074490d7b8fa8c5040239090ae0d9b034840df7568eb7e157bcb2b"} Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.672379 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="93cab0985b074490d7b8fa8c5040239090ae0d9b034840df7568eb7e157bcb2b" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.672486 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-db-create-f8nhx" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.780024 4558 scope.go:117] "RemoveContainer" containerID="1a5915eb633ad5a17f2153955cceb387e146bddc311a76f35f78e2b62cdc3b6e" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.805105 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.816622 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.825512 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:14:12 crc kubenswrapper[4558]: E0120 17:14:12.825954 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="daf7e77a-752b-41c8-8b3d-ac04bfb5fb0d" containerName="mariadb-account-create-update" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.825967 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="daf7e77a-752b-41c8-8b3d-ac04bfb5fb0d" containerName="mariadb-account-create-update" Jan 20 17:14:12 crc kubenswrapper[4558]: E0120 17:14:12.825978 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c061507a-294f-453b-be5d-4fdca19c790c" containerName="glance-httpd" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.825984 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c061507a-294f-453b-be5d-4fdca19c790c" containerName="glance-httpd" Jan 20 17:14:12 crc kubenswrapper[4558]: E0120 17:14:12.826011 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e303f939-15b8-4652-919a-ceef96c1c997" containerName="mariadb-database-create" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.826017 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e303f939-15b8-4652-919a-ceef96c1c997" containerName="mariadb-database-create" Jan 20 17:14:12 crc kubenswrapper[4558]: E0120 17:14:12.826035 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c061507a-294f-453b-be5d-4fdca19c790c" containerName="glance-log" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.826041 4558 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="c061507a-294f-453b-be5d-4fdca19c790c" containerName="glance-log" Jan 20 17:14:12 crc kubenswrapper[4558]: E0120 17:14:12.826052 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6748d231-22b7-4a78-b3ab-945673683907" containerName="mariadb-database-create" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.826058 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6748d231-22b7-4a78-b3ab-945673683907" containerName="mariadb-database-create" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.826274 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="6748d231-22b7-4a78-b3ab-945673683907" containerName="mariadb-database-create" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.826287 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c061507a-294f-453b-be5d-4fdca19c790c" containerName="glance-httpd" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.826309 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="daf7e77a-752b-41c8-8b3d-ac04bfb5fb0d" containerName="mariadb-account-create-update" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.826320 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e303f939-15b8-4652-919a-ceef96c1c997" containerName="mariadb-database-create" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.826332 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c061507a-294f-453b-be5d-4fdca19c790c" containerName="glance-log" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.827441 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.831747 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.833178 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-public-svc" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.833369 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-external-config-data" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.858986 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fz7gc\" (UniqueName: \"kubernetes.io/projected/a9ada454-f6fa-4ea4-b386-3ddbcd37f233-kube-api-access-fz7gc\") pod \"glance-default-external-api-0\" (UID: \"a9ada454-f6fa-4ea4-b386-3ddbcd37f233\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.859057 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a9ada454-f6fa-4ea4-b386-3ddbcd37f233-scripts\") pod \"glance-default-external-api-0\" (UID: \"a9ada454-f6fa-4ea4-b386-3ddbcd37f233\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.859088 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a9ada454-f6fa-4ea4-b386-3ddbcd37f233-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"a9ada454-f6fa-4ea4-b386-3ddbcd37f233\") " 
pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.859136 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a9ada454-f6fa-4ea4-b386-3ddbcd37f233-config-data\") pod \"glance-default-external-api-0\" (UID: \"a9ada454-f6fa-4ea4-b386-3ddbcd37f233\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.859174 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9ada454-f6fa-4ea4-b386-3ddbcd37f233-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"a9ada454-f6fa-4ea4-b386-3ddbcd37f233\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.859221 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a9ada454-f6fa-4ea4-b386-3ddbcd37f233-logs\") pod \"glance-default-external-api-0\" (UID: \"a9ada454-f6fa-4ea4-b386-3ddbcd37f233\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.859258 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"glance-default-external-api-0\" (UID: \"a9ada454-f6fa-4ea4-b386-3ddbcd37f233\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.859287 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a9ada454-f6fa-4ea4-b386-3ddbcd37f233-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"a9ada454-f6fa-4ea4-b386-3ddbcd37f233\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.905023 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.960632 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fz7gc\" (UniqueName: \"kubernetes.io/projected/a9ada454-f6fa-4ea4-b386-3ddbcd37f233-kube-api-access-fz7gc\") pod \"glance-default-external-api-0\" (UID: \"a9ada454-f6fa-4ea4-b386-3ddbcd37f233\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.960723 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a9ada454-f6fa-4ea4-b386-3ddbcd37f233-scripts\") pod \"glance-default-external-api-0\" (UID: \"a9ada454-f6fa-4ea4-b386-3ddbcd37f233\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.960773 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a9ada454-f6fa-4ea4-b386-3ddbcd37f233-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"a9ada454-f6fa-4ea4-b386-3ddbcd37f233\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.960855 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a9ada454-f6fa-4ea4-b386-3ddbcd37f233-config-data\") pod \"glance-default-external-api-0\" (UID: \"a9ada454-f6fa-4ea4-b386-3ddbcd37f233\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.960881 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9ada454-f6fa-4ea4-b386-3ddbcd37f233-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"a9ada454-f6fa-4ea4-b386-3ddbcd37f233\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.960954 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a9ada454-f6fa-4ea4-b386-3ddbcd37f233-logs\") pod \"glance-default-external-api-0\" (UID: \"a9ada454-f6fa-4ea4-b386-3ddbcd37f233\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.961018 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"glance-default-external-api-0\" (UID: \"a9ada454-f6fa-4ea4-b386-3ddbcd37f233\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.961068 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a9ada454-f6fa-4ea4-b386-3ddbcd37f233-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"a9ada454-f6fa-4ea4-b386-3ddbcd37f233\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.961790 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"glance-default-external-api-0\" (UID: 
\"a9ada454-f6fa-4ea4-b386-3ddbcd37f233\") device mount path \"/mnt/openstack/pv13\"" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.961956 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a9ada454-f6fa-4ea4-b386-3ddbcd37f233-logs\") pod \"glance-default-external-api-0\" (UID: \"a9ada454-f6fa-4ea4-b386-3ddbcd37f233\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.962028 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a9ada454-f6fa-4ea4-b386-3ddbcd37f233-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"a9ada454-f6fa-4ea4-b386-3ddbcd37f233\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.975264 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a9ada454-f6fa-4ea4-b386-3ddbcd37f233-config-data\") pod \"glance-default-external-api-0\" (UID: \"a9ada454-f6fa-4ea4-b386-3ddbcd37f233\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.976492 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a9ada454-f6fa-4ea4-b386-3ddbcd37f233-scripts\") pod \"glance-default-external-api-0\" (UID: \"a9ada454-f6fa-4ea4-b386-3ddbcd37f233\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.984152 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fz7gc\" (UniqueName: \"kubernetes.io/projected/a9ada454-f6fa-4ea4-b386-3ddbcd37f233-kube-api-access-fz7gc\") pod \"glance-default-external-api-0\" (UID: \"a9ada454-f6fa-4ea4-b386-3ddbcd37f233\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.984584 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a9ada454-f6fa-4ea4-b386-3ddbcd37f233-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"a9ada454-f6fa-4ea4-b386-3ddbcd37f233\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:14:12 crc kubenswrapper[4558]: I0120 17:14:12.985081 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9ada454-f6fa-4ea4-b386-3ddbcd37f233-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"a9ada454-f6fa-4ea4-b386-3ddbcd37f233\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.001084 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"glance-default-external-api-0\" (UID: \"a9ada454-f6fa-4ea4-b386-3ddbcd37f233\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.062302 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zvqh8\" (UniqueName: \"kubernetes.io/projected/fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb-kube-api-access-zvqh8\") pod 
\"fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb\" (UID: \"fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb\") " Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.062392 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb-scripts\") pod \"fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb\" (UID: \"fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb\") " Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.062439 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage17-crc\") pod \"fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb\" (UID: \"fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb\") " Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.062523 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb-combined-ca-bundle\") pod \"fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb\" (UID: \"fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb\") " Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.067604 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb-httpd-run\") pod \"fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb\" (UID: \"fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb\") " Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.067661 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb-config-data\") pod \"fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb\" (UID: \"fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb\") " Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.067697 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb-logs\") pod \"fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb\" (UID: \"fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb\") " Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.067745 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb-internal-tls-certs\") pod \"fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb\" (UID: \"fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb\") " Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.074361 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb" (UID: "fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.077526 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb-logs" (OuterVolumeSpecName: "logs") pod "fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb" (UID: "fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.080794 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage17-crc" (OuterVolumeSpecName: "glance") pod "fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb" (UID: "fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb"). InnerVolumeSpecName "local-storage17-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.085387 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb-kube-api-access-zvqh8" (OuterVolumeSpecName: "kube-api-access-zvqh8") pod "fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb" (UID: "fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb"). InnerVolumeSpecName "kube-api-access-zvqh8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.088610 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb-scripts" (OuterVolumeSpecName: "scripts") pod "fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb" (UID: "fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.147944 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.155462 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb" (UID: "fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.164903 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb-config-data" (OuterVolumeSpecName: "config-data") pod "fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb" (UID: "fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.170044 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.170069 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zvqh8\" (UniqueName: \"kubernetes.io/projected/fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb-kube-api-access-zvqh8\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.170079 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.170104 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage17-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage17-crc\") on node \"crc\" " Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.170114 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.170124 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.170132 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.199875 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb" (UID: "fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.200125 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-531b-account-create-update-994sx" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.209499 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-aad3-account-create-update-tbsnm" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.212814 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage17-crc" (UniqueName: "kubernetes.io/local-volume/local-storage17-crc") on node "crc" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.218549 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-db-create-5wn2p" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.273410 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.273451 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage17-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage17-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.374928 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-prq4s\" (UniqueName: \"kubernetes.io/projected/4ecd38e8-120d-4e5b-9649-405b6068e9d1-kube-api-access-prq4s\") pod \"4ecd38e8-120d-4e5b-9649-405b6068e9d1\" (UID: \"4ecd38e8-120d-4e5b-9649-405b6068e9d1\") " Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.375114 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4ecd38e8-120d-4e5b-9649-405b6068e9d1-operator-scripts\") pod \"4ecd38e8-120d-4e5b-9649-405b6068e9d1\" (UID: \"4ecd38e8-120d-4e5b-9649-405b6068e9d1\") " Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.375188 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l9g4s\" (UniqueName: \"kubernetes.io/projected/e6f2b04e-81d0-4840-aa61-97174ccff959-kube-api-access-l9g4s\") pod \"e6f2b04e-81d0-4840-aa61-97174ccff959\" (UID: \"e6f2b04e-81d0-4840-aa61-97174ccff959\") " Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.375215 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gc7qn\" (UniqueName: \"kubernetes.io/projected/099e720f-99be-44a9-b149-94a07ebe5e02-kube-api-access-gc7qn\") pod \"099e720f-99be-44a9-b149-94a07ebe5e02\" (UID: \"099e720f-99be-44a9-b149-94a07ebe5e02\") " Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.375266 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e6f2b04e-81d0-4840-aa61-97174ccff959-operator-scripts\") pod \"e6f2b04e-81d0-4840-aa61-97174ccff959\" (UID: \"e6f2b04e-81d0-4840-aa61-97174ccff959\") " Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.375304 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/099e720f-99be-44a9-b149-94a07ebe5e02-operator-scripts\") pod \"099e720f-99be-44a9-b149-94a07ebe5e02\" (UID: \"099e720f-99be-44a9-b149-94a07ebe5e02\") " Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.375742 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4ecd38e8-120d-4e5b-9649-405b6068e9d1-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "4ecd38e8-120d-4e5b-9649-405b6068e9d1" (UID: "4ecd38e8-120d-4e5b-9649-405b6068e9d1"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.376174 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/099e720f-99be-44a9-b149-94a07ebe5e02-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "099e720f-99be-44a9-b149-94a07ebe5e02" (UID: "099e720f-99be-44a9-b149-94a07ebe5e02"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.376505 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e6f2b04e-81d0-4840-aa61-97174ccff959-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e6f2b04e-81d0-4840-aa61-97174ccff959" (UID: "e6f2b04e-81d0-4840-aa61-97174ccff959"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.386968 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e6f2b04e-81d0-4840-aa61-97174ccff959-kube-api-access-l9g4s" (OuterVolumeSpecName: "kube-api-access-l9g4s") pod "e6f2b04e-81d0-4840-aa61-97174ccff959" (UID: "e6f2b04e-81d0-4840-aa61-97174ccff959"). InnerVolumeSpecName "kube-api-access-l9g4s". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.387202 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/099e720f-99be-44a9-b149-94a07ebe5e02-kube-api-access-gc7qn" (OuterVolumeSpecName: "kube-api-access-gc7qn") pod "099e720f-99be-44a9-b149-94a07ebe5e02" (UID: "099e720f-99be-44a9-b149-94a07ebe5e02"). InnerVolumeSpecName "kube-api-access-gc7qn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.390465 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4ecd38e8-120d-4e5b-9649-405b6068e9d1-kube-api-access-prq4s" (OuterVolumeSpecName: "kube-api-access-prq4s") pod "4ecd38e8-120d-4e5b-9649-405b6068e9d1" (UID: "4ecd38e8-120d-4e5b-9649-405b6068e9d1"). InnerVolumeSpecName "kube-api-access-prq4s". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.478633 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4ecd38e8-120d-4e5b-9649-405b6068e9d1-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.478973 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l9g4s\" (UniqueName: \"kubernetes.io/projected/e6f2b04e-81d0-4840-aa61-97174ccff959-kube-api-access-l9g4s\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.479039 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gc7qn\" (UniqueName: \"kubernetes.io/projected/099e720f-99be-44a9-b149-94a07ebe5e02-kube-api-access-gc7qn\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.479121 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e6f2b04e-81d0-4840-aa61-97174ccff959-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.479209 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/099e720f-99be-44a9-b149-94a07ebe5e02-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.479293 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-prq4s\" (UniqueName: \"kubernetes.io/projected/4ecd38e8-120d-4e5b-9649-405b6068e9d1-kube-api-access-prq4s\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:13 crc kubenswrapper[4558]: W0120 17:14:13.640529 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda9ada454_f6fa_4ea4_b386_3ddbcd37f233.slice/crio-7c1ae5ffe5f58759e7773584026e6bc8dbd274362dd8c71eba12a88a090c95de WatchSource:0}: Error finding container 7c1ae5ffe5f58759e7773584026e6bc8dbd274362dd8c71eba12a88a090c95de: Status 404 returned error can't find the container with id 7c1ae5ffe5f58759e7773584026e6bc8dbd274362dd8c71eba12a88a090c95de Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.643115 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.687223 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb","Type":"ContainerDied","Data":"b70bffc6a37840a954dfe5e2fe46eb02c0529f106b9c0a49bb2844f07850c1cc"} Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.687236 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.687298 4558 scope.go:117] "RemoveContainer" containerID="2345422c3ac145fd1d8e65d12769cbcb80d89d096669fb577251421566912341" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.690259 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab","Type":"ContainerStarted","Data":"c1addf594a0e4be4a59dc20deb51636ae9b8537b9b29f20ad2448386146af197"} Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.690471 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab" containerName="ceilometer-central-agent" containerID="cri-o://6e00a9aa371726359418b84cec4cb52d2eb2a18f6fcb6f310ef1bb67367ef96a" gracePeriod=30 Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.690770 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.691076 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab" containerName="proxy-httpd" containerID="cri-o://c1addf594a0e4be4a59dc20deb51636ae9b8537b9b29f20ad2448386146af197" gracePeriod=30 Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.691138 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab" containerName="sg-core" containerID="cri-o://f12820ceb4c08944499274ef4eac4230ee15bce8eae024111d501a4102ebfb9c" gracePeriod=30 Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.691231 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab" containerName="ceilometer-notification-agent" containerID="cri-o://cdb59cc41909e93106e64cb221195718916752f4f20e16104eb862f1be73922f" gracePeriod=30 Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.703208 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"a9ada454-f6fa-4ea4-b386-3ddbcd37f233","Type":"ContainerStarted","Data":"7c1ae5ffe5f58759e7773584026e6bc8dbd274362dd8c71eba12a88a090c95de"} Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.705261 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-531b-account-create-update-994sx" event={"ID":"e6f2b04e-81d0-4840-aa61-97174ccff959","Type":"ContainerDied","Data":"103d7f521e9ea9477bd9c220e4abc07a17d0a9f566acdbd673cc9f52280798d0"} Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.705304 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="103d7f521e9ea9477bd9c220e4abc07a17d0a9f566acdbd673cc9f52280798d0" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.705328 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-531b-account-create-update-994sx" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.714229 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=2.749011075 podStartE2EDuration="6.714213047s" podCreationTimestamp="2026-01-20 17:14:07 +0000 UTC" firstStartedPulling="2026-01-20 17:14:08.306395813 +0000 UTC m=+1942.066733780" lastFinishedPulling="2026-01-20 17:14:12.271597784 +0000 UTC m=+1946.031935752" observedRunningTime="2026-01-20 17:14:13.709702425 +0000 UTC m=+1947.470040392" watchObservedRunningTime="2026-01-20 17:14:13.714213047 +0000 UTC m=+1947.474551014" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.721591 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-aad3-account-create-update-tbsnm" event={"ID":"4ecd38e8-120d-4e5b-9649-405b6068e9d1","Type":"ContainerDied","Data":"eb9ed236e92b77ffb593e8b6ebe3b9a8017b8a5b9b8f183262d118db1f3cfc3f"} Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.721650 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="eb9ed236e92b77ffb593e8b6ebe3b9a8017b8a5b9b8f183262d118db1f3cfc3f" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.721684 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-aad3-account-create-update-tbsnm" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.724769 4558 scope.go:117] "RemoveContainer" containerID="37c69cdd0c87e5dab78bf80037c40a035a8f38600230f3ea65b93bc33964dc4e" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.728242 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.731807 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-db-create-5wn2p" event={"ID":"099e720f-99be-44a9-b149-94a07ebe5e02","Type":"ContainerDied","Data":"7d38110691b9d6a4a879ce2acb4aa9b8402d3805ec65b47555184332e8ab8e5e"} Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.731839 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7d38110691b9d6a4a879ce2acb4aa9b8402d3805ec65b47555184332e8ab8e5e" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.731852 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-db-create-5wn2p" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.736313 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.761600 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:14:13 crc kubenswrapper[4558]: E0120 17:14:13.762025 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb" containerName="glance-log" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.762050 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb" containerName="glance-log" Jan 20 17:14:13 crc kubenswrapper[4558]: E0120 17:14:13.762057 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6f2b04e-81d0-4840-aa61-97174ccff959" containerName="mariadb-account-create-update" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.762065 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6f2b04e-81d0-4840-aa61-97174ccff959" containerName="mariadb-account-create-update" Jan 20 17:14:13 crc kubenswrapper[4558]: E0120 17:14:13.762085 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ecd38e8-120d-4e5b-9649-405b6068e9d1" containerName="mariadb-account-create-update" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.762091 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ecd38e8-120d-4e5b-9649-405b6068e9d1" containerName="mariadb-account-create-update" Jan 20 17:14:13 crc kubenswrapper[4558]: E0120 17:14:13.762104 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="099e720f-99be-44a9-b149-94a07ebe5e02" containerName="mariadb-database-create" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.762110 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="099e720f-99be-44a9-b149-94a07ebe5e02" containerName="mariadb-database-create" Jan 20 17:14:13 crc kubenswrapper[4558]: E0120 17:14:13.762127 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb" containerName="glance-httpd" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.762132 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb" containerName="glance-httpd" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.762367 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="099e720f-99be-44a9-b149-94a07ebe5e02" containerName="mariadb-database-create" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.762387 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e6f2b04e-81d0-4840-aa61-97174ccff959" containerName="mariadb-account-create-update" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.762403 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb" containerName="glance-httpd" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.762437 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb" containerName="glance-log" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.762446 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="4ecd38e8-120d-4e5b-9649-405b6068e9d1" containerName="mariadb-account-create-update" Jan 20 17:14:13 crc 
kubenswrapper[4558]: I0120 17:14:13.763389 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.768630 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-internal-svc" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.769215 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-internal-config-data" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.775940 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.890703 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5588d9e2-2f89-4abb-94de-76b5791582a5-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"5588d9e2-2f89-4abb-94de-76b5791582a5\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.890755 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage17-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage17-crc\") pod \"glance-default-internal-api-0\" (UID: \"5588d9e2-2f89-4abb-94de-76b5791582a5\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.890784 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4wvct\" (UniqueName: \"kubernetes.io/projected/5588d9e2-2f89-4abb-94de-76b5791582a5-kube-api-access-4wvct\") pod \"glance-default-internal-api-0\" (UID: \"5588d9e2-2f89-4abb-94de-76b5791582a5\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.890929 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5588d9e2-2f89-4abb-94de-76b5791582a5-scripts\") pod \"glance-default-internal-api-0\" (UID: \"5588d9e2-2f89-4abb-94de-76b5791582a5\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.890953 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5588d9e2-2f89-4abb-94de-76b5791582a5-config-data\") pod \"glance-default-internal-api-0\" (UID: \"5588d9e2-2f89-4abb-94de-76b5791582a5\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.890986 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5588d9e2-2f89-4abb-94de-76b5791582a5-logs\") pod \"glance-default-internal-api-0\" (UID: \"5588d9e2-2f89-4abb-94de-76b5791582a5\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.891034 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5588d9e2-2f89-4abb-94de-76b5791582a5-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: 
\"5588d9e2-2f89-4abb-94de-76b5791582a5\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.891088 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5588d9e2-2f89-4abb-94de-76b5791582a5-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"5588d9e2-2f89-4abb-94de-76b5791582a5\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.992491 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5588d9e2-2f89-4abb-94de-76b5791582a5-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"5588d9e2-2f89-4abb-94de-76b5791582a5\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.992538 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage17-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage17-crc\") pod \"glance-default-internal-api-0\" (UID: \"5588d9e2-2f89-4abb-94de-76b5791582a5\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.992561 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4wvct\" (UniqueName: \"kubernetes.io/projected/5588d9e2-2f89-4abb-94de-76b5791582a5-kube-api-access-4wvct\") pod \"glance-default-internal-api-0\" (UID: \"5588d9e2-2f89-4abb-94de-76b5791582a5\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.992647 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5588d9e2-2f89-4abb-94de-76b5791582a5-scripts\") pod \"glance-default-internal-api-0\" (UID: \"5588d9e2-2f89-4abb-94de-76b5791582a5\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.992667 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5588d9e2-2f89-4abb-94de-76b5791582a5-config-data\") pod \"glance-default-internal-api-0\" (UID: \"5588d9e2-2f89-4abb-94de-76b5791582a5\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.992692 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5588d9e2-2f89-4abb-94de-76b5791582a5-logs\") pod \"glance-default-internal-api-0\" (UID: \"5588d9e2-2f89-4abb-94de-76b5791582a5\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.992722 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5588d9e2-2f89-4abb-94de-76b5791582a5-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"5588d9e2-2f89-4abb-94de-76b5791582a5\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.992751 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5588d9e2-2f89-4abb-94de-76b5791582a5-httpd-run\") pod 
\"glance-default-internal-api-0\" (UID: \"5588d9e2-2f89-4abb-94de-76b5791582a5\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.993061 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage17-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage17-crc\") pod \"glance-default-internal-api-0\" (UID: \"5588d9e2-2f89-4abb-94de-76b5791582a5\") device mount path \"/mnt/openstack/pv17\"" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.993111 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5588d9e2-2f89-4abb-94de-76b5791582a5-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"5588d9e2-2f89-4abb-94de-76b5791582a5\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.993343 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5588d9e2-2f89-4abb-94de-76b5791582a5-logs\") pod \"glance-default-internal-api-0\" (UID: \"5588d9e2-2f89-4abb-94de-76b5791582a5\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.998691 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5588d9e2-2f89-4abb-94de-76b5791582a5-scripts\") pod \"glance-default-internal-api-0\" (UID: \"5588d9e2-2f89-4abb-94de-76b5791582a5\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.998716 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5588d9e2-2f89-4abb-94de-76b5791582a5-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"5588d9e2-2f89-4abb-94de-76b5791582a5\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:14:13 crc kubenswrapper[4558]: I0120 17:14:13.999254 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5588d9e2-2f89-4abb-94de-76b5791582a5-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"5588d9e2-2f89-4abb-94de-76b5791582a5\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:13.999984 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5588d9e2-2f89-4abb-94de-76b5791582a5-config-data\") pod \"glance-default-internal-api-0\" (UID: \"5588d9e2-2f89-4abb-94de-76b5791582a5\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.007284 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4wvct\" (UniqueName: \"kubernetes.io/projected/5588d9e2-2f89-4abb-94de-76b5791582a5-kube-api-access-4wvct\") pod \"glance-default-internal-api-0\" (UID: \"5588d9e2-2f89-4abb-94de-76b5791582a5\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.024490 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage17-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage17-crc\") pod 
\"glance-default-internal-api-0\" (UID: \"5588d9e2-2f89-4abb-94de-76b5791582a5\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.099501 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.304709 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.402843 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab-config-data\") pod \"d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab\" (UID: \"d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab\") " Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.402960 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab-scripts\") pod \"d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab\" (UID: \"d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab\") " Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.403096 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab-combined-ca-bundle\") pod \"d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab\" (UID: \"d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab\") " Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.403193 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab-log-httpd\") pod \"d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab\" (UID: \"d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab\") " Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.403248 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hntbs\" (UniqueName: \"kubernetes.io/projected/d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab-kube-api-access-hntbs\") pod \"d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab\" (UID: \"d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab\") " Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.403697 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab" (UID: "d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.404956 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab-run-httpd\") pod \"d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab\" (UID: \"d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab\") " Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.405054 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab-sg-core-conf-yaml\") pod \"d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab\" (UID: \"d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab\") " Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.405592 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab" (UID: "d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.406556 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.406584 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.410446 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab-kube-api-access-hntbs" (OuterVolumeSpecName: "kube-api-access-hntbs") pod "d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab" (UID: "d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab"). InnerVolumeSpecName "kube-api-access-hntbs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.410506 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab-scripts" (OuterVolumeSpecName: "scripts") pod "d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab" (UID: "d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.438466 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab" (UID: "d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.479287 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab" (UID: "d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.484145 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab-config-data" (OuterVolumeSpecName: "config-data") pod "d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab" (UID: "d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.509566 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.509687 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.509771 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hntbs\" (UniqueName: \"kubernetes.io/projected/d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab-kube-api-access-hntbs\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.509831 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.509894 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.556752 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.597936 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c061507a-294f-453b-be5d-4fdca19c790c" path="/var/lib/kubelet/pods/c061507a-294f-453b-be5d-4fdca19c790c/volumes" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.599606 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb" path="/var/lib/kubelet/pods/fec58b11-5e1a-4bfc-bfcc-fde43e56b8fb/volumes" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.609712 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-db-sync-srppb"] Jan 20 17:14:14 crc kubenswrapper[4558]: E0120 17:14:14.612295 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab" containerName="proxy-httpd" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.612321 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab" containerName="proxy-httpd" Jan 20 17:14:14 crc kubenswrapper[4558]: E0120 17:14:14.612357 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab" containerName="ceilometer-notification-agent" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.612364 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab" containerName="ceilometer-notification-agent" Jan 20 17:14:14 crc kubenswrapper[4558]: E0120 17:14:14.612370 4558 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab" containerName="ceilometer-central-agent" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.612378 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab" containerName="ceilometer-central-agent" Jan 20 17:14:14 crc kubenswrapper[4558]: E0120 17:14:14.612411 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab" containerName="sg-core" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.612417 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab" containerName="sg-core" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.612679 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab" containerName="ceilometer-central-agent" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.612700 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab" containerName="proxy-httpd" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.612714 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab" containerName="sg-core" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.612729 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab" containerName="ceilometer-notification-agent" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.613630 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-srppb" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.619103 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-conductor-scripts" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.619475 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-nova-dockercfg-km6cf" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.619501 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-conductor-config-data" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.641694 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-db-sync-srppb"] Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.742597 4558 generic.go:334] "Generic (PLEG): container finished" podID="d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab" containerID="c1addf594a0e4be4a59dc20deb51636ae9b8537b9b29f20ad2448386146af197" exitCode=0 Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.742631 4558 generic.go:334] "Generic (PLEG): container finished" podID="d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab" containerID="f12820ceb4c08944499274ef4eac4230ee15bce8eae024111d501a4102ebfb9c" exitCode=2 Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.742639 4558 generic.go:334] "Generic (PLEG): container finished" podID="d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab" containerID="cdb59cc41909e93106e64cb221195718916752f4f20e16104eb862f1be73922f" exitCode=0 Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.742646 4558 generic.go:334] "Generic (PLEG): container finished" podID="d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab" containerID="6e00a9aa371726359418b84cec4cb52d2eb2a18f6fcb6f310ef1bb67367ef96a" exitCode=0 Jan 20 17:14:14 crc 
kubenswrapper[4558]: I0120 17:14:14.742698 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab","Type":"ContainerDied","Data":"c1addf594a0e4be4a59dc20deb51636ae9b8537b9b29f20ad2448386146af197"} Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.742728 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab","Type":"ContainerDied","Data":"f12820ceb4c08944499274ef4eac4230ee15bce8eae024111d501a4102ebfb9c"} Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.742739 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab","Type":"ContainerDied","Data":"cdb59cc41909e93106e64cb221195718916752f4f20e16104eb862f1be73922f"} Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.742749 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab","Type":"ContainerDied","Data":"6e00a9aa371726359418b84cec4cb52d2eb2a18f6fcb6f310ef1bb67367ef96a"} Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.742757 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab","Type":"ContainerDied","Data":"e1fea547f03a94e234b0198a2f534f2d876d172b6a486d40ed8c94ef0540eb90"} Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.742771 4558 scope.go:117] "RemoveContainer" containerID="c1addf594a0e4be4a59dc20deb51636ae9b8537b9b29f20ad2448386146af197" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.742914 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.746155 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"5588d9e2-2f89-4abb-94de-76b5791582a5","Type":"ContainerStarted","Data":"6e377d33262043a09a008c7a0ab4c3ea5568682ab8ca401ccaf1b7b4b5801d9f"} Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.749318 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"a9ada454-f6fa-4ea4-b386-3ddbcd37f233","Type":"ContainerStarted","Data":"5259e35b7300917470585b85e57577b5c54cf041fcc22c4114363695b0632f21"} Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.783095 4558 scope.go:117] "RemoveContainer" containerID="f12820ceb4c08944499274ef4eac4230ee15bce8eae024111d501a4102ebfb9c" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.785210 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.810693 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.817888 4558 scope.go:117] "RemoveContainer" containerID="cdb59cc41909e93106e64cb221195718916752f4f20e16104eb862f1be73922f" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.819321 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/65cdc060-60d9-48a8-b535-31b93f49e1ed-scripts\") pod \"nova-cell0-conductor-db-sync-srppb\" (UID: \"65cdc060-60d9-48a8-b535-31b93f49e1ed\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-srppb" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.819542 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s5fpn\" (UniqueName: \"kubernetes.io/projected/65cdc060-60d9-48a8-b535-31b93f49e1ed-kube-api-access-s5fpn\") pod \"nova-cell0-conductor-db-sync-srppb\" (UID: \"65cdc060-60d9-48a8-b535-31b93f49e1ed\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-srppb" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.819602 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65cdc060-60d9-48a8-b535-31b93f49e1ed-config-data\") pod \"nova-cell0-conductor-db-sync-srppb\" (UID: \"65cdc060-60d9-48a8-b535-31b93f49e1ed\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-srppb" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.819621 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65cdc060-60d9-48a8-b535-31b93f49e1ed-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-srppb\" (UID: \"65cdc060-60d9-48a8-b535-31b93f49e1ed\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-srppb" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.820644 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.822924 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.824780 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.825396 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.829029 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.846068 4558 scope.go:117] "RemoveContainer" containerID="6e00a9aa371726359418b84cec4cb52d2eb2a18f6fcb6f310ef1bb67367ef96a" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.869280 4558 scope.go:117] "RemoveContainer" containerID="c1addf594a0e4be4a59dc20deb51636ae9b8537b9b29f20ad2448386146af197" Jan 20 17:14:14 crc kubenswrapper[4558]: E0120 17:14:14.870302 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c1addf594a0e4be4a59dc20deb51636ae9b8537b9b29f20ad2448386146af197\": container with ID starting with c1addf594a0e4be4a59dc20deb51636ae9b8537b9b29f20ad2448386146af197 not found: ID does not exist" containerID="c1addf594a0e4be4a59dc20deb51636ae9b8537b9b29f20ad2448386146af197" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.870339 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c1addf594a0e4be4a59dc20deb51636ae9b8537b9b29f20ad2448386146af197"} err="failed to get container status \"c1addf594a0e4be4a59dc20deb51636ae9b8537b9b29f20ad2448386146af197\": rpc error: code = NotFound desc = could not find container \"c1addf594a0e4be4a59dc20deb51636ae9b8537b9b29f20ad2448386146af197\": container with ID starting with c1addf594a0e4be4a59dc20deb51636ae9b8537b9b29f20ad2448386146af197 not found: ID does not exist" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.870361 4558 scope.go:117] "RemoveContainer" containerID="f12820ceb4c08944499274ef4eac4230ee15bce8eae024111d501a4102ebfb9c" Jan 20 17:14:14 crc kubenswrapper[4558]: E0120 17:14:14.870736 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f12820ceb4c08944499274ef4eac4230ee15bce8eae024111d501a4102ebfb9c\": container with ID starting with f12820ceb4c08944499274ef4eac4230ee15bce8eae024111d501a4102ebfb9c not found: ID does not exist" containerID="f12820ceb4c08944499274ef4eac4230ee15bce8eae024111d501a4102ebfb9c" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.870761 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f12820ceb4c08944499274ef4eac4230ee15bce8eae024111d501a4102ebfb9c"} err="failed to get container status \"f12820ceb4c08944499274ef4eac4230ee15bce8eae024111d501a4102ebfb9c\": rpc error: code = NotFound desc = could not find container \"f12820ceb4c08944499274ef4eac4230ee15bce8eae024111d501a4102ebfb9c\": container with ID starting with f12820ceb4c08944499274ef4eac4230ee15bce8eae024111d501a4102ebfb9c not found: ID does not exist" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.870777 4558 scope.go:117] "RemoveContainer" containerID="cdb59cc41909e93106e64cb221195718916752f4f20e16104eb862f1be73922f" Jan 20 17:14:14 crc kubenswrapper[4558]: E0120 17:14:14.871596 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: 
code = NotFound desc = could not find container \"cdb59cc41909e93106e64cb221195718916752f4f20e16104eb862f1be73922f\": container with ID starting with cdb59cc41909e93106e64cb221195718916752f4f20e16104eb862f1be73922f not found: ID does not exist" containerID="cdb59cc41909e93106e64cb221195718916752f4f20e16104eb862f1be73922f" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.871627 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cdb59cc41909e93106e64cb221195718916752f4f20e16104eb862f1be73922f"} err="failed to get container status \"cdb59cc41909e93106e64cb221195718916752f4f20e16104eb862f1be73922f\": rpc error: code = NotFound desc = could not find container \"cdb59cc41909e93106e64cb221195718916752f4f20e16104eb862f1be73922f\": container with ID starting with cdb59cc41909e93106e64cb221195718916752f4f20e16104eb862f1be73922f not found: ID does not exist" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.871648 4558 scope.go:117] "RemoveContainer" containerID="6e00a9aa371726359418b84cec4cb52d2eb2a18f6fcb6f310ef1bb67367ef96a" Jan 20 17:14:14 crc kubenswrapper[4558]: E0120 17:14:14.871997 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6e00a9aa371726359418b84cec4cb52d2eb2a18f6fcb6f310ef1bb67367ef96a\": container with ID starting with 6e00a9aa371726359418b84cec4cb52d2eb2a18f6fcb6f310ef1bb67367ef96a not found: ID does not exist" containerID="6e00a9aa371726359418b84cec4cb52d2eb2a18f6fcb6f310ef1bb67367ef96a" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.872036 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6e00a9aa371726359418b84cec4cb52d2eb2a18f6fcb6f310ef1bb67367ef96a"} err="failed to get container status \"6e00a9aa371726359418b84cec4cb52d2eb2a18f6fcb6f310ef1bb67367ef96a\": rpc error: code = NotFound desc = could not find container \"6e00a9aa371726359418b84cec4cb52d2eb2a18f6fcb6f310ef1bb67367ef96a\": container with ID starting with 6e00a9aa371726359418b84cec4cb52d2eb2a18f6fcb6f310ef1bb67367ef96a not found: ID does not exist" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.872062 4558 scope.go:117] "RemoveContainer" containerID="c1addf594a0e4be4a59dc20deb51636ae9b8537b9b29f20ad2448386146af197" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.872448 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c1addf594a0e4be4a59dc20deb51636ae9b8537b9b29f20ad2448386146af197"} err="failed to get container status \"c1addf594a0e4be4a59dc20deb51636ae9b8537b9b29f20ad2448386146af197\": rpc error: code = NotFound desc = could not find container \"c1addf594a0e4be4a59dc20deb51636ae9b8537b9b29f20ad2448386146af197\": container with ID starting with c1addf594a0e4be4a59dc20deb51636ae9b8537b9b29f20ad2448386146af197 not found: ID does not exist" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.872472 4558 scope.go:117] "RemoveContainer" containerID="f12820ceb4c08944499274ef4eac4230ee15bce8eae024111d501a4102ebfb9c" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.872774 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f12820ceb4c08944499274ef4eac4230ee15bce8eae024111d501a4102ebfb9c"} err="failed to get container status \"f12820ceb4c08944499274ef4eac4230ee15bce8eae024111d501a4102ebfb9c\": rpc error: code = NotFound desc = could not find container 
\"f12820ceb4c08944499274ef4eac4230ee15bce8eae024111d501a4102ebfb9c\": container with ID starting with f12820ceb4c08944499274ef4eac4230ee15bce8eae024111d501a4102ebfb9c not found: ID does not exist" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.872792 4558 scope.go:117] "RemoveContainer" containerID="cdb59cc41909e93106e64cb221195718916752f4f20e16104eb862f1be73922f" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.873070 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cdb59cc41909e93106e64cb221195718916752f4f20e16104eb862f1be73922f"} err="failed to get container status \"cdb59cc41909e93106e64cb221195718916752f4f20e16104eb862f1be73922f\": rpc error: code = NotFound desc = could not find container \"cdb59cc41909e93106e64cb221195718916752f4f20e16104eb862f1be73922f\": container with ID starting with cdb59cc41909e93106e64cb221195718916752f4f20e16104eb862f1be73922f not found: ID does not exist" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.873087 4558 scope.go:117] "RemoveContainer" containerID="6e00a9aa371726359418b84cec4cb52d2eb2a18f6fcb6f310ef1bb67367ef96a" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.873389 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6e00a9aa371726359418b84cec4cb52d2eb2a18f6fcb6f310ef1bb67367ef96a"} err="failed to get container status \"6e00a9aa371726359418b84cec4cb52d2eb2a18f6fcb6f310ef1bb67367ef96a\": rpc error: code = NotFound desc = could not find container \"6e00a9aa371726359418b84cec4cb52d2eb2a18f6fcb6f310ef1bb67367ef96a\": container with ID starting with 6e00a9aa371726359418b84cec4cb52d2eb2a18f6fcb6f310ef1bb67367ef96a not found: ID does not exist" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.873414 4558 scope.go:117] "RemoveContainer" containerID="c1addf594a0e4be4a59dc20deb51636ae9b8537b9b29f20ad2448386146af197" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.873717 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c1addf594a0e4be4a59dc20deb51636ae9b8537b9b29f20ad2448386146af197"} err="failed to get container status \"c1addf594a0e4be4a59dc20deb51636ae9b8537b9b29f20ad2448386146af197\": rpc error: code = NotFound desc = could not find container \"c1addf594a0e4be4a59dc20deb51636ae9b8537b9b29f20ad2448386146af197\": container with ID starting with c1addf594a0e4be4a59dc20deb51636ae9b8537b9b29f20ad2448386146af197 not found: ID does not exist" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.873745 4558 scope.go:117] "RemoveContainer" containerID="f12820ceb4c08944499274ef4eac4230ee15bce8eae024111d501a4102ebfb9c" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.874747 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f12820ceb4c08944499274ef4eac4230ee15bce8eae024111d501a4102ebfb9c"} err="failed to get container status \"f12820ceb4c08944499274ef4eac4230ee15bce8eae024111d501a4102ebfb9c\": rpc error: code = NotFound desc = could not find container \"f12820ceb4c08944499274ef4eac4230ee15bce8eae024111d501a4102ebfb9c\": container with ID starting with f12820ceb4c08944499274ef4eac4230ee15bce8eae024111d501a4102ebfb9c not found: ID does not exist" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.874772 4558 scope.go:117] "RemoveContainer" containerID="cdb59cc41909e93106e64cb221195718916752f4f20e16104eb862f1be73922f" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.875053 4558 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cdb59cc41909e93106e64cb221195718916752f4f20e16104eb862f1be73922f"} err="failed to get container status \"cdb59cc41909e93106e64cb221195718916752f4f20e16104eb862f1be73922f\": rpc error: code = NotFound desc = could not find container \"cdb59cc41909e93106e64cb221195718916752f4f20e16104eb862f1be73922f\": container with ID starting with cdb59cc41909e93106e64cb221195718916752f4f20e16104eb862f1be73922f not found: ID does not exist" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.875077 4558 scope.go:117] "RemoveContainer" containerID="6e00a9aa371726359418b84cec4cb52d2eb2a18f6fcb6f310ef1bb67367ef96a" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.875432 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6e00a9aa371726359418b84cec4cb52d2eb2a18f6fcb6f310ef1bb67367ef96a"} err="failed to get container status \"6e00a9aa371726359418b84cec4cb52d2eb2a18f6fcb6f310ef1bb67367ef96a\": rpc error: code = NotFound desc = could not find container \"6e00a9aa371726359418b84cec4cb52d2eb2a18f6fcb6f310ef1bb67367ef96a\": container with ID starting with 6e00a9aa371726359418b84cec4cb52d2eb2a18f6fcb6f310ef1bb67367ef96a not found: ID does not exist" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.875453 4558 scope.go:117] "RemoveContainer" containerID="c1addf594a0e4be4a59dc20deb51636ae9b8537b9b29f20ad2448386146af197" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.875728 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c1addf594a0e4be4a59dc20deb51636ae9b8537b9b29f20ad2448386146af197"} err="failed to get container status \"c1addf594a0e4be4a59dc20deb51636ae9b8537b9b29f20ad2448386146af197\": rpc error: code = NotFound desc = could not find container \"c1addf594a0e4be4a59dc20deb51636ae9b8537b9b29f20ad2448386146af197\": container with ID starting with c1addf594a0e4be4a59dc20deb51636ae9b8537b9b29f20ad2448386146af197 not found: ID does not exist" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.875750 4558 scope.go:117] "RemoveContainer" containerID="f12820ceb4c08944499274ef4eac4230ee15bce8eae024111d501a4102ebfb9c" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.876026 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f12820ceb4c08944499274ef4eac4230ee15bce8eae024111d501a4102ebfb9c"} err="failed to get container status \"f12820ceb4c08944499274ef4eac4230ee15bce8eae024111d501a4102ebfb9c\": rpc error: code = NotFound desc = could not find container \"f12820ceb4c08944499274ef4eac4230ee15bce8eae024111d501a4102ebfb9c\": container with ID starting with f12820ceb4c08944499274ef4eac4230ee15bce8eae024111d501a4102ebfb9c not found: ID does not exist" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.876049 4558 scope.go:117] "RemoveContainer" containerID="cdb59cc41909e93106e64cb221195718916752f4f20e16104eb862f1be73922f" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.876275 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cdb59cc41909e93106e64cb221195718916752f4f20e16104eb862f1be73922f"} err="failed to get container status \"cdb59cc41909e93106e64cb221195718916752f4f20e16104eb862f1be73922f\": rpc error: code = NotFound desc = could not find container \"cdb59cc41909e93106e64cb221195718916752f4f20e16104eb862f1be73922f\": container with ID starting with 
cdb59cc41909e93106e64cb221195718916752f4f20e16104eb862f1be73922f not found: ID does not exist" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.876297 4558 scope.go:117] "RemoveContainer" containerID="6e00a9aa371726359418b84cec4cb52d2eb2a18f6fcb6f310ef1bb67367ef96a" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.876540 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6e00a9aa371726359418b84cec4cb52d2eb2a18f6fcb6f310ef1bb67367ef96a"} err="failed to get container status \"6e00a9aa371726359418b84cec4cb52d2eb2a18f6fcb6f310ef1bb67367ef96a\": rpc error: code = NotFound desc = could not find container \"6e00a9aa371726359418b84cec4cb52d2eb2a18f6fcb6f310ef1bb67367ef96a\": container with ID starting with 6e00a9aa371726359418b84cec4cb52d2eb2a18f6fcb6f310ef1bb67367ef96a not found: ID does not exist" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.921511 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/65cdc060-60d9-48a8-b535-31b93f49e1ed-scripts\") pod \"nova-cell0-conductor-db-sync-srppb\" (UID: \"65cdc060-60d9-48a8-b535-31b93f49e1ed\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-srppb" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.921792 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/03425c3d-b5bf-4520-bb29-5e953d285698-run-httpd\") pod \"ceilometer-0\" (UID: \"03425c3d-b5bf-4520-bb29-5e953d285698\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.921833 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/03425c3d-b5bf-4520-bb29-5e953d285698-log-httpd\") pod \"ceilometer-0\" (UID: \"03425c3d-b5bf-4520-bb29-5e953d285698\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.921867 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03425c3d-b5bf-4520-bb29-5e953d285698-config-data\") pod \"ceilometer-0\" (UID: \"03425c3d-b5bf-4520-bb29-5e953d285698\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.921953 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sknln\" (UniqueName: \"kubernetes.io/projected/03425c3d-b5bf-4520-bb29-5e953d285698-kube-api-access-sknln\") pod \"ceilometer-0\" (UID: \"03425c3d-b5bf-4520-bb29-5e953d285698\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.922090 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s5fpn\" (UniqueName: \"kubernetes.io/projected/65cdc060-60d9-48a8-b535-31b93f49e1ed-kube-api-access-s5fpn\") pod \"nova-cell0-conductor-db-sync-srppb\" (UID: \"65cdc060-60d9-48a8-b535-31b93f49e1ed\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-srppb" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.922135 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65cdc060-60d9-48a8-b535-31b93f49e1ed-config-data\") pod \"nova-cell0-conductor-db-sync-srppb\" (UID: 
\"65cdc060-60d9-48a8-b535-31b93f49e1ed\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-srppb" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.922156 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65cdc060-60d9-48a8-b535-31b93f49e1ed-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-srppb\" (UID: \"65cdc060-60d9-48a8-b535-31b93f49e1ed\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-srppb" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.922347 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03425c3d-b5bf-4520-bb29-5e953d285698-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"03425c3d-b5bf-4520-bb29-5e953d285698\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.922400 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/03425c3d-b5bf-4520-bb29-5e953d285698-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"03425c3d-b5bf-4520-bb29-5e953d285698\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.922432 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/03425c3d-b5bf-4520-bb29-5e953d285698-scripts\") pod \"ceilometer-0\" (UID: \"03425c3d-b5bf-4520-bb29-5e953d285698\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.925706 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/65cdc060-60d9-48a8-b535-31b93f49e1ed-scripts\") pod \"nova-cell0-conductor-db-sync-srppb\" (UID: \"65cdc060-60d9-48a8-b535-31b93f49e1ed\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-srppb" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.926322 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65cdc060-60d9-48a8-b535-31b93f49e1ed-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-srppb\" (UID: \"65cdc060-60d9-48a8-b535-31b93f49e1ed\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-srppb" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.926539 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65cdc060-60d9-48a8-b535-31b93f49e1ed-config-data\") pod \"nova-cell0-conductor-db-sync-srppb\" (UID: \"65cdc060-60d9-48a8-b535-31b93f49e1ed\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-srppb" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.938693 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s5fpn\" (UniqueName: \"kubernetes.io/projected/65cdc060-60d9-48a8-b535-31b93f49e1ed-kube-api-access-s5fpn\") pod \"nova-cell0-conductor-db-sync-srppb\" (UID: \"65cdc060-60d9-48a8-b535-31b93f49e1ed\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-srppb" Jan 20 17:14:14 crc kubenswrapper[4558]: I0120 17:14:14.941959 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-srppb" Jan 20 17:14:15 crc kubenswrapper[4558]: I0120 17:14:15.024159 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03425c3d-b5bf-4520-bb29-5e953d285698-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"03425c3d-b5bf-4520-bb29-5e953d285698\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:15 crc kubenswrapper[4558]: I0120 17:14:15.024228 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/03425c3d-b5bf-4520-bb29-5e953d285698-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"03425c3d-b5bf-4520-bb29-5e953d285698\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:15 crc kubenswrapper[4558]: I0120 17:14:15.024253 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/03425c3d-b5bf-4520-bb29-5e953d285698-scripts\") pod \"ceilometer-0\" (UID: \"03425c3d-b5bf-4520-bb29-5e953d285698\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:15 crc kubenswrapper[4558]: I0120 17:14:15.024320 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/03425c3d-b5bf-4520-bb29-5e953d285698-run-httpd\") pod \"ceilometer-0\" (UID: \"03425c3d-b5bf-4520-bb29-5e953d285698\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:15 crc kubenswrapper[4558]: I0120 17:14:15.024347 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/03425c3d-b5bf-4520-bb29-5e953d285698-log-httpd\") pod \"ceilometer-0\" (UID: \"03425c3d-b5bf-4520-bb29-5e953d285698\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:15 crc kubenswrapper[4558]: I0120 17:14:15.024365 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03425c3d-b5bf-4520-bb29-5e953d285698-config-data\") pod \"ceilometer-0\" (UID: \"03425c3d-b5bf-4520-bb29-5e953d285698\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:15 crc kubenswrapper[4558]: I0120 17:14:15.024394 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sknln\" (UniqueName: \"kubernetes.io/projected/03425c3d-b5bf-4520-bb29-5e953d285698-kube-api-access-sknln\") pod \"ceilometer-0\" (UID: \"03425c3d-b5bf-4520-bb29-5e953d285698\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:15 crc kubenswrapper[4558]: I0120 17:14:15.024786 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/03425c3d-b5bf-4520-bb29-5e953d285698-run-httpd\") pod \"ceilometer-0\" (UID: \"03425c3d-b5bf-4520-bb29-5e953d285698\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:15 crc kubenswrapper[4558]: I0120 17:14:15.025048 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/03425c3d-b5bf-4520-bb29-5e953d285698-log-httpd\") pod \"ceilometer-0\" (UID: \"03425c3d-b5bf-4520-bb29-5e953d285698\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:15 crc kubenswrapper[4558]: I0120 17:14:15.029468 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/03425c3d-b5bf-4520-bb29-5e953d285698-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"03425c3d-b5bf-4520-bb29-5e953d285698\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:15 crc kubenswrapper[4558]: I0120 17:14:15.029615 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/03425c3d-b5bf-4520-bb29-5e953d285698-scripts\") pod \"ceilometer-0\" (UID: \"03425c3d-b5bf-4520-bb29-5e953d285698\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:15 crc kubenswrapper[4558]: I0120 17:14:15.032238 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03425c3d-b5bf-4520-bb29-5e953d285698-config-data\") pod \"ceilometer-0\" (UID: \"03425c3d-b5bf-4520-bb29-5e953d285698\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:15 crc kubenswrapper[4558]: I0120 17:14:15.033592 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/03425c3d-b5bf-4520-bb29-5e953d285698-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"03425c3d-b5bf-4520-bb29-5e953d285698\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:15 crc kubenswrapper[4558]: I0120 17:14:15.047403 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sknln\" (UniqueName: \"kubernetes.io/projected/03425c3d-b5bf-4520-bb29-5e953d285698-kube-api-access-sknln\") pod \"ceilometer-0\" (UID: \"03425c3d-b5bf-4520-bb29-5e953d285698\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:15 crc kubenswrapper[4558]: I0120 17:14:15.137889 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:15 crc kubenswrapper[4558]: I0120 17:14:15.459589 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-db-sync-srppb"] Jan 20 17:14:15 crc kubenswrapper[4558]: W0120 17:14:15.475322 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod65cdc060_60d9_48a8_b535_31b93f49e1ed.slice/crio-b8b2b4a9e34fc9294650366365dac932a7259155ee90ab6b9889a62f898f337a WatchSource:0}: Error finding container b8b2b4a9e34fc9294650366365dac932a7259155ee90ab6b9889a62f898f337a: Status 404 returned error can't find the container with id b8b2b4a9e34fc9294650366365dac932a7259155ee90ab6b9889a62f898f337a Jan 20 17:14:15 crc kubenswrapper[4558]: I0120 17:14:15.551608 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:14:15 crc kubenswrapper[4558]: W0120 17:14:15.560070 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod03425c3d_b5bf_4520_bb29_5e953d285698.slice/crio-4e2892f80aff8d47486504372adc09eb9762dba25f29727c698d6ba1df5a5106 WatchSource:0}: Error finding container 4e2892f80aff8d47486504372adc09eb9762dba25f29727c698d6ba1df5a5106: Status 404 returned error can't find the container with id 4e2892f80aff8d47486504372adc09eb9762dba25f29727c698d6ba1df5a5106 Jan 20 17:14:15 crc kubenswrapper[4558]: I0120 17:14:15.769039 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"03425c3d-b5bf-4520-bb29-5e953d285698","Type":"ContainerStarted","Data":"4e2892f80aff8d47486504372adc09eb9762dba25f29727c698d6ba1df5a5106"} Jan 20 
17:14:15 crc kubenswrapper[4558]: I0120 17:14:15.771672 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"5588d9e2-2f89-4abb-94de-76b5791582a5","Type":"ContainerStarted","Data":"674fe7739106de999aea93678b41abfb64307a7d4593b865b7537759b3ee32a7"} Jan 20 17:14:15 crc kubenswrapper[4558]: I0120 17:14:15.771711 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"5588d9e2-2f89-4abb-94de-76b5791582a5","Type":"ContainerStarted","Data":"f7e6285aee41adbd48413910cd7a5c3aa34306a43105d42dd032ba571f9b7d5c"} Jan 20 17:14:15 crc kubenswrapper[4558]: I0120 17:14:15.774118 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"a9ada454-f6fa-4ea4-b386-3ddbcd37f233","Type":"ContainerStarted","Data":"dbed1dfb06d84c003e0890032b6a972606b43286eee0111f21ef16dfa9e94a1d"} Jan 20 17:14:15 crc kubenswrapper[4558]: I0120 17:14:15.780157 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-srppb" event={"ID":"65cdc060-60d9-48a8-b535-31b93f49e1ed","Type":"ContainerStarted","Data":"34592e4c65b898d25a46f138f47e5ebed319feee771e779589feba4abd2545b5"} Jan 20 17:14:15 crc kubenswrapper[4558]: I0120 17:14:15.780203 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-srppb" event={"ID":"65cdc060-60d9-48a8-b535-31b93f49e1ed","Type":"ContainerStarted","Data":"b8b2b4a9e34fc9294650366365dac932a7259155ee90ab6b9889a62f898f337a"} Jan 20 17:14:15 crc kubenswrapper[4558]: I0120 17:14:15.797621 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-internal-api-0" podStartSLOduration=2.797604614 podStartE2EDuration="2.797604614s" podCreationTimestamp="2026-01-20 17:14:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:14:15.793102457 +0000 UTC m=+1949.553440424" watchObservedRunningTime="2026-01-20 17:14:15.797604614 +0000 UTC m=+1949.557942581" Jan 20 17:14:15 crc kubenswrapper[4558]: I0120 17:14:15.815509 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-external-api-0" podStartSLOduration=3.81549778 podStartE2EDuration="3.81549778s" podCreationTimestamp="2026-01-20 17:14:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:14:15.809558393 +0000 UTC m=+1949.569896360" watchObservedRunningTime="2026-01-20 17:14:15.81549778 +0000 UTC m=+1949.575835747" Jan 20 17:14:15 crc kubenswrapper[4558]: I0120 17:14:15.829254 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-srppb" podStartSLOduration=1.82923839 podStartE2EDuration="1.82923839s" podCreationTimestamp="2026-01-20 17:14:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:14:15.824036998 +0000 UTC m=+1949.584374965" watchObservedRunningTime="2026-01-20 17:14:15.82923839 +0000 UTC m=+1949.589576356" Jan 20 17:14:16 crc kubenswrapper[4558]: I0120 17:14:16.561509 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 
20 17:14:16 crc kubenswrapper[4558]: I0120 17:14:16.576897 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab" path="/var/lib/kubelet/pods/d6352ad2-dc0a-46b0-a9b2-f1fffe97d8ab/volumes" Jan 20 17:14:16 crc kubenswrapper[4558]: I0120 17:14:16.690314 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/swift-proxy-7c48456b-8z4pm" Jan 20 17:14:16 crc kubenswrapper[4558]: I0120 17:14:16.691798 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/swift-proxy-7c48456b-8z4pm" Jan 20 17:14:16 crc kubenswrapper[4558]: I0120 17:14:16.792537 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"03425c3d-b5bf-4520-bb29-5e953d285698","Type":"ContainerStarted","Data":"291169ed74eb828b64be9a4f588cf633e52f4869bd98972b8ba91e6afd5793c7"} Jan 20 17:14:17 crc kubenswrapper[4558]: I0120 17:14:17.800704 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"03425c3d-b5bf-4520-bb29-5e953d285698","Type":"ContainerStarted","Data":"4b1aadf2aee87daac3bc8fdaf22670353ba7a446b66e43eb5cff6b6bacf0a9e1"} Jan 20 17:14:18 crc kubenswrapper[4558]: I0120 17:14:18.812324 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"03425c3d-b5bf-4520-bb29-5e953d285698","Type":"ContainerStarted","Data":"0b2270d43d3bfeb4f118c234252baa2620a49109d18670883a7c6cd5287cc3ef"} Jan 20 17:14:19 crc kubenswrapper[4558]: I0120 17:14:19.845439 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"03425c3d-b5bf-4520-bb29-5e953d285698","Type":"ContainerStarted","Data":"0fb55c407dda6948afb10ebbf79d8c99f5e75b7441c3add56ed4d88e4583a651"} Jan 20 17:14:19 crc kubenswrapper[4558]: I0120 17:14:19.845579 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="03425c3d-b5bf-4520-bb29-5e953d285698" containerName="ceilometer-central-agent" containerID="cri-o://291169ed74eb828b64be9a4f588cf633e52f4869bd98972b8ba91e6afd5793c7" gracePeriod=30 Jan 20 17:14:19 crc kubenswrapper[4558]: I0120 17:14:19.845798 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:19 crc kubenswrapper[4558]: I0120 17:14:19.845874 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="03425c3d-b5bf-4520-bb29-5e953d285698" containerName="sg-core" containerID="cri-o://0b2270d43d3bfeb4f118c234252baa2620a49109d18670883a7c6cd5287cc3ef" gracePeriod=30 Jan 20 17:14:19 crc kubenswrapper[4558]: I0120 17:14:19.845914 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="03425c3d-b5bf-4520-bb29-5e953d285698" containerName="ceilometer-notification-agent" containerID="cri-o://4b1aadf2aee87daac3bc8fdaf22670353ba7a446b66e43eb5cff6b6bacf0a9e1" gracePeriod=30 Jan 20 17:14:19 crc kubenswrapper[4558]: I0120 17:14:19.845953 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="03425c3d-b5bf-4520-bb29-5e953d285698" containerName="proxy-httpd" containerID="cri-o://0fb55c407dda6948afb10ebbf79d8c99f5e75b7441c3add56ed4d88e4583a651" gracePeriod=30 Jan 20 17:14:19 crc kubenswrapper[4558]: I0120 
17:14:19.864153 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=2.004474783 podStartE2EDuration="5.864142422s" podCreationTimestamp="2026-01-20 17:14:14 +0000 UTC" firstStartedPulling="2026-01-20 17:14:15.563149513 +0000 UTC m=+1949.323487481" lastFinishedPulling="2026-01-20 17:14:19.422817153 +0000 UTC m=+1953.183155120" observedRunningTime="2026-01-20 17:14:19.863740115 +0000 UTC m=+1953.624078073" watchObservedRunningTime="2026-01-20 17:14:19.864142422 +0000 UTC m=+1953.624480389" Jan 20 17:14:20 crc kubenswrapper[4558]: I0120 17:14:20.858934 4558 generic.go:334] "Generic (PLEG): container finished" podID="03425c3d-b5bf-4520-bb29-5e953d285698" containerID="0fb55c407dda6948afb10ebbf79d8c99f5e75b7441c3add56ed4d88e4583a651" exitCode=0 Jan 20 17:14:20 crc kubenswrapper[4558]: I0120 17:14:20.858974 4558 generic.go:334] "Generic (PLEG): container finished" podID="03425c3d-b5bf-4520-bb29-5e953d285698" containerID="0b2270d43d3bfeb4f118c234252baa2620a49109d18670883a7c6cd5287cc3ef" exitCode=2 Jan 20 17:14:20 crc kubenswrapper[4558]: I0120 17:14:20.858983 4558 generic.go:334] "Generic (PLEG): container finished" podID="03425c3d-b5bf-4520-bb29-5e953d285698" containerID="4b1aadf2aee87daac3bc8fdaf22670353ba7a446b66e43eb5cff6b6bacf0a9e1" exitCode=0 Jan 20 17:14:20 crc kubenswrapper[4558]: I0120 17:14:20.859028 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"03425c3d-b5bf-4520-bb29-5e953d285698","Type":"ContainerDied","Data":"0fb55c407dda6948afb10ebbf79d8c99f5e75b7441c3add56ed4d88e4583a651"} Jan 20 17:14:20 crc kubenswrapper[4558]: I0120 17:14:20.859090 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"03425c3d-b5bf-4520-bb29-5e953d285698","Type":"ContainerDied","Data":"0b2270d43d3bfeb4f118c234252baa2620a49109d18670883a7c6cd5287cc3ef"} Jan 20 17:14:20 crc kubenswrapper[4558]: I0120 17:14:20.859105 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"03425c3d-b5bf-4520-bb29-5e953d285698","Type":"ContainerDied","Data":"4b1aadf2aee87daac3bc8fdaf22670353ba7a446b66e43eb5cff6b6bacf0a9e1"} Jan 20 17:14:21 crc kubenswrapper[4558]: I0120 17:14:21.360388 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:21 crc kubenswrapper[4558]: I0120 17:14:21.464492 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/03425c3d-b5bf-4520-bb29-5e953d285698-log-httpd\") pod \"03425c3d-b5bf-4520-bb29-5e953d285698\" (UID: \"03425c3d-b5bf-4520-bb29-5e953d285698\") " Jan 20 17:14:21 crc kubenswrapper[4558]: I0120 17:14:21.464588 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03425c3d-b5bf-4520-bb29-5e953d285698-combined-ca-bundle\") pod \"03425c3d-b5bf-4520-bb29-5e953d285698\" (UID: \"03425c3d-b5bf-4520-bb29-5e953d285698\") " Jan 20 17:14:21 crc kubenswrapper[4558]: I0120 17:14:21.464632 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/03425c3d-b5bf-4520-bb29-5e953d285698-run-httpd\") pod \"03425c3d-b5bf-4520-bb29-5e953d285698\" (UID: \"03425c3d-b5bf-4520-bb29-5e953d285698\") " Jan 20 17:14:21 crc kubenswrapper[4558]: I0120 17:14:21.464693 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/03425c3d-b5bf-4520-bb29-5e953d285698-sg-core-conf-yaml\") pod \"03425c3d-b5bf-4520-bb29-5e953d285698\" (UID: \"03425c3d-b5bf-4520-bb29-5e953d285698\") " Jan 20 17:14:21 crc kubenswrapper[4558]: I0120 17:14:21.464781 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sknln\" (UniqueName: \"kubernetes.io/projected/03425c3d-b5bf-4520-bb29-5e953d285698-kube-api-access-sknln\") pod \"03425c3d-b5bf-4520-bb29-5e953d285698\" (UID: \"03425c3d-b5bf-4520-bb29-5e953d285698\") " Jan 20 17:14:21 crc kubenswrapper[4558]: I0120 17:14:21.464836 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03425c3d-b5bf-4520-bb29-5e953d285698-config-data\") pod \"03425c3d-b5bf-4520-bb29-5e953d285698\" (UID: \"03425c3d-b5bf-4520-bb29-5e953d285698\") " Jan 20 17:14:21 crc kubenswrapper[4558]: I0120 17:14:21.464877 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/03425c3d-b5bf-4520-bb29-5e953d285698-scripts\") pod \"03425c3d-b5bf-4520-bb29-5e953d285698\" (UID: \"03425c3d-b5bf-4520-bb29-5e953d285698\") " Jan 20 17:14:21 crc kubenswrapper[4558]: I0120 17:14:21.465050 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/03425c3d-b5bf-4520-bb29-5e953d285698-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "03425c3d-b5bf-4520-bb29-5e953d285698" (UID: "03425c3d-b5bf-4520-bb29-5e953d285698"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:14:21 crc kubenswrapper[4558]: I0120 17:14:21.465789 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/03425c3d-b5bf-4520-bb29-5e953d285698-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:21 crc kubenswrapper[4558]: I0120 17:14:21.465800 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/03425c3d-b5bf-4520-bb29-5e953d285698-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "03425c3d-b5bf-4520-bb29-5e953d285698" (UID: "03425c3d-b5bf-4520-bb29-5e953d285698"). 
InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:14:21 crc kubenswrapper[4558]: I0120 17:14:21.472642 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/03425c3d-b5bf-4520-bb29-5e953d285698-scripts" (OuterVolumeSpecName: "scripts") pod "03425c3d-b5bf-4520-bb29-5e953d285698" (UID: "03425c3d-b5bf-4520-bb29-5e953d285698"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:14:21 crc kubenswrapper[4558]: I0120 17:14:21.475100 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/03425c3d-b5bf-4520-bb29-5e953d285698-kube-api-access-sknln" (OuterVolumeSpecName: "kube-api-access-sknln") pod "03425c3d-b5bf-4520-bb29-5e953d285698" (UID: "03425c3d-b5bf-4520-bb29-5e953d285698"). InnerVolumeSpecName "kube-api-access-sknln". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:14:21 crc kubenswrapper[4558]: I0120 17:14:21.494859 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/03425c3d-b5bf-4520-bb29-5e953d285698-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "03425c3d-b5bf-4520-bb29-5e953d285698" (UID: "03425c3d-b5bf-4520-bb29-5e953d285698"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:14:21 crc kubenswrapper[4558]: I0120 17:14:21.537199 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/03425c3d-b5bf-4520-bb29-5e953d285698-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "03425c3d-b5bf-4520-bb29-5e953d285698" (UID: "03425c3d-b5bf-4520-bb29-5e953d285698"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:14:21 crc kubenswrapper[4558]: I0120 17:14:21.553695 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/03425c3d-b5bf-4520-bb29-5e953d285698-config-data" (OuterVolumeSpecName: "config-data") pod "03425c3d-b5bf-4520-bb29-5e953d285698" (UID: "03425c3d-b5bf-4520-bb29-5e953d285698"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:14:21 crc kubenswrapper[4558]: I0120 17:14:21.567727 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03425c3d-b5bf-4520-bb29-5e953d285698-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:21 crc kubenswrapper[4558]: I0120 17:14:21.567751 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/03425c3d-b5bf-4520-bb29-5e953d285698-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:21 crc kubenswrapper[4558]: I0120 17:14:21.567762 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/03425c3d-b5bf-4520-bb29-5e953d285698-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:21 crc kubenswrapper[4558]: I0120 17:14:21.567775 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sknln\" (UniqueName: \"kubernetes.io/projected/03425c3d-b5bf-4520-bb29-5e953d285698-kube-api-access-sknln\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:21 crc kubenswrapper[4558]: I0120 17:14:21.567788 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03425c3d-b5bf-4520-bb29-5e953d285698-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:21 crc kubenswrapper[4558]: I0120 17:14:21.567800 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/03425c3d-b5bf-4520-bb29-5e953d285698-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:21 crc kubenswrapper[4558]: I0120 17:14:21.873600 4558 generic.go:334] "Generic (PLEG): container finished" podID="03425c3d-b5bf-4520-bb29-5e953d285698" containerID="291169ed74eb828b64be9a4f588cf633e52f4869bd98972b8ba91e6afd5793c7" exitCode=0 Jan 20 17:14:21 crc kubenswrapper[4558]: I0120 17:14:21.873690 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"03425c3d-b5bf-4520-bb29-5e953d285698","Type":"ContainerDied","Data":"291169ed74eb828b64be9a4f588cf633e52f4869bd98972b8ba91e6afd5793c7"} Jan 20 17:14:21 crc kubenswrapper[4558]: I0120 17:14:21.873765 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"03425c3d-b5bf-4520-bb29-5e953d285698","Type":"ContainerDied","Data":"4e2892f80aff8d47486504372adc09eb9762dba25f29727c698d6ba1df5a5106"} Jan 20 17:14:21 crc kubenswrapper[4558]: I0120 17:14:21.873791 4558 scope.go:117] "RemoveContainer" containerID="0fb55c407dda6948afb10ebbf79d8c99f5e75b7441c3add56ed4d88e4583a651" Jan 20 17:14:21 crc kubenswrapper[4558]: I0120 17:14:21.873834 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:21 crc kubenswrapper[4558]: I0120 17:14:21.876548 4558 generic.go:334] "Generic (PLEG): container finished" podID="65cdc060-60d9-48a8-b535-31b93f49e1ed" containerID="34592e4c65b898d25a46f138f47e5ebed319feee771e779589feba4abd2545b5" exitCode=0 Jan 20 17:14:21 crc kubenswrapper[4558]: I0120 17:14:21.876616 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-srppb" event={"ID":"65cdc060-60d9-48a8-b535-31b93f49e1ed","Type":"ContainerDied","Data":"34592e4c65b898d25a46f138f47e5ebed319feee771e779589feba4abd2545b5"} Jan 20 17:14:21 crc kubenswrapper[4558]: I0120 17:14:21.902245 4558 scope.go:117] "RemoveContainer" containerID="0b2270d43d3bfeb4f118c234252baa2620a49109d18670883a7c6cd5287cc3ef" Jan 20 17:14:21 crc kubenswrapper[4558]: I0120 17:14:21.923630 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:14:21 crc kubenswrapper[4558]: I0120 17:14:21.926118 4558 scope.go:117] "RemoveContainer" containerID="4b1aadf2aee87daac3bc8fdaf22670353ba7a446b66e43eb5cff6b6bacf0a9e1" Jan 20 17:14:21 crc kubenswrapper[4558]: I0120 17:14:21.939010 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:14:21 crc kubenswrapper[4558]: I0120 17:14:21.950402 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:14:21 crc kubenswrapper[4558]: E0120 17:14:21.950875 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03425c3d-b5bf-4520-bb29-5e953d285698" containerName="ceilometer-central-agent" Jan 20 17:14:21 crc kubenswrapper[4558]: I0120 17:14:21.950898 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="03425c3d-b5bf-4520-bb29-5e953d285698" containerName="ceilometer-central-agent" Jan 20 17:14:21 crc kubenswrapper[4558]: E0120 17:14:21.950921 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03425c3d-b5bf-4520-bb29-5e953d285698" containerName="proxy-httpd" Jan 20 17:14:21 crc kubenswrapper[4558]: I0120 17:14:21.950929 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="03425c3d-b5bf-4520-bb29-5e953d285698" containerName="proxy-httpd" Jan 20 17:14:21 crc kubenswrapper[4558]: E0120 17:14:21.950948 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03425c3d-b5bf-4520-bb29-5e953d285698" containerName="ceilometer-notification-agent" Jan 20 17:14:21 crc kubenswrapper[4558]: I0120 17:14:21.950957 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="03425c3d-b5bf-4520-bb29-5e953d285698" containerName="ceilometer-notification-agent" Jan 20 17:14:21 crc kubenswrapper[4558]: E0120 17:14:21.950973 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03425c3d-b5bf-4520-bb29-5e953d285698" containerName="sg-core" Jan 20 17:14:21 crc kubenswrapper[4558]: I0120 17:14:21.950982 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="03425c3d-b5bf-4520-bb29-5e953d285698" containerName="sg-core" Jan 20 17:14:21 crc kubenswrapper[4558]: I0120 17:14:21.951223 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="03425c3d-b5bf-4520-bb29-5e953d285698" containerName="sg-core" Jan 20 17:14:21 crc kubenswrapper[4558]: I0120 17:14:21.951245 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="03425c3d-b5bf-4520-bb29-5e953d285698" containerName="ceilometer-notification-agent" Jan 20 17:14:21 crc kubenswrapper[4558]: 
I0120 17:14:21.951261 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="03425c3d-b5bf-4520-bb29-5e953d285698" containerName="proxy-httpd" Jan 20 17:14:21 crc kubenswrapper[4558]: I0120 17:14:21.951279 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="03425c3d-b5bf-4520-bb29-5e953d285698" containerName="ceilometer-central-agent" Jan 20 17:14:21 crc kubenswrapper[4558]: I0120 17:14:21.952819 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:21 crc kubenswrapper[4558]: I0120 17:14:21.960526 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:14:21 crc kubenswrapper[4558]: I0120 17:14:21.960556 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:14:21 crc kubenswrapper[4558]: I0120 17:14:21.960954 4558 scope.go:117] "RemoveContainer" containerID="291169ed74eb828b64be9a4f588cf633e52f4869bd98972b8ba91e6afd5793c7" Jan 20 17:14:21 crc kubenswrapper[4558]: I0120 17:14:21.966938 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:14:21 crc kubenswrapper[4558]: I0120 17:14:21.989099 4558 scope.go:117] "RemoveContainer" containerID="0fb55c407dda6948afb10ebbf79d8c99f5e75b7441c3add56ed4d88e4583a651" Jan 20 17:14:21 crc kubenswrapper[4558]: E0120 17:14:21.989528 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0fb55c407dda6948afb10ebbf79d8c99f5e75b7441c3add56ed4d88e4583a651\": container with ID starting with 0fb55c407dda6948afb10ebbf79d8c99f5e75b7441c3add56ed4d88e4583a651 not found: ID does not exist" containerID="0fb55c407dda6948afb10ebbf79d8c99f5e75b7441c3add56ed4d88e4583a651" Jan 20 17:14:21 crc kubenswrapper[4558]: I0120 17:14:21.989562 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0fb55c407dda6948afb10ebbf79d8c99f5e75b7441c3add56ed4d88e4583a651"} err="failed to get container status \"0fb55c407dda6948afb10ebbf79d8c99f5e75b7441c3add56ed4d88e4583a651\": rpc error: code = NotFound desc = could not find container \"0fb55c407dda6948afb10ebbf79d8c99f5e75b7441c3add56ed4d88e4583a651\": container with ID starting with 0fb55c407dda6948afb10ebbf79d8c99f5e75b7441c3add56ed4d88e4583a651 not found: ID does not exist" Jan 20 17:14:21 crc kubenswrapper[4558]: I0120 17:14:21.989588 4558 scope.go:117] "RemoveContainer" containerID="0b2270d43d3bfeb4f118c234252baa2620a49109d18670883a7c6cd5287cc3ef" Jan 20 17:14:21 crc kubenswrapper[4558]: E0120 17:14:21.989914 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0b2270d43d3bfeb4f118c234252baa2620a49109d18670883a7c6cd5287cc3ef\": container with ID starting with 0b2270d43d3bfeb4f118c234252baa2620a49109d18670883a7c6cd5287cc3ef not found: ID does not exist" containerID="0b2270d43d3bfeb4f118c234252baa2620a49109d18670883a7c6cd5287cc3ef" Jan 20 17:14:21 crc kubenswrapper[4558]: I0120 17:14:21.989957 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0b2270d43d3bfeb4f118c234252baa2620a49109d18670883a7c6cd5287cc3ef"} err="failed to get container status \"0b2270d43d3bfeb4f118c234252baa2620a49109d18670883a7c6cd5287cc3ef\": rpc error: code = NotFound desc = could not find container 
\"0b2270d43d3bfeb4f118c234252baa2620a49109d18670883a7c6cd5287cc3ef\": container with ID starting with 0b2270d43d3bfeb4f118c234252baa2620a49109d18670883a7c6cd5287cc3ef not found: ID does not exist" Jan 20 17:14:21 crc kubenswrapper[4558]: I0120 17:14:21.989990 4558 scope.go:117] "RemoveContainer" containerID="4b1aadf2aee87daac3bc8fdaf22670353ba7a446b66e43eb5cff6b6bacf0a9e1" Jan 20 17:14:21 crc kubenswrapper[4558]: E0120 17:14:21.990413 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4b1aadf2aee87daac3bc8fdaf22670353ba7a446b66e43eb5cff6b6bacf0a9e1\": container with ID starting with 4b1aadf2aee87daac3bc8fdaf22670353ba7a446b66e43eb5cff6b6bacf0a9e1 not found: ID does not exist" containerID="4b1aadf2aee87daac3bc8fdaf22670353ba7a446b66e43eb5cff6b6bacf0a9e1" Jan 20 17:14:21 crc kubenswrapper[4558]: I0120 17:14:21.990447 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4b1aadf2aee87daac3bc8fdaf22670353ba7a446b66e43eb5cff6b6bacf0a9e1"} err="failed to get container status \"4b1aadf2aee87daac3bc8fdaf22670353ba7a446b66e43eb5cff6b6bacf0a9e1\": rpc error: code = NotFound desc = could not find container \"4b1aadf2aee87daac3bc8fdaf22670353ba7a446b66e43eb5cff6b6bacf0a9e1\": container with ID starting with 4b1aadf2aee87daac3bc8fdaf22670353ba7a446b66e43eb5cff6b6bacf0a9e1 not found: ID does not exist" Jan 20 17:14:21 crc kubenswrapper[4558]: I0120 17:14:21.990464 4558 scope.go:117] "RemoveContainer" containerID="291169ed74eb828b64be9a4f588cf633e52f4869bd98972b8ba91e6afd5793c7" Jan 20 17:14:21 crc kubenswrapper[4558]: E0120 17:14:21.990915 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"291169ed74eb828b64be9a4f588cf633e52f4869bd98972b8ba91e6afd5793c7\": container with ID starting with 291169ed74eb828b64be9a4f588cf633e52f4869bd98972b8ba91e6afd5793c7 not found: ID does not exist" containerID="291169ed74eb828b64be9a4f588cf633e52f4869bd98972b8ba91e6afd5793c7" Jan 20 17:14:21 crc kubenswrapper[4558]: I0120 17:14:21.990934 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"291169ed74eb828b64be9a4f588cf633e52f4869bd98972b8ba91e6afd5793c7"} err="failed to get container status \"291169ed74eb828b64be9a4f588cf633e52f4869bd98972b8ba91e6afd5793c7\": rpc error: code = NotFound desc = could not find container \"291169ed74eb828b64be9a4f588cf633e52f4869bd98972b8ba91e6afd5793c7\": container with ID starting with 291169ed74eb828b64be9a4f588cf633e52f4869bd98972b8ba91e6afd5793c7 not found: ID does not exist" Jan 20 17:14:22 crc kubenswrapper[4558]: I0120 17:14:22.080374 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c988319-ece3-4016-b491-1b33b78ed16e-config-data\") pod \"ceilometer-0\" (UID: \"5c988319-ece3-4016-b491-1b33b78ed16e\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:22 crc kubenswrapper[4558]: I0120 17:14:22.080431 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c988319-ece3-4016-b491-1b33b78ed16e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5c988319-ece3-4016-b491-1b33b78ed16e\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:22 crc kubenswrapper[4558]: I0120 17:14:22.080723 4558 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5c988319-ece3-4016-b491-1b33b78ed16e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5c988319-ece3-4016-b491-1b33b78ed16e\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:22 crc kubenswrapper[4558]: I0120 17:14:22.080869 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v944k\" (UniqueName: \"kubernetes.io/projected/5c988319-ece3-4016-b491-1b33b78ed16e-kube-api-access-v944k\") pod \"ceilometer-0\" (UID: \"5c988319-ece3-4016-b491-1b33b78ed16e\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:22 crc kubenswrapper[4558]: I0120 17:14:22.081036 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5c988319-ece3-4016-b491-1b33b78ed16e-scripts\") pod \"ceilometer-0\" (UID: \"5c988319-ece3-4016-b491-1b33b78ed16e\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:22 crc kubenswrapper[4558]: I0120 17:14:22.081298 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5c988319-ece3-4016-b491-1b33b78ed16e-run-httpd\") pod \"ceilometer-0\" (UID: \"5c988319-ece3-4016-b491-1b33b78ed16e\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:22 crc kubenswrapper[4558]: I0120 17:14:22.081520 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5c988319-ece3-4016-b491-1b33b78ed16e-log-httpd\") pod \"ceilometer-0\" (UID: \"5c988319-ece3-4016-b491-1b33b78ed16e\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:22 crc kubenswrapper[4558]: I0120 17:14:22.184533 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5c988319-ece3-4016-b491-1b33b78ed16e-log-httpd\") pod \"ceilometer-0\" (UID: \"5c988319-ece3-4016-b491-1b33b78ed16e\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:22 crc kubenswrapper[4558]: I0120 17:14:22.184619 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c988319-ece3-4016-b491-1b33b78ed16e-config-data\") pod \"ceilometer-0\" (UID: \"5c988319-ece3-4016-b491-1b33b78ed16e\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:22 crc kubenswrapper[4558]: I0120 17:14:22.184648 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c988319-ece3-4016-b491-1b33b78ed16e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5c988319-ece3-4016-b491-1b33b78ed16e\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:22 crc kubenswrapper[4558]: I0120 17:14:22.184700 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5c988319-ece3-4016-b491-1b33b78ed16e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5c988319-ece3-4016-b491-1b33b78ed16e\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:22 crc kubenswrapper[4558]: I0120 17:14:22.184739 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v944k\" (UniqueName: 
\"kubernetes.io/projected/5c988319-ece3-4016-b491-1b33b78ed16e-kube-api-access-v944k\") pod \"ceilometer-0\" (UID: \"5c988319-ece3-4016-b491-1b33b78ed16e\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:22 crc kubenswrapper[4558]: I0120 17:14:22.184780 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5c988319-ece3-4016-b491-1b33b78ed16e-scripts\") pod \"ceilometer-0\" (UID: \"5c988319-ece3-4016-b491-1b33b78ed16e\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:22 crc kubenswrapper[4558]: I0120 17:14:22.184832 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5c988319-ece3-4016-b491-1b33b78ed16e-run-httpd\") pod \"ceilometer-0\" (UID: \"5c988319-ece3-4016-b491-1b33b78ed16e\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:22 crc kubenswrapper[4558]: I0120 17:14:22.185617 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5c988319-ece3-4016-b491-1b33b78ed16e-log-httpd\") pod \"ceilometer-0\" (UID: \"5c988319-ece3-4016-b491-1b33b78ed16e\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:22 crc kubenswrapper[4558]: I0120 17:14:22.186317 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5c988319-ece3-4016-b491-1b33b78ed16e-run-httpd\") pod \"ceilometer-0\" (UID: \"5c988319-ece3-4016-b491-1b33b78ed16e\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:22 crc kubenswrapper[4558]: I0120 17:14:22.189619 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5c988319-ece3-4016-b491-1b33b78ed16e-scripts\") pod \"ceilometer-0\" (UID: \"5c988319-ece3-4016-b491-1b33b78ed16e\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:22 crc kubenswrapper[4558]: I0120 17:14:22.191351 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c988319-ece3-4016-b491-1b33b78ed16e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5c988319-ece3-4016-b491-1b33b78ed16e\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:22 crc kubenswrapper[4558]: I0120 17:14:22.191696 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5c988319-ece3-4016-b491-1b33b78ed16e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5c988319-ece3-4016-b491-1b33b78ed16e\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:22 crc kubenswrapper[4558]: I0120 17:14:22.192195 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c988319-ece3-4016-b491-1b33b78ed16e-config-data\") pod \"ceilometer-0\" (UID: \"5c988319-ece3-4016-b491-1b33b78ed16e\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:22 crc kubenswrapper[4558]: I0120 17:14:22.207441 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v944k\" (UniqueName: \"kubernetes.io/projected/5c988319-ece3-4016-b491-1b33b78ed16e-kube-api-access-v944k\") pod \"ceilometer-0\" (UID: \"5c988319-ece3-4016-b491-1b33b78ed16e\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:22 crc kubenswrapper[4558]: I0120 17:14:22.272068 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:22 crc kubenswrapper[4558]: I0120 17:14:22.575667 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="03425c3d-b5bf-4520-bb29-5e953d285698" path="/var/lib/kubelet/pods/03425c3d-b5bf-4520-bb29-5e953d285698/volumes" Jan 20 17:14:22 crc kubenswrapper[4558]: I0120 17:14:22.700018 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:14:22 crc kubenswrapper[4558]: W0120 17:14:22.701874 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5c988319_ece3_4016_b491_1b33b78ed16e.slice/crio-39faf509844bb66dd0720fd2821cd11a541c4e8a2d4eb29534bbddc6a0ed755a WatchSource:0}: Error finding container 39faf509844bb66dd0720fd2821cd11a541c4e8a2d4eb29534bbddc6a0ed755a: Status 404 returned error can't find the container with id 39faf509844bb66dd0720fd2821cd11a541c4e8a2d4eb29534bbddc6a0ed755a Jan 20 17:14:22 crc kubenswrapper[4558]: I0120 17:14:22.891354 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"5c988319-ece3-4016-b491-1b33b78ed16e","Type":"ContainerStarted","Data":"39faf509844bb66dd0720fd2821cd11a541c4e8a2d4eb29534bbddc6a0ed755a"} Jan 20 17:14:23 crc kubenswrapper[4558]: I0120 17:14:23.149127 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:14:23 crc kubenswrapper[4558]: I0120 17:14:23.149195 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:14:23 crc kubenswrapper[4558]: I0120 17:14:23.186482 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:14:23 crc kubenswrapper[4558]: I0120 17:14:23.189184 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:14:23 crc kubenswrapper[4558]: I0120 17:14:23.313711 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-srppb" Jan 20 17:14:23 crc kubenswrapper[4558]: I0120 17:14:23.417850 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65cdc060-60d9-48a8-b535-31b93f49e1ed-combined-ca-bundle\") pod \"65cdc060-60d9-48a8-b535-31b93f49e1ed\" (UID: \"65cdc060-60d9-48a8-b535-31b93f49e1ed\") " Jan 20 17:14:23 crc kubenswrapper[4558]: I0120 17:14:23.417946 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/65cdc060-60d9-48a8-b535-31b93f49e1ed-scripts\") pod \"65cdc060-60d9-48a8-b535-31b93f49e1ed\" (UID: \"65cdc060-60d9-48a8-b535-31b93f49e1ed\") " Jan 20 17:14:23 crc kubenswrapper[4558]: I0120 17:14:23.417987 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65cdc060-60d9-48a8-b535-31b93f49e1ed-config-data\") pod \"65cdc060-60d9-48a8-b535-31b93f49e1ed\" (UID: \"65cdc060-60d9-48a8-b535-31b93f49e1ed\") " Jan 20 17:14:23 crc kubenswrapper[4558]: I0120 17:14:23.418222 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s5fpn\" (UniqueName: \"kubernetes.io/projected/65cdc060-60d9-48a8-b535-31b93f49e1ed-kube-api-access-s5fpn\") pod \"65cdc060-60d9-48a8-b535-31b93f49e1ed\" (UID: \"65cdc060-60d9-48a8-b535-31b93f49e1ed\") " Jan 20 17:14:23 crc kubenswrapper[4558]: I0120 17:14:23.424206 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65cdc060-60d9-48a8-b535-31b93f49e1ed-scripts" (OuterVolumeSpecName: "scripts") pod "65cdc060-60d9-48a8-b535-31b93f49e1ed" (UID: "65cdc060-60d9-48a8-b535-31b93f49e1ed"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:14:23 crc kubenswrapper[4558]: I0120 17:14:23.425104 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/65cdc060-60d9-48a8-b535-31b93f49e1ed-kube-api-access-s5fpn" (OuterVolumeSpecName: "kube-api-access-s5fpn") pod "65cdc060-60d9-48a8-b535-31b93f49e1ed" (UID: "65cdc060-60d9-48a8-b535-31b93f49e1ed"). InnerVolumeSpecName "kube-api-access-s5fpn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:14:23 crc kubenswrapper[4558]: I0120 17:14:23.459100 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65cdc060-60d9-48a8-b535-31b93f49e1ed-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "65cdc060-60d9-48a8-b535-31b93f49e1ed" (UID: "65cdc060-60d9-48a8-b535-31b93f49e1ed"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:14:23 crc kubenswrapper[4558]: I0120 17:14:23.463824 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65cdc060-60d9-48a8-b535-31b93f49e1ed-config-data" (OuterVolumeSpecName: "config-data") pod "65cdc060-60d9-48a8-b535-31b93f49e1ed" (UID: "65cdc060-60d9-48a8-b535-31b93f49e1ed"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:14:23 crc kubenswrapper[4558]: I0120 17:14:23.521512 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/65cdc060-60d9-48a8-b535-31b93f49e1ed-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:23 crc kubenswrapper[4558]: I0120 17:14:23.521768 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65cdc060-60d9-48a8-b535-31b93f49e1ed-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:23 crc kubenswrapper[4558]: I0120 17:14:23.521780 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s5fpn\" (UniqueName: \"kubernetes.io/projected/65cdc060-60d9-48a8-b535-31b93f49e1ed-kube-api-access-s5fpn\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:23 crc kubenswrapper[4558]: I0120 17:14:23.521793 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65cdc060-60d9-48a8-b535-31b93f49e1ed-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:23 crc kubenswrapper[4558]: I0120 17:14:23.906279 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-srppb" event={"ID":"65cdc060-60d9-48a8-b535-31b93f49e1ed","Type":"ContainerDied","Data":"b8b2b4a9e34fc9294650366365dac932a7259155ee90ab6b9889a62f898f337a"} Jan 20 17:14:23 crc kubenswrapper[4558]: I0120 17:14:23.906311 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-srppb" Jan 20 17:14:23 crc kubenswrapper[4558]: I0120 17:14:23.906324 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b8b2b4a9e34fc9294650366365dac932a7259155ee90ab6b9889a62f898f337a" Jan 20 17:14:23 crc kubenswrapper[4558]: I0120 17:14:23.909106 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"5c988319-ece3-4016-b491-1b33b78ed16e","Type":"ContainerStarted","Data":"cafc05a16acc6bf3c534e283f0aef442c9ded6ca2cdb82522472318623e4b9ea"} Jan 20 17:14:23 crc kubenswrapper[4558]: I0120 17:14:23.909409 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:14:23 crc kubenswrapper[4558]: I0120 17:14:23.909447 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.101422 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.101855 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.140322 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.146587 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.387445 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell0-cell-mapping-qhsxm"] Jan 20 17:14:24 crc 
kubenswrapper[4558]: E0120 17:14:24.387984 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65cdc060-60d9-48a8-b535-31b93f49e1ed" containerName="nova-cell0-conductor-db-sync" Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.387999 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="65cdc060-60d9-48a8-b535-31b93f49e1ed" containerName="nova-cell0-conductor-db-sync" Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.388220 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="65cdc060-60d9-48a8-b535-31b93f49e1ed" containerName="nova-cell0-conductor-db-sync" Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.388965 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-qhsxm" Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.393073 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-cell-mapping-qhsxm"] Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.393654 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-nova-dockercfg-km6cf" Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.393813 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-manage-config-data" Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.393928 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-manage-scripts" Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.523359 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.524906 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.532447 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-api-config-data" Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.544441 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0da5348-6caa-4cc9-b170-9de34ebda4a1-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-qhsxm\" (UID: \"d0da5348-6caa-4cc9-b170-9de34ebda4a1\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-qhsxm" Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.544546 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d0da5348-6caa-4cc9-b170-9de34ebda4a1-scripts\") pod \"nova-cell0-cell-mapping-qhsxm\" (UID: \"d0da5348-6caa-4cc9-b170-9de34ebda4a1\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-qhsxm" Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.544610 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0da5348-6caa-4cc9-b170-9de34ebda4a1-config-data\") pod \"nova-cell0-cell-mapping-qhsxm\" (UID: \"d0da5348-6caa-4cc9-b170-9de34ebda4a1\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-qhsxm" Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.544637 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f6jfz\" (UniqueName: \"kubernetes.io/projected/d0da5348-6caa-4cc9-b170-9de34ebda4a1-kube-api-access-f6jfz\") pod \"nova-cell0-cell-mapping-qhsxm\" (UID: \"d0da5348-6caa-4cc9-b170-9de34ebda4a1\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-qhsxm" Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.552222 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.553721 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.555228 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-novncproxy-config-data" Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.561105 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.580950 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.651509 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0da5348-6caa-4cc9-b170-9de34ebda4a1-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-qhsxm\" (UID: \"d0da5348-6caa-4cc9-b170-9de34ebda4a1\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-qhsxm" Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.651974 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/447f5fd6-0794-4075-9934-0d9e9a6a3f07-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"447f5fd6-0794-4075-9934-0d9e9a6a3f07\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.652074 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/447f5fd6-0794-4075-9934-0d9e9a6a3f07-logs\") pod \"nova-api-0\" (UID: \"447f5fd6-0794-4075-9934-0d9e9a6a3f07\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.652131 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94ddd4ab-9303-4ada-8fcb-30b1f694e062-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"94ddd4ab-9303-4ada-8fcb-30b1f694e062\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.652915 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.653011 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r4pxc\" (UniqueName: \"kubernetes.io/projected/94ddd4ab-9303-4ada-8fcb-30b1f694e062-kube-api-access-r4pxc\") pod \"nova-cell1-novncproxy-0\" (UID: \"94ddd4ab-9303-4ada-8fcb-30b1f694e062\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.653073 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94ddd4ab-9303-4ada-8fcb-30b1f694e062-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"94ddd4ab-9303-4ada-8fcb-30b1f694e062\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.653139 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d0da5348-6caa-4cc9-b170-9de34ebda4a1-scripts\") pod \"nova-cell0-cell-mapping-qhsxm\" (UID: \"d0da5348-6caa-4cc9-b170-9de34ebda4a1\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-qhsxm" Jan 20 
17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.653196 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9z4bl\" (UniqueName: \"kubernetes.io/projected/447f5fd6-0794-4075-9934-0d9e9a6a3f07-kube-api-access-9z4bl\") pod \"nova-api-0\" (UID: \"447f5fd6-0794-4075-9934-0d9e9a6a3f07\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.653276 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/447f5fd6-0794-4075-9934-0d9e9a6a3f07-config-data\") pod \"nova-api-0\" (UID: \"447f5fd6-0794-4075-9934-0d9e9a6a3f07\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.653349 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0da5348-6caa-4cc9-b170-9de34ebda4a1-config-data\") pod \"nova-cell0-cell-mapping-qhsxm\" (UID: \"d0da5348-6caa-4cc9-b170-9de34ebda4a1\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-qhsxm" Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.653410 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f6jfz\" (UniqueName: \"kubernetes.io/projected/d0da5348-6caa-4cc9-b170-9de34ebda4a1-kube-api-access-f6jfz\") pod \"nova-cell0-cell-mapping-qhsxm\" (UID: \"d0da5348-6caa-4cc9-b170-9de34ebda4a1\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-qhsxm" Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.658028 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.660879 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d0da5348-6caa-4cc9-b170-9de34ebda4a1-scripts\") pod \"nova-cell0-cell-mapping-qhsxm\" (UID: \"d0da5348-6caa-4cc9-b170-9de34ebda4a1\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-qhsxm" Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.661485 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0da5348-6caa-4cc9-b170-9de34ebda4a1-config-data\") pod \"nova-cell0-cell-mapping-qhsxm\" (UID: \"d0da5348-6caa-4cc9-b170-9de34ebda4a1\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-qhsxm" Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.664145 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-metadata-config-data" Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.669237 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0da5348-6caa-4cc9-b170-9de34ebda4a1-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-qhsxm\" (UID: \"d0da5348-6caa-4cc9-b170-9de34ebda4a1\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-qhsxm" Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.686901 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.690021 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f6jfz\" (UniqueName: \"kubernetes.io/projected/d0da5348-6caa-4cc9-b170-9de34ebda4a1-kube-api-access-f6jfz\") pod 
\"nova-cell0-cell-mapping-qhsxm\" (UID: \"d0da5348-6caa-4cc9-b170-9de34ebda4a1\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-qhsxm" Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.757090 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r4pxc\" (UniqueName: \"kubernetes.io/projected/94ddd4ab-9303-4ada-8fcb-30b1f694e062-kube-api-access-r4pxc\") pod \"nova-cell1-novncproxy-0\" (UID: \"94ddd4ab-9303-4ada-8fcb-30b1f694e062\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.757135 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94ddd4ab-9303-4ada-8fcb-30b1f694e062-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"94ddd4ab-9303-4ada-8fcb-30b1f694e062\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.757180 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9z4bl\" (UniqueName: \"kubernetes.io/projected/447f5fd6-0794-4075-9934-0d9e9a6a3f07-kube-api-access-9z4bl\") pod \"nova-api-0\" (UID: \"447f5fd6-0794-4075-9934-0d9e9a6a3f07\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.757219 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/447f5fd6-0794-4075-9934-0d9e9a6a3f07-config-data\") pod \"nova-api-0\" (UID: \"447f5fd6-0794-4075-9934-0d9e9a6a3f07\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.757262 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c8634585-03f0-46bd-9d24-8de13c7b2e03-logs\") pod \"nova-metadata-0\" (UID: \"c8634585-03f0-46bd-9d24-8de13c7b2e03\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.757303 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8634585-03f0-46bd-9d24-8de13c7b2e03-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"c8634585-03f0-46bd-9d24-8de13c7b2e03\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.757332 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-25dxc\" (UniqueName: \"kubernetes.io/projected/c8634585-03f0-46bd-9d24-8de13c7b2e03-kube-api-access-25dxc\") pod \"nova-metadata-0\" (UID: \"c8634585-03f0-46bd-9d24-8de13c7b2e03\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.757374 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/447f5fd6-0794-4075-9934-0d9e9a6a3f07-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"447f5fd6-0794-4075-9934-0d9e9a6a3f07\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.757406 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/447f5fd6-0794-4075-9934-0d9e9a6a3f07-logs\") pod \"nova-api-0\" (UID: \"447f5fd6-0794-4075-9934-0d9e9a6a3f07\") " 
pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.757436 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c8634585-03f0-46bd-9d24-8de13c7b2e03-config-data\") pod \"nova-metadata-0\" (UID: \"c8634585-03f0-46bd-9d24-8de13c7b2e03\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.757459 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94ddd4ab-9303-4ada-8fcb-30b1f694e062-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"94ddd4ab-9303-4ada-8fcb-30b1f694e062\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.758664 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.775562 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-qhsxm" Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.776136 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94ddd4ab-9303-4ada-8fcb-30b1f694e062-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"94ddd4ab-9303-4ada-8fcb-30b1f694e062\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.776583 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/447f5fd6-0794-4075-9934-0d9e9a6a3f07-logs\") pod \"nova-api-0\" (UID: \"447f5fd6-0794-4075-9934-0d9e9a6a3f07\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.776778 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94ddd4ab-9303-4ada-8fcb-30b1f694e062-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"94ddd4ab-9303-4ada-8fcb-30b1f694e062\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.779362 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.779479 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.782713 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/447f5fd6-0794-4075-9934-0d9e9a6a3f07-config-data\") pod \"nova-api-0\" (UID: \"447f5fd6-0794-4075-9934-0d9e9a6a3f07\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.788182 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-scheduler-config-data" Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.791809 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r4pxc\" (UniqueName: \"kubernetes.io/projected/94ddd4ab-9303-4ada-8fcb-30b1f694e062-kube-api-access-r4pxc\") pod \"nova-cell1-novncproxy-0\" (UID: \"94ddd4ab-9303-4ada-8fcb-30b1f694e062\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.811248 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9z4bl\" (UniqueName: \"kubernetes.io/projected/447f5fd6-0794-4075-9934-0d9e9a6a3f07-kube-api-access-9z4bl\") pod \"nova-api-0\" (UID: \"447f5fd6-0794-4075-9934-0d9e9a6a3f07\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.822829 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/447f5fd6-0794-4075-9934-0d9e9a6a3f07-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"447f5fd6-0794-4075-9934-0d9e9a6a3f07\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.860340 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c8634585-03f0-46bd-9d24-8de13c7b2e03-logs\") pod \"nova-metadata-0\" (UID: \"c8634585-03f0-46bd-9d24-8de13c7b2e03\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.860458 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2caeb2e8-f66c-4eb1-8d50-022d18a4752a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"2caeb2e8-f66c-4eb1-8d50-022d18a4752a\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.860573 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8634585-03f0-46bd-9d24-8de13c7b2e03-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"c8634585-03f0-46bd-9d24-8de13c7b2e03\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.860648 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-25dxc\" (UniqueName: \"kubernetes.io/projected/c8634585-03f0-46bd-9d24-8de13c7b2e03-kube-api-access-25dxc\") pod \"nova-metadata-0\" (UID: \"c8634585-03f0-46bd-9d24-8de13c7b2e03\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.861008 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gtxnx\" (UniqueName: \"kubernetes.io/projected/2caeb2e8-f66c-4eb1-8d50-022d18a4752a-kube-api-access-gtxnx\") pod 
\"nova-scheduler-0\" (UID: \"2caeb2e8-f66c-4eb1-8d50-022d18a4752a\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.861122 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c8634585-03f0-46bd-9d24-8de13c7b2e03-config-data\") pod \"nova-metadata-0\" (UID: \"c8634585-03f0-46bd-9d24-8de13c7b2e03\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.861383 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2caeb2e8-f66c-4eb1-8d50-022d18a4752a-config-data\") pod \"nova-scheduler-0\" (UID: \"2caeb2e8-f66c-4eb1-8d50-022d18a4752a\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.863260 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c8634585-03f0-46bd-9d24-8de13c7b2e03-logs\") pod \"nova-metadata-0\" (UID: \"c8634585-03f0-46bd-9d24-8de13c7b2e03\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.864798 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c8634585-03f0-46bd-9d24-8de13c7b2e03-config-data\") pod \"nova-metadata-0\" (UID: \"c8634585-03f0-46bd-9d24-8de13c7b2e03\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.866495 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.868012 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8634585-03f0-46bd-9d24-8de13c7b2e03-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"c8634585-03f0-46bd-9d24-8de13c7b2e03\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.875798 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-25dxc\" (UniqueName: \"kubernetes.io/projected/c8634585-03f0-46bd-9d24-8de13c7b2e03-kube-api-access-25dxc\") pod \"nova-metadata-0\" (UID: \"c8634585-03f0-46bd-9d24-8de13c7b2e03\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.897490 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.972015 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2caeb2e8-f66c-4eb1-8d50-022d18a4752a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"2caeb2e8-f66c-4eb1-8d50-022d18a4752a\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.972453 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gtxnx\" (UniqueName: \"kubernetes.io/projected/2caeb2e8-f66c-4eb1-8d50-022d18a4752a-kube-api-access-gtxnx\") pod \"nova-scheduler-0\" (UID: \"2caeb2e8-f66c-4eb1-8d50-022d18a4752a\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.972566 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2caeb2e8-f66c-4eb1-8d50-022d18a4752a-config-data\") pod \"nova-scheduler-0\" (UID: \"2caeb2e8-f66c-4eb1-8d50-022d18a4752a\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.983518 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2caeb2e8-f66c-4eb1-8d50-022d18a4752a-config-data\") pod \"nova-scheduler-0\" (UID: \"2caeb2e8-f66c-4eb1-8d50-022d18a4752a\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:14:24 crc kubenswrapper[4558]: I0120 17:14:24.987690 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2caeb2e8-f66c-4eb1-8d50-022d18a4752a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"2caeb2e8-f66c-4eb1-8d50-022d18a4752a\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:14:25 crc kubenswrapper[4558]: I0120 17:14:25.009629 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gtxnx\" (UniqueName: \"kubernetes.io/projected/2caeb2e8-f66c-4eb1-8d50-022d18a4752a-kube-api-access-gtxnx\") pod \"nova-scheduler-0\" (UID: \"2caeb2e8-f66c-4eb1-8d50-022d18a4752a\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:14:25 crc kubenswrapper[4558]: I0120 17:14:25.028299 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"5c988319-ece3-4016-b491-1b33b78ed16e","Type":"ContainerStarted","Data":"4fb572825219a0fc20cb0370fc86ca598777392dfec9d5d57c869d049806523b"} Jan 20 17:14:25 crc kubenswrapper[4558]: I0120 17:14:25.028346 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:14:25 crc kubenswrapper[4558]: I0120 17:14:25.028360 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:14:25 crc kubenswrapper[4558]: I0120 17:14:25.177906 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:25 crc kubenswrapper[4558]: I0120 17:14:25.186389 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:14:25 crc kubenswrapper[4558]: I0120 17:14:25.315570 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-cell-mapping-qhsxm"] Jan 20 17:14:25 crc kubenswrapper[4558]: I0120 17:14:25.478594 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:14:25 crc kubenswrapper[4558]: I0120 17:14:25.603552 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:14:25 crc kubenswrapper[4558]: I0120 17:14:25.797484 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:14:25 crc kubenswrapper[4558]: I0120 17:14:25.805638 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:14:26 crc kubenswrapper[4558]: I0120 17:14:26.035254 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"2caeb2e8-f66c-4eb1-8d50-022d18a4752a","Type":"ContainerStarted","Data":"5324bbb7b9d764646a86fd9ae2fd1eeb6a7954a411a86cfff178806306997761"} Jan 20 17:14:26 crc kubenswrapper[4558]: I0120 17:14:26.038376 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-qhsxm" event={"ID":"d0da5348-6caa-4cc9-b170-9de34ebda4a1","Type":"ContainerStarted","Data":"0a0eaadfee5356b39912e205ecdbed7a048b984f1c3c340a311f0c6d29670423"} Jan 20 17:14:26 crc kubenswrapper[4558]: I0120 17:14:26.038451 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-qhsxm" event={"ID":"d0da5348-6caa-4cc9-b170-9de34ebda4a1","Type":"ContainerStarted","Data":"83e3e29ee80732913db4c9cde4658f19bfc112c217c474511818746a902d6629"} Jan 20 17:14:26 crc kubenswrapper[4558]: I0120 17:14:26.041171 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"c8634585-03f0-46bd-9d24-8de13c7b2e03","Type":"ContainerStarted","Data":"108148857f9ad56dda9534ffba28232f783b4078ee76b1132a2b7daafbf7f686"} Jan 20 17:14:26 crc kubenswrapper[4558]: I0120 17:14:26.042677 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"94ddd4ab-9303-4ada-8fcb-30b1f694e062","Type":"ContainerStarted","Data":"77efdcb7624778187484a8cc850b40a9a4af8b5e503b97aeb120af23a1789bd3"} Jan 20 17:14:26 crc kubenswrapper[4558]: I0120 17:14:26.044418 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"447f5fd6-0794-4075-9934-0d9e9a6a3f07","Type":"ContainerStarted","Data":"9a183b22791dde7f1a6bd83dfb318607a3ef80a62babd2e8c1a86b8f7bdefc9f"} Jan 20 17:14:26 crc kubenswrapper[4558]: I0120 17:14:26.044465 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"447f5fd6-0794-4075-9934-0d9e9a6a3f07","Type":"ContainerStarted","Data":"8ad3f5e2f4223450fe75b263914a00ce129a42656094822cf12899d901cb5506"} Jan 20 17:14:26 crc kubenswrapper[4558]: I0120 17:14:26.059130 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-qhsxm" podStartSLOduration=2.059113177 podStartE2EDuration="2.059113177s" podCreationTimestamp="2026-01-20 17:14:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2026-01-20 17:14:26.056947574 +0000 UTC m=+1959.817285541" watchObservedRunningTime="2026-01-20 17:14:26.059113177 +0000 UTC m=+1959.819451144" Jan 20 17:14:26 crc kubenswrapper[4558]: I0120 17:14:26.166741 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:14:26 crc kubenswrapper[4558]: I0120 17:14:26.166826 4558 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 20 17:14:26 crc kubenswrapper[4558]: I0120 17:14:26.220241 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:14:26 crc kubenswrapper[4558]: I0120 17:14:26.487836 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-db-sync-nsn26"] Jan 20 17:14:26 crc kubenswrapper[4558]: I0120 17:14:26.489682 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-nsn26" Jan 20 17:14:26 crc kubenswrapper[4558]: I0120 17:14:26.493122 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-conductor-scripts" Jan 20 17:14:26 crc kubenswrapper[4558]: I0120 17:14:26.493600 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-conductor-config-data" Jan 20 17:14:26 crc kubenswrapper[4558]: I0120 17:14:26.498903 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-db-sync-nsn26"] Jan 20 17:14:26 crc kubenswrapper[4558]: I0120 17:14:26.531307 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2s4tm\" (UniqueName: \"kubernetes.io/projected/ecc5d42c-dbf5-44e2-b487-25aa99419dc1-kube-api-access-2s4tm\") pod \"nova-cell1-conductor-db-sync-nsn26\" (UID: \"ecc5d42c-dbf5-44e2-b487-25aa99419dc1\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-nsn26" Jan 20 17:14:26 crc kubenswrapper[4558]: I0120 17:14:26.531495 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ecc5d42c-dbf5-44e2-b487-25aa99419dc1-config-data\") pod \"nova-cell1-conductor-db-sync-nsn26\" (UID: \"ecc5d42c-dbf5-44e2-b487-25aa99419dc1\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-nsn26" Jan 20 17:14:26 crc kubenswrapper[4558]: I0120 17:14:26.531571 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ecc5d42c-dbf5-44e2-b487-25aa99419dc1-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-nsn26\" (UID: \"ecc5d42c-dbf5-44e2-b487-25aa99419dc1\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-nsn26" Jan 20 17:14:26 crc kubenswrapper[4558]: I0120 17:14:26.531603 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ecc5d42c-dbf5-44e2-b487-25aa99419dc1-scripts\") pod \"nova-cell1-conductor-db-sync-nsn26\" (UID: \"ecc5d42c-dbf5-44e2-b487-25aa99419dc1\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-nsn26" Jan 20 17:14:26 crc kubenswrapper[4558]: I0120 17:14:26.646886 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/ecc5d42c-dbf5-44e2-b487-25aa99419dc1-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-nsn26\" (UID: \"ecc5d42c-dbf5-44e2-b487-25aa99419dc1\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-nsn26" Jan 20 17:14:26 crc kubenswrapper[4558]: I0120 17:14:26.647134 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ecc5d42c-dbf5-44e2-b487-25aa99419dc1-scripts\") pod \"nova-cell1-conductor-db-sync-nsn26\" (UID: \"ecc5d42c-dbf5-44e2-b487-25aa99419dc1\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-nsn26" Jan 20 17:14:26 crc kubenswrapper[4558]: I0120 17:14:26.649715 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2s4tm\" (UniqueName: \"kubernetes.io/projected/ecc5d42c-dbf5-44e2-b487-25aa99419dc1-kube-api-access-2s4tm\") pod \"nova-cell1-conductor-db-sync-nsn26\" (UID: \"ecc5d42c-dbf5-44e2-b487-25aa99419dc1\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-nsn26" Jan 20 17:14:26 crc kubenswrapper[4558]: I0120 17:14:26.650175 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ecc5d42c-dbf5-44e2-b487-25aa99419dc1-config-data\") pod \"nova-cell1-conductor-db-sync-nsn26\" (UID: \"ecc5d42c-dbf5-44e2-b487-25aa99419dc1\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-nsn26" Jan 20 17:14:26 crc kubenswrapper[4558]: I0120 17:14:26.653180 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ecc5d42c-dbf5-44e2-b487-25aa99419dc1-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-nsn26\" (UID: \"ecc5d42c-dbf5-44e2-b487-25aa99419dc1\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-nsn26" Jan 20 17:14:26 crc kubenswrapper[4558]: I0120 17:14:26.697659 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2s4tm\" (UniqueName: \"kubernetes.io/projected/ecc5d42c-dbf5-44e2-b487-25aa99419dc1-kube-api-access-2s4tm\") pod \"nova-cell1-conductor-db-sync-nsn26\" (UID: \"ecc5d42c-dbf5-44e2-b487-25aa99419dc1\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-nsn26" Jan 20 17:14:26 crc kubenswrapper[4558]: I0120 17:14:26.698010 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ecc5d42c-dbf5-44e2-b487-25aa99419dc1-scripts\") pod \"nova-cell1-conductor-db-sync-nsn26\" (UID: \"ecc5d42c-dbf5-44e2-b487-25aa99419dc1\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-nsn26" Jan 20 17:14:26 crc kubenswrapper[4558]: I0120 17:14:26.699512 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ecc5d42c-dbf5-44e2-b487-25aa99419dc1-config-data\") pod \"nova-cell1-conductor-db-sync-nsn26\" (UID: \"ecc5d42c-dbf5-44e2-b487-25aa99419dc1\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-nsn26" Jan 20 17:14:26 crc kubenswrapper[4558]: I0120 17:14:26.804565 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-nsn26" Jan 20 17:14:27 crc kubenswrapper[4558]: I0120 17:14:27.059768 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"94ddd4ab-9303-4ada-8fcb-30b1f694e062","Type":"ContainerStarted","Data":"f31188aa53d24f58395306cc9b4ba53c87effb9a7022fa60ee4309f845e90278"} Jan 20 17:14:27 crc kubenswrapper[4558]: I0120 17:14:27.064621 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"447f5fd6-0794-4075-9934-0d9e9a6a3f07","Type":"ContainerStarted","Data":"41dccb6b414a601fbd30ded62673eacba509935404e0c54ded313061dceddaf1"} Jan 20 17:14:27 crc kubenswrapper[4558]: I0120 17:14:27.066256 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"2caeb2e8-f66c-4eb1-8d50-022d18a4752a","Type":"ContainerStarted","Data":"84fcb93f0f2a487eee0f2409ccc5beef495eb55ab92858af037a7c0fc064c55a"} Jan 20 17:14:27 crc kubenswrapper[4558]: I0120 17:14:27.069662 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"5c988319-ece3-4016-b491-1b33b78ed16e","Type":"ContainerStarted","Data":"e91939ae15b50ee2cb9f127b9219736a27901de5d5be68878bb47fc2b64c9dc5"} Jan 20 17:14:27 crc kubenswrapper[4558]: I0120 17:14:27.071145 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"c8634585-03f0-46bd-9d24-8de13c7b2e03","Type":"ContainerStarted","Data":"bd750746c3c946e143efe3b5e434ea6372a78afea873074ab019cb75fab30e8f"} Jan 20 17:14:27 crc kubenswrapper[4558]: I0120 17:14:27.071220 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"c8634585-03f0-46bd-9d24-8de13c7b2e03","Type":"ContainerStarted","Data":"10345e5f559c4b6bca555657d434de8f554f23c8931d6a96bdbd64571c2d4037"} Jan 20 17:14:27 crc kubenswrapper[4558]: I0120 17:14:27.071494 4558 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 20 17:14:27 crc kubenswrapper[4558]: I0120 17:14:27.071518 4558 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 20 17:14:27 crc kubenswrapper[4558]: I0120 17:14:27.077513 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" podStartSLOduration=3.07750099 podStartE2EDuration="3.07750099s" podCreationTimestamp="2026-01-20 17:14:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:14:27.07342188 +0000 UTC m=+1960.833759846" watchObservedRunningTime="2026-01-20 17:14:27.07750099 +0000 UTC m=+1960.837838957" Jan 20 17:14:27 crc kubenswrapper[4558]: I0120 17:14:27.116294 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-scheduler-0" podStartSLOduration=3.116267048 podStartE2EDuration="3.116267048s" podCreationTimestamp="2026-01-20 17:14:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:14:27.091353078 +0000 UTC m=+1960.851691045" watchObservedRunningTime="2026-01-20 17:14:27.116267048 +0000 UTC m=+1960.876605015" Jan 20 17:14:27 crc kubenswrapper[4558]: I0120 17:14:27.126445 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack-kuttl-tests/nova-metadata-0" podStartSLOduration=3.126433134 podStartE2EDuration="3.126433134s" podCreationTimestamp="2026-01-20 17:14:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:14:27.113174803 +0000 UTC m=+1960.873512770" watchObservedRunningTime="2026-01-20 17:14:27.126433134 +0000 UTC m=+1960.886771102" Jan 20 17:14:27 crc kubenswrapper[4558]: I0120 17:14:27.136898 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-api-0" podStartSLOduration=3.136888074 podStartE2EDuration="3.136888074s" podCreationTimestamp="2026-01-20 17:14:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:14:27.130960628 +0000 UTC m=+1960.891298595" watchObservedRunningTime="2026-01-20 17:14:27.136888074 +0000 UTC m=+1960.897226042" Jan 20 17:14:27 crc kubenswrapper[4558]: I0120 17:14:27.232651 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:14:27 crc kubenswrapper[4558]: I0120 17:14:27.245181 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:14:27 crc kubenswrapper[4558]: I0120 17:14:27.290836 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-db-sync-nsn26"] Jan 20 17:14:27 crc kubenswrapper[4558]: I0120 17:14:27.330322 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 17:14:27 crc kubenswrapper[4558]: I0120 17:14:27.330483 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 17:14:27 crc kubenswrapper[4558]: I0120 17:14:27.396008 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:14:27 crc kubenswrapper[4558]: I0120 17:14:27.403480 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:14:28 crc kubenswrapper[4558]: I0120 17:14:28.080569 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-nsn26" event={"ID":"ecc5d42c-dbf5-44e2-b487-25aa99419dc1","Type":"ContainerStarted","Data":"b814fd8702534f35668ee31f88682bb9c76df8f8dfbcd5d7d7c5cbda3ab01f91"} Jan 20 17:14:28 crc kubenswrapper[4558]: I0120 17:14:28.080884 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-nsn26" event={"ID":"ecc5d42c-dbf5-44e2-b487-25aa99419dc1","Type":"ContainerStarted","Data":"3b34d9abc197b7150d227b467de430bd41ca143df5114b03e77dacd7c4a78df6"} Jan 20 17:14:28 crc kubenswrapper[4558]: I0120 17:14:28.083441 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" 
event={"ID":"5c988319-ece3-4016-b491-1b33b78ed16e","Type":"ContainerStarted","Data":"e70948baf660f38543ddb5acaa11cca1652348a68137c5b3ac38f88134bf299c"} Jan 20 17:14:28 crc kubenswrapper[4558]: I0120 17:14:28.102322 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-nsn26" podStartSLOduration=2.102304829 podStartE2EDuration="2.102304829s" podCreationTimestamp="2026-01-20 17:14:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:14:28.093632993 +0000 UTC m=+1961.853970960" watchObservedRunningTime="2026-01-20 17:14:28.102304829 +0000 UTC m=+1961.862642796" Jan 20 17:14:28 crc kubenswrapper[4558]: I0120 17:14:28.156911 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=2.515342349 podStartE2EDuration="7.156871278s" podCreationTimestamp="2026-01-20 17:14:21 +0000 UTC" firstStartedPulling="2026-01-20 17:14:22.7042851 +0000 UTC m=+1956.464623067" lastFinishedPulling="2026-01-20 17:14:27.345814029 +0000 UTC m=+1961.106151996" observedRunningTime="2026-01-20 17:14:28.141982512 +0000 UTC m=+1961.902320479" watchObservedRunningTime="2026-01-20 17:14:28.156871278 +0000 UTC m=+1961.917209245" Jan 20 17:14:29 crc kubenswrapper[4558]: I0120 17:14:29.091208 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:29 crc kubenswrapper[4558]: I0120 17:14:29.091376 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" podUID="94ddd4ab-9303-4ada-8fcb-30b1f694e062" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://f31188aa53d24f58395306cc9b4ba53c87effb9a7022fa60ee4309f845e90278" gracePeriod=30 Jan 20 17:14:29 crc kubenswrapper[4558]: I0120 17:14:29.091482 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="c8634585-03f0-46bd-9d24-8de13c7b2e03" containerName="nova-metadata-log" containerID="cri-o://10345e5f559c4b6bca555657d434de8f554f23c8931d6a96bdbd64571c2d4037" gracePeriod=30 Jan 20 17:14:29 crc kubenswrapper[4558]: I0120 17:14:29.091633 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="c8634585-03f0-46bd-9d24-8de13c7b2e03" containerName="nova-metadata-metadata" containerID="cri-o://bd750746c3c946e143efe3b5e434ea6372a78afea873074ab019cb75fab30e8f" gracePeriod=30 Jan 20 17:14:29 crc kubenswrapper[4558]: I0120 17:14:29.363413 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:14:29 crc kubenswrapper[4558]: I0120 17:14:29.363738 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="447f5fd6-0794-4075-9934-0d9e9a6a3f07" containerName="nova-api-log" containerID="cri-o://9a183b22791dde7f1a6bd83dfb318607a3ef80a62babd2e8c1a86b8f7bdefc9f" gracePeriod=30 Jan 20 17:14:29 crc kubenswrapper[4558]: I0120 17:14:29.363912 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="447f5fd6-0794-4075-9934-0d9e9a6a3f07" containerName="nova-api-api" containerID="cri-o://41dccb6b414a601fbd30ded62673eacba509935404e0c54ded313061dceddaf1" gracePeriod=30 Jan 20 17:14:29 crc 
kubenswrapper[4558]: I0120 17:14:29.378367 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/watcher-decision-engine-0"] Jan 20 17:14:29 crc kubenswrapper[4558]: I0120 17:14:29.378603 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/watcher-decision-engine-0" podUID="f47cb9e2-2306-494d-94ff-b60393a9413e" containerName="watcher-decision-engine" containerID="cri-o://2100b7c749ddb30bbb158352b085f32659152dcc98b26a89f4ae8fd0e68b79ce" gracePeriod=30 Jan 20 17:14:29 crc kubenswrapper[4558]: I0120 17:14:29.433271 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:14:29 crc kubenswrapper[4558]: I0120 17:14:29.433477 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="2caeb2e8-f66c-4eb1-8d50-022d18a4752a" containerName="nova-scheduler-scheduler" containerID="cri-o://84fcb93f0f2a487eee0f2409ccc5beef495eb55ab92858af037a7c0fc064c55a" gracePeriod=30 Jan 20 17:14:29 crc kubenswrapper[4558]: I0120 17:14:29.759686 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:29 crc kubenswrapper[4558]: I0120 17:14:29.827932 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c8634585-03f0-46bd-9d24-8de13c7b2e03-logs\") pod \"c8634585-03f0-46bd-9d24-8de13c7b2e03\" (UID: \"c8634585-03f0-46bd-9d24-8de13c7b2e03\") " Jan 20 17:14:29 crc kubenswrapper[4558]: I0120 17:14:29.828141 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-25dxc\" (UniqueName: \"kubernetes.io/projected/c8634585-03f0-46bd-9d24-8de13c7b2e03-kube-api-access-25dxc\") pod \"c8634585-03f0-46bd-9d24-8de13c7b2e03\" (UID: \"c8634585-03f0-46bd-9d24-8de13c7b2e03\") " Jan 20 17:14:29 crc kubenswrapper[4558]: I0120 17:14:29.828349 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8634585-03f0-46bd-9d24-8de13c7b2e03-combined-ca-bundle\") pod \"c8634585-03f0-46bd-9d24-8de13c7b2e03\" (UID: \"c8634585-03f0-46bd-9d24-8de13c7b2e03\") " Jan 20 17:14:29 crc kubenswrapper[4558]: I0120 17:14:29.828401 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c8634585-03f0-46bd-9d24-8de13c7b2e03-logs" (OuterVolumeSpecName: "logs") pod "c8634585-03f0-46bd-9d24-8de13c7b2e03" (UID: "c8634585-03f0-46bd-9d24-8de13c7b2e03"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:14:29 crc kubenswrapper[4558]: I0120 17:14:29.828485 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c8634585-03f0-46bd-9d24-8de13c7b2e03-config-data\") pod \"c8634585-03f0-46bd-9d24-8de13c7b2e03\" (UID: \"c8634585-03f0-46bd-9d24-8de13c7b2e03\") " Jan 20 17:14:29 crc kubenswrapper[4558]: I0120 17:14:29.829050 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c8634585-03f0-46bd-9d24-8de13c7b2e03-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:29 crc kubenswrapper[4558]: I0120 17:14:29.846406 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c8634585-03f0-46bd-9d24-8de13c7b2e03-kube-api-access-25dxc" (OuterVolumeSpecName: "kube-api-access-25dxc") pod "c8634585-03f0-46bd-9d24-8de13c7b2e03" (UID: "c8634585-03f0-46bd-9d24-8de13c7b2e03"). InnerVolumeSpecName "kube-api-access-25dxc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:14:29 crc kubenswrapper[4558]: I0120 17:14:29.862355 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c8634585-03f0-46bd-9d24-8de13c7b2e03-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c8634585-03f0-46bd-9d24-8de13c7b2e03" (UID: "c8634585-03f0-46bd-9d24-8de13c7b2e03"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:14:29 crc kubenswrapper[4558]: I0120 17:14:29.866762 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c8634585-03f0-46bd-9d24-8de13c7b2e03-config-data" (OuterVolumeSpecName: "config-data") pod "c8634585-03f0-46bd-9d24-8de13c7b2e03" (UID: "c8634585-03f0-46bd-9d24-8de13c7b2e03"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:14:29 crc kubenswrapper[4558]: I0120 17:14:29.902623 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:14:29 crc kubenswrapper[4558]: I0120 17:14:29.929965 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r4pxc\" (UniqueName: \"kubernetes.io/projected/94ddd4ab-9303-4ada-8fcb-30b1f694e062-kube-api-access-r4pxc\") pod \"94ddd4ab-9303-4ada-8fcb-30b1f694e062\" (UID: \"94ddd4ab-9303-4ada-8fcb-30b1f694e062\") " Jan 20 17:14:29 crc kubenswrapper[4558]: I0120 17:14:29.930130 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94ddd4ab-9303-4ada-8fcb-30b1f694e062-config-data\") pod \"94ddd4ab-9303-4ada-8fcb-30b1f694e062\" (UID: \"94ddd4ab-9303-4ada-8fcb-30b1f694e062\") " Jan 20 17:14:29 crc kubenswrapper[4558]: I0120 17:14:29.930290 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94ddd4ab-9303-4ada-8fcb-30b1f694e062-combined-ca-bundle\") pod \"94ddd4ab-9303-4ada-8fcb-30b1f694e062\" (UID: \"94ddd4ab-9303-4ada-8fcb-30b1f694e062\") " Jan 20 17:14:29 crc kubenswrapper[4558]: I0120 17:14:29.930938 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-25dxc\" (UniqueName: \"kubernetes.io/projected/c8634585-03f0-46bd-9d24-8de13c7b2e03-kube-api-access-25dxc\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:29 crc kubenswrapper[4558]: I0120 17:14:29.930958 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8634585-03f0-46bd-9d24-8de13c7b2e03-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:29 crc kubenswrapper[4558]: I0120 17:14:29.930968 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c8634585-03f0-46bd-9d24-8de13c7b2e03-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:29 crc kubenswrapper[4558]: I0120 17:14:29.938303 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/94ddd4ab-9303-4ada-8fcb-30b1f694e062-kube-api-access-r4pxc" (OuterVolumeSpecName: "kube-api-access-r4pxc") pod "94ddd4ab-9303-4ada-8fcb-30b1f694e062" (UID: "94ddd4ab-9303-4ada-8fcb-30b1f694e062"). InnerVolumeSpecName "kube-api-access-r4pxc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:14:29 crc kubenswrapper[4558]: I0120 17:14:29.957482 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94ddd4ab-9303-4ada-8fcb-30b1f694e062-config-data" (OuterVolumeSpecName: "config-data") pod "94ddd4ab-9303-4ada-8fcb-30b1f694e062" (UID: "94ddd4ab-9303-4ada-8fcb-30b1f694e062"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:14:29 crc kubenswrapper[4558]: I0120 17:14:29.959243 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94ddd4ab-9303-4ada-8fcb-30b1f694e062-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "94ddd4ab-9303-4ada-8fcb-30b1f694e062" (UID: "94ddd4ab-9303-4ada-8fcb-30b1f694e062"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.008009 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.031789 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/447f5fd6-0794-4075-9934-0d9e9a6a3f07-combined-ca-bundle\") pod \"447f5fd6-0794-4075-9934-0d9e9a6a3f07\" (UID: \"447f5fd6-0794-4075-9934-0d9e9a6a3f07\") " Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.031936 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9z4bl\" (UniqueName: \"kubernetes.io/projected/447f5fd6-0794-4075-9934-0d9e9a6a3f07-kube-api-access-9z4bl\") pod \"447f5fd6-0794-4075-9934-0d9e9a6a3f07\" (UID: \"447f5fd6-0794-4075-9934-0d9e9a6a3f07\") " Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.032048 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/447f5fd6-0794-4075-9934-0d9e9a6a3f07-config-data\") pod \"447f5fd6-0794-4075-9934-0d9e9a6a3f07\" (UID: \"447f5fd6-0794-4075-9934-0d9e9a6a3f07\") " Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.032098 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/447f5fd6-0794-4075-9934-0d9e9a6a3f07-logs\") pod \"447f5fd6-0794-4075-9934-0d9e9a6a3f07\" (UID: \"447f5fd6-0794-4075-9934-0d9e9a6a3f07\") " Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.032659 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r4pxc\" (UniqueName: \"kubernetes.io/projected/94ddd4ab-9303-4ada-8fcb-30b1f694e062-kube-api-access-r4pxc\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.032677 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94ddd4ab-9303-4ada-8fcb-30b1f694e062-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.032687 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94ddd4ab-9303-4ada-8fcb-30b1f694e062-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.032953 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/447f5fd6-0794-4075-9934-0d9e9a6a3f07-logs" (OuterVolumeSpecName: "logs") pod "447f5fd6-0794-4075-9934-0d9e9a6a3f07" (UID: "447f5fd6-0794-4075-9934-0d9e9a6a3f07"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.037015 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/447f5fd6-0794-4075-9934-0d9e9a6a3f07-kube-api-access-9z4bl" (OuterVolumeSpecName: "kube-api-access-9z4bl") pod "447f5fd6-0794-4075-9934-0d9e9a6a3f07" (UID: "447f5fd6-0794-4075-9934-0d9e9a6a3f07"). InnerVolumeSpecName "kube-api-access-9z4bl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.067407 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/447f5fd6-0794-4075-9934-0d9e9a6a3f07-config-data" (OuterVolumeSpecName: "config-data") pod "447f5fd6-0794-4075-9934-0d9e9a6a3f07" (UID: "447f5fd6-0794-4075-9934-0d9e9a6a3f07"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.090485 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/447f5fd6-0794-4075-9934-0d9e9a6a3f07-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "447f5fd6-0794-4075-9934-0d9e9a6a3f07" (UID: "447f5fd6-0794-4075-9934-0d9e9a6a3f07"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.104859 4558 generic.go:334] "Generic (PLEG): container finished" podID="94ddd4ab-9303-4ada-8fcb-30b1f694e062" containerID="f31188aa53d24f58395306cc9b4ba53c87effb9a7022fa60ee4309f845e90278" exitCode=0 Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.104920 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.104938 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"94ddd4ab-9303-4ada-8fcb-30b1f694e062","Type":"ContainerDied","Data":"f31188aa53d24f58395306cc9b4ba53c87effb9a7022fa60ee4309f845e90278"} Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.105651 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"94ddd4ab-9303-4ada-8fcb-30b1f694e062","Type":"ContainerDied","Data":"77efdcb7624778187484a8cc850b40a9a4af8b5e503b97aeb120af23a1789bd3"} Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.105689 4558 scope.go:117] "RemoveContainer" containerID="f31188aa53d24f58395306cc9b4ba53c87effb9a7022fa60ee4309f845e90278" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.110839 4558 generic.go:334] "Generic (PLEG): container finished" podID="447f5fd6-0794-4075-9934-0d9e9a6a3f07" containerID="41dccb6b414a601fbd30ded62673eacba509935404e0c54ded313061dceddaf1" exitCode=0 Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.110858 4558 generic.go:334] "Generic (PLEG): container finished" podID="447f5fd6-0794-4075-9934-0d9e9a6a3f07" containerID="9a183b22791dde7f1a6bd83dfb318607a3ef80a62babd2e8c1a86b8f7bdefc9f" exitCode=143 Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.110892 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"447f5fd6-0794-4075-9934-0d9e9a6a3f07","Type":"ContainerDied","Data":"41dccb6b414a601fbd30ded62673eacba509935404e0c54ded313061dceddaf1"} Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.110911 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"447f5fd6-0794-4075-9934-0d9e9a6a3f07","Type":"ContainerDied","Data":"9a183b22791dde7f1a6bd83dfb318607a3ef80a62babd2e8c1a86b8f7bdefc9f"} Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.110921 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"447f5fd6-0794-4075-9934-0d9e9a6a3f07","Type":"ContainerDied","Data":"8ad3f5e2f4223450fe75b263914a00ce129a42656094822cf12899d901cb5506"} Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.110973 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.124407 4558 generic.go:334] "Generic (PLEG): container finished" podID="2caeb2e8-f66c-4eb1-8d50-022d18a4752a" containerID="84fcb93f0f2a487eee0f2409ccc5beef495eb55ab92858af037a7c0fc064c55a" exitCode=0 Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.124530 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"2caeb2e8-f66c-4eb1-8d50-022d18a4752a","Type":"ContainerDied","Data":"84fcb93f0f2a487eee0f2409ccc5beef495eb55ab92858af037a7c0fc064c55a"} Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.129420 4558 generic.go:334] "Generic (PLEG): container finished" podID="c8634585-03f0-46bd-9d24-8de13c7b2e03" containerID="bd750746c3c946e143efe3b5e434ea6372a78afea873074ab019cb75fab30e8f" exitCode=0 Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.129515 4558 generic.go:334] "Generic (PLEG): container finished" podID="c8634585-03f0-46bd-9d24-8de13c7b2e03" containerID="10345e5f559c4b6bca555657d434de8f554f23c8931d6a96bdbd64571c2d4037" exitCode=143 Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.129596 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"c8634585-03f0-46bd-9d24-8de13c7b2e03","Type":"ContainerDied","Data":"bd750746c3c946e143efe3b5e434ea6372a78afea873074ab019cb75fab30e8f"} Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.129642 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"c8634585-03f0-46bd-9d24-8de13c7b2e03","Type":"ContainerDied","Data":"10345e5f559c4b6bca555657d434de8f554f23c8931d6a96bdbd64571c2d4037"} Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.129659 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"c8634585-03f0-46bd-9d24-8de13c7b2e03","Type":"ContainerDied","Data":"108148857f9ad56dda9534ffba28232f783b4078ee76b1132a2b7daafbf7f686"} Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.129550 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.134048 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9z4bl\" (UniqueName: \"kubernetes.io/projected/447f5fd6-0794-4075-9934-0d9e9a6a3f07-kube-api-access-9z4bl\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.134136 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/447f5fd6-0794-4075-9934-0d9e9a6a3f07-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.134209 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/447f5fd6-0794-4075-9934-0d9e9a6a3f07-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.134290 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/447f5fd6-0794-4075-9934-0d9e9a6a3f07-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.158834 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.168943 4558 scope.go:117] "RemoveContainer" containerID="f31188aa53d24f58395306cc9b4ba53c87effb9a7022fa60ee4309f845e90278" Jan 20 17:14:30 crc kubenswrapper[4558]: E0120 17:14:30.169920 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f31188aa53d24f58395306cc9b4ba53c87effb9a7022fa60ee4309f845e90278\": container with ID starting with f31188aa53d24f58395306cc9b4ba53c87effb9a7022fa60ee4309f845e90278 not found: ID does not exist" containerID="f31188aa53d24f58395306cc9b4ba53c87effb9a7022fa60ee4309f845e90278" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.169947 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f31188aa53d24f58395306cc9b4ba53c87effb9a7022fa60ee4309f845e90278"} err="failed to get container status \"f31188aa53d24f58395306cc9b4ba53c87effb9a7022fa60ee4309f845e90278\": rpc error: code = NotFound desc = could not find container \"f31188aa53d24f58395306cc9b4ba53c87effb9a7022fa60ee4309f845e90278\": container with ID starting with f31188aa53d24f58395306cc9b4ba53c87effb9a7022fa60ee4309f845e90278 not found: ID does not exist" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.169965 4558 scope.go:117] "RemoveContainer" containerID="41dccb6b414a601fbd30ded62673eacba509935404e0c54ded313061dceddaf1" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.173377 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.183716 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:14:30 crc kubenswrapper[4558]: E0120 17:14:30.184334 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8634585-03f0-46bd-9d24-8de13c7b2e03" containerName="nova-metadata-log" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.184349 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8634585-03f0-46bd-9d24-8de13c7b2e03" containerName="nova-metadata-log" Jan 20 17:14:30 crc kubenswrapper[4558]: E0120 17:14:30.184361 4558 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="94ddd4ab-9303-4ada-8fcb-30b1f694e062" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.184368 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="94ddd4ab-9303-4ada-8fcb-30b1f694e062" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:14:30 crc kubenswrapper[4558]: E0120 17:14:30.184380 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="447f5fd6-0794-4075-9934-0d9e9a6a3f07" containerName="nova-api-log" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.184387 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="447f5fd6-0794-4075-9934-0d9e9a6a3f07" containerName="nova-api-log" Jan 20 17:14:30 crc kubenswrapper[4558]: E0120 17:14:30.184394 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="447f5fd6-0794-4075-9934-0d9e9a6a3f07" containerName="nova-api-api" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.184400 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="447f5fd6-0794-4075-9934-0d9e9a6a3f07" containerName="nova-api-api" Jan 20 17:14:30 crc kubenswrapper[4558]: E0120 17:14:30.184418 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8634585-03f0-46bd-9d24-8de13c7b2e03" containerName="nova-metadata-metadata" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.184423 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8634585-03f0-46bd-9d24-8de13c7b2e03" containerName="nova-metadata-metadata" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.184601 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c8634585-03f0-46bd-9d24-8de13c7b2e03" containerName="nova-metadata-metadata" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.184637 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c8634585-03f0-46bd-9d24-8de13c7b2e03" containerName="nova-metadata-log" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.184648 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="447f5fd6-0794-4075-9934-0d9e9a6a3f07" containerName="nova-api-log" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.184661 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="94ddd4ab-9303-4ada-8fcb-30b1f694e062" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.184671 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="447f5fd6-0794-4075-9934-0d9e9a6a3f07" containerName="nova-api-api" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.185481 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.187073 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.187466 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-novncproxy-config-data" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.188573 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-novncproxy-cell1-vencrypt" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.188804 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-novncproxy-cell1-public-svc" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.223675 4558 scope.go:117] "RemoveContainer" containerID="9a183b22791dde7f1a6bd83dfb318607a3ef80a62babd2e8c1a86b8f7bdefc9f" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.237816 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ph8gt\" (UniqueName: \"kubernetes.io/projected/5f22129e-7bdd-41c8-8429-1835bdeac056-kube-api-access-ph8gt\") pod \"nova-cell1-novncproxy-0\" (UID: \"5f22129e-7bdd-41c8-8429-1835bdeac056\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.237899 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f22129e-7bdd-41c8-8429-1835bdeac056-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"5f22129e-7bdd-41c8-8429-1835bdeac056\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.238058 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/5f22129e-7bdd-41c8-8429-1835bdeac056-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"5f22129e-7bdd-41c8-8429-1835bdeac056\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.238112 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f22129e-7bdd-41c8-8429-1835bdeac056-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"5f22129e-7bdd-41c8-8429-1835bdeac056\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.238130 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/5f22129e-7bdd-41c8-8429-1835bdeac056-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"5f22129e-7bdd-41c8-8429-1835bdeac056\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.262759 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.291463 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.296814 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.302213 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.309667 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.328224 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.333045 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-api-config-data" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.334099 4558 scope.go:117] "RemoveContainer" containerID="41dccb6b414a601fbd30ded62673eacba509935404e0c54ded313061dceddaf1" Jan 20 17:14:30 crc kubenswrapper[4558]: E0120 17:14:30.334609 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"41dccb6b414a601fbd30ded62673eacba509935404e0c54ded313061dceddaf1\": container with ID starting with 41dccb6b414a601fbd30ded62673eacba509935404e0c54ded313061dceddaf1 not found: ID does not exist" containerID="41dccb6b414a601fbd30ded62673eacba509935404e0c54ded313061dceddaf1" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.334636 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"41dccb6b414a601fbd30ded62673eacba509935404e0c54ded313061dceddaf1"} err="failed to get container status \"41dccb6b414a601fbd30ded62673eacba509935404e0c54ded313061dceddaf1\": rpc error: code = NotFound desc = could not find container \"41dccb6b414a601fbd30ded62673eacba509935404e0c54ded313061dceddaf1\": container with ID starting with 41dccb6b414a601fbd30ded62673eacba509935404e0c54ded313061dceddaf1 not found: ID does not exist" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.334657 4558 scope.go:117] "RemoveContainer" containerID="9a183b22791dde7f1a6bd83dfb318607a3ef80a62babd2e8c1a86b8f7bdefc9f" Jan 20 17:14:30 crc kubenswrapper[4558]: E0120 17:14:30.334887 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9a183b22791dde7f1a6bd83dfb318607a3ef80a62babd2e8c1a86b8f7bdefc9f\": container with ID starting with 9a183b22791dde7f1a6bd83dfb318607a3ef80a62babd2e8c1a86b8f7bdefc9f not found: ID does not exist" containerID="9a183b22791dde7f1a6bd83dfb318607a3ef80a62babd2e8c1a86b8f7bdefc9f" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.334909 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9a183b22791dde7f1a6bd83dfb318607a3ef80a62babd2e8c1a86b8f7bdefc9f"} err="failed to get container status \"9a183b22791dde7f1a6bd83dfb318607a3ef80a62babd2e8c1a86b8f7bdefc9f\": rpc error: code = NotFound desc = could not find container \"9a183b22791dde7f1a6bd83dfb318607a3ef80a62babd2e8c1a86b8f7bdefc9f\": container with ID starting with 9a183b22791dde7f1a6bd83dfb318607a3ef80a62babd2e8c1a86b8f7bdefc9f not found: ID does not exist" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.334932 4558 scope.go:117] "RemoveContainer" containerID="41dccb6b414a601fbd30ded62673eacba509935404e0c54ded313061dceddaf1" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.334947 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.335135 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"41dccb6b414a601fbd30ded62673eacba509935404e0c54ded313061dceddaf1"} err="failed to get container status \"41dccb6b414a601fbd30ded62673eacba509935404e0c54ded313061dceddaf1\": rpc error: code = NotFound desc = could not find container \"41dccb6b414a601fbd30ded62673eacba509935404e0c54ded313061dceddaf1\": container with ID starting with 41dccb6b414a601fbd30ded62673eacba509935404e0c54ded313061dceddaf1 not found: ID does not exist" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.335154 4558 scope.go:117] "RemoveContainer" containerID="9a183b22791dde7f1a6bd83dfb318607a3ef80a62babd2e8c1a86b8f7bdefc9f" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.335469 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9a183b22791dde7f1a6bd83dfb318607a3ef80a62babd2e8c1a86b8f7bdefc9f"} err="failed to get container status \"9a183b22791dde7f1a6bd83dfb318607a3ef80a62babd2e8c1a86b8f7bdefc9f\": rpc error: code = NotFound desc = could not find container \"9a183b22791dde7f1a6bd83dfb318607a3ef80a62babd2e8c1a86b8f7bdefc9f\": container with ID starting with 9a183b22791dde7f1a6bd83dfb318607a3ef80a62babd2e8c1a86b8f7bdefc9f not found: ID does not exist" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.335488 4558 scope.go:117] "RemoveContainer" containerID="bd750746c3c946e143efe3b5e434ea6372a78afea873074ab019cb75fab30e8f" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.346403 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29232964-95b0-47f8-b0d4-e97ca6c2edb0-config-data\") pod \"nova-api-0\" (UID: \"29232964-95b0-47f8-b0d4-e97ca6c2edb0\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.346485 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/5f22129e-7bdd-41c8-8429-1835bdeac056-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"5f22129e-7bdd-41c8-8429-1835bdeac056\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.346518 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qwp4b\" (UniqueName: \"kubernetes.io/projected/29232964-95b0-47f8-b0d4-e97ca6c2edb0-kube-api-access-qwp4b\") pod \"nova-api-0\" (UID: \"29232964-95b0-47f8-b0d4-e97ca6c2edb0\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.346555 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f22129e-7bdd-41c8-8429-1835bdeac056-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"5f22129e-7bdd-41c8-8429-1835bdeac056\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.346581 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/5f22129e-7bdd-41c8-8429-1835bdeac056-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"5f22129e-7bdd-41c8-8429-1835bdeac056\") " 
pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.346616 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29232964-95b0-47f8-b0d4-e97ca6c2edb0-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"29232964-95b0-47f8-b0d4-e97ca6c2edb0\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.346747 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ph8gt\" (UniqueName: \"kubernetes.io/projected/5f22129e-7bdd-41c8-8429-1835bdeac056-kube-api-access-ph8gt\") pod \"nova-cell1-novncproxy-0\" (UID: \"5f22129e-7bdd-41c8-8429-1835bdeac056\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.346802 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/29232964-95b0-47f8-b0d4-e97ca6c2edb0-logs\") pod \"nova-api-0\" (UID: \"29232964-95b0-47f8-b0d4-e97ca6c2edb0\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.346871 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f22129e-7bdd-41c8-8429-1835bdeac056-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"5f22129e-7bdd-41c8-8429-1835bdeac056\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.352549 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/5f22129e-7bdd-41c8-8429-1835bdeac056-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"5f22129e-7bdd-41c8-8429-1835bdeac056\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.353745 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f22129e-7bdd-41c8-8429-1835bdeac056-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"5f22129e-7bdd-41c8-8429-1835bdeac056\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.357133 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f22129e-7bdd-41c8-8429-1835bdeac056-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"5f22129e-7bdd-41c8-8429-1835bdeac056\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.363669 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/5f22129e-7bdd-41c8-8429-1835bdeac056-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"5f22129e-7bdd-41c8-8429-1835bdeac056\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.368140 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ph8gt\" (UniqueName: \"kubernetes.io/projected/5f22129e-7bdd-41c8-8429-1835bdeac056-kube-api-access-ph8gt\") pod \"nova-cell1-novncproxy-0\" (UID: \"5f22129e-7bdd-41c8-8429-1835bdeac056\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" 
Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.419446 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.441941 4558 scope.go:117] "RemoveContainer" containerID="10345e5f559c4b6bca555657d434de8f554f23c8931d6a96bdbd64571c2d4037" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.443516 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.447550 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2caeb2e8-f66c-4eb1-8d50-022d18a4752a-config-data\") pod \"2caeb2e8-f66c-4eb1-8d50-022d18a4752a\" (UID: \"2caeb2e8-f66c-4eb1-8d50-022d18a4752a\") " Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.447664 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gtxnx\" (UniqueName: \"kubernetes.io/projected/2caeb2e8-f66c-4eb1-8d50-022d18a4752a-kube-api-access-gtxnx\") pod \"2caeb2e8-f66c-4eb1-8d50-022d18a4752a\" (UID: \"2caeb2e8-f66c-4eb1-8d50-022d18a4752a\") " Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.447779 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2caeb2e8-f66c-4eb1-8d50-022d18a4752a-combined-ca-bundle\") pod \"2caeb2e8-f66c-4eb1-8d50-022d18a4752a\" (UID: \"2caeb2e8-f66c-4eb1-8d50-022d18a4752a\") " Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.448764 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29232964-95b0-47f8-b0d4-e97ca6c2edb0-config-data\") pod \"nova-api-0\" (UID: \"29232964-95b0-47f8-b0d4-e97ca6c2edb0\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.448868 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qwp4b\" (UniqueName: \"kubernetes.io/projected/29232964-95b0-47f8-b0d4-e97ca6c2edb0-kube-api-access-qwp4b\") pod \"nova-api-0\" (UID: \"29232964-95b0-47f8-b0d4-e97ca6c2edb0\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.449237 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29232964-95b0-47f8-b0d4-e97ca6c2edb0-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"29232964-95b0-47f8-b0d4-e97ca6c2edb0\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.450291 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/29232964-95b0-47f8-b0d4-e97ca6c2edb0-logs\") pod \"nova-api-0\" (UID: \"29232964-95b0-47f8-b0d4-e97ca6c2edb0\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.450857 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/29232964-95b0-47f8-b0d4-e97ca6c2edb0-logs\") pod \"nova-api-0\" (UID: \"29232964-95b0-47f8-b0d4-e97ca6c2edb0\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.453830 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/29232964-95b0-47f8-b0d4-e97ca6c2edb0-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"29232964-95b0-47f8-b0d4-e97ca6c2edb0\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.457006 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29232964-95b0-47f8-b0d4-e97ca6c2edb0-config-data\") pod \"nova-api-0\" (UID: \"29232964-95b0-47f8-b0d4-e97ca6c2edb0\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.459034 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:14:30 crc kubenswrapper[4558]: E0120 17:14:30.459888 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2caeb2e8-f66c-4eb1-8d50-022d18a4752a" containerName="nova-scheduler-scheduler" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.459965 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="2caeb2e8-f66c-4eb1-8d50-022d18a4752a" containerName="nova-scheduler-scheduler" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.460284 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="2caeb2e8-f66c-4eb1-8d50-022d18a4752a" containerName="nova-scheduler-scheduler" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.461551 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.463566 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-metadata-config-data" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.464299 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-metadata-internal-svc" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.465308 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2caeb2e8-f66c-4eb1-8d50-022d18a4752a-kube-api-access-gtxnx" (OuterVolumeSpecName: "kube-api-access-gtxnx") pod "2caeb2e8-f66c-4eb1-8d50-022d18a4752a" (UID: "2caeb2e8-f66c-4eb1-8d50-022d18a4752a"). InnerVolumeSpecName "kube-api-access-gtxnx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.468424 4558 scope.go:117] "RemoveContainer" containerID="bd750746c3c946e143efe3b5e434ea6372a78afea873074ab019cb75fab30e8f" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.470620 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:14:30 crc kubenswrapper[4558]: E0120 17:14:30.474292 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bd750746c3c946e143efe3b5e434ea6372a78afea873074ab019cb75fab30e8f\": container with ID starting with bd750746c3c946e143efe3b5e434ea6372a78afea873074ab019cb75fab30e8f not found: ID does not exist" containerID="bd750746c3c946e143efe3b5e434ea6372a78afea873074ab019cb75fab30e8f" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.474341 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bd750746c3c946e143efe3b5e434ea6372a78afea873074ab019cb75fab30e8f"} err="failed to get container status \"bd750746c3c946e143efe3b5e434ea6372a78afea873074ab019cb75fab30e8f\": rpc error: code = NotFound desc = could not find container \"bd750746c3c946e143efe3b5e434ea6372a78afea873074ab019cb75fab30e8f\": container with ID starting with bd750746c3c946e143efe3b5e434ea6372a78afea873074ab019cb75fab30e8f not found: ID does not exist" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.474366 4558 scope.go:117] "RemoveContainer" containerID="10345e5f559c4b6bca555657d434de8f554f23c8931d6a96bdbd64571c2d4037" Jan 20 17:14:30 crc kubenswrapper[4558]: E0120 17:14:30.477064 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"10345e5f559c4b6bca555657d434de8f554f23c8931d6a96bdbd64571c2d4037\": container with ID starting with 10345e5f559c4b6bca555657d434de8f554f23c8931d6a96bdbd64571c2d4037 not found: ID does not exist" containerID="10345e5f559c4b6bca555657d434de8f554f23c8931d6a96bdbd64571c2d4037" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.477092 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"10345e5f559c4b6bca555657d434de8f554f23c8931d6a96bdbd64571c2d4037"} err="failed to get container status \"10345e5f559c4b6bca555657d434de8f554f23c8931d6a96bdbd64571c2d4037\": rpc error: code = NotFound desc = could not find container \"10345e5f559c4b6bca555657d434de8f554f23c8931d6a96bdbd64571c2d4037\": container with ID starting with 10345e5f559c4b6bca555657d434de8f554f23c8931d6a96bdbd64571c2d4037 not found: ID does not exist" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.477113 4558 scope.go:117] "RemoveContainer" containerID="bd750746c3c946e143efe3b5e434ea6372a78afea873074ab019cb75fab30e8f" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.480918 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bd750746c3c946e143efe3b5e434ea6372a78afea873074ab019cb75fab30e8f"} err="failed to get container status \"bd750746c3c946e143efe3b5e434ea6372a78afea873074ab019cb75fab30e8f\": rpc error: code = NotFound desc = could not find container \"bd750746c3c946e143efe3b5e434ea6372a78afea873074ab019cb75fab30e8f\": container with ID starting with bd750746c3c946e143efe3b5e434ea6372a78afea873074ab019cb75fab30e8f not found: ID does not exist" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.480961 4558 scope.go:117] 
"RemoveContainer" containerID="10345e5f559c4b6bca555657d434de8f554f23c8931d6a96bdbd64571c2d4037" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.481243 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qwp4b\" (UniqueName: \"kubernetes.io/projected/29232964-95b0-47f8-b0d4-e97ca6c2edb0-kube-api-access-qwp4b\") pod \"nova-api-0\" (UID: \"29232964-95b0-47f8-b0d4-e97ca6c2edb0\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.481590 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"10345e5f559c4b6bca555657d434de8f554f23c8931d6a96bdbd64571c2d4037"} err="failed to get container status \"10345e5f559c4b6bca555657d434de8f554f23c8931d6a96bdbd64571c2d4037\": rpc error: code = NotFound desc = could not find container \"10345e5f559c4b6bca555657d434de8f554f23c8931d6a96bdbd64571c2d4037\": container with ID starting with 10345e5f559c4b6bca555657d434de8f554f23c8931d6a96bdbd64571c2d4037 not found: ID does not exist" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.486649 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2caeb2e8-f66c-4eb1-8d50-022d18a4752a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2caeb2e8-f66c-4eb1-8d50-022d18a4752a" (UID: "2caeb2e8-f66c-4eb1-8d50-022d18a4752a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.491328 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2caeb2e8-f66c-4eb1-8d50-022d18a4752a-config-data" (OuterVolumeSpecName: "config-data") pod "2caeb2e8-f66c-4eb1-8d50-022d18a4752a" (UID: "2caeb2e8-f66c-4eb1-8d50-022d18a4752a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.552157 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8067a838-dcf7-4332-8442-e94a50021fe0-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"8067a838-dcf7-4332-8442-e94a50021fe0\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.552438 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8067a838-dcf7-4332-8442-e94a50021fe0-logs\") pod \"nova-metadata-0\" (UID: \"8067a838-dcf7-4332-8442-e94a50021fe0\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.552500 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8067a838-dcf7-4332-8442-e94a50021fe0-config-data\") pod \"nova-metadata-0\" (UID: \"8067a838-dcf7-4332-8442-e94a50021fe0\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.552565 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/8067a838-dcf7-4332-8442-e94a50021fe0-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"8067a838-dcf7-4332-8442-e94a50021fe0\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.552716 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jt7z8\" (UniqueName: \"kubernetes.io/projected/8067a838-dcf7-4332-8442-e94a50021fe0-kube-api-access-jt7z8\") pod \"nova-metadata-0\" (UID: \"8067a838-dcf7-4332-8442-e94a50021fe0\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.552985 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2caeb2e8-f66c-4eb1-8d50-022d18a4752a-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.553007 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gtxnx\" (UniqueName: \"kubernetes.io/projected/2caeb2e8-f66c-4eb1-8d50-022d18a4752a-kube-api-access-gtxnx\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.553019 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2caeb2e8-f66c-4eb1-8d50-022d18a4752a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.577260 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="447f5fd6-0794-4075-9934-0d9e9a6a3f07" path="/var/lib/kubelet/pods/447f5fd6-0794-4075-9934-0d9e9a6a3f07/volumes" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.578094 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="94ddd4ab-9303-4ada-8fcb-30b1f694e062" path="/var/lib/kubelet/pods/94ddd4ab-9303-4ada-8fcb-30b1f694e062/volumes" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.578757 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c8634585-03f0-46bd-9d24-8de13c7b2e03" 
path="/var/lib/kubelet/pods/c8634585-03f0-46bd-9d24-8de13c7b2e03/volumes" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.635371 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.661531 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.662013 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8067a838-dcf7-4332-8442-e94a50021fe0-logs\") pod \"nova-metadata-0\" (UID: \"8067a838-dcf7-4332-8442-e94a50021fe0\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.662081 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8067a838-dcf7-4332-8442-e94a50021fe0-config-data\") pod \"nova-metadata-0\" (UID: \"8067a838-dcf7-4332-8442-e94a50021fe0\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.662143 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/8067a838-dcf7-4332-8442-e94a50021fe0-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"8067a838-dcf7-4332-8442-e94a50021fe0\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.662226 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jt7z8\" (UniqueName: \"kubernetes.io/projected/8067a838-dcf7-4332-8442-e94a50021fe0-kube-api-access-jt7z8\") pod \"nova-metadata-0\" (UID: \"8067a838-dcf7-4332-8442-e94a50021fe0\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.662453 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8067a838-dcf7-4332-8442-e94a50021fe0-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"8067a838-dcf7-4332-8442-e94a50021fe0\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.663461 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8067a838-dcf7-4332-8442-e94a50021fe0-logs\") pod \"nova-metadata-0\" (UID: \"8067a838-dcf7-4332-8442-e94a50021fe0\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.667265 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8067a838-dcf7-4332-8442-e94a50021fe0-config-data\") pod \"nova-metadata-0\" (UID: \"8067a838-dcf7-4332-8442-e94a50021fe0\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.668290 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8067a838-dcf7-4332-8442-e94a50021fe0-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"8067a838-dcf7-4332-8442-e94a50021fe0\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.669727 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/8067a838-dcf7-4332-8442-e94a50021fe0-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"8067a838-dcf7-4332-8442-e94a50021fe0\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.678318 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jt7z8\" (UniqueName: \"kubernetes.io/projected/8067a838-dcf7-4332-8442-e94a50021fe0-kube-api-access-jt7z8\") pod \"nova-metadata-0\" (UID: \"8067a838-dcf7-4332-8442-e94a50021fe0\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:30 crc kubenswrapper[4558]: I0120 17:14:30.813260 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:31 crc kubenswrapper[4558]: I0120 17:14:31.072222 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:14:31 crc kubenswrapper[4558]: I0120 17:14:31.105898 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:14:31 crc kubenswrapper[4558]: I0120 17:14:31.147676 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:14:31 crc kubenswrapper[4558]: I0120 17:14:31.147709 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"2caeb2e8-f66c-4eb1-8d50-022d18a4752a","Type":"ContainerDied","Data":"5324bbb7b9d764646a86fd9ae2fd1eeb6a7954a411a86cfff178806306997761"} Jan 20 17:14:31 crc kubenswrapper[4558]: I0120 17:14:31.147769 4558 scope.go:117] "RemoveContainer" containerID="84fcb93f0f2a487eee0f2409ccc5beef495eb55ab92858af037a7c0fc064c55a" Jan 20 17:14:31 crc kubenswrapper[4558]: I0120 17:14:31.157056 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"5f22129e-7bdd-41c8-8429-1835bdeac056","Type":"ContainerStarted","Data":"f69cf700f58076e58068468c9c81f03810bbb31dcc484875f8a33673ab262780"} Jan 20 17:14:31 crc kubenswrapper[4558]: I0120 17:14:31.157269 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="5c988319-ece3-4016-b491-1b33b78ed16e" containerName="ceilometer-central-agent" containerID="cri-o://cafc05a16acc6bf3c534e283f0aef442c9ded6ca2cdb82522472318623e4b9ea" gracePeriod=30 Jan 20 17:14:31 crc kubenswrapper[4558]: I0120 17:14:31.157368 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="5c988319-ece3-4016-b491-1b33b78ed16e" containerName="proxy-httpd" containerID="cri-o://e70948baf660f38543ddb5acaa11cca1652348a68137c5b3ac38f88134bf299c" gracePeriod=30 Jan 20 17:14:31 crc kubenswrapper[4558]: I0120 17:14:31.157410 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="5c988319-ece3-4016-b491-1b33b78ed16e" containerName="sg-core" containerID="cri-o://e91939ae15b50ee2cb9f127b9219736a27901de5d5be68878bb47fc2b64c9dc5" gracePeriod=30 Jan 20 17:14:31 crc kubenswrapper[4558]: I0120 17:14:31.157459 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="5c988319-ece3-4016-b491-1b33b78ed16e" containerName="ceilometer-notification-agent" 
containerID="cri-o://4fb572825219a0fc20cb0370fc86ca598777392dfec9d5d57c869d049806523b" gracePeriod=30 Jan 20 17:14:31 crc kubenswrapper[4558]: I0120 17:14:31.202097 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:14:31 crc kubenswrapper[4558]: I0120 17:14:31.212358 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:14:31 crc kubenswrapper[4558]: I0120 17:14:31.224525 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:14:31 crc kubenswrapper[4558]: I0120 17:14:31.228143 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:14:31 crc kubenswrapper[4558]: I0120 17:14:31.239007 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-scheduler-config-data" Jan 20 17:14:31 crc kubenswrapper[4558]: I0120 17:14:31.239152 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:14:31 crc kubenswrapper[4558]: I0120 17:14:31.243267 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:14:31 crc kubenswrapper[4558]: I0120 17:14:31.289903 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bmphj\" (UniqueName: \"kubernetes.io/projected/ae67b718-71bb-4136-a699-8782a9096f4c-kube-api-access-bmphj\") pod \"nova-scheduler-0\" (UID: \"ae67b718-71bb-4136-a699-8782a9096f4c\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:14:31 crc kubenswrapper[4558]: I0120 17:14:31.290241 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae67b718-71bb-4136-a699-8782a9096f4c-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"ae67b718-71bb-4136-a699-8782a9096f4c\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:14:31 crc kubenswrapper[4558]: I0120 17:14:31.290293 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae67b718-71bb-4136-a699-8782a9096f4c-config-data\") pod \"nova-scheduler-0\" (UID: \"ae67b718-71bb-4136-a699-8782a9096f4c\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:14:31 crc kubenswrapper[4558]: I0120 17:14:31.333310 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:14:31 crc kubenswrapper[4558]: W0120 17:14:31.339235 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8067a838_dcf7_4332_8442_e94a50021fe0.slice/crio-d884abd42914467738fa0841650e5c9127f6f6c92f86c1d49de0d7443fda203d WatchSource:0}: Error finding container d884abd42914467738fa0841650e5c9127f6f6c92f86c1d49de0d7443fda203d: Status 404 returned error can't find the container with id d884abd42914467738fa0841650e5c9127f6f6c92f86c1d49de0d7443fda203d Jan 20 17:14:31 crc kubenswrapper[4558]: I0120 17:14:31.395286 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bmphj\" (UniqueName: \"kubernetes.io/projected/ae67b718-71bb-4136-a699-8782a9096f4c-kube-api-access-bmphj\") pod \"nova-scheduler-0\" (UID: \"ae67b718-71bb-4136-a699-8782a9096f4c\") " 
pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:14:31 crc kubenswrapper[4558]: I0120 17:14:31.395748 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae67b718-71bb-4136-a699-8782a9096f4c-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"ae67b718-71bb-4136-a699-8782a9096f4c\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:14:31 crc kubenswrapper[4558]: I0120 17:14:31.395792 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae67b718-71bb-4136-a699-8782a9096f4c-config-data\") pod \"nova-scheduler-0\" (UID: \"ae67b718-71bb-4136-a699-8782a9096f4c\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:14:31 crc kubenswrapper[4558]: I0120 17:14:31.400059 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae67b718-71bb-4136-a699-8782a9096f4c-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"ae67b718-71bb-4136-a699-8782a9096f4c\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:14:31 crc kubenswrapper[4558]: I0120 17:14:31.402318 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae67b718-71bb-4136-a699-8782a9096f4c-config-data\") pod \"nova-scheduler-0\" (UID: \"ae67b718-71bb-4136-a699-8782a9096f4c\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:14:31 crc kubenswrapper[4558]: I0120 17:14:31.410943 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bmphj\" (UniqueName: \"kubernetes.io/projected/ae67b718-71bb-4136-a699-8782a9096f4c-kube-api-access-bmphj\") pod \"nova-scheduler-0\" (UID: \"ae67b718-71bb-4136-a699-8782a9096f4c\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:14:31 crc kubenswrapper[4558]: I0120 17:14:31.533480 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:14:32 crc kubenswrapper[4558]: I0120 17:14:32.011499 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:14:32 crc kubenswrapper[4558]: I0120 17:14:32.142254 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:14:32 crc kubenswrapper[4558]: I0120 17:14:32.158534 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:14:32 crc kubenswrapper[4558]: I0120 17:14:32.181905 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"5f22129e-7bdd-41c8-8429-1835bdeac056","Type":"ContainerStarted","Data":"1d230d2a0fea8641ed5c9fd7572373d4544ed3a33eec9635c3211d644c4386b4"} Jan 20 17:14:32 crc kubenswrapper[4558]: I0120 17:14:32.239845 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:14:32 crc kubenswrapper[4558]: I0120 17:14:32.243210 4558 generic.go:334] "Generic (PLEG): container finished" podID="5c988319-ece3-4016-b491-1b33b78ed16e" containerID="e70948baf660f38543ddb5acaa11cca1652348a68137c5b3ac38f88134bf299c" exitCode=0 Jan 20 17:14:32 crc kubenswrapper[4558]: I0120 17:14:32.243240 4558 generic.go:334] "Generic (PLEG): container finished" podID="5c988319-ece3-4016-b491-1b33b78ed16e" containerID="e91939ae15b50ee2cb9f127b9219736a27901de5d5be68878bb47fc2b64c9dc5" exitCode=2 Jan 20 17:14:32 crc kubenswrapper[4558]: I0120 17:14:32.243252 4558 generic.go:334] "Generic (PLEG): container finished" podID="5c988319-ece3-4016-b491-1b33b78ed16e" containerID="4fb572825219a0fc20cb0370fc86ca598777392dfec9d5d57c869d049806523b" exitCode=0 Jan 20 17:14:32 crc kubenswrapper[4558]: I0120 17:14:32.243332 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"5c988319-ece3-4016-b491-1b33b78ed16e","Type":"ContainerDied","Data":"e70948baf660f38543ddb5acaa11cca1652348a68137c5b3ac38f88134bf299c"} Jan 20 17:14:32 crc kubenswrapper[4558]: I0120 17:14:32.243363 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"5c988319-ece3-4016-b491-1b33b78ed16e","Type":"ContainerDied","Data":"e91939ae15b50ee2cb9f127b9219736a27901de5d5be68878bb47fc2b64c9dc5"} Jan 20 17:14:32 crc kubenswrapper[4558]: I0120 17:14:32.243381 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"5c988319-ece3-4016-b491-1b33b78ed16e","Type":"ContainerDied","Data":"4fb572825219a0fc20cb0370fc86ca598777392dfec9d5d57c869d049806523b"} Jan 20 17:14:32 crc kubenswrapper[4558]: I0120 17:14:32.262363 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:14:32 crc kubenswrapper[4558]: I0120 17:14:32.262778 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"8067a838-dcf7-4332-8442-e94a50021fe0","Type":"ContainerStarted","Data":"ad8502ca35c45cecd011886fa79c9b3cb4c4d7ec7d7ad4682172d45925197beb"} Jan 20 17:14:32 crc kubenswrapper[4558]: I0120 17:14:32.262823 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"8067a838-dcf7-4332-8442-e94a50021fe0","Type":"ContainerStarted","Data":"ee4ca77631e9a03b15c1d64ddaacce12151ac26e83d7086a6d09f17539af0d00"} Jan 20 17:14:32 crc 
kubenswrapper[4558]: I0120 17:14:32.262837 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"8067a838-dcf7-4332-8442-e94a50021fe0","Type":"ContainerStarted","Data":"d884abd42914467738fa0841650e5c9127f6f6c92f86c1d49de0d7443fda203d"} Jan 20 17:14:32 crc kubenswrapper[4558]: I0120 17:14:32.285833 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"ae67b718-71bb-4136-a699-8782a9096f4c","Type":"ContainerStarted","Data":"fe8251362332809196e047fce882cc337bd1d9c848056c7f5b2d9243fe4207bd"} Jan 20 17:14:32 crc kubenswrapper[4558]: I0120 17:14:32.285873 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="ae67b718-71bb-4136-a699-8782a9096f4c" containerName="nova-scheduler-scheduler" containerID="cri-o://6026ba63914f7680778f2f25afac4b99568c6f14063d1d42d1eeb3493064d57e" gracePeriod=30 Jan 20 17:14:32 crc kubenswrapper[4558]: I0120 17:14:32.293445 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" podStartSLOduration=2.29342055 podStartE2EDuration="2.29342055s" podCreationTimestamp="2026-01-20 17:14:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:14:32.193767567 +0000 UTC m=+1965.954105535" watchObservedRunningTime="2026-01-20 17:14:32.29342055 +0000 UTC m=+1966.053758507" Jan 20 17:14:32 crc kubenswrapper[4558]: I0120 17:14:32.306990 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-metadata-0" podStartSLOduration=2.30698073 podStartE2EDuration="2.30698073s" podCreationTimestamp="2026-01-20 17:14:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:14:32.282926306 +0000 UTC m=+1966.043264273" watchObservedRunningTime="2026-01-20 17:14:32.30698073 +0000 UTC m=+1966.067318697" Jan 20 17:14:32 crc kubenswrapper[4558]: I0120 17:14:32.308912 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-scheduler-0" podStartSLOduration=1.308905749 podStartE2EDuration="1.308905749s" podCreationTimestamp="2026-01-20 17:14:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:14:32.304452636 +0000 UTC m=+1966.064790592" watchObservedRunningTime="2026-01-20 17:14:32.308905749 +0000 UTC m=+1966.069243716" Jan 20 17:14:32 crc kubenswrapper[4558]: I0120 17:14:32.313408 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"29232964-95b0-47f8-b0d4-e97ca6c2edb0","Type":"ContainerStarted","Data":"a744603c2dfecdc8fe308f661cc3863db4a7888b1a5c3d8465108f0f1204f886"} Jan 20 17:14:32 crc kubenswrapper[4558]: I0120 17:14:32.313455 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"29232964-95b0-47f8-b0d4-e97ca6c2edb0","Type":"ContainerStarted","Data":"7fb709765b7c19fe25f68c8cfe05c09ac65502597ecea2e22418bdd646e97d9d"} Jan 20 17:14:32 crc kubenswrapper[4558]: I0120 17:14:32.313470 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" 
event={"ID":"29232964-95b0-47f8-b0d4-e97ca6c2edb0","Type":"ContainerStarted","Data":"b1efe54118d732596a143e301b6cfd239b9e1f8f17fc5ccbbc5b55dca6c66d11"} Jan 20 17:14:32 crc kubenswrapper[4558]: I0120 17:14:32.332087 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-api-0" podStartSLOduration=2.3320795260000002 podStartE2EDuration="2.332079526s" podCreationTimestamp="2026-01-20 17:14:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:14:32.328436346 +0000 UTC m=+1966.088774313" watchObservedRunningTime="2026-01-20 17:14:32.332079526 +0000 UTC m=+1966.092417493" Jan 20 17:14:32 crc kubenswrapper[4558]: I0120 17:14:32.576060 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2caeb2e8-f66c-4eb1-8d50-022d18a4752a" path="/var/lib/kubelet/pods/2caeb2e8-f66c-4eb1-8d50-022d18a4752a/volumes" Jan 20 17:14:33 crc kubenswrapper[4558]: I0120 17:14:33.331455 4558 generic.go:334] "Generic (PLEG): container finished" podID="ecc5d42c-dbf5-44e2-b487-25aa99419dc1" containerID="b814fd8702534f35668ee31f88682bb9c76df8f8dfbcd5d7d7c5cbda3ab01f91" exitCode=0 Jan 20 17:14:33 crc kubenswrapper[4558]: I0120 17:14:33.331535 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-nsn26" event={"ID":"ecc5d42c-dbf5-44e2-b487-25aa99419dc1","Type":"ContainerDied","Data":"b814fd8702534f35668ee31f88682bb9c76df8f8dfbcd5d7d7c5cbda3ab01f91"} Jan 20 17:14:33 crc kubenswrapper[4558]: I0120 17:14:33.335404 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"ae67b718-71bb-4136-a699-8782a9096f4c","Type":"ContainerStarted","Data":"6026ba63914f7680778f2f25afac4b99568c6f14063d1d42d1eeb3493064d57e"} Jan 20 17:14:33 crc kubenswrapper[4558]: I0120 17:14:33.335699 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="29232964-95b0-47f8-b0d4-e97ca6c2edb0" containerName="nova-api-log" containerID="cri-o://7fb709765b7c19fe25f68c8cfe05c09ac65502597ecea2e22418bdd646e97d9d" gracePeriod=30 Jan 20 17:14:33 crc kubenswrapper[4558]: I0120 17:14:33.335924 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="8067a838-dcf7-4332-8442-e94a50021fe0" containerName="nova-metadata-log" containerID="cri-o://ee4ca77631e9a03b15c1d64ddaacce12151ac26e83d7086a6d09f17539af0d00" gracePeriod=30 Jan 20 17:14:33 crc kubenswrapper[4558]: I0120 17:14:33.335936 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="8067a838-dcf7-4332-8442-e94a50021fe0" containerName="nova-metadata-metadata" containerID="cri-o://ad8502ca35c45cecd011886fa79c9b3cb4c4d7ec7d7ad4682172d45925197beb" gracePeriod=30 Jan 20 17:14:33 crc kubenswrapper[4558]: I0120 17:14:33.335762 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="29232964-95b0-47f8-b0d4-e97ca6c2edb0" containerName="nova-api-api" containerID="cri-o://a744603c2dfecdc8fe308f661cc3863db4a7888b1a5c3d8465108f0f1204f886" gracePeriod=30 Jan 20 17:14:33 crc kubenswrapper[4558]: I0120 17:14:33.952615 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:33.997779 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.089811 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/29232964-95b0-47f8-b0d4-e97ca6c2edb0-logs\") pod \"29232964-95b0-47f8-b0d4-e97ca6c2edb0\" (UID: \"29232964-95b0-47f8-b0d4-e97ca6c2edb0\") " Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.090097 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29232964-95b0-47f8-b0d4-e97ca6c2edb0-combined-ca-bundle\") pod \"29232964-95b0-47f8-b0d4-e97ca6c2edb0\" (UID: \"29232964-95b0-47f8-b0d4-e97ca6c2edb0\") " Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.090200 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8067a838-dcf7-4332-8442-e94a50021fe0-logs\") pod \"8067a838-dcf7-4332-8442-e94a50021fe0\" (UID: \"8067a838-dcf7-4332-8442-e94a50021fe0\") " Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.090271 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jt7z8\" (UniqueName: \"kubernetes.io/projected/8067a838-dcf7-4332-8442-e94a50021fe0-kube-api-access-jt7z8\") pod \"8067a838-dcf7-4332-8442-e94a50021fe0\" (UID: \"8067a838-dcf7-4332-8442-e94a50021fe0\") " Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.090327 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8067a838-dcf7-4332-8442-e94a50021fe0-combined-ca-bundle\") pod \"8067a838-dcf7-4332-8442-e94a50021fe0\" (UID: \"8067a838-dcf7-4332-8442-e94a50021fe0\") " Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.090352 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8067a838-dcf7-4332-8442-e94a50021fe0-config-data\") pod \"8067a838-dcf7-4332-8442-e94a50021fe0\" (UID: \"8067a838-dcf7-4332-8442-e94a50021fe0\") " Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.090549 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29232964-95b0-47f8-b0d4-e97ca6c2edb0-config-data\") pod \"29232964-95b0-47f8-b0d4-e97ca6c2edb0\" (UID: \"29232964-95b0-47f8-b0d4-e97ca6c2edb0\") " Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.090636 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/8067a838-dcf7-4332-8442-e94a50021fe0-nova-metadata-tls-certs\") pod \"8067a838-dcf7-4332-8442-e94a50021fe0\" (UID: \"8067a838-dcf7-4332-8442-e94a50021fe0\") " Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.090733 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qwp4b\" (UniqueName: \"kubernetes.io/projected/29232964-95b0-47f8-b0d4-e97ca6c2edb0-kube-api-access-qwp4b\") pod \"29232964-95b0-47f8-b0d4-e97ca6c2edb0\" (UID: \"29232964-95b0-47f8-b0d4-e97ca6c2edb0\") " Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.091177 4558 operation_generator.go:803] UnmountVolume.TearDown 
succeeded for volume "kubernetes.io/empty-dir/29232964-95b0-47f8-b0d4-e97ca6c2edb0-logs" (OuterVolumeSpecName: "logs") pod "29232964-95b0-47f8-b0d4-e97ca6c2edb0" (UID: "29232964-95b0-47f8-b0d4-e97ca6c2edb0"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.091788 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/29232964-95b0-47f8-b0d4-e97ca6c2edb0-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.092069 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8067a838-dcf7-4332-8442-e94a50021fe0-logs" (OuterVolumeSpecName: "logs") pod "8067a838-dcf7-4332-8442-e94a50021fe0" (UID: "8067a838-dcf7-4332-8442-e94a50021fe0"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.098462 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/29232964-95b0-47f8-b0d4-e97ca6c2edb0-kube-api-access-qwp4b" (OuterVolumeSpecName: "kube-api-access-qwp4b") pod "29232964-95b0-47f8-b0d4-e97ca6c2edb0" (UID: "29232964-95b0-47f8-b0d4-e97ca6c2edb0"). InnerVolumeSpecName "kube-api-access-qwp4b". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.099320 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8067a838-dcf7-4332-8442-e94a50021fe0-kube-api-access-jt7z8" (OuterVolumeSpecName: "kube-api-access-jt7z8") pod "8067a838-dcf7-4332-8442-e94a50021fe0" (UID: "8067a838-dcf7-4332-8442-e94a50021fe0"). InnerVolumeSpecName "kube-api-access-jt7z8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.120791 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8067a838-dcf7-4332-8442-e94a50021fe0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8067a838-dcf7-4332-8442-e94a50021fe0" (UID: "8067a838-dcf7-4332-8442-e94a50021fe0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.122574 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29232964-95b0-47f8-b0d4-e97ca6c2edb0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "29232964-95b0-47f8-b0d4-e97ca6c2edb0" (UID: "29232964-95b0-47f8-b0d4-e97ca6c2edb0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.126452 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29232964-95b0-47f8-b0d4-e97ca6c2edb0-config-data" (OuterVolumeSpecName: "config-data") pod "29232964-95b0-47f8-b0d4-e97ca6c2edb0" (UID: "29232964-95b0-47f8-b0d4-e97ca6c2edb0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.128660 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8067a838-dcf7-4332-8442-e94a50021fe0-config-data" (OuterVolumeSpecName: "config-data") pod "8067a838-dcf7-4332-8442-e94a50021fe0" (UID: "8067a838-dcf7-4332-8442-e94a50021fe0"). 
InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.153959 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8067a838-dcf7-4332-8442-e94a50021fe0-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "8067a838-dcf7-4332-8442-e94a50021fe0" (UID: "8067a838-dcf7-4332-8442-e94a50021fe0"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.195212 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jt7z8\" (UniqueName: \"kubernetes.io/projected/8067a838-dcf7-4332-8442-e94a50021fe0-kube-api-access-jt7z8\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.195246 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8067a838-dcf7-4332-8442-e94a50021fe0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.195258 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8067a838-dcf7-4332-8442-e94a50021fe0-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.195274 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29232964-95b0-47f8-b0d4-e97ca6c2edb0-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.195287 4558 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/8067a838-dcf7-4332-8442-e94a50021fe0-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.195300 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qwp4b\" (UniqueName: \"kubernetes.io/projected/29232964-95b0-47f8-b0d4-e97ca6c2edb0-kube-api-access-qwp4b\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.195311 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29232964-95b0-47f8-b0d4-e97ca6c2edb0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.195323 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8067a838-dcf7-4332-8442-e94a50021fe0-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.346837 4558 generic.go:334] "Generic (PLEG): container finished" podID="8067a838-dcf7-4332-8442-e94a50021fe0" containerID="ad8502ca35c45cecd011886fa79c9b3cb4c4d7ec7d7ad4682172d45925197beb" exitCode=0 Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.346884 4558 generic.go:334] "Generic (PLEG): container finished" podID="8067a838-dcf7-4332-8442-e94a50021fe0" containerID="ee4ca77631e9a03b15c1d64ddaacce12151ac26e83d7086a6d09f17539af0d00" exitCode=143 Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.346932 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.346970 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"8067a838-dcf7-4332-8442-e94a50021fe0","Type":"ContainerDied","Data":"ad8502ca35c45cecd011886fa79c9b3cb4c4d7ec7d7ad4682172d45925197beb"} Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.347137 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"8067a838-dcf7-4332-8442-e94a50021fe0","Type":"ContainerDied","Data":"ee4ca77631e9a03b15c1d64ddaacce12151ac26e83d7086a6d09f17539af0d00"} Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.347179 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"8067a838-dcf7-4332-8442-e94a50021fe0","Type":"ContainerDied","Data":"d884abd42914467738fa0841650e5c9127f6f6c92f86c1d49de0d7443fda203d"} Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.347192 4558 scope.go:117] "RemoveContainer" containerID="ad8502ca35c45cecd011886fa79c9b3cb4c4d7ec7d7ad4682172d45925197beb" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.350260 4558 generic.go:334] "Generic (PLEG): container finished" podID="29232964-95b0-47f8-b0d4-e97ca6c2edb0" containerID="a744603c2dfecdc8fe308f661cc3863db4a7888b1a5c3d8465108f0f1204f886" exitCode=0 Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.350282 4558 generic.go:334] "Generic (PLEG): container finished" podID="29232964-95b0-47f8-b0d4-e97ca6c2edb0" containerID="7fb709765b7c19fe25f68c8cfe05c09ac65502597ecea2e22418bdd646e97d9d" exitCode=143 Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.350361 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"29232964-95b0-47f8-b0d4-e97ca6c2edb0","Type":"ContainerDied","Data":"a744603c2dfecdc8fe308f661cc3863db4a7888b1a5c3d8465108f0f1204f886"} Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.350405 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"29232964-95b0-47f8-b0d4-e97ca6c2edb0","Type":"ContainerDied","Data":"7fb709765b7c19fe25f68c8cfe05c09ac65502597ecea2e22418bdd646e97d9d"} Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.350420 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"29232964-95b0-47f8-b0d4-e97ca6c2edb0","Type":"ContainerDied","Data":"b1efe54118d732596a143e301b6cfd239b9e1f8f17fc5ccbbc5b55dca6c66d11"} Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.350407 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.356030 4558 generic.go:334] "Generic (PLEG): container finished" podID="d0da5348-6caa-4cc9-b170-9de34ebda4a1" containerID="0a0eaadfee5356b39912e205ecdbed7a048b984f1c3c340a311f0c6d29670423" exitCode=0 Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.356189 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-qhsxm" event={"ID":"d0da5348-6caa-4cc9-b170-9de34ebda4a1","Type":"ContainerDied","Data":"0a0eaadfee5356b39912e205ecdbed7a048b984f1c3c340a311f0c6d29670423"} Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.356336 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" podUID="5f22129e-7bdd-41c8-8429-1835bdeac056" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://1d230d2a0fea8641ed5c9fd7572373d4544ed3a33eec9635c3211d644c4386b4" gracePeriod=30 Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.378336 4558 scope.go:117] "RemoveContainer" containerID="ee4ca77631e9a03b15c1d64ddaacce12151ac26e83d7086a6d09f17539af0d00" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.416381 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.421024 4558 scope.go:117] "RemoveContainer" containerID="ad8502ca35c45cecd011886fa79c9b3cb4c4d7ec7d7ad4682172d45925197beb" Jan 20 17:14:34 crc kubenswrapper[4558]: E0120 17:14:34.422149 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ad8502ca35c45cecd011886fa79c9b3cb4c4d7ec7d7ad4682172d45925197beb\": container with ID starting with ad8502ca35c45cecd011886fa79c9b3cb4c4d7ec7d7ad4682172d45925197beb not found: ID does not exist" containerID="ad8502ca35c45cecd011886fa79c9b3cb4c4d7ec7d7ad4682172d45925197beb" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.422209 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ad8502ca35c45cecd011886fa79c9b3cb4c4d7ec7d7ad4682172d45925197beb"} err="failed to get container status \"ad8502ca35c45cecd011886fa79c9b3cb4c4d7ec7d7ad4682172d45925197beb\": rpc error: code = NotFound desc = could not find container \"ad8502ca35c45cecd011886fa79c9b3cb4c4d7ec7d7ad4682172d45925197beb\": container with ID starting with ad8502ca35c45cecd011886fa79c9b3cb4c4d7ec7d7ad4682172d45925197beb not found: ID does not exist" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.422232 4558 scope.go:117] "RemoveContainer" containerID="ee4ca77631e9a03b15c1d64ddaacce12151ac26e83d7086a6d09f17539af0d00" Jan 20 17:14:34 crc kubenswrapper[4558]: E0120 17:14:34.422712 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ee4ca77631e9a03b15c1d64ddaacce12151ac26e83d7086a6d09f17539af0d00\": container with ID starting with ee4ca77631e9a03b15c1d64ddaacce12151ac26e83d7086a6d09f17539af0d00 not found: ID does not exist" containerID="ee4ca77631e9a03b15c1d64ddaacce12151ac26e83d7086a6d09f17539af0d00" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.422728 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ee4ca77631e9a03b15c1d64ddaacce12151ac26e83d7086a6d09f17539af0d00"} err="failed to get container status 
\"ee4ca77631e9a03b15c1d64ddaacce12151ac26e83d7086a6d09f17539af0d00\": rpc error: code = NotFound desc = could not find container \"ee4ca77631e9a03b15c1d64ddaacce12151ac26e83d7086a6d09f17539af0d00\": container with ID starting with ee4ca77631e9a03b15c1d64ddaacce12151ac26e83d7086a6d09f17539af0d00 not found: ID does not exist" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.422741 4558 scope.go:117] "RemoveContainer" containerID="ad8502ca35c45cecd011886fa79c9b3cb4c4d7ec7d7ad4682172d45925197beb" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.422949 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ad8502ca35c45cecd011886fa79c9b3cb4c4d7ec7d7ad4682172d45925197beb"} err="failed to get container status \"ad8502ca35c45cecd011886fa79c9b3cb4c4d7ec7d7ad4682172d45925197beb\": rpc error: code = NotFound desc = could not find container \"ad8502ca35c45cecd011886fa79c9b3cb4c4d7ec7d7ad4682172d45925197beb\": container with ID starting with ad8502ca35c45cecd011886fa79c9b3cb4c4d7ec7d7ad4682172d45925197beb not found: ID does not exist" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.422975 4558 scope.go:117] "RemoveContainer" containerID="ee4ca77631e9a03b15c1d64ddaacce12151ac26e83d7086a6d09f17539af0d00" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.423142 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ee4ca77631e9a03b15c1d64ddaacce12151ac26e83d7086a6d09f17539af0d00"} err="failed to get container status \"ee4ca77631e9a03b15c1d64ddaacce12151ac26e83d7086a6d09f17539af0d00\": rpc error: code = NotFound desc = could not find container \"ee4ca77631e9a03b15c1d64ddaacce12151ac26e83d7086a6d09f17539af0d00\": container with ID starting with ee4ca77631e9a03b15c1d64ddaacce12151ac26e83d7086a6d09f17539af0d00 not found: ID does not exist" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.423188 4558 scope.go:117] "RemoveContainer" containerID="a744603c2dfecdc8fe308f661cc3863db4a7888b1a5c3d8465108f0f1204f886" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.432299 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.458915 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.467661 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:14:34 crc kubenswrapper[4558]: E0120 17:14:34.468555 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29232964-95b0-47f8-b0d4-e97ca6c2edb0" containerName="nova-api-log" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.468632 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="29232964-95b0-47f8-b0d4-e97ca6c2edb0" containerName="nova-api-log" Jan 20 17:14:34 crc kubenswrapper[4558]: E0120 17:14:34.468720 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29232964-95b0-47f8-b0d4-e97ca6c2edb0" containerName="nova-api-api" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.468788 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="29232964-95b0-47f8-b0d4-e97ca6c2edb0" containerName="nova-api-api" Jan 20 17:14:34 crc kubenswrapper[4558]: E0120 17:14:34.468849 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8067a838-dcf7-4332-8442-e94a50021fe0" containerName="nova-metadata-metadata" Jan 20 17:14:34 crc 
kubenswrapper[4558]: I0120 17:14:34.468899 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8067a838-dcf7-4332-8442-e94a50021fe0" containerName="nova-metadata-metadata" Jan 20 17:14:34 crc kubenswrapper[4558]: E0120 17:14:34.468946 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8067a838-dcf7-4332-8442-e94a50021fe0" containerName="nova-metadata-log" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.468986 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8067a838-dcf7-4332-8442-e94a50021fe0" containerName="nova-metadata-log" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.469226 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8067a838-dcf7-4332-8442-e94a50021fe0" containerName="nova-metadata-metadata" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.469816 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="29232964-95b0-47f8-b0d4-e97ca6c2edb0" containerName="nova-api-log" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.469886 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8067a838-dcf7-4332-8442-e94a50021fe0" containerName="nova-metadata-log" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.469939 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="29232964-95b0-47f8-b0d4-e97ca6c2edb0" containerName="nova-api-api" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.470272 4558 scope.go:117] "RemoveContainer" containerID="7fb709765b7c19fe25f68c8cfe05c09ac65502597ecea2e22418bdd646e97d9d" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.471066 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.475550 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.476018 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-api-config-data" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.481701 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.498235 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.500154 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.503583 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-metadata-internal-svc" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.504418 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-metadata-config-data" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.507538 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.524798 4558 scope.go:117] "RemoveContainer" containerID="a744603c2dfecdc8fe308f661cc3863db4a7888b1a5c3d8465108f0f1204f886" Jan 20 17:14:34 crc kubenswrapper[4558]: E0120 17:14:34.529906 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a744603c2dfecdc8fe308f661cc3863db4a7888b1a5c3d8465108f0f1204f886\": container with ID starting with a744603c2dfecdc8fe308f661cc3863db4a7888b1a5c3d8465108f0f1204f886 not found: ID does not exist" containerID="a744603c2dfecdc8fe308f661cc3863db4a7888b1a5c3d8465108f0f1204f886" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.529958 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a744603c2dfecdc8fe308f661cc3863db4a7888b1a5c3d8465108f0f1204f886"} err="failed to get container status \"a744603c2dfecdc8fe308f661cc3863db4a7888b1a5c3d8465108f0f1204f886\": rpc error: code = NotFound desc = could not find container \"a744603c2dfecdc8fe308f661cc3863db4a7888b1a5c3d8465108f0f1204f886\": container with ID starting with a744603c2dfecdc8fe308f661cc3863db4a7888b1a5c3d8465108f0f1204f886 not found: ID does not exist" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.529983 4558 scope.go:117] "RemoveContainer" containerID="7fb709765b7c19fe25f68c8cfe05c09ac65502597ecea2e22418bdd646e97d9d" Jan 20 17:14:34 crc kubenswrapper[4558]: E0120 17:14:34.530693 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7fb709765b7c19fe25f68c8cfe05c09ac65502597ecea2e22418bdd646e97d9d\": container with ID starting with 7fb709765b7c19fe25f68c8cfe05c09ac65502597ecea2e22418bdd646e97d9d not found: ID does not exist" containerID="7fb709765b7c19fe25f68c8cfe05c09ac65502597ecea2e22418bdd646e97d9d" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.530714 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7fb709765b7c19fe25f68c8cfe05c09ac65502597ecea2e22418bdd646e97d9d"} err="failed to get container status \"7fb709765b7c19fe25f68c8cfe05c09ac65502597ecea2e22418bdd646e97d9d\": rpc error: code = NotFound desc = could not find container \"7fb709765b7c19fe25f68c8cfe05c09ac65502597ecea2e22418bdd646e97d9d\": container with ID starting with 7fb709765b7c19fe25f68c8cfe05c09ac65502597ecea2e22418bdd646e97d9d not found: ID does not exist" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.530727 4558 scope.go:117] "RemoveContainer" containerID="a744603c2dfecdc8fe308f661cc3863db4a7888b1a5c3d8465108f0f1204f886" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.538264 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a744603c2dfecdc8fe308f661cc3863db4a7888b1a5c3d8465108f0f1204f886"} err="failed to get container status 
\"a744603c2dfecdc8fe308f661cc3863db4a7888b1a5c3d8465108f0f1204f886\": rpc error: code = NotFound desc = could not find container \"a744603c2dfecdc8fe308f661cc3863db4a7888b1a5c3d8465108f0f1204f886\": container with ID starting with a744603c2dfecdc8fe308f661cc3863db4a7888b1a5c3d8465108f0f1204f886 not found: ID does not exist" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.538310 4558 scope.go:117] "RemoveContainer" containerID="7fb709765b7c19fe25f68c8cfe05c09ac65502597ecea2e22418bdd646e97d9d" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.539084 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7fb709765b7c19fe25f68c8cfe05c09ac65502597ecea2e22418bdd646e97d9d"} err="failed to get container status \"7fb709765b7c19fe25f68c8cfe05c09ac65502597ecea2e22418bdd646e97d9d\": rpc error: code = NotFound desc = could not find container \"7fb709765b7c19fe25f68c8cfe05c09ac65502597ecea2e22418bdd646e97d9d\": container with ID starting with 7fb709765b7c19fe25f68c8cfe05c09ac65502597ecea2e22418bdd646e97d9d not found: ID does not exist" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.583709 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="29232964-95b0-47f8-b0d4-e97ca6c2edb0" path="/var/lib/kubelet/pods/29232964-95b0-47f8-b0d4-e97ca6c2edb0/volumes" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.584348 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8067a838-dcf7-4332-8442-e94a50021fe0" path="/var/lib/kubelet/pods/8067a838-dcf7-4332-8442-e94a50021fe0/volumes" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.605723 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vgkbb\" (UniqueName: \"kubernetes.io/projected/ded19e77-da51-4081-b20f-03f986cc412b-kube-api-access-vgkbb\") pod \"nova-api-0\" (UID: \"ded19e77-da51-4081-b20f-03f986cc412b\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.605796 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ded19e77-da51-4081-b20f-03f986cc412b-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"ded19e77-da51-4081-b20f-03f986cc412b\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.605832 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ded19e77-da51-4081-b20f-03f986cc412b-logs\") pod \"nova-api-0\" (UID: \"ded19e77-da51-4081-b20f-03f986cc412b\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.605856 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vw8kl\" (UniqueName: \"kubernetes.io/projected/40f1774e-3159-4036-9016-2dfdb11c3a73-kube-api-access-vw8kl\") pod \"nova-metadata-0\" (UID: \"40f1774e-3159-4036-9016-2dfdb11c3a73\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.605889 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/40f1774e-3159-4036-9016-2dfdb11c3a73-logs\") pod \"nova-metadata-0\" (UID: \"40f1774e-3159-4036-9016-2dfdb11c3a73\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:34 crc 
kubenswrapper[4558]: I0120 17:14:34.605921 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40f1774e-3159-4036-9016-2dfdb11c3a73-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"40f1774e-3159-4036-9016-2dfdb11c3a73\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.606025 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ded19e77-da51-4081-b20f-03f986cc412b-config-data\") pod \"nova-api-0\" (UID: \"ded19e77-da51-4081-b20f-03f986cc412b\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.606051 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/40f1774e-3159-4036-9016-2dfdb11c3a73-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"40f1774e-3159-4036-9016-2dfdb11c3a73\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.606083 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40f1774e-3159-4036-9016-2dfdb11c3a73-config-data\") pod \"nova-metadata-0\" (UID: \"40f1774e-3159-4036-9016-2dfdb11c3a73\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.708695 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vgkbb\" (UniqueName: \"kubernetes.io/projected/ded19e77-da51-4081-b20f-03f986cc412b-kube-api-access-vgkbb\") pod \"nova-api-0\" (UID: \"ded19e77-da51-4081-b20f-03f986cc412b\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.709013 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ded19e77-da51-4081-b20f-03f986cc412b-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"ded19e77-da51-4081-b20f-03f986cc412b\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.709066 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ded19e77-da51-4081-b20f-03f986cc412b-logs\") pod \"nova-api-0\" (UID: \"ded19e77-da51-4081-b20f-03f986cc412b\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.709086 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vw8kl\" (UniqueName: \"kubernetes.io/projected/40f1774e-3159-4036-9016-2dfdb11c3a73-kube-api-access-vw8kl\") pod \"nova-metadata-0\" (UID: \"40f1774e-3159-4036-9016-2dfdb11c3a73\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.709133 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/40f1774e-3159-4036-9016-2dfdb11c3a73-logs\") pod \"nova-metadata-0\" (UID: \"40f1774e-3159-4036-9016-2dfdb11c3a73\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.709173 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/40f1774e-3159-4036-9016-2dfdb11c3a73-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"40f1774e-3159-4036-9016-2dfdb11c3a73\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.709253 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ded19e77-da51-4081-b20f-03f986cc412b-config-data\") pod \"nova-api-0\" (UID: \"ded19e77-da51-4081-b20f-03f986cc412b\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.709276 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/40f1774e-3159-4036-9016-2dfdb11c3a73-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"40f1774e-3159-4036-9016-2dfdb11c3a73\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.709299 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40f1774e-3159-4036-9016-2dfdb11c3a73-config-data\") pod \"nova-metadata-0\" (UID: \"40f1774e-3159-4036-9016-2dfdb11c3a73\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.709706 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/40f1774e-3159-4036-9016-2dfdb11c3a73-logs\") pod \"nova-metadata-0\" (UID: \"40f1774e-3159-4036-9016-2dfdb11c3a73\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.709837 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ded19e77-da51-4081-b20f-03f986cc412b-logs\") pod \"nova-api-0\" (UID: \"ded19e77-da51-4081-b20f-03f986cc412b\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.714930 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40f1774e-3159-4036-9016-2dfdb11c3a73-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"40f1774e-3159-4036-9016-2dfdb11c3a73\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.714948 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ded19e77-da51-4081-b20f-03f986cc412b-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"ded19e77-da51-4081-b20f-03f986cc412b\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.716189 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40f1774e-3159-4036-9016-2dfdb11c3a73-config-data\") pod \"nova-metadata-0\" (UID: \"40f1774e-3159-4036-9016-2dfdb11c3a73\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.716770 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ded19e77-da51-4081-b20f-03f986cc412b-config-data\") pod \"nova-api-0\" (UID: \"ded19e77-da51-4081-b20f-03f986cc412b\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.718469 4558 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/40f1774e-3159-4036-9016-2dfdb11c3a73-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"40f1774e-3159-4036-9016-2dfdb11c3a73\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.724890 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vgkbb\" (UniqueName: \"kubernetes.io/projected/ded19e77-da51-4081-b20f-03f986cc412b-kube-api-access-vgkbb\") pod \"nova-api-0\" (UID: \"ded19e77-da51-4081-b20f-03f986cc412b\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.726689 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vw8kl\" (UniqueName: \"kubernetes.io/projected/40f1774e-3159-4036-9016-2dfdb11c3a73-kube-api-access-vw8kl\") pod \"nova-metadata-0\" (UID: \"40f1774e-3159-4036-9016-2dfdb11c3a73\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.815422 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.824229 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:34 crc kubenswrapper[4558]: I0120 17:14:34.886858 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-nsn26" Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.017686 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ecc5d42c-dbf5-44e2-b487-25aa99419dc1-combined-ca-bundle\") pod \"ecc5d42c-dbf5-44e2-b487-25aa99419dc1\" (UID: \"ecc5d42c-dbf5-44e2-b487-25aa99419dc1\") " Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.017992 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ecc5d42c-dbf5-44e2-b487-25aa99419dc1-config-data\") pod \"ecc5d42c-dbf5-44e2-b487-25aa99419dc1\" (UID: \"ecc5d42c-dbf5-44e2-b487-25aa99419dc1\") " Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.018220 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ecc5d42c-dbf5-44e2-b487-25aa99419dc1-scripts\") pod \"ecc5d42c-dbf5-44e2-b487-25aa99419dc1\" (UID: \"ecc5d42c-dbf5-44e2-b487-25aa99419dc1\") " Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.018245 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2s4tm\" (UniqueName: \"kubernetes.io/projected/ecc5d42c-dbf5-44e2-b487-25aa99419dc1-kube-api-access-2s4tm\") pod \"ecc5d42c-dbf5-44e2-b487-25aa99419dc1\" (UID: \"ecc5d42c-dbf5-44e2-b487-25aa99419dc1\") " Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.023596 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ecc5d42c-dbf5-44e2-b487-25aa99419dc1-scripts" (OuterVolumeSpecName: "scripts") pod "ecc5d42c-dbf5-44e2-b487-25aa99419dc1" (UID: "ecc5d42c-dbf5-44e2-b487-25aa99419dc1"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.024191 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ecc5d42c-dbf5-44e2-b487-25aa99419dc1-kube-api-access-2s4tm" (OuterVolumeSpecName: "kube-api-access-2s4tm") pod "ecc5d42c-dbf5-44e2-b487-25aa99419dc1" (UID: "ecc5d42c-dbf5-44e2-b487-25aa99419dc1"). InnerVolumeSpecName "kube-api-access-2s4tm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.044578 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ecc5d42c-dbf5-44e2-b487-25aa99419dc1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ecc5d42c-dbf5-44e2-b487-25aa99419dc1" (UID: "ecc5d42c-dbf5-44e2-b487-25aa99419dc1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.071935 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ecc5d42c-dbf5-44e2-b487-25aa99419dc1-config-data" (OuterVolumeSpecName: "config-data") pod "ecc5d42c-dbf5-44e2-b487-25aa99419dc1" (UID: "ecc5d42c-dbf5-44e2-b487-25aa99419dc1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.082721 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.125442 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ecc5d42c-dbf5-44e2-b487-25aa99419dc1-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.125478 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2s4tm\" (UniqueName: \"kubernetes.io/projected/ecc5d42c-dbf5-44e2-b487-25aa99419dc1-kube-api-access-2s4tm\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.125492 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ecc5d42c-dbf5-44e2-b487-25aa99419dc1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.125503 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ecc5d42c-dbf5-44e2-b487-25aa99419dc1-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.227099 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ph8gt\" (UniqueName: \"kubernetes.io/projected/5f22129e-7bdd-41c8-8429-1835bdeac056-kube-api-access-ph8gt\") pod \"5f22129e-7bdd-41c8-8429-1835bdeac056\" (UID: \"5f22129e-7bdd-41c8-8429-1835bdeac056\") " Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.227268 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f22129e-7bdd-41c8-8429-1835bdeac056-combined-ca-bundle\") pod \"5f22129e-7bdd-41c8-8429-1835bdeac056\" (UID: \"5f22129e-7bdd-41c8-8429-1835bdeac056\") " Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.227302 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/5f22129e-7bdd-41c8-8429-1835bdeac056-config-data\") pod \"5f22129e-7bdd-41c8-8429-1835bdeac056\" (UID: \"5f22129e-7bdd-41c8-8429-1835bdeac056\") " Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.227339 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/5f22129e-7bdd-41c8-8429-1835bdeac056-nova-novncproxy-tls-certs\") pod \"5f22129e-7bdd-41c8-8429-1835bdeac056\" (UID: \"5f22129e-7bdd-41c8-8429-1835bdeac056\") " Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.227370 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/5f22129e-7bdd-41c8-8429-1835bdeac056-vencrypt-tls-certs\") pod \"5f22129e-7bdd-41c8-8429-1835bdeac056\" (UID: \"5f22129e-7bdd-41c8-8429-1835bdeac056\") " Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.230462 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f22129e-7bdd-41c8-8429-1835bdeac056-kube-api-access-ph8gt" (OuterVolumeSpecName: "kube-api-access-ph8gt") pod "5f22129e-7bdd-41c8-8429-1835bdeac056" (UID: "5f22129e-7bdd-41c8-8429-1835bdeac056"). InnerVolumeSpecName "kube-api-access-ph8gt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.259674 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f22129e-7bdd-41c8-8429-1835bdeac056-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5f22129e-7bdd-41c8-8429-1835bdeac056" (UID: "5f22129e-7bdd-41c8-8429-1835bdeac056"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.266676 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f22129e-7bdd-41c8-8429-1835bdeac056-config-data" (OuterVolumeSpecName: "config-data") pod "5f22129e-7bdd-41c8-8429-1835bdeac056" (UID: "5f22129e-7bdd-41c8-8429-1835bdeac056"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.267695 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f22129e-7bdd-41c8-8429-1835bdeac056-vencrypt-tls-certs" (OuterVolumeSpecName: "vencrypt-tls-certs") pod "5f22129e-7bdd-41c8-8429-1835bdeac056" (UID: "5f22129e-7bdd-41c8-8429-1835bdeac056"). InnerVolumeSpecName "vencrypt-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.268481 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f22129e-7bdd-41c8-8429-1835bdeac056-nova-novncproxy-tls-certs" (OuterVolumeSpecName: "nova-novncproxy-tls-certs") pod "5f22129e-7bdd-41c8-8429-1835bdeac056" (UID: "5f22129e-7bdd-41c8-8429-1835bdeac056"). InnerVolumeSpecName "nova-novncproxy-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.330312 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ph8gt\" (UniqueName: \"kubernetes.io/projected/5f22129e-7bdd-41c8-8429-1835bdeac056-kube-api-access-ph8gt\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.330348 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f22129e-7bdd-41c8-8429-1835bdeac056-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.330360 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f22129e-7bdd-41c8-8429-1835bdeac056-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.330373 4558 reconciler_common.go:293] "Volume detached for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/5f22129e-7bdd-41c8-8429-1835bdeac056-nova-novncproxy-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.330384 4558 reconciler_common.go:293] "Volume detached for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/5f22129e-7bdd-41c8-8429-1835bdeac056-vencrypt-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.335803 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:14:35 crc kubenswrapper[4558]: W0120 17:14:35.341551 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod40f1774e_3159_4036_9016_2dfdb11c3a73.slice/crio-aad0729df9d33527ca45cffae693f83a8aebb573cc85726dfc7c9c8b49405cff WatchSource:0}: Error finding container aad0729df9d33527ca45cffae693f83a8aebb573cc85726dfc7c9c8b49405cff: Status 404 returned error can't find the container with id aad0729df9d33527ca45cffae693f83a8aebb573cc85726dfc7c9c8b49405cff Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.378049 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-nsn26" event={"ID":"ecc5d42c-dbf5-44e2-b487-25aa99419dc1","Type":"ContainerDied","Data":"3b34d9abc197b7150d227b467de430bd41ca143df5114b03e77dacd7c4a78df6"} Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.378109 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-nsn26" Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.378116 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3b34d9abc197b7150d227b467de430bd41ca143df5114b03e77dacd7c4a78df6" Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.386851 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"40f1774e-3159-4036-9016-2dfdb11c3a73","Type":"ContainerStarted","Data":"aad0729df9d33527ca45cffae693f83a8aebb573cc85726dfc7c9c8b49405cff"} Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.399817 4558 generic.go:334] "Generic (PLEG): container finished" podID="5f22129e-7bdd-41c8-8429-1835bdeac056" containerID="1d230d2a0fea8641ed5c9fd7572373d4544ed3a33eec9635c3211d644c4386b4" exitCode=0 Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.399931 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"5f22129e-7bdd-41c8-8429-1835bdeac056","Type":"ContainerDied","Data":"1d230d2a0fea8641ed5c9fd7572373d4544ed3a33eec9635c3211d644c4386b4"} Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.399966 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"5f22129e-7bdd-41c8-8429-1835bdeac056","Type":"ContainerDied","Data":"f69cf700f58076e58068468c9c81f03810bbb31dcc484875f8a33673ab262780"} Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.399999 4558 scope.go:117] "RemoveContainer" containerID="1d230d2a0fea8641ed5c9fd7572373d4544ed3a33eec9635c3211d644c4386b4" Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.400111 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.426349 4558 generic.go:334] "Generic (PLEG): container finished" podID="f47cb9e2-2306-494d-94ff-b60393a9413e" containerID="2100b7c749ddb30bbb158352b085f32659152dcc98b26a89f4ae8fd0e68b79ce" exitCode=0 Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.426586 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/watcher-decision-engine-0" event={"ID":"f47cb9e2-2306-494d-94ff-b60393a9413e","Type":"ContainerDied","Data":"2100b7c749ddb30bbb158352b085f32659152dcc98b26a89f4ae8fd0e68b79ce"} Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.428727 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.449259 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:14:35 crc kubenswrapper[4558]: E0120 17:14:35.449820 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f22129e-7bdd-41c8-8429-1835bdeac056" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.449836 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f22129e-7bdd-41c8-8429-1835bdeac056" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:14:35 crc kubenswrapper[4558]: E0120 17:14:35.449864 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ecc5d42c-dbf5-44e2-b487-25aa99419dc1" containerName="nova-cell1-conductor-db-sync" Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.449871 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ecc5d42c-dbf5-44e2-b487-25aa99419dc1" containerName="nova-cell1-conductor-db-sync" Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.450080 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ecc5d42c-dbf5-44e2-b487-25aa99419dc1" containerName="nova-cell1-conductor-db-sync" Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.450101 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f22129e-7bdd-41c8-8429-1835bdeac056" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.450848 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.461901 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-conductor-config-data" Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.462218 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.479110 4558 scope.go:117] "RemoveContainer" containerID="1d230d2a0fea8641ed5c9fd7572373d4544ed3a33eec9635c3211d644c4386b4" Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.479116 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.485825 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:14:35 crc kubenswrapper[4558]: E0120 17:14:35.492271 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1d230d2a0fea8641ed5c9fd7572373d4544ed3a33eec9635c3211d644c4386b4\": container with ID starting with 1d230d2a0fea8641ed5c9fd7572373d4544ed3a33eec9635c3211d644c4386b4 not found: ID does not exist" containerID="1d230d2a0fea8641ed5c9fd7572373d4544ed3a33eec9635c3211d644c4386b4" Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.492301 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d230d2a0fea8641ed5c9fd7572373d4544ed3a33eec9635c3211d644c4386b4"} err="failed to get container status \"1d230d2a0fea8641ed5c9fd7572373d4544ed3a33eec9635c3211d644c4386b4\": rpc error: code = NotFound desc = could not find container \"1d230d2a0fea8641ed5c9fd7572373d4544ed3a33eec9635c3211d644c4386b4\": container with ID starting with 1d230d2a0fea8641ed5c9fd7572373d4544ed3a33eec9635c3211d644c4386b4 not found: ID does not exist" Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.494482 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.495889 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.498829 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-novncproxy-cell1-vencrypt" Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.499015 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-novncproxy-cell1-public-svc" Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.499288 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-novncproxy-config-data" Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.535773 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/217a2ebc-8331-4a6f-9113-2c813563a2b8-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"217a2ebc-8331-4a6f-9113-2c813563a2b8\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.536114 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rbfpb\" (UniqueName: \"kubernetes.io/projected/217a2ebc-8331-4a6f-9113-2c813563a2b8-kube-api-access-rbfpb\") pod \"nova-cell1-conductor-0\" (UID: \"217a2ebc-8331-4a6f-9113-2c813563a2b8\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.536265 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/217a2ebc-8331-4a6f-9113-2c813563a2b8-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"217a2ebc-8331-4a6f-9113-2c813563a2b8\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.543011 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.638239 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d89mw\" (UniqueName: \"kubernetes.io/projected/ad371747-3ff0-49b0-b2a6-33d7bb7cd16b-kube-api-access-d89mw\") pod \"nova-cell1-novncproxy-0\" (UID: \"ad371747-3ff0-49b0-b2a6-33d7bb7cd16b\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.638398 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rbfpb\" (UniqueName: \"kubernetes.io/projected/217a2ebc-8331-4a6f-9113-2c813563a2b8-kube-api-access-rbfpb\") pod \"nova-cell1-conductor-0\" (UID: \"217a2ebc-8331-4a6f-9113-2c813563a2b8\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.638438 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/ad371747-3ff0-49b0-b2a6-33d7bb7cd16b-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"ad371747-3ff0-49b0-b2a6-33d7bb7cd16b\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.638535 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/217a2ebc-8331-4a6f-9113-2c813563a2b8-config-data\") pod 
\"nova-cell1-conductor-0\" (UID: \"217a2ebc-8331-4a6f-9113-2c813563a2b8\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.638653 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/217a2ebc-8331-4a6f-9113-2c813563a2b8-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"217a2ebc-8331-4a6f-9113-2c813563a2b8\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.638712 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/ad371747-3ff0-49b0-b2a6-33d7bb7cd16b-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"ad371747-3ff0-49b0-b2a6-33d7bb7cd16b\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.638735 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad371747-3ff0-49b0-b2a6-33d7bb7cd16b-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"ad371747-3ff0-49b0-b2a6-33d7bb7cd16b\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.638868 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad371747-3ff0-49b0-b2a6-33d7bb7cd16b-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"ad371747-3ff0-49b0-b2a6-33d7bb7cd16b\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.654657 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/217a2ebc-8331-4a6f-9113-2c813563a2b8-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"217a2ebc-8331-4a6f-9113-2c813563a2b8\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.654812 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/217a2ebc-8331-4a6f-9113-2c813563a2b8-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"217a2ebc-8331-4a6f-9113-2c813563a2b8\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.655773 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rbfpb\" (UniqueName: \"kubernetes.io/projected/217a2ebc-8331-4a6f-9113-2c813563a2b8-kube-api-access-rbfpb\") pod \"nova-cell1-conductor-0\" (UID: \"217a2ebc-8331-4a6f-9113-2c813563a2b8\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.741637 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/ad371747-3ff0-49b0-b2a6-33d7bb7cd16b-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"ad371747-3ff0-49b0-b2a6-33d7bb7cd16b\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.741695 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/ad371747-3ff0-49b0-b2a6-33d7bb7cd16b-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"ad371747-3ff0-49b0-b2a6-33d7bb7cd16b\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.741759 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad371747-3ff0-49b0-b2a6-33d7bb7cd16b-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"ad371747-3ff0-49b0-b2a6-33d7bb7cd16b\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.741805 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d89mw\" (UniqueName: \"kubernetes.io/projected/ad371747-3ff0-49b0-b2a6-33d7bb7cd16b-kube-api-access-d89mw\") pod \"nova-cell1-novncproxy-0\" (UID: \"ad371747-3ff0-49b0-b2a6-33d7bb7cd16b\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.741851 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/ad371747-3ff0-49b0-b2a6-33d7bb7cd16b-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"ad371747-3ff0-49b0-b2a6-33d7bb7cd16b\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.744784 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/ad371747-3ff0-49b0-b2a6-33d7bb7cd16b-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"ad371747-3ff0-49b0-b2a6-33d7bb7cd16b\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.746689 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/ad371747-3ff0-49b0-b2a6-33d7bb7cd16b-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"ad371747-3ff0-49b0-b2a6-33d7bb7cd16b\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.747821 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad371747-3ff0-49b0-b2a6-33d7bb7cd16b-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"ad371747-3ff0-49b0-b2a6-33d7bb7cd16b\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.749129 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad371747-3ff0-49b0-b2a6-33d7bb7cd16b-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"ad371747-3ff0-49b0-b2a6-33d7bb7cd16b\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.757607 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d89mw\" (UniqueName: \"kubernetes.io/projected/ad371747-3ff0-49b0-b2a6-33d7bb7cd16b-kube-api-access-d89mw\") pod \"nova-cell1-novncproxy-0\" (UID: \"ad371747-3ff0-49b0-b2a6-33d7bb7cd16b\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.799894 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.806719 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.823581 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/watcher-decision-engine-0" Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.824001 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-qhsxm" Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.945757 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f6jfz\" (UniqueName: \"kubernetes.io/projected/d0da5348-6caa-4cc9-b170-9de34ebda4a1-kube-api-access-f6jfz\") pod \"d0da5348-6caa-4cc9-b170-9de34ebda4a1\" (UID: \"d0da5348-6caa-4cc9-b170-9de34ebda4a1\") " Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.945833 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f47cb9e2-2306-494d-94ff-b60393a9413e-config-data\") pod \"f47cb9e2-2306-494d-94ff-b60393a9413e\" (UID: \"f47cb9e2-2306-494d-94ff-b60393a9413e\") " Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.945893 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f47cb9e2-2306-494d-94ff-b60393a9413e-logs\") pod \"f47cb9e2-2306-494d-94ff-b60393a9413e\" (UID: \"f47cb9e2-2306-494d-94ff-b60393a9413e\") " Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.946068 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f47cb9e2-2306-494d-94ff-b60393a9413e-combined-ca-bundle\") pod \"f47cb9e2-2306-494d-94ff-b60393a9413e\" (UID: \"f47cb9e2-2306-494d-94ff-b60393a9413e\") " Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.946139 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/f47cb9e2-2306-494d-94ff-b60393a9413e-custom-prometheus-ca\") pod \"f47cb9e2-2306-494d-94ff-b60393a9413e\" (UID: \"f47cb9e2-2306-494d-94ff-b60393a9413e\") " Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.946205 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0da5348-6caa-4cc9-b170-9de34ebda4a1-combined-ca-bundle\") pod \"d0da5348-6caa-4cc9-b170-9de34ebda4a1\" (UID: \"d0da5348-6caa-4cc9-b170-9de34ebda4a1\") " Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.946255 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wvkrs\" (UniqueName: \"kubernetes.io/projected/f47cb9e2-2306-494d-94ff-b60393a9413e-kube-api-access-wvkrs\") pod \"f47cb9e2-2306-494d-94ff-b60393a9413e\" (UID: \"f47cb9e2-2306-494d-94ff-b60393a9413e\") " Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.946321 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d0da5348-6caa-4cc9-b170-9de34ebda4a1-scripts\") pod \"d0da5348-6caa-4cc9-b170-9de34ebda4a1\" (UID: \"d0da5348-6caa-4cc9-b170-9de34ebda4a1\") " Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 
17:14:35.946338 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0da5348-6caa-4cc9-b170-9de34ebda4a1-config-data\") pod \"d0da5348-6caa-4cc9-b170-9de34ebda4a1\" (UID: \"d0da5348-6caa-4cc9-b170-9de34ebda4a1\") " Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.950201 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d0da5348-6caa-4cc9-b170-9de34ebda4a1-scripts" (OuterVolumeSpecName: "scripts") pod "d0da5348-6caa-4cc9-b170-9de34ebda4a1" (UID: "d0da5348-6caa-4cc9-b170-9de34ebda4a1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.951009 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f47cb9e2-2306-494d-94ff-b60393a9413e-kube-api-access-wvkrs" (OuterVolumeSpecName: "kube-api-access-wvkrs") pod "f47cb9e2-2306-494d-94ff-b60393a9413e" (UID: "f47cb9e2-2306-494d-94ff-b60393a9413e"). InnerVolumeSpecName "kube-api-access-wvkrs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.960280 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f47cb9e2-2306-494d-94ff-b60393a9413e-logs" (OuterVolumeSpecName: "logs") pod "f47cb9e2-2306-494d-94ff-b60393a9413e" (UID: "f47cb9e2-2306-494d-94ff-b60393a9413e"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.960387 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d0da5348-6caa-4cc9-b170-9de34ebda4a1-kube-api-access-f6jfz" (OuterVolumeSpecName: "kube-api-access-f6jfz") pod "d0da5348-6caa-4cc9-b170-9de34ebda4a1" (UID: "d0da5348-6caa-4cc9-b170-9de34ebda4a1"). InnerVolumeSpecName "kube-api-access-f6jfz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.974589 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f47cb9e2-2306-494d-94ff-b60393a9413e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f47cb9e2-2306-494d-94ff-b60393a9413e" (UID: "f47cb9e2-2306-494d-94ff-b60393a9413e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.977603 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d0da5348-6caa-4cc9-b170-9de34ebda4a1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d0da5348-6caa-4cc9-b170-9de34ebda4a1" (UID: "d0da5348-6caa-4cc9-b170-9de34ebda4a1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.983530 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d0da5348-6caa-4cc9-b170-9de34ebda4a1-config-data" (OuterVolumeSpecName: "config-data") pod "d0da5348-6caa-4cc9-b170-9de34ebda4a1" (UID: "d0da5348-6caa-4cc9-b170-9de34ebda4a1"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:14:35 crc kubenswrapper[4558]: I0120 17:14:35.986289 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f47cb9e2-2306-494d-94ff-b60393a9413e-custom-prometheus-ca" (OuterVolumeSpecName: "custom-prometheus-ca") pod "f47cb9e2-2306-494d-94ff-b60393a9413e" (UID: "f47cb9e2-2306-494d-94ff-b60393a9413e"). InnerVolumeSpecName "custom-prometheus-ca". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.026731 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f47cb9e2-2306-494d-94ff-b60393a9413e-config-data" (OuterVolumeSpecName: "config-data") pod "f47cb9e2-2306-494d-94ff-b60393a9413e" (UID: "f47cb9e2-2306-494d-94ff-b60393a9413e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.049618 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f6jfz\" (UniqueName: \"kubernetes.io/projected/d0da5348-6caa-4cc9-b170-9de34ebda4a1-kube-api-access-f6jfz\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.049652 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f47cb9e2-2306-494d-94ff-b60393a9413e-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.049665 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f47cb9e2-2306-494d-94ff-b60393a9413e-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.049675 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f47cb9e2-2306-494d-94ff-b60393a9413e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.049684 4558 reconciler_common.go:293] "Volume detached for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/f47cb9e2-2306-494d-94ff-b60393a9413e-custom-prometheus-ca\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.049694 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0da5348-6caa-4cc9-b170-9de34ebda4a1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.049703 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wvkrs\" (UniqueName: \"kubernetes.io/projected/f47cb9e2-2306-494d-94ff-b60393a9413e-kube-api-access-wvkrs\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.049712 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d0da5348-6caa-4cc9-b170-9de34ebda4a1-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.049720 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0da5348-6caa-4cc9-b170-9de34ebda4a1-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.284757 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:14:36 crc kubenswrapper[4558]: W0120 
17:14:36.288180 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod217a2ebc_8331_4a6f_9113_2c813563a2b8.slice/crio-fbc48c8bed18e946f067cb8bb8a942077ad48821a9531de67a200f0aa5a7d292 WatchSource:0}: Error finding container fbc48c8bed18e946f067cb8bb8a942077ad48821a9531de67a200f0aa5a7d292: Status 404 returned error can't find the container with id fbc48c8bed18e946f067cb8bb8a942077ad48821a9531de67a200f0aa5a7d292 Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.357131 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.368256 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.467495 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5c988319-ece3-4016-b491-1b33b78ed16e-run-httpd\") pod \"5c988319-ece3-4016-b491-1b33b78ed16e\" (UID: \"5c988319-ece3-4016-b491-1b33b78ed16e\") " Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.467577 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v944k\" (UniqueName: \"kubernetes.io/projected/5c988319-ece3-4016-b491-1b33b78ed16e-kube-api-access-v944k\") pod \"5c988319-ece3-4016-b491-1b33b78ed16e\" (UID: \"5c988319-ece3-4016-b491-1b33b78ed16e\") " Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.467597 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5c988319-ece3-4016-b491-1b33b78ed16e-scripts\") pod \"5c988319-ece3-4016-b491-1b33b78ed16e\" (UID: \"5c988319-ece3-4016-b491-1b33b78ed16e\") " Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.467628 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c988319-ece3-4016-b491-1b33b78ed16e-config-data\") pod \"5c988319-ece3-4016-b491-1b33b78ed16e\" (UID: \"5c988319-ece3-4016-b491-1b33b78ed16e\") " Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.467659 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5c988319-ece3-4016-b491-1b33b78ed16e-log-httpd\") pod \"5c988319-ece3-4016-b491-1b33b78ed16e\" (UID: \"5c988319-ece3-4016-b491-1b33b78ed16e\") " Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.470150 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"ded19e77-da51-4081-b20f-03f986cc412b","Type":"ContainerStarted","Data":"e899f6cc9b5ec4a4e564a21267746840385cf7bb7474bd60d135afef730a48f1"} Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.470215 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"ded19e77-da51-4081-b20f-03f986cc412b","Type":"ContainerStarted","Data":"e3a6447255ffd9001e6ac6854d2220b6e97607a7e07591ed42dcadf0cde58933"} Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.470226 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"ded19e77-da51-4081-b20f-03f986cc412b","Type":"ContainerStarted","Data":"13c367dcb40892886fbe40c8d49d5ece7cfbbc333f3e62e221ec91c233c0f9dc"} Jan 20 17:14:36 crc 
kubenswrapper[4558]: I0120 17:14:36.468143 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5c988319-ece3-4016-b491-1b33b78ed16e-sg-core-conf-yaml\") pod \"5c988319-ece3-4016-b491-1b33b78ed16e\" (UID: \"5c988319-ece3-4016-b491-1b33b78ed16e\") " Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.471338 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c988319-ece3-4016-b491-1b33b78ed16e-combined-ca-bundle\") pod \"5c988319-ece3-4016-b491-1b33b78ed16e\" (UID: \"5c988319-ece3-4016-b491-1b33b78ed16e\") " Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.474599 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5c988319-ece3-4016-b491-1b33b78ed16e-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "5c988319-ece3-4016-b491-1b33b78ed16e" (UID: "5c988319-ece3-4016-b491-1b33b78ed16e"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.474966 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5c988319-ece3-4016-b491-1b33b78ed16e-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "5c988319-ece3-4016-b491-1b33b78ed16e" (UID: "5c988319-ece3-4016-b491-1b33b78ed16e"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.484912 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"ad371747-3ff0-49b0-b2a6-33d7bb7cd16b","Type":"ContainerStarted","Data":"9c502ea7204a81d002cfc4ba30da06e438661fd039b6fbbda85906d12fb85a6a"} Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.489604 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.489832 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c988319-ece3-4016-b491-1b33b78ed16e-scripts" (OuterVolumeSpecName: "scripts") pod "5c988319-ece3-4016-b491-1b33b78ed16e" (UID: "5c988319-ece3-4016-b491-1b33b78ed16e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.494945 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5c988319-ece3-4016-b491-1b33b78ed16e-kube-api-access-v944k" (OuterVolumeSpecName: "kube-api-access-v944k") pod "5c988319-ece3-4016-b491-1b33b78ed16e" (UID: "5c988319-ece3-4016-b491-1b33b78ed16e"). InnerVolumeSpecName "kube-api-access-v944k". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.500520 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"40f1774e-3159-4036-9016-2dfdb11c3a73","Type":"ContainerStarted","Data":"7c0819b9b637338e1da79f8e2bfa8d5ca87e4c444ed30a4a5130b7d3785d1b24"} Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.500594 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"40f1774e-3159-4036-9016-2dfdb11c3a73","Type":"ContainerStarted","Data":"a1f5b2bb1811a8e501161a3d7ad7f4e65976deef70ca7fcc27658bc2cbe72cd5"} Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.515150 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-qhsxm" event={"ID":"d0da5348-6caa-4cc9-b170-9de34ebda4a1","Type":"ContainerDied","Data":"83e3e29ee80732913db4c9cde4658f19bfc112c217c474511818746a902d6629"} Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.515214 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="83e3e29ee80732913db4c9cde4658f19bfc112c217c474511818746a902d6629" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.515282 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-qhsxm" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.523269 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-api-0" podStartSLOduration=2.523254969 podStartE2EDuration="2.523254969s" podCreationTimestamp="2026-01-20 17:14:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:14:36.496683282 +0000 UTC m=+1970.257021269" watchObservedRunningTime="2026-01-20 17:14:36.523254969 +0000 UTC m=+1970.283592926" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.523378 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/watcher-decision-engine-0" event={"ID":"f47cb9e2-2306-494d-94ff-b60393a9413e","Type":"ContainerDied","Data":"14ba1f05070ba36b446d53c2741a503c6e949900568de158a24d243b2fee06d8"} Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.523409 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/watcher-decision-engine-0" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.523424 4558 scope.go:117] "RemoveContainer" containerID="2100b7c749ddb30bbb158352b085f32659152dcc98b26a89f4ae8fd0e68b79ce" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.534121 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.547928 4558 generic.go:334] "Generic (PLEG): container finished" podID="5c988319-ece3-4016-b491-1b33b78ed16e" containerID="cafc05a16acc6bf3c534e283f0aef442c9ded6ca2cdb82522472318623e4b9ea" exitCode=0 Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.547988 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"5c988319-ece3-4016-b491-1b33b78ed16e","Type":"ContainerDied","Data":"cafc05a16acc6bf3c534e283f0aef442c9ded6ca2cdb82522472318623e4b9ea"} Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.548010 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"5c988319-ece3-4016-b491-1b33b78ed16e","Type":"ContainerDied","Data":"39faf509844bb66dd0720fd2821cd11a541c4e8a2d4eb29534bbddc6a0ed755a"} Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.548237 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.549439 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-metadata-0" podStartSLOduration=2.549411906 podStartE2EDuration="2.549411906s" podCreationTimestamp="2026-01-20 17:14:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:14:36.52522847 +0000 UTC m=+1970.285566438" watchObservedRunningTime="2026-01-20 17:14:36.549411906 +0000 UTC m=+1970.309749873" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.555822 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c988319-ece3-4016-b491-1b33b78ed16e-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "5c988319-ece3-4016-b491-1b33b78ed16e" (UID: "5c988319-ece3-4016-b491-1b33b78ed16e"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.561024 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"217a2ebc-8331-4a6f-9113-2c813563a2b8","Type":"ContainerStarted","Data":"fbc48c8bed18e946f067cb8bb8a942077ad48821a9531de67a200f0aa5a7d292"} Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.561455 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.574543 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5c988319-ece3-4016-b491-1b33b78ed16e-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.574564 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5c988319-ece3-4016-b491-1b33b78ed16e-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.574574 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v944k\" (UniqueName: \"kubernetes.io/projected/5c988319-ece3-4016-b491-1b33b78ed16e-kube-api-access-v944k\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.574584 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5c988319-ece3-4016-b491-1b33b78ed16e-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.574594 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5c988319-ece3-4016-b491-1b33b78ed16e-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.583356 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5f22129e-7bdd-41c8-8429-1835bdeac056" path="/var/lib/kubelet/pods/5f22129e-7bdd-41c8-8429-1835bdeac056/volumes" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.602867 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c988319-ece3-4016-b491-1b33b78ed16e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5c988319-ece3-4016-b491-1b33b78ed16e" (UID: "5c988319-ece3-4016-b491-1b33b78ed16e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.610390 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c988319-ece3-4016-b491-1b33b78ed16e-config-data" (OuterVolumeSpecName: "config-data") pod "5c988319-ece3-4016-b491-1b33b78ed16e" (UID: "5c988319-ece3-4016-b491-1b33b78ed16e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.612539 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podStartSLOduration=1.6125128439999998 podStartE2EDuration="1.612512844s" podCreationTimestamp="2026-01-20 17:14:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:14:36.576413741 +0000 UTC m=+1970.336751708" watchObservedRunningTime="2026-01-20 17:14:36.612512844 +0000 UTC m=+1970.372850821" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.638392 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.656299 4558 scope.go:117] "RemoveContainer" containerID="e70948baf660f38543ddb5acaa11cca1652348a68137c5b3ac38f88134bf299c" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.678301 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c988319-ece3-4016-b491-1b33b78ed16e-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.685685 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c988319-ece3-4016-b491-1b33b78ed16e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.688713 4558 scope.go:117] "RemoveContainer" containerID="e91939ae15b50ee2cb9f127b9219736a27901de5d5be68878bb47fc2b64c9dc5" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.703444 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/watcher-decision-engine-0"] Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.713221 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/watcher-decision-engine-0"] Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.716911 4558 scope.go:117] "RemoveContainer" containerID="4fb572825219a0fc20cb0370fc86ca598777392dfec9d5d57c869d049806523b" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.719472 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/watcher-decision-engine-0"] Jan 20 17:14:36 crc kubenswrapper[4558]: E0120 17:14:36.720091 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d0da5348-6caa-4cc9-b170-9de34ebda4a1" containerName="nova-manage" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.720558 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0da5348-6caa-4cc9-b170-9de34ebda4a1" containerName="nova-manage" Jan 20 17:14:36 crc kubenswrapper[4558]: E0120 17:14:36.720633 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c988319-ece3-4016-b491-1b33b78ed16e" containerName="ceilometer-central-agent" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.720694 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c988319-ece3-4016-b491-1b33b78ed16e" containerName="ceilometer-central-agent" Jan 20 17:14:36 crc kubenswrapper[4558]: E0120 17:14:36.720789 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c988319-ece3-4016-b491-1b33b78ed16e" containerName="sg-core" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.720862 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c988319-ece3-4016-b491-1b33b78ed16e" 
containerName="sg-core" Jan 20 17:14:36 crc kubenswrapper[4558]: E0120 17:14:36.721770 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f47cb9e2-2306-494d-94ff-b60393a9413e" containerName="watcher-decision-engine" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.721990 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f47cb9e2-2306-494d-94ff-b60393a9413e" containerName="watcher-decision-engine" Jan 20 17:14:36 crc kubenswrapper[4558]: E0120 17:14:36.722077 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c988319-ece3-4016-b491-1b33b78ed16e" containerName="ceilometer-notification-agent" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.722123 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c988319-ece3-4016-b491-1b33b78ed16e" containerName="ceilometer-notification-agent" Jan 20 17:14:36 crc kubenswrapper[4558]: E0120 17:14:36.722194 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c988319-ece3-4016-b491-1b33b78ed16e" containerName="proxy-httpd" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.722238 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c988319-ece3-4016-b491-1b33b78ed16e" containerName="proxy-httpd" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.722769 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c988319-ece3-4016-b491-1b33b78ed16e" containerName="proxy-httpd" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.722843 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f47cb9e2-2306-494d-94ff-b60393a9413e" containerName="watcher-decision-engine" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.722935 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c988319-ece3-4016-b491-1b33b78ed16e" containerName="sg-core" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.722990 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d0da5348-6caa-4cc9-b170-9de34ebda4a1" containerName="nova-manage" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.723037 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c988319-ece3-4016-b491-1b33b78ed16e" containerName="ceilometer-central-agent" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.723093 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c988319-ece3-4016-b491-1b33b78ed16e" containerName="ceilometer-notification-agent" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.724600 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/watcher-decision-engine-0" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.727078 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"watcher-decision-engine-config-data" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.728436 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/watcher-decision-engine-0"] Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.754767 4558 scope.go:117] "RemoveContainer" containerID="cafc05a16acc6bf3c534e283f0aef442c9ded6ca2cdb82522472318623e4b9ea" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.775582 4558 scope.go:117] "RemoveContainer" containerID="e70948baf660f38543ddb5acaa11cca1652348a68137c5b3ac38f88134bf299c" Jan 20 17:14:36 crc kubenswrapper[4558]: E0120 17:14:36.776330 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e70948baf660f38543ddb5acaa11cca1652348a68137c5b3ac38f88134bf299c\": container with ID starting with e70948baf660f38543ddb5acaa11cca1652348a68137c5b3ac38f88134bf299c not found: ID does not exist" containerID="e70948baf660f38543ddb5acaa11cca1652348a68137c5b3ac38f88134bf299c" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.776379 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e70948baf660f38543ddb5acaa11cca1652348a68137c5b3ac38f88134bf299c"} err="failed to get container status \"e70948baf660f38543ddb5acaa11cca1652348a68137c5b3ac38f88134bf299c\": rpc error: code = NotFound desc = could not find container \"e70948baf660f38543ddb5acaa11cca1652348a68137c5b3ac38f88134bf299c\": container with ID starting with e70948baf660f38543ddb5acaa11cca1652348a68137c5b3ac38f88134bf299c not found: ID does not exist" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.776408 4558 scope.go:117] "RemoveContainer" containerID="e91939ae15b50ee2cb9f127b9219736a27901de5d5be68878bb47fc2b64c9dc5" Jan 20 17:14:36 crc kubenswrapper[4558]: E0120 17:14:36.776889 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e91939ae15b50ee2cb9f127b9219736a27901de5d5be68878bb47fc2b64c9dc5\": container with ID starting with e91939ae15b50ee2cb9f127b9219736a27901de5d5be68878bb47fc2b64c9dc5 not found: ID does not exist" containerID="e91939ae15b50ee2cb9f127b9219736a27901de5d5be68878bb47fc2b64c9dc5" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.776911 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e91939ae15b50ee2cb9f127b9219736a27901de5d5be68878bb47fc2b64c9dc5"} err="failed to get container status \"e91939ae15b50ee2cb9f127b9219736a27901de5d5be68878bb47fc2b64c9dc5\": rpc error: code = NotFound desc = could not find container \"e91939ae15b50ee2cb9f127b9219736a27901de5d5be68878bb47fc2b64c9dc5\": container with ID starting with e91939ae15b50ee2cb9f127b9219736a27901de5d5be68878bb47fc2b64c9dc5 not found: ID does not exist" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.776929 4558 scope.go:117] "RemoveContainer" containerID="4fb572825219a0fc20cb0370fc86ca598777392dfec9d5d57c869d049806523b" Jan 20 17:14:36 crc kubenswrapper[4558]: E0120 17:14:36.777468 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4fb572825219a0fc20cb0370fc86ca598777392dfec9d5d57c869d049806523b\": container with ID 
starting with 4fb572825219a0fc20cb0370fc86ca598777392dfec9d5d57c869d049806523b not found: ID does not exist" containerID="4fb572825219a0fc20cb0370fc86ca598777392dfec9d5d57c869d049806523b" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.777493 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4fb572825219a0fc20cb0370fc86ca598777392dfec9d5d57c869d049806523b"} err="failed to get container status \"4fb572825219a0fc20cb0370fc86ca598777392dfec9d5d57c869d049806523b\": rpc error: code = NotFound desc = could not find container \"4fb572825219a0fc20cb0370fc86ca598777392dfec9d5d57c869d049806523b\": container with ID starting with 4fb572825219a0fc20cb0370fc86ca598777392dfec9d5d57c869d049806523b not found: ID does not exist" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.777507 4558 scope.go:117] "RemoveContainer" containerID="cafc05a16acc6bf3c534e283f0aef442c9ded6ca2cdb82522472318623e4b9ea" Jan 20 17:14:36 crc kubenswrapper[4558]: E0120 17:14:36.777718 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cafc05a16acc6bf3c534e283f0aef442c9ded6ca2cdb82522472318623e4b9ea\": container with ID starting with cafc05a16acc6bf3c534e283f0aef442c9ded6ca2cdb82522472318623e4b9ea not found: ID does not exist" containerID="cafc05a16acc6bf3c534e283f0aef442c9ded6ca2cdb82522472318623e4b9ea" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.777740 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cafc05a16acc6bf3c534e283f0aef442c9ded6ca2cdb82522472318623e4b9ea"} err="failed to get container status \"cafc05a16acc6bf3c534e283f0aef442c9ded6ca2cdb82522472318623e4b9ea\": rpc error: code = NotFound desc = could not find container \"cafc05a16acc6bf3c534e283f0aef442c9ded6ca2cdb82522472318623e4b9ea\": container with ID starting with cafc05a16acc6bf3c534e283f0aef442c9ded6ca2cdb82522472318623e4b9ea not found: ID does not exist" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.877397 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.883531 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.890625 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2510d24d-b593-4196-92bc-ddde09cc7c15-logs\") pod \"watcher-decision-engine-0\" (UID: \"2510d24d-b593-4196-92bc-ddde09cc7c15\") " pod="openstack-kuttl-tests/watcher-decision-engine-0" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.890676 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2510d24d-b593-4196-92bc-ddde09cc7c15-combined-ca-bundle\") pod \"watcher-decision-engine-0\" (UID: \"2510d24d-b593-4196-92bc-ddde09cc7c15\") " pod="openstack-kuttl-tests/watcher-decision-engine-0" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.890862 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2510d24d-b593-4196-92bc-ddde09cc7c15-config-data\") pod \"watcher-decision-engine-0\" (UID: \"2510d24d-b593-4196-92bc-ddde09cc7c15\") " 
pod="openstack-kuttl-tests/watcher-decision-engine-0" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.890932 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/2510d24d-b593-4196-92bc-ddde09cc7c15-custom-prometheus-ca\") pod \"watcher-decision-engine-0\" (UID: \"2510d24d-b593-4196-92bc-ddde09cc7c15\") " pod="openstack-kuttl-tests/watcher-decision-engine-0" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.890971 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jjngf\" (UniqueName: \"kubernetes.io/projected/2510d24d-b593-4196-92bc-ddde09cc7c15-kube-api-access-jjngf\") pod \"watcher-decision-engine-0\" (UID: \"2510d24d-b593-4196-92bc-ddde09cc7c15\") " pod="openstack-kuttl-tests/watcher-decision-engine-0" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.900109 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.902405 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.904333 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.905450 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.918865 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.992751 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/2510d24d-b593-4196-92bc-ddde09cc7c15-custom-prometheus-ca\") pod \"watcher-decision-engine-0\" (UID: \"2510d24d-b593-4196-92bc-ddde09cc7c15\") " pod="openstack-kuttl-tests/watcher-decision-engine-0" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.992809 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9b48e62d-af6d-4eb9-b491-766f773de980-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"9b48e62d-af6d-4eb9-b491-766f773de980\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.992843 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jjngf\" (UniqueName: \"kubernetes.io/projected/2510d24d-b593-4196-92bc-ddde09cc7c15-kube-api-access-jjngf\") pod \"watcher-decision-engine-0\" (UID: \"2510d24d-b593-4196-92bc-ddde09cc7c15\") " pod="openstack-kuttl-tests/watcher-decision-engine-0" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.992869 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b48e62d-af6d-4eb9-b491-766f773de980-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"9b48e62d-af6d-4eb9-b491-766f773de980\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.993196 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/9b48e62d-af6d-4eb9-b491-766f773de980-config-data\") pod \"ceilometer-0\" (UID: \"9b48e62d-af6d-4eb9-b491-766f773de980\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.993257 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2510d24d-b593-4196-92bc-ddde09cc7c15-logs\") pod \"watcher-decision-engine-0\" (UID: \"2510d24d-b593-4196-92bc-ddde09cc7c15\") " pod="openstack-kuttl-tests/watcher-decision-engine-0" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.993290 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2510d24d-b593-4196-92bc-ddde09cc7c15-combined-ca-bundle\") pod \"watcher-decision-engine-0\" (UID: \"2510d24d-b593-4196-92bc-ddde09cc7c15\") " pod="openstack-kuttl-tests/watcher-decision-engine-0" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.993578 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9b48e62d-af6d-4eb9-b491-766f773de980-run-httpd\") pod \"ceilometer-0\" (UID: \"9b48e62d-af6d-4eb9-b491-766f773de980\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.993636 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-trkz4\" (UniqueName: \"kubernetes.io/projected/9b48e62d-af6d-4eb9-b491-766f773de980-kube-api-access-trkz4\") pod \"ceilometer-0\" (UID: \"9b48e62d-af6d-4eb9-b491-766f773de980\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.993702 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2510d24d-b593-4196-92bc-ddde09cc7c15-logs\") pod \"watcher-decision-engine-0\" (UID: \"2510d24d-b593-4196-92bc-ddde09cc7c15\") " pod="openstack-kuttl-tests/watcher-decision-engine-0" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.993772 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9b48e62d-af6d-4eb9-b491-766f773de980-log-httpd\") pod \"ceilometer-0\" (UID: \"9b48e62d-af6d-4eb9-b491-766f773de980\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.993842 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9b48e62d-af6d-4eb9-b491-766f773de980-scripts\") pod \"ceilometer-0\" (UID: \"9b48e62d-af6d-4eb9-b491-766f773de980\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:36 crc kubenswrapper[4558]: I0120 17:14:36.993875 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2510d24d-b593-4196-92bc-ddde09cc7c15-config-data\") pod \"watcher-decision-engine-0\" (UID: \"2510d24d-b593-4196-92bc-ddde09cc7c15\") " pod="openstack-kuttl-tests/watcher-decision-engine-0" Jan 20 17:14:37 crc kubenswrapper[4558]: I0120 17:14:36.999928 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2510d24d-b593-4196-92bc-ddde09cc7c15-combined-ca-bundle\") pod \"watcher-decision-engine-0\" (UID: 
\"2510d24d-b593-4196-92bc-ddde09cc7c15\") " pod="openstack-kuttl-tests/watcher-decision-engine-0" Jan 20 17:14:37 crc kubenswrapper[4558]: I0120 17:14:37.000602 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2510d24d-b593-4196-92bc-ddde09cc7c15-config-data\") pod \"watcher-decision-engine-0\" (UID: \"2510d24d-b593-4196-92bc-ddde09cc7c15\") " pod="openstack-kuttl-tests/watcher-decision-engine-0" Jan 20 17:14:37 crc kubenswrapper[4558]: I0120 17:14:37.000752 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/2510d24d-b593-4196-92bc-ddde09cc7c15-custom-prometheus-ca\") pod \"watcher-decision-engine-0\" (UID: \"2510d24d-b593-4196-92bc-ddde09cc7c15\") " pod="openstack-kuttl-tests/watcher-decision-engine-0" Jan 20 17:14:37 crc kubenswrapper[4558]: I0120 17:14:37.008584 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jjngf\" (UniqueName: \"kubernetes.io/projected/2510d24d-b593-4196-92bc-ddde09cc7c15-kube-api-access-jjngf\") pod \"watcher-decision-engine-0\" (UID: \"2510d24d-b593-4196-92bc-ddde09cc7c15\") " pod="openstack-kuttl-tests/watcher-decision-engine-0" Jan 20 17:14:37 crc kubenswrapper[4558]: I0120 17:14:37.044250 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/watcher-decision-engine-0" Jan 20 17:14:37 crc kubenswrapper[4558]: I0120 17:14:37.094929 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9b48e62d-af6d-4eb9-b491-766f773de980-run-httpd\") pod \"ceilometer-0\" (UID: \"9b48e62d-af6d-4eb9-b491-766f773de980\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:37 crc kubenswrapper[4558]: I0120 17:14:37.094977 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-trkz4\" (UniqueName: \"kubernetes.io/projected/9b48e62d-af6d-4eb9-b491-766f773de980-kube-api-access-trkz4\") pod \"ceilometer-0\" (UID: \"9b48e62d-af6d-4eb9-b491-766f773de980\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:37 crc kubenswrapper[4558]: I0120 17:14:37.095009 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9b48e62d-af6d-4eb9-b491-766f773de980-log-httpd\") pod \"ceilometer-0\" (UID: \"9b48e62d-af6d-4eb9-b491-766f773de980\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:37 crc kubenswrapper[4558]: I0120 17:14:37.095049 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9b48e62d-af6d-4eb9-b491-766f773de980-scripts\") pod \"ceilometer-0\" (UID: \"9b48e62d-af6d-4eb9-b491-766f773de980\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:37 crc kubenswrapper[4558]: I0120 17:14:37.095103 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9b48e62d-af6d-4eb9-b491-766f773de980-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"9b48e62d-af6d-4eb9-b491-766f773de980\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:37 crc kubenswrapper[4558]: I0120 17:14:37.095130 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b48e62d-af6d-4eb9-b491-766f773de980-combined-ca-bundle\") 
pod \"ceilometer-0\" (UID: \"9b48e62d-af6d-4eb9-b491-766f773de980\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:37 crc kubenswrapper[4558]: I0120 17:14:37.095199 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b48e62d-af6d-4eb9-b491-766f773de980-config-data\") pod \"ceilometer-0\" (UID: \"9b48e62d-af6d-4eb9-b491-766f773de980\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:37 crc kubenswrapper[4558]: I0120 17:14:37.095471 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9b48e62d-af6d-4eb9-b491-766f773de980-run-httpd\") pod \"ceilometer-0\" (UID: \"9b48e62d-af6d-4eb9-b491-766f773de980\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:37 crc kubenswrapper[4558]: I0120 17:14:37.095978 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9b48e62d-af6d-4eb9-b491-766f773de980-log-httpd\") pod \"ceilometer-0\" (UID: \"9b48e62d-af6d-4eb9-b491-766f773de980\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:37 crc kubenswrapper[4558]: I0120 17:14:37.099339 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b48e62d-af6d-4eb9-b491-766f773de980-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"9b48e62d-af6d-4eb9-b491-766f773de980\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:37 crc kubenswrapper[4558]: I0120 17:14:37.100395 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b48e62d-af6d-4eb9-b491-766f773de980-config-data\") pod \"ceilometer-0\" (UID: \"9b48e62d-af6d-4eb9-b491-766f773de980\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:37 crc kubenswrapper[4558]: I0120 17:14:37.102689 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9b48e62d-af6d-4eb9-b491-766f773de980-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"9b48e62d-af6d-4eb9-b491-766f773de980\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:37 crc kubenswrapper[4558]: I0120 17:14:37.103557 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9b48e62d-af6d-4eb9-b491-766f773de980-scripts\") pod \"ceilometer-0\" (UID: \"9b48e62d-af6d-4eb9-b491-766f773de980\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:37 crc kubenswrapper[4558]: I0120 17:14:37.107912 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-trkz4\" (UniqueName: \"kubernetes.io/projected/9b48e62d-af6d-4eb9-b491-766f773de980-kube-api-access-trkz4\") pod \"ceilometer-0\" (UID: \"9b48e62d-af6d-4eb9-b491-766f773de980\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:37 crc kubenswrapper[4558]: I0120 17:14:37.220699 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:37 crc kubenswrapper[4558]: I0120 17:14:37.475266 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/watcher-decision-engine-0"] Jan 20 17:14:37 crc kubenswrapper[4558]: I0120 17:14:37.575603 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"217a2ebc-8331-4a6f-9113-2c813563a2b8","Type":"ContainerStarted","Data":"521ed414fb8023645d2152fb8453d458e32e8655c7e7d2cd8ec86ea164244611"} Jan 20 17:14:37 crc kubenswrapper[4558]: I0120 17:14:37.587273 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"ad371747-3ff0-49b0-b2a6-33d7bb7cd16b","Type":"ContainerStarted","Data":"a7f915ce1e5f59e4f41a36080909e1ccc6e29dbe8d312e443b304657ef70a93d"} Jan 20 17:14:37 crc kubenswrapper[4558]: I0120 17:14:37.589991 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/watcher-decision-engine-0" event={"ID":"2510d24d-b593-4196-92bc-ddde09cc7c15","Type":"ContainerStarted","Data":"bce59ba9886161320d99f71934595cd7b5badc8291ab33b3064137614c70f2f1"} Jan 20 17:14:37 crc kubenswrapper[4558]: I0120 17:14:37.611419 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" podStartSLOduration=2.611405519 podStartE2EDuration="2.611405519s" podCreationTimestamp="2026-01-20 17:14:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:14:37.602686604 +0000 UTC m=+1971.363024570" watchObservedRunningTime="2026-01-20 17:14:37.611405519 +0000 UTC m=+1971.371743486" Jan 20 17:14:37 crc kubenswrapper[4558]: I0120 17:14:37.656955 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:14:38 crc kubenswrapper[4558]: I0120 17:14:38.574922 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5c988319-ece3-4016-b491-1b33b78ed16e" path="/var/lib/kubelet/pods/5c988319-ece3-4016-b491-1b33b78ed16e/volumes" Jan 20 17:14:38 crc kubenswrapper[4558]: I0120 17:14:38.575994 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f47cb9e2-2306-494d-94ff-b60393a9413e" path="/var/lib/kubelet/pods/f47cb9e2-2306-494d-94ff-b60393a9413e/volumes" Jan 20 17:14:38 crc kubenswrapper[4558]: I0120 17:14:38.599986 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/watcher-decision-engine-0" event={"ID":"2510d24d-b593-4196-92bc-ddde09cc7c15","Type":"ContainerStarted","Data":"f630bf27700ad23f76cb69b14ae9eed65e06889873b8b21234996b7a87609328"} Jan 20 17:14:38 crc kubenswrapper[4558]: I0120 17:14:38.601704 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"9b48e62d-af6d-4eb9-b491-766f773de980","Type":"ContainerStarted","Data":"6493127d7274540c9787e4ceef28212ffed209d34f09092dfba89ffe109e8cc7"} Jan 20 17:14:38 crc kubenswrapper[4558]: I0120 17:14:38.601745 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"9b48e62d-af6d-4eb9-b491-766f773de980","Type":"ContainerStarted","Data":"b71edb754077779738936bd111a5b8e161187e0d42e500f88ccb77801e567749"} Jan 20 17:14:38 crc kubenswrapper[4558]: I0120 17:14:38.601812 4558 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack-kuttl-tests/nova-metadata-0" podUID="40f1774e-3159-4036-9016-2dfdb11c3a73" containerName="nova-metadata-log" containerID="cri-o://a1f5b2bb1811a8e501161a3d7ad7f4e65976deef70ca7fcc27658bc2cbe72cd5" gracePeriod=30 Jan 20 17:14:38 crc kubenswrapper[4558]: I0120 17:14:38.601885 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="40f1774e-3159-4036-9016-2dfdb11c3a73" containerName="nova-metadata-metadata" containerID="cri-o://7c0819b9b637338e1da79f8e2bfa8d5ca87e4c444ed30a4a5130b7d3785d1b24" gracePeriod=30 Jan 20 17:14:38 crc kubenswrapper[4558]: I0120 17:14:38.602456 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="ded19e77-da51-4081-b20f-03f986cc412b" containerName="nova-api-log" containerID="cri-o://e3a6447255ffd9001e6ac6854d2220b6e97607a7e07591ed42dcadf0cde58933" gracePeriod=30 Jan 20 17:14:38 crc kubenswrapper[4558]: I0120 17:14:38.602572 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="ded19e77-da51-4081-b20f-03f986cc412b" containerName="nova-api-api" containerID="cri-o://e899f6cc9b5ec4a4e564a21267746840385cf7bb7474bd60d135afef730a48f1" gracePeriod=30 Jan 20 17:14:38 crc kubenswrapper[4558]: I0120 17:14:38.623062 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/watcher-decision-engine-0" podStartSLOduration=2.623040966 podStartE2EDuration="2.623040966s" podCreationTimestamp="2026-01-20 17:14:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:14:38.617476742 +0000 UTC m=+1972.377814709" watchObservedRunningTime="2026-01-20 17:14:38.623040966 +0000 UTC m=+1972.383378932" Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.120676 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.178145 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.242067 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vgkbb\" (UniqueName: \"kubernetes.io/projected/ded19e77-da51-4081-b20f-03f986cc412b-kube-api-access-vgkbb\") pod \"ded19e77-da51-4081-b20f-03f986cc412b\" (UID: \"ded19e77-da51-4081-b20f-03f986cc412b\") " Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.242190 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ded19e77-da51-4081-b20f-03f986cc412b-combined-ca-bundle\") pod \"ded19e77-da51-4081-b20f-03f986cc412b\" (UID: \"ded19e77-da51-4081-b20f-03f986cc412b\") " Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.242270 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ded19e77-da51-4081-b20f-03f986cc412b-logs\") pod \"ded19e77-da51-4081-b20f-03f986cc412b\" (UID: \"ded19e77-da51-4081-b20f-03f986cc412b\") " Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.242328 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ded19e77-da51-4081-b20f-03f986cc412b-config-data\") pod \"ded19e77-da51-4081-b20f-03f986cc412b\" (UID: \"ded19e77-da51-4081-b20f-03f986cc412b\") " Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.242642 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ded19e77-da51-4081-b20f-03f986cc412b-logs" (OuterVolumeSpecName: "logs") pod "ded19e77-da51-4081-b20f-03f986cc412b" (UID: "ded19e77-da51-4081-b20f-03f986cc412b"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.243619 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ded19e77-da51-4081-b20f-03f986cc412b-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.245885 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ded19e77-da51-4081-b20f-03f986cc412b-kube-api-access-vgkbb" (OuterVolumeSpecName: "kube-api-access-vgkbb") pod "ded19e77-da51-4081-b20f-03f986cc412b" (UID: "ded19e77-da51-4081-b20f-03f986cc412b"). InnerVolumeSpecName "kube-api-access-vgkbb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.264443 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ded19e77-da51-4081-b20f-03f986cc412b-config-data" (OuterVolumeSpecName: "config-data") pod "ded19e77-da51-4081-b20f-03f986cc412b" (UID: "ded19e77-da51-4081-b20f-03f986cc412b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.267827 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ded19e77-da51-4081-b20f-03f986cc412b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ded19e77-da51-4081-b20f-03f986cc412b" (UID: "ded19e77-da51-4081-b20f-03f986cc412b"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.344798 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40f1774e-3159-4036-9016-2dfdb11c3a73-combined-ca-bundle\") pod \"40f1774e-3159-4036-9016-2dfdb11c3a73\" (UID: \"40f1774e-3159-4036-9016-2dfdb11c3a73\") " Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.345037 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/40f1774e-3159-4036-9016-2dfdb11c3a73-logs\") pod \"40f1774e-3159-4036-9016-2dfdb11c3a73\" (UID: \"40f1774e-3159-4036-9016-2dfdb11c3a73\") " Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.345119 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/40f1774e-3159-4036-9016-2dfdb11c3a73-nova-metadata-tls-certs\") pod \"40f1774e-3159-4036-9016-2dfdb11c3a73\" (UID: \"40f1774e-3159-4036-9016-2dfdb11c3a73\") " Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.345237 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40f1774e-3159-4036-9016-2dfdb11c3a73-config-data\") pod \"40f1774e-3159-4036-9016-2dfdb11c3a73\" (UID: \"40f1774e-3159-4036-9016-2dfdb11c3a73\") " Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.345350 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vw8kl\" (UniqueName: \"kubernetes.io/projected/40f1774e-3159-4036-9016-2dfdb11c3a73-kube-api-access-vw8kl\") pod \"40f1774e-3159-4036-9016-2dfdb11c3a73\" (UID: \"40f1774e-3159-4036-9016-2dfdb11c3a73\") " Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.345370 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/40f1774e-3159-4036-9016-2dfdb11c3a73-logs" (OuterVolumeSpecName: "logs") pod "40f1774e-3159-4036-9016-2dfdb11c3a73" (UID: "40f1774e-3159-4036-9016-2dfdb11c3a73"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.347590 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vgkbb\" (UniqueName: \"kubernetes.io/projected/ded19e77-da51-4081-b20f-03f986cc412b-kube-api-access-vgkbb\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.347692 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ded19e77-da51-4081-b20f-03f986cc412b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.347722 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/40f1774e-3159-4036-9016-2dfdb11c3a73-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.347736 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ded19e77-da51-4081-b20f-03f986cc412b-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.351321 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/40f1774e-3159-4036-9016-2dfdb11c3a73-kube-api-access-vw8kl" (OuterVolumeSpecName: "kube-api-access-vw8kl") pod "40f1774e-3159-4036-9016-2dfdb11c3a73" (UID: "40f1774e-3159-4036-9016-2dfdb11c3a73"). InnerVolumeSpecName "kube-api-access-vw8kl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.370592 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40f1774e-3159-4036-9016-2dfdb11c3a73-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "40f1774e-3159-4036-9016-2dfdb11c3a73" (UID: "40f1774e-3159-4036-9016-2dfdb11c3a73"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.371874 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40f1774e-3159-4036-9016-2dfdb11c3a73-config-data" (OuterVolumeSpecName: "config-data") pod "40f1774e-3159-4036-9016-2dfdb11c3a73" (UID: "40f1774e-3159-4036-9016-2dfdb11c3a73"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.387308 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40f1774e-3159-4036-9016-2dfdb11c3a73-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "40f1774e-3159-4036-9016-2dfdb11c3a73" (UID: "40f1774e-3159-4036-9016-2dfdb11c3a73"). InnerVolumeSpecName "nova-metadata-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.466663 4558 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/40f1774e-3159-4036-9016-2dfdb11c3a73-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.467624 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40f1774e-3159-4036-9016-2dfdb11c3a73-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.467645 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vw8kl\" (UniqueName: \"kubernetes.io/projected/40f1774e-3159-4036-9016-2dfdb11c3a73-kube-api-access-vw8kl\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.467659 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40f1774e-3159-4036-9016-2dfdb11c3a73-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.614912 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"9b48e62d-af6d-4eb9-b491-766f773de980","Type":"ContainerStarted","Data":"4e41ea0254e75b461a1547d95c8aac810d14af69ad864a1c7606e805b4ce37ce"} Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.616937 4558 generic.go:334] "Generic (PLEG): container finished" podID="ded19e77-da51-4081-b20f-03f986cc412b" containerID="e899f6cc9b5ec4a4e564a21267746840385cf7bb7474bd60d135afef730a48f1" exitCode=0 Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.616971 4558 generic.go:334] "Generic (PLEG): container finished" podID="ded19e77-da51-4081-b20f-03f986cc412b" containerID="e3a6447255ffd9001e6ac6854d2220b6e97607a7e07591ed42dcadf0cde58933" exitCode=143 Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.617023 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"ded19e77-da51-4081-b20f-03f986cc412b","Type":"ContainerDied","Data":"e899f6cc9b5ec4a4e564a21267746840385cf7bb7474bd60d135afef730a48f1"} Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.617047 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"ded19e77-da51-4081-b20f-03f986cc412b","Type":"ContainerDied","Data":"e3a6447255ffd9001e6ac6854d2220b6e97607a7e07591ed42dcadf0cde58933"} Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.617059 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"ded19e77-da51-4081-b20f-03f986cc412b","Type":"ContainerDied","Data":"13c367dcb40892886fbe40c8d49d5ece7cfbbc333f3e62e221ec91c233c0f9dc"} Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.617083 4558 scope.go:117] "RemoveContainer" containerID="e899f6cc9b5ec4a4e564a21267746840385cf7bb7474bd60d135afef730a48f1" Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.617239 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.626447 4558 generic.go:334] "Generic (PLEG): container finished" podID="40f1774e-3159-4036-9016-2dfdb11c3a73" containerID="7c0819b9b637338e1da79f8e2bfa8d5ca87e4c444ed30a4a5130b7d3785d1b24" exitCode=0 Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.626496 4558 generic.go:334] "Generic (PLEG): container finished" podID="40f1774e-3159-4036-9016-2dfdb11c3a73" containerID="a1f5b2bb1811a8e501161a3d7ad7f4e65976deef70ca7fcc27658bc2cbe72cd5" exitCode=143 Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.626592 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.626609 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"40f1774e-3159-4036-9016-2dfdb11c3a73","Type":"ContainerDied","Data":"7c0819b9b637338e1da79f8e2bfa8d5ca87e4c444ed30a4a5130b7d3785d1b24"} Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.626762 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"40f1774e-3159-4036-9016-2dfdb11c3a73","Type":"ContainerDied","Data":"a1f5b2bb1811a8e501161a3d7ad7f4e65976deef70ca7fcc27658bc2cbe72cd5"} Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.626826 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"40f1774e-3159-4036-9016-2dfdb11c3a73","Type":"ContainerDied","Data":"aad0729df9d33527ca45cffae693f83a8aebb573cc85726dfc7c9c8b49405cff"} Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.647975 4558 scope.go:117] "RemoveContainer" containerID="e3a6447255ffd9001e6ac6854d2220b6e97607a7e07591ed42dcadf0cde58933" Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.678081 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.681777 4558 scope.go:117] "RemoveContainer" containerID="e899f6cc9b5ec4a4e564a21267746840385cf7bb7474bd60d135afef730a48f1" Jan 20 17:14:39 crc kubenswrapper[4558]: E0120 17:14:39.682341 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e899f6cc9b5ec4a4e564a21267746840385cf7bb7474bd60d135afef730a48f1\": container with ID starting with e899f6cc9b5ec4a4e564a21267746840385cf7bb7474bd60d135afef730a48f1 not found: ID does not exist" containerID="e899f6cc9b5ec4a4e564a21267746840385cf7bb7474bd60d135afef730a48f1" Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.682385 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e899f6cc9b5ec4a4e564a21267746840385cf7bb7474bd60d135afef730a48f1"} err="failed to get container status \"e899f6cc9b5ec4a4e564a21267746840385cf7bb7474bd60d135afef730a48f1\": rpc error: code = NotFound desc = could not find container \"e899f6cc9b5ec4a4e564a21267746840385cf7bb7474bd60d135afef730a48f1\": container with ID starting with e899f6cc9b5ec4a4e564a21267746840385cf7bb7474bd60d135afef730a48f1 not found: ID does not exist" Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.682415 4558 scope.go:117] "RemoveContainer" containerID="e3a6447255ffd9001e6ac6854d2220b6e97607a7e07591ed42dcadf0cde58933" Jan 20 17:14:39 crc kubenswrapper[4558]: E0120 17:14:39.682674 4558 log.go:32] "ContainerStatus from 
runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e3a6447255ffd9001e6ac6854d2220b6e97607a7e07591ed42dcadf0cde58933\": container with ID starting with e3a6447255ffd9001e6ac6854d2220b6e97607a7e07591ed42dcadf0cde58933 not found: ID does not exist" containerID="e3a6447255ffd9001e6ac6854d2220b6e97607a7e07591ed42dcadf0cde58933" Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.682699 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e3a6447255ffd9001e6ac6854d2220b6e97607a7e07591ed42dcadf0cde58933"} err="failed to get container status \"e3a6447255ffd9001e6ac6854d2220b6e97607a7e07591ed42dcadf0cde58933\": rpc error: code = NotFound desc = could not find container \"e3a6447255ffd9001e6ac6854d2220b6e97607a7e07591ed42dcadf0cde58933\": container with ID starting with e3a6447255ffd9001e6ac6854d2220b6e97607a7e07591ed42dcadf0cde58933 not found: ID does not exist" Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.682715 4558 scope.go:117] "RemoveContainer" containerID="e899f6cc9b5ec4a4e564a21267746840385cf7bb7474bd60d135afef730a48f1" Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.682926 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e899f6cc9b5ec4a4e564a21267746840385cf7bb7474bd60d135afef730a48f1"} err="failed to get container status \"e899f6cc9b5ec4a4e564a21267746840385cf7bb7474bd60d135afef730a48f1\": rpc error: code = NotFound desc = could not find container \"e899f6cc9b5ec4a4e564a21267746840385cf7bb7474bd60d135afef730a48f1\": container with ID starting with e899f6cc9b5ec4a4e564a21267746840385cf7bb7474bd60d135afef730a48f1 not found: ID does not exist" Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.682949 4558 scope.go:117] "RemoveContainer" containerID="e3a6447255ffd9001e6ac6854d2220b6e97607a7e07591ed42dcadf0cde58933" Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.683124 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e3a6447255ffd9001e6ac6854d2220b6e97607a7e07591ed42dcadf0cde58933"} err="failed to get container status \"e3a6447255ffd9001e6ac6854d2220b6e97607a7e07591ed42dcadf0cde58933\": rpc error: code = NotFound desc = could not find container \"e3a6447255ffd9001e6ac6854d2220b6e97607a7e07591ed42dcadf0cde58933\": container with ID starting with e3a6447255ffd9001e6ac6854d2220b6e97607a7e07591ed42dcadf0cde58933 not found: ID does not exist" Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.683144 4558 scope.go:117] "RemoveContainer" containerID="7c0819b9b637338e1da79f8e2bfa8d5ca87e4c444ed30a4a5130b7d3785d1b24" Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.715628 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.717778 4558 scope.go:117] "RemoveContainer" containerID="a1f5b2bb1811a8e501161a3d7ad7f4e65976deef70ca7fcc27658bc2cbe72cd5" Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.770029 4558 scope.go:117] "RemoveContainer" containerID="7c0819b9b637338e1da79f8e2bfa8d5ca87e4c444ed30a4a5130b7d3785d1b24" Jan 20 17:14:39 crc kubenswrapper[4558]: E0120 17:14:39.770511 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7c0819b9b637338e1da79f8e2bfa8d5ca87e4c444ed30a4a5130b7d3785d1b24\": container with ID starting with 7c0819b9b637338e1da79f8e2bfa8d5ca87e4c444ed30a4a5130b7d3785d1b24 
not found: ID does not exist" containerID="7c0819b9b637338e1da79f8e2bfa8d5ca87e4c444ed30a4a5130b7d3785d1b24" Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.770546 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7c0819b9b637338e1da79f8e2bfa8d5ca87e4c444ed30a4a5130b7d3785d1b24"} err="failed to get container status \"7c0819b9b637338e1da79f8e2bfa8d5ca87e4c444ed30a4a5130b7d3785d1b24\": rpc error: code = NotFound desc = could not find container \"7c0819b9b637338e1da79f8e2bfa8d5ca87e4c444ed30a4a5130b7d3785d1b24\": container with ID starting with 7c0819b9b637338e1da79f8e2bfa8d5ca87e4c444ed30a4a5130b7d3785d1b24 not found: ID does not exist" Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.770568 4558 scope.go:117] "RemoveContainer" containerID="a1f5b2bb1811a8e501161a3d7ad7f4e65976deef70ca7fcc27658bc2cbe72cd5" Jan 20 17:14:39 crc kubenswrapper[4558]: E0120 17:14:39.774247 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a1f5b2bb1811a8e501161a3d7ad7f4e65976deef70ca7fcc27658bc2cbe72cd5\": container with ID starting with a1f5b2bb1811a8e501161a3d7ad7f4e65976deef70ca7fcc27658bc2cbe72cd5 not found: ID does not exist" containerID="a1f5b2bb1811a8e501161a3d7ad7f4e65976deef70ca7fcc27658bc2cbe72cd5" Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.774284 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a1f5b2bb1811a8e501161a3d7ad7f4e65976deef70ca7fcc27658bc2cbe72cd5"} err="failed to get container status \"a1f5b2bb1811a8e501161a3d7ad7f4e65976deef70ca7fcc27658bc2cbe72cd5\": rpc error: code = NotFound desc = could not find container \"a1f5b2bb1811a8e501161a3d7ad7f4e65976deef70ca7fcc27658bc2cbe72cd5\": container with ID starting with a1f5b2bb1811a8e501161a3d7ad7f4e65976deef70ca7fcc27658bc2cbe72cd5 not found: ID does not exist" Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.774299 4558 scope.go:117] "RemoveContainer" containerID="7c0819b9b637338e1da79f8e2bfa8d5ca87e4c444ed30a4a5130b7d3785d1b24" Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.774357 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.778112 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7c0819b9b637338e1da79f8e2bfa8d5ca87e4c444ed30a4a5130b7d3785d1b24"} err="failed to get container status \"7c0819b9b637338e1da79f8e2bfa8d5ca87e4c444ed30a4a5130b7d3785d1b24\": rpc error: code = NotFound desc = could not find container \"7c0819b9b637338e1da79f8e2bfa8d5ca87e4c444ed30a4a5130b7d3785d1b24\": container with ID starting with 7c0819b9b637338e1da79f8e2bfa8d5ca87e4c444ed30a4a5130b7d3785d1b24 not found: ID does not exist" Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.778174 4558 scope.go:117] "RemoveContainer" containerID="a1f5b2bb1811a8e501161a3d7ad7f4e65976deef70ca7fcc27658bc2cbe72cd5" Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.778735 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a1f5b2bb1811a8e501161a3d7ad7f4e65976deef70ca7fcc27658bc2cbe72cd5"} err="failed to get container status \"a1f5b2bb1811a8e501161a3d7ad7f4e65976deef70ca7fcc27658bc2cbe72cd5\": rpc error: code = NotFound desc = could not find container \"a1f5b2bb1811a8e501161a3d7ad7f4e65976deef70ca7fcc27658bc2cbe72cd5\": container with ID starting with 
a1f5b2bb1811a8e501161a3d7ad7f4e65976deef70ca7fcc27658bc2cbe72cd5 not found: ID does not exist" Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.785985 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.794586 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:14:39 crc kubenswrapper[4558]: E0120 17:14:39.795076 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40f1774e-3159-4036-9016-2dfdb11c3a73" containerName="nova-metadata-metadata" Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.795098 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="40f1774e-3159-4036-9016-2dfdb11c3a73" containerName="nova-metadata-metadata" Jan 20 17:14:39 crc kubenswrapper[4558]: E0120 17:14:39.795108 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ded19e77-da51-4081-b20f-03f986cc412b" containerName="nova-api-api" Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.795116 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ded19e77-da51-4081-b20f-03f986cc412b" containerName="nova-api-api" Jan 20 17:14:39 crc kubenswrapper[4558]: E0120 17:14:39.795139 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ded19e77-da51-4081-b20f-03f986cc412b" containerName="nova-api-log" Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.795145 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ded19e77-da51-4081-b20f-03f986cc412b" containerName="nova-api-log" Jan 20 17:14:39 crc kubenswrapper[4558]: E0120 17:14:39.795192 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40f1774e-3159-4036-9016-2dfdb11c3a73" containerName="nova-metadata-log" Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.795222 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="40f1774e-3159-4036-9016-2dfdb11c3a73" containerName="nova-metadata-log" Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.795546 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ded19e77-da51-4081-b20f-03f986cc412b" containerName="nova-api-log" Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.795568 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="40f1774e-3159-4036-9016-2dfdb11c3a73" containerName="nova-metadata-log" Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.795582 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ded19e77-da51-4081-b20f-03f986cc412b" containerName="nova-api-api" Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.795590 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="40f1774e-3159-4036-9016-2dfdb11c3a73" containerName="nova-metadata-metadata" Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.796814 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.800007 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-api-config-data" Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.801083 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.806691 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.808457 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.812514 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-metadata-config-data" Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.813327 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-metadata-internal-svc" Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.821843 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.982880 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ndqp4\" (UniqueName: \"kubernetes.io/projected/5f2954ae-ca63-4cbe-963c-5721cdc086ed-kube-api-access-ndqp4\") pod \"nova-metadata-0\" (UID: \"5f2954ae-ca63-4cbe-963c-5721cdc086ed\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.983295 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f2954ae-ca63-4cbe-963c-5721cdc086ed-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"5f2954ae-ca63-4cbe-963c-5721cdc086ed\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.983332 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/412b9dee-e1e1-464b-9e5b-333df0634be7-logs\") pod \"nova-api-0\" (UID: \"412b9dee-e1e1-464b-9e5b-333df0634be7\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.983379 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f2954ae-ca63-4cbe-963c-5721cdc086ed-config-data\") pod \"nova-metadata-0\" (UID: \"5f2954ae-ca63-4cbe-963c-5721cdc086ed\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.983777 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q72z4\" (UniqueName: \"kubernetes.io/projected/412b9dee-e1e1-464b-9e5b-333df0634be7-kube-api-access-q72z4\") pod \"nova-api-0\" (UID: \"412b9dee-e1e1-464b-9e5b-333df0634be7\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.983864 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/412b9dee-e1e1-464b-9e5b-333df0634be7-combined-ca-bundle\") pod \"nova-api-0\" (UID: 
\"412b9dee-e1e1-464b-9e5b-333df0634be7\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.984082 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/412b9dee-e1e1-464b-9e5b-333df0634be7-config-data\") pod \"nova-api-0\" (UID: \"412b9dee-e1e1-464b-9e5b-333df0634be7\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.984341 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5f2954ae-ca63-4cbe-963c-5721cdc086ed-logs\") pod \"nova-metadata-0\" (UID: \"5f2954ae-ca63-4cbe-963c-5721cdc086ed\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:39 crc kubenswrapper[4558]: I0120 17:14:39.984375 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/5f2954ae-ca63-4cbe-963c-5721cdc086ed-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"5f2954ae-ca63-4cbe-963c-5721cdc086ed\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:40 crc kubenswrapper[4558]: I0120 17:14:40.086656 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/5f2954ae-ca63-4cbe-963c-5721cdc086ed-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"5f2954ae-ca63-4cbe-963c-5721cdc086ed\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:40 crc kubenswrapper[4558]: I0120 17:14:40.086700 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5f2954ae-ca63-4cbe-963c-5721cdc086ed-logs\") pod \"nova-metadata-0\" (UID: \"5f2954ae-ca63-4cbe-963c-5721cdc086ed\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:40 crc kubenswrapper[4558]: I0120 17:14:40.086782 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ndqp4\" (UniqueName: \"kubernetes.io/projected/5f2954ae-ca63-4cbe-963c-5721cdc086ed-kube-api-access-ndqp4\") pod \"nova-metadata-0\" (UID: \"5f2954ae-ca63-4cbe-963c-5721cdc086ed\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:40 crc kubenswrapper[4558]: I0120 17:14:40.086844 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f2954ae-ca63-4cbe-963c-5721cdc086ed-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"5f2954ae-ca63-4cbe-963c-5721cdc086ed\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:40 crc kubenswrapper[4558]: I0120 17:14:40.086870 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/412b9dee-e1e1-464b-9e5b-333df0634be7-logs\") pod \"nova-api-0\" (UID: \"412b9dee-e1e1-464b-9e5b-333df0634be7\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:40 crc kubenswrapper[4558]: I0120 17:14:40.086899 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f2954ae-ca63-4cbe-963c-5721cdc086ed-config-data\") pod \"nova-metadata-0\" (UID: \"5f2954ae-ca63-4cbe-963c-5721cdc086ed\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:40 crc kubenswrapper[4558]: I0120 17:14:40.086924 4558 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q72z4\" (UniqueName: \"kubernetes.io/projected/412b9dee-e1e1-464b-9e5b-333df0634be7-kube-api-access-q72z4\") pod \"nova-api-0\" (UID: \"412b9dee-e1e1-464b-9e5b-333df0634be7\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:40 crc kubenswrapper[4558]: I0120 17:14:40.086946 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/412b9dee-e1e1-464b-9e5b-333df0634be7-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"412b9dee-e1e1-464b-9e5b-333df0634be7\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:40 crc kubenswrapper[4558]: I0120 17:14:40.087005 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/412b9dee-e1e1-464b-9e5b-333df0634be7-config-data\") pod \"nova-api-0\" (UID: \"412b9dee-e1e1-464b-9e5b-333df0634be7\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:40 crc kubenswrapper[4558]: I0120 17:14:40.087410 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5f2954ae-ca63-4cbe-963c-5721cdc086ed-logs\") pod \"nova-metadata-0\" (UID: \"5f2954ae-ca63-4cbe-963c-5721cdc086ed\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:40 crc kubenswrapper[4558]: I0120 17:14:40.087543 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/412b9dee-e1e1-464b-9e5b-333df0634be7-logs\") pod \"nova-api-0\" (UID: \"412b9dee-e1e1-464b-9e5b-333df0634be7\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:40 crc kubenswrapper[4558]: I0120 17:14:40.093452 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f2954ae-ca63-4cbe-963c-5721cdc086ed-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"5f2954ae-ca63-4cbe-963c-5721cdc086ed\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:40 crc kubenswrapper[4558]: I0120 17:14:40.093844 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/412b9dee-e1e1-464b-9e5b-333df0634be7-config-data\") pod \"nova-api-0\" (UID: \"412b9dee-e1e1-464b-9e5b-333df0634be7\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:40 crc kubenswrapper[4558]: I0120 17:14:40.094455 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f2954ae-ca63-4cbe-963c-5721cdc086ed-config-data\") pod \"nova-metadata-0\" (UID: \"5f2954ae-ca63-4cbe-963c-5721cdc086ed\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:40 crc kubenswrapper[4558]: I0120 17:14:40.098526 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/5f2954ae-ca63-4cbe-963c-5721cdc086ed-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"5f2954ae-ca63-4cbe-963c-5721cdc086ed\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:40 crc kubenswrapper[4558]: I0120 17:14:40.102225 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/412b9dee-e1e1-464b-9e5b-333df0634be7-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"412b9dee-e1e1-464b-9e5b-333df0634be7\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 
17:14:40 crc kubenswrapper[4558]: I0120 17:14:40.103490 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ndqp4\" (UniqueName: \"kubernetes.io/projected/5f2954ae-ca63-4cbe-963c-5721cdc086ed-kube-api-access-ndqp4\") pod \"nova-metadata-0\" (UID: \"5f2954ae-ca63-4cbe-963c-5721cdc086ed\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:40 crc kubenswrapper[4558]: I0120 17:14:40.104913 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q72z4\" (UniqueName: \"kubernetes.io/projected/412b9dee-e1e1-464b-9e5b-333df0634be7-kube-api-access-q72z4\") pod \"nova-api-0\" (UID: \"412b9dee-e1e1-464b-9e5b-333df0634be7\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:40 crc kubenswrapper[4558]: I0120 17:14:40.117093 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:40 crc kubenswrapper[4558]: I0120 17:14:40.132189 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:40 crc kubenswrapper[4558]: I0120 17:14:40.577224 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="40f1774e-3159-4036-9016-2dfdb11c3a73" path="/var/lib/kubelet/pods/40f1774e-3159-4036-9016-2dfdb11c3a73/volumes" Jan 20 17:14:40 crc kubenswrapper[4558]: I0120 17:14:40.578088 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ded19e77-da51-4081-b20f-03f986cc412b" path="/var/lib/kubelet/pods/ded19e77-da51-4081-b20f-03f986cc412b/volumes" Jan 20 17:14:40 crc kubenswrapper[4558]: I0120 17:14:40.602831 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:14:40 crc kubenswrapper[4558]: W0120 17:14:40.610501 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod412b9dee_e1e1_464b_9e5b_333df0634be7.slice/crio-a42c63492608dd76d3afa174b223df2cce20856e1ea2f06bc0ac3b98ebdaea3a WatchSource:0}: Error finding container a42c63492608dd76d3afa174b223df2cce20856e1ea2f06bc0ac3b98ebdaea3a: Status 404 returned error can't find the container with id a42c63492608dd76d3afa174b223df2cce20856e1ea2f06bc0ac3b98ebdaea3a Jan 20 17:14:40 crc kubenswrapper[4558]: I0120 17:14:40.641868 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"9b48e62d-af6d-4eb9-b491-766f773de980","Type":"ContainerStarted","Data":"7c73bd509e0c42d2ec4674301759d5ad520f1bb21ab0ec927d4c4aafc592d56c"} Jan 20 17:14:40 crc kubenswrapper[4558]: I0120 17:14:40.642911 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"412b9dee-e1e1-464b-9e5b-333df0634be7","Type":"ContainerStarted","Data":"a42c63492608dd76d3afa174b223df2cce20856e1ea2f06bc0ac3b98ebdaea3a"} Jan 20 17:14:40 crc kubenswrapper[4558]: I0120 17:14:40.685834 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:14:40 crc kubenswrapper[4558]: W0120 17:14:40.698195 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5f2954ae_ca63_4cbe_963c_5721cdc086ed.slice/crio-1d815015bca759aa4b5f579f3d486ab04c2f5879bedd5051ac62b96ca4a0bb64 WatchSource:0}: Error finding container 1d815015bca759aa4b5f579f3d486ab04c2f5879bedd5051ac62b96ca4a0bb64: Status 404 returned error can't find the 
container with id 1d815015bca759aa4b5f579f3d486ab04c2f5879bedd5051ac62b96ca4a0bb64 Jan 20 17:14:40 crc kubenswrapper[4558]: I0120 17:14:40.807249 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:14:41 crc kubenswrapper[4558]: I0120 17:14:41.661078 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"5f2954ae-ca63-4cbe-963c-5721cdc086ed","Type":"ContainerStarted","Data":"8f366bcfc90f3917e32fc1bad9f262fda1ff8f03750cd1c1697bf5061796443a"} Jan 20 17:14:41 crc kubenswrapper[4558]: I0120 17:14:41.661600 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"5f2954ae-ca63-4cbe-963c-5721cdc086ed","Type":"ContainerStarted","Data":"35674014ac0646c142bd738942450c47ca74143d05b633cb0fefd65cee0a1166"} Jan 20 17:14:41 crc kubenswrapper[4558]: I0120 17:14:41.661618 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"5f2954ae-ca63-4cbe-963c-5721cdc086ed","Type":"ContainerStarted","Data":"1d815015bca759aa4b5f579f3d486ab04c2f5879bedd5051ac62b96ca4a0bb64"} Jan 20 17:14:41 crc kubenswrapper[4558]: I0120 17:14:41.665212 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"9b48e62d-af6d-4eb9-b491-766f773de980","Type":"ContainerStarted","Data":"bf15066af68c4f42d89667ee3f7871a7e42a87e7e5fe77fc9e8a69ddd83cf2f7"} Jan 20 17:14:41 crc kubenswrapper[4558]: I0120 17:14:41.665402 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:14:41 crc kubenswrapper[4558]: I0120 17:14:41.672881 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"412b9dee-e1e1-464b-9e5b-333df0634be7","Type":"ContainerStarted","Data":"53838079aa17a6463e7a401184003e8a0a06c177b316076f685f658cba88e77d"} Jan 20 17:14:41 crc kubenswrapper[4558]: I0120 17:14:41.672954 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"412b9dee-e1e1-464b-9e5b-333df0634be7","Type":"ContainerStarted","Data":"3dd89ea514584d95be106b0fd566aeb1b002ced4afaafb5adcdd9417440875bc"} Jan 20 17:14:41 crc kubenswrapper[4558]: I0120 17:14:41.688919 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-metadata-0" podStartSLOduration=2.6889020439999998 podStartE2EDuration="2.688902044s" podCreationTimestamp="2026-01-20 17:14:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:14:41.683292135 +0000 UTC m=+1975.443630102" watchObservedRunningTime="2026-01-20 17:14:41.688902044 +0000 UTC m=+1975.449240012" Jan 20 17:14:41 crc kubenswrapper[4558]: I0120 17:14:41.705874 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-api-0" podStartSLOduration=2.705864381 podStartE2EDuration="2.705864381s" podCreationTimestamp="2026-01-20 17:14:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:14:41.698503951 +0000 UTC m=+1975.458841918" watchObservedRunningTime="2026-01-20 17:14:41.705864381 +0000 UTC m=+1975.466202349" Jan 20 17:14:41 crc kubenswrapper[4558]: I0120 17:14:41.721449 4558 pod_startup_latency_tracker.go:104] 
"Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=1.943642281 podStartE2EDuration="5.721442965s" podCreationTimestamp="2026-01-20 17:14:36 +0000 UTC" firstStartedPulling="2026-01-20 17:14:37.660406823 +0000 UTC m=+1971.420744790" lastFinishedPulling="2026-01-20 17:14:41.438207507 +0000 UTC m=+1975.198545474" observedRunningTime="2026-01-20 17:14:41.717737377 +0000 UTC m=+1975.478075344" watchObservedRunningTime="2026-01-20 17:14:41.721442965 +0000 UTC m=+1975.481780933" Jan 20 17:14:45 crc kubenswrapper[4558]: I0120 17:14:45.133782 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:45 crc kubenswrapper[4558]: I0120 17:14:45.134702 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:45 crc kubenswrapper[4558]: I0120 17:14:45.808627 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:14:45 crc kubenswrapper[4558]: I0120 17:14:45.827269 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:14:45 crc kubenswrapper[4558]: I0120 17:14:45.828286 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:14:46 crc kubenswrapper[4558]: I0120 17:14:46.752667 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:14:46 crc kubenswrapper[4558]: I0120 17:14:46.929764 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-cell-mapping-8qtw4"] Jan 20 17:14:46 crc kubenswrapper[4558]: I0120 17:14:46.931238 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-8qtw4" Jan 20 17:14:46 crc kubenswrapper[4558]: I0120 17:14:46.934957 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-manage-config-data" Jan 20 17:14:46 crc kubenswrapper[4558]: I0120 17:14:46.935599 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-manage-scripts" Jan 20 17:14:46 crc kubenswrapper[4558]: I0120 17:14:46.942072 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-cell-mapping-8qtw4"] Jan 20 17:14:46 crc kubenswrapper[4558]: I0120 17:14:46.942715 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7869482b-42ff-4c97-84b3-3f9f84ba29f5-scripts\") pod \"nova-cell1-cell-mapping-8qtw4\" (UID: \"7869482b-42ff-4c97-84b3-3f9f84ba29f5\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-8qtw4" Jan 20 17:14:46 crc kubenswrapper[4558]: I0120 17:14:46.942759 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7869482b-42ff-4c97-84b3-3f9f84ba29f5-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-8qtw4\" (UID: \"7869482b-42ff-4c97-84b3-3f9f84ba29f5\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-8qtw4" Jan 20 17:14:46 crc kubenswrapper[4558]: I0120 17:14:46.942835 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2pslm\" (UniqueName: \"kubernetes.io/projected/7869482b-42ff-4c97-84b3-3f9f84ba29f5-kube-api-access-2pslm\") pod \"nova-cell1-cell-mapping-8qtw4\" (UID: \"7869482b-42ff-4c97-84b3-3f9f84ba29f5\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-8qtw4" Jan 20 17:14:46 crc kubenswrapper[4558]: I0120 17:14:46.942974 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7869482b-42ff-4c97-84b3-3f9f84ba29f5-config-data\") pod \"nova-cell1-cell-mapping-8qtw4\" (UID: \"7869482b-42ff-4c97-84b3-3f9f84ba29f5\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-8qtw4" Jan 20 17:14:47 crc kubenswrapper[4558]: I0120 17:14:47.044986 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/watcher-decision-engine-0" Jan 20 17:14:47 crc kubenswrapper[4558]: I0120 17:14:47.046281 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7869482b-42ff-4c97-84b3-3f9f84ba29f5-config-data\") pod \"nova-cell1-cell-mapping-8qtw4\" (UID: \"7869482b-42ff-4c97-84b3-3f9f84ba29f5\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-8qtw4" Jan 20 17:14:47 crc kubenswrapper[4558]: I0120 17:14:47.046415 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7869482b-42ff-4c97-84b3-3f9f84ba29f5-scripts\") pod \"nova-cell1-cell-mapping-8qtw4\" (UID: \"7869482b-42ff-4c97-84b3-3f9f84ba29f5\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-8qtw4" Jan 20 17:14:47 crc kubenswrapper[4558]: I0120 17:14:47.046461 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7869482b-42ff-4c97-84b3-3f9f84ba29f5-combined-ca-bundle\") pod 
\"nova-cell1-cell-mapping-8qtw4\" (UID: \"7869482b-42ff-4c97-84b3-3f9f84ba29f5\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-8qtw4" Jan 20 17:14:47 crc kubenswrapper[4558]: I0120 17:14:47.046488 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2pslm\" (UniqueName: \"kubernetes.io/projected/7869482b-42ff-4c97-84b3-3f9f84ba29f5-kube-api-access-2pslm\") pod \"nova-cell1-cell-mapping-8qtw4\" (UID: \"7869482b-42ff-4c97-84b3-3f9f84ba29f5\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-8qtw4" Jan 20 17:14:47 crc kubenswrapper[4558]: I0120 17:14:47.073565 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/watcher-decision-engine-0" Jan 20 17:14:47 crc kubenswrapper[4558]: I0120 17:14:47.087763 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2pslm\" (UniqueName: \"kubernetes.io/projected/7869482b-42ff-4c97-84b3-3f9f84ba29f5-kube-api-access-2pslm\") pod \"nova-cell1-cell-mapping-8qtw4\" (UID: \"7869482b-42ff-4c97-84b3-3f9f84ba29f5\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-8qtw4" Jan 20 17:14:47 crc kubenswrapper[4558]: I0120 17:14:47.087796 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7869482b-42ff-4c97-84b3-3f9f84ba29f5-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-8qtw4\" (UID: \"7869482b-42ff-4c97-84b3-3f9f84ba29f5\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-8qtw4" Jan 20 17:14:47 crc kubenswrapper[4558]: I0120 17:14:47.088473 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7869482b-42ff-4c97-84b3-3f9f84ba29f5-scripts\") pod \"nova-cell1-cell-mapping-8qtw4\" (UID: \"7869482b-42ff-4c97-84b3-3f9f84ba29f5\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-8qtw4" Jan 20 17:14:47 crc kubenswrapper[4558]: I0120 17:14:47.088905 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7869482b-42ff-4c97-84b3-3f9f84ba29f5-config-data\") pod \"nova-cell1-cell-mapping-8qtw4\" (UID: \"7869482b-42ff-4c97-84b3-3f9f84ba29f5\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-8qtw4" Jan 20 17:14:47 crc kubenswrapper[4558]: I0120 17:14:47.255989 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-8qtw4" Jan 20 17:14:47 crc kubenswrapper[4558]: I0120 17:14:47.684604 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-cell-mapping-8qtw4"] Jan 20 17:14:47 crc kubenswrapper[4558]: I0120 17:14:47.747763 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-8qtw4" event={"ID":"7869482b-42ff-4c97-84b3-3f9f84ba29f5","Type":"ContainerStarted","Data":"142a583e814e644eb7bca6a323c511e7fc08fb1d031e11dcfeb51b14c66f666d"} Jan 20 17:14:47 crc kubenswrapper[4558]: I0120 17:14:47.748260 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/watcher-decision-engine-0" Jan 20 17:14:47 crc kubenswrapper[4558]: I0120 17:14:47.782144 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/watcher-decision-engine-0" Jan 20 17:14:48 crc kubenswrapper[4558]: I0120 17:14:48.759451 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-8qtw4" event={"ID":"7869482b-42ff-4c97-84b3-3f9f84ba29f5","Type":"ContainerStarted","Data":"2922ddc4410807395574c4bcc5718acbb05a0ff484308d0ea59126f2251e8de3"} Jan 20 17:14:48 crc kubenswrapper[4558]: I0120 17:14:48.781856 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-8qtw4" podStartSLOduration=2.781836772 podStartE2EDuration="2.781836772s" podCreationTimestamp="2026-01-20 17:14:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:14:48.772230668 +0000 UTC m=+1982.532568635" watchObservedRunningTime="2026-01-20 17:14:48.781836772 +0000 UTC m=+1982.542174739" Jan 20 17:14:50 crc kubenswrapper[4558]: I0120 17:14:50.118108 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:50 crc kubenswrapper[4558]: I0120 17:14:50.118195 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:50 crc kubenswrapper[4558]: I0120 17:14:50.132993 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:50 crc kubenswrapper[4558]: I0120 17:14:50.133062 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:51 crc kubenswrapper[4558]: I0120 17:14:51.213374 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-metadata-0" podUID="5f2954ae-ca63-4cbe-963c-5721cdc086ed" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.93:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:14:51 crc kubenswrapper[4558]: I0120 17:14:51.213597 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" podUID="412b9dee-e1e1-464b-9e5b-333df0634be7" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.91:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 20 17:14:51 crc kubenswrapper[4558]: I0120 17:14:51.213621 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-metadata-0" 
podUID="5f2954ae-ca63-4cbe-963c-5721cdc086ed" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.93:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:14:51 crc kubenswrapper[4558]: I0120 17:14:51.213615 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" podUID="412b9dee-e1e1-464b-9e5b-333df0634be7" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.91:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 20 17:14:52 crc kubenswrapper[4558]: I0120 17:14:52.799046 4558 generic.go:334] "Generic (PLEG): container finished" podID="7869482b-42ff-4c97-84b3-3f9f84ba29f5" containerID="2922ddc4410807395574c4bcc5718acbb05a0ff484308d0ea59126f2251e8de3" exitCode=0 Jan 20 17:14:52 crc kubenswrapper[4558]: I0120 17:14:52.799114 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-8qtw4" event={"ID":"7869482b-42ff-4c97-84b3-3f9f84ba29f5","Type":"ContainerDied","Data":"2922ddc4410807395574c4bcc5718acbb05a0ff484308d0ea59126f2251e8de3"} Jan 20 17:14:54 crc kubenswrapper[4558]: I0120 17:14:54.127483 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-8qtw4" Jan 20 17:14:54 crc kubenswrapper[4558]: I0120 17:14:54.312291 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2pslm\" (UniqueName: \"kubernetes.io/projected/7869482b-42ff-4c97-84b3-3f9f84ba29f5-kube-api-access-2pslm\") pod \"7869482b-42ff-4c97-84b3-3f9f84ba29f5\" (UID: \"7869482b-42ff-4c97-84b3-3f9f84ba29f5\") " Jan 20 17:14:54 crc kubenswrapper[4558]: I0120 17:14:54.312388 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7869482b-42ff-4c97-84b3-3f9f84ba29f5-config-data\") pod \"7869482b-42ff-4c97-84b3-3f9f84ba29f5\" (UID: \"7869482b-42ff-4c97-84b3-3f9f84ba29f5\") " Jan 20 17:14:54 crc kubenswrapper[4558]: I0120 17:14:54.312495 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7869482b-42ff-4c97-84b3-3f9f84ba29f5-scripts\") pod \"7869482b-42ff-4c97-84b3-3f9f84ba29f5\" (UID: \"7869482b-42ff-4c97-84b3-3f9f84ba29f5\") " Jan 20 17:14:54 crc kubenswrapper[4558]: I0120 17:14:54.312569 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7869482b-42ff-4c97-84b3-3f9f84ba29f5-combined-ca-bundle\") pod \"7869482b-42ff-4c97-84b3-3f9f84ba29f5\" (UID: \"7869482b-42ff-4c97-84b3-3f9f84ba29f5\") " Jan 20 17:14:54 crc kubenswrapper[4558]: I0120 17:14:54.320467 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7869482b-42ff-4c97-84b3-3f9f84ba29f5-kube-api-access-2pslm" (OuterVolumeSpecName: "kube-api-access-2pslm") pod "7869482b-42ff-4c97-84b3-3f9f84ba29f5" (UID: "7869482b-42ff-4c97-84b3-3f9f84ba29f5"). InnerVolumeSpecName "kube-api-access-2pslm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:14:54 crc kubenswrapper[4558]: I0120 17:14:54.320665 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7869482b-42ff-4c97-84b3-3f9f84ba29f5-scripts" (OuterVolumeSpecName: "scripts") pod "7869482b-42ff-4c97-84b3-3f9f84ba29f5" (UID: "7869482b-42ff-4c97-84b3-3f9f84ba29f5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:14:54 crc kubenswrapper[4558]: I0120 17:14:54.339763 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7869482b-42ff-4c97-84b3-3f9f84ba29f5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7869482b-42ff-4c97-84b3-3f9f84ba29f5" (UID: "7869482b-42ff-4c97-84b3-3f9f84ba29f5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:14:54 crc kubenswrapper[4558]: I0120 17:14:54.341747 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7869482b-42ff-4c97-84b3-3f9f84ba29f5-config-data" (OuterVolumeSpecName: "config-data") pod "7869482b-42ff-4c97-84b3-3f9f84ba29f5" (UID: "7869482b-42ff-4c97-84b3-3f9f84ba29f5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:14:54 crc kubenswrapper[4558]: I0120 17:14:54.415933 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2pslm\" (UniqueName: \"kubernetes.io/projected/7869482b-42ff-4c97-84b3-3f9f84ba29f5-kube-api-access-2pslm\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:54 crc kubenswrapper[4558]: I0120 17:14:54.415978 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7869482b-42ff-4c97-84b3-3f9f84ba29f5-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:54 crc kubenswrapper[4558]: I0120 17:14:54.415990 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7869482b-42ff-4c97-84b3-3f9f84ba29f5-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:54 crc kubenswrapper[4558]: I0120 17:14:54.416001 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7869482b-42ff-4c97-84b3-3f9f84ba29f5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:54 crc kubenswrapper[4558]: I0120 17:14:54.825080 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-8qtw4" event={"ID":"7869482b-42ff-4c97-84b3-3f9f84ba29f5","Type":"ContainerDied","Data":"142a583e814e644eb7bca6a323c511e7fc08fb1d031e11dcfeb51b14c66f666d"} Jan 20 17:14:54 crc kubenswrapper[4558]: I0120 17:14:54.825138 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-8qtw4" Jan 20 17:14:54 crc kubenswrapper[4558]: I0120 17:14:54.825149 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="142a583e814e644eb7bca6a323c511e7fc08fb1d031e11dcfeb51b14c66f666d" Jan 20 17:14:54 crc kubenswrapper[4558]: I0120 17:14:54.995482 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:14:54 crc kubenswrapper[4558]: I0120 17:14:54.995765 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="412b9dee-e1e1-464b-9e5b-333df0634be7" containerName="nova-api-log" containerID="cri-o://3dd89ea514584d95be106b0fd566aeb1b002ced4afaafb5adcdd9417440875bc" gracePeriod=30 Jan 20 17:14:54 crc kubenswrapper[4558]: I0120 17:14:54.995848 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="412b9dee-e1e1-464b-9e5b-333df0634be7" containerName="nova-api-api" containerID="cri-o://53838079aa17a6463e7a401184003e8a0a06c177b316076f685f658cba88e77d" gracePeriod=30 Jan 20 17:14:55 crc kubenswrapper[4558]: I0120 17:14:55.011367 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:14:55 crc kubenswrapper[4558]: I0120 17:14:55.011781 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="5f2954ae-ca63-4cbe-963c-5721cdc086ed" containerName="nova-metadata-metadata" containerID="cri-o://8f366bcfc90f3917e32fc1bad9f262fda1ff8f03750cd1c1697bf5061796443a" gracePeriod=30 Jan 20 17:14:55 crc kubenswrapper[4558]: I0120 17:14:55.011673 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="5f2954ae-ca63-4cbe-963c-5721cdc086ed" containerName="nova-metadata-log" containerID="cri-o://35674014ac0646c142bd738942450c47ca74143d05b633cb0fefd65cee0a1166" gracePeriod=30 Jan 20 17:14:55 crc kubenswrapper[4558]: I0120 17:14:55.838764 4558 generic.go:334] "Generic (PLEG): container finished" podID="5f2954ae-ca63-4cbe-963c-5721cdc086ed" containerID="35674014ac0646c142bd738942450c47ca74143d05b633cb0fefd65cee0a1166" exitCode=143 Jan 20 17:14:55 crc kubenswrapper[4558]: I0120 17:14:55.838868 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"5f2954ae-ca63-4cbe-963c-5721cdc086ed","Type":"ContainerDied","Data":"35674014ac0646c142bd738942450c47ca74143d05b633cb0fefd65cee0a1166"} Jan 20 17:14:55 crc kubenswrapper[4558]: I0120 17:14:55.841947 4558 generic.go:334] "Generic (PLEG): container finished" podID="412b9dee-e1e1-464b-9e5b-333df0634be7" containerID="3dd89ea514584d95be106b0fd566aeb1b002ced4afaafb5adcdd9417440875bc" exitCode=143 Jan 20 17:14:55 crc kubenswrapper[4558]: I0120 17:14:55.841989 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"412b9dee-e1e1-464b-9e5b-333df0634be7","Type":"ContainerDied","Data":"3dd89ea514584d95be106b0fd566aeb1b002ced4afaafb5adcdd9417440875bc"} Jan 20 17:14:57 crc kubenswrapper[4558]: I0120 17:14:57.329959 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 
20 17:14:57 crc kubenswrapper[4558]: I0120 17:14:57.330031 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 17:14:57 crc kubenswrapper[4558]: I0120 17:14:57.330084 4558 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" Jan 20 17:14:57 crc kubenswrapper[4558]: I0120 17:14:57.331045 4558 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"09cede35de046f760d5c008c35cdd13631d8660c06e04fb988751619769147c6"} pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 20 17:14:57 crc kubenswrapper[4558]: I0120 17:14:57.331112 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" containerID="cri-o://09cede35de046f760d5c008c35cdd13631d8660c06e04fb988751619769147c6" gracePeriod=600 Jan 20 17:14:57 crc kubenswrapper[4558]: I0120 17:14:57.865157 4558 generic.go:334] "Generic (PLEG): container finished" podID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerID="09cede35de046f760d5c008c35cdd13631d8660c06e04fb988751619769147c6" exitCode=0 Jan 20 17:14:57 crc kubenswrapper[4558]: I0120 17:14:57.865193 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerDied","Data":"09cede35de046f760d5c008c35cdd13631d8660c06e04fb988751619769147c6"} Jan 20 17:14:57 crc kubenswrapper[4558]: I0120 17:14:57.865500 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerStarted","Data":"0ffce44366a8b4466427b682fb41640425366a0f4449210959148de9aef695c6"} Jan 20 17:14:57 crc kubenswrapper[4558]: I0120 17:14:57.865525 4558 scope.go:117] "RemoveContainer" containerID="a711a286282347590079d2447ef37fa9ed0f11085d6d7a65f85e65c1c2df608f" Jan 20 17:14:58 crc kubenswrapper[4558]: I0120 17:14:58.593865 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:58 crc kubenswrapper[4558]: I0120 17:14:58.598803 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:58 crc kubenswrapper[4558]: I0120 17:14:58.602510 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5f2954ae-ca63-4cbe-963c-5721cdc086ed-logs\") pod \"5f2954ae-ca63-4cbe-963c-5721cdc086ed\" (UID: \"5f2954ae-ca63-4cbe-963c-5721cdc086ed\") " Jan 20 17:14:58 crc kubenswrapper[4558]: I0120 17:14:58.602703 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f2954ae-ca63-4cbe-963c-5721cdc086ed-config-data\") pod \"5f2954ae-ca63-4cbe-963c-5721cdc086ed\" (UID: \"5f2954ae-ca63-4cbe-963c-5721cdc086ed\") " Jan 20 17:14:58 crc kubenswrapper[4558]: I0120 17:14:58.603062 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5f2954ae-ca63-4cbe-963c-5721cdc086ed-logs" (OuterVolumeSpecName: "logs") pod "5f2954ae-ca63-4cbe-963c-5721cdc086ed" (UID: "5f2954ae-ca63-4cbe-963c-5721cdc086ed"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:14:58 crc kubenswrapper[4558]: I0120 17:14:58.603721 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5f2954ae-ca63-4cbe-963c-5721cdc086ed-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:58 crc kubenswrapper[4558]: I0120 17:14:58.637746 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f2954ae-ca63-4cbe-963c-5721cdc086ed-config-data" (OuterVolumeSpecName: "config-data") pod "5f2954ae-ca63-4cbe-963c-5721cdc086ed" (UID: "5f2954ae-ca63-4cbe-963c-5721cdc086ed"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:14:58 crc kubenswrapper[4558]: I0120 17:14:58.704378 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q72z4\" (UniqueName: \"kubernetes.io/projected/412b9dee-e1e1-464b-9e5b-333df0634be7-kube-api-access-q72z4\") pod \"412b9dee-e1e1-464b-9e5b-333df0634be7\" (UID: \"412b9dee-e1e1-464b-9e5b-333df0634be7\") " Jan 20 17:14:58 crc kubenswrapper[4558]: I0120 17:14:58.704553 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f2954ae-ca63-4cbe-963c-5721cdc086ed-combined-ca-bundle\") pod \"5f2954ae-ca63-4cbe-963c-5721cdc086ed\" (UID: \"5f2954ae-ca63-4cbe-963c-5721cdc086ed\") " Jan 20 17:14:58 crc kubenswrapper[4558]: I0120 17:14:58.704651 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ndqp4\" (UniqueName: \"kubernetes.io/projected/5f2954ae-ca63-4cbe-963c-5721cdc086ed-kube-api-access-ndqp4\") pod \"5f2954ae-ca63-4cbe-963c-5721cdc086ed\" (UID: \"5f2954ae-ca63-4cbe-963c-5721cdc086ed\") " Jan 20 17:14:58 crc kubenswrapper[4558]: I0120 17:14:58.704797 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/412b9dee-e1e1-464b-9e5b-333df0634be7-combined-ca-bundle\") pod \"412b9dee-e1e1-464b-9e5b-333df0634be7\" (UID: \"412b9dee-e1e1-464b-9e5b-333df0634be7\") " Jan 20 17:14:58 crc kubenswrapper[4558]: I0120 17:14:58.705055 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/412b9dee-e1e1-464b-9e5b-333df0634be7-logs\") pod \"412b9dee-e1e1-464b-9e5b-333df0634be7\" (UID: \"412b9dee-e1e1-464b-9e5b-333df0634be7\") " Jan 20 17:14:58 crc kubenswrapper[4558]: I0120 17:14:58.705136 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/412b9dee-e1e1-464b-9e5b-333df0634be7-config-data\") pod \"412b9dee-e1e1-464b-9e5b-333df0634be7\" (UID: \"412b9dee-e1e1-464b-9e5b-333df0634be7\") " Jan 20 17:14:58 crc kubenswrapper[4558]: I0120 17:14:58.705238 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/5f2954ae-ca63-4cbe-963c-5721cdc086ed-nova-metadata-tls-certs\") pod \"5f2954ae-ca63-4cbe-963c-5721cdc086ed\" (UID: \"5f2954ae-ca63-4cbe-963c-5721cdc086ed\") " Jan 20 17:14:58 crc kubenswrapper[4558]: I0120 17:14:58.705400 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/412b9dee-e1e1-464b-9e5b-333df0634be7-logs" (OuterVolumeSpecName: "logs") pod "412b9dee-e1e1-464b-9e5b-333df0634be7" (UID: "412b9dee-e1e1-464b-9e5b-333df0634be7"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:14:58 crc kubenswrapper[4558]: I0120 17:14:58.705889 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/412b9dee-e1e1-464b-9e5b-333df0634be7-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:58 crc kubenswrapper[4558]: I0120 17:14:58.705951 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f2954ae-ca63-4cbe-963c-5721cdc086ed-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:58 crc kubenswrapper[4558]: I0120 17:14:58.707930 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/412b9dee-e1e1-464b-9e5b-333df0634be7-kube-api-access-q72z4" (OuterVolumeSpecName: "kube-api-access-q72z4") pod "412b9dee-e1e1-464b-9e5b-333df0634be7" (UID: "412b9dee-e1e1-464b-9e5b-333df0634be7"). InnerVolumeSpecName "kube-api-access-q72z4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:14:58 crc kubenswrapper[4558]: I0120 17:14:58.708404 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f2954ae-ca63-4cbe-963c-5721cdc086ed-kube-api-access-ndqp4" (OuterVolumeSpecName: "kube-api-access-ndqp4") pod "5f2954ae-ca63-4cbe-963c-5721cdc086ed" (UID: "5f2954ae-ca63-4cbe-963c-5721cdc086ed"). InnerVolumeSpecName "kube-api-access-ndqp4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:14:58 crc kubenswrapper[4558]: E0120 17:14:58.723932 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/412b9dee-e1e1-464b-9e5b-333df0634be7-config-data podName:412b9dee-e1e1-464b-9e5b-333df0634be7 nodeName:}" failed. No retries permitted until 2026-01-20 17:14:59.223907488 +0000 UTC m=+1992.984245455 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "config-data" (UniqueName: "kubernetes.io/secret/412b9dee-e1e1-464b-9e5b-333df0634be7-config-data") pod "412b9dee-e1e1-464b-9e5b-333df0634be7" (UID: "412b9dee-e1e1-464b-9e5b-333df0634be7") : error deleting /var/lib/kubelet/pods/412b9dee-e1e1-464b-9e5b-333df0634be7/volume-subpaths: remove /var/lib/kubelet/pods/412b9dee-e1e1-464b-9e5b-333df0634be7/volume-subpaths: no such file or directory Jan 20 17:14:58 crc kubenswrapper[4558]: I0120 17:14:58.724394 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f2954ae-ca63-4cbe-963c-5721cdc086ed-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5f2954ae-ca63-4cbe-963c-5721cdc086ed" (UID: "5f2954ae-ca63-4cbe-963c-5721cdc086ed"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:14:58 crc kubenswrapper[4558]: I0120 17:14:58.726077 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/412b9dee-e1e1-464b-9e5b-333df0634be7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "412b9dee-e1e1-464b-9e5b-333df0634be7" (UID: "412b9dee-e1e1-464b-9e5b-333df0634be7"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:14:58 crc kubenswrapper[4558]: I0120 17:14:58.739860 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f2954ae-ca63-4cbe-963c-5721cdc086ed-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "5f2954ae-ca63-4cbe-963c-5721cdc086ed" (UID: "5f2954ae-ca63-4cbe-963c-5721cdc086ed"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:14:58 crc kubenswrapper[4558]: I0120 17:14:58.807770 4558 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/5f2954ae-ca63-4cbe-963c-5721cdc086ed-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:58 crc kubenswrapper[4558]: I0120 17:14:58.807802 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q72z4\" (UniqueName: \"kubernetes.io/projected/412b9dee-e1e1-464b-9e5b-333df0634be7-kube-api-access-q72z4\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:58 crc kubenswrapper[4558]: I0120 17:14:58.807814 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f2954ae-ca63-4cbe-963c-5721cdc086ed-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:58 crc kubenswrapper[4558]: I0120 17:14:58.807825 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ndqp4\" (UniqueName: \"kubernetes.io/projected/5f2954ae-ca63-4cbe-963c-5721cdc086ed-kube-api-access-ndqp4\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:58 crc kubenswrapper[4558]: I0120 17:14:58.807836 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/412b9dee-e1e1-464b-9e5b-333df0634be7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:58 crc kubenswrapper[4558]: I0120 17:14:58.884250 4558 generic.go:334] "Generic (PLEG): container finished" podID="412b9dee-e1e1-464b-9e5b-333df0634be7" containerID="53838079aa17a6463e7a401184003e8a0a06c177b316076f685f658cba88e77d" exitCode=0 Jan 20 17:14:58 crc kubenswrapper[4558]: I0120 17:14:58.884359 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"412b9dee-e1e1-464b-9e5b-333df0634be7","Type":"ContainerDied","Data":"53838079aa17a6463e7a401184003e8a0a06c177b316076f685f658cba88e77d"} Jan 20 17:14:58 crc kubenswrapper[4558]: I0120 17:14:58.884412 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"412b9dee-e1e1-464b-9e5b-333df0634be7","Type":"ContainerDied","Data":"a42c63492608dd76d3afa174b223df2cce20856e1ea2f06bc0ac3b98ebdaea3a"} Jan 20 17:14:58 crc kubenswrapper[4558]: I0120 17:14:58.884463 4558 scope.go:117] "RemoveContainer" containerID="53838079aa17a6463e7a401184003e8a0a06c177b316076f685f658cba88e77d" Jan 20 17:14:58 crc kubenswrapper[4558]: I0120 17:14:58.884778 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:58 crc kubenswrapper[4558]: I0120 17:14:58.886640 4558 generic.go:334] "Generic (PLEG): container finished" podID="5f2954ae-ca63-4cbe-963c-5721cdc086ed" containerID="8f366bcfc90f3917e32fc1bad9f262fda1ff8f03750cd1c1697bf5061796443a" exitCode=0 Jan 20 17:14:58 crc kubenswrapper[4558]: I0120 17:14:58.886680 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"5f2954ae-ca63-4cbe-963c-5721cdc086ed","Type":"ContainerDied","Data":"8f366bcfc90f3917e32fc1bad9f262fda1ff8f03750cd1c1697bf5061796443a"} Jan 20 17:14:58 crc kubenswrapper[4558]: I0120 17:14:58.886726 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:58 crc kubenswrapper[4558]: I0120 17:14:58.886728 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"5f2954ae-ca63-4cbe-963c-5721cdc086ed","Type":"ContainerDied","Data":"1d815015bca759aa4b5f579f3d486ab04c2f5879bedd5051ac62b96ca4a0bb64"} Jan 20 17:14:58 crc kubenswrapper[4558]: I0120 17:14:58.907193 4558 scope.go:117] "RemoveContainer" containerID="3dd89ea514584d95be106b0fd566aeb1b002ced4afaafb5adcdd9417440875bc" Jan 20 17:14:58 crc kubenswrapper[4558]: I0120 17:14:58.929507 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:14:58 crc kubenswrapper[4558]: I0120 17:14:58.945931 4558 scope.go:117] "RemoveContainer" containerID="53838079aa17a6463e7a401184003e8a0a06c177b316076f685f658cba88e77d" Jan 20 17:14:58 crc kubenswrapper[4558]: E0120 17:14:58.946501 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"53838079aa17a6463e7a401184003e8a0a06c177b316076f685f658cba88e77d\": container with ID starting with 53838079aa17a6463e7a401184003e8a0a06c177b316076f685f658cba88e77d not found: ID does not exist" containerID="53838079aa17a6463e7a401184003e8a0a06c177b316076f685f658cba88e77d" Jan 20 17:14:58 crc kubenswrapper[4558]: I0120 17:14:58.946595 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"53838079aa17a6463e7a401184003e8a0a06c177b316076f685f658cba88e77d"} err="failed to get container status \"53838079aa17a6463e7a401184003e8a0a06c177b316076f685f658cba88e77d\": rpc error: code = NotFound desc = could not find container \"53838079aa17a6463e7a401184003e8a0a06c177b316076f685f658cba88e77d\": container with ID starting with 53838079aa17a6463e7a401184003e8a0a06c177b316076f685f658cba88e77d not found: ID does not exist" Jan 20 17:14:58 crc kubenswrapper[4558]: I0120 17:14:58.946667 4558 scope.go:117] "RemoveContainer" containerID="3dd89ea514584d95be106b0fd566aeb1b002ced4afaafb5adcdd9417440875bc" Jan 20 17:14:58 crc kubenswrapper[4558]: I0120 17:14:58.947183 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:14:58 crc kubenswrapper[4558]: E0120 17:14:58.947583 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3dd89ea514584d95be106b0fd566aeb1b002ced4afaafb5adcdd9417440875bc\": container with ID starting with 3dd89ea514584d95be106b0fd566aeb1b002ced4afaafb5adcdd9417440875bc not found: ID does not exist" containerID="3dd89ea514584d95be106b0fd566aeb1b002ced4afaafb5adcdd9417440875bc" Jan 20 17:14:58 crc 
kubenswrapper[4558]: I0120 17:14:58.947712 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3dd89ea514584d95be106b0fd566aeb1b002ced4afaafb5adcdd9417440875bc"} err="failed to get container status \"3dd89ea514584d95be106b0fd566aeb1b002ced4afaafb5adcdd9417440875bc\": rpc error: code = NotFound desc = could not find container \"3dd89ea514584d95be106b0fd566aeb1b002ced4afaafb5adcdd9417440875bc\": container with ID starting with 3dd89ea514584d95be106b0fd566aeb1b002ced4afaafb5adcdd9417440875bc not found: ID does not exist" Jan 20 17:14:58 crc kubenswrapper[4558]: I0120 17:14:58.947794 4558 scope.go:117] "RemoveContainer" containerID="8f366bcfc90f3917e32fc1bad9f262fda1ff8f03750cd1c1697bf5061796443a" Jan 20 17:14:58 crc kubenswrapper[4558]: I0120 17:14:58.964177 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:14:58 crc kubenswrapper[4558]: E0120 17:14:58.964655 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7869482b-42ff-4c97-84b3-3f9f84ba29f5" containerName="nova-manage" Jan 20 17:14:58 crc kubenswrapper[4558]: I0120 17:14:58.964734 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7869482b-42ff-4c97-84b3-3f9f84ba29f5" containerName="nova-manage" Jan 20 17:14:58 crc kubenswrapper[4558]: E0120 17:14:58.964783 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="412b9dee-e1e1-464b-9e5b-333df0634be7" containerName="nova-api-log" Jan 20 17:14:58 crc kubenswrapper[4558]: I0120 17:14:58.964792 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="412b9dee-e1e1-464b-9e5b-333df0634be7" containerName="nova-api-log" Jan 20 17:14:58 crc kubenswrapper[4558]: E0120 17:14:58.964807 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f2954ae-ca63-4cbe-963c-5721cdc086ed" containerName="nova-metadata-log" Jan 20 17:14:58 crc kubenswrapper[4558]: I0120 17:14:58.964814 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f2954ae-ca63-4cbe-963c-5721cdc086ed" containerName="nova-metadata-log" Jan 20 17:14:58 crc kubenswrapper[4558]: E0120 17:14:58.964826 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f2954ae-ca63-4cbe-963c-5721cdc086ed" containerName="nova-metadata-metadata" Jan 20 17:14:58 crc kubenswrapper[4558]: I0120 17:14:58.964831 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f2954ae-ca63-4cbe-963c-5721cdc086ed" containerName="nova-metadata-metadata" Jan 20 17:14:58 crc kubenswrapper[4558]: E0120 17:14:58.964845 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="412b9dee-e1e1-464b-9e5b-333df0634be7" containerName="nova-api-api" Jan 20 17:14:58 crc kubenswrapper[4558]: I0120 17:14:58.964851 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="412b9dee-e1e1-464b-9e5b-333df0634be7" containerName="nova-api-api" Jan 20 17:14:58 crc kubenswrapper[4558]: I0120 17:14:58.965090 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="412b9dee-e1e1-464b-9e5b-333df0634be7" containerName="nova-api-api" Jan 20 17:14:58 crc kubenswrapper[4558]: I0120 17:14:58.965105 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="7869482b-42ff-4c97-84b3-3f9f84ba29f5" containerName="nova-manage" Jan 20 17:14:58 crc kubenswrapper[4558]: I0120 17:14:58.965118 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="412b9dee-e1e1-464b-9e5b-333df0634be7" containerName="nova-api-log" Jan 20 17:14:58 crc kubenswrapper[4558]: I0120 
17:14:58.965135 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f2954ae-ca63-4cbe-963c-5721cdc086ed" containerName="nova-metadata-metadata" Jan 20 17:14:58 crc kubenswrapper[4558]: I0120 17:14:58.965146 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f2954ae-ca63-4cbe-963c-5721cdc086ed" containerName="nova-metadata-log" Jan 20 17:14:58 crc kubenswrapper[4558]: I0120 17:14:58.966460 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:58 crc kubenswrapper[4558]: I0120 17:14:58.969100 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-metadata-internal-svc" Jan 20 17:14:58 crc kubenswrapper[4558]: I0120 17:14:58.969152 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-metadata-config-data" Jan 20 17:14:58 crc kubenswrapper[4558]: I0120 17:14:58.979923 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:14:58 crc kubenswrapper[4558]: I0120 17:14:58.987869 4558 scope.go:117] "RemoveContainer" containerID="35674014ac0646c142bd738942450c47ca74143d05b633cb0fefd65cee0a1166" Jan 20 17:14:59 crc kubenswrapper[4558]: I0120 17:14:59.010504 4558 scope.go:117] "RemoveContainer" containerID="8f366bcfc90f3917e32fc1bad9f262fda1ff8f03750cd1c1697bf5061796443a" Jan 20 17:14:59 crc kubenswrapper[4558]: E0120 17:14:59.011235 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8f366bcfc90f3917e32fc1bad9f262fda1ff8f03750cd1c1697bf5061796443a\": container with ID starting with 8f366bcfc90f3917e32fc1bad9f262fda1ff8f03750cd1c1697bf5061796443a not found: ID does not exist" containerID="8f366bcfc90f3917e32fc1bad9f262fda1ff8f03750cd1c1697bf5061796443a" Jan 20 17:14:59 crc kubenswrapper[4558]: I0120 17:14:59.011274 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8f366bcfc90f3917e32fc1bad9f262fda1ff8f03750cd1c1697bf5061796443a"} err="failed to get container status \"8f366bcfc90f3917e32fc1bad9f262fda1ff8f03750cd1c1697bf5061796443a\": rpc error: code = NotFound desc = could not find container \"8f366bcfc90f3917e32fc1bad9f262fda1ff8f03750cd1c1697bf5061796443a\": container with ID starting with 8f366bcfc90f3917e32fc1bad9f262fda1ff8f03750cd1c1697bf5061796443a not found: ID does not exist" Jan 20 17:14:59 crc kubenswrapper[4558]: I0120 17:14:59.011300 4558 scope.go:117] "RemoveContainer" containerID="35674014ac0646c142bd738942450c47ca74143d05b633cb0fefd65cee0a1166" Jan 20 17:14:59 crc kubenswrapper[4558]: E0120 17:14:59.011617 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"35674014ac0646c142bd738942450c47ca74143d05b633cb0fefd65cee0a1166\": container with ID starting with 35674014ac0646c142bd738942450c47ca74143d05b633cb0fefd65cee0a1166 not found: ID does not exist" containerID="35674014ac0646c142bd738942450c47ca74143d05b633cb0fefd65cee0a1166" Jan 20 17:14:59 crc kubenswrapper[4558]: I0120 17:14:59.011637 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"35674014ac0646c142bd738942450c47ca74143d05b633cb0fefd65cee0a1166"} err="failed to get container status \"35674014ac0646c142bd738942450c47ca74143d05b633cb0fefd65cee0a1166\": rpc error: code = NotFound desc = could not find container 
\"35674014ac0646c142bd738942450c47ca74143d05b633cb0fefd65cee0a1166\": container with ID starting with 35674014ac0646c142bd738942450c47ca74143d05b633cb0fefd65cee0a1166 not found: ID does not exist" Jan 20 17:14:59 crc kubenswrapper[4558]: I0120 17:14:59.116207 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70dda975-7e0b-40a4-a92a-675be53560b7-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"70dda975-7e0b-40a4-a92a-675be53560b7\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:59 crc kubenswrapper[4558]: I0120 17:14:59.116445 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-45h8b\" (UniqueName: \"kubernetes.io/projected/70dda975-7e0b-40a4-a92a-675be53560b7-kube-api-access-45h8b\") pod \"nova-metadata-0\" (UID: \"70dda975-7e0b-40a4-a92a-675be53560b7\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:59 crc kubenswrapper[4558]: I0120 17:14:59.116483 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/70dda975-7e0b-40a4-a92a-675be53560b7-logs\") pod \"nova-metadata-0\" (UID: \"70dda975-7e0b-40a4-a92a-675be53560b7\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:59 crc kubenswrapper[4558]: I0120 17:14:59.116526 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70dda975-7e0b-40a4-a92a-675be53560b7-config-data\") pod \"nova-metadata-0\" (UID: \"70dda975-7e0b-40a4-a92a-675be53560b7\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:59 crc kubenswrapper[4558]: I0120 17:14:59.116780 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/70dda975-7e0b-40a4-a92a-675be53560b7-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"70dda975-7e0b-40a4-a92a-675be53560b7\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:59 crc kubenswrapper[4558]: I0120 17:14:59.218993 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/70dda975-7e0b-40a4-a92a-675be53560b7-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"70dda975-7e0b-40a4-a92a-675be53560b7\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:59 crc kubenswrapper[4558]: I0120 17:14:59.219135 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70dda975-7e0b-40a4-a92a-675be53560b7-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"70dda975-7e0b-40a4-a92a-675be53560b7\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:59 crc kubenswrapper[4558]: I0120 17:14:59.219190 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-45h8b\" (UniqueName: \"kubernetes.io/projected/70dda975-7e0b-40a4-a92a-675be53560b7-kube-api-access-45h8b\") pod \"nova-metadata-0\" (UID: \"70dda975-7e0b-40a4-a92a-675be53560b7\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:59 crc kubenswrapper[4558]: I0120 17:14:59.219234 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/70dda975-7e0b-40a4-a92a-675be53560b7-logs\") pod \"nova-metadata-0\" (UID: \"70dda975-7e0b-40a4-a92a-675be53560b7\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:59 crc kubenswrapper[4558]: I0120 17:14:59.219320 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70dda975-7e0b-40a4-a92a-675be53560b7-config-data\") pod \"nova-metadata-0\" (UID: \"70dda975-7e0b-40a4-a92a-675be53560b7\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:59 crc kubenswrapper[4558]: I0120 17:14:59.219822 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/70dda975-7e0b-40a4-a92a-675be53560b7-logs\") pod \"nova-metadata-0\" (UID: \"70dda975-7e0b-40a4-a92a-675be53560b7\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:59 crc kubenswrapper[4558]: I0120 17:14:59.225124 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/70dda975-7e0b-40a4-a92a-675be53560b7-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"70dda975-7e0b-40a4-a92a-675be53560b7\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:59 crc kubenswrapper[4558]: I0120 17:14:59.225509 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70dda975-7e0b-40a4-a92a-675be53560b7-config-data\") pod \"nova-metadata-0\" (UID: \"70dda975-7e0b-40a4-a92a-675be53560b7\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:59 crc kubenswrapper[4558]: I0120 17:14:59.226015 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70dda975-7e0b-40a4-a92a-675be53560b7-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"70dda975-7e0b-40a4-a92a-675be53560b7\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:59 crc kubenswrapper[4558]: I0120 17:14:59.233671 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-45h8b\" (UniqueName: \"kubernetes.io/projected/70dda975-7e0b-40a4-a92a-675be53560b7-kube-api-access-45h8b\") pod \"nova-metadata-0\" (UID: \"70dda975-7e0b-40a4-a92a-675be53560b7\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:59 crc kubenswrapper[4558]: I0120 17:14:59.291639 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:14:59 crc kubenswrapper[4558]: I0120 17:14:59.320773 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/412b9dee-e1e1-464b-9e5b-333df0634be7-config-data\") pod \"412b9dee-e1e1-464b-9e5b-333df0634be7\" (UID: \"412b9dee-e1e1-464b-9e5b-333df0634be7\") " Jan 20 17:14:59 crc kubenswrapper[4558]: I0120 17:14:59.324477 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/412b9dee-e1e1-464b-9e5b-333df0634be7-config-data" (OuterVolumeSpecName: "config-data") pod "412b9dee-e1e1-464b-9e5b-333df0634be7" (UID: "412b9dee-e1e1-464b-9e5b-333df0634be7"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:14:59 crc kubenswrapper[4558]: I0120 17:14:59.424259 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/412b9dee-e1e1-464b-9e5b-333df0634be7-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:14:59 crc kubenswrapper[4558]: I0120 17:14:59.527482 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:14:59 crc kubenswrapper[4558]: I0120 17:14:59.540746 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:14:59 crc kubenswrapper[4558]: I0120 17:14:59.548356 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:14:59 crc kubenswrapper[4558]: I0120 17:14:59.550201 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:59 crc kubenswrapper[4558]: I0120 17:14:59.552194 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-api-config-data" Jan 20 17:14:59 crc kubenswrapper[4558]: I0120 17:14:59.557560 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:14:59 crc kubenswrapper[4558]: I0120 17:14:59.721249 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:14:59 crc kubenswrapper[4558]: W0120 17:14:59.722756 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod70dda975_7e0b_40a4_a92a_675be53560b7.slice/crio-fe17107afca6886a9153cdacb46fecd00018063c05331260ddc6d05020f827a3 WatchSource:0}: Error finding container fe17107afca6886a9153cdacb46fecd00018063c05331260ddc6d05020f827a3: Status 404 returned error can't find the container with id fe17107afca6886a9153cdacb46fecd00018063c05331260ddc6d05020f827a3 Jan 20 17:14:59 crc kubenswrapper[4558]: I0120 17:14:59.732326 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5826b774-d151-468a-b24c-91f679dcd51e-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"5826b774-d151-468a-b24c-91f679dcd51e\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:59 crc kubenswrapper[4558]: I0120 17:14:59.732627 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5826b774-d151-468a-b24c-91f679dcd51e-config-data\") pod \"nova-api-0\" (UID: \"5826b774-d151-468a-b24c-91f679dcd51e\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:59 crc kubenswrapper[4558]: I0120 17:14:59.732704 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fdb66\" (UniqueName: \"kubernetes.io/projected/5826b774-d151-468a-b24c-91f679dcd51e-kube-api-access-fdb66\") pod \"nova-api-0\" (UID: \"5826b774-d151-468a-b24c-91f679dcd51e\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:59 crc kubenswrapper[4558]: I0120 17:14:59.732784 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5826b774-d151-468a-b24c-91f679dcd51e-logs\") pod \"nova-api-0\" (UID: \"5826b774-d151-468a-b24c-91f679dcd51e\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:59 crc 
kubenswrapper[4558]: I0120 17:14:59.834828 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5826b774-d151-468a-b24c-91f679dcd51e-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"5826b774-d151-468a-b24c-91f679dcd51e\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:59 crc kubenswrapper[4558]: I0120 17:14:59.834925 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5826b774-d151-468a-b24c-91f679dcd51e-config-data\") pod \"nova-api-0\" (UID: \"5826b774-d151-468a-b24c-91f679dcd51e\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:59 crc kubenswrapper[4558]: I0120 17:14:59.834961 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fdb66\" (UniqueName: \"kubernetes.io/projected/5826b774-d151-468a-b24c-91f679dcd51e-kube-api-access-fdb66\") pod \"nova-api-0\" (UID: \"5826b774-d151-468a-b24c-91f679dcd51e\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:59 crc kubenswrapper[4558]: I0120 17:14:59.834999 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5826b774-d151-468a-b24c-91f679dcd51e-logs\") pod \"nova-api-0\" (UID: \"5826b774-d151-468a-b24c-91f679dcd51e\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:59 crc kubenswrapper[4558]: I0120 17:14:59.835479 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5826b774-d151-468a-b24c-91f679dcd51e-logs\") pod \"nova-api-0\" (UID: \"5826b774-d151-468a-b24c-91f679dcd51e\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:59 crc kubenswrapper[4558]: I0120 17:14:59.840230 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5826b774-d151-468a-b24c-91f679dcd51e-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"5826b774-d151-468a-b24c-91f679dcd51e\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:59 crc kubenswrapper[4558]: I0120 17:14:59.840325 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5826b774-d151-468a-b24c-91f679dcd51e-config-data\") pod \"nova-api-0\" (UID: \"5826b774-d151-468a-b24c-91f679dcd51e\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:59 crc kubenswrapper[4558]: I0120 17:14:59.852185 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fdb66\" (UniqueName: \"kubernetes.io/projected/5826b774-d151-468a-b24c-91f679dcd51e-kube-api-access-fdb66\") pod \"nova-api-0\" (UID: \"5826b774-d151-468a-b24c-91f679dcd51e\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:59 crc kubenswrapper[4558]: I0120 17:14:59.880050 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:14:59 crc kubenswrapper[4558]: I0120 17:14:59.898730 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"70dda975-7e0b-40a4-a92a-675be53560b7","Type":"ContainerStarted","Data":"fe17107afca6886a9153cdacb46fecd00018063c05331260ddc6d05020f827a3"} Jan 20 17:15:00 crc kubenswrapper[4558]: I0120 17:15:00.143838 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29482155-xfwxm"] Jan 20 17:15:00 crc kubenswrapper[4558]: I0120 17:15:00.145405 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29482155-xfwxm" Jan 20 17:15:00 crc kubenswrapper[4558]: I0120 17:15:00.148626 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 20 17:15:00 crc kubenswrapper[4558]: I0120 17:15:00.148625 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 20 17:15:00 crc kubenswrapper[4558]: I0120 17:15:00.154037 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29482155-xfwxm"] Jan 20 17:15:00 crc kubenswrapper[4558]: I0120 17:15:00.245108 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mj7bb\" (UniqueName: \"kubernetes.io/projected/9080d637-d392-4532-9054-180e871834f0-kube-api-access-mj7bb\") pod \"collect-profiles-29482155-xfwxm\" (UID: \"9080d637-d392-4532-9054-180e871834f0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482155-xfwxm" Jan 20 17:15:00 crc kubenswrapper[4558]: I0120 17:15:00.245327 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9080d637-d392-4532-9054-180e871834f0-secret-volume\") pod \"collect-profiles-29482155-xfwxm\" (UID: \"9080d637-d392-4532-9054-180e871834f0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482155-xfwxm" Jan 20 17:15:00 crc kubenswrapper[4558]: I0120 17:15:00.245383 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9080d637-d392-4532-9054-180e871834f0-config-volume\") pod \"collect-profiles-29482155-xfwxm\" (UID: \"9080d637-d392-4532-9054-180e871834f0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482155-xfwxm" Jan 20 17:15:00 crc kubenswrapper[4558]: I0120 17:15:00.312470 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:15:00 crc kubenswrapper[4558]: W0120 17:15:00.312844 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5826b774_d151_468a_b24c_91f679dcd51e.slice/crio-d5c1fc96531926bf61084e4561a1e15ec8d6ae1a2575f76d438b0078466ab648 WatchSource:0}: Error finding container d5c1fc96531926bf61084e4561a1e15ec8d6ae1a2575f76d438b0078466ab648: Status 404 returned error can't find the container with id d5c1fc96531926bf61084e4561a1e15ec8d6ae1a2575f76d438b0078466ab648 Jan 20 17:15:00 crc kubenswrapper[4558]: I0120 17:15:00.347600 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"secret-volume\" (UniqueName: \"kubernetes.io/secret/9080d637-d392-4532-9054-180e871834f0-secret-volume\") pod \"collect-profiles-29482155-xfwxm\" (UID: \"9080d637-d392-4532-9054-180e871834f0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482155-xfwxm" Jan 20 17:15:00 crc kubenswrapper[4558]: I0120 17:15:00.347678 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9080d637-d392-4532-9054-180e871834f0-config-volume\") pod \"collect-profiles-29482155-xfwxm\" (UID: \"9080d637-d392-4532-9054-180e871834f0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482155-xfwxm" Jan 20 17:15:00 crc kubenswrapper[4558]: I0120 17:15:00.347752 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mj7bb\" (UniqueName: \"kubernetes.io/projected/9080d637-d392-4532-9054-180e871834f0-kube-api-access-mj7bb\") pod \"collect-profiles-29482155-xfwxm\" (UID: \"9080d637-d392-4532-9054-180e871834f0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482155-xfwxm" Jan 20 17:15:00 crc kubenswrapper[4558]: I0120 17:15:00.348781 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9080d637-d392-4532-9054-180e871834f0-config-volume\") pod \"collect-profiles-29482155-xfwxm\" (UID: \"9080d637-d392-4532-9054-180e871834f0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482155-xfwxm" Jan 20 17:15:00 crc kubenswrapper[4558]: I0120 17:15:00.351733 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9080d637-d392-4532-9054-180e871834f0-secret-volume\") pod \"collect-profiles-29482155-xfwxm\" (UID: \"9080d637-d392-4532-9054-180e871834f0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482155-xfwxm" Jan 20 17:15:00 crc kubenswrapper[4558]: I0120 17:15:00.360778 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mj7bb\" (UniqueName: \"kubernetes.io/projected/9080d637-d392-4532-9054-180e871834f0-kube-api-access-mj7bb\") pod \"collect-profiles-29482155-xfwxm\" (UID: \"9080d637-d392-4532-9054-180e871834f0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482155-xfwxm" Jan 20 17:15:00 crc kubenswrapper[4558]: I0120 17:15:00.467119 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29482155-xfwxm" Jan 20 17:15:00 crc kubenswrapper[4558]: I0120 17:15:00.581046 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="412b9dee-e1e1-464b-9e5b-333df0634be7" path="/var/lib/kubelet/pods/412b9dee-e1e1-464b-9e5b-333df0634be7/volumes" Jan 20 17:15:00 crc kubenswrapper[4558]: I0120 17:15:00.582858 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5f2954ae-ca63-4cbe-963c-5721cdc086ed" path="/var/lib/kubelet/pods/5f2954ae-ca63-4cbe-963c-5721cdc086ed/volumes" Jan 20 17:15:00 crc kubenswrapper[4558]: I0120 17:15:00.883047 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29482155-xfwxm"] Jan 20 17:15:00 crc kubenswrapper[4558]: W0120 17:15:00.883322 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9080d637_d392_4532_9054_180e871834f0.slice/crio-f7a17673e479e95f97cee3c4ec66e462dc5cfccd4b32dc686a8cb8109a2f6345 WatchSource:0}: Error finding container f7a17673e479e95f97cee3c4ec66e462dc5cfccd4b32dc686a8cb8109a2f6345: Status 404 returned error can't find the container with id f7a17673e479e95f97cee3c4ec66e462dc5cfccd4b32dc686a8cb8109a2f6345 Jan 20 17:15:00 crc kubenswrapper[4558]: I0120 17:15:00.918943 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29482155-xfwxm" event={"ID":"9080d637-d392-4532-9054-180e871834f0","Type":"ContainerStarted","Data":"f7a17673e479e95f97cee3c4ec66e462dc5cfccd4b32dc686a8cb8109a2f6345"} Jan 20 17:15:00 crc kubenswrapper[4558]: I0120 17:15:00.921179 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"5826b774-d151-468a-b24c-91f679dcd51e","Type":"ContainerStarted","Data":"819240a126d240c13f523976836647677409b57b3e0cb405a5f882b973ba6d0d"} Jan 20 17:15:00 crc kubenswrapper[4558]: I0120 17:15:00.921208 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"5826b774-d151-468a-b24c-91f679dcd51e","Type":"ContainerStarted","Data":"233f1664fba406e964e76cc4bcc4a609d18242e49e46e8d8828fefb0c7a70bdd"} Jan 20 17:15:00 crc kubenswrapper[4558]: I0120 17:15:00.921221 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"5826b774-d151-468a-b24c-91f679dcd51e","Type":"ContainerStarted","Data":"d5c1fc96531926bf61084e4561a1e15ec8d6ae1a2575f76d438b0078466ab648"} Jan 20 17:15:00 crc kubenswrapper[4558]: I0120 17:15:00.924096 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"70dda975-7e0b-40a4-a92a-675be53560b7","Type":"ContainerStarted","Data":"4d6bd322347a5bd72fd3102a79147d9af8b5b4497067a83772ce8835798883b0"} Jan 20 17:15:00 crc kubenswrapper[4558]: I0120 17:15:00.924140 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"70dda975-7e0b-40a4-a92a-675be53560b7","Type":"ContainerStarted","Data":"dc90c73cc956157f0d2205b7cbb05b8859c8423df552df5ea7b67e88690ae584"} Jan 20 17:15:00 crc kubenswrapper[4558]: I0120 17:15:00.952368 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-api-0" podStartSLOduration=1.952350528 podStartE2EDuration="1.952350528s" podCreationTimestamp="2026-01-20 17:14:59 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:15:00.936471959 +0000 UTC m=+1994.696809926" watchObservedRunningTime="2026-01-20 17:15:00.952350528 +0000 UTC m=+1994.712688494" Jan 20 17:15:00 crc kubenswrapper[4558]: I0120 17:15:00.967793 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-metadata-0" podStartSLOduration=2.9677763539999997 podStartE2EDuration="2.967776354s" podCreationTimestamp="2026-01-20 17:14:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:15:00.953629371 +0000 UTC m=+1994.713967338" watchObservedRunningTime="2026-01-20 17:15:00.967776354 +0000 UTC m=+1994.728114321" Jan 20 17:15:01 crc kubenswrapper[4558]: I0120 17:15:01.936932 4558 generic.go:334] "Generic (PLEG): container finished" podID="9080d637-d392-4532-9054-180e871834f0" containerID="190e0e153cb7f3ebee2da00cdb98b714cac30a72af17afd576aa0adf5db3eec6" exitCode=0 Jan 20 17:15:01 crc kubenswrapper[4558]: I0120 17:15:01.936974 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29482155-xfwxm" event={"ID":"9080d637-d392-4532-9054-180e871834f0","Type":"ContainerDied","Data":"190e0e153cb7f3ebee2da00cdb98b714cac30a72af17afd576aa0adf5db3eec6"} Jan 20 17:15:02 crc kubenswrapper[4558]: I0120 17:15:02.428226 4558 scope.go:117] "RemoveContainer" containerID="0919b57f07a307b47bd20d28cd7d906c293aab7392a04d683e1852a092fb578d" Jan 20 17:15:02 crc kubenswrapper[4558]: I0120 17:15:02.448811 4558 scope.go:117] "RemoveContainer" containerID="39a1bec43e337cd8f6f2ae1d32fb36f33f86238fbb2652fd1c16b14026a92e2f" Jan 20 17:15:02 crc kubenswrapper[4558]: I0120 17:15:02.488650 4558 scope.go:117] "RemoveContainer" containerID="5f7af9fba2acb372e7c8a94d678b4fd70c8bec57360ab8d0714e1148d95ff99d" Jan 20 17:15:02 crc kubenswrapper[4558]: I0120 17:15:02.515732 4558 scope.go:117] "RemoveContainer" containerID="de69b0acdf13d9e2cff3bfba3e51a4b8eabd7d2214a17cc35a6defefc441f40e" Jan 20 17:15:02 crc kubenswrapper[4558]: I0120 17:15:02.546259 4558 scope.go:117] "RemoveContainer" containerID="afedf4808c1da5494a40e8abc086d0c1477f592d1ddd8d03ca1bec56382acdc7" Jan 20 17:15:02 crc kubenswrapper[4558]: I0120 17:15:02.579065 4558 scope.go:117] "RemoveContainer" containerID="a7da9ff56865b4a8a4aa6ba635fdc5e7c3e7c9c58b59113258f123b080d7a960" Jan 20 17:15:02 crc kubenswrapper[4558]: I0120 17:15:02.592488 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:15:02 crc kubenswrapper[4558]: I0120 17:15:02.628671 4558 scope.go:117] "RemoveContainer" containerID="6b61207025457ad07d1c5aba4ae82e5f70fd7063c9e90607164afad832616b9e" Jan 20 17:15:02 crc kubenswrapper[4558]: I0120 17:15:02.720278 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bmphj\" (UniqueName: \"kubernetes.io/projected/ae67b718-71bb-4136-a699-8782a9096f4c-kube-api-access-bmphj\") pod \"ae67b718-71bb-4136-a699-8782a9096f4c\" (UID: \"ae67b718-71bb-4136-a699-8782a9096f4c\") " Jan 20 17:15:02 crc kubenswrapper[4558]: I0120 17:15:02.720592 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae67b718-71bb-4136-a699-8782a9096f4c-combined-ca-bundle\") pod \"ae67b718-71bb-4136-a699-8782a9096f4c\" (UID: \"ae67b718-71bb-4136-a699-8782a9096f4c\") " Jan 20 17:15:02 crc kubenswrapper[4558]: I0120 17:15:02.720736 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae67b718-71bb-4136-a699-8782a9096f4c-config-data\") pod \"ae67b718-71bb-4136-a699-8782a9096f4c\" (UID: \"ae67b718-71bb-4136-a699-8782a9096f4c\") " Jan 20 17:15:02 crc kubenswrapper[4558]: I0120 17:15:02.727215 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ae67b718-71bb-4136-a699-8782a9096f4c-kube-api-access-bmphj" (OuterVolumeSpecName: "kube-api-access-bmphj") pod "ae67b718-71bb-4136-a699-8782a9096f4c" (UID: "ae67b718-71bb-4136-a699-8782a9096f4c"). InnerVolumeSpecName "kube-api-access-bmphj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:15:02 crc kubenswrapper[4558]: I0120 17:15:02.746598 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae67b718-71bb-4136-a699-8782a9096f4c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ae67b718-71bb-4136-a699-8782a9096f4c" (UID: "ae67b718-71bb-4136-a699-8782a9096f4c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:15:02 crc kubenswrapper[4558]: I0120 17:15:02.747047 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae67b718-71bb-4136-a699-8782a9096f4c-config-data" (OuterVolumeSpecName: "config-data") pod "ae67b718-71bb-4136-a699-8782a9096f4c" (UID: "ae67b718-71bb-4136-a699-8782a9096f4c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:15:02 crc kubenswrapper[4558]: I0120 17:15:02.823417 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae67b718-71bb-4136-a699-8782a9096f4c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:15:02 crc kubenswrapper[4558]: I0120 17:15:02.823453 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae67b718-71bb-4136-a699-8782a9096f4c-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:15:02 crc kubenswrapper[4558]: I0120 17:15:02.823465 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bmphj\" (UniqueName: \"kubernetes.io/projected/ae67b718-71bb-4136-a699-8782a9096f4c-kube-api-access-bmphj\") on node \"crc\" DevicePath \"\"" Jan 20 17:15:02 crc kubenswrapper[4558]: I0120 17:15:02.949268 4558 generic.go:334] "Generic (PLEG): container finished" podID="ae67b718-71bb-4136-a699-8782a9096f4c" containerID="6026ba63914f7680778f2f25afac4b99568c6f14063d1d42d1eeb3493064d57e" exitCode=137 Jan 20 17:15:02 crc kubenswrapper[4558]: I0120 17:15:02.949342 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:15:02 crc kubenswrapper[4558]: I0120 17:15:02.949332 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"ae67b718-71bb-4136-a699-8782a9096f4c","Type":"ContainerDied","Data":"6026ba63914f7680778f2f25afac4b99568c6f14063d1d42d1eeb3493064d57e"} Jan 20 17:15:02 crc kubenswrapper[4558]: I0120 17:15:02.949651 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"ae67b718-71bb-4136-a699-8782a9096f4c","Type":"ContainerDied","Data":"fe8251362332809196e047fce882cc337bd1d9c848056c7f5b2d9243fe4207bd"} Jan 20 17:15:02 crc kubenswrapper[4558]: I0120 17:15:02.949701 4558 scope.go:117] "RemoveContainer" containerID="6026ba63914f7680778f2f25afac4b99568c6f14063d1d42d1eeb3493064d57e" Jan 20 17:15:02 crc kubenswrapper[4558]: I0120 17:15:02.984269 4558 scope.go:117] "RemoveContainer" containerID="6026ba63914f7680778f2f25afac4b99568c6f14063d1d42d1eeb3493064d57e" Jan 20 17:15:02 crc kubenswrapper[4558]: E0120 17:15:02.984734 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6026ba63914f7680778f2f25afac4b99568c6f14063d1d42d1eeb3493064d57e\": container with ID starting with 6026ba63914f7680778f2f25afac4b99568c6f14063d1d42d1eeb3493064d57e not found: ID does not exist" containerID="6026ba63914f7680778f2f25afac4b99568c6f14063d1d42d1eeb3493064d57e" Jan 20 17:15:02 crc kubenswrapper[4558]: I0120 17:15:02.984864 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6026ba63914f7680778f2f25afac4b99568c6f14063d1d42d1eeb3493064d57e"} err="failed to get container status \"6026ba63914f7680778f2f25afac4b99568c6f14063d1d42d1eeb3493064d57e\": rpc error: code = NotFound desc = could not find container \"6026ba63914f7680778f2f25afac4b99568c6f14063d1d42d1eeb3493064d57e\": container with ID starting with 6026ba63914f7680778f2f25afac4b99568c6f14063d1d42d1eeb3493064d57e not found: ID does not exist" Jan 20 17:15:02 crc kubenswrapper[4558]: I0120 17:15:02.987644 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:15:03 crc 
kubenswrapper[4558]: I0120 17:15:03.003367 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:15:03 crc kubenswrapper[4558]: I0120 17:15:03.015305 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:15:03 crc kubenswrapper[4558]: E0120 17:15:03.015870 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae67b718-71bb-4136-a699-8782a9096f4c" containerName="nova-scheduler-scheduler" Jan 20 17:15:03 crc kubenswrapper[4558]: I0120 17:15:03.015893 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae67b718-71bb-4136-a699-8782a9096f4c" containerName="nova-scheduler-scheduler" Jan 20 17:15:03 crc kubenswrapper[4558]: I0120 17:15:03.016106 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ae67b718-71bb-4136-a699-8782a9096f4c" containerName="nova-scheduler-scheduler" Jan 20 17:15:03 crc kubenswrapper[4558]: I0120 17:15:03.016924 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:15:03 crc kubenswrapper[4558]: I0120 17:15:03.019191 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-scheduler-config-data" Jan 20 17:15:03 crc kubenswrapper[4558]: I0120 17:15:03.027545 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:15:03 crc kubenswrapper[4558]: I0120 17:15:03.030579 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4cb69c1-3d51-4b13-a025-b37c986ffede-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"c4cb69c1-3d51-4b13-a025-b37c986ffede\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:15:03 crc kubenswrapper[4558]: I0120 17:15:03.030668 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tfkzf\" (UniqueName: \"kubernetes.io/projected/c4cb69c1-3d51-4b13-a025-b37c986ffede-kube-api-access-tfkzf\") pod \"nova-scheduler-0\" (UID: \"c4cb69c1-3d51-4b13-a025-b37c986ffede\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:15:03 crc kubenswrapper[4558]: I0120 17:15:03.030708 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c4cb69c1-3d51-4b13-a025-b37c986ffede-config-data\") pod \"nova-scheduler-0\" (UID: \"c4cb69c1-3d51-4b13-a025-b37c986ffede\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:15:03 crc kubenswrapper[4558]: I0120 17:15:03.132131 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4cb69c1-3d51-4b13-a025-b37c986ffede-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"c4cb69c1-3d51-4b13-a025-b37c986ffede\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:15:03 crc kubenswrapper[4558]: I0120 17:15:03.132269 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tfkzf\" (UniqueName: \"kubernetes.io/projected/c4cb69c1-3d51-4b13-a025-b37c986ffede-kube-api-access-tfkzf\") pod \"nova-scheduler-0\" (UID: \"c4cb69c1-3d51-4b13-a025-b37c986ffede\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:15:03 crc kubenswrapper[4558]: I0120 17:15:03.132309 4558 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c4cb69c1-3d51-4b13-a025-b37c986ffede-config-data\") pod \"nova-scheduler-0\" (UID: \"c4cb69c1-3d51-4b13-a025-b37c986ffede\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:15:03 crc kubenswrapper[4558]: I0120 17:15:03.137731 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4cb69c1-3d51-4b13-a025-b37c986ffede-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"c4cb69c1-3d51-4b13-a025-b37c986ffede\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:15:03 crc kubenswrapper[4558]: I0120 17:15:03.138911 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c4cb69c1-3d51-4b13-a025-b37c986ffede-config-data\") pod \"nova-scheduler-0\" (UID: \"c4cb69c1-3d51-4b13-a025-b37c986ffede\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:15:03 crc kubenswrapper[4558]: I0120 17:15:03.146342 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tfkzf\" (UniqueName: \"kubernetes.io/projected/c4cb69c1-3d51-4b13-a025-b37c986ffede-kube-api-access-tfkzf\") pod \"nova-scheduler-0\" (UID: \"c4cb69c1-3d51-4b13-a025-b37c986ffede\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:15:03 crc kubenswrapper[4558]: I0120 17:15:03.212575 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29482155-xfwxm" Jan 20 17:15:03 crc kubenswrapper[4558]: I0120 17:15:03.335271 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9080d637-d392-4532-9054-180e871834f0-config-volume\") pod \"9080d637-d392-4532-9054-180e871834f0\" (UID: \"9080d637-d392-4532-9054-180e871834f0\") " Jan 20 17:15:03 crc kubenswrapper[4558]: I0120 17:15:03.335343 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mj7bb\" (UniqueName: \"kubernetes.io/projected/9080d637-d392-4532-9054-180e871834f0-kube-api-access-mj7bb\") pod \"9080d637-d392-4532-9054-180e871834f0\" (UID: \"9080d637-d392-4532-9054-180e871834f0\") " Jan 20 17:15:03 crc kubenswrapper[4558]: I0120 17:15:03.335504 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9080d637-d392-4532-9054-180e871834f0-secret-volume\") pod \"9080d637-d392-4532-9054-180e871834f0\" (UID: \"9080d637-d392-4532-9054-180e871834f0\") " Jan 20 17:15:03 crc kubenswrapper[4558]: I0120 17:15:03.336230 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9080d637-d392-4532-9054-180e871834f0-config-volume" (OuterVolumeSpecName: "config-volume") pod "9080d637-d392-4532-9054-180e871834f0" (UID: "9080d637-d392-4532-9054-180e871834f0"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:15:03 crc kubenswrapper[4558]: I0120 17:15:03.336706 4558 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9080d637-d392-4532-9054-180e871834f0-config-volume\") on node \"crc\" DevicePath \"\"" Jan 20 17:15:03 crc kubenswrapper[4558]: I0120 17:15:03.338850 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:15:03 crc kubenswrapper[4558]: I0120 17:15:03.339420 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9080d637-d392-4532-9054-180e871834f0-kube-api-access-mj7bb" (OuterVolumeSpecName: "kube-api-access-mj7bb") pod "9080d637-d392-4532-9054-180e871834f0" (UID: "9080d637-d392-4532-9054-180e871834f0"). InnerVolumeSpecName "kube-api-access-mj7bb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:15:03 crc kubenswrapper[4558]: I0120 17:15:03.340072 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9080d637-d392-4532-9054-180e871834f0-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "9080d637-d392-4532-9054-180e871834f0" (UID: "9080d637-d392-4532-9054-180e871834f0"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:15:03 crc kubenswrapper[4558]: I0120 17:15:03.438197 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mj7bb\" (UniqueName: \"kubernetes.io/projected/9080d637-d392-4532-9054-180e871834f0-kube-api-access-mj7bb\") on node \"crc\" DevicePath \"\"" Jan 20 17:15:03 crc kubenswrapper[4558]: I0120 17:15:03.438228 4558 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/9080d637-d392-4532-9054-180e871834f0-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 20 17:15:03 crc kubenswrapper[4558]: I0120 17:15:03.736991 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:15:03 crc kubenswrapper[4558]: I0120 17:15:03.963968 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29482155-xfwxm" Jan 20 17:15:03 crc kubenswrapper[4558]: I0120 17:15:03.963967 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29482155-xfwxm" event={"ID":"9080d637-d392-4532-9054-180e871834f0","Type":"ContainerDied","Data":"f7a17673e479e95f97cee3c4ec66e462dc5cfccd4b32dc686a8cb8109a2f6345"} Jan 20 17:15:03 crc kubenswrapper[4558]: I0120 17:15:03.964531 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f7a17673e479e95f97cee3c4ec66e462dc5cfccd4b32dc686a8cb8109a2f6345" Jan 20 17:15:03 crc kubenswrapper[4558]: I0120 17:15:03.966216 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"c4cb69c1-3d51-4b13-a025-b37c986ffede","Type":"ContainerStarted","Data":"022773dc93b3f1b894358c04d0057cb8d8780ca1ef654626b973b40167c3532e"} Jan 20 17:15:03 crc kubenswrapper[4558]: I0120 17:15:03.966265 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"c4cb69c1-3d51-4b13-a025-b37c986ffede","Type":"ContainerStarted","Data":"3c735e96acc0786f4271c1844dfa0a1b160606a4ed024ea4da415c596f59c86d"} Jan 20 17:15:03 crc kubenswrapper[4558]: I0120 17:15:03.988923 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-scheduler-0" podStartSLOduration=1.9889037539999999 podStartE2EDuration="1.988903754s" podCreationTimestamp="2026-01-20 17:15:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:15:03.982342135 +0000 UTC m=+1997.742680103" watchObservedRunningTime="2026-01-20 17:15:03.988903754 +0000 UTC m=+1997.749241721" Jan 20 17:15:04 crc kubenswrapper[4558]: I0120 17:15:04.273709 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29482110-7p5pb"] Jan 20 17:15:04 crc kubenswrapper[4558]: I0120 17:15:04.279803 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29482110-7p5pb"] Jan 20 17:15:04 crc kubenswrapper[4558]: I0120 17:15:04.291702 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:15:04 crc kubenswrapper[4558]: I0120 17:15:04.291739 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:15:04 crc kubenswrapper[4558]: I0120 17:15:04.581985 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31eb233b-13fe-40cb-9259-f18574a85e01" path="/var/lib/kubelet/pods/31eb233b-13fe-40cb-9259-f18574a85e01/volumes" Jan 20 17:15:04 crc kubenswrapper[4558]: I0120 17:15:04.585361 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ae67b718-71bb-4136-a699-8782a9096f4c" path="/var/lib/kubelet/pods/ae67b718-71bb-4136-a699-8782a9096f4c/volumes" Jan 20 17:15:07 crc kubenswrapper[4558]: I0120 17:15:07.226021 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:08 crc kubenswrapper[4558]: I0120 17:15:08.339679 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:15:09 crc kubenswrapper[4558]: I0120 17:15:09.292075 4558 kubelet.go:2542] 
"SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:15:09 crc kubenswrapper[4558]: I0120 17:15:09.292607 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:15:09 crc kubenswrapper[4558]: I0120 17:15:09.881230 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:15:09 crc kubenswrapper[4558]: I0120 17:15:09.881299 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:15:10 crc kubenswrapper[4558]: I0120 17:15:10.304306 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-metadata-0" podUID="70dda975-7e0b-40a4-a92a-675be53560b7" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.95:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:15:10 crc kubenswrapper[4558]: I0120 17:15:10.304336 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-metadata-0" podUID="70dda975-7e0b-40a4-a92a-675be53560b7" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.95:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:15:10 crc kubenswrapper[4558]: I0120 17:15:10.548111 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:15:10 crc kubenswrapper[4558]: I0120 17:15:10.548336 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/kube-state-metrics-0" podUID="9d1e8534-0277-43f6-a3be-2c3985853e6e" containerName="kube-state-metrics" containerID="cri-o://dcb0fe6875233f2fdf58a3e6c4e56531163a5e8c5098db78a1bf54da8f0a72d8" gracePeriod=30 Jan 20 17:15:10 crc kubenswrapper[4558]: I0120 17:15:10.964321 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" podUID="5826b774-d151-468a-b24c-91f679dcd51e" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.96:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 20 17:15:10 crc kubenswrapper[4558]: I0120 17:15:10.964372 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" podUID="5826b774-d151-468a-b24c-91f679dcd51e" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.96:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 20 17:15:11 crc kubenswrapper[4558]: I0120 17:15:11.022227 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:15:11 crc kubenswrapper[4558]: I0120 17:15:11.025287 4558 generic.go:334] "Generic (PLEG): container finished" podID="9d1e8534-0277-43f6-a3be-2c3985853e6e" containerID="dcb0fe6875233f2fdf58a3e6c4e56531163a5e8c5098db78a1bf54da8f0a72d8" exitCode=2 Jan 20 17:15:11 crc kubenswrapper[4558]: I0120 17:15:11.025333 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"9d1e8534-0277-43f6-a3be-2c3985853e6e","Type":"ContainerDied","Data":"dcb0fe6875233f2fdf58a3e6c4e56531163a5e8c5098db78a1bf54da8f0a72d8"} Jan 20 17:15:11 crc kubenswrapper[4558]: I0120 17:15:11.025360 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"9d1e8534-0277-43f6-a3be-2c3985853e6e","Type":"ContainerDied","Data":"8de19a498ecd504f295b063b1f621f22dacf5039e0bb2f6cef8d786cdb21428b"} Jan 20 17:15:11 crc kubenswrapper[4558]: I0120 17:15:11.025378 4558 scope.go:117] "RemoveContainer" containerID="dcb0fe6875233f2fdf58a3e6c4e56531163a5e8c5098db78a1bf54da8f0a72d8" Jan 20 17:15:11 crc kubenswrapper[4558]: I0120 17:15:11.025510 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:15:11 crc kubenswrapper[4558]: I0120 17:15:11.054813 4558 scope.go:117] "RemoveContainer" containerID="dcb0fe6875233f2fdf58a3e6c4e56531163a5e8c5098db78a1bf54da8f0a72d8" Jan 20 17:15:11 crc kubenswrapper[4558]: E0120 17:15:11.055178 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dcb0fe6875233f2fdf58a3e6c4e56531163a5e8c5098db78a1bf54da8f0a72d8\": container with ID starting with dcb0fe6875233f2fdf58a3e6c4e56531163a5e8c5098db78a1bf54da8f0a72d8 not found: ID does not exist" containerID="dcb0fe6875233f2fdf58a3e6c4e56531163a5e8c5098db78a1bf54da8f0a72d8" Jan 20 17:15:11 crc kubenswrapper[4558]: I0120 17:15:11.055214 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dcb0fe6875233f2fdf58a3e6c4e56531163a5e8c5098db78a1bf54da8f0a72d8"} err="failed to get container status \"dcb0fe6875233f2fdf58a3e6c4e56531163a5e8c5098db78a1bf54da8f0a72d8\": rpc error: code = NotFound desc = could not find container \"dcb0fe6875233f2fdf58a3e6c4e56531163a5e8c5098db78a1bf54da8f0a72d8\": container with ID starting with dcb0fe6875233f2fdf58a3e6c4e56531163a5e8c5098db78a1bf54da8f0a72d8 not found: ID does not exist" Jan 20 17:15:11 crc kubenswrapper[4558]: I0120 17:15:11.080646 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k5w95\" (UniqueName: \"kubernetes.io/projected/9d1e8534-0277-43f6-a3be-2c3985853e6e-kube-api-access-k5w95\") pod \"9d1e8534-0277-43f6-a3be-2c3985853e6e\" (UID: \"9d1e8534-0277-43f6-a3be-2c3985853e6e\") " Jan 20 17:15:11 crc kubenswrapper[4558]: I0120 17:15:11.099512 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d1e8534-0277-43f6-a3be-2c3985853e6e-kube-api-access-k5w95" (OuterVolumeSpecName: "kube-api-access-k5w95") pod "9d1e8534-0277-43f6-a3be-2c3985853e6e" (UID: "9d1e8534-0277-43f6-a3be-2c3985853e6e"). InnerVolumeSpecName "kube-api-access-k5w95". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:15:11 crc kubenswrapper[4558]: I0120 17:15:11.184202 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k5w95\" (UniqueName: \"kubernetes.io/projected/9d1e8534-0277-43f6-a3be-2c3985853e6e-kube-api-access-k5w95\") on node \"crc\" DevicePath \"\"" Jan 20 17:15:11 crc kubenswrapper[4558]: I0120 17:15:11.358152 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:15:11 crc kubenswrapper[4558]: I0120 17:15:11.364459 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:15:11 crc kubenswrapper[4558]: I0120 17:15:11.393801 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:15:11 crc kubenswrapper[4558]: E0120 17:15:11.395237 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d1e8534-0277-43f6-a3be-2c3985853e6e" containerName="kube-state-metrics" Jan 20 17:15:11 crc kubenswrapper[4558]: I0120 17:15:11.395270 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d1e8534-0277-43f6-a3be-2c3985853e6e" containerName="kube-state-metrics" Jan 20 17:15:11 crc kubenswrapper[4558]: E0120 17:15:11.395305 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9080d637-d392-4532-9054-180e871834f0" containerName="collect-profiles" Jan 20 17:15:11 crc kubenswrapper[4558]: I0120 17:15:11.395315 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9080d637-d392-4532-9054-180e871834f0" containerName="collect-profiles" Jan 20 17:15:11 crc kubenswrapper[4558]: I0120 17:15:11.395854 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="9080d637-d392-4532-9054-180e871834f0" containerName="collect-profiles" Jan 20 17:15:11 crc kubenswrapper[4558]: I0120 17:15:11.395902 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="9d1e8534-0277-43f6-a3be-2c3985853e6e" containerName="kube-state-metrics" Jan 20 17:15:11 crc kubenswrapper[4558]: I0120 17:15:11.397609 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:15:11 crc kubenswrapper[4558]: I0120 17:15:11.402698 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"kube-state-metrics-tls-config" Jan 20 17:15:11 crc kubenswrapper[4558]: I0120 17:15:11.403913 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-kube-state-metrics-svc" Jan 20 17:15:11 crc kubenswrapper[4558]: I0120 17:15:11.431227 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:15:11 crc kubenswrapper[4558]: I0120 17:15:11.508821 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6947f138-6d2f-40e2-a236-736d26d3a1e6-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"6947f138-6d2f-40e2-a236-736d26d3a1e6\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:15:11 crc kubenswrapper[4558]: I0120 17:15:11.508898 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/6947f138-6d2f-40e2-a236-736d26d3a1e6-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"6947f138-6d2f-40e2-a236-736d26d3a1e6\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:15:11 crc kubenswrapper[4558]: I0120 17:15:11.509249 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/6947f138-6d2f-40e2-a236-736d26d3a1e6-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"6947f138-6d2f-40e2-a236-736d26d3a1e6\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:15:11 crc kubenswrapper[4558]: I0120 17:15:11.509287 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tzm5k\" (UniqueName: \"kubernetes.io/projected/6947f138-6d2f-40e2-a236-736d26d3a1e6-kube-api-access-tzm5k\") pod \"kube-state-metrics-0\" (UID: \"6947f138-6d2f-40e2-a236-736d26d3a1e6\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:15:11 crc kubenswrapper[4558]: I0120 17:15:11.613982 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/6947f138-6d2f-40e2-a236-736d26d3a1e6-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"6947f138-6d2f-40e2-a236-736d26d3a1e6\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:15:11 crc kubenswrapper[4558]: I0120 17:15:11.614718 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tzm5k\" (UniqueName: \"kubernetes.io/projected/6947f138-6d2f-40e2-a236-736d26d3a1e6-kube-api-access-tzm5k\") pod \"kube-state-metrics-0\" (UID: \"6947f138-6d2f-40e2-a236-736d26d3a1e6\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:15:11 crc kubenswrapper[4558]: I0120 17:15:11.614829 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6947f138-6d2f-40e2-a236-736d26d3a1e6-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"6947f138-6d2f-40e2-a236-736d26d3a1e6\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:15:11 crc kubenswrapper[4558]: I0120 
17:15:11.614897 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/6947f138-6d2f-40e2-a236-736d26d3a1e6-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"6947f138-6d2f-40e2-a236-736d26d3a1e6\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:15:11 crc kubenswrapper[4558]: I0120 17:15:11.620853 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/6947f138-6d2f-40e2-a236-736d26d3a1e6-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"6947f138-6d2f-40e2-a236-736d26d3a1e6\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:15:11 crc kubenswrapper[4558]: I0120 17:15:11.630202 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/6947f138-6d2f-40e2-a236-736d26d3a1e6-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"6947f138-6d2f-40e2-a236-736d26d3a1e6\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:15:11 crc kubenswrapper[4558]: I0120 17:15:11.630840 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6947f138-6d2f-40e2-a236-736d26d3a1e6-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"6947f138-6d2f-40e2-a236-736d26d3a1e6\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:15:11 crc kubenswrapper[4558]: I0120 17:15:11.638325 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tzm5k\" (UniqueName: \"kubernetes.io/projected/6947f138-6d2f-40e2-a236-736d26d3a1e6-kube-api-access-tzm5k\") pod \"kube-state-metrics-0\" (UID: \"6947f138-6d2f-40e2-a236-736d26d3a1e6\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:15:11 crc kubenswrapper[4558]: I0120 17:15:11.725809 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:15:12 crc kubenswrapper[4558]: I0120 17:15:12.061544 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:15:12 crc kubenswrapper[4558]: I0120 17:15:12.062250 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="9b48e62d-af6d-4eb9-b491-766f773de980" containerName="sg-core" containerID="cri-o://7c73bd509e0c42d2ec4674301759d5ad520f1bb21ab0ec927d4c4aafc592d56c" gracePeriod=30 Jan 20 17:15:12 crc kubenswrapper[4558]: I0120 17:15:12.062412 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="9b48e62d-af6d-4eb9-b491-766f773de980" containerName="proxy-httpd" containerID="cri-o://bf15066af68c4f42d89667ee3f7871a7e42a87e7e5fe77fc9e8a69ddd83cf2f7" gracePeriod=30 Jan 20 17:15:12 crc kubenswrapper[4558]: I0120 17:15:12.062447 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="9b48e62d-af6d-4eb9-b491-766f773de980" containerName="ceilometer-notification-agent" containerID="cri-o://4e41ea0254e75b461a1547d95c8aac810d14af69ad864a1c7606e805b4ce37ce" gracePeriod=30 Jan 20 17:15:12 crc kubenswrapper[4558]: I0120 17:15:12.062107 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="9b48e62d-af6d-4eb9-b491-766f773de980" containerName="ceilometer-central-agent" containerID="cri-o://6493127d7274540c9787e4ceef28212ffed209d34f09092dfba89ffe109e8cc7" gracePeriod=30 Jan 20 17:15:12 crc kubenswrapper[4558]: I0120 17:15:12.124623 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:15:12 crc kubenswrapper[4558]: I0120 17:15:12.577091 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d1e8534-0277-43f6-a3be-2c3985853e6e" path="/var/lib/kubelet/pods/9d1e8534-0277-43f6-a3be-2c3985853e6e/volumes" Jan 20 17:15:13 crc kubenswrapper[4558]: I0120 17:15:13.053143 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"6947f138-6d2f-40e2-a236-736d26d3a1e6","Type":"ContainerStarted","Data":"3019d79f9b53d96500bf161bbfffaf2fd1e86e0b8753d308b473cc6f0550db6e"} Jan 20 17:15:13 crc kubenswrapper[4558]: I0120 17:15:13.053231 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"6947f138-6d2f-40e2-a236-736d26d3a1e6","Type":"ContainerStarted","Data":"b4819a707a14b0b7318668e9392277556e80c25680c1b4bf1615db3adc4f5c57"} Jan 20 17:15:13 crc kubenswrapper[4558]: I0120 17:15:13.053550 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:15:13 crc kubenswrapper[4558]: I0120 17:15:13.055954 4558 generic.go:334] "Generic (PLEG): container finished" podID="9b48e62d-af6d-4eb9-b491-766f773de980" containerID="bf15066af68c4f42d89667ee3f7871a7e42a87e7e5fe77fc9e8a69ddd83cf2f7" exitCode=0 Jan 20 17:15:13 crc kubenswrapper[4558]: I0120 17:15:13.055989 4558 generic.go:334] "Generic (PLEG): container finished" podID="9b48e62d-af6d-4eb9-b491-766f773de980" containerID="7c73bd509e0c42d2ec4674301759d5ad520f1bb21ab0ec927d4c4aafc592d56c" exitCode=2 Jan 20 17:15:13 crc kubenswrapper[4558]: I0120 17:15:13.055999 4558 generic.go:334] "Generic (PLEG): container finished" 
podID="9b48e62d-af6d-4eb9-b491-766f773de980" containerID="6493127d7274540c9787e4ceef28212ffed209d34f09092dfba89ffe109e8cc7" exitCode=0 Jan 20 17:15:13 crc kubenswrapper[4558]: I0120 17:15:13.056025 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"9b48e62d-af6d-4eb9-b491-766f773de980","Type":"ContainerDied","Data":"bf15066af68c4f42d89667ee3f7871a7e42a87e7e5fe77fc9e8a69ddd83cf2f7"} Jan 20 17:15:13 crc kubenswrapper[4558]: I0120 17:15:13.056053 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"9b48e62d-af6d-4eb9-b491-766f773de980","Type":"ContainerDied","Data":"7c73bd509e0c42d2ec4674301759d5ad520f1bb21ab0ec927d4c4aafc592d56c"} Jan 20 17:15:13 crc kubenswrapper[4558]: I0120 17:15:13.056064 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"9b48e62d-af6d-4eb9-b491-766f773de980","Type":"ContainerDied","Data":"6493127d7274540c9787e4ceef28212ffed209d34f09092dfba89ffe109e8cc7"} Jan 20 17:15:13 crc kubenswrapper[4558]: I0120 17:15:13.079181 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/kube-state-metrics-0" podStartSLOduration=1.742686558 podStartE2EDuration="2.079142686s" podCreationTimestamp="2026-01-20 17:15:11 +0000 UTC" firstStartedPulling="2026-01-20 17:15:12.134776225 +0000 UTC m=+2005.895114192" lastFinishedPulling="2026-01-20 17:15:12.471232353 +0000 UTC m=+2006.231570320" observedRunningTime="2026-01-20 17:15:13.069369578 +0000 UTC m=+2006.829707545" watchObservedRunningTime="2026-01-20 17:15:13.079142686 +0000 UTC m=+2006.839480653" Jan 20 17:15:13 crc kubenswrapper[4558]: I0120 17:15:13.339612 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:15:13 crc kubenswrapper[4558]: I0120 17:15:13.364527 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:15:13 crc kubenswrapper[4558]: I0120 17:15:13.794755 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:13 crc kubenswrapper[4558]: I0120 17:15:13.874790 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9b48e62d-af6d-4eb9-b491-766f773de980-sg-core-conf-yaml\") pod \"9b48e62d-af6d-4eb9-b491-766f773de980\" (UID: \"9b48e62d-af6d-4eb9-b491-766f773de980\") " Jan 20 17:15:13 crc kubenswrapper[4558]: I0120 17:15:13.874862 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9b48e62d-af6d-4eb9-b491-766f773de980-log-httpd\") pod \"9b48e62d-af6d-4eb9-b491-766f773de980\" (UID: \"9b48e62d-af6d-4eb9-b491-766f773de980\") " Jan 20 17:15:13 crc kubenswrapper[4558]: I0120 17:15:13.874930 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9b48e62d-af6d-4eb9-b491-766f773de980-scripts\") pod \"9b48e62d-af6d-4eb9-b491-766f773de980\" (UID: \"9b48e62d-af6d-4eb9-b491-766f773de980\") " Jan 20 17:15:13 crc kubenswrapper[4558]: I0120 17:15:13.874983 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9b48e62d-af6d-4eb9-b491-766f773de980-run-httpd\") pod \"9b48e62d-af6d-4eb9-b491-766f773de980\" (UID: \"9b48e62d-af6d-4eb9-b491-766f773de980\") " Jan 20 17:15:13 crc kubenswrapper[4558]: I0120 17:15:13.875052 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b48e62d-af6d-4eb9-b491-766f773de980-config-data\") pod \"9b48e62d-af6d-4eb9-b491-766f773de980\" (UID: \"9b48e62d-af6d-4eb9-b491-766f773de980\") " Jan 20 17:15:13 crc kubenswrapper[4558]: I0120 17:15:13.875176 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-trkz4\" (UniqueName: \"kubernetes.io/projected/9b48e62d-af6d-4eb9-b491-766f773de980-kube-api-access-trkz4\") pod \"9b48e62d-af6d-4eb9-b491-766f773de980\" (UID: \"9b48e62d-af6d-4eb9-b491-766f773de980\") " Jan 20 17:15:13 crc kubenswrapper[4558]: I0120 17:15:13.875323 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b48e62d-af6d-4eb9-b491-766f773de980-combined-ca-bundle\") pod \"9b48e62d-af6d-4eb9-b491-766f773de980\" (UID: \"9b48e62d-af6d-4eb9-b491-766f773de980\") " Jan 20 17:15:13 crc kubenswrapper[4558]: I0120 17:15:13.875646 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9b48e62d-af6d-4eb9-b491-766f773de980-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "9b48e62d-af6d-4eb9-b491-766f773de980" (UID: "9b48e62d-af6d-4eb9-b491-766f773de980"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:15:13 crc kubenswrapper[4558]: I0120 17:15:13.875722 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9b48e62d-af6d-4eb9-b491-766f773de980-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "9b48e62d-af6d-4eb9-b491-766f773de980" (UID: "9b48e62d-af6d-4eb9-b491-766f773de980"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:15:13 crc kubenswrapper[4558]: I0120 17:15:13.877087 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9b48e62d-af6d-4eb9-b491-766f773de980-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:15:13 crc kubenswrapper[4558]: I0120 17:15:13.877116 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9b48e62d-af6d-4eb9-b491-766f773de980-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:15:13 crc kubenswrapper[4558]: I0120 17:15:13.881192 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9b48e62d-af6d-4eb9-b491-766f773de980-scripts" (OuterVolumeSpecName: "scripts") pod "9b48e62d-af6d-4eb9-b491-766f773de980" (UID: "9b48e62d-af6d-4eb9-b491-766f773de980"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:15:13 crc kubenswrapper[4558]: I0120 17:15:13.883884 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9b48e62d-af6d-4eb9-b491-766f773de980-kube-api-access-trkz4" (OuterVolumeSpecName: "kube-api-access-trkz4") pod "9b48e62d-af6d-4eb9-b491-766f773de980" (UID: "9b48e62d-af6d-4eb9-b491-766f773de980"). InnerVolumeSpecName "kube-api-access-trkz4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:15:13 crc kubenswrapper[4558]: I0120 17:15:13.908331 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9b48e62d-af6d-4eb9-b491-766f773de980-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "9b48e62d-af6d-4eb9-b491-766f773de980" (UID: "9b48e62d-af6d-4eb9-b491-766f773de980"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:15:13 crc kubenswrapper[4558]: I0120 17:15:13.936155 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9b48e62d-af6d-4eb9-b491-766f773de980-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9b48e62d-af6d-4eb9-b491-766f773de980" (UID: "9b48e62d-af6d-4eb9-b491-766f773de980"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:15:13 crc kubenswrapper[4558]: I0120 17:15:13.952388 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9b48e62d-af6d-4eb9-b491-766f773de980-config-data" (OuterVolumeSpecName: "config-data") pod "9b48e62d-af6d-4eb9-b491-766f773de980" (UID: "9b48e62d-af6d-4eb9-b491-766f773de980"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:15:13 crc kubenswrapper[4558]: I0120 17:15:13.980504 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9b48e62d-af6d-4eb9-b491-766f773de980-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:15:13 crc kubenswrapper[4558]: I0120 17:15:13.980538 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9b48e62d-af6d-4eb9-b491-766f773de980-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:15:13 crc kubenswrapper[4558]: I0120 17:15:13.980550 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b48e62d-af6d-4eb9-b491-766f773de980-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:15:13 crc kubenswrapper[4558]: I0120 17:15:13.980563 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-trkz4\" (UniqueName: \"kubernetes.io/projected/9b48e62d-af6d-4eb9-b491-766f773de980-kube-api-access-trkz4\") on node \"crc\" DevicePath \"\"" Jan 20 17:15:13 crc kubenswrapper[4558]: I0120 17:15:13.980577 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b48e62d-af6d-4eb9-b491-766f773de980-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:15:14 crc kubenswrapper[4558]: I0120 17:15:14.068784 4558 generic.go:334] "Generic (PLEG): container finished" podID="9b48e62d-af6d-4eb9-b491-766f773de980" containerID="4e41ea0254e75b461a1547d95c8aac810d14af69ad864a1c7606e805b4ce37ce" exitCode=0 Jan 20 17:15:14 crc kubenswrapper[4558]: I0120 17:15:14.068867 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"9b48e62d-af6d-4eb9-b491-766f773de980","Type":"ContainerDied","Data":"4e41ea0254e75b461a1547d95c8aac810d14af69ad864a1c7606e805b4ce37ce"} Jan 20 17:15:14 crc kubenswrapper[4558]: I0120 17:15:14.068930 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"9b48e62d-af6d-4eb9-b491-766f773de980","Type":"ContainerDied","Data":"b71edb754077779738936bd111a5b8e161187e0d42e500f88ccb77801e567749"} Jan 20 17:15:14 crc kubenswrapper[4558]: I0120 17:15:14.068960 4558 scope.go:117] "RemoveContainer" containerID="bf15066af68c4f42d89667ee3f7871a7e42a87e7e5fe77fc9e8a69ddd83cf2f7" Jan 20 17:15:14 crc kubenswrapper[4558]: I0120 17:15:14.068867 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:14 crc kubenswrapper[4558]: I0120 17:15:14.111696 4558 scope.go:117] "RemoveContainer" containerID="7c73bd509e0c42d2ec4674301759d5ad520f1bb21ab0ec927d4c4aafc592d56c" Jan 20 17:15:14 crc kubenswrapper[4558]: I0120 17:15:14.118493 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:15:14 crc kubenswrapper[4558]: I0120 17:15:14.126589 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:15:14 crc kubenswrapper[4558]: I0120 17:15:14.145384 4558 scope.go:117] "RemoveContainer" containerID="4e41ea0254e75b461a1547d95c8aac810d14af69ad864a1c7606e805b4ce37ce" Jan 20 17:15:14 crc kubenswrapper[4558]: I0120 17:15:14.145529 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:15:14 crc kubenswrapper[4558]: I0120 17:15:14.154554 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:15:14 crc kubenswrapper[4558]: E0120 17:15:14.155304 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b48e62d-af6d-4eb9-b491-766f773de980" containerName="ceilometer-central-agent" Jan 20 17:15:14 crc kubenswrapper[4558]: I0120 17:15:14.155338 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b48e62d-af6d-4eb9-b491-766f773de980" containerName="ceilometer-central-agent" Jan 20 17:15:14 crc kubenswrapper[4558]: E0120 17:15:14.155358 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b48e62d-af6d-4eb9-b491-766f773de980" containerName="sg-core" Jan 20 17:15:14 crc kubenswrapper[4558]: I0120 17:15:14.155365 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b48e62d-af6d-4eb9-b491-766f773de980" containerName="sg-core" Jan 20 17:15:14 crc kubenswrapper[4558]: E0120 17:15:14.155377 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b48e62d-af6d-4eb9-b491-766f773de980" containerName="proxy-httpd" Jan 20 17:15:14 crc kubenswrapper[4558]: I0120 17:15:14.155382 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b48e62d-af6d-4eb9-b491-766f773de980" containerName="proxy-httpd" Jan 20 17:15:14 crc kubenswrapper[4558]: E0120 17:15:14.155402 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b48e62d-af6d-4eb9-b491-766f773de980" containerName="ceilometer-notification-agent" Jan 20 17:15:14 crc kubenswrapper[4558]: I0120 17:15:14.155408 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b48e62d-af6d-4eb9-b491-766f773de980" containerName="ceilometer-notification-agent" Jan 20 17:15:14 crc kubenswrapper[4558]: I0120 17:15:14.155667 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="9b48e62d-af6d-4eb9-b491-766f773de980" containerName="ceilometer-notification-agent" Jan 20 17:15:14 crc kubenswrapper[4558]: I0120 17:15:14.155689 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="9b48e62d-af6d-4eb9-b491-766f773de980" containerName="ceilometer-central-agent" Jan 20 17:15:14 crc kubenswrapper[4558]: I0120 17:15:14.155703 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="9b48e62d-af6d-4eb9-b491-766f773de980" containerName="proxy-httpd" Jan 20 17:15:14 crc kubenswrapper[4558]: I0120 17:15:14.155715 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="9b48e62d-af6d-4eb9-b491-766f773de980" containerName="sg-core" Jan 20 17:15:14 crc 
kubenswrapper[4558]: I0120 17:15:14.157677 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:14 crc kubenswrapper[4558]: I0120 17:15:14.158918 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:15:14 crc kubenswrapper[4558]: I0120 17:15:14.160069 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:15:14 crc kubenswrapper[4558]: I0120 17:15:14.160182 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ceilometer-internal-svc" Jan 20 17:15:14 crc kubenswrapper[4558]: I0120 17:15:14.161320 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:15:14 crc kubenswrapper[4558]: I0120 17:15:14.166098 4558 scope.go:117] "RemoveContainer" containerID="6493127d7274540c9787e4ceef28212ffed209d34f09092dfba89ffe109e8cc7" Jan 20 17:15:14 crc kubenswrapper[4558]: I0120 17:15:14.185247 4558 scope.go:117] "RemoveContainer" containerID="bf15066af68c4f42d89667ee3f7871a7e42a87e7e5fe77fc9e8a69ddd83cf2f7" Jan 20 17:15:14 crc kubenswrapper[4558]: E0120 17:15:14.185691 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bf15066af68c4f42d89667ee3f7871a7e42a87e7e5fe77fc9e8a69ddd83cf2f7\": container with ID starting with bf15066af68c4f42d89667ee3f7871a7e42a87e7e5fe77fc9e8a69ddd83cf2f7 not found: ID does not exist" containerID="bf15066af68c4f42d89667ee3f7871a7e42a87e7e5fe77fc9e8a69ddd83cf2f7" Jan 20 17:15:14 crc kubenswrapper[4558]: I0120 17:15:14.185781 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bf15066af68c4f42d89667ee3f7871a7e42a87e7e5fe77fc9e8a69ddd83cf2f7"} err="failed to get container status \"bf15066af68c4f42d89667ee3f7871a7e42a87e7e5fe77fc9e8a69ddd83cf2f7\": rpc error: code = NotFound desc = could not find container \"bf15066af68c4f42d89667ee3f7871a7e42a87e7e5fe77fc9e8a69ddd83cf2f7\": container with ID starting with bf15066af68c4f42d89667ee3f7871a7e42a87e7e5fe77fc9e8a69ddd83cf2f7 not found: ID does not exist" Jan 20 17:15:14 crc kubenswrapper[4558]: I0120 17:15:14.185873 4558 scope.go:117] "RemoveContainer" containerID="7c73bd509e0c42d2ec4674301759d5ad520f1bb21ab0ec927d4c4aafc592d56c" Jan 20 17:15:14 crc kubenswrapper[4558]: E0120 17:15:14.186348 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7c73bd509e0c42d2ec4674301759d5ad520f1bb21ab0ec927d4c4aafc592d56c\": container with ID starting with 7c73bd509e0c42d2ec4674301759d5ad520f1bb21ab0ec927d4c4aafc592d56c not found: ID does not exist" containerID="7c73bd509e0c42d2ec4674301759d5ad520f1bb21ab0ec927d4c4aafc592d56c" Jan 20 17:15:14 crc kubenswrapper[4558]: I0120 17:15:14.186378 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7c73bd509e0c42d2ec4674301759d5ad520f1bb21ab0ec927d4c4aafc592d56c"} err="failed to get container status \"7c73bd509e0c42d2ec4674301759d5ad520f1bb21ab0ec927d4c4aafc592d56c\": rpc error: code = NotFound desc = could not find container \"7c73bd509e0c42d2ec4674301759d5ad520f1bb21ab0ec927d4c4aafc592d56c\": container with ID starting with 7c73bd509e0c42d2ec4674301759d5ad520f1bb21ab0ec927d4c4aafc592d56c not found: ID does not exist" Jan 20 17:15:14 crc kubenswrapper[4558]: 
I0120 17:15:14.186400 4558 scope.go:117] "RemoveContainer" containerID="4e41ea0254e75b461a1547d95c8aac810d14af69ad864a1c7606e805b4ce37ce" Jan 20 17:15:14 crc kubenswrapper[4558]: E0120 17:15:14.186698 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4e41ea0254e75b461a1547d95c8aac810d14af69ad864a1c7606e805b4ce37ce\": container with ID starting with 4e41ea0254e75b461a1547d95c8aac810d14af69ad864a1c7606e805b4ce37ce not found: ID does not exist" containerID="4e41ea0254e75b461a1547d95c8aac810d14af69ad864a1c7606e805b4ce37ce" Jan 20 17:15:14 crc kubenswrapper[4558]: I0120 17:15:14.186783 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4e41ea0254e75b461a1547d95c8aac810d14af69ad864a1c7606e805b4ce37ce"} err="failed to get container status \"4e41ea0254e75b461a1547d95c8aac810d14af69ad864a1c7606e805b4ce37ce\": rpc error: code = NotFound desc = could not find container \"4e41ea0254e75b461a1547d95c8aac810d14af69ad864a1c7606e805b4ce37ce\": container with ID starting with 4e41ea0254e75b461a1547d95c8aac810d14af69ad864a1c7606e805b4ce37ce not found: ID does not exist" Jan 20 17:15:14 crc kubenswrapper[4558]: I0120 17:15:14.186849 4558 scope.go:117] "RemoveContainer" containerID="6493127d7274540c9787e4ceef28212ffed209d34f09092dfba89ffe109e8cc7" Jan 20 17:15:14 crc kubenswrapper[4558]: E0120 17:15:14.187191 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6493127d7274540c9787e4ceef28212ffed209d34f09092dfba89ffe109e8cc7\": container with ID starting with 6493127d7274540c9787e4ceef28212ffed209d34f09092dfba89ffe109e8cc7 not found: ID does not exist" containerID="6493127d7274540c9787e4ceef28212ffed209d34f09092dfba89ffe109e8cc7" Jan 20 17:15:14 crc kubenswrapper[4558]: I0120 17:15:14.187221 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6493127d7274540c9787e4ceef28212ffed209d34f09092dfba89ffe109e8cc7"} err="failed to get container status \"6493127d7274540c9787e4ceef28212ffed209d34f09092dfba89ffe109e8cc7\": rpc error: code = NotFound desc = could not find container \"6493127d7274540c9787e4ceef28212ffed209d34f09092dfba89ffe109e8cc7\": container with ID starting with 6493127d7274540c9787e4ceef28212ffed209d34f09092dfba89ffe109e8cc7 not found: ID does not exist" Jan 20 17:15:14 crc kubenswrapper[4558]: I0120 17:15:14.286370 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aca0e52c-f10e-4793-8895-a2f580dda020-config-data\") pod \"ceilometer-0\" (UID: \"aca0e52c-f10e-4793-8895-a2f580dda020\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:14 crc kubenswrapper[4558]: I0120 17:15:14.286636 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pvnqz\" (UniqueName: \"kubernetes.io/projected/aca0e52c-f10e-4793-8895-a2f580dda020-kube-api-access-pvnqz\") pod \"ceilometer-0\" (UID: \"aca0e52c-f10e-4793-8895-a2f580dda020\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:14 crc kubenswrapper[4558]: I0120 17:15:14.286794 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/aca0e52c-f10e-4793-8895-a2f580dda020-run-httpd\") pod \"ceilometer-0\" (UID: \"aca0e52c-f10e-4793-8895-a2f580dda020\") " 
pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:14 crc kubenswrapper[4558]: I0120 17:15:14.286868 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/aca0e52c-f10e-4793-8895-a2f580dda020-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"aca0e52c-f10e-4793-8895-a2f580dda020\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:14 crc kubenswrapper[4558]: I0120 17:15:14.286970 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aca0e52c-f10e-4793-8895-a2f580dda020-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"aca0e52c-f10e-4793-8895-a2f580dda020\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:14 crc kubenswrapper[4558]: I0120 17:15:14.287191 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/aca0e52c-f10e-4793-8895-a2f580dda020-log-httpd\") pod \"ceilometer-0\" (UID: \"aca0e52c-f10e-4793-8895-a2f580dda020\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:14 crc kubenswrapper[4558]: I0120 17:15:14.287356 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aca0e52c-f10e-4793-8895-a2f580dda020-scripts\") pod \"ceilometer-0\" (UID: \"aca0e52c-f10e-4793-8895-a2f580dda020\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:14 crc kubenswrapper[4558]: I0120 17:15:14.287418 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/aca0e52c-f10e-4793-8895-a2f580dda020-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"aca0e52c-f10e-4793-8895-a2f580dda020\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:14 crc kubenswrapper[4558]: I0120 17:15:14.390504 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/aca0e52c-f10e-4793-8895-a2f580dda020-run-httpd\") pod \"ceilometer-0\" (UID: \"aca0e52c-f10e-4793-8895-a2f580dda020\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:14 crc kubenswrapper[4558]: I0120 17:15:14.390561 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/aca0e52c-f10e-4793-8895-a2f580dda020-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"aca0e52c-f10e-4793-8895-a2f580dda020\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:14 crc kubenswrapper[4558]: I0120 17:15:14.390604 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aca0e52c-f10e-4793-8895-a2f580dda020-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"aca0e52c-f10e-4793-8895-a2f580dda020\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:14 crc kubenswrapper[4558]: I0120 17:15:14.390649 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/aca0e52c-f10e-4793-8895-a2f580dda020-log-httpd\") pod \"ceilometer-0\" (UID: \"aca0e52c-f10e-4793-8895-a2f580dda020\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:14 crc kubenswrapper[4558]: I0120 17:15:14.390695 4558 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aca0e52c-f10e-4793-8895-a2f580dda020-scripts\") pod \"ceilometer-0\" (UID: \"aca0e52c-f10e-4793-8895-a2f580dda020\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:14 crc kubenswrapper[4558]: I0120 17:15:14.390720 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/aca0e52c-f10e-4793-8895-a2f580dda020-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"aca0e52c-f10e-4793-8895-a2f580dda020\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:14 crc kubenswrapper[4558]: I0120 17:15:14.390760 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aca0e52c-f10e-4793-8895-a2f580dda020-config-data\") pod \"ceilometer-0\" (UID: \"aca0e52c-f10e-4793-8895-a2f580dda020\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:14 crc kubenswrapper[4558]: I0120 17:15:14.390817 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pvnqz\" (UniqueName: \"kubernetes.io/projected/aca0e52c-f10e-4793-8895-a2f580dda020-kube-api-access-pvnqz\") pod \"ceilometer-0\" (UID: \"aca0e52c-f10e-4793-8895-a2f580dda020\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:14 crc kubenswrapper[4558]: I0120 17:15:14.391657 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/aca0e52c-f10e-4793-8895-a2f580dda020-log-httpd\") pod \"ceilometer-0\" (UID: \"aca0e52c-f10e-4793-8895-a2f580dda020\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:14 crc kubenswrapper[4558]: I0120 17:15:14.392055 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/aca0e52c-f10e-4793-8895-a2f580dda020-run-httpd\") pod \"ceilometer-0\" (UID: \"aca0e52c-f10e-4793-8895-a2f580dda020\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:14 crc kubenswrapper[4558]: I0120 17:15:14.397069 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/aca0e52c-f10e-4793-8895-a2f580dda020-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"aca0e52c-f10e-4793-8895-a2f580dda020\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:14 crc kubenswrapper[4558]: I0120 17:15:14.398096 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aca0e52c-f10e-4793-8895-a2f580dda020-config-data\") pod \"ceilometer-0\" (UID: \"aca0e52c-f10e-4793-8895-a2f580dda020\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:14 crc kubenswrapper[4558]: I0120 17:15:14.398895 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aca0e52c-f10e-4793-8895-a2f580dda020-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"aca0e52c-f10e-4793-8895-a2f580dda020\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:14 crc kubenswrapper[4558]: I0120 17:15:14.399479 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/aca0e52c-f10e-4793-8895-a2f580dda020-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"aca0e52c-f10e-4793-8895-a2f580dda020\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:14 crc 
kubenswrapper[4558]: I0120 17:15:14.404057 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aca0e52c-f10e-4793-8895-a2f580dda020-scripts\") pod \"ceilometer-0\" (UID: \"aca0e52c-f10e-4793-8895-a2f580dda020\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:14 crc kubenswrapper[4558]: I0120 17:15:14.407030 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pvnqz\" (UniqueName: \"kubernetes.io/projected/aca0e52c-f10e-4793-8895-a2f580dda020-kube-api-access-pvnqz\") pod \"ceilometer-0\" (UID: \"aca0e52c-f10e-4793-8895-a2f580dda020\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:14 crc kubenswrapper[4558]: I0120 17:15:14.484228 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:14 crc kubenswrapper[4558]: I0120 17:15:14.591571 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9b48e62d-af6d-4eb9-b491-766f773de980" path="/var/lib/kubelet/pods/9b48e62d-af6d-4eb9-b491-766f773de980/volumes" Jan 20 17:15:15 crc kubenswrapper[4558]: I0120 17:15:15.009083 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:15:15 crc kubenswrapper[4558]: W0120 17:15:15.010845 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaca0e52c_f10e_4793_8895_a2f580dda020.slice/crio-21c3b61a9b3b87500ffa33eda7ce124b6f9e24bc143ed1b13b7876042651154c WatchSource:0}: Error finding container 21c3b61a9b3b87500ffa33eda7ce124b6f9e24bc143ed1b13b7876042651154c: Status 404 returned error can't find the container with id 21c3b61a9b3b87500ffa33eda7ce124b6f9e24bc143ed1b13b7876042651154c Jan 20 17:15:15 crc kubenswrapper[4558]: I0120 17:15:15.079351 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"aca0e52c-f10e-4793-8895-a2f580dda020","Type":"ContainerStarted","Data":"21c3b61a9b3b87500ffa33eda7ce124b6f9e24bc143ed1b13b7876042651154c"} Jan 20 17:15:16 crc kubenswrapper[4558]: I0120 17:15:16.099864 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"aca0e52c-f10e-4793-8895-a2f580dda020","Type":"ContainerStarted","Data":"9523b6e24557527f6d800087e26993a7ca07aa7592f157f0cfa48fe2dd0f8837"} Jan 20 17:15:17 crc kubenswrapper[4558]: I0120 17:15:17.110366 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"aca0e52c-f10e-4793-8895-a2f580dda020","Type":"ContainerStarted","Data":"9f39cbf16ac33b9bc66989f659996550b62a727db9541c24814cc381a693cd8f"} Jan 20 17:15:18 crc kubenswrapper[4558]: I0120 17:15:18.125049 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"aca0e52c-f10e-4793-8895-a2f580dda020","Type":"ContainerStarted","Data":"7fd942968e0a2cfec2c808bad22c44cd278a04e01b6953c93af2b85e081cc91a"} Jan 20 17:15:19 crc kubenswrapper[4558]: I0120 17:15:19.138070 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"aca0e52c-f10e-4793-8895-a2f580dda020","Type":"ContainerStarted","Data":"f1435b928ed2a992e9f83776fe60c6728d8a8d66e41b7e08829694e65cb7f314"} Jan 20 17:15:19 crc kubenswrapper[4558]: I0120 17:15:19.139217 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:19 crc kubenswrapper[4558]: I0120 17:15:19.159039 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=1.308397292 podStartE2EDuration="5.159016767s" podCreationTimestamp="2026-01-20 17:15:14 +0000 UTC" firstStartedPulling="2026-01-20 17:15:15.013422537 +0000 UTC m=+2008.773760504" lastFinishedPulling="2026-01-20 17:15:18.864042012 +0000 UTC m=+2012.624379979" observedRunningTime="2026-01-20 17:15:19.153799546 +0000 UTC m=+2012.914137512" watchObservedRunningTime="2026-01-20 17:15:19.159016767 +0000 UTC m=+2012.919354734" Jan 20 17:15:19 crc kubenswrapper[4558]: I0120 17:15:19.297009 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:15:19 crc kubenswrapper[4558]: I0120 17:15:19.297535 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:15:19 crc kubenswrapper[4558]: I0120 17:15:19.307933 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:15:19 crc kubenswrapper[4558]: I0120 17:15:19.308082 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:15:19 crc kubenswrapper[4558]: I0120 17:15:19.884491 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:15:19 crc kubenswrapper[4558]: I0120 17:15:19.885120 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:15:19 crc kubenswrapper[4558]: I0120 17:15:19.886840 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:15:19 crc kubenswrapper[4558]: I0120 17:15:19.891489 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:15:20 crc kubenswrapper[4558]: I0120 17:15:20.157738 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:15:20 crc kubenswrapper[4558]: I0120 17:15:20.162189 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:15:21 crc kubenswrapper[4558]: I0120 17:15:21.746148 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:15:21 crc kubenswrapper[4558]: I0120 17:15:21.986446 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:15:22 crc kubenswrapper[4558]: I0120 17:15:22.175477 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="aca0e52c-f10e-4793-8895-a2f580dda020" containerName="ceilometer-central-agent" containerID="cri-o://9523b6e24557527f6d800087e26993a7ca07aa7592f157f0cfa48fe2dd0f8837" gracePeriod=30 Jan 20 17:15:22 crc kubenswrapper[4558]: I0120 17:15:22.175567 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="aca0e52c-f10e-4793-8895-a2f580dda020" containerName="proxy-httpd" containerID="cri-o://f1435b928ed2a992e9f83776fe60c6728d8a8d66e41b7e08829694e65cb7f314" gracePeriod=30 Jan 20 17:15:22 crc kubenswrapper[4558]: I0120 
17:15:22.175599 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="aca0e52c-f10e-4793-8895-a2f580dda020" containerName="ceilometer-notification-agent" containerID="cri-o://9f39cbf16ac33b9bc66989f659996550b62a727db9541c24814cc381a693cd8f" gracePeriod=30 Jan 20 17:15:22 crc kubenswrapper[4558]: I0120 17:15:22.175568 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="aca0e52c-f10e-4793-8895-a2f580dda020" containerName="sg-core" containerID="cri-o://7fd942968e0a2cfec2c808bad22c44cd278a04e01b6953c93af2b85e081cc91a" gracePeriod=30 Jan 20 17:15:22 crc kubenswrapper[4558]: I0120 17:15:22.753404 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:15:22 crc kubenswrapper[4558]: I0120 17:15:22.925924 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.068523 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aca0e52c-f10e-4793-8895-a2f580dda020-combined-ca-bundle\") pod \"aca0e52c-f10e-4793-8895-a2f580dda020\" (UID: \"aca0e52c-f10e-4793-8895-a2f580dda020\") " Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.068675 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aca0e52c-f10e-4793-8895-a2f580dda020-scripts\") pod \"aca0e52c-f10e-4793-8895-a2f580dda020\" (UID: \"aca0e52c-f10e-4793-8895-a2f580dda020\") " Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.068800 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aca0e52c-f10e-4793-8895-a2f580dda020-config-data\") pod \"aca0e52c-f10e-4793-8895-a2f580dda020\" (UID: \"aca0e52c-f10e-4793-8895-a2f580dda020\") " Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.068885 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/aca0e52c-f10e-4793-8895-a2f580dda020-sg-core-conf-yaml\") pod \"aca0e52c-f10e-4793-8895-a2f580dda020\" (UID: \"aca0e52c-f10e-4793-8895-a2f580dda020\") " Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.068919 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/aca0e52c-f10e-4793-8895-a2f580dda020-log-httpd\") pod \"aca0e52c-f10e-4793-8895-a2f580dda020\" (UID: \"aca0e52c-f10e-4793-8895-a2f580dda020\") " Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.068963 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/aca0e52c-f10e-4793-8895-a2f580dda020-run-httpd\") pod \"aca0e52c-f10e-4793-8895-a2f580dda020\" (UID: \"aca0e52c-f10e-4793-8895-a2f580dda020\") " Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.069048 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pvnqz\" (UniqueName: \"kubernetes.io/projected/aca0e52c-f10e-4793-8895-a2f580dda020-kube-api-access-pvnqz\") pod \"aca0e52c-f10e-4793-8895-a2f580dda020\" (UID: \"aca0e52c-f10e-4793-8895-a2f580dda020\") " Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.069097 4558 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/aca0e52c-f10e-4793-8895-a2f580dda020-ceilometer-tls-certs\") pod \"aca0e52c-f10e-4793-8895-a2f580dda020\" (UID: \"aca0e52c-f10e-4793-8895-a2f580dda020\") " Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.070490 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aca0e52c-f10e-4793-8895-a2f580dda020-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "aca0e52c-f10e-4793-8895-a2f580dda020" (UID: "aca0e52c-f10e-4793-8895-a2f580dda020"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.070780 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aca0e52c-f10e-4793-8895-a2f580dda020-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "aca0e52c-f10e-4793-8895-a2f580dda020" (UID: "aca0e52c-f10e-4793-8895-a2f580dda020"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.078964 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aca0e52c-f10e-4793-8895-a2f580dda020-scripts" (OuterVolumeSpecName: "scripts") pod "aca0e52c-f10e-4793-8895-a2f580dda020" (UID: "aca0e52c-f10e-4793-8895-a2f580dda020"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.079202 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aca0e52c-f10e-4793-8895-a2f580dda020-kube-api-access-pvnqz" (OuterVolumeSpecName: "kube-api-access-pvnqz") pod "aca0e52c-f10e-4793-8895-a2f580dda020" (UID: "aca0e52c-f10e-4793-8895-a2f580dda020"). InnerVolumeSpecName "kube-api-access-pvnqz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.105369 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aca0e52c-f10e-4793-8895-a2f580dda020-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "aca0e52c-f10e-4793-8895-a2f580dda020" (UID: "aca0e52c-f10e-4793-8895-a2f580dda020"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.128982 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aca0e52c-f10e-4793-8895-a2f580dda020-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "aca0e52c-f10e-4793-8895-a2f580dda020" (UID: "aca0e52c-f10e-4793-8895-a2f580dda020"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.139562 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aca0e52c-f10e-4793-8895-a2f580dda020-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "aca0e52c-f10e-4793-8895-a2f580dda020" (UID: "aca0e52c-f10e-4793-8895-a2f580dda020"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.148288 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aca0e52c-f10e-4793-8895-a2f580dda020-config-data" (OuterVolumeSpecName: "config-data") pod "aca0e52c-f10e-4793-8895-a2f580dda020" (UID: "aca0e52c-f10e-4793-8895-a2f580dda020"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.173057 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aca0e52c-f10e-4793-8895-a2f580dda020-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.173085 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aca0e52c-f10e-4793-8895-a2f580dda020-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.173095 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aca0e52c-f10e-4793-8895-a2f580dda020-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.173106 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/aca0e52c-f10e-4793-8895-a2f580dda020-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.173117 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/aca0e52c-f10e-4793-8895-a2f580dda020-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.173127 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/aca0e52c-f10e-4793-8895-a2f580dda020-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.173137 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pvnqz\" (UniqueName: \"kubernetes.io/projected/aca0e52c-f10e-4793-8895-a2f580dda020-kube-api-access-pvnqz\") on node \"crc\" DevicePath \"\"" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.173149 4558 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/aca0e52c-f10e-4793-8895-a2f580dda020-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.186464 4558 generic.go:334] "Generic (PLEG): container finished" podID="aca0e52c-f10e-4793-8895-a2f580dda020" containerID="f1435b928ed2a992e9f83776fe60c6728d8a8d66e41b7e08829694e65cb7f314" exitCode=0 Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.186493 4558 generic.go:334] "Generic (PLEG): container finished" podID="aca0e52c-f10e-4793-8895-a2f580dda020" containerID="7fd942968e0a2cfec2c808bad22c44cd278a04e01b6953c93af2b85e081cc91a" exitCode=2 Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.186521 4558 generic.go:334] "Generic (PLEG): container finished" podID="aca0e52c-f10e-4793-8895-a2f580dda020" containerID="9f39cbf16ac33b9bc66989f659996550b62a727db9541c24814cc381a693cd8f" exitCode=0 Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.186536 4558 generic.go:334] "Generic (PLEG): container finished" podID="aca0e52c-f10e-4793-8895-a2f580dda020" 
containerID="9523b6e24557527f6d800087e26993a7ca07aa7592f157f0cfa48fe2dd0f8837" exitCode=0 Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.186549 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.186630 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"aca0e52c-f10e-4793-8895-a2f580dda020","Type":"ContainerDied","Data":"f1435b928ed2a992e9f83776fe60c6728d8a8d66e41b7e08829694e65cb7f314"} Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.186678 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"aca0e52c-f10e-4793-8895-a2f580dda020","Type":"ContainerDied","Data":"7fd942968e0a2cfec2c808bad22c44cd278a04e01b6953c93af2b85e081cc91a"} Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.186690 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"aca0e52c-f10e-4793-8895-a2f580dda020","Type":"ContainerDied","Data":"9f39cbf16ac33b9bc66989f659996550b62a727db9541c24814cc381a693cd8f"} Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.186701 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"aca0e52c-f10e-4793-8895-a2f580dda020","Type":"ContainerDied","Data":"9523b6e24557527f6d800087e26993a7ca07aa7592f157f0cfa48fe2dd0f8837"} Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.186709 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"aca0e52c-f10e-4793-8895-a2f580dda020","Type":"ContainerDied","Data":"21c3b61a9b3b87500ffa33eda7ce124b6f9e24bc143ed1b13b7876042651154c"} Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.186725 4558 scope.go:117] "RemoveContainer" containerID="f1435b928ed2a992e9f83776fe60c6728d8a8d66e41b7e08829694e65cb7f314" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.187676 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="5826b774-d151-468a-b24c-91f679dcd51e" containerName="nova-api-log" containerID="cri-o://233f1664fba406e964e76cc4bcc4a609d18242e49e46e8d8828fefb0c7a70bdd" gracePeriod=30 Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.187898 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="5826b774-d151-468a-b24c-91f679dcd51e" containerName="nova-api-api" containerID="cri-o://819240a126d240c13f523976836647677409b57b3e0cb405a5f882b973ba6d0d" gracePeriod=30 Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.221675 4558 scope.go:117] "RemoveContainer" containerID="7fd942968e0a2cfec2c808bad22c44cd278a04e01b6953c93af2b85e081cc91a" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.236434 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.243958 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.248410 4558 scope.go:117] "RemoveContainer" containerID="9f39cbf16ac33b9bc66989f659996550b62a727db9541c24814cc381a693cd8f" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.257257 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:15:23 
crc kubenswrapper[4558]: E0120 17:15:23.257776 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aca0e52c-f10e-4793-8895-a2f580dda020" containerName="ceilometer-central-agent" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.257795 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="aca0e52c-f10e-4793-8895-a2f580dda020" containerName="ceilometer-central-agent" Jan 20 17:15:23 crc kubenswrapper[4558]: E0120 17:15:23.257820 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aca0e52c-f10e-4793-8895-a2f580dda020" containerName="proxy-httpd" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.257828 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="aca0e52c-f10e-4793-8895-a2f580dda020" containerName="proxy-httpd" Jan 20 17:15:23 crc kubenswrapper[4558]: E0120 17:15:23.257841 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aca0e52c-f10e-4793-8895-a2f580dda020" containerName="ceilometer-notification-agent" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.257847 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="aca0e52c-f10e-4793-8895-a2f580dda020" containerName="ceilometer-notification-agent" Jan 20 17:15:23 crc kubenswrapper[4558]: E0120 17:15:23.257862 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aca0e52c-f10e-4793-8895-a2f580dda020" containerName="sg-core" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.257868 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="aca0e52c-f10e-4793-8895-a2f580dda020" containerName="sg-core" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.258047 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="aca0e52c-f10e-4793-8895-a2f580dda020" containerName="ceilometer-central-agent" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.258068 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="aca0e52c-f10e-4793-8895-a2f580dda020" containerName="proxy-httpd" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.258081 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="aca0e52c-f10e-4793-8895-a2f580dda020" containerName="sg-core" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.258091 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="aca0e52c-f10e-4793-8895-a2f580dda020" containerName="ceilometer-notification-agent" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.260122 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.262342 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ceilometer-internal-svc" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.262499 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.262800 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.265136 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.266671 4558 scope.go:117] "RemoveContainer" containerID="9523b6e24557527f6d800087e26993a7ca07aa7592f157f0cfa48fe2dd0f8837" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.282101 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4kbxg\" (UniqueName: \"kubernetes.io/projected/f452978c-ede4-44a9-b660-d9a4676e2fc7-kube-api-access-4kbxg\") pod \"ceilometer-0\" (UID: \"f452978c-ede4-44a9-b660-d9a4676e2fc7\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.282151 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f452978c-ede4-44a9-b660-d9a4676e2fc7-log-httpd\") pod \"ceilometer-0\" (UID: \"f452978c-ede4-44a9-b660-d9a4676e2fc7\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.282208 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f452978c-ede4-44a9-b660-d9a4676e2fc7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f452978c-ede4-44a9-b660-d9a4676e2fc7\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.282240 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f452978c-ede4-44a9-b660-d9a4676e2fc7-config-data\") pod \"ceilometer-0\" (UID: \"f452978c-ede4-44a9-b660-d9a4676e2fc7\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.283549 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f452978c-ede4-44a9-b660-d9a4676e2fc7-run-httpd\") pod \"ceilometer-0\" (UID: \"f452978c-ede4-44a9-b660-d9a4676e2fc7\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.283581 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/f452978c-ede4-44a9-b660-d9a4676e2fc7-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"f452978c-ede4-44a9-b660-d9a4676e2fc7\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.283649 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f452978c-ede4-44a9-b660-d9a4676e2fc7-scripts\") pod \"ceilometer-0\" 
(UID: \"f452978c-ede4-44a9-b660-d9a4676e2fc7\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.283691 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f452978c-ede4-44a9-b660-d9a4676e2fc7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f452978c-ede4-44a9-b660-d9a4676e2fc7\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.301685 4558 scope.go:117] "RemoveContainer" containerID="f1435b928ed2a992e9f83776fe60c6728d8a8d66e41b7e08829694e65cb7f314" Jan 20 17:15:23 crc kubenswrapper[4558]: E0120 17:15:23.308821 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f1435b928ed2a992e9f83776fe60c6728d8a8d66e41b7e08829694e65cb7f314\": container with ID starting with f1435b928ed2a992e9f83776fe60c6728d8a8d66e41b7e08829694e65cb7f314 not found: ID does not exist" containerID="f1435b928ed2a992e9f83776fe60c6728d8a8d66e41b7e08829694e65cb7f314" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.308859 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f1435b928ed2a992e9f83776fe60c6728d8a8d66e41b7e08829694e65cb7f314"} err="failed to get container status \"f1435b928ed2a992e9f83776fe60c6728d8a8d66e41b7e08829694e65cb7f314\": rpc error: code = NotFound desc = could not find container \"f1435b928ed2a992e9f83776fe60c6728d8a8d66e41b7e08829694e65cb7f314\": container with ID starting with f1435b928ed2a992e9f83776fe60c6728d8a8d66e41b7e08829694e65cb7f314 not found: ID does not exist" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.308884 4558 scope.go:117] "RemoveContainer" containerID="7fd942968e0a2cfec2c808bad22c44cd278a04e01b6953c93af2b85e081cc91a" Jan 20 17:15:23 crc kubenswrapper[4558]: E0120 17:15:23.309457 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7fd942968e0a2cfec2c808bad22c44cd278a04e01b6953c93af2b85e081cc91a\": container with ID starting with 7fd942968e0a2cfec2c808bad22c44cd278a04e01b6953c93af2b85e081cc91a not found: ID does not exist" containerID="7fd942968e0a2cfec2c808bad22c44cd278a04e01b6953c93af2b85e081cc91a" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.309489 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7fd942968e0a2cfec2c808bad22c44cd278a04e01b6953c93af2b85e081cc91a"} err="failed to get container status \"7fd942968e0a2cfec2c808bad22c44cd278a04e01b6953c93af2b85e081cc91a\": rpc error: code = NotFound desc = could not find container \"7fd942968e0a2cfec2c808bad22c44cd278a04e01b6953c93af2b85e081cc91a\": container with ID starting with 7fd942968e0a2cfec2c808bad22c44cd278a04e01b6953c93af2b85e081cc91a not found: ID does not exist" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.309512 4558 scope.go:117] "RemoveContainer" containerID="9f39cbf16ac33b9bc66989f659996550b62a727db9541c24814cc381a693cd8f" Jan 20 17:15:23 crc kubenswrapper[4558]: E0120 17:15:23.309736 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9f39cbf16ac33b9bc66989f659996550b62a727db9541c24814cc381a693cd8f\": container with ID starting with 9f39cbf16ac33b9bc66989f659996550b62a727db9541c24814cc381a693cd8f not found: ID does not exist" 
containerID="9f39cbf16ac33b9bc66989f659996550b62a727db9541c24814cc381a693cd8f" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.309753 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9f39cbf16ac33b9bc66989f659996550b62a727db9541c24814cc381a693cd8f"} err="failed to get container status \"9f39cbf16ac33b9bc66989f659996550b62a727db9541c24814cc381a693cd8f\": rpc error: code = NotFound desc = could not find container \"9f39cbf16ac33b9bc66989f659996550b62a727db9541c24814cc381a693cd8f\": container with ID starting with 9f39cbf16ac33b9bc66989f659996550b62a727db9541c24814cc381a693cd8f not found: ID does not exist" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.309768 4558 scope.go:117] "RemoveContainer" containerID="9523b6e24557527f6d800087e26993a7ca07aa7592f157f0cfa48fe2dd0f8837" Jan 20 17:15:23 crc kubenswrapper[4558]: E0120 17:15:23.309991 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9523b6e24557527f6d800087e26993a7ca07aa7592f157f0cfa48fe2dd0f8837\": container with ID starting with 9523b6e24557527f6d800087e26993a7ca07aa7592f157f0cfa48fe2dd0f8837 not found: ID does not exist" containerID="9523b6e24557527f6d800087e26993a7ca07aa7592f157f0cfa48fe2dd0f8837" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.310008 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9523b6e24557527f6d800087e26993a7ca07aa7592f157f0cfa48fe2dd0f8837"} err="failed to get container status \"9523b6e24557527f6d800087e26993a7ca07aa7592f157f0cfa48fe2dd0f8837\": rpc error: code = NotFound desc = could not find container \"9523b6e24557527f6d800087e26993a7ca07aa7592f157f0cfa48fe2dd0f8837\": container with ID starting with 9523b6e24557527f6d800087e26993a7ca07aa7592f157f0cfa48fe2dd0f8837 not found: ID does not exist" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.310023 4558 scope.go:117] "RemoveContainer" containerID="f1435b928ed2a992e9f83776fe60c6728d8a8d66e41b7e08829694e65cb7f314" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.311299 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f1435b928ed2a992e9f83776fe60c6728d8a8d66e41b7e08829694e65cb7f314"} err="failed to get container status \"f1435b928ed2a992e9f83776fe60c6728d8a8d66e41b7e08829694e65cb7f314\": rpc error: code = NotFound desc = could not find container \"f1435b928ed2a992e9f83776fe60c6728d8a8d66e41b7e08829694e65cb7f314\": container with ID starting with f1435b928ed2a992e9f83776fe60c6728d8a8d66e41b7e08829694e65cb7f314 not found: ID does not exist" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.311320 4558 scope.go:117] "RemoveContainer" containerID="7fd942968e0a2cfec2c808bad22c44cd278a04e01b6953c93af2b85e081cc91a" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.311965 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7fd942968e0a2cfec2c808bad22c44cd278a04e01b6953c93af2b85e081cc91a"} err="failed to get container status \"7fd942968e0a2cfec2c808bad22c44cd278a04e01b6953c93af2b85e081cc91a\": rpc error: code = NotFound desc = could not find container \"7fd942968e0a2cfec2c808bad22c44cd278a04e01b6953c93af2b85e081cc91a\": container with ID starting with 7fd942968e0a2cfec2c808bad22c44cd278a04e01b6953c93af2b85e081cc91a not found: ID does not exist" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.311999 4558 scope.go:117] "RemoveContainer" 
containerID="9f39cbf16ac33b9bc66989f659996550b62a727db9541c24814cc381a693cd8f" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.312478 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9f39cbf16ac33b9bc66989f659996550b62a727db9541c24814cc381a693cd8f"} err="failed to get container status \"9f39cbf16ac33b9bc66989f659996550b62a727db9541c24814cc381a693cd8f\": rpc error: code = NotFound desc = could not find container \"9f39cbf16ac33b9bc66989f659996550b62a727db9541c24814cc381a693cd8f\": container with ID starting with 9f39cbf16ac33b9bc66989f659996550b62a727db9541c24814cc381a693cd8f not found: ID does not exist" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.312502 4558 scope.go:117] "RemoveContainer" containerID="9523b6e24557527f6d800087e26993a7ca07aa7592f157f0cfa48fe2dd0f8837" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.312873 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9523b6e24557527f6d800087e26993a7ca07aa7592f157f0cfa48fe2dd0f8837"} err="failed to get container status \"9523b6e24557527f6d800087e26993a7ca07aa7592f157f0cfa48fe2dd0f8837\": rpc error: code = NotFound desc = could not find container \"9523b6e24557527f6d800087e26993a7ca07aa7592f157f0cfa48fe2dd0f8837\": container with ID starting with 9523b6e24557527f6d800087e26993a7ca07aa7592f157f0cfa48fe2dd0f8837 not found: ID does not exist" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.312904 4558 scope.go:117] "RemoveContainer" containerID="f1435b928ed2a992e9f83776fe60c6728d8a8d66e41b7e08829694e65cb7f314" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.313183 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f1435b928ed2a992e9f83776fe60c6728d8a8d66e41b7e08829694e65cb7f314"} err="failed to get container status \"f1435b928ed2a992e9f83776fe60c6728d8a8d66e41b7e08829694e65cb7f314\": rpc error: code = NotFound desc = could not find container \"f1435b928ed2a992e9f83776fe60c6728d8a8d66e41b7e08829694e65cb7f314\": container with ID starting with f1435b928ed2a992e9f83776fe60c6728d8a8d66e41b7e08829694e65cb7f314 not found: ID does not exist" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.313203 4558 scope.go:117] "RemoveContainer" containerID="7fd942968e0a2cfec2c808bad22c44cd278a04e01b6953c93af2b85e081cc91a" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.313532 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7fd942968e0a2cfec2c808bad22c44cd278a04e01b6953c93af2b85e081cc91a"} err="failed to get container status \"7fd942968e0a2cfec2c808bad22c44cd278a04e01b6953c93af2b85e081cc91a\": rpc error: code = NotFound desc = could not find container \"7fd942968e0a2cfec2c808bad22c44cd278a04e01b6953c93af2b85e081cc91a\": container with ID starting with 7fd942968e0a2cfec2c808bad22c44cd278a04e01b6953c93af2b85e081cc91a not found: ID does not exist" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.313553 4558 scope.go:117] "RemoveContainer" containerID="9f39cbf16ac33b9bc66989f659996550b62a727db9541c24814cc381a693cd8f" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.313768 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9f39cbf16ac33b9bc66989f659996550b62a727db9541c24814cc381a693cd8f"} err="failed to get container status \"9f39cbf16ac33b9bc66989f659996550b62a727db9541c24814cc381a693cd8f\": rpc error: code = NotFound desc = could not find 
container \"9f39cbf16ac33b9bc66989f659996550b62a727db9541c24814cc381a693cd8f\": container with ID starting with 9f39cbf16ac33b9bc66989f659996550b62a727db9541c24814cc381a693cd8f not found: ID does not exist" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.313787 4558 scope.go:117] "RemoveContainer" containerID="9523b6e24557527f6d800087e26993a7ca07aa7592f157f0cfa48fe2dd0f8837" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.314100 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9523b6e24557527f6d800087e26993a7ca07aa7592f157f0cfa48fe2dd0f8837"} err="failed to get container status \"9523b6e24557527f6d800087e26993a7ca07aa7592f157f0cfa48fe2dd0f8837\": rpc error: code = NotFound desc = could not find container \"9523b6e24557527f6d800087e26993a7ca07aa7592f157f0cfa48fe2dd0f8837\": container with ID starting with 9523b6e24557527f6d800087e26993a7ca07aa7592f157f0cfa48fe2dd0f8837 not found: ID does not exist" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.314122 4558 scope.go:117] "RemoveContainer" containerID="f1435b928ed2a992e9f83776fe60c6728d8a8d66e41b7e08829694e65cb7f314" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.314316 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f1435b928ed2a992e9f83776fe60c6728d8a8d66e41b7e08829694e65cb7f314"} err="failed to get container status \"f1435b928ed2a992e9f83776fe60c6728d8a8d66e41b7e08829694e65cb7f314\": rpc error: code = NotFound desc = could not find container \"f1435b928ed2a992e9f83776fe60c6728d8a8d66e41b7e08829694e65cb7f314\": container with ID starting with f1435b928ed2a992e9f83776fe60c6728d8a8d66e41b7e08829694e65cb7f314 not found: ID does not exist" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.314342 4558 scope.go:117] "RemoveContainer" containerID="7fd942968e0a2cfec2c808bad22c44cd278a04e01b6953c93af2b85e081cc91a" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.314629 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7fd942968e0a2cfec2c808bad22c44cd278a04e01b6953c93af2b85e081cc91a"} err="failed to get container status \"7fd942968e0a2cfec2c808bad22c44cd278a04e01b6953c93af2b85e081cc91a\": rpc error: code = NotFound desc = could not find container \"7fd942968e0a2cfec2c808bad22c44cd278a04e01b6953c93af2b85e081cc91a\": container with ID starting with 7fd942968e0a2cfec2c808bad22c44cd278a04e01b6953c93af2b85e081cc91a not found: ID does not exist" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.314647 4558 scope.go:117] "RemoveContainer" containerID="9f39cbf16ac33b9bc66989f659996550b62a727db9541c24814cc381a693cd8f" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.314917 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9f39cbf16ac33b9bc66989f659996550b62a727db9541c24814cc381a693cd8f"} err="failed to get container status \"9f39cbf16ac33b9bc66989f659996550b62a727db9541c24814cc381a693cd8f\": rpc error: code = NotFound desc = could not find container \"9f39cbf16ac33b9bc66989f659996550b62a727db9541c24814cc381a693cd8f\": container with ID starting with 9f39cbf16ac33b9bc66989f659996550b62a727db9541c24814cc381a693cd8f not found: ID does not exist" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.314933 4558 scope.go:117] "RemoveContainer" containerID="9523b6e24557527f6d800087e26993a7ca07aa7592f157f0cfa48fe2dd0f8837" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.315152 4558 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9523b6e24557527f6d800087e26993a7ca07aa7592f157f0cfa48fe2dd0f8837"} err="failed to get container status \"9523b6e24557527f6d800087e26993a7ca07aa7592f157f0cfa48fe2dd0f8837\": rpc error: code = NotFound desc = could not find container \"9523b6e24557527f6d800087e26993a7ca07aa7592f157f0cfa48fe2dd0f8837\": container with ID starting with 9523b6e24557527f6d800087e26993a7ca07aa7592f157f0cfa48fe2dd0f8837 not found: ID does not exist" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.386327 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f452978c-ede4-44a9-b660-d9a4676e2fc7-run-httpd\") pod \"ceilometer-0\" (UID: \"f452978c-ede4-44a9-b660-d9a4676e2fc7\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.386369 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/f452978c-ede4-44a9-b660-d9a4676e2fc7-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"f452978c-ede4-44a9-b660-d9a4676e2fc7\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.386420 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f452978c-ede4-44a9-b660-d9a4676e2fc7-scripts\") pod \"ceilometer-0\" (UID: \"f452978c-ede4-44a9-b660-d9a4676e2fc7\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.386466 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f452978c-ede4-44a9-b660-d9a4676e2fc7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f452978c-ede4-44a9-b660-d9a4676e2fc7\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.386489 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4kbxg\" (UniqueName: \"kubernetes.io/projected/f452978c-ede4-44a9-b660-d9a4676e2fc7-kube-api-access-4kbxg\") pod \"ceilometer-0\" (UID: \"f452978c-ede4-44a9-b660-d9a4676e2fc7\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.386510 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f452978c-ede4-44a9-b660-d9a4676e2fc7-log-httpd\") pod \"ceilometer-0\" (UID: \"f452978c-ede4-44a9-b660-d9a4676e2fc7\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.386534 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f452978c-ede4-44a9-b660-d9a4676e2fc7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f452978c-ede4-44a9-b660-d9a4676e2fc7\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.386561 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f452978c-ede4-44a9-b660-d9a4676e2fc7-config-data\") pod \"ceilometer-0\" (UID: \"f452978c-ede4-44a9-b660-d9a4676e2fc7\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.386775 4558 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f452978c-ede4-44a9-b660-d9a4676e2fc7-run-httpd\") pod \"ceilometer-0\" (UID: \"f452978c-ede4-44a9-b660-d9a4676e2fc7\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.387425 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f452978c-ede4-44a9-b660-d9a4676e2fc7-log-httpd\") pod \"ceilometer-0\" (UID: \"f452978c-ede4-44a9-b660-d9a4676e2fc7\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.390422 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f452978c-ede4-44a9-b660-d9a4676e2fc7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f452978c-ede4-44a9-b660-d9a4676e2fc7\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.390699 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f452978c-ede4-44a9-b660-d9a4676e2fc7-config-data\") pod \"ceilometer-0\" (UID: \"f452978c-ede4-44a9-b660-d9a4676e2fc7\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.391810 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f452978c-ede4-44a9-b660-d9a4676e2fc7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f452978c-ede4-44a9-b660-d9a4676e2fc7\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.392433 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/f452978c-ede4-44a9-b660-d9a4676e2fc7-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"f452978c-ede4-44a9-b660-d9a4676e2fc7\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.393040 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f452978c-ede4-44a9-b660-d9a4676e2fc7-scripts\") pod \"ceilometer-0\" (UID: \"f452978c-ede4-44a9-b660-d9a4676e2fc7\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.403962 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4kbxg\" (UniqueName: \"kubernetes.io/projected/f452978c-ede4-44a9-b660-d9a4676e2fc7-kube-api-access-4kbxg\") pod \"ceilometer-0\" (UID: \"f452978c-ede4-44a9-b660-d9a4676e2fc7\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.583509 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:23 crc kubenswrapper[4558]: I0120 17:15:23.773921 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:15:24 crc kubenswrapper[4558]: I0120 17:15:24.041284 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:15:24 crc kubenswrapper[4558]: I0120 17:15:24.204511 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"f452978c-ede4-44a9-b660-d9a4676e2fc7","Type":"ContainerStarted","Data":"a25f28a228ec7cd754689b8a15d8b061514ac1109890662c52777a60db6208f1"} Jan 20 17:15:24 crc kubenswrapper[4558]: I0120 17:15:24.209346 4558 generic.go:334] "Generic (PLEG): container finished" podID="5826b774-d151-468a-b24c-91f679dcd51e" containerID="233f1664fba406e964e76cc4bcc4a609d18242e49e46e8d8828fefb0c7a70bdd" exitCode=143 Jan 20 17:15:24 crc kubenswrapper[4558]: I0120 17:15:24.209382 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"5826b774-d151-468a-b24c-91f679dcd51e","Type":"ContainerDied","Data":"233f1664fba406e964e76cc4bcc4a609d18242e49e46e8d8828fefb0c7a70bdd"} Jan 20 17:15:24 crc kubenswrapper[4558]: I0120 17:15:24.575670 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aca0e52c-f10e-4793-8895-a2f580dda020" path="/var/lib/kubelet/pods/aca0e52c-f10e-4793-8895-a2f580dda020/volumes" Jan 20 17:15:25 crc kubenswrapper[4558]: I0120 17:15:25.219662 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"f452978c-ede4-44a9-b660-d9a4676e2fc7","Type":"ContainerStarted","Data":"06bca196be427a3d76d403e933c98efdc2048fd31222e4b2bb0a9e6bb56a381f"} Jan 20 17:15:26 crc kubenswrapper[4558]: I0120 17:15:26.245634 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"f452978c-ede4-44a9-b660-d9a4676e2fc7","Type":"ContainerStarted","Data":"98bffc914933dbbca94750b59a3f9f3ae7e1b15e73a501efd1efd50adac6e0a0"} Jan 20 17:15:26 crc kubenswrapper[4558]: I0120 17:15:26.765967 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:15:26 crc kubenswrapper[4558]: I0120 17:15:26.958843 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5826b774-d151-468a-b24c-91f679dcd51e-combined-ca-bundle\") pod \"5826b774-d151-468a-b24c-91f679dcd51e\" (UID: \"5826b774-d151-468a-b24c-91f679dcd51e\") " Jan 20 17:15:26 crc kubenswrapper[4558]: I0120 17:15:26.958972 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5826b774-d151-468a-b24c-91f679dcd51e-config-data\") pod \"5826b774-d151-468a-b24c-91f679dcd51e\" (UID: \"5826b774-d151-468a-b24c-91f679dcd51e\") " Jan 20 17:15:26 crc kubenswrapper[4558]: I0120 17:15:26.959259 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5826b774-d151-468a-b24c-91f679dcd51e-logs\") pod \"5826b774-d151-468a-b24c-91f679dcd51e\" (UID: \"5826b774-d151-468a-b24c-91f679dcd51e\") " Jan 20 17:15:26 crc kubenswrapper[4558]: I0120 17:15:26.959305 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fdb66\" (UniqueName: \"kubernetes.io/projected/5826b774-d151-468a-b24c-91f679dcd51e-kube-api-access-fdb66\") pod \"5826b774-d151-468a-b24c-91f679dcd51e\" (UID: \"5826b774-d151-468a-b24c-91f679dcd51e\") " Jan 20 17:15:26 crc kubenswrapper[4558]: I0120 17:15:26.960075 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5826b774-d151-468a-b24c-91f679dcd51e-logs" (OuterVolumeSpecName: "logs") pod "5826b774-d151-468a-b24c-91f679dcd51e" (UID: "5826b774-d151-468a-b24c-91f679dcd51e"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:15:26 crc kubenswrapper[4558]: I0120 17:15:26.960998 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5826b774-d151-468a-b24c-91f679dcd51e-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:15:26 crc kubenswrapper[4558]: I0120 17:15:26.966951 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5826b774-d151-468a-b24c-91f679dcd51e-kube-api-access-fdb66" (OuterVolumeSpecName: "kube-api-access-fdb66") pod "5826b774-d151-468a-b24c-91f679dcd51e" (UID: "5826b774-d151-468a-b24c-91f679dcd51e"). InnerVolumeSpecName "kube-api-access-fdb66". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:15:26 crc kubenswrapper[4558]: I0120 17:15:26.987394 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5826b774-d151-468a-b24c-91f679dcd51e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5826b774-d151-468a-b24c-91f679dcd51e" (UID: "5826b774-d151-468a-b24c-91f679dcd51e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:15:26 crc kubenswrapper[4558]: I0120 17:15:26.995346 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5826b774-d151-468a-b24c-91f679dcd51e-config-data" (OuterVolumeSpecName: "config-data") pod "5826b774-d151-468a-b24c-91f679dcd51e" (UID: "5826b774-d151-468a-b24c-91f679dcd51e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:15:27 crc kubenswrapper[4558]: I0120 17:15:27.063768 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5826b774-d151-468a-b24c-91f679dcd51e-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:15:27 crc kubenswrapper[4558]: I0120 17:15:27.064029 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fdb66\" (UniqueName: \"kubernetes.io/projected/5826b774-d151-468a-b24c-91f679dcd51e-kube-api-access-fdb66\") on node \"crc\" DevicePath \"\"" Jan 20 17:15:27 crc kubenswrapper[4558]: I0120 17:15:27.064043 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5826b774-d151-468a-b24c-91f679dcd51e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:15:27 crc kubenswrapper[4558]: I0120 17:15:27.253759 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"f452978c-ede4-44a9-b660-d9a4676e2fc7","Type":"ContainerStarted","Data":"ea7773b5bec38533a4f5a0048b5ed3b0f1f1a38c7244ab758b80155e59ccd04b"} Jan 20 17:15:27 crc kubenswrapper[4558]: I0120 17:15:27.255071 4558 generic.go:334] "Generic (PLEG): container finished" podID="5826b774-d151-468a-b24c-91f679dcd51e" containerID="819240a126d240c13f523976836647677409b57b3e0cb405a5f882b973ba6d0d" exitCode=0 Jan 20 17:15:27 crc kubenswrapper[4558]: I0120 17:15:27.255095 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"5826b774-d151-468a-b24c-91f679dcd51e","Type":"ContainerDied","Data":"819240a126d240c13f523976836647677409b57b3e0cb405a5f882b973ba6d0d"} Jan 20 17:15:27 crc kubenswrapper[4558]: I0120 17:15:27.255112 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"5826b774-d151-468a-b24c-91f679dcd51e","Type":"ContainerDied","Data":"d5c1fc96531926bf61084e4561a1e15ec8d6ae1a2575f76d438b0078466ab648"} Jan 20 17:15:27 crc kubenswrapper[4558]: I0120 17:15:27.255127 4558 scope.go:117] "RemoveContainer" containerID="819240a126d240c13f523976836647677409b57b3e0cb405a5f882b973ba6d0d" Jan 20 17:15:27 crc kubenswrapper[4558]: I0120 17:15:27.255263 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:15:27 crc kubenswrapper[4558]: I0120 17:15:27.284626 4558 scope.go:117] "RemoveContainer" containerID="233f1664fba406e964e76cc4bcc4a609d18242e49e46e8d8828fefb0c7a70bdd" Jan 20 17:15:27 crc kubenswrapper[4558]: I0120 17:15:27.285497 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:15:27 crc kubenswrapper[4558]: I0120 17:15:27.294973 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:15:27 crc kubenswrapper[4558]: I0120 17:15:27.323404 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:15:27 crc kubenswrapper[4558]: E0120 17:15:27.323909 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5826b774-d151-468a-b24c-91f679dcd51e" containerName="nova-api-log" Jan 20 17:15:27 crc kubenswrapper[4558]: I0120 17:15:27.323922 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="5826b774-d151-468a-b24c-91f679dcd51e" containerName="nova-api-log" Jan 20 17:15:27 crc kubenswrapper[4558]: E0120 17:15:27.323945 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5826b774-d151-468a-b24c-91f679dcd51e" containerName="nova-api-api" Jan 20 17:15:27 crc kubenswrapper[4558]: I0120 17:15:27.323950 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="5826b774-d151-468a-b24c-91f679dcd51e" containerName="nova-api-api" Jan 20 17:15:27 crc kubenswrapper[4558]: I0120 17:15:27.324120 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="5826b774-d151-468a-b24c-91f679dcd51e" containerName="nova-api-log" Jan 20 17:15:27 crc kubenswrapper[4558]: I0120 17:15:27.324136 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="5826b774-d151-468a-b24c-91f679dcd51e" containerName="nova-api-api" Jan 20 17:15:27 crc kubenswrapper[4558]: I0120 17:15:27.325185 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:15:27 crc kubenswrapper[4558]: I0120 17:15:27.327714 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-internal-svc" Jan 20 17:15:27 crc kubenswrapper[4558]: I0120 17:15:27.327855 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-public-svc" Jan 20 17:15:27 crc kubenswrapper[4558]: I0120 17:15:27.328711 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-api-config-data" Jan 20 17:15:27 crc kubenswrapper[4558]: I0120 17:15:27.331399 4558 scope.go:117] "RemoveContainer" containerID="819240a126d240c13f523976836647677409b57b3e0cb405a5f882b973ba6d0d" Jan 20 17:15:27 crc kubenswrapper[4558]: E0120 17:15:27.340297 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"819240a126d240c13f523976836647677409b57b3e0cb405a5f882b973ba6d0d\": container with ID starting with 819240a126d240c13f523976836647677409b57b3e0cb405a5f882b973ba6d0d not found: ID does not exist" containerID="819240a126d240c13f523976836647677409b57b3e0cb405a5f882b973ba6d0d" Jan 20 17:15:27 crc kubenswrapper[4558]: I0120 17:15:27.340349 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"819240a126d240c13f523976836647677409b57b3e0cb405a5f882b973ba6d0d"} err="failed to get container status \"819240a126d240c13f523976836647677409b57b3e0cb405a5f882b973ba6d0d\": rpc error: code = NotFound desc = could not find container \"819240a126d240c13f523976836647677409b57b3e0cb405a5f882b973ba6d0d\": container with ID starting with 819240a126d240c13f523976836647677409b57b3e0cb405a5f882b973ba6d0d not found: ID does not exist" Jan 20 17:15:27 crc kubenswrapper[4558]: I0120 17:15:27.340377 4558 scope.go:117] "RemoveContainer" containerID="233f1664fba406e964e76cc4bcc4a609d18242e49e46e8d8828fefb0c7a70bdd" Jan 20 17:15:27 crc kubenswrapper[4558]: E0120 17:15:27.341155 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"233f1664fba406e964e76cc4bcc4a609d18242e49e46e8d8828fefb0c7a70bdd\": container with ID starting with 233f1664fba406e964e76cc4bcc4a609d18242e49e46e8d8828fefb0c7a70bdd not found: ID does not exist" containerID="233f1664fba406e964e76cc4bcc4a609d18242e49e46e8d8828fefb0c7a70bdd" Jan 20 17:15:27 crc kubenswrapper[4558]: I0120 17:15:27.341186 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"233f1664fba406e964e76cc4bcc4a609d18242e49e46e8d8828fefb0c7a70bdd"} err="failed to get container status \"233f1664fba406e964e76cc4bcc4a609d18242e49e46e8d8828fefb0c7a70bdd\": rpc error: code = NotFound desc = could not find container \"233f1664fba406e964e76cc4bcc4a609d18242e49e46e8d8828fefb0c7a70bdd\": container with ID starting with 233f1664fba406e964e76cc4bcc4a609d18242e49e46e8d8828fefb0c7a70bdd not found: ID does not exist" Jan 20 17:15:27 crc kubenswrapper[4558]: I0120 17:15:27.344069 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:15:27 crc kubenswrapper[4558]: I0120 17:15:27.368526 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/158b4a4e-78aa-4813-a00e-abddfb7214ef-internal-tls-certs\") pod \"nova-api-0\" (UID: 
\"158b4a4e-78aa-4813-a00e-abddfb7214ef\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:15:27 crc kubenswrapper[4558]: I0120 17:15:27.368594 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/158b4a4e-78aa-4813-a00e-abddfb7214ef-public-tls-certs\") pod \"nova-api-0\" (UID: \"158b4a4e-78aa-4813-a00e-abddfb7214ef\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:15:27 crc kubenswrapper[4558]: I0120 17:15:27.368625 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/158b4a4e-78aa-4813-a00e-abddfb7214ef-logs\") pod \"nova-api-0\" (UID: \"158b4a4e-78aa-4813-a00e-abddfb7214ef\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:15:27 crc kubenswrapper[4558]: I0120 17:15:27.368656 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/158b4a4e-78aa-4813-a00e-abddfb7214ef-config-data\") pod \"nova-api-0\" (UID: \"158b4a4e-78aa-4813-a00e-abddfb7214ef\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:15:27 crc kubenswrapper[4558]: I0120 17:15:27.368690 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/158b4a4e-78aa-4813-a00e-abddfb7214ef-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"158b4a4e-78aa-4813-a00e-abddfb7214ef\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:15:27 crc kubenswrapper[4558]: I0120 17:15:27.368713 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s6jzx\" (UniqueName: \"kubernetes.io/projected/158b4a4e-78aa-4813-a00e-abddfb7214ef-kube-api-access-s6jzx\") pod \"nova-api-0\" (UID: \"158b4a4e-78aa-4813-a00e-abddfb7214ef\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:15:27 crc kubenswrapper[4558]: I0120 17:15:27.470891 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/158b4a4e-78aa-4813-a00e-abddfb7214ef-internal-tls-certs\") pod \"nova-api-0\" (UID: \"158b4a4e-78aa-4813-a00e-abddfb7214ef\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:15:27 crc kubenswrapper[4558]: I0120 17:15:27.471103 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/158b4a4e-78aa-4813-a00e-abddfb7214ef-public-tls-certs\") pod \"nova-api-0\" (UID: \"158b4a4e-78aa-4813-a00e-abddfb7214ef\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:15:27 crc kubenswrapper[4558]: I0120 17:15:27.471193 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/158b4a4e-78aa-4813-a00e-abddfb7214ef-logs\") pod \"nova-api-0\" (UID: \"158b4a4e-78aa-4813-a00e-abddfb7214ef\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:15:27 crc kubenswrapper[4558]: I0120 17:15:27.471277 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/158b4a4e-78aa-4813-a00e-abddfb7214ef-config-data\") pod \"nova-api-0\" (UID: \"158b4a4e-78aa-4813-a00e-abddfb7214ef\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:15:27 crc kubenswrapper[4558]: I0120 17:15:27.471369 4558 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/158b4a4e-78aa-4813-a00e-abddfb7214ef-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"158b4a4e-78aa-4813-a00e-abddfb7214ef\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:15:27 crc kubenswrapper[4558]: I0120 17:15:27.471414 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s6jzx\" (UniqueName: \"kubernetes.io/projected/158b4a4e-78aa-4813-a00e-abddfb7214ef-kube-api-access-s6jzx\") pod \"nova-api-0\" (UID: \"158b4a4e-78aa-4813-a00e-abddfb7214ef\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:15:27 crc kubenswrapper[4558]: I0120 17:15:27.472259 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/158b4a4e-78aa-4813-a00e-abddfb7214ef-logs\") pod \"nova-api-0\" (UID: \"158b4a4e-78aa-4813-a00e-abddfb7214ef\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:15:27 crc kubenswrapper[4558]: I0120 17:15:27.479956 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/158b4a4e-78aa-4813-a00e-abddfb7214ef-public-tls-certs\") pod \"nova-api-0\" (UID: \"158b4a4e-78aa-4813-a00e-abddfb7214ef\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:15:27 crc kubenswrapper[4558]: I0120 17:15:27.479965 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/158b4a4e-78aa-4813-a00e-abddfb7214ef-internal-tls-certs\") pod \"nova-api-0\" (UID: \"158b4a4e-78aa-4813-a00e-abddfb7214ef\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:15:27 crc kubenswrapper[4558]: I0120 17:15:27.480086 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/158b4a4e-78aa-4813-a00e-abddfb7214ef-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"158b4a4e-78aa-4813-a00e-abddfb7214ef\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:15:27 crc kubenswrapper[4558]: I0120 17:15:27.480489 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/158b4a4e-78aa-4813-a00e-abddfb7214ef-config-data\") pod \"nova-api-0\" (UID: \"158b4a4e-78aa-4813-a00e-abddfb7214ef\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:15:27 crc kubenswrapper[4558]: I0120 17:15:27.488356 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s6jzx\" (UniqueName: \"kubernetes.io/projected/158b4a4e-78aa-4813-a00e-abddfb7214ef-kube-api-access-s6jzx\") pod \"nova-api-0\" (UID: \"158b4a4e-78aa-4813-a00e-abddfb7214ef\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:15:27 crc kubenswrapper[4558]: I0120 17:15:27.656832 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:15:28 crc kubenswrapper[4558]: I0120 17:15:28.112577 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:15:28 crc kubenswrapper[4558]: I0120 17:15:28.271919 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"158b4a4e-78aa-4813-a00e-abddfb7214ef","Type":"ContainerStarted","Data":"52104433e07b8f12e8c1abd6d48e18ef5e93a62555059f9dad9e59eb47911179"} Jan 20 17:15:28 crc kubenswrapper[4558]: I0120 17:15:28.576213 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5826b774-d151-468a-b24c-91f679dcd51e" path="/var/lib/kubelet/pods/5826b774-d151-468a-b24c-91f679dcd51e/volumes" Jan 20 17:15:29 crc kubenswrapper[4558]: I0120 17:15:29.283927 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"158b4a4e-78aa-4813-a00e-abddfb7214ef","Type":"ContainerStarted","Data":"32341c380c826a7aa830f2d1cd35459c6a706f48a875899fa1c9c906b440f63c"} Jan 20 17:15:29 crc kubenswrapper[4558]: I0120 17:15:29.284281 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"158b4a4e-78aa-4813-a00e-abddfb7214ef","Type":"ContainerStarted","Data":"0d8f4bd8e6d346771ef889450d9cc94bd4e4a03f37ab45706759418add0250d9"} Jan 20 17:15:29 crc kubenswrapper[4558]: I0120 17:15:29.287477 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"f452978c-ede4-44a9-b660-d9a4676e2fc7","Type":"ContainerStarted","Data":"98d171055c6420d1ec1989fb4710f490305dc2079990066e33793c6c123823bc"} Jan 20 17:15:29 crc kubenswrapper[4558]: I0120 17:15:29.287730 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="f452978c-ede4-44a9-b660-d9a4676e2fc7" containerName="ceilometer-central-agent" containerID="cri-o://06bca196be427a3d76d403e933c98efdc2048fd31222e4b2bb0a9e6bb56a381f" gracePeriod=30 Jan 20 17:15:29 crc kubenswrapper[4558]: I0120 17:15:29.287882 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:29 crc kubenswrapper[4558]: I0120 17:15:29.288023 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="f452978c-ede4-44a9-b660-d9a4676e2fc7" containerName="proxy-httpd" containerID="cri-o://98d171055c6420d1ec1989fb4710f490305dc2079990066e33793c6c123823bc" gracePeriod=30 Jan 20 17:15:29 crc kubenswrapper[4558]: I0120 17:15:29.288129 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="f452978c-ede4-44a9-b660-d9a4676e2fc7" containerName="sg-core" containerID="cri-o://ea7773b5bec38533a4f5a0048b5ed3b0f1f1a38c7244ab758b80155e59ccd04b" gracePeriod=30 Jan 20 17:15:29 crc kubenswrapper[4558]: I0120 17:15:29.288239 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="f452978c-ede4-44a9-b660-d9a4676e2fc7" containerName="ceilometer-notification-agent" containerID="cri-o://98bffc914933dbbca94750b59a3f9f3ae7e1b15e73a501efd1efd50adac6e0a0" gracePeriod=30 Jan 20 17:15:29 crc kubenswrapper[4558]: I0120 17:15:29.305389 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-api-0" podStartSLOduration=2.305376357 
podStartE2EDuration="2.305376357s" podCreationTimestamp="2026-01-20 17:15:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:15:29.300242493 +0000 UTC m=+2023.060580461" watchObservedRunningTime="2026-01-20 17:15:29.305376357 +0000 UTC m=+2023.065714324" Jan 20 17:15:29 crc kubenswrapper[4558]: I0120 17:15:29.334575 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=2.248302334 podStartE2EDuration="6.334556949s" podCreationTimestamp="2026-01-20 17:15:23 +0000 UTC" firstStartedPulling="2026-01-20 17:15:24.050756844 +0000 UTC m=+2017.811094811" lastFinishedPulling="2026-01-20 17:15:28.137011459 +0000 UTC m=+2021.897349426" observedRunningTime="2026-01-20 17:15:29.325505359 +0000 UTC m=+2023.085843326" watchObservedRunningTime="2026-01-20 17:15:29.334556949 +0000 UTC m=+2023.094894916" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.106787 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.135598 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f452978c-ede4-44a9-b660-d9a4676e2fc7-run-httpd\") pod \"f452978c-ede4-44a9-b660-d9a4676e2fc7\" (UID: \"f452978c-ede4-44a9-b660-d9a4676e2fc7\") " Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.135655 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/f452978c-ede4-44a9-b660-d9a4676e2fc7-ceilometer-tls-certs\") pod \"f452978c-ede4-44a9-b660-d9a4676e2fc7\" (UID: \"f452978c-ede4-44a9-b660-d9a4676e2fc7\") " Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.135703 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f452978c-ede4-44a9-b660-d9a4676e2fc7-sg-core-conf-yaml\") pod \"f452978c-ede4-44a9-b660-d9a4676e2fc7\" (UID: \"f452978c-ede4-44a9-b660-d9a4676e2fc7\") " Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.135816 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f452978c-ede4-44a9-b660-d9a4676e2fc7-scripts\") pod \"f452978c-ede4-44a9-b660-d9a4676e2fc7\" (UID: \"f452978c-ede4-44a9-b660-d9a4676e2fc7\") " Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.135921 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f452978c-ede4-44a9-b660-d9a4676e2fc7-config-data\") pod \"f452978c-ede4-44a9-b660-d9a4676e2fc7\" (UID: \"f452978c-ede4-44a9-b660-d9a4676e2fc7\") " Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.135949 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f452978c-ede4-44a9-b660-d9a4676e2fc7-combined-ca-bundle\") pod \"f452978c-ede4-44a9-b660-d9a4676e2fc7\" (UID: \"f452978c-ede4-44a9-b660-d9a4676e2fc7\") " Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.135977 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4kbxg\" (UniqueName: \"kubernetes.io/projected/f452978c-ede4-44a9-b660-d9a4676e2fc7-kube-api-access-4kbxg\") pod 
\"f452978c-ede4-44a9-b660-d9a4676e2fc7\" (UID: \"f452978c-ede4-44a9-b660-d9a4676e2fc7\") " Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.136065 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f452978c-ede4-44a9-b660-d9a4676e2fc7-log-httpd\") pod \"f452978c-ede4-44a9-b660-d9a4676e2fc7\" (UID: \"f452978c-ede4-44a9-b660-d9a4676e2fc7\") " Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.136226 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f452978c-ede4-44a9-b660-d9a4676e2fc7-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "f452978c-ede4-44a9-b660-d9a4676e2fc7" (UID: "f452978c-ede4-44a9-b660-d9a4676e2fc7"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.137101 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f452978c-ede4-44a9-b660-d9a4676e2fc7-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.137576 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f452978c-ede4-44a9-b660-d9a4676e2fc7-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "f452978c-ede4-44a9-b660-d9a4676e2fc7" (UID: "f452978c-ede4-44a9-b660-d9a4676e2fc7"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.143179 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f452978c-ede4-44a9-b660-d9a4676e2fc7-kube-api-access-4kbxg" (OuterVolumeSpecName: "kube-api-access-4kbxg") pod "f452978c-ede4-44a9-b660-d9a4676e2fc7" (UID: "f452978c-ede4-44a9-b660-d9a4676e2fc7"). InnerVolumeSpecName "kube-api-access-4kbxg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.166438 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f452978c-ede4-44a9-b660-d9a4676e2fc7-scripts" (OuterVolumeSpecName: "scripts") pod "f452978c-ede4-44a9-b660-d9a4676e2fc7" (UID: "f452978c-ede4-44a9-b660-d9a4676e2fc7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.168069 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f452978c-ede4-44a9-b660-d9a4676e2fc7-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "f452978c-ede4-44a9-b660-d9a4676e2fc7" (UID: "f452978c-ede4-44a9-b660-d9a4676e2fc7"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.199421 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f452978c-ede4-44a9-b660-d9a4676e2fc7-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "f452978c-ede4-44a9-b660-d9a4676e2fc7" (UID: "f452978c-ede4-44a9-b660-d9a4676e2fc7"). InnerVolumeSpecName "ceilometer-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.215433 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f452978c-ede4-44a9-b660-d9a4676e2fc7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f452978c-ede4-44a9-b660-d9a4676e2fc7" (UID: "f452978c-ede4-44a9-b660-d9a4676e2fc7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.220495 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f452978c-ede4-44a9-b660-d9a4676e2fc7-config-data" (OuterVolumeSpecName: "config-data") pod "f452978c-ede4-44a9-b660-d9a4676e2fc7" (UID: "f452978c-ede4-44a9-b660-d9a4676e2fc7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.239577 4558 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/f452978c-ede4-44a9-b660-d9a4676e2fc7-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.239604 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f452978c-ede4-44a9-b660-d9a4676e2fc7-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.239619 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f452978c-ede4-44a9-b660-d9a4676e2fc7-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.239630 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f452978c-ede4-44a9-b660-d9a4676e2fc7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.239639 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f452978c-ede4-44a9-b660-d9a4676e2fc7-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.239649 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4kbxg\" (UniqueName: \"kubernetes.io/projected/f452978c-ede4-44a9-b660-d9a4676e2fc7-kube-api-access-4kbxg\") on node \"crc\" DevicePath \"\"" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.239659 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f452978c-ede4-44a9-b660-d9a4676e2fc7-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.305205 4558 generic.go:334] "Generic (PLEG): container finished" podID="f452978c-ede4-44a9-b660-d9a4676e2fc7" containerID="98d171055c6420d1ec1989fb4710f490305dc2079990066e33793c6c123823bc" exitCode=0 Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.305236 4558 generic.go:334] "Generic (PLEG): container finished" podID="f452978c-ede4-44a9-b660-d9a4676e2fc7" containerID="ea7773b5bec38533a4f5a0048b5ed3b0f1f1a38c7244ab758b80155e59ccd04b" exitCode=2 Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.305246 4558 generic.go:334] "Generic (PLEG): container finished" podID="f452978c-ede4-44a9-b660-d9a4676e2fc7" 
containerID="98bffc914933dbbca94750b59a3f9f3ae7e1b15e73a501efd1efd50adac6e0a0" exitCode=0 Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.305255 4558 generic.go:334] "Generic (PLEG): container finished" podID="f452978c-ede4-44a9-b660-d9a4676e2fc7" containerID="06bca196be427a3d76d403e933c98efdc2048fd31222e4b2bb0a9e6bb56a381f" exitCode=0 Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.305332 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"f452978c-ede4-44a9-b660-d9a4676e2fc7","Type":"ContainerDied","Data":"98d171055c6420d1ec1989fb4710f490305dc2079990066e33793c6c123823bc"} Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.305419 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"f452978c-ede4-44a9-b660-d9a4676e2fc7","Type":"ContainerDied","Data":"ea7773b5bec38533a4f5a0048b5ed3b0f1f1a38c7244ab758b80155e59ccd04b"} Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.305438 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"f452978c-ede4-44a9-b660-d9a4676e2fc7","Type":"ContainerDied","Data":"98bffc914933dbbca94750b59a3f9f3ae7e1b15e73a501efd1efd50adac6e0a0"} Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.305464 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"f452978c-ede4-44a9-b660-d9a4676e2fc7","Type":"ContainerDied","Data":"06bca196be427a3d76d403e933c98efdc2048fd31222e4b2bb0a9e6bb56a381f"} Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.305476 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"f452978c-ede4-44a9-b660-d9a4676e2fc7","Type":"ContainerDied","Data":"a25f28a228ec7cd754689b8a15d8b061514ac1109890662c52777a60db6208f1"} Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.305350 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.305522 4558 scope.go:117] "RemoveContainer" containerID="98d171055c6420d1ec1989fb4710f490305dc2079990066e33793c6c123823bc" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.335773 4558 scope.go:117] "RemoveContainer" containerID="ea7773b5bec38533a4f5a0048b5ed3b0f1f1a38c7244ab758b80155e59ccd04b" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.366723 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.375015 4558 scope.go:117] "RemoveContainer" containerID="98bffc914933dbbca94750b59a3f9f3ae7e1b15e73a501efd1efd50adac6e0a0" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.383622 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.390455 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:15:30 crc kubenswrapper[4558]: E0120 17:15:30.391144 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f452978c-ede4-44a9-b660-d9a4676e2fc7" containerName="ceilometer-notification-agent" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.391196 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f452978c-ede4-44a9-b660-d9a4676e2fc7" containerName="ceilometer-notification-agent" Jan 20 17:15:30 crc kubenswrapper[4558]: E0120 17:15:30.391210 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f452978c-ede4-44a9-b660-d9a4676e2fc7" containerName="proxy-httpd" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.391216 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f452978c-ede4-44a9-b660-d9a4676e2fc7" containerName="proxy-httpd" Jan 20 17:15:30 crc kubenswrapper[4558]: E0120 17:15:30.391225 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f452978c-ede4-44a9-b660-d9a4676e2fc7" containerName="ceilometer-central-agent" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.391235 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f452978c-ede4-44a9-b660-d9a4676e2fc7" containerName="ceilometer-central-agent" Jan 20 17:15:30 crc kubenswrapper[4558]: E0120 17:15:30.391257 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f452978c-ede4-44a9-b660-d9a4676e2fc7" containerName="sg-core" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.391264 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f452978c-ede4-44a9-b660-d9a4676e2fc7" containerName="sg-core" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.391540 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f452978c-ede4-44a9-b660-d9a4676e2fc7" containerName="ceilometer-notification-agent" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.391557 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f452978c-ede4-44a9-b660-d9a4676e2fc7" containerName="sg-core" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.391571 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f452978c-ede4-44a9-b660-d9a4676e2fc7" containerName="proxy-httpd" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.391582 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f452978c-ede4-44a9-b660-d9a4676e2fc7" containerName="ceilometer-central-agent" Jan 20 17:15:30 crc kubenswrapper[4558]: 
I0120 17:15:30.393782 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.396238 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.396465 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.396479 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ceilometer-internal-svc" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.397816 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.405216 4558 scope.go:117] "RemoveContainer" containerID="06bca196be427a3d76d403e933c98efdc2048fd31222e4b2bb0a9e6bb56a381f" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.436288 4558 scope.go:117] "RemoveContainer" containerID="98d171055c6420d1ec1989fb4710f490305dc2079990066e33793c6c123823bc" Jan 20 17:15:30 crc kubenswrapper[4558]: E0120 17:15:30.436943 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"98d171055c6420d1ec1989fb4710f490305dc2079990066e33793c6c123823bc\": container with ID starting with 98d171055c6420d1ec1989fb4710f490305dc2079990066e33793c6c123823bc not found: ID does not exist" containerID="98d171055c6420d1ec1989fb4710f490305dc2079990066e33793c6c123823bc" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.436987 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"98d171055c6420d1ec1989fb4710f490305dc2079990066e33793c6c123823bc"} err="failed to get container status \"98d171055c6420d1ec1989fb4710f490305dc2079990066e33793c6c123823bc\": rpc error: code = NotFound desc = could not find container \"98d171055c6420d1ec1989fb4710f490305dc2079990066e33793c6c123823bc\": container with ID starting with 98d171055c6420d1ec1989fb4710f490305dc2079990066e33793c6c123823bc not found: ID does not exist" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.437012 4558 scope.go:117] "RemoveContainer" containerID="ea7773b5bec38533a4f5a0048b5ed3b0f1f1a38c7244ab758b80155e59ccd04b" Jan 20 17:15:30 crc kubenswrapper[4558]: E0120 17:15:30.438200 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ea7773b5bec38533a4f5a0048b5ed3b0f1f1a38c7244ab758b80155e59ccd04b\": container with ID starting with ea7773b5bec38533a4f5a0048b5ed3b0f1f1a38c7244ab758b80155e59ccd04b not found: ID does not exist" containerID="ea7773b5bec38533a4f5a0048b5ed3b0f1f1a38c7244ab758b80155e59ccd04b" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.438230 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ea7773b5bec38533a4f5a0048b5ed3b0f1f1a38c7244ab758b80155e59ccd04b"} err="failed to get container status \"ea7773b5bec38533a4f5a0048b5ed3b0f1f1a38c7244ab758b80155e59ccd04b\": rpc error: code = NotFound desc = could not find container \"ea7773b5bec38533a4f5a0048b5ed3b0f1f1a38c7244ab758b80155e59ccd04b\": container with ID starting with ea7773b5bec38533a4f5a0048b5ed3b0f1f1a38c7244ab758b80155e59ccd04b not found: ID does not exist" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.438266 
4558 scope.go:117] "RemoveContainer" containerID="98bffc914933dbbca94750b59a3f9f3ae7e1b15e73a501efd1efd50adac6e0a0" Jan 20 17:15:30 crc kubenswrapper[4558]: E0120 17:15:30.440358 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"98bffc914933dbbca94750b59a3f9f3ae7e1b15e73a501efd1efd50adac6e0a0\": container with ID starting with 98bffc914933dbbca94750b59a3f9f3ae7e1b15e73a501efd1efd50adac6e0a0 not found: ID does not exist" containerID="98bffc914933dbbca94750b59a3f9f3ae7e1b15e73a501efd1efd50adac6e0a0" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.440409 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"98bffc914933dbbca94750b59a3f9f3ae7e1b15e73a501efd1efd50adac6e0a0"} err="failed to get container status \"98bffc914933dbbca94750b59a3f9f3ae7e1b15e73a501efd1efd50adac6e0a0\": rpc error: code = NotFound desc = could not find container \"98bffc914933dbbca94750b59a3f9f3ae7e1b15e73a501efd1efd50adac6e0a0\": container with ID starting with 98bffc914933dbbca94750b59a3f9f3ae7e1b15e73a501efd1efd50adac6e0a0 not found: ID does not exist" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.440440 4558 scope.go:117] "RemoveContainer" containerID="06bca196be427a3d76d403e933c98efdc2048fd31222e4b2bb0a9e6bb56a381f" Jan 20 17:15:30 crc kubenswrapper[4558]: E0120 17:15:30.441324 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"06bca196be427a3d76d403e933c98efdc2048fd31222e4b2bb0a9e6bb56a381f\": container with ID starting with 06bca196be427a3d76d403e933c98efdc2048fd31222e4b2bb0a9e6bb56a381f not found: ID does not exist" containerID="06bca196be427a3d76d403e933c98efdc2048fd31222e4b2bb0a9e6bb56a381f" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.441347 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"06bca196be427a3d76d403e933c98efdc2048fd31222e4b2bb0a9e6bb56a381f"} err="failed to get container status \"06bca196be427a3d76d403e933c98efdc2048fd31222e4b2bb0a9e6bb56a381f\": rpc error: code = NotFound desc = could not find container \"06bca196be427a3d76d403e933c98efdc2048fd31222e4b2bb0a9e6bb56a381f\": container with ID starting with 06bca196be427a3d76d403e933c98efdc2048fd31222e4b2bb0a9e6bb56a381f not found: ID does not exist" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.441363 4558 scope.go:117] "RemoveContainer" containerID="98d171055c6420d1ec1989fb4710f490305dc2079990066e33793c6c123823bc" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.441704 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"98d171055c6420d1ec1989fb4710f490305dc2079990066e33793c6c123823bc"} err="failed to get container status \"98d171055c6420d1ec1989fb4710f490305dc2079990066e33793c6c123823bc\": rpc error: code = NotFound desc = could not find container \"98d171055c6420d1ec1989fb4710f490305dc2079990066e33793c6c123823bc\": container with ID starting with 98d171055c6420d1ec1989fb4710f490305dc2079990066e33793c6c123823bc not found: ID does not exist" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.441726 4558 scope.go:117] "RemoveContainer" containerID="ea7773b5bec38533a4f5a0048b5ed3b0f1f1a38c7244ab758b80155e59ccd04b" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.442777 4558 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"ea7773b5bec38533a4f5a0048b5ed3b0f1f1a38c7244ab758b80155e59ccd04b"} err="failed to get container status \"ea7773b5bec38533a4f5a0048b5ed3b0f1f1a38c7244ab758b80155e59ccd04b\": rpc error: code = NotFound desc = could not find container \"ea7773b5bec38533a4f5a0048b5ed3b0f1f1a38c7244ab758b80155e59ccd04b\": container with ID starting with ea7773b5bec38533a4f5a0048b5ed3b0f1f1a38c7244ab758b80155e59ccd04b not found: ID does not exist" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.442862 4558 scope.go:117] "RemoveContainer" containerID="98bffc914933dbbca94750b59a3f9f3ae7e1b15e73a501efd1efd50adac6e0a0" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.443495 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"98bffc914933dbbca94750b59a3f9f3ae7e1b15e73a501efd1efd50adac6e0a0"} err="failed to get container status \"98bffc914933dbbca94750b59a3f9f3ae7e1b15e73a501efd1efd50adac6e0a0\": rpc error: code = NotFound desc = could not find container \"98bffc914933dbbca94750b59a3f9f3ae7e1b15e73a501efd1efd50adac6e0a0\": container with ID starting with 98bffc914933dbbca94750b59a3f9f3ae7e1b15e73a501efd1efd50adac6e0a0 not found: ID does not exist" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.443521 4558 scope.go:117] "RemoveContainer" containerID="06bca196be427a3d76d403e933c98efdc2048fd31222e4b2bb0a9e6bb56a381f" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.444799 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j59xz\" (UniqueName: \"kubernetes.io/projected/544c8619-276c-4358-ac79-58df9a462173-kube-api-access-j59xz\") pod \"ceilometer-0\" (UID: \"544c8619-276c-4358-ac79-58df9a462173\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.444843 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/544c8619-276c-4358-ac79-58df9a462173-config-data\") pod \"ceilometer-0\" (UID: \"544c8619-276c-4358-ac79-58df9a462173\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.444877 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/544c8619-276c-4358-ac79-58df9a462173-scripts\") pod \"ceilometer-0\" (UID: \"544c8619-276c-4358-ac79-58df9a462173\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.444898 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/544c8619-276c-4358-ac79-58df9a462173-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"544c8619-276c-4358-ac79-58df9a462173\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.444971 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/544c8619-276c-4358-ac79-58df9a462173-run-httpd\") pod \"ceilometer-0\" (UID: \"544c8619-276c-4358-ac79-58df9a462173\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.445006 4558 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"06bca196be427a3d76d403e933c98efdc2048fd31222e4b2bb0a9e6bb56a381f"} err="failed to get container status \"06bca196be427a3d76d403e933c98efdc2048fd31222e4b2bb0a9e6bb56a381f\": rpc error: code = NotFound desc = could not find container \"06bca196be427a3d76d403e933c98efdc2048fd31222e4b2bb0a9e6bb56a381f\": container with ID starting with 06bca196be427a3d76d403e933c98efdc2048fd31222e4b2bb0a9e6bb56a381f not found: ID does not exist" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.445050 4558 scope.go:117] "RemoveContainer" containerID="98d171055c6420d1ec1989fb4710f490305dc2079990066e33793c6c123823bc" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.445106 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/544c8619-276c-4358-ac79-58df9a462173-log-httpd\") pod \"ceilometer-0\" (UID: \"544c8619-276c-4358-ac79-58df9a462173\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.445224 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/544c8619-276c-4358-ac79-58df9a462173-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"544c8619-276c-4358-ac79-58df9a462173\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.445271 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/544c8619-276c-4358-ac79-58df9a462173-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"544c8619-276c-4358-ac79-58df9a462173\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.445490 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"98d171055c6420d1ec1989fb4710f490305dc2079990066e33793c6c123823bc"} err="failed to get container status \"98d171055c6420d1ec1989fb4710f490305dc2079990066e33793c6c123823bc\": rpc error: code = NotFound desc = could not find container \"98d171055c6420d1ec1989fb4710f490305dc2079990066e33793c6c123823bc\": container with ID starting with 98d171055c6420d1ec1989fb4710f490305dc2079990066e33793c6c123823bc not found: ID does not exist" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.445519 4558 scope.go:117] "RemoveContainer" containerID="ea7773b5bec38533a4f5a0048b5ed3b0f1f1a38c7244ab758b80155e59ccd04b" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.446043 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ea7773b5bec38533a4f5a0048b5ed3b0f1f1a38c7244ab758b80155e59ccd04b"} err="failed to get container status \"ea7773b5bec38533a4f5a0048b5ed3b0f1f1a38c7244ab758b80155e59ccd04b\": rpc error: code = NotFound desc = could not find container \"ea7773b5bec38533a4f5a0048b5ed3b0f1f1a38c7244ab758b80155e59ccd04b\": container with ID starting with ea7773b5bec38533a4f5a0048b5ed3b0f1f1a38c7244ab758b80155e59ccd04b not found: ID does not exist" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.446068 4558 scope.go:117] "RemoveContainer" containerID="98bffc914933dbbca94750b59a3f9f3ae7e1b15e73a501efd1efd50adac6e0a0" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.447229 4558 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"98bffc914933dbbca94750b59a3f9f3ae7e1b15e73a501efd1efd50adac6e0a0"} err="failed to get container status \"98bffc914933dbbca94750b59a3f9f3ae7e1b15e73a501efd1efd50adac6e0a0\": rpc error: code = NotFound desc = could not find container \"98bffc914933dbbca94750b59a3f9f3ae7e1b15e73a501efd1efd50adac6e0a0\": container with ID starting with 98bffc914933dbbca94750b59a3f9f3ae7e1b15e73a501efd1efd50adac6e0a0 not found: ID does not exist" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.447255 4558 scope.go:117] "RemoveContainer" containerID="06bca196be427a3d76d403e933c98efdc2048fd31222e4b2bb0a9e6bb56a381f" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.447931 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"06bca196be427a3d76d403e933c98efdc2048fd31222e4b2bb0a9e6bb56a381f"} err="failed to get container status \"06bca196be427a3d76d403e933c98efdc2048fd31222e4b2bb0a9e6bb56a381f\": rpc error: code = NotFound desc = could not find container \"06bca196be427a3d76d403e933c98efdc2048fd31222e4b2bb0a9e6bb56a381f\": container with ID starting with 06bca196be427a3d76d403e933c98efdc2048fd31222e4b2bb0a9e6bb56a381f not found: ID does not exist" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.447974 4558 scope.go:117] "RemoveContainer" containerID="98d171055c6420d1ec1989fb4710f490305dc2079990066e33793c6c123823bc" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.448607 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"98d171055c6420d1ec1989fb4710f490305dc2079990066e33793c6c123823bc"} err="failed to get container status \"98d171055c6420d1ec1989fb4710f490305dc2079990066e33793c6c123823bc\": rpc error: code = NotFound desc = could not find container \"98d171055c6420d1ec1989fb4710f490305dc2079990066e33793c6c123823bc\": container with ID starting with 98d171055c6420d1ec1989fb4710f490305dc2079990066e33793c6c123823bc not found: ID does not exist" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.448652 4558 scope.go:117] "RemoveContainer" containerID="ea7773b5bec38533a4f5a0048b5ed3b0f1f1a38c7244ab758b80155e59ccd04b" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.449060 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ea7773b5bec38533a4f5a0048b5ed3b0f1f1a38c7244ab758b80155e59ccd04b"} err="failed to get container status \"ea7773b5bec38533a4f5a0048b5ed3b0f1f1a38c7244ab758b80155e59ccd04b\": rpc error: code = NotFound desc = could not find container \"ea7773b5bec38533a4f5a0048b5ed3b0f1f1a38c7244ab758b80155e59ccd04b\": container with ID starting with ea7773b5bec38533a4f5a0048b5ed3b0f1f1a38c7244ab758b80155e59ccd04b not found: ID does not exist" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.449083 4558 scope.go:117] "RemoveContainer" containerID="98bffc914933dbbca94750b59a3f9f3ae7e1b15e73a501efd1efd50adac6e0a0" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.449364 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"98bffc914933dbbca94750b59a3f9f3ae7e1b15e73a501efd1efd50adac6e0a0"} err="failed to get container status \"98bffc914933dbbca94750b59a3f9f3ae7e1b15e73a501efd1efd50adac6e0a0\": rpc error: code = NotFound desc = could not find container \"98bffc914933dbbca94750b59a3f9f3ae7e1b15e73a501efd1efd50adac6e0a0\": container with ID starting with 98bffc914933dbbca94750b59a3f9f3ae7e1b15e73a501efd1efd50adac6e0a0 not found: ID does not exist" Jan 
20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.449391 4558 scope.go:117] "RemoveContainer" containerID="06bca196be427a3d76d403e933c98efdc2048fd31222e4b2bb0a9e6bb56a381f" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.449685 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"06bca196be427a3d76d403e933c98efdc2048fd31222e4b2bb0a9e6bb56a381f"} err="failed to get container status \"06bca196be427a3d76d403e933c98efdc2048fd31222e4b2bb0a9e6bb56a381f\": rpc error: code = NotFound desc = could not find container \"06bca196be427a3d76d403e933c98efdc2048fd31222e4b2bb0a9e6bb56a381f\": container with ID starting with 06bca196be427a3d76d403e933c98efdc2048fd31222e4b2bb0a9e6bb56a381f not found: ID does not exist" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.547533 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j59xz\" (UniqueName: \"kubernetes.io/projected/544c8619-276c-4358-ac79-58df9a462173-kube-api-access-j59xz\") pod \"ceilometer-0\" (UID: \"544c8619-276c-4358-ac79-58df9a462173\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.547591 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/544c8619-276c-4358-ac79-58df9a462173-config-data\") pod \"ceilometer-0\" (UID: \"544c8619-276c-4358-ac79-58df9a462173\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.547646 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/544c8619-276c-4358-ac79-58df9a462173-scripts\") pod \"ceilometer-0\" (UID: \"544c8619-276c-4358-ac79-58df9a462173\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.547669 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/544c8619-276c-4358-ac79-58df9a462173-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"544c8619-276c-4358-ac79-58df9a462173\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.547711 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/544c8619-276c-4358-ac79-58df9a462173-run-httpd\") pod \"ceilometer-0\" (UID: \"544c8619-276c-4358-ac79-58df9a462173\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.547893 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/544c8619-276c-4358-ac79-58df9a462173-log-httpd\") pod \"ceilometer-0\" (UID: \"544c8619-276c-4358-ac79-58df9a462173\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.548042 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/544c8619-276c-4358-ac79-58df9a462173-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"544c8619-276c-4358-ac79-58df9a462173\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.548121 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/544c8619-276c-4358-ac79-58df9a462173-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"544c8619-276c-4358-ac79-58df9a462173\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.548584 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/544c8619-276c-4358-ac79-58df9a462173-log-httpd\") pod \"ceilometer-0\" (UID: \"544c8619-276c-4358-ac79-58df9a462173\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.549006 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/544c8619-276c-4358-ac79-58df9a462173-run-httpd\") pod \"ceilometer-0\" (UID: \"544c8619-276c-4358-ac79-58df9a462173\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.553867 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/544c8619-276c-4358-ac79-58df9a462173-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"544c8619-276c-4358-ac79-58df9a462173\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.553888 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/544c8619-276c-4358-ac79-58df9a462173-scripts\") pod \"ceilometer-0\" (UID: \"544c8619-276c-4358-ac79-58df9a462173\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.554038 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/544c8619-276c-4358-ac79-58df9a462173-config-data\") pod \"ceilometer-0\" (UID: \"544c8619-276c-4358-ac79-58df9a462173\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.554311 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/544c8619-276c-4358-ac79-58df9a462173-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"544c8619-276c-4358-ac79-58df9a462173\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.554977 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/544c8619-276c-4358-ac79-58df9a462173-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"544c8619-276c-4358-ac79-58df9a462173\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.563260 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j59xz\" (UniqueName: \"kubernetes.io/projected/544c8619-276c-4358-ac79-58df9a462173-kube-api-access-j59xz\") pod \"ceilometer-0\" (UID: \"544c8619-276c-4358-ac79-58df9a462173\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.591824 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f452978c-ede4-44a9-b660-d9a4676e2fc7" path="/var/lib/kubelet/pods/f452978c-ede4-44a9-b660-d9a4676e2fc7/volumes" Jan 20 17:15:30 crc kubenswrapper[4558]: I0120 17:15:30.717979 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:31 crc kubenswrapper[4558]: I0120 17:15:31.142996 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:15:31 crc kubenswrapper[4558]: W0120 17:15:31.145714 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod544c8619_276c_4358_ac79_58df9a462173.slice/crio-78f1dcc15389b29d1baf585f7d61c4fe6334f3e616cfacf39b99ea938cff3a9a WatchSource:0}: Error finding container 78f1dcc15389b29d1baf585f7d61c4fe6334f3e616cfacf39b99ea938cff3a9a: Status 404 returned error can't find the container with id 78f1dcc15389b29d1baf585f7d61c4fe6334f3e616cfacf39b99ea938cff3a9a Jan 20 17:15:31 crc kubenswrapper[4558]: I0120 17:15:31.317493 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"544c8619-276c-4358-ac79-58df9a462173","Type":"ContainerStarted","Data":"78f1dcc15389b29d1baf585f7d61c4fe6334f3e616cfacf39b99ea938cff3a9a"} Jan 20 17:15:32 crc kubenswrapper[4558]: I0120 17:15:32.334231 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"544c8619-276c-4358-ac79-58df9a462173","Type":"ContainerStarted","Data":"aef9e92362cdebb57f6e6fec72040ef9ea0c3ebdec14cebff1a1ec4ae81e35a9"} Jan 20 17:15:33 crc kubenswrapper[4558]: I0120 17:15:33.349541 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"544c8619-276c-4358-ac79-58df9a462173","Type":"ContainerStarted","Data":"12b7e813e5ba4c8b347be431d52bb1ff48a52e3d5f90ccda95991eea8a6553ad"} Jan 20 17:15:34 crc kubenswrapper[4558]: I0120 17:15:34.365942 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"544c8619-276c-4358-ac79-58df9a462173","Type":"ContainerStarted","Data":"821d851a2b03145528569e24bdf1e48df94429d66e29eac4da06f73e2d0ea57e"} Jan 20 17:15:35 crc kubenswrapper[4558]: I0120 17:15:35.378499 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"544c8619-276c-4358-ac79-58df9a462173","Type":"ContainerStarted","Data":"5e7f9a7df82f7908138cd4491a1924604163a709b2ab28b8825d5768baab4058"} Jan 20 17:15:35 crc kubenswrapper[4558]: I0120 17:15:35.379042 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:15:35 crc kubenswrapper[4558]: I0120 17:15:35.400987 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=1.6932050429999999 podStartE2EDuration="5.400969707s" podCreationTimestamp="2026-01-20 17:15:30 +0000 UTC" firstStartedPulling="2026-01-20 17:15:31.148554666 +0000 UTC m=+2024.908892634" lastFinishedPulling="2026-01-20 17:15:34.85631933 +0000 UTC m=+2028.616657298" observedRunningTime="2026-01-20 17:15:35.395255702 +0000 UTC m=+2029.155593669" watchObservedRunningTime="2026-01-20 17:15:35.400969707 +0000 UTC m=+2029.161307674" Jan 20 17:15:37 crc kubenswrapper[4558]: I0120 17:15:37.657618 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:15:37 crc kubenswrapper[4558]: I0120 17:15:37.658012 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:15:38 crc kubenswrapper[4558]: I0120 17:15:38.678336 4558 
prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" podUID="158b4a4e-78aa-4813-a00e-abddfb7214ef" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.102:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:15:38 crc kubenswrapper[4558]: I0120 17:15:38.679404 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" podUID="158b4a4e-78aa-4813-a00e-abddfb7214ef" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.102:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:15:47 crc kubenswrapper[4558]: I0120 17:15:47.663333 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:15:47 crc kubenswrapper[4558]: I0120 17:15:47.664109 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:15:47 crc kubenswrapper[4558]: I0120 17:15:47.664441 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:15:47 crc kubenswrapper[4558]: I0120 17:15:47.664490 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:15:47 crc kubenswrapper[4558]: I0120 17:15:47.669681 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:15:47 crc kubenswrapper[4558]: I0120 17:15:47.669766 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:16:00 crc kubenswrapper[4558]: I0120 17:16:00.725289 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:16:03 crc kubenswrapper[4558]: I0120 17:16:03.050752 4558 scope.go:117] "RemoveContainer" containerID="7f57d98e525d4bcc9c65ec21f59f379838f112b6debbf769cd5bf60c98cb5558" Jan 20 17:16:03 crc kubenswrapper[4558]: I0120 17:16:03.084043 4558 scope.go:117] "RemoveContainer" containerID="05aa1119c41b498e17a30471e75a8154754e7a84508cb65cee9fc6ef9d057e44" Jan 20 17:16:03 crc kubenswrapper[4558]: I0120 17:16:03.139100 4558 scope.go:117] "RemoveContainer" containerID="3d9d12fe62d20a0f517381a88d1e4c4ebb688df85f473c30c5913811a68df201" Jan 20 17:16:03 crc kubenswrapper[4558]: I0120 17:16:03.185397 4558 scope.go:117] "RemoveContainer" containerID="b47901b596b370703e4ec143a0305eb1f5aa2874468f01a805b913daadf8f137" Jan 20 17:16:03 crc kubenswrapper[4558]: E0120 17:16:03.754813 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:16:03 crc kubenswrapper[4558]: E0120 17:16:03.754884 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ef3c342e-679b-4a2e-94df-82adf4200290-combined-ca-bundle podName:ef3c342e-679b-4a2e-94df-82adf4200290 nodeName:}" failed. No retries permitted until 2026-01-20 17:16:04.25486369 +0000 UTC m=+2058.015201657 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/ef3c342e-679b-4a2e-94df-82adf4200290-combined-ca-bundle") pod "ovsdbserver-nb-0" (UID: "ef3c342e-679b-4a2e-94df-82adf4200290") : secret "combined-ca-bundle" not found Jan 20 17:16:04 crc kubenswrapper[4558]: I0120 17:16:04.025210 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-5773-account-create-update-qmwj7"] Jan 20 17:16:04 crc kubenswrapper[4558]: I0120 17:16:04.026929 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-5773-account-create-update-qmwj7" Jan 20 17:16:04 crc kubenswrapper[4558]: I0120 17:16:04.044647 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-db-secret" Jan 20 17:16:04 crc kubenswrapper[4558]: I0120 17:16:04.047138 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-5773-account-create-update-ztf6s"] Jan 20 17:16:04 crc kubenswrapper[4558]: I0120 17:16:04.054210 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:16:04 crc kubenswrapper[4558]: I0120 17:16:04.054469 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/openstackclient" podUID="bad8aeff-00a6-4456-bbdf-937441ed04c0" containerName="openstackclient" containerID="cri-o://b209eadd3c0946d42cd2fa2ad03f5f87bda4b804b2952272d092f8c6dbcda131" gracePeriod=2 Jan 20 17:16:04 crc kubenswrapper[4558]: I0120 17:16:04.066414 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/watcher-be54-account-create-update-2fkrk"] Jan 20 17:16:04 crc kubenswrapper[4558]: I0120 17:16:04.071189 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-5773-account-create-update-ztf6s"] Jan 20 17:16:04 crc kubenswrapper[4558]: I0120 17:16:04.076575 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/watcher-be54-account-create-update-2fkrk"] Jan 20 17:16:04 crc kubenswrapper[4558]: I0120 17:16:04.099082 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:16:04 crc kubenswrapper[4558]: I0120 17:16:04.150697 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-5773-account-create-update-qmwj7"] Jan 20 17:16:04 crc kubenswrapper[4558]: I0120 17:16:04.183515 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/61a672e2-963e-4428-bb97-e2177ac10c06-operator-scripts\") pod \"barbican-5773-account-create-update-qmwj7\" (UID: \"61a672e2-963e-4428-bb97-e2177ac10c06\") " pod="openstack-kuttl-tests/barbican-5773-account-create-update-qmwj7" Jan 20 17:16:04 crc kubenswrapper[4558]: I0120 17:16:04.183647 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l4296\" (UniqueName: \"kubernetes.io/projected/61a672e2-963e-4428-bb97-e2177ac10c06-kube-api-access-l4296\") pod \"barbican-5773-account-create-update-qmwj7\" (UID: \"61a672e2-963e-4428-bb97-e2177ac10c06\") " pod="openstack-kuttl-tests/barbican-5773-account-create-update-qmwj7" Jan 20 17:16:04 crc kubenswrapper[4558]: I0120 17:16:04.293699 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/61a672e2-963e-4428-bb97-e2177ac10c06-operator-scripts\") pod \"barbican-5773-account-create-update-qmwj7\" (UID: \"61a672e2-963e-4428-bb97-e2177ac10c06\") " pod="openstack-kuttl-tests/barbican-5773-account-create-update-qmwj7" Jan 20 17:16:04 crc kubenswrapper[4558]: I0120 17:16:04.293853 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l4296\" (UniqueName: \"kubernetes.io/projected/61a672e2-963e-4428-bb97-e2177ac10c06-kube-api-access-l4296\") pod \"barbican-5773-account-create-update-qmwj7\" (UID: \"61a672e2-963e-4428-bb97-e2177ac10c06\") " pod="openstack-kuttl-tests/barbican-5773-account-create-update-qmwj7" Jan 20 17:16:04 crc kubenswrapper[4558]: E0120 17:16:04.294229 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:16:04 crc kubenswrapper[4558]: E0120 17:16:04.294306 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ef3c342e-679b-4a2e-94df-82adf4200290-combined-ca-bundle podName:ef3c342e-679b-4a2e-94df-82adf4200290 nodeName:}" failed. No retries permitted until 2026-01-20 17:16:05.294283691 +0000 UTC m=+2059.054621648 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/ef3c342e-679b-4a2e-94df-82adf4200290-combined-ca-bundle") pod "ovsdbserver-nb-0" (UID: "ef3c342e-679b-4a2e-94df-82adf4200290") : secret "combined-ca-bundle" not found Jan 20 17:16:04 crc kubenswrapper[4558]: I0120 17:16:04.316311 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/61a672e2-963e-4428-bb97-e2177ac10c06-operator-scripts\") pod \"barbican-5773-account-create-update-qmwj7\" (UID: \"61a672e2-963e-4428-bb97-e2177ac10c06\") " pod="openstack-kuttl-tests/barbican-5773-account-create-update-qmwj7" Jan 20 17:16:04 crc kubenswrapper[4558]: I0120 17:16:04.316519 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/watcher-be54-account-create-update-jd6tl"] Jan 20 17:16:04 crc kubenswrapper[4558]: E0120 17:16:04.317070 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bad8aeff-00a6-4456-bbdf-937441ed04c0" containerName="openstackclient" Jan 20 17:16:04 crc kubenswrapper[4558]: I0120 17:16:04.317086 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="bad8aeff-00a6-4456-bbdf-937441ed04c0" containerName="openstackclient" Jan 20 17:16:04 crc kubenswrapper[4558]: I0120 17:16:04.317316 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="bad8aeff-00a6-4456-bbdf-937441ed04c0" containerName="openstackclient" Jan 20 17:16:04 crc kubenswrapper[4558]: I0120 17:16:04.318207 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/watcher-be54-account-create-update-jd6tl" Jan 20 17:16:04 crc kubenswrapper[4558]: I0120 17:16:04.330760 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"watcher-db-secret" Jan 20 17:16:04 crc kubenswrapper[4558]: I0120 17:16:04.359463 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/watcher-be54-account-create-update-jd6tl"] Jan 20 17:16:04 crc kubenswrapper[4558]: I0120 17:16:04.363662 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l4296\" (UniqueName: \"kubernetes.io/projected/61a672e2-963e-4428-bb97-e2177ac10c06-kube-api-access-l4296\") pod \"barbican-5773-account-create-update-qmwj7\" (UID: \"61a672e2-963e-4428-bb97-e2177ac10c06\") " pod="openstack-kuttl-tests/barbican-5773-account-create-update-qmwj7" Jan 20 17:16:04 crc kubenswrapper[4558]: I0120 17:16:04.401239 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/root-account-create-update-psddp"] Jan 20 17:16:04 crc kubenswrapper[4558]: I0120 17:16:04.402735 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-psddp" Jan 20 17:16:04 crc kubenswrapper[4558]: I0120 17:16:04.405290 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mw495\" (UniqueName: \"kubernetes.io/projected/b66dc001-a49e-439f-ad5f-eb6ed96dc37f-kube-api-access-mw495\") pod \"watcher-be54-account-create-update-jd6tl\" (UID: \"b66dc001-a49e-439f-ad5f-eb6ed96dc37f\") " pod="openstack-kuttl-tests/watcher-be54-account-create-update-jd6tl" Jan 20 17:16:04 crc kubenswrapper[4558]: I0120 17:16:04.405351 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b66dc001-a49e-439f-ad5f-eb6ed96dc37f-operator-scripts\") pod \"watcher-be54-account-create-update-jd6tl\" (UID: \"b66dc001-a49e-439f-ad5f-eb6ed96dc37f\") " pod="openstack-kuttl-tests/watcher-be54-account-create-update-jd6tl" Jan 20 17:16:04 crc kubenswrapper[4558]: I0120 17:16:04.411506 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"openstack-mariadb-root-db-secret" Jan 20 17:16:04 crc kubenswrapper[4558]: I0120 17:16:04.436832 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-9jhcc"] Jan 20 17:16:04 crc kubenswrapper[4558]: I0120 17:16:04.461226 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-9jhcc"] Jan 20 17:16:04 crc kubenswrapper[4558]: I0120 17:16:04.492074 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-psddp"] Jan 20 17:16:04 crc kubenswrapper[4558]: I0120 17:16:04.506257 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:16:04 crc kubenswrapper[4558]: I0120 17:16:04.507726 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/691cf1c1-d617-489b-b0cd-1364328d6c60-operator-scripts\") pod \"root-account-create-update-psddp\" (UID: \"691cf1c1-d617-489b-b0cd-1364328d6c60\") " pod="openstack-kuttl-tests/root-account-create-update-psddp" Jan 20 17:16:04 crc kubenswrapper[4558]: I0120 
17:16:04.507829 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x8w2w\" (UniqueName: \"kubernetes.io/projected/691cf1c1-d617-489b-b0cd-1364328d6c60-kube-api-access-x8w2w\") pod \"root-account-create-update-psddp\" (UID: \"691cf1c1-d617-489b-b0cd-1364328d6c60\") " pod="openstack-kuttl-tests/root-account-create-update-psddp" Jan 20 17:16:04 crc kubenswrapper[4558]: I0120 17:16:04.507864 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mw495\" (UniqueName: \"kubernetes.io/projected/b66dc001-a49e-439f-ad5f-eb6ed96dc37f-kube-api-access-mw495\") pod \"watcher-be54-account-create-update-jd6tl\" (UID: \"b66dc001-a49e-439f-ad5f-eb6ed96dc37f\") " pod="openstack-kuttl-tests/watcher-be54-account-create-update-jd6tl" Jan 20 17:16:04 crc kubenswrapper[4558]: I0120 17:16:04.507911 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b66dc001-a49e-439f-ad5f-eb6ed96dc37f-operator-scripts\") pod \"watcher-be54-account-create-update-jd6tl\" (UID: \"b66dc001-a49e-439f-ad5f-eb6ed96dc37f\") " pod="openstack-kuttl-tests/watcher-be54-account-create-update-jd6tl" Jan 20 17:16:04 crc kubenswrapper[4558]: I0120 17:16:04.509201 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b66dc001-a49e-439f-ad5f-eb6ed96dc37f-operator-scripts\") pod \"watcher-be54-account-create-update-jd6tl\" (UID: \"b66dc001-a49e-439f-ad5f-eb6ed96dc37f\") " pod="openstack-kuttl-tests/watcher-be54-account-create-update-jd6tl" Jan 20 17:16:04 crc kubenswrapper[4558]: I0120 17:16:04.521753 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-d429-account-create-update-4qp4q"] Jan 20 17:16:04 crc kubenswrapper[4558]: I0120 17:16:04.533356 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-d429-account-create-update-4qp4q"] Jan 20 17:16:04 crc kubenswrapper[4558]: I0120 17:16:04.538993 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:16:04 crc kubenswrapper[4558]: I0120 17:16:04.539281 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovn-northd-0" podUID="e8370668-8857-4be3-bcaa-bae8d6cdd158" containerName="ovn-northd" containerID="cri-o://a07ebe0308220308f5b0687be7bf7284b0986dfc78e64ccb177087d9a970bb3f" gracePeriod=30 Jan 20 17:16:04 crc kubenswrapper[4558]: I0120 17:16:04.539659 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovn-northd-0" podUID="e8370668-8857-4be3-bcaa-bae8d6cdd158" containerName="openstack-network-exporter" containerID="cri-o://642cff711dbe4d54cbee09e66959eb32d2d26fed71a8ef14e847e8bed60f8ee8" gracePeriod=30 Jan 20 17:16:04 crc kubenswrapper[4558]: I0120 17:16:04.577832 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="034d0ad2-ef35-4499-97bf-22bb087bda19" path="/var/lib/kubelet/pods/034d0ad2-ef35-4499-97bf-22bb087bda19/volumes" Jan 20 17:16:04 crc kubenswrapper[4558]: I0120 17:16:04.579735 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0bfa16c9-a8d3-46f7-8e6a-ba4c0d8426b6" path="/var/lib/kubelet/pods/0bfa16c9-a8d3-46f7-8e6a-ba4c0d8426b6/volumes" Jan 20 17:16:04 crc kubenswrapper[4558]: I0120 17:16:04.580270 4558 kubelet_volumes.go:163] "Cleaned up orphaned 
pod volumes dir" podUID="3b9f240f-b49c-4184-b5f7-6cc6fa26dc13" path="/var/lib/kubelet/pods/3b9f240f-b49c-4184-b5f7-6cc6fa26dc13/volumes" Jan 20 17:16:04 crc kubenswrapper[4558]: I0120 17:16:04.580761 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="581b4670-ceb9-4190-be1d-2ca27c49b472" path="/var/lib/kubelet/pods/581b4670-ceb9-4190-be1d-2ca27c49b472/volumes" Jan 20 17:16:04 crc kubenswrapper[4558]: I0120 17:16:04.610532 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x8w2w\" (UniqueName: \"kubernetes.io/projected/691cf1c1-d617-489b-b0cd-1364328d6c60-kube-api-access-x8w2w\") pod \"root-account-create-update-psddp\" (UID: \"691cf1c1-d617-489b-b0cd-1364328d6c60\") " pod="openstack-kuttl-tests/root-account-create-update-psddp" Jan 20 17:16:04 crc kubenswrapper[4558]: E0120 17:16:04.610802 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cinder-scripts: secret "cinder-scripts" not found Jan 20 17:16:04 crc kubenswrapper[4558]: E0120 17:16:04.610853 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-scripts podName:f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a nodeName:}" failed. No retries permitted until 2026-01-20 17:16:05.110837483 +0000 UTC m=+2058.871175450 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "scripts" (UniqueName: "kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-scripts") pod "cinder-api-0" (UID: "f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a") : secret "cinder-scripts" not found Jan 20 17:16:04 crc kubenswrapper[4558]: I0120 17:16:04.611042 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/691cf1c1-d617-489b-b0cd-1364328d6c60-operator-scripts\") pod \"root-account-create-update-psddp\" (UID: \"691cf1c1-d617-489b-b0cd-1364328d6c60\") " pod="openstack-kuttl-tests/root-account-create-update-psddp" Jan 20 17:16:04 crc kubenswrapper[4558]: E0120 17:16:04.611263 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cinder-config-data: secret "cinder-config-data" not found Jan 20 17:16:04 crc kubenswrapper[4558]: E0120 17:16:04.611312 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-config-data podName:f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a nodeName:}" failed. No retries permitted until 2026-01-20 17:16:05.111290986 +0000 UTC m=+2058.871628952 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-config-data") pod "cinder-api-0" (UID: "f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a") : secret "cinder-config-data" not found Jan 20 17:16:04 crc kubenswrapper[4558]: E0120 17:16:04.611321 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 20 17:16:04 crc kubenswrapper[4558]: E0120 17:16:04.611367 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/fd2edfa4-790f-49d7-9e32-29571e490aaf-config-data podName:fd2edfa4-790f-49d7-9e32-29571e490aaf nodeName:}" failed. No retries permitted until 2026-01-20 17:16:05.111352652 +0000 UTC m=+2058.871690619 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/fd2edfa4-790f-49d7-9e32-29571e490aaf-config-data") pod "rabbitmq-server-0" (UID: "fd2edfa4-790f-49d7-9e32-29571e490aaf") : configmap "rabbitmq-config-data" not found Jan 20 17:16:04 crc kubenswrapper[4558]: E0120 17:16:04.611967 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-cinder-internal-svc: secret "cert-cinder-internal-svc" not found Jan 20 17:16:04 crc kubenswrapper[4558]: E0120 17:16:04.612012 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-internal-tls-certs podName:f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a nodeName:}" failed. No retries permitted until 2026-01-20 17:16:05.112003786 +0000 UTC m=+2058.872341753 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-internal-tls-certs") pod "cinder-api-0" (UID: "f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a") : secret "cert-cinder-internal-svc" not found Jan 20 17:16:04 crc kubenswrapper[4558]: I0120 17:16:04.612783 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/691cf1c1-d617-489b-b0cd-1364328d6c60-operator-scripts\") pod \"root-account-create-update-psddp\" (UID: \"691cf1c1-d617-489b-b0cd-1364328d6c60\") " pod="openstack-kuttl-tests/root-account-create-update-psddp" Jan 20 17:16:04 crc kubenswrapper[4558]: E0120 17:16:04.612850 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:16:04 crc kubenswrapper[4558]: E0120 17:16:04.612877 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-combined-ca-bundle podName:f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a nodeName:}" failed. No retries permitted until 2026-01-20 17:16:05.112869635 +0000 UTC m=+2058.873207601 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-combined-ca-bundle") pod "cinder-api-0" (UID: "f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a") : secret "combined-ca-bundle" not found Jan 20 17:16:04 crc kubenswrapper[4558]: E0120 17:16:04.613323 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-cinder-public-svc: secret "cert-cinder-public-svc" not found Jan 20 17:16:04 crc kubenswrapper[4558]: E0120 17:16:04.613355 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-public-tls-certs podName:f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a nodeName:}" failed. No retries permitted until 2026-01-20 17:16:05.113346491 +0000 UTC m=+2058.873684458 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-public-tls-certs") pod "cinder-api-0" (UID: "f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a") : secret "cert-cinder-public-svc" not found Jan 20 17:16:04 crc kubenswrapper[4558]: I0120 17:16:04.628665 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mw495\" (UniqueName: \"kubernetes.io/projected/b66dc001-a49e-439f-ad5f-eb6ed96dc37f-kube-api-access-mw495\") pod \"watcher-be54-account-create-update-jd6tl\" (UID: \"b66dc001-a49e-439f-ad5f-eb6ed96dc37f\") " pod="openstack-kuttl-tests/watcher-be54-account-create-update-jd6tl" Jan 20 17:16:04 crc kubenswrapper[4558]: I0120 17:16:04.642515 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x8w2w\" (UniqueName: \"kubernetes.io/projected/691cf1c1-d617-489b-b0cd-1364328d6c60-kube-api-access-x8w2w\") pod \"root-account-create-update-psddp\" (UID: \"691cf1c1-d617-489b-b0cd-1364328d6c60\") " pod="openstack-kuttl-tests/root-account-create-update-psddp" Jan 20 17:16:04 crc kubenswrapper[4558]: I0120 17:16:04.650581 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-5773-account-create-update-qmwj7" Jan 20 17:16:04 crc kubenswrapper[4558]: I0120 17:16:04.655378 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/watcher-be54-account-create-update-jd6tl" Jan 20 17:16:04 crc kubenswrapper[4558]: I0120 17:16:04.742769 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-psddp" Jan 20 17:16:04 crc kubenswrapper[4558]: I0120 17:16:04.748668 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-hj5p8"] Jan 20 17:16:04 crc kubenswrapper[4558]: I0120 17:16:04.776214 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-hj5p8"] Jan 20 17:16:04 crc kubenswrapper[4558]: I0120 17:16:04.807354 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-d62f-account-create-update-8xjrt"] Jan 20 17:16:04 crc kubenswrapper[4558]: I0120 17:16:04.808714 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-d62f-account-create-update-8xjrt" Jan 20 17:16:04 crc kubenswrapper[4558]: I0120 17:16:04.817529 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-db-secret" Jan 20 17:16:04 crc kubenswrapper[4558]: I0120 17:16:04.834098 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-542c-account-create-update-fknl6"] Jan 20 17:16:04 crc kubenswrapper[4558]: I0120 17:16:04.847552 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-542c-account-create-update-fknl6" Jan 20 17:16:04 crc kubenswrapper[4558]: I0120 17:16:04.869189 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-d62f-account-create-update-8xjrt"] Jan 20 17:16:04 crc kubenswrapper[4558]: I0120 17:16:04.875809 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-api-db-secret" Jan 20 17:16:04 crc kubenswrapper[4558]: I0120 17:16:04.891750 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-542c-account-create-update-fknl6"] Jan 20 17:16:04 crc kubenswrapper[4558]: I0120 17:16:04.925880 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nqspr\" (UniqueName: \"kubernetes.io/projected/23341c92-6342-4e5f-904c-17fdff2841c5-kube-api-access-nqspr\") pod \"neutron-d62f-account-create-update-8xjrt\" (UID: \"23341c92-6342-4e5f-904c-17fdff2841c5\") " pod="openstack-kuttl-tests/neutron-d62f-account-create-update-8xjrt" Jan 20 17:16:04 crc kubenswrapper[4558]: I0120 17:16:04.926075 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/23341c92-6342-4e5f-904c-17fdff2841c5-operator-scripts\") pod \"neutron-d62f-account-create-update-8xjrt\" (UID: \"23341c92-6342-4e5f-904c-17fdff2841c5\") " pod="openstack-kuttl-tests/neutron-d62f-account-create-update-8xjrt" Jan 20 17:16:04 crc kubenswrapper[4558]: I0120 17:16:04.930941 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-7601-account-create-update-wx6mf"] Jan 20 17:16:04 crc kubenswrapper[4558]: I0120 17:16:04.932397 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-7601-account-create-update-wx6mf" Jan 20 17:16:04 crc kubenswrapper[4558]: I0120 17:16:04.943873 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-db-secret" Jan 20 17:16:05 crc kubenswrapper[4558]: I0120 17:16:05.013275 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:16:05 crc kubenswrapper[4558]: I0120 17:16:05.013656 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovsdbserver-nb-0" podUID="ef3c342e-679b-4a2e-94df-82adf4200290" containerName="openstack-network-exporter" containerID="cri-o://8e3ee6cc7b998c32fac77b9ad7200546c9a3386eaad38097f2d7e10239440726" gracePeriod=300 Jan 20 17:16:05 crc kubenswrapper[4558]: I0120 17:16:05.047368 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nqspr\" (UniqueName: \"kubernetes.io/projected/23341c92-6342-4e5f-904c-17fdff2841c5-kube-api-access-nqspr\") pod \"neutron-d62f-account-create-update-8xjrt\" (UID: \"23341c92-6342-4e5f-904c-17fdff2841c5\") " pod="openstack-kuttl-tests/neutron-d62f-account-create-update-8xjrt" Jan 20 17:16:05 crc kubenswrapper[4558]: I0120 17:16:05.047539 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4b51c4ea-1df2-42b3-9aa7-d69471c234ed-operator-scripts\") pod \"nova-api-542c-account-create-update-fknl6\" (UID: \"4b51c4ea-1df2-42b3-9aa7-d69471c234ed\") " pod="openstack-kuttl-tests/nova-api-542c-account-create-update-fknl6" Jan 20 17:16:05 crc kubenswrapper[4558]: I0120 17:16:05.047674 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d9khb\" (UniqueName: \"kubernetes.io/projected/5cdda1de-c1bb-45a1-a67b-9fe6e8d699df-kube-api-access-d9khb\") pod \"cinder-7601-account-create-update-wx6mf\" (UID: \"5cdda1de-c1bb-45a1-a67b-9fe6e8d699df\") " pod="openstack-kuttl-tests/cinder-7601-account-create-update-wx6mf" Jan 20 17:16:05 crc kubenswrapper[4558]: I0120 17:16:05.047749 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5cdda1de-c1bb-45a1-a67b-9fe6e8d699df-operator-scripts\") pod \"cinder-7601-account-create-update-wx6mf\" (UID: \"5cdda1de-c1bb-45a1-a67b-9fe6e8d699df\") " pod="openstack-kuttl-tests/cinder-7601-account-create-update-wx6mf" Jan 20 17:16:05 crc kubenswrapper[4558]: I0120 17:16:05.047789 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cwm5h\" (UniqueName: \"kubernetes.io/projected/4b51c4ea-1df2-42b3-9aa7-d69471c234ed-kube-api-access-cwm5h\") pod \"nova-api-542c-account-create-update-fknl6\" (UID: \"4b51c4ea-1df2-42b3-9aa7-d69471c234ed\") " pod="openstack-kuttl-tests/nova-api-542c-account-create-update-fknl6" Jan 20 17:16:05 crc kubenswrapper[4558]: I0120 17:16:05.047854 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/23341c92-6342-4e5f-904c-17fdff2841c5-operator-scripts\") pod \"neutron-d62f-account-create-update-8xjrt\" (UID: \"23341c92-6342-4e5f-904c-17fdff2841c5\") " pod="openstack-kuttl-tests/neutron-d62f-account-create-update-8xjrt" Jan 20 17:16:05 crc kubenswrapper[4558]: I0120 17:16:05.053857 4558 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/23341c92-6342-4e5f-904c-17fdff2841c5-operator-scripts\") pod \"neutron-d62f-account-create-update-8xjrt\" (UID: \"23341c92-6342-4e5f-904c-17fdff2841c5\") " pod="openstack-kuttl-tests/neutron-d62f-account-create-update-8xjrt" Jan 20 17:16:05 crc kubenswrapper[4558]: I0120 17:16:05.089493 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nqspr\" (UniqueName: \"kubernetes.io/projected/23341c92-6342-4e5f-904c-17fdff2841c5-kube-api-access-nqspr\") pod \"neutron-d62f-account-create-update-8xjrt\" (UID: \"23341c92-6342-4e5f-904c-17fdff2841c5\") " pod="openstack-kuttl-tests/neutron-d62f-account-create-update-8xjrt" Jan 20 17:16:05 crc kubenswrapper[4558]: I0120 17:16:05.130968 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/watcher-db-sync-z8lmb"] Jan 20 17:16:05 crc kubenswrapper[4558]: E0120 17:16:05.153320 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a07ebe0308220308f5b0687be7bf7284b0986dfc78e64ccb177087d9a970bb3f" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Jan 20 17:16:05 crc kubenswrapper[4558]: I0120 17:16:05.155777 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4b51c4ea-1df2-42b3-9aa7-d69471c234ed-operator-scripts\") pod \"nova-api-542c-account-create-update-fknl6\" (UID: \"4b51c4ea-1df2-42b3-9aa7-d69471c234ed\") " pod="openstack-kuttl-tests/nova-api-542c-account-create-update-fknl6" Jan 20 17:16:05 crc kubenswrapper[4558]: I0120 17:16:05.155942 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d9khb\" (UniqueName: \"kubernetes.io/projected/5cdda1de-c1bb-45a1-a67b-9fe6e8d699df-kube-api-access-d9khb\") pod \"cinder-7601-account-create-update-wx6mf\" (UID: \"5cdda1de-c1bb-45a1-a67b-9fe6e8d699df\") " pod="openstack-kuttl-tests/cinder-7601-account-create-update-wx6mf" Jan 20 17:16:05 crc kubenswrapper[4558]: I0120 17:16:05.155977 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5cdda1de-c1bb-45a1-a67b-9fe6e8d699df-operator-scripts\") pod \"cinder-7601-account-create-update-wx6mf\" (UID: \"5cdda1de-c1bb-45a1-a67b-9fe6e8d699df\") " pod="openstack-kuttl-tests/cinder-7601-account-create-update-wx6mf" Jan 20 17:16:05 crc kubenswrapper[4558]: I0120 17:16:05.156008 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cwm5h\" (UniqueName: \"kubernetes.io/projected/4b51c4ea-1df2-42b3-9aa7-d69471c234ed-kube-api-access-cwm5h\") pod \"nova-api-542c-account-create-update-fknl6\" (UID: \"4b51c4ea-1df2-42b3-9aa7-d69471c234ed\") " pod="openstack-kuttl-tests/nova-api-542c-account-create-update-fknl6" Jan 20 17:16:05 crc kubenswrapper[4558]: E0120 17:16:05.156278 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-cinder-public-svc: secret "cert-cinder-public-svc" not found Jan 20 17:16:05 crc kubenswrapper[4558]: E0120 17:16:05.156350 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-public-tls-certs podName:f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a nodeName:}" failed. 
No retries permitted until 2026-01-20 17:16:06.156323611 +0000 UTC m=+2059.916661577 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-public-tls-certs") pod "cinder-api-0" (UID: "f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a") : secret "cert-cinder-public-svc" not found Jan 20 17:16:05 crc kubenswrapper[4558]: E0120 17:16:05.158518 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 20 17:16:05 crc kubenswrapper[4558]: E0120 17:16:05.158579 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/fd2edfa4-790f-49d7-9e32-29571e490aaf-config-data podName:fd2edfa4-790f-49d7-9e32-29571e490aaf nodeName:}" failed. No retries permitted until 2026-01-20 17:16:06.15856166 +0000 UTC m=+2059.918899626 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/fd2edfa4-790f-49d7-9e32-29571e490aaf-config-data") pod "rabbitmq-server-0" (UID: "fd2edfa4-790f-49d7-9e32-29571e490aaf") : configmap "rabbitmq-config-data" not found Jan 20 17:16:05 crc kubenswrapper[4558]: E0120 17:16:05.158784 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:16:05 crc kubenswrapper[4558]: E0120 17:16:05.158819 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-combined-ca-bundle podName:f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a nodeName:}" failed. No retries permitted until 2026-01-20 17:16:06.158810688 +0000 UTC m=+2059.919148654 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-combined-ca-bundle") pod "cinder-api-0" (UID: "f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a") : secret "combined-ca-bundle" not found Jan 20 17:16:05 crc kubenswrapper[4558]: E0120 17:16:05.159188 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cinder-config-data: secret "cinder-config-data" not found Jan 20 17:16:05 crc kubenswrapper[4558]: E0120 17:16:05.159254 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-config-data podName:f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a nodeName:}" failed. No retries permitted until 2026-01-20 17:16:06.15923764 +0000 UTC m=+2059.919575608 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-config-data") pod "cinder-api-0" (UID: "f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a") : secret "cinder-config-data" not found Jan 20 17:16:05 crc kubenswrapper[4558]: I0120 17:16:05.159380 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5cdda1de-c1bb-45a1-a67b-9fe6e8d699df-operator-scripts\") pod \"cinder-7601-account-create-update-wx6mf\" (UID: \"5cdda1de-c1bb-45a1-a67b-9fe6e8d699df\") " pod="openstack-kuttl-tests/cinder-7601-account-create-update-wx6mf" Jan 20 17:16:05 crc kubenswrapper[4558]: I0120 17:16:05.159404 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4b51c4ea-1df2-42b3-9aa7-d69471c234ed-operator-scripts\") pod \"nova-api-542c-account-create-update-fknl6\" (UID: \"4b51c4ea-1df2-42b3-9aa7-d69471c234ed\") " pod="openstack-kuttl-tests/nova-api-542c-account-create-update-fknl6" Jan 20 17:16:05 crc kubenswrapper[4558]: E0120 17:16:05.159487 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-cinder-internal-svc: secret "cert-cinder-internal-svc" not found Jan 20 17:16:05 crc kubenswrapper[4558]: E0120 17:16:05.159512 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-internal-tls-certs podName:f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a nodeName:}" failed. No retries permitted until 2026-01-20 17:16:06.159504803 +0000 UTC m=+2059.919842770 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-internal-tls-certs") pod "cinder-api-0" (UID: "f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a") : secret "cert-cinder-internal-svc" not found Jan 20 17:16:05 crc kubenswrapper[4558]: E0120 17:16:05.164245 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cinder-scripts: secret "cinder-scripts" not found Jan 20 17:16:05 crc kubenswrapper[4558]: E0120 17:16:05.164306 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-scripts podName:f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a nodeName:}" failed. No retries permitted until 2026-01-20 17:16:06.164288538 +0000 UTC m=+2059.924626506 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "scripts" (UniqueName: "kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-scripts") pod "cinder-api-0" (UID: "f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a") : secret "cinder-scripts" not found Jan 20 17:16:05 crc kubenswrapper[4558]: I0120 17:16:05.169422 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/watcher-db-sync-z8lmb"] Jan 20 17:16:05 crc kubenswrapper[4558]: I0120 17:16:05.171769 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-7601-account-create-update-wx6mf"] Jan 20 17:16:05 crc kubenswrapper[4558]: I0120 17:16:05.231351 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cwm5h\" (UniqueName: \"kubernetes.io/projected/4b51c4ea-1df2-42b3-9aa7-d69471c234ed-kube-api-access-cwm5h\") pod \"nova-api-542c-account-create-update-fknl6\" (UID: \"4b51c4ea-1df2-42b3-9aa7-d69471c234ed\") " pod="openstack-kuttl-tests/nova-api-542c-account-create-update-fknl6" Jan 20 17:16:05 crc kubenswrapper[4558]: E0120 17:16:05.257584 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a07ebe0308220308f5b0687be7bf7284b0986dfc78e64ccb177087d9a970bb3f" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Jan 20 17:16:05 crc kubenswrapper[4558]: E0120 17:16:05.303506 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a07ebe0308220308f5b0687be7bf7284b0986dfc78e64ccb177087d9a970bb3f" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Jan 20 17:16:05 crc kubenswrapper[4558]: E0120 17:16:05.303560 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/ovn-northd-0" podUID="e8370668-8857-4be3-bcaa-bae8d6cdd158" containerName="ovn-northd" Jan 20 17:16:05 crc kubenswrapper[4558]: I0120 17:16:05.309002 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d9khb\" (UniqueName: \"kubernetes.io/projected/5cdda1de-c1bb-45a1-a67b-9fe6e8d699df-kube-api-access-d9khb\") pod \"cinder-7601-account-create-update-wx6mf\" (UID: \"5cdda1de-c1bb-45a1-a67b-9fe6e8d699df\") " pod="openstack-kuttl-tests/cinder-7601-account-create-update-wx6mf" Jan 20 17:16:05 crc kubenswrapper[4558]: I0120 17:16:05.341285 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-542c-account-create-update-szpdm"] Jan 20 17:16:05 crc kubenswrapper[4558]: E0120 17:16:05.367464 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:16:05 crc kubenswrapper[4558]: E0120 17:16:05.367520 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ef3c342e-679b-4a2e-94df-82adf4200290-combined-ca-bundle podName:ef3c342e-679b-4a2e-94df-82adf4200290 nodeName:}" failed. No retries permitted until 2026-01-20 17:16:07.367508022 +0000 UTC m=+2061.127845989 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/ef3c342e-679b-4a2e-94df-82adf4200290-combined-ca-bundle") pod "ovsdbserver-nb-0" (UID: "ef3c342e-679b-4a2e-94df-82adf4200290") : secret "combined-ca-bundle" not found Jan 20 17:16:05 crc kubenswrapper[4558]: I0120 17:16:05.373230 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-7601-account-create-update-9h885"] Jan 20 17:16:05 crc kubenswrapper[4558]: I0120 17:16:05.381296 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-d62f-account-create-update-8xjrt" Jan 20 17:16:05 crc kubenswrapper[4558]: I0120 17:16:05.416217 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-db-sync-6r4hq"] Jan 20 17:16:05 crc kubenswrapper[4558]: I0120 17:16:05.432624 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-542c-account-create-update-fknl6" Jan 20 17:16:05 crc kubenswrapper[4558]: I0120 17:16:05.474432 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovsdbserver-nb-0" podUID="ef3c342e-679b-4a2e-94df-82adf4200290" containerName="ovsdbserver-nb" containerID="cri-o://33152c6067fbcddb0b361284c671bb49a76a4cb01da62629d12aee5d3c00f3dc" gracePeriod=300 Jan 20 17:16:05 crc kubenswrapper[4558]: I0120 17:16:05.475292 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-7601-account-create-update-wx6mf" Jan 20 17:16:05 crc kubenswrapper[4558]: I0120 17:16:05.511223 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-e79a-account-create-update-b7lf6"] Jan 20 17:16:05 crc kubenswrapper[4558]: I0120 17:16:05.602614 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-d62f-account-create-update-pmq2x"] Jan 20 17:16:05 crc kubenswrapper[4558]: I0120 17:16:05.656055 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/placement-e79a-account-create-update-b7lf6"] Jan 20 17:16:05 crc kubenswrapper[4558]: I0120 17:16:05.675113 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-542c-account-create-update-szpdm"] Jan 20 17:16:05 crc kubenswrapper[4558]: I0120 17:16:05.688042 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-db-sync-skf6m"] Jan 20 17:16:05 crc kubenswrapper[4558]: I0120 17:16:05.700916 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-db-sync-6r4hq"] Jan 20 17:16:05 crc kubenswrapper[4558]: I0120 17:16:05.713607 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-d62f-account-create-update-pmq2x"] Jan 20 17:16:05 crc kubenswrapper[4558]: E0120 17:16:05.720335 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:16:05 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:16:05 crc kubenswrapper[4558]: Jan 20 17:16:05 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:16:05 crc kubenswrapper[4558]: Jan 20 17:16:05 crc kubenswrapper[4558]: export 
DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:16:05 crc kubenswrapper[4558]: Jan 20 17:16:05 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:16:05 crc kubenswrapper[4558]: Jan 20 17:16:05 crc kubenswrapper[4558]: if [ -n "barbican" ]; then Jan 20 17:16:05 crc kubenswrapper[4558]: GRANT_DATABASE="barbican" Jan 20 17:16:05 crc kubenswrapper[4558]: else Jan 20 17:16:05 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:16:05 crc kubenswrapper[4558]: fi Jan 20 17:16:05 crc kubenswrapper[4558]: Jan 20 17:16:05 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:16:05 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:16:05 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:16:05 crc kubenswrapper[4558]: # 3. create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:16:05 crc kubenswrapper[4558]: # support updates Jan 20 17:16:05 crc kubenswrapper[4558]: Jan 20 17:16:05 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:16:05 crc kubenswrapper[4558]: E0120 17:16:05.721755 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"barbican-db-secret\\\" not found\"" pod="openstack-kuttl-tests/barbican-5773-account-create-update-qmwj7" podUID="61a672e2-963e-4428-bb97-e2177ac10c06" Jan 20 17:16:05 crc kubenswrapper[4558]: I0120 17:16:05.727418 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-7601-account-create-update-9h885"] Jan 20 17:16:05 crc kubenswrapper[4558]: I0120 17:16:05.738862 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-db-sync-skf6m"] Jan 20 17:16:05 crc kubenswrapper[4558]: I0120 17:16:05.744425 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-531b-account-create-update-994sx"] Jan 20 17:16:05 crc kubenswrapper[4558]: I0120 17:16:05.756407 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-nb-0_ef3c342e-679b-4a2e-94df-82adf4200290/ovsdbserver-nb/0.log" Jan 20 17:16:05 crc kubenswrapper[4558]: I0120 17:16:05.756458 4558 generic.go:334] "Generic (PLEG): container finished" podID="ef3c342e-679b-4a2e-94df-82adf4200290" containerID="8e3ee6cc7b998c32fac77b9ad7200546c9a3386eaad38097f2d7e10239440726" exitCode=2 Jan 20 17:16:05 crc kubenswrapper[4558]: I0120 17:16:05.756724 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"ef3c342e-679b-4a2e-94df-82adf4200290","Type":"ContainerDied","Data":"8e3ee6cc7b998c32fac77b9ad7200546c9a3386eaad38097f2d7e10239440726"} Jan 20 17:16:05 crc kubenswrapper[4558]: I0120 17:16:05.756755 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"ef3c342e-679b-4a2e-94df-82adf4200290","Type":"ContainerDied","Data":"33152c6067fbcddb0b361284c671bb49a76a4cb01da62629d12aee5d3c00f3dc"} Jan 20 17:16:05 crc kubenswrapper[4558]: I0120 17:16:05.756474 4558 generic.go:334] "Generic (PLEG): container finished" podID="ef3c342e-679b-4a2e-94df-82adf4200290" containerID="33152c6067fbcddb0b361284c671bb49a76a4cb01da62629d12aee5d3c00f3dc" exitCode=143 Jan 20 17:16:05 crc kubenswrapper[4558]: I0120 17:16:05.764810 4558 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack-kuttl-tests/barbican-5773-account-create-update-qmwj7" event={"ID":"61a672e2-963e-4428-bb97-e2177ac10c06","Type":"ContainerStarted","Data":"7d9db7f66647291925c63ae526cfdcf84b09a6cff6065c71778661a618565484"} Jan 20 17:16:05 crc kubenswrapper[4558]: I0120 17:16:05.766359 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:16:05 crc kubenswrapper[4558]: I0120 17:16:05.766674 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovsdbserver-sb-0" podUID="8635d21f-4563-42ce-a3ab-4c67653d4dee" containerName="openstack-network-exporter" containerID="cri-o://4d5b12f2c407a5d33bf76ac52ac15d916d0ebbaa277f7ad8812327a1b03f357f" gracePeriod=300 Jan 20 17:16:05 crc kubenswrapper[4558]: E0120 17:16:05.772256 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:16:05 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:16:05 crc kubenswrapper[4558]: Jan 20 17:16:05 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:16:05 crc kubenswrapper[4558]: Jan 20 17:16:05 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:16:05 crc kubenswrapper[4558]: Jan 20 17:16:05 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:16:05 crc kubenswrapper[4558]: Jan 20 17:16:05 crc kubenswrapper[4558]: if [ -n "barbican" ]; then Jan 20 17:16:05 crc kubenswrapper[4558]: GRANT_DATABASE="barbican" Jan 20 17:16:05 crc kubenswrapper[4558]: else Jan 20 17:16:05 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:16:05 crc kubenswrapper[4558]: fi Jan 20 17:16:05 crc kubenswrapper[4558]: Jan 20 17:16:05 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:16:05 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:16:05 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:16:05 crc kubenswrapper[4558]: # 3. 
create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:16:05 crc kubenswrapper[4558]: # support updates Jan 20 17:16:05 crc kubenswrapper[4558]: Jan 20 17:16:05 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:16:05 crc kubenswrapper[4558]: E0120 17:16:05.773772 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"barbican-db-secret\\\" not found\"" pod="openstack-kuttl-tests/barbican-5773-account-create-update-qmwj7" podUID="61a672e2-963e-4428-bb97-e2177ac10c06" Jan 20 17:16:05 crc kubenswrapper[4558]: I0120 17:16:05.789282 4558 generic.go:334] "Generic (PLEG): container finished" podID="e8370668-8857-4be3-bcaa-bae8d6cdd158" containerID="642cff711dbe4d54cbee09e66959eb32d2d26fed71a8ef14e847e8bed60f8ee8" exitCode=2 Jan 20 17:16:05 crc kubenswrapper[4558]: I0120 17:16:05.789321 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"e8370668-8857-4be3-bcaa-bae8d6cdd158","Type":"ContainerDied","Data":"642cff711dbe4d54cbee09e66959eb32d2d26fed71a8ef14e847e8bed60f8ee8"} Jan 20 17:16:05 crc kubenswrapper[4558]: I0120 17:16:05.799175 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell0-531b-account-create-update-994sx"] Jan 20 17:16:05 crc kubenswrapper[4558]: I0120 17:16:05.814370 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-storage-0"] Jan 20 17:16:05 crc kubenswrapper[4558]: I0120 17:16:05.815158 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" containerName="account-server" containerID="cri-o://d34ab3fc8aadd821e92e2bf92349870b0e59665705b0c279da24491935dbfa40" gracePeriod=30 Jan 20 17:16:05 crc kubenswrapper[4558]: I0120 17:16:05.818469 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" containerName="swift-recon-cron" containerID="cri-o://99c4bacb5b9768a4caf100f0cb1002d9cece8b806e2823da791412ee816ffda8" gracePeriod=30 Jan 20 17:16:05 crc kubenswrapper[4558]: I0120 17:16:05.818537 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" containerName="rsync" containerID="cri-o://f85ec25adaf01df7a7853545852faa186e28a1806e5769092b47b3631e9f6a39" gracePeriod=30 Jan 20 17:16:05 crc kubenswrapper[4558]: I0120 17:16:05.818577 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" containerName="object-expirer" containerID="cri-o://a05f333c41b9d8ef7c02ce57e5a66f44cf679c3dd3e8cbc037a0abf4cb9c1076" gracePeriod=30 Jan 20 17:16:05 crc kubenswrapper[4558]: I0120 17:16:05.818611 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" containerName="object-updater" containerID="cri-o://351e496453cf020b285bf45cc107bc71f86bd776035bd828428d72e1c7062529" gracePeriod=30 Jan 20 17:16:05 crc kubenswrapper[4558]: I0120 17:16:05.818649 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" 
podUID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" containerName="object-auditor" containerID="cri-o://e9011c98afeb74bd61067467d261960e71d5f4ce02439018a751b87779a9a042" gracePeriod=30 Jan 20 17:16:05 crc kubenswrapper[4558]: I0120 17:16:05.818680 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" containerName="object-replicator" containerID="cri-o://0ffad205c7e0b9a3e7a3ed7b7494d0c934e33af468fd39ec2025c4bc685f9b2a" gracePeriod=30 Jan 20 17:16:05 crc kubenswrapper[4558]: I0120 17:16:05.818709 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" containerName="object-server" containerID="cri-o://e8487e2e852bfbcd349f6ab70c8f1c9163850c213b76cd2ed1af8305cfe8bbf6" gracePeriod=30 Jan 20 17:16:05 crc kubenswrapper[4558]: I0120 17:16:05.818736 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" containerName="container-updater" containerID="cri-o://b448c6813dc21e1609badba9c414f16cfa90d4bc879f828fc03dc5e2d8c5c01d" gracePeriod=30 Jan 20 17:16:05 crc kubenswrapper[4558]: I0120 17:16:05.818768 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" containerName="container-auditor" containerID="cri-o://f4843ed0e8d1373bb2683da97ab873974d82c40ca96c66ac0e816208a3870873" gracePeriod=30 Jan 20 17:16:05 crc kubenswrapper[4558]: I0120 17:16:05.818796 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" containerName="container-replicator" containerID="cri-o://42f3e6080a5aaa7c6b331d0120550bd3df0ded5cc2d89acdcce42e051c81d082" gracePeriod=30 Jan 20 17:16:05 crc kubenswrapper[4558]: I0120 17:16:05.818830 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" containerName="container-server" containerID="cri-o://0264294b8782deaf18a7c21e66a3cd60ef8bc25c146077c5f8dd36370f4b9f31" gracePeriod=30 Jan 20 17:16:05 crc kubenswrapper[4558]: I0120 17:16:05.818866 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" containerName="account-replicator" containerID="cri-o://287de52d7a1cbdfd30b38ebc5ce6836c6e7462c638fb7b5cb4514ff2fd994ca5" gracePeriod=30 Jan 20 17:16:05 crc kubenswrapper[4558]: I0120 17:16:05.818862 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" containerName="account-auditor" containerID="cri-o://a3d60956ff7af8b2370d1374fe03e956eb5d0ac246c55b421bb4276908799a3f" gracePeriod=30 Jan 20 17:16:05 crc kubenswrapper[4558]: I0120 17:16:05.818983 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" containerName="account-reaper" containerID="cri-o://3042c88a8b0acc604598545abaf566b25ac21482df62b0a3a116f13f8566a2af" gracePeriod=30 Jan 20 17:16:05 crc kubenswrapper[4558]: I0120 17:16:05.874845 4558 kubelet.go:2437] "SyncLoop DELETE" 
source="api" pods=["openstack-kuttl-tests/nova-cell1-aad3-account-create-update-tbsnm"] Jan 20 17:16:05 crc kubenswrapper[4558]: I0120 17:16:05.899659 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-db-sync-xmmz7"] Jan 20 17:16:05 crc kubenswrapper[4558]: I0120 17:16:05.909153 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-aad3-account-create-update-tbsnm"] Jan 20 17:16:05 crc kubenswrapper[4558]: I0120 17:16:05.922094 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-db-sync-xmmz7"] Jan 20 17:16:05 crc kubenswrapper[4558]: I0120 17:16:05.931242 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-db-sync-c95c5"] Jan 20 17:16:05 crc kubenswrapper[4558]: I0120 17:16:05.940235 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/placement-db-sync-c95c5"] Jan 20 17:16:05 crc kubenswrapper[4558]: I0120 17:16:05.950228 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-db-sync-5wdp7"] Jan 20 17:16:05 crc kubenswrapper[4558]: I0120 17:16:05.983544 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-db-sync-5wdp7"] Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.013815 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-cell-mapping-qhsxm"] Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.030872 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovsdbserver-sb-0" podUID="8635d21f-4563-42ce-a3ab-4c67653d4dee" containerName="ovsdbserver-sb" containerID="cri-o://86bcdd3af3dc36b18b179d5cfb21ad164a541e5d79260e3b531c6ec4c20e63e1" gracePeriod=300 Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.046741 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell0-cell-mapping-qhsxm"] Jan 20 17:16:06 crc kubenswrapper[4558]: E0120 17:16:06.064139 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:16:06 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:16:06 crc kubenswrapper[4558]: Jan 20 17:16:06 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:16:06 crc kubenswrapper[4558]: Jan 20 17:16:06 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:16:06 crc kubenswrapper[4558]: Jan 20 17:16:06 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:16:06 crc kubenswrapper[4558]: Jan 20 17:16:06 crc kubenswrapper[4558]: if [ -n "watcher" ]; then Jan 20 17:16:06 crc kubenswrapper[4558]: GRANT_DATABASE="watcher" Jan 20 17:16:06 crc kubenswrapper[4558]: else Jan 20 17:16:06 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:16:06 crc kubenswrapper[4558]: fi Jan 20 17:16:06 crc kubenswrapper[4558]: Jan 20 17:16:06 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:16:06 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:16:06 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:16:06 crc kubenswrapper[4558]: # 3. 
create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:16:06 crc kubenswrapper[4558]: # support updates Jan 20 17:16:06 crc kubenswrapper[4558]: Jan 20 17:16:06 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.069671 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-cell-mapping-8qtw4"] Jan 20 17:16:06 crc kubenswrapper[4558]: E0120 17:16:06.071953 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"watcher-db-secret\\\" not found\"" pod="openstack-kuttl-tests/watcher-be54-account-create-update-jd6tl" podUID="b66dc001-a49e-439f-ad5f-eb6ed96dc37f" Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.077654 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-ndwh7"] Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.081121 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-ndwh7" Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.089002 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-cell-mapping-8qtw4"] Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.114198 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-ndwh7"] Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.124782 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/watcher-decision-engine-0"] Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.124999 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/watcher-decision-engine-0" podUID="2510d24d-b593-4196-92bc-ddde09cc7c15" containerName="watcher-decision-engine" containerID="cri-o://f630bf27700ad23f76cb69b14ae9eed65e06889873b8b21234996b7a87609328" gracePeriod=30 Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.140491 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.140763 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="5588d9e2-2f89-4abb-94de-76b5791582a5" containerName="glance-log" containerID="cri-o://f7e6285aee41adbd48413910cd7a5c3aa34306a43105d42dd032ba571f9b7d5c" gracePeriod=30 Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.140910 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="5588d9e2-2f89-4abb-94de-76b5791582a5" containerName="glance-httpd" containerID="cri-o://674fe7739106de999aea93678b41abfb64307a7d4593b865b7537759b3ee32a7" gracePeriod=30 Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.149127 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.149337 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="a9ada454-f6fa-4ea4-b386-3ddbcd37f233" containerName="glance-log" containerID="cri-o://5259e35b7300917470585b85e57577b5c54cf041fcc22c4114363695b0632f21" 
gracePeriod=30 Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.149770 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="a9ada454-f6fa-4ea4-b386-3ddbcd37f233" containerName="glance-httpd" containerID="cri-o://dbed1dfb06d84c003e0890032b6a972606b43286eee0111f21ef16dfa9e94a1d" gracePeriod=30 Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.175621 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-5773-account-create-update-qmwj7"] Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.214796 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.215888 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-scheduler-0" podUID="3072fee5-c73d-407d-80c7-c376628ec545" containerName="cinder-scheduler" containerID="cri-o://fa635aef0a0203fdb13831ee491f982a0ac51b1bc7f7d5c51955065fd5631a37" gracePeriod=30 Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.216358 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-scheduler-0" podUID="3072fee5-c73d-407d-80c7-c376628ec545" containerName="probe" containerID="cri-o://599766d523b289496126b3e65bad734ac36d67a7eee51faf425f5873eea60397" gracePeriod=30 Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.220816 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ef393209-80db-43c7-a749-3ec7826f2123-config\") pod \"dnsmasq-dnsmasq-84b9f45d47-ndwh7\" (UID: \"ef393209-80db-43c7-a749-3ec7826f2123\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-ndwh7" Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.220881 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w6kpd\" (UniqueName: \"kubernetes.io/projected/ef393209-80db-43c7-a749-3ec7826f2123-kube-api-access-w6kpd\") pod \"dnsmasq-dnsmasq-84b9f45d47-ndwh7\" (UID: \"ef393209-80db-43c7-a749-3ec7826f2123\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-ndwh7" Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.221088 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/ef393209-80db-43c7-a749-3ec7826f2123-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-84b9f45d47-ndwh7\" (UID: \"ef393209-80db-43c7-a749-3ec7826f2123\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-ndwh7" Jan 20 17:16:06 crc kubenswrapper[4558]: E0120 17:16:06.221251 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cinder-config-data: secret "cinder-config-data" not found Jan 20 17:16:06 crc kubenswrapper[4558]: E0120 17:16:06.221312 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-config-data podName:f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a nodeName:}" failed. No retries permitted until 2026-01-20 17:16:08.221294833 +0000 UTC m=+2061.981632799 (durationBeforeRetry 2s). 
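The account-create-update script dumps above end in CreateContainerConfigError because the referenced Secrets ("barbican-db-secret", "watcher-db-secret") do not exist yet, presumably the source of the DatabasePassword environment variable the script insists on. The statements piped into $MYSQL_CMD are elided from the log, but the script's own comments state the approach: create the account with CREATE USER (MySQL 8 no longer creates users implicitly on GRANT), apply password/TLS with ALTER USER so reruns update in place, then GRANT on the target database. Purely as an illustration of that pattern, not the operator's actual statements, a sketch might look like the following (DB_HOST and DB_USER are made-up names; root authentication, handled by mysql_root_auth.sh in the real job, is omitted):

    # Hypothetical sketch of the CREATE/ALTER/GRANT pattern described in the
    # logged comments; not the elided statements themselves.
    DB_HOST=openstack.openstack-kuttl-tests.svc DB_USER=barbican GRANT_DATABASE=barbican
    : "${DatabasePassword:?Please specify a DatabasePassword variable.}"
    mysql -h "$DB_HOST" -u root -P 3306 <<EOSQL
    CREATE USER IF NOT EXISTS '${DB_USER}'@'%';
    ALTER USER '${DB_USER}'@'%' IDENTIFIED BY '${DatabasePassword}';
    GRANT ALL PRIVILEGES ON ${GRANT_DATABASE}.* TO '${DB_USER}'@'%';
    EOSQL

Splitting the work between CREATE USER IF NOT EXISTS and ALTER USER keeps the job idempotent across reruns on both MariaDB and MySQL, which is the compatibility concern the comments call out.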
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-config-data") pod "cinder-api-0" (UID: "f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a") : secret "cinder-config-data" not found Jan 20 17:16:06 crc kubenswrapper[4558]: E0120 17:16:06.221357 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cinder-scripts: secret "cinder-scripts" not found Jan 20 17:16:06 crc kubenswrapper[4558]: E0120 17:16:06.221378 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-scripts podName:f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a nodeName:}" failed. No retries permitted until 2026-01-20 17:16:08.221369322 +0000 UTC m=+2061.981707289 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "scripts" (UniqueName: "kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-scripts") pod "cinder-api-0" (UID: "f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a") : secret "cinder-scripts" not found Jan 20 17:16:06 crc kubenswrapper[4558]: E0120 17:16:06.221415 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 20 17:16:06 crc kubenswrapper[4558]: E0120 17:16:06.221433 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/fd2edfa4-790f-49d7-9e32-29571e490aaf-config-data podName:fd2edfa4-790f-49d7-9e32-29571e490aaf nodeName:}" failed. No retries permitted until 2026-01-20 17:16:08.221428053 +0000 UTC m=+2061.981766021 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/fd2edfa4-790f-49d7-9e32-29571e490aaf-config-data") pod "rabbitmq-server-0" (UID: "fd2edfa4-790f-49d7-9e32-29571e490aaf") : configmap "rabbitmq-config-data" not found Jan 20 17:16:06 crc kubenswrapper[4558]: E0120 17:16:06.221483 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:16:06 crc kubenswrapper[4558]: E0120 17:16:06.221500 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-combined-ca-bundle podName:f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a nodeName:}" failed. No retries permitted until 2026-01-20 17:16:08.22149525 +0000 UTC m=+2061.981833216 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-combined-ca-bundle") pod "cinder-api-0" (UID: "f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a") : secret "combined-ca-bundle" not found Jan 20 17:16:06 crc kubenswrapper[4558]: E0120 17:16:06.221528 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-cinder-public-svc: secret "cert-cinder-public-svc" not found Jan 20 17:16:06 crc kubenswrapper[4558]: E0120 17:16:06.221546 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-public-tls-certs podName:f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a nodeName:}" failed. No retries permitted until 2026-01-20 17:16:08.221541376 +0000 UTC m=+2061.981879343 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-public-tls-certs") pod "cinder-api-0" (UID: "f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a") : secret "cert-cinder-public-svc" not found Jan 20 17:16:06 crc kubenswrapper[4558]: E0120 17:16:06.221587 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-cinder-internal-svc: secret "cert-cinder-internal-svc" not found Jan 20 17:16:06 crc kubenswrapper[4558]: E0120 17:16:06.221602 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-internal-tls-certs podName:f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a nodeName:}" failed. No retries permitted until 2026-01-20 17:16:08.221597912 +0000 UTC m=+2061.981935880 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-internal-tls-certs") pod "cinder-api-0" (UID: "f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a") : secret "cert-cinder-internal-svc" not found Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.241604 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/watcher-api-0"] Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.241858 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/watcher-api-0" podUID="0a49fae8-fd33-4a50-b967-e8e8cc48731a" containerName="watcher-api-log" containerID="cri-o://8d223b6be945d7320575fde68951e2dcf11d7174379c849a669c1180f13b9071" gracePeriod=30 Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.242303 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/watcher-api-0" podUID="0a49fae8-fd33-4a50-b967-e8e8cc48731a" containerName="watcher-api" containerID="cri-o://f92d43e68f181ffa640787e9d41e5040d775ba561a1cacb4acbc03876430f695" gracePeriod=30 Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.259382 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/watcher-applier-0"] Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.259548 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/watcher-applier-0" podUID="30b9f169-eef8-4a30-b543-83dda735da70" containerName="watcher-applier" containerID="cri-o://146969194ab0a40e402393f07d4594aaad069b32a10f5e882b7b5d9c1bed6628" gracePeriod=30 Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.279231 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/watcher-db-create-4k5sd"] Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.297423 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-db-create-4tdmg"] Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.302594 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/watcher-db-create-4k5sd"] Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.314572 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-db-create-4tdmg"] Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.324303 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/ef393209-80db-43c7-a749-3ec7826f2123-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-84b9f45d47-ndwh7\" (UID: \"ef393209-80db-43c7-a749-3ec7826f2123\") " 
pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-ndwh7" Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.324400 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ef393209-80db-43c7-a749-3ec7826f2123-config\") pod \"dnsmasq-dnsmasq-84b9f45d47-ndwh7\" (UID: \"ef393209-80db-43c7-a749-3ec7826f2123\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-ndwh7" Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.324459 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w6kpd\" (UniqueName: \"kubernetes.io/projected/ef393209-80db-43c7-a749-3ec7826f2123-kube-api-access-w6kpd\") pod \"dnsmasq-dnsmasq-84b9f45d47-ndwh7\" (UID: \"ef393209-80db-43c7-a749-3ec7826f2123\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-ndwh7" Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.326152 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/ef393209-80db-43c7-a749-3ec7826f2123-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-84b9f45d47-ndwh7\" (UID: \"ef393209-80db-43c7-a749-3ec7826f2123\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-ndwh7" Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.326387 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ef393209-80db-43c7-a749-3ec7826f2123-config\") pod \"dnsmasq-dnsmasq-84b9f45d47-ndwh7\" (UID: \"ef393209-80db-43c7-a749-3ec7826f2123\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-ndwh7" Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.341982 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.342391 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-api-0" podUID="f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a" containerName="cinder-api-log" containerID="cri-o://1e85d135d8cc85e410ab61877d8a2508b9c4604a7dac00b92f5266230868a2eb" gracePeriod=30 Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.342580 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-api-0" podUID="f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a" containerName="cinder-api" containerID="cri-o://46a944f926e2a66f4adf5877139aadf5d8d647d7a08c085ceffe4a3ae8061c0b" gracePeriod=30 Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.367616 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w6kpd\" (UniqueName: \"kubernetes.io/projected/ef393209-80db-43c7-a749-3ec7826f2123-kube-api-access-w6kpd\") pod \"dnsmasq-dnsmasq-84b9f45d47-ndwh7\" (UID: \"ef393209-80db-43c7-a749-3ec7826f2123\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-ndwh7" Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.377475 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/watcher-be54-account-create-update-jd6tl"] Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.421767 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-7ddd44bcf8-9tj9c"] Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.422113 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/placement-7ddd44bcf8-9tj9c" podUID="249a3706-31a6-4fb2-9a3f-94700d9ae30e" 
containerName="placement-log" containerID="cri-o://5a1fe60313509d6c2bf1b4b977c2854c74261997efa724134b9eef9555b92ec0" gracePeriod=30 Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.423469 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/placement-7ddd44bcf8-9tj9c" podUID="249a3706-31a6-4fb2-9a3f-94700d9ae30e" containerName="placement-api" containerID="cri-o://1d078bc9b7292d0036875780ff82d54713ce56fd9923a103f4645e0f8a09e9ff" gracePeriod=30 Jan 20 17:16:06 crc kubenswrapper[4558]: E0120 17:16:06.424177 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:16:06 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:16:06 crc kubenswrapper[4558]: Jan 20 17:16:06 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:16:06 crc kubenswrapper[4558]: Jan 20 17:16:06 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:16:06 crc kubenswrapper[4558]: Jan 20 17:16:06 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:16:06 crc kubenswrapper[4558]: Jan 20 17:16:06 crc kubenswrapper[4558]: if [ -n "neutron" ]; then Jan 20 17:16:06 crc kubenswrapper[4558]: GRANT_DATABASE="neutron" Jan 20 17:16:06 crc kubenswrapper[4558]: else Jan 20 17:16:06 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:16:06 crc kubenswrapper[4558]: fi Jan 20 17:16:06 crc kubenswrapper[4558]: Jan 20 17:16:06 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:16:06 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:16:06 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:16:06 crc kubenswrapper[4558]: # 3. 
create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:16:06 crc kubenswrapper[4558]: # support updates Jan 20 17:16:06 crc kubenswrapper[4558]: Jan 20 17:16:06 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:16:06 crc kubenswrapper[4558]: E0120 17:16:06.429358 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"neutron-db-secret\\\" not found\"" pod="openstack-kuttl-tests/neutron-d62f-account-create-update-8xjrt" podUID="23341c92-6342-4e5f-904c-17fdff2841c5" Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.485500 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-db-create-5fghl"] Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.495343 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-857895d98b-n66x9"] Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.496234 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-857895d98b-n66x9" podUID="32c4dbf1-2dfe-44a7-be21-1415017ca20c" containerName="neutron-httpd" containerID="cri-o://bfe3f187246a119dce5b9bd953345bf403b08cd980efbe013f04ba0712f25420" gracePeriod=30 Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.496410 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-857895d98b-n66x9" podUID="32c4dbf1-2dfe-44a7-be21-1415017ca20c" containerName="neutron-api" containerID="cri-o://40fb072bac172dc7253b14ca577f8224ce05af40e481c08769f4d2e454f5196a" gracePeriod=30 Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.507635 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-db-create-5fghl"] Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.525696 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-5773-account-create-update-qmwj7"] Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.533341 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-ndwh7" Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.533726 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/watcher-be54-account-create-update-jd6tl"] Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.539670 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-db-create-8nrk4"] Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.546288 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/placement-db-create-8nrk4"] Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.557658 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-worker-55f98f9877-gm2wm"] Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.557872 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-worker-55f98f9877-gm2wm" podUID="462f58d4-b41a-45b8-aa8a-003bfe30e625" containerName="barbican-worker-log" containerID="cri-o://696c95ac455228cd89e273ea6e2bcb78c3bf7063a8f5ebba19cde538e34d64bb" gracePeriod=30 Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.558254 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-worker-55f98f9877-gm2wm" podUID="462f58d4-b41a-45b8-aa8a-003bfe30e625" containerName="barbican-worker" containerID="cri-o://0da15b1363218841887085f4193f9590c3888bf6f9f12345ba5c89c89755a3f7" gracePeriod=30 Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.591289 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="170b963c-2b54-4497-82b5-03ac18e07ffe" path="/var/lib/kubelet/pods/170b963c-2b54-4497-82b5-03ac18e07ffe/volumes" Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.613638 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="26dff2aa-11c9-494a-bf96-9ed83afd07b6" path="/var/lib/kubelet/pods/26dff2aa-11c9-494a-bf96-9ed83afd07b6/volumes" Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.614213 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="279f250a-2578-4aa2-9ca7-31b35bc0b791" path="/var/lib/kubelet/pods/279f250a-2578-4aa2-9ca7-31b35bc0b791/volumes" Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.614791 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ff7ded6-4fd1-42cc-9999-eea7e800be38" path="/var/lib/kubelet/pods/3ff7ded6-4fd1-42cc-9999-eea7e800be38/volumes" Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.615323 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4e87c463-6ff6-4a56-a207-b6b86578ec36" path="/var/lib/kubelet/pods/4e87c463-6ff6-4a56-a207-b6b86578ec36/volumes" Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.616937 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4ecd38e8-120d-4e5b-9649-405b6068e9d1" path="/var/lib/kubelet/pods/4ecd38e8-120d-4e5b-9649-405b6068e9d1/volumes" Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.617430 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4f36762c-fdfd-45ac-a5b1-4be2bd718a9b" path="/var/lib/kubelet/pods/4f36762c-fdfd-45ac-a5b1-4be2bd718a9b/volumes" Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.617944 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6906ea2d-0a88-47cd-9c23-3e1c40320286" 
path="/var/lib/kubelet/pods/6906ea2d-0a88-47cd-9c23-3e1c40320286/volumes" Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.618991 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7869482b-42ff-4c97-84b3-3f9f84ba29f5" path="/var/lib/kubelet/pods/7869482b-42ff-4c97-84b3-3f9f84ba29f5/volumes" Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.619761 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a7b45248-f499-4b04-9591-e7268e39ac5c" path="/var/lib/kubelet/pods/a7b45248-f499-4b04-9591-e7268e39ac5c/volumes" Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.620298 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bdbe1471-99ee-4ffa-b596-d8ebb4674c6d" path="/var/lib/kubelet/pods/bdbe1471-99ee-4ffa-b596-d8ebb4674c6d/volumes" Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.624390 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c3bca81a-32b0-4c02-90c0-ab10bb1eb835" path="/var/lib/kubelet/pods/c3bca81a-32b0-4c02-90c0-ab10bb1eb835/volumes" Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.624986 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c7d7d3af-d2d4-404c-ae30-a8802ea8709a" path="/var/lib/kubelet/pods/c7d7d3af-d2d4-404c-ae30-a8802ea8709a/volumes" Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.625519 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d0da5348-6caa-4cc9-b170-9de34ebda4a1" path="/var/lib/kubelet/pods/d0da5348-6caa-4cc9-b170-9de34ebda4a1/volumes" Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.629316 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="daf7e77a-752b-41c8-8b3d-ac04bfb5fb0d" path="/var/lib/kubelet/pods/daf7e77a-752b-41c8-8b3d-ac04bfb5fb0d/volumes" Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.629833 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dc21aa2e-3cb0-43c7-b1d9-650741794867" path="/var/lib/kubelet/pods/dc21aa2e-3cb0-43c7-b1d9-650741794867/volumes" Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.634776 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ddaf057e-7323-49e4-a1c5-0f91ad0aaaaa" path="/var/lib/kubelet/pods/ddaf057e-7323-49e4-a1c5-0f91ad0aaaaa/volumes" Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.635977 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e6f2b04e-81d0-4840-aa61-97174ccff959" path="/var/lib/kubelet/pods/e6f2b04e-81d0-4840-aa61-97174ccff959/volumes" Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.636492 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fb9a7708-e8da-47ae-a2c7-a6550d399c92" path="/var/lib/kubelet/pods/fb9a7708-e8da-47ae-a2c7-a6550d399c92/volumes" Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.643084 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-psddp"] Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.643137 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.643507 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="70dda975-7e0b-40a4-a92a-675be53560b7" containerName="nova-metadata-log" containerID="cri-o://4d6bd322347a5bd72fd3102a79147d9af8b5b4497067a83772ce8835798883b0" gracePeriod=30 Jan 20 17:16:06 crc 
kubenswrapper[4558]: I0120 17:16:06.644001 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="70dda975-7e0b-40a4-a92a-675be53560b7" containerName="nova-metadata-metadata" containerID="cri-o://dc90c73cc956157f0d2205b7cbb05b8859c8423df552df5ea7b67e88690ae584" gracePeriod=30 Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.684700 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.684949 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="158b4a4e-78aa-4813-a00e-abddfb7214ef" containerName="nova-api-log" containerID="cri-o://0d8f4bd8e6d346771ef889450d9cc94bd4e4a03f37ab45706759418add0250d9" gracePeriod=30 Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.685414 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="158b4a4e-78aa-4813-a00e-abddfb7214ef" containerName="nova-api-api" containerID="cri-o://32341c380c826a7aa830f2d1cd35459c6a706f48a875899fa1c9c906b440f63c" gracePeriod=30 Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.703810 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-75484ddb58-9p7fq"] Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.704366 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-keystone-listener-75484ddb58-9p7fq" podUID="8dfaaf1f-540b-4c8b-b93c-1e3556c37801" containerName="barbican-keystone-listener-log" containerID="cri-o://bbac2ee5bf98eddd33a60a5edd0a1dac188b6c3de922ce151b7900626fe0fff8" gracePeriod=30 Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.704540 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-keystone-listener-75484ddb58-9p7fq" podUID="8dfaaf1f-540b-4c8b-b93c-1e3556c37801" containerName="barbican-keystone-listener" containerID="cri-o://ac12e2c480e51bbc2f445d7182e6e37c4208d79ce61c920b8055cf83b75b4e84" gracePeriod=30 Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.724522 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-api-6587ff9c6b-8wmpk"] Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.724723 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-api-6587ff9c6b-8wmpk" podUID="9116eb6c-0630-4022-9c43-8c8793e08922" containerName="barbican-api-log" containerID="cri-o://6da9a00e2025f693c02a7cb6b25aca4e9c3ecc4fc8e7c3494f4026506bdec6c1" gracePeriod=30 Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.724985 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-api-6587ff9c6b-8wmpk" podUID="9116eb6c-0630-4022-9c43-8c8793e08922" containerName="barbican-api" containerID="cri-o://30ace2ed0b9d212ff18f8956aaa8ea0cdaa3a1c7b26e2fe661e7f78b0f3795a2" gracePeriod=30 Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.729436 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.737781 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstackclient" Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.742740 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-d62f-account-create-update-8xjrt"] Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.771335 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/alertmanager-metric-storage-0"] Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.772280 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/alertmanager-metric-storage-0" podUID="13a0a262-6f56-4cf9-9b0d-85110bcff1e7" containerName="config-reloader" containerID="cri-o://abbc56b6e1300066bf6f3c76afd3d47d0ea00a3c7a4a4d427f6e57e82969bee0" gracePeriod=120 Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.771591 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/alertmanager-metric-storage-0" podUID="13a0a262-6f56-4cf9-9b0d-85110bcff1e7" containerName="alertmanager" containerID="cri-o://626d2d61214153aec0449b76e316d8177bdd3708c258717f7715a91a609543f1" gracePeriod=120 Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.784546 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/prometheus-metric-storage-0"] Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.786139 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/rabbitmq-server-0" podUID="fd2edfa4-790f-49d7-9e32-29571e490aaf" containerName="rabbitmq" containerID="cri-o://2e090c322fd27930306ecae5c98d22a52137ae21234138d9f2df2a9c227f3a1b" gracePeriod=604800 Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.786624 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/prometheus-metric-storage-0" podUID="e87a4844-8a90-4965-878f-d95aa09c47bb" containerName="prometheus" containerID="cri-o://83ca1736c91920d790e234d6dd7cb02e325a0698ad1ac3ce1a25312d1f3c51a8" gracePeriod=600 Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.786923 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/prometheus-metric-storage-0" podUID="e87a4844-8a90-4965-878f-d95aa09c47bb" containerName="thanos-sidecar" containerID="cri-o://812f4fcb56b8658b4bbf8cdf87243bd17984898af6dea1b34072f67d46707f0b" gracePeriod=600 Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.786986 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/prometheus-metric-storage-0" podUID="e87a4844-8a90-4965-878f-d95aa09c47bb" containerName="config-reloader" containerID="cri-o://cee5946fda9135814e788d08c6619a590258ddf4784db12ce17b315442c52074" gracePeriod=600 Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.793657 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-nb-0_ef3c342e-679b-4a2e-94df-82adf4200290/ovsdbserver-nb/0.log" Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.793725 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.819282 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-db-create-f8nhx"] Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.826202 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell0-db-create-f8nhx"] Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.828358 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-db-create-2vwsk"] Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.837558 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-db-create-2vwsk"] Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.840442 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-db-create-pz7pt"] Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.850221 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-db-create-pz7pt"] Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.856113 4558 generic.go:334] "Generic (PLEG): container finished" podID="70dda975-7e0b-40a4-a92a-675be53560b7" containerID="4d6bd322347a5bd72fd3102a79147d9af8b5b4497067a83772ce8835798883b0" exitCode=143 Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.856216 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"70dda975-7e0b-40a4-a92a-675be53560b7","Type":"ContainerDied","Data":"4d6bd322347a5bd72fd3102a79147d9af8b5b4497067a83772ce8835798883b0"} Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.859285 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-7601-account-create-update-wx6mf"] Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.867475 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/bad8aeff-00a6-4456-bbdf-937441ed04c0-openstack-config-secret\") pod \"bad8aeff-00a6-4456-bbdf-937441ed04c0\" (UID: \"bad8aeff-00a6-4456-bbdf-937441ed04c0\") " Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.867665 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vr595\" (UniqueName: \"kubernetes.io/projected/bad8aeff-00a6-4456-bbdf-937441ed04c0-kube-api-access-vr595\") pod \"bad8aeff-00a6-4456-bbdf-937441ed04c0\" (UID: \"bad8aeff-00a6-4456-bbdf-937441ed04c0\") " Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.867754 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bad8aeff-00a6-4456-bbdf-937441ed04c0-combined-ca-bundle\") pod \"bad8aeff-00a6-4456-bbdf-937441ed04c0\" (UID: \"bad8aeff-00a6-4456-bbdf-937441ed04c0\") " Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.867884 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/bad8aeff-00a6-4456-bbdf-937441ed04c0-openstack-config\") pod \"bad8aeff-00a6-4456-bbdf-937441ed04c0\" (UID: \"bad8aeff-00a6-4456-bbdf-937441ed04c0\") " Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.873204 4558 generic.go:334] "Generic (PLEG): container finished" podID="691cf1c1-d617-489b-b0cd-1364328d6c60" containerID="62082d9ed12f1ee2a30cc05cd78b8b133ba9df78efc6ec3471a763937ad7c720" 
exitCode=1 Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.873987 4558 scope.go:117] "RemoveContainer" containerID="62082d9ed12f1ee2a30cc05cd78b8b133ba9df78efc6ec3471a763937ad7c720" Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.874811 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-d62f-account-create-update-8xjrt"] Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.874830 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-psddp" event={"ID":"691cf1c1-d617-489b-b0cd-1364328d6c60","Type":"ContainerDied","Data":"62082d9ed12f1ee2a30cc05cd78b8b133ba9df78efc6ec3471a763937ad7c720"} Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.874848 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-psddp" event={"ID":"691cf1c1-d617-489b-b0cd-1364328d6c60","Type":"ContainerStarted","Data":"40d909a66ed44cd6bb4de563faaed560da7fb0bdf5438b93b480b4efa77a2cda"} Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.888438 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.888823 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" podUID="ad371747-3ff0-49b0-b2a6-33d7bb7cd16b" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://a7f915ce1e5f59e4f41a36080909e1ccc6e29dbe8d312e443b304657ef70a93d" gracePeriod=30 Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.914296 4558 generic.go:334] "Generic (PLEG): container finished" podID="0a49fae8-fd33-4a50-b967-e8e8cc48731a" containerID="8d223b6be945d7320575fde68951e2dcf11d7174379c849a669c1180f13b9071" exitCode=143 Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.914388 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-db-create-zhtn8"] Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.914414 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/watcher-api-0" event={"ID":"0a49fae8-fd33-4a50-b967-e8e8cc48731a","Type":"ContainerDied","Data":"8d223b6be945d7320575fde68951e2dcf11d7174379c849a669c1180f13b9071"} Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.919109 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bad8aeff-00a6-4456-bbdf-937441ed04c0-kube-api-access-vr595" (OuterVolumeSpecName: "kube-api-access-vr595") pod "bad8aeff-00a6-4456-bbdf-937441ed04c0" (UID: "bad8aeff-00a6-4456-bbdf-937441ed04c0"). InnerVolumeSpecName "kube-api-access-vr595". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.932479 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-db-create-zhtn8"] Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.940209 4558 generic.go:334] "Generic (PLEG): container finished" podID="bad8aeff-00a6-4456-bbdf-937441ed04c0" containerID="b209eadd3c0946d42cd2fa2ad03f5f87bda4b804b2952272d092f8c6dbcda131" exitCode=137 Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.940315 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstackclient" Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.940860 4558 scope.go:117] "RemoveContainer" containerID="b209eadd3c0946d42cd2fa2ad03f5f87bda4b804b2952272d092f8c6dbcda131" Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.942737 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-542c-account-create-update-fknl6"] Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.950957 4558 generic.go:334] "Generic (PLEG): container finished" podID="462f58d4-b41a-45b8-aa8a-003bfe30e625" containerID="696c95ac455228cd89e273ea6e2bcb78c3bf7063a8f5ebba19cde538e34d64bb" exitCode=143 Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.951009 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-55f98f9877-gm2wm" event={"ID":"462f58d4-b41a-45b8-aa8a-003bfe30e625","Type":"ContainerDied","Data":"696c95ac455228cd89e273ea6e2bcb78c3bf7063a8f5ebba19cde538e34d64bb"} Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.960934 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-db-sync-srppb"] Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.966568 4558 generic.go:334] "Generic (PLEG): container finished" podID="32c4dbf1-2dfe-44a7-be21-1415017ca20c" containerID="bfe3f187246a119dce5b9bd953345bf403b08cd980efbe013f04ba0712f25420" exitCode=0 Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.966630 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-857895d98b-n66x9" event={"ID":"32c4dbf1-2dfe-44a7-be21-1415017ca20c","Type":"ContainerDied","Data":"bfe3f187246a119dce5b9bd953345bf403b08cd980efbe013f04ba0712f25420"} Jan 20 17:16:06 crc kubenswrapper[4558]: E0120 17:16:06.974564 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:16:06 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:16:06 crc kubenswrapper[4558]: Jan 20 17:16:06 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:16:06 crc kubenswrapper[4558]: Jan 20 17:16:06 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:16:06 crc kubenswrapper[4558]: Jan 20 17:16:06 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:16:06 crc kubenswrapper[4558]: Jan 20 17:16:06 crc kubenswrapper[4558]: if [ -n "nova_api" ]; then Jan 20 17:16:06 crc kubenswrapper[4558]: GRANT_DATABASE="nova_api" Jan 20 17:16:06 crc kubenswrapper[4558]: else Jan 20 17:16:06 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:16:06 crc kubenswrapper[4558]: fi Jan 20 17:16:06 crc kubenswrapper[4558]: Jan 20 17:16:06 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:16:06 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:16:06 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:16:06 crc kubenswrapper[4558]: # 3. 
create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:16:06 crc kubenswrapper[4558]: # support updates Jan 20 17:16:06 crc kubenswrapper[4558]: Jan 20 17:16:06 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.974901 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ef3c342e-679b-4a2e-94df-82adf4200290-scripts\") pod \"ef3c342e-679b-4a2e-94df-82adf4200290\" (UID: \"ef3c342e-679b-4a2e-94df-82adf4200290\") " Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.974944 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/ef3c342e-679b-4a2e-94df-82adf4200290-metrics-certs-tls-certs\") pod \"ef3c342e-679b-4a2e-94df-82adf4200290\" (UID: \"ef3c342e-679b-4a2e-94df-82adf4200290\") " Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.974992 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zhx8v\" (UniqueName: \"kubernetes.io/projected/ef3c342e-679b-4a2e-94df-82adf4200290-kube-api-access-zhx8v\") pod \"ef3c342e-679b-4a2e-94df-82adf4200290\" (UID: \"ef3c342e-679b-4a2e-94df-82adf4200290\") " Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.975015 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef3c342e-679b-4a2e-94df-82adf4200290-combined-ca-bundle\") pod \"ef3c342e-679b-4a2e-94df-82adf4200290\" (UID: \"ef3c342e-679b-4a2e-94df-82adf4200290\") " Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.975076 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/ef3c342e-679b-4a2e-94df-82adf4200290-ovsdbserver-nb-tls-certs\") pod \"ef3c342e-679b-4a2e-94df-82adf4200290\" (UID: \"ef3c342e-679b-4a2e-94df-82adf4200290\") " Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.975145 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ef3c342e-679b-4a2e-94df-82adf4200290-config\") pod \"ef3c342e-679b-4a2e-94df-82adf4200290\" (UID: \"ef3c342e-679b-4a2e-94df-82adf4200290\") " Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.975188 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-nb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ef3c342e-679b-4a2e-94df-82adf4200290\" (UID: \"ef3c342e-679b-4a2e-94df-82adf4200290\") " Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.975342 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/ef3c342e-679b-4a2e-94df-82adf4200290-ovsdb-rundir\") pod \"ef3c342e-679b-4a2e-94df-82adf4200290\" (UID: \"ef3c342e-679b-4a2e-94df-82adf4200290\") " Jan 20 17:16:06 crc kubenswrapper[4558]: E0120 17:16:06.975688 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"nova-api-db-secret\\\" not found\"" pod="openstack-kuttl-tests/nova-api-542c-account-create-update-fknl6" podUID="4b51c4ea-1df2-42b3-9aa7-d69471c234ed" Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.975824 4558 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vr595\" (UniqueName: \"kubernetes.io/projected/bad8aeff-00a6-4456-bbdf-937441ed04c0-kube-api-access-vr595\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.976464 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ef3c342e-679b-4a2e-94df-82adf4200290-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "ef3c342e-679b-4a2e-94df-82adf4200290" (UID: "ef3c342e-679b-4a2e-94df-82adf4200290"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.976613 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ef3c342e-679b-4a2e-94df-82adf4200290-config" (OuterVolumeSpecName: "config") pod "ef3c342e-679b-4a2e-94df-82adf4200290" (UID: "ef3c342e-679b-4a2e-94df-82adf4200290"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.977416 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-db-create-5wn2p"] Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.977949 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ef3c342e-679b-4a2e-94df-82adf4200290-scripts" (OuterVolumeSpecName: "scripts") pod "ef3c342e-679b-4a2e-94df-82adf4200290" (UID: "ef3c342e-679b-4a2e-94df-82adf4200290"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.984053 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-sb-0_8635d21f-4563-42ce-a3ab-4c67653d4dee/ovsdbserver-sb/0.log" Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.984136 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:16:06 crc kubenswrapper[4558]: E0120 17:16:06.986058 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:16:06 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:16:06 crc kubenswrapper[4558]: Jan 20 17:16:06 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:16:06 crc kubenswrapper[4558]: Jan 20 17:16:06 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:16:06 crc kubenswrapper[4558]: Jan 20 17:16:06 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:16:06 crc kubenswrapper[4558]: Jan 20 17:16:06 crc kubenswrapper[4558]: if [ -n "cinder" ]; then Jan 20 17:16:06 crc kubenswrapper[4558]: GRANT_DATABASE="cinder" Jan 20 17:16:06 crc kubenswrapper[4558]: else Jan 20 17:16:06 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:16:06 crc kubenswrapper[4558]: fi Jan 20 17:16:06 crc kubenswrapper[4558]: Jan 20 17:16:06 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:16:06 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:16:06 crc kubenswrapper[4558]: # 2. 
MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:16:06 crc kubenswrapper[4558]: # 3. create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:16:06 crc kubenswrapper[4558]: # support updates Jan 20 17:16:06 crc kubenswrapper[4558]: Jan 20 17:16:06 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.991944 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-db-sync-srppb"] Jan 20 17:16:06 crc kubenswrapper[4558]: I0120 17:16:06.998932 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-db-create-5wn2p"] Jan 20 17:16:06 crc kubenswrapper[4558]: E0120 17:16:06.999008 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"cinder-db-secret\\\" not found\"" pod="openstack-kuttl-tests/cinder-7601-account-create-update-wx6mf" podUID="5cdda1de-c1bb-45a1-a67b-9fe6e8d699df" Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.008775 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-db-sync-nsn26"] Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.008919 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-db-sync-nsn26"] Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.015707 4558 generic.go:334] "Generic (PLEG): container finished" podID="f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a" containerID="1e85d135d8cc85e410ab61877d8a2508b9c4604a7dac00b92f5266230868a2eb" exitCode=143 Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.015813 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a","Type":"ContainerDied","Data":"1e85d135d8cc85e410ab61877d8a2508b9c4604a7dac00b92f5266230868a2eb"} Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.016483 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ef3c342e-679b-4a2e-94df-82adf4200290-kube-api-access-zhx8v" (OuterVolumeSpecName: "kube-api-access-zhx8v") pod "ef3c342e-679b-4a2e-94df-82adf4200290" (UID: "ef3c342e-679b-4a2e-94df-82adf4200290"). InnerVolumeSpecName "kube-api-access-zhx8v". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.017254 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bad8aeff-00a6-4456-bbdf-937441ed04c0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bad8aeff-00a6-4456-bbdf-937441ed04c0" (UID: "bad8aeff-00a6-4456-bbdf-937441ed04c0"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.021235 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.021718 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podUID="217a2ebc-8331-4a6f-9113-2c813563a2b8" containerName="nova-cell1-conductor-conductor" containerID="cri-o://521ed414fb8023645d2152fb8453d458e32e8655c7e7d2cd8ec86ea164244611" gracePeriod=30 Jan 20 17:16:07 crc kubenswrapper[4558]: E0120 17:16:07.084462 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="146969194ab0a40e402393f07d4594aaad069b32a10f5e882b7b5d9c1bed6628" cmd=["/usr/bin/pgrep","-r","DRST","watcher-applier"] Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.084653 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage06-crc" (OuterVolumeSpecName: "ovndbcluster-nb-etc-ovn") pod "ef3c342e-679b-4a2e-94df-82adf4200290" (UID: "ef3c342e-679b-4a2e-94df-82adf4200290"). InnerVolumeSpecName "local-storage06-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.100707 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bad8aeff-00a6-4456-bbdf-937441ed04c0-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "bad8aeff-00a6-4456-bbdf-937441ed04c0" (UID: "bad8aeff-00a6-4456-bbdf-937441ed04c0"). InnerVolumeSpecName "openstack-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.103392 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/8635d21f-4563-42ce-a3ab-4c67653d4dee-metrics-certs-tls-certs\") pod \"8635d21f-4563-42ce-a3ab-4c67653d4dee\" (UID: \"8635d21f-4563-42ce-a3ab-4c67653d4dee\") " Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.103439 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8635d21f-4563-42ce-a3ab-4c67653d4dee-combined-ca-bundle\") pod \"8635d21f-4563-42ce-a3ab-4c67653d4dee\" (UID: \"8635d21f-4563-42ce-a3ab-4c67653d4dee\") " Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.103505 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-sb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"8635d21f-4563-42ce-a3ab-4c67653d4dee\" (UID: \"8635d21f-4563-42ce-a3ab-4c67653d4dee\") " Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.103602 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xx4lb\" (UniqueName: \"kubernetes.io/projected/8635d21f-4563-42ce-a3ab-4c67653d4dee-kube-api-access-xx4lb\") pod \"8635d21f-4563-42ce-a3ab-4c67653d4dee\" (UID: \"8635d21f-4563-42ce-a3ab-4c67653d4dee\") " Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.103629 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8635d21f-4563-42ce-a3ab-4c67653d4dee-config\") pod \"8635d21f-4563-42ce-a3ab-4c67653d4dee\" (UID: \"8635d21f-4563-42ce-a3ab-4c67653d4dee\") " Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.103683 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8635d21f-4563-42ce-a3ab-4c67653d4dee-scripts\") pod \"8635d21f-4563-42ce-a3ab-4c67653d4dee\" (UID: \"8635d21f-4563-42ce-a3ab-4c67653d4dee\") " Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.103771 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/8635d21f-4563-42ce-a3ab-4c67653d4dee-ovsdbserver-sb-tls-certs\") pod \"8635d21f-4563-42ce-a3ab-4c67653d4dee\" (UID: \"8635d21f-4563-42ce-a3ab-4c67653d4dee\") " Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.103869 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/8635d21f-4563-42ce-a3ab-4c67653d4dee-ovsdb-rundir\") pod \"8635d21f-4563-42ce-a3ab-4c67653d4dee\" (UID: \"8635d21f-4563-42ce-a3ab-4c67653d4dee\") " Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.125245 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.125549 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="c4cb69c1-3d51-4b13-a025-b37c986ffede" containerName="nova-scheduler-scheduler" containerID="cri-o://022773dc93b3f1b894358c04d0057cb8d8780ca1ef654626b973b40167c3532e" gracePeriod=30 Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.126571 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for 
volume "kubernetes.io/configmap/8635d21f-4563-42ce-a3ab-4c67653d4dee-config" (OuterVolumeSpecName: "config") pod "8635d21f-4563-42ce-a3ab-4c67653d4dee" (UID: "8635d21f-4563-42ce-a3ab-4c67653d4dee"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.132260 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8635d21f-4563-42ce-a3ab-4c67653d4dee-scripts" (OuterVolumeSpecName: "scripts") pod "8635d21f-4563-42ce-a3ab-4c67653d4dee" (UID: "8635d21f-4563-42ce-a3ab-4c67653d4dee"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:16:07 crc kubenswrapper[4558]: E0120 17:16:07.162621 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="146969194ab0a40e402393f07d4594aaad069b32a10f5e882b7b5d9c1bed6628" cmd=["/usr/bin/pgrep","-r","DRST","watcher-applier"] Jan 20 17:16:07 crc kubenswrapper[4558]: E0120 17:16:07.176373 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="146969194ab0a40e402393f07d4594aaad069b32a10f5e882b7b5d9c1bed6628" cmd=["/usr/bin/pgrep","-r","DRST","watcher-applier"] Jan 20 17:16:07 crc kubenswrapper[4558]: E0120 17:16:07.176474 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/watcher-applier-0" podUID="30b9f169-eef8-4a30-b543-83dda735da70" containerName="watcher-applier" Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.185235 4558 generic.go:334] "Generic (PLEG): container finished" podID="a9ada454-f6fa-4ea4-b386-3ddbcd37f233" containerID="5259e35b7300917470585b85e57577b5c54cf041fcc22c4114363695b0632f21" exitCode=143 Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.186756 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ef3c342e-679b-4a2e-94df-82adf4200290-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.186782 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zhx8v\" (UniqueName: \"kubernetes.io/projected/ef3c342e-679b-4a2e-94df-82adf4200290-kube-api-access-zhx8v\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.186793 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ef3c342e-679b-4a2e-94df-82adf4200290-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.186805 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bad8aeff-00a6-4456-bbdf-937441ed04c0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.186836 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" " Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.186847 4558 reconciler_common.go:293] "Volume 
detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8635d21f-4563-42ce-a3ab-4c67653d4dee-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.186861 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8635d21f-4563-42ce-a3ab-4c67653d4dee-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.186872 4558 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/bad8aeff-00a6-4456-bbdf-937441ed04c0-openstack-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.186881 4558 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/ef3c342e-679b-4a2e-94df-82adf4200290-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.187365 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"a9ada454-f6fa-4ea4-b386-3ddbcd37f233","Type":"ContainerDied","Data":"5259e35b7300917470585b85e57577b5c54cf041fcc22c4114363695b0632f21"} Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.197308 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-7601-account-create-update-wx6mf"] Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.200580 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8635d21f-4563-42ce-a3ab-4c67653d4dee-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "8635d21f-4563-42ce-a3ab-4c67653d4dee" (UID: "8635d21f-4563-42ce-a3ab-4c67653d4dee"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.208349 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bad8aeff-00a6-4456-bbdf-937441ed04c0-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "bad8aeff-00a6-4456-bbdf-937441ed04c0" (UID: "bad8aeff-00a6-4456-bbdf-937441ed04c0"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.225301 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "ovndbcluster-sb-etc-ovn") pod "8635d21f-4563-42ce-a3ab-4c67653d4dee" (UID: "8635d21f-4563-42ce-a3ab-4c67653d4dee"). InnerVolumeSpecName "local-storage04-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.225359 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8635d21f-4563-42ce-a3ab-4c67653d4dee-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8635d21f-4563-42ce-a3ab-4c67653d4dee" (UID: "8635d21f-4563-42ce-a3ab-4c67653d4dee"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.219512 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-542c-account-create-update-fknl6"] Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.265291 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef3c342e-679b-4a2e-94df-82adf4200290-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ef3c342e-679b-4a2e-94df-82adf4200290" (UID: "ef3c342e-679b-4a2e-94df-82adf4200290"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.270340 4558 scope.go:117] "RemoveContainer" containerID="b209eadd3c0946d42cd2fa2ad03f5f87bda4b804b2952272d092f8c6dbcda131" Jan 20 17:16:07 crc kubenswrapper[4558]: E0120 17:16:07.272304 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="f630bf27700ad23f76cb69b14ae9eed65e06889873b8b21234996b7a87609328" cmd=["/usr/bin/pgrep","-f","-r","DRST","watcher-decision-engine"] Jan 20 17:16:07 crc kubenswrapper[4558]: E0120 17:16:07.279531 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="f630bf27700ad23f76cb69b14ae9eed65e06889873b8b21234996b7a87609328" cmd=["/usr/bin/pgrep","-f","-r","DRST","watcher-decision-engine"] Jan 20 17:16:07 crc kubenswrapper[4558]: E0120 17:16:07.279615 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b209eadd3c0946d42cd2fa2ad03f5f87bda4b804b2952272d092f8c6dbcda131\": container with ID starting with b209eadd3c0946d42cd2fa2ad03f5f87bda4b804b2952272d092f8c6dbcda131 not found: ID does not exist" containerID="b209eadd3c0946d42cd2fa2ad03f5f87bda4b804b2952272d092f8c6dbcda131" Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.279639 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b209eadd3c0946d42cd2fa2ad03f5f87bda4b804b2952272d092f8c6dbcda131"} err="failed to get container status \"b209eadd3c0946d42cd2fa2ad03f5f87bda4b804b2952272d092f8c6dbcda131\": rpc error: code = NotFound desc = could not find container \"b209eadd3c0946d42cd2fa2ad03f5f87bda4b804b2952272d092f8c6dbcda131\": container with ID starting with b209eadd3c0946d42cd2fa2ad03f5f87bda4b804b2952272d092f8c6dbcda131 not found: ID does not exist" Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.283049 4558 generic.go:334] "Generic (PLEG): container finished" podID="249a3706-31a6-4fb2-9a3f-94700d9ae30e" containerID="5a1fe60313509d6c2bf1b4b977c2854c74261997efa724134b9eef9555b92ec0" exitCode=143 Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.283114 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-7ddd44bcf8-9tj9c" event={"ID":"249a3706-31a6-4fb2-9a3f-94700d9ae30e","Type":"ContainerDied","Data":"5a1fe60313509d6c2bf1b4b977c2854c74261997efa724134b9eef9555b92ec0"} Jan 20 17:16:07 crc kubenswrapper[4558]: W0120 17:16:07.288067 4558 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podef393209_80db_43c7_a749_3ec7826f2123.slice/crio-560e74d6ac97887570796d7f33795ec8548eb06564c454862ab5d2d8630bffd6 WatchSource:0}: Error finding container 560e74d6ac97887570796d7f33795ec8548eb06564c454862ab5d2d8630bffd6: Status 404 returned error can't find the container with id 560e74d6ac97887570796d7f33795ec8548eb06564c454862ab5d2d8630bffd6 Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.288951 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8635d21f-4563-42ce-a3ab-4c67653d4dee-kube-api-access-xx4lb" (OuterVolumeSpecName: "kube-api-access-xx4lb") pod "8635d21f-4563-42ce-a3ab-4c67653d4dee" (UID: "8635d21f-4563-42ce-a3ab-4c67653d4dee"). InnerVolumeSpecName "kube-api-access-xx4lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.319027 4558 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/8635d21f-4563-42ce-a3ab-4c67653d4dee-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.319058 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef3c342e-679b-4a2e-94df-82adf4200290-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.319070 4558 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/bad8aeff-00a6-4456-bbdf-937441ed04c0-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.319082 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8635d21f-4563-42ce-a3ab-4c67653d4dee-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.319106 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" " Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.319117 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xx4lb\" (UniqueName: \"kubernetes.io/projected/8635d21f-4563-42ce-a3ab-4c67653d4dee-kube-api-access-xx4lb\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:07 crc kubenswrapper[4558]: E0120 17:16:07.324424 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="f630bf27700ad23f76cb69b14ae9eed65e06889873b8b21234996b7a87609328" cmd=["/usr/bin/pgrep","-f","-r","DRST","watcher-decision-engine"] Jan 20 17:16:07 crc kubenswrapper[4558]: E0120 17:16:07.324514 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/watcher-decision-engine-0" podUID="2510d24d-b593-4196-92bc-ddde09cc7c15" containerName="watcher-decision-engine" Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.355815 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage06-crc" (UniqueName: "kubernetes.io/local-volume/local-storage06-crc") on node "crc" Jan 20 
17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.374886 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-ndwh7"] Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.382320 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.382561 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="544c8619-276c-4358-ac79-58df9a462173" containerName="ceilometer-central-agent" containerID="cri-o://aef9e92362cdebb57f6e6fec72040ef9ea0c3ebdec14cebff1a1ec4ae81e35a9" gracePeriod=30 Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.389293 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="544c8619-276c-4358-ac79-58df9a462173" containerName="proxy-httpd" containerID="cri-o://5e7f9a7df82f7908138cd4491a1924604163a709b2ab28b8825d5768baab4058" gracePeriod=30 Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.389366 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="544c8619-276c-4358-ac79-58df9a462173" containerName="sg-core" containerID="cri-o://821d851a2b03145528569e24bdf1e48df94429d66e29eac4da06f73e2d0ea57e" gracePeriod=30 Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.389404 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="544c8619-276c-4358-ac79-58df9a462173" containerName="ceilometer-notification-agent" containerID="cri-o://12b7e813e5ba4c8b347be431d52bb1ff48a52e3d5f90ccda95991eea8a6553ad" gracePeriod=30 Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.395508 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.395705 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/kube-state-metrics-0" podUID="6947f138-6d2f-40e2-a236-736d26d3a1e6" containerName="kube-state-metrics" containerID="cri-o://3019d79f9b53d96500bf161bbfffaf2fd1e86e0b8753d308b473cc6f0550db6e" gracePeriod=30 Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.399095 4558 generic.go:334] "Generic (PLEG): container finished" podID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" containerID="f85ec25adaf01df7a7853545852faa186e28a1806e5769092b47b3631e9f6a39" exitCode=0 Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.399145 4558 generic.go:334] "Generic (PLEG): container finished" podID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" containerID="a05f333c41b9d8ef7c02ce57e5a66f44cf679c3dd3e8cbc037a0abf4cb9c1076" exitCode=0 Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.399154 4558 generic.go:334] "Generic (PLEG): container finished" podID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" containerID="351e496453cf020b285bf45cc107bc71f86bd776035bd828428d72e1c7062529" exitCode=0 Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.399190 4558 generic.go:334] "Generic (PLEG): container finished" podID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" containerID="e9011c98afeb74bd61067467d261960e71d5f4ce02439018a751b87779a9a042" exitCode=0 Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.399201 4558 generic.go:334] "Generic (PLEG): container finished" podID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" 
containerID="0ffad205c7e0b9a3e7a3ed7b7494d0c934e33af468fd39ec2025c4bc685f9b2a" exitCode=0 Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.399209 4558 generic.go:334] "Generic (PLEG): container finished" podID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" containerID="e8487e2e852bfbcd349f6ab70c8f1c9163850c213b76cd2ed1af8305cfe8bbf6" exitCode=0 Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.399215 4558 generic.go:334] "Generic (PLEG): container finished" podID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" containerID="b448c6813dc21e1609badba9c414f16cfa90d4bc879f828fc03dc5e2d8c5c01d" exitCode=0 Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.399223 4558 generic.go:334] "Generic (PLEG): container finished" podID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" containerID="f4843ed0e8d1373bb2683da97ab873974d82c40ca96c66ac0e816208a3870873" exitCode=0 Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.399231 4558 generic.go:334] "Generic (PLEG): container finished" podID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" containerID="42f3e6080a5aaa7c6b331d0120550bd3df0ded5cc2d89acdcce42e051c81d082" exitCode=0 Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.399237 4558 generic.go:334] "Generic (PLEG): container finished" podID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" containerID="0264294b8782deaf18a7c21e66a3cd60ef8bc25c146077c5f8dd36370f4b9f31" exitCode=0 Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.399244 4558 generic.go:334] "Generic (PLEG): container finished" podID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" containerID="3042c88a8b0acc604598545abaf566b25ac21482df62b0a3a116f13f8566a2af" exitCode=0 Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.399266 4558 generic.go:334] "Generic (PLEG): container finished" podID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" containerID="a3d60956ff7af8b2370d1374fe03e956eb5d0ac246c55b421bb4276908799a3f" exitCode=0 Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.399274 4558 generic.go:334] "Generic (PLEG): container finished" podID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" containerID="287de52d7a1cbdfd30b38ebc5ce6836c6e7462c638fb7b5cb4514ff2fd994ca5" exitCode=0 Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.399283 4558 generic.go:334] "Generic (PLEG): container finished" podID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" containerID="d34ab3fc8aadd821e92e2bf92349870b0e59665705b0c279da24491935dbfa40" exitCode=0 Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.399395 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5","Type":"ContainerDied","Data":"f85ec25adaf01df7a7853545852faa186e28a1806e5769092b47b3631e9f6a39"} Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.399436 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5","Type":"ContainerDied","Data":"a05f333c41b9d8ef7c02ce57e5a66f44cf679c3dd3e8cbc037a0abf4cb9c1076"} Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.399448 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5","Type":"ContainerDied","Data":"351e496453cf020b285bf45cc107bc71f86bd776035bd828428d72e1c7062529"} Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.399470 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" 
event={"ID":"16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5","Type":"ContainerDied","Data":"e9011c98afeb74bd61067467d261960e71d5f4ce02439018a751b87779a9a042"} Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.399479 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5","Type":"ContainerDied","Data":"0ffad205c7e0b9a3e7a3ed7b7494d0c934e33af468fd39ec2025c4bc685f9b2a"} Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.399488 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5","Type":"ContainerDied","Data":"e8487e2e852bfbcd349f6ab70c8f1c9163850c213b76cd2ed1af8305cfe8bbf6"} Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.399498 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5","Type":"ContainerDied","Data":"b448c6813dc21e1609badba9c414f16cfa90d4bc879f828fc03dc5e2d8c5c01d"} Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.399506 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5","Type":"ContainerDied","Data":"f4843ed0e8d1373bb2683da97ab873974d82c40ca96c66ac0e816208a3870873"} Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.399514 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5","Type":"ContainerDied","Data":"42f3e6080a5aaa7c6b331d0120550bd3df0ded5cc2d89acdcce42e051c81d082"} Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.399522 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5","Type":"ContainerDied","Data":"0264294b8782deaf18a7c21e66a3cd60ef8bc25c146077c5f8dd36370f4b9f31"} Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.399537 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5","Type":"ContainerDied","Data":"3042c88a8b0acc604598545abaf566b25ac21482df62b0a3a116f13f8566a2af"} Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.399547 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5","Type":"ContainerDied","Data":"a3d60956ff7af8b2370d1374fe03e956eb5d0ac246c55b421bb4276908799a3f"} Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.399557 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5","Type":"ContainerDied","Data":"287de52d7a1cbdfd30b38ebc5ce6836c6e7462c638fb7b5cb4514ff2fd994ca5"} Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.399565 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5","Type":"ContainerDied","Data":"d34ab3fc8aadd821e92e2bf92349870b0e59665705b0c279da24491935dbfa40"} Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.427287 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 
17:16:07.433272 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/watcher-be54-account-create-update-jd6tl" event={"ID":"b66dc001-a49e-439f-ad5f-eb6ed96dc37f","Type":"ContainerStarted","Data":"3fc6aafe3d6f3ca575fb48988ed1a9a7bafff517fbd49c1f58cfa6b3908bbc28"} Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.459520 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-sb-0_8635d21f-4563-42ce-a3ab-4c67653d4dee/ovsdbserver-sb/0.log" Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.459584 4558 generic.go:334] "Generic (PLEG): container finished" podID="8635d21f-4563-42ce-a3ab-4c67653d4dee" containerID="4d5b12f2c407a5d33bf76ac52ac15d916d0ebbaa277f7ad8812327a1b03f357f" exitCode=2 Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.459599 4558 generic.go:334] "Generic (PLEG): container finished" podID="8635d21f-4563-42ce-a3ab-4c67653d4dee" containerID="86bcdd3af3dc36b18b179d5cfb21ad164a541e5d79260e3b531c6ec4c20e63e1" exitCode=143 Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.459671 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"8635d21f-4563-42ce-a3ab-4c67653d4dee","Type":"ContainerDied","Data":"4d5b12f2c407a5d33bf76ac52ac15d916d0ebbaa277f7ad8812327a1b03f357f"} Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.459698 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"8635d21f-4563-42ce-a3ab-4c67653d4dee","Type":"ContainerDied","Data":"86bcdd3af3dc36b18b179d5cfb21ad164a541e5d79260e3b531c6ec4c20e63e1"} Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.459735 4558 scope.go:117] "RemoveContainer" containerID="4d5b12f2c407a5d33bf76ac52ac15d916d0ebbaa277f7ad8812327a1b03f357f" Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.459877 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:16:07 crc kubenswrapper[4558]: E0120 17:16:07.461478 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:16:07 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:16:07 crc kubenswrapper[4558]: Jan 20 17:16:07 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:16:07 crc kubenswrapper[4558]: Jan 20 17:16:07 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:16:07 crc kubenswrapper[4558]: Jan 20 17:16:07 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:16:07 crc kubenswrapper[4558]: Jan 20 17:16:07 crc kubenswrapper[4558]: if [ -n "watcher" ]; then Jan 20 17:16:07 crc kubenswrapper[4558]: GRANT_DATABASE="watcher" Jan 20 17:16:07 crc kubenswrapper[4558]: else Jan 20 17:16:07 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:16:07 crc kubenswrapper[4558]: fi Jan 20 17:16:07 crc kubenswrapper[4558]: Jan 20 17:16:07 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:16:07 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:16:07 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:16:07 crc kubenswrapper[4558]: # 3. 
create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:16:07 crc kubenswrapper[4558]: # support updates Jan 20 17:16:07 crc kubenswrapper[4558]: Jan 20 17:16:07 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.463072 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-proxy-7c48456b-8z4pm"] Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.463300 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-proxy-7c48456b-8z4pm" podUID="fe030148-30ef-4bb7-bf94-8835a0707df1" containerName="proxy-httpd" containerID="cri-o://e3f4b05578fbd81c020b9ad602df15fafd661cd2db06f53de10dce624927e791" gracePeriod=30 Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.463470 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-proxy-7c48456b-8z4pm" podUID="fe030148-30ef-4bb7-bf94-8835a0707df1" containerName="proxy-server" containerID="cri-o://690711c809e51e7b043bae2985f6866cfd82e22825c519971e1f62af4518288a" gracePeriod=30 Jan 20 17:16:07 crc kubenswrapper[4558]: E0120 17:16:07.464808 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"watcher-db-secret\\\" not found\"" pod="openstack-kuttl-tests/watcher-be54-account-create-update-jd6tl" podUID="b66dc001-a49e-439f-ad5f-eb6ed96dc37f" Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.482354 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc" Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.494816 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-nb-0_ef3c342e-679b-4a2e-94df-82adf4200290/ovsdbserver-nb/0.log" Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.494888 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"ef3c342e-679b-4a2e-94df-82adf4200290","Type":"ContainerDied","Data":"f06caed6fd4ba7ff3885c96d3a4bf10f618cb49633d16f2b3d487a7de7bcfb65"} Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.494978 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.514128 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8635d21f-4563-42ce-a3ab-4c67653d4dee-ovsdbserver-sb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-sb-tls-certs") pod "8635d21f-4563-42ce-a3ab-4c67653d4dee" (UID: "8635d21f-4563-42ce-a3ab-4c67653d4dee"). InnerVolumeSpecName "ovsdbserver-sb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.514549 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-d62f-account-create-update-8xjrt" event={"ID":"23341c92-6342-4e5f-904c-17fdff2841c5","Type":"ContainerStarted","Data":"98c7600ecf6b84c78eb0310e54f222b82522b0ca6f461af596983ebc0749aa39"} Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.521265 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8635d21f-4563-42ce-a3ab-4c67653d4dee-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "8635d21f-4563-42ce-a3ab-4c67653d4dee" (UID: "8635d21f-4563-42ce-a3ab-4c67653d4dee"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.539708 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef3c342e-679b-4a2e-94df-82adf4200290-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "ef3c342e-679b-4a2e-94df-82adf4200290" (UID: "ef3c342e-679b-4a2e-94df-82adf4200290"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.539835 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef3c342e-679b-4a2e-94df-82adf4200290-ovsdbserver-nb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-nb-tls-certs") pod "ef3c342e-679b-4a2e-94df-82adf4200290" (UID: "ef3c342e-679b-4a2e-94df-82adf4200290"). InnerVolumeSpecName "ovsdbserver-nb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.546941 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/ef3c342e-679b-4a2e-94df-82adf4200290-metrics-certs-tls-certs\") pod \"ef3c342e-679b-4a2e-94df-82adf4200290\" (UID: \"ef3c342e-679b-4a2e-94df-82adf4200290\") " Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.547647 4558 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/8635d21f-4563-42ce-a3ab-4c67653d4dee-ovsdbserver-sb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.547744 4558 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/8635d21f-4563-42ce-a3ab-4c67653d4dee-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.547807 4558 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/ef3c342e-679b-4a2e-94df-82adf4200290-ovsdbserver-nb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.547856 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:07 crc kubenswrapper[4558]: W0120 17:16:07.548242 4558 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/ef3c342e-679b-4a2e-94df-82adf4200290/volumes/kubernetes.io~secret/metrics-certs-tls-certs Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 
17:16:07.548308 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef3c342e-679b-4a2e-94df-82adf4200290-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "ef3c342e-679b-4a2e-94df-82adf4200290" (UID: "ef3c342e-679b-4a2e-94df-82adf4200290"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:07 crc kubenswrapper[4558]: E0120 17:16:07.549668 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:16:07 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:16:07 crc kubenswrapper[4558]: Jan 20 17:16:07 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:16:07 crc kubenswrapper[4558]: Jan 20 17:16:07 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:16:07 crc kubenswrapper[4558]: Jan 20 17:16:07 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:16:07 crc kubenswrapper[4558]: Jan 20 17:16:07 crc kubenswrapper[4558]: if [ -n "neutron" ]; then Jan 20 17:16:07 crc kubenswrapper[4558]: GRANT_DATABASE="neutron" Jan 20 17:16:07 crc kubenswrapper[4558]: else Jan 20 17:16:07 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:16:07 crc kubenswrapper[4558]: fi Jan 20 17:16:07 crc kubenswrapper[4558]: Jan 20 17:16:07 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:16:07 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:16:07 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:16:07 crc kubenswrapper[4558]: # 3. 
create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:16:07 crc kubenswrapper[4558]: # support updates Jan 20 17:16:07 crc kubenswrapper[4558]: Jan 20 17:16:07 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.550338 4558 generic.go:334] "Generic (PLEG): container finished" podID="5588d9e2-2f89-4abb-94de-76b5791582a5" containerID="f7e6285aee41adbd48413910cd7a5c3aa34306a43105d42dd032ba571f9b7d5c" exitCode=143 Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.550906 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"5588d9e2-2f89-4abb-94de-76b5791582a5","Type":"ContainerDied","Data":"f7e6285aee41adbd48413910cd7a5c3aa34306a43105d42dd032ba571f9b7d5c"} Jan 20 17:16:07 crc kubenswrapper[4558]: E0120 17:16:07.550970 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"neutron-db-secret\\\" not found\"" pod="openstack-kuttl-tests/neutron-d62f-account-create-update-8xjrt" podUID="23341c92-6342-4e5f-904c-17fdff2841c5" Jan 20 17:16:07 crc kubenswrapper[4558]: E0120 17:16:07.582848 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:16:07 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:16:07 crc kubenswrapper[4558]: Jan 20 17:16:07 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:16:07 crc kubenswrapper[4558]: Jan 20 17:16:07 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:16:07 crc kubenswrapper[4558]: Jan 20 17:16:07 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:16:07 crc kubenswrapper[4558]: Jan 20 17:16:07 crc kubenswrapper[4558]: if [ -n "barbican" ]; then Jan 20 17:16:07 crc kubenswrapper[4558]: GRANT_DATABASE="barbican" Jan 20 17:16:07 crc kubenswrapper[4558]: else Jan 20 17:16:07 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:16:07 crc kubenswrapper[4558]: fi Jan 20 17:16:07 crc kubenswrapper[4558]: Jan 20 17:16:07 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:16:07 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:16:07 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:16:07 crc kubenswrapper[4558]: # 3. 
create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:16:07 crc kubenswrapper[4558]: # support updates Jan 20 17:16:07 crc kubenswrapper[4558]: Jan 20 17:16:07 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:16:07 crc kubenswrapper[4558]: E0120 17:16:07.587640 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"barbican-db-secret\\\" not found\"" pod="openstack-kuttl-tests/barbican-5773-account-create-update-qmwj7" podUID="61a672e2-963e-4428-bb97-e2177ac10c06" Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.622942 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.623231 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/memcached-0" podUID="fc9b3fed-9730-4199-b074-3f01cdc1b7ce" containerName="memcached" containerID="cri-o://e1569c77aa4164d84391079102afae404d6f206b7e90d3e21dd30d7b3d65062f" gracePeriod=30 Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.649684 4558 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/ef3c342e-679b-4a2e-94df-82adf4200290-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.678328 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-3f8b-account-create-update-8lrlw"] Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.701529 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-3f8b-account-create-update-8lrlw"] Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.720306 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-3f8b-account-create-update-pg4kh"] Jan 20 17:16:07 crc kubenswrapper[4558]: E0120 17:16:07.720717 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef3c342e-679b-4a2e-94df-82adf4200290" containerName="openstack-network-exporter" Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.720737 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef3c342e-679b-4a2e-94df-82adf4200290" containerName="openstack-network-exporter" Jan 20 17:16:07 crc kubenswrapper[4558]: E0120 17:16:07.720769 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8635d21f-4563-42ce-a3ab-4c67653d4dee" containerName="ovsdbserver-sb" Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.720775 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8635d21f-4563-42ce-a3ab-4c67653d4dee" containerName="ovsdbserver-sb" Jan 20 17:16:07 crc kubenswrapper[4558]: E0120 17:16:07.720782 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef3c342e-679b-4a2e-94df-82adf4200290" containerName="ovsdbserver-nb" Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.720789 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef3c342e-679b-4a2e-94df-82adf4200290" containerName="ovsdbserver-nb" Jan 20 17:16:07 crc kubenswrapper[4558]: E0120 17:16:07.720812 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8635d21f-4563-42ce-a3ab-4c67653d4dee" containerName="openstack-network-exporter" Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.720818 4558 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="8635d21f-4563-42ce-a3ab-4c67653d4dee" containerName="openstack-network-exporter" Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.721020 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8635d21f-4563-42ce-a3ab-4c67653d4dee" containerName="ovsdbserver-sb" Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.721034 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef3c342e-679b-4a2e-94df-82adf4200290" containerName="openstack-network-exporter" Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.721042 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef3c342e-679b-4a2e-94df-82adf4200290" containerName="ovsdbserver-nb" Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.721051 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8635d21f-4563-42ce-a3ab-4c67653d4dee" containerName="openstack-network-exporter" Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.721712 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-3f8b-account-create-update-pg4kh" Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.728472 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-db-secret" Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.731802 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-db-sync-5v4np"] Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.739036 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-db-sync-5v4np"] Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.744734 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-3f8b-account-create-update-pg4kh"] Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.751191 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-wcsxk"] Jan 20 17:16:07 crc kubenswrapper[4558]: E0120 17:16:07.751903 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 17:16:07 crc kubenswrapper[4558]: E0120 17:16:07.751951 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/61a672e2-963e-4428-bb97-e2177ac10c06-operator-scripts podName:61a672e2-963e-4428-bb97-e2177ac10c06 nodeName:}" failed. No retries permitted until 2026-01-20 17:16:08.251937915 +0000 UTC m=+2062.012275883 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/61a672e2-963e-4428-bb97-e2177ac10c06-operator-scripts") pod "barbican-5773-account-create-update-qmwj7" (UID: "61a672e2-963e-4428-bb97-e2177ac10c06") : configmap "openstack-scripts" not found Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.757626 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-wcsxk"] Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.759081 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-57f6f664bd-7h7jh"] Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.759283 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/keystone-57f6f664bd-7h7jh" podUID="5efe2c5d-bf53-462a-beb1-36cb940fc6a0" containerName="keystone-api" containerID="cri-o://9ebec879cfd3dcec199dea1e7738ccfa45af9b5be157ef55c3e4756d96cca6ba" gracePeriod=30 Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.765404 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.789317 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-db-create-zcw7r"] Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.796179 4558 scope.go:117] "RemoveContainer" containerID="86bcdd3af3dc36b18b179d5cfb21ad164a541e5d79260e3b531c6ec4c20e63e1" Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.800211 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-db-create-zcw7r"] Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.805549 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-3f8b-account-create-update-pg4kh"] Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.814258 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-psddp"] Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.841381 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.855723 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/31afa559-d026-4227-b6dc-44aa2796a781-operator-scripts\") pod \"keystone-3f8b-account-create-update-pg4kh\" (UID: \"31afa559-d026-4227-b6dc-44aa2796a781\") " pod="openstack-kuttl-tests/keystone-3f8b-account-create-update-pg4kh" Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.855790 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g22xf\" (UniqueName: \"kubernetes.io/projected/31afa559-d026-4227-b6dc-44aa2796a781-kube-api-access-g22xf\") pod \"keystone-3f8b-account-create-update-pg4kh\" (UID: \"31afa559-d026-4227-b6dc-44aa2796a781\") " pod="openstack-kuttl-tests/keystone-3f8b-account-create-update-pg4kh" Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.862607 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:16:07 crc kubenswrapper[4558]: E0120 17:16:07.864242 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[kube-api-access-g22xf operator-scripts], unattached volumes=[], failed to process volumes=[]: context 
canceled" pod="openstack-kuttl-tests/keystone-3f8b-account-create-update-pg4kh" podUID="31afa559-d026-4227-b6dc-44aa2796a781" Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.875210 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.877315 4558 scope.go:117] "RemoveContainer" containerID="4d5b12f2c407a5d33bf76ac52ac15d916d0ebbaa277f7ad8812327a1b03f357f" Jan 20 17:16:07 crc kubenswrapper[4558]: E0120 17:16:07.878758 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4d5b12f2c407a5d33bf76ac52ac15d916d0ebbaa277f7ad8812327a1b03f357f\": container with ID starting with 4d5b12f2c407a5d33bf76ac52ac15d916d0ebbaa277f7ad8812327a1b03f357f not found: ID does not exist" containerID="4d5b12f2c407a5d33bf76ac52ac15d916d0ebbaa277f7ad8812327a1b03f357f" Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.878785 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4d5b12f2c407a5d33bf76ac52ac15d916d0ebbaa277f7ad8812327a1b03f357f"} err="failed to get container status \"4d5b12f2c407a5d33bf76ac52ac15d916d0ebbaa277f7ad8812327a1b03f357f\": rpc error: code = NotFound desc = could not find container \"4d5b12f2c407a5d33bf76ac52ac15d916d0ebbaa277f7ad8812327a1b03f357f\": container with ID starting with 4d5b12f2c407a5d33bf76ac52ac15d916d0ebbaa277f7ad8812327a1b03f357f not found: ID does not exist" Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.878804 4558 scope.go:117] "RemoveContainer" containerID="86bcdd3af3dc36b18b179d5cfb21ad164a541e5d79260e3b531c6ec4c20e63e1" Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.880047 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:16:07 crc kubenswrapper[4558]: E0120 17:16:07.885740 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"86bcdd3af3dc36b18b179d5cfb21ad164a541e5d79260e3b531c6ec4c20e63e1\": container with ID starting with 86bcdd3af3dc36b18b179d5cfb21ad164a541e5d79260e3b531c6ec4c20e63e1 not found: ID does not exist" containerID="86bcdd3af3dc36b18b179d5cfb21ad164a541e5d79260e3b531c6ec4c20e63e1" Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.885771 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"86bcdd3af3dc36b18b179d5cfb21ad164a541e5d79260e3b531c6ec4c20e63e1"} err="failed to get container status \"86bcdd3af3dc36b18b179d5cfb21ad164a541e5d79260e3b531c6ec4c20e63e1\": rpc error: code = NotFound desc = could not find container \"86bcdd3af3dc36b18b179d5cfb21ad164a541e5d79260e3b531c6ec4c20e63e1\": container with ID starting with 86bcdd3af3dc36b18b179d5cfb21ad164a541e5d79260e3b531c6ec4c20e63e1 not found: ID does not exist" Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.885789 4558 scope.go:117] "RemoveContainer" containerID="8e3ee6cc7b998c32fac77b9ad7200546c9a3386eaad38097f2d7e10239440726" Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.943415 4558 scope.go:117] "RemoveContainer" containerID="33152c6067fbcddb0b361284c671bb49a76a4cb01da62629d12aee5d3c00f3dc" Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.959716 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/31afa559-d026-4227-b6dc-44aa2796a781-operator-scripts\") pod 
\"keystone-3f8b-account-create-update-pg4kh\" (UID: \"31afa559-d026-4227-b6dc-44aa2796a781\") " pod="openstack-kuttl-tests/keystone-3f8b-account-create-update-pg4kh" Jan 20 17:16:07 crc kubenswrapper[4558]: I0120 17:16:07.959805 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g22xf\" (UniqueName: \"kubernetes.io/projected/31afa559-d026-4227-b6dc-44aa2796a781-kube-api-access-g22xf\") pod \"keystone-3f8b-account-create-update-pg4kh\" (UID: \"31afa559-d026-4227-b6dc-44aa2796a781\") " pod="openstack-kuttl-tests/keystone-3f8b-account-create-update-pg4kh" Jan 20 17:16:07 crc kubenswrapper[4558]: E0120 17:16:07.960271 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 17:16:07 crc kubenswrapper[4558]: E0120 17:16:07.960341 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/31afa559-d026-4227-b6dc-44aa2796a781-operator-scripts podName:31afa559-d026-4227-b6dc-44aa2796a781 nodeName:}" failed. No retries permitted until 2026-01-20 17:16:08.460323974 +0000 UTC m=+2062.220661942 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/31afa559-d026-4227-b6dc-44aa2796a781-operator-scripts") pod "keystone-3f8b-account-create-update-pg4kh" (UID: "31afa559-d026-4227-b6dc-44aa2796a781") : configmap "openstack-scripts" not found Jan 20 17:16:07 crc kubenswrapper[4558]: E0120 17:16:07.964841 4558 projected.go:194] Error preparing data for projected volume kube-api-access-g22xf for pod openstack-kuttl-tests/keystone-3f8b-account-create-update-pg4kh: failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 17:16:07 crc kubenswrapper[4558]: E0120 17:16:07.965229 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/31afa559-d026-4227-b6dc-44aa2796a781-kube-api-access-g22xf podName:31afa559-d026-4227-b6dc-44aa2796a781 nodeName:}" failed. No retries permitted until 2026-01-20 17:16:08.465106347 +0000 UTC m=+2062.225444315 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-g22xf" (UniqueName: "kubernetes.io/projected/31afa559-d026-4227-b6dc-44aa2796a781-kube-api-access-g22xf") pod "keystone-3f8b-account-create-update-pg4kh" (UID: "31afa559-d026-4227-b6dc-44aa2796a781") : failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.029406 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/alertmanager-metric-storage-0" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.164394 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/openstack-galera-0" podUID="eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc" containerName="galera" containerID="cri-o://4c99017bce2cd4c2557c9e2d276409ac4a6b6a986805b29e970cc3af2a59f1df" gracePeriod=30 Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.165684 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/13a0a262-6f56-4cf9-9b0d-85110bcff1e7-config-volume\") pod \"13a0a262-6f56-4cf9-9b0d-85110bcff1e7\" (UID: \"13a0a262-6f56-4cf9-9b0d-85110bcff1e7\") " Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.165801 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/13a0a262-6f56-4cf9-9b0d-85110bcff1e7-tls-assets\") pod \"13a0a262-6f56-4cf9-9b0d-85110bcff1e7\" (UID: \"13a0a262-6f56-4cf9-9b0d-85110bcff1e7\") " Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.165936 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p7njw\" (UniqueName: \"kubernetes.io/projected/13a0a262-6f56-4cf9-9b0d-85110bcff1e7-kube-api-access-p7njw\") pod \"13a0a262-6f56-4cf9-9b0d-85110bcff1e7\" (UID: \"13a0a262-6f56-4cf9-9b0d-85110bcff1e7\") " Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.166002 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/13a0a262-6f56-4cf9-9b0d-85110bcff1e7-alertmanager-metric-storage-db\") pod \"13a0a262-6f56-4cf9-9b0d-85110bcff1e7\" (UID: \"13a0a262-6f56-4cf9-9b0d-85110bcff1e7\") " Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.166069 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/13a0a262-6f56-4cf9-9b0d-85110bcff1e7-config-out\") pod \"13a0a262-6f56-4cf9-9b0d-85110bcff1e7\" (UID: \"13a0a262-6f56-4cf9-9b0d-85110bcff1e7\") " Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.166149 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/13a0a262-6f56-4cf9-9b0d-85110bcff1e7-cluster-tls-config\") pod \"13a0a262-6f56-4cf9-9b0d-85110bcff1e7\" (UID: \"13a0a262-6f56-4cf9-9b0d-85110bcff1e7\") " Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.166231 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/13a0a262-6f56-4cf9-9b0d-85110bcff1e7-web-config\") pod \"13a0a262-6f56-4cf9-9b0d-85110bcff1e7\" (UID: \"13a0a262-6f56-4cf9-9b0d-85110bcff1e7\") " Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.168114 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/13a0a262-6f56-4cf9-9b0d-85110bcff1e7-alertmanager-metric-storage-db" (OuterVolumeSpecName: "alertmanager-metric-storage-db") pod "13a0a262-6f56-4cf9-9b0d-85110bcff1e7" (UID: "13a0a262-6f56-4cf9-9b0d-85110bcff1e7"). InnerVolumeSpecName "alertmanager-metric-storage-db". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.175555 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/13a0a262-6f56-4cf9-9b0d-85110bcff1e7-tls-assets" (OuterVolumeSpecName: "tls-assets") pod "13a0a262-6f56-4cf9-9b0d-85110bcff1e7" (UID: "13a0a262-6f56-4cf9-9b0d-85110bcff1e7"). InnerVolumeSpecName "tls-assets". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.175692 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/13a0a262-6f56-4cf9-9b0d-85110bcff1e7-config-out" (OuterVolumeSpecName: "config-out") pod "13a0a262-6f56-4cf9-9b0d-85110bcff1e7" (UID: "13a0a262-6f56-4cf9-9b0d-85110bcff1e7"). InnerVolumeSpecName "config-out". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.183266 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/13a0a262-6f56-4cf9-9b0d-85110bcff1e7-kube-api-access-p7njw" (OuterVolumeSpecName: "kube-api-access-p7njw") pod "13a0a262-6f56-4cf9-9b0d-85110bcff1e7" (UID: "13a0a262-6f56-4cf9-9b0d-85110bcff1e7"). InnerVolumeSpecName "kube-api-access-p7njw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.188862 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/13a0a262-6f56-4cf9-9b0d-85110bcff1e7-config-volume" (OuterVolumeSpecName: "config-volume") pod "13a0a262-6f56-4cf9-9b0d-85110bcff1e7" (UID: "13a0a262-6f56-4cf9-9b0d-85110bcff1e7"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.238610 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/13a0a262-6f56-4cf9-9b0d-85110bcff1e7-cluster-tls-config" (OuterVolumeSpecName: "cluster-tls-config") pod "13a0a262-6f56-4cf9-9b0d-85110bcff1e7" (UID: "13a0a262-6f56-4cf9-9b0d-85110bcff1e7"). InnerVolumeSpecName "cluster-tls-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:08 crc kubenswrapper[4558]: E0120 17:16:08.269520 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:16:08 crc kubenswrapper[4558]: E0120 17:16:08.269694 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-combined-ca-bundle podName:f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a nodeName:}" failed. No retries permitted until 2026-01-20 17:16:12.269676044 +0000 UTC m=+2066.030014011 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-combined-ca-bundle") pod "cinder-api-0" (UID: "f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a") : secret "combined-ca-bundle" not found Jan 20 17:16:08 crc kubenswrapper[4558]: E0120 17:16:08.270597 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-cinder-public-svc: secret "cert-cinder-public-svc" not found Jan 20 17:16:08 crc kubenswrapper[4558]: E0120 17:16:08.270860 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-public-tls-certs podName:f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a nodeName:}" failed. 
No retries permitted until 2026-01-20 17:16:12.270850343 +0000 UTC m=+2066.031188310 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-public-tls-certs") pod "cinder-api-0" (UID: "f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a") : secret "cert-cinder-public-svc" not found Jan 20 17:16:08 crc kubenswrapper[4558]: E0120 17:16:08.270965 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 20 17:16:08 crc kubenswrapper[4558]: E0120 17:16:08.271026 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/fd2edfa4-790f-49d7-9e32-29571e490aaf-config-data podName:fd2edfa4-790f-49d7-9e32-29571e490aaf nodeName:}" failed. No retries permitted until 2026-01-20 17:16:12.27101306 +0000 UTC m=+2066.031351026 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/fd2edfa4-790f-49d7-9e32-29571e490aaf-config-data") pod "rabbitmq-server-0" (UID: "fd2edfa4-790f-49d7-9e32-29571e490aaf") : configmap "rabbitmq-config-data" not found Jan 20 17:16:08 crc kubenswrapper[4558]: E0120 17:16:08.271184 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-cinder-internal-svc: secret "cert-cinder-internal-svc" not found Jan 20 17:16:08 crc kubenswrapper[4558]: E0120 17:16:08.271216 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-internal-tls-certs podName:f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a nodeName:}" failed. No retries permitted until 2026-01-20 17:16:12.271205872 +0000 UTC m=+2066.031543839 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-internal-tls-certs") pod "cinder-api-0" (UID: "f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a") : secret "cert-cinder-internal-svc" not found Jan 20 17:16:08 crc kubenswrapper[4558]: E0120 17:16:08.271244 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 17:16:08 crc kubenswrapper[4558]: E0120 17:16:08.271262 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/61a672e2-963e-4428-bb97-e2177ac10c06-operator-scripts podName:61a672e2-963e-4428-bb97-e2177ac10c06 nodeName:}" failed. No retries permitted until 2026-01-20 17:16:09.271256987 +0000 UTC m=+2063.031594955 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/61a672e2-963e-4428-bb97-e2177ac10c06-operator-scripts") pod "barbican-5773-account-create-update-qmwj7" (UID: "61a672e2-963e-4428-bb97-e2177ac10c06") : configmap "openstack-scripts" not found Jan 20 17:16:08 crc kubenswrapper[4558]: E0120 17:16:08.271399 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cinder-config-data: secret "cinder-config-data" not found Jan 20 17:16:08 crc kubenswrapper[4558]: E0120 17:16:08.271424 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-config-data podName:f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a nodeName:}" failed. No retries permitted until 2026-01-20 17:16:12.271416558 +0000 UTC m=+2066.031754525 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-config-data") pod "cinder-api-0" (UID: "f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a") : secret "cinder-config-data" not found Jan 20 17:16:08 crc kubenswrapper[4558]: E0120 17:16:08.271494 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cinder-scripts: secret "cinder-scripts" not found Jan 20 17:16:08 crc kubenswrapper[4558]: E0120 17:16:08.271516 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-scripts podName:f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a nodeName:}" failed. No retries permitted until 2026-01-20 17:16:12.271508511 +0000 UTC m=+2066.031846477 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "scripts" (UniqueName: "kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-scripts") pod "cinder-api-0" (UID: "f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a") : secret "cinder-scripts" not found Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.270978 4558 reconciler_common.go:293] "Volume detached for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/13a0a262-6f56-4cf9-9b0d-85110bcff1e7-config-out\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.271555 4558 reconciler_common.go:293] "Volume detached for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/13a0a262-6f56-4cf9-9b0d-85110bcff1e7-cluster-tls-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.271620 4558 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/13a0a262-6f56-4cf9-9b0d-85110bcff1e7-config-volume\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.271631 4558 reconciler_common.go:293] "Volume detached for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/13a0a262-6f56-4cf9-9b0d-85110bcff1e7-tls-assets\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.271641 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p7njw\" (UniqueName: \"kubernetes.io/projected/13a0a262-6f56-4cf9-9b0d-85110bcff1e7-kube-api-access-p7njw\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.271650 4558 reconciler_common.go:293] "Volume detached for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/13a0a262-6f56-4cf9-9b0d-85110bcff1e7-alertmanager-metric-storage-db\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.279461 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/13a0a262-6f56-4cf9-9b0d-85110bcff1e7-web-config" (OuterVolumeSpecName: "web-config") pod "13a0a262-6f56-4cf9-9b0d-85110bcff1e7" (UID: "13a0a262-6f56-4cf9-9b0d-85110bcff1e7"). InnerVolumeSpecName "web-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.326265 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.337140 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.353408 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:16:08 crc kubenswrapper[4558]: E0120 17:16:08.356909 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="022773dc93b3f1b894358c04d0057cb8d8780ca1ef654626b973b40167c3532e" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.375114 4558 reconciler_common.go:293] "Volume detached for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/13a0a262-6f56-4cf9-9b0d-85110bcff1e7-web-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:08 crc kubenswrapper[4558]: E0120 17:16:08.392974 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="022773dc93b3f1b894358c04d0057cb8d8780ca1ef654626b973b40167c3532e" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:16:08 crc kubenswrapper[4558]: E0120 17:16:08.395306 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="022773dc93b3f1b894358c04d0057cb8d8780ca1ef654626b973b40167c3532e" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:16:08 crc kubenswrapper[4558]: E0120 17:16:08.395351 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="c4cb69c1-3d51-4b13-a025-b37c986ffede" containerName="nova-scheduler-scheduler" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.476898 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e87a4844-8a90-4965-878f-d95aa09c47bb-secret-combined-ca-bundle\") pod \"e87a4844-8a90-4965-878f-d95aa09c47bb\" (UID: \"e87a4844-8a90-4965-878f-d95aa09c47bb\") " Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.476948 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/ad371747-3ff0-49b0-b2a6-33d7bb7cd16b-vencrypt-tls-certs\") pod \"ad371747-3ff0-49b0-b2a6-33d7bb7cd16b\" (UID: \"ad371747-3ff0-49b0-b2a6-33d7bb7cd16b\") " Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.477018 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v2gjr\" (UniqueName: \"kubernetes.io/projected/e87a4844-8a90-4965-878f-d95aa09c47bb-kube-api-access-v2gjr\") pod \"e87a4844-8a90-4965-878f-d95aa09c47bb\" (UID: \"e87a4844-8a90-4965-878f-d95aa09c47bb\") " Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.477060 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/e87a4844-8a90-4965-878f-d95aa09c47bb-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") 
pod \"e87a4844-8a90-4965-878f-d95aa09c47bb\" (UID: \"e87a4844-8a90-4965-878f-d95aa09c47bb\") " Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.477111 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/e87a4844-8a90-4965-878f-d95aa09c47bb-config-out\") pod \"e87a4844-8a90-4965-878f-d95aa09c47bb\" (UID: \"e87a4844-8a90-4965-878f-d95aa09c47bb\") " Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.477132 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tzm5k\" (UniqueName: \"kubernetes.io/projected/6947f138-6d2f-40e2-a236-736d26d3a1e6-kube-api-access-tzm5k\") pod \"6947f138-6d2f-40e2-a236-736d26d3a1e6\" (UID: \"6947f138-6d2f-40e2-a236-736d26d3a1e6\") " Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.477184 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/e87a4844-8a90-4965-878f-d95aa09c47bb-config\") pod \"e87a4844-8a90-4965-878f-d95aa09c47bb\" (UID: \"e87a4844-8a90-4965-878f-d95aa09c47bb\") " Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.477206 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad371747-3ff0-49b0-b2a6-33d7bb7cd16b-config-data\") pod \"ad371747-3ff0-49b0-b2a6-33d7bb7cd16b\" (UID: \"ad371747-3ff0-49b0-b2a6-33d7bb7cd16b\") " Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.477241 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/e87a4844-8a90-4965-878f-d95aa09c47bb-web-config\") pod \"e87a4844-8a90-4965-878f-d95aa09c47bb\" (UID: \"e87a4844-8a90-4965-878f-d95aa09c47bb\") " Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.477279 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d89mw\" (UniqueName: \"kubernetes.io/projected/ad371747-3ff0-49b0-b2a6-33d7bb7cd16b-kube-api-access-d89mw\") pod \"ad371747-3ff0-49b0-b2a6-33d7bb7cd16b\" (UID: \"ad371747-3ff0-49b0-b2a6-33d7bb7cd16b\") " Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.477300 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/e87a4844-8a90-4965-878f-d95aa09c47bb-prometheus-metric-storage-rulefiles-0\") pod \"e87a4844-8a90-4965-878f-d95aa09c47bb\" (UID: \"e87a4844-8a90-4965-878f-d95aa09c47bb\") " Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.477352 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/6947f138-6d2f-40e2-a236-736d26d3a1e6-kube-state-metrics-tls-certs\") pod \"6947f138-6d2f-40e2-a236-736d26d3a1e6\" (UID: \"6947f138-6d2f-40e2-a236-736d26d3a1e6\") " Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.477377 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/e87a4844-8a90-4965-878f-d95aa09c47bb-thanos-prometheus-http-client-file\") pod \"e87a4844-8a90-4965-878f-d95aa09c47bb\" (UID: \"e87a4844-8a90-4965-878f-d95aa09c47bb\") " Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.477427 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"prometheus-metric-storage-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/e87a4844-8a90-4965-878f-d95aa09c47bb-prometheus-metric-storage-rulefiles-1\") pod \"e87a4844-8a90-4965-878f-d95aa09c47bb\" (UID: \"e87a4844-8a90-4965-878f-d95aa09c47bb\") " Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.477449 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/6947f138-6d2f-40e2-a236-736d26d3a1e6-kube-state-metrics-tls-config\") pod \"6947f138-6d2f-40e2-a236-736d26d3a1e6\" (UID: \"6947f138-6d2f-40e2-a236-736d26d3a1e6\") " Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.477488 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/e87a4844-8a90-4965-878f-d95aa09c47bb-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"e87a4844-8a90-4965-878f-d95aa09c47bb\" (UID: \"e87a4844-8a90-4965-878f-d95aa09c47bb\") " Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.477524 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/e87a4844-8a90-4965-878f-d95aa09c47bb-tls-assets\") pod \"e87a4844-8a90-4965-878f-d95aa09c47bb\" (UID: \"e87a4844-8a90-4965-878f-d95aa09c47bb\") " Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.477551 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6947f138-6d2f-40e2-a236-736d26d3a1e6-combined-ca-bundle\") pod \"6947f138-6d2f-40e2-a236-736d26d3a1e6\" (UID: \"6947f138-6d2f-40e2-a236-736d26d3a1e6\") " Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.477585 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"e87a4844-8a90-4965-878f-d95aa09c47bb\" (UID: \"e87a4844-8a90-4965-878f-d95aa09c47bb\") " Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.477624 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/e87a4844-8a90-4965-878f-d95aa09c47bb-prometheus-metric-storage-rulefiles-2\") pod \"e87a4844-8a90-4965-878f-d95aa09c47bb\" (UID: \"e87a4844-8a90-4965-878f-d95aa09c47bb\") " Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.477675 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/ad371747-3ff0-49b0-b2a6-33d7bb7cd16b-nova-novncproxy-tls-certs\") pod \"ad371747-3ff0-49b0-b2a6-33d7bb7cd16b\" (UID: \"ad371747-3ff0-49b0-b2a6-33d7bb7cd16b\") " Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.477694 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad371747-3ff0-49b0-b2a6-33d7bb7cd16b-combined-ca-bundle\") pod \"ad371747-3ff0-49b0-b2a6-33d7bb7cd16b\" (UID: \"ad371747-3ff0-49b0-b2a6-33d7bb7cd16b\") " Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.478066 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/31afa559-d026-4227-b6dc-44aa2796a781-operator-scripts\") pod 
\"keystone-3f8b-account-create-update-pg4kh\" (UID: \"31afa559-d026-4227-b6dc-44aa2796a781\") " pod="openstack-kuttl-tests/keystone-3f8b-account-create-update-pg4kh" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.478132 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g22xf\" (UniqueName: \"kubernetes.io/projected/31afa559-d026-4227-b6dc-44aa2796a781-kube-api-access-g22xf\") pod \"keystone-3f8b-account-create-update-pg4kh\" (UID: \"31afa559-d026-4227-b6dc-44aa2796a781\") " pod="openstack-kuttl-tests/keystone-3f8b-account-create-update-pg4kh" Jan 20 17:16:08 crc kubenswrapper[4558]: E0120 17:16:08.481850 4558 projected.go:194] Error preparing data for projected volume kube-api-access-g22xf for pod openstack-kuttl-tests/keystone-3f8b-account-create-update-pg4kh: failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 17:16:08 crc kubenswrapper[4558]: E0120 17:16:08.481962 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/31afa559-d026-4227-b6dc-44aa2796a781-kube-api-access-g22xf podName:31afa559-d026-4227-b6dc-44aa2796a781 nodeName:}" failed. No retries permitted until 2026-01-20 17:16:09.481946848 +0000 UTC m=+2063.242284816 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-g22xf" (UniqueName: "kubernetes.io/projected/31afa559-d026-4227-b6dc-44aa2796a781-kube-api-access-g22xf") pod "keystone-3f8b-account-create-update-pg4kh" (UID: "31afa559-d026-4227-b6dc-44aa2796a781") : failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.492484 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e87a4844-8a90-4965-878f-d95aa09c47bb-prometheus-metric-storage-rulefiles-0" (OuterVolumeSpecName: "prometheus-metric-storage-rulefiles-0") pod "e87a4844-8a90-4965-878f-d95aa09c47bb" (UID: "e87a4844-8a90-4965-878f-d95aa09c47bb"). InnerVolumeSpecName "prometheus-metric-storage-rulefiles-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.494430 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6947f138-6d2f-40e2-a236-736d26d3a1e6-kube-api-access-tzm5k" (OuterVolumeSpecName: "kube-api-access-tzm5k") pod "6947f138-6d2f-40e2-a236-736d26d3a1e6" (UID: "6947f138-6d2f-40e2-a236-736d26d3a1e6"). InnerVolumeSpecName "kube-api-access-tzm5k". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:16:08 crc kubenswrapper[4558]: E0120 17:16:08.494546 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 17:16:08 crc kubenswrapper[4558]: E0120 17:16:08.494587 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/31afa559-d026-4227-b6dc-44aa2796a781-operator-scripts podName:31afa559-d026-4227-b6dc-44aa2796a781 nodeName:}" failed. No retries permitted until 2026-01-20 17:16:09.494574635 +0000 UTC m=+2063.254912602 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/31afa559-d026-4227-b6dc-44aa2796a781-operator-scripts") pod "keystone-3f8b-account-create-update-pg4kh" (UID: "31afa559-d026-4227-b6dc-44aa2796a781") : configmap "openstack-scripts" not found Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.494899 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e87a4844-8a90-4965-878f-d95aa09c47bb-prometheus-metric-storage-rulefiles-1" (OuterVolumeSpecName: "prometheus-metric-storage-rulefiles-1") pod "e87a4844-8a90-4965-878f-d95aa09c47bb" (UID: "e87a4844-8a90-4965-878f-d95aa09c47bb"). InnerVolumeSpecName "prometheus-metric-storage-rulefiles-1". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.495982 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e87a4844-8a90-4965-878f-d95aa09c47bb-prometheus-metric-storage-rulefiles-2" (OuterVolumeSpecName: "prometheus-metric-storage-rulefiles-2") pod "e87a4844-8a90-4965-878f-d95aa09c47bb" (UID: "e87a4844-8a90-4965-878f-d95aa09c47bb"). InnerVolumeSpecName "prometheus-metric-storage-rulefiles-2". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.496017 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e87a4844-8a90-4965-878f-d95aa09c47bb-config" (OuterVolumeSpecName: "config") pod "e87a4844-8a90-4965-878f-d95aa09c47bb" (UID: "e87a4844-8a90-4965-878f-d95aa09c47bb"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.496122 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e87a4844-8a90-4965-878f-d95aa09c47bb-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d" (OuterVolumeSpecName: "web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d") pod "e87a4844-8a90-4965-878f-d95aa09c47bb" (UID: "e87a4844-8a90-4965-878f-d95aa09c47bb"). InnerVolumeSpecName "web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.496375 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e87a4844-8a90-4965-878f-d95aa09c47bb-secret-combined-ca-bundle" (OuterVolumeSpecName: "secret-combined-ca-bundle") pod "e87a4844-8a90-4965-878f-d95aa09c47bb" (UID: "e87a4844-8a90-4965-878f-d95aa09c47bb"). InnerVolumeSpecName "secret-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.496488 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ad371747-3ff0-49b0-b2a6-33d7bb7cd16b-kube-api-access-d89mw" (OuterVolumeSpecName: "kube-api-access-d89mw") pod "ad371747-3ff0-49b0-b2a6-33d7bb7cd16b" (UID: "ad371747-3ff0-49b0-b2a6-33d7bb7cd16b"). InnerVolumeSpecName "kube-api-access-d89mw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.501844 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e87a4844-8a90-4965-878f-d95aa09c47bb-config-out" (OuterVolumeSpecName: "config-out") pod "e87a4844-8a90-4965-878f-d95aa09c47bb" (UID: "e87a4844-8a90-4965-878f-d95aa09c47bb"). InnerVolumeSpecName "config-out". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.514911 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e87a4844-8a90-4965-878f-d95aa09c47bb-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d" (OuterVolumeSpecName: "web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d") pod "e87a4844-8a90-4965-878f-d95aa09c47bb" (UID: "e87a4844-8a90-4965-878f-d95aa09c47bb"). InnerVolumeSpecName "web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.515338 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e87a4844-8a90-4965-878f-d95aa09c47bb-tls-assets" (OuterVolumeSpecName: "tls-assets") pod "e87a4844-8a90-4965-878f-d95aa09c47bb" (UID: "e87a4844-8a90-4965-878f-d95aa09c47bb"). InnerVolumeSpecName "tls-assets". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.518287 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e87a4844-8a90-4965-878f-d95aa09c47bb-thanos-prometheus-http-client-file" (OuterVolumeSpecName: "thanos-prometheus-http-client-file") pod "e87a4844-8a90-4965-878f-d95aa09c47bb" (UID: "e87a4844-8a90-4965-878f-d95aa09c47bb"). InnerVolumeSpecName "thanos-prometheus-http-client-file". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.519628 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e87a4844-8a90-4965-878f-d95aa09c47bb-kube-api-access-v2gjr" (OuterVolumeSpecName: "kube-api-access-v2gjr") pod "e87a4844-8a90-4965-878f-d95aa09c47bb" (UID: "e87a4844-8a90-4965-878f-d95aa09c47bb"). InnerVolumeSpecName "kube-api-access-v2gjr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.555779 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage12-crc" (OuterVolumeSpecName: "prometheus-metric-storage-db") pod "e87a4844-8a90-4965-878f-d95aa09c47bb" (UID: "e87a4844-8a90-4965-878f-d95aa09c47bb"). InnerVolumeSpecName "local-storage12-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.587888 4558 reconciler_common.go:293] "Volume detached for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e87a4844-8a90-4965-878f-d95aa09c47bb-secret-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.587929 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v2gjr\" (UniqueName: \"kubernetes.io/projected/e87a4844-8a90-4965-878f-d95aa09c47bb-kube-api-access-v2gjr\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.587953 4558 reconciler_common.go:293] "Volume detached for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/e87a4844-8a90-4965-878f-d95aa09c47bb-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.587969 4558 reconciler_common.go:293] "Volume detached for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/e87a4844-8a90-4965-878f-d95aa09c47bb-config-out\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.587984 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tzm5k\" (UniqueName: \"kubernetes.io/projected/6947f138-6d2f-40e2-a236-736d26d3a1e6-kube-api-access-tzm5k\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.587996 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/e87a4844-8a90-4965-878f-d95aa09c47bb-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.588015 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d89mw\" (UniqueName: \"kubernetes.io/projected/ad371747-3ff0-49b0-b2a6-33d7bb7cd16b-kube-api-access-d89mw\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.588027 4558 reconciler_common.go:293] "Volume detached for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/e87a4844-8a90-4965-878f-d95aa09c47bb-prometheus-metric-storage-rulefiles-0\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.588040 4558 reconciler_common.go:293] "Volume detached for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/e87a4844-8a90-4965-878f-d95aa09c47bb-thanos-prometheus-http-client-file\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.588055 4558 reconciler_common.go:293] "Volume detached for volume \"prometheus-metric-storage-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/e87a4844-8a90-4965-878f-d95aa09c47bb-prometheus-metric-storage-rulefiles-1\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.588074 4558 reconciler_common.go:293] "Volume detached for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/e87a4844-8a90-4965-878f-d95aa09c47bb-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.588088 4558 reconciler_common.go:293] "Volume detached for volume \"tls-assets\" (UniqueName: 
\"kubernetes.io/projected/e87a4844-8a90-4965-878f-d95aa09c47bb-tls-assets\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.588132 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" " Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.588144 4558 reconciler_common.go:293] "Volume detached for volume \"prometheus-metric-storage-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/e87a4844-8a90-4965-878f-d95aa09c47bb-prometheus-metric-storage-rulefiles-2\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.592620 4558 generic.go:334] "Generic (PLEG): container finished" podID="fc9b3fed-9730-4199-b074-3f01cdc1b7ce" containerID="e1569c77aa4164d84391079102afae404d6f206b7e90d3e21dd30d7b3d65062f" exitCode=0 Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.601725 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="099e720f-99be-44a9-b149-94a07ebe5e02" path="/var/lib/kubelet/pods/099e720f-99be-44a9-b149-94a07ebe5e02/volumes" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.602434 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="19216a3e-4417-45d8-a039-731d0bc938ca" path="/var/lib/kubelet/pods/19216a3e-4417-45d8-a039-731d0bc938ca/volumes" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.603044 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="29ba41cf-902a-4ee1-88c9-d1db60f5e9ab" path="/var/lib/kubelet/pods/29ba41cf-902a-4ee1-88c9-d1db60f5e9ab/volumes" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.603692 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2d068982-e4de-4519-8842-8ae09682ad42" path="/var/lib/kubelet/pods/2d068982-e4de-4519-8842-8ae09682ad42/volumes" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.605579 4558 generic.go:334] "Generic (PLEG): container finished" podID="544c8619-276c-4358-ac79-58df9a462173" containerID="5e7f9a7df82f7908138cd4491a1924604163a709b2ab28b8825d5768baab4058" exitCode=0 Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.605607 4558 generic.go:334] "Generic (PLEG): container finished" podID="544c8619-276c-4358-ac79-58df9a462173" containerID="821d851a2b03145528569e24bdf1e48df94429d66e29eac4da06f73e2d0ea57e" exitCode=2 Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.605616 4558 generic.go:334] "Generic (PLEG): container finished" podID="544c8619-276c-4358-ac79-58df9a462173" containerID="aef9e92362cdebb57f6e6fec72040ef9ea0c3ebdec14cebff1a1ec4ae81e35a9" exitCode=0 Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.608631 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5cb4c24d-6741-4ee1-a1d5-c7d994499296" path="/var/lib/kubelet/pods/5cb4c24d-6741-4ee1-a1d5-c7d994499296/volumes" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.609143 4558 generic.go:334] "Generic (PLEG): container finished" podID="e87a4844-8a90-4965-878f-d95aa09c47bb" containerID="812f4fcb56b8658b4bbf8cdf87243bd17984898af6dea1b34072f67d46707f0b" exitCode=0 Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.609208 4558 generic.go:334] "Generic (PLEG): container finished" podID="e87a4844-8a90-4965-878f-d95aa09c47bb" containerID="cee5946fda9135814e788d08c6619a590258ddf4784db12ce17b315442c52074" exitCode=0 Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.609221 4558 generic.go:334] 
"Generic (PLEG): container finished" podID="e87a4844-8a90-4965-878f-d95aa09c47bb" containerID="83ca1736c91920d790e234d6dd7cb02e325a0698ad1ac3ce1a25312d1f3c51a8" exitCode=0 Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.610213 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/prometheus-metric-storage-0" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.614801 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6947f138-6d2f-40e2-a236-736d26d3a1e6-kube-state-metrics-tls-config" (OuterVolumeSpecName: "kube-state-metrics-tls-config") pod "6947f138-6d2f-40e2-a236-736d26d3a1e6" (UID: "6947f138-6d2f-40e2-a236-736d26d3a1e6"). InnerVolumeSpecName "kube-state-metrics-tls-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.619574 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="65cdc060-60d9-48a8-b535-31b93f49e1ed" path="/var/lib/kubelet/pods/65cdc060-60d9-48a8-b535-31b93f49e1ed/volumes" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.621398 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6748d231-22b7-4a78-b3ab-945673683907" path="/var/lib/kubelet/pods/6748d231-22b7-4a78-b3ab-945673683907/volumes" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.623126 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8635d21f-4563-42ce-a3ab-4c67653d4dee" path="/var/lib/kubelet/pods/8635d21f-4563-42ce-a3ab-4c67653d4dee/volumes" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.627540 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ad371747-3ff0-49b0-b2a6-33d7bb7cd16b-config-data" (OuterVolumeSpecName: "config-data") pod "ad371747-3ff0-49b0-b2a6-33d7bb7cd16b" (UID: "ad371747-3ff0-49b0-b2a6-33d7bb7cd16b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.632501 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b8bbfcda-5b58-470f-9f0e-e35cb51e1872" path="/var/lib/kubelet/pods/b8bbfcda-5b58-470f-9f0e-e35cb51e1872/volumes" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.636059 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bad8aeff-00a6-4456-bbdf-937441ed04c0" path="/var/lib/kubelet/pods/bad8aeff-00a6-4456-bbdf-937441ed04c0/volumes" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.636135 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6947f138-6d2f-40e2-a236-736d26d3a1e6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6947f138-6d2f-40e2-a236-736d26d3a1e6" (UID: "6947f138-6d2f-40e2-a236-736d26d3a1e6"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.638781 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bfbc710a-9995-4db3-a621-c7dd17624239" path="/var/lib/kubelet/pods/bfbc710a-9995-4db3-a621-c7dd17624239/volumes" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.639435 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e303f939-15b8-4652-919a-ceef96c1c997" path="/var/lib/kubelet/pods/e303f939-15b8-4652-919a-ceef96c1c997/volumes" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.640511 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ecc5d42c-dbf5-44e2-b487-25aa99419dc1" path="/var/lib/kubelet/pods/ecc5d42c-dbf5-44e2-b487-25aa99419dc1/volumes" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.641592 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ef3c342e-679b-4a2e-94df-82adf4200290" path="/var/lib/kubelet/pods/ef3c342e-679b-4a2e-94df-82adf4200290/volumes" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.647800 4558 generic.go:334] "Generic (PLEG): container finished" podID="9116eb6c-0630-4022-9c43-8c8793e08922" containerID="6da9a00e2025f693c02a7cb6b25aca4e9c3ecc4fc8e7c3494f4026506bdec6c1" exitCode=143 Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.654519 4558 generic.go:334] "Generic (PLEG): container finished" podID="ef393209-80db-43c7-a749-3ec7826f2123" containerID="412baacf5264ce5460c28e8a452e6be275c6dc44a89ee8bd0b6460f122a289b5" exitCode=0 Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.671395 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e87a4844-8a90-4965-878f-d95aa09c47bb-web-config" (OuterVolumeSpecName: "web-config") pod "e87a4844-8a90-4965-878f-d95aa09c47bb" (UID: "e87a4844-8a90-4965-878f-d95aa09c47bb"). InnerVolumeSpecName "web-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.686265 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"fc9b3fed-9730-4199-b074-3f01cdc1b7ce","Type":"ContainerDied","Data":"e1569c77aa4164d84391079102afae404d6f206b7e90d3e21dd30d7b3d65062f"} Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.686332 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"544c8619-276c-4358-ac79-58df9a462173","Type":"ContainerDied","Data":"5e7f9a7df82f7908138cd4491a1924604163a709b2ab28b8825d5768baab4058"} Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.686364 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"544c8619-276c-4358-ac79-58df9a462173","Type":"ContainerDied","Data":"821d851a2b03145528569e24bdf1e48df94429d66e29eac4da06f73e2d0ea57e"} Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.686382 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"544c8619-276c-4358-ac79-58df9a462173","Type":"ContainerDied","Data":"aef9e92362cdebb57f6e6fec72040ef9ea0c3ebdec14cebff1a1ec4ae81e35a9"} Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.686401 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/prometheus-metric-storage-0" event={"ID":"e87a4844-8a90-4965-878f-d95aa09c47bb","Type":"ContainerDied","Data":"812f4fcb56b8658b4bbf8cdf87243bd17984898af6dea1b34072f67d46707f0b"} Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.686428 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/prometheus-metric-storage-0" event={"ID":"e87a4844-8a90-4965-878f-d95aa09c47bb","Type":"ContainerDied","Data":"cee5946fda9135814e788d08c6619a590258ddf4784db12ce17b315442c52074"} Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.686445 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/prometheus-metric-storage-0" event={"ID":"e87a4844-8a90-4965-878f-d95aa09c47bb","Type":"ContainerDied","Data":"83ca1736c91920d790e234d6dd7cb02e325a0698ad1ac3ce1a25312d1f3c51a8"} Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.686464 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/prometheus-metric-storage-0" event={"ID":"e87a4844-8a90-4965-878f-d95aa09c47bb","Type":"ContainerDied","Data":"53d4a8330d12668e9d869d236edfd30facbf5ef1c9317515c37714583c2d61ec"} Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.686475 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-7601-account-create-update-wx6mf" event={"ID":"5cdda1de-c1bb-45a1-a67b-9fe6e8d699df","Type":"ContainerStarted","Data":"7148ebe9db90c871beae75d4bfb0313e5f96c860952168a7db736eda9d2247fe"} Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.686498 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-6587ff9c6b-8wmpk" event={"ID":"9116eb6c-0630-4022-9c43-8c8793e08922","Type":"ContainerDied","Data":"6da9a00e2025f693c02a7cb6b25aca4e9c3ecc4fc8e7c3494f4026506bdec6c1"} Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.686512 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-ndwh7" 
event={"ID":"ef393209-80db-43c7-a749-3ec7826f2123","Type":"ContainerDied","Data":"412baacf5264ce5460c28e8a452e6be275c6dc44a89ee8bd0b6460f122a289b5"} Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.686522 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-ndwh7" event={"ID":"ef393209-80db-43c7-a749-3ec7826f2123","Type":"ContainerStarted","Data":"560e74d6ac97887570796d7f33795ec8548eb06564c454862ab5d2d8630bffd6"} Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.686543 4558 scope.go:117] "RemoveContainer" containerID="812f4fcb56b8658b4bbf8cdf87243bd17984898af6dea1b34072f67d46707f0b" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.688218 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage12-crc" (UniqueName: "kubernetes.io/local-volume/local-storage12-crc") on node "crc" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.689100 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-542c-account-create-update-fknl6" event={"ID":"4b51c4ea-1df2-42b3-9aa7-d69471c234ed","Type":"ContainerStarted","Data":"6deab5eb33aca62a5b28b22446217194da0bcba5310ad67a07dcec73acf21a18"} Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.691372 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ad371747-3ff0-49b0-b2a6-33d7bb7cd16b-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.691400 4558 reconciler_common.go:293] "Volume detached for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/e87a4844-8a90-4965-878f-d95aa09c47bb-web-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.691410 4558 reconciler_common.go:293] "Volume detached for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/6947f138-6d2f-40e2-a236-736d26d3a1e6-kube-state-metrics-tls-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.691424 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6947f138-6d2f-40e2-a236-736d26d3a1e6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.691434 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.715632 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ad371747-3ff0-49b0-b2a6-33d7bb7cd16b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ad371747-3ff0-49b0-b2a6-33d7bb7cd16b" (UID: "ad371747-3ff0-49b0-b2a6-33d7bb7cd16b"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.727848 4558 generic.go:334] "Generic (PLEG): container finished" podID="691cf1c1-d617-489b-b0cd-1364328d6c60" containerID="bb25797442bb8998337533bd758e5a01a780b03500429252dd5184a37ffe2a56" exitCode=1 Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.727939 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-psddp" event={"ID":"691cf1c1-d617-489b-b0cd-1364328d6c60","Type":"ContainerDied","Data":"bb25797442bb8998337533bd758e5a01a780b03500429252dd5184a37ffe2a56"} Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.728754 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-proxy-7c48456b-8z4pm" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.748436 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ad371747-3ff0-49b0-b2a6-33d7bb7cd16b-vencrypt-tls-certs" (OuterVolumeSpecName: "vencrypt-tls-certs") pod "ad371747-3ff0-49b0-b2a6-33d7bb7cd16b" (UID: "ad371747-3ff0-49b0-b2a6-33d7bb7cd16b"). InnerVolumeSpecName "vencrypt-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.748576 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6947f138-6d2f-40e2-a236-736d26d3a1e6-kube-state-metrics-tls-certs" (OuterVolumeSpecName: "kube-state-metrics-tls-certs") pod "6947f138-6d2f-40e2-a236-736d26d3a1e6" (UID: "6947f138-6d2f-40e2-a236-736d26d3a1e6"). InnerVolumeSpecName "kube-state-metrics-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.752548 4558 generic.go:334] "Generic (PLEG): container finished" podID="13a0a262-6f56-4cf9-9b0d-85110bcff1e7" containerID="abbc56b6e1300066bf6f3c76afd3d47d0ea00a3c7a4a4d427f6e57e82969bee0" exitCode=0 Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.752573 4558 generic.go:334] "Generic (PLEG): container finished" podID="13a0a262-6f56-4cf9-9b0d-85110bcff1e7" containerID="626d2d61214153aec0449b76e316d8177bdd3708c258717f7715a91a609543f1" exitCode=0 Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.752645 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/alertmanager-metric-storage-0" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.752634 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/alertmanager-metric-storage-0" event={"ID":"13a0a262-6f56-4cf9-9b0d-85110bcff1e7","Type":"ContainerDied","Data":"abbc56b6e1300066bf6f3c76afd3d47d0ea00a3c7a4a4d427f6e57e82969bee0"} Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.752757 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/alertmanager-metric-storage-0" event={"ID":"13a0a262-6f56-4cf9-9b0d-85110bcff1e7","Type":"ContainerDied","Data":"626d2d61214153aec0449b76e316d8177bdd3708c258717f7715a91a609543f1"} Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.752770 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/alertmanager-metric-storage-0" event={"ID":"13a0a262-6f56-4cf9-9b0d-85110bcff1e7","Type":"ContainerDied","Data":"1e7f2a5ae885cc4c28319d54221677567b47d3120f3c178847ab30c414af96bd"} Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.759899 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ad371747-3ff0-49b0-b2a6-33d7bb7cd16b-nova-novncproxy-tls-certs" (OuterVolumeSpecName: "nova-novncproxy-tls-certs") pod "ad371747-3ff0-49b0-b2a6-33d7bb7cd16b" (UID: "ad371747-3ff0-49b0-b2a6-33d7bb7cd16b"). InnerVolumeSpecName "nova-novncproxy-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.766109 4558 generic.go:334] "Generic (PLEG): container finished" podID="8dfaaf1f-540b-4c8b-b93c-1e3556c37801" containerID="bbac2ee5bf98eddd33a60a5edd0a1dac188b6c3de922ce151b7900626fe0fff8" exitCode=143 Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.766195 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-75484ddb58-9p7fq" event={"ID":"8dfaaf1f-540b-4c8b-b93c-1e3556c37801","Type":"ContainerDied","Data":"bbac2ee5bf98eddd33a60a5edd0a1dac188b6c3de922ce151b7900626fe0fff8"} Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.775292 4558 generic.go:334] "Generic (PLEG): container finished" podID="6947f138-6d2f-40e2-a236-736d26d3a1e6" containerID="3019d79f9b53d96500bf161bbfffaf2fd1e86e0b8753d308b473cc6f0550db6e" exitCode=2 Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.775342 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"6947f138-6d2f-40e2-a236-736d26d3a1e6","Type":"ContainerDied","Data":"3019d79f9b53d96500bf161bbfffaf2fd1e86e0b8753d308b473cc6f0550db6e"} Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.775364 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"6947f138-6d2f-40e2-a236-736d26d3a1e6","Type":"ContainerDied","Data":"b4819a707a14b0b7318668e9392277556e80c25680c1b4bf1615db3adc4f5c57"} Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.775425 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.794617 4558 reconciler_common.go:293] "Volume detached for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/6947f138-6d2f-40e2-a236-736d26d3a1e6-kube-state-metrics-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.794639 4558 reconciler_common.go:293] "Volume detached for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/ad371747-3ff0-49b0-b2a6-33d7bb7cd16b-nova-novncproxy-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.794649 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ad371747-3ff0-49b0-b2a6-33d7bb7cd16b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.794658 4558 reconciler_common.go:293] "Volume detached for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/ad371747-3ff0-49b0-b2a6-33d7bb7cd16b-vencrypt-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.799273 4558 generic.go:334] "Generic (PLEG): container finished" podID="3072fee5-c73d-407d-80c7-c376628ec545" containerID="599766d523b289496126b3e65bad734ac36d67a7eee51faf425f5873eea60397" exitCode=0 Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.799338 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"3072fee5-c73d-407d-80c7-c376628ec545","Type":"ContainerDied","Data":"599766d523b289496126b3e65bad734ac36d67a7eee51faf425f5873eea60397"} Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.806426 4558 generic.go:334] "Generic (PLEG): container finished" podID="fe030148-30ef-4bb7-bf94-8835a0707df1" containerID="690711c809e51e7b043bae2985f6866cfd82e22825c519971e1f62af4518288a" exitCode=0 Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.806461 4558 generic.go:334] "Generic (PLEG): container finished" podID="fe030148-30ef-4bb7-bf94-8835a0707df1" containerID="e3f4b05578fbd81c020b9ad602df15fafd661cd2db06f53de10dce624927e791" exitCode=0 Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.806502 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-7c48456b-8z4pm" event={"ID":"fe030148-30ef-4bb7-bf94-8835a0707df1","Type":"ContainerDied","Data":"690711c809e51e7b043bae2985f6866cfd82e22825c519971e1f62af4518288a"} Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.806521 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-7c48456b-8z4pm" event={"ID":"fe030148-30ef-4bb7-bf94-8835a0707df1","Type":"ContainerDied","Data":"e3f4b05578fbd81c020b9ad602df15fafd661cd2db06f53de10dce624927e791"} Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.806530 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-7c48456b-8z4pm" event={"ID":"fe030148-30ef-4bb7-bf94-8835a0707df1","Type":"ContainerDied","Data":"e10a8df049a111763b05a5a1434dadcfda33fdd06b015a788e15885a841c4323"} Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.806588 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/swift-proxy-7c48456b-8z4pm" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.811594 4558 generic.go:334] "Generic (PLEG): container finished" podID="ad371747-3ff0-49b0-b2a6-33d7bb7cd16b" containerID="a7f915ce1e5f59e4f41a36080909e1ccc6e29dbe8d312e443b304657ef70a93d" exitCode=0 Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.811641 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"ad371747-3ff0-49b0-b2a6-33d7bb7cd16b","Type":"ContainerDied","Data":"a7f915ce1e5f59e4f41a36080909e1ccc6e29dbe8d312e443b304657ef70a93d"} Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.811658 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"ad371747-3ff0-49b0-b2a6-33d7bb7cd16b","Type":"ContainerDied","Data":"9c502ea7204a81d002cfc4ba30da06e438661fd039b6fbbda85906d12fb85a6a"} Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.811717 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.814495 4558 scope.go:117] "RemoveContainer" containerID="cee5946fda9135814e788d08c6619a590258ddf4784db12ce17b315442c52074" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.826814 4558 generic.go:334] "Generic (PLEG): container finished" podID="158b4a4e-78aa-4813-a00e-abddfb7214ef" containerID="0d8f4bd8e6d346771ef889450d9cc94bd4e4a03f37ab45706759418add0250d9" exitCode=143 Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.827174 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"158b4a4e-78aa-4813-a00e-abddfb7214ef","Type":"ContainerDied","Data":"0d8f4bd8e6d346771ef889450d9cc94bd4e4a03f37ab45706759418add0250d9"} Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.827357 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-3f8b-account-create-update-pg4kh" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.850355 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.852920 4558 scope.go:117] "RemoveContainer" containerID="83ca1736c91920d790e234d6dd7cb02e325a0698ad1ac3ce1a25312d1f3c51a8" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.864816 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-3f8b-account-create-update-pg4kh" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.895733 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fe030148-30ef-4bb7-bf94-8835a0707df1-run-httpd\") pod \"fe030148-30ef-4bb7-bf94-8835a0707df1\" (UID: \"fe030148-30ef-4bb7-bf94-8835a0707df1\") " Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.895818 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/fe030148-30ef-4bb7-bf94-8835a0707df1-etc-swift\") pod \"fe030148-30ef-4bb7-bf94-8835a0707df1\" (UID: \"fe030148-30ef-4bb7-bf94-8835a0707df1\") " Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.895848 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fe030148-30ef-4bb7-bf94-8835a0707df1-config-data\") pod \"fe030148-30ef-4bb7-bf94-8835a0707df1\" (UID: \"fe030148-30ef-4bb7-bf94-8835a0707df1\") " Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.896028 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h7ptp\" (UniqueName: \"kubernetes.io/projected/fe030148-30ef-4bb7-bf94-8835a0707df1-kube-api-access-h7ptp\") pod \"fe030148-30ef-4bb7-bf94-8835a0707df1\" (UID: \"fe030148-30ef-4bb7-bf94-8835a0707df1\") " Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.896053 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fe030148-30ef-4bb7-bf94-8835a0707df1-log-httpd\") pod \"fe030148-30ef-4bb7-bf94-8835a0707df1\" (UID: \"fe030148-30ef-4bb7-bf94-8835a0707df1\") " Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.896069 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/fe030148-30ef-4bb7-bf94-8835a0707df1-public-tls-certs\") pod \"fe030148-30ef-4bb7-bf94-8835a0707df1\" (UID: \"fe030148-30ef-4bb7-bf94-8835a0707df1\") " Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.896103 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe030148-30ef-4bb7-bf94-8835a0707df1-combined-ca-bundle\") pod \"fe030148-30ef-4bb7-bf94-8835a0707df1\" (UID: \"fe030148-30ef-4bb7-bf94-8835a0707df1\") " Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.896183 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/fe030148-30ef-4bb7-bf94-8835a0707df1-internal-tls-certs\") pod \"fe030148-30ef-4bb7-bf94-8835a0707df1\" (UID: \"fe030148-30ef-4bb7-bf94-8835a0707df1\") " Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.896647 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fe030148-30ef-4bb7-bf94-8835a0707df1-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "fe030148-30ef-4bb7-bf94-8835a0707df1" (UID: "fe030148-30ef-4bb7-bf94-8835a0707df1"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.897057 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fe030148-30ef-4bb7-bf94-8835a0707df1-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "fe030148-30ef-4bb7-bf94-8835a0707df1" (UID: "fe030148-30ef-4bb7-bf94-8835a0707df1"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.905040 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fe030148-30ef-4bb7-bf94-8835a0707df1-kube-api-access-h7ptp" (OuterVolumeSpecName: "kube-api-access-h7ptp") pod "fe030148-30ef-4bb7-bf94-8835a0707df1" (UID: "fe030148-30ef-4bb7-bf94-8835a0707df1"). InnerVolumeSpecName "kube-api-access-h7ptp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.908887 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fe030148-30ef-4bb7-bf94-8835a0707df1-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "fe030148-30ef-4bb7-bf94-8835a0707df1" (UID: "fe030148-30ef-4bb7-bf94-8835a0707df1"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.925408 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.940134 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fe030148-30ef-4bb7-bf94-8835a0707df1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fe030148-30ef-4bb7-bf94-8835a0707df1" (UID: "fe030148-30ef-4bb7-bf94-8835a0707df1"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.943283 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.951402 4558 scope.go:117] "RemoveContainer" containerID="aa1db8c742dbd3178eb1465494b8e471f9616f4072004ba4f8cbbb2be9c8f316" Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.958441 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.969500 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.975746 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/alertmanager-metric-storage-0"] Jan 20 17:16:08 crc kubenswrapper[4558]: I0120 17:16:08.982198 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/alertmanager-metric-storage-0"] Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:08.989592 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/prometheus-metric-storage-0"] Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:08.996848 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/prometheus-metric-storage-0"] Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:08.997146 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fe030148-30ef-4bb7-bf94-8835a0707df1-config-data" (OuterVolumeSpecName: "config-data") pod "fe030148-30ef-4bb7-bf94-8835a0707df1" (UID: "fe030148-30ef-4bb7-bf94-8835a0707df1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:08.997896 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/fc9b3fed-9730-4199-b074-3f01cdc1b7ce-memcached-tls-certs\") pod \"fc9b3fed-9730-4199-b074-3f01cdc1b7ce\" (UID: \"fc9b3fed-9730-4199-b074-3f01cdc1b7ce\") " Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:08.997922 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c5mg8\" (UniqueName: \"kubernetes.io/projected/fc9b3fed-9730-4199-b074-3f01cdc1b7ce-kube-api-access-c5mg8\") pod \"fc9b3fed-9730-4199-b074-3f01cdc1b7ce\" (UID: \"fc9b3fed-9730-4199-b074-3f01cdc1b7ce\") " Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:08.998729 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fe030148-30ef-4bb7-bf94-8835a0707df1-config-data\") pod \"fe030148-30ef-4bb7-bf94-8835a0707df1\" (UID: \"fe030148-30ef-4bb7-bf94-8835a0707df1\") " Jan 20 17:16:09 crc kubenswrapper[4558]: W0120 17:16:08.998905 4558 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/fe030148-30ef-4bb7-bf94-8835a0707df1/volumes/kubernetes.io~secret/config-data Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:08.998926 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fe030148-30ef-4bb7-bf94-8835a0707df1-config-data" (OuterVolumeSpecName: "config-data") pod "fe030148-30ef-4bb7-bf94-8835a0707df1" (UID: "fe030148-30ef-4bb7-bf94-8835a0707df1"). 
InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:08.998980 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc9b3fed-9730-4199-b074-3f01cdc1b7ce-combined-ca-bundle\") pod \"fc9b3fed-9730-4199-b074-3f01cdc1b7ce\" (UID: \"fc9b3fed-9730-4199-b074-3f01cdc1b7ce\") " Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:08.999042 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fc9b3fed-9730-4199-b074-3f01cdc1b7ce-config-data\") pod \"fc9b3fed-9730-4199-b074-3f01cdc1b7ce\" (UID: \"fc9b3fed-9730-4199-b074-3f01cdc1b7ce\") " Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:08.999101 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/fc9b3fed-9730-4199-b074-3f01cdc1b7ce-kolla-config\") pod \"fc9b3fed-9730-4199-b074-3f01cdc1b7ce\" (UID: \"fc9b3fed-9730-4199-b074-3f01cdc1b7ce\") " Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:08.999965 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fc9b3fed-9730-4199-b074-3f01cdc1b7ce-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "fc9b3fed-9730-4199-b074-3f01cdc1b7ce" (UID: "fc9b3fed-9730-4199-b074-3f01cdc1b7ce"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.003519 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fc9b3fed-9730-4199-b074-3f01cdc1b7ce-config-data" (OuterVolumeSpecName: "config-data") pod "fc9b3fed-9730-4199-b074-3f01cdc1b7ce" (UID: "fc9b3fed-9730-4199-b074-3f01cdc1b7ce"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.004151 4558 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/fe030148-30ef-4bb7-bf94-8835a0707df1-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.004201 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fe030148-30ef-4bb7-bf94-8835a0707df1-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.004211 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h7ptp\" (UniqueName: \"kubernetes.io/projected/fe030148-30ef-4bb7-bf94-8835a0707df1-kube-api-access-h7ptp\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.004223 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fe030148-30ef-4bb7-bf94-8835a0707df1-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.004231 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe030148-30ef-4bb7-bf94-8835a0707df1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.004239 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fc9b3fed-9730-4199-b074-3f01cdc1b7ce-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.004247 4558 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/fc9b3fed-9730-4199-b074-3f01cdc1b7ce-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.004255 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fe030148-30ef-4bb7-bf94-8835a0707df1-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.004386 4558 scope.go:117] "RemoveContainer" containerID="812f4fcb56b8658b4bbf8cdf87243bd17984898af6dea1b34072f67d46707f0b" Jan 20 17:16:09 crc kubenswrapper[4558]: E0120 17:16:09.006377 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"812f4fcb56b8658b4bbf8cdf87243bd17984898af6dea1b34072f67d46707f0b\": container with ID starting with 812f4fcb56b8658b4bbf8cdf87243bd17984898af6dea1b34072f67d46707f0b not found: ID does not exist" containerID="812f4fcb56b8658b4bbf8cdf87243bd17984898af6dea1b34072f67d46707f0b" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.006405 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"812f4fcb56b8658b4bbf8cdf87243bd17984898af6dea1b34072f67d46707f0b"} err="failed to get container status \"812f4fcb56b8658b4bbf8cdf87243bd17984898af6dea1b34072f67d46707f0b\": rpc error: code = NotFound desc = could not find container \"812f4fcb56b8658b4bbf8cdf87243bd17984898af6dea1b34072f67d46707f0b\": container with ID starting with 812f4fcb56b8658b4bbf8cdf87243bd17984898af6dea1b34072f67d46707f0b not found: ID does not exist" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.006427 4558 scope.go:117] "RemoveContainer" 
containerID="cee5946fda9135814e788d08c6619a590258ddf4784db12ce17b315442c52074" Jan 20 17:16:09 crc kubenswrapper[4558]: E0120 17:16:09.010811 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cee5946fda9135814e788d08c6619a590258ddf4784db12ce17b315442c52074\": container with ID starting with cee5946fda9135814e788d08c6619a590258ddf4784db12ce17b315442c52074 not found: ID does not exist" containerID="cee5946fda9135814e788d08c6619a590258ddf4784db12ce17b315442c52074" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.010861 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cee5946fda9135814e788d08c6619a590258ddf4784db12ce17b315442c52074"} err="failed to get container status \"cee5946fda9135814e788d08c6619a590258ddf4784db12ce17b315442c52074\": rpc error: code = NotFound desc = could not find container \"cee5946fda9135814e788d08c6619a590258ddf4784db12ce17b315442c52074\": container with ID starting with cee5946fda9135814e788d08c6619a590258ddf4784db12ce17b315442c52074 not found: ID does not exist" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.010891 4558 scope.go:117] "RemoveContainer" containerID="83ca1736c91920d790e234d6dd7cb02e325a0698ad1ac3ce1a25312d1f3c51a8" Jan 20 17:16:09 crc kubenswrapper[4558]: E0120 17:16:09.011419 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"83ca1736c91920d790e234d6dd7cb02e325a0698ad1ac3ce1a25312d1f3c51a8\": container with ID starting with 83ca1736c91920d790e234d6dd7cb02e325a0698ad1ac3ce1a25312d1f3c51a8 not found: ID does not exist" containerID="83ca1736c91920d790e234d6dd7cb02e325a0698ad1ac3ce1a25312d1f3c51a8" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.011444 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"83ca1736c91920d790e234d6dd7cb02e325a0698ad1ac3ce1a25312d1f3c51a8"} err="failed to get container status \"83ca1736c91920d790e234d6dd7cb02e325a0698ad1ac3ce1a25312d1f3c51a8\": rpc error: code = NotFound desc = could not find container \"83ca1736c91920d790e234d6dd7cb02e325a0698ad1ac3ce1a25312d1f3c51a8\": container with ID starting with 83ca1736c91920d790e234d6dd7cb02e325a0698ad1ac3ce1a25312d1f3c51a8 not found: ID does not exist" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.011474 4558 scope.go:117] "RemoveContainer" containerID="aa1db8c742dbd3178eb1465494b8e471f9616f4072004ba4f8cbbb2be9c8f316" Jan 20 17:16:09 crc kubenswrapper[4558]: E0120 17:16:09.012001 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aa1db8c742dbd3178eb1465494b8e471f9616f4072004ba4f8cbbb2be9c8f316\": container with ID starting with aa1db8c742dbd3178eb1465494b8e471f9616f4072004ba4f8cbbb2be9c8f316 not found: ID does not exist" containerID="aa1db8c742dbd3178eb1465494b8e471f9616f4072004ba4f8cbbb2be9c8f316" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.012022 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aa1db8c742dbd3178eb1465494b8e471f9616f4072004ba4f8cbbb2be9c8f316"} err="failed to get container status \"aa1db8c742dbd3178eb1465494b8e471f9616f4072004ba4f8cbbb2be9c8f316\": rpc error: code = NotFound desc = could not find container \"aa1db8c742dbd3178eb1465494b8e471f9616f4072004ba4f8cbbb2be9c8f316\": container with ID starting with 
aa1db8c742dbd3178eb1465494b8e471f9616f4072004ba4f8cbbb2be9c8f316 not found: ID does not exist" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.012036 4558 scope.go:117] "RemoveContainer" containerID="812f4fcb56b8658b4bbf8cdf87243bd17984898af6dea1b34072f67d46707f0b" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.012266 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"812f4fcb56b8658b4bbf8cdf87243bd17984898af6dea1b34072f67d46707f0b"} err="failed to get container status \"812f4fcb56b8658b4bbf8cdf87243bd17984898af6dea1b34072f67d46707f0b\": rpc error: code = NotFound desc = could not find container \"812f4fcb56b8658b4bbf8cdf87243bd17984898af6dea1b34072f67d46707f0b\": container with ID starting with 812f4fcb56b8658b4bbf8cdf87243bd17984898af6dea1b34072f67d46707f0b not found: ID does not exist" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.012277 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fc9b3fed-9730-4199-b074-3f01cdc1b7ce-kube-api-access-c5mg8" (OuterVolumeSpecName: "kube-api-access-c5mg8") pod "fc9b3fed-9730-4199-b074-3f01cdc1b7ce" (UID: "fc9b3fed-9730-4199-b074-3f01cdc1b7ce"). InnerVolumeSpecName "kube-api-access-c5mg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.012287 4558 scope.go:117] "RemoveContainer" containerID="cee5946fda9135814e788d08c6619a590258ddf4784db12ce17b315442c52074" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.014291 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cee5946fda9135814e788d08c6619a590258ddf4784db12ce17b315442c52074"} err="failed to get container status \"cee5946fda9135814e788d08c6619a590258ddf4784db12ce17b315442c52074\": rpc error: code = NotFound desc = could not find container \"cee5946fda9135814e788d08c6619a590258ddf4784db12ce17b315442c52074\": container with ID starting with cee5946fda9135814e788d08c6619a590258ddf4784db12ce17b315442c52074 not found: ID does not exist" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.014315 4558 scope.go:117] "RemoveContainer" containerID="83ca1736c91920d790e234d6dd7cb02e325a0698ad1ac3ce1a25312d1f3c51a8" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.014973 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"83ca1736c91920d790e234d6dd7cb02e325a0698ad1ac3ce1a25312d1f3c51a8"} err="failed to get container status \"83ca1736c91920d790e234d6dd7cb02e325a0698ad1ac3ce1a25312d1f3c51a8\": rpc error: code = NotFound desc = could not find container \"83ca1736c91920d790e234d6dd7cb02e325a0698ad1ac3ce1a25312d1f3c51a8\": container with ID starting with 83ca1736c91920d790e234d6dd7cb02e325a0698ad1ac3ce1a25312d1f3c51a8 not found: ID does not exist" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.015012 4558 scope.go:117] "RemoveContainer" containerID="aa1db8c742dbd3178eb1465494b8e471f9616f4072004ba4f8cbbb2be9c8f316" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.015970 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aa1db8c742dbd3178eb1465494b8e471f9616f4072004ba4f8cbbb2be9c8f316"} err="failed to get container status \"aa1db8c742dbd3178eb1465494b8e471f9616f4072004ba4f8cbbb2be9c8f316\": rpc error: code = NotFound desc = could not find container \"aa1db8c742dbd3178eb1465494b8e471f9616f4072004ba4f8cbbb2be9c8f316\": container with ID starting with 
aa1db8c742dbd3178eb1465494b8e471f9616f4072004ba4f8cbbb2be9c8f316 not found: ID does not exist" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.015996 4558 scope.go:117] "RemoveContainer" containerID="812f4fcb56b8658b4bbf8cdf87243bd17984898af6dea1b34072f67d46707f0b" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.016255 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"812f4fcb56b8658b4bbf8cdf87243bd17984898af6dea1b34072f67d46707f0b"} err="failed to get container status \"812f4fcb56b8658b4bbf8cdf87243bd17984898af6dea1b34072f67d46707f0b\": rpc error: code = NotFound desc = could not find container \"812f4fcb56b8658b4bbf8cdf87243bd17984898af6dea1b34072f67d46707f0b\": container with ID starting with 812f4fcb56b8658b4bbf8cdf87243bd17984898af6dea1b34072f67d46707f0b not found: ID does not exist" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.016303 4558 scope.go:117] "RemoveContainer" containerID="cee5946fda9135814e788d08c6619a590258ddf4784db12ce17b315442c52074" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.016992 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cee5946fda9135814e788d08c6619a590258ddf4784db12ce17b315442c52074"} err="failed to get container status \"cee5946fda9135814e788d08c6619a590258ddf4784db12ce17b315442c52074\": rpc error: code = NotFound desc = could not find container \"cee5946fda9135814e788d08c6619a590258ddf4784db12ce17b315442c52074\": container with ID starting with cee5946fda9135814e788d08c6619a590258ddf4784db12ce17b315442c52074 not found: ID does not exist" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.017011 4558 scope.go:117] "RemoveContainer" containerID="83ca1736c91920d790e234d6dd7cb02e325a0698ad1ac3ce1a25312d1f3c51a8" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.020553 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"83ca1736c91920d790e234d6dd7cb02e325a0698ad1ac3ce1a25312d1f3c51a8"} err="failed to get container status \"83ca1736c91920d790e234d6dd7cb02e325a0698ad1ac3ce1a25312d1f3c51a8\": rpc error: code = NotFound desc = could not find container \"83ca1736c91920d790e234d6dd7cb02e325a0698ad1ac3ce1a25312d1f3c51a8\": container with ID starting with 83ca1736c91920d790e234d6dd7cb02e325a0698ad1ac3ce1a25312d1f3c51a8 not found: ID does not exist" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.020576 4558 scope.go:117] "RemoveContainer" containerID="aa1db8c742dbd3178eb1465494b8e471f9616f4072004ba4f8cbbb2be9c8f316" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.020917 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aa1db8c742dbd3178eb1465494b8e471f9616f4072004ba4f8cbbb2be9c8f316"} err="failed to get container status \"aa1db8c742dbd3178eb1465494b8e471f9616f4072004ba4f8cbbb2be9c8f316\": rpc error: code = NotFound desc = could not find container \"aa1db8c742dbd3178eb1465494b8e471f9616f4072004ba4f8cbbb2be9c8f316\": container with ID starting with aa1db8c742dbd3178eb1465494b8e471f9616f4072004ba4f8cbbb2be9c8f316 not found: ID does not exist" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.020956 4558 scope.go:117] "RemoveContainer" containerID="62082d9ed12f1ee2a30cc05cd78b8b133ba9df78efc6ec3471a763937ad7c720" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.030639 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/secret/fe030148-30ef-4bb7-bf94-8835a0707df1-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "fe030148-30ef-4bb7-bf94-8835a0707df1" (UID: "fe030148-30ef-4bb7-bf94-8835a0707df1"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.042104 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fe030148-30ef-4bb7-bf94-8835a0707df1-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "fe030148-30ef-4bb7-bf94-8835a0707df1" (UID: "fe030148-30ef-4bb7-bf94-8835a0707df1"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.050675 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc9b3fed-9730-4199-b074-3f01cdc1b7ce-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fc9b3fed-9730-4199-b074-3f01cdc1b7ce" (UID: "fc9b3fed-9730-4199-b074-3f01cdc1b7ce"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.060597 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-7601-account-create-update-wx6mf" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.068999 4558 scope.go:117] "RemoveContainer" containerID="abbc56b6e1300066bf6f3c76afd3d47d0ea00a3c7a4a4d427f6e57e82969bee0" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.075190 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc9b3fed-9730-4199-b074-3f01cdc1b7ce-memcached-tls-certs" (OuterVolumeSpecName: "memcached-tls-certs") pod "fc9b3fed-9730-4199-b074-3f01cdc1b7ce" (UID: "fc9b3fed-9730-4199-b074-3f01cdc1b7ce"). InnerVolumeSpecName "memcached-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.092480 4558 scope.go:117] "RemoveContainer" containerID="626d2d61214153aec0449b76e316d8177bdd3708c258717f7715a91a609543f1" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.106263 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/fe030148-30ef-4bb7-bf94-8835a0707df1-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.106298 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc9b3fed-9730-4199-b074-3f01cdc1b7ce-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.106309 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/fe030148-30ef-4bb7-bf94-8835a0707df1-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.106319 4558 reconciler_common.go:293] "Volume detached for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/fc9b3fed-9730-4199-b074-3f01cdc1b7ce-memcached-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.106329 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c5mg8\" (UniqueName: \"kubernetes.io/projected/fc9b3fed-9730-4199-b074-3f01cdc1b7ce-kube-api-access-c5mg8\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.187713 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-psddp" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.188960 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-proxy-7c48456b-8z4pm"] Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.194649 4558 scope.go:117] "RemoveContainer" containerID="f2c8e0e78787bca0b6f637ec67af7122ba24894258eb3e16a2aa600b8bbdc042" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.202063 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/swift-proxy-7c48456b-8z4pm"] Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.208509 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5cdda1de-c1bb-45a1-a67b-9fe6e8d699df-operator-scripts\") pod \"5cdda1de-c1bb-45a1-a67b-9fe6e8d699df\" (UID: \"5cdda1de-c1bb-45a1-a67b-9fe6e8d699df\") " Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.208560 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d9khb\" (UniqueName: \"kubernetes.io/projected/5cdda1de-c1bb-45a1-a67b-9fe6e8d699df-kube-api-access-d9khb\") pod \"5cdda1de-c1bb-45a1-a67b-9fe6e8d699df\" (UID: \"5cdda1de-c1bb-45a1-a67b-9fe6e8d699df\") " Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.212034 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5cdda1de-c1bb-45a1-a67b-9fe6e8d699df-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "5cdda1de-c1bb-45a1-a67b-9fe6e8d699df" (UID: "5cdda1de-c1bb-45a1-a67b-9fe6e8d699df"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.215417 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5cdda1de-c1bb-45a1-a67b-9fe6e8d699df-kube-api-access-d9khb" (OuterVolumeSpecName: "kube-api-access-d9khb") pod "5cdda1de-c1bb-45a1-a67b-9fe6e8d699df" (UID: "5cdda1de-c1bb-45a1-a67b-9fe6e8d699df"). InnerVolumeSpecName "kube-api-access-d9khb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.220204 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-542c-account-create-update-fknl6" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.245114 4558 scope.go:117] "RemoveContainer" containerID="abbc56b6e1300066bf6f3c76afd3d47d0ea00a3c7a4a4d427f6e57e82969bee0" Jan 20 17:16:09 crc kubenswrapper[4558]: E0120 17:16:09.248843 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"abbc56b6e1300066bf6f3c76afd3d47d0ea00a3c7a4a4d427f6e57e82969bee0\": container with ID starting with abbc56b6e1300066bf6f3c76afd3d47d0ea00a3c7a4a4d427f6e57e82969bee0 not found: ID does not exist" containerID="abbc56b6e1300066bf6f3c76afd3d47d0ea00a3c7a4a4d427f6e57e82969bee0" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.248875 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"abbc56b6e1300066bf6f3c76afd3d47d0ea00a3c7a4a4d427f6e57e82969bee0"} err="failed to get container status \"abbc56b6e1300066bf6f3c76afd3d47d0ea00a3c7a4a4d427f6e57e82969bee0\": rpc error: code = NotFound desc = could not find container \"abbc56b6e1300066bf6f3c76afd3d47d0ea00a3c7a4a4d427f6e57e82969bee0\": container with ID starting with abbc56b6e1300066bf6f3c76afd3d47d0ea00a3c7a4a4d427f6e57e82969bee0 not found: ID does not exist" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.248898 4558 scope.go:117] "RemoveContainer" containerID="626d2d61214153aec0449b76e316d8177bdd3708c258717f7715a91a609543f1" Jan 20 17:16:09 crc kubenswrapper[4558]: E0120 17:16:09.251449 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"626d2d61214153aec0449b76e316d8177bdd3708c258717f7715a91a609543f1\": container with ID starting with 626d2d61214153aec0449b76e316d8177bdd3708c258717f7715a91a609543f1 not found: ID does not exist" containerID="626d2d61214153aec0449b76e316d8177bdd3708c258717f7715a91a609543f1" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.251487 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"626d2d61214153aec0449b76e316d8177bdd3708c258717f7715a91a609543f1"} err="failed to get container status \"626d2d61214153aec0449b76e316d8177bdd3708c258717f7715a91a609543f1\": rpc error: code = NotFound desc = could not find container \"626d2d61214153aec0449b76e316d8177bdd3708c258717f7715a91a609543f1\": container with ID starting with 626d2d61214153aec0449b76e316d8177bdd3708c258717f7715a91a609543f1 not found: ID does not exist" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.251513 4558 scope.go:117] "RemoveContainer" containerID="f2c8e0e78787bca0b6f637ec67af7122ba24894258eb3e16a2aa600b8bbdc042" Jan 20 17:16:09 crc kubenswrapper[4558]: E0120 17:16:09.252311 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = 
could not find container \"f2c8e0e78787bca0b6f637ec67af7122ba24894258eb3e16a2aa600b8bbdc042\": container with ID starting with f2c8e0e78787bca0b6f637ec67af7122ba24894258eb3e16a2aa600b8bbdc042 not found: ID does not exist" containerID="f2c8e0e78787bca0b6f637ec67af7122ba24894258eb3e16a2aa600b8bbdc042" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.252327 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f2c8e0e78787bca0b6f637ec67af7122ba24894258eb3e16a2aa600b8bbdc042"} err="failed to get container status \"f2c8e0e78787bca0b6f637ec67af7122ba24894258eb3e16a2aa600b8bbdc042\": rpc error: code = NotFound desc = could not find container \"f2c8e0e78787bca0b6f637ec67af7122ba24894258eb3e16a2aa600b8bbdc042\": container with ID starting with f2c8e0e78787bca0b6f637ec67af7122ba24894258eb3e16a2aa600b8bbdc042 not found: ID does not exist" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.252342 4558 scope.go:117] "RemoveContainer" containerID="abbc56b6e1300066bf6f3c76afd3d47d0ea00a3c7a4a4d427f6e57e82969bee0" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.253491 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"abbc56b6e1300066bf6f3c76afd3d47d0ea00a3c7a4a4d427f6e57e82969bee0"} err="failed to get container status \"abbc56b6e1300066bf6f3c76afd3d47d0ea00a3c7a4a4d427f6e57e82969bee0\": rpc error: code = NotFound desc = could not find container \"abbc56b6e1300066bf6f3c76afd3d47d0ea00a3c7a4a4d427f6e57e82969bee0\": container with ID starting with abbc56b6e1300066bf6f3c76afd3d47d0ea00a3c7a4a4d427f6e57e82969bee0 not found: ID does not exist" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.253507 4558 scope.go:117] "RemoveContainer" containerID="626d2d61214153aec0449b76e316d8177bdd3708c258717f7715a91a609543f1" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.253677 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"626d2d61214153aec0449b76e316d8177bdd3708c258717f7715a91a609543f1"} err="failed to get container status \"626d2d61214153aec0449b76e316d8177bdd3708c258717f7715a91a609543f1\": rpc error: code = NotFound desc = could not find container \"626d2d61214153aec0449b76e316d8177bdd3708c258717f7715a91a609543f1\": container with ID starting with 626d2d61214153aec0449b76e316d8177bdd3708c258717f7715a91a609543f1 not found: ID does not exist" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.253691 4558 scope.go:117] "RemoveContainer" containerID="f2c8e0e78787bca0b6f637ec67af7122ba24894258eb3e16a2aa600b8bbdc042" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.253840 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f2c8e0e78787bca0b6f637ec67af7122ba24894258eb3e16a2aa600b8bbdc042"} err="failed to get container status \"f2c8e0e78787bca0b6f637ec67af7122ba24894258eb3e16a2aa600b8bbdc042\": rpc error: code = NotFound desc = could not find container \"f2c8e0e78787bca0b6f637ec67af7122ba24894258eb3e16a2aa600b8bbdc042\": container with ID starting with f2c8e0e78787bca0b6f637ec67af7122ba24894258eb3e16a2aa600b8bbdc042 not found: ID does not exist" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.253853 4558 scope.go:117] "RemoveContainer" containerID="3019d79f9b53d96500bf161bbfffaf2fd1e86e0b8753d308b473cc6f0550db6e" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.285699 4558 scope.go:117] "RemoveContainer" 
containerID="3019d79f9b53d96500bf161bbfffaf2fd1e86e0b8753d308b473cc6f0550db6e" Jan 20 17:16:09 crc kubenswrapper[4558]: E0120 17:16:09.297069 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3019d79f9b53d96500bf161bbfffaf2fd1e86e0b8753d308b473cc6f0550db6e\": container with ID starting with 3019d79f9b53d96500bf161bbfffaf2fd1e86e0b8753d308b473cc6f0550db6e not found: ID does not exist" containerID="3019d79f9b53d96500bf161bbfffaf2fd1e86e0b8753d308b473cc6f0550db6e" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.297091 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3019d79f9b53d96500bf161bbfffaf2fd1e86e0b8753d308b473cc6f0550db6e"} err="failed to get container status \"3019d79f9b53d96500bf161bbfffaf2fd1e86e0b8753d308b473cc6f0550db6e\": rpc error: code = NotFound desc = could not find container \"3019d79f9b53d96500bf161bbfffaf2fd1e86e0b8753d308b473cc6f0550db6e\": container with ID starting with 3019d79f9b53d96500bf161bbfffaf2fd1e86e0b8753d308b473cc6f0550db6e not found: ID does not exist" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.297106 4558 scope.go:117] "RemoveContainer" containerID="690711c809e51e7b043bae2985f6866cfd82e22825c519971e1f62af4518288a" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.311729 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x8w2w\" (UniqueName: \"kubernetes.io/projected/691cf1c1-d617-489b-b0cd-1364328d6c60-kube-api-access-x8w2w\") pod \"691cf1c1-d617-489b-b0cd-1364328d6c60\" (UID: \"691cf1c1-d617-489b-b0cd-1364328d6c60\") " Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.311761 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4b51c4ea-1df2-42b3-9aa7-d69471c234ed-operator-scripts\") pod \"4b51c4ea-1df2-42b3-9aa7-d69471c234ed\" (UID: \"4b51c4ea-1df2-42b3-9aa7-d69471c234ed\") " Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.311811 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/691cf1c1-d617-489b-b0cd-1364328d6c60-operator-scripts\") pod \"691cf1c1-d617-489b-b0cd-1364328d6c60\" (UID: \"691cf1c1-d617-489b-b0cd-1364328d6c60\") " Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.311828 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cwm5h\" (UniqueName: \"kubernetes.io/projected/4b51c4ea-1df2-42b3-9aa7-d69471c234ed-kube-api-access-cwm5h\") pod \"4b51c4ea-1df2-42b3-9aa7-d69471c234ed\" (UID: \"4b51c4ea-1df2-42b3-9aa7-d69471c234ed\") " Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.312308 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5cdda1de-c1bb-45a1-a67b-9fe6e8d699df-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.312319 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d9khb\" (UniqueName: \"kubernetes.io/projected/5cdda1de-c1bb-45a1-a67b-9fe6e8d699df-kube-api-access-d9khb\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:09 crc kubenswrapper[4558]: E0120 17:16:09.312370 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 17:16:09 crc kubenswrapper[4558]: 
E0120 17:16:09.312409 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/61a672e2-963e-4428-bb97-e2177ac10c06-operator-scripts podName:61a672e2-963e-4428-bb97-e2177ac10c06 nodeName:}" failed. No retries permitted until 2026-01-20 17:16:11.312396505 +0000 UTC m=+2065.072734472 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/61a672e2-963e-4428-bb97-e2177ac10c06-operator-scripts") pod "barbican-5773-account-create-update-qmwj7" (UID: "61a672e2-963e-4428-bb97-e2177ac10c06") : configmap "openstack-scripts" not found Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.317339 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/691cf1c1-d617-489b-b0cd-1364328d6c60-kube-api-access-x8w2w" (OuterVolumeSpecName: "kube-api-access-x8w2w") pod "691cf1c1-d617-489b-b0cd-1364328d6c60" (UID: "691cf1c1-d617-489b-b0cd-1364328d6c60"). InnerVolumeSpecName "kube-api-access-x8w2w". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.317680 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4b51c4ea-1df2-42b3-9aa7-d69471c234ed-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "4b51c4ea-1df2-42b3-9aa7-d69471c234ed" (UID: "4b51c4ea-1df2-42b3-9aa7-d69471c234ed"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.318038 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/691cf1c1-d617-489b-b0cd-1364328d6c60-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "691cf1c1-d617-489b-b0cd-1364328d6c60" (UID: "691cf1c1-d617-489b-b0cd-1364328d6c60"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:16:09 crc kubenswrapper[4558]: E0120 17:16:09.324810 4558 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfe030148_30ef_4bb7_bf94_8835a0707df1.slice/crio-e10a8df049a111763b05a5a1434dadcfda33fdd06b015a788e15885a841c4323\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfe030148_30ef_4bb7_bf94_8835a0707df1.slice\": RecentStats: unable to find data in memory cache]" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.336344 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4b51c4ea-1df2-42b3-9aa7-d69471c234ed-kube-api-access-cwm5h" (OuterVolumeSpecName: "kube-api-access-cwm5h") pod "4b51c4ea-1df2-42b3-9aa7-d69471c234ed" (UID: "4b51c4ea-1df2-42b3-9aa7-d69471c234ed"). InnerVolumeSpecName "kube-api-access-cwm5h". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.389762 4558 scope.go:117] "RemoveContainer" containerID="e3f4b05578fbd81c020b9ad602df15fafd661cd2db06f53de10dce624927e791" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.413969 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/691cf1c1-d617-489b-b0cd-1364328d6c60-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.414237 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cwm5h\" (UniqueName: \"kubernetes.io/projected/4b51c4ea-1df2-42b3-9aa7-d69471c234ed-kube-api-access-cwm5h\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.414250 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x8w2w\" (UniqueName: \"kubernetes.io/projected/691cf1c1-d617-489b-b0cd-1364328d6c60-kube-api-access-x8w2w\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.414259 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4b51c4ea-1df2-42b3-9aa7-d69471c234ed-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.494793 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/rabbitmq-server-0" podUID="fd2edfa4-790f-49d7-9e32-29571e490aaf" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.1.170:5671: connect: connection refused" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.517532 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/31afa559-d026-4227-b6dc-44aa2796a781-operator-scripts\") pod \"keystone-3f8b-account-create-update-pg4kh\" (UID: \"31afa559-d026-4227-b6dc-44aa2796a781\") " pod="openstack-kuttl-tests/keystone-3f8b-account-create-update-pg4kh" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.517605 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g22xf\" (UniqueName: \"kubernetes.io/projected/31afa559-d026-4227-b6dc-44aa2796a781-kube-api-access-g22xf\") pod \"keystone-3f8b-account-create-update-pg4kh\" (UID: \"31afa559-d026-4227-b6dc-44aa2796a781\") " pod="openstack-kuttl-tests/keystone-3f8b-account-create-update-pg4kh" Jan 20 17:16:09 crc kubenswrapper[4558]: E0120 17:16:09.517698 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 17:16:09 crc kubenswrapper[4558]: E0120 17:16:09.517763 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/31afa559-d026-4227-b6dc-44aa2796a781-operator-scripts podName:31afa559-d026-4227-b6dc-44aa2796a781 nodeName:}" failed. No retries permitted until 2026-01-20 17:16:11.517747998 +0000 UTC m=+2065.278085964 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/31afa559-d026-4227-b6dc-44aa2796a781-operator-scripts") pod "keystone-3f8b-account-create-update-pg4kh" (UID: "31afa559-d026-4227-b6dc-44aa2796a781") : configmap "openstack-scripts" not found Jan 20 17:16:09 crc kubenswrapper[4558]: E0120 17:16:09.523190 4558 projected.go:194] Error preparing data for projected volume kube-api-access-g22xf for pod openstack-kuttl-tests/keystone-3f8b-account-create-update-pg4kh: failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 17:16:09 crc kubenswrapper[4558]: E0120 17:16:09.523249 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/31afa559-d026-4227-b6dc-44aa2796a781-kube-api-access-g22xf podName:31afa559-d026-4227-b6dc-44aa2796a781 nodeName:}" failed. No retries permitted until 2026-01-20 17:16:11.523233983 +0000 UTC m=+2065.283571950 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-g22xf" (UniqueName: "kubernetes.io/projected/31afa559-d026-4227-b6dc-44aa2796a781-kube-api-access-g22xf") pod "keystone-3f8b-account-create-update-pg4kh" (UID: "31afa559-d026-4227-b6dc-44aa2796a781") : failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.786292 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/watcher-api-0" podUID="0a49fae8-fd33-4a50-b967-e8e8cc48731a" containerName="watcher-api-log" probeResult="failure" output="Get \"https://10.217.1.230:9322/\": read tcp 10.217.0.2:52188->10.217.1.230:9322: read: connection reset by peer" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.786664 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/watcher-api-0" podUID="0a49fae8-fd33-4a50-b967-e8e8cc48731a" containerName="watcher-api" probeResult="failure" output="Get \"https://10.217.1.230:9322/\": read tcp 10.217.0.2:52200->10.217.1.230:9322: read: connection reset by peer" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.834208 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/nova-metadata-0" podUID="70dda975-7e0b-40a4-a92a-675be53560b7" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.95:8775/\": read tcp 10.217.0.2:33912->10.217.0.95:8775: read: connection reset by peer" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.834493 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/nova-metadata-0" podUID="70dda975-7e0b-40a4-a92a-675be53560b7" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.95:8775/\": read tcp 10.217.0.2:33918->10.217.0.95:8775: read: connection reset by peer" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.872573 4558 generic.go:334] "Generic (PLEG): container finished" podID="a9ada454-f6fa-4ea4-b386-3ddbcd37f233" containerID="dbed1dfb06d84c003e0890032b6a972606b43286eee0111f21ef16dfa9e94a1d" exitCode=0 Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.872678 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"a9ada454-f6fa-4ea4-b386-3ddbcd37f233","Type":"ContainerDied","Data":"dbed1dfb06d84c003e0890032b6a972606b43286eee0111f21ef16dfa9e94a1d"} Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.873939 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-kuttl-tests/barbican-5773-account-create-update-qmwj7" event={"ID":"61a672e2-963e-4428-bb97-e2177ac10c06","Type":"ContainerDied","Data":"7d9db7f66647291925c63ae526cfdcf84b09a6cff6065c71778661a618565484"} Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.873966 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7d9db7f66647291925c63ae526cfdcf84b09a6cff6065c71778661a618565484" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.876375 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"fc9b3fed-9730-4199-b074-3f01cdc1b7ce","Type":"ContainerDied","Data":"cac139f685c54cd4045122f570a333d81828bb041bd0a2b6deaf3f56cd1930fd"} Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.876512 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.881628 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovn-northd-0_e8370668-8857-4be3-bcaa-bae8d6cdd158/ovn-northd/0.log" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.881679 4558 generic.go:334] "Generic (PLEG): container finished" podID="e8370668-8857-4be3-bcaa-bae8d6cdd158" containerID="a07ebe0308220308f5b0687be7bf7284b0986dfc78e64ccb177087d9a970bb3f" exitCode=139 Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.881762 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"e8370668-8857-4be3-bcaa-bae8d6cdd158","Type":"ContainerDied","Data":"a07ebe0308220308f5b0687be7bf7284b0986dfc78e64ccb177087d9a970bb3f"} Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.889959 4558 generic.go:334] "Generic (PLEG): container finished" podID="249a3706-31a6-4fb2-9a3f-94700d9ae30e" containerID="1d078bc9b7292d0036875780ff82d54713ce56fd9923a103f4645e0f8a09e9ff" exitCode=0 Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.890010 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-7ddd44bcf8-9tj9c" event={"ID":"249a3706-31a6-4fb2-9a3f-94700d9ae30e","Type":"ContainerDied","Data":"1d078bc9b7292d0036875780ff82d54713ce56fd9923a103f4645e0f8a09e9ff"} Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.894545 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-ndwh7" event={"ID":"ef393209-80db-43c7-a749-3ec7826f2123","Type":"ContainerStarted","Data":"7198bce5f93612a72f1bc29427e2626f3e569032458fe9469fe3c848a987dd0d"} Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.895699 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-ndwh7" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.902113 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-d62f-account-create-update-8xjrt" event={"ID":"23341c92-6342-4e5f-904c-17fdff2841c5","Type":"ContainerDied","Data":"98c7600ecf6b84c78eb0310e54f222b82522b0ca6f461af596983ebc0749aa39"} Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.902178 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="98c7600ecf6b84c78eb0310e54f222b82522b0ca6f461af596983ebc0749aa39" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.907239 4558 generic.go:334] "Generic (PLEG): container finished" podID="5588d9e2-2f89-4abb-94de-76b5791582a5" 
containerID="674fe7739106de999aea93678b41abfb64307a7d4593b865b7537759b3ee32a7" exitCode=0 Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.907312 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"5588d9e2-2f89-4abb-94de-76b5791582a5","Type":"ContainerDied","Data":"674fe7739106de999aea93678b41abfb64307a7d4593b865b7537759b3ee32a7"} Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.908809 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/watcher-be54-account-create-update-jd6tl" event={"ID":"b66dc001-a49e-439f-ad5f-eb6ed96dc37f","Type":"ContainerDied","Data":"3fc6aafe3d6f3ca575fb48988ed1a9a7bafff517fbd49c1f58cfa6b3908bbc28"} Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.908845 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3fc6aafe3d6f3ca575fb48988ed1a9a7bafff517fbd49c1f58cfa6b3908bbc28" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.912058 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-7601-account-create-update-wx6mf" event={"ID":"5cdda1de-c1bb-45a1-a67b-9fe6e8d699df","Type":"ContainerDied","Data":"7148ebe9db90c871beae75d4bfb0313e5f96c860952168a7db736eda9d2247fe"} Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.912134 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-7601-account-create-update-wx6mf" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.919404 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/barbican-api-6587ff9c6b-8wmpk" podUID="9116eb6c-0630-4022-9c43-8c8793e08922" containerName="barbican-api" probeResult="failure" output="Get \"https://10.217.1.223:9311/healthcheck\": read tcp 10.217.0.2:44550->10.217.1.223:9311: read: connection reset by peer" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.919425 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/barbican-api-6587ff9c6b-8wmpk" podUID="9116eb6c-0630-4022-9c43-8c8793e08922" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.1.223:9311/healthcheck\": read tcp 10.217.0.2:44540->10.217.1.223:9311: read: connection reset by peer" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.922623 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-psddp" event={"ID":"691cf1c1-d617-489b-b0cd-1364328d6c60","Type":"ContainerDied","Data":"40d909a66ed44cd6bb4de563faaed560da7fb0bdf5438b93b480b4efa77a2cda"} Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.922756 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-psddp" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.923792 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-ndwh7" podStartSLOduration=4.923778135 podStartE2EDuration="4.923778135s" podCreationTimestamp="2026-01-20 17:16:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:16:09.91023043 +0000 UTC m=+2063.670568397" watchObservedRunningTime="2026-01-20 17:16:09.923778135 +0000 UTC m=+2063.684116103" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.964007 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-542c-account-create-update-fknl6" event={"ID":"4b51c4ea-1df2-42b3-9aa7-d69471c234ed","Type":"ContainerDied","Data":"6deab5eb33aca62a5b28b22446217194da0bcba5310ad67a07dcec73acf21a18"} Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.964253 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-542c-account-create-update-fknl6" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.983150 4558 generic.go:334] "Generic (PLEG): container finished" podID="eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc" containerID="4c99017bce2cd4c2557c9e2d276409ac4a6b6a986805b29e970cc3af2a59f1df" exitCode=0 Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.983234 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc","Type":"ContainerDied","Data":"4c99017bce2cd4c2557c9e2d276409ac4a6b6a986805b29e970cc3af2a59f1df"} Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.983264 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc","Type":"ContainerDied","Data":"d37c6a4934e531436a7197ab1418cec7c93e40413cab503ad229a613d4fe2a94"} Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.983275 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d37c6a4934e531436a7197ab1418cec7c93e40413cab503ad229a613d4fe2a94" Jan 20 17:16:09 crc kubenswrapper[4558]: I0120 17:16:09.991748 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-3f8b-account-create-update-pg4kh" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.077067 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/watcher-be54-account-create-update-jd6tl" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.089839 4558 scope.go:117] "RemoveContainer" containerID="690711c809e51e7b043bae2985f6866cfd82e22825c519971e1f62af4518288a" Jan 20 17:16:10 crc kubenswrapper[4558]: E0120 17:16:10.090760 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"690711c809e51e7b043bae2985f6866cfd82e22825c519971e1f62af4518288a\": container with ID starting with 690711c809e51e7b043bae2985f6866cfd82e22825c519971e1f62af4518288a not found: ID does not exist" containerID="690711c809e51e7b043bae2985f6866cfd82e22825c519971e1f62af4518288a" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.090807 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"690711c809e51e7b043bae2985f6866cfd82e22825c519971e1f62af4518288a"} err="failed to get container status \"690711c809e51e7b043bae2985f6866cfd82e22825c519971e1f62af4518288a\": rpc error: code = NotFound desc = could not find container \"690711c809e51e7b043bae2985f6866cfd82e22825c519971e1f62af4518288a\": container with ID starting with 690711c809e51e7b043bae2985f6866cfd82e22825c519971e1f62af4518288a not found: ID does not exist" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.090832 4558 scope.go:117] "RemoveContainer" containerID="e3f4b05578fbd81c020b9ad602df15fafd661cd2db06f53de10dce624927e791" Jan 20 17:16:10 crc kubenswrapper[4558]: E0120 17:16:10.094214 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e3f4b05578fbd81c020b9ad602df15fafd661cd2db06f53de10dce624927e791\": container with ID starting with e3f4b05578fbd81c020b9ad602df15fafd661cd2db06f53de10dce624927e791 not found: ID does not exist" containerID="e3f4b05578fbd81c020b9ad602df15fafd661cd2db06f53de10dce624927e791" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.094247 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e3f4b05578fbd81c020b9ad602df15fafd661cd2db06f53de10dce624927e791"} err="failed to get container status \"e3f4b05578fbd81c020b9ad602df15fafd661cd2db06f53de10dce624927e791\": rpc error: code = NotFound desc = could not find container \"e3f4b05578fbd81c020b9ad602df15fafd661cd2db06f53de10dce624927e791\": container with ID starting with e3f4b05578fbd81c020b9ad602df15fafd661cd2db06f53de10dce624927e791 not found: ID does not exist" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.094350 4558 scope.go:117] "RemoveContainer" containerID="690711c809e51e7b043bae2985f6866cfd82e22825c519971e1f62af4518288a" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.094439 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-d62f-account-create-update-8xjrt" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.096462 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"690711c809e51e7b043bae2985f6866cfd82e22825c519971e1f62af4518288a"} err="failed to get container status \"690711c809e51e7b043bae2985f6866cfd82e22825c519971e1f62af4518288a\": rpc error: code = NotFound desc = could not find container \"690711c809e51e7b043bae2985f6866cfd82e22825c519971e1f62af4518288a\": container with ID starting with 690711c809e51e7b043bae2985f6866cfd82e22825c519971e1f62af4518288a not found: ID does not exist" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.096485 4558 scope.go:117] "RemoveContainer" containerID="e3f4b05578fbd81c020b9ad602df15fafd661cd2db06f53de10dce624927e791" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.096703 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e3f4b05578fbd81c020b9ad602df15fafd661cd2db06f53de10dce624927e791"} err="failed to get container status \"e3f4b05578fbd81c020b9ad602df15fafd661cd2db06f53de10dce624927e791\": rpc error: code = NotFound desc = could not find container \"e3f4b05578fbd81c020b9ad602df15fafd661cd2db06f53de10dce624927e791\": container with ID starting with e3f4b05578fbd81c020b9ad602df15fafd661cd2db06f53de10dce624927e791 not found: ID does not exist" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.096720 4558 scope.go:117] "RemoveContainer" containerID="a7f915ce1e5f59e4f41a36080909e1ccc6e29dbe8d312e443b304657ef70a93d" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.099651 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-5773-account-create-update-qmwj7" Jan 20 17:16:10 crc kubenswrapper[4558]: E0120 17:16:10.118589 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of a07ebe0308220308f5b0687be7bf7284b0986dfc78e64ccb177087d9a970bb3f is running failed: container process not found" containerID="a07ebe0308220308f5b0687be7bf7284b0986dfc78e64ccb177087d9a970bb3f" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.128098 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-542c-account-create-update-fknl6"] Jan 20 17:16:10 crc kubenswrapper[4558]: E0120 17:16:10.129912 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of a07ebe0308220308f5b0687be7bf7284b0986dfc78e64ccb177087d9a970bb3f is running failed: container process not found" containerID="a07ebe0308220308f5b0687be7bf7284b0986dfc78e64ccb177087d9a970bb3f" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Jan 20 17:16:10 crc kubenswrapper[4558]: E0120 17:16:10.130570 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of a07ebe0308220308f5b0687be7bf7284b0986dfc78e64ccb177087d9a970bb3f is running failed: container process not found" containerID="a07ebe0308220308f5b0687be7bf7284b0986dfc78e64ccb177087d9a970bb3f" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Jan 20 17:16:10 crc kubenswrapper[4558]: E0120 17:16:10.130686 4558 prober.go:104] "Probe errored" err="rpc error: 
code = NotFound desc = container is not created or running: checking if PID of a07ebe0308220308f5b0687be7bf7284b0986dfc78e64ccb177087d9a970bb3f is running failed: container process not found" probeType="Readiness" pod="openstack-kuttl-tests/ovn-northd-0" podUID="e8370668-8857-4be3-bcaa-bae8d6cdd158" containerName="ovn-northd" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.131761 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.133446 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.136254 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-542c-account-create-update-fknl6"] Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.144400 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.144448 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.153526 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.155772 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovn-northd-0_e8370668-8857-4be3-bcaa-bae8d6cdd158/ovn-northd/0.log" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.161237 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.174402 4558 scope.go:117] "RemoveContainer" containerID="a7f915ce1e5f59e4f41a36080909e1ccc6e29dbe8d312e443b304657ef70a93d" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.177001 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-7601-account-create-update-wx6mf"] Jan 20 17:16:10 crc kubenswrapper[4558]: E0120 17:16:10.187316 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a7f915ce1e5f59e4f41a36080909e1ccc6e29dbe8d312e443b304657ef70a93d\": container with ID starting with a7f915ce1e5f59e4f41a36080909e1ccc6e29dbe8d312e443b304657ef70a93d not found: ID does not exist" containerID="a7f915ce1e5f59e4f41a36080909e1ccc6e29dbe8d312e443b304657ef70a93d" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.187361 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a7f915ce1e5f59e4f41a36080909e1ccc6e29dbe8d312e443b304657ef70a93d"} err="failed to get container status \"a7f915ce1e5f59e4f41a36080909e1ccc6e29dbe8d312e443b304657ef70a93d\": rpc error: code = NotFound desc = could not find container \"a7f915ce1e5f59e4f41a36080909e1ccc6e29dbe8d312e443b304657ef70a93d\": container with ID starting with a7f915ce1e5f59e4f41a36080909e1ccc6e29dbe8d312e443b304657ef70a93d not found: ID does not exist" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.187387 4558 scope.go:117] "RemoveContainer" containerID="e1569c77aa4164d84391079102afae404d6f206b7e90d3e21dd30d7b3d65062f" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.193773 4558 util.go:48] "No ready sandbox for pod can be 
found. Need to start a new one" pod="openstack-kuttl-tests/placement-7ddd44bcf8-9tj9c" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.225507 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-7601-account-create-update-wx6mf"] Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.239420 4558 scope.go:117] "RemoveContainer" containerID="bb25797442bb8998337533bd758e5a01a780b03500429252dd5184a37ffe2a56" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.246546 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-psddp"] Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.254843 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5588d9e2-2f89-4abb-94de-76b5791582a5-scripts\") pod \"5588d9e2-2f89-4abb-94de-76b5791582a5\" (UID: \"5588d9e2-2f89-4abb-94de-76b5791582a5\") " Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.254924 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc-config-data-generated\") pod \"eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc\" (UID: \"eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc\") " Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.254949 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b66dc001-a49e-439f-ad5f-eb6ed96dc37f-operator-scripts\") pod \"b66dc001-a49e-439f-ad5f-eb6ed96dc37f\" (UID: \"b66dc001-a49e-439f-ad5f-eb6ed96dc37f\") " Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.255365 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9ada454-f6fa-4ea4-b386-3ddbcd37f233-combined-ca-bundle\") pod \"a9ada454-f6fa-4ea4-b386-3ddbcd37f233\" (UID: \"a9ada454-f6fa-4ea4-b386-3ddbcd37f233\") " Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.255391 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-55bnq\" (UniqueName: \"kubernetes.io/projected/eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc-kube-api-access-55bnq\") pod \"eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc\" (UID: \"eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc\") " Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.255416 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4wvct\" (UniqueName: \"kubernetes.io/projected/5588d9e2-2f89-4abb-94de-76b5791582a5-kube-api-access-4wvct\") pod \"5588d9e2-2f89-4abb-94de-76b5791582a5\" (UID: \"5588d9e2-2f89-4abb-94de-76b5791582a5\") " Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.255446 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/e8370668-8857-4be3-bcaa-bae8d6cdd158-ovn-rundir\") pod \"e8370668-8857-4be3-bcaa-bae8d6cdd158\" (UID: \"e8370668-8857-4be3-bcaa-bae8d6cdd158\") " Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.255475 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/e8370668-8857-4be3-bcaa-bae8d6cdd158-ovn-northd-tls-certs\") pod \"e8370668-8857-4be3-bcaa-bae8d6cdd158\" (UID: \"e8370668-8857-4be3-bcaa-bae8d6cdd158\") " Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 
17:16:10.255502 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc\" (UID: \"eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc\") " Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.255576 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l4296\" (UniqueName: \"kubernetes.io/projected/61a672e2-963e-4428-bb97-e2177ac10c06-kube-api-access-l4296\") pod \"61a672e2-963e-4428-bb97-e2177ac10c06\" (UID: \"61a672e2-963e-4428-bb97-e2177ac10c06\") " Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.255600 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e8370668-8857-4be3-bcaa-bae8d6cdd158-scripts\") pod \"e8370668-8857-4be3-bcaa-bae8d6cdd158\" (UID: \"e8370668-8857-4be3-bcaa-bae8d6cdd158\") " Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.255620 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a9ada454-f6fa-4ea4-b386-3ddbcd37f233-httpd-run\") pod \"a9ada454-f6fa-4ea4-b386-3ddbcd37f233\" (UID: \"a9ada454-f6fa-4ea4-b386-3ddbcd37f233\") " Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.255635 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a9ada454-f6fa-4ea4-b386-3ddbcd37f233-logs\") pod \"a9ada454-f6fa-4ea4-b386-3ddbcd37f233\" (UID: \"a9ada454-f6fa-4ea4-b386-3ddbcd37f233\") " Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.255651 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc-operator-scripts\") pod \"eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc\" (UID: \"eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc\") " Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.255747 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nqspr\" (UniqueName: \"kubernetes.io/projected/23341c92-6342-4e5f-904c-17fdff2841c5-kube-api-access-nqspr\") pod \"23341c92-6342-4e5f-904c-17fdff2841c5\" (UID: \"23341c92-6342-4e5f-904c-17fdff2841c5\") " Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.255772 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mw495\" (UniqueName: \"kubernetes.io/projected/b66dc001-a49e-439f-ad5f-eb6ed96dc37f-kube-api-access-mw495\") pod \"b66dc001-a49e-439f-ad5f-eb6ed96dc37f\" (UID: \"b66dc001-a49e-439f-ad5f-eb6ed96dc37f\") " Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.255797 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-psddp"] Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.255807 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5588d9e2-2f89-4abb-94de-76b5791582a5-httpd-run\") pod \"5588d9e2-2f89-4abb-94de-76b5791582a5\" (UID: \"5588d9e2-2f89-4abb-94de-76b5791582a5\") " Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.255915 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e8370668-8857-4be3-bcaa-bae8d6cdd158-ovn-rundir" (OuterVolumeSpecName: "ovn-rundir") pod 
"e8370668-8857-4be3-bcaa-bae8d6cdd158" (UID: "e8370668-8857-4be3-bcaa-bae8d6cdd158"). InnerVolumeSpecName "ovn-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.255931 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8370668-8857-4be3-bcaa-bae8d6cdd158-combined-ca-bundle\") pod \"e8370668-8857-4be3-bcaa-bae8d6cdd158\" (UID: \"e8370668-8857-4be3-bcaa-bae8d6cdd158\") " Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.255973 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/23341c92-6342-4e5f-904c-17fdff2841c5-operator-scripts\") pod \"23341c92-6342-4e5f-904c-17fdff2841c5\" (UID: \"23341c92-6342-4e5f-904c-17fdff2841c5\") " Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.256017 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc-kolla-config\") pod \"eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc\" (UID: \"eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc\") " Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.256041 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage17-crc\") pod \"5588d9e2-2f89-4abb-94de-76b5791582a5\" (UID: \"5588d9e2-2f89-4abb-94de-76b5791582a5\") " Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.256080 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a9ada454-f6fa-4ea4-b386-3ddbcd37f233-public-tls-certs\") pod \"a9ada454-f6fa-4ea4-b386-3ddbcd37f233\" (UID: \"a9ada454-f6fa-4ea4-b386-3ddbcd37f233\") " Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.256107 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e8370668-8857-4be3-bcaa-bae8d6cdd158-metrics-certs-tls-certs\") pod \"e8370668-8857-4be3-bcaa-bae8d6cdd158\" (UID: \"e8370668-8857-4be3-bcaa-bae8d6cdd158\") " Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.256127 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5588d9e2-2f89-4abb-94de-76b5791582a5-internal-tls-certs\") pod \"5588d9e2-2f89-4abb-94de-76b5791582a5\" (UID: \"5588d9e2-2f89-4abb-94de-76b5791582a5\") " Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.256247 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5588d9e2-2f89-4abb-94de-76b5791582a5-logs\") pod \"5588d9e2-2f89-4abb-94de-76b5791582a5\" (UID: \"5588d9e2-2f89-4abb-94de-76b5791582a5\") " Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.256275 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a9ada454-f6fa-4ea4-b386-3ddbcd37f233-config-data\") pod \"a9ada454-f6fa-4ea4-b386-3ddbcd37f233\" (UID: \"a9ada454-f6fa-4ea4-b386-3ddbcd37f233\") " Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.256345 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/e8370668-8857-4be3-bcaa-bae8d6cdd158-config\") pod \"e8370668-8857-4be3-bcaa-bae8d6cdd158\" (UID: \"e8370668-8857-4be3-bcaa-bae8d6cdd158\") " Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.256374 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5588d9e2-2f89-4abb-94de-76b5791582a5-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "5588d9e2-2f89-4abb-94de-76b5791582a5" (UID: "5588d9e2-2f89-4abb-94de-76b5791582a5"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.256450 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9qjg6\" (UniqueName: \"kubernetes.io/projected/e8370668-8857-4be3-bcaa-bae8d6cdd158-kube-api-access-9qjg6\") pod \"e8370668-8857-4be3-bcaa-bae8d6cdd158\" (UID: \"e8370668-8857-4be3-bcaa-bae8d6cdd158\") " Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.256530 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc-galera-tls-certs\") pod \"eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc\" (UID: \"eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc\") " Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.256567 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5588d9e2-2f89-4abb-94de-76b5791582a5-config-data\") pod \"5588d9e2-2f89-4abb-94de-76b5791582a5\" (UID: \"5588d9e2-2f89-4abb-94de-76b5791582a5\") " Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.256742 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5588d9e2-2f89-4abb-94de-76b5791582a5-combined-ca-bundle\") pod \"5588d9e2-2f89-4abb-94de-76b5791582a5\" (UID: \"5588d9e2-2f89-4abb-94de-76b5791582a5\") " Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.256801 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fz7gc\" (UniqueName: \"kubernetes.io/projected/a9ada454-f6fa-4ea4-b386-3ddbcd37f233-kube-api-access-fz7gc\") pod \"a9ada454-f6fa-4ea4-b386-3ddbcd37f233\" (UID: \"a9ada454-f6fa-4ea4-b386-3ddbcd37f233\") " Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.256834 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc-config-data-default\") pod \"eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc\" (UID: \"eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc\") " Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.257439 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/61a672e2-963e-4428-bb97-e2177ac10c06-operator-scripts\") pod \"61a672e2-963e-4428-bb97-e2177ac10c06\" (UID: \"61a672e2-963e-4428-bb97-e2177ac10c06\") " Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.257494 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc-combined-ca-bundle\") pod \"eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc\" (UID: \"eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc\") " Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.257542 4558 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a9ada454-f6fa-4ea4-b386-3ddbcd37f233-scripts\") pod \"a9ada454-f6fa-4ea4-b386-3ddbcd37f233\" (UID: \"a9ada454-f6fa-4ea4-b386-3ddbcd37f233\") " Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.257573 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"a9ada454-f6fa-4ea4-b386-3ddbcd37f233\" (UID: \"a9ada454-f6fa-4ea4-b386-3ddbcd37f233\") " Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.258547 4558 reconciler_common.go:293] "Volume detached for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/e8370668-8857-4be3-bcaa-bae8d6cdd158-ovn-rundir\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.259189 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e8370668-8857-4be3-bcaa-bae8d6cdd158-config" (OuterVolumeSpecName: "config") pod "e8370668-8857-4be3-bcaa-bae8d6cdd158" (UID: "e8370668-8857-4be3-bcaa-bae8d6cdd158"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.259778 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a9ada454-f6fa-4ea4-b386-3ddbcd37f233-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "a9ada454-f6fa-4ea4-b386-3ddbcd37f233" (UID: "a9ada454-f6fa-4ea4-b386-3ddbcd37f233"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.260661 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc" (UID: "eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.261415 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc" (UID: "eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.261904 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a9ada454-f6fa-4ea4-b386-3ddbcd37f233-logs" (OuterVolumeSpecName: "logs") pod "a9ada454-f6fa-4ea4-b386-3ddbcd37f233" (UID: "a9ada454-f6fa-4ea4-b386-3ddbcd37f233"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.265600 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5588d9e2-2f89-4abb-94de-76b5791582a5-kube-api-access-4wvct" (OuterVolumeSpecName: "kube-api-access-4wvct") pod "5588d9e2-2f89-4abb-94de-76b5791582a5" (UID: "5588d9e2-2f89-4abb-94de-76b5791582a5"). InnerVolumeSpecName "kube-api-access-4wvct". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.266588 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/23341c92-6342-4e5f-904c-17fdff2841c5-kube-api-access-nqspr" (OuterVolumeSpecName: "kube-api-access-nqspr") pod "23341c92-6342-4e5f-904c-17fdff2841c5" (UID: "23341c92-6342-4e5f-904c-17fdff2841c5"). InnerVolumeSpecName "kube-api-access-nqspr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.267117 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/23341c92-6342-4e5f-904c-17fdff2841c5-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "23341c92-6342-4e5f-904c-17fdff2841c5" (UID: "23341c92-6342-4e5f-904c-17fdff2841c5"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.267265 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b66dc001-a49e-439f-ad5f-eb6ed96dc37f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b66dc001-a49e-439f-ad5f-eb6ed96dc37f" (UID: "b66dc001-a49e-439f-ad5f-eb6ed96dc37f"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.267638 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc" (UID: "eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.268136 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e8370668-8857-4be3-bcaa-bae8d6cdd158-scripts" (OuterVolumeSpecName: "scripts") pod "e8370668-8857-4be3-bcaa-bae8d6cdd158" (UID: "e8370668-8857-4be3-bcaa-bae8d6cdd158"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.268602 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc" (UID: "eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.268878 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5588d9e2-2f89-4abb-94de-76b5791582a5-logs" (OuterVolumeSpecName: "logs") pod "5588d9e2-2f89-4abb-94de-76b5791582a5" (UID: "5588d9e2-2f89-4abb-94de-76b5791582a5"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.282787 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc-kube-api-access-55bnq" (OuterVolumeSpecName: "kube-api-access-55bnq") pod "eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc" (UID: "eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc"). InnerVolumeSpecName "kube-api-access-55bnq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.285553 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage17-crc" (OuterVolumeSpecName: "glance") pod "5588d9e2-2f89-4abb-94de-76b5791582a5" (UID: "5588d9e2-2f89-4abb-94de-76b5791582a5"). InnerVolumeSpecName "local-storage17-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.286271 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-3f8b-account-create-update-pg4kh"] Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.287561 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9ada454-f6fa-4ea4-b386-3ddbcd37f233-scripts" (OuterVolumeSpecName: "scripts") pod "a9ada454-f6fa-4ea4-b386-3ddbcd37f233" (UID: "a9ada454-f6fa-4ea4-b386-3ddbcd37f233"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.296179 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage13-crc" (OuterVolumeSpecName: "glance") pod "a9ada454-f6fa-4ea4-b386-3ddbcd37f233" (UID: "a9ada454-f6fa-4ea4-b386-3ddbcd37f233"). InnerVolumeSpecName "local-storage13-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.296200 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-3f8b-account-create-update-pg4kh"] Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.296271 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a9ada454-f6fa-4ea4-b386-3ddbcd37f233-kube-api-access-fz7gc" (OuterVolumeSpecName: "kube-api-access-fz7gc") pod "a9ada454-f6fa-4ea4-b386-3ddbcd37f233" (UID: "a9ada454-f6fa-4ea4-b386-3ddbcd37f233"). InnerVolumeSpecName "kube-api-access-fz7gc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.296588 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/61a672e2-963e-4428-bb97-e2177ac10c06-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "61a672e2-963e-4428-bb97-e2177ac10c06" (UID: "61a672e2-963e-4428-bb97-e2177ac10c06"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.309010 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e8370668-8857-4be3-bcaa-bae8d6cdd158-kube-api-access-9qjg6" (OuterVolumeSpecName: "kube-api-access-9qjg6") pod "e8370668-8857-4be3-bcaa-bae8d6cdd158" (UID: "e8370668-8857-4be3-bcaa-bae8d6cdd158"). InnerVolumeSpecName "kube-api-access-9qjg6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.314495 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5588d9e2-2f89-4abb-94de-76b5791582a5-scripts" (OuterVolumeSpecName: "scripts") pod "5588d9e2-2f89-4abb-94de-76b5791582a5" (UID: "5588d9e2-2f89-4abb-94de-76b5791582a5"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.314617 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b66dc001-a49e-439f-ad5f-eb6ed96dc37f-kube-api-access-mw495" (OuterVolumeSpecName: "kube-api-access-mw495") pod "b66dc001-a49e-439f-ad5f-eb6ed96dc37f" (UID: "b66dc001-a49e-439f-ad5f-eb6ed96dc37f"). InnerVolumeSpecName "kube-api-access-mw495". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.318734 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/61a672e2-963e-4428-bb97-e2177ac10c06-kube-api-access-l4296" (OuterVolumeSpecName: "kube-api-access-l4296") pod "61a672e2-963e-4428-bb97-e2177ac10c06" (UID: "61a672e2-963e-4428-bb97-e2177ac10c06"). InnerVolumeSpecName "kube-api-access-l4296". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.331339 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "mysql-db") pod "eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc" (UID: "eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc"). InnerVolumeSpecName "local-storage05-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.346246 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/watcher-api-0" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.359611 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/249a3706-31a6-4fb2-9a3f-94700d9ae30e-public-tls-certs\") pod \"249a3706-31a6-4fb2-9a3f-94700d9ae30e\" (UID: \"249a3706-31a6-4fb2-9a3f-94700d9ae30e\") " Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.359669 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/249a3706-31a6-4fb2-9a3f-94700d9ae30e-internal-tls-certs\") pod \"249a3706-31a6-4fb2-9a3f-94700d9ae30e\" (UID: \"249a3706-31a6-4fb2-9a3f-94700d9ae30e\") " Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.359828 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/249a3706-31a6-4fb2-9a3f-94700d9ae30e-combined-ca-bundle\") pod \"249a3706-31a6-4fb2-9a3f-94700d9ae30e\" (UID: \"249a3706-31a6-4fb2-9a3f-94700d9ae30e\") " Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.359854 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/249a3706-31a6-4fb2-9a3f-94700d9ae30e-logs\") pod \"249a3706-31a6-4fb2-9a3f-94700d9ae30e\" (UID: \"249a3706-31a6-4fb2-9a3f-94700d9ae30e\") " Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.360415 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/249a3706-31a6-4fb2-9a3f-94700d9ae30e-config-data\") pod \"249a3706-31a6-4fb2-9a3f-94700d9ae30e\" (UID: \"249a3706-31a6-4fb2-9a3f-94700d9ae30e\") " Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.360463 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/249a3706-31a6-4fb2-9a3f-94700d9ae30e-scripts\") pod \"249a3706-31a6-4fb2-9a3f-94700d9ae30e\" (UID: \"249a3706-31a6-4fb2-9a3f-94700d9ae30e\") " Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.360563 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xgpbd\" (UniqueName: \"kubernetes.io/projected/249a3706-31a6-4fb2-9a3f-94700d9ae30e-kube-api-access-xgpbd\") pod \"249a3706-31a6-4fb2-9a3f-94700d9ae30e\" (UID: \"249a3706-31a6-4fb2-9a3f-94700d9ae30e\") " Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.360593 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/249a3706-31a6-4fb2-9a3f-94700d9ae30e-logs" (OuterVolumeSpecName: "logs") pod "249a3706-31a6-4fb2-9a3f-94700d9ae30e" (UID: "249a3706-31a6-4fb2-9a3f-94700d9ae30e"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.361585 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e8370668-8857-4be3-bcaa-bae8d6cdd158-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.361604 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9qjg6\" (UniqueName: \"kubernetes.io/projected/e8370668-8857-4be3-bcaa-bae8d6cdd158-kube-api-access-9qjg6\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.361616 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fz7gc\" (UniqueName: \"kubernetes.io/projected/a9ada454-f6fa-4ea4-b386-3ddbcd37f233-kube-api-access-fz7gc\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.361627 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc-config-data-default\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.361637 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/249a3706-31a6-4fb2-9a3f-94700d9ae30e-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.361647 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/61a672e2-963e-4428-bb97-e2177ac10c06-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.361657 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a9ada454-f6fa-4ea4-b386-3ddbcd37f233-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.361678 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") on node \"crc\" " Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.361687 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/31afa559-d026-4227-b6dc-44aa2796a781-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.361695 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5588d9e2-2f89-4abb-94de-76b5791582a5-scripts\") 
on node \"crc\" DevicePath \"\"" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.361704 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc-config-data-generated\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.361716 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b66dc001-a49e-439f-ad5f-eb6ed96dc37f-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.361724 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-55bnq\" (UniqueName: \"kubernetes.io/projected/eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc-kube-api-access-55bnq\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.361733 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4wvct\" (UniqueName: \"kubernetes.io/projected/5588d9e2-2f89-4abb-94de-76b5791582a5-kube-api-access-4wvct\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.361748 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" " Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.361759 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l4296\" (UniqueName: \"kubernetes.io/projected/61a672e2-963e-4428-bb97-e2177ac10c06-kube-api-access-l4296\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.361767 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e8370668-8857-4be3-bcaa-bae8d6cdd158-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.361775 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a9ada454-f6fa-4ea4-b386-3ddbcd37f233-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.361785 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a9ada454-f6fa-4ea4-b386-3ddbcd37f233-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.361794 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.361804 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nqspr\" (UniqueName: \"kubernetes.io/projected/23341c92-6342-4e5f-904c-17fdff2841c5-kube-api-access-nqspr\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.361813 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mw495\" (UniqueName: \"kubernetes.io/projected/b66dc001-a49e-439f-ad5f-eb6ed96dc37f-kube-api-access-mw495\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.361821 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5588d9e2-2f89-4abb-94de-76b5791582a5-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 
17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.361830 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g22xf\" (UniqueName: \"kubernetes.io/projected/31afa559-d026-4227-b6dc-44aa2796a781-kube-api-access-g22xf\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.361839 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/23341c92-6342-4e5f-904c-17fdff2841c5-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.361847 4558 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.361861 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage17-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage17-crc\") on node \"crc\" " Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.361869 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5588d9e2-2f89-4abb-94de-76b5791582a5-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.380333 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/249a3706-31a6-4fb2-9a3f-94700d9ae30e-kube-api-access-xgpbd" (OuterVolumeSpecName: "kube-api-access-xgpbd") pod "249a3706-31a6-4fb2-9a3f-94700d9ae30e" (UID: "249a3706-31a6-4fb2-9a3f-94700d9ae30e"). InnerVolumeSpecName "kube-api-access-xgpbd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.380775 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc" (UID: "eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.382793 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/249a3706-31a6-4fb2-9a3f-94700d9ae30e-scripts" (OuterVolumeSpecName: "scripts") pod "249a3706-31a6-4fb2-9a3f-94700d9ae30e" (UID: "249a3706-31a6-4fb2-9a3f-94700d9ae30e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.400354 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.447958 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9ada454-f6fa-4ea4-b386-3ddbcd37f233-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a9ada454-f6fa-4ea4-b386-3ddbcd37f233" (UID: "a9ada454-f6fa-4ea4-b386-3ddbcd37f233"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.448225 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage17-crc" (UniqueName: "kubernetes.io/local-volume/local-storage17-crc") on node "crc" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.463723 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0a49fae8-fd33-4a50-b967-e8e8cc48731a-logs\") pod \"0a49fae8-fd33-4a50-b967-e8e8cc48731a\" (UID: \"0a49fae8-fd33-4a50-b967-e8e8cc48731a\") " Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.463779 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/0a49fae8-fd33-4a50-b967-e8e8cc48731a-custom-prometheus-ca\") pod \"0a49fae8-fd33-4a50-b967-e8e8cc48731a\" (UID: \"0a49fae8-fd33-4a50-b967-e8e8cc48731a\") " Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.463807 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0a49fae8-fd33-4a50-b967-e8e8cc48731a-config-data\") pod \"0a49fae8-fd33-4a50-b967-e8e8cc48731a\" (UID: \"0a49fae8-fd33-4a50-b967-e8e8cc48731a\") " Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.464078 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0a49fae8-fd33-4a50-b967-e8e8cc48731a-public-tls-certs\") pod \"0a49fae8-fd33-4a50-b967-e8e8cc48731a\" (UID: \"0a49fae8-fd33-4a50-b967-e8e8cc48731a\") " Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.464718 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0a49fae8-fd33-4a50-b967-e8e8cc48731a-internal-tls-certs\") pod \"0a49fae8-fd33-4a50-b967-e8e8cc48731a\" (UID: \"0a49fae8-fd33-4a50-b967-e8e8cc48731a\") " Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.464769 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mxm9b\" (UniqueName: \"kubernetes.io/projected/0a49fae8-fd33-4a50-b967-e8e8cc48731a-kube-api-access-mxm9b\") pod \"0a49fae8-fd33-4a50-b967-e8e8cc48731a\" (UID: \"0a49fae8-fd33-4a50-b967-e8e8cc48731a\") " Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.464869 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a49fae8-fd33-4a50-b967-e8e8cc48731a-combined-ca-bundle\") pod \"0a49fae8-fd33-4a50-b967-e8e8cc48731a\" (UID: \"0a49fae8-fd33-4a50-b967-e8e8cc48731a\") " Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.465473 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.465732 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/249a3706-31a6-4fb2-9a3f-94700d9ae30e-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.465748 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9ada454-f6fa-4ea4-b386-3ddbcd37f233-combined-ca-bundle\") on node \"crc\" 
DevicePath \"\"" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.465759 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xgpbd\" (UniqueName: \"kubernetes.io/projected/249a3706-31a6-4fb2-9a3f-94700d9ae30e-kube-api-access-xgpbd\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.465771 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.465780 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage17-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage17-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.467088 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0a49fae8-fd33-4a50-b967-e8e8cc48731a-logs" (OuterVolumeSpecName: "logs") pod "0a49fae8-fd33-4a50-b967-e8e8cc48731a" (UID: "0a49fae8-fd33-4a50-b967-e8e8cc48731a"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.471715 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage13-crc" (UniqueName: "kubernetes.io/local-volume/local-storage13-crc") on node "crc" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.492463 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0a49fae8-fd33-4a50-b967-e8e8cc48731a-kube-api-access-mxm9b" (OuterVolumeSpecName: "kube-api-access-mxm9b") pod "0a49fae8-fd33-4a50-b967-e8e8cc48731a" (UID: "0a49fae8-fd33-4a50-b967-e8e8cc48731a"). InnerVolumeSpecName "kube-api-access-mxm9b". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.529280 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5588d9e2-2f89-4abb-94de-76b5791582a5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5588d9e2-2f89-4abb-94de-76b5791582a5" (UID: "5588d9e2-2f89-4abb-94de-76b5791582a5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.548924 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e8370668-8857-4be3-bcaa-bae8d6cdd158-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e8370668-8857-4be3-bcaa-bae8d6cdd158" (UID: "e8370668-8857-4be3-bcaa-bae8d6cdd158"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.576243 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0a49fae8-fd33-4a50-b967-e8e8cc48731a-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.576340 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8370668-8857-4be3-bcaa-bae8d6cdd158-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.576356 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5588d9e2-2f89-4abb-94de-76b5791582a5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.576366 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.576598 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mxm9b\" (UniqueName: \"kubernetes.io/projected/0a49fae8-fd33-4a50-b967-e8e8cc48731a-kube-api-access-mxm9b\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.576239 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc" (UID: "eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc"). InnerVolumeSpecName "galera-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.598798 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="13a0a262-6f56-4cf9-9b0d-85110bcff1e7" path="/var/lib/kubelet/pods/13a0a262-6f56-4cf9-9b0d-85110bcff1e7/volumes" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.599758 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31afa559-d026-4227-b6dc-44aa2796a781" path="/var/lib/kubelet/pods/31afa559-d026-4227-b6dc-44aa2796a781/volumes" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.613536 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4b51c4ea-1df2-42b3-9aa7-d69471c234ed" path="/var/lib/kubelet/pods/4b51c4ea-1df2-42b3-9aa7-d69471c234ed/volumes" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.614681 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5cdda1de-c1bb-45a1-a67b-9fe6e8d699df" path="/var/lib/kubelet/pods/5cdda1de-c1bb-45a1-a67b-9fe6e8d699df/volumes" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.615082 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="691cf1c1-d617-489b-b0cd-1364328d6c60" path="/var/lib/kubelet/pods/691cf1c1-d617-489b-b0cd-1364328d6c60/volumes" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.619075 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6947f138-6d2f-40e2-a236-736d26d3a1e6" path="/var/lib/kubelet/pods/6947f138-6d2f-40e2-a236-736d26d3a1e6/volumes" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.619729 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ad371747-3ff0-49b0-b2a6-33d7bb7cd16b" 
path="/var/lib/kubelet/pods/ad371747-3ff0-49b0-b2a6-33d7bb7cd16b/volumes" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.620526 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e87a4844-8a90-4965-878f-d95aa09c47bb" path="/var/lib/kubelet/pods/e87a4844-8a90-4965-878f-d95aa09c47bb/volumes" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.628182 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fc9b3fed-9730-4199-b074-3f01cdc1b7ce" path="/var/lib/kubelet/pods/fc9b3fed-9730-4199-b074-3f01cdc1b7ce/volumes" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.628787 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fe030148-30ef-4bb7-bf94-8835a0707df1" path="/var/lib/kubelet/pods/fe030148-30ef-4bb7-bf94-8835a0707df1/volumes" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.639708 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5588d9e2-2f89-4abb-94de-76b5791582a5-config-data" (OuterVolumeSpecName: "config-data") pod "5588d9e2-2f89-4abb-94de-76b5791582a5" (UID: "5588d9e2-2f89-4abb-94de-76b5791582a5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.649580 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0a49fae8-fd33-4a50-b967-e8e8cc48731a-custom-prometheus-ca" (OuterVolumeSpecName: "custom-prometheus-ca") pod "0a49fae8-fd33-4a50-b967-e8e8cc48731a" (UID: "0a49fae8-fd33-4a50-b967-e8e8cc48731a"). InnerVolumeSpecName "custom-prometheus-ca". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.653262 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9ada454-f6fa-4ea4-b386-3ddbcd37f233-config-data" (OuterVolumeSpecName: "config-data") pod "a9ada454-f6fa-4ea4-b386-3ddbcd37f233" (UID: "a9ada454-f6fa-4ea4-b386-3ddbcd37f233"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.670874 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e8370668-8857-4be3-bcaa-bae8d6cdd158-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "e8370668-8857-4be3-bcaa-bae8d6cdd158" (UID: "e8370668-8857-4be3-bcaa-bae8d6cdd158"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.671986 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/249a3706-31a6-4fb2-9a3f-94700d9ae30e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "249a3706-31a6-4fb2-9a3f-94700d9ae30e" (UID: "249a3706-31a6-4fb2-9a3f-94700d9ae30e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.675330 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9ada454-f6fa-4ea4-b386-3ddbcd37f233-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "a9ada454-f6fa-4ea4-b386-3ddbcd37f233" (UID: "a9ada454-f6fa-4ea4-b386-3ddbcd37f233"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.679636 4558 reconciler_common.go:293] "Volume detached for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/0a49fae8-fd33-4a50-b967-e8e8cc48731a-custom-prometheus-ca\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.679662 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a9ada454-f6fa-4ea4-b386-3ddbcd37f233-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.679672 4558 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e8370668-8857-4be3-bcaa-bae8d6cdd158-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.679685 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a9ada454-f6fa-4ea4-b386-3ddbcd37f233-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.679696 4558 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.679705 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5588d9e2-2f89-4abb-94de-76b5791582a5-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.679715 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/249a3706-31a6-4fb2-9a3f-94700d9ae30e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.688811 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e8370668-8857-4be3-bcaa-bae8d6cdd158-ovn-northd-tls-certs" (OuterVolumeSpecName: "ovn-northd-tls-certs") pod "e8370668-8857-4be3-bcaa-bae8d6cdd158" (UID: "e8370668-8857-4be3-bcaa-bae8d6cdd158"). InnerVolumeSpecName "ovn-northd-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.690465 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5588d9e2-2f89-4abb-94de-76b5791582a5-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "5588d9e2-2f89-4abb-94de-76b5791582a5" (UID: "5588d9e2-2f89-4abb-94de-76b5791582a5"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.706030 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/249a3706-31a6-4fb2-9a3f-94700d9ae30e-config-data" (OuterVolumeSpecName: "config-data") pod "249a3706-31a6-4fb2-9a3f-94700d9ae30e" (UID: "249a3706-31a6-4fb2-9a3f-94700d9ae30e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.714340 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0a49fae8-fd33-4a50-b967-e8e8cc48731a-config-data" (OuterVolumeSpecName: "config-data") pod "0a49fae8-fd33-4a50-b967-e8e8cc48731a" (UID: "0a49fae8-fd33-4a50-b967-e8e8cc48731a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.716902 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0a49fae8-fd33-4a50-b967-e8e8cc48731a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0a49fae8-fd33-4a50-b967-e8e8cc48731a" (UID: "0a49fae8-fd33-4a50-b967-e8e8cc48731a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.732476 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0a49fae8-fd33-4a50-b967-e8e8cc48731a-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "0a49fae8-fd33-4a50-b967-e8e8cc48731a" (UID: "0a49fae8-fd33-4a50-b967-e8e8cc48731a"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.738258 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0a49fae8-fd33-4a50-b967-e8e8cc48731a-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "0a49fae8-fd33-4a50-b967-e8e8cc48731a" (UID: "0a49fae8-fd33-4a50-b967-e8e8cc48731a"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.746748 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/249a3706-31a6-4fb2-9a3f-94700d9ae30e-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "249a3706-31a6-4fb2-9a3f-94700d9ae30e" (UID: "249a3706-31a6-4fb2-9a3f-94700d9ae30e"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.782357 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0a49fae8-fd33-4a50-b967-e8e8cc48731a-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.782392 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5588d9e2-2f89-4abb-94de-76b5791582a5-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.782404 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0a49fae8-fd33-4a50-b967-e8e8cc48731a-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.782414 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/249a3706-31a6-4fb2-9a3f-94700d9ae30e-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.782424 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0a49fae8-fd33-4a50-b967-e8e8cc48731a-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.782433 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/249a3706-31a6-4fb2-9a3f-94700d9ae30e-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.782443 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a49fae8-fd33-4a50-b967-e8e8cc48731a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.782460 4558 reconciler_common.go:293] "Volume detached for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/e8370668-8857-4be3-bcaa-bae8d6cdd158-ovn-northd-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.785839 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/249a3706-31a6-4fb2-9a3f-94700d9ae30e-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "249a3706-31a6-4fb2-9a3f-94700d9ae30e" (UID: "249a3706-31a6-4fb2-9a3f-94700d9ae30e"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:10 crc kubenswrapper[4558]: E0120 17:16:10.800957 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 521ed414fb8023645d2152fb8453d458e32e8655c7e7d2cd8ec86ea164244611 is running failed: container process not found" containerID="521ed414fb8023645d2152fb8453d458e32e8655c7e7d2cd8ec86ea164244611" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:16:10 crc kubenswrapper[4558]: E0120 17:16:10.801270 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 521ed414fb8023645d2152fb8453d458e32e8655c7e7d2cd8ec86ea164244611 is running failed: container process not found" containerID="521ed414fb8023645d2152fb8453d458e32e8655c7e7d2cd8ec86ea164244611" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:16:10 crc kubenswrapper[4558]: E0120 17:16:10.801534 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 521ed414fb8023645d2152fb8453d458e32e8655c7e7d2cd8ec86ea164244611 is running failed: container process not found" containerID="521ed414fb8023645d2152fb8453d458e32e8655c7e7d2cd8ec86ea164244611" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:16:10 crc kubenswrapper[4558]: E0120 17:16:10.801564 4558 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 521ed414fb8023645d2152fb8453d458e32e8655c7e7d2cd8ec86ea164244611 is running failed: container process not found" probeType="Readiness" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podUID="217a2ebc-8331-4a6f-9113-2c813563a2b8" containerName="nova-cell1-conductor-conductor" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.918640 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/249a3706-31a6-4fb2-9a3f-94700d9ae30e-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.918966 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.964885 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:16:10 crc kubenswrapper[4558]: I0120 17:16:10.979286 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-api-6587ff9c6b-8wmpk" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.024111 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-public-tls-certs\") pod \"f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a\" (UID: \"f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a\") " Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.024833 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-etc-machine-id\") pod \"f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a\" (UID: \"f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a\") " Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.028676 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zmfdr\" (UniqueName: \"kubernetes.io/projected/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-kube-api-access-zmfdr\") pod \"f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a\" (UID: \"f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a\") " Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.028709 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-internal-tls-certs\") pod \"f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a\" (UID: \"f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a\") " Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.028751 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-combined-ca-bundle\") pod \"f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a\" (UID: \"f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a\") " Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.028774 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-logs\") pod \"f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a\" (UID: \"f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a\") " Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.028814 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-scripts\") pod \"f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a\" (UID: \"f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a\") " Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.028832 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-config-data-custom\") pod \"f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a\" (UID: \"f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a\") " Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.028855 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-config-data\") pod \"f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a\" (UID: \"f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a\") " Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.035288 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a" 
(UID: "f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.035626 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-logs" (OuterVolumeSpecName: "logs") pod "f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a" (UID: "f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.069479 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-scripts" (OuterVolumeSpecName: "scripts") pod "f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a" (UID: "f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.070016 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-kube-api-access-zmfdr" (OuterVolumeSpecName: "kube-api-access-zmfdr") pod "f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a" (UID: "f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a"). InnerVolumeSpecName "kube-api-access-zmfdr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.107201 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a" (UID: "f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.107317 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"5588d9e2-2f89-4abb-94de-76b5791582a5","Type":"ContainerDied","Data":"6e377d33262043a09a008c7a0ab4c3ea5568682ab8ca401ccaf1b7b4b5801d9f"} Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.107634 4558 scope.go:117] "RemoveContainer" containerID="674fe7739106de999aea93678b41abfb64307a7d4593b865b7537759b3ee32a7" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.107865 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.120512 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a" (UID: "f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.130250 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qkt95\" (UniqueName: \"kubernetes.io/projected/9116eb6c-0630-4022-9c43-8c8793e08922-kube-api-access-qkt95\") pod \"9116eb6c-0630-4022-9c43-8c8793e08922\" (UID: \"9116eb6c-0630-4022-9c43-8c8793e08922\") " Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.130355 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70dda975-7e0b-40a4-a92a-675be53560b7-combined-ca-bundle\") pod \"70dda975-7e0b-40a4-a92a-675be53560b7\" (UID: \"70dda975-7e0b-40a4-a92a-675be53560b7\") " Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.130385 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9116eb6c-0630-4022-9c43-8c8793e08922-config-data-custom\") pod \"9116eb6c-0630-4022-9c43-8c8793e08922\" (UID: \"9116eb6c-0630-4022-9c43-8c8793e08922\") " Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.130430 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70dda975-7e0b-40a4-a92a-675be53560b7-config-data\") pod \"70dda975-7e0b-40a4-a92a-675be53560b7\" (UID: \"70dda975-7e0b-40a4-a92a-675be53560b7\") " Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.130465 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9116eb6c-0630-4022-9c43-8c8793e08922-config-data\") pod \"9116eb6c-0630-4022-9c43-8c8793e08922\" (UID: \"9116eb6c-0630-4022-9c43-8c8793e08922\") " Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.130502 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9116eb6c-0630-4022-9c43-8c8793e08922-logs\") pod \"9116eb6c-0630-4022-9c43-8c8793e08922\" (UID: \"9116eb6c-0630-4022-9c43-8c8793e08922\") " Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.130534 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9116eb6c-0630-4022-9c43-8c8793e08922-internal-tls-certs\") pod \"9116eb6c-0630-4022-9c43-8c8793e08922\" (UID: \"9116eb6c-0630-4022-9c43-8c8793e08922\") " Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.130578 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9116eb6c-0630-4022-9c43-8c8793e08922-combined-ca-bundle\") pod \"9116eb6c-0630-4022-9c43-8c8793e08922\" (UID: \"9116eb6c-0630-4022-9c43-8c8793e08922\") " Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.130699 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/70dda975-7e0b-40a4-a92a-675be53560b7-logs\") pod \"70dda975-7e0b-40a4-a92a-675be53560b7\" (UID: \"70dda975-7e0b-40a4-a92a-675be53560b7\") " Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.130730 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9116eb6c-0630-4022-9c43-8c8793e08922-public-tls-certs\") pod \"9116eb6c-0630-4022-9c43-8c8793e08922\" (UID: 
\"9116eb6c-0630-4022-9c43-8c8793e08922\") " Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.130749 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/70dda975-7e0b-40a4-a92a-675be53560b7-nova-metadata-tls-certs\") pod \"70dda975-7e0b-40a4-a92a-675be53560b7\" (UID: \"70dda975-7e0b-40a4-a92a-675be53560b7\") " Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.130765 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-45h8b\" (UniqueName: \"kubernetes.io/projected/70dda975-7e0b-40a4-a92a-675be53560b7-kube-api-access-45h8b\") pod \"70dda975-7e0b-40a4-a92a-675be53560b7\" (UID: \"70dda975-7e0b-40a4-a92a-675be53560b7\") " Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.131374 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zmfdr\" (UniqueName: \"kubernetes.io/projected/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-kube-api-access-zmfdr\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.131391 4558 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.131401 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.131409 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.131419 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.131427 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.132654 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9116eb6c-0630-4022-9c43-8c8793e08922-logs" (OuterVolumeSpecName: "logs") pod "9116eb6c-0630-4022-9c43-8c8793e08922" (UID: "9116eb6c-0630-4022-9c43-8c8793e08922"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.155342 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/70dda975-7e0b-40a4-a92a-675be53560b7-logs" (OuterVolumeSpecName: "logs") pod "70dda975-7e0b-40a4-a92a-675be53560b7" (UID: "70dda975-7e0b-40a4-a92a-675be53560b7"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.170788 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/70dda975-7e0b-40a4-a92a-675be53560b7-kube-api-access-45h8b" (OuterVolumeSpecName: "kube-api-access-45h8b") pod "70dda975-7e0b-40a4-a92a-675be53560b7" (UID: "70dda975-7e0b-40a4-a92a-675be53560b7"). InnerVolumeSpecName "kube-api-access-45h8b". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.177388 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9116eb6c-0630-4022-9c43-8c8793e08922-kube-api-access-qkt95" (OuterVolumeSpecName: "kube-api-access-qkt95") pod "9116eb6c-0630-4022-9c43-8c8793e08922" (UID: "9116eb6c-0630-4022-9c43-8c8793e08922"). InnerVolumeSpecName "kube-api-access-qkt95". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.193915 4558 generic.go:334] "Generic (PLEG): container finished" podID="f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a" containerID="46a944f926e2a66f4adf5877139aadf5d8d647d7a08c085ceffe4a3ae8061c0b" exitCode=0 Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.193997 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a","Type":"ContainerDied","Data":"46a944f926e2a66f4adf5877139aadf5d8d647d7a08c085ceffe4a3ae8061c0b"} Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.194024 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a","Type":"ContainerDied","Data":"1e8c549655f4738930aa4cb22db8a5096d0978d8d183b8feadb66c5d3224ddeb"} Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.194140 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.210642 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9116eb6c-0630-4022-9c43-8c8793e08922-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "9116eb6c-0630-4022-9c43-8c8793e08922" (UID: "9116eb6c-0630-4022-9c43-8c8793e08922"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.215056 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-config-data" (OuterVolumeSpecName: "config-data") pod "f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a" (UID: "f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.217482 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-7ddd44bcf8-9tj9c" event={"ID":"249a3706-31a6-4fb2-9a3f-94700d9ae30e","Type":"ContainerDied","Data":"3195aba3a065ce0b79495382f56692fe1c0dcdeb28d7112bbbc4da678d816321"} Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.217585 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-7ddd44bcf8-9tj9c" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.220500 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70dda975-7e0b-40a4-a92a-675be53560b7-config-data" (OuterVolumeSpecName: "config-data") pod "70dda975-7e0b-40a4-a92a-675be53560b7" (UID: "70dda975-7e0b-40a4-a92a-675be53560b7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.233882 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9116eb6c-0630-4022-9c43-8c8793e08922-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.233908 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70dda975-7e0b-40a4-a92a-675be53560b7-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.233918 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9116eb6c-0630-4022-9c43-8c8793e08922-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.233928 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/70dda975-7e0b-40a4-a92a-675be53560b7-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.233938 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-45h8b\" (UniqueName: \"kubernetes.io/projected/70dda975-7e0b-40a4-a92a-675be53560b7-kube-api-access-45h8b\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.233949 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qkt95\" (UniqueName: \"kubernetes.io/projected/9116eb6c-0630-4022-9c43-8c8793e08922-kube-api-access-qkt95\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.233985 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.235606 4558 generic.go:334] "Generic (PLEG): container finished" podID="0a49fae8-fd33-4a50-b967-e8e8cc48731a" containerID="f92d43e68f181ffa640787e9d41e5040d775ba561a1cacb4acbc03876430f695" exitCode=0 Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.235752 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/watcher-api-0" event={"ID":"0a49fae8-fd33-4a50-b967-e8e8cc48731a","Type":"ContainerDied","Data":"f92d43e68f181ffa640787e9d41e5040d775ba561a1cacb4acbc03876430f695"} Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.235787 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/watcher-api-0" event={"ID":"0a49fae8-fd33-4a50-b967-e8e8cc48731a","Type":"ContainerDied","Data":"e74604d2c7d949fd19df578aba3942d98f6666bf0d5ad3062afde5dc44fe6f40"} Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.235929 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/watcher-api-0" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.239507 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70dda975-7e0b-40a4-a92a-675be53560b7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "70dda975-7e0b-40a4-a92a-675be53560b7" (UID: "70dda975-7e0b-40a4-a92a-675be53560b7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.245312 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a" (UID: "f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.250551 4558 generic.go:334] "Generic (PLEG): container finished" podID="158b4a4e-78aa-4813-a00e-abddfb7214ef" containerID="32341c380c826a7aa830f2d1cd35459c6a706f48a875899fa1c9c906b440f63c" exitCode=0 Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.250597 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"158b4a4e-78aa-4813-a00e-abddfb7214ef","Type":"ContainerDied","Data":"32341c380c826a7aa830f2d1cd35459c6a706f48a875899fa1c9c906b440f63c"} Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.250617 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"158b4a4e-78aa-4813-a00e-abddfb7214ef","Type":"ContainerDied","Data":"52104433e07b8f12e8c1abd6d48e18ef5e93a62555059f9dad9e59eb47911179"} Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.250628 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="52104433e07b8f12e8c1abd6d48e18ef5e93a62555059f9dad9e59eb47911179" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.252504 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"a9ada454-f6fa-4ea4-b386-3ddbcd37f233","Type":"ContainerDied","Data":"7c1ae5ffe5f58759e7773584026e6bc8dbd274362dd8c71eba12a88a090c95de"} Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.252592 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.265479 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70dda975-7e0b-40a4-a92a-675be53560b7-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "70dda975-7e0b-40a4-a92a-675be53560b7" (UID: "70dda975-7e0b-40a4-a92a-675be53560b7"). InnerVolumeSpecName "nova-metadata-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.276532 4558 generic.go:334] "Generic (PLEG): container finished" podID="217a2ebc-8331-4a6f-9113-2c813563a2b8" containerID="521ed414fb8023645d2152fb8453d458e32e8655c7e7d2cd8ec86ea164244611" exitCode=0 Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.276601 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"217a2ebc-8331-4a6f-9113-2c813563a2b8","Type":"ContainerDied","Data":"521ed414fb8023645d2152fb8453d458e32e8655c7e7d2cd8ec86ea164244611"} Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.284232 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9116eb6c-0630-4022-9c43-8c8793e08922-config-data" (OuterVolumeSpecName: "config-data") pod "9116eb6c-0630-4022-9c43-8c8793e08922" (UID: "9116eb6c-0630-4022-9c43-8c8793e08922"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.293149 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9116eb6c-0630-4022-9c43-8c8793e08922-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "9116eb6c-0630-4022-9c43-8c8793e08922" (UID: "9116eb6c-0630-4022-9c43-8c8793e08922"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.295268 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/prometheus-metric-storage-0" podUID="e87a4844-8a90-4965-878f-d95aa09c47bb" containerName="prometheus" probeResult="failure" output="Get \"https://10.217.1.202:9090/-/ready\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.301582 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9116eb6c-0630-4022-9c43-8c8793e08922-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9116eb6c-0630-4022-9c43-8c8793e08922" (UID: "9116eb6c-0630-4022-9c43-8c8793e08922"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.303065 4558 generic.go:334] "Generic (PLEG): container finished" podID="30b9f169-eef8-4a30-b543-83dda735da70" containerID="146969194ab0a40e402393f07d4594aaad069b32a10f5e882b7b5d9c1bed6628" exitCode=0 Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.303154 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/watcher-applier-0" event={"ID":"30b9f169-eef8-4a30-b543-83dda735da70","Type":"ContainerDied","Data":"146969194ab0a40e402393f07d4594aaad069b32a10f5e882b7b5d9c1bed6628"} Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.303212 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/watcher-applier-0" event={"ID":"30b9f169-eef8-4a30-b543-83dda735da70","Type":"ContainerDied","Data":"9dc1080b55204391c2413ceefa32984f19b36cde05aade1975a34ec4f434e74a"} Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.303225 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9dc1080b55204391c2413ceefa32984f19b36cde05aade1975a34ec4f434e74a" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.308146 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovn-northd-0_e8370668-8857-4be3-bcaa-bae8d6cdd158/ovn-northd/0.log" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.308396 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.308444 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"e8370668-8857-4be3-bcaa-bae8d6cdd158","Type":"ContainerDied","Data":"19a79edaa42605a563f764f37b4accff1f775f3b2ba6ff2f66a1ec82094bce5f"} Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.310742 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a" (UID: "f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.318714 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9116eb6c-0630-4022-9c43-8c8793e08922-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "9116eb6c-0630-4022-9c43-8c8793e08922" (UID: "9116eb6c-0630-4022-9c43-8c8793e08922"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.324822 4558 generic.go:334] "Generic (PLEG): container finished" podID="70dda975-7e0b-40a4-a92a-675be53560b7" containerID="dc90c73cc956157f0d2205b7cbb05b8859c8423df552df5ea7b67e88690ae584" exitCode=0 Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.324873 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.324911 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"70dda975-7e0b-40a4-a92a-675be53560b7","Type":"ContainerDied","Data":"dc90c73cc956157f0d2205b7cbb05b8859c8423df552df5ea7b67e88690ae584"} Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.324969 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"70dda975-7e0b-40a4-a92a-675be53560b7","Type":"ContainerDied","Data":"fe17107afca6886a9153cdacb46fecd00018063c05331260ddc6d05020f827a3"} Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.330983 4558 generic.go:334] "Generic (PLEG): container finished" podID="9116eb6c-0630-4022-9c43-8c8793e08922" containerID="30ace2ed0b9d212ff18f8956aaa8ea0cdaa3a1c7b26e2fe661e7f78b0f3795a2" exitCode=0 Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.331071 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/watcher-be54-account-create-update-jd6tl" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.331670 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.331684 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-api-6587ff9c6b-8wmpk" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.331728 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-5773-account-create-update-qmwj7" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.331748 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-d62f-account-create-update-8xjrt" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.331761 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-6587ff9c6b-8wmpk" event={"ID":"9116eb6c-0630-4022-9c43-8c8793e08922","Type":"ContainerDied","Data":"30ace2ed0b9d212ff18f8956aaa8ea0cdaa3a1c7b26e2fe661e7f78b0f3795a2"} Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.338320 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-6587ff9c6b-8wmpk" event={"ID":"9116eb6c-0630-4022-9c43-8c8793e08922","Type":"ContainerDied","Data":"5f7acaada2e998fa3812813955fef087ffa0d3ed68263d891cd46b28367e673f"} Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.340524 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.340550 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9116eb6c-0630-4022-9c43-8c8793e08922-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.340563 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9116eb6c-0630-4022-9c43-8c8793e08922-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.340572 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9116eb6c-0630-4022-9c43-8c8793e08922-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.340582 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9116eb6c-0630-4022-9c43-8c8793e08922-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.340590 4558 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/70dda975-7e0b-40a4-a92a-675be53560b7-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.340599 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.340701 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70dda975-7e0b-40a4-a92a-675be53560b7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.610890 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.612555 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/watcher-applier-0" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.612618 4558 scope.go:117] "RemoveContainer" containerID="f7e6285aee41adbd48413910cd7a5c3aa34306a43105d42dd032ba571f9b7d5c" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.618001 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.624680 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-57f6f664bd-7h7jh" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.656646 4558 scope.go:117] "RemoveContainer" containerID="46a944f926e2a66f4adf5877139aadf5d8d647d7a08c085ceffe4a3ae8061c0b" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.689566 4558 scope.go:117] "RemoveContainer" containerID="1e85d135d8cc85e410ab61877d8a2508b9c4604a7dac00b92f5266230868a2eb" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.704649 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/watcher-api-0"] Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.716526 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/watcher-api-0"] Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.721239 4558 scope.go:117] "RemoveContainer" containerID="46a944f926e2a66f4adf5877139aadf5d8d647d7a08c085ceffe4a3ae8061c0b" Jan 20 17:16:11 crc kubenswrapper[4558]: E0120 17:16:11.721683 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"46a944f926e2a66f4adf5877139aadf5d8d647d7a08c085ceffe4a3ae8061c0b\": container with ID starting with 46a944f926e2a66f4adf5877139aadf5d8d647d7a08c085ceffe4a3ae8061c0b not found: ID does not exist" containerID="46a944f926e2a66f4adf5877139aadf5d8d647d7a08c085ceffe4a3ae8061c0b" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.721714 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"46a944f926e2a66f4adf5877139aadf5d8d647d7a08c085ceffe4a3ae8061c0b"} err="failed to get container status \"46a944f926e2a66f4adf5877139aadf5d8d647d7a08c085ceffe4a3ae8061c0b\": rpc error: code = NotFound desc = could not find container \"46a944f926e2a66f4adf5877139aadf5d8d647d7a08c085ceffe4a3ae8061c0b\": container with ID starting with 46a944f926e2a66f4adf5877139aadf5d8d647d7a08c085ceffe4a3ae8061c0b not found: ID does not exist" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.721739 4558 scope.go:117] "RemoveContainer" containerID="1e85d135d8cc85e410ab61877d8a2508b9c4604a7dac00b92f5266230868a2eb" Jan 20 17:16:11 crc kubenswrapper[4558]: E0120 17:16:11.722461 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1e85d135d8cc85e410ab61877d8a2508b9c4604a7dac00b92f5266230868a2eb\": container with ID starting with 1e85d135d8cc85e410ab61877d8a2508b9c4604a7dac00b92f5266230868a2eb not found: ID does not exist" containerID="1e85d135d8cc85e410ab61877d8a2508b9c4604a7dac00b92f5266230868a2eb" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.722511 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1e85d135d8cc85e410ab61877d8a2508b9c4604a7dac00b92f5266230868a2eb"} err="failed to get container status \"1e85d135d8cc85e410ab61877d8a2508b9c4604a7dac00b92f5266230868a2eb\": rpc error: 
code = NotFound desc = could not find container \"1e85d135d8cc85e410ab61877d8a2508b9c4604a7dac00b92f5266230868a2eb\": container with ID starting with 1e85d135d8cc85e410ab61877d8a2508b9c4604a7dac00b92f5266230868a2eb not found: ID does not exist" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.722542 4558 scope.go:117] "RemoveContainer" containerID="1d078bc9b7292d0036875780ff82d54713ce56fd9923a103f4645e0f8a09e9ff" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.723784 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.727668 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.736377 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.738717 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.749595 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-5773-account-create-update-qmwj7"] Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.754197 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-5773-account-create-update-qmwj7"] Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.761335 4558 scope.go:117] "RemoveContainer" containerID="5a1fe60313509d6c2bf1b4b977c2854c74261997efa724134b9eef9555b92ec0" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.765234 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/158b4a4e-78aa-4813-a00e-abddfb7214ef-combined-ca-bundle\") pod \"158b4a4e-78aa-4813-a00e-abddfb7214ef\" (UID: \"158b4a4e-78aa-4813-a00e-abddfb7214ef\") " Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.766955 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nx687\" (UniqueName: \"kubernetes.io/projected/5efe2c5d-bf53-462a-beb1-36cb940fc6a0-kube-api-access-nx687\") pod \"5efe2c5d-bf53-462a-beb1-36cb940fc6a0\" (UID: \"5efe2c5d-bf53-462a-beb1-36cb940fc6a0\") " Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.767038 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s6jzx\" (UniqueName: \"kubernetes.io/projected/158b4a4e-78aa-4813-a00e-abddfb7214ef-kube-api-access-s6jzx\") pod \"158b4a4e-78aa-4813-a00e-abddfb7214ef\" (UID: \"158b4a4e-78aa-4813-a00e-abddfb7214ef\") " Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.767097 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rbfpb\" (UniqueName: \"kubernetes.io/projected/217a2ebc-8331-4a6f-9113-2c813563a2b8-kube-api-access-rbfpb\") pod \"217a2ebc-8331-4a6f-9113-2c813563a2b8\" (UID: \"217a2ebc-8331-4a6f-9113-2c813563a2b8\") " Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.767145 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5efe2c5d-bf53-462a-beb1-36cb940fc6a0-public-tls-certs\") pod \"5efe2c5d-bf53-462a-beb1-36cb940fc6a0\" (UID: \"5efe2c5d-bf53-462a-beb1-36cb940fc6a0\") " Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.767235 4558 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/217a2ebc-8331-4a6f-9113-2c813563a2b8-config-data\") pod \"217a2ebc-8331-4a6f-9113-2c813563a2b8\" (UID: \"217a2ebc-8331-4a6f-9113-2c813563a2b8\") " Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.767260 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/5efe2c5d-bf53-462a-beb1-36cb940fc6a0-fernet-keys\") pod \"5efe2c5d-bf53-462a-beb1-36cb940fc6a0\" (UID: \"5efe2c5d-bf53-462a-beb1-36cb940fc6a0\") " Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.767282 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30b9f169-eef8-4a30-b543-83dda735da70-combined-ca-bundle\") pod \"30b9f169-eef8-4a30-b543-83dda735da70\" (UID: \"30b9f169-eef8-4a30-b543-83dda735da70\") " Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.767309 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/217a2ebc-8331-4a6f-9113-2c813563a2b8-combined-ca-bundle\") pod \"217a2ebc-8331-4a6f-9113-2c813563a2b8\" (UID: \"217a2ebc-8331-4a6f-9113-2c813563a2b8\") " Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.767335 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/158b4a4e-78aa-4813-a00e-abddfb7214ef-internal-tls-certs\") pod \"158b4a4e-78aa-4813-a00e-abddfb7214ef\" (UID: \"158b4a4e-78aa-4813-a00e-abddfb7214ef\") " Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.767360 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mj5nf\" (UniqueName: \"kubernetes.io/projected/30b9f169-eef8-4a30-b543-83dda735da70-kube-api-access-mj5nf\") pod \"30b9f169-eef8-4a30-b543-83dda735da70\" (UID: \"30b9f169-eef8-4a30-b543-83dda735da70\") " Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.767379 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/158b4a4e-78aa-4813-a00e-abddfb7214ef-public-tls-certs\") pod \"158b4a4e-78aa-4813-a00e-abddfb7214ef\" (UID: \"158b4a4e-78aa-4813-a00e-abddfb7214ef\") " Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.767408 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/5efe2c5d-bf53-462a-beb1-36cb940fc6a0-credential-keys\") pod \"5efe2c5d-bf53-462a-beb1-36cb940fc6a0\" (UID: \"5efe2c5d-bf53-462a-beb1-36cb940fc6a0\") " Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.767426 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5efe2c5d-bf53-462a-beb1-36cb940fc6a0-internal-tls-certs\") pod \"5efe2c5d-bf53-462a-beb1-36cb940fc6a0\" (UID: \"5efe2c5d-bf53-462a-beb1-36cb940fc6a0\") " Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.767441 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5efe2c5d-bf53-462a-beb1-36cb940fc6a0-scripts\") pod \"5efe2c5d-bf53-462a-beb1-36cb940fc6a0\" (UID: \"5efe2c5d-bf53-462a-beb1-36cb940fc6a0\") " Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.767496 4558 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/30b9f169-eef8-4a30-b543-83dda735da70-logs\") pod \"30b9f169-eef8-4a30-b543-83dda735da70\" (UID: \"30b9f169-eef8-4a30-b543-83dda735da70\") " Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.767518 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30b9f169-eef8-4a30-b543-83dda735da70-config-data\") pod \"30b9f169-eef8-4a30-b543-83dda735da70\" (UID: \"30b9f169-eef8-4a30-b543-83dda735da70\") " Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.767553 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5efe2c5d-bf53-462a-beb1-36cb940fc6a0-combined-ca-bundle\") pod \"5efe2c5d-bf53-462a-beb1-36cb940fc6a0\" (UID: \"5efe2c5d-bf53-462a-beb1-36cb940fc6a0\") " Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.767579 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5efe2c5d-bf53-462a-beb1-36cb940fc6a0-config-data\") pod \"5efe2c5d-bf53-462a-beb1-36cb940fc6a0\" (UID: \"5efe2c5d-bf53-462a-beb1-36cb940fc6a0\") " Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.767595 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/158b4a4e-78aa-4813-a00e-abddfb7214ef-logs\") pod \"158b4a4e-78aa-4813-a00e-abddfb7214ef\" (UID: \"158b4a4e-78aa-4813-a00e-abddfb7214ef\") " Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.767609 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/158b4a4e-78aa-4813-a00e-abddfb7214ef-config-data\") pod \"158b4a4e-78aa-4813-a00e-abddfb7214ef\" (UID: \"158b4a4e-78aa-4813-a00e-abddfb7214ef\") " Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.768761 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-d62f-account-create-update-8xjrt"] Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.769146 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/30b9f169-eef8-4a30-b543-83dda735da70-logs" (OuterVolumeSpecName: "logs") pod "30b9f169-eef8-4a30-b543-83dda735da70" (UID: "30b9f169-eef8-4a30-b543-83dda735da70"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.777217 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5efe2c5d-bf53-462a-beb1-36cb940fc6a0-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "5efe2c5d-bf53-462a-beb1-36cb940fc6a0" (UID: "5efe2c5d-bf53-462a-beb1-36cb940fc6a0"). InnerVolumeSpecName "credential-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.777548 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-d62f-account-create-update-8xjrt"] Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.781377 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/158b4a4e-78aa-4813-a00e-abddfb7214ef-kube-api-access-s6jzx" (OuterVolumeSpecName: "kube-api-access-s6jzx") pod "158b4a4e-78aa-4813-a00e-abddfb7214ef" (UID: "158b4a4e-78aa-4813-a00e-abddfb7214ef"). InnerVolumeSpecName "kube-api-access-s6jzx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.781556 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/30b9f169-eef8-4a30-b543-83dda735da70-kube-api-access-mj5nf" (OuterVolumeSpecName: "kube-api-access-mj5nf") pod "30b9f169-eef8-4a30-b543-83dda735da70" (UID: "30b9f169-eef8-4a30-b543-83dda735da70"). InnerVolumeSpecName "kube-api-access-mj5nf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.783120 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/158b4a4e-78aa-4813-a00e-abddfb7214ef-logs" (OuterVolumeSpecName: "logs") pod "158b4a4e-78aa-4813-a00e-abddfb7214ef" (UID: "158b4a4e-78aa-4813-a00e-abddfb7214ef"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.787297 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.788122 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/217a2ebc-8331-4a6f-9113-2c813563a2b8-kube-api-access-rbfpb" (OuterVolumeSpecName: "kube-api-access-rbfpb") pod "217a2ebc-8331-4a6f-9113-2c813563a2b8" (UID: "217a2ebc-8331-4a6f-9113-2c813563a2b8"). InnerVolumeSpecName "kube-api-access-rbfpb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.796012 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.799291 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.803525 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.804321 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5efe2c5d-bf53-462a-beb1-36cb940fc6a0-scripts" (OuterVolumeSpecName: "scripts") pod "5efe2c5d-bf53-462a-beb1-36cb940fc6a0" (UID: "5efe2c5d-bf53-462a-beb1-36cb940fc6a0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.804348 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5efe2c5d-bf53-462a-beb1-36cb940fc6a0-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "5efe2c5d-bf53-462a-beb1-36cb940fc6a0" (UID: "5efe2c5d-bf53-462a-beb1-36cb940fc6a0"). InnerVolumeSpecName "fernet-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.804328 4558 scope.go:117] "RemoveContainer" containerID="f92d43e68f181ffa640787e9d41e5040d775ba561a1cacb4acbc03876430f695" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.804368 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5efe2c5d-bf53-462a-beb1-36cb940fc6a0-kube-api-access-nx687" (OuterVolumeSpecName: "kube-api-access-nx687") pod "5efe2c5d-bf53-462a-beb1-36cb940fc6a0" (UID: "5efe2c5d-bf53-462a-beb1-36cb940fc6a0"). InnerVolumeSpecName "kube-api-access-nx687". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.807536 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-api-6587ff9c6b-8wmpk"] Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.813662 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-api-6587ff9c6b-8wmpk"] Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.814279 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/158b4a4e-78aa-4813-a00e-abddfb7214ef-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "158b4a4e-78aa-4813-a00e-abddfb7214ef" (UID: "158b4a4e-78aa-4813-a00e-abddfb7214ef"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.814674 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/30b9f169-eef8-4a30-b543-83dda735da70-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "30b9f169-eef8-4a30-b543-83dda735da70" (UID: "30b9f169-eef8-4a30-b543-83dda735da70"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.820156 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.826663 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.827739 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/217a2ebc-8331-4a6f-9113-2c813563a2b8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "217a2ebc-8331-4a6f-9113-2c813563a2b8" (UID: "217a2ebc-8331-4a6f-9113-2c813563a2b8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.830134 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5efe2c5d-bf53-462a-beb1-36cb940fc6a0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5efe2c5d-bf53-462a-beb1-36cb940fc6a0" (UID: "5efe2c5d-bf53-462a-beb1-36cb940fc6a0"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.831345 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-7ddd44bcf8-9tj9c"] Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.835573 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5efe2c5d-bf53-462a-beb1-36cb940fc6a0-config-data" (OuterVolumeSpecName: "config-data") pod "5efe2c5d-bf53-462a-beb1-36cb940fc6a0" (UID: "5efe2c5d-bf53-462a-beb1-36cb940fc6a0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.839580 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/placement-7ddd44bcf8-9tj9c"] Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.845393 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.850866 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/30b9f169-eef8-4a30-b543-83dda735da70-config-data" (OuterVolumeSpecName: "config-data") pod "30b9f169-eef8-4a30-b543-83dda735da70" (UID: "30b9f169-eef8-4a30-b543-83dda735da70"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.854239 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5efe2c5d-bf53-462a-beb1-36cb940fc6a0-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "5efe2c5d-bf53-462a-beb1-36cb940fc6a0" (UID: "5efe2c5d-bf53-462a-beb1-36cb940fc6a0"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.857204 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/217a2ebc-8331-4a6f-9113-2c813563a2b8-config-data" (OuterVolumeSpecName: "config-data") pod "217a2ebc-8331-4a6f-9113-2c813563a2b8" (UID: "217a2ebc-8331-4a6f-9113-2c813563a2b8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.860464 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/158b4a4e-78aa-4813-a00e-abddfb7214ef-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "158b4a4e-78aa-4813-a00e-abddfb7214ef" (UID: "158b4a4e-78aa-4813-a00e-abddfb7214ef"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.860998 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.861756 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/158b4a4e-78aa-4813-a00e-abddfb7214ef-config-data" (OuterVolumeSpecName: "config-data") pod "158b4a4e-78aa-4813-a00e-abddfb7214ef" (UID: "158b4a4e-78aa-4813-a00e-abddfb7214ef"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.863536 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/158b4a4e-78aa-4813-a00e-abddfb7214ef-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "158b4a4e-78aa-4813-a00e-abddfb7214ef" (UID: "158b4a4e-78aa-4813-a00e-abddfb7214ef"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.872413 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/watcher-be54-account-create-update-jd6tl"] Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.873876 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30b9f169-eef8-4a30-b543-83dda735da70-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.873902 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5efe2c5d-bf53-462a-beb1-36cb940fc6a0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.873917 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5efe2c5d-bf53-462a-beb1-36cb940fc6a0-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.873929 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/158b4a4e-78aa-4813-a00e-abddfb7214ef-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.873938 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/158b4a4e-78aa-4813-a00e-abddfb7214ef-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.873948 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/158b4a4e-78aa-4813-a00e-abddfb7214ef-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.873959 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nx687\" (UniqueName: \"kubernetes.io/projected/5efe2c5d-bf53-462a-beb1-36cb940fc6a0-kube-api-access-nx687\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.873969 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s6jzx\" (UniqueName: \"kubernetes.io/projected/158b4a4e-78aa-4813-a00e-abddfb7214ef-kube-api-access-s6jzx\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.873980 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rbfpb\" (UniqueName: \"kubernetes.io/projected/217a2ebc-8331-4a6f-9113-2c813563a2b8-kube-api-access-rbfpb\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.873990 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/217a2ebc-8331-4a6f-9113-2c813563a2b8-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.874000 4558 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: 
\"kubernetes.io/secret/5efe2c5d-bf53-462a-beb1-36cb940fc6a0-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.874010 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30b9f169-eef8-4a30-b543-83dda735da70-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.874019 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/217a2ebc-8331-4a6f-9113-2c813563a2b8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.874032 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/158b4a4e-78aa-4813-a00e-abddfb7214ef-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.874042 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mj5nf\" (UniqueName: \"kubernetes.io/projected/30b9f169-eef8-4a30-b543-83dda735da70-kube-api-access-mj5nf\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.874051 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/158b4a4e-78aa-4813-a00e-abddfb7214ef-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.874061 4558 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/5efe2c5d-bf53-462a-beb1-36cb940fc6a0-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.874070 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5efe2c5d-bf53-462a-beb1-36cb940fc6a0-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.874079 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5efe2c5d-bf53-462a-beb1-36cb940fc6a0-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.874088 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/30b9f169-eef8-4a30-b543-83dda735da70-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.894280 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/watcher-be54-account-create-update-jd6tl"] Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.897026 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5efe2c5d-bf53-462a-beb1-36cb940fc6a0-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "5efe2c5d-bf53-462a-beb1-36cb940fc6a0" (UID: "5efe2c5d-bf53-462a-beb1-36cb940fc6a0"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.900804 4558 scope.go:117] "RemoveContainer" containerID="8d223b6be945d7320575fde68951e2dcf11d7174379c849a669c1180f13b9071" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.920517 4558 scope.go:117] "RemoveContainer" containerID="f92d43e68f181ffa640787e9d41e5040d775ba561a1cacb4acbc03876430f695" Jan 20 17:16:11 crc kubenswrapper[4558]: E0120 17:16:11.920900 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f92d43e68f181ffa640787e9d41e5040d775ba561a1cacb4acbc03876430f695\": container with ID starting with f92d43e68f181ffa640787e9d41e5040d775ba561a1cacb4acbc03876430f695 not found: ID does not exist" containerID="f92d43e68f181ffa640787e9d41e5040d775ba561a1cacb4acbc03876430f695" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.920933 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f92d43e68f181ffa640787e9d41e5040d775ba561a1cacb4acbc03876430f695"} err="failed to get container status \"f92d43e68f181ffa640787e9d41e5040d775ba561a1cacb4acbc03876430f695\": rpc error: code = NotFound desc = could not find container \"f92d43e68f181ffa640787e9d41e5040d775ba561a1cacb4acbc03876430f695\": container with ID starting with f92d43e68f181ffa640787e9d41e5040d775ba561a1cacb4acbc03876430f695 not found: ID does not exist" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.920958 4558 scope.go:117] "RemoveContainer" containerID="8d223b6be945d7320575fde68951e2dcf11d7174379c849a669c1180f13b9071" Jan 20 17:16:11 crc kubenswrapper[4558]: E0120 17:16:11.921369 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8d223b6be945d7320575fde68951e2dcf11d7174379c849a669c1180f13b9071\": container with ID starting with 8d223b6be945d7320575fde68951e2dcf11d7174379c849a669c1180f13b9071 not found: ID does not exist" containerID="8d223b6be945d7320575fde68951e2dcf11d7174379c849a669c1180f13b9071" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.921398 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8d223b6be945d7320575fde68951e2dcf11d7174379c849a669c1180f13b9071"} err="failed to get container status \"8d223b6be945d7320575fde68951e2dcf11d7174379c849a669c1180f13b9071\": rpc error: code = NotFound desc = could not find container \"8d223b6be945d7320575fde68951e2dcf11d7174379c849a669c1180f13b9071\": container with ID starting with 8d223b6be945d7320575fde68951e2dcf11d7174379c849a669c1180f13b9071 not found: ID does not exist" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.921422 4558 scope.go:117] "RemoveContainer" containerID="dbed1dfb06d84c003e0890032b6a972606b43286eee0111f21ef16dfa9e94a1d" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.940398 4558 scope.go:117] "RemoveContainer" containerID="5259e35b7300917470585b85e57577b5c54cf041fcc22c4114363695b0632f21" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.955476 4558 scope.go:117] "RemoveContainer" containerID="642cff711dbe4d54cbee09e66959eb32d2d26fed71a8ef14e847e8bed60f8ee8" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.971318 4558 scope.go:117] "RemoveContainer" containerID="a07ebe0308220308f5b0687be7bf7284b0986dfc78e64ccb177087d9a970bb3f" Jan 20 17:16:11 crc kubenswrapper[4558]: I0120 17:16:11.974825 4558 reconciler_common.go:293] "Volume detached for volume 
\"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5efe2c5d-bf53-462a-beb1-36cb940fc6a0-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:12 crc kubenswrapper[4558]: I0120 17:16:12.000765 4558 scope.go:117] "RemoveContainer" containerID="dc90c73cc956157f0d2205b7cbb05b8859c8423df552df5ea7b67e88690ae584" Jan 20 17:16:12 crc kubenswrapper[4558]: I0120 17:16:12.026324 4558 scope.go:117] "RemoveContainer" containerID="4d6bd322347a5bd72fd3102a79147d9af8b5b4497067a83772ce8835798883b0" Jan 20 17:16:12 crc kubenswrapper[4558]: I0120 17:16:12.053896 4558 scope.go:117] "RemoveContainer" containerID="dc90c73cc956157f0d2205b7cbb05b8859c8423df552df5ea7b67e88690ae584" Jan 20 17:16:12 crc kubenswrapper[4558]: E0120 17:16:12.054296 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dc90c73cc956157f0d2205b7cbb05b8859c8423df552df5ea7b67e88690ae584\": container with ID starting with dc90c73cc956157f0d2205b7cbb05b8859c8423df552df5ea7b67e88690ae584 not found: ID does not exist" containerID="dc90c73cc956157f0d2205b7cbb05b8859c8423df552df5ea7b67e88690ae584" Jan 20 17:16:12 crc kubenswrapper[4558]: I0120 17:16:12.054346 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dc90c73cc956157f0d2205b7cbb05b8859c8423df552df5ea7b67e88690ae584"} err="failed to get container status \"dc90c73cc956157f0d2205b7cbb05b8859c8423df552df5ea7b67e88690ae584\": rpc error: code = NotFound desc = could not find container \"dc90c73cc956157f0d2205b7cbb05b8859c8423df552df5ea7b67e88690ae584\": container with ID starting with dc90c73cc956157f0d2205b7cbb05b8859c8423df552df5ea7b67e88690ae584 not found: ID does not exist" Jan 20 17:16:12 crc kubenswrapper[4558]: I0120 17:16:12.054376 4558 scope.go:117] "RemoveContainer" containerID="4d6bd322347a5bd72fd3102a79147d9af8b5b4497067a83772ce8835798883b0" Jan 20 17:16:12 crc kubenswrapper[4558]: E0120 17:16:12.054730 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4d6bd322347a5bd72fd3102a79147d9af8b5b4497067a83772ce8835798883b0\": container with ID starting with 4d6bd322347a5bd72fd3102a79147d9af8b5b4497067a83772ce8835798883b0 not found: ID does not exist" containerID="4d6bd322347a5bd72fd3102a79147d9af8b5b4497067a83772ce8835798883b0" Jan 20 17:16:12 crc kubenswrapper[4558]: I0120 17:16:12.054757 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4d6bd322347a5bd72fd3102a79147d9af8b5b4497067a83772ce8835798883b0"} err="failed to get container status \"4d6bd322347a5bd72fd3102a79147d9af8b5b4497067a83772ce8835798883b0\": rpc error: code = NotFound desc = could not find container \"4d6bd322347a5bd72fd3102a79147d9af8b5b4497067a83772ce8835798883b0\": container with ID starting with 4d6bd322347a5bd72fd3102a79147d9af8b5b4497067a83772ce8835798883b0 not found: ID does not exist" Jan 20 17:16:12 crc kubenswrapper[4558]: I0120 17:16:12.054779 4558 scope.go:117] "RemoveContainer" containerID="30ace2ed0b9d212ff18f8956aaa8ea0cdaa3a1c7b26e2fe661e7f78b0f3795a2" Jan 20 17:16:12 crc kubenswrapper[4558]: I0120 17:16:12.078314 4558 scope.go:117] "RemoveContainer" containerID="6da9a00e2025f693c02a7cb6b25aca4e9c3ecc4fc8e7c3494f4026506bdec6c1" Jan 20 17:16:12 crc kubenswrapper[4558]: I0120 17:16:12.107549 4558 scope.go:117] "RemoveContainer" containerID="30ace2ed0b9d212ff18f8956aaa8ea0cdaa3a1c7b26e2fe661e7f78b0f3795a2" Jan 20 17:16:12 crc 
kubenswrapper[4558]: E0120 17:16:12.107904 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"30ace2ed0b9d212ff18f8956aaa8ea0cdaa3a1c7b26e2fe661e7f78b0f3795a2\": container with ID starting with 30ace2ed0b9d212ff18f8956aaa8ea0cdaa3a1c7b26e2fe661e7f78b0f3795a2 not found: ID does not exist" containerID="30ace2ed0b9d212ff18f8956aaa8ea0cdaa3a1c7b26e2fe661e7f78b0f3795a2" Jan 20 17:16:12 crc kubenswrapper[4558]: I0120 17:16:12.107939 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"30ace2ed0b9d212ff18f8956aaa8ea0cdaa3a1c7b26e2fe661e7f78b0f3795a2"} err="failed to get container status \"30ace2ed0b9d212ff18f8956aaa8ea0cdaa3a1c7b26e2fe661e7f78b0f3795a2\": rpc error: code = NotFound desc = could not find container \"30ace2ed0b9d212ff18f8956aaa8ea0cdaa3a1c7b26e2fe661e7f78b0f3795a2\": container with ID starting with 30ace2ed0b9d212ff18f8956aaa8ea0cdaa3a1c7b26e2fe661e7f78b0f3795a2 not found: ID does not exist" Jan 20 17:16:12 crc kubenswrapper[4558]: I0120 17:16:12.107962 4558 scope.go:117] "RemoveContainer" containerID="6da9a00e2025f693c02a7cb6b25aca4e9c3ecc4fc8e7c3494f4026506bdec6c1" Jan 20 17:16:12 crc kubenswrapper[4558]: E0120 17:16:12.108292 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6da9a00e2025f693c02a7cb6b25aca4e9c3ecc4fc8e7c3494f4026506bdec6c1\": container with ID starting with 6da9a00e2025f693c02a7cb6b25aca4e9c3ecc4fc8e7c3494f4026506bdec6c1 not found: ID does not exist" containerID="6da9a00e2025f693c02a7cb6b25aca4e9c3ecc4fc8e7c3494f4026506bdec6c1" Jan 20 17:16:12 crc kubenswrapper[4558]: I0120 17:16:12.108317 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6da9a00e2025f693c02a7cb6b25aca4e9c3ecc4fc8e7c3494f4026506bdec6c1"} err="failed to get container status \"6da9a00e2025f693c02a7cb6b25aca4e9c3ecc4fc8e7c3494f4026506bdec6c1\": rpc error: code = NotFound desc = could not find container \"6da9a00e2025f693c02a7cb6b25aca4e9c3ecc4fc8e7c3494f4026506bdec6c1\": container with ID starting with 6da9a00e2025f693c02a7cb6b25aca4e9c3ecc4fc8e7c3494f4026506bdec6c1 not found: ID does not exist" Jan 20 17:16:12 crc kubenswrapper[4558]: E0120 17:16:12.281910 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 20 17:16:12 crc kubenswrapper[4558]: E0120 17:16:12.281999 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/fd2edfa4-790f-49d7-9e32-29571e490aaf-config-data podName:fd2edfa4-790f-49d7-9e32-29571e490aaf nodeName:}" failed. No retries permitted until 2026-01-20 17:16:20.281974609 +0000 UTC m=+2074.042312576 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/fd2edfa4-790f-49d7-9e32-29571e490aaf-config-data") pod "rabbitmq-server-0" (UID: "fd2edfa4-790f-49d7-9e32-29571e490aaf") : configmap "rabbitmq-config-data" not found Jan 20 17:16:12 crc kubenswrapper[4558]: I0120 17:16:12.362382 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"217a2ebc-8331-4a6f-9113-2c813563a2b8","Type":"ContainerDied","Data":"fbc48c8bed18e946f067cb8bb8a942077ad48821a9531de67a200f0aa5a7d292"} Jan 20 17:16:12 crc kubenswrapper[4558]: I0120 17:16:12.362450 4558 scope.go:117] "RemoveContainer" containerID="521ed414fb8023645d2152fb8453d458e32e8655c7e7d2cd8ec86ea164244611" Jan 20 17:16:12 crc kubenswrapper[4558]: I0120 17:16:12.362639 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:16:12 crc kubenswrapper[4558]: I0120 17:16:12.368413 4558 generic.go:334] "Generic (PLEG): container finished" podID="5efe2c5d-bf53-462a-beb1-36cb940fc6a0" containerID="9ebec879cfd3dcec199dea1e7738ccfa45af9b5be157ef55c3e4756d96cca6ba" exitCode=0 Jan 20 17:16:12 crc kubenswrapper[4558]: I0120 17:16:12.369141 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/watcher-applier-0" Jan 20 17:16:12 crc kubenswrapper[4558]: I0120 17:16:12.368518 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-57f6f664bd-7h7jh" Jan 20 17:16:12 crc kubenswrapper[4558]: I0120 17:16:12.369587 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:16:12 crc kubenswrapper[4558]: I0120 17:16:12.368509 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-57f6f664bd-7h7jh" event={"ID":"5efe2c5d-bf53-462a-beb1-36cb940fc6a0","Type":"ContainerDied","Data":"9ebec879cfd3dcec199dea1e7738ccfa45af9b5be157ef55c3e4756d96cca6ba"} Jan 20 17:16:12 crc kubenswrapper[4558]: I0120 17:16:12.369810 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-57f6f664bd-7h7jh" event={"ID":"5efe2c5d-bf53-462a-beb1-36cb940fc6a0","Type":"ContainerDied","Data":"c886f8dc54026b98639367c19491fdbfd34c24d6c8fda7aeb5c732e54143bc46"} Jan 20 17:16:12 crc kubenswrapper[4558]: I0120 17:16:12.394706 4558 scope.go:117] "RemoveContainer" containerID="9ebec879cfd3dcec199dea1e7738ccfa45af9b5be157ef55c3e4756d96cca6ba" Jan 20 17:16:12 crc kubenswrapper[4558]: I0120 17:16:12.416373 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:16:12 crc kubenswrapper[4558]: I0120 17:16:12.427677 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:16:12 crc kubenswrapper[4558]: I0120 17:16:12.436090 4558 scope.go:117] "RemoveContainer" containerID="9ebec879cfd3dcec199dea1e7738ccfa45af9b5be157ef55c3e4756d96cca6ba" Jan 20 17:16:12 crc kubenswrapper[4558]: I0120 17:16:12.436483 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:16:12 crc kubenswrapper[4558]: E0120 17:16:12.437290 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9ebec879cfd3dcec199dea1e7738ccfa45af9b5be157ef55c3e4756d96cca6ba\": container 
with ID starting with 9ebec879cfd3dcec199dea1e7738ccfa45af9b5be157ef55c3e4756d96cca6ba not found: ID does not exist" containerID="9ebec879cfd3dcec199dea1e7738ccfa45af9b5be157ef55c3e4756d96cca6ba" Jan 20 17:16:12 crc kubenswrapper[4558]: I0120 17:16:12.437319 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9ebec879cfd3dcec199dea1e7738ccfa45af9b5be157ef55c3e4756d96cca6ba"} err="failed to get container status \"9ebec879cfd3dcec199dea1e7738ccfa45af9b5be157ef55c3e4756d96cca6ba\": rpc error: code = NotFound desc = could not find container \"9ebec879cfd3dcec199dea1e7738ccfa45af9b5be157ef55c3e4756d96cca6ba\": container with ID starting with 9ebec879cfd3dcec199dea1e7738ccfa45af9b5be157ef55c3e4756d96cca6ba not found: ID does not exist" Jan 20 17:16:12 crc kubenswrapper[4558]: I0120 17:16:12.447809 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:16:12 crc kubenswrapper[4558]: I0120 17:16:12.453357 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/watcher-applier-0"] Jan 20 17:16:12 crc kubenswrapper[4558]: I0120 17:16:12.458399 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/watcher-applier-0"] Jan 20 17:16:12 crc kubenswrapper[4558]: I0120 17:16:12.461917 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-57f6f664bd-7h7jh"] Jan 20 17:16:12 crc kubenswrapper[4558]: I0120 17:16:12.465319 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-57f6f664bd-7h7jh"] Jan 20 17:16:12 crc kubenswrapper[4558]: I0120 17:16:12.579928 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0a49fae8-fd33-4a50-b967-e8e8cc48731a" path="/var/lib/kubelet/pods/0a49fae8-fd33-4a50-b967-e8e8cc48731a/volumes" Jan 20 17:16:12 crc kubenswrapper[4558]: I0120 17:16:12.580701 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="158b4a4e-78aa-4813-a00e-abddfb7214ef" path="/var/lib/kubelet/pods/158b4a4e-78aa-4813-a00e-abddfb7214ef/volumes" Jan 20 17:16:12 crc kubenswrapper[4558]: I0120 17:16:12.581438 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="217a2ebc-8331-4a6f-9113-2c813563a2b8" path="/var/lib/kubelet/pods/217a2ebc-8331-4a6f-9113-2c813563a2b8/volumes" Jan 20 17:16:12 crc kubenswrapper[4558]: I0120 17:16:12.582590 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="23341c92-6342-4e5f-904c-17fdff2841c5" path="/var/lib/kubelet/pods/23341c92-6342-4e5f-904c-17fdff2841c5/volumes" Jan 20 17:16:12 crc kubenswrapper[4558]: I0120 17:16:12.583221 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="249a3706-31a6-4fb2-9a3f-94700d9ae30e" path="/var/lib/kubelet/pods/249a3706-31a6-4fb2-9a3f-94700d9ae30e/volumes" Jan 20 17:16:12 crc kubenswrapper[4558]: I0120 17:16:12.583861 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="30b9f169-eef8-4a30-b543-83dda735da70" path="/var/lib/kubelet/pods/30b9f169-eef8-4a30-b543-83dda735da70/volumes" Jan 20 17:16:12 crc kubenswrapper[4558]: I0120 17:16:12.584974 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5588d9e2-2f89-4abb-94de-76b5791582a5" path="/var/lib/kubelet/pods/5588d9e2-2f89-4abb-94de-76b5791582a5/volumes" Jan 20 17:16:12 crc kubenswrapper[4558]: I0120 17:16:12.585742 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5efe2c5d-bf53-462a-beb1-36cb940fc6a0" 
path="/var/lib/kubelet/pods/5efe2c5d-bf53-462a-beb1-36cb940fc6a0/volumes" Jan 20 17:16:12 crc kubenswrapper[4558]: I0120 17:16:12.586495 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="61a672e2-963e-4428-bb97-e2177ac10c06" path="/var/lib/kubelet/pods/61a672e2-963e-4428-bb97-e2177ac10c06/volumes" Jan 20 17:16:12 crc kubenswrapper[4558]: I0120 17:16:12.586903 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="70dda975-7e0b-40a4-a92a-675be53560b7" path="/var/lib/kubelet/pods/70dda975-7e0b-40a4-a92a-675be53560b7/volumes" Jan 20 17:16:12 crc kubenswrapper[4558]: I0120 17:16:12.588015 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9116eb6c-0630-4022-9c43-8c8793e08922" path="/var/lib/kubelet/pods/9116eb6c-0630-4022-9c43-8c8793e08922/volumes" Jan 20 17:16:12 crc kubenswrapper[4558]: I0120 17:16:12.588691 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a9ada454-f6fa-4ea4-b386-3ddbcd37f233" path="/var/lib/kubelet/pods/a9ada454-f6fa-4ea4-b386-3ddbcd37f233/volumes" Jan 20 17:16:12 crc kubenswrapper[4558]: I0120 17:16:12.589832 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b66dc001-a49e-439f-ad5f-eb6ed96dc37f" path="/var/lib/kubelet/pods/b66dc001-a49e-439f-ad5f-eb6ed96dc37f/volumes" Jan 20 17:16:12 crc kubenswrapper[4558]: I0120 17:16:12.590300 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e8370668-8857-4be3-bcaa-bae8d6cdd158" path="/var/lib/kubelet/pods/e8370668-8857-4be3-bcaa-bae8d6cdd158/volumes" Jan 20 17:16:12 crc kubenswrapper[4558]: I0120 17:16:12.590992 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc" path="/var/lib/kubelet/pods/eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc/volumes" Jan 20 17:16:12 crc kubenswrapper[4558]: I0120 17:16:12.592779 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a" path="/var/lib/kubelet/pods/f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a/volumes" Jan 20 17:16:13 crc kubenswrapper[4558]: E0120 17:16:13.344284 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="022773dc93b3f1b894358c04d0057cb8d8780ca1ef654626b973b40167c3532e" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:16:13 crc kubenswrapper[4558]: E0120 17:16:13.345748 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="022773dc93b3f1b894358c04d0057cb8d8780ca1ef654626b973b40167c3532e" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:16:13 crc kubenswrapper[4558]: E0120 17:16:13.347879 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="022773dc93b3f1b894358c04d0057cb8d8780ca1ef654626b973b40167c3532e" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:16:13 crc kubenswrapper[4558]: E0120 17:16:13.347979 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-scheduler-0" 
podUID="c4cb69c1-3d51-4b13-a025-b37c986ffede" containerName="nova-scheduler-scheduler" Jan 20 17:16:13 crc kubenswrapper[4558]: I0120 17:16:13.390091 4558 generic.go:334] "Generic (PLEG): container finished" podID="fd2edfa4-790f-49d7-9e32-29571e490aaf" containerID="2e090c322fd27930306ecae5c98d22a52137ae21234138d9f2df2a9c227f3a1b" exitCode=0 Jan 20 17:16:13 crc kubenswrapper[4558]: I0120 17:16:13.390141 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-0" event={"ID":"fd2edfa4-790f-49d7-9e32-29571e490aaf","Type":"ContainerDied","Data":"2e090c322fd27930306ecae5c98d22a52137ae21234138d9f2df2a9c227f3a1b"} Jan 20 17:16:13 crc kubenswrapper[4558]: I0120 17:16:13.390191 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-0" event={"ID":"fd2edfa4-790f-49d7-9e32-29571e490aaf","Type":"ContainerDied","Data":"aabe31337edbbbf211a17606aae464568db3b6b1f67edf5ee22c95c8f40bfe41"} Jan 20 17:16:13 crc kubenswrapper[4558]: I0120 17:16:13.390207 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="aabe31337edbbbf211a17606aae464568db3b6b1f67edf5ee22c95c8f40bfe41" Jan 20 17:16:13 crc kubenswrapper[4558]: I0120 17:16:13.421556 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:16:13 crc kubenswrapper[4558]: I0120 17:16:13.495685 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/fd2edfa4-790f-49d7-9e32-29571e490aaf-rabbitmq-confd\") pod \"fd2edfa4-790f-49d7-9e32-29571e490aaf\" (UID: \"fd2edfa4-790f-49d7-9e32-29571e490aaf\") " Jan 20 17:16:13 crc kubenswrapper[4558]: I0120 17:16:13.495772 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/fd2edfa4-790f-49d7-9e32-29571e490aaf-rabbitmq-tls\") pod \"fd2edfa4-790f-49d7-9e32-29571e490aaf\" (UID: \"fd2edfa4-790f-49d7-9e32-29571e490aaf\") " Jan 20 17:16:13 crc kubenswrapper[4558]: I0120 17:16:13.495847 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/fd2edfa4-790f-49d7-9e32-29571e490aaf-erlang-cookie-secret\") pod \"fd2edfa4-790f-49d7-9e32-29571e490aaf\" (UID: \"fd2edfa4-790f-49d7-9e32-29571e490aaf\") " Jan 20 17:16:13 crc kubenswrapper[4558]: I0120 17:16:13.495874 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/fd2edfa4-790f-49d7-9e32-29571e490aaf-pod-info\") pod \"fd2edfa4-790f-49d7-9e32-29571e490aaf\" (UID: \"fd2edfa4-790f-49d7-9e32-29571e490aaf\") " Jan 20 17:16:13 crc kubenswrapper[4558]: I0120 17:16:13.495898 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/fd2edfa4-790f-49d7-9e32-29571e490aaf-rabbitmq-plugins\") pod \"fd2edfa4-790f-49d7-9e32-29571e490aaf\" (UID: \"fd2edfa4-790f-49d7-9e32-29571e490aaf\") " Jan 20 17:16:13 crc kubenswrapper[4558]: I0120 17:16:13.495930 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fd2edfa4-790f-49d7-9e32-29571e490aaf-config-data\") pod \"fd2edfa4-790f-49d7-9e32-29571e490aaf\" (UID: \"fd2edfa4-790f-49d7-9e32-29571e490aaf\") " Jan 20 17:16:13 crc kubenswrapper[4558]: I0120 
17:16:13.496015 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"fd2edfa4-790f-49d7-9e32-29571e490aaf\" (UID: \"fd2edfa4-790f-49d7-9e32-29571e490aaf\") " Jan 20 17:16:13 crc kubenswrapper[4558]: I0120 17:16:13.496071 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/fd2edfa4-790f-49d7-9e32-29571e490aaf-plugins-conf\") pod \"fd2edfa4-790f-49d7-9e32-29571e490aaf\" (UID: \"fd2edfa4-790f-49d7-9e32-29571e490aaf\") " Jan 20 17:16:13 crc kubenswrapper[4558]: I0120 17:16:13.496089 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/fd2edfa4-790f-49d7-9e32-29571e490aaf-rabbitmq-erlang-cookie\") pod \"fd2edfa4-790f-49d7-9e32-29571e490aaf\" (UID: \"fd2edfa4-790f-49d7-9e32-29571e490aaf\") " Jan 20 17:16:13 crc kubenswrapper[4558]: I0120 17:16:13.496134 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/fd2edfa4-790f-49d7-9e32-29571e490aaf-server-conf\") pod \"fd2edfa4-790f-49d7-9e32-29571e490aaf\" (UID: \"fd2edfa4-790f-49d7-9e32-29571e490aaf\") " Jan 20 17:16:13 crc kubenswrapper[4558]: I0120 17:16:13.496205 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ftjq8\" (UniqueName: \"kubernetes.io/projected/fd2edfa4-790f-49d7-9e32-29571e490aaf-kube-api-access-ftjq8\") pod \"fd2edfa4-790f-49d7-9e32-29571e490aaf\" (UID: \"fd2edfa4-790f-49d7-9e32-29571e490aaf\") " Jan 20 17:16:13 crc kubenswrapper[4558]: I0120 17:16:13.497118 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fd2edfa4-790f-49d7-9e32-29571e490aaf-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "fd2edfa4-790f-49d7-9e32-29571e490aaf" (UID: "fd2edfa4-790f-49d7-9e32-29571e490aaf"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:16:13 crc kubenswrapper[4558]: I0120 17:16:13.497412 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fd2edfa4-790f-49d7-9e32-29571e490aaf-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "fd2edfa4-790f-49d7-9e32-29571e490aaf" (UID: "fd2edfa4-790f-49d7-9e32-29571e490aaf"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:16:13 crc kubenswrapper[4558]: I0120 17:16:13.498096 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fd2edfa4-790f-49d7-9e32-29571e490aaf-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "fd2edfa4-790f-49d7-9e32-29571e490aaf" (UID: "fd2edfa4-790f-49d7-9e32-29571e490aaf"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:16:13 crc kubenswrapper[4558]: I0120 17:16:13.503260 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage14-crc" (OuterVolumeSpecName: "persistence") pod "fd2edfa4-790f-49d7-9e32-29571e490aaf" (UID: "fd2edfa4-790f-49d7-9e32-29571e490aaf"). InnerVolumeSpecName "local-storage14-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:16:13 crc kubenswrapper[4558]: I0120 17:16:13.503928 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fd2edfa4-790f-49d7-9e32-29571e490aaf-kube-api-access-ftjq8" (OuterVolumeSpecName: "kube-api-access-ftjq8") pod "fd2edfa4-790f-49d7-9e32-29571e490aaf" (UID: "fd2edfa4-790f-49d7-9e32-29571e490aaf"). InnerVolumeSpecName "kube-api-access-ftjq8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:16:13 crc kubenswrapper[4558]: I0120 17:16:13.504181 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fd2edfa4-790f-49d7-9e32-29571e490aaf-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "fd2edfa4-790f-49d7-9e32-29571e490aaf" (UID: "fd2edfa4-790f-49d7-9e32-29571e490aaf"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:16:13 crc kubenswrapper[4558]: I0120 17:16:13.504470 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/fd2edfa4-790f-49d7-9e32-29571e490aaf-pod-info" (OuterVolumeSpecName: "pod-info") pod "fd2edfa4-790f-49d7-9e32-29571e490aaf" (UID: "fd2edfa4-790f-49d7-9e32-29571e490aaf"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Jan 20 17:16:13 crc kubenswrapper[4558]: I0120 17:16:13.505021 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd2edfa4-790f-49d7-9e32-29571e490aaf-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "fd2edfa4-790f-49d7-9e32-29571e490aaf" (UID: "fd2edfa4-790f-49d7-9e32-29571e490aaf"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:13 crc kubenswrapper[4558]: I0120 17:16:13.521070 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fd2edfa4-790f-49d7-9e32-29571e490aaf-config-data" (OuterVolumeSpecName: "config-data") pod "fd2edfa4-790f-49d7-9e32-29571e490aaf" (UID: "fd2edfa4-790f-49d7-9e32-29571e490aaf"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:16:13 crc kubenswrapper[4558]: I0120 17:16:13.549912 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fd2edfa4-790f-49d7-9e32-29571e490aaf-server-conf" (OuterVolumeSpecName: "server-conf") pod "fd2edfa4-790f-49d7-9e32-29571e490aaf" (UID: "fd2edfa4-790f-49d7-9e32-29571e490aaf"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:16:13 crc kubenswrapper[4558]: I0120 17:16:13.570612 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fd2edfa4-790f-49d7-9e32-29571e490aaf-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "fd2edfa4-790f-49d7-9e32-29571e490aaf" (UID: "fd2edfa4-790f-49d7-9e32-29571e490aaf"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:16:13 crc kubenswrapper[4558]: I0120 17:16:13.598562 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") on node \"crc\" " Jan 20 17:16:13 crc kubenswrapper[4558]: I0120 17:16:13.598597 4558 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/fd2edfa4-790f-49d7-9e32-29571e490aaf-plugins-conf\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:13 crc kubenswrapper[4558]: I0120 17:16:13.598614 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/fd2edfa4-790f-49d7-9e32-29571e490aaf-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:13 crc kubenswrapper[4558]: I0120 17:16:13.598628 4558 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/fd2edfa4-790f-49d7-9e32-29571e490aaf-server-conf\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:13 crc kubenswrapper[4558]: I0120 17:16:13.598638 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ftjq8\" (UniqueName: \"kubernetes.io/projected/fd2edfa4-790f-49d7-9e32-29571e490aaf-kube-api-access-ftjq8\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:13 crc kubenswrapper[4558]: I0120 17:16:13.598648 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/fd2edfa4-790f-49d7-9e32-29571e490aaf-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:13 crc kubenswrapper[4558]: I0120 17:16:13.598658 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/fd2edfa4-790f-49d7-9e32-29571e490aaf-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:13 crc kubenswrapper[4558]: I0120 17:16:13.598667 4558 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/fd2edfa4-790f-49d7-9e32-29571e490aaf-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:13 crc kubenswrapper[4558]: I0120 17:16:13.598677 4558 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/fd2edfa4-790f-49d7-9e32-29571e490aaf-pod-info\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:13 crc kubenswrapper[4558]: I0120 17:16:13.598686 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/fd2edfa4-790f-49d7-9e32-29571e490aaf-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:13 crc kubenswrapper[4558]: I0120 17:16:13.598695 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fd2edfa4-790f-49d7-9e32-29571e490aaf-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:13 crc kubenswrapper[4558]: I0120 17:16:13.619384 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage14-crc" (UniqueName: "kubernetes.io/local-volume/local-storage14-crc") on node "crc" Jan 20 17:16:13 crc kubenswrapper[4558]: I0120 17:16:13.700278 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:14 crc kubenswrapper[4558]: I0120 17:16:14.403014 4558 
generic.go:334] "Generic (PLEG): container finished" podID="8dfaaf1f-540b-4c8b-b93c-1e3556c37801" containerID="ac12e2c480e51bbc2f445d7182e6e37c4208d79ce61c920b8055cf83b75b4e84" exitCode=0 Jan 20 17:16:14 crc kubenswrapper[4558]: I0120 17:16:14.403092 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-75484ddb58-9p7fq" event={"ID":"8dfaaf1f-540b-4c8b-b93c-1e3556c37801","Type":"ContainerDied","Data":"ac12e2c480e51bbc2f445d7182e6e37c4208d79ce61c920b8055cf83b75b4e84"} Jan 20 17:16:14 crc kubenswrapper[4558]: I0120 17:16:14.405469 4558 generic.go:334] "Generic (PLEG): container finished" podID="462f58d4-b41a-45b8-aa8a-003bfe30e625" containerID="0da15b1363218841887085f4193f9590c3888bf6f9f12345ba5c89c89755a3f7" exitCode=0 Jan 20 17:16:14 crc kubenswrapper[4558]: I0120 17:16:14.405544 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:16:14 crc kubenswrapper[4558]: I0120 17:16:14.405567 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-55f98f9877-gm2wm" event={"ID":"462f58d4-b41a-45b8-aa8a-003bfe30e625","Type":"ContainerDied","Data":"0da15b1363218841887085f4193f9590c3888bf6f9f12345ba5c89c89755a3f7"} Jan 20 17:16:14 crc kubenswrapper[4558]: I0120 17:16:14.527200 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:16:14 crc kubenswrapper[4558]: I0120 17:16:14.539113 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:16:14 crc kubenswrapper[4558]: I0120 17:16:14.547033 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-75484ddb58-9p7fq" Jan 20 17:16:14 crc kubenswrapper[4558]: I0120 17:16:14.580660 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fd2edfa4-790f-49d7-9e32-29571e490aaf" path="/var/lib/kubelet/pods/fd2edfa4-790f-49d7-9e32-29571e490aaf/volumes" Jan 20 17:16:14 crc kubenswrapper[4558]: I0120 17:16:14.624042 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fdvsr\" (UniqueName: \"kubernetes.io/projected/8dfaaf1f-540b-4c8b-b93c-1e3556c37801-kube-api-access-fdvsr\") pod \"8dfaaf1f-540b-4c8b-b93c-1e3556c37801\" (UID: \"8dfaaf1f-540b-4c8b-b93c-1e3556c37801\") " Jan 20 17:16:14 crc kubenswrapper[4558]: I0120 17:16:14.624193 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8dfaaf1f-540b-4c8b-b93c-1e3556c37801-config-data-custom\") pod \"8dfaaf1f-540b-4c8b-b93c-1e3556c37801\" (UID: \"8dfaaf1f-540b-4c8b-b93c-1e3556c37801\") " Jan 20 17:16:14 crc kubenswrapper[4558]: I0120 17:16:14.624258 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8dfaaf1f-540b-4c8b-b93c-1e3556c37801-combined-ca-bundle\") pod \"8dfaaf1f-540b-4c8b-b93c-1e3556c37801\" (UID: \"8dfaaf1f-540b-4c8b-b93c-1e3556c37801\") " Jan 20 17:16:14 crc kubenswrapper[4558]: I0120 17:16:14.624310 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8dfaaf1f-540b-4c8b-b93c-1e3556c37801-logs\") pod \"8dfaaf1f-540b-4c8b-b93c-1e3556c37801\" (UID: \"8dfaaf1f-540b-4c8b-b93c-1e3556c37801\") " Jan 20 17:16:14 crc 
kubenswrapper[4558]: I0120 17:16:14.624401 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8dfaaf1f-540b-4c8b-b93c-1e3556c37801-config-data\") pod \"8dfaaf1f-540b-4c8b-b93c-1e3556c37801\" (UID: \"8dfaaf1f-540b-4c8b-b93c-1e3556c37801\") " Jan 20 17:16:14 crc kubenswrapper[4558]: I0120 17:16:14.625228 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8dfaaf1f-540b-4c8b-b93c-1e3556c37801-logs" (OuterVolumeSpecName: "logs") pod "8dfaaf1f-540b-4c8b-b93c-1e3556c37801" (UID: "8dfaaf1f-540b-4c8b-b93c-1e3556c37801"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:16:14 crc kubenswrapper[4558]: I0120 17:16:14.628811 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-55f98f9877-gm2wm" Jan 20 17:16:14 crc kubenswrapper[4558]: I0120 17:16:14.639399 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8dfaaf1f-540b-4c8b-b93c-1e3556c37801-kube-api-access-fdvsr" (OuterVolumeSpecName: "kube-api-access-fdvsr") pod "8dfaaf1f-540b-4c8b-b93c-1e3556c37801" (UID: "8dfaaf1f-540b-4c8b-b93c-1e3556c37801"). InnerVolumeSpecName "kube-api-access-fdvsr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:16:14 crc kubenswrapper[4558]: I0120 17:16:14.639621 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8dfaaf1f-540b-4c8b-b93c-1e3556c37801-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "8dfaaf1f-540b-4c8b-b93c-1e3556c37801" (UID: "8dfaaf1f-540b-4c8b-b93c-1e3556c37801"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:14 crc kubenswrapper[4558]: I0120 17:16:14.669587 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8dfaaf1f-540b-4c8b-b93c-1e3556c37801-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8dfaaf1f-540b-4c8b-b93c-1e3556c37801" (UID: "8dfaaf1f-540b-4c8b-b93c-1e3556c37801"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:14 crc kubenswrapper[4558]: I0120 17:16:14.704078 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8dfaaf1f-540b-4c8b-b93c-1e3556c37801-config-data" (OuterVolumeSpecName: "config-data") pod "8dfaaf1f-540b-4c8b-b93c-1e3556c37801" (UID: "8dfaaf1f-540b-4c8b-b93c-1e3556c37801"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:14 crc kubenswrapper[4558]: I0120 17:16:14.725260 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/462f58d4-b41a-45b8-aa8a-003bfe30e625-config-data-custom\") pod \"462f58d4-b41a-45b8-aa8a-003bfe30e625\" (UID: \"462f58d4-b41a-45b8-aa8a-003bfe30e625\") " Jan 20 17:16:14 crc kubenswrapper[4558]: I0120 17:16:14.725344 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/462f58d4-b41a-45b8-aa8a-003bfe30e625-combined-ca-bundle\") pod \"462f58d4-b41a-45b8-aa8a-003bfe30e625\" (UID: \"462f58d4-b41a-45b8-aa8a-003bfe30e625\") " Jan 20 17:16:14 crc kubenswrapper[4558]: I0120 17:16:14.725549 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/462f58d4-b41a-45b8-aa8a-003bfe30e625-config-data\") pod \"462f58d4-b41a-45b8-aa8a-003bfe30e625\" (UID: \"462f58d4-b41a-45b8-aa8a-003bfe30e625\") " Jan 20 17:16:14 crc kubenswrapper[4558]: I0120 17:16:14.725624 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/462f58d4-b41a-45b8-aa8a-003bfe30e625-logs\") pod \"462f58d4-b41a-45b8-aa8a-003bfe30e625\" (UID: \"462f58d4-b41a-45b8-aa8a-003bfe30e625\") " Jan 20 17:16:14 crc kubenswrapper[4558]: I0120 17:16:14.725679 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bpclt\" (UniqueName: \"kubernetes.io/projected/462f58d4-b41a-45b8-aa8a-003bfe30e625-kube-api-access-bpclt\") pod \"462f58d4-b41a-45b8-aa8a-003bfe30e625\" (UID: \"462f58d4-b41a-45b8-aa8a-003bfe30e625\") " Jan 20 17:16:14 crc kubenswrapper[4558]: I0120 17:16:14.726607 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/462f58d4-b41a-45b8-aa8a-003bfe30e625-logs" (OuterVolumeSpecName: "logs") pod "462f58d4-b41a-45b8-aa8a-003bfe30e625" (UID: "462f58d4-b41a-45b8-aa8a-003bfe30e625"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:16:14 crc kubenswrapper[4558]: I0120 17:16:14.726871 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fdvsr\" (UniqueName: \"kubernetes.io/projected/8dfaaf1f-540b-4c8b-b93c-1e3556c37801-kube-api-access-fdvsr\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:14 crc kubenswrapper[4558]: I0120 17:16:14.726906 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8dfaaf1f-540b-4c8b-b93c-1e3556c37801-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:14 crc kubenswrapper[4558]: I0120 17:16:14.726919 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/462f58d4-b41a-45b8-aa8a-003bfe30e625-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:14 crc kubenswrapper[4558]: I0120 17:16:14.726952 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8dfaaf1f-540b-4c8b-b93c-1e3556c37801-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:14 crc kubenswrapper[4558]: I0120 17:16:14.726962 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8dfaaf1f-540b-4c8b-b93c-1e3556c37801-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:14 crc kubenswrapper[4558]: I0120 17:16:14.726972 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8dfaaf1f-540b-4c8b-b93c-1e3556c37801-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:14 crc kubenswrapper[4558]: I0120 17:16:14.729352 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/462f58d4-b41a-45b8-aa8a-003bfe30e625-kube-api-access-bpclt" (OuterVolumeSpecName: "kube-api-access-bpclt") pod "462f58d4-b41a-45b8-aa8a-003bfe30e625" (UID: "462f58d4-b41a-45b8-aa8a-003bfe30e625"). InnerVolumeSpecName "kube-api-access-bpclt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:16:14 crc kubenswrapper[4558]: I0120 17:16:14.729630 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/462f58d4-b41a-45b8-aa8a-003bfe30e625-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "462f58d4-b41a-45b8-aa8a-003bfe30e625" (UID: "462f58d4-b41a-45b8-aa8a-003bfe30e625"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:14 crc kubenswrapper[4558]: I0120 17:16:14.741929 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/462f58d4-b41a-45b8-aa8a-003bfe30e625-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "462f58d4-b41a-45b8-aa8a-003bfe30e625" (UID: "462f58d4-b41a-45b8-aa8a-003bfe30e625"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:14 crc kubenswrapper[4558]: I0120 17:16:14.756654 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/462f58d4-b41a-45b8-aa8a-003bfe30e625-config-data" (OuterVolumeSpecName: "config-data") pod "462f58d4-b41a-45b8-aa8a-003bfe30e625" (UID: "462f58d4-b41a-45b8-aa8a-003bfe30e625"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:14 crc kubenswrapper[4558]: I0120 17:16:14.786831 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:16:14 crc kubenswrapper[4558]: I0120 17:16:14.829059 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/462f58d4-b41a-45b8-aa8a-003bfe30e625-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:14 crc kubenswrapper[4558]: I0120 17:16:14.829102 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bpclt\" (UniqueName: \"kubernetes.io/projected/462f58d4-b41a-45b8-aa8a-003bfe30e625-kube-api-access-bpclt\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:14 crc kubenswrapper[4558]: I0120 17:16:14.829122 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/462f58d4-b41a-45b8-aa8a-003bfe30e625-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:14 crc kubenswrapper[4558]: I0120 17:16:14.829135 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/462f58d4-b41a-45b8-aa8a-003bfe30e625-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:14 crc kubenswrapper[4558]: I0120 17:16:14.930596 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/544c8619-276c-4358-ac79-58df9a462173-run-httpd\") pod \"544c8619-276c-4358-ac79-58df9a462173\" (UID: \"544c8619-276c-4358-ac79-58df9a462173\") " Jan 20 17:16:14 crc kubenswrapper[4558]: I0120 17:16:14.930754 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/544c8619-276c-4358-ac79-58df9a462173-config-data\") pod \"544c8619-276c-4358-ac79-58df9a462173\" (UID: \"544c8619-276c-4358-ac79-58df9a462173\") " Jan 20 17:16:14 crc kubenswrapper[4558]: I0120 17:16:14.930823 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/544c8619-276c-4358-ac79-58df9a462173-sg-core-conf-yaml\") pod \"544c8619-276c-4358-ac79-58df9a462173\" (UID: \"544c8619-276c-4358-ac79-58df9a462173\") " Jan 20 17:16:14 crc kubenswrapper[4558]: I0120 17:16:14.930861 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/544c8619-276c-4358-ac79-58df9a462173-combined-ca-bundle\") pod \"544c8619-276c-4358-ac79-58df9a462173\" (UID: \"544c8619-276c-4358-ac79-58df9a462173\") " Jan 20 17:16:14 crc kubenswrapper[4558]: I0120 17:16:14.930940 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/544c8619-276c-4358-ac79-58df9a462173-log-httpd\") pod \"544c8619-276c-4358-ac79-58df9a462173\" (UID: \"544c8619-276c-4358-ac79-58df9a462173\") " Jan 20 17:16:14 crc kubenswrapper[4558]: I0120 17:16:14.930985 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j59xz\" (UniqueName: \"kubernetes.io/projected/544c8619-276c-4358-ac79-58df9a462173-kube-api-access-j59xz\") pod \"544c8619-276c-4358-ac79-58df9a462173\" (UID: \"544c8619-276c-4358-ac79-58df9a462173\") " Jan 20 17:16:14 crc kubenswrapper[4558]: I0120 17:16:14.931015 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/544c8619-276c-4358-ac79-58df9a462173-ceilometer-tls-certs\") pod 
\"544c8619-276c-4358-ac79-58df9a462173\" (UID: \"544c8619-276c-4358-ac79-58df9a462173\") " Jan 20 17:16:14 crc kubenswrapper[4558]: I0120 17:16:14.931041 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/544c8619-276c-4358-ac79-58df9a462173-scripts\") pod \"544c8619-276c-4358-ac79-58df9a462173\" (UID: \"544c8619-276c-4358-ac79-58df9a462173\") " Jan 20 17:16:14 crc kubenswrapper[4558]: I0120 17:16:14.931476 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/544c8619-276c-4358-ac79-58df9a462173-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "544c8619-276c-4358-ac79-58df9a462173" (UID: "544c8619-276c-4358-ac79-58df9a462173"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:16:14 crc kubenswrapper[4558]: I0120 17:16:14.931690 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/544c8619-276c-4358-ac79-58df9a462173-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:14 crc kubenswrapper[4558]: I0120 17:16:14.931680 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/544c8619-276c-4358-ac79-58df9a462173-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "544c8619-276c-4358-ac79-58df9a462173" (UID: "544c8619-276c-4358-ac79-58df9a462173"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:16:14 crc kubenswrapper[4558]: I0120 17:16:14.936301 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/544c8619-276c-4358-ac79-58df9a462173-kube-api-access-j59xz" (OuterVolumeSpecName: "kube-api-access-j59xz") pod "544c8619-276c-4358-ac79-58df9a462173" (UID: "544c8619-276c-4358-ac79-58df9a462173"). InnerVolumeSpecName "kube-api-access-j59xz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:16:14 crc kubenswrapper[4558]: I0120 17:16:14.938789 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/544c8619-276c-4358-ac79-58df9a462173-scripts" (OuterVolumeSpecName: "scripts") pod "544c8619-276c-4358-ac79-58df9a462173" (UID: "544c8619-276c-4358-ac79-58df9a462173"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:14 crc kubenswrapper[4558]: I0120 17:16:14.959770 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/544c8619-276c-4358-ac79-58df9a462173-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "544c8619-276c-4358-ac79-58df9a462173" (UID: "544c8619-276c-4358-ac79-58df9a462173"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:14 crc kubenswrapper[4558]: I0120 17:16:14.975976 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/544c8619-276c-4358-ac79-58df9a462173-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "544c8619-276c-4358-ac79-58df9a462173" (UID: "544c8619-276c-4358-ac79-58df9a462173"). InnerVolumeSpecName "ceilometer-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:14 crc kubenswrapper[4558]: I0120 17:16:14.987343 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/544c8619-276c-4358-ac79-58df9a462173-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "544c8619-276c-4358-ac79-58df9a462173" (UID: "544c8619-276c-4358-ac79-58df9a462173"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:15 crc kubenswrapper[4558]: I0120 17:16:15.002717 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/544c8619-276c-4358-ac79-58df9a462173-config-data" (OuterVolumeSpecName: "config-data") pod "544c8619-276c-4358-ac79-58df9a462173" (UID: "544c8619-276c-4358-ac79-58df9a462173"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:15 crc kubenswrapper[4558]: I0120 17:16:15.033053 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/544c8619-276c-4358-ac79-58df9a462173-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:15 crc kubenswrapper[4558]: I0120 17:16:15.033083 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/544c8619-276c-4358-ac79-58df9a462173-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:15 crc kubenswrapper[4558]: I0120 17:16:15.033096 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/544c8619-276c-4358-ac79-58df9a462173-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:15 crc kubenswrapper[4558]: I0120 17:16:15.033109 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/544c8619-276c-4358-ac79-58df9a462173-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:15 crc kubenswrapper[4558]: I0120 17:16:15.033120 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j59xz\" (UniqueName: \"kubernetes.io/projected/544c8619-276c-4358-ac79-58df9a462173-kube-api-access-j59xz\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:15 crc kubenswrapper[4558]: I0120 17:16:15.033131 4558 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/544c8619-276c-4358-ac79-58df9a462173-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:15 crc kubenswrapper[4558]: I0120 17:16:15.033139 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/544c8619-276c-4358-ac79-58df9a462173-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:15 crc kubenswrapper[4558]: I0120 17:16:15.037044 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:16:15 crc kubenswrapper[4558]: I0120 17:16:15.134418 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4cb69c1-3d51-4b13-a025-b37c986ffede-combined-ca-bundle\") pod \"c4cb69c1-3d51-4b13-a025-b37c986ffede\" (UID: \"c4cb69c1-3d51-4b13-a025-b37c986ffede\") " Jan 20 17:16:15 crc kubenswrapper[4558]: I0120 17:16:15.134961 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c4cb69c1-3d51-4b13-a025-b37c986ffede-config-data\") pod \"c4cb69c1-3d51-4b13-a025-b37c986ffede\" (UID: \"c4cb69c1-3d51-4b13-a025-b37c986ffede\") " Jan 20 17:16:15 crc kubenswrapper[4558]: I0120 17:16:15.135030 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tfkzf\" (UniqueName: \"kubernetes.io/projected/c4cb69c1-3d51-4b13-a025-b37c986ffede-kube-api-access-tfkzf\") pod \"c4cb69c1-3d51-4b13-a025-b37c986ffede\" (UID: \"c4cb69c1-3d51-4b13-a025-b37c986ffede\") " Jan 20 17:16:15 crc kubenswrapper[4558]: I0120 17:16:15.137328 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c4cb69c1-3d51-4b13-a025-b37c986ffede-kube-api-access-tfkzf" (OuterVolumeSpecName: "kube-api-access-tfkzf") pod "c4cb69c1-3d51-4b13-a025-b37c986ffede" (UID: "c4cb69c1-3d51-4b13-a025-b37c986ffede"). InnerVolumeSpecName "kube-api-access-tfkzf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:16:15 crc kubenswrapper[4558]: E0120 17:16:15.160464 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c4cb69c1-3d51-4b13-a025-b37c986ffede-combined-ca-bundle podName:c4cb69c1-3d51-4b13-a025-b37c986ffede nodeName:}" failed. No retries permitted until 2026-01-20 17:16:15.660425353 +0000 UTC m=+2069.420763319 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/c4cb69c1-3d51-4b13-a025-b37c986ffede-combined-ca-bundle") pod "c4cb69c1-3d51-4b13-a025-b37c986ffede" (UID: "c4cb69c1-3d51-4b13-a025-b37c986ffede") : error deleting /var/lib/kubelet/pods/c4cb69c1-3d51-4b13-a025-b37c986ffede/volume-subpaths: remove /var/lib/kubelet/pods/c4cb69c1-3d51-4b13-a025-b37c986ffede/volume-subpaths: no such file or directory Jan 20 17:16:15 crc kubenswrapper[4558]: I0120 17:16:15.163327 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c4cb69c1-3d51-4b13-a025-b37c986ffede-config-data" (OuterVolumeSpecName: "config-data") pod "c4cb69c1-3d51-4b13-a025-b37c986ffede" (UID: "c4cb69c1-3d51-4b13-a025-b37c986ffede"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:15 crc kubenswrapper[4558]: I0120 17:16:15.237823 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c4cb69c1-3d51-4b13-a025-b37c986ffede-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:15 crc kubenswrapper[4558]: I0120 17:16:15.237857 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tfkzf\" (UniqueName: \"kubernetes.io/projected/c4cb69c1-3d51-4b13-a025-b37c986ffede-kube-api-access-tfkzf\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:15 crc kubenswrapper[4558]: I0120 17:16:15.416502 4558 generic.go:334] "Generic (PLEG): container finished" podID="544c8619-276c-4358-ac79-58df9a462173" containerID="12b7e813e5ba4c8b347be431d52bb1ff48a52e3d5f90ccda95991eea8a6553ad" exitCode=0 Jan 20 17:16:15 crc kubenswrapper[4558]: I0120 17:16:15.416571 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"544c8619-276c-4358-ac79-58df9a462173","Type":"ContainerDied","Data":"12b7e813e5ba4c8b347be431d52bb1ff48a52e3d5f90ccda95991eea8a6553ad"} Jan 20 17:16:15 crc kubenswrapper[4558]: I0120 17:16:15.416580 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:16:15 crc kubenswrapper[4558]: I0120 17:16:15.416601 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"544c8619-276c-4358-ac79-58df9a462173","Type":"ContainerDied","Data":"78f1dcc15389b29d1baf585f7d61c4fe6334f3e616cfacf39b99ea938cff3a9a"} Jan 20 17:16:15 crc kubenswrapper[4558]: I0120 17:16:15.416619 4558 scope.go:117] "RemoveContainer" containerID="5e7f9a7df82f7908138cd4491a1924604163a709b2ab28b8825d5768baab4058" Jan 20 17:16:15 crc kubenswrapper[4558]: I0120 17:16:15.418306 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-75484ddb58-9p7fq" event={"ID":"8dfaaf1f-540b-4c8b-b93c-1e3556c37801","Type":"ContainerDied","Data":"04964c7fbc6fe6167016fdbfd0babb05695f0f29b288f1d7c5a026f79e48f86d"} Jan 20 17:16:15 crc kubenswrapper[4558]: I0120 17:16:15.418614 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-75484ddb58-9p7fq" Jan 20 17:16:15 crc kubenswrapper[4558]: I0120 17:16:15.435332 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-55f98f9877-gm2wm" event={"ID":"462f58d4-b41a-45b8-aa8a-003bfe30e625","Type":"ContainerDied","Data":"db613104669c89ae6b312be6587dc38f5bd96e683a72780c04ce61c6c65b84f9"} Jan 20 17:16:15 crc kubenswrapper[4558]: I0120 17:16:15.435407 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-55f98f9877-gm2wm" Jan 20 17:16:15 crc kubenswrapper[4558]: I0120 17:16:15.439637 4558 scope.go:117] "RemoveContainer" containerID="821d851a2b03145528569e24bdf1e48df94429d66e29eac4da06f73e2d0ea57e" Jan 20 17:16:15 crc kubenswrapper[4558]: I0120 17:16:15.440021 4558 generic.go:334] "Generic (PLEG): container finished" podID="c4cb69c1-3d51-4b13-a025-b37c986ffede" containerID="022773dc93b3f1b894358c04d0057cb8d8780ca1ef654626b973b40167c3532e" exitCode=0 Jan 20 17:16:15 crc kubenswrapper[4558]: I0120 17:16:15.440051 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"c4cb69c1-3d51-4b13-a025-b37c986ffede","Type":"ContainerDied","Data":"022773dc93b3f1b894358c04d0057cb8d8780ca1ef654626b973b40167c3532e"} Jan 20 17:16:15 crc kubenswrapper[4558]: I0120 17:16:15.440068 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"c4cb69c1-3d51-4b13-a025-b37c986ffede","Type":"ContainerDied","Data":"3c735e96acc0786f4271c1844dfa0a1b160606a4ed024ea4da415c596f59c86d"} Jan 20 17:16:15 crc kubenswrapper[4558]: I0120 17:16:15.440103 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:16:15 crc kubenswrapper[4558]: I0120 17:16:15.476342 4558 scope.go:117] "RemoveContainer" containerID="12b7e813e5ba4c8b347be431d52bb1ff48a52e3d5f90ccda95991eea8a6553ad" Jan 20 17:16:15 crc kubenswrapper[4558]: I0120 17:16:15.506207 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-75484ddb58-9p7fq"] Jan 20 17:16:15 crc kubenswrapper[4558]: I0120 17:16:15.516595 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-75484ddb58-9p7fq"] Jan 20 17:16:15 crc kubenswrapper[4558]: I0120 17:16:15.681055 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-worker-55f98f9877-gm2wm"] Jan 20 17:16:15 crc kubenswrapper[4558]: I0120 17:16:15.688651 4558 scope.go:117] "RemoveContainer" containerID="aef9e92362cdebb57f6e6fec72040ef9ea0c3ebdec14cebff1a1ec4ae81e35a9" Jan 20 17:16:15 crc kubenswrapper[4558]: I0120 17:16:15.699573 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-worker-55f98f9877-gm2wm"] Jan 20 17:16:15 crc kubenswrapper[4558]: I0120 17:16:15.706410 4558 scope.go:117] "RemoveContainer" containerID="5e7f9a7df82f7908138cd4491a1924604163a709b2ab28b8825d5768baab4058" Jan 20 17:16:15 crc kubenswrapper[4558]: I0120 17:16:15.706446 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:16:15 crc kubenswrapper[4558]: E0120 17:16:15.707065 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5e7f9a7df82f7908138cd4491a1924604163a709b2ab28b8825d5768baab4058\": container with ID starting with 5e7f9a7df82f7908138cd4491a1924604163a709b2ab28b8825d5768baab4058 not found: ID does not exist" containerID="5e7f9a7df82f7908138cd4491a1924604163a709b2ab28b8825d5768baab4058" Jan 20 17:16:15 crc kubenswrapper[4558]: I0120 17:16:15.707099 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5e7f9a7df82f7908138cd4491a1924604163a709b2ab28b8825d5768baab4058"} err="failed to get container status 
\"5e7f9a7df82f7908138cd4491a1924604163a709b2ab28b8825d5768baab4058\": rpc error: code = NotFound desc = could not find container \"5e7f9a7df82f7908138cd4491a1924604163a709b2ab28b8825d5768baab4058\": container with ID starting with 5e7f9a7df82f7908138cd4491a1924604163a709b2ab28b8825d5768baab4058 not found: ID does not exist" Jan 20 17:16:15 crc kubenswrapper[4558]: I0120 17:16:15.707119 4558 scope.go:117] "RemoveContainer" containerID="821d851a2b03145528569e24bdf1e48df94429d66e29eac4da06f73e2d0ea57e" Jan 20 17:16:15 crc kubenswrapper[4558]: E0120 17:16:15.707532 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"821d851a2b03145528569e24bdf1e48df94429d66e29eac4da06f73e2d0ea57e\": container with ID starting with 821d851a2b03145528569e24bdf1e48df94429d66e29eac4da06f73e2d0ea57e not found: ID does not exist" containerID="821d851a2b03145528569e24bdf1e48df94429d66e29eac4da06f73e2d0ea57e" Jan 20 17:16:15 crc kubenswrapper[4558]: I0120 17:16:15.707579 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"821d851a2b03145528569e24bdf1e48df94429d66e29eac4da06f73e2d0ea57e"} err="failed to get container status \"821d851a2b03145528569e24bdf1e48df94429d66e29eac4da06f73e2d0ea57e\": rpc error: code = NotFound desc = could not find container \"821d851a2b03145528569e24bdf1e48df94429d66e29eac4da06f73e2d0ea57e\": container with ID starting with 821d851a2b03145528569e24bdf1e48df94429d66e29eac4da06f73e2d0ea57e not found: ID does not exist" Jan 20 17:16:15 crc kubenswrapper[4558]: I0120 17:16:15.707609 4558 scope.go:117] "RemoveContainer" containerID="12b7e813e5ba4c8b347be431d52bb1ff48a52e3d5f90ccda95991eea8a6553ad" Jan 20 17:16:15 crc kubenswrapper[4558]: E0120 17:16:15.707964 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"12b7e813e5ba4c8b347be431d52bb1ff48a52e3d5f90ccda95991eea8a6553ad\": container with ID starting with 12b7e813e5ba4c8b347be431d52bb1ff48a52e3d5f90ccda95991eea8a6553ad not found: ID does not exist" containerID="12b7e813e5ba4c8b347be431d52bb1ff48a52e3d5f90ccda95991eea8a6553ad" Jan 20 17:16:15 crc kubenswrapper[4558]: I0120 17:16:15.707995 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"12b7e813e5ba4c8b347be431d52bb1ff48a52e3d5f90ccda95991eea8a6553ad"} err="failed to get container status \"12b7e813e5ba4c8b347be431d52bb1ff48a52e3d5f90ccda95991eea8a6553ad\": rpc error: code = NotFound desc = could not find container \"12b7e813e5ba4c8b347be431d52bb1ff48a52e3d5f90ccda95991eea8a6553ad\": container with ID starting with 12b7e813e5ba4c8b347be431d52bb1ff48a52e3d5f90ccda95991eea8a6553ad not found: ID does not exist" Jan 20 17:16:15 crc kubenswrapper[4558]: I0120 17:16:15.708021 4558 scope.go:117] "RemoveContainer" containerID="aef9e92362cdebb57f6e6fec72040ef9ea0c3ebdec14cebff1a1ec4ae81e35a9" Jan 20 17:16:15 crc kubenswrapper[4558]: E0120 17:16:15.708325 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aef9e92362cdebb57f6e6fec72040ef9ea0c3ebdec14cebff1a1ec4ae81e35a9\": container with ID starting with aef9e92362cdebb57f6e6fec72040ef9ea0c3ebdec14cebff1a1ec4ae81e35a9 not found: ID does not exist" containerID="aef9e92362cdebb57f6e6fec72040ef9ea0c3ebdec14cebff1a1ec4ae81e35a9" Jan 20 17:16:15 crc kubenswrapper[4558]: I0120 17:16:15.708364 4558 pod_container_deletor.go:53] 
"DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aef9e92362cdebb57f6e6fec72040ef9ea0c3ebdec14cebff1a1ec4ae81e35a9"} err="failed to get container status \"aef9e92362cdebb57f6e6fec72040ef9ea0c3ebdec14cebff1a1ec4ae81e35a9\": rpc error: code = NotFound desc = could not find container \"aef9e92362cdebb57f6e6fec72040ef9ea0c3ebdec14cebff1a1ec4ae81e35a9\": container with ID starting with aef9e92362cdebb57f6e6fec72040ef9ea0c3ebdec14cebff1a1ec4ae81e35a9 not found: ID does not exist" Jan 20 17:16:15 crc kubenswrapper[4558]: I0120 17:16:15.708380 4558 scope.go:117] "RemoveContainer" containerID="ac12e2c480e51bbc2f445d7182e6e37c4208d79ce61c920b8055cf83b75b4e84" Jan 20 17:16:15 crc kubenswrapper[4558]: I0120 17:16:15.711739 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:16:15 crc kubenswrapper[4558]: I0120 17:16:15.725995 4558 scope.go:117] "RemoveContainer" containerID="bbac2ee5bf98eddd33a60a5edd0a1dac188b6c3de922ce151b7900626fe0fff8" Jan 20 17:16:15 crc kubenswrapper[4558]: I0120 17:16:15.744915 4558 scope.go:117] "RemoveContainer" containerID="0da15b1363218841887085f4193f9590c3888bf6f9f12345ba5c89c89755a3f7" Jan 20 17:16:15 crc kubenswrapper[4558]: I0120 17:16:15.745145 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4cb69c1-3d51-4b13-a025-b37c986ffede-combined-ca-bundle\") pod \"c4cb69c1-3d51-4b13-a025-b37c986ffede\" (UID: \"c4cb69c1-3d51-4b13-a025-b37c986ffede\") " Jan 20 17:16:15 crc kubenswrapper[4558]: I0120 17:16:15.759098 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c4cb69c1-3d51-4b13-a025-b37c986ffede-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c4cb69c1-3d51-4b13-a025-b37c986ffede" (UID: "c4cb69c1-3d51-4b13-a025-b37c986ffede"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:15 crc kubenswrapper[4558]: I0120 17:16:15.787566 4558 scope.go:117] "RemoveContainer" containerID="696c95ac455228cd89e273ea6e2bcb78c3bf7063a8f5ebba19cde538e34d64bb" Jan 20 17:16:15 crc kubenswrapper[4558]: I0120 17:16:15.809260 4558 scope.go:117] "RemoveContainer" containerID="022773dc93b3f1b894358c04d0057cb8d8780ca1ef654626b973b40167c3532e" Jan 20 17:16:15 crc kubenswrapper[4558]: I0120 17:16:15.824396 4558 scope.go:117] "RemoveContainer" containerID="022773dc93b3f1b894358c04d0057cb8d8780ca1ef654626b973b40167c3532e" Jan 20 17:16:15 crc kubenswrapper[4558]: E0120 17:16:15.824803 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"022773dc93b3f1b894358c04d0057cb8d8780ca1ef654626b973b40167c3532e\": container with ID starting with 022773dc93b3f1b894358c04d0057cb8d8780ca1ef654626b973b40167c3532e not found: ID does not exist" containerID="022773dc93b3f1b894358c04d0057cb8d8780ca1ef654626b973b40167c3532e" Jan 20 17:16:15 crc kubenswrapper[4558]: I0120 17:16:15.824876 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"022773dc93b3f1b894358c04d0057cb8d8780ca1ef654626b973b40167c3532e"} err="failed to get container status \"022773dc93b3f1b894358c04d0057cb8d8780ca1ef654626b973b40167c3532e\": rpc error: code = NotFound desc = could not find container \"022773dc93b3f1b894358c04d0057cb8d8780ca1ef654626b973b40167c3532e\": container with ID starting with 022773dc93b3f1b894358c04d0057cb8d8780ca1ef654626b973b40167c3532e not found: ID does not exist" Jan 20 17:16:15 crc kubenswrapper[4558]: I0120 17:16:15.847271 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4cb69c1-3d51-4b13-a025-b37c986ffede-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:16 crc kubenswrapper[4558]: I0120 17:16:16.070350 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:16:16 crc kubenswrapper[4558]: I0120 17:16:16.074326 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:16:16 crc kubenswrapper[4558]: I0120 17:16:16.535361 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-ndwh7" Jan 20 17:16:16 crc kubenswrapper[4558]: I0120 17:16:16.590487 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="462f58d4-b41a-45b8-aa8a-003bfe30e625" path="/var/lib/kubelet/pods/462f58d4-b41a-45b8-aa8a-003bfe30e625/volumes" Jan 20 17:16:16 crc kubenswrapper[4558]: I0120 17:16:16.591285 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="544c8619-276c-4358-ac79-58df9a462173" path="/var/lib/kubelet/pods/544c8619-276c-4358-ac79-58df9a462173/volumes" Jan 20 17:16:16 crc kubenswrapper[4558]: I0120 17:16:16.594735 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8dfaaf1f-540b-4c8b-b93c-1e3556c37801" path="/var/lib/kubelet/pods/8dfaaf1f-540b-4c8b-b93c-1e3556c37801/volumes" Jan 20 17:16:16 crc kubenswrapper[4558]: I0120 17:16:16.595451 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c4cb69c1-3d51-4b13-a025-b37c986ffede" path="/var/lib/kubelet/pods/c4cb69c1-3d51-4b13-a025-b37c986ffede/volumes" Jan 20 17:16:16 crc kubenswrapper[4558]: I0120 17:16:16.596027 4558 kubelet.go:2437] "SyncLoop DELETE" 
source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-996c57c45-7hr9m"] Jan 20 17:16:16 crc kubenswrapper[4558]: I0120 17:16:16.596297 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-996c57c45-7hr9m" podUID="6e03bea8-761e-4d6d-bcaf-f4397b3ec310" containerName="dnsmasq-dns" containerID="cri-o://f95d7322fcfd2ddfaa632b4101e4e708456d08454d53d6519b66c6fc9742857f" gracePeriod=10 Jan 20 17:16:17 crc kubenswrapper[4558]: I0120 17:16:17.003324 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-996c57c45-7hr9m" Jan 20 17:16:17 crc kubenswrapper[4558]: I0120 17:16:17.065632 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6e03bea8-761e-4d6d-bcaf-f4397b3ec310-dns-swift-storage-0\") pod \"6e03bea8-761e-4d6d-bcaf-f4397b3ec310\" (UID: \"6e03bea8-761e-4d6d-bcaf-f4397b3ec310\") " Jan 20 17:16:17 crc kubenswrapper[4558]: I0120 17:16:17.065711 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/6e03bea8-761e-4d6d-bcaf-f4397b3ec310-dnsmasq-svc\") pod \"6e03bea8-761e-4d6d-bcaf-f4397b3ec310\" (UID: \"6e03bea8-761e-4d6d-bcaf-f4397b3ec310\") " Jan 20 17:16:17 crc kubenswrapper[4558]: I0120 17:16:17.065737 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-62v9h\" (UniqueName: \"kubernetes.io/projected/6e03bea8-761e-4d6d-bcaf-f4397b3ec310-kube-api-access-62v9h\") pod \"6e03bea8-761e-4d6d-bcaf-f4397b3ec310\" (UID: \"6e03bea8-761e-4d6d-bcaf-f4397b3ec310\") " Jan 20 17:16:17 crc kubenswrapper[4558]: I0120 17:16:17.065825 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6e03bea8-761e-4d6d-bcaf-f4397b3ec310-config\") pod \"6e03bea8-761e-4d6d-bcaf-f4397b3ec310\" (UID: \"6e03bea8-761e-4d6d-bcaf-f4397b3ec310\") " Jan 20 17:16:17 crc kubenswrapper[4558]: I0120 17:16:17.080409 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6e03bea8-761e-4d6d-bcaf-f4397b3ec310-kube-api-access-62v9h" (OuterVolumeSpecName: "kube-api-access-62v9h") pod "6e03bea8-761e-4d6d-bcaf-f4397b3ec310" (UID: "6e03bea8-761e-4d6d-bcaf-f4397b3ec310"). InnerVolumeSpecName "kube-api-access-62v9h". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:16:17 crc kubenswrapper[4558]: I0120 17:16:17.093131 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6e03bea8-761e-4d6d-bcaf-f4397b3ec310-config" (OuterVolumeSpecName: "config") pod "6e03bea8-761e-4d6d-bcaf-f4397b3ec310" (UID: "6e03bea8-761e-4d6d-bcaf-f4397b3ec310"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:16:17 crc kubenswrapper[4558]: I0120 17:16:17.093188 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6e03bea8-761e-4d6d-bcaf-f4397b3ec310-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "6e03bea8-761e-4d6d-bcaf-f4397b3ec310" (UID: "6e03bea8-761e-4d6d-bcaf-f4397b3ec310"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:16:17 crc kubenswrapper[4558]: I0120 17:16:17.096513 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6e03bea8-761e-4d6d-bcaf-f4397b3ec310-dnsmasq-svc" (OuterVolumeSpecName: "dnsmasq-svc") pod "6e03bea8-761e-4d6d-bcaf-f4397b3ec310" (UID: "6e03bea8-761e-4d6d-bcaf-f4397b3ec310"). InnerVolumeSpecName "dnsmasq-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:16:17 crc kubenswrapper[4558]: I0120 17:16:17.167661 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6e03bea8-761e-4d6d-bcaf-f4397b3ec310-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:17 crc kubenswrapper[4558]: I0120 17:16:17.167692 4558 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6e03bea8-761e-4d6d-bcaf-f4397b3ec310-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:17 crc kubenswrapper[4558]: I0120 17:16:17.167702 4558 reconciler_common.go:293] "Volume detached for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/6e03bea8-761e-4d6d-bcaf-f4397b3ec310-dnsmasq-svc\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:17 crc kubenswrapper[4558]: I0120 17:16:17.167713 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-62v9h\" (UniqueName: \"kubernetes.io/projected/6e03bea8-761e-4d6d-bcaf-f4397b3ec310-kube-api-access-62v9h\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:17 crc kubenswrapper[4558]: I0120 17:16:17.337540 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-857895d98b-n66x9" Jan 20 17:16:17 crc kubenswrapper[4558]: I0120 17:16:17.474606 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/32c4dbf1-2dfe-44a7-be21-1415017ca20c-public-tls-certs\") pod \"32c4dbf1-2dfe-44a7-be21-1415017ca20c\" (UID: \"32c4dbf1-2dfe-44a7-be21-1415017ca20c\") " Jan 20 17:16:17 crc kubenswrapper[4558]: I0120 17:16:17.474848 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rfbbj\" (UniqueName: \"kubernetes.io/projected/32c4dbf1-2dfe-44a7-be21-1415017ca20c-kube-api-access-rfbbj\") pod \"32c4dbf1-2dfe-44a7-be21-1415017ca20c\" (UID: \"32c4dbf1-2dfe-44a7-be21-1415017ca20c\") " Jan 20 17:16:17 crc kubenswrapper[4558]: I0120 17:16:17.474929 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/32c4dbf1-2dfe-44a7-be21-1415017ca20c-config\") pod \"32c4dbf1-2dfe-44a7-be21-1415017ca20c\" (UID: \"32c4dbf1-2dfe-44a7-be21-1415017ca20c\") " Jan 20 17:16:17 crc kubenswrapper[4558]: I0120 17:16:17.474970 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/32c4dbf1-2dfe-44a7-be21-1415017ca20c-httpd-config\") pod \"32c4dbf1-2dfe-44a7-be21-1415017ca20c\" (UID: \"32c4dbf1-2dfe-44a7-be21-1415017ca20c\") " Jan 20 17:16:17 crc kubenswrapper[4558]: I0120 17:16:17.475003 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/32c4dbf1-2dfe-44a7-be21-1415017ca20c-internal-tls-certs\") pod \"32c4dbf1-2dfe-44a7-be21-1415017ca20c\" (UID: \"32c4dbf1-2dfe-44a7-be21-1415017ca20c\") " Jan 20 17:16:17 crc 
kubenswrapper[4558]: I0120 17:16:17.475027 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/32c4dbf1-2dfe-44a7-be21-1415017ca20c-ovndb-tls-certs\") pod \"32c4dbf1-2dfe-44a7-be21-1415017ca20c\" (UID: \"32c4dbf1-2dfe-44a7-be21-1415017ca20c\") " Jan 20 17:16:17 crc kubenswrapper[4558]: I0120 17:16:17.475055 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32c4dbf1-2dfe-44a7-be21-1415017ca20c-combined-ca-bundle\") pod \"32c4dbf1-2dfe-44a7-be21-1415017ca20c\" (UID: \"32c4dbf1-2dfe-44a7-be21-1415017ca20c\") " Jan 20 17:16:17 crc kubenswrapper[4558]: I0120 17:16:17.478290 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32c4dbf1-2dfe-44a7-be21-1415017ca20c-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "32c4dbf1-2dfe-44a7-be21-1415017ca20c" (UID: "32c4dbf1-2dfe-44a7-be21-1415017ca20c"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:17 crc kubenswrapper[4558]: I0120 17:16:17.478725 4558 generic.go:334] "Generic (PLEG): container finished" podID="32c4dbf1-2dfe-44a7-be21-1415017ca20c" containerID="40fb072bac172dc7253b14ca577f8224ce05af40e481c08769f4d2e454f5196a" exitCode=0 Jan 20 17:16:17 crc kubenswrapper[4558]: I0120 17:16:17.478840 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-857895d98b-n66x9" Jan 20 17:16:17 crc kubenswrapper[4558]: I0120 17:16:17.479401 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-857895d98b-n66x9" event={"ID":"32c4dbf1-2dfe-44a7-be21-1415017ca20c","Type":"ContainerDied","Data":"40fb072bac172dc7253b14ca577f8224ce05af40e481c08769f4d2e454f5196a"} Jan 20 17:16:17 crc kubenswrapper[4558]: I0120 17:16:17.479434 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-857895d98b-n66x9" event={"ID":"32c4dbf1-2dfe-44a7-be21-1415017ca20c","Type":"ContainerDied","Data":"e846cc85499f6e69b78230561840eb894c958779d3199834bbede89077272e1e"} Jan 20 17:16:17 crc kubenswrapper[4558]: I0120 17:16:17.479452 4558 scope.go:117] "RemoveContainer" containerID="bfe3f187246a119dce5b9bd953345bf403b08cd980efbe013f04ba0712f25420" Jan 20 17:16:17 crc kubenswrapper[4558]: I0120 17:16:17.480745 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/32c4dbf1-2dfe-44a7-be21-1415017ca20c-kube-api-access-rfbbj" (OuterVolumeSpecName: "kube-api-access-rfbbj") pod "32c4dbf1-2dfe-44a7-be21-1415017ca20c" (UID: "32c4dbf1-2dfe-44a7-be21-1415017ca20c"). InnerVolumeSpecName "kube-api-access-rfbbj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:16:17 crc kubenswrapper[4558]: I0120 17:16:17.482193 4558 generic.go:334] "Generic (PLEG): container finished" podID="6e03bea8-761e-4d6d-bcaf-f4397b3ec310" containerID="f95d7322fcfd2ddfaa632b4101e4e708456d08454d53d6519b66c6fc9742857f" exitCode=0 Jan 20 17:16:17 crc kubenswrapper[4558]: I0120 17:16:17.482245 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-996c57c45-7hr9m" event={"ID":"6e03bea8-761e-4d6d-bcaf-f4397b3ec310","Type":"ContainerDied","Data":"f95d7322fcfd2ddfaa632b4101e4e708456d08454d53d6519b66c6fc9742857f"} Jan 20 17:16:17 crc kubenswrapper[4558]: I0120 17:16:17.482271 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-996c57c45-7hr9m" event={"ID":"6e03bea8-761e-4d6d-bcaf-f4397b3ec310","Type":"ContainerDied","Data":"b73a4ba20b3eb0fb213ad6d73eec657ba4caec4f0bcac84dbbc9ab20976ab8d7"} Jan 20 17:16:17 crc kubenswrapper[4558]: I0120 17:16:17.482328 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-996c57c45-7hr9m" Jan 20 17:16:17 crc kubenswrapper[4558]: I0120 17:16:17.484088 4558 generic.go:334] "Generic (PLEG): container finished" podID="2510d24d-b593-4196-92bc-ddde09cc7c15" containerID="f630bf27700ad23f76cb69b14ae9eed65e06889873b8b21234996b7a87609328" exitCode=0 Jan 20 17:16:17 crc kubenswrapper[4558]: I0120 17:16:17.484122 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/watcher-decision-engine-0" event={"ID":"2510d24d-b593-4196-92bc-ddde09cc7c15","Type":"ContainerDied","Data":"f630bf27700ad23f76cb69b14ae9eed65e06889873b8b21234996b7a87609328"} Jan 20 17:16:17 crc kubenswrapper[4558]: I0120 17:16:17.507534 4558 scope.go:117] "RemoveContainer" containerID="40fb072bac172dc7253b14ca577f8224ce05af40e481c08769f4d2e454f5196a" Jan 20 17:16:17 crc kubenswrapper[4558]: I0120 17:16:17.522556 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32c4dbf1-2dfe-44a7-be21-1415017ca20c-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "32c4dbf1-2dfe-44a7-be21-1415017ca20c" (UID: "32c4dbf1-2dfe-44a7-be21-1415017ca20c"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:17 crc kubenswrapper[4558]: I0120 17:16:17.522750 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32c4dbf1-2dfe-44a7-be21-1415017ca20c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "32c4dbf1-2dfe-44a7-be21-1415017ca20c" (UID: "32c4dbf1-2dfe-44a7-be21-1415017ca20c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:17 crc kubenswrapper[4558]: I0120 17:16:17.523756 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-996c57c45-7hr9m"] Jan 20 17:16:17 crc kubenswrapper[4558]: I0120 17:16:17.529641 4558 scope.go:117] "RemoveContainer" containerID="bfe3f187246a119dce5b9bd953345bf403b08cd980efbe013f04ba0712f25420" Jan 20 17:16:17 crc kubenswrapper[4558]: I0120 17:16:17.530092 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-996c57c45-7hr9m"] Jan 20 17:16:17 crc kubenswrapper[4558]: E0120 17:16:17.530135 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bfe3f187246a119dce5b9bd953345bf403b08cd980efbe013f04ba0712f25420\": container with ID starting with bfe3f187246a119dce5b9bd953345bf403b08cd980efbe013f04ba0712f25420 not found: ID does not exist" containerID="bfe3f187246a119dce5b9bd953345bf403b08cd980efbe013f04ba0712f25420" Jan 20 17:16:17 crc kubenswrapper[4558]: I0120 17:16:17.530184 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bfe3f187246a119dce5b9bd953345bf403b08cd980efbe013f04ba0712f25420"} err="failed to get container status \"bfe3f187246a119dce5b9bd953345bf403b08cd980efbe013f04ba0712f25420\": rpc error: code = NotFound desc = could not find container \"bfe3f187246a119dce5b9bd953345bf403b08cd980efbe013f04ba0712f25420\": container with ID starting with bfe3f187246a119dce5b9bd953345bf403b08cd980efbe013f04ba0712f25420 not found: ID does not exist" Jan 20 17:16:17 crc kubenswrapper[4558]: I0120 17:16:17.530211 4558 scope.go:117] "RemoveContainer" containerID="40fb072bac172dc7253b14ca577f8224ce05af40e481c08769f4d2e454f5196a" Jan 20 17:16:17 crc kubenswrapper[4558]: E0120 17:16:17.530468 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"40fb072bac172dc7253b14ca577f8224ce05af40e481c08769f4d2e454f5196a\": container with ID starting with 40fb072bac172dc7253b14ca577f8224ce05af40e481c08769f4d2e454f5196a not found: ID does not exist" containerID="40fb072bac172dc7253b14ca577f8224ce05af40e481c08769f4d2e454f5196a" Jan 20 17:16:17 crc kubenswrapper[4558]: I0120 17:16:17.530505 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"40fb072bac172dc7253b14ca577f8224ce05af40e481c08769f4d2e454f5196a"} err="failed to get container status \"40fb072bac172dc7253b14ca577f8224ce05af40e481c08769f4d2e454f5196a\": rpc error: code = NotFound desc = could not find container \"40fb072bac172dc7253b14ca577f8224ce05af40e481c08769f4d2e454f5196a\": container with ID starting with 40fb072bac172dc7253b14ca577f8224ce05af40e481c08769f4d2e454f5196a not found: ID does not exist" Jan 20 17:16:17 crc kubenswrapper[4558]: I0120 17:16:17.530529 4558 scope.go:117] "RemoveContainer" containerID="f95d7322fcfd2ddfaa632b4101e4e708456d08454d53d6519b66c6fc9742857f" Jan 20 17:16:17 crc kubenswrapper[4558]: I0120 17:16:17.535507 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32c4dbf1-2dfe-44a7-be21-1415017ca20c-config" (OuterVolumeSpecName: "config") pod "32c4dbf1-2dfe-44a7-be21-1415017ca20c" (UID: "32c4dbf1-2dfe-44a7-be21-1415017ca20c"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:17 crc kubenswrapper[4558]: I0120 17:16:17.540832 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32c4dbf1-2dfe-44a7-be21-1415017ca20c-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "32c4dbf1-2dfe-44a7-be21-1415017ca20c" (UID: "32c4dbf1-2dfe-44a7-be21-1415017ca20c"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:17 crc kubenswrapper[4558]: I0120 17:16:17.563879 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/watcher-decision-engine-0" Jan 20 17:16:17 crc kubenswrapper[4558]: I0120 17:16:17.565302 4558 scope.go:117] "RemoveContainer" containerID="cd19794387027d0145a1234413d0cd07984694f7a7bd98888d3a026bd9839c77" Jan 20 17:16:17 crc kubenswrapper[4558]: I0120 17:16:17.566509 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32c4dbf1-2dfe-44a7-be21-1415017ca20c-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "32c4dbf1-2dfe-44a7-be21-1415017ca20c" (UID: "32c4dbf1-2dfe-44a7-be21-1415017ca20c"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:17 crc kubenswrapper[4558]: I0120 17:16:17.579139 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/32c4dbf1-2dfe-44a7-be21-1415017ca20c-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:17 crc kubenswrapper[4558]: I0120 17:16:17.579223 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/32c4dbf1-2dfe-44a7-be21-1415017ca20c-httpd-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:17 crc kubenswrapper[4558]: I0120 17:16:17.579306 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/32c4dbf1-2dfe-44a7-be21-1415017ca20c-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:17 crc kubenswrapper[4558]: I0120 17:16:17.579319 4558 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/32c4dbf1-2dfe-44a7-be21-1415017ca20c-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:17 crc kubenswrapper[4558]: I0120 17:16:17.579328 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32c4dbf1-2dfe-44a7-be21-1415017ca20c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:17 crc kubenswrapper[4558]: I0120 17:16:17.579337 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/32c4dbf1-2dfe-44a7-be21-1415017ca20c-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:17 crc kubenswrapper[4558]: I0120 17:16:17.579347 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rfbbj\" (UniqueName: \"kubernetes.io/projected/32c4dbf1-2dfe-44a7-be21-1415017ca20c-kube-api-access-rfbbj\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:17 crc kubenswrapper[4558]: I0120 17:16:17.583524 4558 scope.go:117] "RemoveContainer" containerID="f95d7322fcfd2ddfaa632b4101e4e708456d08454d53d6519b66c6fc9742857f" Jan 20 17:16:17 crc kubenswrapper[4558]: E0120 17:16:17.584047 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"f95d7322fcfd2ddfaa632b4101e4e708456d08454d53d6519b66c6fc9742857f\": container with ID starting with f95d7322fcfd2ddfaa632b4101e4e708456d08454d53d6519b66c6fc9742857f not found: ID does not exist" containerID="f95d7322fcfd2ddfaa632b4101e4e708456d08454d53d6519b66c6fc9742857f" Jan 20 17:16:17 crc kubenswrapper[4558]: I0120 17:16:17.584078 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f95d7322fcfd2ddfaa632b4101e4e708456d08454d53d6519b66c6fc9742857f"} err="failed to get container status \"f95d7322fcfd2ddfaa632b4101e4e708456d08454d53d6519b66c6fc9742857f\": rpc error: code = NotFound desc = could not find container \"f95d7322fcfd2ddfaa632b4101e4e708456d08454d53d6519b66c6fc9742857f\": container with ID starting with f95d7322fcfd2ddfaa632b4101e4e708456d08454d53d6519b66c6fc9742857f not found: ID does not exist" Jan 20 17:16:17 crc kubenswrapper[4558]: I0120 17:16:17.584096 4558 scope.go:117] "RemoveContainer" containerID="cd19794387027d0145a1234413d0cd07984694f7a7bd98888d3a026bd9839c77" Jan 20 17:16:17 crc kubenswrapper[4558]: E0120 17:16:17.584450 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cd19794387027d0145a1234413d0cd07984694f7a7bd98888d3a026bd9839c77\": container with ID starting with cd19794387027d0145a1234413d0cd07984694f7a7bd98888d3a026bd9839c77 not found: ID does not exist" containerID="cd19794387027d0145a1234413d0cd07984694f7a7bd98888d3a026bd9839c77" Jan 20 17:16:17 crc kubenswrapper[4558]: I0120 17:16:17.584475 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cd19794387027d0145a1234413d0cd07984694f7a7bd98888d3a026bd9839c77"} err="failed to get container status \"cd19794387027d0145a1234413d0cd07984694f7a7bd98888d3a026bd9839c77\": rpc error: code = NotFound desc = could not find container \"cd19794387027d0145a1234413d0cd07984694f7a7bd98888d3a026bd9839c77\": container with ID starting with cd19794387027d0145a1234413d0cd07984694f7a7bd98888d3a026bd9839c77 not found: ID does not exist" Jan 20 17:16:17 crc kubenswrapper[4558]: I0120 17:16:17.680268 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2510d24d-b593-4196-92bc-ddde09cc7c15-logs\") pod \"2510d24d-b593-4196-92bc-ddde09cc7c15\" (UID: \"2510d24d-b593-4196-92bc-ddde09cc7c15\") " Jan 20 17:16:17 crc kubenswrapper[4558]: I0120 17:16:17.680328 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2510d24d-b593-4196-92bc-ddde09cc7c15-config-data\") pod \"2510d24d-b593-4196-92bc-ddde09cc7c15\" (UID: \"2510d24d-b593-4196-92bc-ddde09cc7c15\") " Jan 20 17:16:17 crc kubenswrapper[4558]: I0120 17:16:17.680376 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2510d24d-b593-4196-92bc-ddde09cc7c15-combined-ca-bundle\") pod \"2510d24d-b593-4196-92bc-ddde09cc7c15\" (UID: \"2510d24d-b593-4196-92bc-ddde09cc7c15\") " Jan 20 17:16:17 crc kubenswrapper[4558]: I0120 17:16:17.680398 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jjngf\" (UniqueName: \"kubernetes.io/projected/2510d24d-b593-4196-92bc-ddde09cc7c15-kube-api-access-jjngf\") pod \"2510d24d-b593-4196-92bc-ddde09cc7c15\" (UID: \"2510d24d-b593-4196-92bc-ddde09cc7c15\") " Jan 20 17:16:17 crc kubenswrapper[4558]: 
I0120 17:16:17.680428 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/2510d24d-b593-4196-92bc-ddde09cc7c15-custom-prometheus-ca\") pod \"2510d24d-b593-4196-92bc-ddde09cc7c15\" (UID: \"2510d24d-b593-4196-92bc-ddde09cc7c15\") " Jan 20 17:16:17 crc kubenswrapper[4558]: I0120 17:16:17.680589 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2510d24d-b593-4196-92bc-ddde09cc7c15-logs" (OuterVolumeSpecName: "logs") pod "2510d24d-b593-4196-92bc-ddde09cc7c15" (UID: "2510d24d-b593-4196-92bc-ddde09cc7c15"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:16:17 crc kubenswrapper[4558]: I0120 17:16:17.680881 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2510d24d-b593-4196-92bc-ddde09cc7c15-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:17 crc kubenswrapper[4558]: I0120 17:16:17.684428 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2510d24d-b593-4196-92bc-ddde09cc7c15-kube-api-access-jjngf" (OuterVolumeSpecName: "kube-api-access-jjngf") pod "2510d24d-b593-4196-92bc-ddde09cc7c15" (UID: "2510d24d-b593-4196-92bc-ddde09cc7c15"). InnerVolumeSpecName "kube-api-access-jjngf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:16:17 crc kubenswrapper[4558]: I0120 17:16:17.704069 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2510d24d-b593-4196-92bc-ddde09cc7c15-custom-prometheus-ca" (OuterVolumeSpecName: "custom-prometheus-ca") pod "2510d24d-b593-4196-92bc-ddde09cc7c15" (UID: "2510d24d-b593-4196-92bc-ddde09cc7c15"). InnerVolumeSpecName "custom-prometheus-ca". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:17 crc kubenswrapper[4558]: I0120 17:16:17.707290 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2510d24d-b593-4196-92bc-ddde09cc7c15-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2510d24d-b593-4196-92bc-ddde09cc7c15" (UID: "2510d24d-b593-4196-92bc-ddde09cc7c15"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:17 crc kubenswrapper[4558]: I0120 17:16:17.728644 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2510d24d-b593-4196-92bc-ddde09cc7c15-config-data" (OuterVolumeSpecName: "config-data") pod "2510d24d-b593-4196-92bc-ddde09cc7c15" (UID: "2510d24d-b593-4196-92bc-ddde09cc7c15"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:17 crc kubenswrapper[4558]: I0120 17:16:17.782301 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2510d24d-b593-4196-92bc-ddde09cc7c15-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:17 crc kubenswrapper[4558]: I0120 17:16:17.782337 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jjngf\" (UniqueName: \"kubernetes.io/projected/2510d24d-b593-4196-92bc-ddde09cc7c15-kube-api-access-jjngf\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:17 crc kubenswrapper[4558]: I0120 17:16:17.782351 4558 reconciler_common.go:293] "Volume detached for volume \"custom-prometheus-ca\" (UniqueName: \"kubernetes.io/secret/2510d24d-b593-4196-92bc-ddde09cc7c15-custom-prometheus-ca\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:17 crc kubenswrapper[4558]: I0120 17:16:17.782365 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2510d24d-b593-4196-92bc-ddde09cc7c15-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:17 crc kubenswrapper[4558]: I0120 17:16:17.815718 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-857895d98b-n66x9"] Jan 20 17:16:17 crc kubenswrapper[4558]: I0120 17:16:17.825398 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-857895d98b-n66x9"] Jan 20 17:16:18 crc kubenswrapper[4558]: I0120 17:16:18.494825 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/watcher-decision-engine-0" Jan 20 17:16:18 crc kubenswrapper[4558]: I0120 17:16:18.494813 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/watcher-decision-engine-0" event={"ID":"2510d24d-b593-4196-92bc-ddde09cc7c15","Type":"ContainerDied","Data":"bce59ba9886161320d99f71934595cd7b5badc8291ab33b3064137614c70f2f1"} Jan 20 17:16:18 crc kubenswrapper[4558]: I0120 17:16:18.494984 4558 scope.go:117] "RemoveContainer" containerID="f630bf27700ad23f76cb69b14ae9eed65e06889873b8b21234996b7a87609328" Jan 20 17:16:18 crc kubenswrapper[4558]: I0120 17:16:18.522636 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/watcher-decision-engine-0"] Jan 20 17:16:18 crc kubenswrapper[4558]: I0120 17:16:18.527383 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/watcher-decision-engine-0"] Jan 20 17:16:18 crc kubenswrapper[4558]: I0120 17:16:18.574251 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2510d24d-b593-4196-92bc-ddde09cc7c15" path="/var/lib/kubelet/pods/2510d24d-b593-4196-92bc-ddde09cc7c15/volumes" Jan 20 17:16:18 crc kubenswrapper[4558]: I0120 17:16:18.574736 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="32c4dbf1-2dfe-44a7-be21-1415017ca20c" path="/var/lib/kubelet/pods/32c4dbf1-2dfe-44a7-be21-1415017ca20c/volumes" Jan 20 17:16:18 crc kubenswrapper[4558]: I0120 17:16:18.575254 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6e03bea8-761e-4d6d-bcaf-f4397b3ec310" path="/var/lib/kubelet/pods/6e03bea8-761e-4d6d-bcaf-f4397b3ec310/volumes" Jan 20 17:16:36 crc kubenswrapper[4558]: I0120 17:16:36.135888 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:16:36 crc kubenswrapper[4558]: I0120 17:16:36.248873 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5-cache\") pod \"16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5\" (UID: \"16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5\") " Jan 20 17:16:36 crc kubenswrapper[4558]: I0120 17:16:36.248933 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5-lock\") pod \"16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5\" (UID: \"16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5\") " Jan 20 17:16:36 crc kubenswrapper[4558]: I0120 17:16:36.249021 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5-etc-swift\") pod \"16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5\" (UID: \"16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5\") " Jan 20 17:16:36 crc kubenswrapper[4558]: I0120 17:16:36.249187 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lkqx2\" (UniqueName: \"kubernetes.io/projected/16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5-kube-api-access-lkqx2\") pod \"16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5\" (UID: \"16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5\") " Jan 20 17:16:36 crc kubenswrapper[4558]: I0120 17:16:36.249212 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swift\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5\" (UID: \"16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5\") " Jan 20 17:16:36 crc kubenswrapper[4558]: I0120 17:16:36.249647 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5-lock" (OuterVolumeSpecName: "lock") pod "16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" (UID: "16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5"). InnerVolumeSpecName "lock". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:16:36 crc kubenswrapper[4558]: I0120 17:16:36.250379 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5-cache" (OuterVolumeSpecName: "cache") pod "16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" (UID: "16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5"). InnerVolumeSpecName "cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:16:36 crc kubenswrapper[4558]: I0120 17:16:36.267350 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5-kube-api-access-lkqx2" (OuterVolumeSpecName: "kube-api-access-lkqx2") pod "16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" (UID: "16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5"). InnerVolumeSpecName "kube-api-access-lkqx2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:16:36 crc kubenswrapper[4558]: I0120 17:16:36.268708 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "swift") pod "16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" (UID: "16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5"). InnerVolumeSpecName "local-storage08-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:16:36 crc kubenswrapper[4558]: I0120 17:16:36.302373 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" (UID: "16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:16:36 crc kubenswrapper[4558]: I0120 17:16:36.351029 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lkqx2\" (UniqueName: \"kubernetes.io/projected/16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5-kube-api-access-lkqx2\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:36 crc kubenswrapper[4558]: I0120 17:16:36.351089 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Jan 20 17:16:36 crc kubenswrapper[4558]: I0120 17:16:36.351101 4558 reconciler_common.go:293] "Volume detached for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5-cache\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:36 crc kubenswrapper[4558]: I0120 17:16:36.351110 4558 reconciler_common.go:293] "Volume detached for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5-lock\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:36 crc kubenswrapper[4558]: I0120 17:16:36.351119 4558 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:36 crc kubenswrapper[4558]: I0120 17:16:36.363090 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc" Jan 20 17:16:36 crc kubenswrapper[4558]: I0120 17:16:36.453058 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:36 crc kubenswrapper[4558]: I0120 17:16:36.687690 4558 generic.go:334] "Generic (PLEG): container finished" podID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" containerID="99c4bacb5b9768a4caf100f0cb1002d9cece8b806e2823da791412ee816ffda8" exitCode=137 Jan 20 17:16:36 crc kubenswrapper[4558]: I0120 17:16:36.687794 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5","Type":"ContainerDied","Data":"99c4bacb5b9768a4caf100f0cb1002d9cece8b806e2823da791412ee816ffda8"} Jan 20 17:16:36 crc kubenswrapper[4558]: I0120 17:16:36.687846 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5","Type":"ContainerDied","Data":"1cd0a2990c13e48d1af8da127be8da6b524c7be98b42a509147ce0af41f1c26c"} Jan 20 17:16:36 crc kubenswrapper[4558]: I0120 17:16:36.687872 4558 scope.go:117] "RemoveContainer" containerID="99c4bacb5b9768a4caf100f0cb1002d9cece8b806e2823da791412ee816ffda8" Jan 20 17:16:36 crc kubenswrapper[4558]: I0120 17:16:36.694064 4558 generic.go:334] "Generic (PLEG): container finished" podID="3072fee5-c73d-407d-80c7-c376628ec545" containerID="fa635aef0a0203fdb13831ee491f982a0ac51b1bc7f7d5c51955065fd5631a37" 
exitCode=137 Jan 20 17:16:36 crc kubenswrapper[4558]: I0120 17:16:36.694149 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"3072fee5-c73d-407d-80c7-c376628ec545","Type":"ContainerDied","Data":"fa635aef0a0203fdb13831ee491f982a0ac51b1bc7f7d5c51955065fd5631a37"} Jan 20 17:16:36 crc kubenswrapper[4558]: I0120 17:16:36.718811 4558 scope.go:117] "RemoveContainer" containerID="f85ec25adaf01df7a7853545852faa186e28a1806e5769092b47b3631e9f6a39" Jan 20 17:16:36 crc kubenswrapper[4558]: I0120 17:16:36.719120 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:16:36 crc kubenswrapper[4558]: I0120 17:16:36.736455 4558 scope.go:117] "RemoveContainer" containerID="a05f333c41b9d8ef7c02ce57e5a66f44cf679c3dd3e8cbc037a0abf4cb9c1076" Jan 20 17:16:36 crc kubenswrapper[4558]: I0120 17:16:36.747206 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-storage-0"] Jan 20 17:16:36 crc kubenswrapper[4558]: I0120 17:16:36.751611 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/swift-storage-0"] Jan 20 17:16:36 crc kubenswrapper[4558]: I0120 17:16:36.772375 4558 scope.go:117] "RemoveContainer" containerID="351e496453cf020b285bf45cc107bc71f86bd776035bd828428d72e1c7062529" Jan 20 17:16:36 crc kubenswrapper[4558]: I0120 17:16:36.788522 4558 scope.go:117] "RemoveContainer" containerID="e9011c98afeb74bd61067467d261960e71d5f4ce02439018a751b87779a9a042" Jan 20 17:16:36 crc kubenswrapper[4558]: I0120 17:16:36.812290 4558 scope.go:117] "RemoveContainer" containerID="0ffad205c7e0b9a3e7a3ed7b7494d0c934e33af468fd39ec2025c4bc685f9b2a" Jan 20 17:16:36 crc kubenswrapper[4558]: I0120 17:16:36.829507 4558 scope.go:117] "RemoveContainer" containerID="e8487e2e852bfbcd349f6ab70c8f1c9163850c213b76cd2ed1af8305cfe8bbf6" Jan 20 17:16:36 crc kubenswrapper[4558]: I0120 17:16:36.846697 4558 scope.go:117] "RemoveContainer" containerID="b448c6813dc21e1609badba9c414f16cfa90d4bc879f828fc03dc5e2d8c5c01d" Jan 20 17:16:36 crc kubenswrapper[4558]: I0120 17:16:36.872025 4558 scope.go:117] "RemoveContainer" containerID="f4843ed0e8d1373bb2683da97ab873974d82c40ca96c66ac0e816208a3870873" Jan 20 17:16:36 crc kubenswrapper[4558]: I0120 17:16:36.894891 4558 scope.go:117] "RemoveContainer" containerID="42f3e6080a5aaa7c6b331d0120550bd3df0ded5cc2d89acdcce42e051c81d082" Jan 20 17:16:36 crc kubenswrapper[4558]: I0120 17:16:36.918323 4558 scope.go:117] "RemoveContainer" containerID="0264294b8782deaf18a7c21e66a3cd60ef8bc25c146077c5f8dd36370f4b9f31" Jan 20 17:16:36 crc kubenswrapper[4558]: I0120 17:16:36.962286 4558 scope.go:117] "RemoveContainer" containerID="3042c88a8b0acc604598545abaf566b25ac21482df62b0a3a116f13f8566a2af" Jan 20 17:16:36 crc kubenswrapper[4558]: I0120 17:16:36.981496 4558 scope.go:117] "RemoveContainer" containerID="a3d60956ff7af8b2370d1374fe03e956eb5d0ac246c55b421bb4276908799a3f" Jan 20 17:16:37 crc kubenswrapper[4558]: I0120 17:16:37.002995 4558 scope.go:117] "RemoveContainer" containerID="287de52d7a1cbdfd30b38ebc5ce6836c6e7462c638fb7b5cb4514ff2fd994ca5" Jan 20 17:16:37 crc kubenswrapper[4558]: I0120 17:16:37.022999 4558 scope.go:117] "RemoveContainer" containerID="d34ab3fc8aadd821e92e2bf92349870b0e59665705b0c279da24491935dbfa40" Jan 20 17:16:37 crc kubenswrapper[4558]: I0120 17:16:37.040637 4558 scope.go:117] "RemoveContainer" containerID="99c4bacb5b9768a4caf100f0cb1002d9cece8b806e2823da791412ee816ffda8" Jan 20 17:16:37 crc 
kubenswrapper[4558]: E0120 17:16:37.040965 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"99c4bacb5b9768a4caf100f0cb1002d9cece8b806e2823da791412ee816ffda8\": container with ID starting with 99c4bacb5b9768a4caf100f0cb1002d9cece8b806e2823da791412ee816ffda8 not found: ID does not exist" containerID="99c4bacb5b9768a4caf100f0cb1002d9cece8b806e2823da791412ee816ffda8" Jan 20 17:16:37 crc kubenswrapper[4558]: I0120 17:16:37.040998 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"99c4bacb5b9768a4caf100f0cb1002d9cece8b806e2823da791412ee816ffda8"} err="failed to get container status \"99c4bacb5b9768a4caf100f0cb1002d9cece8b806e2823da791412ee816ffda8\": rpc error: code = NotFound desc = could not find container \"99c4bacb5b9768a4caf100f0cb1002d9cece8b806e2823da791412ee816ffda8\": container with ID starting with 99c4bacb5b9768a4caf100f0cb1002d9cece8b806e2823da791412ee816ffda8 not found: ID does not exist" Jan 20 17:16:37 crc kubenswrapper[4558]: I0120 17:16:37.041018 4558 scope.go:117] "RemoveContainer" containerID="f85ec25adaf01df7a7853545852faa186e28a1806e5769092b47b3631e9f6a39" Jan 20 17:16:37 crc kubenswrapper[4558]: E0120 17:16:37.041335 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f85ec25adaf01df7a7853545852faa186e28a1806e5769092b47b3631e9f6a39\": container with ID starting with f85ec25adaf01df7a7853545852faa186e28a1806e5769092b47b3631e9f6a39 not found: ID does not exist" containerID="f85ec25adaf01df7a7853545852faa186e28a1806e5769092b47b3631e9f6a39" Jan 20 17:16:37 crc kubenswrapper[4558]: I0120 17:16:37.041378 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f85ec25adaf01df7a7853545852faa186e28a1806e5769092b47b3631e9f6a39"} err="failed to get container status \"f85ec25adaf01df7a7853545852faa186e28a1806e5769092b47b3631e9f6a39\": rpc error: code = NotFound desc = could not find container \"f85ec25adaf01df7a7853545852faa186e28a1806e5769092b47b3631e9f6a39\": container with ID starting with f85ec25adaf01df7a7853545852faa186e28a1806e5769092b47b3631e9f6a39 not found: ID does not exist" Jan 20 17:16:37 crc kubenswrapper[4558]: I0120 17:16:37.041403 4558 scope.go:117] "RemoveContainer" containerID="a05f333c41b9d8ef7c02ce57e5a66f44cf679c3dd3e8cbc037a0abf4cb9c1076" Jan 20 17:16:37 crc kubenswrapper[4558]: E0120 17:16:37.041915 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a05f333c41b9d8ef7c02ce57e5a66f44cf679c3dd3e8cbc037a0abf4cb9c1076\": container with ID starting with a05f333c41b9d8ef7c02ce57e5a66f44cf679c3dd3e8cbc037a0abf4cb9c1076 not found: ID does not exist" containerID="a05f333c41b9d8ef7c02ce57e5a66f44cf679c3dd3e8cbc037a0abf4cb9c1076" Jan 20 17:16:37 crc kubenswrapper[4558]: I0120 17:16:37.041944 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a05f333c41b9d8ef7c02ce57e5a66f44cf679c3dd3e8cbc037a0abf4cb9c1076"} err="failed to get container status \"a05f333c41b9d8ef7c02ce57e5a66f44cf679c3dd3e8cbc037a0abf4cb9c1076\": rpc error: code = NotFound desc = could not find container \"a05f333c41b9d8ef7c02ce57e5a66f44cf679c3dd3e8cbc037a0abf4cb9c1076\": container with ID starting with a05f333c41b9d8ef7c02ce57e5a66f44cf679c3dd3e8cbc037a0abf4cb9c1076 not found: ID does not exist" Jan 20 17:16:37 crc kubenswrapper[4558]: 
I0120 17:16:37.041961 4558 scope.go:117] "RemoveContainer" containerID="351e496453cf020b285bf45cc107bc71f86bd776035bd828428d72e1c7062529" Jan 20 17:16:37 crc kubenswrapper[4558]: E0120 17:16:37.042455 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"351e496453cf020b285bf45cc107bc71f86bd776035bd828428d72e1c7062529\": container with ID starting with 351e496453cf020b285bf45cc107bc71f86bd776035bd828428d72e1c7062529 not found: ID does not exist" containerID="351e496453cf020b285bf45cc107bc71f86bd776035bd828428d72e1c7062529" Jan 20 17:16:37 crc kubenswrapper[4558]: I0120 17:16:37.042487 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"351e496453cf020b285bf45cc107bc71f86bd776035bd828428d72e1c7062529"} err="failed to get container status \"351e496453cf020b285bf45cc107bc71f86bd776035bd828428d72e1c7062529\": rpc error: code = NotFound desc = could not find container \"351e496453cf020b285bf45cc107bc71f86bd776035bd828428d72e1c7062529\": container with ID starting with 351e496453cf020b285bf45cc107bc71f86bd776035bd828428d72e1c7062529 not found: ID does not exist" Jan 20 17:16:37 crc kubenswrapper[4558]: I0120 17:16:37.042503 4558 scope.go:117] "RemoveContainer" containerID="e9011c98afeb74bd61067467d261960e71d5f4ce02439018a751b87779a9a042" Jan 20 17:16:37 crc kubenswrapper[4558]: E0120 17:16:37.043406 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e9011c98afeb74bd61067467d261960e71d5f4ce02439018a751b87779a9a042\": container with ID starting with e9011c98afeb74bd61067467d261960e71d5f4ce02439018a751b87779a9a042 not found: ID does not exist" containerID="e9011c98afeb74bd61067467d261960e71d5f4ce02439018a751b87779a9a042" Jan 20 17:16:37 crc kubenswrapper[4558]: I0120 17:16:37.043435 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e9011c98afeb74bd61067467d261960e71d5f4ce02439018a751b87779a9a042"} err="failed to get container status \"e9011c98afeb74bd61067467d261960e71d5f4ce02439018a751b87779a9a042\": rpc error: code = NotFound desc = could not find container \"e9011c98afeb74bd61067467d261960e71d5f4ce02439018a751b87779a9a042\": container with ID starting with e9011c98afeb74bd61067467d261960e71d5f4ce02439018a751b87779a9a042 not found: ID does not exist" Jan 20 17:16:37 crc kubenswrapper[4558]: I0120 17:16:37.043451 4558 scope.go:117] "RemoveContainer" containerID="0ffad205c7e0b9a3e7a3ed7b7494d0c934e33af468fd39ec2025c4bc685f9b2a" Jan 20 17:16:37 crc kubenswrapper[4558]: E0120 17:16:37.043858 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0ffad205c7e0b9a3e7a3ed7b7494d0c934e33af468fd39ec2025c4bc685f9b2a\": container with ID starting with 0ffad205c7e0b9a3e7a3ed7b7494d0c934e33af468fd39ec2025c4bc685f9b2a not found: ID does not exist" containerID="0ffad205c7e0b9a3e7a3ed7b7494d0c934e33af468fd39ec2025c4bc685f9b2a" Jan 20 17:16:37 crc kubenswrapper[4558]: I0120 17:16:37.043898 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0ffad205c7e0b9a3e7a3ed7b7494d0c934e33af468fd39ec2025c4bc685f9b2a"} err="failed to get container status \"0ffad205c7e0b9a3e7a3ed7b7494d0c934e33af468fd39ec2025c4bc685f9b2a\": rpc error: code = NotFound desc = could not find container \"0ffad205c7e0b9a3e7a3ed7b7494d0c934e33af468fd39ec2025c4bc685f9b2a\": container 
with ID starting with 0ffad205c7e0b9a3e7a3ed7b7494d0c934e33af468fd39ec2025c4bc685f9b2a not found: ID does not exist" Jan 20 17:16:37 crc kubenswrapper[4558]: I0120 17:16:37.043931 4558 scope.go:117] "RemoveContainer" containerID="e8487e2e852bfbcd349f6ab70c8f1c9163850c213b76cd2ed1af8305cfe8bbf6" Jan 20 17:16:37 crc kubenswrapper[4558]: E0120 17:16:37.044190 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e8487e2e852bfbcd349f6ab70c8f1c9163850c213b76cd2ed1af8305cfe8bbf6\": container with ID starting with e8487e2e852bfbcd349f6ab70c8f1c9163850c213b76cd2ed1af8305cfe8bbf6 not found: ID does not exist" containerID="e8487e2e852bfbcd349f6ab70c8f1c9163850c213b76cd2ed1af8305cfe8bbf6" Jan 20 17:16:37 crc kubenswrapper[4558]: I0120 17:16:37.044209 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e8487e2e852bfbcd349f6ab70c8f1c9163850c213b76cd2ed1af8305cfe8bbf6"} err="failed to get container status \"e8487e2e852bfbcd349f6ab70c8f1c9163850c213b76cd2ed1af8305cfe8bbf6\": rpc error: code = NotFound desc = could not find container \"e8487e2e852bfbcd349f6ab70c8f1c9163850c213b76cd2ed1af8305cfe8bbf6\": container with ID starting with e8487e2e852bfbcd349f6ab70c8f1c9163850c213b76cd2ed1af8305cfe8bbf6 not found: ID does not exist" Jan 20 17:16:37 crc kubenswrapper[4558]: I0120 17:16:37.044222 4558 scope.go:117] "RemoveContainer" containerID="b448c6813dc21e1609badba9c414f16cfa90d4bc879f828fc03dc5e2d8c5c01d" Jan 20 17:16:37 crc kubenswrapper[4558]: E0120 17:16:37.044518 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b448c6813dc21e1609badba9c414f16cfa90d4bc879f828fc03dc5e2d8c5c01d\": container with ID starting with b448c6813dc21e1609badba9c414f16cfa90d4bc879f828fc03dc5e2d8c5c01d not found: ID does not exist" containerID="b448c6813dc21e1609badba9c414f16cfa90d4bc879f828fc03dc5e2d8c5c01d" Jan 20 17:16:37 crc kubenswrapper[4558]: I0120 17:16:37.044534 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b448c6813dc21e1609badba9c414f16cfa90d4bc879f828fc03dc5e2d8c5c01d"} err="failed to get container status \"b448c6813dc21e1609badba9c414f16cfa90d4bc879f828fc03dc5e2d8c5c01d\": rpc error: code = NotFound desc = could not find container \"b448c6813dc21e1609badba9c414f16cfa90d4bc879f828fc03dc5e2d8c5c01d\": container with ID starting with b448c6813dc21e1609badba9c414f16cfa90d4bc879f828fc03dc5e2d8c5c01d not found: ID does not exist" Jan 20 17:16:37 crc kubenswrapper[4558]: I0120 17:16:37.044547 4558 scope.go:117] "RemoveContainer" containerID="f4843ed0e8d1373bb2683da97ab873974d82c40ca96c66ac0e816208a3870873" Jan 20 17:16:37 crc kubenswrapper[4558]: E0120 17:16:37.044803 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f4843ed0e8d1373bb2683da97ab873974d82c40ca96c66ac0e816208a3870873\": container with ID starting with f4843ed0e8d1373bb2683da97ab873974d82c40ca96c66ac0e816208a3870873 not found: ID does not exist" containerID="f4843ed0e8d1373bb2683da97ab873974d82c40ca96c66ac0e816208a3870873" Jan 20 17:16:37 crc kubenswrapper[4558]: I0120 17:16:37.044819 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f4843ed0e8d1373bb2683da97ab873974d82c40ca96c66ac0e816208a3870873"} err="failed to get container status 
\"f4843ed0e8d1373bb2683da97ab873974d82c40ca96c66ac0e816208a3870873\": rpc error: code = NotFound desc = could not find container \"f4843ed0e8d1373bb2683da97ab873974d82c40ca96c66ac0e816208a3870873\": container with ID starting with f4843ed0e8d1373bb2683da97ab873974d82c40ca96c66ac0e816208a3870873 not found: ID does not exist" Jan 20 17:16:37 crc kubenswrapper[4558]: I0120 17:16:37.044830 4558 scope.go:117] "RemoveContainer" containerID="42f3e6080a5aaa7c6b331d0120550bd3df0ded5cc2d89acdcce42e051c81d082" Jan 20 17:16:37 crc kubenswrapper[4558]: E0120 17:16:37.045058 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"42f3e6080a5aaa7c6b331d0120550bd3df0ded5cc2d89acdcce42e051c81d082\": container with ID starting with 42f3e6080a5aaa7c6b331d0120550bd3df0ded5cc2d89acdcce42e051c81d082 not found: ID does not exist" containerID="42f3e6080a5aaa7c6b331d0120550bd3df0ded5cc2d89acdcce42e051c81d082" Jan 20 17:16:37 crc kubenswrapper[4558]: I0120 17:16:37.045079 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"42f3e6080a5aaa7c6b331d0120550bd3df0ded5cc2d89acdcce42e051c81d082"} err="failed to get container status \"42f3e6080a5aaa7c6b331d0120550bd3df0ded5cc2d89acdcce42e051c81d082\": rpc error: code = NotFound desc = could not find container \"42f3e6080a5aaa7c6b331d0120550bd3df0ded5cc2d89acdcce42e051c81d082\": container with ID starting with 42f3e6080a5aaa7c6b331d0120550bd3df0ded5cc2d89acdcce42e051c81d082 not found: ID does not exist" Jan 20 17:16:37 crc kubenswrapper[4558]: I0120 17:16:37.045095 4558 scope.go:117] "RemoveContainer" containerID="0264294b8782deaf18a7c21e66a3cd60ef8bc25c146077c5f8dd36370f4b9f31" Jan 20 17:16:37 crc kubenswrapper[4558]: E0120 17:16:37.048244 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0264294b8782deaf18a7c21e66a3cd60ef8bc25c146077c5f8dd36370f4b9f31\": container with ID starting with 0264294b8782deaf18a7c21e66a3cd60ef8bc25c146077c5f8dd36370f4b9f31 not found: ID does not exist" containerID="0264294b8782deaf18a7c21e66a3cd60ef8bc25c146077c5f8dd36370f4b9f31" Jan 20 17:16:37 crc kubenswrapper[4558]: I0120 17:16:37.048277 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0264294b8782deaf18a7c21e66a3cd60ef8bc25c146077c5f8dd36370f4b9f31"} err="failed to get container status \"0264294b8782deaf18a7c21e66a3cd60ef8bc25c146077c5f8dd36370f4b9f31\": rpc error: code = NotFound desc = could not find container \"0264294b8782deaf18a7c21e66a3cd60ef8bc25c146077c5f8dd36370f4b9f31\": container with ID starting with 0264294b8782deaf18a7c21e66a3cd60ef8bc25c146077c5f8dd36370f4b9f31 not found: ID does not exist" Jan 20 17:16:37 crc kubenswrapper[4558]: I0120 17:16:37.048302 4558 scope.go:117] "RemoveContainer" containerID="3042c88a8b0acc604598545abaf566b25ac21482df62b0a3a116f13f8566a2af" Jan 20 17:16:37 crc kubenswrapper[4558]: E0120 17:16:37.048598 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3042c88a8b0acc604598545abaf566b25ac21482df62b0a3a116f13f8566a2af\": container with ID starting with 3042c88a8b0acc604598545abaf566b25ac21482df62b0a3a116f13f8566a2af not found: ID does not exist" containerID="3042c88a8b0acc604598545abaf566b25ac21482df62b0a3a116f13f8566a2af" Jan 20 17:16:37 crc kubenswrapper[4558]: I0120 17:16:37.048619 4558 pod_container_deletor.go:53] 
"DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3042c88a8b0acc604598545abaf566b25ac21482df62b0a3a116f13f8566a2af"} err="failed to get container status \"3042c88a8b0acc604598545abaf566b25ac21482df62b0a3a116f13f8566a2af\": rpc error: code = NotFound desc = could not find container \"3042c88a8b0acc604598545abaf566b25ac21482df62b0a3a116f13f8566a2af\": container with ID starting with 3042c88a8b0acc604598545abaf566b25ac21482df62b0a3a116f13f8566a2af not found: ID does not exist" Jan 20 17:16:37 crc kubenswrapper[4558]: I0120 17:16:37.048633 4558 scope.go:117] "RemoveContainer" containerID="a3d60956ff7af8b2370d1374fe03e956eb5d0ac246c55b421bb4276908799a3f" Jan 20 17:16:37 crc kubenswrapper[4558]: E0120 17:16:37.048914 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a3d60956ff7af8b2370d1374fe03e956eb5d0ac246c55b421bb4276908799a3f\": container with ID starting with a3d60956ff7af8b2370d1374fe03e956eb5d0ac246c55b421bb4276908799a3f not found: ID does not exist" containerID="a3d60956ff7af8b2370d1374fe03e956eb5d0ac246c55b421bb4276908799a3f" Jan 20 17:16:37 crc kubenswrapper[4558]: I0120 17:16:37.048938 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a3d60956ff7af8b2370d1374fe03e956eb5d0ac246c55b421bb4276908799a3f"} err="failed to get container status \"a3d60956ff7af8b2370d1374fe03e956eb5d0ac246c55b421bb4276908799a3f\": rpc error: code = NotFound desc = could not find container \"a3d60956ff7af8b2370d1374fe03e956eb5d0ac246c55b421bb4276908799a3f\": container with ID starting with a3d60956ff7af8b2370d1374fe03e956eb5d0ac246c55b421bb4276908799a3f not found: ID does not exist" Jan 20 17:16:37 crc kubenswrapper[4558]: I0120 17:16:37.048955 4558 scope.go:117] "RemoveContainer" containerID="287de52d7a1cbdfd30b38ebc5ce6836c6e7462c638fb7b5cb4514ff2fd994ca5" Jan 20 17:16:37 crc kubenswrapper[4558]: E0120 17:16:37.049238 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"287de52d7a1cbdfd30b38ebc5ce6836c6e7462c638fb7b5cb4514ff2fd994ca5\": container with ID starting with 287de52d7a1cbdfd30b38ebc5ce6836c6e7462c638fb7b5cb4514ff2fd994ca5 not found: ID does not exist" containerID="287de52d7a1cbdfd30b38ebc5ce6836c6e7462c638fb7b5cb4514ff2fd994ca5" Jan 20 17:16:37 crc kubenswrapper[4558]: I0120 17:16:37.049260 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"287de52d7a1cbdfd30b38ebc5ce6836c6e7462c638fb7b5cb4514ff2fd994ca5"} err="failed to get container status \"287de52d7a1cbdfd30b38ebc5ce6836c6e7462c638fb7b5cb4514ff2fd994ca5\": rpc error: code = NotFound desc = could not find container \"287de52d7a1cbdfd30b38ebc5ce6836c6e7462c638fb7b5cb4514ff2fd994ca5\": container with ID starting with 287de52d7a1cbdfd30b38ebc5ce6836c6e7462c638fb7b5cb4514ff2fd994ca5 not found: ID does not exist" Jan 20 17:16:37 crc kubenswrapper[4558]: I0120 17:16:37.049274 4558 scope.go:117] "RemoveContainer" containerID="d34ab3fc8aadd821e92e2bf92349870b0e59665705b0c279da24491935dbfa40" Jan 20 17:16:37 crc kubenswrapper[4558]: E0120 17:16:37.049552 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d34ab3fc8aadd821e92e2bf92349870b0e59665705b0c279da24491935dbfa40\": container with ID starting with d34ab3fc8aadd821e92e2bf92349870b0e59665705b0c279da24491935dbfa40 not found: ID does not exist" 
containerID="d34ab3fc8aadd821e92e2bf92349870b0e59665705b0c279da24491935dbfa40" Jan 20 17:16:37 crc kubenswrapper[4558]: I0120 17:16:37.049573 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d34ab3fc8aadd821e92e2bf92349870b0e59665705b0c279da24491935dbfa40"} err="failed to get container status \"d34ab3fc8aadd821e92e2bf92349870b0e59665705b0c279da24491935dbfa40\": rpc error: code = NotFound desc = could not find container \"d34ab3fc8aadd821e92e2bf92349870b0e59665705b0c279da24491935dbfa40\": container with ID starting with d34ab3fc8aadd821e92e2bf92349870b0e59665705b0c279da24491935dbfa40 not found: ID does not exist" Jan 20 17:16:37 crc kubenswrapper[4558]: I0120 17:16:37.175184 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:16:37 crc kubenswrapper[4558]: I0120 17:16:37.267525 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3072fee5-c73d-407d-80c7-c376628ec545-etc-machine-id\") pod \"3072fee5-c73d-407d-80c7-c376628ec545\" (UID: \"3072fee5-c73d-407d-80c7-c376628ec545\") " Jan 20 17:16:37 crc kubenswrapper[4558]: I0120 17:16:37.267649 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3072fee5-c73d-407d-80c7-c376628ec545-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "3072fee5-c73d-407d-80c7-c376628ec545" (UID: "3072fee5-c73d-407d-80c7-c376628ec545"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:16:37 crc kubenswrapper[4558]: I0120 17:16:37.267680 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3072fee5-c73d-407d-80c7-c376628ec545-scripts\") pod \"3072fee5-c73d-407d-80c7-c376628ec545\" (UID: \"3072fee5-c73d-407d-80c7-c376628ec545\") " Jan 20 17:16:37 crc kubenswrapper[4558]: I0120 17:16:37.267937 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3072fee5-c73d-407d-80c7-c376628ec545-config-data-custom\") pod \"3072fee5-c73d-407d-80c7-c376628ec545\" (UID: \"3072fee5-c73d-407d-80c7-c376628ec545\") " Jan 20 17:16:37 crc kubenswrapper[4558]: I0120 17:16:37.267993 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3072fee5-c73d-407d-80c7-c376628ec545-config-data\") pod \"3072fee5-c73d-407d-80c7-c376628ec545\" (UID: \"3072fee5-c73d-407d-80c7-c376628ec545\") " Jan 20 17:16:37 crc kubenswrapper[4558]: I0120 17:16:37.268041 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6b82g\" (UniqueName: \"kubernetes.io/projected/3072fee5-c73d-407d-80c7-c376628ec545-kube-api-access-6b82g\") pod \"3072fee5-c73d-407d-80c7-c376628ec545\" (UID: \"3072fee5-c73d-407d-80c7-c376628ec545\") " Jan 20 17:16:37 crc kubenswrapper[4558]: I0120 17:16:37.268994 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3072fee5-c73d-407d-80c7-c376628ec545-combined-ca-bundle\") pod \"3072fee5-c73d-407d-80c7-c376628ec545\" (UID: \"3072fee5-c73d-407d-80c7-c376628ec545\") " Jan 20 17:16:37 crc kubenswrapper[4558]: I0120 17:16:37.269587 4558 reconciler_common.go:293] "Volume detached for volume 
\"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3072fee5-c73d-407d-80c7-c376628ec545-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:37 crc kubenswrapper[4558]: I0120 17:16:37.271892 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3072fee5-c73d-407d-80c7-c376628ec545-kube-api-access-6b82g" (OuterVolumeSpecName: "kube-api-access-6b82g") pod "3072fee5-c73d-407d-80c7-c376628ec545" (UID: "3072fee5-c73d-407d-80c7-c376628ec545"). InnerVolumeSpecName "kube-api-access-6b82g". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:16:37 crc kubenswrapper[4558]: I0120 17:16:37.272760 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3072fee5-c73d-407d-80c7-c376628ec545-scripts" (OuterVolumeSpecName: "scripts") pod "3072fee5-c73d-407d-80c7-c376628ec545" (UID: "3072fee5-c73d-407d-80c7-c376628ec545"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:37 crc kubenswrapper[4558]: I0120 17:16:37.272844 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3072fee5-c73d-407d-80c7-c376628ec545-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "3072fee5-c73d-407d-80c7-c376628ec545" (UID: "3072fee5-c73d-407d-80c7-c376628ec545"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:37 crc kubenswrapper[4558]: I0120 17:16:37.307354 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3072fee5-c73d-407d-80c7-c376628ec545-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3072fee5-c73d-407d-80c7-c376628ec545" (UID: "3072fee5-c73d-407d-80c7-c376628ec545"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:37 crc kubenswrapper[4558]: I0120 17:16:37.333586 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3072fee5-c73d-407d-80c7-c376628ec545-config-data" (OuterVolumeSpecName: "config-data") pod "3072fee5-c73d-407d-80c7-c376628ec545" (UID: "3072fee5-c73d-407d-80c7-c376628ec545"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:16:37 crc kubenswrapper[4558]: I0120 17:16:37.371067 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3072fee5-c73d-407d-80c7-c376628ec545-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:37 crc kubenswrapper[4558]: I0120 17:16:37.371401 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6b82g\" (UniqueName: \"kubernetes.io/projected/3072fee5-c73d-407d-80c7-c376628ec545-kube-api-access-6b82g\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:37 crc kubenswrapper[4558]: I0120 17:16:37.371478 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3072fee5-c73d-407d-80c7-c376628ec545-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:37 crc kubenswrapper[4558]: I0120 17:16:37.371550 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3072fee5-c73d-407d-80c7-c376628ec545-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:37 crc kubenswrapper[4558]: I0120 17:16:37.371609 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3072fee5-c73d-407d-80c7-c376628ec545-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:37 crc kubenswrapper[4558]: I0120 17:16:37.709146 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:16:37 crc kubenswrapper[4558]: I0120 17:16:37.709143 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"3072fee5-c73d-407d-80c7-c376628ec545","Type":"ContainerDied","Data":"3270877cc1d65f29e106afce6920ca9c56ef00c9c65d7db9adddebed07f1be1b"} Jan 20 17:16:37 crc kubenswrapper[4558]: I0120 17:16:37.709616 4558 scope.go:117] "RemoveContainer" containerID="599766d523b289496126b3e65bad734ac36d67a7eee51faf425f5873eea60397" Jan 20 17:16:37 crc kubenswrapper[4558]: I0120 17:16:37.742618 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:16:37 crc kubenswrapper[4558]: I0120 17:16:37.743270 4558 scope.go:117] "RemoveContainer" containerID="fa635aef0a0203fdb13831ee491f982a0ac51b1bc7f7d5c51955065fd5631a37" Jan 20 17:16:37 crc kubenswrapper[4558]: I0120 17:16:37.749610 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:16:38 crc kubenswrapper[4558]: I0120 17:16:38.578840 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" path="/var/lib/kubelet/pods/16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5/volumes" Jan 20 17:16:38 crc kubenswrapper[4558]: I0120 17:16:38.584328 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3072fee5-c73d-407d-80c7-c376628ec545" path="/var/lib/kubelet/pods/3072fee5-c73d-407d-80c7-c376628ec545/volumes" Jan 20 17:16:41 crc kubenswrapper[4558]: I0120 17:16:41.609714 4558 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","pod249a3706-31a6-4fb2-9a3f-94700d9ae30e"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort pod249a3706-31a6-4fb2-9a3f-94700d9ae30e] : Timed out while waiting for systemd to remove kubepods-besteffort-pod249a3706_31a6_4fb2_9a3f_94700d9ae30e.slice" Jan 20 17:16:45 crc 
kubenswrapper[4558]: I0120 17:16:45.910371 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-x77zr"] Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.915539 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-x77zr"] Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.990890 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-q77fj"] Jan 20 17:16:45 crc kubenswrapper[4558]: E0120 17:16:45.991175 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e03bea8-761e-4d6d-bcaf-f4397b3ec310" containerName="init" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.991192 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e03bea8-761e-4d6d-bcaf-f4397b3ec310" containerName="init" Jan 20 17:16:45 crc kubenswrapper[4558]: E0120 17:16:45.991206 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="462f58d4-b41a-45b8-aa8a-003bfe30e625" containerName="barbican-worker-log" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.991212 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="462f58d4-b41a-45b8-aa8a-003bfe30e625" containerName="barbican-worker-log" Jan 20 17:16:45 crc kubenswrapper[4558]: E0120 17:16:45.991222 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e87a4844-8a90-4965-878f-d95aa09c47bb" containerName="thanos-sidecar" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.991227 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e87a4844-8a90-4965-878f-d95aa09c47bb" containerName="thanos-sidecar" Jan 20 17:16:45 crc kubenswrapper[4558]: E0120 17:16:45.991233 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5efe2c5d-bf53-462a-beb1-36cb940fc6a0" containerName="keystone-api" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.991238 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="5efe2c5d-bf53-462a-beb1-36cb940fc6a0" containerName="keystone-api" Jan 20 17:16:45 crc kubenswrapper[4558]: E0120 17:16:45.991247 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8dfaaf1f-540b-4c8b-b93c-1e3556c37801" containerName="barbican-keystone-listener" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.991255 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8dfaaf1f-540b-4c8b-b93c-1e3556c37801" containerName="barbican-keystone-listener" Jan 20 17:16:45 crc kubenswrapper[4558]: E0120 17:16:45.991261 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" containerName="object-expirer" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.991266 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" containerName="object-expirer" Jan 20 17:16:45 crc kubenswrapper[4558]: E0120 17:16:45.991273 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e87a4844-8a90-4965-878f-d95aa09c47bb" containerName="prometheus" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.991278 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e87a4844-8a90-4965-878f-d95aa09c47bb" containerName="prometheus" Jan 20 17:16:45 crc kubenswrapper[4558]: E0120 17:16:45.991290 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e03bea8-761e-4d6d-bcaf-f4397b3ec310" containerName="dnsmasq-dns" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.991296 4558 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="6e03bea8-761e-4d6d-bcaf-f4397b3ec310" containerName="dnsmasq-dns" Jan 20 17:16:45 crc kubenswrapper[4558]: E0120 17:16:45.991307 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9116eb6c-0630-4022-9c43-8c8793e08922" containerName="barbican-api-log" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.991313 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9116eb6c-0630-4022-9c43-8c8793e08922" containerName="barbican-api-log" Jan 20 17:16:45 crc kubenswrapper[4558]: E0120 17:16:45.991322 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="158b4a4e-78aa-4813-a00e-abddfb7214ef" containerName="nova-api-log" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.991327 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="158b4a4e-78aa-4813-a00e-abddfb7214ef" containerName="nova-api-log" Jan 20 17:16:45 crc kubenswrapper[4558]: E0120 17:16:45.991336 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0a49fae8-fd33-4a50-b967-e8e8cc48731a" containerName="watcher-api" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.991341 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0a49fae8-fd33-4a50-b967-e8e8cc48731a" containerName="watcher-api" Jan 20 17:16:45 crc kubenswrapper[4558]: E0120 17:16:45.991347 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9ada454-f6fa-4ea4-b386-3ddbcd37f233" containerName="glance-httpd" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.991352 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9ada454-f6fa-4ea4-b386-3ddbcd37f233" containerName="glance-httpd" Jan 20 17:16:45 crc kubenswrapper[4558]: E0120 17:16:45.991360 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" containerName="object-replicator" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.991365 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" containerName="object-replicator" Jan 20 17:16:45 crc kubenswrapper[4558]: E0120 17:16:45.991370 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe030148-30ef-4bb7-bf94-8835a0707df1" containerName="proxy-httpd" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.991374 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe030148-30ef-4bb7-bf94-8835a0707df1" containerName="proxy-httpd" Jan 20 17:16:45 crc kubenswrapper[4558]: E0120 17:16:45.991381 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd2edfa4-790f-49d7-9e32-29571e490aaf" containerName="rabbitmq" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.991386 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd2edfa4-790f-49d7-9e32-29571e490aaf" containerName="rabbitmq" Jan 20 17:16:45 crc kubenswrapper[4558]: E0120 17:16:45.991393 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="249a3706-31a6-4fb2-9a3f-94700d9ae30e" containerName="placement-api" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.991398 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="249a3706-31a6-4fb2-9a3f-94700d9ae30e" containerName="placement-api" Jan 20 17:16:45 crc kubenswrapper[4558]: E0120 17:16:45.991404 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9ada454-f6fa-4ea4-b386-3ddbcd37f233" containerName="glance-log" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.991410 4558 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="a9ada454-f6fa-4ea4-b386-3ddbcd37f233" containerName="glance-log" Jan 20 17:16:45 crc kubenswrapper[4558]: E0120 17:16:45.991416 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="691cf1c1-d617-489b-b0cd-1364328d6c60" containerName="mariadb-account-create-update" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.991423 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="691cf1c1-d617-489b-b0cd-1364328d6c60" containerName="mariadb-account-create-update" Jan 20 17:16:45 crc kubenswrapper[4558]: E0120 17:16:45.991429 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8dfaaf1f-540b-4c8b-b93c-1e3556c37801" containerName="barbican-keystone-listener-log" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.991434 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8dfaaf1f-540b-4c8b-b93c-1e3556c37801" containerName="barbican-keystone-listener-log" Jan 20 17:16:45 crc kubenswrapper[4558]: E0120 17:16:45.991443 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc" containerName="galera" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.991448 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc" containerName="galera" Jan 20 17:16:45 crc kubenswrapper[4558]: E0120 17:16:45.991452 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="544c8619-276c-4358-ac79-58df9a462173" containerName="sg-core" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.991457 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="544c8619-276c-4358-ac79-58df9a462173" containerName="sg-core" Jan 20 17:16:45 crc kubenswrapper[4558]: E0120 17:16:45.991471 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e87a4844-8a90-4965-878f-d95aa09c47bb" containerName="init-config-reloader" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.991476 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e87a4844-8a90-4965-878f-d95aa09c47bb" containerName="init-config-reloader" Jan 20 17:16:45 crc kubenswrapper[4558]: E0120 17:16:45.991484 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2510d24d-b593-4196-92bc-ddde09cc7c15" containerName="watcher-decision-engine" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.991489 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="2510d24d-b593-4196-92bc-ddde09cc7c15" containerName="watcher-decision-engine" Jan 20 17:16:45 crc kubenswrapper[4558]: E0120 17:16:45.991495 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a" containerName="cinder-api" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.991500 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a" containerName="cinder-api" Jan 20 17:16:45 crc kubenswrapper[4558]: E0120 17:16:45.991505 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe030148-30ef-4bb7-bf94-8835a0707df1" containerName="proxy-server" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.991511 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe030148-30ef-4bb7-bf94-8835a0707df1" containerName="proxy-server" Jan 20 17:16:45 crc kubenswrapper[4558]: E0120 17:16:45.991518 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" containerName="swift-recon-cron" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.991523 
4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" containerName="swift-recon-cron" Jan 20 17:16:45 crc kubenswrapper[4558]: E0120 17:16:45.991531 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e87a4844-8a90-4965-878f-d95aa09c47bb" containerName="config-reloader" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.991536 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e87a4844-8a90-4965-878f-d95aa09c47bb" containerName="config-reloader" Jan 20 17:16:45 crc kubenswrapper[4558]: E0120 17:16:45.991544 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13a0a262-6f56-4cf9-9b0d-85110bcff1e7" containerName="init-config-reloader" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.991551 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="13a0a262-6f56-4cf9-9b0d-85110bcff1e7" containerName="init-config-reloader" Jan 20 17:16:45 crc kubenswrapper[4558]: E0120 17:16:45.991561 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e8370668-8857-4be3-bcaa-bae8d6cdd158" containerName="openstack-network-exporter" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.991566 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e8370668-8857-4be3-bcaa-bae8d6cdd158" containerName="openstack-network-exporter" Jan 20 17:16:45 crc kubenswrapper[4558]: E0120 17:16:45.991572 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" containerName="object-auditor" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.991577 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" containerName="object-auditor" Jan 20 17:16:45 crc kubenswrapper[4558]: E0120 17:16:45.991584 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3072fee5-c73d-407d-80c7-c376628ec545" containerName="probe" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.991589 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="3072fee5-c73d-407d-80c7-c376628ec545" containerName="probe" Jan 20 17:16:45 crc kubenswrapper[4558]: E0120 17:16:45.991595 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5588d9e2-2f89-4abb-94de-76b5791582a5" containerName="glance-httpd" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.991602 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="5588d9e2-2f89-4abb-94de-76b5791582a5" containerName="glance-httpd" Jan 20 17:16:45 crc kubenswrapper[4558]: E0120 17:16:45.991608 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0a49fae8-fd33-4a50-b967-e8e8cc48731a" containerName="watcher-api-log" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.991612 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0a49fae8-fd33-4a50-b967-e8e8cc48731a" containerName="watcher-api-log" Jan 20 17:16:45 crc kubenswrapper[4558]: E0120 17:16:45.991618 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" containerName="account-auditor" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.991623 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" containerName="account-auditor" Jan 20 17:16:45 crc kubenswrapper[4558]: E0120 17:16:45.991631 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" containerName="account-reaper" Jan 20 17:16:45 crc kubenswrapper[4558]: 
I0120 17:16:45.991636 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" containerName="account-reaper" Jan 20 17:16:45 crc kubenswrapper[4558]: E0120 17:16:45.991645 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9116eb6c-0630-4022-9c43-8c8793e08922" containerName="barbican-api" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.991651 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9116eb6c-0630-4022-9c43-8c8793e08922" containerName="barbican-api" Jan 20 17:16:45 crc kubenswrapper[4558]: E0120 17:16:45.991659 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32c4dbf1-2dfe-44a7-be21-1415017ca20c" containerName="neutron-httpd" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.991664 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="32c4dbf1-2dfe-44a7-be21-1415017ca20c" containerName="neutron-httpd" Jan 20 17:16:45 crc kubenswrapper[4558]: E0120 17:16:45.991672 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3072fee5-c73d-407d-80c7-c376628ec545" containerName="cinder-scheduler" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.991676 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="3072fee5-c73d-407d-80c7-c376628ec545" containerName="cinder-scheduler" Jan 20 17:16:45 crc kubenswrapper[4558]: E0120 17:16:45.991683 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4cb69c1-3d51-4b13-a025-b37c986ffede" containerName="nova-scheduler-scheduler" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.991689 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4cb69c1-3d51-4b13-a025-b37c986ffede" containerName="nova-scheduler-scheduler" Jan 20 17:16:45 crc kubenswrapper[4558]: E0120 17:16:45.991698 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="462f58d4-b41a-45b8-aa8a-003bfe30e625" containerName="barbican-worker" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.991704 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="462f58d4-b41a-45b8-aa8a-003bfe30e625" containerName="barbican-worker" Jan 20 17:16:45 crc kubenswrapper[4558]: E0120 17:16:45.991711 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd2edfa4-790f-49d7-9e32-29571e490aaf" containerName="setup-container" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.991717 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd2edfa4-790f-49d7-9e32-29571e490aaf" containerName="setup-container" Jan 20 17:16:45 crc kubenswrapper[4558]: E0120 17:16:45.991724 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5588d9e2-2f89-4abb-94de-76b5791582a5" containerName="glance-log" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.991729 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="5588d9e2-2f89-4abb-94de-76b5791582a5" containerName="glance-log" Jan 20 17:16:45 crc kubenswrapper[4558]: E0120 17:16:45.991738 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" containerName="object-server" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.991743 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" containerName="object-server" Jan 20 17:16:45 crc kubenswrapper[4558]: E0120 17:16:45.991749 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" containerName="container-auditor" Jan 20 17:16:45 crc 
kubenswrapper[4558]: I0120 17:16:45.991753 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" containerName="container-auditor" Jan 20 17:16:45 crc kubenswrapper[4558]: E0120 17:16:45.991761 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" containerName="container-updater" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.991765 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" containerName="container-updater" Jan 20 17:16:45 crc kubenswrapper[4558]: E0120 17:16:45.991772 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" containerName="account-server" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.991777 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" containerName="account-server" Jan 20 17:16:45 crc kubenswrapper[4558]: E0120 17:16:45.991783 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" containerName="container-replicator" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.991789 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" containerName="container-replicator" Jan 20 17:16:45 crc kubenswrapper[4558]: E0120 17:16:45.991795 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="249a3706-31a6-4fb2-9a3f-94700d9ae30e" containerName="placement-log" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.991800 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="249a3706-31a6-4fb2-9a3f-94700d9ae30e" containerName="placement-log" Jan 20 17:16:45 crc kubenswrapper[4558]: E0120 17:16:45.991809 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc" containerName="mysql-bootstrap" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.991814 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc" containerName="mysql-bootstrap" Jan 20 17:16:45 crc kubenswrapper[4558]: E0120 17:16:45.991820 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30b9f169-eef8-4a30-b543-83dda735da70" containerName="watcher-applier" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.991825 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="30b9f169-eef8-4a30-b543-83dda735da70" containerName="watcher-applier" Jan 20 17:16:45 crc kubenswrapper[4558]: E0120 17:16:45.991831 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" containerName="object-updater" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.991836 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" containerName="object-updater" Jan 20 17:16:45 crc kubenswrapper[4558]: E0120 17:16:45.991841 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70dda975-7e0b-40a4-a92a-675be53560b7" containerName="nova-metadata-log" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.991846 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="70dda975-7e0b-40a4-a92a-675be53560b7" containerName="nova-metadata-log" Jan 20 17:16:45 crc kubenswrapper[4558]: E0120 17:16:45.991853 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32c4dbf1-2dfe-44a7-be21-1415017ca20c" 
containerName="neutron-api" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.991858 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="32c4dbf1-2dfe-44a7-be21-1415017ca20c" containerName="neutron-api" Jan 20 17:16:45 crc kubenswrapper[4558]: E0120 17:16:45.991865 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" containerName="rsync" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.991870 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" containerName="rsync" Jan 20 17:16:45 crc kubenswrapper[4558]: E0120 17:16:45.991879 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6947f138-6d2f-40e2-a236-736d26d3a1e6" containerName="kube-state-metrics" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.991884 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6947f138-6d2f-40e2-a236-736d26d3a1e6" containerName="kube-state-metrics" Jan 20 17:16:45 crc kubenswrapper[4558]: E0120 17:16:45.991894 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="217a2ebc-8331-4a6f-9113-2c813563a2b8" containerName="nova-cell1-conductor-conductor" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.991899 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="217a2ebc-8331-4a6f-9113-2c813563a2b8" containerName="nova-cell1-conductor-conductor" Jan 20 17:16:45 crc kubenswrapper[4558]: E0120 17:16:45.991906 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70dda975-7e0b-40a4-a92a-675be53560b7" containerName="nova-metadata-metadata" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.991911 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="70dda975-7e0b-40a4-a92a-675be53560b7" containerName="nova-metadata-metadata" Jan 20 17:16:45 crc kubenswrapper[4558]: E0120 17:16:45.991917 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="544c8619-276c-4358-ac79-58df9a462173" containerName="ceilometer-notification-agent" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.991921 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="544c8619-276c-4358-ac79-58df9a462173" containerName="ceilometer-notification-agent" Jan 20 17:16:45 crc kubenswrapper[4558]: E0120 17:16:45.991928 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="544c8619-276c-4358-ac79-58df9a462173" containerName="proxy-httpd" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.991932 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="544c8619-276c-4358-ac79-58df9a462173" containerName="proxy-httpd" Jan 20 17:16:45 crc kubenswrapper[4558]: E0120 17:16:45.991939 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="544c8619-276c-4358-ac79-58df9a462173" containerName="ceilometer-central-agent" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.991944 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="544c8619-276c-4358-ac79-58df9a462173" containerName="ceilometer-central-agent" Jan 20 17:16:45 crc kubenswrapper[4558]: E0120 17:16:45.991950 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc9b3fed-9730-4199-b074-3f01cdc1b7ce" containerName="memcached" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.991955 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc9b3fed-9730-4199-b074-3f01cdc1b7ce" containerName="memcached" Jan 20 17:16:45 crc kubenswrapper[4558]: E0120 17:16:45.991960 4558 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a" containerName="cinder-api-log" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.991965 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a" containerName="cinder-api-log" Jan 20 17:16:45 crc kubenswrapper[4558]: E0120 17:16:45.991973 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e8370668-8857-4be3-bcaa-bae8d6cdd158" containerName="ovn-northd" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.991977 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e8370668-8857-4be3-bcaa-bae8d6cdd158" containerName="ovn-northd" Jan 20 17:16:45 crc kubenswrapper[4558]: E0120 17:16:45.991985 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" containerName="container-server" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.991989 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" containerName="container-server" Jan 20 17:16:45 crc kubenswrapper[4558]: E0120 17:16:45.991995 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" containerName="account-replicator" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.992000 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" containerName="account-replicator" Jan 20 17:16:45 crc kubenswrapper[4558]: E0120 17:16:45.992007 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13a0a262-6f56-4cf9-9b0d-85110bcff1e7" containerName="alertmanager" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.992012 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="13a0a262-6f56-4cf9-9b0d-85110bcff1e7" containerName="alertmanager" Jan 20 17:16:45 crc kubenswrapper[4558]: E0120 17:16:45.992020 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13a0a262-6f56-4cf9-9b0d-85110bcff1e7" containerName="config-reloader" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.992024 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="13a0a262-6f56-4cf9-9b0d-85110bcff1e7" containerName="config-reloader" Jan 20 17:16:45 crc kubenswrapper[4558]: E0120 17:16:45.992030 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad371747-3ff0-49b0-b2a6-33d7bb7cd16b" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.992034 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad371747-3ff0-49b0-b2a6-33d7bb7cd16b" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:16:45 crc kubenswrapper[4558]: E0120 17:16:45.992044 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="158b4a4e-78aa-4813-a00e-abddfb7214ef" containerName="nova-api-api" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.992049 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="158b4a4e-78aa-4813-a00e-abddfb7214ef" containerName="nova-api-api" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.992154 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="5efe2c5d-bf53-462a-beb1-36cb940fc6a0" containerName="keystone-api" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.992179 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="158b4a4e-78aa-4813-a00e-abddfb7214ef" containerName="nova-api-log" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.992187 4558 
memory_manager.go:354] "RemoveStaleState removing state" podUID="6947f138-6d2f-40e2-a236-736d26d3a1e6" containerName="kube-state-metrics" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.992192 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="249a3706-31a6-4fb2-9a3f-94700d9ae30e" containerName="placement-api" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.992198 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="30b9f169-eef8-4a30-b543-83dda735da70" containerName="watcher-applier" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.992205 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="fe030148-30ef-4bb7-bf94-8835a0707df1" containerName="proxy-server" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.992214 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8dfaaf1f-540b-4c8b-b93c-1e3556c37801" containerName="barbican-keystone-listener-log" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.992221 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="70dda975-7e0b-40a4-a92a-675be53560b7" containerName="nova-metadata-metadata" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.992226 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" containerName="container-updater" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.992234 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" containerName="account-replicator" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.992242 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="32c4dbf1-2dfe-44a7-be21-1415017ca20c" containerName="neutron-api" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.992246 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a9ada454-f6fa-4ea4-b386-3ddbcd37f233" containerName="glance-httpd" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.992254 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="462f58d4-b41a-45b8-aa8a-003bfe30e625" containerName="barbican-worker" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.992262 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="0a49fae8-fd33-4a50-b967-e8e8cc48731a" containerName="watcher-api" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.992272 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="462f58d4-b41a-45b8-aa8a-003bfe30e625" containerName="barbican-worker-log" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.992281 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="544c8619-276c-4358-ac79-58df9a462173" containerName="sg-core" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.992287 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="2510d24d-b593-4196-92bc-ddde09cc7c15" containerName="watcher-decision-engine" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.992294 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a" containerName="cinder-api" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.992301 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd2edfa4-790f-49d7-9e32-29571e490aaf" containerName="rabbitmq" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.992307 4558 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="544c8619-276c-4358-ac79-58df9a462173" containerName="ceilometer-central-agent" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.992314 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" containerName="object-expirer" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.992320 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="544c8619-276c-4358-ac79-58df9a462173" containerName="ceilometer-notification-agent" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.992330 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" containerName="container-auditor" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.992339 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="eed05f95-b1a3-44b9-bb2c-e7b01d91d8cc" containerName="galera" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.992346 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="70dda975-7e0b-40a4-a92a-675be53560b7" containerName="nova-metadata-log" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.992352 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e87a4844-8a90-4965-878f-d95aa09c47bb" containerName="config-reloader" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.992358 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="691cf1c1-d617-489b-b0cd-1364328d6c60" containerName="mariadb-account-create-update" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.992366 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" containerName="account-server" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.992372 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="544c8619-276c-4358-ac79-58df9a462173" containerName="proxy-httpd" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.992377 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e87a4844-8a90-4965-878f-d95aa09c47bb" containerName="prometheus" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.992383 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="13a0a262-6f56-4cf9-9b0d-85110bcff1e7" containerName="config-reloader" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.992390 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" containerName="account-reaper" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.992395 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="13a0a262-6f56-4cf9-9b0d-85110bcff1e7" containerName="alertmanager" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.992400 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="3072fee5-c73d-407d-80c7-c376628ec545" containerName="cinder-scheduler" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.992406 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" containerName="account-auditor" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.992411 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e03bea8-761e-4d6d-bcaf-f4397b3ec310" containerName="dnsmasq-dns" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.992417 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="5588d9e2-2f89-4abb-94de-76b5791582a5" containerName="glance-log" Jan 20 17:16:45 
crc kubenswrapper[4558]: I0120 17:16:45.992423 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="9116eb6c-0630-4022-9c43-8c8793e08922" containerName="barbican-api" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.992430 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="3072fee5-c73d-407d-80c7-c376628ec545" containerName="probe" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.992441 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc9b3fed-9730-4199-b074-3f01cdc1b7ce" containerName="memcached" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.992447 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" containerName="object-replicator" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.992453 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="691cf1c1-d617-489b-b0cd-1364328d6c60" containerName="mariadb-account-create-update" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.992458 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" containerName="rsync" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.992470 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e8370668-8857-4be3-bcaa-bae8d6cdd158" containerName="openstack-network-exporter" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.992477 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="32c4dbf1-2dfe-44a7-be21-1415017ca20c" containerName="neutron-httpd" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.992484 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="fe030148-30ef-4bb7-bf94-8835a0707df1" containerName="proxy-httpd" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.992491 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" containerName="object-server" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.992498 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f400a6a0-f3fc-4c0f-88cf-b6fbc6a3da4a" containerName="cinder-api-log" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.992504 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="5588d9e2-2f89-4abb-94de-76b5791582a5" containerName="glance-httpd" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.992510 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="217a2ebc-8331-4a6f-9113-2c813563a2b8" containerName="nova-cell1-conductor-conductor" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.992516 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" containerName="container-replicator" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.992522 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" containerName="object-updater" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.992529 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" containerName="swift-recon-cron" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.992538 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="9116eb6c-0630-4022-9c43-8c8793e08922" containerName="barbican-api-log" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.992544 4558 memory_manager.go:354] "RemoveStaleState 
removing state" podUID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" containerName="object-auditor" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.992551 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c4cb69c1-3d51-4b13-a025-b37c986ffede" containerName="nova-scheduler-scheduler" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.992556 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="158b4a4e-78aa-4813-a00e-abddfb7214ef" containerName="nova-api-api" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.992562 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e87a4844-8a90-4965-878f-d95aa09c47bb" containerName="thanos-sidecar" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.992569 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad371747-3ff0-49b0-b2a6-33d7bb7cd16b" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.992576 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="249a3706-31a6-4fb2-9a3f-94700d9ae30e" containerName="placement-log" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.992582 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="0a49fae8-fd33-4a50-b967-e8e8cc48731a" containerName="watcher-api-log" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.992598 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="16ed60a7-ca4e-4460-9ffb-4d6abbf8ced5" containerName="container-server" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.992604 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e8370668-8857-4be3-bcaa-bae8d6cdd158" containerName="ovn-northd" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.992611 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8dfaaf1f-540b-4c8b-b93c-1e3556c37801" containerName="barbican-keystone-listener" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.992617 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a9ada454-f6fa-4ea4-b386-3ddbcd37f233" containerName="glance-log" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.993067 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-q77fj" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.996956 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.997210 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.997309 4558 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-k9ht8" Jan 20 17:16:45 crc kubenswrapper[4558]: I0120 17:16:45.997370 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Jan 20 17:16:46 crc kubenswrapper[4558]: I0120 17:16:46.009057 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-q77fj"] Jan 20 17:16:46 crc kubenswrapper[4558]: I0120 17:16:46.109604 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fxk2g\" (UniqueName: \"kubernetes.io/projected/b3eff69c-b716-44e2-b814-4fdbfc40d890-kube-api-access-fxk2g\") pod \"crc-storage-crc-q77fj\" (UID: \"b3eff69c-b716-44e2-b814-4fdbfc40d890\") " pod="crc-storage/crc-storage-crc-q77fj" Jan 20 17:16:46 crc kubenswrapper[4558]: I0120 17:16:46.109670 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/b3eff69c-b716-44e2-b814-4fdbfc40d890-crc-storage\") pod \"crc-storage-crc-q77fj\" (UID: \"b3eff69c-b716-44e2-b814-4fdbfc40d890\") " pod="crc-storage/crc-storage-crc-q77fj" Jan 20 17:16:46 crc kubenswrapper[4558]: I0120 17:16:46.109988 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/b3eff69c-b716-44e2-b814-4fdbfc40d890-node-mnt\") pod \"crc-storage-crc-q77fj\" (UID: \"b3eff69c-b716-44e2-b814-4fdbfc40d890\") " pod="crc-storage/crc-storage-crc-q77fj" Jan 20 17:16:46 crc kubenswrapper[4558]: I0120 17:16:46.212358 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/b3eff69c-b716-44e2-b814-4fdbfc40d890-node-mnt\") pod \"crc-storage-crc-q77fj\" (UID: \"b3eff69c-b716-44e2-b814-4fdbfc40d890\") " pod="crc-storage/crc-storage-crc-q77fj" Jan 20 17:16:46 crc kubenswrapper[4558]: I0120 17:16:46.212473 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fxk2g\" (UniqueName: \"kubernetes.io/projected/b3eff69c-b716-44e2-b814-4fdbfc40d890-kube-api-access-fxk2g\") pod \"crc-storage-crc-q77fj\" (UID: \"b3eff69c-b716-44e2-b814-4fdbfc40d890\") " pod="crc-storage/crc-storage-crc-q77fj" Jan 20 17:16:46 crc kubenswrapper[4558]: I0120 17:16:46.212521 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/b3eff69c-b716-44e2-b814-4fdbfc40d890-crc-storage\") pod \"crc-storage-crc-q77fj\" (UID: \"b3eff69c-b716-44e2-b814-4fdbfc40d890\") " pod="crc-storage/crc-storage-crc-q77fj" Jan 20 17:16:46 crc kubenswrapper[4558]: I0120 17:16:46.212742 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/b3eff69c-b716-44e2-b814-4fdbfc40d890-node-mnt\") pod \"crc-storage-crc-q77fj\" (UID: \"b3eff69c-b716-44e2-b814-4fdbfc40d890\") " 
pod="crc-storage/crc-storage-crc-q77fj" Jan 20 17:16:46 crc kubenswrapper[4558]: I0120 17:16:46.213511 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/b3eff69c-b716-44e2-b814-4fdbfc40d890-crc-storage\") pod \"crc-storage-crc-q77fj\" (UID: \"b3eff69c-b716-44e2-b814-4fdbfc40d890\") " pod="crc-storage/crc-storage-crc-q77fj" Jan 20 17:16:46 crc kubenswrapper[4558]: I0120 17:16:46.231210 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fxk2g\" (UniqueName: \"kubernetes.io/projected/b3eff69c-b716-44e2-b814-4fdbfc40d890-kube-api-access-fxk2g\") pod \"crc-storage-crc-q77fj\" (UID: \"b3eff69c-b716-44e2-b814-4fdbfc40d890\") " pod="crc-storage/crc-storage-crc-q77fj" Jan 20 17:16:46 crc kubenswrapper[4558]: I0120 17:16:46.315532 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-q77fj" Jan 20 17:16:46 crc kubenswrapper[4558]: I0120 17:16:46.585936 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8a298713-e4df-41b7-a492-31833ff89d7e" path="/var/lib/kubelet/pods/8a298713-e4df-41b7-a492-31833ff89d7e/volumes" Jan 20 17:16:46 crc kubenswrapper[4558]: I0120 17:16:46.707992 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-q77fj"] Jan 20 17:16:46 crc kubenswrapper[4558]: W0120 17:16:46.716343 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb3eff69c_b716_44e2_b814_4fdbfc40d890.slice/crio-a718dfd1eeee65cb32aef453c106f89c823d7318789daccf7316bf37128b840b WatchSource:0}: Error finding container a718dfd1eeee65cb32aef453c106f89c823d7318789daccf7316bf37128b840b: Status 404 returned error can't find the container with id a718dfd1eeee65cb32aef453c106f89c823d7318789daccf7316bf37128b840b Jan 20 17:16:46 crc kubenswrapper[4558]: I0120 17:16:46.815638 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-q77fj" event={"ID":"b3eff69c-b716-44e2-b814-4fdbfc40d890","Type":"ContainerStarted","Data":"a718dfd1eeee65cb32aef453c106f89c823d7318789daccf7316bf37128b840b"} Jan 20 17:16:47 crc kubenswrapper[4558]: I0120 17:16:47.826280 4558 generic.go:334] "Generic (PLEG): container finished" podID="b3eff69c-b716-44e2-b814-4fdbfc40d890" containerID="a11fa90defce42c76ab0b049d20ff0e9aec4fadd756a856e04db760247f336fd" exitCode=0 Jan 20 17:16:47 crc kubenswrapper[4558]: I0120 17:16:47.826382 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-q77fj" event={"ID":"b3eff69c-b716-44e2-b814-4fdbfc40d890","Type":"ContainerDied","Data":"a11fa90defce42c76ab0b049d20ff0e9aec4fadd756a856e04db760247f336fd"} Jan 20 17:16:49 crc kubenswrapper[4558]: I0120 17:16:49.078833 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-q77fj" Jan 20 17:16:49 crc kubenswrapper[4558]: I0120 17:16:49.168889 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/b3eff69c-b716-44e2-b814-4fdbfc40d890-node-mnt\") pod \"b3eff69c-b716-44e2-b814-4fdbfc40d890\" (UID: \"b3eff69c-b716-44e2-b814-4fdbfc40d890\") " Jan 20 17:16:49 crc kubenswrapper[4558]: I0120 17:16:49.168998 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b3eff69c-b716-44e2-b814-4fdbfc40d890-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "b3eff69c-b716-44e2-b814-4fdbfc40d890" (UID: "b3eff69c-b716-44e2-b814-4fdbfc40d890"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:16:49 crc kubenswrapper[4558]: I0120 17:16:49.169058 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fxk2g\" (UniqueName: \"kubernetes.io/projected/b3eff69c-b716-44e2-b814-4fdbfc40d890-kube-api-access-fxk2g\") pod \"b3eff69c-b716-44e2-b814-4fdbfc40d890\" (UID: \"b3eff69c-b716-44e2-b814-4fdbfc40d890\") " Jan 20 17:16:49 crc kubenswrapper[4558]: I0120 17:16:49.169126 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/b3eff69c-b716-44e2-b814-4fdbfc40d890-crc-storage\") pod \"b3eff69c-b716-44e2-b814-4fdbfc40d890\" (UID: \"b3eff69c-b716-44e2-b814-4fdbfc40d890\") " Jan 20 17:16:49 crc kubenswrapper[4558]: I0120 17:16:49.169648 4558 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/b3eff69c-b716-44e2-b814-4fdbfc40d890-node-mnt\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:49 crc kubenswrapper[4558]: I0120 17:16:49.184098 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b3eff69c-b716-44e2-b814-4fdbfc40d890-kube-api-access-fxk2g" (OuterVolumeSpecName: "kube-api-access-fxk2g") pod "b3eff69c-b716-44e2-b814-4fdbfc40d890" (UID: "b3eff69c-b716-44e2-b814-4fdbfc40d890"). InnerVolumeSpecName "kube-api-access-fxk2g". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:16:49 crc kubenswrapper[4558]: I0120 17:16:49.185217 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b3eff69c-b716-44e2-b814-4fdbfc40d890-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "b3eff69c-b716-44e2-b814-4fdbfc40d890" (UID: "b3eff69c-b716-44e2-b814-4fdbfc40d890"). InnerVolumeSpecName "crc-storage". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:16:49 crc kubenswrapper[4558]: I0120 17:16:49.271693 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fxk2g\" (UniqueName: \"kubernetes.io/projected/b3eff69c-b716-44e2-b814-4fdbfc40d890-kube-api-access-fxk2g\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:49 crc kubenswrapper[4558]: I0120 17:16:49.271743 4558 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/b3eff69c-b716-44e2-b814-4fdbfc40d890-crc-storage\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:49 crc kubenswrapper[4558]: I0120 17:16:49.844969 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-q77fj" event={"ID":"b3eff69c-b716-44e2-b814-4fdbfc40d890","Type":"ContainerDied","Data":"a718dfd1eeee65cb32aef453c106f89c823d7318789daccf7316bf37128b840b"} Jan 20 17:16:49 crc kubenswrapper[4558]: I0120 17:16:49.845014 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a718dfd1eeee65cb32aef453c106f89c823d7318789daccf7316bf37128b840b" Jan 20 17:16:49 crc kubenswrapper[4558]: I0120 17:16:49.845184 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-q77fj" Jan 20 17:16:49 crc kubenswrapper[4558]: E0120 17:16:49.983812 4558 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb3eff69c_b716_44e2_b814_4fdbfc40d890.slice\": RecentStats: unable to find data in memory cache]" Jan 20 17:16:51 crc kubenswrapper[4558]: I0120 17:16:51.983796 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-q77fj"] Jan 20 17:16:51 crc kubenswrapper[4558]: I0120 17:16:51.988594 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-q77fj"] Jan 20 17:16:52 crc kubenswrapper[4558]: I0120 17:16:52.085208 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-qdbtt"] Jan 20 17:16:52 crc kubenswrapper[4558]: E0120 17:16:52.085477 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="691cf1c1-d617-489b-b0cd-1364328d6c60" containerName="mariadb-account-create-update" Jan 20 17:16:52 crc kubenswrapper[4558]: I0120 17:16:52.085497 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="691cf1c1-d617-489b-b0cd-1364328d6c60" containerName="mariadb-account-create-update" Jan 20 17:16:52 crc kubenswrapper[4558]: E0120 17:16:52.085522 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3eff69c-b716-44e2-b814-4fdbfc40d890" containerName="storage" Jan 20 17:16:52 crc kubenswrapper[4558]: I0120 17:16:52.085528 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3eff69c-b716-44e2-b814-4fdbfc40d890" containerName="storage" Jan 20 17:16:52 crc kubenswrapper[4558]: I0120 17:16:52.085675 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3eff69c-b716-44e2-b814-4fdbfc40d890" containerName="storage" Jan 20 17:16:52 crc kubenswrapper[4558]: I0120 17:16:52.086102 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-qdbtt" Jan 20 17:16:52 crc kubenswrapper[4558]: I0120 17:16:52.087978 4558 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-k9ht8" Jan 20 17:16:52 crc kubenswrapper[4558]: I0120 17:16:52.088806 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Jan 20 17:16:52 crc kubenswrapper[4558]: I0120 17:16:52.089553 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Jan 20 17:16:52 crc kubenswrapper[4558]: I0120 17:16:52.092494 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Jan 20 17:16:52 crc kubenswrapper[4558]: I0120 17:16:52.094135 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-qdbtt"] Jan 20 17:16:52 crc kubenswrapper[4558]: I0120 17:16:52.210590 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/bfb33818-4cdc-4840-ac61-d14fee47a57f-crc-storage\") pod \"crc-storage-crc-qdbtt\" (UID: \"bfb33818-4cdc-4840-ac61-d14fee47a57f\") " pod="crc-storage/crc-storage-crc-qdbtt" Jan 20 17:16:52 crc kubenswrapper[4558]: I0120 17:16:52.210677 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vncp4\" (UniqueName: \"kubernetes.io/projected/bfb33818-4cdc-4840-ac61-d14fee47a57f-kube-api-access-vncp4\") pod \"crc-storage-crc-qdbtt\" (UID: \"bfb33818-4cdc-4840-ac61-d14fee47a57f\") " pod="crc-storage/crc-storage-crc-qdbtt" Jan 20 17:16:52 crc kubenswrapper[4558]: I0120 17:16:52.210713 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/bfb33818-4cdc-4840-ac61-d14fee47a57f-node-mnt\") pod \"crc-storage-crc-qdbtt\" (UID: \"bfb33818-4cdc-4840-ac61-d14fee47a57f\") " pod="crc-storage/crc-storage-crc-qdbtt" Jan 20 17:16:52 crc kubenswrapper[4558]: I0120 17:16:52.311748 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vncp4\" (UniqueName: \"kubernetes.io/projected/bfb33818-4cdc-4840-ac61-d14fee47a57f-kube-api-access-vncp4\") pod \"crc-storage-crc-qdbtt\" (UID: \"bfb33818-4cdc-4840-ac61-d14fee47a57f\") " pod="crc-storage/crc-storage-crc-qdbtt" Jan 20 17:16:52 crc kubenswrapper[4558]: I0120 17:16:52.311802 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/bfb33818-4cdc-4840-ac61-d14fee47a57f-node-mnt\") pod \"crc-storage-crc-qdbtt\" (UID: \"bfb33818-4cdc-4840-ac61-d14fee47a57f\") " pod="crc-storage/crc-storage-crc-qdbtt" Jan 20 17:16:52 crc kubenswrapper[4558]: I0120 17:16:52.311913 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/bfb33818-4cdc-4840-ac61-d14fee47a57f-crc-storage\") pod \"crc-storage-crc-qdbtt\" (UID: \"bfb33818-4cdc-4840-ac61-d14fee47a57f\") " pod="crc-storage/crc-storage-crc-qdbtt" Jan 20 17:16:52 crc kubenswrapper[4558]: I0120 17:16:52.312146 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/bfb33818-4cdc-4840-ac61-d14fee47a57f-node-mnt\") pod \"crc-storage-crc-qdbtt\" (UID: \"bfb33818-4cdc-4840-ac61-d14fee47a57f\") " 
pod="crc-storage/crc-storage-crc-qdbtt" Jan 20 17:16:52 crc kubenswrapper[4558]: I0120 17:16:52.312558 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/bfb33818-4cdc-4840-ac61-d14fee47a57f-crc-storage\") pod \"crc-storage-crc-qdbtt\" (UID: \"bfb33818-4cdc-4840-ac61-d14fee47a57f\") " pod="crc-storage/crc-storage-crc-qdbtt" Jan 20 17:16:52 crc kubenswrapper[4558]: I0120 17:16:52.329184 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vncp4\" (UniqueName: \"kubernetes.io/projected/bfb33818-4cdc-4840-ac61-d14fee47a57f-kube-api-access-vncp4\") pod \"crc-storage-crc-qdbtt\" (UID: \"bfb33818-4cdc-4840-ac61-d14fee47a57f\") " pod="crc-storage/crc-storage-crc-qdbtt" Jan 20 17:16:52 crc kubenswrapper[4558]: I0120 17:16:52.400446 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-qdbtt" Jan 20 17:16:52 crc kubenswrapper[4558]: I0120 17:16:52.576832 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b3eff69c-b716-44e2-b814-4fdbfc40d890" path="/var/lib/kubelet/pods/b3eff69c-b716-44e2-b814-4fdbfc40d890/volumes" Jan 20 17:16:52 crc kubenswrapper[4558]: I0120 17:16:52.812156 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-qdbtt"] Jan 20 17:16:52 crc kubenswrapper[4558]: I0120 17:16:52.868479 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-qdbtt" event={"ID":"bfb33818-4cdc-4840-ac61-d14fee47a57f","Type":"ContainerStarted","Data":"4693e196f3dc7ff55acf1adcfd3e3b6019f4c2517e4b878c014045737f0e230c"} Jan 20 17:16:53 crc kubenswrapper[4558]: I0120 17:16:53.880440 4558 generic.go:334] "Generic (PLEG): container finished" podID="bfb33818-4cdc-4840-ac61-d14fee47a57f" containerID="644ede42e2610088cf730b8329c0fbe61fa14dedf213de3beb1f25c2ad510023" exitCode=0 Jan 20 17:16:53 crc kubenswrapper[4558]: I0120 17:16:53.880513 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-qdbtt" event={"ID":"bfb33818-4cdc-4840-ac61-d14fee47a57f","Type":"ContainerDied","Data":"644ede42e2610088cf730b8329c0fbe61fa14dedf213de3beb1f25c2ad510023"} Jan 20 17:16:55 crc kubenswrapper[4558]: I0120 17:16:55.142937 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-qdbtt" Jan 20 17:16:55 crc kubenswrapper[4558]: I0120 17:16:55.259822 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/bfb33818-4cdc-4840-ac61-d14fee47a57f-crc-storage\") pod \"bfb33818-4cdc-4840-ac61-d14fee47a57f\" (UID: \"bfb33818-4cdc-4840-ac61-d14fee47a57f\") " Jan 20 17:16:55 crc kubenswrapper[4558]: I0120 17:16:55.260035 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vncp4\" (UniqueName: \"kubernetes.io/projected/bfb33818-4cdc-4840-ac61-d14fee47a57f-kube-api-access-vncp4\") pod \"bfb33818-4cdc-4840-ac61-d14fee47a57f\" (UID: \"bfb33818-4cdc-4840-ac61-d14fee47a57f\") " Jan 20 17:16:55 crc kubenswrapper[4558]: I0120 17:16:55.260272 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/bfb33818-4cdc-4840-ac61-d14fee47a57f-node-mnt\") pod \"bfb33818-4cdc-4840-ac61-d14fee47a57f\" (UID: \"bfb33818-4cdc-4840-ac61-d14fee47a57f\") " Jan 20 17:16:55 crc kubenswrapper[4558]: I0120 17:16:55.260423 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bfb33818-4cdc-4840-ac61-d14fee47a57f-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "bfb33818-4cdc-4840-ac61-d14fee47a57f" (UID: "bfb33818-4cdc-4840-ac61-d14fee47a57f"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:16:55 crc kubenswrapper[4558]: I0120 17:16:55.260923 4558 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/bfb33818-4cdc-4840-ac61-d14fee47a57f-node-mnt\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:55 crc kubenswrapper[4558]: I0120 17:16:55.267552 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bfb33818-4cdc-4840-ac61-d14fee47a57f-kube-api-access-vncp4" (OuterVolumeSpecName: "kube-api-access-vncp4") pod "bfb33818-4cdc-4840-ac61-d14fee47a57f" (UID: "bfb33818-4cdc-4840-ac61-d14fee47a57f"). InnerVolumeSpecName "kube-api-access-vncp4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:16:55 crc kubenswrapper[4558]: I0120 17:16:55.283658 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bfb33818-4cdc-4840-ac61-d14fee47a57f-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "bfb33818-4cdc-4840-ac61-d14fee47a57f" (UID: "bfb33818-4cdc-4840-ac61-d14fee47a57f"). InnerVolumeSpecName "crc-storage". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:16:55 crc kubenswrapper[4558]: I0120 17:16:55.362937 4558 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/bfb33818-4cdc-4840-ac61-d14fee47a57f-crc-storage\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:55 crc kubenswrapper[4558]: I0120 17:16:55.362970 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vncp4\" (UniqueName: \"kubernetes.io/projected/bfb33818-4cdc-4840-ac61-d14fee47a57f-kube-api-access-vncp4\") on node \"crc\" DevicePath \"\"" Jan 20 17:16:55 crc kubenswrapper[4558]: I0120 17:16:55.898561 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-qdbtt" event={"ID":"bfb33818-4cdc-4840-ac61-d14fee47a57f","Type":"ContainerDied","Data":"4693e196f3dc7ff55acf1adcfd3e3b6019f4c2517e4b878c014045737f0e230c"} Jan 20 17:16:55 crc kubenswrapper[4558]: I0120 17:16:55.898606 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4693e196f3dc7ff55acf1adcfd3e3b6019f4c2517e4b878c014045737f0e230c" Jan 20 17:16:55 crc kubenswrapper[4558]: I0120 17:16:55.898605 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-qdbtt" Jan 20 17:16:57 crc kubenswrapper[4558]: I0120 17:16:57.329828 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 17:16:57 crc kubenswrapper[4558]: I0120 17:16:57.330119 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 17:17:03 crc kubenswrapper[4558]: I0120 17:17:03.424671 4558 scope.go:117] "RemoveContainer" containerID="8e3efb638bbc1064f16cefab4a99ea7a1094f7ef51dacc45ead8e5b6008f26f4" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.098315 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:17:10 crc kubenswrapper[4558]: E0120 17:17:10.099359 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bfb33818-4cdc-4840-ac61-d14fee47a57f" containerName="storage" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.099378 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="bfb33818-4cdc-4840-ac61-d14fee47a57f" containerName="storage" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.099567 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="bfb33818-4cdc-4840-ac61-d14fee47a57f" containerName="storage" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.100371 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.102076 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"rabbitmq-config-data" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.102343 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"rabbitmq-default-user" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.102627 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"rabbitmq-plugins-conf" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.103139 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-rabbitmq-svc" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.103208 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"rabbitmq-server-dockercfg-b7fvz" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.103282 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"rabbitmq-erlang-cookie" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.110591 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"rabbitmq-server-conf" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.133561 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-2"] Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.136131 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-server-2" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.160132 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-1"] Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.163406 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-server-1" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.174051 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-1"] Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.178238 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.184787 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-2"] Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.199192 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-server-0\" (UID: \"24114ddb-3b30-42ac-9d61-cfeb15d58728\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.199262 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/24114ddb-3b30-42ac-9d61-cfeb15d58728-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"24114ddb-3b30-42ac-9d61-cfeb15d58728\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.199289 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/24114ddb-3b30-42ac-9d61-cfeb15d58728-pod-info\") pod \"rabbitmq-server-0\" (UID: \"24114ddb-3b30-42ac-9d61-cfeb15d58728\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.199308 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/24114ddb-3b30-42ac-9d61-cfeb15d58728-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"24114ddb-3b30-42ac-9d61-cfeb15d58728\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.199328 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/24114ddb-3b30-42ac-9d61-cfeb15d58728-config-data\") pod \"rabbitmq-server-0\" (UID: \"24114ddb-3b30-42ac-9d61-cfeb15d58728\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.199355 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/24114ddb-3b30-42ac-9d61-cfeb15d58728-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"24114ddb-3b30-42ac-9d61-cfeb15d58728\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.199513 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r8tpl\" (UniqueName: \"kubernetes.io/projected/24114ddb-3b30-42ac-9d61-cfeb15d58728-kube-api-access-r8tpl\") pod \"rabbitmq-server-0\" (UID: \"24114ddb-3b30-42ac-9d61-cfeb15d58728\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.199591 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: 
\"kubernetes.io/empty-dir/24114ddb-3b30-42ac-9d61-cfeb15d58728-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"24114ddb-3b30-42ac-9d61-cfeb15d58728\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.199693 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/24114ddb-3b30-42ac-9d61-cfeb15d58728-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"24114ddb-3b30-42ac-9d61-cfeb15d58728\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.199766 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/24114ddb-3b30-42ac-9d61-cfeb15d58728-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"24114ddb-3b30-42ac-9d61-cfeb15d58728\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.199855 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/24114ddb-3b30-42ac-9d61-cfeb15d58728-server-conf\") pod \"rabbitmq-server-0\" (UID: \"24114ddb-3b30-42ac-9d61-cfeb15d58728\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.300835 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/24114ddb-3b30-42ac-9d61-cfeb15d58728-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"24114ddb-3b30-42ac-9d61-cfeb15d58728\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.301222 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/2c809052-d9bb-4982-8271-5b7a9a6f28f9-server-conf\") pod \"rabbitmq-server-1\" (UID: \"2c809052-d9bb-4982-8271-5b7a9a6f28f9\") " pod="openstack-kuttl-tests/rabbitmq-server-1" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.301245 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2c809052-d9bb-4982-8271-5b7a9a6f28f9-config-data\") pod \"rabbitmq-server-1\" (UID: \"2c809052-d9bb-4982-8271-5b7a9a6f28f9\") " pod="openstack-kuttl-tests/rabbitmq-server-1" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.301273 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/2c809052-d9bb-4982-8271-5b7a9a6f28f9-plugins-conf\") pod \"rabbitmq-server-1\" (UID: \"2c809052-d9bb-4982-8271-5b7a9a6f28f9\") " pod="openstack-kuttl-tests/rabbitmq-server-1" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.301295 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tb228\" (UniqueName: \"kubernetes.io/projected/2c809052-d9bb-4982-8271-5b7a9a6f28f9-kube-api-access-tb228\") pod \"rabbitmq-server-1\" (UID: \"2c809052-d9bb-4982-8271-5b7a9a6f28f9\") " pod="openstack-kuttl-tests/rabbitmq-server-1" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.301318 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: 
\"kubernetes.io/configmap/24114ddb-3b30-42ac-9d61-cfeb15d58728-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"24114ddb-3b30-42ac-9d61-cfeb15d58728\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.301318 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/24114ddb-3b30-42ac-9d61-cfeb15d58728-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"24114ddb-3b30-42ac-9d61-cfeb15d58728\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.301336 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/24114ddb-3b30-42ac-9d61-cfeb15d58728-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"24114ddb-3b30-42ac-9d61-cfeb15d58728\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.301392 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/2c809052-d9bb-4982-8271-5b7a9a6f28f9-pod-info\") pod \"rabbitmq-server-1\" (UID: \"2c809052-d9bb-4982-8271-5b7a9a6f28f9\") " pod="openstack-kuttl-tests/rabbitmq-server-1" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.301417 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/9c52efaf-b737-47bf-9ca1-109a28e19113-rabbitmq-confd\") pod \"rabbitmq-server-2\" (UID: \"9c52efaf-b737-47bf-9ca1-109a28e19113\") " pod="openstack-kuttl-tests/rabbitmq-server-2" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.301447 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/9c52efaf-b737-47bf-9ca1-109a28e19113-rabbitmq-tls\") pod \"rabbitmq-server-2\" (UID: \"9c52efaf-b737-47bf-9ca1-109a28e19113\") " pod="openstack-kuttl-tests/rabbitmq-server-2" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.301486 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/24114ddb-3b30-42ac-9d61-cfeb15d58728-server-conf\") pod \"rabbitmq-server-0\" (UID: \"24114ddb-3b30-42ac-9d61-cfeb15d58728\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.301520 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"rabbitmq-server-1\" (UID: \"2c809052-d9bb-4982-8271-5b7a9a6f28f9\") " pod="openstack-kuttl-tests/rabbitmq-server-1" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.301553 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-server-0\" (UID: \"24114ddb-3b30-42ac-9d61-cfeb15d58728\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.301572 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/2c809052-d9bb-4982-8271-5b7a9a6f28f9-erlang-cookie-secret\") pod \"rabbitmq-server-1\" (UID: 
\"2c809052-d9bb-4982-8271-5b7a9a6f28f9\") " pod="openstack-kuttl-tests/rabbitmq-server-1" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.301606 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/9c52efaf-b737-47bf-9ca1-109a28e19113-plugins-conf\") pod \"rabbitmq-server-2\" (UID: \"9c52efaf-b737-47bf-9ca1-109a28e19113\") " pod="openstack-kuttl-tests/rabbitmq-server-2" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.301625 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/2c809052-d9bb-4982-8271-5b7a9a6f28f9-rabbitmq-tls\") pod \"rabbitmq-server-1\" (UID: \"2c809052-d9bb-4982-8271-5b7a9a6f28f9\") " pod="openstack-kuttl-tests/rabbitmq-server-1" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.301672 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/2c809052-d9bb-4982-8271-5b7a9a6f28f9-rabbitmq-plugins\") pod \"rabbitmq-server-1\" (UID: \"2c809052-d9bb-4982-8271-5b7a9a6f28f9\") " pod="openstack-kuttl-tests/rabbitmq-server-1" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.301689 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9c52efaf-b737-47bf-9ca1-109a28e19113-config-data\") pod \"rabbitmq-server-2\" (UID: \"9c52efaf-b737-47bf-9ca1-109a28e19113\") " pod="openstack-kuttl-tests/rabbitmq-server-2" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.301725 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/2c809052-d9bb-4982-8271-5b7a9a6f28f9-rabbitmq-confd\") pod \"rabbitmq-server-1\" (UID: \"2c809052-d9bb-4982-8271-5b7a9a6f28f9\") " pod="openstack-kuttl-tests/rabbitmq-server-1" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.301749 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tkzmj\" (UniqueName: \"kubernetes.io/projected/9c52efaf-b737-47bf-9ca1-109a28e19113-kube-api-access-tkzmj\") pod \"rabbitmq-server-2\" (UID: \"9c52efaf-b737-47bf-9ca1-109a28e19113\") " pod="openstack-kuttl-tests/rabbitmq-server-2" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.301778 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/24114ddb-3b30-42ac-9d61-cfeb15d58728-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"24114ddb-3b30-42ac-9d61-cfeb15d58728\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.301803 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/9c52efaf-b737-47bf-9ca1-109a28e19113-erlang-cookie-secret\") pod \"rabbitmq-server-2\" (UID: \"9c52efaf-b737-47bf-9ca1-109a28e19113\") " pod="openstack-kuttl-tests/rabbitmq-server-2" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.301824 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/24114ddb-3b30-42ac-9d61-cfeb15d58728-pod-info\") pod \"rabbitmq-server-0\" 
(UID: \"24114ddb-3b30-42ac-9d61-cfeb15d58728\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.301845 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/24114ddb-3b30-42ac-9d61-cfeb15d58728-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"24114ddb-3b30-42ac-9d61-cfeb15d58728\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.301864 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/24114ddb-3b30-42ac-9d61-cfeb15d58728-config-data\") pod \"rabbitmq-server-0\" (UID: \"24114ddb-3b30-42ac-9d61-cfeb15d58728\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.301930 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/9c52efaf-b737-47bf-9ca1-109a28e19113-server-conf\") pod \"rabbitmq-server-2\" (UID: \"9c52efaf-b737-47bf-9ca1-109a28e19113\") " pod="openstack-kuttl-tests/rabbitmq-server-2" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.302670 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/24114ddb-3b30-42ac-9d61-cfeb15d58728-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"24114ddb-3b30-42ac-9d61-cfeb15d58728\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.302712 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/9c52efaf-b737-47bf-9ca1-109a28e19113-rabbitmq-plugins\") pod \"rabbitmq-server-2\" (UID: \"9c52efaf-b737-47bf-9ca1-109a28e19113\") " pod="openstack-kuttl-tests/rabbitmq-server-2" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.302674 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-server-0\" (UID: \"24114ddb-3b30-42ac-9d61-cfeb15d58728\") device mount path \"/mnt/openstack/pv12\"" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.302758 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/2c809052-d9bb-4982-8271-5b7a9a6f28f9-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-1\" (UID: \"2c809052-d9bb-4982-8271-5b7a9a6f28f9\") " pod="openstack-kuttl-tests/rabbitmq-server-1" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.302819 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/9c52efaf-b737-47bf-9ca1-109a28e19113-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-2\" (UID: \"9c52efaf-b737-47bf-9ca1-109a28e19113\") " pod="openstack-kuttl-tests/rabbitmq-server-2" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.302846 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r8tpl\" (UniqueName: \"kubernetes.io/projected/24114ddb-3b30-42ac-9d61-cfeb15d58728-kube-api-access-r8tpl\") pod 
\"rabbitmq-server-0\" (UID: \"24114ddb-3b30-42ac-9d61-cfeb15d58728\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.302866 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/9c52efaf-b737-47bf-9ca1-109a28e19113-pod-info\") pod \"rabbitmq-server-2\" (UID: \"9c52efaf-b737-47bf-9ca1-109a28e19113\") " pod="openstack-kuttl-tests/rabbitmq-server-2" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.302871 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/24114ddb-3b30-42ac-9d61-cfeb15d58728-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"24114ddb-3b30-42ac-9d61-cfeb15d58728\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.302896 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage20-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage20-crc\") pod \"rabbitmq-server-2\" (UID: \"9c52efaf-b737-47bf-9ca1-109a28e19113\") " pod="openstack-kuttl-tests/rabbitmq-server-2" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.303295 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/24114ddb-3b30-42ac-9d61-cfeb15d58728-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"24114ddb-3b30-42ac-9d61-cfeb15d58728\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.303484 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/24114ddb-3b30-42ac-9d61-cfeb15d58728-config-data\") pod \"rabbitmq-server-0\" (UID: \"24114ddb-3b30-42ac-9d61-cfeb15d58728\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.303729 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/24114ddb-3b30-42ac-9d61-cfeb15d58728-server-conf\") pod \"rabbitmq-server-0\" (UID: \"24114ddb-3b30-42ac-9d61-cfeb15d58728\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.309390 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/24114ddb-3b30-42ac-9d61-cfeb15d58728-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"24114ddb-3b30-42ac-9d61-cfeb15d58728\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.310515 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/24114ddb-3b30-42ac-9d61-cfeb15d58728-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"24114ddb-3b30-42ac-9d61-cfeb15d58728\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.311037 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/24114ddb-3b30-42ac-9d61-cfeb15d58728-pod-info\") pod \"rabbitmq-server-0\" (UID: \"24114ddb-3b30-42ac-9d61-cfeb15d58728\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.311579 4558 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/24114ddb-3b30-42ac-9d61-cfeb15d58728-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"24114ddb-3b30-42ac-9d61-cfeb15d58728\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.317132 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r8tpl\" (UniqueName: \"kubernetes.io/projected/24114ddb-3b30-42ac-9d61-cfeb15d58728-kube-api-access-r8tpl\") pod \"rabbitmq-server-0\" (UID: \"24114ddb-3b30-42ac-9d61-cfeb15d58728\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.322225 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-server-0\" (UID: \"24114ddb-3b30-42ac-9d61-cfeb15d58728\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.405516 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"rabbitmq-server-1\" (UID: \"2c809052-d9bb-4982-8271-5b7a9a6f28f9\") " pod="openstack-kuttl-tests/rabbitmq-server-1" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.405566 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/2c809052-d9bb-4982-8271-5b7a9a6f28f9-erlang-cookie-secret\") pod \"rabbitmq-server-1\" (UID: \"2c809052-d9bb-4982-8271-5b7a9a6f28f9\") " pod="openstack-kuttl-tests/rabbitmq-server-1" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.405591 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/9c52efaf-b737-47bf-9ca1-109a28e19113-plugins-conf\") pod \"rabbitmq-server-2\" (UID: \"9c52efaf-b737-47bf-9ca1-109a28e19113\") " pod="openstack-kuttl-tests/rabbitmq-server-2" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.405609 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/2c809052-d9bb-4982-8271-5b7a9a6f28f9-rabbitmq-tls\") pod \"rabbitmq-server-1\" (UID: \"2c809052-d9bb-4982-8271-5b7a9a6f28f9\") " pod="openstack-kuttl-tests/rabbitmq-server-1" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.405634 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/2c809052-d9bb-4982-8271-5b7a9a6f28f9-rabbitmq-plugins\") pod \"rabbitmq-server-1\" (UID: \"2c809052-d9bb-4982-8271-5b7a9a6f28f9\") " pod="openstack-kuttl-tests/rabbitmq-server-1" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.405660 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9c52efaf-b737-47bf-9ca1-109a28e19113-config-data\") pod \"rabbitmq-server-2\" (UID: \"9c52efaf-b737-47bf-9ca1-109a28e19113\") " pod="openstack-kuttl-tests/rabbitmq-server-2" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.405686 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/2c809052-d9bb-4982-8271-5b7a9a6f28f9-rabbitmq-confd\") pod 
\"rabbitmq-server-1\" (UID: \"2c809052-d9bb-4982-8271-5b7a9a6f28f9\") " pod="openstack-kuttl-tests/rabbitmq-server-1" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.405706 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tkzmj\" (UniqueName: \"kubernetes.io/projected/9c52efaf-b737-47bf-9ca1-109a28e19113-kube-api-access-tkzmj\") pod \"rabbitmq-server-2\" (UID: \"9c52efaf-b737-47bf-9ca1-109a28e19113\") " pod="openstack-kuttl-tests/rabbitmq-server-2" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.405729 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/9c52efaf-b737-47bf-9ca1-109a28e19113-erlang-cookie-secret\") pod \"rabbitmq-server-2\" (UID: \"9c52efaf-b737-47bf-9ca1-109a28e19113\") " pod="openstack-kuttl-tests/rabbitmq-server-2" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.405755 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/9c52efaf-b737-47bf-9ca1-109a28e19113-server-conf\") pod \"rabbitmq-server-2\" (UID: \"9c52efaf-b737-47bf-9ca1-109a28e19113\") " pod="openstack-kuttl-tests/rabbitmq-server-2" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.405781 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/9c52efaf-b737-47bf-9ca1-109a28e19113-rabbitmq-plugins\") pod \"rabbitmq-server-2\" (UID: \"9c52efaf-b737-47bf-9ca1-109a28e19113\") " pod="openstack-kuttl-tests/rabbitmq-server-2" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.405803 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/2c809052-d9bb-4982-8271-5b7a9a6f28f9-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-1\" (UID: \"2c809052-d9bb-4982-8271-5b7a9a6f28f9\") " pod="openstack-kuttl-tests/rabbitmq-server-1" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.405821 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/9c52efaf-b737-47bf-9ca1-109a28e19113-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-2\" (UID: \"9c52efaf-b737-47bf-9ca1-109a28e19113\") " pod="openstack-kuttl-tests/rabbitmq-server-2" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.405843 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/9c52efaf-b737-47bf-9ca1-109a28e19113-pod-info\") pod \"rabbitmq-server-2\" (UID: \"9c52efaf-b737-47bf-9ca1-109a28e19113\") " pod="openstack-kuttl-tests/rabbitmq-server-2" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.405869 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage20-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage20-crc\") pod \"rabbitmq-server-2\" (UID: \"9c52efaf-b737-47bf-9ca1-109a28e19113\") " pod="openstack-kuttl-tests/rabbitmq-server-2" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.406010 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/2c809052-d9bb-4982-8271-5b7a9a6f28f9-server-conf\") pod \"rabbitmq-server-1\" (UID: \"2c809052-d9bb-4982-8271-5b7a9a6f28f9\") " pod="openstack-kuttl-tests/rabbitmq-server-1" Jan 20 
17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.406621 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/9c52efaf-b737-47bf-9ca1-109a28e19113-rabbitmq-plugins\") pod \"rabbitmq-server-2\" (UID: \"9c52efaf-b737-47bf-9ca1-109a28e19113\") " pod="openstack-kuttl-tests/rabbitmq-server-2" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.406699 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2c809052-d9bb-4982-8271-5b7a9a6f28f9-config-data\") pod \"rabbitmq-server-1\" (UID: \"2c809052-d9bb-4982-8271-5b7a9a6f28f9\") " pod="openstack-kuttl-tests/rabbitmq-server-1" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.407039 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage20-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage20-crc\") pod \"rabbitmq-server-2\" (UID: \"9c52efaf-b737-47bf-9ca1-109a28e19113\") device mount path \"/mnt/openstack/pv20\"" pod="openstack-kuttl-tests/rabbitmq-server-2" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.407773 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/2c809052-d9bb-4982-8271-5b7a9a6f28f9-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-1\" (UID: \"2c809052-d9bb-4982-8271-5b7a9a6f28f9\") " pod="openstack-kuttl-tests/rabbitmq-server-1" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.406760 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/2c809052-d9bb-4982-8271-5b7a9a6f28f9-plugins-conf\") pod \"rabbitmq-server-1\" (UID: \"2c809052-d9bb-4982-8271-5b7a9a6f28f9\") " pod="openstack-kuttl-tests/rabbitmq-server-1" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.408287 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tb228\" (UniqueName: \"kubernetes.io/projected/2c809052-d9bb-4982-8271-5b7a9a6f28f9-kube-api-access-tb228\") pod \"rabbitmq-server-1\" (UID: \"2c809052-d9bb-4982-8271-5b7a9a6f28f9\") " pod="openstack-kuttl-tests/rabbitmq-server-1" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.408391 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/9c52efaf-b737-47bf-9ca1-109a28e19113-rabbitmq-confd\") pod \"rabbitmq-server-2\" (UID: \"9c52efaf-b737-47bf-9ca1-109a28e19113\") " pod="openstack-kuttl-tests/rabbitmq-server-2" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.408421 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/2c809052-d9bb-4982-8271-5b7a9a6f28f9-pod-info\") pod \"rabbitmq-server-1\" (UID: \"2c809052-d9bb-4982-8271-5b7a9a6f28f9\") " pod="openstack-kuttl-tests/rabbitmq-server-1" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.408456 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/9c52efaf-b737-47bf-9ca1-109a28e19113-rabbitmq-tls\") pod \"rabbitmq-server-2\" (UID: \"9c52efaf-b737-47bf-9ca1-109a28e19113\") " pod="openstack-kuttl-tests/rabbitmq-server-2" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.409248 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"server-conf\" (UniqueName: \"kubernetes.io/configmap/2c809052-d9bb-4982-8271-5b7a9a6f28f9-server-conf\") pod \"rabbitmq-server-1\" (UID: \"2c809052-d9bb-4982-8271-5b7a9a6f28f9\") " pod="openstack-kuttl-tests/rabbitmq-server-1" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.409432 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/9c52efaf-b737-47bf-9ca1-109a28e19113-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-2\" (UID: \"9c52efaf-b737-47bf-9ca1-109a28e19113\") " pod="openstack-kuttl-tests/rabbitmq-server-2" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.412385 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9c52efaf-b737-47bf-9ca1-109a28e19113-config-data\") pod \"rabbitmq-server-2\" (UID: \"9c52efaf-b737-47bf-9ca1-109a28e19113\") " pod="openstack-kuttl-tests/rabbitmq-server-2" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.410328 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/2c809052-d9bb-4982-8271-5b7a9a6f28f9-rabbitmq-plugins\") pod \"rabbitmq-server-1\" (UID: \"2c809052-d9bb-4982-8271-5b7a9a6f28f9\") " pod="openstack-kuttl-tests/rabbitmq-server-1" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.410648 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/9c52efaf-b737-47bf-9ca1-109a28e19113-erlang-cookie-secret\") pod \"rabbitmq-server-2\" (UID: \"9c52efaf-b737-47bf-9ca1-109a28e19113\") " pod="openstack-kuttl-tests/rabbitmq-server-2" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.410933 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2c809052-d9bb-4982-8271-5b7a9a6f28f9-config-data\") pod \"rabbitmq-server-1\" (UID: \"2c809052-d9bb-4982-8271-5b7a9a6f28f9\") " pod="openstack-kuttl-tests/rabbitmq-server-1" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.411780 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/9c52efaf-b737-47bf-9ca1-109a28e19113-server-conf\") pod \"rabbitmq-server-2\" (UID: \"9c52efaf-b737-47bf-9ca1-109a28e19113\") " pod="openstack-kuttl-tests/rabbitmq-server-2" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.411995 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/2c809052-d9bb-4982-8271-5b7a9a6f28f9-plugins-conf\") pod \"rabbitmq-server-1\" (UID: \"2c809052-d9bb-4982-8271-5b7a9a6f28f9\") " pod="openstack-kuttl-tests/rabbitmq-server-1" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.409953 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"rabbitmq-server-1\" (UID: \"2c809052-d9bb-4982-8271-5b7a9a6f28f9\") device mount path \"/mnt/openstack/pv15\"" pod="openstack-kuttl-tests/rabbitmq-server-1" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.413050 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/9c52efaf-b737-47bf-9ca1-109a28e19113-pod-info\") pod \"rabbitmq-server-2\" (UID: \"9c52efaf-b737-47bf-9ca1-109a28e19113\") " 
pod="openstack-kuttl-tests/rabbitmq-server-2" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.414524 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/2c809052-d9bb-4982-8271-5b7a9a6f28f9-erlang-cookie-secret\") pod \"rabbitmq-server-1\" (UID: \"2c809052-d9bb-4982-8271-5b7a9a6f28f9\") " pod="openstack-kuttl-tests/rabbitmq-server-1" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.414721 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/9c52efaf-b737-47bf-9ca1-109a28e19113-plugins-conf\") pod \"rabbitmq-server-2\" (UID: \"9c52efaf-b737-47bf-9ca1-109a28e19113\") " pod="openstack-kuttl-tests/rabbitmq-server-2" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.415628 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/9c52efaf-b737-47bf-9ca1-109a28e19113-rabbitmq-confd\") pod \"rabbitmq-server-2\" (UID: \"9c52efaf-b737-47bf-9ca1-109a28e19113\") " pod="openstack-kuttl-tests/rabbitmq-server-2" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.424016 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/2c809052-d9bb-4982-8271-5b7a9a6f28f9-pod-info\") pod \"rabbitmq-server-1\" (UID: \"2c809052-d9bb-4982-8271-5b7a9a6f28f9\") " pod="openstack-kuttl-tests/rabbitmq-server-1" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.431635 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/9c52efaf-b737-47bf-9ca1-109a28e19113-rabbitmq-tls\") pod \"rabbitmq-server-2\" (UID: \"9c52efaf-b737-47bf-9ca1-109a28e19113\") " pod="openstack-kuttl-tests/rabbitmq-server-2" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.433017 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/2c809052-d9bb-4982-8271-5b7a9a6f28f9-rabbitmq-confd\") pod \"rabbitmq-server-1\" (UID: \"2c809052-d9bb-4982-8271-5b7a9a6f28f9\") " pod="openstack-kuttl-tests/rabbitmq-server-1" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.436873 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tkzmj\" (UniqueName: \"kubernetes.io/projected/9c52efaf-b737-47bf-9ca1-109a28e19113-kube-api-access-tkzmj\") pod \"rabbitmq-server-2\" (UID: \"9c52efaf-b737-47bf-9ca1-109a28e19113\") " pod="openstack-kuttl-tests/rabbitmq-server-2" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.437193 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/2c809052-d9bb-4982-8271-5b7a9a6f28f9-rabbitmq-tls\") pod \"rabbitmq-server-1\" (UID: \"2c809052-d9bb-4982-8271-5b7a9a6f28f9\") " pod="openstack-kuttl-tests/rabbitmq-server-1" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.437781 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tb228\" (UniqueName: \"kubernetes.io/projected/2c809052-d9bb-4982-8271-5b7a9a6f28f9-kube-api-access-tb228\") pod \"rabbitmq-server-1\" (UID: \"2c809052-d9bb-4982-8271-5b7a9a6f28f9\") " pod="openstack-kuttl-tests/rabbitmq-server-1" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.444712 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.445734 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-0"] Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.447644 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.448028 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage20-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage20-crc\") pod \"rabbitmq-server-2\" (UID: \"9c52efaf-b737-47bf-9ca1-109a28e19113\") " pod="openstack-kuttl-tests/rabbitmq-server-2" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.452267 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"rabbitmq-cell1-server-dockercfg-fbrm2" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.452852 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"rabbitmq-cell1-default-user" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.453106 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"rabbitmq-cell1-erlang-cookie" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.453264 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"rabbitmq-cell1-config-data" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.453444 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-rabbitmq-cell1-svc" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.453617 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"rabbitmq-cell1-server-conf" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.453874 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"rabbitmq-server-1\" (UID: \"2c809052-d9bb-4982-8271-5b7a9a6f28f9\") " pod="openstack-kuttl-tests/rabbitmq-server-1" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.454599 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"rabbitmq-cell1-plugins-conf" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.456013 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-2"] Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.464606 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-0"] Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.464758 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-cell1-server-2" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.465893 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-server-2" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.468784 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-1"] Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.470096 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-cell1-server-1" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.472578 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-2"] Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.478206 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-1"] Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.487615 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-server-1" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.510821 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/344f9f31-8a81-4544-b782-5aa78dfc5cc2-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-2\" (UID: \"344f9f31-8a81-4544-b782-5aa78dfc5cc2\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-2" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.510902 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-cell1-server-2\" (UID: \"344f9f31-8a81-4544-b782-5aa78dfc5cc2\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-2" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.510925 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/344f9f31-8a81-4544-b782-5aa78dfc5cc2-pod-info\") pod \"rabbitmq-cell1-server-2\" (UID: \"344f9f31-8a81-4544-b782-5aa78dfc5cc2\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-2" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.510941 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/344f9f31-8a81-4544-b782-5aa78dfc5cc2-rabbitmq-tls\") pod \"rabbitmq-cell1-server-2\" (UID: \"344f9f31-8a81-4544-b782-5aa78dfc5cc2\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-2" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.511131 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6dae93de-b907-44f7-a94c-c691eee0af7f-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"6dae93de-b907-44f7-a94c-c691eee0af7f\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.511151 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/6dae93de-b907-44f7-a94c-c691eee0af7f-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"6dae93de-b907-44f7-a94c-c691eee0af7f\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.511196 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/344f9f31-8a81-4544-b782-5aa78dfc5cc2-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-2\" (UID: \"344f9f31-8a81-4544-b782-5aa78dfc5cc2\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-2" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.511216 4558 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/344f9f31-8a81-4544-b782-5aa78dfc5cc2-rabbitmq-confd\") pod \"rabbitmq-cell1-server-2\" (UID: \"344f9f31-8a81-4544-b782-5aa78dfc5cc2\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-2" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.511234 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/6dae93de-b907-44f7-a94c-c691eee0af7f-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"6dae93de-b907-44f7-a94c-c691eee0af7f\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.511253 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/6dae93de-b907-44f7-a94c-c691eee0af7f-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"6dae93de-b907-44f7-a94c-c691eee0af7f\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.511280 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/6dae93de-b907-44f7-a94c-c691eee0af7f-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"6dae93de-b907-44f7-a94c-c691eee0af7f\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.511296 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/344f9f31-8a81-4544-b782-5aa78dfc5cc2-config-data\") pod \"rabbitmq-cell1-server-2\" (UID: \"344f9f31-8a81-4544-b782-5aa78dfc5cc2\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-2" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.511317 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zxlm2\" (UniqueName: \"kubernetes.io/projected/344f9f31-8a81-4544-b782-5aa78dfc5cc2-kube-api-access-zxlm2\") pod \"rabbitmq-cell1-server-2\" (UID: \"344f9f31-8a81-4544-b782-5aa78dfc5cc2\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-2" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.511334 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/344f9f31-8a81-4544-b782-5aa78dfc5cc2-server-conf\") pod \"rabbitmq-cell1-server-2\" (UID: \"344f9f31-8a81-4544-b782-5aa78dfc5cc2\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-2" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.511357 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"6dae93de-b907-44f7-a94c-c691eee0af7f\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.511386 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/6dae93de-b907-44f7-a94c-c691eee0af7f-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"6dae93de-b907-44f7-a94c-c691eee0af7f\") " 
pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.511403 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/344f9f31-8a81-4544-b782-5aa78dfc5cc2-plugins-conf\") pod \"rabbitmq-cell1-server-2\" (UID: \"344f9f31-8a81-4544-b782-5aa78dfc5cc2\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-2" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.511417 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/6dae93de-b907-44f7-a94c-c691eee0af7f-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"6dae93de-b907-44f7-a94c-c691eee0af7f\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.511432 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/6dae93de-b907-44f7-a94c-c691eee0af7f-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"6dae93de-b907-44f7-a94c-c691eee0af7f\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.511449 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/344f9f31-8a81-4544-b782-5aa78dfc5cc2-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-2\" (UID: \"344f9f31-8a81-4544-b782-5aa78dfc5cc2\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-2" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.511468 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vwdm9\" (UniqueName: \"kubernetes.io/projected/6dae93de-b907-44f7-a94c-c691eee0af7f-kube-api-access-vwdm9\") pod \"rabbitmq-cell1-server-0\" (UID: \"6dae93de-b907-44f7-a94c-c691eee0af7f\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.511493 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/6dae93de-b907-44f7-a94c-c691eee0af7f-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"6dae93de-b907-44f7-a94c-c691eee0af7f\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.620423 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/344f9f31-8a81-4544-b782-5aa78dfc5cc2-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-2\" (UID: \"344f9f31-8a81-4544-b782-5aa78dfc5cc2\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-2" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.620696 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/8573a632-84e0-4f80-b811-5646b571c318-plugins-conf\") pod \"rabbitmq-cell1-server-1\" (UID: \"8573a632-84e0-4f80-b811-5646b571c318\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-1" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.620720 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: 
\"kubernetes.io/projected/344f9f31-8a81-4544-b782-5aa78dfc5cc2-rabbitmq-confd\") pod \"rabbitmq-cell1-server-2\" (UID: \"344f9f31-8a81-4544-b782-5aa78dfc5cc2\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-2" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.620739 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/8573a632-84e0-4f80-b811-5646b571c318-rabbitmq-tls\") pod \"rabbitmq-cell1-server-1\" (UID: \"8573a632-84e0-4f80-b811-5646b571c318\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-1" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.620767 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/6dae93de-b907-44f7-a94c-c691eee0af7f-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"6dae93de-b907-44f7-a94c-c691eee0af7f\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.620790 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-1\" (UID: \"8573a632-84e0-4f80-b811-5646b571c318\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-1" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.620814 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/6dae93de-b907-44f7-a94c-c691eee0af7f-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"6dae93de-b907-44f7-a94c-c691eee0af7f\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.620869 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/6dae93de-b907-44f7-a94c-c691eee0af7f-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"6dae93de-b907-44f7-a94c-c691eee0af7f\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.621641 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/344f9f31-8a81-4544-b782-5aa78dfc5cc2-config-data\") pod \"rabbitmq-cell1-server-2\" (UID: \"344f9f31-8a81-4544-b782-5aa78dfc5cc2\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-2" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.621791 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/344f9f31-8a81-4544-b782-5aa78dfc5cc2-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-2\" (UID: \"344f9f31-8a81-4544-b782-5aa78dfc5cc2\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-2" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.620886 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/344f9f31-8a81-4544-b782-5aa78dfc5cc2-config-data\") pod \"rabbitmq-cell1-server-2\" (UID: \"344f9f31-8a81-4544-b782-5aa78dfc5cc2\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-2" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.621977 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: 
\"kubernetes.io/empty-dir/8573a632-84e0-4f80-b811-5646b571c318-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-1\" (UID: \"8573a632-84e0-4f80-b811-5646b571c318\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-1" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.622009 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zxlm2\" (UniqueName: \"kubernetes.io/projected/344f9f31-8a81-4544-b782-5aa78dfc5cc2-kube-api-access-zxlm2\") pod \"rabbitmq-cell1-server-2\" (UID: \"344f9f31-8a81-4544-b782-5aa78dfc5cc2\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-2" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.622033 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/8573a632-84e0-4f80-b811-5646b571c318-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-1\" (UID: \"8573a632-84e0-4f80-b811-5646b571c318\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-1" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.622061 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/344f9f31-8a81-4544-b782-5aa78dfc5cc2-server-conf\") pod \"rabbitmq-cell1-server-2\" (UID: \"344f9f31-8a81-4544-b782-5aa78dfc5cc2\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-2" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.622141 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"6dae93de-b907-44f7-a94c-c691eee0af7f\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.622214 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/8573a632-84e0-4f80-b811-5646b571c318-rabbitmq-confd\") pod \"rabbitmq-cell1-server-1\" (UID: \"8573a632-84e0-4f80-b811-5646b571c318\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-1" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.622235 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v9m9h\" (UniqueName: \"kubernetes.io/projected/8573a632-84e0-4f80-b811-5646b571c318-kube-api-access-v9m9h\") pod \"rabbitmq-cell1-server-1\" (UID: \"8573a632-84e0-4f80-b811-5646b571c318\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-1" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.622255 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8573a632-84e0-4f80-b811-5646b571c318-config-data\") pod \"rabbitmq-cell1-server-1\" (UID: \"8573a632-84e0-4f80-b811-5646b571c318\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-1" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.622276 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/6dae93de-b907-44f7-a94c-c691eee0af7f-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"6dae93de-b907-44f7-a94c-c691eee0af7f\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.622296 4558 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/344f9f31-8a81-4544-b782-5aa78dfc5cc2-plugins-conf\") pod \"rabbitmq-cell1-server-2\" (UID: \"344f9f31-8a81-4544-b782-5aa78dfc5cc2\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-2" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.622317 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/6dae93de-b907-44f7-a94c-c691eee0af7f-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"6dae93de-b907-44f7-a94c-c691eee0af7f\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.622334 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/6dae93de-b907-44f7-a94c-c691eee0af7f-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"6dae93de-b907-44f7-a94c-c691eee0af7f\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.622362 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/344f9f31-8a81-4544-b782-5aa78dfc5cc2-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-2\" (UID: \"344f9f31-8a81-4544-b782-5aa78dfc5cc2\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-2" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.622391 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vwdm9\" (UniqueName: \"kubernetes.io/projected/6dae93de-b907-44f7-a94c-c691eee0af7f-kube-api-access-vwdm9\") pod \"rabbitmq-cell1-server-0\" (UID: \"6dae93de-b907-44f7-a94c-c691eee0af7f\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.622416 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/8573a632-84e0-4f80-b811-5646b571c318-pod-info\") pod \"rabbitmq-cell1-server-1\" (UID: \"8573a632-84e0-4f80-b811-5646b571c318\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-1" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.622435 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/6dae93de-b907-44f7-a94c-c691eee0af7f-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"6dae93de-b907-44f7-a94c-c691eee0af7f\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.622450 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/8573a632-84e0-4f80-b811-5646b571c318-server-conf\") pod \"rabbitmq-cell1-server-1\" (UID: \"8573a632-84e0-4f80-b811-5646b571c318\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-1" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.622517 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/344f9f31-8a81-4544-b782-5aa78dfc5cc2-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-2\" (UID: \"344f9f31-8a81-4544-b782-5aa78dfc5cc2\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-2" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.622536 4558 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/8573a632-84e0-4f80-b811-5646b571c318-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-1\" (UID: \"8573a632-84e0-4f80-b811-5646b571c318\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-1" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.622552 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/344f9f31-8a81-4544-b782-5aa78dfc5cc2-rabbitmq-tls\") pod \"rabbitmq-cell1-server-2\" (UID: \"344f9f31-8a81-4544-b782-5aa78dfc5cc2\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-2" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.622572 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-cell1-server-2\" (UID: \"344f9f31-8a81-4544-b782-5aa78dfc5cc2\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-2" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.622589 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/344f9f31-8a81-4544-b782-5aa78dfc5cc2-pod-info\") pod \"rabbitmq-cell1-server-2\" (UID: \"344f9f31-8a81-4544-b782-5aa78dfc5cc2\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-2" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.622630 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6dae93de-b907-44f7-a94c-c691eee0af7f-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"6dae93de-b907-44f7-a94c-c691eee0af7f\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.622650 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/6dae93de-b907-44f7-a94c-c691eee0af7f-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"6dae93de-b907-44f7-a94c-c691eee0af7f\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.623128 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/6dae93de-b907-44f7-a94c-c691eee0af7f-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"6dae93de-b907-44f7-a94c-c691eee0af7f\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.623911 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/344f9f31-8a81-4544-b782-5aa78dfc5cc2-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-2\" (UID: \"344f9f31-8a81-4544-b782-5aa78dfc5cc2\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-2" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.624871 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/344f9f31-8a81-4544-b782-5aa78dfc5cc2-server-conf\") pod \"rabbitmq-cell1-server-2\" (UID: \"344f9f31-8a81-4544-b782-5aa78dfc5cc2\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-2" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.625241 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/6dae93de-b907-44f7-a94c-c691eee0af7f-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"6dae93de-b907-44f7-a94c-c691eee0af7f\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.625655 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/6dae93de-b907-44f7-a94c-c691eee0af7f-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"6dae93de-b907-44f7-a94c-c691eee0af7f\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.626291 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-cell1-server-2\" (UID: \"344f9f31-8a81-4544-b782-5aa78dfc5cc2\") device mount path \"/mnt/openstack/pv04\"" pod="openstack-kuttl-tests/rabbitmq-cell1-server-2" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.626702 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/6dae93de-b907-44f7-a94c-c691eee0af7f-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"6dae93de-b907-44f7-a94c-c691eee0af7f\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.632668 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/6dae93de-b907-44f7-a94c-c691eee0af7f-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"6dae93de-b907-44f7-a94c-c691eee0af7f\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.633061 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/344f9f31-8a81-4544-b782-5aa78dfc5cc2-plugins-conf\") pod \"rabbitmq-cell1-server-2\" (UID: \"344f9f31-8a81-4544-b782-5aa78dfc5cc2\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-2" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.633311 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"6dae93de-b907-44f7-a94c-c691eee0af7f\") device mount path \"/mnt/openstack/pv02\"" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.633651 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/6dae93de-b907-44f7-a94c-c691eee0af7f-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"6dae93de-b907-44f7-a94c-c691eee0af7f\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.642505 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/344f9f31-8a81-4544-b782-5aa78dfc5cc2-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-2\" (UID: \"344f9f31-8a81-4544-b782-5aa78dfc5cc2\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-2" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.643607 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/configmap/6dae93de-b907-44f7-a94c-c691eee0af7f-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"6dae93de-b907-44f7-a94c-c691eee0af7f\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.653853 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/344f9f31-8a81-4544-b782-5aa78dfc5cc2-pod-info\") pod \"rabbitmq-cell1-server-2\" (UID: \"344f9f31-8a81-4544-b782-5aa78dfc5cc2\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-2" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.666764 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zxlm2\" (UniqueName: \"kubernetes.io/projected/344f9f31-8a81-4544-b782-5aa78dfc5cc2-kube-api-access-zxlm2\") pod \"rabbitmq-cell1-server-2\" (UID: \"344f9f31-8a81-4544-b782-5aa78dfc5cc2\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-2" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.668037 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/344f9f31-8a81-4544-b782-5aa78dfc5cc2-rabbitmq-confd\") pod \"rabbitmq-cell1-server-2\" (UID: \"344f9f31-8a81-4544-b782-5aa78dfc5cc2\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-2" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.668141 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/6dae93de-b907-44f7-a94c-c691eee0af7f-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"6dae93de-b907-44f7-a94c-c691eee0af7f\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.668359 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/6dae93de-b907-44f7-a94c-c691eee0af7f-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"6dae93de-b907-44f7-a94c-c691eee0af7f\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.668711 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/344f9f31-8a81-4544-b782-5aa78dfc5cc2-rabbitmq-tls\") pod \"rabbitmq-cell1-server-2\" (UID: \"344f9f31-8a81-4544-b782-5aa78dfc5cc2\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-2" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.669464 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-cell1-server-2\" (UID: \"344f9f31-8a81-4544-b782-5aa78dfc5cc2\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-2" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.672061 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vwdm9\" (UniqueName: \"kubernetes.io/projected/6dae93de-b907-44f7-a94c-c691eee0af7f-kube-api-access-vwdm9\") pod \"rabbitmq-cell1-server-0\" (UID: \"6dae93de-b907-44f7-a94c-c691eee0af7f\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.686004 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"6dae93de-b907-44f7-a94c-c691eee0af7f\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.723963 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/8573a632-84e0-4f80-b811-5646b571c318-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-1\" (UID: \"8573a632-84e0-4f80-b811-5646b571c318\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-1" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.724107 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/8573a632-84e0-4f80-b811-5646b571c318-plugins-conf\") pod \"rabbitmq-cell1-server-1\" (UID: \"8573a632-84e0-4f80-b811-5646b571c318\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-1" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.724134 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/8573a632-84e0-4f80-b811-5646b571c318-rabbitmq-tls\") pod \"rabbitmq-cell1-server-1\" (UID: \"8573a632-84e0-4f80-b811-5646b571c318\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-1" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.724208 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-1\" (UID: \"8573a632-84e0-4f80-b811-5646b571c318\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-1" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.724292 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/8573a632-84e0-4f80-b811-5646b571c318-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-1\" (UID: \"8573a632-84e0-4f80-b811-5646b571c318\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-1" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.724321 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/8573a632-84e0-4f80-b811-5646b571c318-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-1\" (UID: \"8573a632-84e0-4f80-b811-5646b571c318\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-1" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.724392 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/8573a632-84e0-4f80-b811-5646b571c318-rabbitmq-confd\") pod \"rabbitmq-cell1-server-1\" (UID: \"8573a632-84e0-4f80-b811-5646b571c318\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-1" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.724418 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8573a632-84e0-4f80-b811-5646b571c318-config-data\") pod \"rabbitmq-cell1-server-1\" (UID: \"8573a632-84e0-4f80-b811-5646b571c318\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-1" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.724435 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v9m9h\" (UniqueName: \"kubernetes.io/projected/8573a632-84e0-4f80-b811-5646b571c318-kube-api-access-v9m9h\") pod \"rabbitmq-cell1-server-1\" (UID: 
\"8573a632-84e0-4f80-b811-5646b571c318\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-1" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.724440 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/8573a632-84e0-4f80-b811-5646b571c318-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-1\" (UID: \"8573a632-84e0-4f80-b811-5646b571c318\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-1" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.724535 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/8573a632-84e0-4f80-b811-5646b571c318-pod-info\") pod \"rabbitmq-cell1-server-1\" (UID: \"8573a632-84e0-4f80-b811-5646b571c318\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-1" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.724593 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/8573a632-84e0-4f80-b811-5646b571c318-server-conf\") pod \"rabbitmq-cell1-server-1\" (UID: \"8573a632-84e0-4f80-b811-5646b571c318\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-1" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.724841 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/8573a632-84e0-4f80-b811-5646b571c318-plugins-conf\") pod \"rabbitmq-cell1-server-1\" (UID: \"8573a632-84e0-4f80-b811-5646b571c318\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-1" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.724980 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/8573a632-84e0-4f80-b811-5646b571c318-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-1\" (UID: \"8573a632-84e0-4f80-b811-5646b571c318\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-1" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.725098 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-1\" (UID: \"8573a632-84e0-4f80-b811-5646b571c318\") device mount path \"/mnt/openstack/pv05\"" pod="openstack-kuttl-tests/rabbitmq-cell1-server-1" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.725352 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8573a632-84e0-4f80-b811-5646b571c318-config-data\") pod \"rabbitmq-cell1-server-1\" (UID: \"8573a632-84e0-4f80-b811-5646b571c318\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-1" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.727643 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/8573a632-84e0-4f80-b811-5646b571c318-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-1\" (UID: \"8573a632-84e0-4f80-b811-5646b571c318\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-1" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.728755 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/8573a632-84e0-4f80-b811-5646b571c318-server-conf\") pod \"rabbitmq-cell1-server-1\" (UID: \"8573a632-84e0-4f80-b811-5646b571c318\") " 
pod="openstack-kuttl-tests/rabbitmq-cell1-server-1" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.728904 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/8573a632-84e0-4f80-b811-5646b571c318-pod-info\") pod \"rabbitmq-cell1-server-1\" (UID: \"8573a632-84e0-4f80-b811-5646b571c318\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-1" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.728997 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/8573a632-84e0-4f80-b811-5646b571c318-rabbitmq-tls\") pod \"rabbitmq-cell1-server-1\" (UID: \"8573a632-84e0-4f80-b811-5646b571c318\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-1" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.730296 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/8573a632-84e0-4f80-b811-5646b571c318-rabbitmq-confd\") pod \"rabbitmq-cell1-server-1\" (UID: \"8573a632-84e0-4f80-b811-5646b571c318\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-1" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.739920 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v9m9h\" (UniqueName: \"kubernetes.io/projected/8573a632-84e0-4f80-b811-5646b571c318-kube-api-access-v9m9h\") pod \"rabbitmq-cell1-server-1\" (UID: \"8573a632-84e0-4f80-b811-5646b571c318\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-1" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.752342 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-cell1-server-1\" (UID: \"8573a632-84e0-4f80-b811-5646b571c318\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-1" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.800592 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.814713 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-cell1-server-2" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.822641 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-cell1-server-1" Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.922022 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:17:10 crc kubenswrapper[4558]: I0120 17:17:10.985410 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-2"] Jan 20 17:17:11 crc kubenswrapper[4558]: W0120 17:17:11.012106 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9c52efaf_b737_47bf_9ca1_109a28e19113.slice/crio-2cb04f6495d45a0cdaa88baf16dcc76bd306519f1a6882c7808d53d85c6930dd WatchSource:0}: Error finding container 2cb04f6495d45a0cdaa88baf16dcc76bd306519f1a6882c7808d53d85c6930dd: Status 404 returned error can't find the container with id 2cb04f6495d45a0cdaa88baf16dcc76bd306519f1a6882c7808d53d85c6930dd Jan 20 17:17:11 crc kubenswrapper[4558]: I0120 17:17:11.047742 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-0" event={"ID":"24114ddb-3b30-42ac-9d61-cfeb15d58728","Type":"ContainerStarted","Data":"491307bb1a7e36c87854cbad521231e4dbe2cb8534a567deb25f01d750f7a550"} Jan 20 17:17:11 crc kubenswrapper[4558]: I0120 17:17:11.049328 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-0"] Jan 20 17:17:11 crc kubenswrapper[4558]: I0120 17:17:11.058680 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-1"] Jan 20 17:17:11 crc kubenswrapper[4558]: I0120 17:17:11.198140 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-2"] Jan 20 17:17:11 crc kubenswrapper[4558]: W0120 17:17:11.203758 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod344f9f31_8a81_4544_b782_5aa78dfc5cc2.slice/crio-1761640eb11c76c260cbae9e59a3c25de6075b8709647ef367e91f713ac7d087 WatchSource:0}: Error finding container 1761640eb11c76c260cbae9e59a3c25de6075b8709647ef367e91f713ac7d087: Status 404 returned error can't find the container with id 1761640eb11c76c260cbae9e59a3c25de6075b8709647ef367e91f713ac7d087 Jan 20 17:17:11 crc kubenswrapper[4558]: I0120 17:17:11.373889 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-1"] Jan 20 17:17:11 crc kubenswrapper[4558]: W0120 17:17:11.378567 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8573a632_84e0_4f80_b811_5646b571c318.slice/crio-b4a66fa84377ea5e80af90700c72d18b5c76aad252ce0cb121848ab3c2c56c40 WatchSource:0}: Error finding container b4a66fa84377ea5e80af90700c72d18b5c76aad252ce0cb121848ab3c2c56c40: Status 404 returned error can't find the container with id b4a66fa84377ea5e80af90700c72d18b5c76aad252ce0cb121848ab3c2c56c40 Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:11.996266 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:11.998201 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.003704 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-galera-openstack-svc" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.003918 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-scripts" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.003940 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-config-data" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.004444 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"galera-openstack-dockercfg-pc4xf" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.011498 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"combined-ca-bundle" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.013982 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.045213 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/openstack-galera-2"] Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.046736 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-2" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.060210 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/openstack-galera-1"] Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.062256 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-1" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.067696 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-1" event={"ID":"8573a632-84e0-4f80-b811-5646b571c318","Type":"ContainerStarted","Data":"b4a66fa84377ea5e80af90700c72d18b5c76aad252ce0cb121848ab3c2c56c40"} Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.071014 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-galera-2"] Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.073050 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" event={"ID":"6dae93de-b907-44f7-a94c-c691eee0af7f","Type":"ContainerStarted","Data":"65fe42fe25e67f4d2e7a0b380665cfa50e34c28aa831796b5829b20cecf6a5cb"} Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.073092 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" event={"ID":"6dae93de-b907-44f7-a94c-c691eee0af7f","Type":"ContainerStarted","Data":"46fa266734e1ff561fe2d977f15dd262b64d422883e461cef06139f76eadc253"} Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.074311 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-0" event={"ID":"24114ddb-3b30-42ac-9d61-cfeb15d58728","Type":"ContainerStarted","Data":"4c8665b5b235011d33eb908501c51ff5695ec224cf17893391187c66bf1ab060"} Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.074906 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-galera-1"] Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.078250 4558 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-2" event={"ID":"344f9f31-8a81-4544-b782-5aa78dfc5cc2","Type":"ContainerStarted","Data":"1761640eb11c76c260cbae9e59a3c25de6075b8709647ef367e91f713ac7d087"} Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.079397 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-1" event={"ID":"2c809052-d9bb-4982-8271-5b7a9a6f28f9","Type":"ContainerStarted","Data":"ad49acfca51d4d3a588a5798f1b752801f80e65a4b53a3236287f42cff68fa6c"} Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.082603 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-2" event={"ID":"9c52efaf-b737-47bf-9ca1-109a28e19113","Type":"ContainerStarted","Data":"2cb04f6495d45a0cdaa88baf16dcc76bd306519f1a6882c7808d53d85c6930dd"} Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.149668 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/fa03920f-0e4b-458f-956b-b658786f9792-config-data-generated\") pod \"openstack-galera-2\" (UID: \"fa03920f-0e4b-458f-956b-b658786f9792\") " pod="openstack-kuttl-tests/openstack-galera-2" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.149741 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/594280a4-cae4-4455-9838-ba4ef8d2f2c1-config-data-default\") pod \"openstack-galera-1\" (UID: \"594280a4-cae4-4455-9838-ba4ef8d2f2c1\") " pod="openstack-kuttl-tests/openstack-galera-1" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.149772 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/3abae449-b665-40ba-bdac-ea1e908e1952-kolla-config\") pod \"openstack-galera-0\" (UID: \"3abae449-b665-40ba-bdac-ea1e908e1952\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.149795 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/fa03920f-0e4b-458f-956b-b658786f9792-config-data-default\") pod \"openstack-galera-2\" (UID: \"fa03920f-0e4b-458f-956b-b658786f9792\") " pod="openstack-kuttl-tests/openstack-galera-2" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.149814 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/3abae449-b665-40ba-bdac-ea1e908e1952-config-data-generated\") pod \"openstack-galera-0\" (UID: \"3abae449-b665-40ba-bdac-ea1e908e1952\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.149933 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"openstack-galera-0\" (UID: \"3abae449-b665-40ba-bdac-ea1e908e1952\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.149964 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hcxnl\" (UniqueName: 
\"kubernetes.io/projected/fa03920f-0e4b-458f-956b-b658786f9792-kube-api-access-hcxnl\") pod \"openstack-galera-2\" (UID: \"fa03920f-0e4b-458f-956b-b658786f9792\") " pod="openstack-kuttl-tests/openstack-galera-2" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.149985 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fa03920f-0e4b-458f-956b-b658786f9792-operator-scripts\") pod \"openstack-galera-2\" (UID: \"fa03920f-0e4b-458f-956b-b658786f9792\") " pod="openstack-kuttl-tests/openstack-galera-2" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.150026 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/594280a4-cae4-4455-9838-ba4ef8d2f2c1-config-data-generated\") pod \"openstack-galera-1\" (UID: \"594280a4-cae4-4455-9838-ba4ef8d2f2c1\") " pod="openstack-kuttl-tests/openstack-galera-1" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.150066 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/594280a4-cae4-4455-9838-ba4ef8d2f2c1-combined-ca-bundle\") pod \"openstack-galera-1\" (UID: \"594280a4-cae4-4455-9838-ba4ef8d2f2c1\") " pod="openstack-kuttl-tests/openstack-galera-1" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.150091 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/594280a4-cae4-4455-9838-ba4ef8d2f2c1-operator-scripts\") pod \"openstack-galera-1\" (UID: \"594280a4-cae4-4455-9838-ba4ef8d2f2c1\") " pod="openstack-kuttl-tests/openstack-galera-1" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.150223 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3abae449-b665-40ba-bdac-ea1e908e1952-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"3abae449-b665-40ba-bdac-ea1e908e1952\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.150274 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rwtfw\" (UniqueName: \"kubernetes.io/projected/3abae449-b665-40ba-bdac-ea1e908e1952-kube-api-access-rwtfw\") pod \"openstack-galera-0\" (UID: \"3abae449-b665-40ba-bdac-ea1e908e1952\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.150317 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3abae449-b665-40ba-bdac-ea1e908e1952-operator-scripts\") pod \"openstack-galera-0\" (UID: \"3abae449-b665-40ba-bdac-ea1e908e1952\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.150357 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/fa03920f-0e4b-458f-956b-b658786f9792-galera-tls-certs\") pod \"openstack-galera-2\" (UID: \"fa03920f-0e4b-458f-956b-b658786f9792\") " pod="openstack-kuttl-tests/openstack-galera-2" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.150388 4558 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/3abae449-b665-40ba-bdac-ea1e908e1952-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"3abae449-b665-40ba-bdac-ea1e908e1952\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.150425 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/594280a4-cae4-4455-9838-ba4ef8d2f2c1-kolla-config\") pod \"openstack-galera-1\" (UID: \"594280a4-cae4-4455-9838-ba4ef8d2f2c1\") " pod="openstack-kuttl-tests/openstack-galera-1" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.150442 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa03920f-0e4b-458f-956b-b658786f9792-combined-ca-bundle\") pod \"openstack-galera-2\" (UID: \"fa03920f-0e4b-458f-956b-b658786f9792\") " pod="openstack-kuttl-tests/openstack-galera-2" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.150457 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage16-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage16-crc\") pod \"openstack-galera-1\" (UID: \"594280a4-cae4-4455-9838-ba4ef8d2f2c1\") " pod="openstack-kuttl-tests/openstack-galera-1" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.150471 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/fa03920f-0e4b-458f-956b-b658786f9792-kolla-config\") pod \"openstack-galera-2\" (UID: \"fa03920f-0e4b-458f-956b-b658786f9792\") " pod="openstack-kuttl-tests/openstack-galera-2" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.150515 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-galera-2\" (UID: \"fa03920f-0e4b-458f-956b-b658786f9792\") " pod="openstack-kuttl-tests/openstack-galera-2" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.150555 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rdf2x\" (UniqueName: \"kubernetes.io/projected/594280a4-cae4-4455-9838-ba4ef8d2f2c1-kube-api-access-rdf2x\") pod \"openstack-galera-1\" (UID: \"594280a4-cae4-4455-9838-ba4ef8d2f2c1\") " pod="openstack-kuttl-tests/openstack-galera-1" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.150581 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/594280a4-cae4-4455-9838-ba4ef8d2f2c1-galera-tls-certs\") pod \"openstack-galera-1\" (UID: \"594280a4-cae4-4455-9838-ba4ef8d2f2c1\") " pod="openstack-kuttl-tests/openstack-galera-1" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.150598 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/3abae449-b665-40ba-bdac-ea1e908e1952-config-data-default\") pod \"openstack-galera-0\" (UID: \"3abae449-b665-40ba-bdac-ea1e908e1952\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 
17:17:12.253238 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/fa03920f-0e4b-458f-956b-b658786f9792-config-data-generated\") pod \"openstack-galera-2\" (UID: \"fa03920f-0e4b-458f-956b-b658786f9792\") " pod="openstack-kuttl-tests/openstack-galera-2" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.253302 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/594280a4-cae4-4455-9838-ba4ef8d2f2c1-config-data-default\") pod \"openstack-galera-1\" (UID: \"594280a4-cae4-4455-9838-ba4ef8d2f2c1\") " pod="openstack-kuttl-tests/openstack-galera-1" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.253342 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/3abae449-b665-40ba-bdac-ea1e908e1952-kolla-config\") pod \"openstack-galera-0\" (UID: \"3abae449-b665-40ba-bdac-ea1e908e1952\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.253373 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/fa03920f-0e4b-458f-956b-b658786f9792-config-data-default\") pod \"openstack-galera-2\" (UID: \"fa03920f-0e4b-458f-956b-b658786f9792\") " pod="openstack-kuttl-tests/openstack-galera-2" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.253419 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/3abae449-b665-40ba-bdac-ea1e908e1952-config-data-generated\") pod \"openstack-galera-0\" (UID: \"3abae449-b665-40ba-bdac-ea1e908e1952\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.253510 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"openstack-galera-0\" (UID: \"3abae449-b665-40ba-bdac-ea1e908e1952\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.253534 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hcxnl\" (UniqueName: \"kubernetes.io/projected/fa03920f-0e4b-458f-956b-b658786f9792-kube-api-access-hcxnl\") pod \"openstack-galera-2\" (UID: \"fa03920f-0e4b-458f-956b-b658786f9792\") " pod="openstack-kuttl-tests/openstack-galera-2" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.253555 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fa03920f-0e4b-458f-956b-b658786f9792-operator-scripts\") pod \"openstack-galera-2\" (UID: \"fa03920f-0e4b-458f-956b-b658786f9792\") " pod="openstack-kuttl-tests/openstack-galera-2" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.253595 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/594280a4-cae4-4455-9838-ba4ef8d2f2c1-config-data-generated\") pod \"openstack-galera-1\" (UID: \"594280a4-cae4-4455-9838-ba4ef8d2f2c1\") " pod="openstack-kuttl-tests/openstack-galera-1" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.253639 4558 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/594280a4-cae4-4455-9838-ba4ef8d2f2c1-combined-ca-bundle\") pod \"openstack-galera-1\" (UID: \"594280a4-cae4-4455-9838-ba4ef8d2f2c1\") " pod="openstack-kuttl-tests/openstack-galera-1" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.253712 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/594280a4-cae4-4455-9838-ba4ef8d2f2c1-operator-scripts\") pod \"openstack-galera-1\" (UID: \"594280a4-cae4-4455-9838-ba4ef8d2f2c1\") " pod="openstack-kuttl-tests/openstack-galera-1" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.253723 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/fa03920f-0e4b-458f-956b-b658786f9792-config-data-generated\") pod \"openstack-galera-2\" (UID: \"fa03920f-0e4b-458f-956b-b658786f9792\") " pod="openstack-kuttl-tests/openstack-galera-2" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.253799 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3abae449-b665-40ba-bdac-ea1e908e1952-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"3abae449-b665-40ba-bdac-ea1e908e1952\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.253839 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rwtfw\" (UniqueName: \"kubernetes.io/projected/3abae449-b665-40ba-bdac-ea1e908e1952-kube-api-access-rwtfw\") pod \"openstack-galera-0\" (UID: \"3abae449-b665-40ba-bdac-ea1e908e1952\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.253918 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3abae449-b665-40ba-bdac-ea1e908e1952-operator-scripts\") pod \"openstack-galera-0\" (UID: \"3abae449-b665-40ba-bdac-ea1e908e1952\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.253962 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/fa03920f-0e4b-458f-956b-b658786f9792-galera-tls-certs\") pod \"openstack-galera-2\" (UID: \"fa03920f-0e4b-458f-956b-b658786f9792\") " pod="openstack-kuttl-tests/openstack-galera-2" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.254001 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/3abae449-b665-40ba-bdac-ea1e908e1952-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"3abae449-b665-40ba-bdac-ea1e908e1952\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.254031 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/594280a4-cae4-4455-9838-ba4ef8d2f2c1-kolla-config\") pod \"openstack-galera-1\" (UID: \"594280a4-cae4-4455-9838-ba4ef8d2f2c1\") " pod="openstack-kuttl-tests/openstack-galera-1" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.254047 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/fa03920f-0e4b-458f-956b-b658786f9792-combined-ca-bundle\") pod \"openstack-galera-2\" (UID: \"fa03920f-0e4b-458f-956b-b658786f9792\") " pod="openstack-kuttl-tests/openstack-galera-2" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.254067 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage16-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage16-crc\") pod \"openstack-galera-1\" (UID: \"594280a4-cae4-4455-9838-ba4ef8d2f2c1\") " pod="openstack-kuttl-tests/openstack-galera-1" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.254085 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/fa03920f-0e4b-458f-956b-b658786f9792-kolla-config\") pod \"openstack-galera-2\" (UID: \"fa03920f-0e4b-458f-956b-b658786f9792\") " pod="openstack-kuttl-tests/openstack-galera-2" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.254199 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-galera-2\" (UID: \"fa03920f-0e4b-458f-956b-b658786f9792\") " pod="openstack-kuttl-tests/openstack-galera-2" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.254271 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdf2x\" (UniqueName: \"kubernetes.io/projected/594280a4-cae4-4455-9838-ba4ef8d2f2c1-kube-api-access-rdf2x\") pod \"openstack-galera-1\" (UID: \"594280a4-cae4-4455-9838-ba4ef8d2f2c1\") " pod="openstack-kuttl-tests/openstack-galera-1" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.254301 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/594280a4-cae4-4455-9838-ba4ef8d2f2c1-galera-tls-certs\") pod \"openstack-galera-1\" (UID: \"594280a4-cae4-4455-9838-ba4ef8d2f2c1\") " pod="openstack-kuttl-tests/openstack-galera-1" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.254323 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/3abae449-b665-40ba-bdac-ea1e908e1952-config-data-default\") pod \"openstack-galera-0\" (UID: \"3abae449-b665-40ba-bdac-ea1e908e1952\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.254776 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/594280a4-cae4-4455-9838-ba4ef8d2f2c1-config-data-default\") pod \"openstack-galera-1\" (UID: \"594280a4-cae4-4455-9838-ba4ef8d2f2c1\") " pod="openstack-kuttl-tests/openstack-galera-1" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.254897 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/3abae449-b665-40ba-bdac-ea1e908e1952-kolla-config\") pod \"openstack-galera-0\" (UID: \"3abae449-b665-40ba-bdac-ea1e908e1952\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.255224 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/594280a4-cae4-4455-9838-ba4ef8d2f2c1-kolla-config\") pod \"openstack-galera-1\" (UID: \"594280a4-cae4-4455-9838-ba4ef8d2f2c1\") " 
pod="openstack-kuttl-tests/openstack-galera-1" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.255282 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/594280a4-cae4-4455-9838-ba4ef8d2f2c1-config-data-generated\") pod \"openstack-galera-1\" (UID: \"594280a4-cae4-4455-9838-ba4ef8d2f2c1\") " pod="openstack-kuttl-tests/openstack-galera-1" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.255389 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/3abae449-b665-40ba-bdac-ea1e908e1952-config-data-default\") pod \"openstack-galera-0\" (UID: \"3abae449-b665-40ba-bdac-ea1e908e1952\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.255932 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/fa03920f-0e4b-458f-956b-b658786f9792-config-data-default\") pod \"openstack-galera-2\" (UID: \"fa03920f-0e4b-458f-956b-b658786f9792\") " pod="openstack-kuttl-tests/openstack-galera-2" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.256408 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fa03920f-0e4b-458f-956b-b658786f9792-operator-scripts\") pod \"openstack-galera-2\" (UID: \"fa03920f-0e4b-458f-956b-b658786f9792\") " pod="openstack-kuttl-tests/openstack-galera-2" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.256532 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"openstack-galera-0\" (UID: \"3abae449-b665-40ba-bdac-ea1e908e1952\") device mount path \"/mnt/openstack/pv08\"" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.256664 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/594280a4-cae4-4455-9838-ba4ef8d2f2c1-operator-scripts\") pod \"openstack-galera-1\" (UID: \"594280a4-cae4-4455-9838-ba4ef8d2f2c1\") " pod="openstack-kuttl-tests/openstack-galera-1" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.256897 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3abae449-b665-40ba-bdac-ea1e908e1952-operator-scripts\") pod \"openstack-galera-0\" (UID: \"3abae449-b665-40ba-bdac-ea1e908e1952\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.257068 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-galera-2\" (UID: \"fa03920f-0e4b-458f-956b-b658786f9792\") device mount path \"/mnt/openstack/pv03\"" pod="openstack-kuttl-tests/openstack-galera-2" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.257095 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage16-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage16-crc\") pod \"openstack-galera-1\" (UID: \"594280a4-cae4-4455-9838-ba4ef8d2f2c1\") device mount path \"/mnt/openstack/pv16\"" pod="openstack-kuttl-tests/openstack-galera-1" Jan 20 17:17:12 crc 
kubenswrapper[4558]: I0120 17:17:12.258230 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/fa03920f-0e4b-458f-956b-b658786f9792-kolla-config\") pod \"openstack-galera-2\" (UID: \"fa03920f-0e4b-458f-956b-b658786f9792\") " pod="openstack-kuttl-tests/openstack-galera-2" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.259035 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/594280a4-cae4-4455-9838-ba4ef8d2f2c1-combined-ca-bundle\") pod \"openstack-galera-1\" (UID: \"594280a4-cae4-4455-9838-ba4ef8d2f2c1\") " pod="openstack-kuttl-tests/openstack-galera-1" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.259626 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/fa03920f-0e4b-458f-956b-b658786f9792-galera-tls-certs\") pod \"openstack-galera-2\" (UID: \"fa03920f-0e4b-458f-956b-b658786f9792\") " pod="openstack-kuttl-tests/openstack-galera-2" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.260639 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/3abae449-b665-40ba-bdac-ea1e908e1952-config-data-generated\") pod \"openstack-galera-0\" (UID: \"3abae449-b665-40ba-bdac-ea1e908e1952\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.261250 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/594280a4-cae4-4455-9838-ba4ef8d2f2c1-galera-tls-certs\") pod \"openstack-galera-1\" (UID: \"594280a4-cae4-4455-9838-ba4ef8d2f2c1\") " pod="openstack-kuttl-tests/openstack-galera-1" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.265589 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3abae449-b665-40ba-bdac-ea1e908e1952-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"3abae449-b665-40ba-bdac-ea1e908e1952\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.269298 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/3abae449-b665-40ba-bdac-ea1e908e1952-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"3abae449-b665-40ba-bdac-ea1e908e1952\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.274432 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa03920f-0e4b-458f-956b-b658786f9792-combined-ca-bundle\") pod \"openstack-galera-2\" (UID: \"fa03920f-0e4b-458f-956b-b658786f9792\") " pod="openstack-kuttl-tests/openstack-galera-2" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.279838 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hcxnl\" (UniqueName: \"kubernetes.io/projected/fa03920f-0e4b-458f-956b-b658786f9792-kube-api-access-hcxnl\") pod \"openstack-galera-2\" (UID: \"fa03920f-0e4b-458f-956b-b658786f9792\") " pod="openstack-kuttl-tests/openstack-galera-2" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.284224 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdf2x\" (UniqueName: 
\"kubernetes.io/projected/594280a4-cae4-4455-9838-ba4ef8d2f2c1-kube-api-access-rdf2x\") pod \"openstack-galera-1\" (UID: \"594280a4-cae4-4455-9838-ba4ef8d2f2c1\") " pod="openstack-kuttl-tests/openstack-galera-1" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.299210 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rwtfw\" (UniqueName: \"kubernetes.io/projected/3abae449-b665-40ba-bdac-ea1e908e1952-kube-api-access-rwtfw\") pod \"openstack-galera-0\" (UID: \"3abae449-b665-40ba-bdac-ea1e908e1952\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.302393 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-galera-2\" (UID: \"fa03920f-0e4b-458f-956b-b658786f9792\") " pod="openstack-kuttl-tests/openstack-galera-2" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.316484 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage16-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage16-crc\") pod \"openstack-galera-1\" (UID: \"594280a4-cae4-4455-9838-ba4ef8d2f2c1\") " pod="openstack-kuttl-tests/openstack-galera-1" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.357651 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"openstack-galera-0\" (UID: \"3abae449-b665-40ba-bdac-ea1e908e1952\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.373763 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-2" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.388041 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-1" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.625831 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.850394 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-galera-1"] Jan 20 17:17:12 crc kubenswrapper[4558]: I0120 17:17:12.863628 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-galera-2"] Jan 20 17:17:13 crc kubenswrapper[4558]: W0120 17:17:13.040433 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3abae449_b665_40ba_bdac_ea1e908e1952.slice/crio-fb5f247aa2fb22da905e354e659e22cc895da40706b1f8be85ae812f75f393dc WatchSource:0}: Error finding container fb5f247aa2fb22da905e354e659e22cc895da40706b1f8be85ae812f75f393dc: Status 404 returned error can't find the container with id fb5f247aa2fb22da905e354e659e22cc895da40706b1f8be85ae812f75f393dc Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.041741 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.090022 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-1" event={"ID":"594280a4-cae4-4455-9838-ba4ef8d2f2c1","Type":"ContainerStarted","Data":"4df69b0aeea95403c345c4fa4578dcd1f0758ef8805869ca9d1cb88b4fb9262e"} Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.090062 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-1" event={"ID":"594280a4-cae4-4455-9838-ba4ef8d2f2c1","Type":"ContainerStarted","Data":"eede67cb845b731dec2634e036e863172a2799d3e43cb08f3f455fb9d998e84d"} Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.091452 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"3abae449-b665-40ba-bdac-ea1e908e1952","Type":"ContainerStarted","Data":"fb5f247aa2fb22da905e354e659e22cc895da40706b1f8be85ae812f75f393dc"} Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.098798 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-2" event={"ID":"344f9f31-8a81-4544-b782-5aa78dfc5cc2","Type":"ContainerStarted","Data":"614476c61e78dcb46b1c1d1cf00064d7ee3ffba88582ebadc80626446ffd327e"} Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.100408 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-2" event={"ID":"fa03920f-0e4b-458f-956b-b658786f9792","Type":"ContainerStarted","Data":"39e4d0dccf7dfdba24ce1b4ccfce236f73f6cb1fd65053360ca6d3b551a3e705"} Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.100443 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-2" event={"ID":"fa03920f-0e4b-458f-956b-b658786f9792","Type":"ContainerStarted","Data":"55e6203b41a29a47da221f4002de5a11842c8aaeac5ff40cf713a78b5aace538"} Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.101794 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-1" event={"ID":"2c809052-d9bb-4982-8271-5b7a9a6f28f9","Type":"ContainerStarted","Data":"aef92f821b7a5a08ba1161060fd5100effa5681c8a90295b7e73584d9d1549b4"} Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.103656 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-2" 
event={"ID":"9c52efaf-b737-47bf-9ca1-109a28e19113","Type":"ContainerStarted","Data":"ded34b9dd6d6a2bbb413151aa8d7e879c0b9a135b1b75cfbb4c74ef99adcec5a"} Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.105924 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-1" event={"ID":"8573a632-84e0-4f80-b811-5646b571c318","Type":"ContainerStarted","Data":"7e9618d5dfe234e6e7b347ce911135cc4faaff526b91370dd0e418034794ec1d"} Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.406391 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.407577 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.412516 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-galera-openstack-cell1-svc" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.413217 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-cell1-scripts" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.413491 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"galera-openstack-cell1-dockercfg-g8w9h" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.419953 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-cell1-config-data" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.421009 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-2"] Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.422318 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-cell1-galera-2" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.427655 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.432241 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-1"] Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.433663 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstack-cell1-galera-1" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.443088 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-2"] Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.453882 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-1"] Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.487411 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-cell1-galera-0\" (UID: \"23b973c1-70c3-4d60-8b1d-89efda1a5707\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.487449 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9adbb250-26e3-4e45-b60f-3b4c83d59ba3-operator-scripts\") pod \"openstack-cell1-galera-2\" (UID: \"9adbb250-26e3-4e45-b60f-3b4c83d59ba3\") " pod="openstack-kuttl-tests/openstack-cell1-galera-2" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.487481 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/9adbb250-26e3-4e45-b60f-3b4c83d59ba3-kolla-config\") pod \"openstack-cell1-galera-2\" (UID: \"9adbb250-26e3-4e45-b60f-3b4c83d59ba3\") " pod="openstack-kuttl-tests/openstack-cell1-galera-2" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.487504 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/23b973c1-70c3-4d60-8b1d-89efda1a5707-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"23b973c1-70c3-4d60-8b1d-89efda1a5707\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.487528 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/9adbb250-26e3-4e45-b60f-3b4c83d59ba3-config-data-generated\") pod \"openstack-cell1-galera-2\" (UID: \"9adbb250-26e3-4e45-b60f-3b4c83d59ba3\") " pod="openstack-kuttl-tests/openstack-cell1-galera-2" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.487590 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/9adbb250-26e3-4e45-b60f-3b4c83d59ba3-galera-tls-certs\") pod \"openstack-cell1-galera-2\" (UID: \"9adbb250-26e3-4e45-b60f-3b4c83d59ba3\") " pod="openstack-kuttl-tests/openstack-cell1-galera-2" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.487604 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kxgbq\" (UniqueName: \"kubernetes.io/projected/9adbb250-26e3-4e45-b60f-3b4c83d59ba3-kube-api-access-kxgbq\") pod \"openstack-cell1-galera-2\" (UID: \"9adbb250-26e3-4e45-b60f-3b4c83d59ba3\") " pod="openstack-kuttl-tests/openstack-cell1-galera-2" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.487629 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/23b973c1-70c3-4d60-8b1d-89efda1a5707-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"23b973c1-70c3-4d60-8b1d-89efda1a5707\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.487684 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/23b973c1-70c3-4d60-8b1d-89efda1a5707-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"23b973c1-70c3-4d60-8b1d-89efda1a5707\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.487717 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pggcm\" (UniqueName: \"kubernetes.io/projected/23b973c1-70c3-4d60-8b1d-89efda1a5707-kube-api-access-pggcm\") pod \"openstack-cell1-galera-0\" (UID: \"23b973c1-70c3-4d60-8b1d-89efda1a5707\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.487738 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/23b973c1-70c3-4d60-8b1d-89efda1a5707-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"23b973c1-70c3-4d60-8b1d-89efda1a5707\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.487763 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"openstack-cell1-galera-2\" (UID: \"9adbb250-26e3-4e45-b60f-3b4c83d59ba3\") " pod="openstack-kuttl-tests/openstack-cell1-galera-2" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.487781 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/23b973c1-70c3-4d60-8b1d-89efda1a5707-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"23b973c1-70c3-4d60-8b1d-89efda1a5707\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.487817 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/9adbb250-26e3-4e45-b60f-3b4c83d59ba3-config-data-default\") pod \"openstack-cell1-galera-2\" (UID: \"9adbb250-26e3-4e45-b60f-3b4c83d59ba3\") " pod="openstack-kuttl-tests/openstack-cell1-galera-2" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.487831 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/23b973c1-70c3-4d60-8b1d-89efda1a5707-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"23b973c1-70c3-4d60-8b1d-89efda1a5707\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.487864 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9adbb250-26e3-4e45-b60f-3b4c83d59ba3-combined-ca-bundle\") pod \"openstack-cell1-galera-2\" (UID: \"9adbb250-26e3-4e45-b60f-3b4c83d59ba3\") " pod="openstack-kuttl-tests/openstack-cell1-galera-2" Jan 20 
17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.589803 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/872ae103-12d2-4686-aa02-4e3375eb510a-config-data-generated\") pod \"openstack-cell1-galera-1\" (UID: \"872ae103-12d2-4686-aa02-4e3375eb510a\") " pod="openstack-kuttl-tests/openstack-cell1-galera-1" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.589860 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage18-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage18-crc\") pod \"openstack-cell1-galera-1\" (UID: \"872ae103-12d2-4686-aa02-4e3375eb510a\") " pod="openstack-kuttl-tests/openstack-cell1-galera-1" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.590865 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/9adbb250-26e3-4e45-b60f-3b4c83d59ba3-galera-tls-certs\") pod \"openstack-cell1-galera-2\" (UID: \"9adbb250-26e3-4e45-b60f-3b4c83d59ba3\") " pod="openstack-kuttl-tests/openstack-cell1-galera-2" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.590891 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kxgbq\" (UniqueName: \"kubernetes.io/projected/9adbb250-26e3-4e45-b60f-3b4c83d59ba3-kube-api-access-kxgbq\") pod \"openstack-cell1-galera-2\" (UID: \"9adbb250-26e3-4e45-b60f-3b4c83d59ba3\") " pod="openstack-kuttl-tests/openstack-cell1-galera-2" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.590930 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23b973c1-70c3-4d60-8b1d-89efda1a5707-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"23b973c1-70c3-4d60-8b1d-89efda1a5707\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.590962 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/872ae103-12d2-4686-aa02-4e3375eb510a-kolla-config\") pod \"openstack-cell1-galera-1\" (UID: \"872ae103-12d2-4686-aa02-4e3375eb510a\") " pod="openstack-kuttl-tests/openstack-cell1-galera-1" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.590992 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/23b973c1-70c3-4d60-8b1d-89efda1a5707-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"23b973c1-70c3-4d60-8b1d-89efda1a5707\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.591015 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/872ae103-12d2-4686-aa02-4e3375eb510a-combined-ca-bundle\") pod \"openstack-cell1-galera-1\" (UID: \"872ae103-12d2-4686-aa02-4e3375eb510a\") " pod="openstack-kuttl-tests/openstack-cell1-galera-1" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.591039 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/872ae103-12d2-4686-aa02-4e3375eb510a-operator-scripts\") pod \"openstack-cell1-galera-1\" 
(UID: \"872ae103-12d2-4686-aa02-4e3375eb510a\") " pod="openstack-kuttl-tests/openstack-cell1-galera-1" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.591079 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pggcm\" (UniqueName: \"kubernetes.io/projected/23b973c1-70c3-4d60-8b1d-89efda1a5707-kube-api-access-pggcm\") pod \"openstack-cell1-galera-0\" (UID: \"23b973c1-70c3-4d60-8b1d-89efda1a5707\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.591102 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/23b973c1-70c3-4d60-8b1d-89efda1a5707-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"23b973c1-70c3-4d60-8b1d-89efda1a5707\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.591125 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"openstack-cell1-galera-2\" (UID: \"9adbb250-26e3-4e45-b60f-3b4c83d59ba3\") " pod="openstack-kuttl-tests/openstack-cell1-galera-2" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.591142 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/23b973c1-70c3-4d60-8b1d-89efda1a5707-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"23b973c1-70c3-4d60-8b1d-89efda1a5707\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.591248 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/9adbb250-26e3-4e45-b60f-3b4c83d59ba3-config-data-default\") pod \"openstack-cell1-galera-2\" (UID: \"9adbb250-26e3-4e45-b60f-3b4c83d59ba3\") " pod="openstack-kuttl-tests/openstack-cell1-galera-2" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.591273 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/23b973c1-70c3-4d60-8b1d-89efda1a5707-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"23b973c1-70c3-4d60-8b1d-89efda1a5707\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.591307 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/872ae103-12d2-4686-aa02-4e3375eb510a-galera-tls-certs\") pod \"openstack-cell1-galera-1\" (UID: \"872ae103-12d2-4686-aa02-4e3375eb510a\") " pod="openstack-kuttl-tests/openstack-cell1-galera-1" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.591340 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9adbb250-26e3-4e45-b60f-3b4c83d59ba3-combined-ca-bundle\") pod \"openstack-cell1-galera-2\" (UID: \"9adbb250-26e3-4e45-b60f-3b4c83d59ba3\") " pod="openstack-kuttl-tests/openstack-cell1-galera-2" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.591378 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cnpm2\" (UniqueName: 
\"kubernetes.io/projected/872ae103-12d2-4686-aa02-4e3375eb510a-kube-api-access-cnpm2\") pod \"openstack-cell1-galera-1\" (UID: \"872ae103-12d2-4686-aa02-4e3375eb510a\") " pod="openstack-kuttl-tests/openstack-cell1-galera-1" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.591405 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/872ae103-12d2-4686-aa02-4e3375eb510a-config-data-default\") pod \"openstack-cell1-galera-1\" (UID: \"872ae103-12d2-4686-aa02-4e3375eb510a\") " pod="openstack-kuttl-tests/openstack-cell1-galera-1" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.591431 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-cell1-galera-0\" (UID: \"23b973c1-70c3-4d60-8b1d-89efda1a5707\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.591453 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9adbb250-26e3-4e45-b60f-3b4c83d59ba3-operator-scripts\") pod \"openstack-cell1-galera-2\" (UID: \"9adbb250-26e3-4e45-b60f-3b4c83d59ba3\") " pod="openstack-kuttl-tests/openstack-cell1-galera-2" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.591483 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/9adbb250-26e3-4e45-b60f-3b4c83d59ba3-kolla-config\") pod \"openstack-cell1-galera-2\" (UID: \"9adbb250-26e3-4e45-b60f-3b4c83d59ba3\") " pod="openstack-kuttl-tests/openstack-cell1-galera-2" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.591501 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/23b973c1-70c3-4d60-8b1d-89efda1a5707-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"23b973c1-70c3-4d60-8b1d-89efda1a5707\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.591560 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/9adbb250-26e3-4e45-b60f-3b4c83d59ba3-config-data-generated\") pod \"openstack-cell1-galera-2\" (UID: \"9adbb250-26e3-4e45-b60f-3b4c83d59ba3\") " pod="openstack-kuttl-tests/openstack-cell1-galera-2" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.591976 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/9adbb250-26e3-4e45-b60f-3b4c83d59ba3-config-data-generated\") pod \"openstack-cell1-galera-2\" (UID: \"9adbb250-26e3-4e45-b60f-3b4c83d59ba3\") " pod="openstack-kuttl-tests/openstack-cell1-galera-2" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.593498 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-cell1-galera-0\" (UID: \"23b973c1-70c3-4d60-8b1d-89efda1a5707\") device mount path \"/mnt/openstack/pv10\"" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.595003 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"config-data-default\" (UniqueName: \"kubernetes.io/configmap/9adbb250-26e3-4e45-b60f-3b4c83d59ba3-config-data-default\") pod \"openstack-cell1-galera-2\" (UID: \"9adbb250-26e3-4e45-b60f-3b4c83d59ba3\") " pod="openstack-kuttl-tests/openstack-cell1-galera-2" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.595033 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/9adbb250-26e3-4e45-b60f-3b4c83d59ba3-kolla-config\") pod \"openstack-cell1-galera-2\" (UID: \"9adbb250-26e3-4e45-b60f-3b4c83d59ba3\") " pod="openstack-kuttl-tests/openstack-cell1-galera-2" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.595098 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"openstack-cell1-galera-2\" (UID: \"9adbb250-26e3-4e45-b60f-3b4c83d59ba3\") device mount path \"/mnt/openstack/pv14\"" pod="openstack-kuttl-tests/openstack-cell1-galera-2" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.595721 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/23b973c1-70c3-4d60-8b1d-89efda1a5707-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"23b973c1-70c3-4d60-8b1d-89efda1a5707\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.595778 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9adbb250-26e3-4e45-b60f-3b4c83d59ba3-operator-scripts\") pod \"openstack-cell1-galera-2\" (UID: \"9adbb250-26e3-4e45-b60f-3b4c83d59ba3\") " pod="openstack-kuttl-tests/openstack-cell1-galera-2" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.596686 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/23b973c1-70c3-4d60-8b1d-89efda1a5707-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"23b973c1-70c3-4d60-8b1d-89efda1a5707\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.596697 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/23b973c1-70c3-4d60-8b1d-89efda1a5707-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"23b973c1-70c3-4d60-8b1d-89efda1a5707\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.596772 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/23b973c1-70c3-4d60-8b1d-89efda1a5707-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"23b973c1-70c3-4d60-8b1d-89efda1a5707\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.601825 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/9adbb250-26e3-4e45-b60f-3b4c83d59ba3-galera-tls-certs\") pod \"openstack-cell1-galera-2\" (UID: \"9adbb250-26e3-4e45-b60f-3b4c83d59ba3\") " pod="openstack-kuttl-tests/openstack-cell1-galera-2" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.602506 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/9adbb250-26e3-4e45-b60f-3b4c83d59ba3-combined-ca-bundle\") pod \"openstack-cell1-galera-2\" (UID: \"9adbb250-26e3-4e45-b60f-3b4c83d59ba3\") " pod="openstack-kuttl-tests/openstack-cell1-galera-2" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.603265 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23b973c1-70c3-4d60-8b1d-89efda1a5707-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"23b973c1-70c3-4d60-8b1d-89efda1a5707\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.611518 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kxgbq\" (UniqueName: \"kubernetes.io/projected/9adbb250-26e3-4e45-b60f-3b4c83d59ba3-kube-api-access-kxgbq\") pod \"openstack-cell1-galera-2\" (UID: \"9adbb250-26e3-4e45-b60f-3b4c83d59ba3\") " pod="openstack-kuttl-tests/openstack-cell1-galera-2" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.617293 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"openstack-cell1-galera-2\" (UID: \"9adbb250-26e3-4e45-b60f-3b4c83d59ba3\") " pod="openstack-kuttl-tests/openstack-cell1-galera-2" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.621320 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pggcm\" (UniqueName: \"kubernetes.io/projected/23b973c1-70c3-4d60-8b1d-89efda1a5707-kube-api-access-pggcm\") pod \"openstack-cell1-galera-0\" (UID: \"23b973c1-70c3-4d60-8b1d-89efda1a5707\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.623917 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/23b973c1-70c3-4d60-8b1d-89efda1a5707-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"23b973c1-70c3-4d60-8b1d-89efda1a5707\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.636813 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-cell1-galera-0\" (UID: \"23b973c1-70c3-4d60-8b1d-89efda1a5707\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.682966 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.683986 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.685979 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"memcached-memcached-dockercfg-kmf4n" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.686492 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"memcached-config-data" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.686503 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-memcached-svc" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.694178 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/872ae103-12d2-4686-aa02-4e3375eb510a-galera-tls-certs\") pod \"openstack-cell1-galera-1\" (UID: \"872ae103-12d2-4686-aa02-4e3375eb510a\") " pod="openstack-kuttl-tests/openstack-cell1-galera-1" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.694256 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cnpm2\" (UniqueName: \"kubernetes.io/projected/872ae103-12d2-4686-aa02-4e3375eb510a-kube-api-access-cnpm2\") pod \"openstack-cell1-galera-1\" (UID: \"872ae103-12d2-4686-aa02-4e3375eb510a\") " pod="openstack-kuttl-tests/openstack-cell1-galera-1" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.694291 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/872ae103-12d2-4686-aa02-4e3375eb510a-config-data-default\") pod \"openstack-cell1-galera-1\" (UID: \"872ae103-12d2-4686-aa02-4e3375eb510a\") " pod="openstack-kuttl-tests/openstack-cell1-galera-1" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.694358 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/872ae103-12d2-4686-aa02-4e3375eb510a-config-data-generated\") pod \"openstack-cell1-galera-1\" (UID: \"872ae103-12d2-4686-aa02-4e3375eb510a\") " pod="openstack-kuttl-tests/openstack-cell1-galera-1" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.694390 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage18-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage18-crc\") pod \"openstack-cell1-galera-1\" (UID: \"872ae103-12d2-4686-aa02-4e3375eb510a\") " pod="openstack-kuttl-tests/openstack-cell1-galera-1" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.694440 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/872ae103-12d2-4686-aa02-4e3375eb510a-kolla-config\") pod \"openstack-cell1-galera-1\" (UID: \"872ae103-12d2-4686-aa02-4e3375eb510a\") " pod="openstack-kuttl-tests/openstack-cell1-galera-1" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.694500 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/872ae103-12d2-4686-aa02-4e3375eb510a-combined-ca-bundle\") pod \"openstack-cell1-galera-1\" (UID: \"872ae103-12d2-4686-aa02-4e3375eb510a\") " pod="openstack-kuttl-tests/openstack-cell1-galera-1" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.694531 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" 
(UniqueName: \"kubernetes.io/configmap/872ae103-12d2-4686-aa02-4e3375eb510a-operator-scripts\") pod \"openstack-cell1-galera-1\" (UID: \"872ae103-12d2-4686-aa02-4e3375eb510a\") " pod="openstack-kuttl-tests/openstack-cell1-galera-1" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.695959 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/872ae103-12d2-4686-aa02-4e3375eb510a-operator-scripts\") pod \"openstack-cell1-galera-1\" (UID: \"872ae103-12d2-4686-aa02-4e3375eb510a\") " pod="openstack-kuttl-tests/openstack-cell1-galera-1" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.697051 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/872ae103-12d2-4686-aa02-4e3375eb510a-config-data-generated\") pod \"openstack-cell1-galera-1\" (UID: \"872ae103-12d2-4686-aa02-4e3375eb510a\") " pod="openstack-kuttl-tests/openstack-cell1-galera-1" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.697988 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/872ae103-12d2-4686-aa02-4e3375eb510a-config-data-default\") pod \"openstack-cell1-galera-1\" (UID: \"872ae103-12d2-4686-aa02-4e3375eb510a\") " pod="openstack-kuttl-tests/openstack-cell1-galera-1" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.698071 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.698331 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage18-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage18-crc\") pod \"openstack-cell1-galera-1\" (UID: \"872ae103-12d2-4686-aa02-4e3375eb510a\") device mount path \"/mnt/openstack/pv18\"" pod="openstack-kuttl-tests/openstack-cell1-galera-1" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.698565 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/872ae103-12d2-4686-aa02-4e3375eb510a-kolla-config\") pod \"openstack-cell1-galera-1\" (UID: \"872ae103-12d2-4686-aa02-4e3375eb510a\") " pod="openstack-kuttl-tests/openstack-cell1-galera-1" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.719985 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/872ae103-12d2-4686-aa02-4e3375eb510a-combined-ca-bundle\") pod \"openstack-cell1-galera-1\" (UID: \"872ae103-12d2-4686-aa02-4e3375eb510a\") " pod="openstack-kuttl-tests/openstack-cell1-galera-1" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.732023 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/872ae103-12d2-4686-aa02-4e3375eb510a-galera-tls-certs\") pod \"openstack-cell1-galera-1\" (UID: \"872ae103-12d2-4686-aa02-4e3375eb510a\") " pod="openstack-kuttl-tests/openstack-cell1-galera-1" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.732886 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.734300 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstack-cell1-galera-2" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.734576 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage18-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage18-crc\") pod \"openstack-cell1-galera-1\" (UID: \"872ae103-12d2-4686-aa02-4e3375eb510a\") " pod="openstack-kuttl-tests/openstack-cell1-galera-1" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.739656 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cnpm2\" (UniqueName: \"kubernetes.io/projected/872ae103-12d2-4686-aa02-4e3375eb510a-kube-api-access-cnpm2\") pod \"openstack-cell1-galera-1\" (UID: \"872ae103-12d2-4686-aa02-4e3375eb510a\") " pod="openstack-kuttl-tests/openstack-cell1-galera-1" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.752075 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-cell1-galera-1" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.795799 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1382710-0399-4564-8c83-271c645ce04e-combined-ca-bundle\") pod \"memcached-0\" (UID: \"f1382710-0399-4564-8c83-271c645ce04e\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.795850 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/f1382710-0399-4564-8c83-271c645ce04e-kolla-config\") pod \"memcached-0\" (UID: \"f1382710-0399-4564-8c83-271c645ce04e\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.795874 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/f1382710-0399-4564-8c83-271c645ce04e-memcached-tls-certs\") pod \"memcached-0\" (UID: \"f1382710-0399-4564-8c83-271c645ce04e\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.795899 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f1382710-0399-4564-8c83-271c645ce04e-config-data\") pod \"memcached-0\" (UID: \"f1382710-0399-4564-8c83-271c645ce04e\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.795956 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cknq7\" (UniqueName: \"kubernetes.io/projected/f1382710-0399-4564-8c83-271c645ce04e-kube-api-access-cknq7\") pod \"memcached-0\" (UID: \"f1382710-0399-4564-8c83-271c645ce04e\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.898330 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cknq7\" (UniqueName: \"kubernetes.io/projected/f1382710-0399-4564-8c83-271c645ce04e-kube-api-access-cknq7\") pod \"memcached-0\" (UID: \"f1382710-0399-4564-8c83-271c645ce04e\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.898659 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/f1382710-0399-4564-8c83-271c645ce04e-combined-ca-bundle\") pod \"memcached-0\" (UID: \"f1382710-0399-4564-8c83-271c645ce04e\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.898707 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/f1382710-0399-4564-8c83-271c645ce04e-kolla-config\") pod \"memcached-0\" (UID: \"f1382710-0399-4564-8c83-271c645ce04e\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.898739 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/f1382710-0399-4564-8c83-271c645ce04e-memcached-tls-certs\") pod \"memcached-0\" (UID: \"f1382710-0399-4564-8c83-271c645ce04e\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.898779 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f1382710-0399-4564-8c83-271c645ce04e-config-data\") pod \"memcached-0\" (UID: \"f1382710-0399-4564-8c83-271c645ce04e\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.899579 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f1382710-0399-4564-8c83-271c645ce04e-config-data\") pod \"memcached-0\" (UID: \"f1382710-0399-4564-8c83-271c645ce04e\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.900815 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/f1382710-0399-4564-8c83-271c645ce04e-kolla-config\") pod \"memcached-0\" (UID: \"f1382710-0399-4564-8c83-271c645ce04e\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.904697 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1382710-0399-4564-8c83-271c645ce04e-combined-ca-bundle\") pod \"memcached-0\" (UID: \"f1382710-0399-4564-8c83-271c645ce04e\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.904693 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/f1382710-0399-4564-8c83-271c645ce04e-memcached-tls-certs\") pod \"memcached-0\" (UID: \"f1382710-0399-4564-8c83-271c645ce04e\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:17:13 crc kubenswrapper[4558]: I0120 17:17:13.915282 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cknq7\" (UniqueName: \"kubernetes.io/projected/f1382710-0399-4564-8c83-271c645ce04e-kube-api-access-cknq7\") pod \"memcached-0\" (UID: \"f1382710-0399-4564-8c83-271c645ce04e\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:17:14 crc kubenswrapper[4558]: I0120 17:17:14.016754 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:17:14 crc kubenswrapper[4558]: I0120 17:17:14.133376 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"3abae449-b665-40ba-bdac-ea1e908e1952","Type":"ContainerStarted","Data":"7776490a42e250bb73aab7faf515747c70ff28768e78ef5a2269fbf3fd5561a5"} Jan 20 17:17:14 crc kubenswrapper[4558]: I0120 17:17:14.211556 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:17:14 crc kubenswrapper[4558]: I0120 17:17:14.216109 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-2"] Jan 20 17:17:14 crc kubenswrapper[4558]: W0120 17:17:14.217904 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9adbb250_26e3_4e45_b60f_3b4c83d59ba3.slice/crio-6c0793a4831f49398284626ae40c77a9333710a5b62feb85f1da44a5bfd5cf9b WatchSource:0}: Error finding container 6c0793a4831f49398284626ae40c77a9333710a5b62feb85f1da44a5bfd5cf9b: Status 404 returned error can't find the container with id 6c0793a4831f49398284626ae40c77a9333710a5b62feb85f1da44a5bfd5cf9b Jan 20 17:17:14 crc kubenswrapper[4558]: I0120 17:17:14.346423 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-1"] Jan 20 17:17:14 crc kubenswrapper[4558]: I0120 17:17:14.478293 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:17:14 crc kubenswrapper[4558]: W0120 17:17:14.479618 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf1382710_0399_4564_8c83_271c645ce04e.slice/crio-67b5435d9b589be4e1e201377d49556a02405c5fbe78c6fd08e640e566a5a49e WatchSource:0}: Error finding container 67b5435d9b589be4e1e201377d49556a02405c5fbe78c6fd08e640e566a5a49e: Status 404 returned error can't find the container with id 67b5435d9b589be4e1e201377d49556a02405c5fbe78c6fd08e640e566a5a49e Jan 20 17:17:15 crc kubenswrapper[4558]: I0120 17:17:15.141148 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"23b973c1-70c3-4d60-8b1d-89efda1a5707","Type":"ContainerStarted","Data":"1444085be1f8fd9f3124056fde67f6e43f79544b6e7418777ad2e190d7088fd6"} Jan 20 17:17:15 crc kubenswrapper[4558]: I0120 17:17:15.141597 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"23b973c1-70c3-4d60-8b1d-89efda1a5707","Type":"ContainerStarted","Data":"3977a1ac73216f47b9f48d0a4f845cfe8e04acfa669c1c1e86f1bc65da7b4d06"} Jan 20 17:17:15 crc kubenswrapper[4558]: I0120 17:17:15.142639 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-1" event={"ID":"872ae103-12d2-4686-aa02-4e3375eb510a","Type":"ContainerStarted","Data":"7481a1fc56ba9d6433e79ef32c84d26f8ce7244725116f9bc5912bfab8876618"} Jan 20 17:17:15 crc kubenswrapper[4558]: I0120 17:17:15.142672 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-1" event={"ID":"872ae103-12d2-4686-aa02-4e3375eb510a","Type":"ContainerStarted","Data":"19d18e73c4dbcda4d915f58c461cac55bf1cd1ec122ac77e249168c20919b44d"} Jan 20 17:17:15 crc kubenswrapper[4558]: I0120 17:17:15.144064 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-kuttl-tests/openstack-cell1-galera-2" event={"ID":"9adbb250-26e3-4e45-b60f-3b4c83d59ba3","Type":"ContainerStarted","Data":"4a003c3cd6aea09a3d0e2dc9693fd65da716ad6878942d1f8aad4e5a3a7fe6b9"} Jan 20 17:17:15 crc kubenswrapper[4558]: I0120 17:17:15.144096 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-2" event={"ID":"9adbb250-26e3-4e45-b60f-3b4c83d59ba3","Type":"ContainerStarted","Data":"6c0793a4831f49398284626ae40c77a9333710a5b62feb85f1da44a5bfd5cf9b"} Jan 20 17:17:15 crc kubenswrapper[4558]: I0120 17:17:15.145497 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"f1382710-0399-4564-8c83-271c645ce04e","Type":"ContainerStarted","Data":"0c2f9c3e3d8e63b8e5f33c1b908cb9fc7f21da330edfebb3544ee9108a3c2a66"} Jan 20 17:17:15 crc kubenswrapper[4558]: I0120 17:17:15.145568 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:17:15 crc kubenswrapper[4558]: I0120 17:17:15.145592 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"f1382710-0399-4564-8c83-271c645ce04e","Type":"ContainerStarted","Data":"67b5435d9b589be4e1e201377d49556a02405c5fbe78c6fd08e640e566a5a49e"} Jan 20 17:17:15 crc kubenswrapper[4558]: I0120 17:17:15.192105 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/memcached-0" podStartSLOduration=2.192091406 podStartE2EDuration="2.192091406s" podCreationTimestamp="2026-01-20 17:17:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:17:15.190584542 +0000 UTC m=+2128.950922509" watchObservedRunningTime="2026-01-20 17:17:15.192091406 +0000 UTC m=+2128.952429363" Jan 20 17:17:15 crc kubenswrapper[4558]: I0120 17:17:15.663959 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:17:15 crc kubenswrapper[4558]: I0120 17:17:15.665849 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:17:15 crc kubenswrapper[4558]: I0120 17:17:15.667674 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"telemetry-ceilometer-dockercfg-55pdt" Jan 20 17:17:15 crc kubenswrapper[4558]: I0120 17:17:15.677001 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:17:15 crc kubenswrapper[4558]: I0120 17:17:15.736273 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7tplz\" (UniqueName: \"kubernetes.io/projected/4bc1cce9-b6fd-4965-b6ff-b8ece1dfca79-kube-api-access-7tplz\") pod \"kube-state-metrics-0\" (UID: \"4bc1cce9-b6fd-4965-b6ff-b8ece1dfca79\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:17:15 crc kubenswrapper[4558]: I0120 17:17:15.838658 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7tplz\" (UniqueName: \"kubernetes.io/projected/4bc1cce9-b6fd-4965-b6ff-b8ece1dfca79-kube-api-access-7tplz\") pod \"kube-state-metrics-0\" (UID: \"4bc1cce9-b6fd-4965-b6ff-b8ece1dfca79\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:17:15 crc kubenswrapper[4558]: I0120 17:17:15.862052 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7tplz\" (UniqueName: \"kubernetes.io/projected/4bc1cce9-b6fd-4965-b6ff-b8ece1dfca79-kube-api-access-7tplz\") pod \"kube-state-metrics-0\" (UID: \"4bc1cce9-b6fd-4965-b6ff-b8ece1dfca79\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:17:15 crc kubenswrapper[4558]: I0120 17:17:15.988091 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:17:16 crc kubenswrapper[4558]: I0120 17:17:16.157087 4558 generic.go:334] "Generic (PLEG): container finished" podID="fa03920f-0e4b-458f-956b-b658786f9792" containerID="39e4d0dccf7dfdba24ce1b4ccfce236f73f6cb1fd65053360ca6d3b551a3e705" exitCode=0 Jan 20 17:17:16 crc kubenswrapper[4558]: I0120 17:17:16.157196 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-2" event={"ID":"fa03920f-0e4b-458f-956b-b658786f9792","Type":"ContainerDied","Data":"39e4d0dccf7dfdba24ce1b4ccfce236f73f6cb1fd65053360ca6d3b551a3e705"} Jan 20 17:17:16 crc kubenswrapper[4558]: I0120 17:17:16.202536 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:17:16 crc kubenswrapper[4558]: I0120 17:17:16.290625 4558 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 20 17:17:17 crc kubenswrapper[4558]: I0120 17:17:17.174135 4558 generic.go:334] "Generic (PLEG): container finished" podID="594280a4-cae4-4455-9838-ba4ef8d2f2c1" containerID="4df69b0aeea95403c345c4fa4578dcd1f0758ef8805869ca9d1cb88b4fb9262e" exitCode=0 Jan 20 17:17:17 crc kubenswrapper[4558]: I0120 17:17:17.174221 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-1" event={"ID":"594280a4-cae4-4455-9838-ba4ef8d2f2c1","Type":"ContainerDied","Data":"4df69b0aeea95403c345c4fa4578dcd1f0758ef8805869ca9d1cb88b4fb9262e"} Jan 20 17:17:17 crc kubenswrapper[4558]: I0120 17:17:17.177137 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" 
event={"ID":"4bc1cce9-b6fd-4965-b6ff-b8ece1dfca79","Type":"ContainerStarted","Data":"09119e0ed2114641fce86c060b77da03c11248abf090a66ce6d4f78aa73834b7"} Jan 20 17:17:17 crc kubenswrapper[4558]: I0120 17:17:17.177187 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"4bc1cce9-b6fd-4965-b6ff-b8ece1dfca79","Type":"ContainerStarted","Data":"95efe6d096d623ee783b702009e3a4a4dea7b8f1070cef6caeed4cb346a2d594"} Jan 20 17:17:17 crc kubenswrapper[4558]: I0120 17:17:17.177353 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:17:17 crc kubenswrapper[4558]: I0120 17:17:17.178966 4558 generic.go:334] "Generic (PLEG): container finished" podID="3abae449-b665-40ba-bdac-ea1e908e1952" containerID="7776490a42e250bb73aab7faf515747c70ff28768e78ef5a2269fbf3fd5561a5" exitCode=0 Jan 20 17:17:17 crc kubenswrapper[4558]: I0120 17:17:17.179030 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"3abae449-b665-40ba-bdac-ea1e908e1952","Type":"ContainerDied","Data":"7776490a42e250bb73aab7faf515747c70ff28768e78ef5a2269fbf3fd5561a5"} Jan 20 17:17:17 crc kubenswrapper[4558]: I0120 17:17:17.182651 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-2" event={"ID":"fa03920f-0e4b-458f-956b-b658786f9792","Type":"ContainerStarted","Data":"a62b1eff4cb0e50771699ddb63f1bffd4fb9ff28cbdf146a5a3c121d2a2e02fa"} Jan 20 17:17:17 crc kubenswrapper[4558]: I0120 17:17:17.227025 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/openstack-galera-2" podStartSLOduration=7.226995624 podStartE2EDuration="7.226995624s" podCreationTimestamp="2026-01-20 17:17:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:17:17.21110948 +0000 UTC m=+2130.971447447" watchObservedRunningTime="2026-01-20 17:17:17.226995624 +0000 UTC m=+2130.987333591" Jan 20 17:17:17 crc kubenswrapper[4558]: I0120 17:17:17.246125 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/kube-state-metrics-0" podStartSLOduration=1.966751173 podStartE2EDuration="2.246096684s" podCreationTimestamp="2026-01-20 17:17:15 +0000 UTC" firstStartedPulling="2026-01-20 17:17:16.290270553 +0000 UTC m=+2130.050608511" lastFinishedPulling="2026-01-20 17:17:16.569616055 +0000 UTC m=+2130.329954022" observedRunningTime="2026-01-20 17:17:17.225041349 +0000 UTC m=+2130.985379306" watchObservedRunningTime="2026-01-20 17:17:17.246096684 +0000 UTC m=+2131.006434651" Jan 20 17:17:18 crc kubenswrapper[4558]: I0120 17:17:18.192526 4558 generic.go:334] "Generic (PLEG): container finished" podID="9adbb250-26e3-4e45-b60f-3b4c83d59ba3" containerID="4a003c3cd6aea09a3d0e2dc9693fd65da716ad6878942d1f8aad4e5a3a7fe6b9" exitCode=0 Jan 20 17:17:18 crc kubenswrapper[4558]: I0120 17:17:18.192605 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-2" event={"ID":"9adbb250-26e3-4e45-b60f-3b4c83d59ba3","Type":"ContainerDied","Data":"4a003c3cd6aea09a3d0e2dc9693fd65da716ad6878942d1f8aad4e5a3a7fe6b9"} Jan 20 17:17:18 crc kubenswrapper[4558]: I0120 17:17:18.195688 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-1" 
event={"ID":"594280a4-cae4-4455-9838-ba4ef8d2f2c1","Type":"ContainerStarted","Data":"f1681cfb95a469724fd742914f90a54418d9a8689ce838c8542616d40276313e"} Jan 20 17:17:18 crc kubenswrapper[4558]: I0120 17:17:18.199530 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"3abae449-b665-40ba-bdac-ea1e908e1952","Type":"ContainerStarted","Data":"b602058ef33d1b770be4c16f21fd079b2942efdcce4736c8d70e7dae44d7353d"} Jan 20 17:17:18 crc kubenswrapper[4558]: I0120 17:17:18.201806 4558 generic.go:334] "Generic (PLEG): container finished" podID="23b973c1-70c3-4d60-8b1d-89efda1a5707" containerID="1444085be1f8fd9f3124056fde67f6e43f79544b6e7418777ad2e190d7088fd6" exitCode=0 Jan 20 17:17:18 crc kubenswrapper[4558]: I0120 17:17:18.201848 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"23b973c1-70c3-4d60-8b1d-89efda1a5707","Type":"ContainerDied","Data":"1444085be1f8fd9f3124056fde67f6e43f79544b6e7418777ad2e190d7088fd6"} Jan 20 17:17:18 crc kubenswrapper[4558]: I0120 17:17:18.205003 4558 generic.go:334] "Generic (PLEG): container finished" podID="872ae103-12d2-4686-aa02-4e3375eb510a" containerID="7481a1fc56ba9d6433e79ef32c84d26f8ce7244725116f9bc5912bfab8876618" exitCode=0 Jan 20 17:17:18 crc kubenswrapper[4558]: I0120 17:17:18.205048 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-1" event={"ID":"872ae103-12d2-4686-aa02-4e3375eb510a","Type":"ContainerDied","Data":"7481a1fc56ba9d6433e79ef32c84d26f8ce7244725116f9bc5912bfab8876618"} Jan 20 17:17:18 crc kubenswrapper[4558]: I0120 17:17:18.253091 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/openstack-galera-0" podStartSLOduration=8.253073094 podStartE2EDuration="8.253073094s" podCreationTimestamp="2026-01-20 17:17:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:17:18.244664701 +0000 UTC m=+2132.005002668" watchObservedRunningTime="2026-01-20 17:17:18.253073094 +0000 UTC m=+2132.013411061" Jan 20 17:17:18 crc kubenswrapper[4558]: I0120 17:17:18.263579 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/openstack-galera-1" podStartSLOduration=8.26356291 podStartE2EDuration="8.26356291s" podCreationTimestamp="2026-01-20 17:17:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:17:18.259361149 +0000 UTC m=+2132.019699115" watchObservedRunningTime="2026-01-20 17:17:18.26356291 +0000 UTC m=+2132.023900878" Jan 20 17:17:19 crc kubenswrapper[4558]: I0120 17:17:19.019253 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:17:19 crc kubenswrapper[4558]: I0120 17:17:19.214189 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"23b973c1-70c3-4d60-8b1d-89efda1a5707","Type":"ContainerStarted","Data":"6ab17e2c19352e87a06de8079d4c76b1930887a93bc8b1a819d497e6fd3c41fd"} Jan 20 17:17:19 crc kubenswrapper[4558]: I0120 17:17:19.216088 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-1" 
event={"ID":"872ae103-12d2-4686-aa02-4e3375eb510a","Type":"ContainerStarted","Data":"3c937b5e3f5d38edf270dee165b67ae6bdbf0a55f6bf5b7f0a56a325dd63cec2"} Jan 20 17:17:19 crc kubenswrapper[4558]: I0120 17:17:19.218033 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-2" event={"ID":"9adbb250-26e3-4e45-b60f-3b4c83d59ba3","Type":"ContainerStarted","Data":"648217de53b0389dbe7690a275b0c4b85571e0144e4fa2b93968f2f4497fa9f1"} Jan 20 17:17:19 crc kubenswrapper[4558]: I0120 17:17:19.235693 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/openstack-cell1-galera-0" podStartSLOduration=7.235396663 podStartE2EDuration="7.235396663s" podCreationTimestamp="2026-01-20 17:17:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:17:19.227626722 +0000 UTC m=+2132.987964680" watchObservedRunningTime="2026-01-20 17:17:19.235396663 +0000 UTC m=+2132.995734631" Jan 20 17:17:19 crc kubenswrapper[4558]: I0120 17:17:19.265458 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/openstack-cell1-galera-2" podStartSLOduration=7.265438649 podStartE2EDuration="7.265438649s" podCreationTimestamp="2026-01-20 17:17:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:17:19.257501843 +0000 UTC m=+2133.017839810" watchObservedRunningTime="2026-01-20 17:17:19.265438649 +0000 UTC m=+2133.025776615" Jan 20 17:17:19 crc kubenswrapper[4558]: I0120 17:17:19.265922 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/openstack-cell1-galera-1" podStartSLOduration=7.265915465 podStartE2EDuration="7.265915465s" podCreationTimestamp="2026-01-20 17:17:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:17:19.244955299 +0000 UTC m=+2133.005293266" watchObservedRunningTime="2026-01-20 17:17:19.265915465 +0000 UTC m=+2133.026253431" Jan 20 17:17:20 crc kubenswrapper[4558]: I0120 17:17:20.215495 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:17:20 crc kubenswrapper[4558]: I0120 17:17:20.218599 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:17:20 crc kubenswrapper[4558]: I0120 17:17:20.221953 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ovndbcluster-nb-ovndbs" Jan 20 17:17:20 crc kubenswrapper[4558]: I0120 17:17:20.222109 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ovn-metrics" Jan 20 17:17:20 crc kubenswrapper[4558]: I0120 17:17:20.222707 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovndbcluster-nb-scripts" Jan 20 17:17:20 crc kubenswrapper[4558]: I0120 17:17:20.223579 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovndbcluster-nb-config" Jan 20 17:17:20 crc kubenswrapper[4558]: I0120 17:17:20.224852 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ovncluster-ovndbcluster-nb-dockercfg-qwhxw" Jan 20 17:17:20 crc kubenswrapper[4558]: I0120 17:17:20.227569 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:17:20 crc kubenswrapper[4558]: I0120 17:17:20.335424 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0ccdb20e-7467-40c5-be6c-62102e9ad6bd-config\") pod \"ovsdbserver-nb-0\" (UID: \"0ccdb20e-7467-40c5-be6c-62102e9ad6bd\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:17:20 crc kubenswrapper[4558]: I0120 17:17:20.335492 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0ccdb20e-7467-40c5-be6c-62102e9ad6bd-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"0ccdb20e-7467-40c5-be6c-62102e9ad6bd\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:17:20 crc kubenswrapper[4558]: I0120 17:17:20.335534 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-slgjx\" (UniqueName: \"kubernetes.io/projected/0ccdb20e-7467-40c5-be6c-62102e9ad6bd-kube-api-access-slgjx\") pod \"ovsdbserver-nb-0\" (UID: \"0ccdb20e-7467-40c5-be6c-62102e9ad6bd\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:17:20 crc kubenswrapper[4558]: I0120 17:17:20.335650 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"ovsdbserver-nb-0\" (UID: \"0ccdb20e-7467-40c5-be6c-62102e9ad6bd\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:17:20 crc kubenswrapper[4558]: I0120 17:17:20.335737 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/0ccdb20e-7467-40c5-be6c-62102e9ad6bd-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"0ccdb20e-7467-40c5-be6c-62102e9ad6bd\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:17:20 crc kubenswrapper[4558]: I0120 17:17:20.335865 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ccdb20e-7467-40c5-be6c-62102e9ad6bd-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"0ccdb20e-7467-40c5-be6c-62102e9ad6bd\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:17:20 crc kubenswrapper[4558]: 
I0120 17:17:20.336004 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/0ccdb20e-7467-40c5-be6c-62102e9ad6bd-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"0ccdb20e-7467-40c5-be6c-62102e9ad6bd\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:17:20 crc kubenswrapper[4558]: I0120 17:17:20.336073 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/0ccdb20e-7467-40c5-be6c-62102e9ad6bd-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"0ccdb20e-7467-40c5-be6c-62102e9ad6bd\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:17:20 crc kubenswrapper[4558]: I0120 17:17:20.438577 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0ccdb20e-7467-40c5-be6c-62102e9ad6bd-config\") pod \"ovsdbserver-nb-0\" (UID: \"0ccdb20e-7467-40c5-be6c-62102e9ad6bd\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:17:20 crc kubenswrapper[4558]: I0120 17:17:20.438633 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0ccdb20e-7467-40c5-be6c-62102e9ad6bd-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"0ccdb20e-7467-40c5-be6c-62102e9ad6bd\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:17:20 crc kubenswrapper[4558]: I0120 17:17:20.438666 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-slgjx\" (UniqueName: \"kubernetes.io/projected/0ccdb20e-7467-40c5-be6c-62102e9ad6bd-kube-api-access-slgjx\") pod \"ovsdbserver-nb-0\" (UID: \"0ccdb20e-7467-40c5-be6c-62102e9ad6bd\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:17:20 crc kubenswrapper[4558]: I0120 17:17:20.438740 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"ovsdbserver-nb-0\" (UID: \"0ccdb20e-7467-40c5-be6c-62102e9ad6bd\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:17:20 crc kubenswrapper[4558]: I0120 17:17:20.438782 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/0ccdb20e-7467-40c5-be6c-62102e9ad6bd-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"0ccdb20e-7467-40c5-be6c-62102e9ad6bd\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:17:20 crc kubenswrapper[4558]: I0120 17:17:20.438872 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ccdb20e-7467-40c5-be6c-62102e9ad6bd-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"0ccdb20e-7467-40c5-be6c-62102e9ad6bd\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:17:20 crc kubenswrapper[4558]: I0120 17:17:20.438929 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/0ccdb20e-7467-40c5-be6c-62102e9ad6bd-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"0ccdb20e-7467-40c5-be6c-62102e9ad6bd\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:17:20 crc kubenswrapper[4558]: I0120 17:17:20.438957 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" 
(UniqueName: \"kubernetes.io/secret/0ccdb20e-7467-40c5-be6c-62102e9ad6bd-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"0ccdb20e-7467-40c5-be6c-62102e9ad6bd\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:17:20 crc kubenswrapper[4558]: I0120 17:17:20.439489 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"ovsdbserver-nb-0\" (UID: \"0ccdb20e-7467-40c5-be6c-62102e9ad6bd\") device mount path \"/mnt/openstack/pv01\"" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:17:20 crc kubenswrapper[4558]: I0120 17:17:20.440799 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/0ccdb20e-7467-40c5-be6c-62102e9ad6bd-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"0ccdb20e-7467-40c5-be6c-62102e9ad6bd\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:17:20 crc kubenswrapper[4558]: I0120 17:17:20.441520 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0ccdb20e-7467-40c5-be6c-62102e9ad6bd-config\") pod \"ovsdbserver-nb-0\" (UID: \"0ccdb20e-7467-40c5-be6c-62102e9ad6bd\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:17:20 crc kubenswrapper[4558]: I0120 17:17:20.442198 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0ccdb20e-7467-40c5-be6c-62102e9ad6bd-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"0ccdb20e-7467-40c5-be6c-62102e9ad6bd\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:17:20 crc kubenswrapper[4558]: I0120 17:17:20.446735 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/0ccdb20e-7467-40c5-be6c-62102e9ad6bd-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"0ccdb20e-7467-40c5-be6c-62102e9ad6bd\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:17:20 crc kubenswrapper[4558]: I0120 17:17:20.448767 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ccdb20e-7467-40c5-be6c-62102e9ad6bd-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"0ccdb20e-7467-40c5-be6c-62102e9ad6bd\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:17:20 crc kubenswrapper[4558]: I0120 17:17:20.451619 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/0ccdb20e-7467-40c5-be6c-62102e9ad6bd-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"0ccdb20e-7467-40c5-be6c-62102e9ad6bd\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:17:20 crc kubenswrapper[4558]: I0120 17:17:20.456098 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-slgjx\" (UniqueName: \"kubernetes.io/projected/0ccdb20e-7467-40c5-be6c-62102e9ad6bd-kube-api-access-slgjx\") pod \"ovsdbserver-nb-0\" (UID: \"0ccdb20e-7467-40c5-be6c-62102e9ad6bd\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:17:20 crc kubenswrapper[4558]: I0120 17:17:20.468916 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"ovsdbserver-nb-0\" (UID: \"0ccdb20e-7467-40c5-be6c-62102e9ad6bd\") " 
pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:17:20 crc kubenswrapper[4558]: I0120 17:17:20.537907 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:17:20 crc kubenswrapper[4558]: I0120 17:17:20.961395 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:17:20 crc kubenswrapper[4558]: W0120 17:17:20.971542 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0ccdb20e_7467_40c5_be6c_62102e9ad6bd.slice/crio-f0b4760d94941c1c76a55b8d857a669feb9ec3c02eb791ff4b30bc484111d6d1 WatchSource:0}: Error finding container f0b4760d94941c1c76a55b8d857a669feb9ec3c02eb791ff4b30bc484111d6d1: Status 404 returned error can't find the container with id f0b4760d94941c1c76a55b8d857a669feb9ec3c02eb791ff4b30bc484111d6d1 Jan 20 17:17:21 crc kubenswrapper[4558]: I0120 17:17:21.238227 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"0ccdb20e-7467-40c5-be6c-62102e9ad6bd","Type":"ContainerStarted","Data":"f38d913a0ad6428f06323145898acb1a9022803bf80d196bd130931d31c90fab"} Jan 20 17:17:21 crc kubenswrapper[4558]: I0120 17:17:21.238321 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"0ccdb20e-7467-40c5-be6c-62102e9ad6bd","Type":"ContainerStarted","Data":"f0b4760d94941c1c76a55b8d857a669feb9ec3c02eb791ff4b30bc484111d6d1"} Jan 20 17:17:21 crc kubenswrapper[4558]: I0120 17:17:21.708219 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:17:21 crc kubenswrapper[4558]: I0120 17:17:21.710351 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:17:21 crc kubenswrapper[4558]: I0120 17:17:21.712366 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:17:21 crc kubenswrapper[4558]: I0120 17:17:21.712841 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovndbcluster-sb-config" Jan 20 17:17:21 crc kubenswrapper[4558]: I0120 17:17:21.712875 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovndbcluster-sb-scripts" Jan 20 17:17:21 crc kubenswrapper[4558]: I0120 17:17:21.713054 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ovncluster-ovndbcluster-sb-dockercfg-b48w8" Jan 20 17:17:21 crc kubenswrapper[4558]: I0120 17:17:21.714957 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ovndbcluster-sb-ovndbs" Jan 20 17:17:21 crc kubenswrapper[4558]: I0120 17:17:21.871986 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wbdcm\" (UniqueName: \"kubernetes.io/projected/b2127dbe-7318-41ef-871f-16ee1db22243-kube-api-access-wbdcm\") pod \"ovsdbserver-sb-0\" (UID: \"b2127dbe-7318-41ef-871f-16ee1db22243\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:17:21 crc kubenswrapper[4558]: I0120 17:17:21.872061 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b2127dbe-7318-41ef-871f-16ee1db22243-config\") pod \"ovsdbserver-sb-0\" (UID: \"b2127dbe-7318-41ef-871f-16ee1db22243\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:17:21 crc kubenswrapper[4558]: I0120 17:17:21.872093 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2127dbe-7318-41ef-871f-16ee1db22243-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"b2127dbe-7318-41ef-871f-16ee1db22243\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:17:21 crc kubenswrapper[4558]: I0120 17:17:21.872335 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/b2127dbe-7318-41ef-871f-16ee1db22243-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"b2127dbe-7318-41ef-871f-16ee1db22243\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:17:21 crc kubenswrapper[4558]: I0120 17:17:21.872468 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-sb-0\" (UID: \"b2127dbe-7318-41ef-871f-16ee1db22243\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:17:21 crc kubenswrapper[4558]: I0120 17:17:21.872525 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b2127dbe-7318-41ef-871f-16ee1db22243-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"b2127dbe-7318-41ef-871f-16ee1db22243\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:17:21 crc kubenswrapper[4558]: I0120 17:17:21.872594 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/b2127dbe-7318-41ef-871f-16ee1db22243-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"b2127dbe-7318-41ef-871f-16ee1db22243\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:17:21 crc kubenswrapper[4558]: I0120 17:17:21.872669 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/b2127dbe-7318-41ef-871f-16ee1db22243-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"b2127dbe-7318-41ef-871f-16ee1db22243\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:17:21 crc kubenswrapper[4558]: I0120 17:17:21.974688 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-sb-0\" (UID: \"b2127dbe-7318-41ef-871f-16ee1db22243\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:17:21 crc kubenswrapper[4558]: I0120 17:17:21.974739 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b2127dbe-7318-41ef-871f-16ee1db22243-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"b2127dbe-7318-41ef-871f-16ee1db22243\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:17:21 crc kubenswrapper[4558]: I0120 17:17:21.974791 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b2127dbe-7318-41ef-871f-16ee1db22243-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"b2127dbe-7318-41ef-871f-16ee1db22243\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:17:21 crc kubenswrapper[4558]: I0120 17:17:21.974840 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/b2127dbe-7318-41ef-871f-16ee1db22243-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"b2127dbe-7318-41ef-871f-16ee1db22243\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:17:21 crc kubenswrapper[4558]: I0120 17:17:21.974898 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wbdcm\" (UniqueName: \"kubernetes.io/projected/b2127dbe-7318-41ef-871f-16ee1db22243-kube-api-access-wbdcm\") pod \"ovsdbserver-sb-0\" (UID: \"b2127dbe-7318-41ef-871f-16ee1db22243\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:17:21 crc kubenswrapper[4558]: I0120 17:17:21.974945 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b2127dbe-7318-41ef-871f-16ee1db22243-config\") pod \"ovsdbserver-sb-0\" (UID: \"b2127dbe-7318-41ef-871f-16ee1db22243\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:17:21 crc kubenswrapper[4558]: I0120 17:17:21.974969 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2127dbe-7318-41ef-871f-16ee1db22243-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"b2127dbe-7318-41ef-871f-16ee1db22243\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:17:21 crc kubenswrapper[4558]: I0120 17:17:21.975011 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/b2127dbe-7318-41ef-871f-16ee1db22243-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: 
\"b2127dbe-7318-41ef-871f-16ee1db22243\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:17:21 crc kubenswrapper[4558]: I0120 17:17:21.975201 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-sb-0\" (UID: \"b2127dbe-7318-41ef-871f-16ee1db22243\") device mount path \"/mnt/openstack/pv09\"" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:17:21 crc kubenswrapper[4558]: I0120 17:17:21.975642 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/b2127dbe-7318-41ef-871f-16ee1db22243-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"b2127dbe-7318-41ef-871f-16ee1db22243\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:17:21 crc kubenswrapper[4558]: I0120 17:17:21.976024 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b2127dbe-7318-41ef-871f-16ee1db22243-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"b2127dbe-7318-41ef-871f-16ee1db22243\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:17:21 crc kubenswrapper[4558]: I0120 17:17:21.976155 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b2127dbe-7318-41ef-871f-16ee1db22243-config\") pod \"ovsdbserver-sb-0\" (UID: \"b2127dbe-7318-41ef-871f-16ee1db22243\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:17:21 crc kubenswrapper[4558]: I0120 17:17:21.981427 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/b2127dbe-7318-41ef-871f-16ee1db22243-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"b2127dbe-7318-41ef-871f-16ee1db22243\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:17:21 crc kubenswrapper[4558]: I0120 17:17:21.982269 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2127dbe-7318-41ef-871f-16ee1db22243-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"b2127dbe-7318-41ef-871f-16ee1db22243\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:17:21 crc kubenswrapper[4558]: I0120 17:17:21.984063 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b2127dbe-7318-41ef-871f-16ee1db22243-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"b2127dbe-7318-41ef-871f-16ee1db22243\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:17:21 crc kubenswrapper[4558]: I0120 17:17:21.990973 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wbdcm\" (UniqueName: \"kubernetes.io/projected/b2127dbe-7318-41ef-871f-16ee1db22243-kube-api-access-wbdcm\") pod \"ovsdbserver-sb-0\" (UID: \"b2127dbe-7318-41ef-871f-16ee1db22243\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:17:21 crc kubenswrapper[4558]: I0120 17:17:21.996673 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-sb-0\" (UID: \"b2127dbe-7318-41ef-871f-16ee1db22243\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:17:22 crc kubenswrapper[4558]: I0120 17:17:22.030103 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:17:22 crc kubenswrapper[4558]: I0120 17:17:22.250461 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"0ccdb20e-7467-40c5-be6c-62102e9ad6bd","Type":"ContainerStarted","Data":"0c6c50e15f6ca65f62124e7e4fd437de49116ef77e49acb8744a70d8b87c4220"} Jan 20 17:17:22 crc kubenswrapper[4558]: I0120 17:17:22.268760 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ovsdbserver-nb-0" podStartSLOduration=3.268739455 podStartE2EDuration="3.268739455s" podCreationTimestamp="2026-01-20 17:17:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:17:22.267394997 +0000 UTC m=+2136.027732965" watchObservedRunningTime="2026-01-20 17:17:22.268739455 +0000 UTC m=+2136.029077423" Jan 20 17:17:22 crc kubenswrapper[4558]: I0120 17:17:22.374431 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/openstack-galera-2" Jan 20 17:17:22 crc kubenswrapper[4558]: I0120 17:17:22.374496 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/openstack-galera-2" Jan 20 17:17:22 crc kubenswrapper[4558]: I0120 17:17:22.389402 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/openstack-galera-1" Jan 20 17:17:22 crc kubenswrapper[4558]: I0120 17:17:22.389464 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/openstack-galera-1" Jan 20 17:17:22 crc kubenswrapper[4558]: I0120 17:17:22.434440 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:17:22 crc kubenswrapper[4558]: W0120 17:17:22.436547 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb2127dbe_7318_41ef_871f_16ee1db22243.slice/crio-223ee8504524a30c2495e065b1fc6e55126ea838b301c82a6ab316b95d82b101 WatchSource:0}: Error finding container 223ee8504524a30c2495e065b1fc6e55126ea838b301c82a6ab316b95d82b101: Status 404 returned error can't find the container with id 223ee8504524a30c2495e065b1fc6e55126ea838b301c82a6ab316b95d82b101 Jan 20 17:17:22 crc kubenswrapper[4558]: I0120 17:17:22.626552 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:17:22 crc kubenswrapper[4558]: I0120 17:17:22.627049 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:17:23 crc kubenswrapper[4558]: I0120 17:17:23.261212 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"b2127dbe-7318-41ef-871f-16ee1db22243","Type":"ContainerStarted","Data":"a92ae70194b9e1d3dcba1794977e0ca18866d630d73b53d8ed2581ac1fe1d3ac"} Jan 20 17:17:23 crc kubenswrapper[4558]: I0120 17:17:23.261725 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"b2127dbe-7318-41ef-871f-16ee1db22243","Type":"ContainerStarted","Data":"ba8bdba5d5e7c4a36c71eb8170fbd87c43bae0b18995a96e5c7e162d103c69d1"} Jan 20 17:17:23 crc kubenswrapper[4558]: I0120 17:17:23.261745 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" 
event={"ID":"b2127dbe-7318-41ef-871f-16ee1db22243","Type":"ContainerStarted","Data":"223ee8504524a30c2495e065b1fc6e55126ea838b301c82a6ab316b95d82b101"} Jan 20 17:17:23 crc kubenswrapper[4558]: I0120 17:17:23.290249 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ovsdbserver-sb-0" podStartSLOduration=3.2902308319999998 podStartE2EDuration="3.290230832s" podCreationTimestamp="2026-01-20 17:17:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:17:23.284509553 +0000 UTC m=+2137.044847519" watchObservedRunningTime="2026-01-20 17:17:23.290230832 +0000 UTC m=+2137.050568798" Jan 20 17:17:23 crc kubenswrapper[4558]: I0120 17:17:23.538782 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:17:23 crc kubenswrapper[4558]: I0120 17:17:23.734497 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:17:23 crc kubenswrapper[4558]: I0120 17:17:23.734566 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:17:23 crc kubenswrapper[4558]: I0120 17:17:23.734937 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/openstack-cell1-galera-2" Jan 20 17:17:23 crc kubenswrapper[4558]: I0120 17:17:23.735052 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/openstack-cell1-galera-2" Jan 20 17:17:23 crc kubenswrapper[4558]: I0120 17:17:23.753275 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/openstack-cell1-galera-1" Jan 20 17:17:23 crc kubenswrapper[4558]: I0120 17:17:23.753345 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/openstack-cell1-galera-1" Jan 20 17:17:25 crc kubenswrapper[4558]: I0120 17:17:25.030843 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:17:25 crc kubenswrapper[4558]: I0120 17:17:25.064717 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:17:25 crc kubenswrapper[4558]: I0120 17:17:25.167055 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/openstack-galera-2" Jan 20 17:17:25 crc kubenswrapper[4558]: I0120 17:17:25.232328 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/openstack-galera-2" Jan 20 17:17:25 crc kubenswrapper[4558]: I0120 17:17:25.279224 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:17:25 crc kubenswrapper[4558]: I0120 17:17:25.538699 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:17:25 crc kubenswrapper[4558]: I0120 17:17:25.995429 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:17:26 crc kubenswrapper[4558]: I0120 17:17:26.580495 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:17:26 crc kubenswrapper[4558]: I0120 
17:17:26.612801 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:17:26 crc kubenswrapper[4558]: I0120 17:17:26.953177 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/swift-storage-0"] Jan 20 17:17:26 crc kubenswrapper[4558]: I0120 17:17:26.957825 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:17:26 crc kubenswrapper[4558]: I0120 17:17:26.963044 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"swift-conf" Jan 20 17:17:26 crc kubenswrapper[4558]: I0120 17:17:26.963254 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"swift-ring-files" Jan 20 17:17:26 crc kubenswrapper[4558]: I0120 17:17:26.963415 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"swift-swift-dockercfg-vthzt" Jan 20 17:17:26 crc kubenswrapper[4558]: I0120 17:17:26.963582 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"swift-storage-config-data" Jan 20 17:17:26 crc kubenswrapper[4558]: I0120 17:17:26.982842 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-storage-0"] Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 17:17:27.058772 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 17:17:27.080356 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/fae781f7-d8cb-4f32-992f-43d7dac82655-lock\") pod \"swift-storage-0\" (UID: \"fae781f7-d8cb-4f32-992f-43d7dac82655\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 17:17:27.080518 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"swift-storage-0\" (UID: \"fae781f7-d8cb-4f32-992f-43d7dac82655\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 17:17:27.080612 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-67zxv\" (UniqueName: \"kubernetes.io/projected/fae781f7-d8cb-4f32-992f-43d7dac82655-kube-api-access-67zxv\") pod \"swift-storage-0\" (UID: \"fae781f7-d8cb-4f32-992f-43d7dac82655\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 17:17:27.080657 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/fae781f7-d8cb-4f32-992f-43d7dac82655-cache\") pod \"swift-storage-0\" (UID: \"fae781f7-d8cb-4f32-992f-43d7dac82655\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 17:17:27.080705 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/fae781f7-d8cb-4f32-992f-43d7dac82655-etc-swift\") pod \"swift-storage-0\" (UID: \"fae781f7-d8cb-4f32-992f-43d7dac82655\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 17:17:27.182334 4558 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"swift-storage-0\" (UID: \"fae781f7-d8cb-4f32-992f-43d7dac82655\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 17:17:27.182419 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-67zxv\" (UniqueName: \"kubernetes.io/projected/fae781f7-d8cb-4f32-992f-43d7dac82655-kube-api-access-67zxv\") pod \"swift-storage-0\" (UID: \"fae781f7-d8cb-4f32-992f-43d7dac82655\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 17:17:27.182455 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/fae781f7-d8cb-4f32-992f-43d7dac82655-cache\") pod \"swift-storage-0\" (UID: \"fae781f7-d8cb-4f32-992f-43d7dac82655\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 17:17:27.182516 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/fae781f7-d8cb-4f32-992f-43d7dac82655-etc-swift\") pod \"swift-storage-0\" (UID: \"fae781f7-d8cb-4f32-992f-43d7dac82655\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 17:17:27.182560 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/fae781f7-d8cb-4f32-992f-43d7dac82655-lock\") pod \"swift-storage-0\" (UID: \"fae781f7-d8cb-4f32-992f-43d7dac82655\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 17:17:27.182704 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"swift-storage-0\" (UID: \"fae781f7-d8cb-4f32-992f-43d7dac82655\") device mount path \"/mnt/openstack/pv07\"" pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 17:17:27.182974 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/fae781f7-d8cb-4f32-992f-43d7dac82655-cache\") pod \"swift-storage-0\" (UID: \"fae781f7-d8cb-4f32-992f-43d7dac82655\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:17:27 crc kubenswrapper[4558]: E0120 17:17:27.183022 4558 projected.go:288] Couldn't get configMap openstack-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 20 17:17:27 crc kubenswrapper[4558]: E0120 17:17:27.183046 4558 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Jan 20 17:17:27 crc kubenswrapper[4558]: E0120 17:17:27.183096 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/fae781f7-d8cb-4f32-992f-43d7dac82655-etc-swift podName:fae781f7-d8cb-4f32-992f-43d7dac82655 nodeName:}" failed. No retries permitted until 2026-01-20 17:17:27.683081718 +0000 UTC m=+2141.443419685 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/fae781f7-d8cb-4f32-992f-43d7dac82655-etc-swift") pod "swift-storage-0" (UID: "fae781f7-d8cb-4f32-992f-43d7dac82655") : configmap "swift-ring-files" not found Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 17:17:27.183134 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/fae781f7-d8cb-4f32-992f-43d7dac82655-lock\") pod \"swift-storage-0\" (UID: \"fae781f7-d8cb-4f32-992f-43d7dac82655\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 17:17:27.200203 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-67zxv\" (UniqueName: \"kubernetes.io/projected/fae781f7-d8cb-4f32-992f-43d7dac82655-kube-api-access-67zxv\") pod \"swift-storage-0\" (UID: \"fae781f7-d8cb-4f32-992f-43d7dac82655\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 17:17:27.203970 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 17:17:27.209687 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 17:17:27.212725 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovnnorthd-scripts" Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 17:17:27.213067 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ovnnorthd-ovnnorthd-dockercfg-tzpm7" Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 17:17:27.213365 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ovnnorthd-ovndbs" Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 17:17:27.213561 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovnnorthd-config" Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 17:17:27.214764 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 17:17:27.217141 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"swift-storage-0\" (UID: \"fae781f7-d8cb-4f32-992f-43d7dac82655\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 17:17:27.334410 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 17:17:27.334495 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 17:17:27.362634 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-gms56"] Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 17:17:27.363659 4558 util.go:30] "No sandbox 
for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-ring-rebalance-gms56" Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 17:17:27.364931 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"swift-proxy-config-data" Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 17:17:27.366298 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"swift-ring-scripts" Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 17:17:27.366358 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"swift-ring-config-data" Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 17:17:27.369856 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-gms56"] Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 17:17:27.387591 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/88418ec6-c589-463f-b8ec-c12464810c07-scripts\") pod \"ovn-northd-0\" (UID: \"88418ec6-c589-463f-b8ec-c12464810c07\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 17:17:27.387645 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k5fjg\" (UniqueName: \"kubernetes.io/projected/88418ec6-c589-463f-b8ec-c12464810c07-kube-api-access-k5fjg\") pod \"ovn-northd-0\" (UID: \"88418ec6-c589-463f-b8ec-c12464810c07\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 17:17:27.387844 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/88418ec6-c589-463f-b8ec-c12464810c07-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"88418ec6-c589-463f-b8ec-c12464810c07\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 17:17:27.387914 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88418ec6-c589-463f-b8ec-c12464810c07-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"88418ec6-c589-463f-b8ec-c12464810c07\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 17:17:27.388107 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/88418ec6-c589-463f-b8ec-c12464810c07-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"88418ec6-c589-463f-b8ec-c12464810c07\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 17:17:27.388156 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/88418ec6-c589-463f-b8ec-c12464810c07-config\") pod \"ovn-northd-0\" (UID: \"88418ec6-c589-463f-b8ec-c12464810c07\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 17:17:27.388236 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/88418ec6-c589-463f-b8ec-c12464810c07-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"88418ec6-c589-463f-b8ec-c12464810c07\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:17:27 
crc kubenswrapper[4558]: I0120 17:17:27.489501 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/16316176-29c1-476a-8142-6a3427dd7da4-ring-data-devices\") pod \"swift-ring-rebalance-gms56\" (UID: \"16316176-29c1-476a-8142-6a3427dd7da4\") " pod="openstack-kuttl-tests/swift-ring-rebalance-gms56" Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 17:17:27.489556 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/16316176-29c1-476a-8142-6a3427dd7da4-dispersionconf\") pod \"swift-ring-rebalance-gms56\" (UID: \"16316176-29c1-476a-8142-6a3427dd7da4\") " pod="openstack-kuttl-tests/swift-ring-rebalance-gms56" Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 17:17:27.489579 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16316176-29c1-476a-8142-6a3427dd7da4-combined-ca-bundle\") pod \"swift-ring-rebalance-gms56\" (UID: \"16316176-29c1-476a-8142-6a3427dd7da4\") " pod="openstack-kuttl-tests/swift-ring-rebalance-gms56" Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 17:17:27.489985 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/88418ec6-c589-463f-b8ec-c12464810c07-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"88418ec6-c589-463f-b8ec-c12464810c07\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 17:17:27.490039 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tlqxb\" (UniqueName: \"kubernetes.io/projected/16316176-29c1-476a-8142-6a3427dd7da4-kube-api-access-tlqxb\") pod \"swift-ring-rebalance-gms56\" (UID: \"16316176-29c1-476a-8142-6a3427dd7da4\") " pod="openstack-kuttl-tests/swift-ring-rebalance-gms56" Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 17:17:27.490074 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/16316176-29c1-476a-8142-6a3427dd7da4-scripts\") pod \"swift-ring-rebalance-gms56\" (UID: \"16316176-29c1-476a-8142-6a3427dd7da4\") " pod="openstack-kuttl-tests/swift-ring-rebalance-gms56" Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 17:17:27.490129 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88418ec6-c589-463f-b8ec-c12464810c07-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"88418ec6-c589-463f-b8ec-c12464810c07\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 17:17:27.490613 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/88418ec6-c589-463f-b8ec-c12464810c07-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"88418ec6-c589-463f-b8ec-c12464810c07\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 17:17:27.490637 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/88418ec6-c589-463f-b8ec-c12464810c07-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"88418ec6-c589-463f-b8ec-c12464810c07\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:17:27 
crc kubenswrapper[4558]: I0120 17:17:27.490873 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/88418ec6-c589-463f-b8ec-c12464810c07-config\") pod \"ovn-northd-0\" (UID: \"88418ec6-c589-463f-b8ec-c12464810c07\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 17:17:27.490994 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/88418ec6-c589-463f-b8ec-c12464810c07-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"88418ec6-c589-463f-b8ec-c12464810c07\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 17:17:27.491751 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/88418ec6-c589-463f-b8ec-c12464810c07-scripts\") pod \"ovn-northd-0\" (UID: \"88418ec6-c589-463f-b8ec-c12464810c07\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 17:17:27.491784 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/16316176-29c1-476a-8142-6a3427dd7da4-swiftconf\") pod \"swift-ring-rebalance-gms56\" (UID: \"16316176-29c1-476a-8142-6a3427dd7da4\") " pod="openstack-kuttl-tests/swift-ring-rebalance-gms56" Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 17:17:27.491866 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/16316176-29c1-476a-8142-6a3427dd7da4-etc-swift\") pod \"swift-ring-rebalance-gms56\" (UID: \"16316176-29c1-476a-8142-6a3427dd7da4\") " pod="openstack-kuttl-tests/swift-ring-rebalance-gms56" Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 17:17:27.491896 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k5fjg\" (UniqueName: \"kubernetes.io/projected/88418ec6-c589-463f-b8ec-c12464810c07-kube-api-access-k5fjg\") pod \"ovn-northd-0\" (UID: \"88418ec6-c589-463f-b8ec-c12464810c07\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 17:17:27.493229 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/88418ec6-c589-463f-b8ec-c12464810c07-config\") pod \"ovn-northd-0\" (UID: \"88418ec6-c589-463f-b8ec-c12464810c07\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 17:17:27.493262 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/88418ec6-c589-463f-b8ec-c12464810c07-scripts\") pod \"ovn-northd-0\" (UID: \"88418ec6-c589-463f-b8ec-c12464810c07\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 17:17:27.503506 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/88418ec6-c589-463f-b8ec-c12464810c07-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"88418ec6-c589-463f-b8ec-c12464810c07\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 17:17:27.503678 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/88418ec6-c589-463f-b8ec-c12464810c07-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"88418ec6-c589-463f-b8ec-c12464810c07\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 17:17:27.503845 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88418ec6-c589-463f-b8ec-c12464810c07-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"88418ec6-c589-463f-b8ec-c12464810c07\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 17:17:27.508704 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k5fjg\" (UniqueName: \"kubernetes.io/projected/88418ec6-c589-463f-b8ec-c12464810c07-kube-api-access-k5fjg\") pod \"ovn-northd-0\" (UID: \"88418ec6-c589-463f-b8ec-c12464810c07\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 17:17:27.574369 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 17:17:27.593028 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/16316176-29c1-476a-8142-6a3427dd7da4-swiftconf\") pod \"swift-ring-rebalance-gms56\" (UID: \"16316176-29c1-476a-8142-6a3427dd7da4\") " pod="openstack-kuttl-tests/swift-ring-rebalance-gms56" Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 17:17:27.593079 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/16316176-29c1-476a-8142-6a3427dd7da4-etc-swift\") pod \"swift-ring-rebalance-gms56\" (UID: \"16316176-29c1-476a-8142-6a3427dd7da4\") " pod="openstack-kuttl-tests/swift-ring-rebalance-gms56" Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 17:17:27.593113 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/16316176-29c1-476a-8142-6a3427dd7da4-ring-data-devices\") pod \"swift-ring-rebalance-gms56\" (UID: \"16316176-29c1-476a-8142-6a3427dd7da4\") " pod="openstack-kuttl-tests/swift-ring-rebalance-gms56" Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 17:17:27.593141 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16316176-29c1-476a-8142-6a3427dd7da4-combined-ca-bundle\") pod \"swift-ring-rebalance-gms56\" (UID: \"16316176-29c1-476a-8142-6a3427dd7da4\") " pod="openstack-kuttl-tests/swift-ring-rebalance-gms56" Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 17:17:27.593172 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/16316176-29c1-476a-8142-6a3427dd7da4-dispersionconf\") pod \"swift-ring-rebalance-gms56\" (UID: \"16316176-29c1-476a-8142-6a3427dd7da4\") " pod="openstack-kuttl-tests/swift-ring-rebalance-gms56" Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 17:17:27.593214 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tlqxb\" (UniqueName: \"kubernetes.io/projected/16316176-29c1-476a-8142-6a3427dd7da4-kube-api-access-tlqxb\") pod \"swift-ring-rebalance-gms56\" (UID: \"16316176-29c1-476a-8142-6a3427dd7da4\") " pod="openstack-kuttl-tests/swift-ring-rebalance-gms56" Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 
17:17:27.593234 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/16316176-29c1-476a-8142-6a3427dd7da4-scripts\") pod \"swift-ring-rebalance-gms56\" (UID: \"16316176-29c1-476a-8142-6a3427dd7da4\") " pod="openstack-kuttl-tests/swift-ring-rebalance-gms56" Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 17:17:27.593850 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/16316176-29c1-476a-8142-6a3427dd7da4-scripts\") pod \"swift-ring-rebalance-gms56\" (UID: \"16316176-29c1-476a-8142-6a3427dd7da4\") " pod="openstack-kuttl-tests/swift-ring-rebalance-gms56" Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 17:17:27.595315 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/16316176-29c1-476a-8142-6a3427dd7da4-etc-swift\") pod \"swift-ring-rebalance-gms56\" (UID: \"16316176-29c1-476a-8142-6a3427dd7da4\") " pod="openstack-kuttl-tests/swift-ring-rebalance-gms56" Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 17:17:27.595584 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/16316176-29c1-476a-8142-6a3427dd7da4-ring-data-devices\") pod \"swift-ring-rebalance-gms56\" (UID: \"16316176-29c1-476a-8142-6a3427dd7da4\") " pod="openstack-kuttl-tests/swift-ring-rebalance-gms56" Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 17:17:27.597401 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/16316176-29c1-476a-8142-6a3427dd7da4-dispersionconf\") pod \"swift-ring-rebalance-gms56\" (UID: \"16316176-29c1-476a-8142-6a3427dd7da4\") " pod="openstack-kuttl-tests/swift-ring-rebalance-gms56" Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 17:17:27.597702 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16316176-29c1-476a-8142-6a3427dd7da4-combined-ca-bundle\") pod \"swift-ring-rebalance-gms56\" (UID: \"16316176-29c1-476a-8142-6a3427dd7da4\") " pod="openstack-kuttl-tests/swift-ring-rebalance-gms56" Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 17:17:27.601452 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/16316176-29c1-476a-8142-6a3427dd7da4-swiftconf\") pod \"swift-ring-rebalance-gms56\" (UID: \"16316176-29c1-476a-8142-6a3427dd7da4\") " pod="openstack-kuttl-tests/swift-ring-rebalance-gms56" Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 17:17:27.609006 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tlqxb\" (UniqueName: \"kubernetes.io/projected/16316176-29c1-476a-8142-6a3427dd7da4-kube-api-access-tlqxb\") pod \"swift-ring-rebalance-gms56\" (UID: \"16316176-29c1-476a-8142-6a3427dd7da4\") " pod="openstack-kuttl-tests/swift-ring-rebalance-gms56" Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 17:17:27.678229 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/swift-ring-rebalance-gms56" Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 17:17:27.696507 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/fae781f7-d8cb-4f32-992f-43d7dac82655-etc-swift\") pod \"swift-storage-0\" (UID: \"fae781f7-d8cb-4f32-992f-43d7dac82655\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:17:27 crc kubenswrapper[4558]: E0120 17:17:27.696684 4558 projected.go:288] Couldn't get configMap openstack-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 20 17:17:27 crc kubenswrapper[4558]: E0120 17:17:27.696993 4558 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Jan 20 17:17:27 crc kubenswrapper[4558]: E0120 17:17:27.697092 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/fae781f7-d8cb-4f32-992f-43d7dac82655-etc-swift podName:fae781f7-d8cb-4f32-992f-43d7dac82655 nodeName:}" failed. No retries permitted until 2026-01-20 17:17:28.697072192 +0000 UTC m=+2142.457410159 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/fae781f7-d8cb-4f32-992f-43d7dac82655-etc-swift") pod "swift-storage-0" (UID: "fae781f7-d8cb-4f32-992f-43d7dac82655") : configmap "swift-ring-files" not found Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 17:17:27.813773 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/openstack-cell1-galera-2" Jan 20 17:17:27 crc kubenswrapper[4558]: I0120 17:17:27.890952 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/openstack-cell1-galera-2" Jan 20 17:17:28 crc kubenswrapper[4558]: I0120 17:17:28.013403 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:17:28 crc kubenswrapper[4558]: E0120 17:17:28.016961 4558 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 192.168.25.8:41274->192.168.25.8:43883: write tcp 192.168.25.8:41274->192.168.25.8:43883: write: broken pipe Jan 20 17:17:28 crc kubenswrapper[4558]: I0120 17:17:28.088984 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-gms56"] Jan 20 17:17:28 crc kubenswrapper[4558]: W0120 17:17:28.098519 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod16316176_29c1_476a_8142_6a3427dd7da4.slice/crio-b18e789cd4faad69b62a9cfb7f8085f90e1fe0fef86cdd94fafbc33277bc2f29 WatchSource:0}: Error finding container b18e789cd4faad69b62a9cfb7f8085f90e1fe0fef86cdd94fafbc33277bc2f29: Status 404 returned error can't find the container with id b18e789cd4faad69b62a9cfb7f8085f90e1fe0fef86cdd94fafbc33277bc2f29 Jan 20 17:17:28 crc kubenswrapper[4558]: I0120 17:17:28.320307 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"88418ec6-c589-463f-b8ec-c12464810c07","Type":"ContainerStarted","Data":"9c3bcaf0b0b1fe796f0f39a396e91571e66a0d2ff4f33099f19f4d03733e3f3c"} Jan 20 17:17:28 crc kubenswrapper[4558]: I0120 17:17:28.320653 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" 
event={"ID":"88418ec6-c589-463f-b8ec-c12464810c07","Type":"ContainerStarted","Data":"829d904640074384724ded901ca01ef16df44e91a2fc8f1c0086c25626a6fd37"} Jan 20 17:17:28 crc kubenswrapper[4558]: I0120 17:17:28.324724 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-ring-rebalance-gms56" event={"ID":"16316176-29c1-476a-8142-6a3427dd7da4","Type":"ContainerStarted","Data":"e753fe9a9ff4e55db2f42e6e4809dd92638bc59c527ff9a16c85cbd5e8061320"} Jan 20 17:17:28 crc kubenswrapper[4558]: I0120 17:17:28.324777 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-ring-rebalance-gms56" event={"ID":"16316176-29c1-476a-8142-6a3427dd7da4","Type":"ContainerStarted","Data":"b18e789cd4faad69b62a9cfb7f8085f90e1fe0fef86cdd94fafbc33277bc2f29"} Jan 20 17:17:28 crc kubenswrapper[4558]: I0120 17:17:28.347918 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/swift-ring-rebalance-gms56" podStartSLOduration=1.347897026 podStartE2EDuration="1.347897026s" podCreationTimestamp="2026-01-20 17:17:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:17:28.341530182 +0000 UTC m=+2142.101868149" watchObservedRunningTime="2026-01-20 17:17:28.347897026 +0000 UTC m=+2142.108234992" Jan 20 17:17:28 crc kubenswrapper[4558]: I0120 17:17:28.719755 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/fae781f7-d8cb-4f32-992f-43d7dac82655-etc-swift\") pod \"swift-storage-0\" (UID: \"fae781f7-d8cb-4f32-992f-43d7dac82655\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:17:28 crc kubenswrapper[4558]: E0120 17:17:28.720000 4558 projected.go:288] Couldn't get configMap openstack-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 20 17:17:28 crc kubenswrapper[4558]: E0120 17:17:28.720036 4558 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Jan 20 17:17:28 crc kubenswrapper[4558]: E0120 17:17:28.720111 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/fae781f7-d8cb-4f32-992f-43d7dac82655-etc-swift podName:fae781f7-d8cb-4f32-992f-43d7dac82655 nodeName:}" failed. No retries permitted until 2026-01-20 17:17:30.720088025 +0000 UTC m=+2144.480425992 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/fae781f7-d8cb-4f32-992f-43d7dac82655-etc-swift") pod "swift-storage-0" (UID: "fae781f7-d8cb-4f32-992f-43d7dac82655") : configmap "swift-ring-files" not found Jan 20 17:17:29 crc kubenswrapper[4558]: I0120 17:17:29.144025 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-db-create-9d9jp"] Jan 20 17:17:29 crc kubenswrapper[4558]: I0120 17:17:29.145363 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-db-create-9d9jp" Jan 20 17:17:29 crc kubenswrapper[4558]: I0120 17:17:29.159707 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-db-create-9d9jp"] Jan 20 17:17:29 crc kubenswrapper[4558]: I0120 17:17:29.225835 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-36de-account-create-update-pf4bc"] Jan 20 17:17:29 crc kubenswrapper[4558]: I0120 17:17:29.227098 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-36de-account-create-update-pf4bc" Jan 20 17:17:29 crc kubenswrapper[4558]: I0120 17:17:29.229061 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-db-secret" Jan 20 17:17:29 crc kubenswrapper[4558]: I0120 17:17:29.230575 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d9de665d-8b72-45e0-9829-5bd72bd86f41-operator-scripts\") pod \"glance-db-create-9d9jp\" (UID: \"d9de665d-8b72-45e0-9829-5bd72bd86f41\") " pod="openstack-kuttl-tests/glance-db-create-9d9jp" Jan 20 17:17:29 crc kubenswrapper[4558]: I0120 17:17:29.230628 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n66rr\" (UniqueName: \"kubernetes.io/projected/d9de665d-8b72-45e0-9829-5bd72bd86f41-kube-api-access-n66rr\") pod \"glance-db-create-9d9jp\" (UID: \"d9de665d-8b72-45e0-9829-5bd72bd86f41\") " pod="openstack-kuttl-tests/glance-db-create-9d9jp" Jan 20 17:17:29 crc kubenswrapper[4558]: I0120 17:17:29.239245 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-36de-account-create-update-pf4bc"] Jan 20 17:17:29 crc kubenswrapper[4558]: I0120 17:17:29.333361 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d9de665d-8b72-45e0-9829-5bd72bd86f41-operator-scripts\") pod \"glance-db-create-9d9jp\" (UID: \"d9de665d-8b72-45e0-9829-5bd72bd86f41\") " pod="openstack-kuttl-tests/glance-db-create-9d9jp" Jan 20 17:17:29 crc kubenswrapper[4558]: I0120 17:17:29.333405 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n66rr\" (UniqueName: \"kubernetes.io/projected/d9de665d-8b72-45e0-9829-5bd72bd86f41-kube-api-access-n66rr\") pod \"glance-db-create-9d9jp\" (UID: \"d9de665d-8b72-45e0-9829-5bd72bd86f41\") " pod="openstack-kuttl-tests/glance-db-create-9d9jp" Jan 20 17:17:29 crc kubenswrapper[4558]: I0120 17:17:29.333439 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c7lz8\" (UniqueName: \"kubernetes.io/projected/10cc58f5-333d-4a3b-af1e-ed0efe498d8d-kube-api-access-c7lz8\") pod \"glance-36de-account-create-update-pf4bc\" (UID: \"10cc58f5-333d-4a3b-af1e-ed0efe498d8d\") " pod="openstack-kuttl-tests/glance-36de-account-create-update-pf4bc" Jan 20 17:17:29 crc kubenswrapper[4558]: I0120 17:17:29.333500 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/10cc58f5-333d-4a3b-af1e-ed0efe498d8d-operator-scripts\") pod \"glance-36de-account-create-update-pf4bc\" (UID: \"10cc58f5-333d-4a3b-af1e-ed0efe498d8d\") " pod="openstack-kuttl-tests/glance-36de-account-create-update-pf4bc" Jan 20 17:17:29 crc 
kubenswrapper[4558]: I0120 17:17:29.334536 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d9de665d-8b72-45e0-9829-5bd72bd86f41-operator-scripts\") pod \"glance-db-create-9d9jp\" (UID: \"d9de665d-8b72-45e0-9829-5bd72bd86f41\") " pod="openstack-kuttl-tests/glance-db-create-9d9jp" Jan 20 17:17:29 crc kubenswrapper[4558]: I0120 17:17:29.338216 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"88418ec6-c589-463f-b8ec-c12464810c07","Type":"ContainerStarted","Data":"6ace6c8a650ab1df4d1b142fa5877db45863c733c61ee810479ce811a6bf28d2"} Jan 20 17:17:29 crc kubenswrapper[4558]: I0120 17:17:29.338311 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:17:29 crc kubenswrapper[4558]: I0120 17:17:29.360646 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n66rr\" (UniqueName: \"kubernetes.io/projected/d9de665d-8b72-45e0-9829-5bd72bd86f41-kube-api-access-n66rr\") pod \"glance-db-create-9d9jp\" (UID: \"d9de665d-8b72-45e0-9829-5bd72bd86f41\") " pod="openstack-kuttl-tests/glance-db-create-9d9jp" Jan 20 17:17:29 crc kubenswrapper[4558]: I0120 17:17:29.371815 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ovn-northd-0" podStartSLOduration=2.371794716 podStartE2EDuration="2.371794716s" podCreationTimestamp="2026-01-20 17:17:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:17:29.369766731 +0000 UTC m=+2143.130104699" watchObservedRunningTime="2026-01-20 17:17:29.371794716 +0000 UTC m=+2143.132132682" Jan 20 17:17:29 crc kubenswrapper[4558]: I0120 17:17:29.435292 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c7lz8\" (UniqueName: \"kubernetes.io/projected/10cc58f5-333d-4a3b-af1e-ed0efe498d8d-kube-api-access-c7lz8\") pod \"glance-36de-account-create-update-pf4bc\" (UID: \"10cc58f5-333d-4a3b-af1e-ed0efe498d8d\") " pod="openstack-kuttl-tests/glance-36de-account-create-update-pf4bc" Jan 20 17:17:29 crc kubenswrapper[4558]: I0120 17:17:29.435407 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/10cc58f5-333d-4a3b-af1e-ed0efe498d8d-operator-scripts\") pod \"glance-36de-account-create-update-pf4bc\" (UID: \"10cc58f5-333d-4a3b-af1e-ed0efe498d8d\") " pod="openstack-kuttl-tests/glance-36de-account-create-update-pf4bc" Jan 20 17:17:29 crc kubenswrapper[4558]: I0120 17:17:29.438227 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/10cc58f5-333d-4a3b-af1e-ed0efe498d8d-operator-scripts\") pod \"glance-36de-account-create-update-pf4bc\" (UID: \"10cc58f5-333d-4a3b-af1e-ed0efe498d8d\") " pod="openstack-kuttl-tests/glance-36de-account-create-update-pf4bc" Jan 20 17:17:29 crc kubenswrapper[4558]: I0120 17:17:29.451978 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c7lz8\" (UniqueName: \"kubernetes.io/projected/10cc58f5-333d-4a3b-af1e-ed0efe498d8d-kube-api-access-c7lz8\") pod \"glance-36de-account-create-update-pf4bc\" (UID: \"10cc58f5-333d-4a3b-af1e-ed0efe498d8d\") " pod="openstack-kuttl-tests/glance-36de-account-create-update-pf4bc" Jan 20 17:17:29 crc 
kubenswrapper[4558]: I0120 17:17:29.460610 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-db-create-9d9jp" Jan 20 17:17:29 crc kubenswrapper[4558]: I0120 17:17:29.539937 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-36de-account-create-update-pf4bc" Jan 20 17:17:29 crc kubenswrapper[4558]: I0120 17:17:29.880491 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-db-create-9d9jp"] Jan 20 17:17:29 crc kubenswrapper[4558]: W0120 17:17:29.884174 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd9de665d_8b72_45e0_9829_5bd72bd86f41.slice/crio-2dd2a4e549971a7c2bdc7c8c2309346dad95f445c3010095c73b3211ed164a91 WatchSource:0}: Error finding container 2dd2a4e549971a7c2bdc7c8c2309346dad95f445c3010095c73b3211ed164a91: Status 404 returned error can't find the container with id 2dd2a4e549971a7c2bdc7c8c2309346dad95f445c3010095c73b3211ed164a91 Jan 20 17:17:29 crc kubenswrapper[4558]: I0120 17:17:29.979606 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-36de-account-create-update-pf4bc"] Jan 20 17:17:29 crc kubenswrapper[4558]: W0120 17:17:29.985388 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod10cc58f5_333d_4a3b_af1e_ed0efe498d8d.slice/crio-b6e12212b81f94118a20426046235ba1108fc8dd6dcbc838f23a918c1f81a583 WatchSource:0}: Error finding container b6e12212b81f94118a20426046235ba1108fc8dd6dcbc838f23a918c1f81a583: Status 404 returned error can't find the container with id b6e12212b81f94118a20426046235ba1108fc8dd6dcbc838f23a918c1f81a583 Jan 20 17:17:30 crc kubenswrapper[4558]: I0120 17:17:30.346232 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-create-9d9jp" event={"ID":"d9de665d-8b72-45e0-9829-5bd72bd86f41","Type":"ContainerStarted","Data":"788f6224ee4017a1c49676cf0158019aad3dadc64f19f16e8a8d308ee1b7ca67"} Jan 20 17:17:30 crc kubenswrapper[4558]: I0120 17:17:30.346290 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-create-9d9jp" event={"ID":"d9de665d-8b72-45e0-9829-5bd72bd86f41","Type":"ContainerStarted","Data":"2dd2a4e549971a7c2bdc7c8c2309346dad95f445c3010095c73b3211ed164a91"} Jan 20 17:17:30 crc kubenswrapper[4558]: I0120 17:17:30.348493 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-36de-account-create-update-pf4bc" event={"ID":"10cc58f5-333d-4a3b-af1e-ed0efe498d8d","Type":"ContainerStarted","Data":"0404131e6088ce70d30be218580224f5f7c40261b9f6a556f4d91e65b05b4710"} Jan 20 17:17:30 crc kubenswrapper[4558]: I0120 17:17:30.348545 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-36de-account-create-update-pf4bc" event={"ID":"10cc58f5-333d-4a3b-af1e-ed0efe498d8d","Type":"ContainerStarted","Data":"b6e12212b81f94118a20426046235ba1108fc8dd6dcbc838f23a918c1f81a583"} Jan 20 17:17:30 crc kubenswrapper[4558]: I0120 17:17:30.359624 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-db-create-9d9jp" podStartSLOduration=1.3596140700000001 podStartE2EDuration="1.35961407s" podCreationTimestamp="2026-01-20 17:17:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2026-01-20 17:17:30.357419623 +0000 UTC m=+2144.117757590" watchObservedRunningTime="2026-01-20 17:17:30.35961407 +0000 UTC m=+2144.119952037" Jan 20 17:17:30 crc kubenswrapper[4558]: I0120 17:17:30.377308 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-36de-account-create-update-pf4bc" podStartSLOduration=1.377290451 podStartE2EDuration="1.377290451s" podCreationTimestamp="2026-01-20 17:17:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:17:30.369800064 +0000 UTC m=+2144.130138022" watchObservedRunningTime="2026-01-20 17:17:30.377290451 +0000 UTC m=+2144.137628418" Jan 20 17:17:30 crc kubenswrapper[4558]: I0120 17:17:30.760576 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/fae781f7-d8cb-4f32-992f-43d7dac82655-etc-swift\") pod \"swift-storage-0\" (UID: \"fae781f7-d8cb-4f32-992f-43d7dac82655\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:17:30 crc kubenswrapper[4558]: E0120 17:17:30.760795 4558 projected.go:288] Couldn't get configMap openstack-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 20 17:17:30 crc kubenswrapper[4558]: E0120 17:17:30.760822 4558 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Jan 20 17:17:30 crc kubenswrapper[4558]: E0120 17:17:30.760890 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/fae781f7-d8cb-4f32-992f-43d7dac82655-etc-swift podName:fae781f7-d8cb-4f32-992f-43d7dac82655 nodeName:}" failed. No retries permitted until 2026-01-20 17:17:34.760869807 +0000 UTC m=+2148.521207773 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/fae781f7-d8cb-4f32-992f-43d7dac82655-etc-swift") pod "swift-storage-0" (UID: "fae781f7-d8cb-4f32-992f-43d7dac82655") : configmap "swift-ring-files" not found Jan 20 17:17:30 crc kubenswrapper[4558]: I0120 17:17:30.968207 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/root-account-create-update-fvs5h"] Jan 20 17:17:30 crc kubenswrapper[4558]: I0120 17:17:30.969454 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-fvs5h" Jan 20 17:17:30 crc kubenswrapper[4558]: I0120 17:17:30.975442 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"openstack-mariadb-root-db-secret" Jan 20 17:17:30 crc kubenswrapper[4558]: I0120 17:17:30.981519 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-fvs5h"] Jan 20 17:17:31 crc kubenswrapper[4558]: I0120 17:17:31.068367 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n2jpn\" (UniqueName: \"kubernetes.io/projected/2a29c72f-538c-4a48-95a3-1107464f9cb7-kube-api-access-n2jpn\") pod \"root-account-create-update-fvs5h\" (UID: \"2a29c72f-538c-4a48-95a3-1107464f9cb7\") " pod="openstack-kuttl-tests/root-account-create-update-fvs5h" Jan 20 17:17:31 crc kubenswrapper[4558]: I0120 17:17:31.068508 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2a29c72f-538c-4a48-95a3-1107464f9cb7-operator-scripts\") pod \"root-account-create-update-fvs5h\" (UID: \"2a29c72f-538c-4a48-95a3-1107464f9cb7\") " pod="openstack-kuttl-tests/root-account-create-update-fvs5h" Jan 20 17:17:31 crc kubenswrapper[4558]: I0120 17:17:31.170250 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n2jpn\" (UniqueName: \"kubernetes.io/projected/2a29c72f-538c-4a48-95a3-1107464f9cb7-kube-api-access-n2jpn\") pod \"root-account-create-update-fvs5h\" (UID: \"2a29c72f-538c-4a48-95a3-1107464f9cb7\") " pod="openstack-kuttl-tests/root-account-create-update-fvs5h" Jan 20 17:17:31 crc kubenswrapper[4558]: I0120 17:17:31.170365 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2a29c72f-538c-4a48-95a3-1107464f9cb7-operator-scripts\") pod \"root-account-create-update-fvs5h\" (UID: \"2a29c72f-538c-4a48-95a3-1107464f9cb7\") " pod="openstack-kuttl-tests/root-account-create-update-fvs5h" Jan 20 17:17:31 crc kubenswrapper[4558]: I0120 17:17:31.171330 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2a29c72f-538c-4a48-95a3-1107464f9cb7-operator-scripts\") pod \"root-account-create-update-fvs5h\" (UID: \"2a29c72f-538c-4a48-95a3-1107464f9cb7\") " pod="openstack-kuttl-tests/root-account-create-update-fvs5h" Jan 20 17:17:31 crc kubenswrapper[4558]: I0120 17:17:31.196927 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n2jpn\" (UniqueName: \"kubernetes.io/projected/2a29c72f-538c-4a48-95a3-1107464f9cb7-kube-api-access-n2jpn\") pod \"root-account-create-update-fvs5h\" (UID: \"2a29c72f-538c-4a48-95a3-1107464f9cb7\") " pod="openstack-kuttl-tests/root-account-create-update-fvs5h" Jan 20 17:17:31 crc kubenswrapper[4558]: I0120 17:17:31.291537 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-fvs5h" Jan 20 17:17:31 crc kubenswrapper[4558]: I0120 17:17:31.705425 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-fvs5h"] Jan 20 17:17:32 crc kubenswrapper[4558]: I0120 17:17:32.369622 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-fvs5h" event={"ID":"2a29c72f-538c-4a48-95a3-1107464f9cb7","Type":"ContainerStarted","Data":"f352ada9d1b11fc0753e4271db70a1d1f3a8416472398fbddeb220865835978e"} Jan 20 17:17:32 crc kubenswrapper[4558]: I0120 17:17:32.369953 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-fvs5h" event={"ID":"2a29c72f-538c-4a48-95a3-1107464f9cb7","Type":"ContainerStarted","Data":"fceff2b0812c899c17fa7abc9394506b164ee362548569496d6ec053f1e60409"} Jan 20 17:17:32 crc kubenswrapper[4558]: I0120 17:17:32.390283 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/root-account-create-update-fvs5h" podStartSLOduration=2.390261174 podStartE2EDuration="2.390261174s" podCreationTimestamp="2026-01-20 17:17:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:17:32.385155883 +0000 UTC m=+2146.145493851" watchObservedRunningTime="2026-01-20 17:17:32.390261174 +0000 UTC m=+2146.150599140" Jan 20 17:17:33 crc kubenswrapper[4558]: I0120 17:17:33.383008 4558 generic.go:334] "Generic (PLEG): container finished" podID="2a29c72f-538c-4a48-95a3-1107464f9cb7" containerID="f352ada9d1b11fc0753e4271db70a1d1f3a8416472398fbddeb220865835978e" exitCode=0 Jan 20 17:17:33 crc kubenswrapper[4558]: I0120 17:17:33.383200 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-fvs5h" event={"ID":"2a29c72f-538c-4a48-95a3-1107464f9cb7","Type":"ContainerDied","Data":"f352ada9d1b11fc0753e4271db70a1d1f3a8416472398fbddeb220865835978e"} Jan 20 17:17:33 crc kubenswrapper[4558]: I0120 17:17:33.393516 4558 generic.go:334] "Generic (PLEG): container finished" podID="d9de665d-8b72-45e0-9829-5bd72bd86f41" containerID="788f6224ee4017a1c49676cf0158019aad3dadc64f19f16e8a8d308ee1b7ca67" exitCode=0 Jan 20 17:17:33 crc kubenswrapper[4558]: I0120 17:17:33.393662 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-create-9d9jp" event={"ID":"d9de665d-8b72-45e0-9829-5bd72bd86f41","Type":"ContainerDied","Data":"788f6224ee4017a1c49676cf0158019aad3dadc64f19f16e8a8d308ee1b7ca67"} Jan 20 17:17:33 crc kubenswrapper[4558]: I0120 17:17:33.397284 4558 generic.go:334] "Generic (PLEG): container finished" podID="10cc58f5-333d-4a3b-af1e-ed0efe498d8d" containerID="0404131e6088ce70d30be218580224f5f7c40261b9f6a556f4d91e65b05b4710" exitCode=0 Jan 20 17:17:33 crc kubenswrapper[4558]: I0120 17:17:33.397312 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-36de-account-create-update-pf4bc" event={"ID":"10cc58f5-333d-4a3b-af1e-ed0efe498d8d","Type":"ContainerDied","Data":"0404131e6088ce70d30be218580224f5f7c40261b9f6a556f4d91e65b05b4710"} Jan 20 17:17:33 crc kubenswrapper[4558]: I0120 17:17:33.588603 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-db-create-9dzpb"] Jan 20 17:17:33 crc kubenswrapper[4558]: I0120 17:17:33.590223 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-db-create-9dzpb" Jan 20 17:17:33 crc kubenswrapper[4558]: I0120 17:17:33.598671 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-db-create-9dzpb"] Jan 20 17:17:33 crc kubenswrapper[4558]: I0120 17:17:33.686651 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/openstack-galera-1" Jan 20 17:17:33 crc kubenswrapper[4558]: I0120 17:17:33.690866 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-f51d-account-create-update-tbxbn"] Jan 20 17:17:33 crc kubenswrapper[4558]: I0120 17:17:33.694579 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-f51d-account-create-update-tbxbn" Jan 20 17:17:33 crc kubenswrapper[4558]: I0120 17:17:33.696314 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-db-secret" Jan 20 17:17:33 crc kubenswrapper[4558]: I0120 17:17:33.702709 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-f51d-account-create-update-tbxbn"] Jan 20 17:17:33 crc kubenswrapper[4558]: I0120 17:17:33.724600 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/280c8a81-3eab-49fe-b275-d7238dbd7e0f-operator-scripts\") pod \"keystone-db-create-9dzpb\" (UID: \"280c8a81-3eab-49fe-b275-d7238dbd7e0f\") " pod="openstack-kuttl-tests/keystone-db-create-9dzpb" Jan 20 17:17:33 crc kubenswrapper[4558]: I0120 17:17:33.724732 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vhtjj\" (UniqueName: \"kubernetes.io/projected/280c8a81-3eab-49fe-b275-d7238dbd7e0f-kube-api-access-vhtjj\") pod \"keystone-db-create-9dzpb\" (UID: \"280c8a81-3eab-49fe-b275-d7238dbd7e0f\") " pod="openstack-kuttl-tests/keystone-db-create-9dzpb" Jan 20 17:17:33 crc kubenswrapper[4558]: I0120 17:17:33.773275 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/openstack-galera-1" Jan 20 17:17:33 crc kubenswrapper[4558]: I0120 17:17:33.826998 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vhtjj\" (UniqueName: \"kubernetes.io/projected/280c8a81-3eab-49fe-b275-d7238dbd7e0f-kube-api-access-vhtjj\") pod \"keystone-db-create-9dzpb\" (UID: \"280c8a81-3eab-49fe-b275-d7238dbd7e0f\") " pod="openstack-kuttl-tests/keystone-db-create-9dzpb" Jan 20 17:17:33 crc kubenswrapper[4558]: I0120 17:17:33.827349 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/280c8a81-3eab-49fe-b275-d7238dbd7e0f-operator-scripts\") pod \"keystone-db-create-9dzpb\" (UID: \"280c8a81-3eab-49fe-b275-d7238dbd7e0f\") " pod="openstack-kuttl-tests/keystone-db-create-9dzpb" Jan 20 17:17:33 crc kubenswrapper[4558]: I0120 17:17:33.827687 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/46ce5c99-2891-494b-a8fe-291f53beeeff-operator-scripts\") pod \"keystone-f51d-account-create-update-tbxbn\" (UID: \"46ce5c99-2891-494b-a8fe-291f53beeeff\") " pod="openstack-kuttl-tests/keystone-f51d-account-create-update-tbxbn" Jan 20 17:17:33 crc kubenswrapper[4558]: I0120 17:17:33.827843 4558 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v9rdx\" (UniqueName: \"kubernetes.io/projected/46ce5c99-2891-494b-a8fe-291f53beeeff-kube-api-access-v9rdx\") pod \"keystone-f51d-account-create-update-tbxbn\" (UID: \"46ce5c99-2891-494b-a8fe-291f53beeeff\") " pod="openstack-kuttl-tests/keystone-f51d-account-create-update-tbxbn" Jan 20 17:17:33 crc kubenswrapper[4558]: I0120 17:17:33.828282 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/280c8a81-3eab-49fe-b275-d7238dbd7e0f-operator-scripts\") pod \"keystone-db-create-9dzpb\" (UID: \"280c8a81-3eab-49fe-b275-d7238dbd7e0f\") " pod="openstack-kuttl-tests/keystone-db-create-9dzpb" Jan 20 17:17:33 crc kubenswrapper[4558]: I0120 17:17:33.833497 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/openstack-cell1-galera-2" podUID="9adbb250-26e3-4e45-b60f-3b4c83d59ba3" containerName="galera" probeResult="failure" output=< Jan 20 17:17:33 crc kubenswrapper[4558]: wsrep_local_state_comment (Donor/Desynced) differs from Synced Jan 20 17:17:33 crc kubenswrapper[4558]: > Jan 20 17:17:33 crc kubenswrapper[4558]: I0120 17:17:33.850721 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vhtjj\" (UniqueName: \"kubernetes.io/projected/280c8a81-3eab-49fe-b275-d7238dbd7e0f-kube-api-access-vhtjj\") pod \"keystone-db-create-9dzpb\" (UID: \"280c8a81-3eab-49fe-b275-d7238dbd7e0f\") " pod="openstack-kuttl-tests/keystone-db-create-9dzpb" Jan 20 17:17:33 crc kubenswrapper[4558]: I0120 17:17:33.919934 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-db-create-9dzpb" Jan 20 17:17:33 crc kubenswrapper[4558]: I0120 17:17:33.929531 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/46ce5c99-2891-494b-a8fe-291f53beeeff-operator-scripts\") pod \"keystone-f51d-account-create-update-tbxbn\" (UID: \"46ce5c99-2891-494b-a8fe-291f53beeeff\") " pod="openstack-kuttl-tests/keystone-f51d-account-create-update-tbxbn" Jan 20 17:17:33 crc kubenswrapper[4558]: I0120 17:17:33.929580 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v9rdx\" (UniqueName: \"kubernetes.io/projected/46ce5c99-2891-494b-a8fe-291f53beeeff-kube-api-access-v9rdx\") pod \"keystone-f51d-account-create-update-tbxbn\" (UID: \"46ce5c99-2891-494b-a8fe-291f53beeeff\") " pod="openstack-kuttl-tests/keystone-f51d-account-create-update-tbxbn" Jan 20 17:17:33 crc kubenswrapper[4558]: I0120 17:17:33.930434 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/46ce5c99-2891-494b-a8fe-291f53beeeff-operator-scripts\") pod \"keystone-f51d-account-create-update-tbxbn\" (UID: \"46ce5c99-2891-494b-a8fe-291f53beeeff\") " pod="openstack-kuttl-tests/keystone-f51d-account-create-update-tbxbn" Jan 20 17:17:33 crc kubenswrapper[4558]: I0120 17:17:33.963261 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/placement-db-create-jsj6g"] Jan 20 17:17:33 crc kubenswrapper[4558]: I0120 17:17:33.963683 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v9rdx\" (UniqueName: \"kubernetes.io/projected/46ce5c99-2891-494b-a8fe-291f53beeeff-kube-api-access-v9rdx\") pod 
\"keystone-f51d-account-create-update-tbxbn\" (UID: \"46ce5c99-2891-494b-a8fe-291f53beeeff\") " pod="openstack-kuttl-tests/keystone-f51d-account-create-update-tbxbn" Jan 20 17:17:33 crc kubenswrapper[4558]: I0120 17:17:33.964597 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-db-create-jsj6g" Jan 20 17:17:33 crc kubenswrapper[4558]: I0120 17:17:33.976349 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-db-create-jsj6g"] Jan 20 17:17:34 crc kubenswrapper[4558]: I0120 17:17:34.008274 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-f51d-account-create-update-tbxbn" Jan 20 17:17:34 crc kubenswrapper[4558]: I0120 17:17:34.085842 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/placement-d448-account-create-update-tf92z"] Jan 20 17:17:34 crc kubenswrapper[4558]: I0120 17:17:34.087428 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-d448-account-create-update-tf92z" Jan 20 17:17:34 crc kubenswrapper[4558]: I0120 17:17:34.090888 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-d448-account-create-update-tf92z"] Jan 20 17:17:34 crc kubenswrapper[4558]: I0120 17:17:34.092637 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"placement-db-secret" Jan 20 17:17:34 crc kubenswrapper[4558]: I0120 17:17:34.134865 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kcjxw\" (UniqueName: \"kubernetes.io/projected/2fff8a17-65b9-4288-81eb-337010e076f1-kube-api-access-kcjxw\") pod \"placement-db-create-jsj6g\" (UID: \"2fff8a17-65b9-4288-81eb-337010e076f1\") " pod="openstack-kuttl-tests/placement-db-create-jsj6g" Jan 20 17:17:34 crc kubenswrapper[4558]: I0120 17:17:34.135060 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2fff8a17-65b9-4288-81eb-337010e076f1-operator-scripts\") pod \"placement-db-create-jsj6g\" (UID: \"2fff8a17-65b9-4288-81eb-337010e076f1\") " pod="openstack-kuttl-tests/placement-db-create-jsj6g" Jan 20 17:17:34 crc kubenswrapper[4558]: I0120 17:17:34.236742 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kcjxw\" (UniqueName: \"kubernetes.io/projected/2fff8a17-65b9-4288-81eb-337010e076f1-kube-api-access-kcjxw\") pod \"placement-db-create-jsj6g\" (UID: \"2fff8a17-65b9-4288-81eb-337010e076f1\") " pod="openstack-kuttl-tests/placement-db-create-jsj6g" Jan 20 17:17:34 crc kubenswrapper[4558]: I0120 17:17:34.236807 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-svx28\" (UniqueName: \"kubernetes.io/projected/27829a8a-3c57-41bc-8e2a-361451d19075-kube-api-access-svx28\") pod \"placement-d448-account-create-update-tf92z\" (UID: \"27829a8a-3c57-41bc-8e2a-361451d19075\") " pod="openstack-kuttl-tests/placement-d448-account-create-update-tf92z" Jan 20 17:17:34 crc kubenswrapper[4558]: I0120 17:17:34.236843 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2fff8a17-65b9-4288-81eb-337010e076f1-operator-scripts\") pod \"placement-db-create-jsj6g\" (UID: 
\"2fff8a17-65b9-4288-81eb-337010e076f1\") " pod="openstack-kuttl-tests/placement-db-create-jsj6g" Jan 20 17:17:34 crc kubenswrapper[4558]: I0120 17:17:34.237017 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/27829a8a-3c57-41bc-8e2a-361451d19075-operator-scripts\") pod \"placement-d448-account-create-update-tf92z\" (UID: \"27829a8a-3c57-41bc-8e2a-361451d19075\") " pod="openstack-kuttl-tests/placement-d448-account-create-update-tf92z" Jan 20 17:17:34 crc kubenswrapper[4558]: I0120 17:17:34.238120 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2fff8a17-65b9-4288-81eb-337010e076f1-operator-scripts\") pod \"placement-db-create-jsj6g\" (UID: \"2fff8a17-65b9-4288-81eb-337010e076f1\") " pod="openstack-kuttl-tests/placement-db-create-jsj6g" Jan 20 17:17:34 crc kubenswrapper[4558]: I0120 17:17:34.253601 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kcjxw\" (UniqueName: \"kubernetes.io/projected/2fff8a17-65b9-4288-81eb-337010e076f1-kube-api-access-kcjxw\") pod \"placement-db-create-jsj6g\" (UID: \"2fff8a17-65b9-4288-81eb-337010e076f1\") " pod="openstack-kuttl-tests/placement-db-create-jsj6g" Jan 20 17:17:34 crc kubenswrapper[4558]: I0120 17:17:34.328864 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-db-create-jsj6g" Jan 20 17:17:34 crc kubenswrapper[4558]: I0120 17:17:34.338877 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-svx28\" (UniqueName: \"kubernetes.io/projected/27829a8a-3c57-41bc-8e2a-361451d19075-kube-api-access-svx28\") pod \"placement-d448-account-create-update-tf92z\" (UID: \"27829a8a-3c57-41bc-8e2a-361451d19075\") " pod="openstack-kuttl-tests/placement-d448-account-create-update-tf92z" Jan 20 17:17:34 crc kubenswrapper[4558]: I0120 17:17:34.338978 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/27829a8a-3c57-41bc-8e2a-361451d19075-operator-scripts\") pod \"placement-d448-account-create-update-tf92z\" (UID: \"27829a8a-3c57-41bc-8e2a-361451d19075\") " pod="openstack-kuttl-tests/placement-d448-account-create-update-tf92z" Jan 20 17:17:34 crc kubenswrapper[4558]: I0120 17:17:34.339693 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/27829a8a-3c57-41bc-8e2a-361451d19075-operator-scripts\") pod \"placement-d448-account-create-update-tf92z\" (UID: \"27829a8a-3c57-41bc-8e2a-361451d19075\") " pod="openstack-kuttl-tests/placement-d448-account-create-update-tf92z" Jan 20 17:17:34 crc kubenswrapper[4558]: I0120 17:17:34.354536 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-svx28\" (UniqueName: \"kubernetes.io/projected/27829a8a-3c57-41bc-8e2a-361451d19075-kube-api-access-svx28\") pod \"placement-d448-account-create-update-tf92z\" (UID: \"27829a8a-3c57-41bc-8e2a-361451d19075\") " pod="openstack-kuttl-tests/placement-d448-account-create-update-tf92z" Jan 20 17:17:34 crc kubenswrapper[4558]: I0120 17:17:34.419283 4558 generic.go:334] "Generic (PLEG): container finished" podID="16316176-29c1-476a-8142-6a3427dd7da4" containerID="e753fe9a9ff4e55db2f42e6e4809dd92638bc59c527ff9a16c85cbd5e8061320" exitCode=0 Jan 20 17:17:34 crc 
kubenswrapper[4558]: I0120 17:17:34.419656 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-d448-account-create-update-tf92z" Jan 20 17:17:34 crc kubenswrapper[4558]: I0120 17:17:34.419828 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-ring-rebalance-gms56" event={"ID":"16316176-29c1-476a-8142-6a3427dd7da4","Type":"ContainerDied","Data":"e753fe9a9ff4e55db2f42e6e4809dd92638bc59c527ff9a16c85cbd5e8061320"} Jan 20 17:17:34 crc kubenswrapper[4558]: I0120 17:17:34.460514 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-db-create-9dzpb"] Jan 20 17:17:34 crc kubenswrapper[4558]: I0120 17:17:34.516609 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-f51d-account-create-update-tbxbn"] Jan 20 17:17:34 crc kubenswrapper[4558]: I0120 17:17:34.850991 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/fae781f7-d8cb-4f32-992f-43d7dac82655-etc-swift\") pod \"swift-storage-0\" (UID: \"fae781f7-d8cb-4f32-992f-43d7dac82655\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:17:34 crc kubenswrapper[4558]: I0120 17:17:34.859052 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/fae781f7-d8cb-4f32-992f-43d7dac82655-etc-swift\") pod \"swift-storage-0\" (UID: \"fae781f7-d8cb-4f32-992f-43d7dac82655\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:17:34 crc kubenswrapper[4558]: I0120 17:17:34.944106 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-db-create-jsj6g"] Jan 20 17:17:34 crc kubenswrapper[4558]: W0120 17:17:34.951026 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2fff8a17_65b9_4288_81eb_337010e076f1.slice/crio-0582035dc5209c9549cc2bc8668af4b0dea8d35eaefa2589739a6720845bb828 WatchSource:0}: Error finding container 0582035dc5209c9549cc2bc8668af4b0dea8d35eaefa2589739a6720845bb828: Status 404 returned error can't find the container with id 0582035dc5209c9549cc2bc8668af4b0dea8d35eaefa2589739a6720845bb828 Jan 20 17:17:34 crc kubenswrapper[4558]: I0120 17:17:34.982900 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-fvs5h" Jan 20 17:17:34 crc kubenswrapper[4558]: I0120 17:17:34.991212 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-db-create-9d9jp" Jan 20 17:17:35 crc kubenswrapper[4558]: I0120 17:17:35.001911 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-d448-account-create-update-tf92z"] Jan 20 17:17:35 crc kubenswrapper[4558]: I0120 17:17:35.024787 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-36de-account-create-update-pf4bc" Jan 20 17:17:35 crc kubenswrapper[4558]: I0120 17:17:35.055125 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d9de665d-8b72-45e0-9829-5bd72bd86f41-operator-scripts\") pod \"d9de665d-8b72-45e0-9829-5bd72bd86f41\" (UID: \"d9de665d-8b72-45e0-9829-5bd72bd86f41\") " Jan 20 17:17:35 crc kubenswrapper[4558]: I0120 17:17:35.055325 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n2jpn\" (UniqueName: \"kubernetes.io/projected/2a29c72f-538c-4a48-95a3-1107464f9cb7-kube-api-access-n2jpn\") pod \"2a29c72f-538c-4a48-95a3-1107464f9cb7\" (UID: \"2a29c72f-538c-4a48-95a3-1107464f9cb7\") " Jan 20 17:17:35 crc kubenswrapper[4558]: I0120 17:17:35.055572 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2a29c72f-538c-4a48-95a3-1107464f9cb7-operator-scripts\") pod \"2a29c72f-538c-4a48-95a3-1107464f9cb7\" (UID: \"2a29c72f-538c-4a48-95a3-1107464f9cb7\") " Jan 20 17:17:35 crc kubenswrapper[4558]: I0120 17:17:35.055633 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n66rr\" (UniqueName: \"kubernetes.io/projected/d9de665d-8b72-45e0-9829-5bd72bd86f41-kube-api-access-n66rr\") pod \"d9de665d-8b72-45e0-9829-5bd72bd86f41\" (UID: \"d9de665d-8b72-45e0-9829-5bd72bd86f41\") " Jan 20 17:17:35 crc kubenswrapper[4558]: I0120 17:17:35.055678 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d9de665d-8b72-45e0-9829-5bd72bd86f41-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d9de665d-8b72-45e0-9829-5bd72bd86f41" (UID: "d9de665d-8b72-45e0-9829-5bd72bd86f41"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:17:35 crc kubenswrapper[4558]: I0120 17:17:35.055946 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2a29c72f-538c-4a48-95a3-1107464f9cb7-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "2a29c72f-538c-4a48-95a3-1107464f9cb7" (UID: "2a29c72f-538c-4a48-95a3-1107464f9cb7"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:17:35 crc kubenswrapper[4558]: I0120 17:17:35.056138 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2a29c72f-538c-4a48-95a3-1107464f9cb7-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:17:35 crc kubenswrapper[4558]: I0120 17:17:35.056152 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d9de665d-8b72-45e0-9829-5bd72bd86f41-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:17:35 crc kubenswrapper[4558]: I0120 17:17:35.060978 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d9de665d-8b72-45e0-9829-5bd72bd86f41-kube-api-access-n66rr" (OuterVolumeSpecName: "kube-api-access-n66rr") pod "d9de665d-8b72-45e0-9829-5bd72bd86f41" (UID: "d9de665d-8b72-45e0-9829-5bd72bd86f41"). InnerVolumeSpecName "kube-api-access-n66rr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:17:35 crc kubenswrapper[4558]: I0120 17:17:35.061242 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2a29c72f-538c-4a48-95a3-1107464f9cb7-kube-api-access-n2jpn" (OuterVolumeSpecName: "kube-api-access-n2jpn") pod "2a29c72f-538c-4a48-95a3-1107464f9cb7" (UID: "2a29c72f-538c-4a48-95a3-1107464f9cb7"). InnerVolumeSpecName "kube-api-access-n2jpn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:17:35 crc kubenswrapper[4558]: I0120 17:17:35.083221 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:17:35 crc kubenswrapper[4558]: I0120 17:17:35.157238 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/10cc58f5-333d-4a3b-af1e-ed0efe498d8d-operator-scripts\") pod \"10cc58f5-333d-4a3b-af1e-ed0efe498d8d\" (UID: \"10cc58f5-333d-4a3b-af1e-ed0efe498d8d\") " Jan 20 17:17:35 crc kubenswrapper[4558]: I0120 17:17:35.157327 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c7lz8\" (UniqueName: \"kubernetes.io/projected/10cc58f5-333d-4a3b-af1e-ed0efe498d8d-kube-api-access-c7lz8\") pod \"10cc58f5-333d-4a3b-af1e-ed0efe498d8d\" (UID: \"10cc58f5-333d-4a3b-af1e-ed0efe498d8d\") " Jan 20 17:17:35 crc kubenswrapper[4558]: I0120 17:17:35.157747 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n66rr\" (UniqueName: \"kubernetes.io/projected/d9de665d-8b72-45e0-9829-5bd72bd86f41-kube-api-access-n66rr\") on node \"crc\" DevicePath \"\"" Jan 20 17:17:35 crc kubenswrapper[4558]: I0120 17:17:35.157764 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n2jpn\" (UniqueName: \"kubernetes.io/projected/2a29c72f-538c-4a48-95a3-1107464f9cb7-kube-api-access-n2jpn\") on node \"crc\" DevicePath \"\"" Jan 20 17:17:35 crc kubenswrapper[4558]: I0120 17:17:35.157847 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/10cc58f5-333d-4a3b-af1e-ed0efe498d8d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "10cc58f5-333d-4a3b-af1e-ed0efe498d8d" (UID: "10cc58f5-333d-4a3b-af1e-ed0efe498d8d"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:17:35 crc kubenswrapper[4558]: I0120 17:17:35.162454 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/10cc58f5-333d-4a3b-af1e-ed0efe498d8d-kube-api-access-c7lz8" (OuterVolumeSpecName: "kube-api-access-c7lz8") pod "10cc58f5-333d-4a3b-af1e-ed0efe498d8d" (UID: "10cc58f5-333d-4a3b-af1e-ed0efe498d8d"). InnerVolumeSpecName "kube-api-access-c7lz8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:17:35 crc kubenswrapper[4558]: I0120 17:17:35.261269 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/10cc58f5-333d-4a3b-af1e-ed0efe498d8d-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:17:35 crc kubenswrapper[4558]: I0120 17:17:35.261324 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c7lz8\" (UniqueName: \"kubernetes.io/projected/10cc58f5-333d-4a3b-af1e-ed0efe498d8d-kube-api-access-c7lz8\") on node \"crc\" DevicePath \"\"" Jan 20 17:17:35 crc kubenswrapper[4558]: I0120 17:17:35.434532 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-fvs5h" Jan 20 17:17:35 crc kubenswrapper[4558]: I0120 17:17:35.434524 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-fvs5h" event={"ID":"2a29c72f-538c-4a48-95a3-1107464f9cb7","Type":"ContainerDied","Data":"fceff2b0812c899c17fa7abc9394506b164ee362548569496d6ec053f1e60409"} Jan 20 17:17:35 crc kubenswrapper[4558]: I0120 17:17:35.435157 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fceff2b0812c899c17fa7abc9394506b164ee362548569496d6ec053f1e60409" Jan 20 17:17:35 crc kubenswrapper[4558]: I0120 17:17:35.445651 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-db-create-9d9jp" Jan 20 17:17:35 crc kubenswrapper[4558]: I0120 17:17:35.447450 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-create-9d9jp" event={"ID":"d9de665d-8b72-45e0-9829-5bd72bd86f41","Type":"ContainerDied","Data":"2dd2a4e549971a7c2bdc7c8c2309346dad95f445c3010095c73b3211ed164a91"} Jan 20 17:17:35 crc kubenswrapper[4558]: I0120 17:17:35.447495 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2dd2a4e549971a7c2bdc7c8c2309346dad95f445c3010095c73b3211ed164a91" Jan 20 17:17:35 crc kubenswrapper[4558]: I0120 17:17:35.457040 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-36de-account-create-update-pf4bc" event={"ID":"10cc58f5-333d-4a3b-af1e-ed0efe498d8d","Type":"ContainerDied","Data":"b6e12212b81f94118a20426046235ba1108fc8dd6dcbc838f23a918c1f81a583"} Jan 20 17:17:35 crc kubenswrapper[4558]: I0120 17:17:35.457091 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-36de-account-create-update-pf4bc" Jan 20 17:17:35 crc kubenswrapper[4558]: I0120 17:17:35.457100 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b6e12212b81f94118a20426046235ba1108fc8dd6dcbc838f23a918c1f81a583" Jan 20 17:17:35 crc kubenswrapper[4558]: I0120 17:17:35.458916 4558 generic.go:334] "Generic (PLEG): container finished" podID="27829a8a-3c57-41bc-8e2a-361451d19075" containerID="0592c7c01dbb34374b3da8af2857becf1536c237d8abb2a024508c881acf1773" exitCode=0 Jan 20 17:17:35 crc kubenswrapper[4558]: I0120 17:17:35.458957 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-d448-account-create-update-tf92z" event={"ID":"27829a8a-3c57-41bc-8e2a-361451d19075","Type":"ContainerDied","Data":"0592c7c01dbb34374b3da8af2857becf1536c237d8abb2a024508c881acf1773"} Jan 20 17:17:35 crc kubenswrapper[4558]: I0120 17:17:35.458997 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-d448-account-create-update-tf92z" event={"ID":"27829a8a-3c57-41bc-8e2a-361451d19075","Type":"ContainerStarted","Data":"82b60ee6a24778688e660ba92d403fd37c111813bd7f6791e3698457bb4864b9"} Jan 20 17:17:35 crc kubenswrapper[4558]: I0120 17:17:35.460427 4558 generic.go:334] "Generic (PLEG): container finished" podID="280c8a81-3eab-49fe-b275-d7238dbd7e0f" containerID="11230ed7f43eb0e13b6397efae5fa6ee1ad663e47745015e64507d23add89ab2" exitCode=0 Jan 20 17:17:35 crc kubenswrapper[4558]: I0120 17:17:35.460500 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-create-9dzpb" event={"ID":"280c8a81-3eab-49fe-b275-d7238dbd7e0f","Type":"ContainerDied","Data":"11230ed7f43eb0e13b6397efae5fa6ee1ad663e47745015e64507d23add89ab2"} Jan 20 17:17:35 crc kubenswrapper[4558]: I0120 17:17:35.460575 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-create-9dzpb" event={"ID":"280c8a81-3eab-49fe-b275-d7238dbd7e0f","Type":"ContainerStarted","Data":"c2c05812ef9809e7f8bada8361f99a3950ee34485029e96184205fd36df6d4da"} Jan 20 17:17:35 crc kubenswrapper[4558]: I0120 17:17:35.462298 4558 generic.go:334] "Generic (PLEG): container finished" podID="46ce5c99-2891-494b-a8fe-291f53beeeff" containerID="1ecf6feb620f00f92cee9629762d308a481961f09f2bdfc3ccd5fb800dea3469" exitCode=0 Jan 20 17:17:35 crc kubenswrapper[4558]: I0120 17:17:35.462360 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-f51d-account-create-update-tbxbn" event={"ID":"46ce5c99-2891-494b-a8fe-291f53beeeff","Type":"ContainerDied","Data":"1ecf6feb620f00f92cee9629762d308a481961f09f2bdfc3ccd5fb800dea3469"} Jan 20 17:17:35 crc kubenswrapper[4558]: I0120 17:17:35.462385 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-f51d-account-create-update-tbxbn" event={"ID":"46ce5c99-2891-494b-a8fe-291f53beeeff","Type":"ContainerStarted","Data":"fd2719ce5192a64a13a9abe72c243688b4af8ffb11399220880c13e3b6235fcf"} Jan 20 17:17:35 crc kubenswrapper[4558]: I0120 17:17:35.463730 4558 generic.go:334] "Generic (PLEG): container finished" podID="2fff8a17-65b9-4288-81eb-337010e076f1" containerID="db6664dc5563480a7755e8b6b12b6a68a3945d57a9ccc1fba3ccffe2963a98f7" exitCode=0 Jan 20 17:17:35 crc kubenswrapper[4558]: I0120 17:17:35.463795 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-create-jsj6g" 
event={"ID":"2fff8a17-65b9-4288-81eb-337010e076f1","Type":"ContainerDied","Data":"db6664dc5563480a7755e8b6b12b6a68a3945d57a9ccc1fba3ccffe2963a98f7"} Jan 20 17:17:35 crc kubenswrapper[4558]: I0120 17:17:35.463844 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-create-jsj6g" event={"ID":"2fff8a17-65b9-4288-81eb-337010e076f1","Type":"ContainerStarted","Data":"0582035dc5209c9549cc2bc8668af4b0dea8d35eaefa2589739a6720845bb828"} Jan 20 17:17:35 crc kubenswrapper[4558]: I0120 17:17:35.548157 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-storage-0"] Jan 20 17:17:35 crc kubenswrapper[4558]: I0120 17:17:35.709917 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-ring-rebalance-gms56" Jan 20 17:17:35 crc kubenswrapper[4558]: I0120 17:17:35.772790 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/16316176-29c1-476a-8142-6a3427dd7da4-dispersionconf\") pod \"16316176-29c1-476a-8142-6a3427dd7da4\" (UID: \"16316176-29c1-476a-8142-6a3427dd7da4\") " Jan 20 17:17:35 crc kubenswrapper[4558]: I0120 17:17:35.772843 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/16316176-29c1-476a-8142-6a3427dd7da4-swiftconf\") pod \"16316176-29c1-476a-8142-6a3427dd7da4\" (UID: \"16316176-29c1-476a-8142-6a3427dd7da4\") " Jan 20 17:17:35 crc kubenswrapper[4558]: I0120 17:17:35.772875 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16316176-29c1-476a-8142-6a3427dd7da4-combined-ca-bundle\") pod \"16316176-29c1-476a-8142-6a3427dd7da4\" (UID: \"16316176-29c1-476a-8142-6a3427dd7da4\") " Jan 20 17:17:35 crc kubenswrapper[4558]: I0120 17:17:35.772937 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/16316176-29c1-476a-8142-6a3427dd7da4-ring-data-devices\") pod \"16316176-29c1-476a-8142-6a3427dd7da4\" (UID: \"16316176-29c1-476a-8142-6a3427dd7da4\") " Jan 20 17:17:35 crc kubenswrapper[4558]: I0120 17:17:35.772959 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tlqxb\" (UniqueName: \"kubernetes.io/projected/16316176-29c1-476a-8142-6a3427dd7da4-kube-api-access-tlqxb\") pod \"16316176-29c1-476a-8142-6a3427dd7da4\" (UID: \"16316176-29c1-476a-8142-6a3427dd7da4\") " Jan 20 17:17:35 crc kubenswrapper[4558]: I0120 17:17:35.773023 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/16316176-29c1-476a-8142-6a3427dd7da4-etc-swift\") pod \"16316176-29c1-476a-8142-6a3427dd7da4\" (UID: \"16316176-29c1-476a-8142-6a3427dd7da4\") " Jan 20 17:17:35 crc kubenswrapper[4558]: I0120 17:17:35.773049 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/16316176-29c1-476a-8142-6a3427dd7da4-scripts\") pod \"16316176-29c1-476a-8142-6a3427dd7da4\" (UID: \"16316176-29c1-476a-8142-6a3427dd7da4\") " Jan 20 17:17:35 crc kubenswrapper[4558]: I0120 17:17:35.777394 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/16316176-29c1-476a-8142-6a3427dd7da4-etc-swift" (OuterVolumeSpecName: "etc-swift") pod 
"16316176-29c1-476a-8142-6a3427dd7da4" (UID: "16316176-29c1-476a-8142-6a3427dd7da4"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:17:35 crc kubenswrapper[4558]: I0120 17:17:35.778128 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/16316176-29c1-476a-8142-6a3427dd7da4-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "16316176-29c1-476a-8142-6a3427dd7da4" (UID: "16316176-29c1-476a-8142-6a3427dd7da4"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:17:35 crc kubenswrapper[4558]: I0120 17:17:35.783088 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/16316176-29c1-476a-8142-6a3427dd7da4-kube-api-access-tlqxb" (OuterVolumeSpecName: "kube-api-access-tlqxb") pod "16316176-29c1-476a-8142-6a3427dd7da4" (UID: "16316176-29c1-476a-8142-6a3427dd7da4"). InnerVolumeSpecName "kube-api-access-tlqxb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:17:35 crc kubenswrapper[4558]: I0120 17:17:35.787182 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/16316176-29c1-476a-8142-6a3427dd7da4-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "16316176-29c1-476a-8142-6a3427dd7da4" (UID: "16316176-29c1-476a-8142-6a3427dd7da4"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:17:35 crc kubenswrapper[4558]: I0120 17:17:35.801951 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/16316176-29c1-476a-8142-6a3427dd7da4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "16316176-29c1-476a-8142-6a3427dd7da4" (UID: "16316176-29c1-476a-8142-6a3427dd7da4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:17:35 crc kubenswrapper[4558]: I0120 17:17:35.813323 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/16316176-29c1-476a-8142-6a3427dd7da4-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "16316176-29c1-476a-8142-6a3427dd7da4" (UID: "16316176-29c1-476a-8142-6a3427dd7da4"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:17:35 crc kubenswrapper[4558]: I0120 17:17:35.815597 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/16316176-29c1-476a-8142-6a3427dd7da4-scripts" (OuterVolumeSpecName: "scripts") pod "16316176-29c1-476a-8142-6a3427dd7da4" (UID: "16316176-29c1-476a-8142-6a3427dd7da4"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:17:35 crc kubenswrapper[4558]: I0120 17:17:35.876525 4558 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/16316176-29c1-476a-8142-6a3427dd7da4-ring-data-devices\") on node \"crc\" DevicePath \"\"" Jan 20 17:17:35 crc kubenswrapper[4558]: I0120 17:17:35.876564 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tlqxb\" (UniqueName: \"kubernetes.io/projected/16316176-29c1-476a-8142-6a3427dd7da4-kube-api-access-tlqxb\") on node \"crc\" DevicePath \"\"" Jan 20 17:17:35 crc kubenswrapper[4558]: I0120 17:17:35.876575 4558 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/16316176-29c1-476a-8142-6a3427dd7da4-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 20 17:17:35 crc kubenswrapper[4558]: I0120 17:17:35.876587 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/16316176-29c1-476a-8142-6a3427dd7da4-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:17:35 crc kubenswrapper[4558]: I0120 17:17:35.876597 4558 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/16316176-29c1-476a-8142-6a3427dd7da4-dispersionconf\") on node \"crc\" DevicePath \"\"" Jan 20 17:17:35 crc kubenswrapper[4558]: I0120 17:17:35.876606 4558 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/16316176-29c1-476a-8142-6a3427dd7da4-swiftconf\") on node \"crc\" DevicePath \"\"" Jan 20 17:17:35 crc kubenswrapper[4558]: I0120 17:17:35.876616 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16316176-29c1-476a-8142-6a3427dd7da4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:17:36 crc kubenswrapper[4558]: E0120 17:17:36.190929 4558 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 192.168.25.8:41438->192.168.25.8:43883: write tcp 192.168.25.8:41438->192.168.25.8:43883: write: broken pipe Jan 20 17:17:36 crc kubenswrapper[4558]: I0120 17:17:36.483318 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fae781f7-d8cb-4f32-992f-43d7dac82655","Type":"ContainerStarted","Data":"14b8554fd67e8203eaf876170ae3e7775aa3b4c177bc6900890e44c6bc4a44f4"} Jan 20 17:17:36 crc kubenswrapper[4558]: I0120 17:17:36.483363 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fae781f7-d8cb-4f32-992f-43d7dac82655","Type":"ContainerStarted","Data":"097f5821ec52a17e967649b5b6967edea162a947874924e6600e4d07725cae05"} Jan 20 17:17:36 crc kubenswrapper[4558]: I0120 17:17:36.483374 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fae781f7-d8cb-4f32-992f-43d7dac82655","Type":"ContainerStarted","Data":"03d0eae4935912c7dedaac60a57d7749cb144a4fb7adabc826dffe289fecf3a3"} Jan 20 17:17:36 crc kubenswrapper[4558]: I0120 17:17:36.483384 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fae781f7-d8cb-4f32-992f-43d7dac82655","Type":"ContainerStarted","Data":"04e1cbf4a49ae809f6cd9ad66cf664edf23a039b8a03c43ccc28c839028f90d4"} Jan 20 17:17:36 crc kubenswrapper[4558]: I0120 17:17:36.483393 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fae781f7-d8cb-4f32-992f-43d7dac82655","Type":"ContainerStarted","Data":"449d04837104cda04f1b998b58315c57a3f8dd508073572dfb1cf641a8e99cdf"} Jan 20 17:17:36 crc kubenswrapper[4558]: I0120 17:17:36.483401 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fae781f7-d8cb-4f32-992f-43d7dac82655","Type":"ContainerStarted","Data":"10b8b90c30337c202ffa5efc8376aef571af66cac566f514e6c26f2a3c0a8340"} Jan 20 17:17:36 crc kubenswrapper[4558]: I0120 17:17:36.483410 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fae781f7-d8cb-4f32-992f-43d7dac82655","Type":"ContainerStarted","Data":"fdf580ae6115f467cbfc7843aee98b52dccea02af4027e953b46e84b91359cac"} Jan 20 17:17:36 crc kubenswrapper[4558]: I0120 17:17:36.485498 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-ring-rebalance-gms56" Jan 20 17:17:36 crc kubenswrapper[4558]: I0120 17:17:36.492877 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-ring-rebalance-gms56" event={"ID":"16316176-29c1-476a-8142-6a3427dd7da4","Type":"ContainerDied","Data":"b18e789cd4faad69b62a9cfb7f8085f90e1fe0fef86cdd94fafbc33277bc2f29"} Jan 20 17:17:36 crc kubenswrapper[4558]: I0120 17:17:36.492899 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b18e789cd4faad69b62a9cfb7f8085f90e1fe0fef86cdd94fafbc33277bc2f29" Jan 20 17:17:36 crc kubenswrapper[4558]: I0120 17:17:36.776395 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-db-create-jsj6g" Jan 20 17:17:36 crc kubenswrapper[4558]: I0120 17:17:36.903256 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kcjxw\" (UniqueName: \"kubernetes.io/projected/2fff8a17-65b9-4288-81eb-337010e076f1-kube-api-access-kcjxw\") pod \"2fff8a17-65b9-4288-81eb-337010e076f1\" (UID: \"2fff8a17-65b9-4288-81eb-337010e076f1\") " Jan 20 17:17:36 crc kubenswrapper[4558]: I0120 17:17:36.903350 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2fff8a17-65b9-4288-81eb-337010e076f1-operator-scripts\") pod \"2fff8a17-65b9-4288-81eb-337010e076f1\" (UID: \"2fff8a17-65b9-4288-81eb-337010e076f1\") " Jan 20 17:17:36 crc kubenswrapper[4558]: I0120 17:17:36.904708 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2fff8a17-65b9-4288-81eb-337010e076f1-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "2fff8a17-65b9-4288-81eb-337010e076f1" (UID: "2fff8a17-65b9-4288-81eb-337010e076f1"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:17:36 crc kubenswrapper[4558]: I0120 17:17:36.914384 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2fff8a17-65b9-4288-81eb-337010e076f1-kube-api-access-kcjxw" (OuterVolumeSpecName: "kube-api-access-kcjxw") pod "2fff8a17-65b9-4288-81eb-337010e076f1" (UID: "2fff8a17-65b9-4288-81eb-337010e076f1"). InnerVolumeSpecName "kube-api-access-kcjxw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:17:37 crc kubenswrapper[4558]: I0120 17:17:37.005920 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kcjxw\" (UniqueName: \"kubernetes.io/projected/2fff8a17-65b9-4288-81eb-337010e076f1-kube-api-access-kcjxw\") on node \"crc\" DevicePath \"\"" Jan 20 17:17:37 crc kubenswrapper[4558]: I0120 17:17:37.005953 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2fff8a17-65b9-4288-81eb-337010e076f1-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:17:37 crc kubenswrapper[4558]: I0120 17:17:37.036584 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-d448-account-create-update-tf92z" Jan 20 17:17:37 crc kubenswrapper[4558]: I0120 17:17:37.044835 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-f51d-account-create-update-tbxbn" Jan 20 17:17:37 crc kubenswrapper[4558]: I0120 17:17:37.065941 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-db-create-9dzpb" Jan 20 17:17:37 crc kubenswrapper[4558]: I0120 17:17:37.107883 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/46ce5c99-2891-494b-a8fe-291f53beeeff-operator-scripts\") pod \"46ce5c99-2891-494b-a8fe-291f53beeeff\" (UID: \"46ce5c99-2891-494b-a8fe-291f53beeeff\") " Jan 20 17:17:37 crc kubenswrapper[4558]: I0120 17:17:37.108187 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/27829a8a-3c57-41bc-8e2a-361451d19075-operator-scripts\") pod \"27829a8a-3c57-41bc-8e2a-361451d19075\" (UID: \"27829a8a-3c57-41bc-8e2a-361451d19075\") " Jan 20 17:17:37 crc kubenswrapper[4558]: I0120 17:17:37.108222 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v9rdx\" (UniqueName: \"kubernetes.io/projected/46ce5c99-2891-494b-a8fe-291f53beeeff-kube-api-access-v9rdx\") pod \"46ce5c99-2891-494b-a8fe-291f53beeeff\" (UID: \"46ce5c99-2891-494b-a8fe-291f53beeeff\") " Jan 20 17:17:37 crc kubenswrapper[4558]: I0120 17:17:37.108294 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-svx28\" (UniqueName: \"kubernetes.io/projected/27829a8a-3c57-41bc-8e2a-361451d19075-kube-api-access-svx28\") pod \"27829a8a-3c57-41bc-8e2a-361451d19075\" (UID: \"27829a8a-3c57-41bc-8e2a-361451d19075\") " Jan 20 17:17:37 crc kubenswrapper[4558]: I0120 17:17:37.108716 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/46ce5c99-2891-494b-a8fe-291f53beeeff-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "46ce5c99-2891-494b-a8fe-291f53beeeff" (UID: "46ce5c99-2891-494b-a8fe-291f53beeeff"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:17:37 crc kubenswrapper[4558]: I0120 17:17:37.108795 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/27829a8a-3c57-41bc-8e2a-361451d19075-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "27829a8a-3c57-41bc-8e2a-361451d19075" (UID: "27829a8a-3c57-41bc-8e2a-361451d19075"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:17:37 crc kubenswrapper[4558]: I0120 17:17:37.111712 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/27829a8a-3c57-41bc-8e2a-361451d19075-kube-api-access-svx28" (OuterVolumeSpecName: "kube-api-access-svx28") pod "27829a8a-3c57-41bc-8e2a-361451d19075" (UID: "27829a8a-3c57-41bc-8e2a-361451d19075"). InnerVolumeSpecName "kube-api-access-svx28". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:17:37 crc kubenswrapper[4558]: I0120 17:17:37.112025 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/46ce5c99-2891-494b-a8fe-291f53beeeff-kube-api-access-v9rdx" (OuterVolumeSpecName: "kube-api-access-v9rdx") pod "46ce5c99-2891-494b-a8fe-291f53beeeff" (UID: "46ce5c99-2891-494b-a8fe-291f53beeeff"). InnerVolumeSpecName "kube-api-access-v9rdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:17:37 crc kubenswrapper[4558]: I0120 17:17:37.210215 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/280c8a81-3eab-49fe-b275-d7238dbd7e0f-operator-scripts\") pod \"280c8a81-3eab-49fe-b275-d7238dbd7e0f\" (UID: \"280c8a81-3eab-49fe-b275-d7238dbd7e0f\") " Jan 20 17:17:37 crc kubenswrapper[4558]: I0120 17:17:37.210367 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vhtjj\" (UniqueName: \"kubernetes.io/projected/280c8a81-3eab-49fe-b275-d7238dbd7e0f-kube-api-access-vhtjj\") pod \"280c8a81-3eab-49fe-b275-d7238dbd7e0f\" (UID: \"280c8a81-3eab-49fe-b275-d7238dbd7e0f\") " Jan 20 17:17:37 crc kubenswrapper[4558]: I0120 17:17:37.210635 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/280c8a81-3eab-49fe-b275-d7238dbd7e0f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "280c8a81-3eab-49fe-b275-d7238dbd7e0f" (UID: "280c8a81-3eab-49fe-b275-d7238dbd7e0f"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:17:37 crc kubenswrapper[4558]: I0120 17:17:37.211019 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/46ce5c99-2891-494b-a8fe-291f53beeeff-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:17:37 crc kubenswrapper[4558]: I0120 17:17:37.211040 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/27829a8a-3c57-41bc-8e2a-361451d19075-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:17:37 crc kubenswrapper[4558]: I0120 17:17:37.211051 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v9rdx\" (UniqueName: \"kubernetes.io/projected/46ce5c99-2891-494b-a8fe-291f53beeeff-kube-api-access-v9rdx\") on node \"crc\" DevicePath \"\"" Jan 20 17:17:37 crc kubenswrapper[4558]: I0120 17:17:37.211064 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-svx28\" (UniqueName: \"kubernetes.io/projected/27829a8a-3c57-41bc-8e2a-361451d19075-kube-api-access-svx28\") on node \"crc\" DevicePath \"\"" Jan 20 17:17:37 crc kubenswrapper[4558]: I0120 17:17:37.211072 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/280c8a81-3eab-49fe-b275-d7238dbd7e0f-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:17:37 crc kubenswrapper[4558]: I0120 17:17:37.213365 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/280c8a81-3eab-49fe-b275-d7238dbd7e0f-kube-api-access-vhtjj" (OuterVolumeSpecName: "kube-api-access-vhtjj") pod "280c8a81-3eab-49fe-b275-d7238dbd7e0f" (UID: "280c8a81-3eab-49fe-b275-d7238dbd7e0f"). InnerVolumeSpecName "kube-api-access-vhtjj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:17:37 crc kubenswrapper[4558]: I0120 17:17:37.312475 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vhtjj\" (UniqueName: \"kubernetes.io/projected/280c8a81-3eab-49fe-b275-d7238dbd7e0f-kube-api-access-vhtjj\") on node \"crc\" DevicePath \"\"" Jan 20 17:17:37 crc kubenswrapper[4558]: I0120 17:17:37.386015 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-fvs5h"] Jan 20 17:17:37 crc kubenswrapper[4558]: I0120 17:17:37.390600 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-fvs5h"] Jan 20 17:17:37 crc kubenswrapper[4558]: I0120 17:17:37.495833 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-create-9dzpb" event={"ID":"280c8a81-3eab-49fe-b275-d7238dbd7e0f","Type":"ContainerDied","Data":"c2c05812ef9809e7f8bada8361f99a3950ee34485029e96184205fd36df6d4da"} Jan 20 17:17:37 crc kubenswrapper[4558]: I0120 17:17:37.495888 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c2c05812ef9809e7f8bada8361f99a3950ee34485029e96184205fd36df6d4da" Jan 20 17:17:37 crc kubenswrapper[4558]: I0120 17:17:37.495897 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-db-create-9dzpb" Jan 20 17:17:37 crc kubenswrapper[4558]: I0120 17:17:37.497667 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-f51d-account-create-update-tbxbn" event={"ID":"46ce5c99-2891-494b-a8fe-291f53beeeff","Type":"ContainerDied","Data":"fd2719ce5192a64a13a9abe72c243688b4af8ffb11399220880c13e3b6235fcf"} Jan 20 17:17:37 crc kubenswrapper[4558]: I0120 17:17:37.497692 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-f51d-account-create-update-tbxbn" Jan 20 17:17:37 crc kubenswrapper[4558]: I0120 17:17:37.497711 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fd2719ce5192a64a13a9abe72c243688b4af8ffb11399220880c13e3b6235fcf" Jan 20 17:17:37 crc kubenswrapper[4558]: I0120 17:17:37.500316 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-create-jsj6g" event={"ID":"2fff8a17-65b9-4288-81eb-337010e076f1","Type":"ContainerDied","Data":"0582035dc5209c9549cc2bc8668af4b0dea8d35eaefa2589739a6720845bb828"} Jan 20 17:17:37 crc kubenswrapper[4558]: I0120 17:17:37.500336 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0582035dc5209c9549cc2bc8668af4b0dea8d35eaefa2589739a6720845bb828" Jan 20 17:17:37 crc kubenswrapper[4558]: I0120 17:17:37.500356 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-db-create-jsj6g" Jan 20 17:17:37 crc kubenswrapper[4558]: I0120 17:17:37.505677 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fae781f7-d8cb-4f32-992f-43d7dac82655","Type":"ContainerStarted","Data":"1a43e8cd885dce5f981539c99de1df7006b77acee5bfe4dc7aee4e81c1433375"} Jan 20 17:17:37 crc kubenswrapper[4558]: I0120 17:17:37.505707 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fae781f7-d8cb-4f32-992f-43d7dac82655","Type":"ContainerStarted","Data":"cbd610baf4709c4651944a2af3a0c48b9c204a1089c08055228c74c96f5c28b9"} Jan 20 17:17:37 crc kubenswrapper[4558]: I0120 17:17:37.505716 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fae781f7-d8cb-4f32-992f-43d7dac82655","Type":"ContainerStarted","Data":"617b43e1502aaece76174ffd472b995e859c9b75744441831fd4fbd240eacd18"} Jan 20 17:17:37 crc kubenswrapper[4558]: I0120 17:17:37.505725 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fae781f7-d8cb-4f32-992f-43d7dac82655","Type":"ContainerStarted","Data":"e739685a9bc530c37217e2f6a62d401027fed825626a553b5d0e17838ebce2ab"} Jan 20 17:17:37 crc kubenswrapper[4558]: I0120 17:17:37.505733 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fae781f7-d8cb-4f32-992f-43d7dac82655","Type":"ContainerStarted","Data":"ba569f18d36a42b3a1ada97f895e8e6f12935ce1058001df0619906556fc1d82"} Jan 20 17:17:37 crc kubenswrapper[4558]: I0120 17:17:37.505742 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fae781f7-d8cb-4f32-992f-43d7dac82655","Type":"ContainerStarted","Data":"844d6ed85e1a90ecc3e801bcad9126bb9e1b7e1969ac475bcae5bcc3fac3d05e"} Jan 20 17:17:37 crc kubenswrapper[4558]: I0120 17:17:37.506987 4558 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-d448-account-create-update-tf92z" event={"ID":"27829a8a-3c57-41bc-8e2a-361451d19075","Type":"ContainerDied","Data":"82b60ee6a24778688e660ba92d403fd37c111813bd7f6791e3698457bb4864b9"} Jan 20 17:17:37 crc kubenswrapper[4558]: I0120 17:17:37.507008 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="82b60ee6a24778688e660ba92d403fd37c111813bd7f6791e3698457bb4864b9" Jan 20 17:17:37 crc kubenswrapper[4558]: I0120 17:17:37.507060 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-d448-account-create-update-tf92z" Jan 20 17:17:38 crc kubenswrapper[4558]: I0120 17:17:38.525385 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fae781f7-d8cb-4f32-992f-43d7dac82655","Type":"ContainerStarted","Data":"1153f574b016e23743a03a0a3af75cf83aa887f85ab8a9f604b6d0beb5416c91"} Jan 20 17:17:38 crc kubenswrapper[4558]: I0120 17:17:38.525809 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fae781f7-d8cb-4f32-992f-43d7dac82655","Type":"ContainerStarted","Data":"4f8e0074decabf3ae64fdf9bf0b1b9bfcd33bc833a5040505bdb4f80fb68ce7c"} Jan 20 17:17:38 crc kubenswrapper[4558]: I0120 17:17:38.525825 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fae781f7-d8cb-4f32-992f-43d7dac82655","Type":"ContainerStarted","Data":"0625dd3b638d8b981039c7b40441b9f3eb96afe32772f7a668fc9f6739b2ce43"} Jan 20 17:17:38 crc kubenswrapper[4558]: I0120 17:17:38.564193 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/swift-storage-0" podStartSLOduration=13.564151159 podStartE2EDuration="13.564151159s" podCreationTimestamp="2026-01-20 17:17:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:17:38.552991062 +0000 UTC m=+2152.313329029" watchObservedRunningTime="2026-01-20 17:17:38.564151159 +0000 UTC m=+2152.324489125" Jan 20 17:17:38 crc kubenswrapper[4558]: I0120 17:17:38.576003 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2a29c72f-538c-4a48-95a3-1107464f9cb7" path="/var/lib/kubelet/pods/2a29c72f-538c-4a48-95a3-1107464f9cb7/volumes" Jan 20 17:17:38 crc kubenswrapper[4558]: I0120 17:17:38.707264 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-7745f4bdcc-fcqxh"] Jan 20 17:17:38 crc kubenswrapper[4558]: E0120 17:17:38.707710 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2fff8a17-65b9-4288-81eb-337010e076f1" containerName="mariadb-database-create" Jan 20 17:17:38 crc kubenswrapper[4558]: I0120 17:17:38.707728 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="2fff8a17-65b9-4288-81eb-337010e076f1" containerName="mariadb-database-create" Jan 20 17:17:38 crc kubenswrapper[4558]: E0120 17:17:38.707750 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27829a8a-3c57-41bc-8e2a-361451d19075" containerName="mariadb-account-create-update" Jan 20 17:17:38 crc kubenswrapper[4558]: I0120 17:17:38.707757 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="27829a8a-3c57-41bc-8e2a-361451d19075" containerName="mariadb-account-create-update" Jan 20 17:17:38 crc kubenswrapper[4558]: E0120 17:17:38.707778 4558 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="16316176-29c1-476a-8142-6a3427dd7da4" containerName="swift-ring-rebalance" Jan 20 17:17:38 crc kubenswrapper[4558]: I0120 17:17:38.707784 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="16316176-29c1-476a-8142-6a3427dd7da4" containerName="swift-ring-rebalance" Jan 20 17:17:38 crc kubenswrapper[4558]: E0120 17:17:38.707790 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46ce5c99-2891-494b-a8fe-291f53beeeff" containerName="mariadb-account-create-update" Jan 20 17:17:38 crc kubenswrapper[4558]: I0120 17:17:38.707796 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="46ce5c99-2891-494b-a8fe-291f53beeeff" containerName="mariadb-account-create-update" Jan 20 17:17:38 crc kubenswrapper[4558]: E0120 17:17:38.707806 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="280c8a81-3eab-49fe-b275-d7238dbd7e0f" containerName="mariadb-database-create" Jan 20 17:17:38 crc kubenswrapper[4558]: I0120 17:17:38.707813 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="280c8a81-3eab-49fe-b275-d7238dbd7e0f" containerName="mariadb-database-create" Jan 20 17:17:38 crc kubenswrapper[4558]: E0120 17:17:38.707828 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10cc58f5-333d-4a3b-af1e-ed0efe498d8d" containerName="mariadb-account-create-update" Jan 20 17:17:38 crc kubenswrapper[4558]: I0120 17:17:38.707835 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="10cc58f5-333d-4a3b-af1e-ed0efe498d8d" containerName="mariadb-account-create-update" Jan 20 17:17:38 crc kubenswrapper[4558]: E0120 17:17:38.707842 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a29c72f-538c-4a48-95a3-1107464f9cb7" containerName="mariadb-account-create-update" Jan 20 17:17:38 crc kubenswrapper[4558]: I0120 17:17:38.707848 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a29c72f-538c-4a48-95a3-1107464f9cb7" containerName="mariadb-account-create-update" Jan 20 17:17:38 crc kubenswrapper[4558]: E0120 17:17:38.707867 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9de665d-8b72-45e0-9829-5bd72bd86f41" containerName="mariadb-database-create" Jan 20 17:17:38 crc kubenswrapper[4558]: I0120 17:17:38.707872 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9de665d-8b72-45e0-9829-5bd72bd86f41" containerName="mariadb-database-create" Jan 20 17:17:38 crc kubenswrapper[4558]: I0120 17:17:38.708028 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="2a29c72f-538c-4a48-95a3-1107464f9cb7" containerName="mariadb-account-create-update" Jan 20 17:17:38 crc kubenswrapper[4558]: I0120 17:17:38.708039 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="16316176-29c1-476a-8142-6a3427dd7da4" containerName="swift-ring-rebalance" Jan 20 17:17:38 crc kubenswrapper[4558]: I0120 17:17:38.708051 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="46ce5c99-2891-494b-a8fe-291f53beeeff" containerName="mariadb-account-create-update" Jan 20 17:17:38 crc kubenswrapper[4558]: I0120 17:17:38.708061 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="2fff8a17-65b9-4288-81eb-337010e076f1" containerName="mariadb-database-create" Jan 20 17:17:38 crc kubenswrapper[4558]: I0120 17:17:38.708070 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="10cc58f5-333d-4a3b-af1e-ed0efe498d8d" containerName="mariadb-account-create-update" Jan 20 17:17:38 crc kubenswrapper[4558]: I0120 17:17:38.708079 
4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="280c8a81-3eab-49fe-b275-d7238dbd7e0f" containerName="mariadb-database-create" Jan 20 17:17:38 crc kubenswrapper[4558]: I0120 17:17:38.708086 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="27829a8a-3c57-41bc-8e2a-361451d19075" containerName="mariadb-account-create-update" Jan 20 17:17:38 crc kubenswrapper[4558]: I0120 17:17:38.708099 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d9de665d-8b72-45e0-9829-5bd72bd86f41" containerName="mariadb-database-create" Jan 20 17:17:38 crc kubenswrapper[4558]: I0120 17:17:38.709125 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7745f4bdcc-fcqxh" Jan 20 17:17:38 crc kubenswrapper[4558]: I0120 17:17:38.712465 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"dns-swift-storage-0" Jan 20 17:17:38 crc kubenswrapper[4558]: I0120 17:17:38.713035 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-7745f4bdcc-fcqxh"] Jan 20 17:17:38 crc kubenswrapper[4558]: I0120 17:17:38.839077 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vqfkw\" (UniqueName: \"kubernetes.io/projected/67faa384-2157-4ce6-a5b8-aa2c3f4c3b20-kube-api-access-vqfkw\") pod \"dnsmasq-dnsmasq-7745f4bdcc-fcqxh\" (UID: \"67faa384-2157-4ce6-a5b8-aa2c3f4c3b20\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7745f4bdcc-fcqxh" Jan 20 17:17:38 crc kubenswrapper[4558]: I0120 17:17:38.839235 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/67faa384-2157-4ce6-a5b8-aa2c3f4c3b20-dns-swift-storage-0\") pod \"dnsmasq-dnsmasq-7745f4bdcc-fcqxh\" (UID: \"67faa384-2157-4ce6-a5b8-aa2c3f4c3b20\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7745f4bdcc-fcqxh" Jan 20 17:17:38 crc kubenswrapper[4558]: I0120 17:17:38.839268 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/67faa384-2157-4ce6-a5b8-aa2c3f4c3b20-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-7745f4bdcc-fcqxh\" (UID: \"67faa384-2157-4ce6-a5b8-aa2c3f4c3b20\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7745f4bdcc-fcqxh" Jan 20 17:17:38 crc kubenswrapper[4558]: I0120 17:17:38.839504 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/67faa384-2157-4ce6-a5b8-aa2c3f4c3b20-config\") pod \"dnsmasq-dnsmasq-7745f4bdcc-fcqxh\" (UID: \"67faa384-2157-4ce6-a5b8-aa2c3f4c3b20\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7745f4bdcc-fcqxh" Jan 20 17:17:38 crc kubenswrapper[4558]: I0120 17:17:38.941996 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/67faa384-2157-4ce6-a5b8-aa2c3f4c3b20-config\") pod \"dnsmasq-dnsmasq-7745f4bdcc-fcqxh\" (UID: \"67faa384-2157-4ce6-a5b8-aa2c3f4c3b20\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7745f4bdcc-fcqxh" Jan 20 17:17:38 crc kubenswrapper[4558]: I0120 17:17:38.942146 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vqfkw\" (UniqueName: \"kubernetes.io/projected/67faa384-2157-4ce6-a5b8-aa2c3f4c3b20-kube-api-access-vqfkw\") pod \"dnsmasq-dnsmasq-7745f4bdcc-fcqxh\" 
(UID: \"67faa384-2157-4ce6-a5b8-aa2c3f4c3b20\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7745f4bdcc-fcqxh" Jan 20 17:17:38 crc kubenswrapper[4558]: I0120 17:17:38.942344 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/67faa384-2157-4ce6-a5b8-aa2c3f4c3b20-dns-swift-storage-0\") pod \"dnsmasq-dnsmasq-7745f4bdcc-fcqxh\" (UID: \"67faa384-2157-4ce6-a5b8-aa2c3f4c3b20\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7745f4bdcc-fcqxh" Jan 20 17:17:38 crc kubenswrapper[4558]: I0120 17:17:38.942377 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/67faa384-2157-4ce6-a5b8-aa2c3f4c3b20-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-7745f4bdcc-fcqxh\" (UID: \"67faa384-2157-4ce6-a5b8-aa2c3f4c3b20\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7745f4bdcc-fcqxh" Jan 20 17:17:38 crc kubenswrapper[4558]: I0120 17:17:38.942911 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/67faa384-2157-4ce6-a5b8-aa2c3f4c3b20-config\") pod \"dnsmasq-dnsmasq-7745f4bdcc-fcqxh\" (UID: \"67faa384-2157-4ce6-a5b8-aa2c3f4c3b20\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7745f4bdcc-fcqxh" Jan 20 17:17:38 crc kubenswrapper[4558]: I0120 17:17:38.943300 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/67faa384-2157-4ce6-a5b8-aa2c3f4c3b20-dns-swift-storage-0\") pod \"dnsmasq-dnsmasq-7745f4bdcc-fcqxh\" (UID: \"67faa384-2157-4ce6-a5b8-aa2c3f4c3b20\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7745f4bdcc-fcqxh" Jan 20 17:17:38 crc kubenswrapper[4558]: I0120 17:17:38.943309 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/67faa384-2157-4ce6-a5b8-aa2c3f4c3b20-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-7745f4bdcc-fcqxh\" (UID: \"67faa384-2157-4ce6-a5b8-aa2c3f4c3b20\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7745f4bdcc-fcqxh" Jan 20 17:17:38 crc kubenswrapper[4558]: I0120 17:17:38.960710 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vqfkw\" (UniqueName: \"kubernetes.io/projected/67faa384-2157-4ce6-a5b8-aa2c3f4c3b20-kube-api-access-vqfkw\") pod \"dnsmasq-dnsmasq-7745f4bdcc-fcqxh\" (UID: \"67faa384-2157-4ce6-a5b8-aa2c3f4c3b20\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7745f4bdcc-fcqxh" Jan 20 17:17:39 crc kubenswrapper[4558]: I0120 17:17:39.046095 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7745f4bdcc-fcqxh" Jan 20 17:17:39 crc kubenswrapper[4558]: I0120 17:17:39.460609 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-7745f4bdcc-fcqxh"] Jan 20 17:17:39 crc kubenswrapper[4558]: I0120 17:17:39.515095 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-db-sync-2cpfk"] Jan 20 17:17:39 crc kubenswrapper[4558]: I0120 17:17:39.516611 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-db-sync-2cpfk" Jan 20 17:17:39 crc kubenswrapper[4558]: I0120 17:17:39.518072 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-glance-dockercfg-d2wnn" Jan 20 17:17:39 crc kubenswrapper[4558]: I0120 17:17:39.518217 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-config-data" Jan 20 17:17:39 crc kubenswrapper[4558]: I0120 17:17:39.522687 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-db-sync-2cpfk"] Jan 20 17:17:39 crc kubenswrapper[4558]: I0120 17:17:39.541843 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7745f4bdcc-fcqxh" event={"ID":"67faa384-2157-4ce6-a5b8-aa2c3f4c3b20","Type":"ContainerStarted","Data":"3a8af5ecec070e434a30868de7adfa60aa4596669a14322dcdd042ee4952c366"} Jan 20 17:17:39 crc kubenswrapper[4558]: I0120 17:17:39.661738 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11af03de-aac9-4531-9c0f-af093ba4d7ea-combined-ca-bundle\") pod \"glance-db-sync-2cpfk\" (UID: \"11af03de-aac9-4531-9c0f-af093ba4d7ea\") " pod="openstack-kuttl-tests/glance-db-sync-2cpfk" Jan 20 17:17:39 crc kubenswrapper[4558]: I0120 17:17:39.661910 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/11af03de-aac9-4531-9c0f-af093ba4d7ea-db-sync-config-data\") pod \"glance-db-sync-2cpfk\" (UID: \"11af03de-aac9-4531-9c0f-af093ba4d7ea\") " pod="openstack-kuttl-tests/glance-db-sync-2cpfk" Jan 20 17:17:39 crc kubenswrapper[4558]: I0120 17:17:39.662081 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6649j\" (UniqueName: \"kubernetes.io/projected/11af03de-aac9-4531-9c0f-af093ba4d7ea-kube-api-access-6649j\") pod \"glance-db-sync-2cpfk\" (UID: \"11af03de-aac9-4531-9c0f-af093ba4d7ea\") " pod="openstack-kuttl-tests/glance-db-sync-2cpfk" Jan 20 17:17:39 crc kubenswrapper[4558]: I0120 17:17:39.662113 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/11af03de-aac9-4531-9c0f-af093ba4d7ea-config-data\") pod \"glance-db-sync-2cpfk\" (UID: \"11af03de-aac9-4531-9c0f-af093ba4d7ea\") " pod="openstack-kuttl-tests/glance-db-sync-2cpfk" Jan 20 17:17:39 crc kubenswrapper[4558]: I0120 17:17:39.711953 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/openstack-cell1-galera-1" Jan 20 17:17:39 crc kubenswrapper[4558]: I0120 17:17:39.764078 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11af03de-aac9-4531-9c0f-af093ba4d7ea-combined-ca-bundle\") pod \"glance-db-sync-2cpfk\" (UID: \"11af03de-aac9-4531-9c0f-af093ba4d7ea\") " pod="openstack-kuttl-tests/glance-db-sync-2cpfk" Jan 20 17:17:39 crc kubenswrapper[4558]: I0120 17:17:39.764280 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/11af03de-aac9-4531-9c0f-af093ba4d7ea-db-sync-config-data\") pod \"glance-db-sync-2cpfk\" (UID: \"11af03de-aac9-4531-9c0f-af093ba4d7ea\") " pod="openstack-kuttl-tests/glance-db-sync-2cpfk" Jan 20 17:17:39 
crc kubenswrapper[4558]: I0120 17:17:39.764369 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6649j\" (UniqueName: \"kubernetes.io/projected/11af03de-aac9-4531-9c0f-af093ba4d7ea-kube-api-access-6649j\") pod \"glance-db-sync-2cpfk\" (UID: \"11af03de-aac9-4531-9c0f-af093ba4d7ea\") " pod="openstack-kuttl-tests/glance-db-sync-2cpfk" Jan 20 17:17:39 crc kubenswrapper[4558]: I0120 17:17:39.764403 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/11af03de-aac9-4531-9c0f-af093ba4d7ea-config-data\") pod \"glance-db-sync-2cpfk\" (UID: \"11af03de-aac9-4531-9c0f-af093ba4d7ea\") " pod="openstack-kuttl-tests/glance-db-sync-2cpfk" Jan 20 17:17:39 crc kubenswrapper[4558]: I0120 17:17:39.769238 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/11af03de-aac9-4531-9c0f-af093ba4d7ea-config-data\") pod \"glance-db-sync-2cpfk\" (UID: \"11af03de-aac9-4531-9c0f-af093ba4d7ea\") " pod="openstack-kuttl-tests/glance-db-sync-2cpfk" Jan 20 17:17:39 crc kubenswrapper[4558]: I0120 17:17:39.769462 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11af03de-aac9-4531-9c0f-af093ba4d7ea-combined-ca-bundle\") pod \"glance-db-sync-2cpfk\" (UID: \"11af03de-aac9-4531-9c0f-af093ba4d7ea\") " pod="openstack-kuttl-tests/glance-db-sync-2cpfk" Jan 20 17:17:39 crc kubenswrapper[4558]: I0120 17:17:39.769665 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/11af03de-aac9-4531-9c0f-af093ba4d7ea-db-sync-config-data\") pod \"glance-db-sync-2cpfk\" (UID: \"11af03de-aac9-4531-9c0f-af093ba4d7ea\") " pod="openstack-kuttl-tests/glance-db-sync-2cpfk" Jan 20 17:17:39 crc kubenswrapper[4558]: I0120 17:17:39.781219 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:17:39 crc kubenswrapper[4558]: I0120 17:17:39.783399 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/openstack-cell1-galera-1" Jan 20 17:17:39 crc kubenswrapper[4558]: I0120 17:17:39.783400 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6649j\" (UniqueName: \"kubernetes.io/projected/11af03de-aac9-4531-9c0f-af093ba4d7ea-kube-api-access-6649j\") pod \"glance-db-sync-2cpfk\" (UID: \"11af03de-aac9-4531-9c0f-af093ba4d7ea\") " pod="openstack-kuttl-tests/glance-db-sync-2cpfk" Jan 20 17:17:39 crc kubenswrapper[4558]: I0120 17:17:39.863930 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-db-sync-2cpfk" Jan 20 17:17:39 crc kubenswrapper[4558]: I0120 17:17:39.876013 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:17:40 crc kubenswrapper[4558]: I0120 17:17:40.282220 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-db-sync-2cpfk"] Jan 20 17:17:40 crc kubenswrapper[4558]: W0120 17:17:40.284478 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod11af03de_aac9_4531_9c0f_af093ba4d7ea.slice/crio-e0679a4a01abe0bd4fbc94f7c9693788c64d4a787d1a54d2d8c5d73dd49a6aaa WatchSource:0}: Error finding container e0679a4a01abe0bd4fbc94f7c9693788c64d4a787d1a54d2d8c5d73dd49a6aaa: Status 404 returned error can't find the container with id e0679a4a01abe0bd4fbc94f7c9693788c64d4a787d1a54d2d8c5d73dd49a6aaa Jan 20 17:17:40 crc kubenswrapper[4558]: I0120 17:17:40.554565 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-sync-2cpfk" event={"ID":"11af03de-aac9-4531-9c0f-af093ba4d7ea","Type":"ContainerStarted","Data":"e0679a4a01abe0bd4fbc94f7c9693788c64d4a787d1a54d2d8c5d73dd49a6aaa"} Jan 20 17:17:40 crc kubenswrapper[4558]: I0120 17:17:40.557270 4558 generic.go:334] "Generic (PLEG): container finished" podID="67faa384-2157-4ce6-a5b8-aa2c3f4c3b20" containerID="755e40a8ef22a5dcd70c9d8fcf4e97011ab26ded09be3df19c27930830005f51" exitCode=0 Jan 20 17:17:40 crc kubenswrapper[4558]: I0120 17:17:40.557541 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7745f4bdcc-fcqxh" event={"ID":"67faa384-2157-4ce6-a5b8-aa2c3f4c3b20","Type":"ContainerDied","Data":"755e40a8ef22a5dcd70c9d8fcf4e97011ab26ded09be3df19c27930830005f51"} Jan 20 17:17:41 crc kubenswrapper[4558]: I0120 17:17:41.570603 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7745f4bdcc-fcqxh" event={"ID":"67faa384-2157-4ce6-a5b8-aa2c3f4c3b20","Type":"ContainerStarted","Data":"c2105e3b7033a3789680ee649e20d2039972ecf47b6e0b9633d48c2e12154cef"} Jan 20 17:17:41 crc kubenswrapper[4558]: I0120 17:17:41.571116 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7745f4bdcc-fcqxh" Jan 20 17:17:41 crc kubenswrapper[4558]: I0120 17:17:41.573778 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-sync-2cpfk" event={"ID":"11af03de-aac9-4531-9c0f-af093ba4d7ea","Type":"ContainerStarted","Data":"fc125d4ae84f126fbe53fa24fdba0fd63039bffb4d5927be5e014a9e4f598ab4"} Jan 20 17:17:41 crc kubenswrapper[4558]: I0120 17:17:41.618766 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7745f4bdcc-fcqxh" podStartSLOduration=3.618723623 podStartE2EDuration="3.618723623s" podCreationTimestamp="2026-01-20 17:17:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:17:41.591788592 +0000 UTC m=+2155.352126559" watchObservedRunningTime="2026-01-20 17:17:41.618723623 +0000 UTC m=+2155.379061591" Jan 20 17:17:41 crc kubenswrapper[4558]: I0120 17:17:41.628733 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-db-sync-2cpfk" podStartSLOduration=2.628713099 podStartE2EDuration="2.628713099s" 
podCreationTimestamp="2026-01-20 17:17:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:17:41.623299398 +0000 UTC m=+2155.383637364" watchObservedRunningTime="2026-01-20 17:17:41.628713099 +0000 UTC m=+2155.389051066" Jan 20 17:17:42 crc kubenswrapper[4558]: I0120 17:17:42.209691 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:17:42 crc kubenswrapper[4558]: I0120 17:17:42.273146 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:17:42 crc kubenswrapper[4558]: I0120 17:17:42.396955 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/root-account-create-update-g65fk"] Jan 20 17:17:42 crc kubenswrapper[4558]: I0120 17:17:42.398347 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-g65fk" Jan 20 17:17:42 crc kubenswrapper[4558]: I0120 17:17:42.403781 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"openstack-cell1-mariadb-root-db-secret" Jan 20 17:17:42 crc kubenswrapper[4558]: I0120 17:17:42.416181 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-g65fk"] Jan 20 17:17:42 crc kubenswrapper[4558]: I0120 17:17:42.524397 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0f1083f8-2d2c-4ab0-8382-7e8d14713532-operator-scripts\") pod \"root-account-create-update-g65fk\" (UID: \"0f1083f8-2d2c-4ab0-8382-7e8d14713532\") " pod="openstack-kuttl-tests/root-account-create-update-g65fk" Jan 20 17:17:42 crc kubenswrapper[4558]: I0120 17:17:42.524933 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-45rrs\" (UniqueName: \"kubernetes.io/projected/0f1083f8-2d2c-4ab0-8382-7e8d14713532-kube-api-access-45rrs\") pod \"root-account-create-update-g65fk\" (UID: \"0f1083f8-2d2c-4ab0-8382-7e8d14713532\") " pod="openstack-kuttl-tests/root-account-create-update-g65fk" Jan 20 17:17:42 crc kubenswrapper[4558]: I0120 17:17:42.627008 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0f1083f8-2d2c-4ab0-8382-7e8d14713532-operator-scripts\") pod \"root-account-create-update-g65fk\" (UID: \"0f1083f8-2d2c-4ab0-8382-7e8d14713532\") " pod="openstack-kuttl-tests/root-account-create-update-g65fk" Jan 20 17:17:42 crc kubenswrapper[4558]: I0120 17:17:42.627142 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-45rrs\" (UniqueName: \"kubernetes.io/projected/0f1083f8-2d2c-4ab0-8382-7e8d14713532-kube-api-access-45rrs\") pod \"root-account-create-update-g65fk\" (UID: \"0f1083f8-2d2c-4ab0-8382-7e8d14713532\") " pod="openstack-kuttl-tests/root-account-create-update-g65fk" Jan 20 17:17:42 crc kubenswrapper[4558]: I0120 17:17:42.629300 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0f1083f8-2d2c-4ab0-8382-7e8d14713532-operator-scripts\") pod \"root-account-create-update-g65fk\" (UID: \"0f1083f8-2d2c-4ab0-8382-7e8d14713532\") " 
pod="openstack-kuttl-tests/root-account-create-update-g65fk" Jan 20 17:17:42 crc kubenswrapper[4558]: I0120 17:17:42.629434 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:17:42 crc kubenswrapper[4558]: I0120 17:17:42.650684 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-45rrs\" (UniqueName: \"kubernetes.io/projected/0f1083f8-2d2c-4ab0-8382-7e8d14713532-kube-api-access-45rrs\") pod \"root-account-create-update-g65fk\" (UID: \"0f1083f8-2d2c-4ab0-8382-7e8d14713532\") " pod="openstack-kuttl-tests/root-account-create-update-g65fk" Jan 20 17:17:42 crc kubenswrapper[4558]: I0120 17:17:42.728901 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-g65fk" Jan 20 17:17:43 crc kubenswrapper[4558]: I0120 17:17:43.126413 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-g65fk"] Jan 20 17:17:43 crc kubenswrapper[4558]: W0120 17:17:43.128875 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0f1083f8_2d2c_4ab0_8382_7e8d14713532.slice/crio-ad6e195ac480b80fff0424d6c043aa74e83ea69879907decd295c84a8888f88e WatchSource:0}: Error finding container ad6e195ac480b80fff0424d6c043aa74e83ea69879907decd295c84a8888f88e: Status 404 returned error can't find the container with id ad6e195ac480b80fff0424d6c043aa74e83ea69879907decd295c84a8888f88e Jan 20 17:17:43 crc kubenswrapper[4558]: I0120 17:17:43.597855 4558 generic.go:334] "Generic (PLEG): container finished" podID="0f1083f8-2d2c-4ab0-8382-7e8d14713532" containerID="44b59240e6f876632cec3355c15724897d903c1c71ba44d49a8fa7096be4ce9e" exitCode=0 Jan 20 17:17:43 crc kubenswrapper[4558]: I0120 17:17:43.597922 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-g65fk" event={"ID":"0f1083f8-2d2c-4ab0-8382-7e8d14713532","Type":"ContainerDied","Data":"44b59240e6f876632cec3355c15724897d903c1c71ba44d49a8fa7096be4ce9e"} Jan 20 17:17:43 crc kubenswrapper[4558]: I0120 17:17:43.597995 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-g65fk" event={"ID":"0f1083f8-2d2c-4ab0-8382-7e8d14713532","Type":"ContainerStarted","Data":"ad6e195ac480b80fff0424d6c043aa74e83ea69879907decd295c84a8888f88e"} Jan 20 17:17:43 crc kubenswrapper[4558]: I0120 17:17:43.599482 4558 generic.go:334] "Generic (PLEG): container finished" podID="6dae93de-b907-44f7-a94c-c691eee0af7f" containerID="65fe42fe25e67f4d2e7a0b380665cfa50e34c28aa831796b5829b20cecf6a5cb" exitCode=0 Jan 20 17:17:43 crc kubenswrapper[4558]: I0120 17:17:43.599522 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" event={"ID":"6dae93de-b907-44f7-a94c-c691eee0af7f","Type":"ContainerDied","Data":"65fe42fe25e67f4d2e7a0b380665cfa50e34c28aa831796b5829b20cecf6a5cb"} Jan 20 17:17:44 crc kubenswrapper[4558]: I0120 17:17:44.609376 4558 generic.go:334] "Generic (PLEG): container finished" podID="9c52efaf-b737-47bf-9ca1-109a28e19113" containerID="ded34b9dd6d6a2bbb413151aa8d7e879c0b9a135b1b75cfbb4c74ef99adcec5a" exitCode=0 Jan 20 17:17:44 crc kubenswrapper[4558]: I0120 17:17:44.609404 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-2" 
event={"ID":"9c52efaf-b737-47bf-9ca1-109a28e19113","Type":"ContainerDied","Data":"ded34b9dd6d6a2bbb413151aa8d7e879c0b9a135b1b75cfbb4c74ef99adcec5a"} Jan 20 17:17:44 crc kubenswrapper[4558]: I0120 17:17:44.612364 4558 generic.go:334] "Generic (PLEG): container finished" podID="8573a632-84e0-4f80-b811-5646b571c318" containerID="7e9618d5dfe234e6e7b347ce911135cc4faaff526b91370dd0e418034794ec1d" exitCode=0 Jan 20 17:17:44 crc kubenswrapper[4558]: I0120 17:17:44.612430 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-1" event={"ID":"8573a632-84e0-4f80-b811-5646b571c318","Type":"ContainerDied","Data":"7e9618d5dfe234e6e7b347ce911135cc4faaff526b91370dd0e418034794ec1d"} Jan 20 17:17:44 crc kubenswrapper[4558]: I0120 17:17:44.614614 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" event={"ID":"6dae93de-b907-44f7-a94c-c691eee0af7f","Type":"ContainerStarted","Data":"b7c379986cc6aff3b828f77461a78d1169c8a75f30d3abde660227f21701db47"} Jan 20 17:17:44 crc kubenswrapper[4558]: I0120 17:17:44.614805 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:17:44 crc kubenswrapper[4558]: I0120 17:17:44.624414 4558 generic.go:334] "Generic (PLEG): container finished" podID="24114ddb-3b30-42ac-9d61-cfeb15d58728" containerID="4c8665b5b235011d33eb908501c51ff5695ec224cf17893391187c66bf1ab060" exitCode=0 Jan 20 17:17:44 crc kubenswrapper[4558]: I0120 17:17:44.624473 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-0" event={"ID":"24114ddb-3b30-42ac-9d61-cfeb15d58728","Type":"ContainerDied","Data":"4c8665b5b235011d33eb908501c51ff5695ec224cf17893391187c66bf1ab060"} Jan 20 17:17:44 crc kubenswrapper[4558]: I0120 17:17:44.630678 4558 generic.go:334] "Generic (PLEG): container finished" podID="344f9f31-8a81-4544-b782-5aa78dfc5cc2" containerID="614476c61e78dcb46b1c1d1cf00064d7ee3ffba88582ebadc80626446ffd327e" exitCode=0 Jan 20 17:17:44 crc kubenswrapper[4558]: I0120 17:17:44.630736 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-2" event={"ID":"344f9f31-8a81-4544-b782-5aa78dfc5cc2","Type":"ContainerDied","Data":"614476c61e78dcb46b1c1d1cf00064d7ee3ffba88582ebadc80626446ffd327e"} Jan 20 17:17:44 crc kubenswrapper[4558]: I0120 17:17:44.633374 4558 generic.go:334] "Generic (PLEG): container finished" podID="2c809052-d9bb-4982-8271-5b7a9a6f28f9" containerID="aef92f821b7a5a08ba1161060fd5100effa5681c8a90295b7e73584d9d1549b4" exitCode=0 Jan 20 17:17:44 crc kubenswrapper[4558]: I0120 17:17:44.633445 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-1" event={"ID":"2c809052-d9bb-4982-8271-5b7a9a6f28f9","Type":"ContainerDied","Data":"aef92f821b7a5a08ba1161060fd5100effa5681c8a90295b7e73584d9d1549b4"} Jan 20 17:17:44 crc kubenswrapper[4558]: I0120 17:17:44.766885 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" podStartSLOduration=35.766867811 podStartE2EDuration="35.766867811s" podCreationTimestamp="2026-01-20 17:17:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:17:44.751554204 +0000 UTC m=+2158.511892171" watchObservedRunningTime="2026-01-20 17:17:44.766867811 +0000 UTC m=+2158.527205779" Jan 20 17:17:44 crc 
kubenswrapper[4558]: I0120 17:17:44.988476 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-g65fk" Jan 20 17:17:45 crc kubenswrapper[4558]: I0120 17:17:45.090763 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0f1083f8-2d2c-4ab0-8382-7e8d14713532-operator-scripts\") pod \"0f1083f8-2d2c-4ab0-8382-7e8d14713532\" (UID: \"0f1083f8-2d2c-4ab0-8382-7e8d14713532\") " Jan 20 17:17:45 crc kubenswrapper[4558]: I0120 17:17:45.090855 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-45rrs\" (UniqueName: \"kubernetes.io/projected/0f1083f8-2d2c-4ab0-8382-7e8d14713532-kube-api-access-45rrs\") pod \"0f1083f8-2d2c-4ab0-8382-7e8d14713532\" (UID: \"0f1083f8-2d2c-4ab0-8382-7e8d14713532\") " Jan 20 17:17:45 crc kubenswrapper[4558]: I0120 17:17:45.091337 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0f1083f8-2d2c-4ab0-8382-7e8d14713532-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "0f1083f8-2d2c-4ab0-8382-7e8d14713532" (UID: "0f1083f8-2d2c-4ab0-8382-7e8d14713532"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:17:45 crc kubenswrapper[4558]: I0120 17:17:45.098272 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0f1083f8-2d2c-4ab0-8382-7e8d14713532-kube-api-access-45rrs" (OuterVolumeSpecName: "kube-api-access-45rrs") pod "0f1083f8-2d2c-4ab0-8382-7e8d14713532" (UID: "0f1083f8-2d2c-4ab0-8382-7e8d14713532"). InnerVolumeSpecName "kube-api-access-45rrs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:17:45 crc kubenswrapper[4558]: I0120 17:17:45.193883 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0f1083f8-2d2c-4ab0-8382-7e8d14713532-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:17:45 crc kubenswrapper[4558]: I0120 17:17:45.193921 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-45rrs\" (UniqueName: \"kubernetes.io/projected/0f1083f8-2d2c-4ab0-8382-7e8d14713532-kube-api-access-45rrs\") on node \"crc\" DevicePath \"\"" Jan 20 17:17:45 crc kubenswrapper[4558]: I0120 17:17:45.645062 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-2" event={"ID":"9c52efaf-b737-47bf-9ca1-109a28e19113","Type":"ContainerStarted","Data":"ae9e159fcd24af6ebaa1ca94fb0944dc8d3bcbddd517479b3fa0699d53c7017a"} Jan 20 17:17:45 crc kubenswrapper[4558]: I0120 17:17:45.645674 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/rabbitmq-server-2" Jan 20 17:17:45 crc kubenswrapper[4558]: I0120 17:17:45.647387 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-g65fk" event={"ID":"0f1083f8-2d2c-4ab0-8382-7e8d14713532","Type":"ContainerDied","Data":"ad6e195ac480b80fff0424d6c043aa74e83ea69879907decd295c84a8888f88e"} Jan 20 17:17:45 crc kubenswrapper[4558]: I0120 17:17:45.647427 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ad6e195ac480b80fff0424d6c043aa74e83ea69879907decd295c84a8888f88e" Jan 20 17:17:45 crc kubenswrapper[4558]: I0120 17:17:45.647477 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-g65fk" Jan 20 17:17:45 crc kubenswrapper[4558]: I0120 17:17:45.654665 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-1" event={"ID":"8573a632-84e0-4f80-b811-5646b571c318","Type":"ContainerStarted","Data":"c5f31a0dc1f900cc5cbc94b222647ecbd6198220c8e700e7650325a56366cf4f"} Jan 20 17:17:45 crc kubenswrapper[4558]: I0120 17:17:45.654937 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/rabbitmq-cell1-server-1" Jan 20 17:17:45 crc kubenswrapper[4558]: I0120 17:17:45.658347 4558 generic.go:334] "Generic (PLEG): container finished" podID="11af03de-aac9-4531-9c0f-af093ba4d7ea" containerID="fc125d4ae84f126fbe53fa24fdba0fd63039bffb4d5927be5e014a9e4f598ab4" exitCode=0 Jan 20 17:17:45 crc kubenswrapper[4558]: I0120 17:17:45.658436 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-sync-2cpfk" event={"ID":"11af03de-aac9-4531-9c0f-af093ba4d7ea","Type":"ContainerDied","Data":"fc125d4ae84f126fbe53fa24fdba0fd63039bffb4d5927be5e014a9e4f598ab4"} Jan 20 17:17:45 crc kubenswrapper[4558]: I0120 17:17:45.662089 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-0" event={"ID":"24114ddb-3b30-42ac-9d61-cfeb15d58728","Type":"ContainerStarted","Data":"33a9e1abb20c3e3bf6a51b785efcb0a4eaf6964e1b8392e4977fc9f4c7b03f61"} Jan 20 17:17:45 crc kubenswrapper[4558]: I0120 17:17:45.662292 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:17:45 crc kubenswrapper[4558]: I0120 17:17:45.665124 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-2" event={"ID":"344f9f31-8a81-4544-b782-5aa78dfc5cc2","Type":"ContainerStarted","Data":"3723b282ed54cce183bafc40d9019c56750bb156f55f03ff6df98969d9b7a4c6"} Jan 20 17:17:45 crc kubenswrapper[4558]: I0120 17:17:45.665410 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/rabbitmq-cell1-server-2" Jan 20 17:17:45 crc kubenswrapper[4558]: I0120 17:17:45.673529 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/rabbitmq-server-2" podStartSLOduration=36.67351518 podStartE2EDuration="36.67351518s" podCreationTimestamp="2026-01-20 17:17:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:17:45.671499661 +0000 UTC m=+2159.431837628" watchObservedRunningTime="2026-01-20 17:17:45.67351518 +0000 UTC m=+2159.433853147" Jan 20 17:17:45 crc kubenswrapper[4558]: I0120 17:17:45.681822 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-1" event={"ID":"2c809052-d9bb-4982-8271-5b7a9a6f28f9","Type":"ContainerStarted","Data":"81a644c98c6093aa1efc528706c31e081b18455e9ddaf52cd7d7f878ae9f848a"} Jan 20 17:17:45 crc kubenswrapper[4558]: I0120 17:17:45.682133 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/rabbitmq-server-1" Jan 20 17:17:45 crc kubenswrapper[4558]: I0120 17:17:45.693556 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/rabbitmq-server-0" podStartSLOduration=36.693533225 podStartE2EDuration="36.693533225s" podCreationTimestamp="2026-01-20 17:17:09 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:17:45.688711397 +0000 UTC m=+2159.449049365" watchObservedRunningTime="2026-01-20 17:17:45.693533225 +0000 UTC m=+2159.453871192" Jan 20 17:17:45 crc kubenswrapper[4558]: I0120 17:17:45.739760 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/rabbitmq-cell1-server-1" podStartSLOduration=36.739743624 podStartE2EDuration="36.739743624s" podCreationTimestamp="2026-01-20 17:17:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:17:45.7246442 +0000 UTC m=+2159.484982167" watchObservedRunningTime="2026-01-20 17:17:45.739743624 +0000 UTC m=+2159.500081591" Jan 20 17:17:45 crc kubenswrapper[4558]: I0120 17:17:45.787921 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/rabbitmq-cell1-server-2" podStartSLOduration=36.787905824 podStartE2EDuration="36.787905824s" podCreationTimestamp="2026-01-20 17:17:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:17:45.778847609 +0000 UTC m=+2159.539185577" watchObservedRunningTime="2026-01-20 17:17:45.787905824 +0000 UTC m=+2159.548243790" Jan 20 17:17:45 crc kubenswrapper[4558]: I0120 17:17:45.803813 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/rabbitmq-server-1" podStartSLOduration=36.803799231 podStartE2EDuration="36.803799231s" podCreationTimestamp="2026-01-20 17:17:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:17:45.79772529 +0000 UTC m=+2159.558063257" watchObservedRunningTime="2026-01-20 17:17:45.803799231 +0000 UTC m=+2159.564137198" Jan 20 17:17:47 crc kubenswrapper[4558]: I0120 17:17:47.060863 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-db-sync-2cpfk" Jan 20 17:17:47 crc kubenswrapper[4558]: I0120 17:17:47.235395 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/11af03de-aac9-4531-9c0f-af093ba4d7ea-db-sync-config-data\") pod \"11af03de-aac9-4531-9c0f-af093ba4d7ea\" (UID: \"11af03de-aac9-4531-9c0f-af093ba4d7ea\") " Jan 20 17:17:47 crc kubenswrapper[4558]: I0120 17:17:47.235935 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/11af03de-aac9-4531-9c0f-af093ba4d7ea-config-data\") pod \"11af03de-aac9-4531-9c0f-af093ba4d7ea\" (UID: \"11af03de-aac9-4531-9c0f-af093ba4d7ea\") " Jan 20 17:17:47 crc kubenswrapper[4558]: I0120 17:17:47.235973 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11af03de-aac9-4531-9c0f-af093ba4d7ea-combined-ca-bundle\") pod \"11af03de-aac9-4531-9c0f-af093ba4d7ea\" (UID: \"11af03de-aac9-4531-9c0f-af093ba4d7ea\") " Jan 20 17:17:47 crc kubenswrapper[4558]: I0120 17:17:47.236008 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6649j\" (UniqueName: \"kubernetes.io/projected/11af03de-aac9-4531-9c0f-af093ba4d7ea-kube-api-access-6649j\") pod \"11af03de-aac9-4531-9c0f-af093ba4d7ea\" (UID: \"11af03de-aac9-4531-9c0f-af093ba4d7ea\") " Jan 20 17:17:47 crc kubenswrapper[4558]: I0120 17:17:47.242385 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/11af03de-aac9-4531-9c0f-af093ba4d7ea-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "11af03de-aac9-4531-9c0f-af093ba4d7ea" (UID: "11af03de-aac9-4531-9c0f-af093ba4d7ea"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:17:47 crc kubenswrapper[4558]: I0120 17:17:47.260068 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/11af03de-aac9-4531-9c0f-af093ba4d7ea-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "11af03de-aac9-4531-9c0f-af093ba4d7ea" (UID: "11af03de-aac9-4531-9c0f-af093ba4d7ea"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:17:47 crc kubenswrapper[4558]: I0120 17:17:47.269298 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/11af03de-aac9-4531-9c0f-af093ba4d7ea-kube-api-access-6649j" (OuterVolumeSpecName: "kube-api-access-6649j") pod "11af03de-aac9-4531-9c0f-af093ba4d7ea" (UID: "11af03de-aac9-4531-9c0f-af093ba4d7ea"). InnerVolumeSpecName "kube-api-access-6649j". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:17:47 crc kubenswrapper[4558]: I0120 17:17:47.279412 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/11af03de-aac9-4531-9c0f-af093ba4d7ea-config-data" (OuterVolumeSpecName: "config-data") pod "11af03de-aac9-4531-9c0f-af093ba4d7ea" (UID: "11af03de-aac9-4531-9c0f-af093ba4d7ea"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:17:47 crc kubenswrapper[4558]: I0120 17:17:47.340198 4558 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/11af03de-aac9-4531-9c0f-af093ba4d7ea-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:17:47 crc kubenswrapper[4558]: I0120 17:17:47.340759 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/11af03de-aac9-4531-9c0f-af093ba4d7ea-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:17:47 crc kubenswrapper[4558]: I0120 17:17:47.340780 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11af03de-aac9-4531-9c0f-af093ba4d7ea-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:17:47 crc kubenswrapper[4558]: I0120 17:17:47.340804 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6649j\" (UniqueName: \"kubernetes.io/projected/11af03de-aac9-4531-9c0f-af093ba4d7ea-kube-api-access-6649j\") on node \"crc\" DevicePath \"\"" Jan 20 17:17:47 crc kubenswrapper[4558]: I0120 17:17:47.710396 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-sync-2cpfk" event={"ID":"11af03de-aac9-4531-9c0f-af093ba4d7ea","Type":"ContainerDied","Data":"e0679a4a01abe0bd4fbc94f7c9693788c64d4a787d1a54d2d8c5d73dd49a6aaa"} Jan 20 17:17:47 crc kubenswrapper[4558]: I0120 17:17:47.710439 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e0679a4a01abe0bd4fbc94f7c9693788c64d4a787d1a54d2d8c5d73dd49a6aaa" Jan 20 17:17:47 crc kubenswrapper[4558]: I0120 17:17:47.710458 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-db-sync-2cpfk" Jan 20 17:17:49 crc kubenswrapper[4558]: I0120 17:17:49.047953 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7745f4bdcc-fcqxh" Jan 20 17:17:49 crc kubenswrapper[4558]: I0120 17:17:49.100656 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-ndwh7"] Jan 20 17:17:49 crc kubenswrapper[4558]: I0120 17:17:49.100873 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-ndwh7" podUID="ef393209-80db-43c7-a749-3ec7826f2123" containerName="dnsmasq-dns" containerID="cri-o://7198bce5f93612a72f1bc29427e2626f3e569032458fe9469fe3c848a987dd0d" gracePeriod=10 Jan 20 17:17:49 crc kubenswrapper[4558]: I0120 17:17:49.513033 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-ndwh7" Jan 20 17:17:49 crc kubenswrapper[4558]: I0120 17:17:49.686191 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/ef393209-80db-43c7-a749-3ec7826f2123-dnsmasq-svc\") pod \"ef393209-80db-43c7-a749-3ec7826f2123\" (UID: \"ef393209-80db-43c7-a749-3ec7826f2123\") " Jan 20 17:17:49 crc kubenswrapper[4558]: I0120 17:17:49.686280 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ef393209-80db-43c7-a749-3ec7826f2123-config\") pod \"ef393209-80db-43c7-a749-3ec7826f2123\" (UID: \"ef393209-80db-43c7-a749-3ec7826f2123\") " Jan 20 17:17:49 crc kubenswrapper[4558]: I0120 17:17:49.686442 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w6kpd\" (UniqueName: \"kubernetes.io/projected/ef393209-80db-43c7-a749-3ec7826f2123-kube-api-access-w6kpd\") pod \"ef393209-80db-43c7-a749-3ec7826f2123\" (UID: \"ef393209-80db-43c7-a749-3ec7826f2123\") " Jan 20 17:17:49 crc kubenswrapper[4558]: I0120 17:17:49.692054 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ef393209-80db-43c7-a749-3ec7826f2123-kube-api-access-w6kpd" (OuterVolumeSpecName: "kube-api-access-w6kpd") pod "ef393209-80db-43c7-a749-3ec7826f2123" (UID: "ef393209-80db-43c7-a749-3ec7826f2123"). InnerVolumeSpecName "kube-api-access-w6kpd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:17:49 crc kubenswrapper[4558]: I0120 17:17:49.723580 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ef393209-80db-43c7-a749-3ec7826f2123-config" (OuterVolumeSpecName: "config") pod "ef393209-80db-43c7-a749-3ec7826f2123" (UID: "ef393209-80db-43c7-a749-3ec7826f2123"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:17:49 crc kubenswrapper[4558]: I0120 17:17:49.726640 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ef393209-80db-43c7-a749-3ec7826f2123-dnsmasq-svc" (OuterVolumeSpecName: "dnsmasq-svc") pod "ef393209-80db-43c7-a749-3ec7826f2123" (UID: "ef393209-80db-43c7-a749-3ec7826f2123"). InnerVolumeSpecName "dnsmasq-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:17:49 crc kubenswrapper[4558]: I0120 17:17:49.731336 4558 generic.go:334] "Generic (PLEG): container finished" podID="ef393209-80db-43c7-a749-3ec7826f2123" containerID="7198bce5f93612a72f1bc29427e2626f3e569032458fe9469fe3c848a987dd0d" exitCode=0 Jan 20 17:17:49 crc kubenswrapper[4558]: I0120 17:17:49.731383 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-ndwh7" event={"ID":"ef393209-80db-43c7-a749-3ec7826f2123","Type":"ContainerDied","Data":"7198bce5f93612a72f1bc29427e2626f3e569032458fe9469fe3c848a987dd0d"} Jan 20 17:17:49 crc kubenswrapper[4558]: I0120 17:17:49.731416 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-ndwh7" event={"ID":"ef393209-80db-43c7-a749-3ec7826f2123","Type":"ContainerDied","Data":"560e74d6ac97887570796d7f33795ec8548eb06564c454862ab5d2d8630bffd6"} Jan 20 17:17:49 crc kubenswrapper[4558]: I0120 17:17:49.731435 4558 scope.go:117] "RemoveContainer" containerID="7198bce5f93612a72f1bc29427e2626f3e569032458fe9469fe3c848a987dd0d" Jan 20 17:17:49 crc kubenswrapper[4558]: I0120 17:17:49.731572 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-ndwh7" Jan 20 17:17:49 crc kubenswrapper[4558]: I0120 17:17:49.773927 4558 scope.go:117] "RemoveContainer" containerID="412baacf5264ce5460c28e8a452e6be275c6dc44a89ee8bd0b6460f122a289b5" Jan 20 17:17:49 crc kubenswrapper[4558]: I0120 17:17:49.777435 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-ndwh7"] Jan 20 17:17:49 crc kubenswrapper[4558]: I0120 17:17:49.785056 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-ndwh7"] Jan 20 17:17:49 crc kubenswrapper[4558]: I0120 17:17:49.789818 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w6kpd\" (UniqueName: \"kubernetes.io/projected/ef393209-80db-43c7-a749-3ec7826f2123-kube-api-access-w6kpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:17:49 crc kubenswrapper[4558]: I0120 17:17:49.789846 4558 reconciler_common.go:293] "Volume detached for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/ef393209-80db-43c7-a749-3ec7826f2123-dnsmasq-svc\") on node \"crc\" DevicePath \"\"" Jan 20 17:17:49 crc kubenswrapper[4558]: I0120 17:17:49.789858 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ef393209-80db-43c7-a749-3ec7826f2123-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:17:49 crc kubenswrapper[4558]: I0120 17:17:49.807494 4558 scope.go:117] "RemoveContainer" containerID="7198bce5f93612a72f1bc29427e2626f3e569032458fe9469fe3c848a987dd0d" Jan 20 17:17:49 crc kubenswrapper[4558]: E0120 17:17:49.807815 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7198bce5f93612a72f1bc29427e2626f3e569032458fe9469fe3c848a987dd0d\": container with ID starting with 7198bce5f93612a72f1bc29427e2626f3e569032458fe9469fe3c848a987dd0d not found: ID does not exist" containerID="7198bce5f93612a72f1bc29427e2626f3e569032458fe9469fe3c848a987dd0d" Jan 20 17:17:49 crc kubenswrapper[4558]: I0120 17:17:49.807853 4558 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"7198bce5f93612a72f1bc29427e2626f3e569032458fe9469fe3c848a987dd0d"} err="failed to get container status \"7198bce5f93612a72f1bc29427e2626f3e569032458fe9469fe3c848a987dd0d\": rpc error: code = NotFound desc = could not find container \"7198bce5f93612a72f1bc29427e2626f3e569032458fe9469fe3c848a987dd0d\": container with ID starting with 7198bce5f93612a72f1bc29427e2626f3e569032458fe9469fe3c848a987dd0d not found: ID does not exist" Jan 20 17:17:49 crc kubenswrapper[4558]: I0120 17:17:49.807881 4558 scope.go:117] "RemoveContainer" containerID="412baacf5264ce5460c28e8a452e6be275c6dc44a89ee8bd0b6460f122a289b5" Jan 20 17:17:49 crc kubenswrapper[4558]: E0120 17:17:49.808283 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"412baacf5264ce5460c28e8a452e6be275c6dc44a89ee8bd0b6460f122a289b5\": container with ID starting with 412baacf5264ce5460c28e8a452e6be275c6dc44a89ee8bd0b6460f122a289b5 not found: ID does not exist" containerID="412baacf5264ce5460c28e8a452e6be275c6dc44a89ee8bd0b6460f122a289b5" Jan 20 17:17:49 crc kubenswrapper[4558]: I0120 17:17:49.808302 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"412baacf5264ce5460c28e8a452e6be275c6dc44a89ee8bd0b6460f122a289b5"} err="failed to get container status \"412baacf5264ce5460c28e8a452e6be275c6dc44a89ee8bd0b6460f122a289b5\": rpc error: code = NotFound desc = could not find container \"412baacf5264ce5460c28e8a452e6be275c6dc44a89ee8bd0b6460f122a289b5\": container with ID starting with 412baacf5264ce5460c28e8a452e6be275c6dc44a89ee8bd0b6460f122a289b5 not found: ID does not exist" Jan 20 17:17:50 crc kubenswrapper[4558]: I0120 17:17:50.576675 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ef393209-80db-43c7-a749-3ec7826f2123" path="/var/lib/kubelet/pods/ef393209-80db-43c7-a749-3ec7826f2123/volumes" Jan 20 17:17:57 crc kubenswrapper[4558]: I0120 17:17:57.330393 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 17:17:57 crc kubenswrapper[4558]: I0120 17:17:57.331183 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 17:17:57 crc kubenswrapper[4558]: I0120 17:17:57.331242 4558 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" Jan 20 17:17:57 crc kubenswrapper[4558]: I0120 17:17:57.331921 4558 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"0ffce44366a8b4466427b682fb41640425366a0f4449210959148de9aef695c6"} pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 20 17:17:57 crc kubenswrapper[4558]: I0120 17:17:57.331984 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" 
podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" containerID="cri-o://0ffce44366a8b4466427b682fb41640425366a0f4449210959148de9aef695c6" gracePeriod=600 Jan 20 17:17:57 crc kubenswrapper[4558]: E0120 17:17:57.453264 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:17:57 crc kubenswrapper[4558]: I0120 17:17:57.840424 4558 generic.go:334] "Generic (PLEG): container finished" podID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerID="0ffce44366a8b4466427b682fb41640425366a0f4449210959148de9aef695c6" exitCode=0 Jan 20 17:17:57 crc kubenswrapper[4558]: I0120 17:17:57.840501 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerDied","Data":"0ffce44366a8b4466427b682fb41640425366a0f4449210959148de9aef695c6"} Jan 20 17:17:57 crc kubenswrapper[4558]: I0120 17:17:57.840794 4558 scope.go:117] "RemoveContainer" containerID="09cede35de046f760d5c008c35cdd13631d8660c06e04fb988751619769147c6" Jan 20 17:17:57 crc kubenswrapper[4558]: I0120 17:17:57.841184 4558 scope.go:117] "RemoveContainer" containerID="0ffce44366a8b4466427b682fb41640425366a0f4449210959148de9aef695c6" Jan 20 17:17:57 crc kubenswrapper[4558]: E0120 17:17:57.841437 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:18:00 crc kubenswrapper[4558]: I0120 17:18:00.446770 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/rabbitmq-server-0" podUID="24114ddb-3b30-42ac-9d61-cfeb15d58728" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.114:5671: connect: connection refused" Jan 20 17:18:00 crc kubenswrapper[4558]: I0120 17:18:00.467463 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/rabbitmq-server-2" podUID="9c52efaf-b737-47bf-9ca1-109a28e19113" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.115:5671: connect: connection refused" Jan 20 17:18:00 crc kubenswrapper[4558]: I0120 17:18:00.490253 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/rabbitmq-server-1" podUID="2c809052-d9bb-4982-8271-5b7a9a6f28f9" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.116:5671: connect: connection refused" Jan 20 17:18:00 crc kubenswrapper[4558]: I0120 17:18:00.803991 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" podUID="6dae93de-b907-44f7-a94c-c691eee0af7f" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.117:5671: connect: connection refused" Jan 20 17:18:00 crc kubenswrapper[4558]: I0120 17:18:00.817517 4558 prober.go:107] "Probe failed" probeType="Readiness" 
pod="openstack-kuttl-tests/rabbitmq-cell1-server-2" podUID="344f9f31-8a81-4544-b782-5aa78dfc5cc2" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.118:5671: connect: connection refused" Jan 20 17:18:00 crc kubenswrapper[4558]: I0120 17:18:00.824523 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/rabbitmq-cell1-server-1" podUID="8573a632-84e0-4f80-b811-5646b571c318" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.119:5671: connect: connection refused" Jan 20 17:18:04 crc kubenswrapper[4558]: I0120 17:18:04.051089 4558 scope.go:117] "RemoveContainer" containerID="c1a4b84370225b549fc02b79145b4c61e6750fe3d5ea99a5fed3b0d5238da749" Jan 20 17:18:04 crc kubenswrapper[4558]: I0120 17:18:04.075116 4558 scope.go:117] "RemoveContainer" containerID="afdc53de64cb2128ddb1ed677df93ded610adda09fa181e5dd671194f1e9cd60" Jan 20 17:18:10 crc kubenswrapper[4558]: I0120 17:18:10.445937 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/rabbitmq-server-0" podUID="24114ddb-3b30-42ac-9d61-cfeb15d58728" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.114:5671: connect: connection refused" Jan 20 17:18:10 crc kubenswrapper[4558]: I0120 17:18:10.468332 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/rabbitmq-server-2" Jan 20 17:18:10 crc kubenswrapper[4558]: I0120 17:18:10.490365 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/rabbitmq-server-1" Jan 20 17:18:10 crc kubenswrapper[4558]: I0120 17:18:10.803373 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:18:10 crc kubenswrapper[4558]: I0120 17:18:10.816302 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/rabbitmq-cell1-server-2" Jan 20 17:18:10 crc kubenswrapper[4558]: I0120 17:18:10.825442 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/rabbitmq-cell1-server-1" Jan 20 17:18:12 crc kubenswrapper[4558]: I0120 17:18:12.943519 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-k26sn"] Jan 20 17:18:12 crc kubenswrapper[4558]: E0120 17:18:12.944142 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f1083f8-2d2c-4ab0-8382-7e8d14713532" containerName="mariadb-account-create-update" Jan 20 17:18:12 crc kubenswrapper[4558]: I0120 17:18:12.944157 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f1083f8-2d2c-4ab0-8382-7e8d14713532" containerName="mariadb-account-create-update" Jan 20 17:18:12 crc kubenswrapper[4558]: E0120 17:18:12.944197 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11af03de-aac9-4531-9c0f-af093ba4d7ea" containerName="glance-db-sync" Jan 20 17:18:12 crc kubenswrapper[4558]: I0120 17:18:12.944203 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="11af03de-aac9-4531-9c0f-af093ba4d7ea" containerName="glance-db-sync" Jan 20 17:18:12 crc kubenswrapper[4558]: E0120 17:18:12.944217 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef393209-80db-43c7-a749-3ec7826f2123" containerName="dnsmasq-dns" Jan 20 17:18:12 crc kubenswrapper[4558]: I0120 17:18:12.944223 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef393209-80db-43c7-a749-3ec7826f2123" containerName="dnsmasq-dns" Jan 20 17:18:12 crc 
kubenswrapper[4558]: E0120 17:18:12.944239 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef393209-80db-43c7-a749-3ec7826f2123" containerName="init" Jan 20 17:18:12 crc kubenswrapper[4558]: I0120 17:18:12.944244 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef393209-80db-43c7-a749-3ec7826f2123" containerName="init" Jan 20 17:18:12 crc kubenswrapper[4558]: I0120 17:18:12.944436 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef393209-80db-43c7-a749-3ec7826f2123" containerName="dnsmasq-dns" Jan 20 17:18:12 crc kubenswrapper[4558]: I0120 17:18:12.944448 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="11af03de-aac9-4531-9c0f-af093ba4d7ea" containerName="glance-db-sync" Jan 20 17:18:12 crc kubenswrapper[4558]: I0120 17:18:12.944459 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="0f1083f8-2d2c-4ab0-8382-7e8d14713532" containerName="mariadb-account-create-update" Jan 20 17:18:12 crc kubenswrapper[4558]: I0120 17:18:12.945587 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-k26sn" Jan 20 17:18:12 crc kubenswrapper[4558]: I0120 17:18:12.964742 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-k26sn"] Jan 20 17:18:13 crc kubenswrapper[4558]: I0120 17:18:13.058922 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ckwrn\" (UniqueName: \"kubernetes.io/projected/f4b9badf-c4fb-42f0-9e4e-229e6d19ba09-kube-api-access-ckwrn\") pod \"redhat-operators-k26sn\" (UID: \"f4b9badf-c4fb-42f0-9e4e-229e6d19ba09\") " pod="openshift-marketplace/redhat-operators-k26sn" Jan 20 17:18:13 crc kubenswrapper[4558]: I0120 17:18:13.058988 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f4b9badf-c4fb-42f0-9e4e-229e6d19ba09-catalog-content\") pod \"redhat-operators-k26sn\" (UID: \"f4b9badf-c4fb-42f0-9e4e-229e6d19ba09\") " pod="openshift-marketplace/redhat-operators-k26sn" Jan 20 17:18:13 crc kubenswrapper[4558]: I0120 17:18:13.059110 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f4b9badf-c4fb-42f0-9e4e-229e6d19ba09-utilities\") pod \"redhat-operators-k26sn\" (UID: \"f4b9badf-c4fb-42f0-9e4e-229e6d19ba09\") " pod="openshift-marketplace/redhat-operators-k26sn" Jan 20 17:18:13 crc kubenswrapper[4558]: I0120 17:18:13.161728 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f4b9badf-c4fb-42f0-9e4e-229e6d19ba09-utilities\") pod \"redhat-operators-k26sn\" (UID: \"f4b9badf-c4fb-42f0-9e4e-229e6d19ba09\") " pod="openshift-marketplace/redhat-operators-k26sn" Jan 20 17:18:13 crc kubenswrapper[4558]: I0120 17:18:13.162140 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ckwrn\" (UniqueName: \"kubernetes.io/projected/f4b9badf-c4fb-42f0-9e4e-229e6d19ba09-kube-api-access-ckwrn\") pod \"redhat-operators-k26sn\" (UID: \"f4b9badf-c4fb-42f0-9e4e-229e6d19ba09\") " pod="openshift-marketplace/redhat-operators-k26sn" Jan 20 17:18:13 crc kubenswrapper[4558]: I0120 17:18:13.162245 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/f4b9badf-c4fb-42f0-9e4e-229e6d19ba09-catalog-content\") pod \"redhat-operators-k26sn\" (UID: \"f4b9badf-c4fb-42f0-9e4e-229e6d19ba09\") " pod="openshift-marketplace/redhat-operators-k26sn" Jan 20 17:18:13 crc kubenswrapper[4558]: I0120 17:18:13.162324 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f4b9badf-c4fb-42f0-9e4e-229e6d19ba09-utilities\") pod \"redhat-operators-k26sn\" (UID: \"f4b9badf-c4fb-42f0-9e4e-229e6d19ba09\") " pod="openshift-marketplace/redhat-operators-k26sn" Jan 20 17:18:13 crc kubenswrapper[4558]: I0120 17:18:13.162750 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f4b9badf-c4fb-42f0-9e4e-229e6d19ba09-catalog-content\") pod \"redhat-operators-k26sn\" (UID: \"f4b9badf-c4fb-42f0-9e4e-229e6d19ba09\") " pod="openshift-marketplace/redhat-operators-k26sn" Jan 20 17:18:13 crc kubenswrapper[4558]: I0120 17:18:13.185473 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ckwrn\" (UniqueName: \"kubernetes.io/projected/f4b9badf-c4fb-42f0-9e4e-229e6d19ba09-kube-api-access-ckwrn\") pod \"redhat-operators-k26sn\" (UID: \"f4b9badf-c4fb-42f0-9e4e-229e6d19ba09\") " pod="openshift-marketplace/redhat-operators-k26sn" Jan 20 17:18:13 crc kubenswrapper[4558]: I0120 17:18:13.260403 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-k26sn" Jan 20 17:18:13 crc kubenswrapper[4558]: I0120 17:18:13.566158 4558 scope.go:117] "RemoveContainer" containerID="0ffce44366a8b4466427b682fb41640425366a0f4449210959148de9aef695c6" Jan 20 17:18:13 crc kubenswrapper[4558]: E0120 17:18:13.566735 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:18:13 crc kubenswrapper[4558]: I0120 17:18:13.666092 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-k26sn"] Jan 20 17:18:14 crc kubenswrapper[4558]: I0120 17:18:14.019921 4558 generic.go:334] "Generic (PLEG): container finished" podID="f4b9badf-c4fb-42f0-9e4e-229e6d19ba09" containerID="0cdbf90271d7eca20f84fd86045926e0c546b432590bb3e3392ed6b4365042d8" exitCode=0 Jan 20 17:18:14 crc kubenswrapper[4558]: I0120 17:18:14.019981 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-k26sn" event={"ID":"f4b9badf-c4fb-42f0-9e4e-229e6d19ba09","Type":"ContainerDied","Data":"0cdbf90271d7eca20f84fd86045926e0c546b432590bb3e3392ed6b4365042d8"} Jan 20 17:18:14 crc kubenswrapper[4558]: I0120 17:18:14.020025 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-k26sn" event={"ID":"f4b9badf-c4fb-42f0-9e4e-229e6d19ba09","Type":"ContainerStarted","Data":"bbcd2b6cca74121d9afac671b41701358ae90f47c7c841cca614c3fadd65e1f1"} Jan 20 17:18:16 crc kubenswrapper[4558]: I0120 17:18:16.047274 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-k26sn" 
event={"ID":"f4b9badf-c4fb-42f0-9e4e-229e6d19ba09","Type":"ContainerStarted","Data":"5cd8a9ea46f08e9645c0767eadb2e0762e2cc9ecfc79d64ae160a3cc92b68dd0"} Jan 20 17:18:17 crc kubenswrapper[4558]: I0120 17:18:17.060596 4558 generic.go:334] "Generic (PLEG): container finished" podID="f4b9badf-c4fb-42f0-9e4e-229e6d19ba09" containerID="5cd8a9ea46f08e9645c0767eadb2e0762e2cc9ecfc79d64ae160a3cc92b68dd0" exitCode=0 Jan 20 17:18:17 crc kubenswrapper[4558]: I0120 17:18:17.060700 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-k26sn" event={"ID":"f4b9badf-c4fb-42f0-9e4e-229e6d19ba09","Type":"ContainerDied","Data":"5cd8a9ea46f08e9645c0767eadb2e0762e2cc9ecfc79d64ae160a3cc92b68dd0"} Jan 20 17:18:18 crc kubenswrapper[4558]: I0120 17:18:18.075208 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-k26sn" event={"ID":"f4b9badf-c4fb-42f0-9e4e-229e6d19ba09","Type":"ContainerStarted","Data":"d6d27a0865b41eef9e004631f8e86859ba3c7c4c9a8df731efc298d9e05d77d6"} Jan 20 17:18:18 crc kubenswrapper[4558]: I0120 17:18:18.096830 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-k26sn" podStartSLOduration=2.561228196 podStartE2EDuration="6.096806066s" podCreationTimestamp="2026-01-20 17:18:12 +0000 UTC" firstStartedPulling="2026-01-20 17:18:14.021900835 +0000 UTC m=+2187.782238803" lastFinishedPulling="2026-01-20 17:18:17.557478705 +0000 UTC m=+2191.317816673" observedRunningTime="2026-01-20 17:18:18.09539307 +0000 UTC m=+2191.855731036" watchObservedRunningTime="2026-01-20 17:18:18.096806066 +0000 UTC m=+2191.857144033" Jan 20 17:18:19 crc kubenswrapper[4558]: I0120 17:18:19.335092 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-hr4ld"] Jan 20 17:18:19 crc kubenswrapper[4558]: I0120 17:18:19.337688 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-hr4ld" Jan 20 17:18:19 crc kubenswrapper[4558]: I0120 17:18:19.354411 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hr4ld"] Jan 20 17:18:19 crc kubenswrapper[4558]: I0120 17:18:19.486703 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ef3b4518-9c22-4046-9053-142864162a50-catalog-content\") pod \"certified-operators-hr4ld\" (UID: \"ef3b4518-9c22-4046-9053-142864162a50\") " pod="openshift-marketplace/certified-operators-hr4ld" Jan 20 17:18:19 crc kubenswrapper[4558]: I0120 17:18:19.487236 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ef3b4518-9c22-4046-9053-142864162a50-utilities\") pod \"certified-operators-hr4ld\" (UID: \"ef3b4518-9c22-4046-9053-142864162a50\") " pod="openshift-marketplace/certified-operators-hr4ld" Jan 20 17:18:19 crc kubenswrapper[4558]: I0120 17:18:19.487264 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qb6k9\" (UniqueName: \"kubernetes.io/projected/ef3b4518-9c22-4046-9053-142864162a50-kube-api-access-qb6k9\") pod \"certified-operators-hr4ld\" (UID: \"ef3b4518-9c22-4046-9053-142864162a50\") " pod="openshift-marketplace/certified-operators-hr4ld" Jan 20 17:18:19 crc kubenswrapper[4558]: I0120 17:18:19.588773 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ef3b4518-9c22-4046-9053-142864162a50-utilities\") pod \"certified-operators-hr4ld\" (UID: \"ef3b4518-9c22-4046-9053-142864162a50\") " pod="openshift-marketplace/certified-operators-hr4ld" Jan 20 17:18:19 crc kubenswrapper[4558]: I0120 17:18:19.588816 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qb6k9\" (UniqueName: \"kubernetes.io/projected/ef3b4518-9c22-4046-9053-142864162a50-kube-api-access-qb6k9\") pod \"certified-operators-hr4ld\" (UID: \"ef3b4518-9c22-4046-9053-142864162a50\") " pod="openshift-marketplace/certified-operators-hr4ld" Jan 20 17:18:19 crc kubenswrapper[4558]: I0120 17:18:19.589381 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ef3b4518-9c22-4046-9053-142864162a50-utilities\") pod \"certified-operators-hr4ld\" (UID: \"ef3b4518-9c22-4046-9053-142864162a50\") " pod="openshift-marketplace/certified-operators-hr4ld" Jan 20 17:18:19 crc kubenswrapper[4558]: I0120 17:18:19.589731 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ef3b4518-9c22-4046-9053-142864162a50-catalog-content\") pod \"certified-operators-hr4ld\" (UID: \"ef3b4518-9c22-4046-9053-142864162a50\") " pod="openshift-marketplace/certified-operators-hr4ld" Jan 20 17:18:19 crc kubenswrapper[4558]: I0120 17:18:19.590057 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ef3b4518-9c22-4046-9053-142864162a50-catalog-content\") pod \"certified-operators-hr4ld\" (UID: \"ef3b4518-9c22-4046-9053-142864162a50\") " pod="openshift-marketplace/certified-operators-hr4ld" Jan 20 17:18:19 crc kubenswrapper[4558]: I0120 17:18:19.610730 4558 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-qb6k9\" (UniqueName: \"kubernetes.io/projected/ef3b4518-9c22-4046-9053-142864162a50-kube-api-access-qb6k9\") pod \"certified-operators-hr4ld\" (UID: \"ef3b4518-9c22-4046-9053-142864162a50\") " pod="openshift-marketplace/certified-operators-hr4ld" Jan 20 17:18:19 crc kubenswrapper[4558]: I0120 17:18:19.661121 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hr4ld" Jan 20 17:18:20 crc kubenswrapper[4558]: I0120 17:18:20.131062 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hr4ld"] Jan 20 17:18:20 crc kubenswrapper[4558]: W0120 17:18:20.133616 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podef3b4518_9c22_4046_9053_142864162a50.slice/crio-9084a81207b12db62b1cc276844cd49b2213c778988bdd2ca9f180a10c54ae34 WatchSource:0}: Error finding container 9084a81207b12db62b1cc276844cd49b2213c778988bdd2ca9f180a10c54ae34: Status 404 returned error can't find the container with id 9084a81207b12db62b1cc276844cd49b2213c778988bdd2ca9f180a10c54ae34 Jan 20 17:18:20 crc kubenswrapper[4558]: I0120 17:18:20.446352 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:18:20 crc kubenswrapper[4558]: I0120 17:18:20.814653 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-db-create-vdx94"] Jan 20 17:18:20 crc kubenswrapper[4558]: I0120 17:18:20.815925 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-db-create-vdx94" Jan 20 17:18:20 crc kubenswrapper[4558]: I0120 17:18:20.822536 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-db-create-vdx94"] Jan 20 17:18:20 crc kubenswrapper[4558]: I0120 17:18:20.894811 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-db-create-z7w7v"] Jan 20 17:18:20 crc kubenswrapper[4558]: I0120 17:18:20.896241 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-db-create-z7w7v" Jan 20 17:18:20 crc kubenswrapper[4558]: I0120 17:18:20.899888 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-d010-account-create-update-tx77w"] Jan 20 17:18:20 crc kubenswrapper[4558]: I0120 17:18:20.901242 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-d010-account-create-update-tx77w" Jan 20 17:18:20 crc kubenswrapper[4558]: I0120 17:18:20.903355 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-db-secret" Jan 20 17:18:20 crc kubenswrapper[4558]: I0120 17:18:20.905378 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-db-create-z7w7v"] Jan 20 17:18:20 crc kubenswrapper[4558]: I0120 17:18:20.922540 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-btn2d\" (UniqueName: \"kubernetes.io/projected/17c2543e-2ad7-4bfa-a28f-34be13c6b966-kube-api-access-btn2d\") pod \"cinder-db-create-vdx94\" (UID: \"17c2543e-2ad7-4bfa-a28f-34be13c6b966\") " pod="openstack-kuttl-tests/cinder-db-create-vdx94" Jan 20 17:18:20 crc kubenswrapper[4558]: I0120 17:18:20.922679 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/17c2543e-2ad7-4bfa-a28f-34be13c6b966-operator-scripts\") pod \"cinder-db-create-vdx94\" (UID: \"17c2543e-2ad7-4bfa-a28f-34be13c6b966\") " pod="openstack-kuttl-tests/cinder-db-create-vdx94" Jan 20 17:18:20 crc kubenswrapper[4558]: I0120 17:18:20.933251 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-d010-account-create-update-tx77w"] Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.026141 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1ccfacb0-c21a-4956-8192-10a0a41409c7-operator-scripts\") pod \"barbican-db-create-z7w7v\" (UID: \"1ccfacb0-c21a-4956-8192-10a0a41409c7\") " pod="openstack-kuttl-tests/barbican-db-create-z7w7v" Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.026524 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/17c2543e-2ad7-4bfa-a28f-34be13c6b966-operator-scripts\") pod \"cinder-db-create-vdx94\" (UID: \"17c2543e-2ad7-4bfa-a28f-34be13c6b966\") " pod="openstack-kuttl-tests/cinder-db-create-vdx94" Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.026592 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6f96bd13-4e3b-4731-8b13-7325df4bdd25-operator-scripts\") pod \"barbican-d010-account-create-update-tx77w\" (UID: \"6f96bd13-4e3b-4731-8b13-7325df4bdd25\") " pod="openstack-kuttl-tests/barbican-d010-account-create-update-tx77w" Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.026709 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5hjjh\" (UniqueName: \"kubernetes.io/projected/6f96bd13-4e3b-4731-8b13-7325df4bdd25-kube-api-access-5hjjh\") pod \"barbican-d010-account-create-update-tx77w\" (UID: \"6f96bd13-4e3b-4731-8b13-7325df4bdd25\") " pod="openstack-kuttl-tests/barbican-d010-account-create-update-tx77w" Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.026786 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-btn2d\" (UniqueName: \"kubernetes.io/projected/17c2543e-2ad7-4bfa-a28f-34be13c6b966-kube-api-access-btn2d\") pod \"cinder-db-create-vdx94\" (UID: \"17c2543e-2ad7-4bfa-a28f-34be13c6b966\") " 
pod="openstack-kuttl-tests/cinder-db-create-vdx94" Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.026826 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fb4hd\" (UniqueName: \"kubernetes.io/projected/1ccfacb0-c21a-4956-8192-10a0a41409c7-kube-api-access-fb4hd\") pod \"barbican-db-create-z7w7v\" (UID: \"1ccfacb0-c21a-4956-8192-10a0a41409c7\") " pod="openstack-kuttl-tests/barbican-db-create-z7w7v" Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.027180 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/17c2543e-2ad7-4bfa-a28f-34be13c6b966-operator-scripts\") pod \"cinder-db-create-vdx94\" (UID: \"17c2543e-2ad7-4bfa-a28f-34be13c6b966\") " pod="openstack-kuttl-tests/cinder-db-create-vdx94" Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.041154 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-1d9a-account-create-update-7hqxp"] Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.042349 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-1d9a-account-create-update-7hqxp" Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.044363 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-db-secret" Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.053074 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-btn2d\" (UniqueName: \"kubernetes.io/projected/17c2543e-2ad7-4bfa-a28f-34be13c6b966-kube-api-access-btn2d\") pod \"cinder-db-create-vdx94\" (UID: \"17c2543e-2ad7-4bfa-a28f-34be13c6b966\") " pod="openstack-kuttl-tests/cinder-db-create-vdx94" Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.059953 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-1d9a-account-create-update-7hqxp"] Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.107681 4558 generic.go:334] "Generic (PLEG): container finished" podID="ef3b4518-9c22-4046-9053-142864162a50" containerID="218e508ad5a5c1a35f4f75204702c38a6c14be8d230cdae137643d62efe67558" exitCode=0 Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.107723 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hr4ld" event={"ID":"ef3b4518-9c22-4046-9053-142864162a50","Type":"ContainerDied","Data":"218e508ad5a5c1a35f4f75204702c38a6c14be8d230cdae137643d62efe67558"} Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.107749 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hr4ld" event={"ID":"ef3b4518-9c22-4046-9053-142864162a50","Type":"ContainerStarted","Data":"9084a81207b12db62b1cc276844cd49b2213c778988bdd2ca9f180a10c54ae34"} Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.109857 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-db-create-kpclp"] Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.111309 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-db-create-kpclp" Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.121617 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-db-create-kpclp"] Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.128148 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5hjjh\" (UniqueName: \"kubernetes.io/projected/6f96bd13-4e3b-4731-8b13-7325df4bdd25-kube-api-access-5hjjh\") pod \"barbican-d010-account-create-update-tx77w\" (UID: \"6f96bd13-4e3b-4731-8b13-7325df4bdd25\") " pod="openstack-kuttl-tests/barbican-d010-account-create-update-tx77w" Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.128240 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fb4hd\" (UniqueName: \"kubernetes.io/projected/1ccfacb0-c21a-4956-8192-10a0a41409c7-kube-api-access-fb4hd\") pod \"barbican-db-create-z7w7v\" (UID: \"1ccfacb0-c21a-4956-8192-10a0a41409c7\") " pod="openstack-kuttl-tests/barbican-db-create-z7w7v" Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.128271 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1ccfacb0-c21a-4956-8192-10a0a41409c7-operator-scripts\") pod \"barbican-db-create-z7w7v\" (UID: \"1ccfacb0-c21a-4956-8192-10a0a41409c7\") " pod="openstack-kuttl-tests/barbican-db-create-z7w7v" Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.128351 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6f96bd13-4e3b-4731-8b13-7325df4bdd25-operator-scripts\") pod \"barbican-d010-account-create-update-tx77w\" (UID: \"6f96bd13-4e3b-4731-8b13-7325df4bdd25\") " pod="openstack-kuttl-tests/barbican-d010-account-create-update-tx77w" Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.129002 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6f96bd13-4e3b-4731-8b13-7325df4bdd25-operator-scripts\") pod \"barbican-d010-account-create-update-tx77w\" (UID: \"6f96bd13-4e3b-4731-8b13-7325df4bdd25\") " pod="openstack-kuttl-tests/barbican-d010-account-create-update-tx77w" Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.129775 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1ccfacb0-c21a-4956-8192-10a0a41409c7-operator-scripts\") pod \"barbican-db-create-z7w7v\" (UID: \"1ccfacb0-c21a-4956-8192-10a0a41409c7\") " pod="openstack-kuttl-tests/barbican-db-create-z7w7v" Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.131619 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-db-create-vdx94" Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.154333 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fb4hd\" (UniqueName: \"kubernetes.io/projected/1ccfacb0-c21a-4956-8192-10a0a41409c7-kube-api-access-fb4hd\") pod \"barbican-db-create-z7w7v\" (UID: \"1ccfacb0-c21a-4956-8192-10a0a41409c7\") " pod="openstack-kuttl-tests/barbican-db-create-z7w7v" Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.155398 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5hjjh\" (UniqueName: \"kubernetes.io/projected/6f96bd13-4e3b-4731-8b13-7325df4bdd25-kube-api-access-5hjjh\") pod \"barbican-d010-account-create-update-tx77w\" (UID: \"6f96bd13-4e3b-4731-8b13-7325df4bdd25\") " pod="openstack-kuttl-tests/barbican-d010-account-create-update-tx77w" Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.190296 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-db-sync-qq5nx"] Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.191875 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-db-sync-qq5nx" Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.196593 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone" Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.196859 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-keystone-dockercfg-n8z97" Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.196980 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-scripts" Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.197106 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-config-data" Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.202231 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-db-sync-qq5nx"] Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.215731 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-db-create-z7w7v" Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.229376 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e2328b3a-4982-4596-a11d-afb6ba3a4915-operator-scripts\") pod \"cinder-1d9a-account-create-update-7hqxp\" (UID: \"e2328b3a-4982-4596-a11d-afb6ba3a4915\") " pod="openstack-kuttl-tests/cinder-1d9a-account-create-update-7hqxp" Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.229528 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5938c698-384b-4c5c-9c62-10f2a8a0ad1a-operator-scripts\") pod \"neutron-db-create-kpclp\" (UID: \"5938c698-384b-4c5c-9c62-10f2a8a0ad1a\") " pod="openstack-kuttl-tests/neutron-db-create-kpclp" Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.229588 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j22sm\" (UniqueName: \"kubernetes.io/projected/e2328b3a-4982-4596-a11d-afb6ba3a4915-kube-api-access-j22sm\") pod \"cinder-1d9a-account-create-update-7hqxp\" (UID: \"e2328b3a-4982-4596-a11d-afb6ba3a4915\") " pod="openstack-kuttl-tests/cinder-1d9a-account-create-update-7hqxp" Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.229609 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-frtd7\" (UniqueName: \"kubernetes.io/projected/5938c698-384b-4c5c-9c62-10f2a8a0ad1a-kube-api-access-frtd7\") pod \"neutron-db-create-kpclp\" (UID: \"5938c698-384b-4c5c-9c62-10f2a8a0ad1a\") " pod="openstack-kuttl-tests/neutron-db-create-kpclp" Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.231059 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-d010-account-create-update-tx77w" Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.306621 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-247a-account-create-update-x7dvx"] Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.308190 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-247a-account-create-update-x7dvx" Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.311978 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-db-secret" Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.329233 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-247a-account-create-update-x7dvx"] Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.331802 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5938c698-384b-4c5c-9c62-10f2a8a0ad1a-operator-scripts\") pod \"neutron-db-create-kpclp\" (UID: \"5938c698-384b-4c5c-9c62-10f2a8a0ad1a\") " pod="openstack-kuttl-tests/neutron-db-create-kpclp" Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.331856 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jgdcv\" (UniqueName: \"kubernetes.io/projected/346ea378-eb04-403c-8c0f-0f8c3d2debe8-kube-api-access-jgdcv\") pod \"keystone-db-sync-qq5nx\" (UID: \"346ea378-eb04-403c-8c0f-0f8c3d2debe8\") " pod="openstack-kuttl-tests/keystone-db-sync-qq5nx" Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.331909 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j22sm\" (UniqueName: \"kubernetes.io/projected/e2328b3a-4982-4596-a11d-afb6ba3a4915-kube-api-access-j22sm\") pod \"cinder-1d9a-account-create-update-7hqxp\" (UID: \"e2328b3a-4982-4596-a11d-afb6ba3a4915\") " pod="openstack-kuttl-tests/cinder-1d9a-account-create-update-7hqxp" Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.331927 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-frtd7\" (UniqueName: \"kubernetes.io/projected/5938c698-384b-4c5c-9c62-10f2a8a0ad1a-kube-api-access-frtd7\") pod \"neutron-db-create-kpclp\" (UID: \"5938c698-384b-4c5c-9c62-10f2a8a0ad1a\") " pod="openstack-kuttl-tests/neutron-db-create-kpclp" Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.331975 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/346ea378-eb04-403c-8c0f-0f8c3d2debe8-config-data\") pod \"keystone-db-sync-qq5nx\" (UID: \"346ea378-eb04-403c-8c0f-0f8c3d2debe8\") " pod="openstack-kuttl-tests/keystone-db-sync-qq5nx" Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.332012 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/346ea378-eb04-403c-8c0f-0f8c3d2debe8-combined-ca-bundle\") pod \"keystone-db-sync-qq5nx\" (UID: \"346ea378-eb04-403c-8c0f-0f8c3d2debe8\") " pod="openstack-kuttl-tests/keystone-db-sync-qq5nx" Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.332053 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e2328b3a-4982-4596-a11d-afb6ba3a4915-operator-scripts\") pod \"cinder-1d9a-account-create-update-7hqxp\" (UID: \"e2328b3a-4982-4596-a11d-afb6ba3a4915\") " pod="openstack-kuttl-tests/cinder-1d9a-account-create-update-7hqxp" Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.332823 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/e2328b3a-4982-4596-a11d-afb6ba3a4915-operator-scripts\") pod \"cinder-1d9a-account-create-update-7hqxp\" (UID: \"e2328b3a-4982-4596-a11d-afb6ba3a4915\") " pod="openstack-kuttl-tests/cinder-1d9a-account-create-update-7hqxp" Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.332913 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5938c698-384b-4c5c-9c62-10f2a8a0ad1a-operator-scripts\") pod \"neutron-db-create-kpclp\" (UID: \"5938c698-384b-4c5c-9c62-10f2a8a0ad1a\") " pod="openstack-kuttl-tests/neutron-db-create-kpclp" Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.348676 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-frtd7\" (UniqueName: \"kubernetes.io/projected/5938c698-384b-4c5c-9c62-10f2a8a0ad1a-kube-api-access-frtd7\") pod \"neutron-db-create-kpclp\" (UID: \"5938c698-384b-4c5c-9c62-10f2a8a0ad1a\") " pod="openstack-kuttl-tests/neutron-db-create-kpclp" Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.349209 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j22sm\" (UniqueName: \"kubernetes.io/projected/e2328b3a-4982-4596-a11d-afb6ba3a4915-kube-api-access-j22sm\") pod \"cinder-1d9a-account-create-update-7hqxp\" (UID: \"e2328b3a-4982-4596-a11d-afb6ba3a4915\") " pod="openstack-kuttl-tests/cinder-1d9a-account-create-update-7hqxp" Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.402685 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-1d9a-account-create-update-7hqxp" Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.422353 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-db-create-kpclp" Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.434372 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/346ea378-eb04-403c-8c0f-0f8c3d2debe8-combined-ca-bundle\") pod \"keystone-db-sync-qq5nx\" (UID: \"346ea378-eb04-403c-8c0f-0f8c3d2debe8\") " pod="openstack-kuttl-tests/keystone-db-sync-qq5nx" Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.434508 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jgdcv\" (UniqueName: \"kubernetes.io/projected/346ea378-eb04-403c-8c0f-0f8c3d2debe8-kube-api-access-jgdcv\") pod \"keystone-db-sync-qq5nx\" (UID: \"346ea378-eb04-403c-8c0f-0f8c3d2debe8\") " pod="openstack-kuttl-tests/keystone-db-sync-qq5nx" Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.434575 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/56e762d6-78f3-4e7e-8b7b-2b8057aedbdf-operator-scripts\") pod \"neutron-247a-account-create-update-x7dvx\" (UID: \"56e762d6-78f3-4e7e-8b7b-2b8057aedbdf\") " pod="openstack-kuttl-tests/neutron-247a-account-create-update-x7dvx" Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.434609 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hkhv7\" (UniqueName: \"kubernetes.io/projected/56e762d6-78f3-4e7e-8b7b-2b8057aedbdf-kube-api-access-hkhv7\") pod \"neutron-247a-account-create-update-x7dvx\" (UID: \"56e762d6-78f3-4e7e-8b7b-2b8057aedbdf\") " pod="openstack-kuttl-tests/neutron-247a-account-create-update-x7dvx" Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.434742 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/346ea378-eb04-403c-8c0f-0f8c3d2debe8-config-data\") pod \"keystone-db-sync-qq5nx\" (UID: \"346ea378-eb04-403c-8c0f-0f8c3d2debe8\") " pod="openstack-kuttl-tests/keystone-db-sync-qq5nx" Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.441973 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/346ea378-eb04-403c-8c0f-0f8c3d2debe8-combined-ca-bundle\") pod \"keystone-db-sync-qq5nx\" (UID: \"346ea378-eb04-403c-8c0f-0f8c3d2debe8\") " pod="openstack-kuttl-tests/keystone-db-sync-qq5nx" Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.443431 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/346ea378-eb04-403c-8c0f-0f8c3d2debe8-config-data\") pod \"keystone-db-sync-qq5nx\" (UID: \"346ea378-eb04-403c-8c0f-0f8c3d2debe8\") " pod="openstack-kuttl-tests/keystone-db-sync-qq5nx" Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.458927 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jgdcv\" (UniqueName: \"kubernetes.io/projected/346ea378-eb04-403c-8c0f-0f8c3d2debe8-kube-api-access-jgdcv\") pod \"keystone-db-sync-qq5nx\" (UID: \"346ea378-eb04-403c-8c0f-0f8c3d2debe8\") " pod="openstack-kuttl-tests/keystone-db-sync-qq5nx" Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.536740 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/56e762d6-78f3-4e7e-8b7b-2b8057aedbdf-operator-scripts\") pod \"neutron-247a-account-create-update-x7dvx\" (UID: \"56e762d6-78f3-4e7e-8b7b-2b8057aedbdf\") " pod="openstack-kuttl-tests/neutron-247a-account-create-update-x7dvx" Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.536796 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hkhv7\" (UniqueName: \"kubernetes.io/projected/56e762d6-78f3-4e7e-8b7b-2b8057aedbdf-kube-api-access-hkhv7\") pod \"neutron-247a-account-create-update-x7dvx\" (UID: \"56e762d6-78f3-4e7e-8b7b-2b8057aedbdf\") " pod="openstack-kuttl-tests/neutron-247a-account-create-update-x7dvx" Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.537397 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/56e762d6-78f3-4e7e-8b7b-2b8057aedbdf-operator-scripts\") pod \"neutron-247a-account-create-update-x7dvx\" (UID: \"56e762d6-78f3-4e7e-8b7b-2b8057aedbdf\") " pod="openstack-kuttl-tests/neutron-247a-account-create-update-x7dvx" Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.540121 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-db-sync-qq5nx" Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.554399 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hkhv7\" (UniqueName: \"kubernetes.io/projected/56e762d6-78f3-4e7e-8b7b-2b8057aedbdf-kube-api-access-hkhv7\") pod \"neutron-247a-account-create-update-x7dvx\" (UID: \"56e762d6-78f3-4e7e-8b7b-2b8057aedbdf\") " pod="openstack-kuttl-tests/neutron-247a-account-create-update-x7dvx" Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.637654 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-247a-account-create-update-x7dvx" Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.667931 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-1d9a-account-create-update-7hqxp"] Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.709477 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-db-create-vdx94"] Jan 20 17:18:21 crc kubenswrapper[4558]: W0120 17:18:21.761676 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5938c698_384b_4c5c_9c62_10f2a8a0ad1a.slice/crio-3285b49d2fc2be6676607e20b7f9c0ae884e5b15380c28a3784a94b4070b183d WatchSource:0}: Error finding container 3285b49d2fc2be6676607e20b7f9c0ae884e5b15380c28a3784a94b4070b183d: Status 404 returned error can't find the container with id 3285b49d2fc2be6676607e20b7f9c0ae884e5b15380c28a3784a94b4070b183d Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.778426 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-db-create-kpclp"] Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.814626 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-db-create-z7w7v"] Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.860933 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-d010-account-create-update-tx77w"] Jan 20 17:18:21 crc kubenswrapper[4558]: I0120 17:18:21.916743 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-db-sync-qq5nx"] Jan 20 17:18:22 crc kubenswrapper[4558]: I0120 17:18:22.122215 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-create-kpclp" event={"ID":"5938c698-384b-4c5c-9c62-10f2a8a0ad1a","Type":"ContainerStarted","Data":"3285b49d2fc2be6676607e20b7f9c0ae884e5b15380c28a3784a94b4070b183d"} Jan 20 17:18:22 crc kubenswrapper[4558]: I0120 17:18:22.124391 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-sync-qq5nx" event={"ID":"346ea378-eb04-403c-8c0f-0f8c3d2debe8","Type":"ContainerStarted","Data":"cebe7559cdc4399a6bd722ee51d9ae4b6186c12fb0b6db355f023e0e862dc25e"} Jan 20 17:18:22 crc kubenswrapper[4558]: I0120 17:18:22.129214 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-d010-account-create-update-tx77w" event={"ID":"6f96bd13-4e3b-4731-8b13-7325df4bdd25","Type":"ContainerStarted","Data":"c1e56c44de227b063892274f63dfb3f2dcc42e68e94bfc501f0b6d001bef25c7"} Jan 20 17:18:22 crc kubenswrapper[4558]: I0120 17:18:22.133606 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-create-z7w7v" event={"ID":"1ccfacb0-c21a-4956-8192-10a0a41409c7","Type":"ContainerStarted","Data":"62a87df6abc2a0a5d92f04c331690c365da4a4a0efdcf2880ff5b28f5d117ebb"} Jan 20 17:18:22 crc kubenswrapper[4558]: I0120 17:18:22.135347 4558 generic.go:334] "Generic (PLEG): container finished" podID="17c2543e-2ad7-4bfa-a28f-34be13c6b966" containerID="4c46659a6361af98b90de1cf221853c404529cfbd89263f8d6864cc71db115a3" exitCode=0 Jan 20 17:18:22 crc kubenswrapper[4558]: I0120 17:18:22.135472 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-create-vdx94" 
event={"ID":"17c2543e-2ad7-4bfa-a28f-34be13c6b966","Type":"ContainerDied","Data":"4c46659a6361af98b90de1cf221853c404529cfbd89263f8d6864cc71db115a3"} Jan 20 17:18:22 crc kubenswrapper[4558]: I0120 17:18:22.135514 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-create-vdx94" event={"ID":"17c2543e-2ad7-4bfa-a28f-34be13c6b966","Type":"ContainerStarted","Data":"75a222d5ada0cf7b713da44a1071d7daf79cf07f72c825b946a630bedb21fd46"} Jan 20 17:18:22 crc kubenswrapper[4558]: I0120 17:18:22.137443 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-1d9a-account-create-update-7hqxp" event={"ID":"e2328b3a-4982-4596-a11d-afb6ba3a4915","Type":"ContainerStarted","Data":"a827e0be6d243551517e150283f3ed864ecaf4a9a6438dd376a0e18717e47a4e"} Jan 20 17:18:22 crc kubenswrapper[4558]: I0120 17:18:22.137472 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-1d9a-account-create-update-7hqxp" event={"ID":"e2328b3a-4982-4596-a11d-afb6ba3a4915","Type":"ContainerStarted","Data":"5882571b775dcf410299b98f8f71c63d52c7dc5e4bb0d2a122bd60cddbeb48f9"} Jan 20 17:18:22 crc kubenswrapper[4558]: I0120 17:18:22.146639 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-d010-account-create-update-tx77w" podStartSLOduration=2.146624361 podStartE2EDuration="2.146624361s" podCreationTimestamp="2026-01-20 17:18:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:18:22.142718697 +0000 UTC m=+2195.903056664" watchObservedRunningTime="2026-01-20 17:18:22.146624361 +0000 UTC m=+2195.906962449" Jan 20 17:18:22 crc kubenswrapper[4558]: I0120 17:18:22.159653 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/cinder-1d9a-account-create-update-7hqxp" podStartSLOduration=1.159640469 podStartE2EDuration="1.159640469s" podCreationTimestamp="2026-01-20 17:18:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:18:22.158442045 +0000 UTC m=+2195.918780012" watchObservedRunningTime="2026-01-20 17:18:22.159640469 +0000 UTC m=+2195.919978436" Jan 20 17:18:22 crc kubenswrapper[4558]: I0120 17:18:22.177388 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-db-create-z7w7v" podStartSLOduration=2.177376371 podStartE2EDuration="2.177376371s" podCreationTimestamp="2026-01-20 17:18:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:18:22.171471698 +0000 UTC m=+2195.931809664" watchObservedRunningTime="2026-01-20 17:18:22.177376371 +0000 UTC m=+2195.937714339" Jan 20 17:18:22 crc kubenswrapper[4558]: I0120 17:18:22.215464 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-247a-account-create-update-x7dvx"] Jan 20 17:18:23 crc kubenswrapper[4558]: I0120 17:18:23.150054 4558 generic.go:334] "Generic (PLEG): container finished" podID="e2328b3a-4982-4596-a11d-afb6ba3a4915" containerID="a827e0be6d243551517e150283f3ed864ecaf4a9a6438dd376a0e18717e47a4e" exitCode=0 Jan 20 17:18:23 crc kubenswrapper[4558]: I0120 17:18:23.150141 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-1d9a-account-create-update-7hqxp" 
event={"ID":"e2328b3a-4982-4596-a11d-afb6ba3a4915","Type":"ContainerDied","Data":"a827e0be6d243551517e150283f3ed864ecaf4a9a6438dd376a0e18717e47a4e"} Jan 20 17:18:23 crc kubenswrapper[4558]: I0120 17:18:23.153081 4558 generic.go:334] "Generic (PLEG): container finished" podID="5938c698-384b-4c5c-9c62-10f2a8a0ad1a" containerID="b0f6f424cc892d0b7647f8fab2e8274ac9636a2d5b6f30acee2ded1f86c9766c" exitCode=0 Jan 20 17:18:23 crc kubenswrapper[4558]: I0120 17:18:23.153181 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-create-kpclp" event={"ID":"5938c698-384b-4c5c-9c62-10f2a8a0ad1a","Type":"ContainerDied","Data":"b0f6f424cc892d0b7647f8fab2e8274ac9636a2d5b6f30acee2ded1f86c9766c"} Jan 20 17:18:23 crc kubenswrapper[4558]: I0120 17:18:23.155087 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-sync-qq5nx" event={"ID":"346ea378-eb04-403c-8c0f-0f8c3d2debe8","Type":"ContainerStarted","Data":"60492583629e26c3c32820e427ab609fadb431fe51ce67c01898e07bd052f7d1"} Jan 20 17:18:23 crc kubenswrapper[4558]: I0120 17:18:23.156416 4558 generic.go:334] "Generic (PLEG): container finished" podID="56e762d6-78f3-4e7e-8b7b-2b8057aedbdf" containerID="b5130f999056cd5a292684679ca6cc42bd24f28a031e1d30f86981b402f386cf" exitCode=0 Jan 20 17:18:23 crc kubenswrapper[4558]: I0120 17:18:23.156488 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-247a-account-create-update-x7dvx" event={"ID":"56e762d6-78f3-4e7e-8b7b-2b8057aedbdf","Type":"ContainerDied","Data":"b5130f999056cd5a292684679ca6cc42bd24f28a031e1d30f86981b402f386cf"} Jan 20 17:18:23 crc kubenswrapper[4558]: I0120 17:18:23.156531 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-247a-account-create-update-x7dvx" event={"ID":"56e762d6-78f3-4e7e-8b7b-2b8057aedbdf","Type":"ContainerStarted","Data":"aeb4aa630e0b3c3d3e1b4934bb2316973b3e3f0aacc02c7d224ba4fd68de0718"} Jan 20 17:18:23 crc kubenswrapper[4558]: I0120 17:18:23.158286 4558 generic.go:334] "Generic (PLEG): container finished" podID="6f96bd13-4e3b-4731-8b13-7325df4bdd25" containerID="9b285fb9c7ca41e6c11eae827e056e3101ae0ca582d9386a36e9e2ea6091713a" exitCode=0 Jan 20 17:18:23 crc kubenswrapper[4558]: I0120 17:18:23.158330 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-d010-account-create-update-tx77w" event={"ID":"6f96bd13-4e3b-4731-8b13-7325df4bdd25","Type":"ContainerDied","Data":"9b285fb9c7ca41e6c11eae827e056e3101ae0ca582d9386a36e9e2ea6091713a"} Jan 20 17:18:23 crc kubenswrapper[4558]: I0120 17:18:23.162088 4558 generic.go:334] "Generic (PLEG): container finished" podID="ef3b4518-9c22-4046-9053-142864162a50" containerID="de632d948732dd5a4e3d90e35e90a8df58b9bbb6f52788fd2e8da246880843ad" exitCode=0 Jan 20 17:18:23 crc kubenswrapper[4558]: I0120 17:18:23.162155 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hr4ld" event={"ID":"ef3b4518-9c22-4046-9053-142864162a50","Type":"ContainerDied","Data":"de632d948732dd5a4e3d90e35e90a8df58b9bbb6f52788fd2e8da246880843ad"} Jan 20 17:18:23 crc kubenswrapper[4558]: I0120 17:18:23.164095 4558 generic.go:334] "Generic (PLEG): container finished" podID="1ccfacb0-c21a-4956-8192-10a0a41409c7" containerID="f4436c3df66467d4db956a7e111480f4d349ef8b096e62dc047b678d6378ae35" exitCode=0 Jan 20 17:18:23 crc kubenswrapper[4558]: I0120 17:18:23.164215 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-kuttl-tests/barbican-db-create-z7w7v" event={"ID":"1ccfacb0-c21a-4956-8192-10a0a41409c7","Type":"ContainerDied","Data":"f4436c3df66467d4db956a7e111480f4d349ef8b096e62dc047b678d6378ae35"} Jan 20 17:18:23 crc kubenswrapper[4558]: I0120 17:18:23.229601 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/keystone-db-sync-qq5nx" podStartSLOduration=2.229580752 podStartE2EDuration="2.229580752s" podCreationTimestamp="2026-01-20 17:18:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:18:23.22303309 +0000 UTC m=+2196.983371056" watchObservedRunningTime="2026-01-20 17:18:23.229580752 +0000 UTC m=+2196.989918719" Jan 20 17:18:23 crc kubenswrapper[4558]: I0120 17:18:23.261206 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-k26sn" Jan 20 17:18:23 crc kubenswrapper[4558]: I0120 17:18:23.261268 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-k26sn" Jan 20 17:18:23 crc kubenswrapper[4558]: I0120 17:18:23.312321 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-k26sn" Jan 20 17:18:23 crc kubenswrapper[4558]: I0120 17:18:23.489079 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-db-create-vdx94" Jan 20 17:18:23 crc kubenswrapper[4558]: I0120 17:18:23.578596 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-btn2d\" (UniqueName: \"kubernetes.io/projected/17c2543e-2ad7-4bfa-a28f-34be13c6b966-kube-api-access-btn2d\") pod \"17c2543e-2ad7-4bfa-a28f-34be13c6b966\" (UID: \"17c2543e-2ad7-4bfa-a28f-34be13c6b966\") " Jan 20 17:18:23 crc kubenswrapper[4558]: I0120 17:18:23.578979 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/17c2543e-2ad7-4bfa-a28f-34be13c6b966-operator-scripts\") pod \"17c2543e-2ad7-4bfa-a28f-34be13c6b966\" (UID: \"17c2543e-2ad7-4bfa-a28f-34be13c6b966\") " Jan 20 17:18:23 crc kubenswrapper[4558]: I0120 17:18:23.579415 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/17c2543e-2ad7-4bfa-a28f-34be13c6b966-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "17c2543e-2ad7-4bfa-a28f-34be13c6b966" (UID: "17c2543e-2ad7-4bfa-a28f-34be13c6b966"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:18:23 crc kubenswrapper[4558]: I0120 17:18:23.584235 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/17c2543e-2ad7-4bfa-a28f-34be13c6b966-kube-api-access-btn2d" (OuterVolumeSpecName: "kube-api-access-btn2d") pod "17c2543e-2ad7-4bfa-a28f-34be13c6b966" (UID: "17c2543e-2ad7-4bfa-a28f-34be13c6b966"). InnerVolumeSpecName "kube-api-access-btn2d". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:18:23 crc kubenswrapper[4558]: I0120 17:18:23.682601 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-btn2d\" (UniqueName: \"kubernetes.io/projected/17c2543e-2ad7-4bfa-a28f-34be13c6b966-kube-api-access-btn2d\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:23 crc kubenswrapper[4558]: I0120 17:18:23.682627 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/17c2543e-2ad7-4bfa-a28f-34be13c6b966-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:24 crc kubenswrapper[4558]: I0120 17:18:24.174479 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-create-vdx94" event={"ID":"17c2543e-2ad7-4bfa-a28f-34be13c6b966","Type":"ContainerDied","Data":"75a222d5ada0cf7b713da44a1071d7daf79cf07f72c825b946a630bedb21fd46"} Jan 20 17:18:24 crc kubenswrapper[4558]: I0120 17:18:24.174771 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="75a222d5ada0cf7b713da44a1071d7daf79cf07f72c825b946a630bedb21fd46" Jan 20 17:18:24 crc kubenswrapper[4558]: I0120 17:18:24.174508 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-db-create-vdx94" Jan 20 17:18:24 crc kubenswrapper[4558]: I0120 17:18:24.177982 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hr4ld" event={"ID":"ef3b4518-9c22-4046-9053-142864162a50","Type":"ContainerStarted","Data":"68c705646962bc729eec8442fdff5e74884e6b5af688d6aaac7aedd41c048450"} Jan 20 17:18:24 crc kubenswrapper[4558]: I0120 17:18:24.209331 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-hr4ld" podStartSLOduration=2.544404199 podStartE2EDuration="5.20931919s" podCreationTimestamp="2026-01-20 17:18:19 +0000 UTC" firstStartedPulling="2026-01-20 17:18:21.110434806 +0000 UTC m=+2194.870772773" lastFinishedPulling="2026-01-20 17:18:23.775349787 +0000 UTC m=+2197.535687764" observedRunningTime="2026-01-20 17:18:24.204382187 +0000 UTC m=+2197.964720154" watchObservedRunningTime="2026-01-20 17:18:24.20931919 +0000 UTC m=+2197.969657158" Jan 20 17:18:24 crc kubenswrapper[4558]: I0120 17:18:24.217552 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-k26sn" Jan 20 17:18:24 crc kubenswrapper[4558]: I0120 17:18:24.474224 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-1d9a-account-create-update-7hqxp" Jan 20 17:18:24 crc kubenswrapper[4558]: I0120 17:18:24.610186 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-247a-account-create-update-x7dvx" Jan 20 17:18:24 crc kubenswrapper[4558]: I0120 17:18:24.613906 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e2328b3a-4982-4596-a11d-afb6ba3a4915-operator-scripts\") pod \"e2328b3a-4982-4596-a11d-afb6ba3a4915\" (UID: \"e2328b3a-4982-4596-a11d-afb6ba3a4915\") " Jan 20 17:18:24 crc kubenswrapper[4558]: I0120 17:18:24.614186 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j22sm\" (UniqueName: \"kubernetes.io/projected/e2328b3a-4982-4596-a11d-afb6ba3a4915-kube-api-access-j22sm\") pod \"e2328b3a-4982-4596-a11d-afb6ba3a4915\" (UID: \"e2328b3a-4982-4596-a11d-afb6ba3a4915\") " Jan 20 17:18:24 crc kubenswrapper[4558]: I0120 17:18:24.615401 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e2328b3a-4982-4596-a11d-afb6ba3a4915-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e2328b3a-4982-4596-a11d-afb6ba3a4915" (UID: "e2328b3a-4982-4596-a11d-afb6ba3a4915"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:18:24 crc kubenswrapper[4558]: I0120 17:18:24.624283 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e2328b3a-4982-4596-a11d-afb6ba3a4915-kube-api-access-j22sm" (OuterVolumeSpecName: "kube-api-access-j22sm") pod "e2328b3a-4982-4596-a11d-afb6ba3a4915" (UID: "e2328b3a-4982-4596-a11d-afb6ba3a4915"). InnerVolumeSpecName "kube-api-access-j22sm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:18:24 crc kubenswrapper[4558]: I0120 17:18:24.721066 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/56e762d6-78f3-4e7e-8b7b-2b8057aedbdf-operator-scripts\") pod \"56e762d6-78f3-4e7e-8b7b-2b8057aedbdf\" (UID: \"56e762d6-78f3-4e7e-8b7b-2b8057aedbdf\") " Jan 20 17:18:24 crc kubenswrapper[4558]: I0120 17:18:24.721118 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hkhv7\" (UniqueName: \"kubernetes.io/projected/56e762d6-78f3-4e7e-8b7b-2b8057aedbdf-kube-api-access-hkhv7\") pod \"56e762d6-78f3-4e7e-8b7b-2b8057aedbdf\" (UID: \"56e762d6-78f3-4e7e-8b7b-2b8057aedbdf\") " Jan 20 17:18:24 crc kubenswrapper[4558]: I0120 17:18:24.721620 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/56e762d6-78f3-4e7e-8b7b-2b8057aedbdf-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "56e762d6-78f3-4e7e-8b7b-2b8057aedbdf" (UID: "56e762d6-78f3-4e7e-8b7b-2b8057aedbdf"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:18:24 crc kubenswrapper[4558]: I0120 17:18:24.722456 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j22sm\" (UniqueName: \"kubernetes.io/projected/e2328b3a-4982-4596-a11d-afb6ba3a4915-kube-api-access-j22sm\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:24 crc kubenswrapper[4558]: I0120 17:18:24.722474 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e2328b3a-4982-4596-a11d-afb6ba3a4915-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:24 crc kubenswrapper[4558]: I0120 17:18:24.722483 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/56e762d6-78f3-4e7e-8b7b-2b8057aedbdf-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:24 crc kubenswrapper[4558]: I0120 17:18:24.726313 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/56e762d6-78f3-4e7e-8b7b-2b8057aedbdf-kube-api-access-hkhv7" (OuterVolumeSpecName: "kube-api-access-hkhv7") pod "56e762d6-78f3-4e7e-8b7b-2b8057aedbdf" (UID: "56e762d6-78f3-4e7e-8b7b-2b8057aedbdf"). InnerVolumeSpecName "kube-api-access-hkhv7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:18:24 crc kubenswrapper[4558]: I0120 17:18:24.738880 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-d010-account-create-update-tx77w" Jan 20 17:18:24 crc kubenswrapper[4558]: I0120 17:18:24.752755 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-k26sn"] Jan 20 17:18:24 crc kubenswrapper[4558]: I0120 17:18:24.762420 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-db-create-kpclp" Jan 20 17:18:24 crc kubenswrapper[4558]: I0120 17:18:24.766084 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-db-create-z7w7v" Jan 20 17:18:24 crc kubenswrapper[4558]: I0120 17:18:24.824084 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5hjjh\" (UniqueName: \"kubernetes.io/projected/6f96bd13-4e3b-4731-8b13-7325df4bdd25-kube-api-access-5hjjh\") pod \"6f96bd13-4e3b-4731-8b13-7325df4bdd25\" (UID: \"6f96bd13-4e3b-4731-8b13-7325df4bdd25\") " Jan 20 17:18:24 crc kubenswrapper[4558]: I0120 17:18:24.824153 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6f96bd13-4e3b-4731-8b13-7325df4bdd25-operator-scripts\") pod \"6f96bd13-4e3b-4731-8b13-7325df4bdd25\" (UID: \"6f96bd13-4e3b-4731-8b13-7325df4bdd25\") " Jan 20 17:18:24 crc kubenswrapper[4558]: I0120 17:18:24.824614 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hkhv7\" (UniqueName: \"kubernetes.io/projected/56e762d6-78f3-4e7e-8b7b-2b8057aedbdf-kube-api-access-hkhv7\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:24 crc kubenswrapper[4558]: I0120 17:18:24.830841 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6f96bd13-4e3b-4731-8b13-7325df4bdd25-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "6f96bd13-4e3b-4731-8b13-7325df4bdd25" (UID: "6f96bd13-4e3b-4731-8b13-7325df4bdd25"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:18:24 crc kubenswrapper[4558]: I0120 17:18:24.832335 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6f96bd13-4e3b-4731-8b13-7325df4bdd25-kube-api-access-5hjjh" (OuterVolumeSpecName: "kube-api-access-5hjjh") pod "6f96bd13-4e3b-4731-8b13-7325df4bdd25" (UID: "6f96bd13-4e3b-4731-8b13-7325df4bdd25"). InnerVolumeSpecName "kube-api-access-5hjjh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:18:24 crc kubenswrapper[4558]: I0120 17:18:24.925566 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fb4hd\" (UniqueName: \"kubernetes.io/projected/1ccfacb0-c21a-4956-8192-10a0a41409c7-kube-api-access-fb4hd\") pod \"1ccfacb0-c21a-4956-8192-10a0a41409c7\" (UID: \"1ccfacb0-c21a-4956-8192-10a0a41409c7\") " Jan 20 17:18:24 crc kubenswrapper[4558]: I0120 17:18:24.925640 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1ccfacb0-c21a-4956-8192-10a0a41409c7-operator-scripts\") pod \"1ccfacb0-c21a-4956-8192-10a0a41409c7\" (UID: \"1ccfacb0-c21a-4956-8192-10a0a41409c7\") " Jan 20 17:18:24 crc kubenswrapper[4558]: I0120 17:18:24.925802 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5938c698-384b-4c5c-9c62-10f2a8a0ad1a-operator-scripts\") pod \"5938c698-384b-4c5c-9c62-10f2a8a0ad1a\" (UID: \"5938c698-384b-4c5c-9c62-10f2a8a0ad1a\") " Jan 20 17:18:24 crc kubenswrapper[4558]: I0120 17:18:24.925847 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-frtd7\" (UniqueName: \"kubernetes.io/projected/5938c698-384b-4c5c-9c62-10f2a8a0ad1a-kube-api-access-frtd7\") pod \"5938c698-384b-4c5c-9c62-10f2a8a0ad1a\" (UID: \"5938c698-384b-4c5c-9c62-10f2a8a0ad1a\") " Jan 20 17:18:24 crc kubenswrapper[4558]: I0120 17:18:24.926284 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5938c698-384b-4c5c-9c62-10f2a8a0ad1a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "5938c698-384b-4c5c-9c62-10f2a8a0ad1a" (UID: "5938c698-384b-4c5c-9c62-10f2a8a0ad1a"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:18:24 crc kubenswrapper[4558]: I0120 17:18:24.926400 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1ccfacb0-c21a-4956-8192-10a0a41409c7-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "1ccfacb0-c21a-4956-8192-10a0a41409c7" (UID: "1ccfacb0-c21a-4956-8192-10a0a41409c7"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:18:24 crc kubenswrapper[4558]: I0120 17:18:24.926489 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5hjjh\" (UniqueName: \"kubernetes.io/projected/6f96bd13-4e3b-4731-8b13-7325df4bdd25-kube-api-access-5hjjh\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:24 crc kubenswrapper[4558]: I0120 17:18:24.926514 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6f96bd13-4e3b-4731-8b13-7325df4bdd25-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:24 crc kubenswrapper[4558]: I0120 17:18:24.926526 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5938c698-384b-4c5c-9c62-10f2a8a0ad1a-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:24 crc kubenswrapper[4558]: I0120 17:18:24.930043 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5938c698-384b-4c5c-9c62-10f2a8a0ad1a-kube-api-access-frtd7" (OuterVolumeSpecName: "kube-api-access-frtd7") pod "5938c698-384b-4c5c-9c62-10f2a8a0ad1a" (UID: "5938c698-384b-4c5c-9c62-10f2a8a0ad1a"). InnerVolumeSpecName "kube-api-access-frtd7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:18:24 crc kubenswrapper[4558]: I0120 17:18:24.930674 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1ccfacb0-c21a-4956-8192-10a0a41409c7-kube-api-access-fb4hd" (OuterVolumeSpecName: "kube-api-access-fb4hd") pod "1ccfacb0-c21a-4956-8192-10a0a41409c7" (UID: "1ccfacb0-c21a-4956-8192-10a0a41409c7"). InnerVolumeSpecName "kube-api-access-fb4hd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:18:25 crc kubenswrapper[4558]: I0120 17:18:25.029550 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1ccfacb0-c21a-4956-8192-10a0a41409c7-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:25 crc kubenswrapper[4558]: I0120 17:18:25.029587 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-frtd7\" (UniqueName: \"kubernetes.io/projected/5938c698-384b-4c5c-9c62-10f2a8a0ad1a-kube-api-access-frtd7\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:25 crc kubenswrapper[4558]: I0120 17:18:25.029598 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fb4hd\" (UniqueName: \"kubernetes.io/projected/1ccfacb0-c21a-4956-8192-10a0a41409c7-kube-api-access-fb4hd\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:25 crc kubenswrapper[4558]: I0120 17:18:25.194619 4558 generic.go:334] "Generic (PLEG): container finished" podID="346ea378-eb04-403c-8c0f-0f8c3d2debe8" containerID="60492583629e26c3c32820e427ab609fadb431fe51ce67c01898e07bd052f7d1" exitCode=0 Jan 20 17:18:25 crc kubenswrapper[4558]: I0120 17:18:25.194682 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-sync-qq5nx" event={"ID":"346ea378-eb04-403c-8c0f-0f8c3d2debe8","Type":"ContainerDied","Data":"60492583629e26c3c32820e427ab609fadb431fe51ce67c01898e07bd052f7d1"} Jan 20 17:18:25 crc kubenswrapper[4558]: I0120 17:18:25.196206 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-247a-account-create-update-x7dvx" event={"ID":"56e762d6-78f3-4e7e-8b7b-2b8057aedbdf","Type":"ContainerDied","Data":"aeb4aa630e0b3c3d3e1b4934bb2316973b3e3f0aacc02c7d224ba4fd68de0718"} 
Jan 20 17:18:25 crc kubenswrapper[4558]: I0120 17:18:25.196232 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="aeb4aa630e0b3c3d3e1b4934bb2316973b3e3f0aacc02c7d224ba4fd68de0718" Jan 20 17:18:25 crc kubenswrapper[4558]: I0120 17:18:25.196272 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-247a-account-create-update-x7dvx" Jan 20 17:18:25 crc kubenswrapper[4558]: I0120 17:18:25.202711 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-d010-account-create-update-tx77w" event={"ID":"6f96bd13-4e3b-4731-8b13-7325df4bdd25","Type":"ContainerDied","Data":"c1e56c44de227b063892274f63dfb3f2dcc42e68e94bfc501f0b6d001bef25c7"} Jan 20 17:18:25 crc kubenswrapper[4558]: I0120 17:18:25.202762 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c1e56c44de227b063892274f63dfb3f2dcc42e68e94bfc501f0b6d001bef25c7" Jan 20 17:18:25 crc kubenswrapper[4558]: I0120 17:18:25.202734 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-d010-account-create-update-tx77w" Jan 20 17:18:25 crc kubenswrapper[4558]: I0120 17:18:25.204720 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-db-create-z7w7v" Jan 20 17:18:25 crc kubenswrapper[4558]: I0120 17:18:25.204731 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-create-z7w7v" event={"ID":"1ccfacb0-c21a-4956-8192-10a0a41409c7","Type":"ContainerDied","Data":"62a87df6abc2a0a5d92f04c331690c365da4a4a0efdcf2880ff5b28f5d117ebb"} Jan 20 17:18:25 crc kubenswrapper[4558]: I0120 17:18:25.204769 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="62a87df6abc2a0a5d92f04c331690c365da4a4a0efdcf2880ff5b28f5d117ebb" Jan 20 17:18:25 crc kubenswrapper[4558]: I0120 17:18:25.205941 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-1d9a-account-create-update-7hqxp" event={"ID":"e2328b3a-4982-4596-a11d-afb6ba3a4915","Type":"ContainerDied","Data":"5882571b775dcf410299b98f8f71c63d52c7dc5e4bb0d2a122bd60cddbeb48f9"} Jan 20 17:18:25 crc kubenswrapper[4558]: I0120 17:18:25.205967 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5882571b775dcf410299b98f8f71c63d52c7dc5e4bb0d2a122bd60cddbeb48f9" Jan 20 17:18:25 crc kubenswrapper[4558]: I0120 17:18:25.205986 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-1d9a-account-create-update-7hqxp" Jan 20 17:18:25 crc kubenswrapper[4558]: I0120 17:18:25.207028 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-create-kpclp" event={"ID":"5938c698-384b-4c5c-9c62-10f2a8a0ad1a","Type":"ContainerDied","Data":"3285b49d2fc2be6676607e20b7f9c0ae884e5b15380c28a3784a94b4070b183d"} Jan 20 17:18:25 crc kubenswrapper[4558]: I0120 17:18:25.207054 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3285b49d2fc2be6676607e20b7f9c0ae884e5b15380c28a3784a94b4070b183d" Jan 20 17:18:25 crc kubenswrapper[4558]: I0120 17:18:25.207066 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-db-create-kpclp" Jan 20 17:18:25 crc kubenswrapper[4558]: I0120 17:18:25.566971 4558 scope.go:117] "RemoveContainer" containerID="0ffce44366a8b4466427b682fb41640425366a0f4449210959148de9aef695c6" Jan 20 17:18:25 crc kubenswrapper[4558]: E0120 17:18:25.567358 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:18:26 crc kubenswrapper[4558]: I0120 17:18:26.217236 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-k26sn" podUID="f4b9badf-c4fb-42f0-9e4e-229e6d19ba09" containerName="registry-server" containerID="cri-o://d6d27a0865b41eef9e004631f8e86859ba3c7c4c9a8df731efc298d9e05d77d6" gracePeriod=2 Jan 20 17:18:26 crc kubenswrapper[4558]: I0120 17:18:26.500496 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-db-sync-qq5nx" Jan 20 17:18:26 crc kubenswrapper[4558]: I0120 17:18:26.664190 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jgdcv\" (UniqueName: \"kubernetes.io/projected/346ea378-eb04-403c-8c0f-0f8c3d2debe8-kube-api-access-jgdcv\") pod \"346ea378-eb04-403c-8c0f-0f8c3d2debe8\" (UID: \"346ea378-eb04-403c-8c0f-0f8c3d2debe8\") " Jan 20 17:18:26 crc kubenswrapper[4558]: I0120 17:18:26.664360 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/346ea378-eb04-403c-8c0f-0f8c3d2debe8-config-data\") pod \"346ea378-eb04-403c-8c0f-0f8c3d2debe8\" (UID: \"346ea378-eb04-403c-8c0f-0f8c3d2debe8\") " Jan 20 17:18:26 crc kubenswrapper[4558]: I0120 17:18:26.664599 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/346ea378-eb04-403c-8c0f-0f8c3d2debe8-combined-ca-bundle\") pod \"346ea378-eb04-403c-8c0f-0f8c3d2debe8\" (UID: \"346ea378-eb04-403c-8c0f-0f8c3d2debe8\") " Jan 20 17:18:26 crc kubenswrapper[4558]: I0120 17:18:26.672610 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/346ea378-eb04-403c-8c0f-0f8c3d2debe8-kube-api-access-jgdcv" (OuterVolumeSpecName: "kube-api-access-jgdcv") pod "346ea378-eb04-403c-8c0f-0f8c3d2debe8" (UID: "346ea378-eb04-403c-8c0f-0f8c3d2debe8"). InnerVolumeSpecName "kube-api-access-jgdcv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:18:26 crc kubenswrapper[4558]: I0120 17:18:26.692491 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/346ea378-eb04-403c-8c0f-0f8c3d2debe8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "346ea378-eb04-403c-8c0f-0f8c3d2debe8" (UID: "346ea378-eb04-403c-8c0f-0f8c3d2debe8"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:18:26 crc kubenswrapper[4558]: I0120 17:18:26.708722 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/346ea378-eb04-403c-8c0f-0f8c3d2debe8-config-data" (OuterVolumeSpecName: "config-data") pod "346ea378-eb04-403c-8c0f-0f8c3d2debe8" (UID: "346ea378-eb04-403c-8c0f-0f8c3d2debe8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:18:26 crc kubenswrapper[4558]: I0120 17:18:26.767634 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/346ea378-eb04-403c-8c0f-0f8c3d2debe8-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:26 crc kubenswrapper[4558]: I0120 17:18:26.767666 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/346ea378-eb04-403c-8c0f-0f8c3d2debe8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:26 crc kubenswrapper[4558]: I0120 17:18:26.767680 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jgdcv\" (UniqueName: \"kubernetes.io/projected/346ea378-eb04-403c-8c0f-0f8c3d2debe8-kube-api-access-jgdcv\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.097971 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-k26sn" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.174853 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ckwrn\" (UniqueName: \"kubernetes.io/projected/f4b9badf-c4fb-42f0-9e4e-229e6d19ba09-kube-api-access-ckwrn\") pod \"f4b9badf-c4fb-42f0-9e4e-229e6d19ba09\" (UID: \"f4b9badf-c4fb-42f0-9e4e-229e6d19ba09\") " Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.175309 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f4b9badf-c4fb-42f0-9e4e-229e6d19ba09-catalog-content\") pod \"f4b9badf-c4fb-42f0-9e4e-229e6d19ba09\" (UID: \"f4b9badf-c4fb-42f0-9e4e-229e6d19ba09\") " Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.175359 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f4b9badf-c4fb-42f0-9e4e-229e6d19ba09-utilities\") pod \"f4b9badf-c4fb-42f0-9e4e-229e6d19ba09\" (UID: \"f4b9badf-c4fb-42f0-9e4e-229e6d19ba09\") " Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.176015 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f4b9badf-c4fb-42f0-9e4e-229e6d19ba09-utilities" (OuterVolumeSpecName: "utilities") pod "f4b9badf-c4fb-42f0-9e4e-229e6d19ba09" (UID: "f4b9badf-c4fb-42f0-9e4e-229e6d19ba09"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.178395 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f4b9badf-c4fb-42f0-9e4e-229e6d19ba09-kube-api-access-ckwrn" (OuterVolumeSpecName: "kube-api-access-ckwrn") pod "f4b9badf-c4fb-42f0-9e4e-229e6d19ba09" (UID: "f4b9badf-c4fb-42f0-9e4e-229e6d19ba09"). InnerVolumeSpecName "kube-api-access-ckwrn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.234012 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-sync-qq5nx" event={"ID":"346ea378-eb04-403c-8c0f-0f8c3d2debe8","Type":"ContainerDied","Data":"cebe7559cdc4399a6bd722ee51d9ae4b6186c12fb0b6db355f023e0e862dc25e"} Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.234062 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cebe7559cdc4399a6bd722ee51d9ae4b6186c12fb0b6db355f023e0e862dc25e" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.234016 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-db-sync-qq5nx" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.238182 4558 generic.go:334] "Generic (PLEG): container finished" podID="f4b9badf-c4fb-42f0-9e4e-229e6d19ba09" containerID="d6d27a0865b41eef9e004631f8e86859ba3c7c4c9a8df731efc298d9e05d77d6" exitCode=0 Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.238198 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-k26sn" event={"ID":"f4b9badf-c4fb-42f0-9e4e-229e6d19ba09","Type":"ContainerDied","Data":"d6d27a0865b41eef9e004631f8e86859ba3c7c4c9a8df731efc298d9e05d77d6"} Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.238305 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-k26sn" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.238302 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-k26sn" event={"ID":"f4b9badf-c4fb-42f0-9e4e-229e6d19ba09","Type":"ContainerDied","Data":"bbcd2b6cca74121d9afac671b41701358ae90f47c7c841cca614c3fadd65e1f1"} Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.238342 4558 scope.go:117] "RemoveContainer" containerID="d6d27a0865b41eef9e004631f8e86859ba3c7c4c9a8df731efc298d9e05d77d6" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.272686 4558 scope.go:117] "RemoveContainer" containerID="5cd8a9ea46f08e9645c0767eadb2e0762e2cc9ecfc79d64ae160a3cc92b68dd0" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.273425 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f4b9badf-c4fb-42f0-9e4e-229e6d19ba09-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f4b9badf-c4fb-42f0-9e4e-229e6d19ba09" (UID: "f4b9badf-c4fb-42f0-9e4e-229e6d19ba09"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.277997 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ckwrn\" (UniqueName: \"kubernetes.io/projected/f4b9badf-c4fb-42f0-9e4e-229e6d19ba09-kube-api-access-ckwrn\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.278028 4558 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f4b9badf-c4fb-42f0-9e4e-229e6d19ba09-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.278040 4558 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f4b9badf-c4fb-42f0-9e4e-229e6d19ba09-utilities\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.294208 4558 scope.go:117] "RemoveContainer" containerID="0cdbf90271d7eca20f84fd86045926e0c546b432590bb3e3392ed6b4365042d8" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.310978 4558 scope.go:117] "RemoveContainer" containerID="d6d27a0865b41eef9e004631f8e86859ba3c7c4c9a8df731efc298d9e05d77d6" Jan 20 17:18:27 crc kubenswrapper[4558]: E0120 17:18:27.311317 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d6d27a0865b41eef9e004631f8e86859ba3c7c4c9a8df731efc298d9e05d77d6\": container with ID starting with d6d27a0865b41eef9e004631f8e86859ba3c7c4c9a8df731efc298d9e05d77d6 not found: ID does not exist" containerID="d6d27a0865b41eef9e004631f8e86859ba3c7c4c9a8df731efc298d9e05d77d6" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.311375 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d6d27a0865b41eef9e004631f8e86859ba3c7c4c9a8df731efc298d9e05d77d6"} err="failed to get container status \"d6d27a0865b41eef9e004631f8e86859ba3c7c4c9a8df731efc298d9e05d77d6\": rpc error: code = NotFound desc = could not find container \"d6d27a0865b41eef9e004631f8e86859ba3c7c4c9a8df731efc298d9e05d77d6\": container with ID starting with d6d27a0865b41eef9e004631f8e86859ba3c7c4c9a8df731efc298d9e05d77d6 not found: ID does not exist" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.311402 4558 scope.go:117] "RemoveContainer" containerID="5cd8a9ea46f08e9645c0767eadb2e0762e2cc9ecfc79d64ae160a3cc92b68dd0" Jan 20 17:18:27 crc kubenswrapper[4558]: E0120 17:18:27.311667 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5cd8a9ea46f08e9645c0767eadb2e0762e2cc9ecfc79d64ae160a3cc92b68dd0\": container with ID starting with 5cd8a9ea46f08e9645c0767eadb2e0762e2cc9ecfc79d64ae160a3cc92b68dd0 not found: ID does not exist" containerID="5cd8a9ea46f08e9645c0767eadb2e0762e2cc9ecfc79d64ae160a3cc92b68dd0" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.311710 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5cd8a9ea46f08e9645c0767eadb2e0762e2cc9ecfc79d64ae160a3cc92b68dd0"} err="failed to get container status \"5cd8a9ea46f08e9645c0767eadb2e0762e2cc9ecfc79d64ae160a3cc92b68dd0\": rpc error: code = NotFound desc = could not find container \"5cd8a9ea46f08e9645c0767eadb2e0762e2cc9ecfc79d64ae160a3cc92b68dd0\": container with ID starting with 5cd8a9ea46f08e9645c0767eadb2e0762e2cc9ecfc79d64ae160a3cc92b68dd0 not found: ID does not exist" Jan 20 17:18:27 crc 
kubenswrapper[4558]: I0120 17:18:27.311724 4558 scope.go:117] "RemoveContainer" containerID="0cdbf90271d7eca20f84fd86045926e0c546b432590bb3e3392ed6b4365042d8" Jan 20 17:18:27 crc kubenswrapper[4558]: E0120 17:18:27.312025 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0cdbf90271d7eca20f84fd86045926e0c546b432590bb3e3392ed6b4365042d8\": container with ID starting with 0cdbf90271d7eca20f84fd86045926e0c546b432590bb3e3392ed6b4365042d8 not found: ID does not exist" containerID="0cdbf90271d7eca20f84fd86045926e0c546b432590bb3e3392ed6b4365042d8" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.312059 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0cdbf90271d7eca20f84fd86045926e0c546b432590bb3e3392ed6b4365042d8"} err="failed to get container status \"0cdbf90271d7eca20f84fd86045926e0c546b432590bb3e3392ed6b4365042d8\": rpc error: code = NotFound desc = could not find container \"0cdbf90271d7eca20f84fd86045926e0c546b432590bb3e3392ed6b4365042d8\": container with ID starting with 0cdbf90271d7eca20f84fd86045926e0c546b432590bb3e3392ed6b4365042d8 not found: ID does not exist" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.573000 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-k26sn"] Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.581749 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-k26sn"] Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.681755 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-dgphh"] Jan 20 17:18:27 crc kubenswrapper[4558]: E0120 17:18:27.682101 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ccfacb0-c21a-4956-8192-10a0a41409c7" containerName="mariadb-database-create" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.682120 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ccfacb0-c21a-4956-8192-10a0a41409c7" containerName="mariadb-database-create" Jan 20 17:18:27 crc kubenswrapper[4558]: E0120 17:18:27.682141 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="17c2543e-2ad7-4bfa-a28f-34be13c6b966" containerName="mariadb-database-create" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.682148 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="17c2543e-2ad7-4bfa-a28f-34be13c6b966" containerName="mariadb-database-create" Jan 20 17:18:27 crc kubenswrapper[4558]: E0120 17:18:27.682154 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b9badf-c4fb-42f0-9e4e-229e6d19ba09" containerName="extract-content" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.682175 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b9badf-c4fb-42f0-9e4e-229e6d19ba09" containerName="extract-content" Jan 20 17:18:27 crc kubenswrapper[4558]: E0120 17:18:27.682185 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56e762d6-78f3-4e7e-8b7b-2b8057aedbdf" containerName="mariadb-account-create-update" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.682191 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="56e762d6-78f3-4e7e-8b7b-2b8057aedbdf" containerName="mariadb-account-create-update" Jan 20 17:18:27 crc kubenswrapper[4558]: E0120 17:18:27.682203 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b9badf-c4fb-42f0-9e4e-229e6d19ba09" 
containerName="extract-utilities" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.682209 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b9badf-c4fb-42f0-9e4e-229e6d19ba09" containerName="extract-utilities" Jan 20 17:18:27 crc kubenswrapper[4558]: E0120 17:18:27.682219 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2328b3a-4982-4596-a11d-afb6ba3a4915" containerName="mariadb-account-create-update" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.682224 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2328b3a-4982-4596-a11d-afb6ba3a4915" containerName="mariadb-account-create-update" Jan 20 17:18:27 crc kubenswrapper[4558]: E0120 17:18:27.682236 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b9badf-c4fb-42f0-9e4e-229e6d19ba09" containerName="registry-server" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.682243 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b9badf-c4fb-42f0-9e4e-229e6d19ba09" containerName="registry-server" Jan 20 17:18:27 crc kubenswrapper[4558]: E0120 17:18:27.682261 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f96bd13-4e3b-4731-8b13-7325df4bdd25" containerName="mariadb-account-create-update" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.682267 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f96bd13-4e3b-4731-8b13-7325df4bdd25" containerName="mariadb-account-create-update" Jan 20 17:18:27 crc kubenswrapper[4558]: E0120 17:18:27.682280 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="346ea378-eb04-403c-8c0f-0f8c3d2debe8" containerName="keystone-db-sync" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.682289 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="346ea378-eb04-403c-8c0f-0f8c3d2debe8" containerName="keystone-db-sync" Jan 20 17:18:27 crc kubenswrapper[4558]: E0120 17:18:27.682301 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5938c698-384b-4c5c-9c62-10f2a8a0ad1a" containerName="mariadb-database-create" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.682307 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="5938c698-384b-4c5c-9c62-10f2a8a0ad1a" containerName="mariadb-database-create" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.690832 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b9badf-c4fb-42f0-9e4e-229e6d19ba09" containerName="registry-server" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.690860 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="6f96bd13-4e3b-4731-8b13-7325df4bdd25" containerName="mariadb-account-create-update" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.690878 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="56e762d6-78f3-4e7e-8b7b-2b8057aedbdf" containerName="mariadb-account-create-update" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.690889 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="1ccfacb0-c21a-4956-8192-10a0a41409c7" containerName="mariadb-database-create" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.690900 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e2328b3a-4982-4596-a11d-afb6ba3a4915" containerName="mariadb-account-create-update" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.690906 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="5938c698-384b-4c5c-9c62-10f2a8a0ad1a" 
containerName="mariadb-database-create" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.690917 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="17c2543e-2ad7-4bfa-a28f-34be13c6b966" containerName="mariadb-database-create" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.690929 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="346ea378-eb04-403c-8c0f-0f8c3d2debe8" containerName="keystone-db-sync" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.691579 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-dgphh" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.695917 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-scripts" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.696019 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-config-data" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.696145 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.696301 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-keystone-dockercfg-n8z97" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.699346 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-dgphh"] Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.705743 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"osp-secret" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.789251 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/0b08b6e3-03a5-4dfa-aba3-2847529be269-credential-keys\") pod \"keystone-bootstrap-dgphh\" (UID: \"0b08b6e3-03a5-4dfa-aba3-2847529be269\") " pod="openstack-kuttl-tests/keystone-bootstrap-dgphh" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.789295 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0b08b6e3-03a5-4dfa-aba3-2847529be269-fernet-keys\") pod \"keystone-bootstrap-dgphh\" (UID: \"0b08b6e3-03a5-4dfa-aba3-2847529be269\") " pod="openstack-kuttl-tests/keystone-bootstrap-dgphh" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.789322 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0b08b6e3-03a5-4dfa-aba3-2847529be269-scripts\") pod \"keystone-bootstrap-dgphh\" (UID: \"0b08b6e3-03a5-4dfa-aba3-2847529be269\") " pod="openstack-kuttl-tests/keystone-bootstrap-dgphh" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.789395 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0b08b6e3-03a5-4dfa-aba3-2847529be269-config-data\") pod \"keystone-bootstrap-dgphh\" (UID: \"0b08b6e3-03a5-4dfa-aba3-2847529be269\") " pod="openstack-kuttl-tests/keystone-bootstrap-dgphh" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.789521 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/0b08b6e3-03a5-4dfa-aba3-2847529be269-combined-ca-bundle\") pod \"keystone-bootstrap-dgphh\" (UID: \"0b08b6e3-03a5-4dfa-aba3-2847529be269\") " pod="openstack-kuttl-tests/keystone-bootstrap-dgphh" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.789734 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7pd85\" (UniqueName: \"kubernetes.io/projected/0b08b6e3-03a5-4dfa-aba3-2847529be269-kube-api-access-7pd85\") pod \"keystone-bootstrap-dgphh\" (UID: \"0b08b6e3-03a5-4dfa-aba3-2847529be269\") " pod="openstack-kuttl-tests/keystone-bootstrap-dgphh" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.810659 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.812968 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.815257 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.815476 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.832803 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.878474 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-db-sync-682tw"] Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.879664 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-db-sync-682tw" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.885420 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-config" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.885596 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-httpd-config" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.885721 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-neutron-dockercfg-s7sh5" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.892363 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-db-sync-682tw"] Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.900991 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/037cfbe8-8c84-4901-9e77-9513f22e39a1-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"037cfbe8-8c84-4901-9e77-9513f22e39a1\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.901026 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wz2p2\" (UniqueName: \"kubernetes.io/projected/037cfbe8-8c84-4901-9e77-9513f22e39a1-kube-api-access-wz2p2\") pod \"ceilometer-0\" (UID: \"037cfbe8-8c84-4901-9e77-9513f22e39a1\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.901081 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/037cfbe8-8c84-4901-9e77-9513f22e39a1-scripts\") pod \"ceilometer-0\" (UID: \"037cfbe8-8c84-4901-9e77-9513f22e39a1\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.901121 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/037cfbe8-8c84-4901-9e77-9513f22e39a1-log-httpd\") pod \"ceilometer-0\" (UID: \"037cfbe8-8c84-4901-9e77-9513f22e39a1\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.901150 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/037cfbe8-8c84-4901-9e77-9513f22e39a1-run-httpd\") pod \"ceilometer-0\" (UID: \"037cfbe8-8c84-4901-9e77-9513f22e39a1\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.901194 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/0b08b6e3-03a5-4dfa-aba3-2847529be269-credential-keys\") pod \"keystone-bootstrap-dgphh\" (UID: \"0b08b6e3-03a5-4dfa-aba3-2847529be269\") " pod="openstack-kuttl-tests/keystone-bootstrap-dgphh" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.901215 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0b08b6e3-03a5-4dfa-aba3-2847529be269-fernet-keys\") pod \"keystone-bootstrap-dgphh\" (UID: \"0b08b6e3-03a5-4dfa-aba3-2847529be269\") " pod="openstack-kuttl-tests/keystone-bootstrap-dgphh" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.901230 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0b08b6e3-03a5-4dfa-aba3-2847529be269-scripts\") pod \"keystone-bootstrap-dgphh\" (UID: \"0b08b6e3-03a5-4dfa-aba3-2847529be269\") " pod="openstack-kuttl-tests/keystone-bootstrap-dgphh" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.901250 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/037cfbe8-8c84-4901-9e77-9513f22e39a1-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"037cfbe8-8c84-4901-9e77-9513f22e39a1\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.901278 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/037cfbe8-8c84-4901-9e77-9513f22e39a1-config-data\") pod \"ceilometer-0\" (UID: \"037cfbe8-8c84-4901-9e77-9513f22e39a1\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.901310 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0b08b6e3-03a5-4dfa-aba3-2847529be269-config-data\") pod \"keystone-bootstrap-dgphh\" (UID: \"0b08b6e3-03a5-4dfa-aba3-2847529be269\") " pod="openstack-kuttl-tests/keystone-bootstrap-dgphh" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.901328 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b08b6e3-03a5-4dfa-aba3-2847529be269-combined-ca-bundle\") pod \"keystone-bootstrap-dgphh\" (UID: 
\"0b08b6e3-03a5-4dfa-aba3-2847529be269\") " pod="openstack-kuttl-tests/keystone-bootstrap-dgphh" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.901355 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7pd85\" (UniqueName: \"kubernetes.io/projected/0b08b6e3-03a5-4dfa-aba3-2847529be269-kube-api-access-7pd85\") pod \"keystone-bootstrap-dgphh\" (UID: \"0b08b6e3-03a5-4dfa-aba3-2847529be269\") " pod="openstack-kuttl-tests/keystone-bootstrap-dgphh" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.916612 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0b08b6e3-03a5-4dfa-aba3-2847529be269-scripts\") pod \"keystone-bootstrap-dgphh\" (UID: \"0b08b6e3-03a5-4dfa-aba3-2847529be269\") " pod="openstack-kuttl-tests/keystone-bootstrap-dgphh" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.923388 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/placement-db-sync-dm58n"] Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.924677 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-db-sync-dm58n" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.924797 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0b08b6e3-03a5-4dfa-aba3-2847529be269-fernet-keys\") pod \"keystone-bootstrap-dgphh\" (UID: \"0b08b6e3-03a5-4dfa-aba3-2847529be269\") " pod="openstack-kuttl-tests/keystone-bootstrap-dgphh" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.924845 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0b08b6e3-03a5-4dfa-aba3-2847529be269-config-data\") pod \"keystone-bootstrap-dgphh\" (UID: \"0b08b6e3-03a5-4dfa-aba3-2847529be269\") " pod="openstack-kuttl-tests/keystone-bootstrap-dgphh" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.926039 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"placement-placement-dockercfg-tz8rh" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.930467 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"placement-scripts" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.930680 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"placement-config-data" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.933245 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b08b6e3-03a5-4dfa-aba3-2847529be269-combined-ca-bundle\") pod \"keystone-bootstrap-dgphh\" (UID: \"0b08b6e3-03a5-4dfa-aba3-2847529be269\") " pod="openstack-kuttl-tests/keystone-bootstrap-dgphh" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.938098 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7pd85\" (UniqueName: \"kubernetes.io/projected/0b08b6e3-03a5-4dfa-aba3-2847529be269-kube-api-access-7pd85\") pod \"keystone-bootstrap-dgphh\" (UID: \"0b08b6e3-03a5-4dfa-aba3-2847529be269\") " pod="openstack-kuttl-tests/keystone-bootstrap-dgphh" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.940009 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: 
\"kubernetes.io/secret/0b08b6e3-03a5-4dfa-aba3-2847529be269-credential-keys\") pod \"keystone-bootstrap-dgphh\" (UID: \"0b08b6e3-03a5-4dfa-aba3-2847529be269\") " pod="openstack-kuttl-tests/keystone-bootstrap-dgphh" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.958352 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-db-sync-4m92m"] Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.959684 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-db-sync-4m92m" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.966516 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-config-data" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.966724 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-scripts" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.967272 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-cinder-dockercfg-bl5xz" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.973963 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-db-sync-dm58n"] Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.986387 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-db-sync-4m92m"] Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.991869 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-db-sync-5vv4z"] Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.992839 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-db-sync-5vv4z" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.994265 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-config-data" Jan 20 17:18:27 crc kubenswrapper[4558]: I0120 17:18:27.994879 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-barbican-dockercfg-jggcf" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.006981 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/037cfbe8-8c84-4901-9e77-9513f22e39a1-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"037cfbe8-8c84-4901-9e77-9513f22e39a1\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.007078 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/037cfbe8-8c84-4901-9e77-9513f22e39a1-config-data\") pod \"ceilometer-0\" (UID: \"037cfbe8-8c84-4901-9e77-9513f22e39a1\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.007191 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/5659e8f8-3581-4ecf-a2ee-0c6d044b17e2-config\") pod \"neutron-db-sync-682tw\" (UID: \"5659e8f8-3581-4ecf-a2ee-0c6d044b17e2\") " pod="openstack-kuttl-tests/neutron-db-sync-682tw" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.007268 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/037cfbe8-8c84-4901-9e77-9513f22e39a1-sg-core-conf-yaml\") pod 
\"ceilometer-0\" (UID: \"037cfbe8-8c84-4901-9e77-9513f22e39a1\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.007291 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wz2p2\" (UniqueName: \"kubernetes.io/projected/037cfbe8-8c84-4901-9e77-9513f22e39a1-kube-api-access-wz2p2\") pod \"ceilometer-0\" (UID: \"037cfbe8-8c84-4901-9e77-9513f22e39a1\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.007332 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n5cbl\" (UniqueName: \"kubernetes.io/projected/5659e8f8-3581-4ecf-a2ee-0c6d044b17e2-kube-api-access-n5cbl\") pod \"neutron-db-sync-682tw\" (UID: \"5659e8f8-3581-4ecf-a2ee-0c6d044b17e2\") " pod="openstack-kuttl-tests/neutron-db-sync-682tw" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.007366 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5659e8f8-3581-4ecf-a2ee-0c6d044b17e2-combined-ca-bundle\") pod \"neutron-db-sync-682tw\" (UID: \"5659e8f8-3581-4ecf-a2ee-0c6d044b17e2\") " pod="openstack-kuttl-tests/neutron-db-sync-682tw" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.007407 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/037cfbe8-8c84-4901-9e77-9513f22e39a1-scripts\") pod \"ceilometer-0\" (UID: \"037cfbe8-8c84-4901-9e77-9513f22e39a1\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.007453 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/037cfbe8-8c84-4901-9e77-9513f22e39a1-log-httpd\") pod \"ceilometer-0\" (UID: \"037cfbe8-8c84-4901-9e77-9513f22e39a1\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.007484 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/037cfbe8-8c84-4901-9e77-9513f22e39a1-run-httpd\") pod \"ceilometer-0\" (UID: \"037cfbe8-8c84-4901-9e77-9513f22e39a1\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.007968 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/037cfbe8-8c84-4901-9e77-9513f22e39a1-run-httpd\") pod \"ceilometer-0\" (UID: \"037cfbe8-8c84-4901-9e77-9513f22e39a1\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.008257 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-dgphh" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.011542 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/037cfbe8-8c84-4901-9e77-9513f22e39a1-log-httpd\") pod \"ceilometer-0\" (UID: \"037cfbe8-8c84-4901-9e77-9513f22e39a1\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.034132 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-db-sync-5vv4z"] Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.044137 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/037cfbe8-8c84-4901-9e77-9513f22e39a1-scripts\") pod \"ceilometer-0\" (UID: \"037cfbe8-8c84-4901-9e77-9513f22e39a1\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.044250 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/037cfbe8-8c84-4901-9e77-9513f22e39a1-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"037cfbe8-8c84-4901-9e77-9513f22e39a1\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.045697 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/037cfbe8-8c84-4901-9e77-9513f22e39a1-config-data\") pod \"ceilometer-0\" (UID: \"037cfbe8-8c84-4901-9e77-9513f22e39a1\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.063576 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wz2p2\" (UniqueName: \"kubernetes.io/projected/037cfbe8-8c84-4901-9e77-9513f22e39a1-kube-api-access-wz2p2\") pod \"ceilometer-0\" (UID: \"037cfbe8-8c84-4901-9e77-9513f22e39a1\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.068836 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/037cfbe8-8c84-4901-9e77-9513f22e39a1-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"037cfbe8-8c84-4901-9e77-9513f22e39a1\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.118414 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7abf0081-40ee-4a29-b5fe-3a8c4c9f0214-logs\") pod \"placement-db-sync-dm58n\" (UID: \"7abf0081-40ee-4a29-b5fe-3a8c4c9f0214\") " pod="openstack-kuttl-tests/placement-db-sync-dm58n" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.118463 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7abf0081-40ee-4a29-b5fe-3a8c4c9f0214-scripts\") pod \"placement-db-sync-dm58n\" (UID: \"7abf0081-40ee-4a29-b5fe-3a8c4c9f0214\") " pod="openstack-kuttl-tests/placement-db-sync-dm58n" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.118524 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c720d172-36bf-467b-a900-dec2dbc43d3e-db-sync-config-data\") pod \"barbican-db-sync-5vv4z\" (UID: \"c720d172-36bf-467b-a900-dec2dbc43d3e\") " 
pod="openstack-kuttl-tests/barbican-db-sync-5vv4z" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.118557 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4efe1398-34aa-4835-8c35-790f9ec1d514-config-data\") pod \"cinder-db-sync-4m92m\" (UID: \"4efe1398-34aa-4835-8c35-790f9ec1d514\") " pod="openstack-kuttl-tests/cinder-db-sync-4m92m" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.118580 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7abf0081-40ee-4a29-b5fe-3a8c4c9f0214-combined-ca-bundle\") pod \"placement-db-sync-dm58n\" (UID: \"7abf0081-40ee-4a29-b5fe-3a8c4c9f0214\") " pod="openstack-kuttl-tests/placement-db-sync-dm58n" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.118694 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4efe1398-34aa-4835-8c35-790f9ec1d514-db-sync-config-data\") pod \"cinder-db-sync-4m92m\" (UID: \"4efe1398-34aa-4835-8c35-790f9ec1d514\") " pod="openstack-kuttl-tests/cinder-db-sync-4m92m" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.118718 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mqgg5\" (UniqueName: \"kubernetes.io/projected/4efe1398-34aa-4835-8c35-790f9ec1d514-kube-api-access-mqgg5\") pod \"cinder-db-sync-4m92m\" (UID: \"4efe1398-34aa-4835-8c35-790f9ec1d514\") " pod="openstack-kuttl-tests/cinder-db-sync-4m92m" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.118859 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pmqlj\" (UniqueName: \"kubernetes.io/projected/c720d172-36bf-467b-a900-dec2dbc43d3e-kube-api-access-pmqlj\") pod \"barbican-db-sync-5vv4z\" (UID: \"c720d172-36bf-467b-a900-dec2dbc43d3e\") " pod="openstack-kuttl-tests/barbican-db-sync-5vv4z" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.118954 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/5659e8f8-3581-4ecf-a2ee-0c6d044b17e2-config\") pod \"neutron-db-sync-682tw\" (UID: \"5659e8f8-3581-4ecf-a2ee-0c6d044b17e2\") " pod="openstack-kuttl-tests/neutron-db-sync-682tw" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.119000 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n5cbl\" (UniqueName: \"kubernetes.io/projected/5659e8f8-3581-4ecf-a2ee-0c6d044b17e2-kube-api-access-n5cbl\") pod \"neutron-db-sync-682tw\" (UID: \"5659e8f8-3581-4ecf-a2ee-0c6d044b17e2\") " pod="openstack-kuttl-tests/neutron-db-sync-682tw" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.119062 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5659e8f8-3581-4ecf-a2ee-0c6d044b17e2-combined-ca-bundle\") pod \"neutron-db-sync-682tw\" (UID: \"5659e8f8-3581-4ecf-a2ee-0c6d044b17e2\") " pod="openstack-kuttl-tests/neutron-db-sync-682tw" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.119112 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4efe1398-34aa-4835-8c35-790f9ec1d514-scripts\") pod 
\"cinder-db-sync-4m92m\" (UID: \"4efe1398-34aa-4835-8c35-790f9ec1d514\") " pod="openstack-kuttl-tests/cinder-db-sync-4m92m" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.119201 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rxhh2\" (UniqueName: \"kubernetes.io/projected/7abf0081-40ee-4a29-b5fe-3a8c4c9f0214-kube-api-access-rxhh2\") pod \"placement-db-sync-dm58n\" (UID: \"7abf0081-40ee-4a29-b5fe-3a8c4c9f0214\") " pod="openstack-kuttl-tests/placement-db-sync-dm58n" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.119224 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4efe1398-34aa-4835-8c35-790f9ec1d514-etc-machine-id\") pod \"cinder-db-sync-4m92m\" (UID: \"4efe1398-34aa-4835-8c35-790f9ec1d514\") " pod="openstack-kuttl-tests/cinder-db-sync-4m92m" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.119246 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4efe1398-34aa-4835-8c35-790f9ec1d514-combined-ca-bundle\") pod \"cinder-db-sync-4m92m\" (UID: \"4efe1398-34aa-4835-8c35-790f9ec1d514\") " pod="openstack-kuttl-tests/cinder-db-sync-4m92m" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.119292 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c720d172-36bf-467b-a900-dec2dbc43d3e-combined-ca-bundle\") pod \"barbican-db-sync-5vv4z\" (UID: \"c720d172-36bf-467b-a900-dec2dbc43d3e\") " pod="openstack-kuttl-tests/barbican-db-sync-5vv4z" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.119318 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7abf0081-40ee-4a29-b5fe-3a8c4c9f0214-config-data\") pod \"placement-db-sync-dm58n\" (UID: \"7abf0081-40ee-4a29-b5fe-3a8c4c9f0214\") " pod="openstack-kuttl-tests/placement-db-sync-dm58n" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.122870 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/5659e8f8-3581-4ecf-a2ee-0c6d044b17e2-config\") pod \"neutron-db-sync-682tw\" (UID: \"5659e8f8-3581-4ecf-a2ee-0c6d044b17e2\") " pod="openstack-kuttl-tests/neutron-db-sync-682tw" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.133771 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.141288 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5659e8f8-3581-4ecf-a2ee-0c6d044b17e2-combined-ca-bundle\") pod \"neutron-db-sync-682tw\" (UID: \"5659e8f8-3581-4ecf-a2ee-0c6d044b17e2\") " pod="openstack-kuttl-tests/neutron-db-sync-682tw" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.158597 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n5cbl\" (UniqueName: \"kubernetes.io/projected/5659e8f8-3581-4ecf-a2ee-0c6d044b17e2-kube-api-access-n5cbl\") pod \"neutron-db-sync-682tw\" (UID: \"5659e8f8-3581-4ecf-a2ee-0c6d044b17e2\") " pod="openstack-kuttl-tests/neutron-db-sync-682tw" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.202616 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-db-sync-682tw" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.232354 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7abf0081-40ee-4a29-b5fe-3a8c4c9f0214-logs\") pod \"placement-db-sync-dm58n\" (UID: \"7abf0081-40ee-4a29-b5fe-3a8c4c9f0214\") " pod="openstack-kuttl-tests/placement-db-sync-dm58n" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.232395 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7abf0081-40ee-4a29-b5fe-3a8c4c9f0214-scripts\") pod \"placement-db-sync-dm58n\" (UID: \"7abf0081-40ee-4a29-b5fe-3a8c4c9f0214\") " pod="openstack-kuttl-tests/placement-db-sync-dm58n" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.232444 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c720d172-36bf-467b-a900-dec2dbc43d3e-db-sync-config-data\") pod \"barbican-db-sync-5vv4z\" (UID: \"c720d172-36bf-467b-a900-dec2dbc43d3e\") " pod="openstack-kuttl-tests/barbican-db-sync-5vv4z" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.232495 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4efe1398-34aa-4835-8c35-790f9ec1d514-config-data\") pod \"cinder-db-sync-4m92m\" (UID: \"4efe1398-34aa-4835-8c35-790f9ec1d514\") " pod="openstack-kuttl-tests/cinder-db-sync-4m92m" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.232520 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7abf0081-40ee-4a29-b5fe-3a8c4c9f0214-combined-ca-bundle\") pod \"placement-db-sync-dm58n\" (UID: \"7abf0081-40ee-4a29-b5fe-3a8c4c9f0214\") " pod="openstack-kuttl-tests/placement-db-sync-dm58n" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.232576 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4efe1398-34aa-4835-8c35-790f9ec1d514-db-sync-config-data\") pod \"cinder-db-sync-4m92m\" (UID: \"4efe1398-34aa-4835-8c35-790f9ec1d514\") " pod="openstack-kuttl-tests/cinder-db-sync-4m92m" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.232594 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mqgg5\" (UniqueName: 
\"kubernetes.io/projected/4efe1398-34aa-4835-8c35-790f9ec1d514-kube-api-access-mqgg5\") pod \"cinder-db-sync-4m92m\" (UID: \"4efe1398-34aa-4835-8c35-790f9ec1d514\") " pod="openstack-kuttl-tests/cinder-db-sync-4m92m" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.232655 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pmqlj\" (UniqueName: \"kubernetes.io/projected/c720d172-36bf-467b-a900-dec2dbc43d3e-kube-api-access-pmqlj\") pod \"barbican-db-sync-5vv4z\" (UID: \"c720d172-36bf-467b-a900-dec2dbc43d3e\") " pod="openstack-kuttl-tests/barbican-db-sync-5vv4z" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.232724 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4efe1398-34aa-4835-8c35-790f9ec1d514-scripts\") pod \"cinder-db-sync-4m92m\" (UID: \"4efe1398-34aa-4835-8c35-790f9ec1d514\") " pod="openstack-kuttl-tests/cinder-db-sync-4m92m" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.232753 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rxhh2\" (UniqueName: \"kubernetes.io/projected/7abf0081-40ee-4a29-b5fe-3a8c4c9f0214-kube-api-access-rxhh2\") pod \"placement-db-sync-dm58n\" (UID: \"7abf0081-40ee-4a29-b5fe-3a8c4c9f0214\") " pod="openstack-kuttl-tests/placement-db-sync-dm58n" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.232771 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4efe1398-34aa-4835-8c35-790f9ec1d514-etc-machine-id\") pod \"cinder-db-sync-4m92m\" (UID: \"4efe1398-34aa-4835-8c35-790f9ec1d514\") " pod="openstack-kuttl-tests/cinder-db-sync-4m92m" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.232804 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4efe1398-34aa-4835-8c35-790f9ec1d514-combined-ca-bundle\") pod \"cinder-db-sync-4m92m\" (UID: \"4efe1398-34aa-4835-8c35-790f9ec1d514\") " pod="openstack-kuttl-tests/cinder-db-sync-4m92m" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.232827 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c720d172-36bf-467b-a900-dec2dbc43d3e-combined-ca-bundle\") pod \"barbican-db-sync-5vv4z\" (UID: \"c720d172-36bf-467b-a900-dec2dbc43d3e\") " pod="openstack-kuttl-tests/barbican-db-sync-5vv4z" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.232843 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7abf0081-40ee-4a29-b5fe-3a8c4c9f0214-config-data\") pod \"placement-db-sync-dm58n\" (UID: \"7abf0081-40ee-4a29-b5fe-3a8c4c9f0214\") " pod="openstack-kuttl-tests/placement-db-sync-dm58n" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.239759 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7abf0081-40ee-4a29-b5fe-3a8c4c9f0214-logs\") pod \"placement-db-sync-dm58n\" (UID: \"7abf0081-40ee-4a29-b5fe-3a8c4c9f0214\") " pod="openstack-kuttl-tests/placement-db-sync-dm58n" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.240998 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4efe1398-34aa-4835-8c35-790f9ec1d514-etc-machine-id\") 
pod \"cinder-db-sync-4m92m\" (UID: \"4efe1398-34aa-4835-8c35-790f9ec1d514\") " pod="openstack-kuttl-tests/cinder-db-sync-4m92m" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.242077 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7abf0081-40ee-4a29-b5fe-3a8c4c9f0214-config-data\") pod \"placement-db-sync-dm58n\" (UID: \"7abf0081-40ee-4a29-b5fe-3a8c4c9f0214\") " pod="openstack-kuttl-tests/placement-db-sync-dm58n" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.250074 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4efe1398-34aa-4835-8c35-790f9ec1d514-config-data\") pod \"cinder-db-sync-4m92m\" (UID: \"4efe1398-34aa-4835-8c35-790f9ec1d514\") " pod="openstack-kuttl-tests/cinder-db-sync-4m92m" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.266435 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7abf0081-40ee-4a29-b5fe-3a8c4c9f0214-scripts\") pod \"placement-db-sync-dm58n\" (UID: \"7abf0081-40ee-4a29-b5fe-3a8c4c9f0214\") " pod="openstack-kuttl-tests/placement-db-sync-dm58n" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.266811 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4efe1398-34aa-4835-8c35-790f9ec1d514-scripts\") pod \"cinder-db-sync-4m92m\" (UID: \"4efe1398-34aa-4835-8c35-790f9ec1d514\") " pod="openstack-kuttl-tests/cinder-db-sync-4m92m" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.268729 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4efe1398-34aa-4835-8c35-790f9ec1d514-combined-ca-bundle\") pod \"cinder-db-sync-4m92m\" (UID: \"4efe1398-34aa-4835-8c35-790f9ec1d514\") " pod="openstack-kuttl-tests/cinder-db-sync-4m92m" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.270717 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c720d172-36bf-467b-a900-dec2dbc43d3e-combined-ca-bundle\") pod \"barbican-db-sync-5vv4z\" (UID: \"c720d172-36bf-467b-a900-dec2dbc43d3e\") " pod="openstack-kuttl-tests/barbican-db-sync-5vv4z" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.271494 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pmqlj\" (UniqueName: \"kubernetes.io/projected/c720d172-36bf-467b-a900-dec2dbc43d3e-kube-api-access-pmqlj\") pod \"barbican-db-sync-5vv4z\" (UID: \"c720d172-36bf-467b-a900-dec2dbc43d3e\") " pod="openstack-kuttl-tests/barbican-db-sync-5vv4z" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.281533 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4efe1398-34aa-4835-8c35-790f9ec1d514-db-sync-config-data\") pod \"cinder-db-sync-4m92m\" (UID: \"4efe1398-34aa-4835-8c35-790f9ec1d514\") " pod="openstack-kuttl-tests/cinder-db-sync-4m92m" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.282484 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c720d172-36bf-467b-a900-dec2dbc43d3e-db-sync-config-data\") pod \"barbican-db-sync-5vv4z\" (UID: \"c720d172-36bf-467b-a900-dec2dbc43d3e\") " pod="openstack-kuttl-tests/barbican-db-sync-5vv4z" Jan 20 17:18:28 crc 
kubenswrapper[4558]: I0120 17:18:28.284557 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mqgg5\" (UniqueName: \"kubernetes.io/projected/4efe1398-34aa-4835-8c35-790f9ec1d514-kube-api-access-mqgg5\") pod \"cinder-db-sync-4m92m\" (UID: \"4efe1398-34aa-4835-8c35-790f9ec1d514\") " pod="openstack-kuttl-tests/cinder-db-sync-4m92m" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.291623 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7abf0081-40ee-4a29-b5fe-3a8c4c9f0214-combined-ca-bundle\") pod \"placement-db-sync-dm58n\" (UID: \"7abf0081-40ee-4a29-b5fe-3a8c4c9f0214\") " pod="openstack-kuttl-tests/placement-db-sync-dm58n" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.295701 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rxhh2\" (UniqueName: \"kubernetes.io/projected/7abf0081-40ee-4a29-b5fe-3a8c4c9f0214-kube-api-access-rxhh2\") pod \"placement-db-sync-dm58n\" (UID: \"7abf0081-40ee-4a29-b5fe-3a8c4c9f0214\") " pod="openstack-kuttl-tests/placement-db-sync-dm58n" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.314759 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-db-sync-dm58n" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.395568 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-dgphh"] Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.418764 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-db-sync-4m92m" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.428345 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-db-sync-5vv4z" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.582528 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b9badf-c4fb-42f0-9e4e-229e6d19ba09" path="/var/lib/kubelet/pods/f4b9badf-c4fb-42f0-9e4e-229e6d19ba09/volumes" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.761343 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.763945 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.768170 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-glance-dockercfg-d2wnn" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.770136 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-scripts" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.770356 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-public-svc" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.770491 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-external-config-data" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.773745 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.829985 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.831744 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.833858 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-internal-svc" Jan 20 17:18:28 crc kubenswrapper[4558]: W0120 17:18:28.834483 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod037cfbe8_8c84_4901_9e77_9513f22e39a1.slice/crio-5c3169fe6410f0d62ef9f1869c8f77981b1dec8a5ff59900d6aa35f7b9c3a584 WatchSource:0}: Error finding container 5c3169fe6410f0d62ef9f1869c8f77981b1dec8a5ff59900d6aa35f7b9c3a584: Status 404 returned error can't find the container with id 5c3169fe6410f0d62ef9f1869c8f77981b1dec8a5ff59900d6aa35f7b9c3a584 Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.834832 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-internal-config-data" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.840795 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.851276 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.935877 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-db-sync-682tw"] Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.948581 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"glance-default-internal-api-0\" (UID: \"6f64d722-5520-4fc7-b28c-378dfcced10b\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.948647 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6f64d722-5520-4fc7-b28c-378dfcced10b-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"6f64d722-5520-4fc7-b28c-378dfcced10b\") " 
pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.948669 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6f64d722-5520-4fc7-b28c-378dfcced10b-logs\") pod \"glance-default-internal-api-0\" (UID: \"6f64d722-5520-4fc7-b28c-378dfcced10b\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.948766 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c34a8ba-597f-464a-babf-0ded54c9ebfb-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"8c34a8ba-597f-464a-babf-0ded54c9ebfb\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.948789 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5zb6f\" (UniqueName: \"kubernetes.io/projected/8c34a8ba-597f-464a-babf-0ded54c9ebfb-kube-api-access-5zb6f\") pod \"glance-default-external-api-0\" (UID: \"8c34a8ba-597f-464a-babf-0ded54c9ebfb\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.948827 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8c34a8ba-597f-464a-babf-0ded54c9ebfb-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"8c34a8ba-597f-464a-babf-0ded54c9ebfb\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.948866 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8c34a8ba-597f-464a-babf-0ded54c9ebfb-scripts\") pod \"glance-default-external-api-0\" (UID: \"8c34a8ba-597f-464a-babf-0ded54c9ebfb\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.948885 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8c34a8ba-597f-464a-babf-0ded54c9ebfb-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"8c34a8ba-597f-464a-babf-0ded54c9ebfb\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.948922 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage17-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage17-crc\") pod \"glance-default-external-api-0\" (UID: \"8c34a8ba-597f-464a-babf-0ded54c9ebfb\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.949009 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8c34a8ba-597f-464a-babf-0ded54c9ebfb-logs\") pod \"glance-default-external-api-0\" (UID: \"8c34a8ba-597f-464a-babf-0ded54c9ebfb\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.949036 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/6f64d722-5520-4fc7-b28c-378dfcced10b-config-data\") pod \"glance-default-internal-api-0\" (UID: \"6f64d722-5520-4fc7-b28c-378dfcced10b\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.949078 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6f64d722-5520-4fc7-b28c-378dfcced10b-scripts\") pod \"glance-default-internal-api-0\" (UID: \"6f64d722-5520-4fc7-b28c-378dfcced10b\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.949093 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6f64d722-5520-4fc7-b28c-378dfcced10b-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"6f64d722-5520-4fc7-b28c-378dfcced10b\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.949116 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f64d722-5520-4fc7-b28c-378dfcced10b-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"6f64d722-5520-4fc7-b28c-378dfcced10b\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.949181 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c34a8ba-597f-464a-babf-0ded54c9ebfb-config-data\") pod \"glance-default-external-api-0\" (UID: \"8c34a8ba-597f-464a-babf-0ded54c9ebfb\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.949207 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6zg6j\" (UniqueName: \"kubernetes.io/projected/6f64d722-5520-4fc7-b28c-378dfcced10b-kube-api-access-6zg6j\") pod \"glance-default-internal-api-0\" (UID: \"6f64d722-5520-4fc7-b28c-378dfcced10b\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:18:28 crc kubenswrapper[4558]: I0120 17:18:28.955145 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-db-sync-dm58n"] Jan 20 17:18:28 crc kubenswrapper[4558]: W0120 17:18:28.964757 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7abf0081_40ee_4a29_b5fe_3a8c4c9f0214.slice/crio-f11ba6332c6aeee803c7373afa780d9d3b5327cddb76f21d013f4075a450ca99 WatchSource:0}: Error finding container f11ba6332c6aeee803c7373afa780d9d3b5327cddb76f21d013f4075a450ca99: Status 404 returned error can't find the container with id f11ba6332c6aeee803c7373afa780d9d3b5327cddb76f21d013f4075a450ca99 Jan 20 17:18:29 crc kubenswrapper[4558]: W0120 17:18:29.001060 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc720d172_36bf_467b_a900_dec2dbc43d3e.slice/crio-785077f032f9a4874f05a3eeef23dd89e4fdb8ecaaab8f5f3235c42468bc09ed WatchSource:0}: Error finding container 785077f032f9a4874f05a3eeef23dd89e4fdb8ecaaab8f5f3235c42468bc09ed: Status 404 returned error can't find the container with id 
785077f032f9a4874f05a3eeef23dd89e4fdb8ecaaab8f5f3235c42468bc09ed Jan 20 17:18:29 crc kubenswrapper[4558]: I0120 17:18:29.001145 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-db-sync-5vv4z"] Jan 20 17:18:29 crc kubenswrapper[4558]: I0120 17:18:29.006750 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-db-sync-4m92m"] Jan 20 17:18:29 crc kubenswrapper[4558]: W0120 17:18:29.020272 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4efe1398_34aa_4835_8c35_790f9ec1d514.slice/crio-f4eb821f8d88c20f0623d9f391cdf7bb32ab0d0d6820e7369377ea27d5011d36 WatchSource:0}: Error finding container f4eb821f8d88c20f0623d9f391cdf7bb32ab0d0d6820e7369377ea27d5011d36: Status 404 returned error can't find the container with id f4eb821f8d88c20f0623d9f391cdf7bb32ab0d0d6820e7369377ea27d5011d36 Jan 20 17:18:29 crc kubenswrapper[4558]: I0120 17:18:29.051597 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c34a8ba-597f-464a-babf-0ded54c9ebfb-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"8c34a8ba-597f-464a-babf-0ded54c9ebfb\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:18:29 crc kubenswrapper[4558]: I0120 17:18:29.051632 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5zb6f\" (UniqueName: \"kubernetes.io/projected/8c34a8ba-597f-464a-babf-0ded54c9ebfb-kube-api-access-5zb6f\") pod \"glance-default-external-api-0\" (UID: \"8c34a8ba-597f-464a-babf-0ded54c9ebfb\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:18:29 crc kubenswrapper[4558]: I0120 17:18:29.051669 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8c34a8ba-597f-464a-babf-0ded54c9ebfb-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"8c34a8ba-597f-464a-babf-0ded54c9ebfb\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:18:29 crc kubenswrapper[4558]: I0120 17:18:29.051696 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8c34a8ba-597f-464a-babf-0ded54c9ebfb-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"8c34a8ba-597f-464a-babf-0ded54c9ebfb\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:18:29 crc kubenswrapper[4558]: I0120 17:18:29.051715 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8c34a8ba-597f-464a-babf-0ded54c9ebfb-scripts\") pod \"glance-default-external-api-0\" (UID: \"8c34a8ba-597f-464a-babf-0ded54c9ebfb\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:18:29 crc kubenswrapper[4558]: I0120 17:18:29.051740 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage17-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage17-crc\") pod \"glance-default-external-api-0\" (UID: \"8c34a8ba-597f-464a-babf-0ded54c9ebfb\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:18:29 crc kubenswrapper[4558]: I0120 17:18:29.051794 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/8c34a8ba-597f-464a-babf-0ded54c9ebfb-logs\") pod \"glance-default-external-api-0\" (UID: \"8c34a8ba-597f-464a-babf-0ded54c9ebfb\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:18:29 crc kubenswrapper[4558]: I0120 17:18:29.051816 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f64d722-5520-4fc7-b28c-378dfcced10b-config-data\") pod \"glance-default-internal-api-0\" (UID: \"6f64d722-5520-4fc7-b28c-378dfcced10b\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:18:29 crc kubenswrapper[4558]: I0120 17:18:29.051841 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6f64d722-5520-4fc7-b28c-378dfcced10b-scripts\") pod \"glance-default-internal-api-0\" (UID: \"6f64d722-5520-4fc7-b28c-378dfcced10b\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:18:29 crc kubenswrapper[4558]: I0120 17:18:29.051860 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6f64d722-5520-4fc7-b28c-378dfcced10b-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"6f64d722-5520-4fc7-b28c-378dfcced10b\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:18:29 crc kubenswrapper[4558]: I0120 17:18:29.051877 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f64d722-5520-4fc7-b28c-378dfcced10b-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"6f64d722-5520-4fc7-b28c-378dfcced10b\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:18:29 crc kubenswrapper[4558]: I0120 17:18:29.051909 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c34a8ba-597f-464a-babf-0ded54c9ebfb-config-data\") pod \"glance-default-external-api-0\" (UID: \"8c34a8ba-597f-464a-babf-0ded54c9ebfb\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:18:29 crc kubenswrapper[4558]: I0120 17:18:29.051927 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6zg6j\" (UniqueName: \"kubernetes.io/projected/6f64d722-5520-4fc7-b28c-378dfcced10b-kube-api-access-6zg6j\") pod \"glance-default-internal-api-0\" (UID: \"6f64d722-5520-4fc7-b28c-378dfcced10b\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:18:29 crc kubenswrapper[4558]: I0120 17:18:29.052006 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"glance-default-internal-api-0\" (UID: \"6f64d722-5520-4fc7-b28c-378dfcced10b\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:18:29 crc kubenswrapper[4558]: I0120 17:18:29.052031 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6f64d722-5520-4fc7-b28c-378dfcced10b-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"6f64d722-5520-4fc7-b28c-378dfcced10b\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:18:29 crc kubenswrapper[4558]: I0120 17:18:29.052047 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" 
(UniqueName: \"kubernetes.io/empty-dir/6f64d722-5520-4fc7-b28c-378dfcced10b-logs\") pod \"glance-default-internal-api-0\" (UID: \"6f64d722-5520-4fc7-b28c-378dfcced10b\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:18:29 crc kubenswrapper[4558]: I0120 17:18:29.052492 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6f64d722-5520-4fc7-b28c-378dfcced10b-logs\") pod \"glance-default-internal-api-0\" (UID: \"6f64d722-5520-4fc7-b28c-378dfcced10b\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:18:29 crc kubenswrapper[4558]: I0120 17:18:29.053047 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8c34a8ba-597f-464a-babf-0ded54c9ebfb-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"8c34a8ba-597f-464a-babf-0ded54c9ebfb\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:18:29 crc kubenswrapper[4558]: I0120 17:18:29.053522 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage17-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage17-crc\") pod \"glance-default-external-api-0\" (UID: \"8c34a8ba-597f-464a-babf-0ded54c9ebfb\") device mount path \"/mnt/openstack/pv17\"" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:18:29 crc kubenswrapper[4558]: I0120 17:18:29.055479 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"glance-default-internal-api-0\" (UID: \"6f64d722-5520-4fc7-b28c-378dfcced10b\") device mount path \"/mnt/openstack/pv13\"" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:18:29 crc kubenswrapper[4558]: I0120 17:18:29.056690 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6f64d722-5520-4fc7-b28c-378dfcced10b-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"6f64d722-5520-4fc7-b28c-378dfcced10b\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:18:29 crc kubenswrapper[4558]: I0120 17:18:29.057537 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8c34a8ba-597f-464a-babf-0ded54c9ebfb-logs\") pod \"glance-default-external-api-0\" (UID: \"8c34a8ba-597f-464a-babf-0ded54c9ebfb\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:18:29 crc kubenswrapper[4558]: I0120 17:18:29.058881 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f64d722-5520-4fc7-b28c-378dfcced10b-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"6f64d722-5520-4fc7-b28c-378dfcced10b\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:18:29 crc kubenswrapper[4558]: I0120 17:18:29.059963 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6f64d722-5520-4fc7-b28c-378dfcced10b-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"6f64d722-5520-4fc7-b28c-378dfcced10b\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:18:29 crc kubenswrapper[4558]: I0120 17:18:29.060602 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/6f64d722-5520-4fc7-b28c-378dfcced10b-scripts\") pod \"glance-default-internal-api-0\" (UID: \"6f64d722-5520-4fc7-b28c-378dfcced10b\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:18:29 crc kubenswrapper[4558]: I0120 17:18:29.061129 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c34a8ba-597f-464a-babf-0ded54c9ebfb-config-data\") pod \"glance-default-external-api-0\" (UID: \"8c34a8ba-597f-464a-babf-0ded54c9ebfb\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:18:29 crc kubenswrapper[4558]: I0120 17:18:29.061970 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8c34a8ba-597f-464a-babf-0ded54c9ebfb-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"8c34a8ba-597f-464a-babf-0ded54c9ebfb\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:18:29 crc kubenswrapper[4558]: I0120 17:18:29.062337 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c34a8ba-597f-464a-babf-0ded54c9ebfb-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"8c34a8ba-597f-464a-babf-0ded54c9ebfb\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:18:29 crc kubenswrapper[4558]: I0120 17:18:29.067194 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f64d722-5520-4fc7-b28c-378dfcced10b-config-data\") pod \"glance-default-internal-api-0\" (UID: \"6f64d722-5520-4fc7-b28c-378dfcced10b\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:18:29 crc kubenswrapper[4558]: I0120 17:18:29.067968 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8c34a8ba-597f-464a-babf-0ded54c9ebfb-scripts\") pod \"glance-default-external-api-0\" (UID: \"8c34a8ba-597f-464a-babf-0ded54c9ebfb\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:18:29 crc kubenswrapper[4558]: I0120 17:18:29.073159 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5zb6f\" (UniqueName: \"kubernetes.io/projected/8c34a8ba-597f-464a-babf-0ded54c9ebfb-kube-api-access-5zb6f\") pod \"glance-default-external-api-0\" (UID: \"8c34a8ba-597f-464a-babf-0ded54c9ebfb\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:18:29 crc kubenswrapper[4558]: I0120 17:18:29.073623 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6zg6j\" (UniqueName: \"kubernetes.io/projected/6f64d722-5520-4fc7-b28c-378dfcced10b-kube-api-access-6zg6j\") pod \"glance-default-internal-api-0\" (UID: \"6f64d722-5520-4fc7-b28c-378dfcced10b\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:18:29 crc kubenswrapper[4558]: I0120 17:18:29.088093 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"glance-default-internal-api-0\" (UID: \"6f64d722-5520-4fc7-b28c-378dfcced10b\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:18:29 crc kubenswrapper[4558]: I0120 17:18:29.095399 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage17-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage17-crc\") pod \"glance-default-external-api-0\" (UID: \"8c34a8ba-597f-464a-babf-0ded54c9ebfb\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:18:29 crc kubenswrapper[4558]: I0120 17:18:29.183557 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:18:29 crc kubenswrapper[4558]: I0120 17:18:29.388293 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:18:29 crc kubenswrapper[4558]: I0120 17:18:29.392427 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-dgphh" event={"ID":"0b08b6e3-03a5-4dfa-aba3-2847529be269","Type":"ContainerStarted","Data":"e7f3fec701ac7c97a615fd06b3109321d456b072a7460d87900d9cdedcaa35e1"} Jan 20 17:18:29 crc kubenswrapper[4558]: I0120 17:18:29.392489 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-dgphh" event={"ID":"0b08b6e3-03a5-4dfa-aba3-2847529be269","Type":"ContainerStarted","Data":"d654e8cdfc73c203480241f31f4ecdf58346ad5adc2de94c844c290a20303fbb"} Jan 20 17:18:29 crc kubenswrapper[4558]: I0120 17:18:29.397584 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-sync-682tw" event={"ID":"5659e8f8-3581-4ecf-a2ee-0c6d044b17e2","Type":"ContainerStarted","Data":"207e164549e3e5e61bf2c71ee34097728114f9cd05bc02c75ea304dc4cbd25f8"} Jan 20 17:18:29 crc kubenswrapper[4558]: I0120 17:18:29.397636 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-sync-682tw" event={"ID":"5659e8f8-3581-4ecf-a2ee-0c6d044b17e2","Type":"ContainerStarted","Data":"a14511826e3b186473ec8515d5d8898fa7add3a63377d2f29e8df73a36c70e15"} Jan 20 17:18:29 crc kubenswrapper[4558]: I0120 17:18:29.404041 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-sync-dm58n" event={"ID":"7abf0081-40ee-4a29-b5fe-3a8c4c9f0214","Type":"ContainerStarted","Data":"eb9de43bb9c67ceb4188a84f8dae178f1ade4dab62bbd644fa80972fe8028a59"} Jan 20 17:18:29 crc kubenswrapper[4558]: I0120 17:18:29.404080 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-sync-dm58n" event={"ID":"7abf0081-40ee-4a29-b5fe-3a8c4c9f0214","Type":"ContainerStarted","Data":"f11ba6332c6aeee803c7373afa780d9d3b5327cddb76f21d013f4075a450ca99"} Jan 20 17:18:29 crc kubenswrapper[4558]: I0120 17:18:29.409800 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/keystone-bootstrap-dgphh" podStartSLOduration=2.409789509 podStartE2EDuration="2.409789509s" podCreationTimestamp="2026-01-20 17:18:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:18:29.407040581 +0000 UTC m=+2203.167378547" watchObservedRunningTime="2026-01-20 17:18:29.409789509 +0000 UTC m=+2203.170127476" Jan 20 17:18:29 crc kubenswrapper[4558]: I0120 17:18:29.410769 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"037cfbe8-8c84-4901-9e77-9513f22e39a1","Type":"ContainerStarted","Data":"5c3169fe6410f0d62ef9f1869c8f77981b1dec8a5ff59900d6aa35f7b9c3a584"} Jan 20 17:18:29 crc kubenswrapper[4558]: I0120 17:18:29.416134 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-kuttl-tests/barbican-db-sync-5vv4z" event={"ID":"c720d172-36bf-467b-a900-dec2dbc43d3e","Type":"ContainerStarted","Data":"3d05fe873d2ff712ba8633c9dc802a7a515585115de35683f8b31b901ea5991d"} Jan 20 17:18:29 crc kubenswrapper[4558]: I0120 17:18:29.416196 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-sync-5vv4z" event={"ID":"c720d172-36bf-467b-a900-dec2dbc43d3e","Type":"ContainerStarted","Data":"785077f032f9a4874f05a3eeef23dd89e4fdb8ecaaab8f5f3235c42468bc09ed"} Jan 20 17:18:29 crc kubenswrapper[4558]: I0120 17:18:29.421588 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/placement-db-sync-dm58n" podStartSLOduration=2.4215736789999998 podStartE2EDuration="2.421573679s" podCreationTimestamp="2026-01-20 17:18:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:18:29.42121253 +0000 UTC m=+2203.181550488" watchObservedRunningTime="2026-01-20 17:18:29.421573679 +0000 UTC m=+2203.181911647" Jan 20 17:18:29 crc kubenswrapper[4558]: I0120 17:18:29.430029 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-sync-4m92m" event={"ID":"4efe1398-34aa-4835-8c35-790f9ec1d514","Type":"ContainerStarted","Data":"f4eb821f8d88c20f0623d9f391cdf7bb32ab0d0d6820e7369377ea27d5011d36"} Jan 20 17:18:29 crc kubenswrapper[4558]: I0120 17:18:29.470589 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/neutron-db-sync-682tw" podStartSLOduration=2.470569888 podStartE2EDuration="2.470569888s" podCreationTimestamp="2026-01-20 17:18:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:18:29.451450042 +0000 UTC m=+2203.211788009" watchObservedRunningTime="2026-01-20 17:18:29.470569888 +0000 UTC m=+2203.230907854" Jan 20 17:18:29 crc kubenswrapper[4558]: I0120 17:18:29.477354 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-db-sync-5vv4z" podStartSLOduration=2.477345399 podStartE2EDuration="2.477345399s" podCreationTimestamp="2026-01-20 17:18:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:18:29.471423863 +0000 UTC m=+2203.231761830" watchObservedRunningTime="2026-01-20 17:18:29.477345399 +0000 UTC m=+2203.237683366" Jan 20 17:18:29 crc kubenswrapper[4558]: I0120 17:18:29.659591 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:18:29 crc kubenswrapper[4558]: I0120 17:18:29.662864 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-hr4ld" Jan 20 17:18:29 crc kubenswrapper[4558]: I0120 17:18:29.664349 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-hr4ld" Jan 20 17:18:29 crc kubenswrapper[4558]: I0120 17:18:29.746574 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-hr4ld" Jan 20 17:18:29 crc kubenswrapper[4558]: I0120 17:18:29.925272 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:18:29 crc kubenswrapper[4558]: I0120 
17:18:29.933711 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:18:29 crc kubenswrapper[4558]: W0120 17:18:29.941990 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8c34a8ba_597f_464a_babf_0ded54c9ebfb.slice/crio-c5cce300ff0ff49a6a34bda94f636b16078bdc5275581dbd132e9eccdb9907fd WatchSource:0}: Error finding container c5cce300ff0ff49a6a34bda94f636b16078bdc5275581dbd132e9eccdb9907fd: Status 404 returned error can't find the container with id c5cce300ff0ff49a6a34bda94f636b16078bdc5275581dbd132e9eccdb9907fd Jan 20 17:18:29 crc kubenswrapper[4558]: I0120 17:18:29.983550 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:18:30 crc kubenswrapper[4558]: I0120 17:18:30.009433 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:18:30 crc kubenswrapper[4558]: I0120 17:18:30.452145 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-sync-4m92m" event={"ID":"4efe1398-34aa-4835-8c35-790f9ec1d514","Type":"ContainerStarted","Data":"606e0dbff67da472a309954e0767fc9f44fcce3c5d73c3e05d4853ba37288e26"} Jan 20 17:18:30 crc kubenswrapper[4558]: I0120 17:18:30.456794 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"8c34a8ba-597f-464a-babf-0ded54c9ebfb","Type":"ContainerStarted","Data":"c5cce300ff0ff49a6a34bda94f636b16078bdc5275581dbd132e9eccdb9907fd"} Jan 20 17:18:30 crc kubenswrapper[4558]: I0120 17:18:30.459132 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"037cfbe8-8c84-4901-9e77-9513f22e39a1","Type":"ContainerStarted","Data":"50673c604a510d5000c90395016ae5092b6d9a96de87261edc85c70805ed811c"} Jan 20 17:18:30 crc kubenswrapper[4558]: I0120 17:18:30.462057 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"6f64d722-5520-4fc7-b28c-378dfcced10b","Type":"ContainerStarted","Data":"00e0204807b0281f2971f11d0303dc3eeae0baa284660b2b6f07c287f5cc1dca"} Jan 20 17:18:30 crc kubenswrapper[4558]: I0120 17:18:30.462082 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"6f64d722-5520-4fc7-b28c-378dfcced10b","Type":"ContainerStarted","Data":"3d6c7e70046ff04313bd8625093fb9e99852be0a052dc24ba2c4ff2badadc759"} Jan 20 17:18:30 crc kubenswrapper[4558]: I0120 17:18:30.485860 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/cinder-db-sync-4m92m" podStartSLOduration=3.485842759 podStartE2EDuration="3.485842759s" podCreationTimestamp="2026-01-20 17:18:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:18:30.466036523 +0000 UTC m=+2204.226374490" watchObservedRunningTime="2026-01-20 17:18:30.485842759 +0000 UTC m=+2204.246180715" Jan 20 17:18:30 crc kubenswrapper[4558]: I0120 17:18:30.579092 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-hr4ld" Jan 20 17:18:30 crc kubenswrapper[4558]: I0120 17:18:30.931733 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openshift-marketplace/certified-operators-hr4ld"] Jan 20 17:18:31 crc kubenswrapper[4558]: I0120 17:18:31.474136 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"6f64d722-5520-4fc7-b28c-378dfcced10b","Type":"ContainerStarted","Data":"8f43d06b2a4dbf32e0a9848e094fa72e0d178ba07a481c9b8769108fba35efde"} Jan 20 17:18:31 crc kubenswrapper[4558]: I0120 17:18:31.474973 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="6f64d722-5520-4fc7-b28c-378dfcced10b" containerName="glance-log" containerID="cri-o://00e0204807b0281f2971f11d0303dc3eeae0baa284660b2b6f07c287f5cc1dca" gracePeriod=30 Jan 20 17:18:31 crc kubenswrapper[4558]: I0120 17:18:31.475074 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="6f64d722-5520-4fc7-b28c-378dfcced10b" containerName="glance-httpd" containerID="cri-o://8f43d06b2a4dbf32e0a9848e094fa72e0d178ba07a481c9b8769108fba35efde" gracePeriod=30 Jan 20 17:18:31 crc kubenswrapper[4558]: I0120 17:18:31.477824 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="8c34a8ba-597f-464a-babf-0ded54c9ebfb" containerName="glance-log" containerID="cri-o://e0dc689ec21d9e22c527810a07babd52b67b4001126584d804ab8eb4f44545de" gracePeriod=30 Jan 20 17:18:31 crc kubenswrapper[4558]: I0120 17:18:31.477710 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"8c34a8ba-597f-464a-babf-0ded54c9ebfb","Type":"ContainerStarted","Data":"b6ca4c57616936e05ff017943d6ae125b5d76fa49486200e5088d7d00b8bebdb"} Jan 20 17:18:31 crc kubenswrapper[4558]: I0120 17:18:31.478017 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"8c34a8ba-597f-464a-babf-0ded54c9ebfb","Type":"ContainerStarted","Data":"e0dc689ec21d9e22c527810a07babd52b67b4001126584d804ab8eb4f44545de"} Jan 20 17:18:31 crc kubenswrapper[4558]: I0120 17:18:31.477928 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="8c34a8ba-597f-464a-babf-0ded54c9ebfb" containerName="glance-httpd" containerID="cri-o://b6ca4c57616936e05ff017943d6ae125b5d76fa49486200e5088d7d00b8bebdb" gracePeriod=30 Jan 20 17:18:31 crc kubenswrapper[4558]: I0120 17:18:31.492659 4558 generic.go:334] "Generic (PLEG): container finished" podID="7abf0081-40ee-4a29-b5fe-3a8c4c9f0214" containerID="eb9de43bb9c67ceb4188a84f8dae178f1ade4dab62bbd644fa80972fe8028a59" exitCode=0 Jan 20 17:18:31 crc kubenswrapper[4558]: I0120 17:18:31.492718 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-sync-dm58n" event={"ID":"7abf0081-40ee-4a29-b5fe-3a8c4c9f0214","Type":"ContainerDied","Data":"eb9de43bb9c67ceb4188a84f8dae178f1ade4dab62bbd644fa80972fe8028a59"} Jan 20 17:18:31 crc kubenswrapper[4558]: I0120 17:18:31.501009 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"037cfbe8-8c84-4901-9e77-9513f22e39a1","Type":"ContainerStarted","Data":"ad8ce1fbd71496aaf4e5071bac5dc4ace92331dacd698f83a04eda2a6f65bb64"} Jan 20 17:18:31 crc kubenswrapper[4558]: I0120 17:18:31.504743 4558 generic.go:334] "Generic (PLEG): container finished" 
podID="c720d172-36bf-467b-a900-dec2dbc43d3e" containerID="3d05fe873d2ff712ba8633c9dc802a7a515585115de35683f8b31b901ea5991d" exitCode=0 Jan 20 17:18:31 crc kubenswrapper[4558]: I0120 17:18:31.504788 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-sync-5vv4z" event={"ID":"c720d172-36bf-467b-a900-dec2dbc43d3e","Type":"ContainerDied","Data":"3d05fe873d2ff712ba8633c9dc802a7a515585115de35683f8b31b901ea5991d"} Jan 20 17:18:31 crc kubenswrapper[4558]: I0120 17:18:31.524308 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-internal-api-0" podStartSLOduration=4.524293706 podStartE2EDuration="4.524293706s" podCreationTimestamp="2026-01-20 17:18:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:18:31.516585792 +0000 UTC m=+2205.276923759" watchObservedRunningTime="2026-01-20 17:18:31.524293706 +0000 UTC m=+2205.284631674" Jan 20 17:18:31 crc kubenswrapper[4558]: I0120 17:18:31.556582 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-external-api-0" podStartSLOduration=4.556566816 podStartE2EDuration="4.556566816s" podCreationTimestamp="2026-01-20 17:18:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:18:31.552971365 +0000 UTC m=+2205.313309333" watchObservedRunningTime="2026-01-20 17:18:31.556566816 +0000 UTC m=+2205.316904783" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.121064 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.199257 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5zb6f\" (UniqueName: \"kubernetes.io/projected/8c34a8ba-597f-464a-babf-0ded54c9ebfb-kube-api-access-5zb6f\") pod \"8c34a8ba-597f-464a-babf-0ded54c9ebfb\" (UID: \"8c34a8ba-597f-464a-babf-0ded54c9ebfb\") " Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.199339 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c34a8ba-597f-464a-babf-0ded54c9ebfb-combined-ca-bundle\") pod \"8c34a8ba-597f-464a-babf-0ded54c9ebfb\" (UID: \"8c34a8ba-597f-464a-babf-0ded54c9ebfb\") " Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.199456 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8c34a8ba-597f-464a-babf-0ded54c9ebfb-config-data\") pod \"8c34a8ba-597f-464a-babf-0ded54c9ebfb\" (UID: \"8c34a8ba-597f-464a-babf-0ded54c9ebfb\") " Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.199494 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage17-crc\") pod \"8c34a8ba-597f-464a-babf-0ded54c9ebfb\" (UID: \"8c34a8ba-597f-464a-babf-0ded54c9ebfb\") " Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.199597 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8c34a8ba-597f-464a-babf-0ded54c9ebfb-scripts\") pod \"8c34a8ba-597f-464a-babf-0ded54c9ebfb\" (UID: \"8c34a8ba-597f-464a-babf-0ded54c9ebfb\") " Jan 20 
17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.199645 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8c34a8ba-597f-464a-babf-0ded54c9ebfb-httpd-run\") pod \"8c34a8ba-597f-464a-babf-0ded54c9ebfb\" (UID: \"8c34a8ba-597f-464a-babf-0ded54c9ebfb\") " Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.199672 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8c34a8ba-597f-464a-babf-0ded54c9ebfb-logs\") pod \"8c34a8ba-597f-464a-babf-0ded54c9ebfb\" (UID: \"8c34a8ba-597f-464a-babf-0ded54c9ebfb\") " Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.199708 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8c34a8ba-597f-464a-babf-0ded54c9ebfb-public-tls-certs\") pod \"8c34a8ba-597f-464a-babf-0ded54c9ebfb\" (UID: \"8c34a8ba-597f-464a-babf-0ded54c9ebfb\") " Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.209475 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8c34a8ba-597f-464a-babf-0ded54c9ebfb-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "8c34a8ba-597f-464a-babf-0ded54c9ebfb" (UID: "8c34a8ba-597f-464a-babf-0ded54c9ebfb"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.211072 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8c34a8ba-597f-464a-babf-0ded54c9ebfb-logs" (OuterVolumeSpecName: "logs") pod "8c34a8ba-597f-464a-babf-0ded54c9ebfb" (UID: "8c34a8ba-597f-464a-babf-0ded54c9ebfb"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.212511 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage17-crc" (OuterVolumeSpecName: "glance") pod "8c34a8ba-597f-464a-babf-0ded54c9ebfb" (UID: "8c34a8ba-597f-464a-babf-0ded54c9ebfb"). InnerVolumeSpecName "local-storage17-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.214921 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c34a8ba-597f-464a-babf-0ded54c9ebfb-scripts" (OuterVolumeSpecName: "scripts") pod "8c34a8ba-597f-464a-babf-0ded54c9ebfb" (UID: "8c34a8ba-597f-464a-babf-0ded54c9ebfb"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.215091 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8c34a8ba-597f-464a-babf-0ded54c9ebfb-kube-api-access-5zb6f" (OuterVolumeSpecName: "kube-api-access-5zb6f") pod "8c34a8ba-597f-464a-babf-0ded54c9ebfb" (UID: "8c34a8ba-597f-464a-babf-0ded54c9ebfb"). InnerVolumeSpecName "kube-api-access-5zb6f". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:18:32 crc kubenswrapper[4558]: E0120 17:18:32.232321 4558 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6f64d722_5520_4fc7_b28c_378dfcced10b.slice/crio-conmon-8f43d06b2a4dbf32e0a9848e094fa72e0d178ba07a481c9b8769108fba35efde.scope\": RecentStats: unable to find data in memory cache]" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.248058 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c34a8ba-597f-464a-babf-0ded54c9ebfb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8c34a8ba-597f-464a-babf-0ded54c9ebfb" (UID: "8c34a8ba-597f-464a-babf-0ded54c9ebfb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.274860 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c34a8ba-597f-464a-babf-0ded54c9ebfb-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "8c34a8ba-597f-464a-babf-0ded54c9ebfb" (UID: "8c34a8ba-597f-464a-babf-0ded54c9ebfb"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.277565 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c34a8ba-597f-464a-babf-0ded54c9ebfb-config-data" (OuterVolumeSpecName: "config-data") pod "8c34a8ba-597f-464a-babf-0ded54c9ebfb" (UID: "8c34a8ba-597f-464a-babf-0ded54c9ebfb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.302137 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8c34a8ba-597f-464a-babf-0ded54c9ebfb-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.302187 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8c34a8ba-597f-464a-babf-0ded54c9ebfb-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.302199 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8c34a8ba-597f-464a-babf-0ded54c9ebfb-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.302210 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8c34a8ba-597f-464a-babf-0ded54c9ebfb-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.302222 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5zb6f\" (UniqueName: \"kubernetes.io/projected/8c34a8ba-597f-464a-babf-0ded54c9ebfb-kube-api-access-5zb6f\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.302232 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8c34a8ba-597f-464a-babf-0ded54c9ebfb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.302241 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/8c34a8ba-597f-464a-babf-0ded54c9ebfb-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.302267 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage17-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage17-crc\") on node \"crc\" " Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.310412 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.318919 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage17-crc" (UniqueName: "kubernetes.io/local-volume/local-storage17-crc") on node "crc" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.403311 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"6f64d722-5520-4fc7-b28c-378dfcced10b\" (UID: \"6f64d722-5520-4fc7-b28c-378dfcced10b\") " Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.403362 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6f64d722-5520-4fc7-b28c-378dfcced10b-scripts\") pod \"6f64d722-5520-4fc7-b28c-378dfcced10b\" (UID: \"6f64d722-5520-4fc7-b28c-378dfcced10b\") " Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.403394 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6f64d722-5520-4fc7-b28c-378dfcced10b-internal-tls-certs\") pod \"6f64d722-5520-4fc7-b28c-378dfcced10b\" (UID: \"6f64d722-5520-4fc7-b28c-378dfcced10b\") " Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.403417 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6f64d722-5520-4fc7-b28c-378dfcced10b-logs\") pod \"6f64d722-5520-4fc7-b28c-378dfcced10b\" (UID: \"6f64d722-5520-4fc7-b28c-378dfcced10b\") " Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.403484 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f64d722-5520-4fc7-b28c-378dfcced10b-config-data\") pod \"6f64d722-5520-4fc7-b28c-378dfcced10b\" (UID: \"6f64d722-5520-4fc7-b28c-378dfcced10b\") " Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.403511 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f64d722-5520-4fc7-b28c-378dfcced10b-combined-ca-bundle\") pod \"6f64d722-5520-4fc7-b28c-378dfcced10b\" (UID: \"6f64d722-5520-4fc7-b28c-378dfcced10b\") " Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.403589 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6f64d722-5520-4fc7-b28c-378dfcced10b-httpd-run\") pod \"6f64d722-5520-4fc7-b28c-378dfcced10b\" (UID: \"6f64d722-5520-4fc7-b28c-378dfcced10b\") " Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.403651 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6zg6j\" (UniqueName: \"kubernetes.io/projected/6f64d722-5520-4fc7-b28c-378dfcced10b-kube-api-access-6zg6j\") pod \"6f64d722-5520-4fc7-b28c-378dfcced10b\" (UID: \"6f64d722-5520-4fc7-b28c-378dfcced10b\") 
" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.404038 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage17-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage17-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.408950 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6f64d722-5520-4fc7-b28c-378dfcced10b-kube-api-access-6zg6j" (OuterVolumeSpecName: "kube-api-access-6zg6j") pod "6f64d722-5520-4fc7-b28c-378dfcced10b" (UID: "6f64d722-5520-4fc7-b28c-378dfcced10b"). InnerVolumeSpecName "kube-api-access-6zg6j". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.412743 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage13-crc" (OuterVolumeSpecName: "glance") pod "6f64d722-5520-4fc7-b28c-378dfcced10b" (UID: "6f64d722-5520-4fc7-b28c-378dfcced10b"). InnerVolumeSpecName "local-storage13-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.412850 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f64d722-5520-4fc7-b28c-378dfcced10b-scripts" (OuterVolumeSpecName: "scripts") pod "6f64d722-5520-4fc7-b28c-378dfcced10b" (UID: "6f64d722-5520-4fc7-b28c-378dfcced10b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.417945 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6f64d722-5520-4fc7-b28c-378dfcced10b-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "6f64d722-5520-4fc7-b28c-378dfcced10b" (UID: "6f64d722-5520-4fc7-b28c-378dfcced10b"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.418014 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6f64d722-5520-4fc7-b28c-378dfcced10b-logs" (OuterVolumeSpecName: "logs") pod "6f64d722-5520-4fc7-b28c-378dfcced10b" (UID: "6f64d722-5520-4fc7-b28c-378dfcced10b"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.436930 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f64d722-5520-4fc7-b28c-378dfcced10b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6f64d722-5520-4fc7-b28c-378dfcced10b" (UID: "6f64d722-5520-4fc7-b28c-378dfcced10b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.453779 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f64d722-5520-4fc7-b28c-378dfcced10b-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "6f64d722-5520-4fc7-b28c-378dfcced10b" (UID: "6f64d722-5520-4fc7-b28c-378dfcced10b"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.482737 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f64d722-5520-4fc7-b28c-378dfcced10b-config-data" (OuterVolumeSpecName: "config-data") pod "6f64d722-5520-4fc7-b28c-378dfcced10b" (UID: "6f64d722-5520-4fc7-b28c-378dfcced10b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.507093 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f64d722-5520-4fc7-b28c-378dfcced10b-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.507137 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f64d722-5520-4fc7-b28c-378dfcced10b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.507152 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6f64d722-5520-4fc7-b28c-378dfcced10b-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.507186 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6zg6j\" (UniqueName: \"kubernetes.io/projected/6f64d722-5520-4fc7-b28c-378dfcced10b-kube-api-access-6zg6j\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.507211 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") on node \"crc\" " Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.507221 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6f64d722-5520-4fc7-b28c-378dfcced10b-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.507230 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6f64d722-5520-4fc7-b28c-378dfcced10b-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.507238 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6f64d722-5520-4fc7-b28c-378dfcced10b-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.521235 4558 generic.go:334] "Generic (PLEG): container finished" podID="8c34a8ba-597f-464a-babf-0ded54c9ebfb" containerID="b6ca4c57616936e05ff017943d6ae125b5d76fa49486200e5088d7d00b8bebdb" exitCode=143 Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.521273 4558 generic.go:334] "Generic (PLEG): container finished" podID="8c34a8ba-597f-464a-babf-0ded54c9ebfb" containerID="e0dc689ec21d9e22c527810a07babd52b67b4001126584d804ab8eb4f44545de" exitCode=143 Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.521417 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.523136 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"8c34a8ba-597f-464a-babf-0ded54c9ebfb","Type":"ContainerDied","Data":"b6ca4c57616936e05ff017943d6ae125b5d76fa49486200e5088d7d00b8bebdb"} Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.523206 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"8c34a8ba-597f-464a-babf-0ded54c9ebfb","Type":"ContainerDied","Data":"e0dc689ec21d9e22c527810a07babd52b67b4001126584d804ab8eb4f44545de"} Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.523220 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"8c34a8ba-597f-464a-babf-0ded54c9ebfb","Type":"ContainerDied","Data":"c5cce300ff0ff49a6a34bda94f636b16078bdc5275581dbd132e9eccdb9907fd"} Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.523241 4558 scope.go:117] "RemoveContainer" containerID="b6ca4c57616936e05ff017943d6ae125b5d76fa49486200e5088d7d00b8bebdb" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.534360 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage13-crc" (UniqueName: "kubernetes.io/local-volume/local-storage13-crc") on node "crc" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.536280 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"037cfbe8-8c84-4901-9e77-9513f22e39a1","Type":"ContainerStarted","Data":"3d4f7073c9297ff51b0671e585e6bd7d44df4da52a2cc36bdaa3ea09506286ed"} Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.543575 4558 generic.go:334] "Generic (PLEG): container finished" podID="6f64d722-5520-4fc7-b28c-378dfcced10b" containerID="8f43d06b2a4dbf32e0a9848e094fa72e0d178ba07a481c9b8769108fba35efde" exitCode=0 Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.543609 4558 generic.go:334] "Generic (PLEG): container finished" podID="6f64d722-5520-4fc7-b28c-378dfcced10b" containerID="00e0204807b0281f2971f11d0303dc3eeae0baa284660b2b6f07c287f5cc1dca" exitCode=143 Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.543655 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.543728 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"6f64d722-5520-4fc7-b28c-378dfcced10b","Type":"ContainerDied","Data":"8f43d06b2a4dbf32e0a9848e094fa72e0d178ba07a481c9b8769108fba35efde"} Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.543762 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"6f64d722-5520-4fc7-b28c-378dfcced10b","Type":"ContainerDied","Data":"00e0204807b0281f2971f11d0303dc3eeae0baa284660b2b6f07c287f5cc1dca"} Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.543773 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"6f64d722-5520-4fc7-b28c-378dfcced10b","Type":"ContainerDied","Data":"3d6c7e70046ff04313bd8625093fb9e99852be0a052dc24ba2c4ff2badadc759"} Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.544043 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-hr4ld" podUID="ef3b4518-9c22-4046-9053-142864162a50" containerName="registry-server" containerID="cri-o://68c705646962bc729eec8442fdff5e74884e6b5af688d6aaac7aedd41c048450" gracePeriod=2 Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.584143 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.614580 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.615497 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.626666 4558 scope.go:117] "RemoveContainer" containerID="e0dc689ec21d9e22c527810a07babd52b67b4001126584d804ab8eb4f44545de" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.661323 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.669861 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.687206 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:18:32 crc kubenswrapper[4558]: E0120 17:18:32.687660 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c34a8ba-597f-464a-babf-0ded54c9ebfb" containerName="glance-log" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.687674 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c34a8ba-597f-464a-babf-0ded54c9ebfb" containerName="glance-log" Jan 20 17:18:32 crc kubenswrapper[4558]: E0120 17:18:32.687692 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f64d722-5520-4fc7-b28c-378dfcced10b" containerName="glance-log" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.687699 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f64d722-5520-4fc7-b28c-378dfcced10b" containerName="glance-log" Jan 20 17:18:32 crc kubenswrapper[4558]: E0120 
17:18:32.687707 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f64d722-5520-4fc7-b28c-378dfcced10b" containerName="glance-httpd" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.687713 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f64d722-5520-4fc7-b28c-378dfcced10b" containerName="glance-httpd" Jan 20 17:18:32 crc kubenswrapper[4558]: E0120 17:18:32.687738 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c34a8ba-597f-464a-babf-0ded54c9ebfb" containerName="glance-httpd" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.687747 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c34a8ba-597f-464a-babf-0ded54c9ebfb" containerName="glance-httpd" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.687897 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="6f64d722-5520-4fc7-b28c-378dfcced10b" containerName="glance-httpd" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.687910 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c34a8ba-597f-464a-babf-0ded54c9ebfb" containerName="glance-log" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.687924 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c34a8ba-597f-464a-babf-0ded54c9ebfb" containerName="glance-httpd" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.687936 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="6f64d722-5520-4fc7-b28c-378dfcced10b" containerName="glance-log" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.691364 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.695983 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-external-config-data" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.696081 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-public-svc" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.696002 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-scripts" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.696329 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-glance-dockercfg-d2wnn" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.716971 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/18c8f0a6-157c-42f1-81b2-6bccecdcf626-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"18c8f0a6-157c-42f1-81b2-6bccecdcf626\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.717125 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/18c8f0a6-157c-42f1-81b2-6bccecdcf626-scripts\") pod \"glance-default-external-api-0\" (UID: \"18c8f0a6-157c-42f1-81b2-6bccecdcf626\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.717251 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/18c8f0a6-157c-42f1-81b2-6bccecdcf626-logs\") pod 
\"glance-default-external-api-0\" (UID: \"18c8f0a6-157c-42f1-81b2-6bccecdcf626\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.717338 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/18c8f0a6-157c-42f1-81b2-6bccecdcf626-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"18c8f0a6-157c-42f1-81b2-6bccecdcf626\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.717517 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/18c8f0a6-157c-42f1-81b2-6bccecdcf626-config-data\") pod \"glance-default-external-api-0\" (UID: \"18c8f0a6-157c-42f1-81b2-6bccecdcf626\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.717594 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-slhdl\" (UniqueName: \"kubernetes.io/projected/18c8f0a6-157c-42f1-81b2-6bccecdcf626-kube-api-access-slhdl\") pod \"glance-default-external-api-0\" (UID: \"18c8f0a6-157c-42f1-81b2-6bccecdcf626\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.717654 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18c8f0a6-157c-42f1-81b2-6bccecdcf626-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"18c8f0a6-157c-42f1-81b2-6bccecdcf626\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.717731 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage17-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage17-crc\") pod \"glance-default-external-api-0\" (UID: \"18c8f0a6-157c-42f1-81b2-6bccecdcf626\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.728304 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.747321 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.748767 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.751559 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-internal-svc" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.755415 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-internal-config-data" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.761117 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.804466 4558 scope.go:117] "RemoveContainer" containerID="b6ca4c57616936e05ff017943d6ae125b5d76fa49486200e5088d7d00b8bebdb" Jan 20 17:18:32 crc kubenswrapper[4558]: E0120 17:18:32.804945 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b6ca4c57616936e05ff017943d6ae125b5d76fa49486200e5088d7d00b8bebdb\": container with ID starting with b6ca4c57616936e05ff017943d6ae125b5d76fa49486200e5088d7d00b8bebdb not found: ID does not exist" containerID="b6ca4c57616936e05ff017943d6ae125b5d76fa49486200e5088d7d00b8bebdb" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.804974 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b6ca4c57616936e05ff017943d6ae125b5d76fa49486200e5088d7d00b8bebdb"} err="failed to get container status \"b6ca4c57616936e05ff017943d6ae125b5d76fa49486200e5088d7d00b8bebdb\": rpc error: code = NotFound desc = could not find container \"b6ca4c57616936e05ff017943d6ae125b5d76fa49486200e5088d7d00b8bebdb\": container with ID starting with b6ca4c57616936e05ff017943d6ae125b5d76fa49486200e5088d7d00b8bebdb not found: ID does not exist" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.804997 4558 scope.go:117] "RemoveContainer" containerID="e0dc689ec21d9e22c527810a07babd52b67b4001126584d804ab8eb4f44545de" Jan 20 17:18:32 crc kubenswrapper[4558]: E0120 17:18:32.805262 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e0dc689ec21d9e22c527810a07babd52b67b4001126584d804ab8eb4f44545de\": container with ID starting with e0dc689ec21d9e22c527810a07babd52b67b4001126584d804ab8eb4f44545de not found: ID does not exist" containerID="e0dc689ec21d9e22c527810a07babd52b67b4001126584d804ab8eb4f44545de" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.805283 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e0dc689ec21d9e22c527810a07babd52b67b4001126584d804ab8eb4f44545de"} err="failed to get container status \"e0dc689ec21d9e22c527810a07babd52b67b4001126584d804ab8eb4f44545de\": rpc error: code = NotFound desc = could not find container \"e0dc689ec21d9e22c527810a07babd52b67b4001126584d804ab8eb4f44545de\": container with ID starting with e0dc689ec21d9e22c527810a07babd52b67b4001126584d804ab8eb4f44545de not found: ID does not exist" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.805305 4558 scope.go:117] "RemoveContainer" containerID="b6ca4c57616936e05ff017943d6ae125b5d76fa49486200e5088d7d00b8bebdb" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.805481 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b6ca4c57616936e05ff017943d6ae125b5d76fa49486200e5088d7d00b8bebdb"} err="failed to get container 
status \"b6ca4c57616936e05ff017943d6ae125b5d76fa49486200e5088d7d00b8bebdb\": rpc error: code = NotFound desc = could not find container \"b6ca4c57616936e05ff017943d6ae125b5d76fa49486200e5088d7d00b8bebdb\": container with ID starting with b6ca4c57616936e05ff017943d6ae125b5d76fa49486200e5088d7d00b8bebdb not found: ID does not exist" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.805497 4558 scope.go:117] "RemoveContainer" containerID="e0dc689ec21d9e22c527810a07babd52b67b4001126584d804ab8eb4f44545de" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.805696 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e0dc689ec21d9e22c527810a07babd52b67b4001126584d804ab8eb4f44545de"} err="failed to get container status \"e0dc689ec21d9e22c527810a07babd52b67b4001126584d804ab8eb4f44545de\": rpc error: code = NotFound desc = could not find container \"e0dc689ec21d9e22c527810a07babd52b67b4001126584d804ab8eb4f44545de\": container with ID starting with e0dc689ec21d9e22c527810a07babd52b67b4001126584d804ab8eb4f44545de not found: ID does not exist" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.805709 4558 scope.go:117] "RemoveContainer" containerID="8f43d06b2a4dbf32e0a9848e094fa72e0d178ba07a481c9b8769108fba35efde" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.819060 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18c8f0a6-157c-42f1-81b2-6bccecdcf626-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"18c8f0a6-157c-42f1-81b2-6bccecdcf626\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.819115 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage17-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage17-crc\") pod \"glance-default-external-api-0\" (UID: \"18c8f0a6-157c-42f1-81b2-6bccecdcf626\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.819222 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/18c8f0a6-157c-42f1-81b2-6bccecdcf626-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"18c8f0a6-157c-42f1-81b2-6bccecdcf626\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.819271 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/18c8f0a6-157c-42f1-81b2-6bccecdcf626-scripts\") pod \"glance-default-external-api-0\" (UID: \"18c8f0a6-157c-42f1-81b2-6bccecdcf626\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.819335 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/18c8f0a6-157c-42f1-81b2-6bccecdcf626-logs\") pod \"glance-default-external-api-0\" (UID: \"18c8f0a6-157c-42f1-81b2-6bccecdcf626\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.819388 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/18c8f0a6-157c-42f1-81b2-6bccecdcf626-httpd-run\") pod \"glance-default-external-api-0\" (UID: 
\"18c8f0a6-157c-42f1-81b2-6bccecdcf626\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.819418 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/18c8f0a6-157c-42f1-81b2-6bccecdcf626-config-data\") pod \"glance-default-external-api-0\" (UID: \"18c8f0a6-157c-42f1-81b2-6bccecdcf626\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.819444 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-slhdl\" (UniqueName: \"kubernetes.io/projected/18c8f0a6-157c-42f1-81b2-6bccecdcf626-kube-api-access-slhdl\") pod \"glance-default-external-api-0\" (UID: \"18c8f0a6-157c-42f1-81b2-6bccecdcf626\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.819639 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage17-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage17-crc\") pod \"glance-default-external-api-0\" (UID: \"18c8f0a6-157c-42f1-81b2-6bccecdcf626\") device mount path \"/mnt/openstack/pv17\"" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.821494 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/18c8f0a6-157c-42f1-81b2-6bccecdcf626-logs\") pod \"glance-default-external-api-0\" (UID: \"18c8f0a6-157c-42f1-81b2-6bccecdcf626\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.821544 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/18c8f0a6-157c-42f1-81b2-6bccecdcf626-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"18c8f0a6-157c-42f1-81b2-6bccecdcf626\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.824527 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/18c8f0a6-157c-42f1-81b2-6bccecdcf626-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"18c8f0a6-157c-42f1-81b2-6bccecdcf626\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.826405 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18c8f0a6-157c-42f1-81b2-6bccecdcf626-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"18c8f0a6-157c-42f1-81b2-6bccecdcf626\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.828139 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/18c8f0a6-157c-42f1-81b2-6bccecdcf626-config-data\") pod \"glance-default-external-api-0\" (UID: \"18c8f0a6-157c-42f1-81b2-6bccecdcf626\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.830371 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/18c8f0a6-157c-42f1-81b2-6bccecdcf626-scripts\") pod \"glance-default-external-api-0\" (UID: 
\"18c8f0a6-157c-42f1-81b2-6bccecdcf626\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.834123 4558 scope.go:117] "RemoveContainer" containerID="00e0204807b0281f2971f11d0303dc3eeae0baa284660b2b6f07c287f5cc1dca" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.847840 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-slhdl\" (UniqueName: \"kubernetes.io/projected/18c8f0a6-157c-42f1-81b2-6bccecdcf626-kube-api-access-slhdl\") pod \"glance-default-external-api-0\" (UID: \"18c8f0a6-157c-42f1-81b2-6bccecdcf626\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.858357 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage17-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage17-crc\") pod \"glance-default-external-api-0\" (UID: \"18c8f0a6-157c-42f1-81b2-6bccecdcf626\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.909289 4558 scope.go:117] "RemoveContainer" containerID="8f43d06b2a4dbf32e0a9848e094fa72e0d178ba07a481c9b8769108fba35efde" Jan 20 17:18:32 crc kubenswrapper[4558]: E0120 17:18:32.909936 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8f43d06b2a4dbf32e0a9848e094fa72e0d178ba07a481c9b8769108fba35efde\": container with ID starting with 8f43d06b2a4dbf32e0a9848e094fa72e0d178ba07a481c9b8769108fba35efde not found: ID does not exist" containerID="8f43d06b2a4dbf32e0a9848e094fa72e0d178ba07a481c9b8769108fba35efde" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.909968 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8f43d06b2a4dbf32e0a9848e094fa72e0d178ba07a481c9b8769108fba35efde"} err="failed to get container status \"8f43d06b2a4dbf32e0a9848e094fa72e0d178ba07a481c9b8769108fba35efde\": rpc error: code = NotFound desc = could not find container \"8f43d06b2a4dbf32e0a9848e094fa72e0d178ba07a481c9b8769108fba35efde\": container with ID starting with 8f43d06b2a4dbf32e0a9848e094fa72e0d178ba07a481c9b8769108fba35efde not found: ID does not exist" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.909987 4558 scope.go:117] "RemoveContainer" containerID="00e0204807b0281f2971f11d0303dc3eeae0baa284660b2b6f07c287f5cc1dca" Jan 20 17:18:32 crc kubenswrapper[4558]: E0120 17:18:32.910283 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"00e0204807b0281f2971f11d0303dc3eeae0baa284660b2b6f07c287f5cc1dca\": container with ID starting with 00e0204807b0281f2971f11d0303dc3eeae0baa284660b2b6f07c287f5cc1dca not found: ID does not exist" containerID="00e0204807b0281f2971f11d0303dc3eeae0baa284660b2b6f07c287f5cc1dca" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.910299 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"00e0204807b0281f2971f11d0303dc3eeae0baa284660b2b6f07c287f5cc1dca"} err="failed to get container status \"00e0204807b0281f2971f11d0303dc3eeae0baa284660b2b6f07c287f5cc1dca\": rpc error: code = NotFound desc = could not find container \"00e0204807b0281f2971f11d0303dc3eeae0baa284660b2b6f07c287f5cc1dca\": container with ID starting with 00e0204807b0281f2971f11d0303dc3eeae0baa284660b2b6f07c287f5cc1dca not found: ID does not exist" Jan 20 17:18:32 crc 
kubenswrapper[4558]: I0120 17:18:32.910313 4558 scope.go:117] "RemoveContainer" containerID="8f43d06b2a4dbf32e0a9848e094fa72e0d178ba07a481c9b8769108fba35efde" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.911135 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8f43d06b2a4dbf32e0a9848e094fa72e0d178ba07a481c9b8769108fba35efde"} err="failed to get container status \"8f43d06b2a4dbf32e0a9848e094fa72e0d178ba07a481c9b8769108fba35efde\": rpc error: code = NotFound desc = could not find container \"8f43d06b2a4dbf32e0a9848e094fa72e0d178ba07a481c9b8769108fba35efde\": container with ID starting with 8f43d06b2a4dbf32e0a9848e094fa72e0d178ba07a481c9b8769108fba35efde not found: ID does not exist" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.911191 4558 scope.go:117] "RemoveContainer" containerID="00e0204807b0281f2971f11d0303dc3eeae0baa284660b2b6f07c287f5cc1dca" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.915259 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"00e0204807b0281f2971f11d0303dc3eeae0baa284660b2b6f07c287f5cc1dca"} err="failed to get container status \"00e0204807b0281f2971f11d0303dc3eeae0baa284660b2b6f07c287f5cc1dca\": rpc error: code = NotFound desc = could not find container \"00e0204807b0281f2971f11d0303dc3eeae0baa284660b2b6f07c287f5cc1dca\": container with ID starting with 00e0204807b0281f2971f11d0303dc3eeae0baa284660b2b6f07c287f5cc1dca not found: ID does not exist" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.921051 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4f358da8-45ff-43a3-a671-05c81bb940a8-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"4f358da8-45ff-43a3-a671-05c81bb940a8\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.921145 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f358da8-45ff-43a3-a671-05c81bb940a8-config-data\") pod \"glance-default-internal-api-0\" (UID: \"4f358da8-45ff-43a3-a671-05c81bb940a8\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.921205 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f358da8-45ff-43a3-a671-05c81bb940a8-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"4f358da8-45ff-43a3-a671-05c81bb940a8\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.921244 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4f358da8-45ff-43a3-a671-05c81bb940a8-logs\") pod \"glance-default-internal-api-0\" (UID: \"4f358da8-45ff-43a3-a671-05c81bb940a8\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.921279 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t9r5v\" (UniqueName: \"kubernetes.io/projected/4f358da8-45ff-43a3-a671-05c81bb940a8-kube-api-access-t9r5v\") pod \"glance-default-internal-api-0\" (UID: \"4f358da8-45ff-43a3-a671-05c81bb940a8\") 
" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.921308 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4f358da8-45ff-43a3-a671-05c81bb940a8-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"4f358da8-45ff-43a3-a671-05c81bb940a8\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.921385 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4f358da8-45ff-43a3-a671-05c81bb940a8-scripts\") pod \"glance-default-internal-api-0\" (UID: \"4f358da8-45ff-43a3-a671-05c81bb940a8\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:18:32 crc kubenswrapper[4558]: I0120 17:18:32.921412 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"glance-default-internal-api-0\" (UID: \"4f358da8-45ff-43a3-a671-05c81bb940a8\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.023281 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4f358da8-45ff-43a3-a671-05c81bb940a8-scripts\") pod \"glance-default-internal-api-0\" (UID: \"4f358da8-45ff-43a3-a671-05c81bb940a8\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.023342 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"glance-default-internal-api-0\" (UID: \"4f358da8-45ff-43a3-a671-05c81bb940a8\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.023430 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4f358da8-45ff-43a3-a671-05c81bb940a8-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"4f358da8-45ff-43a3-a671-05c81bb940a8\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.023871 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"glance-default-internal-api-0\" (UID: \"4f358da8-45ff-43a3-a671-05c81bb940a8\") device mount path \"/mnt/openstack/pv13\"" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.023487 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f358da8-45ff-43a3-a671-05c81bb940a8-config-data\") pod \"glance-default-internal-api-0\" (UID: \"4f358da8-45ff-43a3-a671-05c81bb940a8\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.024051 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f358da8-45ff-43a3-a671-05c81bb940a8-combined-ca-bundle\") pod 
\"glance-default-internal-api-0\" (UID: \"4f358da8-45ff-43a3-a671-05c81bb940a8\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.024104 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4f358da8-45ff-43a3-a671-05c81bb940a8-logs\") pod \"glance-default-internal-api-0\" (UID: \"4f358da8-45ff-43a3-a671-05c81bb940a8\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.024138 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t9r5v\" (UniqueName: \"kubernetes.io/projected/4f358da8-45ff-43a3-a671-05c81bb940a8-kube-api-access-t9r5v\") pod \"glance-default-internal-api-0\" (UID: \"4f358da8-45ff-43a3-a671-05c81bb940a8\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.024198 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4f358da8-45ff-43a3-a671-05c81bb940a8-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"4f358da8-45ff-43a3-a671-05c81bb940a8\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.024209 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4f358da8-45ff-43a3-a671-05c81bb940a8-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"4f358da8-45ff-43a3-a671-05c81bb940a8\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.024630 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4f358da8-45ff-43a3-a671-05c81bb940a8-logs\") pod \"glance-default-internal-api-0\" (UID: \"4f358da8-45ff-43a3-a671-05c81bb940a8\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.034880 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4f358da8-45ff-43a3-a671-05c81bb940a8-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"4f358da8-45ff-43a3-a671-05c81bb940a8\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.035112 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4f358da8-45ff-43a3-a671-05c81bb940a8-scripts\") pod \"glance-default-internal-api-0\" (UID: \"4f358da8-45ff-43a3-a671-05c81bb940a8\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.035246 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f358da8-45ff-43a3-a671-05c81bb940a8-config-data\") pod \"glance-default-internal-api-0\" (UID: \"4f358da8-45ff-43a3-a671-05c81bb940a8\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.035495 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f358da8-45ff-43a3-a671-05c81bb940a8-combined-ca-bundle\") pod 
\"glance-default-internal-api-0\" (UID: \"4f358da8-45ff-43a3-a671-05c81bb940a8\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.044794 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t9r5v\" (UniqueName: \"kubernetes.io/projected/4f358da8-45ff-43a3-a671-05c81bb940a8-kube-api-access-t9r5v\") pod \"glance-default-internal-api-0\" (UID: \"4f358da8-45ff-43a3-a671-05c81bb940a8\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.065861 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"glance-default-internal-api-0\" (UID: \"4f358da8-45ff-43a3-a671-05c81bb940a8\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.080279 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-db-sync-dm58n" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.111347 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.123928 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.175926 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hr4ld" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.181746 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-db-sync-5vv4z" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.227312 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7abf0081-40ee-4a29-b5fe-3a8c4c9f0214-config-data\") pod \"7abf0081-40ee-4a29-b5fe-3a8c4c9f0214\" (UID: \"7abf0081-40ee-4a29-b5fe-3a8c4c9f0214\") " Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.227405 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7abf0081-40ee-4a29-b5fe-3a8c4c9f0214-combined-ca-bundle\") pod \"7abf0081-40ee-4a29-b5fe-3a8c4c9f0214\" (UID: \"7abf0081-40ee-4a29-b5fe-3a8c4c9f0214\") " Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.227440 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7abf0081-40ee-4a29-b5fe-3a8c4c9f0214-scripts\") pod \"7abf0081-40ee-4a29-b5fe-3a8c4c9f0214\" (UID: \"7abf0081-40ee-4a29-b5fe-3a8c4c9f0214\") " Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.227588 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7abf0081-40ee-4a29-b5fe-3a8c4c9f0214-logs\") pod \"7abf0081-40ee-4a29-b5fe-3a8c4c9f0214\" (UID: \"7abf0081-40ee-4a29-b5fe-3a8c4c9f0214\") " Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.227664 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rxhh2\" (UniqueName: \"kubernetes.io/projected/7abf0081-40ee-4a29-b5fe-3a8c4c9f0214-kube-api-access-rxhh2\") pod \"7abf0081-40ee-4a29-b5fe-3a8c4c9f0214\" (UID: \"7abf0081-40ee-4a29-b5fe-3a8c4c9f0214\") " Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.229106 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7abf0081-40ee-4a29-b5fe-3a8c4c9f0214-logs" (OuterVolumeSpecName: "logs") pod "7abf0081-40ee-4a29-b5fe-3a8c4c9f0214" (UID: "7abf0081-40ee-4a29-b5fe-3a8c4c9f0214"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.233218 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7abf0081-40ee-4a29-b5fe-3a8c4c9f0214-kube-api-access-rxhh2" (OuterVolumeSpecName: "kube-api-access-rxhh2") pod "7abf0081-40ee-4a29-b5fe-3a8c4c9f0214" (UID: "7abf0081-40ee-4a29-b5fe-3a8c4c9f0214"). InnerVolumeSpecName "kube-api-access-rxhh2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.236421 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7abf0081-40ee-4a29-b5fe-3a8c4c9f0214-scripts" (OuterVolumeSpecName: "scripts") pod "7abf0081-40ee-4a29-b5fe-3a8c4c9f0214" (UID: "7abf0081-40ee-4a29-b5fe-3a8c4c9f0214"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.253185 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7abf0081-40ee-4a29-b5fe-3a8c4c9f0214-config-data" (OuterVolumeSpecName: "config-data") pod "7abf0081-40ee-4a29-b5fe-3a8c4c9f0214" (UID: "7abf0081-40ee-4a29-b5fe-3a8c4c9f0214"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.280935 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7abf0081-40ee-4a29-b5fe-3a8c4c9f0214-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7abf0081-40ee-4a29-b5fe-3a8c4c9f0214" (UID: "7abf0081-40ee-4a29-b5fe-3a8c4c9f0214"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.330037 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qb6k9\" (UniqueName: \"kubernetes.io/projected/ef3b4518-9c22-4046-9053-142864162a50-kube-api-access-qb6k9\") pod \"ef3b4518-9c22-4046-9053-142864162a50\" (UID: \"ef3b4518-9c22-4046-9053-142864162a50\") " Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.330230 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pmqlj\" (UniqueName: \"kubernetes.io/projected/c720d172-36bf-467b-a900-dec2dbc43d3e-kube-api-access-pmqlj\") pod \"c720d172-36bf-467b-a900-dec2dbc43d3e\" (UID: \"c720d172-36bf-467b-a900-dec2dbc43d3e\") " Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.330274 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c720d172-36bf-467b-a900-dec2dbc43d3e-db-sync-config-data\") pod \"c720d172-36bf-467b-a900-dec2dbc43d3e\" (UID: \"c720d172-36bf-467b-a900-dec2dbc43d3e\") " Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.330303 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c720d172-36bf-467b-a900-dec2dbc43d3e-combined-ca-bundle\") pod \"c720d172-36bf-467b-a900-dec2dbc43d3e\" (UID: \"c720d172-36bf-467b-a900-dec2dbc43d3e\") " Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.330359 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ef3b4518-9c22-4046-9053-142864162a50-utilities\") pod \"ef3b4518-9c22-4046-9053-142864162a50\" (UID: \"ef3b4518-9c22-4046-9053-142864162a50\") " Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.330480 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ef3b4518-9c22-4046-9053-142864162a50-catalog-content\") pod \"ef3b4518-9c22-4046-9053-142864162a50\" (UID: \"ef3b4518-9c22-4046-9053-142864162a50\") " Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.331193 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7abf0081-40ee-4a29-b5fe-3a8c4c9f0214-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.331210 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rxhh2\" (UniqueName: \"kubernetes.io/projected/7abf0081-40ee-4a29-b5fe-3a8c4c9f0214-kube-api-access-rxhh2\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.331221 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7abf0081-40ee-4a29-b5fe-3a8c4c9f0214-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.331231 4558 reconciler_common.go:293] "Volume 
detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7abf0081-40ee-4a29-b5fe-3a8c4c9f0214-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.331242 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7abf0081-40ee-4a29-b5fe-3a8c4c9f0214-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.331894 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ef3b4518-9c22-4046-9053-142864162a50-utilities" (OuterVolumeSpecName: "utilities") pod "ef3b4518-9c22-4046-9053-142864162a50" (UID: "ef3b4518-9c22-4046-9053-142864162a50"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.334311 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ef3b4518-9c22-4046-9053-142864162a50-kube-api-access-qb6k9" (OuterVolumeSpecName: "kube-api-access-qb6k9") pod "ef3b4518-9c22-4046-9053-142864162a50" (UID: "ef3b4518-9c22-4046-9053-142864162a50"). InnerVolumeSpecName "kube-api-access-qb6k9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.334655 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c720d172-36bf-467b-a900-dec2dbc43d3e-kube-api-access-pmqlj" (OuterVolumeSpecName: "kube-api-access-pmqlj") pod "c720d172-36bf-467b-a900-dec2dbc43d3e" (UID: "c720d172-36bf-467b-a900-dec2dbc43d3e"). InnerVolumeSpecName "kube-api-access-pmqlj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.336354 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c720d172-36bf-467b-a900-dec2dbc43d3e-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "c720d172-36bf-467b-a900-dec2dbc43d3e" (UID: "c720d172-36bf-467b-a900-dec2dbc43d3e"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.361588 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c720d172-36bf-467b-a900-dec2dbc43d3e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c720d172-36bf-467b-a900-dec2dbc43d3e" (UID: "c720d172-36bf-467b-a900-dec2dbc43d3e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.395646 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ef3b4518-9c22-4046-9053-142864162a50-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ef3b4518-9c22-4046-9053-142864162a50" (UID: "ef3b4518-9c22-4046-9053-142864162a50"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.433961 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qb6k9\" (UniqueName: \"kubernetes.io/projected/ef3b4518-9c22-4046-9053-142864162a50-kube-api-access-qb6k9\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.433996 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pmqlj\" (UniqueName: \"kubernetes.io/projected/c720d172-36bf-467b-a900-dec2dbc43d3e-kube-api-access-pmqlj\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.434009 4558 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c720d172-36bf-467b-a900-dec2dbc43d3e-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.434020 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c720d172-36bf-467b-a900-dec2dbc43d3e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.434031 4558 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ef3b4518-9c22-4046-9053-142864162a50-utilities\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.434041 4558 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ef3b4518-9c22-4046-9053-142864162a50-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.571149 4558 generic.go:334] "Generic (PLEG): container finished" podID="0b08b6e3-03a5-4dfa-aba3-2847529be269" containerID="e7f3fec701ac7c97a615fd06b3109321d456b072a7460d87900d9cdedcaa35e1" exitCode=0 Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.571267 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-dgphh" event={"ID":"0b08b6e3-03a5-4dfa-aba3-2847529be269","Type":"ContainerDied","Data":"e7f3fec701ac7c97a615fd06b3109321d456b072a7460d87900d9cdedcaa35e1"} Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.579571 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-db-sync-dm58n" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.579557 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-sync-dm58n" event={"ID":"7abf0081-40ee-4a29-b5fe-3a8c4c9f0214","Type":"ContainerDied","Data":"f11ba6332c6aeee803c7373afa780d9d3b5327cddb76f21d013f4075a450ca99"} Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.579753 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f11ba6332c6aeee803c7373afa780d9d3b5327cddb76f21d013f4075a450ca99" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.581554 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-sync-5vv4z" event={"ID":"c720d172-36bf-467b-a900-dec2dbc43d3e","Type":"ContainerDied","Data":"785077f032f9a4874f05a3eeef23dd89e4fdb8ecaaab8f5f3235c42468bc09ed"} Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.581595 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="785077f032f9a4874f05a3eeef23dd89e4fdb8ecaaab8f5f3235c42468bc09ed" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.581617 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-db-sync-5vv4z" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.584059 4558 generic.go:334] "Generic (PLEG): container finished" podID="ef3b4518-9c22-4046-9053-142864162a50" containerID="68c705646962bc729eec8442fdff5e74884e6b5af688d6aaac7aedd41c048450" exitCode=0 Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.584102 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hr4ld" event={"ID":"ef3b4518-9c22-4046-9053-142864162a50","Type":"ContainerDied","Data":"68c705646962bc729eec8442fdff5e74884e6b5af688d6aaac7aedd41c048450"} Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.584120 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hr4ld" event={"ID":"ef3b4518-9c22-4046-9053-142864162a50","Type":"ContainerDied","Data":"9084a81207b12db62b1cc276844cd49b2213c778988bdd2ca9f180a10c54ae34"} Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.584139 4558 scope.go:117] "RemoveContainer" containerID="68c705646962bc729eec8442fdff5e74884e6b5af688d6aaac7aedd41c048450" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.584315 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-hr4ld" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.664677 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.670650 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.732098 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/placement-74b74897d6-9wxhf"] Jan 20 17:18:33 crc kubenswrapper[4558]: E0120 17:18:33.732638 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7abf0081-40ee-4a29-b5fe-3a8c4c9f0214" containerName="placement-db-sync" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.732710 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7abf0081-40ee-4a29-b5fe-3a8c4c9f0214" containerName="placement-db-sync" Jan 20 17:18:33 crc kubenswrapper[4558]: E0120 17:18:33.732778 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef3b4518-9c22-4046-9053-142864162a50" containerName="extract-utilities" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.732822 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef3b4518-9c22-4046-9053-142864162a50" containerName="extract-utilities" Jan 20 17:18:33 crc kubenswrapper[4558]: E0120 17:18:33.732879 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c720d172-36bf-467b-a900-dec2dbc43d3e" containerName="barbican-db-sync" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.732928 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c720d172-36bf-467b-a900-dec2dbc43d3e" containerName="barbican-db-sync" Jan 20 17:18:33 crc kubenswrapper[4558]: E0120 17:18:33.732981 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef3b4518-9c22-4046-9053-142864162a50" containerName="registry-server" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.733021 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef3b4518-9c22-4046-9053-142864162a50" containerName="registry-server" Jan 20 17:18:33 crc kubenswrapper[4558]: E0120 17:18:33.733076 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef3b4518-9c22-4046-9053-142864162a50" containerName="extract-content" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.733124 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef3b4518-9c22-4046-9053-142864162a50" containerName="extract-content" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.733376 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c720d172-36bf-467b-a900-dec2dbc43d3e" containerName="barbican-db-sync" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.733442 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef3b4518-9c22-4046-9053-142864162a50" containerName="registry-server" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.733499 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="7abf0081-40ee-4a29-b5fe-3a8c4c9f0214" containerName="placement-db-sync" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.736628 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-74b74897d6-9wxhf" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.748865 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"placement-scripts" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.755250 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"placement-placement-dockercfg-tz8rh" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.765655 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"placement-config-data" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.774700 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-74b74897d6-9wxhf"] Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.848058 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7adfb76f-c71c-4038-8b25-346511ef09d5-config-data\") pod \"placement-74b74897d6-9wxhf\" (UID: \"7adfb76f-c71c-4038-8b25-346511ef09d5\") " pod="openstack-kuttl-tests/placement-74b74897d6-9wxhf" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.848107 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7adfb76f-c71c-4038-8b25-346511ef09d5-combined-ca-bundle\") pod \"placement-74b74897d6-9wxhf\" (UID: \"7adfb76f-c71c-4038-8b25-346511ef09d5\") " pod="openstack-kuttl-tests/placement-74b74897d6-9wxhf" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.848141 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7adfb76f-c71c-4038-8b25-346511ef09d5-scripts\") pod \"placement-74b74897d6-9wxhf\" (UID: \"7adfb76f-c71c-4038-8b25-346511ef09d5\") " pod="openstack-kuttl-tests/placement-74b74897d6-9wxhf" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.848180 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4622k\" (UniqueName: \"kubernetes.io/projected/7adfb76f-c71c-4038-8b25-346511ef09d5-kube-api-access-4622k\") pod \"placement-74b74897d6-9wxhf\" (UID: \"7adfb76f-c71c-4038-8b25-346511ef09d5\") " pod="openstack-kuttl-tests/placement-74b74897d6-9wxhf" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.848217 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7adfb76f-c71c-4038-8b25-346511ef09d5-logs\") pod \"placement-74b74897d6-9wxhf\" (UID: \"7adfb76f-c71c-4038-8b25-346511ef09d5\") " pod="openstack-kuttl-tests/placement-74b74897d6-9wxhf" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.861672 4558 scope.go:117] "RemoveContainer" containerID="de632d948732dd5a4e3d90e35e90a8df58b9bbb6f52788fd2e8da246880843ad" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.868741 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-worker-795d8f7875-gfh24"] Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.871201 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-795d8f7875-gfh24" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.873602 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-barbican-dockercfg-jggcf" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.876476 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-worker-config-data" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.877513 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-config-data" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.895221 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-worker-795d8f7875-gfh24"] Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.920285 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-9b8896fdb-7mrlf"] Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.928697 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-9b8896fdb-7mrlf" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.933332 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-keystone-listener-config-data" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.945868 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hr4ld"] Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.952718 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7adfb76f-c71c-4038-8b25-346511ef09d5-scripts\") pod \"placement-74b74897d6-9wxhf\" (UID: \"7adfb76f-c71c-4038-8b25-346511ef09d5\") " pod="openstack-kuttl-tests/placement-74b74897d6-9wxhf" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.952770 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4622k\" (UniqueName: \"kubernetes.io/projected/7adfb76f-c71c-4038-8b25-346511ef09d5-kube-api-access-4622k\") pod \"placement-74b74897d6-9wxhf\" (UID: \"7adfb76f-c71c-4038-8b25-346511ef09d5\") " pod="openstack-kuttl-tests/placement-74b74897d6-9wxhf" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.952804 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7adfb76f-c71c-4038-8b25-346511ef09d5-logs\") pod \"placement-74b74897d6-9wxhf\" (UID: \"7adfb76f-c71c-4038-8b25-346511ef09d5\") " pod="openstack-kuttl-tests/placement-74b74897d6-9wxhf" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.952928 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7adfb76f-c71c-4038-8b25-346511ef09d5-config-data\") pod \"placement-74b74897d6-9wxhf\" (UID: \"7adfb76f-c71c-4038-8b25-346511ef09d5\") " pod="openstack-kuttl-tests/placement-74b74897d6-9wxhf" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.952954 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7adfb76f-c71c-4038-8b25-346511ef09d5-combined-ca-bundle\") pod \"placement-74b74897d6-9wxhf\" (UID: \"7adfb76f-c71c-4038-8b25-346511ef09d5\") " pod="openstack-kuttl-tests/placement-74b74897d6-9wxhf" Jan 20 
17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.953625 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7adfb76f-c71c-4038-8b25-346511ef09d5-logs\") pod \"placement-74b74897d6-9wxhf\" (UID: \"7adfb76f-c71c-4038-8b25-346511ef09d5\") " pod="openstack-kuttl-tests/placement-74b74897d6-9wxhf" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.960285 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7adfb76f-c71c-4038-8b25-346511ef09d5-combined-ca-bundle\") pod \"placement-74b74897d6-9wxhf\" (UID: \"7adfb76f-c71c-4038-8b25-346511ef09d5\") " pod="openstack-kuttl-tests/placement-74b74897d6-9wxhf" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.960511 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7adfb76f-c71c-4038-8b25-346511ef09d5-config-data\") pod \"placement-74b74897d6-9wxhf\" (UID: \"7adfb76f-c71c-4038-8b25-346511ef09d5\") " pod="openstack-kuttl-tests/placement-74b74897d6-9wxhf" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.961429 4558 scope.go:117] "RemoveContainer" containerID="218e508ad5a5c1a35f4f75204702c38a6c14be8d230cdae137643d62efe67558" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.964817 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7adfb76f-c71c-4038-8b25-346511ef09d5-scripts\") pod \"placement-74b74897d6-9wxhf\" (UID: \"7adfb76f-c71c-4038-8b25-346511ef09d5\") " pod="openstack-kuttl-tests/placement-74b74897d6-9wxhf" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.968124 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4622k\" (UniqueName: \"kubernetes.io/projected/7adfb76f-c71c-4038-8b25-346511ef09d5-kube-api-access-4622k\") pod \"placement-74b74897d6-9wxhf\" (UID: \"7adfb76f-c71c-4038-8b25-346511ef09d5\") " pod="openstack-kuttl-tests/placement-74b74897d6-9wxhf" Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.970380 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-hr4ld"] Jan 20 17:18:33 crc kubenswrapper[4558]: I0120 17:18:33.976586 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-9b8896fdb-7mrlf"] Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.036305 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-api-678474f9b-4hxj7"] Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.042264 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-api-678474f9b-4hxj7" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.044849 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-api-config-data" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.048961 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-api-678474f9b-4hxj7"] Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.056885 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2f8ae24b-258e-4d32-b312-99f5015f83d6-config-data-custom\") pod \"barbican-worker-795d8f7875-gfh24\" (UID: \"2f8ae24b-258e-4d32-b312-99f5015f83d6\") " pod="openstack-kuttl-tests/barbican-worker-795d8f7875-gfh24" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.056967 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a948066b-fa95-4fbb-83e6-6f26f6c76652-combined-ca-bundle\") pod \"barbican-keystone-listener-9b8896fdb-7mrlf\" (UID: \"a948066b-fa95-4fbb-83e6-6f26f6c76652\") " pod="openstack-kuttl-tests/barbican-keystone-listener-9b8896fdb-7mrlf" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.057013 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-998h7\" (UniqueName: \"kubernetes.io/projected/a948066b-fa95-4fbb-83e6-6f26f6c76652-kube-api-access-998h7\") pod \"barbican-keystone-listener-9b8896fdb-7mrlf\" (UID: \"a948066b-fa95-4fbb-83e6-6f26f6c76652\") " pod="openstack-kuttl-tests/barbican-keystone-listener-9b8896fdb-7mrlf" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.057089 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f8ae24b-258e-4d32-b312-99f5015f83d6-combined-ca-bundle\") pod \"barbican-worker-795d8f7875-gfh24\" (UID: \"2f8ae24b-258e-4d32-b312-99f5015f83d6\") " pod="openstack-kuttl-tests/barbican-worker-795d8f7875-gfh24" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.057122 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2f8ae24b-258e-4d32-b312-99f5015f83d6-logs\") pod \"barbican-worker-795d8f7875-gfh24\" (UID: \"2f8ae24b-258e-4d32-b312-99f5015f83d6\") " pod="openstack-kuttl-tests/barbican-worker-795d8f7875-gfh24" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.057149 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xr484\" (UniqueName: \"kubernetes.io/projected/2f8ae24b-258e-4d32-b312-99f5015f83d6-kube-api-access-xr484\") pod \"barbican-worker-795d8f7875-gfh24\" (UID: \"2f8ae24b-258e-4d32-b312-99f5015f83d6\") " pod="openstack-kuttl-tests/barbican-worker-795d8f7875-gfh24" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.057215 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2f8ae24b-258e-4d32-b312-99f5015f83d6-config-data\") pod \"barbican-worker-795d8f7875-gfh24\" (UID: \"2f8ae24b-258e-4d32-b312-99f5015f83d6\") " pod="openstack-kuttl-tests/barbican-worker-795d8f7875-gfh24" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.057252 
4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a948066b-fa95-4fbb-83e6-6f26f6c76652-config-data-custom\") pod \"barbican-keystone-listener-9b8896fdb-7mrlf\" (UID: \"a948066b-fa95-4fbb-83e6-6f26f6c76652\") " pod="openstack-kuttl-tests/barbican-keystone-listener-9b8896fdb-7mrlf" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.059567 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a948066b-fa95-4fbb-83e6-6f26f6c76652-config-data\") pod \"barbican-keystone-listener-9b8896fdb-7mrlf\" (UID: \"a948066b-fa95-4fbb-83e6-6f26f6c76652\") " pod="openstack-kuttl-tests/barbican-keystone-listener-9b8896fdb-7mrlf" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.059696 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a948066b-fa95-4fbb-83e6-6f26f6c76652-logs\") pod \"barbican-keystone-listener-9b8896fdb-7mrlf\" (UID: \"a948066b-fa95-4fbb-83e6-6f26f6c76652\") " pod="openstack-kuttl-tests/barbican-keystone-listener-9b8896fdb-7mrlf" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.072958 4558 scope.go:117] "RemoveContainer" containerID="68c705646962bc729eec8442fdff5e74884e6b5af688d6aaac7aedd41c048450" Jan 20 17:18:34 crc kubenswrapper[4558]: E0120 17:18:34.076857 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"68c705646962bc729eec8442fdff5e74884e6b5af688d6aaac7aedd41c048450\": container with ID starting with 68c705646962bc729eec8442fdff5e74884e6b5af688d6aaac7aedd41c048450 not found: ID does not exist" containerID="68c705646962bc729eec8442fdff5e74884e6b5af688d6aaac7aedd41c048450" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.076912 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"68c705646962bc729eec8442fdff5e74884e6b5af688d6aaac7aedd41c048450"} err="failed to get container status \"68c705646962bc729eec8442fdff5e74884e6b5af688d6aaac7aedd41c048450\": rpc error: code = NotFound desc = could not find container \"68c705646962bc729eec8442fdff5e74884e6b5af688d6aaac7aedd41c048450\": container with ID starting with 68c705646962bc729eec8442fdff5e74884e6b5af688d6aaac7aedd41c048450 not found: ID does not exist" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.076936 4558 scope.go:117] "RemoveContainer" containerID="de632d948732dd5a4e3d90e35e90a8df58b9bbb6f52788fd2e8da246880843ad" Jan 20 17:18:34 crc kubenswrapper[4558]: E0120 17:18:34.077374 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"de632d948732dd5a4e3d90e35e90a8df58b9bbb6f52788fd2e8da246880843ad\": container with ID starting with de632d948732dd5a4e3d90e35e90a8df58b9bbb6f52788fd2e8da246880843ad not found: ID does not exist" containerID="de632d948732dd5a4e3d90e35e90a8df58b9bbb6f52788fd2e8da246880843ad" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.077417 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"de632d948732dd5a4e3d90e35e90a8df58b9bbb6f52788fd2e8da246880843ad"} err="failed to get container status \"de632d948732dd5a4e3d90e35e90a8df58b9bbb6f52788fd2e8da246880843ad\": rpc error: code = NotFound desc = could not find container 
\"de632d948732dd5a4e3d90e35e90a8df58b9bbb6f52788fd2e8da246880843ad\": container with ID starting with de632d948732dd5a4e3d90e35e90a8df58b9bbb6f52788fd2e8da246880843ad not found: ID does not exist" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.077434 4558 scope.go:117] "RemoveContainer" containerID="218e508ad5a5c1a35f4f75204702c38a6c14be8d230cdae137643d62efe67558" Jan 20 17:18:34 crc kubenswrapper[4558]: E0120 17:18:34.077660 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"218e508ad5a5c1a35f4f75204702c38a6c14be8d230cdae137643d62efe67558\": container with ID starting with 218e508ad5a5c1a35f4f75204702c38a6c14be8d230cdae137643d62efe67558 not found: ID does not exist" containerID="218e508ad5a5c1a35f4f75204702c38a6c14be8d230cdae137643d62efe67558" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.077684 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"218e508ad5a5c1a35f4f75204702c38a6c14be8d230cdae137643d62efe67558"} err="failed to get container status \"218e508ad5a5c1a35f4f75204702c38a6c14be8d230cdae137643d62efe67558\": rpc error: code = NotFound desc = could not find container \"218e508ad5a5c1a35f4f75204702c38a6c14be8d230cdae137643d62efe67558\": container with ID starting with 218e508ad5a5c1a35f4f75204702c38a6c14be8d230cdae137643d62efe67558 not found: ID does not exist" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.128496 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-74b74897d6-9wxhf" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.165899 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f8ae24b-258e-4d32-b312-99f5015f83d6-combined-ca-bundle\") pod \"barbican-worker-795d8f7875-gfh24\" (UID: \"2f8ae24b-258e-4d32-b312-99f5015f83d6\") " pod="openstack-kuttl-tests/barbican-worker-795d8f7875-gfh24" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.165966 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2f8ae24b-258e-4d32-b312-99f5015f83d6-logs\") pod \"barbican-worker-795d8f7875-gfh24\" (UID: \"2f8ae24b-258e-4d32-b312-99f5015f83d6\") " pod="openstack-kuttl-tests/barbican-worker-795d8f7875-gfh24" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.166015 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/47c6e50e-b277-48d8-928d-f0d571acf238-config-data-custom\") pod \"barbican-api-678474f9b-4hxj7\" (UID: \"47c6e50e-b277-48d8-928d-f0d571acf238\") " pod="openstack-kuttl-tests/barbican-api-678474f9b-4hxj7" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.166043 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xr484\" (UniqueName: \"kubernetes.io/projected/2f8ae24b-258e-4d32-b312-99f5015f83d6-kube-api-access-xr484\") pod \"barbican-worker-795d8f7875-gfh24\" (UID: \"2f8ae24b-258e-4d32-b312-99f5015f83d6\") " pod="openstack-kuttl-tests/barbican-worker-795d8f7875-gfh24" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.166637 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2f8ae24b-258e-4d32-b312-99f5015f83d6-logs\") pod \"barbican-worker-795d8f7875-gfh24\" (UID: 
\"2f8ae24b-258e-4d32-b312-99f5015f83d6\") " pod="openstack-kuttl-tests/barbican-worker-795d8f7875-gfh24" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.172072 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2f8ae24b-258e-4d32-b312-99f5015f83d6-config-data\") pod \"barbican-worker-795d8f7875-gfh24\" (UID: \"2f8ae24b-258e-4d32-b312-99f5015f83d6\") " pod="openstack-kuttl-tests/barbican-worker-795d8f7875-gfh24" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.172156 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fg24b\" (UniqueName: \"kubernetes.io/projected/47c6e50e-b277-48d8-928d-f0d571acf238-kube-api-access-fg24b\") pod \"barbican-api-678474f9b-4hxj7\" (UID: \"47c6e50e-b277-48d8-928d-f0d571acf238\") " pod="openstack-kuttl-tests/barbican-api-678474f9b-4hxj7" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.172213 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a948066b-fa95-4fbb-83e6-6f26f6c76652-config-data-custom\") pod \"barbican-keystone-listener-9b8896fdb-7mrlf\" (UID: \"a948066b-fa95-4fbb-83e6-6f26f6c76652\") " pod="openstack-kuttl-tests/barbican-keystone-listener-9b8896fdb-7mrlf" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.172243 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47c6e50e-b277-48d8-928d-f0d571acf238-config-data\") pod \"barbican-api-678474f9b-4hxj7\" (UID: \"47c6e50e-b277-48d8-928d-f0d571acf238\") " pod="openstack-kuttl-tests/barbican-api-678474f9b-4hxj7" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.172282 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a948066b-fa95-4fbb-83e6-6f26f6c76652-config-data\") pod \"barbican-keystone-listener-9b8896fdb-7mrlf\" (UID: \"a948066b-fa95-4fbb-83e6-6f26f6c76652\") " pod="openstack-kuttl-tests/barbican-keystone-listener-9b8896fdb-7mrlf" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.172342 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a948066b-fa95-4fbb-83e6-6f26f6c76652-logs\") pod \"barbican-keystone-listener-9b8896fdb-7mrlf\" (UID: \"a948066b-fa95-4fbb-83e6-6f26f6c76652\") " pod="openstack-kuttl-tests/barbican-keystone-listener-9b8896fdb-7mrlf" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.172440 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/47c6e50e-b277-48d8-928d-f0d571acf238-logs\") pod \"barbican-api-678474f9b-4hxj7\" (UID: \"47c6e50e-b277-48d8-928d-f0d571acf238\") " pod="openstack-kuttl-tests/barbican-api-678474f9b-4hxj7" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.172609 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2f8ae24b-258e-4d32-b312-99f5015f83d6-config-data-custom\") pod \"barbican-worker-795d8f7875-gfh24\" (UID: \"2f8ae24b-258e-4d32-b312-99f5015f83d6\") " pod="openstack-kuttl-tests/barbican-worker-795d8f7875-gfh24" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.172709 4558 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a948066b-fa95-4fbb-83e6-6f26f6c76652-combined-ca-bundle\") pod \"barbican-keystone-listener-9b8896fdb-7mrlf\" (UID: \"a948066b-fa95-4fbb-83e6-6f26f6c76652\") " pod="openstack-kuttl-tests/barbican-keystone-listener-9b8896fdb-7mrlf" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.172785 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-998h7\" (UniqueName: \"kubernetes.io/projected/a948066b-fa95-4fbb-83e6-6f26f6c76652-kube-api-access-998h7\") pod \"barbican-keystone-listener-9b8896fdb-7mrlf\" (UID: \"a948066b-fa95-4fbb-83e6-6f26f6c76652\") " pod="openstack-kuttl-tests/barbican-keystone-listener-9b8896fdb-7mrlf" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.172893 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47c6e50e-b277-48d8-928d-f0d571acf238-combined-ca-bundle\") pod \"barbican-api-678474f9b-4hxj7\" (UID: \"47c6e50e-b277-48d8-928d-f0d571acf238\") " pod="openstack-kuttl-tests/barbican-api-678474f9b-4hxj7" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.172924 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f8ae24b-258e-4d32-b312-99f5015f83d6-combined-ca-bundle\") pod \"barbican-worker-795d8f7875-gfh24\" (UID: \"2f8ae24b-258e-4d32-b312-99f5015f83d6\") " pod="openstack-kuttl-tests/barbican-worker-795d8f7875-gfh24" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.173537 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a948066b-fa95-4fbb-83e6-6f26f6c76652-logs\") pod \"barbican-keystone-listener-9b8896fdb-7mrlf\" (UID: \"a948066b-fa95-4fbb-83e6-6f26f6c76652\") " pod="openstack-kuttl-tests/barbican-keystone-listener-9b8896fdb-7mrlf" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.178528 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2f8ae24b-258e-4d32-b312-99f5015f83d6-config-data\") pod \"barbican-worker-795d8f7875-gfh24\" (UID: \"2f8ae24b-258e-4d32-b312-99f5015f83d6\") " pod="openstack-kuttl-tests/barbican-worker-795d8f7875-gfh24" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.178692 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a948066b-fa95-4fbb-83e6-6f26f6c76652-config-data-custom\") pod \"barbican-keystone-listener-9b8896fdb-7mrlf\" (UID: \"a948066b-fa95-4fbb-83e6-6f26f6c76652\") " pod="openstack-kuttl-tests/barbican-keystone-listener-9b8896fdb-7mrlf" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.182008 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2f8ae24b-258e-4d32-b312-99f5015f83d6-config-data-custom\") pod \"barbican-worker-795d8f7875-gfh24\" (UID: \"2f8ae24b-258e-4d32-b312-99f5015f83d6\") " pod="openstack-kuttl-tests/barbican-worker-795d8f7875-gfh24" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.182803 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xr484\" (UniqueName: \"kubernetes.io/projected/2f8ae24b-258e-4d32-b312-99f5015f83d6-kube-api-access-xr484\") pod \"barbican-worker-795d8f7875-gfh24\" (UID: 
\"2f8ae24b-258e-4d32-b312-99f5015f83d6\") " pod="openstack-kuttl-tests/barbican-worker-795d8f7875-gfh24" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.184142 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a948066b-fa95-4fbb-83e6-6f26f6c76652-config-data\") pod \"barbican-keystone-listener-9b8896fdb-7mrlf\" (UID: \"a948066b-fa95-4fbb-83e6-6f26f6c76652\") " pod="openstack-kuttl-tests/barbican-keystone-listener-9b8896fdb-7mrlf" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.185043 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a948066b-fa95-4fbb-83e6-6f26f6c76652-combined-ca-bundle\") pod \"barbican-keystone-listener-9b8896fdb-7mrlf\" (UID: \"a948066b-fa95-4fbb-83e6-6f26f6c76652\") " pod="openstack-kuttl-tests/barbican-keystone-listener-9b8896fdb-7mrlf" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.192807 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/placement-6b7cbc9658-z4t27"] Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.194672 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-998h7\" (UniqueName: \"kubernetes.io/projected/a948066b-fa95-4fbb-83e6-6f26f6c76652-kube-api-access-998h7\") pod \"barbican-keystone-listener-9b8896fdb-7mrlf\" (UID: \"a948066b-fa95-4fbb-83e6-6f26f6c76652\") " pod="openstack-kuttl-tests/barbican-keystone-listener-9b8896fdb-7mrlf" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.197654 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-6b7cbc9658-z4t27" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.212380 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-placement-internal-svc" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.212529 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-placement-public-svc" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.215193 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-795d8f7875-gfh24" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.260841 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-6b7cbc9658-z4t27"] Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.275510 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/47c6e50e-b277-48d8-928d-f0d571acf238-logs\") pod \"barbican-api-678474f9b-4hxj7\" (UID: \"47c6e50e-b277-48d8-928d-f0d571acf238\") " pod="openstack-kuttl-tests/barbican-api-678474f9b-4hxj7" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.275917 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/47c6e50e-b277-48d8-928d-f0d571acf238-logs\") pod \"barbican-api-678474f9b-4hxj7\" (UID: \"47c6e50e-b277-48d8-928d-f0d571acf238\") " pod="openstack-kuttl-tests/barbican-api-678474f9b-4hxj7" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.275951 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47c6e50e-b277-48d8-928d-f0d571acf238-combined-ca-bundle\") pod \"barbican-api-678474f9b-4hxj7\" (UID: \"47c6e50e-b277-48d8-928d-f0d571acf238\") " pod="openstack-kuttl-tests/barbican-api-678474f9b-4hxj7" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.276139 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/47c6e50e-b277-48d8-928d-f0d571acf238-config-data-custom\") pod \"barbican-api-678474f9b-4hxj7\" (UID: \"47c6e50e-b277-48d8-928d-f0d571acf238\") " pod="openstack-kuttl-tests/barbican-api-678474f9b-4hxj7" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.276247 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fg24b\" (UniqueName: \"kubernetes.io/projected/47c6e50e-b277-48d8-928d-f0d571acf238-kube-api-access-fg24b\") pod \"barbican-api-678474f9b-4hxj7\" (UID: \"47c6e50e-b277-48d8-928d-f0d571acf238\") " pod="openstack-kuttl-tests/barbican-api-678474f9b-4hxj7" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.276280 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47c6e50e-b277-48d8-928d-f0d571acf238-config-data\") pod \"barbican-api-678474f9b-4hxj7\" (UID: \"47c6e50e-b277-48d8-928d-f0d571acf238\") " pod="openstack-kuttl-tests/barbican-api-678474f9b-4hxj7" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.281558 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47c6e50e-b277-48d8-928d-f0d571acf238-config-data\") pod \"barbican-api-678474f9b-4hxj7\" (UID: \"47c6e50e-b277-48d8-928d-f0d571acf238\") " pod="openstack-kuttl-tests/barbican-api-678474f9b-4hxj7" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.281829 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47c6e50e-b277-48d8-928d-f0d571acf238-combined-ca-bundle\") pod \"barbican-api-678474f9b-4hxj7\" (UID: \"47c6e50e-b277-48d8-928d-f0d571acf238\") " pod="openstack-kuttl-tests/barbican-api-678474f9b-4hxj7" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.283622 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-9b8896fdb-7mrlf" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.289275 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/47c6e50e-b277-48d8-928d-f0d571acf238-config-data-custom\") pod \"barbican-api-678474f9b-4hxj7\" (UID: \"47c6e50e-b277-48d8-928d-f0d571acf238\") " pod="openstack-kuttl-tests/barbican-api-678474f9b-4hxj7" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.293658 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fg24b\" (UniqueName: \"kubernetes.io/projected/47c6e50e-b277-48d8-928d-f0d571acf238-kube-api-access-fg24b\") pod \"barbican-api-678474f9b-4hxj7\" (UID: \"47c6e50e-b277-48d8-928d-f0d571acf238\") " pod="openstack-kuttl-tests/barbican-api-678474f9b-4hxj7" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.361071 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-api-678474f9b-4hxj7" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.379175 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f2c69e78-4cbd-41a8-810d-bdc4f56cabfd-internal-tls-certs\") pod \"placement-6b7cbc9658-z4t27\" (UID: \"f2c69e78-4cbd-41a8-810d-bdc4f56cabfd\") " pod="openstack-kuttl-tests/placement-6b7cbc9658-z4t27" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.379231 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f2c69e78-4cbd-41a8-810d-bdc4f56cabfd-logs\") pod \"placement-6b7cbc9658-z4t27\" (UID: \"f2c69e78-4cbd-41a8-810d-bdc4f56cabfd\") " pod="openstack-kuttl-tests/placement-6b7cbc9658-z4t27" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.379271 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2c69e78-4cbd-41a8-810d-bdc4f56cabfd-combined-ca-bundle\") pod \"placement-6b7cbc9658-z4t27\" (UID: \"f2c69e78-4cbd-41a8-810d-bdc4f56cabfd\") " pod="openstack-kuttl-tests/placement-6b7cbc9658-z4t27" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.379321 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2c69e78-4cbd-41a8-810d-bdc4f56cabfd-config-data\") pod \"placement-6b7cbc9658-z4t27\" (UID: \"f2c69e78-4cbd-41a8-810d-bdc4f56cabfd\") " pod="openstack-kuttl-tests/placement-6b7cbc9658-z4t27" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.379823 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f2c69e78-4cbd-41a8-810d-bdc4f56cabfd-scripts\") pod \"placement-6b7cbc9658-z4t27\" (UID: \"f2c69e78-4cbd-41a8-810d-bdc4f56cabfd\") " pod="openstack-kuttl-tests/placement-6b7cbc9658-z4t27" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.379875 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fns9l\" (UniqueName: \"kubernetes.io/projected/f2c69e78-4cbd-41a8-810d-bdc4f56cabfd-kube-api-access-fns9l\") pod \"placement-6b7cbc9658-z4t27\" (UID: \"f2c69e78-4cbd-41a8-810d-bdc4f56cabfd\") " 
pod="openstack-kuttl-tests/placement-6b7cbc9658-z4t27" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.380146 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f2c69e78-4cbd-41a8-810d-bdc4f56cabfd-public-tls-certs\") pod \"placement-6b7cbc9658-z4t27\" (UID: \"f2c69e78-4cbd-41a8-810d-bdc4f56cabfd\") " pod="openstack-kuttl-tests/placement-6b7cbc9658-z4t27" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.482075 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f2c69e78-4cbd-41a8-810d-bdc4f56cabfd-internal-tls-certs\") pod \"placement-6b7cbc9658-z4t27\" (UID: \"f2c69e78-4cbd-41a8-810d-bdc4f56cabfd\") " pod="openstack-kuttl-tests/placement-6b7cbc9658-z4t27" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.482147 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f2c69e78-4cbd-41a8-810d-bdc4f56cabfd-logs\") pod \"placement-6b7cbc9658-z4t27\" (UID: \"f2c69e78-4cbd-41a8-810d-bdc4f56cabfd\") " pod="openstack-kuttl-tests/placement-6b7cbc9658-z4t27" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.482198 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2c69e78-4cbd-41a8-810d-bdc4f56cabfd-combined-ca-bundle\") pod \"placement-6b7cbc9658-z4t27\" (UID: \"f2c69e78-4cbd-41a8-810d-bdc4f56cabfd\") " pod="openstack-kuttl-tests/placement-6b7cbc9658-z4t27" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.482237 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2c69e78-4cbd-41a8-810d-bdc4f56cabfd-config-data\") pod \"placement-6b7cbc9658-z4t27\" (UID: \"f2c69e78-4cbd-41a8-810d-bdc4f56cabfd\") " pod="openstack-kuttl-tests/placement-6b7cbc9658-z4t27" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.482281 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f2c69e78-4cbd-41a8-810d-bdc4f56cabfd-scripts\") pod \"placement-6b7cbc9658-z4t27\" (UID: \"f2c69e78-4cbd-41a8-810d-bdc4f56cabfd\") " pod="openstack-kuttl-tests/placement-6b7cbc9658-z4t27" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.482303 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fns9l\" (UniqueName: \"kubernetes.io/projected/f2c69e78-4cbd-41a8-810d-bdc4f56cabfd-kube-api-access-fns9l\") pod \"placement-6b7cbc9658-z4t27\" (UID: \"f2c69e78-4cbd-41a8-810d-bdc4f56cabfd\") " pod="openstack-kuttl-tests/placement-6b7cbc9658-z4t27" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.482477 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f2c69e78-4cbd-41a8-810d-bdc4f56cabfd-public-tls-certs\") pod \"placement-6b7cbc9658-z4t27\" (UID: \"f2c69e78-4cbd-41a8-810d-bdc4f56cabfd\") " pod="openstack-kuttl-tests/placement-6b7cbc9658-z4t27" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.483479 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f2c69e78-4cbd-41a8-810d-bdc4f56cabfd-logs\") pod \"placement-6b7cbc9658-z4t27\" (UID: \"f2c69e78-4cbd-41a8-810d-bdc4f56cabfd\") " 
pod="openstack-kuttl-tests/placement-6b7cbc9658-z4t27" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.487426 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f2c69e78-4cbd-41a8-810d-bdc4f56cabfd-internal-tls-certs\") pod \"placement-6b7cbc9658-z4t27\" (UID: \"f2c69e78-4cbd-41a8-810d-bdc4f56cabfd\") " pod="openstack-kuttl-tests/placement-6b7cbc9658-z4t27" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.488112 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f2c69e78-4cbd-41a8-810d-bdc4f56cabfd-scripts\") pod \"placement-6b7cbc9658-z4t27\" (UID: \"f2c69e78-4cbd-41a8-810d-bdc4f56cabfd\") " pod="openstack-kuttl-tests/placement-6b7cbc9658-z4t27" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.490845 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f2c69e78-4cbd-41a8-810d-bdc4f56cabfd-public-tls-certs\") pod \"placement-6b7cbc9658-z4t27\" (UID: \"f2c69e78-4cbd-41a8-810d-bdc4f56cabfd\") " pod="openstack-kuttl-tests/placement-6b7cbc9658-z4t27" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.492762 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2c69e78-4cbd-41a8-810d-bdc4f56cabfd-config-data\") pod \"placement-6b7cbc9658-z4t27\" (UID: \"f2c69e78-4cbd-41a8-810d-bdc4f56cabfd\") " pod="openstack-kuttl-tests/placement-6b7cbc9658-z4t27" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.496998 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2c69e78-4cbd-41a8-810d-bdc4f56cabfd-combined-ca-bundle\") pod \"placement-6b7cbc9658-z4t27\" (UID: \"f2c69e78-4cbd-41a8-810d-bdc4f56cabfd\") " pod="openstack-kuttl-tests/placement-6b7cbc9658-z4t27" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.500442 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fns9l\" (UniqueName: \"kubernetes.io/projected/f2c69e78-4cbd-41a8-810d-bdc4f56cabfd-kube-api-access-fns9l\") pod \"placement-6b7cbc9658-z4t27\" (UID: \"f2c69e78-4cbd-41a8-810d-bdc4f56cabfd\") " pod="openstack-kuttl-tests/placement-6b7cbc9658-z4t27" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.527313 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-6b7cbc9658-z4t27" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.615324 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6f64d722-5520-4fc7-b28c-378dfcced10b" path="/var/lib/kubelet/pods/6f64d722-5520-4fc7-b28c-378dfcced10b/volumes" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.619348 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8c34a8ba-597f-464a-babf-0ded54c9ebfb" path="/var/lib/kubelet/pods/8c34a8ba-597f-464a-babf-0ded54c9ebfb/volumes" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.620232 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ef3b4518-9c22-4046-9053-142864162a50" path="/var/lib/kubelet/pods/ef3b4518-9c22-4046-9053-142864162a50/volumes" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.660494 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-74b74897d6-9wxhf"] Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.689220 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"4f358da8-45ff-43a3-a671-05c81bb940a8","Type":"ContainerStarted","Data":"b23bae312bc4e60f787d8847aad548910d03b315006299b5cc3696ec352c0048"} Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.732286 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"037cfbe8-8c84-4901-9e77-9513f22e39a1","Type":"ContainerStarted","Data":"190e41f2d5afbe5c79e170291043bb0b447c107807fc02a22e38828768768d9d"} Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.732695 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="037cfbe8-8c84-4901-9e77-9513f22e39a1" containerName="ceilometer-central-agent" containerID="cri-o://50673c604a510d5000c90395016ae5092b6d9a96de87261edc85c70805ed811c" gracePeriod=30 Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.732861 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.733429 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="037cfbe8-8c84-4901-9e77-9513f22e39a1" containerName="proxy-httpd" containerID="cri-o://190e41f2d5afbe5c79e170291043bb0b447c107807fc02a22e38828768768d9d" gracePeriod=30 Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.733584 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="037cfbe8-8c84-4901-9e77-9513f22e39a1" containerName="sg-core" containerID="cri-o://3d4f7073c9297ff51b0671e585e6bd7d44df4da52a2cc36bdaa3ea09506286ed" gracePeriod=30 Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.733749 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="037cfbe8-8c84-4901-9e77-9513f22e39a1" containerName="ceilometer-notification-agent" containerID="cri-o://ad8ce1fbd71496aaf4e5071bac5dc4ace92331dacd698f83a04eda2a6f65bb64" gracePeriod=30 Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.765303 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" 
event={"ID":"18c8f0a6-157c-42f1-81b2-6bccecdcf626","Type":"ContainerStarted","Data":"2c4b181c93ebe61a4fc92f9ec89fe7a5c1de8fef8ba73ced0be26ecf02055bb5"} Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.838320 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-worker-795d8f7875-gfh24"] Jan 20 17:18:34 crc kubenswrapper[4558]: I0120 17:18:34.848930 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=2.502369608 podStartE2EDuration="7.848906856s" podCreationTimestamp="2026-01-20 17:18:27 +0000 UTC" firstStartedPulling="2026-01-20 17:18:28.838840263 +0000 UTC m=+2202.599178231" lastFinishedPulling="2026-01-20 17:18:34.185377512 +0000 UTC m=+2207.945715479" observedRunningTime="2026-01-20 17:18:34.789858495 +0000 UTC m=+2208.550196462" watchObservedRunningTime="2026-01-20 17:18:34.848906856 +0000 UTC m=+2208.609244824" Jan 20 17:18:35 crc kubenswrapper[4558]: I0120 17:18:35.114631 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-api-678474f9b-4hxj7"] Jan 20 17:18:35 crc kubenswrapper[4558]: I0120 17:18:35.177094 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-9b8896fdb-7mrlf"] Jan 20 17:18:35 crc kubenswrapper[4558]: I0120 17:18:35.406235 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-dgphh" Jan 20 17:18:35 crc kubenswrapper[4558]: I0120 17:18:35.502216 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-6b7cbc9658-z4t27"] Jan 20 17:18:35 crc kubenswrapper[4558]: I0120 17:18:35.517860 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0b08b6e3-03a5-4dfa-aba3-2847529be269-scripts\") pod \"0b08b6e3-03a5-4dfa-aba3-2847529be269\" (UID: \"0b08b6e3-03a5-4dfa-aba3-2847529be269\") " Jan 20 17:18:35 crc kubenswrapper[4558]: I0120 17:18:35.517915 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/0b08b6e3-03a5-4dfa-aba3-2847529be269-credential-keys\") pod \"0b08b6e3-03a5-4dfa-aba3-2847529be269\" (UID: \"0b08b6e3-03a5-4dfa-aba3-2847529be269\") " Jan 20 17:18:35 crc kubenswrapper[4558]: I0120 17:18:35.518118 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0b08b6e3-03a5-4dfa-aba3-2847529be269-fernet-keys\") pod \"0b08b6e3-03a5-4dfa-aba3-2847529be269\" (UID: \"0b08b6e3-03a5-4dfa-aba3-2847529be269\") " Jan 20 17:18:35 crc kubenswrapper[4558]: I0120 17:18:35.518137 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b08b6e3-03a5-4dfa-aba3-2847529be269-combined-ca-bundle\") pod \"0b08b6e3-03a5-4dfa-aba3-2847529be269\" (UID: \"0b08b6e3-03a5-4dfa-aba3-2847529be269\") " Jan 20 17:18:35 crc kubenswrapper[4558]: I0120 17:18:35.518202 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0b08b6e3-03a5-4dfa-aba3-2847529be269-config-data\") pod \"0b08b6e3-03a5-4dfa-aba3-2847529be269\" (UID: \"0b08b6e3-03a5-4dfa-aba3-2847529be269\") " Jan 20 17:18:35 crc kubenswrapper[4558]: I0120 17:18:35.518263 4558 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-7pd85\" (UniqueName: \"kubernetes.io/projected/0b08b6e3-03a5-4dfa-aba3-2847529be269-kube-api-access-7pd85\") pod \"0b08b6e3-03a5-4dfa-aba3-2847529be269\" (UID: \"0b08b6e3-03a5-4dfa-aba3-2847529be269\") " Jan 20 17:18:35 crc kubenswrapper[4558]: I0120 17:18:35.522806 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b08b6e3-03a5-4dfa-aba3-2847529be269-scripts" (OuterVolumeSpecName: "scripts") pod "0b08b6e3-03a5-4dfa-aba3-2847529be269" (UID: "0b08b6e3-03a5-4dfa-aba3-2847529be269"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:18:35 crc kubenswrapper[4558]: I0120 17:18:35.526185 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b08b6e3-03a5-4dfa-aba3-2847529be269-kube-api-access-7pd85" (OuterVolumeSpecName: "kube-api-access-7pd85") pod "0b08b6e3-03a5-4dfa-aba3-2847529be269" (UID: "0b08b6e3-03a5-4dfa-aba3-2847529be269"). InnerVolumeSpecName "kube-api-access-7pd85". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:18:35 crc kubenswrapper[4558]: I0120 17:18:35.526862 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b08b6e3-03a5-4dfa-aba3-2847529be269-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "0b08b6e3-03a5-4dfa-aba3-2847529be269" (UID: "0b08b6e3-03a5-4dfa-aba3-2847529be269"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:18:35 crc kubenswrapper[4558]: I0120 17:18:35.527287 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b08b6e3-03a5-4dfa-aba3-2847529be269-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "0b08b6e3-03a5-4dfa-aba3-2847529be269" (UID: "0b08b6e3-03a5-4dfa-aba3-2847529be269"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:18:35 crc kubenswrapper[4558]: I0120 17:18:35.571390 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b08b6e3-03a5-4dfa-aba3-2847529be269-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0b08b6e3-03a5-4dfa-aba3-2847529be269" (UID: "0b08b6e3-03a5-4dfa-aba3-2847529be269"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:18:35 crc kubenswrapper[4558]: I0120 17:18:35.577371 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b08b6e3-03a5-4dfa-aba3-2847529be269-config-data" (OuterVolumeSpecName: "config-data") pod "0b08b6e3-03a5-4dfa-aba3-2847529be269" (UID: "0b08b6e3-03a5-4dfa-aba3-2847529be269"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:18:35 crc kubenswrapper[4558]: I0120 17:18:35.621765 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7pd85\" (UniqueName: \"kubernetes.io/projected/0b08b6e3-03a5-4dfa-aba3-2847529be269-kube-api-access-7pd85\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:35 crc kubenswrapper[4558]: I0120 17:18:35.622015 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0b08b6e3-03a5-4dfa-aba3-2847529be269-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:35 crc kubenswrapper[4558]: I0120 17:18:35.622027 4558 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/0b08b6e3-03a5-4dfa-aba3-2847529be269-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:35 crc kubenswrapper[4558]: I0120 17:18:35.622039 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b08b6e3-03a5-4dfa-aba3-2847529be269-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:35 crc kubenswrapper[4558]: I0120 17:18:35.622048 4558 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0b08b6e3-03a5-4dfa-aba3-2847529be269-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:35 crc kubenswrapper[4558]: I0120 17:18:35.622057 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0b08b6e3-03a5-4dfa-aba3-2847529be269-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:35 crc kubenswrapper[4558]: I0120 17:18:35.696765 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-dgphh"] Jan 20 17:18:35 crc kubenswrapper[4558]: I0120 17:18:35.718516 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-dgphh"] Jan 20 17:18:35 crc kubenswrapper[4558]: I0120 17:18:35.765420 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-v4bgz"] Jan 20 17:18:35 crc kubenswrapper[4558]: E0120 17:18:35.765875 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b08b6e3-03a5-4dfa-aba3-2847529be269" containerName="keystone-bootstrap" Jan 20 17:18:35 crc kubenswrapper[4558]: I0120 17:18:35.765897 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b08b6e3-03a5-4dfa-aba3-2847529be269" containerName="keystone-bootstrap" Jan 20 17:18:35 crc kubenswrapper[4558]: I0120 17:18:35.766097 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="0b08b6e3-03a5-4dfa-aba3-2847529be269" containerName="keystone-bootstrap" Jan 20 17:18:35 crc kubenswrapper[4558]: I0120 17:18:35.766866 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-v4bgz" Jan 20 17:18:35 crc kubenswrapper[4558]: I0120 17:18:35.791950 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-v4bgz"] Jan 20 17:18:35 crc kubenswrapper[4558]: I0120 17:18:35.833857 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-678474f9b-4hxj7" event={"ID":"47c6e50e-b277-48d8-928d-f0d571acf238","Type":"ContainerStarted","Data":"9dc6bc15ff6caf6b71886854b59c8e24cc9c17a04a1198f272d418effb557cc1"} Jan 20 17:18:35 crc kubenswrapper[4558]: I0120 17:18:35.834006 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-678474f9b-4hxj7" event={"ID":"47c6e50e-b277-48d8-928d-f0d571acf238","Type":"ContainerStarted","Data":"169a9c5070cf92fbb59c5c38dc9e4daf276d86dd454c6d9e7d6e117a81260d4c"} Jan 20 17:18:35 crc kubenswrapper[4558]: I0120 17:18:35.840769 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d654e8cdfc73c203480241f31f4ecdf58346ad5adc2de94c844c290a20303fbb" Jan 20 17:18:35 crc kubenswrapper[4558]: I0120 17:18:35.840845 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-dgphh" Jan 20 17:18:35 crc kubenswrapper[4558]: I0120 17:18:35.846127 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-9b8896fdb-7mrlf" event={"ID":"a948066b-fa95-4fbb-83e6-6f26f6c76652","Type":"ContainerStarted","Data":"c97f19d9a03644214f7c3a4eb6b798453f97f1b12067ab4dab1050a2bc364779"} Jan 20 17:18:35 crc kubenswrapper[4558]: I0120 17:18:35.846181 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-9b8896fdb-7mrlf" event={"ID":"a948066b-fa95-4fbb-83e6-6f26f6c76652","Type":"ContainerStarted","Data":"d370341deb7c313621e85ad08e7b1eb9630f0e743e8ef7f6282693bbe0430efe"} Jan 20 17:18:35 crc kubenswrapper[4558]: I0120 17:18:35.850307 4558 generic.go:334] "Generic (PLEG): container finished" podID="037cfbe8-8c84-4901-9e77-9513f22e39a1" containerID="190e41f2d5afbe5c79e170291043bb0b447c107807fc02a22e38828768768d9d" exitCode=0 Jan 20 17:18:35 crc kubenswrapper[4558]: I0120 17:18:35.850333 4558 generic.go:334] "Generic (PLEG): container finished" podID="037cfbe8-8c84-4901-9e77-9513f22e39a1" containerID="3d4f7073c9297ff51b0671e585e6bd7d44df4da52a2cc36bdaa3ea09506286ed" exitCode=2 Jan 20 17:18:35 crc kubenswrapper[4558]: I0120 17:18:35.850340 4558 generic.go:334] "Generic (PLEG): container finished" podID="037cfbe8-8c84-4901-9e77-9513f22e39a1" containerID="ad8ce1fbd71496aaf4e5071bac5dc4ace92331dacd698f83a04eda2a6f65bb64" exitCode=0 Jan 20 17:18:35 crc kubenswrapper[4558]: I0120 17:18:35.850349 4558 generic.go:334] "Generic (PLEG): container finished" podID="037cfbe8-8c84-4901-9e77-9513f22e39a1" containerID="50673c604a510d5000c90395016ae5092b6d9a96de87261edc85c70805ed811c" exitCode=0 Jan 20 17:18:35 crc kubenswrapper[4558]: I0120 17:18:35.850379 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"037cfbe8-8c84-4901-9e77-9513f22e39a1","Type":"ContainerDied","Data":"190e41f2d5afbe5c79e170291043bb0b447c107807fc02a22e38828768768d9d"} Jan 20 17:18:35 crc kubenswrapper[4558]: I0120 17:18:35.850395 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" 
event={"ID":"037cfbe8-8c84-4901-9e77-9513f22e39a1","Type":"ContainerDied","Data":"3d4f7073c9297ff51b0671e585e6bd7d44df4da52a2cc36bdaa3ea09506286ed"} Jan 20 17:18:35 crc kubenswrapper[4558]: I0120 17:18:35.850405 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"037cfbe8-8c84-4901-9e77-9513f22e39a1","Type":"ContainerDied","Data":"ad8ce1fbd71496aaf4e5071bac5dc4ace92331dacd698f83a04eda2a6f65bb64"} Jan 20 17:18:35 crc kubenswrapper[4558]: I0120 17:18:35.850414 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"037cfbe8-8c84-4901-9e77-9513f22e39a1","Type":"ContainerDied","Data":"50673c604a510d5000c90395016ae5092b6d9a96de87261edc85c70805ed811c"} Jan 20 17:18:35 crc kubenswrapper[4558]: I0120 17:18:35.852266 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-795d8f7875-gfh24" event={"ID":"2f8ae24b-258e-4d32-b312-99f5015f83d6","Type":"ContainerStarted","Data":"daed2ff09c57989ebf417f453b1c97a9e38554550e8b7bcfd620161001b8c813"} Jan 20 17:18:35 crc kubenswrapper[4558]: I0120 17:18:35.852344 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-795d8f7875-gfh24" event={"ID":"2f8ae24b-258e-4d32-b312-99f5015f83d6","Type":"ContainerStarted","Data":"afd7329580125fa4f2079f9103592be41cb2ac5fbf2ed5b7c2441396f114a014"} Jan 20 17:18:35 crc kubenswrapper[4558]: I0120 17:18:35.855145 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"18c8f0a6-157c-42f1-81b2-6bccecdcf626","Type":"ContainerStarted","Data":"1cbcd65376eed320dc67eb5d4e26e980c1e4099da6b8e76656548d1fdcc0484d"} Jan 20 17:18:35 crc kubenswrapper[4558]: I0120 17:18:35.857460 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-6b7cbc9658-z4t27" event={"ID":"f2c69e78-4cbd-41a8-810d-bdc4f56cabfd","Type":"ContainerStarted","Data":"0270ba2b6c46bed7c0eb7d2bafcb0e00525bc89f1ec0b572d313c45e45598504"} Jan 20 17:18:35 crc kubenswrapper[4558]: I0120 17:18:35.861383 4558 generic.go:334] "Generic (PLEG): container finished" podID="4efe1398-34aa-4835-8c35-790f9ec1d514" containerID="606e0dbff67da472a309954e0767fc9f44fcce3c5d73c3e05d4853ba37288e26" exitCode=0 Jan 20 17:18:35 crc kubenswrapper[4558]: I0120 17:18:35.861455 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-sync-4m92m" event={"ID":"4efe1398-34aa-4835-8c35-790f9ec1d514","Type":"ContainerDied","Data":"606e0dbff67da472a309954e0767fc9f44fcce3c5d73c3e05d4853ba37288e26"} Jan 20 17:18:35 crc kubenswrapper[4558]: I0120 17:18:35.863558 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-74b74897d6-9wxhf" event={"ID":"7adfb76f-c71c-4038-8b25-346511ef09d5","Type":"ContainerStarted","Data":"b77009660854d3de3a199377eaa154471285aa6e3f681e052addcc597f3388dd"} Jan 20 17:18:35 crc kubenswrapper[4558]: I0120 17:18:35.863586 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-74b74897d6-9wxhf" event={"ID":"7adfb76f-c71c-4038-8b25-346511ef09d5","Type":"ContainerStarted","Data":"cd27dda48d1a8254712f52b2254a78f0300e79619727ffc67514404c74f610b8"} Jan 20 17:18:35 crc kubenswrapper[4558]: I0120 17:18:35.865957 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" 
event={"ID":"4f358da8-45ff-43a3-a671-05c81bb940a8","Type":"ContainerStarted","Data":"4717ec95ba7dc2a4dd5cf5832311139309bda02a9c8b0d18d06bbddbc1b283bb"} Jan 20 17:18:35 crc kubenswrapper[4558]: I0120 17:18:35.929650 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/6fa38873-97d7-458b-8963-ceeae696c9af-fernet-keys\") pod \"keystone-bootstrap-v4bgz\" (UID: \"6fa38873-97d7-458b-8963-ceeae696c9af\") " pod="openstack-kuttl-tests/keystone-bootstrap-v4bgz" Jan 20 17:18:35 crc kubenswrapper[4558]: I0120 17:18:35.929725 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bgkqd\" (UniqueName: \"kubernetes.io/projected/6fa38873-97d7-458b-8963-ceeae696c9af-kube-api-access-bgkqd\") pod \"keystone-bootstrap-v4bgz\" (UID: \"6fa38873-97d7-458b-8963-ceeae696c9af\") " pod="openstack-kuttl-tests/keystone-bootstrap-v4bgz" Jan 20 17:18:35 crc kubenswrapper[4558]: I0120 17:18:35.929771 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/6fa38873-97d7-458b-8963-ceeae696c9af-credential-keys\") pod \"keystone-bootstrap-v4bgz\" (UID: \"6fa38873-97d7-458b-8963-ceeae696c9af\") " pod="openstack-kuttl-tests/keystone-bootstrap-v4bgz" Jan 20 17:18:35 crc kubenswrapper[4558]: I0120 17:18:35.929826 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6fa38873-97d7-458b-8963-ceeae696c9af-scripts\") pod \"keystone-bootstrap-v4bgz\" (UID: \"6fa38873-97d7-458b-8963-ceeae696c9af\") " pod="openstack-kuttl-tests/keystone-bootstrap-v4bgz" Jan 20 17:18:35 crc kubenswrapper[4558]: I0120 17:18:35.929853 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fa38873-97d7-458b-8963-ceeae696c9af-combined-ca-bundle\") pod \"keystone-bootstrap-v4bgz\" (UID: \"6fa38873-97d7-458b-8963-ceeae696c9af\") " pod="openstack-kuttl-tests/keystone-bootstrap-v4bgz" Jan 20 17:18:35 crc kubenswrapper[4558]: I0120 17:18:35.929921 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6fa38873-97d7-458b-8963-ceeae696c9af-config-data\") pod \"keystone-bootstrap-v4bgz\" (UID: \"6fa38873-97d7-458b-8963-ceeae696c9af\") " pod="openstack-kuttl-tests/keystone-bootstrap-v4bgz" Jan 20 17:18:35 crc kubenswrapper[4558]: I0120 17:18:35.942874 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:18:36 crc kubenswrapper[4558]: I0120 17:18:36.032375 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/6fa38873-97d7-458b-8963-ceeae696c9af-credential-keys\") pod \"keystone-bootstrap-v4bgz\" (UID: \"6fa38873-97d7-458b-8963-ceeae696c9af\") " pod="openstack-kuttl-tests/keystone-bootstrap-v4bgz" Jan 20 17:18:36 crc kubenswrapper[4558]: I0120 17:18:36.032688 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6fa38873-97d7-458b-8963-ceeae696c9af-scripts\") pod \"keystone-bootstrap-v4bgz\" (UID: \"6fa38873-97d7-458b-8963-ceeae696c9af\") " pod="openstack-kuttl-tests/keystone-bootstrap-v4bgz" Jan 20 17:18:36 crc kubenswrapper[4558]: I0120 17:18:36.032724 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fa38873-97d7-458b-8963-ceeae696c9af-combined-ca-bundle\") pod \"keystone-bootstrap-v4bgz\" (UID: \"6fa38873-97d7-458b-8963-ceeae696c9af\") " pod="openstack-kuttl-tests/keystone-bootstrap-v4bgz" Jan 20 17:18:36 crc kubenswrapper[4558]: I0120 17:18:36.032755 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6fa38873-97d7-458b-8963-ceeae696c9af-config-data\") pod \"keystone-bootstrap-v4bgz\" (UID: \"6fa38873-97d7-458b-8963-ceeae696c9af\") " pod="openstack-kuttl-tests/keystone-bootstrap-v4bgz" Jan 20 17:18:36 crc kubenswrapper[4558]: I0120 17:18:36.032797 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/6fa38873-97d7-458b-8963-ceeae696c9af-fernet-keys\") pod \"keystone-bootstrap-v4bgz\" (UID: \"6fa38873-97d7-458b-8963-ceeae696c9af\") " pod="openstack-kuttl-tests/keystone-bootstrap-v4bgz" Jan 20 17:18:36 crc kubenswrapper[4558]: I0120 17:18:36.032849 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bgkqd\" (UniqueName: \"kubernetes.io/projected/6fa38873-97d7-458b-8963-ceeae696c9af-kube-api-access-bgkqd\") pod \"keystone-bootstrap-v4bgz\" (UID: \"6fa38873-97d7-458b-8963-ceeae696c9af\") " pod="openstack-kuttl-tests/keystone-bootstrap-v4bgz" Jan 20 17:18:36 crc kubenswrapper[4558]: I0120 17:18:36.036059 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6fa38873-97d7-458b-8963-ceeae696c9af-scripts\") pod \"keystone-bootstrap-v4bgz\" (UID: \"6fa38873-97d7-458b-8963-ceeae696c9af\") " pod="openstack-kuttl-tests/keystone-bootstrap-v4bgz" Jan 20 17:18:36 crc kubenswrapper[4558]: I0120 17:18:36.039527 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fa38873-97d7-458b-8963-ceeae696c9af-combined-ca-bundle\") pod \"keystone-bootstrap-v4bgz\" (UID: \"6fa38873-97d7-458b-8963-ceeae696c9af\") " pod="openstack-kuttl-tests/keystone-bootstrap-v4bgz" Jan 20 17:18:36 crc kubenswrapper[4558]: I0120 17:18:36.040215 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6fa38873-97d7-458b-8963-ceeae696c9af-config-data\") pod \"keystone-bootstrap-v4bgz\" (UID: \"6fa38873-97d7-458b-8963-ceeae696c9af\") " pod="openstack-kuttl-tests/keystone-bootstrap-v4bgz" Jan 20 
17:18:36 crc kubenswrapper[4558]: I0120 17:18:36.044744 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/6fa38873-97d7-458b-8963-ceeae696c9af-credential-keys\") pod \"keystone-bootstrap-v4bgz\" (UID: \"6fa38873-97d7-458b-8963-ceeae696c9af\") " pod="openstack-kuttl-tests/keystone-bootstrap-v4bgz" Jan 20 17:18:36 crc kubenswrapper[4558]: I0120 17:18:36.048856 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/6fa38873-97d7-458b-8963-ceeae696c9af-fernet-keys\") pod \"keystone-bootstrap-v4bgz\" (UID: \"6fa38873-97d7-458b-8963-ceeae696c9af\") " pod="openstack-kuttl-tests/keystone-bootstrap-v4bgz" Jan 20 17:18:36 crc kubenswrapper[4558]: I0120 17:18:36.050914 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bgkqd\" (UniqueName: \"kubernetes.io/projected/6fa38873-97d7-458b-8963-ceeae696c9af-kube-api-access-bgkqd\") pod \"keystone-bootstrap-v4bgz\" (UID: \"6fa38873-97d7-458b-8963-ceeae696c9af\") " pod="openstack-kuttl-tests/keystone-bootstrap-v4bgz" Jan 20 17:18:36 crc kubenswrapper[4558]: I0120 17:18:36.133779 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wz2p2\" (UniqueName: \"kubernetes.io/projected/037cfbe8-8c84-4901-9e77-9513f22e39a1-kube-api-access-wz2p2\") pod \"037cfbe8-8c84-4901-9e77-9513f22e39a1\" (UID: \"037cfbe8-8c84-4901-9e77-9513f22e39a1\") " Jan 20 17:18:36 crc kubenswrapper[4558]: I0120 17:18:36.133846 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/037cfbe8-8c84-4901-9e77-9513f22e39a1-scripts\") pod \"037cfbe8-8c84-4901-9e77-9513f22e39a1\" (UID: \"037cfbe8-8c84-4901-9e77-9513f22e39a1\") " Jan 20 17:18:36 crc kubenswrapper[4558]: I0120 17:18:36.133868 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/037cfbe8-8c84-4901-9e77-9513f22e39a1-sg-core-conf-yaml\") pod \"037cfbe8-8c84-4901-9e77-9513f22e39a1\" (UID: \"037cfbe8-8c84-4901-9e77-9513f22e39a1\") " Jan 20 17:18:36 crc kubenswrapper[4558]: I0120 17:18:36.133892 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/037cfbe8-8c84-4901-9e77-9513f22e39a1-combined-ca-bundle\") pod \"037cfbe8-8c84-4901-9e77-9513f22e39a1\" (UID: \"037cfbe8-8c84-4901-9e77-9513f22e39a1\") " Jan 20 17:18:36 crc kubenswrapper[4558]: I0120 17:18:36.133915 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/037cfbe8-8c84-4901-9e77-9513f22e39a1-config-data\") pod \"037cfbe8-8c84-4901-9e77-9513f22e39a1\" (UID: \"037cfbe8-8c84-4901-9e77-9513f22e39a1\") " Jan 20 17:18:36 crc kubenswrapper[4558]: I0120 17:18:36.133990 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/037cfbe8-8c84-4901-9e77-9513f22e39a1-log-httpd\") pod \"037cfbe8-8c84-4901-9e77-9513f22e39a1\" (UID: \"037cfbe8-8c84-4901-9e77-9513f22e39a1\") " Jan 20 17:18:36 crc kubenswrapper[4558]: I0120 17:18:36.134104 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/037cfbe8-8c84-4901-9e77-9513f22e39a1-run-httpd\") pod 
\"037cfbe8-8c84-4901-9e77-9513f22e39a1\" (UID: \"037cfbe8-8c84-4901-9e77-9513f22e39a1\") " Jan 20 17:18:36 crc kubenswrapper[4558]: I0120 17:18:36.135017 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/037cfbe8-8c84-4901-9e77-9513f22e39a1-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "037cfbe8-8c84-4901-9e77-9513f22e39a1" (UID: "037cfbe8-8c84-4901-9e77-9513f22e39a1"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:18:36 crc kubenswrapper[4558]: I0120 17:18:36.136178 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/037cfbe8-8c84-4901-9e77-9513f22e39a1-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "037cfbe8-8c84-4901-9e77-9513f22e39a1" (UID: "037cfbe8-8c84-4901-9e77-9513f22e39a1"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:18:36 crc kubenswrapper[4558]: I0120 17:18:36.137709 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/037cfbe8-8c84-4901-9e77-9513f22e39a1-kube-api-access-wz2p2" (OuterVolumeSpecName: "kube-api-access-wz2p2") pod "037cfbe8-8c84-4901-9e77-9513f22e39a1" (UID: "037cfbe8-8c84-4901-9e77-9513f22e39a1"). InnerVolumeSpecName "kube-api-access-wz2p2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:18:36 crc kubenswrapper[4558]: I0120 17:18:36.139959 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/037cfbe8-8c84-4901-9e77-9513f22e39a1-scripts" (OuterVolumeSpecName: "scripts") pod "037cfbe8-8c84-4901-9e77-9513f22e39a1" (UID: "037cfbe8-8c84-4901-9e77-9513f22e39a1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:18:36 crc kubenswrapper[4558]: I0120 17:18:36.176398 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/037cfbe8-8c84-4901-9e77-9513f22e39a1-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "037cfbe8-8c84-4901-9e77-9513f22e39a1" (UID: "037cfbe8-8c84-4901-9e77-9513f22e39a1"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:18:36 crc kubenswrapper[4558]: I0120 17:18:36.213323 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/037cfbe8-8c84-4901-9e77-9513f22e39a1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "037cfbe8-8c84-4901-9e77-9513f22e39a1" (UID: "037cfbe8-8c84-4901-9e77-9513f22e39a1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:18:36 crc kubenswrapper[4558]: I0120 17:18:36.230596 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-v4bgz" Jan 20 17:18:36 crc kubenswrapper[4558]: I0120 17:18:36.237014 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/037cfbe8-8c84-4901-9e77-9513f22e39a1-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:36 crc kubenswrapper[4558]: I0120 17:18:36.237043 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/037cfbe8-8c84-4901-9e77-9513f22e39a1-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:36 crc kubenswrapper[4558]: I0120 17:18:36.237053 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wz2p2\" (UniqueName: \"kubernetes.io/projected/037cfbe8-8c84-4901-9e77-9513f22e39a1-kube-api-access-wz2p2\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:36 crc kubenswrapper[4558]: I0120 17:18:36.237064 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/037cfbe8-8c84-4901-9e77-9513f22e39a1-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:36 crc kubenswrapper[4558]: I0120 17:18:36.237076 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/037cfbe8-8c84-4901-9e77-9513f22e39a1-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:36 crc kubenswrapper[4558]: I0120 17:18:36.237086 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/037cfbe8-8c84-4901-9e77-9513f22e39a1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:36 crc kubenswrapper[4558]: I0120 17:18:36.277487 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/037cfbe8-8c84-4901-9e77-9513f22e39a1-config-data" (OuterVolumeSpecName: "config-data") pod "037cfbe8-8c84-4901-9e77-9513f22e39a1" (UID: "037cfbe8-8c84-4901-9e77-9513f22e39a1"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:18:36 crc kubenswrapper[4558]: I0120 17:18:36.345476 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/037cfbe8-8c84-4901-9e77-9513f22e39a1-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:36 crc kubenswrapper[4558]: I0120 17:18:36.581304 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b08b6e3-03a5-4dfa-aba3-2847529be269" path="/var/lib/kubelet/pods/0b08b6e3-03a5-4dfa-aba3-2847529be269/volumes" Jan 20 17:18:36 crc kubenswrapper[4558]: I0120 17:18:36.731388 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-v4bgz"] Jan 20 17:18:36 crc kubenswrapper[4558]: I0120 17:18:36.882086 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-9b8896fdb-7mrlf" event={"ID":"a948066b-fa95-4fbb-83e6-6f26f6c76652","Type":"ContainerStarted","Data":"67f52d695e7843e5e1f00cfd6b4ff76e79ba46c21e0baabc0a21e634dcae0c6e"} Jan 20 17:18:36 crc kubenswrapper[4558]: I0120 17:18:36.885794 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"4f358da8-45ff-43a3-a671-05c81bb940a8","Type":"ContainerStarted","Data":"8d2b5cee7d58d2eb3a0c47e5dd22befe62dc6fe6e9a175f8454c9dd6bdae69b1"} Jan 20 17:18:36 crc kubenswrapper[4558]: I0120 17:18:36.899039 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"037cfbe8-8c84-4901-9e77-9513f22e39a1","Type":"ContainerDied","Data":"5c3169fe6410f0d62ef9f1869c8f77981b1dec8a5ff59900d6aa35f7b9c3a584"} Jan 20 17:18:36 crc kubenswrapper[4558]: I0120 17:18:36.899078 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:18:36 crc kubenswrapper[4558]: I0120 17:18:36.899106 4558 scope.go:117] "RemoveContainer" containerID="190e41f2d5afbe5c79e170291043bb0b447c107807fc02a22e38828768768d9d" Jan 20 17:18:36 crc kubenswrapper[4558]: I0120 17:18:36.901251 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"18c8f0a6-157c-42f1-81b2-6bccecdcf626","Type":"ContainerStarted","Data":"9dae4a94a0dceb1b3d4ff1d0035c4b59f730610c281890e1dedf27394f9fa41c"} Jan 20 17:18:36 crc kubenswrapper[4558]: I0120 17:18:36.910982 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-6b7cbc9658-z4t27" event={"ID":"f2c69e78-4cbd-41a8-810d-bdc4f56cabfd","Type":"ContainerStarted","Data":"fd0522a1018450fa8aaf5f378cfa36bd4a81f9a88e9be2f7d7e743bbb0155ccd"} Jan 20 17:18:36 crc kubenswrapper[4558]: I0120 17:18:36.911036 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-6b7cbc9658-z4t27" event={"ID":"f2c69e78-4cbd-41a8-810d-bdc4f56cabfd","Type":"ContainerStarted","Data":"3282788cd0138924b505daecd0b6616ec4d4fb23cc9f1c9f3179db8ba4606bbd"} Jan 20 17:18:36 crc kubenswrapper[4558]: I0120 17:18:36.913538 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/placement-6b7cbc9658-z4t27" Jan 20 17:18:36 crc kubenswrapper[4558]: I0120 17:18:36.914000 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/placement-6b7cbc9658-z4t27" Jan 20 17:18:36 crc kubenswrapper[4558]: I0120 17:18:36.918864 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-keystone-listener-9b8896fdb-7mrlf" podStartSLOduration=3.918854064 podStartE2EDuration="3.918854064s" podCreationTimestamp="2026-01-20 17:18:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:18:36.913909566 +0000 UTC m=+2210.674247533" watchObservedRunningTime="2026-01-20 17:18:36.918854064 +0000 UTC m=+2210.679192030" Jan 20 17:18:36 crc kubenswrapper[4558]: I0120 17:18:36.940743 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-74b74897d6-9wxhf" event={"ID":"7adfb76f-c71c-4038-8b25-346511ef09d5","Type":"ContainerStarted","Data":"aea56698e7541c06ec49388659b311c8f5d6544e3ff75a7b5df928dfa78d9344"} Jan 20 17:18:36 crc kubenswrapper[4558]: I0120 17:18:36.941526 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/placement-74b74897d6-9wxhf" Jan 20 17:18:36 crc kubenswrapper[4558]: I0120 17:18:36.941555 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/placement-74b74897d6-9wxhf" Jan 20 17:18:36 crc kubenswrapper[4558]: I0120 17:18:36.947745 4558 scope.go:117] "RemoveContainer" containerID="3d4f7073c9297ff51b0671e585e6bd7d44df4da52a2cc36bdaa3ea09506286ed" Jan 20 17:18:36 crc kubenswrapper[4558]: I0120 17:18:36.955961 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-678474f9b-4hxj7" event={"ID":"47c6e50e-b277-48d8-928d-f0d571acf238","Type":"ContainerStarted","Data":"b8cb069501804ced5862f25dd0e9a29484e1f9a42e27c83b54682de815e90065"} Jan 20 17:18:36 crc kubenswrapper[4558]: I0120 17:18:36.956824 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack-kuttl-tests/barbican-api-678474f9b-4hxj7" Jan 20 17:18:36 crc kubenswrapper[4558]: I0120 17:18:36.956913 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/barbican-api-678474f9b-4hxj7" Jan 20 17:18:36 crc kubenswrapper[4558]: I0120 17:18:36.960706 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/placement-6b7cbc9658-z4t27" podStartSLOduration=2.9606938940000003 podStartE2EDuration="2.960693894s" podCreationTimestamp="2026-01-20 17:18:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:18:36.941791227 +0000 UTC m=+2210.702129195" watchObservedRunningTime="2026-01-20 17:18:36.960693894 +0000 UTC m=+2210.721031861" Jan 20 17:18:36 crc kubenswrapper[4558]: I0120 17:18:36.978757 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-v4bgz" event={"ID":"6fa38873-97d7-458b-8963-ceeae696c9af","Type":"ContainerStarted","Data":"0f199f022ec29848ee81ec8516b8e901fa649b2f2665c88aeee10fb34ceaa444"} Jan 20 17:18:36 crc kubenswrapper[4558]: I0120 17:18:36.986274 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:18:36 crc kubenswrapper[4558]: I0120 17:18:36.989700 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-795d8f7875-gfh24" event={"ID":"2f8ae24b-258e-4d32-b312-99f5015f83d6","Type":"ContainerStarted","Data":"454a41b471f0eb3acc32e253ce4e5b3a99599e9feb264d9effdd29c6859dd9d0"} Jan 20 17:18:36 crc kubenswrapper[4558]: I0120 17:18:36.995779 4558 scope.go:117] "RemoveContainer" containerID="ad8ce1fbd71496aaf4e5071bac5dc4ace92331dacd698f83a04eda2a6f65bb64" Jan 20 17:18:36 crc kubenswrapper[4558]: I0120 17:18:36.997885 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:18:37 crc kubenswrapper[4558]: I0120 17:18:37.012220 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:18:37 crc kubenswrapper[4558]: E0120 17:18:37.012747 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="037cfbe8-8c84-4901-9e77-9513f22e39a1" containerName="ceilometer-central-agent" Jan 20 17:18:37 crc kubenswrapper[4558]: I0120 17:18:37.012769 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="037cfbe8-8c84-4901-9e77-9513f22e39a1" containerName="ceilometer-central-agent" Jan 20 17:18:37 crc kubenswrapper[4558]: E0120 17:18:37.012783 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="037cfbe8-8c84-4901-9e77-9513f22e39a1" containerName="proxy-httpd" Jan 20 17:18:37 crc kubenswrapper[4558]: I0120 17:18:37.012791 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="037cfbe8-8c84-4901-9e77-9513f22e39a1" containerName="proxy-httpd" Jan 20 17:18:37 crc kubenswrapper[4558]: E0120 17:18:37.012818 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="037cfbe8-8c84-4901-9e77-9513f22e39a1" containerName="sg-core" Jan 20 17:18:37 crc kubenswrapper[4558]: I0120 17:18:37.012825 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="037cfbe8-8c84-4901-9e77-9513f22e39a1" containerName="sg-core" Jan 20 17:18:37 crc kubenswrapper[4558]: E0120 17:18:37.012848 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="037cfbe8-8c84-4901-9e77-9513f22e39a1" containerName="ceilometer-notification-agent" Jan 20 
17:18:37 crc kubenswrapper[4558]: I0120 17:18:37.012857 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="037cfbe8-8c84-4901-9e77-9513f22e39a1" containerName="ceilometer-notification-agent" Jan 20 17:18:37 crc kubenswrapper[4558]: I0120 17:18:37.013077 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="037cfbe8-8c84-4901-9e77-9513f22e39a1" containerName="sg-core" Jan 20 17:18:37 crc kubenswrapper[4558]: I0120 17:18:37.013093 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="037cfbe8-8c84-4901-9e77-9513f22e39a1" containerName="ceilometer-central-agent" Jan 20 17:18:37 crc kubenswrapper[4558]: I0120 17:18:37.013115 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="037cfbe8-8c84-4901-9e77-9513f22e39a1" containerName="ceilometer-notification-agent" Jan 20 17:18:37 crc kubenswrapper[4558]: I0120 17:18:37.013126 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="037cfbe8-8c84-4901-9e77-9513f22e39a1" containerName="proxy-httpd" Jan 20 17:18:37 crc kubenswrapper[4558]: I0120 17:18:37.015084 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:18:37 crc kubenswrapper[4558]: I0120 17:18:37.021642 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-external-api-0" podStartSLOduration=5.021618132 podStartE2EDuration="5.021618132s" podCreationTimestamp="2026-01-20 17:18:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:18:36.984368594 +0000 UTC m=+2210.744706561" watchObservedRunningTime="2026-01-20 17:18:37.021618132 +0000 UTC m=+2210.781956130" Jan 20 17:18:37 crc kubenswrapper[4558]: I0120 17:18:37.022246 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:18:37 crc kubenswrapper[4558]: I0120 17:18:37.022347 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:18:37 crc kubenswrapper[4558]: I0120 17:18:37.041882 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:18:37 crc kubenswrapper[4558]: I0120 17:18:37.057117 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-internal-api-0" podStartSLOduration=5.057094446 podStartE2EDuration="5.057094446s" podCreationTimestamp="2026-01-20 17:18:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:18:37.022473501 +0000 UTC m=+2210.782811468" watchObservedRunningTime="2026-01-20 17:18:37.057094446 +0000 UTC m=+2210.817432413" Jan 20 17:18:37 crc kubenswrapper[4558]: I0120 17:18:37.070611 4558 scope.go:117] "RemoveContainer" containerID="50673c604a510d5000c90395016ae5092b6d9a96de87261edc85c70805ed811c" Jan 20 17:18:37 crc kubenswrapper[4558]: I0120 17:18:37.118025 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-worker-795d8f7875-gfh24" podStartSLOduration=4.116083446 podStartE2EDuration="4.116083446s" podCreationTimestamp="2026-01-20 17:18:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:18:37.071030203 +0000 UTC 
m=+2210.831368170" watchObservedRunningTime="2026-01-20 17:18:37.116083446 +0000 UTC m=+2210.876421413" Jan 20 17:18:37 crc kubenswrapper[4558]: I0120 17:18:37.126214 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-api-678474f9b-4hxj7" podStartSLOduration=4.126198968 podStartE2EDuration="4.126198968s" podCreationTimestamp="2026-01-20 17:18:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:18:37.092412122 +0000 UTC m=+2210.852750088" watchObservedRunningTime="2026-01-20 17:18:37.126198968 +0000 UTC m=+2210.886536935" Jan 20 17:18:37 crc kubenswrapper[4558]: I0120 17:18:37.129401 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/placement-74b74897d6-9wxhf" podStartSLOduration=4.129392964 podStartE2EDuration="4.129392964s" podCreationTimestamp="2026-01-20 17:18:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:18:37.120442894 +0000 UTC m=+2210.880780861" watchObservedRunningTime="2026-01-20 17:18:37.129392964 +0000 UTC m=+2210.889730931" Jan 20 17:18:37 crc kubenswrapper[4558]: I0120 17:18:37.142352 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/keystone-bootstrap-v4bgz" podStartSLOduration=2.142338418 podStartE2EDuration="2.142338418s" podCreationTimestamp="2026-01-20 17:18:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:18:37.136772502 +0000 UTC m=+2210.897110468" watchObservedRunningTime="2026-01-20 17:18:37.142338418 +0000 UTC m=+2210.902676385" Jan 20 17:18:37 crc kubenswrapper[4558]: I0120 17:18:37.165564 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b5e92730-f89e-4f1d-9a29-925c5acda961-config-data\") pod \"ceilometer-0\" (UID: \"b5e92730-f89e-4f1d-9a29-925c5acda961\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:18:37 crc kubenswrapper[4558]: I0120 17:18:37.165600 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b5e92730-f89e-4f1d-9a29-925c5acda961-scripts\") pod \"ceilometer-0\" (UID: \"b5e92730-f89e-4f1d-9a29-925c5acda961\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:18:37 crc kubenswrapper[4558]: I0120 17:18:37.165637 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b5e92730-f89e-4f1d-9a29-925c5acda961-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b5e92730-f89e-4f1d-9a29-925c5acda961\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:18:37 crc kubenswrapper[4558]: I0120 17:18:37.165658 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b5e92730-f89e-4f1d-9a29-925c5acda961-run-httpd\") pod \"ceilometer-0\" (UID: \"b5e92730-f89e-4f1d-9a29-925c5acda961\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:18:37 crc kubenswrapper[4558]: I0120 17:18:37.165693 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qczsr\" 
(UniqueName: \"kubernetes.io/projected/b5e92730-f89e-4f1d-9a29-925c5acda961-kube-api-access-qczsr\") pod \"ceilometer-0\" (UID: \"b5e92730-f89e-4f1d-9a29-925c5acda961\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:18:37 crc kubenswrapper[4558]: I0120 17:18:37.165710 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b5e92730-f89e-4f1d-9a29-925c5acda961-log-httpd\") pod \"ceilometer-0\" (UID: \"b5e92730-f89e-4f1d-9a29-925c5acda961\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:18:37 crc kubenswrapper[4558]: I0120 17:18:37.165815 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5e92730-f89e-4f1d-9a29-925c5acda961-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b5e92730-f89e-4f1d-9a29-925c5acda961\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:18:37 crc kubenswrapper[4558]: I0120 17:18:37.266676 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5e92730-f89e-4f1d-9a29-925c5acda961-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b5e92730-f89e-4f1d-9a29-925c5acda961\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:18:37 crc kubenswrapper[4558]: I0120 17:18:37.266740 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b5e92730-f89e-4f1d-9a29-925c5acda961-config-data\") pod \"ceilometer-0\" (UID: \"b5e92730-f89e-4f1d-9a29-925c5acda961\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:18:37 crc kubenswrapper[4558]: I0120 17:18:37.266767 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b5e92730-f89e-4f1d-9a29-925c5acda961-scripts\") pod \"ceilometer-0\" (UID: \"b5e92730-f89e-4f1d-9a29-925c5acda961\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:18:37 crc kubenswrapper[4558]: I0120 17:18:37.266796 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b5e92730-f89e-4f1d-9a29-925c5acda961-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b5e92730-f89e-4f1d-9a29-925c5acda961\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:18:37 crc kubenswrapper[4558]: I0120 17:18:37.266828 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b5e92730-f89e-4f1d-9a29-925c5acda961-run-httpd\") pod \"ceilometer-0\" (UID: \"b5e92730-f89e-4f1d-9a29-925c5acda961\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:18:37 crc kubenswrapper[4558]: I0120 17:18:37.266858 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qczsr\" (UniqueName: \"kubernetes.io/projected/b5e92730-f89e-4f1d-9a29-925c5acda961-kube-api-access-qczsr\") pod \"ceilometer-0\" (UID: \"b5e92730-f89e-4f1d-9a29-925c5acda961\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:18:37 crc kubenswrapper[4558]: I0120 17:18:37.266879 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b5e92730-f89e-4f1d-9a29-925c5acda961-log-httpd\") pod \"ceilometer-0\" (UID: \"b5e92730-f89e-4f1d-9a29-925c5acda961\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 
17:18:37 crc kubenswrapper[4558]: I0120 17:18:37.267300 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b5e92730-f89e-4f1d-9a29-925c5acda961-log-httpd\") pod \"ceilometer-0\" (UID: \"b5e92730-f89e-4f1d-9a29-925c5acda961\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:18:37 crc kubenswrapper[4558]: I0120 17:18:37.268524 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b5e92730-f89e-4f1d-9a29-925c5acda961-run-httpd\") pod \"ceilometer-0\" (UID: \"b5e92730-f89e-4f1d-9a29-925c5acda961\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:18:37 crc kubenswrapper[4558]: I0120 17:18:37.272804 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b5e92730-f89e-4f1d-9a29-925c5acda961-scripts\") pod \"ceilometer-0\" (UID: \"b5e92730-f89e-4f1d-9a29-925c5acda961\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:18:37 crc kubenswrapper[4558]: I0120 17:18:37.276773 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b5e92730-f89e-4f1d-9a29-925c5acda961-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b5e92730-f89e-4f1d-9a29-925c5acda961\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:18:37 crc kubenswrapper[4558]: I0120 17:18:37.277330 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b5e92730-f89e-4f1d-9a29-925c5acda961-config-data\") pod \"ceilometer-0\" (UID: \"b5e92730-f89e-4f1d-9a29-925c5acda961\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:18:37 crc kubenswrapper[4558]: I0120 17:18:37.278711 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5e92730-f89e-4f1d-9a29-925c5acda961-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b5e92730-f89e-4f1d-9a29-925c5acda961\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:18:37 crc kubenswrapper[4558]: I0120 17:18:37.285748 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qczsr\" (UniqueName: \"kubernetes.io/projected/b5e92730-f89e-4f1d-9a29-925c5acda961-kube-api-access-qczsr\") pod \"ceilometer-0\" (UID: \"b5e92730-f89e-4f1d-9a29-925c5acda961\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:18:37 crc kubenswrapper[4558]: I0120 17:18:37.339857 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:18:37 crc kubenswrapper[4558]: I0120 17:18:37.353347 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-db-sync-4m92m" Jan 20 17:18:37 crc kubenswrapper[4558]: I0120 17:18:37.469379 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mqgg5\" (UniqueName: \"kubernetes.io/projected/4efe1398-34aa-4835-8c35-790f9ec1d514-kube-api-access-mqgg5\") pod \"4efe1398-34aa-4835-8c35-790f9ec1d514\" (UID: \"4efe1398-34aa-4835-8c35-790f9ec1d514\") " Jan 20 17:18:37 crc kubenswrapper[4558]: I0120 17:18:37.469719 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4efe1398-34aa-4835-8c35-790f9ec1d514-db-sync-config-data\") pod \"4efe1398-34aa-4835-8c35-790f9ec1d514\" (UID: \"4efe1398-34aa-4835-8c35-790f9ec1d514\") " Jan 20 17:18:37 crc kubenswrapper[4558]: I0120 17:18:37.469858 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4efe1398-34aa-4835-8c35-790f9ec1d514-combined-ca-bundle\") pod \"4efe1398-34aa-4835-8c35-790f9ec1d514\" (UID: \"4efe1398-34aa-4835-8c35-790f9ec1d514\") " Jan 20 17:18:37 crc kubenswrapper[4558]: I0120 17:18:37.469919 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4efe1398-34aa-4835-8c35-790f9ec1d514-scripts\") pod \"4efe1398-34aa-4835-8c35-790f9ec1d514\" (UID: \"4efe1398-34aa-4835-8c35-790f9ec1d514\") " Jan 20 17:18:37 crc kubenswrapper[4558]: I0120 17:18:37.470097 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4efe1398-34aa-4835-8c35-790f9ec1d514-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "4efe1398-34aa-4835-8c35-790f9ec1d514" (UID: "4efe1398-34aa-4835-8c35-790f9ec1d514"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:18:37 crc kubenswrapper[4558]: I0120 17:18:37.469957 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4efe1398-34aa-4835-8c35-790f9ec1d514-etc-machine-id\") pod \"4efe1398-34aa-4835-8c35-790f9ec1d514\" (UID: \"4efe1398-34aa-4835-8c35-790f9ec1d514\") " Jan 20 17:18:37 crc kubenswrapper[4558]: I0120 17:18:37.470535 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4efe1398-34aa-4835-8c35-790f9ec1d514-config-data\") pod \"4efe1398-34aa-4835-8c35-790f9ec1d514\" (UID: \"4efe1398-34aa-4835-8c35-790f9ec1d514\") " Jan 20 17:18:37 crc kubenswrapper[4558]: I0120 17:18:37.471061 4558 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4efe1398-34aa-4835-8c35-790f9ec1d514-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:37 crc kubenswrapper[4558]: I0120 17:18:37.475583 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4efe1398-34aa-4835-8c35-790f9ec1d514-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "4efe1398-34aa-4835-8c35-790f9ec1d514" (UID: "4efe1398-34aa-4835-8c35-790f9ec1d514"). InnerVolumeSpecName "db-sync-config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:18:37 crc kubenswrapper[4558]: I0120 17:18:37.475767 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4efe1398-34aa-4835-8c35-790f9ec1d514-kube-api-access-mqgg5" (OuterVolumeSpecName: "kube-api-access-mqgg5") pod "4efe1398-34aa-4835-8c35-790f9ec1d514" (UID: "4efe1398-34aa-4835-8c35-790f9ec1d514"). InnerVolumeSpecName "kube-api-access-mqgg5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:18:37 crc kubenswrapper[4558]: I0120 17:18:37.487266 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4efe1398-34aa-4835-8c35-790f9ec1d514-scripts" (OuterVolumeSpecName: "scripts") pod "4efe1398-34aa-4835-8c35-790f9ec1d514" (UID: "4efe1398-34aa-4835-8c35-790f9ec1d514"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:18:37 crc kubenswrapper[4558]: I0120 17:18:37.499302 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4efe1398-34aa-4835-8c35-790f9ec1d514-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4efe1398-34aa-4835-8c35-790f9ec1d514" (UID: "4efe1398-34aa-4835-8c35-790f9ec1d514"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:18:37 crc kubenswrapper[4558]: I0120 17:18:37.530900 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4efe1398-34aa-4835-8c35-790f9ec1d514-config-data" (OuterVolumeSpecName: "config-data") pod "4efe1398-34aa-4835-8c35-790f9ec1d514" (UID: "4efe1398-34aa-4835-8c35-790f9ec1d514"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:18:37 crc kubenswrapper[4558]: I0120 17:18:37.574022 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4efe1398-34aa-4835-8c35-790f9ec1d514-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:37 crc kubenswrapper[4558]: I0120 17:18:37.574056 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mqgg5\" (UniqueName: \"kubernetes.io/projected/4efe1398-34aa-4835-8c35-790f9ec1d514-kube-api-access-mqgg5\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:37 crc kubenswrapper[4558]: I0120 17:18:37.574069 4558 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/4efe1398-34aa-4835-8c35-790f9ec1d514-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:37 crc kubenswrapper[4558]: I0120 17:18:37.574080 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4efe1398-34aa-4835-8c35-790f9ec1d514-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:37 crc kubenswrapper[4558]: I0120 17:18:37.574088 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4efe1398-34aa-4835-8c35-790f9ec1d514-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:37 crc kubenswrapper[4558]: I0120 17:18:37.797558 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:18:37 crc kubenswrapper[4558]: W0120 17:18:37.797970 4558 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb5e92730_f89e_4f1d_9a29_925c5acda961.slice/crio-5d6fb5041b9b1332a47f5ae04986ff4293f2a325641b93e98a82449cc40f5088 WatchSource:0}: Error finding container 5d6fb5041b9b1332a47f5ae04986ff4293f2a325641b93e98a82449cc40f5088: Status 404 returned error can't find the container with id 5d6fb5041b9b1332a47f5ae04986ff4293f2a325641b93e98a82449cc40f5088 Jan 20 17:18:37 crc kubenswrapper[4558]: I0120 17:18:37.999647 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-v4bgz" event={"ID":"6fa38873-97d7-458b-8963-ceeae696c9af","Type":"ContainerStarted","Data":"3798eba8e78b98bed61f098684e8e33e9b75e12fe73a743523793c25884ee006"} Jan 20 17:18:38 crc kubenswrapper[4558]: I0120 17:18:38.002216 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"b5e92730-f89e-4f1d-9a29-925c5acda961","Type":"ContainerStarted","Data":"5d6fb5041b9b1332a47f5ae04986ff4293f2a325641b93e98a82449cc40f5088"} Jan 20 17:18:38 crc kubenswrapper[4558]: I0120 17:18:38.005516 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-sync-4m92m" event={"ID":"4efe1398-34aa-4835-8c35-790f9ec1d514","Type":"ContainerDied","Data":"f4eb821f8d88c20f0623d9f391cdf7bb32ab0d0d6820e7369377ea27d5011d36"} Jan 20 17:18:38 crc kubenswrapper[4558]: I0120 17:18:38.005591 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f4eb821f8d88c20f0623d9f391cdf7bb32ab0d0d6820e7369377ea27d5011d36" Jan 20 17:18:38 crc kubenswrapper[4558]: I0120 17:18:38.005560 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-db-sync-4m92m" Jan 20 17:18:38 crc kubenswrapper[4558]: I0120 17:18:38.071585 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:18:38 crc kubenswrapper[4558]: E0120 17:18:38.071944 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4efe1398-34aa-4835-8c35-790f9ec1d514" containerName="cinder-db-sync" Jan 20 17:18:38 crc kubenswrapper[4558]: I0120 17:18:38.071963 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4efe1398-34aa-4835-8c35-790f9ec1d514" containerName="cinder-db-sync" Jan 20 17:18:38 crc kubenswrapper[4558]: I0120 17:18:38.072192 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="4efe1398-34aa-4835-8c35-790f9ec1d514" containerName="cinder-db-sync" Jan 20 17:18:38 crc kubenswrapper[4558]: I0120 17:18:38.073141 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:18:38 crc kubenswrapper[4558]: I0120 17:18:38.099356 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-scheduler-config-data" Jan 20 17:18:38 crc kubenswrapper[4558]: I0120 17:18:38.100062 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-cinder-dockercfg-bl5xz" Jan 20 17:18:38 crc kubenswrapper[4558]: I0120 17:18:38.108617 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-scripts" Jan 20 17:18:38 crc kubenswrapper[4558]: I0120 17:18:38.109412 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-config-data" Jan 20 17:18:38 crc kubenswrapper[4558]: I0120 17:18:38.137753 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:18:38 crc kubenswrapper[4558]: I0120 17:18:38.195871 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e7f15828-3902-4413-b35a-b33487057ea6-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"e7f15828-3902-4413-b35a-b33487057ea6\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:18:38 crc kubenswrapper[4558]: I0120 17:18:38.196097 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e7f15828-3902-4413-b35a-b33487057ea6-scripts\") pod \"cinder-scheduler-0\" (UID: \"e7f15828-3902-4413-b35a-b33487057ea6\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:18:38 crc kubenswrapper[4558]: I0120 17:18:38.196234 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e7f15828-3902-4413-b35a-b33487057ea6-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"e7f15828-3902-4413-b35a-b33487057ea6\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:18:38 crc kubenswrapper[4558]: I0120 17:18:38.196371 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7f15828-3902-4413-b35a-b33487057ea6-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"e7f15828-3902-4413-b35a-b33487057ea6\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:18:38 crc kubenswrapper[4558]: I0120 17:18:38.196492 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7f15828-3902-4413-b35a-b33487057ea6-config-data\") pod \"cinder-scheduler-0\" (UID: \"e7f15828-3902-4413-b35a-b33487057ea6\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:18:38 crc kubenswrapper[4558]: I0120 17:18:38.196629 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sr6fd\" (UniqueName: \"kubernetes.io/projected/e7f15828-3902-4413-b35a-b33487057ea6-kube-api-access-sr6fd\") pod \"cinder-scheduler-0\" (UID: \"e7f15828-3902-4413-b35a-b33487057ea6\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:18:38 crc kubenswrapper[4558]: I0120 17:18:38.252369 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:18:38 crc kubenswrapper[4558]: 
I0120 17:18:38.254278 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:18:38 crc kubenswrapper[4558]: I0120 17:18:38.256728 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-api-config-data" Jan 20 17:18:38 crc kubenswrapper[4558]: I0120 17:18:38.262572 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:18:38 crc kubenswrapper[4558]: I0120 17:18:38.299651 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e7f15828-3902-4413-b35a-b33487057ea6-scripts\") pod \"cinder-scheduler-0\" (UID: \"e7f15828-3902-4413-b35a-b33487057ea6\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:18:38 crc kubenswrapper[4558]: I0120 17:18:38.299706 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bf21e7b5-8a7d-4009-98cc-f8bbe4150000-etc-machine-id\") pod \"cinder-api-0\" (UID: \"bf21e7b5-8a7d-4009-98cc-f8bbe4150000\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:18:38 crc kubenswrapper[4558]: I0120 17:18:38.299729 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf21e7b5-8a7d-4009-98cc-f8bbe4150000-config-data\") pod \"cinder-api-0\" (UID: \"bf21e7b5-8a7d-4009-98cc-f8bbe4150000\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:18:38 crc kubenswrapper[4558]: I0120 17:18:38.299758 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5rsng\" (UniqueName: \"kubernetes.io/projected/bf21e7b5-8a7d-4009-98cc-f8bbe4150000-kube-api-access-5rsng\") pod \"cinder-api-0\" (UID: \"bf21e7b5-8a7d-4009-98cc-f8bbe4150000\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:18:38 crc kubenswrapper[4558]: I0120 17:18:38.299791 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e7f15828-3902-4413-b35a-b33487057ea6-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"e7f15828-3902-4413-b35a-b33487057ea6\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:18:38 crc kubenswrapper[4558]: I0120 17:18:38.299813 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bf21e7b5-8a7d-4009-98cc-f8bbe4150000-scripts\") pod \"cinder-api-0\" (UID: \"bf21e7b5-8a7d-4009-98cc-f8bbe4150000\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:18:38 crc kubenswrapper[4558]: I0120 17:18:38.299877 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7f15828-3902-4413-b35a-b33487057ea6-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"e7f15828-3902-4413-b35a-b33487057ea6\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:18:38 crc kubenswrapper[4558]: I0120 17:18:38.299922 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf21e7b5-8a7d-4009-98cc-f8bbe4150000-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"bf21e7b5-8a7d-4009-98cc-f8bbe4150000\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 
17:18:38 crc kubenswrapper[4558]: I0120 17:18:38.299957 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7f15828-3902-4413-b35a-b33487057ea6-config-data\") pod \"cinder-scheduler-0\" (UID: \"e7f15828-3902-4413-b35a-b33487057ea6\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:18:38 crc kubenswrapper[4558]: I0120 17:18:38.299987 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bf21e7b5-8a7d-4009-98cc-f8bbe4150000-config-data-custom\") pod \"cinder-api-0\" (UID: \"bf21e7b5-8a7d-4009-98cc-f8bbe4150000\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:18:38 crc kubenswrapper[4558]: I0120 17:18:38.300037 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sr6fd\" (UniqueName: \"kubernetes.io/projected/e7f15828-3902-4413-b35a-b33487057ea6-kube-api-access-sr6fd\") pod \"cinder-scheduler-0\" (UID: \"e7f15828-3902-4413-b35a-b33487057ea6\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:18:38 crc kubenswrapper[4558]: I0120 17:18:38.300111 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e7f15828-3902-4413-b35a-b33487057ea6-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"e7f15828-3902-4413-b35a-b33487057ea6\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:18:38 crc kubenswrapper[4558]: I0120 17:18:38.300136 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bf21e7b5-8a7d-4009-98cc-f8bbe4150000-logs\") pod \"cinder-api-0\" (UID: \"bf21e7b5-8a7d-4009-98cc-f8bbe4150000\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:18:38 crc kubenswrapper[4558]: I0120 17:18:38.301291 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e7f15828-3902-4413-b35a-b33487057ea6-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"e7f15828-3902-4413-b35a-b33487057ea6\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:18:38 crc kubenswrapper[4558]: I0120 17:18:38.305804 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e7f15828-3902-4413-b35a-b33487057ea6-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"e7f15828-3902-4413-b35a-b33487057ea6\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:18:38 crc kubenswrapper[4558]: I0120 17:18:38.306890 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7f15828-3902-4413-b35a-b33487057ea6-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"e7f15828-3902-4413-b35a-b33487057ea6\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:18:38 crc kubenswrapper[4558]: I0120 17:18:38.314119 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7f15828-3902-4413-b35a-b33487057ea6-config-data\") pod \"cinder-scheduler-0\" (UID: \"e7f15828-3902-4413-b35a-b33487057ea6\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:18:38 crc kubenswrapper[4558]: I0120 17:18:38.315868 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" 
(UniqueName: \"kubernetes.io/secret/e7f15828-3902-4413-b35a-b33487057ea6-scripts\") pod \"cinder-scheduler-0\" (UID: \"e7f15828-3902-4413-b35a-b33487057ea6\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:18:38 crc kubenswrapper[4558]: I0120 17:18:38.317275 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sr6fd\" (UniqueName: \"kubernetes.io/projected/e7f15828-3902-4413-b35a-b33487057ea6-kube-api-access-sr6fd\") pod \"cinder-scheduler-0\" (UID: \"e7f15828-3902-4413-b35a-b33487057ea6\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:18:38 crc kubenswrapper[4558]: I0120 17:18:38.401685 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bf21e7b5-8a7d-4009-98cc-f8bbe4150000-logs\") pod \"cinder-api-0\" (UID: \"bf21e7b5-8a7d-4009-98cc-f8bbe4150000\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:18:38 crc kubenswrapper[4558]: I0120 17:18:38.401836 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bf21e7b5-8a7d-4009-98cc-f8bbe4150000-etc-machine-id\") pod \"cinder-api-0\" (UID: \"bf21e7b5-8a7d-4009-98cc-f8bbe4150000\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:18:38 crc kubenswrapper[4558]: I0120 17:18:38.401903 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf21e7b5-8a7d-4009-98cc-f8bbe4150000-config-data\") pod \"cinder-api-0\" (UID: \"bf21e7b5-8a7d-4009-98cc-f8bbe4150000\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:18:38 crc kubenswrapper[4558]: I0120 17:18:38.401970 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5rsng\" (UniqueName: \"kubernetes.io/projected/bf21e7b5-8a7d-4009-98cc-f8bbe4150000-kube-api-access-5rsng\") pod \"cinder-api-0\" (UID: \"bf21e7b5-8a7d-4009-98cc-f8bbe4150000\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:18:38 crc kubenswrapper[4558]: I0120 17:18:38.402041 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bf21e7b5-8a7d-4009-98cc-f8bbe4150000-scripts\") pod \"cinder-api-0\" (UID: \"bf21e7b5-8a7d-4009-98cc-f8bbe4150000\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:18:38 crc kubenswrapper[4558]: I0120 17:18:38.402105 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bf21e7b5-8a7d-4009-98cc-f8bbe4150000-etc-machine-id\") pod \"cinder-api-0\" (UID: \"bf21e7b5-8a7d-4009-98cc-f8bbe4150000\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:18:38 crc kubenswrapper[4558]: I0120 17:18:38.402250 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf21e7b5-8a7d-4009-98cc-f8bbe4150000-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"bf21e7b5-8a7d-4009-98cc-f8bbe4150000\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:18:38 crc kubenswrapper[4558]: I0120 17:18:38.402329 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bf21e7b5-8a7d-4009-98cc-f8bbe4150000-config-data-custom\") pod \"cinder-api-0\" (UID: \"bf21e7b5-8a7d-4009-98cc-f8bbe4150000\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:18:38 crc 
kubenswrapper[4558]: I0120 17:18:38.402797 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bf21e7b5-8a7d-4009-98cc-f8bbe4150000-logs\") pod \"cinder-api-0\" (UID: \"bf21e7b5-8a7d-4009-98cc-f8bbe4150000\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:18:38 crc kubenswrapper[4558]: I0120 17:18:38.411007 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf21e7b5-8a7d-4009-98cc-f8bbe4150000-config-data\") pod \"cinder-api-0\" (UID: \"bf21e7b5-8a7d-4009-98cc-f8bbe4150000\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:18:38 crc kubenswrapper[4558]: I0120 17:18:38.417371 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bf21e7b5-8a7d-4009-98cc-f8bbe4150000-scripts\") pod \"cinder-api-0\" (UID: \"bf21e7b5-8a7d-4009-98cc-f8bbe4150000\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:18:38 crc kubenswrapper[4558]: I0120 17:18:38.418832 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bf21e7b5-8a7d-4009-98cc-f8bbe4150000-config-data-custom\") pod \"cinder-api-0\" (UID: \"bf21e7b5-8a7d-4009-98cc-f8bbe4150000\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:18:38 crc kubenswrapper[4558]: I0120 17:18:38.419936 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf21e7b5-8a7d-4009-98cc-f8bbe4150000-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"bf21e7b5-8a7d-4009-98cc-f8bbe4150000\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:18:38 crc kubenswrapper[4558]: I0120 17:18:38.425652 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5rsng\" (UniqueName: \"kubernetes.io/projected/bf21e7b5-8a7d-4009-98cc-f8bbe4150000-kube-api-access-5rsng\") pod \"cinder-api-0\" (UID: \"bf21e7b5-8a7d-4009-98cc-f8bbe4150000\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:18:38 crc kubenswrapper[4558]: I0120 17:18:38.425857 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:18:38 crc kubenswrapper[4558]: I0120 17:18:38.572311 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:18:38 crc kubenswrapper[4558]: I0120 17:18:38.599878 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="037cfbe8-8c84-4901-9e77-9513f22e39a1" path="/var/lib/kubelet/pods/037cfbe8-8c84-4901-9e77-9513f22e39a1/volumes" Jan 20 17:18:39 crc kubenswrapper[4558]: I0120 17:18:39.015599 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"b5e92730-f89e-4f1d-9a29-925c5acda961","Type":"ContainerStarted","Data":"9003fbf8af830e35a1d6485bbee760ae6d290f957bbb030215bddf31d0f8fc2c"} Jan 20 17:18:39 crc kubenswrapper[4558]: I0120 17:18:39.072125 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:18:39 crc kubenswrapper[4558]: W0120 17:18:39.078072 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode7f15828_3902_4413_b35a_b33487057ea6.slice/crio-a17b6876bf8dbd5a0e5775eeb15deaec2d5acd8068f6e505ffeb722ee6a91dd3 WatchSource:0}: Error finding container a17b6876bf8dbd5a0e5775eeb15deaec2d5acd8068f6e505ffeb722ee6a91dd3: Status 404 returned error can't find the container with id a17b6876bf8dbd5a0e5775eeb15deaec2d5acd8068f6e505ffeb722ee6a91dd3 Jan 20 17:18:39 crc kubenswrapper[4558]: I0120 17:18:39.131704 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:18:39 crc kubenswrapper[4558]: W0120 17:18:39.133819 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbf21e7b5_8a7d_4009_98cc_f8bbe4150000.slice/crio-674402ebd5ca164ebec58b19dc921489355cff4c1987778ccec4029c5551f803 WatchSource:0}: Error finding container 674402ebd5ca164ebec58b19dc921489355cff4c1987778ccec4029c5551f803: Status 404 returned error can't find the container with id 674402ebd5ca164ebec58b19dc921489355cff4c1987778ccec4029c5551f803 Jan 20 17:18:39 crc kubenswrapper[4558]: I0120 17:18:39.565953 4558 scope.go:117] "RemoveContainer" containerID="0ffce44366a8b4466427b682fb41640425366a0f4449210959148de9aef695c6" Jan 20 17:18:39 crc kubenswrapper[4558]: E0120 17:18:39.566442 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:18:40 crc kubenswrapper[4558]: I0120 17:18:40.056278 4558 generic.go:334] "Generic (PLEG): container finished" podID="6fa38873-97d7-458b-8963-ceeae696c9af" containerID="3798eba8e78b98bed61f098684e8e33e9b75e12fe73a743523793c25884ee006" exitCode=0 Jan 20 17:18:40 crc kubenswrapper[4558]: I0120 17:18:40.056329 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-v4bgz" event={"ID":"6fa38873-97d7-458b-8963-ceeae696c9af","Type":"ContainerDied","Data":"3798eba8e78b98bed61f098684e8e33e9b75e12fe73a743523793c25884ee006"} Jan 20 17:18:40 crc kubenswrapper[4558]: I0120 17:18:40.073148 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" 
event={"ID":"b5e92730-f89e-4f1d-9a29-925c5acda961","Type":"ContainerStarted","Data":"d30f7131a9e2cd443f7a4870ac2230d1ab5b7a51a7a7439098ff9ccda4a3e25b"} Jan 20 17:18:40 crc kubenswrapper[4558]: I0120 17:18:40.077015 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"e7f15828-3902-4413-b35a-b33487057ea6","Type":"ContainerStarted","Data":"2f4a6e34fee542fde92457779ab42b75ad188b49ed973211487d82f9b9c24284"} Jan 20 17:18:40 crc kubenswrapper[4558]: I0120 17:18:40.077065 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"e7f15828-3902-4413-b35a-b33487057ea6","Type":"ContainerStarted","Data":"a17b6876bf8dbd5a0e5775eeb15deaec2d5acd8068f6e505ffeb722ee6a91dd3"} Jan 20 17:18:40 crc kubenswrapper[4558]: I0120 17:18:40.079381 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"bf21e7b5-8a7d-4009-98cc-f8bbe4150000","Type":"ContainerStarted","Data":"3cee2b98d305e1a2cdcd226de3670be5d5e874f44424bc2f6f0c585dd44af77c"} Jan 20 17:18:40 crc kubenswrapper[4558]: I0120 17:18:40.079421 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"bf21e7b5-8a7d-4009-98cc-f8bbe4150000","Type":"ContainerStarted","Data":"674402ebd5ca164ebec58b19dc921489355cff4c1987778ccec4029c5551f803"} Jan 20 17:18:41 crc kubenswrapper[4558]: I0120 17:18:41.066529 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-4bc72"] Jan 20 17:18:41 crc kubenswrapper[4558]: I0120 17:18:41.068774 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4bc72" Jan 20 17:18:41 crc kubenswrapper[4558]: I0120 17:18:41.078308 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4bc72"] Jan 20 17:18:41 crc kubenswrapper[4558]: I0120 17:18:41.094445 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"bf21e7b5-8a7d-4009-98cc-f8bbe4150000","Type":"ContainerStarted","Data":"4db25d25ca4cc1e4675ba6c19698d9ed43d213338095bc6a344265062fee9b73"} Jan 20 17:18:41 crc kubenswrapper[4558]: I0120 17:18:41.094528 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:18:41 crc kubenswrapper[4558]: I0120 17:18:41.096598 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"b5e92730-f89e-4f1d-9a29-925c5acda961","Type":"ContainerStarted","Data":"b54c224c9ceb5230ae536b85978dfae73a97fbd2ecddcabd5d1b2615d78a8104"} Jan 20 17:18:41 crc kubenswrapper[4558]: I0120 17:18:41.102517 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"e7f15828-3902-4413-b35a-b33487057ea6","Type":"ContainerStarted","Data":"308537a807ea98cf0cb1db26e2083a6fc000a2f81a9f211aa0566066bc04752c"} Jan 20 17:18:41 crc kubenswrapper[4558]: I0120 17:18:41.162082 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/cinder-api-0" podStartSLOduration=3.162059986 podStartE2EDuration="3.162059986s" podCreationTimestamp="2026-01-20 17:18:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:18:41.145474287 +0000 UTC m=+2214.905812254" 
watchObservedRunningTime="2026-01-20 17:18:41.162059986 +0000 UTC m=+2214.922397954" Jan 20 17:18:41 crc kubenswrapper[4558]: I0120 17:18:41.169113 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/cinder-scheduler-0" podStartSLOduration=3.169105966 podStartE2EDuration="3.169105966s" podCreationTimestamp="2026-01-20 17:18:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:18:41.162049437 +0000 UTC m=+2214.922387403" watchObservedRunningTime="2026-01-20 17:18:41.169105966 +0000 UTC m=+2214.929443924" Jan 20 17:18:41 crc kubenswrapper[4558]: I0120 17:18:41.175114 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6bc4e0a0-7c1f-4b44-8724-dd6fa3f41d1d-utilities\") pod \"redhat-marketplace-4bc72\" (UID: \"6bc4e0a0-7c1f-4b44-8724-dd6fa3f41d1d\") " pod="openshift-marketplace/redhat-marketplace-4bc72" Jan 20 17:18:41 crc kubenswrapper[4558]: I0120 17:18:41.175233 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d4nxg\" (UniqueName: \"kubernetes.io/projected/6bc4e0a0-7c1f-4b44-8724-dd6fa3f41d1d-kube-api-access-d4nxg\") pod \"redhat-marketplace-4bc72\" (UID: \"6bc4e0a0-7c1f-4b44-8724-dd6fa3f41d1d\") " pod="openshift-marketplace/redhat-marketplace-4bc72" Jan 20 17:18:41 crc kubenswrapper[4558]: I0120 17:18:41.175300 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6bc4e0a0-7c1f-4b44-8724-dd6fa3f41d1d-catalog-content\") pod \"redhat-marketplace-4bc72\" (UID: \"6bc4e0a0-7c1f-4b44-8724-dd6fa3f41d1d\") " pod="openshift-marketplace/redhat-marketplace-4bc72" Jan 20 17:18:41 crc kubenswrapper[4558]: I0120 17:18:41.282782 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6bc4e0a0-7c1f-4b44-8724-dd6fa3f41d1d-catalog-content\") pod \"redhat-marketplace-4bc72\" (UID: \"6bc4e0a0-7c1f-4b44-8724-dd6fa3f41d1d\") " pod="openshift-marketplace/redhat-marketplace-4bc72" Jan 20 17:18:41 crc kubenswrapper[4558]: I0120 17:18:41.283001 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6bc4e0a0-7c1f-4b44-8724-dd6fa3f41d1d-utilities\") pod \"redhat-marketplace-4bc72\" (UID: \"6bc4e0a0-7c1f-4b44-8724-dd6fa3f41d1d\") " pod="openshift-marketplace/redhat-marketplace-4bc72" Jan 20 17:18:41 crc kubenswrapper[4558]: I0120 17:18:41.283063 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d4nxg\" (UniqueName: \"kubernetes.io/projected/6bc4e0a0-7c1f-4b44-8724-dd6fa3f41d1d-kube-api-access-d4nxg\") pod \"redhat-marketplace-4bc72\" (UID: \"6bc4e0a0-7c1f-4b44-8724-dd6fa3f41d1d\") " pod="openshift-marketplace/redhat-marketplace-4bc72" Jan 20 17:18:41 crc kubenswrapper[4558]: I0120 17:18:41.284757 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6bc4e0a0-7c1f-4b44-8724-dd6fa3f41d1d-utilities\") pod \"redhat-marketplace-4bc72\" (UID: \"6bc4e0a0-7c1f-4b44-8724-dd6fa3f41d1d\") " pod="openshift-marketplace/redhat-marketplace-4bc72" Jan 20 17:18:41 crc kubenswrapper[4558]: I0120 17:18:41.284946 4558 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6bc4e0a0-7c1f-4b44-8724-dd6fa3f41d1d-catalog-content\") pod \"redhat-marketplace-4bc72\" (UID: \"6bc4e0a0-7c1f-4b44-8724-dd6fa3f41d1d\") " pod="openshift-marketplace/redhat-marketplace-4bc72" Jan 20 17:18:41 crc kubenswrapper[4558]: I0120 17:18:41.318802 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d4nxg\" (UniqueName: \"kubernetes.io/projected/6bc4e0a0-7c1f-4b44-8724-dd6fa3f41d1d-kube-api-access-d4nxg\") pod \"redhat-marketplace-4bc72\" (UID: \"6bc4e0a0-7c1f-4b44-8724-dd6fa3f41d1d\") " pod="openshift-marketplace/redhat-marketplace-4bc72" Jan 20 17:18:41 crc kubenswrapper[4558]: I0120 17:18:41.385749 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4bc72" Jan 20 17:18:41 crc kubenswrapper[4558]: I0120 17:18:41.563065 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-v4bgz" Jan 20 17:18:41 crc kubenswrapper[4558]: I0120 17:18:41.690141 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6fa38873-97d7-458b-8963-ceeae696c9af-scripts\") pod \"6fa38873-97d7-458b-8963-ceeae696c9af\" (UID: \"6fa38873-97d7-458b-8963-ceeae696c9af\") " Jan 20 17:18:41 crc kubenswrapper[4558]: I0120 17:18:41.690386 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6fa38873-97d7-458b-8963-ceeae696c9af-config-data\") pod \"6fa38873-97d7-458b-8963-ceeae696c9af\" (UID: \"6fa38873-97d7-458b-8963-ceeae696c9af\") " Jan 20 17:18:41 crc kubenswrapper[4558]: I0120 17:18:41.690673 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/6fa38873-97d7-458b-8963-ceeae696c9af-credential-keys\") pod \"6fa38873-97d7-458b-8963-ceeae696c9af\" (UID: \"6fa38873-97d7-458b-8963-ceeae696c9af\") " Jan 20 17:18:41 crc kubenswrapper[4558]: I0120 17:18:41.690744 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/6fa38873-97d7-458b-8963-ceeae696c9af-fernet-keys\") pod \"6fa38873-97d7-458b-8963-ceeae696c9af\" (UID: \"6fa38873-97d7-458b-8963-ceeae696c9af\") " Jan 20 17:18:41 crc kubenswrapper[4558]: I0120 17:18:41.690766 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fa38873-97d7-458b-8963-ceeae696c9af-combined-ca-bundle\") pod \"6fa38873-97d7-458b-8963-ceeae696c9af\" (UID: \"6fa38873-97d7-458b-8963-ceeae696c9af\") " Jan 20 17:18:41 crc kubenswrapper[4558]: I0120 17:18:41.690798 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bgkqd\" (UniqueName: \"kubernetes.io/projected/6fa38873-97d7-458b-8963-ceeae696c9af-kube-api-access-bgkqd\") pod \"6fa38873-97d7-458b-8963-ceeae696c9af\" (UID: \"6fa38873-97d7-458b-8963-ceeae696c9af\") " Jan 20 17:18:41 crc kubenswrapper[4558]: I0120 17:18:41.710279 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6fa38873-97d7-458b-8963-ceeae696c9af-kube-api-access-bgkqd" (OuterVolumeSpecName: "kube-api-access-bgkqd") pod "6fa38873-97d7-458b-8963-ceeae696c9af" (UID: "6fa38873-97d7-458b-8963-ceeae696c9af"). 
InnerVolumeSpecName "kube-api-access-bgkqd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:18:41 crc kubenswrapper[4558]: I0120 17:18:41.720881 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6fa38873-97d7-458b-8963-ceeae696c9af-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "6fa38873-97d7-458b-8963-ceeae696c9af" (UID: "6fa38873-97d7-458b-8963-ceeae696c9af"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:18:41 crc kubenswrapper[4558]: I0120 17:18:41.724192 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6fa38873-97d7-458b-8963-ceeae696c9af-scripts" (OuterVolumeSpecName: "scripts") pod "6fa38873-97d7-458b-8963-ceeae696c9af" (UID: "6fa38873-97d7-458b-8963-ceeae696c9af"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:18:41 crc kubenswrapper[4558]: I0120 17:18:41.727251 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6fa38873-97d7-458b-8963-ceeae696c9af-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "6fa38873-97d7-458b-8963-ceeae696c9af" (UID: "6fa38873-97d7-458b-8963-ceeae696c9af"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:18:41 crc kubenswrapper[4558]: I0120 17:18:41.737194 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6fa38873-97d7-458b-8963-ceeae696c9af-config-data" (OuterVolumeSpecName: "config-data") pod "6fa38873-97d7-458b-8963-ceeae696c9af" (UID: "6fa38873-97d7-458b-8963-ceeae696c9af"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:18:41 crc kubenswrapper[4558]: I0120 17:18:41.737736 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6fa38873-97d7-458b-8963-ceeae696c9af-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6fa38873-97d7-458b-8963-ceeae696c9af" (UID: "6fa38873-97d7-458b-8963-ceeae696c9af"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:18:41 crc kubenswrapper[4558]: I0120 17:18:41.793755 4558 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/6fa38873-97d7-458b-8963-ceeae696c9af-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:41 crc kubenswrapper[4558]: I0120 17:18:41.793787 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fa38873-97d7-458b-8963-ceeae696c9af-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:41 crc kubenswrapper[4558]: I0120 17:18:41.793802 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bgkqd\" (UniqueName: \"kubernetes.io/projected/6fa38873-97d7-458b-8963-ceeae696c9af-kube-api-access-bgkqd\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:41 crc kubenswrapper[4558]: I0120 17:18:41.793813 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6fa38873-97d7-458b-8963-ceeae696c9af-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:41 crc kubenswrapper[4558]: I0120 17:18:41.793822 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6fa38873-97d7-458b-8963-ceeae696c9af-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:41 crc kubenswrapper[4558]: I0120 17:18:41.793831 4558 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/6fa38873-97d7-458b-8963-ceeae696c9af-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:41 crc kubenswrapper[4558]: I0120 17:18:41.868014 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4bc72"] Jan 20 17:18:42 crc kubenswrapper[4558]: I0120 17:18:42.112746 4558 generic.go:334] "Generic (PLEG): container finished" podID="6bc4e0a0-7c1f-4b44-8724-dd6fa3f41d1d" containerID="0d7f477e7e06832c7429e011343b9dc748dcc87bee4a71816003bd72474f5ea3" exitCode=0 Jan 20 17:18:42 crc kubenswrapper[4558]: I0120 17:18:42.112848 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4bc72" event={"ID":"6bc4e0a0-7c1f-4b44-8724-dd6fa3f41d1d","Type":"ContainerDied","Data":"0d7f477e7e06832c7429e011343b9dc748dcc87bee4a71816003bd72474f5ea3"} Jan 20 17:18:42 crc kubenswrapper[4558]: I0120 17:18:42.112903 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4bc72" event={"ID":"6bc4e0a0-7c1f-4b44-8724-dd6fa3f41d1d","Type":"ContainerStarted","Data":"fcbdec7d170469e601ec0709578dca22e0d00415f727877c71e80ba4ab104553"} Jan 20 17:18:42 crc kubenswrapper[4558]: I0120 17:18:42.114775 4558 generic.go:334] "Generic (PLEG): container finished" podID="5659e8f8-3581-4ecf-a2ee-0c6d044b17e2" containerID="207e164549e3e5e61bf2c71ee34097728114f9cd05bc02c75ea304dc4cbd25f8" exitCode=0 Jan 20 17:18:42 crc kubenswrapper[4558]: I0120 17:18:42.114819 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-sync-682tw" event={"ID":"5659e8f8-3581-4ecf-a2ee-0c6d044b17e2","Type":"ContainerDied","Data":"207e164549e3e5e61bf2c71ee34097728114f9cd05bc02c75ea304dc4cbd25f8"} Jan 20 17:18:42 crc kubenswrapper[4558]: I0120 17:18:42.117399 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-v4bgz" 
event={"ID":"6fa38873-97d7-458b-8963-ceeae696c9af","Type":"ContainerDied","Data":"0f199f022ec29848ee81ec8516b8e901fa649b2f2665c88aeee10fb34ceaa444"} Jan 20 17:18:42 crc kubenswrapper[4558]: I0120 17:18:42.117435 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0f199f022ec29848ee81ec8516b8e901fa649b2f2665c88aeee10fb34ceaa444" Jan 20 17:18:42 crc kubenswrapper[4558]: I0120 17:18:42.117446 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-v4bgz" Jan 20 17:18:42 crc kubenswrapper[4558]: I0120 17:18:42.121293 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"b5e92730-f89e-4f1d-9a29-925c5acda961","Type":"ContainerStarted","Data":"05ca3f2636e110d0332674062bdecc1c997c9e6bac0b7a0e7421774ccd7af025"} Jan 20 17:18:42 crc kubenswrapper[4558]: I0120 17:18:42.157110 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=2.453918387 podStartE2EDuration="6.157092966s" podCreationTimestamp="2026-01-20 17:18:36 +0000 UTC" firstStartedPulling="2026-01-20 17:18:37.802103523 +0000 UTC m=+2211.562441490" lastFinishedPulling="2026-01-20 17:18:41.505278102 +0000 UTC m=+2215.265616069" observedRunningTime="2026-01-20 17:18:42.14846992 +0000 UTC m=+2215.908807887" watchObservedRunningTime="2026-01-20 17:18:42.157092966 +0000 UTC m=+2215.917430934" Jan 20 17:18:42 crc kubenswrapper[4558]: I0120 17:18:42.206136 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-5585fd7cd8-gk5qt"] Jan 20 17:18:42 crc kubenswrapper[4558]: E0120 17:18:42.206585 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6fa38873-97d7-458b-8963-ceeae696c9af" containerName="keystone-bootstrap" Jan 20 17:18:42 crc kubenswrapper[4558]: I0120 17:18:42.206602 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6fa38873-97d7-458b-8963-ceeae696c9af" containerName="keystone-bootstrap" Jan 20 17:18:42 crc kubenswrapper[4558]: I0120 17:18:42.206806 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="6fa38873-97d7-458b-8963-ceeae696c9af" containerName="keystone-bootstrap" Jan 20 17:18:42 crc kubenswrapper[4558]: I0120 17:18:42.207472 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-5585fd7cd8-gk5qt" Jan 20 17:18:42 crc kubenswrapper[4558]: I0120 17:18:42.210877 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone" Jan 20 17:18:42 crc kubenswrapper[4558]: I0120 17:18:42.211889 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-keystone-internal-svc" Jan 20 17:18:42 crc kubenswrapper[4558]: I0120 17:18:42.211979 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-scripts" Jan 20 17:18:42 crc kubenswrapper[4558]: I0120 17:18:42.211985 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-keystone-dockercfg-n8z97" Jan 20 17:18:42 crc kubenswrapper[4558]: I0120 17:18:42.211985 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-keystone-public-svc" Jan 20 17:18:42 crc kubenswrapper[4558]: I0120 17:18:42.213715 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-config-data" Jan 20 17:18:42 crc kubenswrapper[4558]: I0120 17:18:42.223476 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-5585fd7cd8-gk5qt"] Jan 20 17:18:42 crc kubenswrapper[4558]: I0120 17:18:42.307225 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9-scripts\") pod \"keystone-5585fd7cd8-gk5qt\" (UID: \"ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9\") " pod="openstack-kuttl-tests/keystone-5585fd7cd8-gk5qt" Jan 20 17:18:42 crc kubenswrapper[4558]: I0120 17:18:42.307377 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t4vhd\" (UniqueName: \"kubernetes.io/projected/ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9-kube-api-access-t4vhd\") pod \"keystone-5585fd7cd8-gk5qt\" (UID: \"ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9\") " pod="openstack-kuttl-tests/keystone-5585fd7cd8-gk5qt" Jan 20 17:18:42 crc kubenswrapper[4558]: I0120 17:18:42.307690 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9-public-tls-certs\") pod \"keystone-5585fd7cd8-gk5qt\" (UID: \"ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9\") " pod="openstack-kuttl-tests/keystone-5585fd7cd8-gk5qt" Jan 20 17:18:42 crc kubenswrapper[4558]: I0120 17:18:42.307888 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9-config-data\") pod \"keystone-5585fd7cd8-gk5qt\" (UID: \"ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9\") " pod="openstack-kuttl-tests/keystone-5585fd7cd8-gk5qt" Jan 20 17:18:42 crc kubenswrapper[4558]: I0120 17:18:42.308999 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9-combined-ca-bundle\") pod \"keystone-5585fd7cd8-gk5qt\" (UID: \"ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9\") " pod="openstack-kuttl-tests/keystone-5585fd7cd8-gk5qt" Jan 20 17:18:42 crc kubenswrapper[4558]: I0120 17:18:42.309275 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9-fernet-keys\") pod \"keystone-5585fd7cd8-gk5qt\" (UID: \"ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9\") " pod="openstack-kuttl-tests/keystone-5585fd7cd8-gk5qt" Jan 20 17:18:42 crc kubenswrapper[4558]: I0120 17:18:42.309376 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9-internal-tls-certs\") pod \"keystone-5585fd7cd8-gk5qt\" (UID: \"ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9\") " pod="openstack-kuttl-tests/keystone-5585fd7cd8-gk5qt" Jan 20 17:18:42 crc kubenswrapper[4558]: I0120 17:18:42.309518 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9-credential-keys\") pod \"keystone-5585fd7cd8-gk5qt\" (UID: \"ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9\") " pod="openstack-kuttl-tests/keystone-5585fd7cd8-gk5qt" Jan 20 17:18:42 crc kubenswrapper[4558]: I0120 17:18:42.412322 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9-credential-keys\") pod \"keystone-5585fd7cd8-gk5qt\" (UID: \"ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9\") " pod="openstack-kuttl-tests/keystone-5585fd7cd8-gk5qt" Jan 20 17:18:42 crc kubenswrapper[4558]: I0120 17:18:42.412437 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9-scripts\") pod \"keystone-5585fd7cd8-gk5qt\" (UID: \"ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9\") " pod="openstack-kuttl-tests/keystone-5585fd7cd8-gk5qt" Jan 20 17:18:42 crc kubenswrapper[4558]: I0120 17:18:42.412492 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t4vhd\" (UniqueName: \"kubernetes.io/projected/ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9-kube-api-access-t4vhd\") pod \"keystone-5585fd7cd8-gk5qt\" (UID: \"ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9\") " pod="openstack-kuttl-tests/keystone-5585fd7cd8-gk5qt" Jan 20 17:18:42 crc kubenswrapper[4558]: I0120 17:18:42.412582 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9-public-tls-certs\") pod \"keystone-5585fd7cd8-gk5qt\" (UID: \"ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9\") " pod="openstack-kuttl-tests/keystone-5585fd7cd8-gk5qt" Jan 20 17:18:42 crc kubenswrapper[4558]: I0120 17:18:42.412629 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9-config-data\") pod \"keystone-5585fd7cd8-gk5qt\" (UID: \"ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9\") " pod="openstack-kuttl-tests/keystone-5585fd7cd8-gk5qt" Jan 20 17:18:42 crc kubenswrapper[4558]: I0120 17:18:42.412663 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9-combined-ca-bundle\") pod \"keystone-5585fd7cd8-gk5qt\" (UID: \"ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9\") " pod="openstack-kuttl-tests/keystone-5585fd7cd8-gk5qt" Jan 20 17:18:42 crc kubenswrapper[4558]: I0120 17:18:42.412702 4558 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9-fernet-keys\") pod \"keystone-5585fd7cd8-gk5qt\" (UID: \"ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9\") " pod="openstack-kuttl-tests/keystone-5585fd7cd8-gk5qt" Jan 20 17:18:42 crc kubenswrapper[4558]: I0120 17:18:42.412743 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9-internal-tls-certs\") pod \"keystone-5585fd7cd8-gk5qt\" (UID: \"ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9\") " pod="openstack-kuttl-tests/keystone-5585fd7cd8-gk5qt" Jan 20 17:18:42 crc kubenswrapper[4558]: I0120 17:18:42.420335 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9-combined-ca-bundle\") pod \"keystone-5585fd7cd8-gk5qt\" (UID: \"ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9\") " pod="openstack-kuttl-tests/keystone-5585fd7cd8-gk5qt" Jan 20 17:18:42 crc kubenswrapper[4558]: I0120 17:18:42.420667 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9-config-data\") pod \"keystone-5585fd7cd8-gk5qt\" (UID: \"ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9\") " pod="openstack-kuttl-tests/keystone-5585fd7cd8-gk5qt" Jan 20 17:18:42 crc kubenswrapper[4558]: I0120 17:18:42.420754 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9-scripts\") pod \"keystone-5585fd7cd8-gk5qt\" (UID: \"ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9\") " pod="openstack-kuttl-tests/keystone-5585fd7cd8-gk5qt" Jan 20 17:18:42 crc kubenswrapper[4558]: I0120 17:18:42.420780 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9-public-tls-certs\") pod \"keystone-5585fd7cd8-gk5qt\" (UID: \"ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9\") " pod="openstack-kuttl-tests/keystone-5585fd7cd8-gk5qt" Jan 20 17:18:42 crc kubenswrapper[4558]: I0120 17:18:42.420919 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9-internal-tls-certs\") pod \"keystone-5585fd7cd8-gk5qt\" (UID: \"ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9\") " pod="openstack-kuttl-tests/keystone-5585fd7cd8-gk5qt" Jan 20 17:18:42 crc kubenswrapper[4558]: I0120 17:18:42.423722 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9-credential-keys\") pod \"keystone-5585fd7cd8-gk5qt\" (UID: \"ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9\") " pod="openstack-kuttl-tests/keystone-5585fd7cd8-gk5qt" Jan 20 17:18:42 crc kubenswrapper[4558]: I0120 17:18:42.424267 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9-fernet-keys\") pod \"keystone-5585fd7cd8-gk5qt\" (UID: \"ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9\") " pod="openstack-kuttl-tests/keystone-5585fd7cd8-gk5qt" Jan 20 17:18:42 crc kubenswrapper[4558]: I0120 17:18:42.434736 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t4vhd\" (UniqueName: 
\"kubernetes.io/projected/ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9-kube-api-access-t4vhd\") pod \"keystone-5585fd7cd8-gk5qt\" (UID: \"ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9\") " pod="openstack-kuttl-tests/keystone-5585fd7cd8-gk5qt" Jan 20 17:18:42 crc kubenswrapper[4558]: I0120 17:18:42.522873 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-5585fd7cd8-gk5qt" Jan 20 17:18:42 crc kubenswrapper[4558]: I0120 17:18:42.542223 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:18:43 crc kubenswrapper[4558]: I0120 17:18:43.022675 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-api-7b899ccb8d-cfqnl"] Jan 20 17:18:43 crc kubenswrapper[4558]: I0120 17:18:43.033465 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-api-7b899ccb8d-cfqnl" Jan 20 17:18:43 crc kubenswrapper[4558]: I0120 17:18:43.070573 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-barbican-internal-svc" Jan 20 17:18:43 crc kubenswrapper[4558]: I0120 17:18:43.070779 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-barbican-public-svc" Jan 20 17:18:43 crc kubenswrapper[4558]: I0120 17:18:43.079562 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-api-7b899ccb8d-cfqnl"] Jan 20 17:18:43 crc kubenswrapper[4558]: I0120 17:18:43.086468 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-5585fd7cd8-gk5qt"] Jan 20 17:18:43 crc kubenswrapper[4558]: I0120 17:18:43.112603 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:18:43 crc kubenswrapper[4558]: I0120 17:18:43.112660 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:18:43 crc kubenswrapper[4558]: I0120 17:18:43.125573 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:18:43 crc kubenswrapper[4558]: I0120 17:18:43.125601 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:18:43 crc kubenswrapper[4558]: I0120 17:18:43.131115 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/14af4a81-8568-425c-a209-4d90a042c365-public-tls-certs\") pod \"barbican-api-7b899ccb8d-cfqnl\" (UID: \"14af4a81-8568-425c-a209-4d90a042c365\") " pod="openstack-kuttl-tests/barbican-api-7b899ccb8d-cfqnl" Jan 20 17:18:43 crc kubenswrapper[4558]: I0120 17:18:43.131177 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14af4a81-8568-425c-a209-4d90a042c365-config-data\") pod \"barbican-api-7b899ccb8d-cfqnl\" (UID: \"14af4a81-8568-425c-a209-4d90a042c365\") " pod="openstack-kuttl-tests/barbican-api-7b899ccb8d-cfqnl" Jan 20 17:18:43 crc kubenswrapper[4558]: I0120 17:18:43.131204 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/14af4a81-8568-425c-a209-4d90a042c365-combined-ca-bundle\") pod \"barbican-api-7b899ccb8d-cfqnl\" (UID: \"14af4a81-8568-425c-a209-4d90a042c365\") " pod="openstack-kuttl-tests/barbican-api-7b899ccb8d-cfqnl" Jan 20 17:18:43 crc kubenswrapper[4558]: I0120 17:18:43.131393 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/14af4a81-8568-425c-a209-4d90a042c365-config-data-custom\") pod \"barbican-api-7b899ccb8d-cfqnl\" (UID: \"14af4a81-8568-425c-a209-4d90a042c365\") " pod="openstack-kuttl-tests/barbican-api-7b899ccb8d-cfqnl" Jan 20 17:18:43 crc kubenswrapper[4558]: I0120 17:18:43.131563 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lbgv5\" (UniqueName: \"kubernetes.io/projected/14af4a81-8568-425c-a209-4d90a042c365-kube-api-access-lbgv5\") pod \"barbican-api-7b899ccb8d-cfqnl\" (UID: \"14af4a81-8568-425c-a209-4d90a042c365\") " pod="openstack-kuttl-tests/barbican-api-7b899ccb8d-cfqnl" Jan 20 17:18:43 crc kubenswrapper[4558]: I0120 17:18:43.131619 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/14af4a81-8568-425c-a209-4d90a042c365-logs\") pod \"barbican-api-7b899ccb8d-cfqnl\" (UID: \"14af4a81-8568-425c-a209-4d90a042c365\") " pod="openstack-kuttl-tests/barbican-api-7b899ccb8d-cfqnl" Jan 20 17:18:43 crc kubenswrapper[4558]: I0120 17:18:43.131715 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/14af4a81-8568-425c-a209-4d90a042c365-internal-tls-certs\") pod \"barbican-api-7b899ccb8d-cfqnl\" (UID: \"14af4a81-8568-425c-a209-4d90a042c365\") " pod="openstack-kuttl-tests/barbican-api-7b899ccb8d-cfqnl" Jan 20 17:18:43 crc kubenswrapper[4558]: I0120 17:18:43.141456 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-5585fd7cd8-gk5qt" event={"ID":"ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9","Type":"ContainerStarted","Data":"a9490e0bfb667bbf0d5d2683462bf8e91fca116a71c21c093bc25591a5f2f0e5"} Jan 20 17:18:43 crc kubenswrapper[4558]: I0120 17:18:43.141618 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:18:43 crc kubenswrapper[4558]: I0120 17:18:43.142121 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-api-0" podUID="bf21e7b5-8a7d-4009-98cc-f8bbe4150000" containerName="cinder-api-log" containerID="cri-o://3cee2b98d305e1a2cdcd226de3670be5d5e874f44424bc2f6f0c585dd44af77c" gracePeriod=30 Jan 20 17:18:43 crc kubenswrapper[4558]: I0120 17:18:43.142207 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-api-0" podUID="bf21e7b5-8a7d-4009-98cc-f8bbe4150000" containerName="cinder-api" containerID="cri-o://4db25d25ca4cc1e4675ba6c19698d9ed43d213338095bc6a344265062fee9b73" gracePeriod=30 Jan 20 17:18:43 crc kubenswrapper[4558]: I0120 17:18:43.193107 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:18:43 crc kubenswrapper[4558]: I0120 17:18:43.194873 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:18:43 crc 
kubenswrapper[4558]: I0120 17:18:43.199923 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:18:43 crc kubenswrapper[4558]: I0120 17:18:43.200010 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:18:43 crc kubenswrapper[4558]: I0120 17:18:43.235755 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14af4a81-8568-425c-a209-4d90a042c365-config-data\") pod \"barbican-api-7b899ccb8d-cfqnl\" (UID: \"14af4a81-8568-425c-a209-4d90a042c365\") " pod="openstack-kuttl-tests/barbican-api-7b899ccb8d-cfqnl" Jan 20 17:18:43 crc kubenswrapper[4558]: I0120 17:18:43.236052 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14af4a81-8568-425c-a209-4d90a042c365-combined-ca-bundle\") pod \"barbican-api-7b899ccb8d-cfqnl\" (UID: \"14af4a81-8568-425c-a209-4d90a042c365\") " pod="openstack-kuttl-tests/barbican-api-7b899ccb8d-cfqnl" Jan 20 17:18:43 crc kubenswrapper[4558]: I0120 17:18:43.236208 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/14af4a81-8568-425c-a209-4d90a042c365-config-data-custom\") pod \"barbican-api-7b899ccb8d-cfqnl\" (UID: \"14af4a81-8568-425c-a209-4d90a042c365\") " pod="openstack-kuttl-tests/barbican-api-7b899ccb8d-cfqnl" Jan 20 17:18:43 crc kubenswrapper[4558]: I0120 17:18:43.236304 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lbgv5\" (UniqueName: \"kubernetes.io/projected/14af4a81-8568-425c-a209-4d90a042c365-kube-api-access-lbgv5\") pod \"barbican-api-7b899ccb8d-cfqnl\" (UID: \"14af4a81-8568-425c-a209-4d90a042c365\") " pod="openstack-kuttl-tests/barbican-api-7b899ccb8d-cfqnl" Jan 20 17:18:43 crc kubenswrapper[4558]: I0120 17:18:43.236336 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/14af4a81-8568-425c-a209-4d90a042c365-logs\") pod \"barbican-api-7b899ccb8d-cfqnl\" (UID: \"14af4a81-8568-425c-a209-4d90a042c365\") " pod="openstack-kuttl-tests/barbican-api-7b899ccb8d-cfqnl" Jan 20 17:18:43 crc kubenswrapper[4558]: I0120 17:18:43.236404 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/14af4a81-8568-425c-a209-4d90a042c365-internal-tls-certs\") pod \"barbican-api-7b899ccb8d-cfqnl\" (UID: \"14af4a81-8568-425c-a209-4d90a042c365\") " pod="openstack-kuttl-tests/barbican-api-7b899ccb8d-cfqnl" Jan 20 17:18:43 crc kubenswrapper[4558]: I0120 17:18:43.236642 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/14af4a81-8568-425c-a209-4d90a042c365-public-tls-certs\") pod \"barbican-api-7b899ccb8d-cfqnl\" (UID: \"14af4a81-8568-425c-a209-4d90a042c365\") " pod="openstack-kuttl-tests/barbican-api-7b899ccb8d-cfqnl" Jan 20 17:18:43 crc kubenswrapper[4558]: I0120 17:18:43.236896 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/14af4a81-8568-425c-a209-4d90a042c365-logs\") pod \"barbican-api-7b899ccb8d-cfqnl\" (UID: \"14af4a81-8568-425c-a209-4d90a042c365\") " 
pod="openstack-kuttl-tests/barbican-api-7b899ccb8d-cfqnl" Jan 20 17:18:43 crc kubenswrapper[4558]: I0120 17:18:43.240776 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14af4a81-8568-425c-a209-4d90a042c365-config-data\") pod \"barbican-api-7b899ccb8d-cfqnl\" (UID: \"14af4a81-8568-425c-a209-4d90a042c365\") " pod="openstack-kuttl-tests/barbican-api-7b899ccb8d-cfqnl" Jan 20 17:18:43 crc kubenswrapper[4558]: I0120 17:18:43.241374 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14af4a81-8568-425c-a209-4d90a042c365-combined-ca-bundle\") pod \"barbican-api-7b899ccb8d-cfqnl\" (UID: \"14af4a81-8568-425c-a209-4d90a042c365\") " pod="openstack-kuttl-tests/barbican-api-7b899ccb8d-cfqnl" Jan 20 17:18:43 crc kubenswrapper[4558]: I0120 17:18:43.256990 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/14af4a81-8568-425c-a209-4d90a042c365-config-data-custom\") pod \"barbican-api-7b899ccb8d-cfqnl\" (UID: \"14af4a81-8568-425c-a209-4d90a042c365\") " pod="openstack-kuttl-tests/barbican-api-7b899ccb8d-cfqnl" Jan 20 17:18:43 crc kubenswrapper[4558]: I0120 17:18:43.257272 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/14af4a81-8568-425c-a209-4d90a042c365-internal-tls-certs\") pod \"barbican-api-7b899ccb8d-cfqnl\" (UID: \"14af4a81-8568-425c-a209-4d90a042c365\") " pod="openstack-kuttl-tests/barbican-api-7b899ccb8d-cfqnl" Jan 20 17:18:43 crc kubenswrapper[4558]: I0120 17:18:43.257586 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/14af4a81-8568-425c-a209-4d90a042c365-public-tls-certs\") pod \"barbican-api-7b899ccb8d-cfqnl\" (UID: \"14af4a81-8568-425c-a209-4d90a042c365\") " pod="openstack-kuttl-tests/barbican-api-7b899ccb8d-cfqnl" Jan 20 17:18:43 crc kubenswrapper[4558]: I0120 17:18:43.262369 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lbgv5\" (UniqueName: \"kubernetes.io/projected/14af4a81-8568-425c-a209-4d90a042c365-kube-api-access-lbgv5\") pod \"barbican-api-7b899ccb8d-cfqnl\" (UID: \"14af4a81-8568-425c-a209-4d90a042c365\") " pod="openstack-kuttl-tests/barbican-api-7b899ccb8d-cfqnl" Jan 20 17:18:43 crc kubenswrapper[4558]: I0120 17:18:43.267740 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-api-7b899ccb8d-cfqnl" Jan 20 17:18:43 crc kubenswrapper[4558]: I0120 17:18:43.426064 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:18:43 crc kubenswrapper[4558]: I0120 17:18:43.444659 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-db-sync-682tw" Jan 20 17:18:43 crc kubenswrapper[4558]: I0120 17:18:43.645092 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/5659e8f8-3581-4ecf-a2ee-0c6d044b17e2-config\") pod \"5659e8f8-3581-4ecf-a2ee-0c6d044b17e2\" (UID: \"5659e8f8-3581-4ecf-a2ee-0c6d044b17e2\") " Jan 20 17:18:43 crc kubenswrapper[4558]: I0120 17:18:43.645155 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n5cbl\" (UniqueName: \"kubernetes.io/projected/5659e8f8-3581-4ecf-a2ee-0c6d044b17e2-kube-api-access-n5cbl\") pod \"5659e8f8-3581-4ecf-a2ee-0c6d044b17e2\" (UID: \"5659e8f8-3581-4ecf-a2ee-0c6d044b17e2\") " Jan 20 17:18:43 crc kubenswrapper[4558]: I0120 17:18:43.645325 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5659e8f8-3581-4ecf-a2ee-0c6d044b17e2-combined-ca-bundle\") pod \"5659e8f8-3581-4ecf-a2ee-0c6d044b17e2\" (UID: \"5659e8f8-3581-4ecf-a2ee-0c6d044b17e2\") " Jan 20 17:18:43 crc kubenswrapper[4558]: I0120 17:18:43.654348 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5659e8f8-3581-4ecf-a2ee-0c6d044b17e2-kube-api-access-n5cbl" (OuterVolumeSpecName: "kube-api-access-n5cbl") pod "5659e8f8-3581-4ecf-a2ee-0c6d044b17e2" (UID: "5659e8f8-3581-4ecf-a2ee-0c6d044b17e2"). InnerVolumeSpecName "kube-api-access-n5cbl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:18:43 crc kubenswrapper[4558]: I0120 17:18:43.683565 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5659e8f8-3581-4ecf-a2ee-0c6d044b17e2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5659e8f8-3581-4ecf-a2ee-0c6d044b17e2" (UID: "5659e8f8-3581-4ecf-a2ee-0c6d044b17e2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:18:43 crc kubenswrapper[4558]: I0120 17:18:43.732112 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5659e8f8-3581-4ecf-a2ee-0c6d044b17e2-config" (OuterVolumeSpecName: "config") pod "5659e8f8-3581-4ecf-a2ee-0c6d044b17e2" (UID: "5659e8f8-3581-4ecf-a2ee-0c6d044b17e2"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:18:43 crc kubenswrapper[4558]: I0120 17:18:43.750904 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5659e8f8-3581-4ecf-a2ee-0c6d044b17e2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:43 crc kubenswrapper[4558]: I0120 17:18:43.750928 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/5659e8f8-3581-4ecf-a2ee-0c6d044b17e2-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:43 crc kubenswrapper[4558]: I0120 17:18:43.750954 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n5cbl\" (UniqueName: \"kubernetes.io/projected/5659e8f8-3581-4ecf-a2ee-0c6d044b17e2-kube-api-access-n5cbl\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:43 crc kubenswrapper[4558]: I0120 17:18:43.798296 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-api-7b899ccb8d-cfqnl"] Jan 20 17:18:43 crc kubenswrapper[4558]: W0120 17:18:43.813317 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod14af4a81_8568_425c_a209_4d90a042c365.slice/crio-b742b24d2dcfdeae6c57c3ce7694ebef5ce4136a958d26792057a3321f96adce WatchSource:0}: Error finding container b742b24d2dcfdeae6c57c3ce7694ebef5ce4136a958d26792057a3321f96adce: Status 404 returned error can't find the container with id b742b24d2dcfdeae6c57c3ce7694ebef5ce4136a958d26792057a3321f96adce Jan 20 17:18:43 crc kubenswrapper[4558]: I0120 17:18:43.842729 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:18:43 crc kubenswrapper[4558]: I0120 17:18:43.852054 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bf21e7b5-8a7d-4009-98cc-f8bbe4150000-logs\") pod \"bf21e7b5-8a7d-4009-98cc-f8bbe4150000\" (UID: \"bf21e7b5-8a7d-4009-98cc-f8bbe4150000\") " Jan 20 17:18:43 crc kubenswrapper[4558]: I0120 17:18:43.852143 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bf21e7b5-8a7d-4009-98cc-f8bbe4150000-etc-machine-id\") pod \"bf21e7b5-8a7d-4009-98cc-f8bbe4150000\" (UID: \"bf21e7b5-8a7d-4009-98cc-f8bbe4150000\") " Jan 20 17:18:43 crc kubenswrapper[4558]: I0120 17:18:43.852192 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bf21e7b5-8a7d-4009-98cc-f8bbe4150000-scripts\") pod \"bf21e7b5-8a7d-4009-98cc-f8bbe4150000\" (UID: \"bf21e7b5-8a7d-4009-98cc-f8bbe4150000\") " Jan 20 17:18:43 crc kubenswrapper[4558]: I0120 17:18:43.852345 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bf21e7b5-8a7d-4009-98cc-f8bbe4150000-config-data-custom\") pod \"bf21e7b5-8a7d-4009-98cc-f8bbe4150000\" (UID: \"bf21e7b5-8a7d-4009-98cc-f8bbe4150000\") " Jan 20 17:18:43 crc kubenswrapper[4558]: I0120 17:18:43.852373 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf21e7b5-8a7d-4009-98cc-f8bbe4150000-combined-ca-bundle\") pod \"bf21e7b5-8a7d-4009-98cc-f8bbe4150000\" (UID: \"bf21e7b5-8a7d-4009-98cc-f8bbe4150000\") " Jan 20 17:18:43 crc kubenswrapper[4558]: I0120 
17:18:43.852411 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5rsng\" (UniqueName: \"kubernetes.io/projected/bf21e7b5-8a7d-4009-98cc-f8bbe4150000-kube-api-access-5rsng\") pod \"bf21e7b5-8a7d-4009-98cc-f8bbe4150000\" (UID: \"bf21e7b5-8a7d-4009-98cc-f8bbe4150000\") " Jan 20 17:18:43 crc kubenswrapper[4558]: I0120 17:18:43.852430 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf21e7b5-8a7d-4009-98cc-f8bbe4150000-config-data\") pod \"bf21e7b5-8a7d-4009-98cc-f8bbe4150000\" (UID: \"bf21e7b5-8a7d-4009-98cc-f8bbe4150000\") " Jan 20 17:18:43 crc kubenswrapper[4558]: I0120 17:18:43.852842 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bf21e7b5-8a7d-4009-98cc-f8bbe4150000-logs" (OuterVolumeSpecName: "logs") pod "bf21e7b5-8a7d-4009-98cc-f8bbe4150000" (UID: "bf21e7b5-8a7d-4009-98cc-f8bbe4150000"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:18:43 crc kubenswrapper[4558]: I0120 17:18:43.856708 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bf21e7b5-8a7d-4009-98cc-f8bbe4150000-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "bf21e7b5-8a7d-4009-98cc-f8bbe4150000" (UID: "bf21e7b5-8a7d-4009-98cc-f8bbe4150000"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:18:43 crc kubenswrapper[4558]: I0120 17:18:43.863321 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf21e7b5-8a7d-4009-98cc-f8bbe4150000-scripts" (OuterVolumeSpecName: "scripts") pod "bf21e7b5-8a7d-4009-98cc-f8bbe4150000" (UID: "bf21e7b5-8a7d-4009-98cc-f8bbe4150000"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:18:43 crc kubenswrapper[4558]: I0120 17:18:43.863497 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf21e7b5-8a7d-4009-98cc-f8bbe4150000-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "bf21e7b5-8a7d-4009-98cc-f8bbe4150000" (UID: "bf21e7b5-8a7d-4009-98cc-f8bbe4150000"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:18:43 crc kubenswrapper[4558]: I0120 17:18:43.870308 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf21e7b5-8a7d-4009-98cc-f8bbe4150000-kube-api-access-5rsng" (OuterVolumeSpecName: "kube-api-access-5rsng") pod "bf21e7b5-8a7d-4009-98cc-f8bbe4150000" (UID: "bf21e7b5-8a7d-4009-98cc-f8bbe4150000"). InnerVolumeSpecName "kube-api-access-5rsng". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:18:43 crc kubenswrapper[4558]: I0120 17:18:43.920670 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf21e7b5-8a7d-4009-98cc-f8bbe4150000-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bf21e7b5-8a7d-4009-98cc-f8bbe4150000" (UID: "bf21e7b5-8a7d-4009-98cc-f8bbe4150000"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:18:43 crc kubenswrapper[4558]: I0120 17:18:43.932113 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf21e7b5-8a7d-4009-98cc-f8bbe4150000-config-data" (OuterVolumeSpecName: "config-data") pod "bf21e7b5-8a7d-4009-98cc-f8bbe4150000" (UID: "bf21e7b5-8a7d-4009-98cc-f8bbe4150000"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:18:43 crc kubenswrapper[4558]: I0120 17:18:43.955769 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/bf21e7b5-8a7d-4009-98cc-f8bbe4150000-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:43 crc kubenswrapper[4558]: I0120 17:18:43.955817 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf21e7b5-8a7d-4009-98cc-f8bbe4150000-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:43 crc kubenswrapper[4558]: I0120 17:18:43.955830 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5rsng\" (UniqueName: \"kubernetes.io/projected/bf21e7b5-8a7d-4009-98cc-f8bbe4150000-kube-api-access-5rsng\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:43 crc kubenswrapper[4558]: I0120 17:18:43.955844 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf21e7b5-8a7d-4009-98cc-f8bbe4150000-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:43 crc kubenswrapper[4558]: I0120 17:18:43.955855 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bf21e7b5-8a7d-4009-98cc-f8bbe4150000-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:43 crc kubenswrapper[4558]: I0120 17:18:43.955864 4558 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/bf21e7b5-8a7d-4009-98cc-f8bbe4150000-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:43 crc kubenswrapper[4558]: I0120 17:18:43.955873 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bf21e7b5-8a7d-4009-98cc-f8bbe4150000-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.159324 4558 generic.go:334] "Generic (PLEG): container finished" podID="6bc4e0a0-7c1f-4b44-8724-dd6fa3f41d1d" containerID="f72fc1315a1244479b03514e7e7f276969fbdaf21c95b987015c41f6692d09d2" exitCode=0 Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.159398 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4bc72" event={"ID":"6bc4e0a0-7c1f-4b44-8724-dd6fa3f41d1d","Type":"ContainerDied","Data":"f72fc1315a1244479b03514e7e7f276969fbdaf21c95b987015c41f6692d09d2"} Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.171893 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-7b899ccb8d-cfqnl" event={"ID":"14af4a81-8568-425c-a209-4d90a042c365","Type":"ContainerStarted","Data":"c09143ce6ec1936f6f2c98c148ac4afc51f7b093ad7e905e871ff92ac19dfd9f"} Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.172229 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-7b899ccb8d-cfqnl" 
event={"ID":"14af4a81-8568-425c-a209-4d90a042c365","Type":"ContainerStarted","Data":"b742b24d2dcfdeae6c57c3ce7694ebef5ce4136a958d26792057a3321f96adce"} Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.175559 4558 generic.go:334] "Generic (PLEG): container finished" podID="bf21e7b5-8a7d-4009-98cc-f8bbe4150000" containerID="4db25d25ca4cc1e4675ba6c19698d9ed43d213338095bc6a344265062fee9b73" exitCode=0 Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.175597 4558 generic.go:334] "Generic (PLEG): container finished" podID="bf21e7b5-8a7d-4009-98cc-f8bbe4150000" containerID="3cee2b98d305e1a2cdcd226de3670be5d5e874f44424bc2f6f0c585dd44af77c" exitCode=143 Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.175722 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.176309 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"bf21e7b5-8a7d-4009-98cc-f8bbe4150000","Type":"ContainerDied","Data":"4db25d25ca4cc1e4675ba6c19698d9ed43d213338095bc6a344265062fee9b73"} Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.176449 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"bf21e7b5-8a7d-4009-98cc-f8bbe4150000","Type":"ContainerDied","Data":"3cee2b98d305e1a2cdcd226de3670be5d5e874f44424bc2f6f0c585dd44af77c"} Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.176518 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"bf21e7b5-8a7d-4009-98cc-f8bbe4150000","Type":"ContainerDied","Data":"674402ebd5ca164ebec58b19dc921489355cff4c1987778ccec4029c5551f803"} Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.176618 4558 scope.go:117] "RemoveContainer" containerID="4db25d25ca4cc1e4675ba6c19698d9ed43d213338095bc6a344265062fee9b73" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.178476 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-5585fd7cd8-gk5qt" event={"ID":"ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9","Type":"ContainerStarted","Data":"b488e52ae9a816a6a1a205b5450c535dd59fdb90d6345dcef8eb1e186a1d90c5"} Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.178571 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/keystone-5585fd7cd8-gk5qt" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.189687 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-sync-682tw" event={"ID":"5659e8f8-3581-4ecf-a2ee-0c6d044b17e2","Type":"ContainerDied","Data":"a14511826e3b186473ec8515d5d8898fa7add3a63377d2f29e8df73a36c70e15"} Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.189733 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a14511826e3b186473ec8515d5d8898fa7add3a63377d2f29e8df73a36c70e15" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.189801 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-db-sync-682tw" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.190788 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.190869 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.191121 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.192119 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.216497 4558 scope.go:117] "RemoveContainer" containerID="3cee2b98d305e1a2cdcd226de3670be5d5e874f44424bc2f6f0c585dd44af77c" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.220214 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/keystone-5585fd7cd8-gk5qt" podStartSLOduration=2.2201940589999998 podStartE2EDuration="2.220194059s" podCreationTimestamp="2026-01-20 17:18:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:18:44.203063215 +0000 UTC m=+2217.963401182" watchObservedRunningTime="2026-01-20 17:18:44.220194059 +0000 UTC m=+2217.980532026" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.262573 4558 scope.go:117] "RemoveContainer" containerID="4db25d25ca4cc1e4675ba6c19698d9ed43d213338095bc6a344265062fee9b73" Jan 20 17:18:44 crc kubenswrapper[4558]: E0120 17:18:44.263144 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4db25d25ca4cc1e4675ba6c19698d9ed43d213338095bc6a344265062fee9b73\": container with ID starting with 4db25d25ca4cc1e4675ba6c19698d9ed43d213338095bc6a344265062fee9b73 not found: ID does not exist" containerID="4db25d25ca4cc1e4675ba6c19698d9ed43d213338095bc6a344265062fee9b73" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.263195 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4db25d25ca4cc1e4675ba6c19698d9ed43d213338095bc6a344265062fee9b73"} err="failed to get container status \"4db25d25ca4cc1e4675ba6c19698d9ed43d213338095bc6a344265062fee9b73\": rpc error: code = NotFound desc = could not find container \"4db25d25ca4cc1e4675ba6c19698d9ed43d213338095bc6a344265062fee9b73\": container with ID starting with 4db25d25ca4cc1e4675ba6c19698d9ed43d213338095bc6a344265062fee9b73 not found: ID does not exist" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.263219 4558 scope.go:117] "RemoveContainer" containerID="3cee2b98d305e1a2cdcd226de3670be5d5e874f44424bc2f6f0c585dd44af77c" Jan 20 17:18:44 crc kubenswrapper[4558]: E0120 17:18:44.263535 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3cee2b98d305e1a2cdcd226de3670be5d5e874f44424bc2f6f0c585dd44af77c\": container with ID starting with 3cee2b98d305e1a2cdcd226de3670be5d5e874f44424bc2f6f0c585dd44af77c not found: ID does not exist" containerID="3cee2b98d305e1a2cdcd226de3670be5d5e874f44424bc2f6f0c585dd44af77c" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.263561 4558 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3cee2b98d305e1a2cdcd226de3670be5d5e874f44424bc2f6f0c585dd44af77c"} err="failed to get container status \"3cee2b98d305e1a2cdcd226de3670be5d5e874f44424bc2f6f0c585dd44af77c\": rpc error: code = NotFound desc = could not find container \"3cee2b98d305e1a2cdcd226de3670be5d5e874f44424bc2f6f0c585dd44af77c\": container with ID starting with 3cee2b98d305e1a2cdcd226de3670be5d5e874f44424bc2f6f0c585dd44af77c not found: ID does not exist" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.263577 4558 scope.go:117] "RemoveContainer" containerID="4db25d25ca4cc1e4675ba6c19698d9ed43d213338095bc6a344265062fee9b73" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.263876 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4db25d25ca4cc1e4675ba6c19698d9ed43d213338095bc6a344265062fee9b73"} err="failed to get container status \"4db25d25ca4cc1e4675ba6c19698d9ed43d213338095bc6a344265062fee9b73\": rpc error: code = NotFound desc = could not find container \"4db25d25ca4cc1e4675ba6c19698d9ed43d213338095bc6a344265062fee9b73\": container with ID starting with 4db25d25ca4cc1e4675ba6c19698d9ed43d213338095bc6a344265062fee9b73 not found: ID does not exist" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.263897 4558 scope.go:117] "RemoveContainer" containerID="3cee2b98d305e1a2cdcd226de3670be5d5e874f44424bc2f6f0c585dd44af77c" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.264140 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3cee2b98d305e1a2cdcd226de3670be5d5e874f44424bc2f6f0c585dd44af77c"} err="failed to get container status \"3cee2b98d305e1a2cdcd226de3670be5d5e874f44424bc2f6f0c585dd44af77c\": rpc error: code = NotFound desc = could not find container \"3cee2b98d305e1a2cdcd226de3670be5d5e874f44424bc2f6f0c585dd44af77c\": container with ID starting with 3cee2b98d305e1a2cdcd226de3670be5d5e874f44424bc2f6f0c585dd44af77c not found: ID does not exist" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.305659 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.319218 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.324324 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:18:44 crc kubenswrapper[4558]: E0120 17:18:44.324868 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5659e8f8-3581-4ecf-a2ee-0c6d044b17e2" containerName="neutron-db-sync" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.324888 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="5659e8f8-3581-4ecf-a2ee-0c6d044b17e2" containerName="neutron-db-sync" Jan 20 17:18:44 crc kubenswrapper[4558]: E0120 17:18:44.324908 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf21e7b5-8a7d-4009-98cc-f8bbe4150000" containerName="cinder-api-log" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.324915 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf21e7b5-8a7d-4009-98cc-f8bbe4150000" containerName="cinder-api-log" Jan 20 17:18:44 crc kubenswrapper[4558]: E0120 17:18:44.324951 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf21e7b5-8a7d-4009-98cc-f8bbe4150000" containerName="cinder-api" Jan 20 17:18:44 
crc kubenswrapper[4558]: I0120 17:18:44.324958 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf21e7b5-8a7d-4009-98cc-f8bbe4150000" containerName="cinder-api" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.325147 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="5659e8f8-3581-4ecf-a2ee-0c6d044b17e2" containerName="neutron-db-sync" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.325181 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="bf21e7b5-8a7d-4009-98cc-f8bbe4150000" containerName="cinder-api-log" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.325196 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="bf21e7b5-8a7d-4009-98cc-f8bbe4150000" containerName="cinder-api" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.326269 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.330803 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-api-config-data" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.334402 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-cinder-internal-svc" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.334605 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-cinder-public-svc" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.341813 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.366209 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-86cc899657-ftskx"] Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.368036 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-86cc899657-ftskx" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.373275 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-neutron-ovndbs" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.373532 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-httpd-config" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.373663 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-config" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.373793 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-neutron-dockercfg-s7sh5" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.386828 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-86cc899657-ftskx"] Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.479594 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b6b8c27-bb47-4423-92c4-70306ec7ef9e-combined-ca-bundle\") pod \"neutron-86cc899657-ftskx\" (UID: \"4b6b8c27-bb47-4423-92c4-70306ec7ef9e\") " pod="openstack-kuttl-tests/neutron-86cc899657-ftskx" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.479864 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e72083f2-d324-432e-9bb0-5f44f2023489-config-data-custom\") pod \"cinder-api-0\" (UID: \"e72083f2-d324-432e-9bb0-5f44f2023489\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.479949 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e72083f2-d324-432e-9bb0-5f44f2023489-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"e72083f2-d324-432e-9bb0-5f44f2023489\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.480011 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e72083f2-d324-432e-9bb0-5f44f2023489-public-tls-certs\") pod \"cinder-api-0\" (UID: \"e72083f2-d324-432e-9bb0-5f44f2023489\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.480083 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bvwng\" (UniqueName: \"kubernetes.io/projected/4b6b8c27-bb47-4423-92c4-70306ec7ef9e-kube-api-access-bvwng\") pod \"neutron-86cc899657-ftskx\" (UID: \"4b6b8c27-bb47-4423-92c4-70306ec7ef9e\") " pod="openstack-kuttl-tests/neutron-86cc899657-ftskx" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.480175 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e72083f2-d324-432e-9bb0-5f44f2023489-config-data\") pod \"cinder-api-0\" (UID: \"e72083f2-d324-432e-9bb0-5f44f2023489\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.480258 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/e72083f2-d324-432e-9bb0-5f44f2023489-logs\") pod \"cinder-api-0\" (UID: \"e72083f2-d324-432e-9bb0-5f44f2023489\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.480328 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/4b6b8c27-bb47-4423-92c4-70306ec7ef9e-ovndb-tls-certs\") pod \"neutron-86cc899657-ftskx\" (UID: \"4b6b8c27-bb47-4423-92c4-70306ec7ef9e\") " pod="openstack-kuttl-tests/neutron-86cc899657-ftskx" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.480487 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e72083f2-d324-432e-9bb0-5f44f2023489-etc-machine-id\") pod \"cinder-api-0\" (UID: \"e72083f2-d324-432e-9bb0-5f44f2023489\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.480570 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e72083f2-d324-432e-9bb0-5f44f2023489-scripts\") pod \"cinder-api-0\" (UID: \"e72083f2-d324-432e-9bb0-5f44f2023489\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.480633 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e72083f2-d324-432e-9bb0-5f44f2023489-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"e72083f2-d324-432e-9bb0-5f44f2023489\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.480694 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8lt2s\" (UniqueName: \"kubernetes.io/projected/e72083f2-d324-432e-9bb0-5f44f2023489-kube-api-access-8lt2s\") pod \"cinder-api-0\" (UID: \"e72083f2-d324-432e-9bb0-5f44f2023489\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.480758 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/4b6b8c27-bb47-4423-92c4-70306ec7ef9e-config\") pod \"neutron-86cc899657-ftskx\" (UID: \"4b6b8c27-bb47-4423-92c4-70306ec7ef9e\") " pod="openstack-kuttl-tests/neutron-86cc899657-ftskx" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.480819 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/4b6b8c27-bb47-4423-92c4-70306ec7ef9e-httpd-config\") pod \"neutron-86cc899657-ftskx\" (UID: \"4b6b8c27-bb47-4423-92c4-70306ec7ef9e\") " pod="openstack-kuttl-tests/neutron-86cc899657-ftskx" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.582325 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/4b6b8c27-bb47-4423-92c4-70306ec7ef9e-config\") pod \"neutron-86cc899657-ftskx\" (UID: \"4b6b8c27-bb47-4423-92c4-70306ec7ef9e\") " pod="openstack-kuttl-tests/neutron-86cc899657-ftskx" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.582373 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: 
\"kubernetes.io/secret/4b6b8c27-bb47-4423-92c4-70306ec7ef9e-httpd-config\") pod \"neutron-86cc899657-ftskx\" (UID: \"4b6b8c27-bb47-4423-92c4-70306ec7ef9e\") " pod="openstack-kuttl-tests/neutron-86cc899657-ftskx" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.582478 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b6b8c27-bb47-4423-92c4-70306ec7ef9e-combined-ca-bundle\") pod \"neutron-86cc899657-ftskx\" (UID: \"4b6b8c27-bb47-4423-92c4-70306ec7ef9e\") " pod="openstack-kuttl-tests/neutron-86cc899657-ftskx" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.582499 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e72083f2-d324-432e-9bb0-5f44f2023489-config-data-custom\") pod \"cinder-api-0\" (UID: \"e72083f2-d324-432e-9bb0-5f44f2023489\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.582527 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e72083f2-d324-432e-9bb0-5f44f2023489-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"e72083f2-d324-432e-9bb0-5f44f2023489\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.582544 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e72083f2-d324-432e-9bb0-5f44f2023489-public-tls-certs\") pod \"cinder-api-0\" (UID: \"e72083f2-d324-432e-9bb0-5f44f2023489\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.582561 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bvwng\" (UniqueName: \"kubernetes.io/projected/4b6b8c27-bb47-4423-92c4-70306ec7ef9e-kube-api-access-bvwng\") pod \"neutron-86cc899657-ftskx\" (UID: \"4b6b8c27-bb47-4423-92c4-70306ec7ef9e\") " pod="openstack-kuttl-tests/neutron-86cc899657-ftskx" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.582601 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e72083f2-d324-432e-9bb0-5f44f2023489-config-data\") pod \"cinder-api-0\" (UID: \"e72083f2-d324-432e-9bb0-5f44f2023489\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.582620 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e72083f2-d324-432e-9bb0-5f44f2023489-logs\") pod \"cinder-api-0\" (UID: \"e72083f2-d324-432e-9bb0-5f44f2023489\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.582651 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/4b6b8c27-bb47-4423-92c4-70306ec7ef9e-ovndb-tls-certs\") pod \"neutron-86cc899657-ftskx\" (UID: \"4b6b8c27-bb47-4423-92c4-70306ec7ef9e\") " pod="openstack-kuttl-tests/neutron-86cc899657-ftskx" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.582725 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e72083f2-d324-432e-9bb0-5f44f2023489-etc-machine-id\") pod \"cinder-api-0\" (UID: 
\"e72083f2-d324-432e-9bb0-5f44f2023489\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.582755 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e72083f2-d324-432e-9bb0-5f44f2023489-scripts\") pod \"cinder-api-0\" (UID: \"e72083f2-d324-432e-9bb0-5f44f2023489\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.582770 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e72083f2-d324-432e-9bb0-5f44f2023489-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"e72083f2-d324-432e-9bb0-5f44f2023489\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.582802 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8lt2s\" (UniqueName: \"kubernetes.io/projected/e72083f2-d324-432e-9bb0-5f44f2023489-kube-api-access-8lt2s\") pod \"cinder-api-0\" (UID: \"e72083f2-d324-432e-9bb0-5f44f2023489\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.598803 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e72083f2-d324-432e-9bb0-5f44f2023489-logs\") pod \"cinder-api-0\" (UID: \"e72083f2-d324-432e-9bb0-5f44f2023489\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.600315 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e72083f2-d324-432e-9bb0-5f44f2023489-etc-machine-id\") pod \"cinder-api-0\" (UID: \"e72083f2-d324-432e-9bb0-5f44f2023489\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.602482 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/4b6b8c27-bb47-4423-92c4-70306ec7ef9e-config\") pod \"neutron-86cc899657-ftskx\" (UID: \"4b6b8c27-bb47-4423-92c4-70306ec7ef9e\") " pod="openstack-kuttl-tests/neutron-86cc899657-ftskx" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.603001 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e72083f2-d324-432e-9bb0-5f44f2023489-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"e72083f2-d324-432e-9bb0-5f44f2023489\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.603608 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/4b6b8c27-bb47-4423-92c4-70306ec7ef9e-ovndb-tls-certs\") pod \"neutron-86cc899657-ftskx\" (UID: \"4b6b8c27-bb47-4423-92c4-70306ec7ef9e\") " pod="openstack-kuttl-tests/neutron-86cc899657-ftskx" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.608908 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e72083f2-d324-432e-9bb0-5f44f2023489-config-data\") pod \"cinder-api-0\" (UID: \"e72083f2-d324-432e-9bb0-5f44f2023489\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.609572 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/e72083f2-d324-432e-9bb0-5f44f2023489-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"e72083f2-d324-432e-9bb0-5f44f2023489\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.610030 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/4b6b8c27-bb47-4423-92c4-70306ec7ef9e-httpd-config\") pod \"neutron-86cc899657-ftskx\" (UID: \"4b6b8c27-bb47-4423-92c4-70306ec7ef9e\") " pod="openstack-kuttl-tests/neutron-86cc899657-ftskx" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.610409 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e72083f2-d324-432e-9bb0-5f44f2023489-config-data-custom\") pod \"cinder-api-0\" (UID: \"e72083f2-d324-432e-9bb0-5f44f2023489\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.610436 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e72083f2-d324-432e-9bb0-5f44f2023489-scripts\") pod \"cinder-api-0\" (UID: \"e72083f2-d324-432e-9bb0-5f44f2023489\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.610522 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bvwng\" (UniqueName: \"kubernetes.io/projected/4b6b8c27-bb47-4423-92c4-70306ec7ef9e-kube-api-access-bvwng\") pod \"neutron-86cc899657-ftskx\" (UID: \"4b6b8c27-bb47-4423-92c4-70306ec7ef9e\") " pod="openstack-kuttl-tests/neutron-86cc899657-ftskx" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.612662 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8lt2s\" (UniqueName: \"kubernetes.io/projected/e72083f2-d324-432e-9bb0-5f44f2023489-kube-api-access-8lt2s\") pod \"cinder-api-0\" (UID: \"e72083f2-d324-432e-9bb0-5f44f2023489\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.615316 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e72083f2-d324-432e-9bb0-5f44f2023489-public-tls-certs\") pod \"cinder-api-0\" (UID: \"e72083f2-d324-432e-9bb0-5f44f2023489\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.636322 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b6b8c27-bb47-4423-92c4-70306ec7ef9e-combined-ca-bundle\") pod \"neutron-86cc899657-ftskx\" (UID: \"4b6b8c27-bb47-4423-92c4-70306ec7ef9e\") " pod="openstack-kuttl-tests/neutron-86cc899657-ftskx" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.645143 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf21e7b5-8a7d-4009-98cc-f8bbe4150000" path="/var/lib/kubelet/pods/bf21e7b5-8a7d-4009-98cc-f8bbe4150000/volumes" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.661675 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:18:44 crc kubenswrapper[4558]: I0120 17:18:44.717266 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-86cc899657-ftskx" Jan 20 17:18:45 crc kubenswrapper[4558]: I0120 17:18:45.206569 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4bc72" event={"ID":"6bc4e0a0-7c1f-4b44-8724-dd6fa3f41d1d","Type":"ContainerStarted","Data":"f7d8723a08de461423cea85e9e4652a6f1771077ccdcb61ce6da413ca9834535"} Jan 20 17:18:45 crc kubenswrapper[4558]: I0120 17:18:45.209057 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-7b899ccb8d-cfqnl" event={"ID":"14af4a81-8568-425c-a209-4d90a042c365","Type":"ContainerStarted","Data":"e46adee8ea97e0c6d658d436cc56ebc7b96b6a0ab6ea4685475eaa56dee6f641"} Jan 20 17:18:45 crc kubenswrapper[4558]: I0120 17:18:45.209537 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/barbican-api-7b899ccb8d-cfqnl" Jan 20 17:18:45 crc kubenswrapper[4558]: I0120 17:18:45.209569 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/barbican-api-7b899ccb8d-cfqnl" Jan 20 17:18:45 crc kubenswrapper[4558]: I0120 17:18:45.224499 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-4bc72" podStartSLOduration=1.567803711 podStartE2EDuration="4.224393898s" podCreationTimestamp="2026-01-20 17:18:41 +0000 UTC" firstStartedPulling="2026-01-20 17:18:42.114354285 +0000 UTC m=+2215.874692252" lastFinishedPulling="2026-01-20 17:18:44.770944471 +0000 UTC m=+2218.531282439" observedRunningTime="2026-01-20 17:18:45.219835615 +0000 UTC m=+2218.980173583" watchObservedRunningTime="2026-01-20 17:18:45.224393898 +0000 UTC m=+2218.984731865" Jan 20 17:18:45 crc kubenswrapper[4558]: I0120 17:18:45.254990 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-api-7b899ccb8d-cfqnl" podStartSLOduration=3.254967833 podStartE2EDuration="3.254967833s" podCreationTimestamp="2026-01-20 17:18:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:18:45.24852659 +0000 UTC m=+2219.008864557" watchObservedRunningTime="2026-01-20 17:18:45.254967833 +0000 UTC m=+2219.015305800" Jan 20 17:18:45 crc kubenswrapper[4558]: W0120 17:18:45.345557 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode72083f2_d324_432e_9bb0_5f44f2023489.slice/crio-285b3478741b1d12d0aa4c1b7f7bc6f97edc86dc49a11d1a1e1b86cd0183b202 WatchSource:0}: Error finding container 285b3478741b1d12d0aa4c1b7f7bc6f97edc86dc49a11d1a1e1b86cd0183b202: Status 404 returned error can't find the container with id 285b3478741b1d12d0aa4c1b7f7bc6f97edc86dc49a11d1a1e1b86cd0183b202 Jan 20 17:18:45 crc kubenswrapper[4558]: I0120 17:18:45.366142 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:18:45 crc kubenswrapper[4558]: I0120 17:18:45.376208 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-86cc899657-ftskx"] Jan 20 17:18:46 crc kubenswrapper[4558]: I0120 17:18:46.227906 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-86cc899657-ftskx" event={"ID":"4b6b8c27-bb47-4423-92c4-70306ec7ef9e","Type":"ContainerStarted","Data":"e1afb75acea050bdb63b13351b8a84c1d2fac666461bccd1cbaea618b932d880"} Jan 20 17:18:46 crc 
kubenswrapper[4558]: I0120 17:18:46.228408 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-86cc899657-ftskx" event={"ID":"4b6b8c27-bb47-4423-92c4-70306ec7ef9e","Type":"ContainerStarted","Data":"0c2098a04697cc264fc23d7f9d0254d28b8e9e3cd5ecaa284c8d7ad393d63df4"} Jan 20 17:18:46 crc kubenswrapper[4558]: I0120 17:18:46.234815 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"e72083f2-d324-432e-9bb0-5f44f2023489","Type":"ContainerStarted","Data":"e1f212cc615420dc0adbf996976347dda8d27a4a5471651930f4f497d67e903f"} Jan 20 17:18:46 crc kubenswrapper[4558]: I0120 17:18:46.234848 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"e72083f2-d324-432e-9bb0-5f44f2023489","Type":"ContainerStarted","Data":"285b3478741b1d12d0aa4c1b7f7bc6f97edc86dc49a11d1a1e1b86cd0183b202"} Jan 20 17:18:46 crc kubenswrapper[4558]: I0120 17:18:46.234884 4558 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 20 17:18:46 crc kubenswrapper[4558]: I0120 17:18:46.234906 4558 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 20 17:18:46 crc kubenswrapper[4558]: I0120 17:18:46.323293 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/barbican-api-678474f9b-4hxj7" Jan 20 17:18:47 crc kubenswrapper[4558]: I0120 17:18:47.002782 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/barbican-api-678474f9b-4hxj7" Jan 20 17:18:47 crc kubenswrapper[4558]: I0120 17:18:47.126229 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:18:47 crc kubenswrapper[4558]: I0120 17:18:47.126378 4558 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 20 17:18:47 crc kubenswrapper[4558]: I0120 17:18:47.245869 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-86cc899657-ftskx" event={"ID":"4b6b8c27-bb47-4423-92c4-70306ec7ef9e","Type":"ContainerStarted","Data":"d58b041275b1b3dbc0399d88808986db01f1af4ee195d068ab83c28f4898a194"} Jan 20 17:18:47 crc kubenswrapper[4558]: I0120 17:18:47.247050 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/neutron-86cc899657-ftskx" Jan 20 17:18:47 crc kubenswrapper[4558]: I0120 17:18:47.248132 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"e72083f2-d324-432e-9bb0-5f44f2023489","Type":"ContainerStarted","Data":"76ea6272fca9c46b59f2db4c5161f6d34060b5a40bd0d5c5b65cea949a4dad9e"} Jan 20 17:18:47 crc kubenswrapper[4558]: I0120 17:18:47.268744 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/neutron-86cc899657-ftskx" podStartSLOduration=3.268724183 podStartE2EDuration="3.268724183s" podCreationTimestamp="2026-01-20 17:18:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:18:47.262638709 +0000 UTC m=+2221.022976676" watchObservedRunningTime="2026-01-20 17:18:47.268724183 +0000 UTC m=+2221.029062149" Jan 20 17:18:47 crc kubenswrapper[4558]: I0120 17:18:47.299443 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/cinder-api-0" podStartSLOduration=3.299423673 
podStartE2EDuration="3.299423673s" podCreationTimestamp="2026-01-20 17:18:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:18:47.291542702 +0000 UTC m=+2221.051880670" watchObservedRunningTime="2026-01-20 17:18:47.299423673 +0000 UTC m=+2221.059761640" Jan 20 17:18:47 crc kubenswrapper[4558]: I0120 17:18:47.507441 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:18:47 crc kubenswrapper[4558]: I0120 17:18:47.507566 4558 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 20 17:18:47 crc kubenswrapper[4558]: I0120 17:18:47.515413 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:18:47 crc kubenswrapper[4558]: I0120 17:18:47.734833 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:18:48 crc kubenswrapper[4558]: I0120 17:18:48.249419 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-5c7bf5978d-d2t27"] Jan 20 17:18:48 crc kubenswrapper[4558]: I0120 17:18:48.251498 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-5c7bf5978d-d2t27" Jan 20 17:18:48 crc kubenswrapper[4558]: I0120 17:18:48.256471 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-neutron-internal-svc" Jan 20 17:18:48 crc kubenswrapper[4558]: I0120 17:18:48.257072 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:18:48 crc kubenswrapper[4558]: I0120 17:18:48.259007 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-neutron-public-svc" Jan 20 17:18:48 crc kubenswrapper[4558]: I0120 17:18:48.269768 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-5c7bf5978d-d2t27"] Jan 20 17:18:48 crc kubenswrapper[4558]: I0120 17:18:48.411485 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/abb17bba-a50c-4b0f-941b-3a09a8bcac1e-config\") pod \"neutron-5c7bf5978d-d2t27\" (UID: \"abb17bba-a50c-4b0f-941b-3a09a8bcac1e\") " pod="openstack-kuttl-tests/neutron-5c7bf5978d-d2t27" Jan 20 17:18:48 crc kubenswrapper[4558]: I0120 17:18:48.411887 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/abb17bba-a50c-4b0f-941b-3a09a8bcac1e-combined-ca-bundle\") pod \"neutron-5c7bf5978d-d2t27\" (UID: \"abb17bba-a50c-4b0f-941b-3a09a8bcac1e\") " pod="openstack-kuttl-tests/neutron-5c7bf5978d-d2t27" Jan 20 17:18:48 crc kubenswrapper[4558]: I0120 17:18:48.412051 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zrtcb\" (UniqueName: \"kubernetes.io/projected/abb17bba-a50c-4b0f-941b-3a09a8bcac1e-kube-api-access-zrtcb\") pod \"neutron-5c7bf5978d-d2t27\" (UID: \"abb17bba-a50c-4b0f-941b-3a09a8bcac1e\") " pod="openstack-kuttl-tests/neutron-5c7bf5978d-d2t27" Jan 20 17:18:48 crc kubenswrapper[4558]: I0120 17:18:48.412465 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/abb17bba-a50c-4b0f-941b-3a09a8bcac1e-ovndb-tls-certs\") pod \"neutron-5c7bf5978d-d2t27\" (UID: \"abb17bba-a50c-4b0f-941b-3a09a8bcac1e\") " pod="openstack-kuttl-tests/neutron-5c7bf5978d-d2t27" Jan 20 17:18:48 crc kubenswrapper[4558]: I0120 17:18:48.412644 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/abb17bba-a50c-4b0f-941b-3a09a8bcac1e-internal-tls-certs\") pod \"neutron-5c7bf5978d-d2t27\" (UID: \"abb17bba-a50c-4b0f-941b-3a09a8bcac1e\") " pod="openstack-kuttl-tests/neutron-5c7bf5978d-d2t27" Jan 20 17:18:48 crc kubenswrapper[4558]: I0120 17:18:48.412740 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/abb17bba-a50c-4b0f-941b-3a09a8bcac1e-httpd-config\") pod \"neutron-5c7bf5978d-d2t27\" (UID: \"abb17bba-a50c-4b0f-941b-3a09a8bcac1e\") " pod="openstack-kuttl-tests/neutron-5c7bf5978d-d2t27" Jan 20 17:18:48 crc kubenswrapper[4558]: I0120 17:18:48.412904 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/abb17bba-a50c-4b0f-941b-3a09a8bcac1e-public-tls-certs\") pod \"neutron-5c7bf5978d-d2t27\" (UID: \"abb17bba-a50c-4b0f-941b-3a09a8bcac1e\") " pod="openstack-kuttl-tests/neutron-5c7bf5978d-d2t27" Jan 20 17:18:48 crc kubenswrapper[4558]: I0120 17:18:48.515622 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/abb17bba-a50c-4b0f-941b-3a09a8bcac1e-combined-ca-bundle\") pod \"neutron-5c7bf5978d-d2t27\" (UID: \"abb17bba-a50c-4b0f-941b-3a09a8bcac1e\") " pod="openstack-kuttl-tests/neutron-5c7bf5978d-d2t27" Jan 20 17:18:48 crc kubenswrapper[4558]: I0120 17:18:48.515730 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zrtcb\" (UniqueName: \"kubernetes.io/projected/abb17bba-a50c-4b0f-941b-3a09a8bcac1e-kube-api-access-zrtcb\") pod \"neutron-5c7bf5978d-d2t27\" (UID: \"abb17bba-a50c-4b0f-941b-3a09a8bcac1e\") " pod="openstack-kuttl-tests/neutron-5c7bf5978d-d2t27" Jan 20 17:18:48 crc kubenswrapper[4558]: I0120 17:18:48.515871 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/abb17bba-a50c-4b0f-941b-3a09a8bcac1e-ovndb-tls-certs\") pod \"neutron-5c7bf5978d-d2t27\" (UID: \"abb17bba-a50c-4b0f-941b-3a09a8bcac1e\") " pod="openstack-kuttl-tests/neutron-5c7bf5978d-d2t27" Jan 20 17:18:48 crc kubenswrapper[4558]: I0120 17:18:48.515899 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/abb17bba-a50c-4b0f-941b-3a09a8bcac1e-internal-tls-certs\") pod \"neutron-5c7bf5978d-d2t27\" (UID: \"abb17bba-a50c-4b0f-941b-3a09a8bcac1e\") " pod="openstack-kuttl-tests/neutron-5c7bf5978d-d2t27" Jan 20 17:18:48 crc kubenswrapper[4558]: I0120 17:18:48.515944 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/abb17bba-a50c-4b0f-941b-3a09a8bcac1e-httpd-config\") pod \"neutron-5c7bf5978d-d2t27\" (UID: \"abb17bba-a50c-4b0f-941b-3a09a8bcac1e\") " pod="openstack-kuttl-tests/neutron-5c7bf5978d-d2t27" Jan 20 17:18:48 crc kubenswrapper[4558]: I0120 17:18:48.516014 4558 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/abb17bba-a50c-4b0f-941b-3a09a8bcac1e-public-tls-certs\") pod \"neutron-5c7bf5978d-d2t27\" (UID: \"abb17bba-a50c-4b0f-941b-3a09a8bcac1e\") " pod="openstack-kuttl-tests/neutron-5c7bf5978d-d2t27" Jan 20 17:18:48 crc kubenswrapper[4558]: I0120 17:18:48.516052 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/abb17bba-a50c-4b0f-941b-3a09a8bcac1e-config\") pod \"neutron-5c7bf5978d-d2t27\" (UID: \"abb17bba-a50c-4b0f-941b-3a09a8bcac1e\") " pod="openstack-kuttl-tests/neutron-5c7bf5978d-d2t27" Jan 20 17:18:48 crc kubenswrapper[4558]: I0120 17:18:48.526097 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/abb17bba-a50c-4b0f-941b-3a09a8bcac1e-internal-tls-certs\") pod \"neutron-5c7bf5978d-d2t27\" (UID: \"abb17bba-a50c-4b0f-941b-3a09a8bcac1e\") " pod="openstack-kuttl-tests/neutron-5c7bf5978d-d2t27" Jan 20 17:18:48 crc kubenswrapper[4558]: I0120 17:18:48.526195 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/abb17bba-a50c-4b0f-941b-3a09a8bcac1e-config\") pod \"neutron-5c7bf5978d-d2t27\" (UID: \"abb17bba-a50c-4b0f-941b-3a09a8bcac1e\") " pod="openstack-kuttl-tests/neutron-5c7bf5978d-d2t27" Jan 20 17:18:48 crc kubenswrapper[4558]: I0120 17:18:48.528293 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/abb17bba-a50c-4b0f-941b-3a09a8bcac1e-httpd-config\") pod \"neutron-5c7bf5978d-d2t27\" (UID: \"abb17bba-a50c-4b0f-941b-3a09a8bcac1e\") " pod="openstack-kuttl-tests/neutron-5c7bf5978d-d2t27" Jan 20 17:18:48 crc kubenswrapper[4558]: I0120 17:18:48.529009 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/abb17bba-a50c-4b0f-941b-3a09a8bcac1e-public-tls-certs\") pod \"neutron-5c7bf5978d-d2t27\" (UID: \"abb17bba-a50c-4b0f-941b-3a09a8bcac1e\") " pod="openstack-kuttl-tests/neutron-5c7bf5978d-d2t27" Jan 20 17:18:48 crc kubenswrapper[4558]: I0120 17:18:48.531242 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/abb17bba-a50c-4b0f-941b-3a09a8bcac1e-combined-ca-bundle\") pod \"neutron-5c7bf5978d-d2t27\" (UID: \"abb17bba-a50c-4b0f-941b-3a09a8bcac1e\") " pod="openstack-kuttl-tests/neutron-5c7bf5978d-d2t27" Jan 20 17:18:48 crc kubenswrapper[4558]: I0120 17:18:48.531274 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/abb17bba-a50c-4b0f-941b-3a09a8bcac1e-ovndb-tls-certs\") pod \"neutron-5c7bf5978d-d2t27\" (UID: \"abb17bba-a50c-4b0f-941b-3a09a8bcac1e\") " pod="openstack-kuttl-tests/neutron-5c7bf5978d-d2t27" Jan 20 17:18:48 crc kubenswrapper[4558]: I0120 17:18:48.539938 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zrtcb\" (UniqueName: \"kubernetes.io/projected/abb17bba-a50c-4b0f-941b-3a09a8bcac1e-kube-api-access-zrtcb\") pod \"neutron-5c7bf5978d-d2t27\" (UID: \"abb17bba-a50c-4b0f-941b-3a09a8bcac1e\") " pod="openstack-kuttl-tests/neutron-5c7bf5978d-d2t27" Jan 20 17:18:48 crc kubenswrapper[4558]: I0120 17:18:48.566213 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-5c7bf5978d-d2t27" Jan 20 17:18:48 crc kubenswrapper[4558]: I0120 17:18:48.663459 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:18:48 crc kubenswrapper[4558]: I0120 17:18:48.710105 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:18:49 crc kubenswrapper[4558]: I0120 17:18:49.013762 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-5c7bf5978d-d2t27"] Jan 20 17:18:49 crc kubenswrapper[4558]: W0120 17:18:49.019516 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podabb17bba_a50c_4b0f_941b_3a09a8bcac1e.slice/crio-742418c85c932e77f7c12d61027061005c5117cb0365e2a4c5c45dab03dcd4ad WatchSource:0}: Error finding container 742418c85c932e77f7c12d61027061005c5117cb0365e2a4c5c45dab03dcd4ad: Status 404 returned error can't find the container with id 742418c85c932e77f7c12d61027061005c5117cb0365e2a4c5c45dab03dcd4ad Jan 20 17:18:49 crc kubenswrapper[4558]: I0120 17:18:49.266806 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-5c7bf5978d-d2t27" event={"ID":"abb17bba-a50c-4b0f-941b-3a09a8bcac1e","Type":"ContainerStarted","Data":"fb57b9facd8c8baac31528678e058b6e227a981807b81524108e8fec27a589a1"} Jan 20 17:18:49 crc kubenswrapper[4558]: I0120 17:18:49.267146 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-5c7bf5978d-d2t27" event={"ID":"abb17bba-a50c-4b0f-941b-3a09a8bcac1e","Type":"ContainerStarted","Data":"742418c85c932e77f7c12d61027061005c5117cb0365e2a4c5c45dab03dcd4ad"} Jan 20 17:18:49 crc kubenswrapper[4558]: I0120 17:18:49.267370 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-scheduler-0" podUID="e7f15828-3902-4413-b35a-b33487057ea6" containerName="cinder-scheduler" containerID="cri-o://2f4a6e34fee542fde92457779ab42b75ad188b49ed973211487d82f9b9c24284" gracePeriod=30 Jan 20 17:18:49 crc kubenswrapper[4558]: I0120 17:18:49.267809 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-scheduler-0" podUID="e7f15828-3902-4413-b35a-b33487057ea6" containerName="probe" containerID="cri-o://308537a807ea98cf0cb1db26e2083a6fc000a2f81a9f211aa0566066bc04752c" gracePeriod=30 Jan 20 17:18:49 crc kubenswrapper[4558]: I0120 17:18:49.575854 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/barbican-api-7b899ccb8d-cfqnl" Jan 20 17:18:50 crc kubenswrapper[4558]: I0120 17:18:50.279084 4558 generic.go:334] "Generic (PLEG): container finished" podID="e7f15828-3902-4413-b35a-b33487057ea6" containerID="308537a807ea98cf0cb1db26e2083a6fc000a2f81a9f211aa0566066bc04752c" exitCode=0 Jan 20 17:18:50 crc kubenswrapper[4558]: I0120 17:18:50.279157 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"e7f15828-3902-4413-b35a-b33487057ea6","Type":"ContainerDied","Data":"308537a807ea98cf0cb1db26e2083a6fc000a2f81a9f211aa0566066bc04752c"} Jan 20 17:18:50 crc kubenswrapper[4558]: I0120 17:18:50.280914 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-5c7bf5978d-d2t27" 
event={"ID":"abb17bba-a50c-4b0f-941b-3a09a8bcac1e","Type":"ContainerStarted","Data":"71a8bccb6db5181577ff2da6379fafa98f3cfa0c15a2c71a8806e5f0bc7f567a"} Jan 20 17:18:50 crc kubenswrapper[4558]: I0120 17:18:50.282362 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/neutron-5c7bf5978d-d2t27" Jan 20 17:18:50 crc kubenswrapper[4558]: I0120 17:18:50.309231 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/neutron-5c7bf5978d-d2t27" podStartSLOduration=2.309216479 podStartE2EDuration="2.309216479s" podCreationTimestamp="2026-01-20 17:18:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:18:50.301536097 +0000 UTC m=+2224.061874064" watchObservedRunningTime="2026-01-20 17:18:50.309216479 +0000 UTC m=+2224.069554446" Jan 20 17:18:51 crc kubenswrapper[4558]: I0120 17:18:51.006070 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/barbican-api-7b899ccb8d-cfqnl" Jan 20 17:18:51 crc kubenswrapper[4558]: I0120 17:18:51.072597 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:18:51 crc kubenswrapper[4558]: I0120 17:18:51.082601 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-api-678474f9b-4hxj7"] Jan 20 17:18:51 crc kubenswrapper[4558]: I0120 17:18:51.082813 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-api-678474f9b-4hxj7" podUID="47c6e50e-b277-48d8-928d-f0d571acf238" containerName="barbican-api-log" containerID="cri-o://9dc6bc15ff6caf6b71886854b59c8e24cc9c17a04a1198f272d418effb557cc1" gracePeriod=30 Jan 20 17:18:51 crc kubenswrapper[4558]: I0120 17:18:51.082884 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-api-678474f9b-4hxj7" podUID="47c6e50e-b277-48d8-928d-f0d571acf238" containerName="barbican-api" containerID="cri-o://b8cb069501804ced5862f25dd0e9a29484e1f9a42e27c83b54682de815e90065" gracePeriod=30 Jan 20 17:18:51 crc kubenswrapper[4558]: I0120 17:18:51.185179 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7f15828-3902-4413-b35a-b33487057ea6-config-data\") pod \"e7f15828-3902-4413-b35a-b33487057ea6\" (UID: \"e7f15828-3902-4413-b35a-b33487057ea6\") " Jan 20 17:18:51 crc kubenswrapper[4558]: I0120 17:18:51.185243 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e7f15828-3902-4413-b35a-b33487057ea6-scripts\") pod \"e7f15828-3902-4413-b35a-b33487057ea6\" (UID: \"e7f15828-3902-4413-b35a-b33487057ea6\") " Jan 20 17:18:51 crc kubenswrapper[4558]: I0120 17:18:51.185313 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sr6fd\" (UniqueName: \"kubernetes.io/projected/e7f15828-3902-4413-b35a-b33487057ea6-kube-api-access-sr6fd\") pod \"e7f15828-3902-4413-b35a-b33487057ea6\" (UID: \"e7f15828-3902-4413-b35a-b33487057ea6\") " Jan 20 17:18:51 crc kubenswrapper[4558]: I0120 17:18:51.185360 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e7f15828-3902-4413-b35a-b33487057ea6-config-data-custom\") pod 
\"e7f15828-3902-4413-b35a-b33487057ea6\" (UID: \"e7f15828-3902-4413-b35a-b33487057ea6\") " Jan 20 17:18:51 crc kubenswrapper[4558]: I0120 17:18:51.185432 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e7f15828-3902-4413-b35a-b33487057ea6-etc-machine-id\") pod \"e7f15828-3902-4413-b35a-b33487057ea6\" (UID: \"e7f15828-3902-4413-b35a-b33487057ea6\") " Jan 20 17:18:51 crc kubenswrapper[4558]: I0120 17:18:51.185503 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7f15828-3902-4413-b35a-b33487057ea6-combined-ca-bundle\") pod \"e7f15828-3902-4413-b35a-b33487057ea6\" (UID: \"e7f15828-3902-4413-b35a-b33487057ea6\") " Jan 20 17:18:51 crc kubenswrapper[4558]: I0120 17:18:51.185601 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e7f15828-3902-4413-b35a-b33487057ea6-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "e7f15828-3902-4413-b35a-b33487057ea6" (UID: "e7f15828-3902-4413-b35a-b33487057ea6"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:18:51 crc kubenswrapper[4558]: I0120 17:18:51.186360 4558 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e7f15828-3902-4413-b35a-b33487057ea6-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:51 crc kubenswrapper[4558]: I0120 17:18:51.192468 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7f15828-3902-4413-b35a-b33487057ea6-scripts" (OuterVolumeSpecName: "scripts") pod "e7f15828-3902-4413-b35a-b33487057ea6" (UID: "e7f15828-3902-4413-b35a-b33487057ea6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:18:51 crc kubenswrapper[4558]: I0120 17:18:51.192609 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7f15828-3902-4413-b35a-b33487057ea6-kube-api-access-sr6fd" (OuterVolumeSpecName: "kube-api-access-sr6fd") pod "e7f15828-3902-4413-b35a-b33487057ea6" (UID: "e7f15828-3902-4413-b35a-b33487057ea6"). InnerVolumeSpecName "kube-api-access-sr6fd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:18:51 crc kubenswrapper[4558]: I0120 17:18:51.194908 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7f15828-3902-4413-b35a-b33487057ea6-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "e7f15828-3902-4413-b35a-b33487057ea6" (UID: "e7f15828-3902-4413-b35a-b33487057ea6"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:18:51 crc kubenswrapper[4558]: I0120 17:18:51.255188 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7f15828-3902-4413-b35a-b33487057ea6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e7f15828-3902-4413-b35a-b33487057ea6" (UID: "e7f15828-3902-4413-b35a-b33487057ea6"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:18:51 crc kubenswrapper[4558]: I0120 17:18:51.288353 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sr6fd\" (UniqueName: \"kubernetes.io/projected/e7f15828-3902-4413-b35a-b33487057ea6-kube-api-access-sr6fd\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:51 crc kubenswrapper[4558]: I0120 17:18:51.288657 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e7f15828-3902-4413-b35a-b33487057ea6-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:51 crc kubenswrapper[4558]: I0120 17:18:51.288669 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7f15828-3902-4413-b35a-b33487057ea6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:51 crc kubenswrapper[4558]: I0120 17:18:51.288678 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e7f15828-3902-4413-b35a-b33487057ea6-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:51 crc kubenswrapper[4558]: I0120 17:18:51.290002 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7f15828-3902-4413-b35a-b33487057ea6-config-data" (OuterVolumeSpecName: "config-data") pod "e7f15828-3902-4413-b35a-b33487057ea6" (UID: "e7f15828-3902-4413-b35a-b33487057ea6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:18:51 crc kubenswrapper[4558]: I0120 17:18:51.295325 4558 generic.go:334] "Generic (PLEG): container finished" podID="e7f15828-3902-4413-b35a-b33487057ea6" containerID="2f4a6e34fee542fde92457779ab42b75ad188b49ed973211487d82f9b9c24284" exitCode=0 Jan 20 17:18:51 crc kubenswrapper[4558]: I0120 17:18:51.295384 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"e7f15828-3902-4413-b35a-b33487057ea6","Type":"ContainerDied","Data":"2f4a6e34fee542fde92457779ab42b75ad188b49ed973211487d82f9b9c24284"} Jan 20 17:18:51 crc kubenswrapper[4558]: I0120 17:18:51.295413 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"e7f15828-3902-4413-b35a-b33487057ea6","Type":"ContainerDied","Data":"a17b6876bf8dbd5a0e5775eeb15deaec2d5acd8068f6e505ffeb722ee6a91dd3"} Jan 20 17:18:51 crc kubenswrapper[4558]: I0120 17:18:51.295429 4558 scope.go:117] "RemoveContainer" containerID="308537a807ea98cf0cb1db26e2083a6fc000a2f81a9f211aa0566066bc04752c" Jan 20 17:18:51 crc kubenswrapper[4558]: I0120 17:18:51.295564 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:18:51 crc kubenswrapper[4558]: I0120 17:18:51.302569 4558 generic.go:334] "Generic (PLEG): container finished" podID="47c6e50e-b277-48d8-928d-f0d571acf238" containerID="9dc6bc15ff6caf6b71886854b59c8e24cc9c17a04a1198f272d418effb557cc1" exitCode=143 Jan 20 17:18:51 crc kubenswrapper[4558]: I0120 17:18:51.302672 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-678474f9b-4hxj7" event={"ID":"47c6e50e-b277-48d8-928d-f0d571acf238","Type":"ContainerDied","Data":"9dc6bc15ff6caf6b71886854b59c8e24cc9c17a04a1198f272d418effb557cc1"} Jan 20 17:18:51 crc kubenswrapper[4558]: I0120 17:18:51.331514 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:18:51 crc kubenswrapper[4558]: I0120 17:18:51.333603 4558 scope.go:117] "RemoveContainer" containerID="2f4a6e34fee542fde92457779ab42b75ad188b49ed973211487d82f9b9c24284" Jan 20 17:18:51 crc kubenswrapper[4558]: I0120 17:18:51.341991 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:18:51 crc kubenswrapper[4558]: I0120 17:18:51.361815 4558 scope.go:117] "RemoveContainer" containerID="308537a807ea98cf0cb1db26e2083a6fc000a2f81a9f211aa0566066bc04752c" Jan 20 17:18:51 crc kubenswrapper[4558]: E0120 17:18:51.362433 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"308537a807ea98cf0cb1db26e2083a6fc000a2f81a9f211aa0566066bc04752c\": container with ID starting with 308537a807ea98cf0cb1db26e2083a6fc000a2f81a9f211aa0566066bc04752c not found: ID does not exist" containerID="308537a807ea98cf0cb1db26e2083a6fc000a2f81a9f211aa0566066bc04752c" Jan 20 17:18:51 crc kubenswrapper[4558]: I0120 17:18:51.362470 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"308537a807ea98cf0cb1db26e2083a6fc000a2f81a9f211aa0566066bc04752c"} err="failed to get container status \"308537a807ea98cf0cb1db26e2083a6fc000a2f81a9f211aa0566066bc04752c\": rpc error: code = NotFound desc = could not find container \"308537a807ea98cf0cb1db26e2083a6fc000a2f81a9f211aa0566066bc04752c\": container with ID starting with 308537a807ea98cf0cb1db26e2083a6fc000a2f81a9f211aa0566066bc04752c not found: ID does not exist" Jan 20 17:18:51 crc kubenswrapper[4558]: I0120 17:18:51.362491 4558 scope.go:117] "RemoveContainer" containerID="2f4a6e34fee542fde92457779ab42b75ad188b49ed973211487d82f9b9c24284" Jan 20 17:18:51 crc kubenswrapper[4558]: E0120 17:18:51.362754 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2f4a6e34fee542fde92457779ab42b75ad188b49ed973211487d82f9b9c24284\": container with ID starting with 2f4a6e34fee542fde92457779ab42b75ad188b49ed973211487d82f9b9c24284 not found: ID does not exist" containerID="2f4a6e34fee542fde92457779ab42b75ad188b49ed973211487d82f9b9c24284" Jan 20 17:18:51 crc kubenswrapper[4558]: I0120 17:18:51.362776 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2f4a6e34fee542fde92457779ab42b75ad188b49ed973211487d82f9b9c24284"} err="failed to get container status \"2f4a6e34fee542fde92457779ab42b75ad188b49ed973211487d82f9b9c24284\": rpc error: code = NotFound desc = could not find container \"2f4a6e34fee542fde92457779ab42b75ad188b49ed973211487d82f9b9c24284\": container with ID starting with 
2f4a6e34fee542fde92457779ab42b75ad188b49ed973211487d82f9b9c24284 not found: ID does not exist" Jan 20 17:18:51 crc kubenswrapper[4558]: I0120 17:18:51.363974 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:18:51 crc kubenswrapper[4558]: E0120 17:18:51.364545 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7f15828-3902-4413-b35a-b33487057ea6" containerName="cinder-scheduler" Jan 20 17:18:51 crc kubenswrapper[4558]: I0120 17:18:51.364572 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7f15828-3902-4413-b35a-b33487057ea6" containerName="cinder-scheduler" Jan 20 17:18:51 crc kubenswrapper[4558]: E0120 17:18:51.364606 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7f15828-3902-4413-b35a-b33487057ea6" containerName="probe" Jan 20 17:18:51 crc kubenswrapper[4558]: I0120 17:18:51.364614 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7f15828-3902-4413-b35a-b33487057ea6" containerName="probe" Jan 20 17:18:51 crc kubenswrapper[4558]: I0120 17:18:51.364863 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e7f15828-3902-4413-b35a-b33487057ea6" containerName="cinder-scheduler" Jan 20 17:18:51 crc kubenswrapper[4558]: I0120 17:18:51.364882 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e7f15828-3902-4413-b35a-b33487057ea6" containerName="probe" Jan 20 17:18:51 crc kubenswrapper[4558]: I0120 17:18:51.366092 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:18:51 crc kubenswrapper[4558]: I0120 17:18:51.376318 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-scheduler-config-data" Jan 20 17:18:51 crc kubenswrapper[4558]: I0120 17:18:51.384046 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:18:51 crc kubenswrapper[4558]: I0120 17:18:51.387066 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-4bc72" Jan 20 17:18:51 crc kubenswrapper[4558]: I0120 17:18:51.387105 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-4bc72" Jan 20 17:18:51 crc kubenswrapper[4558]: I0120 17:18:51.390527 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7f15828-3902-4413-b35a-b33487057ea6-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:51 crc kubenswrapper[4558]: I0120 17:18:51.426963 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-4bc72" Jan 20 17:18:51 crc kubenswrapper[4558]: I0120 17:18:51.492826 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/413b348d-82a1-466a-8027-d2bfd6f97cb7-scripts\") pod \"cinder-scheduler-0\" (UID: \"413b348d-82a1-466a-8027-d2bfd6f97cb7\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:18:51 crc kubenswrapper[4558]: I0120 17:18:51.492893 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/413b348d-82a1-466a-8027-d2bfd6f97cb7-config-data\") pod \"cinder-scheduler-0\" (UID: \"413b348d-82a1-466a-8027-d2bfd6f97cb7\") " 
pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:18:51 crc kubenswrapper[4558]: I0120 17:18:51.492916 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/413b348d-82a1-466a-8027-d2bfd6f97cb7-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"413b348d-82a1-466a-8027-d2bfd6f97cb7\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:18:51 crc kubenswrapper[4558]: I0120 17:18:51.493287 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bgwqv\" (UniqueName: \"kubernetes.io/projected/413b348d-82a1-466a-8027-d2bfd6f97cb7-kube-api-access-bgwqv\") pod \"cinder-scheduler-0\" (UID: \"413b348d-82a1-466a-8027-d2bfd6f97cb7\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:18:51 crc kubenswrapper[4558]: I0120 17:18:51.493515 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/413b348d-82a1-466a-8027-d2bfd6f97cb7-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"413b348d-82a1-466a-8027-d2bfd6f97cb7\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:18:51 crc kubenswrapper[4558]: I0120 17:18:51.493676 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/413b348d-82a1-466a-8027-d2bfd6f97cb7-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"413b348d-82a1-466a-8027-d2bfd6f97cb7\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:18:51 crc kubenswrapper[4558]: I0120 17:18:51.596441 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bgwqv\" (UniqueName: \"kubernetes.io/projected/413b348d-82a1-466a-8027-d2bfd6f97cb7-kube-api-access-bgwqv\") pod \"cinder-scheduler-0\" (UID: \"413b348d-82a1-466a-8027-d2bfd6f97cb7\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:18:51 crc kubenswrapper[4558]: I0120 17:18:51.596545 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/413b348d-82a1-466a-8027-d2bfd6f97cb7-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"413b348d-82a1-466a-8027-d2bfd6f97cb7\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:18:51 crc kubenswrapper[4558]: I0120 17:18:51.596616 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/413b348d-82a1-466a-8027-d2bfd6f97cb7-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"413b348d-82a1-466a-8027-d2bfd6f97cb7\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:18:51 crc kubenswrapper[4558]: I0120 17:18:51.596738 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/413b348d-82a1-466a-8027-d2bfd6f97cb7-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"413b348d-82a1-466a-8027-d2bfd6f97cb7\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:18:51 crc kubenswrapper[4558]: I0120 17:18:51.596748 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/413b348d-82a1-466a-8027-d2bfd6f97cb7-scripts\") pod \"cinder-scheduler-0\" (UID: \"413b348d-82a1-466a-8027-d2bfd6f97cb7\") " 
pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:18:51 crc kubenswrapper[4558]: I0120 17:18:51.596911 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/413b348d-82a1-466a-8027-d2bfd6f97cb7-config-data\") pod \"cinder-scheduler-0\" (UID: \"413b348d-82a1-466a-8027-d2bfd6f97cb7\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:18:51 crc kubenswrapper[4558]: I0120 17:18:51.596947 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/413b348d-82a1-466a-8027-d2bfd6f97cb7-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"413b348d-82a1-466a-8027-d2bfd6f97cb7\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:18:51 crc kubenswrapper[4558]: I0120 17:18:51.602151 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/413b348d-82a1-466a-8027-d2bfd6f97cb7-scripts\") pod \"cinder-scheduler-0\" (UID: \"413b348d-82a1-466a-8027-d2bfd6f97cb7\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:18:51 crc kubenswrapper[4558]: I0120 17:18:51.602983 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/413b348d-82a1-466a-8027-d2bfd6f97cb7-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"413b348d-82a1-466a-8027-d2bfd6f97cb7\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:18:51 crc kubenswrapper[4558]: I0120 17:18:51.603396 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/413b348d-82a1-466a-8027-d2bfd6f97cb7-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"413b348d-82a1-466a-8027-d2bfd6f97cb7\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:18:51 crc kubenswrapper[4558]: I0120 17:18:51.605290 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/413b348d-82a1-466a-8027-d2bfd6f97cb7-config-data\") pod \"cinder-scheduler-0\" (UID: \"413b348d-82a1-466a-8027-d2bfd6f97cb7\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:18:51 crc kubenswrapper[4558]: I0120 17:18:51.611600 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bgwqv\" (UniqueName: \"kubernetes.io/projected/413b348d-82a1-466a-8027-d2bfd6f97cb7-kube-api-access-bgwqv\") pod \"cinder-scheduler-0\" (UID: \"413b348d-82a1-466a-8027-d2bfd6f97cb7\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:18:51 crc kubenswrapper[4558]: I0120 17:18:51.697726 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:18:52 crc kubenswrapper[4558]: I0120 17:18:52.115565 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:18:52 crc kubenswrapper[4558]: I0120 17:18:52.317572 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"413b348d-82a1-466a-8027-d2bfd6f97cb7","Type":"ContainerStarted","Data":"adcce5a4f5dd6fc247e791891ccb892e1fe88a8d1bf103a3c067c6cc28e3425f"} Jan 20 17:18:52 crc kubenswrapper[4558]: I0120 17:18:52.360140 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-4bc72" Jan 20 17:18:52 crc kubenswrapper[4558]: I0120 17:18:52.416023 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-4bc72"] Jan 20 17:18:52 crc kubenswrapper[4558]: I0120 17:18:52.565992 4558 scope.go:117] "RemoveContainer" containerID="0ffce44366a8b4466427b682fb41640425366a0f4449210959148de9aef695c6" Jan 20 17:18:52 crc kubenswrapper[4558]: E0120 17:18:52.566433 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:18:52 crc kubenswrapper[4558]: I0120 17:18:52.577762 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7f15828-3902-4413-b35a-b33487057ea6" path="/var/lib/kubelet/pods/e7f15828-3902-4413-b35a-b33487057ea6/volumes" Jan 20 17:18:53 crc kubenswrapper[4558]: I0120 17:18:53.332715 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"413b348d-82a1-466a-8027-d2bfd6f97cb7","Type":"ContainerStarted","Data":"7ee0f2d8df26b1c3c3fbe8f6a262fe806d7e730b90e603316c261fe601cf8b66"} Jan 20 17:18:53 crc kubenswrapper[4558]: I0120 17:18:53.333040 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"413b348d-82a1-466a-8027-d2bfd6f97cb7","Type":"ContainerStarted","Data":"0326fe1340c68a89c66bd369e9d86e881cb9466234bb1a9b24d4732251805082"} Jan 20 17:18:53 crc kubenswrapper[4558]: I0120 17:18:53.352942 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/cinder-scheduler-0" podStartSLOduration=2.352923382 podStartE2EDuration="2.352923382s" podCreationTimestamp="2026-01-20 17:18:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:18:53.351891261 +0000 UTC m=+2227.112229229" watchObservedRunningTime="2026-01-20 17:18:53.352923382 +0000 UTC m=+2227.113261348" Jan 20 17:18:54 crc kubenswrapper[4558]: I0120 17:18:54.345873 4558 generic.go:334] "Generic (PLEG): container finished" podID="47c6e50e-b277-48d8-928d-f0d571acf238" containerID="b8cb069501804ced5862f25dd0e9a29484e1f9a42e27c83b54682de815e90065" exitCode=0 Jan 20 17:18:54 crc kubenswrapper[4558]: I0120 17:18:54.345948 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-678474f9b-4hxj7" 
event={"ID":"47c6e50e-b277-48d8-928d-f0d571acf238","Type":"ContainerDied","Data":"b8cb069501804ced5862f25dd0e9a29484e1f9a42e27c83b54682de815e90065"} Jan 20 17:18:54 crc kubenswrapper[4558]: I0120 17:18:54.346323 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-4bc72" podUID="6bc4e0a0-7c1f-4b44-8724-dd6fa3f41d1d" containerName="registry-server" containerID="cri-o://f7d8723a08de461423cea85e9e4652a6f1771077ccdcb61ce6da413ca9834535" gracePeriod=2 Jan 20 17:18:54 crc kubenswrapper[4558]: I0120 17:18:54.362347 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/barbican-api-678474f9b-4hxj7" podUID="47c6e50e-b277-48d8-928d-f0d571acf238" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.166:9311/healthcheck\": dial tcp 10.217.0.166:9311: connect: connection refused" Jan 20 17:18:54 crc kubenswrapper[4558]: I0120 17:18:54.362296 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/barbican-api-678474f9b-4hxj7" podUID="47c6e50e-b277-48d8-928d-f0d571acf238" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.166:9311/healthcheck\": dial tcp 10.217.0.166:9311: connect: connection refused" Jan 20 17:18:54 crc kubenswrapper[4558]: I0120 17:18:54.687805 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-api-678474f9b-4hxj7" Jan 20 17:18:54 crc kubenswrapper[4558]: I0120 17:18:54.778418 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47c6e50e-b277-48d8-928d-f0d571acf238-combined-ca-bundle\") pod \"47c6e50e-b277-48d8-928d-f0d571acf238\" (UID: \"47c6e50e-b277-48d8-928d-f0d571acf238\") " Jan 20 17:18:54 crc kubenswrapper[4558]: I0120 17:18:54.778637 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/47c6e50e-b277-48d8-928d-f0d571acf238-config-data-custom\") pod \"47c6e50e-b277-48d8-928d-f0d571acf238\" (UID: \"47c6e50e-b277-48d8-928d-f0d571acf238\") " Jan 20 17:18:54 crc kubenswrapper[4558]: I0120 17:18:54.778840 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/47c6e50e-b277-48d8-928d-f0d571acf238-logs\") pod \"47c6e50e-b277-48d8-928d-f0d571acf238\" (UID: \"47c6e50e-b277-48d8-928d-f0d571acf238\") " Jan 20 17:18:54 crc kubenswrapper[4558]: I0120 17:18:54.778983 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47c6e50e-b277-48d8-928d-f0d571acf238-config-data\") pod \"47c6e50e-b277-48d8-928d-f0d571acf238\" (UID: \"47c6e50e-b277-48d8-928d-f0d571acf238\") " Jan 20 17:18:54 crc kubenswrapper[4558]: I0120 17:18:54.779037 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fg24b\" (UniqueName: \"kubernetes.io/projected/47c6e50e-b277-48d8-928d-f0d571acf238-kube-api-access-fg24b\") pod \"47c6e50e-b277-48d8-928d-f0d571acf238\" (UID: \"47c6e50e-b277-48d8-928d-f0d571acf238\") " Jan 20 17:18:54 crc kubenswrapper[4558]: I0120 17:18:54.779702 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/47c6e50e-b277-48d8-928d-f0d571acf238-logs" (OuterVolumeSpecName: "logs") pod "47c6e50e-b277-48d8-928d-f0d571acf238" (UID: 
"47c6e50e-b277-48d8-928d-f0d571acf238"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:18:54 crc kubenswrapper[4558]: I0120 17:18:54.780739 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/47c6e50e-b277-48d8-928d-f0d571acf238-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:54 crc kubenswrapper[4558]: I0120 17:18:54.797395 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/47c6e50e-b277-48d8-928d-f0d571acf238-kube-api-access-fg24b" (OuterVolumeSpecName: "kube-api-access-fg24b") pod "47c6e50e-b277-48d8-928d-f0d571acf238" (UID: "47c6e50e-b277-48d8-928d-f0d571acf238"). InnerVolumeSpecName "kube-api-access-fg24b". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:18:54 crc kubenswrapper[4558]: I0120 17:18:54.805278 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/47c6e50e-b277-48d8-928d-f0d571acf238-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "47c6e50e-b277-48d8-928d-f0d571acf238" (UID: "47c6e50e-b277-48d8-928d-f0d571acf238"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:18:54 crc kubenswrapper[4558]: I0120 17:18:54.835744 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/47c6e50e-b277-48d8-928d-f0d571acf238-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "47c6e50e-b277-48d8-928d-f0d571acf238" (UID: "47c6e50e-b277-48d8-928d-f0d571acf238"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:18:54 crc kubenswrapper[4558]: I0120 17:18:54.847392 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/47c6e50e-b277-48d8-928d-f0d571acf238-config-data" (OuterVolumeSpecName: "config-data") pod "47c6e50e-b277-48d8-928d-f0d571acf238" (UID: "47c6e50e-b277-48d8-928d-f0d571acf238"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:18:54 crc kubenswrapper[4558]: I0120 17:18:54.850965 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4bc72" Jan 20 17:18:54 crc kubenswrapper[4558]: I0120 17:18:54.883634 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47c6e50e-b277-48d8-928d-f0d571acf238-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:54 crc kubenswrapper[4558]: I0120 17:18:54.883665 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/47c6e50e-b277-48d8-928d-f0d571acf238-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:54 crc kubenswrapper[4558]: I0120 17:18:54.883677 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47c6e50e-b277-48d8-928d-f0d571acf238-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:54 crc kubenswrapper[4558]: I0120 17:18:54.883687 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fg24b\" (UniqueName: \"kubernetes.io/projected/47c6e50e-b277-48d8-928d-f0d571acf238-kube-api-access-fg24b\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:54 crc kubenswrapper[4558]: I0120 17:18:54.984589 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4nxg\" (UniqueName: \"kubernetes.io/projected/6bc4e0a0-7c1f-4b44-8724-dd6fa3f41d1d-kube-api-access-d4nxg\") pod \"6bc4e0a0-7c1f-4b44-8724-dd6fa3f41d1d\" (UID: \"6bc4e0a0-7c1f-4b44-8724-dd6fa3f41d1d\") " Jan 20 17:18:54 crc kubenswrapper[4558]: I0120 17:18:54.984739 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6bc4e0a0-7c1f-4b44-8724-dd6fa3f41d1d-utilities\") pod \"6bc4e0a0-7c1f-4b44-8724-dd6fa3f41d1d\" (UID: \"6bc4e0a0-7c1f-4b44-8724-dd6fa3f41d1d\") " Jan 20 17:18:54 crc kubenswrapper[4558]: I0120 17:18:54.984886 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6bc4e0a0-7c1f-4b44-8724-dd6fa3f41d1d-catalog-content\") pod \"6bc4e0a0-7c1f-4b44-8724-dd6fa3f41d1d\" (UID: \"6bc4e0a0-7c1f-4b44-8724-dd6fa3f41d1d\") " Jan 20 17:18:54 crc kubenswrapper[4558]: I0120 17:18:54.986449 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6bc4e0a0-7c1f-4b44-8724-dd6fa3f41d1d-utilities" (OuterVolumeSpecName: "utilities") pod "6bc4e0a0-7c1f-4b44-8724-dd6fa3f41d1d" (UID: "6bc4e0a0-7c1f-4b44-8724-dd6fa3f41d1d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:18:54 crc kubenswrapper[4558]: I0120 17:18:54.989211 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6bc4e0a0-7c1f-4b44-8724-dd6fa3f41d1d-kube-api-access-d4nxg" (OuterVolumeSpecName: "kube-api-access-d4nxg") pod "6bc4e0a0-7c1f-4b44-8724-dd6fa3f41d1d" (UID: "6bc4e0a0-7c1f-4b44-8724-dd6fa3f41d1d"). InnerVolumeSpecName "kube-api-access-d4nxg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:18:55 crc kubenswrapper[4558]: I0120 17:18:55.003713 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6bc4e0a0-7c1f-4b44-8724-dd6fa3f41d1d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6bc4e0a0-7c1f-4b44-8724-dd6fa3f41d1d" (UID: "6bc4e0a0-7c1f-4b44-8724-dd6fa3f41d1d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:18:55 crc kubenswrapper[4558]: I0120 17:18:55.087124 4558 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6bc4e0a0-7c1f-4b44-8724-dd6fa3f41d1d-utilities\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:55 crc kubenswrapper[4558]: I0120 17:18:55.087146 4558 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6bc4e0a0-7c1f-4b44-8724-dd6fa3f41d1d-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:55 crc kubenswrapper[4558]: I0120 17:18:55.087159 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4nxg\" (UniqueName: \"kubernetes.io/projected/6bc4e0a0-7c1f-4b44-8724-dd6fa3f41d1d-kube-api-access-d4nxg\") on node \"crc\" DevicePath \"\"" Jan 20 17:18:55 crc kubenswrapper[4558]: I0120 17:18:55.361026 4558 generic.go:334] "Generic (PLEG): container finished" podID="6bc4e0a0-7c1f-4b44-8724-dd6fa3f41d1d" containerID="f7d8723a08de461423cea85e9e4652a6f1771077ccdcb61ce6da413ca9834535" exitCode=0 Jan 20 17:18:55 crc kubenswrapper[4558]: I0120 17:18:55.361115 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4bc72" Jan 20 17:18:55 crc kubenswrapper[4558]: I0120 17:18:55.361120 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4bc72" event={"ID":"6bc4e0a0-7c1f-4b44-8724-dd6fa3f41d1d","Type":"ContainerDied","Data":"f7d8723a08de461423cea85e9e4652a6f1771077ccdcb61ce6da413ca9834535"} Jan 20 17:18:55 crc kubenswrapper[4558]: I0120 17:18:55.361206 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4bc72" event={"ID":"6bc4e0a0-7c1f-4b44-8724-dd6fa3f41d1d","Type":"ContainerDied","Data":"fcbdec7d170469e601ec0709578dca22e0d00415f727877c71e80ba4ab104553"} Jan 20 17:18:55 crc kubenswrapper[4558]: I0120 17:18:55.361232 4558 scope.go:117] "RemoveContainer" containerID="f7d8723a08de461423cea85e9e4652a6f1771077ccdcb61ce6da413ca9834535" Jan 20 17:18:55 crc kubenswrapper[4558]: I0120 17:18:55.363792 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-678474f9b-4hxj7" event={"ID":"47c6e50e-b277-48d8-928d-f0d571acf238","Type":"ContainerDied","Data":"169a9c5070cf92fbb59c5c38dc9e4daf276d86dd454c6d9e7d6e117a81260d4c"} Jan 20 17:18:55 crc kubenswrapper[4558]: I0120 17:18:55.363836 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-api-678474f9b-4hxj7" Jan 20 17:18:55 crc kubenswrapper[4558]: I0120 17:18:55.405087 4558 scope.go:117] "RemoveContainer" containerID="f72fc1315a1244479b03514e7e7f276969fbdaf21c95b987015c41f6692d09d2" Jan 20 17:18:55 crc kubenswrapper[4558]: I0120 17:18:55.408631 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-4bc72"] Jan 20 17:18:55 crc kubenswrapper[4558]: I0120 17:18:55.423641 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-4bc72"] Jan 20 17:18:55 crc kubenswrapper[4558]: I0120 17:18:55.430309 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-api-678474f9b-4hxj7"] Jan 20 17:18:55 crc kubenswrapper[4558]: I0120 17:18:55.434990 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-api-678474f9b-4hxj7"] Jan 20 17:18:55 crc kubenswrapper[4558]: I0120 17:18:55.438259 4558 scope.go:117] "RemoveContainer" containerID="0d7f477e7e06832c7429e011343b9dc748dcc87bee4a71816003bd72474f5ea3" Jan 20 17:18:55 crc kubenswrapper[4558]: I0120 17:18:55.485001 4558 scope.go:117] "RemoveContainer" containerID="f7d8723a08de461423cea85e9e4652a6f1771077ccdcb61ce6da413ca9834535" Jan 20 17:18:55 crc kubenswrapper[4558]: E0120 17:18:55.485413 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f7d8723a08de461423cea85e9e4652a6f1771077ccdcb61ce6da413ca9834535\": container with ID starting with f7d8723a08de461423cea85e9e4652a6f1771077ccdcb61ce6da413ca9834535 not found: ID does not exist" containerID="f7d8723a08de461423cea85e9e4652a6f1771077ccdcb61ce6da413ca9834535" Jan 20 17:18:55 crc kubenswrapper[4558]: I0120 17:18:55.485449 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f7d8723a08de461423cea85e9e4652a6f1771077ccdcb61ce6da413ca9834535"} err="failed to get container status \"f7d8723a08de461423cea85e9e4652a6f1771077ccdcb61ce6da413ca9834535\": rpc error: code = NotFound desc = could not find container \"f7d8723a08de461423cea85e9e4652a6f1771077ccdcb61ce6da413ca9834535\": container with ID starting with f7d8723a08de461423cea85e9e4652a6f1771077ccdcb61ce6da413ca9834535 not found: ID does not exist" Jan 20 17:18:55 crc kubenswrapper[4558]: I0120 17:18:55.485473 4558 scope.go:117] "RemoveContainer" containerID="f72fc1315a1244479b03514e7e7f276969fbdaf21c95b987015c41f6692d09d2" Jan 20 17:18:55 crc kubenswrapper[4558]: E0120 17:18:55.485894 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f72fc1315a1244479b03514e7e7f276969fbdaf21c95b987015c41f6692d09d2\": container with ID starting with f72fc1315a1244479b03514e7e7f276969fbdaf21c95b987015c41f6692d09d2 not found: ID does not exist" containerID="f72fc1315a1244479b03514e7e7f276969fbdaf21c95b987015c41f6692d09d2" Jan 20 17:18:55 crc kubenswrapper[4558]: I0120 17:18:55.485939 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f72fc1315a1244479b03514e7e7f276969fbdaf21c95b987015c41f6692d09d2"} err="failed to get container status \"f72fc1315a1244479b03514e7e7f276969fbdaf21c95b987015c41f6692d09d2\": rpc error: code = NotFound desc = could not find container \"f72fc1315a1244479b03514e7e7f276969fbdaf21c95b987015c41f6692d09d2\": container with ID starting with 
f72fc1315a1244479b03514e7e7f276969fbdaf21c95b987015c41f6692d09d2 not found: ID does not exist" Jan 20 17:18:55 crc kubenswrapper[4558]: I0120 17:18:55.485976 4558 scope.go:117] "RemoveContainer" containerID="0d7f477e7e06832c7429e011343b9dc748dcc87bee4a71816003bd72474f5ea3" Jan 20 17:18:55 crc kubenswrapper[4558]: E0120 17:18:55.486413 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0d7f477e7e06832c7429e011343b9dc748dcc87bee4a71816003bd72474f5ea3\": container with ID starting with 0d7f477e7e06832c7429e011343b9dc748dcc87bee4a71816003bd72474f5ea3 not found: ID does not exist" containerID="0d7f477e7e06832c7429e011343b9dc748dcc87bee4a71816003bd72474f5ea3" Jan 20 17:18:55 crc kubenswrapper[4558]: I0120 17:18:55.486455 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0d7f477e7e06832c7429e011343b9dc748dcc87bee4a71816003bd72474f5ea3"} err="failed to get container status \"0d7f477e7e06832c7429e011343b9dc748dcc87bee4a71816003bd72474f5ea3\": rpc error: code = NotFound desc = could not find container \"0d7f477e7e06832c7429e011343b9dc748dcc87bee4a71816003bd72474f5ea3\": container with ID starting with 0d7f477e7e06832c7429e011343b9dc748dcc87bee4a71816003bd72474f5ea3 not found: ID does not exist" Jan 20 17:18:55 crc kubenswrapper[4558]: I0120 17:18:55.486480 4558 scope.go:117] "RemoveContainer" containerID="b8cb069501804ced5862f25dd0e9a29484e1f9a42e27c83b54682de815e90065" Jan 20 17:18:55 crc kubenswrapper[4558]: I0120 17:18:55.527565 4558 scope.go:117] "RemoveContainer" containerID="9dc6bc15ff6caf6b71886854b59c8e24cc9c17a04a1198f272d418effb557cc1" Jan 20 17:18:56 crc kubenswrapper[4558]: I0120 17:18:56.319432 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:18:56 crc kubenswrapper[4558]: I0120 17:18:56.579072 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="47c6e50e-b277-48d8-928d-f0d571acf238" path="/var/lib/kubelet/pods/47c6e50e-b277-48d8-928d-f0d571acf238/volumes" Jan 20 17:18:56 crc kubenswrapper[4558]: I0120 17:18:56.579845 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6bc4e0a0-7c1f-4b44-8724-dd6fa3f41d1d" path="/var/lib/kubelet/pods/6bc4e0a0-7c1f-4b44-8724-dd6fa3f41d1d/volumes" Jan 20 17:18:56 crc kubenswrapper[4558]: I0120 17:18:56.698236 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:19:01 crc kubenswrapper[4558]: I0120 17:19:01.899567 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:19:03 crc kubenswrapper[4558]: I0120 17:19:03.567254 4558 scope.go:117] "RemoveContainer" containerID="0ffce44366a8b4466427b682fb41640425366a0f4449210959148de9aef695c6" Jan 20 17:19:03 crc kubenswrapper[4558]: E0120 17:19:03.568209 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:19:04 crc kubenswrapper[4558]: I0120 17:19:04.196491 4558 scope.go:117] "RemoveContainer" 
containerID="5ae967978a075f7c2d6bb7b5491362516aba9ecdf1c98fba3638a9a6a4285cab" Jan 20 17:19:04 crc kubenswrapper[4558]: I0120 17:19:04.229749 4558 scope.go:117] "RemoveContainer" containerID="c03edbfea1cb0bae5898092d9ae12c1998d6252afb73ed3b9dd034e275deae47" Jan 20 17:19:04 crc kubenswrapper[4558]: I0120 17:19:04.264737 4558 scope.go:117] "RemoveContainer" containerID="1395ab77fe1bb470be25d5faad0eaa0e7b01a32afbd5a14b58917b77c984fae7" Jan 20 17:19:04 crc kubenswrapper[4558]: I0120 17:19:04.301705 4558 scope.go:117] "RemoveContainer" containerID="51b5b5fb5af49adbe1f8ad6fd74aa519237b6772474430f5d67767dfe8302c67" Jan 20 17:19:04 crc kubenswrapper[4558]: I0120 17:19:04.344078 4558 scope.go:117] "RemoveContainer" containerID="7d286e7f84165b7ba56adec4f49203713926bb3402c7da156ca7d232602d1f8c" Jan 20 17:19:04 crc kubenswrapper[4558]: I0120 17:19:04.385016 4558 scope.go:117] "RemoveContainer" containerID="072e7d885f97c7bad5483c48de51474ef74dc6ad6f70322b9de1ba59a261dcff" Jan 20 17:19:04 crc kubenswrapper[4558]: I0120 17:19:04.416195 4558 scope.go:117] "RemoveContainer" containerID="2e090c322fd27930306ecae5c98d22a52137ae21234138d9f2df2a9c227f3a1b" Jan 20 17:19:04 crc kubenswrapper[4558]: I0120 17:19:04.432964 4558 scope.go:117] "RemoveContainer" containerID="2df608c682d3185fad4a76b76ebef175530127ff27ccf6e17867915e943b9657" Jan 20 17:19:04 crc kubenswrapper[4558]: I0120 17:19:04.467886 4558 scope.go:117] "RemoveContainer" containerID="4c99017bce2cd4c2557c9e2d276409ac4a6b6a986805b29e970cc3af2a59f1df" Jan 20 17:19:04 crc kubenswrapper[4558]: I0120 17:19:04.498778 4558 scope.go:117] "RemoveContainer" containerID="547add12d01aa7c468fe2c72184b3e108a17eca0453172299f58c20adaeabe41" Jan 20 17:19:04 crc kubenswrapper[4558]: I0120 17:19:04.520866 4558 scope.go:117] "RemoveContainer" containerID="5e32ab1155526e1fa5af89a3aed00623d506307a8c78e5c9ed1d89cf46db001d" Jan 20 17:19:04 crc kubenswrapper[4558]: I0120 17:19:04.544117 4558 scope.go:117] "RemoveContainer" containerID="e148c477809aecae4b26fafb6fa46c29c5861e22d0ce0d0c52f698f488c73169" Jan 20 17:19:04 crc kubenswrapper[4558]: I0120 17:19:04.565029 4558 scope.go:117] "RemoveContainer" containerID="efad400cecbafa3e01864aab70dc1aa8234d0684805bf1a431ddc104e6872701" Jan 20 17:19:04 crc kubenswrapper[4558]: I0120 17:19:04.590427 4558 scope.go:117] "RemoveContainer" containerID="c0310ca75de4ab98af768cbc03504523fccea796fa93999b9f55b3a18b67ca3b" Jan 20 17:19:04 crc kubenswrapper[4558]: I0120 17:19:04.630320 4558 scope.go:117] "RemoveContainer" containerID="933310e106b177e33387ea977d5513b4b20d06f30928f4803750ca49dab87ab7" Jan 20 17:19:04 crc kubenswrapper[4558]: I0120 17:19:04.654373 4558 scope.go:117] "RemoveContainer" containerID="ef4526e531f7d5807e6513289cdeac244b91e66048cb4682c4a8bb251a104b9d" Jan 20 17:19:04 crc kubenswrapper[4558]: I0120 17:19:04.683029 4558 scope.go:117] "RemoveContainer" containerID="428595a29d94d4ba528e047a5b7e52f2fb67f15ff604242487cb42e3df5dbd56" Jan 20 17:19:04 crc kubenswrapper[4558]: I0120 17:19:04.712694 4558 scope.go:117] "RemoveContainer" containerID="e33f52cf7ad70d9b08e69aacbd68898760b0b7c600e8b28a20625caa9cbc16eb" Jan 20 17:19:04 crc kubenswrapper[4558]: I0120 17:19:04.745027 4558 scope.go:117] "RemoveContainer" containerID="6be9f1e75267073ffc4217f778b1284a97ccddc602c74c02869b9a557b447c2f" Jan 20 17:19:04 crc kubenswrapper[4558]: I0120 17:19:04.765429 4558 scope.go:117] "RemoveContainer" containerID="b91a14b7427c528f3415bf8592c7e627a68ca466d0f73da79aa04ad1ec9ef426" Jan 20 17:19:04 crc kubenswrapper[4558]: I0120 17:19:04.802040 4558 
scope.go:117] "RemoveContainer" containerID="bc15a614c0fd478376937dcd3b1eb44701ee9b454a54a11ed2ce7c51605d407a" Jan 20 17:19:04 crc kubenswrapper[4558]: I0120 17:19:04.821582 4558 scope.go:117] "RemoveContainer" containerID="6de11959c90a5cfbeca69efc88d3c21904e93473ebaeb4c8af59fc4d0cb196b9" Jan 20 17:19:05 crc kubenswrapper[4558]: I0120 17:19:05.045475 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/placement-74b74897d6-9wxhf" Jan 20 17:19:05 crc kubenswrapper[4558]: I0120 17:19:05.046223 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/placement-74b74897d6-9wxhf" Jan 20 17:19:05 crc kubenswrapper[4558]: I0120 17:19:05.488238 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/placement-6b7cbc9658-z4t27" Jan 20 17:19:05 crc kubenswrapper[4558]: I0120 17:19:05.519246 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/placement-6b7cbc9658-z4t27" Jan 20 17:19:05 crc kubenswrapper[4558]: I0120 17:19:05.571907 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-74b74897d6-9wxhf"] Jan 20 17:19:06 crc kubenswrapper[4558]: I0120 17:19:06.515111 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/placement-74b74897d6-9wxhf" podUID="7adfb76f-c71c-4038-8b25-346511ef09d5" containerName="placement-log" containerID="cri-o://b77009660854d3de3a199377eaa154471285aa6e3f681e052addcc597f3388dd" gracePeriod=30 Jan 20 17:19:06 crc kubenswrapper[4558]: I0120 17:19:06.515283 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/placement-74b74897d6-9wxhf" podUID="7adfb76f-c71c-4038-8b25-346511ef09d5" containerName="placement-api" containerID="cri-o://aea56698e7541c06ec49388659b311c8f5d6544e3ff75a7b5df928dfa78d9344" gracePeriod=30 Jan 20 17:19:07 crc kubenswrapper[4558]: I0120 17:19:07.346548 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:07 crc kubenswrapper[4558]: I0120 17:19:07.525803 4558 generic.go:334] "Generic (PLEG): container finished" podID="7adfb76f-c71c-4038-8b25-346511ef09d5" containerID="b77009660854d3de3a199377eaa154471285aa6e3f681e052addcc597f3388dd" exitCode=143 Jan 20 17:19:07 crc kubenswrapper[4558]: I0120 17:19:07.525854 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-74b74897d6-9wxhf" event={"ID":"7adfb76f-c71c-4038-8b25-346511ef09d5","Type":"ContainerDied","Data":"b77009660854d3de3a199377eaa154471285aa6e3f681e052addcc597f3388dd"} Jan 20 17:19:10 crc kubenswrapper[4558]: I0120 17:19:10.072611 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-74b74897d6-9wxhf" Jan 20 17:19:10 crc kubenswrapper[4558]: I0120 17:19:10.128190 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7adfb76f-c71c-4038-8b25-346511ef09d5-config-data\") pod \"7adfb76f-c71c-4038-8b25-346511ef09d5\" (UID: \"7adfb76f-c71c-4038-8b25-346511ef09d5\") " Jan 20 17:19:10 crc kubenswrapper[4558]: I0120 17:19:10.128308 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4622k\" (UniqueName: \"kubernetes.io/projected/7adfb76f-c71c-4038-8b25-346511ef09d5-kube-api-access-4622k\") pod \"7adfb76f-c71c-4038-8b25-346511ef09d5\" (UID: \"7adfb76f-c71c-4038-8b25-346511ef09d5\") " Jan 20 17:19:10 crc kubenswrapper[4558]: I0120 17:19:10.128350 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7adfb76f-c71c-4038-8b25-346511ef09d5-combined-ca-bundle\") pod \"7adfb76f-c71c-4038-8b25-346511ef09d5\" (UID: \"7adfb76f-c71c-4038-8b25-346511ef09d5\") " Jan 20 17:19:10 crc kubenswrapper[4558]: I0120 17:19:10.128387 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7adfb76f-c71c-4038-8b25-346511ef09d5-scripts\") pod \"7adfb76f-c71c-4038-8b25-346511ef09d5\" (UID: \"7adfb76f-c71c-4038-8b25-346511ef09d5\") " Jan 20 17:19:10 crc kubenswrapper[4558]: I0120 17:19:10.128425 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7adfb76f-c71c-4038-8b25-346511ef09d5-logs\") pod \"7adfb76f-c71c-4038-8b25-346511ef09d5\" (UID: \"7adfb76f-c71c-4038-8b25-346511ef09d5\") " Jan 20 17:19:10 crc kubenswrapper[4558]: I0120 17:19:10.128913 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7adfb76f-c71c-4038-8b25-346511ef09d5-logs" (OuterVolumeSpecName: "logs") pod "7adfb76f-c71c-4038-8b25-346511ef09d5" (UID: "7adfb76f-c71c-4038-8b25-346511ef09d5"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:19:10 crc kubenswrapper[4558]: I0120 17:19:10.129380 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7adfb76f-c71c-4038-8b25-346511ef09d5-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:19:10 crc kubenswrapper[4558]: I0120 17:19:10.136256 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7adfb76f-c71c-4038-8b25-346511ef09d5-scripts" (OuterVolumeSpecName: "scripts") pod "7adfb76f-c71c-4038-8b25-346511ef09d5" (UID: "7adfb76f-c71c-4038-8b25-346511ef09d5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:19:10 crc kubenswrapper[4558]: I0120 17:19:10.136558 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7adfb76f-c71c-4038-8b25-346511ef09d5-kube-api-access-4622k" (OuterVolumeSpecName: "kube-api-access-4622k") pod "7adfb76f-c71c-4038-8b25-346511ef09d5" (UID: "7adfb76f-c71c-4038-8b25-346511ef09d5"). InnerVolumeSpecName "kube-api-access-4622k". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:19:10 crc kubenswrapper[4558]: I0120 17:19:10.169083 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7adfb76f-c71c-4038-8b25-346511ef09d5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7adfb76f-c71c-4038-8b25-346511ef09d5" (UID: "7adfb76f-c71c-4038-8b25-346511ef09d5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:19:10 crc kubenswrapper[4558]: I0120 17:19:10.170889 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7adfb76f-c71c-4038-8b25-346511ef09d5-config-data" (OuterVolumeSpecName: "config-data") pod "7adfb76f-c71c-4038-8b25-346511ef09d5" (UID: "7adfb76f-c71c-4038-8b25-346511ef09d5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:19:10 crc kubenswrapper[4558]: I0120 17:19:10.231732 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7adfb76f-c71c-4038-8b25-346511ef09d5-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:19:10 crc kubenswrapper[4558]: I0120 17:19:10.231755 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7adfb76f-c71c-4038-8b25-346511ef09d5-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:19:10 crc kubenswrapper[4558]: I0120 17:19:10.231767 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4622k\" (UniqueName: \"kubernetes.io/projected/7adfb76f-c71c-4038-8b25-346511ef09d5-kube-api-access-4622k\") on node \"crc\" DevicePath \"\"" Jan 20 17:19:10 crc kubenswrapper[4558]: I0120 17:19:10.231778 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7adfb76f-c71c-4038-8b25-346511ef09d5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:19:10 crc kubenswrapper[4558]: I0120 17:19:10.559743 4558 generic.go:334] "Generic (PLEG): container finished" podID="7adfb76f-c71c-4038-8b25-346511ef09d5" containerID="aea56698e7541c06ec49388659b311c8f5d6544e3ff75a7b5df928dfa78d9344" exitCode=0 Jan 20 17:19:10 crc kubenswrapper[4558]: I0120 17:19:10.559805 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-74b74897d6-9wxhf" event={"ID":"7adfb76f-c71c-4038-8b25-346511ef09d5","Type":"ContainerDied","Data":"aea56698e7541c06ec49388659b311c8f5d6544e3ff75a7b5df928dfa78d9344"} Jan 20 17:19:10 crc kubenswrapper[4558]: I0120 17:19:10.559844 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-74b74897d6-9wxhf" event={"ID":"7adfb76f-c71c-4038-8b25-346511ef09d5","Type":"ContainerDied","Data":"cd27dda48d1a8254712f52b2254a78f0300e79619727ffc67514404c74f610b8"} Jan 20 17:19:10 crc kubenswrapper[4558]: I0120 17:19:10.559863 4558 scope.go:117] "RemoveContainer" containerID="aea56698e7541c06ec49388659b311c8f5d6544e3ff75a7b5df928dfa78d9344" Jan 20 17:19:10 crc kubenswrapper[4558]: I0120 17:19:10.560053 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-74b74897d6-9wxhf" Jan 20 17:19:10 crc kubenswrapper[4558]: I0120 17:19:10.580084 4558 scope.go:117] "RemoveContainer" containerID="b77009660854d3de3a199377eaa154471285aa6e3f681e052addcc597f3388dd" Jan 20 17:19:10 crc kubenswrapper[4558]: I0120 17:19:10.595973 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-74b74897d6-9wxhf"] Jan 20 17:19:10 crc kubenswrapper[4558]: I0120 17:19:10.603180 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/placement-74b74897d6-9wxhf"] Jan 20 17:19:10 crc kubenswrapper[4558]: I0120 17:19:10.604606 4558 scope.go:117] "RemoveContainer" containerID="aea56698e7541c06ec49388659b311c8f5d6544e3ff75a7b5df928dfa78d9344" Jan 20 17:19:10 crc kubenswrapper[4558]: E0120 17:19:10.604941 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aea56698e7541c06ec49388659b311c8f5d6544e3ff75a7b5df928dfa78d9344\": container with ID starting with aea56698e7541c06ec49388659b311c8f5d6544e3ff75a7b5df928dfa78d9344 not found: ID does not exist" containerID="aea56698e7541c06ec49388659b311c8f5d6544e3ff75a7b5df928dfa78d9344" Jan 20 17:19:10 crc kubenswrapper[4558]: I0120 17:19:10.604992 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aea56698e7541c06ec49388659b311c8f5d6544e3ff75a7b5df928dfa78d9344"} err="failed to get container status \"aea56698e7541c06ec49388659b311c8f5d6544e3ff75a7b5df928dfa78d9344\": rpc error: code = NotFound desc = could not find container \"aea56698e7541c06ec49388659b311c8f5d6544e3ff75a7b5df928dfa78d9344\": container with ID starting with aea56698e7541c06ec49388659b311c8f5d6544e3ff75a7b5df928dfa78d9344 not found: ID does not exist" Jan 20 17:19:10 crc kubenswrapper[4558]: I0120 17:19:10.605019 4558 scope.go:117] "RemoveContainer" containerID="b77009660854d3de3a199377eaa154471285aa6e3f681e052addcc597f3388dd" Jan 20 17:19:10 crc kubenswrapper[4558]: E0120 17:19:10.605380 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b77009660854d3de3a199377eaa154471285aa6e3f681e052addcc597f3388dd\": container with ID starting with b77009660854d3de3a199377eaa154471285aa6e3f681e052addcc597f3388dd not found: ID does not exist" containerID="b77009660854d3de3a199377eaa154471285aa6e3f681e052addcc597f3388dd" Jan 20 17:19:10 crc kubenswrapper[4558]: I0120 17:19:10.605433 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b77009660854d3de3a199377eaa154471285aa6e3f681e052addcc597f3388dd"} err="failed to get container status \"b77009660854d3de3a199377eaa154471285aa6e3f681e052addcc597f3388dd\": rpc error: code = NotFound desc = could not find container \"b77009660854d3de3a199377eaa154471285aa6e3f681e052addcc597f3388dd\": container with ID starting with b77009660854d3de3a199377eaa154471285aa6e3f681e052addcc597f3388dd not found: ID does not exist" Jan 20 17:19:12 crc kubenswrapper[4558]: I0120 17:19:12.580289 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7adfb76f-c71c-4038-8b25-346511ef09d5" path="/var/lib/kubelet/pods/7adfb76f-c71c-4038-8b25-346511ef09d5/volumes" Jan 20 17:19:13 crc kubenswrapper[4558]: I0120 17:19:13.915457 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/keystone-5585fd7cd8-gk5qt" Jan 20 17:19:14 crc 
kubenswrapper[4558]: I0120 17:19:14.730788 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/neutron-86cc899657-ftskx" Jan 20 17:19:15 crc kubenswrapper[4558]: I0120 17:19:15.370898 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:19:15 crc kubenswrapper[4558]: E0120 17:19:15.371300 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47c6e50e-b277-48d8-928d-f0d571acf238" containerName="barbican-api-log" Jan 20 17:19:15 crc kubenswrapper[4558]: I0120 17:19:15.371313 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="47c6e50e-b277-48d8-928d-f0d571acf238" containerName="barbican-api-log" Jan 20 17:19:15 crc kubenswrapper[4558]: E0120 17:19:15.371335 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7adfb76f-c71c-4038-8b25-346511ef09d5" containerName="placement-log" Jan 20 17:19:15 crc kubenswrapper[4558]: I0120 17:19:15.371341 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7adfb76f-c71c-4038-8b25-346511ef09d5" containerName="placement-log" Jan 20 17:19:15 crc kubenswrapper[4558]: E0120 17:19:15.371355 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7adfb76f-c71c-4038-8b25-346511ef09d5" containerName="placement-api" Jan 20 17:19:15 crc kubenswrapper[4558]: I0120 17:19:15.371361 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7adfb76f-c71c-4038-8b25-346511ef09d5" containerName="placement-api" Jan 20 17:19:15 crc kubenswrapper[4558]: E0120 17:19:15.371375 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6bc4e0a0-7c1f-4b44-8724-dd6fa3f41d1d" containerName="extract-content" Jan 20 17:19:15 crc kubenswrapper[4558]: I0120 17:19:15.371380 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6bc4e0a0-7c1f-4b44-8724-dd6fa3f41d1d" containerName="extract-content" Jan 20 17:19:15 crc kubenswrapper[4558]: E0120 17:19:15.371392 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6bc4e0a0-7c1f-4b44-8724-dd6fa3f41d1d" containerName="registry-server" Jan 20 17:19:15 crc kubenswrapper[4558]: I0120 17:19:15.371397 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6bc4e0a0-7c1f-4b44-8724-dd6fa3f41d1d" containerName="registry-server" Jan 20 17:19:15 crc kubenswrapper[4558]: E0120 17:19:15.371405 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6bc4e0a0-7c1f-4b44-8724-dd6fa3f41d1d" containerName="extract-utilities" Jan 20 17:19:15 crc kubenswrapper[4558]: I0120 17:19:15.371411 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6bc4e0a0-7c1f-4b44-8724-dd6fa3f41d1d" containerName="extract-utilities" Jan 20 17:19:15 crc kubenswrapper[4558]: E0120 17:19:15.371424 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47c6e50e-b277-48d8-928d-f0d571acf238" containerName="barbican-api" Jan 20 17:19:15 crc kubenswrapper[4558]: I0120 17:19:15.371431 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="47c6e50e-b277-48d8-928d-f0d571acf238" containerName="barbican-api" Jan 20 17:19:15 crc kubenswrapper[4558]: I0120 17:19:15.371607 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="47c6e50e-b277-48d8-928d-f0d571acf238" containerName="barbican-api" Jan 20 17:19:15 crc kubenswrapper[4558]: I0120 17:19:15.371623 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="47c6e50e-b277-48d8-928d-f0d571acf238" containerName="barbican-api-log" Jan 20 17:19:15 crc kubenswrapper[4558]: I0120 
17:19:15.371637 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="7adfb76f-c71c-4038-8b25-346511ef09d5" containerName="placement-api" Jan 20 17:19:15 crc kubenswrapper[4558]: I0120 17:19:15.371647 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="7adfb76f-c71c-4038-8b25-346511ef09d5" containerName="placement-log" Jan 20 17:19:15 crc kubenswrapper[4558]: I0120 17:19:15.371659 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="6bc4e0a0-7c1f-4b44-8724-dd6fa3f41d1d" containerName="registry-server" Jan 20 17:19:15 crc kubenswrapper[4558]: I0120 17:19:15.372366 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstackclient" Jan 20 17:19:15 crc kubenswrapper[4558]: I0120 17:19:15.374008 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"openstack-config-secret" Jan 20 17:19:15 crc kubenswrapper[4558]: I0120 17:19:15.374304 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-config" Jan 20 17:19:15 crc kubenswrapper[4558]: I0120 17:19:15.375306 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"openstackclient-openstackclient-dockercfg-qxm58" Jan 20 17:19:15 crc kubenswrapper[4558]: I0120 17:19:15.380012 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:19:15 crc kubenswrapper[4558]: I0120 17:19:15.403660 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:19:15 crc kubenswrapper[4558]: E0120 17:19:15.404712 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[combined-ca-bundle kube-api-access-gqk6l openstack-config openstack-config-secret], unattached volumes=[], failed to process volumes=[combined-ca-bundle kube-api-access-gqk6l openstack-config openstack-config-secret]: context canceled" pod="openstack-kuttl-tests/openstackclient" podUID="bb3eea94-340a-4efe-92a6-6d1c4a2f1488" Jan 20 17:19:15 crc kubenswrapper[4558]: I0120 17:19:15.411415 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:19:15 crc kubenswrapper[4558]: I0120 17:19:15.427759 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:19:15 crc kubenswrapper[4558]: I0120 17:19:15.429021 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstackclient" Jan 20 17:19:15 crc kubenswrapper[4558]: I0120 17:19:15.433701 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:19:15 crc kubenswrapper[4558]: I0120 17:19:15.437536 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/7490595f-9782-44f0-bfb1-811f2f1d65e3-openstack-config-secret\") pod \"openstackclient\" (UID: \"7490595f-9782-44f0-bfb1-811f2f1d65e3\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:19:15 crc kubenswrapper[4558]: I0120 17:19:15.437617 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/7490595f-9782-44f0-bfb1-811f2f1d65e3-openstack-config\") pod \"openstackclient\" (UID: \"7490595f-9782-44f0-bfb1-811f2f1d65e3\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:19:15 crc kubenswrapper[4558]: I0120 17:19:15.437661 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7490595f-9782-44f0-bfb1-811f2f1d65e3-combined-ca-bundle\") pod \"openstackclient\" (UID: \"7490595f-9782-44f0-bfb1-811f2f1d65e3\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:19:15 crc kubenswrapper[4558]: I0120 17:19:15.437696 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rw8cj\" (UniqueName: \"kubernetes.io/projected/7490595f-9782-44f0-bfb1-811f2f1d65e3-kube-api-access-rw8cj\") pod \"openstackclient\" (UID: \"7490595f-9782-44f0-bfb1-811f2f1d65e3\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:19:15 crc kubenswrapper[4558]: I0120 17:19:15.540135 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/7490595f-9782-44f0-bfb1-811f2f1d65e3-openstack-config-secret\") pod \"openstackclient\" (UID: \"7490595f-9782-44f0-bfb1-811f2f1d65e3\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:19:15 crc kubenswrapper[4558]: I0120 17:19:15.540232 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/7490595f-9782-44f0-bfb1-811f2f1d65e3-openstack-config\") pod \"openstackclient\" (UID: \"7490595f-9782-44f0-bfb1-811f2f1d65e3\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:19:15 crc kubenswrapper[4558]: I0120 17:19:15.540282 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7490595f-9782-44f0-bfb1-811f2f1d65e3-combined-ca-bundle\") pod \"openstackclient\" (UID: \"7490595f-9782-44f0-bfb1-811f2f1d65e3\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:19:15 crc kubenswrapper[4558]: I0120 17:19:15.540317 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rw8cj\" (UniqueName: \"kubernetes.io/projected/7490595f-9782-44f0-bfb1-811f2f1d65e3-kube-api-access-rw8cj\") pod \"openstackclient\" (UID: \"7490595f-9782-44f0-bfb1-811f2f1d65e3\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:19:15 crc kubenswrapper[4558]: I0120 17:19:15.542259 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: 
\"kubernetes.io/configmap/7490595f-9782-44f0-bfb1-811f2f1d65e3-openstack-config\") pod \"openstackclient\" (UID: \"7490595f-9782-44f0-bfb1-811f2f1d65e3\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:19:15 crc kubenswrapper[4558]: I0120 17:19:15.547444 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7490595f-9782-44f0-bfb1-811f2f1d65e3-combined-ca-bundle\") pod \"openstackclient\" (UID: \"7490595f-9782-44f0-bfb1-811f2f1d65e3\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:19:15 crc kubenswrapper[4558]: I0120 17:19:15.549459 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/7490595f-9782-44f0-bfb1-811f2f1d65e3-openstack-config-secret\") pod \"openstackclient\" (UID: \"7490595f-9782-44f0-bfb1-811f2f1d65e3\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:19:15 crc kubenswrapper[4558]: I0120 17:19:15.557418 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rw8cj\" (UniqueName: \"kubernetes.io/projected/7490595f-9782-44f0-bfb1-811f2f1d65e3-kube-api-access-rw8cj\") pod \"openstackclient\" (UID: \"7490595f-9782-44f0-bfb1-811f2f1d65e3\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:19:15 crc kubenswrapper[4558]: I0120 17:19:15.567797 4558 scope.go:117] "RemoveContainer" containerID="0ffce44366a8b4466427b682fb41640425366a0f4449210959148de9aef695c6" Jan 20 17:19:15 crc kubenswrapper[4558]: E0120 17:19:15.568090 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:19:15 crc kubenswrapper[4558]: I0120 17:19:15.615494 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstackclient" Jan 20 17:19:15 crc kubenswrapper[4558]: I0120 17:19:15.618439 4558 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack-kuttl-tests/openstackclient" oldPodUID="bb3eea94-340a-4efe-92a6-6d1c4a2f1488" podUID="7490595f-9782-44f0-bfb1-811f2f1d65e3" Jan 20 17:19:15 crc kubenswrapper[4558]: I0120 17:19:15.635562 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstackclient" Jan 20 17:19:15 crc kubenswrapper[4558]: I0120 17:19:15.748798 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstackclient" Jan 20 17:19:16 crc kubenswrapper[4558]: I0120 17:19:16.169115 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:19:16 crc kubenswrapper[4558]: I0120 17:19:16.577453 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bb3eea94-340a-4efe-92a6-6d1c4a2f1488" path="/var/lib/kubelet/pods/bb3eea94-340a-4efe-92a6-6d1c4a2f1488/volumes" Jan 20 17:19:16 crc kubenswrapper[4558]: I0120 17:19:16.623059 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstackclient" Jan 20 17:19:16 crc kubenswrapper[4558]: I0120 17:19:16.623887 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstackclient" event={"ID":"7490595f-9782-44f0-bfb1-811f2f1d65e3","Type":"ContainerStarted","Data":"077461e7717ab6966108b72a1527abc834132888244fc51cc68cc84642df8e4e"} Jan 20 17:19:16 crc kubenswrapper[4558]: I0120 17:19:16.623925 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstackclient" event={"ID":"7490595f-9782-44f0-bfb1-811f2f1d65e3","Type":"ContainerStarted","Data":"967efddfcdd95bb1617db47381253d13d55004bd9d1a16e461bc402763f198fb"} Jan 20 17:19:16 crc kubenswrapper[4558]: I0120 17:19:16.637999 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/openstackclient" podStartSLOduration=1.6379893920000002 podStartE2EDuration="1.637989392s" podCreationTimestamp="2026-01-20 17:19:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:19:16.634609636 +0000 UTC m=+2250.394947602" watchObservedRunningTime="2026-01-20 17:19:16.637989392 +0000 UTC m=+2250.398327359" Jan 20 17:19:16 crc kubenswrapper[4558]: I0120 17:19:16.638700 4558 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack-kuttl-tests/openstackclient" oldPodUID="bb3eea94-340a-4efe-92a6-6d1c4a2f1488" podUID="7490595f-9782-44f0-bfb1-811f2f1d65e3" Jan 20 17:19:18 crc kubenswrapper[4558]: I0120 17:19:18.533767 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/swift-proxy-85c8dfdd74-sjsz2"] Jan 20 17:19:18 crc kubenswrapper[4558]: I0120 17:19:18.537192 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/swift-proxy-85c8dfdd74-sjsz2" Jan 20 17:19:18 crc kubenswrapper[4558]: I0120 17:19:18.552763 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-swift-public-svc" Jan 20 17:19:18 crc kubenswrapper[4558]: I0120 17:19:18.552803 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"swift-proxy-config-data" Jan 20 17:19:18 crc kubenswrapper[4558]: I0120 17:19:18.553845 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-swift-internal-svc" Jan 20 17:19:18 crc kubenswrapper[4558]: I0120 17:19:18.562092 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-proxy-85c8dfdd74-sjsz2"] Jan 20 17:19:18 crc kubenswrapper[4558]: I0120 17:19:18.589796 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/neutron-5c7bf5978d-d2t27" Jan 20 17:19:18 crc kubenswrapper[4558]: I0120 17:19:18.615608 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9d974489-32f7-4541-9a58-6a215c5d2071-run-httpd\") pod \"swift-proxy-85c8dfdd74-sjsz2\" (UID: \"9d974489-32f7-4541-9a58-6a215c5d2071\") " pod="openstack-kuttl-tests/swift-proxy-85c8dfdd74-sjsz2" Jan 20 17:19:18 crc kubenswrapper[4558]: I0120 17:19:18.615685 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d974489-32f7-4541-9a58-6a215c5d2071-config-data\") pod \"swift-proxy-85c8dfdd74-sjsz2\" (UID: \"9d974489-32f7-4541-9a58-6a215c5d2071\") " pod="openstack-kuttl-tests/swift-proxy-85c8dfdd74-sjsz2" Jan 20 17:19:18 crc kubenswrapper[4558]: I0120 17:19:18.615719 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/9d974489-32f7-4541-9a58-6a215c5d2071-etc-swift\") pod \"swift-proxy-85c8dfdd74-sjsz2\" (UID: \"9d974489-32f7-4541-9a58-6a215c5d2071\") " pod="openstack-kuttl-tests/swift-proxy-85c8dfdd74-sjsz2" Jan 20 17:19:18 crc kubenswrapper[4558]: I0120 17:19:18.615785 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9d974489-32f7-4541-9a58-6a215c5d2071-public-tls-certs\") pod \"swift-proxy-85c8dfdd74-sjsz2\" (UID: \"9d974489-32f7-4541-9a58-6a215c5d2071\") " pod="openstack-kuttl-tests/swift-proxy-85c8dfdd74-sjsz2" Jan 20 17:19:18 crc kubenswrapper[4558]: I0120 17:19:18.616643 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d974489-32f7-4541-9a58-6a215c5d2071-combined-ca-bundle\") pod \"swift-proxy-85c8dfdd74-sjsz2\" (UID: \"9d974489-32f7-4541-9a58-6a215c5d2071\") " pod="openstack-kuttl-tests/swift-proxy-85c8dfdd74-sjsz2" Jan 20 17:19:18 crc kubenswrapper[4558]: I0120 17:19:18.616762 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gm7bd\" (UniqueName: \"kubernetes.io/projected/9d974489-32f7-4541-9a58-6a215c5d2071-kube-api-access-gm7bd\") pod \"swift-proxy-85c8dfdd74-sjsz2\" (UID: \"9d974489-32f7-4541-9a58-6a215c5d2071\") " pod="openstack-kuttl-tests/swift-proxy-85c8dfdd74-sjsz2" Jan 20 17:19:18 crc kubenswrapper[4558]: I0120 
17:19:18.616828 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9d974489-32f7-4541-9a58-6a215c5d2071-log-httpd\") pod \"swift-proxy-85c8dfdd74-sjsz2\" (UID: \"9d974489-32f7-4541-9a58-6a215c5d2071\") " pod="openstack-kuttl-tests/swift-proxy-85c8dfdd74-sjsz2" Jan 20 17:19:18 crc kubenswrapper[4558]: I0120 17:19:18.616884 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9d974489-32f7-4541-9a58-6a215c5d2071-internal-tls-certs\") pod \"swift-proxy-85c8dfdd74-sjsz2\" (UID: \"9d974489-32f7-4541-9a58-6a215c5d2071\") " pod="openstack-kuttl-tests/swift-proxy-85c8dfdd74-sjsz2" Jan 20 17:19:18 crc kubenswrapper[4558]: I0120 17:19:18.643039 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-86cc899657-ftskx"] Jan 20 17:19:18 crc kubenswrapper[4558]: I0120 17:19:18.643300 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-86cc899657-ftskx" podUID="4b6b8c27-bb47-4423-92c4-70306ec7ef9e" containerName="neutron-api" containerID="cri-o://e1afb75acea050bdb63b13351b8a84c1d2fac666461bccd1cbaea618b932d880" gracePeriod=30 Jan 20 17:19:18 crc kubenswrapper[4558]: I0120 17:19:18.643449 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-86cc899657-ftskx" podUID="4b6b8c27-bb47-4423-92c4-70306ec7ef9e" containerName="neutron-httpd" containerID="cri-o://d58b041275b1b3dbc0399d88808986db01f1af4ee195d068ab83c28f4898a194" gracePeriod=30 Jan 20 17:19:18 crc kubenswrapper[4558]: I0120 17:19:18.718943 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9d974489-32f7-4541-9a58-6a215c5d2071-run-httpd\") pod \"swift-proxy-85c8dfdd74-sjsz2\" (UID: \"9d974489-32f7-4541-9a58-6a215c5d2071\") " pod="openstack-kuttl-tests/swift-proxy-85c8dfdd74-sjsz2" Jan 20 17:19:18 crc kubenswrapper[4558]: I0120 17:19:18.719021 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d974489-32f7-4541-9a58-6a215c5d2071-config-data\") pod \"swift-proxy-85c8dfdd74-sjsz2\" (UID: \"9d974489-32f7-4541-9a58-6a215c5d2071\") " pod="openstack-kuttl-tests/swift-proxy-85c8dfdd74-sjsz2" Jan 20 17:19:18 crc kubenswrapper[4558]: I0120 17:19:18.719052 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/9d974489-32f7-4541-9a58-6a215c5d2071-etc-swift\") pod \"swift-proxy-85c8dfdd74-sjsz2\" (UID: \"9d974489-32f7-4541-9a58-6a215c5d2071\") " pod="openstack-kuttl-tests/swift-proxy-85c8dfdd74-sjsz2" Jan 20 17:19:18 crc kubenswrapper[4558]: I0120 17:19:18.719101 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9d974489-32f7-4541-9a58-6a215c5d2071-public-tls-certs\") pod \"swift-proxy-85c8dfdd74-sjsz2\" (UID: \"9d974489-32f7-4541-9a58-6a215c5d2071\") " pod="openstack-kuttl-tests/swift-proxy-85c8dfdd74-sjsz2" Jan 20 17:19:18 crc kubenswrapper[4558]: I0120 17:19:18.719130 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d974489-32f7-4541-9a58-6a215c5d2071-combined-ca-bundle\") pod 
\"swift-proxy-85c8dfdd74-sjsz2\" (UID: \"9d974489-32f7-4541-9a58-6a215c5d2071\") " pod="openstack-kuttl-tests/swift-proxy-85c8dfdd74-sjsz2" Jan 20 17:19:18 crc kubenswrapper[4558]: I0120 17:19:18.719199 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gm7bd\" (UniqueName: \"kubernetes.io/projected/9d974489-32f7-4541-9a58-6a215c5d2071-kube-api-access-gm7bd\") pod \"swift-proxy-85c8dfdd74-sjsz2\" (UID: \"9d974489-32f7-4541-9a58-6a215c5d2071\") " pod="openstack-kuttl-tests/swift-proxy-85c8dfdd74-sjsz2" Jan 20 17:19:18 crc kubenswrapper[4558]: I0120 17:19:18.719239 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9d974489-32f7-4541-9a58-6a215c5d2071-log-httpd\") pod \"swift-proxy-85c8dfdd74-sjsz2\" (UID: \"9d974489-32f7-4541-9a58-6a215c5d2071\") " pod="openstack-kuttl-tests/swift-proxy-85c8dfdd74-sjsz2" Jan 20 17:19:18 crc kubenswrapper[4558]: I0120 17:19:18.719286 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9d974489-32f7-4541-9a58-6a215c5d2071-internal-tls-certs\") pod \"swift-proxy-85c8dfdd74-sjsz2\" (UID: \"9d974489-32f7-4541-9a58-6a215c5d2071\") " pod="openstack-kuttl-tests/swift-proxy-85c8dfdd74-sjsz2" Jan 20 17:19:18 crc kubenswrapper[4558]: I0120 17:19:18.720066 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9d974489-32f7-4541-9a58-6a215c5d2071-log-httpd\") pod \"swift-proxy-85c8dfdd74-sjsz2\" (UID: \"9d974489-32f7-4541-9a58-6a215c5d2071\") " pod="openstack-kuttl-tests/swift-proxy-85c8dfdd74-sjsz2" Jan 20 17:19:18 crc kubenswrapper[4558]: I0120 17:19:18.720428 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9d974489-32f7-4541-9a58-6a215c5d2071-run-httpd\") pod \"swift-proxy-85c8dfdd74-sjsz2\" (UID: \"9d974489-32f7-4541-9a58-6a215c5d2071\") " pod="openstack-kuttl-tests/swift-proxy-85c8dfdd74-sjsz2" Jan 20 17:19:18 crc kubenswrapper[4558]: I0120 17:19:18.725010 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9d974489-32f7-4541-9a58-6a215c5d2071-public-tls-certs\") pod \"swift-proxy-85c8dfdd74-sjsz2\" (UID: \"9d974489-32f7-4541-9a58-6a215c5d2071\") " pod="openstack-kuttl-tests/swift-proxy-85c8dfdd74-sjsz2" Jan 20 17:19:18 crc kubenswrapper[4558]: I0120 17:19:18.725493 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9d974489-32f7-4541-9a58-6a215c5d2071-internal-tls-certs\") pod \"swift-proxy-85c8dfdd74-sjsz2\" (UID: \"9d974489-32f7-4541-9a58-6a215c5d2071\") " pod="openstack-kuttl-tests/swift-proxy-85c8dfdd74-sjsz2" Jan 20 17:19:18 crc kubenswrapper[4558]: I0120 17:19:18.725777 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d974489-32f7-4541-9a58-6a215c5d2071-combined-ca-bundle\") pod \"swift-proxy-85c8dfdd74-sjsz2\" (UID: \"9d974489-32f7-4541-9a58-6a215c5d2071\") " pod="openstack-kuttl-tests/swift-proxy-85c8dfdd74-sjsz2" Jan 20 17:19:18 crc kubenswrapper[4558]: I0120 17:19:18.727063 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d974489-32f7-4541-9a58-6a215c5d2071-config-data\") pod 
\"swift-proxy-85c8dfdd74-sjsz2\" (UID: \"9d974489-32f7-4541-9a58-6a215c5d2071\") " pod="openstack-kuttl-tests/swift-proxy-85c8dfdd74-sjsz2" Jan 20 17:19:18 crc kubenswrapper[4558]: I0120 17:19:18.733558 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gm7bd\" (UniqueName: \"kubernetes.io/projected/9d974489-32f7-4541-9a58-6a215c5d2071-kube-api-access-gm7bd\") pod \"swift-proxy-85c8dfdd74-sjsz2\" (UID: \"9d974489-32f7-4541-9a58-6a215c5d2071\") " pod="openstack-kuttl-tests/swift-proxy-85c8dfdd74-sjsz2" Jan 20 17:19:18 crc kubenswrapper[4558]: I0120 17:19:18.734191 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/9d974489-32f7-4541-9a58-6a215c5d2071-etc-swift\") pod \"swift-proxy-85c8dfdd74-sjsz2\" (UID: \"9d974489-32f7-4541-9a58-6a215c5d2071\") " pod="openstack-kuttl-tests/swift-proxy-85c8dfdd74-sjsz2" Jan 20 17:19:18 crc kubenswrapper[4558]: I0120 17:19:18.852708 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-proxy-85c8dfdd74-sjsz2" Jan 20 17:19:19 crc kubenswrapper[4558]: W0120 17:19:19.306870 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9d974489_32f7_4541_9a58_6a215c5d2071.slice/crio-d8c405b29a445cc4fb8281d7777849ac6bf770a4845bc92a7f21671be550d05b WatchSource:0}: Error finding container d8c405b29a445cc4fb8281d7777849ac6bf770a4845bc92a7f21671be550d05b: Status 404 returned error can't find the container with id d8c405b29a445cc4fb8281d7777849ac6bf770a4845bc92a7f21671be550d05b Jan 20 17:19:19 crc kubenswrapper[4558]: I0120 17:19:19.308402 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-proxy-85c8dfdd74-sjsz2"] Jan 20 17:19:19 crc kubenswrapper[4558]: I0120 17:19:19.663028 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-85c8dfdd74-sjsz2" event={"ID":"9d974489-32f7-4541-9a58-6a215c5d2071","Type":"ContainerStarted","Data":"cfbe93ac18d993e620cd8106e1b690070a764b00d594313c60a12f9f0b8d2527"} Jan 20 17:19:19 crc kubenswrapper[4558]: I0120 17:19:19.663081 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-85c8dfdd74-sjsz2" event={"ID":"9d974489-32f7-4541-9a58-6a215c5d2071","Type":"ContainerStarted","Data":"d8c405b29a445cc4fb8281d7777849ac6bf770a4845bc92a7f21671be550d05b"} Jan 20 17:19:19 crc kubenswrapper[4558]: I0120 17:19:19.665744 4558 generic.go:334] "Generic (PLEG): container finished" podID="4b6b8c27-bb47-4423-92c4-70306ec7ef9e" containerID="d58b041275b1b3dbc0399d88808986db01f1af4ee195d068ab83c28f4898a194" exitCode=0 Jan 20 17:19:19 crc kubenswrapper[4558]: I0120 17:19:19.665796 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-86cc899657-ftskx" event={"ID":"4b6b8c27-bb47-4423-92c4-70306ec7ef9e","Type":"ContainerDied","Data":"d58b041275b1b3dbc0399d88808986db01f1af4ee195d068ab83c28f4898a194"} Jan 20 17:19:19 crc kubenswrapper[4558]: I0120 17:19:19.709390 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:19:19 crc kubenswrapper[4558]: I0120 17:19:19.709775 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="b5e92730-f89e-4f1d-9a29-925c5acda961" containerName="ceilometer-central-agent" 
containerID="cri-o://9003fbf8af830e35a1d6485bbee760ae6d290f957bbb030215bddf31d0f8fc2c" gracePeriod=30 Jan 20 17:19:19 crc kubenswrapper[4558]: I0120 17:19:19.709852 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="b5e92730-f89e-4f1d-9a29-925c5acda961" containerName="sg-core" containerID="cri-o://b54c224c9ceb5230ae536b85978dfae73a97fbd2ecddcabd5d1b2615d78a8104" gracePeriod=30 Jan 20 17:19:19 crc kubenswrapper[4558]: I0120 17:19:19.709869 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="b5e92730-f89e-4f1d-9a29-925c5acda961" containerName="ceilometer-notification-agent" containerID="cri-o://d30f7131a9e2cd443f7a4870ac2230d1ab5b7a51a7a7439098ff9ccda4a3e25b" gracePeriod=30 Jan 20 17:19:19 crc kubenswrapper[4558]: I0120 17:19:19.709857 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="b5e92730-f89e-4f1d-9a29-925c5acda961" containerName="proxy-httpd" containerID="cri-o://05ca3f2636e110d0332674062bdecc1c997c9e6bac0b7a0e7421774ccd7af025" gracePeriod=30 Jan 20 17:19:20 crc kubenswrapper[4558]: I0120 17:19:20.161839 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-86cc899657-ftskx" Jan 20 17:19:20 crc kubenswrapper[4558]: I0120 17:19:20.356582 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/4b6b8c27-bb47-4423-92c4-70306ec7ef9e-httpd-config\") pod \"4b6b8c27-bb47-4423-92c4-70306ec7ef9e\" (UID: \"4b6b8c27-bb47-4423-92c4-70306ec7ef9e\") " Jan 20 17:19:20 crc kubenswrapper[4558]: I0120 17:19:20.357087 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/4b6b8c27-bb47-4423-92c4-70306ec7ef9e-ovndb-tls-certs\") pod \"4b6b8c27-bb47-4423-92c4-70306ec7ef9e\" (UID: \"4b6b8c27-bb47-4423-92c4-70306ec7ef9e\") " Jan 20 17:19:20 crc kubenswrapper[4558]: I0120 17:19:20.357430 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/4b6b8c27-bb47-4423-92c4-70306ec7ef9e-config\") pod \"4b6b8c27-bb47-4423-92c4-70306ec7ef9e\" (UID: \"4b6b8c27-bb47-4423-92c4-70306ec7ef9e\") " Jan 20 17:19:20 crc kubenswrapper[4558]: I0120 17:19:20.357614 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bvwng\" (UniqueName: \"kubernetes.io/projected/4b6b8c27-bb47-4423-92c4-70306ec7ef9e-kube-api-access-bvwng\") pod \"4b6b8c27-bb47-4423-92c4-70306ec7ef9e\" (UID: \"4b6b8c27-bb47-4423-92c4-70306ec7ef9e\") " Jan 20 17:19:20 crc kubenswrapper[4558]: I0120 17:19:20.357888 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b6b8c27-bb47-4423-92c4-70306ec7ef9e-combined-ca-bundle\") pod \"4b6b8c27-bb47-4423-92c4-70306ec7ef9e\" (UID: \"4b6b8c27-bb47-4423-92c4-70306ec7ef9e\") " Jan 20 17:19:20 crc kubenswrapper[4558]: I0120 17:19:20.363791 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4b6b8c27-bb47-4423-92c4-70306ec7ef9e-kube-api-access-bvwng" (OuterVolumeSpecName: "kube-api-access-bvwng") pod "4b6b8c27-bb47-4423-92c4-70306ec7ef9e" (UID: "4b6b8c27-bb47-4423-92c4-70306ec7ef9e"). InnerVolumeSpecName "kube-api-access-bvwng". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:19:20 crc kubenswrapper[4558]: I0120 17:19:20.363989 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4b6b8c27-bb47-4423-92c4-70306ec7ef9e-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "4b6b8c27-bb47-4423-92c4-70306ec7ef9e" (UID: "4b6b8c27-bb47-4423-92c4-70306ec7ef9e"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:19:20 crc kubenswrapper[4558]: I0120 17:19:20.411604 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4b6b8c27-bb47-4423-92c4-70306ec7ef9e-config" (OuterVolumeSpecName: "config") pod "4b6b8c27-bb47-4423-92c4-70306ec7ef9e" (UID: "4b6b8c27-bb47-4423-92c4-70306ec7ef9e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:19:20 crc kubenswrapper[4558]: I0120 17:19:20.423273 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4b6b8c27-bb47-4423-92c4-70306ec7ef9e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4b6b8c27-bb47-4423-92c4-70306ec7ef9e" (UID: "4b6b8c27-bb47-4423-92c4-70306ec7ef9e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:19:20 crc kubenswrapper[4558]: I0120 17:19:20.429105 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4b6b8c27-bb47-4423-92c4-70306ec7ef9e-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "4b6b8c27-bb47-4423-92c4-70306ec7ef9e" (UID: "4b6b8c27-bb47-4423-92c4-70306ec7ef9e"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:19:20 crc kubenswrapper[4558]: I0120 17:19:20.462035 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/4b6b8c27-bb47-4423-92c4-70306ec7ef9e-httpd-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:19:20 crc kubenswrapper[4558]: I0120 17:19:20.462067 4558 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/4b6b8c27-bb47-4423-92c4-70306ec7ef9e-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:19:20 crc kubenswrapper[4558]: I0120 17:19:20.462084 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/4b6b8c27-bb47-4423-92c4-70306ec7ef9e-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:19:20 crc kubenswrapper[4558]: I0120 17:19:20.462095 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bvwng\" (UniqueName: \"kubernetes.io/projected/4b6b8c27-bb47-4423-92c4-70306ec7ef9e-kube-api-access-bvwng\") on node \"crc\" DevicePath \"\"" Jan 20 17:19:20 crc kubenswrapper[4558]: I0120 17:19:20.462108 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b6b8c27-bb47-4423-92c4-70306ec7ef9e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:19:20 crc kubenswrapper[4558]: I0120 17:19:20.679327 4558 generic.go:334] "Generic (PLEG): container finished" podID="4b6b8c27-bb47-4423-92c4-70306ec7ef9e" containerID="e1afb75acea050bdb63b13351b8a84c1d2fac666461bccd1cbaea618b932d880" exitCode=0 Jan 20 17:19:20 crc kubenswrapper[4558]: I0120 17:19:20.680285 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-86cc899657-ftskx" 
event={"ID":"4b6b8c27-bb47-4423-92c4-70306ec7ef9e","Type":"ContainerDied","Data":"e1afb75acea050bdb63b13351b8a84c1d2fac666461bccd1cbaea618b932d880"} Jan 20 17:19:20 crc kubenswrapper[4558]: I0120 17:19:20.680367 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-86cc899657-ftskx" event={"ID":"4b6b8c27-bb47-4423-92c4-70306ec7ef9e","Type":"ContainerDied","Data":"0c2098a04697cc264fc23d7f9d0254d28b8e9e3cd5ecaa284c8d7ad393d63df4"} Jan 20 17:19:20 crc kubenswrapper[4558]: I0120 17:19:20.680428 4558 scope.go:117] "RemoveContainer" containerID="d58b041275b1b3dbc0399d88808986db01f1af4ee195d068ab83c28f4898a194" Jan 20 17:19:20 crc kubenswrapper[4558]: I0120 17:19:20.680613 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-86cc899657-ftskx" Jan 20 17:19:20 crc kubenswrapper[4558]: I0120 17:19:20.684318 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-85c8dfdd74-sjsz2" event={"ID":"9d974489-32f7-4541-9a58-6a215c5d2071","Type":"ContainerStarted","Data":"47bcce107135d29c478003906fbf3f85f03c5a969a0a67dbccdda34ff7841132"} Jan 20 17:19:20 crc kubenswrapper[4558]: I0120 17:19:20.684503 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/swift-proxy-85c8dfdd74-sjsz2" Jan 20 17:19:20 crc kubenswrapper[4558]: I0120 17:19:20.684717 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/swift-proxy-85c8dfdd74-sjsz2" Jan 20 17:19:20 crc kubenswrapper[4558]: I0120 17:19:20.687224 4558 generic.go:334] "Generic (PLEG): container finished" podID="b5e92730-f89e-4f1d-9a29-925c5acda961" containerID="05ca3f2636e110d0332674062bdecc1c997c9e6bac0b7a0e7421774ccd7af025" exitCode=0 Jan 20 17:19:20 crc kubenswrapper[4558]: I0120 17:19:20.687272 4558 generic.go:334] "Generic (PLEG): container finished" podID="b5e92730-f89e-4f1d-9a29-925c5acda961" containerID="b54c224c9ceb5230ae536b85978dfae73a97fbd2ecddcabd5d1b2615d78a8104" exitCode=2 Jan 20 17:19:20 crc kubenswrapper[4558]: I0120 17:19:20.687283 4558 generic.go:334] "Generic (PLEG): container finished" podID="b5e92730-f89e-4f1d-9a29-925c5acda961" containerID="9003fbf8af830e35a1d6485bbee760ae6d290f957bbb030215bddf31d0f8fc2c" exitCode=0 Jan 20 17:19:20 crc kubenswrapper[4558]: I0120 17:19:20.687302 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"b5e92730-f89e-4f1d-9a29-925c5acda961","Type":"ContainerDied","Data":"05ca3f2636e110d0332674062bdecc1c997c9e6bac0b7a0e7421774ccd7af025"} Jan 20 17:19:20 crc kubenswrapper[4558]: I0120 17:19:20.687324 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"b5e92730-f89e-4f1d-9a29-925c5acda961","Type":"ContainerDied","Data":"b54c224c9ceb5230ae536b85978dfae73a97fbd2ecddcabd5d1b2615d78a8104"} Jan 20 17:19:20 crc kubenswrapper[4558]: I0120 17:19:20.687357 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"b5e92730-f89e-4f1d-9a29-925c5acda961","Type":"ContainerDied","Data":"9003fbf8af830e35a1d6485bbee760ae6d290f957bbb030215bddf31d0f8fc2c"} Jan 20 17:19:20 crc kubenswrapper[4558]: I0120 17:19:20.705503 4558 scope.go:117] "RemoveContainer" containerID="e1afb75acea050bdb63b13351b8a84c1d2fac666461bccd1cbaea618b932d880" Jan 20 17:19:20 crc kubenswrapper[4558]: I0120 17:19:20.710070 4558 pod_startup_latency_tracker.go:104] "Observed pod startup 
duration" pod="openstack-kuttl-tests/swift-proxy-85c8dfdd74-sjsz2" podStartSLOduration=2.71004771 podStartE2EDuration="2.71004771s" podCreationTimestamp="2026-01-20 17:19:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:19:20.703780985 +0000 UTC m=+2254.464118952" watchObservedRunningTime="2026-01-20 17:19:20.71004771 +0000 UTC m=+2254.470385677" Jan 20 17:19:20 crc kubenswrapper[4558]: I0120 17:19:20.736408 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-86cc899657-ftskx"] Jan 20 17:19:20 crc kubenswrapper[4558]: I0120 17:19:20.737930 4558 scope.go:117] "RemoveContainer" containerID="d58b041275b1b3dbc0399d88808986db01f1af4ee195d068ab83c28f4898a194" Jan 20 17:19:20 crc kubenswrapper[4558]: E0120 17:19:20.738608 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d58b041275b1b3dbc0399d88808986db01f1af4ee195d068ab83c28f4898a194\": container with ID starting with d58b041275b1b3dbc0399d88808986db01f1af4ee195d068ab83c28f4898a194 not found: ID does not exist" containerID="d58b041275b1b3dbc0399d88808986db01f1af4ee195d068ab83c28f4898a194" Jan 20 17:19:20 crc kubenswrapper[4558]: I0120 17:19:20.738649 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d58b041275b1b3dbc0399d88808986db01f1af4ee195d068ab83c28f4898a194"} err="failed to get container status \"d58b041275b1b3dbc0399d88808986db01f1af4ee195d068ab83c28f4898a194\": rpc error: code = NotFound desc = could not find container \"d58b041275b1b3dbc0399d88808986db01f1af4ee195d068ab83c28f4898a194\": container with ID starting with d58b041275b1b3dbc0399d88808986db01f1af4ee195d068ab83c28f4898a194 not found: ID does not exist" Jan 20 17:19:20 crc kubenswrapper[4558]: I0120 17:19:20.738677 4558 scope.go:117] "RemoveContainer" containerID="e1afb75acea050bdb63b13351b8a84c1d2fac666461bccd1cbaea618b932d880" Jan 20 17:19:20 crc kubenswrapper[4558]: E0120 17:19:20.738931 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e1afb75acea050bdb63b13351b8a84c1d2fac666461bccd1cbaea618b932d880\": container with ID starting with e1afb75acea050bdb63b13351b8a84c1d2fac666461bccd1cbaea618b932d880 not found: ID does not exist" containerID="e1afb75acea050bdb63b13351b8a84c1d2fac666461bccd1cbaea618b932d880" Jan 20 17:19:20 crc kubenswrapper[4558]: I0120 17:19:20.738949 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e1afb75acea050bdb63b13351b8a84c1d2fac666461bccd1cbaea618b932d880"} err="failed to get container status \"e1afb75acea050bdb63b13351b8a84c1d2fac666461bccd1cbaea618b932d880\": rpc error: code = NotFound desc = could not find container \"e1afb75acea050bdb63b13351b8a84c1d2fac666461bccd1cbaea618b932d880\": container with ID starting with e1afb75acea050bdb63b13351b8a84c1d2fac666461bccd1cbaea618b932d880 not found: ID does not exist" Jan 20 17:19:20 crc kubenswrapper[4558]: I0120 17:19:20.742650 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-86cc899657-ftskx"] Jan 20 17:19:22 crc kubenswrapper[4558]: I0120 17:19:22.575329 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4b6b8c27-bb47-4423-92c4-70306ec7ef9e" path="/var/lib/kubelet/pods/4b6b8c27-bb47-4423-92c4-70306ec7ef9e/volumes" Jan 20 17:19:22 crc 
kubenswrapper[4558]: I0120 17:19:22.576215 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-db-create-7nj9x"] Jan 20 17:19:22 crc kubenswrapper[4558]: E0120 17:19:22.576569 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b6b8c27-bb47-4423-92c4-70306ec7ef9e" containerName="neutron-api" Jan 20 17:19:22 crc kubenswrapper[4558]: I0120 17:19:22.576587 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b6b8c27-bb47-4423-92c4-70306ec7ef9e" containerName="neutron-api" Jan 20 17:19:22 crc kubenswrapper[4558]: E0120 17:19:22.576615 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b6b8c27-bb47-4423-92c4-70306ec7ef9e" containerName="neutron-httpd" Jan 20 17:19:22 crc kubenswrapper[4558]: I0120 17:19:22.576622 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b6b8c27-bb47-4423-92c4-70306ec7ef9e" containerName="neutron-httpd" Jan 20 17:19:22 crc kubenswrapper[4558]: I0120 17:19:22.576782 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="4b6b8c27-bb47-4423-92c4-70306ec7ef9e" containerName="neutron-api" Jan 20 17:19:22 crc kubenswrapper[4558]: I0120 17:19:22.576798 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="4b6b8c27-bb47-4423-92c4-70306ec7ef9e" containerName="neutron-httpd" Jan 20 17:19:22 crc kubenswrapper[4558]: I0120 17:19:22.577462 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-db-create-7nj9x" Jan 20 17:19:22 crc kubenswrapper[4558]: I0120 17:19:22.585183 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-db-create-7nj9x"] Jan 20 17:19:22 crc kubenswrapper[4558]: I0120 17:19:22.604809 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f2s5w\" (UniqueName: \"kubernetes.io/projected/9a1b94fe-db72-418e-bc3b-5dd27adff225-kube-api-access-f2s5w\") pod \"nova-api-db-create-7nj9x\" (UID: \"9a1b94fe-db72-418e-bc3b-5dd27adff225\") " pod="openstack-kuttl-tests/nova-api-db-create-7nj9x" Jan 20 17:19:22 crc kubenswrapper[4558]: I0120 17:19:22.604931 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9a1b94fe-db72-418e-bc3b-5dd27adff225-operator-scripts\") pod \"nova-api-db-create-7nj9x\" (UID: \"9a1b94fe-db72-418e-bc3b-5dd27adff225\") " pod="openstack-kuttl-tests/nova-api-db-create-7nj9x" Jan 20 17:19:22 crc kubenswrapper[4558]: I0120 17:19:22.674531 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell0-db-create-hpfj8"] Jan 20 17:19:22 crc kubenswrapper[4558]: I0120 17:19:22.676060 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-db-create-hpfj8" Jan 20 17:19:22 crc kubenswrapper[4558]: I0120 17:19:22.684144 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-db-create-hpfj8"] Jan 20 17:19:22 crc kubenswrapper[4558]: I0120 17:19:22.707846 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9a1b94fe-db72-418e-bc3b-5dd27adff225-operator-scripts\") pod \"nova-api-db-create-7nj9x\" (UID: \"9a1b94fe-db72-418e-bc3b-5dd27adff225\") " pod="openstack-kuttl-tests/nova-api-db-create-7nj9x" Jan 20 17:19:22 crc kubenswrapper[4558]: I0120 17:19:22.707893 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bcwsf\" (UniqueName: \"kubernetes.io/projected/3a99ccdf-41ef-42bc-8d10-4c2cf74d3ac9-kube-api-access-bcwsf\") pod \"nova-cell0-db-create-hpfj8\" (UID: \"3a99ccdf-41ef-42bc-8d10-4c2cf74d3ac9\") " pod="openstack-kuttl-tests/nova-cell0-db-create-hpfj8" Jan 20 17:19:22 crc kubenswrapper[4558]: I0120 17:19:22.708001 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3a99ccdf-41ef-42bc-8d10-4c2cf74d3ac9-operator-scripts\") pod \"nova-cell0-db-create-hpfj8\" (UID: \"3a99ccdf-41ef-42bc-8d10-4c2cf74d3ac9\") " pod="openstack-kuttl-tests/nova-cell0-db-create-hpfj8" Jan 20 17:19:22 crc kubenswrapper[4558]: I0120 17:19:22.708039 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f2s5w\" (UniqueName: \"kubernetes.io/projected/9a1b94fe-db72-418e-bc3b-5dd27adff225-kube-api-access-f2s5w\") pod \"nova-api-db-create-7nj9x\" (UID: \"9a1b94fe-db72-418e-bc3b-5dd27adff225\") " pod="openstack-kuttl-tests/nova-api-db-create-7nj9x" Jan 20 17:19:22 crc kubenswrapper[4558]: I0120 17:19:22.708767 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9a1b94fe-db72-418e-bc3b-5dd27adff225-operator-scripts\") pod \"nova-api-db-create-7nj9x\" (UID: \"9a1b94fe-db72-418e-bc3b-5dd27adff225\") " pod="openstack-kuttl-tests/nova-api-db-create-7nj9x" Jan 20 17:19:22 crc kubenswrapper[4558]: I0120 17:19:22.727940 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f2s5w\" (UniqueName: \"kubernetes.io/projected/9a1b94fe-db72-418e-bc3b-5dd27adff225-kube-api-access-f2s5w\") pod \"nova-api-db-create-7nj9x\" (UID: \"9a1b94fe-db72-418e-bc3b-5dd27adff225\") " pod="openstack-kuttl-tests/nova-api-db-create-7nj9x" Jan 20 17:19:22 crc kubenswrapper[4558]: I0120 17:19:22.778211 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-4e5b-account-create-update-jp7rq"] Jan 20 17:19:22 crc kubenswrapper[4558]: I0120 17:19:22.779767 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-4e5b-account-create-update-jp7rq" Jan 20 17:19:22 crc kubenswrapper[4558]: I0120 17:19:22.784914 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-api-db-secret" Jan 20 17:19:22 crc kubenswrapper[4558]: I0120 17:19:22.789147 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-4e5b-account-create-update-jp7rq"] Jan 20 17:19:22 crc kubenswrapper[4558]: I0120 17:19:22.810431 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bcwsf\" (UniqueName: \"kubernetes.io/projected/3a99ccdf-41ef-42bc-8d10-4c2cf74d3ac9-kube-api-access-bcwsf\") pod \"nova-cell0-db-create-hpfj8\" (UID: \"3a99ccdf-41ef-42bc-8d10-4c2cf74d3ac9\") " pod="openstack-kuttl-tests/nova-cell0-db-create-hpfj8" Jan 20 17:19:22 crc kubenswrapper[4558]: I0120 17:19:22.810512 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/400d0672-2068-470f-983f-df899be25bd5-operator-scripts\") pod \"nova-api-4e5b-account-create-update-jp7rq\" (UID: \"400d0672-2068-470f-983f-df899be25bd5\") " pod="openstack-kuttl-tests/nova-api-4e5b-account-create-update-jp7rq" Jan 20 17:19:22 crc kubenswrapper[4558]: I0120 17:19:22.810552 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h8kg2\" (UniqueName: \"kubernetes.io/projected/400d0672-2068-470f-983f-df899be25bd5-kube-api-access-h8kg2\") pod \"nova-api-4e5b-account-create-update-jp7rq\" (UID: \"400d0672-2068-470f-983f-df899be25bd5\") " pod="openstack-kuttl-tests/nova-api-4e5b-account-create-update-jp7rq" Jan 20 17:19:22 crc kubenswrapper[4558]: I0120 17:19:22.810610 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3a99ccdf-41ef-42bc-8d10-4c2cf74d3ac9-operator-scripts\") pod \"nova-cell0-db-create-hpfj8\" (UID: \"3a99ccdf-41ef-42bc-8d10-4c2cf74d3ac9\") " pod="openstack-kuttl-tests/nova-cell0-db-create-hpfj8" Jan 20 17:19:22 crc kubenswrapper[4558]: I0120 17:19:22.811247 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3a99ccdf-41ef-42bc-8d10-4c2cf74d3ac9-operator-scripts\") pod \"nova-cell0-db-create-hpfj8\" (UID: \"3a99ccdf-41ef-42bc-8d10-4c2cf74d3ac9\") " pod="openstack-kuttl-tests/nova-cell0-db-create-hpfj8" Jan 20 17:19:22 crc kubenswrapper[4558]: I0120 17:19:22.826137 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bcwsf\" (UniqueName: \"kubernetes.io/projected/3a99ccdf-41ef-42bc-8d10-4c2cf74d3ac9-kube-api-access-bcwsf\") pod \"nova-cell0-db-create-hpfj8\" (UID: \"3a99ccdf-41ef-42bc-8d10-4c2cf74d3ac9\") " pod="openstack-kuttl-tests/nova-cell0-db-create-hpfj8" Jan 20 17:19:22 crc kubenswrapper[4558]: I0120 17:19:22.886193 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-db-create-vvdf2"] Jan 20 17:19:22 crc kubenswrapper[4558]: I0120 17:19:22.887535 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-db-create-vvdf2" Jan 20 17:19:22 crc kubenswrapper[4558]: I0120 17:19:22.900537 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-db-create-vvdf2"] Jan 20 17:19:22 crc kubenswrapper[4558]: I0120 17:19:22.907548 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-db-create-7nj9x" Jan 20 17:19:22 crc kubenswrapper[4558]: I0120 17:19:22.912811 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h8kg2\" (UniqueName: \"kubernetes.io/projected/400d0672-2068-470f-983f-df899be25bd5-kube-api-access-h8kg2\") pod \"nova-api-4e5b-account-create-update-jp7rq\" (UID: \"400d0672-2068-470f-983f-df899be25bd5\") " pod="openstack-kuttl-tests/nova-api-4e5b-account-create-update-jp7rq" Jan 20 17:19:22 crc kubenswrapper[4558]: I0120 17:19:22.912892 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c9ccad35-4742-4e3e-b281-d76be66e44fd-operator-scripts\") pod \"nova-cell1-db-create-vvdf2\" (UID: \"c9ccad35-4742-4e3e-b281-d76be66e44fd\") " pod="openstack-kuttl-tests/nova-cell1-db-create-vvdf2" Jan 20 17:19:22 crc kubenswrapper[4558]: I0120 17:19:22.913212 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jcbn8\" (UniqueName: \"kubernetes.io/projected/c9ccad35-4742-4e3e-b281-d76be66e44fd-kube-api-access-jcbn8\") pod \"nova-cell1-db-create-vvdf2\" (UID: \"c9ccad35-4742-4e3e-b281-d76be66e44fd\") " pod="openstack-kuttl-tests/nova-cell1-db-create-vvdf2" Jan 20 17:19:22 crc kubenswrapper[4558]: I0120 17:19:22.913242 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/400d0672-2068-470f-983f-df899be25bd5-operator-scripts\") pod \"nova-api-4e5b-account-create-update-jp7rq\" (UID: \"400d0672-2068-470f-983f-df899be25bd5\") " pod="openstack-kuttl-tests/nova-api-4e5b-account-create-update-jp7rq" Jan 20 17:19:22 crc kubenswrapper[4558]: I0120 17:19:22.913955 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/400d0672-2068-470f-983f-df899be25bd5-operator-scripts\") pod \"nova-api-4e5b-account-create-update-jp7rq\" (UID: \"400d0672-2068-470f-983f-df899be25bd5\") " pod="openstack-kuttl-tests/nova-api-4e5b-account-create-update-jp7rq" Jan 20 17:19:22 crc kubenswrapper[4558]: I0120 17:19:22.927769 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h8kg2\" (UniqueName: \"kubernetes.io/projected/400d0672-2068-470f-983f-df899be25bd5-kube-api-access-h8kg2\") pod \"nova-api-4e5b-account-create-update-jp7rq\" (UID: \"400d0672-2068-470f-983f-df899be25bd5\") " pod="openstack-kuttl-tests/nova-api-4e5b-account-create-update-jp7rq" Jan 20 17:19:22 crc kubenswrapper[4558]: I0120 17:19:22.993112 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell0-b69b-account-create-update-ct4qn"] Jan 20 17:19:22 crc kubenswrapper[4558]: I0120 17:19:22.993677 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-db-create-hpfj8" Jan 20 17:19:22 crc kubenswrapper[4558]: I0120 17:19:22.994453 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-b69b-account-create-update-ct4qn" Jan 20 17:19:22 crc kubenswrapper[4558]: I0120 17:19:22.996802 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-db-secret" Jan 20 17:19:23 crc kubenswrapper[4558]: I0120 17:19:23.018267 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c9ccad35-4742-4e3e-b281-d76be66e44fd-operator-scripts\") pod \"nova-cell1-db-create-vvdf2\" (UID: \"c9ccad35-4742-4e3e-b281-d76be66e44fd\") " pod="openstack-kuttl-tests/nova-cell1-db-create-vvdf2" Jan 20 17:19:23 crc kubenswrapper[4558]: I0120 17:19:23.018351 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t5694\" (UniqueName: \"kubernetes.io/projected/9812bc23-9bb1-4b3a-975b-0bffa6e3184f-kube-api-access-t5694\") pod \"nova-cell0-b69b-account-create-update-ct4qn\" (UID: \"9812bc23-9bb1-4b3a-975b-0bffa6e3184f\") " pod="openstack-kuttl-tests/nova-cell0-b69b-account-create-update-ct4qn" Jan 20 17:19:23 crc kubenswrapper[4558]: I0120 17:19:23.018458 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9812bc23-9bb1-4b3a-975b-0bffa6e3184f-operator-scripts\") pod \"nova-cell0-b69b-account-create-update-ct4qn\" (UID: \"9812bc23-9bb1-4b3a-975b-0bffa6e3184f\") " pod="openstack-kuttl-tests/nova-cell0-b69b-account-create-update-ct4qn" Jan 20 17:19:23 crc kubenswrapper[4558]: I0120 17:19:23.018508 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jcbn8\" (UniqueName: \"kubernetes.io/projected/c9ccad35-4742-4e3e-b281-d76be66e44fd-kube-api-access-jcbn8\") pod \"nova-cell1-db-create-vvdf2\" (UID: \"c9ccad35-4742-4e3e-b281-d76be66e44fd\") " pod="openstack-kuttl-tests/nova-cell1-db-create-vvdf2" Jan 20 17:19:23 crc kubenswrapper[4558]: I0120 17:19:23.019540 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c9ccad35-4742-4e3e-b281-d76be66e44fd-operator-scripts\") pod \"nova-cell1-db-create-vvdf2\" (UID: \"c9ccad35-4742-4e3e-b281-d76be66e44fd\") " pod="openstack-kuttl-tests/nova-cell1-db-create-vvdf2" Jan 20 17:19:23 crc kubenswrapper[4558]: I0120 17:19:23.026248 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-b69b-account-create-update-ct4qn"] Jan 20 17:19:23 crc kubenswrapper[4558]: I0120 17:19:23.037750 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jcbn8\" (UniqueName: \"kubernetes.io/projected/c9ccad35-4742-4e3e-b281-d76be66e44fd-kube-api-access-jcbn8\") pod \"nova-cell1-db-create-vvdf2\" (UID: \"c9ccad35-4742-4e3e-b281-d76be66e44fd\") " pod="openstack-kuttl-tests/nova-cell1-db-create-vvdf2" Jan 20 17:19:23 crc kubenswrapper[4558]: I0120 17:19:23.101393 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-db-create-vvdf2" Jan 20 17:19:23 crc kubenswrapper[4558]: I0120 17:19:23.105427 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-4e5b-account-create-update-jp7rq" Jan 20 17:19:23 crc kubenswrapper[4558]: I0120 17:19:23.119738 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:23 crc kubenswrapper[4558]: I0120 17:19:23.120603 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t5694\" (UniqueName: \"kubernetes.io/projected/9812bc23-9bb1-4b3a-975b-0bffa6e3184f-kube-api-access-t5694\") pod \"nova-cell0-b69b-account-create-update-ct4qn\" (UID: \"9812bc23-9bb1-4b3a-975b-0bffa6e3184f\") " pod="openstack-kuttl-tests/nova-cell0-b69b-account-create-update-ct4qn" Jan 20 17:19:23 crc kubenswrapper[4558]: I0120 17:19:23.120879 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9812bc23-9bb1-4b3a-975b-0bffa6e3184f-operator-scripts\") pod \"nova-cell0-b69b-account-create-update-ct4qn\" (UID: \"9812bc23-9bb1-4b3a-975b-0bffa6e3184f\") " pod="openstack-kuttl-tests/nova-cell0-b69b-account-create-update-ct4qn" Jan 20 17:19:23 crc kubenswrapper[4558]: I0120 17:19:23.121399 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9812bc23-9bb1-4b3a-975b-0bffa6e3184f-operator-scripts\") pod \"nova-cell0-b69b-account-create-update-ct4qn\" (UID: \"9812bc23-9bb1-4b3a-975b-0bffa6e3184f\") " pod="openstack-kuttl-tests/nova-cell0-b69b-account-create-update-ct4qn" Jan 20 17:19:23 crc kubenswrapper[4558]: I0120 17:19:23.149967 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t5694\" (UniqueName: \"kubernetes.io/projected/9812bc23-9bb1-4b3a-975b-0bffa6e3184f-kube-api-access-t5694\") pod \"nova-cell0-b69b-account-create-update-ct4qn\" (UID: \"9812bc23-9bb1-4b3a-975b-0bffa6e3184f\") " pod="openstack-kuttl-tests/nova-cell0-b69b-account-create-update-ct4qn" Jan 20 17:19:23 crc kubenswrapper[4558]: I0120 17:19:23.187521 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-12cd-account-create-update-8bwmv"] Jan 20 17:19:23 crc kubenswrapper[4558]: E0120 17:19:23.189873 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5e92730-f89e-4f1d-9a29-925c5acda961" containerName="sg-core" Jan 20 17:19:23 crc kubenswrapper[4558]: I0120 17:19:23.189901 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5e92730-f89e-4f1d-9a29-925c5acda961" containerName="sg-core" Jan 20 17:19:23 crc kubenswrapper[4558]: E0120 17:19:23.189989 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5e92730-f89e-4f1d-9a29-925c5acda961" containerName="proxy-httpd" Jan 20 17:19:23 crc kubenswrapper[4558]: I0120 17:19:23.190000 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5e92730-f89e-4f1d-9a29-925c5acda961" containerName="proxy-httpd" Jan 20 17:19:23 crc kubenswrapper[4558]: E0120 17:19:23.190011 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5e92730-f89e-4f1d-9a29-925c5acda961" containerName="ceilometer-central-agent" Jan 20 17:19:23 crc kubenswrapper[4558]: I0120 17:19:23.190019 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5e92730-f89e-4f1d-9a29-925c5acda961" containerName="ceilometer-central-agent" Jan 20 17:19:23 crc kubenswrapper[4558]: E0120 17:19:23.190063 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5e92730-f89e-4f1d-9a29-925c5acda961" containerName="ceilometer-notification-agent" Jan 20 17:19:23 crc kubenswrapper[4558]: I0120 17:19:23.190072 4558 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="b5e92730-f89e-4f1d-9a29-925c5acda961" containerName="ceilometer-notification-agent" Jan 20 17:19:23 crc kubenswrapper[4558]: I0120 17:19:23.190784 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b5e92730-f89e-4f1d-9a29-925c5acda961" containerName="proxy-httpd" Jan 20 17:19:23 crc kubenswrapper[4558]: I0120 17:19:23.190804 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b5e92730-f89e-4f1d-9a29-925c5acda961" containerName="ceilometer-central-agent" Jan 20 17:19:23 crc kubenswrapper[4558]: I0120 17:19:23.190833 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b5e92730-f89e-4f1d-9a29-925c5acda961" containerName="sg-core" Jan 20 17:19:23 crc kubenswrapper[4558]: I0120 17:19:23.190851 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b5e92730-f89e-4f1d-9a29-925c5acda961" containerName="ceilometer-notification-agent" Jan 20 17:19:23 crc kubenswrapper[4558]: I0120 17:19:23.191982 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-12cd-account-create-update-8bwmv" Jan 20 17:19:23 crc kubenswrapper[4558]: I0120 17:19:23.194431 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-db-secret" Jan 20 17:19:23 crc kubenswrapper[4558]: I0120 17:19:23.213330 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-12cd-account-create-update-8bwmv"] Jan 20 17:19:23 crc kubenswrapper[4558]: I0120 17:19:23.222194 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5e92730-f89e-4f1d-9a29-925c5acda961-combined-ca-bundle\") pod \"b5e92730-f89e-4f1d-9a29-925c5acda961\" (UID: \"b5e92730-f89e-4f1d-9a29-925c5acda961\") " Jan 20 17:19:23 crc kubenswrapper[4558]: I0120 17:19:23.222284 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b5e92730-f89e-4f1d-9a29-925c5acda961-scripts\") pod \"b5e92730-f89e-4f1d-9a29-925c5acda961\" (UID: \"b5e92730-f89e-4f1d-9a29-925c5acda961\") " Jan 20 17:19:23 crc kubenswrapper[4558]: I0120 17:19:23.222534 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b5e92730-f89e-4f1d-9a29-925c5acda961-sg-core-conf-yaml\") pod \"b5e92730-f89e-4f1d-9a29-925c5acda961\" (UID: \"b5e92730-f89e-4f1d-9a29-925c5acda961\") " Jan 20 17:19:23 crc kubenswrapper[4558]: I0120 17:19:23.222626 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b5e92730-f89e-4f1d-9a29-925c5acda961-config-data\") pod \"b5e92730-f89e-4f1d-9a29-925c5acda961\" (UID: \"b5e92730-f89e-4f1d-9a29-925c5acda961\") " Jan 20 17:19:23 crc kubenswrapper[4558]: I0120 17:19:23.222817 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qczsr\" (UniqueName: \"kubernetes.io/projected/b5e92730-f89e-4f1d-9a29-925c5acda961-kube-api-access-qczsr\") pod \"b5e92730-f89e-4f1d-9a29-925c5acda961\" (UID: \"b5e92730-f89e-4f1d-9a29-925c5acda961\") " Jan 20 17:19:23 crc kubenswrapper[4558]: I0120 17:19:23.222878 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b5e92730-f89e-4f1d-9a29-925c5acda961-run-httpd\") pod \"b5e92730-f89e-4f1d-9a29-925c5acda961\" 
(UID: \"b5e92730-f89e-4f1d-9a29-925c5acda961\") " Jan 20 17:19:23 crc kubenswrapper[4558]: I0120 17:19:23.223009 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b5e92730-f89e-4f1d-9a29-925c5acda961-log-httpd\") pod \"b5e92730-f89e-4f1d-9a29-925c5acda961\" (UID: \"b5e92730-f89e-4f1d-9a29-925c5acda961\") " Jan 20 17:19:23 crc kubenswrapper[4558]: I0120 17:19:23.223510 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xmc7q\" (UniqueName: \"kubernetes.io/projected/8e497b8c-aac4-4095-a801-65e24590b325-kube-api-access-xmc7q\") pod \"nova-cell1-12cd-account-create-update-8bwmv\" (UID: \"8e497b8c-aac4-4095-a801-65e24590b325\") " pod="openstack-kuttl-tests/nova-cell1-12cd-account-create-update-8bwmv" Jan 20 17:19:23 crc kubenswrapper[4558]: I0120 17:19:23.223564 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8e497b8c-aac4-4095-a801-65e24590b325-operator-scripts\") pod \"nova-cell1-12cd-account-create-update-8bwmv\" (UID: \"8e497b8c-aac4-4095-a801-65e24590b325\") " pod="openstack-kuttl-tests/nova-cell1-12cd-account-create-update-8bwmv" Jan 20 17:19:23 crc kubenswrapper[4558]: I0120 17:19:23.224086 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b5e92730-f89e-4f1d-9a29-925c5acda961-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "b5e92730-f89e-4f1d-9a29-925c5acda961" (UID: "b5e92730-f89e-4f1d-9a29-925c5acda961"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:19:23 crc kubenswrapper[4558]: I0120 17:19:23.225722 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b5e92730-f89e-4f1d-9a29-925c5acda961-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "b5e92730-f89e-4f1d-9a29-925c5acda961" (UID: "b5e92730-f89e-4f1d-9a29-925c5acda961"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:19:23 crc kubenswrapper[4558]: I0120 17:19:23.232346 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b5e92730-f89e-4f1d-9a29-925c5acda961-kube-api-access-qczsr" (OuterVolumeSpecName: "kube-api-access-qczsr") pod "b5e92730-f89e-4f1d-9a29-925c5acda961" (UID: "b5e92730-f89e-4f1d-9a29-925c5acda961"). InnerVolumeSpecName "kube-api-access-qczsr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:19:23 crc kubenswrapper[4558]: I0120 17:19:23.238296 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b5e92730-f89e-4f1d-9a29-925c5acda961-scripts" (OuterVolumeSpecName: "scripts") pod "b5e92730-f89e-4f1d-9a29-925c5acda961" (UID: "b5e92730-f89e-4f1d-9a29-925c5acda961"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:19:23 crc kubenswrapper[4558]: I0120 17:19:23.271322 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b5e92730-f89e-4f1d-9a29-925c5acda961-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "b5e92730-f89e-4f1d-9a29-925c5acda961" (UID: "b5e92730-f89e-4f1d-9a29-925c5acda961"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:19:23 crc kubenswrapper[4558]: I0120 17:19:23.331728 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xmc7q\" (UniqueName: \"kubernetes.io/projected/8e497b8c-aac4-4095-a801-65e24590b325-kube-api-access-xmc7q\") pod \"nova-cell1-12cd-account-create-update-8bwmv\" (UID: \"8e497b8c-aac4-4095-a801-65e24590b325\") " pod="openstack-kuttl-tests/nova-cell1-12cd-account-create-update-8bwmv" Jan 20 17:19:23 crc kubenswrapper[4558]: I0120 17:19:23.331886 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8e497b8c-aac4-4095-a801-65e24590b325-operator-scripts\") pod \"nova-cell1-12cd-account-create-update-8bwmv\" (UID: \"8e497b8c-aac4-4095-a801-65e24590b325\") " pod="openstack-kuttl-tests/nova-cell1-12cd-account-create-update-8bwmv" Jan 20 17:19:23 crc kubenswrapper[4558]: I0120 17:19:23.332156 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b5e92730-f89e-4f1d-9a29-925c5acda961-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:19:23 crc kubenswrapper[4558]: I0120 17:19:23.332197 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b5e92730-f89e-4f1d-9a29-925c5acda961-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:19:23 crc kubenswrapper[4558]: I0120 17:19:23.332209 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qczsr\" (UniqueName: \"kubernetes.io/projected/b5e92730-f89e-4f1d-9a29-925c5acda961-kube-api-access-qczsr\") on node \"crc\" DevicePath \"\"" Jan 20 17:19:23 crc kubenswrapper[4558]: I0120 17:19:23.332219 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b5e92730-f89e-4f1d-9a29-925c5acda961-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:19:23 crc kubenswrapper[4558]: I0120 17:19:23.332228 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b5e92730-f89e-4f1d-9a29-925c5acda961-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:19:23 crc kubenswrapper[4558]: I0120 17:19:23.333368 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8e497b8c-aac4-4095-a801-65e24590b325-operator-scripts\") pod \"nova-cell1-12cd-account-create-update-8bwmv\" (UID: \"8e497b8c-aac4-4095-a801-65e24590b325\") " pod="openstack-kuttl-tests/nova-cell1-12cd-account-create-update-8bwmv" Jan 20 17:19:23 crc kubenswrapper[4558]: I0120 17:19:23.343640 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b5e92730-f89e-4f1d-9a29-925c5acda961-config-data" (OuterVolumeSpecName: "config-data") pod "b5e92730-f89e-4f1d-9a29-925c5acda961" (UID: "b5e92730-f89e-4f1d-9a29-925c5acda961"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:19:23 crc kubenswrapper[4558]: I0120 17:19:23.347515 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xmc7q\" (UniqueName: \"kubernetes.io/projected/8e497b8c-aac4-4095-a801-65e24590b325-kube-api-access-xmc7q\") pod \"nova-cell1-12cd-account-create-update-8bwmv\" (UID: \"8e497b8c-aac4-4095-a801-65e24590b325\") " pod="openstack-kuttl-tests/nova-cell1-12cd-account-create-update-8bwmv" Jan 20 17:19:23 crc kubenswrapper[4558]: I0120 17:19:23.355796 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b5e92730-f89e-4f1d-9a29-925c5acda961-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b5e92730-f89e-4f1d-9a29-925c5acda961" (UID: "b5e92730-f89e-4f1d-9a29-925c5acda961"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:19:23 crc kubenswrapper[4558]: W0120 17:19:23.388608 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9a1b94fe_db72_418e_bc3b_5dd27adff225.slice/crio-2cc8a32abdd5268f2784b4089fbbfe4cb6505339b76ba83fd88e7bc20bf2c11e WatchSource:0}: Error finding container 2cc8a32abdd5268f2784b4089fbbfe4cb6505339b76ba83fd88e7bc20bf2c11e: Status 404 returned error can't find the container with id 2cc8a32abdd5268f2784b4089fbbfe4cb6505339b76ba83fd88e7bc20bf2c11e Jan 20 17:19:23 crc kubenswrapper[4558]: I0120 17:19:23.389362 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-db-create-7nj9x"] Jan 20 17:19:23 crc kubenswrapper[4558]: I0120 17:19:23.415117 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-b69b-account-create-update-ct4qn" Jan 20 17:19:23 crc kubenswrapper[4558]: I0120 17:19:23.433516 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5e92730-f89e-4f1d-9a29-925c5acda961-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:19:23 crc kubenswrapper[4558]: I0120 17:19:23.433547 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b5e92730-f89e-4f1d-9a29-925c5acda961-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:19:23 crc kubenswrapper[4558]: I0120 17:19:23.508053 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-db-create-hpfj8"] Jan 20 17:19:23 crc kubenswrapper[4558]: I0120 17:19:23.508605 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-12cd-account-create-update-8bwmv" Jan 20 17:19:23 crc kubenswrapper[4558]: I0120 17:19:23.621969 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-db-create-vvdf2"] Jan 20 17:19:23 crc kubenswrapper[4558]: I0120 17:19:23.675433 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-4e5b-account-create-update-jp7rq"] Jan 20 17:19:23 crc kubenswrapper[4558]: I0120 17:19:23.725464 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-db-create-7nj9x" event={"ID":"9a1b94fe-db72-418e-bc3b-5dd27adff225","Type":"ContainerStarted","Data":"cdc1d8ff5500de6fc7f67919f8800d1bb3a91e560eaa7ae6eb032ebb3cf4ef8f"} Jan 20 17:19:23 crc kubenswrapper[4558]: I0120 17:19:23.725506 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-db-create-7nj9x" event={"ID":"9a1b94fe-db72-418e-bc3b-5dd27adff225","Type":"ContainerStarted","Data":"2cc8a32abdd5268f2784b4089fbbfe4cb6505339b76ba83fd88e7bc20bf2c11e"} Jan 20 17:19:23 crc kubenswrapper[4558]: I0120 17:19:23.729686 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-db-create-hpfj8" event={"ID":"3a99ccdf-41ef-42bc-8d10-4c2cf74d3ac9","Type":"ContainerStarted","Data":"138776071c1cd283c04ad3bac5afc2413a017d74f6650ba993ce0dddc48944ab"} Jan 20 17:19:23 crc kubenswrapper[4558]: I0120 17:19:23.729729 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-db-create-hpfj8" event={"ID":"3a99ccdf-41ef-42bc-8d10-4c2cf74d3ac9","Type":"ContainerStarted","Data":"d3893fafa0154321bbfbc6d69a94994034c7cfc16aecd63ed0c840654bc8c88c"} Jan 20 17:19:23 crc kubenswrapper[4558]: I0120 17:19:23.733514 4558 generic.go:334] "Generic (PLEG): container finished" podID="b5e92730-f89e-4f1d-9a29-925c5acda961" containerID="d30f7131a9e2cd443f7a4870ac2230d1ab5b7a51a7a7439098ff9ccda4a3e25b" exitCode=0 Jan 20 17:19:23 crc kubenswrapper[4558]: I0120 17:19:23.733594 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"b5e92730-f89e-4f1d-9a29-925c5acda961","Type":"ContainerDied","Data":"d30f7131a9e2cd443f7a4870ac2230d1ab5b7a51a7a7439098ff9ccda4a3e25b"} Jan 20 17:19:23 crc kubenswrapper[4558]: I0120 17:19:23.733630 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"b5e92730-f89e-4f1d-9a29-925c5acda961","Type":"ContainerDied","Data":"5d6fb5041b9b1332a47f5ae04986ff4293f2a325641b93e98a82449cc40f5088"} Jan 20 17:19:23 crc kubenswrapper[4558]: I0120 17:19:23.733664 4558 scope.go:117] "RemoveContainer" containerID="05ca3f2636e110d0332674062bdecc1c997c9e6bac0b7a0e7421774ccd7af025" Jan 20 17:19:23 crc kubenswrapper[4558]: I0120 17:19:23.733830 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:23 crc kubenswrapper[4558]: I0120 17:19:23.744594 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-api-db-create-7nj9x" podStartSLOduration=1.7445823539999998 podStartE2EDuration="1.744582354s" podCreationTimestamp="2026-01-20 17:19:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:19:23.739922551 +0000 UTC m=+2257.500260528" watchObservedRunningTime="2026-01-20 17:19:23.744582354 +0000 UTC m=+2257.504920321" Jan 20 17:19:23 crc kubenswrapper[4558]: I0120 17:19:23.745300 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-4e5b-account-create-update-jp7rq" event={"ID":"400d0672-2068-470f-983f-df899be25bd5","Type":"ContainerStarted","Data":"10d590f128d62555192f25ece98c1ac03744eea992f59ddd0f7a23599fc75759"} Jan 20 17:19:23 crc kubenswrapper[4558]: I0120 17:19:23.752271 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-db-create-vvdf2" event={"ID":"c9ccad35-4742-4e3e-b281-d76be66e44fd","Type":"ContainerStarted","Data":"b70e888c4f9d1f09a7591affc7f3cb35eca17be6133bdd9ddc39723d58674730"} Jan 20 17:19:23 crc kubenswrapper[4558]: I0120 17:19:23.765527 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell0-db-create-hpfj8" podStartSLOduration=1.765502985 podStartE2EDuration="1.765502985s" podCreationTimestamp="2026-01-20 17:19:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:19:23.757654626 +0000 UTC m=+2257.517992593" watchObservedRunningTime="2026-01-20 17:19:23.765502985 +0000 UTC m=+2257.525840952" Jan 20 17:19:23 crc kubenswrapper[4558]: I0120 17:19:23.832830 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-b69b-account-create-update-ct4qn"] Jan 20 17:19:23 crc kubenswrapper[4558]: I0120 17:19:23.973883 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-12cd-account-create-update-8bwmv"] Jan 20 17:19:23 crc kubenswrapper[4558]: W0120 17:19:23.999866 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8e497b8c_aac4_4095_a801_65e24590b325.slice/crio-19c6b76c74511c423e05ac254b43a066b8a39e021ed6b590375bdaa8ac511b62 WatchSource:0}: Error finding container 19c6b76c74511c423e05ac254b43a066b8a39e021ed6b590375bdaa8ac511b62: Status 404 returned error can't find the container with id 19c6b76c74511c423e05ac254b43a066b8a39e021ed6b590375bdaa8ac511b62 Jan 20 17:19:24 crc kubenswrapper[4558]: I0120 17:19:24.073968 4558 scope.go:117] "RemoveContainer" containerID="b54c224c9ceb5230ae536b85978dfae73a97fbd2ecddcabd5d1b2615d78a8104" Jan 20 17:19:24 crc kubenswrapper[4558]: I0120 17:19:24.104997 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:19:24 crc kubenswrapper[4558]: I0120 17:19:24.105006 4558 scope.go:117] "RemoveContainer" containerID="d30f7131a9e2cd443f7a4870ac2230d1ab5b7a51a7a7439098ff9ccda4a3e25b" Jan 20 17:19:24 crc kubenswrapper[4558]: I0120 17:19:24.121203 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:19:24 crc kubenswrapper[4558]: I0120 
17:19:24.131093 4558 scope.go:117] "RemoveContainer" containerID="9003fbf8af830e35a1d6485bbee760ae6d290f957bbb030215bddf31d0f8fc2c" Jan 20 17:19:24 crc kubenswrapper[4558]: I0120 17:19:24.131236 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:19:24 crc kubenswrapper[4558]: I0120 17:19:24.139124 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:24 crc kubenswrapper[4558]: I0120 17:19:24.145636 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:19:24 crc kubenswrapper[4558]: I0120 17:19:24.145811 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:19:24 crc kubenswrapper[4558]: I0120 17:19:24.149934 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee678e41-8db4-4efd-995f-20521fff70ea-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ee678e41-8db4-4efd-995f-20521fff70ea\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:24 crc kubenswrapper[4558]: I0120 17:19:24.149983 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ee678e41-8db4-4efd-995f-20521fff70ea-run-httpd\") pod \"ceilometer-0\" (UID: \"ee678e41-8db4-4efd-995f-20521fff70ea\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:24 crc kubenswrapper[4558]: I0120 17:19:24.150033 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ee678e41-8db4-4efd-995f-20521fff70ea-config-data\") pod \"ceilometer-0\" (UID: \"ee678e41-8db4-4efd-995f-20521fff70ea\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:24 crc kubenswrapper[4558]: I0120 17:19:24.150077 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wzhhz\" (UniqueName: \"kubernetes.io/projected/ee678e41-8db4-4efd-995f-20521fff70ea-kube-api-access-wzhhz\") pod \"ceilometer-0\" (UID: \"ee678e41-8db4-4efd-995f-20521fff70ea\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:24 crc kubenswrapper[4558]: I0120 17:19:24.150099 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ee678e41-8db4-4efd-995f-20521fff70ea-log-httpd\") pod \"ceilometer-0\" (UID: \"ee678e41-8db4-4efd-995f-20521fff70ea\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:24 crc kubenswrapper[4558]: I0120 17:19:24.150123 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ee678e41-8db4-4efd-995f-20521fff70ea-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ee678e41-8db4-4efd-995f-20521fff70ea\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:24 crc kubenswrapper[4558]: I0120 17:19:24.150154 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ee678e41-8db4-4efd-995f-20521fff70ea-scripts\") pod \"ceilometer-0\" (UID: \"ee678e41-8db4-4efd-995f-20521fff70ea\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:24 crc 
kubenswrapper[4558]: I0120 17:19:24.150781 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:19:24 crc kubenswrapper[4558]: I0120 17:19:24.159505 4558 scope.go:117] "RemoveContainer" containerID="05ca3f2636e110d0332674062bdecc1c997c9e6bac0b7a0e7421774ccd7af025" Jan 20 17:19:24 crc kubenswrapper[4558]: E0120 17:19:24.160355 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"05ca3f2636e110d0332674062bdecc1c997c9e6bac0b7a0e7421774ccd7af025\": container with ID starting with 05ca3f2636e110d0332674062bdecc1c997c9e6bac0b7a0e7421774ccd7af025 not found: ID does not exist" containerID="05ca3f2636e110d0332674062bdecc1c997c9e6bac0b7a0e7421774ccd7af025" Jan 20 17:19:24 crc kubenswrapper[4558]: I0120 17:19:24.160442 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"05ca3f2636e110d0332674062bdecc1c997c9e6bac0b7a0e7421774ccd7af025"} err="failed to get container status \"05ca3f2636e110d0332674062bdecc1c997c9e6bac0b7a0e7421774ccd7af025\": rpc error: code = NotFound desc = could not find container \"05ca3f2636e110d0332674062bdecc1c997c9e6bac0b7a0e7421774ccd7af025\": container with ID starting with 05ca3f2636e110d0332674062bdecc1c997c9e6bac0b7a0e7421774ccd7af025 not found: ID does not exist" Jan 20 17:19:24 crc kubenswrapper[4558]: I0120 17:19:24.160490 4558 scope.go:117] "RemoveContainer" containerID="b54c224c9ceb5230ae536b85978dfae73a97fbd2ecddcabd5d1b2615d78a8104" Jan 20 17:19:24 crc kubenswrapper[4558]: E0120 17:19:24.160755 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b54c224c9ceb5230ae536b85978dfae73a97fbd2ecddcabd5d1b2615d78a8104\": container with ID starting with b54c224c9ceb5230ae536b85978dfae73a97fbd2ecddcabd5d1b2615d78a8104 not found: ID does not exist" containerID="b54c224c9ceb5230ae536b85978dfae73a97fbd2ecddcabd5d1b2615d78a8104" Jan 20 17:19:24 crc kubenswrapper[4558]: I0120 17:19:24.160791 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b54c224c9ceb5230ae536b85978dfae73a97fbd2ecddcabd5d1b2615d78a8104"} err="failed to get container status \"b54c224c9ceb5230ae536b85978dfae73a97fbd2ecddcabd5d1b2615d78a8104\": rpc error: code = NotFound desc = could not find container \"b54c224c9ceb5230ae536b85978dfae73a97fbd2ecddcabd5d1b2615d78a8104\": container with ID starting with b54c224c9ceb5230ae536b85978dfae73a97fbd2ecddcabd5d1b2615d78a8104 not found: ID does not exist" Jan 20 17:19:24 crc kubenswrapper[4558]: I0120 17:19:24.160805 4558 scope.go:117] "RemoveContainer" containerID="d30f7131a9e2cd443f7a4870ac2230d1ab5b7a51a7a7439098ff9ccda4a3e25b" Jan 20 17:19:24 crc kubenswrapper[4558]: E0120 17:19:24.161075 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d30f7131a9e2cd443f7a4870ac2230d1ab5b7a51a7a7439098ff9ccda4a3e25b\": container with ID starting with d30f7131a9e2cd443f7a4870ac2230d1ab5b7a51a7a7439098ff9ccda4a3e25b not found: ID does not exist" containerID="d30f7131a9e2cd443f7a4870ac2230d1ab5b7a51a7a7439098ff9ccda4a3e25b" Jan 20 17:19:24 crc kubenswrapper[4558]: I0120 17:19:24.161090 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d30f7131a9e2cd443f7a4870ac2230d1ab5b7a51a7a7439098ff9ccda4a3e25b"} err="failed to get container status 
\"d30f7131a9e2cd443f7a4870ac2230d1ab5b7a51a7a7439098ff9ccda4a3e25b\": rpc error: code = NotFound desc = could not find container \"d30f7131a9e2cd443f7a4870ac2230d1ab5b7a51a7a7439098ff9ccda4a3e25b\": container with ID starting with d30f7131a9e2cd443f7a4870ac2230d1ab5b7a51a7a7439098ff9ccda4a3e25b not found: ID does not exist" Jan 20 17:19:24 crc kubenswrapper[4558]: I0120 17:19:24.161101 4558 scope.go:117] "RemoveContainer" containerID="9003fbf8af830e35a1d6485bbee760ae6d290f957bbb030215bddf31d0f8fc2c" Jan 20 17:19:24 crc kubenswrapper[4558]: E0120 17:19:24.162312 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9003fbf8af830e35a1d6485bbee760ae6d290f957bbb030215bddf31d0f8fc2c\": container with ID starting with 9003fbf8af830e35a1d6485bbee760ae6d290f957bbb030215bddf31d0f8fc2c not found: ID does not exist" containerID="9003fbf8af830e35a1d6485bbee760ae6d290f957bbb030215bddf31d0f8fc2c" Jan 20 17:19:24 crc kubenswrapper[4558]: I0120 17:19:24.162337 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9003fbf8af830e35a1d6485bbee760ae6d290f957bbb030215bddf31d0f8fc2c"} err="failed to get container status \"9003fbf8af830e35a1d6485bbee760ae6d290f957bbb030215bddf31d0f8fc2c\": rpc error: code = NotFound desc = could not find container \"9003fbf8af830e35a1d6485bbee760ae6d290f957bbb030215bddf31d0f8fc2c\": container with ID starting with 9003fbf8af830e35a1d6485bbee760ae6d290f957bbb030215bddf31d0f8fc2c not found: ID does not exist" Jan 20 17:19:24 crc kubenswrapper[4558]: I0120 17:19:24.251242 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ee678e41-8db4-4efd-995f-20521fff70ea-config-data\") pod \"ceilometer-0\" (UID: \"ee678e41-8db4-4efd-995f-20521fff70ea\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:24 crc kubenswrapper[4558]: I0120 17:19:24.251284 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wzhhz\" (UniqueName: \"kubernetes.io/projected/ee678e41-8db4-4efd-995f-20521fff70ea-kube-api-access-wzhhz\") pod \"ceilometer-0\" (UID: \"ee678e41-8db4-4efd-995f-20521fff70ea\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:24 crc kubenswrapper[4558]: I0120 17:19:24.251314 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ee678e41-8db4-4efd-995f-20521fff70ea-log-httpd\") pod \"ceilometer-0\" (UID: \"ee678e41-8db4-4efd-995f-20521fff70ea\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:24 crc kubenswrapper[4558]: I0120 17:19:24.251341 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ee678e41-8db4-4efd-995f-20521fff70ea-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ee678e41-8db4-4efd-995f-20521fff70ea\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:24 crc kubenswrapper[4558]: I0120 17:19:24.251362 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ee678e41-8db4-4efd-995f-20521fff70ea-scripts\") pod \"ceilometer-0\" (UID: \"ee678e41-8db4-4efd-995f-20521fff70ea\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:24 crc kubenswrapper[4558]: I0120 17:19:24.251505 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee678e41-8db4-4efd-995f-20521fff70ea-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ee678e41-8db4-4efd-995f-20521fff70ea\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:24 crc kubenswrapper[4558]: I0120 17:19:24.251575 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ee678e41-8db4-4efd-995f-20521fff70ea-run-httpd\") pod \"ceilometer-0\" (UID: \"ee678e41-8db4-4efd-995f-20521fff70ea\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:24 crc kubenswrapper[4558]: I0120 17:19:24.251964 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ee678e41-8db4-4efd-995f-20521fff70ea-run-httpd\") pod \"ceilometer-0\" (UID: \"ee678e41-8db4-4efd-995f-20521fff70ea\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:24 crc kubenswrapper[4558]: I0120 17:19:24.253001 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ee678e41-8db4-4efd-995f-20521fff70ea-log-httpd\") pod \"ceilometer-0\" (UID: \"ee678e41-8db4-4efd-995f-20521fff70ea\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:24 crc kubenswrapper[4558]: I0120 17:19:24.257729 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ee678e41-8db4-4efd-995f-20521fff70ea-scripts\") pod \"ceilometer-0\" (UID: \"ee678e41-8db4-4efd-995f-20521fff70ea\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:24 crc kubenswrapper[4558]: I0120 17:19:24.258988 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee678e41-8db4-4efd-995f-20521fff70ea-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ee678e41-8db4-4efd-995f-20521fff70ea\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:24 crc kubenswrapper[4558]: I0120 17:19:24.259229 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ee678e41-8db4-4efd-995f-20521fff70ea-config-data\") pod \"ceilometer-0\" (UID: \"ee678e41-8db4-4efd-995f-20521fff70ea\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:24 crc kubenswrapper[4558]: I0120 17:19:24.265040 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ee678e41-8db4-4efd-995f-20521fff70ea-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ee678e41-8db4-4efd-995f-20521fff70ea\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:24 crc kubenswrapper[4558]: I0120 17:19:24.268473 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wzhhz\" (UniqueName: \"kubernetes.io/projected/ee678e41-8db4-4efd-995f-20521fff70ea-kube-api-access-wzhhz\") pod \"ceilometer-0\" (UID: \"ee678e41-8db4-4efd-995f-20521fff70ea\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:24 crc kubenswrapper[4558]: I0120 17:19:24.457921 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:24 crc kubenswrapper[4558]: I0120 17:19:24.584808 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b5e92730-f89e-4f1d-9a29-925c5acda961" path="/var/lib/kubelet/pods/b5e92730-f89e-4f1d-9a29-925c5acda961/volumes" Jan 20 17:19:24 crc kubenswrapper[4558]: I0120 17:19:24.768233 4558 generic.go:334] "Generic (PLEG): container finished" podID="400d0672-2068-470f-983f-df899be25bd5" containerID="a3e6795494d00285a308da4e296be3028957ed7dd4cc3ee1924e313b657aaf6d" exitCode=0 Jan 20 17:19:24 crc kubenswrapper[4558]: I0120 17:19:24.768340 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-4e5b-account-create-update-jp7rq" event={"ID":"400d0672-2068-470f-983f-df899be25bd5","Type":"ContainerDied","Data":"a3e6795494d00285a308da4e296be3028957ed7dd4cc3ee1924e313b657aaf6d"} Jan 20 17:19:24 crc kubenswrapper[4558]: I0120 17:19:24.771884 4558 generic.go:334] "Generic (PLEG): container finished" podID="c9ccad35-4742-4e3e-b281-d76be66e44fd" containerID="b3a2a6a756812c1e7b932ed4a16bde48b9ef348b603e679ada0e05b62df3feb9" exitCode=0 Jan 20 17:19:24 crc kubenswrapper[4558]: I0120 17:19:24.772118 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-db-create-vvdf2" event={"ID":"c9ccad35-4742-4e3e-b281-d76be66e44fd","Type":"ContainerDied","Data":"b3a2a6a756812c1e7b932ed4a16bde48b9ef348b603e679ada0e05b62df3feb9"} Jan 20 17:19:24 crc kubenswrapper[4558]: I0120 17:19:24.781824 4558 generic.go:334] "Generic (PLEG): container finished" podID="8e497b8c-aac4-4095-a801-65e24590b325" containerID="7b5c845f8dfcc82a5614043c05df9578e3d20bc8729179a6b662786a23386251" exitCode=0 Jan 20 17:19:24 crc kubenswrapper[4558]: I0120 17:19:24.781936 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-12cd-account-create-update-8bwmv" event={"ID":"8e497b8c-aac4-4095-a801-65e24590b325","Type":"ContainerDied","Data":"7b5c845f8dfcc82a5614043c05df9578e3d20bc8729179a6b662786a23386251"} Jan 20 17:19:24 crc kubenswrapper[4558]: I0120 17:19:24.781987 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-12cd-account-create-update-8bwmv" event={"ID":"8e497b8c-aac4-4095-a801-65e24590b325","Type":"ContainerStarted","Data":"19c6b76c74511c423e05ac254b43a066b8a39e021ed6b590375bdaa8ac511b62"} Jan 20 17:19:24 crc kubenswrapper[4558]: I0120 17:19:24.787368 4558 generic.go:334] "Generic (PLEG): container finished" podID="9812bc23-9bb1-4b3a-975b-0bffa6e3184f" containerID="5e4527bd6236a18820049fb582501f32bdd922a11816b5d507d98697a896f50a" exitCode=0 Jan 20 17:19:24 crc kubenswrapper[4558]: I0120 17:19:24.787498 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-b69b-account-create-update-ct4qn" event={"ID":"9812bc23-9bb1-4b3a-975b-0bffa6e3184f","Type":"ContainerDied","Data":"5e4527bd6236a18820049fb582501f32bdd922a11816b5d507d98697a896f50a"} Jan 20 17:19:24 crc kubenswrapper[4558]: I0120 17:19:24.787539 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-b69b-account-create-update-ct4qn" event={"ID":"9812bc23-9bb1-4b3a-975b-0bffa6e3184f","Type":"ContainerStarted","Data":"8e93e8379219b74677e7ec49af683020a604cdc4c20c504fd57b42214eaa24ef"} Jan 20 17:19:24 crc kubenswrapper[4558]: I0120 17:19:24.789026 4558 generic.go:334] "Generic (PLEG): container finished" podID="9a1b94fe-db72-418e-bc3b-5dd27adff225" 
containerID="cdc1d8ff5500de6fc7f67919f8800d1bb3a91e560eaa7ae6eb032ebb3cf4ef8f" exitCode=0 Jan 20 17:19:24 crc kubenswrapper[4558]: I0120 17:19:24.789106 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-db-create-7nj9x" event={"ID":"9a1b94fe-db72-418e-bc3b-5dd27adff225","Type":"ContainerDied","Data":"cdc1d8ff5500de6fc7f67919f8800d1bb3a91e560eaa7ae6eb032ebb3cf4ef8f"} Jan 20 17:19:24 crc kubenswrapper[4558]: I0120 17:19:24.790814 4558 generic.go:334] "Generic (PLEG): container finished" podID="3a99ccdf-41ef-42bc-8d10-4c2cf74d3ac9" containerID="138776071c1cd283c04ad3bac5afc2413a017d74f6650ba993ce0dddc48944ab" exitCode=0 Jan 20 17:19:24 crc kubenswrapper[4558]: I0120 17:19:24.790882 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-db-create-hpfj8" event={"ID":"3a99ccdf-41ef-42bc-8d10-4c2cf74d3ac9","Type":"ContainerDied","Data":"138776071c1cd283c04ad3bac5afc2413a017d74f6650ba993ce0dddc48944ab"} Jan 20 17:19:24 crc kubenswrapper[4558]: I0120 17:19:24.876060 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:19:24 crc kubenswrapper[4558]: W0120 17:19:24.876616 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podee678e41_8db4_4efd_995f_20521fff70ea.slice/crio-f9320d05323d892d0d5f93dc233c7faa5987742150a6fcea23c6c032bfa86d0f WatchSource:0}: Error finding container f9320d05323d892d0d5f93dc233c7faa5987742150a6fcea23c6c032bfa86d0f: Status 404 returned error can't find the container with id f9320d05323d892d0d5f93dc233c7faa5987742150a6fcea23c6c032bfa86d0f Jan 20 17:19:25 crc kubenswrapper[4558]: I0120 17:19:25.816242 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"ee678e41-8db4-4efd-995f-20521fff70ea","Type":"ContainerStarted","Data":"d596ad3c068f61543bd30a635e39b820fdca5a47ea6a64733298f12c6dfb6d32"} Jan 20 17:19:25 crc kubenswrapper[4558]: I0120 17:19:25.816575 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"ee678e41-8db4-4efd-995f-20521fff70ea","Type":"ContainerStarted","Data":"f9320d05323d892d0d5f93dc233c7faa5987742150a6fcea23c6c032bfa86d0f"} Jan 20 17:19:26 crc kubenswrapper[4558]: I0120 17:19:26.190771 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-db-create-vvdf2" Jan 20 17:19:26 crc kubenswrapper[4558]: I0120 17:19:26.201959 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jcbn8\" (UniqueName: \"kubernetes.io/projected/c9ccad35-4742-4e3e-b281-d76be66e44fd-kube-api-access-jcbn8\") pod \"c9ccad35-4742-4e3e-b281-d76be66e44fd\" (UID: \"c9ccad35-4742-4e3e-b281-d76be66e44fd\") " Jan 20 17:19:26 crc kubenswrapper[4558]: I0120 17:19:26.202212 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c9ccad35-4742-4e3e-b281-d76be66e44fd-operator-scripts\") pod \"c9ccad35-4742-4e3e-b281-d76be66e44fd\" (UID: \"c9ccad35-4742-4e3e-b281-d76be66e44fd\") " Jan 20 17:19:26 crc kubenswrapper[4558]: I0120 17:19:26.202826 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c9ccad35-4742-4e3e-b281-d76be66e44fd-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c9ccad35-4742-4e3e-b281-d76be66e44fd" (UID: "c9ccad35-4742-4e3e-b281-d76be66e44fd"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:19:26 crc kubenswrapper[4558]: I0120 17:19:26.216338 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c9ccad35-4742-4e3e-b281-d76be66e44fd-kube-api-access-jcbn8" (OuterVolumeSpecName: "kube-api-access-jcbn8") pod "c9ccad35-4742-4e3e-b281-d76be66e44fd" (UID: "c9ccad35-4742-4e3e-b281-d76be66e44fd"). InnerVolumeSpecName "kube-api-access-jcbn8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:19:26 crc kubenswrapper[4558]: I0120 17:19:26.304538 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jcbn8\" (UniqueName: \"kubernetes.io/projected/c9ccad35-4742-4e3e-b281-d76be66e44fd-kube-api-access-jcbn8\") on node \"crc\" DevicePath \"\"" Jan 20 17:19:26 crc kubenswrapper[4558]: I0120 17:19:26.304566 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c9ccad35-4742-4e3e-b281-d76be66e44fd-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:19:26 crc kubenswrapper[4558]: I0120 17:19:26.477851 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-b69b-account-create-update-ct4qn" Jan 20 17:19:26 crc kubenswrapper[4558]: I0120 17:19:26.481933 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-4e5b-account-create-update-jp7rq" Jan 20 17:19:26 crc kubenswrapper[4558]: I0120 17:19:26.485726 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-12cd-account-create-update-8bwmv" Jan 20 17:19:26 crc kubenswrapper[4558]: I0120 17:19:26.490477 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-db-create-7nj9x" Jan 20 17:19:26 crc kubenswrapper[4558]: I0120 17:19:26.507070 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9a1b94fe-db72-418e-bc3b-5dd27adff225-operator-scripts\") pod \"9a1b94fe-db72-418e-bc3b-5dd27adff225\" (UID: \"9a1b94fe-db72-418e-bc3b-5dd27adff225\") " Jan 20 17:19:26 crc kubenswrapper[4558]: I0120 17:19:26.507113 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9812bc23-9bb1-4b3a-975b-0bffa6e3184f-operator-scripts\") pod \"9812bc23-9bb1-4b3a-975b-0bffa6e3184f\" (UID: \"9812bc23-9bb1-4b3a-975b-0bffa6e3184f\") " Jan 20 17:19:26 crc kubenswrapper[4558]: I0120 17:19:26.507148 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f2s5w\" (UniqueName: \"kubernetes.io/projected/9a1b94fe-db72-418e-bc3b-5dd27adff225-kube-api-access-f2s5w\") pod \"9a1b94fe-db72-418e-bc3b-5dd27adff225\" (UID: \"9a1b94fe-db72-418e-bc3b-5dd27adff225\") " Jan 20 17:19:26 crc kubenswrapper[4558]: I0120 17:19:26.507214 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8e497b8c-aac4-4095-a801-65e24590b325-operator-scripts\") pod \"8e497b8c-aac4-4095-a801-65e24590b325\" (UID: \"8e497b8c-aac4-4095-a801-65e24590b325\") " Jan 20 17:19:26 crc kubenswrapper[4558]: I0120 17:19:26.507263 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t5694\" (UniqueName: \"kubernetes.io/projected/9812bc23-9bb1-4b3a-975b-0bffa6e3184f-kube-api-access-t5694\") pod \"9812bc23-9bb1-4b3a-975b-0bffa6e3184f\" (UID: \"9812bc23-9bb1-4b3a-975b-0bffa6e3184f\") " Jan 20 17:19:26 crc kubenswrapper[4558]: I0120 17:19:26.507332 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h8kg2\" (UniqueName: \"kubernetes.io/projected/400d0672-2068-470f-983f-df899be25bd5-kube-api-access-h8kg2\") pod \"400d0672-2068-470f-983f-df899be25bd5\" (UID: \"400d0672-2068-470f-983f-df899be25bd5\") " Jan 20 17:19:26 crc kubenswrapper[4558]: I0120 17:19:26.507351 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xmc7q\" (UniqueName: \"kubernetes.io/projected/8e497b8c-aac4-4095-a801-65e24590b325-kube-api-access-xmc7q\") pod \"8e497b8c-aac4-4095-a801-65e24590b325\" (UID: \"8e497b8c-aac4-4095-a801-65e24590b325\") " Jan 20 17:19:26 crc kubenswrapper[4558]: I0120 17:19:26.507384 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/400d0672-2068-470f-983f-df899be25bd5-operator-scripts\") pod \"400d0672-2068-470f-983f-df899be25bd5\" (UID: \"400d0672-2068-470f-983f-df899be25bd5\") " Jan 20 17:19:26 crc kubenswrapper[4558]: I0120 17:19:26.507748 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8e497b8c-aac4-4095-a801-65e24590b325-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "8e497b8c-aac4-4095-a801-65e24590b325" (UID: "8e497b8c-aac4-4095-a801-65e24590b325"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:19:26 crc kubenswrapper[4558]: I0120 17:19:26.507799 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9812bc23-9bb1-4b3a-975b-0bffa6e3184f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "9812bc23-9bb1-4b3a-975b-0bffa6e3184f" (UID: "9812bc23-9bb1-4b3a-975b-0bffa6e3184f"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:19:26 crc kubenswrapper[4558]: I0120 17:19:26.508225 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9a1b94fe-db72-418e-bc3b-5dd27adff225-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "9a1b94fe-db72-418e-bc3b-5dd27adff225" (UID: "9a1b94fe-db72-418e-bc3b-5dd27adff225"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:19:26 crc kubenswrapper[4558]: I0120 17:19:26.508663 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/400d0672-2068-470f-983f-df899be25bd5-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "400d0672-2068-470f-983f-df899be25bd5" (UID: "400d0672-2068-470f-983f-df899be25bd5"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:19:26 crc kubenswrapper[4558]: I0120 17:19:26.508982 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/400d0672-2068-470f-983f-df899be25bd5-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:19:26 crc kubenswrapper[4558]: I0120 17:19:26.509005 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9a1b94fe-db72-418e-bc3b-5dd27adff225-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:19:26 crc kubenswrapper[4558]: I0120 17:19:26.509015 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9812bc23-9bb1-4b3a-975b-0bffa6e3184f-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:19:26 crc kubenswrapper[4558]: I0120 17:19:26.509026 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8e497b8c-aac4-4095-a801-65e24590b325-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:19:26 crc kubenswrapper[4558]: I0120 17:19:26.511980 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9812bc23-9bb1-4b3a-975b-0bffa6e3184f-kube-api-access-t5694" (OuterVolumeSpecName: "kube-api-access-t5694") pod "9812bc23-9bb1-4b3a-975b-0bffa6e3184f" (UID: "9812bc23-9bb1-4b3a-975b-0bffa6e3184f"). InnerVolumeSpecName "kube-api-access-t5694". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:19:26 crc kubenswrapper[4558]: I0120 17:19:26.512352 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/400d0672-2068-470f-983f-df899be25bd5-kube-api-access-h8kg2" (OuterVolumeSpecName: "kube-api-access-h8kg2") pod "400d0672-2068-470f-983f-df899be25bd5" (UID: "400d0672-2068-470f-983f-df899be25bd5"). InnerVolumeSpecName "kube-api-access-h8kg2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:19:26 crc kubenswrapper[4558]: I0120 17:19:26.512499 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8e497b8c-aac4-4095-a801-65e24590b325-kube-api-access-xmc7q" (OuterVolumeSpecName: "kube-api-access-xmc7q") pod "8e497b8c-aac4-4095-a801-65e24590b325" (UID: "8e497b8c-aac4-4095-a801-65e24590b325"). InnerVolumeSpecName "kube-api-access-xmc7q". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:19:26 crc kubenswrapper[4558]: I0120 17:19:26.515497 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9a1b94fe-db72-418e-bc3b-5dd27adff225-kube-api-access-f2s5w" (OuterVolumeSpecName: "kube-api-access-f2s5w") pod "9a1b94fe-db72-418e-bc3b-5dd27adff225" (UID: "9a1b94fe-db72-418e-bc3b-5dd27adff225"). InnerVolumeSpecName "kube-api-access-f2s5w". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:19:26 crc kubenswrapper[4558]: I0120 17:19:26.515900 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-db-create-hpfj8" Jan 20 17:19:26 crc kubenswrapper[4558]: I0120 17:19:26.612341 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f2s5w\" (UniqueName: \"kubernetes.io/projected/9a1b94fe-db72-418e-bc3b-5dd27adff225-kube-api-access-f2s5w\") on node \"crc\" DevicePath \"\"" Jan 20 17:19:26 crc kubenswrapper[4558]: I0120 17:19:26.612499 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t5694\" (UniqueName: \"kubernetes.io/projected/9812bc23-9bb1-4b3a-975b-0bffa6e3184f-kube-api-access-t5694\") on node \"crc\" DevicePath \"\"" Jan 20 17:19:26 crc kubenswrapper[4558]: I0120 17:19:26.612600 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h8kg2\" (UniqueName: \"kubernetes.io/projected/400d0672-2068-470f-983f-df899be25bd5-kube-api-access-h8kg2\") on node \"crc\" DevicePath \"\"" Jan 20 17:19:26 crc kubenswrapper[4558]: I0120 17:19:26.612721 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xmc7q\" (UniqueName: \"kubernetes.io/projected/8e497b8c-aac4-4095-a801-65e24590b325-kube-api-access-xmc7q\") on node \"crc\" DevicePath \"\"" Jan 20 17:19:26 crc kubenswrapper[4558]: I0120 17:19:26.714390 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bcwsf\" (UniqueName: \"kubernetes.io/projected/3a99ccdf-41ef-42bc-8d10-4c2cf74d3ac9-kube-api-access-bcwsf\") pod \"3a99ccdf-41ef-42bc-8d10-4c2cf74d3ac9\" (UID: \"3a99ccdf-41ef-42bc-8d10-4c2cf74d3ac9\") " Jan 20 17:19:26 crc kubenswrapper[4558]: I0120 17:19:26.714668 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3a99ccdf-41ef-42bc-8d10-4c2cf74d3ac9-operator-scripts\") pod \"3a99ccdf-41ef-42bc-8d10-4c2cf74d3ac9\" (UID: \"3a99ccdf-41ef-42bc-8d10-4c2cf74d3ac9\") " Jan 20 17:19:26 crc kubenswrapper[4558]: I0120 17:19:26.715945 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3a99ccdf-41ef-42bc-8d10-4c2cf74d3ac9-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "3a99ccdf-41ef-42bc-8d10-4c2cf74d3ac9" (UID: "3a99ccdf-41ef-42bc-8d10-4c2cf74d3ac9"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:19:26 crc kubenswrapper[4558]: I0120 17:19:26.719343 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3a99ccdf-41ef-42bc-8d10-4c2cf74d3ac9-kube-api-access-bcwsf" (OuterVolumeSpecName: "kube-api-access-bcwsf") pod "3a99ccdf-41ef-42bc-8d10-4c2cf74d3ac9" (UID: "3a99ccdf-41ef-42bc-8d10-4c2cf74d3ac9"). InnerVolumeSpecName "kube-api-access-bcwsf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:19:26 crc kubenswrapper[4558]: I0120 17:19:26.824799 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bcwsf\" (UniqueName: \"kubernetes.io/projected/3a99ccdf-41ef-42bc-8d10-4c2cf74d3ac9-kube-api-access-bcwsf\") on node \"crc\" DevicePath \"\"" Jan 20 17:19:26 crc kubenswrapper[4558]: I0120 17:19:26.824842 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3a99ccdf-41ef-42bc-8d10-4c2cf74d3ac9-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:19:26 crc kubenswrapper[4558]: I0120 17:19:26.839599 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-12cd-account-create-update-8bwmv" event={"ID":"8e497b8c-aac4-4095-a801-65e24590b325","Type":"ContainerDied","Data":"19c6b76c74511c423e05ac254b43a066b8a39e021ed6b590375bdaa8ac511b62"} Jan 20 17:19:26 crc kubenswrapper[4558]: I0120 17:19:26.839867 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="19c6b76c74511c423e05ac254b43a066b8a39e021ed6b590375bdaa8ac511b62" Jan 20 17:19:26 crc kubenswrapper[4558]: I0120 17:19:26.839626 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-12cd-account-create-update-8bwmv" Jan 20 17:19:26 crc kubenswrapper[4558]: I0120 17:19:26.841836 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-b69b-account-create-update-ct4qn" event={"ID":"9812bc23-9bb1-4b3a-975b-0bffa6e3184f","Type":"ContainerDied","Data":"8e93e8379219b74677e7ec49af683020a604cdc4c20c504fd57b42214eaa24ef"} Jan 20 17:19:26 crc kubenswrapper[4558]: I0120 17:19:26.841935 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8e93e8379219b74677e7ec49af683020a604cdc4c20c504fd57b42214eaa24ef" Jan 20 17:19:26 crc kubenswrapper[4558]: I0120 17:19:26.841849 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-b69b-account-create-update-ct4qn" Jan 20 17:19:26 crc kubenswrapper[4558]: I0120 17:19:26.844663 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-db-create-7nj9x" event={"ID":"9a1b94fe-db72-418e-bc3b-5dd27adff225","Type":"ContainerDied","Data":"2cc8a32abdd5268f2784b4089fbbfe4cb6505339b76ba83fd88e7bc20bf2c11e"} Jan 20 17:19:26 crc kubenswrapper[4558]: I0120 17:19:26.844721 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2cc8a32abdd5268f2784b4089fbbfe4cb6505339b76ba83fd88e7bc20bf2c11e" Jan 20 17:19:26 crc kubenswrapper[4558]: I0120 17:19:26.844832 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-db-create-7nj9x" Jan 20 17:19:26 crc kubenswrapper[4558]: I0120 17:19:26.847390 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"ee678e41-8db4-4efd-995f-20521fff70ea","Type":"ContainerStarted","Data":"749f4f02ed611a21a04fad4c9d72e8071779fe9c7f457f44841339d9e5513b35"} Jan 20 17:19:26 crc kubenswrapper[4558]: I0120 17:19:26.849536 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-db-create-hpfj8" event={"ID":"3a99ccdf-41ef-42bc-8d10-4c2cf74d3ac9","Type":"ContainerDied","Data":"d3893fafa0154321bbfbc6d69a94994034c7cfc16aecd63ed0c840654bc8c88c"} Jan 20 17:19:26 crc kubenswrapper[4558]: I0120 17:19:26.849566 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d3893fafa0154321bbfbc6d69a94994034c7cfc16aecd63ed0c840654bc8c88c" Jan 20 17:19:26 crc kubenswrapper[4558]: I0120 17:19:26.849714 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-db-create-hpfj8" Jan 20 17:19:26 crc kubenswrapper[4558]: I0120 17:19:26.850802 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-4e5b-account-create-update-jp7rq" event={"ID":"400d0672-2068-470f-983f-df899be25bd5","Type":"ContainerDied","Data":"10d590f128d62555192f25ece98c1ac03744eea992f59ddd0f7a23599fc75759"} Jan 20 17:19:26 crc kubenswrapper[4558]: I0120 17:19:26.850824 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="10d590f128d62555192f25ece98c1ac03744eea992f59ddd0f7a23599fc75759" Jan 20 17:19:26 crc kubenswrapper[4558]: I0120 17:19:26.850889 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-4e5b-account-create-update-jp7rq" Jan 20 17:19:26 crc kubenswrapper[4558]: I0120 17:19:26.854245 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-db-create-vvdf2" event={"ID":"c9ccad35-4742-4e3e-b281-d76be66e44fd","Type":"ContainerDied","Data":"b70e888c4f9d1f09a7591affc7f3cb35eca17be6133bdd9ddc39723d58674730"} Jan 20 17:19:26 crc kubenswrapper[4558]: I0120 17:19:26.854886 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b70e888c4f9d1f09a7591affc7f3cb35eca17be6133bdd9ddc39723d58674730" Jan 20 17:19:26 crc kubenswrapper[4558]: I0120 17:19:26.854291 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-db-create-vvdf2" Jan 20 17:19:27 crc kubenswrapper[4558]: I0120 17:19:27.330057 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:19:27 crc kubenswrapper[4558]: I0120 17:19:27.872053 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"ee678e41-8db4-4efd-995f-20521fff70ea","Type":"ContainerStarted","Data":"b6cfe71a0076941015d0577220c08851725d70b0a29e663b1c8b13a24fbc926a"} Jan 20 17:19:28 crc kubenswrapper[4558]: I0120 17:19:28.160751 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-db-sync-2rm29"] Jan 20 17:19:28 crc kubenswrapper[4558]: E0120 17:19:28.161439 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c9ccad35-4742-4e3e-b281-d76be66e44fd" containerName="mariadb-database-create" Jan 20 17:19:28 crc kubenswrapper[4558]: I0120 17:19:28.161457 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c9ccad35-4742-4e3e-b281-d76be66e44fd" containerName="mariadb-database-create" Jan 20 17:19:28 crc kubenswrapper[4558]: E0120 17:19:28.161478 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a1b94fe-db72-418e-bc3b-5dd27adff225" containerName="mariadb-database-create" Jan 20 17:19:28 crc kubenswrapper[4558]: I0120 17:19:28.161484 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a1b94fe-db72-418e-bc3b-5dd27adff225" containerName="mariadb-database-create" Jan 20 17:19:28 crc kubenswrapper[4558]: E0120 17:19:28.161494 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a99ccdf-41ef-42bc-8d10-4c2cf74d3ac9" containerName="mariadb-database-create" Jan 20 17:19:28 crc kubenswrapper[4558]: I0120 17:19:28.161501 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a99ccdf-41ef-42bc-8d10-4c2cf74d3ac9" containerName="mariadb-database-create" Jan 20 17:19:28 crc kubenswrapper[4558]: E0120 17:19:28.161530 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="400d0672-2068-470f-983f-df899be25bd5" containerName="mariadb-account-create-update" Jan 20 17:19:28 crc kubenswrapper[4558]: I0120 17:19:28.161536 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="400d0672-2068-470f-983f-df899be25bd5" containerName="mariadb-account-create-update" Jan 20 17:19:28 crc kubenswrapper[4558]: E0120 17:19:28.161555 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9812bc23-9bb1-4b3a-975b-0bffa6e3184f" containerName="mariadb-account-create-update" Jan 20 17:19:28 crc kubenswrapper[4558]: I0120 17:19:28.161561 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9812bc23-9bb1-4b3a-975b-0bffa6e3184f" containerName="mariadb-account-create-update" Jan 20 17:19:28 crc kubenswrapper[4558]: E0120 17:19:28.161567 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e497b8c-aac4-4095-a801-65e24590b325" containerName="mariadb-account-create-update" Jan 20 17:19:28 crc kubenswrapper[4558]: I0120 17:19:28.161572 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e497b8c-aac4-4095-a801-65e24590b325" containerName="mariadb-account-create-update" Jan 20 17:19:28 crc kubenswrapper[4558]: I0120 17:19:28.161742 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="400d0672-2068-470f-983f-df899be25bd5" containerName="mariadb-account-create-update" Jan 20 17:19:28 crc kubenswrapper[4558]: I0120 17:19:28.161752 4558 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="9a1b94fe-db72-418e-bc3b-5dd27adff225" containerName="mariadb-database-create" Jan 20 17:19:28 crc kubenswrapper[4558]: I0120 17:19:28.161760 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c9ccad35-4742-4e3e-b281-d76be66e44fd" containerName="mariadb-database-create" Jan 20 17:19:28 crc kubenswrapper[4558]: I0120 17:19:28.161770 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="9812bc23-9bb1-4b3a-975b-0bffa6e3184f" containerName="mariadb-account-create-update" Jan 20 17:19:28 crc kubenswrapper[4558]: I0120 17:19:28.161781 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="3a99ccdf-41ef-42bc-8d10-4c2cf74d3ac9" containerName="mariadb-database-create" Jan 20 17:19:28 crc kubenswrapper[4558]: I0120 17:19:28.161798 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8e497b8c-aac4-4095-a801-65e24590b325" containerName="mariadb-account-create-update" Jan 20 17:19:28 crc kubenswrapper[4558]: I0120 17:19:28.162497 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-2rm29" Jan 20 17:19:28 crc kubenswrapper[4558]: I0120 17:19:28.163955 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-conductor-scripts" Jan 20 17:19:28 crc kubenswrapper[4558]: I0120 17:19:28.164974 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-conductor-config-data" Jan 20 17:19:28 crc kubenswrapper[4558]: I0120 17:19:28.165098 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-nova-dockercfg-tw79t" Jan 20 17:19:28 crc kubenswrapper[4558]: I0120 17:19:28.170222 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-db-sync-2rm29"] Jan 20 17:19:28 crc kubenswrapper[4558]: I0120 17:19:28.363587 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/743c3bad-0487-4ff7-a447-82cec153136a-config-data\") pod \"nova-cell0-conductor-db-sync-2rm29\" (UID: \"743c3bad-0487-4ff7-a447-82cec153136a\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-2rm29" Jan 20 17:19:28 crc kubenswrapper[4558]: I0120 17:19:28.363705 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nff5p\" (UniqueName: \"kubernetes.io/projected/743c3bad-0487-4ff7-a447-82cec153136a-kube-api-access-nff5p\") pod \"nova-cell0-conductor-db-sync-2rm29\" (UID: \"743c3bad-0487-4ff7-a447-82cec153136a\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-2rm29" Jan 20 17:19:28 crc kubenswrapper[4558]: I0120 17:19:28.363734 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/743c3bad-0487-4ff7-a447-82cec153136a-scripts\") pod \"nova-cell0-conductor-db-sync-2rm29\" (UID: \"743c3bad-0487-4ff7-a447-82cec153136a\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-2rm29" Jan 20 17:19:28 crc kubenswrapper[4558]: I0120 17:19:28.363807 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/743c3bad-0487-4ff7-a447-82cec153136a-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-2rm29\" (UID: 
\"743c3bad-0487-4ff7-a447-82cec153136a\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-2rm29" Jan 20 17:19:28 crc kubenswrapper[4558]: I0120 17:19:28.468451 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/743c3bad-0487-4ff7-a447-82cec153136a-config-data\") pod \"nova-cell0-conductor-db-sync-2rm29\" (UID: \"743c3bad-0487-4ff7-a447-82cec153136a\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-2rm29" Jan 20 17:19:28 crc kubenswrapper[4558]: I0120 17:19:28.468668 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nff5p\" (UniqueName: \"kubernetes.io/projected/743c3bad-0487-4ff7-a447-82cec153136a-kube-api-access-nff5p\") pod \"nova-cell0-conductor-db-sync-2rm29\" (UID: \"743c3bad-0487-4ff7-a447-82cec153136a\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-2rm29" Jan 20 17:19:28 crc kubenswrapper[4558]: I0120 17:19:28.468713 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/743c3bad-0487-4ff7-a447-82cec153136a-scripts\") pod \"nova-cell0-conductor-db-sync-2rm29\" (UID: \"743c3bad-0487-4ff7-a447-82cec153136a\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-2rm29" Jan 20 17:19:28 crc kubenswrapper[4558]: I0120 17:19:28.468832 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/743c3bad-0487-4ff7-a447-82cec153136a-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-2rm29\" (UID: \"743c3bad-0487-4ff7-a447-82cec153136a\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-2rm29" Jan 20 17:19:28 crc kubenswrapper[4558]: I0120 17:19:28.478031 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/743c3bad-0487-4ff7-a447-82cec153136a-scripts\") pod \"nova-cell0-conductor-db-sync-2rm29\" (UID: \"743c3bad-0487-4ff7-a447-82cec153136a\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-2rm29" Jan 20 17:19:28 crc kubenswrapper[4558]: I0120 17:19:28.478385 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/743c3bad-0487-4ff7-a447-82cec153136a-config-data\") pod \"nova-cell0-conductor-db-sync-2rm29\" (UID: \"743c3bad-0487-4ff7-a447-82cec153136a\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-2rm29" Jan 20 17:19:28 crc kubenswrapper[4558]: I0120 17:19:28.479081 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/743c3bad-0487-4ff7-a447-82cec153136a-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-2rm29\" (UID: \"743c3bad-0487-4ff7-a447-82cec153136a\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-2rm29" Jan 20 17:19:28 crc kubenswrapper[4558]: I0120 17:19:28.488899 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nff5p\" (UniqueName: \"kubernetes.io/projected/743c3bad-0487-4ff7-a447-82cec153136a-kube-api-access-nff5p\") pod \"nova-cell0-conductor-db-sync-2rm29\" (UID: \"743c3bad-0487-4ff7-a447-82cec153136a\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-2rm29" Jan 20 17:19:28 crc kubenswrapper[4558]: I0120 17:19:28.567328 4558 scope.go:117] "RemoveContainer" containerID="0ffce44366a8b4466427b682fb41640425366a0f4449210959148de9aef695c6" Jan 20 
17:19:28 crc kubenswrapper[4558]: E0120 17:19:28.567657 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:19:28 crc kubenswrapper[4558]: I0120 17:19:28.784966 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-2rm29" Jan 20 17:19:28 crc kubenswrapper[4558]: I0120 17:19:28.860137 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/swift-proxy-85c8dfdd74-sjsz2" Jan 20 17:19:28 crc kubenswrapper[4558]: I0120 17:19:28.864092 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/swift-proxy-85c8dfdd74-sjsz2" Jan 20 17:19:29 crc kubenswrapper[4558]: I0120 17:19:29.268095 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-db-sync-2rm29"] Jan 20 17:19:29 crc kubenswrapper[4558]: I0120 17:19:29.917484 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-2rm29" event={"ID":"743c3bad-0487-4ff7-a447-82cec153136a","Type":"ContainerStarted","Data":"fca2ff1c6569ac1015ba889896d9030e224df70272a9e9b20388cfb82fa616ca"} Jan 20 17:19:29 crc kubenswrapper[4558]: I0120 17:19:29.917930 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-2rm29" event={"ID":"743c3bad-0487-4ff7-a447-82cec153136a","Type":"ContainerStarted","Data":"ef4f0b9880ea70adc5885ba48263aa5a3590f0d41a65838e42ba528c7fa5f971"} Jan 20 17:19:29 crc kubenswrapper[4558]: I0120 17:19:29.922179 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"ee678e41-8db4-4efd-995f-20521fff70ea","Type":"ContainerStarted","Data":"c609721cd0f7e7990b84215837c14634c6d28fc45f9e08800ec3d1ffd1655ac4"} Jan 20 17:19:29 crc kubenswrapper[4558]: I0120 17:19:29.922381 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="ee678e41-8db4-4efd-995f-20521fff70ea" containerName="ceilometer-central-agent" containerID="cri-o://d596ad3c068f61543bd30a635e39b820fdca5a47ea6a64733298f12c6dfb6d32" gracePeriod=30 Jan 20 17:19:29 crc kubenswrapper[4558]: I0120 17:19:29.922733 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:29 crc kubenswrapper[4558]: I0120 17:19:29.922811 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="ee678e41-8db4-4efd-995f-20521fff70ea" containerName="proxy-httpd" containerID="cri-o://c609721cd0f7e7990b84215837c14634c6d28fc45f9e08800ec3d1ffd1655ac4" gracePeriod=30 Jan 20 17:19:29 crc kubenswrapper[4558]: I0120 17:19:29.922882 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="ee678e41-8db4-4efd-995f-20521fff70ea" containerName="sg-core" containerID="cri-o://b6cfe71a0076941015d0577220c08851725d70b0a29e663b1c8b13a24fbc926a" gracePeriod=30 Jan 20 17:19:29 crc kubenswrapper[4558]: I0120 17:19:29.922938 4558 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="ee678e41-8db4-4efd-995f-20521fff70ea" containerName="ceilometer-notification-agent" containerID="cri-o://749f4f02ed611a21a04fad4c9d72e8071779fe9c7f457f44841339d9e5513b35" gracePeriod=30 Jan 20 17:19:29 crc kubenswrapper[4558]: I0120 17:19:29.939737 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-2rm29" podStartSLOduration=1.9397203950000002 podStartE2EDuration="1.939720395s" podCreationTimestamp="2026-01-20 17:19:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:19:29.935130004 +0000 UTC m=+2263.695467970" watchObservedRunningTime="2026-01-20 17:19:29.939720395 +0000 UTC m=+2263.700058362" Jan 20 17:19:29 crc kubenswrapper[4558]: I0120 17:19:29.960692 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=2.097243818 podStartE2EDuration="5.960680721s" podCreationTimestamp="2026-01-20 17:19:24 +0000 UTC" firstStartedPulling="2026-01-20 17:19:24.879358823 +0000 UTC m=+2258.639696790" lastFinishedPulling="2026-01-20 17:19:28.742795737 +0000 UTC m=+2262.503133693" observedRunningTime="2026-01-20 17:19:29.952308217 +0000 UTC m=+2263.712646184" watchObservedRunningTime="2026-01-20 17:19:29.960680721 +0000 UTC m=+2263.721018688" Jan 20 17:19:30 crc kubenswrapper[4558]: I0120 17:19:30.609239 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:30 crc kubenswrapper[4558]: I0120 17:19:30.722015 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee678e41-8db4-4efd-995f-20521fff70ea-combined-ca-bundle\") pod \"ee678e41-8db4-4efd-995f-20521fff70ea\" (UID: \"ee678e41-8db4-4efd-995f-20521fff70ea\") " Jan 20 17:19:30 crc kubenswrapper[4558]: I0120 17:19:30.722112 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ee678e41-8db4-4efd-995f-20521fff70ea-config-data\") pod \"ee678e41-8db4-4efd-995f-20521fff70ea\" (UID: \"ee678e41-8db4-4efd-995f-20521fff70ea\") " Jan 20 17:19:30 crc kubenswrapper[4558]: I0120 17:19:30.722194 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wzhhz\" (UniqueName: \"kubernetes.io/projected/ee678e41-8db4-4efd-995f-20521fff70ea-kube-api-access-wzhhz\") pod \"ee678e41-8db4-4efd-995f-20521fff70ea\" (UID: \"ee678e41-8db4-4efd-995f-20521fff70ea\") " Jan 20 17:19:30 crc kubenswrapper[4558]: I0120 17:19:30.722414 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ee678e41-8db4-4efd-995f-20521fff70ea-sg-core-conf-yaml\") pod \"ee678e41-8db4-4efd-995f-20521fff70ea\" (UID: \"ee678e41-8db4-4efd-995f-20521fff70ea\") " Jan 20 17:19:30 crc kubenswrapper[4558]: I0120 17:19:30.723867 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ee678e41-8db4-4efd-995f-20521fff70ea-log-httpd\") pod \"ee678e41-8db4-4efd-995f-20521fff70ea\" (UID: \"ee678e41-8db4-4efd-995f-20521fff70ea\") " Jan 20 17:19:30 crc kubenswrapper[4558]: I0120 17:19:30.724001 4558 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ee678e41-8db4-4efd-995f-20521fff70ea-run-httpd\") pod \"ee678e41-8db4-4efd-995f-20521fff70ea\" (UID: \"ee678e41-8db4-4efd-995f-20521fff70ea\") " Jan 20 17:19:30 crc kubenswrapper[4558]: I0120 17:19:30.724084 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ee678e41-8db4-4efd-995f-20521fff70ea-scripts\") pod \"ee678e41-8db4-4efd-995f-20521fff70ea\" (UID: \"ee678e41-8db4-4efd-995f-20521fff70ea\") " Jan 20 17:19:30 crc kubenswrapper[4558]: I0120 17:19:30.724494 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ee678e41-8db4-4efd-995f-20521fff70ea-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "ee678e41-8db4-4efd-995f-20521fff70ea" (UID: "ee678e41-8db4-4efd-995f-20521fff70ea"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:19:30 crc kubenswrapper[4558]: I0120 17:19:30.725536 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ee678e41-8db4-4efd-995f-20521fff70ea-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "ee678e41-8db4-4efd-995f-20521fff70ea" (UID: "ee678e41-8db4-4efd-995f-20521fff70ea"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:19:30 crc kubenswrapper[4558]: I0120 17:19:30.727283 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ee678e41-8db4-4efd-995f-20521fff70ea-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:19:30 crc kubenswrapper[4558]: I0120 17:19:30.727317 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ee678e41-8db4-4efd-995f-20521fff70ea-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:19:30 crc kubenswrapper[4558]: I0120 17:19:30.732316 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ee678e41-8db4-4efd-995f-20521fff70ea-kube-api-access-wzhhz" (OuterVolumeSpecName: "kube-api-access-wzhhz") pod "ee678e41-8db4-4efd-995f-20521fff70ea" (UID: "ee678e41-8db4-4efd-995f-20521fff70ea"). InnerVolumeSpecName "kube-api-access-wzhhz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:19:30 crc kubenswrapper[4558]: I0120 17:19:30.738425 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee678e41-8db4-4efd-995f-20521fff70ea-scripts" (OuterVolumeSpecName: "scripts") pod "ee678e41-8db4-4efd-995f-20521fff70ea" (UID: "ee678e41-8db4-4efd-995f-20521fff70ea"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:19:30 crc kubenswrapper[4558]: I0120 17:19:30.772546 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee678e41-8db4-4efd-995f-20521fff70ea-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "ee678e41-8db4-4efd-995f-20521fff70ea" (UID: "ee678e41-8db4-4efd-995f-20521fff70ea"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:19:30 crc kubenswrapper[4558]: I0120 17:19:30.797231 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee678e41-8db4-4efd-995f-20521fff70ea-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ee678e41-8db4-4efd-995f-20521fff70ea" (UID: "ee678e41-8db4-4efd-995f-20521fff70ea"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:19:30 crc kubenswrapper[4558]: I0120 17:19:30.828906 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ee678e41-8db4-4efd-995f-20521fff70ea-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:19:30 crc kubenswrapper[4558]: I0120 17:19:30.829017 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wzhhz\" (UniqueName: \"kubernetes.io/projected/ee678e41-8db4-4efd-995f-20521fff70ea-kube-api-access-wzhhz\") on node \"crc\" DevicePath \"\"" Jan 20 17:19:30 crc kubenswrapper[4558]: I0120 17:19:30.829033 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ee678e41-8db4-4efd-995f-20521fff70ea-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:19:30 crc kubenswrapper[4558]: I0120 17:19:30.829047 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ee678e41-8db4-4efd-995f-20521fff70ea-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:19:30 crc kubenswrapper[4558]: I0120 17:19:30.841241 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee678e41-8db4-4efd-995f-20521fff70ea-config-data" (OuterVolumeSpecName: "config-data") pod "ee678e41-8db4-4efd-995f-20521fff70ea" (UID: "ee678e41-8db4-4efd-995f-20521fff70ea"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:19:30 crc kubenswrapper[4558]: I0120 17:19:30.932086 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ee678e41-8db4-4efd-995f-20521fff70ea-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:19:30 crc kubenswrapper[4558]: I0120 17:19:30.933453 4558 generic.go:334] "Generic (PLEG): container finished" podID="ee678e41-8db4-4efd-995f-20521fff70ea" containerID="c609721cd0f7e7990b84215837c14634c6d28fc45f9e08800ec3d1ffd1655ac4" exitCode=0 Jan 20 17:19:30 crc kubenswrapper[4558]: I0120 17:19:30.934231 4558 generic.go:334] "Generic (PLEG): container finished" podID="ee678e41-8db4-4efd-995f-20521fff70ea" containerID="b6cfe71a0076941015d0577220c08851725d70b0a29e663b1c8b13a24fbc926a" exitCode=2 Jan 20 17:19:30 crc kubenswrapper[4558]: I0120 17:19:30.934244 4558 generic.go:334] "Generic (PLEG): container finished" podID="ee678e41-8db4-4efd-995f-20521fff70ea" containerID="749f4f02ed611a21a04fad4c9d72e8071779fe9c7f457f44841339d9e5513b35" exitCode=0 Jan 20 17:19:30 crc kubenswrapper[4558]: I0120 17:19:30.934253 4558 generic.go:334] "Generic (PLEG): container finished" podID="ee678e41-8db4-4efd-995f-20521fff70ea" containerID="d596ad3c068f61543bd30a635e39b820fdca5a47ea6a64733298f12c6dfb6d32" exitCode=0 Jan 20 17:19:30 crc kubenswrapper[4558]: I0120 17:19:30.935030 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:30 crc kubenswrapper[4558]: I0120 17:19:30.935242 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"ee678e41-8db4-4efd-995f-20521fff70ea","Type":"ContainerDied","Data":"c609721cd0f7e7990b84215837c14634c6d28fc45f9e08800ec3d1ffd1655ac4"} Jan 20 17:19:30 crc kubenswrapper[4558]: I0120 17:19:30.935304 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"ee678e41-8db4-4efd-995f-20521fff70ea","Type":"ContainerDied","Data":"b6cfe71a0076941015d0577220c08851725d70b0a29e663b1c8b13a24fbc926a"} Jan 20 17:19:30 crc kubenswrapper[4558]: I0120 17:19:30.935320 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"ee678e41-8db4-4efd-995f-20521fff70ea","Type":"ContainerDied","Data":"749f4f02ed611a21a04fad4c9d72e8071779fe9c7f457f44841339d9e5513b35"} Jan 20 17:19:30 crc kubenswrapper[4558]: I0120 17:19:30.935331 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"ee678e41-8db4-4efd-995f-20521fff70ea","Type":"ContainerDied","Data":"d596ad3c068f61543bd30a635e39b820fdca5a47ea6a64733298f12c6dfb6d32"} Jan 20 17:19:30 crc kubenswrapper[4558]: I0120 17:19:30.935341 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"ee678e41-8db4-4efd-995f-20521fff70ea","Type":"ContainerDied","Data":"f9320d05323d892d0d5f93dc233c7faa5987742150a6fcea23c6c032bfa86d0f"} Jan 20 17:19:30 crc kubenswrapper[4558]: I0120 17:19:30.935362 4558 scope.go:117] "RemoveContainer" containerID="c609721cd0f7e7990b84215837c14634c6d28fc45f9e08800ec3d1ffd1655ac4" Jan 20 17:19:30 crc kubenswrapper[4558]: I0120 17:19:30.972062 4558 scope.go:117] "RemoveContainer" containerID="b6cfe71a0076941015d0577220c08851725d70b0a29e663b1c8b13a24fbc926a" Jan 20 17:19:30 crc kubenswrapper[4558]: I0120 17:19:30.972792 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:19:30 crc kubenswrapper[4558]: I0120 17:19:30.984630 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 17:19:31.000120 4558 scope.go:117] "RemoveContainer" containerID="749f4f02ed611a21a04fad4c9d72e8071779fe9c7f457f44841339d9e5513b35" Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 17:19:31.002838 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:19:31 crc kubenswrapper[4558]: E0120 17:19:31.003774 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee678e41-8db4-4efd-995f-20521fff70ea" containerName="sg-core" Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 17:19:31.003797 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee678e41-8db4-4efd-995f-20521fff70ea" containerName="sg-core" Jan 20 17:19:31 crc kubenswrapper[4558]: E0120 17:19:31.003817 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee678e41-8db4-4efd-995f-20521fff70ea" containerName="ceilometer-notification-agent" Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 17:19:31.003825 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee678e41-8db4-4efd-995f-20521fff70ea" containerName="ceilometer-notification-agent" Jan 20 17:19:31 crc kubenswrapper[4558]: E0120 17:19:31.003851 4558 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="ee678e41-8db4-4efd-995f-20521fff70ea" containerName="ceilometer-central-agent" Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 17:19:31.003858 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee678e41-8db4-4efd-995f-20521fff70ea" containerName="ceilometer-central-agent" Jan 20 17:19:31 crc kubenswrapper[4558]: E0120 17:19:31.003871 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee678e41-8db4-4efd-995f-20521fff70ea" containerName="proxy-httpd" Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 17:19:31.003877 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee678e41-8db4-4efd-995f-20521fff70ea" containerName="proxy-httpd" Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 17:19:31.004037 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ee678e41-8db4-4efd-995f-20521fff70ea" containerName="proxy-httpd" Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 17:19:31.004053 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ee678e41-8db4-4efd-995f-20521fff70ea" containerName="ceilometer-notification-agent" Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 17:19:31.004062 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ee678e41-8db4-4efd-995f-20521fff70ea" containerName="ceilometer-central-agent" Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 17:19:31.004071 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ee678e41-8db4-4efd-995f-20521fff70ea" containerName="sg-core" Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 17:19:31.005741 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 17:19:31.007385 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 17:19:31.014351 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 17:19:31.014580 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 17:19:31.032016 4558 scope.go:117] "RemoveContainer" containerID="d596ad3c068f61543bd30a635e39b820fdca5a47ea6a64733298f12c6dfb6d32" Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 17:19:31.033460 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/14e220fa-c2cf-40d1-b016-745fa9789bb8-log-httpd\") pod \"ceilometer-0\" (UID: \"14e220fa-c2cf-40d1-b016-745fa9789bb8\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 17:19:31.033533 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14e220fa-c2cf-40d1-b016-745fa9789bb8-config-data\") pod \"ceilometer-0\" (UID: \"14e220fa-c2cf-40d1-b016-745fa9789bb8\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 17:19:31.033565 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/14e220fa-c2cf-40d1-b016-745fa9789bb8-run-httpd\") pod \"ceilometer-0\" (UID: \"14e220fa-c2cf-40d1-b016-745fa9789bb8\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 
17:19:31.033601 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/14e220fa-c2cf-40d1-b016-745fa9789bb8-scripts\") pod \"ceilometer-0\" (UID: \"14e220fa-c2cf-40d1-b016-745fa9789bb8\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 17:19:31.033678 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/14e220fa-c2cf-40d1-b016-745fa9789bb8-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"14e220fa-c2cf-40d1-b016-745fa9789bb8\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 17:19:31.034000 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14e220fa-c2cf-40d1-b016-745fa9789bb8-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"14e220fa-c2cf-40d1-b016-745fa9789bb8\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 17:19:31.034044 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zqsps\" (UniqueName: \"kubernetes.io/projected/14e220fa-c2cf-40d1-b016-745fa9789bb8-kube-api-access-zqsps\") pod \"ceilometer-0\" (UID: \"14e220fa-c2cf-40d1-b016-745fa9789bb8\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 17:19:31.052280 4558 scope.go:117] "RemoveContainer" containerID="c609721cd0f7e7990b84215837c14634c6d28fc45f9e08800ec3d1ffd1655ac4" Jan 20 17:19:31 crc kubenswrapper[4558]: E0120 17:19:31.052756 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c609721cd0f7e7990b84215837c14634c6d28fc45f9e08800ec3d1ffd1655ac4\": container with ID starting with c609721cd0f7e7990b84215837c14634c6d28fc45f9e08800ec3d1ffd1655ac4 not found: ID does not exist" containerID="c609721cd0f7e7990b84215837c14634c6d28fc45f9e08800ec3d1ffd1655ac4" Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 17:19:31.052795 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c609721cd0f7e7990b84215837c14634c6d28fc45f9e08800ec3d1ffd1655ac4"} err="failed to get container status \"c609721cd0f7e7990b84215837c14634c6d28fc45f9e08800ec3d1ffd1655ac4\": rpc error: code = NotFound desc = could not find container \"c609721cd0f7e7990b84215837c14634c6d28fc45f9e08800ec3d1ffd1655ac4\": container with ID starting with c609721cd0f7e7990b84215837c14634c6d28fc45f9e08800ec3d1ffd1655ac4 not found: ID does not exist" Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 17:19:31.052826 4558 scope.go:117] "RemoveContainer" containerID="b6cfe71a0076941015d0577220c08851725d70b0a29e663b1c8b13a24fbc926a" Jan 20 17:19:31 crc kubenswrapper[4558]: E0120 17:19:31.053209 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b6cfe71a0076941015d0577220c08851725d70b0a29e663b1c8b13a24fbc926a\": container with ID starting with b6cfe71a0076941015d0577220c08851725d70b0a29e663b1c8b13a24fbc926a not found: ID does not exist" containerID="b6cfe71a0076941015d0577220c08851725d70b0a29e663b1c8b13a24fbc926a" Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 17:19:31.053241 4558 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"b6cfe71a0076941015d0577220c08851725d70b0a29e663b1c8b13a24fbc926a"} err="failed to get container status \"b6cfe71a0076941015d0577220c08851725d70b0a29e663b1c8b13a24fbc926a\": rpc error: code = NotFound desc = could not find container \"b6cfe71a0076941015d0577220c08851725d70b0a29e663b1c8b13a24fbc926a\": container with ID starting with b6cfe71a0076941015d0577220c08851725d70b0a29e663b1c8b13a24fbc926a not found: ID does not exist" Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 17:19:31.053265 4558 scope.go:117] "RemoveContainer" containerID="749f4f02ed611a21a04fad4c9d72e8071779fe9c7f457f44841339d9e5513b35" Jan 20 17:19:31 crc kubenswrapper[4558]: E0120 17:19:31.053565 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"749f4f02ed611a21a04fad4c9d72e8071779fe9c7f457f44841339d9e5513b35\": container with ID starting with 749f4f02ed611a21a04fad4c9d72e8071779fe9c7f457f44841339d9e5513b35 not found: ID does not exist" containerID="749f4f02ed611a21a04fad4c9d72e8071779fe9c7f457f44841339d9e5513b35" Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 17:19:31.053602 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"749f4f02ed611a21a04fad4c9d72e8071779fe9c7f457f44841339d9e5513b35"} err="failed to get container status \"749f4f02ed611a21a04fad4c9d72e8071779fe9c7f457f44841339d9e5513b35\": rpc error: code = NotFound desc = could not find container \"749f4f02ed611a21a04fad4c9d72e8071779fe9c7f457f44841339d9e5513b35\": container with ID starting with 749f4f02ed611a21a04fad4c9d72e8071779fe9c7f457f44841339d9e5513b35 not found: ID does not exist" Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 17:19:31.053632 4558 scope.go:117] "RemoveContainer" containerID="d596ad3c068f61543bd30a635e39b820fdca5a47ea6a64733298f12c6dfb6d32" Jan 20 17:19:31 crc kubenswrapper[4558]: E0120 17:19:31.053996 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d596ad3c068f61543bd30a635e39b820fdca5a47ea6a64733298f12c6dfb6d32\": container with ID starting with d596ad3c068f61543bd30a635e39b820fdca5a47ea6a64733298f12c6dfb6d32 not found: ID does not exist" containerID="d596ad3c068f61543bd30a635e39b820fdca5a47ea6a64733298f12c6dfb6d32" Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 17:19:31.054020 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d596ad3c068f61543bd30a635e39b820fdca5a47ea6a64733298f12c6dfb6d32"} err="failed to get container status \"d596ad3c068f61543bd30a635e39b820fdca5a47ea6a64733298f12c6dfb6d32\": rpc error: code = NotFound desc = could not find container \"d596ad3c068f61543bd30a635e39b820fdca5a47ea6a64733298f12c6dfb6d32\": container with ID starting with d596ad3c068f61543bd30a635e39b820fdca5a47ea6a64733298f12c6dfb6d32 not found: ID does not exist" Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 17:19:31.054035 4558 scope.go:117] "RemoveContainer" containerID="c609721cd0f7e7990b84215837c14634c6d28fc45f9e08800ec3d1ffd1655ac4" Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 17:19:31.054287 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c609721cd0f7e7990b84215837c14634c6d28fc45f9e08800ec3d1ffd1655ac4"} err="failed to get container status \"c609721cd0f7e7990b84215837c14634c6d28fc45f9e08800ec3d1ffd1655ac4\": rpc error: code = NotFound desc = could not find container 
\"c609721cd0f7e7990b84215837c14634c6d28fc45f9e08800ec3d1ffd1655ac4\": container with ID starting with c609721cd0f7e7990b84215837c14634c6d28fc45f9e08800ec3d1ffd1655ac4 not found: ID does not exist" Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 17:19:31.054312 4558 scope.go:117] "RemoveContainer" containerID="b6cfe71a0076941015d0577220c08851725d70b0a29e663b1c8b13a24fbc926a" Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 17:19:31.054567 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b6cfe71a0076941015d0577220c08851725d70b0a29e663b1c8b13a24fbc926a"} err="failed to get container status \"b6cfe71a0076941015d0577220c08851725d70b0a29e663b1c8b13a24fbc926a\": rpc error: code = NotFound desc = could not find container \"b6cfe71a0076941015d0577220c08851725d70b0a29e663b1c8b13a24fbc926a\": container with ID starting with b6cfe71a0076941015d0577220c08851725d70b0a29e663b1c8b13a24fbc926a not found: ID does not exist" Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 17:19:31.054591 4558 scope.go:117] "RemoveContainer" containerID="749f4f02ed611a21a04fad4c9d72e8071779fe9c7f457f44841339d9e5513b35" Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 17:19:31.054873 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"749f4f02ed611a21a04fad4c9d72e8071779fe9c7f457f44841339d9e5513b35"} err="failed to get container status \"749f4f02ed611a21a04fad4c9d72e8071779fe9c7f457f44841339d9e5513b35\": rpc error: code = NotFound desc = could not find container \"749f4f02ed611a21a04fad4c9d72e8071779fe9c7f457f44841339d9e5513b35\": container with ID starting with 749f4f02ed611a21a04fad4c9d72e8071779fe9c7f457f44841339d9e5513b35 not found: ID does not exist" Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 17:19:31.054897 4558 scope.go:117] "RemoveContainer" containerID="d596ad3c068f61543bd30a635e39b820fdca5a47ea6a64733298f12c6dfb6d32" Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 17:19:31.055085 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d596ad3c068f61543bd30a635e39b820fdca5a47ea6a64733298f12c6dfb6d32"} err="failed to get container status \"d596ad3c068f61543bd30a635e39b820fdca5a47ea6a64733298f12c6dfb6d32\": rpc error: code = NotFound desc = could not find container \"d596ad3c068f61543bd30a635e39b820fdca5a47ea6a64733298f12c6dfb6d32\": container with ID starting with d596ad3c068f61543bd30a635e39b820fdca5a47ea6a64733298f12c6dfb6d32 not found: ID does not exist" Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 17:19:31.055107 4558 scope.go:117] "RemoveContainer" containerID="c609721cd0f7e7990b84215837c14634c6d28fc45f9e08800ec3d1ffd1655ac4" Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 17:19:31.055403 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c609721cd0f7e7990b84215837c14634c6d28fc45f9e08800ec3d1ffd1655ac4"} err="failed to get container status \"c609721cd0f7e7990b84215837c14634c6d28fc45f9e08800ec3d1ffd1655ac4\": rpc error: code = NotFound desc = could not find container \"c609721cd0f7e7990b84215837c14634c6d28fc45f9e08800ec3d1ffd1655ac4\": container with ID starting with c609721cd0f7e7990b84215837c14634c6d28fc45f9e08800ec3d1ffd1655ac4 not found: ID does not exist" Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 17:19:31.055425 4558 scope.go:117] "RemoveContainer" containerID="b6cfe71a0076941015d0577220c08851725d70b0a29e663b1c8b13a24fbc926a" Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 17:19:31.056026 4558 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b6cfe71a0076941015d0577220c08851725d70b0a29e663b1c8b13a24fbc926a"} err="failed to get container status \"b6cfe71a0076941015d0577220c08851725d70b0a29e663b1c8b13a24fbc926a\": rpc error: code = NotFound desc = could not find container \"b6cfe71a0076941015d0577220c08851725d70b0a29e663b1c8b13a24fbc926a\": container with ID starting with b6cfe71a0076941015d0577220c08851725d70b0a29e663b1c8b13a24fbc926a not found: ID does not exist" Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 17:19:31.056061 4558 scope.go:117] "RemoveContainer" containerID="749f4f02ed611a21a04fad4c9d72e8071779fe9c7f457f44841339d9e5513b35" Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 17:19:31.056449 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"749f4f02ed611a21a04fad4c9d72e8071779fe9c7f457f44841339d9e5513b35"} err="failed to get container status \"749f4f02ed611a21a04fad4c9d72e8071779fe9c7f457f44841339d9e5513b35\": rpc error: code = NotFound desc = could not find container \"749f4f02ed611a21a04fad4c9d72e8071779fe9c7f457f44841339d9e5513b35\": container with ID starting with 749f4f02ed611a21a04fad4c9d72e8071779fe9c7f457f44841339d9e5513b35 not found: ID does not exist" Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 17:19:31.056475 4558 scope.go:117] "RemoveContainer" containerID="d596ad3c068f61543bd30a635e39b820fdca5a47ea6a64733298f12c6dfb6d32" Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 17:19:31.056835 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d596ad3c068f61543bd30a635e39b820fdca5a47ea6a64733298f12c6dfb6d32"} err="failed to get container status \"d596ad3c068f61543bd30a635e39b820fdca5a47ea6a64733298f12c6dfb6d32\": rpc error: code = NotFound desc = could not find container \"d596ad3c068f61543bd30a635e39b820fdca5a47ea6a64733298f12c6dfb6d32\": container with ID starting with d596ad3c068f61543bd30a635e39b820fdca5a47ea6a64733298f12c6dfb6d32 not found: ID does not exist" Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 17:19:31.056857 4558 scope.go:117] "RemoveContainer" containerID="c609721cd0f7e7990b84215837c14634c6d28fc45f9e08800ec3d1ffd1655ac4" Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 17:19:31.057082 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c609721cd0f7e7990b84215837c14634c6d28fc45f9e08800ec3d1ffd1655ac4"} err="failed to get container status \"c609721cd0f7e7990b84215837c14634c6d28fc45f9e08800ec3d1ffd1655ac4\": rpc error: code = NotFound desc = could not find container \"c609721cd0f7e7990b84215837c14634c6d28fc45f9e08800ec3d1ffd1655ac4\": container with ID starting with c609721cd0f7e7990b84215837c14634c6d28fc45f9e08800ec3d1ffd1655ac4 not found: ID does not exist" Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 17:19:31.057103 4558 scope.go:117] "RemoveContainer" containerID="b6cfe71a0076941015d0577220c08851725d70b0a29e663b1c8b13a24fbc926a" Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 17:19:31.057380 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b6cfe71a0076941015d0577220c08851725d70b0a29e663b1c8b13a24fbc926a"} err="failed to get container status \"b6cfe71a0076941015d0577220c08851725d70b0a29e663b1c8b13a24fbc926a\": rpc error: code = NotFound desc = could not find container \"b6cfe71a0076941015d0577220c08851725d70b0a29e663b1c8b13a24fbc926a\": container with ID starting with 
b6cfe71a0076941015d0577220c08851725d70b0a29e663b1c8b13a24fbc926a not found: ID does not exist" Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 17:19:31.057402 4558 scope.go:117] "RemoveContainer" containerID="749f4f02ed611a21a04fad4c9d72e8071779fe9c7f457f44841339d9e5513b35" Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 17:19:31.057583 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"749f4f02ed611a21a04fad4c9d72e8071779fe9c7f457f44841339d9e5513b35"} err="failed to get container status \"749f4f02ed611a21a04fad4c9d72e8071779fe9c7f457f44841339d9e5513b35\": rpc error: code = NotFound desc = could not find container \"749f4f02ed611a21a04fad4c9d72e8071779fe9c7f457f44841339d9e5513b35\": container with ID starting with 749f4f02ed611a21a04fad4c9d72e8071779fe9c7f457f44841339d9e5513b35 not found: ID does not exist" Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 17:19:31.057602 4558 scope.go:117] "RemoveContainer" containerID="d596ad3c068f61543bd30a635e39b820fdca5a47ea6a64733298f12c6dfb6d32" Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 17:19:31.058451 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d596ad3c068f61543bd30a635e39b820fdca5a47ea6a64733298f12c6dfb6d32"} err="failed to get container status \"d596ad3c068f61543bd30a635e39b820fdca5a47ea6a64733298f12c6dfb6d32\": rpc error: code = NotFound desc = could not find container \"d596ad3c068f61543bd30a635e39b820fdca5a47ea6a64733298f12c6dfb6d32\": container with ID starting with d596ad3c068f61543bd30a635e39b820fdca5a47ea6a64733298f12c6dfb6d32 not found: ID does not exist" Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 17:19:31.135628 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/14e220fa-c2cf-40d1-b016-745fa9789bb8-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"14e220fa-c2cf-40d1-b016-745fa9789bb8\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 17:19:31.135695 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14e220fa-c2cf-40d1-b016-745fa9789bb8-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"14e220fa-c2cf-40d1-b016-745fa9789bb8\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 17:19:31.135738 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zqsps\" (UniqueName: \"kubernetes.io/projected/14e220fa-c2cf-40d1-b016-745fa9789bb8-kube-api-access-zqsps\") pod \"ceilometer-0\" (UID: \"14e220fa-c2cf-40d1-b016-745fa9789bb8\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 17:19:31.135787 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/14e220fa-c2cf-40d1-b016-745fa9789bb8-log-httpd\") pod \"ceilometer-0\" (UID: \"14e220fa-c2cf-40d1-b016-745fa9789bb8\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 17:19:31.135836 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14e220fa-c2cf-40d1-b016-745fa9789bb8-config-data\") pod \"ceilometer-0\" (UID: \"14e220fa-c2cf-40d1-b016-745fa9789bb8\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 17:19:31.135870 4558 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/14e220fa-c2cf-40d1-b016-745fa9789bb8-run-httpd\") pod \"ceilometer-0\" (UID: \"14e220fa-c2cf-40d1-b016-745fa9789bb8\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 17:19:31.135898 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/14e220fa-c2cf-40d1-b016-745fa9789bb8-scripts\") pod \"ceilometer-0\" (UID: \"14e220fa-c2cf-40d1-b016-745fa9789bb8\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 17:19:31.136485 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/14e220fa-c2cf-40d1-b016-745fa9789bb8-run-httpd\") pod \"ceilometer-0\" (UID: \"14e220fa-c2cf-40d1-b016-745fa9789bb8\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 17:19:31.136803 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/14e220fa-c2cf-40d1-b016-745fa9789bb8-log-httpd\") pod \"ceilometer-0\" (UID: \"14e220fa-c2cf-40d1-b016-745fa9789bb8\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 17:19:31.141386 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/14e220fa-c2cf-40d1-b016-745fa9789bb8-scripts\") pod \"ceilometer-0\" (UID: \"14e220fa-c2cf-40d1-b016-745fa9789bb8\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 17:19:31.141467 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14e220fa-c2cf-40d1-b016-745fa9789bb8-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"14e220fa-c2cf-40d1-b016-745fa9789bb8\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 17:19:31.144665 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/14e220fa-c2cf-40d1-b016-745fa9789bb8-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"14e220fa-c2cf-40d1-b016-745fa9789bb8\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 17:19:31.145128 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14e220fa-c2cf-40d1-b016-745fa9789bb8-config-data\") pod \"ceilometer-0\" (UID: \"14e220fa-c2cf-40d1-b016-745fa9789bb8\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 17:19:31.154711 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zqsps\" (UniqueName: \"kubernetes.io/projected/14e220fa-c2cf-40d1-b016-745fa9789bb8-kube-api-access-zqsps\") pod \"ceilometer-0\" (UID: \"14e220fa-c2cf-40d1-b016-745fa9789bb8\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 17:19:31.273290 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 17:19:31.273543 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" 
podUID="4f358da8-45ff-43a3-a671-05c81bb940a8" containerName="glance-log" containerID="cri-o://4717ec95ba7dc2a4dd5cf5832311139309bda02a9c8b0d18d06bbddbc1b283bb" gracePeriod=30 Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 17:19:31.273618 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="4f358da8-45ff-43a3-a671-05c81bb940a8" containerName="glance-httpd" containerID="cri-o://8d2b5cee7d58d2eb3a0c47e5dd22befe62dc6fe6e9a175f8454c9dd6bdae69b1" gracePeriod=30 Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 17:19:31.335308 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 17:19:31.861789 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:19:31 crc kubenswrapper[4558]: W0120 17:19:31.865960 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod14e220fa_c2cf_40d1_b016_745fa9789bb8.slice/crio-de944a213d08dea859e4b6c8ae4720016e420fbeb25ab4e4e6af500ce9e9fa25 WatchSource:0}: Error finding container de944a213d08dea859e4b6c8ae4720016e420fbeb25ab4e4e6af500ce9e9fa25: Status 404 returned error can't find the container with id de944a213d08dea859e4b6c8ae4720016e420fbeb25ab4e4e6af500ce9e9fa25 Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 17:19:31.945181 4558 generic.go:334] "Generic (PLEG): container finished" podID="4f358da8-45ff-43a3-a671-05c81bb940a8" containerID="4717ec95ba7dc2a4dd5cf5832311139309bda02a9c8b0d18d06bbddbc1b283bb" exitCode=143 Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 17:19:31.945254 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"4f358da8-45ff-43a3-a671-05c81bb940a8","Type":"ContainerDied","Data":"4717ec95ba7dc2a4dd5cf5832311139309bda02a9c8b0d18d06bbddbc1b283bb"} Jan 20 17:19:31 crc kubenswrapper[4558]: I0120 17:19:31.949231 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"14e220fa-c2cf-40d1-b016-745fa9789bb8","Type":"ContainerStarted","Data":"de944a213d08dea859e4b6c8ae4720016e420fbeb25ab4e4e6af500ce9e9fa25"} Jan 20 17:19:32 crc kubenswrapper[4558]: I0120 17:19:32.325327 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:19:32 crc kubenswrapper[4558]: I0120 17:19:32.583139 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ee678e41-8db4-4efd-995f-20521fff70ea" path="/var/lib/kubelet/pods/ee678e41-8db4-4efd-995f-20521fff70ea/volumes" Jan 20 17:19:32 crc kubenswrapper[4558]: I0120 17:19:32.964988 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"14e220fa-c2cf-40d1-b016-745fa9789bb8","Type":"ContainerStarted","Data":"0dce20dbfe3a972f0fe47fca1883db6999c23e623e3091d1ceb24d004670b189"} Jan 20 17:19:33 crc kubenswrapper[4558]: I0120 17:19:33.978116 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"14e220fa-c2cf-40d1-b016-745fa9789bb8","Type":"ContainerStarted","Data":"ec5415b6b921e276981646446295fef3794235a9a2ad227f22d709452690a49e"} Jan 20 17:19:34 crc kubenswrapper[4558]: I0120 17:19:34.675985 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:19:34 crc 
kubenswrapper[4558]: I0120 17:19:34.676441 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="18c8f0a6-157c-42f1-81b2-6bccecdcf626" containerName="glance-log" containerID="cri-o://1cbcd65376eed320dc67eb5d4e26e980c1e4099da6b8e76656548d1fdcc0484d" gracePeriod=30 Jan 20 17:19:34 crc kubenswrapper[4558]: I0120 17:19:34.676506 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="18c8f0a6-157c-42f1-81b2-6bccecdcf626" containerName="glance-httpd" containerID="cri-o://9dae4a94a0dceb1b3d4ff1d0035c4b59f730610c281890e1dedf27394f9fa41c" gracePeriod=30 Jan 20 17:19:34 crc kubenswrapper[4558]: I0120 17:19:34.988076 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"14e220fa-c2cf-40d1-b016-745fa9789bb8","Type":"ContainerStarted","Data":"4a6dda18259fa409b66e1258f77611034bd2c9dedbd9d0b83a2b9ea4bf10a811"} Jan 20 17:19:34 crc kubenswrapper[4558]: I0120 17:19:34.990389 4558 generic.go:334] "Generic (PLEG): container finished" podID="18c8f0a6-157c-42f1-81b2-6bccecdcf626" containerID="1cbcd65376eed320dc67eb5d4e26e980c1e4099da6b8e76656548d1fdcc0484d" exitCode=143 Jan 20 17:19:34 crc kubenswrapper[4558]: I0120 17:19:34.990421 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"18c8f0a6-157c-42f1-81b2-6bccecdcf626","Type":"ContainerDied","Data":"1cbcd65376eed320dc67eb5d4e26e980c1e4099da6b8e76656548d1fdcc0484d"} Jan 20 17:19:35 crc kubenswrapper[4558]: I0120 17:19:35.697808 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="4f358da8-45ff-43a3-a671-05c81bb940a8" containerName="glance-httpd" probeResult="failure" output="Get \"https://10.217.0.162:9292/healthcheck\": read tcp 10.217.0.2:42108->10.217.0.162:9292: read: connection reset by peer" Jan 20 17:19:35 crc kubenswrapper[4558]: I0120 17:19:35.697809 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="4f358da8-45ff-43a3-a671-05c81bb940a8" containerName="glance-log" probeResult="failure" output="Get \"https://10.217.0.162:9292/healthcheck\": read tcp 10.217.0.2:42114->10.217.0.162:9292: read: connection reset by peer" Jan 20 17:19:36 crc kubenswrapper[4558]: I0120 17:19:36.013959 4558 generic.go:334] "Generic (PLEG): container finished" podID="4f358da8-45ff-43a3-a671-05c81bb940a8" containerID="8d2b5cee7d58d2eb3a0c47e5dd22befe62dc6fe6e9a175f8454c9dd6bdae69b1" exitCode=0 Jan 20 17:19:36 crc kubenswrapper[4558]: I0120 17:19:36.014006 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"4f358da8-45ff-43a3-a671-05c81bb940a8","Type":"ContainerDied","Data":"8d2b5cee7d58d2eb3a0c47e5dd22befe62dc6fe6e9a175f8454c9dd6bdae69b1"} Jan 20 17:19:36 crc kubenswrapper[4558]: I0120 17:19:36.293333 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:19:36 crc kubenswrapper[4558]: I0120 17:19:36.362375 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t9r5v\" (UniqueName: \"kubernetes.io/projected/4f358da8-45ff-43a3-a671-05c81bb940a8-kube-api-access-t9r5v\") pod \"4f358da8-45ff-43a3-a671-05c81bb940a8\" (UID: \"4f358da8-45ff-43a3-a671-05c81bb940a8\") " Jan 20 17:19:36 crc kubenswrapper[4558]: I0120 17:19:36.362435 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f358da8-45ff-43a3-a671-05c81bb940a8-combined-ca-bundle\") pod \"4f358da8-45ff-43a3-a671-05c81bb940a8\" (UID: \"4f358da8-45ff-43a3-a671-05c81bb940a8\") " Jan 20 17:19:36 crc kubenswrapper[4558]: I0120 17:19:36.362507 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4f358da8-45ff-43a3-a671-05c81bb940a8-httpd-run\") pod \"4f358da8-45ff-43a3-a671-05c81bb940a8\" (UID: \"4f358da8-45ff-43a3-a671-05c81bb940a8\") " Jan 20 17:19:36 crc kubenswrapper[4558]: I0120 17:19:36.362579 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4f358da8-45ff-43a3-a671-05c81bb940a8-scripts\") pod \"4f358da8-45ff-43a3-a671-05c81bb940a8\" (UID: \"4f358da8-45ff-43a3-a671-05c81bb940a8\") " Jan 20 17:19:36 crc kubenswrapper[4558]: I0120 17:19:36.362675 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"4f358da8-45ff-43a3-a671-05c81bb940a8\" (UID: \"4f358da8-45ff-43a3-a671-05c81bb940a8\") " Jan 20 17:19:36 crc kubenswrapper[4558]: I0120 17:19:36.362717 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4f358da8-45ff-43a3-a671-05c81bb940a8-logs\") pod \"4f358da8-45ff-43a3-a671-05c81bb940a8\" (UID: \"4f358da8-45ff-43a3-a671-05c81bb940a8\") " Jan 20 17:19:36 crc kubenswrapper[4558]: I0120 17:19:36.362776 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4f358da8-45ff-43a3-a671-05c81bb940a8-internal-tls-certs\") pod \"4f358da8-45ff-43a3-a671-05c81bb940a8\" (UID: \"4f358da8-45ff-43a3-a671-05c81bb940a8\") " Jan 20 17:19:36 crc kubenswrapper[4558]: I0120 17:19:36.362803 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f358da8-45ff-43a3-a671-05c81bb940a8-config-data\") pod \"4f358da8-45ff-43a3-a671-05c81bb940a8\" (UID: \"4f358da8-45ff-43a3-a671-05c81bb940a8\") " Jan 20 17:19:36 crc kubenswrapper[4558]: I0120 17:19:36.363083 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4f358da8-45ff-43a3-a671-05c81bb940a8-logs" (OuterVolumeSpecName: "logs") pod "4f358da8-45ff-43a3-a671-05c81bb940a8" (UID: "4f358da8-45ff-43a3-a671-05c81bb940a8"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:19:36 crc kubenswrapper[4558]: I0120 17:19:36.363654 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4f358da8-45ff-43a3-a671-05c81bb940a8-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:19:36 crc kubenswrapper[4558]: I0120 17:19:36.363819 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4f358da8-45ff-43a3-a671-05c81bb940a8-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "4f358da8-45ff-43a3-a671-05c81bb940a8" (UID: "4f358da8-45ff-43a3-a671-05c81bb940a8"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:19:36 crc kubenswrapper[4558]: I0120 17:19:36.369979 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4f358da8-45ff-43a3-a671-05c81bb940a8-kube-api-access-t9r5v" (OuterVolumeSpecName: "kube-api-access-t9r5v") pod "4f358da8-45ff-43a3-a671-05c81bb940a8" (UID: "4f358da8-45ff-43a3-a671-05c81bb940a8"). InnerVolumeSpecName "kube-api-access-t9r5v". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:19:36 crc kubenswrapper[4558]: I0120 17:19:36.379583 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage13-crc" (OuterVolumeSpecName: "glance") pod "4f358da8-45ff-43a3-a671-05c81bb940a8" (UID: "4f358da8-45ff-43a3-a671-05c81bb940a8"). InnerVolumeSpecName "local-storage13-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:19:36 crc kubenswrapper[4558]: I0120 17:19:36.381056 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f358da8-45ff-43a3-a671-05c81bb940a8-scripts" (OuterVolumeSpecName: "scripts") pod "4f358da8-45ff-43a3-a671-05c81bb940a8" (UID: "4f358da8-45ff-43a3-a671-05c81bb940a8"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:19:36 crc kubenswrapper[4558]: I0120 17:19:36.402953 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f358da8-45ff-43a3-a671-05c81bb940a8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4f358da8-45ff-43a3-a671-05c81bb940a8" (UID: "4f358da8-45ff-43a3-a671-05c81bb940a8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:19:36 crc kubenswrapper[4558]: I0120 17:19:36.423343 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f358da8-45ff-43a3-a671-05c81bb940a8-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "4f358da8-45ff-43a3-a671-05c81bb940a8" (UID: "4f358da8-45ff-43a3-a671-05c81bb940a8"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:19:36 crc kubenswrapper[4558]: I0120 17:19:36.441495 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f358da8-45ff-43a3-a671-05c81bb940a8-config-data" (OuterVolumeSpecName: "config-data") pod "4f358da8-45ff-43a3-a671-05c81bb940a8" (UID: "4f358da8-45ff-43a3-a671-05c81bb940a8"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:19:36 crc kubenswrapper[4558]: I0120 17:19:36.465901 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") on node \"crc\" " Jan 20 17:19:36 crc kubenswrapper[4558]: I0120 17:19:36.465948 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4f358da8-45ff-43a3-a671-05c81bb940a8-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:19:36 crc kubenswrapper[4558]: I0120 17:19:36.465960 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f358da8-45ff-43a3-a671-05c81bb940a8-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:19:36 crc kubenswrapper[4558]: I0120 17:19:36.465972 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t9r5v\" (UniqueName: \"kubernetes.io/projected/4f358da8-45ff-43a3-a671-05c81bb940a8-kube-api-access-t9r5v\") on node \"crc\" DevicePath \"\"" Jan 20 17:19:36 crc kubenswrapper[4558]: I0120 17:19:36.465982 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f358da8-45ff-43a3-a671-05c81bb940a8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:19:36 crc kubenswrapper[4558]: I0120 17:19:36.465991 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4f358da8-45ff-43a3-a671-05c81bb940a8-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:19:36 crc kubenswrapper[4558]: I0120 17:19:36.465999 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4f358da8-45ff-43a3-a671-05c81bb940a8-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:19:36 crc kubenswrapper[4558]: I0120 17:19:36.484469 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage13-crc" (UniqueName: "kubernetes.io/local-volume/local-storage13-crc") on node "crc" Jan 20 17:19:36 crc kubenswrapper[4558]: I0120 17:19:36.567839 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:19:37 crc kubenswrapper[4558]: I0120 17:19:37.027439 4558 generic.go:334] "Generic (PLEG): container finished" podID="743c3bad-0487-4ff7-a447-82cec153136a" containerID="fca2ff1c6569ac1015ba889896d9030e224df70272a9e9b20388cfb82fa616ca" exitCode=0 Jan 20 17:19:37 crc kubenswrapper[4558]: I0120 17:19:37.027517 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-2rm29" event={"ID":"743c3bad-0487-4ff7-a447-82cec153136a","Type":"ContainerDied","Data":"fca2ff1c6569ac1015ba889896d9030e224df70272a9e9b20388cfb82fa616ca"} Jan 20 17:19:37 crc kubenswrapper[4558]: I0120 17:19:37.038420 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"14e220fa-c2cf-40d1-b016-745fa9789bb8","Type":"ContainerStarted","Data":"df12d41982db133a6ef8674d7ad124b0b6d567e3ba158b3289c23642e5c730c7"} Jan 20 17:19:37 crc kubenswrapper[4558]: I0120 17:19:37.038617 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="14e220fa-c2cf-40d1-b016-745fa9789bb8" containerName="ceilometer-central-agent" 
containerID="cri-o://0dce20dbfe3a972f0fe47fca1883db6999c23e623e3091d1ceb24d004670b189" gracePeriod=30 Jan 20 17:19:37 crc kubenswrapper[4558]: I0120 17:19:37.038766 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:37 crc kubenswrapper[4558]: I0120 17:19:37.038833 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="14e220fa-c2cf-40d1-b016-745fa9789bb8" containerName="proxy-httpd" containerID="cri-o://df12d41982db133a6ef8674d7ad124b0b6d567e3ba158b3289c23642e5c730c7" gracePeriod=30 Jan 20 17:19:37 crc kubenswrapper[4558]: I0120 17:19:37.038877 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="14e220fa-c2cf-40d1-b016-745fa9789bb8" containerName="sg-core" containerID="cri-o://4a6dda18259fa409b66e1258f77611034bd2c9dedbd9d0b83a2b9ea4bf10a811" gracePeriod=30 Jan 20 17:19:37 crc kubenswrapper[4558]: I0120 17:19:37.038927 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="14e220fa-c2cf-40d1-b016-745fa9789bb8" containerName="ceilometer-notification-agent" containerID="cri-o://ec5415b6b921e276981646446295fef3794235a9a2ad227f22d709452690a49e" gracePeriod=30 Jan 20 17:19:37 crc kubenswrapper[4558]: I0120 17:19:37.060381 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"4f358da8-45ff-43a3-a671-05c81bb940a8","Type":"ContainerDied","Data":"b23bae312bc4e60f787d8847aad548910d03b315006299b5cc3696ec352c0048"} Jan 20 17:19:37 crc kubenswrapper[4558]: I0120 17:19:37.060462 4558 scope.go:117] "RemoveContainer" containerID="8d2b5cee7d58d2eb3a0c47e5dd22befe62dc6fe6e9a175f8454c9dd6bdae69b1" Jan 20 17:19:37 crc kubenswrapper[4558]: I0120 17:19:37.060688 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:19:37 crc kubenswrapper[4558]: I0120 17:19:37.069866 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=2.785993762 podStartE2EDuration="7.069857373s" podCreationTimestamp="2026-01-20 17:19:30 +0000 UTC" firstStartedPulling="2026-01-20 17:19:31.868207225 +0000 UTC m=+2265.628545192" lastFinishedPulling="2026-01-20 17:19:36.152070836 +0000 UTC m=+2269.912408803" observedRunningTime="2026-01-20 17:19:37.064321191 +0000 UTC m=+2270.824659149" watchObservedRunningTime="2026-01-20 17:19:37.069857373 +0000 UTC m=+2270.830195339" Jan 20 17:19:37 crc kubenswrapper[4558]: I0120 17:19:37.090020 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:19:37 crc kubenswrapper[4558]: I0120 17:19:37.096568 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:19:37 crc kubenswrapper[4558]: I0120 17:19:37.108127 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:19:37 crc kubenswrapper[4558]: E0120 17:19:37.108562 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f358da8-45ff-43a3-a671-05c81bb940a8" containerName="glance-log" Jan 20 17:19:37 crc kubenswrapper[4558]: I0120 17:19:37.108575 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f358da8-45ff-43a3-a671-05c81bb940a8" containerName="glance-log" Jan 20 17:19:37 crc kubenswrapper[4558]: E0120 17:19:37.108594 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f358da8-45ff-43a3-a671-05c81bb940a8" containerName="glance-httpd" Jan 20 17:19:37 crc kubenswrapper[4558]: I0120 17:19:37.108600 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f358da8-45ff-43a3-a671-05c81bb940a8" containerName="glance-httpd" Jan 20 17:19:37 crc kubenswrapper[4558]: I0120 17:19:37.108793 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f358da8-45ff-43a3-a671-05c81bb940a8" containerName="glance-log" Jan 20 17:19:37 crc kubenswrapper[4558]: I0120 17:19:37.108809 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f358da8-45ff-43a3-a671-05c81bb940a8" containerName="glance-httpd" Jan 20 17:19:37 crc kubenswrapper[4558]: I0120 17:19:37.109893 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:19:37 crc kubenswrapper[4558]: I0120 17:19:37.113466 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-internal-svc" Jan 20 17:19:37 crc kubenswrapper[4558]: I0120 17:19:37.113667 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-internal-config-data" Jan 20 17:19:37 crc kubenswrapper[4558]: I0120 17:19:37.114057 4558 scope.go:117] "RemoveContainer" containerID="4717ec95ba7dc2a4dd5cf5832311139309bda02a9c8b0d18d06bbddbc1b283bb" Jan 20 17:19:37 crc kubenswrapper[4558]: I0120 17:19:37.122482 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:19:37 crc kubenswrapper[4558]: I0120 17:19:37.180850 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55138e51-715c-42a1-8e1d-bca65a31d55c-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"55138e51-715c-42a1-8e1d-bca65a31d55c\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:19:37 crc kubenswrapper[4558]: I0120 17:19:37.180931 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xgf4x\" (UniqueName: \"kubernetes.io/projected/55138e51-715c-42a1-8e1d-bca65a31d55c-kube-api-access-xgf4x\") pod \"glance-default-internal-api-0\" (UID: \"55138e51-715c-42a1-8e1d-bca65a31d55c\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:19:37 crc kubenswrapper[4558]: I0120 17:19:37.180963 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"glance-default-internal-api-0\" (UID: \"55138e51-715c-42a1-8e1d-bca65a31d55c\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:19:37 crc kubenswrapper[4558]: I0120 17:19:37.180982 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/55138e51-715c-42a1-8e1d-bca65a31d55c-config-data\") pod \"glance-default-internal-api-0\" (UID: \"55138e51-715c-42a1-8e1d-bca65a31d55c\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:19:37 crc kubenswrapper[4558]: I0120 17:19:37.181012 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/55138e51-715c-42a1-8e1d-bca65a31d55c-logs\") pod \"glance-default-internal-api-0\" (UID: \"55138e51-715c-42a1-8e1d-bca65a31d55c\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:19:37 crc kubenswrapper[4558]: I0120 17:19:37.181040 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/55138e51-715c-42a1-8e1d-bca65a31d55c-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"55138e51-715c-42a1-8e1d-bca65a31d55c\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:19:37 crc kubenswrapper[4558]: I0120 17:19:37.181107 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/55138e51-715c-42a1-8e1d-bca65a31d55c-scripts\") pod \"glance-default-internal-api-0\" (UID: \"55138e51-715c-42a1-8e1d-bca65a31d55c\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:19:37 crc kubenswrapper[4558]: I0120 17:19:37.181135 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/55138e51-715c-42a1-8e1d-bca65a31d55c-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"55138e51-715c-42a1-8e1d-bca65a31d55c\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:19:37 crc kubenswrapper[4558]: I0120 17:19:37.283131 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xgf4x\" (UniqueName: \"kubernetes.io/projected/55138e51-715c-42a1-8e1d-bca65a31d55c-kube-api-access-xgf4x\") pod \"glance-default-internal-api-0\" (UID: \"55138e51-715c-42a1-8e1d-bca65a31d55c\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:19:37 crc kubenswrapper[4558]: I0120 17:19:37.283313 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"glance-default-internal-api-0\" (UID: \"55138e51-715c-42a1-8e1d-bca65a31d55c\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:19:37 crc kubenswrapper[4558]: I0120 17:19:37.283401 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/55138e51-715c-42a1-8e1d-bca65a31d55c-config-data\") pod \"glance-default-internal-api-0\" (UID: \"55138e51-715c-42a1-8e1d-bca65a31d55c\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:19:37 crc kubenswrapper[4558]: I0120 17:19:37.283541 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"glance-default-internal-api-0\" (UID: \"55138e51-715c-42a1-8e1d-bca65a31d55c\") device mount path \"/mnt/openstack/pv13\"" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:19:37 crc kubenswrapper[4558]: I0120 17:19:37.283551 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/55138e51-715c-42a1-8e1d-bca65a31d55c-logs\") pod \"glance-default-internal-api-0\" (UID: \"55138e51-715c-42a1-8e1d-bca65a31d55c\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:19:37 crc kubenswrapper[4558]: I0120 17:19:37.283721 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/55138e51-715c-42a1-8e1d-bca65a31d55c-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"55138e51-715c-42a1-8e1d-bca65a31d55c\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:19:37 crc kubenswrapper[4558]: I0120 17:19:37.283936 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/55138e51-715c-42a1-8e1d-bca65a31d55c-scripts\") pod \"glance-default-internal-api-0\" (UID: \"55138e51-715c-42a1-8e1d-bca65a31d55c\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:19:37 crc kubenswrapper[4558]: I0120 17:19:37.284002 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/55138e51-715c-42a1-8e1d-bca65a31d55c-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"55138e51-715c-42a1-8e1d-bca65a31d55c\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:19:37 crc kubenswrapper[4558]: I0120 17:19:37.284131 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55138e51-715c-42a1-8e1d-bca65a31d55c-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"55138e51-715c-42a1-8e1d-bca65a31d55c\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:19:37 crc kubenswrapper[4558]: I0120 17:19:37.284237 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/55138e51-715c-42a1-8e1d-bca65a31d55c-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"55138e51-715c-42a1-8e1d-bca65a31d55c\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:19:37 crc kubenswrapper[4558]: I0120 17:19:37.284430 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/55138e51-715c-42a1-8e1d-bca65a31d55c-logs\") pod \"glance-default-internal-api-0\" (UID: \"55138e51-715c-42a1-8e1d-bca65a31d55c\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:19:37 crc kubenswrapper[4558]: I0120 17:19:37.287962 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/55138e51-715c-42a1-8e1d-bca65a31d55c-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"55138e51-715c-42a1-8e1d-bca65a31d55c\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:19:37 crc kubenswrapper[4558]: I0120 17:19:37.288099 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/55138e51-715c-42a1-8e1d-bca65a31d55c-config-data\") pod \"glance-default-internal-api-0\" (UID: \"55138e51-715c-42a1-8e1d-bca65a31d55c\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:19:37 crc kubenswrapper[4558]: I0120 17:19:37.289829 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55138e51-715c-42a1-8e1d-bca65a31d55c-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"55138e51-715c-42a1-8e1d-bca65a31d55c\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:19:37 crc kubenswrapper[4558]: I0120 17:19:37.291677 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/55138e51-715c-42a1-8e1d-bca65a31d55c-scripts\") pod \"glance-default-internal-api-0\" (UID: \"55138e51-715c-42a1-8e1d-bca65a31d55c\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:19:37 crc kubenswrapper[4558]: I0120 17:19:37.303884 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xgf4x\" (UniqueName: \"kubernetes.io/projected/55138e51-715c-42a1-8e1d-bca65a31d55c-kube-api-access-xgf4x\") pod \"glance-default-internal-api-0\" (UID: \"55138e51-715c-42a1-8e1d-bca65a31d55c\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:19:37 crc kubenswrapper[4558]: I0120 17:19:37.311977 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"glance-default-internal-api-0\" (UID: \"55138e51-715c-42a1-8e1d-bca65a31d55c\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:19:37 crc kubenswrapper[4558]: I0120 17:19:37.475502 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:19:37 crc kubenswrapper[4558]: I0120 17:19:37.615678 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:37 crc kubenswrapper[4558]: I0120 17:19:37.698766 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/14e220fa-c2cf-40d1-b016-745fa9789bb8-scripts\") pod \"14e220fa-c2cf-40d1-b016-745fa9789bb8\" (UID: \"14e220fa-c2cf-40d1-b016-745fa9789bb8\") " Jan 20 17:19:37 crc kubenswrapper[4558]: I0120 17:19:37.699175 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/14e220fa-c2cf-40d1-b016-745fa9789bb8-sg-core-conf-yaml\") pod \"14e220fa-c2cf-40d1-b016-745fa9789bb8\" (UID: \"14e220fa-c2cf-40d1-b016-745fa9789bb8\") " Jan 20 17:19:37 crc kubenswrapper[4558]: I0120 17:19:37.699298 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/14e220fa-c2cf-40d1-b016-745fa9789bb8-log-httpd\") pod \"14e220fa-c2cf-40d1-b016-745fa9789bb8\" (UID: \"14e220fa-c2cf-40d1-b016-745fa9789bb8\") " Jan 20 17:19:37 crc kubenswrapper[4558]: I0120 17:19:37.699378 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14e220fa-c2cf-40d1-b016-745fa9789bb8-combined-ca-bundle\") pod \"14e220fa-c2cf-40d1-b016-745fa9789bb8\" (UID: \"14e220fa-c2cf-40d1-b016-745fa9789bb8\") " Jan 20 17:19:37 crc kubenswrapper[4558]: I0120 17:19:37.699413 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14e220fa-c2cf-40d1-b016-745fa9789bb8-config-data\") pod \"14e220fa-c2cf-40d1-b016-745fa9789bb8\" (UID: \"14e220fa-c2cf-40d1-b016-745fa9789bb8\") " Jan 20 17:19:37 crc kubenswrapper[4558]: I0120 17:19:37.699447 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/14e220fa-c2cf-40d1-b016-745fa9789bb8-run-httpd\") pod \"14e220fa-c2cf-40d1-b016-745fa9789bb8\" (UID: \"14e220fa-c2cf-40d1-b016-745fa9789bb8\") " Jan 20 17:19:37 crc kubenswrapper[4558]: I0120 17:19:37.699536 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zqsps\" (UniqueName: \"kubernetes.io/projected/14e220fa-c2cf-40d1-b016-745fa9789bb8-kube-api-access-zqsps\") pod \"14e220fa-c2cf-40d1-b016-745fa9789bb8\" (UID: \"14e220fa-c2cf-40d1-b016-745fa9789bb8\") " Jan 20 17:19:37 crc kubenswrapper[4558]: I0120 17:19:37.700650 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/14e220fa-c2cf-40d1-b016-745fa9789bb8-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "14e220fa-c2cf-40d1-b016-745fa9789bb8" (UID: "14e220fa-c2cf-40d1-b016-745fa9789bb8"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:19:37 crc kubenswrapper[4558]: I0120 17:19:37.700764 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/14e220fa-c2cf-40d1-b016-745fa9789bb8-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "14e220fa-c2cf-40d1-b016-745fa9789bb8" (UID: "14e220fa-c2cf-40d1-b016-745fa9789bb8"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:19:37 crc kubenswrapper[4558]: I0120 17:19:37.705390 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/14e220fa-c2cf-40d1-b016-745fa9789bb8-kube-api-access-zqsps" (OuterVolumeSpecName: "kube-api-access-zqsps") pod "14e220fa-c2cf-40d1-b016-745fa9789bb8" (UID: "14e220fa-c2cf-40d1-b016-745fa9789bb8"). InnerVolumeSpecName "kube-api-access-zqsps". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:19:37 crc kubenswrapper[4558]: I0120 17:19:37.707311 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14e220fa-c2cf-40d1-b016-745fa9789bb8-scripts" (OuterVolumeSpecName: "scripts") pod "14e220fa-c2cf-40d1-b016-745fa9789bb8" (UID: "14e220fa-c2cf-40d1-b016-745fa9789bb8"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:19:37 crc kubenswrapper[4558]: I0120 17:19:37.732394 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14e220fa-c2cf-40d1-b016-745fa9789bb8-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "14e220fa-c2cf-40d1-b016-745fa9789bb8" (UID: "14e220fa-c2cf-40d1-b016-745fa9789bb8"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:19:37 crc kubenswrapper[4558]: I0120 17:19:37.774205 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14e220fa-c2cf-40d1-b016-745fa9789bb8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "14e220fa-c2cf-40d1-b016-745fa9789bb8" (UID: "14e220fa-c2cf-40d1-b016-745fa9789bb8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:19:37 crc kubenswrapper[4558]: I0120 17:19:37.793363 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14e220fa-c2cf-40d1-b016-745fa9789bb8-config-data" (OuterVolumeSpecName: "config-data") pod "14e220fa-c2cf-40d1-b016-745fa9789bb8" (UID: "14e220fa-c2cf-40d1-b016-745fa9789bb8"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:19:37 crc kubenswrapper[4558]: I0120 17:19:37.803964 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14e220fa-c2cf-40d1-b016-745fa9789bb8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:19:37 crc kubenswrapper[4558]: I0120 17:19:37.804010 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14e220fa-c2cf-40d1-b016-745fa9789bb8-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:19:37 crc kubenswrapper[4558]: I0120 17:19:37.804022 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/14e220fa-c2cf-40d1-b016-745fa9789bb8-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:19:37 crc kubenswrapper[4558]: I0120 17:19:37.804037 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zqsps\" (UniqueName: \"kubernetes.io/projected/14e220fa-c2cf-40d1-b016-745fa9789bb8-kube-api-access-zqsps\") on node \"crc\" DevicePath \"\"" Jan 20 17:19:37 crc kubenswrapper[4558]: I0120 17:19:37.804051 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/14e220fa-c2cf-40d1-b016-745fa9789bb8-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:19:37 crc kubenswrapper[4558]: I0120 17:19:37.804063 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/14e220fa-c2cf-40d1-b016-745fa9789bb8-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:19:37 crc kubenswrapper[4558]: I0120 17:19:37.804074 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/14e220fa-c2cf-40d1-b016-745fa9789bb8-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:19:37 crc kubenswrapper[4558]: I0120 17:19:37.931031 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:19:37 crc kubenswrapper[4558]: W0120 17:19:37.936519 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod55138e51_715c_42a1_8e1d_bca65a31d55c.slice/crio-82d6c49a8096269526b0400cbd2f95809c16b6793518acd69f37baf1e020f831 WatchSource:0}: Error finding container 82d6c49a8096269526b0400cbd2f95809c16b6793518acd69f37baf1e020f831: Status 404 returned error can't find the container with id 82d6c49a8096269526b0400cbd2f95809c16b6793518acd69f37baf1e020f831 Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.080226 4558 generic.go:334] "Generic (PLEG): container finished" podID="14e220fa-c2cf-40d1-b016-745fa9789bb8" containerID="df12d41982db133a6ef8674d7ad124b0b6d567e3ba158b3289c23642e5c730c7" exitCode=0 Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.080257 4558 generic.go:334] "Generic (PLEG): container finished" podID="14e220fa-c2cf-40d1-b016-745fa9789bb8" containerID="4a6dda18259fa409b66e1258f77611034bd2c9dedbd9d0b83a2b9ea4bf10a811" exitCode=2 Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.080265 4558 generic.go:334] "Generic (PLEG): container finished" podID="14e220fa-c2cf-40d1-b016-745fa9789bb8" containerID="ec5415b6b921e276981646446295fef3794235a9a2ad227f22d709452690a49e" exitCode=0 Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.080272 4558 generic.go:334] "Generic (PLEG): container finished" 
podID="14e220fa-c2cf-40d1-b016-745fa9789bb8" containerID="0dce20dbfe3a972f0fe47fca1883db6999c23e623e3091d1ceb24d004670b189" exitCode=0 Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.080293 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.080363 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"14e220fa-c2cf-40d1-b016-745fa9789bb8","Type":"ContainerDied","Data":"df12d41982db133a6ef8674d7ad124b0b6d567e3ba158b3289c23642e5c730c7"} Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.080420 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"14e220fa-c2cf-40d1-b016-745fa9789bb8","Type":"ContainerDied","Data":"4a6dda18259fa409b66e1258f77611034bd2c9dedbd9d0b83a2b9ea4bf10a811"} Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.080432 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"14e220fa-c2cf-40d1-b016-745fa9789bb8","Type":"ContainerDied","Data":"ec5415b6b921e276981646446295fef3794235a9a2ad227f22d709452690a49e"} Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.080441 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"14e220fa-c2cf-40d1-b016-745fa9789bb8","Type":"ContainerDied","Data":"0dce20dbfe3a972f0fe47fca1883db6999c23e623e3091d1ceb24d004670b189"} Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.080450 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"14e220fa-c2cf-40d1-b016-745fa9789bb8","Type":"ContainerDied","Data":"de944a213d08dea859e4b6c8ae4720016e420fbeb25ab4e4e6af500ce9e9fa25"} Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.080467 4558 scope.go:117] "RemoveContainer" containerID="df12d41982db133a6ef8674d7ad124b0b6d567e3ba158b3289c23642e5c730c7" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.085313 4558 generic.go:334] "Generic (PLEG): container finished" podID="18c8f0a6-157c-42f1-81b2-6bccecdcf626" containerID="9dae4a94a0dceb1b3d4ff1d0035c4b59f730610c281890e1dedf27394f9fa41c" exitCode=0 Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.085388 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"18c8f0a6-157c-42f1-81b2-6bccecdcf626","Type":"ContainerDied","Data":"9dae4a94a0dceb1b3d4ff1d0035c4b59f730610c281890e1dedf27394f9fa41c"} Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.087284 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"55138e51-715c-42a1-8e1d-bca65a31d55c","Type":"ContainerStarted","Data":"82d6c49a8096269526b0400cbd2f95809c16b6793518acd69f37baf1e020f831"} Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.246146 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.259791 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.270552 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:19:38 crc kubenswrapper[4558]: E0120 17:19:38.272767 4558 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="14e220fa-c2cf-40d1-b016-745fa9789bb8" containerName="sg-core" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.273047 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="14e220fa-c2cf-40d1-b016-745fa9789bb8" containerName="sg-core" Jan 20 17:19:38 crc kubenswrapper[4558]: E0120 17:19:38.273073 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14e220fa-c2cf-40d1-b016-745fa9789bb8" containerName="ceilometer-notification-agent" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.275948 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="14e220fa-c2cf-40d1-b016-745fa9789bb8" containerName="ceilometer-notification-agent" Jan 20 17:19:38 crc kubenswrapper[4558]: E0120 17:19:38.275995 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14e220fa-c2cf-40d1-b016-745fa9789bb8" containerName="proxy-httpd" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.276003 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="14e220fa-c2cf-40d1-b016-745fa9789bb8" containerName="proxy-httpd" Jan 20 17:19:38 crc kubenswrapper[4558]: E0120 17:19:38.276050 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14e220fa-c2cf-40d1-b016-745fa9789bb8" containerName="ceilometer-central-agent" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.276057 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="14e220fa-c2cf-40d1-b016-745fa9789bb8" containerName="ceilometer-central-agent" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.286054 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.287729 4558 scope.go:117] "RemoveContainer" containerID="4a6dda18259fa409b66e1258f77611034bd2c9dedbd9d0b83a2b9ea4bf10a811" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.291120 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="14e220fa-c2cf-40d1-b016-745fa9789bb8" containerName="sg-core" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.291159 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="14e220fa-c2cf-40d1-b016-745fa9789bb8" containerName="ceilometer-central-agent" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.291201 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="14e220fa-c2cf-40d1-b016-745fa9789bb8" containerName="ceilometer-notification-agent" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.291210 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="14e220fa-c2cf-40d1-b016-745fa9789bb8" containerName="proxy-httpd" Jan 20 17:19:38 crc kubenswrapper[4558]: E0120 17:19:38.293814 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="18c8f0a6-157c-42f1-81b2-6bccecdcf626" containerName="glance-log" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.293840 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="18c8f0a6-157c-42f1-81b2-6bccecdcf626" containerName="glance-log" Jan 20 17:19:38 crc kubenswrapper[4558]: E0120 17:19:38.293892 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="18c8f0a6-157c-42f1-81b2-6bccecdcf626" containerName="glance-httpd" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.293901 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="18c8f0a6-157c-42f1-81b2-6bccecdcf626" containerName="glance-httpd" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.294267 4558 
memory_manager.go:354] "RemoveStaleState removing state" podUID="18c8f0a6-157c-42f1-81b2-6bccecdcf626" containerName="glance-httpd" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.294288 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="18c8f0a6-157c-42f1-81b2-6bccecdcf626" containerName="glance-log" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.296398 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.298667 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.298853 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.301728 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.317379 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18c8f0a6-157c-42f1-81b2-6bccecdcf626-combined-ca-bundle\") pod \"18c8f0a6-157c-42f1-81b2-6bccecdcf626\" (UID: \"18c8f0a6-157c-42f1-81b2-6bccecdcf626\") " Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.317499 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/18c8f0a6-157c-42f1-81b2-6bccecdcf626-logs\") pod \"18c8f0a6-157c-42f1-81b2-6bccecdcf626\" (UID: \"18c8f0a6-157c-42f1-81b2-6bccecdcf626\") " Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.317533 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-slhdl\" (UniqueName: \"kubernetes.io/projected/18c8f0a6-157c-42f1-81b2-6bccecdcf626-kube-api-access-slhdl\") pod \"18c8f0a6-157c-42f1-81b2-6bccecdcf626\" (UID: \"18c8f0a6-157c-42f1-81b2-6bccecdcf626\") " Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.317575 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage17-crc\") pod \"18c8f0a6-157c-42f1-81b2-6bccecdcf626\" (UID: \"18c8f0a6-157c-42f1-81b2-6bccecdcf626\") " Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.317643 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/18c8f0a6-157c-42f1-81b2-6bccecdcf626-httpd-run\") pod \"18c8f0a6-157c-42f1-81b2-6bccecdcf626\" (UID: \"18c8f0a6-157c-42f1-81b2-6bccecdcf626\") " Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.317689 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/18c8f0a6-157c-42f1-81b2-6bccecdcf626-public-tls-certs\") pod \"18c8f0a6-157c-42f1-81b2-6bccecdcf626\" (UID: \"18c8f0a6-157c-42f1-81b2-6bccecdcf626\") " Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.317713 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/18c8f0a6-157c-42f1-81b2-6bccecdcf626-config-data\") pod \"18c8f0a6-157c-42f1-81b2-6bccecdcf626\" (UID: \"18c8f0a6-157c-42f1-81b2-6bccecdcf626\") " Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.317754 4558 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/18c8f0a6-157c-42f1-81b2-6bccecdcf626-scripts\") pod \"18c8f0a6-157c-42f1-81b2-6bccecdcf626\" (UID: \"18c8f0a6-157c-42f1-81b2-6bccecdcf626\") " Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.318076 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fpkb5\" (UniqueName: \"kubernetes.io/projected/c11b2eee-13cf-408d-9491-999f63a8d17d-kube-api-access-fpkb5\") pod \"ceilometer-0\" (UID: \"c11b2eee-13cf-408d-9491-999f63a8d17d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.318146 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c11b2eee-13cf-408d-9491-999f63a8d17d-scripts\") pod \"ceilometer-0\" (UID: \"c11b2eee-13cf-408d-9491-999f63a8d17d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.318267 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c11b2eee-13cf-408d-9491-999f63a8d17d-log-httpd\") pod \"ceilometer-0\" (UID: \"c11b2eee-13cf-408d-9491-999f63a8d17d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.318315 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c11b2eee-13cf-408d-9491-999f63a8d17d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c11b2eee-13cf-408d-9491-999f63a8d17d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.318362 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c11b2eee-13cf-408d-9491-999f63a8d17d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c11b2eee-13cf-408d-9491-999f63a8d17d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.318417 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c11b2eee-13cf-408d-9491-999f63a8d17d-config-data\") pod \"ceilometer-0\" (UID: \"c11b2eee-13cf-408d-9491-999f63a8d17d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.318451 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c11b2eee-13cf-408d-9491-999f63a8d17d-run-httpd\") pod \"ceilometer-0\" (UID: \"c11b2eee-13cf-408d-9491-999f63a8d17d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.319258 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/18c8f0a6-157c-42f1-81b2-6bccecdcf626-logs" (OuterVolumeSpecName: "logs") pod "18c8f0a6-157c-42f1-81b2-6bccecdcf626" (UID: "18c8f0a6-157c-42f1-81b2-6bccecdcf626"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.323442 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/18c8f0a6-157c-42f1-81b2-6bccecdcf626-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "18c8f0a6-157c-42f1-81b2-6bccecdcf626" (UID: "18c8f0a6-157c-42f1-81b2-6bccecdcf626"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.331703 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/18c8f0a6-157c-42f1-81b2-6bccecdcf626-scripts" (OuterVolumeSpecName: "scripts") pod "18c8f0a6-157c-42f1-81b2-6bccecdcf626" (UID: "18c8f0a6-157c-42f1-81b2-6bccecdcf626"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.351575 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage17-crc" (OuterVolumeSpecName: "glance") pod "18c8f0a6-157c-42f1-81b2-6bccecdcf626" (UID: "18c8f0a6-157c-42f1-81b2-6bccecdcf626"). InnerVolumeSpecName "local-storage17-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.362640 4558 scope.go:117] "RemoveContainer" containerID="ec5415b6b921e276981646446295fef3794235a9a2ad227f22d709452690a49e" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.371782 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/18c8f0a6-157c-42f1-81b2-6bccecdcf626-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "18c8f0a6-157c-42f1-81b2-6bccecdcf626" (UID: "18c8f0a6-157c-42f1-81b2-6bccecdcf626"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.377332 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/18c8f0a6-157c-42f1-81b2-6bccecdcf626-kube-api-access-slhdl" (OuterVolumeSpecName: "kube-api-access-slhdl") pod "18c8f0a6-157c-42f1-81b2-6bccecdcf626" (UID: "18c8f0a6-157c-42f1-81b2-6bccecdcf626"). InnerVolumeSpecName "kube-api-access-slhdl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.385335 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/18c8f0a6-157c-42f1-81b2-6bccecdcf626-config-data" (OuterVolumeSpecName: "config-data") pod "18c8f0a6-157c-42f1-81b2-6bccecdcf626" (UID: "18c8f0a6-157c-42f1-81b2-6bccecdcf626"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.401312 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/18c8f0a6-157c-42f1-81b2-6bccecdcf626-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "18c8f0a6-157c-42f1-81b2-6bccecdcf626" (UID: "18c8f0a6-157c-42f1-81b2-6bccecdcf626"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.421596 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c11b2eee-13cf-408d-9491-999f63a8d17d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c11b2eee-13cf-408d-9491-999f63a8d17d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.421804 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c11b2eee-13cf-408d-9491-999f63a8d17d-config-data\") pod \"ceilometer-0\" (UID: \"c11b2eee-13cf-408d-9491-999f63a8d17d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.421923 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c11b2eee-13cf-408d-9491-999f63a8d17d-run-httpd\") pod \"ceilometer-0\" (UID: \"c11b2eee-13cf-408d-9491-999f63a8d17d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.422025 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fpkb5\" (UniqueName: \"kubernetes.io/projected/c11b2eee-13cf-408d-9491-999f63a8d17d-kube-api-access-fpkb5\") pod \"ceilometer-0\" (UID: \"c11b2eee-13cf-408d-9491-999f63a8d17d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.422294 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c11b2eee-13cf-408d-9491-999f63a8d17d-scripts\") pod \"ceilometer-0\" (UID: \"c11b2eee-13cf-408d-9491-999f63a8d17d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.422568 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c11b2eee-13cf-408d-9491-999f63a8d17d-log-httpd\") pod \"ceilometer-0\" (UID: \"c11b2eee-13cf-408d-9491-999f63a8d17d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.422727 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c11b2eee-13cf-408d-9491-999f63a8d17d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c11b2eee-13cf-408d-9491-999f63a8d17d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.422918 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18c8f0a6-157c-42f1-81b2-6bccecdcf626-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.422986 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/18c8f0a6-157c-42f1-81b2-6bccecdcf626-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.423047 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-slhdl\" (UniqueName: \"kubernetes.io/projected/18c8f0a6-157c-42f1-81b2-6bccecdcf626-kube-api-access-slhdl\") on node \"crc\" DevicePath \"\"" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.423150 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for 
volume \"local-storage17-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage17-crc\") on node \"crc\" " Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.423342 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/18c8f0a6-157c-42f1-81b2-6bccecdcf626-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.423405 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/18c8f0a6-157c-42f1-81b2-6bccecdcf626-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.423463 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/18c8f0a6-157c-42f1-81b2-6bccecdcf626-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.423517 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/18c8f0a6-157c-42f1-81b2-6bccecdcf626-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.422952 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c11b2eee-13cf-408d-9491-999f63a8d17d-run-httpd\") pod \"ceilometer-0\" (UID: \"c11b2eee-13cf-408d-9491-999f63a8d17d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.423420 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c11b2eee-13cf-408d-9491-999f63a8d17d-log-httpd\") pod \"ceilometer-0\" (UID: \"c11b2eee-13cf-408d-9491-999f63a8d17d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.427000 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c11b2eee-13cf-408d-9491-999f63a8d17d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c11b2eee-13cf-408d-9491-999f63a8d17d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.427172 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c11b2eee-13cf-408d-9491-999f63a8d17d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c11b2eee-13cf-408d-9491-999f63a8d17d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.432473 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c11b2eee-13cf-408d-9491-999f63a8d17d-config-data\") pod \"ceilometer-0\" (UID: \"c11b2eee-13cf-408d-9491-999f63a8d17d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.432608 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c11b2eee-13cf-408d-9491-999f63a8d17d-scripts\") pod \"ceilometer-0\" (UID: \"c11b2eee-13cf-408d-9491-999f63a8d17d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.440546 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fpkb5\" (UniqueName: \"kubernetes.io/projected/c11b2eee-13cf-408d-9491-999f63a8d17d-kube-api-access-fpkb5\") pod 
\"ceilometer-0\" (UID: \"c11b2eee-13cf-408d-9491-999f63a8d17d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.448208 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage17-crc" (UniqueName: "kubernetes.io/local-volume/local-storage17-crc") on node "crc" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.474791 4558 scope.go:117] "RemoveContainer" containerID="0dce20dbfe3a972f0fe47fca1883db6999c23e623e3091d1ceb24d004670b189" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.487722 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.487956 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-2rm29" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.502788 4558 scope.go:117] "RemoveContainer" containerID="df12d41982db133a6ef8674d7ad124b0b6d567e3ba158b3289c23642e5c730c7" Jan 20 17:19:38 crc kubenswrapper[4558]: E0120 17:19:38.509992 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"df12d41982db133a6ef8674d7ad124b0b6d567e3ba158b3289c23642e5c730c7\": container with ID starting with df12d41982db133a6ef8674d7ad124b0b6d567e3ba158b3289c23642e5c730c7 not found: ID does not exist" containerID="df12d41982db133a6ef8674d7ad124b0b6d567e3ba158b3289c23642e5c730c7" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.510034 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"df12d41982db133a6ef8674d7ad124b0b6d567e3ba158b3289c23642e5c730c7"} err="failed to get container status \"df12d41982db133a6ef8674d7ad124b0b6d567e3ba158b3289c23642e5c730c7\": rpc error: code = NotFound desc = could not find container \"df12d41982db133a6ef8674d7ad124b0b6d567e3ba158b3289c23642e5c730c7\": container with ID starting with df12d41982db133a6ef8674d7ad124b0b6d567e3ba158b3289c23642e5c730c7 not found: ID does not exist" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.510057 4558 scope.go:117] "RemoveContainer" containerID="4a6dda18259fa409b66e1258f77611034bd2c9dedbd9d0b83a2b9ea4bf10a811" Jan 20 17:19:38 crc kubenswrapper[4558]: E0120 17:19:38.510421 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4a6dda18259fa409b66e1258f77611034bd2c9dedbd9d0b83a2b9ea4bf10a811\": container with ID starting with 4a6dda18259fa409b66e1258f77611034bd2c9dedbd9d0b83a2b9ea4bf10a811 not found: ID does not exist" containerID="4a6dda18259fa409b66e1258f77611034bd2c9dedbd9d0b83a2b9ea4bf10a811" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.510444 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4a6dda18259fa409b66e1258f77611034bd2c9dedbd9d0b83a2b9ea4bf10a811"} err="failed to get container status \"4a6dda18259fa409b66e1258f77611034bd2c9dedbd9d0b83a2b9ea4bf10a811\": rpc error: code = NotFound desc = could not find container \"4a6dda18259fa409b66e1258f77611034bd2c9dedbd9d0b83a2b9ea4bf10a811\": container with ID starting with 4a6dda18259fa409b66e1258f77611034bd2c9dedbd9d0b83a2b9ea4bf10a811 not found: ID does not exist" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.510461 4558 scope.go:117] "RemoveContainer" 
containerID="ec5415b6b921e276981646446295fef3794235a9a2ad227f22d709452690a49e" Jan 20 17:19:38 crc kubenswrapper[4558]: E0120 17:19:38.518236 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ec5415b6b921e276981646446295fef3794235a9a2ad227f22d709452690a49e\": container with ID starting with ec5415b6b921e276981646446295fef3794235a9a2ad227f22d709452690a49e not found: ID does not exist" containerID="ec5415b6b921e276981646446295fef3794235a9a2ad227f22d709452690a49e" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.518268 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ec5415b6b921e276981646446295fef3794235a9a2ad227f22d709452690a49e"} err="failed to get container status \"ec5415b6b921e276981646446295fef3794235a9a2ad227f22d709452690a49e\": rpc error: code = NotFound desc = could not find container \"ec5415b6b921e276981646446295fef3794235a9a2ad227f22d709452690a49e\": container with ID starting with ec5415b6b921e276981646446295fef3794235a9a2ad227f22d709452690a49e not found: ID does not exist" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.518286 4558 scope.go:117] "RemoveContainer" containerID="0dce20dbfe3a972f0fe47fca1883db6999c23e623e3091d1ceb24d004670b189" Jan 20 17:19:38 crc kubenswrapper[4558]: E0120 17:19:38.518724 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0dce20dbfe3a972f0fe47fca1883db6999c23e623e3091d1ceb24d004670b189\": container with ID starting with 0dce20dbfe3a972f0fe47fca1883db6999c23e623e3091d1ceb24d004670b189 not found: ID does not exist" containerID="0dce20dbfe3a972f0fe47fca1883db6999c23e623e3091d1ceb24d004670b189" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.518785 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0dce20dbfe3a972f0fe47fca1883db6999c23e623e3091d1ceb24d004670b189"} err="failed to get container status \"0dce20dbfe3a972f0fe47fca1883db6999c23e623e3091d1ceb24d004670b189\": rpc error: code = NotFound desc = could not find container \"0dce20dbfe3a972f0fe47fca1883db6999c23e623e3091d1ceb24d004670b189\": container with ID starting with 0dce20dbfe3a972f0fe47fca1883db6999c23e623e3091d1ceb24d004670b189 not found: ID does not exist" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.518818 4558 scope.go:117] "RemoveContainer" containerID="df12d41982db133a6ef8674d7ad124b0b6d567e3ba158b3289c23642e5c730c7" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.519173 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"df12d41982db133a6ef8674d7ad124b0b6d567e3ba158b3289c23642e5c730c7"} err="failed to get container status \"df12d41982db133a6ef8674d7ad124b0b6d567e3ba158b3289c23642e5c730c7\": rpc error: code = NotFound desc = could not find container \"df12d41982db133a6ef8674d7ad124b0b6d567e3ba158b3289c23642e5c730c7\": container with ID starting with df12d41982db133a6ef8674d7ad124b0b6d567e3ba158b3289c23642e5c730c7 not found: ID does not exist" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.519254 4558 scope.go:117] "RemoveContainer" containerID="4a6dda18259fa409b66e1258f77611034bd2c9dedbd9d0b83a2b9ea4bf10a811" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.519547 4558 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"4a6dda18259fa409b66e1258f77611034bd2c9dedbd9d0b83a2b9ea4bf10a811"} err="failed to get container status \"4a6dda18259fa409b66e1258f77611034bd2c9dedbd9d0b83a2b9ea4bf10a811\": rpc error: code = NotFound desc = could not find container \"4a6dda18259fa409b66e1258f77611034bd2c9dedbd9d0b83a2b9ea4bf10a811\": container with ID starting with 4a6dda18259fa409b66e1258f77611034bd2c9dedbd9d0b83a2b9ea4bf10a811 not found: ID does not exist" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.519648 4558 scope.go:117] "RemoveContainer" containerID="ec5415b6b921e276981646446295fef3794235a9a2ad227f22d709452690a49e" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.519955 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ec5415b6b921e276981646446295fef3794235a9a2ad227f22d709452690a49e"} err="failed to get container status \"ec5415b6b921e276981646446295fef3794235a9a2ad227f22d709452690a49e\": rpc error: code = NotFound desc = could not find container \"ec5415b6b921e276981646446295fef3794235a9a2ad227f22d709452690a49e\": container with ID starting with ec5415b6b921e276981646446295fef3794235a9a2ad227f22d709452690a49e not found: ID does not exist" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.519988 4558 scope.go:117] "RemoveContainer" containerID="0dce20dbfe3a972f0fe47fca1883db6999c23e623e3091d1ceb24d004670b189" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.521153 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0dce20dbfe3a972f0fe47fca1883db6999c23e623e3091d1ceb24d004670b189"} err="failed to get container status \"0dce20dbfe3a972f0fe47fca1883db6999c23e623e3091d1ceb24d004670b189\": rpc error: code = NotFound desc = could not find container \"0dce20dbfe3a972f0fe47fca1883db6999c23e623e3091d1ceb24d004670b189\": container with ID starting with 0dce20dbfe3a972f0fe47fca1883db6999c23e623e3091d1ceb24d004670b189 not found: ID does not exist" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.521186 4558 scope.go:117] "RemoveContainer" containerID="df12d41982db133a6ef8674d7ad124b0b6d567e3ba158b3289c23642e5c730c7" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.522952 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"df12d41982db133a6ef8674d7ad124b0b6d567e3ba158b3289c23642e5c730c7"} err="failed to get container status \"df12d41982db133a6ef8674d7ad124b0b6d567e3ba158b3289c23642e5c730c7\": rpc error: code = NotFound desc = could not find container \"df12d41982db133a6ef8674d7ad124b0b6d567e3ba158b3289c23642e5c730c7\": container with ID starting with df12d41982db133a6ef8674d7ad124b0b6d567e3ba158b3289c23642e5c730c7 not found: ID does not exist" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.523007 4558 scope.go:117] "RemoveContainer" containerID="4a6dda18259fa409b66e1258f77611034bd2c9dedbd9d0b83a2b9ea4bf10a811" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.524145 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4a6dda18259fa409b66e1258f77611034bd2c9dedbd9d0b83a2b9ea4bf10a811"} err="failed to get container status \"4a6dda18259fa409b66e1258f77611034bd2c9dedbd9d0b83a2b9ea4bf10a811\": rpc error: code = NotFound desc = could not find container \"4a6dda18259fa409b66e1258f77611034bd2c9dedbd9d0b83a2b9ea4bf10a811\": container with ID starting with 4a6dda18259fa409b66e1258f77611034bd2c9dedbd9d0b83a2b9ea4bf10a811 not found: ID does not exist" Jan 
20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.524185 4558 scope.go:117] "RemoveContainer" containerID="ec5415b6b921e276981646446295fef3794235a9a2ad227f22d709452690a49e" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.524441 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ec5415b6b921e276981646446295fef3794235a9a2ad227f22d709452690a49e"} err="failed to get container status \"ec5415b6b921e276981646446295fef3794235a9a2ad227f22d709452690a49e\": rpc error: code = NotFound desc = could not find container \"ec5415b6b921e276981646446295fef3794235a9a2ad227f22d709452690a49e\": container with ID starting with ec5415b6b921e276981646446295fef3794235a9a2ad227f22d709452690a49e not found: ID does not exist" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.524486 4558 scope.go:117] "RemoveContainer" containerID="0dce20dbfe3a972f0fe47fca1883db6999c23e623e3091d1ceb24d004670b189" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.524773 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/743c3bad-0487-4ff7-a447-82cec153136a-combined-ca-bundle\") pod \"743c3bad-0487-4ff7-a447-82cec153136a\" (UID: \"743c3bad-0487-4ff7-a447-82cec153136a\") " Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.524985 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/743c3bad-0487-4ff7-a447-82cec153136a-config-data\") pod \"743c3bad-0487-4ff7-a447-82cec153136a\" (UID: \"743c3bad-0487-4ff7-a447-82cec153136a\") " Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.525077 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/743c3bad-0487-4ff7-a447-82cec153136a-scripts\") pod \"743c3bad-0487-4ff7-a447-82cec153136a\" (UID: \"743c3bad-0487-4ff7-a447-82cec153136a\") " Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.525186 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nff5p\" (UniqueName: \"kubernetes.io/projected/743c3bad-0487-4ff7-a447-82cec153136a-kube-api-access-nff5p\") pod \"743c3bad-0487-4ff7-a447-82cec153136a\" (UID: \"743c3bad-0487-4ff7-a447-82cec153136a\") " Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.525872 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage17-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage17-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.526042 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0dce20dbfe3a972f0fe47fca1883db6999c23e623e3091d1ceb24d004670b189"} err="failed to get container status \"0dce20dbfe3a972f0fe47fca1883db6999c23e623e3091d1ceb24d004670b189\": rpc error: code = NotFound desc = could not find container \"0dce20dbfe3a972f0fe47fca1883db6999c23e623e3091d1ceb24d004670b189\": container with ID starting with 0dce20dbfe3a972f0fe47fca1883db6999c23e623e3091d1ceb24d004670b189 not found: ID does not exist" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.526072 4558 scope.go:117] "RemoveContainer" containerID="df12d41982db133a6ef8674d7ad124b0b6d567e3ba158b3289c23642e5c730c7" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.526385 4558 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"df12d41982db133a6ef8674d7ad124b0b6d567e3ba158b3289c23642e5c730c7"} err="failed to get container status \"df12d41982db133a6ef8674d7ad124b0b6d567e3ba158b3289c23642e5c730c7\": rpc error: code = NotFound desc = could not find container \"df12d41982db133a6ef8674d7ad124b0b6d567e3ba158b3289c23642e5c730c7\": container with ID starting with df12d41982db133a6ef8674d7ad124b0b6d567e3ba158b3289c23642e5c730c7 not found: ID does not exist" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.526408 4558 scope.go:117] "RemoveContainer" containerID="4a6dda18259fa409b66e1258f77611034bd2c9dedbd9d0b83a2b9ea4bf10a811" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.526607 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4a6dda18259fa409b66e1258f77611034bd2c9dedbd9d0b83a2b9ea4bf10a811"} err="failed to get container status \"4a6dda18259fa409b66e1258f77611034bd2c9dedbd9d0b83a2b9ea4bf10a811\": rpc error: code = NotFound desc = could not find container \"4a6dda18259fa409b66e1258f77611034bd2c9dedbd9d0b83a2b9ea4bf10a811\": container with ID starting with 4a6dda18259fa409b66e1258f77611034bd2c9dedbd9d0b83a2b9ea4bf10a811 not found: ID does not exist" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.526629 4558 scope.go:117] "RemoveContainer" containerID="ec5415b6b921e276981646446295fef3794235a9a2ad227f22d709452690a49e" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.526807 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ec5415b6b921e276981646446295fef3794235a9a2ad227f22d709452690a49e"} err="failed to get container status \"ec5415b6b921e276981646446295fef3794235a9a2ad227f22d709452690a49e\": rpc error: code = NotFound desc = could not find container \"ec5415b6b921e276981646446295fef3794235a9a2ad227f22d709452690a49e\": container with ID starting with ec5415b6b921e276981646446295fef3794235a9a2ad227f22d709452690a49e not found: ID does not exist" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.526825 4558 scope.go:117] "RemoveContainer" containerID="0dce20dbfe3a972f0fe47fca1883db6999c23e623e3091d1ceb24d004670b189" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.528455 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0dce20dbfe3a972f0fe47fca1883db6999c23e623e3091d1ceb24d004670b189"} err="failed to get container status \"0dce20dbfe3a972f0fe47fca1883db6999c23e623e3091d1ceb24d004670b189\": rpc error: code = NotFound desc = could not find container \"0dce20dbfe3a972f0fe47fca1883db6999c23e623e3091d1ceb24d004670b189\": container with ID starting with 0dce20dbfe3a972f0fe47fca1883db6999c23e623e3091d1ceb24d004670b189 not found: ID does not exist" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.529450 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/743c3bad-0487-4ff7-a447-82cec153136a-scripts" (OuterVolumeSpecName: "scripts") pod "743c3bad-0487-4ff7-a447-82cec153136a" (UID: "743c3bad-0487-4ff7-a447-82cec153136a"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.530409 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/743c3bad-0487-4ff7-a447-82cec153136a-kube-api-access-nff5p" (OuterVolumeSpecName: "kube-api-access-nff5p") pod "743c3bad-0487-4ff7-a447-82cec153136a" (UID: "743c3bad-0487-4ff7-a447-82cec153136a"). InnerVolumeSpecName "kube-api-access-nff5p". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.573529 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/743c3bad-0487-4ff7-a447-82cec153136a-config-data" (OuterVolumeSpecName: "config-data") pod "743c3bad-0487-4ff7-a447-82cec153136a" (UID: "743c3bad-0487-4ff7-a447-82cec153136a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.576498 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/743c3bad-0487-4ff7-a447-82cec153136a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "743c3bad-0487-4ff7-a447-82cec153136a" (UID: "743c3bad-0487-4ff7-a447-82cec153136a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.583335 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="14e220fa-c2cf-40d1-b016-745fa9789bb8" path="/var/lib/kubelet/pods/14e220fa-c2cf-40d1-b016-745fa9789bb8/volumes" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.584224 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4f358da8-45ff-43a3-a671-05c81bb940a8" path="/var/lib/kubelet/pods/4f358da8-45ff-43a3-a671-05c81bb940a8/volumes" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.627756 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/743c3bad-0487-4ff7-a447-82cec153136a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.628194 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/743c3bad-0487-4ff7-a447-82cec153136a-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.628206 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/743c3bad-0487-4ff7-a447-82cec153136a-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.628216 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nff5p\" (UniqueName: \"kubernetes.io/projected/743c3bad-0487-4ff7-a447-82cec153136a-kube-api-access-nff5p\") on node \"crc\" DevicePath \"\"" Jan 20 17:19:38 crc kubenswrapper[4558]: I0120 17:19:38.915241 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:19:38 crc kubenswrapper[4558]: W0120 17:19:38.925157 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc11b2eee_13cf_408d_9491_999f63a8d17d.slice/crio-42f3307c82790a12fe5a33fa13c5602ba216db9d618fad5d336cb001f1a3c66a WatchSource:0}: Error finding container 42f3307c82790a12fe5a33fa13c5602ba216db9d618fad5d336cb001f1a3c66a: Status 404 returned 
error can't find the container with id 42f3307c82790a12fe5a33fa13c5602ba216db9d618fad5d336cb001f1a3c66a Jan 20 17:19:39 crc kubenswrapper[4558]: I0120 17:19:39.100196 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"c11b2eee-13cf-408d-9491-999f63a8d17d","Type":"ContainerStarted","Data":"42f3307c82790a12fe5a33fa13c5602ba216db9d618fad5d336cb001f1a3c66a"} Jan 20 17:19:39 crc kubenswrapper[4558]: I0120 17:19:39.103599 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-2rm29" event={"ID":"743c3bad-0487-4ff7-a447-82cec153136a","Type":"ContainerDied","Data":"ef4f0b9880ea70adc5885ba48263aa5a3590f0d41a65838e42ba528c7fa5f971"} Jan 20 17:19:39 crc kubenswrapper[4558]: I0120 17:19:39.103652 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ef4f0b9880ea70adc5885ba48263aa5a3590f0d41a65838e42ba528c7fa5f971" Jan 20 17:19:39 crc kubenswrapper[4558]: I0120 17:19:39.103730 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-2rm29" Jan 20 17:19:39 crc kubenswrapper[4558]: I0120 17:19:39.121998 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"18c8f0a6-157c-42f1-81b2-6bccecdcf626","Type":"ContainerDied","Data":"2c4b181c93ebe61a4fc92f9ec89fe7a5c1de8fef8ba73ced0be26ecf02055bb5"} Jan 20 17:19:39 crc kubenswrapper[4558]: I0120 17:19:39.122071 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:19:39 crc kubenswrapper[4558]: I0120 17:19:39.122798 4558 scope.go:117] "RemoveContainer" containerID="9dae4a94a0dceb1b3d4ff1d0035c4b59f730610c281890e1dedf27394f9fa41c" Jan 20 17:19:39 crc kubenswrapper[4558]: I0120 17:19:39.128360 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:19:39 crc kubenswrapper[4558]: E0120 17:19:39.128843 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="743c3bad-0487-4ff7-a447-82cec153136a" containerName="nova-cell0-conductor-db-sync" Jan 20 17:19:39 crc kubenswrapper[4558]: I0120 17:19:39.128863 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="743c3bad-0487-4ff7-a447-82cec153136a" containerName="nova-cell0-conductor-db-sync" Jan 20 17:19:39 crc kubenswrapper[4558]: I0120 17:19:39.129114 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="743c3bad-0487-4ff7-a447-82cec153136a" containerName="nova-cell0-conductor-db-sync" Jan 20 17:19:39 crc kubenswrapper[4558]: I0120 17:19:39.129799 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:19:39 crc kubenswrapper[4558]: I0120 17:19:39.132681 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-conductor-config-data" Jan 20 17:19:39 crc kubenswrapper[4558]: I0120 17:19:39.132941 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-nova-dockercfg-tw79t" Jan 20 17:19:39 crc kubenswrapper[4558]: I0120 17:19:39.134671 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"55138e51-715c-42a1-8e1d-bca65a31d55c","Type":"ContainerStarted","Data":"9a84634fbab19582b6a07c9fe1a1511ad1a285ca37ba97810d1c6c72e31ebe4c"} Jan 20 17:19:39 crc kubenswrapper[4558]: I0120 17:19:39.136431 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:19:39 crc kubenswrapper[4558]: I0120 17:19:39.176264 4558 scope.go:117] "RemoveContainer" containerID="1cbcd65376eed320dc67eb5d4e26e980c1e4099da6b8e76656548d1fdcc0484d" Jan 20 17:19:39 crc kubenswrapper[4558]: I0120 17:19:39.186744 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-internal-api-0" podStartSLOduration=2.186725295 podStartE2EDuration="2.186725295s" podCreationTimestamp="2026-01-20 17:19:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:19:39.176654075 +0000 UTC m=+2272.936992043" watchObservedRunningTime="2026-01-20 17:19:39.186725295 +0000 UTC m=+2272.947063262" Jan 20 17:19:39 crc kubenswrapper[4558]: I0120 17:19:39.203747 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:19:39 crc kubenswrapper[4558]: I0120 17:19:39.211092 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:19:39 crc kubenswrapper[4558]: I0120 17:19:39.219127 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:19:39 crc kubenswrapper[4558]: I0120 17:19:39.220905 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:19:39 crc kubenswrapper[4558]: I0120 17:19:39.222724 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-external-config-data" Jan 20 17:19:39 crc kubenswrapper[4558]: I0120 17:19:39.222911 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-public-svc" Jan 20 17:19:39 crc kubenswrapper[4558]: I0120 17:19:39.224949 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:19:39 crc kubenswrapper[4558]: I0120 17:19:39.243575 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ce622e1-3bed-423c-8870-1243fabacc8e-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"1ce622e1-3bed-423c-8870-1243fabacc8e\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:19:39 crc kubenswrapper[4558]: I0120 17:19:39.243645 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/20cdc3df-1626-4f56-8a92-9df64c1ed2fe-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"20cdc3df-1626-4f56-8a92-9df64c1ed2fe\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:19:39 crc kubenswrapper[4558]: I0120 17:19:39.243672 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-psk66\" (UniqueName: \"kubernetes.io/projected/20cdc3df-1626-4f56-8a92-9df64c1ed2fe-kube-api-access-psk66\") pod \"glance-default-external-api-0\" (UID: \"20cdc3df-1626-4f56-8a92-9df64c1ed2fe\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:19:39 crc kubenswrapper[4558]: I0120 17:19:39.243691 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage17-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage17-crc\") pod \"glance-default-external-api-0\" (UID: \"20cdc3df-1626-4f56-8a92-9df64c1ed2fe\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:19:39 crc kubenswrapper[4558]: I0120 17:19:39.243725 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/20cdc3df-1626-4f56-8a92-9df64c1ed2fe-scripts\") pod \"glance-default-external-api-0\" (UID: \"20cdc3df-1626-4f56-8a92-9df64c1ed2fe\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:19:39 crc kubenswrapper[4558]: I0120 17:19:39.243802 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/20cdc3df-1626-4f56-8a92-9df64c1ed2fe-logs\") pod \"glance-default-external-api-0\" (UID: \"20cdc3df-1626-4f56-8a92-9df64c1ed2fe\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:19:39 crc kubenswrapper[4558]: I0120 17:19:39.243823 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/20cdc3df-1626-4f56-8a92-9df64c1ed2fe-config-data\") pod \"glance-default-external-api-0\" (UID: \"20cdc3df-1626-4f56-8a92-9df64c1ed2fe\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:19:39 crc 
kubenswrapper[4558]: I0120 17:19:39.243858 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20cdc3df-1626-4f56-8a92-9df64c1ed2fe-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"20cdc3df-1626-4f56-8a92-9df64c1ed2fe\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:19:39 crc kubenswrapper[4558]: I0120 17:19:39.243900 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1ce622e1-3bed-423c-8870-1243fabacc8e-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"1ce622e1-3bed-423c-8870-1243fabacc8e\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:19:39 crc kubenswrapper[4558]: I0120 17:19:39.243929 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/20cdc3df-1626-4f56-8a92-9df64c1ed2fe-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"20cdc3df-1626-4f56-8a92-9df64c1ed2fe\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:19:39 crc kubenswrapper[4558]: I0120 17:19:39.243973 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dwpm2\" (UniqueName: \"kubernetes.io/projected/1ce622e1-3bed-423c-8870-1243fabacc8e-kube-api-access-dwpm2\") pod \"nova-cell0-conductor-0\" (UID: \"1ce622e1-3bed-423c-8870-1243fabacc8e\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:19:39 crc kubenswrapper[4558]: I0120 17:19:39.346548 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/20cdc3df-1626-4f56-8a92-9df64c1ed2fe-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"20cdc3df-1626-4f56-8a92-9df64c1ed2fe\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:19:39 crc kubenswrapper[4558]: I0120 17:19:39.346620 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-psk66\" (UniqueName: \"kubernetes.io/projected/20cdc3df-1626-4f56-8a92-9df64c1ed2fe-kube-api-access-psk66\") pod \"glance-default-external-api-0\" (UID: \"20cdc3df-1626-4f56-8a92-9df64c1ed2fe\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:19:39 crc kubenswrapper[4558]: I0120 17:19:39.346651 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage17-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage17-crc\") pod \"glance-default-external-api-0\" (UID: \"20cdc3df-1626-4f56-8a92-9df64c1ed2fe\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:19:39 crc kubenswrapper[4558]: I0120 17:19:39.346683 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/20cdc3df-1626-4f56-8a92-9df64c1ed2fe-scripts\") pod \"glance-default-external-api-0\" (UID: \"20cdc3df-1626-4f56-8a92-9df64c1ed2fe\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:19:39 crc kubenswrapper[4558]: I0120 17:19:39.346784 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/20cdc3df-1626-4f56-8a92-9df64c1ed2fe-logs\") pod \"glance-default-external-api-0\" (UID: \"20cdc3df-1626-4f56-8a92-9df64c1ed2fe\") " 
pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:19:39 crc kubenswrapper[4558]: I0120 17:19:39.346809 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/20cdc3df-1626-4f56-8a92-9df64c1ed2fe-config-data\") pod \"glance-default-external-api-0\" (UID: \"20cdc3df-1626-4f56-8a92-9df64c1ed2fe\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:19:39 crc kubenswrapper[4558]: I0120 17:19:39.346854 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20cdc3df-1626-4f56-8a92-9df64c1ed2fe-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"20cdc3df-1626-4f56-8a92-9df64c1ed2fe\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:19:39 crc kubenswrapper[4558]: I0120 17:19:39.346913 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1ce622e1-3bed-423c-8870-1243fabacc8e-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"1ce622e1-3bed-423c-8870-1243fabacc8e\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:19:39 crc kubenswrapper[4558]: I0120 17:19:39.346956 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/20cdc3df-1626-4f56-8a92-9df64c1ed2fe-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"20cdc3df-1626-4f56-8a92-9df64c1ed2fe\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:19:39 crc kubenswrapper[4558]: I0120 17:19:39.347002 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dwpm2\" (UniqueName: \"kubernetes.io/projected/1ce622e1-3bed-423c-8870-1243fabacc8e-kube-api-access-dwpm2\") pod \"nova-cell0-conductor-0\" (UID: \"1ce622e1-3bed-423c-8870-1243fabacc8e\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:19:39 crc kubenswrapper[4558]: I0120 17:19:39.347063 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ce622e1-3bed-423c-8870-1243fabacc8e-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"1ce622e1-3bed-423c-8870-1243fabacc8e\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:19:39 crc kubenswrapper[4558]: I0120 17:19:39.347623 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/20cdc3df-1626-4f56-8a92-9df64c1ed2fe-logs\") pod \"glance-default-external-api-0\" (UID: \"20cdc3df-1626-4f56-8a92-9df64c1ed2fe\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:19:39 crc kubenswrapper[4558]: I0120 17:19:39.347720 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/20cdc3df-1626-4f56-8a92-9df64c1ed2fe-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"20cdc3df-1626-4f56-8a92-9df64c1ed2fe\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:19:39 crc kubenswrapper[4558]: I0120 17:19:39.347949 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage17-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage17-crc\") pod \"glance-default-external-api-0\" (UID: \"20cdc3df-1626-4f56-8a92-9df64c1ed2fe\") device mount path \"/mnt/openstack/pv17\"" 
pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:19:39 crc kubenswrapper[4558]: I0120 17:19:39.353751 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/20cdc3df-1626-4f56-8a92-9df64c1ed2fe-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"20cdc3df-1626-4f56-8a92-9df64c1ed2fe\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:19:39 crc kubenswrapper[4558]: I0120 17:19:39.355087 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/20cdc3df-1626-4f56-8a92-9df64c1ed2fe-config-data\") pod \"glance-default-external-api-0\" (UID: \"20cdc3df-1626-4f56-8a92-9df64c1ed2fe\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:19:39 crc kubenswrapper[4558]: I0120 17:19:39.355824 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1ce622e1-3bed-423c-8870-1243fabacc8e-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"1ce622e1-3bed-423c-8870-1243fabacc8e\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:19:39 crc kubenswrapper[4558]: I0120 17:19:39.356016 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/20cdc3df-1626-4f56-8a92-9df64c1ed2fe-scripts\") pod \"glance-default-external-api-0\" (UID: \"20cdc3df-1626-4f56-8a92-9df64c1ed2fe\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:19:39 crc kubenswrapper[4558]: I0120 17:19:39.356324 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20cdc3df-1626-4f56-8a92-9df64c1ed2fe-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"20cdc3df-1626-4f56-8a92-9df64c1ed2fe\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:19:39 crc kubenswrapper[4558]: I0120 17:19:39.370921 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ce622e1-3bed-423c-8870-1243fabacc8e-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"1ce622e1-3bed-423c-8870-1243fabacc8e\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:19:39 crc kubenswrapper[4558]: I0120 17:19:39.376825 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dwpm2\" (UniqueName: \"kubernetes.io/projected/1ce622e1-3bed-423c-8870-1243fabacc8e-kube-api-access-dwpm2\") pod \"nova-cell0-conductor-0\" (UID: \"1ce622e1-3bed-423c-8870-1243fabacc8e\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:19:39 crc kubenswrapper[4558]: I0120 17:19:39.378157 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-psk66\" (UniqueName: \"kubernetes.io/projected/20cdc3df-1626-4f56-8a92-9df64c1ed2fe-kube-api-access-psk66\") pod \"glance-default-external-api-0\" (UID: \"20cdc3df-1626-4f56-8a92-9df64c1ed2fe\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:19:39 crc kubenswrapper[4558]: I0120 17:19:39.398945 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage17-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage17-crc\") pod \"glance-default-external-api-0\" (UID: \"20cdc3df-1626-4f56-8a92-9df64c1ed2fe\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 
20 17:19:39 crc kubenswrapper[4558]: I0120 17:19:39.473473 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:19:39 crc kubenswrapper[4558]: I0120 17:19:39.543974 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:19:39 crc kubenswrapper[4558]: I0120 17:19:39.566725 4558 scope.go:117] "RemoveContainer" containerID="0ffce44366a8b4466427b682fb41640425366a0f4449210959148de9aef695c6" Jan 20 17:19:39 crc kubenswrapper[4558]: E0120 17:19:39.567056 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:19:39 crc kubenswrapper[4558]: I0120 17:19:39.894714 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:19:40 crc kubenswrapper[4558]: W0120 17:19:40.056546 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod20cdc3df_1626_4f56_8a92_9df64c1ed2fe.slice/crio-1e0d541d89501c62bc139c127c6a46f23f965cd6aa3deda808f036d69f424ad5 WatchSource:0}: Error finding container 1e0d541d89501c62bc139c127c6a46f23f965cd6aa3deda808f036d69f424ad5: Status 404 returned error can't find the container with id 1e0d541d89501c62bc139c127c6a46f23f965cd6aa3deda808f036d69f424ad5 Jan 20 17:19:40 crc kubenswrapper[4558]: I0120 17:19:40.063413 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:19:40 crc kubenswrapper[4558]: I0120 17:19:40.151608 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"20cdc3df-1626-4f56-8a92-9df64c1ed2fe","Type":"ContainerStarted","Data":"1e0d541d89501c62bc139c127c6a46f23f965cd6aa3deda808f036d69f424ad5"} Jan 20 17:19:40 crc kubenswrapper[4558]: I0120 17:19:40.153979 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"1ce622e1-3bed-423c-8870-1243fabacc8e","Type":"ContainerStarted","Data":"d1ff32632fd1bfa14bc2e72839ef954e6b06e71a5542a2689718a87ddb352720"} Jan 20 17:19:40 crc kubenswrapper[4558]: I0120 17:19:40.154094 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"1ce622e1-3bed-423c-8870-1243fabacc8e","Type":"ContainerStarted","Data":"bc5b80c80e88cafd8548eb929c12b47ab9d6d388c35b0a91f6d2a8159d3d92c7"} Jan 20 17:19:40 crc kubenswrapper[4558]: I0120 17:19:40.154190 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:19:40 crc kubenswrapper[4558]: I0120 17:19:40.156854 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"55138e51-715c-42a1-8e1d-bca65a31d55c","Type":"ContainerStarted","Data":"ad7e074652e180a31db91a23980de422d274542b0626f62f61e86a2d3f3088d7"} Jan 20 17:19:40 crc kubenswrapper[4558]: I0120 17:19:40.158973 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"c11b2eee-13cf-408d-9491-999f63a8d17d","Type":"ContainerStarted","Data":"d407bc3d33d63e6d07b6d9bde845c1c840d6bde6647d2e7de707c8f66f77f335"} Jan 20 17:19:40 crc kubenswrapper[4558]: I0120 17:19:40.175026 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podStartSLOduration=1.175006137 podStartE2EDuration="1.175006137s" podCreationTimestamp="2026-01-20 17:19:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:19:40.170717742 +0000 UTC m=+2273.931055709" watchObservedRunningTime="2026-01-20 17:19:40.175006137 +0000 UTC m=+2273.935344104" Jan 20 17:19:40 crc kubenswrapper[4558]: I0120 17:19:40.581296 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="18c8f0a6-157c-42f1-81b2-6bccecdcf626" path="/var/lib/kubelet/pods/18c8f0a6-157c-42f1-81b2-6bccecdcf626/volumes" Jan 20 17:19:41 crc kubenswrapper[4558]: I0120 17:19:41.175562 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"20cdc3df-1626-4f56-8a92-9df64c1ed2fe","Type":"ContainerStarted","Data":"37ed8e0f109228dfcc4e9447b9278994669497b6597de7b6864c0f3e20436236"} Jan 20 17:19:41 crc kubenswrapper[4558]: I0120 17:19:41.175923 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"20cdc3df-1626-4f56-8a92-9df64c1ed2fe","Type":"ContainerStarted","Data":"e156d2f38d6403388f01acbe07670bd0efccb2c563ca79b7e6061ec9d3596440"} Jan 20 17:19:41 crc kubenswrapper[4558]: I0120 17:19:41.178791 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"c11b2eee-13cf-408d-9491-999f63a8d17d","Type":"ContainerStarted","Data":"eac4d88ed5ece7169be09bc311c9059d08357e72d46f8f7d7499d7362b4ec861"} Jan 20 17:19:41 crc kubenswrapper[4558]: I0120 17:19:41.206679 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-external-api-0" podStartSLOduration=2.206664812 podStartE2EDuration="2.206664812s" podCreationTimestamp="2026-01-20 17:19:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:19:41.19665604 +0000 UTC m=+2274.956994007" watchObservedRunningTime="2026-01-20 17:19:41.206664812 +0000 UTC m=+2274.967002779" Jan 20 17:19:42 crc kubenswrapper[4558]: I0120 17:19:42.201907 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"c11b2eee-13cf-408d-9491-999f63a8d17d","Type":"ContainerStarted","Data":"320e50bb2db9f8366b0723a0424263a4a16c5acd0e11b09caa9c6f4235d12b7d"} Jan 20 17:19:44 crc kubenswrapper[4558]: I0120 17:19:44.233633 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"c11b2eee-13cf-408d-9491-999f63a8d17d","Type":"ContainerStarted","Data":"c7f179345fc798000e40658b959729e175165500b34eea0e2d2437d4788d294a"} Jan 20 17:19:44 crc kubenswrapper[4558]: I0120 17:19:44.234301 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:19:44 crc kubenswrapper[4558]: I0120 17:19:44.254727 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" 
podStartSLOduration=2.1196086 podStartE2EDuration="6.254692389s" podCreationTimestamp="2026-01-20 17:19:38 +0000 UTC" firstStartedPulling="2026-01-20 17:19:38.92936763 +0000 UTC m=+2272.689705597" lastFinishedPulling="2026-01-20 17:19:43.064451418 +0000 UTC m=+2276.824789386" observedRunningTime="2026-01-20 17:19:44.250485589 +0000 UTC m=+2278.010823556" watchObservedRunningTime="2026-01-20 17:19:44.254692389 +0000 UTC m=+2278.015030357" Jan 20 17:19:47 crc kubenswrapper[4558]: I0120 17:19:47.477481 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:19:47 crc kubenswrapper[4558]: I0120 17:19:47.478022 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:19:47 crc kubenswrapper[4558]: I0120 17:19:47.506764 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:19:47 crc kubenswrapper[4558]: I0120 17:19:47.512592 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:19:48 crc kubenswrapper[4558]: I0120 17:19:48.279708 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:19:48 crc kubenswrapper[4558]: I0120 17:19:48.279764 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:19:49 crc kubenswrapper[4558]: I0120 17:19:49.501371 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:19:49 crc kubenswrapper[4558]: I0120 17:19:49.544509 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:19:49 crc kubenswrapper[4558]: I0120 17:19:49.544566 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:19:49 crc kubenswrapper[4558]: I0120 17:19:49.572199 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:19:49 crc kubenswrapper[4558]: I0120 17:19:49.575997 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:19:49 crc kubenswrapper[4558]: I0120 17:19:49.911897 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell0-cell-mapping-tfgvg"] Jan 20 17:19:49 crc kubenswrapper[4558]: I0120 17:19:49.913223 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-tfgvg" Jan 20 17:19:49 crc kubenswrapper[4558]: I0120 17:19:49.915907 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-manage-scripts" Jan 20 17:19:49 crc kubenswrapper[4558]: I0120 17:19:49.916015 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-manage-config-data" Jan 20 17:19:49 crc kubenswrapper[4558]: I0120 17:19:49.922863 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-cell-mapping-tfgvg"] Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.004383 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.006396 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e2e3a805-bb9e-4e87-b57d-99fb395130bf-scripts\") pod \"nova-cell0-cell-mapping-tfgvg\" (UID: \"e2e3a805-bb9e-4e87-b57d-99fb395130bf\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-tfgvg" Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.006599 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2e3a805-bb9e-4e87-b57d-99fb395130bf-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-tfgvg\" (UID: \"e2e3a805-bb9e-4e87-b57d-99fb395130bf\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-tfgvg" Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.006716 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2e3a805-bb9e-4e87-b57d-99fb395130bf-config-data\") pod \"nova-cell0-cell-mapping-tfgvg\" (UID: \"e2e3a805-bb9e-4e87-b57d-99fb395130bf\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-tfgvg" Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.006750 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zv4m6\" (UniqueName: \"kubernetes.io/projected/e2e3a805-bb9e-4e87-b57d-99fb395130bf-kube-api-access-zv4m6\") pod \"nova-cell0-cell-mapping-tfgvg\" (UID: \"e2e3a805-bb9e-4e87-b57d-99fb395130bf\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-tfgvg" Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.091582 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.093643 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.096695 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-api-config-data" Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.110105 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e2e3a805-bb9e-4e87-b57d-99fb395130bf-scripts\") pod \"nova-cell0-cell-mapping-tfgvg\" (UID: \"e2e3a805-bb9e-4e87-b57d-99fb395130bf\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-tfgvg" Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.110269 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2e3a805-bb9e-4e87-b57d-99fb395130bf-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-tfgvg\" (UID: \"e2e3a805-bb9e-4e87-b57d-99fb395130bf\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-tfgvg" Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.110350 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2e3a805-bb9e-4e87-b57d-99fb395130bf-config-data\") pod \"nova-cell0-cell-mapping-tfgvg\" (UID: \"e2e3a805-bb9e-4e87-b57d-99fb395130bf\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-tfgvg" Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.110373 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zv4m6\" (UniqueName: \"kubernetes.io/projected/e2e3a805-bb9e-4e87-b57d-99fb395130bf-kube-api-access-zv4m6\") pod \"nova-cell0-cell-mapping-tfgvg\" (UID: \"e2e3a805-bb9e-4e87-b57d-99fb395130bf\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-tfgvg" Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.119305 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2e3a805-bb9e-4e87-b57d-99fb395130bf-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-tfgvg\" (UID: \"e2e3a805-bb9e-4e87-b57d-99fb395130bf\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-tfgvg" Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.119372 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.120746 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e2e3a805-bb9e-4e87-b57d-99fb395130bf-scripts\") pod \"nova-cell0-cell-mapping-tfgvg\" (UID: \"e2e3a805-bb9e-4e87-b57d-99fb395130bf\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-tfgvg" Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.122731 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2e3a805-bb9e-4e87-b57d-99fb395130bf-config-data\") pod \"nova-cell0-cell-mapping-tfgvg\" (UID: \"e2e3a805-bb9e-4e87-b57d-99fb395130bf\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-tfgvg" Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.122860 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.131558 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-scheduler-config-data" Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.135109 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zv4m6\" (UniqueName: \"kubernetes.io/projected/e2e3a805-bb9e-4e87-b57d-99fb395130bf-kube-api-access-zv4m6\") pod \"nova-cell0-cell-mapping-tfgvg\" (UID: \"e2e3a805-bb9e-4e87-b57d-99fb395130bf\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-tfgvg" Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.135206 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.137278 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.142444 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-metadata-config-data" Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.153213 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.164394 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.188246 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.198231 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.214943 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f595b84e-54f3-47dc-91e6-8c4ba1281542-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"f595b84e-54f3-47dc-91e6-8c4ba1281542\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.215011 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-blfvf\" (UniqueName: \"kubernetes.io/projected/49f5b91e-2110-4750-b1ca-5dbcf2df3f18-kube-api-access-blfvf\") pod \"nova-api-0\" (UID: \"49f5b91e-2110-4750-b1ca-5dbcf2df3f18\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.215043 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f595b84e-54f3-47dc-91e6-8c4ba1281542-config-data\") pod \"nova-scheduler-0\" (UID: \"f595b84e-54f3-47dc-91e6-8c4ba1281542\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.215060 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-95stq\" (UniqueName: \"kubernetes.io/projected/f595b84e-54f3-47dc-91e6-8c4ba1281542-kube-api-access-95stq\") pod \"nova-scheduler-0\" (UID: \"f595b84e-54f3-47dc-91e6-8c4ba1281542\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.215119 4558 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/49f5b91e-2110-4750-b1ca-5dbcf2df3f18-logs\") pod \"nova-api-0\" (UID: \"49f5b91e-2110-4750-b1ca-5dbcf2df3f18\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.215140 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/49f5b91e-2110-4750-b1ca-5dbcf2df3f18-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"49f5b91e-2110-4750-b1ca-5dbcf2df3f18\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.215172 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/49f5b91e-2110-4750-b1ca-5dbcf2df3f18-config-data\") pod \"nova-api-0\" (UID: \"49f5b91e-2110-4750-b1ca-5dbcf2df3f18\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.247478 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-tfgvg" Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.317886 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f595b84e-54f3-47dc-91e6-8c4ba1281542-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"f595b84e-54f3-47dc-91e6-8c4ba1281542\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.317964 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d471859-e08c-4bb9-a9d1-881c4e93e522-config-data\") pod \"nova-metadata-0\" (UID: \"3d471859-e08c-4bb9-a9d1-881c4e93e522\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.317996 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zvxp8\" (UniqueName: \"kubernetes.io/projected/3d471859-e08c-4bb9-a9d1-881c4e93e522-kube-api-access-zvxp8\") pod \"nova-metadata-0\" (UID: \"3d471859-e08c-4bb9-a9d1-881c4e93e522\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.318037 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-blfvf\" (UniqueName: \"kubernetes.io/projected/49f5b91e-2110-4750-b1ca-5dbcf2df3f18-kube-api-access-blfvf\") pod \"nova-api-0\" (UID: \"49f5b91e-2110-4750-b1ca-5dbcf2df3f18\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.318067 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f595b84e-54f3-47dc-91e6-8c4ba1281542-config-data\") pod \"nova-scheduler-0\" (UID: \"f595b84e-54f3-47dc-91e6-8c4ba1281542\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.318085 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-95stq\" (UniqueName: \"kubernetes.io/projected/f595b84e-54f3-47dc-91e6-8c4ba1281542-kube-api-access-95stq\") pod \"nova-scheduler-0\" (UID: \"f595b84e-54f3-47dc-91e6-8c4ba1281542\") " 
pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.318111 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d471859-e08c-4bb9-a9d1-881c4e93e522-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"3d471859-e08c-4bb9-a9d1-881c4e93e522\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.318414 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/49f5b91e-2110-4750-b1ca-5dbcf2df3f18-logs\") pod \"nova-api-0\" (UID: \"49f5b91e-2110-4750-b1ca-5dbcf2df3f18\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.318449 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/49f5b91e-2110-4750-b1ca-5dbcf2df3f18-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"49f5b91e-2110-4750-b1ca-5dbcf2df3f18\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.318475 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/49f5b91e-2110-4750-b1ca-5dbcf2df3f18-config-data\") pod \"nova-api-0\" (UID: \"49f5b91e-2110-4750-b1ca-5dbcf2df3f18\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.318515 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3d471859-e08c-4bb9-a9d1-881c4e93e522-logs\") pod \"nova-metadata-0\" (UID: \"3d471859-e08c-4bb9-a9d1-881c4e93e522\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.331614 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f595b84e-54f3-47dc-91e6-8c4ba1281542-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"f595b84e-54f3-47dc-91e6-8c4ba1281542\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.331940 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/49f5b91e-2110-4750-b1ca-5dbcf2df3f18-logs\") pod \"nova-api-0\" (UID: \"49f5b91e-2110-4750-b1ca-5dbcf2df3f18\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.335143 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.336638 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.337988 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/49f5b91e-2110-4750-b1ca-5dbcf2df3f18-config-data\") pod \"nova-api-0\" (UID: \"49f5b91e-2110-4750-b1ca-5dbcf2df3f18\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.340646 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f595b84e-54f3-47dc-91e6-8c4ba1281542-config-data\") pod \"nova-scheduler-0\" (UID: \"f595b84e-54f3-47dc-91e6-8c4ba1281542\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.341075 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/49f5b91e-2110-4750-b1ca-5dbcf2df3f18-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"49f5b91e-2110-4750-b1ca-5dbcf2df3f18\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.361482 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-novncproxy-config-data" Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.365643 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.365678 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.371420 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-95stq\" (UniqueName: \"kubernetes.io/projected/f595b84e-54f3-47dc-91e6-8c4ba1281542-kube-api-access-95stq\") pod \"nova-scheduler-0\" (UID: \"f595b84e-54f3-47dc-91e6-8c4ba1281542\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.378152 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.379674 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-blfvf\" (UniqueName: \"kubernetes.io/projected/49f5b91e-2110-4750-b1ca-5dbcf2df3f18-kube-api-access-blfvf\") pod \"nova-api-0\" (UID: \"49f5b91e-2110-4750-b1ca-5dbcf2df3f18\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.417791 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.421541 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d471859-e08c-4bb9-a9d1-881c4e93e522-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"3d471859-e08c-4bb9-a9d1-881c4e93e522\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.421749 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3d471859-e08c-4bb9-a9d1-881c4e93e522-logs\") pod \"nova-metadata-0\" (UID: \"3d471859-e08c-4bb9-a9d1-881c4e93e522\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.422066 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d471859-e08c-4bb9-a9d1-881c4e93e522-config-data\") pod \"nova-metadata-0\" (UID: \"3d471859-e08c-4bb9-a9d1-881c4e93e522\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.422150 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zvxp8\" (UniqueName: \"kubernetes.io/projected/3d471859-e08c-4bb9-a9d1-881c4e93e522-kube-api-access-zvxp8\") pod \"nova-metadata-0\" (UID: \"3d471859-e08c-4bb9-a9d1-881c4e93e522\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.426200 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3d471859-e08c-4bb9-a9d1-881c4e93e522-logs\") pod \"nova-metadata-0\" (UID: \"3d471859-e08c-4bb9-a9d1-881c4e93e522\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.428419 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d471859-e08c-4bb9-a9d1-881c4e93e522-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"3d471859-e08c-4bb9-a9d1-881c4e93e522\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.430710 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d471859-e08c-4bb9-a9d1-881c4e93e522-config-data\") pod \"nova-metadata-0\" (UID: \"3d471859-e08c-4bb9-a9d1-881c4e93e522\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.442649 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zvxp8\" (UniqueName: \"kubernetes.io/projected/3d471859-e08c-4bb9-a9d1-881c4e93e522-kube-api-access-zvxp8\") pod \"nova-metadata-0\" (UID: \"3d471859-e08c-4bb9-a9d1-881c4e93e522\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.528947 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/921591a4-64f3-4761-ab6d-014fb2627a1c-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"921591a4-64f3-4761-ab6d-014fb2627a1c\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.529016 4558 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8rsjc\" (UniqueName: \"kubernetes.io/projected/921591a4-64f3-4761-ab6d-014fb2627a1c-kube-api-access-8rsjc\") pod \"nova-cell1-novncproxy-0\" (UID: \"921591a4-64f3-4761-ab6d-014fb2627a1c\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.529158 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/921591a4-64f3-4761-ab6d-014fb2627a1c-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"921591a4-64f3-4761-ab6d-014fb2627a1c\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.562817 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.563724 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.636873 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/921591a4-64f3-4761-ab6d-014fb2627a1c-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"921591a4-64f3-4761-ab6d-014fb2627a1c\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.636923 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8rsjc\" (UniqueName: \"kubernetes.io/projected/921591a4-64f3-4761-ab6d-014fb2627a1c-kube-api-access-8rsjc\") pod \"nova-cell1-novncproxy-0\" (UID: \"921591a4-64f3-4761-ab6d-014fb2627a1c\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.637000 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/921591a4-64f3-4761-ab6d-014fb2627a1c-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"921591a4-64f3-4761-ab6d-014fb2627a1c\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.645102 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/921591a4-64f3-4761-ab6d-014fb2627a1c-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"921591a4-64f3-4761-ab6d-014fb2627a1c\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.645975 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/921591a4-64f3-4761-ab6d-014fb2627a1c-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"921591a4-64f3-4761-ab6d-014fb2627a1c\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.658735 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8rsjc\" (UniqueName: \"kubernetes.io/projected/921591a4-64f3-4761-ab6d-014fb2627a1c-kube-api-access-8rsjc\") pod \"nova-cell1-novncproxy-0\" (UID: \"921591a4-64f3-4761-ab6d-014fb2627a1c\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.776132 4558 util.go:30] "No sandbox for pod 
can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:19:50 crc kubenswrapper[4558]: I0120 17:19:50.912942 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-cell-mapping-tfgvg"] Jan 20 17:19:50 crc kubenswrapper[4558]: W0120 17:19:50.924650 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode2e3a805_bb9e_4e87_b57d_99fb395130bf.slice/crio-a35fb9e42d2619cfd723ce69b07ca7892459b2bffb3b0fbe8a3b63b03954b245 WatchSource:0}: Error finding container a35fb9e42d2619cfd723ce69b07ca7892459b2bffb3b0fbe8a3b63b03954b245: Status 404 returned error can't find the container with id a35fb9e42d2619cfd723ce69b07ca7892459b2bffb3b0fbe8a3b63b03954b245 Jan 20 17:19:51 crc kubenswrapper[4558]: I0120 17:19:51.030288 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:19:51 crc kubenswrapper[4558]: I0120 17:19:51.111659 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:19:51 crc kubenswrapper[4558]: I0120 17:19:51.121717 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:19:51 crc kubenswrapper[4558]: I0120 17:19:51.311704 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:19:51 crc kubenswrapper[4558]: I0120 17:19:51.334305 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-db-sync-pnr7q"] Jan 20 17:19:51 crc kubenswrapper[4558]: I0120 17:19:51.358687 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-pnr7q" Jan 20 17:19:51 crc kubenswrapper[4558]: I0120 17:19:51.360142 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-db-sync-pnr7q"] Jan 20 17:19:51 crc kubenswrapper[4558]: I0120 17:19:51.363405 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-conductor-scripts" Jan 20 17:19:51 crc kubenswrapper[4558]: I0120 17:19:51.363598 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-conductor-config-data" Jan 20 17:19:51 crc kubenswrapper[4558]: I0120 17:19:51.382269 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"3d471859-e08c-4bb9-a9d1-881c4e93e522","Type":"ContainerStarted","Data":"22d7c35f64b2305444c01157be0f57eef9da579c5c7f9f07bcd2d7695cf122f8"} Jan 20 17:19:51 crc kubenswrapper[4558]: I0120 17:19:51.383926 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"f595b84e-54f3-47dc-91e6-8c4ba1281542","Type":"ContainerStarted","Data":"48e49c068b0b72d8a8d5682f293b3c02b8625bdea4abf6a3374dc986da1642bd"} Jan 20 17:19:51 crc kubenswrapper[4558]: I0120 17:19:51.387230 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"49f5b91e-2110-4750-b1ca-5dbcf2df3f18","Type":"ContainerStarted","Data":"1637c0d200c720552335ea1fd68fd8a8132bc8709e8e6bf1336fb22350e1d393"} Jan 20 17:19:51 crc kubenswrapper[4558]: I0120 17:19:51.388145 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" 
event={"ID":"921591a4-64f3-4761-ab6d-014fb2627a1c","Type":"ContainerStarted","Data":"21f3353369b55182667c0e4ef5c8635f68a5b34bd9991d58744749f1ddea5f3d"} Jan 20 17:19:51 crc kubenswrapper[4558]: I0120 17:19:51.389614 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-tfgvg" event={"ID":"e2e3a805-bb9e-4e87-b57d-99fb395130bf","Type":"ContainerStarted","Data":"28a7103e4c02a6e13470eb63cb676ead463e061f88b33398c0d422e2998d9f49"} Jan 20 17:19:51 crc kubenswrapper[4558]: I0120 17:19:51.389713 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-tfgvg" event={"ID":"e2e3a805-bb9e-4e87-b57d-99fb395130bf","Type":"ContainerStarted","Data":"a35fb9e42d2619cfd723ce69b07ca7892459b2bffb3b0fbe8a3b63b03954b245"} Jan 20 17:19:51 crc kubenswrapper[4558]: I0120 17:19:51.450481 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-tfgvg" podStartSLOduration=2.4504615579999998 podStartE2EDuration="2.450461558s" podCreationTimestamp="2026-01-20 17:19:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:19:51.417234325 +0000 UTC m=+2285.177572291" watchObservedRunningTime="2026-01-20 17:19:51.450461558 +0000 UTC m=+2285.210799525" Jan 20 17:19:51 crc kubenswrapper[4558]: I0120 17:19:51.474052 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wg8br\" (UniqueName: \"kubernetes.io/projected/df5e4bdf-f36a-40cd-b3a0-968ffe61c5e7-kube-api-access-wg8br\") pod \"nova-cell1-conductor-db-sync-pnr7q\" (UID: \"df5e4bdf-f36a-40cd-b3a0-968ffe61c5e7\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-pnr7q" Jan 20 17:19:51 crc kubenswrapper[4558]: I0120 17:19:51.474630 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df5e4bdf-f36a-40cd-b3a0-968ffe61c5e7-config-data\") pod \"nova-cell1-conductor-db-sync-pnr7q\" (UID: \"df5e4bdf-f36a-40cd-b3a0-968ffe61c5e7\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-pnr7q" Jan 20 17:19:51 crc kubenswrapper[4558]: I0120 17:19:51.474759 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/df5e4bdf-f36a-40cd-b3a0-968ffe61c5e7-scripts\") pod \"nova-cell1-conductor-db-sync-pnr7q\" (UID: \"df5e4bdf-f36a-40cd-b3a0-968ffe61c5e7\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-pnr7q" Jan 20 17:19:51 crc kubenswrapper[4558]: I0120 17:19:51.474917 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df5e4bdf-f36a-40cd-b3a0-968ffe61c5e7-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-pnr7q\" (UID: \"df5e4bdf-f36a-40cd-b3a0-968ffe61c5e7\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-pnr7q" Jan 20 17:19:51 crc kubenswrapper[4558]: I0120 17:19:51.576886 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df5e4bdf-f36a-40cd-b3a0-968ffe61c5e7-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-pnr7q\" (UID: \"df5e4bdf-f36a-40cd-b3a0-968ffe61c5e7\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-pnr7q" Jan 20 17:19:51 
crc kubenswrapper[4558]: I0120 17:19:51.576989 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wg8br\" (UniqueName: \"kubernetes.io/projected/df5e4bdf-f36a-40cd-b3a0-968ffe61c5e7-kube-api-access-wg8br\") pod \"nova-cell1-conductor-db-sync-pnr7q\" (UID: \"df5e4bdf-f36a-40cd-b3a0-968ffe61c5e7\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-pnr7q" Jan 20 17:19:51 crc kubenswrapper[4558]: I0120 17:19:51.577055 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df5e4bdf-f36a-40cd-b3a0-968ffe61c5e7-config-data\") pod \"nova-cell1-conductor-db-sync-pnr7q\" (UID: \"df5e4bdf-f36a-40cd-b3a0-968ffe61c5e7\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-pnr7q" Jan 20 17:19:51 crc kubenswrapper[4558]: I0120 17:19:51.577119 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/df5e4bdf-f36a-40cd-b3a0-968ffe61c5e7-scripts\") pod \"nova-cell1-conductor-db-sync-pnr7q\" (UID: \"df5e4bdf-f36a-40cd-b3a0-968ffe61c5e7\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-pnr7q" Jan 20 17:19:51 crc kubenswrapper[4558]: I0120 17:19:51.587145 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/df5e4bdf-f36a-40cd-b3a0-968ffe61c5e7-scripts\") pod \"nova-cell1-conductor-db-sync-pnr7q\" (UID: \"df5e4bdf-f36a-40cd-b3a0-968ffe61c5e7\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-pnr7q" Jan 20 17:19:51 crc kubenswrapper[4558]: I0120 17:19:51.587476 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df5e4bdf-f36a-40cd-b3a0-968ffe61c5e7-config-data\") pod \"nova-cell1-conductor-db-sync-pnr7q\" (UID: \"df5e4bdf-f36a-40cd-b3a0-968ffe61c5e7\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-pnr7q" Jan 20 17:19:51 crc kubenswrapper[4558]: I0120 17:19:51.597847 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wg8br\" (UniqueName: \"kubernetes.io/projected/df5e4bdf-f36a-40cd-b3a0-968ffe61c5e7-kube-api-access-wg8br\") pod \"nova-cell1-conductor-db-sync-pnr7q\" (UID: \"df5e4bdf-f36a-40cd-b3a0-968ffe61c5e7\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-pnr7q" Jan 20 17:19:51 crc kubenswrapper[4558]: I0120 17:19:51.599391 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df5e4bdf-f36a-40cd-b3a0-968ffe61c5e7-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-pnr7q\" (UID: \"df5e4bdf-f36a-40cd-b3a0-968ffe61c5e7\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-pnr7q" Jan 20 17:19:51 crc kubenswrapper[4558]: I0120 17:19:51.812671 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-pnr7q" Jan 20 17:19:52 crc kubenswrapper[4558]: I0120 17:19:52.254665 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-db-sync-pnr7q"] Jan 20 17:19:52 crc kubenswrapper[4558]: I0120 17:19:52.419908 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"3d471859-e08c-4bb9-a9d1-881c4e93e522","Type":"ContainerStarted","Data":"d49e97643ec6cc17aabe7ee763f2d9296d2cf5adab487b1d211d979327e0ea1d"} Jan 20 17:19:52 crc kubenswrapper[4558]: I0120 17:19:52.419943 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"3d471859-e08c-4bb9-a9d1-881c4e93e522","Type":"ContainerStarted","Data":"5840b80abea983efd2133d7b64de3ad36a6e9dcd4f31037dae89ceaa601c7f3d"} Jan 20 17:19:52 crc kubenswrapper[4558]: I0120 17:19:52.426275 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"f595b84e-54f3-47dc-91e6-8c4ba1281542","Type":"ContainerStarted","Data":"e813ccf21ae7caa0f1376a2b6593c47e8b15a47bdbac4cc50c86256faf3e25eb"} Jan 20 17:19:52 crc kubenswrapper[4558]: I0120 17:19:52.432310 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"49f5b91e-2110-4750-b1ca-5dbcf2df3f18","Type":"ContainerStarted","Data":"467965ce8f331c5d889ed685c001186deef46149b751fc2d12d3b544f29a577d"} Jan 20 17:19:52 crc kubenswrapper[4558]: I0120 17:19:52.432333 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"49f5b91e-2110-4750-b1ca-5dbcf2df3f18","Type":"ContainerStarted","Data":"4d4114d53817aa83a519886615769c5abf77ffb13ff027228a58bde10719d179"} Jan 20 17:19:52 crc kubenswrapper[4558]: I0120 17:19:52.433899 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-pnr7q" event={"ID":"df5e4bdf-f36a-40cd-b3a0-968ffe61c5e7","Type":"ContainerStarted","Data":"69007e5c5ef70529078fd7501af1c9b9a3430c16eea1ffa608c325c209549e0f"} Jan 20 17:19:52 crc kubenswrapper[4558]: I0120 17:19:52.445228 4558 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 20 17:19:52 crc kubenswrapper[4558]: I0120 17:19:52.445250 4558 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 20 17:19:52 crc kubenswrapper[4558]: I0120 17:19:52.445321 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"921591a4-64f3-4761-ab6d-014fb2627a1c","Type":"ContainerStarted","Data":"c53058cff58ceeab69e6a464f6012918cd042d3ad6b0eb188d9592a215be465a"} Jan 20 17:19:52 crc kubenswrapper[4558]: I0120 17:19:52.452906 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-metadata-0" podStartSLOduration=2.452882501 podStartE2EDuration="2.452882501s" podCreationTimestamp="2026-01-20 17:19:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:19:52.437366914 +0000 UTC m=+2286.197704882" watchObservedRunningTime="2026-01-20 17:19:52.452882501 +0000 UTC m=+2286.213220468" Jan 20 17:19:52 crc kubenswrapper[4558]: I0120 17:19:52.467776 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-scheduler-0" podStartSLOduration=2.467762643 
podStartE2EDuration="2.467762643s" podCreationTimestamp="2026-01-20 17:19:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:19:52.451325132 +0000 UTC m=+2286.211663099" watchObservedRunningTime="2026-01-20 17:19:52.467762643 +0000 UTC m=+2286.228100610" Jan 20 17:19:52 crc kubenswrapper[4558]: I0120 17:19:52.488429 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-api-0" podStartSLOduration=2.48842013 podStartE2EDuration="2.48842013s" podCreationTimestamp="2026-01-20 17:19:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:19:52.466980222 +0000 UTC m=+2286.227318209" watchObservedRunningTime="2026-01-20 17:19:52.48842013 +0000 UTC m=+2286.248758097" Jan 20 17:19:52 crc kubenswrapper[4558]: I0120 17:19:52.493950 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" podStartSLOduration=2.493941753 podStartE2EDuration="2.493941753s" podCreationTimestamp="2026-01-20 17:19:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:19:52.484223588 +0000 UTC m=+2286.244561555" watchObservedRunningTime="2026-01-20 17:19:52.493941753 +0000 UTC m=+2286.254279720" Jan 20 17:19:52 crc kubenswrapper[4558]: I0120 17:19:52.566824 4558 scope.go:117] "RemoveContainer" containerID="0ffce44366a8b4466427b682fb41640425366a0f4449210959148de9aef695c6" Jan 20 17:19:52 crc kubenswrapper[4558]: E0120 17:19:52.567222 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:19:52 crc kubenswrapper[4558]: I0120 17:19:52.680106 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:19:52 crc kubenswrapper[4558]: I0120 17:19:52.699805 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:19:52 crc kubenswrapper[4558]: I0120 17:19:52.730900 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:19:52 crc kubenswrapper[4558]: I0120 17:19:52.745371 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:19:53 crc kubenswrapper[4558]: I0120 17:19:53.454709 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-pnr7q" event={"ID":"df5e4bdf-f36a-40cd-b3a0-968ffe61c5e7","Type":"ContainerStarted","Data":"3c17229250a08990f5717d8b2d81a48865a2701c19467dbee26edd7db7249611"} Jan 20 17:19:53 crc kubenswrapper[4558]: I0120 17:19:53.476689 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-pnr7q" podStartSLOduration=2.476664584 podStartE2EDuration="2.476664584s" podCreationTimestamp="2026-01-20 17:19:51 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:19:53.471465087 +0000 UTC m=+2287.231803053" watchObservedRunningTime="2026-01-20 17:19:53.476664584 +0000 UTC m=+2287.237002551" Jan 20 17:19:54 crc kubenswrapper[4558]: I0120 17:19:54.465254 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" podUID="921591a4-64f3-4761-ab6d-014fb2627a1c" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://c53058cff58ceeab69e6a464f6012918cd042d3ad6b0eb188d9592a215be465a" gracePeriod=30 Jan 20 17:19:54 crc kubenswrapper[4558]: I0120 17:19:54.465793 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="3d471859-e08c-4bb9-a9d1-881c4e93e522" containerName="nova-metadata-log" containerID="cri-o://5840b80abea983efd2133d7b64de3ad36a6e9dcd4f31037dae89ceaa601c7f3d" gracePeriod=30 Jan 20 17:19:54 crc kubenswrapper[4558]: I0120 17:19:54.465893 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="3d471859-e08c-4bb9-a9d1-881c4e93e522" containerName="nova-metadata-metadata" containerID="cri-o://d49e97643ec6cc17aabe7ee763f2d9296d2cf5adab487b1d211d979327e0ea1d" gracePeriod=30 Jan 20 17:19:54 crc kubenswrapper[4558]: E0120 17:19:54.987469 4558 kubelet_node_status.go:756] "Failed to set some node status fields" err="failed to validate nodeIP: route ip+net: no such network interface" node="crc" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.213348 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.217118 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.283650 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/921591a4-64f3-4761-ab6d-014fb2627a1c-combined-ca-bundle\") pod \"921591a4-64f3-4761-ab6d-014fb2627a1c\" (UID: \"921591a4-64f3-4761-ab6d-014fb2627a1c\") " Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.283825 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8rsjc\" (UniqueName: \"kubernetes.io/projected/921591a4-64f3-4761-ab6d-014fb2627a1c-kube-api-access-8rsjc\") pod \"921591a4-64f3-4761-ab6d-014fb2627a1c\" (UID: \"921591a4-64f3-4761-ab6d-014fb2627a1c\") " Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.283850 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/921591a4-64f3-4761-ab6d-014fb2627a1c-config-data\") pod \"921591a4-64f3-4761-ab6d-014fb2627a1c\" (UID: \"921591a4-64f3-4761-ab6d-014fb2627a1c\") " Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.283888 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d471859-e08c-4bb9-a9d1-881c4e93e522-config-data\") pod \"3d471859-e08c-4bb9-a9d1-881c4e93e522\" (UID: \"3d471859-e08c-4bb9-a9d1-881c4e93e522\") " Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.284014 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d471859-e08c-4bb9-a9d1-881c4e93e522-combined-ca-bundle\") pod \"3d471859-e08c-4bb9-a9d1-881c4e93e522\" (UID: \"3d471859-e08c-4bb9-a9d1-881c4e93e522\") " Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.284087 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zvxp8\" (UniqueName: \"kubernetes.io/projected/3d471859-e08c-4bb9-a9d1-881c4e93e522-kube-api-access-zvxp8\") pod \"3d471859-e08c-4bb9-a9d1-881c4e93e522\" (UID: \"3d471859-e08c-4bb9-a9d1-881c4e93e522\") " Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.284117 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3d471859-e08c-4bb9-a9d1-881c4e93e522-logs\") pod \"3d471859-e08c-4bb9-a9d1-881c4e93e522\" (UID: \"3d471859-e08c-4bb9-a9d1-881c4e93e522\") " Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.284617 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3d471859-e08c-4bb9-a9d1-881c4e93e522-logs" (OuterVolumeSpecName: "logs") pod "3d471859-e08c-4bb9-a9d1-881c4e93e522" (UID: "3d471859-e08c-4bb9-a9d1-881c4e93e522"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.290331 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/921591a4-64f3-4761-ab6d-014fb2627a1c-kube-api-access-8rsjc" (OuterVolumeSpecName: "kube-api-access-8rsjc") pod "921591a4-64f3-4761-ab6d-014fb2627a1c" (UID: "921591a4-64f3-4761-ab6d-014fb2627a1c"). InnerVolumeSpecName "kube-api-access-8rsjc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.290521 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3d471859-e08c-4bb9-a9d1-881c4e93e522-kube-api-access-zvxp8" (OuterVolumeSpecName: "kube-api-access-zvxp8") pod "3d471859-e08c-4bb9-a9d1-881c4e93e522" (UID: "3d471859-e08c-4bb9-a9d1-881c4e93e522"). InnerVolumeSpecName "kube-api-access-zvxp8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.310646 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/921591a4-64f3-4761-ab6d-014fb2627a1c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "921591a4-64f3-4761-ab6d-014fb2627a1c" (UID: "921591a4-64f3-4761-ab6d-014fb2627a1c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.311900 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3d471859-e08c-4bb9-a9d1-881c4e93e522-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3d471859-e08c-4bb9-a9d1-881c4e93e522" (UID: "3d471859-e08c-4bb9-a9d1-881c4e93e522"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.313111 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3d471859-e08c-4bb9-a9d1-881c4e93e522-config-data" (OuterVolumeSpecName: "config-data") pod "3d471859-e08c-4bb9-a9d1-881c4e93e522" (UID: "3d471859-e08c-4bb9-a9d1-881c4e93e522"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.313192 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/921591a4-64f3-4761-ab6d-014fb2627a1c-config-data" (OuterVolumeSpecName: "config-data") pod "921591a4-64f3-4761-ab6d-014fb2627a1c" (UID: "921591a4-64f3-4761-ab6d-014fb2627a1c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.388785 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d471859-e08c-4bb9-a9d1-881c4e93e522-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.388824 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zvxp8\" (UniqueName: \"kubernetes.io/projected/3d471859-e08c-4bb9-a9d1-881c4e93e522-kube-api-access-zvxp8\") on node \"crc\" DevicePath \"\"" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.388841 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3d471859-e08c-4bb9-a9d1-881c4e93e522-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.388855 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/921591a4-64f3-4761-ab6d-014fb2627a1c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.388867 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8rsjc\" (UniqueName: \"kubernetes.io/projected/921591a4-64f3-4761-ab6d-014fb2627a1c-kube-api-access-8rsjc\") on node \"crc\" DevicePath \"\"" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.388877 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/921591a4-64f3-4761-ab6d-014fb2627a1c-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.388888 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d471859-e08c-4bb9-a9d1-881c4e93e522-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.477046 4558 generic.go:334] "Generic (PLEG): container finished" podID="921591a4-64f3-4761-ab6d-014fb2627a1c" containerID="c53058cff58ceeab69e6a464f6012918cd042d3ad6b0eb188d9592a215be465a" exitCode=0 Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.477101 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"921591a4-64f3-4761-ab6d-014fb2627a1c","Type":"ContainerDied","Data":"c53058cff58ceeab69e6a464f6012918cd042d3ad6b0eb188d9592a215be465a"} Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.477152 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"921591a4-64f3-4761-ab6d-014fb2627a1c","Type":"ContainerDied","Data":"21f3353369b55182667c0e4ef5c8635f68a5b34bd9991d58744749f1ddea5f3d"} Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.477184 4558 scope.go:117] "RemoveContainer" containerID="c53058cff58ceeab69e6a464f6012918cd042d3ad6b0eb188d9592a215be465a" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.477949 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.479872 4558 generic.go:334] "Generic (PLEG): container finished" podID="3d471859-e08c-4bb9-a9d1-881c4e93e522" containerID="d49e97643ec6cc17aabe7ee763f2d9296d2cf5adab487b1d211d979327e0ea1d" exitCode=0 Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.479905 4558 generic.go:334] "Generic (PLEG): container finished" podID="3d471859-e08c-4bb9-a9d1-881c4e93e522" containerID="5840b80abea983efd2133d7b64de3ad36a6e9dcd4f31037dae89ceaa601c7f3d" exitCode=143 Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.479932 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.479931 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"3d471859-e08c-4bb9-a9d1-881c4e93e522","Type":"ContainerDied","Data":"d49e97643ec6cc17aabe7ee763f2d9296d2cf5adab487b1d211d979327e0ea1d"} Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.480069 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"3d471859-e08c-4bb9-a9d1-881c4e93e522","Type":"ContainerDied","Data":"5840b80abea983efd2133d7b64de3ad36a6e9dcd4f31037dae89ceaa601c7f3d"} Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.480100 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"3d471859-e08c-4bb9-a9d1-881c4e93e522","Type":"ContainerDied","Data":"22d7c35f64b2305444c01157be0f57eef9da579c5c7f9f07bcd2d7695cf122f8"} Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.499408 4558 scope.go:117] "RemoveContainer" containerID="c53058cff58ceeab69e6a464f6012918cd042d3ad6b0eb188d9592a215be465a" Jan 20 17:19:55 crc kubenswrapper[4558]: E0120 17:19:55.501227 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c53058cff58ceeab69e6a464f6012918cd042d3ad6b0eb188d9592a215be465a\": container with ID starting with c53058cff58ceeab69e6a464f6012918cd042d3ad6b0eb188d9592a215be465a not found: ID does not exist" containerID="c53058cff58ceeab69e6a464f6012918cd042d3ad6b0eb188d9592a215be465a" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.505356 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c53058cff58ceeab69e6a464f6012918cd042d3ad6b0eb188d9592a215be465a"} err="failed to get container status \"c53058cff58ceeab69e6a464f6012918cd042d3ad6b0eb188d9592a215be465a\": rpc error: code = NotFound desc = could not find container \"c53058cff58ceeab69e6a464f6012918cd042d3ad6b0eb188d9592a215be465a\": container with ID starting with c53058cff58ceeab69e6a464f6012918cd042d3ad6b0eb188d9592a215be465a not found: ID does not exist" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.505474 4558 scope.go:117] "RemoveContainer" containerID="d49e97643ec6cc17aabe7ee763f2d9296d2cf5adab487b1d211d979327e0ea1d" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.512522 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.519598 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.525835 4558 
kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.531090 4558 scope.go:117] "RemoveContainer" containerID="5840b80abea983efd2133d7b64de3ad36a6e9dcd4f31037dae89ceaa601c7f3d" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.536923 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.560609 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:19:55 crc kubenswrapper[4558]: E0120 17:19:55.561129 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d471859-e08c-4bb9-a9d1-881c4e93e522" containerName="nova-metadata-metadata" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.561150 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d471859-e08c-4bb9-a9d1-881c4e93e522" containerName="nova-metadata-metadata" Jan 20 17:19:55 crc kubenswrapper[4558]: E0120 17:19:55.561246 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="921591a4-64f3-4761-ab6d-014fb2627a1c" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.561261 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="921591a4-64f3-4761-ab6d-014fb2627a1c" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:19:55 crc kubenswrapper[4558]: E0120 17:19:55.561275 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d471859-e08c-4bb9-a9d1-881c4e93e522" containerName="nova-metadata-log" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.561282 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d471859-e08c-4bb9-a9d1-881c4e93e522" containerName="nova-metadata-log" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.561482 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d471859-e08c-4bb9-a9d1-881c4e93e522" containerName="nova-metadata-metadata" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.561501 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d471859-e08c-4bb9-a9d1-881c4e93e522" containerName="nova-metadata-log" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.561517 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="921591a4-64f3-4761-ab6d-014fb2627a1c" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.562289 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.569626 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.570589 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-novncproxy-config-data" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.570651 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-novncproxy-cell1-public-svc" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.571313 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-novncproxy-cell1-vencrypt" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.590952 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.597819 4558 scope.go:117] "RemoveContainer" containerID="d49e97643ec6cc17aabe7ee763f2d9296d2cf5adab487b1d211d979327e0ea1d" Jan 20 17:19:55 crc kubenswrapper[4558]: E0120 17:19:55.604722 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d49e97643ec6cc17aabe7ee763f2d9296d2cf5adab487b1d211d979327e0ea1d\": container with ID starting with d49e97643ec6cc17aabe7ee763f2d9296d2cf5adab487b1d211d979327e0ea1d not found: ID does not exist" containerID="d49e97643ec6cc17aabe7ee763f2d9296d2cf5adab487b1d211d979327e0ea1d" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.604769 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d49e97643ec6cc17aabe7ee763f2d9296d2cf5adab487b1d211d979327e0ea1d"} err="failed to get container status \"d49e97643ec6cc17aabe7ee763f2d9296d2cf5adab487b1d211d979327e0ea1d\": rpc error: code = NotFound desc = could not find container \"d49e97643ec6cc17aabe7ee763f2d9296d2cf5adab487b1d211d979327e0ea1d\": container with ID starting with d49e97643ec6cc17aabe7ee763f2d9296d2cf5adab487b1d211d979327e0ea1d not found: ID does not exist" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.604799 4558 scope.go:117] "RemoveContainer" containerID="5840b80abea983efd2133d7b64de3ad36a6e9dcd4f31037dae89ceaa601c7f3d" Jan 20 17:19:55 crc kubenswrapper[4558]: E0120 17:19:55.605352 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5840b80abea983efd2133d7b64de3ad36a6e9dcd4f31037dae89ceaa601c7f3d\": container with ID starting with 5840b80abea983efd2133d7b64de3ad36a6e9dcd4f31037dae89ceaa601c7f3d not found: ID does not exist" containerID="5840b80abea983efd2133d7b64de3ad36a6e9dcd4f31037dae89ceaa601c7f3d" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.605383 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5840b80abea983efd2133d7b64de3ad36a6e9dcd4f31037dae89ceaa601c7f3d"} err="failed to get container status \"5840b80abea983efd2133d7b64de3ad36a6e9dcd4f31037dae89ceaa601c7f3d\": rpc error: code = NotFound desc = could not find container \"5840b80abea983efd2133d7b64de3ad36a6e9dcd4f31037dae89ceaa601c7f3d\": container with ID starting with 5840b80abea983efd2133d7b64de3ad36a6e9dcd4f31037dae89ceaa601c7f3d not found: ID does not exist" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.605400 4558 
scope.go:117] "RemoveContainer" containerID="d49e97643ec6cc17aabe7ee763f2d9296d2cf5adab487b1d211d979327e0ea1d" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.605847 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d49e97643ec6cc17aabe7ee763f2d9296d2cf5adab487b1d211d979327e0ea1d"} err="failed to get container status \"d49e97643ec6cc17aabe7ee763f2d9296d2cf5adab487b1d211d979327e0ea1d\": rpc error: code = NotFound desc = could not find container \"d49e97643ec6cc17aabe7ee763f2d9296d2cf5adab487b1d211d979327e0ea1d\": container with ID starting with d49e97643ec6cc17aabe7ee763f2d9296d2cf5adab487b1d211d979327e0ea1d not found: ID does not exist" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.605897 4558 scope.go:117] "RemoveContainer" containerID="5840b80abea983efd2133d7b64de3ad36a6e9dcd4f31037dae89ceaa601c7f3d" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.607849 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5840b80abea983efd2133d7b64de3ad36a6e9dcd4f31037dae89ceaa601c7f3d"} err="failed to get container status \"5840b80abea983efd2133d7b64de3ad36a6e9dcd4f31037dae89ceaa601c7f3d\": rpc error: code = NotFound desc = could not find container \"5840b80abea983efd2133d7b64de3ad36a6e9dcd4f31037dae89ceaa601c7f3d\": container with ID starting with 5840b80abea983efd2133d7b64de3ad36a6e9dcd4f31037dae89ceaa601c7f3d not found: ID does not exist" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.614599 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.617378 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.621786 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-metadata-config-data" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.623610 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-metadata-internal-svc" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.634534 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.705759 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64462382-0fd6-4796-8079-d66c2bc27d95-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"64462382-0fd6-4796-8079-d66c2bc27d95\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.705828 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9435883-4191-45a5-8b34-46d84d9c5fed-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"c9435883-4191-45a5-8b34-46d84d9c5fed\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.705861 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/64462382-0fd6-4796-8079-d66c2bc27d95-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"64462382-0fd6-4796-8079-d66c2bc27d95\") " 
pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.706620 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64462382-0fd6-4796-8079-d66c2bc27d95-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"64462382-0fd6-4796-8079-d66c2bc27d95\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.706683 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/c9435883-4191-45a5-8b34-46d84d9c5fed-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"c9435883-4191-45a5-8b34-46d84d9c5fed\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.706713 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c9435883-4191-45a5-8b34-46d84d9c5fed-logs\") pod \"nova-metadata-0\" (UID: \"c9435883-4191-45a5-8b34-46d84d9c5fed\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.706785 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9435883-4191-45a5-8b34-46d84d9c5fed-config-data\") pod \"nova-metadata-0\" (UID: \"c9435883-4191-45a5-8b34-46d84d9c5fed\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.706908 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nz9lt\" (UniqueName: \"kubernetes.io/projected/64462382-0fd6-4796-8079-d66c2bc27d95-kube-api-access-nz9lt\") pod \"nova-cell1-novncproxy-0\" (UID: \"64462382-0fd6-4796-8079-d66c2bc27d95\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.706960 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/64462382-0fd6-4796-8079-d66c2bc27d95-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"64462382-0fd6-4796-8079-d66c2bc27d95\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.707015 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dmkbf\" (UniqueName: \"kubernetes.io/projected/c9435883-4191-45a5-8b34-46d84d9c5fed-kube-api-access-dmkbf\") pod \"nova-metadata-0\" (UID: \"c9435883-4191-45a5-8b34-46d84d9c5fed\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.808186 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dmkbf\" (UniqueName: \"kubernetes.io/projected/c9435883-4191-45a5-8b34-46d84d9c5fed-kube-api-access-dmkbf\") pod \"nova-metadata-0\" (UID: \"c9435883-4191-45a5-8b34-46d84d9c5fed\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.808283 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64462382-0fd6-4796-8079-d66c2bc27d95-config-data\") pod \"nova-cell1-novncproxy-0\" 
(UID: \"64462382-0fd6-4796-8079-d66c2bc27d95\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.808321 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9435883-4191-45a5-8b34-46d84d9c5fed-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"c9435883-4191-45a5-8b34-46d84d9c5fed\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.808347 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/64462382-0fd6-4796-8079-d66c2bc27d95-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"64462382-0fd6-4796-8079-d66c2bc27d95\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.808406 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64462382-0fd6-4796-8079-d66c2bc27d95-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"64462382-0fd6-4796-8079-d66c2bc27d95\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.808434 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/c9435883-4191-45a5-8b34-46d84d9c5fed-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"c9435883-4191-45a5-8b34-46d84d9c5fed\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.808460 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c9435883-4191-45a5-8b34-46d84d9c5fed-logs\") pod \"nova-metadata-0\" (UID: \"c9435883-4191-45a5-8b34-46d84d9c5fed\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.808488 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9435883-4191-45a5-8b34-46d84d9c5fed-config-data\") pod \"nova-metadata-0\" (UID: \"c9435883-4191-45a5-8b34-46d84d9c5fed\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.808516 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nz9lt\" (UniqueName: \"kubernetes.io/projected/64462382-0fd6-4796-8079-d66c2bc27d95-kube-api-access-nz9lt\") pod \"nova-cell1-novncproxy-0\" (UID: \"64462382-0fd6-4796-8079-d66c2bc27d95\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.808570 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/64462382-0fd6-4796-8079-d66c2bc27d95-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"64462382-0fd6-4796-8079-d66c2bc27d95\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.809120 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c9435883-4191-45a5-8b34-46d84d9c5fed-logs\") pod \"nova-metadata-0\" (UID: \"c9435883-4191-45a5-8b34-46d84d9c5fed\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 
17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.813886 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/64462382-0fd6-4796-8079-d66c2bc27d95-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"64462382-0fd6-4796-8079-d66c2bc27d95\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.815758 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64462382-0fd6-4796-8079-d66c2bc27d95-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"64462382-0fd6-4796-8079-d66c2bc27d95\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.816928 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64462382-0fd6-4796-8079-d66c2bc27d95-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"64462382-0fd6-4796-8079-d66c2bc27d95\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.817054 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9435883-4191-45a5-8b34-46d84d9c5fed-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"c9435883-4191-45a5-8b34-46d84d9c5fed\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.822674 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/64462382-0fd6-4796-8079-d66c2bc27d95-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"64462382-0fd6-4796-8079-d66c2bc27d95\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.824805 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/c9435883-4191-45a5-8b34-46d84d9c5fed-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"c9435883-4191-45a5-8b34-46d84d9c5fed\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.825638 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9435883-4191-45a5-8b34-46d84d9c5fed-config-data\") pod \"nova-metadata-0\" (UID: \"c9435883-4191-45a5-8b34-46d84d9c5fed\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.826051 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nz9lt\" (UniqueName: \"kubernetes.io/projected/64462382-0fd6-4796-8079-d66c2bc27d95-kube-api-access-nz9lt\") pod \"nova-cell1-novncproxy-0\" (UID: \"64462382-0fd6-4796-8079-d66c2bc27d95\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.828540 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dmkbf\" (UniqueName: \"kubernetes.io/projected/c9435883-4191-45a5-8b34-46d84d9c5fed-kube-api-access-dmkbf\") pod \"nova-metadata-0\" (UID: \"c9435883-4191-45a5-8b34-46d84d9c5fed\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.941138 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:19:55 crc kubenswrapper[4558]: I0120 17:19:55.947932 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:19:56 crc kubenswrapper[4558]: I0120 17:19:56.370879 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:19:56 crc kubenswrapper[4558]: I0120 17:19:56.444642 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:19:56 crc kubenswrapper[4558]: I0120 17:19:56.490676 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"c9435883-4191-45a5-8b34-46d84d9c5fed","Type":"ContainerStarted","Data":"b644432e0478dec82549110e0656ba2bc180b1dd78400030647027705acc54bf"} Jan 20 17:19:56 crc kubenswrapper[4558]: I0120 17:19:56.493632 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"64462382-0fd6-4796-8079-d66c2bc27d95","Type":"ContainerStarted","Data":"e4e63edeba8a080debb087c844f5f85560048679ab9a95850ea4ed2b9792ecc6"} Jan 20 17:19:56 crc kubenswrapper[4558]: I0120 17:19:56.586314 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3d471859-e08c-4bb9-a9d1-881c4e93e522" path="/var/lib/kubelet/pods/3d471859-e08c-4bb9-a9d1-881c4e93e522/volumes" Jan 20 17:19:56 crc kubenswrapper[4558]: I0120 17:19:56.587461 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="921591a4-64f3-4761-ab6d-014fb2627a1c" path="/var/lib/kubelet/pods/921591a4-64f3-4761-ab6d-014fb2627a1c/volumes" Jan 20 17:19:57 crc kubenswrapper[4558]: I0120 17:19:57.508276 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"c9435883-4191-45a5-8b34-46d84d9c5fed","Type":"ContainerStarted","Data":"015d9ef73c6661866790a96c36a7cd7e9eccd25020783afa973f87aac9594196"} Jan 20 17:19:57 crc kubenswrapper[4558]: I0120 17:19:57.508325 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"c9435883-4191-45a5-8b34-46d84d9c5fed","Type":"ContainerStarted","Data":"42a6810bbfb151e49186d7726c2a07e0ec30198f40340b3816d662c9ce88dd3c"} Jan 20 17:19:57 crc kubenswrapper[4558]: I0120 17:19:57.509670 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"64462382-0fd6-4796-8079-d66c2bc27d95","Type":"ContainerStarted","Data":"0a8bffbad678e5cea118776d64f4cc6254e390d1759d980ea03c2b9491caaaa2"} Jan 20 17:19:57 crc kubenswrapper[4558]: I0120 17:19:57.511112 4558 generic.go:334] "Generic (PLEG): container finished" podID="df5e4bdf-f36a-40cd-b3a0-968ffe61c5e7" containerID="3c17229250a08990f5717d8b2d81a48865a2701c19467dbee26edd7db7249611" exitCode=0 Jan 20 17:19:57 crc kubenswrapper[4558]: I0120 17:19:57.511156 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-pnr7q" event={"ID":"df5e4bdf-f36a-40cd-b3a0-968ffe61c5e7","Type":"ContainerDied","Data":"3c17229250a08990f5717d8b2d81a48865a2701c19467dbee26edd7db7249611"} Jan 20 17:19:57 crc kubenswrapper[4558]: I0120 17:19:57.532189 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-metadata-0" podStartSLOduration=2.532170866 podStartE2EDuration="2.532170866s" podCreationTimestamp="2026-01-20 17:19:55 
+0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:19:57.521689475 +0000 UTC m=+2291.282027442" watchObservedRunningTime="2026-01-20 17:19:57.532170866 +0000 UTC m=+2291.292508833" Jan 20 17:19:57 crc kubenswrapper[4558]: I0120 17:19:57.555979 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" podStartSLOduration=2.555961334 podStartE2EDuration="2.555961334s" podCreationTimestamp="2026-01-20 17:19:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:19:57.54803593 +0000 UTC m=+2291.308373898" watchObservedRunningTime="2026-01-20 17:19:57.555961334 +0000 UTC m=+2291.316299301" Jan 20 17:19:58 crc kubenswrapper[4558]: I0120 17:19:58.525337 4558 generic.go:334] "Generic (PLEG): container finished" podID="e2e3a805-bb9e-4e87-b57d-99fb395130bf" containerID="28a7103e4c02a6e13470eb63cb676ead463e061f88b33398c0d422e2998d9f49" exitCode=0 Jan 20 17:19:58 crc kubenswrapper[4558]: I0120 17:19:58.526299 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-tfgvg" event={"ID":"e2e3a805-bb9e-4e87-b57d-99fb395130bf","Type":"ContainerDied","Data":"28a7103e4c02a6e13470eb63cb676ead463e061f88b33398c0d422e2998d9f49"} Jan 20 17:19:58 crc kubenswrapper[4558]: I0120 17:19:58.805589 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-pnr7q" Jan 20 17:19:58 crc kubenswrapper[4558]: I0120 17:19:58.882558 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wg8br\" (UniqueName: \"kubernetes.io/projected/df5e4bdf-f36a-40cd-b3a0-968ffe61c5e7-kube-api-access-wg8br\") pod \"df5e4bdf-f36a-40cd-b3a0-968ffe61c5e7\" (UID: \"df5e4bdf-f36a-40cd-b3a0-968ffe61c5e7\") " Jan 20 17:19:58 crc kubenswrapper[4558]: I0120 17:19:58.882776 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df5e4bdf-f36a-40cd-b3a0-968ffe61c5e7-config-data\") pod \"df5e4bdf-f36a-40cd-b3a0-968ffe61c5e7\" (UID: \"df5e4bdf-f36a-40cd-b3a0-968ffe61c5e7\") " Jan 20 17:19:58 crc kubenswrapper[4558]: I0120 17:19:58.882833 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/df5e4bdf-f36a-40cd-b3a0-968ffe61c5e7-scripts\") pod \"df5e4bdf-f36a-40cd-b3a0-968ffe61c5e7\" (UID: \"df5e4bdf-f36a-40cd-b3a0-968ffe61c5e7\") " Jan 20 17:19:58 crc kubenswrapper[4558]: I0120 17:19:58.883092 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df5e4bdf-f36a-40cd-b3a0-968ffe61c5e7-combined-ca-bundle\") pod \"df5e4bdf-f36a-40cd-b3a0-968ffe61c5e7\" (UID: \"df5e4bdf-f36a-40cd-b3a0-968ffe61c5e7\") " Jan 20 17:19:58 crc kubenswrapper[4558]: I0120 17:19:58.889031 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/df5e4bdf-f36a-40cd-b3a0-968ffe61c5e7-kube-api-access-wg8br" (OuterVolumeSpecName: "kube-api-access-wg8br") pod "df5e4bdf-f36a-40cd-b3a0-968ffe61c5e7" (UID: "df5e4bdf-f36a-40cd-b3a0-968ffe61c5e7"). InnerVolumeSpecName "kube-api-access-wg8br". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:19:58 crc kubenswrapper[4558]: I0120 17:19:58.889096 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/df5e4bdf-f36a-40cd-b3a0-968ffe61c5e7-scripts" (OuterVolumeSpecName: "scripts") pod "df5e4bdf-f36a-40cd-b3a0-968ffe61c5e7" (UID: "df5e4bdf-f36a-40cd-b3a0-968ffe61c5e7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:19:58 crc kubenswrapper[4558]: I0120 17:19:58.912509 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/df5e4bdf-f36a-40cd-b3a0-968ffe61c5e7-config-data" (OuterVolumeSpecName: "config-data") pod "df5e4bdf-f36a-40cd-b3a0-968ffe61c5e7" (UID: "df5e4bdf-f36a-40cd-b3a0-968ffe61c5e7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:19:58 crc kubenswrapper[4558]: I0120 17:19:58.913062 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/df5e4bdf-f36a-40cd-b3a0-968ffe61c5e7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "df5e4bdf-f36a-40cd-b3a0-968ffe61c5e7" (UID: "df5e4bdf-f36a-40cd-b3a0-968ffe61c5e7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:19:58 crc kubenswrapper[4558]: I0120 17:19:58.987262 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df5e4bdf-f36a-40cd-b3a0-968ffe61c5e7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:19:58 crc kubenswrapper[4558]: I0120 17:19:58.987301 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wg8br\" (UniqueName: \"kubernetes.io/projected/df5e4bdf-f36a-40cd-b3a0-968ffe61c5e7-kube-api-access-wg8br\") on node \"crc\" DevicePath \"\"" Jan 20 17:19:58 crc kubenswrapper[4558]: I0120 17:19:58.987316 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df5e4bdf-f36a-40cd-b3a0-968ffe61c5e7-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:19:58 crc kubenswrapper[4558]: I0120 17:19:58.987328 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/df5e4bdf-f36a-40cd-b3a0-968ffe61c5e7-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:19:59 crc kubenswrapper[4558]: I0120 17:19:59.540656 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-pnr7q" event={"ID":"df5e4bdf-f36a-40cd-b3a0-968ffe61c5e7","Type":"ContainerDied","Data":"69007e5c5ef70529078fd7501af1c9b9a3430c16eea1ffa608c325c209549e0f"} Jan 20 17:19:59 crc kubenswrapper[4558]: I0120 17:19:59.540733 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="69007e5c5ef70529078fd7501af1c9b9a3430c16eea1ffa608c325c209549e0f" Jan 20 17:19:59 crc kubenswrapper[4558]: I0120 17:19:59.540741 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-pnr7q" Jan 20 17:19:59 crc kubenswrapper[4558]: I0120 17:19:59.626470 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:19:59 crc kubenswrapper[4558]: E0120 17:19:59.626956 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df5e4bdf-f36a-40cd-b3a0-968ffe61c5e7" containerName="nova-cell1-conductor-db-sync" Jan 20 17:19:59 crc kubenswrapper[4558]: I0120 17:19:59.626975 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="df5e4bdf-f36a-40cd-b3a0-968ffe61c5e7" containerName="nova-cell1-conductor-db-sync" Jan 20 17:19:59 crc kubenswrapper[4558]: I0120 17:19:59.627246 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="df5e4bdf-f36a-40cd-b3a0-968ffe61c5e7" containerName="nova-cell1-conductor-db-sync" Jan 20 17:19:59 crc kubenswrapper[4558]: I0120 17:19:59.627963 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:19:59 crc kubenswrapper[4558]: I0120 17:19:59.629965 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-conductor-config-data" Jan 20 17:19:59 crc kubenswrapper[4558]: I0120 17:19:59.634564 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:19:59 crc kubenswrapper[4558]: I0120 17:19:59.700154 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/477f15ac-57ae-4bb2-9230-2395ff3bf9ad-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"477f15ac-57ae-4bb2-9230-2395ff3bf9ad\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:19:59 crc kubenswrapper[4558]: I0120 17:19:59.700579 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-thw4m\" (UniqueName: \"kubernetes.io/projected/477f15ac-57ae-4bb2-9230-2395ff3bf9ad-kube-api-access-thw4m\") pod \"nova-cell1-conductor-0\" (UID: \"477f15ac-57ae-4bb2-9230-2395ff3bf9ad\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:19:59 crc kubenswrapper[4558]: I0120 17:19:59.700654 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/477f15ac-57ae-4bb2-9230-2395ff3bf9ad-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"477f15ac-57ae-4bb2-9230-2395ff3bf9ad\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:19:59 crc kubenswrapper[4558]: I0120 17:19:59.802081 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-thw4m\" (UniqueName: \"kubernetes.io/projected/477f15ac-57ae-4bb2-9230-2395ff3bf9ad-kube-api-access-thw4m\") pod \"nova-cell1-conductor-0\" (UID: \"477f15ac-57ae-4bb2-9230-2395ff3bf9ad\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:19:59 crc kubenswrapper[4558]: I0120 17:19:59.802178 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/477f15ac-57ae-4bb2-9230-2395ff3bf9ad-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"477f15ac-57ae-4bb2-9230-2395ff3bf9ad\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:19:59 crc kubenswrapper[4558]: I0120 17:19:59.802305 4558 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/477f15ac-57ae-4bb2-9230-2395ff3bf9ad-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"477f15ac-57ae-4bb2-9230-2395ff3bf9ad\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:19:59 crc kubenswrapper[4558]: I0120 17:19:59.807573 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/477f15ac-57ae-4bb2-9230-2395ff3bf9ad-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"477f15ac-57ae-4bb2-9230-2395ff3bf9ad\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:19:59 crc kubenswrapper[4558]: I0120 17:19:59.810277 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/477f15ac-57ae-4bb2-9230-2395ff3bf9ad-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"477f15ac-57ae-4bb2-9230-2395ff3bf9ad\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:19:59 crc kubenswrapper[4558]: I0120 17:19:59.819938 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-thw4m\" (UniqueName: \"kubernetes.io/projected/477f15ac-57ae-4bb2-9230-2395ff3bf9ad-kube-api-access-thw4m\") pod \"nova-cell1-conductor-0\" (UID: \"477f15ac-57ae-4bb2-9230-2395ff3bf9ad\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:19:59 crc kubenswrapper[4558]: I0120 17:19:59.902572 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-tfgvg" Jan 20 17:19:59 crc kubenswrapper[4558]: I0120 17:19:59.949924 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:20:00 crc kubenswrapper[4558]: I0120 17:20:00.005841 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zv4m6\" (UniqueName: \"kubernetes.io/projected/e2e3a805-bb9e-4e87-b57d-99fb395130bf-kube-api-access-zv4m6\") pod \"e2e3a805-bb9e-4e87-b57d-99fb395130bf\" (UID: \"e2e3a805-bb9e-4e87-b57d-99fb395130bf\") " Jan 20 17:20:00 crc kubenswrapper[4558]: I0120 17:20:00.005910 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2e3a805-bb9e-4e87-b57d-99fb395130bf-config-data\") pod \"e2e3a805-bb9e-4e87-b57d-99fb395130bf\" (UID: \"e2e3a805-bb9e-4e87-b57d-99fb395130bf\") " Jan 20 17:20:00 crc kubenswrapper[4558]: I0120 17:20:00.006038 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e2e3a805-bb9e-4e87-b57d-99fb395130bf-scripts\") pod \"e2e3a805-bb9e-4e87-b57d-99fb395130bf\" (UID: \"e2e3a805-bb9e-4e87-b57d-99fb395130bf\") " Jan 20 17:20:00 crc kubenswrapper[4558]: I0120 17:20:00.006071 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2e3a805-bb9e-4e87-b57d-99fb395130bf-combined-ca-bundle\") pod \"e2e3a805-bb9e-4e87-b57d-99fb395130bf\" (UID: \"e2e3a805-bb9e-4e87-b57d-99fb395130bf\") " Jan 20 17:20:00 crc kubenswrapper[4558]: I0120 17:20:00.010738 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e2e3a805-bb9e-4e87-b57d-99fb395130bf-scripts" (OuterVolumeSpecName: "scripts") pod "e2e3a805-bb9e-4e87-b57d-99fb395130bf" (UID: "e2e3a805-bb9e-4e87-b57d-99fb395130bf"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:20:00 crc kubenswrapper[4558]: I0120 17:20:00.010856 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e2e3a805-bb9e-4e87-b57d-99fb395130bf-kube-api-access-zv4m6" (OuterVolumeSpecName: "kube-api-access-zv4m6") pod "e2e3a805-bb9e-4e87-b57d-99fb395130bf" (UID: "e2e3a805-bb9e-4e87-b57d-99fb395130bf"). InnerVolumeSpecName "kube-api-access-zv4m6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:20:00 crc kubenswrapper[4558]: I0120 17:20:00.031693 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e2e3a805-bb9e-4e87-b57d-99fb395130bf-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e2e3a805-bb9e-4e87-b57d-99fb395130bf" (UID: "e2e3a805-bb9e-4e87-b57d-99fb395130bf"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:20:00 crc kubenswrapper[4558]: I0120 17:20:00.051448 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e2e3a805-bb9e-4e87-b57d-99fb395130bf-config-data" (OuterVolumeSpecName: "config-data") pod "e2e3a805-bb9e-4e87-b57d-99fb395130bf" (UID: "e2e3a805-bb9e-4e87-b57d-99fb395130bf"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:20:00 crc kubenswrapper[4558]: I0120 17:20:00.108781 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e2e3a805-bb9e-4e87-b57d-99fb395130bf-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:20:00 crc kubenswrapper[4558]: I0120 17:20:00.109040 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2e3a805-bb9e-4e87-b57d-99fb395130bf-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:20:00 crc kubenswrapper[4558]: I0120 17:20:00.109056 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zv4m6\" (UniqueName: \"kubernetes.io/projected/e2e3a805-bb9e-4e87-b57d-99fb395130bf-kube-api-access-zv4m6\") on node \"crc\" DevicePath \"\"" Jan 20 17:20:00 crc kubenswrapper[4558]: I0120 17:20:00.109066 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2e3a805-bb9e-4e87-b57d-99fb395130bf-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:20:00 crc kubenswrapper[4558]: I0120 17:20:00.374047 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:20:00 crc kubenswrapper[4558]: I0120 17:20:00.418694 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:20:00 crc kubenswrapper[4558]: I0120 17:20:00.418765 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:20:00 crc kubenswrapper[4558]: I0120 17:20:00.560423 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-tfgvg" event={"ID":"e2e3a805-bb9e-4e87-b57d-99fb395130bf","Type":"ContainerDied","Data":"a35fb9e42d2619cfd723ce69b07ca7892459b2bffb3b0fbe8a3b63b03954b245"} Jan 20 17:20:00 crc kubenswrapper[4558]: I0120 17:20:00.560469 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a35fb9e42d2619cfd723ce69b07ca7892459b2bffb3b0fbe8a3b63b03954b245" Jan 20 17:20:00 crc kubenswrapper[4558]: I0120 17:20:00.560548 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-tfgvg" Jan 20 17:20:00 crc kubenswrapper[4558]: I0120 17:20:00.594425 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:20:00 crc kubenswrapper[4558]: I0120 17:20:00.594465 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"477f15ac-57ae-4bb2-9230-2395ff3bf9ad","Type":"ContainerStarted","Data":"cd9412be70441b3487a5c8e313f30e1490c914b1b61e47d06c5f1b8c6448e338"} Jan 20 17:20:00 crc kubenswrapper[4558]: I0120 17:20:00.594496 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:20:00 crc kubenswrapper[4558]: I0120 17:20:00.594509 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"477f15ac-57ae-4bb2-9230-2395ff3bf9ad","Type":"ContainerStarted","Data":"8348e1fd285061bbf538f64bcac88a4065f2ab9a3412701ea2f38618f348bb5d"} Jan 20 17:20:00 crc kubenswrapper[4558]: I0120 17:20:00.608471 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podStartSLOduration=1.608442527 podStartE2EDuration="1.608442527s" podCreationTimestamp="2026-01-20 17:19:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:20:00.595019455 +0000 UTC m=+2294.355357422" watchObservedRunningTime="2026-01-20 17:20:00.608442527 +0000 UTC m=+2294.368780494" Jan 20 17:20:00 crc kubenswrapper[4558]: I0120 17:20:00.613700 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:20:00 crc kubenswrapper[4558]: I0120 17:20:00.728814 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:20:00 crc kubenswrapper[4558]: I0120 17:20:00.729191 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="49f5b91e-2110-4750-b1ca-5dbcf2df3f18" containerName="nova-api-log" containerID="cri-o://4d4114d53817aa83a519886615769c5abf77ffb13ff027228a58bde10719d179" gracePeriod=30 Jan 20 17:20:00 crc kubenswrapper[4558]: I0120 17:20:00.729416 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="49f5b91e-2110-4750-b1ca-5dbcf2df3f18" containerName="nova-api-api" containerID="cri-o://467965ce8f331c5d889ed685c001186deef46149b751fc2d12d3b544f29a577d" gracePeriod=30 Jan 20 17:20:00 crc kubenswrapper[4558]: I0120 17:20:00.743567 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" podUID="49f5b91e-2110-4750-b1ca-5dbcf2df3f18" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.196:8774/\": EOF" Jan 20 17:20:00 crc kubenswrapper[4558]: I0120 17:20:00.746271 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:20:00 crc kubenswrapper[4558]: I0120 17:20:00.754290 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" podUID="49f5b91e-2110-4750-b1ca-5dbcf2df3f18" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.196:8774/\": EOF" Jan 20 17:20:00 crc kubenswrapper[4558]: I0120 17:20:00.754350 4558 
kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:20:00 crc kubenswrapper[4558]: I0120 17:20:00.754826 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="c9435883-4191-45a5-8b34-46d84d9c5fed" containerName="nova-metadata-log" containerID="cri-o://42a6810bbfb151e49186d7726c2a07e0ec30198f40340b3816d662c9ce88dd3c" gracePeriod=30 Jan 20 17:20:00 crc kubenswrapper[4558]: I0120 17:20:00.755297 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="c9435883-4191-45a5-8b34-46d84d9c5fed" containerName="nova-metadata-metadata" containerID="cri-o://015d9ef73c6661866790a96c36a7cd7e9eccd25020783afa973f87aac9594196" gracePeriod=30 Jan 20 17:20:00 crc kubenswrapper[4558]: I0120 17:20:00.942858 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:20:00 crc kubenswrapper[4558]: I0120 17:20:00.949394 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:20:00 crc kubenswrapper[4558]: I0120 17:20:00.949451 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:20:01 crc kubenswrapper[4558]: I0120 17:20:01.283495 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:20:01 crc kubenswrapper[4558]: I0120 17:20:01.335091 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dmkbf\" (UniqueName: \"kubernetes.io/projected/c9435883-4191-45a5-8b34-46d84d9c5fed-kube-api-access-dmkbf\") pod \"c9435883-4191-45a5-8b34-46d84d9c5fed\" (UID: \"c9435883-4191-45a5-8b34-46d84d9c5fed\") " Jan 20 17:20:01 crc kubenswrapper[4558]: I0120 17:20:01.335175 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c9435883-4191-45a5-8b34-46d84d9c5fed-logs\") pod \"c9435883-4191-45a5-8b34-46d84d9c5fed\" (UID: \"c9435883-4191-45a5-8b34-46d84d9c5fed\") " Jan 20 17:20:01 crc kubenswrapper[4558]: I0120 17:20:01.335254 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/c9435883-4191-45a5-8b34-46d84d9c5fed-nova-metadata-tls-certs\") pod \"c9435883-4191-45a5-8b34-46d84d9c5fed\" (UID: \"c9435883-4191-45a5-8b34-46d84d9c5fed\") " Jan 20 17:20:01 crc kubenswrapper[4558]: I0120 17:20:01.335287 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9435883-4191-45a5-8b34-46d84d9c5fed-combined-ca-bundle\") pod \"c9435883-4191-45a5-8b34-46d84d9c5fed\" (UID: \"c9435883-4191-45a5-8b34-46d84d9c5fed\") " Jan 20 17:20:01 crc kubenswrapper[4558]: I0120 17:20:01.335374 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9435883-4191-45a5-8b34-46d84d9c5fed-config-data\") pod \"c9435883-4191-45a5-8b34-46d84d9c5fed\" (UID: \"c9435883-4191-45a5-8b34-46d84d9c5fed\") " Jan 20 17:20:01 crc kubenswrapper[4558]: I0120 17:20:01.335520 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c9435883-4191-45a5-8b34-46d84d9c5fed-logs" 
(OuterVolumeSpecName: "logs") pod "c9435883-4191-45a5-8b34-46d84d9c5fed" (UID: "c9435883-4191-45a5-8b34-46d84d9c5fed"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:20:01 crc kubenswrapper[4558]: I0120 17:20:01.336183 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c9435883-4191-45a5-8b34-46d84d9c5fed-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:20:01 crc kubenswrapper[4558]: I0120 17:20:01.343515 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c9435883-4191-45a5-8b34-46d84d9c5fed-kube-api-access-dmkbf" (OuterVolumeSpecName: "kube-api-access-dmkbf") pod "c9435883-4191-45a5-8b34-46d84d9c5fed" (UID: "c9435883-4191-45a5-8b34-46d84d9c5fed"). InnerVolumeSpecName "kube-api-access-dmkbf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:20:01 crc kubenswrapper[4558]: I0120 17:20:01.364692 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c9435883-4191-45a5-8b34-46d84d9c5fed-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c9435883-4191-45a5-8b34-46d84d9c5fed" (UID: "c9435883-4191-45a5-8b34-46d84d9c5fed"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:20:01 crc kubenswrapper[4558]: I0120 17:20:01.366425 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c9435883-4191-45a5-8b34-46d84d9c5fed-config-data" (OuterVolumeSpecName: "config-data") pod "c9435883-4191-45a5-8b34-46d84d9c5fed" (UID: "c9435883-4191-45a5-8b34-46d84d9c5fed"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:20:01 crc kubenswrapper[4558]: I0120 17:20:01.382939 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c9435883-4191-45a5-8b34-46d84d9c5fed-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "c9435883-4191-45a5-8b34-46d84d9c5fed" (UID: "c9435883-4191-45a5-8b34-46d84d9c5fed"). InnerVolumeSpecName "nova-metadata-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:20:01 crc kubenswrapper[4558]: I0120 17:20:01.448615 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9435883-4191-45a5-8b34-46d84d9c5fed-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:20:01 crc kubenswrapper[4558]: I0120 17:20:01.448646 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dmkbf\" (UniqueName: \"kubernetes.io/projected/c9435883-4191-45a5-8b34-46d84d9c5fed-kube-api-access-dmkbf\") on node \"crc\" DevicePath \"\"" Jan 20 17:20:01 crc kubenswrapper[4558]: I0120 17:20:01.448658 4558 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/c9435883-4191-45a5-8b34-46d84d9c5fed-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:20:01 crc kubenswrapper[4558]: I0120 17:20:01.448668 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9435883-4191-45a5-8b34-46d84d9c5fed-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:20:01 crc kubenswrapper[4558]: I0120 17:20:01.585604 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"c9435883-4191-45a5-8b34-46d84d9c5fed","Type":"ContainerDied","Data":"015d9ef73c6661866790a96c36a7cd7e9eccd25020783afa973f87aac9594196"} Jan 20 17:20:01 crc kubenswrapper[4558]: I0120 17:20:01.585627 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:20:01 crc kubenswrapper[4558]: I0120 17:20:01.585574 4558 generic.go:334] "Generic (PLEG): container finished" podID="c9435883-4191-45a5-8b34-46d84d9c5fed" containerID="015d9ef73c6661866790a96c36a7cd7e9eccd25020783afa973f87aac9594196" exitCode=0 Jan 20 17:20:01 crc kubenswrapper[4558]: I0120 17:20:01.585805 4558 generic.go:334] "Generic (PLEG): container finished" podID="c9435883-4191-45a5-8b34-46d84d9c5fed" containerID="42a6810bbfb151e49186d7726c2a07e0ec30198f40340b3816d662c9ce88dd3c" exitCode=143 Jan 20 17:20:01 crc kubenswrapper[4558]: I0120 17:20:01.585678 4558 scope.go:117] "RemoveContainer" containerID="015d9ef73c6661866790a96c36a7cd7e9eccd25020783afa973f87aac9594196" Jan 20 17:20:01 crc kubenswrapper[4558]: I0120 17:20:01.585973 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"c9435883-4191-45a5-8b34-46d84d9c5fed","Type":"ContainerDied","Data":"42a6810bbfb151e49186d7726c2a07e0ec30198f40340b3816d662c9ce88dd3c"} Jan 20 17:20:01 crc kubenswrapper[4558]: I0120 17:20:01.588781 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"c9435883-4191-45a5-8b34-46d84d9c5fed","Type":"ContainerDied","Data":"b644432e0478dec82549110e0656ba2bc180b1dd78400030647027705acc54bf"} Jan 20 17:20:01 crc kubenswrapper[4558]: I0120 17:20:01.591252 4558 generic.go:334] "Generic (PLEG): container finished" podID="49f5b91e-2110-4750-b1ca-5dbcf2df3f18" containerID="4d4114d53817aa83a519886615769c5abf77ffb13ff027228a58bde10719d179" exitCode=143 Jan 20 17:20:01 crc kubenswrapper[4558]: I0120 17:20:01.591368 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"49f5b91e-2110-4750-b1ca-5dbcf2df3f18","Type":"ContainerDied","Data":"4d4114d53817aa83a519886615769c5abf77ffb13ff027228a58bde10719d179"} Jan 20 17:20:01 crc 
kubenswrapper[4558]: I0120 17:20:01.621495 4558 scope.go:117] "RemoveContainer" containerID="42a6810bbfb151e49186d7726c2a07e0ec30198f40340b3816d662c9ce88dd3c" Jan 20 17:20:01 crc kubenswrapper[4558]: I0120 17:20:01.624918 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:20:01 crc kubenswrapper[4558]: I0120 17:20:01.637812 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:20:01 crc kubenswrapper[4558]: I0120 17:20:01.643948 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:20:01 crc kubenswrapper[4558]: I0120 17:20:01.646242 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:20:01 crc kubenswrapper[4558]: E0120 17:20:01.646737 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c9435883-4191-45a5-8b34-46d84d9c5fed" containerName="nova-metadata-metadata" Jan 20 17:20:01 crc kubenswrapper[4558]: I0120 17:20:01.646757 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c9435883-4191-45a5-8b34-46d84d9c5fed" containerName="nova-metadata-metadata" Jan 20 17:20:01 crc kubenswrapper[4558]: E0120 17:20:01.646775 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2e3a805-bb9e-4e87-b57d-99fb395130bf" containerName="nova-manage" Jan 20 17:20:01 crc kubenswrapper[4558]: I0120 17:20:01.646782 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2e3a805-bb9e-4e87-b57d-99fb395130bf" containerName="nova-manage" Jan 20 17:20:01 crc kubenswrapper[4558]: E0120 17:20:01.646812 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c9435883-4191-45a5-8b34-46d84d9c5fed" containerName="nova-metadata-log" Jan 20 17:20:01 crc kubenswrapper[4558]: I0120 17:20:01.646819 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c9435883-4191-45a5-8b34-46d84d9c5fed" containerName="nova-metadata-log" Jan 20 17:20:01 crc kubenswrapper[4558]: I0120 17:20:01.647035 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c9435883-4191-45a5-8b34-46d84d9c5fed" containerName="nova-metadata-log" Jan 20 17:20:01 crc kubenswrapper[4558]: I0120 17:20:01.647061 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e2e3a805-bb9e-4e87-b57d-99fb395130bf" containerName="nova-manage" Jan 20 17:20:01 crc kubenswrapper[4558]: I0120 17:20:01.647075 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c9435883-4191-45a5-8b34-46d84d9c5fed" containerName="nova-metadata-metadata" Jan 20 17:20:01 crc kubenswrapper[4558]: I0120 17:20:01.648261 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:20:01 crc kubenswrapper[4558]: I0120 17:20:01.650021 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-metadata-config-data" Jan 20 17:20:01 crc kubenswrapper[4558]: I0120 17:20:01.650341 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-metadata-internal-svc" Jan 20 17:20:01 crc kubenswrapper[4558]: I0120 17:20:01.653588 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:20:01 crc kubenswrapper[4558]: I0120 17:20:01.660271 4558 scope.go:117] "RemoveContainer" containerID="015d9ef73c6661866790a96c36a7cd7e9eccd25020783afa973f87aac9594196" Jan 20 17:20:01 crc kubenswrapper[4558]: E0120 17:20:01.663450 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"015d9ef73c6661866790a96c36a7cd7e9eccd25020783afa973f87aac9594196\": container with ID starting with 015d9ef73c6661866790a96c36a7cd7e9eccd25020783afa973f87aac9594196 not found: ID does not exist" containerID="015d9ef73c6661866790a96c36a7cd7e9eccd25020783afa973f87aac9594196" Jan 20 17:20:01 crc kubenswrapper[4558]: I0120 17:20:01.663485 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"015d9ef73c6661866790a96c36a7cd7e9eccd25020783afa973f87aac9594196"} err="failed to get container status \"015d9ef73c6661866790a96c36a7cd7e9eccd25020783afa973f87aac9594196\": rpc error: code = NotFound desc = could not find container \"015d9ef73c6661866790a96c36a7cd7e9eccd25020783afa973f87aac9594196\": container with ID starting with 015d9ef73c6661866790a96c36a7cd7e9eccd25020783afa973f87aac9594196 not found: ID does not exist" Jan 20 17:20:01 crc kubenswrapper[4558]: I0120 17:20:01.663515 4558 scope.go:117] "RemoveContainer" containerID="42a6810bbfb151e49186d7726c2a07e0ec30198f40340b3816d662c9ce88dd3c" Jan 20 17:20:01 crc kubenswrapper[4558]: E0120 17:20:01.673231 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"42a6810bbfb151e49186d7726c2a07e0ec30198f40340b3816d662c9ce88dd3c\": container with ID starting with 42a6810bbfb151e49186d7726c2a07e0ec30198f40340b3816d662c9ce88dd3c not found: ID does not exist" containerID="42a6810bbfb151e49186d7726c2a07e0ec30198f40340b3816d662c9ce88dd3c" Jan 20 17:20:01 crc kubenswrapper[4558]: I0120 17:20:01.673278 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"42a6810bbfb151e49186d7726c2a07e0ec30198f40340b3816d662c9ce88dd3c"} err="failed to get container status \"42a6810bbfb151e49186d7726c2a07e0ec30198f40340b3816d662c9ce88dd3c\": rpc error: code = NotFound desc = could not find container \"42a6810bbfb151e49186d7726c2a07e0ec30198f40340b3816d662c9ce88dd3c\": container with ID starting with 42a6810bbfb151e49186d7726c2a07e0ec30198f40340b3816d662c9ce88dd3c not found: ID does not exist" Jan 20 17:20:01 crc kubenswrapper[4558]: I0120 17:20:01.673305 4558 scope.go:117] "RemoveContainer" containerID="015d9ef73c6661866790a96c36a7cd7e9eccd25020783afa973f87aac9594196" Jan 20 17:20:01 crc kubenswrapper[4558]: I0120 17:20:01.674737 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"015d9ef73c6661866790a96c36a7cd7e9eccd25020783afa973f87aac9594196"} err="failed to get container status 
\"015d9ef73c6661866790a96c36a7cd7e9eccd25020783afa973f87aac9594196\": rpc error: code = NotFound desc = could not find container \"015d9ef73c6661866790a96c36a7cd7e9eccd25020783afa973f87aac9594196\": container with ID starting with 015d9ef73c6661866790a96c36a7cd7e9eccd25020783afa973f87aac9594196 not found: ID does not exist" Jan 20 17:20:01 crc kubenswrapper[4558]: I0120 17:20:01.674778 4558 scope.go:117] "RemoveContainer" containerID="42a6810bbfb151e49186d7726c2a07e0ec30198f40340b3816d662c9ce88dd3c" Jan 20 17:20:01 crc kubenswrapper[4558]: I0120 17:20:01.676142 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"42a6810bbfb151e49186d7726c2a07e0ec30198f40340b3816d662c9ce88dd3c"} err="failed to get container status \"42a6810bbfb151e49186d7726c2a07e0ec30198f40340b3816d662c9ce88dd3c\": rpc error: code = NotFound desc = could not find container \"42a6810bbfb151e49186d7726c2a07e0ec30198f40340b3816d662c9ce88dd3c\": container with ID starting with 42a6810bbfb151e49186d7726c2a07e0ec30198f40340b3816d662c9ce88dd3c not found: ID does not exist" Jan 20 17:20:01 crc kubenswrapper[4558]: I0120 17:20:01.761574 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/333fe534-20d7-4081-ad19-9caff5789f86-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"333fe534-20d7-4081-ad19-9caff5789f86\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:20:01 crc kubenswrapper[4558]: I0120 17:20:01.761710 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/333fe534-20d7-4081-ad19-9caff5789f86-config-data\") pod \"nova-metadata-0\" (UID: \"333fe534-20d7-4081-ad19-9caff5789f86\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:20:01 crc kubenswrapper[4558]: I0120 17:20:01.761748 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hjpn4\" (UniqueName: \"kubernetes.io/projected/333fe534-20d7-4081-ad19-9caff5789f86-kube-api-access-hjpn4\") pod \"nova-metadata-0\" (UID: \"333fe534-20d7-4081-ad19-9caff5789f86\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:20:01 crc kubenswrapper[4558]: I0120 17:20:01.761778 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/333fe534-20d7-4081-ad19-9caff5789f86-logs\") pod \"nova-metadata-0\" (UID: \"333fe534-20d7-4081-ad19-9caff5789f86\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:20:01 crc kubenswrapper[4558]: I0120 17:20:01.762302 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/333fe534-20d7-4081-ad19-9caff5789f86-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"333fe534-20d7-4081-ad19-9caff5789f86\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:20:01 crc kubenswrapper[4558]: I0120 17:20:01.864840 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/333fe534-20d7-4081-ad19-9caff5789f86-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"333fe534-20d7-4081-ad19-9caff5789f86\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:20:01 crc kubenswrapper[4558]: I0120 17:20:01.865052 4558 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/333fe534-20d7-4081-ad19-9caff5789f86-config-data\") pod \"nova-metadata-0\" (UID: \"333fe534-20d7-4081-ad19-9caff5789f86\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:20:01 crc kubenswrapper[4558]: I0120 17:20:01.865127 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hjpn4\" (UniqueName: \"kubernetes.io/projected/333fe534-20d7-4081-ad19-9caff5789f86-kube-api-access-hjpn4\") pod \"nova-metadata-0\" (UID: \"333fe534-20d7-4081-ad19-9caff5789f86\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:20:01 crc kubenswrapper[4558]: I0120 17:20:01.865185 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/333fe534-20d7-4081-ad19-9caff5789f86-logs\") pod \"nova-metadata-0\" (UID: \"333fe534-20d7-4081-ad19-9caff5789f86\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:20:01 crc kubenswrapper[4558]: I0120 17:20:01.866384 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/333fe534-20d7-4081-ad19-9caff5789f86-logs\") pod \"nova-metadata-0\" (UID: \"333fe534-20d7-4081-ad19-9caff5789f86\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:20:01 crc kubenswrapper[4558]: I0120 17:20:01.866470 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/333fe534-20d7-4081-ad19-9caff5789f86-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"333fe534-20d7-4081-ad19-9caff5789f86\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:20:01 crc kubenswrapper[4558]: I0120 17:20:01.869980 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/333fe534-20d7-4081-ad19-9caff5789f86-config-data\") pod \"nova-metadata-0\" (UID: \"333fe534-20d7-4081-ad19-9caff5789f86\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:20:01 crc kubenswrapper[4558]: I0120 17:20:01.871026 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/333fe534-20d7-4081-ad19-9caff5789f86-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"333fe534-20d7-4081-ad19-9caff5789f86\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:20:01 crc kubenswrapper[4558]: I0120 17:20:01.880770 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/333fe534-20d7-4081-ad19-9caff5789f86-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"333fe534-20d7-4081-ad19-9caff5789f86\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:20:01 crc kubenswrapper[4558]: I0120 17:20:01.882135 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hjpn4\" (UniqueName: \"kubernetes.io/projected/333fe534-20d7-4081-ad19-9caff5789f86-kube-api-access-hjpn4\") pod \"nova-metadata-0\" (UID: \"333fe534-20d7-4081-ad19-9caff5789f86\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:20:01 crc kubenswrapper[4558]: I0120 17:20:01.981049 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:20:02 crc kubenswrapper[4558]: I0120 17:20:02.433560 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:20:02 crc kubenswrapper[4558]: I0120 17:20:02.575618 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c9435883-4191-45a5-8b34-46d84d9c5fed" path="/var/lib/kubelet/pods/c9435883-4191-45a5-8b34-46d84d9c5fed/volumes" Jan 20 17:20:02 crc kubenswrapper[4558]: I0120 17:20:02.610157 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"333fe534-20d7-4081-ad19-9caff5789f86","Type":"ContainerStarted","Data":"42f8774047cb1caa112b3941ceefba82f8f4780e8be212d055680736ef8b48d8"} Jan 20 17:20:02 crc kubenswrapper[4558]: I0120 17:20:02.611053 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"333fe534-20d7-4081-ad19-9caff5789f86","Type":"ContainerStarted","Data":"989bce89f7febf007bf8011a75a5a94c9e27baf504881f5b14aeb7d11e9bafd9"} Jan 20 17:20:02 crc kubenswrapper[4558]: I0120 17:20:02.612179 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="f595b84e-54f3-47dc-91e6-8c4ba1281542" containerName="nova-scheduler-scheduler" containerID="cri-o://e813ccf21ae7caa0f1376a2b6593c47e8b15a47bdbac4cc50c86256faf3e25eb" gracePeriod=30 Jan 20 17:20:03 crc kubenswrapper[4558]: I0120 17:20:03.625586 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"333fe534-20d7-4081-ad19-9caff5789f86","Type":"ContainerStarted","Data":"baff23dd43f4e034d383c7be8ae7a9d5a3dd0ec441be819834b472bdcbd0466d"} Jan 20 17:20:03 crc kubenswrapper[4558]: I0120 17:20:03.648876 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-metadata-0" podStartSLOduration=2.648854193 podStartE2EDuration="2.648854193s" podCreationTimestamp="2026-01-20 17:20:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:20:03.640273406 +0000 UTC m=+2297.400611373" watchObservedRunningTime="2026-01-20 17:20:03.648854193 +0000 UTC m=+2297.409192160" Jan 20 17:20:05 crc kubenswrapper[4558]: I0120 17:20:05.502747 4558 scope.go:117] "RemoveContainer" containerID="bd20e6e920cac2215a98a5c447c2c55bc432c00d80f0d36813856b38b27308b8" Jan 20 17:20:05 crc kubenswrapper[4558]: I0120 17:20:05.534854 4558 scope.go:117] "RemoveContainer" containerID="f99b4296bd5b00c49e313c96740dc5b93241fc455baebd491918d5e761d9430d" Jan 20 17:20:05 crc kubenswrapper[4558]: I0120 17:20:05.566418 4558 scope.go:117] "RemoveContainer" containerID="0ffce44366a8b4466427b682fb41640425366a0f4449210959148de9aef695c6" Jan 20 17:20:05 crc kubenswrapper[4558]: E0120 17:20:05.566778 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:20:05 crc kubenswrapper[4558]: E0120 17:20:05.567312 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = 
command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="e813ccf21ae7caa0f1376a2b6593c47e8b15a47bdbac4cc50c86256faf3e25eb" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:20:05 crc kubenswrapper[4558]: E0120 17:20:05.569613 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="e813ccf21ae7caa0f1376a2b6593c47e8b15a47bdbac4cc50c86256faf3e25eb" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:20:05 crc kubenswrapper[4558]: E0120 17:20:05.574609 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="e813ccf21ae7caa0f1376a2b6593c47e8b15a47bdbac4cc50c86256faf3e25eb" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:20:05 crc kubenswrapper[4558]: E0120 17:20:05.574674 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="f595b84e-54f3-47dc-91e6-8c4ba1281542" containerName="nova-scheduler-scheduler" Jan 20 17:20:05 crc kubenswrapper[4558]: I0120 17:20:05.586575 4558 scope.go:117] "RemoveContainer" containerID="77f2cc9689416d67bb531c240255bcc3dba26ca8b5684419ffa0bcb7727882f2" Jan 20 17:20:05 crc kubenswrapper[4558]: I0120 17:20:05.616212 4558 scope.go:117] "RemoveContainer" containerID="8540f5c72d8ae0ebbfdf8ace4baa83976e8887ffaba9374a18c9d79a16cd9d30" Jan 20 17:20:05 crc kubenswrapper[4558]: I0120 17:20:05.652773 4558 scope.go:117] "RemoveContainer" containerID="c0a3f252e721c2ca07be91f94b01bf52c6603bf0f355466ad7d645ee1891d878" Jan 20 17:20:05 crc kubenswrapper[4558]: I0120 17:20:05.690731 4558 scope.go:117] "RemoveContainer" containerID="146969194ab0a40e402393f07d4594aaad069b32a10f5e882b7b5d9c1bed6628" Jan 20 17:20:05 crc kubenswrapper[4558]: I0120 17:20:05.710648 4558 scope.go:117] "RemoveContainer" containerID="ede71d31814bb504757b712e0cf2f0927853e428f6428a924676c1d65b768351" Jan 20 17:20:05 crc kubenswrapper[4558]: I0120 17:20:05.752248 4558 scope.go:117] "RemoveContainer" containerID="59d0c6bafc63cc80658f7568160ae47885a8cf64a674673643c37344ad9aa432" Jan 20 17:20:05 crc kubenswrapper[4558]: I0120 17:20:05.943237 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:20:05 crc kubenswrapper[4558]: I0120 17:20:05.966222 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:20:06 crc kubenswrapper[4558]: I0120 17:20:06.506941 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:20:06 crc kubenswrapper[4558]: I0120 17:20:06.566177 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/49f5b91e-2110-4750-b1ca-5dbcf2df3f18-config-data\") pod \"49f5b91e-2110-4750-b1ca-5dbcf2df3f18\" (UID: \"49f5b91e-2110-4750-b1ca-5dbcf2df3f18\") " Jan 20 17:20:06 crc kubenswrapper[4558]: I0120 17:20:06.566516 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/49f5b91e-2110-4750-b1ca-5dbcf2df3f18-combined-ca-bundle\") pod \"49f5b91e-2110-4750-b1ca-5dbcf2df3f18\" (UID: \"49f5b91e-2110-4750-b1ca-5dbcf2df3f18\") " Jan 20 17:20:06 crc kubenswrapper[4558]: I0120 17:20:06.566562 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-blfvf\" (UniqueName: \"kubernetes.io/projected/49f5b91e-2110-4750-b1ca-5dbcf2df3f18-kube-api-access-blfvf\") pod \"49f5b91e-2110-4750-b1ca-5dbcf2df3f18\" (UID: \"49f5b91e-2110-4750-b1ca-5dbcf2df3f18\") " Jan 20 17:20:06 crc kubenswrapper[4558]: I0120 17:20:06.566684 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/49f5b91e-2110-4750-b1ca-5dbcf2df3f18-logs\") pod \"49f5b91e-2110-4750-b1ca-5dbcf2df3f18\" (UID: \"49f5b91e-2110-4750-b1ca-5dbcf2df3f18\") " Jan 20 17:20:06 crc kubenswrapper[4558]: I0120 17:20:06.567427 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/49f5b91e-2110-4750-b1ca-5dbcf2df3f18-logs" (OuterVolumeSpecName: "logs") pod "49f5b91e-2110-4750-b1ca-5dbcf2df3f18" (UID: "49f5b91e-2110-4750-b1ca-5dbcf2df3f18"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:20:06 crc kubenswrapper[4558]: I0120 17:20:06.572154 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49f5b91e-2110-4750-b1ca-5dbcf2df3f18-kube-api-access-blfvf" (OuterVolumeSpecName: "kube-api-access-blfvf") pod "49f5b91e-2110-4750-b1ca-5dbcf2df3f18" (UID: "49f5b91e-2110-4750-b1ca-5dbcf2df3f18"). InnerVolumeSpecName "kube-api-access-blfvf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:20:06 crc kubenswrapper[4558]: I0120 17:20:06.591820 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49f5b91e-2110-4750-b1ca-5dbcf2df3f18-config-data" (OuterVolumeSpecName: "config-data") pod "49f5b91e-2110-4750-b1ca-5dbcf2df3f18" (UID: "49f5b91e-2110-4750-b1ca-5dbcf2df3f18"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:20:06 crc kubenswrapper[4558]: I0120 17:20:06.593987 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49f5b91e-2110-4750-b1ca-5dbcf2df3f18-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "49f5b91e-2110-4750-b1ca-5dbcf2df3f18" (UID: "49f5b91e-2110-4750-b1ca-5dbcf2df3f18"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:20:06 crc kubenswrapper[4558]: I0120 17:20:06.666473 4558 generic.go:334] "Generic (PLEG): container finished" podID="49f5b91e-2110-4750-b1ca-5dbcf2df3f18" containerID="467965ce8f331c5d889ed685c001186deef46149b751fc2d12d3b544f29a577d" exitCode=0 Jan 20 17:20:06 crc kubenswrapper[4558]: I0120 17:20:06.666557 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:20:06 crc kubenswrapper[4558]: I0120 17:20:06.666580 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"49f5b91e-2110-4750-b1ca-5dbcf2df3f18","Type":"ContainerDied","Data":"467965ce8f331c5d889ed685c001186deef46149b751fc2d12d3b544f29a577d"} Jan 20 17:20:06 crc kubenswrapper[4558]: I0120 17:20:06.666643 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"49f5b91e-2110-4750-b1ca-5dbcf2df3f18","Type":"ContainerDied","Data":"1637c0d200c720552335ea1fd68fd8a8132bc8709e8e6bf1336fb22350e1d393"} Jan 20 17:20:06 crc kubenswrapper[4558]: I0120 17:20:06.666693 4558 scope.go:117] "RemoveContainer" containerID="467965ce8f331c5d889ed685c001186deef46149b751fc2d12d3b544f29a577d" Jan 20 17:20:06 crc kubenswrapper[4558]: I0120 17:20:06.670747 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/49f5b91e-2110-4750-b1ca-5dbcf2df3f18-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:20:06 crc kubenswrapper[4558]: I0120 17:20:06.670777 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/49f5b91e-2110-4750-b1ca-5dbcf2df3f18-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:20:06 crc kubenswrapper[4558]: I0120 17:20:06.670791 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/49f5b91e-2110-4750-b1ca-5dbcf2df3f18-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:20:06 crc kubenswrapper[4558]: I0120 17:20:06.670803 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-blfvf\" (UniqueName: \"kubernetes.io/projected/49f5b91e-2110-4750-b1ca-5dbcf2df3f18-kube-api-access-blfvf\") on node \"crc\" DevicePath \"\"" Jan 20 17:20:06 crc kubenswrapper[4558]: I0120 17:20:06.685932 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:20:06 crc kubenswrapper[4558]: I0120 17:20:06.691326 4558 scope.go:117] "RemoveContainer" containerID="4d4114d53817aa83a519886615769c5abf77ffb13ff027228a58bde10719d179" Jan 20 17:20:06 crc kubenswrapper[4558]: I0120 17:20:06.703824 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:20:06 crc kubenswrapper[4558]: I0120 17:20:06.718452 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:20:06 crc kubenswrapper[4558]: I0120 17:20:06.720857 4558 scope.go:117] "RemoveContainer" containerID="467965ce8f331c5d889ed685c001186deef46149b751fc2d12d3b544f29a577d" Jan 20 17:20:06 crc kubenswrapper[4558]: E0120 17:20:06.721852 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"467965ce8f331c5d889ed685c001186deef46149b751fc2d12d3b544f29a577d\": container with ID starting with 
467965ce8f331c5d889ed685c001186deef46149b751fc2d12d3b544f29a577d not found: ID does not exist" containerID="467965ce8f331c5d889ed685c001186deef46149b751fc2d12d3b544f29a577d" Jan 20 17:20:06 crc kubenswrapper[4558]: I0120 17:20:06.722070 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"467965ce8f331c5d889ed685c001186deef46149b751fc2d12d3b544f29a577d"} err="failed to get container status \"467965ce8f331c5d889ed685c001186deef46149b751fc2d12d3b544f29a577d\": rpc error: code = NotFound desc = could not find container \"467965ce8f331c5d889ed685c001186deef46149b751fc2d12d3b544f29a577d\": container with ID starting with 467965ce8f331c5d889ed685c001186deef46149b751fc2d12d3b544f29a577d not found: ID does not exist" Jan 20 17:20:06 crc kubenswrapper[4558]: I0120 17:20:06.722240 4558 scope.go:117] "RemoveContainer" containerID="4d4114d53817aa83a519886615769c5abf77ffb13ff027228a58bde10719d179" Jan 20 17:20:06 crc kubenswrapper[4558]: E0120 17:20:06.724203 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4d4114d53817aa83a519886615769c5abf77ffb13ff027228a58bde10719d179\": container with ID starting with 4d4114d53817aa83a519886615769c5abf77ffb13ff027228a58bde10719d179 not found: ID does not exist" containerID="4d4114d53817aa83a519886615769c5abf77ffb13ff027228a58bde10719d179" Jan 20 17:20:06 crc kubenswrapper[4558]: I0120 17:20:06.724245 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4d4114d53817aa83a519886615769c5abf77ffb13ff027228a58bde10719d179"} err="failed to get container status \"4d4114d53817aa83a519886615769c5abf77ffb13ff027228a58bde10719d179\": rpc error: code = NotFound desc = could not find container \"4d4114d53817aa83a519886615769c5abf77ffb13ff027228a58bde10719d179\": container with ID starting with 4d4114d53817aa83a519886615769c5abf77ffb13ff027228a58bde10719d179 not found: ID does not exist" Jan 20 17:20:06 crc kubenswrapper[4558]: I0120 17:20:06.734930 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:20:06 crc kubenswrapper[4558]: E0120 17:20:06.735446 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="49f5b91e-2110-4750-b1ca-5dbcf2df3f18" containerName="nova-api-log" Jan 20 17:20:06 crc kubenswrapper[4558]: I0120 17:20:06.735466 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="49f5b91e-2110-4750-b1ca-5dbcf2df3f18" containerName="nova-api-log" Jan 20 17:20:06 crc kubenswrapper[4558]: E0120 17:20:06.735488 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="49f5b91e-2110-4750-b1ca-5dbcf2df3f18" containerName="nova-api-api" Jan 20 17:20:06 crc kubenswrapper[4558]: I0120 17:20:06.735496 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="49f5b91e-2110-4750-b1ca-5dbcf2df3f18" containerName="nova-api-api" Jan 20 17:20:06 crc kubenswrapper[4558]: I0120 17:20:06.735706 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="49f5b91e-2110-4750-b1ca-5dbcf2df3f18" containerName="nova-api-log" Jan 20 17:20:06 crc kubenswrapper[4558]: I0120 17:20:06.735727 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="49f5b91e-2110-4750-b1ca-5dbcf2df3f18" containerName="nova-api-api" Jan 20 17:20:06 crc kubenswrapper[4558]: I0120 17:20:06.736812 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:20:06 crc kubenswrapper[4558]: I0120 17:20:06.738878 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-api-config-data" Jan 20 17:20:06 crc kubenswrapper[4558]: I0120 17:20:06.758774 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:20:06 crc kubenswrapper[4558]: I0120 17:20:06.772364 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52669504-5bf2-4ea7-af48-d8e25256df3a-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"52669504-5bf2-4ea7-af48-d8e25256df3a\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:20:06 crc kubenswrapper[4558]: I0120 17:20:06.772474 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/52669504-5bf2-4ea7-af48-d8e25256df3a-logs\") pod \"nova-api-0\" (UID: \"52669504-5bf2-4ea7-af48-d8e25256df3a\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:20:06 crc kubenswrapper[4558]: I0120 17:20:06.772589 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lt54l\" (UniqueName: \"kubernetes.io/projected/52669504-5bf2-4ea7-af48-d8e25256df3a-kube-api-access-lt54l\") pod \"nova-api-0\" (UID: \"52669504-5bf2-4ea7-af48-d8e25256df3a\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:20:06 crc kubenswrapper[4558]: I0120 17:20:06.772817 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/52669504-5bf2-4ea7-af48-d8e25256df3a-config-data\") pod \"nova-api-0\" (UID: \"52669504-5bf2-4ea7-af48-d8e25256df3a\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:20:06 crc kubenswrapper[4558]: I0120 17:20:06.875395 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/52669504-5bf2-4ea7-af48-d8e25256df3a-config-data\") pod \"nova-api-0\" (UID: \"52669504-5bf2-4ea7-af48-d8e25256df3a\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:20:06 crc kubenswrapper[4558]: I0120 17:20:06.875502 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52669504-5bf2-4ea7-af48-d8e25256df3a-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"52669504-5bf2-4ea7-af48-d8e25256df3a\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:20:06 crc kubenswrapper[4558]: I0120 17:20:06.875551 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/52669504-5bf2-4ea7-af48-d8e25256df3a-logs\") pod \"nova-api-0\" (UID: \"52669504-5bf2-4ea7-af48-d8e25256df3a\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:20:06 crc kubenswrapper[4558]: I0120 17:20:06.875606 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lt54l\" (UniqueName: \"kubernetes.io/projected/52669504-5bf2-4ea7-af48-d8e25256df3a-kube-api-access-lt54l\") pod \"nova-api-0\" (UID: \"52669504-5bf2-4ea7-af48-d8e25256df3a\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:20:06 crc kubenswrapper[4558]: I0120 17:20:06.876228 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/52669504-5bf2-4ea7-af48-d8e25256df3a-logs\") pod \"nova-api-0\" (UID: \"52669504-5bf2-4ea7-af48-d8e25256df3a\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:20:06 crc kubenswrapper[4558]: I0120 17:20:06.880311 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/52669504-5bf2-4ea7-af48-d8e25256df3a-config-data\") pod \"nova-api-0\" (UID: \"52669504-5bf2-4ea7-af48-d8e25256df3a\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:20:06 crc kubenswrapper[4558]: I0120 17:20:06.882157 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52669504-5bf2-4ea7-af48-d8e25256df3a-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"52669504-5bf2-4ea7-af48-d8e25256df3a\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:20:06 crc kubenswrapper[4558]: I0120 17:20:06.890400 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lt54l\" (UniqueName: \"kubernetes.io/projected/52669504-5bf2-4ea7-af48-d8e25256df3a-kube-api-access-lt54l\") pod \"nova-api-0\" (UID: \"52669504-5bf2-4ea7-af48-d8e25256df3a\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:20:06 crc kubenswrapper[4558]: I0120 17:20:06.981399 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:20:06 crc kubenswrapper[4558]: I0120 17:20:06.981540 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:20:07 crc kubenswrapper[4558]: I0120 17:20:07.053132 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:20:07 crc kubenswrapper[4558]: I0120 17:20:07.481697 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:20:07 crc kubenswrapper[4558]: I0120 17:20:07.685503 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"52669504-5bf2-4ea7-af48-d8e25256df3a","Type":"ContainerStarted","Data":"1d27645c7b569a7b27ca33871d7e4cbcafb17d65fbbb9b8a04902060625c9323"} Jan 20 17:20:08 crc kubenswrapper[4558]: I0120 17:20:08.114017 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:20:08 crc kubenswrapper[4558]: I0120 17:20:08.210932 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f595b84e-54f3-47dc-91e6-8c4ba1281542-config-data\") pod \"f595b84e-54f3-47dc-91e6-8c4ba1281542\" (UID: \"f595b84e-54f3-47dc-91e6-8c4ba1281542\") " Jan 20 17:20:08 crc kubenswrapper[4558]: I0120 17:20:08.211012 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f595b84e-54f3-47dc-91e6-8c4ba1281542-combined-ca-bundle\") pod \"f595b84e-54f3-47dc-91e6-8c4ba1281542\" (UID: \"f595b84e-54f3-47dc-91e6-8c4ba1281542\") " Jan 20 17:20:08 crc kubenswrapper[4558]: I0120 17:20:08.211088 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-95stq\" (UniqueName: \"kubernetes.io/projected/f595b84e-54f3-47dc-91e6-8c4ba1281542-kube-api-access-95stq\") pod \"f595b84e-54f3-47dc-91e6-8c4ba1281542\" (UID: \"f595b84e-54f3-47dc-91e6-8c4ba1281542\") " Jan 20 17:20:08 crc kubenswrapper[4558]: I0120 17:20:08.216297 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f595b84e-54f3-47dc-91e6-8c4ba1281542-kube-api-access-95stq" (OuterVolumeSpecName: "kube-api-access-95stq") pod "f595b84e-54f3-47dc-91e6-8c4ba1281542" (UID: "f595b84e-54f3-47dc-91e6-8c4ba1281542"). InnerVolumeSpecName "kube-api-access-95stq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:20:08 crc kubenswrapper[4558]: I0120 17:20:08.236912 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f595b84e-54f3-47dc-91e6-8c4ba1281542-config-data" (OuterVolumeSpecName: "config-data") pod "f595b84e-54f3-47dc-91e6-8c4ba1281542" (UID: "f595b84e-54f3-47dc-91e6-8c4ba1281542"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:20:08 crc kubenswrapper[4558]: I0120 17:20:08.237327 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f595b84e-54f3-47dc-91e6-8c4ba1281542-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f595b84e-54f3-47dc-91e6-8c4ba1281542" (UID: "f595b84e-54f3-47dc-91e6-8c4ba1281542"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:20:08 crc kubenswrapper[4558]: I0120 17:20:08.315008 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f595b84e-54f3-47dc-91e6-8c4ba1281542-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:20:08 crc kubenswrapper[4558]: I0120 17:20:08.315052 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f595b84e-54f3-47dc-91e6-8c4ba1281542-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:20:08 crc kubenswrapper[4558]: I0120 17:20:08.315073 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-95stq\" (UniqueName: \"kubernetes.io/projected/f595b84e-54f3-47dc-91e6-8c4ba1281542-kube-api-access-95stq\") on node \"crc\" DevicePath \"\"" Jan 20 17:20:08 crc kubenswrapper[4558]: I0120 17:20:08.494995 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:08 crc kubenswrapper[4558]: I0120 17:20:08.599603 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49f5b91e-2110-4750-b1ca-5dbcf2df3f18" path="/var/lib/kubelet/pods/49f5b91e-2110-4750-b1ca-5dbcf2df3f18/volumes" Jan 20 17:20:08 crc kubenswrapper[4558]: I0120 17:20:08.701419 4558 generic.go:334] "Generic (PLEG): container finished" podID="f595b84e-54f3-47dc-91e6-8c4ba1281542" containerID="e813ccf21ae7caa0f1376a2b6593c47e8b15a47bdbac4cc50c86256faf3e25eb" exitCode=0 Jan 20 17:20:08 crc kubenswrapper[4558]: I0120 17:20:08.701508 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:20:08 crc kubenswrapper[4558]: I0120 17:20:08.701544 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"f595b84e-54f3-47dc-91e6-8c4ba1281542","Type":"ContainerDied","Data":"e813ccf21ae7caa0f1376a2b6593c47e8b15a47bdbac4cc50c86256faf3e25eb"} Jan 20 17:20:08 crc kubenswrapper[4558]: I0120 17:20:08.701630 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"f595b84e-54f3-47dc-91e6-8c4ba1281542","Type":"ContainerDied","Data":"48e49c068b0b72d8a8d5682f293b3c02b8625bdea4abf6a3374dc986da1642bd"} Jan 20 17:20:08 crc kubenswrapper[4558]: I0120 17:20:08.701660 4558 scope.go:117] "RemoveContainer" containerID="e813ccf21ae7caa0f1376a2b6593c47e8b15a47bdbac4cc50c86256faf3e25eb" Jan 20 17:20:08 crc kubenswrapper[4558]: I0120 17:20:08.704267 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"52669504-5bf2-4ea7-af48-d8e25256df3a","Type":"ContainerStarted","Data":"939c97fe4abf9f0204b6b9e6360703b7c26dbf5e0fef66c228871f01266dfa25"} Jan 20 17:20:08 crc kubenswrapper[4558]: I0120 17:20:08.704318 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"52669504-5bf2-4ea7-af48-d8e25256df3a","Type":"ContainerStarted","Data":"3fc41f2436d25e582e3d1880db1cdbcbb54ff85b18e1cede6cc27ca7aa9718df"} Jan 20 17:20:08 crc kubenswrapper[4558]: I0120 17:20:08.725901 4558 scope.go:117] "RemoveContainer" containerID="e813ccf21ae7caa0f1376a2b6593c47e8b15a47bdbac4cc50c86256faf3e25eb" Jan 20 17:20:08 crc kubenswrapper[4558]: E0120 17:20:08.726343 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"e813ccf21ae7caa0f1376a2b6593c47e8b15a47bdbac4cc50c86256faf3e25eb\": container with ID starting with e813ccf21ae7caa0f1376a2b6593c47e8b15a47bdbac4cc50c86256faf3e25eb not found: ID does not exist" containerID="e813ccf21ae7caa0f1376a2b6593c47e8b15a47bdbac4cc50c86256faf3e25eb" Jan 20 17:20:08 crc kubenswrapper[4558]: I0120 17:20:08.726378 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e813ccf21ae7caa0f1376a2b6593c47e8b15a47bdbac4cc50c86256faf3e25eb"} err="failed to get container status \"e813ccf21ae7caa0f1376a2b6593c47e8b15a47bdbac4cc50c86256faf3e25eb\": rpc error: code = NotFound desc = could not find container \"e813ccf21ae7caa0f1376a2b6593c47e8b15a47bdbac4cc50c86256faf3e25eb\": container with ID starting with e813ccf21ae7caa0f1376a2b6593c47e8b15a47bdbac4cc50c86256faf3e25eb not found: ID does not exist" Jan 20 17:20:08 crc kubenswrapper[4558]: I0120 17:20:08.732738 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-api-0" podStartSLOduration=2.73272273 podStartE2EDuration="2.73272273s" podCreationTimestamp="2026-01-20 17:20:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:20:08.727735702 +0000 UTC m=+2302.488073669" watchObservedRunningTime="2026-01-20 17:20:08.73272273 +0000 UTC m=+2302.493060687" Jan 20 17:20:08 crc kubenswrapper[4558]: I0120 17:20:08.753684 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:20:08 crc kubenswrapper[4558]: I0120 17:20:08.761199 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:20:08 crc kubenswrapper[4558]: I0120 17:20:08.772788 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:20:08 crc kubenswrapper[4558]: E0120 17:20:08.773370 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f595b84e-54f3-47dc-91e6-8c4ba1281542" containerName="nova-scheduler-scheduler" Jan 20 17:20:08 crc kubenswrapper[4558]: I0120 17:20:08.773390 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f595b84e-54f3-47dc-91e6-8c4ba1281542" containerName="nova-scheduler-scheduler" Jan 20 17:20:08 crc kubenswrapper[4558]: I0120 17:20:08.773645 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f595b84e-54f3-47dc-91e6-8c4ba1281542" containerName="nova-scheduler-scheduler" Jan 20 17:20:08 crc kubenswrapper[4558]: I0120 17:20:08.774507 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:20:08 crc kubenswrapper[4558]: I0120 17:20:08.776122 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-scheduler-config-data" Jan 20 17:20:08 crc kubenswrapper[4558]: I0120 17:20:08.782229 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:20:08 crc kubenswrapper[4558]: I0120 17:20:08.843046 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd789fa2-9e67-47ad-a16c-84f613376e9a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"fd789fa2-9e67-47ad-a16c-84f613376e9a\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:20:08 crc kubenswrapper[4558]: I0120 17:20:08.843149 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd789fa2-9e67-47ad-a16c-84f613376e9a-config-data\") pod \"nova-scheduler-0\" (UID: \"fd789fa2-9e67-47ad-a16c-84f613376e9a\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:20:08 crc kubenswrapper[4558]: I0120 17:20:08.843227 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-592ns\" (UniqueName: \"kubernetes.io/projected/fd789fa2-9e67-47ad-a16c-84f613376e9a-kube-api-access-592ns\") pod \"nova-scheduler-0\" (UID: \"fd789fa2-9e67-47ad-a16c-84f613376e9a\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:20:08 crc kubenswrapper[4558]: I0120 17:20:08.944949 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd789fa2-9e67-47ad-a16c-84f613376e9a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"fd789fa2-9e67-47ad-a16c-84f613376e9a\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:20:08 crc kubenswrapper[4558]: I0120 17:20:08.945038 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd789fa2-9e67-47ad-a16c-84f613376e9a-config-data\") pod \"nova-scheduler-0\" (UID: \"fd789fa2-9e67-47ad-a16c-84f613376e9a\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:20:08 crc kubenswrapper[4558]: I0120 17:20:08.945082 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-592ns\" (UniqueName: \"kubernetes.io/projected/fd789fa2-9e67-47ad-a16c-84f613376e9a-kube-api-access-592ns\") pod \"nova-scheduler-0\" (UID: \"fd789fa2-9e67-47ad-a16c-84f613376e9a\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:20:08 crc kubenswrapper[4558]: I0120 17:20:08.950934 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd789fa2-9e67-47ad-a16c-84f613376e9a-config-data\") pod \"nova-scheduler-0\" (UID: \"fd789fa2-9e67-47ad-a16c-84f613376e9a\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:20:08 crc kubenswrapper[4558]: I0120 17:20:08.952555 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd789fa2-9e67-47ad-a16c-84f613376e9a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"fd789fa2-9e67-47ad-a16c-84f613376e9a\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:20:08 crc kubenswrapper[4558]: I0120 17:20:08.963432 
4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-592ns\" (UniqueName: \"kubernetes.io/projected/fd789fa2-9e67-47ad-a16c-84f613376e9a-kube-api-access-592ns\") pod \"nova-scheduler-0\" (UID: \"fd789fa2-9e67-47ad-a16c-84f613376e9a\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:20:09 crc kubenswrapper[4558]: I0120 17:20:09.092288 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:20:09 crc kubenswrapper[4558]: I0120 17:20:09.506674 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:20:09 crc kubenswrapper[4558]: W0120 17:20:09.510055 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfd789fa2_9e67_47ad_a16c_84f613376e9a.slice/crio-323e90a49cb7fcad4b74be3d7daa58463f1cc5480c06dbe458d51ae2b184fa62 WatchSource:0}: Error finding container 323e90a49cb7fcad4b74be3d7daa58463f1cc5480c06dbe458d51ae2b184fa62: Status 404 returned error can't find the container with id 323e90a49cb7fcad4b74be3d7daa58463f1cc5480c06dbe458d51ae2b184fa62 Jan 20 17:20:09 crc kubenswrapper[4558]: I0120 17:20:09.724115 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"fd789fa2-9e67-47ad-a16c-84f613376e9a","Type":"ContainerStarted","Data":"323e90a49cb7fcad4b74be3d7daa58463f1cc5480c06dbe458d51ae2b184fa62"} Jan 20 17:20:09 crc kubenswrapper[4558]: I0120 17:20:09.979395 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:20:10 crc kubenswrapper[4558]: I0120 17:20:10.417607 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-cell-mapping-mc7gs"] Jan 20 17:20:10 crc kubenswrapper[4558]: I0120 17:20:10.419279 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-mc7gs" Jan 20 17:20:10 crc kubenswrapper[4558]: I0120 17:20:10.421443 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-manage-config-data" Jan 20 17:20:10 crc kubenswrapper[4558]: I0120 17:20:10.421791 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-manage-scripts" Jan 20 17:20:10 crc kubenswrapper[4558]: I0120 17:20:10.424344 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-cell-mapping-mc7gs"] Jan 20 17:20:10 crc kubenswrapper[4558]: I0120 17:20:10.474725 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f50f6bbe-7952-4b0d-aa1b-88578330e103-config-data\") pod \"nova-cell1-cell-mapping-mc7gs\" (UID: \"f50f6bbe-7952-4b0d-aa1b-88578330e103\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-mc7gs" Jan 20 17:20:10 crc kubenswrapper[4558]: I0120 17:20:10.474782 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bk5x9\" (UniqueName: \"kubernetes.io/projected/f50f6bbe-7952-4b0d-aa1b-88578330e103-kube-api-access-bk5x9\") pod \"nova-cell1-cell-mapping-mc7gs\" (UID: \"f50f6bbe-7952-4b0d-aa1b-88578330e103\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-mc7gs" Jan 20 17:20:10 crc kubenswrapper[4558]: I0120 17:20:10.474868 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f50f6bbe-7952-4b0d-aa1b-88578330e103-scripts\") pod \"nova-cell1-cell-mapping-mc7gs\" (UID: \"f50f6bbe-7952-4b0d-aa1b-88578330e103\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-mc7gs" Jan 20 17:20:10 crc kubenswrapper[4558]: I0120 17:20:10.475069 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f50f6bbe-7952-4b0d-aa1b-88578330e103-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-mc7gs\" (UID: \"f50f6bbe-7952-4b0d-aa1b-88578330e103\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-mc7gs" Jan 20 17:20:10 crc kubenswrapper[4558]: I0120 17:20:10.576141 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f595b84e-54f3-47dc-91e6-8c4ba1281542" path="/var/lib/kubelet/pods/f595b84e-54f3-47dc-91e6-8c4ba1281542/volumes" Jan 20 17:20:10 crc kubenswrapper[4558]: I0120 17:20:10.576908 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f50f6bbe-7952-4b0d-aa1b-88578330e103-config-data\") pod \"nova-cell1-cell-mapping-mc7gs\" (UID: \"f50f6bbe-7952-4b0d-aa1b-88578330e103\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-mc7gs" Jan 20 17:20:10 crc kubenswrapper[4558]: I0120 17:20:10.576972 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bk5x9\" (UniqueName: \"kubernetes.io/projected/f50f6bbe-7952-4b0d-aa1b-88578330e103-kube-api-access-bk5x9\") pod \"nova-cell1-cell-mapping-mc7gs\" (UID: \"f50f6bbe-7952-4b0d-aa1b-88578330e103\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-mc7gs" Jan 20 17:20:10 crc kubenswrapper[4558]: I0120 17:20:10.577012 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/f50f6bbe-7952-4b0d-aa1b-88578330e103-scripts\") pod \"nova-cell1-cell-mapping-mc7gs\" (UID: \"f50f6bbe-7952-4b0d-aa1b-88578330e103\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-mc7gs" Jan 20 17:20:10 crc kubenswrapper[4558]: I0120 17:20:10.577073 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f50f6bbe-7952-4b0d-aa1b-88578330e103-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-mc7gs\" (UID: \"f50f6bbe-7952-4b0d-aa1b-88578330e103\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-mc7gs" Jan 20 17:20:10 crc kubenswrapper[4558]: I0120 17:20:10.584277 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f50f6bbe-7952-4b0d-aa1b-88578330e103-config-data\") pod \"nova-cell1-cell-mapping-mc7gs\" (UID: \"f50f6bbe-7952-4b0d-aa1b-88578330e103\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-mc7gs" Jan 20 17:20:10 crc kubenswrapper[4558]: I0120 17:20:10.590459 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f50f6bbe-7952-4b0d-aa1b-88578330e103-scripts\") pod \"nova-cell1-cell-mapping-mc7gs\" (UID: \"f50f6bbe-7952-4b0d-aa1b-88578330e103\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-mc7gs" Jan 20 17:20:10 crc kubenswrapper[4558]: I0120 17:20:10.590890 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f50f6bbe-7952-4b0d-aa1b-88578330e103-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-mc7gs\" (UID: \"f50f6bbe-7952-4b0d-aa1b-88578330e103\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-mc7gs" Jan 20 17:20:10 crc kubenswrapper[4558]: I0120 17:20:10.596411 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bk5x9\" (UniqueName: \"kubernetes.io/projected/f50f6bbe-7952-4b0d-aa1b-88578330e103-kube-api-access-bk5x9\") pod \"nova-cell1-cell-mapping-mc7gs\" (UID: \"f50f6bbe-7952-4b0d-aa1b-88578330e103\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-mc7gs" Jan 20 17:20:10 crc kubenswrapper[4558]: I0120 17:20:10.734781 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"fd789fa2-9e67-47ad-a16c-84f613376e9a","Type":"ContainerStarted","Data":"6bfc7c4eeeabb7764777f76623bd506d3ce621420962d421971008ad73e73f4f"} Jan 20 17:20:10 crc kubenswrapper[4558]: I0120 17:20:10.736551 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-mc7gs" Jan 20 17:20:10 crc kubenswrapper[4558]: I0120 17:20:10.750734 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-scheduler-0" podStartSLOduration=2.750720887 podStartE2EDuration="2.750720887s" podCreationTimestamp="2026-01-20 17:20:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:20:10.745449594 +0000 UTC m=+2304.505787560" watchObservedRunningTime="2026-01-20 17:20:10.750720887 +0000 UTC m=+2304.511058854" Jan 20 17:20:11 crc kubenswrapper[4558]: I0120 17:20:11.191685 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-cell-mapping-mc7gs"] Jan 20 17:20:11 crc kubenswrapper[4558]: I0120 17:20:11.759779 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-mc7gs" event={"ID":"f50f6bbe-7952-4b0d-aa1b-88578330e103","Type":"ContainerStarted","Data":"b34a6c671f617790fb62ac67cbbca0c103dc87a0f332ae0fb1b00118d75fcf77"} Jan 20 17:20:11 crc kubenswrapper[4558]: I0120 17:20:11.760124 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-mc7gs" event={"ID":"f50f6bbe-7952-4b0d-aa1b-88578330e103","Type":"ContainerStarted","Data":"9b351a9d3e0818c2ba3ccc2f0ad05645f1a54e3d250512795398aaf1c7cf77f4"} Jan 20 17:20:11 crc kubenswrapper[4558]: I0120 17:20:11.778961 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-mc7gs" podStartSLOduration=1.778949151 podStartE2EDuration="1.778949151s" podCreationTimestamp="2026-01-20 17:20:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:20:11.776517828 +0000 UTC m=+2305.536855795" watchObservedRunningTime="2026-01-20 17:20:11.778949151 +0000 UTC m=+2305.539287118" Jan 20 17:20:11 crc kubenswrapper[4558]: I0120 17:20:11.981470 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:20:11 crc kubenswrapper[4558]: I0120 17:20:11.981937 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:20:12 crc kubenswrapper[4558]: I0120 17:20:12.008457 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:20:12 crc kubenswrapper[4558]: I0120 17:20:12.008694 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/kube-state-metrics-0" podUID="4bc1cce9-b6fd-4965-b6ff-b8ece1dfca79" containerName="kube-state-metrics" containerID="cri-o://09119e0ed2114641fce86c060b77da03c11248abf090a66ce6d4f78aa73834b7" gracePeriod=30 Jan 20 17:20:12 crc kubenswrapper[4558]: I0120 17:20:12.483674 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:20:12 crc kubenswrapper[4558]: I0120 17:20:12.518024 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7tplz\" (UniqueName: \"kubernetes.io/projected/4bc1cce9-b6fd-4965-b6ff-b8ece1dfca79-kube-api-access-7tplz\") pod \"4bc1cce9-b6fd-4965-b6ff-b8ece1dfca79\" (UID: \"4bc1cce9-b6fd-4965-b6ff-b8ece1dfca79\") " Jan 20 17:20:12 crc kubenswrapper[4558]: I0120 17:20:12.525404 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bc1cce9-b6fd-4965-b6ff-b8ece1dfca79-kube-api-access-7tplz" (OuterVolumeSpecName: "kube-api-access-7tplz") pod "4bc1cce9-b6fd-4965-b6ff-b8ece1dfca79" (UID: "4bc1cce9-b6fd-4965-b6ff-b8ece1dfca79"). InnerVolumeSpecName "kube-api-access-7tplz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:20:12 crc kubenswrapper[4558]: I0120 17:20:12.620699 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7tplz\" (UniqueName: \"kubernetes.io/projected/4bc1cce9-b6fd-4965-b6ff-b8ece1dfca79-kube-api-access-7tplz\") on node \"crc\" DevicePath \"\"" Jan 20 17:20:12 crc kubenswrapper[4558]: I0120 17:20:12.771872 4558 generic.go:334] "Generic (PLEG): container finished" podID="4bc1cce9-b6fd-4965-b6ff-b8ece1dfca79" containerID="09119e0ed2114641fce86c060b77da03c11248abf090a66ce6d4f78aa73834b7" exitCode=2 Jan 20 17:20:12 crc kubenswrapper[4558]: I0120 17:20:12.772739 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:20:12 crc kubenswrapper[4558]: I0120 17:20:12.773235 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"4bc1cce9-b6fd-4965-b6ff-b8ece1dfca79","Type":"ContainerDied","Data":"09119e0ed2114641fce86c060b77da03c11248abf090a66ce6d4f78aa73834b7"} Jan 20 17:20:12 crc kubenswrapper[4558]: I0120 17:20:12.773274 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"4bc1cce9-b6fd-4965-b6ff-b8ece1dfca79","Type":"ContainerDied","Data":"95efe6d096d623ee783b702009e3a4a4dea7b8f1070cef6caeed4cb346a2d594"} Jan 20 17:20:12 crc kubenswrapper[4558]: I0120 17:20:12.773297 4558 scope.go:117] "RemoveContainer" containerID="09119e0ed2114641fce86c060b77da03c11248abf090a66ce6d4f78aa73834b7" Jan 20 17:20:12 crc kubenswrapper[4558]: I0120 17:20:12.801842 4558 scope.go:117] "RemoveContainer" containerID="09119e0ed2114641fce86c060b77da03c11248abf090a66ce6d4f78aa73834b7" Jan 20 17:20:12 crc kubenswrapper[4558]: I0120 17:20:12.801829 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:20:12 crc kubenswrapper[4558]: E0120 17:20:12.802590 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"09119e0ed2114641fce86c060b77da03c11248abf090a66ce6d4f78aa73834b7\": container with ID starting with 09119e0ed2114641fce86c060b77da03c11248abf090a66ce6d4f78aa73834b7 not found: ID does not exist" containerID="09119e0ed2114641fce86c060b77da03c11248abf090a66ce6d4f78aa73834b7" Jan 20 17:20:12 crc kubenswrapper[4558]: I0120 17:20:12.802645 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"09119e0ed2114641fce86c060b77da03c11248abf090a66ce6d4f78aa73834b7"} err="failed to get container status 
\"09119e0ed2114641fce86c060b77da03c11248abf090a66ce6d4f78aa73834b7\": rpc error: code = NotFound desc = could not find container \"09119e0ed2114641fce86c060b77da03c11248abf090a66ce6d4f78aa73834b7\": container with ID starting with 09119e0ed2114641fce86c060b77da03c11248abf090a66ce6d4f78aa73834b7 not found: ID does not exist" Jan 20 17:20:12 crc kubenswrapper[4558]: I0120 17:20:12.812376 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:20:12 crc kubenswrapper[4558]: I0120 17:20:12.820228 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:20:12 crc kubenswrapper[4558]: E0120 17:20:12.826572 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4bc1cce9-b6fd-4965-b6ff-b8ece1dfca79" containerName="kube-state-metrics" Jan 20 17:20:12 crc kubenswrapper[4558]: I0120 17:20:12.826617 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4bc1cce9-b6fd-4965-b6ff-b8ece1dfca79" containerName="kube-state-metrics" Jan 20 17:20:12 crc kubenswrapper[4558]: I0120 17:20:12.827011 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="4bc1cce9-b6fd-4965-b6ff-b8ece1dfca79" containerName="kube-state-metrics" Jan 20 17:20:12 crc kubenswrapper[4558]: I0120 17:20:12.828131 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:20:12 crc kubenswrapper[4558]: I0120 17:20:12.828393 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:20:12 crc kubenswrapper[4558]: I0120 17:20:12.830633 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-kube-state-metrics-svc" Jan 20 17:20:12 crc kubenswrapper[4558]: I0120 17:20:12.830754 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"kube-state-metrics-tls-config" Jan 20 17:20:12 crc kubenswrapper[4558]: I0120 17:20:12.926964 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f07ba882-d06f-4937-ac0a-e66fcd719b8d-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"f07ba882-d06f-4937-ac0a-e66fcd719b8d\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:20:12 crc kubenswrapper[4558]: I0120 17:20:12.927143 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/f07ba882-d06f-4937-ac0a-e66fcd719b8d-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"f07ba882-d06f-4937-ac0a-e66fcd719b8d\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:20:12 crc kubenswrapper[4558]: I0120 17:20:12.927312 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-khdmg\" (UniqueName: \"kubernetes.io/projected/f07ba882-d06f-4937-ac0a-e66fcd719b8d-kube-api-access-khdmg\") pod \"kube-state-metrics-0\" (UID: \"f07ba882-d06f-4937-ac0a-e66fcd719b8d\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:20:12 crc kubenswrapper[4558]: I0120 17:20:12.927383 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/f07ba882-d06f-4937-ac0a-e66fcd719b8d-kube-state-metrics-tls-config\") 
pod \"kube-state-metrics-0\" (UID: \"f07ba882-d06f-4937-ac0a-e66fcd719b8d\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:20:12 crc kubenswrapper[4558]: I0120 17:20:12.995376 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-metadata-0" podUID="333fe534-20d7-4081-ad19-9caff5789f86" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.204:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:20:12 crc kubenswrapper[4558]: I0120 17:20:12.995417 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-metadata-0" podUID="333fe534-20d7-4081-ad19-9caff5789f86" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.204:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:20:13 crc kubenswrapper[4558]: I0120 17:20:13.029637 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-khdmg\" (UniqueName: \"kubernetes.io/projected/f07ba882-d06f-4937-ac0a-e66fcd719b8d-kube-api-access-khdmg\") pod \"kube-state-metrics-0\" (UID: \"f07ba882-d06f-4937-ac0a-e66fcd719b8d\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:20:13 crc kubenswrapper[4558]: I0120 17:20:13.029708 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/f07ba882-d06f-4937-ac0a-e66fcd719b8d-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"f07ba882-d06f-4937-ac0a-e66fcd719b8d\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:20:13 crc kubenswrapper[4558]: I0120 17:20:13.029877 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f07ba882-d06f-4937-ac0a-e66fcd719b8d-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"f07ba882-d06f-4937-ac0a-e66fcd719b8d\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:20:13 crc kubenswrapper[4558]: I0120 17:20:13.029944 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/f07ba882-d06f-4937-ac0a-e66fcd719b8d-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"f07ba882-d06f-4937-ac0a-e66fcd719b8d\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:20:13 crc kubenswrapper[4558]: I0120 17:20:13.037454 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/f07ba882-d06f-4937-ac0a-e66fcd719b8d-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"f07ba882-d06f-4937-ac0a-e66fcd719b8d\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:20:13 crc kubenswrapper[4558]: I0120 17:20:13.037656 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/f07ba882-d06f-4937-ac0a-e66fcd719b8d-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"f07ba882-d06f-4937-ac0a-e66fcd719b8d\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:20:13 crc kubenswrapper[4558]: I0120 17:20:13.039074 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/f07ba882-d06f-4937-ac0a-e66fcd719b8d-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"f07ba882-d06f-4937-ac0a-e66fcd719b8d\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:20:13 crc kubenswrapper[4558]: I0120 17:20:13.044363 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-khdmg\" (UniqueName: \"kubernetes.io/projected/f07ba882-d06f-4937-ac0a-e66fcd719b8d-kube-api-access-khdmg\") pod \"kube-state-metrics-0\" (UID: \"f07ba882-d06f-4937-ac0a-e66fcd719b8d\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:20:13 crc kubenswrapper[4558]: I0120 17:20:13.147131 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:20:13 crc kubenswrapper[4558]: I0120 17:20:13.596281 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:20:13 crc kubenswrapper[4558]: W0120 17:20:13.598945 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf07ba882_d06f_4937_ac0a_e66fcd719b8d.slice/crio-85e4fa8762e8b2aa90c99c4aed1717ca86a0b44825ed3ade66e5c8a4c5a6fd50 WatchSource:0}: Error finding container 85e4fa8762e8b2aa90c99c4aed1717ca86a0b44825ed3ade66e5c8a4c5a6fd50: Status 404 returned error can't find the container with id 85e4fa8762e8b2aa90c99c4aed1717ca86a0b44825ed3ade66e5c8a4c5a6fd50 Jan 20 17:20:13 crc kubenswrapper[4558]: I0120 17:20:13.670775 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:20:13 crc kubenswrapper[4558]: I0120 17:20:13.671038 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="c11b2eee-13cf-408d-9491-999f63a8d17d" containerName="ceilometer-central-agent" containerID="cri-o://d407bc3d33d63e6d07b6d9bde845c1c840d6bde6647d2e7de707c8f66f77f335" gracePeriod=30 Jan 20 17:20:13 crc kubenswrapper[4558]: I0120 17:20:13.671143 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="c11b2eee-13cf-408d-9491-999f63a8d17d" containerName="sg-core" containerID="cri-o://320e50bb2db9f8366b0723a0424263a4a16c5acd0e11b09caa9c6f4235d12b7d" gracePeriod=30 Jan 20 17:20:13 crc kubenswrapper[4558]: I0120 17:20:13.671152 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="c11b2eee-13cf-408d-9491-999f63a8d17d" containerName="ceilometer-notification-agent" containerID="cri-o://eac4d88ed5ece7169be09bc311c9059d08357e72d46f8f7d7499d7362b4ec861" gracePeriod=30 Jan 20 17:20:13 crc kubenswrapper[4558]: I0120 17:20:13.671256 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="c11b2eee-13cf-408d-9491-999f63a8d17d" containerName="proxy-httpd" containerID="cri-o://c7f179345fc798000e40658b959729e175165500b34eea0e2d2437d4788d294a" gracePeriod=30 Jan 20 17:20:13 crc kubenswrapper[4558]: I0120 17:20:13.800331 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"f07ba882-d06f-4937-ac0a-e66fcd719b8d","Type":"ContainerStarted","Data":"85e4fa8762e8b2aa90c99c4aed1717ca86a0b44825ed3ade66e5c8a4c5a6fd50"} Jan 20 17:20:14 crc kubenswrapper[4558]: I0120 17:20:14.092444 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:20:14 crc kubenswrapper[4558]: I0120 17:20:14.581284 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bc1cce9-b6fd-4965-b6ff-b8ece1dfca79" path="/var/lib/kubelet/pods/4bc1cce9-b6fd-4965-b6ff-b8ece1dfca79/volumes" Jan 20 17:20:14 crc kubenswrapper[4558]: I0120 17:20:14.814352 4558 generic.go:334] "Generic (PLEG): container finished" podID="c11b2eee-13cf-408d-9491-999f63a8d17d" containerID="c7f179345fc798000e40658b959729e175165500b34eea0e2d2437d4788d294a" exitCode=0 Jan 20 17:20:14 crc kubenswrapper[4558]: I0120 17:20:14.814397 4558 generic.go:334] "Generic (PLEG): container finished" podID="c11b2eee-13cf-408d-9491-999f63a8d17d" containerID="320e50bb2db9f8366b0723a0424263a4a16c5acd0e11b09caa9c6f4235d12b7d" exitCode=2 Jan 20 17:20:14 crc kubenswrapper[4558]: I0120 17:20:14.814405 4558 generic.go:334] "Generic (PLEG): container finished" podID="c11b2eee-13cf-408d-9491-999f63a8d17d" containerID="d407bc3d33d63e6d07b6d9bde845c1c840d6bde6647d2e7de707c8f66f77f335" exitCode=0 Jan 20 17:20:14 crc kubenswrapper[4558]: I0120 17:20:14.814467 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"c11b2eee-13cf-408d-9491-999f63a8d17d","Type":"ContainerDied","Data":"c7f179345fc798000e40658b959729e175165500b34eea0e2d2437d4788d294a"} Jan 20 17:20:14 crc kubenswrapper[4558]: I0120 17:20:14.814511 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"c11b2eee-13cf-408d-9491-999f63a8d17d","Type":"ContainerDied","Data":"320e50bb2db9f8366b0723a0424263a4a16c5acd0e11b09caa9c6f4235d12b7d"} Jan 20 17:20:14 crc kubenswrapper[4558]: I0120 17:20:14.814522 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"c11b2eee-13cf-408d-9491-999f63a8d17d","Type":"ContainerDied","Data":"d407bc3d33d63e6d07b6d9bde845c1c840d6bde6647d2e7de707c8f66f77f335"} Jan 20 17:20:14 crc kubenswrapper[4558]: I0120 17:20:14.816015 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"f07ba882-d06f-4937-ac0a-e66fcd719b8d","Type":"ContainerStarted","Data":"63c838d8c51346650a84faa8ff095a9f00ae13df2999dcf2d156cd7c1b906029"} Jan 20 17:20:14 crc kubenswrapper[4558]: I0120 17:20:14.817395 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:20:14 crc kubenswrapper[4558]: I0120 17:20:14.839407 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/kube-state-metrics-0" podStartSLOduration=2.53959847 podStartE2EDuration="2.83937893s" podCreationTimestamp="2026-01-20 17:20:12 +0000 UTC" firstStartedPulling="2026-01-20 17:20:13.600669134 +0000 UTC m=+2307.361007101" lastFinishedPulling="2026-01-20 17:20:13.900449604 +0000 UTC m=+2307.660787561" observedRunningTime="2026-01-20 17:20:14.829585403 +0000 UTC m=+2308.589923371" watchObservedRunningTime="2026-01-20 17:20:14.83937893 +0000 UTC m=+2308.599716898" Jan 20 17:20:15 crc kubenswrapper[4558]: I0120 17:20:15.795250 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:15 crc kubenswrapper[4558]: I0120 17:20:15.804828 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c11b2eee-13cf-408d-9491-999f63a8d17d-log-httpd\") pod \"c11b2eee-13cf-408d-9491-999f63a8d17d\" (UID: \"c11b2eee-13cf-408d-9491-999f63a8d17d\") " Jan 20 17:20:15 crc kubenswrapper[4558]: I0120 17:20:15.804961 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fpkb5\" (UniqueName: \"kubernetes.io/projected/c11b2eee-13cf-408d-9491-999f63a8d17d-kube-api-access-fpkb5\") pod \"c11b2eee-13cf-408d-9491-999f63a8d17d\" (UID: \"c11b2eee-13cf-408d-9491-999f63a8d17d\") " Jan 20 17:20:15 crc kubenswrapper[4558]: I0120 17:20:15.805245 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c11b2eee-13cf-408d-9491-999f63a8d17d-config-data\") pod \"c11b2eee-13cf-408d-9491-999f63a8d17d\" (UID: \"c11b2eee-13cf-408d-9491-999f63a8d17d\") " Jan 20 17:20:15 crc kubenswrapper[4558]: I0120 17:20:15.805336 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c11b2eee-13cf-408d-9491-999f63a8d17d-sg-core-conf-yaml\") pod \"c11b2eee-13cf-408d-9491-999f63a8d17d\" (UID: \"c11b2eee-13cf-408d-9491-999f63a8d17d\") " Jan 20 17:20:15 crc kubenswrapper[4558]: I0120 17:20:15.805397 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c11b2eee-13cf-408d-9491-999f63a8d17d-run-httpd\") pod \"c11b2eee-13cf-408d-9491-999f63a8d17d\" (UID: \"c11b2eee-13cf-408d-9491-999f63a8d17d\") " Jan 20 17:20:15 crc kubenswrapper[4558]: I0120 17:20:15.805463 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c11b2eee-13cf-408d-9491-999f63a8d17d-scripts\") pod \"c11b2eee-13cf-408d-9491-999f63a8d17d\" (UID: \"c11b2eee-13cf-408d-9491-999f63a8d17d\") " Jan 20 17:20:15 crc kubenswrapper[4558]: I0120 17:20:15.805334 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c11b2eee-13cf-408d-9491-999f63a8d17d-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "c11b2eee-13cf-408d-9491-999f63a8d17d" (UID: "c11b2eee-13cf-408d-9491-999f63a8d17d"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:20:15 crc kubenswrapper[4558]: I0120 17:20:15.805728 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c11b2eee-13cf-408d-9491-999f63a8d17d-combined-ca-bundle\") pod \"c11b2eee-13cf-408d-9491-999f63a8d17d\" (UID: \"c11b2eee-13cf-408d-9491-999f63a8d17d\") " Jan 20 17:20:15 crc kubenswrapper[4558]: I0120 17:20:15.806612 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c11b2eee-13cf-408d-9491-999f63a8d17d-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "c11b2eee-13cf-408d-9491-999f63a8d17d" (UID: "c11b2eee-13cf-408d-9491-999f63a8d17d"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:20:15 crc kubenswrapper[4558]: I0120 17:20:15.807385 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c11b2eee-13cf-408d-9491-999f63a8d17d-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:20:15 crc kubenswrapper[4558]: I0120 17:20:15.807477 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c11b2eee-13cf-408d-9491-999f63a8d17d-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:20:15 crc kubenswrapper[4558]: I0120 17:20:15.815324 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c11b2eee-13cf-408d-9491-999f63a8d17d-scripts" (OuterVolumeSpecName: "scripts") pod "c11b2eee-13cf-408d-9491-999f63a8d17d" (UID: "c11b2eee-13cf-408d-9491-999f63a8d17d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:20:15 crc kubenswrapper[4558]: I0120 17:20:15.815524 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c11b2eee-13cf-408d-9491-999f63a8d17d-kube-api-access-fpkb5" (OuterVolumeSpecName: "kube-api-access-fpkb5") pod "c11b2eee-13cf-408d-9491-999f63a8d17d" (UID: "c11b2eee-13cf-408d-9491-999f63a8d17d"). InnerVolumeSpecName "kube-api-access-fpkb5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:20:15 crc kubenswrapper[4558]: I0120 17:20:15.831921 4558 generic.go:334] "Generic (PLEG): container finished" podID="f50f6bbe-7952-4b0d-aa1b-88578330e103" containerID="b34a6c671f617790fb62ac67cbbca0c103dc87a0f332ae0fb1b00118d75fcf77" exitCode=0 Jan 20 17:20:15 crc kubenswrapper[4558]: I0120 17:20:15.831992 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-mc7gs" event={"ID":"f50f6bbe-7952-4b0d-aa1b-88578330e103","Type":"ContainerDied","Data":"b34a6c671f617790fb62ac67cbbca0c103dc87a0f332ae0fb1b00118d75fcf77"} Jan 20 17:20:15 crc kubenswrapper[4558]: I0120 17:20:15.835254 4558 generic.go:334] "Generic (PLEG): container finished" podID="c11b2eee-13cf-408d-9491-999f63a8d17d" containerID="eac4d88ed5ece7169be09bc311c9059d08357e72d46f8f7d7499d7362b4ec861" exitCode=0 Jan 20 17:20:15 crc kubenswrapper[4558]: I0120 17:20:15.835303 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"c11b2eee-13cf-408d-9491-999f63a8d17d","Type":"ContainerDied","Data":"eac4d88ed5ece7169be09bc311c9059d08357e72d46f8f7d7499d7362b4ec861"} Jan 20 17:20:15 crc kubenswrapper[4558]: I0120 17:20:15.835330 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"c11b2eee-13cf-408d-9491-999f63a8d17d","Type":"ContainerDied","Data":"42f3307c82790a12fe5a33fa13c5602ba216db9d618fad5d336cb001f1a3c66a"} Jan 20 17:20:15 crc kubenswrapper[4558]: I0120 17:20:15.835351 4558 scope.go:117] "RemoveContainer" containerID="c7f179345fc798000e40658b959729e175165500b34eea0e2d2437d4788d294a" Jan 20 17:20:15 crc kubenswrapper[4558]: I0120 17:20:15.835484 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:15 crc kubenswrapper[4558]: I0120 17:20:15.864434 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c11b2eee-13cf-408d-9491-999f63a8d17d-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "c11b2eee-13cf-408d-9491-999f63a8d17d" (UID: "c11b2eee-13cf-408d-9491-999f63a8d17d"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:20:15 crc kubenswrapper[4558]: I0120 17:20:15.900052 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c11b2eee-13cf-408d-9491-999f63a8d17d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c11b2eee-13cf-408d-9491-999f63a8d17d" (UID: "c11b2eee-13cf-408d-9491-999f63a8d17d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:20:15 crc kubenswrapper[4558]: I0120 17:20:15.908349 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c11b2eee-13cf-408d-9491-999f63a8d17d-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:20:15 crc kubenswrapper[4558]: I0120 17:20:15.908372 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c11b2eee-13cf-408d-9491-999f63a8d17d-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:20:15 crc kubenswrapper[4558]: I0120 17:20:15.908382 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c11b2eee-13cf-408d-9491-999f63a8d17d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:20:15 crc kubenswrapper[4558]: I0120 17:20:15.908392 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fpkb5\" (UniqueName: \"kubernetes.io/projected/c11b2eee-13cf-408d-9491-999f63a8d17d-kube-api-access-fpkb5\") on node \"crc\" DevicePath \"\"" Jan 20 17:20:15 crc kubenswrapper[4558]: I0120 17:20:15.921192 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c11b2eee-13cf-408d-9491-999f63a8d17d-config-data" (OuterVolumeSpecName: "config-data") pod "c11b2eee-13cf-408d-9491-999f63a8d17d" (UID: "c11b2eee-13cf-408d-9491-999f63a8d17d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:20:15 crc kubenswrapper[4558]: I0120 17:20:15.929442 4558 scope.go:117] "RemoveContainer" containerID="320e50bb2db9f8366b0723a0424263a4a16c5acd0e11b09caa9c6f4235d12b7d" Jan 20 17:20:15 crc kubenswrapper[4558]: I0120 17:20:15.954567 4558 scope.go:117] "RemoveContainer" containerID="eac4d88ed5ece7169be09bc311c9059d08357e72d46f8f7d7499d7362b4ec861" Jan 20 17:20:15 crc kubenswrapper[4558]: I0120 17:20:15.978602 4558 scope.go:117] "RemoveContainer" containerID="d407bc3d33d63e6d07b6d9bde845c1c840d6bde6647d2e7de707c8f66f77f335" Jan 20 17:20:15 crc kubenswrapper[4558]: I0120 17:20:15.999292 4558 scope.go:117] "RemoveContainer" containerID="c7f179345fc798000e40658b959729e175165500b34eea0e2d2437d4788d294a" Jan 20 17:20:15 crc kubenswrapper[4558]: E0120 17:20:15.999694 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c7f179345fc798000e40658b959729e175165500b34eea0e2d2437d4788d294a\": container with ID starting with c7f179345fc798000e40658b959729e175165500b34eea0e2d2437d4788d294a not found: ID does not exist" containerID="c7f179345fc798000e40658b959729e175165500b34eea0e2d2437d4788d294a" Jan 20 17:20:15 crc kubenswrapper[4558]: I0120 17:20:15.999756 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c7f179345fc798000e40658b959729e175165500b34eea0e2d2437d4788d294a"} err="failed to get container status \"c7f179345fc798000e40658b959729e175165500b34eea0e2d2437d4788d294a\": rpc error: code = NotFound desc = could not find container \"c7f179345fc798000e40658b959729e175165500b34eea0e2d2437d4788d294a\": container with ID starting with c7f179345fc798000e40658b959729e175165500b34eea0e2d2437d4788d294a not found: ID does not exist" Jan 20 17:20:15 crc kubenswrapper[4558]: I0120 17:20:15.999792 4558 scope.go:117] "RemoveContainer" containerID="320e50bb2db9f8366b0723a0424263a4a16c5acd0e11b09caa9c6f4235d12b7d" Jan 20 17:20:16 crc kubenswrapper[4558]: E0120 17:20:16.000290 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"320e50bb2db9f8366b0723a0424263a4a16c5acd0e11b09caa9c6f4235d12b7d\": container with ID starting with 320e50bb2db9f8366b0723a0424263a4a16c5acd0e11b09caa9c6f4235d12b7d not found: ID does not exist" containerID="320e50bb2db9f8366b0723a0424263a4a16c5acd0e11b09caa9c6f4235d12b7d" Jan 20 17:20:16 crc kubenswrapper[4558]: I0120 17:20:16.000330 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"320e50bb2db9f8366b0723a0424263a4a16c5acd0e11b09caa9c6f4235d12b7d"} err="failed to get container status \"320e50bb2db9f8366b0723a0424263a4a16c5acd0e11b09caa9c6f4235d12b7d\": rpc error: code = NotFound desc = could not find container \"320e50bb2db9f8366b0723a0424263a4a16c5acd0e11b09caa9c6f4235d12b7d\": container with ID starting with 320e50bb2db9f8366b0723a0424263a4a16c5acd0e11b09caa9c6f4235d12b7d not found: ID does not exist" Jan 20 17:20:16 crc kubenswrapper[4558]: I0120 17:20:16.000360 4558 scope.go:117] "RemoveContainer" containerID="eac4d88ed5ece7169be09bc311c9059d08357e72d46f8f7d7499d7362b4ec861" Jan 20 17:20:16 crc kubenswrapper[4558]: E0120 17:20:16.000720 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eac4d88ed5ece7169be09bc311c9059d08357e72d46f8f7d7499d7362b4ec861\": container with ID starting with 
eac4d88ed5ece7169be09bc311c9059d08357e72d46f8f7d7499d7362b4ec861 not found: ID does not exist" containerID="eac4d88ed5ece7169be09bc311c9059d08357e72d46f8f7d7499d7362b4ec861" Jan 20 17:20:16 crc kubenswrapper[4558]: I0120 17:20:16.000775 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eac4d88ed5ece7169be09bc311c9059d08357e72d46f8f7d7499d7362b4ec861"} err="failed to get container status \"eac4d88ed5ece7169be09bc311c9059d08357e72d46f8f7d7499d7362b4ec861\": rpc error: code = NotFound desc = could not find container \"eac4d88ed5ece7169be09bc311c9059d08357e72d46f8f7d7499d7362b4ec861\": container with ID starting with eac4d88ed5ece7169be09bc311c9059d08357e72d46f8f7d7499d7362b4ec861 not found: ID does not exist" Jan 20 17:20:16 crc kubenswrapper[4558]: I0120 17:20:16.000808 4558 scope.go:117] "RemoveContainer" containerID="d407bc3d33d63e6d07b6d9bde845c1c840d6bde6647d2e7de707c8f66f77f335" Jan 20 17:20:16 crc kubenswrapper[4558]: E0120 17:20:16.001202 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d407bc3d33d63e6d07b6d9bde845c1c840d6bde6647d2e7de707c8f66f77f335\": container with ID starting with d407bc3d33d63e6d07b6d9bde845c1c840d6bde6647d2e7de707c8f66f77f335 not found: ID does not exist" containerID="d407bc3d33d63e6d07b6d9bde845c1c840d6bde6647d2e7de707c8f66f77f335" Jan 20 17:20:16 crc kubenswrapper[4558]: I0120 17:20:16.001237 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d407bc3d33d63e6d07b6d9bde845c1c840d6bde6647d2e7de707c8f66f77f335"} err="failed to get container status \"d407bc3d33d63e6d07b6d9bde845c1c840d6bde6647d2e7de707c8f66f77f335\": rpc error: code = NotFound desc = could not find container \"d407bc3d33d63e6d07b6d9bde845c1c840d6bde6647d2e7de707c8f66f77f335\": container with ID starting with d407bc3d33d63e6d07b6d9bde845c1c840d6bde6647d2e7de707c8f66f77f335 not found: ID does not exist" Jan 20 17:20:16 crc kubenswrapper[4558]: I0120 17:20:16.011836 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c11b2eee-13cf-408d-9491-999f63a8d17d-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:20:16 crc kubenswrapper[4558]: I0120 17:20:16.174351 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:20:16 crc kubenswrapper[4558]: I0120 17:20:16.182926 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:20:16 crc kubenswrapper[4558]: I0120 17:20:16.197598 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:20:16 crc kubenswrapper[4558]: E0120 17:20:16.198037 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c11b2eee-13cf-408d-9491-999f63a8d17d" containerName="ceilometer-notification-agent" Jan 20 17:20:16 crc kubenswrapper[4558]: I0120 17:20:16.198056 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c11b2eee-13cf-408d-9491-999f63a8d17d" containerName="ceilometer-notification-agent" Jan 20 17:20:16 crc kubenswrapper[4558]: E0120 17:20:16.198066 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c11b2eee-13cf-408d-9491-999f63a8d17d" containerName="ceilometer-central-agent" Jan 20 17:20:16 crc kubenswrapper[4558]: I0120 17:20:16.198072 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c11b2eee-13cf-408d-9491-999f63a8d17d" 
containerName="ceilometer-central-agent" Jan 20 17:20:16 crc kubenswrapper[4558]: E0120 17:20:16.198083 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c11b2eee-13cf-408d-9491-999f63a8d17d" containerName="sg-core" Jan 20 17:20:16 crc kubenswrapper[4558]: I0120 17:20:16.198088 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c11b2eee-13cf-408d-9491-999f63a8d17d" containerName="sg-core" Jan 20 17:20:16 crc kubenswrapper[4558]: E0120 17:20:16.198109 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c11b2eee-13cf-408d-9491-999f63a8d17d" containerName="proxy-httpd" Jan 20 17:20:16 crc kubenswrapper[4558]: I0120 17:20:16.198114 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c11b2eee-13cf-408d-9491-999f63a8d17d" containerName="proxy-httpd" Jan 20 17:20:16 crc kubenswrapper[4558]: I0120 17:20:16.198328 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c11b2eee-13cf-408d-9491-999f63a8d17d" containerName="ceilometer-notification-agent" Jan 20 17:20:16 crc kubenswrapper[4558]: I0120 17:20:16.198345 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c11b2eee-13cf-408d-9491-999f63a8d17d" containerName="ceilometer-central-agent" Jan 20 17:20:16 crc kubenswrapper[4558]: I0120 17:20:16.198356 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c11b2eee-13cf-408d-9491-999f63a8d17d" containerName="proxy-httpd" Jan 20 17:20:16 crc kubenswrapper[4558]: I0120 17:20:16.198368 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c11b2eee-13cf-408d-9491-999f63a8d17d" containerName="sg-core" Jan 20 17:20:16 crc kubenswrapper[4558]: I0120 17:20:16.200064 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:16 crc kubenswrapper[4558]: I0120 17:20:16.201617 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:20:16 crc kubenswrapper[4558]: I0120 17:20:16.202898 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ceilometer-internal-svc" Jan 20 17:20:16 crc kubenswrapper[4558]: I0120 17:20:16.204678 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:20:16 crc kubenswrapper[4558]: I0120 17:20:16.214344 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:20:16 crc kubenswrapper[4558]: I0120 17:20:16.215055 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/590319f8-5fc7-4ecd-ab83-eb4d59262fd9-scripts\") pod \"ceilometer-0\" (UID: \"590319f8-5fc7-4ecd-ab83-eb4d59262fd9\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:16 crc kubenswrapper[4558]: I0120 17:20:16.215095 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/590319f8-5fc7-4ecd-ab83-eb4d59262fd9-run-httpd\") pod \"ceilometer-0\" (UID: \"590319f8-5fc7-4ecd-ab83-eb4d59262fd9\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:16 crc kubenswrapper[4558]: I0120 17:20:16.215131 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/590319f8-5fc7-4ecd-ab83-eb4d59262fd9-sg-core-conf-yaml\") pod 
\"ceilometer-0\" (UID: \"590319f8-5fc7-4ecd-ab83-eb4d59262fd9\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:16 crc kubenswrapper[4558]: I0120 17:20:16.215155 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fs5ct\" (UniqueName: \"kubernetes.io/projected/590319f8-5fc7-4ecd-ab83-eb4d59262fd9-kube-api-access-fs5ct\") pod \"ceilometer-0\" (UID: \"590319f8-5fc7-4ecd-ab83-eb4d59262fd9\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:16 crc kubenswrapper[4558]: I0120 17:20:16.215249 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/590319f8-5fc7-4ecd-ab83-eb4d59262fd9-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"590319f8-5fc7-4ecd-ab83-eb4d59262fd9\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:16 crc kubenswrapper[4558]: I0120 17:20:16.215293 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/590319f8-5fc7-4ecd-ab83-eb4d59262fd9-config-data\") pod \"ceilometer-0\" (UID: \"590319f8-5fc7-4ecd-ab83-eb4d59262fd9\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:16 crc kubenswrapper[4558]: I0120 17:20:16.215325 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/590319f8-5fc7-4ecd-ab83-eb4d59262fd9-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"590319f8-5fc7-4ecd-ab83-eb4d59262fd9\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:16 crc kubenswrapper[4558]: I0120 17:20:16.215353 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/590319f8-5fc7-4ecd-ab83-eb4d59262fd9-log-httpd\") pod \"ceilometer-0\" (UID: \"590319f8-5fc7-4ecd-ab83-eb4d59262fd9\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:16 crc kubenswrapper[4558]: I0120 17:20:16.317293 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/590319f8-5fc7-4ecd-ab83-eb4d59262fd9-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"590319f8-5fc7-4ecd-ab83-eb4d59262fd9\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:16 crc kubenswrapper[4558]: I0120 17:20:16.317372 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/590319f8-5fc7-4ecd-ab83-eb4d59262fd9-config-data\") pod \"ceilometer-0\" (UID: \"590319f8-5fc7-4ecd-ab83-eb4d59262fd9\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:16 crc kubenswrapper[4558]: I0120 17:20:16.317431 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/590319f8-5fc7-4ecd-ab83-eb4d59262fd9-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"590319f8-5fc7-4ecd-ab83-eb4d59262fd9\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:16 crc kubenswrapper[4558]: I0120 17:20:16.317481 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/590319f8-5fc7-4ecd-ab83-eb4d59262fd9-log-httpd\") pod \"ceilometer-0\" (UID: \"590319f8-5fc7-4ecd-ab83-eb4d59262fd9\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:16 crc 
kubenswrapper[4558]: I0120 17:20:16.317557 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/590319f8-5fc7-4ecd-ab83-eb4d59262fd9-scripts\") pod \"ceilometer-0\" (UID: \"590319f8-5fc7-4ecd-ab83-eb4d59262fd9\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:16 crc kubenswrapper[4558]: I0120 17:20:16.317583 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/590319f8-5fc7-4ecd-ab83-eb4d59262fd9-run-httpd\") pod \"ceilometer-0\" (UID: \"590319f8-5fc7-4ecd-ab83-eb4d59262fd9\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:16 crc kubenswrapper[4558]: I0120 17:20:16.317645 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/590319f8-5fc7-4ecd-ab83-eb4d59262fd9-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"590319f8-5fc7-4ecd-ab83-eb4d59262fd9\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:16 crc kubenswrapper[4558]: I0120 17:20:16.317678 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fs5ct\" (UniqueName: \"kubernetes.io/projected/590319f8-5fc7-4ecd-ab83-eb4d59262fd9-kube-api-access-fs5ct\") pod \"ceilometer-0\" (UID: \"590319f8-5fc7-4ecd-ab83-eb4d59262fd9\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:16 crc kubenswrapper[4558]: I0120 17:20:16.318578 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/590319f8-5fc7-4ecd-ab83-eb4d59262fd9-run-httpd\") pod \"ceilometer-0\" (UID: \"590319f8-5fc7-4ecd-ab83-eb4d59262fd9\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:16 crc kubenswrapper[4558]: I0120 17:20:16.318680 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/590319f8-5fc7-4ecd-ab83-eb4d59262fd9-log-httpd\") pod \"ceilometer-0\" (UID: \"590319f8-5fc7-4ecd-ab83-eb4d59262fd9\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:16 crc kubenswrapper[4558]: I0120 17:20:16.322442 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/590319f8-5fc7-4ecd-ab83-eb4d59262fd9-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"590319f8-5fc7-4ecd-ab83-eb4d59262fd9\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:16 crc kubenswrapper[4558]: I0120 17:20:16.323139 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/590319f8-5fc7-4ecd-ab83-eb4d59262fd9-config-data\") pod \"ceilometer-0\" (UID: \"590319f8-5fc7-4ecd-ab83-eb4d59262fd9\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:16 crc kubenswrapper[4558]: I0120 17:20:16.326048 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/590319f8-5fc7-4ecd-ab83-eb4d59262fd9-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"590319f8-5fc7-4ecd-ab83-eb4d59262fd9\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:16 crc kubenswrapper[4558]: I0120 17:20:16.327275 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/590319f8-5fc7-4ecd-ab83-eb4d59262fd9-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: 
\"590319f8-5fc7-4ecd-ab83-eb4d59262fd9\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:16 crc kubenswrapper[4558]: I0120 17:20:16.327491 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/590319f8-5fc7-4ecd-ab83-eb4d59262fd9-scripts\") pod \"ceilometer-0\" (UID: \"590319f8-5fc7-4ecd-ab83-eb4d59262fd9\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:16 crc kubenswrapper[4558]: I0120 17:20:16.333712 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fs5ct\" (UniqueName: \"kubernetes.io/projected/590319f8-5fc7-4ecd-ab83-eb4d59262fd9-kube-api-access-fs5ct\") pod \"ceilometer-0\" (UID: \"590319f8-5fc7-4ecd-ab83-eb4d59262fd9\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:16 crc kubenswrapper[4558]: I0120 17:20:16.516189 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:16 crc kubenswrapper[4558]: I0120 17:20:16.582815 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c11b2eee-13cf-408d-9491-999f63a8d17d" path="/var/lib/kubelet/pods/c11b2eee-13cf-408d-9491-999f63a8d17d/volumes" Jan 20 17:20:17 crc kubenswrapper[4558]: I0120 17:20:17.038726 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:20:17 crc kubenswrapper[4558]: I0120 17:20:17.053924 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:20:17 crc kubenswrapper[4558]: I0120 17:20:17.053985 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:20:17 crc kubenswrapper[4558]: I0120 17:20:17.114961 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-mc7gs" Jan 20 17:20:17 crc kubenswrapper[4558]: I0120 17:20:17.247259 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f50f6bbe-7952-4b0d-aa1b-88578330e103-config-data\") pod \"f50f6bbe-7952-4b0d-aa1b-88578330e103\" (UID: \"f50f6bbe-7952-4b0d-aa1b-88578330e103\") " Jan 20 17:20:17 crc kubenswrapper[4558]: I0120 17:20:17.247440 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f50f6bbe-7952-4b0d-aa1b-88578330e103-scripts\") pod \"f50f6bbe-7952-4b0d-aa1b-88578330e103\" (UID: \"f50f6bbe-7952-4b0d-aa1b-88578330e103\") " Jan 20 17:20:17 crc kubenswrapper[4558]: I0120 17:20:17.247493 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bk5x9\" (UniqueName: \"kubernetes.io/projected/f50f6bbe-7952-4b0d-aa1b-88578330e103-kube-api-access-bk5x9\") pod \"f50f6bbe-7952-4b0d-aa1b-88578330e103\" (UID: \"f50f6bbe-7952-4b0d-aa1b-88578330e103\") " Jan 20 17:20:17 crc kubenswrapper[4558]: I0120 17:20:17.247802 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f50f6bbe-7952-4b0d-aa1b-88578330e103-combined-ca-bundle\") pod \"f50f6bbe-7952-4b0d-aa1b-88578330e103\" (UID: \"f50f6bbe-7952-4b0d-aa1b-88578330e103\") " Jan 20 17:20:17 crc kubenswrapper[4558]: I0120 17:20:17.256420 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f50f6bbe-7952-4b0d-aa1b-88578330e103-kube-api-access-bk5x9" (OuterVolumeSpecName: "kube-api-access-bk5x9") pod "f50f6bbe-7952-4b0d-aa1b-88578330e103" (UID: "f50f6bbe-7952-4b0d-aa1b-88578330e103"). InnerVolumeSpecName "kube-api-access-bk5x9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:20:17 crc kubenswrapper[4558]: I0120 17:20:17.256498 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f50f6bbe-7952-4b0d-aa1b-88578330e103-scripts" (OuterVolumeSpecName: "scripts") pod "f50f6bbe-7952-4b0d-aa1b-88578330e103" (UID: "f50f6bbe-7952-4b0d-aa1b-88578330e103"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:20:17 crc kubenswrapper[4558]: I0120 17:20:17.280372 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f50f6bbe-7952-4b0d-aa1b-88578330e103-config-data" (OuterVolumeSpecName: "config-data") pod "f50f6bbe-7952-4b0d-aa1b-88578330e103" (UID: "f50f6bbe-7952-4b0d-aa1b-88578330e103"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:20:17 crc kubenswrapper[4558]: I0120 17:20:17.280960 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f50f6bbe-7952-4b0d-aa1b-88578330e103-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f50f6bbe-7952-4b0d-aa1b-88578330e103" (UID: "f50f6bbe-7952-4b0d-aa1b-88578330e103"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:20:17 crc kubenswrapper[4558]: I0120 17:20:17.351497 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f50f6bbe-7952-4b0d-aa1b-88578330e103-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:20:17 crc kubenswrapper[4558]: I0120 17:20:17.351533 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f50f6bbe-7952-4b0d-aa1b-88578330e103-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:20:17 crc kubenswrapper[4558]: I0120 17:20:17.351555 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f50f6bbe-7952-4b0d-aa1b-88578330e103-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:20:17 crc kubenswrapper[4558]: I0120 17:20:17.351571 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bk5x9\" (UniqueName: \"kubernetes.io/projected/f50f6bbe-7952-4b0d-aa1b-88578330e103-kube-api-access-bk5x9\") on node \"crc\" DevicePath \"\"" Jan 20 17:20:17 crc kubenswrapper[4558]: I0120 17:20:17.871475 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"590319f8-5fc7-4ecd-ab83-eb4d59262fd9","Type":"ContainerStarted","Data":"81e7c4f925c3e45867cdd806f74bba8ad198e38197924a7db0fd66975b17c937"} Jan 20 17:20:17 crc kubenswrapper[4558]: I0120 17:20:17.871524 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"590319f8-5fc7-4ecd-ab83-eb4d59262fd9","Type":"ContainerStarted","Data":"4b423c58baeeafc41a9ee9140d2059a719143546c5d8ba22b785924dae00df96"} Jan 20 17:20:17 crc kubenswrapper[4558]: I0120 17:20:17.873080 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-mc7gs" event={"ID":"f50f6bbe-7952-4b0d-aa1b-88578330e103","Type":"ContainerDied","Data":"9b351a9d3e0818c2ba3ccc2f0ad05645f1a54e3d250512795398aaf1c7cf77f4"} Jan 20 17:20:17 crc kubenswrapper[4558]: I0120 17:20:17.873130 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-mc7gs" Jan 20 17:20:17 crc kubenswrapper[4558]: I0120 17:20:17.873134 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9b351a9d3e0818c2ba3ccc2f0ad05645f1a54e3d250512795398aaf1c7cf77f4" Jan 20 17:20:18 crc kubenswrapper[4558]: I0120 17:20:18.049219 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:20:18 crc kubenswrapper[4558]: I0120 17:20:18.049501 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="52669504-5bf2-4ea7-af48-d8e25256df3a" containerName="nova-api-log" containerID="cri-o://3fc41f2436d25e582e3d1880db1cdbcbb54ff85b18e1cede6cc27ca7aa9718df" gracePeriod=30 Jan 20 17:20:18 crc kubenswrapper[4558]: I0120 17:20:18.049544 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="52669504-5bf2-4ea7-af48-d8e25256df3a" containerName="nova-api-api" containerID="cri-o://939c97fe4abf9f0204b6b9e6360703b7c26dbf5e0fef66c228871f01266dfa25" gracePeriod=30 Jan 20 17:20:18 crc kubenswrapper[4558]: I0120 17:20:18.058640 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" podUID="52669504-5bf2-4ea7-af48-d8e25256df3a" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.205:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 20 17:20:18 crc kubenswrapper[4558]: I0120 17:20:18.058636 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" podUID="52669504-5bf2-4ea7-af48-d8e25256df3a" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.205:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 20 17:20:18 crc kubenswrapper[4558]: I0120 17:20:18.058803 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:20:18 crc kubenswrapper[4558]: I0120 17:20:18.059018 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="fd789fa2-9e67-47ad-a16c-84f613376e9a" containerName="nova-scheduler-scheduler" containerID="cri-o://6bfc7c4eeeabb7764777f76623bd506d3ce621420962d421971008ad73e73f4f" gracePeriod=30 Jan 20 17:20:18 crc kubenswrapper[4558]: I0120 17:20:18.064418 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:20:18 crc kubenswrapper[4558]: I0120 17:20:18.064654 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="333fe534-20d7-4081-ad19-9caff5789f86" containerName="nova-metadata-log" containerID="cri-o://42f8774047cb1caa112b3941ceefba82f8f4780e8be212d055680736ef8b48d8" gracePeriod=30 Jan 20 17:20:18 crc kubenswrapper[4558]: I0120 17:20:18.065222 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="333fe534-20d7-4081-ad19-9caff5789f86" containerName="nova-metadata-metadata" containerID="cri-o://baff23dd43f4e034d383c7be8ae7a9d5a3dd0ec441be819834b472bdcbd0466d" gracePeriod=30 Jan 20 17:20:18 crc kubenswrapper[4558]: I0120 17:20:18.883649 4558 generic.go:334] "Generic (PLEG): container finished" podID="52669504-5bf2-4ea7-af48-d8e25256df3a" 
containerID="3fc41f2436d25e582e3d1880db1cdbcbb54ff85b18e1cede6cc27ca7aa9718df" exitCode=143 Jan 20 17:20:18 crc kubenswrapper[4558]: I0120 17:20:18.883698 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"52669504-5bf2-4ea7-af48-d8e25256df3a","Type":"ContainerDied","Data":"3fc41f2436d25e582e3d1880db1cdbcbb54ff85b18e1cede6cc27ca7aa9718df"} Jan 20 17:20:18 crc kubenswrapper[4558]: I0120 17:20:18.886205 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"590319f8-5fc7-4ecd-ab83-eb4d59262fd9","Type":"ContainerStarted","Data":"d3a96fa2ed8c1d387f7ca0bca75e6b24f97198f5d3efc7b29c6b348c9f0acaa3"} Jan 20 17:20:18 crc kubenswrapper[4558]: I0120 17:20:18.887795 4558 generic.go:334] "Generic (PLEG): container finished" podID="333fe534-20d7-4081-ad19-9caff5789f86" containerID="42f8774047cb1caa112b3941ceefba82f8f4780e8be212d055680736ef8b48d8" exitCode=143 Jan 20 17:20:18 crc kubenswrapper[4558]: I0120 17:20:18.887832 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"333fe534-20d7-4081-ad19-9caff5789f86","Type":"ContainerDied","Data":"42f8774047cb1caa112b3941ceefba82f8f4780e8be212d055680736ef8b48d8"} Jan 20 17:20:19 crc kubenswrapper[4558]: I0120 17:20:19.566847 4558 scope.go:117] "RemoveContainer" containerID="0ffce44366a8b4466427b682fb41640425366a0f4449210959148de9aef695c6" Jan 20 17:20:19 crc kubenswrapper[4558]: E0120 17:20:19.567633 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:20:19 crc kubenswrapper[4558]: I0120 17:20:19.906150 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"590319f8-5fc7-4ecd-ab83-eb4d59262fd9","Type":"ContainerStarted","Data":"537c61cfa49585fd06616291a85a452aae2d22ed19564435a3b36df407997f4b"} Jan 20 17:20:20 crc kubenswrapper[4558]: I0120 17:20:20.923049 4558 generic.go:334] "Generic (PLEG): container finished" podID="fd789fa2-9e67-47ad-a16c-84f613376e9a" containerID="6bfc7c4eeeabb7764777f76623bd506d3ce621420962d421971008ad73e73f4f" exitCode=0 Jan 20 17:20:20 crc kubenswrapper[4558]: I0120 17:20:20.923119 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"fd789fa2-9e67-47ad-a16c-84f613376e9a","Type":"ContainerDied","Data":"6bfc7c4eeeabb7764777f76623bd506d3ce621420962d421971008ad73e73f4f"} Jan 20 17:20:20 crc kubenswrapper[4558]: I0120 17:20:20.923426 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"fd789fa2-9e67-47ad-a16c-84f613376e9a","Type":"ContainerDied","Data":"323e90a49cb7fcad4b74be3d7daa58463f1cc5480c06dbe458d51ae2b184fa62"} Jan 20 17:20:20 crc kubenswrapper[4558]: I0120 17:20:20.923454 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="323e90a49cb7fcad4b74be3d7daa58463f1cc5480c06dbe458d51ae2b184fa62" Jan 20 17:20:20 crc kubenswrapper[4558]: I0120 17:20:20.979185 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:20:21 crc kubenswrapper[4558]: I0120 17:20:21.039179 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd789fa2-9e67-47ad-a16c-84f613376e9a-config-data\") pod \"fd789fa2-9e67-47ad-a16c-84f613376e9a\" (UID: \"fd789fa2-9e67-47ad-a16c-84f613376e9a\") " Jan 20 17:20:21 crc kubenswrapper[4558]: I0120 17:20:21.039255 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd789fa2-9e67-47ad-a16c-84f613376e9a-combined-ca-bundle\") pod \"fd789fa2-9e67-47ad-a16c-84f613376e9a\" (UID: \"fd789fa2-9e67-47ad-a16c-84f613376e9a\") " Jan 20 17:20:21 crc kubenswrapper[4558]: I0120 17:20:21.039323 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-592ns\" (UniqueName: \"kubernetes.io/projected/fd789fa2-9e67-47ad-a16c-84f613376e9a-kube-api-access-592ns\") pod \"fd789fa2-9e67-47ad-a16c-84f613376e9a\" (UID: \"fd789fa2-9e67-47ad-a16c-84f613376e9a\") " Jan 20 17:20:21 crc kubenswrapper[4558]: I0120 17:20:21.045589 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fd789fa2-9e67-47ad-a16c-84f613376e9a-kube-api-access-592ns" (OuterVolumeSpecName: "kube-api-access-592ns") pod "fd789fa2-9e67-47ad-a16c-84f613376e9a" (UID: "fd789fa2-9e67-47ad-a16c-84f613376e9a"). InnerVolumeSpecName "kube-api-access-592ns". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:20:21 crc kubenswrapper[4558]: I0120 17:20:21.064652 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd789fa2-9e67-47ad-a16c-84f613376e9a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fd789fa2-9e67-47ad-a16c-84f613376e9a" (UID: "fd789fa2-9e67-47ad-a16c-84f613376e9a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:20:21 crc kubenswrapper[4558]: I0120 17:20:21.066123 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd789fa2-9e67-47ad-a16c-84f613376e9a-config-data" (OuterVolumeSpecName: "config-data") pod "fd789fa2-9e67-47ad-a16c-84f613376e9a" (UID: "fd789fa2-9e67-47ad-a16c-84f613376e9a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:20:21 crc kubenswrapper[4558]: I0120 17:20:21.141866 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-592ns\" (UniqueName: \"kubernetes.io/projected/fd789fa2-9e67-47ad-a16c-84f613376e9a-kube-api-access-592ns\") on node \"crc\" DevicePath \"\"" Jan 20 17:20:21 crc kubenswrapper[4558]: I0120 17:20:21.141901 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd789fa2-9e67-47ad-a16c-84f613376e9a-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:20:21 crc kubenswrapper[4558]: I0120 17:20:21.141917 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd789fa2-9e67-47ad-a16c-84f613376e9a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:20:21 crc kubenswrapper[4558]: I0120 17:20:21.504077 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:20:21 crc kubenswrapper[4558]: I0120 17:20:21.554256 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hjpn4\" (UniqueName: \"kubernetes.io/projected/333fe534-20d7-4081-ad19-9caff5789f86-kube-api-access-hjpn4\") pod \"333fe534-20d7-4081-ad19-9caff5789f86\" (UID: \"333fe534-20d7-4081-ad19-9caff5789f86\") " Jan 20 17:20:21 crc kubenswrapper[4558]: I0120 17:20:21.554389 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/333fe534-20d7-4081-ad19-9caff5789f86-nova-metadata-tls-certs\") pod \"333fe534-20d7-4081-ad19-9caff5789f86\" (UID: \"333fe534-20d7-4081-ad19-9caff5789f86\") " Jan 20 17:20:21 crc kubenswrapper[4558]: I0120 17:20:21.554434 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/333fe534-20d7-4081-ad19-9caff5789f86-combined-ca-bundle\") pod \"333fe534-20d7-4081-ad19-9caff5789f86\" (UID: \"333fe534-20d7-4081-ad19-9caff5789f86\") " Jan 20 17:20:21 crc kubenswrapper[4558]: I0120 17:20:21.554521 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/333fe534-20d7-4081-ad19-9caff5789f86-config-data\") pod \"333fe534-20d7-4081-ad19-9caff5789f86\" (UID: \"333fe534-20d7-4081-ad19-9caff5789f86\") " Jan 20 17:20:21 crc kubenswrapper[4558]: I0120 17:20:21.554756 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/333fe534-20d7-4081-ad19-9caff5789f86-logs\") pod \"333fe534-20d7-4081-ad19-9caff5789f86\" (UID: \"333fe534-20d7-4081-ad19-9caff5789f86\") " Jan 20 17:20:21 crc kubenswrapper[4558]: I0120 17:20:21.555597 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/333fe534-20d7-4081-ad19-9caff5789f86-logs" (OuterVolumeSpecName: "logs") pod "333fe534-20d7-4081-ad19-9caff5789f86" (UID: "333fe534-20d7-4081-ad19-9caff5789f86"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:20:21 crc kubenswrapper[4558]: I0120 17:20:21.560503 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/333fe534-20d7-4081-ad19-9caff5789f86-kube-api-access-hjpn4" (OuterVolumeSpecName: "kube-api-access-hjpn4") pod "333fe534-20d7-4081-ad19-9caff5789f86" (UID: "333fe534-20d7-4081-ad19-9caff5789f86"). InnerVolumeSpecName "kube-api-access-hjpn4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:20:21 crc kubenswrapper[4558]: I0120 17:20:21.582194 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/333fe534-20d7-4081-ad19-9caff5789f86-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "333fe534-20d7-4081-ad19-9caff5789f86" (UID: "333fe534-20d7-4081-ad19-9caff5789f86"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:20:21 crc kubenswrapper[4558]: I0120 17:20:21.582991 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/333fe534-20d7-4081-ad19-9caff5789f86-config-data" (OuterVolumeSpecName: "config-data") pod "333fe534-20d7-4081-ad19-9caff5789f86" (UID: "333fe534-20d7-4081-ad19-9caff5789f86"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:20:21 crc kubenswrapper[4558]: I0120 17:20:21.604424 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/333fe534-20d7-4081-ad19-9caff5789f86-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "333fe534-20d7-4081-ad19-9caff5789f86" (UID: "333fe534-20d7-4081-ad19-9caff5789f86"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:20:21 crc kubenswrapper[4558]: I0120 17:20:21.658738 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/333fe534-20d7-4081-ad19-9caff5789f86-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:20:21 crc kubenswrapper[4558]: I0120 17:20:21.658785 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hjpn4\" (UniqueName: \"kubernetes.io/projected/333fe534-20d7-4081-ad19-9caff5789f86-kube-api-access-hjpn4\") on node \"crc\" DevicePath \"\"" Jan 20 17:20:21 crc kubenswrapper[4558]: I0120 17:20:21.658799 4558 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/333fe534-20d7-4081-ad19-9caff5789f86-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:20:21 crc kubenswrapper[4558]: I0120 17:20:21.658811 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/333fe534-20d7-4081-ad19-9caff5789f86-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:20:21 crc kubenswrapper[4558]: I0120 17:20:21.658822 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/333fe534-20d7-4081-ad19-9caff5789f86-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:20:21 crc kubenswrapper[4558]: I0120 17:20:21.937122 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"590319f8-5fc7-4ecd-ab83-eb4d59262fd9","Type":"ContainerStarted","Data":"fd74c139943f48673f27e23f704b12a18fc952b16d5c63dad1e7eb14fba84a62"} Jan 20 17:20:21 crc kubenswrapper[4558]: I0120 17:20:21.937674 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:21 crc kubenswrapper[4558]: I0120 17:20:21.939757 4558 generic.go:334] "Generic (PLEG): container finished" podID="333fe534-20d7-4081-ad19-9caff5789f86" containerID="baff23dd43f4e034d383c7be8ae7a9d5a3dd0ec441be819834b472bdcbd0466d" exitCode=0 Jan 20 17:20:21 crc kubenswrapper[4558]: I0120 17:20:21.939874 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:20:21 crc kubenswrapper[4558]: I0120 17:20:21.940154 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:20:21 crc kubenswrapper[4558]: I0120 17:20:21.943317 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"333fe534-20d7-4081-ad19-9caff5789f86","Type":"ContainerDied","Data":"baff23dd43f4e034d383c7be8ae7a9d5a3dd0ec441be819834b472bdcbd0466d"} Jan 20 17:20:21 crc kubenswrapper[4558]: I0120 17:20:21.943373 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"333fe534-20d7-4081-ad19-9caff5789f86","Type":"ContainerDied","Data":"989bce89f7febf007bf8011a75a5a94c9e27baf504881f5b14aeb7d11e9bafd9"} Jan 20 17:20:21 crc kubenswrapper[4558]: I0120 17:20:21.943404 4558 scope.go:117] "RemoveContainer" containerID="baff23dd43f4e034d383c7be8ae7a9d5a3dd0ec441be819834b472bdcbd0466d" Jan 20 17:20:21 crc kubenswrapper[4558]: I0120 17:20:21.965441 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=1.389088305 podStartE2EDuration="5.965432198s" podCreationTimestamp="2026-01-20 17:20:16 +0000 UTC" firstStartedPulling="2026-01-20 17:20:17.021889134 +0000 UTC m=+2310.782227101" lastFinishedPulling="2026-01-20 17:20:21.598233026 +0000 UTC m=+2315.358570994" observedRunningTime="2026-01-20 17:20:21.959017315 +0000 UTC m=+2315.719355342" watchObservedRunningTime="2026-01-20 17:20:21.965432198 +0000 UTC m=+2315.725770165" Jan 20 17:20:21 crc kubenswrapper[4558]: I0120 17:20:21.977318 4558 scope.go:117] "RemoveContainer" containerID="42f8774047cb1caa112b3941ceefba82f8f4780e8be212d055680736ef8b48d8" Jan 20 17:20:21 crc kubenswrapper[4558]: I0120 17:20:21.995923 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:20:22 crc kubenswrapper[4558]: I0120 17:20:22.005078 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:20:22 crc kubenswrapper[4558]: I0120 17:20:22.006265 4558 scope.go:117] "RemoveContainer" containerID="baff23dd43f4e034d383c7be8ae7a9d5a3dd0ec441be819834b472bdcbd0466d" Jan 20 17:20:22 crc kubenswrapper[4558]: E0120 17:20:22.009255 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"baff23dd43f4e034d383c7be8ae7a9d5a3dd0ec441be819834b472bdcbd0466d\": container with ID starting with baff23dd43f4e034d383c7be8ae7a9d5a3dd0ec441be819834b472bdcbd0466d not found: ID does not exist" containerID="baff23dd43f4e034d383c7be8ae7a9d5a3dd0ec441be819834b472bdcbd0466d" Jan 20 17:20:22 crc kubenswrapper[4558]: I0120 17:20:22.009297 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"baff23dd43f4e034d383c7be8ae7a9d5a3dd0ec441be819834b472bdcbd0466d"} err="failed to get container status \"baff23dd43f4e034d383c7be8ae7a9d5a3dd0ec441be819834b472bdcbd0466d\": rpc error: code = NotFound desc = could not find container \"baff23dd43f4e034d383c7be8ae7a9d5a3dd0ec441be819834b472bdcbd0466d\": container with ID starting with baff23dd43f4e034d383c7be8ae7a9d5a3dd0ec441be819834b472bdcbd0466d not found: ID does not exist" Jan 20 17:20:22 crc kubenswrapper[4558]: I0120 17:20:22.009327 4558 scope.go:117] "RemoveContainer" containerID="42f8774047cb1caa112b3941ceefba82f8f4780e8be212d055680736ef8b48d8" Jan 20 17:20:22 crc kubenswrapper[4558]: E0120 17:20:22.009827 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc 
error: code = NotFound desc = could not find container \"42f8774047cb1caa112b3941ceefba82f8f4780e8be212d055680736ef8b48d8\": container with ID starting with 42f8774047cb1caa112b3941ceefba82f8f4780e8be212d055680736ef8b48d8 not found: ID does not exist" containerID="42f8774047cb1caa112b3941ceefba82f8f4780e8be212d055680736ef8b48d8" Jan 20 17:20:22 crc kubenswrapper[4558]: I0120 17:20:22.009854 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"42f8774047cb1caa112b3941ceefba82f8f4780e8be212d055680736ef8b48d8"} err="failed to get container status \"42f8774047cb1caa112b3941ceefba82f8f4780e8be212d055680736ef8b48d8\": rpc error: code = NotFound desc = could not find container \"42f8774047cb1caa112b3941ceefba82f8f4780e8be212d055680736ef8b48d8\": container with ID starting with 42f8774047cb1caa112b3941ceefba82f8f4780e8be212d055680736ef8b48d8 not found: ID does not exist" Jan 20 17:20:22 crc kubenswrapper[4558]: I0120 17:20:22.017590 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:20:22 crc kubenswrapper[4558]: I0120 17:20:22.023734 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:20:22 crc kubenswrapper[4558]: I0120 17:20:22.029476 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:20:22 crc kubenswrapper[4558]: E0120 17:20:22.029865 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f50f6bbe-7952-4b0d-aa1b-88578330e103" containerName="nova-manage" Jan 20 17:20:22 crc kubenswrapper[4558]: I0120 17:20:22.029885 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f50f6bbe-7952-4b0d-aa1b-88578330e103" containerName="nova-manage" Jan 20 17:20:22 crc kubenswrapper[4558]: E0120 17:20:22.029920 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd789fa2-9e67-47ad-a16c-84f613376e9a" containerName="nova-scheduler-scheduler" Jan 20 17:20:22 crc kubenswrapper[4558]: I0120 17:20:22.029928 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd789fa2-9e67-47ad-a16c-84f613376e9a" containerName="nova-scheduler-scheduler" Jan 20 17:20:22 crc kubenswrapper[4558]: E0120 17:20:22.029945 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="333fe534-20d7-4081-ad19-9caff5789f86" containerName="nova-metadata-log" Jan 20 17:20:22 crc kubenswrapper[4558]: I0120 17:20:22.029951 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="333fe534-20d7-4081-ad19-9caff5789f86" containerName="nova-metadata-log" Jan 20 17:20:22 crc kubenswrapper[4558]: E0120 17:20:22.029959 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="333fe534-20d7-4081-ad19-9caff5789f86" containerName="nova-metadata-metadata" Jan 20 17:20:22 crc kubenswrapper[4558]: I0120 17:20:22.029964 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="333fe534-20d7-4081-ad19-9caff5789f86" containerName="nova-metadata-metadata" Jan 20 17:20:22 crc kubenswrapper[4558]: I0120 17:20:22.030132 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd789fa2-9e67-47ad-a16c-84f613376e9a" containerName="nova-scheduler-scheduler" Jan 20 17:20:22 crc kubenswrapper[4558]: I0120 17:20:22.030153 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="333fe534-20d7-4081-ad19-9caff5789f86" containerName="nova-metadata-log" Jan 20 17:20:22 crc kubenswrapper[4558]: I0120 17:20:22.031219 4558 memory_manager.go:354] "RemoveStaleState 
removing state" podUID="333fe534-20d7-4081-ad19-9caff5789f86" containerName="nova-metadata-metadata" Jan 20 17:20:22 crc kubenswrapper[4558]: I0120 17:20:22.031260 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f50f6bbe-7952-4b0d-aa1b-88578330e103" containerName="nova-manage" Jan 20 17:20:22 crc kubenswrapper[4558]: I0120 17:20:22.031953 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:20:22 crc kubenswrapper[4558]: I0120 17:20:22.035837 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-scheduler-config-data" Jan 20 17:20:22 crc kubenswrapper[4558]: I0120 17:20:22.040648 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:20:22 crc kubenswrapper[4558]: I0120 17:20:22.049776 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:20:22 crc kubenswrapper[4558]: I0120 17:20:22.053005 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:20:22 crc kubenswrapper[4558]: I0120 17:20:22.054918 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-metadata-config-data" Jan 20 17:20:22 crc kubenswrapper[4558]: I0120 17:20:22.056309 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-metadata-internal-svc" Jan 20 17:20:22 crc kubenswrapper[4558]: I0120 17:20:22.062400 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:20:22 crc kubenswrapper[4558]: I0120 17:20:22.069732 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8eb09b19-2337-4fa1-8c9a-a3a45c9d8a48-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"8eb09b19-2337-4fa1-8c9a-a3a45c9d8a48\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:20:22 crc kubenswrapper[4558]: I0120 17:20:22.069776 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8eb09b19-2337-4fa1-8c9a-a3a45c9d8a48-config-data\") pod \"nova-scheduler-0\" (UID: \"8eb09b19-2337-4fa1-8c9a-a3a45c9d8a48\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:20:22 crc kubenswrapper[4558]: I0120 17:20:22.069817 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/09d95674-9970-471d-8061-997e69ddda11-config-data\") pod \"nova-metadata-0\" (UID: \"09d95674-9970-471d-8061-997e69ddda11\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:20:22 crc kubenswrapper[4558]: I0120 17:20:22.069843 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/09d95674-9970-471d-8061-997e69ddda11-logs\") pod \"nova-metadata-0\" (UID: \"09d95674-9970-471d-8061-997e69ddda11\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:20:22 crc kubenswrapper[4558]: I0120 17:20:22.069906 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/09d95674-9970-471d-8061-997e69ddda11-nova-metadata-tls-certs\") pod 
\"nova-metadata-0\" (UID: \"09d95674-9970-471d-8061-997e69ddda11\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:20:22 crc kubenswrapper[4558]: I0120 17:20:22.069946 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c4b8x\" (UniqueName: \"kubernetes.io/projected/09d95674-9970-471d-8061-997e69ddda11-kube-api-access-c4b8x\") pod \"nova-metadata-0\" (UID: \"09d95674-9970-471d-8061-997e69ddda11\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:20:22 crc kubenswrapper[4558]: I0120 17:20:22.070010 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09d95674-9970-471d-8061-997e69ddda11-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"09d95674-9970-471d-8061-997e69ddda11\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:20:22 crc kubenswrapper[4558]: I0120 17:20:22.070050 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jcncz\" (UniqueName: \"kubernetes.io/projected/8eb09b19-2337-4fa1-8c9a-a3a45c9d8a48-kube-api-access-jcncz\") pod \"nova-scheduler-0\" (UID: \"8eb09b19-2337-4fa1-8c9a-a3a45c9d8a48\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:20:22 crc kubenswrapper[4558]: I0120 17:20:22.171518 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8eb09b19-2337-4fa1-8c9a-a3a45c9d8a48-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"8eb09b19-2337-4fa1-8c9a-a3a45c9d8a48\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:20:22 crc kubenswrapper[4558]: I0120 17:20:22.171596 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8eb09b19-2337-4fa1-8c9a-a3a45c9d8a48-config-data\") pod \"nova-scheduler-0\" (UID: \"8eb09b19-2337-4fa1-8c9a-a3a45c9d8a48\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:20:22 crc kubenswrapper[4558]: I0120 17:20:22.171647 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/09d95674-9970-471d-8061-997e69ddda11-config-data\") pod \"nova-metadata-0\" (UID: \"09d95674-9970-471d-8061-997e69ddda11\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:20:22 crc kubenswrapper[4558]: I0120 17:20:22.171672 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/09d95674-9970-471d-8061-997e69ddda11-logs\") pod \"nova-metadata-0\" (UID: \"09d95674-9970-471d-8061-997e69ddda11\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:20:22 crc kubenswrapper[4558]: I0120 17:20:22.171702 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/09d95674-9970-471d-8061-997e69ddda11-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"09d95674-9970-471d-8061-997e69ddda11\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:20:22 crc kubenswrapper[4558]: I0120 17:20:22.171752 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c4b8x\" (UniqueName: \"kubernetes.io/projected/09d95674-9970-471d-8061-997e69ddda11-kube-api-access-c4b8x\") pod \"nova-metadata-0\" (UID: \"09d95674-9970-471d-8061-997e69ddda11\") " 
pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:20:22 crc kubenswrapper[4558]: I0120 17:20:22.171831 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09d95674-9970-471d-8061-997e69ddda11-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"09d95674-9970-471d-8061-997e69ddda11\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:20:22 crc kubenswrapper[4558]: I0120 17:20:22.171874 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jcncz\" (UniqueName: \"kubernetes.io/projected/8eb09b19-2337-4fa1-8c9a-a3a45c9d8a48-kube-api-access-jcncz\") pod \"nova-scheduler-0\" (UID: \"8eb09b19-2337-4fa1-8c9a-a3a45c9d8a48\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:20:22 crc kubenswrapper[4558]: I0120 17:20:22.174455 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/09d95674-9970-471d-8061-997e69ddda11-logs\") pod \"nova-metadata-0\" (UID: \"09d95674-9970-471d-8061-997e69ddda11\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:20:22 crc kubenswrapper[4558]: I0120 17:20:22.177991 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8eb09b19-2337-4fa1-8c9a-a3a45c9d8a48-config-data\") pod \"nova-scheduler-0\" (UID: \"8eb09b19-2337-4fa1-8c9a-a3a45c9d8a48\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:20:22 crc kubenswrapper[4558]: I0120 17:20:22.178385 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8eb09b19-2337-4fa1-8c9a-a3a45c9d8a48-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"8eb09b19-2337-4fa1-8c9a-a3a45c9d8a48\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:20:22 crc kubenswrapper[4558]: I0120 17:20:22.179219 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/09d95674-9970-471d-8061-997e69ddda11-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"09d95674-9970-471d-8061-997e69ddda11\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:20:22 crc kubenswrapper[4558]: I0120 17:20:22.179944 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/09d95674-9970-471d-8061-997e69ddda11-config-data\") pod \"nova-metadata-0\" (UID: \"09d95674-9970-471d-8061-997e69ddda11\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:20:22 crc kubenswrapper[4558]: I0120 17:20:22.185634 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09d95674-9970-471d-8061-997e69ddda11-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"09d95674-9970-471d-8061-997e69ddda11\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:20:22 crc kubenswrapper[4558]: I0120 17:20:22.187912 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c4b8x\" (UniqueName: \"kubernetes.io/projected/09d95674-9970-471d-8061-997e69ddda11-kube-api-access-c4b8x\") pod \"nova-metadata-0\" (UID: \"09d95674-9970-471d-8061-997e69ddda11\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:20:22 crc kubenswrapper[4558]: I0120 17:20:22.194259 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jcncz\" 
(UniqueName: \"kubernetes.io/projected/8eb09b19-2337-4fa1-8c9a-a3a45c9d8a48-kube-api-access-jcncz\") pod \"nova-scheduler-0\" (UID: \"8eb09b19-2337-4fa1-8c9a-a3a45c9d8a48\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:20:22 crc kubenswrapper[4558]: I0120 17:20:22.354997 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:20:22 crc kubenswrapper[4558]: I0120 17:20:22.366400 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:20:22 crc kubenswrapper[4558]: I0120 17:20:22.584037 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="333fe534-20d7-4081-ad19-9caff5789f86" path="/var/lib/kubelet/pods/333fe534-20d7-4081-ad19-9caff5789f86/volumes" Jan 20 17:20:22 crc kubenswrapper[4558]: I0120 17:20:22.584970 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fd789fa2-9e67-47ad-a16c-84f613376e9a" path="/var/lib/kubelet/pods/fd789fa2-9e67-47ad-a16c-84f613376e9a/volumes" Jan 20 17:20:22 crc kubenswrapper[4558]: I0120 17:20:22.807908 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:20:22 crc kubenswrapper[4558]: W0120 17:20:22.809385 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8eb09b19_2337_4fa1_8c9a_a3a45c9d8a48.slice/crio-6889cb3ea4db5abe63bcb930d3789df4e6a069fa4d913d711b8a3640b230076d WatchSource:0}: Error finding container 6889cb3ea4db5abe63bcb930d3789df4e6a069fa4d913d711b8a3640b230076d: Status 404 returned error can't find the container with id 6889cb3ea4db5abe63bcb930d3789df4e6a069fa4d913d711b8a3640b230076d Jan 20 17:20:22 crc kubenswrapper[4558]: W0120 17:20:22.921659 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod09d95674_9970_471d_8061_997e69ddda11.slice/crio-eb79635f225ca9e6f5dfb4cbc3fdc0d4ac29a6eeed2163e63d4fa1536097a060 WatchSource:0}: Error finding container eb79635f225ca9e6f5dfb4cbc3fdc0d4ac29a6eeed2163e63d4fa1536097a060: Status 404 returned error can't find the container with id eb79635f225ca9e6f5dfb4cbc3fdc0d4ac29a6eeed2163e63d4fa1536097a060 Jan 20 17:20:22 crc kubenswrapper[4558]: I0120 17:20:22.921993 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:20:22 crc kubenswrapper[4558]: I0120 17:20:22.945993 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:20:22 crc kubenswrapper[4558]: I0120 17:20:22.955046 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"8eb09b19-2337-4fa1-8c9a-a3a45c9d8a48","Type":"ContainerStarted","Data":"6889cb3ea4db5abe63bcb930d3789df4e6a069fa4d913d711b8a3640b230076d"} Jan 20 17:20:22 crc kubenswrapper[4558]: I0120 17:20:22.962527 4558 generic.go:334] "Generic (PLEG): container finished" podID="52669504-5bf2-4ea7-af48-d8e25256df3a" containerID="939c97fe4abf9f0204b6b9e6360703b7c26dbf5e0fef66c228871f01266dfa25" exitCode=0 Jan 20 17:20:22 crc kubenswrapper[4558]: I0120 17:20:22.962609 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:20:22 crc kubenswrapper[4558]: I0120 17:20:22.962615 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"52669504-5bf2-4ea7-af48-d8e25256df3a","Type":"ContainerDied","Data":"939c97fe4abf9f0204b6b9e6360703b7c26dbf5e0fef66c228871f01266dfa25"} Jan 20 17:20:22 crc kubenswrapper[4558]: I0120 17:20:22.962649 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"52669504-5bf2-4ea7-af48-d8e25256df3a","Type":"ContainerDied","Data":"1d27645c7b569a7b27ca33871d7e4cbcafb17d65fbbb9b8a04902060625c9323"} Jan 20 17:20:22 crc kubenswrapper[4558]: I0120 17:20:22.962676 4558 scope.go:117] "RemoveContainer" containerID="939c97fe4abf9f0204b6b9e6360703b7c26dbf5e0fef66c228871f01266dfa25" Jan 20 17:20:22 crc kubenswrapper[4558]: I0120 17:20:22.964409 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"09d95674-9970-471d-8061-997e69ddda11","Type":"ContainerStarted","Data":"eb79635f225ca9e6f5dfb4cbc3fdc0d4ac29a6eeed2163e63d4fa1536097a060"} Jan 20 17:20:22 crc kubenswrapper[4558]: I0120 17:20:22.984350 4558 scope.go:117] "RemoveContainer" containerID="3fc41f2436d25e582e3d1880db1cdbcbb54ff85b18e1cede6cc27ca7aa9718df" Jan 20 17:20:23 crc kubenswrapper[4558]: I0120 17:20:23.005303 4558 scope.go:117] "RemoveContainer" containerID="939c97fe4abf9f0204b6b9e6360703b7c26dbf5e0fef66c228871f01266dfa25" Jan 20 17:20:23 crc kubenswrapper[4558]: E0120 17:20:23.005746 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"939c97fe4abf9f0204b6b9e6360703b7c26dbf5e0fef66c228871f01266dfa25\": container with ID starting with 939c97fe4abf9f0204b6b9e6360703b7c26dbf5e0fef66c228871f01266dfa25 not found: ID does not exist" containerID="939c97fe4abf9f0204b6b9e6360703b7c26dbf5e0fef66c228871f01266dfa25" Jan 20 17:20:23 crc kubenswrapper[4558]: I0120 17:20:23.005796 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"939c97fe4abf9f0204b6b9e6360703b7c26dbf5e0fef66c228871f01266dfa25"} err="failed to get container status \"939c97fe4abf9f0204b6b9e6360703b7c26dbf5e0fef66c228871f01266dfa25\": rpc error: code = NotFound desc = could not find container \"939c97fe4abf9f0204b6b9e6360703b7c26dbf5e0fef66c228871f01266dfa25\": container with ID starting with 939c97fe4abf9f0204b6b9e6360703b7c26dbf5e0fef66c228871f01266dfa25 not found: ID does not exist" Jan 20 17:20:23 crc kubenswrapper[4558]: I0120 17:20:23.005825 4558 scope.go:117] "RemoveContainer" containerID="3fc41f2436d25e582e3d1880db1cdbcbb54ff85b18e1cede6cc27ca7aa9718df" Jan 20 17:20:23 crc kubenswrapper[4558]: E0120 17:20:23.006174 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3fc41f2436d25e582e3d1880db1cdbcbb54ff85b18e1cede6cc27ca7aa9718df\": container with ID starting with 3fc41f2436d25e582e3d1880db1cdbcbb54ff85b18e1cede6cc27ca7aa9718df not found: ID does not exist" containerID="3fc41f2436d25e582e3d1880db1cdbcbb54ff85b18e1cede6cc27ca7aa9718df" Jan 20 17:20:23 crc kubenswrapper[4558]: I0120 17:20:23.006202 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3fc41f2436d25e582e3d1880db1cdbcbb54ff85b18e1cede6cc27ca7aa9718df"} err="failed to get container status 
\"3fc41f2436d25e582e3d1880db1cdbcbb54ff85b18e1cede6cc27ca7aa9718df\": rpc error: code = NotFound desc = could not find container \"3fc41f2436d25e582e3d1880db1cdbcbb54ff85b18e1cede6cc27ca7aa9718df\": container with ID starting with 3fc41f2436d25e582e3d1880db1cdbcbb54ff85b18e1cede6cc27ca7aa9718df not found: ID does not exist" Jan 20 17:20:23 crc kubenswrapper[4558]: I0120 17:20:23.095977 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/52669504-5bf2-4ea7-af48-d8e25256df3a-config-data\") pod \"52669504-5bf2-4ea7-af48-d8e25256df3a\" (UID: \"52669504-5bf2-4ea7-af48-d8e25256df3a\") " Jan 20 17:20:23 crc kubenswrapper[4558]: I0120 17:20:23.096067 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lt54l\" (UniqueName: \"kubernetes.io/projected/52669504-5bf2-4ea7-af48-d8e25256df3a-kube-api-access-lt54l\") pod \"52669504-5bf2-4ea7-af48-d8e25256df3a\" (UID: \"52669504-5bf2-4ea7-af48-d8e25256df3a\") " Jan 20 17:20:23 crc kubenswrapper[4558]: I0120 17:20:23.096436 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/52669504-5bf2-4ea7-af48-d8e25256df3a-logs\") pod \"52669504-5bf2-4ea7-af48-d8e25256df3a\" (UID: \"52669504-5bf2-4ea7-af48-d8e25256df3a\") " Jan 20 17:20:23 crc kubenswrapper[4558]: I0120 17:20:23.096495 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52669504-5bf2-4ea7-af48-d8e25256df3a-combined-ca-bundle\") pod \"52669504-5bf2-4ea7-af48-d8e25256df3a\" (UID: \"52669504-5bf2-4ea7-af48-d8e25256df3a\") " Jan 20 17:20:23 crc kubenswrapper[4558]: I0120 17:20:23.098263 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/52669504-5bf2-4ea7-af48-d8e25256df3a-logs" (OuterVolumeSpecName: "logs") pod "52669504-5bf2-4ea7-af48-d8e25256df3a" (UID: "52669504-5bf2-4ea7-af48-d8e25256df3a"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:20:23 crc kubenswrapper[4558]: I0120 17:20:23.101958 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/52669504-5bf2-4ea7-af48-d8e25256df3a-kube-api-access-lt54l" (OuterVolumeSpecName: "kube-api-access-lt54l") pod "52669504-5bf2-4ea7-af48-d8e25256df3a" (UID: "52669504-5bf2-4ea7-af48-d8e25256df3a"). InnerVolumeSpecName "kube-api-access-lt54l". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:20:23 crc kubenswrapper[4558]: I0120 17:20:23.125598 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/52669504-5bf2-4ea7-af48-d8e25256df3a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "52669504-5bf2-4ea7-af48-d8e25256df3a" (UID: "52669504-5bf2-4ea7-af48-d8e25256df3a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:20:23 crc kubenswrapper[4558]: I0120 17:20:23.131313 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/52669504-5bf2-4ea7-af48-d8e25256df3a-config-data" (OuterVolumeSpecName: "config-data") pod "52669504-5bf2-4ea7-af48-d8e25256df3a" (UID: "52669504-5bf2-4ea7-af48-d8e25256df3a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:20:23 crc kubenswrapper[4558]: I0120 17:20:23.155971 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:20:23 crc kubenswrapper[4558]: I0120 17:20:23.203914 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/52669504-5bf2-4ea7-af48-d8e25256df3a-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:20:23 crc kubenswrapper[4558]: I0120 17:20:23.204001 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52669504-5bf2-4ea7-af48-d8e25256df3a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:20:23 crc kubenswrapper[4558]: I0120 17:20:23.204036 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/52669504-5bf2-4ea7-af48-d8e25256df3a-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:20:23 crc kubenswrapper[4558]: I0120 17:20:23.204063 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lt54l\" (UniqueName: \"kubernetes.io/projected/52669504-5bf2-4ea7-af48-d8e25256df3a-kube-api-access-lt54l\") on node \"crc\" DevicePath \"\"" Jan 20 17:20:23 crc kubenswrapper[4558]: I0120 17:20:23.300965 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:20:23 crc kubenswrapper[4558]: I0120 17:20:23.324353 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:20:23 crc kubenswrapper[4558]: I0120 17:20:23.340665 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:20:23 crc kubenswrapper[4558]: E0120 17:20:23.341283 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52669504-5bf2-4ea7-af48-d8e25256df3a" containerName="nova-api-log" Jan 20 17:20:23 crc kubenswrapper[4558]: I0120 17:20:23.341301 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="52669504-5bf2-4ea7-af48-d8e25256df3a" containerName="nova-api-log" Jan 20 17:20:23 crc kubenswrapper[4558]: E0120 17:20:23.341328 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52669504-5bf2-4ea7-af48-d8e25256df3a" containerName="nova-api-api" Jan 20 17:20:23 crc kubenswrapper[4558]: I0120 17:20:23.341334 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="52669504-5bf2-4ea7-af48-d8e25256df3a" containerName="nova-api-api" Jan 20 17:20:23 crc kubenswrapper[4558]: I0120 17:20:23.341549 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="52669504-5bf2-4ea7-af48-d8e25256df3a" containerName="nova-api-log" Jan 20 17:20:23 crc kubenswrapper[4558]: I0120 17:20:23.341574 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="52669504-5bf2-4ea7-af48-d8e25256df3a" containerName="nova-api-api" Jan 20 17:20:23 crc kubenswrapper[4558]: I0120 17:20:23.342816 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:20:23 crc kubenswrapper[4558]: I0120 17:20:23.344365 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-api-config-data" Jan 20 17:20:23 crc kubenswrapper[4558]: I0120 17:20:23.354761 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:20:23 crc kubenswrapper[4558]: I0120 17:20:23.515069 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5c2a475-4aaf-4fd2-9c8d-ce699388c298-config-data\") pod \"nova-api-0\" (UID: \"a5c2a475-4aaf-4fd2-9c8d-ce699388c298\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:20:23 crc kubenswrapper[4558]: I0120 17:20:23.515123 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a5c2a475-4aaf-4fd2-9c8d-ce699388c298-logs\") pod \"nova-api-0\" (UID: \"a5c2a475-4aaf-4fd2-9c8d-ce699388c298\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:20:23 crc kubenswrapper[4558]: I0120 17:20:23.515188 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dxvlz\" (UniqueName: \"kubernetes.io/projected/a5c2a475-4aaf-4fd2-9c8d-ce699388c298-kube-api-access-dxvlz\") pod \"nova-api-0\" (UID: \"a5c2a475-4aaf-4fd2-9c8d-ce699388c298\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:20:23 crc kubenswrapper[4558]: I0120 17:20:23.515565 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5c2a475-4aaf-4fd2-9c8d-ce699388c298-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"a5c2a475-4aaf-4fd2-9c8d-ce699388c298\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:20:23 crc kubenswrapper[4558]: I0120 17:20:23.617324 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5c2a475-4aaf-4fd2-9c8d-ce699388c298-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"a5c2a475-4aaf-4fd2-9c8d-ce699388c298\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:20:23 crc kubenswrapper[4558]: I0120 17:20:23.617918 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5c2a475-4aaf-4fd2-9c8d-ce699388c298-config-data\") pod \"nova-api-0\" (UID: \"a5c2a475-4aaf-4fd2-9c8d-ce699388c298\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:20:23 crc kubenswrapper[4558]: I0120 17:20:23.618004 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a5c2a475-4aaf-4fd2-9c8d-ce699388c298-logs\") pod \"nova-api-0\" (UID: \"a5c2a475-4aaf-4fd2-9c8d-ce699388c298\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:20:23 crc kubenswrapper[4558]: I0120 17:20:23.618065 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dxvlz\" (UniqueName: \"kubernetes.io/projected/a5c2a475-4aaf-4fd2-9c8d-ce699388c298-kube-api-access-dxvlz\") pod \"nova-api-0\" (UID: \"a5c2a475-4aaf-4fd2-9c8d-ce699388c298\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:20:23 crc kubenswrapper[4558]: I0120 17:20:23.618443 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/a5c2a475-4aaf-4fd2-9c8d-ce699388c298-logs\") pod \"nova-api-0\" (UID: \"a5c2a475-4aaf-4fd2-9c8d-ce699388c298\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:20:23 crc kubenswrapper[4558]: I0120 17:20:23.622391 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5c2a475-4aaf-4fd2-9c8d-ce699388c298-config-data\") pod \"nova-api-0\" (UID: \"a5c2a475-4aaf-4fd2-9c8d-ce699388c298\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:20:23 crc kubenswrapper[4558]: I0120 17:20:23.623020 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5c2a475-4aaf-4fd2-9c8d-ce699388c298-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"a5c2a475-4aaf-4fd2-9c8d-ce699388c298\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:20:23 crc kubenswrapper[4558]: I0120 17:20:23.633314 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dxvlz\" (UniqueName: \"kubernetes.io/projected/a5c2a475-4aaf-4fd2-9c8d-ce699388c298-kube-api-access-dxvlz\") pod \"nova-api-0\" (UID: \"a5c2a475-4aaf-4fd2-9c8d-ce699388c298\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:20:23 crc kubenswrapper[4558]: I0120 17:20:23.660834 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:20:23 crc kubenswrapper[4558]: I0120 17:20:23.974921 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"09d95674-9970-471d-8061-997e69ddda11","Type":"ContainerStarted","Data":"065345d0e077b3bb2b2e67f7ed62c2413c1984c254d80dff447c7a5693b57103"} Jan 20 17:20:23 crc kubenswrapper[4558]: I0120 17:20:23.975205 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"09d95674-9970-471d-8061-997e69ddda11","Type":"ContainerStarted","Data":"523536e2b2a85c3c10d4dc392b34b04a3e55effe5a2bcd5562e1367958047b32"} Jan 20 17:20:23 crc kubenswrapper[4558]: I0120 17:20:23.975984 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"8eb09b19-2337-4fa1-8c9a-a3a45c9d8a48","Type":"ContainerStarted","Data":"cef9f816792ffb8c331439791b6687f021b7a0df270a63be33a28f933f0c1c2b"} Jan 20 17:20:23 crc kubenswrapper[4558]: I0120 17:20:23.996980 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-metadata-0" podStartSLOduration=1.9969628529999999 podStartE2EDuration="1.996962853s" podCreationTimestamp="2026-01-20 17:20:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:20:23.994051057 +0000 UTC m=+2317.754389024" watchObservedRunningTime="2026-01-20 17:20:23.996962853 +0000 UTC m=+2317.757300809" Jan 20 17:20:24 crc kubenswrapper[4558]: I0120 17:20:24.015533 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-scheduler-0" podStartSLOduration=3.015513998 podStartE2EDuration="3.015513998s" podCreationTimestamp="2026-01-20 17:20:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:20:24.005397675 +0000 UTC m=+2317.765735642" watchObservedRunningTime="2026-01-20 17:20:24.015513998 +0000 UTC m=+2317.775851966" Jan 20 17:20:24 crc 
kubenswrapper[4558]: I0120 17:20:24.081947 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:20:24 crc kubenswrapper[4558]: I0120 17:20:24.577741 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="52669504-5bf2-4ea7-af48-d8e25256df3a" path="/var/lib/kubelet/pods/52669504-5bf2-4ea7-af48-d8e25256df3a/volumes" Jan 20 17:20:24 crc kubenswrapper[4558]: I0120 17:20:24.989000 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"a5c2a475-4aaf-4fd2-9c8d-ce699388c298","Type":"ContainerStarted","Data":"9c1c32b8a6ee5141b8006fd7c9f37821ec075e5089f976d9237eb542664079f9"} Jan 20 17:20:24 crc kubenswrapper[4558]: I0120 17:20:24.989071 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"a5c2a475-4aaf-4fd2-9c8d-ce699388c298","Type":"ContainerStarted","Data":"f57a731e89a8714639377a4caf5629340e4e8c23f00dfc670ca552777e92a864"} Jan 20 17:20:24 crc kubenswrapper[4558]: I0120 17:20:24.989086 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"a5c2a475-4aaf-4fd2-9c8d-ce699388c298","Type":"ContainerStarted","Data":"b290449dc464ea932052c928ddb9f178486f85bd174381542b66daab2c500c16"} Jan 20 17:20:25 crc kubenswrapper[4558]: I0120 17:20:25.015513 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-api-0" podStartSLOduration=2.015490885 podStartE2EDuration="2.015490885s" podCreationTimestamp="2026-01-20 17:20:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:20:25.005780084 +0000 UTC m=+2318.766118051" watchObservedRunningTime="2026-01-20 17:20:25.015490885 +0000 UTC m=+2318.775828852" Jan 20 17:20:27 crc kubenswrapper[4558]: I0120 17:20:27.355435 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:20:27 crc kubenswrapper[4558]: I0120 17:20:27.366702 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:20:27 crc kubenswrapper[4558]: I0120 17:20:27.366736 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:20:32 crc kubenswrapper[4558]: I0120 17:20:32.355395 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:20:32 crc kubenswrapper[4558]: I0120 17:20:32.367035 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:20:32 crc kubenswrapper[4558]: I0120 17:20:32.367122 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:20:32 crc kubenswrapper[4558]: I0120 17:20:32.379400 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:20:33 crc kubenswrapper[4558]: I0120 17:20:33.092710 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:20:33 crc kubenswrapper[4558]: I0120 17:20:33.381293 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-metadata-0" podUID="09d95674-9970-471d-8061-997e69ddda11" 
containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.211:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:20:33 crc kubenswrapper[4558]: I0120 17:20:33.381323 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-metadata-0" podUID="09d95674-9970-471d-8061-997e69ddda11" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.211:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:20:33 crc kubenswrapper[4558]: I0120 17:20:33.662003 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:20:33 crc kubenswrapper[4558]: I0120 17:20:33.663633 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:20:34 crc kubenswrapper[4558]: I0120 17:20:34.566290 4558 scope.go:117] "RemoveContainer" containerID="0ffce44366a8b4466427b682fb41640425366a0f4449210959148de9aef695c6" Jan 20 17:20:34 crc kubenswrapper[4558]: E0120 17:20:34.566572 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:20:34 crc kubenswrapper[4558]: I0120 17:20:34.703339 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" podUID="a5c2a475-4aaf-4fd2-9c8d-ce699388c298" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.212:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 20 17:20:34 crc kubenswrapper[4558]: I0120 17:20:34.744311 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" podUID="a5c2a475-4aaf-4fd2-9c8d-ce699388c298" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.212:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 20 17:20:42 crc kubenswrapper[4558]: I0120 17:20:42.376749 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:20:42 crc kubenswrapper[4558]: I0120 17:20:42.377572 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:20:42 crc kubenswrapper[4558]: I0120 17:20:42.383188 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:20:42 crc kubenswrapper[4558]: I0120 17:20:42.384004 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:20:43 crc kubenswrapper[4558]: I0120 17:20:43.665736 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:20:43 crc kubenswrapper[4558]: I0120 17:20:43.666344 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:20:43 crc kubenswrapper[4558]: I0120 17:20:43.666368 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:20:43 crc kubenswrapper[4558]: I0120 17:20:43.668741 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:20:44 crc kubenswrapper[4558]: I0120 17:20:44.194284 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:20:44 crc kubenswrapper[4558]: I0120 17:20:44.201633 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:20:46 crc kubenswrapper[4558]: I0120 17:20:46.094701 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:20:46 crc kubenswrapper[4558]: I0120 17:20:46.095437 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="590319f8-5fc7-4ecd-ab83-eb4d59262fd9" containerName="ceilometer-central-agent" containerID="cri-o://81e7c4f925c3e45867cdd806f74bba8ad198e38197924a7db0fd66975b17c937" gracePeriod=30 Jan 20 17:20:46 crc kubenswrapper[4558]: I0120 17:20:46.096799 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="590319f8-5fc7-4ecd-ab83-eb4d59262fd9" containerName="proxy-httpd" containerID="cri-o://fd74c139943f48673f27e23f704b12a18fc952b16d5c63dad1e7eb14fba84a62" gracePeriod=30 Jan 20 17:20:46 crc kubenswrapper[4558]: I0120 17:20:46.096892 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="590319f8-5fc7-4ecd-ab83-eb4d59262fd9" containerName="sg-core" containerID="cri-o://537c61cfa49585fd06616291a85a452aae2d22ed19564435a3b36df407997f4b" gracePeriod=30 Jan 20 17:20:46 crc kubenswrapper[4558]: I0120 17:20:46.096937 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="590319f8-5fc7-4ecd-ab83-eb4d59262fd9" containerName="ceilometer-notification-agent" containerID="cri-o://d3a96fa2ed8c1d387f7ca0bca75e6b24f97198f5d3efc7b29c6b348c9f0acaa3" gracePeriod=30 Jan 20 17:20:46 crc kubenswrapper[4558]: I0120 17:20:46.124085 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/ceilometer-0" podUID="590319f8-5fc7-4ecd-ab83-eb4d59262fd9" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.209:3000/\": EOF" Jan 20 17:20:46 crc kubenswrapper[4558]: I0120 17:20:46.516724 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/ceilometer-0" podUID="590319f8-5fc7-4ecd-ab83-eb4d59262fd9" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.209:3000/\": dial tcp 10.217.0.209:3000: connect: connection refused" Jan 20 17:20:46 crc kubenswrapper[4558]: I0120 17:20:46.590118 4558 scope.go:117] "RemoveContainer" containerID="0ffce44366a8b4466427b682fb41640425366a0f4449210959148de9aef695c6" Jan 20 17:20:46 crc kubenswrapper[4558]: E0120 17:20:46.590558 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:20:47 crc kubenswrapper[4558]: I0120 
17:20:47.230307 4558 generic.go:334] "Generic (PLEG): container finished" podID="590319f8-5fc7-4ecd-ab83-eb4d59262fd9" containerID="fd74c139943f48673f27e23f704b12a18fc952b16d5c63dad1e7eb14fba84a62" exitCode=0 Jan 20 17:20:47 crc kubenswrapper[4558]: I0120 17:20:47.230344 4558 generic.go:334] "Generic (PLEG): container finished" podID="590319f8-5fc7-4ecd-ab83-eb4d59262fd9" containerID="537c61cfa49585fd06616291a85a452aae2d22ed19564435a3b36df407997f4b" exitCode=2 Jan 20 17:20:47 crc kubenswrapper[4558]: I0120 17:20:47.230353 4558 generic.go:334] "Generic (PLEG): container finished" podID="590319f8-5fc7-4ecd-ab83-eb4d59262fd9" containerID="81e7c4f925c3e45867cdd806f74bba8ad198e38197924a7db0fd66975b17c937" exitCode=0 Jan 20 17:20:47 crc kubenswrapper[4558]: I0120 17:20:47.230392 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"590319f8-5fc7-4ecd-ab83-eb4d59262fd9","Type":"ContainerDied","Data":"fd74c139943f48673f27e23f704b12a18fc952b16d5c63dad1e7eb14fba84a62"} Jan 20 17:20:47 crc kubenswrapper[4558]: I0120 17:20:47.230452 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"590319f8-5fc7-4ecd-ab83-eb4d59262fd9","Type":"ContainerDied","Data":"537c61cfa49585fd06616291a85a452aae2d22ed19564435a3b36df407997f4b"} Jan 20 17:20:47 crc kubenswrapper[4558]: I0120 17:20:47.230463 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"590319f8-5fc7-4ecd-ab83-eb4d59262fd9","Type":"ContainerDied","Data":"81e7c4f925c3e45867cdd806f74bba8ad198e38197924a7db0fd66975b17c937"} Jan 20 17:20:47 crc kubenswrapper[4558]: I0120 17:20:47.457393 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:20:47 crc kubenswrapper[4558]: I0120 17:20:47.457618 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="a5c2a475-4aaf-4fd2-9c8d-ce699388c298" containerName="nova-api-log" containerID="cri-o://f57a731e89a8714639377a4caf5629340e4e8c23f00dfc670ca552777e92a864" gracePeriod=30 Jan 20 17:20:47 crc kubenswrapper[4558]: I0120 17:20:47.457750 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="a5c2a475-4aaf-4fd2-9c8d-ce699388c298" containerName="nova-api-api" containerID="cri-o://9c1c32b8a6ee5141b8006fd7c9f37821ec075e5089f976d9237eb542664079f9" gracePeriod=30 Jan 20 17:20:48 crc kubenswrapper[4558]: I0120 17:20:48.243258 4558 generic.go:334] "Generic (PLEG): container finished" podID="a5c2a475-4aaf-4fd2-9c8d-ce699388c298" containerID="f57a731e89a8714639377a4caf5629340e4e8c23f00dfc670ca552777e92a864" exitCode=143 Jan 20 17:20:48 crc kubenswrapper[4558]: I0120 17:20:48.243344 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"a5c2a475-4aaf-4fd2-9c8d-ce699388c298","Type":"ContainerDied","Data":"f57a731e89a8714639377a4caf5629340e4e8c23f00dfc670ca552777e92a864"} Jan 20 17:20:48 crc kubenswrapper[4558]: I0120 17:20:48.978459 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.051178 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/590319f8-5fc7-4ecd-ab83-eb4d59262fd9-ceilometer-tls-certs\") pod \"590319f8-5fc7-4ecd-ab83-eb4d59262fd9\" (UID: \"590319f8-5fc7-4ecd-ab83-eb4d59262fd9\") " Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.051232 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/590319f8-5fc7-4ecd-ab83-eb4d59262fd9-config-data\") pod \"590319f8-5fc7-4ecd-ab83-eb4d59262fd9\" (UID: \"590319f8-5fc7-4ecd-ab83-eb4d59262fd9\") " Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.051267 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/590319f8-5fc7-4ecd-ab83-eb4d59262fd9-log-httpd\") pod \"590319f8-5fc7-4ecd-ab83-eb4d59262fd9\" (UID: \"590319f8-5fc7-4ecd-ab83-eb4d59262fd9\") " Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.051327 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fs5ct\" (UniqueName: \"kubernetes.io/projected/590319f8-5fc7-4ecd-ab83-eb4d59262fd9-kube-api-access-fs5ct\") pod \"590319f8-5fc7-4ecd-ab83-eb4d59262fd9\" (UID: \"590319f8-5fc7-4ecd-ab83-eb4d59262fd9\") " Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.051484 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/590319f8-5fc7-4ecd-ab83-eb4d59262fd9-combined-ca-bundle\") pod \"590319f8-5fc7-4ecd-ab83-eb4d59262fd9\" (UID: \"590319f8-5fc7-4ecd-ab83-eb4d59262fd9\") " Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.051530 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/590319f8-5fc7-4ecd-ab83-eb4d59262fd9-sg-core-conf-yaml\") pod \"590319f8-5fc7-4ecd-ab83-eb4d59262fd9\" (UID: \"590319f8-5fc7-4ecd-ab83-eb4d59262fd9\") " Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.051642 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/590319f8-5fc7-4ecd-ab83-eb4d59262fd9-run-httpd\") pod \"590319f8-5fc7-4ecd-ab83-eb4d59262fd9\" (UID: \"590319f8-5fc7-4ecd-ab83-eb4d59262fd9\") " Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.051709 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/590319f8-5fc7-4ecd-ab83-eb4d59262fd9-scripts\") pod \"590319f8-5fc7-4ecd-ab83-eb4d59262fd9\" (UID: \"590319f8-5fc7-4ecd-ab83-eb4d59262fd9\") " Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.052676 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/590319f8-5fc7-4ecd-ab83-eb4d59262fd9-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "590319f8-5fc7-4ecd-ab83-eb4d59262fd9" (UID: "590319f8-5fc7-4ecd-ab83-eb4d59262fd9"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.053419 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/590319f8-5fc7-4ecd-ab83-eb4d59262fd9-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.060343 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/590319f8-5fc7-4ecd-ab83-eb4d59262fd9-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "590319f8-5fc7-4ecd-ab83-eb4d59262fd9" (UID: "590319f8-5fc7-4ecd-ab83-eb4d59262fd9"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.077961 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/590319f8-5fc7-4ecd-ab83-eb4d59262fd9-kube-api-access-fs5ct" (OuterVolumeSpecName: "kube-api-access-fs5ct") pod "590319f8-5fc7-4ecd-ab83-eb4d59262fd9" (UID: "590319f8-5fc7-4ecd-ab83-eb4d59262fd9"). InnerVolumeSpecName "kube-api-access-fs5ct". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.090771 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/590319f8-5fc7-4ecd-ab83-eb4d59262fd9-scripts" (OuterVolumeSpecName: "scripts") pod "590319f8-5fc7-4ecd-ab83-eb4d59262fd9" (UID: "590319f8-5fc7-4ecd-ab83-eb4d59262fd9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.120376 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/590319f8-5fc7-4ecd-ab83-eb4d59262fd9-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "590319f8-5fc7-4ecd-ab83-eb4d59262fd9" (UID: "590319f8-5fc7-4ecd-ab83-eb4d59262fd9"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.156159 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/590319f8-5fc7-4ecd-ab83-eb4d59262fd9-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.156489 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fs5ct\" (UniqueName: \"kubernetes.io/projected/590319f8-5fc7-4ecd-ab83-eb4d59262fd9-kube-api-access-fs5ct\") on node \"crc\" DevicePath \"\"" Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.156563 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/590319f8-5fc7-4ecd-ab83-eb4d59262fd9-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.156622 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/590319f8-5fc7-4ecd-ab83-eb4d59262fd9-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.175327 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/590319f8-5fc7-4ecd-ab83-eb4d59262fd9-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "590319f8-5fc7-4ecd-ab83-eb4d59262fd9" (UID: "590319f8-5fc7-4ecd-ab83-eb4d59262fd9"). InnerVolumeSpecName "ceilometer-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.176459 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/590319f8-5fc7-4ecd-ab83-eb4d59262fd9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "590319f8-5fc7-4ecd-ab83-eb4d59262fd9" (UID: "590319f8-5fc7-4ecd-ab83-eb4d59262fd9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.218182 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/590319f8-5fc7-4ecd-ab83-eb4d59262fd9-config-data" (OuterVolumeSpecName: "config-data") pod "590319f8-5fc7-4ecd-ab83-eb4d59262fd9" (UID: "590319f8-5fc7-4ecd-ab83-eb4d59262fd9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.258118 4558 generic.go:334] "Generic (PLEG): container finished" podID="590319f8-5fc7-4ecd-ab83-eb4d59262fd9" containerID="d3a96fa2ed8c1d387f7ca0bca75e6b24f97198f5d3efc7b29c6b348c9f0acaa3" exitCode=0 Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.258178 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"590319f8-5fc7-4ecd-ab83-eb4d59262fd9","Type":"ContainerDied","Data":"d3a96fa2ed8c1d387f7ca0bca75e6b24f97198f5d3efc7b29c6b348c9f0acaa3"} Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.258216 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"590319f8-5fc7-4ecd-ab83-eb4d59262fd9","Type":"ContainerDied","Data":"4b423c58baeeafc41a9ee9140d2059a719143546c5d8ba22b785924dae00df96"} Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.258211 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.258233 4558 scope.go:117] "RemoveContainer" containerID="fd74c139943f48673f27e23f704b12a18fc952b16d5c63dad1e7eb14fba84a62" Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.258777 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/590319f8-5fc7-4ecd-ab83-eb4d59262fd9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.259465 4558 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/590319f8-5fc7-4ecd-ab83-eb4d59262fd9-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.259498 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/590319f8-5fc7-4ecd-ab83-eb4d59262fd9-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.275938 4558 scope.go:117] "RemoveContainer" containerID="537c61cfa49585fd06616291a85a452aae2d22ed19564435a3b36df407997f4b" Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.285768 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.296988 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.301815 4558 scope.go:117] "RemoveContainer" containerID="d3a96fa2ed8c1d387f7ca0bca75e6b24f97198f5d3efc7b29c6b348c9f0acaa3" Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.321955 4558 scope.go:117] "RemoveContainer" containerID="81e7c4f925c3e45867cdd806f74bba8ad198e38197924a7db0fd66975b17c937" Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.333671 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:20:49 crc kubenswrapper[4558]: E0120 17:20:49.334341 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="590319f8-5fc7-4ecd-ab83-eb4d59262fd9" containerName="proxy-httpd" Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.334432 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="590319f8-5fc7-4ecd-ab83-eb4d59262fd9" containerName="proxy-httpd" Jan 20 17:20:49 crc kubenswrapper[4558]: E0120 17:20:49.334498 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="590319f8-5fc7-4ecd-ab83-eb4d59262fd9" containerName="ceilometer-central-agent" Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.334571 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="590319f8-5fc7-4ecd-ab83-eb4d59262fd9" containerName="ceilometer-central-agent" Jan 20 17:20:49 crc kubenswrapper[4558]: E0120 17:20:49.334643 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="590319f8-5fc7-4ecd-ab83-eb4d59262fd9" containerName="sg-core" Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.334695 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="590319f8-5fc7-4ecd-ab83-eb4d59262fd9" containerName="sg-core" Jan 20 17:20:49 crc kubenswrapper[4558]: E0120 17:20:49.334754 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="590319f8-5fc7-4ecd-ab83-eb4d59262fd9" containerName="ceilometer-notification-agent" Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.334804 4558 
state_mem.go:107] "Deleted CPUSet assignment" podUID="590319f8-5fc7-4ecd-ab83-eb4d59262fd9" containerName="ceilometer-notification-agent" Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.335089 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="590319f8-5fc7-4ecd-ab83-eb4d59262fd9" containerName="proxy-httpd" Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.335157 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="590319f8-5fc7-4ecd-ab83-eb4d59262fd9" containerName="ceilometer-central-agent" Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.335247 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="590319f8-5fc7-4ecd-ab83-eb4d59262fd9" containerName="ceilometer-notification-agent" Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.335311 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="590319f8-5fc7-4ecd-ab83-eb4d59262fd9" containerName="sg-core" Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.337114 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.339740 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ceilometer-internal-svc" Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.340040 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.340347 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.344656 4558 scope.go:117] "RemoveContainer" containerID="fd74c139943f48673f27e23f704b12a18fc952b16d5c63dad1e7eb14fba84a62" Jan 20 17:20:49 crc kubenswrapper[4558]: E0120 17:20:49.345097 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fd74c139943f48673f27e23f704b12a18fc952b16d5c63dad1e7eb14fba84a62\": container with ID starting with fd74c139943f48673f27e23f704b12a18fc952b16d5c63dad1e7eb14fba84a62 not found: ID does not exist" containerID="fd74c139943f48673f27e23f704b12a18fc952b16d5c63dad1e7eb14fba84a62" Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.345145 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fd74c139943f48673f27e23f704b12a18fc952b16d5c63dad1e7eb14fba84a62"} err="failed to get container status \"fd74c139943f48673f27e23f704b12a18fc952b16d5c63dad1e7eb14fba84a62\": rpc error: code = NotFound desc = could not find container \"fd74c139943f48673f27e23f704b12a18fc952b16d5c63dad1e7eb14fba84a62\": container with ID starting with fd74c139943f48673f27e23f704b12a18fc952b16d5c63dad1e7eb14fba84a62 not found: ID does not exist" Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.345194 4558 scope.go:117] "RemoveContainer" containerID="537c61cfa49585fd06616291a85a452aae2d22ed19564435a3b36df407997f4b" Jan 20 17:20:49 crc kubenswrapper[4558]: E0120 17:20:49.345541 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"537c61cfa49585fd06616291a85a452aae2d22ed19564435a3b36df407997f4b\": container with ID starting with 537c61cfa49585fd06616291a85a452aae2d22ed19564435a3b36df407997f4b not found: ID does not exist" 
containerID="537c61cfa49585fd06616291a85a452aae2d22ed19564435a3b36df407997f4b" Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.345583 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"537c61cfa49585fd06616291a85a452aae2d22ed19564435a3b36df407997f4b"} err="failed to get container status \"537c61cfa49585fd06616291a85a452aae2d22ed19564435a3b36df407997f4b\": rpc error: code = NotFound desc = could not find container \"537c61cfa49585fd06616291a85a452aae2d22ed19564435a3b36df407997f4b\": container with ID starting with 537c61cfa49585fd06616291a85a452aae2d22ed19564435a3b36df407997f4b not found: ID does not exist" Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.345602 4558 scope.go:117] "RemoveContainer" containerID="d3a96fa2ed8c1d387f7ca0bca75e6b24f97198f5d3efc7b29c6b348c9f0acaa3" Jan 20 17:20:49 crc kubenswrapper[4558]: E0120 17:20:49.345862 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d3a96fa2ed8c1d387f7ca0bca75e6b24f97198f5d3efc7b29c6b348c9f0acaa3\": container with ID starting with d3a96fa2ed8c1d387f7ca0bca75e6b24f97198f5d3efc7b29c6b348c9f0acaa3 not found: ID does not exist" containerID="d3a96fa2ed8c1d387f7ca0bca75e6b24f97198f5d3efc7b29c6b348c9f0acaa3" Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.345879 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d3a96fa2ed8c1d387f7ca0bca75e6b24f97198f5d3efc7b29c6b348c9f0acaa3"} err="failed to get container status \"d3a96fa2ed8c1d387f7ca0bca75e6b24f97198f5d3efc7b29c6b348c9f0acaa3\": rpc error: code = NotFound desc = could not find container \"d3a96fa2ed8c1d387f7ca0bca75e6b24f97198f5d3efc7b29c6b348c9f0acaa3\": container with ID starting with d3a96fa2ed8c1d387f7ca0bca75e6b24f97198f5d3efc7b29c6b348c9f0acaa3 not found: ID does not exist" Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.345896 4558 scope.go:117] "RemoveContainer" containerID="81e7c4f925c3e45867cdd806f74bba8ad198e38197924a7db0fd66975b17c937" Jan 20 17:20:49 crc kubenswrapper[4558]: E0120 17:20:49.346114 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"81e7c4f925c3e45867cdd806f74bba8ad198e38197924a7db0fd66975b17c937\": container with ID starting with 81e7c4f925c3e45867cdd806f74bba8ad198e38197924a7db0fd66975b17c937 not found: ID does not exist" containerID="81e7c4f925c3e45867cdd806f74bba8ad198e38197924a7db0fd66975b17c937" Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.346136 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"81e7c4f925c3e45867cdd806f74bba8ad198e38197924a7db0fd66975b17c937"} err="failed to get container status \"81e7c4f925c3e45867cdd806f74bba8ad198e38197924a7db0fd66975b17c937\": rpc error: code = NotFound desc = could not find container \"81e7c4f925c3e45867cdd806f74bba8ad198e38197924a7db0fd66975b17c937\": container with ID starting with 81e7c4f925c3e45867cdd806f74bba8ad198e38197924a7db0fd66975b17c937 not found: ID does not exist" Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.349189 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.462437 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/92f9ec95-57c2-4584-b1d2-77c0f05c0af1-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"92f9ec95-57c2-4584-b1d2-77c0f05c0af1\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.462480 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92f9ec95-57c2-4584-b1d2-77c0f05c0af1-config-data\") pod \"ceilometer-0\" (UID: \"92f9ec95-57c2-4584-b1d2-77c0f05c0af1\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.462503 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/92f9ec95-57c2-4584-b1d2-77c0f05c0af1-log-httpd\") pod \"ceilometer-0\" (UID: \"92f9ec95-57c2-4584-b1d2-77c0f05c0af1\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.462615 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/92f9ec95-57c2-4584-b1d2-77c0f05c0af1-scripts\") pod \"ceilometer-0\" (UID: \"92f9ec95-57c2-4584-b1d2-77c0f05c0af1\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.462661 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zmms6\" (UniqueName: \"kubernetes.io/projected/92f9ec95-57c2-4584-b1d2-77c0f05c0af1-kube-api-access-zmms6\") pod \"ceilometer-0\" (UID: \"92f9ec95-57c2-4584-b1d2-77c0f05c0af1\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.462683 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/92f9ec95-57c2-4584-b1d2-77c0f05c0af1-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"92f9ec95-57c2-4584-b1d2-77c0f05c0af1\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.462714 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92f9ec95-57c2-4584-b1d2-77c0f05c0af1-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"92f9ec95-57c2-4584-b1d2-77c0f05c0af1\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.462749 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/92f9ec95-57c2-4584-b1d2-77c0f05c0af1-run-httpd\") pod \"ceilometer-0\" (UID: \"92f9ec95-57c2-4584-b1d2-77c0f05c0af1\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.566074 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/92f9ec95-57c2-4584-b1d2-77c0f05c0af1-scripts\") pod \"ceilometer-0\" (UID: \"92f9ec95-57c2-4584-b1d2-77c0f05c0af1\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.566221 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zmms6\" (UniqueName: \"kubernetes.io/projected/92f9ec95-57c2-4584-b1d2-77c0f05c0af1-kube-api-access-zmms6\") pod \"ceilometer-0\" (UID: 
\"92f9ec95-57c2-4584-b1d2-77c0f05c0af1\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.566260 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/92f9ec95-57c2-4584-b1d2-77c0f05c0af1-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"92f9ec95-57c2-4584-b1d2-77c0f05c0af1\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.566329 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92f9ec95-57c2-4584-b1d2-77c0f05c0af1-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"92f9ec95-57c2-4584-b1d2-77c0f05c0af1\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.566403 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/92f9ec95-57c2-4584-b1d2-77c0f05c0af1-run-httpd\") pod \"ceilometer-0\" (UID: \"92f9ec95-57c2-4584-b1d2-77c0f05c0af1\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.566621 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/92f9ec95-57c2-4584-b1d2-77c0f05c0af1-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"92f9ec95-57c2-4584-b1d2-77c0f05c0af1\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.566675 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92f9ec95-57c2-4584-b1d2-77c0f05c0af1-config-data\") pod \"ceilometer-0\" (UID: \"92f9ec95-57c2-4584-b1d2-77c0f05c0af1\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.566699 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/92f9ec95-57c2-4584-b1d2-77c0f05c0af1-log-httpd\") pod \"ceilometer-0\" (UID: \"92f9ec95-57c2-4584-b1d2-77c0f05c0af1\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.567125 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/92f9ec95-57c2-4584-b1d2-77c0f05c0af1-run-httpd\") pod \"ceilometer-0\" (UID: \"92f9ec95-57c2-4584-b1d2-77c0f05c0af1\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.567310 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/92f9ec95-57c2-4584-b1d2-77c0f05c0af1-log-httpd\") pod \"ceilometer-0\" (UID: \"92f9ec95-57c2-4584-b1d2-77c0f05c0af1\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.571672 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92f9ec95-57c2-4584-b1d2-77c0f05c0af1-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"92f9ec95-57c2-4584-b1d2-77c0f05c0af1\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.571778 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/92f9ec95-57c2-4584-b1d2-77c0f05c0af1-scripts\") pod \"ceilometer-0\" (UID: \"92f9ec95-57c2-4584-b1d2-77c0f05c0af1\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.572087 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/92f9ec95-57c2-4584-b1d2-77c0f05c0af1-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"92f9ec95-57c2-4584-b1d2-77c0f05c0af1\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.572929 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92f9ec95-57c2-4584-b1d2-77c0f05c0af1-config-data\") pod \"ceilometer-0\" (UID: \"92f9ec95-57c2-4584-b1d2-77c0f05c0af1\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.573589 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/92f9ec95-57c2-4584-b1d2-77c0f05c0af1-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"92f9ec95-57c2-4584-b1d2-77c0f05c0af1\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.583744 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zmms6\" (UniqueName: \"kubernetes.io/projected/92f9ec95-57c2-4584-b1d2-77c0f05c0af1-kube-api-access-zmms6\") pod \"ceilometer-0\" (UID: \"92f9ec95-57c2-4584-b1d2-77c0f05c0af1\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.653035 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:49 crc kubenswrapper[4558]: I0120 17:20:49.694731 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:20:50 crc kubenswrapper[4558]: I0120 17:20:50.074938 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:20:50 crc kubenswrapper[4558]: I0120 17:20:50.271768 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"92f9ec95-57c2-4584-b1d2-77c0f05c0af1","Type":"ContainerStarted","Data":"921e6b3c70578c3c232ed96b123dbf031f9535812ffbefffdf09f9007f32303e"} Jan 20 17:20:50 crc kubenswrapper[4558]: I0120 17:20:50.578421 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="590319f8-5fc7-4ecd-ab83-eb4d59262fd9" path="/var/lib/kubelet/pods/590319f8-5fc7-4ecd-ab83-eb4d59262fd9/volumes" Jan 20 17:20:50 crc kubenswrapper[4558]: I0120 17:20:50.993578 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:20:51 crc kubenswrapper[4558]: I0120 17:20:51.106397 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dxvlz\" (UniqueName: \"kubernetes.io/projected/a5c2a475-4aaf-4fd2-9c8d-ce699388c298-kube-api-access-dxvlz\") pod \"a5c2a475-4aaf-4fd2-9c8d-ce699388c298\" (UID: \"a5c2a475-4aaf-4fd2-9c8d-ce699388c298\") " Jan 20 17:20:51 crc kubenswrapper[4558]: I0120 17:20:51.106601 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a5c2a475-4aaf-4fd2-9c8d-ce699388c298-logs\") pod \"a5c2a475-4aaf-4fd2-9c8d-ce699388c298\" (UID: \"a5c2a475-4aaf-4fd2-9c8d-ce699388c298\") " Jan 20 17:20:51 crc kubenswrapper[4558]: I0120 17:20:51.107002 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a5c2a475-4aaf-4fd2-9c8d-ce699388c298-logs" (OuterVolumeSpecName: "logs") pod "a5c2a475-4aaf-4fd2-9c8d-ce699388c298" (UID: "a5c2a475-4aaf-4fd2-9c8d-ce699388c298"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:20:51 crc kubenswrapper[4558]: I0120 17:20:51.107052 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5c2a475-4aaf-4fd2-9c8d-ce699388c298-combined-ca-bundle\") pod \"a5c2a475-4aaf-4fd2-9c8d-ce699388c298\" (UID: \"a5c2a475-4aaf-4fd2-9c8d-ce699388c298\") " Jan 20 17:20:51 crc kubenswrapper[4558]: I0120 17:20:51.107344 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5c2a475-4aaf-4fd2-9c8d-ce699388c298-config-data\") pod \"a5c2a475-4aaf-4fd2-9c8d-ce699388c298\" (UID: \"a5c2a475-4aaf-4fd2-9c8d-ce699388c298\") " Jan 20 17:20:51 crc kubenswrapper[4558]: I0120 17:20:51.108468 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a5c2a475-4aaf-4fd2-9c8d-ce699388c298-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:20:51 crc kubenswrapper[4558]: I0120 17:20:51.111623 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a5c2a475-4aaf-4fd2-9c8d-ce699388c298-kube-api-access-dxvlz" (OuterVolumeSpecName: "kube-api-access-dxvlz") pod "a5c2a475-4aaf-4fd2-9c8d-ce699388c298" (UID: "a5c2a475-4aaf-4fd2-9c8d-ce699388c298"). InnerVolumeSpecName "kube-api-access-dxvlz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:20:51 crc kubenswrapper[4558]: I0120 17:20:51.134434 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a5c2a475-4aaf-4fd2-9c8d-ce699388c298-config-data" (OuterVolumeSpecName: "config-data") pod "a5c2a475-4aaf-4fd2-9c8d-ce699388c298" (UID: "a5c2a475-4aaf-4fd2-9c8d-ce699388c298"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:20:51 crc kubenswrapper[4558]: I0120 17:20:51.140322 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a5c2a475-4aaf-4fd2-9c8d-ce699388c298-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a5c2a475-4aaf-4fd2-9c8d-ce699388c298" (UID: "a5c2a475-4aaf-4fd2-9c8d-ce699388c298"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:20:51 crc kubenswrapper[4558]: I0120 17:20:51.209619 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a5c2a475-4aaf-4fd2-9c8d-ce699388c298-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:20:51 crc kubenswrapper[4558]: I0120 17:20:51.209649 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dxvlz\" (UniqueName: \"kubernetes.io/projected/a5c2a475-4aaf-4fd2-9c8d-ce699388c298-kube-api-access-dxvlz\") on node \"crc\" DevicePath \"\"" Jan 20 17:20:51 crc kubenswrapper[4558]: I0120 17:20:51.209660 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5c2a475-4aaf-4fd2-9c8d-ce699388c298-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:20:51 crc kubenswrapper[4558]: I0120 17:20:51.283968 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"92f9ec95-57c2-4584-b1d2-77c0f05c0af1","Type":"ContainerStarted","Data":"272675fe6d3890f5144654af1035e7e1268b5268b69af0ce91aa57817dae1595"} Jan 20 17:20:51 crc kubenswrapper[4558]: I0120 17:20:51.286399 4558 generic.go:334] "Generic (PLEG): container finished" podID="a5c2a475-4aaf-4fd2-9c8d-ce699388c298" containerID="9c1c32b8a6ee5141b8006fd7c9f37821ec075e5089f976d9237eb542664079f9" exitCode=0 Jan 20 17:20:51 crc kubenswrapper[4558]: I0120 17:20:51.286443 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"a5c2a475-4aaf-4fd2-9c8d-ce699388c298","Type":"ContainerDied","Data":"9c1c32b8a6ee5141b8006fd7c9f37821ec075e5089f976d9237eb542664079f9"} Jan 20 17:20:51 crc kubenswrapper[4558]: I0120 17:20:51.286470 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"a5c2a475-4aaf-4fd2-9c8d-ce699388c298","Type":"ContainerDied","Data":"b290449dc464ea932052c928ddb9f178486f85bd174381542b66daab2c500c16"} Jan 20 17:20:51 crc kubenswrapper[4558]: I0120 17:20:51.286490 4558 scope.go:117] "RemoveContainer" containerID="9c1c32b8a6ee5141b8006fd7c9f37821ec075e5089f976d9237eb542664079f9" Jan 20 17:20:51 crc kubenswrapper[4558]: I0120 17:20:51.286640 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:20:51 crc kubenswrapper[4558]: I0120 17:20:51.315030 4558 scope.go:117] "RemoveContainer" containerID="f57a731e89a8714639377a4caf5629340e4e8c23f00dfc670ca552777e92a864" Jan 20 17:20:51 crc kubenswrapper[4558]: I0120 17:20:51.326647 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:20:51 crc kubenswrapper[4558]: I0120 17:20:51.333960 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:20:51 crc kubenswrapper[4558]: I0120 17:20:51.340293 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:20:51 crc kubenswrapper[4558]: E0120 17:20:51.340665 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5c2a475-4aaf-4fd2-9c8d-ce699388c298" containerName="nova-api-log" Jan 20 17:20:51 crc kubenswrapper[4558]: I0120 17:20:51.340682 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5c2a475-4aaf-4fd2-9c8d-ce699388c298" containerName="nova-api-log" Jan 20 17:20:51 crc kubenswrapper[4558]: E0120 17:20:51.340695 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5c2a475-4aaf-4fd2-9c8d-ce699388c298" containerName="nova-api-api" Jan 20 17:20:51 crc kubenswrapper[4558]: I0120 17:20:51.340702 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5c2a475-4aaf-4fd2-9c8d-ce699388c298" containerName="nova-api-api" Jan 20 17:20:51 crc kubenswrapper[4558]: I0120 17:20:51.340885 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a5c2a475-4aaf-4fd2-9c8d-ce699388c298" containerName="nova-api-api" Jan 20 17:20:51 crc kubenswrapper[4558]: I0120 17:20:51.340902 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a5c2a475-4aaf-4fd2-9c8d-ce699388c298" containerName="nova-api-log" Jan 20 17:20:51 crc kubenswrapper[4558]: I0120 17:20:51.341793 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:20:51 crc kubenswrapper[4558]: I0120 17:20:51.342645 4558 scope.go:117] "RemoveContainer" containerID="9c1c32b8a6ee5141b8006fd7c9f37821ec075e5089f976d9237eb542664079f9" Jan 20 17:20:51 crc kubenswrapper[4558]: I0120 17:20:51.346530 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-api-config-data" Jan 20 17:20:51 crc kubenswrapper[4558]: I0120 17:20:51.346778 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-public-svc" Jan 20 17:20:51 crc kubenswrapper[4558]: I0120 17:20:51.346928 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-internal-svc" Jan 20 17:20:51 crc kubenswrapper[4558]: E0120 17:20:51.347063 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9c1c32b8a6ee5141b8006fd7c9f37821ec075e5089f976d9237eb542664079f9\": container with ID starting with 9c1c32b8a6ee5141b8006fd7c9f37821ec075e5089f976d9237eb542664079f9 not found: ID does not exist" containerID="9c1c32b8a6ee5141b8006fd7c9f37821ec075e5089f976d9237eb542664079f9" Jan 20 17:20:51 crc kubenswrapper[4558]: I0120 17:20:51.347089 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9c1c32b8a6ee5141b8006fd7c9f37821ec075e5089f976d9237eb542664079f9"} err="failed to get container status \"9c1c32b8a6ee5141b8006fd7c9f37821ec075e5089f976d9237eb542664079f9\": rpc error: code = NotFound desc = could not find container \"9c1c32b8a6ee5141b8006fd7c9f37821ec075e5089f976d9237eb542664079f9\": container with ID starting with 9c1c32b8a6ee5141b8006fd7c9f37821ec075e5089f976d9237eb542664079f9 not found: ID does not exist" Jan 20 17:20:51 crc kubenswrapper[4558]: I0120 17:20:51.347112 4558 scope.go:117] "RemoveContainer" containerID="f57a731e89a8714639377a4caf5629340e4e8c23f00dfc670ca552777e92a864" Jan 20 17:20:51 crc kubenswrapper[4558]: E0120 17:20:51.347648 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f57a731e89a8714639377a4caf5629340e4e8c23f00dfc670ca552777e92a864\": container with ID starting with f57a731e89a8714639377a4caf5629340e4e8c23f00dfc670ca552777e92a864 not found: ID does not exist" containerID="f57a731e89a8714639377a4caf5629340e4e8c23f00dfc670ca552777e92a864" Jan 20 17:20:51 crc kubenswrapper[4558]: I0120 17:20:51.347680 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f57a731e89a8714639377a4caf5629340e4e8c23f00dfc670ca552777e92a864"} err="failed to get container status \"f57a731e89a8714639377a4caf5629340e4e8c23f00dfc670ca552777e92a864\": rpc error: code = NotFound desc = could not find container \"f57a731e89a8714639377a4caf5629340e4e8c23f00dfc670ca552777e92a864\": container with ID starting with f57a731e89a8714639377a4caf5629340e4e8c23f00dfc670ca552777e92a864 not found: ID does not exist" Jan 20 17:20:51 crc kubenswrapper[4558]: I0120 17:20:51.362649 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:20:51 crc kubenswrapper[4558]: I0120 17:20:51.412274 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b-logs\") pod \"nova-api-0\" (UID: \"533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b\") " 
pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:20:51 crc kubenswrapper[4558]: I0120 17:20:51.412310 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b-internal-tls-certs\") pod \"nova-api-0\" (UID: \"533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:20:51 crc kubenswrapper[4558]: I0120 17:20:51.412522 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:20:51 crc kubenswrapper[4558]: I0120 17:20:51.412589 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2tw8d\" (UniqueName: \"kubernetes.io/projected/533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b-kube-api-access-2tw8d\") pod \"nova-api-0\" (UID: \"533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:20:51 crc kubenswrapper[4558]: I0120 17:20:51.412671 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b-public-tls-certs\") pod \"nova-api-0\" (UID: \"533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:20:51 crc kubenswrapper[4558]: I0120 17:20:51.412863 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b-config-data\") pod \"nova-api-0\" (UID: \"533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:20:51 crc kubenswrapper[4558]: I0120 17:20:51.515488 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:20:51 crc kubenswrapper[4558]: I0120 17:20:51.515546 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2tw8d\" (UniqueName: \"kubernetes.io/projected/533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b-kube-api-access-2tw8d\") pod \"nova-api-0\" (UID: \"533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:20:51 crc kubenswrapper[4558]: I0120 17:20:51.515601 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b-public-tls-certs\") pod \"nova-api-0\" (UID: \"533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:20:51 crc kubenswrapper[4558]: I0120 17:20:51.515675 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b-config-data\") pod \"nova-api-0\" (UID: \"533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:20:51 crc kubenswrapper[4558]: I0120 17:20:51.515733 4558 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b-logs\") pod \"nova-api-0\" (UID: \"533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:20:51 crc kubenswrapper[4558]: I0120 17:20:51.515753 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b-internal-tls-certs\") pod \"nova-api-0\" (UID: \"533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:20:51 crc kubenswrapper[4558]: I0120 17:20:51.516586 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b-logs\") pod \"nova-api-0\" (UID: \"533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:20:51 crc kubenswrapper[4558]: I0120 17:20:51.521024 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:20:51 crc kubenswrapper[4558]: I0120 17:20:51.521817 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b-config-data\") pod \"nova-api-0\" (UID: \"533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:20:51 crc kubenswrapper[4558]: I0120 17:20:51.521903 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b-internal-tls-certs\") pod \"nova-api-0\" (UID: \"533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:20:51 crc kubenswrapper[4558]: I0120 17:20:51.522306 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b-public-tls-certs\") pod \"nova-api-0\" (UID: \"533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:20:51 crc kubenswrapper[4558]: I0120 17:20:51.529617 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2tw8d\" (UniqueName: \"kubernetes.io/projected/533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b-kube-api-access-2tw8d\") pod \"nova-api-0\" (UID: \"533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:20:51 crc kubenswrapper[4558]: I0120 17:20:51.657806 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:20:52 crc kubenswrapper[4558]: I0120 17:20:52.121493 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:20:52 crc kubenswrapper[4558]: I0120 17:20:52.320334 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b","Type":"ContainerStarted","Data":"2eff65431cd3f96e218fee39fc3cc4517345e8ec098ef9aeba65d6a34e79cd42"} Jan 20 17:20:52 crc kubenswrapper[4558]: I0120 17:20:52.320611 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b","Type":"ContainerStarted","Data":"f30510222a66cfc9a50f9cf06421bd56ec0eb63b96631931e75f7c3f79491603"} Jan 20 17:20:52 crc kubenswrapper[4558]: I0120 17:20:52.322427 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"92f9ec95-57c2-4584-b1d2-77c0f05c0af1","Type":"ContainerStarted","Data":"1aaf78f3cefcb2d1ac289920cc3a50d6f83270297ac7eed6e2857df5e21738be"} Jan 20 17:20:52 crc kubenswrapper[4558]: I0120 17:20:52.590522 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a5c2a475-4aaf-4fd2-9c8d-ce699388c298" path="/var/lib/kubelet/pods/a5c2a475-4aaf-4fd2-9c8d-ce699388c298/volumes" Jan 20 17:20:53 crc kubenswrapper[4558]: I0120 17:20:53.336832 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b","Type":"ContainerStarted","Data":"d68462f667579ef49cbcb5dabe1e6eb12c8f1227e9a7202b73ef439191bd1da7"} Jan 20 17:20:53 crc kubenswrapper[4558]: I0120 17:20:53.340702 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"92f9ec95-57c2-4584-b1d2-77c0f05c0af1","Type":"ContainerStarted","Data":"4c1e24d067a19105d1aea61557859bbbc31f1068a20abc92da5bf5f3a02e72f4"} Jan 20 17:20:53 crc kubenswrapper[4558]: I0120 17:20:53.365759 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-api-0" podStartSLOduration=2.365742348 podStartE2EDuration="2.365742348s" podCreationTimestamp="2026-01-20 17:20:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:20:53.357607828 +0000 UTC m=+2347.117945784" watchObservedRunningTime="2026-01-20 17:20:53.365742348 +0000 UTC m=+2347.126080315" Jan 20 17:20:55 crc kubenswrapper[4558]: I0120 17:20:55.359532 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"92f9ec95-57c2-4584-b1d2-77c0f05c0af1","Type":"ContainerStarted","Data":"6f6bd0c96b12f13b8d49f31c1320d3a730b77689e097da24211645319eac3e5b"} Jan 20 17:20:55 crc kubenswrapper[4558]: I0120 17:20:55.360018 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:55 crc kubenswrapper[4558]: I0120 17:20:55.359782 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="92f9ec95-57c2-4584-b1d2-77c0f05c0af1" containerName="ceilometer-notification-agent" containerID="cri-o://1aaf78f3cefcb2d1ac289920cc3a50d6f83270297ac7eed6e2857df5e21738be" gracePeriod=30 Jan 20 17:20:55 crc kubenswrapper[4558]: I0120 17:20:55.359711 4558 kuberuntime_container.go:808] "Killing 
container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="92f9ec95-57c2-4584-b1d2-77c0f05c0af1" containerName="ceilometer-central-agent" containerID="cri-o://272675fe6d3890f5144654af1035e7e1268b5268b69af0ce91aa57817dae1595" gracePeriod=30 Jan 20 17:20:55 crc kubenswrapper[4558]: I0120 17:20:55.359766 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="92f9ec95-57c2-4584-b1d2-77c0f05c0af1" containerName="proxy-httpd" containerID="cri-o://6f6bd0c96b12f13b8d49f31c1320d3a730b77689e097da24211645319eac3e5b" gracePeriod=30 Jan 20 17:20:55 crc kubenswrapper[4558]: I0120 17:20:55.359812 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="92f9ec95-57c2-4584-b1d2-77c0f05c0af1" containerName="sg-core" containerID="cri-o://4c1e24d067a19105d1aea61557859bbbc31f1068a20abc92da5bf5f3a02e72f4" gracePeriod=30 Jan 20 17:20:55 crc kubenswrapper[4558]: I0120 17:20:55.380527 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=1.971980817 podStartE2EDuration="6.38051257s" podCreationTimestamp="2026-01-20 17:20:49 +0000 UTC" firstStartedPulling="2026-01-20 17:20:50.072356863 +0000 UTC m=+2343.832694830" lastFinishedPulling="2026-01-20 17:20:54.480888615 +0000 UTC m=+2348.241226583" observedRunningTime="2026-01-20 17:20:55.379812974 +0000 UTC m=+2349.140150942" watchObservedRunningTime="2026-01-20 17:20:55.38051257 +0000 UTC m=+2349.140850538" Jan 20 17:20:55 crc kubenswrapper[4558]: E0120 17:20:55.582901 4558 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod92f9ec95_57c2_4584_b1d2_77c0f05c0af1.slice/crio-conmon-4c1e24d067a19105d1aea61557859bbbc31f1068a20abc92da5bf5f3a02e72f4.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod92f9ec95_57c2_4584_b1d2_77c0f05c0af1.slice/crio-6f6bd0c96b12f13b8d49f31c1320d3a730b77689e097da24211645319eac3e5b.scope\": RecentStats: unable to find data in memory cache]" Jan 20 17:20:56 crc kubenswrapper[4558]: I0120 17:20:56.376782 4558 generic.go:334] "Generic (PLEG): container finished" podID="92f9ec95-57c2-4584-b1d2-77c0f05c0af1" containerID="6f6bd0c96b12f13b8d49f31c1320d3a730b77689e097da24211645319eac3e5b" exitCode=0 Jan 20 17:20:56 crc kubenswrapper[4558]: I0120 17:20:56.376824 4558 generic.go:334] "Generic (PLEG): container finished" podID="92f9ec95-57c2-4584-b1d2-77c0f05c0af1" containerID="4c1e24d067a19105d1aea61557859bbbc31f1068a20abc92da5bf5f3a02e72f4" exitCode=2 Jan 20 17:20:56 crc kubenswrapper[4558]: I0120 17:20:56.376835 4558 generic.go:334] "Generic (PLEG): container finished" podID="92f9ec95-57c2-4584-b1d2-77c0f05c0af1" containerID="1aaf78f3cefcb2d1ac289920cc3a50d6f83270297ac7eed6e2857df5e21738be" exitCode=0 Jan 20 17:20:56 crc kubenswrapper[4558]: I0120 17:20:56.376862 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"92f9ec95-57c2-4584-b1d2-77c0f05c0af1","Type":"ContainerDied","Data":"6f6bd0c96b12f13b8d49f31c1320d3a730b77689e097da24211645319eac3e5b"} Jan 20 17:20:56 crc kubenswrapper[4558]: I0120 17:20:56.376924 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" 
event={"ID":"92f9ec95-57c2-4584-b1d2-77c0f05c0af1","Type":"ContainerDied","Data":"4c1e24d067a19105d1aea61557859bbbc31f1068a20abc92da5bf5f3a02e72f4"} Jan 20 17:20:56 crc kubenswrapper[4558]: I0120 17:20:56.376937 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"92f9ec95-57c2-4584-b1d2-77c0f05c0af1","Type":"ContainerDied","Data":"1aaf78f3cefcb2d1ac289920cc3a50d6f83270297ac7eed6e2857df5e21738be"} Jan 20 17:20:56 crc kubenswrapper[4558]: I0120 17:20:56.702779 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:56 crc kubenswrapper[4558]: I0120 17:20:56.863535 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/92f9ec95-57c2-4584-b1d2-77c0f05c0af1-run-httpd\") pod \"92f9ec95-57c2-4584-b1d2-77c0f05c0af1\" (UID: \"92f9ec95-57c2-4584-b1d2-77c0f05c0af1\") " Jan 20 17:20:56 crc kubenswrapper[4558]: I0120 17:20:56.863876 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/92f9ec95-57c2-4584-b1d2-77c0f05c0af1-log-httpd\") pod \"92f9ec95-57c2-4584-b1d2-77c0f05c0af1\" (UID: \"92f9ec95-57c2-4584-b1d2-77c0f05c0af1\") " Jan 20 17:20:56 crc kubenswrapper[4558]: I0120 17:20:56.864031 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/92f9ec95-57c2-4584-b1d2-77c0f05c0af1-ceilometer-tls-certs\") pod \"92f9ec95-57c2-4584-b1d2-77c0f05c0af1\" (UID: \"92f9ec95-57c2-4584-b1d2-77c0f05c0af1\") " Jan 20 17:20:56 crc kubenswrapper[4558]: I0120 17:20:56.863865 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/92f9ec95-57c2-4584-b1d2-77c0f05c0af1-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "92f9ec95-57c2-4584-b1d2-77c0f05c0af1" (UID: "92f9ec95-57c2-4584-b1d2-77c0f05c0af1"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:20:56 crc kubenswrapper[4558]: I0120 17:20:56.864084 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/92f9ec95-57c2-4584-b1d2-77c0f05c0af1-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "92f9ec95-57c2-4584-b1d2-77c0f05c0af1" (UID: "92f9ec95-57c2-4584-b1d2-77c0f05c0af1"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:20:56 crc kubenswrapper[4558]: I0120 17:20:56.864348 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92f9ec95-57c2-4584-b1d2-77c0f05c0af1-config-data\") pod \"92f9ec95-57c2-4584-b1d2-77c0f05c0af1\" (UID: \"92f9ec95-57c2-4584-b1d2-77c0f05c0af1\") " Jan 20 17:20:56 crc kubenswrapper[4558]: I0120 17:20:56.864512 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zmms6\" (UniqueName: \"kubernetes.io/projected/92f9ec95-57c2-4584-b1d2-77c0f05c0af1-kube-api-access-zmms6\") pod \"92f9ec95-57c2-4584-b1d2-77c0f05c0af1\" (UID: \"92f9ec95-57c2-4584-b1d2-77c0f05c0af1\") " Jan 20 17:20:56 crc kubenswrapper[4558]: I0120 17:20:56.864637 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92f9ec95-57c2-4584-b1d2-77c0f05c0af1-combined-ca-bundle\") pod \"92f9ec95-57c2-4584-b1d2-77c0f05c0af1\" (UID: \"92f9ec95-57c2-4584-b1d2-77c0f05c0af1\") " Jan 20 17:20:56 crc kubenswrapper[4558]: I0120 17:20:56.864754 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/92f9ec95-57c2-4584-b1d2-77c0f05c0af1-scripts\") pod \"92f9ec95-57c2-4584-b1d2-77c0f05c0af1\" (UID: \"92f9ec95-57c2-4584-b1d2-77c0f05c0af1\") " Jan 20 17:20:56 crc kubenswrapper[4558]: I0120 17:20:56.864882 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/92f9ec95-57c2-4584-b1d2-77c0f05c0af1-sg-core-conf-yaml\") pod \"92f9ec95-57c2-4584-b1d2-77c0f05c0af1\" (UID: \"92f9ec95-57c2-4584-b1d2-77c0f05c0af1\") " Jan 20 17:20:56 crc kubenswrapper[4558]: I0120 17:20:56.866000 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/92f9ec95-57c2-4584-b1d2-77c0f05c0af1-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:20:56 crc kubenswrapper[4558]: I0120 17:20:56.866074 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/92f9ec95-57c2-4584-b1d2-77c0f05c0af1-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:20:56 crc kubenswrapper[4558]: I0120 17:20:56.871389 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/92f9ec95-57c2-4584-b1d2-77c0f05c0af1-kube-api-access-zmms6" (OuterVolumeSpecName: "kube-api-access-zmms6") pod "92f9ec95-57c2-4584-b1d2-77c0f05c0af1" (UID: "92f9ec95-57c2-4584-b1d2-77c0f05c0af1"). InnerVolumeSpecName "kube-api-access-zmms6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:20:56 crc kubenswrapper[4558]: I0120 17:20:56.871538 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92f9ec95-57c2-4584-b1d2-77c0f05c0af1-scripts" (OuterVolumeSpecName: "scripts") pod "92f9ec95-57c2-4584-b1d2-77c0f05c0af1" (UID: "92f9ec95-57c2-4584-b1d2-77c0f05c0af1"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:20:56 crc kubenswrapper[4558]: I0120 17:20:56.895644 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92f9ec95-57c2-4584-b1d2-77c0f05c0af1-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "92f9ec95-57c2-4584-b1d2-77c0f05c0af1" (UID: "92f9ec95-57c2-4584-b1d2-77c0f05c0af1"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:20:56 crc kubenswrapper[4558]: I0120 17:20:56.915504 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92f9ec95-57c2-4584-b1d2-77c0f05c0af1-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "92f9ec95-57c2-4584-b1d2-77c0f05c0af1" (UID: "92f9ec95-57c2-4584-b1d2-77c0f05c0af1"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:20:56 crc kubenswrapper[4558]: I0120 17:20:56.933480 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92f9ec95-57c2-4584-b1d2-77c0f05c0af1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "92f9ec95-57c2-4584-b1d2-77c0f05c0af1" (UID: "92f9ec95-57c2-4584-b1d2-77c0f05c0af1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:20:56 crc kubenswrapper[4558]: I0120 17:20:56.952307 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92f9ec95-57c2-4584-b1d2-77c0f05c0af1-config-data" (OuterVolumeSpecName: "config-data") pod "92f9ec95-57c2-4584-b1d2-77c0f05c0af1" (UID: "92f9ec95-57c2-4584-b1d2-77c0f05c0af1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:20:56 crc kubenswrapper[4558]: I0120 17:20:56.967773 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/92f9ec95-57c2-4584-b1d2-77c0f05c0af1-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:20:56 crc kubenswrapper[4558]: I0120 17:20:56.967806 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/92f9ec95-57c2-4584-b1d2-77c0f05c0af1-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:20:56 crc kubenswrapper[4558]: I0120 17:20:56.967818 4558 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/92f9ec95-57c2-4584-b1d2-77c0f05c0af1-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:20:56 crc kubenswrapper[4558]: I0120 17:20:56.967831 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92f9ec95-57c2-4584-b1d2-77c0f05c0af1-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:20:56 crc kubenswrapper[4558]: I0120 17:20:56.967843 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zmms6\" (UniqueName: \"kubernetes.io/projected/92f9ec95-57c2-4584-b1d2-77c0f05c0af1-kube-api-access-zmms6\") on node \"crc\" DevicePath \"\"" Jan 20 17:20:56 crc kubenswrapper[4558]: I0120 17:20:56.967854 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92f9ec95-57c2-4584-b1d2-77c0f05c0af1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:20:57 crc kubenswrapper[4558]: I0120 17:20:57.392017 4558 generic.go:334] "Generic (PLEG): container 
finished" podID="92f9ec95-57c2-4584-b1d2-77c0f05c0af1" containerID="272675fe6d3890f5144654af1035e7e1268b5268b69af0ce91aa57817dae1595" exitCode=0 Jan 20 17:20:57 crc kubenswrapper[4558]: I0120 17:20:57.392077 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"92f9ec95-57c2-4584-b1d2-77c0f05c0af1","Type":"ContainerDied","Data":"272675fe6d3890f5144654af1035e7e1268b5268b69af0ce91aa57817dae1595"} Jan 20 17:20:57 crc kubenswrapper[4558]: I0120 17:20:57.392106 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:57 crc kubenswrapper[4558]: I0120 17:20:57.392123 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"92f9ec95-57c2-4584-b1d2-77c0f05c0af1","Type":"ContainerDied","Data":"921e6b3c70578c3c232ed96b123dbf031f9535812ffbefffdf09f9007f32303e"} Jan 20 17:20:57 crc kubenswrapper[4558]: I0120 17:20:57.392145 4558 scope.go:117] "RemoveContainer" containerID="6f6bd0c96b12f13b8d49f31c1320d3a730b77689e097da24211645319eac3e5b" Jan 20 17:20:57 crc kubenswrapper[4558]: I0120 17:20:57.416955 4558 scope.go:117] "RemoveContainer" containerID="4c1e24d067a19105d1aea61557859bbbc31f1068a20abc92da5bf5f3a02e72f4" Jan 20 17:20:57 crc kubenswrapper[4558]: I0120 17:20:57.425136 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:20:57 crc kubenswrapper[4558]: I0120 17:20:57.439147 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:20:57 crc kubenswrapper[4558]: I0120 17:20:57.440470 4558 scope.go:117] "RemoveContainer" containerID="1aaf78f3cefcb2d1ac289920cc3a50d6f83270297ac7eed6e2857df5e21738be" Jan 20 17:20:57 crc kubenswrapper[4558]: I0120 17:20:57.452311 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:20:57 crc kubenswrapper[4558]: E0120 17:20:57.452825 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92f9ec95-57c2-4584-b1d2-77c0f05c0af1" containerName="proxy-httpd" Jan 20 17:20:57 crc kubenswrapper[4558]: I0120 17:20:57.452847 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="92f9ec95-57c2-4584-b1d2-77c0f05c0af1" containerName="proxy-httpd" Jan 20 17:20:57 crc kubenswrapper[4558]: E0120 17:20:57.452871 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92f9ec95-57c2-4584-b1d2-77c0f05c0af1" containerName="sg-core" Jan 20 17:20:57 crc kubenswrapper[4558]: I0120 17:20:57.452877 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="92f9ec95-57c2-4584-b1d2-77c0f05c0af1" containerName="sg-core" Jan 20 17:20:57 crc kubenswrapper[4558]: E0120 17:20:57.452890 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92f9ec95-57c2-4584-b1d2-77c0f05c0af1" containerName="ceilometer-central-agent" Jan 20 17:20:57 crc kubenswrapper[4558]: I0120 17:20:57.452895 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="92f9ec95-57c2-4584-b1d2-77c0f05c0af1" containerName="ceilometer-central-agent" Jan 20 17:20:57 crc kubenswrapper[4558]: E0120 17:20:57.452920 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92f9ec95-57c2-4584-b1d2-77c0f05c0af1" containerName="ceilometer-notification-agent" Jan 20 17:20:57 crc kubenswrapper[4558]: I0120 17:20:57.452926 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="92f9ec95-57c2-4584-b1d2-77c0f05c0af1" 
containerName="ceilometer-notification-agent" Jan 20 17:20:57 crc kubenswrapper[4558]: I0120 17:20:57.453127 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="92f9ec95-57c2-4584-b1d2-77c0f05c0af1" containerName="sg-core" Jan 20 17:20:57 crc kubenswrapper[4558]: I0120 17:20:57.453148 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="92f9ec95-57c2-4584-b1d2-77c0f05c0af1" containerName="ceilometer-central-agent" Jan 20 17:20:57 crc kubenswrapper[4558]: I0120 17:20:57.453155 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="92f9ec95-57c2-4584-b1d2-77c0f05c0af1" containerName="ceilometer-notification-agent" Jan 20 17:20:57 crc kubenswrapper[4558]: I0120 17:20:57.453187 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="92f9ec95-57c2-4584-b1d2-77c0f05c0af1" containerName="proxy-httpd" Jan 20 17:20:57 crc kubenswrapper[4558]: I0120 17:20:57.454922 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:57 crc kubenswrapper[4558]: I0120 17:20:57.457213 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:20:57 crc kubenswrapper[4558]: I0120 17:20:57.457315 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:20:57 crc kubenswrapper[4558]: I0120 17:20:57.459358 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ceilometer-internal-svc" Jan 20 17:20:57 crc kubenswrapper[4558]: I0120 17:20:57.460513 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:20:57 crc kubenswrapper[4558]: I0120 17:20:57.461796 4558 scope.go:117] "RemoveContainer" containerID="272675fe6d3890f5144654af1035e7e1268b5268b69af0ce91aa57817dae1595" Jan 20 17:20:57 crc kubenswrapper[4558]: I0120 17:20:57.486038 4558 scope.go:117] "RemoveContainer" containerID="6f6bd0c96b12f13b8d49f31c1320d3a730b77689e097da24211645319eac3e5b" Jan 20 17:20:57 crc kubenswrapper[4558]: E0120 17:20:57.486470 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6f6bd0c96b12f13b8d49f31c1320d3a730b77689e097da24211645319eac3e5b\": container with ID starting with 6f6bd0c96b12f13b8d49f31c1320d3a730b77689e097da24211645319eac3e5b not found: ID does not exist" containerID="6f6bd0c96b12f13b8d49f31c1320d3a730b77689e097da24211645319eac3e5b" Jan 20 17:20:57 crc kubenswrapper[4558]: I0120 17:20:57.486502 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6f6bd0c96b12f13b8d49f31c1320d3a730b77689e097da24211645319eac3e5b"} err="failed to get container status \"6f6bd0c96b12f13b8d49f31c1320d3a730b77689e097da24211645319eac3e5b\": rpc error: code = NotFound desc = could not find container \"6f6bd0c96b12f13b8d49f31c1320d3a730b77689e097da24211645319eac3e5b\": container with ID starting with 6f6bd0c96b12f13b8d49f31c1320d3a730b77689e097da24211645319eac3e5b not found: ID does not exist" Jan 20 17:20:57 crc kubenswrapper[4558]: I0120 17:20:57.486525 4558 scope.go:117] "RemoveContainer" containerID="4c1e24d067a19105d1aea61557859bbbc31f1068a20abc92da5bf5f3a02e72f4" Jan 20 17:20:57 crc kubenswrapper[4558]: E0120 17:20:57.486783 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"4c1e24d067a19105d1aea61557859bbbc31f1068a20abc92da5bf5f3a02e72f4\": container with ID starting with 4c1e24d067a19105d1aea61557859bbbc31f1068a20abc92da5bf5f3a02e72f4 not found: ID does not exist" containerID="4c1e24d067a19105d1aea61557859bbbc31f1068a20abc92da5bf5f3a02e72f4" Jan 20 17:20:57 crc kubenswrapper[4558]: I0120 17:20:57.486816 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4c1e24d067a19105d1aea61557859bbbc31f1068a20abc92da5bf5f3a02e72f4"} err="failed to get container status \"4c1e24d067a19105d1aea61557859bbbc31f1068a20abc92da5bf5f3a02e72f4\": rpc error: code = NotFound desc = could not find container \"4c1e24d067a19105d1aea61557859bbbc31f1068a20abc92da5bf5f3a02e72f4\": container with ID starting with 4c1e24d067a19105d1aea61557859bbbc31f1068a20abc92da5bf5f3a02e72f4 not found: ID does not exist" Jan 20 17:20:57 crc kubenswrapper[4558]: I0120 17:20:57.486837 4558 scope.go:117] "RemoveContainer" containerID="1aaf78f3cefcb2d1ac289920cc3a50d6f83270297ac7eed6e2857df5e21738be" Jan 20 17:20:57 crc kubenswrapper[4558]: E0120 17:20:57.487132 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1aaf78f3cefcb2d1ac289920cc3a50d6f83270297ac7eed6e2857df5e21738be\": container with ID starting with 1aaf78f3cefcb2d1ac289920cc3a50d6f83270297ac7eed6e2857df5e21738be not found: ID does not exist" containerID="1aaf78f3cefcb2d1ac289920cc3a50d6f83270297ac7eed6e2857df5e21738be" Jan 20 17:20:57 crc kubenswrapper[4558]: I0120 17:20:57.487159 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1aaf78f3cefcb2d1ac289920cc3a50d6f83270297ac7eed6e2857df5e21738be"} err="failed to get container status \"1aaf78f3cefcb2d1ac289920cc3a50d6f83270297ac7eed6e2857df5e21738be\": rpc error: code = NotFound desc = could not find container \"1aaf78f3cefcb2d1ac289920cc3a50d6f83270297ac7eed6e2857df5e21738be\": container with ID starting with 1aaf78f3cefcb2d1ac289920cc3a50d6f83270297ac7eed6e2857df5e21738be not found: ID does not exist" Jan 20 17:20:57 crc kubenswrapper[4558]: I0120 17:20:57.487185 4558 scope.go:117] "RemoveContainer" containerID="272675fe6d3890f5144654af1035e7e1268b5268b69af0ce91aa57817dae1595" Jan 20 17:20:57 crc kubenswrapper[4558]: E0120 17:20:57.487416 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"272675fe6d3890f5144654af1035e7e1268b5268b69af0ce91aa57817dae1595\": container with ID starting with 272675fe6d3890f5144654af1035e7e1268b5268b69af0ce91aa57817dae1595 not found: ID does not exist" containerID="272675fe6d3890f5144654af1035e7e1268b5268b69af0ce91aa57817dae1595" Jan 20 17:20:57 crc kubenswrapper[4558]: I0120 17:20:57.487436 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"272675fe6d3890f5144654af1035e7e1268b5268b69af0ce91aa57817dae1595"} err="failed to get container status \"272675fe6d3890f5144654af1035e7e1268b5268b69af0ce91aa57817dae1595\": rpc error: code = NotFound desc = could not find container \"272675fe6d3890f5144654af1035e7e1268b5268b69af0ce91aa57817dae1595\": container with ID starting with 272675fe6d3890f5144654af1035e7e1268b5268b69af0ce91aa57817dae1595 not found: ID does not exist" Jan 20 17:20:57 crc kubenswrapper[4558]: I0120 17:20:57.588812 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/0378e8e5-48af-48c3-9100-80622a77533b-log-httpd\") pod \"ceilometer-0\" (UID: \"0378e8e5-48af-48c3-9100-80622a77533b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:57 crc kubenswrapper[4558]: I0120 17:20:57.588858 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0378e8e5-48af-48c3-9100-80622a77533b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0378e8e5-48af-48c3-9100-80622a77533b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:57 crc kubenswrapper[4558]: I0120 17:20:57.588884 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0378e8e5-48af-48c3-9100-80622a77533b-scripts\") pod \"ceilometer-0\" (UID: \"0378e8e5-48af-48c3-9100-80622a77533b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:57 crc kubenswrapper[4558]: I0120 17:20:57.588908 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/0378e8e5-48af-48c3-9100-80622a77533b-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"0378e8e5-48af-48c3-9100-80622a77533b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:57 crc kubenswrapper[4558]: I0120 17:20:57.589237 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0378e8e5-48af-48c3-9100-80622a77533b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0378e8e5-48af-48c3-9100-80622a77533b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:57 crc kubenswrapper[4558]: I0120 17:20:57.589741 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pppjg\" (UniqueName: \"kubernetes.io/projected/0378e8e5-48af-48c3-9100-80622a77533b-kube-api-access-pppjg\") pod \"ceilometer-0\" (UID: \"0378e8e5-48af-48c3-9100-80622a77533b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:57 crc kubenswrapper[4558]: I0120 17:20:57.590063 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0378e8e5-48af-48c3-9100-80622a77533b-run-httpd\") pod \"ceilometer-0\" (UID: \"0378e8e5-48af-48c3-9100-80622a77533b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:57 crc kubenswrapper[4558]: I0120 17:20:57.590183 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0378e8e5-48af-48c3-9100-80622a77533b-config-data\") pod \"ceilometer-0\" (UID: \"0378e8e5-48af-48c3-9100-80622a77533b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:57 crc kubenswrapper[4558]: I0120 17:20:57.691488 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pppjg\" (UniqueName: \"kubernetes.io/projected/0378e8e5-48af-48c3-9100-80622a77533b-kube-api-access-pppjg\") pod \"ceilometer-0\" (UID: \"0378e8e5-48af-48c3-9100-80622a77533b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:57 crc kubenswrapper[4558]: I0120 17:20:57.691613 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0378e8e5-48af-48c3-9100-80622a77533b-run-httpd\") pod \"ceilometer-0\" 
(UID: \"0378e8e5-48af-48c3-9100-80622a77533b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:57 crc kubenswrapper[4558]: I0120 17:20:57.691646 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0378e8e5-48af-48c3-9100-80622a77533b-config-data\") pod \"ceilometer-0\" (UID: \"0378e8e5-48af-48c3-9100-80622a77533b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:57 crc kubenswrapper[4558]: I0120 17:20:57.691679 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0378e8e5-48af-48c3-9100-80622a77533b-log-httpd\") pod \"ceilometer-0\" (UID: \"0378e8e5-48af-48c3-9100-80622a77533b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:57 crc kubenswrapper[4558]: I0120 17:20:57.691698 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0378e8e5-48af-48c3-9100-80622a77533b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0378e8e5-48af-48c3-9100-80622a77533b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:57 crc kubenswrapper[4558]: I0120 17:20:57.691716 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0378e8e5-48af-48c3-9100-80622a77533b-scripts\") pod \"ceilometer-0\" (UID: \"0378e8e5-48af-48c3-9100-80622a77533b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:57 crc kubenswrapper[4558]: I0120 17:20:57.691745 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/0378e8e5-48af-48c3-9100-80622a77533b-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"0378e8e5-48af-48c3-9100-80622a77533b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:57 crc kubenswrapper[4558]: I0120 17:20:57.691784 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0378e8e5-48af-48c3-9100-80622a77533b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0378e8e5-48af-48c3-9100-80622a77533b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:57 crc kubenswrapper[4558]: I0120 17:20:57.692596 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0378e8e5-48af-48c3-9100-80622a77533b-log-httpd\") pod \"ceilometer-0\" (UID: \"0378e8e5-48af-48c3-9100-80622a77533b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:57 crc kubenswrapper[4558]: I0120 17:20:57.692953 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0378e8e5-48af-48c3-9100-80622a77533b-run-httpd\") pod \"ceilometer-0\" (UID: \"0378e8e5-48af-48c3-9100-80622a77533b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:57 crc kubenswrapper[4558]: I0120 17:20:57.701080 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/0378e8e5-48af-48c3-9100-80622a77533b-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"0378e8e5-48af-48c3-9100-80622a77533b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:57 crc kubenswrapper[4558]: I0120 17:20:57.703376 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/0378e8e5-48af-48c3-9100-80622a77533b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0378e8e5-48af-48c3-9100-80622a77533b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:57 crc kubenswrapper[4558]: I0120 17:20:57.704450 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0378e8e5-48af-48c3-9100-80622a77533b-config-data\") pod \"ceilometer-0\" (UID: \"0378e8e5-48af-48c3-9100-80622a77533b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:57 crc kubenswrapper[4558]: I0120 17:20:57.708932 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0378e8e5-48af-48c3-9100-80622a77533b-scripts\") pod \"ceilometer-0\" (UID: \"0378e8e5-48af-48c3-9100-80622a77533b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:57 crc kubenswrapper[4558]: I0120 17:20:57.710358 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pppjg\" (UniqueName: \"kubernetes.io/projected/0378e8e5-48af-48c3-9100-80622a77533b-kube-api-access-pppjg\") pod \"ceilometer-0\" (UID: \"0378e8e5-48af-48c3-9100-80622a77533b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:57 crc kubenswrapper[4558]: I0120 17:20:57.720008 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0378e8e5-48af-48c3-9100-80622a77533b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0378e8e5-48af-48c3-9100-80622a77533b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:57 crc kubenswrapper[4558]: I0120 17:20:57.773528 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:20:58 crc kubenswrapper[4558]: W0120 17:20:58.185473 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0378e8e5_48af_48c3_9100_80622a77533b.slice/crio-8242e34be97c8c6bad622b1f540b04254e03ba46c6ececd1dffb4c8d13ea36e3 WatchSource:0}: Error finding container 8242e34be97c8c6bad622b1f540b04254e03ba46c6ececd1dffb4c8d13ea36e3: Status 404 returned error can't find the container with id 8242e34be97c8c6bad622b1f540b04254e03ba46c6ececd1dffb4c8d13ea36e3 Jan 20 17:20:58 crc kubenswrapper[4558]: I0120 17:20:58.190376 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:20:58 crc kubenswrapper[4558]: I0120 17:20:58.404808 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"0378e8e5-48af-48c3-9100-80622a77533b","Type":"ContainerStarted","Data":"8242e34be97c8c6bad622b1f540b04254e03ba46c6ececd1dffb4c8d13ea36e3"} Jan 20 17:20:58 crc kubenswrapper[4558]: I0120 17:20:58.586405 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="92f9ec95-57c2-4584-b1d2-77c0f05c0af1" path="/var/lib/kubelet/pods/92f9ec95-57c2-4584-b1d2-77c0f05c0af1/volumes" Jan 20 17:20:59 crc kubenswrapper[4558]: I0120 17:20:59.422265 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"0378e8e5-48af-48c3-9100-80622a77533b","Type":"ContainerStarted","Data":"6afebd361ef0e45cdb43a62661e5fbd5a98fd241267936b37385d0bfde77a3bd"} Jan 20 17:21:00 crc kubenswrapper[4558]: I0120 17:21:00.437421 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" 
event={"ID":"0378e8e5-48af-48c3-9100-80622a77533b","Type":"ContainerStarted","Data":"253959d11bdee04e0c4129162924f6cddfeb4ed19ae0ee14fb63f2b31dd7810a"} Jan 20 17:21:00 crc kubenswrapper[4558]: I0120 17:21:00.566145 4558 scope.go:117] "RemoveContainer" containerID="0ffce44366a8b4466427b682fb41640425366a0f4449210959148de9aef695c6" Jan 20 17:21:00 crc kubenswrapper[4558]: E0120 17:21:00.566719 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:21:01 crc kubenswrapper[4558]: I0120 17:21:01.451929 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"0378e8e5-48af-48c3-9100-80622a77533b","Type":"ContainerStarted","Data":"75449ae33552c994c4ad5b2bca2b18138daeaa442df1920c73602222ea16d437"} Jan 20 17:21:01 crc kubenswrapper[4558]: I0120 17:21:01.658716 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:21:01 crc kubenswrapper[4558]: I0120 17:21:01.658776 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:21:02 crc kubenswrapper[4558]: I0120 17:21:02.467110 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"0378e8e5-48af-48c3-9100-80622a77533b","Type":"ContainerStarted","Data":"d207a7762509b3d4788233142ef6435d9d2416a2d5fceb734f0d9d4e3721e67e"} Jan 20 17:21:02 crc kubenswrapper[4558]: I0120 17:21:02.468574 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:21:02 crc kubenswrapper[4558]: I0120 17:21:02.495044 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=1.854061315 podStartE2EDuration="5.49502586s" podCreationTimestamp="2026-01-20 17:20:57 +0000 UTC" firstStartedPulling="2026-01-20 17:20:58.18925021 +0000 UTC m=+2351.949588177" lastFinishedPulling="2026-01-20 17:21:01.830214755 +0000 UTC m=+2355.590552722" observedRunningTime="2026-01-20 17:21:02.493471817 +0000 UTC m=+2356.253809785" watchObservedRunningTime="2026-01-20 17:21:02.49502586 +0000 UTC m=+2356.255363828" Jan 20 17:21:02 crc kubenswrapper[4558]: I0120 17:21:02.676357 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" podUID="533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.214:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:21:02 crc kubenswrapper[4558]: I0120 17:21:02.676435 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" podUID="533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.214:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:21:06 crc kubenswrapper[4558]: I0120 17:21:06.051255 4558 scope.go:117] "RemoveContainer" containerID="0a0eaadfee5356b39912e205ecdbed7a048b984f1c3c340a311f0c6d29670423" Jan 20 17:21:06 crc 
kubenswrapper[4558]: I0120 17:21:06.094825 4558 scope.go:117] "RemoveContainer" containerID="883394b0eba68b6b185ae8ea2cf0e2b62ea0e3012167ae18ee5ac40175a88877" Jan 20 17:21:06 crc kubenswrapper[4558]: I0120 17:21:06.117580 4558 scope.go:117] "RemoveContainer" containerID="b814fd8702534f35668ee31f88682bb9c76df8f8dfbcd5d7d7c5cbda3ab01f91" Jan 20 17:21:06 crc kubenswrapper[4558]: I0120 17:21:06.172482 4558 scope.go:117] "RemoveContainer" containerID="2b7ec2d7ccc9dca2793c497e669041069a2d75e8c81cc8deccc7613b50550351" Jan 20 17:21:06 crc kubenswrapper[4558]: I0120 17:21:06.191729 4558 scope.go:117] "RemoveContainer" containerID="f993c3851cad4942bd43255e4b46e389ed11dd15b246b851874c551543ffd036" Jan 20 17:21:06 crc kubenswrapper[4558]: I0120 17:21:06.231914 4558 scope.go:117] "RemoveContainer" containerID="9b22e669bf253aa92c22917982ce64b58f8825c76606560a2e22b75e74428371" Jan 20 17:21:06 crc kubenswrapper[4558]: I0120 17:21:06.274813 4558 scope.go:117] "RemoveContainer" containerID="2922ddc4410807395574c4bcc5718acbb05a0ff484308d0ea59126f2251e8de3" Jan 20 17:21:06 crc kubenswrapper[4558]: I0120 17:21:06.313685 4558 scope.go:117] "RemoveContainer" containerID="c509ef0942ef4f61cb8995d82ebb71f9cf485dd51e17ff30d688714ae3d81de1" Jan 20 17:21:06 crc kubenswrapper[4558]: I0120 17:21:06.330763 4558 scope.go:117] "RemoveContainer" containerID="34592e4c65b898d25a46f138f47e5ebed319feee771e779589feba4abd2545b5" Jan 20 17:21:06 crc kubenswrapper[4558]: I0120 17:21:06.363578 4558 scope.go:117] "RemoveContainer" containerID="a5660a8652ca704957d7bced65ab7d2377b377fe46ad27b55d4e4ae2ccdc42e9" Jan 20 17:21:11 crc kubenswrapper[4558]: I0120 17:21:11.566299 4558 scope.go:117] "RemoveContainer" containerID="0ffce44366a8b4466427b682fb41640425366a0f4449210959148de9aef695c6" Jan 20 17:21:11 crc kubenswrapper[4558]: E0120 17:21:11.567175 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:21:11 crc kubenswrapper[4558]: I0120 17:21:11.665814 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:21:11 crc kubenswrapper[4558]: I0120 17:21:11.666337 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:21:11 crc kubenswrapper[4558]: I0120 17:21:11.666958 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:21:11 crc kubenswrapper[4558]: I0120 17:21:11.670466 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:21:12 crc kubenswrapper[4558]: I0120 17:21:12.575390 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:21:12 crc kubenswrapper[4558]: I0120 17:21:12.575442 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:21:23 crc kubenswrapper[4558]: I0120 17:21:23.567245 4558 scope.go:117] "RemoveContainer" containerID="0ffce44366a8b4466427b682fb41640425366a0f4449210959148de9aef695c6" Jan 20 17:21:23 crc kubenswrapper[4558]: E0120 17:21:23.568271 
4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:21:27 crc kubenswrapper[4558]: I0120 17:21:27.781433 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:21:31 crc kubenswrapper[4558]: I0120 17:21:31.242898 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-0"] Jan 20 17:21:31 crc kubenswrapper[4558]: I0120 17:21:31.353397 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-2"] Jan 20 17:21:31 crc kubenswrapper[4558]: E0120 17:21:31.367077 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Jan 20 17:21:31 crc kubenswrapper[4558]: E0120 17:21:31.367143 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/6dae93de-b907-44f7-a94c-c691eee0af7f-config-data podName:6dae93de-b907-44f7-a94c-c691eee0af7f nodeName:}" failed. No retries permitted until 2026-01-20 17:21:31.867124209 +0000 UTC m=+2385.627462176 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/6dae93de-b907-44f7-a94c-c691eee0af7f-config-data") pod "rabbitmq-cell1-server-0" (UID: "6dae93de-b907-44f7-a94c-c691eee0af7f") : configmap "rabbitmq-cell1-config-data" not found Jan 20 17:21:31 crc kubenswrapper[4558]: I0120 17:21:31.394303 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:21:31 crc kubenswrapper[4558]: I0120 17:21:31.394581 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/openstackclient" podUID="7490595f-9782-44f0-bfb1-811f2f1d65e3" containerName="openstackclient" containerID="cri-o://077461e7717ab6966108b72a1527abc834132888244fc51cc68cc84642df8e4e" gracePeriod=2 Jan 20 17:21:31 crc kubenswrapper[4558]: I0120 17:21:31.416248 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:21:31 crc kubenswrapper[4558]: I0120 17:21:31.442982 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-1"] Jan 20 17:21:31 crc kubenswrapper[4558]: E0120 17:21:31.476121 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Jan 20 17:21:31 crc kubenswrapper[4558]: E0120 17:21:31.476193 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/344f9f31-8a81-4544-b782-5aa78dfc5cc2-config-data podName:344f9f31-8a81-4544-b782-5aa78dfc5cc2 nodeName:}" failed. No retries permitted until 2026-01-20 17:21:31.976180711 +0000 UTC m=+2385.736518679 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/344f9f31-8a81-4544-b782-5aa78dfc5cc2-config-data") pod "rabbitmq-cell1-server-2" (UID: "344f9f31-8a81-4544-b782-5aa78dfc5cc2") : configmap "rabbitmq-cell1-config-data" not found Jan 20 17:21:31 crc kubenswrapper[4558]: I0120 17:21:31.528178 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/root-account-create-update-r6qz8"] Jan 20 17:21:31 crc kubenswrapper[4558]: E0120 17:21:31.528522 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7490595f-9782-44f0-bfb1-811f2f1d65e3" containerName="openstackclient" Jan 20 17:21:31 crc kubenswrapper[4558]: I0120 17:21:31.528539 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7490595f-9782-44f0-bfb1-811f2f1d65e3" containerName="openstackclient" Jan 20 17:21:31 crc kubenswrapper[4558]: I0120 17:21:31.528719 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="7490595f-9782-44f0-bfb1-811f2f1d65e3" containerName="openstackclient" Jan 20 17:21:31 crc kubenswrapper[4558]: I0120 17:21:31.532525 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-r6qz8" Jan 20 17:21:31 crc kubenswrapper[4558]: I0120 17:21:31.537407 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"openstack-mariadb-root-db-secret" Jan 20 17:21:31 crc kubenswrapper[4558]: I0120 17:21:31.552310 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-r6qz8"] Jan 20 17:21:31 crc kubenswrapper[4558]: I0120 17:21:31.580255 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-2"] Jan 20 17:21:31 crc kubenswrapper[4558]: E0120 17:21:31.582685 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Jan 20 17:21:31 crc kubenswrapper[4558]: E0120 17:21:31.582750 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/8573a632-84e0-4f80-b811-5646b571c318-config-data podName:8573a632-84e0-4f80-b811-5646b571c318 nodeName:}" failed. No retries permitted until 2026-01-20 17:21:32.082734106 +0000 UTC m=+2385.843072073 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/8573a632-84e0-4f80-b811-5646b571c318-config-data") pod "rabbitmq-cell1-server-1" (UID: "8573a632-84e0-4f80-b811-5646b571c318") : configmap "rabbitmq-cell1-config-data" not found Jan 20 17:21:31 crc kubenswrapper[4558]: I0120 17:21:31.605382 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:21:31 crc kubenswrapper[4558]: I0120 17:21:31.629353 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-1"] Jan 20 17:21:31 crc kubenswrapper[4558]: E0120 17:21:31.637993 4558 desired_state_of_world_populator.go:312] "Error processing volume" err="error processing PVC openstack-kuttl-tests/mysql-db-openstack-galera-1: PVC is being deleted" pod="openstack-kuttl-tests/openstack-galera-1" volumeName="mysql-db" Jan 20 17:21:31 crc kubenswrapper[4558]: I0120 17:21:31.667068 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-36de-account-create-update-n4ql4"] Jan 20 17:21:31 crc kubenswrapper[4558]: I0120 17:21:31.668705 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-36de-account-create-update-n4ql4" Jan 20 17:21:31 crc kubenswrapper[4558]: I0120 17:21:31.672356 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-db-secret" Jan 20 17:21:31 crc kubenswrapper[4558]: I0120 17:21:31.677775 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-36de-account-create-update-n4ql4"] Jan 20 17:21:31 crc kubenswrapper[4558]: I0120 17:21:31.684111 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k9xdl\" (UniqueName: \"kubernetes.io/projected/4eb93277-301f-486e-a1e9-f323f9bd9cb4-kube-api-access-k9xdl\") pod \"root-account-create-update-r6qz8\" (UID: \"4eb93277-301f-486e-a1e9-f323f9bd9cb4\") " pod="openstack-kuttl-tests/root-account-create-update-r6qz8" Jan 20 17:21:31 crc kubenswrapper[4558]: I0120 17:21:31.684304 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4eb93277-301f-486e-a1e9-f323f9bd9cb4-operator-scripts\") pod \"root-account-create-update-r6qz8\" (UID: \"4eb93277-301f-486e-a1e9-f323f9bd9cb4\") " pod="openstack-kuttl-tests/root-account-create-update-r6qz8" Jan 20 17:21:31 crc kubenswrapper[4558]: E0120 17:21:31.685189 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 20 17:21:31 crc kubenswrapper[4558]: E0120 17:21:31.685216 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 20 17:21:31 crc kubenswrapper[4558]: E0120 17:21:31.685229 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/9c52efaf-b737-47bf-9ca1-109a28e19113-config-data podName:9c52efaf-b737-47bf-9ca1-109a28e19113 nodeName:}" failed. No retries permitted until 2026-01-20 17:21:32.185215972 +0000 UTC m=+2385.945553939 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/9c52efaf-b737-47bf-9ca1-109a28e19113-config-data") pod "rabbitmq-server-2" (UID: "9c52efaf-b737-47bf-9ca1-109a28e19113") : configmap "rabbitmq-config-data" not found Jan 20 17:21:31 crc kubenswrapper[4558]: E0120 17:21:31.685265 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/2c809052-d9bb-4982-8271-5b7a9a6f28f9-config-data podName:2c809052-d9bb-4982-8271-5b7a9a6f28f9 nodeName:}" failed. No retries permitted until 2026-01-20 17:21:32.185250176 +0000 UTC m=+2385.945588133 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/2c809052-d9bb-4982-8271-5b7a9a6f28f9-config-data") pod "rabbitmq-server-1" (UID: "2c809052-d9bb-4982-8271-5b7a9a6f28f9") : configmap "rabbitmq-config-data" not found Jan 20 17:21:31 crc kubenswrapper[4558]: E0120 17:21:31.689277 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 20 17:21:31 crc kubenswrapper[4558]: E0120 17:21:31.689341 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/24114ddb-3b30-42ac-9d61-cfeb15d58728-config-data podName:24114ddb-3b30-42ac-9d61-cfeb15d58728 nodeName:}" failed. No retries permitted until 2026-01-20 17:21:32.189325291 +0000 UTC m=+2385.949663258 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/24114ddb-3b30-42ac-9d61-cfeb15d58728-config-data") pod "rabbitmq-server-0" (UID: "24114ddb-3b30-42ac-9d61-cfeb15d58728") : configmap "rabbitmq-config-data" not found Jan 20 17:21:31 crc kubenswrapper[4558]: I0120 17:21:31.710048 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:21:31 crc kubenswrapper[4558]: I0120 17:21:31.710752 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovsdbserver-nb-0" podUID="0ccdb20e-7467-40c5-be6c-62102e9ad6bd" containerName="openstack-network-exporter" containerID="cri-o://0c6c50e15f6ca65f62124e7e4fd437de49116ef77e49acb8744a70d8b87c4220" gracePeriod=300 Jan 20 17:21:31 crc kubenswrapper[4558]: I0120 17:21:31.745695 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-1d9a-account-create-update-7hqxp"] Jan 20 17:21:31 crc kubenswrapper[4558]: I0120 17:21:31.770111 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovsdbserver-nb-0" podUID="0ccdb20e-7467-40c5-be6c-62102e9ad6bd" containerName="ovsdbserver-nb" containerID="cri-o://f38d913a0ad6428f06323145898acb1a9022803bf80d196bd130931d31c90fab" gracePeriod=300 Jan 20 17:21:31 crc kubenswrapper[4558]: I0120 17:21:31.778424 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-1d9a-account-create-update-7hqxp"] Jan 20 17:21:31 crc kubenswrapper[4558]: I0120 17:21:31.787403 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8hcpc\" (UniqueName: \"kubernetes.io/projected/52e65b62-cbeb-475e-b3ec-2a6259c44b25-kube-api-access-8hcpc\") pod \"glance-36de-account-create-update-n4ql4\" (UID: \"52e65b62-cbeb-475e-b3ec-2a6259c44b25\") " pod="openstack-kuttl-tests/glance-36de-account-create-update-n4ql4" Jan 20 17:21:31 crc kubenswrapper[4558]: I0120 17:21:31.787442 4558 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k9xdl\" (UniqueName: \"kubernetes.io/projected/4eb93277-301f-486e-a1e9-f323f9bd9cb4-kube-api-access-k9xdl\") pod \"root-account-create-update-r6qz8\" (UID: \"4eb93277-301f-486e-a1e9-f323f9bd9cb4\") " pod="openstack-kuttl-tests/root-account-create-update-r6qz8" Jan 20 17:21:31 crc kubenswrapper[4558]: I0120 17:21:31.787524 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4eb93277-301f-486e-a1e9-f323f9bd9cb4-operator-scripts\") pod \"root-account-create-update-r6qz8\" (UID: \"4eb93277-301f-486e-a1e9-f323f9bd9cb4\") " pod="openstack-kuttl-tests/root-account-create-update-r6qz8" Jan 20 17:21:31 crc kubenswrapper[4558]: I0120 17:21:31.787604 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/52e65b62-cbeb-475e-b3ec-2a6259c44b25-operator-scripts\") pod \"glance-36de-account-create-update-n4ql4\" (UID: \"52e65b62-cbeb-475e-b3ec-2a6259c44b25\") " pod="openstack-kuttl-tests/glance-36de-account-create-update-n4ql4" Jan 20 17:21:31 crc kubenswrapper[4558]: I0120 17:21:31.788520 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4eb93277-301f-486e-a1e9-f323f9bd9cb4-operator-scripts\") pod \"root-account-create-update-r6qz8\" (UID: \"4eb93277-301f-486e-a1e9-f323f9bd9cb4\") " pod="openstack-kuttl-tests/root-account-create-update-r6qz8" Jan 20 17:21:31 crc kubenswrapper[4558]: I0120 17:21:31.789419 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:21:31 crc kubenswrapper[4558]: I0120 17:21:31.790342 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovsdbserver-sb-0" podUID="b2127dbe-7318-41ef-871f-16ee1db22243" containerName="openstack-network-exporter" containerID="cri-o://a92ae70194b9e1d3dcba1794977e0ca18866d630d73b53d8ed2581ac1fe1d3ac" gracePeriod=300 Jan 20 17:21:31 crc kubenswrapper[4558]: I0120 17:21:31.796213 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-12cd-account-create-update-8bwmv"] Jan 20 17:21:31 crc kubenswrapper[4558]: I0120 17:21:31.804867 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-12cd-account-create-update-8bwmv"] Jan 20 17:21:31 crc kubenswrapper[4558]: I0120 17:21:31.846378 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-247a-account-create-update-8drms"] Jan 20 17:21:31 crc kubenswrapper[4558]: I0120 17:21:31.847650 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-247a-account-create-update-8drms" Jan 20 17:21:31 crc kubenswrapper[4558]: I0120 17:21:31.853549 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-db-secret" Jan 20 17:21:31 crc kubenswrapper[4558]: I0120 17:21:31.853754 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k9xdl\" (UniqueName: \"kubernetes.io/projected/4eb93277-301f-486e-a1e9-f323f9bd9cb4-kube-api-access-k9xdl\") pod \"root-account-create-update-r6qz8\" (UID: \"4eb93277-301f-486e-a1e9-f323f9bd9cb4\") " pod="openstack-kuttl-tests/root-account-create-update-r6qz8" Jan 20 17:21:31 crc kubenswrapper[4558]: I0120 17:21:31.869131 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-r6qz8" Jan 20 17:21:31 crc kubenswrapper[4558]: I0120 17:21:31.877888 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-g65fk"] Jan 20 17:21:31 crc kubenswrapper[4558]: I0120 17:21:31.890449 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8hcpc\" (UniqueName: \"kubernetes.io/projected/52e65b62-cbeb-475e-b3ec-2a6259c44b25-kube-api-access-8hcpc\") pod \"glance-36de-account-create-update-n4ql4\" (UID: \"52e65b62-cbeb-475e-b3ec-2a6259c44b25\") " pod="openstack-kuttl-tests/glance-36de-account-create-update-n4ql4" Jan 20 17:21:31 crc kubenswrapper[4558]: I0120 17:21:31.890696 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6vr6p\" (UniqueName: \"kubernetes.io/projected/25e48519-09af-42b4-835a-485273267aca-kube-api-access-6vr6p\") pod \"neutron-247a-account-create-update-8drms\" (UID: \"25e48519-09af-42b4-835a-485273267aca\") " pod="openstack-kuttl-tests/neutron-247a-account-create-update-8drms" Jan 20 17:21:31 crc kubenswrapper[4558]: I0120 17:21:31.890783 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/52e65b62-cbeb-475e-b3ec-2a6259c44b25-operator-scripts\") pod \"glance-36de-account-create-update-n4ql4\" (UID: \"52e65b62-cbeb-475e-b3ec-2a6259c44b25\") " pod="openstack-kuttl-tests/glance-36de-account-create-update-n4ql4" Jan 20 17:21:31 crc kubenswrapper[4558]: I0120 17:21:31.890837 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/25e48519-09af-42b4-835a-485273267aca-operator-scripts\") pod \"neutron-247a-account-create-update-8drms\" (UID: \"25e48519-09af-42b4-835a-485273267aca\") " pod="openstack-kuttl-tests/neutron-247a-account-create-update-8drms" Jan 20 17:21:31 crc kubenswrapper[4558]: E0120 17:21:31.891465 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Jan 20 17:21:31 crc kubenswrapper[4558]: E0120 17:21:31.891532 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/6dae93de-b907-44f7-a94c-c691eee0af7f-config-data podName:6dae93de-b907-44f7-a94c-c691eee0af7f nodeName:}" failed. No retries permitted until 2026-01-20 17:21:32.891503342 +0000 UTC m=+2386.651841299 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/6dae93de-b907-44f7-a94c-c691eee0af7f-config-data") pod "rabbitmq-cell1-server-0" (UID: "6dae93de-b907-44f7-a94c-c691eee0af7f") : configmap "rabbitmq-cell1-config-data" not found Jan 20 17:21:31 crc kubenswrapper[4558]: I0120 17:21:31.892380 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/52e65b62-cbeb-475e-b3ec-2a6259c44b25-operator-scripts\") pod \"glance-36de-account-create-update-n4ql4\" (UID: \"52e65b62-cbeb-475e-b3ec-2a6259c44b25\") " pod="openstack-kuttl-tests/glance-36de-account-create-update-n4ql4" Jan 20 17:21:31 crc kubenswrapper[4558]: I0120 17:21:31.904141 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-g65fk"] Jan 20 17:21:31 crc kubenswrapper[4558]: I0120 17:21:31.911199 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-247a-account-create-update-8drms"] Jan 20 17:21:31 crc kubenswrapper[4558]: I0120 17:21:31.928150 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8hcpc\" (UniqueName: \"kubernetes.io/projected/52e65b62-cbeb-475e-b3ec-2a6259c44b25-kube-api-access-8hcpc\") pod \"glance-36de-account-create-update-n4ql4\" (UID: \"52e65b62-cbeb-475e-b3ec-2a6259c44b25\") " pod="openstack-kuttl-tests/glance-36de-account-create-update-n4ql4" Jan 20 17:21:31 crc kubenswrapper[4558]: I0120 17:21:31.951199 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovsdbserver-sb-0" podUID="b2127dbe-7318-41ef-871f-16ee1db22243" containerName="ovsdbserver-sb" containerID="cri-o://ba8bdba5d5e7c4a36c71eb8170fbd87c43bae0b18995a96e5c7e162d103c69d1" gracePeriod=300 Jan 20 17:21:31 crc kubenswrapper[4558]: I0120 17:21:31.989974 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-36de-account-create-update-n4ql4" Jan 20 17:21:31 crc kubenswrapper[4558]: I0120 17:21:31.992955 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6vr6p\" (UniqueName: \"kubernetes.io/projected/25e48519-09af-42b4-835a-485273267aca-kube-api-access-6vr6p\") pod \"neutron-247a-account-create-update-8drms\" (UID: \"25e48519-09af-42b4-835a-485273267aca\") " pod="openstack-kuttl-tests/neutron-247a-account-create-update-8drms" Jan 20 17:21:31 crc kubenswrapper[4558]: I0120 17:21:31.993074 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/25e48519-09af-42b4-835a-485273267aca-operator-scripts\") pod \"neutron-247a-account-create-update-8drms\" (UID: \"25e48519-09af-42b4-835a-485273267aca\") " pod="openstack-kuttl-tests/neutron-247a-account-create-update-8drms" Jan 20 17:21:31 crc kubenswrapper[4558]: E0120 17:21:31.993331 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Jan 20 17:21:31 crc kubenswrapper[4558]: E0120 17:21:31.993402 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/344f9f31-8a81-4544-b782-5aa78dfc5cc2-config-data podName:344f9f31-8a81-4544-b782-5aa78dfc5cc2 nodeName:}" failed. No retries permitted until 2026-01-20 17:21:32.993387734 +0000 UTC m=+2386.753725701 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/344f9f31-8a81-4544-b782-5aa78dfc5cc2-config-data") pod "rabbitmq-cell1-server-2" (UID: "344f9f31-8a81-4544-b782-5aa78dfc5cc2") : configmap "rabbitmq-cell1-config-data" not found Jan 20 17:21:31 crc kubenswrapper[4558]: I0120 17:21:31.994177 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/25e48519-09af-42b4-835a-485273267aca-operator-scripts\") pod \"neutron-247a-account-create-update-8drms\" (UID: \"25e48519-09af-42b4-835a-485273267aca\") " pod="openstack-kuttl-tests/neutron-247a-account-create-update-8drms" Jan 20 17:21:32 crc kubenswrapper[4558]: I0120 17:21:32.009682 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6vr6p\" (UniqueName: \"kubernetes.io/projected/25e48519-09af-42b4-835a-485273267aca-kube-api-access-6vr6p\") pod \"neutron-247a-account-create-update-8drms\" (UID: \"25e48519-09af-42b4-835a-485273267aca\") " pod="openstack-kuttl-tests/neutron-247a-account-create-update-8drms" Jan 20 17:21:32 crc kubenswrapper[4558]: E0120 17:21:32.036463 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ba8bdba5d5e7c4a36c71eb8170fbd87c43bae0b18995a96e5c7e162d103c69d1 is running failed: container process not found" containerID="ba8bdba5d5e7c4a36c71eb8170fbd87c43bae0b18995a96e5c7e162d103c69d1" cmd=["/usr/bin/pidof","ovsdb-server"] Jan 20 17:21:32 crc kubenswrapper[4558]: E0120 17:21:32.036884 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ba8bdba5d5e7c4a36c71eb8170fbd87c43bae0b18995a96e5c7e162d103c69d1 is running failed: container process not found" containerID="ba8bdba5d5e7c4a36c71eb8170fbd87c43bae0b18995a96e5c7e162d103c69d1" cmd=["/usr/bin/pidof","ovsdb-server"] Jan 20 17:21:32 crc kubenswrapper[4558]: E0120 17:21:32.039533 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ba8bdba5d5e7c4a36c71eb8170fbd87c43bae0b18995a96e5c7e162d103c69d1 is running failed: container process not found" containerID="ba8bdba5d5e7c4a36c71eb8170fbd87c43bae0b18995a96e5c7e162d103c69d1" cmd=["/usr/bin/pidof","ovsdb-server"] Jan 20 17:21:32 crc kubenswrapper[4558]: E0120 17:21:32.039568 4558 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ba8bdba5d5e7c4a36c71eb8170fbd87c43bae0b18995a96e5c7e162d103c69d1 is running failed: container process not found" probeType="Readiness" pod="openstack-kuttl-tests/ovsdbserver-sb-0" podUID="b2127dbe-7318-41ef-871f-16ee1db22243" containerName="ovsdbserver-sb" Jan 20 17:21:32 crc kubenswrapper[4558]: I0120 17:21:32.048387 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-db-sync-4m92m"] Jan 20 17:21:32 crc kubenswrapper[4558]: I0120 17:21:32.056066 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-db-sync-4m92m"] Jan 20 17:21:32 crc kubenswrapper[4558]: I0120 17:21:32.071488 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-36de-account-create-update-pf4bc"] Jan 20 17:21:32 crc kubenswrapper[4558]: I0120 17:21:32.084846 4558 kubelet.go:2431] "SyncLoop REMOVE" 
source="api" pods=["openstack-kuttl-tests/glance-36de-account-create-update-pf4bc"] Jan 20 17:21:32 crc kubenswrapper[4558]: I0120 17:21:32.093386 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:21:32 crc kubenswrapper[4558]: I0120 17:21:32.093625 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovn-northd-0" podUID="88418ec6-c589-463f-b8ec-c12464810c07" containerName="ovn-northd" containerID="cri-o://9c3bcaf0b0b1fe796f0f39a396e91571e66a0d2ff4f33099f19f4d03733e3f3c" gracePeriod=30 Jan 20 17:21:32 crc kubenswrapper[4558]: I0120 17:21:32.094082 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovn-northd-0" podUID="88418ec6-c589-463f-b8ec-c12464810c07" containerName="openstack-network-exporter" containerID="cri-o://6ace6c8a650ab1df4d1b142fa5877db45863c733c61ee810479ce811a6bf28d2" gracePeriod=30 Jan 20 17:21:32 crc kubenswrapper[4558]: E0120 17:21:32.095064 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Jan 20 17:21:32 crc kubenswrapper[4558]: E0120 17:21:32.095117 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/8573a632-84e0-4f80-b811-5646b571c318-config-data podName:8573a632-84e0-4f80-b811-5646b571c318 nodeName:}" failed. No retries permitted until 2026-01-20 17:21:33.095101614 +0000 UTC m=+2386.855439582 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/8573a632-84e0-4f80-b811-5646b571c318-config-data") pod "rabbitmq-cell1-server-1" (UID: "8573a632-84e0-4f80-b811-5646b571c318") : configmap "rabbitmq-cell1-config-data" not found Jan 20 17:21:32 crc kubenswrapper[4558]: I0120 17:21:32.106998 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-db-sync-dm58n"] Jan 20 17:21:32 crc kubenswrapper[4558]: I0120 17:21:32.141255 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/placement-db-sync-dm58n"] Jan 20 17:21:32 crc kubenswrapper[4558]: I0120 17:21:32.159796 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-db-sync-5vv4z"] Jan 20 17:21:32 crc kubenswrapper[4558]: E0120 17:21:32.197259 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 20 17:21:32 crc kubenswrapper[4558]: E0120 17:21:32.197555 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/2c809052-d9bb-4982-8271-5b7a9a6f28f9-config-data podName:2c809052-d9bb-4982-8271-5b7a9a6f28f9 nodeName:}" failed. No retries permitted until 2026-01-20 17:21:33.197538546 +0000 UTC m=+2386.957876513 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/2c809052-d9bb-4982-8271-5b7a9a6f28f9-config-data") pod "rabbitmq-server-1" (UID: "2c809052-d9bb-4982-8271-5b7a9a6f28f9") : configmap "rabbitmq-config-data" not found Jan 20 17:21:32 crc kubenswrapper[4558]: E0120 17:21:32.197906 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 20 17:21:32 crc kubenswrapper[4558]: E0120 17:21:32.197931 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/9c52efaf-b737-47bf-9ca1-109a28e19113-config-data podName:9c52efaf-b737-47bf-9ca1-109a28e19113 nodeName:}" failed. No retries permitted until 2026-01-20 17:21:33.197922317 +0000 UTC m=+2386.958260285 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/9c52efaf-b737-47bf-9ca1-109a28e19113-config-data") pod "rabbitmq-server-2" (UID: "9c52efaf-b737-47bf-9ca1-109a28e19113") : configmap "rabbitmq-config-data" not found Jan 20 17:21:32 crc kubenswrapper[4558]: E0120 17:21:32.197959 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 20 17:21:32 crc kubenswrapper[4558]: E0120 17:21:32.197976 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/24114ddb-3b30-42ac-9d61-cfeb15d58728-config-data podName:24114ddb-3b30-42ac-9d61-cfeb15d58728 nodeName:}" failed. No retries permitted until 2026-01-20 17:21:33.197969306 +0000 UTC m=+2386.958307273 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/24114ddb-3b30-42ac-9d61-cfeb15d58728-config-data") pod "rabbitmq-server-0" (UID: "24114ddb-3b30-42ac-9d61-cfeb15d58728") : configmap "rabbitmq-config-data" not found Jan 20 17:21:32 crc kubenswrapper[4558]: I0120 17:21:32.198276 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-247a-account-create-update-8drms" Jan 20 17:21:32 crc kubenswrapper[4558]: I0120 17:21:32.202371 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-db-sync-5vv4z"] Jan 20 17:21:32 crc kubenswrapper[4558]: I0120 17:21:32.314079 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-247a-account-create-update-x7dvx"] Jan 20 17:21:32 crc kubenswrapper[4558]: I0120 17:21:32.337728 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-247a-account-create-update-x7dvx"] Jan 20 17:21:32 crc kubenswrapper[4558]: I0120 17:21:32.397627 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-cell-mapping-mc7gs"] Jan 20 17:21:32 crc kubenswrapper[4558]: I0120 17:21:32.415042 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-cell-mapping-mc7gs"] Jan 20 17:21:32 crc kubenswrapper[4558]: I0120 17:21:32.424457 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-cell-mapping-tfgvg"] Jan 20 17:21:32 crc kubenswrapper[4558]: I0120 17:21:32.436237 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell0-cell-mapping-tfgvg"] Jan 20 17:21:32 crc kubenswrapper[4558]: I0120 17:21:32.578373 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0f1083f8-2d2c-4ab0-8382-7e8d14713532" path="/var/lib/kubelet/pods/0f1083f8-2d2c-4ab0-8382-7e8d14713532/volumes" Jan 20 17:21:32 crc kubenswrapper[4558]: I0120 17:21:32.581528 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="10cc58f5-333d-4a3b-af1e-ed0efe498d8d" path="/var/lib/kubelet/pods/10cc58f5-333d-4a3b-af1e-ed0efe498d8d/volumes" Jan 20 17:21:32 crc kubenswrapper[4558]: I0120 17:21:32.582078 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4efe1398-34aa-4835-8c35-790f9ec1d514" path="/var/lib/kubelet/pods/4efe1398-34aa-4835-8c35-790f9ec1d514/volumes" Jan 20 17:21:32 crc kubenswrapper[4558]: E0120 17:21:32.583376 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="9c3bcaf0b0b1fe796f0f39a396e91571e66a0d2ff4f33099f19f4d03733e3f3c" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Jan 20 17:21:32 crc kubenswrapper[4558]: I0120 17:21:32.585515 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="56e762d6-78f3-4e7e-8b7b-2b8057aedbdf" path="/var/lib/kubelet/pods/56e762d6-78f3-4e7e-8b7b-2b8057aedbdf/volumes" Jan 20 17:21:32 crc kubenswrapper[4558]: I0120 17:21:32.586301 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7abf0081-40ee-4a29-b5fe-3a8c4c9f0214" path="/var/lib/kubelet/pods/7abf0081-40ee-4a29-b5fe-3a8c4c9f0214/volumes" Jan 20 17:21:32 crc kubenswrapper[4558]: I0120 17:21:32.586906 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8e497b8c-aac4-4095-a801-65e24590b325" path="/var/lib/kubelet/pods/8e497b8c-aac4-4095-a801-65e24590b325/volumes" Jan 20 17:21:32 crc kubenswrapper[4558]: I0120 17:21:32.587965 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c720d172-36bf-467b-a900-dec2dbc43d3e" path="/var/lib/kubelet/pods/c720d172-36bf-467b-a900-dec2dbc43d3e/volumes" Jan 20 17:21:32 crc kubenswrapper[4558]: I0120 
17:21:32.588504 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e2328b3a-4982-4596-a11d-afb6ba3a4915" path="/var/lib/kubelet/pods/e2328b3a-4982-4596-a11d-afb6ba3a4915/volumes" Jan 20 17:21:32 crc kubenswrapper[4558]: I0120 17:21:32.591258 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e2e3a805-bb9e-4e87-b57d-99fb395130bf" path="/var/lib/kubelet/pods/e2e3a805-bb9e-4e87-b57d-99fb395130bf/volumes" Jan 20 17:21:32 crc kubenswrapper[4558]: I0120 17:21:32.591873 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f50f6bbe-7952-4b0d-aa1b-88578330e103" path="/var/lib/kubelet/pods/f50f6bbe-7952-4b0d-aa1b-88578330e103/volumes" Jan 20 17:21:32 crc kubenswrapper[4558]: E0120 17:21:32.592649 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="9c3bcaf0b0b1fe796f0f39a396e91571e66a0d2ff4f33099f19f4d03733e3f3c" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Jan 20 17:21:32 crc kubenswrapper[4558]: E0120 17:21:32.608439 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="9c3bcaf0b0b1fe796f0f39a396e91571e66a0d2ff4f33099f19f4d03733e3f3c" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Jan 20 17:21:32 crc kubenswrapper[4558]: E0120 17:21:32.608492 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/ovn-northd-0" podUID="88418ec6-c589-463f-b8ec-c12464810c07" containerName="ovn-northd" Jan 20 17:21:32 crc kubenswrapper[4558]: E0120 17:21:32.624042 4558 desired_state_of_world_populator.go:312] "Error processing volume" err="error processing PVC openstack-kuttl-tests/mysql-db-openstack-cell1-galera-2: PVC is being deleted" pod="openstack-kuttl-tests/openstack-cell1-galera-2" volumeName="mysql-db" Jan 20 17:21:32 crc kubenswrapper[4558]: I0120 17:21:32.669624 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-db-sync-2cpfk"] Jan 20 17:21:32 crc kubenswrapper[4558]: I0120 17:21:32.678962 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-db-sync-2cpfk"] Jan 20 17:21:32 crc kubenswrapper[4558]: I0120 17:21:32.767512 4558 generic.go:334] "Generic (PLEG): container finished" podID="88418ec6-c589-463f-b8ec-c12464810c07" containerID="6ace6c8a650ab1df4d1b142fa5877db45863c733c61ee810479ce811a6bf28d2" exitCode=2 Jan 20 17:21:32 crc kubenswrapper[4558]: I0120 17:21:32.767640 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"88418ec6-c589-463f-b8ec-c12464810c07","Type":"ContainerDied","Data":"6ace6c8a650ab1df4d1b142fa5877db45863c733c61ee810479ce811a6bf28d2"} Jan 20 17:21:32 crc kubenswrapper[4558]: I0120 17:21:32.771141 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-sb-0_b2127dbe-7318-41ef-871f-16ee1db22243/ovsdbserver-sb/0.log" Jan 20 17:21:32 crc kubenswrapper[4558]: I0120 17:21:32.771200 4558 generic.go:334] "Generic (PLEG): container finished" podID="b2127dbe-7318-41ef-871f-16ee1db22243" 
containerID="a92ae70194b9e1d3dcba1794977e0ca18866d630d73b53d8ed2581ac1fe1d3ac" exitCode=2 Jan 20 17:21:32 crc kubenswrapper[4558]: I0120 17:21:32.771219 4558 generic.go:334] "Generic (PLEG): container finished" podID="b2127dbe-7318-41ef-871f-16ee1db22243" containerID="ba8bdba5d5e7c4a36c71eb8170fbd87c43bae0b18995a96e5c7e162d103c69d1" exitCode=143 Jan 20 17:21:32 crc kubenswrapper[4558]: I0120 17:21:32.771300 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"b2127dbe-7318-41ef-871f-16ee1db22243","Type":"ContainerDied","Data":"a92ae70194b9e1d3dcba1794977e0ca18866d630d73b53d8ed2581ac1fe1d3ac"} Jan 20 17:21:32 crc kubenswrapper[4558]: I0120 17:21:32.771356 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"b2127dbe-7318-41ef-871f-16ee1db22243","Type":"ContainerDied","Data":"ba8bdba5d5e7c4a36c71eb8170fbd87c43bae0b18995a96e5c7e162d103c69d1"} Jan 20 17:21:32 crc kubenswrapper[4558]: I0120 17:21:32.774929 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-nb-0_0ccdb20e-7467-40c5-be6c-62102e9ad6bd/ovsdbserver-nb/0.log" Jan 20 17:21:32 crc kubenswrapper[4558]: I0120 17:21:32.774985 4558 generic.go:334] "Generic (PLEG): container finished" podID="0ccdb20e-7467-40c5-be6c-62102e9ad6bd" containerID="0c6c50e15f6ca65f62124e7e4fd437de49116ef77e49acb8744a70d8b87c4220" exitCode=2 Jan 20 17:21:32 crc kubenswrapper[4558]: I0120 17:21:32.774996 4558 generic.go:334] "Generic (PLEG): container finished" podID="0ccdb20e-7467-40c5-be6c-62102e9ad6bd" containerID="f38d913a0ad6428f06323145898acb1a9022803bf80d196bd130931d31c90fab" exitCode=143 Jan 20 17:21:32 crc kubenswrapper[4558]: I0120 17:21:32.775019 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"0ccdb20e-7467-40c5-be6c-62102e9ad6bd","Type":"ContainerDied","Data":"0c6c50e15f6ca65f62124e7e4fd437de49116ef77e49acb8744a70d8b87c4220"} Jan 20 17:21:32 crc kubenswrapper[4558]: I0120 17:21:32.775038 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"0ccdb20e-7467-40c5-be6c-62102e9ad6bd","Type":"ContainerDied","Data":"f38d913a0ad6428f06323145898acb1a9022803bf80d196bd130931d31c90fab"} Jan 20 17:21:32 crc kubenswrapper[4558]: I0120 17:21:32.882482 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-db-sync-682tw"] Jan 20 17:21:32 crc kubenswrapper[4558]: I0120 17:21:32.891753 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-db-sync-682tw"] Jan 20 17:21:32 crc kubenswrapper[4558]: I0120 17:21:32.903374 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-gms56"] Jan 20 17:21:32 crc kubenswrapper[4558]: I0120 17:21:32.911839 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-gms56"] Jan 20 17:21:32 crc kubenswrapper[4558]: E0120 17:21:32.926005 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Jan 20 17:21:32 crc kubenswrapper[4558]: E0120 17:21:32.926066 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/6dae93de-b907-44f7-a94c-c691eee0af7f-config-data podName:6dae93de-b907-44f7-a94c-c691eee0af7f nodeName:}" failed. 
No retries permitted until 2026-01-20 17:21:34.926052781 +0000 UTC m=+2388.686390748 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/6dae93de-b907-44f7-a94c-c691eee0af7f-config-data") pod "rabbitmq-cell1-server-0" (UID: "6dae93de-b907-44f7-a94c-c691eee0af7f") : configmap "rabbitmq-cell1-config-data" not found Jan 20 17:21:32 crc kubenswrapper[4558]: I0120 17:21:32.928590 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-6b7cbc9658-z4t27"] Jan 20 17:21:32 crc kubenswrapper[4558]: I0120 17:21:32.928870 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/placement-6b7cbc9658-z4t27" podUID="f2c69e78-4cbd-41a8-810d-bdc4f56cabfd" containerName="placement-log" containerID="cri-o://3282788cd0138924b505daecd0b6616ec4d4fb23cc9f1c9f3179db8ba4606bbd" gracePeriod=30 Jan 20 17:21:32 crc kubenswrapper[4558]: I0120 17:21:32.929142 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/placement-6b7cbc9658-z4t27" podUID="f2c69e78-4cbd-41a8-810d-bdc4f56cabfd" containerName="placement-api" containerID="cri-o://fd0522a1018450fa8aaf5f378cfa36bd4a81f9a88e9be2f7d7e743bbb0155ccd" gracePeriod=30 Jan 20 17:21:32 crc kubenswrapper[4558]: I0120 17:21:32.953070 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:21:32 crc kubenswrapper[4558]: I0120 17:21:32.953751 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-api-0" podUID="e72083f2-d324-432e-9bb0-5f44f2023489" containerName="cinder-api-log" containerID="cri-o://e1f212cc615420dc0adbf996976347dda8d27a4a5471651930f4f497d67e903f" gracePeriod=30 Jan 20 17:21:32 crc kubenswrapper[4558]: I0120 17:21:32.954035 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-api-0" podUID="e72083f2-d324-432e-9bb0-5f44f2023489" containerName="cinder-api" containerID="cri-o://76ea6272fca9c46b59f2db4c5161f6d34060b5a40bd0d5c5b65cea949a4dad9e" gracePeriod=30 Jan 20 17:21:32 crc kubenswrapper[4558]: I0120 17:21:32.994036 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.014472 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-scheduler-0" podUID="413b348d-82a1-466a-8027-d2bfd6f97cb7" containerName="cinder-scheduler" containerID="cri-o://0326fe1340c68a89c66bd369e9d86e881cb9466234bb1a9b24d4732251805082" gracePeriod=30 Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.014730 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-scheduler-0" podUID="413b348d-82a1-466a-8027-d2bfd6f97cb7" containerName="probe" containerID="cri-o://7ee0f2d8df26b1c3c3fbe8f6a262fe806d7e730b90e603316c261fe601cf8b66" gracePeriod=30 Jan 20 17:21:33 crc kubenswrapper[4558]: E0120 17:21:33.042075 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Jan 20 17:21:33 crc kubenswrapper[4558]: E0120 17:21:33.042187 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/344f9f31-8a81-4544-b782-5aa78dfc5cc2-config-data podName:344f9f31-8a81-4544-b782-5aa78dfc5cc2 nodeName:}" failed. 
No retries permitted until 2026-01-20 17:21:35.042144428 +0000 UTC m=+2388.802482395 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/344f9f31-8a81-4544-b782-5aa78dfc5cc2-config-data") pod "rabbitmq-cell1-server-2" (UID: "344f9f31-8a81-4544-b782-5aa78dfc5cc2") : configmap "rabbitmq-cell1-config-data" not found Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.068577 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-0"] Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.107192 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-2"] Jan 20 17:21:33 crc kubenswrapper[4558]: E0120 17:21:33.145440 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Jan 20 17:21:33 crc kubenswrapper[4558]: E0120 17:21:33.145542 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/8573a632-84e0-4f80-b811-5646b571c318-config-data podName:8573a632-84e0-4f80-b811-5646b571c318 nodeName:}" failed. No retries permitted until 2026-01-20 17:21:35.145524292 +0000 UTC m=+2388.905862259 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/8573a632-84e0-4f80-b811-5646b571c318-config-data") pod "rabbitmq-cell1-server-1" (UID: "8573a632-84e0-4f80-b811-5646b571c318") : configmap "rabbitmq-cell1-config-data" not found Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.159480 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-nb-0_0ccdb20e-7467-40c5-be6c-62102e9ad6bd/ovsdbserver-nb/0.log" Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.159603 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.160974 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/rabbitmq-cell1-server-2" podUID="344f9f31-8a81-4544-b782-5aa78dfc5cc2" containerName="rabbitmq" containerID="cri-o://3723b282ed54cce183bafc40d9019c56750bb156f55f03ff6df98969d9b7a4c6" gracePeriod=604800 Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.175280 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-1"] Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.177775 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" podUID="6dae93de-b907-44f7-a94c-c691eee0af7f" containerName="rabbitmq" containerID="cri-o://b7c379986cc6aff3b828f77461a78d1169c8a75f30d3abde660227f21701db47" gracePeriod=604800 Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.188122 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-sb-0_b2127dbe-7318-41ef-871f-16ee1db22243/ovsdbserver-sb/0.log" Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.188222 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.223476 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/rabbitmq-cell1-server-1" podUID="8573a632-84e0-4f80-b811-5646b571c318" containerName="rabbitmq" containerID="cri-o://c5f31a0dc1f900cc5cbc94b222647ecbd6198220c8e700e7650325a56366cf4f" gracePeriod=604800 Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.223993 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-storage-0"] Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.224458 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="fae781f7-d8cb-4f32-992f-43d7dac82655" containerName="account-server" containerID="cri-o://10b8b90c30337c202ffa5efc8376aef571af66cac566f514e6c26f2a3c0a8340" gracePeriod=30 Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.224467 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="fae781f7-d8cb-4f32-992f-43d7dac82655" containerName="object-expirer" containerID="cri-o://0625dd3b638d8b981039c7b40441b9f3eb96afe32772f7a668fc9f6739b2ce43" gracePeriod=30 Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.224492 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="fae781f7-d8cb-4f32-992f-43d7dac82655" containerName="swift-recon-cron" containerID="cri-o://1153f574b016e23743a03a0a3af75cf83aa887f85ab8a9f604b6d0beb5416c91" gracePeriod=30 Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.224532 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="fae781f7-d8cb-4f32-992f-43d7dac82655" containerName="rsync" containerID="cri-o://4f8e0074decabf3ae64fdf9bf0b1b9bfcd33bc833a5040505bdb4f80fb68ce7c" gracePeriod=30 Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.224610 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="fae781f7-d8cb-4f32-992f-43d7dac82655" containerName="object-server" containerID="cri-o://e739685a9bc530c37217e2f6a62d401027fed825626a553b5d0e17838ebce2ab" gracePeriod=30 Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.224587 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="fae781f7-d8cb-4f32-992f-43d7dac82655" containerName="object-replicator" containerID="cri-o://617b43e1502aaece76174ffd472b995e859c9b75744441831fd4fbd240eacd18" gracePeriod=30 Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.224568 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="fae781f7-d8cb-4f32-992f-43d7dac82655" containerName="container-auditor" containerID="cri-o://844d6ed85e1a90ecc3e801bcad9126bb9e1b7e1969ac475bcae5bcc3fac3d05e" gracePeriod=30 Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.224654 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="fae781f7-d8cb-4f32-992f-43d7dac82655" containerName="account-auditor" containerID="cri-o://04e1cbf4a49ae809f6cd9ad66cf664edf23a039b8a03c43ccc28c839028f90d4" gracePeriod=30 Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.224640 4558 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="fae781f7-d8cb-4f32-992f-43d7dac82655" containerName="account-reaper" containerID="cri-o://03d0eae4935912c7dedaac60a57d7749cb144a4fb7adabc826dffe289fecf3a3" gracePeriod=30 Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.224696 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="fae781f7-d8cb-4f32-992f-43d7dac82655" containerName="object-updater" containerID="cri-o://1a43e8cd885dce5f981539c99de1df7006b77acee5bfe4dc7aee4e81c1433375" gracePeriod=30 Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.224684 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="fae781f7-d8cb-4f32-992f-43d7dac82655" containerName="account-replicator" containerID="cri-o://449d04837104cda04f1b998b58315c57a3f8dd508073572dfb1cf641a8e99cdf" gracePeriod=30 Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.224720 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="fae781f7-d8cb-4f32-992f-43d7dac82655" containerName="object-auditor" containerID="cri-o://cbd610baf4709c4651944a2af3a0c48b9c204a1089c08055228c74c96f5c28b9" gracePeriod=30 Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.224748 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="fae781f7-d8cb-4f32-992f-43d7dac82655" containerName="container-replicator" containerID="cri-o://14b8554fd67e8203eaf876170ae3e7775aa3b4c177bc6900890e44c6bc4a44f4" gracePeriod=30 Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.224770 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="fae781f7-d8cb-4f32-992f-43d7dac82655" containerName="container-server" containerID="cri-o://097f5821ec52a17e967649b5b6967edea162a947874924e6600e4d07725cae05" gracePeriod=30 Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.224637 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="fae781f7-d8cb-4f32-992f-43d7dac82655" containerName="container-updater" containerID="cri-o://ba569f18d36a42b3a1ada97f895e8e6f12935ce1058001df0619906556fc1d82" gracePeriod=30 Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.254579 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.255079 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="09d95674-9970-471d-8061-997e69ddda11" containerName="nova-metadata-log" containerID="cri-o://523536e2b2a85c3c10d4dc392b34b04a3e55effe5a2bcd5562e1367958047b32" gracePeriod=30 Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.255239 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="09d95674-9970-471d-8061-997e69ddda11" containerName="nova-metadata-metadata" containerID="cri-o://065345d0e077b3bb2b2e67f7ed62c2413c1984c254d80dff447c7a5693b57103" gracePeriod=30 Jan 20 17:21:33 crc kubenswrapper[4558]: E0120 17:21:33.259261 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-config-data: configmap 
"rabbitmq-config-data" not found Jan 20 17:21:33 crc kubenswrapper[4558]: E0120 17:21:33.259300 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 20 17:21:33 crc kubenswrapper[4558]: E0120 17:21:33.259350 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/2c809052-d9bb-4982-8271-5b7a9a6f28f9-config-data podName:2c809052-d9bb-4982-8271-5b7a9a6f28f9 nodeName:}" failed. No retries permitted until 2026-01-20 17:21:35.259332113 +0000 UTC m=+2389.019670079 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/2c809052-d9bb-4982-8271-5b7a9a6f28f9-config-data") pod "rabbitmq-server-1" (UID: "2c809052-d9bb-4982-8271-5b7a9a6f28f9") : configmap "rabbitmq-config-data" not found Jan 20 17:21:33 crc kubenswrapper[4558]: E0120 17:21:33.259384 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 20 17:21:33 crc kubenswrapper[4558]: E0120 17:21:33.259396 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/9c52efaf-b737-47bf-9ca1-109a28e19113-config-data podName:9c52efaf-b737-47bf-9ca1-109a28e19113 nodeName:}" failed. No retries permitted until 2026-01-20 17:21:35.25936263 +0000 UTC m=+2389.019700598 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/9c52efaf-b737-47bf-9ca1-109a28e19113-config-data") pod "rabbitmq-server-2" (UID: "9c52efaf-b737-47bf-9ca1-109a28e19113") : configmap "rabbitmq-config-data" not found Jan 20 17:21:33 crc kubenswrapper[4558]: E0120 17:21:33.259411 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/24114ddb-3b30-42ac-9d61-cfeb15d58728-config-data podName:24114ddb-3b30-42ac-9d61-cfeb15d58728 nodeName:}" failed. No retries permitted until 2026-01-20 17:21:35.25940509 +0000 UTC m=+2389.019743057 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/24114ddb-3b30-42ac-9d61-cfeb15d58728-config-data") pod "rabbitmq-server-0" (UID: "24114ddb-3b30-42ac-9d61-cfeb15d58728") : configmap "rabbitmq-config-data" not found Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.275696 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-psfz5"] Jan 20 17:21:33 crc kubenswrapper[4558]: E0120 17:21:33.276155 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2127dbe-7318-41ef-871f-16ee1db22243" containerName="openstack-network-exporter" Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.276185 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2127dbe-7318-41ef-871f-16ee1db22243" containerName="openstack-network-exporter" Jan 20 17:21:33 crc kubenswrapper[4558]: E0120 17:21:33.276195 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ccdb20e-7467-40c5-be6c-62102e9ad6bd" containerName="openstack-network-exporter" Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.276200 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ccdb20e-7467-40c5-be6c-62102e9ad6bd" containerName="openstack-network-exporter" Jan 20 17:21:33 crc kubenswrapper[4558]: E0120 17:21:33.276214 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2127dbe-7318-41ef-871f-16ee1db22243" containerName="ovsdbserver-sb" Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.276220 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2127dbe-7318-41ef-871f-16ee1db22243" containerName="ovsdbserver-sb" Jan 20 17:21:33 crc kubenswrapper[4558]: E0120 17:21:33.276233 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ccdb20e-7467-40c5-be6c-62102e9ad6bd" containerName="ovsdbserver-nb" Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.276238 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ccdb20e-7467-40c5-be6c-62102e9ad6bd" containerName="ovsdbserver-nb" Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.276413 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="0ccdb20e-7467-40c5-be6c-62102e9ad6bd" containerName="ovsdbserver-nb" Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.276432 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="0ccdb20e-7467-40c5-be6c-62102e9ad6bd" containerName="openstack-network-exporter" Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.276439 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b2127dbe-7318-41ef-871f-16ee1db22243" containerName="ovsdbserver-sb" Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.276451 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b2127dbe-7318-41ef-871f-16ee1db22243" containerName="openstack-network-exporter" Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.278568 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-psfz5" Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.316258 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.316506 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b" containerName="nova-api-log" containerID="cri-o://2eff65431cd3f96e218fee39fc3cc4517345e8ec098ef9aeba65d6a34e79cd42" gracePeriod=30 Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.316980 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b" containerName="nova-api-api" containerID="cri-o://d68462f667579ef49cbcb5dabe1e6eb12c8f1227e9a7202b73ef439191bd1da7" gracePeriod=30 Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.359652 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/b2127dbe-7318-41ef-871f-16ee1db22243-ovsdb-rundir\") pod \"b2127dbe-7318-41ef-871f-16ee1db22243\" (UID: \"b2127dbe-7318-41ef-871f-16ee1db22243\") " Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.359728 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/b2127dbe-7318-41ef-871f-16ee1db22243-metrics-certs-tls-certs\") pod \"b2127dbe-7318-41ef-871f-16ee1db22243\" (UID: \"b2127dbe-7318-41ef-871f-16ee1db22243\") " Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.359794 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-nb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"0ccdb20e-7467-40c5-be6c-62102e9ad6bd\" (UID: \"0ccdb20e-7467-40c5-be6c-62102e9ad6bd\") " Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.359821 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wbdcm\" (UniqueName: \"kubernetes.io/projected/b2127dbe-7318-41ef-871f-16ee1db22243-kube-api-access-wbdcm\") pod \"b2127dbe-7318-41ef-871f-16ee1db22243\" (UID: \"b2127dbe-7318-41ef-871f-16ee1db22243\") " Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.359844 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-slgjx\" (UniqueName: \"kubernetes.io/projected/0ccdb20e-7467-40c5-be6c-62102e9ad6bd-kube-api-access-slgjx\") pod \"0ccdb20e-7467-40c5-be6c-62102e9ad6bd\" (UID: \"0ccdb20e-7467-40c5-be6c-62102e9ad6bd\") " Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.359866 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b2127dbe-7318-41ef-871f-16ee1db22243-ovsdbserver-sb-tls-certs\") pod \"b2127dbe-7318-41ef-871f-16ee1db22243\" (UID: \"b2127dbe-7318-41ef-871f-16ee1db22243\") " Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.359885 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0ccdb20e-7467-40c5-be6c-62102e9ad6bd-config\") pod \"0ccdb20e-7467-40c5-be6c-62102e9ad6bd\" (UID: \"0ccdb20e-7467-40c5-be6c-62102e9ad6bd\") " Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.359919 4558 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"ovndbcluster-sb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"b2127dbe-7318-41ef-871f-16ee1db22243\" (UID: \"b2127dbe-7318-41ef-871f-16ee1db22243\") " Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.359957 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b2127dbe-7318-41ef-871f-16ee1db22243-scripts\") pod \"b2127dbe-7318-41ef-871f-16ee1db22243\" (UID: \"b2127dbe-7318-41ef-871f-16ee1db22243\") " Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.360048 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2127dbe-7318-41ef-871f-16ee1db22243-combined-ca-bundle\") pod \"b2127dbe-7318-41ef-871f-16ee1db22243\" (UID: \"b2127dbe-7318-41ef-871f-16ee1db22243\") " Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.360067 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/0ccdb20e-7467-40c5-be6c-62102e9ad6bd-metrics-certs-tls-certs\") pod \"0ccdb20e-7467-40c5-be6c-62102e9ad6bd\" (UID: \"0ccdb20e-7467-40c5-be6c-62102e9ad6bd\") " Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.360089 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/0ccdb20e-7467-40c5-be6c-62102e9ad6bd-ovsdbserver-nb-tls-certs\") pod \"0ccdb20e-7467-40c5-be6c-62102e9ad6bd\" (UID: \"0ccdb20e-7467-40c5-be6c-62102e9ad6bd\") " Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.360122 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ccdb20e-7467-40c5-be6c-62102e9ad6bd-combined-ca-bundle\") pod \"0ccdb20e-7467-40c5-be6c-62102e9ad6bd\" (UID: \"0ccdb20e-7467-40c5-be6c-62102e9ad6bd\") " Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.360148 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0ccdb20e-7467-40c5-be6c-62102e9ad6bd-scripts\") pod \"0ccdb20e-7467-40c5-be6c-62102e9ad6bd\" (UID: \"0ccdb20e-7467-40c5-be6c-62102e9ad6bd\") " Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.360180 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b2127dbe-7318-41ef-871f-16ee1db22243-config\") pod \"b2127dbe-7318-41ef-871f-16ee1db22243\" (UID: \"b2127dbe-7318-41ef-871f-16ee1db22243\") " Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.360267 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/0ccdb20e-7467-40c5-be6c-62102e9ad6bd-ovsdb-rundir\") pod \"0ccdb20e-7467-40c5-be6c-62102e9ad6bd\" (UID: \"0ccdb20e-7467-40c5-be6c-62102e9ad6bd\") " Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.364015 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b2127dbe-7318-41ef-871f-16ee1db22243-scripts" (OuterVolumeSpecName: "scripts") pod "b2127dbe-7318-41ef-871f-16ee1db22243" (UID: "b2127dbe-7318-41ef-871f-16ee1db22243"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.367456 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b2127dbe-7318-41ef-871f-16ee1db22243-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "b2127dbe-7318-41ef-871f-16ee1db22243" (UID: "b2127dbe-7318-41ef-871f-16ee1db22243"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.367967 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0ccdb20e-7467-40c5-be6c-62102e9ad6bd-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "0ccdb20e-7467-40c5-be6c-62102e9ad6bd" (UID: "0ccdb20e-7467-40c5-be6c-62102e9ad6bd"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.370834 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0ccdb20e-7467-40c5-be6c-62102e9ad6bd-scripts" (OuterVolumeSpecName: "scripts") pod "0ccdb20e-7467-40c5-be6c-62102e9ad6bd" (UID: "0ccdb20e-7467-40c5-be6c-62102e9ad6bd"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.376058 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b2127dbe-7318-41ef-871f-16ee1db22243-config" (OuterVolumeSpecName: "config") pod "b2127dbe-7318-41ef-871f-16ee1db22243" (UID: "b2127dbe-7318-41ef-871f-16ee1db22243"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.379908 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "ovndbcluster-nb-etc-ovn") pod "0ccdb20e-7467-40c5-be6c-62102e9ad6bd" (UID: "0ccdb20e-7467-40c5-be6c-62102e9ad6bd"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.379989 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0ccdb20e-7467-40c5-be6c-62102e9ad6bd-config" (OuterVolumeSpecName: "config") pod "0ccdb20e-7467-40c5-be6c-62102e9ad6bd" (UID: "0ccdb20e-7467-40c5-be6c-62102e9ad6bd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.380157 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "ovndbcluster-sb-etc-ovn") pod "b2127dbe-7318-41ef-871f-16ee1db22243" (UID: "b2127dbe-7318-41ef-871f-16ee1db22243"). InnerVolumeSpecName "local-storage09-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.389391 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-psfz5"] Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.390356 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b2127dbe-7318-41ef-871f-16ee1db22243-kube-api-access-wbdcm" (OuterVolumeSpecName: "kube-api-access-wbdcm") pod "b2127dbe-7318-41ef-871f-16ee1db22243" (UID: "b2127dbe-7318-41ef-871f-16ee1db22243"). InnerVolumeSpecName "kube-api-access-wbdcm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.429610 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0ccdb20e-7467-40c5-be6c-62102e9ad6bd-kube-api-access-slgjx" (OuterVolumeSpecName: "kube-api-access-slgjx") pod "0ccdb20e-7467-40c5-be6c-62102e9ad6bd" (UID: "0ccdb20e-7467-40c5-be6c-62102e9ad6bd"). InnerVolumeSpecName "kube-api-access-slgjx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.441198 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-d448-account-create-update-tf92z"] Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.459240 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/placement-d448-account-create-update-tf92z"] Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.462403 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bcc56301-83b3-4ff9-9290-ed029940a6da-config\") pod \"dnsmasq-dnsmasq-84b9f45d47-psfz5\" (UID: \"bcc56301-83b3-4ff9-9290-ed029940a6da\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-psfz5" Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.462464 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mlwwz\" (UniqueName: \"kubernetes.io/projected/bcc56301-83b3-4ff9-9290-ed029940a6da-kube-api-access-mlwwz\") pod \"dnsmasq-dnsmasq-84b9f45d47-psfz5\" (UID: \"bcc56301-83b3-4ff9-9290-ed029940a6da\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-psfz5" Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.462641 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/bcc56301-83b3-4ff9-9290-ed029940a6da-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-84b9f45d47-psfz5\" (UID: \"bcc56301-83b3-4ff9-9290-ed029940a6da\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-psfz5" Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.462731 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wbdcm\" (UniqueName: \"kubernetes.io/projected/b2127dbe-7318-41ef-871f-16ee1db22243-kube-api-access-wbdcm\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.462747 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-slgjx\" (UniqueName: \"kubernetes.io/projected/0ccdb20e-7467-40c5-be6c-62102e9ad6bd-kube-api-access-slgjx\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.462757 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/0ccdb20e-7467-40c5-be6c-62102e9ad6bd-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.462775 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.462785 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b2127dbe-7318-41ef-871f-16ee1db22243-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.462793 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0ccdb20e-7467-40c5-be6c-62102e9ad6bd-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.462801 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b2127dbe-7318-41ef-871f-16ee1db22243-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.462810 4558 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/0ccdb20e-7467-40c5-be6c-62102e9ad6bd-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.462819 4558 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/b2127dbe-7318-41ef-871f-16ee1db22243-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.462830 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.465144 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-d010-account-create-update-tx77w"] Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.472328 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-d010-account-create-update-tx77w"] Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.480027 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-db-create-vdx94"] Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.522080 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0ccdb20e-7467-40c5-be6c-62102e9ad6bd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0ccdb20e-7467-40c5-be6c-62102e9ad6bd" (UID: "0ccdb20e-7467-40c5-be6c-62102e9ad6bd"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.525485 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-4e5b-account-create-update-jp7rq"] Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.542327 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc" Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.545314 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.557781 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-db-create-jsj6g"] Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.565420 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/bcc56301-83b3-4ff9-9290-ed029940a6da-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-84b9f45d47-psfz5\" (UID: \"bcc56301-83b3-4ff9-9290-ed029940a6da\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-psfz5" Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.565508 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-4e5b-account-create-update-jp7rq"] Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.565681 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bcc56301-83b3-4ff9-9290-ed029940a6da-config\") pod \"dnsmasq-dnsmasq-84b9f45d47-psfz5\" (UID: \"bcc56301-83b3-4ff9-9290-ed029940a6da\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-psfz5" Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.566154 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mlwwz\" (UniqueName: \"kubernetes.io/projected/bcc56301-83b3-4ff9-9290-ed029940a6da-kube-api-access-mlwwz\") pod \"dnsmasq-dnsmasq-84b9f45d47-psfz5\" (UID: \"bcc56301-83b3-4ff9-9290-ed029940a6da\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-psfz5" Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.567006 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.567897 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.568145 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ccdb20e-7467-40c5-be6c-62102e9ad6bd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.568902 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bcc56301-83b3-4ff9-9290-ed029940a6da-config\") pod \"dnsmasq-dnsmasq-84b9f45d47-psfz5\" (UID: \"bcc56301-83b3-4ff9-9290-ed029940a6da\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-psfz5" Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.569793 4558 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/bcc56301-83b3-4ff9-9290-ed029940a6da-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-84b9f45d47-psfz5\" (UID: \"bcc56301-83b3-4ff9-9290-ed029940a6da\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-psfz5" Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.574248 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-db-create-vdx94"] Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.582775 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/placement-db-create-jsj6g"] Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.587190 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mlwwz\" (UniqueName: \"kubernetes.io/projected/bcc56301-83b3-4ff9-9290-ed029940a6da-kube-api-access-mlwwz\") pod \"dnsmasq-dnsmasq-84b9f45d47-psfz5\" (UID: \"bcc56301-83b3-4ff9-9290-ed029940a6da\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-psfz5" Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.589476 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.600382 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-2"] Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.606465 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-1"] Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.607824 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b2127dbe-7318-41ef-871f-16ee1db22243-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b2127dbe-7318-41ef-871f-16ee1db22243" (UID: "b2127dbe-7318-41ef-871f-16ee1db22243"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.621690 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-b69b-account-create-update-ct4qn"] Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.623329 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0ccdb20e-7467-40c5-be6c-62102e9ad6bd-ovsdbserver-nb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-nb-tls-certs") pod "0ccdb20e-7467-40c5-be6c-62102e9ad6bd" (UID: "0ccdb20e-7467-40c5-be6c-62102e9ad6bd"). InnerVolumeSpecName "ovsdbserver-nb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.624158 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0ccdb20e-7467-40c5-be6c-62102e9ad6bd-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "0ccdb20e-7467-40c5-be6c-62102e9ad6bd" (UID: "0ccdb20e-7467-40c5-be6c-62102e9ad6bd"). InnerVolumeSpecName "metrics-certs-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.628704 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell0-b69b-account-create-update-ct4qn"] Jan 20 17:21:33 crc kubenswrapper[4558]: E0120 17:21:33.631722 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:21:33 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:21:33 crc kubenswrapper[4558]: Jan 20 17:21:33 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:21:33 crc kubenswrapper[4558]: Jan 20 17:21:33 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:21:33 crc kubenswrapper[4558]: Jan 20 17:21:33 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:21:33 crc kubenswrapper[4558]: Jan 20 17:21:33 crc kubenswrapper[4558]: if [ -n "glance" ]; then Jan 20 17:21:33 crc kubenswrapper[4558]: GRANT_DATABASE="glance" Jan 20 17:21:33 crc kubenswrapper[4558]: else Jan 20 17:21:33 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:21:33 crc kubenswrapper[4558]: fi Jan 20 17:21:33 crc kubenswrapper[4558]: Jan 20 17:21:33 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:21:33 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:21:33 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:21:33 crc kubenswrapper[4558]: # 3. create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:21:33 crc kubenswrapper[4558]: # support updates Jan 20 17:21:33 crc kubenswrapper[4558]: Jan 20 17:21:33 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:21:33 crc kubenswrapper[4558]: E0120 17:21:33.634081 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"glance-db-secret\\\" not found\"" pod="openstack-kuttl-tests/glance-36de-account-create-update-n4ql4" podUID="52e65b62-cbeb-475e-b3ec-2a6259c44b25" Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.635386 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b2127dbe-7318-41ef-871f-16ee1db22243-ovsdbserver-sb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-sb-tls-certs") pod "b2127dbe-7318-41ef-871f-16ee1db22243" (UID: "b2127dbe-7318-41ef-871f-16ee1db22243"). InnerVolumeSpecName "ovsdbserver-sb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.636339 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-db-create-z7w7v"] Jan 20 17:21:33 crc kubenswrapper[4558]: E0120 17:21:33.637725 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:21:33 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:21:33 crc kubenswrapper[4558]: Jan 20 17:21:33 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:21:33 crc kubenswrapper[4558]: Jan 20 17:21:33 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:21:33 crc kubenswrapper[4558]: Jan 20 17:21:33 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:21:33 crc kubenswrapper[4558]: Jan 20 17:21:33 crc kubenswrapper[4558]: if [ -n "neutron" ]; then Jan 20 17:21:33 crc kubenswrapper[4558]: GRANT_DATABASE="neutron" Jan 20 17:21:33 crc kubenswrapper[4558]: else Jan 20 17:21:33 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:21:33 crc kubenswrapper[4558]: fi Jan 20 17:21:33 crc kubenswrapper[4558]: Jan 20 17:21:33 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:21:33 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:21:33 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:21:33 crc kubenswrapper[4558]: # 3. create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:21:33 crc kubenswrapper[4558]: # support updates Jan 20 17:21:33 crc kubenswrapper[4558]: Jan 20 17:21:33 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:21:33 crc kubenswrapper[4558]: E0120 17:21:33.638942 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"neutron-db-secret\\\" not found\"" pod="openstack-kuttl-tests/neutron-247a-account-create-update-8drms" podUID="25e48519-09af-42b4-835a-485273267aca" Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.639403 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/rabbitmq-server-0" podUID="24114ddb-3b30-42ac-9d61-cfeb15d58728" containerName="rabbitmq" containerID="cri-o://33a9e1abb20c3e3bf6a51b785efcb0a4eaf6964e1b8392e4977fc9f4c7b03f61" gracePeriod=604800 Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.644814 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-db-create-z7w7v"] Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.646965 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-psfz5" Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.652109 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/rabbitmq-server-2" podUID="9c52efaf-b737-47bf-9ca1-109a28e19113" containerName="rabbitmq" containerID="cri-o://ae9e159fcd24af6ebaa1ca94fb0944dc8d3bcbddd517479b3fa0699d53c7017a" gracePeriod=604800 Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.657656 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-db-create-hpfj8"] Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.675385 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.675781 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="55138e51-715c-42a1-8e1d-bca65a31d55c" containerName="glance-log" containerID="cri-o://9a84634fbab19582b6a07c9fe1a1511ad1a285ca37ba97810d1c6c72e31ebe4c" gracePeriod=30 Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.676443 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="55138e51-715c-42a1-8e1d-bca65a31d55c" containerName="glance-httpd" containerID="cri-o://ad7e074652e180a31db91a23980de422d274542b0626f62f61e86a2d3f3088d7" gracePeriod=30 Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.681931 4558 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b2127dbe-7318-41ef-871f-16ee1db22243-ovsdbserver-sb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.681951 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2127dbe-7318-41ef-871f-16ee1db22243-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.681961 4558 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/0ccdb20e-7467-40c5-be6c-62102e9ad6bd-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.681970 4558 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/0ccdb20e-7467-40c5-be6c-62102e9ad6bd-ovsdbserver-nb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.687380 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell0-db-create-hpfj8"] Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.697247 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b2127dbe-7318-41ef-871f-16ee1db22243-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "b2127dbe-7318-41ef-871f-16ee1db22243" (UID: "b2127dbe-7318-41ef-871f-16ee1db22243"). InnerVolumeSpecName "metrics-certs-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.707999 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-db-create-7nj9x"] Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.717239 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/rabbitmq-server-1" podUID="2c809052-d9bb-4982-8271-5b7a9a6f28f9" containerName="rabbitmq" containerID="cri-o://81a644c98c6093aa1efc528706c31e081b18455e9ddaf52cd7d7f878ae9f848a" gracePeriod=604800 Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.719523 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-db-create-7nj9x"] Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.730215 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.730471 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="20cdc3df-1626-4f56-8a92-9df64c1ed2fe" containerName="glance-log" containerID="cri-o://e156d2f38d6403388f01acbe07670bd0efccb2c563ca79b7e6061ec9d3596440" gracePeriod=30 Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.730908 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="20cdc3df-1626-4f56-8a92-9df64c1ed2fe" containerName="glance-httpd" containerID="cri-o://37ed8e0f109228dfcc4e9447b9278994669497b6597de7b6864c0f3e20436236" gracePeriod=30 Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.740342 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-r6qz8"] Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.750228 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-1"] Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.763262 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-2"] Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.771040 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.779885 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-db-create-vvdf2"] Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.788147 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-db-create-vvdf2"] Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.794456 4558 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/b2127dbe-7318-41ef-871f-16ee1db22243-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.794668 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-247a-account-create-update-8drms"] Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.818582 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-36de-account-create-update-n4ql4"] Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.818642 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-db-create-9d9jp"] Jan 
20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.834711 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-db-create-9d9jp"] Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.903909 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-5c7bf5978d-d2t27"] Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.904158 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-5c7bf5978d-d2t27" podUID="abb17bba-a50c-4b0f-941b-3a09a8bcac1e" containerName="neutron-api" containerID="cri-o://fb57b9facd8c8baac31528678e058b6e227a981807b81524108e8fec27a589a1" gracePeriod=30 Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.904605 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-5c7bf5978d-d2t27" podUID="abb17bba-a50c-4b0f-941b-3a09a8bcac1e" containerName="neutron-httpd" containerID="cri-o://71a8bccb6db5181577ff2da6379fafa98f3cfa0c15a2c71a8806e5f0bc7f567a" gracePeriod=30 Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.948222 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-worker-795d8f7875-gfh24"] Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.948485 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-worker-795d8f7875-gfh24" podUID="2f8ae24b-258e-4d32-b312-99f5015f83d6" containerName="barbican-worker-log" containerID="cri-o://daed2ff09c57989ebf417f453b1c97a9e38554550e8b7bcfd620161001b8c813" gracePeriod=30 Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.948673 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-worker-795d8f7875-gfh24" podUID="2f8ae24b-258e-4d32-b312-99f5015f83d6" containerName="barbican-worker" containerID="cri-o://454a41b471f0eb3acc32e253ce4e5b3a99599e9feb264d9effdd29c6859dd9d0" gracePeriod=30 Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.953343 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-9b8896fdb-7mrlf"] Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.953506 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-keystone-listener-9b8896fdb-7mrlf" podUID="a948066b-fa95-4fbb-83e6-6f26f6c76652" containerName="barbican-keystone-listener-log" containerID="cri-o://c97f19d9a03644214f7c3a4eb6b798453f97f1b12067ab4dab1050a2bc364779" gracePeriod=30 Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.953694 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-keystone-listener-9b8896fdb-7mrlf" podUID="a948066b-fa95-4fbb-83e6-6f26f6c76652" containerName="barbican-keystone-listener" containerID="cri-o://67f52d695e7843e5e1f00cfd6b4ff76e79ba46c21e0baabc0a21e634dcae0c6e" gracePeriod=30 Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.956378 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-db-create-kpclp"] Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.965188 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-db-create-kpclp"] Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.972934 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-api-7b899ccb8d-cfqnl"] Jan 20 17:21:33 crc 
kubenswrapper[4558]: I0120 17:21:33.973298 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-api-7b899ccb8d-cfqnl" podUID="14af4a81-8568-425c-a209-4d90a042c365" containerName="barbican-api-log" containerID="cri-o://c09143ce6ec1936f6f2c98c148ac4afc51f7b093ad7e905e871ff92ac19dfd9f" gracePeriod=30 Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.973535 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-api-7b899ccb8d-cfqnl" podUID="14af4a81-8568-425c-a209-4d90a042c365" containerName="barbican-api" containerID="cri-o://e46adee8ea97e0c6d658d436cc56ebc7b96b6a0ab6ea4685475eaa56dee6f641" gracePeriod=30 Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.981540 4558 generic.go:334] "Generic (PLEG): container finished" podID="533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b" containerID="2eff65431cd3f96e218fee39fc3cc4517345e8ec098ef9aeba65d6a34e79cd42" exitCode=143 Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.981639 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b","Type":"ContainerDied","Data":"2eff65431cd3f96e218fee39fc3cc4517345e8ec098ef9aeba65d6a34e79cd42"} Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.984307 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:21:33 crc kubenswrapper[4558]: I0120 17:21:33.984495 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" podUID="64462382-0fd6-4796-8079-d66c2bc27d95" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://0a8bffbad678e5cea118776d64f4cc6254e390d1759d980ea03c2b9491caaaa2" gracePeriod=30 Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:33.997037 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-36de-account-create-update-n4ql4"] Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.000532 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-36de-account-create-update-n4ql4" event={"ID":"52e65b62-cbeb-475e-b3ec-2a6259c44b25","Type":"ContainerStarted","Data":"d83892ba5e59e6bfcd672ecc79b79b0cb0fffed58e60b1769c488844ef02fbeb"} Jan 20 17:21:34 crc kubenswrapper[4558]: E0120 17:21:34.005759 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:21:34 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:21:34 crc kubenswrapper[4558]: Jan 20 17:21:34 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:21:34 crc kubenswrapper[4558]: Jan 20 17:21:34 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:21:34 crc kubenswrapper[4558]: Jan 20 17:21:34 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:21:34 crc kubenswrapper[4558]: Jan 20 17:21:34 crc kubenswrapper[4558]: if [ -n "glance" ]; then Jan 20 17:21:34 crc kubenswrapper[4558]: GRANT_DATABASE="glance" Jan 20 17:21:34 crc kubenswrapper[4558]: else Jan 20 17:21:34 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:21:34 crc kubenswrapper[4558]: fi Jan 20 17:21:34 crc 
kubenswrapper[4558]: Jan 20 17:21:34 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:21:34 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:21:34 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:21:34 crc kubenswrapper[4558]: # 3. create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:21:34 crc kubenswrapper[4558]: # support updates Jan 20 17:21:34 crc kubenswrapper[4558]: Jan 20 17:21:34 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:21:34 crc kubenswrapper[4558]: E0120 17:21:34.007682 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"glance-db-secret\\\" not found\"" pod="openstack-kuttl-tests/glance-36de-account-create-update-n4ql4" podUID="52e65b62-cbeb-475e-b3ec-2a6259c44b25" Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.025267 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-r6qz8" event={"ID":"4eb93277-301f-486e-a1e9-f323f9bd9cb4","Type":"ContainerStarted","Data":"96a895c6fbd90cf00b90eff69a6a1940e04dcdc631f75368bf539acb994d8e55"} Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.051137 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-247a-account-create-update-8drms"] Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.105151 4558 generic.go:334] "Generic (PLEG): container finished" podID="e72083f2-d324-432e-9bb0-5f44f2023489" containerID="e1f212cc615420dc0adbf996976347dda8d27a4a5471651930f4f497d67e903f" exitCode=143 Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.105415 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"e72083f2-d324-432e-9bb0-5f44f2023489","Type":"ContainerDied","Data":"e1f212cc615420dc0adbf996976347dda8d27a4a5471651930f4f497d67e903f"} Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.117615 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/root-account-create-update-r6qz8" podStartSLOduration=3.117584244 podStartE2EDuration="3.117584244s" podCreationTimestamp="2026-01-20 17:21:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:21:34.105662078 +0000 UTC m=+2387.866000044" watchObservedRunningTime="2026-01-20 17:21:34.117584244 +0000 UTC m=+2387.877922211" Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.145211 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-sb-0_b2127dbe-7318-41ef-871f-16ee1db22243/ovsdbserver-sb/0.log" Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.145354 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"b2127dbe-7318-41ef-871f-16ee1db22243","Type":"ContainerDied","Data":"223ee8504524a30c2495e065b1fc6e55126ea838b301c82a6ab316b95d82b101"} Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.145361 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.145408 4558 scope.go:117] "RemoveContainer" containerID="a92ae70194b9e1d3dcba1794977e0ca18866d630d73b53d8ed2581ac1fe1d3ac" Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.344579 4558 scope.go:117] "RemoveContainer" containerID="ba8bdba5d5e7c4a36c71eb8170fbd87c43bae0b18995a96e5c7e162d103c69d1" Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.352143 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-psfz5"] Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.356469 4558 generic.go:334] "Generic (PLEG): container finished" podID="09d95674-9970-471d-8061-997e69ddda11" containerID="523536e2b2a85c3c10d4dc392b34b04a3e55effe5a2bcd5562e1367958047b32" exitCode=143 Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.356532 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"09d95674-9970-471d-8061-997e69ddda11","Type":"ContainerDied","Data":"523536e2b2a85c3c10d4dc392b34b04a3e55effe5a2bcd5562e1367958047b32"} Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.378352 4558 generic.go:334] "Generic (PLEG): container finished" podID="7490595f-9782-44f0-bfb1-811f2f1d65e3" containerID="077461e7717ab6966108b72a1527abc834132888244fc51cc68cc84642df8e4e" exitCode=137 Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.381199 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-247a-account-create-update-8drms" event={"ID":"25e48519-09af-42b4-835a-485273267aca","Type":"ContainerStarted","Data":"a7926e3301a17486714d5610f57524ed1433920eee2805d317efcb8cd65b12d3"} Jan 20 17:21:34 crc kubenswrapper[4558]: E0120 17:21:34.392428 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:21:34 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:21:34 crc kubenswrapper[4558]: Jan 20 17:21:34 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:21:34 crc kubenswrapper[4558]: Jan 20 17:21:34 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:21:34 crc kubenswrapper[4558]: Jan 20 17:21:34 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:21:34 crc kubenswrapper[4558]: Jan 20 17:21:34 crc kubenswrapper[4558]: if [ -n "neutron" ]; then Jan 20 17:21:34 crc kubenswrapper[4558]: GRANT_DATABASE="neutron" Jan 20 17:21:34 crc kubenswrapper[4558]: else Jan 20 17:21:34 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:21:34 crc kubenswrapper[4558]: fi Jan 20 17:21:34 crc kubenswrapper[4558]: Jan 20 17:21:34 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:21:34 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:21:34 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:21:34 crc kubenswrapper[4558]: # 3. 
create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:21:34 crc kubenswrapper[4558]: # support updates Jan 20 17:21:34 crc kubenswrapper[4558]: Jan 20 17:21:34 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.404475 4558 generic.go:334] "Generic (PLEG): container finished" podID="f2c69e78-4cbd-41a8-810d-bdc4f56cabfd" containerID="3282788cd0138924b505daecd0b6616ec4d4fb23cc9f1c9f3179db8ba4606bbd" exitCode=143 Jan 20 17:21:34 crc kubenswrapper[4558]: E0120 17:21:34.404518 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"neutron-db-secret\\\" not found\"" pod="openstack-kuttl-tests/neutron-247a-account-create-update-8drms" podUID="25e48519-09af-42b4-835a-485273267aca" Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.404527 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-6b7cbc9658-z4t27" event={"ID":"f2c69e78-4cbd-41a8-810d-bdc4f56cabfd","Type":"ContainerDied","Data":"3282788cd0138924b505daecd0b6616ec4d4fb23cc9f1c9f3179db8ba4606bbd"} Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.426358 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/openstack-cell1-galera-2" podUID="9adbb250-26e3-4e45-b60f-3b4c83d59ba3" containerName="galera" containerID="cri-o://648217de53b0389dbe7690a275b0c4b85571e0144e4fa2b93968f2f4497fa9f1" gracePeriod=30 Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.432388 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.438764 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.443397 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstackclient" Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.459304 4558 generic.go:334] "Generic (PLEG): container finished" podID="fae781f7-d8cb-4f32-992f-43d7dac82655" containerID="4f8e0074decabf3ae64fdf9bf0b1b9bfcd33bc833a5040505bdb4f80fb68ce7c" exitCode=0 Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.459327 4558 generic.go:334] "Generic (PLEG): container finished" podID="fae781f7-d8cb-4f32-992f-43d7dac82655" containerID="0625dd3b638d8b981039c7b40441b9f3eb96afe32772f7a668fc9f6739b2ce43" exitCode=0 Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.459337 4558 generic.go:334] "Generic (PLEG): container finished" podID="fae781f7-d8cb-4f32-992f-43d7dac82655" containerID="1a43e8cd885dce5f981539c99de1df7006b77acee5bfe4dc7aee4e81c1433375" exitCode=0 Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.459345 4558 generic.go:334] "Generic (PLEG): container finished" podID="fae781f7-d8cb-4f32-992f-43d7dac82655" containerID="cbd610baf4709c4651944a2af3a0c48b9c204a1089c08055228c74c96f5c28b9" exitCode=0 Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.459352 4558 generic.go:334] "Generic (PLEG): container finished" podID="fae781f7-d8cb-4f32-992f-43d7dac82655" containerID="617b43e1502aaece76174ffd472b995e859c9b75744441831fd4fbd240eacd18" exitCode=0 Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.459358 4558 generic.go:334] "Generic (PLEG): container finished" podID="fae781f7-d8cb-4f32-992f-43d7dac82655" containerID="e739685a9bc530c37217e2f6a62d401027fed825626a553b5d0e17838ebce2ab" exitCode=0 Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.459364 4558 generic.go:334] "Generic (PLEG): container finished" podID="fae781f7-d8cb-4f32-992f-43d7dac82655" containerID="ba569f18d36a42b3a1ada97f895e8e6f12935ce1058001df0619906556fc1d82" exitCode=0 Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.459370 4558 generic.go:334] "Generic (PLEG): container finished" podID="fae781f7-d8cb-4f32-992f-43d7dac82655" containerID="844d6ed85e1a90ecc3e801bcad9126bb9e1b7e1969ac475bcae5bcc3fac3d05e" exitCode=0 Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.459375 4558 generic.go:334] "Generic (PLEG): container finished" podID="fae781f7-d8cb-4f32-992f-43d7dac82655" containerID="14b8554fd67e8203eaf876170ae3e7775aa3b4c177bc6900890e44c6bc4a44f4" exitCode=0 Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.459381 4558 generic.go:334] "Generic (PLEG): container finished" podID="fae781f7-d8cb-4f32-992f-43d7dac82655" containerID="03d0eae4935912c7dedaac60a57d7749cb144a4fb7adabc826dffe289fecf3a3" exitCode=0 Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.459386 4558 generic.go:334] "Generic (PLEG): container finished" podID="fae781f7-d8cb-4f32-992f-43d7dac82655" containerID="04e1cbf4a49ae809f6cd9ad66cf664edf23a039b8a03c43ccc28c839028f90d4" exitCode=0 Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.459392 4558 generic.go:334] "Generic (PLEG): container finished" podID="fae781f7-d8cb-4f32-992f-43d7dac82655" containerID="449d04837104cda04f1b998b58315c57a3f8dd508073572dfb1cf641a8e99cdf" exitCode=0 Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.459397 4558 generic.go:334] "Generic (PLEG): container finished" podID="fae781f7-d8cb-4f32-992f-43d7dac82655" containerID="10b8b90c30337c202ffa5efc8376aef571af66cac566f514e6c26f2a3c0a8340" exitCode=0 Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.459401 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fae781f7-d8cb-4f32-992f-43d7dac82655","Type":"ContainerDied","Data":"4f8e0074decabf3ae64fdf9bf0b1b9bfcd33bc833a5040505bdb4f80fb68ce7c"} Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.459452 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fae781f7-d8cb-4f32-992f-43d7dac82655","Type":"ContainerDied","Data":"0625dd3b638d8b981039c7b40441b9f3eb96afe32772f7a668fc9f6739b2ce43"} Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.459468 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fae781f7-d8cb-4f32-992f-43d7dac82655","Type":"ContainerDied","Data":"1a43e8cd885dce5f981539c99de1df7006b77acee5bfe4dc7aee4e81c1433375"} Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.459476 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fae781f7-d8cb-4f32-992f-43d7dac82655","Type":"ContainerDied","Data":"cbd610baf4709c4651944a2af3a0c48b9c204a1089c08055228c74c96f5c28b9"} Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.459485 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fae781f7-d8cb-4f32-992f-43d7dac82655","Type":"ContainerDied","Data":"617b43e1502aaece76174ffd472b995e859c9b75744441831fd4fbd240eacd18"} Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.459494 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fae781f7-d8cb-4f32-992f-43d7dac82655","Type":"ContainerDied","Data":"e739685a9bc530c37217e2f6a62d401027fed825626a553b5d0e17838ebce2ab"} Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.459502 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fae781f7-d8cb-4f32-992f-43d7dac82655","Type":"ContainerDied","Data":"ba569f18d36a42b3a1ada97f895e8e6f12935ce1058001df0619906556fc1d82"} Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.459510 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fae781f7-d8cb-4f32-992f-43d7dac82655","Type":"ContainerDied","Data":"844d6ed85e1a90ecc3e801bcad9126bb9e1b7e1969ac475bcae5bcc3fac3d05e"} Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.459518 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fae781f7-d8cb-4f32-992f-43d7dac82655","Type":"ContainerDied","Data":"14b8554fd67e8203eaf876170ae3e7775aa3b4c177bc6900890e44c6bc4a44f4"} Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.459526 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fae781f7-d8cb-4f32-992f-43d7dac82655","Type":"ContainerDied","Data":"03d0eae4935912c7dedaac60a57d7749cb144a4fb7adabc826dffe289fecf3a3"} Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.459536 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fae781f7-d8cb-4f32-992f-43d7dac82655","Type":"ContainerDied","Data":"04e1cbf4a49ae809f6cd9ad66cf664edf23a039b8a03c43ccc28c839028f90d4"} Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.459552 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" 
event={"ID":"fae781f7-d8cb-4f32-992f-43d7dac82655","Type":"ContainerDied","Data":"449d04837104cda04f1b998b58315c57a3f8dd508073572dfb1cf641a8e99cdf"} Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.459560 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fae781f7-d8cb-4f32-992f-43d7dac82655","Type":"ContainerDied","Data":"10b8b90c30337c202ffa5efc8376aef571af66cac566f514e6c26f2a3c0a8340"} Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.479439 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-nb-0_0ccdb20e-7467-40c5-be6c-62102e9ad6bd/ovsdbserver-nb/0.log" Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.479503 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"0ccdb20e-7467-40c5-be6c-62102e9ad6bd","Type":"ContainerDied","Data":"f0b4760d94941c1c76a55b8d857a669feb9ec3c02eb791ff4b30bc484111d6d1"} Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.479528 4558 scope.go:117] "RemoveContainer" containerID="0c6c50e15f6ca65f62124e7e4fd437de49116ef77e49acb8744a70d8b87c4220" Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.479616 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.486971 4558 generic.go:334] "Generic (PLEG): container finished" podID="55138e51-715c-42a1-8e1d-bca65a31d55c" containerID="9a84634fbab19582b6a07c9fe1a1511ad1a285ca37ba97810d1c6c72e31ebe4c" exitCode=143 Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.487036 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"55138e51-715c-42a1-8e1d-bca65a31d55c","Type":"ContainerDied","Data":"9a84634fbab19582b6a07c9fe1a1511ad1a285ca37ba97810d1c6c72e31ebe4c"} Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.534633 4558 scope.go:117] "RemoveContainer" containerID="f38d913a0ad6428f06323145898acb1a9022803bf80d196bd130931d31c90fab" Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.534877 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.536044 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rw8cj\" (UniqueName: \"kubernetes.io/projected/7490595f-9782-44f0-bfb1-811f2f1d65e3-kube-api-access-rw8cj\") pod \"7490595f-9782-44f0-bfb1-811f2f1d65e3\" (UID: \"7490595f-9782-44f0-bfb1-811f2f1d65e3\") " Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.536194 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/7490595f-9782-44f0-bfb1-811f2f1d65e3-openstack-config\") pod \"7490595f-9782-44f0-bfb1-811f2f1d65e3\" (UID: \"7490595f-9782-44f0-bfb1-811f2f1d65e3\") " Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.536355 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/7490595f-9782-44f0-bfb1-811f2f1d65e3-openstack-config-secret\") pod \"7490595f-9782-44f0-bfb1-811f2f1d65e3\" (UID: \"7490595f-9782-44f0-bfb1-811f2f1d65e3\") " Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.536835 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7490595f-9782-44f0-bfb1-811f2f1d65e3-combined-ca-bundle\") pod \"7490595f-9782-44f0-bfb1-811f2f1d65e3\" (UID: \"7490595f-9782-44f0-bfb1-811f2f1d65e3\") " Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.544534 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.554894 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7490595f-9782-44f0-bfb1-811f2f1d65e3-kube-api-access-rw8cj" (OuterVolumeSpecName: "kube-api-access-rw8cj") pod "7490595f-9782-44f0-bfb1-811f2f1d65e3" (UID: "7490595f-9782-44f0-bfb1-811f2f1d65e3"). InnerVolumeSpecName "kube-api-access-rw8cj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.584490 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0ccdb20e-7467-40c5-be6c-62102e9ad6bd" path="/var/lib/kubelet/pods/0ccdb20e-7467-40c5-be6c-62102e9ad6bd/volumes" Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.585122 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="11af03de-aac9-4531-9c0f-af093ba4d7ea" path="/var/lib/kubelet/pods/11af03de-aac9-4531-9c0f-af093ba4d7ea/volumes" Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.586893 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="16316176-29c1-476a-8142-6a3427dd7da4" path="/var/lib/kubelet/pods/16316176-29c1-476a-8142-6a3427dd7da4/volumes" Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.588589 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="17c2543e-2ad7-4bfa-a28f-34be13c6b966" path="/var/lib/kubelet/pods/17c2543e-2ad7-4bfa-a28f-34be13c6b966/volumes" Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.589108 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1ccfacb0-c21a-4956-8192-10a0a41409c7" path="/var/lib/kubelet/pods/1ccfacb0-c21a-4956-8192-10a0a41409c7/volumes" Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.589903 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="27829a8a-3c57-41bc-8e2a-361451d19075" path="/var/lib/kubelet/pods/27829a8a-3c57-41bc-8e2a-361451d19075/volumes" Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.590922 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2fff8a17-65b9-4288-81eb-337010e076f1" path="/var/lib/kubelet/pods/2fff8a17-65b9-4288-81eb-337010e076f1/volumes" Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.592018 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3a99ccdf-41ef-42bc-8d10-4c2cf74d3ac9" path="/var/lib/kubelet/pods/3a99ccdf-41ef-42bc-8d10-4c2cf74d3ac9/volumes" Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.592544 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="400d0672-2068-470f-983f-df899be25bd5" path="/var/lib/kubelet/pods/400d0672-2068-470f-983f-df899be25bd5/volumes" Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.593027 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5659e8f8-3581-4ecf-a2ee-0c6d044b17e2" path="/var/lib/kubelet/pods/5659e8f8-3581-4ecf-a2ee-0c6d044b17e2/volumes" Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.593976 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5938c698-384b-4c5c-9c62-10f2a8a0ad1a" 
path="/var/lib/kubelet/pods/5938c698-384b-4c5c-9c62-10f2a8a0ad1a/volumes" Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.594487 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6f96bd13-4e3b-4731-8b13-7325df4bdd25" path="/var/lib/kubelet/pods/6f96bd13-4e3b-4731-8b13-7325df4bdd25/volumes" Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.595056 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9812bc23-9bb1-4b3a-975b-0bffa6e3184f" path="/var/lib/kubelet/pods/9812bc23-9bb1-4b3a-975b-0bffa6e3184f/volumes" Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.595909 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9a1b94fe-db72-418e-bc3b-5dd27adff225" path="/var/lib/kubelet/pods/9a1b94fe-db72-418e-bc3b-5dd27adff225/volumes" Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.596451 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b2127dbe-7318-41ef-871f-16ee1db22243" path="/var/lib/kubelet/pods/b2127dbe-7318-41ef-871f-16ee1db22243/volumes" Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.596950 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c9ccad35-4742-4e3e-b281-d76be66e44fd" path="/var/lib/kubelet/pods/c9ccad35-4742-4e3e-b281-d76be66e44fd/volumes" Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.597961 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d9de665d-8b72-45e0-9829-5bd72bd86f41" path="/var/lib/kubelet/pods/d9de665d-8b72-45e0-9829-5bd72bd86f41/volumes" Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.619374 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7490595f-9782-44f0-bfb1-811f2f1d65e3-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "7490595f-9782-44f0-bfb1-811f2f1d65e3" (UID: "7490595f-9782-44f0-bfb1-811f2f1d65e3"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.639529 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rw8cj\" (UniqueName: \"kubernetes.io/projected/7490595f-9782-44f0-bfb1-811f2f1d65e3-kube-api-access-rw8cj\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.639562 4558 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/7490595f-9782-44f0-bfb1-811f2f1d65e3-openstack-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.654123 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7490595f-9782-44f0-bfb1-811f2f1d65e3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7490595f-9782-44f0-bfb1-811f2f1d65e3" (UID: "7490595f-9782-44f0-bfb1-811f2f1d65e3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.661394 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7490595f-9782-44f0-bfb1-811f2f1d65e3-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "7490595f-9782-44f0-bfb1-811f2f1d65e3" (UID: "7490595f-9782-44f0-bfb1-811f2f1d65e3"). InnerVolumeSpecName "openstack-config-secret". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.718712 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-proxy-85c8dfdd74-sjsz2"] Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.719379 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-proxy-85c8dfdd74-sjsz2" podUID="9d974489-32f7-4541-9a58-6a215c5d2071" containerName="proxy-httpd" containerID="cri-o://cfbe93ac18d993e620cd8106e1b690070a764b00d594313c60a12f9f0b8d2527" gracePeriod=30 Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.719715 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-proxy-85c8dfdd74-sjsz2" podUID="9d974489-32f7-4541-9a58-6a215c5d2071" containerName="proxy-server" containerID="cri-o://47bcce107135d29c478003906fbf3f85f03c5a969a0a67dbccdda34ff7841132" gracePeriod=30 Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.742524 4558 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/7490595f-9782-44f0-bfb1-811f2f1d65e3-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:34 crc kubenswrapper[4558]: I0120 17:21:34.742633 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7490595f-9782-44f0-bfb1-811f2f1d65e3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:34 crc kubenswrapper[4558]: E0120 17:21:34.949492 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Jan 20 17:21:34 crc kubenswrapper[4558]: E0120 17:21:34.949561 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/6dae93de-b907-44f7-a94c-c691eee0af7f-config-data podName:6dae93de-b907-44f7-a94c-c691eee0af7f nodeName:}" failed. No retries permitted until 2026-01-20 17:21:38.949546352 +0000 UTC m=+2392.709884319 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/6dae93de-b907-44f7-a94c-c691eee0af7f-config-data") pod "rabbitmq-cell1-server-0" (UID: "6dae93de-b907-44f7-a94c-c691eee0af7f") : configmap "rabbitmq-cell1-config-data" not found Jan 20 17:21:35 crc kubenswrapper[4558]: E0120 17:21:35.052113 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Jan 20 17:21:35 crc kubenswrapper[4558]: E0120 17:21:35.052285 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/344f9f31-8a81-4544-b782-5aa78dfc5cc2-config-data podName:344f9f31-8a81-4544-b782-5aa78dfc5cc2 nodeName:}" failed. No retries permitted until 2026-01-20 17:21:39.052267949 +0000 UTC m=+2392.812605915 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/344f9f31-8a81-4544-b782-5aa78dfc5cc2-config-data") pod "rabbitmq-cell1-server-2" (UID: "344f9f31-8a81-4544-b782-5aa78dfc5cc2") : configmap "rabbitmq-cell1-config-data" not found Jan 20 17:21:35 crc kubenswrapper[4558]: E0120 17:21:35.157342 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Jan 20 17:21:35 crc kubenswrapper[4558]: E0120 17:21:35.157509 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/8573a632-84e0-4f80-b811-5646b571c318-config-data podName:8573a632-84e0-4f80-b811-5646b571c318 nodeName:}" failed. No retries permitted until 2026-01-20 17:21:39.157480421 +0000 UTC m=+2392.917818387 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/8573a632-84e0-4f80-b811-5646b571c318-config-data") pod "rabbitmq-cell1-server-1" (UID: "8573a632-84e0-4f80-b811-5646b571c318") : configmap "rabbitmq-cell1-config-data" not found Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.192860 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.260868 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/64462382-0fd6-4796-8079-d66c2bc27d95-nova-novncproxy-tls-certs\") pod \"64462382-0fd6-4796-8079-d66c2bc27d95\" (UID: \"64462382-0fd6-4796-8079-d66c2bc27d95\") " Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.260978 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/64462382-0fd6-4796-8079-d66c2bc27d95-vencrypt-tls-certs\") pod \"64462382-0fd6-4796-8079-d66c2bc27d95\" (UID: \"64462382-0fd6-4796-8079-d66c2bc27d95\") " Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.261239 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nz9lt\" (UniqueName: \"kubernetes.io/projected/64462382-0fd6-4796-8079-d66c2bc27d95-kube-api-access-nz9lt\") pod \"64462382-0fd6-4796-8079-d66c2bc27d95\" (UID: \"64462382-0fd6-4796-8079-d66c2bc27d95\") " Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.261307 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64462382-0fd6-4796-8079-d66c2bc27d95-combined-ca-bundle\") pod \"64462382-0fd6-4796-8079-d66c2bc27d95\" (UID: \"64462382-0fd6-4796-8079-d66c2bc27d95\") " Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.261464 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64462382-0fd6-4796-8079-d66c2bc27d95-config-data\") pod \"64462382-0fd6-4796-8079-d66c2bc27d95\" (UID: \"64462382-0fd6-4796-8079-d66c2bc27d95\") " Jan 20 17:21:35 crc kubenswrapper[4558]: E0120 17:21:35.262682 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 20 17:21:35 crc kubenswrapper[4558]: E0120 17:21:35.262747 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/24114ddb-3b30-42ac-9d61-cfeb15d58728-config-data 
podName:24114ddb-3b30-42ac-9d61-cfeb15d58728 nodeName:}" failed. No retries permitted until 2026-01-20 17:21:39.262729671 +0000 UTC m=+2393.023067638 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/24114ddb-3b30-42ac-9d61-cfeb15d58728-config-data") pod "rabbitmq-server-0" (UID: "24114ddb-3b30-42ac-9d61-cfeb15d58728") : configmap "rabbitmq-config-data" not found Jan 20 17:21:35 crc kubenswrapper[4558]: E0120 17:21:35.265115 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 20 17:21:35 crc kubenswrapper[4558]: E0120 17:21:35.265221 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/9c52efaf-b737-47bf-9ca1-109a28e19113-config-data podName:9c52efaf-b737-47bf-9ca1-109a28e19113 nodeName:}" failed. No retries permitted until 2026-01-20 17:21:39.265208694 +0000 UTC m=+2393.025546662 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/9c52efaf-b737-47bf-9ca1-109a28e19113-config-data") pod "rabbitmq-server-2" (UID: "9c52efaf-b737-47bf-9ca1-109a28e19113") : configmap "rabbitmq-config-data" not found Jan 20 17:21:35 crc kubenswrapper[4558]: E0120 17:21:35.265289 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 20 17:21:35 crc kubenswrapper[4558]: E0120 17:21:35.265316 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/2c809052-d9bb-4982-8271-5b7a9a6f28f9-config-data podName:2c809052-d9bb-4982-8271-5b7a9a6f28f9 nodeName:}" failed. No retries permitted until 2026-01-20 17:21:39.265309003 +0000 UTC m=+2393.025646971 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/2c809052-d9bb-4982-8271-5b7a9a6f28f9-config-data") pod "rabbitmq-server-1" (UID: "2c809052-d9bb-4982-8271-5b7a9a6f28f9") : configmap "rabbitmq-config-data" not found Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.283623 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/64462382-0fd6-4796-8079-d66c2bc27d95-kube-api-access-nz9lt" (OuterVolumeSpecName: "kube-api-access-nz9lt") pod "64462382-0fd6-4796-8079-d66c2bc27d95" (UID: "64462382-0fd6-4796-8079-d66c2bc27d95"). InnerVolumeSpecName "kube-api-access-nz9lt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.298766 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64462382-0fd6-4796-8079-d66c2bc27d95-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "64462382-0fd6-4796-8079-d66c2bc27d95" (UID: "64462382-0fd6-4796-8079-d66c2bc27d95"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.324965 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64462382-0fd6-4796-8079-d66c2bc27d95-vencrypt-tls-certs" (OuterVolumeSpecName: "vencrypt-tls-certs") pod "64462382-0fd6-4796-8079-d66c2bc27d95" (UID: "64462382-0fd6-4796-8079-d66c2bc27d95"). InnerVolumeSpecName "vencrypt-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.332044 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64462382-0fd6-4796-8079-d66c2bc27d95-config-data" (OuterVolumeSpecName: "config-data") pod "64462382-0fd6-4796-8079-d66c2bc27d95" (UID: "64462382-0fd6-4796-8079-d66c2bc27d95"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.366157 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64462382-0fd6-4796-8079-d66c2bc27d95-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.366210 4558 reconciler_common.go:293] "Volume detached for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/64462382-0fd6-4796-8079-d66c2bc27d95-vencrypt-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.366227 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nz9lt\" (UniqueName: \"kubernetes.io/projected/64462382-0fd6-4796-8079-d66c2bc27d95-kube-api-access-nz9lt\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.366241 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64462382-0fd6-4796-8079-d66c2bc27d95-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.371286 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64462382-0fd6-4796-8079-d66c2bc27d95-nova-novncproxy-tls-certs" (OuterVolumeSpecName: "nova-novncproxy-tls-certs") pod "64462382-0fd6-4796-8079-d66c2bc27d95" (UID: "64462382-0fd6-4796-8079-d66c2bc27d95"). InnerVolumeSpecName "nova-novncproxy-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.449324 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstack-cell1-galera-2" Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.472801 4558 reconciler_common.go:293] "Volume detached for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/64462382-0fd6-4796-8079-d66c2bc27d95-nova-novncproxy-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.497577 4558 generic.go:334] "Generic (PLEG): container finished" podID="a948066b-fa95-4fbb-83e6-6f26f6c76652" containerID="c97f19d9a03644214f7c3a4eb6b798453f97f1b12067ab4dab1050a2bc364779" exitCode=143 Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.497669 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-9b8896fdb-7mrlf" event={"ID":"a948066b-fa95-4fbb-83e6-6f26f6c76652","Type":"ContainerDied","Data":"c97f19d9a03644214f7c3a4eb6b798453f97f1b12067ab4dab1050a2bc364779"} Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.501105 4558 generic.go:334] "Generic (PLEG): container finished" podID="2f8ae24b-258e-4d32-b312-99f5015f83d6" containerID="daed2ff09c57989ebf417f453b1c97a9e38554550e8b7bcfd620161001b8c813" exitCode=143 Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.501190 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-795d8f7875-gfh24" event={"ID":"2f8ae24b-258e-4d32-b312-99f5015f83d6","Type":"ContainerDied","Data":"daed2ff09c57989ebf417f453b1c97a9e38554550e8b7bcfd620161001b8c813"} Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.507368 4558 generic.go:334] "Generic (PLEG): container finished" podID="abb17bba-a50c-4b0f-941b-3a09a8bcac1e" containerID="71a8bccb6db5181577ff2da6379fafa98f3cfa0c15a2c71a8806e5f0bc7f567a" exitCode=0 Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.507531 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-5c7bf5978d-d2t27" event={"ID":"abb17bba-a50c-4b0f-941b-3a09a8bcac1e","Type":"ContainerDied","Data":"71a8bccb6db5181577ff2da6379fafa98f3cfa0c15a2c71a8806e5f0bc7f567a"} Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.512467 4558 generic.go:334] "Generic (PLEG): container finished" podID="413b348d-82a1-466a-8027-d2bfd6f97cb7" containerID="7ee0f2d8df26b1c3c3fbe8f6a262fe806d7e730b90e603316c261fe601cf8b66" exitCode=0 Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.512528 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"413b348d-82a1-466a-8027-d2bfd6f97cb7","Type":"ContainerDied","Data":"7ee0f2d8df26b1c3c3fbe8f6a262fe806d7e730b90e603316c261fe601cf8b66"} Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.521879 4558 generic.go:334] "Generic (PLEG): container finished" podID="fae781f7-d8cb-4f32-992f-43d7dac82655" containerID="097f5821ec52a17e967649b5b6967edea162a947874924e6600e4d07725cae05" exitCode=0 Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.521912 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fae781f7-d8cb-4f32-992f-43d7dac82655","Type":"ContainerDied","Data":"097f5821ec52a17e967649b5b6967edea162a947874924e6600e4d07725cae05"} Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.525058 4558 generic.go:334] "Generic (PLEG): container finished" podID="14af4a81-8568-425c-a209-4d90a042c365" containerID="c09143ce6ec1936f6f2c98c148ac4afc51f7b093ad7e905e871ff92ac19dfd9f" exitCode=143 Jan 20 17:21:35 crc 
kubenswrapper[4558]: I0120 17:21:35.525132 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-7b899ccb8d-cfqnl" event={"ID":"14af4a81-8568-425c-a209-4d90a042c365","Type":"ContainerDied","Data":"c09143ce6ec1936f6f2c98c148ac4afc51f7b093ad7e905e871ff92ac19dfd9f"} Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.527850 4558 generic.go:334] "Generic (PLEG): container finished" podID="9d974489-32f7-4541-9a58-6a215c5d2071" containerID="47bcce107135d29c478003906fbf3f85f03c5a969a0a67dbccdda34ff7841132" exitCode=0 Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.527885 4558 generic.go:334] "Generic (PLEG): container finished" podID="9d974489-32f7-4541-9a58-6a215c5d2071" containerID="cfbe93ac18d993e620cd8106e1b690070a764b00d594313c60a12f9f0b8d2527" exitCode=0 Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.527933 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-85c8dfdd74-sjsz2" event={"ID":"9d974489-32f7-4541-9a58-6a215c5d2071","Type":"ContainerDied","Data":"47bcce107135d29c478003906fbf3f85f03c5a969a0a67dbccdda34ff7841132"} Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.527963 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-85c8dfdd74-sjsz2" event={"ID":"9d974489-32f7-4541-9a58-6a215c5d2071","Type":"ContainerDied","Data":"cfbe93ac18d993e620cd8106e1b690070a764b00d594313c60a12f9f0b8d2527"} Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.531126 4558 generic.go:334] "Generic (PLEG): container finished" podID="bcc56301-83b3-4ff9-9290-ed029940a6da" containerID="b6872f845e094138749e172f30290826b82d6e116c124ea5839c4bcffe150159" exitCode=0 Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.531197 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-psfz5" event={"ID":"bcc56301-83b3-4ff9-9290-ed029940a6da","Type":"ContainerDied","Data":"b6872f845e094138749e172f30290826b82d6e116c124ea5839c4bcffe150159"} Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.531233 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-psfz5" event={"ID":"bcc56301-83b3-4ff9-9290-ed029940a6da","Type":"ContainerStarted","Data":"afbd24b3c392464f207695cb23d1a44e8143e7a1cf578f22d952abcc64204423"} Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.541137 4558 generic.go:334] "Generic (PLEG): container finished" podID="64462382-0fd6-4796-8079-d66c2bc27d95" containerID="0a8bffbad678e5cea118776d64f4cc6254e390d1759d980ea03c2b9491caaaa2" exitCode=0 Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.541205 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.541216 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"64462382-0fd6-4796-8079-d66c2bc27d95","Type":"ContainerDied","Data":"0a8bffbad678e5cea118776d64f4cc6254e390d1759d980ea03c2b9491caaaa2"} Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.542895 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"64462382-0fd6-4796-8079-d66c2bc27d95","Type":"ContainerDied","Data":"e4e63edeba8a080debb087c844f5f85560048679ab9a95850ea4ed2b9792ecc6"} Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.542932 4558 scope.go:117] "RemoveContainer" containerID="0a8bffbad678e5cea118776d64f4cc6254e390d1759d980ea03c2b9491caaaa2" Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.556037 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstackclient" Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.567038 4558 generic.go:334] "Generic (PLEG): container finished" podID="9adbb250-26e3-4e45-b60f-3b4c83d59ba3" containerID="648217de53b0389dbe7690a275b0c4b85571e0144e4fa2b93968f2f4497fa9f1" exitCode=0 Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.567088 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-2" event={"ID":"9adbb250-26e3-4e45-b60f-3b4c83d59ba3","Type":"ContainerDied","Data":"648217de53b0389dbe7690a275b0c4b85571e0144e4fa2b93968f2f4497fa9f1"} Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.567109 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-2" event={"ID":"9adbb250-26e3-4e45-b60f-3b4c83d59ba3","Type":"ContainerDied","Data":"6c0793a4831f49398284626ae40c77a9333710a5b62feb85f1da44a5bfd5cf9b"} Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.567177 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstack-cell1-galera-2" Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.579337 4558 generic.go:334] "Generic (PLEG): container finished" podID="20cdc3df-1626-4f56-8a92-9df64c1ed2fe" containerID="e156d2f38d6403388f01acbe07670bd0efccb2c563ca79b7e6061ec9d3596440" exitCode=143 Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.579419 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"20cdc3df-1626-4f56-8a92-9df64c1ed2fe","Type":"ContainerDied","Data":"e156d2f38d6403388f01acbe07670bd0efccb2c563ca79b7e6061ec9d3596440"} Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.581692 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/9adbb250-26e3-4e45-b60f-3b4c83d59ba3-config-data-default\") pod \"9adbb250-26e3-4e45-b60f-3b4c83d59ba3\" (UID: \"9adbb250-26e3-4e45-b60f-3b4c83d59ba3\") " Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.581841 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kxgbq\" (UniqueName: \"kubernetes.io/projected/9adbb250-26e3-4e45-b60f-3b4c83d59ba3-kube-api-access-kxgbq\") pod \"9adbb250-26e3-4e45-b60f-3b4c83d59ba3\" (UID: \"9adbb250-26e3-4e45-b60f-3b4c83d59ba3\") " Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.582860 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9adbb250-26e3-4e45-b60f-3b4c83d59ba3-combined-ca-bundle\") pod \"9adbb250-26e3-4e45-b60f-3b4c83d59ba3\" (UID: \"9adbb250-26e3-4e45-b60f-3b4c83d59ba3\") " Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.583322 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9adbb250-26e3-4e45-b60f-3b4c83d59ba3-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "9adbb250-26e3-4e45-b60f-3b4c83d59ba3" (UID: "9adbb250-26e3-4e45-b60f-3b4c83d59ba3"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.583579 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.586913 4558 generic.go:334] "Generic (PLEG): container finished" podID="4eb93277-301f-486e-a1e9-f323f9bd9cb4" containerID="9950c650a695726a02bff29b5e4e3575dd63e33b9799fdca78ec0fa848678043" exitCode=1 Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.587009 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9adbb250-26e3-4e45-b60f-3b4c83d59ba3-kube-api-access-kxgbq" (OuterVolumeSpecName: "kube-api-access-kxgbq") pod "9adbb250-26e3-4e45-b60f-3b4c83d59ba3" (UID: "9adbb250-26e3-4e45-b60f-3b4c83d59ba3"). InnerVolumeSpecName "kube-api-access-kxgbq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.587185 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-r6qz8" event={"ID":"4eb93277-301f-486e-a1e9-f323f9bd9cb4","Type":"ContainerDied","Data":"9950c650a695726a02bff29b5e4e3575dd63e33b9799fdca78ec0fa848678043"} Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.588740 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.589296 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"9adbb250-26e3-4e45-b60f-3b4c83d59ba3\" (UID: \"9adbb250-26e3-4e45-b60f-3b4c83d59ba3\") " Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.589437 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9adbb250-26e3-4e45-b60f-3b4c83d59ba3-operator-scripts\") pod \"9adbb250-26e3-4e45-b60f-3b4c83d59ba3\" (UID: \"9adbb250-26e3-4e45-b60f-3b4c83d59ba3\") " Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.589327 4558 scope.go:117] "RemoveContainer" containerID="9950c650a695726a02bff29b5e4e3575dd63e33b9799fdca78ec0fa848678043" Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.589689 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/9adbb250-26e3-4e45-b60f-3b4c83d59ba3-config-data-generated\") pod \"9adbb250-26e3-4e45-b60f-3b4c83d59ba3\" (UID: \"9adbb250-26e3-4e45-b60f-3b4c83d59ba3\") " Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.589795 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/9adbb250-26e3-4e45-b60f-3b4c83d59ba3-kolla-config\") pod \"9adbb250-26e3-4e45-b60f-3b4c83d59ba3\" (UID: \"9adbb250-26e3-4e45-b60f-3b4c83d59ba3\") " Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.589965 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/9adbb250-26e3-4e45-b60f-3b4c83d59ba3-galera-tls-certs\") pod \"9adbb250-26e3-4e45-b60f-3b4c83d59ba3\" (UID: \"9adbb250-26e3-4e45-b60f-3b4c83d59ba3\") " Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.590219 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9adbb250-26e3-4e45-b60f-3b4c83d59ba3-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "9adbb250-26e3-4e45-b60f-3b4c83d59ba3" (UID: "9adbb250-26e3-4e45-b60f-3b4c83d59ba3"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.590468 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9adbb250-26e3-4e45-b60f-3b4c83d59ba3-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "9adbb250-26e3-4e45-b60f-3b4c83d59ba3" (UID: "9adbb250-26e3-4e45-b60f-3b4c83d59ba3"). InnerVolumeSpecName "config-data-generated". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.591027 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9adbb250-26e3-4e45-b60f-3b4c83d59ba3-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "9adbb250-26e3-4e45-b60f-3b4c83d59ba3" (UID: "9adbb250-26e3-4e45-b60f-3b4c83d59ba3"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.591822 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9adbb250-26e3-4e45-b60f-3b4c83d59ba3-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.591856 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/9adbb250-26e3-4e45-b60f-3b4c83d59ba3-config-data-generated\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.591870 4558 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/9adbb250-26e3-4e45-b60f-3b4c83d59ba3-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.591881 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/9adbb250-26e3-4e45-b60f-3b4c83d59ba3-config-data-default\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.591892 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kxgbq\" (UniqueName: \"kubernetes.io/projected/9adbb250-26e3-4e45-b60f-3b4c83d59ba3-kube-api-access-kxgbq\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.594298 4558 scope.go:117] "RemoveContainer" containerID="0a8bffbad678e5cea118776d64f4cc6254e390d1759d980ea03c2b9491caaaa2" Jan 20 17:21:35 crc kubenswrapper[4558]: E0120 17:21:35.594792 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0a8bffbad678e5cea118776d64f4cc6254e390d1759d980ea03c2b9491caaaa2\": container with ID starting with 0a8bffbad678e5cea118776d64f4cc6254e390d1759d980ea03c2b9491caaaa2 not found: ID does not exist" containerID="0a8bffbad678e5cea118776d64f4cc6254e390d1759d980ea03c2b9491caaaa2" Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.594836 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0a8bffbad678e5cea118776d64f4cc6254e390d1759d980ea03c2b9491caaaa2"} err="failed to get container status \"0a8bffbad678e5cea118776d64f4cc6254e390d1759d980ea03c2b9491caaaa2\": rpc error: code = NotFound desc = could not find container \"0a8bffbad678e5cea118776d64f4cc6254e390d1759d980ea03c2b9491caaaa2\": container with ID starting with 0a8bffbad678e5cea118776d64f4cc6254e390d1759d980ea03c2b9491caaaa2 not found: ID does not exist" Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.594866 4558 scope.go:117] "RemoveContainer" containerID="077461e7717ab6966108b72a1527abc834132888244fc51cc68cc84642df8e4e" Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.606342 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage14-crc" (OuterVolumeSpecName: "mysql-db") pod "9adbb250-26e3-4e45-b60f-3b4c83d59ba3" 
(UID: "9adbb250-26e3-4e45-b60f-3b4c83d59ba3"). InnerVolumeSpecName "local-storage14-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.630402 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9adbb250-26e3-4e45-b60f-3b4c83d59ba3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9adbb250-26e3-4e45-b60f-3b4c83d59ba3" (UID: "9adbb250-26e3-4e45-b60f-3b4c83d59ba3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.650558 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9adbb250-26e3-4e45-b60f-3b4c83d59ba3-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "9adbb250-26e3-4e45-b60f-3b4c83d59ba3" (UID: "9adbb250-26e3-4e45-b60f-3b4c83d59ba3"). InnerVolumeSpecName "galera-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.674825 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.675216 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="0378e8e5-48af-48c3-9100-80622a77533b" containerName="ceilometer-central-agent" containerID="cri-o://6afebd361ef0e45cdb43a62661e5fbd5a98fd241267936b37385d0bfde77a3bd" gracePeriod=30 Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.675846 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="0378e8e5-48af-48c3-9100-80622a77533b" containerName="proxy-httpd" containerID="cri-o://d207a7762509b3d4788233142ef6435d9d2416a2d5fceb734f0d9d4e3721e67e" gracePeriod=30 Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.675960 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="0378e8e5-48af-48c3-9100-80622a77533b" containerName="sg-core" containerID="cri-o://75449ae33552c994c4ad5b2bca2b18138daeaa442df1920c73602222ea16d437" gracePeriod=30 Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.676055 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="0378e8e5-48af-48c3-9100-80622a77533b" containerName="ceilometer-notification-agent" containerID="cri-o://253959d11bdee04e0c4129162924f6cddfeb4ed19ae0ee14fb63f2b31dd7810a" gracePeriod=30 Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.694619 4558 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/9adbb250-26e3-4e45-b60f-3b4c83d59ba3-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.694699 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9adbb250-26e3-4e45-b60f-3b4c83d59ba3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.694775 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") on node \"crc\" " Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.698491 4558 scope.go:117] "RemoveContainer" 
containerID="648217de53b0389dbe7690a275b0c4b85571e0144e4fa2b93968f2f4497fa9f1" Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.706240 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.711412 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/kube-state-metrics-0" podUID="f07ba882-d06f-4937-ac0a-e66fcd719b8d" containerName="kube-state-metrics" containerID="cri-o://63c838d8c51346650a84faa8ff095a9f00ae13df2999dcf2d156cd7c1b906029" gracePeriod=30 Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.722520 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage14-crc" (UniqueName: "kubernetes.io/local-volume/local-storage14-crc") on node "crc" Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.800926 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.806905 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-f51d-account-create-update-tbxbn"] Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.821540 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-proxy-85c8dfdd74-sjsz2" Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.822053 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-f51d-account-create-update-tbxbn"] Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.835410 4558 scope.go:117] "RemoveContainer" containerID="4a003c3cd6aea09a3d0e2dc9693fd65da716ad6878942d1f8aad4e5a3a7fe6b9" Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.840030 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-f51d-account-create-update-wwmpb"] Jan 20 17:21:35 crc kubenswrapper[4558]: E0120 17:21:35.841519 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64462382-0fd6-4796-8079-d66c2bc27d95" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.841541 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="64462382-0fd6-4796-8079-d66c2bc27d95" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:21:35 crc kubenswrapper[4558]: E0120 17:21:35.841559 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d974489-32f7-4541-9a58-6a215c5d2071" containerName="proxy-httpd" Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.841565 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d974489-32f7-4541-9a58-6a215c5d2071" containerName="proxy-httpd" Jan 20 17:21:35 crc kubenswrapper[4558]: E0120 17:21:35.841590 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9adbb250-26e3-4e45-b60f-3b4c83d59ba3" containerName="galera" Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.841607 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9adbb250-26e3-4e45-b60f-3b4c83d59ba3" containerName="galera" Jan 20 17:21:35 crc kubenswrapper[4558]: E0120 17:21:35.841633 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9adbb250-26e3-4e45-b60f-3b4c83d59ba3" containerName="mysql-bootstrap" Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.841639 4558 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="9adbb250-26e3-4e45-b60f-3b4c83d59ba3" containerName="mysql-bootstrap" Jan 20 17:21:35 crc kubenswrapper[4558]: E0120 17:21:35.841657 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d974489-32f7-4541-9a58-6a215c5d2071" containerName="proxy-server" Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.841664 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d974489-32f7-4541-9a58-6a215c5d2071" containerName="proxy-server" Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.841875 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="64462382-0fd6-4796-8079-d66c2bc27d95" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.841889 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="9d974489-32f7-4541-9a58-6a215c5d2071" containerName="proxy-httpd" Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.841913 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="9adbb250-26e3-4e45-b60f-3b4c83d59ba3" containerName="galera" Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.841924 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="9d974489-32f7-4541-9a58-6a215c5d2071" containerName="proxy-server" Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.847130 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-f51d-account-create-update-wwmpb" Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.850483 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-db-secret" Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.875547 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-f51d-account-create-update-wwmpb"] Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.888298 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-v4bgz"] Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.904681 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d974489-32f7-4541-9a58-6a215c5d2071-combined-ca-bundle\") pod \"9d974489-32f7-4541-9a58-6a215c5d2071\" (UID: \"9d974489-32f7-4541-9a58-6a215c5d2071\") " Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.904773 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9d974489-32f7-4541-9a58-6a215c5d2071-internal-tls-certs\") pod \"9d974489-32f7-4541-9a58-6a215c5d2071\" (UID: \"9d974489-32f7-4541-9a58-6a215c5d2071\") " Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.904961 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d974489-32f7-4541-9a58-6a215c5d2071-config-data\") pod \"9d974489-32f7-4541-9a58-6a215c5d2071\" (UID: \"9d974489-32f7-4541-9a58-6a215c5d2071\") " Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.905009 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9d974489-32f7-4541-9a58-6a215c5d2071-run-httpd\") pod \"9d974489-32f7-4541-9a58-6a215c5d2071\" (UID: \"9d974489-32f7-4541-9a58-6a215c5d2071\") " Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.905070 4558 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-gm7bd\" (UniqueName: \"kubernetes.io/projected/9d974489-32f7-4541-9a58-6a215c5d2071-kube-api-access-gm7bd\") pod \"9d974489-32f7-4541-9a58-6a215c5d2071\" (UID: \"9d974489-32f7-4541-9a58-6a215c5d2071\") " Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.905144 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/9d974489-32f7-4541-9a58-6a215c5d2071-etc-swift\") pod \"9d974489-32f7-4541-9a58-6a215c5d2071\" (UID: \"9d974489-32f7-4541-9a58-6a215c5d2071\") " Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.905207 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9d974489-32f7-4541-9a58-6a215c5d2071-public-tls-certs\") pod \"9d974489-32f7-4541-9a58-6a215c5d2071\" (UID: \"9d974489-32f7-4541-9a58-6a215c5d2071\") " Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.905269 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9d974489-32f7-4541-9a58-6a215c5d2071-log-httpd\") pod \"9d974489-32f7-4541-9a58-6a215c5d2071\" (UID: \"9d974489-32f7-4541-9a58-6a215c5d2071\") " Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.906029 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n74cc\" (UniqueName: \"kubernetes.io/projected/32b13960-8f0f-47f6-b529-37f7a918fcd6-kube-api-access-n74cc\") pod \"keystone-f51d-account-create-update-wwmpb\" (UID: \"32b13960-8f0f-47f6-b529-37f7a918fcd6\") " pod="openstack-kuttl-tests/keystone-f51d-account-create-update-wwmpb" Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.906121 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/32b13960-8f0f-47f6-b529-37f7a918fcd6-operator-scripts\") pod \"keystone-f51d-account-create-update-wwmpb\" (UID: \"32b13960-8f0f-47f6-b529-37f7a918fcd6\") " pod="openstack-kuttl-tests/keystone-f51d-account-create-update-wwmpb" Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.912512 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9d974489-32f7-4541-9a58-6a215c5d2071-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "9d974489-32f7-4541-9a58-6a215c5d2071" (UID: "9d974489-32f7-4541-9a58-6a215c5d2071"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.918093 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9d974489-32f7-4541-9a58-6a215c5d2071-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "9d974489-32f7-4541-9a58-6a215c5d2071" (UID: "9d974489-32f7-4541-9a58-6a215c5d2071"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.930357 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d974489-32f7-4541-9a58-6a215c5d2071-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "9d974489-32f7-4541-9a58-6a215c5d2071" (UID: "9d974489-32f7-4541-9a58-6a215c5d2071"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.930442 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-v4bgz"] Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.963365 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-db-sync-qq5nx"] Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.979539 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d974489-32f7-4541-9a58-6a215c5d2071-kube-api-access-gm7bd" (OuterVolumeSpecName: "kube-api-access-gm7bd") pod "9d974489-32f7-4541-9a58-6a215c5d2071" (UID: "9d974489-32f7-4541-9a58-6a215c5d2071"). InnerVolumeSpecName "kube-api-access-gm7bd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:21:35 crc kubenswrapper[4558]: I0120 17:21:35.984054 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-db-sync-qq5nx"] Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.031226 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d974489-32f7-4541-9a58-6a215c5d2071-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9d974489-32f7-4541-9a58-6a215c5d2071" (UID: "9d974489-32f7-4541-9a58-6a215c5d2071"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.041472 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/32b13960-8f0f-47f6-b529-37f7a918fcd6-operator-scripts\") pod \"keystone-f51d-account-create-update-wwmpb\" (UID: \"32b13960-8f0f-47f6-b529-37f7a918fcd6\") " pod="openstack-kuttl-tests/keystone-f51d-account-create-update-wwmpb" Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.041577 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-5585fd7cd8-gk5qt"] Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.041918 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/keystone-5585fd7cd8-gk5qt" podUID="ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9" containerName="keystone-api" containerID="cri-o://b488e52ae9a816a6a1a205b5450c535dd59fdb90d6345dcef8eb1e186a1d90c5" gracePeriod=30 Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.042227 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n74cc\" (UniqueName: \"kubernetes.io/projected/32b13960-8f0f-47f6-b529-37f7a918fcd6-kube-api-access-n74cc\") pod \"keystone-f51d-account-create-update-wwmpb\" (UID: \"32b13960-8f0f-47f6-b529-37f7a918fcd6\") " pod="openstack-kuttl-tests/keystone-f51d-account-create-update-wwmpb" Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.042473 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9d974489-32f7-4541-9a58-6a215c5d2071-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.042495 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gm7bd\" (UniqueName: \"kubernetes.io/projected/9d974489-32f7-4541-9a58-6a215c5d2071-kube-api-access-gm7bd\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.042506 4558 reconciler_common.go:293] "Volume detached for volume 
\"etc-swift\" (UniqueName: \"kubernetes.io/projected/9d974489-32f7-4541-9a58-6a215c5d2071-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.042515 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9d974489-32f7-4541-9a58-6a215c5d2071-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:36 crc kubenswrapper[4558]: E0120 17:21:36.042517 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.042531 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d974489-32f7-4541-9a58-6a215c5d2071-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:36 crc kubenswrapper[4558]: E0120 17:21:36.042588 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/32b13960-8f0f-47f6-b529-37f7a918fcd6-operator-scripts podName:32b13960-8f0f-47f6-b529-37f7a918fcd6 nodeName:}" failed. No retries permitted until 2026-01-20 17:21:36.542570245 +0000 UTC m=+2390.302908212 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/32b13960-8f0f-47f6-b529-37f7a918fcd6-operator-scripts") pod "keystone-f51d-account-create-update-wwmpb" (UID: "32b13960-8f0f-47f6-b529-37f7a918fcd6") : configmap "openstack-scripts" not found Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.044313 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d974489-32f7-4541-9a58-6a215c5d2071-config-data" (OuterVolumeSpecName: "config-data") pod "9d974489-32f7-4541-9a58-6a215c5d2071" (UID: "9d974489-32f7-4541-9a58-6a215c5d2071"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:36 crc kubenswrapper[4558]: E0120 17:21:36.046073 4558 projected.go:194] Error preparing data for projected volume kube-api-access-n74cc for pod openstack-kuttl-tests/keystone-f51d-account-create-update-wwmpb: failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 17:21:36 crc kubenswrapper[4558]: E0120 17:21:36.046148 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/32b13960-8f0f-47f6-b529-37f7a918fcd6-kube-api-access-n74cc podName:32b13960-8f0f-47f6-b529-37f7a918fcd6 nodeName:}" failed. No retries permitted until 2026-01-20 17:21:36.546125153 +0000 UTC m=+2390.306463120 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-n74cc" (UniqueName: "kubernetes.io/projected/32b13960-8f0f-47f6-b529-37f7a918fcd6-kube-api-access-n74cc") pod "keystone-f51d-account-create-update-wwmpb" (UID: "32b13960-8f0f-47f6-b529-37f7a918fcd6") : failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.049236 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d974489-32f7-4541-9a58-6a215c5d2071-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "9d974489-32f7-4541-9a58-6a215c5d2071" (UID: "9d974489-32f7-4541-9a58-6a215c5d2071"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.063144 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-galera-1"] Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.067787 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-galera-2"] Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.076132 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d974489-32f7-4541-9a58-6a215c5d2071-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "9d974489-32f7-4541-9a58-6a215c5d2071" (UID: "9d974489-32f7-4541-9a58-6a215c5d2071"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.091035 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.106797 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-r6qz8"] Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.114900 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-db-create-9dzpb"] Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.119447 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-db-create-9dzpb"] Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.134136 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-f51d-account-create-update-wwmpb"] Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.140305 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/placement-6b7cbc9658-z4t27" podUID="f2c69e78-4cbd-41a8-810d-bdc4f56cabfd" containerName="placement-log" probeResult="failure" output="Get \"https://10.217.0.167:8778/\": read tcp 10.217.0.2:55798->10.217.0.167:8778: read: connection reset by peer" Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.140311 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/placement-6b7cbc9658-z4t27" podUID="f2c69e78-4cbd-41a8-810d-bdc4f56cabfd" containerName="placement-api" probeResult="failure" output="Get \"https://10.217.0.167:8778/\": read tcp 10.217.0.2:55794->10.217.0.167:8778: read: connection reset by peer" Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.146364 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d974489-32f7-4541-9a58-6a215c5d2071-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.146390 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9d974489-32f7-4541-9a58-6a215c5d2071-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.146401 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9d974489-32f7-4541-9a58-6a215c5d2071-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.150840 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/cinder-api-0" podUID="e72083f2-d324-432e-9bb0-5f44f2023489" containerName="cinder-api" probeResult="failure" 
output="Get \"https://10.217.0.175:8776/healthcheck\": read tcp 10.217.0.2:54160->10.217.0.175:8776: read: connection reset by peer" Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.295033 4558 scope.go:117] "RemoveContainer" containerID="648217de53b0389dbe7690a275b0c4b85571e0144e4fa2b93968f2f4497fa9f1" Jan 20 17:21:36 crc kubenswrapper[4558]: E0120 17:21:36.296362 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"648217de53b0389dbe7690a275b0c4b85571e0144e4fa2b93968f2f4497fa9f1\": container with ID starting with 648217de53b0389dbe7690a275b0c4b85571e0144e4fa2b93968f2f4497fa9f1 not found: ID does not exist" containerID="648217de53b0389dbe7690a275b0c4b85571e0144e4fa2b93968f2f4497fa9f1" Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.296409 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"648217de53b0389dbe7690a275b0c4b85571e0144e4fa2b93968f2f4497fa9f1"} err="failed to get container status \"648217de53b0389dbe7690a275b0c4b85571e0144e4fa2b93968f2f4497fa9f1\": rpc error: code = NotFound desc = could not find container \"648217de53b0389dbe7690a275b0c4b85571e0144e4fa2b93968f2f4497fa9f1\": container with ID starting with 648217de53b0389dbe7690a275b0c4b85571e0144e4fa2b93968f2f4497fa9f1 not found: ID does not exist" Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.296442 4558 scope.go:117] "RemoveContainer" containerID="4a003c3cd6aea09a3d0e2dc9693fd65da716ad6878942d1f8aad4e5a3a7fe6b9" Jan 20 17:21:36 crc kubenswrapper[4558]: E0120 17:21:36.297187 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4a003c3cd6aea09a3d0e2dc9693fd65da716ad6878942d1f8aad4e5a3a7fe6b9\": container with ID starting with 4a003c3cd6aea09a3d0e2dc9693fd65da716ad6878942d1f8aad4e5a3a7fe6b9 not found: ID does not exist" containerID="4a003c3cd6aea09a3d0e2dc9693fd65da716ad6878942d1f8aad4e5a3a7fe6b9" Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.297226 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4a003c3cd6aea09a3d0e2dc9693fd65da716ad6878942d1f8aad4e5a3a7fe6b9"} err="failed to get container status \"4a003c3cd6aea09a3d0e2dc9693fd65da716ad6878942d1f8aad4e5a3a7fe6b9\": rpc error: code = NotFound desc = could not find container \"4a003c3cd6aea09a3d0e2dc9693fd65da716ad6878942d1f8aad4e5a3a7fe6b9\": container with ID starting with 4a003c3cd6aea09a3d0e2dc9693fd65da716ad6878942d1f8aad4e5a3a7fe6b9 not found: ID does not exist" Jan 20 17:21:36 crc kubenswrapper[4558]: E0120 17:21:36.317434 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[kube-api-access-n74cc operator-scripts], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack-kuttl-tests/keystone-f51d-account-create-update-wwmpb" podUID="32b13960-8f0f-47f6-b529-37f7a918fcd6" Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.325282 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-36de-account-create-update-n4ql4" Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.333307 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-2"] Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.334157 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-247a-account-create-update-8drms" Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.345652 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-2"] Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.361868 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/openstack-galera-2" podUID="fa03920f-0e4b-458f-956b-b658786f9792" containerName="galera" containerID="cri-o://a62b1eff4cb0e50771699ddb63f1bffd4fb9ff28cbdf146a5a3c121d2a2e02fa" gracePeriod=30 Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.406214 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.451667 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8hcpc\" (UniqueName: \"kubernetes.io/projected/52e65b62-cbeb-475e-b3ec-2a6259c44b25-kube-api-access-8hcpc\") pod \"52e65b62-cbeb-475e-b3ec-2a6259c44b25\" (UID: \"52e65b62-cbeb-475e-b3ec-2a6259c44b25\") " Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.451911 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/25e48519-09af-42b4-835a-485273267aca-operator-scripts\") pod \"25e48519-09af-42b4-835a-485273267aca\" (UID: \"25e48519-09af-42b4-835a-485273267aca\") " Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.451940 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/52e65b62-cbeb-475e-b3ec-2a6259c44b25-operator-scripts\") pod \"52e65b62-cbeb-475e-b3ec-2a6259c44b25\" (UID: \"52e65b62-cbeb-475e-b3ec-2a6259c44b25\") " Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.452088 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6vr6p\" (UniqueName: \"kubernetes.io/projected/25e48519-09af-42b4-835a-485273267aca-kube-api-access-6vr6p\") pod \"25e48519-09af-42b4-835a-485273267aca\" (UID: \"25e48519-09af-42b4-835a-485273267aca\") " Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.452648 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e48519-09af-42b4-835a-485273267aca-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "25e48519-09af-42b4-835a-485273267aca" (UID: "25e48519-09af-42b4-835a-485273267aca"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.452808 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/25e48519-09af-42b4-835a-485273267aca-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.453576 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/52e65b62-cbeb-475e-b3ec-2a6259c44b25-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "52e65b62-cbeb-475e-b3ec-2a6259c44b25" (UID: "52e65b62-cbeb-475e-b3ec-2a6259c44b25"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.467405 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e48519-09af-42b4-835a-485273267aca-kube-api-access-6vr6p" (OuterVolumeSpecName: "kube-api-access-6vr6p") pod "25e48519-09af-42b4-835a-485273267aca" (UID: "25e48519-09af-42b4-835a-485273267aca"). InnerVolumeSpecName "kube-api-access-6vr6p". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.469295 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/52e65b62-cbeb-475e-b3ec-2a6259c44b25-kube-api-access-8hcpc" (OuterVolumeSpecName: "kube-api-access-8hcpc") pod "52e65b62-cbeb-475e-b3ec-2a6259c44b25" (UID: "52e65b62-cbeb-475e-b3ec-2a6259c44b25"). InnerVolumeSpecName "kube-api-access-8hcpc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:21:36 crc kubenswrapper[4558]: E0120 17:21:36.512064 4558 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9adbb250_26e3_4e45_b60f_3b4c83d59ba3.slice/crio-6c0793a4831f49398284626ae40c77a9333710a5b62feb85f1da44a5bfd5cf9b\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0378e8e5_48af_48c3_9100_80622a77533b.slice/crio-conmon-6afebd361ef0e45cdb43a62661e5fbd5a98fd241267936b37385d0bfde77a3bd.scope\": RecentStats: unable to find data in memory cache]" Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.515319 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/openstack-cell1-galera-1" podUID="872ae103-12d2-4686-aa02-4e3375eb510a" containerName="galera" containerID="cri-o://3c937b5e3f5d38edf270dee165b67ae6bdbf0a55f6bf5b7f0a56a325dd63cec2" gracePeriod=28 Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.556053 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-khdmg\" (UniqueName: \"kubernetes.io/projected/f07ba882-d06f-4937-ac0a-e66fcd719b8d-kube-api-access-khdmg\") pod \"f07ba882-d06f-4937-ac0a-e66fcd719b8d\" (UID: \"f07ba882-d06f-4937-ac0a-e66fcd719b8d\") " Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.556219 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/f07ba882-d06f-4937-ac0a-e66fcd719b8d-kube-state-metrics-tls-certs\") pod \"f07ba882-d06f-4937-ac0a-e66fcd719b8d\" (UID: \"f07ba882-d06f-4937-ac0a-e66fcd719b8d\") " Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.556363 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f07ba882-d06f-4937-ac0a-e66fcd719b8d-combined-ca-bundle\") pod \"f07ba882-d06f-4937-ac0a-e66fcd719b8d\" (UID: \"f07ba882-d06f-4937-ac0a-e66fcd719b8d\") " Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.556432 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/f07ba882-d06f-4937-ac0a-e66fcd719b8d-kube-state-metrics-tls-config\") pod \"f07ba882-d06f-4937-ac0a-e66fcd719b8d\" (UID: \"f07ba882-d06f-4937-ac0a-e66fcd719b8d\") " Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.556849 
4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n74cc\" (UniqueName: \"kubernetes.io/projected/32b13960-8f0f-47f6-b529-37f7a918fcd6-kube-api-access-n74cc\") pod \"keystone-f51d-account-create-update-wwmpb\" (UID: \"32b13960-8f0f-47f6-b529-37f7a918fcd6\") " pod="openstack-kuttl-tests/keystone-f51d-account-create-update-wwmpb" Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.556901 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/32b13960-8f0f-47f6-b529-37f7a918fcd6-operator-scripts\") pod \"keystone-f51d-account-create-update-wwmpb\" (UID: \"32b13960-8f0f-47f6-b529-37f7a918fcd6\") " pod="openstack-kuttl-tests/keystone-f51d-account-create-update-wwmpb" Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.557049 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/52e65b62-cbeb-475e-b3ec-2a6259c44b25-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.557061 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6vr6p\" (UniqueName: \"kubernetes.io/projected/25e48519-09af-42b4-835a-485273267aca-kube-api-access-6vr6p\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.557071 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8hcpc\" (UniqueName: \"kubernetes.io/projected/52e65b62-cbeb-475e-b3ec-2a6259c44b25-kube-api-access-8hcpc\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:36 crc kubenswrapper[4558]: E0120 17:21:36.557128 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 17:21:36 crc kubenswrapper[4558]: E0120 17:21:36.557192 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/32b13960-8f0f-47f6-b529-37f7a918fcd6-operator-scripts podName:32b13960-8f0f-47f6-b529-37f7a918fcd6 nodeName:}" failed. No retries permitted until 2026-01-20 17:21:37.557154184 +0000 UTC m=+2391.317492140 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/32b13960-8f0f-47f6-b529-37f7a918fcd6-operator-scripts") pod "keystone-f51d-account-create-update-wwmpb" (UID: "32b13960-8f0f-47f6-b529-37f7a918fcd6") : configmap "openstack-scripts" not found Jan 20 17:21:36 crc kubenswrapper[4558]: E0120 17:21:36.561722 4558 projected.go:194] Error preparing data for projected volume kube-api-access-n74cc for pod openstack-kuttl-tests/keystone-f51d-account-create-update-wwmpb: failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 17:21:36 crc kubenswrapper[4558]: E0120 17:21:36.561813 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/32b13960-8f0f-47f6-b529-37f7a918fcd6-kube-api-access-n74cc podName:32b13960-8f0f-47f6-b529-37f7a918fcd6 nodeName:}" failed. No retries permitted until 2026-01-20 17:21:37.561778351 +0000 UTC m=+2391.322116318 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-n74cc" (UniqueName: "kubernetes.io/projected/32b13960-8f0f-47f6-b529-37f7a918fcd6-kube-api-access-n74cc") pod "keystone-f51d-account-create-update-wwmpb" (UID: "32b13960-8f0f-47f6-b529-37f7a918fcd6") : failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.562148 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f07ba882-d06f-4937-ac0a-e66fcd719b8d-kube-api-access-khdmg" (OuterVolumeSpecName: "kube-api-access-khdmg") pod "f07ba882-d06f-4937-ac0a-e66fcd719b8d" (UID: "f07ba882-d06f-4937-ac0a-e66fcd719b8d"). InnerVolumeSpecName "kube-api-access-khdmg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.568185 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.590303 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="280c8a81-3eab-49fe-b275-d7238dbd7e0f" path="/var/lib/kubelet/pods/280c8a81-3eab-49fe-b275-d7238dbd7e0f/volumes" Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.590810 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="346ea378-eb04-403c-8c0f-0f8c3d2debe8" path="/var/lib/kubelet/pods/346ea378-eb04-403c-8c0f-0f8c3d2debe8/volumes" Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.591308 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="46ce5c99-2891-494b-a8fe-291f53beeeff" path="/var/lib/kubelet/pods/46ce5c99-2891-494b-a8fe-291f53beeeff/volumes" Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.591767 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="64462382-0fd6-4796-8079-d66c2bc27d95" path="/var/lib/kubelet/pods/64462382-0fd6-4796-8079-d66c2bc27d95/volumes" Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.594310 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f07ba882-d06f-4937-ac0a-e66fcd719b8d-kube-state-metrics-tls-config" (OuterVolumeSpecName: "kube-state-metrics-tls-config") pod "f07ba882-d06f-4937-ac0a-e66fcd719b8d" (UID: "f07ba882-d06f-4937-ac0a-e66fcd719b8d"). InnerVolumeSpecName "kube-state-metrics-tls-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.598361 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f07ba882-d06f-4937-ac0a-e66fcd719b8d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f07ba882-d06f-4937-ac0a-e66fcd719b8d" (UID: "f07ba882-d06f-4937-ac0a-e66fcd719b8d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.600789 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6fa38873-97d7-458b-8963-ceeae696c9af" path="/var/lib/kubelet/pods/6fa38873-97d7-458b-8963-ceeae696c9af/volumes" Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.601360 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7490595f-9782-44f0-bfb1-811f2f1d65e3" path="/var/lib/kubelet/pods/7490595f-9782-44f0-bfb1-811f2f1d65e3/volumes" Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.601974 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9adbb250-26e3-4e45-b60f-3b4c83d59ba3" path="/var/lib/kubelet/pods/9adbb250-26e3-4e45-b60f-3b4c83d59ba3/volumes" Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.608117 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-36de-account-create-update-n4ql4" Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.621682 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-36de-account-create-update-n4ql4" event={"ID":"52e65b62-cbeb-475e-b3ec-2a6259c44b25","Type":"ContainerDied","Data":"d83892ba5e59e6bfcd672ecc79b79b0cb0fffed58e60b1769c488844ef02fbeb"} Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.637895 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-r6qz8" event={"ID":"4eb93277-301f-486e-a1e9-f323f9bd9cb4","Type":"ContainerStarted","Data":"68c910efbea1cdab426d9f23de2d058f94a3d54c56eccdecb828d1307a55edd1"} Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.659033 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/swift-proxy-85c8dfdd74-sjsz2" Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.660551 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-85c8dfdd74-sjsz2" event={"ID":"9d974489-32f7-4541-9a58-6a215c5d2071","Type":"ContainerDied","Data":"d8c405b29a445cc4fb8281d7777849ac6bf770a4845bc92a7f21671be550d05b"} Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.660611 4558 scope.go:117] "RemoveContainer" containerID="47bcce107135d29c478003906fbf3f85f03c5a969a0a67dbccdda34ff7841132" Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.665056 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-247a-account-create-update-8drms" event={"ID":"25e48519-09af-42b4-835a-485273267aca","Type":"ContainerDied","Data":"a7926e3301a17486714d5610f57524ed1433920eee2805d317efcb8cd65b12d3"} Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.668197 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e72083f2-d324-432e-9bb0-5f44f2023489-etc-machine-id\") pod \"e72083f2-d324-432e-9bb0-5f44f2023489\" (UID: \"e72083f2-d324-432e-9bb0-5f44f2023489\") " Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.668254 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e72083f2-d324-432e-9bb0-5f44f2023489-combined-ca-bundle\") pod \"e72083f2-d324-432e-9bb0-5f44f2023489\" (UID: \"e72083f2-d324-432e-9bb0-5f44f2023489\") " Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.668307 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e72083f2-d324-432e-9bb0-5f44f2023489-public-tls-certs\") pod \"e72083f2-d324-432e-9bb0-5f44f2023489\" (UID: \"e72083f2-d324-432e-9bb0-5f44f2023489\") " Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.668657 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8lt2s\" (UniqueName: \"kubernetes.io/projected/e72083f2-d324-432e-9bb0-5f44f2023489-kube-api-access-8lt2s\") pod \"e72083f2-d324-432e-9bb0-5f44f2023489\" (UID: \"e72083f2-d324-432e-9bb0-5f44f2023489\") " Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.668741 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e72083f2-d324-432e-9bb0-5f44f2023489-internal-tls-certs\") pod \"e72083f2-d324-432e-9bb0-5f44f2023489\" (UID: \"e72083f2-d324-432e-9bb0-5f44f2023489\") " Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.668773 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e72083f2-d324-432e-9bb0-5f44f2023489-scripts\") pod \"e72083f2-d324-432e-9bb0-5f44f2023489\" (UID: \"e72083f2-d324-432e-9bb0-5f44f2023489\") " Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.668808 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e72083f2-d324-432e-9bb0-5f44f2023489-config-data-custom\") pod \"e72083f2-d324-432e-9bb0-5f44f2023489\" (UID: \"e72083f2-d324-432e-9bb0-5f44f2023489\") " Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.669159 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-247a-account-create-update-8drms" Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.675185 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e72083f2-d324-432e-9bb0-5f44f2023489-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "e72083f2-d324-432e-9bb0-5f44f2023489" (UID: "e72083f2-d324-432e-9bb0-5f44f2023489"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.676900 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f07ba882-d06f-4937-ac0a-e66fcd719b8d-kube-state-metrics-tls-certs" (OuterVolumeSpecName: "kube-state-metrics-tls-certs") pod "f07ba882-d06f-4937-ac0a-e66fcd719b8d" (UID: "f07ba882-d06f-4937-ac0a-e66fcd719b8d"). InnerVolumeSpecName "kube-state-metrics-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.680131 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e72083f2-d324-432e-9bb0-5f44f2023489-config-data\") pod \"e72083f2-d324-432e-9bb0-5f44f2023489\" (UID: \"e72083f2-d324-432e-9bb0-5f44f2023489\") " Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.680196 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e72083f2-d324-432e-9bb0-5f44f2023489-logs\") pod \"e72083f2-d324-432e-9bb0-5f44f2023489\" (UID: \"e72083f2-d324-432e-9bb0-5f44f2023489\") " Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.681130 4558 reconciler_common.go:293] "Volume detached for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/f07ba882-d06f-4937-ac0a-e66fcd719b8d-kube-state-metrics-tls-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.681144 4558 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e72083f2-d324-432e-9bb0-5f44f2023489-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.681155 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-khdmg\" (UniqueName: \"kubernetes.io/projected/f07ba882-d06f-4937-ac0a-e66fcd719b8d-kube-api-access-khdmg\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.681223 4558 reconciler_common.go:293] "Volume detached for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/f07ba882-d06f-4937-ac0a-e66fcd719b8d-kube-state-metrics-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.681233 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f07ba882-d06f-4937-ac0a-e66fcd719b8d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.681898 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e72083f2-d324-432e-9bb0-5f44f2023489-logs" (OuterVolumeSpecName: "logs") pod "e72083f2-d324-432e-9bb0-5f44f2023489" (UID: "e72083f2-d324-432e-9bb0-5f44f2023489"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.684456 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e72083f2-d324-432e-9bb0-5f44f2023489-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "e72083f2-d324-432e-9bb0-5f44f2023489" (UID: "e72083f2-d324-432e-9bb0-5f44f2023489"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.692376 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e72083f2-d324-432e-9bb0-5f44f2023489-kube-api-access-8lt2s" (OuterVolumeSpecName: "kube-api-access-8lt2s") pod "e72083f2-d324-432e-9bb0-5f44f2023489" (UID: "e72083f2-d324-432e-9bb0-5f44f2023489"). InnerVolumeSpecName "kube-api-access-8lt2s". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.692676 4558 generic.go:334] "Generic (PLEG): container finished" podID="f2c69e78-4cbd-41a8-810d-bdc4f56cabfd" containerID="fd0522a1018450fa8aaf5f378cfa36bd4a81f9a88e9be2f7d7e743bbb0155ccd" exitCode=0 Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.692731 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-6b7cbc9658-z4t27" event={"ID":"f2c69e78-4cbd-41a8-810d-bdc4f56cabfd","Type":"ContainerDied","Data":"fd0522a1018450fa8aaf5f378cfa36bd4a81f9a88e9be2f7d7e743bbb0155ccd"} Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.695791 4558 generic.go:334] "Generic (PLEG): container finished" podID="e72083f2-d324-432e-9bb0-5f44f2023489" containerID="76ea6272fca9c46b59f2db4c5161f6d34060b5a40bd0d5c5b65cea949a4dad9e" exitCode=0 Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.695829 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"e72083f2-d324-432e-9bb0-5f44f2023489","Type":"ContainerDied","Data":"76ea6272fca9c46b59f2db4c5161f6d34060b5a40bd0d5c5b65cea949a4dad9e"} Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.695855 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"e72083f2-d324-432e-9bb0-5f44f2023489","Type":"ContainerDied","Data":"285b3478741b1d12d0aa4c1b7f7bc6f97edc86dc49a11d1a1e1b86cd0183b202"} Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.695921 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.697819 4558 generic.go:334] "Generic (PLEG): container finished" podID="f07ba882-d06f-4937-ac0a-e66fcd719b8d" containerID="63c838d8c51346650a84faa8ff095a9f00ae13df2999dcf2d156cd7c1b906029" exitCode=2 Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.697855 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"f07ba882-d06f-4937-ac0a-e66fcd719b8d","Type":"ContainerDied","Data":"63c838d8c51346650a84faa8ff095a9f00ae13df2999dcf2d156cd7c1b906029"} Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.697870 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"f07ba882-d06f-4937-ac0a-e66fcd719b8d","Type":"ContainerDied","Data":"85e4fa8762e8b2aa90c99c4aed1717ca86a0b44825ed3ade66e5c8a4c5a6fd50"} Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.697914 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.704391 4558 generic.go:334] "Generic (PLEG): container finished" podID="0378e8e5-48af-48c3-9100-80622a77533b" containerID="d207a7762509b3d4788233142ef6435d9d2416a2d5fceb734f0d9d4e3721e67e" exitCode=0 Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.704420 4558 generic.go:334] "Generic (PLEG): container finished" podID="0378e8e5-48af-48c3-9100-80622a77533b" containerID="75449ae33552c994c4ad5b2bca2b18138daeaa442df1920c73602222ea16d437" exitCode=2 Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.704429 4558 generic.go:334] "Generic (PLEG): container finished" podID="0378e8e5-48af-48c3-9100-80622a77533b" containerID="6afebd361ef0e45cdb43a62661e5fbd5a98fd241267936b37385d0bfde77a3bd" exitCode=0 Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.704465 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"0378e8e5-48af-48c3-9100-80622a77533b","Type":"ContainerDied","Data":"d207a7762509b3d4788233142ef6435d9d2416a2d5fceb734f0d9d4e3721e67e"} Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.704490 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"0378e8e5-48af-48c3-9100-80622a77533b","Type":"ContainerDied","Data":"75449ae33552c994c4ad5b2bca2b18138daeaa442df1920c73602222ea16d437"} Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.704501 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"0378e8e5-48af-48c3-9100-80622a77533b","Type":"ContainerDied","Data":"6afebd361ef0e45cdb43a62661e5fbd5a98fd241267936b37385d0bfde77a3bd"} Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.712426 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-f51d-account-create-update-wwmpb" Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.713011 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-psfz5" event={"ID":"bcc56301-83b3-4ff9-9290-ed029940a6da","Type":"ContainerStarted","Data":"3cf0e8299f84b5df2b4be00b863bd89ef88937e1f0734e67e76b770c4c5bf382"} Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.725449 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e72083f2-d324-432e-9bb0-5f44f2023489-scripts" (OuterVolumeSpecName: "scripts") pod "e72083f2-d324-432e-9bb0-5f44f2023489" (UID: "e72083f2-d324-432e-9bb0-5f44f2023489"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.744429 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-psfz5" podStartSLOduration=3.7444179589999997 podStartE2EDuration="3.744417959s" podCreationTimestamp="2026-01-20 17:21:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:21:36.736802645 +0000 UTC m=+2390.497140612" watchObservedRunningTime="2026-01-20 17:21:36.744417959 +0000 UTC m=+2390.504755926" Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.748768 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e72083f2-d324-432e-9bb0-5f44f2023489-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e72083f2-d324-432e-9bb0-5f44f2023489" (UID: "e72083f2-d324-432e-9bb0-5f44f2023489"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.766322 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e72083f2-d324-432e-9bb0-5f44f2023489-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "e72083f2-d324-432e-9bb0-5f44f2023489" (UID: "e72083f2-d324-432e-9bb0-5f44f2023489"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.783010 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8lt2s\" (UniqueName: \"kubernetes.io/projected/e72083f2-d324-432e-9bb0-5f44f2023489-kube-api-access-8lt2s\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.783034 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e72083f2-d324-432e-9bb0-5f44f2023489-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.783044 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e72083f2-d324-432e-9bb0-5f44f2023489-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.783053 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e72083f2-d324-432e-9bb0-5f44f2023489-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.783063 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e72083f2-d324-432e-9bb0-5f44f2023489-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.783071 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e72083f2-d324-432e-9bb0-5f44f2023489-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.783689 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e72083f2-d324-432e-9bb0-5f44f2023489-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "e72083f2-d324-432e-9bb0-5f44f2023489" (UID: "e72083f2-d324-432e-9bb0-5f44f2023489"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.798370 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e72083f2-d324-432e-9bb0-5f44f2023489-config-data" (OuterVolumeSpecName: "config-data") pod "e72083f2-d324-432e-9bb0-5f44f2023489" (UID: "e72083f2-d324-432e-9bb0-5f44f2023489"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.886184 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e72083f2-d324-432e-9bb0-5f44f2023489-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:36 crc kubenswrapper[4558]: I0120 17:21:36.886461 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e72083f2-d324-432e-9bb0-5f44f2023489-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.073832 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-6b7cbc9658-z4t27" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.083546 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-f51d-account-create-update-wwmpb" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.086810 4558 scope.go:117] "RemoveContainer" containerID="cfbe93ac18d993e620cd8106e1b690070a764b00d594313c60a12f9f0b8d2527" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.091449 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2c69e78-4cbd-41a8-810d-bdc4f56cabfd-config-data\") pod \"f2c69e78-4cbd-41a8-810d-bdc4f56cabfd\" (UID: \"f2c69e78-4cbd-41a8-810d-bdc4f56cabfd\") " Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.091561 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f2c69e78-4cbd-41a8-810d-bdc4f56cabfd-public-tls-certs\") pod \"f2c69e78-4cbd-41a8-810d-bdc4f56cabfd\" (UID: \"f2c69e78-4cbd-41a8-810d-bdc4f56cabfd\") " Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.091622 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f2c69e78-4cbd-41a8-810d-bdc4f56cabfd-scripts\") pod \"f2c69e78-4cbd-41a8-810d-bdc4f56cabfd\" (UID: \"f2c69e78-4cbd-41a8-810d-bdc4f56cabfd\") " Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.091673 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fns9l\" (UniqueName: \"kubernetes.io/projected/f2c69e78-4cbd-41a8-810d-bdc4f56cabfd-kube-api-access-fns9l\") pod \"f2c69e78-4cbd-41a8-810d-bdc4f56cabfd\" (UID: \"f2c69e78-4cbd-41a8-810d-bdc4f56cabfd\") " Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.091752 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f2c69e78-4cbd-41a8-810d-bdc4f56cabfd-logs\") pod \"f2c69e78-4cbd-41a8-810d-bdc4f56cabfd\" (UID: \"f2c69e78-4cbd-41a8-810d-bdc4f56cabfd\") " Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.091801 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2c69e78-4cbd-41a8-810d-bdc4f56cabfd-combined-ca-bundle\") pod \"f2c69e78-4cbd-41a8-810d-bdc4f56cabfd\" (UID: \"f2c69e78-4cbd-41a8-810d-bdc4f56cabfd\") " Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.091837 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f2c69e78-4cbd-41a8-810d-bdc4f56cabfd-internal-tls-certs\") pod \"f2c69e78-4cbd-41a8-810d-bdc4f56cabfd\" (UID: \"f2c69e78-4cbd-41a8-810d-bdc4f56cabfd\") " Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.096398 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f2c69e78-4cbd-41a8-810d-bdc4f56cabfd-logs" (OuterVolumeSpecName: "logs") pod "f2c69e78-4cbd-41a8-810d-bdc4f56cabfd" (UID: "f2c69e78-4cbd-41a8-810d-bdc4f56cabfd"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.106744 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f2c69e78-4cbd-41a8-810d-bdc4f56cabfd-scripts" (OuterVolumeSpecName: "scripts") pod "f2c69e78-4cbd-41a8-810d-bdc4f56cabfd" (UID: "f2c69e78-4cbd-41a8-810d-bdc4f56cabfd"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.107372 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f2c69e78-4cbd-41a8-810d-bdc4f56cabfd-kube-api-access-fns9l" (OuterVolumeSpecName: "kube-api-access-fns9l") pod "f2c69e78-4cbd-41a8-810d-bdc4f56cabfd" (UID: "f2c69e78-4cbd-41a8-810d-bdc4f56cabfd"). InnerVolumeSpecName "kube-api-access-fns9l". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.109801 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-r6qz8" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.125541 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-proxy-85c8dfdd74-sjsz2"] Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.133078 4558 scope.go:117] "RemoveContainer" containerID="76ea6272fca9c46b59f2db4c5161f6d34060b5a40bd0d5c5b65cea949a4dad9e" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.139835 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/swift-proxy-85c8dfdd74-sjsz2"] Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.153280 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-36de-account-create-update-n4ql4"] Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.174099 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-36de-account-create-update-n4ql4"] Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.188429 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f2c69e78-4cbd-41a8-810d-bdc4f56cabfd-config-data" (OuterVolumeSpecName: "config-data") pod "f2c69e78-4cbd-41a8-810d-bdc4f56cabfd" (UID: "f2c69e78-4cbd-41a8-810d-bdc4f56cabfd"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.204316 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-247a-account-create-update-8drms"] Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.212731 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4eb93277-301f-486e-a1e9-f323f9bd9cb4-operator-scripts\") pod \"4eb93277-301f-486e-a1e9-f323f9bd9cb4\" (UID: \"4eb93277-301f-486e-a1e9-f323f9bd9cb4\") " Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.213141 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k9xdl\" (UniqueName: \"kubernetes.io/projected/4eb93277-301f-486e-a1e9-f323f9bd9cb4-kube-api-access-k9xdl\") pod \"4eb93277-301f-486e-a1e9-f323f9bd9cb4\" (UID: \"4eb93277-301f-486e-a1e9-f323f9bd9cb4\") " Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.213218 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-247a-account-create-update-8drms"] Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.215027 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4eb93277-301f-486e-a1e9-f323f9bd9cb4-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "4eb93277-301f-486e-a1e9-f323f9bd9cb4" (UID: "4eb93277-301f-486e-a1e9-f323f9bd9cb4"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.216731 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2c69e78-4cbd-41a8-810d-bdc4f56cabfd-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.216968 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4eb93277-301f-486e-a1e9-f323f9bd9cb4-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.217037 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f2c69e78-4cbd-41a8-810d-bdc4f56cabfd-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.217090 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fns9l\" (UniqueName: \"kubernetes.io/projected/f2c69e78-4cbd-41a8-810d-bdc4f56cabfd-kube-api-access-fns9l\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.217142 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f2c69e78-4cbd-41a8-810d-bdc4f56cabfd-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.223274 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.232371 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.239581 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4eb93277-301f-486e-a1e9-f323f9bd9cb4-kube-api-access-k9xdl" (OuterVolumeSpecName: "kube-api-access-k9xdl") pod "4eb93277-301f-486e-a1e9-f323f9bd9cb4" (UID: "4eb93277-301f-486e-a1e9-f323f9bd9cb4"). InnerVolumeSpecName "kube-api-access-k9xdl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.241096 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.253379 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.272994 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovn-northd-0_88418ec6-c589-463f-b8ec-c12464810c07/ovn-northd/0.log" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.273080 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.294963 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f2c69e78-4cbd-41a8-810d-bdc4f56cabfd-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "f2c69e78-4cbd-41a8-810d-bdc4f56cabfd" (UID: "f2c69e78-4cbd-41a8-810d-bdc4f56cabfd"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.299858 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f2c69e78-4cbd-41a8-810d-bdc4f56cabfd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f2c69e78-4cbd-41a8-810d-bdc4f56cabfd" (UID: "f2c69e78-4cbd-41a8-810d-bdc4f56cabfd"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.318816 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/88418ec6-c589-463f-b8ec-c12464810c07-config\") pod \"88418ec6-c589-463f-b8ec-c12464810c07\" (UID: \"88418ec6-c589-463f-b8ec-c12464810c07\") " Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.318896 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/88418ec6-c589-463f-b8ec-c12464810c07-metrics-certs-tls-certs\") pod \"88418ec6-c589-463f-b8ec-c12464810c07\" (UID: \"88418ec6-c589-463f-b8ec-c12464810c07\") " Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.318961 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/88418ec6-c589-463f-b8ec-c12464810c07-ovn-northd-tls-certs\") pod \"88418ec6-c589-463f-b8ec-c12464810c07\" (UID: \"88418ec6-c589-463f-b8ec-c12464810c07\") " Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.319051 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/88418ec6-c589-463f-b8ec-c12464810c07-ovn-rundir\") pod \"88418ec6-c589-463f-b8ec-c12464810c07\" (UID: \"88418ec6-c589-463f-b8ec-c12464810c07\") " Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.319274 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/88418ec6-c589-463f-b8ec-c12464810c07-scripts\") pod \"88418ec6-c589-463f-b8ec-c12464810c07\" (UID: \"88418ec6-c589-463f-b8ec-c12464810c07\") " Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.319297 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88418ec6-c589-463f-b8ec-c12464810c07-combined-ca-bundle\") pod \"88418ec6-c589-463f-b8ec-c12464810c07\" (UID: \"88418ec6-c589-463f-b8ec-c12464810c07\") " Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.319325 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k5fjg\" (UniqueName: \"kubernetes.io/projected/88418ec6-c589-463f-b8ec-c12464810c07-kube-api-access-k5fjg\") pod \"88418ec6-c589-463f-b8ec-c12464810c07\" (UID: \"88418ec6-c589-463f-b8ec-c12464810c07\") " Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.319397 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/88418ec6-c589-463f-b8ec-c12464810c07-config" (OuterVolumeSpecName: "config") pod "88418ec6-c589-463f-b8ec-c12464810c07" (UID: "88418ec6-c589-463f-b8ec-c12464810c07"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.319660 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/88418ec6-c589-463f-b8ec-c12464810c07-ovn-rundir" (OuterVolumeSpecName: "ovn-rundir") pod "88418ec6-c589-463f-b8ec-c12464810c07" (UID: "88418ec6-c589-463f-b8ec-c12464810c07"). InnerVolumeSpecName "ovn-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.320246 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k9xdl\" (UniqueName: \"kubernetes.io/projected/4eb93277-301f-486e-a1e9-f323f9bd9cb4-kube-api-access-k9xdl\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.320271 4558 reconciler_common.go:293] "Volume detached for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/88418ec6-c589-463f-b8ec-c12464810c07-ovn-rundir\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.320281 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f2c69e78-4cbd-41a8-810d-bdc4f56cabfd-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.320290 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/88418ec6-c589-463f-b8ec-c12464810c07-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.320303 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2c69e78-4cbd-41a8-810d-bdc4f56cabfd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.320622 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/88418ec6-c589-463f-b8ec-c12464810c07-scripts" (OuterVolumeSpecName: "scripts") pod "88418ec6-c589-463f-b8ec-c12464810c07" (UID: "88418ec6-c589-463f-b8ec-c12464810c07"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.326402 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/88418ec6-c589-463f-b8ec-c12464810c07-kube-api-access-k5fjg" (OuterVolumeSpecName: "kube-api-access-k5fjg") pod "88418ec6-c589-463f-b8ec-c12464810c07" (UID: "88418ec6-c589-463f-b8ec-c12464810c07"). InnerVolumeSpecName "kube-api-access-k5fjg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.334577 4558 scope.go:117] "RemoveContainer" containerID="e1f212cc615420dc0adbf996976347dda8d27a4a5471651930f4f497d67e903f" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.371285 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.372136 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/88418ec6-c589-463f-b8ec-c12464810c07-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "88418ec6-c589-463f-b8ec-c12464810c07" (UID: "88418ec6-c589-463f-b8ec-c12464810c07"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.381623 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f2c69e78-4cbd-41a8-810d-bdc4f56cabfd-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "f2c69e78-4cbd-41a8-810d-bdc4f56cabfd" (UID: "f2c69e78-4cbd-41a8-810d-bdc4f56cabfd"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.401140 4558 scope.go:117] "RemoveContainer" containerID="76ea6272fca9c46b59f2db4c5161f6d34060b5a40bd0d5c5b65cea949a4dad9e" Jan 20 17:21:37 crc kubenswrapper[4558]: E0120 17:21:37.401930 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"76ea6272fca9c46b59f2db4c5161f6d34060b5a40bd0d5c5b65cea949a4dad9e\": container with ID starting with 76ea6272fca9c46b59f2db4c5161f6d34060b5a40bd0d5c5b65cea949a4dad9e not found: ID does not exist" containerID="76ea6272fca9c46b59f2db4c5161f6d34060b5a40bd0d5c5b65cea949a4dad9e" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.401957 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"76ea6272fca9c46b59f2db4c5161f6d34060b5a40bd0d5c5b65cea949a4dad9e"} err="failed to get container status \"76ea6272fca9c46b59f2db4c5161f6d34060b5a40bd0d5c5b65cea949a4dad9e\": rpc error: code = NotFound desc = could not find container \"76ea6272fca9c46b59f2db4c5161f6d34060b5a40bd0d5c5b65cea949a4dad9e\": container with ID starting with 76ea6272fca9c46b59f2db4c5161f6d34060b5a40bd0d5c5b65cea949a4dad9e not found: ID does not exist" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.401976 4558 scope.go:117] "RemoveContainer" containerID="e1f212cc615420dc0adbf996976347dda8d27a4a5471651930f4f497d67e903f" Jan 20 17:21:37 crc kubenswrapper[4558]: E0120 17:21:37.402284 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e1f212cc615420dc0adbf996976347dda8d27a4a5471651930f4f497d67e903f\": container with ID starting with e1f212cc615420dc0adbf996976347dda8d27a4a5471651930f4f497d67e903f not found: ID does not exist" containerID="e1f212cc615420dc0adbf996976347dda8d27a4a5471651930f4f497d67e903f" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.402304 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e1f212cc615420dc0adbf996976347dda8d27a4a5471651930f4f497d67e903f"} err="failed to get container status \"e1f212cc615420dc0adbf996976347dda8d27a4a5471651930f4f497d67e903f\": rpc error: code = NotFound desc = could not find container \"e1f212cc615420dc0adbf996976347dda8d27a4a5471651930f4f497d67e903f\": container with ID starting with e1f212cc615420dc0adbf996976347dda8d27a4a5471651930f4f497d67e903f not found: ID does not exist" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.402315 4558 scope.go:117] "RemoveContainer" containerID="63c838d8c51346650a84faa8ff095a9f00ae13df2999dcf2d156cd7c1b906029" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.402471 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.404541 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-9b8896fdb-7mrlf" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.422132 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/55138e51-715c-42a1-8e1d-bca65a31d55c-httpd-run\") pod \"55138e51-715c-42a1-8e1d-bca65a31d55c\" (UID: \"55138e51-715c-42a1-8e1d-bca65a31d55c\") " Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.422240 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"55138e51-715c-42a1-8e1d-bca65a31d55c\" (UID: \"55138e51-715c-42a1-8e1d-bca65a31d55c\") " Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.422375 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xgf4x\" (UniqueName: \"kubernetes.io/projected/55138e51-715c-42a1-8e1d-bca65a31d55c-kube-api-access-xgf4x\") pod \"55138e51-715c-42a1-8e1d-bca65a31d55c\" (UID: \"55138e51-715c-42a1-8e1d-bca65a31d55c\") " Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.422469 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/55138e51-715c-42a1-8e1d-bca65a31d55c-internal-tls-certs\") pod \"55138e51-715c-42a1-8e1d-bca65a31d55c\" (UID: \"55138e51-715c-42a1-8e1d-bca65a31d55c\") " Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.422488 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/55138e51-715c-42a1-8e1d-bca65a31d55c-logs\") pod \"55138e51-715c-42a1-8e1d-bca65a31d55c\" (UID: \"55138e51-715c-42a1-8e1d-bca65a31d55c\") " Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.422523 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55138e51-715c-42a1-8e1d-bca65a31d55c-combined-ca-bundle\") pod \"55138e51-715c-42a1-8e1d-bca65a31d55c\" (UID: \"55138e51-715c-42a1-8e1d-bca65a31d55c\") " Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.422579 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/55138e51-715c-42a1-8e1d-bca65a31d55c-config-data\") pod \"55138e51-715c-42a1-8e1d-bca65a31d55c\" (UID: \"55138e51-715c-42a1-8e1d-bca65a31d55c\") " Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.422632 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/55138e51-715c-42a1-8e1d-bca65a31d55c-scripts\") pod \"55138e51-715c-42a1-8e1d-bca65a31d55c\" (UID: \"55138e51-715c-42a1-8e1d-bca65a31d55c\") " Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.422727 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/55138e51-715c-42a1-8e1d-bca65a31d55c-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "55138e51-715c-42a1-8e1d-bca65a31d55c" (UID: "55138e51-715c-42a1-8e1d-bca65a31d55c"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.423135 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/88418ec6-c589-463f-b8ec-c12464810c07-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.423147 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88418ec6-c589-463f-b8ec-c12464810c07-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.423157 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k5fjg\" (UniqueName: \"kubernetes.io/projected/88418ec6-c589-463f-b8ec-c12464810c07-kube-api-access-k5fjg\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.424491 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f2c69e78-4cbd-41a8-810d-bdc4f56cabfd-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.424502 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/55138e51-715c-42a1-8e1d-bca65a31d55c-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.423637 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/55138e51-715c-42a1-8e1d-bca65a31d55c-logs" (OuterVolumeSpecName: "logs") pod "55138e51-715c-42a1-8e1d-bca65a31d55c" (UID: "55138e51-715c-42a1-8e1d-bca65a31d55c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.426511 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage13-crc" (OuterVolumeSpecName: "glance") pod "55138e51-715c-42a1-8e1d-bca65a31d55c" (UID: "55138e51-715c-42a1-8e1d-bca65a31d55c"). InnerVolumeSpecName "local-storage13-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.427448 4558 scope.go:117] "RemoveContainer" containerID="63c838d8c51346650a84faa8ff095a9f00ae13df2999dcf2d156cd7c1b906029" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.427827 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/55138e51-715c-42a1-8e1d-bca65a31d55c-scripts" (OuterVolumeSpecName: "scripts") pod "55138e51-715c-42a1-8e1d-bca65a31d55c" (UID: "55138e51-715c-42a1-8e1d-bca65a31d55c"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:37 crc kubenswrapper[4558]: E0120 17:21:37.428210 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"63c838d8c51346650a84faa8ff095a9f00ae13df2999dcf2d156cd7c1b906029\": container with ID starting with 63c838d8c51346650a84faa8ff095a9f00ae13df2999dcf2d156cd7c1b906029 not found: ID does not exist" containerID="63c838d8c51346650a84faa8ff095a9f00ae13df2999dcf2d156cd7c1b906029" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.428244 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"63c838d8c51346650a84faa8ff095a9f00ae13df2999dcf2d156cd7c1b906029"} err="failed to get container status \"63c838d8c51346650a84faa8ff095a9f00ae13df2999dcf2d156cd7c1b906029\": rpc error: code = NotFound desc = could not find container \"63c838d8c51346650a84faa8ff095a9f00ae13df2999dcf2d156cd7c1b906029\": container with ID starting with 63c838d8c51346650a84faa8ff095a9f00ae13df2999dcf2d156cd7c1b906029 not found: ID does not exist" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.440301 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/55138e51-715c-42a1-8e1d-bca65a31d55c-kube-api-access-xgf4x" (OuterVolumeSpecName: "kube-api-access-xgf4x") pod "55138e51-715c-42a1-8e1d-bca65a31d55c" (UID: "55138e51-715c-42a1-8e1d-bca65a31d55c"). InnerVolumeSpecName "kube-api-access-xgf4x". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.470261 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/88418ec6-c589-463f-b8ec-c12464810c07-ovn-northd-tls-certs" (OuterVolumeSpecName: "ovn-northd-tls-certs") pod "88418ec6-c589-463f-b8ec-c12464810c07" (UID: "88418ec6-c589-463f-b8ec-c12464810c07"). InnerVolumeSpecName "ovn-northd-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.471085 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/55138e51-715c-42a1-8e1d-bca65a31d55c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "55138e51-715c-42a1-8e1d-bca65a31d55c" (UID: "55138e51-715c-42a1-8e1d-bca65a31d55c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.477421 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/88418ec6-c589-463f-b8ec-c12464810c07-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "88418ec6-c589-463f-b8ec-c12464810c07" (UID: "88418ec6-c589-463f-b8ec-c12464810c07"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.483891 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/55138e51-715c-42a1-8e1d-bca65a31d55c-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "55138e51-715c-42a1-8e1d-bca65a31d55c" (UID: "55138e51-715c-42a1-8e1d-bca65a31d55c"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.498368 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/55138e51-715c-42a1-8e1d-bca65a31d55c-config-data" (OuterVolumeSpecName: "config-data") pod "55138e51-715c-42a1-8e1d-bca65a31d55c" (UID: "55138e51-715c-42a1-8e1d-bca65a31d55c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.525871 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-998h7\" (UniqueName: \"kubernetes.io/projected/a948066b-fa95-4fbb-83e6-6f26f6c76652-kube-api-access-998h7\") pod \"a948066b-fa95-4fbb-83e6-6f26f6c76652\" (UID: \"a948066b-fa95-4fbb-83e6-6f26f6c76652\") " Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.526580 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c4b8x\" (UniqueName: \"kubernetes.io/projected/09d95674-9970-471d-8061-997e69ddda11-kube-api-access-c4b8x\") pod \"09d95674-9970-471d-8061-997e69ddda11\" (UID: \"09d95674-9970-471d-8061-997e69ddda11\") " Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.526622 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/09d95674-9970-471d-8061-997e69ddda11-nova-metadata-tls-certs\") pod \"09d95674-9970-471d-8061-997e69ddda11\" (UID: \"09d95674-9970-471d-8061-997e69ddda11\") " Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.526683 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a948066b-fa95-4fbb-83e6-6f26f6c76652-logs\") pod \"a948066b-fa95-4fbb-83e6-6f26f6c76652\" (UID: \"a948066b-fa95-4fbb-83e6-6f26f6c76652\") " Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.526839 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a948066b-fa95-4fbb-83e6-6f26f6c76652-config-data\") pod \"a948066b-fa95-4fbb-83e6-6f26f6c76652\" (UID: \"a948066b-fa95-4fbb-83e6-6f26f6c76652\") " Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.526871 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/09d95674-9970-471d-8061-997e69ddda11-config-data\") pod \"09d95674-9970-471d-8061-997e69ddda11\" (UID: \"09d95674-9970-471d-8061-997e69ddda11\") " Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.526925 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a948066b-fa95-4fbb-83e6-6f26f6c76652-combined-ca-bundle\") pod \"a948066b-fa95-4fbb-83e6-6f26f6c76652\" (UID: \"a948066b-fa95-4fbb-83e6-6f26f6c76652\") " Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.526950 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09d95674-9970-471d-8061-997e69ddda11-combined-ca-bundle\") pod \"09d95674-9970-471d-8061-997e69ddda11\" (UID: \"09d95674-9970-471d-8061-997e69ddda11\") " Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.527093 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/a948066b-fa95-4fbb-83e6-6f26f6c76652-config-data-custom\") pod \"a948066b-fa95-4fbb-83e6-6f26f6c76652\" (UID: \"a948066b-fa95-4fbb-83e6-6f26f6c76652\") " Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.527126 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/09d95674-9970-471d-8061-997e69ddda11-logs\") pod \"09d95674-9970-471d-8061-997e69ddda11\" (UID: \"09d95674-9970-471d-8061-997e69ddda11\") " Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.527623 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a948066b-fa95-4fbb-83e6-6f26f6c76652-logs" (OuterVolumeSpecName: "logs") pod "a948066b-fa95-4fbb-83e6-6f26f6c76652" (UID: "a948066b-fa95-4fbb-83e6-6f26f6c76652"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.527736 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/09d95674-9970-471d-8061-997e69ddda11-logs" (OuterVolumeSpecName: "logs") pod "09d95674-9970-471d-8061-997e69ddda11" (UID: "09d95674-9970-471d-8061-997e69ddda11"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.528472 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/55138e51-715c-42a1-8e1d-bca65a31d55c-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.528508 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/55138e51-715c-42a1-8e1d-bca65a31d55c-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.528519 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/09d95674-9970-471d-8061-997e69ddda11-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.528529 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/55138e51-715c-42a1-8e1d-bca65a31d55c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.528541 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/55138e51-715c-42a1-8e1d-bca65a31d55c-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.528551 4558 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/88418ec6-c589-463f-b8ec-c12464810c07-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.528577 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/55138e51-715c-42a1-8e1d-bca65a31d55c-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.528588 4558 reconciler_common.go:293] "Volume detached for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/88418ec6-c589-463f-b8ec-c12464810c07-ovn-northd-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.528604 4558 reconciler_common.go:293] "Volume 
detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a948066b-fa95-4fbb-83e6-6f26f6c76652-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.528634 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") on node \"crc\" " Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.528660 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xgf4x\" (UniqueName: \"kubernetes.io/projected/55138e51-715c-42a1-8e1d-bca65a31d55c-kube-api-access-xgf4x\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.534984 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a948066b-fa95-4fbb-83e6-6f26f6c76652-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "a948066b-fa95-4fbb-83e6-6f26f6c76652" (UID: "a948066b-fa95-4fbb-83e6-6f26f6c76652"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.535024 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09d95674-9970-471d-8061-997e69ddda11-kube-api-access-c4b8x" (OuterVolumeSpecName: "kube-api-access-c4b8x") pod "09d95674-9970-471d-8061-997e69ddda11" (UID: "09d95674-9970-471d-8061-997e69ddda11"). InnerVolumeSpecName "kube-api-access-c4b8x". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.535093 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a948066b-fa95-4fbb-83e6-6f26f6c76652-kube-api-access-998h7" (OuterVolumeSpecName: "kube-api-access-998h7") pod "a948066b-fa95-4fbb-83e6-6f26f6c76652" (UID: "a948066b-fa95-4fbb-83e6-6f26f6c76652"). InnerVolumeSpecName "kube-api-access-998h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.543915 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage13-crc" (UniqueName: "kubernetes.io/local-volume/local-storage13-crc") on node "crc" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.560546 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a948066b-fa95-4fbb-83e6-6f26f6c76652-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a948066b-fa95-4fbb-83e6-6f26f6c76652" (UID: "a948066b-fa95-4fbb-83e6-6f26f6c76652"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.562241 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09d95674-9970-471d-8061-997e69ddda11-config-data" (OuterVolumeSpecName: "config-data") pod "09d95674-9970-471d-8061-997e69ddda11" (UID: "09d95674-9970-471d-8061-997e69ddda11"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.562383 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09d95674-9970-471d-8061-997e69ddda11-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "09d95674-9970-471d-8061-997e69ddda11" (UID: "09d95674-9970-471d-8061-997e69ddda11"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.570950 4558 scope.go:117] "RemoveContainer" containerID="0ffce44366a8b4466427b682fb41640425366a0f4449210959148de9aef695c6" Jan 20 17:21:37 crc kubenswrapper[4558]: E0120 17:21:37.571516 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.595518 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a948066b-fa95-4fbb-83e6-6f26f6c76652-config-data" (OuterVolumeSpecName: "config-data") pod "a948066b-fa95-4fbb-83e6-6f26f6c76652" (UID: "a948066b-fa95-4fbb-83e6-6f26f6c76652"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.596284 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09d95674-9970-471d-8061-997e69ddda11-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "09d95674-9970-471d-8061-997e69ddda11" (UID: "09d95674-9970-471d-8061-997e69ddda11"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.630347 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n74cc\" (UniqueName: \"kubernetes.io/projected/32b13960-8f0f-47f6-b529-37f7a918fcd6-kube-api-access-n74cc\") pod \"keystone-f51d-account-create-update-wwmpb\" (UID: \"32b13960-8f0f-47f6-b529-37f7a918fcd6\") " pod="openstack-kuttl-tests/keystone-f51d-account-create-update-wwmpb" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.630414 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/32b13960-8f0f-47f6-b529-37f7a918fcd6-operator-scripts\") pod \"keystone-f51d-account-create-update-wwmpb\" (UID: \"32b13960-8f0f-47f6-b529-37f7a918fcd6\") " pod="openstack-kuttl-tests/keystone-f51d-account-create-update-wwmpb" Jan 20 17:21:37 crc kubenswrapper[4558]: E0120 17:21:37.630914 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 17:21:37 crc kubenswrapper[4558]: E0120 17:21:37.630995 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/32b13960-8f0f-47f6-b529-37f7a918fcd6-operator-scripts podName:32b13960-8f0f-47f6-b529-37f7a918fcd6 nodeName:}" failed. No retries permitted until 2026-01-20 17:21:39.630968812 +0000 UTC m=+2393.391306779 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/32b13960-8f0f-47f6-b529-37f7a918fcd6-operator-scripts") pod "keystone-f51d-account-create-update-wwmpb" (UID: "32b13960-8f0f-47f6-b529-37f7a918fcd6") : configmap "openstack-scripts" not found Jan 20 17:21:37 crc kubenswrapper[4558]: E0120 17:21:37.632631 4558 projected.go:194] Error preparing data for projected volume kube-api-access-n74cc for pod openstack-kuttl-tests/keystone-f51d-account-create-update-wwmpb: failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.632660 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-998h7\" (UniqueName: \"kubernetes.io/projected/a948066b-fa95-4fbb-83e6-6f26f6c76652-kube-api-access-998h7\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:37 crc kubenswrapper[4558]: E0120 17:21:37.632689 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/32b13960-8f0f-47f6-b529-37f7a918fcd6-kube-api-access-n74cc podName:32b13960-8f0f-47f6-b529-37f7a918fcd6 nodeName:}" failed. No retries permitted until 2026-01-20 17:21:39.632672236 +0000 UTC m=+2393.393010204 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-n74cc" (UniqueName: "kubernetes.io/projected/32b13960-8f0f-47f6-b529-37f7a918fcd6-kube-api-access-n74cc") pod "keystone-f51d-account-create-update-wwmpb" (UID: "32b13960-8f0f-47f6-b529-37f7a918fcd6") : failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.632717 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c4b8x\" (UniqueName: \"kubernetes.io/projected/09d95674-9970-471d-8061-997e69ddda11-kube-api-access-c4b8x\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.632737 4558 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/09d95674-9970-471d-8061-997e69ddda11-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.632760 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.632774 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a948066b-fa95-4fbb-83e6-6f26f6c76652-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.632786 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/09d95674-9970-471d-8061-997e69ddda11-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.632798 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a948066b-fa95-4fbb-83e6-6f26f6c76652-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.632809 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09d95674-9970-471d-8061-997e69ddda11-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.632818 4558 reconciler_common.go:293] 
"Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a948066b-fa95-4fbb-83e6-6f26f6c76652-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.649181 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.733572 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage17-crc\") pod \"20cdc3df-1626-4f56-8a92-9df64c1ed2fe\" (UID: \"20cdc3df-1626-4f56-8a92-9df64c1ed2fe\") " Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.733630 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/20cdc3df-1626-4f56-8a92-9df64c1ed2fe-httpd-run\") pod \"20cdc3df-1626-4f56-8a92-9df64c1ed2fe\" (UID: \"20cdc3df-1626-4f56-8a92-9df64c1ed2fe\") " Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.733666 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/20cdc3df-1626-4f56-8a92-9df64c1ed2fe-logs\") pod \"20cdc3df-1626-4f56-8a92-9df64c1ed2fe\" (UID: \"20cdc3df-1626-4f56-8a92-9df64c1ed2fe\") " Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.733725 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/20cdc3df-1626-4f56-8a92-9df64c1ed2fe-public-tls-certs\") pod \"20cdc3df-1626-4f56-8a92-9df64c1ed2fe\" (UID: \"20cdc3df-1626-4f56-8a92-9df64c1ed2fe\") " Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.733755 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-psk66\" (UniqueName: \"kubernetes.io/projected/20cdc3df-1626-4f56-8a92-9df64c1ed2fe-kube-api-access-psk66\") pod \"20cdc3df-1626-4f56-8a92-9df64c1ed2fe\" (UID: \"20cdc3df-1626-4f56-8a92-9df64c1ed2fe\") " Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.733780 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20cdc3df-1626-4f56-8a92-9df64c1ed2fe-combined-ca-bundle\") pod \"20cdc3df-1626-4f56-8a92-9df64c1ed2fe\" (UID: \"20cdc3df-1626-4f56-8a92-9df64c1ed2fe\") " Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.733858 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/20cdc3df-1626-4f56-8a92-9df64c1ed2fe-config-data\") pod \"20cdc3df-1626-4f56-8a92-9df64c1ed2fe\" (UID: \"20cdc3df-1626-4f56-8a92-9df64c1ed2fe\") " Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.733931 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/20cdc3df-1626-4f56-8a92-9df64c1ed2fe-scripts\") pod \"20cdc3df-1626-4f56-8a92-9df64c1ed2fe\" (UID: \"20cdc3df-1626-4f56-8a92-9df64c1ed2fe\") " Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.733955 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/20cdc3df-1626-4f56-8a92-9df64c1ed2fe-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "20cdc3df-1626-4f56-8a92-9df64c1ed2fe" (UID: "20cdc3df-1626-4f56-8a92-9df64c1ed2fe"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.734478 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/20cdc3df-1626-4f56-8a92-9df64c1ed2fe-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.737815 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage17-crc" (OuterVolumeSpecName: "glance") pod "20cdc3df-1626-4f56-8a92-9df64c1ed2fe" (UID: "20cdc3df-1626-4f56-8a92-9df64c1ed2fe"). InnerVolumeSpecName "local-storage17-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.738865 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20cdc3df-1626-4f56-8a92-9df64c1ed2fe-kube-api-access-psk66" (OuterVolumeSpecName: "kube-api-access-psk66") pod "20cdc3df-1626-4f56-8a92-9df64c1ed2fe" (UID: "20cdc3df-1626-4f56-8a92-9df64c1ed2fe"). InnerVolumeSpecName "kube-api-access-psk66". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.739230 4558 generic.go:334] "Generic (PLEG): container finished" podID="fa03920f-0e4b-458f-956b-b658786f9792" containerID="a62b1eff4cb0e50771699ddb63f1bffd4fb9ff28cbdf146a5a3c121d2a2e02fa" exitCode=0 Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.739315 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-2" event={"ID":"fa03920f-0e4b-458f-956b-b658786f9792","Type":"ContainerDied","Data":"a62b1eff4cb0e50771699ddb63f1bffd4fb9ff28cbdf146a5a3c121d2a2e02fa"} Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.740353 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20cdc3df-1626-4f56-8a92-9df64c1ed2fe-scripts" (OuterVolumeSpecName: "scripts") pod "20cdc3df-1626-4f56-8a92-9df64c1ed2fe" (UID: "20cdc3df-1626-4f56-8a92-9df64c1ed2fe"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.742399 4558 generic.go:334] "Generic (PLEG): container finished" podID="20cdc3df-1626-4f56-8a92-9df64c1ed2fe" containerID="37ed8e0f109228dfcc4e9447b9278994669497b6597de7b6864c0f3e20436236" exitCode=0 Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.742471 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.742482 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"20cdc3df-1626-4f56-8a92-9df64c1ed2fe","Type":"ContainerDied","Data":"37ed8e0f109228dfcc4e9447b9278994669497b6597de7b6864c0f3e20436236"} Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.742517 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"20cdc3df-1626-4f56-8a92-9df64c1ed2fe","Type":"ContainerDied","Data":"1e0d541d89501c62bc139c127c6a46f23f965cd6aa3deda808f036d69f424ad5"} Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.742538 4558 scope.go:117] "RemoveContainer" containerID="37ed8e0f109228dfcc4e9447b9278994669497b6597de7b6864c0f3e20436236" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.743421 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/20cdc3df-1626-4f56-8a92-9df64c1ed2fe-logs" (OuterVolumeSpecName: "logs") pod "20cdc3df-1626-4f56-8a92-9df64c1ed2fe" (UID: "20cdc3df-1626-4f56-8a92-9df64c1ed2fe"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.747406 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-2" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.756534 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-6b7cbc9658-z4t27" event={"ID":"f2c69e78-4cbd-41a8-810d-bdc4f56cabfd","Type":"ContainerDied","Data":"0270ba2b6c46bed7c0eb7d2bafcb0e00525bc89f1ec0b572d313c45e45598504"} Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.756719 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-6b7cbc9658-z4t27" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.760508 4558 generic.go:334] "Generic (PLEG): container finished" podID="4eb93277-301f-486e-a1e9-f323f9bd9cb4" containerID="68c910efbea1cdab426d9f23de2d058f94a3d54c56eccdecb828d1307a55edd1" exitCode=1 Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.760592 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-r6qz8" event={"ID":"4eb93277-301f-486e-a1e9-f323f9bd9cb4","Type":"ContainerDied","Data":"68c910efbea1cdab426d9f23de2d058f94a3d54c56eccdecb828d1307a55edd1"} Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.760632 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-r6qz8" event={"ID":"4eb93277-301f-486e-a1e9-f323f9bd9cb4","Type":"ContainerDied","Data":"96a895c6fbd90cf00b90eff69a6a1940e04dcdc631f75368bf539acb994d8e55"} Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.760695 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-r6qz8" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.760959 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20cdc3df-1626-4f56-8a92-9df64c1ed2fe-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "20cdc3df-1626-4f56-8a92-9df64c1ed2fe" (UID: "20cdc3df-1626-4f56-8a92-9df64c1ed2fe"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.767038 4558 generic.go:334] "Generic (PLEG): container finished" podID="55138e51-715c-42a1-8e1d-bca65a31d55c" containerID="ad7e074652e180a31db91a23980de422d274542b0626f62f61e86a2d3f3088d7" exitCode=0 Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.767085 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"55138e51-715c-42a1-8e1d-bca65a31d55c","Type":"ContainerDied","Data":"ad7e074652e180a31db91a23980de422d274542b0626f62f61e86a2d3f3088d7"} Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.767107 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"55138e51-715c-42a1-8e1d-bca65a31d55c","Type":"ContainerDied","Data":"82d6c49a8096269526b0400cbd2f95809c16b6793518acd69f37baf1e020f831"} Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.767159 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.776243 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovn-northd-0_88418ec6-c589-463f-b8ec-c12464810c07/ovn-northd/0.log" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.776285 4558 generic.go:334] "Generic (PLEG): container finished" podID="88418ec6-c589-463f-b8ec-c12464810c07" containerID="9c3bcaf0b0b1fe796f0f39a396e91571e66a0d2ff4f33099f19f4d03733e3f3c" exitCode=139 Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.776338 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"88418ec6-c589-463f-b8ec-c12464810c07","Type":"ContainerDied","Data":"9c3bcaf0b0b1fe796f0f39a396e91571e66a0d2ff4f33099f19f4d03733e3f3c"} Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.776376 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"88418ec6-c589-463f-b8ec-c12464810c07","Type":"ContainerDied","Data":"829d904640074384724ded901ca01ef16df44e91a2fc8f1c0086c25626a6fd37"} Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.776486 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.777689 4558 scope.go:117] "RemoveContainer" containerID="e156d2f38d6403388f01acbe07670bd0efccb2c563ca79b7e6061ec9d3596440" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.780907 4558 generic.go:334] "Generic (PLEG): container finished" podID="09d95674-9970-471d-8061-997e69ddda11" containerID="065345d0e077b3bb2b2e67f7ed62c2413c1984c254d80dff447c7a5693b57103" exitCode=0 Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.780955 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"09d95674-9970-471d-8061-997e69ddda11","Type":"ContainerDied","Data":"065345d0e077b3bb2b2e67f7ed62c2413c1984c254d80dff447c7a5693b57103"} Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.780976 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"09d95674-9970-471d-8061-997e69ddda11","Type":"ContainerDied","Data":"eb79635f225ca9e6f5dfb4cbc3fdc0d4ac29a6eeed2163e63d4fa1536097a060"} Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.781026 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.786489 4558 generic.go:334] "Generic (PLEG): container finished" podID="a948066b-fa95-4fbb-83e6-6f26f6c76652" containerID="67f52d695e7843e5e1f00cfd6b4ff76e79ba46c21e0baabc0a21e634dcae0c6e" exitCode=0 Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.786872 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-9b8896fdb-7mrlf" event={"ID":"a948066b-fa95-4fbb-83e6-6f26f6c76652","Type":"ContainerDied","Data":"67f52d695e7843e5e1f00cfd6b4ff76e79ba46c21e0baabc0a21e634dcae0c6e"} Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.786918 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-9b8896fdb-7mrlf" event={"ID":"a948066b-fa95-4fbb-83e6-6f26f6c76652","Type":"ContainerDied","Data":"d370341deb7c313621e85ad08e7b1eb9630f0e743e8ef7f6282693bbe0430efe"} Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.786963 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-9b8896fdb-7mrlf" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.787687 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-f51d-account-create-update-wwmpb" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.788680 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-psfz5" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.795607 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20cdc3df-1626-4f56-8a92-9df64c1ed2fe-config-data" (OuterVolumeSpecName: "config-data") pod "20cdc3df-1626-4f56-8a92-9df64c1ed2fe" (UID: "20cdc3df-1626-4f56-8a92-9df64c1ed2fe"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.819648 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20cdc3df-1626-4f56-8a92-9df64c1ed2fe-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "20cdc3df-1626-4f56-8a92-9df64c1ed2fe" (UID: "20cdc3df-1626-4f56-8a92-9df64c1ed2fe"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.820034 4558 scope.go:117] "RemoveContainer" containerID="37ed8e0f109228dfcc4e9447b9278994669497b6597de7b6864c0f3e20436236" Jan 20 17:21:37 crc kubenswrapper[4558]: E0120 17:21:37.821386 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"37ed8e0f109228dfcc4e9447b9278994669497b6597de7b6864c0f3e20436236\": container with ID starting with 37ed8e0f109228dfcc4e9447b9278994669497b6597de7b6864c0f3e20436236 not found: ID does not exist" containerID="37ed8e0f109228dfcc4e9447b9278994669497b6597de7b6864c0f3e20436236" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.821419 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"37ed8e0f109228dfcc4e9447b9278994669497b6597de7b6864c0f3e20436236"} err="failed to get container status \"37ed8e0f109228dfcc4e9447b9278994669497b6597de7b6864c0f3e20436236\": rpc error: code = NotFound desc = could not find container \"37ed8e0f109228dfcc4e9447b9278994669497b6597de7b6864c0f3e20436236\": container with ID starting with 37ed8e0f109228dfcc4e9447b9278994669497b6597de7b6864c0f3e20436236 not found: ID does not exist" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.821442 4558 scope.go:117] "RemoveContainer" containerID="e156d2f38d6403388f01acbe07670bd0efccb2c563ca79b7e6061ec9d3596440" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.824142 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-r6qz8"] Jan 20 17:21:37 crc kubenswrapper[4558]: E0120 17:21:37.824692 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e156d2f38d6403388f01acbe07670bd0efccb2c563ca79b7e6061ec9d3596440\": container with ID starting with e156d2f38d6403388f01acbe07670bd0efccb2c563ca79b7e6061ec9d3596440 not found: ID does not exist" containerID="e156d2f38d6403388f01acbe07670bd0efccb2c563ca79b7e6061ec9d3596440" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.824783 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e156d2f38d6403388f01acbe07670bd0efccb2c563ca79b7e6061ec9d3596440"} err="failed to get container status \"e156d2f38d6403388f01acbe07670bd0efccb2c563ca79b7e6061ec9d3596440\": rpc error: code = NotFound desc = could not find container \"e156d2f38d6403388f01acbe07670bd0efccb2c563ca79b7e6061ec9d3596440\": container with ID starting with e156d2f38d6403388f01acbe07670bd0efccb2c563ca79b7e6061ec9d3596440 not found: ID does not exist" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.824814 4558 scope.go:117] "RemoveContainer" containerID="fd0522a1018450fa8aaf5f378cfa36bd4a81f9a88e9be2f7d7e743bbb0155ccd" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.830459 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-r6qz8"] Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 
17:21:37.835361 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa03920f-0e4b-458f-956b-b658786f9792-combined-ca-bundle\") pod \"fa03920f-0e4b-458f-956b-b658786f9792\" (UID: \"fa03920f-0e4b-458f-956b-b658786f9792\") " Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.835544 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/fa03920f-0e4b-458f-956b-b658786f9792-galera-tls-certs\") pod \"fa03920f-0e4b-458f-956b-b658786f9792\" (UID: \"fa03920f-0e4b-458f-956b-b658786f9792\") " Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.835579 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"fa03920f-0e4b-458f-956b-b658786f9792\" (UID: \"fa03920f-0e4b-458f-956b-b658786f9792\") " Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.835609 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/fa03920f-0e4b-458f-956b-b658786f9792-config-data-default\") pod \"fa03920f-0e4b-458f-956b-b658786f9792\" (UID: \"fa03920f-0e4b-458f-956b-b658786f9792\") " Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.835630 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fa03920f-0e4b-458f-956b-b658786f9792-operator-scripts\") pod \"fa03920f-0e4b-458f-956b-b658786f9792\" (UID: \"fa03920f-0e4b-458f-956b-b658786f9792\") " Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.835655 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/fa03920f-0e4b-458f-956b-b658786f9792-config-data-generated\") pod \"fa03920f-0e4b-458f-956b-b658786f9792\" (UID: \"fa03920f-0e4b-458f-956b-b658786f9792\") " Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.835699 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/fa03920f-0e4b-458f-956b-b658786f9792-kolla-config\") pod \"fa03920f-0e4b-458f-956b-b658786f9792\" (UID: \"fa03920f-0e4b-458f-956b-b658786f9792\") " Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.835908 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hcxnl\" (UniqueName: \"kubernetes.io/projected/fa03920f-0e4b-458f-956b-b658786f9792-kube-api-access-hcxnl\") pod \"fa03920f-0e4b-458f-956b-b658786f9792\" (UID: \"fa03920f-0e4b-458f-956b-b658786f9792\") " Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.836443 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage17-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage17-crc\") on node \"crc\" " Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.836473 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/20cdc3df-1626-4f56-8a92-9df64c1ed2fe-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.836483 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/20cdc3df-1626-4f56-8a92-9df64c1ed2fe-public-tls-certs\") on node \"crc\" 
DevicePath \"\"" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.836492 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-psk66\" (UniqueName: \"kubernetes.io/projected/20cdc3df-1626-4f56-8a92-9df64c1ed2fe-kube-api-access-psk66\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.836502 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20cdc3df-1626-4f56-8a92-9df64c1ed2fe-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.836511 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/20cdc3df-1626-4f56-8a92-9df64c1ed2fe-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.836519 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/20cdc3df-1626-4f56-8a92-9df64c1ed2fe-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.838110 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fa03920f-0e4b-458f-956b-b658786f9792-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "fa03920f-0e4b-458f-956b-b658786f9792" (UID: "fa03920f-0e4b-458f-956b-b658786f9792"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.838658 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fa03920f-0e4b-458f-956b-b658786f9792-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "fa03920f-0e4b-458f-956b-b658786f9792" (UID: "fa03920f-0e4b-458f-956b-b658786f9792"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.838873 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fa03920f-0e4b-458f-956b-b658786f9792-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "fa03920f-0e4b-458f-956b-b658786f9792" (UID: "fa03920f-0e4b-458f-956b-b658786f9792"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.847254 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fa03920f-0e4b-458f-956b-b658786f9792-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "fa03920f-0e4b-458f-956b-b658786f9792" (UID: "fa03920f-0e4b-458f-956b-b658786f9792"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.857547 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fa03920f-0e4b-458f-956b-b658786f9792-kube-api-access-hcxnl" (OuterVolumeSpecName: "kube-api-access-hcxnl") pod "fa03920f-0e4b-458f-956b-b658786f9792" (UID: "fa03920f-0e4b-458f-956b-b658786f9792"). InnerVolumeSpecName "kube-api-access-hcxnl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.858406 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage17-crc" (UniqueName: "kubernetes.io/local-volume/local-storage17-crc") on node "crc" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.859187 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "mysql-db") pod "fa03920f-0e4b-458f-956b-b658786f9792" (UID: "fa03920f-0e4b-458f-956b-b658786f9792"). InnerVolumeSpecName "local-storage03-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.863865 4558 scope.go:117] "RemoveContainer" containerID="3282788cd0138924b505daecd0b6616ec4d4fb23cc9f1c9f3179db8ba4606bbd" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.883854 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-6b7cbc9658-z4t27"] Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.888224 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa03920f-0e4b-458f-956b-b658786f9792-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fa03920f-0e4b-458f-956b-b658786f9792" (UID: "fa03920f-0e4b-458f-956b-b658786f9792"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.897635 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa03920f-0e4b-458f-956b-b658786f9792-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "fa03920f-0e4b-458f-956b-b658786f9792" (UID: "fa03920f-0e4b-458f-956b-b658786f9792"). InnerVolumeSpecName "galera-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.910709 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/placement-6b7cbc9658-z4t27"] Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.923786 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-f51d-account-create-update-wwmpb"] Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.928141 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-f51d-account-create-update-wwmpb"] Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.933538 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-9b8896fdb-7mrlf"] Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.938407 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-9b8896fdb-7mrlf"] Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.939508 4558 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/fa03920f-0e4b-458f-956b-b658786f9792-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.939573 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" " Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.939589 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/fa03920f-0e4b-458f-956b-b658786f9792-config-data-default\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.939610 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fa03920f-0e4b-458f-956b-b658786f9792-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.939624 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/fa03920f-0e4b-458f-956b-b658786f9792-config-data-generated\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.939636 4558 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/fa03920f-0e4b-458f-956b-b658786f9792-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.939648 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hcxnl\" (UniqueName: \"kubernetes.io/projected/fa03920f-0e4b-458f-956b-b658786f9792-kube-api-access-hcxnl\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.939664 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa03920f-0e4b-458f-956b-b658786f9792-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.939675 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage17-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage17-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.942403 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.946316 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:21:37 crc kubenswrapper[4558]: I0120 17:21:37.956920 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.042366 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/32b13960-8f0f-47f6-b529-37f7a918fcd6-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.042630 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n74cc\" (UniqueName: \"kubernetes.io/projected/32b13960-8f0f-47f6-b529-37f7a918fcd6-kube-api-access-n74cc\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.042812 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.116153 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.118821 4558 scope.go:117] "RemoveContainer" containerID="68c910efbea1cdab426d9f23de2d058f94a3d54c56eccdecb828d1307a55edd1" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.119348 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.140032 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.149590 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.165127 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.180840 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.279192 4558 scope.go:117] "RemoveContainer" containerID="9950c650a695726a02bff29b5e4e3575dd63e33b9799fdca78ec0fa848678043" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.300666 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/openstack-galera-1" podUID="594280a4-cae4-4455-9838-ba4ef8d2f2c1" containerName="galera" containerID="cri-o://f1681cfb95a469724fd742914f90a54418d9a8689ce838c8542616d40276313e" gracePeriod=28 Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.308126 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-795d8f7875-gfh24" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.320084 4558 scope.go:117] "RemoveContainer" containerID="68c910efbea1cdab426d9f23de2d058f94a3d54c56eccdecb828d1307a55edd1" Jan 20 17:21:38 crc kubenswrapper[4558]: E0120 17:21:38.320539 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"68c910efbea1cdab426d9f23de2d058f94a3d54c56eccdecb828d1307a55edd1\": container with ID starting with 68c910efbea1cdab426d9f23de2d058f94a3d54c56eccdecb828d1307a55edd1 not found: ID does not exist" containerID="68c910efbea1cdab426d9f23de2d058f94a3d54c56eccdecb828d1307a55edd1" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.320582 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"68c910efbea1cdab426d9f23de2d058f94a3d54c56eccdecb828d1307a55edd1"} err="failed to get container status \"68c910efbea1cdab426d9f23de2d058f94a3d54c56eccdecb828d1307a55edd1\": rpc error: code = NotFound desc = could not find container \"68c910efbea1cdab426d9f23de2d058f94a3d54c56eccdecb828d1307a55edd1\": container with ID starting with 68c910efbea1cdab426d9f23de2d058f94a3d54c56eccdecb828d1307a55edd1 not found: ID does not exist" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.320614 4558 scope.go:117] "RemoveContainer" containerID="9950c650a695726a02bff29b5e4e3575dd63e33b9799fdca78ec0fa848678043" Jan 20 17:21:38 crc kubenswrapper[4558]: E0120 17:21:38.320871 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9950c650a695726a02bff29b5e4e3575dd63e33b9799fdca78ec0fa848678043\": container with ID starting with 9950c650a695726a02bff29b5e4e3575dd63e33b9799fdca78ec0fa848678043 not found: ID does not exist" containerID="9950c650a695726a02bff29b5e4e3575dd63e33b9799fdca78ec0fa848678043" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.320905 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9950c650a695726a02bff29b5e4e3575dd63e33b9799fdca78ec0fa848678043"} err="failed to get container status \"9950c650a695726a02bff29b5e4e3575dd63e33b9799fdca78ec0fa848678043\": rpc error: code = NotFound desc = could not find container \"9950c650a695726a02bff29b5e4e3575dd63e33b9799fdca78ec0fa848678043\": container with ID starting with 9950c650a695726a02bff29b5e4e3575dd63e33b9799fdca78ec0fa848678043 not found: ID does not exist" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.320919 4558 scope.go:117] "RemoveContainer" containerID="ad7e074652e180a31db91a23980de422d274542b0626f62f61e86a2d3f3088d7" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.351835 4558 scope.go:117] "RemoveContainer" containerID="9a84634fbab19582b6a07c9fe1a1511ad1a285ca37ba97810d1c6c72e31ebe4c" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.357643 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f8ae24b-258e-4d32-b312-99f5015f83d6-combined-ca-bundle\") pod \"2f8ae24b-258e-4d32-b312-99f5015f83d6\" (UID: \"2f8ae24b-258e-4d32-b312-99f5015f83d6\") " Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.357698 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2f8ae24b-258e-4d32-b312-99f5015f83d6-config-data\") pod 
\"2f8ae24b-258e-4d32-b312-99f5015f83d6\" (UID: \"2f8ae24b-258e-4d32-b312-99f5015f83d6\") " Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.357751 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xr484\" (UniqueName: \"kubernetes.io/projected/2f8ae24b-258e-4d32-b312-99f5015f83d6-kube-api-access-xr484\") pod \"2f8ae24b-258e-4d32-b312-99f5015f83d6\" (UID: \"2f8ae24b-258e-4d32-b312-99f5015f83d6\") " Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.357788 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2f8ae24b-258e-4d32-b312-99f5015f83d6-config-data-custom\") pod \"2f8ae24b-258e-4d32-b312-99f5015f83d6\" (UID: \"2f8ae24b-258e-4d32-b312-99f5015f83d6\") " Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.357864 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2f8ae24b-258e-4d32-b312-99f5015f83d6-logs\") pod \"2f8ae24b-258e-4d32-b312-99f5015f83d6\" (UID: \"2f8ae24b-258e-4d32-b312-99f5015f83d6\") " Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.359294 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2f8ae24b-258e-4d32-b312-99f5015f83d6-logs" (OuterVolumeSpecName: "logs") pod "2f8ae24b-258e-4d32-b312-99f5015f83d6" (UID: "2f8ae24b-258e-4d32-b312-99f5015f83d6"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.362340 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2f8ae24b-258e-4d32-b312-99f5015f83d6-kube-api-access-xr484" (OuterVolumeSpecName: "kube-api-access-xr484") pod "2f8ae24b-258e-4d32-b312-99f5015f83d6" (UID: "2f8ae24b-258e-4d32-b312-99f5015f83d6"). InnerVolumeSpecName "kube-api-access-xr484". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.363680 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2f8ae24b-258e-4d32-b312-99f5015f83d6-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "2f8ae24b-258e-4d32-b312-99f5015f83d6" (UID: "2f8ae24b-258e-4d32-b312-99f5015f83d6"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.381336 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2f8ae24b-258e-4d32-b312-99f5015f83d6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2f8ae24b-258e-4d32-b312-99f5015f83d6" (UID: "2f8ae24b-258e-4d32-b312-99f5015f83d6"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.386413 4558 scope.go:117] "RemoveContainer" containerID="ad7e074652e180a31db91a23980de422d274542b0626f62f61e86a2d3f3088d7" Jan 20 17:21:38 crc kubenswrapper[4558]: E0120 17:21:38.387216 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ad7e074652e180a31db91a23980de422d274542b0626f62f61e86a2d3f3088d7\": container with ID starting with ad7e074652e180a31db91a23980de422d274542b0626f62f61e86a2d3f3088d7 not found: ID does not exist" containerID="ad7e074652e180a31db91a23980de422d274542b0626f62f61e86a2d3f3088d7" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.387274 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ad7e074652e180a31db91a23980de422d274542b0626f62f61e86a2d3f3088d7"} err="failed to get container status \"ad7e074652e180a31db91a23980de422d274542b0626f62f61e86a2d3f3088d7\": rpc error: code = NotFound desc = could not find container \"ad7e074652e180a31db91a23980de422d274542b0626f62f61e86a2d3f3088d7\": container with ID starting with ad7e074652e180a31db91a23980de422d274542b0626f62f61e86a2d3f3088d7 not found: ID does not exist" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.387305 4558 scope.go:117] "RemoveContainer" containerID="9a84634fbab19582b6a07c9fe1a1511ad1a285ca37ba97810d1c6c72e31ebe4c" Jan 20 17:21:38 crc kubenswrapper[4558]: E0120 17:21:38.387650 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9a84634fbab19582b6a07c9fe1a1511ad1a285ca37ba97810d1c6c72e31ebe4c\": container with ID starting with 9a84634fbab19582b6a07c9fe1a1511ad1a285ca37ba97810d1c6c72e31ebe4c not found: ID does not exist" containerID="9a84634fbab19582b6a07c9fe1a1511ad1a285ca37ba97810d1c6c72e31ebe4c" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.387673 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9a84634fbab19582b6a07c9fe1a1511ad1a285ca37ba97810d1c6c72e31ebe4c"} err="failed to get container status \"9a84634fbab19582b6a07c9fe1a1511ad1a285ca37ba97810d1c6c72e31ebe4c\": rpc error: code = NotFound desc = could not find container \"9a84634fbab19582b6a07c9fe1a1511ad1a285ca37ba97810d1c6c72e31ebe4c\": container with ID starting with 9a84634fbab19582b6a07c9fe1a1511ad1a285ca37ba97810d1c6c72e31ebe4c not found: ID does not exist" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.387688 4558 scope.go:117] "RemoveContainer" containerID="6ace6c8a650ab1df4d1b142fa5877db45863c733c61ee810479ce811a6bf28d2" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.400349 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2f8ae24b-258e-4d32-b312-99f5015f83d6-config-data" (OuterVolumeSpecName: "config-data") pod "2f8ae24b-258e-4d32-b312-99f5015f83d6" (UID: "2f8ae24b-258e-4d32-b312-99f5015f83d6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.409030 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstack-cell1-galera-1" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.416146 4558 scope.go:117] "RemoveContainer" containerID="9c3bcaf0b0b1fe796f0f39a396e91571e66a0d2ff4f33099f19f4d03733e3f3c" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.437920 4558 scope.go:117] "RemoveContainer" containerID="6ace6c8a650ab1df4d1b142fa5877db45863c733c61ee810479ce811a6bf28d2" Jan 20 17:21:38 crc kubenswrapper[4558]: E0120 17:21:38.439945 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6ace6c8a650ab1df4d1b142fa5877db45863c733c61ee810479ce811a6bf28d2\": container with ID starting with 6ace6c8a650ab1df4d1b142fa5877db45863c733c61ee810479ce811a6bf28d2 not found: ID does not exist" containerID="6ace6c8a650ab1df4d1b142fa5877db45863c733c61ee810479ce811a6bf28d2" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.439973 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6ace6c8a650ab1df4d1b142fa5877db45863c733c61ee810479ce811a6bf28d2"} err="failed to get container status \"6ace6c8a650ab1df4d1b142fa5877db45863c733c61ee810479ce811a6bf28d2\": rpc error: code = NotFound desc = could not find container \"6ace6c8a650ab1df4d1b142fa5877db45863c733c61ee810479ce811a6bf28d2\": container with ID starting with 6ace6c8a650ab1df4d1b142fa5877db45863c733c61ee810479ce811a6bf28d2 not found: ID does not exist" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.439991 4558 scope.go:117] "RemoveContainer" containerID="9c3bcaf0b0b1fe796f0f39a396e91571e66a0d2ff4f33099f19f4d03733e3f3c" Jan 20 17:21:38 crc kubenswrapper[4558]: E0120 17:21:38.440215 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9c3bcaf0b0b1fe796f0f39a396e91571e66a0d2ff4f33099f19f4d03733e3f3c\": container with ID starting with 9c3bcaf0b0b1fe796f0f39a396e91571e66a0d2ff4f33099f19f4d03733e3f3c not found: ID does not exist" containerID="9c3bcaf0b0b1fe796f0f39a396e91571e66a0d2ff4f33099f19f4d03733e3f3c" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.440232 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9c3bcaf0b0b1fe796f0f39a396e91571e66a0d2ff4f33099f19f4d03733e3f3c"} err="failed to get container status \"9c3bcaf0b0b1fe796f0f39a396e91571e66a0d2ff4f33099f19f4d03733e3f3c\": rpc error: code = NotFound desc = could not find container \"9c3bcaf0b0b1fe796f0f39a396e91571e66a0d2ff4f33099f19f4d03733e3f3c\": container with ID starting with 9c3bcaf0b0b1fe796f0f39a396e91571e66a0d2ff4f33099f19f4d03733e3f3c not found: ID does not exist" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.440244 4558 scope.go:117] "RemoveContainer" containerID="065345d0e077b3bb2b2e67f7ed62c2413c1984c254d80dff447c7a5693b57103" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.450800 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/barbican-api-7b899ccb8d-cfqnl" podUID="14af4a81-8568-425c-a209-4d90a042c365" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.0.174:9311/healthcheck\": read tcp 10.217.0.2:57002->10.217.0.174:9311: read: connection reset by peer" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.450851 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/barbican-api-7b899ccb8d-cfqnl" podUID="14af4a81-8568-425c-a209-4d90a042c365" 
containerName="barbican-api" probeResult="failure" output="Get \"https://10.217.0.174:9311/healthcheck\": read tcp 10.217.0.2:56996->10.217.0.174:9311: read: connection reset by peer" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.459088 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/872ae103-12d2-4686-aa02-4e3375eb510a-kolla-config\") pod \"872ae103-12d2-4686-aa02-4e3375eb510a\" (UID: \"872ae103-12d2-4686-aa02-4e3375eb510a\") " Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.459210 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/872ae103-12d2-4686-aa02-4e3375eb510a-combined-ca-bundle\") pod \"872ae103-12d2-4686-aa02-4e3375eb510a\" (UID: \"872ae103-12d2-4686-aa02-4e3375eb510a\") " Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.459570 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/872ae103-12d2-4686-aa02-4e3375eb510a-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "872ae103-12d2-4686-aa02-4e3375eb510a" (UID: "872ae103-12d2-4686-aa02-4e3375eb510a"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.460670 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage18-crc\") pod \"872ae103-12d2-4686-aa02-4e3375eb510a\" (UID: \"872ae103-12d2-4686-aa02-4e3375eb510a\") " Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.460860 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/872ae103-12d2-4686-aa02-4e3375eb510a-config-data-generated\") pod \"872ae103-12d2-4686-aa02-4e3375eb510a\" (UID: \"872ae103-12d2-4686-aa02-4e3375eb510a\") " Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.460950 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/872ae103-12d2-4686-aa02-4e3375eb510a-config-data-default\") pod \"872ae103-12d2-4686-aa02-4e3375eb510a\" (UID: \"872ae103-12d2-4686-aa02-4e3375eb510a\") " Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.461106 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cnpm2\" (UniqueName: \"kubernetes.io/projected/872ae103-12d2-4686-aa02-4e3375eb510a-kube-api-access-cnpm2\") pod \"872ae103-12d2-4686-aa02-4e3375eb510a\" (UID: \"872ae103-12d2-4686-aa02-4e3375eb510a\") " Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.461503 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/872ae103-12d2-4686-aa02-4e3375eb510a-galera-tls-certs\") pod \"872ae103-12d2-4686-aa02-4e3375eb510a\" (UID: \"872ae103-12d2-4686-aa02-4e3375eb510a\") " Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.461588 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/872ae103-12d2-4686-aa02-4e3375eb510a-operator-scripts\") pod \"872ae103-12d2-4686-aa02-4e3375eb510a\" (UID: \"872ae103-12d2-4686-aa02-4e3375eb510a\") " Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.461820 4558 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/872ae103-12d2-4686-aa02-4e3375eb510a-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "872ae103-12d2-4686-aa02-4e3375eb510a" (UID: "872ae103-12d2-4686-aa02-4e3375eb510a"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.461933 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/872ae103-12d2-4686-aa02-4e3375eb510a-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "872ae103-12d2-4686-aa02-4e3375eb510a" (UID: "872ae103-12d2-4686-aa02-4e3375eb510a"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.462147 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/872ae103-12d2-4686-aa02-4e3375eb510a-config-data-generated\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.462233 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f8ae24b-258e-4d32-b312-99f5015f83d6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.462285 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/872ae103-12d2-4686-aa02-4e3375eb510a-config-data-default\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.462334 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2f8ae24b-258e-4d32-b312-99f5015f83d6-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.462397 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xr484\" (UniqueName: \"kubernetes.io/projected/2f8ae24b-258e-4d32-b312-99f5015f83d6-kube-api-access-xr484\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.462461 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2f8ae24b-258e-4d32-b312-99f5015f83d6-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.462511 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2f8ae24b-258e-4d32-b312-99f5015f83d6-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.462566 4558 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/872ae103-12d2-4686-aa02-4e3375eb510a-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.462526 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/872ae103-12d2-4686-aa02-4e3375eb510a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "872ae103-12d2-4686-aa02-4e3375eb510a" (UID: "872ae103-12d2-4686-aa02-4e3375eb510a"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.464843 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/872ae103-12d2-4686-aa02-4e3375eb510a-kube-api-access-cnpm2" (OuterVolumeSpecName: "kube-api-access-cnpm2") pod "872ae103-12d2-4686-aa02-4e3375eb510a" (UID: "872ae103-12d2-4686-aa02-4e3375eb510a"). InnerVolumeSpecName "kube-api-access-cnpm2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.467282 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage18-crc" (OuterVolumeSpecName: "mysql-db") pod "872ae103-12d2-4686-aa02-4e3375eb510a" (UID: "872ae103-12d2-4686-aa02-4e3375eb510a"). InnerVolumeSpecName "local-storage18-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.467845 4558 scope.go:117] "RemoveContainer" containerID="523536e2b2a85c3c10d4dc392b34b04a3e55effe5a2bcd5562e1367958047b32" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.486774 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/872ae103-12d2-4686-aa02-4e3375eb510a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "872ae103-12d2-4686-aa02-4e3375eb510a" (UID: "872ae103-12d2-4686-aa02-4e3375eb510a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.498702 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/872ae103-12d2-4686-aa02-4e3375eb510a-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "872ae103-12d2-4686-aa02-4e3375eb510a" (UID: "872ae103-12d2-4686-aa02-4e3375eb510a"). InnerVolumeSpecName "galera-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.532823 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/openstack-cell1-galera-0" podUID="23b973c1-70c3-4d60-8b1d-89efda1a5707" containerName="galera" containerID="cri-o://6ab17e2c19352e87a06de8079d4c76b1930887a93bc8b1a819d497e6fd3c41fd" gracePeriod=26 Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.533242 4558 scope.go:117] "RemoveContainer" containerID="065345d0e077b3bb2b2e67f7ed62c2413c1984c254d80dff447c7a5693b57103" Jan 20 17:21:38 crc kubenswrapper[4558]: E0120 17:21:38.534985 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"065345d0e077b3bb2b2e67f7ed62c2413c1984c254d80dff447c7a5693b57103\": container with ID starting with 065345d0e077b3bb2b2e67f7ed62c2413c1984c254d80dff447c7a5693b57103 not found: ID does not exist" containerID="065345d0e077b3bb2b2e67f7ed62c2413c1984c254d80dff447c7a5693b57103" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.535055 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"065345d0e077b3bb2b2e67f7ed62c2413c1984c254d80dff447c7a5693b57103"} err="failed to get container status \"065345d0e077b3bb2b2e67f7ed62c2413c1984c254d80dff447c7a5693b57103\": rpc error: code = NotFound desc = could not find container \"065345d0e077b3bb2b2e67f7ed62c2413c1984c254d80dff447c7a5693b57103\": container with ID starting with 065345d0e077b3bb2b2e67f7ed62c2413c1984c254d80dff447c7a5693b57103 not found: ID does not exist" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.535096 4558 scope.go:117] "RemoveContainer" containerID="523536e2b2a85c3c10d4dc392b34b04a3e55effe5a2bcd5562e1367958047b32" Jan 20 17:21:38 crc kubenswrapper[4558]: E0120 17:21:38.536020 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"523536e2b2a85c3c10d4dc392b34b04a3e55effe5a2bcd5562e1367958047b32\": container with ID starting with 523536e2b2a85c3c10d4dc392b34b04a3e55effe5a2bcd5562e1367958047b32 not found: ID does not exist" containerID="523536e2b2a85c3c10d4dc392b34b04a3e55effe5a2bcd5562e1367958047b32" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.536061 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"523536e2b2a85c3c10d4dc392b34b04a3e55effe5a2bcd5562e1367958047b32"} err="failed to get container status \"523536e2b2a85c3c10d4dc392b34b04a3e55effe5a2bcd5562e1367958047b32\": rpc error: code = NotFound desc = could not find container \"523536e2b2a85c3c10d4dc392b34b04a3e55effe5a2bcd5562e1367958047b32\": container with ID starting with 523536e2b2a85c3c10d4dc392b34b04a3e55effe5a2bcd5562e1367958047b32 not found: ID does not exist" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.536088 4558 scope.go:117] "RemoveContainer" containerID="67f52d695e7843e5e1f00cfd6b4ff76e79ba46c21e0baabc0a21e634dcae0c6e" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.564852 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cnpm2\" (UniqueName: \"kubernetes.io/projected/872ae103-12d2-4686-aa02-4e3375eb510a-kube-api-access-cnpm2\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.564881 4558 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/872ae103-12d2-4686-aa02-4e3375eb510a-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.564893 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/872ae103-12d2-4686-aa02-4e3375eb510a-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.564904 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/872ae103-12d2-4686-aa02-4e3375eb510a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.564936 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage18-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage18-crc\") on node \"crc\" " Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.565722 4558 scope.go:117] "RemoveContainer" containerID="c97f19d9a03644214f7c3a4eb6b798453f97f1b12067ab4dab1050a2bc364779" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.575056 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09d95674-9970-471d-8061-997e69ddda11" path="/var/lib/kubelet/pods/09d95674-9970-471d-8061-997e69ddda11/volumes" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.575839 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20cdc3df-1626-4f56-8a92-9df64c1ed2fe" path="/var/lib/kubelet/pods/20cdc3df-1626-4f56-8a92-9df64c1ed2fe/volumes" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.576638 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e48519-09af-42b4-835a-485273267aca" path="/var/lib/kubelet/pods/25e48519-09af-42b4-835a-485273267aca/volumes" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.577464 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="32b13960-8f0f-47f6-b529-37f7a918fcd6" path="/var/lib/kubelet/pods/32b13960-8f0f-47f6-b529-37f7a918fcd6/volumes" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.577843 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4eb93277-301f-486e-a1e9-f323f9bd9cb4" path="/var/lib/kubelet/pods/4eb93277-301f-486e-a1e9-f323f9bd9cb4/volumes" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.578426 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="52e65b62-cbeb-475e-b3ec-2a6259c44b25" path="/var/lib/kubelet/pods/52e65b62-cbeb-475e-b3ec-2a6259c44b25/volumes" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.578912 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="55138e51-715c-42a1-8e1d-bca65a31d55c" path="/var/lib/kubelet/pods/55138e51-715c-42a1-8e1d-bca65a31d55c/volumes" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.580049 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="88418ec6-c589-463f-b8ec-c12464810c07" path="/var/lib/kubelet/pods/88418ec6-c589-463f-b8ec-c12464810c07/volumes" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.580129 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage18-crc" (UniqueName: "kubernetes.io/local-volume/local-storage18-crc") on node "crc" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.580770 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d974489-32f7-4541-9a58-6a215c5d2071" 
path="/var/lib/kubelet/pods/9d974489-32f7-4541-9a58-6a215c5d2071/volumes" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.581444 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a948066b-fa95-4fbb-83e6-6f26f6c76652" path="/var/lib/kubelet/pods/a948066b-fa95-4fbb-83e6-6f26f6c76652/volumes" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.582500 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e72083f2-d324-432e-9bb0-5f44f2023489" path="/var/lib/kubelet/pods/e72083f2-d324-432e-9bb0-5f44f2023489/volumes" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.583230 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f07ba882-d06f-4937-ac0a-e66fcd719b8d" path="/var/lib/kubelet/pods/f07ba882-d06f-4937-ac0a-e66fcd719b8d/volumes" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.583794 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f2c69e78-4cbd-41a8-810d-bdc4f56cabfd" path="/var/lib/kubelet/pods/f2c69e78-4cbd-41a8-810d-bdc4f56cabfd/volumes" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.584697 4558 scope.go:117] "RemoveContainer" containerID="67f52d695e7843e5e1f00cfd6b4ff76e79ba46c21e0baabc0a21e634dcae0c6e" Jan 20 17:21:38 crc kubenswrapper[4558]: E0120 17:21:38.585018 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"67f52d695e7843e5e1f00cfd6b4ff76e79ba46c21e0baabc0a21e634dcae0c6e\": container with ID starting with 67f52d695e7843e5e1f00cfd6b4ff76e79ba46c21e0baabc0a21e634dcae0c6e not found: ID does not exist" containerID="67f52d695e7843e5e1f00cfd6b4ff76e79ba46c21e0baabc0a21e634dcae0c6e" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.585061 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"67f52d695e7843e5e1f00cfd6b4ff76e79ba46c21e0baabc0a21e634dcae0c6e"} err="failed to get container status \"67f52d695e7843e5e1f00cfd6b4ff76e79ba46c21e0baabc0a21e634dcae0c6e\": rpc error: code = NotFound desc = could not find container \"67f52d695e7843e5e1f00cfd6b4ff76e79ba46c21e0baabc0a21e634dcae0c6e\": container with ID starting with 67f52d695e7843e5e1f00cfd6b4ff76e79ba46c21e0baabc0a21e634dcae0c6e not found: ID does not exist" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.585092 4558 scope.go:117] "RemoveContainer" containerID="c97f19d9a03644214f7c3a4eb6b798453f97f1b12067ab4dab1050a2bc364779" Jan 20 17:21:38 crc kubenswrapper[4558]: E0120 17:21:38.585421 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c97f19d9a03644214f7c3a4eb6b798453f97f1b12067ab4dab1050a2bc364779\": container with ID starting with c97f19d9a03644214f7c3a4eb6b798453f97f1b12067ab4dab1050a2bc364779 not found: ID does not exist" containerID="c97f19d9a03644214f7c3a4eb6b798453f97f1b12067ab4dab1050a2bc364779" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.585455 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c97f19d9a03644214f7c3a4eb6b798453f97f1b12067ab4dab1050a2bc364779"} err="failed to get container status \"c97f19d9a03644214f7c3a4eb6b798453f97f1b12067ab4dab1050a2bc364779\": rpc error: code = NotFound desc = could not find container \"c97f19d9a03644214f7c3a4eb6b798453f97f1b12067ab4dab1050a2bc364779\": container with ID starting with c97f19d9a03644214f7c3a4eb6b798453f97f1b12067ab4dab1050a2bc364779 not found: ID does not exist" Jan 20 
17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.667901 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage18-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage18-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.778349 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-api-7b899ccb8d-cfqnl" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.807735 4558 generic.go:334] "Generic (PLEG): container finished" podID="14af4a81-8568-425c-a209-4d90a042c365" containerID="e46adee8ea97e0c6d658d436cc56ebc7b96b6a0ab6ea4685475eaa56dee6f641" exitCode=0 Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.807794 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-7b899ccb8d-cfqnl" event={"ID":"14af4a81-8568-425c-a209-4d90a042c365","Type":"ContainerDied","Data":"e46adee8ea97e0c6d658d436cc56ebc7b96b6a0ab6ea4685475eaa56dee6f641"} Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.807822 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-7b899ccb8d-cfqnl" event={"ID":"14af4a81-8568-425c-a209-4d90a042c365","Type":"ContainerDied","Data":"b742b24d2dcfdeae6c57c3ce7694ebef5ce4136a958d26792057a3321f96adce"} Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.807842 4558 scope.go:117] "RemoveContainer" containerID="e46adee8ea97e0c6d658d436cc56ebc7b96b6a0ab6ea4685475eaa56dee6f641" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.807942 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-api-7b899ccb8d-cfqnl" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.819290 4558 generic.go:334] "Generic (PLEG): container finished" podID="2f8ae24b-258e-4d32-b312-99f5015f83d6" containerID="454a41b471f0eb3acc32e253ce4e5b3a99599e9feb264d9effdd29c6859dd9d0" exitCode=0 Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.819334 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-795d8f7875-gfh24" event={"ID":"2f8ae24b-258e-4d32-b312-99f5015f83d6","Type":"ContainerDied","Data":"454a41b471f0eb3acc32e253ce4e5b3a99599e9feb264d9effdd29c6859dd9d0"} Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.819353 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-795d8f7875-gfh24" event={"ID":"2f8ae24b-258e-4d32-b312-99f5015f83d6","Type":"ContainerDied","Data":"afd7329580125fa4f2079f9103592be41cb2ac5fbf2ed5b7c2441396f114a014"} Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.819400 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-795d8f7875-gfh24" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.832113 4558 scope.go:117] "RemoveContainer" containerID="c09143ce6ec1936f6f2c98c148ac4afc51f7b093ad7e905e871ff92ac19dfd9f" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.833974 4558 generic.go:334] "Generic (PLEG): container finished" podID="872ae103-12d2-4686-aa02-4e3375eb510a" containerID="3c937b5e3f5d38edf270dee165b67ae6bdbf0a55f6bf5b7f0a56a325dd63cec2" exitCode=0 Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.834017 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-1" event={"ID":"872ae103-12d2-4686-aa02-4e3375eb510a","Type":"ContainerDied","Data":"3c937b5e3f5d38edf270dee165b67ae6bdbf0a55f6bf5b7f0a56a325dd63cec2"} Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.834034 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-1" event={"ID":"872ae103-12d2-4686-aa02-4e3375eb510a","Type":"ContainerDied","Data":"19d18e73c4dbcda4d915f58c461cac55bf1cd1ec122ac77e249168c20919b44d"} Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.834179 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-cell1-galera-1" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.844329 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-worker-795d8f7875-gfh24"] Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.850993 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-2" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.854322 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-2" event={"ID":"fa03920f-0e4b-458f-956b-b658786f9792","Type":"ContainerDied","Data":"55e6203b41a29a47da221f4002de5a11842c8aaeac5ff40cf713a78b5aace538"} Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.854389 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-worker-795d8f7875-gfh24"] Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.868771 4558 scope.go:117] "RemoveContainer" containerID="e46adee8ea97e0c6d658d436cc56ebc7b96b6a0ab6ea4685475eaa56dee6f641" Jan 20 17:21:38 crc kubenswrapper[4558]: E0120 17:21:38.870070 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e46adee8ea97e0c6d658d436cc56ebc7b96b6a0ab6ea4685475eaa56dee6f641\": container with ID starting with e46adee8ea97e0c6d658d436cc56ebc7b96b6a0ab6ea4685475eaa56dee6f641 not found: ID does not exist" containerID="e46adee8ea97e0c6d658d436cc56ebc7b96b6a0ab6ea4685475eaa56dee6f641" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.870105 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e46adee8ea97e0c6d658d436cc56ebc7b96b6a0ab6ea4685475eaa56dee6f641"} err="failed to get container status \"e46adee8ea97e0c6d658d436cc56ebc7b96b6a0ab6ea4685475eaa56dee6f641\": rpc error: code = NotFound desc = could not find container \"e46adee8ea97e0c6d658d436cc56ebc7b96b6a0ab6ea4685475eaa56dee6f641\": container with ID starting with e46adee8ea97e0c6d658d436cc56ebc7b96b6a0ab6ea4685475eaa56dee6f641 not found: ID does not exist" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.870130 4558 scope.go:117] 
"RemoveContainer" containerID="c09143ce6ec1936f6f2c98c148ac4afc51f7b093ad7e905e871ff92ac19dfd9f" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.871107 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/14af4a81-8568-425c-a209-4d90a042c365-config-data-custom\") pod \"14af4a81-8568-425c-a209-4d90a042c365\" (UID: \"14af4a81-8568-425c-a209-4d90a042c365\") " Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.871417 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14af4a81-8568-425c-a209-4d90a042c365-combined-ca-bundle\") pod \"14af4a81-8568-425c-a209-4d90a042c365\" (UID: \"14af4a81-8568-425c-a209-4d90a042c365\") " Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.871455 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lbgv5\" (UniqueName: \"kubernetes.io/projected/14af4a81-8568-425c-a209-4d90a042c365-kube-api-access-lbgv5\") pod \"14af4a81-8568-425c-a209-4d90a042c365\" (UID: \"14af4a81-8568-425c-a209-4d90a042c365\") " Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.871536 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/14af4a81-8568-425c-a209-4d90a042c365-public-tls-certs\") pod \"14af4a81-8568-425c-a209-4d90a042c365\" (UID: \"14af4a81-8568-425c-a209-4d90a042c365\") " Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.871589 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/14af4a81-8568-425c-a209-4d90a042c365-internal-tls-certs\") pod \"14af4a81-8568-425c-a209-4d90a042c365\" (UID: \"14af4a81-8568-425c-a209-4d90a042c365\") " Jan 20 17:21:38 crc kubenswrapper[4558]: E0120 17:21:38.871864 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c09143ce6ec1936f6f2c98c148ac4afc51f7b093ad7e905e871ff92ac19dfd9f\": container with ID starting with c09143ce6ec1936f6f2c98c148ac4afc51f7b093ad7e905e871ff92ac19dfd9f not found: ID does not exist" containerID="c09143ce6ec1936f6f2c98c148ac4afc51f7b093ad7e905e871ff92ac19dfd9f" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.871916 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c09143ce6ec1936f6f2c98c148ac4afc51f7b093ad7e905e871ff92ac19dfd9f"} err="failed to get container status \"c09143ce6ec1936f6f2c98c148ac4afc51f7b093ad7e905e871ff92ac19dfd9f\": rpc error: code = NotFound desc = could not find container \"c09143ce6ec1936f6f2c98c148ac4afc51f7b093ad7e905e871ff92ac19dfd9f\": container with ID starting with c09143ce6ec1936f6f2c98c148ac4afc51f7b093ad7e905e871ff92ac19dfd9f not found: ID does not exist" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.871954 4558 scope.go:117] "RemoveContainer" containerID="454a41b471f0eb3acc32e253ce4e5b3a99599e9feb264d9effdd29c6859dd9d0" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.872011 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/14af4a81-8568-425c-a209-4d90a042c365-logs\") pod \"14af4a81-8568-425c-a209-4d90a042c365\" (UID: \"14af4a81-8568-425c-a209-4d90a042c365\") " Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.872065 4558 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14af4a81-8568-425c-a209-4d90a042c365-config-data\") pod \"14af4a81-8568-425c-a209-4d90a042c365\" (UID: \"14af4a81-8568-425c-a209-4d90a042c365\") " Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.873486 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/14af4a81-8568-425c-a209-4d90a042c365-logs" (OuterVolumeSpecName: "logs") pod "14af4a81-8568-425c-a209-4d90a042c365" (UID: "14af4a81-8568-425c-a209-4d90a042c365"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.875067 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-1"] Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.879865 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14af4a81-8568-425c-a209-4d90a042c365-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "14af4a81-8568-425c-a209-4d90a042c365" (UID: "14af4a81-8568-425c-a209-4d90a042c365"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.882027 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/14af4a81-8568-425c-a209-4d90a042c365-kube-api-access-lbgv5" (OuterVolumeSpecName: "kube-api-access-lbgv5") pod "14af4a81-8568-425c-a209-4d90a042c365" (UID: "14af4a81-8568-425c-a209-4d90a042c365"). InnerVolumeSpecName "kube-api-access-lbgv5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.883308 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-1"] Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.916207 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14af4a81-8568-425c-a209-4d90a042c365-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "14af4a81-8568-425c-a209-4d90a042c365" (UID: "14af4a81-8568-425c-a209-4d90a042c365"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.923910 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14af4a81-8568-425c-a209-4d90a042c365-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "14af4a81-8568-425c-a209-4d90a042c365" (UID: "14af4a81-8568-425c-a209-4d90a042c365"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.928992 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14af4a81-8568-425c-a209-4d90a042c365-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "14af4a81-8568-425c-a209-4d90a042c365" (UID: "14af4a81-8568-425c-a209-4d90a042c365"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.940064 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14af4a81-8568-425c-a209-4d90a042c365-config-data" (OuterVolumeSpecName: "config-data") pod "14af4a81-8568-425c-a209-4d90a042c365" (UID: "14af4a81-8568-425c-a209-4d90a042c365"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.974783 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14af4a81-8568-425c-a209-4d90a042c365-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:38 crc kubenswrapper[4558]: E0120 17:21:38.974810 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.974879 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lbgv5\" (UniqueName: \"kubernetes.io/projected/14af4a81-8568-425c-a209-4d90a042c365-kube-api-access-lbgv5\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:38 crc kubenswrapper[4558]: E0120 17:21:38.974968 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/6dae93de-b907-44f7-a94c-c691eee0af7f-config-data podName:6dae93de-b907-44f7-a94c-c691eee0af7f nodeName:}" failed. No retries permitted until 2026-01-20 17:21:46.974949142 +0000 UTC m=+2400.735287109 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/6dae93de-b907-44f7-a94c-c691eee0af7f-config-data") pod "rabbitmq-cell1-server-0" (UID: "6dae93de-b907-44f7-a94c-c691eee0af7f") : configmap "rabbitmq-cell1-config-data" not found Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.975042 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/14af4a81-8568-425c-a209-4d90a042c365-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.976034 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/14af4a81-8568-425c-a209-4d90a042c365-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.976434 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/14af4a81-8568-425c-a209-4d90a042c365-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.976459 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14af4a81-8568-425c-a209-4d90a042c365-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:38 crc kubenswrapper[4558]: I0120 17:21:38.976471 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/14af4a81-8568-425c-a209-4d90a042c365-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:39 crc kubenswrapper[4558]: E0120 17:21:39.079663 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Jan 20 17:21:39 crc kubenswrapper[4558]: E0120 17:21:39.079728 4558 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/configmap/344f9f31-8a81-4544-b782-5aa78dfc5cc2-config-data podName:344f9f31-8a81-4544-b782-5aa78dfc5cc2 nodeName:}" failed. No retries permitted until 2026-01-20 17:21:47.079712028 +0000 UTC m=+2400.840049995 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/344f9f31-8a81-4544-b782-5aa78dfc5cc2-config-data") pod "rabbitmq-cell1-server-2" (UID: "344f9f31-8a81-4544-b782-5aa78dfc5cc2") : configmap "rabbitmq-cell1-config-data" not found Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.088200 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-galera-2"] Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.092317 4558 scope.go:117] "RemoveContainer" containerID="daed2ff09c57989ebf417f453b1c97a9e38554550e8b7bcfd620161001b8c813" Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.094809 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/openstack-galera-2"] Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.120061 4558 scope.go:117] "RemoveContainer" containerID="454a41b471f0eb3acc32e253ce4e5b3a99599e9feb264d9effdd29c6859dd9d0" Jan 20 17:21:39 crc kubenswrapper[4558]: E0120 17:21:39.120845 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"454a41b471f0eb3acc32e253ce4e5b3a99599e9feb264d9effdd29c6859dd9d0\": container with ID starting with 454a41b471f0eb3acc32e253ce4e5b3a99599e9feb264d9effdd29c6859dd9d0 not found: ID does not exist" containerID="454a41b471f0eb3acc32e253ce4e5b3a99599e9feb264d9effdd29c6859dd9d0" Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.120880 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"454a41b471f0eb3acc32e253ce4e5b3a99599e9feb264d9effdd29c6859dd9d0"} err="failed to get container status \"454a41b471f0eb3acc32e253ce4e5b3a99599e9feb264d9effdd29c6859dd9d0\": rpc error: code = NotFound desc = could not find container \"454a41b471f0eb3acc32e253ce4e5b3a99599e9feb264d9effdd29c6859dd9d0\": container with ID starting with 454a41b471f0eb3acc32e253ce4e5b3a99599e9feb264d9effdd29c6859dd9d0 not found: ID does not exist" Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.120910 4558 scope.go:117] "RemoveContainer" containerID="daed2ff09c57989ebf417f453b1c97a9e38554550e8b7bcfd620161001b8c813" Jan 20 17:21:39 crc kubenswrapper[4558]: E0120 17:21:39.121120 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"daed2ff09c57989ebf417f453b1c97a9e38554550e8b7bcfd620161001b8c813\": container with ID starting with daed2ff09c57989ebf417f453b1c97a9e38554550e8b7bcfd620161001b8c813 not found: ID does not exist" containerID="daed2ff09c57989ebf417f453b1c97a9e38554550e8b7bcfd620161001b8c813" Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.121143 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"daed2ff09c57989ebf417f453b1c97a9e38554550e8b7bcfd620161001b8c813"} err="failed to get container status \"daed2ff09c57989ebf417f453b1c97a9e38554550e8b7bcfd620161001b8c813\": rpc error: code = NotFound desc = could not find container \"daed2ff09c57989ebf417f453b1c97a9e38554550e8b7bcfd620161001b8c813\": container with ID starting with daed2ff09c57989ebf417f453b1c97a9e38554550e8b7bcfd620161001b8c813 not found: ID does not exist" Jan 20 17:21:39 crc 
kubenswrapper[4558]: I0120 17:21:39.121160 4558 scope.go:117] "RemoveContainer" containerID="3c937b5e3f5d38edf270dee165b67ae6bdbf0a55f6bf5b7f0a56a325dd63cec2" Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.140046 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-api-7b899ccb8d-cfqnl"] Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.145627 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-api-7b899ccb8d-cfqnl"] Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.148300 4558 scope.go:117] "RemoveContainer" containerID="7481a1fc56ba9d6433e79ef32c84d26f8ce7244725116f9bc5912bfab8876618" Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.170070 4558 scope.go:117] "RemoveContainer" containerID="3c937b5e3f5d38edf270dee165b67ae6bdbf0a55f6bf5b7f0a56a325dd63cec2" Jan 20 17:21:39 crc kubenswrapper[4558]: E0120 17:21:39.170446 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3c937b5e3f5d38edf270dee165b67ae6bdbf0a55f6bf5b7f0a56a325dd63cec2\": container with ID starting with 3c937b5e3f5d38edf270dee165b67ae6bdbf0a55f6bf5b7f0a56a325dd63cec2 not found: ID does not exist" containerID="3c937b5e3f5d38edf270dee165b67ae6bdbf0a55f6bf5b7f0a56a325dd63cec2" Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.170479 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3c937b5e3f5d38edf270dee165b67ae6bdbf0a55f6bf5b7f0a56a325dd63cec2"} err="failed to get container status \"3c937b5e3f5d38edf270dee165b67ae6bdbf0a55f6bf5b7f0a56a325dd63cec2\": rpc error: code = NotFound desc = could not find container \"3c937b5e3f5d38edf270dee165b67ae6bdbf0a55f6bf5b7f0a56a325dd63cec2\": container with ID starting with 3c937b5e3f5d38edf270dee165b67ae6bdbf0a55f6bf5b7f0a56a325dd63cec2 not found: ID does not exist" Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.170501 4558 scope.go:117] "RemoveContainer" containerID="7481a1fc56ba9d6433e79ef32c84d26f8ce7244725116f9bc5912bfab8876618" Jan 20 17:21:39 crc kubenswrapper[4558]: E0120 17:21:39.170810 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7481a1fc56ba9d6433e79ef32c84d26f8ce7244725116f9bc5912bfab8876618\": container with ID starting with 7481a1fc56ba9d6433e79ef32c84d26f8ce7244725116f9bc5912bfab8876618 not found: ID does not exist" containerID="7481a1fc56ba9d6433e79ef32c84d26f8ce7244725116f9bc5912bfab8876618" Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.170852 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7481a1fc56ba9d6433e79ef32c84d26f8ce7244725116f9bc5912bfab8876618"} err="failed to get container status \"7481a1fc56ba9d6433e79ef32c84d26f8ce7244725116f9bc5912bfab8876618\": rpc error: code = NotFound desc = could not find container \"7481a1fc56ba9d6433e79ef32c84d26f8ce7244725116f9bc5912bfab8876618\": container with ID starting with 7481a1fc56ba9d6433e79ef32c84d26f8ce7244725116f9bc5912bfab8876618 not found: ID does not exist" Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.170883 4558 scope.go:117] "RemoveContainer" containerID="a62b1eff4cb0e50771699ddb63f1bffd4fb9ff28cbdf146a5a3c121d2a2e02fa" Jan 20 17:21:39 crc kubenswrapper[4558]: E0120 17:21:39.182608 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found 
Jan 20 17:21:39 crc kubenswrapper[4558]: E0120 17:21:39.182692 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/8573a632-84e0-4f80-b811-5646b571c318-config-data podName:8573a632-84e0-4f80-b811-5646b571c318 nodeName:}" failed. No retries permitted until 2026-01-20 17:21:47.1826703 +0000 UTC m=+2400.943008267 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/8573a632-84e0-4f80-b811-5646b571c318-config-data") pod "rabbitmq-cell1-server-1" (UID: "8573a632-84e0-4f80-b811-5646b571c318") : configmap "rabbitmq-cell1-config-data" not found Jan 20 17:21:39 crc kubenswrapper[4558]: E0120 17:21:39.284420 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 20 17:21:39 crc kubenswrapper[4558]: E0120 17:21:39.284473 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/2c809052-d9bb-4982-8271-5b7a9a6f28f9-config-data podName:2c809052-d9bb-4982-8271-5b7a9a6f28f9 nodeName:}" failed. No retries permitted until 2026-01-20 17:21:47.284461877 +0000 UTC m=+2401.044799833 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/2c809052-d9bb-4982-8271-5b7a9a6f28f9-config-data") pod "rabbitmq-server-1" (UID: "2c809052-d9bb-4982-8271-5b7a9a6f28f9") : configmap "rabbitmq-config-data" not found Jan 20 17:21:39 crc kubenswrapper[4558]: E0120 17:21:39.284790 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 20 17:21:39 crc kubenswrapper[4558]: E0120 17:21:39.284827 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/9c52efaf-b737-47bf-9ca1-109a28e19113-config-data podName:9c52efaf-b737-47bf-9ca1-109a28e19113 nodeName:}" failed. No retries permitted until 2026-01-20 17:21:47.284819579 +0000 UTC m=+2401.045157546 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/9c52efaf-b737-47bf-9ca1-109a28e19113-config-data") pod "rabbitmq-server-2" (UID: "9c52efaf-b737-47bf-9ca1-109a28e19113") : configmap "rabbitmq-config-data" not found Jan 20 17:21:39 crc kubenswrapper[4558]: E0120 17:21:39.284857 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 20 17:21:39 crc kubenswrapper[4558]: E0120 17:21:39.284878 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/24114ddb-3b30-42ac-9d61-cfeb15d58728-config-data podName:24114ddb-3b30-42ac-9d61-cfeb15d58728 nodeName:}" failed. No retries permitted until 2026-01-20 17:21:47.284871849 +0000 UTC m=+2401.045209805 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/24114ddb-3b30-42ac-9d61-cfeb15d58728-config-data") pod "rabbitmq-server-0" (UID: "24114ddb-3b30-42ac-9d61-cfeb15d58728") : configmap "rabbitmq-config-data" not found Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.300467 4558 scope.go:117] "RemoveContainer" containerID="39e4d0dccf7dfdba24ce1b4ccfce236f73f6cb1fd65053360ca6d3b551a3e705" Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.313700 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.385661 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/23b973c1-70c3-4d60-8b1d-89efda1a5707-galera-tls-certs\") pod \"23b973c1-70c3-4d60-8b1d-89efda1a5707\" (UID: \"23b973c1-70c3-4d60-8b1d-89efda1a5707\") " Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.386012 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"23b973c1-70c3-4d60-8b1d-89efda1a5707\" (UID: \"23b973c1-70c3-4d60-8b1d-89efda1a5707\") " Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.386092 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/23b973c1-70c3-4d60-8b1d-89efda1a5707-config-data-generated\") pod \"23b973c1-70c3-4d60-8b1d-89efda1a5707\" (UID: \"23b973c1-70c3-4d60-8b1d-89efda1a5707\") " Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.386123 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/23b973c1-70c3-4d60-8b1d-89efda1a5707-kolla-config\") pod \"23b973c1-70c3-4d60-8b1d-89efda1a5707\" (UID: \"23b973c1-70c3-4d60-8b1d-89efda1a5707\") " Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.386181 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/23b973c1-70c3-4d60-8b1d-89efda1a5707-config-data-default\") pod \"23b973c1-70c3-4d60-8b1d-89efda1a5707\" (UID: \"23b973c1-70c3-4d60-8b1d-89efda1a5707\") " Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.386251 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pggcm\" (UniqueName: \"kubernetes.io/projected/23b973c1-70c3-4d60-8b1d-89efda1a5707-kube-api-access-pggcm\") pod \"23b973c1-70c3-4d60-8b1d-89efda1a5707\" (UID: \"23b973c1-70c3-4d60-8b1d-89efda1a5707\") " Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.386324 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23b973c1-70c3-4d60-8b1d-89efda1a5707-combined-ca-bundle\") pod \"23b973c1-70c3-4d60-8b1d-89efda1a5707\" (UID: \"23b973c1-70c3-4d60-8b1d-89efda1a5707\") " Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.386359 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/23b973c1-70c3-4d60-8b1d-89efda1a5707-operator-scripts\") pod \"23b973c1-70c3-4d60-8b1d-89efda1a5707\" (UID: \"23b973c1-70c3-4d60-8b1d-89efda1a5707\") " Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.386414 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/23b973c1-70c3-4d60-8b1d-89efda1a5707-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "23b973c1-70c3-4d60-8b1d-89efda1a5707" (UID: "23b973c1-70c3-4d60-8b1d-89efda1a5707"). InnerVolumeSpecName "config-data-generated". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.386629 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/23b973c1-70c3-4d60-8b1d-89efda1a5707-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "23b973c1-70c3-4d60-8b1d-89efda1a5707" (UID: "23b973c1-70c3-4d60-8b1d-89efda1a5707"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.386759 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/23b973c1-70c3-4d60-8b1d-89efda1a5707-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "23b973c1-70c3-4d60-8b1d-89efda1a5707" (UID: "23b973c1-70c3-4d60-8b1d-89efda1a5707"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.387010 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/23b973c1-70c3-4d60-8b1d-89efda1a5707-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "23b973c1-70c3-4d60-8b1d-89efda1a5707" (UID: "23b973c1-70c3-4d60-8b1d-89efda1a5707"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.387408 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/23b973c1-70c3-4d60-8b1d-89efda1a5707-config-data-generated\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.387432 4558 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/23b973c1-70c3-4d60-8b1d-89efda1a5707-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.387445 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/23b973c1-70c3-4d60-8b1d-89efda1a5707-config-data-default\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.387455 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/23b973c1-70c3-4d60-8b1d-89efda1a5707-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.391244 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/23b973c1-70c3-4d60-8b1d-89efda1a5707-kube-api-access-pggcm" (OuterVolumeSpecName: "kube-api-access-pggcm") pod "23b973c1-70c3-4d60-8b1d-89efda1a5707" (UID: "23b973c1-70c3-4d60-8b1d-89efda1a5707"). InnerVolumeSpecName "kube-api-access-pggcm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.397102 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "mysql-db") pod "23b973c1-70c3-4d60-8b1d-89efda1a5707" (UID: "23b973c1-70c3-4d60-8b1d-89efda1a5707"). InnerVolumeSpecName "local-storage10-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.407976 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23b973c1-70c3-4d60-8b1d-89efda1a5707-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "23b973c1-70c3-4d60-8b1d-89efda1a5707" (UID: "23b973c1-70c3-4d60-8b1d-89efda1a5707"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.426914 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/23b973c1-70c3-4d60-8b1d-89efda1a5707-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "23b973c1-70c3-4d60-8b1d-89efda1a5707" (UID: "23b973c1-70c3-4d60-8b1d-89efda1a5707"). InnerVolumeSpecName "galera-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.489876 4558 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/23b973c1-70c3-4d60-8b1d-89efda1a5707-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.489936 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" " Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.489950 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pggcm\" (UniqueName: \"kubernetes.io/projected/23b973c1-70c3-4d60-8b1d-89efda1a5707-kube-api-access-pggcm\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.489964 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/23b973c1-70c3-4d60-8b1d-89efda1a5707-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.504294 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc" Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.547533 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-5585fd7cd8-gk5qt" Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.591581 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9-config-data\") pod \"ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9\" (UID: \"ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9\") " Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.591647 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9-public-tls-certs\") pod \"ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9\" (UID: \"ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9\") " Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.591725 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9-combined-ca-bundle\") pod \"ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9\" (UID: \"ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9\") " Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.591816 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9-credential-keys\") pod \"ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9\" (UID: \"ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9\") " Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.591856 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t4vhd\" (UniqueName: \"kubernetes.io/projected/ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9-kube-api-access-t4vhd\") pod \"ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9\" (UID: \"ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9\") " Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.591919 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9-internal-tls-certs\") pod \"ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9\" (UID: \"ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9\") " Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.591992 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9-fernet-keys\") pod \"ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9\" (UID: \"ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9\") " Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.592013 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9-scripts\") pod \"ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9\" (UID: \"ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9\") " Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.592914 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.597290 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9" (UID: "ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9"). InnerVolumeSpecName "fernet-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.599050 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9-kube-api-access-t4vhd" (OuterVolumeSpecName: "kube-api-access-t4vhd") pod "ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9" (UID: "ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9"). InnerVolumeSpecName "kube-api-access-t4vhd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.601673 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9-scripts" (OuterVolumeSpecName: "scripts") pod "ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9" (UID: "ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.602803 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9" (UID: "ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.619478 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9" (UID: "ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.636615 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9-config-data" (OuterVolumeSpecName: "config-data") pod "ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9" (UID: "ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.645127 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9" (UID: "ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.650507 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9" (UID: "ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.694047 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.694073 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.694088 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.694100 4558 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.694110 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t4vhd\" (UniqueName: \"kubernetes.io/projected/ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9-kube-api-access-t4vhd\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.694119 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.694128 4558 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.694137 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.696865 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-cell1-server-2" Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.795757 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/344f9f31-8a81-4544-b782-5aa78dfc5cc2-server-conf\") pod \"344f9f31-8a81-4544-b782-5aa78dfc5cc2\" (UID: \"344f9f31-8a81-4544-b782-5aa78dfc5cc2\") " Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.795868 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/344f9f31-8a81-4544-b782-5aa78dfc5cc2-rabbitmq-plugins\") pod \"344f9f31-8a81-4544-b782-5aa78dfc5cc2\" (UID: \"344f9f31-8a81-4544-b782-5aa78dfc5cc2\") " Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.795922 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/344f9f31-8a81-4544-b782-5aa78dfc5cc2-rabbitmq-confd\") pod \"344f9f31-8a81-4544-b782-5aa78dfc5cc2\" (UID: \"344f9f31-8a81-4544-b782-5aa78dfc5cc2\") " Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.796026 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/344f9f31-8a81-4544-b782-5aa78dfc5cc2-plugins-conf\") pod \"344f9f31-8a81-4544-b782-5aa78dfc5cc2\" (UID: \"344f9f31-8a81-4544-b782-5aa78dfc5cc2\") " Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.796054 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/344f9f31-8a81-4544-b782-5aa78dfc5cc2-erlang-cookie-secret\") pod \"344f9f31-8a81-4544-b782-5aa78dfc5cc2\" (UID: \"344f9f31-8a81-4544-b782-5aa78dfc5cc2\") " Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.796084 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"344f9f31-8a81-4544-b782-5aa78dfc5cc2\" (UID: \"344f9f31-8a81-4544-b782-5aa78dfc5cc2\") " Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.796147 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/344f9f31-8a81-4544-b782-5aa78dfc5cc2-pod-info\") pod \"344f9f31-8a81-4544-b782-5aa78dfc5cc2\" (UID: \"344f9f31-8a81-4544-b782-5aa78dfc5cc2\") " Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.796267 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zxlm2\" (UniqueName: \"kubernetes.io/projected/344f9f31-8a81-4544-b782-5aa78dfc5cc2-kube-api-access-zxlm2\") pod \"344f9f31-8a81-4544-b782-5aa78dfc5cc2\" (UID: \"344f9f31-8a81-4544-b782-5aa78dfc5cc2\") " Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.796407 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/344f9f31-8a81-4544-b782-5aa78dfc5cc2-config-data\") pod \"344f9f31-8a81-4544-b782-5aa78dfc5cc2\" (UID: \"344f9f31-8a81-4544-b782-5aa78dfc5cc2\") " Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.796441 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/344f9f31-8a81-4544-b782-5aa78dfc5cc2-rabbitmq-erlang-cookie\") pod \"344f9f31-8a81-4544-b782-5aa78dfc5cc2\" 
(UID: \"344f9f31-8a81-4544-b782-5aa78dfc5cc2\") " Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.796554 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/344f9f31-8a81-4544-b782-5aa78dfc5cc2-rabbitmq-tls\") pod \"344f9f31-8a81-4544-b782-5aa78dfc5cc2\" (UID: \"344f9f31-8a81-4544-b782-5aa78dfc5cc2\") " Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.798005 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/344f9f31-8a81-4544-b782-5aa78dfc5cc2-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "344f9f31-8a81-4544-b782-5aa78dfc5cc2" (UID: "344f9f31-8a81-4544-b782-5aa78dfc5cc2"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.799000 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/344f9f31-8a81-4544-b782-5aa78dfc5cc2-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "344f9f31-8a81-4544-b782-5aa78dfc5cc2" (UID: "344f9f31-8a81-4544-b782-5aa78dfc5cc2"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.799779 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/344f9f31-8a81-4544-b782-5aa78dfc5cc2-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "344f9f31-8a81-4544-b782-5aa78dfc5cc2" (UID: "344f9f31-8a81-4544-b782-5aa78dfc5cc2"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.802421 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/344f9f31-8a81-4544-b782-5aa78dfc5cc2-kube-api-access-zxlm2" (OuterVolumeSpecName: "kube-api-access-zxlm2") pod "344f9f31-8a81-4544-b782-5aa78dfc5cc2" (UID: "344f9f31-8a81-4544-b782-5aa78dfc5cc2"). InnerVolumeSpecName "kube-api-access-zxlm2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.802496 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/344f9f31-8a81-4544-b782-5aa78dfc5cc2-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "344f9f31-8a81-4544-b782-5aa78dfc5cc2" (UID: "344f9f31-8a81-4544-b782-5aa78dfc5cc2"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.804642 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/344f9f31-8a81-4544-b782-5aa78dfc5cc2-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "344f9f31-8a81-4544-b782-5aa78dfc5cc2" (UID: "344f9f31-8a81-4544-b782-5aa78dfc5cc2"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.806517 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/344f9f31-8a81-4544-b782-5aa78dfc5cc2-pod-info" (OuterVolumeSpecName: "pod-info") pod "344f9f31-8a81-4544-b782-5aa78dfc5cc2" (UID: "344f9f31-8a81-4544-b782-5aa78dfc5cc2"). InnerVolumeSpecName "pod-info". 
PluginName "kubernetes.io/downward-api", VolumeGidValue "" Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.816737 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "persistence") pod "344f9f31-8a81-4544-b782-5aa78dfc5cc2" (UID: "344f9f31-8a81-4544-b782-5aa78dfc5cc2"). InnerVolumeSpecName "local-storage04-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.819883 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/344f9f31-8a81-4544-b782-5aa78dfc5cc2-config-data" (OuterVolumeSpecName: "config-data") pod "344f9f31-8a81-4544-b782-5aa78dfc5cc2" (UID: "344f9f31-8a81-4544-b782-5aa78dfc5cc2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.832685 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/344f9f31-8a81-4544-b782-5aa78dfc5cc2-server-conf" (OuterVolumeSpecName: "server-conf") pod "344f9f31-8a81-4544-b782-5aa78dfc5cc2" (UID: "344f9f31-8a81-4544-b782-5aa78dfc5cc2"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.869254 4558 generic.go:334] "Generic (PLEG): container finished" podID="8573a632-84e0-4f80-b811-5646b571c318" containerID="c5f31a0dc1f900cc5cbc94b222647ecbd6198220c8e700e7650325a56366cf4f" exitCode=0 Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.869632 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-1" event={"ID":"8573a632-84e0-4f80-b811-5646b571c318","Type":"ContainerDied","Data":"c5f31a0dc1f900cc5cbc94b222647ecbd6198220c8e700e7650325a56366cf4f"} Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.871670 4558 generic.go:334] "Generic (PLEG): container finished" podID="23b973c1-70c3-4d60-8b1d-89efda1a5707" containerID="6ab17e2c19352e87a06de8079d4c76b1930887a93bc8b1a819d497e6fd3c41fd" exitCode=0 Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.871759 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.871719 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"23b973c1-70c3-4d60-8b1d-89efda1a5707","Type":"ContainerDied","Data":"6ab17e2c19352e87a06de8079d4c76b1930887a93bc8b1a819d497e6fd3c41fd"} Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.871909 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"23b973c1-70c3-4d60-8b1d-89efda1a5707","Type":"ContainerDied","Data":"3977a1ac73216f47b9f48d0a4f845cfe8e04acfa669c1c1e86f1bc65da7b4d06"} Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.871997 4558 scope.go:117] "RemoveContainer" containerID="6ab17e2c19352e87a06de8079d4c76b1930887a93bc8b1a819d497e6fd3c41fd" Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.876488 4558 generic.go:334] "Generic (PLEG): container finished" podID="ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9" containerID="b488e52ae9a816a6a1a205b5450c535dd59fdb90d6345dcef8eb1e186a1d90c5" exitCode=0 Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.876557 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-5585fd7cd8-gk5qt" event={"ID":"ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9","Type":"ContainerDied","Data":"b488e52ae9a816a6a1a205b5450c535dd59fdb90d6345dcef8eb1e186a1d90c5"} Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.876562 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-5585fd7cd8-gk5qt" Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.876585 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-5585fd7cd8-gk5qt" event={"ID":"ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9","Type":"ContainerDied","Data":"a9490e0bfb667bbf0d5d2683462bf8e91fca116a71c21c093bc25591a5f2f0e5"} Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.879725 4558 generic.go:334] "Generic (PLEG): container finished" podID="24114ddb-3b30-42ac-9d61-cfeb15d58728" containerID="33a9e1abb20c3e3bf6a51b785efcb0a4eaf6964e1b8392e4977fc9f4c7b03f61" exitCode=0 Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.879848 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-0" event={"ID":"24114ddb-3b30-42ac-9d61-cfeb15d58728","Type":"ContainerDied","Data":"33a9e1abb20c3e3bf6a51b785efcb0a4eaf6964e1b8392e4977fc9f4c7b03f61"} Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.884438 4558 generic.go:334] "Generic (PLEG): container finished" podID="6dae93de-b907-44f7-a94c-c691eee0af7f" containerID="b7c379986cc6aff3b828f77461a78d1169c8a75f30d3abde660227f21701db47" exitCode=0 Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.884510 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" event={"ID":"6dae93de-b907-44f7-a94c-c691eee0af7f","Type":"ContainerDied","Data":"b7c379986cc6aff3b828f77461a78d1169c8a75f30d3abde660227f21701db47"} Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.884550 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" event={"ID":"6dae93de-b907-44f7-a94c-c691eee0af7f","Type":"ContainerDied","Data":"46fa266734e1ff561fe2d977f15dd262b64d422883e461cef06139f76eadc253"} Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.884564 4558 pod_container_deletor.go:80] 
"Container not found in pod's containers" containerID="46fa266734e1ff561fe2d977f15dd262b64d422883e461cef06139f76eadc253" Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.890552 4558 generic.go:334] "Generic (PLEG): container finished" podID="344f9f31-8a81-4544-b782-5aa78dfc5cc2" containerID="3723b282ed54cce183bafc40d9019c56750bb156f55f03ff6df98969d9b7a4c6" exitCode=0 Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.890622 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-2" event={"ID":"344f9f31-8a81-4544-b782-5aa78dfc5cc2","Type":"ContainerDied","Data":"3723b282ed54cce183bafc40d9019c56750bb156f55f03ff6df98969d9b7a4c6"} Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.890632 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-cell1-server-2" Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.890640 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-2" event={"ID":"344f9f31-8a81-4544-b782-5aa78dfc5cc2","Type":"ContainerDied","Data":"1761640eb11c76c260cbae9e59a3c25de6075b8709647ef367e91f713ac7d087"} Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.894325 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/344f9f31-8a81-4544-b782-5aa78dfc5cc2-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "344f9f31-8a81-4544-b782-5aa78dfc5cc2" (UID: "344f9f31-8a81-4544-b782-5aa78dfc5cc2"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.899700 4558 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/344f9f31-8a81-4544-b782-5aa78dfc5cc2-plugins-conf\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.899726 4558 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/344f9f31-8a81-4544-b782-5aa78dfc5cc2-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.899753 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" " Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.899764 4558 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/344f9f31-8a81-4544-b782-5aa78dfc5cc2-pod-info\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.899775 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zxlm2\" (UniqueName: \"kubernetes.io/projected/344f9f31-8a81-4544-b782-5aa78dfc5cc2-kube-api-access-zxlm2\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.899784 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/344f9f31-8a81-4544-b782-5aa78dfc5cc2-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.899793 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/344f9f31-8a81-4544-b782-5aa78dfc5cc2-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:39 crc 
kubenswrapper[4558]: I0120 17:21:39.899802 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/344f9f31-8a81-4544-b782-5aa78dfc5cc2-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.899810 4558 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/344f9f31-8a81-4544-b782-5aa78dfc5cc2-server-conf\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.899820 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/344f9f31-8a81-4544-b782-5aa78dfc5cc2-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.899828 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/344f9f31-8a81-4544-b782-5aa78dfc5cc2-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:39 crc kubenswrapper[4558]: I0120 17:21:39.932147 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.002777 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.270698 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.292631 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-cell1-server-1" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.303308 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.311664 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"6dae93de-b907-44f7-a94c-c691eee0af7f\" (UID: \"6dae93de-b907-44f7-a94c-c691eee0af7f\") " Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.311806 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/6dae93de-b907-44f7-a94c-c691eee0af7f-erlang-cookie-secret\") pod \"6dae93de-b907-44f7-a94c-c691eee0af7f\" (UID: \"6dae93de-b907-44f7-a94c-c691eee0af7f\") " Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.311931 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/6dae93de-b907-44f7-a94c-c691eee0af7f-pod-info\") pod \"6dae93de-b907-44f7-a94c-c691eee0af7f\" (UID: \"6dae93de-b907-44f7-a94c-c691eee0af7f\") " Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.311958 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/6dae93de-b907-44f7-a94c-c691eee0af7f-rabbitmq-tls\") pod \"6dae93de-b907-44f7-a94c-c691eee0af7f\" (UID: \"6dae93de-b907-44f7-a94c-c691eee0af7f\") " Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.312008 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6dae93de-b907-44f7-a94c-c691eee0af7f-config-data\") pod \"6dae93de-b907-44f7-a94c-c691eee0af7f\" (UID: \"6dae93de-b907-44f7-a94c-c691eee0af7f\") " Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.312051 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vwdm9\" (UniqueName: \"kubernetes.io/projected/6dae93de-b907-44f7-a94c-c691eee0af7f-kube-api-access-vwdm9\") pod \"6dae93de-b907-44f7-a94c-c691eee0af7f\" (UID: \"6dae93de-b907-44f7-a94c-c691eee0af7f\") " Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.312109 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/6dae93de-b907-44f7-a94c-c691eee0af7f-rabbitmq-confd\") pod \"6dae93de-b907-44f7-a94c-c691eee0af7f\" (UID: \"6dae93de-b907-44f7-a94c-c691eee0af7f\") " Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.312766 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/6dae93de-b907-44f7-a94c-c691eee0af7f-rabbitmq-plugins\") pod \"6dae93de-b907-44f7-a94c-c691eee0af7f\" (UID: \"6dae93de-b907-44f7-a94c-c691eee0af7f\") " Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.312810 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/6dae93de-b907-44f7-a94c-c691eee0af7f-plugins-conf\") pod \"6dae93de-b907-44f7-a94c-c691eee0af7f\" (UID: \"6dae93de-b907-44f7-a94c-c691eee0af7f\") " Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.312871 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/6dae93de-b907-44f7-a94c-c691eee0af7f-rabbitmq-erlang-cookie\") pod \"6dae93de-b907-44f7-a94c-c691eee0af7f\" (UID: 
\"6dae93de-b907-44f7-a94c-c691eee0af7f\") " Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.312909 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/6dae93de-b907-44f7-a94c-c691eee0af7f-server-conf\") pod \"6dae93de-b907-44f7-a94c-c691eee0af7f\" (UID: \"6dae93de-b907-44f7-a94c-c691eee0af7f\") " Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.318648 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6dae93de-b907-44f7-a94c-c691eee0af7f-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "6dae93de-b907-44f7-a94c-c691eee0af7f" (UID: "6dae93de-b907-44f7-a94c-c691eee0af7f"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.318751 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6dae93de-b907-44f7-a94c-c691eee0af7f-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "6dae93de-b907-44f7-a94c-c691eee0af7f" (UID: "6dae93de-b907-44f7-a94c-c691eee0af7f"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.319342 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6dae93de-b907-44f7-a94c-c691eee0af7f-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "6dae93de-b907-44f7-a94c-c691eee0af7f" (UID: "6dae93de-b907-44f7-a94c-c691eee0af7f"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.319979 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6dae93de-b907-44f7-a94c-c691eee0af7f-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "6dae93de-b907-44f7-a94c-c691eee0af7f" (UID: "6dae93de-b907-44f7-a94c-c691eee0af7f"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.320579 4558 scope.go:117] "RemoveContainer" containerID="1444085be1f8fd9f3124056fde67f6e43f79544b6e7418777ad2e190d7088fd6" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.323546 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage02-crc" (OuterVolumeSpecName: "persistence") pod "6dae93de-b907-44f7-a94c-c691eee0af7f" (UID: "6dae93de-b907-44f7-a94c-c691eee0af7f"). InnerVolumeSpecName "local-storage02-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.323741 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6dae93de-b907-44f7-a94c-c691eee0af7f-kube-api-access-vwdm9" (OuterVolumeSpecName: "kube-api-access-vwdm9") pod "6dae93de-b907-44f7-a94c-c691eee0af7f" (UID: "6dae93de-b907-44f7-a94c-c691eee0af7f"). InnerVolumeSpecName "kube-api-access-vwdm9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.323890 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/6dae93de-b907-44f7-a94c-c691eee0af7f-pod-info" (OuterVolumeSpecName: "pod-info") pod "6dae93de-b907-44f7-a94c-c691eee0af7f" (UID: "6dae93de-b907-44f7-a94c-c691eee0af7f"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.333517 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6dae93de-b907-44f7-a94c-c691eee0af7f-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "6dae93de-b907-44f7-a94c-c691eee0af7f" (UID: "6dae93de-b907-44f7-a94c-c691eee0af7f"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.361490 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.376872 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.378178 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6dae93de-b907-44f7-a94c-c691eee0af7f-config-data" (OuterVolumeSpecName: "config-data") pod "6dae93de-b907-44f7-a94c-c691eee0af7f" (UID: "6dae93de-b907-44f7-a94c-c691eee0af7f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.392965 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-5585fd7cd8-gk5qt"] Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.397383 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-server-1" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.399146 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-server-2" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.407328 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-5585fd7cd8-gk5qt"] Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.410491 4558 scope.go:117] "RemoveContainer" containerID="6ab17e2c19352e87a06de8079d4c76b1930887a93bc8b1a819d497e6fd3c41fd" Jan 20 17:21:40 crc kubenswrapper[4558]: E0120 17:21:40.410847 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6ab17e2c19352e87a06de8079d4c76b1930887a93bc8b1a819d497e6fd3c41fd\": container with ID starting with 6ab17e2c19352e87a06de8079d4c76b1930887a93bc8b1a819d497e6fd3c41fd not found: ID does not exist" containerID="6ab17e2c19352e87a06de8079d4c76b1930887a93bc8b1a819d497e6fd3c41fd" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.410895 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6ab17e2c19352e87a06de8079d4c76b1930887a93bc8b1a819d497e6fd3c41fd"} err="failed to get container status \"6ab17e2c19352e87a06de8079d4c76b1930887a93bc8b1a819d497e6fd3c41fd\": rpc error: code = NotFound desc = could not find container \"6ab17e2c19352e87a06de8079d4c76b1930887a93bc8b1a819d497e6fd3c41fd\": container with ID starting with 6ab17e2c19352e87a06de8079d4c76b1930887a93bc8b1a819d497e6fd3c41fd not found: ID does not exist" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.410925 4558 scope.go:117] "RemoveContainer" containerID="1444085be1f8fd9f3124056fde67f6e43f79544b6e7418777ad2e190d7088fd6" Jan 20 17:21:40 crc kubenswrapper[4558]: E0120 17:21:40.413719 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1444085be1f8fd9f3124056fde67f6e43f79544b6e7418777ad2e190d7088fd6\": container with ID starting with 1444085be1f8fd9f3124056fde67f6e43f79544b6e7418777ad2e190d7088fd6 not found: ID does not exist" containerID="1444085be1f8fd9f3124056fde67f6e43f79544b6e7418777ad2e190d7088fd6" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.413759 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1444085be1f8fd9f3124056fde67f6e43f79544b6e7418777ad2e190d7088fd6"} err="failed to get container status \"1444085be1f8fd9f3124056fde67f6e43f79544b6e7418777ad2e190d7088fd6\": rpc error: code = NotFound desc = could not find container \"1444085be1f8fd9f3124056fde67f6e43f79544b6e7418777ad2e190d7088fd6\": container with ID starting with 1444085be1f8fd9f3124056fde67f6e43f79544b6e7418777ad2e190d7088fd6 not found: ID does not exist" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.413813 4558 scope.go:117] "RemoveContainer" containerID="b488e52ae9a816a6a1a205b5450c535dd59fdb90d6345dcef8eb1e186a1d90c5" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.414986 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/8573a632-84e0-4f80-b811-5646b571c318-server-conf\") pod \"8573a632-84e0-4f80-b811-5646b571c318\" (UID: \"8573a632-84e0-4f80-b811-5646b571c318\") " Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.415032 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8573a632-84e0-4f80-b811-5646b571c318-config-data\") pod \"8573a632-84e0-4f80-b811-5646b571c318\" (UID: 
\"8573a632-84e0-4f80-b811-5646b571c318\") " Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.415071 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/8573a632-84e0-4f80-b811-5646b571c318-rabbitmq-erlang-cookie\") pod \"8573a632-84e0-4f80-b811-5646b571c318\" (UID: \"8573a632-84e0-4f80-b811-5646b571c318\") " Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.415104 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r8tpl\" (UniqueName: \"kubernetes.io/projected/24114ddb-3b30-42ac-9d61-cfeb15d58728-kube-api-access-r8tpl\") pod \"24114ddb-3b30-42ac-9d61-cfeb15d58728\" (UID: \"24114ddb-3b30-42ac-9d61-cfeb15d58728\") " Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.415124 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/24114ddb-3b30-42ac-9d61-cfeb15d58728-rabbitmq-confd\") pod \"24114ddb-3b30-42ac-9d61-cfeb15d58728\" (UID: \"24114ddb-3b30-42ac-9d61-cfeb15d58728\") " Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.415181 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/24114ddb-3b30-42ac-9d61-cfeb15d58728-server-conf\") pod \"24114ddb-3b30-42ac-9d61-cfeb15d58728\" (UID: \"24114ddb-3b30-42ac-9d61-cfeb15d58728\") " Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.415227 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/24114ddb-3b30-42ac-9d61-cfeb15d58728-pod-info\") pod \"24114ddb-3b30-42ac-9d61-cfeb15d58728\" (UID: \"24114ddb-3b30-42ac-9d61-cfeb15d58728\") " Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.415253 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/24114ddb-3b30-42ac-9d61-cfeb15d58728-rabbitmq-erlang-cookie\") pod \"24114ddb-3b30-42ac-9d61-cfeb15d58728\" (UID: \"24114ddb-3b30-42ac-9d61-cfeb15d58728\") " Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.415267 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/8573a632-84e0-4f80-b811-5646b571c318-rabbitmq-confd\") pod \"8573a632-84e0-4f80-b811-5646b571c318\" (UID: \"8573a632-84e0-4f80-b811-5646b571c318\") " Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.415322 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/8573a632-84e0-4f80-b811-5646b571c318-rabbitmq-tls\") pod \"8573a632-84e0-4f80-b811-5646b571c318\" (UID: \"8573a632-84e0-4f80-b811-5646b571c318\") " Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.415358 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/24114ddb-3b30-42ac-9d61-cfeb15d58728-rabbitmq-plugins\") pod \"24114ddb-3b30-42ac-9d61-cfeb15d58728\" (UID: \"24114ddb-3b30-42ac-9d61-cfeb15d58728\") " Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.415386 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"24114ddb-3b30-42ac-9d61-cfeb15d58728\" (UID: 
\"24114ddb-3b30-42ac-9d61-cfeb15d58728\") " Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.415426 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"8573a632-84e0-4f80-b811-5646b571c318\" (UID: \"8573a632-84e0-4f80-b811-5646b571c318\") " Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.415467 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v9m9h\" (UniqueName: \"kubernetes.io/projected/8573a632-84e0-4f80-b811-5646b571c318-kube-api-access-v9m9h\") pod \"8573a632-84e0-4f80-b811-5646b571c318\" (UID: \"8573a632-84e0-4f80-b811-5646b571c318\") " Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.415484 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/8573a632-84e0-4f80-b811-5646b571c318-plugins-conf\") pod \"8573a632-84e0-4f80-b811-5646b571c318\" (UID: \"8573a632-84e0-4f80-b811-5646b571c318\") " Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.415536 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/8573a632-84e0-4f80-b811-5646b571c318-erlang-cookie-secret\") pod \"8573a632-84e0-4f80-b811-5646b571c318\" (UID: \"8573a632-84e0-4f80-b811-5646b571c318\") " Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.415555 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/24114ddb-3b30-42ac-9d61-cfeb15d58728-erlang-cookie-secret\") pod \"24114ddb-3b30-42ac-9d61-cfeb15d58728\" (UID: \"24114ddb-3b30-42ac-9d61-cfeb15d58728\") " Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.415623 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/24114ddb-3b30-42ac-9d61-cfeb15d58728-config-data\") pod \"24114ddb-3b30-42ac-9d61-cfeb15d58728\" (UID: \"24114ddb-3b30-42ac-9d61-cfeb15d58728\") " Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.415649 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/8573a632-84e0-4f80-b811-5646b571c318-pod-info\") pod \"8573a632-84e0-4f80-b811-5646b571c318\" (UID: \"8573a632-84e0-4f80-b811-5646b571c318\") " Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.415676 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/24114ddb-3b30-42ac-9d61-cfeb15d58728-plugins-conf\") pod \"24114ddb-3b30-42ac-9d61-cfeb15d58728\" (UID: \"24114ddb-3b30-42ac-9d61-cfeb15d58728\") " Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.415690 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/24114ddb-3b30-42ac-9d61-cfeb15d58728-rabbitmq-tls\") pod \"24114ddb-3b30-42ac-9d61-cfeb15d58728\" (UID: \"24114ddb-3b30-42ac-9d61-cfeb15d58728\") " Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.415737 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/8573a632-84e0-4f80-b811-5646b571c318-rabbitmq-plugins\") pod \"8573a632-84e0-4f80-b811-5646b571c318\" (UID: 
\"8573a632-84e0-4f80-b811-5646b571c318\") " Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.416466 4558 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/6dae93de-b907-44f7-a94c-c691eee0af7f-pod-info\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.416489 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/6dae93de-b907-44f7-a94c-c691eee0af7f-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.416499 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6dae93de-b907-44f7-a94c-c691eee0af7f-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.416509 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vwdm9\" (UniqueName: \"kubernetes.io/projected/6dae93de-b907-44f7-a94c-c691eee0af7f-kube-api-access-vwdm9\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.416521 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/6dae93de-b907-44f7-a94c-c691eee0af7f-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.416530 4558 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/6dae93de-b907-44f7-a94c-c691eee0af7f-plugins-conf\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.416539 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/6dae93de-b907-44f7-a94c-c691eee0af7f-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.416562 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" " Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.416572 4558 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/6dae93de-b907-44f7-a94c-c691eee0af7f-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.420347 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/openstack-galera-0" podUID="3abae449-b665-40ba-bdac-ea1e908e1952" containerName="galera" containerID="cri-o://b602058ef33d1b770be4c16f21fd079b2942efdcce4736c8d70e7dae44d7353d" gracePeriod=26 Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.420649 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/24114ddb-3b30-42ac-9d61-cfeb15d58728-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "24114ddb-3b30-42ac-9d61-cfeb15d58728" (UID: "24114ddb-3b30-42ac-9d61-cfeb15d58728"). InnerVolumeSpecName "rabbitmq-plugins". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.420921 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8573a632-84e0-4f80-b811-5646b571c318-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "8573a632-84e0-4f80-b811-5646b571c318" (UID: "8573a632-84e0-4f80-b811-5646b571c318"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.423817 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/24114ddb-3b30-42ac-9d61-cfeb15d58728-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "24114ddb-3b30-42ac-9d61-cfeb15d58728" (UID: "24114ddb-3b30-42ac-9d61-cfeb15d58728"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.427830 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "persistence") pod "8573a632-84e0-4f80-b811-5646b571c318" (UID: "8573a632-84e0-4f80-b811-5646b571c318"). InnerVolumeSpecName "local-storage05-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.427949 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8573a632-84e0-4f80-b811-5646b571c318-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "8573a632-84e0-4f80-b811-5646b571c318" (UID: "8573a632-84e0-4f80-b811-5646b571c318"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.428590 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8573a632-84e0-4f80-b811-5646b571c318-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "8573a632-84e0-4f80-b811-5646b571c318" (UID: "8573a632-84e0-4f80-b811-5646b571c318"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.429176 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/24114ddb-3b30-42ac-9d61-cfeb15d58728-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "24114ddb-3b30-42ac-9d61-cfeb15d58728" (UID: "24114ddb-3b30-42ac-9d61-cfeb15d58728"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.432955 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8573a632-84e0-4f80-b811-5646b571c318-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "8573a632-84e0-4f80-b811-5646b571c318" (UID: "8573a632-84e0-4f80-b811-5646b571c318"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.435597 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-1" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.443048 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/8573a632-84e0-4f80-b811-5646b571c318-pod-info" (OuterVolumeSpecName: "pod-info") pod "8573a632-84e0-4f80-b811-5646b571c318" (UID: "8573a632-84e0-4f80-b811-5646b571c318"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.443479 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/24114ddb-3b30-42ac-9d61-cfeb15d58728-pod-info" (OuterVolumeSpecName: "pod-info") pod "24114ddb-3b30-42ac-9d61-cfeb15d58728" (UID: "24114ddb-3b30-42ac-9d61-cfeb15d58728"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.445006 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8573a632-84e0-4f80-b811-5646b571c318-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "8573a632-84e0-4f80-b811-5646b571c318" (UID: "8573a632-84e0-4f80-b811-5646b571c318"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.445565 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage12-crc" (OuterVolumeSpecName: "persistence") pod "24114ddb-3b30-42ac-9d61-cfeb15d58728" (UID: "24114ddb-3b30-42ac-9d61-cfeb15d58728"). InnerVolumeSpecName "local-storage12-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.449284 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/24114ddb-3b30-42ac-9d61-cfeb15d58728-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "24114ddb-3b30-42ac-9d61-cfeb15d58728" (UID: "24114ddb-3b30-42ac-9d61-cfeb15d58728"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.451208 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-2"] Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.456490 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/24114ddb-3b30-42ac-9d61-cfeb15d58728-kube-api-access-r8tpl" (OuterVolumeSpecName: "kube-api-access-r8tpl") pod "24114ddb-3b30-42ac-9d61-cfeb15d58728" (UID: "24114ddb-3b30-42ac-9d61-cfeb15d58728"). InnerVolumeSpecName "kube-api-access-r8tpl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.460011 4558 scope.go:117] "RemoveContainer" containerID="b488e52ae9a816a6a1a205b5450c535dd59fdb90d6345dcef8eb1e186a1d90c5" Jan 20 17:21:40 crc kubenswrapper[4558]: E0120 17:21:40.460405 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b488e52ae9a816a6a1a205b5450c535dd59fdb90d6345dcef8eb1e186a1d90c5\": container with ID starting with b488e52ae9a816a6a1a205b5450c535dd59fdb90d6345dcef8eb1e186a1d90c5 not found: ID does not exist" containerID="b488e52ae9a816a6a1a205b5450c535dd59fdb90d6345dcef8eb1e186a1d90c5" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.460437 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b488e52ae9a816a6a1a205b5450c535dd59fdb90d6345dcef8eb1e186a1d90c5"} err="failed to get container status \"b488e52ae9a816a6a1a205b5450c535dd59fdb90d6345dcef8eb1e186a1d90c5\": rpc error: code = NotFound desc = could not find container \"b488e52ae9a816a6a1a205b5450c535dd59fdb90d6345dcef8eb1e186a1d90c5\": container with ID starting with b488e52ae9a816a6a1a205b5450c535dd59fdb90d6345dcef8eb1e186a1d90c5 not found: ID does not exist" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.460459 4558 scope.go:117] "RemoveContainer" containerID="3723b282ed54cce183bafc40d9019c56750bb156f55f03ff6df98969d9b7a4c6" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.460560 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/24114ddb-3b30-42ac-9d61-cfeb15d58728-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "24114ddb-3b30-42ac-9d61-cfeb15d58728" (UID: "24114ddb-3b30-42ac-9d61-cfeb15d58728"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.460565 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-2"] Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.460730 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8573a632-84e0-4f80-b811-5646b571c318-kube-api-access-v9m9h" (OuterVolumeSpecName: "kube-api-access-v9m9h") pod "8573a632-84e0-4f80-b811-5646b571c318" (UID: "8573a632-84e0-4f80-b811-5646b571c318"). InnerVolumeSpecName "kube-api-access-v9m9h". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.482255 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8573a632-84e0-4f80-b811-5646b571c318-config-data" (OuterVolumeSpecName: "config-data") pod "8573a632-84e0-4f80-b811-5646b571c318" (UID: "8573a632-84e0-4f80-b811-5646b571c318"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.484285 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage02-crc" (UniqueName: "kubernetes.io/local-volume/local-storage02-crc") on node "crc" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.487077 4558 scope.go:117] "RemoveContainer" containerID="614476c61e78dcb46b1c1d1cf00064d7ee3ffba88582ebadc80626446ffd327e" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.491578 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/24114ddb-3b30-42ac-9d61-cfeb15d58728-config-data" (OuterVolumeSpecName: "config-data") pod "24114ddb-3b30-42ac-9d61-cfeb15d58728" (UID: "24114ddb-3b30-42ac-9d61-cfeb15d58728"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.504244 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6dae93de-b907-44f7-a94c-c691eee0af7f-server-conf" (OuterVolumeSpecName: "server-conf") pod "6dae93de-b907-44f7-a94c-c691eee0af7f" (UID: "6dae93de-b907-44f7-a94c-c691eee0af7f"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.507446 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/24114ddb-3b30-42ac-9d61-cfeb15d58728-server-conf" (OuterVolumeSpecName: "server-conf") pod "24114ddb-3b30-42ac-9d61-cfeb15d58728" (UID: "24114ddb-3b30-42ac-9d61-cfeb15d58728"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.507771 4558 scope.go:117] "RemoveContainer" containerID="3723b282ed54cce183bafc40d9019c56750bb156f55f03ff6df98969d9b7a4c6" Jan 20 17:21:40 crc kubenswrapper[4558]: E0120 17:21:40.508292 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3723b282ed54cce183bafc40d9019c56750bb156f55f03ff6df98969d9b7a4c6\": container with ID starting with 3723b282ed54cce183bafc40d9019c56750bb156f55f03ff6df98969d9b7a4c6 not found: ID does not exist" containerID="3723b282ed54cce183bafc40d9019c56750bb156f55f03ff6df98969d9b7a4c6" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.508375 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3723b282ed54cce183bafc40d9019c56750bb156f55f03ff6df98969d9b7a4c6"} err="failed to get container status \"3723b282ed54cce183bafc40d9019c56750bb156f55f03ff6df98969d9b7a4c6\": rpc error: code = NotFound desc = could not find container \"3723b282ed54cce183bafc40d9019c56750bb156f55f03ff6df98969d9b7a4c6\": container with ID starting with 3723b282ed54cce183bafc40d9019c56750bb156f55f03ff6df98969d9b7a4c6 not found: ID does not exist" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.508466 4558 scope.go:117] "RemoveContainer" containerID="614476c61e78dcb46b1c1d1cf00064d7ee3ffba88582ebadc80626446ffd327e" Jan 20 17:21:40 crc kubenswrapper[4558]: E0120 17:21:40.508783 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"614476c61e78dcb46b1c1d1cf00064d7ee3ffba88582ebadc80626446ffd327e\": container with ID starting with 614476c61e78dcb46b1c1d1cf00064d7ee3ffba88582ebadc80626446ffd327e not found: ID does 
not exist" containerID="614476c61e78dcb46b1c1d1cf00064d7ee3ffba88582ebadc80626446ffd327e" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.508836 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"614476c61e78dcb46b1c1d1cf00064d7ee3ffba88582ebadc80626446ffd327e"} err="failed to get container status \"614476c61e78dcb46b1c1d1cf00064d7ee3ffba88582ebadc80626446ffd327e\": rpc error: code = NotFound desc = could not find container \"614476c61e78dcb46b1c1d1cf00064d7ee3ffba88582ebadc80626446ffd327e\": container with ID starting with 614476c61e78dcb46b1c1d1cf00064d7ee3ffba88582ebadc80626446ffd327e not found: ID does not exist" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.516514 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8573a632-84e0-4f80-b811-5646b571c318-server-conf" (OuterVolumeSpecName: "server-conf") pod "8573a632-84e0-4f80-b811-5646b571c318" (UID: "8573a632-84e0-4f80-b811-5646b571c318"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.517576 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage20-crc\") pod \"9c52efaf-b737-47bf-9ca1-109a28e19113\" (UID: \"9c52efaf-b737-47bf-9ca1-109a28e19113\") " Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.517757 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tkzmj\" (UniqueName: \"kubernetes.io/projected/9c52efaf-b737-47bf-9ca1-109a28e19113-kube-api-access-tkzmj\") pod \"9c52efaf-b737-47bf-9ca1-109a28e19113\" (UID: \"9c52efaf-b737-47bf-9ca1-109a28e19113\") " Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.517840 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rdf2x\" (UniqueName: \"kubernetes.io/projected/594280a4-cae4-4455-9838-ba4ef8d2f2c1-kube-api-access-rdf2x\") pod \"594280a4-cae4-4455-9838-ba4ef8d2f2c1\" (UID: \"594280a4-cae4-4455-9838-ba4ef8d2f2c1\") " Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.517919 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage16-crc\") pod \"594280a4-cae4-4455-9838-ba4ef8d2f2c1\" (UID: \"594280a4-cae4-4455-9838-ba4ef8d2f2c1\") " Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.518007 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/2c809052-d9bb-4982-8271-5b7a9a6f28f9-rabbitmq-erlang-cookie\") pod \"2c809052-d9bb-4982-8271-5b7a9a6f28f9\" (UID: \"2c809052-d9bb-4982-8271-5b7a9a6f28f9\") " Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.518084 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2c809052-d9bb-4982-8271-5b7a9a6f28f9-config-data\") pod \"2c809052-d9bb-4982-8271-5b7a9a6f28f9\" (UID: \"2c809052-d9bb-4982-8271-5b7a9a6f28f9\") " Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.518373 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/2c809052-d9bb-4982-8271-5b7a9a6f28f9-rabbitmq-confd\") pod \"2c809052-d9bb-4982-8271-5b7a9a6f28f9\" (UID: 
\"2c809052-d9bb-4982-8271-5b7a9a6f28f9\") " Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.518455 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/594280a4-cae4-4455-9838-ba4ef8d2f2c1-kolla-config\") pod \"594280a4-cae4-4455-9838-ba4ef8d2f2c1\" (UID: \"594280a4-cae4-4455-9838-ba4ef8d2f2c1\") " Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.518528 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/9c52efaf-b737-47bf-9ca1-109a28e19113-rabbitmq-confd\") pod \"9c52efaf-b737-47bf-9ca1-109a28e19113\" (UID: \"9c52efaf-b737-47bf-9ca1-109a28e19113\") " Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.518593 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9c52efaf-b737-47bf-9ca1-109a28e19113-config-data\") pod \"9c52efaf-b737-47bf-9ca1-109a28e19113\" (UID: \"9c52efaf-b737-47bf-9ca1-109a28e19113\") " Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.518703 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/9c52efaf-b737-47bf-9ca1-109a28e19113-rabbitmq-erlang-cookie\") pod \"9c52efaf-b737-47bf-9ca1-109a28e19113\" (UID: \"9c52efaf-b737-47bf-9ca1-109a28e19113\") " Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.518763 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/594280a4-cae4-4455-9838-ba4ef8d2f2c1-config-data-default\") pod \"594280a4-cae4-4455-9838-ba4ef8d2f2c1\" (UID: \"594280a4-cae4-4455-9838-ba4ef8d2f2c1\") " Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.518841 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"2c809052-d9bb-4982-8271-5b7a9a6f28f9\" (UID: \"2c809052-d9bb-4982-8271-5b7a9a6f28f9\") " Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.518914 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/9c52efaf-b737-47bf-9ca1-109a28e19113-rabbitmq-tls\") pod \"9c52efaf-b737-47bf-9ca1-109a28e19113\" (UID: \"9c52efaf-b737-47bf-9ca1-109a28e19113\") " Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.519018 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/2c809052-d9bb-4982-8271-5b7a9a6f28f9-pod-info\") pod \"2c809052-d9bb-4982-8271-5b7a9a6f28f9\" (UID: \"2c809052-d9bb-4982-8271-5b7a9a6f28f9\") " Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.519095 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/2c809052-d9bb-4982-8271-5b7a9a6f28f9-rabbitmq-plugins\") pod \"2c809052-d9bb-4982-8271-5b7a9a6f28f9\" (UID: \"2c809052-d9bb-4982-8271-5b7a9a6f28f9\") " Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.519186 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/9c52efaf-b737-47bf-9ca1-109a28e19113-erlang-cookie-secret\") pod \"9c52efaf-b737-47bf-9ca1-109a28e19113\" (UID: 
\"9c52efaf-b737-47bf-9ca1-109a28e19113\") " Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.519290 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/9c52efaf-b737-47bf-9ca1-109a28e19113-rabbitmq-plugins\") pod \"9c52efaf-b737-47bf-9ca1-109a28e19113\" (UID: \"9c52efaf-b737-47bf-9ca1-109a28e19113\") " Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.519368 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/2c809052-d9bb-4982-8271-5b7a9a6f28f9-erlang-cookie-secret\") pod \"2c809052-d9bb-4982-8271-5b7a9a6f28f9\" (UID: \"2c809052-d9bb-4982-8271-5b7a9a6f28f9\") " Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.519432 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/594280a4-cae4-4455-9838-ba4ef8d2f2c1-config-data-generated\") pod \"594280a4-cae4-4455-9838-ba4ef8d2f2c1\" (UID: \"594280a4-cae4-4455-9838-ba4ef8d2f2c1\") " Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.519521 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/9c52efaf-b737-47bf-9ca1-109a28e19113-server-conf\") pod \"9c52efaf-b737-47bf-9ca1-109a28e19113\" (UID: \"9c52efaf-b737-47bf-9ca1-109a28e19113\") " Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.519615 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/2c809052-d9bb-4982-8271-5b7a9a6f28f9-server-conf\") pod \"2c809052-d9bb-4982-8271-5b7a9a6f28f9\" (UID: \"2c809052-d9bb-4982-8271-5b7a9a6f28f9\") " Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.519687 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/9c52efaf-b737-47bf-9ca1-109a28e19113-pod-info\") pod \"9c52efaf-b737-47bf-9ca1-109a28e19113\" (UID: \"9c52efaf-b737-47bf-9ca1-109a28e19113\") " Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.519770 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/9c52efaf-b737-47bf-9ca1-109a28e19113-plugins-conf\") pod \"9c52efaf-b737-47bf-9ca1-109a28e19113\" (UID: \"9c52efaf-b737-47bf-9ca1-109a28e19113\") " Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.519849 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/594280a4-cae4-4455-9838-ba4ef8d2f2c1-galera-tls-certs\") pod \"594280a4-cae4-4455-9838-ba4ef8d2f2c1\" (UID: \"594280a4-cae4-4455-9838-ba4ef8d2f2c1\") " Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.519913 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/594280a4-cae4-4455-9838-ba4ef8d2f2c1-operator-scripts\") pod \"594280a4-cae4-4455-9838-ba4ef8d2f2c1\" (UID: \"594280a4-cae4-4455-9838-ba4ef8d2f2c1\") " Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.520038 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/2c809052-d9bb-4982-8271-5b7a9a6f28f9-rabbitmq-tls\") pod \"2c809052-d9bb-4982-8271-5b7a9a6f28f9\" (UID: 
\"2c809052-d9bb-4982-8271-5b7a9a6f28f9\") " Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.520109 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/594280a4-cae4-4455-9838-ba4ef8d2f2c1-combined-ca-bundle\") pod \"594280a4-cae4-4455-9838-ba4ef8d2f2c1\" (UID: \"594280a4-cae4-4455-9838-ba4ef8d2f2c1\") " Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.520227 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/2c809052-d9bb-4982-8271-5b7a9a6f28f9-plugins-conf\") pod \"2c809052-d9bb-4982-8271-5b7a9a6f28f9\" (UID: \"2c809052-d9bb-4982-8271-5b7a9a6f28f9\") " Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.520358 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tb228\" (UniqueName: \"kubernetes.io/projected/2c809052-d9bb-4982-8271-5b7a9a6f28f9-kube-api-access-tb228\") pod \"2c809052-d9bb-4982-8271-5b7a9a6f28f9\" (UID: \"2c809052-d9bb-4982-8271-5b7a9a6f28f9\") " Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.520923 4558 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/8573a632-84e0-4f80-b811-5646b571c318-server-conf\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.520985 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8573a632-84e0-4f80-b811-5646b571c318-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.521046 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/8573a632-84e0-4f80-b811-5646b571c318-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.521099 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r8tpl\" (UniqueName: \"kubernetes.io/projected/24114ddb-3b30-42ac-9d61-cfeb15d58728-kube-api-access-r8tpl\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.521148 4558 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/24114ddb-3b30-42ac-9d61-cfeb15d58728-server-conf\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.521232 4558 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/24114ddb-3b30-42ac-9d61-cfeb15d58728-pod-info\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.521294 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/24114ddb-3b30-42ac-9d61-cfeb15d58728-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.521342 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/8573a632-84e0-4f80-b811-5646b571c318-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.521400 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/24114ddb-3b30-42ac-9d61-cfeb15d58728-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" 
Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.521467 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" " Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.521623 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" " Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.521665 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v9m9h\" (UniqueName: \"kubernetes.io/projected/8573a632-84e0-4f80-b811-5646b571c318-kube-api-access-v9m9h\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.521684 4558 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/8573a632-84e0-4f80-b811-5646b571c318-plugins-conf\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.521699 4558 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/6dae93de-b907-44f7-a94c-c691eee0af7f-server-conf\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.521712 4558 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/8573a632-84e0-4f80-b811-5646b571c318-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.521722 4558 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/24114ddb-3b30-42ac-9d61-cfeb15d58728-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.521737 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.521747 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/24114ddb-3b30-42ac-9d61-cfeb15d58728-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.521760 4558 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/8573a632-84e0-4f80-b811-5646b571c318-pod-info\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.521772 4558 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/24114ddb-3b30-42ac-9d61-cfeb15d58728-plugins-conf\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.521782 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/24114ddb-3b30-42ac-9d61-cfeb15d58728-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.521796 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/8573a632-84e0-4f80-b811-5646b571c318-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.519626 4558 operation_generator.go:803] UnmountVolume.TearDown 
succeeded for volume "kubernetes.io/empty-dir/2c809052-d9bb-4982-8271-5b7a9a6f28f9-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "2c809052-d9bb-4982-8271-5b7a9a6f28f9" (UID: "2c809052-d9bb-4982-8271-5b7a9a6f28f9"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.522348 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/594280a4-cae4-4455-9838-ba4ef8d2f2c1-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "594280a4-cae4-4455-9838-ba4ef8d2f2c1" (UID: "594280a4-cae4-4455-9838-ba4ef8d2f2c1"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.522527 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage20-crc" (OuterVolumeSpecName: "persistence") pod "9c52efaf-b737-47bf-9ca1-109a28e19113" (UID: "9c52efaf-b737-47bf-9ca1-109a28e19113"). InnerVolumeSpecName "local-storage20-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.523519 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c809052-d9bb-4982-8271-5b7a9a6f28f9-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "2c809052-d9bb-4982-8271-5b7a9a6f28f9" (UID: "2c809052-d9bb-4982-8271-5b7a9a6f28f9"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.524022 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/594280a4-cae4-4455-9838-ba4ef8d2f2c1-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "594280a4-cae4-4455-9838-ba4ef8d2f2c1" (UID: "594280a4-cae4-4455-9838-ba4ef8d2f2c1"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.524050 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/594280a4-cae4-4455-9838-ba4ef8d2f2c1-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "594280a4-cae4-4455-9838-ba4ef8d2f2c1" (UID: "594280a4-cae4-4455-9838-ba4ef8d2f2c1"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.525006 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9c52efaf-b737-47bf-9ca1-109a28e19113-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "9c52efaf-b737-47bf-9ca1-109a28e19113" (UID: "9c52efaf-b737-47bf-9ca1-109a28e19113"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.525145 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/594280a4-cae4-4455-9838-ba4ef8d2f2c1-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "594280a4-cae4-4455-9838-ba4ef8d2f2c1" (UID: "594280a4-cae4-4455-9838-ba4ef8d2f2c1"). InnerVolumeSpecName "kolla-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.526513 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9c52efaf-b737-47bf-9ca1-109a28e19113-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "9c52efaf-b737-47bf-9ca1-109a28e19113" (UID: "9c52efaf-b737-47bf-9ca1-109a28e19113"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.527064 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2c809052-d9bb-4982-8271-5b7a9a6f28f9-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "2c809052-d9bb-4982-8271-5b7a9a6f28f9" (UID: "2c809052-d9bb-4982-8271-5b7a9a6f28f9"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.527203 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9c52efaf-b737-47bf-9ca1-109a28e19113-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "9c52efaf-b737-47bf-9ca1-109a28e19113" (UID: "9c52efaf-b737-47bf-9ca1-109a28e19113"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.533569 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/594280a4-cae4-4455-9838-ba4ef8d2f2c1-kube-api-access-rdf2x" (OuterVolumeSpecName: "kube-api-access-rdf2x") pod "594280a4-cae4-4455-9838-ba4ef8d2f2c1" (UID: "594280a4-cae4-4455-9838-ba4ef8d2f2c1"). InnerVolumeSpecName "kube-api-access-rdf2x". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.533786 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c809052-d9bb-4982-8271-5b7a9a6f28f9-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "2c809052-d9bb-4982-8271-5b7a9a6f28f9" (UID: "2c809052-d9bb-4982-8271-5b7a9a6f28f9"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.533832 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2c809052-d9bb-4982-8271-5b7a9a6f28f9-kube-api-access-tb228" (OuterVolumeSpecName: "kube-api-access-tb228") pod "2c809052-d9bb-4982-8271-5b7a9a6f28f9" (UID: "2c809052-d9bb-4982-8271-5b7a9a6f28f9"). InnerVolumeSpecName "kube-api-access-tb228". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.533986 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9c52efaf-b737-47bf-9ca1-109a28e19113-kube-api-access-tkzmj" (OuterVolumeSpecName: "kube-api-access-tkzmj") pod "9c52efaf-b737-47bf-9ca1-109a28e19113" (UID: "9c52efaf-b737-47bf-9ca1-109a28e19113"). InnerVolumeSpecName "kube-api-access-tkzmj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.534068 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/2c809052-d9bb-4982-8271-5b7a9a6f28f9-pod-info" (OuterVolumeSpecName: "pod-info") pod "2c809052-d9bb-4982-8271-5b7a9a6f28f9" (UID: "2c809052-d9bb-4982-8271-5b7a9a6f28f9"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.535777 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2c809052-d9bb-4982-8271-5b7a9a6f28f9-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "2c809052-d9bb-4982-8271-5b7a9a6f28f9" (UID: "2c809052-d9bb-4982-8271-5b7a9a6f28f9"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.537334 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9c52efaf-b737-47bf-9ca1-109a28e19113-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "9c52efaf-b737-47bf-9ca1-109a28e19113" (UID: "9c52efaf-b737-47bf-9ca1-109a28e19113"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.537792 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage16-crc" (OuterVolumeSpecName: "mysql-db") pod "594280a4-cae4-4455-9838-ba4ef8d2f2c1" (UID: "594280a4-cae4-4455-9838-ba4ef8d2f2c1"). InnerVolumeSpecName "local-storage16-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.537938 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage15-crc" (OuterVolumeSpecName: "persistence") pod "2c809052-d9bb-4982-8271-5b7a9a6f28f9" (UID: "2c809052-d9bb-4982-8271-5b7a9a6f28f9"). InnerVolumeSpecName "local-storage15-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.538530 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/9c52efaf-b737-47bf-9ca1-109a28e19113-pod-info" (OuterVolumeSpecName: "pod-info") pod "9c52efaf-b737-47bf-9ca1-109a28e19113" (UID: "9c52efaf-b737-47bf-9ca1-109a28e19113"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.540219 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8573a632-84e0-4f80-b811-5646b571c318-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "8573a632-84e0-4f80-b811-5646b571c318" (UID: "8573a632-84e0-4f80-b811-5646b571c318"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.540703 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9c52efaf-b737-47bf-9ca1-109a28e19113-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "9c52efaf-b737-47bf-9ca1-109a28e19113" (UID: "9c52efaf-b737-47bf-9ca1-109a28e19113"). InnerVolumeSpecName "erlang-cookie-secret". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.544078 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage12-crc" (UniqueName: "kubernetes.io/local-volume/local-storage12-crc") on node "crc" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.544924 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6dae93de-b907-44f7-a94c-c691eee0af7f-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "6dae93de-b907-44f7-a94c-c691eee0af7f" (UID: "6dae93de-b907-44f7-a94c-c691eee0af7f"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.545762 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.554044 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/594280a4-cae4-4455-9838-ba4ef8d2f2c1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "594280a4-cae4-4455-9838-ba4ef8d2f2c1" (UID: "594280a4-cae4-4455-9838-ba4ef8d2f2c1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.558289 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9c52efaf-b737-47bf-9ca1-109a28e19113-config-data" (OuterVolumeSpecName: "config-data") pod "9c52efaf-b737-47bf-9ca1-109a28e19113" (UID: "9c52efaf-b737-47bf-9ca1-109a28e19113"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.563351 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c809052-d9bb-4982-8271-5b7a9a6f28f9-config-data" (OuterVolumeSpecName: "config-data") pod "2c809052-d9bb-4982-8271-5b7a9a6f28f9" (UID: "2c809052-d9bb-4982-8271-5b7a9a6f28f9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.568994 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c809052-d9bb-4982-8271-5b7a9a6f28f9-server-conf" (OuterVolumeSpecName: "server-conf") pod "2c809052-d9bb-4982-8271-5b7a9a6f28f9" (UID: "2c809052-d9bb-4982-8271-5b7a9a6f28f9"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.575201 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/24114ddb-3b30-42ac-9d61-cfeb15d58728-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "24114ddb-3b30-42ac-9d61-cfeb15d58728" (UID: "24114ddb-3b30-42ac-9d61-cfeb15d58728"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.579639 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9c52efaf-b737-47bf-9ca1-109a28e19113-server-conf" (OuterVolumeSpecName: "server-conf") pod "9c52efaf-b737-47bf-9ca1-109a28e19113" (UID: "9c52efaf-b737-47bf-9ca1-109a28e19113"). InnerVolumeSpecName "server-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.580193 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="14af4a81-8568-425c-a209-4d90a042c365" path="/var/lib/kubelet/pods/14af4a81-8568-425c-a209-4d90a042c365/volumes" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.580982 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="23b973c1-70c3-4d60-8b1d-89efda1a5707" path="/var/lib/kubelet/pods/23b973c1-70c3-4d60-8b1d-89efda1a5707/volumes" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.581079 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/594280a4-cae4-4455-9838-ba4ef8d2f2c1-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "594280a4-cae4-4455-9838-ba4ef8d2f2c1" (UID: "594280a4-cae4-4455-9838-ba4ef8d2f2c1"). InnerVolumeSpecName "galera-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.581635 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2f8ae24b-258e-4d32-b312-99f5015f83d6" path="/var/lib/kubelet/pods/2f8ae24b-258e-4d32-b312-99f5015f83d6/volumes" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.582959 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="344f9f31-8a81-4544-b782-5aa78dfc5cc2" path="/var/lib/kubelet/pods/344f9f31-8a81-4544-b782-5aa78dfc5cc2/volumes" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.583762 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="872ae103-12d2-4686-aa02-4e3375eb510a" path="/var/lib/kubelet/pods/872ae103-12d2-4686-aa02-4e3375eb510a/volumes" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.584775 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9" path="/var/lib/kubelet/pods/ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9/volumes" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.585456 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fa03920f-0e4b-458f-956b-b658786f9792" path="/var/lib/kubelet/pods/fa03920f-0e4b-458f-956b-b658786f9792/volumes" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.598494 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2c809052-d9bb-4982-8271-5b7a9a6f28f9-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "2c809052-d9bb-4982-8271-5b7a9a6f28f9" (UID: "2c809052-d9bb-4982-8271-5b7a9a6f28f9"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.623479 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/9c52efaf-b737-47bf-9ca1-109a28e19113-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.623503 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/594280a4-cae4-4455-9838-ba4ef8d2f2c1-config-data-default\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.623522 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") on node \"crc\" " Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.623532 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/9c52efaf-b737-47bf-9ca1-109a28e19113-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.623541 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/24114ddb-3b30-42ac-9d61-cfeb15d58728-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.623550 4558 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/2c809052-d9bb-4982-8271-5b7a9a6f28f9-pod-info\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.623558 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/2c809052-d9bb-4982-8271-5b7a9a6f28f9-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.623567 4558 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/9c52efaf-b737-47bf-9ca1-109a28e19113-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.623575 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/9c52efaf-b737-47bf-9ca1-109a28e19113-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.623583 4558 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/2c809052-d9bb-4982-8271-5b7a9a6f28f9-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.623594 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/594280a4-cae4-4455-9838-ba4ef8d2f2c1-config-data-generated\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.623610 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/8573a632-84e0-4f80-b811-5646b571c318-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.623618 4558 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/9c52efaf-b737-47bf-9ca1-109a28e19113-server-conf\") on node \"crc\" 
DevicePath \"\"" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.623625 4558 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/2c809052-d9bb-4982-8271-5b7a9a6f28f9-server-conf\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.623633 4558 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/9c52efaf-b737-47bf-9ca1-109a28e19113-pod-info\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.623641 4558 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/9c52efaf-b737-47bf-9ca1-109a28e19113-plugins-conf\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.623650 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/6dae93de-b907-44f7-a94c-c691eee0af7f-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.623661 4558 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/594280a4-cae4-4455-9838-ba4ef8d2f2c1-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.623671 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/594280a4-cae4-4455-9838-ba4ef8d2f2c1-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.623681 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.623690 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.623698 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/2c809052-d9bb-4982-8271-5b7a9a6f28f9-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.623707 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/594280a4-cae4-4455-9838-ba4ef8d2f2c1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.623717 4558 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/2c809052-d9bb-4982-8271-5b7a9a6f28f9-plugins-conf\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.623727 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tb228\" (UniqueName: \"kubernetes.io/projected/2c809052-d9bb-4982-8271-5b7a9a6f28f9-kube-api-access-tb228\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.623740 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage20-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage20-crc\") on node \"crc\" " Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.623751 4558 reconciler_common.go:293] "Volume detached for 
volume \"kube-api-access-tkzmj\" (UniqueName: \"kubernetes.io/projected/9c52efaf-b737-47bf-9ca1-109a28e19113-kube-api-access-tkzmj\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.623760 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rdf2x\" (UniqueName: \"kubernetes.io/projected/594280a4-cae4-4455-9838-ba4ef8d2f2c1-kube-api-access-rdf2x\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.623772 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage16-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage16-crc\") on node \"crc\" " Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.623781 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/2c809052-d9bb-4982-8271-5b7a9a6f28f9-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.623790 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2c809052-d9bb-4982-8271-5b7a9a6f28f9-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.623799 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/2c809052-d9bb-4982-8271-5b7a9a6f28f9-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.623808 4558 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/594280a4-cae4-4455-9838-ba4ef8d2f2c1-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.623817 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/9c52efaf-b737-47bf-9ca1-109a28e19113-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.627854 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9c52efaf-b737-47bf-9ca1-109a28e19113-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "9c52efaf-b737-47bf-9ca1-109a28e19113" (UID: "9c52efaf-b737-47bf-9ca1-109a28e19113"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.641842 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage16-crc" (UniqueName: "kubernetes.io/local-volume/local-storage16-crc") on node "crc" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.643929 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage20-crc" (UniqueName: "kubernetes.io/local-volume/local-storage20-crc") on node "crc" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.650945 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage15-crc" (UniqueName: "kubernetes.io/local-volume/local-storage15-crc") on node "crc" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.731793 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/9c52efaf-b737-47bf-9ca1-109a28e19113-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.731824 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.731837 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage20-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage20-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.731849 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage16-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage16-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.907922 4558 generic.go:334] "Generic (PLEG): container finished" podID="2c809052-d9bb-4982-8271-5b7a9a6f28f9" containerID="81a644c98c6093aa1efc528706c31e081b18455e9ddaf52cd7d7f878ae9f848a" exitCode=0 Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.908283 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-1" event={"ID":"2c809052-d9bb-4982-8271-5b7a9a6f28f9","Type":"ContainerDied","Data":"81a644c98c6093aa1efc528706c31e081b18455e9ddaf52cd7d7f878ae9f848a"} Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.908316 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-1" event={"ID":"2c809052-d9bb-4982-8271-5b7a9a6f28f9","Type":"ContainerDied","Data":"ad49acfca51d4d3a588a5798f1b752801f80e65a4b53a3236287f42cff68fa6c"} Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.908336 4558 scope.go:117] "RemoveContainer" containerID="81a644c98c6093aa1efc528706c31e081b18455e9ddaf52cd7d7f878ae9f848a" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.908448 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-server-1" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.913804 4558 generic.go:334] "Generic (PLEG): container finished" podID="9c52efaf-b737-47bf-9ca1-109a28e19113" containerID="ae9e159fcd24af6ebaa1ca94fb0944dc8d3bcbddd517479b3fa0699d53c7017a" exitCode=0 Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.913920 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-2" event={"ID":"9c52efaf-b737-47bf-9ca1-109a28e19113","Type":"ContainerDied","Data":"ae9e159fcd24af6ebaa1ca94fb0944dc8d3bcbddd517479b3fa0699d53c7017a"} Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.913969 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-2" event={"ID":"9c52efaf-b737-47bf-9ca1-109a28e19113","Type":"ContainerDied","Data":"2cb04f6495d45a0cdaa88baf16dcc76bd306519f1a6882c7808d53d85c6930dd"} Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.914100 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-server-2" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.917473 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-1" event={"ID":"8573a632-84e0-4f80-b811-5646b571c318","Type":"ContainerDied","Data":"b4a66fa84377ea5e80af90700c72d18b5c76aad252ce0cb121848ab3c2c56c40"} Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.917541 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-cell1-server-1" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.923900 4558 generic.go:334] "Generic (PLEG): container finished" podID="594280a4-cae4-4455-9838-ba4ef8d2f2c1" containerID="f1681cfb95a469724fd742914f90a54418d9a8689ce838c8542616d40276313e" exitCode=0 Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.923948 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-1" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.924010 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-1" event={"ID":"594280a4-cae4-4455-9838-ba4ef8d2f2c1","Type":"ContainerDied","Data":"f1681cfb95a469724fd742914f90a54418d9a8689ce838c8542616d40276313e"} Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.924081 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-1" event={"ID":"594280a4-cae4-4455-9838-ba4ef8d2f2c1","Type":"ContainerDied","Data":"eede67cb845b731dec2634e036e863172a2799d3e43cb08f3f455fb9d998e84d"} Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.928950 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.928993 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:21:40 crc kubenswrapper[4558]: I0120 17:21:40.929016 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-0" event={"ID":"24114ddb-3b30-42ac-9d61-cfeb15d58728","Type":"ContainerDied","Data":"491307bb1a7e36c87854cbad521231e4dbe2cb8534a567deb25f01d750f7a550"} Jan 20 17:21:41 crc kubenswrapper[4558]: I0120 17:21:41.004636 4558 scope.go:117] "RemoveContainer" containerID="aef92f821b7a5a08ba1161060fd5100effa5681c8a90295b7e73584d9d1549b4" Jan 20 17:21:41 crc kubenswrapper[4558]: I0120 17:21:41.026038 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-0"] Jan 20 17:21:41 crc kubenswrapper[4558]: I0120 17:21:41.066070 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-0"] Jan 20 17:21:41 crc kubenswrapper[4558]: I0120 17:21:41.087064 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-2"] Jan 20 17:21:41 crc kubenswrapper[4558]: I0120 17:21:41.096536 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-2"] Jan 20 17:21:41 crc kubenswrapper[4558]: I0120 17:21:41.099982 4558 scope.go:117] "RemoveContainer" containerID="81a644c98c6093aa1efc528706c31e081b18455e9ddaf52cd7d7f878ae9f848a" Jan 20 17:21:41 crc kubenswrapper[4558]: E0120 17:21:41.100440 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"81a644c98c6093aa1efc528706c31e081b18455e9ddaf52cd7d7f878ae9f848a\": container with ID starting with 81a644c98c6093aa1efc528706c31e081b18455e9ddaf52cd7d7f878ae9f848a not found: ID does not exist" containerID="81a644c98c6093aa1efc528706c31e081b18455e9ddaf52cd7d7f878ae9f848a" Jan 20 17:21:41 crc kubenswrapper[4558]: I0120 17:21:41.100481 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"81a644c98c6093aa1efc528706c31e081b18455e9ddaf52cd7d7f878ae9f848a"} err="failed to get container status \"81a644c98c6093aa1efc528706c31e081b18455e9ddaf52cd7d7f878ae9f848a\": rpc error: code = NotFound desc = could not find container \"81a644c98c6093aa1efc528706c31e081b18455e9ddaf52cd7d7f878ae9f848a\": container with ID starting with 81a644c98c6093aa1efc528706c31e081b18455e9ddaf52cd7d7f878ae9f848a not found: ID does not exist" Jan 20 17:21:41 crc kubenswrapper[4558]: I0120 17:21:41.100507 4558 scope.go:117] "RemoveContainer" containerID="aef92f821b7a5a08ba1161060fd5100effa5681c8a90295b7e73584d9d1549b4" Jan 20 17:21:41 crc kubenswrapper[4558]: E0120 17:21:41.100809 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aef92f821b7a5a08ba1161060fd5100effa5681c8a90295b7e73584d9d1549b4\": container with ID starting with aef92f821b7a5a08ba1161060fd5100effa5681c8a90295b7e73584d9d1549b4 not found: ID does not exist" containerID="aef92f821b7a5a08ba1161060fd5100effa5681c8a90295b7e73584d9d1549b4" Jan 20 17:21:41 crc kubenswrapper[4558]: I0120 17:21:41.100833 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aef92f821b7a5a08ba1161060fd5100effa5681c8a90295b7e73584d9d1549b4"} err="failed to get container status \"aef92f821b7a5a08ba1161060fd5100effa5681c8a90295b7e73584d9d1549b4\": rpc error: code = NotFound desc = could not find container 
\"aef92f821b7a5a08ba1161060fd5100effa5681c8a90295b7e73584d9d1549b4\": container with ID starting with aef92f821b7a5a08ba1161060fd5100effa5681c8a90295b7e73584d9d1549b4 not found: ID does not exist" Jan 20 17:21:41 crc kubenswrapper[4558]: I0120 17:21:41.100854 4558 scope.go:117] "RemoveContainer" containerID="ae9e159fcd24af6ebaa1ca94fb0944dc8d3bcbddd517479b3fa0699d53c7017a" Jan 20 17:21:41 crc kubenswrapper[4558]: I0120 17:21:41.101219 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:21:41 crc kubenswrapper[4558]: I0120 17:21:41.105297 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:21:41 crc kubenswrapper[4558]: I0120 17:21:41.109158 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-1"] Jan 20 17:21:41 crc kubenswrapper[4558]: I0120 17:21:41.113585 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-1"] Jan 20 17:21:41 crc kubenswrapper[4558]: I0120 17:21:41.116521 4558 scope.go:117] "RemoveContainer" containerID="ded34b9dd6d6a2bbb413151aa8d7e879c0b9a135b1b75cfbb4c74ef99adcec5a" Jan 20 17:21:41 crc kubenswrapper[4558]: I0120 17:21:41.117368 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-galera-1"] Jan 20 17:21:41 crc kubenswrapper[4558]: I0120 17:21:41.121389 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/openstack-galera-1"] Jan 20 17:21:41 crc kubenswrapper[4558]: I0120 17:21:41.125180 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-1"] Jan 20 17:21:41 crc kubenswrapper[4558]: I0120 17:21:41.128929 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-1"] Jan 20 17:21:41 crc kubenswrapper[4558]: I0120 17:21:41.131768 4558 scope.go:117] "RemoveContainer" containerID="ae9e159fcd24af6ebaa1ca94fb0944dc8d3bcbddd517479b3fa0699d53c7017a" Jan 20 17:21:41 crc kubenswrapper[4558]: E0120 17:21:41.132245 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ae9e159fcd24af6ebaa1ca94fb0944dc8d3bcbddd517479b3fa0699d53c7017a\": container with ID starting with ae9e159fcd24af6ebaa1ca94fb0944dc8d3bcbddd517479b3fa0699d53c7017a not found: ID does not exist" containerID="ae9e159fcd24af6ebaa1ca94fb0944dc8d3bcbddd517479b3fa0699d53c7017a" Jan 20 17:21:41 crc kubenswrapper[4558]: I0120 17:21:41.132299 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ae9e159fcd24af6ebaa1ca94fb0944dc8d3bcbddd517479b3fa0699d53c7017a"} err="failed to get container status \"ae9e159fcd24af6ebaa1ca94fb0944dc8d3bcbddd517479b3fa0699d53c7017a\": rpc error: code = NotFound desc = could not find container \"ae9e159fcd24af6ebaa1ca94fb0944dc8d3bcbddd517479b3fa0699d53c7017a\": container with ID starting with ae9e159fcd24af6ebaa1ca94fb0944dc8d3bcbddd517479b3fa0699d53c7017a not found: ID does not exist" Jan 20 17:21:41 crc kubenswrapper[4558]: I0120 17:21:41.132331 4558 scope.go:117] "RemoveContainer" containerID="ded34b9dd6d6a2bbb413151aa8d7e879c0b9a135b1b75cfbb4c74ef99adcec5a" Jan 20 17:21:41 crc kubenswrapper[4558]: E0120 17:21:41.132697 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"ded34b9dd6d6a2bbb413151aa8d7e879c0b9a135b1b75cfbb4c74ef99adcec5a\": container with ID starting with ded34b9dd6d6a2bbb413151aa8d7e879c0b9a135b1b75cfbb4c74ef99adcec5a not found: ID does not exist" containerID="ded34b9dd6d6a2bbb413151aa8d7e879c0b9a135b1b75cfbb4c74ef99adcec5a" Jan 20 17:21:41 crc kubenswrapper[4558]: I0120 17:21:41.132728 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ded34b9dd6d6a2bbb413151aa8d7e879c0b9a135b1b75cfbb4c74ef99adcec5a"} err="failed to get container status \"ded34b9dd6d6a2bbb413151aa8d7e879c0b9a135b1b75cfbb4c74ef99adcec5a\": rpc error: code = NotFound desc = could not find container \"ded34b9dd6d6a2bbb413151aa8d7e879c0b9a135b1b75cfbb4c74ef99adcec5a\": container with ID starting with ded34b9dd6d6a2bbb413151aa8d7e879c0b9a135b1b75cfbb4c74ef99adcec5a not found: ID does not exist" Jan 20 17:21:41 crc kubenswrapper[4558]: I0120 17:21:41.132744 4558 scope.go:117] "RemoveContainer" containerID="c5f31a0dc1f900cc5cbc94b222647ecbd6198220c8e700e7650325a56366cf4f" Jan 20 17:21:41 crc kubenswrapper[4558]: I0120 17:21:41.183492 4558 scope.go:117] "RemoveContainer" containerID="7e9618d5dfe234e6e7b347ce911135cc4faaff526b91370dd0e418034794ec1d" Jan 20 17:21:41 crc kubenswrapper[4558]: I0120 17:21:41.219540 4558 scope.go:117] "RemoveContainer" containerID="f1681cfb95a469724fd742914f90a54418d9a8689ce838c8542616d40276313e" Jan 20 17:21:41 crc kubenswrapper[4558]: I0120 17:21:41.243846 4558 scope.go:117] "RemoveContainer" containerID="4df69b0aeea95403c345c4fa4578dcd1f0758ef8805869ca9d1cb88b4fb9262e" Jan 20 17:21:41 crc kubenswrapper[4558]: I0120 17:21:41.290790 4558 scope.go:117] "RemoveContainer" containerID="f1681cfb95a469724fd742914f90a54418d9a8689ce838c8542616d40276313e" Jan 20 17:21:41 crc kubenswrapper[4558]: E0120 17:21:41.292185 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f1681cfb95a469724fd742914f90a54418d9a8689ce838c8542616d40276313e\": container with ID starting with f1681cfb95a469724fd742914f90a54418d9a8689ce838c8542616d40276313e not found: ID does not exist" containerID="f1681cfb95a469724fd742914f90a54418d9a8689ce838c8542616d40276313e" Jan 20 17:21:41 crc kubenswrapper[4558]: I0120 17:21:41.292218 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f1681cfb95a469724fd742914f90a54418d9a8689ce838c8542616d40276313e"} err="failed to get container status \"f1681cfb95a469724fd742914f90a54418d9a8689ce838c8542616d40276313e\": rpc error: code = NotFound desc = could not find container \"f1681cfb95a469724fd742914f90a54418d9a8689ce838c8542616d40276313e\": container with ID starting with f1681cfb95a469724fd742914f90a54418d9a8689ce838c8542616d40276313e not found: ID does not exist" Jan 20 17:21:41 crc kubenswrapper[4558]: I0120 17:21:41.292240 4558 scope.go:117] "RemoveContainer" containerID="4df69b0aeea95403c345c4fa4578dcd1f0758ef8805869ca9d1cb88b4fb9262e" Jan 20 17:21:41 crc kubenswrapper[4558]: E0120 17:21:41.292540 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4df69b0aeea95403c345c4fa4578dcd1f0758ef8805869ca9d1cb88b4fb9262e\": container with ID starting with 4df69b0aeea95403c345c4fa4578dcd1f0758ef8805869ca9d1cb88b4fb9262e not found: ID does not exist" containerID="4df69b0aeea95403c345c4fa4578dcd1f0758ef8805869ca9d1cb88b4fb9262e" Jan 20 17:21:41 crc kubenswrapper[4558]: I0120 17:21:41.292562 4558 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4df69b0aeea95403c345c4fa4578dcd1f0758ef8805869ca9d1cb88b4fb9262e"} err="failed to get container status \"4df69b0aeea95403c345c4fa4578dcd1f0758ef8805869ca9d1cb88b4fb9262e\": rpc error: code = NotFound desc = could not find container \"4df69b0aeea95403c345c4fa4578dcd1f0758ef8805869ca9d1cb88b4fb9262e\": container with ID starting with 4df69b0aeea95403c345c4fa4578dcd1f0758ef8805869ca9d1cb88b4fb9262e not found: ID does not exist" Jan 20 17:21:41 crc kubenswrapper[4558]: I0120 17:21:41.292575 4558 scope.go:117] "RemoveContainer" containerID="33a9e1abb20c3e3bf6a51b785efcb0a4eaf6964e1b8392e4977fc9f4c7b03f61" Jan 20 17:21:41 crc kubenswrapper[4558]: I0120 17:21:41.312411 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:21:41 crc kubenswrapper[4558]: I0120 17:21:41.312708 4558 scope.go:117] "RemoveContainer" containerID="4c8665b5b235011d33eb908501c51ff5695ec224cf17893391187c66bf1ab060" Jan 20 17:21:41 crc kubenswrapper[4558]: I0120 17:21:41.452874 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/3abae449-b665-40ba-bdac-ea1e908e1952-kolla-config\") pod \"3abae449-b665-40ba-bdac-ea1e908e1952\" (UID: \"3abae449-b665-40ba-bdac-ea1e908e1952\") " Jan 20 17:21:41 crc kubenswrapper[4558]: I0120 17:21:41.453048 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/3abae449-b665-40ba-bdac-ea1e908e1952-galera-tls-certs\") pod \"3abae449-b665-40ba-bdac-ea1e908e1952\" (UID: \"3abae449-b665-40ba-bdac-ea1e908e1952\") " Jan 20 17:21:41 crc kubenswrapper[4558]: I0120 17:21:41.453125 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/3abae449-b665-40ba-bdac-ea1e908e1952-config-data-generated\") pod \"3abae449-b665-40ba-bdac-ea1e908e1952\" (UID: \"3abae449-b665-40ba-bdac-ea1e908e1952\") " Jan 20 17:21:41 crc kubenswrapper[4558]: I0120 17:21:41.453184 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3abae449-b665-40ba-bdac-ea1e908e1952-operator-scripts\") pod \"3abae449-b665-40ba-bdac-ea1e908e1952\" (UID: \"3abae449-b665-40ba-bdac-ea1e908e1952\") " Jan 20 17:21:41 crc kubenswrapper[4558]: I0120 17:21:41.453230 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"3abae449-b665-40ba-bdac-ea1e908e1952\" (UID: \"3abae449-b665-40ba-bdac-ea1e908e1952\") " Jan 20 17:21:41 crc kubenswrapper[4558]: I0120 17:21:41.453273 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/3abae449-b665-40ba-bdac-ea1e908e1952-config-data-default\") pod \"3abae449-b665-40ba-bdac-ea1e908e1952\" (UID: \"3abae449-b665-40ba-bdac-ea1e908e1952\") " Jan 20 17:21:41 crc kubenswrapper[4558]: I0120 17:21:41.453322 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rwtfw\" (UniqueName: \"kubernetes.io/projected/3abae449-b665-40ba-bdac-ea1e908e1952-kube-api-access-rwtfw\") pod \"3abae449-b665-40ba-bdac-ea1e908e1952\" (UID: \"3abae449-b665-40ba-bdac-ea1e908e1952\") 
" Jan 20 17:21:41 crc kubenswrapper[4558]: I0120 17:21:41.453342 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3abae449-b665-40ba-bdac-ea1e908e1952-combined-ca-bundle\") pod \"3abae449-b665-40ba-bdac-ea1e908e1952\" (UID: \"3abae449-b665-40ba-bdac-ea1e908e1952\") " Jan 20 17:21:41 crc kubenswrapper[4558]: I0120 17:21:41.453805 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3abae449-b665-40ba-bdac-ea1e908e1952-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "3abae449-b665-40ba-bdac-ea1e908e1952" (UID: "3abae449-b665-40ba-bdac-ea1e908e1952"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:21:41 crc kubenswrapper[4558]: I0120 17:21:41.454206 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3abae449-b665-40ba-bdac-ea1e908e1952-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "3abae449-b665-40ba-bdac-ea1e908e1952" (UID: "3abae449-b665-40ba-bdac-ea1e908e1952"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:21:41 crc kubenswrapper[4558]: I0120 17:21:41.454597 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3abae449-b665-40ba-bdac-ea1e908e1952-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "3abae449-b665-40ba-bdac-ea1e908e1952" (UID: "3abae449-b665-40ba-bdac-ea1e908e1952"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:21:41 crc kubenswrapper[4558]: I0120 17:21:41.455039 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3abae449-b665-40ba-bdac-ea1e908e1952-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "3abae449-b665-40ba-bdac-ea1e908e1952" (UID: "3abae449-b665-40ba-bdac-ea1e908e1952"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:21:41 crc kubenswrapper[4558]: I0120 17:21:41.458745 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3abae449-b665-40ba-bdac-ea1e908e1952-kube-api-access-rwtfw" (OuterVolumeSpecName: "kube-api-access-rwtfw") pod "3abae449-b665-40ba-bdac-ea1e908e1952" (UID: "3abae449-b665-40ba-bdac-ea1e908e1952"). InnerVolumeSpecName "kube-api-access-rwtfw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:21:41 crc kubenswrapper[4558]: I0120 17:21:41.464889 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "mysql-db") pod "3abae449-b665-40ba-bdac-ea1e908e1952" (UID: "3abae449-b665-40ba-bdac-ea1e908e1952"). InnerVolumeSpecName "local-storage08-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:21:41 crc kubenswrapper[4558]: I0120 17:21:41.477032 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3abae449-b665-40ba-bdac-ea1e908e1952-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3abae449-b665-40ba-bdac-ea1e908e1952" (UID: "3abae449-b665-40ba-bdac-ea1e908e1952"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:41 crc kubenswrapper[4558]: I0120 17:21:41.489134 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3abae449-b665-40ba-bdac-ea1e908e1952-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "3abae449-b665-40ba-bdac-ea1e908e1952" (UID: "3abae449-b665-40ba-bdac-ea1e908e1952"). InnerVolumeSpecName "galera-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:41 crc kubenswrapper[4558]: I0120 17:21:41.555093 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rwtfw\" (UniqueName: \"kubernetes.io/projected/3abae449-b665-40ba-bdac-ea1e908e1952-kube-api-access-rwtfw\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:41 crc kubenswrapper[4558]: I0120 17:21:41.555124 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3abae449-b665-40ba-bdac-ea1e908e1952-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:41 crc kubenswrapper[4558]: I0120 17:21:41.555138 4558 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/3abae449-b665-40ba-bdac-ea1e908e1952-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:41 crc kubenswrapper[4558]: I0120 17:21:41.555148 4558 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/3abae449-b665-40ba-bdac-ea1e908e1952-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:41 crc kubenswrapper[4558]: I0120 17:21:41.555159 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/3abae449-b665-40ba-bdac-ea1e908e1952-config-data-generated\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:41 crc kubenswrapper[4558]: I0120 17:21:41.555184 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3abae449-b665-40ba-bdac-ea1e908e1952-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:41 crc kubenswrapper[4558]: I0120 17:21:41.555221 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Jan 20 17:21:41 crc kubenswrapper[4558]: I0120 17:21:41.555232 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/3abae449-b665-40ba-bdac-ea1e908e1952-config-data-default\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:41 crc kubenswrapper[4558]: I0120 17:21:41.569258 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc" Jan 20 17:21:41 crc kubenswrapper[4558]: I0120 17:21:41.656358 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:41 crc kubenswrapper[4558]: I0120 17:21:41.945453 4558 generic.go:334] "Generic (PLEG): container finished" podID="3abae449-b665-40ba-bdac-ea1e908e1952" containerID="b602058ef33d1b770be4c16f21fd079b2942efdcce4736c8d70e7dae44d7353d" exitCode=0 Jan 20 17:21:41 crc kubenswrapper[4558]: I0120 17:21:41.945797 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:21:41 crc kubenswrapper[4558]: I0120 17:21:41.945678 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"3abae449-b665-40ba-bdac-ea1e908e1952","Type":"ContainerDied","Data":"b602058ef33d1b770be4c16f21fd079b2942efdcce4736c8d70e7dae44d7353d"} Jan 20 17:21:41 crc kubenswrapper[4558]: I0120 17:21:41.945909 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"3abae449-b665-40ba-bdac-ea1e908e1952","Type":"ContainerDied","Data":"fb5f247aa2fb22da905e354e659e22cc895da40706b1f8be85ae812f75f393dc"} Jan 20 17:21:41 crc kubenswrapper[4558]: I0120 17:21:41.945972 4558 scope.go:117] "RemoveContainer" containerID="b602058ef33d1b770be4c16f21fd079b2942efdcce4736c8d70e7dae44d7353d" Jan 20 17:21:41 crc kubenswrapper[4558]: I0120 17:21:41.984949 4558 scope.go:117] "RemoveContainer" containerID="7776490a42e250bb73aab7faf515747c70ff28768e78ef5a2269fbf3fd5561a5" Jan 20 17:21:41 crc kubenswrapper[4558]: I0120 17:21:41.985879 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:21:41 crc kubenswrapper[4558]: I0120 17:21:41.989691 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:21:42 crc kubenswrapper[4558]: I0120 17:21:42.008557 4558 scope.go:117] "RemoveContainer" containerID="b602058ef33d1b770be4c16f21fd079b2942efdcce4736c8d70e7dae44d7353d" Jan 20 17:21:42 crc kubenswrapper[4558]: E0120 17:21:42.008973 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b602058ef33d1b770be4c16f21fd079b2942efdcce4736c8d70e7dae44d7353d\": container with ID starting with b602058ef33d1b770be4c16f21fd079b2942efdcce4736c8d70e7dae44d7353d not found: ID does not exist" containerID="b602058ef33d1b770be4c16f21fd079b2942efdcce4736c8d70e7dae44d7353d" Jan 20 17:21:42 crc kubenswrapper[4558]: I0120 17:21:42.009014 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b602058ef33d1b770be4c16f21fd079b2942efdcce4736c8d70e7dae44d7353d"} err="failed to get container status \"b602058ef33d1b770be4c16f21fd079b2942efdcce4736c8d70e7dae44d7353d\": rpc error: code = NotFound desc = could not find container \"b602058ef33d1b770be4c16f21fd079b2942efdcce4736c8d70e7dae44d7353d\": container with ID starting with b602058ef33d1b770be4c16f21fd079b2942efdcce4736c8d70e7dae44d7353d not found: ID does not exist" Jan 20 17:21:42 crc kubenswrapper[4558]: I0120 17:21:42.009045 4558 scope.go:117] "RemoveContainer" containerID="7776490a42e250bb73aab7faf515747c70ff28768e78ef5a2269fbf3fd5561a5" Jan 20 17:21:42 crc kubenswrapper[4558]: E0120 17:21:42.009497 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7776490a42e250bb73aab7faf515747c70ff28768e78ef5a2269fbf3fd5561a5\": container with ID starting with 7776490a42e250bb73aab7faf515747c70ff28768e78ef5a2269fbf3fd5561a5 not found: ID does not exist" containerID="7776490a42e250bb73aab7faf515747c70ff28768e78ef5a2269fbf3fd5561a5" Jan 20 17:21:42 crc kubenswrapper[4558]: I0120 17:21:42.009520 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7776490a42e250bb73aab7faf515747c70ff28768e78ef5a2269fbf3fd5561a5"} err="failed to get container status 
\"7776490a42e250bb73aab7faf515747c70ff28768e78ef5a2269fbf3fd5561a5\": rpc error: code = NotFound desc = could not find container \"7776490a42e250bb73aab7faf515747c70ff28768e78ef5a2269fbf3fd5561a5\": container with ID starting with 7776490a42e250bb73aab7faf515747c70ff28768e78ef5a2269fbf3fd5561a5 not found: ID does not exist" Jan 20 17:21:42 crc kubenswrapper[4558]: I0120 17:21:42.367881 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/nova-metadata-0" podUID="09d95674-9970-471d-8061-997e69ddda11" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.211:8775/\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Jan 20 17:21:42 crc kubenswrapper[4558]: I0120 17:21:42.368298 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/nova-metadata-0" podUID="09d95674-9970-471d-8061-997e69ddda11" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.211:8775/\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Jan 20 17:21:42 crc kubenswrapper[4558]: I0120 17:21:42.491014 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-db-sync-pnr7q"] Jan 20 17:21:42 crc kubenswrapper[4558]: I0120 17:21:42.503514 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-db-sync-pnr7q"] Jan 20 17:21:42 crc kubenswrapper[4558]: I0120 17:21:42.519432 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:21:42 crc kubenswrapper[4558]: I0120 17:21:42.519754 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podUID="477f15ac-57ae-4bb2-9230-2395ff3bf9ad" containerName="nova-cell1-conductor-conductor" containerID="cri-o://cd9412be70441b3487a5c8e313f30e1490c914b1b61e47d06c5f1b8c6448e338" gracePeriod=30 Jan 20 17:21:42 crc kubenswrapper[4558]: I0120 17:21:42.525202 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:21:42 crc kubenswrapper[4558]: I0120 17:21:42.525486 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="1ce622e1-3bed-423c-8870-1243fabacc8e" containerName="nova-cell0-conductor-conductor" containerID="cri-o://d1ff32632fd1bfa14bc2e72839ef954e6b06e71a5542a2689718a87ddb352720" gracePeriod=30 Jan 20 17:21:42 crc kubenswrapper[4558]: I0120 17:21:42.530693 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-db-sync-2rm29"] Jan 20 17:21:42 crc kubenswrapper[4558]: I0120 17:21:42.534228 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-db-sync-2rm29"] Jan 20 17:21:42 crc kubenswrapper[4558]: I0120 17:21:42.576353 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="24114ddb-3b30-42ac-9d61-cfeb15d58728" path="/var/lib/kubelet/pods/24114ddb-3b30-42ac-9d61-cfeb15d58728/volumes" Jan 20 17:21:42 crc kubenswrapper[4558]: I0120 17:21:42.577466 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2c809052-d9bb-4982-8271-5b7a9a6f28f9" path="/var/lib/kubelet/pods/2c809052-d9bb-4982-8271-5b7a9a6f28f9/volumes" Jan 20 17:21:42 crc kubenswrapper[4558]: I0120 
17:21:42.578154 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3abae449-b665-40ba-bdac-ea1e908e1952" path="/var/lib/kubelet/pods/3abae449-b665-40ba-bdac-ea1e908e1952/volumes" Jan 20 17:21:42 crc kubenswrapper[4558]: I0120 17:21:42.578846 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="594280a4-cae4-4455-9838-ba4ef8d2f2c1" path="/var/lib/kubelet/pods/594280a4-cae4-4455-9838-ba4ef8d2f2c1/volumes" Jan 20 17:21:42 crc kubenswrapper[4558]: I0120 17:21:42.579600 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6dae93de-b907-44f7-a94c-c691eee0af7f" path="/var/lib/kubelet/pods/6dae93de-b907-44f7-a94c-c691eee0af7f/volumes" Jan 20 17:21:42 crc kubenswrapper[4558]: I0120 17:21:42.580135 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="743c3bad-0487-4ff7-a447-82cec153136a" path="/var/lib/kubelet/pods/743c3bad-0487-4ff7-a447-82cec153136a/volumes" Jan 20 17:21:42 crc kubenswrapper[4558]: I0120 17:21:42.580793 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8573a632-84e0-4f80-b811-5646b571c318" path="/var/lib/kubelet/pods/8573a632-84e0-4f80-b811-5646b571c318/volumes" Jan 20 17:21:42 crc kubenswrapper[4558]: I0120 17:21:42.582683 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9c52efaf-b737-47bf-9ca1-109a28e19113" path="/var/lib/kubelet/pods/9c52efaf-b737-47bf-9ca1-109a28e19113/volumes" Jan 20 17:21:42 crc kubenswrapper[4558]: I0120 17:21:42.583502 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="df5e4bdf-f36a-40cd-b3a0-968ffe61c5e7" path="/var/lib/kubelet/pods/df5e4bdf-f36a-40cd-b3a0-968ffe61c5e7/volumes" Jan 20 17:21:43 crc kubenswrapper[4558]: I0120 17:21:43.087402 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:21:43 crc kubenswrapper[4558]: I0120 17:21:43.087663 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/memcached-0" podUID="f1382710-0399-4564-8c83-271c645ce04e" containerName="memcached" containerID="cri-o://0c2f9c3e3d8e63b8e5f33c1b908cb9fc7f21da330edfebb3544ee9108a3c2a66" gracePeriod=30 Jan 20 17:21:43 crc kubenswrapper[4558]: I0120 17:21:43.097341 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:21:43 crc kubenswrapper[4558]: I0120 17:21:43.097622 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="8eb09b19-2337-4fa1-8c9a-a3a45c9d8a48" containerName="nova-scheduler-scheduler" containerID="cri-o://cef9f816792ffb8c331439791b6687f021b7a0df270a63be33a28f933f0c1c2b" gracePeriod=30 Jan 20 17:21:43 crc kubenswrapper[4558]: I0120 17:21:43.648388 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-psfz5" Jan 20 17:21:43 crc kubenswrapper[4558]: I0120 17:21:43.713365 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-7745f4bdcc-fcqxh"] Jan 20 17:21:43 crc kubenswrapper[4558]: I0120 17:21:43.713625 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7745f4bdcc-fcqxh" podUID="67faa384-2157-4ce6-a5b8-aa2c3f4c3b20" containerName="dnsmasq-dns" containerID="cri-o://c2105e3b7033a3789680ee649e20d2039972ecf47b6e0b9633d48c2e12154cef" gracePeriod=10 Jan 20 17:21:43 crc kubenswrapper[4558]: I0120 
17:21:43.934706 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:21:43 crc kubenswrapper[4558]: I0120 17:21:43.981248 4558 generic.go:334] "Generic (PLEG): container finished" podID="67faa384-2157-4ce6-a5b8-aa2c3f4c3b20" containerID="c2105e3b7033a3789680ee649e20d2039972ecf47b6e0b9633d48c2e12154cef" exitCode=0 Jan 20 17:21:43 crc kubenswrapper[4558]: I0120 17:21:43.981338 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7745f4bdcc-fcqxh" event={"ID":"67faa384-2157-4ce6-a5b8-aa2c3f4c3b20","Type":"ContainerDied","Data":"c2105e3b7033a3789680ee649e20d2039972ecf47b6e0b9633d48c2e12154cef"} Jan 20 17:21:43 crc kubenswrapper[4558]: I0120 17:21:43.985548 4558 generic.go:334] "Generic (PLEG): container finished" podID="f1382710-0399-4564-8c83-271c645ce04e" containerID="0c2f9c3e3d8e63b8e5f33c1b908cb9fc7f21da330edfebb3544ee9108a3c2a66" exitCode=0 Jan 20 17:21:43 crc kubenswrapper[4558]: I0120 17:21:43.985616 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"f1382710-0399-4564-8c83-271c645ce04e","Type":"ContainerDied","Data":"0c2f9c3e3d8e63b8e5f33c1b908cb9fc7f21da330edfebb3544ee9108a3c2a66"} Jan 20 17:21:43 crc kubenswrapper[4558]: I0120 17:21:43.985659 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"f1382710-0399-4564-8c83-271c645ce04e","Type":"ContainerDied","Data":"67b5435d9b589be4e1e201377d49556a02405c5fbe78c6fd08e640e566a5a49e"} Jan 20 17:21:43 crc kubenswrapper[4558]: I0120 17:21:43.985691 4558 scope.go:117] "RemoveContainer" containerID="0c2f9c3e3d8e63b8e5f33c1b908cb9fc7f21da330edfebb3544ee9108a3c2a66" Jan 20 17:21:43 crc kubenswrapper[4558]: I0120 17:21:43.986038 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:21:44 crc kubenswrapper[4558]: I0120 17:21:44.006018 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1382710-0399-4564-8c83-271c645ce04e-combined-ca-bundle\") pod \"f1382710-0399-4564-8c83-271c645ce04e\" (UID: \"f1382710-0399-4564-8c83-271c645ce04e\") " Jan 20 17:21:44 crc kubenswrapper[4558]: I0120 17:21:44.006135 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/f1382710-0399-4564-8c83-271c645ce04e-kolla-config\") pod \"f1382710-0399-4564-8c83-271c645ce04e\" (UID: \"f1382710-0399-4564-8c83-271c645ce04e\") " Jan 20 17:21:44 crc kubenswrapper[4558]: I0120 17:21:44.006157 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/f1382710-0399-4564-8c83-271c645ce04e-memcached-tls-certs\") pod \"f1382710-0399-4564-8c83-271c645ce04e\" (UID: \"f1382710-0399-4564-8c83-271c645ce04e\") " Jan 20 17:21:44 crc kubenswrapper[4558]: I0120 17:21:44.006395 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cknq7\" (UniqueName: \"kubernetes.io/projected/f1382710-0399-4564-8c83-271c645ce04e-kube-api-access-cknq7\") pod \"f1382710-0399-4564-8c83-271c645ce04e\" (UID: \"f1382710-0399-4564-8c83-271c645ce04e\") " Jan 20 17:21:44 crc kubenswrapper[4558]: I0120 17:21:44.006456 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f1382710-0399-4564-8c83-271c645ce04e-config-data\") pod \"f1382710-0399-4564-8c83-271c645ce04e\" (UID: \"f1382710-0399-4564-8c83-271c645ce04e\") " Jan 20 17:21:44 crc kubenswrapper[4558]: I0120 17:21:44.007183 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f1382710-0399-4564-8c83-271c645ce04e-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "f1382710-0399-4564-8c83-271c645ce04e" (UID: "f1382710-0399-4564-8c83-271c645ce04e"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:21:44 crc kubenswrapper[4558]: I0120 17:21:44.007215 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f1382710-0399-4564-8c83-271c645ce04e-config-data" (OuterVolumeSpecName: "config-data") pod "f1382710-0399-4564-8c83-271c645ce04e" (UID: "f1382710-0399-4564-8c83-271c645ce04e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:21:44 crc kubenswrapper[4558]: I0120 17:21:44.007667 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f1382710-0399-4564-8c83-271c645ce04e-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:44 crc kubenswrapper[4558]: I0120 17:21:44.007690 4558 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/f1382710-0399-4564-8c83-271c645ce04e-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:44 crc kubenswrapper[4558]: I0120 17:21:44.025412 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f1382710-0399-4564-8c83-271c645ce04e-kube-api-access-cknq7" (OuterVolumeSpecName: "kube-api-access-cknq7") pod "f1382710-0399-4564-8c83-271c645ce04e" (UID: "f1382710-0399-4564-8c83-271c645ce04e"). InnerVolumeSpecName "kube-api-access-cknq7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:21:44 crc kubenswrapper[4558]: I0120 17:21:44.029751 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f1382710-0399-4564-8c83-271c645ce04e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f1382710-0399-4564-8c83-271c645ce04e" (UID: "f1382710-0399-4564-8c83-271c645ce04e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:44 crc kubenswrapper[4558]: I0120 17:21:44.030916 4558 scope.go:117] "RemoveContainer" containerID="0c2f9c3e3d8e63b8e5f33c1b908cb9fc7f21da330edfebb3544ee9108a3c2a66" Jan 20 17:21:44 crc kubenswrapper[4558]: E0120 17:21:44.034261 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0c2f9c3e3d8e63b8e5f33c1b908cb9fc7f21da330edfebb3544ee9108a3c2a66\": container with ID starting with 0c2f9c3e3d8e63b8e5f33c1b908cb9fc7f21da330edfebb3544ee9108a3c2a66 not found: ID does not exist" containerID="0c2f9c3e3d8e63b8e5f33c1b908cb9fc7f21da330edfebb3544ee9108a3c2a66" Jan 20 17:21:44 crc kubenswrapper[4558]: I0120 17:21:44.034307 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0c2f9c3e3d8e63b8e5f33c1b908cb9fc7f21da330edfebb3544ee9108a3c2a66"} err="failed to get container status \"0c2f9c3e3d8e63b8e5f33c1b908cb9fc7f21da330edfebb3544ee9108a3c2a66\": rpc error: code = NotFound desc = could not find container \"0c2f9c3e3d8e63b8e5f33c1b908cb9fc7f21da330edfebb3544ee9108a3c2a66\": container with ID starting with 0c2f9c3e3d8e63b8e5f33c1b908cb9fc7f21da330edfebb3544ee9108a3c2a66 not found: ID does not exist" Jan 20 17:21:44 crc kubenswrapper[4558]: I0120 17:21:44.043511 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f1382710-0399-4564-8c83-271c645ce04e-memcached-tls-certs" (OuterVolumeSpecName: "memcached-tls-certs") pod "f1382710-0399-4564-8c83-271c645ce04e" (UID: "f1382710-0399-4564-8c83-271c645ce04e"). InnerVolumeSpecName "memcached-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:44 crc kubenswrapper[4558]: I0120 17:21:44.087940 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7745f4bdcc-fcqxh" Jan 20 17:21:44 crc kubenswrapper[4558]: I0120 17:21:44.110095 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1382710-0399-4564-8c83-271c645ce04e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:44 crc kubenswrapper[4558]: I0120 17:21:44.110136 4558 reconciler_common.go:293] "Volume detached for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/f1382710-0399-4564-8c83-271c645ce04e-memcached-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:44 crc kubenswrapper[4558]: I0120 17:21:44.110182 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cknq7\" (UniqueName: \"kubernetes.io/projected/f1382710-0399-4564-8c83-271c645ce04e-kube-api-access-cknq7\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:44 crc kubenswrapper[4558]: I0120 17:21:44.210613 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vqfkw\" (UniqueName: \"kubernetes.io/projected/67faa384-2157-4ce6-a5b8-aa2c3f4c3b20-kube-api-access-vqfkw\") pod \"67faa384-2157-4ce6-a5b8-aa2c3f4c3b20\" (UID: \"67faa384-2157-4ce6-a5b8-aa2c3f4c3b20\") " Jan 20 17:21:44 crc kubenswrapper[4558]: I0120 17:21:44.210655 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/67faa384-2157-4ce6-a5b8-aa2c3f4c3b20-dns-swift-storage-0\") pod \"67faa384-2157-4ce6-a5b8-aa2c3f4c3b20\" (UID: \"67faa384-2157-4ce6-a5b8-aa2c3f4c3b20\") " Jan 20 17:21:44 crc kubenswrapper[4558]: I0120 17:21:44.210802 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/67faa384-2157-4ce6-a5b8-aa2c3f4c3b20-config\") pod \"67faa384-2157-4ce6-a5b8-aa2c3f4c3b20\" (UID: \"67faa384-2157-4ce6-a5b8-aa2c3f4c3b20\") " Jan 20 17:21:44 crc kubenswrapper[4558]: I0120 17:21:44.210853 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/67faa384-2157-4ce6-a5b8-aa2c3f4c3b20-dnsmasq-svc\") pod \"67faa384-2157-4ce6-a5b8-aa2c3f4c3b20\" (UID: \"67faa384-2157-4ce6-a5b8-aa2c3f4c3b20\") " Jan 20 17:21:44 crc kubenswrapper[4558]: I0120 17:21:44.214090 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/67faa384-2157-4ce6-a5b8-aa2c3f4c3b20-kube-api-access-vqfkw" (OuterVolumeSpecName: "kube-api-access-vqfkw") pod "67faa384-2157-4ce6-a5b8-aa2c3f4c3b20" (UID: "67faa384-2157-4ce6-a5b8-aa2c3f4c3b20"). InnerVolumeSpecName "kube-api-access-vqfkw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:21:44 crc kubenswrapper[4558]: I0120 17:21:44.240820 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/67faa384-2157-4ce6-a5b8-aa2c3f4c3b20-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "67faa384-2157-4ce6-a5b8-aa2c3f4c3b20" (UID: "67faa384-2157-4ce6-a5b8-aa2c3f4c3b20"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:21:44 crc kubenswrapper[4558]: I0120 17:21:44.241283 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/67faa384-2157-4ce6-a5b8-aa2c3f4c3b20-config" (OuterVolumeSpecName: "config") pod "67faa384-2157-4ce6-a5b8-aa2c3f4c3b20" (UID: "67faa384-2157-4ce6-a5b8-aa2c3f4c3b20"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:21:44 crc kubenswrapper[4558]: I0120 17:21:44.251200 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/67faa384-2157-4ce6-a5b8-aa2c3f4c3b20-dnsmasq-svc" (OuterVolumeSpecName: "dnsmasq-svc") pod "67faa384-2157-4ce6-a5b8-aa2c3f4c3b20" (UID: "67faa384-2157-4ce6-a5b8-aa2c3f4c3b20"). InnerVolumeSpecName "dnsmasq-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:21:44 crc kubenswrapper[4558]: I0120 17:21:44.312353 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/67faa384-2157-4ce6-a5b8-aa2c3f4c3b20-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:44 crc kubenswrapper[4558]: I0120 17:21:44.312383 4558 reconciler_common.go:293] "Volume detached for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/67faa384-2157-4ce6-a5b8-aa2c3f4c3b20-dnsmasq-svc\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:44 crc kubenswrapper[4558]: I0120 17:21:44.312394 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vqfkw\" (UniqueName: \"kubernetes.io/projected/67faa384-2157-4ce6-a5b8-aa2c3f4c3b20-kube-api-access-vqfkw\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:44 crc kubenswrapper[4558]: I0120 17:21:44.312406 4558 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/67faa384-2157-4ce6-a5b8-aa2c3f4c3b20-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:44 crc kubenswrapper[4558]: I0120 17:21:44.314396 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:21:44 crc kubenswrapper[4558]: I0120 17:21:44.321486 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:21:44 crc kubenswrapper[4558]: E0120 17:21:44.477258 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d1ff32632fd1bfa14bc2e72839ef954e6b06e71a5542a2689718a87ddb352720" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:21:44 crc kubenswrapper[4558]: E0120 17:21:44.478866 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d1ff32632fd1bfa14bc2e72839ef954e6b06e71a5542a2689718a87ddb352720" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:21:44 crc kubenswrapper[4558]: E0120 17:21:44.480434 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d1ff32632fd1bfa14bc2e72839ef954e6b06e71a5542a2689718a87ddb352720" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:21:44 crc kubenswrapper[4558]: E0120 17:21:44.480467 4558 
prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="1ce622e1-3bed-423c-8870-1243fabacc8e" containerName="nova-cell0-conductor-conductor" Jan 20 17:21:44 crc kubenswrapper[4558]: I0120 17:21:44.575446 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f1382710-0399-4564-8c83-271c645ce04e" path="/var/lib/kubelet/pods/f1382710-0399-4564-8c83-271c645ce04e/volumes" Jan 20 17:21:44 crc kubenswrapper[4558]: E0120 17:21:44.952651 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="cd9412be70441b3487a5c8e313f30e1490c914b1b61e47d06c5f1b8c6448e338" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:21:44 crc kubenswrapper[4558]: E0120 17:21:44.954424 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="cd9412be70441b3487a5c8e313f30e1490c914b1b61e47d06c5f1b8c6448e338" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:21:44 crc kubenswrapper[4558]: E0120 17:21:44.955821 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="cd9412be70441b3487a5c8e313f30e1490c914b1b61e47d06c5f1b8c6448e338" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:21:44 crc kubenswrapper[4558]: E0120 17:21:44.955891 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podUID="477f15ac-57ae-4bb2-9230-2395ff3bf9ad" containerName="nova-cell1-conductor-conductor" Jan 20 17:21:44 crc kubenswrapper[4558]: I0120 17:21:44.998648 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7745f4bdcc-fcqxh" event={"ID":"67faa384-2157-4ce6-a5b8-aa2c3f4c3b20","Type":"ContainerDied","Data":"3a8af5ecec070e434a30868de7adfa60aa4596669a14322dcdd042ee4952c366"} Jan 20 17:21:44 crc kubenswrapper[4558]: I0120 17:21:44.998721 4558 scope.go:117] "RemoveContainer" containerID="c2105e3b7033a3789680ee649e20d2039972ecf47b6e0b9633d48c2e12154cef" Jan 20 17:21:44 crc kubenswrapper[4558]: I0120 17:21:44.998727 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7745f4bdcc-fcqxh" Jan 20 17:21:45 crc kubenswrapper[4558]: I0120 17:21:45.023651 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-7745f4bdcc-fcqxh"] Jan 20 17:21:45 crc kubenswrapper[4558]: I0120 17:21:45.026400 4558 scope.go:117] "RemoveContainer" containerID="755e40a8ef22a5dcd70c9d8fcf4e97011ab26ded09be3df19c27930830005f51" Jan 20 17:21:45 crc kubenswrapper[4558]: I0120 17:21:45.027865 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-7745f4bdcc-fcqxh"] Jan 20 17:21:46 crc kubenswrapper[4558]: I0120 17:21:46.577278 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="67faa384-2157-4ce6-a5b8-aa2c3f4c3b20" path="/var/lib/kubelet/pods/67faa384-2157-4ce6-a5b8-aa2c3f4c3b20/volumes" Jan 20 17:21:47 crc kubenswrapper[4558]: I0120 17:21:47.046446 4558 generic.go:334] "Generic (PLEG): container finished" podID="533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b" containerID="d68462f667579ef49cbcb5dabe1e6eb12c8f1227e9a7202b73ef439191bd1da7" exitCode=0 Jan 20 17:21:47 crc kubenswrapper[4558]: I0120 17:21:47.046515 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b","Type":"ContainerDied","Data":"d68462f667579ef49cbcb5dabe1e6eb12c8f1227e9a7202b73ef439191bd1da7"} Jan 20 17:21:47 crc kubenswrapper[4558]: I0120 17:21:47.139130 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:21:47 crc kubenswrapper[4558]: I0120 17:21:47.247390 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b-public-tls-certs\") pod \"533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b\" (UID: \"533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b\") " Jan 20 17:21:47 crc kubenswrapper[4558]: I0120 17:21:47.247449 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b-logs\") pod \"533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b\" (UID: \"533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b\") " Jan 20 17:21:47 crc kubenswrapper[4558]: I0120 17:21:47.247485 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2tw8d\" (UniqueName: \"kubernetes.io/projected/533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b-kube-api-access-2tw8d\") pod \"533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b\" (UID: \"533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b\") " Jan 20 17:21:47 crc kubenswrapper[4558]: I0120 17:21:47.247538 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b-combined-ca-bundle\") pod \"533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b\" (UID: \"533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b\") " Jan 20 17:21:47 crc kubenswrapper[4558]: I0120 17:21:47.247604 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b-internal-tls-certs\") pod \"533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b\" (UID: \"533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b\") " Jan 20 17:21:47 crc kubenswrapper[4558]: I0120 17:21:47.247659 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b-config-data\") pod \"533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b\" (UID: \"533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b\") " Jan 20 17:21:47 crc kubenswrapper[4558]: I0120 17:21:47.247921 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b-logs" (OuterVolumeSpecName: "logs") pod "533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b" (UID: "533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:21:47 crc kubenswrapper[4558]: I0120 17:21:47.248079 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:47 crc kubenswrapper[4558]: I0120 17:21:47.253699 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b-kube-api-access-2tw8d" (OuterVolumeSpecName: "kube-api-access-2tw8d") pod "533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b" (UID: "533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b"). InnerVolumeSpecName "kube-api-access-2tw8d". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:21:47 crc kubenswrapper[4558]: I0120 17:21:47.276010 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b" (UID: "533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:47 crc kubenswrapper[4558]: I0120 17:21:47.277055 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b-config-data" (OuterVolumeSpecName: "config-data") pod "533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b" (UID: "533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:47 crc kubenswrapper[4558]: I0120 17:21:47.284404 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b" (UID: "533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:47 crc kubenswrapper[4558]: I0120 17:21:47.287125 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b" (UID: "533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:21:47 crc kubenswrapper[4558]: I0120 17:21:47.352568 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:47 crc kubenswrapper[4558]: I0120 17:21:47.352646 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2tw8d\" (UniqueName: \"kubernetes.io/projected/533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b-kube-api-access-2tw8d\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:47 crc kubenswrapper[4558]: I0120 17:21:47.352663 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:47 crc kubenswrapper[4558]: I0120 17:21:47.352674 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:47 crc kubenswrapper[4558]: I0120 17:21:47.352687 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:21:47 crc kubenswrapper[4558]: E0120 17:21:47.357696 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="cef9f816792ffb8c331439791b6687f021b7a0df270a63be33a28f933f0c1c2b" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:21:47 crc kubenswrapper[4558]: E0120 17:21:47.359674 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="cef9f816792ffb8c331439791b6687f021b7a0df270a63be33a28f933f0c1c2b" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:21:47 crc kubenswrapper[4558]: E0120 17:21:47.361143 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="cef9f816792ffb8c331439791b6687f021b7a0df270a63be33a28f933f0c1c2b" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:21:47 crc kubenswrapper[4558]: E0120 17:21:47.361217 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="8eb09b19-2337-4fa1-8c9a-a3a45c9d8a48" containerName="nova-scheduler-scheduler" Jan 20 17:21:48 crc kubenswrapper[4558]: I0120 17:21:48.063755 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b","Type":"ContainerDied","Data":"f30510222a66cfc9a50f9cf06421bd56ec0eb63b96631931e75f7c3f79491603"} Jan 20 17:21:48 crc kubenswrapper[4558]: I0120 17:21:48.064757 4558 scope.go:117] "RemoveContainer" containerID="d68462f667579ef49cbcb5dabe1e6eb12c8f1227e9a7202b73ef439191bd1da7" Jan 20 17:21:48 crc kubenswrapper[4558]: I0120 17:21:48.064798 4558 util.go:48] "No ready 
sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:21:48 crc kubenswrapper[4558]: I0120 17:21:48.095684 4558 scope.go:117] "RemoveContainer" containerID="2eff65431cd3f96e218fee39fc3cc4517345e8ec098ef9aeba65d6a34e79cd42" Jan 20 17:21:48 crc kubenswrapper[4558]: I0120 17:21:48.108804 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:21:48 crc kubenswrapper[4558]: I0120 17:21:48.113257 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:21:48 crc kubenswrapper[4558]: I0120 17:21:48.569626 4558 scope.go:117] "RemoveContainer" containerID="0ffce44366a8b4466427b682fb41640425366a0f4449210959148de9aef695c6" Jan 20 17:21:48 crc kubenswrapper[4558]: E0120 17:21:48.570056 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:21:48 crc kubenswrapper[4558]: I0120 17:21:48.570800 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/neutron-5c7bf5978d-d2t27" podUID="abb17bba-a50c-4b0f-941b-3a09a8bcac1e" containerName="neutron-httpd" probeResult="failure" output="Get \"https://10.217.0.177:9696/\": dial tcp 10.217.0.177:9696: connect: connection refused" Jan 20 17:21:48 crc kubenswrapper[4558]: I0120 17:21:48.576031 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b" path="/var/lib/kubelet/pods/533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b/volumes" Jan 20 17:21:49 crc kubenswrapper[4558]: I0120 17:21:49.046864 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7745f4bdcc-fcqxh" podUID="67faa384-2157-4ce6-a5b8-aa2c3f4c3b20" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.141:5353: i/o timeout" Jan 20 17:21:49 crc kubenswrapper[4558]: E0120 17:21:49.477138 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d1ff32632fd1bfa14bc2e72839ef954e6b06e71a5542a2689718a87ddb352720" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:21:49 crc kubenswrapper[4558]: E0120 17:21:49.480270 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d1ff32632fd1bfa14bc2e72839ef954e6b06e71a5542a2689718a87ddb352720" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:21:49 crc kubenswrapper[4558]: E0120 17:21:49.483519 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d1ff32632fd1bfa14bc2e72839ef954e6b06e71a5542a2689718a87ddb352720" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:21:49 crc kubenswrapper[4558]: E0120 17:21:49.483622 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register 
an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="1ce622e1-3bed-423c-8870-1243fabacc8e" containerName="nova-cell0-conductor-conductor" Jan 20 17:21:49 crc kubenswrapper[4558]: E0120 17:21:49.952646 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="cd9412be70441b3487a5c8e313f30e1490c914b1b61e47d06c5f1b8c6448e338" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:21:49 crc kubenswrapper[4558]: E0120 17:21:49.954294 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="cd9412be70441b3487a5c8e313f30e1490c914b1b61e47d06c5f1b8c6448e338" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:21:49 crc kubenswrapper[4558]: E0120 17:21:49.955859 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="cd9412be70441b3487a5c8e313f30e1490c914b1b61e47d06c5f1b8c6448e338" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:21:49 crc kubenswrapper[4558]: E0120 17:21:49.955988 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podUID="477f15ac-57ae-4bb2-9230-2395ff3bf9ad" containerName="nova-cell1-conductor-conductor" Jan 20 17:21:52 crc kubenswrapper[4558]: E0120 17:21:52.357685 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="cef9f816792ffb8c331439791b6687f021b7a0df270a63be33a28f933f0c1c2b" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:21:52 crc kubenswrapper[4558]: E0120 17:21:52.359731 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="cef9f816792ffb8c331439791b6687f021b7a0df270a63be33a28f933f0c1c2b" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:21:52 crc kubenswrapper[4558]: E0120 17:21:52.361144 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="cef9f816792ffb8c331439791b6687f021b7a0df270a63be33a28f933f0c1c2b" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:21:52 crc kubenswrapper[4558]: E0120 17:21:52.361187 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="8eb09b19-2337-4fa1-8c9a-a3a45c9d8a48" containerName="nova-scheduler-scheduler" Jan 20 17:21:52 crc kubenswrapper[4558]: E0120 17:21:52.397883 4558 fsHandler.go:119] failed to collect filesystem stats - rootDiskErr: could not stat 
"/var/lib/containers/storage/overlay/9aa00d20e63d78f9ff44aef143698d526066ed431002dfff11f4337f89e26c7b/diff" to get inode usage: stat /var/lib/containers/storage/overlay/9aa00d20e63d78f9ff44aef143698d526066ed431002dfff11f4337f89e26c7b/diff: no such file or directory, extraDiskErr: could not stat "/var/log/pods/openstack-kuttl-tests_nova-api-0_533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b/nova-api-api/0.log" to get inode usage: stat /var/log/pods/openstack-kuttl-tests_nova-api-0_533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b/nova-api-api/0.log: no such file or directory Jan 20 17:21:54 crc kubenswrapper[4558]: E0120 17:21:54.476273 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d1ff32632fd1bfa14bc2e72839ef954e6b06e71a5542a2689718a87ddb352720" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:21:54 crc kubenswrapper[4558]: E0120 17:21:54.478933 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d1ff32632fd1bfa14bc2e72839ef954e6b06e71a5542a2689718a87ddb352720" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:21:54 crc kubenswrapper[4558]: E0120 17:21:54.480252 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d1ff32632fd1bfa14bc2e72839ef954e6b06e71a5542a2689718a87ddb352720" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:21:54 crc kubenswrapper[4558]: E0120 17:21:54.480340 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="1ce622e1-3bed-423c-8870-1243fabacc8e" containerName="nova-cell0-conductor-conductor" Jan 20 17:21:54 crc kubenswrapper[4558]: E0120 17:21:54.952422 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="cd9412be70441b3487a5c8e313f30e1490c914b1b61e47d06c5f1b8c6448e338" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:21:54 crc kubenswrapper[4558]: E0120 17:21:54.953894 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="cd9412be70441b3487a5c8e313f30e1490c914b1b61e47d06c5f1b8c6448e338" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:21:54 crc kubenswrapper[4558]: E0120 17:21:54.954956 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="cd9412be70441b3487a5c8e313f30e1490c914b1b61e47d06c5f1b8c6448e338" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:21:54 crc kubenswrapper[4558]: E0120 17:21:54.955020 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" 
probeType="Readiness" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podUID="477f15ac-57ae-4bb2-9230-2395ff3bf9ad" containerName="nova-cell1-conductor-conductor" Jan 20 17:21:57 crc kubenswrapper[4558]: E0120 17:21:57.357195 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="cef9f816792ffb8c331439791b6687f021b7a0df270a63be33a28f933f0c1c2b" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:21:57 crc kubenswrapper[4558]: E0120 17:21:57.359091 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="cef9f816792ffb8c331439791b6687f021b7a0df270a63be33a28f933f0c1c2b" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:21:57 crc kubenswrapper[4558]: E0120 17:21:57.369853 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="cef9f816792ffb8c331439791b6687f021b7a0df270a63be33a28f933f0c1c2b" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:21:57 crc kubenswrapper[4558]: E0120 17:21:57.369939 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="8eb09b19-2337-4fa1-8c9a-a3a45c9d8a48" containerName="nova-scheduler-scheduler" Jan 20 17:21:57 crc kubenswrapper[4558]: I0120 17:21:57.775756 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/ceilometer-0" podUID="0378e8e5-48af-48c3-9100-80622a77533b" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.215:3000/\": dial tcp 10.217.0.215:3000: connect: connection refused" Jan 20 17:21:59 crc kubenswrapper[4558]: E0120 17:21:59.476900 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d1ff32632fd1bfa14bc2e72839ef954e6b06e71a5542a2689718a87ddb352720" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:21:59 crc kubenswrapper[4558]: E0120 17:21:59.478542 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d1ff32632fd1bfa14bc2e72839ef954e6b06e71a5542a2689718a87ddb352720" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:21:59 crc kubenswrapper[4558]: E0120 17:21:59.480216 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d1ff32632fd1bfa14bc2e72839ef954e6b06e71a5542a2689718a87ddb352720" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:21:59 crc kubenswrapper[4558]: E0120 17:21:59.480262 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" 
pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="1ce622e1-3bed-423c-8870-1243fabacc8e" containerName="nova-cell0-conductor-conductor" Jan 20 17:21:59 crc kubenswrapper[4558]: E0120 17:21:59.952953 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="cd9412be70441b3487a5c8e313f30e1490c914b1b61e47d06c5f1b8c6448e338" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:21:59 crc kubenswrapper[4558]: E0120 17:21:59.954428 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="cd9412be70441b3487a5c8e313f30e1490c914b1b61e47d06c5f1b8c6448e338" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:21:59 crc kubenswrapper[4558]: E0120 17:21:59.955442 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="cd9412be70441b3487a5c8e313f30e1490c914b1b61e47d06c5f1b8c6448e338" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:21:59 crc kubenswrapper[4558]: E0120 17:21:59.955480 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podUID="477f15ac-57ae-4bb2-9230-2395ff3bf9ad" containerName="nova-cell1-conductor-conductor" Jan 20 17:22:00 crc kubenswrapper[4558]: I0120 17:22:00.565940 4558 scope.go:117] "RemoveContainer" containerID="0ffce44366a8b4466427b682fb41640425366a0f4449210959148de9aef695c6" Jan 20 17:22:00 crc kubenswrapper[4558]: E0120 17:22:00.567019 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:22:02 crc kubenswrapper[4558]: E0120 17:22:02.357512 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="cef9f816792ffb8c331439791b6687f021b7a0df270a63be33a28f933f0c1c2b" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:22:02 crc kubenswrapper[4558]: E0120 17:22:02.359108 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="cef9f816792ffb8c331439791b6687f021b7a0df270a63be33a28f933f0c1c2b" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:22:02 crc kubenswrapper[4558]: E0120 17:22:02.360918 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="cef9f816792ffb8c331439791b6687f021b7a0df270a63be33a28f933f0c1c2b" 
cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:22:02 crc kubenswrapper[4558]: E0120 17:22:02.360960 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="8eb09b19-2337-4fa1-8c9a-a3a45c9d8a48" containerName="nova-scheduler-scheduler" Jan 20 17:22:03 crc kubenswrapper[4558]: I0120 17:22:03.250023 4558 generic.go:334] "Generic (PLEG): container finished" podID="413b348d-82a1-466a-8027-d2bfd6f97cb7" containerID="0326fe1340c68a89c66bd369e9d86e881cb9466234bb1a9b24d4732251805082" exitCode=137 Jan 20 17:22:03 crc kubenswrapper[4558]: I0120 17:22:03.250078 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"413b348d-82a1-466a-8027-d2bfd6f97cb7","Type":"ContainerDied","Data":"0326fe1340c68a89c66bd369e9d86e881cb9466234bb1a9b24d4732251805082"} Jan 20 17:22:03 crc kubenswrapper[4558]: I0120 17:22:03.403429 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:22:03 crc kubenswrapper[4558]: I0120 17:22:03.542787 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bgwqv\" (UniqueName: \"kubernetes.io/projected/413b348d-82a1-466a-8027-d2bfd6f97cb7-kube-api-access-bgwqv\") pod \"413b348d-82a1-466a-8027-d2bfd6f97cb7\" (UID: \"413b348d-82a1-466a-8027-d2bfd6f97cb7\") " Jan 20 17:22:03 crc kubenswrapper[4558]: I0120 17:22:03.543120 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/413b348d-82a1-466a-8027-d2bfd6f97cb7-scripts\") pod \"413b348d-82a1-466a-8027-d2bfd6f97cb7\" (UID: \"413b348d-82a1-466a-8027-d2bfd6f97cb7\") " Jan 20 17:22:03 crc kubenswrapper[4558]: I0120 17:22:03.543234 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/413b348d-82a1-466a-8027-d2bfd6f97cb7-config-data-custom\") pod \"413b348d-82a1-466a-8027-d2bfd6f97cb7\" (UID: \"413b348d-82a1-466a-8027-d2bfd6f97cb7\") " Jan 20 17:22:03 crc kubenswrapper[4558]: I0120 17:22:03.543258 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/413b348d-82a1-466a-8027-d2bfd6f97cb7-etc-machine-id\") pod \"413b348d-82a1-466a-8027-d2bfd6f97cb7\" (UID: \"413b348d-82a1-466a-8027-d2bfd6f97cb7\") " Jan 20 17:22:03 crc kubenswrapper[4558]: I0120 17:22:03.543329 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/413b348d-82a1-466a-8027-d2bfd6f97cb7-config-data\") pod \"413b348d-82a1-466a-8027-d2bfd6f97cb7\" (UID: \"413b348d-82a1-466a-8027-d2bfd6f97cb7\") " Jan 20 17:22:03 crc kubenswrapper[4558]: I0120 17:22:03.543362 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/413b348d-82a1-466a-8027-d2bfd6f97cb7-combined-ca-bundle\") pod \"413b348d-82a1-466a-8027-d2bfd6f97cb7\" (UID: \"413b348d-82a1-466a-8027-d2bfd6f97cb7\") " Jan 20 17:22:03 crc kubenswrapper[4558]: I0120 17:22:03.549409 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/413b348d-82a1-466a-8027-d2bfd6f97cb7-kube-api-access-bgwqv" 
(OuterVolumeSpecName: "kube-api-access-bgwqv") pod "413b348d-82a1-466a-8027-d2bfd6f97cb7" (UID: "413b348d-82a1-466a-8027-d2bfd6f97cb7"). InnerVolumeSpecName "kube-api-access-bgwqv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:22:03 crc kubenswrapper[4558]: I0120 17:22:03.550242 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/413b348d-82a1-466a-8027-d2bfd6f97cb7-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "413b348d-82a1-466a-8027-d2bfd6f97cb7" (UID: "413b348d-82a1-466a-8027-d2bfd6f97cb7"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:22:03 crc kubenswrapper[4558]: I0120 17:22:03.552296 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/413b348d-82a1-466a-8027-d2bfd6f97cb7-scripts" (OuterVolumeSpecName: "scripts") pod "413b348d-82a1-466a-8027-d2bfd6f97cb7" (UID: "413b348d-82a1-466a-8027-d2bfd6f97cb7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:22:03 crc kubenswrapper[4558]: I0120 17:22:03.553226 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/413b348d-82a1-466a-8027-d2bfd6f97cb7-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "413b348d-82a1-466a-8027-d2bfd6f97cb7" (UID: "413b348d-82a1-466a-8027-d2bfd6f97cb7"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:22:03 crc kubenswrapper[4558]: I0120 17:22:03.583506 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/413b348d-82a1-466a-8027-d2bfd6f97cb7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "413b348d-82a1-466a-8027-d2bfd6f97cb7" (UID: "413b348d-82a1-466a-8027-d2bfd6f97cb7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:22:03 crc kubenswrapper[4558]: I0120 17:22:03.604552 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:22:03 crc kubenswrapper[4558]: I0120 17:22:03.616539 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/413b348d-82a1-466a-8027-d2bfd6f97cb7-config-data" (OuterVolumeSpecName: "config-data") pod "413b348d-82a1-466a-8027-d2bfd6f97cb7" (UID: "413b348d-82a1-466a-8027-d2bfd6f97cb7"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:22:03 crc kubenswrapper[4558]: I0120 17:22:03.645052 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bgwqv\" (UniqueName: \"kubernetes.io/projected/413b348d-82a1-466a-8027-d2bfd6f97cb7-kube-api-access-bgwqv\") on node \"crc\" DevicePath \"\"" Jan 20 17:22:03 crc kubenswrapper[4558]: I0120 17:22:03.645181 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/413b348d-82a1-466a-8027-d2bfd6f97cb7-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:22:03 crc kubenswrapper[4558]: I0120 17:22:03.645260 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/413b348d-82a1-466a-8027-d2bfd6f97cb7-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:22:03 crc kubenswrapper[4558]: I0120 17:22:03.645310 4558 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/413b348d-82a1-466a-8027-d2bfd6f97cb7-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 20 17:22:03 crc kubenswrapper[4558]: I0120 17:22:03.645360 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/413b348d-82a1-466a-8027-d2bfd6f97cb7-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:22:03 crc kubenswrapper[4558]: I0120 17:22:03.645404 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/413b348d-82a1-466a-8027-d2bfd6f97cb7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:22:03 crc kubenswrapper[4558]: I0120 17:22:03.746208 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/fae781f7-d8cb-4f32-992f-43d7dac82655-cache\") pod \"fae781f7-d8cb-4f32-992f-43d7dac82655\" (UID: \"fae781f7-d8cb-4f32-992f-43d7dac82655\") " Jan 20 17:22:03 crc kubenswrapper[4558]: I0120 17:22:03.746734 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fae781f7-d8cb-4f32-992f-43d7dac82655-cache" (OuterVolumeSpecName: "cache") pod "fae781f7-d8cb-4f32-992f-43d7dac82655" (UID: "fae781f7-d8cb-4f32-992f-43d7dac82655"). InnerVolumeSpecName "cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:22:03 crc kubenswrapper[4558]: I0120 17:22:03.746824 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/fae781f7-d8cb-4f32-992f-43d7dac82655-lock\") pod \"fae781f7-d8cb-4f32-992f-43d7dac82655\" (UID: \"fae781f7-d8cb-4f32-992f-43d7dac82655\") " Jan 20 17:22:03 crc kubenswrapper[4558]: I0120 17:22:03.746874 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-67zxv\" (UniqueName: \"kubernetes.io/projected/fae781f7-d8cb-4f32-992f-43d7dac82655-kube-api-access-67zxv\") pod \"fae781f7-d8cb-4f32-992f-43d7dac82655\" (UID: \"fae781f7-d8cb-4f32-992f-43d7dac82655\") " Jan 20 17:22:03 crc kubenswrapper[4558]: I0120 17:22:03.747304 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fae781f7-d8cb-4f32-992f-43d7dac82655-lock" (OuterVolumeSpecName: "lock") pod "fae781f7-d8cb-4f32-992f-43d7dac82655" (UID: "fae781f7-d8cb-4f32-992f-43d7dac82655"). InnerVolumeSpecName "lock". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:22:03 crc kubenswrapper[4558]: I0120 17:22:03.747374 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swift\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"fae781f7-d8cb-4f32-992f-43d7dac82655\" (UID: \"fae781f7-d8cb-4f32-992f-43d7dac82655\") " Jan 20 17:22:03 crc kubenswrapper[4558]: I0120 17:22:03.747718 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/fae781f7-d8cb-4f32-992f-43d7dac82655-etc-swift\") pod \"fae781f7-d8cb-4f32-992f-43d7dac82655\" (UID: \"fae781f7-d8cb-4f32-992f-43d7dac82655\") " Jan 20 17:22:03 crc kubenswrapper[4558]: I0120 17:22:03.748368 4558 reconciler_common.go:293] "Volume detached for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/fae781f7-d8cb-4f32-992f-43d7dac82655-cache\") on node \"crc\" DevicePath \"\"" Jan 20 17:22:03 crc kubenswrapper[4558]: I0120 17:22:03.748390 4558 reconciler_common.go:293] "Volume detached for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/fae781f7-d8cb-4f32-992f-43d7dac82655-lock\") on node \"crc\" DevicePath \"\"" Jan 20 17:22:03 crc kubenswrapper[4558]: I0120 17:22:03.756824 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "swift") pod "fae781f7-d8cb-4f32-992f-43d7dac82655" (UID: "fae781f7-d8cb-4f32-992f-43d7dac82655"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:22:03 crc kubenswrapper[4558]: I0120 17:22:03.756853 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fae781f7-d8cb-4f32-992f-43d7dac82655-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "fae781f7-d8cb-4f32-992f-43d7dac82655" (UID: "fae781f7-d8cb-4f32-992f-43d7dac82655"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:22:03 crc kubenswrapper[4558]: I0120 17:22:03.756964 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fae781f7-d8cb-4f32-992f-43d7dac82655-kube-api-access-67zxv" (OuterVolumeSpecName: "kube-api-access-67zxv") pod "fae781f7-d8cb-4f32-992f-43d7dac82655" (UID: "fae781f7-d8cb-4f32-992f-43d7dac82655"). InnerVolumeSpecName "kube-api-access-67zxv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:22:03 crc kubenswrapper[4558]: I0120 17:22:03.850655 4558 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/fae781f7-d8cb-4f32-992f-43d7dac82655-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 20 17:22:03 crc kubenswrapper[4558]: I0120 17:22:03.850692 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-67zxv\" (UniqueName: \"kubernetes.io/projected/fae781f7-d8cb-4f32-992f-43d7dac82655-kube-api-access-67zxv\") on node \"crc\" DevicePath \"\"" Jan 20 17:22:03 crc kubenswrapper[4558]: I0120 17:22:03.850728 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Jan 20 17:22:03 crc kubenswrapper[4558]: I0120 17:22:03.863756 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Jan 20 17:22:03 crc kubenswrapper[4558]: I0120 17:22:03.952899 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.261469 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_neutron-5c7bf5978d-d2t27_abb17bba-a50c-4b0f-941b-3a09a8bcac1e/neutron-api/0.log" Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.261521 4558 generic.go:334] "Generic (PLEG): container finished" podID="abb17bba-a50c-4b0f-941b-3a09a8bcac1e" containerID="fb57b9facd8c8baac31528678e058b6e227a981807b81524108e8fec27a589a1" exitCode=137 Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.261578 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-5c7bf5978d-d2t27" event={"ID":"abb17bba-a50c-4b0f-941b-3a09a8bcac1e","Type":"ContainerDied","Data":"fb57b9facd8c8baac31528678e058b6e227a981807b81524108e8fec27a589a1"} Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.263725 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"413b348d-82a1-466a-8027-d2bfd6f97cb7","Type":"ContainerDied","Data":"adcce5a4f5dd6fc247e791891ccb892e1fe88a8d1bf103a3c067c6cc28e3425f"} Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.263759 4558 scope.go:117] "RemoveContainer" containerID="7ee0f2d8df26b1c3c3fbe8f6a262fe806d7e730b90e603316c261fe601cf8b66" Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.263791 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.271253 4558 generic.go:334] "Generic (PLEG): container finished" podID="fae781f7-d8cb-4f32-992f-43d7dac82655" containerID="1153f574b016e23743a03a0a3af75cf83aa887f85ab8a9f604b6d0beb5416c91" exitCode=137 Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.271305 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fae781f7-d8cb-4f32-992f-43d7dac82655","Type":"ContainerDied","Data":"1153f574b016e23743a03a0a3af75cf83aa887f85ab8a9f604b6d0beb5416c91"} Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.271333 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fae781f7-d8cb-4f32-992f-43d7dac82655","Type":"ContainerDied","Data":"fdf580ae6115f467cbfc7843aee98b52dccea02af4027e953b46e84b91359cac"} Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.271422 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.288207 4558 scope.go:117] "RemoveContainer" containerID="0326fe1340c68a89c66bd369e9d86e881cb9466234bb1a9b24d4732251805082" Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.296842 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.310428 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.315223 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-storage-0"] Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.319011 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/swift-storage-0"] Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.346688 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_neutron-5c7bf5978d-d2t27_abb17bba-a50c-4b0f-941b-3a09a8bcac1e/neutron-api/0.log" Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.346850 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-5c7bf5978d-d2t27" Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.348831 4558 scope.go:117] "RemoveContainer" containerID="1153f574b016e23743a03a0a3af75cf83aa887f85ab8a9f604b6d0beb5416c91" Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.377745 4558 scope.go:117] "RemoveContainer" containerID="4f8e0074decabf3ae64fdf9bf0b1b9bfcd33bc833a5040505bdb4f80fb68ce7c" Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.400417 4558 scope.go:117] "RemoveContainer" containerID="0625dd3b638d8b981039c7b40441b9f3eb96afe32772f7a668fc9f6739b2ce43" Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.423961 4558 scope.go:117] "RemoveContainer" containerID="1a43e8cd885dce5f981539c99de1df7006b77acee5bfe4dc7aee4e81c1433375" Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.441774 4558 scope.go:117] "RemoveContainer" containerID="cbd610baf4709c4651944a2af3a0c48b9c204a1089c08055228c74c96f5c28b9" Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.459789 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zrtcb\" (UniqueName: \"kubernetes.io/projected/abb17bba-a50c-4b0f-941b-3a09a8bcac1e-kube-api-access-zrtcb\") pod \"abb17bba-a50c-4b0f-941b-3a09a8bcac1e\" (UID: \"abb17bba-a50c-4b0f-941b-3a09a8bcac1e\") " Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.459836 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/abb17bba-a50c-4b0f-941b-3a09a8bcac1e-config\") pod \"abb17bba-a50c-4b0f-941b-3a09a8bcac1e\" (UID: \"abb17bba-a50c-4b0f-941b-3a09a8bcac1e\") " Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.459911 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/abb17bba-a50c-4b0f-941b-3a09a8bcac1e-combined-ca-bundle\") pod \"abb17bba-a50c-4b0f-941b-3a09a8bcac1e\" (UID: \"abb17bba-a50c-4b0f-941b-3a09a8bcac1e\") " Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.459936 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/abb17bba-a50c-4b0f-941b-3a09a8bcac1e-public-tls-certs\") pod \"abb17bba-a50c-4b0f-941b-3a09a8bcac1e\" (UID: \"abb17bba-a50c-4b0f-941b-3a09a8bcac1e\") " Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.459975 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/abb17bba-a50c-4b0f-941b-3a09a8bcac1e-internal-tls-certs\") pod \"abb17bba-a50c-4b0f-941b-3a09a8bcac1e\" (UID: \"abb17bba-a50c-4b0f-941b-3a09a8bcac1e\") " Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.460048 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/abb17bba-a50c-4b0f-941b-3a09a8bcac1e-ovndb-tls-certs\") pod \"abb17bba-a50c-4b0f-941b-3a09a8bcac1e\" (UID: \"abb17bba-a50c-4b0f-941b-3a09a8bcac1e\") " Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.460089 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/abb17bba-a50c-4b0f-941b-3a09a8bcac1e-httpd-config\") pod \"abb17bba-a50c-4b0f-941b-3a09a8bcac1e\" (UID: \"abb17bba-a50c-4b0f-941b-3a09a8bcac1e\") " Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.465474 4558 scope.go:117] "RemoveContainer" 
containerID="617b43e1502aaece76174ffd472b995e859c9b75744441831fd4fbd240eacd18" Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.466212 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/abb17bba-a50c-4b0f-941b-3a09a8bcac1e-kube-api-access-zrtcb" (OuterVolumeSpecName: "kube-api-access-zrtcb") pod "abb17bba-a50c-4b0f-941b-3a09a8bcac1e" (UID: "abb17bba-a50c-4b0f-941b-3a09a8bcac1e"). InnerVolumeSpecName "kube-api-access-zrtcb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.466703 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/abb17bba-a50c-4b0f-941b-3a09a8bcac1e-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "abb17bba-a50c-4b0f-941b-3a09a8bcac1e" (UID: "abb17bba-a50c-4b0f-941b-3a09a8bcac1e"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:22:04 crc kubenswrapper[4558]: E0120 17:22:04.481538 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d1ff32632fd1bfa14bc2e72839ef954e6b06e71a5542a2689718a87ddb352720" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:22:04 crc kubenswrapper[4558]: E0120 17:22:04.483777 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d1ff32632fd1bfa14bc2e72839ef954e6b06e71a5542a2689718a87ddb352720" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:22:04 crc kubenswrapper[4558]: E0120 17:22:04.488044 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d1ff32632fd1bfa14bc2e72839ef954e6b06e71a5542a2689718a87ddb352720" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:22:04 crc kubenswrapper[4558]: E0120 17:22:04.488195 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="1ce622e1-3bed-423c-8870-1243fabacc8e" containerName="nova-cell0-conductor-conductor" Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.506120 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/abb17bba-a50c-4b0f-941b-3a09a8bcac1e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "abb17bba-a50c-4b0f-941b-3a09a8bcac1e" (UID: "abb17bba-a50c-4b0f-941b-3a09a8bcac1e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.507788 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/abb17bba-a50c-4b0f-941b-3a09a8bcac1e-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "abb17bba-a50c-4b0f-941b-3a09a8bcac1e" (UID: "abb17bba-a50c-4b0f-941b-3a09a8bcac1e"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.512838 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/abb17bba-a50c-4b0f-941b-3a09a8bcac1e-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "abb17bba-a50c-4b0f-941b-3a09a8bcac1e" (UID: "abb17bba-a50c-4b0f-941b-3a09a8bcac1e"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.517009 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/abb17bba-a50c-4b0f-941b-3a09a8bcac1e-config" (OuterVolumeSpecName: "config") pod "abb17bba-a50c-4b0f-941b-3a09a8bcac1e" (UID: "abb17bba-a50c-4b0f-941b-3a09a8bcac1e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.526911 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/abb17bba-a50c-4b0f-941b-3a09a8bcac1e-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "abb17bba-a50c-4b0f-941b-3a09a8bcac1e" (UID: "abb17bba-a50c-4b0f-941b-3a09a8bcac1e"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.531978 4558 scope.go:117] "RemoveContainer" containerID="e739685a9bc530c37217e2f6a62d401027fed825626a553b5d0e17838ebce2ab" Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.546194 4558 scope.go:117] "RemoveContainer" containerID="ba569f18d36a42b3a1ada97f895e8e6f12935ce1058001df0619906556fc1d82" Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.561846 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zrtcb\" (UniqueName: \"kubernetes.io/projected/abb17bba-a50c-4b0f-941b-3a09a8bcac1e-kube-api-access-zrtcb\") on node \"crc\" DevicePath \"\"" Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.561877 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/abb17bba-a50c-4b0f-941b-3a09a8bcac1e-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.561891 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/abb17bba-a50c-4b0f-941b-3a09a8bcac1e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.561901 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/abb17bba-a50c-4b0f-941b-3a09a8bcac1e-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.561909 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/abb17bba-a50c-4b0f-941b-3a09a8bcac1e-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.561918 4558 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/abb17bba-a50c-4b0f-941b-3a09a8bcac1e-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.561926 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/abb17bba-a50c-4b0f-941b-3a09a8bcac1e-httpd-config\") on 
node \"crc\" DevicePath \"\"" Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.562466 4558 scope.go:117] "RemoveContainer" containerID="844d6ed85e1a90ecc3e801bcad9126bb9e1b7e1969ac475bcae5bcc3fac3d05e" Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.573345 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="413b348d-82a1-466a-8027-d2bfd6f97cb7" path="/var/lib/kubelet/pods/413b348d-82a1-466a-8027-d2bfd6f97cb7/volumes" Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.573944 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fae781f7-d8cb-4f32-992f-43d7dac82655" path="/var/lib/kubelet/pods/fae781f7-d8cb-4f32-992f-43d7dac82655/volumes" Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.579709 4558 scope.go:117] "RemoveContainer" containerID="14b8554fd67e8203eaf876170ae3e7775aa3b4c177bc6900890e44c6bc4a44f4" Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.617182 4558 scope.go:117] "RemoveContainer" containerID="097f5821ec52a17e967649b5b6967edea162a947874924e6600e4d07725cae05" Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.632725 4558 scope.go:117] "RemoveContainer" containerID="03d0eae4935912c7dedaac60a57d7749cb144a4fb7adabc826dffe289fecf3a3" Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.649196 4558 scope.go:117] "RemoveContainer" containerID="04e1cbf4a49ae809f6cd9ad66cf664edf23a039b8a03c43ccc28c839028f90d4" Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.663275 4558 scope.go:117] "RemoveContainer" containerID="449d04837104cda04f1b998b58315c57a3f8dd508073572dfb1cf641a8e99cdf" Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.677776 4558 scope.go:117] "RemoveContainer" containerID="10b8b90c30337c202ffa5efc8376aef571af66cac566f514e6c26f2a3c0a8340" Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.701783 4558 scope.go:117] "RemoveContainer" containerID="1153f574b016e23743a03a0a3af75cf83aa887f85ab8a9f604b6d0beb5416c91" Jan 20 17:22:04 crc kubenswrapper[4558]: E0120 17:22:04.704359 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1153f574b016e23743a03a0a3af75cf83aa887f85ab8a9f604b6d0beb5416c91\": container with ID starting with 1153f574b016e23743a03a0a3af75cf83aa887f85ab8a9f604b6d0beb5416c91 not found: ID does not exist" containerID="1153f574b016e23743a03a0a3af75cf83aa887f85ab8a9f604b6d0beb5416c91" Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.704429 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1153f574b016e23743a03a0a3af75cf83aa887f85ab8a9f604b6d0beb5416c91"} err="failed to get container status \"1153f574b016e23743a03a0a3af75cf83aa887f85ab8a9f604b6d0beb5416c91\": rpc error: code = NotFound desc = could not find container \"1153f574b016e23743a03a0a3af75cf83aa887f85ab8a9f604b6d0beb5416c91\": container with ID starting with 1153f574b016e23743a03a0a3af75cf83aa887f85ab8a9f604b6d0beb5416c91 not found: ID does not exist" Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.704474 4558 scope.go:117] "RemoveContainer" containerID="4f8e0074decabf3ae64fdf9bf0b1b9bfcd33bc833a5040505bdb4f80fb68ce7c" Jan 20 17:22:04 crc kubenswrapper[4558]: E0120 17:22:04.705029 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4f8e0074decabf3ae64fdf9bf0b1b9bfcd33bc833a5040505bdb4f80fb68ce7c\": container with ID starting with 4f8e0074decabf3ae64fdf9bf0b1b9bfcd33bc833a5040505bdb4f80fb68ce7c 
not found: ID does not exist" containerID="4f8e0074decabf3ae64fdf9bf0b1b9bfcd33bc833a5040505bdb4f80fb68ce7c" Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.705063 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4f8e0074decabf3ae64fdf9bf0b1b9bfcd33bc833a5040505bdb4f80fb68ce7c"} err="failed to get container status \"4f8e0074decabf3ae64fdf9bf0b1b9bfcd33bc833a5040505bdb4f80fb68ce7c\": rpc error: code = NotFound desc = could not find container \"4f8e0074decabf3ae64fdf9bf0b1b9bfcd33bc833a5040505bdb4f80fb68ce7c\": container with ID starting with 4f8e0074decabf3ae64fdf9bf0b1b9bfcd33bc833a5040505bdb4f80fb68ce7c not found: ID does not exist" Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.705089 4558 scope.go:117] "RemoveContainer" containerID="0625dd3b638d8b981039c7b40441b9f3eb96afe32772f7a668fc9f6739b2ce43" Jan 20 17:22:04 crc kubenswrapper[4558]: E0120 17:22:04.705534 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0625dd3b638d8b981039c7b40441b9f3eb96afe32772f7a668fc9f6739b2ce43\": container with ID starting with 0625dd3b638d8b981039c7b40441b9f3eb96afe32772f7a668fc9f6739b2ce43 not found: ID does not exist" containerID="0625dd3b638d8b981039c7b40441b9f3eb96afe32772f7a668fc9f6739b2ce43" Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.705559 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0625dd3b638d8b981039c7b40441b9f3eb96afe32772f7a668fc9f6739b2ce43"} err="failed to get container status \"0625dd3b638d8b981039c7b40441b9f3eb96afe32772f7a668fc9f6739b2ce43\": rpc error: code = NotFound desc = could not find container \"0625dd3b638d8b981039c7b40441b9f3eb96afe32772f7a668fc9f6739b2ce43\": container with ID starting with 0625dd3b638d8b981039c7b40441b9f3eb96afe32772f7a668fc9f6739b2ce43 not found: ID does not exist" Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.705577 4558 scope.go:117] "RemoveContainer" containerID="1a43e8cd885dce5f981539c99de1df7006b77acee5bfe4dc7aee4e81c1433375" Jan 20 17:22:04 crc kubenswrapper[4558]: E0120 17:22:04.705878 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1a43e8cd885dce5f981539c99de1df7006b77acee5bfe4dc7aee4e81c1433375\": container with ID starting with 1a43e8cd885dce5f981539c99de1df7006b77acee5bfe4dc7aee4e81c1433375 not found: ID does not exist" containerID="1a43e8cd885dce5f981539c99de1df7006b77acee5bfe4dc7aee4e81c1433375" Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.705901 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1a43e8cd885dce5f981539c99de1df7006b77acee5bfe4dc7aee4e81c1433375"} err="failed to get container status \"1a43e8cd885dce5f981539c99de1df7006b77acee5bfe4dc7aee4e81c1433375\": rpc error: code = NotFound desc = could not find container \"1a43e8cd885dce5f981539c99de1df7006b77acee5bfe4dc7aee4e81c1433375\": container with ID starting with 1a43e8cd885dce5f981539c99de1df7006b77acee5bfe4dc7aee4e81c1433375 not found: ID does not exist" Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.705920 4558 scope.go:117] "RemoveContainer" containerID="cbd610baf4709c4651944a2af3a0c48b9c204a1089c08055228c74c96f5c28b9" Jan 20 17:22:04 crc kubenswrapper[4558]: E0120 17:22:04.707893 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"cbd610baf4709c4651944a2af3a0c48b9c204a1089c08055228c74c96f5c28b9\": container with ID starting with cbd610baf4709c4651944a2af3a0c48b9c204a1089c08055228c74c96f5c28b9 not found: ID does not exist" containerID="cbd610baf4709c4651944a2af3a0c48b9c204a1089c08055228c74c96f5c28b9" Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.707922 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cbd610baf4709c4651944a2af3a0c48b9c204a1089c08055228c74c96f5c28b9"} err="failed to get container status \"cbd610baf4709c4651944a2af3a0c48b9c204a1089c08055228c74c96f5c28b9\": rpc error: code = NotFound desc = could not find container \"cbd610baf4709c4651944a2af3a0c48b9c204a1089c08055228c74c96f5c28b9\": container with ID starting with cbd610baf4709c4651944a2af3a0c48b9c204a1089c08055228c74c96f5c28b9 not found: ID does not exist" Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.707942 4558 scope.go:117] "RemoveContainer" containerID="617b43e1502aaece76174ffd472b995e859c9b75744441831fd4fbd240eacd18" Jan 20 17:22:04 crc kubenswrapper[4558]: E0120 17:22:04.708265 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"617b43e1502aaece76174ffd472b995e859c9b75744441831fd4fbd240eacd18\": container with ID starting with 617b43e1502aaece76174ffd472b995e859c9b75744441831fd4fbd240eacd18 not found: ID does not exist" containerID="617b43e1502aaece76174ffd472b995e859c9b75744441831fd4fbd240eacd18" Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.708287 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"617b43e1502aaece76174ffd472b995e859c9b75744441831fd4fbd240eacd18"} err="failed to get container status \"617b43e1502aaece76174ffd472b995e859c9b75744441831fd4fbd240eacd18\": rpc error: code = NotFound desc = could not find container \"617b43e1502aaece76174ffd472b995e859c9b75744441831fd4fbd240eacd18\": container with ID starting with 617b43e1502aaece76174ffd472b995e859c9b75744441831fd4fbd240eacd18 not found: ID does not exist" Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.708306 4558 scope.go:117] "RemoveContainer" containerID="e739685a9bc530c37217e2f6a62d401027fed825626a553b5d0e17838ebce2ab" Jan 20 17:22:04 crc kubenswrapper[4558]: E0120 17:22:04.708781 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e739685a9bc530c37217e2f6a62d401027fed825626a553b5d0e17838ebce2ab\": container with ID starting with e739685a9bc530c37217e2f6a62d401027fed825626a553b5d0e17838ebce2ab not found: ID does not exist" containerID="e739685a9bc530c37217e2f6a62d401027fed825626a553b5d0e17838ebce2ab" Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.708804 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e739685a9bc530c37217e2f6a62d401027fed825626a553b5d0e17838ebce2ab"} err="failed to get container status \"e739685a9bc530c37217e2f6a62d401027fed825626a553b5d0e17838ebce2ab\": rpc error: code = NotFound desc = could not find container \"e739685a9bc530c37217e2f6a62d401027fed825626a553b5d0e17838ebce2ab\": container with ID starting with e739685a9bc530c37217e2f6a62d401027fed825626a553b5d0e17838ebce2ab not found: ID does not exist" Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.708821 4558 scope.go:117] "RemoveContainer" containerID="ba569f18d36a42b3a1ada97f895e8e6f12935ce1058001df0619906556fc1d82" Jan 20 17:22:04 crc 
kubenswrapper[4558]: E0120 17:22:04.709123 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ba569f18d36a42b3a1ada97f895e8e6f12935ce1058001df0619906556fc1d82\": container with ID starting with ba569f18d36a42b3a1ada97f895e8e6f12935ce1058001df0619906556fc1d82 not found: ID does not exist" containerID="ba569f18d36a42b3a1ada97f895e8e6f12935ce1058001df0619906556fc1d82" Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.709149 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ba569f18d36a42b3a1ada97f895e8e6f12935ce1058001df0619906556fc1d82"} err="failed to get container status \"ba569f18d36a42b3a1ada97f895e8e6f12935ce1058001df0619906556fc1d82\": rpc error: code = NotFound desc = could not find container \"ba569f18d36a42b3a1ada97f895e8e6f12935ce1058001df0619906556fc1d82\": container with ID starting with ba569f18d36a42b3a1ada97f895e8e6f12935ce1058001df0619906556fc1d82 not found: ID does not exist" Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.709191 4558 scope.go:117] "RemoveContainer" containerID="844d6ed85e1a90ecc3e801bcad9126bb9e1b7e1969ac475bcae5bcc3fac3d05e" Jan 20 17:22:04 crc kubenswrapper[4558]: E0120 17:22:04.709558 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"844d6ed85e1a90ecc3e801bcad9126bb9e1b7e1969ac475bcae5bcc3fac3d05e\": container with ID starting with 844d6ed85e1a90ecc3e801bcad9126bb9e1b7e1969ac475bcae5bcc3fac3d05e not found: ID does not exist" containerID="844d6ed85e1a90ecc3e801bcad9126bb9e1b7e1969ac475bcae5bcc3fac3d05e" Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.709577 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"844d6ed85e1a90ecc3e801bcad9126bb9e1b7e1969ac475bcae5bcc3fac3d05e"} err="failed to get container status \"844d6ed85e1a90ecc3e801bcad9126bb9e1b7e1969ac475bcae5bcc3fac3d05e\": rpc error: code = NotFound desc = could not find container \"844d6ed85e1a90ecc3e801bcad9126bb9e1b7e1969ac475bcae5bcc3fac3d05e\": container with ID starting with 844d6ed85e1a90ecc3e801bcad9126bb9e1b7e1969ac475bcae5bcc3fac3d05e not found: ID does not exist" Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.709591 4558 scope.go:117] "RemoveContainer" containerID="14b8554fd67e8203eaf876170ae3e7775aa3b4c177bc6900890e44c6bc4a44f4" Jan 20 17:22:04 crc kubenswrapper[4558]: E0120 17:22:04.709890 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"14b8554fd67e8203eaf876170ae3e7775aa3b4c177bc6900890e44c6bc4a44f4\": container with ID starting with 14b8554fd67e8203eaf876170ae3e7775aa3b4c177bc6900890e44c6bc4a44f4 not found: ID does not exist" containerID="14b8554fd67e8203eaf876170ae3e7775aa3b4c177bc6900890e44c6bc4a44f4" Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.709909 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"14b8554fd67e8203eaf876170ae3e7775aa3b4c177bc6900890e44c6bc4a44f4"} err="failed to get container status \"14b8554fd67e8203eaf876170ae3e7775aa3b4c177bc6900890e44c6bc4a44f4\": rpc error: code = NotFound desc = could not find container \"14b8554fd67e8203eaf876170ae3e7775aa3b4c177bc6900890e44c6bc4a44f4\": container with ID starting with 14b8554fd67e8203eaf876170ae3e7775aa3b4c177bc6900890e44c6bc4a44f4 not found: ID does not exist" Jan 20 17:22:04 crc kubenswrapper[4558]: 
I0120 17:22:04.709924 4558 scope.go:117] "RemoveContainer" containerID="097f5821ec52a17e967649b5b6967edea162a947874924e6600e4d07725cae05" Jan 20 17:22:04 crc kubenswrapper[4558]: E0120 17:22:04.710229 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"097f5821ec52a17e967649b5b6967edea162a947874924e6600e4d07725cae05\": container with ID starting with 097f5821ec52a17e967649b5b6967edea162a947874924e6600e4d07725cae05 not found: ID does not exist" containerID="097f5821ec52a17e967649b5b6967edea162a947874924e6600e4d07725cae05" Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.710255 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"097f5821ec52a17e967649b5b6967edea162a947874924e6600e4d07725cae05"} err="failed to get container status \"097f5821ec52a17e967649b5b6967edea162a947874924e6600e4d07725cae05\": rpc error: code = NotFound desc = could not find container \"097f5821ec52a17e967649b5b6967edea162a947874924e6600e4d07725cae05\": container with ID starting with 097f5821ec52a17e967649b5b6967edea162a947874924e6600e4d07725cae05 not found: ID does not exist" Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.710273 4558 scope.go:117] "RemoveContainer" containerID="03d0eae4935912c7dedaac60a57d7749cb144a4fb7adabc826dffe289fecf3a3" Jan 20 17:22:04 crc kubenswrapper[4558]: E0120 17:22:04.710600 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"03d0eae4935912c7dedaac60a57d7749cb144a4fb7adabc826dffe289fecf3a3\": container with ID starting with 03d0eae4935912c7dedaac60a57d7749cb144a4fb7adabc826dffe289fecf3a3 not found: ID does not exist" containerID="03d0eae4935912c7dedaac60a57d7749cb144a4fb7adabc826dffe289fecf3a3" Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.710632 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"03d0eae4935912c7dedaac60a57d7749cb144a4fb7adabc826dffe289fecf3a3"} err="failed to get container status \"03d0eae4935912c7dedaac60a57d7749cb144a4fb7adabc826dffe289fecf3a3\": rpc error: code = NotFound desc = could not find container \"03d0eae4935912c7dedaac60a57d7749cb144a4fb7adabc826dffe289fecf3a3\": container with ID starting with 03d0eae4935912c7dedaac60a57d7749cb144a4fb7adabc826dffe289fecf3a3 not found: ID does not exist" Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.710647 4558 scope.go:117] "RemoveContainer" containerID="04e1cbf4a49ae809f6cd9ad66cf664edf23a039b8a03c43ccc28c839028f90d4" Jan 20 17:22:04 crc kubenswrapper[4558]: E0120 17:22:04.711053 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"04e1cbf4a49ae809f6cd9ad66cf664edf23a039b8a03c43ccc28c839028f90d4\": container with ID starting with 04e1cbf4a49ae809f6cd9ad66cf664edf23a039b8a03c43ccc28c839028f90d4 not found: ID does not exist" containerID="04e1cbf4a49ae809f6cd9ad66cf664edf23a039b8a03c43ccc28c839028f90d4" Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.711072 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"04e1cbf4a49ae809f6cd9ad66cf664edf23a039b8a03c43ccc28c839028f90d4"} err="failed to get container status \"04e1cbf4a49ae809f6cd9ad66cf664edf23a039b8a03c43ccc28c839028f90d4\": rpc error: code = NotFound desc = could not find container \"04e1cbf4a49ae809f6cd9ad66cf664edf23a039b8a03c43ccc28c839028f90d4\": container 
with ID starting with 04e1cbf4a49ae809f6cd9ad66cf664edf23a039b8a03c43ccc28c839028f90d4 not found: ID does not exist" Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.711085 4558 scope.go:117] "RemoveContainer" containerID="449d04837104cda04f1b998b58315c57a3f8dd508073572dfb1cf641a8e99cdf" Jan 20 17:22:04 crc kubenswrapper[4558]: E0120 17:22:04.711519 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"449d04837104cda04f1b998b58315c57a3f8dd508073572dfb1cf641a8e99cdf\": container with ID starting with 449d04837104cda04f1b998b58315c57a3f8dd508073572dfb1cf641a8e99cdf not found: ID does not exist" containerID="449d04837104cda04f1b998b58315c57a3f8dd508073572dfb1cf641a8e99cdf" Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.711540 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"449d04837104cda04f1b998b58315c57a3f8dd508073572dfb1cf641a8e99cdf"} err="failed to get container status \"449d04837104cda04f1b998b58315c57a3f8dd508073572dfb1cf641a8e99cdf\": rpc error: code = NotFound desc = could not find container \"449d04837104cda04f1b998b58315c57a3f8dd508073572dfb1cf641a8e99cdf\": container with ID starting with 449d04837104cda04f1b998b58315c57a3f8dd508073572dfb1cf641a8e99cdf not found: ID does not exist" Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.711555 4558 scope.go:117] "RemoveContainer" containerID="10b8b90c30337c202ffa5efc8376aef571af66cac566f514e6c26f2a3c0a8340" Jan 20 17:22:04 crc kubenswrapper[4558]: E0120 17:22:04.711828 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"10b8b90c30337c202ffa5efc8376aef571af66cac566f514e6c26f2a3c0a8340\": container with ID starting with 10b8b90c30337c202ffa5efc8376aef571af66cac566f514e6c26f2a3c0a8340 not found: ID does not exist" containerID="10b8b90c30337c202ffa5efc8376aef571af66cac566f514e6c26f2a3c0a8340" Jan 20 17:22:04 crc kubenswrapper[4558]: I0120 17:22:04.711848 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"10b8b90c30337c202ffa5efc8376aef571af66cac566f514e6c26f2a3c0a8340"} err="failed to get container status \"10b8b90c30337c202ffa5efc8376aef571af66cac566f514e6c26f2a3c0a8340\": rpc error: code = NotFound desc = could not find container \"10b8b90c30337c202ffa5efc8376aef571af66cac566f514e6c26f2a3c0a8340\": container with ID starting with 10b8b90c30337c202ffa5efc8376aef571af66cac566f514e6c26f2a3c0a8340 not found: ID does not exist" Jan 20 17:22:04 crc kubenswrapper[4558]: E0120 17:22:04.951840 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="cd9412be70441b3487a5c8e313f30e1490c914b1b61e47d06c5f1b8c6448e338" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:22:04 crc kubenswrapper[4558]: E0120 17:22:04.953383 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="cd9412be70441b3487a5c8e313f30e1490c914b1b61e47d06c5f1b8c6448e338" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:22:04 crc kubenswrapper[4558]: E0120 17:22:04.954851 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: 
cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="cd9412be70441b3487a5c8e313f30e1490c914b1b61e47d06c5f1b8c6448e338" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:22:04 crc kubenswrapper[4558]: E0120 17:22:04.954917 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podUID="477f15ac-57ae-4bb2-9230-2395ff3bf9ad" containerName="nova-cell1-conductor-conductor" Jan 20 17:22:05 crc kubenswrapper[4558]: I0120 17:22:05.283813 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_neutron-5c7bf5978d-d2t27_abb17bba-a50c-4b0f-941b-3a09a8bcac1e/neutron-api/0.log" Jan 20 17:22:05 crc kubenswrapper[4558]: I0120 17:22:05.284243 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-5c7bf5978d-d2t27" event={"ID":"abb17bba-a50c-4b0f-941b-3a09a8bcac1e","Type":"ContainerDied","Data":"742418c85c932e77f7c12d61027061005c5117cb0365e2a4c5c45dab03dcd4ad"} Jan 20 17:22:05 crc kubenswrapper[4558]: I0120 17:22:05.284295 4558 scope.go:117] "RemoveContainer" containerID="71a8bccb6db5181577ff2da6379fafa98f3cfa0c15a2c71a8806e5f0bc7f567a" Jan 20 17:22:05 crc kubenswrapper[4558]: I0120 17:22:05.284280 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-5c7bf5978d-d2t27" Jan 20 17:22:05 crc kubenswrapper[4558]: I0120 17:22:05.306321 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-5c7bf5978d-d2t27"] Jan 20 17:22:05 crc kubenswrapper[4558]: I0120 17:22:05.311269 4558 scope.go:117] "RemoveContainer" containerID="fb57b9facd8c8baac31528678e058b6e227a981807b81524108e8fec27a589a1" Jan 20 17:22:05 crc kubenswrapper[4558]: I0120 17:22:05.315152 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-5c7bf5978d-d2t27"] Jan 20 17:22:06 crc kubenswrapper[4558]: I0120 17:22:06.085398 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:22:06 crc kubenswrapper[4558]: I0120 17:22:06.187259 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0378e8e5-48af-48c3-9100-80622a77533b-config-data\") pod \"0378e8e5-48af-48c3-9100-80622a77533b\" (UID: \"0378e8e5-48af-48c3-9100-80622a77533b\") " Jan 20 17:22:06 crc kubenswrapper[4558]: I0120 17:22:06.187344 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/0378e8e5-48af-48c3-9100-80622a77533b-ceilometer-tls-certs\") pod \"0378e8e5-48af-48c3-9100-80622a77533b\" (UID: \"0378e8e5-48af-48c3-9100-80622a77533b\") " Jan 20 17:22:06 crc kubenswrapper[4558]: I0120 17:22:06.187418 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0378e8e5-48af-48c3-9100-80622a77533b-sg-core-conf-yaml\") pod \"0378e8e5-48af-48c3-9100-80622a77533b\" (UID: \"0378e8e5-48af-48c3-9100-80622a77533b\") " Jan 20 17:22:06 crc kubenswrapper[4558]: I0120 17:22:06.187455 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0378e8e5-48af-48c3-9100-80622a77533b-run-httpd\") pod \"0378e8e5-48af-48c3-9100-80622a77533b\" (UID: \"0378e8e5-48af-48c3-9100-80622a77533b\") " Jan 20 17:22:06 crc kubenswrapper[4558]: I0120 17:22:06.187483 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0378e8e5-48af-48c3-9100-80622a77533b-combined-ca-bundle\") pod \"0378e8e5-48af-48c3-9100-80622a77533b\" (UID: \"0378e8e5-48af-48c3-9100-80622a77533b\") " Jan 20 17:22:06 crc kubenswrapper[4558]: I0120 17:22:06.187519 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0378e8e5-48af-48c3-9100-80622a77533b-log-httpd\") pod \"0378e8e5-48af-48c3-9100-80622a77533b\" (UID: \"0378e8e5-48af-48c3-9100-80622a77533b\") " Jan 20 17:22:06 crc kubenswrapper[4558]: I0120 17:22:06.187584 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0378e8e5-48af-48c3-9100-80622a77533b-scripts\") pod \"0378e8e5-48af-48c3-9100-80622a77533b\" (UID: \"0378e8e5-48af-48c3-9100-80622a77533b\") " Jan 20 17:22:06 crc kubenswrapper[4558]: I0120 17:22:06.187649 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pppjg\" (UniqueName: \"kubernetes.io/projected/0378e8e5-48af-48c3-9100-80622a77533b-kube-api-access-pppjg\") pod \"0378e8e5-48af-48c3-9100-80622a77533b\" (UID: \"0378e8e5-48af-48c3-9100-80622a77533b\") " Jan 20 17:22:06 crc kubenswrapper[4558]: I0120 17:22:06.188079 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0378e8e5-48af-48c3-9100-80622a77533b-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "0378e8e5-48af-48c3-9100-80622a77533b" (UID: "0378e8e5-48af-48c3-9100-80622a77533b"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:22:06 crc kubenswrapper[4558]: I0120 17:22:06.188431 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0378e8e5-48af-48c3-9100-80622a77533b-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "0378e8e5-48af-48c3-9100-80622a77533b" (UID: "0378e8e5-48af-48c3-9100-80622a77533b"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:22:06 crc kubenswrapper[4558]: I0120 17:22:06.193971 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0378e8e5-48af-48c3-9100-80622a77533b-scripts" (OuterVolumeSpecName: "scripts") pod "0378e8e5-48af-48c3-9100-80622a77533b" (UID: "0378e8e5-48af-48c3-9100-80622a77533b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:22:06 crc kubenswrapper[4558]: I0120 17:22:06.194015 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0378e8e5-48af-48c3-9100-80622a77533b-kube-api-access-pppjg" (OuterVolumeSpecName: "kube-api-access-pppjg") pod "0378e8e5-48af-48c3-9100-80622a77533b" (UID: "0378e8e5-48af-48c3-9100-80622a77533b"). InnerVolumeSpecName "kube-api-access-pppjg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:22:06 crc kubenswrapper[4558]: I0120 17:22:06.207673 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0378e8e5-48af-48c3-9100-80622a77533b-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "0378e8e5-48af-48c3-9100-80622a77533b" (UID: "0378e8e5-48af-48c3-9100-80622a77533b"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:22:06 crc kubenswrapper[4558]: I0120 17:22:06.222028 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0378e8e5-48af-48c3-9100-80622a77533b-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "0378e8e5-48af-48c3-9100-80622a77533b" (UID: "0378e8e5-48af-48c3-9100-80622a77533b"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:22:06 crc kubenswrapper[4558]: I0120 17:22:06.232454 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0378e8e5-48af-48c3-9100-80622a77533b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0378e8e5-48af-48c3-9100-80622a77533b" (UID: "0378e8e5-48af-48c3-9100-80622a77533b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:22:06 crc kubenswrapper[4558]: I0120 17:22:06.245748 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0378e8e5-48af-48c3-9100-80622a77533b-config-data" (OuterVolumeSpecName: "config-data") pod "0378e8e5-48af-48c3-9100-80622a77533b" (UID: "0378e8e5-48af-48c3-9100-80622a77533b"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:22:06 crc kubenswrapper[4558]: I0120 17:22:06.289905 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pppjg\" (UniqueName: \"kubernetes.io/projected/0378e8e5-48af-48c3-9100-80622a77533b-kube-api-access-pppjg\") on node \"crc\" DevicePath \"\"" Jan 20 17:22:06 crc kubenswrapper[4558]: I0120 17:22:06.289938 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0378e8e5-48af-48c3-9100-80622a77533b-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:22:06 crc kubenswrapper[4558]: I0120 17:22:06.289952 4558 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/0378e8e5-48af-48c3-9100-80622a77533b-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:22:06 crc kubenswrapper[4558]: I0120 17:22:06.289965 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0378e8e5-48af-48c3-9100-80622a77533b-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:22:06 crc kubenswrapper[4558]: I0120 17:22:06.289976 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0378e8e5-48af-48c3-9100-80622a77533b-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:22:06 crc kubenswrapper[4558]: I0120 17:22:06.289987 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0378e8e5-48af-48c3-9100-80622a77533b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:22:06 crc kubenswrapper[4558]: I0120 17:22:06.289997 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0378e8e5-48af-48c3-9100-80622a77533b-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:22:06 crc kubenswrapper[4558]: I0120 17:22:06.290005 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0378e8e5-48af-48c3-9100-80622a77533b-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:22:06 crc kubenswrapper[4558]: I0120 17:22:06.303866 4558 generic.go:334] "Generic (PLEG): container finished" podID="0378e8e5-48af-48c3-9100-80622a77533b" containerID="253959d11bdee04e0c4129162924f6cddfeb4ed19ae0ee14fb63f2b31dd7810a" exitCode=137 Jan 20 17:22:06 crc kubenswrapper[4558]: I0120 17:22:06.303916 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"0378e8e5-48af-48c3-9100-80622a77533b","Type":"ContainerDied","Data":"253959d11bdee04e0c4129162924f6cddfeb4ed19ae0ee14fb63f2b31dd7810a"} Jan 20 17:22:06 crc kubenswrapper[4558]: I0120 17:22:06.303959 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:22:06 crc kubenswrapper[4558]: I0120 17:22:06.303979 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"0378e8e5-48af-48c3-9100-80622a77533b","Type":"ContainerDied","Data":"8242e34be97c8c6bad622b1f540b04254e03ba46c6ececd1dffb4c8d13ea36e3"} Jan 20 17:22:06 crc kubenswrapper[4558]: I0120 17:22:06.304026 4558 scope.go:117] "RemoveContainer" containerID="d207a7762509b3d4788233142ef6435d9d2416a2d5fceb734f0d9d4e3721e67e" Jan 20 17:22:06 crc kubenswrapper[4558]: I0120 17:22:06.326713 4558 scope.go:117] "RemoveContainer" containerID="75449ae33552c994c4ad5b2bca2b18138daeaa442df1920c73602222ea16d437" Jan 20 17:22:06 crc kubenswrapper[4558]: I0120 17:22:06.349334 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:22:06 crc kubenswrapper[4558]: I0120 17:22:06.353485 4558 scope.go:117] "RemoveContainer" containerID="253959d11bdee04e0c4129162924f6cddfeb4ed19ae0ee14fb63f2b31dd7810a" Jan 20 17:22:06 crc kubenswrapper[4558]: I0120 17:22:06.357737 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:22:06 crc kubenswrapper[4558]: I0120 17:22:06.371007 4558 scope.go:117] "RemoveContainer" containerID="6afebd361ef0e45cdb43a62661e5fbd5a98fd241267936b37385d0bfde77a3bd" Jan 20 17:22:06 crc kubenswrapper[4558]: I0120 17:22:06.386604 4558 scope.go:117] "RemoveContainer" containerID="d207a7762509b3d4788233142ef6435d9d2416a2d5fceb734f0d9d4e3721e67e" Jan 20 17:22:06 crc kubenswrapper[4558]: E0120 17:22:06.387073 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d207a7762509b3d4788233142ef6435d9d2416a2d5fceb734f0d9d4e3721e67e\": container with ID starting with d207a7762509b3d4788233142ef6435d9d2416a2d5fceb734f0d9d4e3721e67e not found: ID does not exist" containerID="d207a7762509b3d4788233142ef6435d9d2416a2d5fceb734f0d9d4e3721e67e" Jan 20 17:22:06 crc kubenswrapper[4558]: I0120 17:22:06.387148 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d207a7762509b3d4788233142ef6435d9d2416a2d5fceb734f0d9d4e3721e67e"} err="failed to get container status \"d207a7762509b3d4788233142ef6435d9d2416a2d5fceb734f0d9d4e3721e67e\": rpc error: code = NotFound desc = could not find container \"d207a7762509b3d4788233142ef6435d9d2416a2d5fceb734f0d9d4e3721e67e\": container with ID starting with d207a7762509b3d4788233142ef6435d9d2416a2d5fceb734f0d9d4e3721e67e not found: ID does not exist" Jan 20 17:22:06 crc kubenswrapper[4558]: I0120 17:22:06.387191 4558 scope.go:117] "RemoveContainer" containerID="75449ae33552c994c4ad5b2bca2b18138daeaa442df1920c73602222ea16d437" Jan 20 17:22:06 crc kubenswrapper[4558]: E0120 17:22:06.387480 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"75449ae33552c994c4ad5b2bca2b18138daeaa442df1920c73602222ea16d437\": container with ID starting with 75449ae33552c994c4ad5b2bca2b18138daeaa442df1920c73602222ea16d437 not found: ID does not exist" containerID="75449ae33552c994c4ad5b2bca2b18138daeaa442df1920c73602222ea16d437" Jan 20 17:22:06 crc kubenswrapper[4558]: I0120 17:22:06.387513 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"75449ae33552c994c4ad5b2bca2b18138daeaa442df1920c73602222ea16d437"} err="failed to get container 
status \"75449ae33552c994c4ad5b2bca2b18138daeaa442df1920c73602222ea16d437\": rpc error: code = NotFound desc = could not find container \"75449ae33552c994c4ad5b2bca2b18138daeaa442df1920c73602222ea16d437\": container with ID starting with 75449ae33552c994c4ad5b2bca2b18138daeaa442df1920c73602222ea16d437 not found: ID does not exist" Jan 20 17:22:06 crc kubenswrapper[4558]: I0120 17:22:06.387539 4558 scope.go:117] "RemoveContainer" containerID="253959d11bdee04e0c4129162924f6cddfeb4ed19ae0ee14fb63f2b31dd7810a" Jan 20 17:22:06 crc kubenswrapper[4558]: E0120 17:22:06.387775 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"253959d11bdee04e0c4129162924f6cddfeb4ed19ae0ee14fb63f2b31dd7810a\": container with ID starting with 253959d11bdee04e0c4129162924f6cddfeb4ed19ae0ee14fb63f2b31dd7810a not found: ID does not exist" containerID="253959d11bdee04e0c4129162924f6cddfeb4ed19ae0ee14fb63f2b31dd7810a" Jan 20 17:22:06 crc kubenswrapper[4558]: I0120 17:22:06.387803 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"253959d11bdee04e0c4129162924f6cddfeb4ed19ae0ee14fb63f2b31dd7810a"} err="failed to get container status \"253959d11bdee04e0c4129162924f6cddfeb4ed19ae0ee14fb63f2b31dd7810a\": rpc error: code = NotFound desc = could not find container \"253959d11bdee04e0c4129162924f6cddfeb4ed19ae0ee14fb63f2b31dd7810a\": container with ID starting with 253959d11bdee04e0c4129162924f6cddfeb4ed19ae0ee14fb63f2b31dd7810a not found: ID does not exist" Jan 20 17:22:06 crc kubenswrapper[4558]: I0120 17:22:06.387820 4558 scope.go:117] "RemoveContainer" containerID="6afebd361ef0e45cdb43a62661e5fbd5a98fd241267936b37385d0bfde77a3bd" Jan 20 17:22:06 crc kubenswrapper[4558]: E0120 17:22:06.388015 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6afebd361ef0e45cdb43a62661e5fbd5a98fd241267936b37385d0bfde77a3bd\": container with ID starting with 6afebd361ef0e45cdb43a62661e5fbd5a98fd241267936b37385d0bfde77a3bd not found: ID does not exist" containerID="6afebd361ef0e45cdb43a62661e5fbd5a98fd241267936b37385d0bfde77a3bd" Jan 20 17:22:06 crc kubenswrapper[4558]: I0120 17:22:06.388036 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6afebd361ef0e45cdb43a62661e5fbd5a98fd241267936b37385d0bfde77a3bd"} err="failed to get container status \"6afebd361ef0e45cdb43a62661e5fbd5a98fd241267936b37385d0bfde77a3bd\": rpc error: code = NotFound desc = could not find container \"6afebd361ef0e45cdb43a62661e5fbd5a98fd241267936b37385d0bfde77a3bd\": container with ID starting with 6afebd361ef0e45cdb43a62661e5fbd5a98fd241267936b37385d0bfde77a3bd not found: ID does not exist" Jan 20 17:22:06 crc kubenswrapper[4558]: I0120 17:22:06.579037 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0378e8e5-48af-48c3-9100-80622a77533b" path="/var/lib/kubelet/pods/0378e8e5-48af-48c3-9100-80622a77533b/volumes" Jan 20 17:22:06 crc kubenswrapper[4558]: I0120 17:22:06.580093 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="abb17bba-a50c-4b0f-941b-3a09a8bcac1e" path="/var/lib/kubelet/pods/abb17bba-a50c-4b0f-941b-3a09a8bcac1e/volumes" Jan 20 17:22:06 crc kubenswrapper[4558]: I0120 17:22:06.674907 4558 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","pod9d974489-32f7-4541-9a58-6a215c5d2071"] err="unable to destroy cgroup 
paths for cgroup [kubepods besteffort pod9d974489-32f7-4541-9a58-6a215c5d2071] : Timed out while waiting for systemd to remove kubepods-besteffort-pod9d974489_32f7_4541_9a58_6a215c5d2071.slice" Jan 20 17:22:06 crc kubenswrapper[4558]: I0120 17:22:06.676435 4558 scope.go:117] "RemoveContainer" containerID="0d8f4bd8e6d346771ef889450d9cc94bd4e4a03f37ab45706759418add0250d9" Jan 20 17:22:06 crc kubenswrapper[4558]: I0120 17:22:06.699845 4558 scope.go:117] "RemoveContainer" containerID="32341c380c826a7aa830f2d1cd35459c6a706f48a875899fa1c9c906b440f63c" Jan 20 17:22:07 crc kubenswrapper[4558]: I0120 17:22:07.063936 4558 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","pod52e65b62-cbeb-475e-b3ec-2a6259c44b25"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort pod52e65b62-cbeb-475e-b3ec-2a6259c44b25] : Timed out while waiting for systemd to remove kubepods-besteffort-pod52e65b62_cbeb_475e_b3ec_2a6259c44b25.slice" Jan 20 17:22:07 crc kubenswrapper[4558]: I0120 17:22:07.073378 4558 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","pod25e48519-09af-42b4-835a-485273267aca"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort pod25e48519-09af-42b4-835a-485273267aca] : Timed out while waiting for systemd to remove kubepods-besteffort-pod25e48519_09af_42b4_835a_485273267aca.slice" Jan 20 17:22:07 crc kubenswrapper[4558]: E0120 17:22:07.357550 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="cef9f816792ffb8c331439791b6687f021b7a0df270a63be33a28f933f0c1c2b" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:22:07 crc kubenswrapper[4558]: E0120 17:22:07.359761 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="cef9f816792ffb8c331439791b6687f021b7a0df270a63be33a28f933f0c1c2b" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:22:07 crc kubenswrapper[4558]: E0120 17:22:07.361079 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="cef9f816792ffb8c331439791b6687f021b7a0df270a63be33a28f933f0c1c2b" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:22:07 crc kubenswrapper[4558]: E0120 17:22:07.361123 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="8eb09b19-2337-4fa1-8c9a-a3a45c9d8a48" containerName="nova-scheduler-scheduler" Jan 20 17:22:09 crc kubenswrapper[4558]: E0120 17:22:09.476896 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d1ff32632fd1bfa14bc2e72839ef954e6b06e71a5542a2689718a87ddb352720" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:22:09 crc kubenswrapper[4558]: E0120 17:22:09.478790 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = 
command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d1ff32632fd1bfa14bc2e72839ef954e6b06e71a5542a2689718a87ddb352720" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:22:09 crc kubenswrapper[4558]: E0120 17:22:09.480717 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d1ff32632fd1bfa14bc2e72839ef954e6b06e71a5542a2689718a87ddb352720" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:22:09 crc kubenswrapper[4558]: E0120 17:22:09.480794 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="1ce622e1-3bed-423c-8870-1243fabacc8e" containerName="nova-cell0-conductor-conductor" Jan 20 17:22:09 crc kubenswrapper[4558]: E0120 17:22:09.953842 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="cd9412be70441b3487a5c8e313f30e1490c914b1b61e47d06c5f1b8c6448e338" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:22:09 crc kubenswrapper[4558]: E0120 17:22:09.955417 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="cd9412be70441b3487a5c8e313f30e1490c914b1b61e47d06c5f1b8c6448e338" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:22:09 crc kubenswrapper[4558]: E0120 17:22:09.956609 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="cd9412be70441b3487a5c8e313f30e1490c914b1b61e47d06c5f1b8c6448e338" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:22:09 crc kubenswrapper[4558]: E0120 17:22:09.956702 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podUID="477f15ac-57ae-4bb2-9230-2395ff3bf9ad" containerName="nova-cell1-conductor-conductor" Jan 20 17:22:12 crc kubenswrapper[4558]: E0120 17:22:12.358467 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="cef9f816792ffb8c331439791b6687f021b7a0df270a63be33a28f933f0c1c2b" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:22:12 crc kubenswrapper[4558]: E0120 17:22:12.360441 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="cef9f816792ffb8c331439791b6687f021b7a0df270a63be33a28f933f0c1c2b" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:22:12 crc kubenswrapper[4558]: E0120 17:22:12.362455 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = 
command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="cef9f816792ffb8c331439791b6687f021b7a0df270a63be33a28f933f0c1c2b" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:22:12 crc kubenswrapper[4558]: E0120 17:22:12.362541 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="8eb09b19-2337-4fa1-8c9a-a3a45c9d8a48" containerName="nova-scheduler-scheduler" Jan 20 17:22:12 crc kubenswrapper[4558]: I0120 17:22:12.916623 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:22:12 crc kubenswrapper[4558]: I0120 17:22:12.922834 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:22:12 crc kubenswrapper[4558]: I0120 17:22:12.993121 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-qdbtt"] Jan 20 17:22:12 crc kubenswrapper[4558]: I0120 17:22:12.996897 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-qdbtt"] Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.102364 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-4wlx8"] Jan 20 17:22:13 crc kubenswrapper[4558]: E0120 17:22:13.102747 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fae781f7-d8cb-4f32-992f-43d7dac82655" containerName="account-reaper" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.102769 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="fae781f7-d8cb-4f32-992f-43d7dac82655" containerName="account-reaper" Jan 20 17:22:13 crc kubenswrapper[4558]: E0120 17:22:13.102786 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4eb93277-301f-486e-a1e9-f323f9bd9cb4" containerName="mariadb-account-create-update" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.102794 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4eb93277-301f-486e-a1e9-f323f9bd9cb4" containerName="mariadb-account-create-update" Jan 20 17:22:13 crc kubenswrapper[4558]: E0120 17:22:13.102803 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0378e8e5-48af-48c3-9100-80622a77533b" containerName="ceilometer-notification-agent" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.102822 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0378e8e5-48af-48c3-9100-80622a77533b" containerName="ceilometer-notification-agent" Jan 20 17:22:13 crc kubenswrapper[4558]: E0120 17:22:13.102836 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9" containerName="keystone-api" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.102841 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9" containerName="keystone-api" Jan 20 17:22:13 crc kubenswrapper[4558]: E0120 17:22:13.102849 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e72083f2-d324-432e-9bb0-5f44f2023489" containerName="cinder-api-log" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.102853 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e72083f2-d324-432e-9bb0-5f44f2023489" containerName="cinder-api-log" Jan 20 17:22:13 crc 
kubenswrapper[4558]: E0120 17:22:13.102867 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67faa384-2157-4ce6-a5b8-aa2c3f4c3b20" containerName="init" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.102872 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="67faa384-2157-4ce6-a5b8-aa2c3f4c3b20" containerName="init" Jan 20 17:22:13 crc kubenswrapper[4558]: E0120 17:22:13.102883 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6dae93de-b907-44f7-a94c-c691eee0af7f" containerName="setup-container" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.102890 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6dae93de-b907-44f7-a94c-c691eee0af7f" containerName="setup-container" Jan 20 17:22:13 crc kubenswrapper[4558]: E0120 17:22:13.102898 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="344f9f31-8a81-4544-b782-5aa78dfc5cc2" containerName="setup-container" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.102903 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="344f9f31-8a81-4544-b782-5aa78dfc5cc2" containerName="setup-container" Jan 20 17:22:13 crc kubenswrapper[4558]: E0120 17:22:13.102911 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fae781f7-d8cb-4f32-992f-43d7dac82655" containerName="account-server" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.102916 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="fae781f7-d8cb-4f32-992f-43d7dac82655" containerName="account-server" Jan 20 17:22:13 crc kubenswrapper[4558]: E0120 17:22:13.102925 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8573a632-84e0-4f80-b811-5646b571c318" containerName="rabbitmq" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.102930 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8573a632-84e0-4f80-b811-5646b571c318" containerName="rabbitmq" Jan 20 17:22:13 crc kubenswrapper[4558]: E0120 17:22:13.102940 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1382710-0399-4564-8c83-271c645ce04e" containerName="memcached" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.102945 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1382710-0399-4564-8c83-271c645ce04e" containerName="memcached" Jan 20 17:22:13 crc kubenswrapper[4558]: E0120 17:22:13.102954 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="872ae103-12d2-4686-aa02-4e3375eb510a" containerName="galera" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.102959 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="872ae103-12d2-4686-aa02-4e3375eb510a" containerName="galera" Jan 20 17:22:13 crc kubenswrapper[4558]: E0120 17:22:13.102967 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4eb93277-301f-486e-a1e9-f323f9bd9cb4" containerName="mariadb-account-create-update" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.102972 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4eb93277-301f-486e-a1e9-f323f9bd9cb4" containerName="mariadb-account-create-update" Jan 20 17:22:13 crc kubenswrapper[4558]: E0120 17:22:13.102978 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09d95674-9970-471d-8061-997e69ddda11" containerName="nova-metadata-log" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.102983 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="09d95674-9970-471d-8061-997e69ddda11" containerName="nova-metadata-log" Jan 20 17:22:13 crc 
kubenswrapper[4558]: E0120 17:22:13.102990 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fae781f7-d8cb-4f32-992f-43d7dac82655" containerName="object-auditor" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.102996 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="fae781f7-d8cb-4f32-992f-43d7dac82655" containerName="object-auditor" Jan 20 17:22:13 crc kubenswrapper[4558]: E0120 17:22:13.103001 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ce622e1-3bed-423c-8870-1243fabacc8e" containerName="nova-cell0-conductor-conductor" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.103006 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ce622e1-3bed-423c-8870-1243fabacc8e" containerName="nova-cell0-conductor-conductor" Jan 20 17:22:13 crc kubenswrapper[4558]: E0120 17:22:13.103014 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e72083f2-d324-432e-9bb0-5f44f2023489" containerName="cinder-api" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.103019 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e72083f2-d324-432e-9bb0-5f44f2023489" containerName="cinder-api" Jan 20 17:22:13 crc kubenswrapper[4558]: E0120 17:22:13.103025 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="88418ec6-c589-463f-b8ec-c12464810c07" containerName="openstack-network-exporter" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.103030 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="88418ec6-c589-463f-b8ec-c12464810c07" containerName="openstack-network-exporter" Jan 20 17:22:13 crc kubenswrapper[4558]: E0120 17:22:13.103039 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f8ae24b-258e-4d32-b312-99f5015f83d6" containerName="barbican-worker-log" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.103044 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f8ae24b-258e-4d32-b312-99f5015f83d6" containerName="barbican-worker-log" Jan 20 17:22:13 crc kubenswrapper[4558]: E0120 17:22:13.103051 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c52efaf-b737-47bf-9ca1-109a28e19113" containerName="rabbitmq" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.103056 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c52efaf-b737-47bf-9ca1-109a28e19113" containerName="rabbitmq" Jan 20 17:22:13 crc kubenswrapper[4558]: E0120 17:22:13.103066 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0378e8e5-48af-48c3-9100-80622a77533b" containerName="sg-core" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.103072 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0378e8e5-48af-48c3-9100-80622a77533b" containerName="sg-core" Jan 20 17:22:13 crc kubenswrapper[4558]: E0120 17:22:13.103079 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55138e51-715c-42a1-8e1d-bca65a31d55c" containerName="glance-log" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.103084 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="55138e51-715c-42a1-8e1d-bca65a31d55c" containerName="glance-log" Jan 20 17:22:13 crc kubenswrapper[4558]: E0120 17:22:13.103090 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="594280a4-cae4-4455-9838-ba4ef8d2f2c1" containerName="mysql-bootstrap" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.103095 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="594280a4-cae4-4455-9838-ba4ef8d2f2c1" 
containerName="mysql-bootstrap" Jan 20 17:22:13 crc kubenswrapper[4558]: E0120 17:22:13.103103 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8573a632-84e0-4f80-b811-5646b571c318" containerName="setup-container" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.103110 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8573a632-84e0-4f80-b811-5646b571c318" containerName="setup-container" Jan 20 17:22:13 crc kubenswrapper[4558]: E0120 17:22:13.103120 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14af4a81-8568-425c-a209-4d90a042c365" containerName="barbican-api" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.103126 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="14af4a81-8568-425c-a209-4d90a042c365" containerName="barbican-api" Jan 20 17:22:13 crc kubenswrapper[4558]: E0120 17:22:13.103133 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fae781f7-d8cb-4f32-992f-43d7dac82655" containerName="container-updater" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.103139 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="fae781f7-d8cb-4f32-992f-43d7dac82655" containerName="container-updater" Jan 20 17:22:13 crc kubenswrapper[4558]: E0120 17:22:13.103148 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fae781f7-d8cb-4f32-992f-43d7dac82655" containerName="object-server" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.103155 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="fae781f7-d8cb-4f32-992f-43d7dac82655" containerName="object-server" Jan 20 17:22:13 crc kubenswrapper[4558]: E0120 17:22:13.103180 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="24114ddb-3b30-42ac-9d61-cfeb15d58728" containerName="setup-container" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.103185 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="24114ddb-3b30-42ac-9d61-cfeb15d58728" containerName="setup-container" Jan 20 17:22:13 crc kubenswrapper[4558]: E0120 17:22:13.103193 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c809052-d9bb-4982-8271-5b7a9a6f28f9" containerName="setup-container" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.103198 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c809052-d9bb-4982-8271-5b7a9a6f28f9" containerName="setup-container" Jan 20 17:22:13 crc kubenswrapper[4558]: E0120 17:22:13.103205 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fae781f7-d8cb-4f32-992f-43d7dac82655" containerName="object-replicator" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.103209 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="fae781f7-d8cb-4f32-992f-43d7dac82655" containerName="object-replicator" Jan 20 17:22:13 crc kubenswrapper[4558]: E0120 17:22:13.103217 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fae781f7-d8cb-4f32-992f-43d7dac82655" containerName="object-expirer" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.103222 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="fae781f7-d8cb-4f32-992f-43d7dac82655" containerName="object-expirer" Jan 20 17:22:13 crc kubenswrapper[4558]: E0120 17:22:13.103229 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fae781f7-d8cb-4f32-992f-43d7dac82655" containerName="swift-recon-cron" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.103234 4558 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="fae781f7-d8cb-4f32-992f-43d7dac82655" containerName="swift-recon-cron" Jan 20 17:22:13 crc kubenswrapper[4558]: E0120 17:22:13.103241 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20cdc3df-1626-4f56-8a92-9df64c1ed2fe" containerName="glance-log" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.103246 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="20cdc3df-1626-4f56-8a92-9df64c1ed2fe" containerName="glance-log" Jan 20 17:22:13 crc kubenswrapper[4558]: E0120 17:22:13.103253 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b" containerName="nova-api-log" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.103258 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b" containerName="nova-api-log" Jan 20 17:22:13 crc kubenswrapper[4558]: E0120 17:22:13.103268 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fae781f7-d8cb-4f32-992f-43d7dac82655" containerName="account-auditor" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.103273 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="fae781f7-d8cb-4f32-992f-43d7dac82655" containerName="account-auditor" Jan 20 17:22:13 crc kubenswrapper[4558]: E0120 17:22:13.103282 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f2c69e78-4cbd-41a8-810d-bdc4f56cabfd" containerName="placement-api" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.103287 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f2c69e78-4cbd-41a8-810d-bdc4f56cabfd" containerName="placement-api" Jan 20 17:22:13 crc kubenswrapper[4558]: E0120 17:22:13.103298 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="413b348d-82a1-466a-8027-d2bfd6f97cb7" containerName="probe" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.103302 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="413b348d-82a1-466a-8027-d2bfd6f97cb7" containerName="probe" Jan 20 17:22:13 crc kubenswrapper[4558]: E0120 17:22:13.103308 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c52efaf-b737-47bf-9ca1-109a28e19113" containerName="setup-container" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.103313 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c52efaf-b737-47bf-9ca1-109a28e19113" containerName="setup-container" Jan 20 17:22:13 crc kubenswrapper[4558]: E0120 17:22:13.103320 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fae781f7-d8cb-4f32-992f-43d7dac82655" containerName="rsync" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.103325 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="fae781f7-d8cb-4f32-992f-43d7dac82655" containerName="rsync" Jan 20 17:22:13 crc kubenswrapper[4558]: E0120 17:22:13.103331 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abb17bba-a50c-4b0f-941b-3a09a8bcac1e" containerName="neutron-httpd" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.103335 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="abb17bba-a50c-4b0f-941b-3a09a8bcac1e" containerName="neutron-httpd" Jan 20 17:22:13 crc kubenswrapper[4558]: E0120 17:22:13.103343 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa03920f-0e4b-458f-956b-b658786f9792" containerName="galera" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.103348 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa03920f-0e4b-458f-956b-b658786f9792" 
containerName="galera" Jan 20 17:22:13 crc kubenswrapper[4558]: E0120 17:22:13.103353 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0378e8e5-48af-48c3-9100-80622a77533b" containerName="proxy-httpd" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.103358 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0378e8e5-48af-48c3-9100-80622a77533b" containerName="proxy-httpd" Jan 20 17:22:13 crc kubenswrapper[4558]: E0120 17:22:13.103364 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fae781f7-d8cb-4f32-992f-43d7dac82655" containerName="container-replicator" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.103370 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="fae781f7-d8cb-4f32-992f-43d7dac82655" containerName="container-replicator" Jan 20 17:22:13 crc kubenswrapper[4558]: E0120 17:22:13.103377 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fae781f7-d8cb-4f32-992f-43d7dac82655" containerName="account-replicator" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.103382 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="fae781f7-d8cb-4f32-992f-43d7dac82655" containerName="account-replicator" Jan 20 17:22:13 crc kubenswrapper[4558]: E0120 17:22:13.103389 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55138e51-715c-42a1-8e1d-bca65a31d55c" containerName="glance-httpd" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.103394 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="55138e51-715c-42a1-8e1d-bca65a31d55c" containerName="glance-httpd" Jan 20 17:22:13 crc kubenswrapper[4558]: E0120 17:22:13.103400 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="88418ec6-c589-463f-b8ec-c12464810c07" containerName="ovn-northd" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.103406 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="88418ec6-c589-463f-b8ec-c12464810c07" containerName="ovn-northd" Jan 20 17:22:13 crc kubenswrapper[4558]: E0120 17:22:13.103412 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fae781f7-d8cb-4f32-992f-43d7dac82655" containerName="object-updater" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.103417 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="fae781f7-d8cb-4f32-992f-43d7dac82655" containerName="object-updater" Jan 20 17:22:13 crc kubenswrapper[4558]: E0120 17:22:13.103423 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="477f15ac-57ae-4bb2-9230-2395ff3bf9ad" containerName="nova-cell1-conductor-conductor" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.103429 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="477f15ac-57ae-4bb2-9230-2395ff3bf9ad" containerName="nova-cell1-conductor-conductor" Jan 20 17:22:13 crc kubenswrapper[4558]: E0120 17:22:13.103435 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f2c69e78-4cbd-41a8-810d-bdc4f56cabfd" containerName="placement-log" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.103440 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f2c69e78-4cbd-41a8-810d-bdc4f56cabfd" containerName="placement-log" Jan 20 17:22:13 crc kubenswrapper[4558]: E0120 17:22:13.103450 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fae781f7-d8cb-4f32-992f-43d7dac82655" containerName="container-server" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.103456 4558 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="fae781f7-d8cb-4f32-992f-43d7dac82655" containerName="container-server" Jan 20 17:22:13 crc kubenswrapper[4558]: E0120 17:22:13.103464 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14af4a81-8568-425c-a209-4d90a042c365" containerName="barbican-api-log" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.103469 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="14af4a81-8568-425c-a209-4d90a042c365" containerName="barbican-api-log" Jan 20 17:22:13 crc kubenswrapper[4558]: E0120 17:22:13.103476 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67faa384-2157-4ce6-a5b8-aa2c3f4c3b20" containerName="dnsmasq-dns" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.103481 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="67faa384-2157-4ce6-a5b8-aa2c3f4c3b20" containerName="dnsmasq-dns" Jan 20 17:22:13 crc kubenswrapper[4558]: E0120 17:22:13.103492 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6dae93de-b907-44f7-a94c-c691eee0af7f" containerName="rabbitmq" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.103497 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6dae93de-b907-44f7-a94c-c691eee0af7f" containerName="rabbitmq" Jan 20 17:22:13 crc kubenswrapper[4558]: E0120 17:22:13.103505 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="413b348d-82a1-466a-8027-d2bfd6f97cb7" containerName="cinder-scheduler" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.103510 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="413b348d-82a1-466a-8027-d2bfd6f97cb7" containerName="cinder-scheduler" Jan 20 17:22:13 crc kubenswrapper[4558]: E0120 17:22:13.103517 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09d95674-9970-471d-8061-997e69ddda11" containerName="nova-metadata-metadata" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.103524 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="09d95674-9970-471d-8061-997e69ddda11" containerName="nova-metadata-metadata" Jan 20 17:22:13 crc kubenswrapper[4558]: E0120 17:22:13.103530 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c809052-d9bb-4982-8271-5b7a9a6f28f9" containerName="rabbitmq" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.103535 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c809052-d9bb-4982-8271-5b7a9a6f28f9" containerName="rabbitmq" Jan 20 17:22:13 crc kubenswrapper[4558]: E0120 17:22:13.103545 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0378e8e5-48af-48c3-9100-80622a77533b" containerName="ceilometer-central-agent" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.103550 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0378e8e5-48af-48c3-9100-80622a77533b" containerName="ceilometer-central-agent" Jan 20 17:22:13 crc kubenswrapper[4558]: E0120 17:22:13.103555 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f8ae24b-258e-4d32-b312-99f5015f83d6" containerName="barbican-worker" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.103560 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f8ae24b-258e-4d32-b312-99f5015f83d6" containerName="barbican-worker" Jan 20 17:22:13 crc kubenswrapper[4558]: E0120 17:22:13.103566 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="344f9f31-8a81-4544-b782-5aa78dfc5cc2" containerName="rabbitmq" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.103571 4558 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="344f9f31-8a81-4544-b782-5aa78dfc5cc2" containerName="rabbitmq" Jan 20 17:22:13 crc kubenswrapper[4558]: E0120 17:22:13.103578 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a948066b-fa95-4fbb-83e6-6f26f6c76652" containerName="barbican-keystone-listener-log" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.103583 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a948066b-fa95-4fbb-83e6-6f26f6c76652" containerName="barbican-keystone-listener-log" Jan 20 17:22:13 crc kubenswrapper[4558]: E0120 17:22:13.103589 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b" containerName="nova-api-api" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.103593 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b" containerName="nova-api-api" Jan 20 17:22:13 crc kubenswrapper[4558]: E0120 17:22:13.103603 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa03920f-0e4b-458f-956b-b658786f9792" containerName="mysql-bootstrap" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.103608 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa03920f-0e4b-458f-956b-b658786f9792" containerName="mysql-bootstrap" Jan 20 17:22:13 crc kubenswrapper[4558]: E0120 17:22:13.103614 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="24114ddb-3b30-42ac-9d61-cfeb15d58728" containerName="rabbitmq" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.103619 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="24114ddb-3b30-42ac-9d61-cfeb15d58728" containerName="rabbitmq" Jan 20 17:22:13 crc kubenswrapper[4558]: E0120 17:22:13.103626 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abb17bba-a50c-4b0f-941b-3a09a8bcac1e" containerName="neutron-api" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.103639 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="abb17bba-a50c-4b0f-941b-3a09a8bcac1e" containerName="neutron-api" Jan 20 17:22:13 crc kubenswrapper[4558]: E0120 17:22:13.103648 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20cdc3df-1626-4f56-8a92-9df64c1ed2fe" containerName="glance-httpd" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.103652 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="20cdc3df-1626-4f56-8a92-9df64c1ed2fe" containerName="glance-httpd" Jan 20 17:22:13 crc kubenswrapper[4558]: E0120 17:22:13.103660 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3abae449-b665-40ba-bdac-ea1e908e1952" containerName="galera" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.103665 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="3abae449-b665-40ba-bdac-ea1e908e1952" containerName="galera" Jan 20 17:22:13 crc kubenswrapper[4558]: E0120 17:22:13.103672 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23b973c1-70c3-4d60-8b1d-89efda1a5707" containerName="galera" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.104241 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="23b973c1-70c3-4d60-8b1d-89efda1a5707" containerName="galera" Jan 20 17:22:13 crc kubenswrapper[4558]: E0120 17:22:13.104249 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3abae449-b665-40ba-bdac-ea1e908e1952" containerName="mysql-bootstrap" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.104255 4558 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="3abae449-b665-40ba-bdac-ea1e908e1952" containerName="mysql-bootstrap" Jan 20 17:22:13 crc kubenswrapper[4558]: E0120 17:22:13.104264 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="23b973c1-70c3-4d60-8b1d-89efda1a5707" containerName="mysql-bootstrap" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.104270 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="23b973c1-70c3-4d60-8b1d-89efda1a5707" containerName="mysql-bootstrap" Jan 20 17:22:13 crc kubenswrapper[4558]: E0120 17:22:13.104275 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fae781f7-d8cb-4f32-992f-43d7dac82655" containerName="container-auditor" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.104281 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="fae781f7-d8cb-4f32-992f-43d7dac82655" containerName="container-auditor" Jan 20 17:22:13 crc kubenswrapper[4558]: E0120 17:22:13.104289 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="594280a4-cae4-4455-9838-ba4ef8d2f2c1" containerName="galera" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.104294 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="594280a4-cae4-4455-9838-ba4ef8d2f2c1" containerName="galera" Jan 20 17:22:13 crc kubenswrapper[4558]: E0120 17:22:13.104303 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a948066b-fa95-4fbb-83e6-6f26f6c76652" containerName="barbican-keystone-listener" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.104308 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a948066b-fa95-4fbb-83e6-6f26f6c76652" containerName="barbican-keystone-listener" Jan 20 17:22:13 crc kubenswrapper[4558]: E0120 17:22:13.104316 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f07ba882-d06f-4937-ac0a-e66fcd719b8d" containerName="kube-state-metrics" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.104321 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f07ba882-d06f-4937-ac0a-e66fcd719b8d" containerName="kube-state-metrics" Jan 20 17:22:13 crc kubenswrapper[4558]: E0120 17:22:13.104330 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="872ae103-12d2-4686-aa02-4e3375eb510a" containerName="mysql-bootstrap" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.104335 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="872ae103-12d2-4686-aa02-4e3375eb510a" containerName="mysql-bootstrap" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.104501 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="fae781f7-d8cb-4f32-992f-43d7dac82655" containerName="object-expirer" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.104515 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="09d95674-9970-471d-8061-997e69ddda11" containerName="nova-metadata-metadata" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.104522 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="abb17bba-a50c-4b0f-941b-3a09a8bcac1e" containerName="neutron-api" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.104528 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f2c69e78-4cbd-41a8-810d-bdc4f56cabfd" containerName="placement-api" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.104539 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="fae781f7-d8cb-4f32-992f-43d7dac82655" containerName="container-auditor" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.104546 
4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="fae781f7-d8cb-4f32-992f-43d7dac82655" containerName="container-updater" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.104554 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="872ae103-12d2-4686-aa02-4e3375eb510a" containerName="galera" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.104562 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="55138e51-715c-42a1-8e1d-bca65a31d55c" containerName="glance-log" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.104571 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="88418ec6-c589-463f-b8ec-c12464810c07" containerName="ovn-northd" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.104577 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="88418ec6-c589-463f-b8ec-c12464810c07" containerName="openstack-network-exporter" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.104585 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="fae781f7-d8cb-4f32-992f-43d7dac82655" containerName="object-auditor" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.104590 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c52efaf-b737-47bf-9ca1-109a28e19113" containerName="rabbitmq" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.104599 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f8ae24b-258e-4d32-b312-99f5015f83d6" containerName="barbican-worker-log" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.104608 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="fae781f7-d8cb-4f32-992f-43d7dac82655" containerName="account-reaper" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.104614 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="fae781f7-d8cb-4f32-992f-43d7dac82655" containerName="swift-recon-cron" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.104619 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="20cdc3df-1626-4f56-8a92-9df64c1ed2fe" containerName="glance-log" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.104627 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="4eb93277-301f-486e-a1e9-f323f9bd9cb4" containerName="mariadb-account-create-update" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.104640 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f2c69e78-4cbd-41a8-810d-bdc4f56cabfd" containerName="placement-log" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.104646 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="fae781f7-d8cb-4f32-992f-43d7dac82655" containerName="account-auditor" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.104654 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e72083f2-d324-432e-9bb0-5f44f2023489" containerName="cinder-api-log" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.104661 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b" containerName="nova-api-log" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.104666 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea1fc77a-9a6a-4b9b-8d27-34f9b9ca61d9" containerName="keystone-api" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.104674 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f8ae24b-258e-4d32-b312-99f5015f83d6" 
containerName="barbican-worker" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.104681 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="344f9f31-8a81-4544-b782-5aa78dfc5cc2" containerName="rabbitmq" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.104686 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="413b348d-82a1-466a-8027-d2bfd6f97cb7" containerName="cinder-scheduler" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.104693 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="0378e8e5-48af-48c3-9100-80622a77533b" containerName="ceilometer-notification-agent" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.104701 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="477f15ac-57ae-4bb2-9230-2395ff3bf9ad" containerName="nova-cell1-conductor-conductor" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.104707 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="fae781f7-d8cb-4f32-992f-43d7dac82655" containerName="object-replicator" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.104713 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="67faa384-2157-4ce6-a5b8-aa2c3f4c3b20" containerName="dnsmasq-dns" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.104719 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f07ba882-d06f-4937-ac0a-e66fcd719b8d" containerName="kube-state-metrics" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.104726 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="0378e8e5-48af-48c3-9100-80622a77533b" containerName="proxy-httpd" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.104732 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="fae781f7-d8cb-4f32-992f-43d7dac82655" containerName="object-updater" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.104739 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="fae781f7-d8cb-4f32-992f-43d7dac82655" containerName="account-replicator" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.104746 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="2c809052-d9bb-4982-8271-5b7a9a6f28f9" containerName="rabbitmq" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.104752 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="24114ddb-3b30-42ac-9d61-cfeb15d58728" containerName="rabbitmq" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.104759 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="14af4a81-8568-425c-a209-4d90a042c365" containerName="barbican-api-log" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.104764 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="413b348d-82a1-466a-8027-d2bfd6f97cb7" containerName="probe" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.104771 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="55138e51-715c-42a1-8e1d-bca65a31d55c" containerName="glance-httpd" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.104780 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8573a632-84e0-4f80-b811-5646b571c318" containerName="rabbitmq" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.104787 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="20cdc3df-1626-4f56-8a92-9df64c1ed2fe" containerName="glance-httpd" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.104793 4558 
memory_manager.go:354] "RemoveStaleState removing state" podUID="533cb8e0-3be7-4eb2-b0ce-bdebf3973a0b" containerName="nova-api-api" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.104802 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="abb17bba-a50c-4b0f-941b-3a09a8bcac1e" containerName="neutron-httpd" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.104808 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="fae781f7-d8cb-4f32-992f-43d7dac82655" containerName="container-replicator" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.104814 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="0378e8e5-48af-48c3-9100-80622a77533b" containerName="ceilometer-central-agent" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.104830 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a948066b-fa95-4fbb-83e6-6f26f6c76652" containerName="barbican-keystone-listener" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.104836 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="fae781f7-d8cb-4f32-992f-43d7dac82655" containerName="object-server" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.104844 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e72083f2-d324-432e-9bb0-5f44f2023489" containerName="cinder-api" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.104850 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f1382710-0399-4564-8c83-271c645ce04e" containerName="memcached" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.104855 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="6dae93de-b907-44f7-a94c-c691eee0af7f" containerName="rabbitmq" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.104862 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa03920f-0e4b-458f-956b-b658786f9792" containerName="galera" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.104869 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="fae781f7-d8cb-4f32-992f-43d7dac82655" containerName="account-server" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.104874 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="14af4a81-8568-425c-a209-4d90a042c365" containerName="barbican-api" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.104880 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="3abae449-b665-40ba-bdac-ea1e908e1952" containerName="galera" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.104886 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="23b973c1-70c3-4d60-8b1d-89efda1a5707" containerName="galera" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.104893 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="09d95674-9970-471d-8061-997e69ddda11" containerName="nova-metadata-log" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.104901 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="4eb93277-301f-486e-a1e9-f323f9bd9cb4" containerName="mariadb-account-create-update" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.104907 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a948066b-fa95-4fbb-83e6-6f26f6c76652" containerName="barbican-keystone-listener-log" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.104913 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="594280a4-cae4-4455-9838-ba4ef8d2f2c1" 
containerName="galera" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.104921 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="fae781f7-d8cb-4f32-992f-43d7dac82655" containerName="rsync" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.104929 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="fae781f7-d8cb-4f32-992f-43d7dac82655" containerName="container-server" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.104937 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="1ce622e1-3bed-423c-8870-1243fabacc8e" containerName="nova-cell0-conductor-conductor" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.104944 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="0378e8e5-48af-48c3-9100-80622a77533b" containerName="sg-core" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.105545 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-4wlx8" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.106896 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ce622e1-3bed-423c-8870-1243fabacc8e-combined-ca-bundle\") pod \"1ce622e1-3bed-423c-8870-1243fabacc8e\" (UID: \"1ce622e1-3bed-423c-8870-1243fabacc8e\") " Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.107016 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-thw4m\" (UniqueName: \"kubernetes.io/projected/477f15ac-57ae-4bb2-9230-2395ff3bf9ad-kube-api-access-thw4m\") pod \"477f15ac-57ae-4bb2-9230-2395ff3bf9ad\" (UID: \"477f15ac-57ae-4bb2-9230-2395ff3bf9ad\") " Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.107106 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/477f15ac-57ae-4bb2-9230-2395ff3bf9ad-config-data\") pod \"477f15ac-57ae-4bb2-9230-2395ff3bf9ad\" (UID: \"477f15ac-57ae-4bb2-9230-2395ff3bf9ad\") " Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.107158 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1ce622e1-3bed-423c-8870-1243fabacc8e-config-data\") pod \"1ce622e1-3bed-423c-8870-1243fabacc8e\" (UID: \"1ce622e1-3bed-423c-8870-1243fabacc8e\") " Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.107570 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-4wlx8"] Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.108295 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dwpm2\" (UniqueName: \"kubernetes.io/projected/1ce622e1-3bed-423c-8870-1243fabacc8e-kube-api-access-dwpm2\") pod \"1ce622e1-3bed-423c-8870-1243fabacc8e\" (UID: \"1ce622e1-3bed-423c-8870-1243fabacc8e\") " Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.108366 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/477f15ac-57ae-4bb2-9230-2395ff3bf9ad-combined-ca-bundle\") pod \"477f15ac-57ae-4bb2-9230-2395ff3bf9ad\" (UID: \"477f15ac-57ae-4bb2-9230-2395ff3bf9ad\") " Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.108700 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tsrrt\" (UniqueName: 
\"kubernetes.io/projected/b331ca96-50cf-498f-b164-f2b06fc330b6-kube-api-access-tsrrt\") pod \"crc-storage-crc-4wlx8\" (UID: \"b331ca96-50cf-498f-b164-f2b06fc330b6\") " pod="crc-storage/crc-storage-crc-4wlx8" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.109120 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/b331ca96-50cf-498f-b164-f2b06fc330b6-crc-storage\") pod \"crc-storage-crc-4wlx8\" (UID: \"b331ca96-50cf-498f-b164-f2b06fc330b6\") " pod="crc-storage/crc-storage-crc-4wlx8" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.109203 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/b331ca96-50cf-498f-b164-f2b06fc330b6-node-mnt\") pod \"crc-storage-crc-4wlx8\" (UID: \"b331ca96-50cf-498f-b164-f2b06fc330b6\") " pod="crc-storage/crc-storage-crc-4wlx8" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.112695 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.113236 4558 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-k9ht8" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.113281 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.113406 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.114241 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/477f15ac-57ae-4bb2-9230-2395ff3bf9ad-kube-api-access-thw4m" (OuterVolumeSpecName: "kube-api-access-thw4m") pod "477f15ac-57ae-4bb2-9230-2395ff3bf9ad" (UID: "477f15ac-57ae-4bb2-9230-2395ff3bf9ad"). InnerVolumeSpecName "kube-api-access-thw4m". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.118341 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1ce622e1-3bed-423c-8870-1243fabacc8e-kube-api-access-dwpm2" (OuterVolumeSpecName: "kube-api-access-dwpm2") pod "1ce622e1-3bed-423c-8870-1243fabacc8e" (UID: "1ce622e1-3bed-423c-8870-1243fabacc8e"). InnerVolumeSpecName "kube-api-access-dwpm2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.128724 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ce622e1-3bed-423c-8870-1243fabacc8e-config-data" (OuterVolumeSpecName: "config-data") pod "1ce622e1-3bed-423c-8870-1243fabacc8e" (UID: "1ce622e1-3bed-423c-8870-1243fabacc8e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.148788 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/477f15ac-57ae-4bb2-9230-2395ff3bf9ad-config-data" (OuterVolumeSpecName: "config-data") pod "477f15ac-57ae-4bb2-9230-2395ff3bf9ad" (UID: "477f15ac-57ae-4bb2-9230-2395ff3bf9ad"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.166605 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/477f15ac-57ae-4bb2-9230-2395ff3bf9ad-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "477f15ac-57ae-4bb2-9230-2395ff3bf9ad" (UID: "477f15ac-57ae-4bb2-9230-2395ff3bf9ad"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.166675 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ce622e1-3bed-423c-8870-1243fabacc8e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1ce622e1-3bed-423c-8870-1243fabacc8e" (UID: "1ce622e1-3bed-423c-8870-1243fabacc8e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.210713 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tsrrt\" (UniqueName: \"kubernetes.io/projected/b331ca96-50cf-498f-b164-f2b06fc330b6-kube-api-access-tsrrt\") pod \"crc-storage-crc-4wlx8\" (UID: \"b331ca96-50cf-498f-b164-f2b06fc330b6\") " pod="crc-storage/crc-storage-crc-4wlx8" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.210812 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/b331ca96-50cf-498f-b164-f2b06fc330b6-crc-storage\") pod \"crc-storage-crc-4wlx8\" (UID: \"b331ca96-50cf-498f-b164-f2b06fc330b6\") " pod="crc-storage/crc-storage-crc-4wlx8" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.210846 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/b331ca96-50cf-498f-b164-f2b06fc330b6-node-mnt\") pod \"crc-storage-crc-4wlx8\" (UID: \"b331ca96-50cf-498f-b164-f2b06fc330b6\") " pod="crc-storage/crc-storage-crc-4wlx8" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.210903 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/477f15ac-57ae-4bb2-9230-2395ff3bf9ad-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.210918 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1ce622e1-3bed-423c-8870-1243fabacc8e-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.210930 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dwpm2\" (UniqueName: \"kubernetes.io/projected/1ce622e1-3bed-423c-8870-1243fabacc8e-kube-api-access-dwpm2\") on node \"crc\" DevicePath \"\"" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.210945 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/477f15ac-57ae-4bb2-9230-2395ff3bf9ad-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.210955 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ce622e1-3bed-423c-8870-1243fabacc8e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.210964 4558 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-thw4m\" (UniqueName: \"kubernetes.io/projected/477f15ac-57ae-4bb2-9230-2395ff3bf9ad-kube-api-access-thw4m\") on node \"crc\" DevicePath \"\"" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.211305 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/b331ca96-50cf-498f-b164-f2b06fc330b6-node-mnt\") pod \"crc-storage-crc-4wlx8\" (UID: \"b331ca96-50cf-498f-b164-f2b06fc330b6\") " pod="crc-storage/crc-storage-crc-4wlx8" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.211989 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/b331ca96-50cf-498f-b164-f2b06fc330b6-crc-storage\") pod \"crc-storage-crc-4wlx8\" (UID: \"b331ca96-50cf-498f-b164-f2b06fc330b6\") " pod="crc-storage/crc-storage-crc-4wlx8" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.226472 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tsrrt\" (UniqueName: \"kubernetes.io/projected/b331ca96-50cf-498f-b164-f2b06fc330b6-kube-api-access-tsrrt\") pod \"crc-storage-crc-4wlx8\" (UID: \"b331ca96-50cf-498f-b164-f2b06fc330b6\") " pod="crc-storage/crc-storage-crc-4wlx8" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.379713 4558 generic.go:334] "Generic (PLEG): container finished" podID="8eb09b19-2337-4fa1-8c9a-a3a45c9d8a48" containerID="cef9f816792ffb8c331439791b6687f021b7a0df270a63be33a28f933f0c1c2b" exitCode=137 Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.379800 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"8eb09b19-2337-4fa1-8c9a-a3a45c9d8a48","Type":"ContainerDied","Data":"cef9f816792ffb8c331439791b6687f021b7a0df270a63be33a28f933f0c1c2b"} Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.381573 4558 generic.go:334] "Generic (PLEG): container finished" podID="477f15ac-57ae-4bb2-9230-2395ff3bf9ad" containerID="cd9412be70441b3487a5c8e313f30e1490c914b1b61e47d06c5f1b8c6448e338" exitCode=137 Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.381658 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"477f15ac-57ae-4bb2-9230-2395ff3bf9ad","Type":"ContainerDied","Data":"cd9412be70441b3487a5c8e313f30e1490c914b1b61e47d06c5f1b8c6448e338"} Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.381711 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"477f15ac-57ae-4bb2-9230-2395ff3bf9ad","Type":"ContainerDied","Data":"8348e1fd285061bbf538f64bcac88a4065f2ab9a3412701ea2f38618f348bb5d"} Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.381724 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.381741 4558 scope.go:117] "RemoveContainer" containerID="cd9412be70441b3487a5c8e313f30e1490c914b1b61e47d06c5f1b8c6448e338" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.386133 4558 generic.go:334] "Generic (PLEG): container finished" podID="1ce622e1-3bed-423c-8870-1243fabacc8e" containerID="d1ff32632fd1bfa14bc2e72839ef954e6b06e71a5542a2689718a87ddb352720" exitCode=137 Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.386209 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"1ce622e1-3bed-423c-8870-1243fabacc8e","Type":"ContainerDied","Data":"d1ff32632fd1bfa14bc2e72839ef954e6b06e71a5542a2689718a87ddb352720"} Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.386250 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"1ce622e1-3bed-423c-8870-1243fabacc8e","Type":"ContainerDied","Data":"bc5b80c80e88cafd8548eb929c12b47ab9d6d388c35b0a91f6d2a8159d3d92c7"} Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.386297 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.387794 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.409552 4558 scope.go:117] "RemoveContainer" containerID="cd9412be70441b3487a5c8e313f30e1490c914b1b61e47d06c5f1b8c6448e338" Jan 20 17:22:13 crc kubenswrapper[4558]: E0120 17:22:13.410040 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cd9412be70441b3487a5c8e313f30e1490c914b1b61e47d06c5f1b8c6448e338\": container with ID starting with cd9412be70441b3487a5c8e313f30e1490c914b1b61e47d06c5f1b8c6448e338 not found: ID does not exist" containerID="cd9412be70441b3487a5c8e313f30e1490c914b1b61e47d06c5f1b8c6448e338" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.410072 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cd9412be70441b3487a5c8e313f30e1490c914b1b61e47d06c5f1b8c6448e338"} err="failed to get container status \"cd9412be70441b3487a5c8e313f30e1490c914b1b61e47d06c5f1b8c6448e338\": rpc error: code = NotFound desc = could not find container \"cd9412be70441b3487a5c8e313f30e1490c914b1b61e47d06c5f1b8c6448e338\": container with ID starting with cd9412be70441b3487a5c8e313f30e1490c914b1b61e47d06c5f1b8c6448e338 not found: ID does not exist" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.410093 4558 scope.go:117] "RemoveContainer" containerID="d1ff32632fd1bfa14bc2e72839ef954e6b06e71a5542a2689718a87ddb352720" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.429949 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-4wlx8" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.434636 4558 scope.go:117] "RemoveContainer" containerID="d1ff32632fd1bfa14bc2e72839ef954e6b06e71a5542a2689718a87ddb352720" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.434935 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:22:13 crc kubenswrapper[4558]: E0120 17:22:13.435177 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d1ff32632fd1bfa14bc2e72839ef954e6b06e71a5542a2689718a87ddb352720\": container with ID starting with d1ff32632fd1bfa14bc2e72839ef954e6b06e71a5542a2689718a87ddb352720 not found: ID does not exist" containerID="d1ff32632fd1bfa14bc2e72839ef954e6b06e71a5542a2689718a87ddb352720" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.435206 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d1ff32632fd1bfa14bc2e72839ef954e6b06e71a5542a2689718a87ddb352720"} err="failed to get container status \"d1ff32632fd1bfa14bc2e72839ef954e6b06e71a5542a2689718a87ddb352720\": rpc error: code = NotFound desc = could not find container \"d1ff32632fd1bfa14bc2e72839ef954e6b06e71a5542a2689718a87ddb352720\": container with ID starting with d1ff32632fd1bfa14bc2e72839ef954e6b06e71a5542a2689718a87ddb352720 not found: ID does not exist" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.441481 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.446757 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.450618 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.514249 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8eb09b19-2337-4fa1-8c9a-a3a45c9d8a48-combined-ca-bundle\") pod \"8eb09b19-2337-4fa1-8c9a-a3a45c9d8a48\" (UID: \"8eb09b19-2337-4fa1-8c9a-a3a45c9d8a48\") " Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.514357 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8eb09b19-2337-4fa1-8c9a-a3a45c9d8a48-config-data\") pod \"8eb09b19-2337-4fa1-8c9a-a3a45c9d8a48\" (UID: \"8eb09b19-2337-4fa1-8c9a-a3a45c9d8a48\") " Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.514474 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jcncz\" (UniqueName: \"kubernetes.io/projected/8eb09b19-2337-4fa1-8c9a-a3a45c9d8a48-kube-api-access-jcncz\") pod \"8eb09b19-2337-4fa1-8c9a-a3a45c9d8a48\" (UID: \"8eb09b19-2337-4fa1-8c9a-a3a45c9d8a48\") " Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.522607 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8eb09b19-2337-4fa1-8c9a-a3a45c9d8a48-kube-api-access-jcncz" (OuterVolumeSpecName: "kube-api-access-jcncz") pod "8eb09b19-2337-4fa1-8c9a-a3a45c9d8a48" (UID: "8eb09b19-2337-4fa1-8c9a-a3a45c9d8a48"). InnerVolumeSpecName "kube-api-access-jcncz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.531567 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8eb09b19-2337-4fa1-8c9a-a3a45c9d8a48-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8eb09b19-2337-4fa1-8c9a-a3a45c9d8a48" (UID: "8eb09b19-2337-4fa1-8c9a-a3a45c9d8a48"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.532935 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8eb09b19-2337-4fa1-8c9a-a3a45c9d8a48-config-data" (OuterVolumeSpecName: "config-data") pod "8eb09b19-2337-4fa1-8c9a-a3a45c9d8a48" (UID: "8eb09b19-2337-4fa1-8c9a-a3a45c9d8a48"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.617095 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8eb09b19-2337-4fa1-8c9a-a3a45c9d8a48-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.617545 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8eb09b19-2337-4fa1-8c9a-a3a45c9d8a48-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.617557 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jcncz\" (UniqueName: \"kubernetes.io/projected/8eb09b19-2337-4fa1-8c9a-a3a45c9d8a48-kube-api-access-jcncz\") on node \"crc\" DevicePath \"\"" Jan 20 17:22:13 crc kubenswrapper[4558]: I0120 17:22:13.827298 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-4wlx8"] Jan 20 17:22:14 crc kubenswrapper[4558]: I0120 17:22:14.398183 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"8eb09b19-2337-4fa1-8c9a-a3a45c9d8a48","Type":"ContainerDied","Data":"6889cb3ea4db5abe63bcb930d3789df4e6a069fa4d913d711b8a3640b230076d"} Jan 20 17:22:14 crc kubenswrapper[4558]: I0120 17:22:14.398258 4558 scope.go:117] "RemoveContainer" containerID="cef9f816792ffb8c331439791b6687f021b7a0df270a63be33a28f933f0c1c2b" Jan 20 17:22:14 crc kubenswrapper[4558]: I0120 17:22:14.398269 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:22:14 crc kubenswrapper[4558]: I0120 17:22:14.400124 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-4wlx8" event={"ID":"b331ca96-50cf-498f-b164-f2b06fc330b6","Type":"ContainerStarted","Data":"5a9061a8c25b029521beaa052f3df1aef4ffb9b21c3db8b1dd3da3fb605486ef"} Jan 20 17:22:14 crc kubenswrapper[4558]: I0120 17:22:14.431716 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:22:14 crc kubenswrapper[4558]: I0120 17:22:14.438734 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:22:14 crc kubenswrapper[4558]: I0120 17:22:14.566119 4558 scope.go:117] "RemoveContainer" containerID="0ffce44366a8b4466427b682fb41640425366a0f4449210959148de9aef695c6" Jan 20 17:22:14 crc kubenswrapper[4558]: E0120 17:22:14.566396 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:22:14 crc kubenswrapper[4558]: I0120 17:22:14.575589 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1ce622e1-3bed-423c-8870-1243fabacc8e" path="/var/lib/kubelet/pods/1ce622e1-3bed-423c-8870-1243fabacc8e/volumes" Jan 20 17:22:14 crc kubenswrapper[4558]: I0120 17:22:14.576121 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="477f15ac-57ae-4bb2-9230-2395ff3bf9ad" path="/var/lib/kubelet/pods/477f15ac-57ae-4bb2-9230-2395ff3bf9ad/volumes" Jan 20 17:22:14 crc kubenswrapper[4558]: I0120 17:22:14.576616 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8eb09b19-2337-4fa1-8c9a-a3a45c9d8a48" path="/var/lib/kubelet/pods/8eb09b19-2337-4fa1-8c9a-a3a45c9d8a48/volumes" Jan 20 17:22:14 crc kubenswrapper[4558]: I0120 17:22:14.577091 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bfb33818-4cdc-4840-ac61-d14fee47a57f" path="/var/lib/kubelet/pods/bfb33818-4cdc-4840-ac61-d14fee47a57f/volumes" Jan 20 17:22:15 crc kubenswrapper[4558]: I0120 17:22:15.413806 4558 generic.go:334] "Generic (PLEG): container finished" podID="b331ca96-50cf-498f-b164-f2b06fc330b6" containerID="5e28de7a7ee3d35b6b18ea4eac47cc3ed5820910b779402f078366abc8119fcd" exitCode=0 Jan 20 17:22:15 crc kubenswrapper[4558]: I0120 17:22:15.414048 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-4wlx8" event={"ID":"b331ca96-50cf-498f-b164-f2b06fc330b6","Type":"ContainerDied","Data":"5e28de7a7ee3d35b6b18ea4eac47cc3ed5820910b779402f078366abc8119fcd"} Jan 20 17:22:16 crc kubenswrapper[4558]: I0120 17:22:16.668039 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-4wlx8" Jan 20 17:22:16 crc kubenswrapper[4558]: I0120 17:22:16.764002 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/b331ca96-50cf-498f-b164-f2b06fc330b6-crc-storage\") pod \"b331ca96-50cf-498f-b164-f2b06fc330b6\" (UID: \"b331ca96-50cf-498f-b164-f2b06fc330b6\") " Jan 20 17:22:16 crc kubenswrapper[4558]: I0120 17:22:16.764144 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/b331ca96-50cf-498f-b164-f2b06fc330b6-node-mnt\") pod \"b331ca96-50cf-498f-b164-f2b06fc330b6\" (UID: \"b331ca96-50cf-498f-b164-f2b06fc330b6\") " Jan 20 17:22:16 crc kubenswrapper[4558]: I0120 17:22:16.764225 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b331ca96-50cf-498f-b164-f2b06fc330b6-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "b331ca96-50cf-498f-b164-f2b06fc330b6" (UID: "b331ca96-50cf-498f-b164-f2b06fc330b6"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:22:16 crc kubenswrapper[4558]: I0120 17:22:16.764246 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tsrrt\" (UniqueName: \"kubernetes.io/projected/b331ca96-50cf-498f-b164-f2b06fc330b6-kube-api-access-tsrrt\") pod \"b331ca96-50cf-498f-b164-f2b06fc330b6\" (UID: \"b331ca96-50cf-498f-b164-f2b06fc330b6\") " Jan 20 17:22:16 crc kubenswrapper[4558]: I0120 17:22:16.764669 4558 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/b331ca96-50cf-498f-b164-f2b06fc330b6-node-mnt\") on node \"crc\" DevicePath \"\"" Jan 20 17:22:16 crc kubenswrapper[4558]: I0120 17:22:16.769851 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b331ca96-50cf-498f-b164-f2b06fc330b6-kube-api-access-tsrrt" (OuterVolumeSpecName: "kube-api-access-tsrrt") pod "b331ca96-50cf-498f-b164-f2b06fc330b6" (UID: "b331ca96-50cf-498f-b164-f2b06fc330b6"). InnerVolumeSpecName "kube-api-access-tsrrt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:22:16 crc kubenswrapper[4558]: I0120 17:22:16.781074 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b331ca96-50cf-498f-b164-f2b06fc330b6-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "b331ca96-50cf-498f-b164-f2b06fc330b6" (UID: "b331ca96-50cf-498f-b164-f2b06fc330b6"). InnerVolumeSpecName "crc-storage". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:22:16 crc kubenswrapper[4558]: I0120 17:22:16.865856 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tsrrt\" (UniqueName: \"kubernetes.io/projected/b331ca96-50cf-498f-b164-f2b06fc330b6-kube-api-access-tsrrt\") on node \"crc\" DevicePath \"\"" Jan 20 17:22:16 crc kubenswrapper[4558]: I0120 17:22:16.865885 4558 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/b331ca96-50cf-498f-b164-f2b06fc330b6-crc-storage\") on node \"crc\" DevicePath \"\"" Jan 20 17:22:17 crc kubenswrapper[4558]: I0120 17:22:17.437117 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-4wlx8" event={"ID":"b331ca96-50cf-498f-b164-f2b06fc330b6","Type":"ContainerDied","Data":"5a9061a8c25b029521beaa052f3df1aef4ffb9b21c3db8b1dd3da3fb605486ef"} Jan 20 17:22:17 crc kubenswrapper[4558]: I0120 17:22:17.437190 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5a9061a8c25b029521beaa052f3df1aef4ffb9b21c3db8b1dd3da3fb605486ef" Jan 20 17:22:17 crc kubenswrapper[4558]: I0120 17:22:17.437425 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-4wlx8" Jan 20 17:22:19 crc kubenswrapper[4558]: I0120 17:22:19.589514 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-4wlx8"] Jan 20 17:22:19 crc kubenswrapper[4558]: I0120 17:22:19.594057 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-4wlx8"] Jan 20 17:22:19 crc kubenswrapper[4558]: I0120 17:22:19.681767 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-6flbs"] Jan 20 17:22:19 crc kubenswrapper[4558]: E0120 17:22:19.682049 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8eb09b19-2337-4fa1-8c9a-a3a45c9d8a48" containerName="nova-scheduler-scheduler" Jan 20 17:22:19 crc kubenswrapper[4558]: I0120 17:22:19.682071 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8eb09b19-2337-4fa1-8c9a-a3a45c9d8a48" containerName="nova-scheduler-scheduler" Jan 20 17:22:19 crc kubenswrapper[4558]: E0120 17:22:19.682104 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b331ca96-50cf-498f-b164-f2b06fc330b6" containerName="storage" Jan 20 17:22:19 crc kubenswrapper[4558]: I0120 17:22:19.682112 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b331ca96-50cf-498f-b164-f2b06fc330b6" containerName="storage" Jan 20 17:22:19 crc kubenswrapper[4558]: I0120 17:22:19.682262 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8eb09b19-2337-4fa1-8c9a-a3a45c9d8a48" containerName="nova-scheduler-scheduler" Jan 20 17:22:19 crc kubenswrapper[4558]: I0120 17:22:19.682296 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b331ca96-50cf-498f-b164-f2b06fc330b6" containerName="storage" Jan 20 17:22:19 crc kubenswrapper[4558]: I0120 17:22:19.682730 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-6flbs" Jan 20 17:22:19 crc kubenswrapper[4558]: I0120 17:22:19.684595 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Jan 20 17:22:19 crc kubenswrapper[4558]: I0120 17:22:19.684903 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Jan 20 17:22:19 crc kubenswrapper[4558]: I0120 17:22:19.685176 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Jan 20 17:22:19 crc kubenswrapper[4558]: I0120 17:22:19.685282 4558 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-k9ht8" Jan 20 17:22:19 crc kubenswrapper[4558]: I0120 17:22:19.702365 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-6flbs"] Jan 20 17:22:19 crc kubenswrapper[4558]: I0120 17:22:19.813120 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gnk9s\" (UniqueName: \"kubernetes.io/projected/703ab252-bbd5-40ba-a904-6031bc2a1c1f-kube-api-access-gnk9s\") pod \"crc-storage-crc-6flbs\" (UID: \"703ab252-bbd5-40ba-a904-6031bc2a1c1f\") " pod="crc-storage/crc-storage-crc-6flbs" Jan 20 17:22:19 crc kubenswrapper[4558]: I0120 17:22:19.813214 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/703ab252-bbd5-40ba-a904-6031bc2a1c1f-crc-storage\") pod \"crc-storage-crc-6flbs\" (UID: \"703ab252-bbd5-40ba-a904-6031bc2a1c1f\") " pod="crc-storage/crc-storage-crc-6flbs" Jan 20 17:22:19 crc kubenswrapper[4558]: I0120 17:22:19.813275 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/703ab252-bbd5-40ba-a904-6031bc2a1c1f-node-mnt\") pod \"crc-storage-crc-6flbs\" (UID: \"703ab252-bbd5-40ba-a904-6031bc2a1c1f\") " pod="crc-storage/crc-storage-crc-6flbs" Jan 20 17:22:19 crc kubenswrapper[4558]: I0120 17:22:19.914549 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/703ab252-bbd5-40ba-a904-6031bc2a1c1f-crc-storage\") pod \"crc-storage-crc-6flbs\" (UID: \"703ab252-bbd5-40ba-a904-6031bc2a1c1f\") " pod="crc-storage/crc-storage-crc-6flbs" Jan 20 17:22:19 crc kubenswrapper[4558]: I0120 17:22:19.914618 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/703ab252-bbd5-40ba-a904-6031bc2a1c1f-node-mnt\") pod \"crc-storage-crc-6flbs\" (UID: \"703ab252-bbd5-40ba-a904-6031bc2a1c1f\") " pod="crc-storage/crc-storage-crc-6flbs" Jan 20 17:22:19 crc kubenswrapper[4558]: I0120 17:22:19.914707 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gnk9s\" (UniqueName: \"kubernetes.io/projected/703ab252-bbd5-40ba-a904-6031bc2a1c1f-kube-api-access-gnk9s\") pod \"crc-storage-crc-6flbs\" (UID: \"703ab252-bbd5-40ba-a904-6031bc2a1c1f\") " pod="crc-storage/crc-storage-crc-6flbs" Jan 20 17:22:19 crc kubenswrapper[4558]: I0120 17:22:19.914882 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/703ab252-bbd5-40ba-a904-6031bc2a1c1f-node-mnt\") pod \"crc-storage-crc-6flbs\" (UID: \"703ab252-bbd5-40ba-a904-6031bc2a1c1f\") " 
pod="crc-storage/crc-storage-crc-6flbs" Jan 20 17:22:19 crc kubenswrapper[4558]: I0120 17:22:19.915255 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/703ab252-bbd5-40ba-a904-6031bc2a1c1f-crc-storage\") pod \"crc-storage-crc-6flbs\" (UID: \"703ab252-bbd5-40ba-a904-6031bc2a1c1f\") " pod="crc-storage/crc-storage-crc-6flbs" Jan 20 17:22:19 crc kubenswrapper[4558]: I0120 17:22:19.932450 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gnk9s\" (UniqueName: \"kubernetes.io/projected/703ab252-bbd5-40ba-a904-6031bc2a1c1f-kube-api-access-gnk9s\") pod \"crc-storage-crc-6flbs\" (UID: \"703ab252-bbd5-40ba-a904-6031bc2a1c1f\") " pod="crc-storage/crc-storage-crc-6flbs" Jan 20 17:22:20 crc kubenswrapper[4558]: I0120 17:22:20.004357 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-6flbs" Jan 20 17:22:20 crc kubenswrapper[4558]: I0120 17:22:20.377308 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-6flbs"] Jan 20 17:22:20 crc kubenswrapper[4558]: I0120 17:22:20.384779 4558 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 20 17:22:20 crc kubenswrapper[4558]: I0120 17:22:20.459812 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-6flbs" event={"ID":"703ab252-bbd5-40ba-a904-6031bc2a1c1f","Type":"ContainerStarted","Data":"e8eccb112034b74c26b00f555bb941cd942f748c7afba0836fd3c6dc79b5d95a"} Jan 20 17:22:20 crc kubenswrapper[4558]: I0120 17:22:20.576296 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b331ca96-50cf-498f-b164-f2b06fc330b6" path="/var/lib/kubelet/pods/b331ca96-50cf-498f-b164-f2b06fc330b6/volumes" Jan 20 17:22:21 crc kubenswrapper[4558]: I0120 17:22:21.472617 4558 generic.go:334] "Generic (PLEG): container finished" podID="703ab252-bbd5-40ba-a904-6031bc2a1c1f" containerID="3b5dee10562c22a67048b6fc6c3b8f6768b24f57a85fa0e1abeee9c3745fe3ec" exitCode=0 Jan 20 17:22:21 crc kubenswrapper[4558]: I0120 17:22:21.472707 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-6flbs" event={"ID":"703ab252-bbd5-40ba-a904-6031bc2a1c1f","Type":"ContainerDied","Data":"3b5dee10562c22a67048b6fc6c3b8f6768b24f57a85fa0e1abeee9c3745fe3ec"} Jan 20 17:22:22 crc kubenswrapper[4558]: I0120 17:22:22.718749 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-6flbs" Jan 20 17:22:22 crc kubenswrapper[4558]: I0120 17:22:22.851410 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gnk9s\" (UniqueName: \"kubernetes.io/projected/703ab252-bbd5-40ba-a904-6031bc2a1c1f-kube-api-access-gnk9s\") pod \"703ab252-bbd5-40ba-a904-6031bc2a1c1f\" (UID: \"703ab252-bbd5-40ba-a904-6031bc2a1c1f\") " Jan 20 17:22:22 crc kubenswrapper[4558]: I0120 17:22:22.851582 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/703ab252-bbd5-40ba-a904-6031bc2a1c1f-node-mnt\") pod \"703ab252-bbd5-40ba-a904-6031bc2a1c1f\" (UID: \"703ab252-bbd5-40ba-a904-6031bc2a1c1f\") " Jan 20 17:22:22 crc kubenswrapper[4558]: I0120 17:22:22.851716 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/703ab252-bbd5-40ba-a904-6031bc2a1c1f-crc-storage\") pod \"703ab252-bbd5-40ba-a904-6031bc2a1c1f\" (UID: \"703ab252-bbd5-40ba-a904-6031bc2a1c1f\") " Jan 20 17:22:22 crc kubenswrapper[4558]: I0120 17:22:22.851735 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/703ab252-bbd5-40ba-a904-6031bc2a1c1f-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "703ab252-bbd5-40ba-a904-6031bc2a1c1f" (UID: "703ab252-bbd5-40ba-a904-6031bc2a1c1f"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:22:22 crc kubenswrapper[4558]: I0120 17:22:22.852055 4558 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/703ab252-bbd5-40ba-a904-6031bc2a1c1f-node-mnt\") on node \"crc\" DevicePath \"\"" Jan 20 17:22:22 crc kubenswrapper[4558]: I0120 17:22:22.856968 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/703ab252-bbd5-40ba-a904-6031bc2a1c1f-kube-api-access-gnk9s" (OuterVolumeSpecName: "kube-api-access-gnk9s") pod "703ab252-bbd5-40ba-a904-6031bc2a1c1f" (UID: "703ab252-bbd5-40ba-a904-6031bc2a1c1f"). InnerVolumeSpecName "kube-api-access-gnk9s". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:22:22 crc kubenswrapper[4558]: I0120 17:22:22.869262 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/703ab252-bbd5-40ba-a904-6031bc2a1c1f-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "703ab252-bbd5-40ba-a904-6031bc2a1c1f" (UID: "703ab252-bbd5-40ba-a904-6031bc2a1c1f"). InnerVolumeSpecName "crc-storage". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:22:22 crc kubenswrapper[4558]: I0120 17:22:22.953785 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gnk9s\" (UniqueName: \"kubernetes.io/projected/703ab252-bbd5-40ba-a904-6031bc2a1c1f-kube-api-access-gnk9s\") on node \"crc\" DevicePath \"\"" Jan 20 17:22:22 crc kubenswrapper[4558]: I0120 17:22:22.953931 4558 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/703ab252-bbd5-40ba-a904-6031bc2a1c1f-crc-storage\") on node \"crc\" DevicePath \"\"" Jan 20 17:22:23 crc kubenswrapper[4558]: I0120 17:22:23.492109 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-6flbs" event={"ID":"703ab252-bbd5-40ba-a904-6031bc2a1c1f","Type":"ContainerDied","Data":"e8eccb112034b74c26b00f555bb941cd942f748c7afba0836fd3c6dc79b5d95a"} Jan 20 17:22:23 crc kubenswrapper[4558]: I0120 17:22:23.492160 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e8eccb112034b74c26b00f555bb941cd942f748c7afba0836fd3c6dc79b5d95a" Jan 20 17:22:23 crc kubenswrapper[4558]: I0120 17:22:23.492198 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-6flbs" Jan 20 17:22:26 crc kubenswrapper[4558]: I0120 17:22:26.568907 4558 scope.go:117] "RemoveContainer" containerID="0ffce44366a8b4466427b682fb41640425366a0f4449210959148de9aef695c6" Jan 20 17:22:26 crc kubenswrapper[4558]: E0120 17:22:26.569837 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:22:35 crc kubenswrapper[4558]: I0120 17:22:35.750355 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:22:35 crc kubenswrapper[4558]: E0120 17:22:35.751157 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="703ab252-bbd5-40ba-a904-6031bc2a1c1f" containerName="storage" Jan 20 17:22:35 crc kubenswrapper[4558]: I0120 17:22:35.751188 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="703ab252-bbd5-40ba-a904-6031bc2a1c1f" containerName="storage" Jan 20 17:22:35 crc kubenswrapper[4558]: I0120 17:22:35.751331 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="703ab252-bbd5-40ba-a904-6031bc2a1c1f" containerName="storage" Jan 20 17:22:35 crc kubenswrapper[4558]: I0120 17:22:35.752078 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:22:35 crc kubenswrapper[4558]: I0120 17:22:35.754459 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"rabbitmq-plugins-conf" Jan 20 17:22:35 crc kubenswrapper[4558]: I0120 17:22:35.754549 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-rabbitmq-svc" Jan 20 17:22:35 crc kubenswrapper[4558]: I0120 17:22:35.754822 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"rabbitmq-config-data" Jan 20 17:22:35 crc kubenswrapper[4558]: I0120 17:22:35.754919 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"rabbitmq-erlang-cookie" Jan 20 17:22:35 crc kubenswrapper[4558]: I0120 17:22:35.755405 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"rabbitmq-default-user" Jan 20 17:22:35 crc kubenswrapper[4558]: I0120 17:22:35.755684 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"rabbitmq-server-dockercfg-dnkqh" Jan 20 17:22:35 crc kubenswrapper[4558]: I0120 17:22:35.756358 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"rabbitmq-server-conf" Jan 20 17:22:35 crc kubenswrapper[4558]: I0120 17:22:35.765467 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:22:35 crc kubenswrapper[4558]: I0120 17:22:35.866268 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/033e5f26-b8e8-48f1-affd-3b9e7fba316a-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"033e5f26-b8e8-48f1-affd-3b9e7fba316a\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:22:35 crc kubenswrapper[4558]: I0120 17:22:35.866332 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/033e5f26-b8e8-48f1-affd-3b9e7fba316a-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"033e5f26-b8e8-48f1-affd-3b9e7fba316a\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:22:35 crc kubenswrapper[4558]: I0120 17:22:35.866381 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ts2c8\" (UniqueName: \"kubernetes.io/projected/033e5f26-b8e8-48f1-affd-3b9e7fba316a-kube-api-access-ts2c8\") pod \"rabbitmq-server-0\" (UID: \"033e5f26-b8e8-48f1-affd-3b9e7fba316a\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:22:35 crc kubenswrapper[4558]: I0120 17:22:35.866591 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/033e5f26-b8e8-48f1-affd-3b9e7fba316a-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"033e5f26-b8e8-48f1-affd-3b9e7fba316a\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:22:35 crc kubenswrapper[4558]: I0120 17:22:35.866697 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/033e5f26-b8e8-48f1-affd-3b9e7fba316a-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"033e5f26-b8e8-48f1-affd-3b9e7fba316a\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:22:35 crc 
kubenswrapper[4558]: I0120 17:22:35.866763 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"rabbitmq-server-0\" (UID: \"033e5f26-b8e8-48f1-affd-3b9e7fba316a\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:22:35 crc kubenswrapper[4558]: I0120 17:22:35.866845 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/033e5f26-b8e8-48f1-affd-3b9e7fba316a-config-data\") pod \"rabbitmq-server-0\" (UID: \"033e5f26-b8e8-48f1-affd-3b9e7fba316a\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:22:35 crc kubenswrapper[4558]: I0120 17:22:35.866903 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/033e5f26-b8e8-48f1-affd-3b9e7fba316a-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"033e5f26-b8e8-48f1-affd-3b9e7fba316a\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:22:35 crc kubenswrapper[4558]: I0120 17:22:35.866993 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/033e5f26-b8e8-48f1-affd-3b9e7fba316a-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"033e5f26-b8e8-48f1-affd-3b9e7fba316a\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:22:35 crc kubenswrapper[4558]: I0120 17:22:35.867055 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/033e5f26-b8e8-48f1-affd-3b9e7fba316a-pod-info\") pod \"rabbitmq-server-0\" (UID: \"033e5f26-b8e8-48f1-affd-3b9e7fba316a\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:22:35 crc kubenswrapper[4558]: I0120 17:22:35.867207 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/033e5f26-b8e8-48f1-affd-3b9e7fba316a-server-conf\") pod \"rabbitmq-server-0\" (UID: \"033e5f26-b8e8-48f1-affd-3b9e7fba316a\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:22:35 crc kubenswrapper[4558]: I0120 17:22:35.968618 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/033e5f26-b8e8-48f1-affd-3b9e7fba316a-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"033e5f26-b8e8-48f1-affd-3b9e7fba316a\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:22:35 crc kubenswrapper[4558]: I0120 17:22:35.968690 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/033e5f26-b8e8-48f1-affd-3b9e7fba316a-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"033e5f26-b8e8-48f1-affd-3b9e7fba316a\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:22:35 crc kubenswrapper[4558]: I0120 17:22:35.968742 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ts2c8\" (UniqueName: \"kubernetes.io/projected/033e5f26-b8e8-48f1-affd-3b9e7fba316a-kube-api-access-ts2c8\") pod \"rabbitmq-server-0\" (UID: \"033e5f26-b8e8-48f1-affd-3b9e7fba316a\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:22:35 crc kubenswrapper[4558]: I0120 
17:22:35.968781 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/033e5f26-b8e8-48f1-affd-3b9e7fba316a-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"033e5f26-b8e8-48f1-affd-3b9e7fba316a\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:22:35 crc kubenswrapper[4558]: I0120 17:22:35.968804 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/033e5f26-b8e8-48f1-affd-3b9e7fba316a-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"033e5f26-b8e8-48f1-affd-3b9e7fba316a\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:22:35 crc kubenswrapper[4558]: I0120 17:22:35.968828 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"rabbitmq-server-0\" (UID: \"033e5f26-b8e8-48f1-affd-3b9e7fba316a\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:22:35 crc kubenswrapper[4558]: I0120 17:22:35.969121 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"rabbitmq-server-0\" (UID: \"033e5f26-b8e8-48f1-affd-3b9e7fba316a\") device mount path \"/mnt/openstack/pv07\"" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:22:35 crc kubenswrapper[4558]: I0120 17:22:35.969147 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/033e5f26-b8e8-48f1-affd-3b9e7fba316a-config-data\") pod \"rabbitmq-server-0\" (UID: \"033e5f26-b8e8-48f1-affd-3b9e7fba316a\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:22:35 crc kubenswrapper[4558]: I0120 17:22:35.969326 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/033e5f26-b8e8-48f1-affd-3b9e7fba316a-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"033e5f26-b8e8-48f1-affd-3b9e7fba316a\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:22:35 crc kubenswrapper[4558]: I0120 17:22:35.969362 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/033e5f26-b8e8-48f1-affd-3b9e7fba316a-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"033e5f26-b8e8-48f1-affd-3b9e7fba316a\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:22:35 crc kubenswrapper[4558]: I0120 17:22:35.969394 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/033e5f26-b8e8-48f1-affd-3b9e7fba316a-pod-info\") pod \"rabbitmq-server-0\" (UID: \"033e5f26-b8e8-48f1-affd-3b9e7fba316a\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:22:35 crc kubenswrapper[4558]: I0120 17:22:35.969440 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/033e5f26-b8e8-48f1-affd-3b9e7fba316a-server-conf\") pod \"rabbitmq-server-0\" (UID: \"033e5f26-b8e8-48f1-affd-3b9e7fba316a\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:22:35 crc kubenswrapper[4558]: I0120 17:22:35.969280 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: 
\"kubernetes.io/empty-dir/033e5f26-b8e8-48f1-affd-3b9e7fba316a-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"033e5f26-b8e8-48f1-affd-3b9e7fba316a\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:22:35 crc kubenswrapper[4558]: I0120 17:22:35.969733 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/033e5f26-b8e8-48f1-affd-3b9e7fba316a-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"033e5f26-b8e8-48f1-affd-3b9e7fba316a\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:22:35 crc kubenswrapper[4558]: I0120 17:22:35.969254 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/033e5f26-b8e8-48f1-affd-3b9e7fba316a-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"033e5f26-b8e8-48f1-affd-3b9e7fba316a\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:22:35 crc kubenswrapper[4558]: I0120 17:22:35.970407 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/033e5f26-b8e8-48f1-affd-3b9e7fba316a-config-data\") pod \"rabbitmq-server-0\" (UID: \"033e5f26-b8e8-48f1-affd-3b9e7fba316a\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:22:35 crc kubenswrapper[4558]: I0120 17:22:35.970632 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/033e5f26-b8e8-48f1-affd-3b9e7fba316a-server-conf\") pod \"rabbitmq-server-0\" (UID: \"033e5f26-b8e8-48f1-affd-3b9e7fba316a\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:22:35 crc kubenswrapper[4558]: I0120 17:22:35.974649 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/033e5f26-b8e8-48f1-affd-3b9e7fba316a-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"033e5f26-b8e8-48f1-affd-3b9e7fba316a\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:22:35 crc kubenswrapper[4558]: I0120 17:22:35.974701 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/033e5f26-b8e8-48f1-affd-3b9e7fba316a-pod-info\") pod \"rabbitmq-server-0\" (UID: \"033e5f26-b8e8-48f1-affd-3b9e7fba316a\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:22:35 crc kubenswrapper[4558]: I0120 17:22:35.975345 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/033e5f26-b8e8-48f1-affd-3b9e7fba316a-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"033e5f26-b8e8-48f1-affd-3b9e7fba316a\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:22:35 crc kubenswrapper[4558]: I0120 17:22:35.976259 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/033e5f26-b8e8-48f1-affd-3b9e7fba316a-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"033e5f26-b8e8-48f1-affd-3b9e7fba316a\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:22:35 crc kubenswrapper[4558]: I0120 17:22:35.982328 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ts2c8\" (UniqueName: \"kubernetes.io/projected/033e5f26-b8e8-48f1-affd-3b9e7fba316a-kube-api-access-ts2c8\") pod \"rabbitmq-server-0\" (UID: \"033e5f26-b8e8-48f1-affd-3b9e7fba316a\") " pod="openstack-kuttl-tests/rabbitmq-server-0" 
Jan 20 17:22:35 crc kubenswrapper[4558]: I0120 17:22:35.984843 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"rabbitmq-server-0\" (UID: \"033e5f26-b8e8-48f1-affd-3b9e7fba316a\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:22:36 crc kubenswrapper[4558]: I0120 17:22:36.071861 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:22:36 crc kubenswrapper[4558]: I0120 17:22:36.078035 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-0"] Jan 20 17:22:36 crc kubenswrapper[4558]: I0120 17:22:36.080382 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:22:36 crc kubenswrapper[4558]: I0120 17:22:36.084052 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"rabbitmq-cell1-default-user" Jan 20 17:22:36 crc kubenswrapper[4558]: I0120 17:22:36.084276 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"rabbitmq-cell1-server-dockercfg-zh5dn" Jan 20 17:22:36 crc kubenswrapper[4558]: I0120 17:22:36.084386 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"rabbitmq-cell1-plugins-conf" Jan 20 17:22:36 crc kubenswrapper[4558]: I0120 17:22:36.084284 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"rabbitmq-cell1-server-conf" Jan 20 17:22:36 crc kubenswrapper[4558]: I0120 17:22:36.084106 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"rabbitmq-cell1-erlang-cookie" Jan 20 17:22:36 crc kubenswrapper[4558]: I0120 17:22:36.084090 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"rabbitmq-cell1-config-data" Jan 20 17:22:36 crc kubenswrapper[4558]: I0120 17:22:36.085180 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-rabbitmq-cell1-svc" Jan 20 17:22:36 crc kubenswrapper[4558]: I0120 17:22:36.097441 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-0"] Jan 20 17:22:36 crc kubenswrapper[4558]: I0120 17:22:36.275918 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7a2c4722-11cc-426c-8101-496c9ee97ca2-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"7a2c4722-11cc-426c-8101-496c9ee97ca2\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:22:36 crc kubenswrapper[4558]: I0120 17:22:36.276343 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7a2c4722-11cc-426c-8101-496c9ee97ca2-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"7a2c4722-11cc-426c-8101-496c9ee97ca2\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:22:36 crc kubenswrapper[4558]: I0120 17:22:36.276374 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zdxrq\" (UniqueName: \"kubernetes.io/projected/7a2c4722-11cc-426c-8101-496c9ee97ca2-kube-api-access-zdxrq\") pod \"rabbitmq-cell1-server-0\" (UID: \"7a2c4722-11cc-426c-8101-496c9ee97ca2\") " 
pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:22:36 crc kubenswrapper[4558]: I0120 17:22:36.276409 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7a2c4722-11cc-426c-8101-496c9ee97ca2-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"7a2c4722-11cc-426c-8101-496c9ee97ca2\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:22:36 crc kubenswrapper[4558]: I0120 17:22:36.276497 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"7a2c4722-11cc-426c-8101-496c9ee97ca2\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:22:36 crc kubenswrapper[4558]: I0120 17:22:36.276612 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7a2c4722-11cc-426c-8101-496c9ee97ca2-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"7a2c4722-11cc-426c-8101-496c9ee97ca2\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:22:36 crc kubenswrapper[4558]: I0120 17:22:36.276667 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7a2c4722-11cc-426c-8101-496c9ee97ca2-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"7a2c4722-11cc-426c-8101-496c9ee97ca2\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:22:36 crc kubenswrapper[4558]: I0120 17:22:36.276699 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/7a2c4722-11cc-426c-8101-496c9ee97ca2-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"7a2c4722-11cc-426c-8101-496c9ee97ca2\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:22:36 crc kubenswrapper[4558]: I0120 17:22:36.276764 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7a2c4722-11cc-426c-8101-496c9ee97ca2-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"7a2c4722-11cc-426c-8101-496c9ee97ca2\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:22:36 crc kubenswrapper[4558]: I0120 17:22:36.276845 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7a2c4722-11cc-426c-8101-496c9ee97ca2-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"7a2c4722-11cc-426c-8101-496c9ee97ca2\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:22:36 crc kubenswrapper[4558]: I0120 17:22:36.276871 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7a2c4722-11cc-426c-8101-496c9ee97ca2-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"7a2c4722-11cc-426c-8101-496c9ee97ca2\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:22:36 crc kubenswrapper[4558]: I0120 17:22:36.378403 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: 
\"kubernetes.io/configmap/7a2c4722-11cc-426c-8101-496c9ee97ca2-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"7a2c4722-11cc-426c-8101-496c9ee97ca2\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:22:36 crc kubenswrapper[4558]: I0120 17:22:36.378450 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7a2c4722-11cc-426c-8101-496c9ee97ca2-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"7a2c4722-11cc-426c-8101-496c9ee97ca2\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:22:36 crc kubenswrapper[4558]: I0120 17:22:36.378497 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zdxrq\" (UniqueName: \"kubernetes.io/projected/7a2c4722-11cc-426c-8101-496c9ee97ca2-kube-api-access-zdxrq\") pod \"rabbitmq-cell1-server-0\" (UID: \"7a2c4722-11cc-426c-8101-496c9ee97ca2\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:22:36 crc kubenswrapper[4558]: I0120 17:22:36.378525 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7a2c4722-11cc-426c-8101-496c9ee97ca2-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"7a2c4722-11cc-426c-8101-496c9ee97ca2\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:22:36 crc kubenswrapper[4558]: I0120 17:22:36.378558 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"7a2c4722-11cc-426c-8101-496c9ee97ca2\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:22:36 crc kubenswrapper[4558]: I0120 17:22:36.378612 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7a2c4722-11cc-426c-8101-496c9ee97ca2-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"7a2c4722-11cc-426c-8101-496c9ee97ca2\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:22:36 crc kubenswrapper[4558]: I0120 17:22:36.378637 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7a2c4722-11cc-426c-8101-496c9ee97ca2-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"7a2c4722-11cc-426c-8101-496c9ee97ca2\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:22:36 crc kubenswrapper[4558]: I0120 17:22:36.378666 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/7a2c4722-11cc-426c-8101-496c9ee97ca2-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"7a2c4722-11cc-426c-8101-496c9ee97ca2\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:22:36 crc kubenswrapper[4558]: I0120 17:22:36.378686 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7a2c4722-11cc-426c-8101-496c9ee97ca2-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"7a2c4722-11cc-426c-8101-496c9ee97ca2\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:22:36 crc kubenswrapper[4558]: I0120 17:22:36.378729 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: 
\"kubernetes.io/configmap/7a2c4722-11cc-426c-8101-496c9ee97ca2-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"7a2c4722-11cc-426c-8101-496c9ee97ca2\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:22:36 crc kubenswrapper[4558]: I0120 17:22:36.378744 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7a2c4722-11cc-426c-8101-496c9ee97ca2-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"7a2c4722-11cc-426c-8101-496c9ee97ca2\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:22:36 crc kubenswrapper[4558]: I0120 17:22:36.379485 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"7a2c4722-11cc-426c-8101-496c9ee97ca2\") device mount path \"/mnt/openstack/pv04\"" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:22:36 crc kubenswrapper[4558]: I0120 17:22:36.379690 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7a2c4722-11cc-426c-8101-496c9ee97ca2-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"7a2c4722-11cc-426c-8101-496c9ee97ca2\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:22:36 crc kubenswrapper[4558]: I0120 17:22:36.379800 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7a2c4722-11cc-426c-8101-496c9ee97ca2-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"7a2c4722-11cc-426c-8101-496c9ee97ca2\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:22:36 crc kubenswrapper[4558]: I0120 17:22:36.379825 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7a2c4722-11cc-426c-8101-496c9ee97ca2-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"7a2c4722-11cc-426c-8101-496c9ee97ca2\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:22:36 crc kubenswrapper[4558]: I0120 17:22:36.380623 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7a2c4722-11cc-426c-8101-496c9ee97ca2-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"7a2c4722-11cc-426c-8101-496c9ee97ca2\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:22:36 crc kubenswrapper[4558]: I0120 17:22:36.381799 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7a2c4722-11cc-426c-8101-496c9ee97ca2-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"7a2c4722-11cc-426c-8101-496c9ee97ca2\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:22:36 crc kubenswrapper[4558]: I0120 17:22:36.394691 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7a2c4722-11cc-426c-8101-496c9ee97ca2-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"7a2c4722-11cc-426c-8101-496c9ee97ca2\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:22:36 crc kubenswrapper[4558]: I0120 17:22:36.415698 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: 
\"kubernetes.io/downward-api/7a2c4722-11cc-426c-8101-496c9ee97ca2-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"7a2c4722-11cc-426c-8101-496c9ee97ca2\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:22:36 crc kubenswrapper[4558]: I0120 17:22:36.415772 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zdxrq\" (UniqueName: \"kubernetes.io/projected/7a2c4722-11cc-426c-8101-496c9ee97ca2-kube-api-access-zdxrq\") pod \"rabbitmq-cell1-server-0\" (UID: \"7a2c4722-11cc-426c-8101-496c9ee97ca2\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:22:36 crc kubenswrapper[4558]: I0120 17:22:36.415916 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7a2c4722-11cc-426c-8101-496c9ee97ca2-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"7a2c4722-11cc-426c-8101-496c9ee97ca2\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:22:36 crc kubenswrapper[4558]: I0120 17:22:36.415994 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/7a2c4722-11cc-426c-8101-496c9ee97ca2-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"7a2c4722-11cc-426c-8101-496c9ee97ca2\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:22:36 crc kubenswrapper[4558]: I0120 17:22:36.423391 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"7a2c4722-11cc-426c-8101-496c9ee97ca2\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:22:36 crc kubenswrapper[4558]: I0120 17:22:36.457662 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:22:36 crc kubenswrapper[4558]: I0120 17:22:36.506954 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:22:36 crc kubenswrapper[4558]: I0120 17:22:36.597479 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-0" event={"ID":"033e5f26-b8e8-48f1-affd-3b9e7fba316a","Type":"ContainerStarted","Data":"fcde411c60da965e4e82222fa7f3fa0dab22bb371ca3f9772e2fa456c513c0eb"} Jan 20 17:22:36 crc kubenswrapper[4558]: I0120 17:22:36.884442 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-0"] Jan 20 17:22:36 crc kubenswrapper[4558]: W0120 17:22:36.885205 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7a2c4722_11cc_426c_8101_496c9ee97ca2.slice/crio-a404c6b69974a9bb907bc97d8324c893fb2b73341afff70845cd81ca99024ca5 WatchSource:0}: Error finding container a404c6b69974a9bb907bc97d8324c893fb2b73341afff70845cd81ca99024ca5: Status 404 returned error can't find the container with id a404c6b69974a9bb907bc97d8324c893fb2b73341afff70845cd81ca99024ca5 Jan 20 17:22:37 crc kubenswrapper[4558]: I0120 17:22:37.441235 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:22:37 crc kubenswrapper[4558]: I0120 17:22:37.442592 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:22:37 crc kubenswrapper[4558]: I0120 17:22:37.444490 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-config-data" Jan 20 17:22:37 crc kubenswrapper[4558]: I0120 17:22:37.448736 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-scripts" Jan 20 17:22:37 crc kubenswrapper[4558]: I0120 17:22:37.449275 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-galera-openstack-svc" Jan 20 17:22:37 crc kubenswrapper[4558]: I0120 17:22:37.449407 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"galera-openstack-dockercfg-82xg7" Jan 20 17:22:37 crc kubenswrapper[4558]: I0120 17:22:37.452184 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"combined-ca-bundle" Jan 20 17:22:37 crc kubenswrapper[4558]: I0120 17:22:37.455578 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:22:37 crc kubenswrapper[4558]: I0120 17:22:37.599195 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage17-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage17-crc\") pod \"openstack-galera-0\" (UID: \"9db3acdd-184a-4004-a8d3-451673126318\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:22:37 crc kubenswrapper[4558]: I0120 17:22:37.599327 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9db3acdd-184a-4004-a8d3-451673126318-operator-scripts\") pod \"openstack-galera-0\" (UID: \"9db3acdd-184a-4004-a8d3-451673126318\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:22:37 crc kubenswrapper[4558]: I0120 17:22:37.599368 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/9db3acdd-184a-4004-a8d3-451673126318-config-data-generated\") pod \"openstack-galera-0\" (UID: \"9db3acdd-184a-4004-a8d3-451673126318\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:22:37 crc kubenswrapper[4558]: I0120 17:22:37.599404 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9db3acdd-184a-4004-a8d3-451673126318-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"9db3acdd-184a-4004-a8d3-451673126318\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:22:37 crc kubenswrapper[4558]: I0120 17:22:37.599438 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/9db3acdd-184a-4004-a8d3-451673126318-kolla-config\") pod \"openstack-galera-0\" (UID: \"9db3acdd-184a-4004-a8d3-451673126318\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:22:37 crc kubenswrapper[4558]: I0120 17:22:37.599477 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/9db3acdd-184a-4004-a8d3-451673126318-config-data-default\") pod \"openstack-galera-0\" (UID: \"9db3acdd-184a-4004-a8d3-451673126318\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 
17:22:37 crc kubenswrapper[4558]: I0120 17:22:37.599505 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kfj97\" (UniqueName: \"kubernetes.io/projected/9db3acdd-184a-4004-a8d3-451673126318-kube-api-access-kfj97\") pod \"openstack-galera-0\" (UID: \"9db3acdd-184a-4004-a8d3-451673126318\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:22:37 crc kubenswrapper[4558]: I0120 17:22:37.599599 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/9db3acdd-184a-4004-a8d3-451673126318-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"9db3acdd-184a-4004-a8d3-451673126318\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:22:37 crc kubenswrapper[4558]: I0120 17:22:37.605592 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" event={"ID":"7a2c4722-11cc-426c-8101-496c9ee97ca2","Type":"ContainerStarted","Data":"a404c6b69974a9bb907bc97d8324c893fb2b73341afff70845cd81ca99024ca5"} Jan 20 17:22:37 crc kubenswrapper[4558]: I0120 17:22:37.701222 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/9db3acdd-184a-4004-a8d3-451673126318-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"9db3acdd-184a-4004-a8d3-451673126318\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:22:37 crc kubenswrapper[4558]: I0120 17:22:37.701294 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage17-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage17-crc\") pod \"openstack-galera-0\" (UID: \"9db3acdd-184a-4004-a8d3-451673126318\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:22:37 crc kubenswrapper[4558]: I0120 17:22:37.701349 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9db3acdd-184a-4004-a8d3-451673126318-operator-scripts\") pod \"openstack-galera-0\" (UID: \"9db3acdd-184a-4004-a8d3-451673126318\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:22:37 crc kubenswrapper[4558]: I0120 17:22:37.701374 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/9db3acdd-184a-4004-a8d3-451673126318-config-data-generated\") pod \"openstack-galera-0\" (UID: \"9db3acdd-184a-4004-a8d3-451673126318\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:22:37 crc kubenswrapper[4558]: I0120 17:22:37.701398 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9db3acdd-184a-4004-a8d3-451673126318-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"9db3acdd-184a-4004-a8d3-451673126318\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:22:37 crc kubenswrapper[4558]: I0120 17:22:37.701418 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/9db3acdd-184a-4004-a8d3-451673126318-kolla-config\") pod \"openstack-galera-0\" (UID: \"9db3acdd-184a-4004-a8d3-451673126318\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:22:37 crc kubenswrapper[4558]: I0120 17:22:37.701443 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/9db3acdd-184a-4004-a8d3-451673126318-config-data-default\") pod \"openstack-galera-0\" (UID: \"9db3acdd-184a-4004-a8d3-451673126318\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:22:37 crc kubenswrapper[4558]: I0120 17:22:37.701463 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kfj97\" (UniqueName: \"kubernetes.io/projected/9db3acdd-184a-4004-a8d3-451673126318-kube-api-access-kfj97\") pod \"openstack-galera-0\" (UID: \"9db3acdd-184a-4004-a8d3-451673126318\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:22:37 crc kubenswrapper[4558]: I0120 17:22:37.702551 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/9db3acdd-184a-4004-a8d3-451673126318-config-data-generated\") pod \"openstack-galera-0\" (UID: \"9db3acdd-184a-4004-a8d3-451673126318\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:22:37 crc kubenswrapper[4558]: I0120 17:22:37.703093 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/9db3acdd-184a-4004-a8d3-451673126318-kolla-config\") pod \"openstack-galera-0\" (UID: \"9db3acdd-184a-4004-a8d3-451673126318\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:22:37 crc kubenswrapper[4558]: I0120 17:22:37.703380 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage17-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage17-crc\") pod \"openstack-galera-0\" (UID: \"9db3acdd-184a-4004-a8d3-451673126318\") device mount path \"/mnt/openstack/pv17\"" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:22:37 crc kubenswrapper[4558]: I0120 17:22:37.703520 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9db3acdd-184a-4004-a8d3-451673126318-operator-scripts\") pod \"openstack-galera-0\" (UID: \"9db3acdd-184a-4004-a8d3-451673126318\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:22:37 crc kubenswrapper[4558]: I0120 17:22:37.705248 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/9db3acdd-184a-4004-a8d3-451673126318-config-data-default\") pod \"openstack-galera-0\" (UID: \"9db3acdd-184a-4004-a8d3-451673126318\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:22:37 crc kubenswrapper[4558]: I0120 17:22:37.705375 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/9db3acdd-184a-4004-a8d3-451673126318-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"9db3acdd-184a-4004-a8d3-451673126318\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:22:37 crc kubenswrapper[4558]: I0120 17:22:37.705664 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9db3acdd-184a-4004-a8d3-451673126318-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"9db3acdd-184a-4004-a8d3-451673126318\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:22:37 crc kubenswrapper[4558]: I0120 17:22:37.718593 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kfj97\" (UniqueName: 
\"kubernetes.io/projected/9db3acdd-184a-4004-a8d3-451673126318-kube-api-access-kfj97\") pod \"openstack-galera-0\" (UID: \"9db3acdd-184a-4004-a8d3-451673126318\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:22:37 crc kubenswrapper[4558]: I0120 17:22:37.724607 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage17-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage17-crc\") pod \"openstack-galera-0\" (UID: \"9db3acdd-184a-4004-a8d3-451673126318\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:22:37 crc kubenswrapper[4558]: I0120 17:22:37.761229 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:22:38 crc kubenswrapper[4558]: I0120 17:22:38.160098 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:22:38 crc kubenswrapper[4558]: I0120 17:22:38.614608 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-0" event={"ID":"033e5f26-b8e8-48f1-affd-3b9e7fba316a","Type":"ContainerStarted","Data":"b6108373b6dc1c8073ee4649e4bf895e88d86f76fea047217ce0066f6364f215"} Jan 20 17:22:38 crc kubenswrapper[4558]: I0120 17:22:38.618099 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"9db3acdd-184a-4004-a8d3-451673126318","Type":"ContainerStarted","Data":"31bf315baedc02ab08496f1a6ef75754e50f376e1ce0ce44b48404a088ee0df2"} Jan 20 17:22:38 crc kubenswrapper[4558]: I0120 17:22:38.618140 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"9db3acdd-184a-4004-a8d3-451673126318","Type":"ContainerStarted","Data":"dab11a458a378a0ff226a6959ddd7ccd20e52fd9484d0d1c4c1489bc148993e6"} Jan 20 17:22:38 crc kubenswrapper[4558]: I0120 17:22:38.620433 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" event={"ID":"7a2c4722-11cc-426c-8101-496c9ee97ca2","Type":"ContainerStarted","Data":"844cb778093e33ece39e19f2cf2d5530f5852b1038b5c9c019b1574903b32025"} Jan 20 17:22:38 crc kubenswrapper[4558]: I0120 17:22:38.748636 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:22:38 crc kubenswrapper[4558]: I0120 17:22:38.750046 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:22:38 crc kubenswrapper[4558]: I0120 17:22:38.751577 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"galera-openstack-cell1-dockercfg-ct6ph" Jan 20 17:22:38 crc kubenswrapper[4558]: I0120 17:22:38.752698 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-galera-openstack-cell1-svc" Jan 20 17:22:38 crc kubenswrapper[4558]: I0120 17:22:38.753042 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-cell1-scripts" Jan 20 17:22:38 crc kubenswrapper[4558]: I0120 17:22:38.756715 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:22:38 crc kubenswrapper[4558]: I0120 17:22:38.761870 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-cell1-config-data" Jan 20 17:22:38 crc kubenswrapper[4558]: I0120 17:22:38.824622 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/d274ba80-2e42-4823-bbf2-02691e791ec9-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"d274ba80-2e42-4823-bbf2-02691e791ec9\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:22:38 crc kubenswrapper[4558]: I0120 17:22:38.824669 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-cell1-galera-0\" (UID: \"d274ba80-2e42-4823-bbf2-02691e791ec9\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:22:38 crc kubenswrapper[4558]: I0120 17:22:38.824730 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d274ba80-2e42-4823-bbf2-02691e791ec9-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"d274ba80-2e42-4823-bbf2-02691e791ec9\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:22:38 crc kubenswrapper[4558]: I0120 17:22:38.825117 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/d274ba80-2e42-4823-bbf2-02691e791ec9-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"d274ba80-2e42-4823-bbf2-02691e791ec9\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:22:38 crc kubenswrapper[4558]: I0120 17:22:38.825152 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/d274ba80-2e42-4823-bbf2-02691e791ec9-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"d274ba80-2e42-4823-bbf2-02691e791ec9\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:22:38 crc kubenswrapper[4558]: I0120 17:22:38.825350 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r9wdx\" (UniqueName: \"kubernetes.io/projected/d274ba80-2e42-4823-bbf2-02691e791ec9-kube-api-access-r9wdx\") pod \"openstack-cell1-galera-0\" (UID: \"d274ba80-2e42-4823-bbf2-02691e791ec9\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:22:38 crc kubenswrapper[4558]: I0120 17:22:38.825407 
4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/d274ba80-2e42-4823-bbf2-02691e791ec9-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"d274ba80-2e42-4823-bbf2-02691e791ec9\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:22:38 crc kubenswrapper[4558]: I0120 17:22:38.826008 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d274ba80-2e42-4823-bbf2-02691e791ec9-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"d274ba80-2e42-4823-bbf2-02691e791ec9\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:22:38 crc kubenswrapper[4558]: I0120 17:22:38.927994 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/d274ba80-2e42-4823-bbf2-02691e791ec9-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"d274ba80-2e42-4823-bbf2-02691e791ec9\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:22:38 crc kubenswrapper[4558]: I0120 17:22:38.928037 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/d274ba80-2e42-4823-bbf2-02691e791ec9-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"d274ba80-2e42-4823-bbf2-02691e791ec9\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:22:38 crc kubenswrapper[4558]: I0120 17:22:38.928085 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r9wdx\" (UniqueName: \"kubernetes.io/projected/d274ba80-2e42-4823-bbf2-02691e791ec9-kube-api-access-r9wdx\") pod \"openstack-cell1-galera-0\" (UID: \"d274ba80-2e42-4823-bbf2-02691e791ec9\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:22:38 crc kubenswrapper[4558]: I0120 17:22:38.928107 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/d274ba80-2e42-4823-bbf2-02691e791ec9-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"d274ba80-2e42-4823-bbf2-02691e791ec9\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:22:38 crc kubenswrapper[4558]: I0120 17:22:38.928181 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d274ba80-2e42-4823-bbf2-02691e791ec9-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"d274ba80-2e42-4823-bbf2-02691e791ec9\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:22:38 crc kubenswrapper[4558]: I0120 17:22:38.928258 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-cell1-galera-0\" (UID: \"d274ba80-2e42-4823-bbf2-02691e791ec9\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:22:38 crc kubenswrapper[4558]: I0120 17:22:38.928276 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/d274ba80-2e42-4823-bbf2-02691e791ec9-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"d274ba80-2e42-4823-bbf2-02691e791ec9\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:22:38 crc 
kubenswrapper[4558]: I0120 17:22:38.928312 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d274ba80-2e42-4823-bbf2-02691e791ec9-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"d274ba80-2e42-4823-bbf2-02691e791ec9\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:22:38 crc kubenswrapper[4558]: I0120 17:22:38.929376 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-cell1-galera-0\" (UID: \"d274ba80-2e42-4823-bbf2-02691e791ec9\") device mount path \"/mnt/openstack/pv10\"" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:22:38 crc kubenswrapper[4558]: I0120 17:22:38.929499 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/d274ba80-2e42-4823-bbf2-02691e791ec9-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"d274ba80-2e42-4823-bbf2-02691e791ec9\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:22:38 crc kubenswrapper[4558]: I0120 17:22:38.930329 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/d274ba80-2e42-4823-bbf2-02691e791ec9-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"d274ba80-2e42-4823-bbf2-02691e791ec9\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:22:38 crc kubenswrapper[4558]: I0120 17:22:38.930520 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d274ba80-2e42-4823-bbf2-02691e791ec9-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"d274ba80-2e42-4823-bbf2-02691e791ec9\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:22:38 crc kubenswrapper[4558]: I0120 17:22:38.930985 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/d274ba80-2e42-4823-bbf2-02691e791ec9-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"d274ba80-2e42-4823-bbf2-02691e791ec9\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:22:38 crc kubenswrapper[4558]: I0120 17:22:38.933766 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d274ba80-2e42-4823-bbf2-02691e791ec9-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"d274ba80-2e42-4823-bbf2-02691e791ec9\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:22:38 crc kubenswrapper[4558]: I0120 17:22:38.943570 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/d274ba80-2e42-4823-bbf2-02691e791ec9-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"d274ba80-2e42-4823-bbf2-02691e791ec9\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:22:38 crc kubenswrapper[4558]: I0120 17:22:38.943834 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r9wdx\" (UniqueName: \"kubernetes.io/projected/d274ba80-2e42-4823-bbf2-02691e791ec9-kube-api-access-r9wdx\") pod \"openstack-cell1-galera-0\" (UID: \"d274ba80-2e42-4823-bbf2-02691e791ec9\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:22:38 crc 
kubenswrapper[4558]: I0120 17:22:38.955120 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-cell1-galera-0\" (UID: \"d274ba80-2e42-4823-bbf2-02691e791ec9\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:22:39 crc kubenswrapper[4558]: I0120 17:22:39.026375 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:22:39 crc kubenswrapper[4558]: I0120 17:22:39.027375 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:22:39 crc kubenswrapper[4558]: I0120 17:22:39.029029 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"memcached-memcached-dockercfg-pp5ft" Jan 20 17:22:39 crc kubenswrapper[4558]: I0120 17:22:39.029326 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-memcached-svc" Jan 20 17:22:39 crc kubenswrapper[4558]: I0120 17:22:39.032687 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"memcached-config-data" Jan 20 17:22:39 crc kubenswrapper[4558]: I0120 17:22:39.040953 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:22:39 crc kubenswrapper[4558]: I0120 17:22:39.065175 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:22:39 crc kubenswrapper[4558]: I0120 17:22:39.131767 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zk5cx\" (UniqueName: \"kubernetes.io/projected/987d6a20-4fc9-411e-ae1f-9ff5dd78e80d-kube-api-access-zk5cx\") pod \"memcached-0\" (UID: \"987d6a20-4fc9-411e-ae1f-9ff5dd78e80d\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:22:39 crc kubenswrapper[4558]: I0120 17:22:39.131832 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/987d6a20-4fc9-411e-ae1f-9ff5dd78e80d-kolla-config\") pod \"memcached-0\" (UID: \"987d6a20-4fc9-411e-ae1f-9ff5dd78e80d\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:22:39 crc kubenswrapper[4558]: I0120 17:22:39.131868 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/987d6a20-4fc9-411e-ae1f-9ff5dd78e80d-combined-ca-bundle\") pod \"memcached-0\" (UID: \"987d6a20-4fc9-411e-ae1f-9ff5dd78e80d\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:22:39 crc kubenswrapper[4558]: I0120 17:22:39.132213 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/987d6a20-4fc9-411e-ae1f-9ff5dd78e80d-memcached-tls-certs\") pod \"memcached-0\" (UID: \"987d6a20-4fc9-411e-ae1f-9ff5dd78e80d\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:22:39 crc kubenswrapper[4558]: I0120 17:22:39.132276 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/987d6a20-4fc9-411e-ae1f-9ff5dd78e80d-config-data\") pod \"memcached-0\" (UID: \"987d6a20-4fc9-411e-ae1f-9ff5dd78e80d\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:22:39 crc 
kubenswrapper[4558]: I0120 17:22:39.234716 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/987d6a20-4fc9-411e-ae1f-9ff5dd78e80d-memcached-tls-certs\") pod \"memcached-0\" (UID: \"987d6a20-4fc9-411e-ae1f-9ff5dd78e80d\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:22:39 crc kubenswrapper[4558]: I0120 17:22:39.234784 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/987d6a20-4fc9-411e-ae1f-9ff5dd78e80d-config-data\") pod \"memcached-0\" (UID: \"987d6a20-4fc9-411e-ae1f-9ff5dd78e80d\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:22:39 crc kubenswrapper[4558]: I0120 17:22:39.234846 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zk5cx\" (UniqueName: \"kubernetes.io/projected/987d6a20-4fc9-411e-ae1f-9ff5dd78e80d-kube-api-access-zk5cx\") pod \"memcached-0\" (UID: \"987d6a20-4fc9-411e-ae1f-9ff5dd78e80d\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:22:39 crc kubenswrapper[4558]: I0120 17:22:39.234901 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/987d6a20-4fc9-411e-ae1f-9ff5dd78e80d-kolla-config\") pod \"memcached-0\" (UID: \"987d6a20-4fc9-411e-ae1f-9ff5dd78e80d\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:22:39 crc kubenswrapper[4558]: I0120 17:22:39.234933 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/987d6a20-4fc9-411e-ae1f-9ff5dd78e80d-combined-ca-bundle\") pod \"memcached-0\" (UID: \"987d6a20-4fc9-411e-ae1f-9ff5dd78e80d\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:22:39 crc kubenswrapper[4558]: I0120 17:22:39.237051 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/987d6a20-4fc9-411e-ae1f-9ff5dd78e80d-config-data\") pod \"memcached-0\" (UID: \"987d6a20-4fc9-411e-ae1f-9ff5dd78e80d\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:22:39 crc kubenswrapper[4558]: I0120 17:22:39.237206 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/987d6a20-4fc9-411e-ae1f-9ff5dd78e80d-kolla-config\") pod \"memcached-0\" (UID: \"987d6a20-4fc9-411e-ae1f-9ff5dd78e80d\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:22:39 crc kubenswrapper[4558]: I0120 17:22:39.239373 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/987d6a20-4fc9-411e-ae1f-9ff5dd78e80d-memcached-tls-certs\") pod \"memcached-0\" (UID: \"987d6a20-4fc9-411e-ae1f-9ff5dd78e80d\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:22:39 crc kubenswrapper[4558]: I0120 17:22:39.240513 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/987d6a20-4fc9-411e-ae1f-9ff5dd78e80d-combined-ca-bundle\") pod \"memcached-0\" (UID: \"987d6a20-4fc9-411e-ae1f-9ff5dd78e80d\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:22:39 crc kubenswrapper[4558]: I0120 17:22:39.253926 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zk5cx\" (UniqueName: \"kubernetes.io/projected/987d6a20-4fc9-411e-ae1f-9ff5dd78e80d-kube-api-access-zk5cx\") pod \"memcached-0\" 
(UID: \"987d6a20-4fc9-411e-ae1f-9ff5dd78e80d\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:22:39 crc kubenswrapper[4558]: I0120 17:22:39.343524 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:22:39 crc kubenswrapper[4558]: I0120 17:22:39.499843 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:22:39 crc kubenswrapper[4558]: I0120 17:22:39.638606 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"d274ba80-2e42-4823-bbf2-02691e791ec9","Type":"ContainerStarted","Data":"32bab0bb78471eae5e794a30e1a2b093ec27e805a0be0cf942b4b318fcbdef9f"} Jan 20 17:22:39 crc kubenswrapper[4558]: I0120 17:22:39.756185 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:22:39 crc kubenswrapper[4558]: W0120 17:22:39.766074 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod987d6a20_4fc9_411e_ae1f_9ff5dd78e80d.slice/crio-7632ebad1469d2f19951c3a650fdd764e24c47ac23b221cece303c5d595e0540 WatchSource:0}: Error finding container 7632ebad1469d2f19951c3a650fdd764e24c47ac23b221cece303c5d595e0540: Status 404 returned error can't find the container with id 7632ebad1469d2f19951c3a650fdd764e24c47ac23b221cece303c5d595e0540 Jan 20 17:22:40 crc kubenswrapper[4558]: I0120 17:22:40.649321 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"987d6a20-4fc9-411e-ae1f-9ff5dd78e80d","Type":"ContainerStarted","Data":"9e7a56b68f74368ebdccf3a2050688f4660eec04ee70f20c2816ab7ef47f5d08"} Jan 20 17:22:40 crc kubenswrapper[4558]: I0120 17:22:40.649666 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"987d6a20-4fc9-411e-ae1f-9ff5dd78e80d","Type":"ContainerStarted","Data":"7632ebad1469d2f19951c3a650fdd764e24c47ac23b221cece303c5d595e0540"} Jan 20 17:22:40 crc kubenswrapper[4558]: I0120 17:22:40.649688 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:22:40 crc kubenswrapper[4558]: I0120 17:22:40.651097 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"d274ba80-2e42-4823-bbf2-02691e791ec9","Type":"ContainerStarted","Data":"90717528255ead8997bbaf58b8826534843ca75599c5bbaefead8ea9f5018821"} Jan 20 17:22:40 crc kubenswrapper[4558]: I0120 17:22:40.671136 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/memcached-0" podStartSLOduration=1.6711184000000001 podStartE2EDuration="1.6711184s" podCreationTimestamp="2026-01-20 17:22:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:22:40.665522614 +0000 UTC m=+2454.425860581" watchObservedRunningTime="2026-01-20 17:22:40.6711184 +0000 UTC m=+2454.431456367" Jan 20 17:22:41 crc kubenswrapper[4558]: I0120 17:22:41.107641 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:22:41 crc kubenswrapper[4558]: I0120 17:22:41.109045 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:22:41 crc kubenswrapper[4558]: I0120 17:22:41.110694 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"telemetry-ceilometer-dockercfg-5nz7c" Jan 20 17:22:41 crc kubenswrapper[4558]: I0120 17:22:41.122603 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:22:41 crc kubenswrapper[4558]: I0120 17:22:41.168458 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mwbjr\" (UniqueName: \"kubernetes.io/projected/f1f8c221-1f45-40d0-8742-38ab3d5c0d2f-kube-api-access-mwbjr\") pod \"kube-state-metrics-0\" (UID: \"f1f8c221-1f45-40d0-8742-38ab3d5c0d2f\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:22:41 crc kubenswrapper[4558]: I0120 17:22:41.270092 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mwbjr\" (UniqueName: \"kubernetes.io/projected/f1f8c221-1f45-40d0-8742-38ab3d5c0d2f-kube-api-access-mwbjr\") pod \"kube-state-metrics-0\" (UID: \"f1f8c221-1f45-40d0-8742-38ab3d5c0d2f\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:22:41 crc kubenswrapper[4558]: I0120 17:22:41.289103 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mwbjr\" (UniqueName: \"kubernetes.io/projected/f1f8c221-1f45-40d0-8742-38ab3d5c0d2f-kube-api-access-mwbjr\") pod \"kube-state-metrics-0\" (UID: \"f1f8c221-1f45-40d0-8742-38ab3d5c0d2f\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:22:41 crc kubenswrapper[4558]: I0120 17:22:41.434502 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:22:41 crc kubenswrapper[4558]: I0120 17:22:41.566556 4558 scope.go:117] "RemoveContainer" containerID="0ffce44366a8b4466427b682fb41640425366a0f4449210959148de9aef695c6" Jan 20 17:22:41 crc kubenswrapper[4558]: E0120 17:22:41.567026 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:22:41 crc kubenswrapper[4558]: I0120 17:22:41.664244 4558 generic.go:334] "Generic (PLEG): container finished" podID="9db3acdd-184a-4004-a8d3-451673126318" containerID="31bf315baedc02ab08496f1a6ef75754e50f376e1ce0ce44b48404a088ee0df2" exitCode=0 Jan 20 17:22:41 crc kubenswrapper[4558]: I0120 17:22:41.664329 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"9db3acdd-184a-4004-a8d3-451673126318","Type":"ContainerDied","Data":"31bf315baedc02ab08496f1a6ef75754e50f376e1ce0ce44b48404a088ee0df2"} Jan 20 17:22:41 crc kubenswrapper[4558]: I0120 17:22:41.881155 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:22:42 crc kubenswrapper[4558]: I0120 17:22:42.676112 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" 
event={"ID":"f1f8c221-1f45-40d0-8742-38ab3d5c0d2f","Type":"ContainerStarted","Data":"3361ee4e50c10f2a05b4a510cf99025036c5f5dd140fd944a3939bdc7eb53ec8"} Jan 20 17:22:42 crc kubenswrapper[4558]: I0120 17:22:42.676212 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"f1f8c221-1f45-40d0-8742-38ab3d5c0d2f","Type":"ContainerStarted","Data":"c66f9fe5d0946bf8a3b4cc1770194beb42716b50d5c67d582af5dd099ce6b6d0"} Jan 20 17:22:42 crc kubenswrapper[4558]: I0120 17:22:42.676405 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:22:42 crc kubenswrapper[4558]: I0120 17:22:42.679472 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"9db3acdd-184a-4004-a8d3-451673126318","Type":"ContainerStarted","Data":"74db7b040a7e0441aa4205a49b98fee925418b1a898081b635b0efcdf2e08d5a"} Jan 20 17:22:42 crc kubenswrapper[4558]: I0120 17:22:42.681603 4558 generic.go:334] "Generic (PLEG): container finished" podID="d274ba80-2e42-4823-bbf2-02691e791ec9" containerID="90717528255ead8997bbaf58b8826534843ca75599c5bbaefead8ea9f5018821" exitCode=0 Jan 20 17:22:42 crc kubenswrapper[4558]: I0120 17:22:42.681674 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"d274ba80-2e42-4823-bbf2-02691e791ec9","Type":"ContainerDied","Data":"90717528255ead8997bbaf58b8826534843ca75599c5bbaefead8ea9f5018821"} Jan 20 17:22:42 crc kubenswrapper[4558]: I0120 17:22:42.693338 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/kube-state-metrics-0" podStartSLOduration=1.418945995 podStartE2EDuration="1.693321233s" podCreationTimestamp="2026-01-20 17:22:41 +0000 UTC" firstStartedPulling="2026-01-20 17:22:41.882088908 +0000 UTC m=+2455.642426876" lastFinishedPulling="2026-01-20 17:22:42.156464147 +0000 UTC m=+2455.916802114" observedRunningTime="2026-01-20 17:22:42.690031584 +0000 UTC m=+2456.450369551" watchObservedRunningTime="2026-01-20 17:22:42.693321233 +0000 UTC m=+2456.453659199" Jan 20 17:22:42 crc kubenswrapper[4558]: I0120 17:22:42.712447 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/openstack-galera-0" podStartSLOduration=6.712436269 podStartE2EDuration="6.712436269s" podCreationTimestamp="2026-01-20 17:22:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:22:42.70721064 +0000 UTC m=+2456.467548606" watchObservedRunningTime="2026-01-20 17:22:42.712436269 +0000 UTC m=+2456.472774236" Jan 20 17:22:43 crc kubenswrapper[4558]: I0120 17:22:43.690787 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"d274ba80-2e42-4823-bbf2-02691e791ec9","Type":"ContainerStarted","Data":"96ef199805699173d17182add49a78cb239dafab7eff49108278d5e224dfb51a"} Jan 20 17:22:43 crc kubenswrapper[4558]: I0120 17:22:43.708856 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/openstack-cell1-galera-0" podStartSLOduration=6.708844752 podStartE2EDuration="6.708844752s" podCreationTimestamp="2026-01-20 17:22:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:22:43.706462571 +0000 UTC 
m=+2457.466800538" watchObservedRunningTime="2026-01-20 17:22:43.708844752 +0000 UTC m=+2457.469182709" Jan 20 17:22:44 crc kubenswrapper[4558]: I0120 17:22:44.347363 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:22:45 crc kubenswrapper[4558]: I0120 17:22:45.214447 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:22:45 crc kubenswrapper[4558]: I0120 17:22:45.216560 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:22:45 crc kubenswrapper[4558]: I0120 17:22:45.218933 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ovncluster-ovndbcluster-nb-dockercfg-6zkzl" Jan 20 17:22:45 crc kubenswrapper[4558]: I0120 17:22:45.221244 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovndbcluster-nb-config" Jan 20 17:22:45 crc kubenswrapper[4558]: I0120 17:22:45.221593 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovndbcluster-nb-scripts" Jan 20 17:22:45 crc kubenswrapper[4558]: I0120 17:22:45.221827 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ovn-metrics" Jan 20 17:22:45 crc kubenswrapper[4558]: I0120 17:22:45.221265 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ovndbcluster-nb-ovndbs" Jan 20 17:22:45 crc kubenswrapper[4558]: I0120 17:22:45.225819 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:22:45 crc kubenswrapper[4558]: I0120 17:22:45.246428 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aa79d817-2b73-469c-8b09-f2f312835773-config\") pod \"ovsdbserver-nb-0\" (UID: \"aa79d817-2b73-469c-8b09-f2f312835773\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:22:45 crc kubenswrapper[4558]: I0120 17:22:45.246546 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/aa79d817-2b73-469c-8b09-f2f312835773-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"aa79d817-2b73-469c-8b09-f2f312835773\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:22:45 crc kubenswrapper[4558]: I0120 17:22:45.246609 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/aa79d817-2b73-469c-8b09-f2f312835773-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"aa79d817-2b73-469c-8b09-f2f312835773\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:22:45 crc kubenswrapper[4558]: I0120 17:22:45.246778 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa79d817-2b73-469c-8b09-f2f312835773-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"aa79d817-2b73-469c-8b09-f2f312835773\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:22:45 crc kubenswrapper[4558]: I0120 17:22:45.246867 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bwjzm\" (UniqueName: 
\"kubernetes.io/projected/aa79d817-2b73-469c-8b09-f2f312835773-kube-api-access-bwjzm\") pod \"ovsdbserver-nb-0\" (UID: \"aa79d817-2b73-469c-8b09-f2f312835773\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:22:45 crc kubenswrapper[4558]: I0120 17:22:45.246976 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa79d817-2b73-469c-8b09-f2f312835773-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"aa79d817-2b73-469c-8b09-f2f312835773\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:22:45 crc kubenswrapper[4558]: I0120 17:22:45.247081 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa79d817-2b73-469c-8b09-f2f312835773-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"aa79d817-2b73-469c-8b09-f2f312835773\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:22:45 crc kubenswrapper[4558]: I0120 17:22:45.247199 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-nb-0\" (UID: \"aa79d817-2b73-469c-8b09-f2f312835773\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:22:45 crc kubenswrapper[4558]: I0120 17:22:45.349089 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/aa79d817-2b73-469c-8b09-f2f312835773-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"aa79d817-2b73-469c-8b09-f2f312835773\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:22:45 crc kubenswrapper[4558]: I0120 17:22:45.349192 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa79d817-2b73-469c-8b09-f2f312835773-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"aa79d817-2b73-469c-8b09-f2f312835773\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:22:45 crc kubenswrapper[4558]: I0120 17:22:45.349222 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bwjzm\" (UniqueName: \"kubernetes.io/projected/aa79d817-2b73-469c-8b09-f2f312835773-kube-api-access-bwjzm\") pod \"ovsdbserver-nb-0\" (UID: \"aa79d817-2b73-469c-8b09-f2f312835773\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:22:45 crc kubenswrapper[4558]: I0120 17:22:45.349262 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa79d817-2b73-469c-8b09-f2f312835773-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"aa79d817-2b73-469c-8b09-f2f312835773\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:22:45 crc kubenswrapper[4558]: I0120 17:22:45.349321 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa79d817-2b73-469c-8b09-f2f312835773-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"aa79d817-2b73-469c-8b09-f2f312835773\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:22:45 crc kubenswrapper[4558]: I0120 17:22:45.349394 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-nb-0\" (UID: \"aa79d817-2b73-469c-8b09-f2f312835773\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:22:45 crc kubenswrapper[4558]: I0120 17:22:45.349500 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aa79d817-2b73-469c-8b09-f2f312835773-config\") pod \"ovsdbserver-nb-0\" (UID: \"aa79d817-2b73-469c-8b09-f2f312835773\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:22:45 crc kubenswrapper[4558]: I0120 17:22:45.349627 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/aa79d817-2b73-469c-8b09-f2f312835773-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"aa79d817-2b73-469c-8b09-f2f312835773\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:22:45 crc kubenswrapper[4558]: I0120 17:22:45.350006 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-nb-0\" (UID: \"aa79d817-2b73-469c-8b09-f2f312835773\") device mount path \"/mnt/openstack/pv09\"" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:22:45 crc kubenswrapper[4558]: I0120 17:22:45.350186 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/aa79d817-2b73-469c-8b09-f2f312835773-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"aa79d817-2b73-469c-8b09-f2f312835773\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:22:45 crc kubenswrapper[4558]: I0120 17:22:45.350744 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/aa79d817-2b73-469c-8b09-f2f312835773-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"aa79d817-2b73-469c-8b09-f2f312835773\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:22:45 crc kubenswrapper[4558]: I0120 17:22:45.350942 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aa79d817-2b73-469c-8b09-f2f312835773-config\") pod \"ovsdbserver-nb-0\" (UID: \"aa79d817-2b73-469c-8b09-f2f312835773\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:22:45 crc kubenswrapper[4558]: I0120 17:22:45.355256 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa79d817-2b73-469c-8b09-f2f312835773-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"aa79d817-2b73-469c-8b09-f2f312835773\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:22:45 crc kubenswrapper[4558]: I0120 17:22:45.356939 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa79d817-2b73-469c-8b09-f2f312835773-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"aa79d817-2b73-469c-8b09-f2f312835773\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:22:45 crc kubenswrapper[4558]: I0120 17:22:45.361969 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa79d817-2b73-469c-8b09-f2f312835773-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"aa79d817-2b73-469c-8b09-f2f312835773\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:22:45 crc 
kubenswrapper[4558]: I0120 17:22:45.366933 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bwjzm\" (UniqueName: \"kubernetes.io/projected/aa79d817-2b73-469c-8b09-f2f312835773-kube-api-access-bwjzm\") pod \"ovsdbserver-nb-0\" (UID: \"aa79d817-2b73-469c-8b09-f2f312835773\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:22:45 crc kubenswrapper[4558]: I0120 17:22:45.377775 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-nb-0\" (UID: \"aa79d817-2b73-469c-8b09-f2f312835773\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:22:45 crc kubenswrapper[4558]: I0120 17:22:45.537428 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:22:45 crc kubenswrapper[4558]: I0120 17:22:45.973774 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:22:45 crc kubenswrapper[4558]: W0120 17:22:45.975049 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaa79d817_2b73_469c_8b09_f2f312835773.slice/crio-e6da935e2ec40f9f80911536c92ee50f3e6ae508066cd6ccbd070c3710fc353f WatchSource:0}: Error finding container e6da935e2ec40f9f80911536c92ee50f3e6ae508066cd6ccbd070c3710fc353f: Status 404 returned error can't find the container with id e6da935e2ec40f9f80911536c92ee50f3e6ae508066cd6ccbd070c3710fc353f Jan 20 17:22:46 crc kubenswrapper[4558]: I0120 17:22:46.735274 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"aa79d817-2b73-469c-8b09-f2f312835773","Type":"ContainerStarted","Data":"9902de0f0a19fcf3501305d4c98aa8a835da1fc64164b28742b36af1db06a046"} Jan 20 17:22:46 crc kubenswrapper[4558]: I0120 17:22:46.737283 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"aa79d817-2b73-469c-8b09-f2f312835773","Type":"ContainerStarted","Data":"f5e6e1f7dc89862c9157326c6e877a21963b94977d4b4616a286d30936fb463f"} Jan 20 17:22:46 crc kubenswrapper[4558]: I0120 17:22:46.737387 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"aa79d817-2b73-469c-8b09-f2f312835773","Type":"ContainerStarted","Data":"e6da935e2ec40f9f80911536c92ee50f3e6ae508066cd6ccbd070c3710fc353f"} Jan 20 17:22:46 crc kubenswrapper[4558]: I0120 17:22:46.760350 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ovsdbserver-nb-0" podStartSLOduration=2.760324456 podStartE2EDuration="2.760324456s" podCreationTimestamp="2026-01-20 17:22:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:22:46.757345242 +0000 UTC m=+2460.517683209" watchObservedRunningTime="2026-01-20 17:22:46.760324456 +0000 UTC m=+2460.520662423" Jan 20 17:22:47 crc kubenswrapper[4558]: I0120 17:22:47.321583 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:22:47 crc kubenswrapper[4558]: I0120 17:22:47.322835 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:22:47 crc kubenswrapper[4558]: I0120 17:22:47.324770 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ovndbcluster-sb-ovndbs" Jan 20 17:22:47 crc kubenswrapper[4558]: I0120 17:22:47.325859 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovndbcluster-sb-config" Jan 20 17:22:47 crc kubenswrapper[4558]: I0120 17:22:47.325974 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovndbcluster-sb-scripts" Jan 20 17:22:47 crc kubenswrapper[4558]: I0120 17:22:47.326187 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ovncluster-ovndbcluster-sb-dockercfg-2c6d4" Jan 20 17:22:47 crc kubenswrapper[4558]: I0120 17:22:47.356876 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:22:47 crc kubenswrapper[4558]: I0120 17:22:47.484632 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/63766878-2a9a-47b6-9209-554b59500f10-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"63766878-2a9a-47b6-9209-554b59500f10\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:22:47 crc kubenswrapper[4558]: I0120 17:22:47.484710 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gcwf4\" (UniqueName: \"kubernetes.io/projected/63766878-2a9a-47b6-9209-554b59500f10-kube-api-access-gcwf4\") pod \"ovsdbserver-sb-0\" (UID: \"63766878-2a9a-47b6-9209-554b59500f10\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:22:47 crc kubenswrapper[4558]: I0120 17:22:47.484789 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/63766878-2a9a-47b6-9209-554b59500f10-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"63766878-2a9a-47b6-9209-554b59500f10\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:22:47 crc kubenswrapper[4558]: I0120 17:22:47.484844 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-sb-0\" (UID: \"63766878-2a9a-47b6-9209-554b59500f10\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:22:47 crc kubenswrapper[4558]: I0120 17:22:47.484878 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/63766878-2a9a-47b6-9209-554b59500f10-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"63766878-2a9a-47b6-9209-554b59500f10\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:22:47 crc kubenswrapper[4558]: I0120 17:22:47.484907 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/63766878-2a9a-47b6-9209-554b59500f10-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"63766878-2a9a-47b6-9209-554b59500f10\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:22:47 crc kubenswrapper[4558]: I0120 17:22:47.484930 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/63766878-2a9a-47b6-9209-554b59500f10-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"63766878-2a9a-47b6-9209-554b59500f10\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:22:47 crc kubenswrapper[4558]: I0120 17:22:47.485140 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/63766878-2a9a-47b6-9209-554b59500f10-config\") pod \"ovsdbserver-sb-0\" (UID: \"63766878-2a9a-47b6-9209-554b59500f10\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:22:47 crc kubenswrapper[4558]: I0120 17:22:47.586610 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/63766878-2a9a-47b6-9209-554b59500f10-config\") pod \"ovsdbserver-sb-0\" (UID: \"63766878-2a9a-47b6-9209-554b59500f10\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:22:47 crc kubenswrapper[4558]: I0120 17:22:47.586723 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/63766878-2a9a-47b6-9209-554b59500f10-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"63766878-2a9a-47b6-9209-554b59500f10\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:22:47 crc kubenswrapper[4558]: I0120 17:22:47.586793 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gcwf4\" (UniqueName: \"kubernetes.io/projected/63766878-2a9a-47b6-9209-554b59500f10-kube-api-access-gcwf4\") pod \"ovsdbserver-sb-0\" (UID: \"63766878-2a9a-47b6-9209-554b59500f10\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:22:47 crc kubenswrapper[4558]: I0120 17:22:47.586833 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/63766878-2a9a-47b6-9209-554b59500f10-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"63766878-2a9a-47b6-9209-554b59500f10\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:22:47 crc kubenswrapper[4558]: I0120 17:22:47.586874 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-sb-0\" (UID: \"63766878-2a9a-47b6-9209-554b59500f10\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:22:47 crc kubenswrapper[4558]: I0120 17:22:47.586921 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/63766878-2a9a-47b6-9209-554b59500f10-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"63766878-2a9a-47b6-9209-554b59500f10\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:22:47 crc kubenswrapper[4558]: I0120 17:22:47.586962 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/63766878-2a9a-47b6-9209-554b59500f10-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"63766878-2a9a-47b6-9209-554b59500f10\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:22:47 crc kubenswrapper[4558]: I0120 17:22:47.587000 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/63766878-2a9a-47b6-9209-554b59500f10-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: 
\"63766878-2a9a-47b6-9209-554b59500f10\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:22:47 crc kubenswrapper[4558]: I0120 17:22:47.587402 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/63766878-2a9a-47b6-9209-554b59500f10-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"63766878-2a9a-47b6-9209-554b59500f10\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:22:47 crc kubenswrapper[4558]: I0120 17:22:47.587504 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-sb-0\" (UID: \"63766878-2a9a-47b6-9209-554b59500f10\") device mount path \"/mnt/openstack/pv05\"" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:22:47 crc kubenswrapper[4558]: I0120 17:22:47.587764 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/63766878-2a9a-47b6-9209-554b59500f10-config\") pod \"ovsdbserver-sb-0\" (UID: \"63766878-2a9a-47b6-9209-554b59500f10\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:22:47 crc kubenswrapper[4558]: I0120 17:22:47.588457 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/63766878-2a9a-47b6-9209-554b59500f10-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"63766878-2a9a-47b6-9209-554b59500f10\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:22:47 crc kubenswrapper[4558]: I0120 17:22:47.593574 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/63766878-2a9a-47b6-9209-554b59500f10-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"63766878-2a9a-47b6-9209-554b59500f10\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:22:47 crc kubenswrapper[4558]: I0120 17:22:47.594954 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/63766878-2a9a-47b6-9209-554b59500f10-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"63766878-2a9a-47b6-9209-554b59500f10\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:22:47 crc kubenswrapper[4558]: I0120 17:22:47.596364 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/63766878-2a9a-47b6-9209-554b59500f10-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"63766878-2a9a-47b6-9209-554b59500f10\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:22:47 crc kubenswrapper[4558]: I0120 17:22:47.609896 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gcwf4\" (UniqueName: \"kubernetes.io/projected/63766878-2a9a-47b6-9209-554b59500f10-kube-api-access-gcwf4\") pod \"ovsdbserver-sb-0\" (UID: \"63766878-2a9a-47b6-9209-554b59500f10\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:22:47 crc kubenswrapper[4558]: I0120 17:22:47.611382 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-sb-0\" (UID: \"63766878-2a9a-47b6-9209-554b59500f10\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:22:47 crc kubenswrapper[4558]: I0120 17:22:47.669660 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:22:47 crc kubenswrapper[4558]: I0120 17:22:47.761388 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:22:47 crc kubenswrapper[4558]: I0120 17:22:47.761589 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:22:48 crc kubenswrapper[4558]: I0120 17:22:48.080790 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:22:48 crc kubenswrapper[4558]: W0120 17:22:48.081796 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod63766878_2a9a_47b6_9209_554b59500f10.slice/crio-2492a122f242489df747c3740a3ae369146cc7b6726c89e71751c5fa0bdb91d4 WatchSource:0}: Error finding container 2492a122f242489df747c3740a3ae369146cc7b6726c89e71751c5fa0bdb91d4: Status 404 returned error can't find the container with id 2492a122f242489df747c3740a3ae369146cc7b6726c89e71751c5fa0bdb91d4 Jan 20 17:22:48 crc kubenswrapper[4558]: I0120 17:22:48.538618 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:22:48 crc kubenswrapper[4558]: I0120 17:22:48.763718 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"63766878-2a9a-47b6-9209-554b59500f10","Type":"ContainerStarted","Data":"f69a90baad4062187a2630067e5b5be8fe21f3f7ef14d41a5c46fea09bbfc6d4"} Jan 20 17:22:48 crc kubenswrapper[4558]: I0120 17:22:48.763780 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"63766878-2a9a-47b6-9209-554b59500f10","Type":"ContainerStarted","Data":"37f931cc076b0de2ba4bd77c0544f81aec55cb5d8ae2f041d7332ba974499651"} Jan 20 17:22:48 crc kubenswrapper[4558]: I0120 17:22:48.763800 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"63766878-2a9a-47b6-9209-554b59500f10","Type":"ContainerStarted","Data":"2492a122f242489df747c3740a3ae369146cc7b6726c89e71751c5fa0bdb91d4"} Jan 20 17:22:48 crc kubenswrapper[4558]: I0120 17:22:48.784727 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ovsdbserver-sb-0" podStartSLOduration=2.784711528 podStartE2EDuration="2.784711528s" podCreationTimestamp="2026-01-20 17:22:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:22:48.778918651 +0000 UTC m=+2462.539256618" watchObservedRunningTime="2026-01-20 17:22:48.784711528 +0000 UTC m=+2462.545049495" Jan 20 17:22:49 crc kubenswrapper[4558]: I0120 17:22:49.065877 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:22:49 crc kubenswrapper[4558]: I0120 17:22:49.065935 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:22:49 crc kubenswrapper[4558]: I0120 17:22:49.135589 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:22:49 crc kubenswrapper[4558]: I0120 17:22:49.824066 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:22:50 crc kubenswrapper[4558]: I0120 17:22:50.034808 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:22:50 crc kubenswrapper[4558]: I0120 17:22:50.095248 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:22:50 crc kubenswrapper[4558]: I0120 17:22:50.538313 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:22:50 crc kubenswrapper[4558]: I0120 17:22:50.670807 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:22:51 crc kubenswrapper[4558]: I0120 17:22:51.439121 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:22:51 crc kubenswrapper[4558]: I0120 17:22:51.569423 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:22:51 crc kubenswrapper[4558]: I0120 17:22:51.817502 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:22:52 crc kubenswrapper[4558]: I0120 17:22:52.511965 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/swift-storage-0"] Jan 20 17:22:52 crc kubenswrapper[4558]: I0120 17:22:52.518450 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:22:52 crc kubenswrapper[4558]: I0120 17:22:52.520962 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"swift-conf" Jan 20 17:22:52 crc kubenswrapper[4558]: I0120 17:22:52.521281 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"swift-swift-dockercfg-vgvcg" Jan 20 17:22:52 crc kubenswrapper[4558]: I0120 17:22:52.522411 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"swift-storage-config-data" Jan 20 17:22:52 crc kubenswrapper[4558]: I0120 17:22:52.522859 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"swift-ring-files" Jan 20 17:22:52 crc kubenswrapper[4558]: I0120 17:22:52.527327 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-storage-0"] Jan 20 17:22:52 crc kubenswrapper[4558]: I0120 17:22:52.570083 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kxw6w\" (UniqueName: \"kubernetes.io/projected/cbbfff95-9f90-4be2-9ed7-0a1cbbe43542-kube-api-access-kxw6w\") pod \"swift-storage-0\" (UID: \"cbbfff95-9f90-4be2-9ed7-0a1cbbe43542\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:22:52 crc kubenswrapper[4558]: I0120 17:22:52.570128 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/cbbfff95-9f90-4be2-9ed7-0a1cbbe43542-etc-swift\") pod \"swift-storage-0\" (UID: \"cbbfff95-9f90-4be2-9ed7-0a1cbbe43542\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:22:52 crc kubenswrapper[4558]: I0120 17:22:52.570294 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage12-crc\") pod \"swift-storage-0\" (UID: \"cbbfff95-9f90-4be2-9ed7-0a1cbbe43542\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:22:52 crc kubenswrapper[4558]: I0120 17:22:52.570460 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/cbbfff95-9f90-4be2-9ed7-0a1cbbe43542-lock\") pod \"swift-storage-0\" (UID: \"cbbfff95-9f90-4be2-9ed7-0a1cbbe43542\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:22:52 crc kubenswrapper[4558]: I0120 17:22:52.570515 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/cbbfff95-9f90-4be2-9ed7-0a1cbbe43542-cache\") pod \"swift-storage-0\" (UID: \"cbbfff95-9f90-4be2-9ed7-0a1cbbe43542\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:22:52 crc kubenswrapper[4558]: I0120 17:22:52.670143 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:22:52 crc kubenswrapper[4558]: I0120 17:22:52.672785 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"swift-storage-0\" (UID: \"cbbfff95-9f90-4be2-9ed7-0a1cbbe43542\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:22:52 crc kubenswrapper[4558]: I0120 17:22:52.673000 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/cbbfff95-9f90-4be2-9ed7-0a1cbbe43542-lock\") pod \"swift-storage-0\" (UID: \"cbbfff95-9f90-4be2-9ed7-0a1cbbe43542\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:22:52 crc kubenswrapper[4558]: I0120 17:22:52.673093 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/cbbfff95-9f90-4be2-9ed7-0a1cbbe43542-cache\") pod \"swift-storage-0\" (UID: \"cbbfff95-9f90-4be2-9ed7-0a1cbbe43542\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:22:52 crc kubenswrapper[4558]: I0120 17:22:52.673222 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kxw6w\" (UniqueName: \"kubernetes.io/projected/cbbfff95-9f90-4be2-9ed7-0a1cbbe43542-kube-api-access-kxw6w\") pod \"swift-storage-0\" (UID: \"cbbfff95-9f90-4be2-9ed7-0a1cbbe43542\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:22:52 crc kubenswrapper[4558]: I0120 17:22:52.673284 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"swift-storage-0\" (UID: \"cbbfff95-9f90-4be2-9ed7-0a1cbbe43542\") device mount path \"/mnt/openstack/pv12\"" pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:22:52 crc kubenswrapper[4558]: I0120 17:22:52.673375 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/cbbfff95-9f90-4be2-9ed7-0a1cbbe43542-etc-swift\") pod \"swift-storage-0\" (UID: \"cbbfff95-9f90-4be2-9ed7-0a1cbbe43542\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:22:52 crc kubenswrapper[4558]: I0120 17:22:52.673517 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/cbbfff95-9f90-4be2-9ed7-0a1cbbe43542-lock\") pod 
\"swift-storage-0\" (UID: \"cbbfff95-9f90-4be2-9ed7-0a1cbbe43542\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:22:52 crc kubenswrapper[4558]: I0120 17:22:52.674086 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/cbbfff95-9f90-4be2-9ed7-0a1cbbe43542-cache\") pod \"swift-storage-0\" (UID: \"cbbfff95-9f90-4be2-9ed7-0a1cbbe43542\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:22:52 crc kubenswrapper[4558]: E0120 17:22:52.674143 4558 projected.go:288] Couldn't get configMap openstack-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 20 17:22:52 crc kubenswrapper[4558]: E0120 17:22:52.674191 4558 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Jan 20 17:22:52 crc kubenswrapper[4558]: E0120 17:22:52.674274 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/cbbfff95-9f90-4be2-9ed7-0a1cbbe43542-etc-swift podName:cbbfff95-9f90-4be2-9ed7-0a1cbbe43542 nodeName:}" failed. No retries permitted until 2026-01-20 17:22:53.174248856 +0000 UTC m=+2466.934586812 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/cbbfff95-9f90-4be2-9ed7-0a1cbbe43542-etc-swift") pod "swift-storage-0" (UID: "cbbfff95-9f90-4be2-9ed7-0a1cbbe43542") : configmap "swift-ring-files" not found Jan 20 17:22:52 crc kubenswrapper[4558]: I0120 17:22:52.696724 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kxw6w\" (UniqueName: \"kubernetes.io/projected/cbbfff95-9f90-4be2-9ed7-0a1cbbe43542-kube-api-access-kxw6w\") pod \"swift-storage-0\" (UID: \"cbbfff95-9f90-4be2-9ed7-0a1cbbe43542\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:22:52 crc kubenswrapper[4558]: I0120 17:22:52.703108 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"swift-storage-0\" (UID: \"cbbfff95-9f90-4be2-9ed7-0a1cbbe43542\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:22:52 crc kubenswrapper[4558]: I0120 17:22:52.826904 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-xkgps"] Jan 20 17:22:52 crc kubenswrapper[4558]: I0120 17:22:52.828498 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/swift-ring-rebalance-xkgps" Jan 20 17:22:52 crc kubenswrapper[4558]: I0120 17:22:52.830350 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"swift-ring-scripts" Jan 20 17:22:52 crc kubenswrapper[4558]: I0120 17:22:52.830665 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"swift-proxy-config-data" Jan 20 17:22:52 crc kubenswrapper[4558]: I0120 17:22:52.842077 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"swift-ring-config-data" Jan 20 17:22:52 crc kubenswrapper[4558]: I0120 17:22:52.848760 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-xkgps"] Jan 20 17:22:52 crc kubenswrapper[4558]: E0120 17:22:52.850599 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[combined-ca-bundle dispersionconf etc-swift kube-api-access-d4kbv ring-data-devices scripts swiftconf], unattached volumes=[], failed to process volumes=[combined-ca-bundle dispersionconf etc-swift kube-api-access-d4kbv ring-data-devices scripts swiftconf]: context canceled" pod="openstack-kuttl-tests/swift-ring-rebalance-xkgps" podUID="a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71" Jan 20 17:22:52 crc kubenswrapper[4558]: I0120 17:22:52.864480 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-xkgps"] Jan 20 17:22:52 crc kubenswrapper[4558]: I0120 17:22:52.879134 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-scr8s"] Jan 20 17:22:52 crc kubenswrapper[4558]: I0120 17:22:52.880089 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-scr8s"] Jan 20 17:22:52 crc kubenswrapper[4558]: I0120 17:22:52.880187 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/swift-ring-rebalance-scr8s" Jan 20 17:22:52 crc kubenswrapper[4558]: I0120 17:22:52.882863 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71-scripts\") pod \"swift-ring-rebalance-xkgps\" (UID: \"a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71\") " pod="openstack-kuttl-tests/swift-ring-rebalance-xkgps" Jan 20 17:22:52 crc kubenswrapper[4558]: I0120 17:22:52.882920 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d4kbv\" (UniqueName: \"kubernetes.io/projected/a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71-kube-api-access-d4kbv\") pod \"swift-ring-rebalance-xkgps\" (UID: \"a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71\") " pod="openstack-kuttl-tests/swift-ring-rebalance-xkgps" Jan 20 17:22:52 crc kubenswrapper[4558]: I0120 17:22:52.882964 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71-swiftconf\") pod \"swift-ring-rebalance-xkgps\" (UID: \"a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71\") " pod="openstack-kuttl-tests/swift-ring-rebalance-xkgps" Jan 20 17:22:52 crc kubenswrapper[4558]: I0120 17:22:52.882992 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71-combined-ca-bundle\") pod \"swift-ring-rebalance-xkgps\" (UID: \"a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71\") " pod="openstack-kuttl-tests/swift-ring-rebalance-xkgps" Jan 20 17:22:52 crc kubenswrapper[4558]: I0120 17:22:52.883008 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71-dispersionconf\") pod \"swift-ring-rebalance-xkgps\" (UID: \"a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71\") " pod="openstack-kuttl-tests/swift-ring-rebalance-xkgps" Jan 20 17:22:52 crc kubenswrapper[4558]: I0120 17:22:52.883258 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71-etc-swift\") pod \"swift-ring-rebalance-xkgps\" (UID: \"a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71\") " pod="openstack-kuttl-tests/swift-ring-rebalance-xkgps" Jan 20 17:22:52 crc kubenswrapper[4558]: I0120 17:22:52.883406 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71-ring-data-devices\") pod \"swift-ring-rebalance-xkgps\" (UID: \"a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71\") " pod="openstack-kuttl-tests/swift-ring-rebalance-xkgps" Jan 20 17:22:52 crc kubenswrapper[4558]: I0120 17:22:52.984586 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d4kbv\" (UniqueName: \"kubernetes.io/projected/a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71-kube-api-access-d4kbv\") pod \"swift-ring-rebalance-xkgps\" (UID: \"a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71\") " pod="openstack-kuttl-tests/swift-ring-rebalance-xkgps" Jan 20 17:22:52 crc kubenswrapper[4558]: I0120 17:22:52.984661 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/90fa68b7-a335-4dba-b8f1-a9ab54c07786-etc-swift\") pod \"swift-ring-rebalance-scr8s\" (UID: \"90fa68b7-a335-4dba-b8f1-a9ab54c07786\") " pod="openstack-kuttl-tests/swift-ring-rebalance-scr8s" Jan 20 17:22:52 crc kubenswrapper[4558]: I0120 17:22:52.984745 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71-swiftconf\") pod \"swift-ring-rebalance-xkgps\" (UID: \"a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71\") " pod="openstack-kuttl-tests/swift-ring-rebalance-xkgps" Jan 20 17:22:52 crc kubenswrapper[4558]: I0120 17:22:52.984789 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71-dispersionconf\") pod \"swift-ring-rebalance-xkgps\" (UID: \"a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71\") " pod="openstack-kuttl-tests/swift-ring-rebalance-xkgps" Jan 20 17:22:52 crc kubenswrapper[4558]: I0120 17:22:52.984811 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71-combined-ca-bundle\") pod \"swift-ring-rebalance-xkgps\" (UID: \"a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71\") " pod="openstack-kuttl-tests/swift-ring-rebalance-xkgps" Jan 20 17:22:52 crc kubenswrapper[4558]: I0120 17:22:52.984840 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90fa68b7-a335-4dba-b8f1-a9ab54c07786-combined-ca-bundle\") pod \"swift-ring-rebalance-scr8s\" (UID: \"90fa68b7-a335-4dba-b8f1-a9ab54c07786\") " pod="openstack-kuttl-tests/swift-ring-rebalance-scr8s" Jan 20 17:22:52 crc kubenswrapper[4558]: I0120 17:22:52.984865 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/90fa68b7-a335-4dba-b8f1-a9ab54c07786-dispersionconf\") pod \"swift-ring-rebalance-scr8s\" (UID: \"90fa68b7-a335-4dba-b8f1-a9ab54c07786\") " pod="openstack-kuttl-tests/swift-ring-rebalance-scr8s" Jan 20 17:22:52 crc kubenswrapper[4558]: I0120 17:22:52.984893 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/90fa68b7-a335-4dba-b8f1-a9ab54c07786-ring-data-devices\") pod \"swift-ring-rebalance-scr8s\" (UID: \"90fa68b7-a335-4dba-b8f1-a9ab54c07786\") " pod="openstack-kuttl-tests/swift-ring-rebalance-scr8s" Jan 20 17:22:52 crc kubenswrapper[4558]: I0120 17:22:52.984919 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71-etc-swift\") pod \"swift-ring-rebalance-xkgps\" (UID: \"a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71\") " pod="openstack-kuttl-tests/swift-ring-rebalance-xkgps" Jan 20 17:22:52 crc kubenswrapper[4558]: I0120 17:22:52.985060 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/90fa68b7-a335-4dba-b8f1-a9ab54c07786-swiftconf\") pod \"swift-ring-rebalance-scr8s\" (UID: \"90fa68b7-a335-4dba-b8f1-a9ab54c07786\") " pod="openstack-kuttl-tests/swift-ring-rebalance-scr8s" Jan 20 17:22:52 crc kubenswrapper[4558]: I0120 17:22:52.985106 4558 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rzm6b\" (UniqueName: \"kubernetes.io/projected/90fa68b7-a335-4dba-b8f1-a9ab54c07786-kube-api-access-rzm6b\") pod \"swift-ring-rebalance-scr8s\" (UID: \"90fa68b7-a335-4dba-b8f1-a9ab54c07786\") " pod="openstack-kuttl-tests/swift-ring-rebalance-scr8s" Jan 20 17:22:52 crc kubenswrapper[4558]: I0120 17:22:52.985150 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71-ring-data-devices\") pod \"swift-ring-rebalance-xkgps\" (UID: \"a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71\") " pod="openstack-kuttl-tests/swift-ring-rebalance-xkgps" Jan 20 17:22:52 crc kubenswrapper[4558]: I0120 17:22:52.985328 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/90fa68b7-a335-4dba-b8f1-a9ab54c07786-scripts\") pod \"swift-ring-rebalance-scr8s\" (UID: \"90fa68b7-a335-4dba-b8f1-a9ab54c07786\") " pod="openstack-kuttl-tests/swift-ring-rebalance-scr8s" Jan 20 17:22:52 crc kubenswrapper[4558]: I0120 17:22:52.985438 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71-etc-swift\") pod \"swift-ring-rebalance-xkgps\" (UID: \"a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71\") " pod="openstack-kuttl-tests/swift-ring-rebalance-xkgps" Jan 20 17:22:52 crc kubenswrapper[4558]: I0120 17:22:52.985441 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71-scripts\") pod \"swift-ring-rebalance-xkgps\" (UID: \"a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71\") " pod="openstack-kuttl-tests/swift-ring-rebalance-xkgps" Jan 20 17:22:52 crc kubenswrapper[4558]: I0120 17:22:52.986427 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71-scripts\") pod \"swift-ring-rebalance-xkgps\" (UID: \"a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71\") " pod="openstack-kuttl-tests/swift-ring-rebalance-xkgps" Jan 20 17:22:52 crc kubenswrapper[4558]: I0120 17:22:52.986494 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71-ring-data-devices\") pod \"swift-ring-rebalance-xkgps\" (UID: \"a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71\") " pod="openstack-kuttl-tests/swift-ring-rebalance-xkgps" Jan 20 17:22:52 crc kubenswrapper[4558]: I0120 17:22:52.988782 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71-dispersionconf\") pod \"swift-ring-rebalance-xkgps\" (UID: \"a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71\") " pod="openstack-kuttl-tests/swift-ring-rebalance-xkgps" Jan 20 17:22:52 crc kubenswrapper[4558]: I0120 17:22:52.991332 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71-swiftconf\") pod \"swift-ring-rebalance-xkgps\" (UID: \"a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71\") " pod="openstack-kuttl-tests/swift-ring-rebalance-xkgps" Jan 20 17:22:52 crc kubenswrapper[4558]: I0120 17:22:52.999381 4558 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71-combined-ca-bundle\") pod \"swift-ring-rebalance-xkgps\" (UID: \"a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71\") " pod="openstack-kuttl-tests/swift-ring-rebalance-xkgps" Jan 20 17:22:53 crc kubenswrapper[4558]: I0120 17:22:53.008584 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d4kbv\" (UniqueName: \"kubernetes.io/projected/a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71-kube-api-access-d4kbv\") pod \"swift-ring-rebalance-xkgps\" (UID: \"a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71\") " pod="openstack-kuttl-tests/swift-ring-rebalance-xkgps" Jan 20 17:22:53 crc kubenswrapper[4558]: I0120 17:22:53.086970 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/90fa68b7-a335-4dba-b8f1-a9ab54c07786-swiftconf\") pod \"swift-ring-rebalance-scr8s\" (UID: \"90fa68b7-a335-4dba-b8f1-a9ab54c07786\") " pod="openstack-kuttl-tests/swift-ring-rebalance-scr8s" Jan 20 17:22:53 crc kubenswrapper[4558]: I0120 17:22:53.087046 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rzm6b\" (UniqueName: \"kubernetes.io/projected/90fa68b7-a335-4dba-b8f1-a9ab54c07786-kube-api-access-rzm6b\") pod \"swift-ring-rebalance-scr8s\" (UID: \"90fa68b7-a335-4dba-b8f1-a9ab54c07786\") " pod="openstack-kuttl-tests/swift-ring-rebalance-scr8s" Jan 20 17:22:53 crc kubenswrapper[4558]: I0120 17:22:53.087479 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/90fa68b7-a335-4dba-b8f1-a9ab54c07786-scripts\") pod \"swift-ring-rebalance-scr8s\" (UID: \"90fa68b7-a335-4dba-b8f1-a9ab54c07786\") " pod="openstack-kuttl-tests/swift-ring-rebalance-scr8s" Jan 20 17:22:53 crc kubenswrapper[4558]: I0120 17:22:53.088107 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/90fa68b7-a335-4dba-b8f1-a9ab54c07786-scripts\") pod \"swift-ring-rebalance-scr8s\" (UID: \"90fa68b7-a335-4dba-b8f1-a9ab54c07786\") " pod="openstack-kuttl-tests/swift-ring-rebalance-scr8s" Jan 20 17:22:53 crc kubenswrapper[4558]: I0120 17:22:53.088313 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/90fa68b7-a335-4dba-b8f1-a9ab54c07786-etc-swift\") pod \"swift-ring-rebalance-scr8s\" (UID: \"90fa68b7-a335-4dba-b8f1-a9ab54c07786\") " pod="openstack-kuttl-tests/swift-ring-rebalance-scr8s" Jan 20 17:22:53 crc kubenswrapper[4558]: I0120 17:22:53.088587 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/90fa68b7-a335-4dba-b8f1-a9ab54c07786-etc-swift\") pod \"swift-ring-rebalance-scr8s\" (UID: \"90fa68b7-a335-4dba-b8f1-a9ab54c07786\") " pod="openstack-kuttl-tests/swift-ring-rebalance-scr8s" Jan 20 17:22:53 crc kubenswrapper[4558]: I0120 17:22:53.088702 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90fa68b7-a335-4dba-b8f1-a9ab54c07786-combined-ca-bundle\") pod \"swift-ring-rebalance-scr8s\" (UID: \"90fa68b7-a335-4dba-b8f1-a9ab54c07786\") " pod="openstack-kuttl-tests/swift-ring-rebalance-scr8s" Jan 20 17:22:53 crc kubenswrapper[4558]: I0120 17:22:53.088733 4558 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/90fa68b7-a335-4dba-b8f1-a9ab54c07786-dispersionconf\") pod \"swift-ring-rebalance-scr8s\" (UID: \"90fa68b7-a335-4dba-b8f1-a9ab54c07786\") " pod="openstack-kuttl-tests/swift-ring-rebalance-scr8s" Jan 20 17:22:53 crc kubenswrapper[4558]: I0120 17:22:53.089094 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/90fa68b7-a335-4dba-b8f1-a9ab54c07786-ring-data-devices\") pod \"swift-ring-rebalance-scr8s\" (UID: \"90fa68b7-a335-4dba-b8f1-a9ab54c07786\") " pod="openstack-kuttl-tests/swift-ring-rebalance-scr8s" Jan 20 17:22:53 crc kubenswrapper[4558]: I0120 17:22:53.089725 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/90fa68b7-a335-4dba-b8f1-a9ab54c07786-ring-data-devices\") pod \"swift-ring-rebalance-scr8s\" (UID: \"90fa68b7-a335-4dba-b8f1-a9ab54c07786\") " pod="openstack-kuttl-tests/swift-ring-rebalance-scr8s" Jan 20 17:22:53 crc kubenswrapper[4558]: I0120 17:22:53.090538 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/90fa68b7-a335-4dba-b8f1-a9ab54c07786-swiftconf\") pod \"swift-ring-rebalance-scr8s\" (UID: \"90fa68b7-a335-4dba-b8f1-a9ab54c07786\") " pod="openstack-kuttl-tests/swift-ring-rebalance-scr8s" Jan 20 17:22:53 crc kubenswrapper[4558]: I0120 17:22:53.093428 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/90fa68b7-a335-4dba-b8f1-a9ab54c07786-dispersionconf\") pod \"swift-ring-rebalance-scr8s\" (UID: \"90fa68b7-a335-4dba-b8f1-a9ab54c07786\") " pod="openstack-kuttl-tests/swift-ring-rebalance-scr8s" Jan 20 17:22:53 crc kubenswrapper[4558]: I0120 17:22:53.093686 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90fa68b7-a335-4dba-b8f1-a9ab54c07786-combined-ca-bundle\") pod \"swift-ring-rebalance-scr8s\" (UID: \"90fa68b7-a335-4dba-b8f1-a9ab54c07786\") " pod="openstack-kuttl-tests/swift-ring-rebalance-scr8s" Jan 20 17:22:53 crc kubenswrapper[4558]: I0120 17:22:53.104827 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rzm6b\" (UniqueName: \"kubernetes.io/projected/90fa68b7-a335-4dba-b8f1-a9ab54c07786-kube-api-access-rzm6b\") pod \"swift-ring-rebalance-scr8s\" (UID: \"90fa68b7-a335-4dba-b8f1-a9ab54c07786\") " pod="openstack-kuttl-tests/swift-ring-rebalance-scr8s" Jan 20 17:22:53 crc kubenswrapper[4558]: I0120 17:22:53.192141 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/cbbfff95-9f90-4be2-9ed7-0a1cbbe43542-etc-swift\") pod \"swift-storage-0\" (UID: \"cbbfff95-9f90-4be2-9ed7-0a1cbbe43542\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:22:53 crc kubenswrapper[4558]: E0120 17:22:53.192345 4558 projected.go:288] Couldn't get configMap openstack-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 20 17:22:53 crc kubenswrapper[4558]: E0120 17:22:53.192383 4558 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Jan 20 17:22:53 crc kubenswrapper[4558]: E0120 17:22:53.192456 4558 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/projected/cbbfff95-9f90-4be2-9ed7-0a1cbbe43542-etc-swift podName:cbbfff95-9f90-4be2-9ed7-0a1cbbe43542 nodeName:}" failed. No retries permitted until 2026-01-20 17:22:54.192435059 +0000 UTC m=+2467.952773026 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/cbbfff95-9f90-4be2-9ed7-0a1cbbe43542-etc-swift") pod "swift-storage-0" (UID: "cbbfff95-9f90-4be2-9ed7-0a1cbbe43542") : configmap "swift-ring-files" not found Jan 20 17:22:53 crc kubenswrapper[4558]: I0120 17:22:53.197773 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-ring-rebalance-scr8s" Jan 20 17:22:53 crc kubenswrapper[4558]: I0120 17:22:53.566681 4558 scope.go:117] "RemoveContainer" containerID="0ffce44366a8b4466427b682fb41640425366a0f4449210959148de9aef695c6" Jan 20 17:22:53 crc kubenswrapper[4558]: E0120 17:22:53.567037 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:22:53 crc kubenswrapper[4558]: I0120 17:22:53.600833 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-scr8s"] Jan 20 17:22:53 crc kubenswrapper[4558]: W0120 17:22:53.604823 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod90fa68b7_a335_4dba_b8f1_a9ab54c07786.slice/crio-e79f71f85d20d1bcd9bd6e9db445113413ef99252cf1ca1a26f178fc595d2ea9 WatchSource:0}: Error finding container e79f71f85d20d1bcd9bd6e9db445113413ef99252cf1ca1a26f178fc595d2ea9: Status 404 returned error can't find the container with id e79f71f85d20d1bcd9bd6e9db445113413ef99252cf1ca1a26f178fc595d2ea9 Jan 20 17:22:53 crc kubenswrapper[4558]: I0120 17:22:53.709139 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:22:53 crc kubenswrapper[4558]: I0120 17:22:53.744143 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:22:53 crc kubenswrapper[4558]: I0120 17:22:53.821058 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-ring-rebalance-scr8s" event={"ID":"90fa68b7-a335-4dba-b8f1-a9ab54c07786","Type":"ContainerStarted","Data":"f51941633ec981d615e3f81dbf2603d9890a9ed4aa990ee310afe05d0cc363d5"} Jan 20 17:22:53 crc kubenswrapper[4558]: I0120 17:22:53.821129 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-ring-rebalance-scr8s" event={"ID":"90fa68b7-a335-4dba-b8f1-a9ab54c07786","Type":"ContainerStarted","Data":"e79f71f85d20d1bcd9bd6e9db445113413ef99252cf1ca1a26f178fc595d2ea9"} Jan 20 17:22:53 crc kubenswrapper[4558]: I0120 17:22:53.821212 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/swift-ring-rebalance-xkgps" Jan 20 17:22:53 crc kubenswrapper[4558]: I0120 17:22:53.843303 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/swift-ring-rebalance-scr8s" podStartSLOduration=1.84327226 podStartE2EDuration="1.84327226s" podCreationTimestamp="2026-01-20 17:22:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:22:53.836635926 +0000 UTC m=+2467.596973893" watchObservedRunningTime="2026-01-20 17:22:53.84327226 +0000 UTC m=+2467.603610226" Jan 20 17:22:53 crc kubenswrapper[4558]: I0120 17:22:53.847727 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-ring-rebalance-xkgps" Jan 20 17:22:53 crc kubenswrapper[4558]: I0120 17:22:53.916868 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71-etc-swift\") pod \"a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71\" (UID: \"a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71\") " Jan 20 17:22:53 crc kubenswrapper[4558]: I0120 17:22:53.916913 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71-scripts\") pod \"a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71\" (UID: \"a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71\") " Jan 20 17:22:53 crc kubenswrapper[4558]: I0120 17:22:53.917005 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71-dispersionconf\") pod \"a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71\" (UID: \"a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71\") " Jan 20 17:22:53 crc kubenswrapper[4558]: I0120 17:22:53.917053 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71-combined-ca-bundle\") pod \"a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71\" (UID: \"a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71\") " Jan 20 17:22:53 crc kubenswrapper[4558]: I0120 17:22:53.917085 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71-swiftconf\") pod \"a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71\" (UID: \"a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71\") " Jan 20 17:22:53 crc kubenswrapper[4558]: I0120 17:22:53.917129 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4kbv\" (UniqueName: \"kubernetes.io/projected/a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71-kube-api-access-d4kbv\") pod \"a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71\" (UID: \"a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71\") " Jan 20 17:22:53 crc kubenswrapper[4558]: I0120 17:22:53.917201 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71-ring-data-devices\") pod \"a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71\" (UID: \"a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71\") " Jan 20 17:22:53 crc kubenswrapper[4558]: I0120 17:22:53.918334 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71-etc-swift" (OuterVolumeSpecName: 
"etc-swift") pod "a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71" (UID: "a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:22:53 crc kubenswrapper[4558]: I0120 17:22:53.918679 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71-scripts" (OuterVolumeSpecName: "scripts") pod "a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71" (UID: "a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:22:53 crc kubenswrapper[4558]: I0120 17:22:53.919064 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:22:53 crc kubenswrapper[4558]: I0120 17:22:53.919666 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71" (UID: "a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:22:53 crc kubenswrapper[4558]: I0120 17:22:53.920579 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:22:53 crc kubenswrapper[4558]: I0120 17:22:53.923087 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovnnorthd-scripts" Jan 20 17:22:53 crc kubenswrapper[4558]: I0120 17:22:53.923640 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ovnnorthd-ovndbs" Jan 20 17:22:53 crc kubenswrapper[4558]: I0120 17:22:53.923876 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71" (UID: "a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:22:53 crc kubenswrapper[4558]: I0120 17:22:53.923903 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovnnorthd-config" Jan 20 17:22:53 crc kubenswrapper[4558]: I0120 17:22:53.924062 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ovnnorthd-ovnnorthd-dockercfg-wkprl" Jan 20 17:22:53 crc kubenswrapper[4558]: I0120 17:22:53.925097 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71" (UID: "a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:22:53 crc kubenswrapper[4558]: I0120 17:22:53.926222 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71-kube-api-access-d4kbv" (OuterVolumeSpecName: "kube-api-access-d4kbv") pod "a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71" (UID: "a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71"). InnerVolumeSpecName "kube-api-access-d4kbv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:22:53 crc kubenswrapper[4558]: I0120 17:22:53.926333 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71" (UID: "a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:22:53 crc kubenswrapper[4558]: I0120 17:22:53.926497 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:22:54 crc kubenswrapper[4558]: I0120 17:22:54.019458 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/c08cb973-6eb3-414d-b127-fc146b0fb1f2-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"c08cb973-6eb3-414d-b127-fc146b0fb1f2\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:22:54 crc kubenswrapper[4558]: I0120 17:22:54.019523 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xbtgw\" (UniqueName: \"kubernetes.io/projected/c08cb973-6eb3-414d-b127-fc146b0fb1f2-kube-api-access-xbtgw\") pod \"ovn-northd-0\" (UID: \"c08cb973-6eb3-414d-b127-fc146b0fb1f2\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:22:54 crc kubenswrapper[4558]: I0120 17:22:54.019607 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/c08cb973-6eb3-414d-b127-fc146b0fb1f2-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"c08cb973-6eb3-414d-b127-fc146b0fb1f2\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:22:54 crc kubenswrapper[4558]: I0120 17:22:54.019634 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c08cb973-6eb3-414d-b127-fc146b0fb1f2-scripts\") pod \"ovn-northd-0\" (UID: \"c08cb973-6eb3-414d-b127-fc146b0fb1f2\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:22:54 crc kubenswrapper[4558]: I0120 17:22:54.019718 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c08cb973-6eb3-414d-b127-fc146b0fb1f2-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"c08cb973-6eb3-414d-b127-fc146b0fb1f2\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:22:54 crc kubenswrapper[4558]: I0120 17:22:54.019771 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/c08cb973-6eb3-414d-b127-fc146b0fb1f2-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"c08cb973-6eb3-414d-b127-fc146b0fb1f2\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:22:54 crc kubenswrapper[4558]: I0120 17:22:54.019840 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c08cb973-6eb3-414d-b127-fc146b0fb1f2-config\") pod \"ovn-northd-0\" (UID: \"c08cb973-6eb3-414d-b127-fc146b0fb1f2\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:22:54 crc kubenswrapper[4558]: I0120 17:22:54.020094 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4kbv\" (UniqueName: 
\"kubernetes.io/projected/a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71-kube-api-access-d4kbv\") on node \"crc\" DevicePath \"\"" Jan 20 17:22:54 crc kubenswrapper[4558]: I0120 17:22:54.020132 4558 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71-ring-data-devices\") on node \"crc\" DevicePath \"\"" Jan 20 17:22:54 crc kubenswrapper[4558]: I0120 17:22:54.020147 4558 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 20 17:22:54 crc kubenswrapper[4558]: I0120 17:22:54.020158 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:22:54 crc kubenswrapper[4558]: I0120 17:22:54.020187 4558 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71-dispersionconf\") on node \"crc\" DevicePath \"\"" Jan 20 17:22:54 crc kubenswrapper[4558]: I0120 17:22:54.020199 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:22:54 crc kubenswrapper[4558]: I0120 17:22:54.020209 4558 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71-swiftconf\") on node \"crc\" DevicePath \"\"" Jan 20 17:22:54 crc kubenswrapper[4558]: I0120 17:22:54.121317 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/c08cb973-6eb3-414d-b127-fc146b0fb1f2-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"c08cb973-6eb3-414d-b127-fc146b0fb1f2\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:22:54 crc kubenswrapper[4558]: I0120 17:22:54.121364 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c08cb973-6eb3-414d-b127-fc146b0fb1f2-scripts\") pod \"ovn-northd-0\" (UID: \"c08cb973-6eb3-414d-b127-fc146b0fb1f2\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:22:54 crc kubenswrapper[4558]: I0120 17:22:54.121389 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c08cb973-6eb3-414d-b127-fc146b0fb1f2-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"c08cb973-6eb3-414d-b127-fc146b0fb1f2\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:22:54 crc kubenswrapper[4558]: I0120 17:22:54.121420 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/c08cb973-6eb3-414d-b127-fc146b0fb1f2-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"c08cb973-6eb3-414d-b127-fc146b0fb1f2\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:22:54 crc kubenswrapper[4558]: I0120 17:22:54.121462 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c08cb973-6eb3-414d-b127-fc146b0fb1f2-config\") pod \"ovn-northd-0\" (UID: \"c08cb973-6eb3-414d-b127-fc146b0fb1f2\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:22:54 crc 
kubenswrapper[4558]: I0120 17:22:54.121508 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/c08cb973-6eb3-414d-b127-fc146b0fb1f2-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"c08cb973-6eb3-414d-b127-fc146b0fb1f2\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:22:54 crc kubenswrapper[4558]: I0120 17:22:54.121534 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xbtgw\" (UniqueName: \"kubernetes.io/projected/c08cb973-6eb3-414d-b127-fc146b0fb1f2-kube-api-access-xbtgw\") pod \"ovn-northd-0\" (UID: \"c08cb973-6eb3-414d-b127-fc146b0fb1f2\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:22:54 crc kubenswrapper[4558]: I0120 17:22:54.121802 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/c08cb973-6eb3-414d-b127-fc146b0fb1f2-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"c08cb973-6eb3-414d-b127-fc146b0fb1f2\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:22:54 crc kubenswrapper[4558]: I0120 17:22:54.122610 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c08cb973-6eb3-414d-b127-fc146b0fb1f2-config\") pod \"ovn-northd-0\" (UID: \"c08cb973-6eb3-414d-b127-fc146b0fb1f2\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:22:54 crc kubenswrapper[4558]: I0120 17:22:54.122677 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c08cb973-6eb3-414d-b127-fc146b0fb1f2-scripts\") pod \"ovn-northd-0\" (UID: \"c08cb973-6eb3-414d-b127-fc146b0fb1f2\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:22:54 crc kubenswrapper[4558]: I0120 17:22:54.132700 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/c08cb973-6eb3-414d-b127-fc146b0fb1f2-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"c08cb973-6eb3-414d-b127-fc146b0fb1f2\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:22:54 crc kubenswrapper[4558]: I0120 17:22:54.133135 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c08cb973-6eb3-414d-b127-fc146b0fb1f2-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"c08cb973-6eb3-414d-b127-fc146b0fb1f2\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:22:54 crc kubenswrapper[4558]: I0120 17:22:54.133274 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/c08cb973-6eb3-414d-b127-fc146b0fb1f2-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"c08cb973-6eb3-414d-b127-fc146b0fb1f2\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:22:54 crc kubenswrapper[4558]: I0120 17:22:54.136227 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xbtgw\" (UniqueName: \"kubernetes.io/projected/c08cb973-6eb3-414d-b127-fc146b0fb1f2-kube-api-access-xbtgw\") pod \"ovn-northd-0\" (UID: \"c08cb973-6eb3-414d-b127-fc146b0fb1f2\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:22:54 crc kubenswrapper[4558]: I0120 17:22:54.222199 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/cbbfff95-9f90-4be2-9ed7-0a1cbbe43542-etc-swift\") pod \"swift-storage-0\" (UID: 
\"cbbfff95-9f90-4be2-9ed7-0a1cbbe43542\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:22:54 crc kubenswrapper[4558]: E0120 17:22:54.222445 4558 projected.go:288] Couldn't get configMap openstack-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 20 17:22:54 crc kubenswrapper[4558]: E0120 17:22:54.222467 4558 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Jan 20 17:22:54 crc kubenswrapper[4558]: E0120 17:22:54.222517 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/cbbfff95-9f90-4be2-9ed7-0a1cbbe43542-etc-swift podName:cbbfff95-9f90-4be2-9ed7-0a1cbbe43542 nodeName:}" failed. No retries permitted until 2026-01-20 17:22:56.222502227 +0000 UTC m=+2469.982840194 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/cbbfff95-9f90-4be2-9ed7-0a1cbbe43542-etc-swift") pod "swift-storage-0" (UID: "cbbfff95-9f90-4be2-9ed7-0a1cbbe43542") : configmap "swift-ring-files" not found Jan 20 17:22:54 crc kubenswrapper[4558]: I0120 17:22:54.266452 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:22:54 crc kubenswrapper[4558]: I0120 17:22:54.457175 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-db-create-thhtc"] Jan 20 17:22:54 crc kubenswrapper[4558]: I0120 17:22:54.461738 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-db-create-thhtc" Jan 20 17:22:54 crc kubenswrapper[4558]: I0120 17:22:54.468932 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-db-create-thhtc"] Jan 20 17:22:54 crc kubenswrapper[4558]: I0120 17:22:54.527939 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qmxvq\" (UniqueName: \"kubernetes.io/projected/2a434215-321c-46c8-940b-7341b5c2a8c1-kube-api-access-qmxvq\") pod \"glance-db-create-thhtc\" (UID: \"2a434215-321c-46c8-940b-7341b5c2a8c1\") " pod="openstack-kuttl-tests/glance-db-create-thhtc" Jan 20 17:22:54 crc kubenswrapper[4558]: I0120 17:22:54.528040 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2a434215-321c-46c8-940b-7341b5c2a8c1-operator-scripts\") pod \"glance-db-create-thhtc\" (UID: \"2a434215-321c-46c8-940b-7341b5c2a8c1\") " pod="openstack-kuttl-tests/glance-db-create-thhtc" Jan 20 17:22:54 crc kubenswrapper[4558]: I0120 17:22:54.564115 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-65cd-account-create-update-dr789"] Jan 20 17:22:54 crc kubenswrapper[4558]: I0120 17:22:54.565888 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-65cd-account-create-update-dr789" Jan 20 17:22:54 crc kubenswrapper[4558]: I0120 17:22:54.567926 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-db-secret" Jan 20 17:22:54 crc kubenswrapper[4558]: I0120 17:22:54.579399 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-65cd-account-create-update-dr789"] Jan 20 17:22:54 crc kubenswrapper[4558]: I0120 17:22:54.630144 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4swqk\" (UniqueName: \"kubernetes.io/projected/9c26bd33-30c8-48f1-b6f5-2a6cbe8cc92b-kube-api-access-4swqk\") pod \"glance-65cd-account-create-update-dr789\" (UID: \"9c26bd33-30c8-48f1-b6f5-2a6cbe8cc92b\") " pod="openstack-kuttl-tests/glance-65cd-account-create-update-dr789" Jan 20 17:22:54 crc kubenswrapper[4558]: I0120 17:22:54.630308 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9c26bd33-30c8-48f1-b6f5-2a6cbe8cc92b-operator-scripts\") pod \"glance-65cd-account-create-update-dr789\" (UID: \"9c26bd33-30c8-48f1-b6f5-2a6cbe8cc92b\") " pod="openstack-kuttl-tests/glance-65cd-account-create-update-dr789" Jan 20 17:22:54 crc kubenswrapper[4558]: I0120 17:22:54.630513 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qmxvq\" (UniqueName: \"kubernetes.io/projected/2a434215-321c-46c8-940b-7341b5c2a8c1-kube-api-access-qmxvq\") pod \"glance-db-create-thhtc\" (UID: \"2a434215-321c-46c8-940b-7341b5c2a8c1\") " pod="openstack-kuttl-tests/glance-db-create-thhtc" Jan 20 17:22:54 crc kubenswrapper[4558]: I0120 17:22:54.630601 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2a434215-321c-46c8-940b-7341b5c2a8c1-operator-scripts\") pod \"glance-db-create-thhtc\" (UID: \"2a434215-321c-46c8-940b-7341b5c2a8c1\") " pod="openstack-kuttl-tests/glance-db-create-thhtc" Jan 20 17:22:54 crc kubenswrapper[4558]: I0120 17:22:54.631311 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2a434215-321c-46c8-940b-7341b5c2a8c1-operator-scripts\") pod \"glance-db-create-thhtc\" (UID: \"2a434215-321c-46c8-940b-7341b5c2a8c1\") " pod="openstack-kuttl-tests/glance-db-create-thhtc" Jan 20 17:22:54 crc kubenswrapper[4558]: I0120 17:22:54.648707 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qmxvq\" (UniqueName: \"kubernetes.io/projected/2a434215-321c-46c8-940b-7341b5c2a8c1-kube-api-access-qmxvq\") pod \"glance-db-create-thhtc\" (UID: \"2a434215-321c-46c8-940b-7341b5c2a8c1\") " pod="openstack-kuttl-tests/glance-db-create-thhtc" Jan 20 17:22:54 crc kubenswrapper[4558]: I0120 17:22:54.659206 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:22:54 crc kubenswrapper[4558]: W0120 17:22:54.666230 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc08cb973_6eb3_414d_b127_fc146b0fb1f2.slice/crio-07666f608209fe81c7d6bd117e4853ca3c0442f2555801913598516c8e27281a WatchSource:0}: Error finding container 07666f608209fe81c7d6bd117e4853ca3c0442f2555801913598516c8e27281a: Status 404 returned error can't find the 
container with id 07666f608209fe81c7d6bd117e4853ca3c0442f2555801913598516c8e27281a Jan 20 17:22:54 crc kubenswrapper[4558]: I0120 17:22:54.732128 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4swqk\" (UniqueName: \"kubernetes.io/projected/9c26bd33-30c8-48f1-b6f5-2a6cbe8cc92b-kube-api-access-4swqk\") pod \"glance-65cd-account-create-update-dr789\" (UID: \"9c26bd33-30c8-48f1-b6f5-2a6cbe8cc92b\") " pod="openstack-kuttl-tests/glance-65cd-account-create-update-dr789" Jan 20 17:22:54 crc kubenswrapper[4558]: I0120 17:22:54.732536 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9c26bd33-30c8-48f1-b6f5-2a6cbe8cc92b-operator-scripts\") pod \"glance-65cd-account-create-update-dr789\" (UID: \"9c26bd33-30c8-48f1-b6f5-2a6cbe8cc92b\") " pod="openstack-kuttl-tests/glance-65cd-account-create-update-dr789" Jan 20 17:22:54 crc kubenswrapper[4558]: I0120 17:22:54.733377 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9c26bd33-30c8-48f1-b6f5-2a6cbe8cc92b-operator-scripts\") pod \"glance-65cd-account-create-update-dr789\" (UID: \"9c26bd33-30c8-48f1-b6f5-2a6cbe8cc92b\") " pod="openstack-kuttl-tests/glance-65cd-account-create-update-dr789" Jan 20 17:22:54 crc kubenswrapper[4558]: I0120 17:22:54.749629 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4swqk\" (UniqueName: \"kubernetes.io/projected/9c26bd33-30c8-48f1-b6f5-2a6cbe8cc92b-kube-api-access-4swqk\") pod \"glance-65cd-account-create-update-dr789\" (UID: \"9c26bd33-30c8-48f1-b6f5-2a6cbe8cc92b\") " pod="openstack-kuttl-tests/glance-65cd-account-create-update-dr789" Jan 20 17:22:54 crc kubenswrapper[4558]: I0120 17:22:54.779821 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-db-create-thhtc" Jan 20 17:22:54 crc kubenswrapper[4558]: I0120 17:22:54.832286 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-ring-rebalance-xkgps" Jan 20 17:22:54 crc kubenswrapper[4558]: I0120 17:22:54.832905 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"c08cb973-6eb3-414d-b127-fc146b0fb1f2","Type":"ContainerStarted","Data":"07666f608209fe81c7d6bd117e4853ca3c0442f2555801913598516c8e27281a"} Jan 20 17:22:54 crc kubenswrapper[4558]: I0120 17:22:54.879876 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-65cd-account-create-update-dr789" Jan 20 17:22:54 crc kubenswrapper[4558]: I0120 17:22:54.899053 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-xkgps"] Jan 20 17:22:54 crc kubenswrapper[4558]: I0120 17:22:54.904321 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-xkgps"] Jan 20 17:22:55 crc kubenswrapper[4558]: I0120 17:22:55.215385 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-db-create-thhtc"] Jan 20 17:22:55 crc kubenswrapper[4558]: W0120 17:22:55.222896 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2a434215_321c_46c8_940b_7341b5c2a8c1.slice/crio-7d7e3825ef4b386ba934c154cab7cee919802f04dfab4821b131b9db6caf2efc WatchSource:0}: Error finding container 7d7e3825ef4b386ba934c154cab7cee919802f04dfab4821b131b9db6caf2efc: Status 404 returned error can't find the container with id 7d7e3825ef4b386ba934c154cab7cee919802f04dfab4821b131b9db6caf2efc Jan 20 17:22:55 crc kubenswrapper[4558]: I0120 17:22:55.350325 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-65cd-account-create-update-dr789"] Jan 20 17:22:55 crc kubenswrapper[4558]: W0120 17:22:55.358019 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9c26bd33_30c8_48f1_b6f5_2a6cbe8cc92b.slice/crio-b74e543e2bdab426abcd718e5d8b604aed80fcc50d87c423cc12ae22a46d0e2c WatchSource:0}: Error finding container b74e543e2bdab426abcd718e5d8b604aed80fcc50d87c423cc12ae22a46d0e2c: Status 404 returned error can't find the container with id b74e543e2bdab426abcd718e5d8b604aed80fcc50d87c423cc12ae22a46d0e2c Jan 20 17:22:55 crc kubenswrapper[4558]: I0120 17:22:55.843234 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"c08cb973-6eb3-414d-b127-fc146b0fb1f2","Type":"ContainerStarted","Data":"97eae6723cbae5007ecc1c7e159332138d8c92b6e1ffae4831a2b5b8e46b5a7e"} Jan 20 17:22:55 crc kubenswrapper[4558]: I0120 17:22:55.843302 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"c08cb973-6eb3-414d-b127-fc146b0fb1f2","Type":"ContainerStarted","Data":"bdc31a24aa03485670883d3ea8d3795dfe70b5c89196347668c72f2086bf2bea"} Jan 20 17:22:55 crc kubenswrapper[4558]: I0120 17:22:55.843588 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:22:55 crc kubenswrapper[4558]: I0120 17:22:55.846057 4558 generic.go:334] "Generic (PLEG): container finished" podID="9c26bd33-30c8-48f1-b6f5-2a6cbe8cc92b" containerID="2ce6a52b519e78433be47bb493bd9e81226e10f0796bf02066e39b30593b9275" exitCode=0 Jan 20 17:22:55 crc kubenswrapper[4558]: I0120 17:22:55.846154 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-65cd-account-create-update-dr789" event={"ID":"9c26bd33-30c8-48f1-b6f5-2a6cbe8cc92b","Type":"ContainerDied","Data":"2ce6a52b519e78433be47bb493bd9e81226e10f0796bf02066e39b30593b9275"} Jan 20 17:22:55 crc kubenswrapper[4558]: I0120 17:22:55.846230 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-65cd-account-create-update-dr789" 
event={"ID":"9c26bd33-30c8-48f1-b6f5-2a6cbe8cc92b","Type":"ContainerStarted","Data":"b74e543e2bdab426abcd718e5d8b604aed80fcc50d87c423cc12ae22a46d0e2c"} Jan 20 17:22:55 crc kubenswrapper[4558]: I0120 17:22:55.848788 4558 generic.go:334] "Generic (PLEG): container finished" podID="2a434215-321c-46c8-940b-7341b5c2a8c1" containerID="19d2eb91531f091cee0633f6be93c0f1072901e75d5c21b701c2426abfc50181" exitCode=0 Jan 20 17:22:55 crc kubenswrapper[4558]: I0120 17:22:55.848838 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-create-thhtc" event={"ID":"2a434215-321c-46c8-940b-7341b5c2a8c1","Type":"ContainerDied","Data":"19d2eb91531f091cee0633f6be93c0f1072901e75d5c21b701c2426abfc50181"} Jan 20 17:22:55 crc kubenswrapper[4558]: I0120 17:22:55.848859 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-create-thhtc" event={"ID":"2a434215-321c-46c8-940b-7341b5c2a8c1","Type":"ContainerStarted","Data":"7d7e3825ef4b386ba934c154cab7cee919802f04dfab4821b131b9db6caf2efc"} Jan 20 17:22:55 crc kubenswrapper[4558]: I0120 17:22:55.878291 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ovn-northd-0" podStartSLOduration=2.878278867 podStartE2EDuration="2.878278867s" podCreationTimestamp="2026-01-20 17:22:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:22:55.872949371 +0000 UTC m=+2469.633287339" watchObservedRunningTime="2026-01-20 17:22:55.878278867 +0000 UTC m=+2469.638616834" Jan 20 17:22:56 crc kubenswrapper[4558]: I0120 17:22:56.269296 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/cbbfff95-9f90-4be2-9ed7-0a1cbbe43542-etc-swift\") pod \"swift-storage-0\" (UID: \"cbbfff95-9f90-4be2-9ed7-0a1cbbe43542\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:22:56 crc kubenswrapper[4558]: E0120 17:22:56.269512 4558 projected.go:288] Couldn't get configMap openstack-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 20 17:22:56 crc kubenswrapper[4558]: E0120 17:22:56.269544 4558 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Jan 20 17:22:56 crc kubenswrapper[4558]: E0120 17:22:56.269603 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/cbbfff95-9f90-4be2-9ed7-0a1cbbe43542-etc-swift podName:cbbfff95-9f90-4be2-9ed7-0a1cbbe43542 nodeName:}" failed. No retries permitted until 2026-01-20 17:23:00.269585913 +0000 UTC m=+2474.029923880 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/cbbfff95-9f90-4be2-9ed7-0a1cbbe43542-etc-swift") pod "swift-storage-0" (UID: "cbbfff95-9f90-4be2-9ed7-0a1cbbe43542") : configmap "swift-ring-files" not found Jan 20 17:22:56 crc kubenswrapper[4558]: I0120 17:22:56.431194 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/root-account-create-update-5tvxx"] Jan 20 17:22:56 crc kubenswrapper[4558]: I0120 17:22:56.432765 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-5tvxx" Jan 20 17:22:56 crc kubenswrapper[4558]: I0120 17:22:56.436068 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"openstack-mariadb-root-db-secret" Jan 20 17:22:56 crc kubenswrapper[4558]: I0120 17:22:56.450125 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-5tvxx"] Jan 20 17:22:56 crc kubenswrapper[4558]: I0120 17:22:56.477919 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8c6a7f55-6c04-40fa-89f1-083b382e8b07-operator-scripts\") pod \"root-account-create-update-5tvxx\" (UID: \"8c6a7f55-6c04-40fa-89f1-083b382e8b07\") " pod="openstack-kuttl-tests/root-account-create-update-5tvxx" Jan 20 17:22:56 crc kubenswrapper[4558]: I0120 17:22:56.478356 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ssc76\" (UniqueName: \"kubernetes.io/projected/8c6a7f55-6c04-40fa-89f1-083b382e8b07-kube-api-access-ssc76\") pod \"root-account-create-update-5tvxx\" (UID: \"8c6a7f55-6c04-40fa-89f1-083b382e8b07\") " pod="openstack-kuttl-tests/root-account-create-update-5tvxx" Jan 20 17:22:56 crc kubenswrapper[4558]: I0120 17:22:56.576792 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71" path="/var/lib/kubelet/pods/a02208fb-7a91-4fdb-b4c6-fa78fb9ebf71/volumes" Jan 20 17:22:56 crc kubenswrapper[4558]: I0120 17:22:56.580641 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8c6a7f55-6c04-40fa-89f1-083b382e8b07-operator-scripts\") pod \"root-account-create-update-5tvxx\" (UID: \"8c6a7f55-6c04-40fa-89f1-083b382e8b07\") " pod="openstack-kuttl-tests/root-account-create-update-5tvxx" Jan 20 17:22:56 crc kubenswrapper[4558]: I0120 17:22:56.581293 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ssc76\" (UniqueName: \"kubernetes.io/projected/8c6a7f55-6c04-40fa-89f1-083b382e8b07-kube-api-access-ssc76\") pod \"root-account-create-update-5tvxx\" (UID: \"8c6a7f55-6c04-40fa-89f1-083b382e8b07\") " pod="openstack-kuttl-tests/root-account-create-update-5tvxx" Jan 20 17:22:56 crc kubenswrapper[4558]: I0120 17:22:56.581537 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8c6a7f55-6c04-40fa-89f1-083b382e8b07-operator-scripts\") pod \"root-account-create-update-5tvxx\" (UID: \"8c6a7f55-6c04-40fa-89f1-083b382e8b07\") " pod="openstack-kuttl-tests/root-account-create-update-5tvxx" Jan 20 17:22:56 crc kubenswrapper[4558]: I0120 17:22:56.603106 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ssc76\" (UniqueName: \"kubernetes.io/projected/8c6a7f55-6c04-40fa-89f1-083b382e8b07-kube-api-access-ssc76\") pod \"root-account-create-update-5tvxx\" (UID: \"8c6a7f55-6c04-40fa-89f1-083b382e8b07\") " pod="openstack-kuttl-tests/root-account-create-update-5tvxx" Jan 20 17:22:56 crc kubenswrapper[4558]: I0120 17:22:56.753927 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-5tvxx" Jan 20 17:22:57 crc kubenswrapper[4558]: I0120 17:22:57.183182 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-5tvxx"] Jan 20 17:22:57 crc kubenswrapper[4558]: I0120 17:22:57.359817 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-65cd-account-create-update-dr789" Jan 20 17:22:57 crc kubenswrapper[4558]: I0120 17:22:57.391914 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-db-create-thhtc" Jan 20 17:22:57 crc kubenswrapper[4558]: I0120 17:22:57.407795 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9c26bd33-30c8-48f1-b6f5-2a6cbe8cc92b-operator-scripts\") pod \"9c26bd33-30c8-48f1-b6f5-2a6cbe8cc92b\" (UID: \"9c26bd33-30c8-48f1-b6f5-2a6cbe8cc92b\") " Jan 20 17:22:57 crc kubenswrapper[4558]: I0120 17:22:57.407863 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4swqk\" (UniqueName: \"kubernetes.io/projected/9c26bd33-30c8-48f1-b6f5-2a6cbe8cc92b-kube-api-access-4swqk\") pod \"9c26bd33-30c8-48f1-b6f5-2a6cbe8cc92b\" (UID: \"9c26bd33-30c8-48f1-b6f5-2a6cbe8cc92b\") " Jan 20 17:22:57 crc kubenswrapper[4558]: I0120 17:22:57.409320 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9c26bd33-30c8-48f1-b6f5-2a6cbe8cc92b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "9c26bd33-30c8-48f1-b6f5-2a6cbe8cc92b" (UID: "9c26bd33-30c8-48f1-b6f5-2a6cbe8cc92b"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:22:57 crc kubenswrapper[4558]: I0120 17:22:57.415365 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9c26bd33-30c8-48f1-b6f5-2a6cbe8cc92b-kube-api-access-4swqk" (OuterVolumeSpecName: "kube-api-access-4swqk") pod "9c26bd33-30c8-48f1-b6f5-2a6cbe8cc92b" (UID: "9c26bd33-30c8-48f1-b6f5-2a6cbe8cc92b"). InnerVolumeSpecName "kube-api-access-4swqk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:22:57 crc kubenswrapper[4558]: I0120 17:22:57.509776 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2a434215-321c-46c8-940b-7341b5c2a8c1-operator-scripts\") pod \"2a434215-321c-46c8-940b-7341b5c2a8c1\" (UID: \"2a434215-321c-46c8-940b-7341b5c2a8c1\") " Jan 20 17:22:57 crc kubenswrapper[4558]: I0120 17:22:57.509871 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qmxvq\" (UniqueName: \"kubernetes.io/projected/2a434215-321c-46c8-940b-7341b5c2a8c1-kube-api-access-qmxvq\") pod \"2a434215-321c-46c8-940b-7341b5c2a8c1\" (UID: \"2a434215-321c-46c8-940b-7341b5c2a8c1\") " Jan 20 17:22:57 crc kubenswrapper[4558]: I0120 17:22:57.510278 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2a434215-321c-46c8-940b-7341b5c2a8c1-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "2a434215-321c-46c8-940b-7341b5c2a8c1" (UID: "2a434215-321c-46c8-940b-7341b5c2a8c1"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:22:57 crc kubenswrapper[4558]: I0120 17:22:57.510729 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2a434215-321c-46c8-940b-7341b5c2a8c1-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:22:57 crc kubenswrapper[4558]: I0120 17:22:57.510754 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9c26bd33-30c8-48f1-b6f5-2a6cbe8cc92b-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:22:57 crc kubenswrapper[4558]: I0120 17:22:57.510768 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4swqk\" (UniqueName: \"kubernetes.io/projected/9c26bd33-30c8-48f1-b6f5-2a6cbe8cc92b-kube-api-access-4swqk\") on node \"crc\" DevicePath \"\"" Jan 20 17:22:57 crc kubenswrapper[4558]: I0120 17:22:57.515346 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2a434215-321c-46c8-940b-7341b5c2a8c1-kube-api-access-qmxvq" (OuterVolumeSpecName: "kube-api-access-qmxvq") pod "2a434215-321c-46c8-940b-7341b5c2a8c1" (UID: "2a434215-321c-46c8-940b-7341b5c2a8c1"). InnerVolumeSpecName "kube-api-access-qmxvq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:22:57 crc kubenswrapper[4558]: I0120 17:22:57.611867 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qmxvq\" (UniqueName: \"kubernetes.io/projected/2a434215-321c-46c8-940b-7341b5c2a8c1-kube-api-access-qmxvq\") on node \"crc\" DevicePath \"\"" Jan 20 17:22:57 crc kubenswrapper[4558]: I0120 17:22:57.874243 4558 generic.go:334] "Generic (PLEG): container finished" podID="8c6a7f55-6c04-40fa-89f1-083b382e8b07" containerID="3ab396d4ddc6d93934c5b2cdd59f60c8abc4faa48b09f179bfba616f9732c273" exitCode=0 Jan 20 17:22:57 crc kubenswrapper[4558]: I0120 17:22:57.874309 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-5tvxx" event={"ID":"8c6a7f55-6c04-40fa-89f1-083b382e8b07","Type":"ContainerDied","Data":"3ab396d4ddc6d93934c5b2cdd59f60c8abc4faa48b09f179bfba616f9732c273"} Jan 20 17:22:57 crc kubenswrapper[4558]: I0120 17:22:57.874339 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-5tvxx" event={"ID":"8c6a7f55-6c04-40fa-89f1-083b382e8b07","Type":"ContainerStarted","Data":"7dcbd0fa78667e845315e3daf0e5df7824a9af0b1764a0964bb7b9310d76a55d"} Jan 20 17:22:57 crc kubenswrapper[4558]: I0120 17:22:57.876866 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-65cd-account-create-update-dr789" event={"ID":"9c26bd33-30c8-48f1-b6f5-2a6cbe8cc92b","Type":"ContainerDied","Data":"b74e543e2bdab426abcd718e5d8b604aed80fcc50d87c423cc12ae22a46d0e2c"} Jan 20 17:22:57 crc kubenswrapper[4558]: I0120 17:22:57.876893 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b74e543e2bdab426abcd718e5d8b604aed80fcc50d87c423cc12ae22a46d0e2c" Jan 20 17:22:57 crc kubenswrapper[4558]: I0120 17:22:57.876908 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-65cd-account-create-update-dr789" Jan 20 17:22:57 crc kubenswrapper[4558]: I0120 17:22:57.878635 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-create-thhtc" event={"ID":"2a434215-321c-46c8-940b-7341b5c2a8c1","Type":"ContainerDied","Data":"7d7e3825ef4b386ba934c154cab7cee919802f04dfab4821b131b9db6caf2efc"} Jan 20 17:22:57 crc kubenswrapper[4558]: I0120 17:22:57.878657 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-db-create-thhtc" Jan 20 17:22:57 crc kubenswrapper[4558]: I0120 17:22:57.878668 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7d7e3825ef4b386ba934c154cab7cee919802f04dfab4821b131b9db6caf2efc" Jan 20 17:22:58 crc kubenswrapper[4558]: I0120 17:22:58.937564 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-db-create-c6v64"] Jan 20 17:22:58 crc kubenswrapper[4558]: E0120 17:22:58.937882 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c26bd33-30c8-48f1-b6f5-2a6cbe8cc92b" containerName="mariadb-account-create-update" Jan 20 17:22:58 crc kubenswrapper[4558]: I0120 17:22:58.937896 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c26bd33-30c8-48f1-b6f5-2a6cbe8cc92b" containerName="mariadb-account-create-update" Jan 20 17:22:58 crc kubenswrapper[4558]: E0120 17:22:58.937932 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a434215-321c-46c8-940b-7341b5c2a8c1" containerName="mariadb-database-create" Jan 20 17:22:58 crc kubenswrapper[4558]: I0120 17:22:58.937938 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a434215-321c-46c8-940b-7341b5c2a8c1" containerName="mariadb-database-create" Jan 20 17:22:58 crc kubenswrapper[4558]: I0120 17:22:58.938082 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="2a434215-321c-46c8-940b-7341b5c2a8c1" containerName="mariadb-database-create" Jan 20 17:22:58 crc kubenswrapper[4558]: I0120 17:22:58.938091 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c26bd33-30c8-48f1-b6f5-2a6cbe8cc92b" containerName="mariadb-account-create-update" Jan 20 17:22:58 crc kubenswrapper[4558]: I0120 17:22:58.938613 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-db-create-c6v64" Jan 20 17:22:58 crc kubenswrapper[4558]: I0120 17:22:58.947661 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-db-create-c6v64"] Jan 20 17:22:59 crc kubenswrapper[4558]: I0120 17:22:59.038652 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9e043cfa-4b67-4f6c-a46b-626a8ab6d797-operator-scripts\") pod \"keystone-db-create-c6v64\" (UID: \"9e043cfa-4b67-4f6c-a46b-626a8ab6d797\") " pod="openstack-kuttl-tests/keystone-db-create-c6v64" Jan 20 17:22:59 crc kubenswrapper[4558]: I0120 17:22:59.038831 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5ps4m\" (UniqueName: \"kubernetes.io/projected/9e043cfa-4b67-4f6c-a46b-626a8ab6d797-kube-api-access-5ps4m\") pod \"keystone-db-create-c6v64\" (UID: \"9e043cfa-4b67-4f6c-a46b-626a8ab6d797\") " pod="openstack-kuttl-tests/keystone-db-create-c6v64" Jan 20 17:22:59 crc kubenswrapper[4558]: I0120 17:22:59.042865 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-64c1-account-create-update-h6gct"] Jan 20 17:22:59 crc kubenswrapper[4558]: I0120 17:22:59.045009 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-64c1-account-create-update-h6gct" Jan 20 17:22:59 crc kubenswrapper[4558]: I0120 17:22:59.048006 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-db-secret" Jan 20 17:22:59 crc kubenswrapper[4558]: I0120 17:22:59.050687 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-64c1-account-create-update-h6gct"] Jan 20 17:22:59 crc kubenswrapper[4558]: I0120 17:22:59.140873 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2f0bbcf9-0bf8-43ec-bd9c-220f67300878-operator-scripts\") pod \"keystone-64c1-account-create-update-h6gct\" (UID: \"2f0bbcf9-0bf8-43ec-bd9c-220f67300878\") " pod="openstack-kuttl-tests/keystone-64c1-account-create-update-h6gct" Jan 20 17:22:59 crc kubenswrapper[4558]: I0120 17:22:59.141229 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8cb4l\" (UniqueName: \"kubernetes.io/projected/2f0bbcf9-0bf8-43ec-bd9c-220f67300878-kube-api-access-8cb4l\") pod \"keystone-64c1-account-create-update-h6gct\" (UID: \"2f0bbcf9-0bf8-43ec-bd9c-220f67300878\") " pod="openstack-kuttl-tests/keystone-64c1-account-create-update-h6gct" Jan 20 17:22:59 crc kubenswrapper[4558]: I0120 17:22:59.141304 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9e043cfa-4b67-4f6c-a46b-626a8ab6d797-operator-scripts\") pod \"keystone-db-create-c6v64\" (UID: \"9e043cfa-4b67-4f6c-a46b-626a8ab6d797\") " pod="openstack-kuttl-tests/keystone-db-create-c6v64" Jan 20 17:22:59 crc kubenswrapper[4558]: I0120 17:22:59.141334 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5ps4m\" (UniqueName: \"kubernetes.io/projected/9e043cfa-4b67-4f6c-a46b-626a8ab6d797-kube-api-access-5ps4m\") pod \"keystone-db-create-c6v64\" (UID: \"9e043cfa-4b67-4f6c-a46b-626a8ab6d797\") " 
pod="openstack-kuttl-tests/keystone-db-create-c6v64" Jan 20 17:22:59 crc kubenswrapper[4558]: I0120 17:22:59.142476 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9e043cfa-4b67-4f6c-a46b-626a8ab6d797-operator-scripts\") pod \"keystone-db-create-c6v64\" (UID: \"9e043cfa-4b67-4f6c-a46b-626a8ab6d797\") " pod="openstack-kuttl-tests/keystone-db-create-c6v64" Jan 20 17:22:59 crc kubenswrapper[4558]: I0120 17:22:59.159545 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5ps4m\" (UniqueName: \"kubernetes.io/projected/9e043cfa-4b67-4f6c-a46b-626a8ab6d797-kube-api-access-5ps4m\") pod \"keystone-db-create-c6v64\" (UID: \"9e043cfa-4b67-4f6c-a46b-626a8ab6d797\") " pod="openstack-kuttl-tests/keystone-db-create-c6v64" Jan 20 17:22:59 crc kubenswrapper[4558]: I0120 17:22:59.246953 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2f0bbcf9-0bf8-43ec-bd9c-220f67300878-operator-scripts\") pod \"keystone-64c1-account-create-update-h6gct\" (UID: \"2f0bbcf9-0bf8-43ec-bd9c-220f67300878\") " pod="openstack-kuttl-tests/keystone-64c1-account-create-update-h6gct" Jan 20 17:22:59 crc kubenswrapper[4558]: I0120 17:22:59.247014 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8cb4l\" (UniqueName: \"kubernetes.io/projected/2f0bbcf9-0bf8-43ec-bd9c-220f67300878-kube-api-access-8cb4l\") pod \"keystone-64c1-account-create-update-h6gct\" (UID: \"2f0bbcf9-0bf8-43ec-bd9c-220f67300878\") " pod="openstack-kuttl-tests/keystone-64c1-account-create-update-h6gct" Jan 20 17:22:59 crc kubenswrapper[4558]: I0120 17:22:59.247452 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/placement-db-create-zk697"] Jan 20 17:22:59 crc kubenswrapper[4558]: I0120 17:22:59.247908 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2f0bbcf9-0bf8-43ec-bd9c-220f67300878-operator-scripts\") pod \"keystone-64c1-account-create-update-h6gct\" (UID: \"2f0bbcf9-0bf8-43ec-bd9c-220f67300878\") " pod="openstack-kuttl-tests/keystone-64c1-account-create-update-h6gct" Jan 20 17:22:59 crc kubenswrapper[4558]: I0120 17:22:59.248619 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-db-create-zk697" Jan 20 17:22:59 crc kubenswrapper[4558]: I0120 17:22:59.258506 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-db-create-zk697"] Jan 20 17:22:59 crc kubenswrapper[4558]: I0120 17:22:59.262115 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8cb4l\" (UniqueName: \"kubernetes.io/projected/2f0bbcf9-0bf8-43ec-bd9c-220f67300878-kube-api-access-8cb4l\") pod \"keystone-64c1-account-create-update-h6gct\" (UID: \"2f0bbcf9-0bf8-43ec-bd9c-220f67300878\") " pod="openstack-kuttl-tests/keystone-64c1-account-create-update-h6gct" Jan 20 17:22:59 crc kubenswrapper[4558]: I0120 17:22:59.265264 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-5tvxx" Jan 20 17:22:59 crc kubenswrapper[4558]: I0120 17:22:59.282747 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-db-create-c6v64" Jan 20 17:22:59 crc kubenswrapper[4558]: I0120 17:22:59.349375 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8c6a7f55-6c04-40fa-89f1-083b382e8b07-operator-scripts\") pod \"8c6a7f55-6c04-40fa-89f1-083b382e8b07\" (UID: \"8c6a7f55-6c04-40fa-89f1-083b382e8b07\") " Jan 20 17:22:59 crc kubenswrapper[4558]: I0120 17:22:59.349494 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ssc76\" (UniqueName: \"kubernetes.io/projected/8c6a7f55-6c04-40fa-89f1-083b382e8b07-kube-api-access-ssc76\") pod \"8c6a7f55-6c04-40fa-89f1-083b382e8b07\" (UID: \"8c6a7f55-6c04-40fa-89f1-083b382e8b07\") " Jan 20 17:22:59 crc kubenswrapper[4558]: I0120 17:22:59.350247 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ffd4b518-58dc-44f5-a3f9-614a77d06ca2-operator-scripts\") pod \"placement-db-create-zk697\" (UID: \"ffd4b518-58dc-44f5-a3f9-614a77d06ca2\") " pod="openstack-kuttl-tests/placement-db-create-zk697" Jan 20 17:22:59 crc kubenswrapper[4558]: I0120 17:22:59.350385 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v7jc9\" (UniqueName: \"kubernetes.io/projected/ffd4b518-58dc-44f5-a3f9-614a77d06ca2-kube-api-access-v7jc9\") pod \"placement-db-create-zk697\" (UID: \"ffd4b518-58dc-44f5-a3f9-614a77d06ca2\") " pod="openstack-kuttl-tests/placement-db-create-zk697" Jan 20 17:22:59 crc kubenswrapper[4558]: I0120 17:22:59.351329 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8c6a7f55-6c04-40fa-89f1-083b382e8b07-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "8c6a7f55-6c04-40fa-89f1-083b382e8b07" (UID: "8c6a7f55-6c04-40fa-89f1-083b382e8b07"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:22:59 crc kubenswrapper[4558]: I0120 17:22:59.353108 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/placement-7ab2-account-create-update-s556d"] Jan 20 17:22:59 crc kubenswrapper[4558]: I0120 17:22:59.354550 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8c6a7f55-6c04-40fa-89f1-083b382e8b07-kube-api-access-ssc76" (OuterVolumeSpecName: "kube-api-access-ssc76") pod "8c6a7f55-6c04-40fa-89f1-083b382e8b07" (UID: "8c6a7f55-6c04-40fa-89f1-083b382e8b07"). InnerVolumeSpecName "kube-api-access-ssc76". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:22:59 crc kubenswrapper[4558]: E0120 17:22:59.355121 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c6a7f55-6c04-40fa-89f1-083b382e8b07" containerName="mariadb-account-create-update" Jan 20 17:22:59 crc kubenswrapper[4558]: I0120 17:22:59.355147 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c6a7f55-6c04-40fa-89f1-083b382e8b07" containerName="mariadb-account-create-update" Jan 20 17:22:59 crc kubenswrapper[4558]: I0120 17:22:59.355347 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c6a7f55-6c04-40fa-89f1-083b382e8b07" containerName="mariadb-account-create-update" Jan 20 17:22:59 crc kubenswrapper[4558]: I0120 17:22:59.355911 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-7ab2-account-create-update-s556d" Jan 20 17:22:59 crc kubenswrapper[4558]: I0120 17:22:59.358369 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"placement-db-secret" Jan 20 17:22:59 crc kubenswrapper[4558]: I0120 17:22:59.374565 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-64c1-account-create-update-h6gct" Jan 20 17:22:59 crc kubenswrapper[4558]: I0120 17:22:59.375021 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-7ab2-account-create-update-s556d"] Jan 20 17:22:59 crc kubenswrapper[4558]: I0120 17:22:59.453634 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b7b28a1d-af79-4ed2-aa96-15e39e665994-operator-scripts\") pod \"placement-7ab2-account-create-update-s556d\" (UID: \"b7b28a1d-af79-4ed2-aa96-15e39e665994\") " pod="openstack-kuttl-tests/placement-7ab2-account-create-update-s556d" Jan 20 17:22:59 crc kubenswrapper[4558]: I0120 17:22:59.453786 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ffd4b518-58dc-44f5-a3f9-614a77d06ca2-operator-scripts\") pod \"placement-db-create-zk697\" (UID: \"ffd4b518-58dc-44f5-a3f9-614a77d06ca2\") " pod="openstack-kuttl-tests/placement-db-create-zk697" Jan 20 17:22:59 crc kubenswrapper[4558]: I0120 17:22:59.453881 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v7jc9\" (UniqueName: \"kubernetes.io/projected/ffd4b518-58dc-44f5-a3f9-614a77d06ca2-kube-api-access-v7jc9\") pod \"placement-db-create-zk697\" (UID: \"ffd4b518-58dc-44f5-a3f9-614a77d06ca2\") " pod="openstack-kuttl-tests/placement-db-create-zk697" Jan 20 17:22:59 crc kubenswrapper[4558]: I0120 17:22:59.453933 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wwc99\" (UniqueName: \"kubernetes.io/projected/b7b28a1d-af79-4ed2-aa96-15e39e665994-kube-api-access-wwc99\") pod \"placement-7ab2-account-create-update-s556d\" (UID: \"b7b28a1d-af79-4ed2-aa96-15e39e665994\") " pod="openstack-kuttl-tests/placement-7ab2-account-create-update-s556d" Jan 20 17:22:59 crc kubenswrapper[4558]: I0120 17:22:59.454029 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8c6a7f55-6c04-40fa-89f1-083b382e8b07-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:22:59 crc kubenswrapper[4558]: I0120 17:22:59.454043 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ssc76\" (UniqueName: \"kubernetes.io/projected/8c6a7f55-6c04-40fa-89f1-083b382e8b07-kube-api-access-ssc76\") on node \"crc\" DevicePath \"\"" Jan 20 17:22:59 crc kubenswrapper[4558]: I0120 17:22:59.454806 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ffd4b518-58dc-44f5-a3f9-614a77d06ca2-operator-scripts\") pod \"placement-db-create-zk697\" (UID: \"ffd4b518-58dc-44f5-a3f9-614a77d06ca2\") " pod="openstack-kuttl-tests/placement-db-create-zk697" Jan 20 17:22:59 crc kubenswrapper[4558]: I0120 17:22:59.470671 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v7jc9\" (UniqueName: 
\"kubernetes.io/projected/ffd4b518-58dc-44f5-a3f9-614a77d06ca2-kube-api-access-v7jc9\") pod \"placement-db-create-zk697\" (UID: \"ffd4b518-58dc-44f5-a3f9-614a77d06ca2\") " pod="openstack-kuttl-tests/placement-db-create-zk697" Jan 20 17:22:59 crc kubenswrapper[4558]: I0120 17:22:59.555915 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wwc99\" (UniqueName: \"kubernetes.io/projected/b7b28a1d-af79-4ed2-aa96-15e39e665994-kube-api-access-wwc99\") pod \"placement-7ab2-account-create-update-s556d\" (UID: \"b7b28a1d-af79-4ed2-aa96-15e39e665994\") " pod="openstack-kuttl-tests/placement-7ab2-account-create-update-s556d" Jan 20 17:22:59 crc kubenswrapper[4558]: I0120 17:22:59.556054 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b7b28a1d-af79-4ed2-aa96-15e39e665994-operator-scripts\") pod \"placement-7ab2-account-create-update-s556d\" (UID: \"b7b28a1d-af79-4ed2-aa96-15e39e665994\") " pod="openstack-kuttl-tests/placement-7ab2-account-create-update-s556d" Jan 20 17:22:59 crc kubenswrapper[4558]: I0120 17:22:59.557505 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b7b28a1d-af79-4ed2-aa96-15e39e665994-operator-scripts\") pod \"placement-7ab2-account-create-update-s556d\" (UID: \"b7b28a1d-af79-4ed2-aa96-15e39e665994\") " pod="openstack-kuttl-tests/placement-7ab2-account-create-update-s556d" Jan 20 17:22:59 crc kubenswrapper[4558]: I0120 17:22:59.576522 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wwc99\" (UniqueName: \"kubernetes.io/projected/b7b28a1d-af79-4ed2-aa96-15e39e665994-kube-api-access-wwc99\") pod \"placement-7ab2-account-create-update-s556d\" (UID: \"b7b28a1d-af79-4ed2-aa96-15e39e665994\") " pod="openstack-kuttl-tests/placement-7ab2-account-create-update-s556d" Jan 20 17:22:59 crc kubenswrapper[4558]: I0120 17:22:59.595007 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-db-create-zk697" Jan 20 17:22:59 crc kubenswrapper[4558]: I0120 17:22:59.679613 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-7ab2-account-create-update-s556d" Jan 20 17:22:59 crc kubenswrapper[4558]: I0120 17:22:59.696211 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-db-create-c6v64"] Jan 20 17:22:59 crc kubenswrapper[4558]: W0120 17:22:59.714820 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9e043cfa_4b67_4f6c_a46b_626a8ab6d797.slice/crio-49633c651a67741ead4514947f392ad0e0481812e594792ade8680adbddc892b WatchSource:0}: Error finding container 49633c651a67741ead4514947f392ad0e0481812e594792ade8680adbddc892b: Status 404 returned error can't find the container with id 49633c651a67741ead4514947f392ad0e0481812e594792ade8680adbddc892b Jan 20 17:22:59 crc kubenswrapper[4558]: I0120 17:22:59.797710 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-64c1-account-create-update-h6gct"] Jan 20 17:22:59 crc kubenswrapper[4558]: I0120 17:22:59.855988 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-db-sync-q78xq"] Jan 20 17:22:59 crc kubenswrapper[4558]: I0120 17:22:59.857573 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-db-sync-q78xq" Jan 20 17:22:59 crc kubenswrapper[4558]: I0120 17:22:59.860632 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-config-data" Jan 20 17:22:59 crc kubenswrapper[4558]: I0120 17:22:59.861767 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-glance-dockercfg-np554" Jan 20 17:22:59 crc kubenswrapper[4558]: I0120 17:22:59.876485 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-db-sync-q78xq"] Jan 20 17:22:59 crc kubenswrapper[4558]: I0120 17:22:59.897208 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-5tvxx" event={"ID":"8c6a7f55-6c04-40fa-89f1-083b382e8b07","Type":"ContainerDied","Data":"7dcbd0fa78667e845315e3daf0e5df7824a9af0b1764a0964bb7b9310d76a55d"} Jan 20 17:22:59 crc kubenswrapper[4558]: I0120 17:22:59.897253 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7dcbd0fa78667e845315e3daf0e5df7824a9af0b1764a0964bb7b9310d76a55d" Jan 20 17:22:59 crc kubenswrapper[4558]: I0120 17:22:59.897316 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-5tvxx" Jan 20 17:22:59 crc kubenswrapper[4558]: I0120 17:22:59.899152 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-64c1-account-create-update-h6gct" event={"ID":"2f0bbcf9-0bf8-43ec-bd9c-220f67300878","Type":"ContainerStarted","Data":"3c292432f188cb4f06fd7c8bf654041b6c7916a50673ef99c0076ba457c893fc"} Jan 20 17:22:59 crc kubenswrapper[4558]: I0120 17:22:59.900563 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-create-c6v64" event={"ID":"9e043cfa-4b67-4f6c-a46b-626a8ab6d797","Type":"ContainerStarted","Data":"96a6b2b57e1f805d5d79906202bc8db8cfd9f39f17f625dbff6ed62ab2634b68"} Jan 20 17:22:59 crc kubenswrapper[4558]: I0120 17:22:59.900648 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-create-c6v64" event={"ID":"9e043cfa-4b67-4f6c-a46b-626a8ab6d797","Type":"ContainerStarted","Data":"49633c651a67741ead4514947f392ad0e0481812e594792ade8680adbddc892b"} Jan 20 17:22:59 crc kubenswrapper[4558]: I0120 17:22:59.912212 4558 generic.go:334] "Generic (PLEG): container finished" podID="90fa68b7-a335-4dba-b8f1-a9ab54c07786" containerID="f51941633ec981d615e3f81dbf2603d9890a9ed4aa990ee310afe05d0cc363d5" exitCode=0 Jan 20 17:22:59 crc kubenswrapper[4558]: I0120 17:22:59.912254 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-ring-rebalance-scr8s" event={"ID":"90fa68b7-a335-4dba-b8f1-a9ab54c07786","Type":"ContainerDied","Data":"f51941633ec981d615e3f81dbf2603d9890a9ed4aa990ee310afe05d0cc363d5"} Jan 20 17:22:59 crc kubenswrapper[4558]: I0120 17:22:59.927637 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/keystone-db-create-c6v64" podStartSLOduration=1.927617181 podStartE2EDuration="1.927617181s" podCreationTimestamp="2026-01-20 17:22:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:22:59.918740373 +0000 UTC m=+2473.679078341" watchObservedRunningTime="2026-01-20 17:22:59.927617181 +0000 UTC m=+2473.687955149" Jan 20 17:22:59 crc kubenswrapper[4558]: I0120 
17:22:59.970753 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d1f49886-b3f9-40f5-aa03-731030fbb9fc-combined-ca-bundle\") pod \"glance-db-sync-q78xq\" (UID: \"d1f49886-b3f9-40f5-aa03-731030fbb9fc\") " pod="openstack-kuttl-tests/glance-db-sync-q78xq" Jan 20 17:22:59 crc kubenswrapper[4558]: I0120 17:22:59.970812 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d1f49886-b3f9-40f5-aa03-731030fbb9fc-config-data\") pod \"glance-db-sync-q78xq\" (UID: \"d1f49886-b3f9-40f5-aa03-731030fbb9fc\") " pod="openstack-kuttl-tests/glance-db-sync-q78xq" Jan 20 17:22:59 crc kubenswrapper[4558]: I0120 17:22:59.970854 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/d1f49886-b3f9-40f5-aa03-731030fbb9fc-db-sync-config-data\") pod \"glance-db-sync-q78xq\" (UID: \"d1f49886-b3f9-40f5-aa03-731030fbb9fc\") " pod="openstack-kuttl-tests/glance-db-sync-q78xq" Jan 20 17:22:59 crc kubenswrapper[4558]: I0120 17:22:59.970931 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sn5s8\" (UniqueName: \"kubernetes.io/projected/d1f49886-b3f9-40f5-aa03-731030fbb9fc-kube-api-access-sn5s8\") pod \"glance-db-sync-q78xq\" (UID: \"d1f49886-b3f9-40f5-aa03-731030fbb9fc\") " pod="openstack-kuttl-tests/glance-db-sync-q78xq" Jan 20 17:22:59 crc kubenswrapper[4558]: I0120 17:22:59.994073 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-db-create-zk697"] Jan 20 17:22:59 crc kubenswrapper[4558]: W0120 17:22:59.995080 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podffd4b518_58dc_44f5_a3f9_614a77d06ca2.slice/crio-6a5c8103b32b77067cb040c4d3d37b64049d3ddb099601f517f9515bf78415ed WatchSource:0}: Error finding container 6a5c8103b32b77067cb040c4d3d37b64049d3ddb099601f517f9515bf78415ed: Status 404 returned error can't find the container with id 6a5c8103b32b77067cb040c4d3d37b64049d3ddb099601f517f9515bf78415ed Jan 20 17:23:00 crc kubenswrapper[4558]: I0120 17:23:00.071785 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sn5s8\" (UniqueName: \"kubernetes.io/projected/d1f49886-b3f9-40f5-aa03-731030fbb9fc-kube-api-access-sn5s8\") pod \"glance-db-sync-q78xq\" (UID: \"d1f49886-b3f9-40f5-aa03-731030fbb9fc\") " pod="openstack-kuttl-tests/glance-db-sync-q78xq" Jan 20 17:23:00 crc kubenswrapper[4558]: I0120 17:23:00.071909 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d1f49886-b3f9-40f5-aa03-731030fbb9fc-combined-ca-bundle\") pod \"glance-db-sync-q78xq\" (UID: \"d1f49886-b3f9-40f5-aa03-731030fbb9fc\") " pod="openstack-kuttl-tests/glance-db-sync-q78xq" Jan 20 17:23:00 crc kubenswrapper[4558]: I0120 17:23:00.071971 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d1f49886-b3f9-40f5-aa03-731030fbb9fc-config-data\") pod \"glance-db-sync-q78xq\" (UID: \"d1f49886-b3f9-40f5-aa03-731030fbb9fc\") " pod="openstack-kuttl-tests/glance-db-sync-q78xq" Jan 20 17:23:00 crc kubenswrapper[4558]: I0120 17:23:00.072707 4558 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/d1f49886-b3f9-40f5-aa03-731030fbb9fc-db-sync-config-data\") pod \"glance-db-sync-q78xq\" (UID: \"d1f49886-b3f9-40f5-aa03-731030fbb9fc\") " pod="openstack-kuttl-tests/glance-db-sync-q78xq" Jan 20 17:23:00 crc kubenswrapper[4558]: I0120 17:23:00.077079 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d1f49886-b3f9-40f5-aa03-731030fbb9fc-config-data\") pod \"glance-db-sync-q78xq\" (UID: \"d1f49886-b3f9-40f5-aa03-731030fbb9fc\") " pod="openstack-kuttl-tests/glance-db-sync-q78xq" Jan 20 17:23:00 crc kubenswrapper[4558]: I0120 17:23:00.079229 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/d1f49886-b3f9-40f5-aa03-731030fbb9fc-db-sync-config-data\") pod \"glance-db-sync-q78xq\" (UID: \"d1f49886-b3f9-40f5-aa03-731030fbb9fc\") " pod="openstack-kuttl-tests/glance-db-sync-q78xq" Jan 20 17:23:00 crc kubenswrapper[4558]: I0120 17:23:00.079704 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d1f49886-b3f9-40f5-aa03-731030fbb9fc-combined-ca-bundle\") pod \"glance-db-sync-q78xq\" (UID: \"d1f49886-b3f9-40f5-aa03-731030fbb9fc\") " pod="openstack-kuttl-tests/glance-db-sync-q78xq" Jan 20 17:23:00 crc kubenswrapper[4558]: I0120 17:23:00.086553 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sn5s8\" (UniqueName: \"kubernetes.io/projected/d1f49886-b3f9-40f5-aa03-731030fbb9fc-kube-api-access-sn5s8\") pod \"glance-db-sync-q78xq\" (UID: \"d1f49886-b3f9-40f5-aa03-731030fbb9fc\") " pod="openstack-kuttl-tests/glance-db-sync-q78xq" Jan 20 17:23:00 crc kubenswrapper[4558]: I0120 17:23:00.134310 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-7ab2-account-create-update-s556d"] Jan 20 17:23:00 crc kubenswrapper[4558]: W0120 17:23:00.196600 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb7b28a1d_af79_4ed2_aa96_15e39e665994.slice/crio-8c2817d4e397a4a0bd90481ec10a77e931321440eaec4f0e0553b5b581998fea WatchSource:0}: Error finding container 8c2817d4e397a4a0bd90481ec10a77e931321440eaec4f0e0553b5b581998fea: Status 404 returned error can't find the container with id 8c2817d4e397a4a0bd90481ec10a77e931321440eaec4f0e0553b5b581998fea Jan 20 17:23:00 crc kubenswrapper[4558]: I0120 17:23:00.205939 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-db-sync-q78xq" Jan 20 17:23:00 crc kubenswrapper[4558]: I0120 17:23:00.277529 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/cbbfff95-9f90-4be2-9ed7-0a1cbbe43542-etc-swift\") pod \"swift-storage-0\" (UID: \"cbbfff95-9f90-4be2-9ed7-0a1cbbe43542\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:23:00 crc kubenswrapper[4558]: I0120 17:23:00.283105 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/cbbfff95-9f90-4be2-9ed7-0a1cbbe43542-etc-swift\") pod \"swift-storage-0\" (UID: \"cbbfff95-9f90-4be2-9ed7-0a1cbbe43542\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:23:00 crc kubenswrapper[4558]: I0120 17:23:00.342573 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:23:00 crc kubenswrapper[4558]: I0120 17:23:00.631527 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-db-sync-q78xq"] Jan 20 17:23:00 crc kubenswrapper[4558]: W0120 17:23:00.634248 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd1f49886_b3f9_40f5_aa03_731030fbb9fc.slice/crio-716d057963f462bc5aab035efc332d93009fbf95ead09ed95c82556362bbc404 WatchSource:0}: Error finding container 716d057963f462bc5aab035efc332d93009fbf95ead09ed95c82556362bbc404: Status 404 returned error can't find the container with id 716d057963f462bc5aab035efc332d93009fbf95ead09ed95c82556362bbc404 Jan 20 17:23:00 crc kubenswrapper[4558]: I0120 17:23:00.773136 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-storage-0"] Jan 20 17:23:00 crc kubenswrapper[4558]: W0120 17:23:00.776358 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcbbfff95_9f90_4be2_9ed7_0a1cbbe43542.slice/crio-0381c18e7f0d5c5edd126013c4674e47264581b209587bb0ec30b7d737e157c2 WatchSource:0}: Error finding container 0381c18e7f0d5c5edd126013c4674e47264581b209587bb0ec30b7d737e157c2: Status 404 returned error can't find the container with id 0381c18e7f0d5c5edd126013c4674e47264581b209587bb0ec30b7d737e157c2 Jan 20 17:23:00 crc kubenswrapper[4558]: I0120 17:23:00.927809 4558 generic.go:334] "Generic (PLEG): container finished" podID="b7b28a1d-af79-4ed2-aa96-15e39e665994" containerID="d4528f4868b3df3d0078aaff6c6ad31ee69b350696bbce4aef20138bc1d0e447" exitCode=0 Jan 20 17:23:00 crc kubenswrapper[4558]: I0120 17:23:00.927916 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-7ab2-account-create-update-s556d" event={"ID":"b7b28a1d-af79-4ed2-aa96-15e39e665994","Type":"ContainerDied","Data":"d4528f4868b3df3d0078aaff6c6ad31ee69b350696bbce4aef20138bc1d0e447"} Jan 20 17:23:00 crc kubenswrapper[4558]: I0120 17:23:00.927957 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-7ab2-account-create-update-s556d" event={"ID":"b7b28a1d-af79-4ed2-aa96-15e39e665994","Type":"ContainerStarted","Data":"8c2817d4e397a4a0bd90481ec10a77e931321440eaec4f0e0553b5b581998fea"} Jan 20 17:23:00 crc kubenswrapper[4558]: I0120 17:23:00.931072 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" 
event={"ID":"cbbfff95-9f90-4be2-9ed7-0a1cbbe43542","Type":"ContainerStarted","Data":"e6440af1d01d3d7e989418e06350468870193b38937d2ae69a49ff7a72d20bf2"} Jan 20 17:23:00 crc kubenswrapper[4558]: I0120 17:23:00.931106 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"cbbfff95-9f90-4be2-9ed7-0a1cbbe43542","Type":"ContainerStarted","Data":"0381c18e7f0d5c5edd126013c4674e47264581b209587bb0ec30b7d737e157c2"} Jan 20 17:23:00 crc kubenswrapper[4558]: I0120 17:23:00.933369 4558 generic.go:334] "Generic (PLEG): container finished" podID="9e043cfa-4b67-4f6c-a46b-626a8ab6d797" containerID="96a6b2b57e1f805d5d79906202bc8db8cfd9f39f17f625dbff6ed62ab2634b68" exitCode=0 Jan 20 17:23:00 crc kubenswrapper[4558]: I0120 17:23:00.933425 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-create-c6v64" event={"ID":"9e043cfa-4b67-4f6c-a46b-626a8ab6d797","Type":"ContainerDied","Data":"96a6b2b57e1f805d5d79906202bc8db8cfd9f39f17f625dbff6ed62ab2634b68"} Jan 20 17:23:00 crc kubenswrapper[4558]: I0120 17:23:00.935735 4558 generic.go:334] "Generic (PLEG): container finished" podID="ffd4b518-58dc-44f5-a3f9-614a77d06ca2" containerID="4ea077acc81e2f99881f558d59d1f75afc856a3ed13d145c17a81d4ad3d3ce4c" exitCode=0 Jan 20 17:23:00 crc kubenswrapper[4558]: I0120 17:23:00.935820 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-create-zk697" event={"ID":"ffd4b518-58dc-44f5-a3f9-614a77d06ca2","Type":"ContainerDied","Data":"4ea077acc81e2f99881f558d59d1f75afc856a3ed13d145c17a81d4ad3d3ce4c"} Jan 20 17:23:00 crc kubenswrapper[4558]: I0120 17:23:00.935847 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-create-zk697" event={"ID":"ffd4b518-58dc-44f5-a3f9-614a77d06ca2","Type":"ContainerStarted","Data":"6a5c8103b32b77067cb040c4d3d37b64049d3ddb099601f517f9515bf78415ed"} Jan 20 17:23:00 crc kubenswrapper[4558]: I0120 17:23:00.949449 4558 generic.go:334] "Generic (PLEG): container finished" podID="2f0bbcf9-0bf8-43ec-bd9c-220f67300878" containerID="888ebccf523afdd2da82f932d8088d8e28c16e7be4d09466d29dbfe4ed5235aa" exitCode=0 Jan 20 17:23:00 crc kubenswrapper[4558]: I0120 17:23:00.949536 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-64c1-account-create-update-h6gct" event={"ID":"2f0bbcf9-0bf8-43ec-bd9c-220f67300878","Type":"ContainerDied","Data":"888ebccf523afdd2da82f932d8088d8e28c16e7be4d09466d29dbfe4ed5235aa"} Jan 20 17:23:00 crc kubenswrapper[4558]: I0120 17:23:00.968736 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-sync-q78xq" event={"ID":"d1f49886-b3f9-40f5-aa03-731030fbb9fc","Type":"ContainerStarted","Data":"716d057963f462bc5aab035efc332d93009fbf95ead09ed95c82556362bbc404"} Jan 20 17:23:01 crc kubenswrapper[4558]: I0120 17:23:01.344021 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/swift-ring-rebalance-scr8s" Jan 20 17:23:01 crc kubenswrapper[4558]: I0120 17:23:01.397955 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/90fa68b7-a335-4dba-b8f1-a9ab54c07786-ring-data-devices\") pod \"90fa68b7-a335-4dba-b8f1-a9ab54c07786\" (UID: \"90fa68b7-a335-4dba-b8f1-a9ab54c07786\") " Jan 20 17:23:01 crc kubenswrapper[4558]: I0120 17:23:01.398016 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/90fa68b7-a335-4dba-b8f1-a9ab54c07786-etc-swift\") pod \"90fa68b7-a335-4dba-b8f1-a9ab54c07786\" (UID: \"90fa68b7-a335-4dba-b8f1-a9ab54c07786\") " Jan 20 17:23:01 crc kubenswrapper[4558]: I0120 17:23:01.398060 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/90fa68b7-a335-4dba-b8f1-a9ab54c07786-swiftconf\") pod \"90fa68b7-a335-4dba-b8f1-a9ab54c07786\" (UID: \"90fa68b7-a335-4dba-b8f1-a9ab54c07786\") " Jan 20 17:23:01 crc kubenswrapper[4558]: I0120 17:23:01.398138 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rzm6b\" (UniqueName: \"kubernetes.io/projected/90fa68b7-a335-4dba-b8f1-a9ab54c07786-kube-api-access-rzm6b\") pod \"90fa68b7-a335-4dba-b8f1-a9ab54c07786\" (UID: \"90fa68b7-a335-4dba-b8f1-a9ab54c07786\") " Jan 20 17:23:01 crc kubenswrapper[4558]: I0120 17:23:01.398221 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/90fa68b7-a335-4dba-b8f1-a9ab54c07786-dispersionconf\") pod \"90fa68b7-a335-4dba-b8f1-a9ab54c07786\" (UID: \"90fa68b7-a335-4dba-b8f1-a9ab54c07786\") " Jan 20 17:23:01 crc kubenswrapper[4558]: I0120 17:23:01.398315 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90fa68b7-a335-4dba-b8f1-a9ab54c07786-combined-ca-bundle\") pod \"90fa68b7-a335-4dba-b8f1-a9ab54c07786\" (UID: \"90fa68b7-a335-4dba-b8f1-a9ab54c07786\") " Jan 20 17:23:01 crc kubenswrapper[4558]: I0120 17:23:01.398342 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/90fa68b7-a335-4dba-b8f1-a9ab54c07786-scripts\") pod \"90fa68b7-a335-4dba-b8f1-a9ab54c07786\" (UID: \"90fa68b7-a335-4dba-b8f1-a9ab54c07786\") " Jan 20 17:23:01 crc kubenswrapper[4558]: I0120 17:23:01.398714 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/90fa68b7-a335-4dba-b8f1-a9ab54c07786-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "90fa68b7-a335-4dba-b8f1-a9ab54c07786" (UID: "90fa68b7-a335-4dba-b8f1-a9ab54c07786"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:23:01 crc kubenswrapper[4558]: I0120 17:23:01.399764 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/90fa68b7-a335-4dba-b8f1-a9ab54c07786-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "90fa68b7-a335-4dba-b8f1-a9ab54c07786" (UID: "90fa68b7-a335-4dba-b8f1-a9ab54c07786"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:23:01 crc kubenswrapper[4558]: I0120 17:23:01.421319 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/90fa68b7-a335-4dba-b8f1-a9ab54c07786-kube-api-access-rzm6b" (OuterVolumeSpecName: "kube-api-access-rzm6b") pod "90fa68b7-a335-4dba-b8f1-a9ab54c07786" (UID: "90fa68b7-a335-4dba-b8f1-a9ab54c07786"). InnerVolumeSpecName "kube-api-access-rzm6b". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:23:01 crc kubenswrapper[4558]: I0120 17:23:01.422727 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/90fa68b7-a335-4dba-b8f1-a9ab54c07786-scripts" (OuterVolumeSpecName: "scripts") pod "90fa68b7-a335-4dba-b8f1-a9ab54c07786" (UID: "90fa68b7-a335-4dba-b8f1-a9ab54c07786"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:23:01 crc kubenswrapper[4558]: I0120 17:23:01.424729 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/90fa68b7-a335-4dba-b8f1-a9ab54c07786-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "90fa68b7-a335-4dba-b8f1-a9ab54c07786" (UID: "90fa68b7-a335-4dba-b8f1-a9ab54c07786"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:23:01 crc kubenswrapper[4558]: I0120 17:23:01.440291 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/90fa68b7-a335-4dba-b8f1-a9ab54c07786-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "90fa68b7-a335-4dba-b8f1-a9ab54c07786" (UID: "90fa68b7-a335-4dba-b8f1-a9ab54c07786"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:23:01 crc kubenswrapper[4558]: I0120 17:23:01.456280 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/90fa68b7-a335-4dba-b8f1-a9ab54c07786-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "90fa68b7-a335-4dba-b8f1-a9ab54c07786" (UID: "90fa68b7-a335-4dba-b8f1-a9ab54c07786"). InnerVolumeSpecName "swiftconf". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:23:01 crc kubenswrapper[4558]: I0120 17:23:01.514382 4558 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/90fa68b7-a335-4dba-b8f1-a9ab54c07786-ring-data-devices\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:01 crc kubenswrapper[4558]: I0120 17:23:01.514425 4558 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/90fa68b7-a335-4dba-b8f1-a9ab54c07786-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:01 crc kubenswrapper[4558]: I0120 17:23:01.514435 4558 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/90fa68b7-a335-4dba-b8f1-a9ab54c07786-swiftconf\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:01 crc kubenswrapper[4558]: I0120 17:23:01.514446 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rzm6b\" (UniqueName: \"kubernetes.io/projected/90fa68b7-a335-4dba-b8f1-a9ab54c07786-kube-api-access-rzm6b\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:01 crc kubenswrapper[4558]: I0120 17:23:01.514457 4558 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/90fa68b7-a335-4dba-b8f1-a9ab54c07786-dispersionconf\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:01 crc kubenswrapper[4558]: I0120 17:23:01.514466 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90fa68b7-a335-4dba-b8f1-a9ab54c07786-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:01 crc kubenswrapper[4558]: I0120 17:23:01.514474 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/90fa68b7-a335-4dba-b8f1-a9ab54c07786-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:01 crc kubenswrapper[4558]: I0120 17:23:01.984663 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"cbbfff95-9f90-4be2-9ed7-0a1cbbe43542","Type":"ContainerStarted","Data":"8dfb739a96b0388de7f0bca3f773a09ff87c39b3fc3e077a97ec5ad2b9469fe8"} Jan 20 17:23:01 crc kubenswrapper[4558]: I0120 17:23:01.984800 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"cbbfff95-9f90-4be2-9ed7-0a1cbbe43542","Type":"ContainerStarted","Data":"fac08b16dfb922f531dfe4ed284cdec4dfd765c94432b6d5e4237c6ab1063c5f"} Jan 20 17:23:01 crc kubenswrapper[4558]: I0120 17:23:01.984860 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"cbbfff95-9f90-4be2-9ed7-0a1cbbe43542","Type":"ContainerStarted","Data":"9f94ead147cad59ed2c937f2aace48390c6cf7ba843fc0898b176e2475d251b5"} Jan 20 17:23:01 crc kubenswrapper[4558]: I0120 17:23:01.984912 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"cbbfff95-9f90-4be2-9ed7-0a1cbbe43542","Type":"ContainerStarted","Data":"22cdeeaca39d5197ecff3f1352fe3fc3b073ce7ef0565d67854f14d8c72aeba3"} Jan 20 17:23:01 crc kubenswrapper[4558]: I0120 17:23:01.985002 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"cbbfff95-9f90-4be2-9ed7-0a1cbbe43542","Type":"ContainerStarted","Data":"33c92b2ec4e98e2de94cfd9d49d4bf673da1f8bed18bac8e9c5007c99a8dc48e"} Jan 20 17:23:01 crc kubenswrapper[4558]: I0120 17:23:01.985060 4558 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"cbbfff95-9f90-4be2-9ed7-0a1cbbe43542","Type":"ContainerStarted","Data":"36bfb57970925c866cd826c6992c1bb5eb23be201b3fee3a44013164f6bf6c45"} Jan 20 17:23:01 crc kubenswrapper[4558]: I0120 17:23:01.986580 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-ring-rebalance-scr8s" event={"ID":"90fa68b7-a335-4dba-b8f1-a9ab54c07786","Type":"ContainerDied","Data":"e79f71f85d20d1bcd9bd6e9db445113413ef99252cf1ca1a26f178fc595d2ea9"} Jan 20 17:23:01 crc kubenswrapper[4558]: I0120 17:23:01.986658 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e79f71f85d20d1bcd9bd6e9db445113413ef99252cf1ca1a26f178fc595d2ea9" Jan 20 17:23:01 crc kubenswrapper[4558]: I0120 17:23:01.986776 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-ring-rebalance-scr8s" Jan 20 17:23:01 crc kubenswrapper[4558]: I0120 17:23:01.991440 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-sync-q78xq" event={"ID":"d1f49886-b3f9-40f5-aa03-731030fbb9fc","Type":"ContainerStarted","Data":"51c4471e615f0278850f1aad746f15c73d4493f0a751b60dad03afce53c2f92a"} Jan 20 17:23:02 crc kubenswrapper[4558]: I0120 17:23:02.009945 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-db-sync-q78xq" podStartSLOduration=3.009934213 podStartE2EDuration="3.009934213s" podCreationTimestamp="2026-01-20 17:22:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:23:02.004522334 +0000 UTC m=+2475.764860300" watchObservedRunningTime="2026-01-20 17:23:02.009934213 +0000 UTC m=+2475.770272181" Jan 20 17:23:02 crc kubenswrapper[4558]: I0120 17:23:02.403481 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-db-create-c6v64" Jan 20 17:23:02 crc kubenswrapper[4558]: I0120 17:23:02.438789 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5ps4m\" (UniqueName: \"kubernetes.io/projected/9e043cfa-4b67-4f6c-a46b-626a8ab6d797-kube-api-access-5ps4m\") pod \"9e043cfa-4b67-4f6c-a46b-626a8ab6d797\" (UID: \"9e043cfa-4b67-4f6c-a46b-626a8ab6d797\") " Jan 20 17:23:02 crc kubenswrapper[4558]: I0120 17:23:02.438889 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9e043cfa-4b67-4f6c-a46b-626a8ab6d797-operator-scripts\") pod \"9e043cfa-4b67-4f6c-a46b-626a8ab6d797\" (UID: \"9e043cfa-4b67-4f6c-a46b-626a8ab6d797\") " Jan 20 17:23:02 crc kubenswrapper[4558]: I0120 17:23:02.439966 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9e043cfa-4b67-4f6c-a46b-626a8ab6d797-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "9e043cfa-4b67-4f6c-a46b-626a8ab6d797" (UID: "9e043cfa-4b67-4f6c-a46b-626a8ab6d797"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:23:02 crc kubenswrapper[4558]: I0120 17:23:02.450925 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9e043cfa-4b67-4f6c-a46b-626a8ab6d797-kube-api-access-5ps4m" (OuterVolumeSpecName: "kube-api-access-5ps4m") pod "9e043cfa-4b67-4f6c-a46b-626a8ab6d797" (UID: "9e043cfa-4b67-4f6c-a46b-626a8ab6d797"). InnerVolumeSpecName "kube-api-access-5ps4m". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:23:02 crc kubenswrapper[4558]: I0120 17:23:02.541116 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5ps4m\" (UniqueName: \"kubernetes.io/projected/9e043cfa-4b67-4f6c-a46b-626a8ab6d797-kube-api-access-5ps4m\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:02 crc kubenswrapper[4558]: I0120 17:23:02.541144 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9e043cfa-4b67-4f6c-a46b-626a8ab6d797-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:02 crc kubenswrapper[4558]: I0120 17:23:02.578374 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-db-create-zk697" Jan 20 17:23:02 crc kubenswrapper[4558]: I0120 17:23:02.586986 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-7ab2-account-create-update-s556d" Jan 20 17:23:02 crc kubenswrapper[4558]: I0120 17:23:02.597689 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-64c1-account-create-update-h6gct" Jan 20 17:23:02 crc kubenswrapper[4558]: I0120 17:23:02.642115 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b7b28a1d-af79-4ed2-aa96-15e39e665994-operator-scripts\") pod \"b7b28a1d-af79-4ed2-aa96-15e39e665994\" (UID: \"b7b28a1d-af79-4ed2-aa96-15e39e665994\") " Jan 20 17:23:02 crc kubenswrapper[4558]: I0120 17:23:02.642251 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8cb4l\" (UniqueName: \"kubernetes.io/projected/2f0bbcf9-0bf8-43ec-bd9c-220f67300878-kube-api-access-8cb4l\") pod \"2f0bbcf9-0bf8-43ec-bd9c-220f67300878\" (UID: \"2f0bbcf9-0bf8-43ec-bd9c-220f67300878\") " Jan 20 17:23:02 crc kubenswrapper[4558]: I0120 17:23:02.642303 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v7jc9\" (UniqueName: \"kubernetes.io/projected/ffd4b518-58dc-44f5-a3f9-614a77d06ca2-kube-api-access-v7jc9\") pod \"ffd4b518-58dc-44f5-a3f9-614a77d06ca2\" (UID: \"ffd4b518-58dc-44f5-a3f9-614a77d06ca2\") " Jan 20 17:23:02 crc kubenswrapper[4558]: I0120 17:23:02.642327 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wwc99\" (UniqueName: \"kubernetes.io/projected/b7b28a1d-af79-4ed2-aa96-15e39e665994-kube-api-access-wwc99\") pod \"b7b28a1d-af79-4ed2-aa96-15e39e665994\" (UID: \"b7b28a1d-af79-4ed2-aa96-15e39e665994\") " Jan 20 17:23:02 crc kubenswrapper[4558]: I0120 17:23:02.642474 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2f0bbcf9-0bf8-43ec-bd9c-220f67300878-operator-scripts\") pod \"2f0bbcf9-0bf8-43ec-bd9c-220f67300878\" (UID: \"2f0bbcf9-0bf8-43ec-bd9c-220f67300878\") " Jan 20 17:23:02 crc 
kubenswrapper[4558]: I0120 17:23:02.642537 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ffd4b518-58dc-44f5-a3f9-614a77d06ca2-operator-scripts\") pod \"ffd4b518-58dc-44f5-a3f9-614a77d06ca2\" (UID: \"ffd4b518-58dc-44f5-a3f9-614a77d06ca2\") " Jan 20 17:23:02 crc kubenswrapper[4558]: I0120 17:23:02.642593 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b7b28a1d-af79-4ed2-aa96-15e39e665994-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b7b28a1d-af79-4ed2-aa96-15e39e665994" (UID: "b7b28a1d-af79-4ed2-aa96-15e39e665994"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:23:02 crc kubenswrapper[4558]: I0120 17:23:02.642956 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b7b28a1d-af79-4ed2-aa96-15e39e665994-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:02 crc kubenswrapper[4558]: I0120 17:23:02.643398 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ffd4b518-58dc-44f5-a3f9-614a77d06ca2-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ffd4b518-58dc-44f5-a3f9-614a77d06ca2" (UID: "ffd4b518-58dc-44f5-a3f9-614a77d06ca2"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:23:02 crc kubenswrapper[4558]: I0120 17:23:02.643559 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2f0bbcf9-0bf8-43ec-bd9c-220f67300878-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "2f0bbcf9-0bf8-43ec-bd9c-220f67300878" (UID: "2f0bbcf9-0bf8-43ec-bd9c-220f67300878"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:23:02 crc kubenswrapper[4558]: I0120 17:23:02.645652 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b7b28a1d-af79-4ed2-aa96-15e39e665994-kube-api-access-wwc99" (OuterVolumeSpecName: "kube-api-access-wwc99") pod "b7b28a1d-af79-4ed2-aa96-15e39e665994" (UID: "b7b28a1d-af79-4ed2-aa96-15e39e665994"). InnerVolumeSpecName "kube-api-access-wwc99". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:23:02 crc kubenswrapper[4558]: I0120 17:23:02.646504 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2f0bbcf9-0bf8-43ec-bd9c-220f67300878-kube-api-access-8cb4l" (OuterVolumeSpecName: "kube-api-access-8cb4l") pod "2f0bbcf9-0bf8-43ec-bd9c-220f67300878" (UID: "2f0bbcf9-0bf8-43ec-bd9c-220f67300878"). InnerVolumeSpecName "kube-api-access-8cb4l". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:23:02 crc kubenswrapper[4558]: I0120 17:23:02.656240 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ffd4b518-58dc-44f5-a3f9-614a77d06ca2-kube-api-access-v7jc9" (OuterVolumeSpecName: "kube-api-access-v7jc9") pod "ffd4b518-58dc-44f5-a3f9-614a77d06ca2" (UID: "ffd4b518-58dc-44f5-a3f9-614a77d06ca2"). InnerVolumeSpecName "kube-api-access-v7jc9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:23:02 crc kubenswrapper[4558]: I0120 17:23:02.744883 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8cb4l\" (UniqueName: \"kubernetes.io/projected/2f0bbcf9-0bf8-43ec-bd9c-220f67300878-kube-api-access-8cb4l\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:02 crc kubenswrapper[4558]: I0120 17:23:02.745335 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v7jc9\" (UniqueName: \"kubernetes.io/projected/ffd4b518-58dc-44f5-a3f9-614a77d06ca2-kube-api-access-v7jc9\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:02 crc kubenswrapper[4558]: I0120 17:23:02.745350 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wwc99\" (UniqueName: \"kubernetes.io/projected/b7b28a1d-af79-4ed2-aa96-15e39e665994-kube-api-access-wwc99\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:02 crc kubenswrapper[4558]: I0120 17:23:02.745363 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2f0bbcf9-0bf8-43ec-bd9c-220f67300878-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:02 crc kubenswrapper[4558]: I0120 17:23:02.745375 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ffd4b518-58dc-44f5-a3f9-614a77d06ca2-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:02 crc kubenswrapper[4558]: I0120 17:23:02.771196 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-5tvxx"] Jan 20 17:23:02 crc kubenswrapper[4558]: I0120 17:23:02.782725 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-5tvxx"] Jan 20 17:23:03 crc kubenswrapper[4558]: I0120 17:23:03.006448 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-create-c6v64" event={"ID":"9e043cfa-4b67-4f6c-a46b-626a8ab6d797","Type":"ContainerDied","Data":"49633c651a67741ead4514947f392ad0e0481812e594792ade8680adbddc892b"} Jan 20 17:23:03 crc kubenswrapper[4558]: I0120 17:23:03.006524 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="49633c651a67741ead4514947f392ad0e0481812e594792ade8680adbddc892b" Jan 20 17:23:03 crc kubenswrapper[4558]: I0120 17:23:03.006472 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-db-create-c6v64" Jan 20 17:23:03 crc kubenswrapper[4558]: I0120 17:23:03.016219 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-create-zk697" event={"ID":"ffd4b518-58dc-44f5-a3f9-614a77d06ca2","Type":"ContainerDied","Data":"6a5c8103b32b77067cb040c4d3d37b64049d3ddb099601f517f9515bf78415ed"} Jan 20 17:23:03 crc kubenswrapper[4558]: I0120 17:23:03.016262 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6a5c8103b32b77067cb040c4d3d37b64049d3ddb099601f517f9515bf78415ed" Jan 20 17:23:03 crc kubenswrapper[4558]: I0120 17:23:03.016324 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-db-create-zk697" Jan 20 17:23:03 crc kubenswrapper[4558]: I0120 17:23:03.018143 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-64c1-account-create-update-h6gct" event={"ID":"2f0bbcf9-0bf8-43ec-bd9c-220f67300878","Type":"ContainerDied","Data":"3c292432f188cb4f06fd7c8bf654041b6c7916a50673ef99c0076ba457c893fc"} Jan 20 17:23:03 crc kubenswrapper[4558]: I0120 17:23:03.018195 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3c292432f188cb4f06fd7c8bf654041b6c7916a50673ef99c0076ba457c893fc" Jan 20 17:23:03 crc kubenswrapper[4558]: I0120 17:23:03.018240 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-64c1-account-create-update-h6gct" Jan 20 17:23:03 crc kubenswrapper[4558]: I0120 17:23:03.024301 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-7ab2-account-create-update-s556d" event={"ID":"b7b28a1d-af79-4ed2-aa96-15e39e665994","Type":"ContainerDied","Data":"8c2817d4e397a4a0bd90481ec10a77e931321440eaec4f0e0553b5b581998fea"} Jan 20 17:23:03 crc kubenswrapper[4558]: I0120 17:23:03.024344 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8c2817d4e397a4a0bd90481ec10a77e931321440eaec4f0e0553b5b581998fea" Jan 20 17:23:03 crc kubenswrapper[4558]: I0120 17:23:03.024403 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-7ab2-account-create-update-s556d" Jan 20 17:23:03 crc kubenswrapper[4558]: I0120 17:23:03.042475 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"cbbfff95-9f90-4be2-9ed7-0a1cbbe43542","Type":"ContainerStarted","Data":"9016dbbaf3d5e8bdb4e9556a7b608c196cc888571331abb5670be24de359a45a"} Jan 20 17:23:03 crc kubenswrapper[4558]: I0120 17:23:03.042514 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"cbbfff95-9f90-4be2-9ed7-0a1cbbe43542","Type":"ContainerStarted","Data":"68fbcc76c0e2dedc82e6715e0ec8b4d992e77e10e439a698ddd8d943bc08f443"} Jan 20 17:23:03 crc kubenswrapper[4558]: I0120 17:23:03.042528 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"cbbfff95-9f90-4be2-9ed7-0a1cbbe43542","Type":"ContainerStarted","Data":"50eea846d91051f2dd7105d2a67595fd30c64b90855e0b8b97a766b5024a4194"} Jan 20 17:23:03 crc kubenswrapper[4558]: I0120 17:23:03.042537 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"cbbfff95-9f90-4be2-9ed7-0a1cbbe43542","Type":"ContainerStarted","Data":"a9f1e3687db381bd17fdfb1b0e3aaf9c0fa91fd605b0d73ed9095c3bd3bd381f"} Jan 20 17:23:03 crc kubenswrapper[4558]: I0120 17:23:03.042545 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"cbbfff95-9f90-4be2-9ed7-0a1cbbe43542","Type":"ContainerStarted","Data":"dc5ce68e3517c196bdee8bba30197f747f505c80c7dfe2ac492a71348d08c4f8"} Jan 20 17:23:03 crc kubenswrapper[4558]: I0120 17:23:03.042554 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"cbbfff95-9f90-4be2-9ed7-0a1cbbe43542","Type":"ContainerStarted","Data":"f471fa142ace1139e497b71d2408b5de3eb8a81c1e52fc0f3e0b7abe127fa420"} Jan 20 17:23:03 crc kubenswrapper[4558]: I0120 
17:23:03.042561 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"cbbfff95-9f90-4be2-9ed7-0a1cbbe43542","Type":"ContainerStarted","Data":"7d429e35b6e23b8fe5da4b2ba40f03361d3954fbabd9c27927cee2dca39904f9"} Jan 20 17:23:04 crc kubenswrapper[4558]: I0120 17:23:04.056283 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"cbbfff95-9f90-4be2-9ed7-0a1cbbe43542","Type":"ContainerStarted","Data":"2b148f67806ea893dea736f70886bafe16751aa4932ab889670bd2755c1dad06"} Jan 20 17:23:04 crc kubenswrapper[4558]: I0120 17:23:04.100288 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/swift-storage-0" podStartSLOduration=13.100266793 podStartE2EDuration="13.100266793s" podCreationTimestamp="2026-01-20 17:22:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:23:04.082994353 +0000 UTC m=+2477.843332310" watchObservedRunningTime="2026-01-20 17:23:04.100266793 +0000 UTC m=+2477.860604761" Jan 20 17:23:04 crc kubenswrapper[4558]: I0120 17:23:04.201284 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-c5fc7f8f-s5hwr"] Jan 20 17:23:04 crc kubenswrapper[4558]: E0120 17:23:04.201639 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7b28a1d-af79-4ed2-aa96-15e39e665994" containerName="mariadb-account-create-update" Jan 20 17:23:04 crc kubenswrapper[4558]: I0120 17:23:04.201658 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7b28a1d-af79-4ed2-aa96-15e39e665994" containerName="mariadb-account-create-update" Jan 20 17:23:04 crc kubenswrapper[4558]: E0120 17:23:04.201676 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="90fa68b7-a335-4dba-b8f1-a9ab54c07786" containerName="swift-ring-rebalance" Jan 20 17:23:04 crc kubenswrapper[4558]: I0120 17:23:04.201690 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="90fa68b7-a335-4dba-b8f1-a9ab54c07786" containerName="swift-ring-rebalance" Jan 20 17:23:04 crc kubenswrapper[4558]: E0120 17:23:04.201699 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e043cfa-4b67-4f6c-a46b-626a8ab6d797" containerName="mariadb-database-create" Jan 20 17:23:04 crc kubenswrapper[4558]: I0120 17:23:04.201705 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e043cfa-4b67-4f6c-a46b-626a8ab6d797" containerName="mariadb-database-create" Jan 20 17:23:04 crc kubenswrapper[4558]: E0120 17:23:04.201717 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f0bbcf9-0bf8-43ec-bd9c-220f67300878" containerName="mariadb-account-create-update" Jan 20 17:23:04 crc kubenswrapper[4558]: I0120 17:23:04.201722 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f0bbcf9-0bf8-43ec-bd9c-220f67300878" containerName="mariadb-account-create-update" Jan 20 17:23:04 crc kubenswrapper[4558]: E0120 17:23:04.201734 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ffd4b518-58dc-44f5-a3f9-614a77d06ca2" containerName="mariadb-database-create" Jan 20 17:23:04 crc kubenswrapper[4558]: I0120 17:23:04.201740 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ffd4b518-58dc-44f5-a3f9-614a77d06ca2" containerName="mariadb-database-create" Jan 20 17:23:04 crc kubenswrapper[4558]: I0120 17:23:04.201920 4558 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="ffd4b518-58dc-44f5-a3f9-614a77d06ca2" containerName="mariadb-database-create" Jan 20 17:23:04 crc kubenswrapper[4558]: I0120 17:23:04.201931 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="90fa68b7-a335-4dba-b8f1-a9ab54c07786" containerName="swift-ring-rebalance" Jan 20 17:23:04 crc kubenswrapper[4558]: I0120 17:23:04.201947 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f0bbcf9-0bf8-43ec-bd9c-220f67300878" containerName="mariadb-account-create-update" Jan 20 17:23:04 crc kubenswrapper[4558]: I0120 17:23:04.201963 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e043cfa-4b67-4f6c-a46b-626a8ab6d797" containerName="mariadb-database-create" Jan 20 17:23:04 crc kubenswrapper[4558]: I0120 17:23:04.201971 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b7b28a1d-af79-4ed2-aa96-15e39e665994" containerName="mariadb-account-create-update" Jan 20 17:23:04 crc kubenswrapper[4558]: I0120 17:23:04.202960 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-c5fc7f8f-s5hwr" Jan 20 17:23:04 crc kubenswrapper[4558]: I0120 17:23:04.214965 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-c5fc7f8f-s5hwr"] Jan 20 17:23:04 crc kubenswrapper[4558]: I0120 17:23:04.217654 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"dns-swift-storage-0" Jan 20 17:23:04 crc kubenswrapper[4558]: I0120 17:23:04.274335 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e1484eaf-3b1f-4dfa-9413-dc02309dfbbf-dns-swift-storage-0\") pod \"dnsmasq-dnsmasq-c5fc7f8f-s5hwr\" (UID: \"e1484eaf-3b1f-4dfa-9413-dc02309dfbbf\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-c5fc7f8f-s5hwr" Jan 20 17:23:04 crc kubenswrapper[4558]: I0120 17:23:04.274614 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ltf6c\" (UniqueName: \"kubernetes.io/projected/e1484eaf-3b1f-4dfa-9413-dc02309dfbbf-kube-api-access-ltf6c\") pod \"dnsmasq-dnsmasq-c5fc7f8f-s5hwr\" (UID: \"e1484eaf-3b1f-4dfa-9413-dc02309dfbbf\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-c5fc7f8f-s5hwr" Jan 20 17:23:04 crc kubenswrapper[4558]: I0120 17:23:04.274743 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/e1484eaf-3b1f-4dfa-9413-dc02309dfbbf-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-c5fc7f8f-s5hwr\" (UID: \"e1484eaf-3b1f-4dfa-9413-dc02309dfbbf\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-c5fc7f8f-s5hwr" Jan 20 17:23:04 crc kubenswrapper[4558]: I0120 17:23:04.274970 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e1484eaf-3b1f-4dfa-9413-dc02309dfbbf-config\") pod \"dnsmasq-dnsmasq-c5fc7f8f-s5hwr\" (UID: \"e1484eaf-3b1f-4dfa-9413-dc02309dfbbf\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-c5fc7f8f-s5hwr" Jan 20 17:23:04 crc kubenswrapper[4558]: I0120 17:23:04.328353 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:23:04 crc kubenswrapper[4558]: I0120 17:23:04.376850 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/e1484eaf-3b1f-4dfa-9413-dc02309dfbbf-config\") pod \"dnsmasq-dnsmasq-c5fc7f8f-s5hwr\" (UID: \"e1484eaf-3b1f-4dfa-9413-dc02309dfbbf\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-c5fc7f8f-s5hwr" Jan 20 17:23:04 crc kubenswrapper[4558]: I0120 17:23:04.377009 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e1484eaf-3b1f-4dfa-9413-dc02309dfbbf-dns-swift-storage-0\") pod \"dnsmasq-dnsmasq-c5fc7f8f-s5hwr\" (UID: \"e1484eaf-3b1f-4dfa-9413-dc02309dfbbf\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-c5fc7f8f-s5hwr" Jan 20 17:23:04 crc kubenswrapper[4558]: I0120 17:23:04.377114 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ltf6c\" (UniqueName: \"kubernetes.io/projected/e1484eaf-3b1f-4dfa-9413-dc02309dfbbf-kube-api-access-ltf6c\") pod \"dnsmasq-dnsmasq-c5fc7f8f-s5hwr\" (UID: \"e1484eaf-3b1f-4dfa-9413-dc02309dfbbf\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-c5fc7f8f-s5hwr" Jan 20 17:23:04 crc kubenswrapper[4558]: I0120 17:23:04.377286 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/e1484eaf-3b1f-4dfa-9413-dc02309dfbbf-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-c5fc7f8f-s5hwr\" (UID: \"e1484eaf-3b1f-4dfa-9413-dc02309dfbbf\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-c5fc7f8f-s5hwr" Jan 20 17:23:04 crc kubenswrapper[4558]: I0120 17:23:04.378112 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e1484eaf-3b1f-4dfa-9413-dc02309dfbbf-config\") pod \"dnsmasq-dnsmasq-c5fc7f8f-s5hwr\" (UID: \"e1484eaf-3b1f-4dfa-9413-dc02309dfbbf\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-c5fc7f8f-s5hwr" Jan 20 17:23:04 crc kubenswrapper[4558]: I0120 17:23:04.378474 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/e1484eaf-3b1f-4dfa-9413-dc02309dfbbf-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-c5fc7f8f-s5hwr\" (UID: \"e1484eaf-3b1f-4dfa-9413-dc02309dfbbf\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-c5fc7f8f-s5hwr" Jan 20 17:23:04 crc kubenswrapper[4558]: I0120 17:23:04.378893 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e1484eaf-3b1f-4dfa-9413-dc02309dfbbf-dns-swift-storage-0\") pod \"dnsmasq-dnsmasq-c5fc7f8f-s5hwr\" (UID: \"e1484eaf-3b1f-4dfa-9413-dc02309dfbbf\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-c5fc7f8f-s5hwr" Jan 20 17:23:04 crc kubenswrapper[4558]: I0120 17:23:04.398620 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ltf6c\" (UniqueName: \"kubernetes.io/projected/e1484eaf-3b1f-4dfa-9413-dc02309dfbbf-kube-api-access-ltf6c\") pod \"dnsmasq-dnsmasq-c5fc7f8f-s5hwr\" (UID: \"e1484eaf-3b1f-4dfa-9413-dc02309dfbbf\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-c5fc7f8f-s5hwr" Jan 20 17:23:04 crc kubenswrapper[4558]: I0120 17:23:04.523247 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-c5fc7f8f-s5hwr" Jan 20 17:23:04 crc kubenswrapper[4558]: I0120 17:23:04.580980 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8c6a7f55-6c04-40fa-89f1-083b382e8b07" path="/var/lib/kubelet/pods/8c6a7f55-6c04-40fa-89f1-083b382e8b07/volumes" Jan 20 17:23:04 crc kubenswrapper[4558]: I0120 17:23:04.968447 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-c5fc7f8f-s5hwr"] Jan 20 17:23:04 crc kubenswrapper[4558]: W0120 17:23:04.974932 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode1484eaf_3b1f_4dfa_9413_dc02309dfbbf.slice/crio-99072e33c1b7ee8af93204acbc7321535e897fa11b45ecebcf70849083a4824d WatchSource:0}: Error finding container 99072e33c1b7ee8af93204acbc7321535e897fa11b45ecebcf70849083a4824d: Status 404 returned error can't find the container with id 99072e33c1b7ee8af93204acbc7321535e897fa11b45ecebcf70849083a4824d Jan 20 17:23:05 crc kubenswrapper[4558]: I0120 17:23:05.067021 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-c5fc7f8f-s5hwr" event={"ID":"e1484eaf-3b1f-4dfa-9413-dc02309dfbbf","Type":"ContainerStarted","Data":"99072e33c1b7ee8af93204acbc7321535e897fa11b45ecebcf70849083a4824d"} Jan 20 17:23:06 crc kubenswrapper[4558]: I0120 17:23:06.076594 4558 generic.go:334] "Generic (PLEG): container finished" podID="e1484eaf-3b1f-4dfa-9413-dc02309dfbbf" containerID="44a0737b01ce0cd786f9b99cafcc7751a34c8d80cd152766679f7c00f1f35b84" exitCode=0 Jan 20 17:23:06 crc kubenswrapper[4558]: I0120 17:23:06.076660 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-c5fc7f8f-s5hwr" event={"ID":"e1484eaf-3b1f-4dfa-9413-dc02309dfbbf","Type":"ContainerDied","Data":"44a0737b01ce0cd786f9b99cafcc7751a34c8d80cd152766679f7c00f1f35b84"} Jan 20 17:23:06 crc kubenswrapper[4558]: I0120 17:23:06.079072 4558 generic.go:334] "Generic (PLEG): container finished" podID="d1f49886-b3f9-40f5-aa03-731030fbb9fc" containerID="51c4471e615f0278850f1aad746f15c73d4493f0a751b60dad03afce53c2f92a" exitCode=0 Jan 20 17:23:06 crc kubenswrapper[4558]: I0120 17:23:06.079150 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-sync-q78xq" event={"ID":"d1f49886-b3f9-40f5-aa03-731030fbb9fc","Type":"ContainerDied","Data":"51c4471e615f0278850f1aad746f15c73d4493f0a751b60dad03afce53c2f92a"} Jan 20 17:23:06 crc kubenswrapper[4558]: I0120 17:23:06.570503 4558 scope.go:117] "RemoveContainer" containerID="0ffce44366a8b4466427b682fb41640425366a0f4449210959148de9aef695c6" Jan 20 17:23:07 crc kubenswrapper[4558]: I0120 17:23:07.094507 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerStarted","Data":"2d144cfe8b47aa78680f1723838f69b34b4c8966312c6a5804d87ce86bc40c18"} Jan 20 17:23:07 crc kubenswrapper[4558]: I0120 17:23:07.096019 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-c5fc7f8f-s5hwr" event={"ID":"e1484eaf-3b1f-4dfa-9413-dc02309dfbbf","Type":"ContainerStarted","Data":"1c137771e465e33a67364c32486ffdbc563cc7ed837bc1e32b1d407c435a9739"} Jan 20 17:23:07 crc kubenswrapper[4558]: I0120 17:23:07.129433 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack-kuttl-tests/dnsmasq-dnsmasq-c5fc7f8f-s5hwr" podStartSLOduration=3.129414719 podStartE2EDuration="3.129414719s" podCreationTimestamp="2026-01-20 17:23:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:23:07.125184482 +0000 UTC m=+2480.885522449" watchObservedRunningTime="2026-01-20 17:23:07.129414719 +0000 UTC m=+2480.889752686" Jan 20 17:23:07 crc kubenswrapper[4558]: I0120 17:23:07.313275 4558 scope.go:117] "RemoveContainer" containerID="a11fa90defce42c76ab0b049d20ff0e9aec4fadd756a856e04db760247f336fd" Jan 20 17:23:07 crc kubenswrapper[4558]: I0120 17:23:07.368696 4558 scope.go:117] "RemoveContainer" containerID="644ede42e2610088cf730b8329c0fbe61fa14dedf213de3beb1f25c2ad510023" Jan 20 17:23:07 crc kubenswrapper[4558]: I0120 17:23:07.493097 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-db-sync-q78xq" Jan 20 17:23:07 crc kubenswrapper[4558]: I0120 17:23:07.546337 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d1f49886-b3f9-40f5-aa03-731030fbb9fc-combined-ca-bundle\") pod \"d1f49886-b3f9-40f5-aa03-731030fbb9fc\" (UID: \"d1f49886-b3f9-40f5-aa03-731030fbb9fc\") " Jan 20 17:23:07 crc kubenswrapper[4558]: I0120 17:23:07.546434 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d1f49886-b3f9-40f5-aa03-731030fbb9fc-config-data\") pod \"d1f49886-b3f9-40f5-aa03-731030fbb9fc\" (UID: \"d1f49886-b3f9-40f5-aa03-731030fbb9fc\") " Jan 20 17:23:07 crc kubenswrapper[4558]: I0120 17:23:07.546551 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/d1f49886-b3f9-40f5-aa03-731030fbb9fc-db-sync-config-data\") pod \"d1f49886-b3f9-40f5-aa03-731030fbb9fc\" (UID: \"d1f49886-b3f9-40f5-aa03-731030fbb9fc\") " Jan 20 17:23:07 crc kubenswrapper[4558]: I0120 17:23:07.546626 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sn5s8\" (UniqueName: \"kubernetes.io/projected/d1f49886-b3f9-40f5-aa03-731030fbb9fc-kube-api-access-sn5s8\") pod \"d1f49886-b3f9-40f5-aa03-731030fbb9fc\" (UID: \"d1f49886-b3f9-40f5-aa03-731030fbb9fc\") " Jan 20 17:23:07 crc kubenswrapper[4558]: I0120 17:23:07.553080 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d1f49886-b3f9-40f5-aa03-731030fbb9fc-kube-api-access-sn5s8" (OuterVolumeSpecName: "kube-api-access-sn5s8") pod "d1f49886-b3f9-40f5-aa03-731030fbb9fc" (UID: "d1f49886-b3f9-40f5-aa03-731030fbb9fc"). InnerVolumeSpecName "kube-api-access-sn5s8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:23:07 crc kubenswrapper[4558]: I0120 17:23:07.553432 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d1f49886-b3f9-40f5-aa03-731030fbb9fc-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "d1f49886-b3f9-40f5-aa03-731030fbb9fc" (UID: "d1f49886-b3f9-40f5-aa03-731030fbb9fc"). InnerVolumeSpecName "db-sync-config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:23:07 crc kubenswrapper[4558]: I0120 17:23:07.571407 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d1f49886-b3f9-40f5-aa03-731030fbb9fc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d1f49886-b3f9-40f5-aa03-731030fbb9fc" (UID: "d1f49886-b3f9-40f5-aa03-731030fbb9fc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:23:07 crc kubenswrapper[4558]: I0120 17:23:07.588721 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d1f49886-b3f9-40f5-aa03-731030fbb9fc-config-data" (OuterVolumeSpecName: "config-data") pod "d1f49886-b3f9-40f5-aa03-731030fbb9fc" (UID: "d1f49886-b3f9-40f5-aa03-731030fbb9fc"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:23:07 crc kubenswrapper[4558]: I0120 17:23:07.648868 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sn5s8\" (UniqueName: \"kubernetes.io/projected/d1f49886-b3f9-40f5-aa03-731030fbb9fc-kube-api-access-sn5s8\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:07 crc kubenswrapper[4558]: I0120 17:23:07.648904 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d1f49886-b3f9-40f5-aa03-731030fbb9fc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:07 crc kubenswrapper[4558]: I0120 17:23:07.648915 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d1f49886-b3f9-40f5-aa03-731030fbb9fc-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:07 crc kubenswrapper[4558]: I0120 17:23:07.648927 4558 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/d1f49886-b3f9-40f5-aa03-731030fbb9fc-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:07 crc kubenswrapper[4558]: I0120 17:23:07.790938 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/root-account-create-update-zbfxs"] Jan 20 17:23:07 crc kubenswrapper[4558]: E0120 17:23:07.791348 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d1f49886-b3f9-40f5-aa03-731030fbb9fc" containerName="glance-db-sync" Jan 20 17:23:07 crc kubenswrapper[4558]: I0120 17:23:07.791366 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d1f49886-b3f9-40f5-aa03-731030fbb9fc" containerName="glance-db-sync" Jan 20 17:23:07 crc kubenswrapper[4558]: I0120 17:23:07.791507 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d1f49886-b3f9-40f5-aa03-731030fbb9fc" containerName="glance-db-sync" Jan 20 17:23:07 crc kubenswrapper[4558]: I0120 17:23:07.792092 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-zbfxs" Jan 20 17:23:07 crc kubenswrapper[4558]: I0120 17:23:07.793892 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"openstack-cell1-mariadb-root-db-secret" Jan 20 17:23:07 crc kubenswrapper[4558]: I0120 17:23:07.796198 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-zbfxs"] Jan 20 17:23:07 crc kubenswrapper[4558]: I0120 17:23:07.851778 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z7lkf\" (UniqueName: \"kubernetes.io/projected/979d210b-e852-4deb-bef1-345fbd8ccf52-kube-api-access-z7lkf\") pod \"root-account-create-update-zbfxs\" (UID: \"979d210b-e852-4deb-bef1-345fbd8ccf52\") " pod="openstack-kuttl-tests/root-account-create-update-zbfxs" Jan 20 17:23:07 crc kubenswrapper[4558]: I0120 17:23:07.851839 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/979d210b-e852-4deb-bef1-345fbd8ccf52-operator-scripts\") pod \"root-account-create-update-zbfxs\" (UID: \"979d210b-e852-4deb-bef1-345fbd8ccf52\") " pod="openstack-kuttl-tests/root-account-create-update-zbfxs" Jan 20 17:23:07 crc kubenswrapper[4558]: I0120 17:23:07.954440 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z7lkf\" (UniqueName: \"kubernetes.io/projected/979d210b-e852-4deb-bef1-345fbd8ccf52-kube-api-access-z7lkf\") pod \"root-account-create-update-zbfxs\" (UID: \"979d210b-e852-4deb-bef1-345fbd8ccf52\") " pod="openstack-kuttl-tests/root-account-create-update-zbfxs" Jan 20 17:23:07 crc kubenswrapper[4558]: I0120 17:23:07.954548 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/979d210b-e852-4deb-bef1-345fbd8ccf52-operator-scripts\") pod \"root-account-create-update-zbfxs\" (UID: \"979d210b-e852-4deb-bef1-345fbd8ccf52\") " pod="openstack-kuttl-tests/root-account-create-update-zbfxs" Jan 20 17:23:07 crc kubenswrapper[4558]: I0120 17:23:07.955465 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/979d210b-e852-4deb-bef1-345fbd8ccf52-operator-scripts\") pod \"root-account-create-update-zbfxs\" (UID: \"979d210b-e852-4deb-bef1-345fbd8ccf52\") " pod="openstack-kuttl-tests/root-account-create-update-zbfxs" Jan 20 17:23:07 crc kubenswrapper[4558]: I0120 17:23:07.974554 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z7lkf\" (UniqueName: \"kubernetes.io/projected/979d210b-e852-4deb-bef1-345fbd8ccf52-kube-api-access-z7lkf\") pod \"root-account-create-update-zbfxs\" (UID: \"979d210b-e852-4deb-bef1-345fbd8ccf52\") " pod="openstack-kuttl-tests/root-account-create-update-zbfxs" Jan 20 17:23:08 crc kubenswrapper[4558]: I0120 17:23:08.109973 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-sync-q78xq" event={"ID":"d1f49886-b3f9-40f5-aa03-731030fbb9fc","Type":"ContainerDied","Data":"716d057963f462bc5aab035efc332d93009fbf95ead09ed95c82556362bbc404"} Jan 20 17:23:08 crc kubenswrapper[4558]: I0120 17:23:08.110060 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="716d057963f462bc5aab035efc332d93009fbf95ead09ed95c82556362bbc404" Jan 20 17:23:08 crc 
kubenswrapper[4558]: I0120 17:23:08.110009 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-db-sync-q78xq" Jan 20 17:23:08 crc kubenswrapper[4558]: I0120 17:23:08.110374 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-c5fc7f8f-s5hwr" Jan 20 17:23:08 crc kubenswrapper[4558]: I0120 17:23:08.111470 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-zbfxs" Jan 20 17:23:08 crc kubenswrapper[4558]: I0120 17:23:08.589677 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-zbfxs"] Jan 20 17:23:09 crc kubenswrapper[4558]: I0120 17:23:09.119823 4558 generic.go:334] "Generic (PLEG): container finished" podID="979d210b-e852-4deb-bef1-345fbd8ccf52" containerID="99f840cfec6f5fd24affe278783ce784e3dc668ee377d2172d230ab73c6e60dc" exitCode=0 Jan 20 17:23:09 crc kubenswrapper[4558]: I0120 17:23:09.119930 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-zbfxs" event={"ID":"979d210b-e852-4deb-bef1-345fbd8ccf52","Type":"ContainerDied","Data":"99f840cfec6f5fd24affe278783ce784e3dc668ee377d2172d230ab73c6e60dc"} Jan 20 17:23:09 crc kubenswrapper[4558]: I0120 17:23:09.120153 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-zbfxs" event={"ID":"979d210b-e852-4deb-bef1-345fbd8ccf52","Type":"ContainerStarted","Data":"84b47cfc5ae9610832db7ef423b33839e5ec2278b99031560f2d3a19e2401b40"} Jan 20 17:23:10 crc kubenswrapper[4558]: I0120 17:23:10.130133 4558 generic.go:334] "Generic (PLEG): container finished" podID="7a2c4722-11cc-426c-8101-496c9ee97ca2" containerID="844cb778093e33ece39e19f2cf2d5530f5852b1038b5c9c019b1574903b32025" exitCode=0 Jan 20 17:23:10 crc kubenswrapper[4558]: I0120 17:23:10.130240 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" event={"ID":"7a2c4722-11cc-426c-8101-496c9ee97ca2","Type":"ContainerDied","Data":"844cb778093e33ece39e19f2cf2d5530f5852b1038b5c9c019b1574903b32025"} Jan 20 17:23:10 crc kubenswrapper[4558]: I0120 17:23:10.131668 4558 generic.go:334] "Generic (PLEG): container finished" podID="033e5f26-b8e8-48f1-affd-3b9e7fba316a" containerID="b6108373b6dc1c8073ee4649e4bf895e88d86f76fea047217ce0066f6364f215" exitCode=0 Jan 20 17:23:10 crc kubenswrapper[4558]: I0120 17:23:10.131749 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-0" event={"ID":"033e5f26-b8e8-48f1-affd-3b9e7fba316a","Type":"ContainerDied","Data":"b6108373b6dc1c8073ee4649e4bf895e88d86f76fea047217ce0066f6364f215"} Jan 20 17:23:10 crc kubenswrapper[4558]: I0120 17:23:10.453625 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-zbfxs" Jan 20 17:23:10 crc kubenswrapper[4558]: I0120 17:23:10.604886 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/979d210b-e852-4deb-bef1-345fbd8ccf52-operator-scripts\") pod \"979d210b-e852-4deb-bef1-345fbd8ccf52\" (UID: \"979d210b-e852-4deb-bef1-345fbd8ccf52\") " Jan 20 17:23:10 crc kubenswrapper[4558]: I0120 17:23:10.605023 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z7lkf\" (UniqueName: \"kubernetes.io/projected/979d210b-e852-4deb-bef1-345fbd8ccf52-kube-api-access-z7lkf\") pod \"979d210b-e852-4deb-bef1-345fbd8ccf52\" (UID: \"979d210b-e852-4deb-bef1-345fbd8ccf52\") " Jan 20 17:23:10 crc kubenswrapper[4558]: I0120 17:23:10.605452 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/979d210b-e852-4deb-bef1-345fbd8ccf52-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "979d210b-e852-4deb-bef1-345fbd8ccf52" (UID: "979d210b-e852-4deb-bef1-345fbd8ccf52"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:23:10 crc kubenswrapper[4558]: I0120 17:23:10.605589 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/979d210b-e852-4deb-bef1-345fbd8ccf52-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:10 crc kubenswrapper[4558]: I0120 17:23:10.609003 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/979d210b-e852-4deb-bef1-345fbd8ccf52-kube-api-access-z7lkf" (OuterVolumeSpecName: "kube-api-access-z7lkf") pod "979d210b-e852-4deb-bef1-345fbd8ccf52" (UID: "979d210b-e852-4deb-bef1-345fbd8ccf52"). InnerVolumeSpecName "kube-api-access-z7lkf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:23:10 crc kubenswrapper[4558]: I0120 17:23:10.707465 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z7lkf\" (UniqueName: \"kubernetes.io/projected/979d210b-e852-4deb-bef1-345fbd8ccf52-kube-api-access-z7lkf\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:11 crc kubenswrapper[4558]: I0120 17:23:11.142063 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" event={"ID":"7a2c4722-11cc-426c-8101-496c9ee97ca2","Type":"ContainerStarted","Data":"d6656270debe6f92e12b0faf0e10cdbadf8fd2e24c19d3019d8548693ab17740"} Jan 20 17:23:11 crc kubenswrapper[4558]: I0120 17:23:11.142443 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:23:11 crc kubenswrapper[4558]: I0120 17:23:11.145945 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-0" event={"ID":"033e5f26-b8e8-48f1-affd-3b9e7fba316a","Type":"ContainerStarted","Data":"a02a42e340dab5cef6984f56ed207a62ce75f87172edeb2b63be0e769779bf17"} Jan 20 17:23:11 crc kubenswrapper[4558]: I0120 17:23:11.146272 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:23:11 crc kubenswrapper[4558]: I0120 17:23:11.148322 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-zbfxs" event={"ID":"979d210b-e852-4deb-bef1-345fbd8ccf52","Type":"ContainerDied","Data":"84b47cfc5ae9610832db7ef423b33839e5ec2278b99031560f2d3a19e2401b40"} Jan 20 17:23:11 crc kubenswrapper[4558]: I0120 17:23:11.148372 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="84b47cfc5ae9610832db7ef423b33839e5ec2278b99031560f2d3a19e2401b40" Jan 20 17:23:11 crc kubenswrapper[4558]: I0120 17:23:11.148412 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-zbfxs" Jan 20 17:23:11 crc kubenswrapper[4558]: I0120 17:23:11.177362 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" podStartSLOduration=36.177302676 podStartE2EDuration="36.177302676s" podCreationTimestamp="2026-01-20 17:22:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:23:11.168885092 +0000 UTC m=+2484.929223058" watchObservedRunningTime="2026-01-20 17:23:11.177302676 +0000 UTC m=+2484.937640643" Jan 20 17:23:11 crc kubenswrapper[4558]: I0120 17:23:11.187495 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/rabbitmq-server-0" podStartSLOduration=37.187479749 podStartE2EDuration="37.187479749s" podCreationTimestamp="2026-01-20 17:22:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:23:11.185748793 +0000 UTC m=+2484.946086760" watchObservedRunningTime="2026-01-20 17:23:11.187479749 +0000 UTC m=+2484.947817716" Jan 20 17:23:14 crc kubenswrapper[4558]: I0120 17:23:14.524357 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-c5fc7f8f-s5hwr" Jan 20 17:23:14 crc kubenswrapper[4558]: I0120 17:23:14.574474 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-psfz5"] Jan 20 17:23:14 crc kubenswrapper[4558]: I0120 17:23:14.574656 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-psfz5" podUID="bcc56301-83b3-4ff9-9290-ed029940a6da" containerName="dnsmasq-dns" containerID="cri-o://3cf0e8299f84b5df2b4be00b863bd89ef88937e1f0734e67e76b770c4c5bf382" gracePeriod=10 Jan 20 17:23:15 crc kubenswrapper[4558]: I0120 17:23:15.043477 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-psfz5" Jan 20 17:23:15 crc kubenswrapper[4558]: I0120 17:23:15.187308 4558 generic.go:334] "Generic (PLEG): container finished" podID="bcc56301-83b3-4ff9-9290-ed029940a6da" containerID="3cf0e8299f84b5df2b4be00b863bd89ef88937e1f0734e67e76b770c4c5bf382" exitCode=0 Jan 20 17:23:15 crc kubenswrapper[4558]: I0120 17:23:15.187370 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-psfz5" event={"ID":"bcc56301-83b3-4ff9-9290-ed029940a6da","Type":"ContainerDied","Data":"3cf0e8299f84b5df2b4be00b863bd89ef88937e1f0734e67e76b770c4c5bf382"} Jan 20 17:23:15 crc kubenswrapper[4558]: I0120 17:23:15.187417 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-psfz5" event={"ID":"bcc56301-83b3-4ff9-9290-ed029940a6da","Type":"ContainerDied","Data":"afbd24b3c392464f207695cb23d1a44e8143e7a1cf578f22d952abcc64204423"} Jan 20 17:23:15 crc kubenswrapper[4558]: I0120 17:23:15.187438 4558 scope.go:117] "RemoveContainer" containerID="3cf0e8299f84b5df2b4be00b863bd89ef88937e1f0734e67e76b770c4c5bf382" Jan 20 17:23:15 crc kubenswrapper[4558]: I0120 17:23:15.187721 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-psfz5" Jan 20 17:23:15 crc kubenswrapper[4558]: I0120 17:23:15.190772 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bcc56301-83b3-4ff9-9290-ed029940a6da-config\") pod \"bcc56301-83b3-4ff9-9290-ed029940a6da\" (UID: \"bcc56301-83b3-4ff9-9290-ed029940a6da\") " Jan 20 17:23:15 crc kubenswrapper[4558]: I0120 17:23:15.190925 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mlwwz\" (UniqueName: \"kubernetes.io/projected/bcc56301-83b3-4ff9-9290-ed029940a6da-kube-api-access-mlwwz\") pod \"bcc56301-83b3-4ff9-9290-ed029940a6da\" (UID: \"bcc56301-83b3-4ff9-9290-ed029940a6da\") " Jan 20 17:23:15 crc kubenswrapper[4558]: I0120 17:23:15.191022 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/bcc56301-83b3-4ff9-9290-ed029940a6da-dnsmasq-svc\") pod \"bcc56301-83b3-4ff9-9290-ed029940a6da\" (UID: \"bcc56301-83b3-4ff9-9290-ed029940a6da\") " Jan 20 17:23:15 crc kubenswrapper[4558]: I0120 17:23:15.213904 4558 scope.go:117] "RemoveContainer" containerID="b6872f845e094138749e172f30290826b82d6e116c124ea5839c4bcffe150159" Jan 20 17:23:15 crc kubenswrapper[4558]: I0120 17:23:15.213973 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bcc56301-83b3-4ff9-9290-ed029940a6da-kube-api-access-mlwwz" (OuterVolumeSpecName: "kube-api-access-mlwwz") pod "bcc56301-83b3-4ff9-9290-ed029940a6da" (UID: "bcc56301-83b3-4ff9-9290-ed029940a6da"). InnerVolumeSpecName "kube-api-access-mlwwz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:23:15 crc kubenswrapper[4558]: I0120 17:23:15.240444 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bcc56301-83b3-4ff9-9290-ed029940a6da-config" (OuterVolumeSpecName: "config") pod "bcc56301-83b3-4ff9-9290-ed029940a6da" (UID: "bcc56301-83b3-4ff9-9290-ed029940a6da"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:23:15 crc kubenswrapper[4558]: I0120 17:23:15.241414 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bcc56301-83b3-4ff9-9290-ed029940a6da-dnsmasq-svc" (OuterVolumeSpecName: "dnsmasq-svc") pod "bcc56301-83b3-4ff9-9290-ed029940a6da" (UID: "bcc56301-83b3-4ff9-9290-ed029940a6da"). InnerVolumeSpecName "dnsmasq-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:23:15 crc kubenswrapper[4558]: I0120 17:23:15.280027 4558 scope.go:117] "RemoveContainer" containerID="3cf0e8299f84b5df2b4be00b863bd89ef88937e1f0734e67e76b770c4c5bf382" Jan 20 17:23:15 crc kubenswrapper[4558]: E0120 17:23:15.280499 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3cf0e8299f84b5df2b4be00b863bd89ef88937e1f0734e67e76b770c4c5bf382\": container with ID starting with 3cf0e8299f84b5df2b4be00b863bd89ef88937e1f0734e67e76b770c4c5bf382 not found: ID does not exist" containerID="3cf0e8299f84b5df2b4be00b863bd89ef88937e1f0734e67e76b770c4c5bf382" Jan 20 17:23:15 crc kubenswrapper[4558]: I0120 17:23:15.280536 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3cf0e8299f84b5df2b4be00b863bd89ef88937e1f0734e67e76b770c4c5bf382"} err="failed to get container status \"3cf0e8299f84b5df2b4be00b863bd89ef88937e1f0734e67e76b770c4c5bf382\": rpc error: code = NotFound desc = could not find container \"3cf0e8299f84b5df2b4be00b863bd89ef88937e1f0734e67e76b770c4c5bf382\": container with ID starting with 3cf0e8299f84b5df2b4be00b863bd89ef88937e1f0734e67e76b770c4c5bf382 not found: ID does not exist" Jan 20 17:23:15 crc kubenswrapper[4558]: I0120 17:23:15.280561 4558 scope.go:117] "RemoveContainer" containerID="b6872f845e094138749e172f30290826b82d6e116c124ea5839c4bcffe150159" Jan 20 17:23:15 crc kubenswrapper[4558]: E0120 17:23:15.280946 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b6872f845e094138749e172f30290826b82d6e116c124ea5839c4bcffe150159\": container with ID starting with b6872f845e094138749e172f30290826b82d6e116c124ea5839c4bcffe150159 not found: ID does not exist" containerID="b6872f845e094138749e172f30290826b82d6e116c124ea5839c4bcffe150159" Jan 20 17:23:15 crc kubenswrapper[4558]: I0120 17:23:15.280968 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b6872f845e094138749e172f30290826b82d6e116c124ea5839c4bcffe150159"} err="failed to get container status \"b6872f845e094138749e172f30290826b82d6e116c124ea5839c4bcffe150159\": rpc error: code = NotFound desc = could not find container \"b6872f845e094138749e172f30290826b82d6e116c124ea5839c4bcffe150159\": container with ID starting with b6872f845e094138749e172f30290826b82d6e116c124ea5839c4bcffe150159 not found: ID does not exist" Jan 20 17:23:15 crc kubenswrapper[4558]: I0120 17:23:15.293812 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bcc56301-83b3-4ff9-9290-ed029940a6da-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:15 crc kubenswrapper[4558]: I0120 17:23:15.293841 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mlwwz\" (UniqueName: \"kubernetes.io/projected/bcc56301-83b3-4ff9-9290-ed029940a6da-kube-api-access-mlwwz\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:15 crc kubenswrapper[4558]: I0120 17:23:15.293853 4558 reconciler_common.go:293] "Volume detached for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/bcc56301-83b3-4ff9-9290-ed029940a6da-dnsmasq-svc\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:15 crc kubenswrapper[4558]: I0120 17:23:15.515980 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-psfz5"] Jan 20 17:23:15 crc kubenswrapper[4558]: I0120 
17:23:15.521550 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-psfz5"] Jan 20 17:23:16 crc kubenswrapper[4558]: I0120 17:23:16.576132 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bcc56301-83b3-4ff9-9290-ed029940a6da" path="/var/lib/kubelet/pods/bcc56301-83b3-4ff9-9290-ed029940a6da/volumes" Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.077463 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.398547 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-db-create-gh87k"] Jan 20 17:23:26 crc kubenswrapper[4558]: E0120 17:23:26.398896 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bcc56301-83b3-4ff9-9290-ed029940a6da" containerName="dnsmasq-dns" Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.398914 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="bcc56301-83b3-4ff9-9290-ed029940a6da" containerName="dnsmasq-dns" Jan 20 17:23:26 crc kubenswrapper[4558]: E0120 17:23:26.398934 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bcc56301-83b3-4ff9-9290-ed029940a6da" containerName="init" Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.398940 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="bcc56301-83b3-4ff9-9290-ed029940a6da" containerName="init" Jan 20 17:23:26 crc kubenswrapper[4558]: E0120 17:23:26.398950 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="979d210b-e852-4deb-bef1-345fbd8ccf52" containerName="mariadb-account-create-update" Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.398955 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="979d210b-e852-4deb-bef1-345fbd8ccf52" containerName="mariadb-account-create-update" Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.399107 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="979d210b-e852-4deb-bef1-345fbd8ccf52" containerName="mariadb-account-create-update" Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.399130 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="bcc56301-83b3-4ff9-9290-ed029940a6da" containerName="dnsmasq-dns" Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.399652 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-db-create-gh87k" Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.406280 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-b136-account-create-update-x288d"] Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.407742 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-b136-account-create-update-x288d" Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.411454 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-db-create-gh87k"] Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.418045 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-db-secret" Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.422567 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-b136-account-create-update-x288d"] Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.460331 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.501692 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-db-create-sz4kq"] Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.502871 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-db-create-sz4kq" Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.522373 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-db-create-sz4kq"] Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.587414 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/72189413-de76-4a83-87ce-441c69a4e319-operator-scripts\") pod \"neutron-b136-account-create-update-x288d\" (UID: \"72189413-de76-4a83-87ce-441c69a4e319\") " pod="openstack-kuttl-tests/neutron-b136-account-create-update-x288d" Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.587517 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vmkqm\" (UniqueName: \"kubernetes.io/projected/33a5b64d-fca0-4a5f-92df-908c67df28cc-kube-api-access-vmkqm\") pod \"cinder-db-create-gh87k\" (UID: \"33a5b64d-fca0-4a5f-92df-908c67df28cc\") " pod="openstack-kuttl-tests/cinder-db-create-gh87k" Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.587687 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/33a5b64d-fca0-4a5f-92df-908c67df28cc-operator-scripts\") pod \"cinder-db-create-gh87k\" (UID: \"33a5b64d-fca0-4a5f-92df-908c67df28cc\") " pod="openstack-kuttl-tests/cinder-db-create-gh87k" Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.587733 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5vtqh\" (UniqueName: \"kubernetes.io/projected/72189413-de76-4a83-87ce-441c69a4e319-kube-api-access-5vtqh\") pod \"neutron-b136-account-create-update-x288d\" (UID: \"72189413-de76-4a83-87ce-441c69a4e319\") " pod="openstack-kuttl-tests/neutron-b136-account-create-update-x288d" Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.618289 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-db-create-4t4nh"] Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.622340 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-db-create-4t4nh" Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.637028 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-43e3-account-create-update-l4gms"] Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.639963 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-43e3-account-create-update-l4gms" Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.641966 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-db-secret" Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.649895 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-43e3-account-create-update-l4gms"] Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.668332 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-db-create-4t4nh"] Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.687303 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-db-sync-x2fdq"] Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.688682 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-db-sync-x2fdq" Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.689755 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-db-sync-x2fdq"] Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.689885 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/33a5b64d-fca0-4a5f-92df-908c67df28cc-operator-scripts\") pod \"cinder-db-create-gh87k\" (UID: \"33a5b64d-fca0-4a5f-92df-908c67df28cc\") " pod="openstack-kuttl-tests/cinder-db-create-gh87k" Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.689933 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5vtqh\" (UniqueName: \"kubernetes.io/projected/72189413-de76-4a83-87ce-441c69a4e319-kube-api-access-5vtqh\") pod \"neutron-b136-account-create-update-x288d\" (UID: \"72189413-de76-4a83-87ce-441c69a4e319\") " pod="openstack-kuttl-tests/neutron-b136-account-create-update-x288d" Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.690056 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a7cbf118-c70e-4058-b7b8-e14883e1fdc2-operator-scripts\") pod \"neutron-db-create-sz4kq\" (UID: \"a7cbf118-c70e-4058-b7b8-e14883e1fdc2\") " pod="openstack-kuttl-tests/neutron-db-create-sz4kq" Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.690748 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/33a5b64d-fca0-4a5f-92df-908c67df28cc-operator-scripts\") pod \"cinder-db-create-gh87k\" (UID: \"33a5b64d-fca0-4a5f-92df-908c67df28cc\") " pod="openstack-kuttl-tests/cinder-db-create-gh87k" Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.690801 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-config-data" Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.691497 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/72189413-de76-4a83-87ce-441c69a4e319-operator-scripts\") pod \"neutron-b136-account-create-update-x288d\" (UID: \"72189413-de76-4a83-87ce-441c69a4e319\") " pod="openstack-kuttl-tests/neutron-b136-account-create-update-x288d" Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.692637 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/72189413-de76-4a83-87ce-441c69a4e319-operator-scripts\") pod \"neutron-b136-account-create-update-x288d\" (UID: \"72189413-de76-4a83-87ce-441c69a4e319\") " pod="openstack-kuttl-tests/neutron-b136-account-create-update-x288d" Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.690850 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone" Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.692683 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s94tv\" (UniqueName: \"kubernetes.io/projected/a7cbf118-c70e-4058-b7b8-e14883e1fdc2-kube-api-access-s94tv\") pod \"neutron-db-create-sz4kq\" (UID: \"a7cbf118-c70e-4058-b7b8-e14883e1fdc2\") " pod="openstack-kuttl-tests/neutron-db-create-sz4kq" Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.692744 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vmkqm\" (UniqueName: \"kubernetes.io/projected/33a5b64d-fca0-4a5f-92df-908c67df28cc-kube-api-access-vmkqm\") pod \"cinder-db-create-gh87k\" (UID: \"33a5b64d-fca0-4a5f-92df-908c67df28cc\") " pod="openstack-kuttl-tests/cinder-db-create-gh87k" Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.691574 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-keystone-dockercfg-njbkz" Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.691651 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-scripts" Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.711460 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5vtqh\" (UniqueName: \"kubernetes.io/projected/72189413-de76-4a83-87ce-441c69a4e319-kube-api-access-5vtqh\") pod \"neutron-b136-account-create-update-x288d\" (UID: \"72189413-de76-4a83-87ce-441c69a4e319\") " pod="openstack-kuttl-tests/neutron-b136-account-create-update-x288d" Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.717156 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-e949-account-create-update-swzjb"] Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.723152 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-e949-account-create-update-swzjb" Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.726457 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-db-secret" Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.730218 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vmkqm\" (UniqueName: \"kubernetes.io/projected/33a5b64d-fca0-4a5f-92df-908c67df28cc-kube-api-access-vmkqm\") pod \"cinder-db-create-gh87k\" (UID: \"33a5b64d-fca0-4a5f-92df-908c67df28cc\") " pod="openstack-kuttl-tests/cinder-db-create-gh87k" Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.734098 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-e949-account-create-update-swzjb"] Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.734533 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-b136-account-create-update-x288d" Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.797592 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s94tv\" (UniqueName: \"kubernetes.io/projected/a7cbf118-c70e-4058-b7b8-e14883e1fdc2-kube-api-access-s94tv\") pod \"neutron-db-create-sz4kq\" (UID: \"a7cbf118-c70e-4058-b7b8-e14883e1fdc2\") " pod="openstack-kuttl-tests/neutron-db-create-sz4kq" Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.797641 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/213f0f23-1d2e-456d-9a25-de15571358a2-config-data\") pod \"keystone-db-sync-x2fdq\" (UID: \"213f0f23-1d2e-456d-9a25-de15571358a2\") " pod="openstack-kuttl-tests/keystone-db-sync-x2fdq" Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.797735 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v29cw\" (UniqueName: \"kubernetes.io/projected/3c810cd2-bc0a-407e-8626-e52eb0ba5296-kube-api-access-v29cw\") pod \"barbican-db-create-4t4nh\" (UID: \"3c810cd2-bc0a-407e-8626-e52eb0ba5296\") " pod="openstack-kuttl-tests/barbican-db-create-4t4nh" Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.797802 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pkfvq\" (UniqueName: \"kubernetes.io/projected/d39d453b-abd1-42ef-8b7c-0cf6d9404ad6-kube-api-access-pkfvq\") pod \"cinder-43e3-account-create-update-l4gms\" (UID: \"d39d453b-abd1-42ef-8b7c-0cf6d9404ad6\") " pod="openstack-kuttl-tests/cinder-43e3-account-create-update-l4gms" Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.797835 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/213f0f23-1d2e-456d-9a25-de15571358a2-combined-ca-bundle\") pod \"keystone-db-sync-x2fdq\" (UID: \"213f0f23-1d2e-456d-9a25-de15571358a2\") " pod="openstack-kuttl-tests/keystone-db-sync-x2fdq" Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.797870 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a7cbf118-c70e-4058-b7b8-e14883e1fdc2-operator-scripts\") pod \"neutron-db-create-sz4kq\" (UID: \"a7cbf118-c70e-4058-b7b8-e14883e1fdc2\") " 
pod="openstack-kuttl-tests/neutron-db-create-sz4kq" Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.797906 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d39d453b-abd1-42ef-8b7c-0cf6d9404ad6-operator-scripts\") pod \"cinder-43e3-account-create-update-l4gms\" (UID: \"d39d453b-abd1-42ef-8b7c-0cf6d9404ad6\") " pod="openstack-kuttl-tests/cinder-43e3-account-create-update-l4gms" Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.797951 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-25xgp\" (UniqueName: \"kubernetes.io/projected/213f0f23-1d2e-456d-9a25-de15571358a2-kube-api-access-25xgp\") pod \"keystone-db-sync-x2fdq\" (UID: \"213f0f23-1d2e-456d-9a25-de15571358a2\") " pod="openstack-kuttl-tests/keystone-db-sync-x2fdq" Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.797993 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3c810cd2-bc0a-407e-8626-e52eb0ba5296-operator-scripts\") pod \"barbican-db-create-4t4nh\" (UID: \"3c810cd2-bc0a-407e-8626-e52eb0ba5296\") " pod="openstack-kuttl-tests/barbican-db-create-4t4nh" Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.798604 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a7cbf118-c70e-4058-b7b8-e14883e1fdc2-operator-scripts\") pod \"neutron-db-create-sz4kq\" (UID: \"a7cbf118-c70e-4058-b7b8-e14883e1fdc2\") " pod="openstack-kuttl-tests/neutron-db-create-sz4kq" Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.816305 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s94tv\" (UniqueName: \"kubernetes.io/projected/a7cbf118-c70e-4058-b7b8-e14883e1fdc2-kube-api-access-s94tv\") pod \"neutron-db-create-sz4kq\" (UID: \"a7cbf118-c70e-4058-b7b8-e14883e1fdc2\") " pod="openstack-kuttl-tests/neutron-db-create-sz4kq" Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.833605 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-db-create-sz4kq" Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.899902 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pkfvq\" (UniqueName: \"kubernetes.io/projected/d39d453b-abd1-42ef-8b7c-0cf6d9404ad6-kube-api-access-pkfvq\") pod \"cinder-43e3-account-create-update-l4gms\" (UID: \"d39d453b-abd1-42ef-8b7c-0cf6d9404ad6\") " pod="openstack-kuttl-tests/cinder-43e3-account-create-update-l4gms" Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.899953 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/213f0f23-1d2e-456d-9a25-de15571358a2-combined-ca-bundle\") pod \"keystone-db-sync-x2fdq\" (UID: \"213f0f23-1d2e-456d-9a25-de15571358a2\") " pod="openstack-kuttl-tests/keystone-db-sync-x2fdq" Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.899987 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d39d453b-abd1-42ef-8b7c-0cf6d9404ad6-operator-scripts\") pod \"cinder-43e3-account-create-update-l4gms\" (UID: \"d39d453b-abd1-42ef-8b7c-0cf6d9404ad6\") " pod="openstack-kuttl-tests/cinder-43e3-account-create-update-l4gms" Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.900021 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-25xgp\" (UniqueName: \"kubernetes.io/projected/213f0f23-1d2e-456d-9a25-de15571358a2-kube-api-access-25xgp\") pod \"keystone-db-sync-x2fdq\" (UID: \"213f0f23-1d2e-456d-9a25-de15571358a2\") " pod="openstack-kuttl-tests/keystone-db-sync-x2fdq" Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.900048 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3c810cd2-bc0a-407e-8626-e52eb0ba5296-operator-scripts\") pod \"barbican-db-create-4t4nh\" (UID: \"3c810cd2-bc0a-407e-8626-e52eb0ba5296\") " pod="openstack-kuttl-tests/barbican-db-create-4t4nh" Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.900070 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z2c5k\" (UniqueName: \"kubernetes.io/projected/0e4d0567-44de-45db-977c-50fc9725b092-kube-api-access-z2c5k\") pod \"barbican-e949-account-create-update-swzjb\" (UID: \"0e4d0567-44de-45db-977c-50fc9725b092\") " pod="openstack-kuttl-tests/barbican-e949-account-create-update-swzjb" Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.900108 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0e4d0567-44de-45db-977c-50fc9725b092-operator-scripts\") pod \"barbican-e949-account-create-update-swzjb\" (UID: \"0e4d0567-44de-45db-977c-50fc9725b092\") " pod="openstack-kuttl-tests/barbican-e949-account-create-update-swzjb" Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.900138 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/213f0f23-1d2e-456d-9a25-de15571358a2-config-data\") pod \"keystone-db-sync-x2fdq\" (UID: \"213f0f23-1d2e-456d-9a25-de15571358a2\") " pod="openstack-kuttl-tests/keystone-db-sync-x2fdq" Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.900192 4558 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"kube-api-access-v29cw\" (UniqueName: \"kubernetes.io/projected/3c810cd2-bc0a-407e-8626-e52eb0ba5296-kube-api-access-v29cw\") pod \"barbican-db-create-4t4nh\" (UID: \"3c810cd2-bc0a-407e-8626-e52eb0ba5296\") " pod="openstack-kuttl-tests/barbican-db-create-4t4nh" Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.901816 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d39d453b-abd1-42ef-8b7c-0cf6d9404ad6-operator-scripts\") pod \"cinder-43e3-account-create-update-l4gms\" (UID: \"d39d453b-abd1-42ef-8b7c-0cf6d9404ad6\") " pod="openstack-kuttl-tests/cinder-43e3-account-create-update-l4gms" Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.901852 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3c810cd2-bc0a-407e-8626-e52eb0ba5296-operator-scripts\") pod \"barbican-db-create-4t4nh\" (UID: \"3c810cd2-bc0a-407e-8626-e52eb0ba5296\") " pod="openstack-kuttl-tests/barbican-db-create-4t4nh" Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.905808 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/213f0f23-1d2e-456d-9a25-de15571358a2-combined-ca-bundle\") pod \"keystone-db-sync-x2fdq\" (UID: \"213f0f23-1d2e-456d-9a25-de15571358a2\") " pod="openstack-kuttl-tests/keystone-db-sync-x2fdq" Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.908064 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/213f0f23-1d2e-456d-9a25-de15571358a2-config-data\") pod \"keystone-db-sync-x2fdq\" (UID: \"213f0f23-1d2e-456d-9a25-de15571358a2\") " pod="openstack-kuttl-tests/keystone-db-sync-x2fdq" Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.917088 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v29cw\" (UniqueName: \"kubernetes.io/projected/3c810cd2-bc0a-407e-8626-e52eb0ba5296-kube-api-access-v29cw\") pod \"barbican-db-create-4t4nh\" (UID: \"3c810cd2-bc0a-407e-8626-e52eb0ba5296\") " pod="openstack-kuttl-tests/barbican-db-create-4t4nh" Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.918811 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pkfvq\" (UniqueName: \"kubernetes.io/projected/d39d453b-abd1-42ef-8b7c-0cf6d9404ad6-kube-api-access-pkfvq\") pod \"cinder-43e3-account-create-update-l4gms\" (UID: \"d39d453b-abd1-42ef-8b7c-0cf6d9404ad6\") " pod="openstack-kuttl-tests/cinder-43e3-account-create-update-l4gms" Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.919597 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-25xgp\" (UniqueName: \"kubernetes.io/projected/213f0f23-1d2e-456d-9a25-de15571358a2-kube-api-access-25xgp\") pod \"keystone-db-sync-x2fdq\" (UID: \"213f0f23-1d2e-456d-9a25-de15571358a2\") " pod="openstack-kuttl-tests/keystone-db-sync-x2fdq" Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.945552 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-db-create-4t4nh" Jan 20 17:23:26 crc kubenswrapper[4558]: I0120 17:23:26.957999 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-43e3-account-create-update-l4gms" Jan 20 17:23:27 crc kubenswrapper[4558]: I0120 17:23:27.001541 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z2c5k\" (UniqueName: \"kubernetes.io/projected/0e4d0567-44de-45db-977c-50fc9725b092-kube-api-access-z2c5k\") pod \"barbican-e949-account-create-update-swzjb\" (UID: \"0e4d0567-44de-45db-977c-50fc9725b092\") " pod="openstack-kuttl-tests/barbican-e949-account-create-update-swzjb" Jan 20 17:23:27 crc kubenswrapper[4558]: I0120 17:23:27.001634 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0e4d0567-44de-45db-977c-50fc9725b092-operator-scripts\") pod \"barbican-e949-account-create-update-swzjb\" (UID: \"0e4d0567-44de-45db-977c-50fc9725b092\") " pod="openstack-kuttl-tests/barbican-e949-account-create-update-swzjb" Jan 20 17:23:27 crc kubenswrapper[4558]: I0120 17:23:27.003423 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0e4d0567-44de-45db-977c-50fc9725b092-operator-scripts\") pod \"barbican-e949-account-create-update-swzjb\" (UID: \"0e4d0567-44de-45db-977c-50fc9725b092\") " pod="openstack-kuttl-tests/barbican-e949-account-create-update-swzjb" Jan 20 17:23:27 crc kubenswrapper[4558]: I0120 17:23:27.017379 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-db-create-gh87k" Jan 20 17:23:27 crc kubenswrapper[4558]: I0120 17:23:27.029674 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z2c5k\" (UniqueName: \"kubernetes.io/projected/0e4d0567-44de-45db-977c-50fc9725b092-kube-api-access-z2c5k\") pod \"barbican-e949-account-create-update-swzjb\" (UID: \"0e4d0567-44de-45db-977c-50fc9725b092\") " pod="openstack-kuttl-tests/barbican-e949-account-create-update-swzjb" Jan 20 17:23:27 crc kubenswrapper[4558]: I0120 17:23:27.156483 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-db-sync-x2fdq" Jan 20 17:23:27 crc kubenswrapper[4558]: I0120 17:23:27.165948 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-e949-account-create-update-swzjb" Jan 20 17:23:27 crc kubenswrapper[4558]: I0120 17:23:27.255780 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-b136-account-create-update-x288d"] Jan 20 17:23:27 crc kubenswrapper[4558]: I0120 17:23:27.342696 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-b136-account-create-update-x288d" event={"ID":"72189413-de76-4a83-87ce-441c69a4e319","Type":"ContainerStarted","Data":"49dedd67d5a0753180798baccf0a981b69ce5ce23d9eea75e68d2f6f7af657c2"} Jan 20 17:23:27 crc kubenswrapper[4558]: I0120 17:23:27.364748 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-db-create-sz4kq"] Jan 20 17:23:27 crc kubenswrapper[4558]: W0120 17:23:27.367766 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda7cbf118_c70e_4058_b7b8_e14883e1fdc2.slice/crio-7cd2596d76e4acb0e380c595eb18ac63629386909de0d94e98a229f68ff37942 WatchSource:0}: Error finding container 7cd2596d76e4acb0e380c595eb18ac63629386909de0d94e98a229f68ff37942: Status 404 returned error can't find the container with id 7cd2596d76e4acb0e380c595eb18ac63629386909de0d94e98a229f68ff37942 Jan 20 17:23:27 crc kubenswrapper[4558]: I0120 17:23:27.495084 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-db-create-4t4nh"] Jan 20 17:23:27 crc kubenswrapper[4558]: I0120 17:23:27.570621 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-43e3-account-create-update-l4gms"] Jan 20 17:23:27 crc kubenswrapper[4558]: I0120 17:23:27.673079 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-db-sync-x2fdq"] Jan 20 17:23:27 crc kubenswrapper[4558]: W0120 17:23:27.677257 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod213f0f23_1d2e_456d_9a25_de15571358a2.slice/crio-8ce629da2085c2696a3c9c67b5c564935f4cc787ba2b58e73c92e1b1dcdb69b4 WatchSource:0}: Error finding container 8ce629da2085c2696a3c9c67b5c564935f4cc787ba2b58e73c92e1b1dcdb69b4: Status 404 returned error can't find the container with id 8ce629da2085c2696a3c9c67b5c564935f4cc787ba2b58e73c92e1b1dcdb69b4 Jan 20 17:23:27 crc kubenswrapper[4558]: I0120 17:23:27.710076 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-db-create-gh87k"] Jan 20 17:23:27 crc kubenswrapper[4558]: W0120 17:23:27.754635 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod33a5b64d_fca0_4a5f_92df_908c67df28cc.slice/crio-efec8231afbc4cdbc16b1d636e9df9be30fe57e705c1b1952d5ae5e6d0b15c89 WatchSource:0}: Error finding container efec8231afbc4cdbc16b1d636e9df9be30fe57e705c1b1952d5ae5e6d0b15c89: Status 404 returned error can't find the container with id efec8231afbc4cdbc16b1d636e9df9be30fe57e705c1b1952d5ae5e6d0b15c89 Jan 20 17:23:27 crc kubenswrapper[4558]: I0120 17:23:27.797911 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-e949-account-create-update-swzjb"] Jan 20 17:23:27 crc kubenswrapper[4558]: W0120 17:23:27.818946 4558 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0e4d0567_44de_45db_977c_50fc9725b092.slice/crio-0dd57e846afb0606ae7ebe3f4f3842a87bd93699aec0be92cb7eafbe5b79737a WatchSource:0}: Error finding container 0dd57e846afb0606ae7ebe3f4f3842a87bd93699aec0be92cb7eafbe5b79737a: Status 404 returned error can't find the container with id 0dd57e846afb0606ae7ebe3f4f3842a87bd93699aec0be92cb7eafbe5b79737a Jan 20 17:23:28 crc kubenswrapper[4558]: I0120 17:23:28.375970 4558 generic.go:334] "Generic (PLEG): container finished" podID="3c810cd2-bc0a-407e-8626-e52eb0ba5296" containerID="ff11f5de1d67fe6865fff0f4a7d6166b294cda32601d7f50b6f2472fca230f0f" exitCode=0 Jan 20 17:23:28 crc kubenswrapper[4558]: I0120 17:23:28.376323 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-create-4t4nh" event={"ID":"3c810cd2-bc0a-407e-8626-e52eb0ba5296","Type":"ContainerDied","Data":"ff11f5de1d67fe6865fff0f4a7d6166b294cda32601d7f50b6f2472fca230f0f"} Jan 20 17:23:28 crc kubenswrapper[4558]: I0120 17:23:28.376359 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-create-4t4nh" event={"ID":"3c810cd2-bc0a-407e-8626-e52eb0ba5296","Type":"ContainerStarted","Data":"c9eefc73f2c6d64adf4afe5088fc0474951eadad16c46ca7c62d64f65ae75208"} Jan 20 17:23:28 crc kubenswrapper[4558]: I0120 17:23:28.384965 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-e949-account-create-update-swzjb" event={"ID":"0e4d0567-44de-45db-977c-50fc9725b092","Type":"ContainerDied","Data":"f11848ac2fc339da2a4334199ad841e032de939b5925b9d1a2ce800535efd8c2"} Jan 20 17:23:28 crc kubenswrapper[4558]: I0120 17:23:28.384938 4558 generic.go:334] "Generic (PLEG): container finished" podID="0e4d0567-44de-45db-977c-50fc9725b092" containerID="f11848ac2fc339da2a4334199ad841e032de939b5925b9d1a2ce800535efd8c2" exitCode=0 Jan 20 17:23:28 crc kubenswrapper[4558]: I0120 17:23:28.385277 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-e949-account-create-update-swzjb" event={"ID":"0e4d0567-44de-45db-977c-50fc9725b092","Type":"ContainerStarted","Data":"0dd57e846afb0606ae7ebe3f4f3842a87bd93699aec0be92cb7eafbe5b79737a"} Jan 20 17:23:28 crc kubenswrapper[4558]: I0120 17:23:28.398097 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-sync-x2fdq" event={"ID":"213f0f23-1d2e-456d-9a25-de15571358a2","Type":"ContainerStarted","Data":"17c1bc185af60a311f104555bef3f1cc5c63333bd26c434a782630b2668d6f84"} Jan 20 17:23:28 crc kubenswrapper[4558]: I0120 17:23:28.398194 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-sync-x2fdq" event={"ID":"213f0f23-1d2e-456d-9a25-de15571358a2","Type":"ContainerStarted","Data":"8ce629da2085c2696a3c9c67b5c564935f4cc787ba2b58e73c92e1b1dcdb69b4"} Jan 20 17:23:28 crc kubenswrapper[4558]: I0120 17:23:28.400952 4558 generic.go:334] "Generic (PLEG): container finished" podID="d39d453b-abd1-42ef-8b7c-0cf6d9404ad6" containerID="bf0dac54002843637dcb1d66cd90c6114eab7b2d37bfe7b7235714925a431b60" exitCode=0 Jan 20 17:23:28 crc kubenswrapper[4558]: I0120 17:23:28.401035 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-43e3-account-create-update-l4gms" event={"ID":"d39d453b-abd1-42ef-8b7c-0cf6d9404ad6","Type":"ContainerDied","Data":"bf0dac54002843637dcb1d66cd90c6114eab7b2d37bfe7b7235714925a431b60"} Jan 20 17:23:28 crc kubenswrapper[4558]: I0120 17:23:28.401067 4558 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-43e3-account-create-update-l4gms" event={"ID":"d39d453b-abd1-42ef-8b7c-0cf6d9404ad6","Type":"ContainerStarted","Data":"168b789481dbd0897062755ce23b0c64e81b841b91fb40d9ee6c59e0e5fafecc"} Jan 20 17:23:28 crc kubenswrapper[4558]: I0120 17:23:28.402916 4558 generic.go:334] "Generic (PLEG): container finished" podID="a7cbf118-c70e-4058-b7b8-e14883e1fdc2" containerID="0af8204595f0ad0b8bd69669cc56303c864bdc64f7220c28eb66afc83033aa7c" exitCode=0 Jan 20 17:23:28 crc kubenswrapper[4558]: I0120 17:23:28.402992 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-create-sz4kq" event={"ID":"a7cbf118-c70e-4058-b7b8-e14883e1fdc2","Type":"ContainerDied","Data":"0af8204595f0ad0b8bd69669cc56303c864bdc64f7220c28eb66afc83033aa7c"} Jan 20 17:23:28 crc kubenswrapper[4558]: I0120 17:23:28.403024 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-create-sz4kq" event={"ID":"a7cbf118-c70e-4058-b7b8-e14883e1fdc2","Type":"ContainerStarted","Data":"7cd2596d76e4acb0e380c595eb18ac63629386909de0d94e98a229f68ff37942"} Jan 20 17:23:28 crc kubenswrapper[4558]: I0120 17:23:28.404910 4558 generic.go:334] "Generic (PLEG): container finished" podID="33a5b64d-fca0-4a5f-92df-908c67df28cc" containerID="2b330ecbee7bbb3b71ca63bcecf70606762e1f81d67e6e0a6cfab6537f33a7a4" exitCode=0 Jan 20 17:23:28 crc kubenswrapper[4558]: I0120 17:23:28.405010 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-create-gh87k" event={"ID":"33a5b64d-fca0-4a5f-92df-908c67df28cc","Type":"ContainerDied","Data":"2b330ecbee7bbb3b71ca63bcecf70606762e1f81d67e6e0a6cfab6537f33a7a4"} Jan 20 17:23:28 crc kubenswrapper[4558]: I0120 17:23:28.405051 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-create-gh87k" event={"ID":"33a5b64d-fca0-4a5f-92df-908c67df28cc","Type":"ContainerStarted","Data":"efec8231afbc4cdbc16b1d636e9df9be30fe57e705c1b1952d5ae5e6d0b15c89"} Jan 20 17:23:28 crc kubenswrapper[4558]: I0120 17:23:28.409307 4558 generic.go:334] "Generic (PLEG): container finished" podID="72189413-de76-4a83-87ce-441c69a4e319" containerID="a48e3d27977f717a8a67a15fc538b85cc155b19326c26e2228f720ee64ecbce3" exitCode=0 Jan 20 17:23:28 crc kubenswrapper[4558]: I0120 17:23:28.409371 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-b136-account-create-update-x288d" event={"ID":"72189413-de76-4a83-87ce-441c69a4e319","Type":"ContainerDied","Data":"a48e3d27977f717a8a67a15fc538b85cc155b19326c26e2228f720ee64ecbce3"} Jan 20 17:23:28 crc kubenswrapper[4558]: I0120 17:23:28.446218 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/keystone-db-sync-x2fdq" podStartSLOduration=2.446195956 podStartE2EDuration="2.446195956s" podCreationTimestamp="2026-01-20 17:23:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:23:28.437291717 +0000 UTC m=+2502.197629683" watchObservedRunningTime="2026-01-20 17:23:28.446195956 +0000 UTC m=+2502.206533923" Jan 20 17:23:29 crc kubenswrapper[4558]: I0120 17:23:29.738809 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-db-create-4t4nh" Jan 20 17:23:29 crc kubenswrapper[4558]: I0120 17:23:29.860958 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3c810cd2-bc0a-407e-8626-e52eb0ba5296-operator-scripts\") pod \"3c810cd2-bc0a-407e-8626-e52eb0ba5296\" (UID: \"3c810cd2-bc0a-407e-8626-e52eb0ba5296\") " Jan 20 17:23:29 crc kubenswrapper[4558]: I0120 17:23:29.861367 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v29cw\" (UniqueName: \"kubernetes.io/projected/3c810cd2-bc0a-407e-8626-e52eb0ba5296-kube-api-access-v29cw\") pod \"3c810cd2-bc0a-407e-8626-e52eb0ba5296\" (UID: \"3c810cd2-bc0a-407e-8626-e52eb0ba5296\") " Jan 20 17:23:29 crc kubenswrapper[4558]: I0120 17:23:29.861682 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3c810cd2-bc0a-407e-8626-e52eb0ba5296-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "3c810cd2-bc0a-407e-8626-e52eb0ba5296" (UID: "3c810cd2-bc0a-407e-8626-e52eb0ba5296"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:23:29 crc kubenswrapper[4558]: I0120 17:23:29.862270 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3c810cd2-bc0a-407e-8626-e52eb0ba5296-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:29 crc kubenswrapper[4558]: I0120 17:23:29.867033 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3c810cd2-bc0a-407e-8626-e52eb0ba5296-kube-api-access-v29cw" (OuterVolumeSpecName: "kube-api-access-v29cw") pod "3c810cd2-bc0a-407e-8626-e52eb0ba5296" (UID: "3c810cd2-bc0a-407e-8626-e52eb0ba5296"). InnerVolumeSpecName "kube-api-access-v29cw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:23:29 crc kubenswrapper[4558]: I0120 17:23:29.933396 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-e949-account-create-update-swzjb" Jan 20 17:23:29 crc kubenswrapper[4558]: I0120 17:23:29.946208 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-b136-account-create-update-x288d" Jan 20 17:23:29 crc kubenswrapper[4558]: I0120 17:23:29.952908 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-db-create-gh87k" Jan 20 17:23:29 crc kubenswrapper[4558]: I0120 17:23:29.957074 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-43e3-account-create-update-l4gms" Jan 20 17:23:29 crc kubenswrapper[4558]: I0120 17:23:29.963733 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v29cw\" (UniqueName: \"kubernetes.io/projected/3c810cd2-bc0a-407e-8626-e52eb0ba5296-kube-api-access-v29cw\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:29 crc kubenswrapper[4558]: I0120 17:23:29.965442 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-db-create-sz4kq" Jan 20 17:23:30 crc kubenswrapper[4558]: I0120 17:23:30.064830 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s94tv\" (UniqueName: \"kubernetes.io/projected/a7cbf118-c70e-4058-b7b8-e14883e1fdc2-kube-api-access-s94tv\") pod \"a7cbf118-c70e-4058-b7b8-e14883e1fdc2\" (UID: \"a7cbf118-c70e-4058-b7b8-e14883e1fdc2\") " Jan 20 17:23:30 crc kubenswrapper[4558]: I0120 17:23:30.064881 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pkfvq\" (UniqueName: \"kubernetes.io/projected/d39d453b-abd1-42ef-8b7c-0cf6d9404ad6-kube-api-access-pkfvq\") pod \"d39d453b-abd1-42ef-8b7c-0cf6d9404ad6\" (UID: \"d39d453b-abd1-42ef-8b7c-0cf6d9404ad6\") " Jan 20 17:23:30 crc kubenswrapper[4558]: I0120 17:23:30.064935 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vmkqm\" (UniqueName: \"kubernetes.io/projected/33a5b64d-fca0-4a5f-92df-908c67df28cc-kube-api-access-vmkqm\") pod \"33a5b64d-fca0-4a5f-92df-908c67df28cc\" (UID: \"33a5b64d-fca0-4a5f-92df-908c67df28cc\") " Jan 20 17:23:30 crc kubenswrapper[4558]: I0120 17:23:30.064992 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d39d453b-abd1-42ef-8b7c-0cf6d9404ad6-operator-scripts\") pod \"d39d453b-abd1-42ef-8b7c-0cf6d9404ad6\" (UID: \"d39d453b-abd1-42ef-8b7c-0cf6d9404ad6\") " Jan 20 17:23:30 crc kubenswrapper[4558]: I0120 17:23:30.065060 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/72189413-de76-4a83-87ce-441c69a4e319-operator-scripts\") pod \"72189413-de76-4a83-87ce-441c69a4e319\" (UID: \"72189413-de76-4a83-87ce-441c69a4e319\") " Jan 20 17:23:30 crc kubenswrapper[4558]: I0120 17:23:30.065082 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/33a5b64d-fca0-4a5f-92df-908c67df28cc-operator-scripts\") pod \"33a5b64d-fca0-4a5f-92df-908c67df28cc\" (UID: \"33a5b64d-fca0-4a5f-92df-908c67df28cc\") " Jan 20 17:23:30 crc kubenswrapper[4558]: I0120 17:23:30.065133 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0e4d0567-44de-45db-977c-50fc9725b092-operator-scripts\") pod \"0e4d0567-44de-45db-977c-50fc9725b092\" (UID: \"0e4d0567-44de-45db-977c-50fc9725b092\") " Jan 20 17:23:30 crc kubenswrapper[4558]: I0120 17:23:30.065313 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a7cbf118-c70e-4058-b7b8-e14883e1fdc2-operator-scripts\") pod \"a7cbf118-c70e-4058-b7b8-e14883e1fdc2\" (UID: \"a7cbf118-c70e-4058-b7b8-e14883e1fdc2\") " Jan 20 17:23:30 crc kubenswrapper[4558]: I0120 17:23:30.065342 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5vtqh\" (UniqueName: \"kubernetes.io/projected/72189413-de76-4a83-87ce-441c69a4e319-kube-api-access-5vtqh\") pod \"72189413-de76-4a83-87ce-441c69a4e319\" (UID: \"72189413-de76-4a83-87ce-441c69a4e319\") " Jan 20 17:23:30 crc kubenswrapper[4558]: I0120 17:23:30.065409 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z2c5k\" (UniqueName: 
\"kubernetes.io/projected/0e4d0567-44de-45db-977c-50fc9725b092-kube-api-access-z2c5k\") pod \"0e4d0567-44de-45db-977c-50fc9725b092\" (UID: \"0e4d0567-44de-45db-977c-50fc9725b092\") " Jan 20 17:23:30 crc kubenswrapper[4558]: I0120 17:23:30.065992 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a7cbf118-c70e-4058-b7b8-e14883e1fdc2-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a7cbf118-c70e-4058-b7b8-e14883e1fdc2" (UID: "a7cbf118-c70e-4058-b7b8-e14883e1fdc2"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:23:30 crc kubenswrapper[4558]: I0120 17:23:30.066036 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0e4d0567-44de-45db-977c-50fc9725b092-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "0e4d0567-44de-45db-977c-50fc9725b092" (UID: "0e4d0567-44de-45db-977c-50fc9725b092"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:23:30 crc kubenswrapper[4558]: I0120 17:23:30.066037 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d39d453b-abd1-42ef-8b7c-0cf6d9404ad6-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d39d453b-abd1-42ef-8b7c-0cf6d9404ad6" (UID: "d39d453b-abd1-42ef-8b7c-0cf6d9404ad6"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:23:30 crc kubenswrapper[4558]: I0120 17:23:30.066193 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/72189413-de76-4a83-87ce-441c69a4e319-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "72189413-de76-4a83-87ce-441c69a4e319" (UID: "72189413-de76-4a83-87ce-441c69a4e319"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:23:30 crc kubenswrapper[4558]: I0120 17:23:30.066622 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/33a5b64d-fca0-4a5f-92df-908c67df28cc-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "33a5b64d-fca0-4a5f-92df-908c67df28cc" (UID: "33a5b64d-fca0-4a5f-92df-908c67df28cc"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:23:30 crc kubenswrapper[4558]: I0120 17:23:30.070300 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0e4d0567-44de-45db-977c-50fc9725b092-kube-api-access-z2c5k" (OuterVolumeSpecName: "kube-api-access-z2c5k") pod "0e4d0567-44de-45db-977c-50fc9725b092" (UID: "0e4d0567-44de-45db-977c-50fc9725b092"). InnerVolumeSpecName "kube-api-access-z2c5k". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:23:30 crc kubenswrapper[4558]: I0120 17:23:30.070468 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a7cbf118-c70e-4058-b7b8-e14883e1fdc2-kube-api-access-s94tv" (OuterVolumeSpecName: "kube-api-access-s94tv") pod "a7cbf118-c70e-4058-b7b8-e14883e1fdc2" (UID: "a7cbf118-c70e-4058-b7b8-e14883e1fdc2"). InnerVolumeSpecName "kube-api-access-s94tv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:23:30 crc kubenswrapper[4558]: I0120 17:23:30.070947 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d39d453b-abd1-42ef-8b7c-0cf6d9404ad6-kube-api-access-pkfvq" (OuterVolumeSpecName: "kube-api-access-pkfvq") pod "d39d453b-abd1-42ef-8b7c-0cf6d9404ad6" (UID: "d39d453b-abd1-42ef-8b7c-0cf6d9404ad6"). InnerVolumeSpecName "kube-api-access-pkfvq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:23:30 crc kubenswrapper[4558]: I0120 17:23:30.071043 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/72189413-de76-4a83-87ce-441c69a4e319-kube-api-access-5vtqh" (OuterVolumeSpecName: "kube-api-access-5vtqh") pod "72189413-de76-4a83-87ce-441c69a4e319" (UID: "72189413-de76-4a83-87ce-441c69a4e319"). InnerVolumeSpecName "kube-api-access-5vtqh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:23:30 crc kubenswrapper[4558]: I0120 17:23:30.071622 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/33a5b64d-fca0-4a5f-92df-908c67df28cc-kube-api-access-vmkqm" (OuterVolumeSpecName: "kube-api-access-vmkqm") pod "33a5b64d-fca0-4a5f-92df-908c67df28cc" (UID: "33a5b64d-fca0-4a5f-92df-908c67df28cc"). InnerVolumeSpecName "kube-api-access-vmkqm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:23:30 crc kubenswrapper[4558]: I0120 17:23:30.167933 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d39d453b-abd1-42ef-8b7c-0cf6d9404ad6-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:30 crc kubenswrapper[4558]: I0120 17:23:30.167961 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/33a5b64d-fca0-4a5f-92df-908c67df28cc-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:30 crc kubenswrapper[4558]: I0120 17:23:30.167971 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/72189413-de76-4a83-87ce-441c69a4e319-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:30 crc kubenswrapper[4558]: I0120 17:23:30.167983 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0e4d0567-44de-45db-977c-50fc9725b092-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:30 crc kubenswrapper[4558]: I0120 17:23:30.167995 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5vtqh\" (UniqueName: \"kubernetes.io/projected/72189413-de76-4a83-87ce-441c69a4e319-kube-api-access-5vtqh\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:30 crc kubenswrapper[4558]: I0120 17:23:30.168005 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a7cbf118-c70e-4058-b7b8-e14883e1fdc2-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:30 crc kubenswrapper[4558]: I0120 17:23:30.168014 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z2c5k\" (UniqueName: \"kubernetes.io/projected/0e4d0567-44de-45db-977c-50fc9725b092-kube-api-access-z2c5k\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:30 crc kubenswrapper[4558]: I0120 17:23:30.168024 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s94tv\" (UniqueName: 
\"kubernetes.io/projected/a7cbf118-c70e-4058-b7b8-e14883e1fdc2-kube-api-access-s94tv\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:30 crc kubenswrapper[4558]: I0120 17:23:30.168033 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pkfvq\" (UniqueName: \"kubernetes.io/projected/d39d453b-abd1-42ef-8b7c-0cf6d9404ad6-kube-api-access-pkfvq\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:30 crc kubenswrapper[4558]: I0120 17:23:30.168041 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vmkqm\" (UniqueName: \"kubernetes.io/projected/33a5b64d-fca0-4a5f-92df-908c67df28cc-kube-api-access-vmkqm\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:30 crc kubenswrapper[4558]: I0120 17:23:30.429498 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-db-create-gh87k" Jan 20 17:23:30 crc kubenswrapper[4558]: I0120 17:23:30.429489 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-create-gh87k" event={"ID":"33a5b64d-fca0-4a5f-92df-908c67df28cc","Type":"ContainerDied","Data":"efec8231afbc4cdbc16b1d636e9df9be30fe57e705c1b1952d5ae5e6d0b15c89"} Jan 20 17:23:30 crc kubenswrapper[4558]: I0120 17:23:30.429637 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="efec8231afbc4cdbc16b1d636e9df9be30fe57e705c1b1952d5ae5e6d0b15c89" Jan 20 17:23:30 crc kubenswrapper[4558]: I0120 17:23:30.431445 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-b136-account-create-update-x288d" event={"ID":"72189413-de76-4a83-87ce-441c69a4e319","Type":"ContainerDied","Data":"49dedd67d5a0753180798baccf0a981b69ce5ce23d9eea75e68d2f6f7af657c2"} Jan 20 17:23:30 crc kubenswrapper[4558]: I0120 17:23:30.431473 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="49dedd67d5a0753180798baccf0a981b69ce5ce23d9eea75e68d2f6f7af657c2" Jan 20 17:23:30 crc kubenswrapper[4558]: I0120 17:23:30.431531 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-b136-account-create-update-x288d" Jan 20 17:23:30 crc kubenswrapper[4558]: I0120 17:23:30.434127 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-create-4t4nh" event={"ID":"3c810cd2-bc0a-407e-8626-e52eb0ba5296","Type":"ContainerDied","Data":"c9eefc73f2c6d64adf4afe5088fc0474951eadad16c46ca7c62d64f65ae75208"} Jan 20 17:23:30 crc kubenswrapper[4558]: I0120 17:23:30.434216 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c9eefc73f2c6d64adf4afe5088fc0474951eadad16c46ca7c62d64f65ae75208" Jan 20 17:23:30 crc kubenswrapper[4558]: I0120 17:23:30.434144 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-db-create-4t4nh" Jan 20 17:23:30 crc kubenswrapper[4558]: I0120 17:23:30.436143 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-e949-account-create-update-swzjb" event={"ID":"0e4d0567-44de-45db-977c-50fc9725b092","Type":"ContainerDied","Data":"0dd57e846afb0606ae7ebe3f4f3842a87bd93699aec0be92cb7eafbe5b79737a"} Jan 20 17:23:30 crc kubenswrapper[4558]: I0120 17:23:30.436205 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0dd57e846afb0606ae7ebe3f4f3842a87bd93699aec0be92cb7eafbe5b79737a" Jan 20 17:23:30 crc kubenswrapper[4558]: I0120 17:23:30.436209 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-e949-account-create-update-swzjb" Jan 20 17:23:30 crc kubenswrapper[4558]: I0120 17:23:30.437924 4558 generic.go:334] "Generic (PLEG): container finished" podID="213f0f23-1d2e-456d-9a25-de15571358a2" containerID="17c1bc185af60a311f104555bef3f1cc5c63333bd26c434a782630b2668d6f84" exitCode=0 Jan 20 17:23:30 crc kubenswrapper[4558]: I0120 17:23:30.437994 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-sync-x2fdq" event={"ID":"213f0f23-1d2e-456d-9a25-de15571358a2","Type":"ContainerDied","Data":"17c1bc185af60a311f104555bef3f1cc5c63333bd26c434a782630b2668d6f84"} Jan 20 17:23:30 crc kubenswrapper[4558]: I0120 17:23:30.446330 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-43e3-account-create-update-l4gms" event={"ID":"d39d453b-abd1-42ef-8b7c-0cf6d9404ad6","Type":"ContainerDied","Data":"168b789481dbd0897062755ce23b0c64e81b841b91fb40d9ee6c59e0e5fafecc"} Jan 20 17:23:30 crc kubenswrapper[4558]: I0120 17:23:30.446348 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-43e3-account-create-update-l4gms" Jan 20 17:23:30 crc kubenswrapper[4558]: I0120 17:23:30.446365 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="168b789481dbd0897062755ce23b0c64e81b841b91fb40d9ee6c59e0e5fafecc" Jan 20 17:23:30 crc kubenswrapper[4558]: I0120 17:23:30.448079 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-create-sz4kq" event={"ID":"a7cbf118-c70e-4058-b7b8-e14883e1fdc2","Type":"ContainerDied","Data":"7cd2596d76e4acb0e380c595eb18ac63629386909de0d94e98a229f68ff37942"} Jan 20 17:23:30 crc kubenswrapper[4558]: I0120 17:23:30.448106 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-db-create-sz4kq" Jan 20 17:23:30 crc kubenswrapper[4558]: I0120 17:23:30.448107 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7cd2596d76e4acb0e380c595eb18ac63629386909de0d94e98a229f68ff37942" Jan 20 17:23:31 crc kubenswrapper[4558]: I0120 17:23:31.741239 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-db-sync-x2fdq" Jan 20 17:23:31 crc kubenswrapper[4558]: I0120 17:23:31.897760 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-25xgp\" (UniqueName: \"kubernetes.io/projected/213f0f23-1d2e-456d-9a25-de15571358a2-kube-api-access-25xgp\") pod \"213f0f23-1d2e-456d-9a25-de15571358a2\" (UID: \"213f0f23-1d2e-456d-9a25-de15571358a2\") " Jan 20 17:23:31 crc kubenswrapper[4558]: I0120 17:23:31.897918 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/213f0f23-1d2e-456d-9a25-de15571358a2-config-data\") pod \"213f0f23-1d2e-456d-9a25-de15571358a2\" (UID: \"213f0f23-1d2e-456d-9a25-de15571358a2\") " Jan 20 17:23:31 crc kubenswrapper[4558]: I0120 17:23:31.898049 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/213f0f23-1d2e-456d-9a25-de15571358a2-combined-ca-bundle\") pod \"213f0f23-1d2e-456d-9a25-de15571358a2\" (UID: \"213f0f23-1d2e-456d-9a25-de15571358a2\") " Jan 20 17:23:31 crc kubenswrapper[4558]: I0120 17:23:31.904066 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/213f0f23-1d2e-456d-9a25-de15571358a2-kube-api-access-25xgp" (OuterVolumeSpecName: "kube-api-access-25xgp") pod "213f0f23-1d2e-456d-9a25-de15571358a2" (UID: "213f0f23-1d2e-456d-9a25-de15571358a2"). InnerVolumeSpecName "kube-api-access-25xgp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:23:31 crc kubenswrapper[4558]: I0120 17:23:31.919374 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/213f0f23-1d2e-456d-9a25-de15571358a2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "213f0f23-1d2e-456d-9a25-de15571358a2" (UID: "213f0f23-1d2e-456d-9a25-de15571358a2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:23:31 crc kubenswrapper[4558]: I0120 17:23:31.938568 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/213f0f23-1d2e-456d-9a25-de15571358a2-config-data" (OuterVolumeSpecName: "config-data") pod "213f0f23-1d2e-456d-9a25-de15571358a2" (UID: "213f0f23-1d2e-456d-9a25-de15571358a2"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.000958 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-25xgp\" (UniqueName: \"kubernetes.io/projected/213f0f23-1d2e-456d-9a25-de15571358a2-kube-api-access-25xgp\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.000993 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/213f0f23-1d2e-456d-9a25-de15571358a2-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.001005 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/213f0f23-1d2e-456d-9a25-de15571358a2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.468831 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-sync-x2fdq" event={"ID":"213f0f23-1d2e-456d-9a25-de15571358a2","Type":"ContainerDied","Data":"8ce629da2085c2696a3c9c67b5c564935f4cc787ba2b58e73c92e1b1dcdb69b4"} Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.468885 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8ce629da2085c2696a3c9c67b5c564935f4cc787ba2b58e73c92e1b1dcdb69b4" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.468884 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-db-sync-x2fdq" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.590204 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-rcz7c"] Jan 20 17:23:32 crc kubenswrapper[4558]: E0120 17:23:32.590516 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7cbf118-c70e-4058-b7b8-e14883e1fdc2" containerName="mariadb-database-create" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.590528 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7cbf118-c70e-4058-b7b8-e14883e1fdc2" containerName="mariadb-database-create" Jan 20 17:23:32 crc kubenswrapper[4558]: E0120 17:23:32.590548 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="33a5b64d-fca0-4a5f-92df-908c67df28cc" containerName="mariadb-database-create" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.590554 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="33a5b64d-fca0-4a5f-92df-908c67df28cc" containerName="mariadb-database-create" Jan 20 17:23:32 crc kubenswrapper[4558]: E0120 17:23:32.590566 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e4d0567-44de-45db-977c-50fc9725b092" containerName="mariadb-account-create-update" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.590572 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e4d0567-44de-45db-977c-50fc9725b092" containerName="mariadb-account-create-update" Jan 20 17:23:32 crc kubenswrapper[4558]: E0120 17:23:32.590579 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="213f0f23-1d2e-456d-9a25-de15571358a2" containerName="keystone-db-sync" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.590585 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="213f0f23-1d2e-456d-9a25-de15571358a2" containerName="keystone-db-sync" Jan 20 17:23:32 crc kubenswrapper[4558]: E0120 17:23:32.590594 4558 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="72189413-de76-4a83-87ce-441c69a4e319" containerName="mariadb-account-create-update" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.590599 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="72189413-de76-4a83-87ce-441c69a4e319" containerName="mariadb-account-create-update" Jan 20 17:23:32 crc kubenswrapper[4558]: E0120 17:23:32.590609 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d39d453b-abd1-42ef-8b7c-0cf6d9404ad6" containerName="mariadb-account-create-update" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.590614 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d39d453b-abd1-42ef-8b7c-0cf6d9404ad6" containerName="mariadb-account-create-update" Jan 20 17:23:32 crc kubenswrapper[4558]: E0120 17:23:32.590623 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c810cd2-bc0a-407e-8626-e52eb0ba5296" containerName="mariadb-database-create" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.590628 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c810cd2-bc0a-407e-8626-e52eb0ba5296" containerName="mariadb-database-create" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.594502 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="3c810cd2-bc0a-407e-8626-e52eb0ba5296" containerName="mariadb-database-create" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.594523 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a7cbf118-c70e-4058-b7b8-e14883e1fdc2" containerName="mariadb-database-create" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.595215 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="72189413-de76-4a83-87ce-441c69a4e319" containerName="mariadb-account-create-update" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.595231 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="33a5b64d-fca0-4a5f-92df-908c67df28cc" containerName="mariadb-database-create" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.595240 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="213f0f23-1d2e-456d-9a25-de15571358a2" containerName="keystone-db-sync" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.595269 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e4d0567-44de-45db-977c-50fc9725b092" containerName="mariadb-account-create-update" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.595278 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d39d453b-abd1-42ef-8b7c-0cf6d9404ad6" containerName="mariadb-account-create-update" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.595793 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-rcz7c" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.597478 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-rcz7c"] Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.598581 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-scripts" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.598910 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-keystone-dockercfg-njbkz" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.599074 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"osp-secret" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.599297 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.599407 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-config-data" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.704896 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.707646 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.709650 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.711427 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb55b781-1113-4879-8efd-d04a2284dffc-combined-ca-bundle\") pod \"keystone-bootstrap-rcz7c\" (UID: \"fb55b781-1113-4879-8efd-d04a2284dffc\") " pod="openstack-kuttl-tests/keystone-bootstrap-rcz7c" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.711461 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/fb55b781-1113-4879-8efd-d04a2284dffc-fernet-keys\") pod \"keystone-bootstrap-rcz7c\" (UID: \"fb55b781-1113-4879-8efd-d04a2284dffc\") " pod="openstack-kuttl-tests/keystone-bootstrap-rcz7c" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.711542 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fb55b781-1113-4879-8efd-d04a2284dffc-scripts\") pod \"keystone-bootstrap-rcz7c\" (UID: \"fb55b781-1113-4879-8efd-d04a2284dffc\") " pod="openstack-kuttl-tests/keystone-bootstrap-rcz7c" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.711573 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/fb55b781-1113-4879-8efd-d04a2284dffc-credential-keys\") pod \"keystone-bootstrap-rcz7c\" (UID: \"fb55b781-1113-4879-8efd-d04a2284dffc\") " pod="openstack-kuttl-tests/keystone-bootstrap-rcz7c" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.711670 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zw27l\" (UniqueName: \"kubernetes.io/projected/fb55b781-1113-4879-8efd-d04a2284dffc-kube-api-access-zw27l\") pod 
\"keystone-bootstrap-rcz7c\" (UID: \"fb55b781-1113-4879-8efd-d04a2284dffc\") " pod="openstack-kuttl-tests/keystone-bootstrap-rcz7c" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.711691 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb55b781-1113-4879-8efd-d04a2284dffc-config-data\") pod \"keystone-bootstrap-rcz7c\" (UID: \"fb55b781-1113-4879-8efd-d04a2284dffc\") " pod="openstack-kuttl-tests/keystone-bootstrap-rcz7c" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.714415 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.723575 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.771344 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-db-sync-plxqg"] Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.772521 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-db-sync-plxqg" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.777618 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-httpd-config" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.777837 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-neutron-dockercfg-4hpsm" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.777959 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-config" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.780980 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-db-sync-plxqg"] Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.799979 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-db-sync-dpr25"] Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.801301 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-db-sync-dpr25" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.805732 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-cinder-dockercfg-msjfn" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.805841 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-config-data" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.806039 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-scripts" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.814943 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb55b781-1113-4879-8efd-d04a2284dffc-combined-ca-bundle\") pod \"keystone-bootstrap-rcz7c\" (UID: \"fb55b781-1113-4879-8efd-d04a2284dffc\") " pod="openstack-kuttl-tests/keystone-bootstrap-rcz7c" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.815048 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65bcc9ab-370b-457f-a256-de44fa447395-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"65bcc9ab-370b-457f-a256-de44fa447395\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.815147 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/fb55b781-1113-4879-8efd-d04a2284dffc-fernet-keys\") pod \"keystone-bootstrap-rcz7c\" (UID: \"fb55b781-1113-4879-8efd-d04a2284dffc\") " pod="openstack-kuttl-tests/keystone-bootstrap-rcz7c" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.815269 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/65bcc9ab-370b-457f-a256-de44fa447395-scripts\") pod \"ceilometer-0\" (UID: \"65bcc9ab-370b-457f-a256-de44fa447395\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.815354 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/65bcc9ab-370b-457f-a256-de44fa447395-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"65bcc9ab-370b-457f-a256-de44fa447395\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.815418 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fb55b781-1113-4879-8efd-d04a2284dffc-scripts\") pod \"keystone-bootstrap-rcz7c\" (UID: \"fb55b781-1113-4879-8efd-d04a2284dffc\") " pod="openstack-kuttl-tests/keystone-bootstrap-rcz7c" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.815477 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/65bcc9ab-370b-457f-a256-de44fa447395-log-httpd\") pod \"ceilometer-0\" (UID: \"65bcc9ab-370b-457f-a256-de44fa447395\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.815559 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/fb55b781-1113-4879-8efd-d04a2284dffc-credential-keys\") 
pod \"keystone-bootstrap-rcz7c\" (UID: \"fb55b781-1113-4879-8efd-d04a2284dffc\") " pod="openstack-kuttl-tests/keystone-bootstrap-rcz7c" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.815637 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65bcc9ab-370b-457f-a256-de44fa447395-config-data\") pod \"ceilometer-0\" (UID: \"65bcc9ab-370b-457f-a256-de44fa447395\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.815836 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/65bcc9ab-370b-457f-a256-de44fa447395-run-httpd\") pod \"ceilometer-0\" (UID: \"65bcc9ab-370b-457f-a256-de44fa447395\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.815925 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zw27l\" (UniqueName: \"kubernetes.io/projected/fb55b781-1113-4879-8efd-d04a2284dffc-kube-api-access-zw27l\") pod \"keystone-bootstrap-rcz7c\" (UID: \"fb55b781-1113-4879-8efd-d04a2284dffc\") " pod="openstack-kuttl-tests/keystone-bootstrap-rcz7c" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.815989 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb55b781-1113-4879-8efd-d04a2284dffc-config-data\") pod \"keystone-bootstrap-rcz7c\" (UID: \"fb55b781-1113-4879-8efd-d04a2284dffc\") " pod="openstack-kuttl-tests/keystone-bootstrap-rcz7c" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.816085 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-27bj7\" (UniqueName: \"kubernetes.io/projected/65bcc9ab-370b-457f-a256-de44fa447395-kube-api-access-27bj7\") pod \"ceilometer-0\" (UID: \"65bcc9ab-370b-457f-a256-de44fa447395\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.825427 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb55b781-1113-4879-8efd-d04a2284dffc-combined-ca-bundle\") pod \"keystone-bootstrap-rcz7c\" (UID: \"fb55b781-1113-4879-8efd-d04a2284dffc\") " pod="openstack-kuttl-tests/keystone-bootstrap-rcz7c" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.833158 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb55b781-1113-4879-8efd-d04a2284dffc-config-data\") pod \"keystone-bootstrap-rcz7c\" (UID: \"fb55b781-1113-4879-8efd-d04a2284dffc\") " pod="openstack-kuttl-tests/keystone-bootstrap-rcz7c" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.833555 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fb55b781-1113-4879-8efd-d04a2284dffc-scripts\") pod \"keystone-bootstrap-rcz7c\" (UID: \"fb55b781-1113-4879-8efd-d04a2284dffc\") " pod="openstack-kuttl-tests/keystone-bootstrap-rcz7c" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.833698 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/fb55b781-1113-4879-8efd-d04a2284dffc-fernet-keys\") pod \"keystone-bootstrap-rcz7c\" (UID: \"fb55b781-1113-4879-8efd-d04a2284dffc\") " 
pod="openstack-kuttl-tests/keystone-bootstrap-rcz7c" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.875629 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/fb55b781-1113-4879-8efd-d04a2284dffc-credential-keys\") pod \"keystone-bootstrap-rcz7c\" (UID: \"fb55b781-1113-4879-8efd-d04a2284dffc\") " pod="openstack-kuttl-tests/keystone-bootstrap-rcz7c" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.880677 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zw27l\" (UniqueName: \"kubernetes.io/projected/fb55b781-1113-4879-8efd-d04a2284dffc-kube-api-access-zw27l\") pod \"keystone-bootstrap-rcz7c\" (UID: \"fb55b781-1113-4879-8efd-d04a2284dffc\") " pod="openstack-kuttl-tests/keystone-bootstrap-rcz7c" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.906144 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-db-sync-dpr25"] Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.916108 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-rcz7c" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.918519 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65bcc9ab-370b-457f-a256-de44fa447395-config-data\") pod \"ceilometer-0\" (UID: \"65bcc9ab-370b-457f-a256-de44fa447395\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.918595 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b03348ed-66d6-4076-a5fa-0ccf182e8e3c-config-data\") pod \"cinder-db-sync-dpr25\" (UID: \"b03348ed-66d6-4076-a5fa-0ccf182e8e3c\") " pod="openstack-kuttl-tests/cinder-db-sync-dpr25" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.918665 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b03348ed-66d6-4076-a5fa-0ccf182e8e3c-scripts\") pod \"cinder-db-sync-dpr25\" (UID: \"b03348ed-66d6-4076-a5fa-0ccf182e8e3c\") " pod="openstack-kuttl-tests/cinder-db-sync-dpr25" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.918699 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b03348ed-66d6-4076-a5fa-0ccf182e8e3c-etc-machine-id\") pod \"cinder-db-sync-dpr25\" (UID: \"b03348ed-66d6-4076-a5fa-0ccf182e8e3c\") " pod="openstack-kuttl-tests/cinder-db-sync-dpr25" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.918744 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/65bcc9ab-370b-457f-a256-de44fa447395-run-httpd\") pod \"ceilometer-0\" (UID: \"65bcc9ab-370b-457f-a256-de44fa447395\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.918766 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b03348ed-66d6-4076-a5fa-0ccf182e8e3c-db-sync-config-data\") pod \"cinder-db-sync-dpr25\" (UID: \"b03348ed-66d6-4076-a5fa-0ccf182e8e3c\") " pod="openstack-kuttl-tests/cinder-db-sync-dpr25" Jan 20 17:23:32 crc kubenswrapper[4558]: 
I0120 17:23:32.918828 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-27bj7\" (UniqueName: \"kubernetes.io/projected/65bcc9ab-370b-457f-a256-de44fa447395-kube-api-access-27bj7\") pod \"ceilometer-0\" (UID: \"65bcc9ab-370b-457f-a256-de44fa447395\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.918851 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/c28fa234-6cff-43e9-9cf5-a5cd2f3b67be-config\") pod \"neutron-db-sync-plxqg\" (UID: \"c28fa234-6cff-43e9-9cf5-a5cd2f3b67be\") " pod="openstack-kuttl-tests/neutron-db-sync-plxqg" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.918869 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c28fa234-6cff-43e9-9cf5-a5cd2f3b67be-combined-ca-bundle\") pod \"neutron-db-sync-plxqg\" (UID: \"c28fa234-6cff-43e9-9cf5-a5cd2f3b67be\") " pod="openstack-kuttl-tests/neutron-db-sync-plxqg" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.918907 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65bcc9ab-370b-457f-a256-de44fa447395-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"65bcc9ab-370b-457f-a256-de44fa447395\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.918935 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wbr49\" (UniqueName: \"kubernetes.io/projected/b03348ed-66d6-4076-a5fa-0ccf182e8e3c-kube-api-access-wbr49\") pod \"cinder-db-sync-dpr25\" (UID: \"b03348ed-66d6-4076-a5fa-0ccf182e8e3c\") " pod="openstack-kuttl-tests/cinder-db-sync-dpr25" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.918973 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/65bcc9ab-370b-457f-a256-de44fa447395-scripts\") pod \"ceilometer-0\" (UID: \"65bcc9ab-370b-457f-a256-de44fa447395\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.918999 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/65bcc9ab-370b-457f-a256-de44fa447395-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"65bcc9ab-370b-457f-a256-de44fa447395\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.919015 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/65bcc9ab-370b-457f-a256-de44fa447395-log-httpd\") pod \"ceilometer-0\" (UID: \"65bcc9ab-370b-457f-a256-de44fa447395\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.919053 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b03348ed-66d6-4076-a5fa-0ccf182e8e3c-combined-ca-bundle\") pod \"cinder-db-sync-dpr25\" (UID: \"b03348ed-66d6-4076-a5fa-0ccf182e8e3c\") " pod="openstack-kuttl-tests/cinder-db-sync-dpr25" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.919073 4558 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5gb2g\" (UniqueName: \"kubernetes.io/projected/c28fa234-6cff-43e9-9cf5-a5cd2f3b67be-kube-api-access-5gb2g\") pod \"neutron-db-sync-plxqg\" (UID: \"c28fa234-6cff-43e9-9cf5-a5cd2f3b67be\") " pod="openstack-kuttl-tests/neutron-db-sync-plxqg" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.921765 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/65bcc9ab-370b-457f-a256-de44fa447395-run-httpd\") pod \"ceilometer-0\" (UID: \"65bcc9ab-370b-457f-a256-de44fa447395\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.921963 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/65bcc9ab-370b-457f-a256-de44fa447395-log-httpd\") pod \"ceilometer-0\" (UID: \"65bcc9ab-370b-457f-a256-de44fa447395\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.924742 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/65bcc9ab-370b-457f-a256-de44fa447395-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"65bcc9ab-370b-457f-a256-de44fa447395\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.926595 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65bcc9ab-370b-457f-a256-de44fa447395-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"65bcc9ab-370b-457f-a256-de44fa447395\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.928508 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65bcc9ab-370b-457f-a256-de44fa447395-config-data\") pod \"ceilometer-0\" (UID: \"65bcc9ab-370b-457f-a256-de44fa447395\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.932726 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/65bcc9ab-370b-457f-a256-de44fa447395-scripts\") pod \"ceilometer-0\" (UID: \"65bcc9ab-370b-457f-a256-de44fa447395\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.936437 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/placement-db-sync-tbwhp"] Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.937977 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-db-sync-tbwhp" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.938763 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-27bj7\" (UniqueName: \"kubernetes.io/projected/65bcc9ab-370b-457f-a256-de44fa447395-kube-api-access-27bj7\") pod \"ceilometer-0\" (UID: \"65bcc9ab-370b-457f-a256-de44fa447395\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.948437 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"placement-placement-dockercfg-nmsw2" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.950748 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"placement-scripts" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.951026 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"placement-config-data" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.958821 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-db-sync-cnn24"] Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.960697 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-db-sync-cnn24" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.965006 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-barbican-dockercfg-vrxkb" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.969701 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-config-data" Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.969821 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-db-sync-cnn24"] Jan 20 17:23:32 crc kubenswrapper[4558]: I0120 17:23:32.976696 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-db-sync-tbwhp"] Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.020329 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e492b4c-9f94-4c1e-87e7-5f63dbb9344b-config-data\") pod \"placement-db-sync-tbwhp\" (UID: \"2e492b4c-9f94-4c1e-87e7-5f63dbb9344b\") " pod="openstack-kuttl-tests/placement-db-sync-tbwhp" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.020575 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b03348ed-66d6-4076-a5fa-0ccf182e8e3c-combined-ca-bundle\") pod \"cinder-db-sync-dpr25\" (UID: \"b03348ed-66d6-4076-a5fa-0ccf182e8e3c\") " pod="openstack-kuttl-tests/cinder-db-sync-dpr25" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.020613 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5gb2g\" (UniqueName: \"kubernetes.io/projected/c28fa234-6cff-43e9-9cf5-a5cd2f3b67be-kube-api-access-5gb2g\") pod \"neutron-db-sync-plxqg\" (UID: \"c28fa234-6cff-43e9-9cf5-a5cd2f3b67be\") " pod="openstack-kuttl-tests/neutron-db-sync-plxqg" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.020688 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b03348ed-66d6-4076-a5fa-0ccf182e8e3c-config-data\") pod \"cinder-db-sync-dpr25\" (UID: 
\"b03348ed-66d6-4076-a5fa-0ccf182e8e3c\") " pod="openstack-kuttl-tests/cinder-db-sync-dpr25" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.020729 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2e492b4c-9f94-4c1e-87e7-5f63dbb9344b-scripts\") pod \"placement-db-sync-tbwhp\" (UID: \"2e492b4c-9f94-4c1e-87e7-5f63dbb9344b\") " pod="openstack-kuttl-tests/placement-db-sync-tbwhp" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.020785 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b03348ed-66d6-4076-a5fa-0ccf182e8e3c-scripts\") pod \"cinder-db-sync-dpr25\" (UID: \"b03348ed-66d6-4076-a5fa-0ccf182e8e3c\") " pod="openstack-kuttl-tests/cinder-db-sync-dpr25" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.020831 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b03348ed-66d6-4076-a5fa-0ccf182e8e3c-etc-machine-id\") pod \"cinder-db-sync-dpr25\" (UID: \"b03348ed-66d6-4076-a5fa-0ccf182e8e3c\") " pod="openstack-kuttl-tests/cinder-db-sync-dpr25" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.020855 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2e492b4c-9f94-4c1e-87e7-5f63dbb9344b-logs\") pod \"placement-db-sync-tbwhp\" (UID: \"2e492b4c-9f94-4c1e-87e7-5f63dbb9344b\") " pod="openstack-kuttl-tests/placement-db-sync-tbwhp" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.020884 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b03348ed-66d6-4076-a5fa-0ccf182e8e3c-db-sync-config-data\") pod \"cinder-db-sync-dpr25\" (UID: \"b03348ed-66d6-4076-a5fa-0ccf182e8e3c\") " pod="openstack-kuttl-tests/cinder-db-sync-dpr25" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.020955 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/c28fa234-6cff-43e9-9cf5-a5cd2f3b67be-config\") pod \"neutron-db-sync-plxqg\" (UID: \"c28fa234-6cff-43e9-9cf5-a5cd2f3b67be\") " pod="openstack-kuttl-tests/neutron-db-sync-plxqg" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.020973 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c28fa234-6cff-43e9-9cf5-a5cd2f3b67be-combined-ca-bundle\") pod \"neutron-db-sync-plxqg\" (UID: \"c28fa234-6cff-43e9-9cf5-a5cd2f3b67be\") " pod="openstack-kuttl-tests/neutron-db-sync-plxqg" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.020998 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e492b4c-9f94-4c1e-87e7-5f63dbb9344b-combined-ca-bundle\") pod \"placement-db-sync-tbwhp\" (UID: \"2e492b4c-9f94-4c1e-87e7-5f63dbb9344b\") " pod="openstack-kuttl-tests/placement-db-sync-tbwhp" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.021040 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wbr49\" (UniqueName: \"kubernetes.io/projected/b03348ed-66d6-4076-a5fa-0ccf182e8e3c-kube-api-access-wbr49\") pod \"cinder-db-sync-dpr25\" (UID: \"b03348ed-66d6-4076-a5fa-0ccf182e8e3c\") " 
pod="openstack-kuttl-tests/cinder-db-sync-dpr25" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.021066 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9f5p8\" (UniqueName: \"kubernetes.io/projected/2e492b4c-9f94-4c1e-87e7-5f63dbb9344b-kube-api-access-9f5p8\") pod \"placement-db-sync-tbwhp\" (UID: \"2e492b4c-9f94-4c1e-87e7-5f63dbb9344b\") " pod="openstack-kuttl-tests/placement-db-sync-tbwhp" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.023350 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b03348ed-66d6-4076-a5fa-0ccf182e8e3c-etc-machine-id\") pod \"cinder-db-sync-dpr25\" (UID: \"b03348ed-66d6-4076-a5fa-0ccf182e8e3c\") " pod="openstack-kuttl-tests/cinder-db-sync-dpr25" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.027871 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c28fa234-6cff-43e9-9cf5-a5cd2f3b67be-combined-ca-bundle\") pod \"neutron-db-sync-plxqg\" (UID: \"c28fa234-6cff-43e9-9cf5-a5cd2f3b67be\") " pod="openstack-kuttl-tests/neutron-db-sync-plxqg" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.028132 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b03348ed-66d6-4076-a5fa-0ccf182e8e3c-config-data\") pod \"cinder-db-sync-dpr25\" (UID: \"b03348ed-66d6-4076-a5fa-0ccf182e8e3c\") " pod="openstack-kuttl-tests/cinder-db-sync-dpr25" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.028526 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/c28fa234-6cff-43e9-9cf5-a5cd2f3b67be-config\") pod \"neutron-db-sync-plxqg\" (UID: \"c28fa234-6cff-43e9-9cf5-a5cd2f3b67be\") " pod="openstack-kuttl-tests/neutron-db-sync-plxqg" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.029966 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.033593 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b03348ed-66d6-4076-a5fa-0ccf182e8e3c-db-sync-config-data\") pod \"cinder-db-sync-dpr25\" (UID: \"b03348ed-66d6-4076-a5fa-0ccf182e8e3c\") " pod="openstack-kuttl-tests/cinder-db-sync-dpr25" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.036102 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b03348ed-66d6-4076-a5fa-0ccf182e8e3c-scripts\") pod \"cinder-db-sync-dpr25\" (UID: \"b03348ed-66d6-4076-a5fa-0ccf182e8e3c\") " pod="openstack-kuttl-tests/cinder-db-sync-dpr25" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.036351 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b03348ed-66d6-4076-a5fa-0ccf182e8e3c-combined-ca-bundle\") pod \"cinder-db-sync-dpr25\" (UID: \"b03348ed-66d6-4076-a5fa-0ccf182e8e3c\") " pod="openstack-kuttl-tests/cinder-db-sync-dpr25" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.045673 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wbr49\" (UniqueName: \"kubernetes.io/projected/b03348ed-66d6-4076-a5fa-0ccf182e8e3c-kube-api-access-wbr49\") pod \"cinder-db-sync-dpr25\" (UID: \"b03348ed-66d6-4076-a5fa-0ccf182e8e3c\") " pod="openstack-kuttl-tests/cinder-db-sync-dpr25" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.045733 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5gb2g\" (UniqueName: \"kubernetes.io/projected/c28fa234-6cff-43e9-9cf5-a5cd2f3b67be-kube-api-access-5gb2g\") pod \"neutron-db-sync-plxqg\" (UID: \"c28fa234-6cff-43e9-9cf5-a5cd2f3b67be\") " pod="openstack-kuttl-tests/neutron-db-sync-plxqg" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.088773 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-db-sync-plxqg" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.126038 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2e492b4c-9f94-4c1e-87e7-5f63dbb9344b-logs\") pod \"placement-db-sync-tbwhp\" (UID: \"2e492b4c-9f94-4c1e-87e7-5f63dbb9344b\") " pod="openstack-kuttl-tests/placement-db-sync-tbwhp" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.126089 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acb70f0a-6559-44d8-9182-a43381f314b4-combined-ca-bundle\") pod \"barbican-db-sync-cnn24\" (UID: \"acb70f0a-6559-44d8-9182-a43381f314b4\") " pod="openstack-kuttl-tests/barbican-db-sync-cnn24" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.126138 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/acb70f0a-6559-44d8-9182-a43381f314b4-db-sync-config-data\") pod \"barbican-db-sync-cnn24\" (UID: \"acb70f0a-6559-44d8-9182-a43381f314b4\") " pod="openstack-kuttl-tests/barbican-db-sync-cnn24" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.126188 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e492b4c-9f94-4c1e-87e7-5f63dbb9344b-combined-ca-bundle\") pod \"placement-db-sync-tbwhp\" (UID: \"2e492b4c-9f94-4c1e-87e7-5f63dbb9344b\") " pod="openstack-kuttl-tests/placement-db-sync-tbwhp" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.126238 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9f5p8\" (UniqueName: \"kubernetes.io/projected/2e492b4c-9f94-4c1e-87e7-5f63dbb9344b-kube-api-access-9f5p8\") pod \"placement-db-sync-tbwhp\" (UID: \"2e492b4c-9f94-4c1e-87e7-5f63dbb9344b\") " pod="openstack-kuttl-tests/placement-db-sync-tbwhp" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.126274 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e492b4c-9f94-4c1e-87e7-5f63dbb9344b-config-data\") pod \"placement-db-sync-tbwhp\" (UID: \"2e492b4c-9f94-4c1e-87e7-5f63dbb9344b\") " pod="openstack-kuttl-tests/placement-db-sync-tbwhp" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.126315 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-79gr8\" (UniqueName: \"kubernetes.io/projected/acb70f0a-6559-44d8-9182-a43381f314b4-kube-api-access-79gr8\") pod \"barbican-db-sync-cnn24\" (UID: \"acb70f0a-6559-44d8-9182-a43381f314b4\") " pod="openstack-kuttl-tests/barbican-db-sync-cnn24" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.126361 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2e492b4c-9f94-4c1e-87e7-5f63dbb9344b-scripts\") pod \"placement-db-sync-tbwhp\" (UID: \"2e492b4c-9f94-4c1e-87e7-5f63dbb9344b\") " pod="openstack-kuttl-tests/placement-db-sync-tbwhp" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.126569 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2e492b4c-9f94-4c1e-87e7-5f63dbb9344b-logs\") pod \"placement-db-sync-tbwhp\" (UID: 
\"2e492b4c-9f94-4c1e-87e7-5f63dbb9344b\") " pod="openstack-kuttl-tests/placement-db-sync-tbwhp" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.139513 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2e492b4c-9f94-4c1e-87e7-5f63dbb9344b-scripts\") pod \"placement-db-sync-tbwhp\" (UID: \"2e492b4c-9f94-4c1e-87e7-5f63dbb9344b\") " pod="openstack-kuttl-tests/placement-db-sync-tbwhp" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.141850 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e492b4c-9f94-4c1e-87e7-5f63dbb9344b-config-data\") pod \"placement-db-sync-tbwhp\" (UID: \"2e492b4c-9f94-4c1e-87e7-5f63dbb9344b\") " pod="openstack-kuttl-tests/placement-db-sync-tbwhp" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.142691 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e492b4c-9f94-4c1e-87e7-5f63dbb9344b-combined-ca-bundle\") pod \"placement-db-sync-tbwhp\" (UID: \"2e492b4c-9f94-4c1e-87e7-5f63dbb9344b\") " pod="openstack-kuttl-tests/placement-db-sync-tbwhp" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.149650 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9f5p8\" (UniqueName: \"kubernetes.io/projected/2e492b4c-9f94-4c1e-87e7-5f63dbb9344b-kube-api-access-9f5p8\") pod \"placement-db-sync-tbwhp\" (UID: \"2e492b4c-9f94-4c1e-87e7-5f63dbb9344b\") " pod="openstack-kuttl-tests/placement-db-sync-tbwhp" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.215716 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-db-sync-dpr25" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.228017 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acb70f0a-6559-44d8-9182-a43381f314b4-combined-ca-bundle\") pod \"barbican-db-sync-cnn24\" (UID: \"acb70f0a-6559-44d8-9182-a43381f314b4\") " pod="openstack-kuttl-tests/barbican-db-sync-cnn24" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.228070 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/acb70f0a-6559-44d8-9182-a43381f314b4-db-sync-config-data\") pod \"barbican-db-sync-cnn24\" (UID: \"acb70f0a-6559-44d8-9182-a43381f314b4\") " pod="openstack-kuttl-tests/barbican-db-sync-cnn24" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.228130 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-79gr8\" (UniqueName: \"kubernetes.io/projected/acb70f0a-6559-44d8-9182-a43381f314b4-kube-api-access-79gr8\") pod \"barbican-db-sync-cnn24\" (UID: \"acb70f0a-6559-44d8-9182-a43381f314b4\") " pod="openstack-kuttl-tests/barbican-db-sync-cnn24" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.235552 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/acb70f0a-6559-44d8-9182-a43381f314b4-db-sync-config-data\") pod \"barbican-db-sync-cnn24\" (UID: \"acb70f0a-6559-44d8-9182-a43381f314b4\") " pod="openstack-kuttl-tests/barbican-db-sync-cnn24" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.240235 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/acb70f0a-6559-44d8-9182-a43381f314b4-combined-ca-bundle\") pod \"barbican-db-sync-cnn24\" (UID: \"acb70f0a-6559-44d8-9182-a43381f314b4\") " pod="openstack-kuttl-tests/barbican-db-sync-cnn24" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.265589 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-79gr8\" (UniqueName: \"kubernetes.io/projected/acb70f0a-6559-44d8-9182-a43381f314b4-kube-api-access-79gr8\") pod \"barbican-db-sync-cnn24\" (UID: \"acb70f0a-6559-44d8-9182-a43381f314b4\") " pod="openstack-kuttl-tests/barbican-db-sync-cnn24" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.316632 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-db-sync-tbwhp" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.330619 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-db-sync-cnn24" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.398781 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-rcz7c"] Jan 20 17:23:33 crc kubenswrapper[4558]: W0120 17:23:33.413324 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfb55b781_1113_4879_8efd_d04a2284dffc.slice/crio-b9649bb5b2d137b6ce0e382f3b68c9f71e5c595a5c981fdd50f06e83f5e2186d WatchSource:0}: Error finding container b9649bb5b2d137b6ce0e382f3b68c9f71e5c595a5c981fdd50f06e83f5e2186d: Status 404 returned error can't find the container with id b9649bb5b2d137b6ce0e382f3b68c9f71e5c595a5c981fdd50f06e83f5e2186d Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.497519 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-rcz7c" event={"ID":"fb55b781-1113-4879-8efd-d04a2284dffc","Type":"ContainerStarted","Data":"b9649bb5b2d137b6ce0e382f3b68c9f71e5c595a5c981fdd50f06e83f5e2186d"} Jan 20 17:23:33 crc kubenswrapper[4558]: W0120 17:23:33.650948 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod65bcc9ab_370b_457f_a256_de44fa447395.slice/crio-bf7b272f5abf5f74f137fe6db2c9ec95c3967a559e6d66c30b670ce70c1a3a4f WatchSource:0}: Error finding container bf7b272f5abf5f74f137fe6db2c9ec95c3967a559e6d66c30b670ce70c1a3a4f: Status 404 returned error can't find the container with id bf7b272f5abf5f74f137fe6db2c9ec95c3967a559e6d66c30b670ce70c1a3a4f Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.655146 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.666201 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.667979 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.670623 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-external-config-data" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.670642 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-public-svc" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.670879 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-glance-dockercfg-np554" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.674095 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.679091 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-scripts" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.728326 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.729748 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.736091 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-internal-config-data" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.736599 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-internal-svc" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.739404 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/654eadba-3696-45e5-97fa-71a403f8a840-scripts\") pod \"glance-default-external-api-0\" (UID: \"654eadba-3696-45e5-97fa-71a403f8a840\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.739448 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vx566\" (UniqueName: \"kubernetes.io/projected/654eadba-3696-45e5-97fa-71a403f8a840-kube-api-access-vx566\") pod \"glance-default-external-api-0\" (UID: \"654eadba-3696-45e5-97fa-71a403f8a840\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.739561 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/654eadba-3696-45e5-97fa-71a403f8a840-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"654eadba-3696-45e5-97fa-71a403f8a840\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.739674 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"glance-default-external-api-0\" (UID: \"654eadba-3696-45e5-97fa-71a403f8a840\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.739714 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/654eadba-3696-45e5-97fa-71a403f8a840-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"654eadba-3696-45e5-97fa-71a403f8a840\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.739852 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/654eadba-3696-45e5-97fa-71a403f8a840-config-data\") pod \"glance-default-external-api-0\" (UID: \"654eadba-3696-45e5-97fa-71a403f8a840\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.739924 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/654eadba-3696-45e5-97fa-71a403f8a840-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"654eadba-3696-45e5-97fa-71a403f8a840\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.740012 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/654eadba-3696-45e5-97fa-71a403f8a840-logs\") pod \"glance-default-external-api-0\" (UID: \"654eadba-3696-45e5-97fa-71a403f8a840\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.758842 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.784846 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-db-sync-plxqg"] Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.792215 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-db-sync-dpr25"] Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.841394 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/654eadba-3696-45e5-97fa-71a403f8a840-config-data\") pod \"glance-default-external-api-0\" (UID: \"654eadba-3696-45e5-97fa-71a403f8a840\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.841455 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1-config-data\") pod \"glance-default-internal-api-0\" (UID: \"4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.841494 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/654eadba-3696-45e5-97fa-71a403f8a840-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"654eadba-3696-45e5-97fa-71a403f8a840\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.841556 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/654eadba-3696-45e5-97fa-71a403f8a840-logs\") pod \"glance-default-external-api-0\" (UID: 
\"654eadba-3696-45e5-97fa-71a403f8a840\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.841589 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"glance-default-internal-api-0\" (UID: \"4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.841632 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/654eadba-3696-45e5-97fa-71a403f8a840-scripts\") pod \"glance-default-external-api-0\" (UID: \"654eadba-3696-45e5-97fa-71a403f8a840\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.841661 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vx566\" (UniqueName: \"kubernetes.io/projected/654eadba-3696-45e5-97fa-71a403f8a840-kube-api-access-vx566\") pod \"glance-default-external-api-0\" (UID: \"654eadba-3696-45e5-97fa-71a403f8a840\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.841680 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.841699 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1-scripts\") pod \"glance-default-internal-api-0\" (UID: \"4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.841742 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1-logs\") pod \"glance-default-internal-api-0\" (UID: \"4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.841761 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/654eadba-3696-45e5-97fa-71a403f8a840-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"654eadba-3696-45e5-97fa-71a403f8a840\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.841783 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.841834 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage14-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage14-crc\") pod \"glance-default-external-api-0\" (UID: \"654eadba-3696-45e5-97fa-71a403f8a840\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.841857 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/654eadba-3696-45e5-97fa-71a403f8a840-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"654eadba-3696-45e5-97fa-71a403f8a840\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.841887 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.841941 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kl5zk\" (UniqueName: \"kubernetes.io/projected/4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1-kube-api-access-kl5zk\") pod \"glance-default-internal-api-0\" (UID: \"4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.845789 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/654eadba-3696-45e5-97fa-71a403f8a840-logs\") pod \"glance-default-external-api-0\" (UID: \"654eadba-3696-45e5-97fa-71a403f8a840\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.845867 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/654eadba-3696-45e5-97fa-71a403f8a840-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"654eadba-3696-45e5-97fa-71a403f8a840\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.845947 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/654eadba-3696-45e5-97fa-71a403f8a840-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"654eadba-3696-45e5-97fa-71a403f8a840\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.845970 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"glance-default-external-api-0\" (UID: \"654eadba-3696-45e5-97fa-71a403f8a840\") device mount path \"/mnt/openstack/pv14\"" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.847008 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/654eadba-3696-45e5-97fa-71a403f8a840-config-data\") pod \"glance-default-external-api-0\" (UID: \"654eadba-3696-45e5-97fa-71a403f8a840\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.851774 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/654eadba-3696-45e5-97fa-71a403f8a840-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"654eadba-3696-45e5-97fa-71a403f8a840\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.851811 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/654eadba-3696-45e5-97fa-71a403f8a840-scripts\") pod \"glance-default-external-api-0\" (UID: \"654eadba-3696-45e5-97fa-71a403f8a840\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.860574 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vx566\" (UniqueName: \"kubernetes.io/projected/654eadba-3696-45e5-97fa-71a403f8a840-kube-api-access-vx566\") pod \"glance-default-external-api-0\" (UID: \"654eadba-3696-45e5-97fa-71a403f8a840\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.891902 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"glance-default-external-api-0\" (UID: \"654eadba-3696-45e5-97fa-71a403f8a840\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.893177 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-db-sync-tbwhp"] Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.905879 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-db-sync-cnn24"] Jan 20 17:23:33 crc kubenswrapper[4558]: W0120 17:23:33.906307 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2e492b4c_9f94_4c1e_87e7_5f63dbb9344b.slice/crio-68b68f5c616e89ffdb1554638200414389d2de5b9c7d95ffc2588dff1f1713f2 WatchSource:0}: Error finding container 68b68f5c616e89ffdb1554638200414389d2de5b9c7d95ffc2588dff1f1713f2: Status 404 returned error can't find the container with id 68b68f5c616e89ffdb1554638200414389d2de5b9c7d95ffc2588dff1f1713f2 Jan 20 17:23:33 crc kubenswrapper[4558]: W0120 17:23:33.909555 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podacb70f0a_6559_44d8_9182_a43381f314b4.slice/crio-f5da9307e63d269f37fda0240ab410043f248ae133189cfe6c4c23b84cf54681 WatchSource:0}: Error finding container f5da9307e63d269f37fda0240ab410043f248ae133189cfe6c4c23b84cf54681: Status 404 returned error can't find the container with id f5da9307e63d269f37fda0240ab410043f248ae133189cfe6c4c23b84cf54681 Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.950478 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"glance-default-internal-api-0\" (UID: \"4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.950558 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1\") " 
pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.950583 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1-scripts\") pod \"glance-default-internal-api-0\" (UID: \"4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.950610 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1-logs\") pod \"glance-default-internal-api-0\" (UID: \"4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.950632 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.950700 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.950764 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kl5zk\" (UniqueName: \"kubernetes.io/projected/4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1-kube-api-access-kl5zk\") pod \"glance-default-internal-api-0\" (UID: \"4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.950818 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1-config-data\") pod \"glance-default-internal-api-0\" (UID: \"4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.950861 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"glance-default-internal-api-0\" (UID: \"4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1\") device mount path \"/mnt/openstack/pv15\"" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.951114 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.954657 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: 
\"4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.954918 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1-logs\") pod \"glance-default-internal-api-0\" (UID: \"4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.963726 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1-scripts\") pod \"glance-default-internal-api-0\" (UID: \"4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.969139 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1-config-data\") pod \"glance-default-internal-api-0\" (UID: \"4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.970979 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.975795 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kl5zk\" (UniqueName: \"kubernetes.io/projected/4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1-kube-api-access-kl5zk\") pod \"glance-default-internal-api-0\" (UID: \"4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.991634 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"glance-default-internal-api-0\" (UID: \"4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:23:33 crc kubenswrapper[4558]: I0120 17:23:33.995039 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:23:34 crc kubenswrapper[4558]: I0120 17:23:34.042379 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:23:34 crc kubenswrapper[4558]: I0120 17:23:34.314247 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:23:34 crc kubenswrapper[4558]: I0120 17:23:34.549672 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-sync-plxqg" event={"ID":"c28fa234-6cff-43e9-9cf5-a5cd2f3b67be","Type":"ContainerStarted","Data":"bc4cc07eb90d7d3d3906a3d0cffea06fd114245a87ecda09cca341976ee3130b"} Jan 20 17:23:34 crc kubenswrapper[4558]: I0120 17:23:34.549755 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-sync-plxqg" event={"ID":"c28fa234-6cff-43e9-9cf5-a5cd2f3b67be","Type":"ContainerStarted","Data":"1f6de6abe858aaebe72cc976768454c62c63c2ceeb81c03964d8eed7f09b3b3e"} Jan 20 17:23:34 crc kubenswrapper[4558]: I0120 17:23:34.563540 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-sync-dpr25" event={"ID":"b03348ed-66d6-4076-a5fa-0ccf182e8e3c","Type":"ContainerStarted","Data":"17bde0333457ba40d4fb298f15c25764aa324510751fc022d849b46f9554a23b"} Jan 20 17:23:34 crc kubenswrapper[4558]: I0120 17:23:34.590823 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/neutron-db-sync-plxqg" podStartSLOduration=2.5908051199999997 podStartE2EDuration="2.59080512s" podCreationTimestamp="2026-01-20 17:23:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:23:34.578813612 +0000 UTC m=+2508.339151580" watchObservedRunningTime="2026-01-20 17:23:34.59080512 +0000 UTC m=+2508.351143087" Jan 20 17:23:34 crc kubenswrapper[4558]: I0120 17:23:34.597457 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"65bcc9ab-370b-457f-a256-de44fa447395","Type":"ContainerStarted","Data":"bf7b272f5abf5f74f137fe6db2c9ec95c3967a559e6d66c30b670ce70c1a3a4f"} Jan 20 17:23:34 crc kubenswrapper[4558]: I0120 17:23:34.597511 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-sync-cnn24" event={"ID":"acb70f0a-6559-44d8-9182-a43381f314b4","Type":"ContainerStarted","Data":"27473c2ff83e868f723f6402e1bd91d3230ab7a18cd06577ec40b96f27a8676a"} Jan 20 17:23:34 crc kubenswrapper[4558]: I0120 17:23:34.597524 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-sync-cnn24" event={"ID":"acb70f0a-6559-44d8-9182-a43381f314b4","Type":"ContainerStarted","Data":"f5da9307e63d269f37fda0240ab410043f248ae133189cfe6c4c23b84cf54681"} Jan 20 17:23:34 crc kubenswrapper[4558]: I0120 17:23:34.597534 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"654eadba-3696-45e5-97fa-71a403f8a840","Type":"ContainerStarted","Data":"38b4b1692d70ecb89986ac5e68c979d3f4977413f9e9b9a0ed529b57b99440a0"} Jan 20 17:23:34 crc kubenswrapper[4558]: I0120 17:23:34.598775 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-sync-tbwhp" event={"ID":"2e492b4c-9f94-4c1e-87e7-5f63dbb9344b","Type":"ContainerStarted","Data":"735dcde94cbdc51444810e74df6806f075f4ccbace4e341d82a534d321b0a02d"} Jan 20 17:23:34 crc kubenswrapper[4558]: I0120 17:23:34.598813 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-kuttl-tests/placement-db-sync-tbwhp" event={"ID":"2e492b4c-9f94-4c1e-87e7-5f63dbb9344b","Type":"ContainerStarted","Data":"68b68f5c616e89ffdb1554638200414389d2de5b9c7d95ffc2588dff1f1713f2"} Jan 20 17:23:34 crc kubenswrapper[4558]: I0120 17:23:34.605255 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-rcz7c" event={"ID":"fb55b781-1113-4879-8efd-d04a2284dffc","Type":"ContainerStarted","Data":"c3f90e45f10a2d4bedae0abbbf6303211fbf4e5ffaad8f639762a180b6dadc32"} Jan 20 17:23:34 crc kubenswrapper[4558]: I0120 17:23:34.611965 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-db-sync-cnn24" podStartSLOduration=2.611954182 podStartE2EDuration="2.611954182s" podCreationTimestamp="2026-01-20 17:23:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:23:34.608692908 +0000 UTC m=+2508.369030875" watchObservedRunningTime="2026-01-20 17:23:34.611954182 +0000 UTC m=+2508.372292149" Jan 20 17:23:34 crc kubenswrapper[4558]: I0120 17:23:34.654273 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:23:34 crc kubenswrapper[4558]: I0120 17:23:34.661178 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/keystone-bootstrap-rcz7c" podStartSLOduration=2.6611479989999998 podStartE2EDuration="2.661147999s" podCreationTimestamp="2026-01-20 17:23:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:23:34.64218572 +0000 UTC m=+2508.402523687" watchObservedRunningTime="2026-01-20 17:23:34.661147999 +0000 UTC m=+2508.421485966" Jan 20 17:23:34 crc kubenswrapper[4558]: I0120 17:23:34.666148 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/placement-db-sync-tbwhp" podStartSLOduration=2.666141983 podStartE2EDuration="2.666141983s" podCreationTimestamp="2026-01-20 17:23:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:23:34.659977457 +0000 UTC m=+2508.420315424" watchObservedRunningTime="2026-01-20 17:23:34.666141983 +0000 UTC m=+2508.426479951" Jan 20 17:23:35 crc kubenswrapper[4558]: I0120 17:23:35.124310 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:23:35 crc kubenswrapper[4558]: I0120 17:23:35.197374 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:23:35 crc kubenswrapper[4558]: I0120 17:23:35.222844 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:23:35 crc kubenswrapper[4558]: I0120 17:23:35.628601 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"65bcc9ab-370b-457f-a256-de44fa447395","Type":"ContainerStarted","Data":"f222360e5761260af77d9f2762b3887e3a8141792a6c327b2acf7b0128e1ca74"} Jan 20 17:23:35 crc kubenswrapper[4558]: I0120 17:23:35.632259 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" 
event={"ID":"654eadba-3696-45e5-97fa-71a403f8a840","Type":"ContainerStarted","Data":"1e90f20beec3d58b2399a5c37f3c6de6066c3d312b1d6319ae583d0849901cb8"} Jan 20 17:23:35 crc kubenswrapper[4558]: I0120 17:23:35.636147 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1","Type":"ContainerStarted","Data":"3bb26a3f6eda2dc11df1af3b6c5da8eaade00754e53dae834b3ffc90bd723a4d"} Jan 20 17:23:35 crc kubenswrapper[4558]: I0120 17:23:35.636226 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1","Type":"ContainerStarted","Data":"be103909586400b8765e537c13a749338b63d6281388ba807ece2af858fbe8e9"} Jan 20 17:23:35 crc kubenswrapper[4558]: I0120 17:23:35.641891 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-sync-dpr25" event={"ID":"b03348ed-66d6-4076-a5fa-0ccf182e8e3c","Type":"ContainerStarted","Data":"ff1a9f56eba4d6a58e189c76b9c17f98147866827a5459b702643795452019b3"} Jan 20 17:23:35 crc kubenswrapper[4558]: I0120 17:23:35.665808 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/cinder-db-sync-dpr25" podStartSLOduration=3.665788086 podStartE2EDuration="3.665788086s" podCreationTimestamp="2026-01-20 17:23:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:23:35.660780016 +0000 UTC m=+2509.421117983" watchObservedRunningTime="2026-01-20 17:23:35.665788086 +0000 UTC m=+2509.426126053" Jan 20 17:23:36 crc kubenswrapper[4558]: I0120 17:23:36.659191 4558 generic.go:334] "Generic (PLEG): container finished" podID="acb70f0a-6559-44d8-9182-a43381f314b4" containerID="27473c2ff83e868f723f6402e1bd91d3230ab7a18cd06577ec40b96f27a8676a" exitCode=0 Jan 20 17:23:36 crc kubenswrapper[4558]: I0120 17:23:36.659611 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-sync-cnn24" event={"ID":"acb70f0a-6559-44d8-9182-a43381f314b4","Type":"ContainerDied","Data":"27473c2ff83e868f723f6402e1bd91d3230ab7a18cd06577ec40b96f27a8676a"} Jan 20 17:23:36 crc kubenswrapper[4558]: I0120 17:23:36.663832 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"654eadba-3696-45e5-97fa-71a403f8a840","Type":"ContainerStarted","Data":"9194cd59871bca2c7d7c2dbc5f2485ab9cb276b8804c5323f5e3a3c721a71e00"} Jan 20 17:23:36 crc kubenswrapper[4558]: I0120 17:23:36.663961 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="654eadba-3696-45e5-97fa-71a403f8a840" containerName="glance-log" containerID="cri-o://1e90f20beec3d58b2399a5c37f3c6de6066c3d312b1d6319ae583d0849901cb8" gracePeriod=30 Jan 20 17:23:36 crc kubenswrapper[4558]: I0120 17:23:36.664261 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="654eadba-3696-45e5-97fa-71a403f8a840" containerName="glance-httpd" containerID="cri-o://9194cd59871bca2c7d7c2dbc5f2485ab9cb276b8804c5323f5e3a3c721a71e00" gracePeriod=30 Jan 20 17:23:36 crc kubenswrapper[4558]: I0120 17:23:36.674500 4558 generic.go:334] "Generic (PLEG): container finished" podID="2e492b4c-9f94-4c1e-87e7-5f63dbb9344b" 
containerID="735dcde94cbdc51444810e74df6806f075f4ccbace4e341d82a534d321b0a02d" exitCode=0 Jan 20 17:23:36 crc kubenswrapper[4558]: I0120 17:23:36.674559 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-sync-tbwhp" event={"ID":"2e492b4c-9f94-4c1e-87e7-5f63dbb9344b","Type":"ContainerDied","Data":"735dcde94cbdc51444810e74df6806f075f4ccbace4e341d82a534d321b0a02d"} Jan 20 17:23:36 crc kubenswrapper[4558]: I0120 17:23:36.676950 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1","Type":"ContainerStarted","Data":"18761a5d720239c02db342d5b5b3ea378812a4ec0db6f34be8c95502ff3eb619"} Jan 20 17:23:36 crc kubenswrapper[4558]: I0120 17:23:36.677079 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1" containerName="glance-log" containerID="cri-o://3bb26a3f6eda2dc11df1af3b6c5da8eaade00754e53dae834b3ffc90bd723a4d" gracePeriod=30 Jan 20 17:23:36 crc kubenswrapper[4558]: I0120 17:23:36.677207 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1" containerName="glance-httpd" containerID="cri-o://18761a5d720239c02db342d5b5b3ea378812a4ec0db6f34be8c95502ff3eb619" gracePeriod=30 Jan 20 17:23:36 crc kubenswrapper[4558]: I0120 17:23:36.679923 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"65bcc9ab-370b-457f-a256-de44fa447395","Type":"ContainerStarted","Data":"45c0e030f75bcd8df529339f70c23bd7500873605fb95750023ec5f527191a3a"} Jan 20 17:23:36 crc kubenswrapper[4558]: I0120 17:23:36.713266 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-external-api-0" podStartSLOduration=4.713243352 podStartE2EDuration="4.713243352s" podCreationTimestamp="2026-01-20 17:23:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:23:36.69259421 +0000 UTC m=+2510.452932177" watchObservedRunningTime="2026-01-20 17:23:36.713243352 +0000 UTC m=+2510.473581319" Jan 20 17:23:36 crc kubenswrapper[4558]: I0120 17:23:36.746139 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-internal-api-0" podStartSLOduration=4.746116369 podStartE2EDuration="4.746116369s" podCreationTimestamp="2026-01-20 17:23:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:23:36.723643465 +0000 UTC m=+2510.483981432" watchObservedRunningTime="2026-01-20 17:23:36.746116369 +0000 UTC m=+2510.506454336" Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.439801 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.444440 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.560484 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1-config-data\") pod \"4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1\" (UID: \"4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1\") " Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.560962 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1-logs\") pod \"4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1\" (UID: \"4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1\") " Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.561018 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/654eadba-3696-45e5-97fa-71a403f8a840-config-data\") pod \"654eadba-3696-45e5-97fa-71a403f8a840\" (UID: \"654eadba-3696-45e5-97fa-71a403f8a840\") " Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.561043 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1\" (UID: \"4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1\") " Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.561114 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/654eadba-3696-45e5-97fa-71a403f8a840-httpd-run\") pod \"654eadba-3696-45e5-97fa-71a403f8a840\" (UID: \"654eadba-3696-45e5-97fa-71a403f8a840\") " Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.561147 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vx566\" (UniqueName: \"kubernetes.io/projected/654eadba-3696-45e5-97fa-71a403f8a840-kube-api-access-vx566\") pod \"654eadba-3696-45e5-97fa-71a403f8a840\" (UID: \"654eadba-3696-45e5-97fa-71a403f8a840\") " Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.561180 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/654eadba-3696-45e5-97fa-71a403f8a840-logs\") pod \"654eadba-3696-45e5-97fa-71a403f8a840\" (UID: \"654eadba-3696-45e5-97fa-71a403f8a840\") " Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.561295 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"654eadba-3696-45e5-97fa-71a403f8a840\" (UID: \"654eadba-3696-45e5-97fa-71a403f8a840\") " Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.561359 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/654eadba-3696-45e5-97fa-71a403f8a840-scripts\") pod \"654eadba-3696-45e5-97fa-71a403f8a840\" (UID: \"654eadba-3696-45e5-97fa-71a403f8a840\") " Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.561410 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1-httpd-run\") pod \"4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1\" (UID: \"4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1\") " Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.561500 4558 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1-combined-ca-bundle\") pod \"4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1\" (UID: \"4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1\") " Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.561530 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1-logs" (OuterVolumeSpecName: "logs") pod "4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1" (UID: "4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.561550 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1-scripts\") pod \"4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1\" (UID: \"4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1\") " Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.561684 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/654eadba-3696-45e5-97fa-71a403f8a840-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "654eadba-3696-45e5-97fa-71a403f8a840" (UID: "654eadba-3696-45e5-97fa-71a403f8a840"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.561733 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kl5zk\" (UniqueName: \"kubernetes.io/projected/4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1-kube-api-access-kl5zk\") pod \"4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1\" (UID: \"4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1\") " Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.561819 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/654eadba-3696-45e5-97fa-71a403f8a840-public-tls-certs\") pod \"654eadba-3696-45e5-97fa-71a403f8a840\" (UID: \"654eadba-3696-45e5-97fa-71a403f8a840\") " Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.561889 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1-internal-tls-certs\") pod \"4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1\" (UID: \"4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1\") " Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.561925 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/654eadba-3696-45e5-97fa-71a403f8a840-combined-ca-bundle\") pod \"654eadba-3696-45e5-97fa-71a403f8a840\" (UID: \"654eadba-3696-45e5-97fa-71a403f8a840\") " Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.563206 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.563220 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/654eadba-3696-45e5-97fa-71a403f8a840-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.564408 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/empty-dir/4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1" (UID: "4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.568033 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1-scripts" (OuterVolumeSpecName: "scripts") pod "4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1" (UID: "4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.568371 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/654eadba-3696-45e5-97fa-71a403f8a840-logs" (OuterVolumeSpecName: "logs") pod "654eadba-3696-45e5-97fa-71a403f8a840" (UID: "654eadba-3696-45e5-97fa-71a403f8a840"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.568570 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/654eadba-3696-45e5-97fa-71a403f8a840-scripts" (OuterVolumeSpecName: "scripts") pod "654eadba-3696-45e5-97fa-71a403f8a840" (UID: "654eadba-3696-45e5-97fa-71a403f8a840"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.572631 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage14-crc" (OuterVolumeSpecName: "glance") pod "654eadba-3696-45e5-97fa-71a403f8a840" (UID: "654eadba-3696-45e5-97fa-71a403f8a840"). InnerVolumeSpecName "local-storage14-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.573141 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1-kube-api-access-kl5zk" (OuterVolumeSpecName: "kube-api-access-kl5zk") pod "4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1" (UID: "4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1"). InnerVolumeSpecName "kube-api-access-kl5zk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.574308 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/654eadba-3696-45e5-97fa-71a403f8a840-kube-api-access-vx566" (OuterVolumeSpecName: "kube-api-access-vx566") pod "654eadba-3696-45e5-97fa-71a403f8a840" (UID: "654eadba-3696-45e5-97fa-71a403f8a840"). InnerVolumeSpecName "kube-api-access-vx566". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.579225 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage15-crc" (OuterVolumeSpecName: "glance") pod "4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1" (UID: "4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1"). InnerVolumeSpecName "local-storage15-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.600277 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/654eadba-3696-45e5-97fa-71a403f8a840-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "654eadba-3696-45e5-97fa-71a403f8a840" (UID: "654eadba-3696-45e5-97fa-71a403f8a840"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.611320 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1" (UID: "4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.625224 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1" (UID: "4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.626317 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/654eadba-3696-45e5-97fa-71a403f8a840-config-data" (OuterVolumeSpecName: "config-data") pod "654eadba-3696-45e5-97fa-71a403f8a840" (UID: "654eadba-3696-45e5-97fa-71a403f8a840"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.641227 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/654eadba-3696-45e5-97fa-71a403f8a840-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "654eadba-3696-45e5-97fa-71a403f8a840" (UID: "654eadba-3696-45e5-97fa-71a403f8a840"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.651754 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1-config-data" (OuterVolumeSpecName: "config-data") pod "4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1" (UID: "4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1"). InnerVolumeSpecName "config-data".
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.665481 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.665513 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/654eadba-3696-45e5-97fa-71a403f8a840-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.665524 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.665533 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/654eadba-3696-45e5-97fa-71a403f8a840-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.665554 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") on node \"crc\" " Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.665564 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vx566\" (UniqueName: \"kubernetes.io/projected/654eadba-3696-45e5-97fa-71a403f8a840-kube-api-access-vx566\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.665574 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/654eadba-3696-45e5-97fa-71a403f8a840-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.665587 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") on node \"crc\" " Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.665597 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/654eadba-3696-45e5-97fa-71a403f8a840-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.665606 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.665616 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.665626 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.665634 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kl5zk\" (UniqueName: \"kubernetes.io/projected/4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1-kube-api-access-kl5zk\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.665680 4558 
reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/654eadba-3696-45e5-97fa-71a403f8a840-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.679862 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage14-crc" (UniqueName: "kubernetes.io/local-volume/local-storage14-crc") on node "crc" Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.680093 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage15-crc" (UniqueName: "kubernetes.io/local-volume/local-storage15-crc") on node "crc" Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.689727 4558 generic.go:334] "Generic (PLEG): container finished" podID="654eadba-3696-45e5-97fa-71a403f8a840" containerID="9194cd59871bca2c7d7c2dbc5f2485ab9cb276b8804c5323f5e3a3c721a71e00" exitCode=0 Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.689755 4558 generic.go:334] "Generic (PLEG): container finished" podID="654eadba-3696-45e5-97fa-71a403f8a840" containerID="1e90f20beec3d58b2399a5c37f3c6de6066c3d312b1d6319ae583d0849901cb8" exitCode=143 Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.689800 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"654eadba-3696-45e5-97fa-71a403f8a840","Type":"ContainerDied","Data":"9194cd59871bca2c7d7c2dbc5f2485ab9cb276b8804c5323f5e3a3c721a71e00"} Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.689831 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"654eadba-3696-45e5-97fa-71a403f8a840","Type":"ContainerDied","Data":"1e90f20beec3d58b2399a5c37f3c6de6066c3d312b1d6319ae583d0849901cb8"} Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.689844 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"654eadba-3696-45e5-97fa-71a403f8a840","Type":"ContainerDied","Data":"38b4b1692d70ecb89986ac5e68c979d3f4977413f9e9b9a0ed529b57b99440a0"} Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.689860 4558 scope.go:117] "RemoveContainer" containerID="9194cd59871bca2c7d7c2dbc5f2485ab9cb276b8804c5323f5e3a3c721a71e00" Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.689987 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.698916 4558 generic.go:334] "Generic (PLEG): container finished" podID="4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1" containerID="18761a5d720239c02db342d5b5b3ea378812a4ec0db6f34be8c95502ff3eb619" exitCode=0 Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.698937 4558 generic.go:334] "Generic (PLEG): container finished" podID="4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1" containerID="3bb26a3f6eda2dc11df1af3b6c5da8eaade00754e53dae834b3ffc90bd723a4d" exitCode=143 Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.698979 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.699003 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1","Type":"ContainerDied","Data":"18761a5d720239c02db342d5b5b3ea378812a4ec0db6f34be8c95502ff3eb619"} Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.699038 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1","Type":"ContainerDied","Data":"3bb26a3f6eda2dc11df1af3b6c5da8eaade00754e53dae834b3ffc90bd723a4d"} Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.699517 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1","Type":"ContainerDied","Data":"be103909586400b8765e537c13a749338b63d6281388ba807ece2af858fbe8e9"} Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.703844 4558 generic.go:334] "Generic (PLEG): container finished" podID="fb55b781-1113-4879-8efd-d04a2284dffc" containerID="c3f90e45f10a2d4bedae0abbbf6303211fbf4e5ffaad8f639762a180b6dadc32" exitCode=0 Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.703890 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-rcz7c" event={"ID":"fb55b781-1113-4879-8efd-d04a2284dffc","Type":"ContainerDied","Data":"c3f90e45f10a2d4bedae0abbbf6303211fbf4e5ffaad8f639762a180b6dadc32"} Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.706285 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"65bcc9ab-370b-457f-a256-de44fa447395","Type":"ContainerStarted","Data":"7d1eca58dad37d535e3971097db8c897d92fc464555543dfe4336c4625410888"} Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.722313 4558 scope.go:117] "RemoveContainer" containerID="1e90f20beec3d58b2399a5c37f3c6de6066c3d312b1d6319ae583d0849901cb8" Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.771742 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.772365 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.772403 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.789382 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.797353 4558 scope.go:117] "RemoveContainer" containerID="9194cd59871bca2c7d7c2dbc5f2485ab9cb276b8804c5323f5e3a3c721a71e00" Jan 20 17:23:37 crc kubenswrapper[4558]: E0120 17:23:37.797771 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9194cd59871bca2c7d7c2dbc5f2485ab9cb276b8804c5323f5e3a3c721a71e00\": container with ID starting with 9194cd59871bca2c7d7c2dbc5f2485ab9cb276b8804c5323f5e3a3c721a71e00 not found: ID does not exist" 
containerID="9194cd59871bca2c7d7c2dbc5f2485ab9cb276b8804c5323f5e3a3c721a71e00" Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.797811 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9194cd59871bca2c7d7c2dbc5f2485ab9cb276b8804c5323f5e3a3c721a71e00"} err="failed to get container status \"9194cd59871bca2c7d7c2dbc5f2485ab9cb276b8804c5323f5e3a3c721a71e00\": rpc error: code = NotFound desc = could not find container \"9194cd59871bca2c7d7c2dbc5f2485ab9cb276b8804c5323f5e3a3c721a71e00\": container with ID starting with 9194cd59871bca2c7d7c2dbc5f2485ab9cb276b8804c5323f5e3a3c721a71e00 not found: ID does not exist" Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.797836 4558 scope.go:117] "RemoveContainer" containerID="1e90f20beec3d58b2399a5c37f3c6de6066c3d312b1d6319ae583d0849901cb8" Jan 20 17:23:37 crc kubenswrapper[4558]: E0120 17:23:37.800616 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1e90f20beec3d58b2399a5c37f3c6de6066c3d312b1d6319ae583d0849901cb8\": container with ID starting with 1e90f20beec3d58b2399a5c37f3c6de6066c3d312b1d6319ae583d0849901cb8 not found: ID does not exist" containerID="1e90f20beec3d58b2399a5c37f3c6de6066c3d312b1d6319ae583d0849901cb8" Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.800651 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1e90f20beec3d58b2399a5c37f3c6de6066c3d312b1d6319ae583d0849901cb8"} err="failed to get container status \"1e90f20beec3d58b2399a5c37f3c6de6066c3d312b1d6319ae583d0849901cb8\": rpc error: code = NotFound desc = could not find container \"1e90f20beec3d58b2399a5c37f3c6de6066c3d312b1d6319ae583d0849901cb8\": container with ID starting with 1e90f20beec3d58b2399a5c37f3c6de6066c3d312b1d6319ae583d0849901cb8 not found: ID does not exist" Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.800670 4558 scope.go:117] "RemoveContainer" containerID="9194cd59871bca2c7d7c2dbc5f2485ab9cb276b8804c5323f5e3a3c721a71e00" Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.813284 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9194cd59871bca2c7d7c2dbc5f2485ab9cb276b8804c5323f5e3a3c721a71e00"} err="failed to get container status \"9194cd59871bca2c7d7c2dbc5f2485ab9cb276b8804c5323f5e3a3c721a71e00\": rpc error: code = NotFound desc = could not find container \"9194cd59871bca2c7d7c2dbc5f2485ab9cb276b8804c5323f5e3a3c721a71e00\": container with ID starting with 9194cd59871bca2c7d7c2dbc5f2485ab9cb276b8804c5323f5e3a3c721a71e00 not found: ID does not exist" Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.813549 4558 scope.go:117] "RemoveContainer" containerID="1e90f20beec3d58b2399a5c37f3c6de6066c3d312b1d6319ae583d0849901cb8" Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.818246 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1e90f20beec3d58b2399a5c37f3c6de6066c3d312b1d6319ae583d0849901cb8"} err="failed to get container status \"1e90f20beec3d58b2399a5c37f3c6de6066c3d312b1d6319ae583d0849901cb8\": rpc error: code = NotFound desc = could not find container \"1e90f20beec3d58b2399a5c37f3c6de6066c3d312b1d6319ae583d0849901cb8\": container with ID starting with 1e90f20beec3d58b2399a5c37f3c6de6066c3d312b1d6319ae583d0849901cb8 not found: ID does not exist" Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.818272 4558 scope.go:117] "RemoveContainer" 
containerID="18761a5d720239c02db342d5b5b3ea378812a4ec0db6f34be8c95502ff3eb619"
Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.820474 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"]
Jan 20 17:23:37 crc kubenswrapper[4558]: E0120 17:23:37.820805 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="654eadba-3696-45e5-97fa-71a403f8a840" containerName="glance-log"
Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.820823 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="654eadba-3696-45e5-97fa-71a403f8a840" containerName="glance-log"
Jan 20 17:23:37 crc kubenswrapper[4558]: E0120 17:23:37.820841 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1" containerName="glance-httpd"
Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.820847 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1" containerName="glance-httpd"
Jan 20 17:23:37 crc kubenswrapper[4558]: E0120 17:23:37.820860 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1" containerName="glance-log"
Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.820866 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1" containerName="glance-log"
Jan 20 17:23:37 crc kubenswrapper[4558]: E0120 17:23:37.820878 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="654eadba-3696-45e5-97fa-71a403f8a840" containerName="glance-httpd"
Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.820884 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="654eadba-3696-45e5-97fa-71a403f8a840" containerName="glance-httpd"
Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.821033 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1" containerName="glance-log"
Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.821050 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="654eadba-3696-45e5-97fa-71a403f8a840" containerName="glance-httpd"
Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.821062 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="654eadba-3696-45e5-97fa-71a403f8a840" containerName="glance-log"
Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.821074 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1" containerName="glance-httpd"
Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.821885 4558 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.826361 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.831299 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.845412 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.845839 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-external-config-data" Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.846027 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-glance-dockercfg-np554" Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.846358 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-scripts" Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.846401 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-public-svc" Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.862058 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.863515 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.869042 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-internal-svc" Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.869300 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-internal-config-data" Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.890677 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.931604 4558 scope.go:117] "RemoveContainer" containerID="3bb26a3f6eda2dc11df1af3b6c5da8eaade00754e53dae834b3ffc90bd723a4d" Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.970814 4558 scope.go:117] "RemoveContainer" containerID="18761a5d720239c02db342d5b5b3ea378812a4ec0db6f34be8c95502ff3eb619" Jan 20 17:23:37 crc kubenswrapper[4558]: E0120 17:23:37.972496 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"18761a5d720239c02db342d5b5b3ea378812a4ec0db6f34be8c95502ff3eb619\": container with ID starting with 18761a5d720239c02db342d5b5b3ea378812a4ec0db6f34be8c95502ff3eb619 not found: ID does not exist" containerID="18761a5d720239c02db342d5b5b3ea378812a4ec0db6f34be8c95502ff3eb619" Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.972522 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"18761a5d720239c02db342d5b5b3ea378812a4ec0db6f34be8c95502ff3eb619"} err="failed to get container status \"18761a5d720239c02db342d5b5b3ea378812a4ec0db6f34be8c95502ff3eb619\": rpc error: code = NotFound desc = could not find container 
\"18761a5d720239c02db342d5b5b3ea378812a4ec0db6f34be8c95502ff3eb619\": container with ID starting with 18761a5d720239c02db342d5b5b3ea378812a4ec0db6f34be8c95502ff3eb619 not found: ID does not exist" Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.972543 4558 scope.go:117] "RemoveContainer" containerID="3bb26a3f6eda2dc11df1af3b6c5da8eaade00754e53dae834b3ffc90bd723a4d" Jan 20 17:23:37 crc kubenswrapper[4558]: E0120 17:23:37.972931 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3bb26a3f6eda2dc11df1af3b6c5da8eaade00754e53dae834b3ffc90bd723a4d\": container with ID starting with 3bb26a3f6eda2dc11df1af3b6c5da8eaade00754e53dae834b3ffc90bd723a4d not found: ID does not exist" containerID="3bb26a3f6eda2dc11df1af3b6c5da8eaade00754e53dae834b3ffc90bd723a4d" Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.972946 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3bb26a3f6eda2dc11df1af3b6c5da8eaade00754e53dae834b3ffc90bd723a4d"} err="failed to get container status \"3bb26a3f6eda2dc11df1af3b6c5da8eaade00754e53dae834b3ffc90bd723a4d\": rpc error: code = NotFound desc = could not find container \"3bb26a3f6eda2dc11df1af3b6c5da8eaade00754e53dae834b3ffc90bd723a4d\": container with ID starting with 3bb26a3f6eda2dc11df1af3b6c5da8eaade00754e53dae834b3ffc90bd723a4d not found: ID does not exist" Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.972957 4558 scope.go:117] "RemoveContainer" containerID="18761a5d720239c02db342d5b5b3ea378812a4ec0db6f34be8c95502ff3eb619" Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.973513 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"18761a5d720239c02db342d5b5b3ea378812a4ec0db6f34be8c95502ff3eb619"} err="failed to get container status \"18761a5d720239c02db342d5b5b3ea378812a4ec0db6f34be8c95502ff3eb619\": rpc error: code = NotFound desc = could not find container \"18761a5d720239c02db342d5b5b3ea378812a4ec0db6f34be8c95502ff3eb619\": container with ID starting with 18761a5d720239c02db342d5b5b3ea378812a4ec0db6f34be8c95502ff3eb619 not found: ID does not exist" Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.973525 4558 scope.go:117] "RemoveContainer" containerID="3bb26a3f6eda2dc11df1af3b6c5da8eaade00754e53dae834b3ffc90bd723a4d" Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.973788 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3bb26a3f6eda2dc11df1af3b6c5da8eaade00754e53dae834b3ffc90bd723a4d"} err="failed to get container status \"3bb26a3f6eda2dc11df1af3b6c5da8eaade00754e53dae834b3ffc90bd723a4d\": rpc error: code = NotFound desc = could not find container \"3bb26a3f6eda2dc11df1af3b6c5da8eaade00754e53dae834b3ffc90bd723a4d\": container with ID starting with 3bb26a3f6eda2dc11df1af3b6c5da8eaade00754e53dae834b3ffc90bd723a4d not found: ID does not exist" Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.977174 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05e1a302-040a-46ca-a3c2-e4c8c6390091-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"05e1a302-040a-46ca-a3c2-e4c8c6390091\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.977213 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f2f1744d-79dd-43bc-8bea-93e00672c805-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"f2f1744d-79dd-43bc-8bea-93e00672c805\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.977240 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/05e1a302-040a-46ca-a3c2-e4c8c6390091-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"05e1a302-040a-46ca-a3c2-e4c8c6390091\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.977257 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/05e1a302-040a-46ca-a3c2-e4c8c6390091-scripts\") pod \"glance-default-external-api-0\" (UID: \"05e1a302-040a-46ca-a3c2-e4c8c6390091\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.977277 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f2f1744d-79dd-43bc-8bea-93e00672c805-scripts\") pod \"glance-default-internal-api-0\" (UID: \"f2f1744d-79dd-43bc-8bea-93e00672c805\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.977296 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"glance-default-external-api-0\" (UID: \"05e1a302-040a-46ca-a3c2-e4c8c6390091\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.977337 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f2f1744d-79dd-43bc-8bea-93e00672c805-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"f2f1744d-79dd-43bc-8bea-93e00672c805\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.977365 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2f1744d-79dd-43bc-8bea-93e00672c805-config-data\") pod \"glance-default-internal-api-0\" (UID: \"f2f1744d-79dd-43bc-8bea-93e00672c805\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.977445 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/05e1a302-040a-46ca-a3c2-e4c8c6390091-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"05e1a302-040a-46ca-a3c2-e4c8c6390091\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.977467 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g568f\" (UniqueName: \"kubernetes.io/projected/05e1a302-040a-46ca-a3c2-e4c8c6390091-kube-api-access-g568f\") pod \"glance-default-external-api-0\" (UID: \"05e1a302-040a-46ca-a3c2-e4c8c6390091\") " 
pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.977506 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f2f1744d-79dd-43bc-8bea-93e00672c805-logs\") pod \"glance-default-internal-api-0\" (UID: \"f2f1744d-79dd-43bc-8bea-93e00672c805\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.977571 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"glance-default-internal-api-0\" (UID: \"f2f1744d-79dd-43bc-8bea-93e00672c805\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.985931 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wvv8l\" (UniqueName: \"kubernetes.io/projected/f2f1744d-79dd-43bc-8bea-93e00672c805-kube-api-access-wvv8l\") pod \"glance-default-internal-api-0\" (UID: \"f2f1744d-79dd-43bc-8bea-93e00672c805\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.985995 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/05e1a302-040a-46ca-a3c2-e4c8c6390091-logs\") pod \"glance-default-external-api-0\" (UID: \"05e1a302-040a-46ca-a3c2-e4c8c6390091\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.986037 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2f1744d-79dd-43bc-8bea-93e00672c805-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"f2f1744d-79dd-43bc-8bea-93e00672c805\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:23:37 crc kubenswrapper[4558]: I0120 17:23:37.986121 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05e1a302-040a-46ca-a3c2-e4c8c6390091-config-data\") pod \"glance-default-external-api-0\" (UID: \"05e1a302-040a-46ca-a3c2-e4c8c6390091\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.088333 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05e1a302-040a-46ca-a3c2-e4c8c6390091-config-data\") pod \"glance-default-external-api-0\" (UID: \"05e1a302-040a-46ca-a3c2-e4c8c6390091\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.088388 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05e1a302-040a-46ca-a3c2-e4c8c6390091-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"05e1a302-040a-46ca-a3c2-e4c8c6390091\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.088409 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/f2f1744d-79dd-43bc-8bea-93e00672c805-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"f2f1744d-79dd-43bc-8bea-93e00672c805\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.088435 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/05e1a302-040a-46ca-a3c2-e4c8c6390091-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"05e1a302-040a-46ca-a3c2-e4c8c6390091\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.088456 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/05e1a302-040a-46ca-a3c2-e4c8c6390091-scripts\") pod \"glance-default-external-api-0\" (UID: \"05e1a302-040a-46ca-a3c2-e4c8c6390091\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.088472 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f2f1744d-79dd-43bc-8bea-93e00672c805-scripts\") pod \"glance-default-internal-api-0\" (UID: \"f2f1744d-79dd-43bc-8bea-93e00672c805\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.088489 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"glance-default-external-api-0\" (UID: \"05e1a302-040a-46ca-a3c2-e4c8c6390091\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.088516 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f2f1744d-79dd-43bc-8bea-93e00672c805-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"f2f1744d-79dd-43bc-8bea-93e00672c805\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.088539 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2f1744d-79dd-43bc-8bea-93e00672c805-config-data\") pod \"glance-default-internal-api-0\" (UID: \"f2f1744d-79dd-43bc-8bea-93e00672c805\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.088598 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/05e1a302-040a-46ca-a3c2-e4c8c6390091-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"05e1a302-040a-46ca-a3c2-e4c8c6390091\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.088616 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g568f\" (UniqueName: \"kubernetes.io/projected/05e1a302-040a-46ca-a3c2-e4c8c6390091-kube-api-access-g568f\") pod \"glance-default-external-api-0\" (UID: \"05e1a302-040a-46ca-a3c2-e4c8c6390091\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.088649 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/f2f1744d-79dd-43bc-8bea-93e00672c805-logs\") pod \"glance-default-internal-api-0\" (UID: \"f2f1744d-79dd-43bc-8bea-93e00672c805\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.088704 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"glance-default-internal-api-0\" (UID: \"f2f1744d-79dd-43bc-8bea-93e00672c805\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.088737 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wvv8l\" (UniqueName: \"kubernetes.io/projected/f2f1744d-79dd-43bc-8bea-93e00672c805-kube-api-access-wvv8l\") pod \"glance-default-internal-api-0\" (UID: \"f2f1744d-79dd-43bc-8bea-93e00672c805\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.088763 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/05e1a302-040a-46ca-a3c2-e4c8c6390091-logs\") pod \"glance-default-external-api-0\" (UID: \"05e1a302-040a-46ca-a3c2-e4c8c6390091\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.088785 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2f1744d-79dd-43bc-8bea-93e00672c805-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"f2f1744d-79dd-43bc-8bea-93e00672c805\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.091014 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f2f1744d-79dd-43bc-8bea-93e00672c805-logs\") pod \"glance-default-internal-api-0\" (UID: \"f2f1744d-79dd-43bc-8bea-93e00672c805\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.091322 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/05e1a302-040a-46ca-a3c2-e4c8c6390091-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"05e1a302-040a-46ca-a3c2-e4c8c6390091\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.091926 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/05e1a302-040a-46ca-a3c2-e4c8c6390091-logs\") pod \"glance-default-external-api-0\" (UID: \"05e1a302-040a-46ca-a3c2-e4c8c6390091\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.091990 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"glance-default-external-api-0\" (UID: \"05e1a302-040a-46ca-a3c2-e4c8c6390091\") device mount path \"/mnt/openstack/pv14\"" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.092063 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage15-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage15-crc\") pod \"glance-default-internal-api-0\" (UID: \"f2f1744d-79dd-43bc-8bea-93e00672c805\") device mount path \"/mnt/openstack/pv15\"" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.092236 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f2f1744d-79dd-43bc-8bea-93e00672c805-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"f2f1744d-79dd-43bc-8bea-93e00672c805\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.101199 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2f1744d-79dd-43bc-8bea-93e00672c805-config-data\") pod \"glance-default-internal-api-0\" (UID: \"f2f1744d-79dd-43bc-8bea-93e00672c805\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.101587 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05e1a302-040a-46ca-a3c2-e4c8c6390091-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"05e1a302-040a-46ca-a3c2-e4c8c6390091\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.105745 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g568f\" (UniqueName: \"kubernetes.io/projected/05e1a302-040a-46ca-a3c2-e4c8c6390091-kube-api-access-g568f\") pod \"glance-default-external-api-0\" (UID: \"05e1a302-040a-46ca-a3c2-e4c8c6390091\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.106688 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2f1744d-79dd-43bc-8bea-93e00672c805-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"f2f1744d-79dd-43bc-8bea-93e00672c805\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.107272 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05e1a302-040a-46ca-a3c2-e4c8c6390091-config-data\") pod \"glance-default-external-api-0\" (UID: \"05e1a302-040a-46ca-a3c2-e4c8c6390091\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.111072 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/05e1a302-040a-46ca-a3c2-e4c8c6390091-scripts\") pod \"glance-default-external-api-0\" (UID: \"05e1a302-040a-46ca-a3c2-e4c8c6390091\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.111085 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f2f1744d-79dd-43bc-8bea-93e00672c805-scripts\") pod \"glance-default-internal-api-0\" (UID: \"f2f1744d-79dd-43bc-8bea-93e00672c805\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.111317 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/f2f1744d-79dd-43bc-8bea-93e00672c805-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"f2f1744d-79dd-43bc-8bea-93e00672c805\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.111673 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wvv8l\" (UniqueName: \"kubernetes.io/projected/f2f1744d-79dd-43bc-8bea-93e00672c805-kube-api-access-wvv8l\") pod \"glance-default-internal-api-0\" (UID: \"f2f1744d-79dd-43bc-8bea-93e00672c805\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.113063 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/05e1a302-040a-46ca-a3c2-e4c8c6390091-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"05e1a302-040a-46ca-a3c2-e4c8c6390091\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.128028 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"glance-default-external-api-0\" (UID: \"05e1a302-040a-46ca-a3c2-e4c8c6390091\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.157528 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"glance-default-internal-api-0\" (UID: \"f2f1744d-79dd-43bc-8bea-93e00672c805\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.190419 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.207784 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-db-sync-tbwhp" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.217593 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-db-sync-cnn24" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.222309 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.291064 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2e492b4c-9f94-4c1e-87e7-5f63dbb9344b-logs\") pod \"2e492b4c-9f94-4c1e-87e7-5f63dbb9344b\" (UID: \"2e492b4c-9f94-4c1e-87e7-5f63dbb9344b\") " Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.291483 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2e492b4c-9f94-4c1e-87e7-5f63dbb9344b-logs" (OuterVolumeSpecName: "logs") pod "2e492b4c-9f94-4c1e-87e7-5f63dbb9344b" (UID: "2e492b4c-9f94-4c1e-87e7-5f63dbb9344b"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.300239 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-79gr8\" (UniqueName: \"kubernetes.io/projected/acb70f0a-6559-44d8-9182-a43381f314b4-kube-api-access-79gr8\") pod \"acb70f0a-6559-44d8-9182-a43381f314b4\" (UID: \"acb70f0a-6559-44d8-9182-a43381f314b4\") " Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.300791 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9f5p8\" (UniqueName: \"kubernetes.io/projected/2e492b4c-9f94-4c1e-87e7-5f63dbb9344b-kube-api-access-9f5p8\") pod \"2e492b4c-9f94-4c1e-87e7-5f63dbb9344b\" (UID: \"2e492b4c-9f94-4c1e-87e7-5f63dbb9344b\") " Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.300838 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/acb70f0a-6559-44d8-9182-a43381f314b4-db-sync-config-data\") pod \"acb70f0a-6559-44d8-9182-a43381f314b4\" (UID: \"acb70f0a-6559-44d8-9182-a43381f314b4\") " Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.300948 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e492b4c-9f94-4c1e-87e7-5f63dbb9344b-combined-ca-bundle\") pod \"2e492b4c-9f94-4c1e-87e7-5f63dbb9344b\" (UID: \"2e492b4c-9f94-4c1e-87e7-5f63dbb9344b\") " Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.300974 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acb70f0a-6559-44d8-9182-a43381f314b4-combined-ca-bundle\") pod \"acb70f0a-6559-44d8-9182-a43381f314b4\" (UID: \"acb70f0a-6559-44d8-9182-a43381f314b4\") " Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.303445 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2e492b4c-9f94-4c1e-87e7-5f63dbb9344b-kube-api-access-9f5p8" (OuterVolumeSpecName: "kube-api-access-9f5p8") pod "2e492b4c-9f94-4c1e-87e7-5f63dbb9344b" (UID: "2e492b4c-9f94-4c1e-87e7-5f63dbb9344b"). InnerVolumeSpecName "kube-api-access-9f5p8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.304947 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/acb70f0a-6559-44d8-9182-a43381f314b4-kube-api-access-79gr8" (OuterVolumeSpecName: "kube-api-access-79gr8") pod "acb70f0a-6559-44d8-9182-a43381f314b4" (UID: "acb70f0a-6559-44d8-9182-a43381f314b4"). InnerVolumeSpecName "kube-api-access-79gr8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.304993 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2e492b4c-9f94-4c1e-87e7-5f63dbb9344b-scripts\") pod \"2e492b4c-9f94-4c1e-87e7-5f63dbb9344b\" (UID: \"2e492b4c-9f94-4c1e-87e7-5f63dbb9344b\") " Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.305034 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e492b4c-9f94-4c1e-87e7-5f63dbb9344b-config-data\") pod \"2e492b4c-9f94-4c1e-87e7-5f63dbb9344b\" (UID: \"2e492b4c-9f94-4c1e-87e7-5f63dbb9344b\") " Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.310015 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2e492b4c-9f94-4c1e-87e7-5f63dbb9344b-scripts" (OuterVolumeSpecName: "scripts") pod "2e492b4c-9f94-4c1e-87e7-5f63dbb9344b" (UID: "2e492b4c-9f94-4c1e-87e7-5f63dbb9344b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.310446 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/acb70f0a-6559-44d8-9182-a43381f314b4-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "acb70f0a-6559-44d8-9182-a43381f314b4" (UID: "acb70f0a-6559-44d8-9182-a43381f314b4"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.311830 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-79gr8\" (UniqueName: \"kubernetes.io/projected/acb70f0a-6559-44d8-9182-a43381f314b4-kube-api-access-79gr8\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.311851 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9f5p8\" (UniqueName: \"kubernetes.io/projected/2e492b4c-9f94-4c1e-87e7-5f63dbb9344b-kube-api-access-9f5p8\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.311862 4558 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/acb70f0a-6559-44d8-9182-a43381f314b4-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.311874 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2e492b4c-9f94-4c1e-87e7-5f63dbb9344b-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.311885 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2e492b4c-9f94-4c1e-87e7-5f63dbb9344b-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.326053 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2e492b4c-9f94-4c1e-87e7-5f63dbb9344b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2e492b4c-9f94-4c1e-87e7-5f63dbb9344b" (UID: "2e492b4c-9f94-4c1e-87e7-5f63dbb9344b"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.328691 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/acb70f0a-6559-44d8-9182-a43381f314b4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "acb70f0a-6559-44d8-9182-a43381f314b4" (UID: "acb70f0a-6559-44d8-9182-a43381f314b4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.331643 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2e492b4c-9f94-4c1e-87e7-5f63dbb9344b-config-data" (OuterVolumeSpecName: "config-data") pod "2e492b4c-9f94-4c1e-87e7-5f63dbb9344b" (UID: "2e492b4c-9f94-4c1e-87e7-5f63dbb9344b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.414096 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2e492b4c-9f94-4c1e-87e7-5f63dbb9344b-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.414131 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2e492b4c-9f94-4c1e-87e7-5f63dbb9344b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.414146 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acb70f0a-6559-44d8-9182-a43381f314b4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.576400 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1" path="/var/lib/kubelet/pods/4c4e9d7a-ee9e-4f5f-b9e2-aadbbcb15ae1/volumes" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.577548 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="654eadba-3696-45e5-97fa-71a403f8a840" path="/var/lib/kubelet/pods/654eadba-3696-45e5-97fa-71a403f8a840/volumes" Jan 20 17:23:38 crc kubenswrapper[4558]: W0120 17:23:38.645496 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod05e1a302_040a_46ca_a3c2_e4c8c6390091.slice/crio-63c772d2033e23df0816d0065fd2885f141742dee74609e5acad19676410ab7d WatchSource:0}: Error finding container 63c772d2033e23df0816d0065fd2885f141742dee74609e5acad19676410ab7d: Status 404 returned error can't find the container with id 63c772d2033e23df0816d0065fd2885f141742dee74609e5acad19676410ab7d Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.650044 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.728917 4558 generic.go:334] "Generic (PLEG): container finished" podID="b03348ed-66d6-4076-a5fa-0ccf182e8e3c" containerID="ff1a9f56eba4d6a58e189c76b9c17f98147866827a5459b702643795452019b3" exitCode=0 Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.729005 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-sync-dpr25" event={"ID":"b03348ed-66d6-4076-a5fa-0ccf182e8e3c","Type":"ContainerDied","Data":"ff1a9f56eba4d6a58e189c76b9c17f98147866827a5459b702643795452019b3"} Jan 20 17:23:38 crc 
kubenswrapper[4558]: I0120 17:23:38.744497 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"65bcc9ab-370b-457f-a256-de44fa447395","Type":"ContainerStarted","Data":"453e58ee049b9ad309e7d2b3540a3139398c90cc16acc2c0fafb3e6dcb9ce927"} Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.744652 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="65bcc9ab-370b-457f-a256-de44fa447395" containerName="ceilometer-central-agent" containerID="cri-o://f222360e5761260af77d9f2762b3887e3a8141792a6c327b2acf7b0128e1ca74" gracePeriod=30 Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.744732 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="65bcc9ab-370b-457f-a256-de44fa447395" containerName="proxy-httpd" containerID="cri-o://453e58ee049b9ad309e7d2b3540a3139398c90cc16acc2c0fafb3e6dcb9ce927" gracePeriod=30 Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.744784 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="65bcc9ab-370b-457f-a256-de44fa447395" containerName="ceilometer-notification-agent" containerID="cri-o://45c0e030f75bcd8df529339f70c23bd7500873605fb95750023ec5f527191a3a" gracePeriod=30 Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.744801 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="65bcc9ab-370b-457f-a256-de44fa447395" containerName="sg-core" containerID="cri-o://7d1eca58dad37d535e3971097db8c897d92fc464555543dfe4336c4625410888" gracePeriod=30 Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.744732 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.745281 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.751337 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-sync-cnn24" event={"ID":"acb70f0a-6559-44d8-9182-a43381f314b4","Type":"ContainerDied","Data":"f5da9307e63d269f37fda0240ab410043f248ae133189cfe6c4c23b84cf54681"} Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.751367 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f5da9307e63d269f37fda0240ab410043f248ae133189cfe6c4c23b84cf54681" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.751421 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-db-sync-cnn24" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.759086 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"05e1a302-040a-46ca-a3c2-e4c8c6390091","Type":"ContainerStarted","Data":"63c772d2033e23df0816d0065fd2885f141742dee74609e5acad19676410ab7d"} Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.761492 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-db-sync-tbwhp" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.762019 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-sync-tbwhp" event={"ID":"2e492b4c-9f94-4c1e-87e7-5f63dbb9344b","Type":"ContainerDied","Data":"68b68f5c616e89ffdb1554638200414389d2de5b9c7d95ffc2588dff1f1713f2"} Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.762039 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="68b68f5c616e89ffdb1554638200414389d2de5b9c7d95ffc2588dff1f1713f2" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.803784 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/placement-56dc9f6ff9-hc2fq"] Jan 20 17:23:38 crc kubenswrapper[4558]: E0120 17:23:38.804148 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="acb70f0a-6559-44d8-9182-a43381f314b4" containerName="barbican-db-sync" Jan 20 17:23:38 crc kubenswrapper[4558]: W0120 17:23:38.807992 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf2f1744d_79dd_43bc_8bea_93e00672c805.slice/crio-d3e06842b937cb58486b7028892a08f5c2149d2f9eeee036b5f7c7b8461615b9 WatchSource:0}: Error finding container d3e06842b937cb58486b7028892a08f5c2149d2f9eeee036b5f7c7b8461615b9: Status 404 returned error can't find the container with id d3e06842b937cb58486b7028892a08f5c2149d2f9eeee036b5f7c7b8461615b9 Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.804181 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="acb70f0a-6559-44d8-9182-a43381f314b4" containerName="barbican-db-sync" Jan 20 17:23:38 crc kubenswrapper[4558]: E0120 17:23:38.811929 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e492b4c-9f94-4c1e-87e7-5f63dbb9344b" containerName="placement-db-sync" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.811941 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e492b4c-9f94-4c1e-87e7-5f63dbb9344b" containerName="placement-db-sync" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.812287 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="2e492b4c-9f94-4c1e-87e7-5f63dbb9344b" containerName="placement-db-sync" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.812312 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="acb70f0a-6559-44d8-9182-a43381f314b4" containerName="barbican-db-sync" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.813120 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-56dc9f6ff9-hc2fq" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.830095 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"placement-placement-dockercfg-nmsw2" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.830103 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"placement-scripts" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.830224 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"placement-config-data" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.831528 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=2.216219808 podStartE2EDuration="6.83151142s" podCreationTimestamp="2026-01-20 17:23:32 +0000 UTC" firstStartedPulling="2026-01-20 17:23:33.655522308 +0000 UTC m=+2507.415860275" lastFinishedPulling="2026-01-20 17:23:38.270813919 +0000 UTC m=+2512.031151887" observedRunningTime="2026-01-20 17:23:38.811524413 +0000 UTC m=+2512.571862380" watchObservedRunningTime="2026-01-20 17:23:38.83151142 +0000 UTC m=+2512.591849388" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.833751 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-56dc9f6ff9-hc2fq"] Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.892329 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-worker-67ccd5cf94-6lkng"] Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.893609 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-67ccd5cf94-6lkng" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.895860 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-worker-config-data" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.896115 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-config-data" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.896366 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-barbican-dockercfg-vrxkb" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.925805 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36591742-f9df-4244-b32c-cc952d81893a-config-data\") pod \"placement-56dc9f6ff9-hc2fq\" (UID: \"36591742-f9df-4244-b32c-cc952d81893a\") " pod="openstack-kuttl-tests/placement-56dc9f6ff9-hc2fq" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.925851 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36591742-f9df-4244-b32c-cc952d81893a-scripts\") pod \"placement-56dc9f6ff9-hc2fq\" (UID: \"36591742-f9df-4244-b32c-cc952d81893a\") " pod="openstack-kuttl-tests/placement-56dc9f6ff9-hc2fq" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.925936 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/36591742-f9df-4244-b32c-cc952d81893a-logs\") pod \"placement-56dc9f6ff9-hc2fq\" (UID: \"36591742-f9df-4244-b32c-cc952d81893a\") " pod="openstack-kuttl-tests/placement-56dc9f6ff9-hc2fq" Jan 20 
17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.925999 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36591742-f9df-4244-b32c-cc952d81893a-combined-ca-bundle\") pod \"placement-56dc9f6ff9-hc2fq\" (UID: \"36591742-f9df-4244-b32c-cc952d81893a\") " pod="openstack-kuttl-tests/placement-56dc9f6ff9-hc2fq" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.926056 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-msl4r\" (UniqueName: \"kubernetes.io/projected/36591742-f9df-4244-b32c-cc952d81893a-kube-api-access-msl4r\") pod \"placement-56dc9f6ff9-hc2fq\" (UID: \"36591742-f9df-4244-b32c-cc952d81893a\") " pod="openstack-kuttl-tests/placement-56dc9f6ff9-hc2fq" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.939449 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-worker-67ccd5cf94-6lkng"] Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.949466 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-5cbb954895-rgzbf"] Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.951727 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-5cbb954895-rgzbf" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.953904 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-keystone-listener-config-data" Jan 20 17:23:38 crc kubenswrapper[4558]: I0120 17:23:38.988217 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-5cbb954895-rgzbf"] Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.027520 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/36591742-f9df-4244-b32c-cc952d81893a-logs\") pod \"placement-56dc9f6ff9-hc2fq\" (UID: \"36591742-f9df-4244-b32c-cc952d81893a\") " pod="openstack-kuttl-tests/placement-56dc9f6ff9-hc2fq" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.027578 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36591742-f9df-4244-b32c-cc952d81893a-combined-ca-bundle\") pod \"placement-56dc9f6ff9-hc2fq\" (UID: \"36591742-f9df-4244-b32c-cc952d81893a\") " pod="openstack-kuttl-tests/placement-56dc9f6ff9-hc2fq" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.027622 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-msl4r\" (UniqueName: \"kubernetes.io/projected/36591742-f9df-4244-b32c-cc952d81893a-kube-api-access-msl4r\") pod \"placement-56dc9f6ff9-hc2fq\" (UID: \"36591742-f9df-4244-b32c-cc952d81893a\") " pod="openstack-kuttl-tests/placement-56dc9f6ff9-hc2fq" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.027648 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4f8d7f7-b0d0-4643-834d-1a5056d5cf89-config-data\") pod \"barbican-worker-67ccd5cf94-6lkng\" (UID: \"d4f8d7f7-b0d0-4643-834d-1a5056d5cf89\") " pod="openstack-kuttl-tests/barbican-worker-67ccd5cf94-6lkng" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.027669 4558 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8f85165-600b-47f4-b459-e1eb023907a6-config-data\") pod \"barbican-keystone-listener-5cbb954895-rgzbf\" (UID: \"b8f85165-600b-47f4-b459-e1eb023907a6\") " pod="openstack-kuttl-tests/barbican-keystone-listener-5cbb954895-rgzbf" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.027688 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b8f85165-600b-47f4-b459-e1eb023907a6-config-data-custom\") pod \"barbican-keystone-listener-5cbb954895-rgzbf\" (UID: \"b8f85165-600b-47f4-b459-e1eb023907a6\") " pod="openstack-kuttl-tests/barbican-keystone-listener-5cbb954895-rgzbf" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.027731 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d4f8d7f7-b0d0-4643-834d-1a5056d5cf89-config-data-custom\") pod \"barbican-worker-67ccd5cf94-6lkng\" (UID: \"d4f8d7f7-b0d0-4643-834d-1a5056d5cf89\") " pod="openstack-kuttl-tests/barbican-worker-67ccd5cf94-6lkng" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.027754 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g5l9d\" (UniqueName: \"kubernetes.io/projected/d4f8d7f7-b0d0-4643-834d-1a5056d5cf89-kube-api-access-g5l9d\") pod \"barbican-worker-67ccd5cf94-6lkng\" (UID: \"d4f8d7f7-b0d0-4643-834d-1a5056d5cf89\") " pod="openstack-kuttl-tests/barbican-worker-67ccd5cf94-6lkng" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.027782 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b8f85165-600b-47f4-b459-e1eb023907a6-logs\") pod \"barbican-keystone-listener-5cbb954895-rgzbf\" (UID: \"b8f85165-600b-47f4-b459-e1eb023907a6\") " pod="openstack-kuttl-tests/barbican-keystone-listener-5cbb954895-rgzbf" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.027819 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4f8d7f7-b0d0-4643-834d-1a5056d5cf89-combined-ca-bundle\") pod \"barbican-worker-67ccd5cf94-6lkng\" (UID: \"d4f8d7f7-b0d0-4643-834d-1a5056d5cf89\") " pod="openstack-kuttl-tests/barbican-worker-67ccd5cf94-6lkng" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.027836 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8f85165-600b-47f4-b459-e1eb023907a6-combined-ca-bundle\") pod \"barbican-keystone-listener-5cbb954895-rgzbf\" (UID: \"b8f85165-600b-47f4-b459-e1eb023907a6\") " pod="openstack-kuttl-tests/barbican-keystone-listener-5cbb954895-rgzbf" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.027858 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d4f8d7f7-b0d0-4643-834d-1a5056d5cf89-logs\") pod \"barbican-worker-67ccd5cf94-6lkng\" (UID: \"d4f8d7f7-b0d0-4643-834d-1a5056d5cf89\") " pod="openstack-kuttl-tests/barbican-worker-67ccd5cf94-6lkng" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.027876 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/36591742-f9df-4244-b32c-cc952d81893a-config-data\") pod \"placement-56dc9f6ff9-hc2fq\" (UID: \"36591742-f9df-4244-b32c-cc952d81893a\") " pod="openstack-kuttl-tests/placement-56dc9f6ff9-hc2fq" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.027898 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-94lnp\" (UniqueName: \"kubernetes.io/projected/b8f85165-600b-47f4-b459-e1eb023907a6-kube-api-access-94lnp\") pod \"barbican-keystone-listener-5cbb954895-rgzbf\" (UID: \"b8f85165-600b-47f4-b459-e1eb023907a6\") " pod="openstack-kuttl-tests/barbican-keystone-listener-5cbb954895-rgzbf" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.027925 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36591742-f9df-4244-b32c-cc952d81893a-scripts\") pod \"placement-56dc9f6ff9-hc2fq\" (UID: \"36591742-f9df-4244-b32c-cc952d81893a\") " pod="openstack-kuttl-tests/placement-56dc9f6ff9-hc2fq" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.030856 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/36591742-f9df-4244-b32c-cc952d81893a-logs\") pod \"placement-56dc9f6ff9-hc2fq\" (UID: \"36591742-f9df-4244-b32c-cc952d81893a\") " pod="openstack-kuttl-tests/placement-56dc9f6ff9-hc2fq" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.035872 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36591742-f9df-4244-b32c-cc952d81893a-scripts\") pod \"placement-56dc9f6ff9-hc2fq\" (UID: \"36591742-f9df-4244-b32c-cc952d81893a\") " pod="openstack-kuttl-tests/placement-56dc9f6ff9-hc2fq" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.038921 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36591742-f9df-4244-b32c-cc952d81893a-combined-ca-bundle\") pod \"placement-56dc9f6ff9-hc2fq\" (UID: \"36591742-f9df-4244-b32c-cc952d81893a\") " pod="openstack-kuttl-tests/placement-56dc9f6ff9-hc2fq" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.040843 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36591742-f9df-4244-b32c-cc952d81893a-config-data\") pod \"placement-56dc9f6ff9-hc2fq\" (UID: \"36591742-f9df-4244-b32c-cc952d81893a\") " pod="openstack-kuttl-tests/placement-56dc9f6ff9-hc2fq" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.054227 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-api-547b6df4c5-87fjk"] Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.055312 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-msl4r\" (UniqueName: \"kubernetes.io/projected/36591742-f9df-4244-b32c-cc952d81893a-kube-api-access-msl4r\") pod \"placement-56dc9f6ff9-hc2fq\" (UID: \"36591742-f9df-4244-b32c-cc952d81893a\") " pod="openstack-kuttl-tests/placement-56dc9f6ff9-hc2fq" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.056139 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-api-547b6df4c5-87fjk" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.058823 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-api-config-data" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.070052 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-api-547b6df4c5-87fjk"] Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.129667 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b8f85165-600b-47f4-b459-e1eb023907a6-logs\") pod \"barbican-keystone-listener-5cbb954895-rgzbf\" (UID: \"b8f85165-600b-47f4-b459-e1eb023907a6\") " pod="openstack-kuttl-tests/barbican-keystone-listener-5cbb954895-rgzbf" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.129725 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/db32104f-5145-43ca-bc9a-5c410bf89933-config-data-custom\") pod \"barbican-api-547b6df4c5-87fjk\" (UID: \"db32104f-5145-43ca-bc9a-5c410bf89933\") " pod="openstack-kuttl-tests/barbican-api-547b6df4c5-87fjk" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.129789 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4f8d7f7-b0d0-4643-834d-1a5056d5cf89-combined-ca-bundle\") pod \"barbican-worker-67ccd5cf94-6lkng\" (UID: \"d4f8d7f7-b0d0-4643-834d-1a5056d5cf89\") " pod="openstack-kuttl-tests/barbican-worker-67ccd5cf94-6lkng" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.129812 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8f85165-600b-47f4-b459-e1eb023907a6-combined-ca-bundle\") pod \"barbican-keystone-listener-5cbb954895-rgzbf\" (UID: \"b8f85165-600b-47f4-b459-e1eb023907a6\") " pod="openstack-kuttl-tests/barbican-keystone-listener-5cbb954895-rgzbf" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.129834 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d4f8d7f7-b0d0-4643-834d-1a5056d5cf89-logs\") pod \"barbican-worker-67ccd5cf94-6lkng\" (UID: \"d4f8d7f7-b0d0-4643-834d-1a5056d5cf89\") " pod="openstack-kuttl-tests/barbican-worker-67ccd5cf94-6lkng" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.129872 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-94lnp\" (UniqueName: \"kubernetes.io/projected/b8f85165-600b-47f4-b459-e1eb023907a6-kube-api-access-94lnp\") pod \"barbican-keystone-listener-5cbb954895-rgzbf\" (UID: \"b8f85165-600b-47f4-b459-e1eb023907a6\") " pod="openstack-kuttl-tests/barbican-keystone-listener-5cbb954895-rgzbf" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.129924 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/db32104f-5145-43ca-bc9a-5c410bf89933-logs\") pod \"barbican-api-547b6df4c5-87fjk\" (UID: \"db32104f-5145-43ca-bc9a-5c410bf89933\") " pod="openstack-kuttl-tests/barbican-api-547b6df4c5-87fjk" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.129973 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/db32104f-5145-43ca-bc9a-5c410bf89933-combined-ca-bundle\") pod \"barbican-api-547b6df4c5-87fjk\" (UID: \"db32104f-5145-43ca-bc9a-5c410bf89933\") " pod="openstack-kuttl-tests/barbican-api-547b6df4c5-87fjk" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.130052 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4f8d7f7-b0d0-4643-834d-1a5056d5cf89-config-data\") pod \"barbican-worker-67ccd5cf94-6lkng\" (UID: \"d4f8d7f7-b0d0-4643-834d-1a5056d5cf89\") " pod="openstack-kuttl-tests/barbican-worker-67ccd5cf94-6lkng" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.130075 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8f85165-600b-47f4-b459-e1eb023907a6-config-data\") pod \"barbican-keystone-listener-5cbb954895-rgzbf\" (UID: \"b8f85165-600b-47f4-b459-e1eb023907a6\") " pod="openstack-kuttl-tests/barbican-keystone-listener-5cbb954895-rgzbf" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.130094 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db32104f-5145-43ca-bc9a-5c410bf89933-config-data\") pod \"barbican-api-547b6df4c5-87fjk\" (UID: \"db32104f-5145-43ca-bc9a-5c410bf89933\") " pod="openstack-kuttl-tests/barbican-api-547b6df4c5-87fjk" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.130114 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b8f85165-600b-47f4-b459-e1eb023907a6-config-data-custom\") pod \"barbican-keystone-listener-5cbb954895-rgzbf\" (UID: \"b8f85165-600b-47f4-b459-e1eb023907a6\") " pod="openstack-kuttl-tests/barbican-keystone-listener-5cbb954895-rgzbf" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.130157 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gmmfp\" (UniqueName: \"kubernetes.io/projected/db32104f-5145-43ca-bc9a-5c410bf89933-kube-api-access-gmmfp\") pod \"barbican-api-547b6df4c5-87fjk\" (UID: \"db32104f-5145-43ca-bc9a-5c410bf89933\") " pod="openstack-kuttl-tests/barbican-api-547b6df4c5-87fjk" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.130510 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d4f8d7f7-b0d0-4643-834d-1a5056d5cf89-config-data-custom\") pod \"barbican-worker-67ccd5cf94-6lkng\" (UID: \"d4f8d7f7-b0d0-4643-834d-1a5056d5cf89\") " pod="openstack-kuttl-tests/barbican-worker-67ccd5cf94-6lkng" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.130561 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g5l9d\" (UniqueName: \"kubernetes.io/projected/d4f8d7f7-b0d0-4643-834d-1a5056d5cf89-kube-api-access-g5l9d\") pod \"barbican-worker-67ccd5cf94-6lkng\" (UID: \"d4f8d7f7-b0d0-4643-834d-1a5056d5cf89\") " pod="openstack-kuttl-tests/barbican-worker-67ccd5cf94-6lkng" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.130785 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b8f85165-600b-47f4-b459-e1eb023907a6-logs\") pod \"barbican-keystone-listener-5cbb954895-rgzbf\" (UID: \"b8f85165-600b-47f4-b459-e1eb023907a6\") " 
pod="openstack-kuttl-tests/barbican-keystone-listener-5cbb954895-rgzbf" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.134000 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8f85165-600b-47f4-b459-e1eb023907a6-combined-ca-bundle\") pod \"barbican-keystone-listener-5cbb954895-rgzbf\" (UID: \"b8f85165-600b-47f4-b459-e1eb023907a6\") " pod="openstack-kuttl-tests/barbican-keystone-listener-5cbb954895-rgzbf" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.134876 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4f8d7f7-b0d0-4643-834d-1a5056d5cf89-combined-ca-bundle\") pod \"barbican-worker-67ccd5cf94-6lkng\" (UID: \"d4f8d7f7-b0d0-4643-834d-1a5056d5cf89\") " pod="openstack-kuttl-tests/barbican-worker-67ccd5cf94-6lkng" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.134900 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d4f8d7f7-b0d0-4643-834d-1a5056d5cf89-logs\") pod \"barbican-worker-67ccd5cf94-6lkng\" (UID: \"d4f8d7f7-b0d0-4643-834d-1a5056d5cf89\") " pod="openstack-kuttl-tests/barbican-worker-67ccd5cf94-6lkng" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.140184 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d4f8d7f7-b0d0-4643-834d-1a5056d5cf89-config-data-custom\") pod \"barbican-worker-67ccd5cf94-6lkng\" (UID: \"d4f8d7f7-b0d0-4643-834d-1a5056d5cf89\") " pod="openstack-kuttl-tests/barbican-worker-67ccd5cf94-6lkng" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.140588 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b8f85165-600b-47f4-b459-e1eb023907a6-config-data-custom\") pod \"barbican-keystone-listener-5cbb954895-rgzbf\" (UID: \"b8f85165-600b-47f4-b459-e1eb023907a6\") " pod="openstack-kuttl-tests/barbican-keystone-listener-5cbb954895-rgzbf" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.142545 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4f8d7f7-b0d0-4643-834d-1a5056d5cf89-config-data\") pod \"barbican-worker-67ccd5cf94-6lkng\" (UID: \"d4f8d7f7-b0d0-4643-834d-1a5056d5cf89\") " pod="openstack-kuttl-tests/barbican-worker-67ccd5cf94-6lkng" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.146399 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g5l9d\" (UniqueName: \"kubernetes.io/projected/d4f8d7f7-b0d0-4643-834d-1a5056d5cf89-kube-api-access-g5l9d\") pod \"barbican-worker-67ccd5cf94-6lkng\" (UID: \"d4f8d7f7-b0d0-4643-834d-1a5056d5cf89\") " pod="openstack-kuttl-tests/barbican-worker-67ccd5cf94-6lkng" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.148769 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-94lnp\" (UniqueName: \"kubernetes.io/projected/b8f85165-600b-47f4-b459-e1eb023907a6-kube-api-access-94lnp\") pod \"barbican-keystone-listener-5cbb954895-rgzbf\" (UID: \"b8f85165-600b-47f4-b459-e1eb023907a6\") " pod="openstack-kuttl-tests/barbican-keystone-listener-5cbb954895-rgzbf" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.154685 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/b8f85165-600b-47f4-b459-e1eb023907a6-config-data\") pod \"barbican-keystone-listener-5cbb954895-rgzbf\" (UID: \"b8f85165-600b-47f4-b459-e1eb023907a6\") " pod="openstack-kuttl-tests/barbican-keystone-listener-5cbb954895-rgzbf" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.221392 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/placement-66fdf9847-wwwbv"] Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.230129 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-66fdf9847-wwwbv" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.234098 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-placement-internal-svc" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.235446 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-placement-public-svc" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.242371 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-66fdf9847-wwwbv"] Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.244905 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db32104f-5145-43ca-bc9a-5c410bf89933-config-data\") pod \"barbican-api-547b6df4c5-87fjk\" (UID: \"db32104f-5145-43ca-bc9a-5c410bf89933\") " pod="openstack-kuttl-tests/barbican-api-547b6df4c5-87fjk" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.244985 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gmmfp\" (UniqueName: \"kubernetes.io/projected/db32104f-5145-43ca-bc9a-5c410bf89933-kube-api-access-gmmfp\") pod \"barbican-api-547b6df4c5-87fjk\" (UID: \"db32104f-5145-43ca-bc9a-5c410bf89933\") " pod="openstack-kuttl-tests/barbican-api-547b6df4c5-87fjk" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.245072 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/db32104f-5145-43ca-bc9a-5c410bf89933-config-data-custom\") pod \"barbican-api-547b6df4c5-87fjk\" (UID: \"db32104f-5145-43ca-bc9a-5c410bf89933\") " pod="openstack-kuttl-tests/barbican-api-547b6df4c5-87fjk" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.245196 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/db32104f-5145-43ca-bc9a-5c410bf89933-logs\") pod \"barbican-api-547b6df4c5-87fjk\" (UID: \"db32104f-5145-43ca-bc9a-5c410bf89933\") " pod="openstack-kuttl-tests/barbican-api-547b6df4c5-87fjk" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.245239 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db32104f-5145-43ca-bc9a-5c410bf89933-combined-ca-bundle\") pod \"barbican-api-547b6df4c5-87fjk\" (UID: \"db32104f-5145-43ca-bc9a-5c410bf89933\") " pod="openstack-kuttl-tests/barbican-api-547b6df4c5-87fjk" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.248388 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/db32104f-5145-43ca-bc9a-5c410bf89933-logs\") pod \"barbican-api-547b6df4c5-87fjk\" (UID: \"db32104f-5145-43ca-bc9a-5c410bf89933\") " pod="openstack-kuttl-tests/barbican-api-547b6df4c5-87fjk" Jan 20 17:23:39 
crc kubenswrapper[4558]: I0120 17:23:39.250688 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/db32104f-5145-43ca-bc9a-5c410bf89933-config-data-custom\") pod \"barbican-api-547b6df4c5-87fjk\" (UID: \"db32104f-5145-43ca-bc9a-5c410bf89933\") " pod="openstack-kuttl-tests/barbican-api-547b6df4c5-87fjk" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.254436 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db32104f-5145-43ca-bc9a-5c410bf89933-combined-ca-bundle\") pod \"barbican-api-547b6df4c5-87fjk\" (UID: \"db32104f-5145-43ca-bc9a-5c410bf89933\") " pod="openstack-kuttl-tests/barbican-api-547b6df4c5-87fjk" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.259179 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db32104f-5145-43ca-bc9a-5c410bf89933-config-data\") pod \"barbican-api-547b6df4c5-87fjk\" (UID: \"db32104f-5145-43ca-bc9a-5c410bf89933\") " pod="openstack-kuttl-tests/barbican-api-547b6df4c5-87fjk" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.274081 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gmmfp\" (UniqueName: \"kubernetes.io/projected/db32104f-5145-43ca-bc9a-5c410bf89933-kube-api-access-gmmfp\") pod \"barbican-api-547b6df4c5-87fjk\" (UID: \"db32104f-5145-43ca-bc9a-5c410bf89933\") " pod="openstack-kuttl-tests/barbican-api-547b6df4c5-87fjk" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.304751 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-rcz7c" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.311887 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-56dc9f6ff9-hc2fq" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.352229 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ef71f603-5259-468f-b39f-4e726cdcb2f5-internal-tls-certs\") pod \"placement-66fdf9847-wwwbv\" (UID: \"ef71f603-5259-468f-b39f-4e726cdcb2f5\") " pod="openstack-kuttl-tests/placement-66fdf9847-wwwbv" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.352538 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ef71f603-5259-468f-b39f-4e726cdcb2f5-public-tls-certs\") pod \"placement-66fdf9847-wwwbv\" (UID: \"ef71f603-5259-468f-b39f-4e726cdcb2f5\") " pod="openstack-kuttl-tests/placement-66fdf9847-wwwbv" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.352591 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lkxmz\" (UniqueName: \"kubernetes.io/projected/ef71f603-5259-468f-b39f-4e726cdcb2f5-kube-api-access-lkxmz\") pod \"placement-66fdf9847-wwwbv\" (UID: \"ef71f603-5259-468f-b39f-4e726cdcb2f5\") " pod="openstack-kuttl-tests/placement-66fdf9847-wwwbv" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.352725 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ef71f603-5259-468f-b39f-4e726cdcb2f5-scripts\") pod \"placement-66fdf9847-wwwbv\" (UID: \"ef71f603-5259-468f-b39f-4e726cdcb2f5\") " pod="openstack-kuttl-tests/placement-66fdf9847-wwwbv" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.352756 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef71f603-5259-468f-b39f-4e726cdcb2f5-config-data\") pod \"placement-66fdf9847-wwwbv\" (UID: \"ef71f603-5259-468f-b39f-4e726cdcb2f5\") " pod="openstack-kuttl-tests/placement-66fdf9847-wwwbv" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.352781 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ef71f603-5259-468f-b39f-4e726cdcb2f5-logs\") pod \"placement-66fdf9847-wwwbv\" (UID: \"ef71f603-5259-468f-b39f-4e726cdcb2f5\") " pod="openstack-kuttl-tests/placement-66fdf9847-wwwbv" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.353087 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef71f603-5259-468f-b39f-4e726cdcb2f5-combined-ca-bundle\") pod \"placement-66fdf9847-wwwbv\" (UID: \"ef71f603-5259-468f-b39f-4e726cdcb2f5\") " pod="openstack-kuttl-tests/placement-66fdf9847-wwwbv" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.366868 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-67ccd5cf94-6lkng" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.376600 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-5cbb954895-rgzbf" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.383564 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-api-547b6df4c5-87fjk" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.454453 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb55b781-1113-4879-8efd-d04a2284dffc-combined-ca-bundle\") pod \"fb55b781-1113-4879-8efd-d04a2284dffc\" (UID: \"fb55b781-1113-4879-8efd-d04a2284dffc\") " Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.454562 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zw27l\" (UniqueName: \"kubernetes.io/projected/fb55b781-1113-4879-8efd-d04a2284dffc-kube-api-access-zw27l\") pod \"fb55b781-1113-4879-8efd-d04a2284dffc\" (UID: \"fb55b781-1113-4879-8efd-d04a2284dffc\") " Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.454625 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fb55b781-1113-4879-8efd-d04a2284dffc-scripts\") pod \"fb55b781-1113-4879-8efd-d04a2284dffc\" (UID: \"fb55b781-1113-4879-8efd-d04a2284dffc\") " Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.454722 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/fb55b781-1113-4879-8efd-d04a2284dffc-credential-keys\") pod \"fb55b781-1113-4879-8efd-d04a2284dffc\" (UID: \"fb55b781-1113-4879-8efd-d04a2284dffc\") " Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.454904 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb55b781-1113-4879-8efd-d04a2284dffc-config-data\") pod \"fb55b781-1113-4879-8efd-d04a2284dffc\" (UID: \"fb55b781-1113-4879-8efd-d04a2284dffc\") " Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.454939 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/fb55b781-1113-4879-8efd-d04a2284dffc-fernet-keys\") pod \"fb55b781-1113-4879-8efd-d04a2284dffc\" (UID: \"fb55b781-1113-4879-8efd-d04a2284dffc\") " Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.455288 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ef71f603-5259-468f-b39f-4e726cdcb2f5-public-tls-certs\") pod \"placement-66fdf9847-wwwbv\" (UID: \"ef71f603-5259-468f-b39f-4e726cdcb2f5\") " pod="openstack-kuttl-tests/placement-66fdf9847-wwwbv" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.455318 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lkxmz\" (UniqueName: \"kubernetes.io/projected/ef71f603-5259-468f-b39f-4e726cdcb2f5-kube-api-access-lkxmz\") pod \"placement-66fdf9847-wwwbv\" (UID: \"ef71f603-5259-468f-b39f-4e726cdcb2f5\") " pod="openstack-kuttl-tests/placement-66fdf9847-wwwbv" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.455355 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ef71f603-5259-468f-b39f-4e726cdcb2f5-scripts\") pod \"placement-66fdf9847-wwwbv\" (UID: \"ef71f603-5259-468f-b39f-4e726cdcb2f5\") " pod="openstack-kuttl-tests/placement-66fdf9847-wwwbv" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.455378 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/ef71f603-5259-468f-b39f-4e726cdcb2f5-config-data\") pod \"placement-66fdf9847-wwwbv\" (UID: \"ef71f603-5259-468f-b39f-4e726cdcb2f5\") " pod="openstack-kuttl-tests/placement-66fdf9847-wwwbv" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.455398 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ef71f603-5259-468f-b39f-4e726cdcb2f5-logs\") pod \"placement-66fdf9847-wwwbv\" (UID: \"ef71f603-5259-468f-b39f-4e726cdcb2f5\") " pod="openstack-kuttl-tests/placement-66fdf9847-wwwbv" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.455476 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef71f603-5259-468f-b39f-4e726cdcb2f5-combined-ca-bundle\") pod \"placement-66fdf9847-wwwbv\" (UID: \"ef71f603-5259-468f-b39f-4e726cdcb2f5\") " pod="openstack-kuttl-tests/placement-66fdf9847-wwwbv" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.455511 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ef71f603-5259-468f-b39f-4e726cdcb2f5-internal-tls-certs\") pod \"placement-66fdf9847-wwwbv\" (UID: \"ef71f603-5259-468f-b39f-4e726cdcb2f5\") " pod="openstack-kuttl-tests/placement-66fdf9847-wwwbv" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.458348 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ef71f603-5259-468f-b39f-4e726cdcb2f5-logs\") pod \"placement-66fdf9847-wwwbv\" (UID: \"ef71f603-5259-468f-b39f-4e726cdcb2f5\") " pod="openstack-kuttl-tests/placement-66fdf9847-wwwbv" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.466870 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ef71f603-5259-468f-b39f-4e726cdcb2f5-public-tls-certs\") pod \"placement-66fdf9847-wwwbv\" (UID: \"ef71f603-5259-468f-b39f-4e726cdcb2f5\") " pod="openstack-kuttl-tests/placement-66fdf9847-wwwbv" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.469111 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ef71f603-5259-468f-b39f-4e726cdcb2f5-scripts\") pod \"placement-66fdf9847-wwwbv\" (UID: \"ef71f603-5259-468f-b39f-4e726cdcb2f5\") " pod="openstack-kuttl-tests/placement-66fdf9847-wwwbv" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.471110 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef71f603-5259-468f-b39f-4e726cdcb2f5-config-data\") pod \"placement-66fdf9847-wwwbv\" (UID: \"ef71f603-5259-468f-b39f-4e726cdcb2f5\") " pod="openstack-kuttl-tests/placement-66fdf9847-wwwbv" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.471288 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb55b781-1113-4879-8efd-d04a2284dffc-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "fb55b781-1113-4879-8efd-d04a2284dffc" (UID: "fb55b781-1113-4879-8efd-d04a2284dffc"). InnerVolumeSpecName "fernet-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.471397 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb55b781-1113-4879-8efd-d04a2284dffc-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "fb55b781-1113-4879-8efd-d04a2284dffc" (UID: "fb55b781-1113-4879-8efd-d04a2284dffc"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.471677 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb55b781-1113-4879-8efd-d04a2284dffc-scripts" (OuterVolumeSpecName: "scripts") pod "fb55b781-1113-4879-8efd-d04a2284dffc" (UID: "fb55b781-1113-4879-8efd-d04a2284dffc"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.474754 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef71f603-5259-468f-b39f-4e726cdcb2f5-combined-ca-bundle\") pod \"placement-66fdf9847-wwwbv\" (UID: \"ef71f603-5259-468f-b39f-4e726cdcb2f5\") " pod="openstack-kuttl-tests/placement-66fdf9847-wwwbv" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.476794 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ef71f603-5259-468f-b39f-4e726cdcb2f5-internal-tls-certs\") pod \"placement-66fdf9847-wwwbv\" (UID: \"ef71f603-5259-468f-b39f-4e726cdcb2f5\") " pod="openstack-kuttl-tests/placement-66fdf9847-wwwbv" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.482840 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lkxmz\" (UniqueName: \"kubernetes.io/projected/ef71f603-5259-468f-b39f-4e726cdcb2f5-kube-api-access-lkxmz\") pod \"placement-66fdf9847-wwwbv\" (UID: \"ef71f603-5259-468f-b39f-4e726cdcb2f5\") " pod="openstack-kuttl-tests/placement-66fdf9847-wwwbv" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.483151 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fb55b781-1113-4879-8efd-d04a2284dffc-kube-api-access-zw27l" (OuterVolumeSpecName: "kube-api-access-zw27l") pod "fb55b781-1113-4879-8efd-d04a2284dffc" (UID: "fb55b781-1113-4879-8efd-d04a2284dffc"). InnerVolumeSpecName "kube-api-access-zw27l". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.504316 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb55b781-1113-4879-8efd-d04a2284dffc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fb55b781-1113-4879-8efd-d04a2284dffc" (UID: "fb55b781-1113-4879-8efd-d04a2284dffc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.525551 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb55b781-1113-4879-8efd-d04a2284dffc-config-data" (OuterVolumeSpecName: "config-data") pod "fb55b781-1113-4879-8efd-d04a2284dffc" (UID: "fb55b781-1113-4879-8efd-d04a2284dffc"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.559768 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb55b781-1113-4879-8efd-d04a2284dffc-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.560041 4558 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/fb55b781-1113-4879-8efd-d04a2284dffc-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.560054 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb55b781-1113-4879-8efd-d04a2284dffc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.560069 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zw27l\" (UniqueName: \"kubernetes.io/projected/fb55b781-1113-4879-8efd-d04a2284dffc-kube-api-access-zw27l\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.560079 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fb55b781-1113-4879-8efd-d04a2284dffc-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.560088 4558 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/fb55b781-1113-4879-8efd-d04a2284dffc-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.562479 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-66fdf9847-wwwbv" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.760882 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.808369 4558 generic.go:334] "Generic (PLEG): container finished" podID="65bcc9ab-370b-457f-a256-de44fa447395" containerID="453e58ee049b9ad309e7d2b3540a3139398c90cc16acc2c0fafb3e6dcb9ce927" exitCode=0 Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.808411 4558 generic.go:334] "Generic (PLEG): container finished" podID="65bcc9ab-370b-457f-a256-de44fa447395" containerID="7d1eca58dad37d535e3971097db8c897d92fc464555543dfe4336c4625410888" exitCode=2 Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.808420 4558 generic.go:334] "Generic (PLEG): container finished" podID="65bcc9ab-370b-457f-a256-de44fa447395" containerID="45c0e030f75bcd8df529339f70c23bd7500873605fb95750023ec5f527191a3a" exitCode=0 Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.808427 4558 generic.go:334] "Generic (PLEG): container finished" podID="65bcc9ab-370b-457f-a256-de44fa447395" containerID="f222360e5761260af77d9f2762b3887e3a8141792a6c327b2acf7b0128e1ca74" exitCode=0 Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.808471 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"65bcc9ab-370b-457f-a256-de44fa447395","Type":"ContainerDied","Data":"453e58ee049b9ad309e7d2b3540a3139398c90cc16acc2c0fafb3e6dcb9ce927"} Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.808501 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"65bcc9ab-370b-457f-a256-de44fa447395","Type":"ContainerDied","Data":"7d1eca58dad37d535e3971097db8c897d92fc464555543dfe4336c4625410888"} Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.808510 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"65bcc9ab-370b-457f-a256-de44fa447395","Type":"ContainerDied","Data":"45c0e030f75bcd8df529339f70c23bd7500873605fb95750023ec5f527191a3a"} Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.808518 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"65bcc9ab-370b-457f-a256-de44fa447395","Type":"ContainerDied","Data":"f222360e5761260af77d9f2762b3887e3a8141792a6c327b2acf7b0128e1ca74"} Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.808527 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"65bcc9ab-370b-457f-a256-de44fa447395","Type":"ContainerDied","Data":"bf7b272f5abf5f74f137fe6db2c9ec95c3967a559e6d66c30b670ce70c1a3a4f"} Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.808542 4558 scope.go:117] "RemoveContainer" containerID="453e58ee049b9ad309e7d2b3540a3139398c90cc16acc2c0fafb3e6dcb9ce927" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.808672 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.812825 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"05e1a302-040a-46ca-a3c2-e4c8c6390091","Type":"ContainerStarted","Data":"8059066dd8ca082e955bcb29f49441ed9509119295b8ded32faebb8c4410e534"} Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.815699 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-rcz7c" event={"ID":"fb55b781-1113-4879-8efd-d04a2284dffc","Type":"ContainerDied","Data":"b9649bb5b2d137b6ce0e382f3b68c9f71e5c595a5c981fdd50f06e83f5e2186d"} Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.815733 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b9649bb5b2d137b6ce0e382f3b68c9f71e5c595a5c981fdd50f06e83f5e2186d" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.815738 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-rcz7c" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.819355 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"f2f1744d-79dd-43bc-8bea-93e00672c805","Type":"ContainerStarted","Data":"86863d93cda6303d400888f0fe4d36dd71ee9de6e0a32e2a3f3d124d43b93601"} Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.819384 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"f2f1744d-79dd-43bc-8bea-93e00672c805","Type":"ContainerStarted","Data":"d3e06842b937cb58486b7028892a08f5c2149d2f9eeee036b5f7c7b8461615b9"} Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.836025 4558 scope.go:117] "RemoveContainer" containerID="7d1eca58dad37d535e3971097db8c897d92fc464555543dfe4336c4625410888" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.873293 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65bcc9ab-370b-457f-a256-de44fa447395-combined-ca-bundle\") pod \"65bcc9ab-370b-457f-a256-de44fa447395\" (UID: \"65bcc9ab-370b-457f-a256-de44fa447395\") " Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.873417 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/65bcc9ab-370b-457f-a256-de44fa447395-sg-core-conf-yaml\") pod \"65bcc9ab-370b-457f-a256-de44fa447395\" (UID: \"65bcc9ab-370b-457f-a256-de44fa447395\") " Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.873513 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65bcc9ab-370b-457f-a256-de44fa447395-config-data\") pod \"65bcc9ab-370b-457f-a256-de44fa447395\" (UID: \"65bcc9ab-370b-457f-a256-de44fa447395\") " Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.873556 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-27bj7\" (UniqueName: \"kubernetes.io/projected/65bcc9ab-370b-457f-a256-de44fa447395-kube-api-access-27bj7\") pod \"65bcc9ab-370b-457f-a256-de44fa447395\" (UID: \"65bcc9ab-370b-457f-a256-de44fa447395\") " Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.873614 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" 
(UniqueName: \"kubernetes.io/empty-dir/65bcc9ab-370b-457f-a256-de44fa447395-log-httpd\") pod \"65bcc9ab-370b-457f-a256-de44fa447395\" (UID: \"65bcc9ab-370b-457f-a256-de44fa447395\") " Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.873972 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/65bcc9ab-370b-457f-a256-de44fa447395-run-httpd\") pod \"65bcc9ab-370b-457f-a256-de44fa447395\" (UID: \"65bcc9ab-370b-457f-a256-de44fa447395\") " Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.874033 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/65bcc9ab-370b-457f-a256-de44fa447395-scripts\") pod \"65bcc9ab-370b-457f-a256-de44fa447395\" (UID: \"65bcc9ab-370b-457f-a256-de44fa447395\") " Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.874737 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/65bcc9ab-370b-457f-a256-de44fa447395-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "65bcc9ab-370b-457f-a256-de44fa447395" (UID: "65bcc9ab-370b-457f-a256-de44fa447395"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.876260 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/65bcc9ab-370b-457f-a256-de44fa447395-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.885782 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/65bcc9ab-370b-457f-a256-de44fa447395-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "65bcc9ab-370b-457f-a256-de44fa447395" (UID: "65bcc9ab-370b-457f-a256-de44fa447395"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.895493 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65bcc9ab-370b-457f-a256-de44fa447395-scripts" (OuterVolumeSpecName: "scripts") pod "65bcc9ab-370b-457f-a256-de44fa447395" (UID: "65bcc9ab-370b-457f-a256-de44fa447395"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.895626 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/65bcc9ab-370b-457f-a256-de44fa447395-kube-api-access-27bj7" (OuterVolumeSpecName: "kube-api-access-27bj7") pod "65bcc9ab-370b-457f-a256-de44fa447395" (UID: "65bcc9ab-370b-457f-a256-de44fa447395"). InnerVolumeSpecName "kube-api-access-27bj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.899638 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-rcz7c"] Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.907617 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65bcc9ab-370b-457f-a256-de44fa447395-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "65bcc9ab-370b-457f-a256-de44fa447395" (UID: "65bcc9ab-370b-457f-a256-de44fa447395"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.908295 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-rcz7c"] Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.921611 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-56dc9f6ff9-hc2fq"] Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.988594 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/65bcc9ab-370b-457f-a256-de44fa447395-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.988618 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/65bcc9ab-370b-457f-a256-de44fa447395-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.988627 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/65bcc9ab-370b-457f-a256-de44fa447395-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:39 crc kubenswrapper[4558]: I0120 17:23:39.988637 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-27bj7\" (UniqueName: \"kubernetes.io/projected/65bcc9ab-370b-457f-a256-de44fa447395-kube-api-access-27bj7\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.014716 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-hsd6l"] Jan 20 17:23:40 crc kubenswrapper[4558]: E0120 17:23:40.015149 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65bcc9ab-370b-457f-a256-de44fa447395" containerName="ceilometer-notification-agent" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.015183 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="65bcc9ab-370b-457f-a256-de44fa447395" containerName="ceilometer-notification-agent" Jan 20 17:23:40 crc kubenswrapper[4558]: E0120 17:23:40.015197 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65bcc9ab-370b-457f-a256-de44fa447395" containerName="ceilometer-central-agent" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.015205 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="65bcc9ab-370b-457f-a256-de44fa447395" containerName="ceilometer-central-agent" Jan 20 17:23:40 crc kubenswrapper[4558]: E0120 17:23:40.015240 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65bcc9ab-370b-457f-a256-de44fa447395" containerName="proxy-httpd" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.015246 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="65bcc9ab-370b-457f-a256-de44fa447395" containerName="proxy-httpd" Jan 20 17:23:40 crc kubenswrapper[4558]: E0120 17:23:40.015255 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb55b781-1113-4879-8efd-d04a2284dffc" containerName="keystone-bootstrap" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.015261 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb55b781-1113-4879-8efd-d04a2284dffc" containerName="keystone-bootstrap" Jan 20 17:23:40 crc kubenswrapper[4558]: E0120 17:23:40.015279 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65bcc9ab-370b-457f-a256-de44fa447395" containerName="sg-core" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.015286 4558 state_mem.go:107] "Deleted 
CPUSet assignment" podUID="65bcc9ab-370b-457f-a256-de44fa447395" containerName="sg-core" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.015494 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="65bcc9ab-370b-457f-a256-de44fa447395" containerName="ceilometer-central-agent" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.015517 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="65bcc9ab-370b-457f-a256-de44fa447395" containerName="sg-core" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.015532 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="65bcc9ab-370b-457f-a256-de44fa447395" containerName="ceilometer-notification-agent" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.015541 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="65bcc9ab-370b-457f-a256-de44fa447395" containerName="proxy-httpd" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.015549 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="fb55b781-1113-4879-8efd-d04a2284dffc" containerName="keystone-bootstrap" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.016221 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-hsd6l" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.022822 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-keystone-dockercfg-njbkz" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.023776 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-scripts" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.024179 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"osp-secret" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.024339 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.024428 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-config-data" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.030491 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-hsd6l"] Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.049027 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65bcc9ab-370b-457f-a256-de44fa447395-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "65bcc9ab-370b-457f-a256-de44fa447395" (UID: "65bcc9ab-370b-457f-a256-de44fa447395"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.078102 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65bcc9ab-370b-457f-a256-de44fa447395-config-data" (OuterVolumeSpecName: "config-data") pod "65bcc9ab-370b-457f-a256-de44fa447395" (UID: "65bcc9ab-370b-457f-a256-de44fa447395"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.079064 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-api-547b6df4c5-87fjk"] Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.091722 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9d1ae84-0a1b-4418-a336-39d0313a9857-combined-ca-bundle\") pod \"keystone-bootstrap-hsd6l\" (UID: \"a9d1ae84-0a1b-4418-a336-39d0313a9857\") " pod="openstack-kuttl-tests/keystone-bootstrap-hsd6l" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.091776 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a9d1ae84-0a1b-4418-a336-39d0313a9857-config-data\") pod \"keystone-bootstrap-hsd6l\" (UID: \"a9d1ae84-0a1b-4418-a336-39d0313a9857\") " pod="openstack-kuttl-tests/keystone-bootstrap-hsd6l" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.091809 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/a9d1ae84-0a1b-4418-a336-39d0313a9857-fernet-keys\") pod \"keystone-bootstrap-hsd6l\" (UID: \"a9d1ae84-0a1b-4418-a336-39d0313a9857\") " pod="openstack-kuttl-tests/keystone-bootstrap-hsd6l" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.091840 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a9d1ae84-0a1b-4418-a336-39d0313a9857-scripts\") pod \"keystone-bootstrap-hsd6l\" (UID: \"a9d1ae84-0a1b-4418-a336-39d0313a9857\") " pod="openstack-kuttl-tests/keystone-bootstrap-hsd6l" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.091862 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wv5gs\" (UniqueName: \"kubernetes.io/projected/a9d1ae84-0a1b-4418-a336-39d0313a9857-kube-api-access-wv5gs\") pod \"keystone-bootstrap-hsd6l\" (UID: \"a9d1ae84-0a1b-4418-a336-39d0313a9857\") " pod="openstack-kuttl-tests/keystone-bootstrap-hsd6l" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.091889 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/a9d1ae84-0a1b-4418-a336-39d0313a9857-credential-keys\") pod \"keystone-bootstrap-hsd6l\" (UID: \"a9d1ae84-0a1b-4418-a336-39d0313a9857\") " pod="openstack-kuttl-tests/keystone-bootstrap-hsd6l" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.091985 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65bcc9ab-370b-457f-a256-de44fa447395-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.091998 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65bcc9ab-370b-457f-a256-de44fa447395-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.092863 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-worker-67ccd5cf94-6lkng"] Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.127284 4558 scope.go:117] "RemoveContainer" 
containerID="45c0e030f75bcd8df529339f70c23bd7500873605fb95750023ec5f527191a3a" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.140449 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.156887 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.167076 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.169255 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.170924 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.171244 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.176016 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.193526 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9d1ae84-0a1b-4418-a336-39d0313a9857-combined-ca-bundle\") pod \"keystone-bootstrap-hsd6l\" (UID: \"a9d1ae84-0a1b-4418-a336-39d0313a9857\") " pod="openstack-kuttl-tests/keystone-bootstrap-hsd6l" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.193600 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a9d1ae84-0a1b-4418-a336-39d0313a9857-config-data\") pod \"keystone-bootstrap-hsd6l\" (UID: \"a9d1ae84-0a1b-4418-a336-39d0313a9857\") " pod="openstack-kuttl-tests/keystone-bootstrap-hsd6l" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.193662 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/a9d1ae84-0a1b-4418-a336-39d0313a9857-fernet-keys\") pod \"keystone-bootstrap-hsd6l\" (UID: \"a9d1ae84-0a1b-4418-a336-39d0313a9857\") " pod="openstack-kuttl-tests/keystone-bootstrap-hsd6l" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.193724 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a9d1ae84-0a1b-4418-a336-39d0313a9857-scripts\") pod \"keystone-bootstrap-hsd6l\" (UID: \"a9d1ae84-0a1b-4418-a336-39d0313a9857\") " pod="openstack-kuttl-tests/keystone-bootstrap-hsd6l" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.193748 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wv5gs\" (UniqueName: \"kubernetes.io/projected/a9d1ae84-0a1b-4418-a336-39d0313a9857-kube-api-access-wv5gs\") pod \"keystone-bootstrap-hsd6l\" (UID: \"a9d1ae84-0a1b-4418-a336-39d0313a9857\") " pod="openstack-kuttl-tests/keystone-bootstrap-hsd6l" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.193797 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/a9d1ae84-0a1b-4418-a336-39d0313a9857-credential-keys\") pod \"keystone-bootstrap-hsd6l\" (UID: \"a9d1ae84-0a1b-4418-a336-39d0313a9857\") " 
pod="openstack-kuttl-tests/keystone-bootstrap-hsd6l" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.202679 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-5cbb954895-rgzbf"] Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.204063 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9d1ae84-0a1b-4418-a336-39d0313a9857-combined-ca-bundle\") pod \"keystone-bootstrap-hsd6l\" (UID: \"a9d1ae84-0a1b-4418-a336-39d0313a9857\") " pod="openstack-kuttl-tests/keystone-bootstrap-hsd6l" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.206097 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a9d1ae84-0a1b-4418-a336-39d0313a9857-scripts\") pod \"keystone-bootstrap-hsd6l\" (UID: \"a9d1ae84-0a1b-4418-a336-39d0313a9857\") " pod="openstack-kuttl-tests/keystone-bootstrap-hsd6l" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.210501 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/a9d1ae84-0a1b-4418-a336-39d0313a9857-fernet-keys\") pod \"keystone-bootstrap-hsd6l\" (UID: \"a9d1ae84-0a1b-4418-a336-39d0313a9857\") " pod="openstack-kuttl-tests/keystone-bootstrap-hsd6l" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.212847 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/a9d1ae84-0a1b-4418-a336-39d0313a9857-credential-keys\") pod \"keystone-bootstrap-hsd6l\" (UID: \"a9d1ae84-0a1b-4418-a336-39d0313a9857\") " pod="openstack-kuttl-tests/keystone-bootstrap-hsd6l" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.213029 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a9d1ae84-0a1b-4418-a336-39d0313a9857-config-data\") pod \"keystone-bootstrap-hsd6l\" (UID: \"a9d1ae84-0a1b-4418-a336-39d0313a9857\") " pod="openstack-kuttl-tests/keystone-bootstrap-hsd6l" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.222603 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wv5gs\" (UniqueName: \"kubernetes.io/projected/a9d1ae84-0a1b-4418-a336-39d0313a9857-kube-api-access-wv5gs\") pod \"keystone-bootstrap-hsd6l\" (UID: \"a9d1ae84-0a1b-4418-a336-39d0313a9857\") " pod="openstack-kuttl-tests/keystone-bootstrap-hsd6l" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.243095 4558 scope.go:117] "RemoveContainer" containerID="f222360e5761260af77d9f2762b3887e3a8141792a6c327b2acf7b0128e1ca74" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.299508 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-djvrg\" (UniqueName: \"kubernetes.io/projected/d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd-kube-api-access-djvrg\") pod \"ceilometer-0\" (UID: \"d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.299759 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd-run-httpd\") pod \"ceilometer-0\" (UID: \"d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 
17:23:40.299833 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.299904 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.300075 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd-config-data\") pod \"ceilometer-0\" (UID: \"d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.300121 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd-scripts\") pod \"ceilometer-0\" (UID: \"d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.300178 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd-log-httpd\") pod \"ceilometer-0\" (UID: \"d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.304038 4558 scope.go:117] "RemoveContainer" containerID="453e58ee049b9ad309e7d2b3540a3139398c90cc16acc2c0fafb3e6dcb9ce927" Jan 20 17:23:40 crc kubenswrapper[4558]: E0120 17:23:40.305492 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"453e58ee049b9ad309e7d2b3540a3139398c90cc16acc2c0fafb3e6dcb9ce927\": container with ID starting with 453e58ee049b9ad309e7d2b3540a3139398c90cc16acc2c0fafb3e6dcb9ce927 not found: ID does not exist" containerID="453e58ee049b9ad309e7d2b3540a3139398c90cc16acc2c0fafb3e6dcb9ce927" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.305515 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"453e58ee049b9ad309e7d2b3540a3139398c90cc16acc2c0fafb3e6dcb9ce927"} err="failed to get container status \"453e58ee049b9ad309e7d2b3540a3139398c90cc16acc2c0fafb3e6dcb9ce927\": rpc error: code = NotFound desc = could not find container \"453e58ee049b9ad309e7d2b3540a3139398c90cc16acc2c0fafb3e6dcb9ce927\": container with ID starting with 453e58ee049b9ad309e7d2b3540a3139398c90cc16acc2c0fafb3e6dcb9ce927 not found: ID does not exist" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.305532 4558 scope.go:117] "RemoveContainer" containerID="7d1eca58dad37d535e3971097db8c897d92fc464555543dfe4336c4625410888" Jan 20 17:23:40 crc kubenswrapper[4558]: E0120 17:23:40.305930 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7d1eca58dad37d535e3971097db8c897d92fc464555543dfe4336c4625410888\": container with 
ID starting with 7d1eca58dad37d535e3971097db8c897d92fc464555543dfe4336c4625410888 not found: ID does not exist" containerID="7d1eca58dad37d535e3971097db8c897d92fc464555543dfe4336c4625410888" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.305947 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7d1eca58dad37d535e3971097db8c897d92fc464555543dfe4336c4625410888"} err="failed to get container status \"7d1eca58dad37d535e3971097db8c897d92fc464555543dfe4336c4625410888\": rpc error: code = NotFound desc = could not find container \"7d1eca58dad37d535e3971097db8c897d92fc464555543dfe4336c4625410888\": container with ID starting with 7d1eca58dad37d535e3971097db8c897d92fc464555543dfe4336c4625410888 not found: ID does not exist" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.305975 4558 scope.go:117] "RemoveContainer" containerID="45c0e030f75bcd8df529339f70c23bd7500873605fb95750023ec5f527191a3a" Jan 20 17:23:40 crc kubenswrapper[4558]: E0120 17:23:40.306858 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"45c0e030f75bcd8df529339f70c23bd7500873605fb95750023ec5f527191a3a\": container with ID starting with 45c0e030f75bcd8df529339f70c23bd7500873605fb95750023ec5f527191a3a not found: ID does not exist" containerID="45c0e030f75bcd8df529339f70c23bd7500873605fb95750023ec5f527191a3a" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.306892 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"45c0e030f75bcd8df529339f70c23bd7500873605fb95750023ec5f527191a3a"} err="failed to get container status \"45c0e030f75bcd8df529339f70c23bd7500873605fb95750023ec5f527191a3a\": rpc error: code = NotFound desc = could not find container \"45c0e030f75bcd8df529339f70c23bd7500873605fb95750023ec5f527191a3a\": container with ID starting with 45c0e030f75bcd8df529339f70c23bd7500873605fb95750023ec5f527191a3a not found: ID does not exist" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.306920 4558 scope.go:117] "RemoveContainer" containerID="f222360e5761260af77d9f2762b3887e3a8141792a6c327b2acf7b0128e1ca74" Jan 20 17:23:40 crc kubenswrapper[4558]: E0120 17:23:40.307319 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f222360e5761260af77d9f2762b3887e3a8141792a6c327b2acf7b0128e1ca74\": container with ID starting with f222360e5761260af77d9f2762b3887e3a8141792a6c327b2acf7b0128e1ca74 not found: ID does not exist" containerID="f222360e5761260af77d9f2762b3887e3a8141792a6c327b2acf7b0128e1ca74" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.307340 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f222360e5761260af77d9f2762b3887e3a8141792a6c327b2acf7b0128e1ca74"} err="failed to get container status \"f222360e5761260af77d9f2762b3887e3a8141792a6c327b2acf7b0128e1ca74\": rpc error: code = NotFound desc = could not find container \"f222360e5761260af77d9f2762b3887e3a8141792a6c327b2acf7b0128e1ca74\": container with ID starting with f222360e5761260af77d9f2762b3887e3a8141792a6c327b2acf7b0128e1ca74 not found: ID does not exist" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.307356 4558 scope.go:117] "RemoveContainer" containerID="453e58ee049b9ad309e7d2b3540a3139398c90cc16acc2c0fafb3e6dcb9ce927" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.307541 4558 pod_container_deletor.go:53] "DeleteContainer 
returned error" containerID={"Type":"cri-o","ID":"453e58ee049b9ad309e7d2b3540a3139398c90cc16acc2c0fafb3e6dcb9ce927"} err="failed to get container status \"453e58ee049b9ad309e7d2b3540a3139398c90cc16acc2c0fafb3e6dcb9ce927\": rpc error: code = NotFound desc = could not find container \"453e58ee049b9ad309e7d2b3540a3139398c90cc16acc2c0fafb3e6dcb9ce927\": container with ID starting with 453e58ee049b9ad309e7d2b3540a3139398c90cc16acc2c0fafb3e6dcb9ce927 not found: ID does not exist" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.307555 4558 scope.go:117] "RemoveContainer" containerID="7d1eca58dad37d535e3971097db8c897d92fc464555543dfe4336c4625410888" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.307754 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7d1eca58dad37d535e3971097db8c897d92fc464555543dfe4336c4625410888"} err="failed to get container status \"7d1eca58dad37d535e3971097db8c897d92fc464555543dfe4336c4625410888\": rpc error: code = NotFound desc = could not find container \"7d1eca58dad37d535e3971097db8c897d92fc464555543dfe4336c4625410888\": container with ID starting with 7d1eca58dad37d535e3971097db8c897d92fc464555543dfe4336c4625410888 not found: ID does not exist" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.307768 4558 scope.go:117] "RemoveContainer" containerID="45c0e030f75bcd8df529339f70c23bd7500873605fb95750023ec5f527191a3a" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.308005 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"45c0e030f75bcd8df529339f70c23bd7500873605fb95750023ec5f527191a3a"} err="failed to get container status \"45c0e030f75bcd8df529339f70c23bd7500873605fb95750023ec5f527191a3a\": rpc error: code = NotFound desc = could not find container \"45c0e030f75bcd8df529339f70c23bd7500873605fb95750023ec5f527191a3a\": container with ID starting with 45c0e030f75bcd8df529339f70c23bd7500873605fb95750023ec5f527191a3a not found: ID does not exist" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.308051 4558 scope.go:117] "RemoveContainer" containerID="f222360e5761260af77d9f2762b3887e3a8141792a6c327b2acf7b0128e1ca74" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.308260 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f222360e5761260af77d9f2762b3887e3a8141792a6c327b2acf7b0128e1ca74"} err="failed to get container status \"f222360e5761260af77d9f2762b3887e3a8141792a6c327b2acf7b0128e1ca74\": rpc error: code = NotFound desc = could not find container \"f222360e5761260af77d9f2762b3887e3a8141792a6c327b2acf7b0128e1ca74\": container with ID starting with f222360e5761260af77d9f2762b3887e3a8141792a6c327b2acf7b0128e1ca74 not found: ID does not exist" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.308277 4558 scope.go:117] "RemoveContainer" containerID="453e58ee049b9ad309e7d2b3540a3139398c90cc16acc2c0fafb3e6dcb9ce927" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.308495 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"453e58ee049b9ad309e7d2b3540a3139398c90cc16acc2c0fafb3e6dcb9ce927"} err="failed to get container status \"453e58ee049b9ad309e7d2b3540a3139398c90cc16acc2c0fafb3e6dcb9ce927\": rpc error: code = NotFound desc = could not find container \"453e58ee049b9ad309e7d2b3540a3139398c90cc16acc2c0fafb3e6dcb9ce927\": container with ID starting with 453e58ee049b9ad309e7d2b3540a3139398c90cc16acc2c0fafb3e6dcb9ce927 not found: ID 
does not exist" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.308509 4558 scope.go:117] "RemoveContainer" containerID="7d1eca58dad37d535e3971097db8c897d92fc464555543dfe4336c4625410888" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.308650 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7d1eca58dad37d535e3971097db8c897d92fc464555543dfe4336c4625410888"} err="failed to get container status \"7d1eca58dad37d535e3971097db8c897d92fc464555543dfe4336c4625410888\": rpc error: code = NotFound desc = could not find container \"7d1eca58dad37d535e3971097db8c897d92fc464555543dfe4336c4625410888\": container with ID starting with 7d1eca58dad37d535e3971097db8c897d92fc464555543dfe4336c4625410888 not found: ID does not exist" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.308663 4558 scope.go:117] "RemoveContainer" containerID="45c0e030f75bcd8df529339f70c23bd7500873605fb95750023ec5f527191a3a" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.308855 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"45c0e030f75bcd8df529339f70c23bd7500873605fb95750023ec5f527191a3a"} err="failed to get container status \"45c0e030f75bcd8df529339f70c23bd7500873605fb95750023ec5f527191a3a\": rpc error: code = NotFound desc = could not find container \"45c0e030f75bcd8df529339f70c23bd7500873605fb95750023ec5f527191a3a\": container with ID starting with 45c0e030f75bcd8df529339f70c23bd7500873605fb95750023ec5f527191a3a not found: ID does not exist" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.308868 4558 scope.go:117] "RemoveContainer" containerID="f222360e5761260af77d9f2762b3887e3a8141792a6c327b2acf7b0128e1ca74" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.309087 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f222360e5761260af77d9f2762b3887e3a8141792a6c327b2acf7b0128e1ca74"} err="failed to get container status \"f222360e5761260af77d9f2762b3887e3a8141792a6c327b2acf7b0128e1ca74\": rpc error: code = NotFound desc = could not find container \"f222360e5761260af77d9f2762b3887e3a8141792a6c327b2acf7b0128e1ca74\": container with ID starting with f222360e5761260af77d9f2762b3887e3a8141792a6c327b2acf7b0128e1ca74 not found: ID does not exist" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.309119 4558 scope.go:117] "RemoveContainer" containerID="453e58ee049b9ad309e7d2b3540a3139398c90cc16acc2c0fafb3e6dcb9ce927" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.309993 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"453e58ee049b9ad309e7d2b3540a3139398c90cc16acc2c0fafb3e6dcb9ce927"} err="failed to get container status \"453e58ee049b9ad309e7d2b3540a3139398c90cc16acc2c0fafb3e6dcb9ce927\": rpc error: code = NotFound desc = could not find container \"453e58ee049b9ad309e7d2b3540a3139398c90cc16acc2c0fafb3e6dcb9ce927\": container with ID starting with 453e58ee049b9ad309e7d2b3540a3139398c90cc16acc2c0fafb3e6dcb9ce927 not found: ID does not exist" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.310007 4558 scope.go:117] "RemoveContainer" containerID="7d1eca58dad37d535e3971097db8c897d92fc464555543dfe4336c4625410888" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.310252 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7d1eca58dad37d535e3971097db8c897d92fc464555543dfe4336c4625410888"} err="failed to get container status 
\"7d1eca58dad37d535e3971097db8c897d92fc464555543dfe4336c4625410888\": rpc error: code = NotFound desc = could not find container \"7d1eca58dad37d535e3971097db8c897d92fc464555543dfe4336c4625410888\": container with ID starting with 7d1eca58dad37d535e3971097db8c897d92fc464555543dfe4336c4625410888 not found: ID does not exist" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.310265 4558 scope.go:117] "RemoveContainer" containerID="45c0e030f75bcd8df529339f70c23bd7500873605fb95750023ec5f527191a3a" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.310470 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"45c0e030f75bcd8df529339f70c23bd7500873605fb95750023ec5f527191a3a"} err="failed to get container status \"45c0e030f75bcd8df529339f70c23bd7500873605fb95750023ec5f527191a3a\": rpc error: code = NotFound desc = could not find container \"45c0e030f75bcd8df529339f70c23bd7500873605fb95750023ec5f527191a3a\": container with ID starting with 45c0e030f75bcd8df529339f70c23bd7500873605fb95750023ec5f527191a3a not found: ID does not exist" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.310484 4558 scope.go:117] "RemoveContainer" containerID="f222360e5761260af77d9f2762b3887e3a8141792a6c327b2acf7b0128e1ca74" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.310729 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f222360e5761260af77d9f2762b3887e3a8141792a6c327b2acf7b0128e1ca74"} err="failed to get container status \"f222360e5761260af77d9f2762b3887e3a8141792a6c327b2acf7b0128e1ca74\": rpc error: code = NotFound desc = could not find container \"f222360e5761260af77d9f2762b3887e3a8141792a6c327b2acf7b0128e1ca74\": container with ID starting with f222360e5761260af77d9f2762b3887e3a8141792a6c327b2acf7b0128e1ca74 not found: ID does not exist" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.369060 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-hsd6l" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.369617 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-66fdf9847-wwwbv"] Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.381876 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-db-sync-dpr25" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.403055 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd-config-data\") pod \"ceilometer-0\" (UID: \"d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.403105 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd-scripts\") pod \"ceilometer-0\" (UID: \"d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.403136 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd-log-httpd\") pod \"ceilometer-0\" (UID: \"d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.403194 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-djvrg\" (UniqueName: \"kubernetes.io/projected/d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd-kube-api-access-djvrg\") pod \"ceilometer-0\" (UID: \"d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.403348 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd-run-httpd\") pod \"ceilometer-0\" (UID: \"d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.403408 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.403470 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.403650 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd-log-httpd\") pod \"ceilometer-0\" (UID: \"d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.404316 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd-run-httpd\") pod \"ceilometer-0\" (UID: \"d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.406764 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd-scripts\") pod \"ceilometer-0\" (UID: \"d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.408106 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.418554 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.429456 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd-config-data\") pod \"ceilometer-0\" (UID: \"d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.432844 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-djvrg\" (UniqueName: \"kubernetes.io/projected/d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd-kube-api-access-djvrg\") pod \"ceilometer-0\" (UID: \"d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.494687 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.510891 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b03348ed-66d6-4076-a5fa-0ccf182e8e3c-config-data\") pod \"b03348ed-66d6-4076-a5fa-0ccf182e8e3c\" (UID: \"b03348ed-66d6-4076-a5fa-0ccf182e8e3c\") " Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.511047 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b03348ed-66d6-4076-a5fa-0ccf182e8e3c-etc-machine-id\") pod \"b03348ed-66d6-4076-a5fa-0ccf182e8e3c\" (UID: \"b03348ed-66d6-4076-a5fa-0ccf182e8e3c\") " Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.511105 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wbr49\" (UniqueName: \"kubernetes.io/projected/b03348ed-66d6-4076-a5fa-0ccf182e8e3c-kube-api-access-wbr49\") pod \"b03348ed-66d6-4076-a5fa-0ccf182e8e3c\" (UID: \"b03348ed-66d6-4076-a5fa-0ccf182e8e3c\") " Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.511173 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b03348ed-66d6-4076-a5fa-0ccf182e8e3c-scripts\") pod \"b03348ed-66d6-4076-a5fa-0ccf182e8e3c\" (UID: \"b03348ed-66d6-4076-a5fa-0ccf182e8e3c\") " Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.511236 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b03348ed-66d6-4076-a5fa-0ccf182e8e3c-combined-ca-bundle\") pod \"b03348ed-66d6-4076-a5fa-0ccf182e8e3c\" (UID: 
\"b03348ed-66d6-4076-a5fa-0ccf182e8e3c\") " Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.511267 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b03348ed-66d6-4076-a5fa-0ccf182e8e3c-db-sync-config-data\") pod \"b03348ed-66d6-4076-a5fa-0ccf182e8e3c\" (UID: \"b03348ed-66d6-4076-a5fa-0ccf182e8e3c\") " Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.514134 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b03348ed-66d6-4076-a5fa-0ccf182e8e3c-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "b03348ed-66d6-4076-a5fa-0ccf182e8e3c" (UID: "b03348ed-66d6-4076-a5fa-0ccf182e8e3c"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.525233 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b03348ed-66d6-4076-a5fa-0ccf182e8e3c-scripts" (OuterVolumeSpecName: "scripts") pod "b03348ed-66d6-4076-a5fa-0ccf182e8e3c" (UID: "b03348ed-66d6-4076-a5fa-0ccf182e8e3c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.525257 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b03348ed-66d6-4076-a5fa-0ccf182e8e3c-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "b03348ed-66d6-4076-a5fa-0ccf182e8e3c" (UID: "b03348ed-66d6-4076-a5fa-0ccf182e8e3c"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.533993 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b03348ed-66d6-4076-a5fa-0ccf182e8e3c-kube-api-access-wbr49" (OuterVolumeSpecName: "kube-api-access-wbr49") pod "b03348ed-66d6-4076-a5fa-0ccf182e8e3c" (UID: "b03348ed-66d6-4076-a5fa-0ccf182e8e3c"). InnerVolumeSpecName "kube-api-access-wbr49". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.580328 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b03348ed-66d6-4076-a5fa-0ccf182e8e3c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b03348ed-66d6-4076-a5fa-0ccf182e8e3c" (UID: "b03348ed-66d6-4076-a5fa-0ccf182e8e3c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.588555 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="65bcc9ab-370b-457f-a256-de44fa447395" path="/var/lib/kubelet/pods/65bcc9ab-370b-457f-a256-de44fa447395/volumes" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.589575 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fb55b781-1113-4879-8efd-d04a2284dffc" path="/var/lib/kubelet/pods/fb55b781-1113-4879-8efd-d04a2284dffc/volumes" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.613307 4558 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b03348ed-66d6-4076-a5fa-0ccf182e8e3c-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.613340 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wbr49\" (UniqueName: \"kubernetes.io/projected/b03348ed-66d6-4076-a5fa-0ccf182e8e3c-kube-api-access-wbr49\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.613353 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b03348ed-66d6-4076-a5fa-0ccf182e8e3c-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.613365 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b03348ed-66d6-4076-a5fa-0ccf182e8e3c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.613375 4558 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b03348ed-66d6-4076-a5fa-0ccf182e8e3c-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.664800 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b03348ed-66d6-4076-a5fa-0ccf182e8e3c-config-data" (OuterVolumeSpecName: "config-data") pod "b03348ed-66d6-4076-a5fa-0ccf182e8e3c" (UID: "b03348ed-66d6-4076-a5fa-0ccf182e8e3c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.714769 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b03348ed-66d6-4076-a5fa-0ccf182e8e3c-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.874087 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.904433 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-hsd6l"] Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.917762 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-547b6df4c5-87fjk" event={"ID":"db32104f-5145-43ca-bc9a-5c410bf89933","Type":"ContainerStarted","Data":"30ed7aba992e1e08f5fc61d9f1994bef19246447cd45bbdb4562c998c6ff53d3"} Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.922281 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-547b6df4c5-87fjk" event={"ID":"db32104f-5145-43ca-bc9a-5c410bf89933","Type":"ContainerStarted","Data":"79684ab8301c7dad7ec9010931af1515fb53eb6dfd15283896a2eb64ccc91b4f"} Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.922812 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/barbican-api-547b6df4c5-87fjk" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.940615 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/barbican-api-547b6df4c5-87fjk" Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.940702 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"f2f1744d-79dd-43bc-8bea-93e00672c805","Type":"ContainerStarted","Data":"929e11abc56a6402a23f271e8df464870a363c2850f358242cc4b22371fc6ec0"} Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.962501 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-66fdf9847-wwwbv" event={"ID":"ef71f603-5259-468f-b39f-4e726cdcb2f5","Type":"ContainerStarted","Data":"f7ce9ab01f40e824acf195a6b4913a12b3fb6a924673bfa550a16e418b812cff"} Jan 20 17:23:40 crc kubenswrapper[4558]: I0120 17:23:40.963568 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-66fdf9847-wwwbv" event={"ID":"ef71f603-5259-468f-b39f-4e726cdcb2f5","Type":"ContainerStarted","Data":"178e67168083a3ca4fd77829a812a47e54bb69b9e9849a55530f849b73d6f201"} Jan 20 17:23:41 crc kubenswrapper[4558]: I0120 17:23:41.014298 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:23:41 crc kubenswrapper[4558]: E0120 17:23:41.014803 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b03348ed-66d6-4076-a5fa-0ccf182e8e3c" containerName="cinder-db-sync" Jan 20 17:23:41 crc kubenswrapper[4558]: I0120 17:23:41.014818 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b03348ed-66d6-4076-a5fa-0ccf182e8e3c" containerName="cinder-db-sync" Jan 20 17:23:41 crc kubenswrapper[4558]: I0120 17:23:41.014986 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b03348ed-66d6-4076-a5fa-0ccf182e8e3c" containerName="cinder-db-sync" Jan 20 17:23:41 crc kubenswrapper[4558]: I0120 17:23:41.015956 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:23:41 crc kubenswrapper[4558]: I0120 17:23:41.019492 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-sync-dpr25" event={"ID":"b03348ed-66d6-4076-a5fa-0ccf182e8e3c","Type":"ContainerDied","Data":"17bde0333457ba40d4fb298f15c25764aa324510751fc022d849b46f9554a23b"} Jan 20 17:23:41 crc kubenswrapper[4558]: I0120 17:23:41.019530 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="17bde0333457ba40d4fb298f15c25764aa324510751fc022d849b46f9554a23b" Jan 20 17:23:41 crc kubenswrapper[4558]: I0120 17:23:41.019608 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-db-sync-dpr25" Jan 20 17:23:41 crc kubenswrapper[4558]: I0120 17:23:41.023022 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-scheduler-config-data" Jan 20 17:23:41 crc kubenswrapper[4558]: I0120 17:23:41.029801 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-5cbb954895-rgzbf" event={"ID":"b8f85165-600b-47f4-b459-e1eb023907a6","Type":"ContainerStarted","Data":"b1b8dd351e12ca92ea5d56e15dd5ab075ca2eb4ee0213bbd1f5c91319f363239"} Jan 20 17:23:41 crc kubenswrapper[4558]: I0120 17:23:41.029866 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-5cbb954895-rgzbf" event={"ID":"b8f85165-600b-47f4-b459-e1eb023907a6","Type":"ContainerStarted","Data":"0323677f63509ece02e01ee5f3833ba23b2e1d09a04dc41591cbdaf181c384f9"} Jan 20 17:23:41 crc kubenswrapper[4558]: I0120 17:23:41.031684 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-api-547b6df4c5-87fjk" podStartSLOduration=3.031657507 podStartE2EDuration="3.031657507s" podCreationTimestamp="2026-01-20 17:23:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:23:40.956460576 +0000 UTC m=+2514.716798542" watchObservedRunningTime="2026-01-20 17:23:41.031657507 +0000 UTC m=+2514.791995464" Jan 20 17:23:41 crc kubenswrapper[4558]: I0120 17:23:41.031890 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"05e1a302-040a-46ca-a3c2-e4c8c6390091","Type":"ContainerStarted","Data":"8ba8938da0a00c9308d3eea1def52748e499a64ab544e915a92e8d41b1af332b"} Jan 20 17:23:41 crc kubenswrapper[4558]: I0120 17:23:41.046291 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:23:41 crc kubenswrapper[4558]: I0120 17:23:41.047922 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-internal-api-0" podStartSLOduration=4.047903877 podStartE2EDuration="4.047903877s" podCreationTimestamp="2026-01-20 17:23:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:23:41.007297553 +0000 UTC m=+2514.767635520" watchObservedRunningTime="2026-01-20 17:23:41.047903877 +0000 UTC m=+2514.808241834" Jan 20 17:23:41 crc kubenswrapper[4558]: I0120 17:23:41.085356 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-67ccd5cf94-6lkng" 
event={"ID":"d4f8d7f7-b0d0-4643-834d-1a5056d5cf89","Type":"ContainerStarted","Data":"9370273bf4e481977fef5391473819b681d5b9a82f04da2dbb3c091078de2d23"} Jan 20 17:23:41 crc kubenswrapper[4558]: I0120 17:23:41.085389 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-67ccd5cf94-6lkng" event={"ID":"d4f8d7f7-b0d0-4643-834d-1a5056d5cf89","Type":"ContainerStarted","Data":"c6c1817123e3f5c8cea565cdf69b0faaf8aa7b2548ca93d325c3dcfe94cc8266"} Jan 20 17:23:41 crc kubenswrapper[4558]: I0120 17:23:41.085398 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-67ccd5cf94-6lkng" event={"ID":"d4f8d7f7-b0d0-4643-834d-1a5056d5cf89","Type":"ContainerStarted","Data":"8ae9a7b0c7ca5fa6dadabdb9990fde848cdca6d3d61876dba5886a547e136754"} Jan 20 17:23:41 crc kubenswrapper[4558]: I0120 17:23:41.089131 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-56dc9f6ff9-hc2fq" event={"ID":"36591742-f9df-4244-b32c-cc952d81893a","Type":"ContainerStarted","Data":"3531ff1008e8ac29849b0e884411abd86aa7bf3787f9aa9fa9e1af55600dfb39"} Jan 20 17:23:41 crc kubenswrapper[4558]: I0120 17:23:41.089153 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-56dc9f6ff9-hc2fq" event={"ID":"36591742-f9df-4244-b32c-cc952d81893a","Type":"ContainerStarted","Data":"64ef811896fb701a56067ae09192dab6f941d9eb7d07c6bf99ee816f9a9c9612"} Jan 20 17:23:41 crc kubenswrapper[4558]: I0120 17:23:41.089198 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-56dc9f6ff9-hc2fq" event={"ID":"36591742-f9df-4244-b32c-cc952d81893a","Type":"ContainerStarted","Data":"5c0a2cdc3bd0ff591e7cf030fbdf36a6ef7176dd2dcf678998bce908c0ae0595"} Jan 20 17:23:41 crc kubenswrapper[4558]: I0120 17:23:41.089616 4558 scope.go:117] "RemoveContainer" containerID="3531ff1008e8ac29849b0e884411abd86aa7bf3787f9aa9fa9e1af55600dfb39" Jan 20 17:23:41 crc kubenswrapper[4558]: I0120 17:23:41.090068 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/placement-56dc9f6ff9-hc2fq" Jan 20 17:23:41 crc kubenswrapper[4558]: I0120 17:23:41.126807 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hc4s8\" (UniqueName: \"kubernetes.io/projected/3afcf923-8d53-4638-bba0-09afa338a2e0-kube-api-access-hc4s8\") pod \"cinder-scheduler-0\" (UID: \"3afcf923-8d53-4638-bba0-09afa338a2e0\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:23:41 crc kubenswrapper[4558]: I0120 17:23:41.126924 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3afcf923-8d53-4638-bba0-09afa338a2e0-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"3afcf923-8d53-4638-bba0-09afa338a2e0\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:23:41 crc kubenswrapper[4558]: I0120 17:23:41.126949 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3afcf923-8d53-4638-bba0-09afa338a2e0-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"3afcf923-8d53-4638-bba0-09afa338a2e0\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:23:41 crc kubenswrapper[4558]: I0120 17:23:41.126981 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" 
(UniqueName: \"kubernetes.io/secret/3afcf923-8d53-4638-bba0-09afa338a2e0-scripts\") pod \"cinder-scheduler-0\" (UID: \"3afcf923-8d53-4638-bba0-09afa338a2e0\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:23:41 crc kubenswrapper[4558]: I0120 17:23:41.127007 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3afcf923-8d53-4638-bba0-09afa338a2e0-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"3afcf923-8d53-4638-bba0-09afa338a2e0\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:23:41 crc kubenswrapper[4558]: I0120 17:23:41.127081 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3afcf923-8d53-4638-bba0-09afa338a2e0-config-data\") pod \"cinder-scheduler-0\" (UID: \"3afcf923-8d53-4638-bba0-09afa338a2e0\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:23:41 crc kubenswrapper[4558]: I0120 17:23:41.187096 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-external-api-0" podStartSLOduration=4.18707611 podStartE2EDuration="4.18707611s" podCreationTimestamp="2026-01-20 17:23:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:23:41.145442774 +0000 UTC m=+2514.905780741" watchObservedRunningTime="2026-01-20 17:23:41.18707611 +0000 UTC m=+2514.947414077" Jan 20 17:23:41 crc kubenswrapper[4558]: I0120 17:23:41.222193 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-worker-67ccd5cf94-6lkng" podStartSLOduration=3.222155667 podStartE2EDuration="3.222155667s" podCreationTimestamp="2026-01-20 17:23:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:23:41.206539231 +0000 UTC m=+2514.966877197" watchObservedRunningTime="2026-01-20 17:23:41.222155667 +0000 UTC m=+2514.982493634" Jan 20 17:23:41 crc kubenswrapper[4558]: I0120 17:23:41.229282 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hc4s8\" (UniqueName: \"kubernetes.io/projected/3afcf923-8d53-4638-bba0-09afa338a2e0-kube-api-access-hc4s8\") pod \"cinder-scheduler-0\" (UID: \"3afcf923-8d53-4638-bba0-09afa338a2e0\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:23:41 crc kubenswrapper[4558]: I0120 17:23:41.229354 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3afcf923-8d53-4638-bba0-09afa338a2e0-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"3afcf923-8d53-4638-bba0-09afa338a2e0\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:23:41 crc kubenswrapper[4558]: I0120 17:23:41.229377 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3afcf923-8d53-4638-bba0-09afa338a2e0-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"3afcf923-8d53-4638-bba0-09afa338a2e0\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:23:41 crc kubenswrapper[4558]: I0120 17:23:41.229400 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/3afcf923-8d53-4638-bba0-09afa338a2e0-scripts\") pod \"cinder-scheduler-0\" (UID: \"3afcf923-8d53-4638-bba0-09afa338a2e0\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:23:41 crc kubenswrapper[4558]: I0120 17:23:41.229426 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3afcf923-8d53-4638-bba0-09afa338a2e0-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"3afcf923-8d53-4638-bba0-09afa338a2e0\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:23:41 crc kubenswrapper[4558]: I0120 17:23:41.229468 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3afcf923-8d53-4638-bba0-09afa338a2e0-config-data\") pod \"cinder-scheduler-0\" (UID: \"3afcf923-8d53-4638-bba0-09afa338a2e0\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:23:41 crc kubenswrapper[4558]: I0120 17:23:41.229869 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3afcf923-8d53-4638-bba0-09afa338a2e0-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"3afcf923-8d53-4638-bba0-09afa338a2e0\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:23:41 crc kubenswrapper[4558]: I0120 17:23:41.237276 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:23:41 crc kubenswrapper[4558]: I0120 17:23:41.239782 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3afcf923-8d53-4638-bba0-09afa338a2e0-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"3afcf923-8d53-4638-bba0-09afa338a2e0\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:23:41 crc kubenswrapper[4558]: I0120 17:23:41.246535 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:23:41 crc kubenswrapper[4558]: I0120 17:23:41.250477 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-api-config-data" Jan 20 17:23:41 crc kubenswrapper[4558]: I0120 17:23:41.252988 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3afcf923-8d53-4638-bba0-09afa338a2e0-config-data\") pod \"cinder-scheduler-0\" (UID: \"3afcf923-8d53-4638-bba0-09afa338a2e0\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:23:41 crc kubenswrapper[4558]: I0120 17:23:41.254660 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:23:41 crc kubenswrapper[4558]: I0120 17:23:41.273428 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3afcf923-8d53-4638-bba0-09afa338a2e0-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"3afcf923-8d53-4638-bba0-09afa338a2e0\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:23:41 crc kubenswrapper[4558]: I0120 17:23:41.292550 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3afcf923-8d53-4638-bba0-09afa338a2e0-scripts\") pod \"cinder-scheduler-0\" (UID: \"3afcf923-8d53-4638-bba0-09afa338a2e0\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:23:41 crc kubenswrapper[4558]: I0120 17:23:41.313662 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hc4s8\" (UniqueName: \"kubernetes.io/projected/3afcf923-8d53-4638-bba0-09afa338a2e0-kube-api-access-hc4s8\") pod \"cinder-scheduler-0\" (UID: \"3afcf923-8d53-4638-bba0-09afa338a2e0\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:23:41 crc kubenswrapper[4558]: I0120 17:23:41.334696 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/171c3e7f-d112-4980-858e-a06eeb205241-etc-machine-id\") pod \"cinder-api-0\" (UID: \"171c3e7f-d112-4980-858e-a06eeb205241\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:23:41 crc kubenswrapper[4558]: I0120 17:23:41.334791 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/171c3e7f-d112-4980-858e-a06eeb205241-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"171c3e7f-d112-4980-858e-a06eeb205241\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:23:41 crc kubenswrapper[4558]: I0120 17:23:41.334923 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/171c3e7f-d112-4980-858e-a06eeb205241-scripts\") pod \"cinder-api-0\" (UID: \"171c3e7f-d112-4980-858e-a06eeb205241\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:23:41 crc kubenswrapper[4558]: I0120 17:23:41.335115 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dkpkb\" (UniqueName: \"kubernetes.io/projected/171c3e7f-d112-4980-858e-a06eeb205241-kube-api-access-dkpkb\") pod \"cinder-api-0\" (UID: \"171c3e7f-d112-4980-858e-a06eeb205241\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:23:41 crc kubenswrapper[4558]: I0120 17:23:41.335197 4558 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/171c3e7f-d112-4980-858e-a06eeb205241-config-data-custom\") pod \"cinder-api-0\" (UID: \"171c3e7f-d112-4980-858e-a06eeb205241\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:23:41 crc kubenswrapper[4558]: I0120 17:23:41.335276 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/171c3e7f-d112-4980-858e-a06eeb205241-logs\") pod \"cinder-api-0\" (UID: \"171c3e7f-d112-4980-858e-a06eeb205241\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:23:41 crc kubenswrapper[4558]: I0120 17:23:41.335312 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/171c3e7f-d112-4980-858e-a06eeb205241-config-data\") pod \"cinder-api-0\" (UID: \"171c3e7f-d112-4980-858e-a06eeb205241\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:23:41 crc kubenswrapper[4558]: I0120 17:23:41.414539 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:23:41 crc kubenswrapper[4558]: I0120 17:23:41.442184 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/171c3e7f-d112-4980-858e-a06eeb205241-config-data-custom\") pod \"cinder-api-0\" (UID: \"171c3e7f-d112-4980-858e-a06eeb205241\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:23:41 crc kubenswrapper[4558]: I0120 17:23:41.442560 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/171c3e7f-d112-4980-858e-a06eeb205241-logs\") pod \"cinder-api-0\" (UID: \"171c3e7f-d112-4980-858e-a06eeb205241\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:23:41 crc kubenswrapper[4558]: I0120 17:23:41.442588 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/171c3e7f-d112-4980-858e-a06eeb205241-config-data\") pod \"cinder-api-0\" (UID: \"171c3e7f-d112-4980-858e-a06eeb205241\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:23:41 crc kubenswrapper[4558]: I0120 17:23:41.442651 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/171c3e7f-d112-4980-858e-a06eeb205241-etc-machine-id\") pod \"cinder-api-0\" (UID: \"171c3e7f-d112-4980-858e-a06eeb205241\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:23:41 crc kubenswrapper[4558]: I0120 17:23:41.442682 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/171c3e7f-d112-4980-858e-a06eeb205241-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"171c3e7f-d112-4980-858e-a06eeb205241\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:23:41 crc kubenswrapper[4558]: I0120 17:23:41.442736 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/171c3e7f-d112-4980-858e-a06eeb205241-scripts\") pod \"cinder-api-0\" (UID: \"171c3e7f-d112-4980-858e-a06eeb205241\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:23:41 crc kubenswrapper[4558]: I0120 17:23:41.442779 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-dkpkb\" (UniqueName: \"kubernetes.io/projected/171c3e7f-d112-4980-858e-a06eeb205241-kube-api-access-dkpkb\") pod \"cinder-api-0\" (UID: \"171c3e7f-d112-4980-858e-a06eeb205241\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:23:41 crc kubenswrapper[4558]: I0120 17:23:41.445247 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/171c3e7f-d112-4980-858e-a06eeb205241-etc-machine-id\") pod \"cinder-api-0\" (UID: \"171c3e7f-d112-4980-858e-a06eeb205241\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:23:41 crc kubenswrapper[4558]: I0120 17:23:41.445570 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/171c3e7f-d112-4980-858e-a06eeb205241-logs\") pod \"cinder-api-0\" (UID: \"171c3e7f-d112-4980-858e-a06eeb205241\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:23:41 crc kubenswrapper[4558]: I0120 17:23:41.452049 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/171c3e7f-d112-4980-858e-a06eeb205241-config-data\") pod \"cinder-api-0\" (UID: \"171c3e7f-d112-4980-858e-a06eeb205241\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:23:41 crc kubenswrapper[4558]: I0120 17:23:41.452693 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/171c3e7f-d112-4980-858e-a06eeb205241-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"171c3e7f-d112-4980-858e-a06eeb205241\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:23:41 crc kubenswrapper[4558]: I0120 17:23:41.453096 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/171c3e7f-d112-4980-858e-a06eeb205241-config-data-custom\") pod \"cinder-api-0\" (UID: \"171c3e7f-d112-4980-858e-a06eeb205241\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:23:41 crc kubenswrapper[4558]: I0120 17:23:41.454566 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/171c3e7f-d112-4980-858e-a06eeb205241-scripts\") pod \"cinder-api-0\" (UID: \"171c3e7f-d112-4980-858e-a06eeb205241\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:23:41 crc kubenswrapper[4558]: I0120 17:23:41.466698 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dkpkb\" (UniqueName: \"kubernetes.io/projected/171c3e7f-d112-4980-858e-a06eeb205241-kube-api-access-dkpkb\") pod \"cinder-api-0\" (UID: \"171c3e7f-d112-4980-858e-a06eeb205241\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:23:41 crc kubenswrapper[4558]: I0120 17:23:41.604845 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:23:41 crc kubenswrapper[4558]: I0120 17:23:41.984264 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:23:42 crc kubenswrapper[4558]: I0120 17:23:42.104463 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-547b6df4c5-87fjk" event={"ID":"db32104f-5145-43ca-bc9a-5c410bf89933","Type":"ContainerStarted","Data":"a14d46445c620e36992101173d2a0d80fcb77d671533a2ad4c05704ae7e3dd72"} Jan 20 17:23:42 crc kubenswrapper[4558]: I0120 17:23:42.107764 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-66fdf9847-wwwbv" event={"ID":"ef71f603-5259-468f-b39f-4e726cdcb2f5","Type":"ContainerStarted","Data":"7ae163fcbc93afed4572da8b9cfe88991120b5aebe851c19ca31f198e3260a32"} Jan 20 17:23:42 crc kubenswrapper[4558]: I0120 17:23:42.109548 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/placement-66fdf9847-wwwbv" Jan 20 17:23:42 crc kubenswrapper[4558]: I0120 17:23:42.109599 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/placement-66fdf9847-wwwbv" Jan 20 17:23:42 crc kubenswrapper[4558]: I0120 17:23:42.118446 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-5cbb954895-rgzbf" event={"ID":"b8f85165-600b-47f4-b459-e1eb023907a6","Type":"ContainerStarted","Data":"99a4a1d716eab3ac3b920c0fee4a68d744e9cc640f5f82e54e1934eaf6260207"} Jan 20 17:23:42 crc kubenswrapper[4558]: I0120 17:23:42.121459 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-hsd6l" event={"ID":"a9d1ae84-0a1b-4418-a336-39d0313a9857","Type":"ContainerStarted","Data":"663137f7885e57e3788c4f07743e4e3a3ed295d4d32787f249df7daefd8a4ea6"} Jan 20 17:23:42 crc kubenswrapper[4558]: I0120 17:23:42.121480 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-hsd6l" event={"ID":"a9d1ae84-0a1b-4418-a336-39d0313a9857","Type":"ContainerStarted","Data":"0237dfb4cdb734ba3d600e92de3d3f153e25e4ec0eef9a3ae033c2273a1fbbec"} Jan 20 17:23:42 crc kubenswrapper[4558]: I0120 17:23:42.123000 4558 generic.go:334] "Generic (PLEG): container finished" podID="36591742-f9df-4244-b32c-cc952d81893a" containerID="3531ff1008e8ac29849b0e884411abd86aa7bf3787f9aa9fa9e1af55600dfb39" exitCode=1 Jan 20 17:23:42 crc kubenswrapper[4558]: I0120 17:23:42.123018 4558 generic.go:334] "Generic (PLEG): container finished" podID="36591742-f9df-4244-b32c-cc952d81893a" containerID="5ea93b24584a2124e067d69a33744acf8ad8f9ba3cdebcca7c752363ecc038ad" exitCode=1 Jan 20 17:23:42 crc kubenswrapper[4558]: I0120 17:23:42.123050 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-56dc9f6ff9-hc2fq" event={"ID":"36591742-f9df-4244-b32c-cc952d81893a","Type":"ContainerDied","Data":"3531ff1008e8ac29849b0e884411abd86aa7bf3787f9aa9fa9e1af55600dfb39"} Jan 20 17:23:42 crc kubenswrapper[4558]: I0120 17:23:42.123067 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-56dc9f6ff9-hc2fq" event={"ID":"36591742-f9df-4244-b32c-cc952d81893a","Type":"ContainerDied","Data":"5ea93b24584a2124e067d69a33744acf8ad8f9ba3cdebcca7c752363ecc038ad"} Jan 20 17:23:42 crc kubenswrapper[4558]: I0120 17:23:42.123083 4558 scope.go:117] "RemoveContainer" 
containerID="3531ff1008e8ac29849b0e884411abd86aa7bf3787f9aa9fa9e1af55600dfb39" Jan 20 17:23:42 crc kubenswrapper[4558]: I0120 17:23:42.124131 4558 scope.go:117] "RemoveContainer" containerID="5ea93b24584a2124e067d69a33744acf8ad8f9ba3cdebcca7c752363ecc038ad" Jan 20 17:23:42 crc kubenswrapper[4558]: E0120 17:23:42.124363 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"placement-api\" with CrashLoopBackOff: \"back-off 10s restarting failed container=placement-api pod=placement-56dc9f6ff9-hc2fq_openstack-kuttl-tests(36591742-f9df-4244-b32c-cc952d81893a)\"" pod="openstack-kuttl-tests/placement-56dc9f6ff9-hc2fq" podUID="36591742-f9df-4244-b32c-cc952d81893a" Jan 20 17:23:42 crc kubenswrapper[4558]: I0120 17:23:42.133676 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/placement-66fdf9847-wwwbv" podStartSLOduration=3.133656503 podStartE2EDuration="3.133656503s" podCreationTimestamp="2026-01-20 17:23:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:23:42.12966292 +0000 UTC m=+2515.890000887" watchObservedRunningTime="2026-01-20 17:23:42.133656503 +0000 UTC m=+2515.893994470" Jan 20 17:23:42 crc kubenswrapper[4558]: I0120 17:23:42.134264 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"3afcf923-8d53-4638-bba0-09afa338a2e0","Type":"ContainerStarted","Data":"8c9ae0e517861e049938b2bbfe8ea91034126f24fe10242f797d96cfcbe6a5cd"} Jan 20 17:23:42 crc kubenswrapper[4558]: I0120 17:23:42.139225 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd","Type":"ContainerStarted","Data":"ef796dfab67a8244c396cd18509d66bae814af609de469193e0d1ada820bdf4c"} Jan 20 17:23:42 crc kubenswrapper[4558]: I0120 17:23:42.139278 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd","Type":"ContainerStarted","Data":"a13349c6ad853851840538d5939251323a25413ba127237b96c37ec9568d3a1d"} Jan 20 17:23:42 crc kubenswrapper[4558]: I0120 17:23:42.172216 4558 scope.go:117] "RemoveContainer" containerID="3531ff1008e8ac29849b0e884411abd86aa7bf3787f9aa9fa9e1af55600dfb39" Jan 20 17:23:42 crc kubenswrapper[4558]: E0120 17:23:42.173569 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3531ff1008e8ac29849b0e884411abd86aa7bf3787f9aa9fa9e1af55600dfb39\": container with ID starting with 3531ff1008e8ac29849b0e884411abd86aa7bf3787f9aa9fa9e1af55600dfb39 not found: ID does not exist" containerID="3531ff1008e8ac29849b0e884411abd86aa7bf3787f9aa9fa9e1af55600dfb39" Jan 20 17:23:42 crc kubenswrapper[4558]: I0120 17:23:42.173604 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3531ff1008e8ac29849b0e884411abd86aa7bf3787f9aa9fa9e1af55600dfb39"} err="failed to get container status \"3531ff1008e8ac29849b0e884411abd86aa7bf3787f9aa9fa9e1af55600dfb39\": rpc error: code = NotFound desc = could not find container \"3531ff1008e8ac29849b0e884411abd86aa7bf3787f9aa9fa9e1af55600dfb39\": container with ID starting with 3531ff1008e8ac29849b0e884411abd86aa7bf3787f9aa9fa9e1af55600dfb39 not found: ID does not exist" Jan 20 17:23:42 crc kubenswrapper[4558]: I0120 17:23:42.174075 4558 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/keystone-bootstrap-hsd6l" podStartSLOduration=3.174058422 podStartE2EDuration="3.174058422s" podCreationTimestamp="2026-01-20 17:23:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:23:42.162025146 +0000 UTC m=+2515.922363114" watchObservedRunningTime="2026-01-20 17:23:42.174058422 +0000 UTC m=+2515.934396389" Jan 20 17:23:42 crc kubenswrapper[4558]: I0120 17:23:42.177623 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-keystone-listener-5cbb954895-rgzbf" podStartSLOduration=4.177609522 podStartE2EDuration="4.177609522s" podCreationTimestamp="2026-01-20 17:23:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:23:42.149875802 +0000 UTC m=+2515.910213770" watchObservedRunningTime="2026-01-20 17:23:42.177609522 +0000 UTC m=+2515.937947488" Jan 20 17:23:42 crc kubenswrapper[4558]: I0120 17:23:42.233753 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:23:43 crc kubenswrapper[4558]: I0120 17:23:43.158965 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"171c3e7f-d112-4980-858e-a06eeb205241","Type":"ContainerStarted","Data":"e96266598d97f1aa937e1d5e0c2c2170f0641464f13f72b74edd6a22a34006ac"} Jan 20 17:23:43 crc kubenswrapper[4558]: I0120 17:23:43.159411 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"171c3e7f-d112-4980-858e-a06eeb205241","Type":"ContainerStarted","Data":"3f65739176eb2149b4677e850f398ec86a1a533184fb7686438d58b520caa661"} Jan 20 17:23:43 crc kubenswrapper[4558]: I0120 17:23:43.174819 4558 scope.go:117] "RemoveContainer" containerID="5ea93b24584a2124e067d69a33744acf8ad8f9ba3cdebcca7c752363ecc038ad" Jan 20 17:23:43 crc kubenswrapper[4558]: E0120 17:23:43.175070 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"placement-api\" with CrashLoopBackOff: \"back-off 10s restarting failed container=placement-api pod=placement-56dc9f6ff9-hc2fq_openstack-kuttl-tests(36591742-f9df-4244-b32c-cc952d81893a)\"" pod="openstack-kuttl-tests/placement-56dc9f6ff9-hc2fq" podUID="36591742-f9df-4244-b32c-cc952d81893a" Jan 20 17:23:43 crc kubenswrapper[4558]: I0120 17:23:43.180303 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"3afcf923-8d53-4638-bba0-09afa338a2e0","Type":"ContainerStarted","Data":"590199eee583708baf8a6aaf5fd46b030cdff78491c91231b22f6e1896974f18"} Jan 20 17:23:43 crc kubenswrapper[4558]: I0120 17:23:43.183676 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd","Type":"ContainerStarted","Data":"1b4e6a1e48903d69f46a2a4b2a8193e676013ecda2045e0f0db4923d287957b1"} Jan 20 17:23:44 crc kubenswrapper[4558]: I0120 17:23:44.195182 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"171c3e7f-d112-4980-858e-a06eeb205241","Type":"ContainerStarted","Data":"fe10b83beff97523dabad8105396d686fc6188217f34fb4f48747a3b705ea2c1"} Jan 20 17:23:44 crc kubenswrapper[4558]: I0120 17:23:44.195509 4558 kubelet.go:2542] "SyncLoop 
(probe)" probe="readiness" status="" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:23:44 crc kubenswrapper[4558]: I0120 17:23:44.199699 4558 generic.go:334] "Generic (PLEG): container finished" podID="a9d1ae84-0a1b-4418-a336-39d0313a9857" containerID="663137f7885e57e3788c4f07743e4e3a3ed295d4d32787f249df7daefd8a4ea6" exitCode=0 Jan 20 17:23:44 crc kubenswrapper[4558]: I0120 17:23:44.199783 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-hsd6l" event={"ID":"a9d1ae84-0a1b-4418-a336-39d0313a9857","Type":"ContainerDied","Data":"663137f7885e57e3788c4f07743e4e3a3ed295d4d32787f249df7daefd8a4ea6"} Jan 20 17:23:44 crc kubenswrapper[4558]: I0120 17:23:44.201655 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"3afcf923-8d53-4638-bba0-09afa338a2e0","Type":"ContainerStarted","Data":"1d6dc3037ab7163e8ca4ec36d788250ba4e3e3a24cbb39f3ee4b6e153c1ed4b8"} Jan 20 17:23:44 crc kubenswrapper[4558]: I0120 17:23:44.212342 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd","Type":"ContainerStarted","Data":"3dd48463ebaf0c4cc73e6dd204f9dc91627d23f0e17873f60b0ab36ca889d105"} Jan 20 17:23:44 crc kubenswrapper[4558]: I0120 17:23:44.216076 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/cinder-api-0" podStartSLOduration=3.216060389 podStartE2EDuration="3.216060389s" podCreationTimestamp="2026-01-20 17:23:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:23:44.212501994 +0000 UTC m=+2517.972839961" watchObservedRunningTime="2026-01-20 17:23:44.216060389 +0000 UTC m=+2517.976398355" Jan 20 17:23:44 crc kubenswrapper[4558]: I0120 17:23:44.236482 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/cinder-scheduler-0" podStartSLOduration=4.23644892 podStartE2EDuration="4.23644892s" podCreationTimestamp="2026-01-20 17:23:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:23:44.231270188 +0000 UTC m=+2517.991608155" watchObservedRunningTime="2026-01-20 17:23:44.23644892 +0000 UTC m=+2517.996786887" Jan 20 17:23:45 crc kubenswrapper[4558]: I0120 17:23:45.225482 4558 generic.go:334] "Generic (PLEG): container finished" podID="c28fa234-6cff-43e9-9cf5-a5cd2f3b67be" containerID="bc4cc07eb90d7d3d3906a3d0cffea06fd114245a87ecda09cca341976ee3130b" exitCode=0 Jan 20 17:23:45 crc kubenswrapper[4558]: I0120 17:23:45.225537 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-sync-plxqg" event={"ID":"c28fa234-6cff-43e9-9cf5-a5cd2f3b67be","Type":"ContainerDied","Data":"bc4cc07eb90d7d3d3906a3d0cffea06fd114245a87ecda09cca341976ee3130b"} Jan 20 17:23:45 crc kubenswrapper[4558]: I0120 17:23:45.230276 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd","Type":"ContainerStarted","Data":"1f1826fa85536c90dc6a1fb82f634d04594f8e977b10383c34ca6e82e13e96b3"} Jan 20 17:23:45 crc kubenswrapper[4558]: I0120 17:23:45.272754 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=1.29396214 podStartE2EDuration="5.27273672s" 
podCreationTimestamp="2026-01-20 17:23:40 +0000 UTC" firstStartedPulling="2026-01-20 17:23:40.867434011 +0000 UTC m=+2514.627771978" lastFinishedPulling="2026-01-20 17:23:44.846208591 +0000 UTC m=+2518.606546558" observedRunningTime="2026-01-20 17:23:45.2658685 +0000 UTC m=+2519.026206467" watchObservedRunningTime="2026-01-20 17:23:45.27273672 +0000 UTC m=+2519.033074687" Jan 20 17:23:45 crc kubenswrapper[4558]: I0120 17:23:45.578005 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-hsd6l" Jan 20 17:23:45 crc kubenswrapper[4558]: I0120 17:23:45.660677 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/a9d1ae84-0a1b-4418-a336-39d0313a9857-fernet-keys\") pod \"a9d1ae84-0a1b-4418-a336-39d0313a9857\" (UID: \"a9d1ae84-0a1b-4418-a336-39d0313a9857\") " Jan 20 17:23:45 crc kubenswrapper[4558]: I0120 17:23:45.660796 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a9d1ae84-0a1b-4418-a336-39d0313a9857-scripts\") pod \"a9d1ae84-0a1b-4418-a336-39d0313a9857\" (UID: \"a9d1ae84-0a1b-4418-a336-39d0313a9857\") " Jan 20 17:23:45 crc kubenswrapper[4558]: I0120 17:23:45.660836 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wv5gs\" (UniqueName: \"kubernetes.io/projected/a9d1ae84-0a1b-4418-a336-39d0313a9857-kube-api-access-wv5gs\") pod \"a9d1ae84-0a1b-4418-a336-39d0313a9857\" (UID: \"a9d1ae84-0a1b-4418-a336-39d0313a9857\") " Jan 20 17:23:45 crc kubenswrapper[4558]: I0120 17:23:45.661048 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a9d1ae84-0a1b-4418-a336-39d0313a9857-config-data\") pod \"a9d1ae84-0a1b-4418-a336-39d0313a9857\" (UID: \"a9d1ae84-0a1b-4418-a336-39d0313a9857\") " Jan 20 17:23:45 crc kubenswrapper[4558]: I0120 17:23:45.661132 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/a9d1ae84-0a1b-4418-a336-39d0313a9857-credential-keys\") pod \"a9d1ae84-0a1b-4418-a336-39d0313a9857\" (UID: \"a9d1ae84-0a1b-4418-a336-39d0313a9857\") " Jan 20 17:23:45 crc kubenswrapper[4558]: I0120 17:23:45.661355 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9d1ae84-0a1b-4418-a336-39d0313a9857-combined-ca-bundle\") pod \"a9d1ae84-0a1b-4418-a336-39d0313a9857\" (UID: \"a9d1ae84-0a1b-4418-a336-39d0313a9857\") " Jan 20 17:23:45 crc kubenswrapper[4558]: I0120 17:23:45.668522 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9d1ae84-0a1b-4418-a336-39d0313a9857-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "a9d1ae84-0a1b-4418-a336-39d0313a9857" (UID: "a9d1ae84-0a1b-4418-a336-39d0313a9857"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:23:45 crc kubenswrapper[4558]: I0120 17:23:45.669551 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9d1ae84-0a1b-4418-a336-39d0313a9857-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "a9d1ae84-0a1b-4418-a336-39d0313a9857" (UID: "a9d1ae84-0a1b-4418-a336-39d0313a9857"). InnerVolumeSpecName "fernet-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:23:45 crc kubenswrapper[4558]: I0120 17:23:45.671321 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a9d1ae84-0a1b-4418-a336-39d0313a9857-kube-api-access-wv5gs" (OuterVolumeSpecName: "kube-api-access-wv5gs") pod "a9d1ae84-0a1b-4418-a336-39d0313a9857" (UID: "a9d1ae84-0a1b-4418-a336-39d0313a9857"). InnerVolumeSpecName "kube-api-access-wv5gs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:23:45 crc kubenswrapper[4558]: I0120 17:23:45.674507 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9d1ae84-0a1b-4418-a336-39d0313a9857-scripts" (OuterVolumeSpecName: "scripts") pod "a9d1ae84-0a1b-4418-a336-39d0313a9857" (UID: "a9d1ae84-0a1b-4418-a336-39d0313a9857"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:23:45 crc kubenswrapper[4558]: E0120 17:23:45.690520 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a9d1ae84-0a1b-4418-a336-39d0313a9857-combined-ca-bundle podName:a9d1ae84-0a1b-4418-a336-39d0313a9857 nodeName:}" failed. No retries permitted until 2026-01-20 17:23:46.190487186 +0000 UTC m=+2519.950825154 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/a9d1ae84-0a1b-4418-a336-39d0313a9857-combined-ca-bundle") pod "a9d1ae84-0a1b-4418-a336-39d0313a9857" (UID: "a9d1ae84-0a1b-4418-a336-39d0313a9857") : error deleting /var/lib/kubelet/pods/a9d1ae84-0a1b-4418-a336-39d0313a9857/volume-subpaths: remove /var/lib/kubelet/pods/a9d1ae84-0a1b-4418-a336-39d0313a9857/volume-subpaths: no such file or directory Jan 20 17:23:45 crc kubenswrapper[4558]: I0120 17:23:45.693234 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9d1ae84-0a1b-4418-a336-39d0313a9857-config-data" (OuterVolumeSpecName: "config-data") pod "a9d1ae84-0a1b-4418-a336-39d0313a9857" (UID: "a9d1ae84-0a1b-4418-a336-39d0313a9857"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:23:45 crc kubenswrapper[4558]: I0120 17:23:45.763941 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a9d1ae84-0a1b-4418-a336-39d0313a9857-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:45 crc kubenswrapper[4558]: I0120 17:23:45.763974 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wv5gs\" (UniqueName: \"kubernetes.io/projected/a9d1ae84-0a1b-4418-a336-39d0313a9857-kube-api-access-wv5gs\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:45 crc kubenswrapper[4558]: I0120 17:23:45.763986 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a9d1ae84-0a1b-4418-a336-39d0313a9857-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:45 crc kubenswrapper[4558]: I0120 17:23:45.763995 4558 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/a9d1ae84-0a1b-4418-a336-39d0313a9857-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:45 crc kubenswrapper[4558]: I0120 17:23:45.764004 4558 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/a9d1ae84-0a1b-4418-a336-39d0313a9857-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:46 crc kubenswrapper[4558]: I0120 17:23:46.092080 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:23:46 crc kubenswrapper[4558]: I0120 17:23:46.253118 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-hsd6l" event={"ID":"a9d1ae84-0a1b-4418-a336-39d0313a9857","Type":"ContainerDied","Data":"0237dfb4cdb734ba3d600e92de3d3f153e25e4ec0eef9a3ae033c2273a1fbbec"} Jan 20 17:23:46 crc kubenswrapper[4558]: I0120 17:23:46.253183 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0237dfb4cdb734ba3d600e92de3d3f153e25e4ec0eef9a3ae033c2273a1fbbec" Jan 20 17:23:46 crc kubenswrapper[4558]: I0120 17:23:46.253289 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-hsd6l" Jan 20 17:23:46 crc kubenswrapper[4558]: I0120 17:23:46.253420 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-api-0" podUID="171c3e7f-d112-4980-858e-a06eeb205241" containerName="cinder-api-log" containerID="cri-o://e96266598d97f1aa937e1d5e0c2c2170f0641464f13f72b74edd6a22a34006ac" gracePeriod=30 Jan 20 17:23:46 crc kubenswrapper[4558]: I0120 17:23:46.253601 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-api-0" podUID="171c3e7f-d112-4980-858e-a06eeb205241" containerName="cinder-api" containerID="cri-o://fe10b83beff97523dabad8105396d686fc6188217f34fb4f48747a3b705ea2c1" gracePeriod=30 Jan 20 17:23:46 crc kubenswrapper[4558]: I0120 17:23:46.255950 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:23:46 crc kubenswrapper[4558]: I0120 17:23:46.274926 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9d1ae84-0a1b-4418-a336-39d0313a9857-combined-ca-bundle\") pod \"a9d1ae84-0a1b-4418-a336-39d0313a9857\" (UID: \"a9d1ae84-0a1b-4418-a336-39d0313a9857\") " Jan 20 17:23:46 crc kubenswrapper[4558]: I0120 17:23:46.324366 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9d1ae84-0a1b-4418-a336-39d0313a9857-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a9d1ae84-0a1b-4418-a336-39d0313a9857" (UID: "a9d1ae84-0a1b-4418-a336-39d0313a9857"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:23:46 crc kubenswrapper[4558]: I0120 17:23:46.378205 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9d1ae84-0a1b-4418-a336-39d0313a9857-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:46 crc kubenswrapper[4558]: I0120 17:23:46.415679 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:23:46 crc kubenswrapper[4558]: I0120 17:23:46.432976 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-6866498598-2wxkp"] Jan 20 17:23:46 crc kubenswrapper[4558]: E0120 17:23:46.433289 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9d1ae84-0a1b-4418-a336-39d0313a9857" containerName="keystone-bootstrap" Jan 20 17:23:46 crc kubenswrapper[4558]: I0120 17:23:46.433306 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9d1ae84-0a1b-4418-a336-39d0313a9857" containerName="keystone-bootstrap" Jan 20 17:23:46 crc kubenswrapper[4558]: I0120 17:23:46.433476 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a9d1ae84-0a1b-4418-a336-39d0313a9857" containerName="keystone-bootstrap" Jan 20 17:23:46 crc kubenswrapper[4558]: I0120 17:23:46.433998 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-6866498598-2wxkp" Jan 20 17:23:46 crc kubenswrapper[4558]: I0120 17:23:46.436288 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-keystone-internal-svc" Jan 20 17:23:46 crc kubenswrapper[4558]: I0120 17:23:46.436524 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-keystone-public-svc" Jan 20 17:23:46 crc kubenswrapper[4558]: I0120 17:23:46.513317 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-6866498598-2wxkp"] Jan 20 17:23:46 crc kubenswrapper[4558]: I0120 17:23:46.590947 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zlr65\" (UniqueName: \"kubernetes.io/projected/72394a99-d879-415b-9ae9-21165451ac70-kube-api-access-zlr65\") pod \"keystone-6866498598-2wxkp\" (UID: \"72394a99-d879-415b-9ae9-21165451ac70\") " pod="openstack-kuttl-tests/keystone-6866498598-2wxkp" Jan 20 17:23:46 crc kubenswrapper[4558]: I0120 17:23:46.591239 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/72394a99-d879-415b-9ae9-21165451ac70-public-tls-certs\") pod \"keystone-6866498598-2wxkp\" (UID: \"72394a99-d879-415b-9ae9-21165451ac70\") " pod="openstack-kuttl-tests/keystone-6866498598-2wxkp" Jan 20 17:23:46 crc kubenswrapper[4558]: I0120 17:23:46.591321 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/72394a99-d879-415b-9ae9-21165451ac70-scripts\") pod \"keystone-6866498598-2wxkp\" (UID: \"72394a99-d879-415b-9ae9-21165451ac70\") " pod="openstack-kuttl-tests/keystone-6866498598-2wxkp" Jan 20 17:23:46 crc kubenswrapper[4558]: I0120 17:23:46.591404 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/72394a99-d879-415b-9ae9-21165451ac70-fernet-keys\") pod \"keystone-6866498598-2wxkp\" (UID: \"72394a99-d879-415b-9ae9-21165451ac70\") " pod="openstack-kuttl-tests/keystone-6866498598-2wxkp" Jan 20 17:23:46 crc kubenswrapper[4558]: I0120 17:23:46.591456 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/72394a99-d879-415b-9ae9-21165451ac70-credential-keys\") pod \"keystone-6866498598-2wxkp\" (UID: \"72394a99-d879-415b-9ae9-21165451ac70\") " pod="openstack-kuttl-tests/keystone-6866498598-2wxkp" Jan 20 17:23:46 crc kubenswrapper[4558]: I0120 17:23:46.591477 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72394a99-d879-415b-9ae9-21165451ac70-combined-ca-bundle\") pod \"keystone-6866498598-2wxkp\" (UID: \"72394a99-d879-415b-9ae9-21165451ac70\") " pod="openstack-kuttl-tests/keystone-6866498598-2wxkp" Jan 20 17:23:46 crc kubenswrapper[4558]: I0120 17:23:46.591510 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/72394a99-d879-415b-9ae9-21165451ac70-internal-tls-certs\") pod \"keystone-6866498598-2wxkp\" (UID: \"72394a99-d879-415b-9ae9-21165451ac70\") " pod="openstack-kuttl-tests/keystone-6866498598-2wxkp" Jan 20 17:23:46 crc kubenswrapper[4558]: 
I0120 17:23:46.591546 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/72394a99-d879-415b-9ae9-21165451ac70-config-data\") pod \"keystone-6866498598-2wxkp\" (UID: \"72394a99-d879-415b-9ae9-21165451ac70\") " pod="openstack-kuttl-tests/keystone-6866498598-2wxkp" Jan 20 17:23:46 crc kubenswrapper[4558]: I0120 17:23:46.697687 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/72394a99-d879-415b-9ae9-21165451ac70-public-tls-certs\") pod \"keystone-6866498598-2wxkp\" (UID: \"72394a99-d879-415b-9ae9-21165451ac70\") " pod="openstack-kuttl-tests/keystone-6866498598-2wxkp" Jan 20 17:23:46 crc kubenswrapper[4558]: I0120 17:23:46.697769 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/72394a99-d879-415b-9ae9-21165451ac70-scripts\") pod \"keystone-6866498598-2wxkp\" (UID: \"72394a99-d879-415b-9ae9-21165451ac70\") " pod="openstack-kuttl-tests/keystone-6866498598-2wxkp" Jan 20 17:23:46 crc kubenswrapper[4558]: I0120 17:23:46.697867 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/72394a99-d879-415b-9ae9-21165451ac70-fernet-keys\") pod \"keystone-6866498598-2wxkp\" (UID: \"72394a99-d879-415b-9ae9-21165451ac70\") " pod="openstack-kuttl-tests/keystone-6866498598-2wxkp" Jan 20 17:23:46 crc kubenswrapper[4558]: I0120 17:23:46.697917 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/72394a99-d879-415b-9ae9-21165451ac70-credential-keys\") pod \"keystone-6866498598-2wxkp\" (UID: \"72394a99-d879-415b-9ae9-21165451ac70\") " pod="openstack-kuttl-tests/keystone-6866498598-2wxkp" Jan 20 17:23:46 crc kubenswrapper[4558]: I0120 17:23:46.697939 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72394a99-d879-415b-9ae9-21165451ac70-combined-ca-bundle\") pod \"keystone-6866498598-2wxkp\" (UID: \"72394a99-d879-415b-9ae9-21165451ac70\") " pod="openstack-kuttl-tests/keystone-6866498598-2wxkp" Jan 20 17:23:46 crc kubenswrapper[4558]: I0120 17:23:46.697971 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/72394a99-d879-415b-9ae9-21165451ac70-internal-tls-certs\") pod \"keystone-6866498598-2wxkp\" (UID: \"72394a99-d879-415b-9ae9-21165451ac70\") " pod="openstack-kuttl-tests/keystone-6866498598-2wxkp" Jan 20 17:23:46 crc kubenswrapper[4558]: I0120 17:23:46.697989 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/72394a99-d879-415b-9ae9-21165451ac70-config-data\") pod \"keystone-6866498598-2wxkp\" (UID: \"72394a99-d879-415b-9ae9-21165451ac70\") " pod="openstack-kuttl-tests/keystone-6866498598-2wxkp" Jan 20 17:23:46 crc kubenswrapper[4558]: I0120 17:23:46.698047 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zlr65\" (UniqueName: \"kubernetes.io/projected/72394a99-d879-415b-9ae9-21165451ac70-kube-api-access-zlr65\") pod \"keystone-6866498598-2wxkp\" (UID: \"72394a99-d879-415b-9ae9-21165451ac70\") " pod="openstack-kuttl-tests/keystone-6866498598-2wxkp" Jan 20 17:23:46 crc kubenswrapper[4558]: 
I0120 17:23:46.706182 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-keystone-public-svc" Jan 20 17:23:46 crc kubenswrapper[4558]: I0120 17:23:46.709923 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/72394a99-d879-415b-9ae9-21165451ac70-config-data\") pod \"keystone-6866498598-2wxkp\" (UID: \"72394a99-d879-415b-9ae9-21165451ac70\") " pod="openstack-kuttl-tests/keystone-6866498598-2wxkp" Jan 20 17:23:46 crc kubenswrapper[4558]: I0120 17:23:46.712700 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-keystone-internal-svc" Jan 20 17:23:46 crc kubenswrapper[4558]: I0120 17:23:46.713961 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/72394a99-d879-415b-9ae9-21165451ac70-fernet-keys\") pod \"keystone-6866498598-2wxkp\" (UID: \"72394a99-d879-415b-9ae9-21165451ac70\") " pod="openstack-kuttl-tests/keystone-6866498598-2wxkp" Jan 20 17:23:46 crc kubenswrapper[4558]: I0120 17:23:46.714246 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/72394a99-d879-415b-9ae9-21165451ac70-credential-keys\") pod \"keystone-6866498598-2wxkp\" (UID: \"72394a99-d879-415b-9ae9-21165451ac70\") " pod="openstack-kuttl-tests/keystone-6866498598-2wxkp" Jan 20 17:23:46 crc kubenswrapper[4558]: I0120 17:23:46.719281 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/72394a99-d879-415b-9ae9-21165451ac70-scripts\") pod \"keystone-6866498598-2wxkp\" (UID: \"72394a99-d879-415b-9ae9-21165451ac70\") " pod="openstack-kuttl-tests/keystone-6866498598-2wxkp" Jan 20 17:23:46 crc kubenswrapper[4558]: I0120 17:23:46.719573 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/72394a99-d879-415b-9ae9-21165451ac70-public-tls-certs\") pod \"keystone-6866498598-2wxkp\" (UID: \"72394a99-d879-415b-9ae9-21165451ac70\") " pod="openstack-kuttl-tests/keystone-6866498598-2wxkp" Jan 20 17:23:46 crc kubenswrapper[4558]: I0120 17:23:46.720013 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zlr65\" (UniqueName: \"kubernetes.io/projected/72394a99-d879-415b-9ae9-21165451ac70-kube-api-access-zlr65\") pod \"keystone-6866498598-2wxkp\" (UID: \"72394a99-d879-415b-9ae9-21165451ac70\") " pod="openstack-kuttl-tests/keystone-6866498598-2wxkp" Jan 20 17:23:46 crc kubenswrapper[4558]: I0120 17:23:46.720139 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72394a99-d879-415b-9ae9-21165451ac70-combined-ca-bundle\") pod \"keystone-6866498598-2wxkp\" (UID: \"72394a99-d879-415b-9ae9-21165451ac70\") " pod="openstack-kuttl-tests/keystone-6866498598-2wxkp" Jan 20 17:23:46 crc kubenswrapper[4558]: I0120 17:23:46.722530 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/72394a99-d879-415b-9ae9-21165451ac70-internal-tls-certs\") pod \"keystone-6866498598-2wxkp\" (UID: \"72394a99-d879-415b-9ae9-21165451ac70\") " pod="openstack-kuttl-tests/keystone-6866498598-2wxkp" Jan 20 17:23:46 crc kubenswrapper[4558]: I0120 17:23:46.760747 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-6866498598-2wxkp" Jan 20 17:23:46 crc kubenswrapper[4558]: I0120 17:23:46.806941 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-db-sync-plxqg" Jan 20 17:23:46 crc kubenswrapper[4558]: I0120 17:23:46.900294 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/c28fa234-6cff-43e9-9cf5-a5cd2f3b67be-config\") pod \"c28fa234-6cff-43e9-9cf5-a5cd2f3b67be\" (UID: \"c28fa234-6cff-43e9-9cf5-a5cd2f3b67be\") " Jan 20 17:23:46 crc kubenswrapper[4558]: I0120 17:23:46.900512 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5gb2g\" (UniqueName: \"kubernetes.io/projected/c28fa234-6cff-43e9-9cf5-a5cd2f3b67be-kube-api-access-5gb2g\") pod \"c28fa234-6cff-43e9-9cf5-a5cd2f3b67be\" (UID: \"c28fa234-6cff-43e9-9cf5-a5cd2f3b67be\") " Jan 20 17:23:46 crc kubenswrapper[4558]: I0120 17:23:46.900629 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c28fa234-6cff-43e9-9cf5-a5cd2f3b67be-combined-ca-bundle\") pod \"c28fa234-6cff-43e9-9cf5-a5cd2f3b67be\" (UID: \"c28fa234-6cff-43e9-9cf5-a5cd2f3b67be\") " Jan 20 17:23:46 crc kubenswrapper[4558]: I0120 17:23:46.908680 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c28fa234-6cff-43e9-9cf5-a5cd2f3b67be-kube-api-access-5gb2g" (OuterVolumeSpecName: "kube-api-access-5gb2g") pod "c28fa234-6cff-43e9-9cf5-a5cd2f3b67be" (UID: "c28fa234-6cff-43e9-9cf5-a5cd2f3b67be"). InnerVolumeSpecName "kube-api-access-5gb2g". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:23:46 crc kubenswrapper[4558]: I0120 17:23:46.928611 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c28fa234-6cff-43e9-9cf5-a5cd2f3b67be-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c28fa234-6cff-43e9-9cf5-a5cd2f3b67be" (UID: "c28fa234-6cff-43e9-9cf5-a5cd2f3b67be"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:23:46 crc kubenswrapper[4558]: I0120 17:23:46.941254 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c28fa234-6cff-43e9-9cf5-a5cd2f3b67be-config" (OuterVolumeSpecName: "config") pod "c28fa234-6cff-43e9-9cf5-a5cd2f3b67be" (UID: "c28fa234-6cff-43e9-9cf5-a5cd2f3b67be"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.004614 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5gb2g\" (UniqueName: \"kubernetes.io/projected/c28fa234-6cff-43e9-9cf5-a5cd2f3b67be-kube-api-access-5gb2g\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.004699 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c28fa234-6cff-43e9-9cf5-a5cd2f3b67be-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.004714 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/c28fa234-6cff-43e9-9cf5-a5cd2f3b67be-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.246580 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-6866498598-2wxkp"] Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.271534 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-db-sync-plxqg" Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.272809 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-sync-plxqg" event={"ID":"c28fa234-6cff-43e9-9cf5-a5cd2f3b67be","Type":"ContainerDied","Data":"1f6de6abe858aaebe72cc976768454c62c63c2ceeb81c03964d8eed7f09b3b3e"} Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.272878 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1f6de6abe858aaebe72cc976768454c62c63c2ceeb81c03964d8eed7f09b3b3e" Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.277809 4558 generic.go:334] "Generic (PLEG): container finished" podID="171c3e7f-d112-4980-858e-a06eeb205241" containerID="fe10b83beff97523dabad8105396d686fc6188217f34fb4f48747a3b705ea2c1" exitCode=0 Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.277846 4558 generic.go:334] "Generic (PLEG): container finished" podID="171c3e7f-d112-4980-858e-a06eeb205241" containerID="e96266598d97f1aa937e1d5e0c2c2170f0641464f13f72b74edd6a22a34006ac" exitCode=143 Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.277889 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"171c3e7f-d112-4980-858e-a06eeb205241","Type":"ContainerDied","Data":"fe10b83beff97523dabad8105396d686fc6188217f34fb4f48747a3b705ea2c1"} Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.277937 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"171c3e7f-d112-4980-858e-a06eeb205241","Type":"ContainerDied","Data":"e96266598d97f1aa937e1d5e0c2c2170f0641464f13f72b74edd6a22a34006ac"} Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.279184 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-6866498598-2wxkp" event={"ID":"72394a99-d879-415b-9ae9-21165451ac70","Type":"ContainerStarted","Data":"9ad156af82950e8f5e276e76cb19e3ef406261de26707bcdd959116491a9ace8"} Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.348562 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.412646 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/171c3e7f-d112-4980-858e-a06eeb205241-etc-machine-id\") pod \"171c3e7f-d112-4980-858e-a06eeb205241\" (UID: \"171c3e7f-d112-4980-858e-a06eeb205241\") " Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.412731 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/171c3e7f-d112-4980-858e-a06eeb205241-logs\") pod \"171c3e7f-d112-4980-858e-a06eeb205241\" (UID: \"171c3e7f-d112-4980-858e-a06eeb205241\") " Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.412873 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/171c3e7f-d112-4980-858e-a06eeb205241-config-data-custom\") pod \"171c3e7f-d112-4980-858e-a06eeb205241\" (UID: \"171c3e7f-d112-4980-858e-a06eeb205241\") " Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.412938 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/171c3e7f-d112-4980-858e-a06eeb205241-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "171c3e7f-d112-4980-858e-a06eeb205241" (UID: "171c3e7f-d112-4980-858e-a06eeb205241"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.412957 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/171c3e7f-d112-4980-858e-a06eeb205241-combined-ca-bundle\") pod \"171c3e7f-d112-4980-858e-a06eeb205241\" (UID: \"171c3e7f-d112-4980-858e-a06eeb205241\") " Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.413225 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/171c3e7f-d112-4980-858e-a06eeb205241-config-data\") pod \"171c3e7f-d112-4980-858e-a06eeb205241\" (UID: \"171c3e7f-d112-4980-858e-a06eeb205241\") " Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.413338 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dkpkb\" (UniqueName: \"kubernetes.io/projected/171c3e7f-d112-4980-858e-a06eeb205241-kube-api-access-dkpkb\") pod \"171c3e7f-d112-4980-858e-a06eeb205241\" (UID: \"171c3e7f-d112-4980-858e-a06eeb205241\") " Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.413407 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/171c3e7f-d112-4980-858e-a06eeb205241-scripts\") pod \"171c3e7f-d112-4980-858e-a06eeb205241\" (UID: \"171c3e7f-d112-4980-858e-a06eeb205241\") " Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.414289 4558 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/171c3e7f-d112-4980-858e-a06eeb205241-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.413221 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/171c3e7f-d112-4980-858e-a06eeb205241-logs" (OuterVolumeSpecName: "logs") pod "171c3e7f-d112-4980-858e-a06eeb205241" (UID: 
"171c3e7f-d112-4980-858e-a06eeb205241"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.417987 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/171c3e7f-d112-4980-858e-a06eeb205241-scripts" (OuterVolumeSpecName: "scripts") pod "171c3e7f-d112-4980-858e-a06eeb205241" (UID: "171c3e7f-d112-4980-858e-a06eeb205241"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.418562 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/171c3e7f-d112-4980-858e-a06eeb205241-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "171c3e7f-d112-4980-858e-a06eeb205241" (UID: "171c3e7f-d112-4980-858e-a06eeb205241"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.420967 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/171c3e7f-d112-4980-858e-a06eeb205241-kube-api-access-dkpkb" (OuterVolumeSpecName: "kube-api-access-dkpkb") pod "171c3e7f-d112-4980-858e-a06eeb205241" (UID: "171c3e7f-d112-4980-858e-a06eeb205241"). InnerVolumeSpecName "kube-api-access-dkpkb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.466525 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/171c3e7f-d112-4980-858e-a06eeb205241-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "171c3e7f-d112-4980-858e-a06eeb205241" (UID: "171c3e7f-d112-4980-858e-a06eeb205241"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.486750 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/171c3e7f-d112-4980-858e-a06eeb205241-config-data" (OuterVolumeSpecName: "config-data") pod "171c3e7f-d112-4980-858e-a06eeb205241" (UID: "171c3e7f-d112-4980-858e-a06eeb205241"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.516574 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/171c3e7f-d112-4980-858e-a06eeb205241-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.516849 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/171c3e7f-d112-4980-858e-a06eeb205241-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.516861 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/171c3e7f-d112-4980-858e-a06eeb205241-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.516872 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dkpkb\" (UniqueName: \"kubernetes.io/projected/171c3e7f-d112-4980-858e-a06eeb205241-kube-api-access-dkpkb\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.516886 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/171c3e7f-d112-4980-858e-a06eeb205241-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.516894 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/171c3e7f-d112-4980-858e-a06eeb205241-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.620343 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-8668888664-j9vrg"] Jan 20 17:23:47 crc kubenswrapper[4558]: E0120 17:23:47.620726 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="171c3e7f-d112-4980-858e-a06eeb205241" containerName="cinder-api" Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.620744 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="171c3e7f-d112-4980-858e-a06eeb205241" containerName="cinder-api" Jan 20 17:23:47 crc kubenswrapper[4558]: E0120 17:23:47.620758 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c28fa234-6cff-43e9-9cf5-a5cd2f3b67be" containerName="neutron-db-sync" Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.620764 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c28fa234-6cff-43e9-9cf5-a5cd2f3b67be" containerName="neutron-db-sync" Jan 20 17:23:47 crc kubenswrapper[4558]: E0120 17:23:47.620790 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="171c3e7f-d112-4980-858e-a06eeb205241" containerName="cinder-api-log" Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.620796 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="171c3e7f-d112-4980-858e-a06eeb205241" containerName="cinder-api-log" Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.620977 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="171c3e7f-d112-4980-858e-a06eeb205241" containerName="cinder-api" Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.620991 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="171c3e7f-d112-4980-858e-a06eeb205241" containerName="cinder-api-log" Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.621001 4558 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="c28fa234-6cff-43e9-9cf5-a5cd2f3b67be" containerName="neutron-db-sync" Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.621856 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-8668888664-j9vrg" Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.627423 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-httpd-config" Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.627881 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-neutron-ovndbs" Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.628049 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-neutron-dockercfg-4hpsm" Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.628155 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-config" Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.648465 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-8668888664-j9vrg"] Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.710928 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-api-666d4cc7c-5vfvn"] Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.713233 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-api-666d4cc7c-5vfvn" Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.716271 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-barbican-internal-svc" Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.718059 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-barbican-public-svc" Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.736821 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tpk55\" (UniqueName: \"kubernetes.io/projected/12f5bd2b-9da9-49be-992d-d2345bb393e2-kube-api-access-tpk55\") pod \"neutron-8668888664-j9vrg\" (UID: \"12f5bd2b-9da9-49be-992d-d2345bb393e2\") " pod="openstack-kuttl-tests/neutron-8668888664-j9vrg" Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.736995 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/12f5bd2b-9da9-49be-992d-d2345bb393e2-httpd-config\") pod \"neutron-8668888664-j9vrg\" (UID: \"12f5bd2b-9da9-49be-992d-d2345bb393e2\") " pod="openstack-kuttl-tests/neutron-8668888664-j9vrg" Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.737057 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/12f5bd2b-9da9-49be-992d-d2345bb393e2-combined-ca-bundle\") pod \"neutron-8668888664-j9vrg\" (UID: \"12f5bd2b-9da9-49be-992d-d2345bb393e2\") " pod="openstack-kuttl-tests/neutron-8668888664-j9vrg" Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.737543 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/12f5bd2b-9da9-49be-992d-d2345bb393e2-config\") pod \"neutron-8668888664-j9vrg\" (UID: \"12f5bd2b-9da9-49be-992d-d2345bb393e2\") " pod="openstack-kuttl-tests/neutron-8668888664-j9vrg" Jan 20 17:23:47 crc 
kubenswrapper[4558]: I0120 17:23:47.737585 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/12f5bd2b-9da9-49be-992d-d2345bb393e2-ovndb-tls-certs\") pod \"neutron-8668888664-j9vrg\" (UID: \"12f5bd2b-9da9-49be-992d-d2345bb393e2\") " pod="openstack-kuttl-tests/neutron-8668888664-j9vrg" Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.754483 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-api-666d4cc7c-5vfvn"] Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.839793 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d86wh\" (UniqueName: \"kubernetes.io/projected/000dbc78-69b3-452b-8d87-9781c58b7b06-kube-api-access-d86wh\") pod \"barbican-api-666d4cc7c-5vfvn\" (UID: \"000dbc78-69b3-452b-8d87-9781c58b7b06\") " pod="openstack-kuttl-tests/barbican-api-666d4cc7c-5vfvn" Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.839838 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/000dbc78-69b3-452b-8d87-9781c58b7b06-logs\") pod \"barbican-api-666d4cc7c-5vfvn\" (UID: \"000dbc78-69b3-452b-8d87-9781c58b7b06\") " pod="openstack-kuttl-tests/barbican-api-666d4cc7c-5vfvn" Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.839872 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/12f5bd2b-9da9-49be-992d-d2345bb393e2-config\") pod \"neutron-8668888664-j9vrg\" (UID: \"12f5bd2b-9da9-49be-992d-d2345bb393e2\") " pod="openstack-kuttl-tests/neutron-8668888664-j9vrg" Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.839892 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/000dbc78-69b3-452b-8d87-9781c58b7b06-config-data-custom\") pod \"barbican-api-666d4cc7c-5vfvn\" (UID: \"000dbc78-69b3-452b-8d87-9781c58b7b06\") " pod="openstack-kuttl-tests/barbican-api-666d4cc7c-5vfvn" Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.839920 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/12f5bd2b-9da9-49be-992d-d2345bb393e2-ovndb-tls-certs\") pod \"neutron-8668888664-j9vrg\" (UID: \"12f5bd2b-9da9-49be-992d-d2345bb393e2\") " pod="openstack-kuttl-tests/neutron-8668888664-j9vrg" Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.840048 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/000dbc78-69b3-452b-8d87-9781c58b7b06-internal-tls-certs\") pod \"barbican-api-666d4cc7c-5vfvn\" (UID: \"000dbc78-69b3-452b-8d87-9781c58b7b06\") " pod="openstack-kuttl-tests/barbican-api-666d4cc7c-5vfvn" Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.840076 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/000dbc78-69b3-452b-8d87-9781c58b7b06-public-tls-certs\") pod \"barbican-api-666d4cc7c-5vfvn\" (UID: \"000dbc78-69b3-452b-8d87-9781c58b7b06\") " pod="openstack-kuttl-tests/barbican-api-666d4cc7c-5vfvn" Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.840105 4558 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/000dbc78-69b3-452b-8d87-9781c58b7b06-combined-ca-bundle\") pod \"barbican-api-666d4cc7c-5vfvn\" (UID: \"000dbc78-69b3-452b-8d87-9781c58b7b06\") " pod="openstack-kuttl-tests/barbican-api-666d4cc7c-5vfvn" Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.840198 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tpk55\" (UniqueName: \"kubernetes.io/projected/12f5bd2b-9da9-49be-992d-d2345bb393e2-kube-api-access-tpk55\") pod \"neutron-8668888664-j9vrg\" (UID: \"12f5bd2b-9da9-49be-992d-d2345bb393e2\") " pod="openstack-kuttl-tests/neutron-8668888664-j9vrg" Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.840237 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/000dbc78-69b3-452b-8d87-9781c58b7b06-config-data\") pod \"barbican-api-666d4cc7c-5vfvn\" (UID: \"000dbc78-69b3-452b-8d87-9781c58b7b06\") " pod="openstack-kuttl-tests/barbican-api-666d4cc7c-5vfvn" Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.840260 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/12f5bd2b-9da9-49be-992d-d2345bb393e2-httpd-config\") pod \"neutron-8668888664-j9vrg\" (UID: \"12f5bd2b-9da9-49be-992d-d2345bb393e2\") " pod="openstack-kuttl-tests/neutron-8668888664-j9vrg" Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.840321 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/12f5bd2b-9da9-49be-992d-d2345bb393e2-combined-ca-bundle\") pod \"neutron-8668888664-j9vrg\" (UID: \"12f5bd2b-9da9-49be-992d-d2345bb393e2\") " pod="openstack-kuttl-tests/neutron-8668888664-j9vrg" Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.845341 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/12f5bd2b-9da9-49be-992d-d2345bb393e2-combined-ca-bundle\") pod \"neutron-8668888664-j9vrg\" (UID: \"12f5bd2b-9da9-49be-992d-d2345bb393e2\") " pod="openstack-kuttl-tests/neutron-8668888664-j9vrg" Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.845400 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/12f5bd2b-9da9-49be-992d-d2345bb393e2-ovndb-tls-certs\") pod \"neutron-8668888664-j9vrg\" (UID: \"12f5bd2b-9da9-49be-992d-d2345bb393e2\") " pod="openstack-kuttl-tests/neutron-8668888664-j9vrg" Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.846293 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/12f5bd2b-9da9-49be-992d-d2345bb393e2-config\") pod \"neutron-8668888664-j9vrg\" (UID: \"12f5bd2b-9da9-49be-992d-d2345bb393e2\") " pod="openstack-kuttl-tests/neutron-8668888664-j9vrg" Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.846337 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/12f5bd2b-9da9-49be-992d-d2345bb393e2-httpd-config\") pod \"neutron-8668888664-j9vrg\" (UID: \"12f5bd2b-9da9-49be-992d-d2345bb393e2\") " pod="openstack-kuttl-tests/neutron-8668888664-j9vrg" Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.854906 4558 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-tpk55\" (UniqueName: \"kubernetes.io/projected/12f5bd2b-9da9-49be-992d-d2345bb393e2-kube-api-access-tpk55\") pod \"neutron-8668888664-j9vrg\" (UID: \"12f5bd2b-9da9-49be-992d-d2345bb393e2\") " pod="openstack-kuttl-tests/neutron-8668888664-j9vrg" Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.942484 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/000dbc78-69b3-452b-8d87-9781c58b7b06-internal-tls-certs\") pod \"barbican-api-666d4cc7c-5vfvn\" (UID: \"000dbc78-69b3-452b-8d87-9781c58b7b06\") " pod="openstack-kuttl-tests/barbican-api-666d4cc7c-5vfvn" Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.942550 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/000dbc78-69b3-452b-8d87-9781c58b7b06-public-tls-certs\") pod \"barbican-api-666d4cc7c-5vfvn\" (UID: \"000dbc78-69b3-452b-8d87-9781c58b7b06\") " pod="openstack-kuttl-tests/barbican-api-666d4cc7c-5vfvn" Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.942586 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/000dbc78-69b3-452b-8d87-9781c58b7b06-combined-ca-bundle\") pod \"barbican-api-666d4cc7c-5vfvn\" (UID: \"000dbc78-69b3-452b-8d87-9781c58b7b06\") " pod="openstack-kuttl-tests/barbican-api-666d4cc7c-5vfvn" Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.942694 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/000dbc78-69b3-452b-8d87-9781c58b7b06-config-data\") pod \"barbican-api-666d4cc7c-5vfvn\" (UID: \"000dbc78-69b3-452b-8d87-9781c58b7b06\") " pod="openstack-kuttl-tests/barbican-api-666d4cc7c-5vfvn" Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.942815 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d86wh\" (UniqueName: \"kubernetes.io/projected/000dbc78-69b3-452b-8d87-9781c58b7b06-kube-api-access-d86wh\") pod \"barbican-api-666d4cc7c-5vfvn\" (UID: \"000dbc78-69b3-452b-8d87-9781c58b7b06\") " pod="openstack-kuttl-tests/barbican-api-666d4cc7c-5vfvn" Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.942836 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/000dbc78-69b3-452b-8d87-9781c58b7b06-logs\") pod \"barbican-api-666d4cc7c-5vfvn\" (UID: \"000dbc78-69b3-452b-8d87-9781c58b7b06\") " pod="openstack-kuttl-tests/barbican-api-666d4cc7c-5vfvn" Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.942865 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/000dbc78-69b3-452b-8d87-9781c58b7b06-config-data-custom\") pod \"barbican-api-666d4cc7c-5vfvn\" (UID: \"000dbc78-69b3-452b-8d87-9781c58b7b06\") " pod="openstack-kuttl-tests/barbican-api-666d4cc7c-5vfvn" Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.943861 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/000dbc78-69b3-452b-8d87-9781c58b7b06-logs\") pod \"barbican-api-666d4cc7c-5vfvn\" (UID: \"000dbc78-69b3-452b-8d87-9781c58b7b06\") " pod="openstack-kuttl-tests/barbican-api-666d4cc7c-5vfvn" Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.946186 4558 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/000dbc78-69b3-452b-8d87-9781c58b7b06-public-tls-certs\") pod \"barbican-api-666d4cc7c-5vfvn\" (UID: \"000dbc78-69b3-452b-8d87-9781c58b7b06\") " pod="openstack-kuttl-tests/barbican-api-666d4cc7c-5vfvn" Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.946511 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/000dbc78-69b3-452b-8d87-9781c58b7b06-combined-ca-bundle\") pod \"barbican-api-666d4cc7c-5vfvn\" (UID: \"000dbc78-69b3-452b-8d87-9781c58b7b06\") " pod="openstack-kuttl-tests/barbican-api-666d4cc7c-5vfvn" Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.946790 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/000dbc78-69b3-452b-8d87-9781c58b7b06-internal-tls-certs\") pod \"barbican-api-666d4cc7c-5vfvn\" (UID: \"000dbc78-69b3-452b-8d87-9781c58b7b06\") " pod="openstack-kuttl-tests/barbican-api-666d4cc7c-5vfvn" Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.947137 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/000dbc78-69b3-452b-8d87-9781c58b7b06-config-data-custom\") pod \"barbican-api-666d4cc7c-5vfvn\" (UID: \"000dbc78-69b3-452b-8d87-9781c58b7b06\") " pod="openstack-kuttl-tests/barbican-api-666d4cc7c-5vfvn" Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.947879 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/000dbc78-69b3-452b-8d87-9781c58b7b06-config-data\") pod \"barbican-api-666d4cc7c-5vfvn\" (UID: \"000dbc78-69b3-452b-8d87-9781c58b7b06\") " pod="openstack-kuttl-tests/barbican-api-666d4cc7c-5vfvn" Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.962617 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d86wh\" (UniqueName: \"kubernetes.io/projected/000dbc78-69b3-452b-8d87-9781c58b7b06-kube-api-access-d86wh\") pod \"barbican-api-666d4cc7c-5vfvn\" (UID: \"000dbc78-69b3-452b-8d87-9781c58b7b06\") " pod="openstack-kuttl-tests/barbican-api-666d4cc7c-5vfvn" Jan 20 17:23:47 crc kubenswrapper[4558]: I0120 17:23:47.978450 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-8668888664-j9vrg" Jan 20 17:23:48 crc kubenswrapper[4558]: I0120 17:23:48.059632 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-api-666d4cc7c-5vfvn" Jan 20 17:23:48 crc kubenswrapper[4558]: I0120 17:23:48.191601 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:23:48 crc kubenswrapper[4558]: I0120 17:23:48.191648 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:23:48 crc kubenswrapper[4558]: I0120 17:23:48.246679 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:23:48 crc kubenswrapper[4558]: I0120 17:23:48.252693 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:23:48 crc kubenswrapper[4558]: I0120 17:23:48.252817 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:23:48 crc kubenswrapper[4558]: I0120 17:23:48.288715 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:23:48 crc kubenswrapper[4558]: I0120 17:23:48.298244 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:23:48 crc kubenswrapper[4558]: I0120 17:23:48.303083 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:23:48 crc kubenswrapper[4558]: I0120 17:23:48.303198 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"171c3e7f-d112-4980-858e-a06eeb205241","Type":"ContainerDied","Data":"3f65739176eb2149b4677e850f398ec86a1a533184fb7686438d58b520caa661"} Jan 20 17:23:48 crc kubenswrapper[4558]: I0120 17:23:48.303233 4558 scope.go:117] "RemoveContainer" containerID="fe10b83beff97523dabad8105396d686fc6188217f34fb4f48747a3b705ea2c1" Jan 20 17:23:48 crc kubenswrapper[4558]: I0120 17:23:48.303335 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:23:48 crc kubenswrapper[4558]: I0120 17:23:48.320710 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-6866498598-2wxkp" event={"ID":"72394a99-d879-415b-9ae9-21165451ac70","Type":"ContainerStarted","Data":"57a221fa6c7f95ec5c64257c235ed54b2e2bd0a8d13ecc18c3f5f0a4dce5307b"} Jan 20 17:23:48 crc kubenswrapper[4558]: I0120 17:23:48.321622 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:23:48 crc kubenswrapper[4558]: I0120 17:23:48.324700 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:23:48 crc kubenswrapper[4558]: I0120 17:23:48.324769 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:23:48 crc kubenswrapper[4558]: I0120 17:23:48.329602 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:23:48 crc kubenswrapper[4558]: I0120 17:23:48.329640 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/keystone-6866498598-2wxkp" Jan 20 17:23:48 crc kubenswrapper[4558]: I0120 17:23:48.340220 4558 scope.go:117] "RemoveContainer" containerID="e96266598d97f1aa937e1d5e0c2c2170f0641464f13f72b74edd6a22a34006ac" Jan 20 17:23:48 crc kubenswrapper[4558]: I0120 17:23:48.370840 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:23:48 crc kubenswrapper[4558]: I0120 17:23:48.389256 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:23:48 crc kubenswrapper[4558]: I0120 17:23:48.412440 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:23:48 crc kubenswrapper[4558]: I0120 17:23:48.413954 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:23:48 crc kubenswrapper[4558]: I0120 17:23:48.420760 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:23:48 crc kubenswrapper[4558]: I0120 17:23:48.421714 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-cinder-public-svc" Jan 20 17:23:48 crc kubenswrapper[4558]: I0120 17:23:48.421749 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-cinder-internal-svc" Jan 20 17:23:48 crc kubenswrapper[4558]: I0120 17:23:48.421760 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/keystone-6866498598-2wxkp" podStartSLOduration=2.42173893 podStartE2EDuration="2.42173893s" podCreationTimestamp="2026-01-20 17:23:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:23:48.383003576 +0000 UTC m=+2522.143341542" watchObservedRunningTime="2026-01-20 17:23:48.42173893 +0000 UTC m=+2522.182076897" Jan 20 17:23:48 crc kubenswrapper[4558]: I0120 17:23:48.422702 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-api-config-data" Jan 20 17:23:48 crc kubenswrapper[4558]: I0120 17:23:48.474075 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-8668888664-j9vrg"] Jan 20 17:23:48 crc kubenswrapper[4558]: W0120 17:23:48.482918 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod12f5bd2b_9da9_49be_992d_d2345bb393e2.slice/crio-e8bbd31c79781590df1c20f76b22ddb8a30637baec2b4b0a79c197143353f273 WatchSource:0}: Error finding container e8bbd31c79781590df1c20f76b22ddb8a30637baec2b4b0a79c197143353f273: Status 404 returned error can't find the container with id e8bbd31c79781590df1c20f76b22ddb8a30637baec2b4b0a79c197143353f273 Jan 20 17:23:48 crc kubenswrapper[4558]: I0120 17:23:48.565820 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wpps6\" (UniqueName: \"kubernetes.io/projected/174beb08-6b5c-40ef-9809-a4e17c718392-kube-api-access-wpps6\") pod \"cinder-api-0\" (UID: \"174beb08-6b5c-40ef-9809-a4e17c718392\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:23:48 crc kubenswrapper[4558]: I0120 17:23:48.565861 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/174beb08-6b5c-40ef-9809-a4e17c718392-scripts\") pod \"cinder-api-0\" (UID: \"174beb08-6b5c-40ef-9809-a4e17c718392\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:23:48 crc kubenswrapper[4558]: I0120 17:23:48.565899 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/174beb08-6b5c-40ef-9809-a4e17c718392-config-data-custom\") pod \"cinder-api-0\" (UID: \"174beb08-6b5c-40ef-9809-a4e17c718392\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:23:48 crc kubenswrapper[4558]: I0120 17:23:48.565964 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/174beb08-6b5c-40ef-9809-a4e17c718392-logs\") pod \"cinder-api-0\" (UID: \"174beb08-6b5c-40ef-9809-a4e17c718392\") " 
pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:23:48 crc kubenswrapper[4558]: I0120 17:23:48.566036 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/174beb08-6b5c-40ef-9809-a4e17c718392-config-data\") pod \"cinder-api-0\" (UID: \"174beb08-6b5c-40ef-9809-a4e17c718392\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:23:48 crc kubenswrapper[4558]: I0120 17:23:48.566124 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/174beb08-6b5c-40ef-9809-a4e17c718392-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"174beb08-6b5c-40ef-9809-a4e17c718392\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:23:48 crc kubenswrapper[4558]: I0120 17:23:48.566187 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/174beb08-6b5c-40ef-9809-a4e17c718392-etc-machine-id\") pod \"cinder-api-0\" (UID: \"174beb08-6b5c-40ef-9809-a4e17c718392\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:23:48 crc kubenswrapper[4558]: I0120 17:23:48.566205 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/174beb08-6b5c-40ef-9809-a4e17c718392-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"174beb08-6b5c-40ef-9809-a4e17c718392\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:23:48 crc kubenswrapper[4558]: I0120 17:23:48.566281 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/174beb08-6b5c-40ef-9809-a4e17c718392-public-tls-certs\") pod \"cinder-api-0\" (UID: \"174beb08-6b5c-40ef-9809-a4e17c718392\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:23:48 crc kubenswrapper[4558]: I0120 17:23:48.583586 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="171c3e7f-d112-4980-858e-a06eeb205241" path="/var/lib/kubelet/pods/171c3e7f-d112-4980-858e-a06eeb205241/volumes" Jan 20 17:23:48 crc kubenswrapper[4558]: I0120 17:23:48.584394 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-api-666d4cc7c-5vfvn"] Jan 20 17:23:48 crc kubenswrapper[4558]: I0120 17:23:48.668111 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/174beb08-6b5c-40ef-9809-a4e17c718392-logs\") pod \"cinder-api-0\" (UID: \"174beb08-6b5c-40ef-9809-a4e17c718392\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:23:48 crc kubenswrapper[4558]: I0120 17:23:48.668215 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/174beb08-6b5c-40ef-9809-a4e17c718392-config-data\") pod \"cinder-api-0\" (UID: \"174beb08-6b5c-40ef-9809-a4e17c718392\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:23:48 crc kubenswrapper[4558]: I0120 17:23:48.668323 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/174beb08-6b5c-40ef-9809-a4e17c718392-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"174beb08-6b5c-40ef-9809-a4e17c718392\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:23:48 crc kubenswrapper[4558]: I0120 17:23:48.668371 
4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/174beb08-6b5c-40ef-9809-a4e17c718392-etc-machine-id\") pod \"cinder-api-0\" (UID: \"174beb08-6b5c-40ef-9809-a4e17c718392\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:23:48 crc kubenswrapper[4558]: I0120 17:23:48.668389 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/174beb08-6b5c-40ef-9809-a4e17c718392-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"174beb08-6b5c-40ef-9809-a4e17c718392\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:23:48 crc kubenswrapper[4558]: I0120 17:23:48.668458 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/174beb08-6b5c-40ef-9809-a4e17c718392-public-tls-certs\") pod \"cinder-api-0\" (UID: \"174beb08-6b5c-40ef-9809-a4e17c718392\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:23:48 crc kubenswrapper[4558]: I0120 17:23:48.668507 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wpps6\" (UniqueName: \"kubernetes.io/projected/174beb08-6b5c-40ef-9809-a4e17c718392-kube-api-access-wpps6\") pod \"cinder-api-0\" (UID: \"174beb08-6b5c-40ef-9809-a4e17c718392\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:23:48 crc kubenswrapper[4558]: I0120 17:23:48.668528 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/174beb08-6b5c-40ef-9809-a4e17c718392-scripts\") pod \"cinder-api-0\" (UID: \"174beb08-6b5c-40ef-9809-a4e17c718392\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:23:48 crc kubenswrapper[4558]: I0120 17:23:48.668561 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/174beb08-6b5c-40ef-9809-a4e17c718392-config-data-custom\") pod \"cinder-api-0\" (UID: \"174beb08-6b5c-40ef-9809-a4e17c718392\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:23:48 crc kubenswrapper[4558]: I0120 17:23:48.670131 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/174beb08-6b5c-40ef-9809-a4e17c718392-logs\") pod \"cinder-api-0\" (UID: \"174beb08-6b5c-40ef-9809-a4e17c718392\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:23:48 crc kubenswrapper[4558]: I0120 17:23:48.671804 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/174beb08-6b5c-40ef-9809-a4e17c718392-etc-machine-id\") pod \"cinder-api-0\" (UID: \"174beb08-6b5c-40ef-9809-a4e17c718392\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:23:48 crc kubenswrapper[4558]: I0120 17:23:48.685432 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/174beb08-6b5c-40ef-9809-a4e17c718392-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"174beb08-6b5c-40ef-9809-a4e17c718392\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:23:48 crc kubenswrapper[4558]: I0120 17:23:48.686066 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/174beb08-6b5c-40ef-9809-a4e17c718392-scripts\") pod \"cinder-api-0\" (UID: \"174beb08-6b5c-40ef-9809-a4e17c718392\") " 
pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:23:48 crc kubenswrapper[4558]: I0120 17:23:48.686600 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/174beb08-6b5c-40ef-9809-a4e17c718392-config-data\") pod \"cinder-api-0\" (UID: \"174beb08-6b5c-40ef-9809-a4e17c718392\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:23:48 crc kubenswrapper[4558]: I0120 17:23:48.689826 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/174beb08-6b5c-40ef-9809-a4e17c718392-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"174beb08-6b5c-40ef-9809-a4e17c718392\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:23:48 crc kubenswrapper[4558]: I0120 17:23:48.694545 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/174beb08-6b5c-40ef-9809-a4e17c718392-public-tls-certs\") pod \"cinder-api-0\" (UID: \"174beb08-6b5c-40ef-9809-a4e17c718392\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:23:48 crc kubenswrapper[4558]: I0120 17:23:48.695700 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/174beb08-6b5c-40ef-9809-a4e17c718392-config-data-custom\") pod \"cinder-api-0\" (UID: \"174beb08-6b5c-40ef-9809-a4e17c718392\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:23:48 crc kubenswrapper[4558]: I0120 17:23:48.700637 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wpps6\" (UniqueName: \"kubernetes.io/projected/174beb08-6b5c-40ef-9809-a4e17c718392-kube-api-access-wpps6\") pod \"cinder-api-0\" (UID: \"174beb08-6b5c-40ef-9809-a4e17c718392\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:23:48 crc kubenswrapper[4558]: I0120 17:23:48.733677 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:23:49 crc kubenswrapper[4558]: I0120 17:23:49.317851 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:23:49 crc kubenswrapper[4558]: I0120 17:23:49.346114 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-8668888664-j9vrg" event={"ID":"12f5bd2b-9da9-49be-992d-d2345bb393e2","Type":"ContainerStarted","Data":"22d5113e75d9189dc9caeca78d1cfc5da4714ea69e4a871cf431f53e792eedf7"} Jan 20 17:23:49 crc kubenswrapper[4558]: I0120 17:23:49.346196 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-8668888664-j9vrg" event={"ID":"12f5bd2b-9da9-49be-992d-d2345bb393e2","Type":"ContainerStarted","Data":"3afad2651184b267144218ea011d4cc9a8d8837d442d3000b7e3730c47f50841"} Jan 20 17:23:49 crc kubenswrapper[4558]: I0120 17:23:49.346209 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-8668888664-j9vrg" event={"ID":"12f5bd2b-9da9-49be-992d-d2345bb393e2","Type":"ContainerStarted","Data":"e8bbd31c79781590df1c20f76b22ddb8a30637baec2b4b0a79c197143353f273"} Jan 20 17:23:49 crc kubenswrapper[4558]: I0120 17:23:49.347429 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/neutron-8668888664-j9vrg" Jan 20 17:23:49 crc kubenswrapper[4558]: I0120 17:23:49.360661 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-666d4cc7c-5vfvn" event={"ID":"000dbc78-69b3-452b-8d87-9781c58b7b06","Type":"ContainerStarted","Data":"9c3cde8598228c2fcc2d02a66724249be75d689574a13d9797a4510b0c5f177a"} Jan 20 17:23:49 crc kubenswrapper[4558]: I0120 17:23:49.360690 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/barbican-api-666d4cc7c-5vfvn" Jan 20 17:23:49 crc kubenswrapper[4558]: I0120 17:23:49.360701 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-666d4cc7c-5vfvn" event={"ID":"000dbc78-69b3-452b-8d87-9781c58b7b06","Type":"ContainerStarted","Data":"da0acfbf374913ebf99cfd01ff5c7b709490f7d87e56e16f3846a5f546e7bbbf"} Jan 20 17:23:49 crc kubenswrapper[4558]: I0120 17:23:49.360715 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-666d4cc7c-5vfvn" event={"ID":"000dbc78-69b3-452b-8d87-9781c58b7b06","Type":"ContainerStarted","Data":"c73bd646b1102096930ce3ac3e0d4246c05222379157d2eddcc65e7d0c3e3f03"} Jan 20 17:23:49 crc kubenswrapper[4558]: I0120 17:23:49.360736 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/barbican-api-666d4cc7c-5vfvn" Jan 20 17:23:49 crc kubenswrapper[4558]: I0120 17:23:49.393460 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/neutron-8668888664-j9vrg" podStartSLOduration=2.39344166 podStartE2EDuration="2.39344166s" podCreationTimestamp="2026-01-20 17:23:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:23:49.370371272 +0000 UTC m=+2523.130709240" watchObservedRunningTime="2026-01-20 17:23:49.39344166 +0000 UTC m=+2523.153779617" Jan 20 17:23:49 crc kubenswrapper[4558]: I0120 17:23:49.398607 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-api-666d4cc7c-5vfvn" podStartSLOduration=2.398600123 
podStartE2EDuration="2.398600123s" podCreationTimestamp="2026-01-20 17:23:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:23:49.386938697 +0000 UTC m=+2523.147276664" watchObservedRunningTime="2026-01-20 17:23:49.398600123 +0000 UTC m=+2523.158938090" Jan 20 17:23:50 crc kubenswrapper[4558]: I0120 17:23:50.371982 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"174beb08-6b5c-40ef-9809-a4e17c718392","Type":"ContainerStarted","Data":"8a7fa99f3021ee9c8829c1eabdc8f6e24a1e1ce1bc1bcf976f8b0fa8e46ef699"} Jan 20 17:23:50 crc kubenswrapper[4558]: I0120 17:23:50.372562 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"174beb08-6b5c-40ef-9809-a4e17c718392","Type":"ContainerStarted","Data":"a1b8ae8a6fb04666cfb0a4f2a9ffa1b84b72f9d6e76dfc7648b09de4c44e719b"} Jan 20 17:23:50 crc kubenswrapper[4558]: I0120 17:23:50.372107 4558 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 20 17:23:50 crc kubenswrapper[4558]: I0120 17:23:50.372595 4558 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 20 17:23:50 crc kubenswrapper[4558]: I0120 17:23:50.372063 4558 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 20 17:23:50 crc kubenswrapper[4558]: I0120 17:23:50.372787 4558 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 20 17:23:50 crc kubenswrapper[4558]: I0120 17:23:50.415497 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:23:50 crc kubenswrapper[4558]: I0120 17:23:50.445157 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:23:50 crc kubenswrapper[4558]: I0120 17:23:50.585711 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:23:51 crc kubenswrapper[4558]: I0120 17:23:51.279397 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/barbican-api-547b6df4c5-87fjk" Jan 20 17:23:51 crc kubenswrapper[4558]: I0120 17:23:51.303368 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:23:51 crc kubenswrapper[4558]: I0120 17:23:51.382662 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"174beb08-6b5c-40ef-9809-a4e17c718392","Type":"ContainerStarted","Data":"025c28c93d5af302d74da36c6dd18d125f4d085beedce3d041db5da919c5c252"} Jan 20 17:23:51 crc kubenswrapper[4558]: I0120 17:23:51.382800 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:23:51 crc kubenswrapper[4558]: I0120 17:23:51.406704 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/cinder-api-0" podStartSLOduration=3.406683436 podStartE2EDuration="3.406683436s" podCreationTimestamp="2026-01-20 17:23:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:23:51.399087327 +0000 UTC m=+2525.159425294" watchObservedRunningTime="2026-01-20 17:23:51.406683436 +0000 UTC 
m=+2525.167021403" Jan 20 17:23:51 crc kubenswrapper[4558]: I0120 17:23:51.478238 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/barbican-api-547b6df4c5-87fjk" Jan 20 17:23:51 crc kubenswrapper[4558]: I0120 17:23:51.686727 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:23:51 crc kubenswrapper[4558]: I0120 17:23:51.731050 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:23:51 crc kubenswrapper[4558]: I0120 17:23:51.794183 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-5855ff84bf-bczz4"] Jan 20 17:23:51 crc kubenswrapper[4558]: I0120 17:23:51.795959 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-5855ff84bf-bczz4" Jan 20 17:23:51 crc kubenswrapper[4558]: I0120 17:23:51.797827 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-neutron-public-svc" Jan 20 17:23:51 crc kubenswrapper[4558]: I0120 17:23:51.798066 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-neutron-internal-svc" Jan 20 17:23:51 crc kubenswrapper[4558]: I0120 17:23:51.816330 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-5855ff84bf-bczz4"] Jan 20 17:23:51 crc kubenswrapper[4558]: I0120 17:23:51.958626 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/bd387ac0-76d7-418c-a34f-19be32ae37f9-ovndb-tls-certs\") pod \"neutron-5855ff84bf-bczz4\" (UID: \"bd387ac0-76d7-418c-a34f-19be32ae37f9\") " pod="openstack-kuttl-tests/neutron-5855ff84bf-bczz4" Jan 20 17:23:51 crc kubenswrapper[4558]: I0120 17:23:51.958712 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/bd387ac0-76d7-418c-a34f-19be32ae37f9-httpd-config\") pod \"neutron-5855ff84bf-bczz4\" (UID: \"bd387ac0-76d7-418c-a34f-19be32ae37f9\") " pod="openstack-kuttl-tests/neutron-5855ff84bf-bczz4" Jan 20 17:23:51 crc kubenswrapper[4558]: I0120 17:23:51.958873 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xnr96\" (UniqueName: \"kubernetes.io/projected/bd387ac0-76d7-418c-a34f-19be32ae37f9-kube-api-access-xnr96\") pod \"neutron-5855ff84bf-bczz4\" (UID: \"bd387ac0-76d7-418c-a34f-19be32ae37f9\") " pod="openstack-kuttl-tests/neutron-5855ff84bf-bczz4" Jan 20 17:23:51 crc kubenswrapper[4558]: I0120 17:23:51.958949 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/bd387ac0-76d7-418c-a34f-19be32ae37f9-internal-tls-certs\") pod \"neutron-5855ff84bf-bczz4\" (UID: \"bd387ac0-76d7-418c-a34f-19be32ae37f9\") " pod="openstack-kuttl-tests/neutron-5855ff84bf-bczz4" Jan 20 17:23:51 crc kubenswrapper[4558]: I0120 17:23:51.959023 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/bd387ac0-76d7-418c-a34f-19be32ae37f9-config\") pod \"neutron-5855ff84bf-bczz4\" (UID: \"bd387ac0-76d7-418c-a34f-19be32ae37f9\") " pod="openstack-kuttl-tests/neutron-5855ff84bf-bczz4" Jan 20 17:23:51 crc kubenswrapper[4558]: 
I0120 17:23:51.959127 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd387ac0-76d7-418c-a34f-19be32ae37f9-combined-ca-bundle\") pod \"neutron-5855ff84bf-bczz4\" (UID: \"bd387ac0-76d7-418c-a34f-19be32ae37f9\") " pod="openstack-kuttl-tests/neutron-5855ff84bf-bczz4" Jan 20 17:23:51 crc kubenswrapper[4558]: I0120 17:23:51.959184 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bd387ac0-76d7-418c-a34f-19be32ae37f9-public-tls-certs\") pod \"neutron-5855ff84bf-bczz4\" (UID: \"bd387ac0-76d7-418c-a34f-19be32ae37f9\") " pod="openstack-kuttl-tests/neutron-5855ff84bf-bczz4" Jan 20 17:23:52 crc kubenswrapper[4558]: I0120 17:23:52.061019 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xnr96\" (UniqueName: \"kubernetes.io/projected/bd387ac0-76d7-418c-a34f-19be32ae37f9-kube-api-access-xnr96\") pod \"neutron-5855ff84bf-bczz4\" (UID: \"bd387ac0-76d7-418c-a34f-19be32ae37f9\") " pod="openstack-kuttl-tests/neutron-5855ff84bf-bczz4" Jan 20 17:23:52 crc kubenswrapper[4558]: I0120 17:23:52.061080 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/bd387ac0-76d7-418c-a34f-19be32ae37f9-internal-tls-certs\") pod \"neutron-5855ff84bf-bczz4\" (UID: \"bd387ac0-76d7-418c-a34f-19be32ae37f9\") " pod="openstack-kuttl-tests/neutron-5855ff84bf-bczz4" Jan 20 17:23:52 crc kubenswrapper[4558]: I0120 17:23:52.061143 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/bd387ac0-76d7-418c-a34f-19be32ae37f9-config\") pod \"neutron-5855ff84bf-bczz4\" (UID: \"bd387ac0-76d7-418c-a34f-19be32ae37f9\") " pod="openstack-kuttl-tests/neutron-5855ff84bf-bczz4" Jan 20 17:23:52 crc kubenswrapper[4558]: I0120 17:23:52.061236 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd387ac0-76d7-418c-a34f-19be32ae37f9-combined-ca-bundle\") pod \"neutron-5855ff84bf-bczz4\" (UID: \"bd387ac0-76d7-418c-a34f-19be32ae37f9\") " pod="openstack-kuttl-tests/neutron-5855ff84bf-bczz4" Jan 20 17:23:52 crc kubenswrapper[4558]: I0120 17:23:52.061259 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bd387ac0-76d7-418c-a34f-19be32ae37f9-public-tls-certs\") pod \"neutron-5855ff84bf-bczz4\" (UID: \"bd387ac0-76d7-418c-a34f-19be32ae37f9\") " pod="openstack-kuttl-tests/neutron-5855ff84bf-bczz4" Jan 20 17:23:52 crc kubenswrapper[4558]: I0120 17:23:52.061316 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/bd387ac0-76d7-418c-a34f-19be32ae37f9-ovndb-tls-certs\") pod \"neutron-5855ff84bf-bczz4\" (UID: \"bd387ac0-76d7-418c-a34f-19be32ae37f9\") " pod="openstack-kuttl-tests/neutron-5855ff84bf-bczz4" Jan 20 17:23:52 crc kubenswrapper[4558]: I0120 17:23:52.061347 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/bd387ac0-76d7-418c-a34f-19be32ae37f9-httpd-config\") pod \"neutron-5855ff84bf-bczz4\" (UID: \"bd387ac0-76d7-418c-a34f-19be32ae37f9\") " pod="openstack-kuttl-tests/neutron-5855ff84bf-bczz4" Jan 20 17:23:52 
crc kubenswrapper[4558]: I0120 17:23:52.070031 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/bd387ac0-76d7-418c-a34f-19be32ae37f9-ovndb-tls-certs\") pod \"neutron-5855ff84bf-bczz4\" (UID: \"bd387ac0-76d7-418c-a34f-19be32ae37f9\") " pod="openstack-kuttl-tests/neutron-5855ff84bf-bczz4" Jan 20 17:23:52 crc kubenswrapper[4558]: I0120 17:23:52.070136 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/bd387ac0-76d7-418c-a34f-19be32ae37f9-config\") pod \"neutron-5855ff84bf-bczz4\" (UID: \"bd387ac0-76d7-418c-a34f-19be32ae37f9\") " pod="openstack-kuttl-tests/neutron-5855ff84bf-bczz4" Jan 20 17:23:52 crc kubenswrapper[4558]: I0120 17:23:52.071105 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bd387ac0-76d7-418c-a34f-19be32ae37f9-public-tls-certs\") pod \"neutron-5855ff84bf-bczz4\" (UID: \"bd387ac0-76d7-418c-a34f-19be32ae37f9\") " pod="openstack-kuttl-tests/neutron-5855ff84bf-bczz4" Jan 20 17:23:52 crc kubenswrapper[4558]: I0120 17:23:52.072350 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/bd387ac0-76d7-418c-a34f-19be32ae37f9-httpd-config\") pod \"neutron-5855ff84bf-bczz4\" (UID: \"bd387ac0-76d7-418c-a34f-19be32ae37f9\") " pod="openstack-kuttl-tests/neutron-5855ff84bf-bczz4" Jan 20 17:23:52 crc kubenswrapper[4558]: I0120 17:23:52.078155 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xnr96\" (UniqueName: \"kubernetes.io/projected/bd387ac0-76d7-418c-a34f-19be32ae37f9-kube-api-access-xnr96\") pod \"neutron-5855ff84bf-bczz4\" (UID: \"bd387ac0-76d7-418c-a34f-19be32ae37f9\") " pod="openstack-kuttl-tests/neutron-5855ff84bf-bczz4" Jan 20 17:23:52 crc kubenswrapper[4558]: I0120 17:23:52.083481 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/bd387ac0-76d7-418c-a34f-19be32ae37f9-internal-tls-certs\") pod \"neutron-5855ff84bf-bczz4\" (UID: \"bd387ac0-76d7-418c-a34f-19be32ae37f9\") " pod="openstack-kuttl-tests/neutron-5855ff84bf-bczz4" Jan 20 17:23:52 crc kubenswrapper[4558]: I0120 17:23:52.085677 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd387ac0-76d7-418c-a34f-19be32ae37f9-combined-ca-bundle\") pod \"neutron-5855ff84bf-bczz4\" (UID: \"bd387ac0-76d7-418c-a34f-19be32ae37f9\") " pod="openstack-kuttl-tests/neutron-5855ff84bf-bczz4" Jan 20 17:23:52 crc kubenswrapper[4558]: I0120 17:23:52.113222 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-5855ff84bf-bczz4" Jan 20 17:23:52 crc kubenswrapper[4558]: I0120 17:23:52.394967 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-scheduler-0" podUID="3afcf923-8d53-4638-bba0-09afa338a2e0" containerName="probe" containerID="cri-o://1d6dc3037ab7163e8ca4ec36d788250ba4e3e3a24cbb39f3ee4b6e153c1ed4b8" gracePeriod=30 Jan 20 17:23:52 crc kubenswrapper[4558]: I0120 17:23:52.394945 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-scheduler-0" podUID="3afcf923-8d53-4638-bba0-09afa338a2e0" containerName="cinder-scheduler" containerID="cri-o://590199eee583708baf8a6aaf5fd46b030cdff78491c91231b22f6e1896974f18" gracePeriod=30 Jan 20 17:23:52 crc kubenswrapper[4558]: I0120 17:23:52.575514 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-5855ff84bf-bczz4"] Jan 20 17:23:53 crc kubenswrapper[4558]: I0120 17:23:53.415436 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-5855ff84bf-bczz4" event={"ID":"bd387ac0-76d7-418c-a34f-19be32ae37f9","Type":"ContainerStarted","Data":"976dcb4a79daec7424bb1f359fd90302b7391b2695da9a6003214e35e5292830"} Jan 20 17:23:53 crc kubenswrapper[4558]: I0120 17:23:53.415804 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-5855ff84bf-bczz4" event={"ID":"bd387ac0-76d7-418c-a34f-19be32ae37f9","Type":"ContainerStarted","Data":"e8b21ab038396136a9090ecf6bdb12372852799fe29e7b256ebbd56bdce61f7b"} Jan 20 17:23:53 crc kubenswrapper[4558]: I0120 17:23:53.415816 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-5855ff84bf-bczz4" event={"ID":"bd387ac0-76d7-418c-a34f-19be32ae37f9","Type":"ContainerStarted","Data":"21d9c25d51c6ffa8cfbdb4331abc7cf8daae151992e7c397f352af90b1798d2c"} Jan 20 17:23:53 crc kubenswrapper[4558]: I0120 17:23:53.416211 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/neutron-5855ff84bf-bczz4" Jan 20 17:23:53 crc kubenswrapper[4558]: I0120 17:23:53.422458 4558 generic.go:334] "Generic (PLEG): container finished" podID="3afcf923-8d53-4638-bba0-09afa338a2e0" containerID="1d6dc3037ab7163e8ca4ec36d788250ba4e3e3a24cbb39f3ee4b6e153c1ed4b8" exitCode=0 Jan 20 17:23:53 crc kubenswrapper[4558]: I0120 17:23:53.422533 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"3afcf923-8d53-4638-bba0-09afa338a2e0","Type":"ContainerDied","Data":"1d6dc3037ab7163e8ca4ec36d788250ba4e3e3a24cbb39f3ee4b6e153c1ed4b8"} Jan 20 17:23:53 crc kubenswrapper[4558]: I0120 17:23:53.450796 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/neutron-5855ff84bf-bczz4" podStartSLOduration=2.450769283 podStartE2EDuration="2.450769283s" podCreationTimestamp="2026-01-20 17:23:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:23:53.444504739 +0000 UTC m=+2527.204842706" watchObservedRunningTime="2026-01-20 17:23:53.450769283 +0000 UTC m=+2527.211107250" Jan 20 17:23:53 crc kubenswrapper[4558]: I0120 17:23:53.798124 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:23:53 crc kubenswrapper[4558]: I0120 17:23:53.909606 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3afcf923-8d53-4638-bba0-09afa338a2e0-scripts\") pod \"3afcf923-8d53-4638-bba0-09afa338a2e0\" (UID: \"3afcf923-8d53-4638-bba0-09afa338a2e0\") " Jan 20 17:23:53 crc kubenswrapper[4558]: I0120 17:23:53.909945 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3afcf923-8d53-4638-bba0-09afa338a2e0-etc-machine-id\") pod \"3afcf923-8d53-4638-bba0-09afa338a2e0\" (UID: \"3afcf923-8d53-4638-bba0-09afa338a2e0\") " Jan 20 17:23:53 crc kubenswrapper[4558]: I0120 17:23:53.910004 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3afcf923-8d53-4638-bba0-09afa338a2e0-config-data-custom\") pod \"3afcf923-8d53-4638-bba0-09afa338a2e0\" (UID: \"3afcf923-8d53-4638-bba0-09afa338a2e0\") " Jan 20 17:23:53 crc kubenswrapper[4558]: I0120 17:23:53.910037 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3afcf923-8d53-4638-bba0-09afa338a2e0-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "3afcf923-8d53-4638-bba0-09afa338a2e0" (UID: "3afcf923-8d53-4638-bba0-09afa338a2e0"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:23:53 crc kubenswrapper[4558]: I0120 17:23:53.910213 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hc4s8\" (UniqueName: \"kubernetes.io/projected/3afcf923-8d53-4638-bba0-09afa338a2e0-kube-api-access-hc4s8\") pod \"3afcf923-8d53-4638-bba0-09afa338a2e0\" (UID: \"3afcf923-8d53-4638-bba0-09afa338a2e0\") " Jan 20 17:23:53 crc kubenswrapper[4558]: I0120 17:23:53.910282 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3afcf923-8d53-4638-bba0-09afa338a2e0-config-data\") pod \"3afcf923-8d53-4638-bba0-09afa338a2e0\" (UID: \"3afcf923-8d53-4638-bba0-09afa338a2e0\") " Jan 20 17:23:53 crc kubenswrapper[4558]: I0120 17:23:53.910372 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3afcf923-8d53-4638-bba0-09afa338a2e0-combined-ca-bundle\") pod \"3afcf923-8d53-4638-bba0-09afa338a2e0\" (UID: \"3afcf923-8d53-4638-bba0-09afa338a2e0\") " Jan 20 17:23:53 crc kubenswrapper[4558]: I0120 17:23:53.911230 4558 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3afcf923-8d53-4638-bba0-09afa338a2e0-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:53 crc kubenswrapper[4558]: I0120 17:23:53.917992 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3afcf923-8d53-4638-bba0-09afa338a2e0-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "3afcf923-8d53-4638-bba0-09afa338a2e0" (UID: "3afcf923-8d53-4638-bba0-09afa338a2e0"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:23:53 crc kubenswrapper[4558]: I0120 17:23:53.918050 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3afcf923-8d53-4638-bba0-09afa338a2e0-kube-api-access-hc4s8" (OuterVolumeSpecName: "kube-api-access-hc4s8") pod "3afcf923-8d53-4638-bba0-09afa338a2e0" (UID: "3afcf923-8d53-4638-bba0-09afa338a2e0"). InnerVolumeSpecName "kube-api-access-hc4s8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:23:53 crc kubenswrapper[4558]: I0120 17:23:53.924305 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3afcf923-8d53-4638-bba0-09afa338a2e0-scripts" (OuterVolumeSpecName: "scripts") pod "3afcf923-8d53-4638-bba0-09afa338a2e0" (UID: "3afcf923-8d53-4638-bba0-09afa338a2e0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:23:53 crc kubenswrapper[4558]: I0120 17:23:53.959356 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3afcf923-8d53-4638-bba0-09afa338a2e0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3afcf923-8d53-4638-bba0-09afa338a2e0" (UID: "3afcf923-8d53-4638-bba0-09afa338a2e0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:23:53 crc kubenswrapper[4558]: I0120 17:23:53.991745 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3afcf923-8d53-4638-bba0-09afa338a2e0-config-data" (OuterVolumeSpecName: "config-data") pod "3afcf923-8d53-4638-bba0-09afa338a2e0" (UID: "3afcf923-8d53-4638-bba0-09afa338a2e0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:23:54 crc kubenswrapper[4558]: I0120 17:23:54.013784 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3afcf923-8d53-4638-bba0-09afa338a2e0-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:54 crc kubenswrapper[4558]: I0120 17:23:54.013808 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hc4s8\" (UniqueName: \"kubernetes.io/projected/3afcf923-8d53-4638-bba0-09afa338a2e0-kube-api-access-hc4s8\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:54 crc kubenswrapper[4558]: I0120 17:23:54.013819 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3afcf923-8d53-4638-bba0-09afa338a2e0-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:54 crc kubenswrapper[4558]: I0120 17:23:54.013830 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3afcf923-8d53-4638-bba0-09afa338a2e0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:54 crc kubenswrapper[4558]: I0120 17:23:54.013839 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3afcf923-8d53-4638-bba0-09afa338a2e0-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:54 crc kubenswrapper[4558]: I0120 17:23:54.405264 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/barbican-api-666d4cc7c-5vfvn" Jan 20 17:23:54 crc kubenswrapper[4558]: I0120 17:23:54.438620 4558 generic.go:334] "Generic (PLEG): container finished" podID="3afcf923-8d53-4638-bba0-09afa338a2e0" 
containerID="590199eee583708baf8a6aaf5fd46b030cdff78491c91231b22f6e1896974f18" exitCode=0 Jan 20 17:23:54 crc kubenswrapper[4558]: I0120 17:23:54.438866 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"3afcf923-8d53-4638-bba0-09afa338a2e0","Type":"ContainerDied","Data":"590199eee583708baf8a6aaf5fd46b030cdff78491c91231b22f6e1896974f18"} Jan 20 17:23:54 crc kubenswrapper[4558]: I0120 17:23:54.438922 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:23:54 crc kubenswrapper[4558]: I0120 17:23:54.438977 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"3afcf923-8d53-4638-bba0-09afa338a2e0","Type":"ContainerDied","Data":"8c9ae0e517861e049938b2bbfe8ea91034126f24fe10242f797d96cfcbe6a5cd"} Jan 20 17:23:54 crc kubenswrapper[4558]: I0120 17:23:54.439006 4558 scope.go:117] "RemoveContainer" containerID="1d6dc3037ab7163e8ca4ec36d788250ba4e3e3a24cbb39f3ee4b6e153c1ed4b8" Jan 20 17:23:54 crc kubenswrapper[4558]: I0120 17:23:54.468901 4558 scope.go:117] "RemoveContainer" containerID="590199eee583708baf8a6aaf5fd46b030cdff78491c91231b22f6e1896974f18" Jan 20 17:23:54 crc kubenswrapper[4558]: I0120 17:23:54.477185 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:23:54 crc kubenswrapper[4558]: I0120 17:23:54.487685 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:23:54 crc kubenswrapper[4558]: I0120 17:23:54.489047 4558 scope.go:117] "RemoveContainer" containerID="1d6dc3037ab7163e8ca4ec36d788250ba4e3e3a24cbb39f3ee4b6e153c1ed4b8" Jan 20 17:23:54 crc kubenswrapper[4558]: E0120 17:23:54.489474 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1d6dc3037ab7163e8ca4ec36d788250ba4e3e3a24cbb39f3ee4b6e153c1ed4b8\": container with ID starting with 1d6dc3037ab7163e8ca4ec36d788250ba4e3e3a24cbb39f3ee4b6e153c1ed4b8 not found: ID does not exist" containerID="1d6dc3037ab7163e8ca4ec36d788250ba4e3e3a24cbb39f3ee4b6e153c1ed4b8" Jan 20 17:23:54 crc kubenswrapper[4558]: I0120 17:23:54.489503 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d6dc3037ab7163e8ca4ec36d788250ba4e3e3a24cbb39f3ee4b6e153c1ed4b8"} err="failed to get container status \"1d6dc3037ab7163e8ca4ec36d788250ba4e3e3a24cbb39f3ee4b6e153c1ed4b8\": rpc error: code = NotFound desc = could not find container \"1d6dc3037ab7163e8ca4ec36d788250ba4e3e3a24cbb39f3ee4b6e153c1ed4b8\": container with ID starting with 1d6dc3037ab7163e8ca4ec36d788250ba4e3e3a24cbb39f3ee4b6e153c1ed4b8 not found: ID does not exist" Jan 20 17:23:54 crc kubenswrapper[4558]: I0120 17:23:54.489525 4558 scope.go:117] "RemoveContainer" containerID="590199eee583708baf8a6aaf5fd46b030cdff78491c91231b22f6e1896974f18" Jan 20 17:23:54 crc kubenswrapper[4558]: E0120 17:23:54.489776 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"590199eee583708baf8a6aaf5fd46b030cdff78491c91231b22f6e1896974f18\": container with ID starting with 590199eee583708baf8a6aaf5fd46b030cdff78491c91231b22f6e1896974f18 not found: ID does not exist" containerID="590199eee583708baf8a6aaf5fd46b030cdff78491c91231b22f6e1896974f18" Jan 20 17:23:54 crc kubenswrapper[4558]: I0120 17:23:54.489797 4558 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"590199eee583708baf8a6aaf5fd46b030cdff78491c91231b22f6e1896974f18"} err="failed to get container status \"590199eee583708baf8a6aaf5fd46b030cdff78491c91231b22f6e1896974f18\": rpc error: code = NotFound desc = could not find container \"590199eee583708baf8a6aaf5fd46b030cdff78491c91231b22f6e1896974f18\": container with ID starting with 590199eee583708baf8a6aaf5fd46b030cdff78491c91231b22f6e1896974f18 not found: ID does not exist" Jan 20 17:23:54 crc kubenswrapper[4558]: I0120 17:23:54.494797 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:23:54 crc kubenswrapper[4558]: E0120 17:23:54.495260 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3afcf923-8d53-4638-bba0-09afa338a2e0" containerName="probe" Jan 20 17:23:54 crc kubenswrapper[4558]: I0120 17:23:54.495280 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="3afcf923-8d53-4638-bba0-09afa338a2e0" containerName="probe" Jan 20 17:23:54 crc kubenswrapper[4558]: E0120 17:23:54.495303 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3afcf923-8d53-4638-bba0-09afa338a2e0" containerName="cinder-scheduler" Jan 20 17:23:54 crc kubenswrapper[4558]: I0120 17:23:54.495310 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="3afcf923-8d53-4638-bba0-09afa338a2e0" containerName="cinder-scheduler" Jan 20 17:23:54 crc kubenswrapper[4558]: I0120 17:23:54.495498 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="3afcf923-8d53-4638-bba0-09afa338a2e0" containerName="probe" Jan 20 17:23:54 crc kubenswrapper[4558]: I0120 17:23:54.495517 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="3afcf923-8d53-4638-bba0-09afa338a2e0" containerName="cinder-scheduler" Jan 20 17:23:54 crc kubenswrapper[4558]: I0120 17:23:54.496501 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:23:54 crc kubenswrapper[4558]: I0120 17:23:54.499487 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-scheduler-config-data" Jan 20 17:23:54 crc kubenswrapper[4558]: I0120 17:23:54.500131 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:23:54 crc kubenswrapper[4558]: I0120 17:23:54.576847 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3afcf923-8d53-4638-bba0-09afa338a2e0" path="/var/lib/kubelet/pods/3afcf923-8d53-4638-bba0-09afa338a2e0/volumes" Jan 20 17:23:54 crc kubenswrapper[4558]: I0120 17:23:54.625841 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7c7138fb-59b8-47c0-a431-023fa79404f1-scripts\") pod \"cinder-scheduler-0\" (UID: \"7c7138fb-59b8-47c0-a431-023fa79404f1\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:23:54 crc kubenswrapper[4558]: I0120 17:23:54.625906 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c7138fb-59b8-47c0-a431-023fa79404f1-config-data\") pod \"cinder-scheduler-0\" (UID: \"7c7138fb-59b8-47c0-a431-023fa79404f1\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:23:54 crc kubenswrapper[4558]: I0120 17:23:54.625976 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c7138fb-59b8-47c0-a431-023fa79404f1-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"7c7138fb-59b8-47c0-a431-023fa79404f1\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:23:54 crc kubenswrapper[4558]: I0120 17:23:54.626038 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7c7138fb-59b8-47c0-a431-023fa79404f1-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"7c7138fb-59b8-47c0-a431-023fa79404f1\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:23:54 crc kubenswrapper[4558]: I0120 17:23:54.626086 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7c7138fb-59b8-47c0-a431-023fa79404f1-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"7c7138fb-59b8-47c0-a431-023fa79404f1\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:23:54 crc kubenswrapper[4558]: I0120 17:23:54.626116 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9mbjv\" (UniqueName: \"kubernetes.io/projected/7c7138fb-59b8-47c0-a431-023fa79404f1-kube-api-access-9mbjv\") pod \"cinder-scheduler-0\" (UID: \"7c7138fb-59b8-47c0-a431-023fa79404f1\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:23:54 crc kubenswrapper[4558]: I0120 17:23:54.727642 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c7138fb-59b8-47c0-a431-023fa79404f1-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"7c7138fb-59b8-47c0-a431-023fa79404f1\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:23:54 crc kubenswrapper[4558]: I0120 17:23:54.727741 4558 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7c7138fb-59b8-47c0-a431-023fa79404f1-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"7c7138fb-59b8-47c0-a431-023fa79404f1\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:23:54 crc kubenswrapper[4558]: I0120 17:23:54.727830 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7c7138fb-59b8-47c0-a431-023fa79404f1-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"7c7138fb-59b8-47c0-a431-023fa79404f1\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:23:54 crc kubenswrapper[4558]: I0120 17:23:54.727861 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9mbjv\" (UniqueName: \"kubernetes.io/projected/7c7138fb-59b8-47c0-a431-023fa79404f1-kube-api-access-9mbjv\") pod \"cinder-scheduler-0\" (UID: \"7c7138fb-59b8-47c0-a431-023fa79404f1\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:23:54 crc kubenswrapper[4558]: I0120 17:23:54.727890 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7c7138fb-59b8-47c0-a431-023fa79404f1-scripts\") pod \"cinder-scheduler-0\" (UID: \"7c7138fb-59b8-47c0-a431-023fa79404f1\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:23:54 crc kubenswrapper[4558]: I0120 17:23:54.727946 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c7138fb-59b8-47c0-a431-023fa79404f1-config-data\") pod \"cinder-scheduler-0\" (UID: \"7c7138fb-59b8-47c0-a431-023fa79404f1\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:23:54 crc kubenswrapper[4558]: I0120 17:23:54.728611 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7c7138fb-59b8-47c0-a431-023fa79404f1-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"7c7138fb-59b8-47c0-a431-023fa79404f1\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:23:54 crc kubenswrapper[4558]: I0120 17:23:54.732634 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7c7138fb-59b8-47c0-a431-023fa79404f1-scripts\") pod \"cinder-scheduler-0\" (UID: \"7c7138fb-59b8-47c0-a431-023fa79404f1\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:23:54 crc kubenswrapper[4558]: I0120 17:23:54.732957 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c7138fb-59b8-47c0-a431-023fa79404f1-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"7c7138fb-59b8-47c0-a431-023fa79404f1\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:23:54 crc kubenswrapper[4558]: I0120 17:23:54.735863 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c7138fb-59b8-47c0-a431-023fa79404f1-config-data\") pod \"cinder-scheduler-0\" (UID: \"7c7138fb-59b8-47c0-a431-023fa79404f1\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:23:54 crc kubenswrapper[4558]: I0120 17:23:54.752551 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7c7138fb-59b8-47c0-a431-023fa79404f1-config-data-custom\") pod 
\"cinder-scheduler-0\" (UID: \"7c7138fb-59b8-47c0-a431-023fa79404f1\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:23:54 crc kubenswrapper[4558]: I0120 17:23:54.762591 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9mbjv\" (UniqueName: \"kubernetes.io/projected/7c7138fb-59b8-47c0-a431-023fa79404f1-kube-api-access-9mbjv\") pod \"cinder-scheduler-0\" (UID: \"7c7138fb-59b8-47c0-a431-023fa79404f1\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:23:54 crc kubenswrapper[4558]: I0120 17:23:54.821733 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:23:55 crc kubenswrapper[4558]: I0120 17:23:55.385758 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:23:55 crc kubenswrapper[4558]: I0120 17:23:55.455811 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"7c7138fb-59b8-47c0-a431-023fa79404f1","Type":"ContainerStarted","Data":"b5d4c7b2fd5aaea450b495d6eb85db94cb27143a6d1814862eae36a4d86ed3f9"} Jan 20 17:23:55 crc kubenswrapper[4558]: I0120 17:23:55.927135 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/barbican-api-666d4cc7c-5vfvn" Jan 20 17:23:55 crc kubenswrapper[4558]: I0120 17:23:55.978643 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-api-547b6df4c5-87fjk"] Jan 20 17:23:55 crc kubenswrapper[4558]: I0120 17:23:55.979115 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-api-547b6df4c5-87fjk" podUID="db32104f-5145-43ca-bc9a-5c410bf89933" containerName="barbican-api-log" containerID="cri-o://30ed7aba992e1e08f5fc61d9f1994bef19246447cd45bbdb4562c998c6ff53d3" gracePeriod=30 Jan 20 17:23:55 crc kubenswrapper[4558]: I0120 17:23:55.979532 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-api-547b6df4c5-87fjk" podUID="db32104f-5145-43ca-bc9a-5c410bf89933" containerName="barbican-api" containerID="cri-o://a14d46445c620e36992101173d2a0d80fcb77d671533a2ad4c05704ae7e3dd72" gracePeriod=30 Jan 20 17:23:56 crc kubenswrapper[4558]: I0120 17:23:56.478471 4558 generic.go:334] "Generic (PLEG): container finished" podID="db32104f-5145-43ca-bc9a-5c410bf89933" containerID="30ed7aba992e1e08f5fc61d9f1994bef19246447cd45bbdb4562c998c6ff53d3" exitCode=143 Jan 20 17:23:56 crc kubenswrapper[4558]: I0120 17:23:56.478543 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-547b6df4c5-87fjk" event={"ID":"db32104f-5145-43ca-bc9a-5c410bf89933","Type":"ContainerDied","Data":"30ed7aba992e1e08f5fc61d9f1994bef19246447cd45bbdb4562c998c6ff53d3"} Jan 20 17:23:56 crc kubenswrapper[4558]: I0120 17:23:56.480872 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"7c7138fb-59b8-47c0-a431-023fa79404f1","Type":"ContainerStarted","Data":"98d4b6952becbe3b78b67cf6ee7b8b7cc7435eda4e3a11dead4ce7aba086ca2a"} Jan 20 17:23:57 crc kubenswrapper[4558]: I0120 17:23:57.493297 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"7c7138fb-59b8-47c0-a431-023fa79404f1","Type":"ContainerStarted","Data":"b4e76db6b8f078c1dc089ec0784ae087f970f12f50927744b7cf7a30c75eb1e1"} Jan 20 17:23:57 crc kubenswrapper[4558]: 
I0120 17:23:57.513047 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/cinder-scheduler-0" podStartSLOduration=3.513028502 podStartE2EDuration="3.513028502s" podCreationTimestamp="2026-01-20 17:23:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:23:57.508544498 +0000 UTC m=+2531.268882465" watchObservedRunningTime="2026-01-20 17:23:57.513028502 +0000 UTC m=+2531.273366459" Jan 20 17:23:58 crc kubenswrapper[4558]: I0120 17:23:58.568085 4558 scope.go:117] "RemoveContainer" containerID="5ea93b24584a2124e067d69a33744acf8ad8f9ba3cdebcca7c752363ecc038ad" Jan 20 17:23:58 crc kubenswrapper[4558]: I0120 17:23:58.569918 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/placement-56dc9f6ff9-hc2fq" podUID="36591742-f9df-4244-b32c-cc952d81893a" containerName="placement-log" probeResult="failure" output="Get \"http://10.217.1.6:8778/\": dial tcp 10.217.1.6:8778: connect: connection refused" Jan 20 17:23:59 crc kubenswrapper[4558]: I0120 17:23:59.525640 4558 generic.go:334] "Generic (PLEG): container finished" podID="db32104f-5145-43ca-bc9a-5c410bf89933" containerID="a14d46445c620e36992101173d2a0d80fcb77d671533a2ad4c05704ae7e3dd72" exitCode=0 Jan 20 17:23:59 crc kubenswrapper[4558]: I0120 17:23:59.525718 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-547b6df4c5-87fjk" event={"ID":"db32104f-5145-43ca-bc9a-5c410bf89933","Type":"ContainerDied","Data":"a14d46445c620e36992101173d2a0d80fcb77d671533a2ad4c05704ae7e3dd72"} Jan 20 17:23:59 crc kubenswrapper[4558]: I0120 17:23:59.526029 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-547b6df4c5-87fjk" event={"ID":"db32104f-5145-43ca-bc9a-5c410bf89933","Type":"ContainerDied","Data":"79684ab8301c7dad7ec9010931af1515fb53eb6dfd15283896a2eb64ccc91b4f"} Jan 20 17:23:59 crc kubenswrapper[4558]: I0120 17:23:59.526045 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="79684ab8301c7dad7ec9010931af1515fb53eb6dfd15283896a2eb64ccc91b4f" Jan 20 17:23:59 crc kubenswrapper[4558]: I0120 17:23:59.528120 4558 generic.go:334] "Generic (PLEG): container finished" podID="36591742-f9df-4244-b32c-cc952d81893a" containerID="bca1cb65e2496aef07b0f01e5ea84e3764b8ca5b07400205c39fd0bf35b97ead" exitCode=1 Jan 20 17:23:59 crc kubenswrapper[4558]: I0120 17:23:59.528183 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-56dc9f6ff9-hc2fq" event={"ID":"36591742-f9df-4244-b32c-cc952d81893a","Type":"ContainerDied","Data":"bca1cb65e2496aef07b0f01e5ea84e3764b8ca5b07400205c39fd0bf35b97ead"} Jan 20 17:23:59 crc kubenswrapper[4558]: I0120 17:23:59.528232 4558 scope.go:117] "RemoveContainer" containerID="5ea93b24584a2124e067d69a33744acf8ad8f9ba3cdebcca7c752363ecc038ad" Jan 20 17:23:59 crc kubenswrapper[4558]: I0120 17:23:59.528770 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/placement-56dc9f6ff9-hc2fq" podUID="36591742-f9df-4244-b32c-cc952d81893a" containerName="placement-log" probeResult="failure" output="Get \"http://10.217.1.6:8778/\": dial tcp 10.217.1.6:8778: connect: connection refused" Jan 20 17:23:59 crc kubenswrapper[4558]: I0120 17:23:59.528838 4558 scope.go:117] "RemoveContainer" containerID="bca1cb65e2496aef07b0f01e5ea84e3764b8ca5b07400205c39fd0bf35b97ead" Jan 20 17:23:59 crc kubenswrapper[4558]: E0120 
17:23:59.529098 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"placement-api\" with CrashLoopBackOff: \"back-off 20s restarting failed container=placement-api pod=placement-56dc9f6ff9-hc2fq_openstack-kuttl-tests(36591742-f9df-4244-b32c-cc952d81893a)\"" pod="openstack-kuttl-tests/placement-56dc9f6ff9-hc2fq" podUID="36591742-f9df-4244-b32c-cc952d81893a" Jan 20 17:23:59 crc kubenswrapper[4558]: I0120 17:23:59.578904 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-api-547b6df4c5-87fjk" Jan 20 17:23:59 crc kubenswrapper[4558]: I0120 17:23:59.754092 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gmmfp\" (UniqueName: \"kubernetes.io/projected/db32104f-5145-43ca-bc9a-5c410bf89933-kube-api-access-gmmfp\") pod \"db32104f-5145-43ca-bc9a-5c410bf89933\" (UID: \"db32104f-5145-43ca-bc9a-5c410bf89933\") " Jan 20 17:23:59 crc kubenswrapper[4558]: I0120 17:23:59.754200 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/db32104f-5145-43ca-bc9a-5c410bf89933-config-data-custom\") pod \"db32104f-5145-43ca-bc9a-5c410bf89933\" (UID: \"db32104f-5145-43ca-bc9a-5c410bf89933\") " Jan 20 17:23:59 crc kubenswrapper[4558]: I0120 17:23:59.754242 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/db32104f-5145-43ca-bc9a-5c410bf89933-logs\") pod \"db32104f-5145-43ca-bc9a-5c410bf89933\" (UID: \"db32104f-5145-43ca-bc9a-5c410bf89933\") " Jan 20 17:23:59 crc kubenswrapper[4558]: I0120 17:23:59.754269 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db32104f-5145-43ca-bc9a-5c410bf89933-config-data\") pod \"db32104f-5145-43ca-bc9a-5c410bf89933\" (UID: \"db32104f-5145-43ca-bc9a-5c410bf89933\") " Jan 20 17:23:59 crc kubenswrapper[4558]: I0120 17:23:59.754369 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db32104f-5145-43ca-bc9a-5c410bf89933-combined-ca-bundle\") pod \"db32104f-5145-43ca-bc9a-5c410bf89933\" (UID: \"db32104f-5145-43ca-bc9a-5c410bf89933\") " Jan 20 17:23:59 crc kubenswrapper[4558]: I0120 17:23:59.754941 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/db32104f-5145-43ca-bc9a-5c410bf89933-logs" (OuterVolumeSpecName: "logs") pod "db32104f-5145-43ca-bc9a-5c410bf89933" (UID: "db32104f-5145-43ca-bc9a-5c410bf89933"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:23:59 crc kubenswrapper[4558]: I0120 17:23:59.755994 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/db32104f-5145-43ca-bc9a-5c410bf89933-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:59 crc kubenswrapper[4558]: I0120 17:23:59.761284 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db32104f-5145-43ca-bc9a-5c410bf89933-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "db32104f-5145-43ca-bc9a-5c410bf89933" (UID: "db32104f-5145-43ca-bc9a-5c410bf89933"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:23:59 crc kubenswrapper[4558]: I0120 17:23:59.761358 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/db32104f-5145-43ca-bc9a-5c410bf89933-kube-api-access-gmmfp" (OuterVolumeSpecName: "kube-api-access-gmmfp") pod "db32104f-5145-43ca-bc9a-5c410bf89933" (UID: "db32104f-5145-43ca-bc9a-5c410bf89933"). InnerVolumeSpecName "kube-api-access-gmmfp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:23:59 crc kubenswrapper[4558]: I0120 17:23:59.781572 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db32104f-5145-43ca-bc9a-5c410bf89933-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "db32104f-5145-43ca-bc9a-5c410bf89933" (UID: "db32104f-5145-43ca-bc9a-5c410bf89933"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:23:59 crc kubenswrapper[4558]: I0120 17:23:59.793102 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db32104f-5145-43ca-bc9a-5c410bf89933-config-data" (OuterVolumeSpecName: "config-data") pod "db32104f-5145-43ca-bc9a-5c410bf89933" (UID: "db32104f-5145-43ca-bc9a-5c410bf89933"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:23:59 crc kubenswrapper[4558]: I0120 17:23:59.823580 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:23:59 crc kubenswrapper[4558]: I0120 17:23:59.856980 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db32104f-5145-43ca-bc9a-5c410bf89933-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:59 crc kubenswrapper[4558]: I0120 17:23:59.857008 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db32104f-5145-43ca-bc9a-5c410bf89933-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:59 crc kubenswrapper[4558]: I0120 17:23:59.857020 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gmmfp\" (UniqueName: \"kubernetes.io/projected/db32104f-5145-43ca-bc9a-5c410bf89933-kube-api-access-gmmfp\") on node \"crc\" DevicePath \"\"" Jan 20 17:23:59 crc kubenswrapper[4558]: I0120 17:23:59.857032 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/db32104f-5145-43ca-bc9a-5c410bf89933-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:00 crc kubenswrapper[4558]: I0120 17:24:00.412634 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:24:00 crc kubenswrapper[4558]: I0120 17:24:00.541004 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-api-547b6df4c5-87fjk" Jan 20 17:24:00 crc kubenswrapper[4558]: I0120 17:24:00.579013 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-api-547b6df4c5-87fjk"] Jan 20 17:24:00 crc kubenswrapper[4558]: I0120 17:24:00.579056 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-api-547b6df4c5-87fjk"] Jan 20 17:24:02 crc kubenswrapper[4558]: I0120 17:24:02.576658 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="db32104f-5145-43ca-bc9a-5c410bf89933" path="/var/lib/kubelet/pods/db32104f-5145-43ca-bc9a-5c410bf89933/volumes" Jan 20 17:24:04 crc kubenswrapper[4558]: I0120 17:24:04.385065 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/barbican-api-547b6df4c5-87fjk" podUID="db32104f-5145-43ca-bc9a-5c410bf89933" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.1.9:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 20 17:24:04 crc kubenswrapper[4558]: I0120 17:24:04.385052 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/barbican-api-547b6df4c5-87fjk" podUID="db32104f-5145-43ca-bc9a-5c410bf89933" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.1.9:9311/healthcheck\": dial tcp 10.217.1.9:9311: i/o timeout (Client.Timeout exceeded while awaiting headers)" Jan 20 17:24:05 crc kubenswrapper[4558]: I0120 17:24:05.056131 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:24:07 crc kubenswrapper[4558]: I0120 17:24:07.472097 4558 scope.go:117] "RemoveContainer" containerID="b7c379986cc6aff3b828f77461a78d1169c8a75f30d3abde660227f21701db47" Jan 20 17:24:07 crc kubenswrapper[4558]: I0120 17:24:07.500125 4558 scope.go:117] "RemoveContainer" containerID="db6664dc5563480a7755e8b6b12b6a68a3945d57a9ccc1fba3ccffe2963a98f7" Jan 20 17:24:07 crc kubenswrapper[4558]: I0120 17:24:07.529364 4558 scope.go:117] "RemoveContainer" containerID="0404131e6088ce70d30be218580224f5f7c40261b9f6a556f4d91e65b05b4710" Jan 20 17:24:07 crc kubenswrapper[4558]: I0120 17:24:07.565820 4558 scope.go:117] "RemoveContainer" containerID="788f6224ee4017a1c49676cf0158019aad3dadc64f19f16e8a8d308ee1b7ca67" Jan 20 17:24:07 crc kubenswrapper[4558]: I0120 17:24:07.603957 4558 scope.go:117] "RemoveContainer" containerID="e753fe9a9ff4e55db2f42e6e4809dd92638bc59c527ff9a16c85cbd5e8061320" Jan 20 17:24:07 crc kubenswrapper[4558]: I0120 17:24:07.636558 4558 scope.go:117] "RemoveContainer" containerID="1ecf6feb620f00f92cee9629762d308a481961f09f2bdfc3ccd5fb800dea3469" Jan 20 17:24:07 crc kubenswrapper[4558]: I0120 17:24:07.661570 4558 scope.go:117] "RemoveContainer" containerID="44b59240e6f876632cec3355c15724897d903c1c71ba44d49a8fa7096be4ce9e" Jan 20 17:24:07 crc kubenswrapper[4558]: I0120 17:24:07.701764 4558 scope.go:117] "RemoveContainer" containerID="65fe42fe25e67f4d2e7a0b380665cfa50e34c28aa831796b5829b20cecf6a5cb" Jan 20 17:24:07 crc kubenswrapper[4558]: I0120 17:24:07.723730 4558 scope.go:117] "RemoveContainer" containerID="0592c7c01dbb34374b3da8af2857becf1536c237d8abb2a024508c881acf1773" Jan 20 17:24:07 crc kubenswrapper[4558]: I0120 17:24:07.747201 4558 scope.go:117] "RemoveContainer" containerID="fc125d4ae84f126fbe53fa24fdba0fd63039bffb4d5927be5e014a9e4f598ab4" Jan 20 17:24:07 crc kubenswrapper[4558]: I0120 17:24:07.788024 4558 
scope.go:117] "RemoveContainer" containerID="f352ada9d1b11fc0753e4271db70a1d1f3a8416472398fbddeb220865835978e" Jan 20 17:24:07 crc kubenswrapper[4558]: I0120 17:24:07.809411 4558 scope.go:117] "RemoveContainer" containerID="11230ed7f43eb0e13b6397efae5fa6ee1ad663e47745015e64507d23add89ab2" Jan 20 17:24:09 crc kubenswrapper[4558]: I0120 17:24:09.313914 4558 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack-kuttl-tests/placement-56dc9f6ff9-hc2fq" Jan 20 17:24:09 crc kubenswrapper[4558]: I0120 17:24:09.314758 4558 scope.go:117] "RemoveContainer" containerID="bca1cb65e2496aef07b0f01e5ea84e3764b8ca5b07400205c39fd0bf35b97ead" Jan 20 17:24:09 crc kubenswrapper[4558]: E0120 17:24:09.315014 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"placement-api\" with CrashLoopBackOff: \"back-off 20s restarting failed container=placement-api pod=placement-56dc9f6ff9-hc2fq_openstack-kuttl-tests(36591742-f9df-4244-b32c-cc952d81893a)\"" pod="openstack-kuttl-tests/placement-56dc9f6ff9-hc2fq" podUID="36591742-f9df-4244-b32c-cc952d81893a" Jan 20 17:24:09 crc kubenswrapper[4558]: I0120 17:24:09.316132 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/placement-56dc9f6ff9-hc2fq" Jan 20 17:24:09 crc kubenswrapper[4558]: I0120 17:24:09.318004 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack-kuttl-tests/placement-56dc9f6ff9-hc2fq" podUID="36591742-f9df-4244-b32c-cc952d81893a" containerName="placement-log" probeResult="failure" output="Get \"http://10.217.1.6:8778/\": dial tcp 10.217.1.6:8778: connect: connection refused" Jan 20 17:24:09 crc kubenswrapper[4558]: I0120 17:24:09.318098 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/placement-56dc9f6ff9-hc2fq" podUID="36591742-f9df-4244-b32c-cc952d81893a" containerName="placement-log" probeResult="failure" output="Get \"http://10.217.1.6:8778/\": dial tcp 10.217.1.6:8778: connect: connection refused" Jan 20 17:24:09 crc kubenswrapper[4558]: I0120 17:24:09.318412 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/placement-56dc9f6ff9-hc2fq" podUID="36591742-f9df-4244-b32c-cc952d81893a" containerName="placement-log" probeResult="failure" output="Get \"http://10.217.1.6:8778/\": dial tcp 10.217.1.6:8778: connect: connection refused" Jan 20 17:24:09 crc kubenswrapper[4558]: I0120 17:24:09.646446 4558 scope.go:117] "RemoveContainer" containerID="bca1cb65e2496aef07b0f01e5ea84e3764b8ca5b07400205c39fd0bf35b97ead" Jan 20 17:24:09 crc kubenswrapper[4558]: E0120 17:24:09.646934 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"placement-api\" with CrashLoopBackOff: \"back-off 20s restarting failed container=placement-api pod=placement-56dc9f6ff9-hc2fq_openstack-kuttl-tests(36591742-f9df-4244-b32c-cc952d81893a)\"" pod="openstack-kuttl-tests/placement-56dc9f6ff9-hc2fq" podUID="36591742-f9df-4244-b32c-cc952d81893a" Jan 20 17:24:09 crc kubenswrapper[4558]: I0120 17:24:09.647093 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/placement-56dc9f6ff9-hc2fq" podUID="36591742-f9df-4244-b32c-cc952d81893a" containerName="placement-log" probeResult="failure" output="Get \"http://10.217.1.6:8778/\": dial tcp 10.217.1.6:8778: connect: connection refused" Jan 20 17:24:10 crc kubenswrapper[4558]: I0120 17:24:10.488913 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack-kuttl-tests/placement-66fdf9847-wwwbv" Jan 20 17:24:10 crc kubenswrapper[4558]: I0120 17:24:10.489956 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/placement-66fdf9847-wwwbv" Jan 20 17:24:10 crc kubenswrapper[4558]: I0120 17:24:10.499360 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:10 crc kubenswrapper[4558]: I0120 17:24:10.627869 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-56dc9f6ff9-hc2fq"] Jan 20 17:24:10 crc kubenswrapper[4558]: I0120 17:24:10.655126 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/placement-56dc9f6ff9-hc2fq" podUID="36591742-f9df-4244-b32c-cc952d81893a" containerName="placement-log" containerID="cri-o://64ef811896fb701a56067ae09192dab6f941d9eb7d07c6bf99ee816f9a9c9612" gracePeriod=30 Jan 20 17:24:10 crc kubenswrapper[4558]: I0120 17:24:10.655312 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/placement-56dc9f6ff9-hc2fq" podUID="36591742-f9df-4244-b32c-cc952d81893a" containerName="placement-log" probeResult="failure" output="Get \"http://10.217.1.6:8778/\": dial tcp 10.217.1.6:8778: connect: connection refused" Jan 20 17:24:11 crc kubenswrapper[4558]: I0120 17:24:11.132815 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-56dc9f6ff9-hc2fq" Jan 20 17:24:11 crc kubenswrapper[4558]: I0120 17:24:11.179360 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-msl4r\" (UniqueName: \"kubernetes.io/projected/36591742-f9df-4244-b32c-cc952d81893a-kube-api-access-msl4r\") pod \"36591742-f9df-4244-b32c-cc952d81893a\" (UID: \"36591742-f9df-4244-b32c-cc952d81893a\") " Jan 20 17:24:11 crc kubenswrapper[4558]: I0120 17:24:11.179410 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/36591742-f9df-4244-b32c-cc952d81893a-logs\") pod \"36591742-f9df-4244-b32c-cc952d81893a\" (UID: \"36591742-f9df-4244-b32c-cc952d81893a\") " Jan 20 17:24:11 crc kubenswrapper[4558]: I0120 17:24:11.179433 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36591742-f9df-4244-b32c-cc952d81893a-config-data\") pod \"36591742-f9df-4244-b32c-cc952d81893a\" (UID: \"36591742-f9df-4244-b32c-cc952d81893a\") " Jan 20 17:24:11 crc kubenswrapper[4558]: I0120 17:24:11.179473 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36591742-f9df-4244-b32c-cc952d81893a-scripts\") pod \"36591742-f9df-4244-b32c-cc952d81893a\" (UID: \"36591742-f9df-4244-b32c-cc952d81893a\") " Jan 20 17:24:11 crc kubenswrapper[4558]: I0120 17:24:11.179567 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36591742-f9df-4244-b32c-cc952d81893a-combined-ca-bundle\") pod \"36591742-f9df-4244-b32c-cc952d81893a\" (UID: \"36591742-f9df-4244-b32c-cc952d81893a\") " Jan 20 17:24:11 crc kubenswrapper[4558]: I0120 17:24:11.180332 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/36591742-f9df-4244-b32c-cc952d81893a-logs" (OuterVolumeSpecName: "logs") pod 
"36591742-f9df-4244-b32c-cc952d81893a" (UID: "36591742-f9df-4244-b32c-cc952d81893a"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:24:11 crc kubenswrapper[4558]: I0120 17:24:11.185425 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36591742-f9df-4244-b32c-cc952d81893a-scripts" (OuterVolumeSpecName: "scripts") pod "36591742-f9df-4244-b32c-cc952d81893a" (UID: "36591742-f9df-4244-b32c-cc952d81893a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:24:11 crc kubenswrapper[4558]: I0120 17:24:11.185533 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/36591742-f9df-4244-b32c-cc952d81893a-kube-api-access-msl4r" (OuterVolumeSpecName: "kube-api-access-msl4r") pod "36591742-f9df-4244-b32c-cc952d81893a" (UID: "36591742-f9df-4244-b32c-cc952d81893a"). InnerVolumeSpecName "kube-api-access-msl4r". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:24:11 crc kubenswrapper[4558]: I0120 17:24:11.219822 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36591742-f9df-4244-b32c-cc952d81893a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "36591742-f9df-4244-b32c-cc952d81893a" (UID: "36591742-f9df-4244-b32c-cc952d81893a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:24:11 crc kubenswrapper[4558]: I0120 17:24:11.220818 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36591742-f9df-4244-b32c-cc952d81893a-config-data" (OuterVolumeSpecName: "config-data") pod "36591742-f9df-4244-b32c-cc952d81893a" (UID: "36591742-f9df-4244-b32c-cc952d81893a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:24:11 crc kubenswrapper[4558]: I0120 17:24:11.281959 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36591742-f9df-4244-b32c-cc952d81893a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:11 crc kubenswrapper[4558]: I0120 17:24:11.281997 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-msl4r\" (UniqueName: \"kubernetes.io/projected/36591742-f9df-4244-b32c-cc952d81893a-kube-api-access-msl4r\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:11 crc kubenswrapper[4558]: I0120 17:24:11.282015 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/36591742-f9df-4244-b32c-cc952d81893a-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:11 crc kubenswrapper[4558]: I0120 17:24:11.282028 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36591742-f9df-4244-b32c-cc952d81893a-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:11 crc kubenswrapper[4558]: I0120 17:24:11.282039 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36591742-f9df-4244-b32c-cc952d81893a-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:11 crc kubenswrapper[4558]: I0120 17:24:11.669852 4558 generic.go:334] "Generic (PLEG): container finished" podID="36591742-f9df-4244-b32c-cc952d81893a" containerID="64ef811896fb701a56067ae09192dab6f941d9eb7d07c6bf99ee816f9a9c9612" exitCode=143 Jan 20 17:24:11 crc kubenswrapper[4558]: I0120 17:24:11.669916 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-56dc9f6ff9-hc2fq" Jan 20 17:24:11 crc kubenswrapper[4558]: I0120 17:24:11.669932 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-56dc9f6ff9-hc2fq" event={"ID":"36591742-f9df-4244-b32c-cc952d81893a","Type":"ContainerDied","Data":"64ef811896fb701a56067ae09192dab6f941d9eb7d07c6bf99ee816f9a9c9612"} Jan 20 17:24:11 crc kubenswrapper[4558]: I0120 17:24:11.670520 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-56dc9f6ff9-hc2fq" event={"ID":"36591742-f9df-4244-b32c-cc952d81893a","Type":"ContainerDied","Data":"5c0a2cdc3bd0ff591e7cf030fbdf36a6ef7176dd2dcf678998bce908c0ae0595"} Jan 20 17:24:11 crc kubenswrapper[4558]: I0120 17:24:11.670550 4558 scope.go:117] "RemoveContainer" containerID="bca1cb65e2496aef07b0f01e5ea84e3764b8ca5b07400205c39fd0bf35b97ead" Jan 20 17:24:11 crc kubenswrapper[4558]: I0120 17:24:11.703445 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-56dc9f6ff9-hc2fq"] Jan 20 17:24:11 crc kubenswrapper[4558]: I0120 17:24:11.706434 4558 scope.go:117] "RemoveContainer" containerID="64ef811896fb701a56067ae09192dab6f941d9eb7d07c6bf99ee816f9a9c9612" Jan 20 17:24:11 crc kubenswrapper[4558]: I0120 17:24:11.709227 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/placement-56dc9f6ff9-hc2fq"] Jan 20 17:24:11 crc kubenswrapper[4558]: I0120 17:24:11.728002 4558 scope.go:117] "RemoveContainer" containerID="bca1cb65e2496aef07b0f01e5ea84e3764b8ca5b07400205c39fd0bf35b97ead" Jan 20 17:24:11 crc kubenswrapper[4558]: E0120 17:24:11.728326 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"bca1cb65e2496aef07b0f01e5ea84e3764b8ca5b07400205c39fd0bf35b97ead\": container with ID starting with bca1cb65e2496aef07b0f01e5ea84e3764b8ca5b07400205c39fd0bf35b97ead not found: ID does not exist" containerID="bca1cb65e2496aef07b0f01e5ea84e3764b8ca5b07400205c39fd0bf35b97ead" Jan 20 17:24:11 crc kubenswrapper[4558]: I0120 17:24:11.728359 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bca1cb65e2496aef07b0f01e5ea84e3764b8ca5b07400205c39fd0bf35b97ead"} err="failed to get container status \"bca1cb65e2496aef07b0f01e5ea84e3764b8ca5b07400205c39fd0bf35b97ead\": rpc error: code = NotFound desc = could not find container \"bca1cb65e2496aef07b0f01e5ea84e3764b8ca5b07400205c39fd0bf35b97ead\": container with ID starting with bca1cb65e2496aef07b0f01e5ea84e3764b8ca5b07400205c39fd0bf35b97ead not found: ID does not exist" Jan 20 17:24:11 crc kubenswrapper[4558]: I0120 17:24:11.728381 4558 scope.go:117] "RemoveContainer" containerID="64ef811896fb701a56067ae09192dab6f941d9eb7d07c6bf99ee816f9a9c9612" Jan 20 17:24:11 crc kubenswrapper[4558]: E0120 17:24:11.728689 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"64ef811896fb701a56067ae09192dab6f941d9eb7d07c6bf99ee816f9a9c9612\": container with ID starting with 64ef811896fb701a56067ae09192dab6f941d9eb7d07c6bf99ee816f9a9c9612 not found: ID does not exist" containerID="64ef811896fb701a56067ae09192dab6f941d9eb7d07c6bf99ee816f9a9c9612" Jan 20 17:24:11 crc kubenswrapper[4558]: I0120 17:24:11.728711 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"64ef811896fb701a56067ae09192dab6f941d9eb7d07c6bf99ee816f9a9c9612"} err="failed to get container status \"64ef811896fb701a56067ae09192dab6f941d9eb7d07c6bf99ee816f9a9c9612\": rpc error: code = NotFound desc = could not find container \"64ef811896fb701a56067ae09192dab6f941d9eb7d07c6bf99ee816f9a9c9612\": container with ID starting with 64ef811896fb701a56067ae09192dab6f941d9eb7d07c6bf99ee816f9a9c9612 not found: ID does not exist" Jan 20 17:24:12 crc kubenswrapper[4558]: I0120 17:24:12.577962 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="36591742-f9df-4244-b32c-cc952d81893a" path="/var/lib/kubelet/pods/36591742-f9df-4244-b32c-cc952d81893a/volumes" Jan 20 17:24:17 crc kubenswrapper[4558]: I0120 17:24:17.990931 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/neutron-8668888664-j9vrg" Jan 20 17:24:18 crc kubenswrapper[4558]: I0120 17:24:18.136718 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/keystone-6866498598-2wxkp" Jan 20 17:24:20 crc kubenswrapper[4558]: I0120 17:24:20.680917 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:24:20 crc kubenswrapper[4558]: E0120 17:24:20.682218 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db32104f-5145-43ca-bc9a-5c410bf89933" containerName="barbican-api-log" Jan 20 17:24:20 crc kubenswrapper[4558]: I0120 17:24:20.682236 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="db32104f-5145-43ca-bc9a-5c410bf89933" containerName="barbican-api-log" Jan 20 17:24:20 crc kubenswrapper[4558]: E0120 17:24:20.682268 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db32104f-5145-43ca-bc9a-5c410bf89933" containerName="barbican-api" Jan 20 17:24:20 crc kubenswrapper[4558]: I0120 
17:24:20.682274 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="db32104f-5145-43ca-bc9a-5c410bf89933" containerName="barbican-api" Jan 20 17:24:20 crc kubenswrapper[4558]: E0120 17:24:20.682300 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36591742-f9df-4244-b32c-cc952d81893a" containerName="placement-api" Jan 20 17:24:20 crc kubenswrapper[4558]: I0120 17:24:20.682306 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="36591742-f9df-4244-b32c-cc952d81893a" containerName="placement-api" Jan 20 17:24:20 crc kubenswrapper[4558]: E0120 17:24:20.682318 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36591742-f9df-4244-b32c-cc952d81893a" containerName="placement-api" Jan 20 17:24:20 crc kubenswrapper[4558]: I0120 17:24:20.682324 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="36591742-f9df-4244-b32c-cc952d81893a" containerName="placement-api" Jan 20 17:24:20 crc kubenswrapper[4558]: E0120 17:24:20.682341 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36591742-f9df-4244-b32c-cc952d81893a" containerName="placement-api" Jan 20 17:24:20 crc kubenswrapper[4558]: I0120 17:24:20.682348 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="36591742-f9df-4244-b32c-cc952d81893a" containerName="placement-api" Jan 20 17:24:20 crc kubenswrapper[4558]: E0120 17:24:20.682370 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36591742-f9df-4244-b32c-cc952d81893a" containerName="placement-log" Jan 20 17:24:20 crc kubenswrapper[4558]: I0120 17:24:20.682376 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="36591742-f9df-4244-b32c-cc952d81893a" containerName="placement-log" Jan 20 17:24:20 crc kubenswrapper[4558]: I0120 17:24:20.682597 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="36591742-f9df-4244-b32c-cc952d81893a" containerName="placement-log" Jan 20 17:24:20 crc kubenswrapper[4558]: I0120 17:24:20.682615 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="db32104f-5145-43ca-bc9a-5c410bf89933" containerName="barbican-api-log" Jan 20 17:24:20 crc kubenswrapper[4558]: I0120 17:24:20.682625 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="36591742-f9df-4244-b32c-cc952d81893a" containerName="placement-api" Jan 20 17:24:20 crc kubenswrapper[4558]: I0120 17:24:20.682635 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="36591742-f9df-4244-b32c-cc952d81893a" containerName="placement-api" Jan 20 17:24:20 crc kubenswrapper[4558]: I0120 17:24:20.682643 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="db32104f-5145-43ca-bc9a-5c410bf89933" containerName="barbican-api" Jan 20 17:24:20 crc kubenswrapper[4558]: I0120 17:24:20.683424 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstackclient" Jan 20 17:24:20 crc kubenswrapper[4558]: I0120 17:24:20.686840 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-config" Jan 20 17:24:20 crc kubenswrapper[4558]: I0120 17:24:20.686860 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"openstack-config-secret" Jan 20 17:24:20 crc kubenswrapper[4558]: I0120 17:24:20.687185 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"openstackclient-openstackclient-dockercfg-w79ss" Jan 20 17:24:20 crc kubenswrapper[4558]: I0120 17:24:20.715079 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:24:20 crc kubenswrapper[4558]: I0120 17:24:20.878436 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/4d845835-77d4-4d8a-9fad-0286ad5291ae-openstack-config\") pod \"openstackclient\" (UID: \"4d845835-77d4-4d8a-9fad-0286ad5291ae\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:24:20 crc kubenswrapper[4558]: I0120 17:24:20.878556 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/4d845835-77d4-4d8a-9fad-0286ad5291ae-openstack-config-secret\") pod \"openstackclient\" (UID: \"4d845835-77d4-4d8a-9fad-0286ad5291ae\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:24:20 crc kubenswrapper[4558]: I0120 17:24:20.878632 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d845835-77d4-4d8a-9fad-0286ad5291ae-combined-ca-bundle\") pod \"openstackclient\" (UID: \"4d845835-77d4-4d8a-9fad-0286ad5291ae\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:24:20 crc kubenswrapper[4558]: I0120 17:24:20.878723 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4lw8x\" (UniqueName: \"kubernetes.io/projected/4d845835-77d4-4d8a-9fad-0286ad5291ae-kube-api-access-4lw8x\") pod \"openstackclient\" (UID: \"4d845835-77d4-4d8a-9fad-0286ad5291ae\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:24:20 crc kubenswrapper[4558]: I0120 17:24:20.980907 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4lw8x\" (UniqueName: \"kubernetes.io/projected/4d845835-77d4-4d8a-9fad-0286ad5291ae-kube-api-access-4lw8x\") pod \"openstackclient\" (UID: \"4d845835-77d4-4d8a-9fad-0286ad5291ae\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:24:20 crc kubenswrapper[4558]: I0120 17:24:20.981382 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/4d845835-77d4-4d8a-9fad-0286ad5291ae-openstack-config\") pod \"openstackclient\" (UID: \"4d845835-77d4-4d8a-9fad-0286ad5291ae\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:24:20 crc kubenswrapper[4558]: I0120 17:24:20.982141 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/4d845835-77d4-4d8a-9fad-0286ad5291ae-openstack-config\") pod \"openstackclient\" (UID: \"4d845835-77d4-4d8a-9fad-0286ad5291ae\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 
17:24:20 crc kubenswrapper[4558]: I0120 17:24:20.982325 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/4d845835-77d4-4d8a-9fad-0286ad5291ae-openstack-config-secret\") pod \"openstackclient\" (UID: \"4d845835-77d4-4d8a-9fad-0286ad5291ae\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:24:20 crc kubenswrapper[4558]: I0120 17:24:20.983304 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d845835-77d4-4d8a-9fad-0286ad5291ae-combined-ca-bundle\") pod \"openstackclient\" (UID: \"4d845835-77d4-4d8a-9fad-0286ad5291ae\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:24:20 crc kubenswrapper[4558]: I0120 17:24:20.989898 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/4d845835-77d4-4d8a-9fad-0286ad5291ae-openstack-config-secret\") pod \"openstackclient\" (UID: \"4d845835-77d4-4d8a-9fad-0286ad5291ae\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:24:20 crc kubenswrapper[4558]: I0120 17:24:20.990016 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d845835-77d4-4d8a-9fad-0286ad5291ae-combined-ca-bundle\") pod \"openstackclient\" (UID: \"4d845835-77d4-4d8a-9fad-0286ad5291ae\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:24:20 crc kubenswrapper[4558]: I0120 17:24:20.995692 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4lw8x\" (UniqueName: \"kubernetes.io/projected/4d845835-77d4-4d8a-9fad-0286ad5291ae-kube-api-access-4lw8x\") pod \"openstackclient\" (UID: \"4d845835-77d4-4d8a-9fad-0286ad5291ae\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:24:21 crc kubenswrapper[4558]: I0120 17:24:21.009825 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstackclient" Jan 20 17:24:21 crc kubenswrapper[4558]: I0120 17:24:21.439005 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:24:21 crc kubenswrapper[4558]: I0120 17:24:21.787206 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstackclient" event={"ID":"4d845835-77d4-4d8a-9fad-0286ad5291ae","Type":"ContainerStarted","Data":"e33ca27067cb626a71e4e70acb048cf75b1fdf4f714c880df6a27aa61155fa45"} Jan 20 17:24:21 crc kubenswrapper[4558]: I0120 17:24:21.788396 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstackclient" event={"ID":"4d845835-77d4-4d8a-9fad-0286ad5291ae","Type":"ContainerStarted","Data":"ad4aaa1ad61e5573c3abd17ce4c70a86d54321accb28a91dc275ccbf83fd1b26"} Jan 20 17:24:21 crc kubenswrapper[4558]: I0120 17:24:21.810330 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/openstackclient" podStartSLOduration=1.8103091519999999 podStartE2EDuration="1.810309152s" podCreationTimestamp="2026-01-20 17:24:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:24:21.7993462 +0000 UTC m=+2555.559684177" watchObservedRunningTime="2026-01-20 17:24:21.810309152 +0000 UTC m=+2555.570647120" Jan 20 17:24:22 crc kubenswrapper[4558]: I0120 17:24:22.128732 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/neutron-5855ff84bf-bczz4" Jan 20 17:24:22 crc kubenswrapper[4558]: I0120 17:24:22.191149 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-8668888664-j9vrg"] Jan 20 17:24:22 crc kubenswrapper[4558]: I0120 17:24:22.191438 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-8668888664-j9vrg" podUID="12f5bd2b-9da9-49be-992d-d2345bb393e2" containerName="neutron-api" containerID="cri-o://3afad2651184b267144218ea011d4cc9a8d8837d442d3000b7e3730c47f50841" gracePeriod=30 Jan 20 17:24:22 crc kubenswrapper[4558]: I0120 17:24:22.191500 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-8668888664-j9vrg" podUID="12f5bd2b-9da9-49be-992d-d2345bb393e2" containerName="neutron-httpd" containerID="cri-o://22d5113e75d9189dc9caeca78d1cfc5da4714ea69e4a871cf431f53e792eedf7" gracePeriod=30 Jan 20 17:24:22 crc kubenswrapper[4558]: I0120 17:24:22.798060 4558 generic.go:334] "Generic (PLEG): container finished" podID="12f5bd2b-9da9-49be-992d-d2345bb393e2" containerID="22d5113e75d9189dc9caeca78d1cfc5da4714ea69e4a871cf431f53e792eedf7" exitCode=0 Jan 20 17:24:22 crc kubenswrapper[4558]: I0120 17:24:22.798153 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-8668888664-j9vrg" event={"ID":"12f5bd2b-9da9-49be-992d-d2345bb393e2","Type":"ContainerDied","Data":"22d5113e75d9189dc9caeca78d1cfc5da4714ea69e4a871cf431f53e792eedf7"} Jan 20 17:24:23 crc kubenswrapper[4558]: I0120 17:24:23.542784 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-s4fbf"] Jan 20 17:24:23 crc kubenswrapper[4558]: I0120 17:24:23.543580 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="36591742-f9df-4244-b32c-cc952d81893a" containerName="placement-api" Jan 20 17:24:23 crc kubenswrapper[4558]: I0120 17:24:23.544815 4558 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-s4fbf" Jan 20 17:24:23 crc kubenswrapper[4558]: I0120 17:24:23.552425 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-s4fbf"] Jan 20 17:24:23 crc kubenswrapper[4558]: I0120 17:24:23.642376 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7jx45\" (UniqueName: \"kubernetes.io/projected/11c22b1a-a7cd-40d8-9b6d-cd095b913a81-kube-api-access-7jx45\") pod \"community-operators-s4fbf\" (UID: \"11c22b1a-a7cd-40d8-9b6d-cd095b913a81\") " pod="openshift-marketplace/community-operators-s4fbf" Jan 20 17:24:23 crc kubenswrapper[4558]: I0120 17:24:23.642430 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/11c22b1a-a7cd-40d8-9b6d-cd095b913a81-catalog-content\") pod \"community-operators-s4fbf\" (UID: \"11c22b1a-a7cd-40d8-9b6d-cd095b913a81\") " pod="openshift-marketplace/community-operators-s4fbf" Jan 20 17:24:23 crc kubenswrapper[4558]: I0120 17:24:23.642459 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/11c22b1a-a7cd-40d8-9b6d-cd095b913a81-utilities\") pod \"community-operators-s4fbf\" (UID: \"11c22b1a-a7cd-40d8-9b6d-cd095b913a81\") " pod="openshift-marketplace/community-operators-s4fbf" Jan 20 17:24:23 crc kubenswrapper[4558]: I0120 17:24:23.744280 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7jx45\" (UniqueName: \"kubernetes.io/projected/11c22b1a-a7cd-40d8-9b6d-cd095b913a81-kube-api-access-7jx45\") pod \"community-operators-s4fbf\" (UID: \"11c22b1a-a7cd-40d8-9b6d-cd095b913a81\") " pod="openshift-marketplace/community-operators-s4fbf" Jan 20 17:24:23 crc kubenswrapper[4558]: I0120 17:24:23.744347 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/11c22b1a-a7cd-40d8-9b6d-cd095b913a81-catalog-content\") pod \"community-operators-s4fbf\" (UID: \"11c22b1a-a7cd-40d8-9b6d-cd095b913a81\") " pod="openshift-marketplace/community-operators-s4fbf" Jan 20 17:24:23 crc kubenswrapper[4558]: I0120 17:24:23.744407 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/11c22b1a-a7cd-40d8-9b6d-cd095b913a81-utilities\") pod \"community-operators-s4fbf\" (UID: \"11c22b1a-a7cd-40d8-9b6d-cd095b913a81\") " pod="openshift-marketplace/community-operators-s4fbf" Jan 20 17:24:23 crc kubenswrapper[4558]: I0120 17:24:23.744919 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/11c22b1a-a7cd-40d8-9b6d-cd095b913a81-catalog-content\") pod \"community-operators-s4fbf\" (UID: \"11c22b1a-a7cd-40d8-9b6d-cd095b913a81\") " pod="openshift-marketplace/community-operators-s4fbf" Jan 20 17:24:23 crc kubenswrapper[4558]: I0120 17:24:23.745098 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/11c22b1a-a7cd-40d8-9b6d-cd095b913a81-utilities\") pod \"community-operators-s4fbf\" (UID: \"11c22b1a-a7cd-40d8-9b6d-cd095b913a81\") " pod="openshift-marketplace/community-operators-s4fbf" Jan 20 17:24:23 crc kubenswrapper[4558]: I0120 
17:24:23.766867 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7jx45\" (UniqueName: \"kubernetes.io/projected/11c22b1a-a7cd-40d8-9b6d-cd095b913a81-kube-api-access-7jx45\") pod \"community-operators-s4fbf\" (UID: \"11c22b1a-a7cd-40d8-9b6d-cd095b913a81\") " pod="openshift-marketplace/community-operators-s4fbf" Jan 20 17:24:23 crc kubenswrapper[4558]: I0120 17:24:23.865019 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-s4fbf" Jan 20 17:24:24 crc kubenswrapper[4558]: I0120 17:24:24.346382 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-s4fbf"] Jan 20 17:24:24 crc kubenswrapper[4558]: I0120 17:24:24.486127 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-8668888664-j9vrg" Jan 20 17:24:24 crc kubenswrapper[4558]: I0120 17:24:24.668127 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/12f5bd2b-9da9-49be-992d-d2345bb393e2-ovndb-tls-certs\") pod \"12f5bd2b-9da9-49be-992d-d2345bb393e2\" (UID: \"12f5bd2b-9da9-49be-992d-d2345bb393e2\") " Jan 20 17:24:24 crc kubenswrapper[4558]: I0120 17:24:24.668328 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/12f5bd2b-9da9-49be-992d-d2345bb393e2-httpd-config\") pod \"12f5bd2b-9da9-49be-992d-d2345bb393e2\" (UID: \"12f5bd2b-9da9-49be-992d-d2345bb393e2\") " Jan 20 17:24:24 crc kubenswrapper[4558]: I0120 17:24:24.668436 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/12f5bd2b-9da9-49be-992d-d2345bb393e2-combined-ca-bundle\") pod \"12f5bd2b-9da9-49be-992d-d2345bb393e2\" (UID: \"12f5bd2b-9da9-49be-992d-d2345bb393e2\") " Jan 20 17:24:24 crc kubenswrapper[4558]: I0120 17:24:24.668523 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/12f5bd2b-9da9-49be-992d-d2345bb393e2-config\") pod \"12f5bd2b-9da9-49be-992d-d2345bb393e2\" (UID: \"12f5bd2b-9da9-49be-992d-d2345bb393e2\") " Jan 20 17:24:24 crc kubenswrapper[4558]: I0120 17:24:24.668668 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tpk55\" (UniqueName: \"kubernetes.io/projected/12f5bd2b-9da9-49be-992d-d2345bb393e2-kube-api-access-tpk55\") pod \"12f5bd2b-9da9-49be-992d-d2345bb393e2\" (UID: \"12f5bd2b-9da9-49be-992d-d2345bb393e2\") " Jan 20 17:24:24 crc kubenswrapper[4558]: I0120 17:24:24.676324 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/12f5bd2b-9da9-49be-992d-d2345bb393e2-kube-api-access-tpk55" (OuterVolumeSpecName: "kube-api-access-tpk55") pod "12f5bd2b-9da9-49be-992d-d2345bb393e2" (UID: "12f5bd2b-9da9-49be-992d-d2345bb393e2"). InnerVolumeSpecName "kube-api-access-tpk55". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:24:24 crc kubenswrapper[4558]: I0120 17:24:24.676543 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/12f5bd2b-9da9-49be-992d-d2345bb393e2-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "12f5bd2b-9da9-49be-992d-d2345bb393e2" (UID: "12f5bd2b-9da9-49be-992d-d2345bb393e2"). InnerVolumeSpecName "httpd-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:24:24 crc kubenswrapper[4558]: I0120 17:24:24.713189 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/12f5bd2b-9da9-49be-992d-d2345bb393e2-config" (OuterVolumeSpecName: "config") pod "12f5bd2b-9da9-49be-992d-d2345bb393e2" (UID: "12f5bd2b-9da9-49be-992d-d2345bb393e2"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:24:24 crc kubenswrapper[4558]: I0120 17:24:24.721574 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/12f5bd2b-9da9-49be-992d-d2345bb393e2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "12f5bd2b-9da9-49be-992d-d2345bb393e2" (UID: "12f5bd2b-9da9-49be-992d-d2345bb393e2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:24:24 crc kubenswrapper[4558]: I0120 17:24:24.728429 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/12f5bd2b-9da9-49be-992d-d2345bb393e2-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "12f5bd2b-9da9-49be-992d-d2345bb393e2" (UID: "12f5bd2b-9da9-49be-992d-d2345bb393e2"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:24:24 crc kubenswrapper[4558]: I0120 17:24:24.774498 4558 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/12f5bd2b-9da9-49be-992d-d2345bb393e2-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:24 crc kubenswrapper[4558]: I0120 17:24:24.774535 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/12f5bd2b-9da9-49be-992d-d2345bb393e2-httpd-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:24 crc kubenswrapper[4558]: I0120 17:24:24.774546 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/12f5bd2b-9da9-49be-992d-d2345bb393e2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:24 crc kubenswrapper[4558]: I0120 17:24:24.774557 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/12f5bd2b-9da9-49be-992d-d2345bb393e2-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:24 crc kubenswrapper[4558]: I0120 17:24:24.774569 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tpk55\" (UniqueName: \"kubernetes.io/projected/12f5bd2b-9da9-49be-992d-d2345bb393e2-kube-api-access-tpk55\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:24 crc kubenswrapper[4558]: I0120 17:24:24.796587 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/swift-proxy-7459567f99-gx6dr"] Jan 20 17:24:24 crc kubenswrapper[4558]: E0120 17:24:24.797225 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12f5bd2b-9da9-49be-992d-d2345bb393e2" containerName="neutron-api" Jan 20 17:24:24 crc kubenswrapper[4558]: I0120 17:24:24.797247 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="12f5bd2b-9da9-49be-992d-d2345bb393e2" containerName="neutron-api" Jan 20 17:24:24 crc kubenswrapper[4558]: E0120 17:24:24.797275 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12f5bd2b-9da9-49be-992d-d2345bb393e2" containerName="neutron-httpd" Jan 20 17:24:24 crc kubenswrapper[4558]: I0120 17:24:24.797281 4558 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="12f5bd2b-9da9-49be-992d-d2345bb393e2" containerName="neutron-httpd" Jan 20 17:24:24 crc kubenswrapper[4558]: I0120 17:24:24.797595 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="12f5bd2b-9da9-49be-992d-d2345bb393e2" containerName="neutron-api" Jan 20 17:24:24 crc kubenswrapper[4558]: I0120 17:24:24.797641 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="12f5bd2b-9da9-49be-992d-d2345bb393e2" containerName="neutron-httpd" Jan 20 17:24:24 crc kubenswrapper[4558]: I0120 17:24:24.803357 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-proxy-7459567f99-gx6dr" Jan 20 17:24:24 crc kubenswrapper[4558]: I0120 17:24:24.808845 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"swift-proxy-config-data" Jan 20 17:24:24 crc kubenswrapper[4558]: I0120 17:24:24.808879 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-swift-internal-svc" Jan 20 17:24:24 crc kubenswrapper[4558]: I0120 17:24:24.809588 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-swift-public-svc" Jan 20 17:24:24 crc kubenswrapper[4558]: I0120 17:24:24.813115 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-proxy-7459567f99-gx6dr"] Jan 20 17:24:24 crc kubenswrapper[4558]: I0120 17:24:24.816407 4558 generic.go:334] "Generic (PLEG): container finished" podID="11c22b1a-a7cd-40d8-9b6d-cd095b913a81" containerID="e21f77f3f020a87a98aff7a31130f629dc8bbc433cb6252ad83ebf1169f0494c" exitCode=0 Jan 20 17:24:24 crc kubenswrapper[4558]: I0120 17:24:24.816534 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s4fbf" event={"ID":"11c22b1a-a7cd-40d8-9b6d-cd095b913a81","Type":"ContainerDied","Data":"e21f77f3f020a87a98aff7a31130f629dc8bbc433cb6252ad83ebf1169f0494c"} Jan 20 17:24:24 crc kubenswrapper[4558]: I0120 17:24:24.816566 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s4fbf" event={"ID":"11c22b1a-a7cd-40d8-9b6d-cd095b913a81","Type":"ContainerStarted","Data":"7713f54b7023b837a5753498dca128d5953ae3496c5c5aa20aacb6f3fea4985c"} Jan 20 17:24:24 crc kubenswrapper[4558]: I0120 17:24:24.834780 4558 generic.go:334] "Generic (PLEG): container finished" podID="12f5bd2b-9da9-49be-992d-d2345bb393e2" containerID="3afad2651184b267144218ea011d4cc9a8d8837d442d3000b7e3730c47f50841" exitCode=0 Jan 20 17:24:24 crc kubenswrapper[4558]: I0120 17:24:24.834829 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-8668888664-j9vrg" event={"ID":"12f5bd2b-9da9-49be-992d-d2345bb393e2","Type":"ContainerDied","Data":"3afad2651184b267144218ea011d4cc9a8d8837d442d3000b7e3730c47f50841"} Jan 20 17:24:24 crc kubenswrapper[4558]: I0120 17:24:24.834859 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-8668888664-j9vrg" event={"ID":"12f5bd2b-9da9-49be-992d-d2345bb393e2","Type":"ContainerDied","Data":"e8bbd31c79781590df1c20f76b22ddb8a30637baec2b4b0a79c197143353f273"} Jan 20 17:24:24 crc kubenswrapper[4558]: I0120 17:24:24.834876 4558 scope.go:117] "RemoveContainer" containerID="22d5113e75d9189dc9caeca78d1cfc5da4714ea69e4a871cf431f53e792eedf7" Jan 20 17:24:24 crc kubenswrapper[4558]: I0120 17:24:24.835005 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-8668888664-j9vrg" Jan 20 17:24:24 crc kubenswrapper[4558]: I0120 17:24:24.857995 4558 scope.go:117] "RemoveContainer" containerID="3afad2651184b267144218ea011d4cc9a8d8837d442d3000b7e3730c47f50841" Jan 20 17:24:24 crc kubenswrapper[4558]: I0120 17:24:24.873509 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-8668888664-j9vrg"] Jan 20 17:24:24 crc kubenswrapper[4558]: I0120 17:24:24.880888 4558 scope.go:117] "RemoveContainer" containerID="22d5113e75d9189dc9caeca78d1cfc5da4714ea69e4a871cf431f53e792eedf7" Jan 20 17:24:24 crc kubenswrapper[4558]: E0120 17:24:24.881825 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"22d5113e75d9189dc9caeca78d1cfc5da4714ea69e4a871cf431f53e792eedf7\": container with ID starting with 22d5113e75d9189dc9caeca78d1cfc5da4714ea69e4a871cf431f53e792eedf7 not found: ID does not exist" containerID="22d5113e75d9189dc9caeca78d1cfc5da4714ea69e4a871cf431f53e792eedf7" Jan 20 17:24:24 crc kubenswrapper[4558]: I0120 17:24:24.881863 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"22d5113e75d9189dc9caeca78d1cfc5da4714ea69e4a871cf431f53e792eedf7"} err="failed to get container status \"22d5113e75d9189dc9caeca78d1cfc5da4714ea69e4a871cf431f53e792eedf7\": rpc error: code = NotFound desc = could not find container \"22d5113e75d9189dc9caeca78d1cfc5da4714ea69e4a871cf431f53e792eedf7\": container with ID starting with 22d5113e75d9189dc9caeca78d1cfc5da4714ea69e4a871cf431f53e792eedf7 not found: ID does not exist" Jan 20 17:24:24 crc kubenswrapper[4558]: I0120 17:24:24.881887 4558 scope.go:117] "RemoveContainer" containerID="3afad2651184b267144218ea011d4cc9a8d8837d442d3000b7e3730c47f50841" Jan 20 17:24:24 crc kubenswrapper[4558]: E0120 17:24:24.882253 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3afad2651184b267144218ea011d4cc9a8d8837d442d3000b7e3730c47f50841\": container with ID starting with 3afad2651184b267144218ea011d4cc9a8d8837d442d3000b7e3730c47f50841 not found: ID does not exist" containerID="3afad2651184b267144218ea011d4cc9a8d8837d442d3000b7e3730c47f50841" Jan 20 17:24:24 crc kubenswrapper[4558]: I0120 17:24:24.882293 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3afad2651184b267144218ea011d4cc9a8d8837d442d3000b7e3730c47f50841"} err="failed to get container status \"3afad2651184b267144218ea011d4cc9a8d8837d442d3000b7e3730c47f50841\": rpc error: code = NotFound desc = could not find container \"3afad2651184b267144218ea011d4cc9a8d8837d442d3000b7e3730c47f50841\": container with ID starting with 3afad2651184b267144218ea011d4cc9a8d8837d442d3000b7e3730c47f50841 not found: ID does not exist" Jan 20 17:24:24 crc kubenswrapper[4558]: I0120 17:24:24.884227 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-8668888664-j9vrg"] Jan 20 17:24:24 crc kubenswrapper[4558]: I0120 17:24:24.978779 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2bee9ff9-52bb-4dae-971c-8c6236a4e563-config-data\") pod \"swift-proxy-7459567f99-gx6dr\" (UID: \"2bee9ff9-52bb-4dae-971c-8c6236a4e563\") " pod="openstack-kuttl-tests/swift-proxy-7459567f99-gx6dr" Jan 20 17:24:24 crc kubenswrapper[4558]: I0120 17:24:24.978899 4558 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2bee9ff9-52bb-4dae-971c-8c6236a4e563-combined-ca-bundle\") pod \"swift-proxy-7459567f99-gx6dr\" (UID: \"2bee9ff9-52bb-4dae-971c-8c6236a4e563\") " pod="openstack-kuttl-tests/swift-proxy-7459567f99-gx6dr" Jan 20 17:24:24 crc kubenswrapper[4558]: I0120 17:24:24.979024 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/2bee9ff9-52bb-4dae-971c-8c6236a4e563-etc-swift\") pod \"swift-proxy-7459567f99-gx6dr\" (UID: \"2bee9ff9-52bb-4dae-971c-8c6236a4e563\") " pod="openstack-kuttl-tests/swift-proxy-7459567f99-gx6dr" Jan 20 17:24:24 crc kubenswrapper[4558]: I0120 17:24:24.979061 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2bee9ff9-52bb-4dae-971c-8c6236a4e563-run-httpd\") pod \"swift-proxy-7459567f99-gx6dr\" (UID: \"2bee9ff9-52bb-4dae-971c-8c6236a4e563\") " pod="openstack-kuttl-tests/swift-proxy-7459567f99-gx6dr" Jan 20 17:24:24 crc kubenswrapper[4558]: I0120 17:24:24.979115 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hcqdx\" (UniqueName: \"kubernetes.io/projected/2bee9ff9-52bb-4dae-971c-8c6236a4e563-kube-api-access-hcqdx\") pod \"swift-proxy-7459567f99-gx6dr\" (UID: \"2bee9ff9-52bb-4dae-971c-8c6236a4e563\") " pod="openstack-kuttl-tests/swift-proxy-7459567f99-gx6dr" Jan 20 17:24:24 crc kubenswrapper[4558]: I0120 17:24:24.979207 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2bee9ff9-52bb-4dae-971c-8c6236a4e563-public-tls-certs\") pod \"swift-proxy-7459567f99-gx6dr\" (UID: \"2bee9ff9-52bb-4dae-971c-8c6236a4e563\") " pod="openstack-kuttl-tests/swift-proxy-7459567f99-gx6dr" Jan 20 17:24:24 crc kubenswrapper[4558]: I0120 17:24:24.979253 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2bee9ff9-52bb-4dae-971c-8c6236a4e563-log-httpd\") pod \"swift-proxy-7459567f99-gx6dr\" (UID: \"2bee9ff9-52bb-4dae-971c-8c6236a4e563\") " pod="openstack-kuttl-tests/swift-proxy-7459567f99-gx6dr" Jan 20 17:24:24 crc kubenswrapper[4558]: I0120 17:24:24.979276 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2bee9ff9-52bb-4dae-971c-8c6236a4e563-internal-tls-certs\") pod \"swift-proxy-7459567f99-gx6dr\" (UID: \"2bee9ff9-52bb-4dae-971c-8c6236a4e563\") " pod="openstack-kuttl-tests/swift-proxy-7459567f99-gx6dr" Jan 20 17:24:25 crc kubenswrapper[4558]: I0120 17:24:25.080350 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2bee9ff9-52bb-4dae-971c-8c6236a4e563-config-data\") pod \"swift-proxy-7459567f99-gx6dr\" (UID: \"2bee9ff9-52bb-4dae-971c-8c6236a4e563\") " pod="openstack-kuttl-tests/swift-proxy-7459567f99-gx6dr" Jan 20 17:24:25 crc kubenswrapper[4558]: I0120 17:24:25.080395 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2bee9ff9-52bb-4dae-971c-8c6236a4e563-combined-ca-bundle\") pod 
\"swift-proxy-7459567f99-gx6dr\" (UID: \"2bee9ff9-52bb-4dae-971c-8c6236a4e563\") " pod="openstack-kuttl-tests/swift-proxy-7459567f99-gx6dr" Jan 20 17:24:25 crc kubenswrapper[4558]: I0120 17:24:25.080438 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/2bee9ff9-52bb-4dae-971c-8c6236a4e563-etc-swift\") pod \"swift-proxy-7459567f99-gx6dr\" (UID: \"2bee9ff9-52bb-4dae-971c-8c6236a4e563\") " pod="openstack-kuttl-tests/swift-proxy-7459567f99-gx6dr" Jan 20 17:24:25 crc kubenswrapper[4558]: I0120 17:24:25.080458 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2bee9ff9-52bb-4dae-971c-8c6236a4e563-run-httpd\") pod \"swift-proxy-7459567f99-gx6dr\" (UID: \"2bee9ff9-52bb-4dae-971c-8c6236a4e563\") " pod="openstack-kuttl-tests/swift-proxy-7459567f99-gx6dr" Jan 20 17:24:25 crc kubenswrapper[4558]: I0120 17:24:25.080482 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hcqdx\" (UniqueName: \"kubernetes.io/projected/2bee9ff9-52bb-4dae-971c-8c6236a4e563-kube-api-access-hcqdx\") pod \"swift-proxy-7459567f99-gx6dr\" (UID: \"2bee9ff9-52bb-4dae-971c-8c6236a4e563\") " pod="openstack-kuttl-tests/swift-proxy-7459567f99-gx6dr" Jan 20 17:24:25 crc kubenswrapper[4558]: I0120 17:24:25.080513 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2bee9ff9-52bb-4dae-971c-8c6236a4e563-public-tls-certs\") pod \"swift-proxy-7459567f99-gx6dr\" (UID: \"2bee9ff9-52bb-4dae-971c-8c6236a4e563\") " pod="openstack-kuttl-tests/swift-proxy-7459567f99-gx6dr" Jan 20 17:24:25 crc kubenswrapper[4558]: I0120 17:24:25.080531 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2bee9ff9-52bb-4dae-971c-8c6236a4e563-log-httpd\") pod \"swift-proxy-7459567f99-gx6dr\" (UID: \"2bee9ff9-52bb-4dae-971c-8c6236a4e563\") " pod="openstack-kuttl-tests/swift-proxy-7459567f99-gx6dr" Jan 20 17:24:25 crc kubenswrapper[4558]: I0120 17:24:25.080546 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2bee9ff9-52bb-4dae-971c-8c6236a4e563-internal-tls-certs\") pod \"swift-proxy-7459567f99-gx6dr\" (UID: \"2bee9ff9-52bb-4dae-971c-8c6236a4e563\") " pod="openstack-kuttl-tests/swift-proxy-7459567f99-gx6dr" Jan 20 17:24:25 crc kubenswrapper[4558]: I0120 17:24:25.081135 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2bee9ff9-52bb-4dae-971c-8c6236a4e563-run-httpd\") pod \"swift-proxy-7459567f99-gx6dr\" (UID: \"2bee9ff9-52bb-4dae-971c-8c6236a4e563\") " pod="openstack-kuttl-tests/swift-proxy-7459567f99-gx6dr" Jan 20 17:24:25 crc kubenswrapper[4558]: I0120 17:24:25.081204 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2bee9ff9-52bb-4dae-971c-8c6236a4e563-log-httpd\") pod \"swift-proxy-7459567f99-gx6dr\" (UID: \"2bee9ff9-52bb-4dae-971c-8c6236a4e563\") " pod="openstack-kuttl-tests/swift-proxy-7459567f99-gx6dr" Jan 20 17:24:25 crc kubenswrapper[4558]: I0120 17:24:25.084815 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2bee9ff9-52bb-4dae-971c-8c6236a4e563-public-tls-certs\") 
pod \"swift-proxy-7459567f99-gx6dr\" (UID: \"2bee9ff9-52bb-4dae-971c-8c6236a4e563\") " pod="openstack-kuttl-tests/swift-proxy-7459567f99-gx6dr" Jan 20 17:24:25 crc kubenswrapper[4558]: I0120 17:24:25.084836 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2bee9ff9-52bb-4dae-971c-8c6236a4e563-combined-ca-bundle\") pod \"swift-proxy-7459567f99-gx6dr\" (UID: \"2bee9ff9-52bb-4dae-971c-8c6236a4e563\") " pod="openstack-kuttl-tests/swift-proxy-7459567f99-gx6dr" Jan 20 17:24:25 crc kubenswrapper[4558]: I0120 17:24:25.084818 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2bee9ff9-52bb-4dae-971c-8c6236a4e563-config-data\") pod \"swift-proxy-7459567f99-gx6dr\" (UID: \"2bee9ff9-52bb-4dae-971c-8c6236a4e563\") " pod="openstack-kuttl-tests/swift-proxy-7459567f99-gx6dr" Jan 20 17:24:25 crc kubenswrapper[4558]: I0120 17:24:25.086144 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/2bee9ff9-52bb-4dae-971c-8c6236a4e563-etc-swift\") pod \"swift-proxy-7459567f99-gx6dr\" (UID: \"2bee9ff9-52bb-4dae-971c-8c6236a4e563\") " pod="openstack-kuttl-tests/swift-proxy-7459567f99-gx6dr" Jan 20 17:24:25 crc kubenswrapper[4558]: I0120 17:24:25.091044 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2bee9ff9-52bb-4dae-971c-8c6236a4e563-internal-tls-certs\") pod \"swift-proxy-7459567f99-gx6dr\" (UID: \"2bee9ff9-52bb-4dae-971c-8c6236a4e563\") " pod="openstack-kuttl-tests/swift-proxy-7459567f99-gx6dr" Jan 20 17:24:25 crc kubenswrapper[4558]: I0120 17:24:25.092508 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hcqdx\" (UniqueName: \"kubernetes.io/projected/2bee9ff9-52bb-4dae-971c-8c6236a4e563-kube-api-access-hcqdx\") pod \"swift-proxy-7459567f99-gx6dr\" (UID: \"2bee9ff9-52bb-4dae-971c-8c6236a4e563\") " pod="openstack-kuttl-tests/swift-proxy-7459567f99-gx6dr" Jan 20 17:24:25 crc kubenswrapper[4558]: I0120 17:24:25.135670 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/swift-proxy-7459567f99-gx6dr" Jan 20 17:24:25 crc kubenswrapper[4558]: I0120 17:24:25.493588 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:24:25 crc kubenswrapper[4558]: I0120 17:24:25.493872 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd" containerName="ceilometer-central-agent" containerID="cri-o://ef796dfab67a8244c396cd18509d66bae814af609de469193e0d1ada820bdf4c" gracePeriod=30 Jan 20 17:24:25 crc kubenswrapper[4558]: I0120 17:24:25.494030 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd" containerName="proxy-httpd" containerID="cri-o://1f1826fa85536c90dc6a1fb82f634d04594f8e977b10383c34ca6e82e13e96b3" gracePeriod=30 Jan 20 17:24:25 crc kubenswrapper[4558]: I0120 17:24:25.494115 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd" containerName="ceilometer-notification-agent" containerID="cri-o://1b4e6a1e48903d69f46a2a4b2a8193e676013ecda2045e0f0db4923d287957b1" gracePeriod=30 Jan 20 17:24:25 crc kubenswrapper[4558]: I0120 17:24:25.494337 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd" containerName="sg-core" containerID="cri-o://3dd48463ebaf0c4cc73e6dd204f9dc91627d23f0e17873f60b0ab36ca889d105" gracePeriod=30 Jan 20 17:24:25 crc kubenswrapper[4558]: W0120 17:24:25.540393 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2bee9ff9_52bb_4dae_971c_8c6236a4e563.slice/crio-b78f2da417e81673e98774d9598a50961cb7fbede4d984710706cd12f7cb03b8 WatchSource:0}: Error finding container b78f2da417e81673e98774d9598a50961cb7fbede4d984710706cd12f7cb03b8: Status 404 returned error can't find the container with id b78f2da417e81673e98774d9598a50961cb7fbede4d984710706cd12f7cb03b8 Jan 20 17:24:25 crc kubenswrapper[4558]: I0120 17:24:25.541917 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-proxy-7459567f99-gx6dr"] Jan 20 17:24:25 crc kubenswrapper[4558]: I0120 17:24:25.869477 4558 generic.go:334] "Generic (PLEG): container finished" podID="d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd" containerID="1f1826fa85536c90dc6a1fb82f634d04594f8e977b10383c34ca6e82e13e96b3" exitCode=0 Jan 20 17:24:25 crc kubenswrapper[4558]: I0120 17:24:25.869762 4558 generic.go:334] "Generic (PLEG): container finished" podID="d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd" containerID="3dd48463ebaf0c4cc73e6dd204f9dc91627d23f0e17873f60b0ab36ca889d105" exitCode=2 Jan 20 17:24:25 crc kubenswrapper[4558]: I0120 17:24:25.869774 4558 generic.go:334] "Generic (PLEG): container finished" podID="d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd" containerID="ef796dfab67a8244c396cd18509d66bae814af609de469193e0d1ada820bdf4c" exitCode=0 Jan 20 17:24:25 crc kubenswrapper[4558]: I0120 17:24:25.869558 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd","Type":"ContainerDied","Data":"1f1826fa85536c90dc6a1fb82f634d04594f8e977b10383c34ca6e82e13e96b3"} Jan 20 17:24:25 crc kubenswrapper[4558]: I0120 17:24:25.869865 4558 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd","Type":"ContainerDied","Data":"3dd48463ebaf0c4cc73e6dd204f9dc91627d23f0e17873f60b0ab36ca889d105"} Jan 20 17:24:25 crc kubenswrapper[4558]: I0120 17:24:25.869878 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd","Type":"ContainerDied","Data":"ef796dfab67a8244c396cd18509d66bae814af609de469193e0d1ada820bdf4c"} Jan 20 17:24:25 crc kubenswrapper[4558]: I0120 17:24:25.871515 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-7459567f99-gx6dr" event={"ID":"2bee9ff9-52bb-4dae-971c-8c6236a4e563","Type":"ContainerStarted","Data":"95487ba24ea1b5e70725b0d792dbc8653b76f5c38eac7b039714b3d9fd0a7854"} Jan 20 17:24:25 crc kubenswrapper[4558]: I0120 17:24:25.871562 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-7459567f99-gx6dr" event={"ID":"2bee9ff9-52bb-4dae-971c-8c6236a4e563","Type":"ContainerStarted","Data":"b78f2da417e81673e98774d9598a50961cb7fbede4d984710706cd12f7cb03b8"} Jan 20 17:24:25 crc kubenswrapper[4558]: I0120 17:24:25.874334 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s4fbf" event={"ID":"11c22b1a-a7cd-40d8-9b6d-cd095b913a81","Type":"ContainerStarted","Data":"cfacd8d412c42b01a9472434d49e531879f6bbd0b69041950da03b4acbed033a"} Jan 20 17:24:26 crc kubenswrapper[4558]: I0120 17:24:26.579009 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="12f5bd2b-9da9-49be-992d-d2345bb393e2" path="/var/lib/kubelet/pods/12f5bd2b-9da9-49be-992d-d2345bb393e2/volumes" Jan 20 17:24:26 crc kubenswrapper[4558]: I0120 17:24:26.842152 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-db-create-f6jhs"] Jan 20 17:24:26 crc kubenswrapper[4558]: I0120 17:24:26.843459 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-db-create-f6jhs" Jan 20 17:24:26 crc kubenswrapper[4558]: I0120 17:24:26.853751 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-db-create-f6jhs"] Jan 20 17:24:26 crc kubenswrapper[4558]: I0120 17:24:26.902323 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-7459567f99-gx6dr" event={"ID":"2bee9ff9-52bb-4dae-971c-8c6236a4e563","Type":"ContainerStarted","Data":"6a0521946798f9e4098ead7f6a1d8d950d13146e8392b370d0b39a8082a8515a"} Jan 20 17:24:26 crc kubenswrapper[4558]: I0120 17:24:26.903432 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/swift-proxy-7459567f99-gx6dr" Jan 20 17:24:26 crc kubenswrapper[4558]: I0120 17:24:26.903461 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/swift-proxy-7459567f99-gx6dr" Jan 20 17:24:26 crc kubenswrapper[4558]: I0120 17:24:26.906203 4558 generic.go:334] "Generic (PLEG): container finished" podID="11c22b1a-a7cd-40d8-9b6d-cd095b913a81" containerID="cfacd8d412c42b01a9472434d49e531879f6bbd0b69041950da03b4acbed033a" exitCode=0 Jan 20 17:24:26 crc kubenswrapper[4558]: I0120 17:24:26.906229 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s4fbf" event={"ID":"11c22b1a-a7cd-40d8-9b6d-cd095b913a81","Type":"ContainerDied","Data":"cfacd8d412c42b01a9472434d49e531879f6bbd0b69041950da03b4acbed033a"} Jan 20 17:24:26 crc kubenswrapper[4558]: I0120 17:24:26.919093 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/730a8fa5-7c66-41ff-be4b-f170d49a6b0b-operator-scripts\") pod \"nova-api-db-create-f6jhs\" (UID: \"730a8fa5-7c66-41ff-be4b-f170d49a6b0b\") " pod="openstack-kuttl-tests/nova-api-db-create-f6jhs" Jan 20 17:24:26 crc kubenswrapper[4558]: I0120 17:24:26.919147 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5kqt6\" (UniqueName: \"kubernetes.io/projected/730a8fa5-7c66-41ff-be4b-f170d49a6b0b-kube-api-access-5kqt6\") pod \"nova-api-db-create-f6jhs\" (UID: \"730a8fa5-7c66-41ff-be4b-f170d49a6b0b\") " pod="openstack-kuttl-tests/nova-api-db-create-f6jhs" Jan 20 17:24:26 crc kubenswrapper[4558]: I0120 17:24:26.929933 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/swift-proxy-7459567f99-gx6dr" podStartSLOduration=2.929921306 podStartE2EDuration="2.929921306s" podCreationTimestamp="2026-01-20 17:24:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:24:26.925497264 +0000 UTC m=+2560.685835231" watchObservedRunningTime="2026-01-20 17:24:26.929921306 +0000 UTC m=+2560.690259272" Jan 20 17:24:26 crc kubenswrapper[4558]: I0120 17:24:26.954955 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell0-db-create-2c6lh"] Jan 20 17:24:26 crc kubenswrapper[4558]: I0120 17:24:26.956065 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-db-create-2c6lh" Jan 20 17:24:26 crc kubenswrapper[4558]: I0120 17:24:26.976280 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-a772-account-create-update-sfz2c"] Jan 20 17:24:26 crc kubenswrapper[4558]: I0120 17:24:26.977504 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-a772-account-create-update-sfz2c" Jan 20 17:24:26 crc kubenswrapper[4558]: I0120 17:24:26.981258 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-db-create-2c6lh"] Jan 20 17:24:26 crc kubenswrapper[4558]: I0120 17:24:26.981367 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-api-db-secret" Jan 20 17:24:26 crc kubenswrapper[4558]: I0120 17:24:26.995837 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-a772-account-create-update-sfz2c"] Jan 20 17:24:27 crc kubenswrapper[4558]: I0120 17:24:27.020817 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/730a8fa5-7c66-41ff-be4b-f170d49a6b0b-operator-scripts\") pod \"nova-api-db-create-f6jhs\" (UID: \"730a8fa5-7c66-41ff-be4b-f170d49a6b0b\") " pod="openstack-kuttl-tests/nova-api-db-create-f6jhs" Jan 20 17:24:27 crc kubenswrapper[4558]: I0120 17:24:27.020871 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5kqt6\" (UniqueName: \"kubernetes.io/projected/730a8fa5-7c66-41ff-be4b-f170d49a6b0b-kube-api-access-5kqt6\") pod \"nova-api-db-create-f6jhs\" (UID: \"730a8fa5-7c66-41ff-be4b-f170d49a6b0b\") " pod="openstack-kuttl-tests/nova-api-db-create-f6jhs" Jan 20 17:24:27 crc kubenswrapper[4558]: I0120 17:24:27.023226 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/730a8fa5-7c66-41ff-be4b-f170d49a6b0b-operator-scripts\") pod \"nova-api-db-create-f6jhs\" (UID: \"730a8fa5-7c66-41ff-be4b-f170d49a6b0b\") " pod="openstack-kuttl-tests/nova-api-db-create-f6jhs" Jan 20 17:24:27 crc kubenswrapper[4558]: I0120 17:24:27.040802 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5kqt6\" (UniqueName: \"kubernetes.io/projected/730a8fa5-7c66-41ff-be4b-f170d49a6b0b-kube-api-access-5kqt6\") pod \"nova-api-db-create-f6jhs\" (UID: \"730a8fa5-7c66-41ff-be4b-f170d49a6b0b\") " pod="openstack-kuttl-tests/nova-api-db-create-f6jhs" Jan 20 17:24:27 crc kubenswrapper[4558]: I0120 17:24:27.054414 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-db-create-pxpgt"] Jan 20 17:24:27 crc kubenswrapper[4558]: I0120 17:24:27.055701 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-db-create-pxpgt" Jan 20 17:24:27 crc kubenswrapper[4558]: I0120 17:24:27.064259 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-db-create-pxpgt"] Jan 20 17:24:27 crc kubenswrapper[4558]: I0120 17:24:27.125730 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cglhv\" (UniqueName: \"kubernetes.io/projected/f59de47e-9a81-41d9-bc7c-fd5ff6723291-kube-api-access-cglhv\") pod \"nova-cell0-db-create-2c6lh\" (UID: \"f59de47e-9a81-41d9-bc7c-fd5ff6723291\") " pod="openstack-kuttl-tests/nova-cell0-db-create-2c6lh" Jan 20 17:24:27 crc kubenswrapper[4558]: I0120 17:24:27.125811 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ebbaaabe-8492-4108-981c-0c00cf1561f0-operator-scripts\") pod \"nova-cell1-db-create-pxpgt\" (UID: \"ebbaaabe-8492-4108-981c-0c00cf1561f0\") " pod="openstack-kuttl-tests/nova-cell1-db-create-pxpgt" Jan 20 17:24:27 crc kubenswrapper[4558]: I0120 17:24:27.125846 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kj4wk\" (UniqueName: \"kubernetes.io/projected/ca42fa74-258c-428a-83e0-0410cd4f2961-kube-api-access-kj4wk\") pod \"nova-api-a772-account-create-update-sfz2c\" (UID: \"ca42fa74-258c-428a-83e0-0410cd4f2961\") " pod="openstack-kuttl-tests/nova-api-a772-account-create-update-sfz2c" Jan 20 17:24:27 crc kubenswrapper[4558]: I0120 17:24:27.125894 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z8rpx\" (UniqueName: \"kubernetes.io/projected/ebbaaabe-8492-4108-981c-0c00cf1561f0-kube-api-access-z8rpx\") pod \"nova-cell1-db-create-pxpgt\" (UID: \"ebbaaabe-8492-4108-981c-0c00cf1561f0\") " pod="openstack-kuttl-tests/nova-cell1-db-create-pxpgt" Jan 20 17:24:27 crc kubenswrapper[4558]: I0120 17:24:27.125942 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f59de47e-9a81-41d9-bc7c-fd5ff6723291-operator-scripts\") pod \"nova-cell0-db-create-2c6lh\" (UID: \"f59de47e-9a81-41d9-bc7c-fd5ff6723291\") " pod="openstack-kuttl-tests/nova-cell0-db-create-2c6lh" Jan 20 17:24:27 crc kubenswrapper[4558]: I0120 17:24:27.125968 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ca42fa74-258c-428a-83e0-0410cd4f2961-operator-scripts\") pod \"nova-api-a772-account-create-update-sfz2c\" (UID: \"ca42fa74-258c-428a-83e0-0410cd4f2961\") " pod="openstack-kuttl-tests/nova-api-a772-account-create-update-sfz2c" Jan 20 17:24:27 crc kubenswrapper[4558]: I0120 17:24:27.147206 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell0-f949-account-create-update-4ndsp"] Jan 20 17:24:27 crc kubenswrapper[4558]: I0120 17:24:27.148435 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-f949-account-create-update-4ndsp" Jan 20 17:24:27 crc kubenswrapper[4558]: I0120 17:24:27.150120 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-db-secret" Jan 20 17:24:27 crc kubenswrapper[4558]: I0120 17:24:27.152736 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-f949-account-create-update-4ndsp"] Jan 20 17:24:27 crc kubenswrapper[4558]: I0120 17:24:27.172944 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-db-create-f6jhs" Jan 20 17:24:27 crc kubenswrapper[4558]: I0120 17:24:27.228365 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cglhv\" (UniqueName: \"kubernetes.io/projected/f59de47e-9a81-41d9-bc7c-fd5ff6723291-kube-api-access-cglhv\") pod \"nova-cell0-db-create-2c6lh\" (UID: \"f59de47e-9a81-41d9-bc7c-fd5ff6723291\") " pod="openstack-kuttl-tests/nova-cell0-db-create-2c6lh" Jan 20 17:24:27 crc kubenswrapper[4558]: I0120 17:24:27.228492 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/90c87493-4105-4f33-b9a8-863231d6e367-operator-scripts\") pod \"nova-cell0-f949-account-create-update-4ndsp\" (UID: \"90c87493-4105-4f33-b9a8-863231d6e367\") " pod="openstack-kuttl-tests/nova-cell0-f949-account-create-update-4ndsp" Jan 20 17:24:27 crc kubenswrapper[4558]: I0120 17:24:27.228525 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ebbaaabe-8492-4108-981c-0c00cf1561f0-operator-scripts\") pod \"nova-cell1-db-create-pxpgt\" (UID: \"ebbaaabe-8492-4108-981c-0c00cf1561f0\") " pod="openstack-kuttl-tests/nova-cell1-db-create-pxpgt" Jan 20 17:24:27 crc kubenswrapper[4558]: I0120 17:24:27.228569 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kj4wk\" (UniqueName: \"kubernetes.io/projected/ca42fa74-258c-428a-83e0-0410cd4f2961-kube-api-access-kj4wk\") pod \"nova-api-a772-account-create-update-sfz2c\" (UID: \"ca42fa74-258c-428a-83e0-0410cd4f2961\") " pod="openstack-kuttl-tests/nova-api-a772-account-create-update-sfz2c" Jan 20 17:24:27 crc kubenswrapper[4558]: I0120 17:24:27.228638 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z8rpx\" (UniqueName: \"kubernetes.io/projected/ebbaaabe-8492-4108-981c-0c00cf1561f0-kube-api-access-z8rpx\") pod \"nova-cell1-db-create-pxpgt\" (UID: \"ebbaaabe-8492-4108-981c-0c00cf1561f0\") " pod="openstack-kuttl-tests/nova-cell1-db-create-pxpgt" Jan 20 17:24:27 crc kubenswrapper[4558]: I0120 17:24:27.228714 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f59de47e-9a81-41d9-bc7c-fd5ff6723291-operator-scripts\") pod \"nova-cell0-db-create-2c6lh\" (UID: \"f59de47e-9a81-41d9-bc7c-fd5ff6723291\") " pod="openstack-kuttl-tests/nova-cell0-db-create-2c6lh" Jan 20 17:24:27 crc kubenswrapper[4558]: I0120 17:24:27.228749 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hk6mq\" (UniqueName: \"kubernetes.io/projected/90c87493-4105-4f33-b9a8-863231d6e367-kube-api-access-hk6mq\") pod \"nova-cell0-f949-account-create-update-4ndsp\" (UID: 
\"90c87493-4105-4f33-b9a8-863231d6e367\") " pod="openstack-kuttl-tests/nova-cell0-f949-account-create-update-4ndsp" Jan 20 17:24:27 crc kubenswrapper[4558]: I0120 17:24:27.228785 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ca42fa74-258c-428a-83e0-0410cd4f2961-operator-scripts\") pod \"nova-api-a772-account-create-update-sfz2c\" (UID: \"ca42fa74-258c-428a-83e0-0410cd4f2961\") " pod="openstack-kuttl-tests/nova-api-a772-account-create-update-sfz2c" Jan 20 17:24:27 crc kubenswrapper[4558]: I0120 17:24:27.229510 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ebbaaabe-8492-4108-981c-0c00cf1561f0-operator-scripts\") pod \"nova-cell1-db-create-pxpgt\" (UID: \"ebbaaabe-8492-4108-981c-0c00cf1561f0\") " pod="openstack-kuttl-tests/nova-cell1-db-create-pxpgt" Jan 20 17:24:27 crc kubenswrapper[4558]: I0120 17:24:27.229530 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f59de47e-9a81-41d9-bc7c-fd5ff6723291-operator-scripts\") pod \"nova-cell0-db-create-2c6lh\" (UID: \"f59de47e-9a81-41d9-bc7c-fd5ff6723291\") " pod="openstack-kuttl-tests/nova-cell0-db-create-2c6lh" Jan 20 17:24:27 crc kubenswrapper[4558]: I0120 17:24:27.229610 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ca42fa74-258c-428a-83e0-0410cd4f2961-operator-scripts\") pod \"nova-api-a772-account-create-update-sfz2c\" (UID: \"ca42fa74-258c-428a-83e0-0410cd4f2961\") " pod="openstack-kuttl-tests/nova-api-a772-account-create-update-sfz2c" Jan 20 17:24:27 crc kubenswrapper[4558]: I0120 17:24:27.244794 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kj4wk\" (UniqueName: \"kubernetes.io/projected/ca42fa74-258c-428a-83e0-0410cd4f2961-kube-api-access-kj4wk\") pod \"nova-api-a772-account-create-update-sfz2c\" (UID: \"ca42fa74-258c-428a-83e0-0410cd4f2961\") " pod="openstack-kuttl-tests/nova-api-a772-account-create-update-sfz2c" Jan 20 17:24:27 crc kubenswrapper[4558]: I0120 17:24:27.245127 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cglhv\" (UniqueName: \"kubernetes.io/projected/f59de47e-9a81-41d9-bc7c-fd5ff6723291-kube-api-access-cglhv\") pod \"nova-cell0-db-create-2c6lh\" (UID: \"f59de47e-9a81-41d9-bc7c-fd5ff6723291\") " pod="openstack-kuttl-tests/nova-cell0-db-create-2c6lh" Jan 20 17:24:27 crc kubenswrapper[4558]: I0120 17:24:27.246702 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z8rpx\" (UniqueName: \"kubernetes.io/projected/ebbaaabe-8492-4108-981c-0c00cf1561f0-kube-api-access-z8rpx\") pod \"nova-cell1-db-create-pxpgt\" (UID: \"ebbaaabe-8492-4108-981c-0c00cf1561f0\") " pod="openstack-kuttl-tests/nova-cell1-db-create-pxpgt" Jan 20 17:24:27 crc kubenswrapper[4558]: I0120 17:24:27.270463 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-db-create-2c6lh" Jan 20 17:24:27 crc kubenswrapper[4558]: I0120 17:24:27.304100 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-a772-account-create-update-sfz2c" Jan 20 17:24:27 crc kubenswrapper[4558]: I0120 17:24:27.333217 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hk6mq\" (UniqueName: \"kubernetes.io/projected/90c87493-4105-4f33-b9a8-863231d6e367-kube-api-access-hk6mq\") pod \"nova-cell0-f949-account-create-update-4ndsp\" (UID: \"90c87493-4105-4f33-b9a8-863231d6e367\") " pod="openstack-kuttl-tests/nova-cell0-f949-account-create-update-4ndsp" Jan 20 17:24:27 crc kubenswrapper[4558]: I0120 17:24:27.333329 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/90c87493-4105-4f33-b9a8-863231d6e367-operator-scripts\") pod \"nova-cell0-f949-account-create-update-4ndsp\" (UID: \"90c87493-4105-4f33-b9a8-863231d6e367\") " pod="openstack-kuttl-tests/nova-cell0-f949-account-create-update-4ndsp" Jan 20 17:24:27 crc kubenswrapper[4558]: I0120 17:24:27.334086 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/90c87493-4105-4f33-b9a8-863231d6e367-operator-scripts\") pod \"nova-cell0-f949-account-create-update-4ndsp\" (UID: \"90c87493-4105-4f33-b9a8-863231d6e367\") " pod="openstack-kuttl-tests/nova-cell0-f949-account-create-update-4ndsp" Jan 20 17:24:27 crc kubenswrapper[4558]: I0120 17:24:27.355564 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hk6mq\" (UniqueName: \"kubernetes.io/projected/90c87493-4105-4f33-b9a8-863231d6e367-kube-api-access-hk6mq\") pod \"nova-cell0-f949-account-create-update-4ndsp\" (UID: \"90c87493-4105-4f33-b9a8-863231d6e367\") " pod="openstack-kuttl-tests/nova-cell0-f949-account-create-update-4ndsp" Jan 20 17:24:27 crc kubenswrapper[4558]: I0120 17:24:27.357350 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-af2b-account-create-update-ckc4c"] Jan 20 17:24:27 crc kubenswrapper[4558]: I0120 17:24:27.358709 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-af2b-account-create-update-ckc4c" Jan 20 17:24:27 crc kubenswrapper[4558]: I0120 17:24:27.360750 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-db-secret" Jan 20 17:24:27 crc kubenswrapper[4558]: I0120 17:24:27.379623 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-af2b-account-create-update-ckc4c"] Jan 20 17:24:27 crc kubenswrapper[4558]: I0120 17:24:27.409098 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-db-create-pxpgt" Jan 20 17:24:27 crc kubenswrapper[4558]: I0120 17:24:27.435914 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zkf47\" (UniqueName: \"kubernetes.io/projected/a3e891d2-b4a9-4486-ae23-c3437ae07e01-kube-api-access-zkf47\") pod \"nova-cell1-af2b-account-create-update-ckc4c\" (UID: \"a3e891d2-b4a9-4486-ae23-c3437ae07e01\") " pod="openstack-kuttl-tests/nova-cell1-af2b-account-create-update-ckc4c" Jan 20 17:24:27 crc kubenswrapper[4558]: I0120 17:24:27.436335 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a3e891d2-b4a9-4486-ae23-c3437ae07e01-operator-scripts\") pod \"nova-cell1-af2b-account-create-update-ckc4c\" (UID: \"a3e891d2-b4a9-4486-ae23-c3437ae07e01\") " pod="openstack-kuttl-tests/nova-cell1-af2b-account-create-update-ckc4c" Jan 20 17:24:27 crc kubenswrapper[4558]: I0120 17:24:27.465343 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-f949-account-create-update-4ndsp" Jan 20 17:24:27 crc kubenswrapper[4558]: I0120 17:24:27.540875 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a3e891d2-b4a9-4486-ae23-c3437ae07e01-operator-scripts\") pod \"nova-cell1-af2b-account-create-update-ckc4c\" (UID: \"a3e891d2-b4a9-4486-ae23-c3437ae07e01\") " pod="openstack-kuttl-tests/nova-cell1-af2b-account-create-update-ckc4c" Jan 20 17:24:27 crc kubenswrapper[4558]: I0120 17:24:27.541198 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zkf47\" (UniqueName: \"kubernetes.io/projected/a3e891d2-b4a9-4486-ae23-c3437ae07e01-kube-api-access-zkf47\") pod \"nova-cell1-af2b-account-create-update-ckc4c\" (UID: \"a3e891d2-b4a9-4486-ae23-c3437ae07e01\") " pod="openstack-kuttl-tests/nova-cell1-af2b-account-create-update-ckc4c" Jan 20 17:24:27 crc kubenswrapper[4558]: I0120 17:24:27.542088 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a3e891d2-b4a9-4486-ae23-c3437ae07e01-operator-scripts\") pod \"nova-cell1-af2b-account-create-update-ckc4c\" (UID: \"a3e891d2-b4a9-4486-ae23-c3437ae07e01\") " pod="openstack-kuttl-tests/nova-cell1-af2b-account-create-update-ckc4c" Jan 20 17:24:27 crc kubenswrapper[4558]: I0120 17:24:27.558798 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zkf47\" (UniqueName: \"kubernetes.io/projected/a3e891d2-b4a9-4486-ae23-c3437ae07e01-kube-api-access-zkf47\") pod \"nova-cell1-af2b-account-create-update-ckc4c\" (UID: \"a3e891d2-b4a9-4486-ae23-c3437ae07e01\") " pod="openstack-kuttl-tests/nova-cell1-af2b-account-create-update-ckc4c" Jan 20 17:24:27 crc kubenswrapper[4558]: I0120 17:24:27.612148 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-db-create-f6jhs"] Jan 20 17:24:28 crc kubenswrapper[4558]: I0120 17:24:27.687860 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-af2b-account-create-update-ckc4c" Jan 20 17:24:28 crc kubenswrapper[4558]: I0120 17:24:27.747370 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-db-create-2c6lh"] Jan 20 17:24:28 crc kubenswrapper[4558]: I0120 17:24:27.829128 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-a772-account-create-update-sfz2c"] Jan 20 17:24:28 crc kubenswrapper[4558]: I0120 17:24:27.927001 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-db-create-f6jhs" event={"ID":"730a8fa5-7c66-41ff-be4b-f170d49a6b0b","Type":"ContainerStarted","Data":"d140d7cfdbff94ef6b5907046676f6d45c60f5cd711589f64e1ce555bd15a8a6"} Jan 20 17:24:28 crc kubenswrapper[4558]: I0120 17:24:27.927039 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-db-create-f6jhs" event={"ID":"730a8fa5-7c66-41ff-be4b-f170d49a6b0b","Type":"ContainerStarted","Data":"f0370d6d84ba17295ccbdeb20488d6f25d2eb7e79f6cea79772893ab1f095bb3"} Jan 20 17:24:28 crc kubenswrapper[4558]: I0120 17:24:27.930385 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-a772-account-create-update-sfz2c" event={"ID":"ca42fa74-258c-428a-83e0-0410cd4f2961","Type":"ContainerStarted","Data":"4e066d8ac49bf9f6853eee6a02eeb3ae300f6e682255a3d4f212b5a9f92a185b"} Jan 20 17:24:28 crc kubenswrapper[4558]: I0120 17:24:27.935271 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-db-create-2c6lh" event={"ID":"f59de47e-9a81-41d9-bc7c-fd5ff6723291","Type":"ContainerStarted","Data":"bf789c7ba1fd3d873c553316e077914a3be7e06a3589d12ba5a30d4e51f87b2c"} Jan 20 17:24:28 crc kubenswrapper[4558]: I0120 17:24:27.938971 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-db-create-pxpgt"] Jan 20 17:24:28 crc kubenswrapper[4558]: I0120 17:24:27.947770 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s4fbf" event={"ID":"11c22b1a-a7cd-40d8-9b6d-cd095b913a81","Type":"ContainerStarted","Data":"99dba0a4688df1b240e481e351c05168a3e1900680a0e4f241c80a822442a9a2"} Jan 20 17:24:28 crc kubenswrapper[4558]: I0120 17:24:27.971200 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-s4fbf" podStartSLOduration=2.324711576 podStartE2EDuration="4.971183281s" podCreationTimestamp="2026-01-20 17:24:23 +0000 UTC" firstStartedPulling="2026-01-20 17:24:24.818547696 +0000 UTC m=+2558.578885664" lastFinishedPulling="2026-01-20 17:24:27.465019402 +0000 UTC m=+2561.225357369" observedRunningTime="2026-01-20 17:24:27.969700573 +0000 UTC m=+2561.730038540" watchObservedRunningTime="2026-01-20 17:24:27.971183281 +0000 UTC m=+2561.731521248" Jan 20 17:24:28 crc kubenswrapper[4558]: W0120 17:24:28.556505 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod90c87493_4105_4f33_b9a8_863231d6e367.slice/crio-40c8020ccafd66e61eeaf6101cbb693d12e5286f5b1e205d3e52e93fc55eaa92 WatchSource:0}: Error finding container 40c8020ccafd66e61eeaf6101cbb693d12e5286f5b1e205d3e52e93fc55eaa92: Status 404 returned error can't find the container with id 40c8020ccafd66e61eeaf6101cbb693d12e5286f5b1e205d3e52e93fc55eaa92 Jan 20 17:24:28 crc kubenswrapper[4558]: I0120 17:24:28.563074 4558 kubelet.go:2428] "SyncLoop UPDATE" 
source="api" pods=["openstack-kuttl-tests/nova-cell0-f949-account-create-update-4ndsp"] Jan 20 17:24:28 crc kubenswrapper[4558]: I0120 17:24:28.613685 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-af2b-account-create-update-ckc4c"] Jan 20 17:24:28 crc kubenswrapper[4558]: W0120 17:24:28.619024 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda3e891d2_b4a9_4486_ae23_c3437ae07e01.slice/crio-493d533846f7a31185b53e85b3e4bc17bce5525b55185a1a17516881eec3597d WatchSource:0}: Error finding container 493d533846f7a31185b53e85b3e4bc17bce5525b55185a1a17516881eec3597d: Status 404 returned error can't find the container with id 493d533846f7a31185b53e85b3e4bc17bce5525b55185a1a17516881eec3597d Jan 20 17:24:28 crc kubenswrapper[4558]: I0120 17:24:28.957375 4558 generic.go:334] "Generic (PLEG): container finished" podID="f59de47e-9a81-41d9-bc7c-fd5ff6723291" containerID="5df938cc30440b9b047183119d309af1693ed6d0bba0b1b9b61720b412b8c825" exitCode=0 Jan 20 17:24:28 crc kubenswrapper[4558]: I0120 17:24:28.957441 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-db-create-2c6lh" event={"ID":"f59de47e-9a81-41d9-bc7c-fd5ff6723291","Type":"ContainerDied","Data":"5df938cc30440b9b047183119d309af1693ed6d0bba0b1b9b61720b412b8c825"} Jan 20 17:24:28 crc kubenswrapper[4558]: I0120 17:24:28.959754 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-af2b-account-create-update-ckc4c" event={"ID":"a3e891d2-b4a9-4486-ae23-c3437ae07e01","Type":"ContainerStarted","Data":"e19a292a92ee00b776620f6d6da97636958e79b7d260ef36ae81df04a9975fff"} Jan 20 17:24:28 crc kubenswrapper[4558]: I0120 17:24:28.959815 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-af2b-account-create-update-ckc4c" event={"ID":"a3e891d2-b4a9-4486-ae23-c3437ae07e01","Type":"ContainerStarted","Data":"493d533846f7a31185b53e85b3e4bc17bce5525b55185a1a17516881eec3597d"} Jan 20 17:24:28 crc kubenswrapper[4558]: I0120 17:24:28.961524 4558 generic.go:334] "Generic (PLEG): container finished" podID="ebbaaabe-8492-4108-981c-0c00cf1561f0" containerID="86a4a8a8eeeac8eb783b7fe9897a2f20ca9d3bc70a3d18047e9800db618ef2ad" exitCode=0 Jan 20 17:24:28 crc kubenswrapper[4558]: I0120 17:24:28.961593 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-db-create-pxpgt" event={"ID":"ebbaaabe-8492-4108-981c-0c00cf1561f0","Type":"ContainerDied","Data":"86a4a8a8eeeac8eb783b7fe9897a2f20ca9d3bc70a3d18047e9800db618ef2ad"} Jan 20 17:24:28 crc kubenswrapper[4558]: I0120 17:24:28.961617 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-db-create-pxpgt" event={"ID":"ebbaaabe-8492-4108-981c-0c00cf1561f0","Type":"ContainerStarted","Data":"e25a01e84c13e96b6f12b89ab0df1d4b69ca3fb26e6012a7325dcfd4f68f116a"} Jan 20 17:24:28 crc kubenswrapper[4558]: I0120 17:24:28.963229 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-f949-account-create-update-4ndsp" event={"ID":"90c87493-4105-4f33-b9a8-863231d6e367","Type":"ContainerStarted","Data":"f3cfae4fc913da224a21d06bfc724e3180240bdec6193cf871256739ecf1d2d6"} Jan 20 17:24:28 crc kubenswrapper[4558]: I0120 17:24:28.963283 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-f949-account-create-update-4ndsp" 
event={"ID":"90c87493-4105-4f33-b9a8-863231d6e367","Type":"ContainerStarted","Data":"40c8020ccafd66e61eeaf6101cbb693d12e5286f5b1e205d3e52e93fc55eaa92"} Jan 20 17:24:28 crc kubenswrapper[4558]: I0120 17:24:28.965011 4558 generic.go:334] "Generic (PLEG): container finished" podID="730a8fa5-7c66-41ff-be4b-f170d49a6b0b" containerID="d140d7cfdbff94ef6b5907046676f6d45c60f5cd711589f64e1ce555bd15a8a6" exitCode=0 Jan 20 17:24:28 crc kubenswrapper[4558]: I0120 17:24:28.965094 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-db-create-f6jhs" event={"ID":"730a8fa5-7c66-41ff-be4b-f170d49a6b0b","Type":"ContainerDied","Data":"d140d7cfdbff94ef6b5907046676f6d45c60f5cd711589f64e1ce555bd15a8a6"} Jan 20 17:24:28 crc kubenswrapper[4558]: I0120 17:24:28.967745 4558 generic.go:334] "Generic (PLEG): container finished" podID="ca42fa74-258c-428a-83e0-0410cd4f2961" containerID="e208edb098530123771487358f9d48b2af80ac41b80b4e499a06bbe58e7eec93" exitCode=0 Jan 20 17:24:28 crc kubenswrapper[4558]: I0120 17:24:28.967824 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-a772-account-create-update-sfz2c" event={"ID":"ca42fa74-258c-428a-83e0-0410cd4f2961","Type":"ContainerDied","Data":"e208edb098530123771487358f9d48b2af80ac41b80b4e499a06bbe58e7eec93"} Jan 20 17:24:29 crc kubenswrapper[4558]: I0120 17:24:29.013268 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell0-f949-account-create-update-4ndsp" podStartSLOduration=2.013252565 podStartE2EDuration="2.013252565s" podCreationTimestamp="2026-01-20 17:24:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:24:29.007023929 +0000 UTC m=+2562.767361896" watchObservedRunningTime="2026-01-20 17:24:29.013252565 +0000 UTC m=+2562.773590533" Jan 20 17:24:29 crc kubenswrapper[4558]: I0120 17:24:29.036505 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-af2b-account-create-update-ckc4c" podStartSLOduration=2.03649183 podStartE2EDuration="2.03649183s" podCreationTimestamp="2026-01-20 17:24:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:24:29.029471133 +0000 UTC m=+2562.789809101" watchObservedRunningTime="2026-01-20 17:24:29.03649183 +0000 UTC m=+2562.796829797" Jan 20 17:24:29 crc kubenswrapper[4558]: I0120 17:24:29.274158 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-db-create-f6jhs" Jan 20 17:24:29 crc kubenswrapper[4558]: I0120 17:24:29.283928 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/730a8fa5-7c66-41ff-be4b-f170d49a6b0b-operator-scripts\") pod \"730a8fa5-7c66-41ff-be4b-f170d49a6b0b\" (UID: \"730a8fa5-7c66-41ff-be4b-f170d49a6b0b\") " Jan 20 17:24:29 crc kubenswrapper[4558]: I0120 17:24:29.283992 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5kqt6\" (UniqueName: \"kubernetes.io/projected/730a8fa5-7c66-41ff-be4b-f170d49a6b0b-kube-api-access-5kqt6\") pod \"730a8fa5-7c66-41ff-be4b-f170d49a6b0b\" (UID: \"730a8fa5-7c66-41ff-be4b-f170d49a6b0b\") " Jan 20 17:24:29 crc kubenswrapper[4558]: I0120 17:24:29.284986 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/730a8fa5-7c66-41ff-be4b-f170d49a6b0b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "730a8fa5-7c66-41ff-be4b-f170d49a6b0b" (UID: "730a8fa5-7c66-41ff-be4b-f170d49a6b0b"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:24:29 crc kubenswrapper[4558]: I0120 17:24:29.291788 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/730a8fa5-7c66-41ff-be4b-f170d49a6b0b-kube-api-access-5kqt6" (OuterVolumeSpecName: "kube-api-access-5kqt6") pod "730a8fa5-7c66-41ff-be4b-f170d49a6b0b" (UID: "730a8fa5-7c66-41ff-be4b-f170d49a6b0b"). InnerVolumeSpecName "kube-api-access-5kqt6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:24:29 crc kubenswrapper[4558]: I0120 17:24:29.386052 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/730a8fa5-7c66-41ff-be4b-f170d49a6b0b-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:29 crc kubenswrapper[4558]: I0120 17:24:29.386184 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5kqt6\" (UniqueName: \"kubernetes.io/projected/730a8fa5-7c66-41ff-be4b-f170d49a6b0b-kube-api-access-5kqt6\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:29 crc kubenswrapper[4558]: I0120 17:24:29.977974 4558 generic.go:334] "Generic (PLEG): container finished" podID="a3e891d2-b4a9-4486-ae23-c3437ae07e01" containerID="e19a292a92ee00b776620f6d6da97636958e79b7d260ef36ae81df04a9975fff" exitCode=0 Jan 20 17:24:29 crc kubenswrapper[4558]: I0120 17:24:29.978084 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-af2b-account-create-update-ckc4c" event={"ID":"a3e891d2-b4a9-4486-ae23-c3437ae07e01","Type":"ContainerDied","Data":"e19a292a92ee00b776620f6d6da97636958e79b7d260ef36ae81df04a9975fff"} Jan 20 17:24:29 crc kubenswrapper[4558]: I0120 17:24:29.979573 4558 generic.go:334] "Generic (PLEG): container finished" podID="90c87493-4105-4f33-b9a8-863231d6e367" containerID="f3cfae4fc913da224a21d06bfc724e3180240bdec6193cf871256739ecf1d2d6" exitCode=0 Jan 20 17:24:29 crc kubenswrapper[4558]: I0120 17:24:29.979674 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-f949-account-create-update-4ndsp" event={"ID":"90c87493-4105-4f33-b9a8-863231d6e367","Type":"ContainerDied","Data":"f3cfae4fc913da224a21d06bfc724e3180240bdec6193cf871256739ecf1d2d6"} Jan 20 17:24:29 crc kubenswrapper[4558]: I0120 17:24:29.981852 4558 util.go:48] "No ready 
sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-db-create-f6jhs" Jan 20 17:24:29 crc kubenswrapper[4558]: I0120 17:24:29.984315 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-db-create-f6jhs" event={"ID":"730a8fa5-7c66-41ff-be4b-f170d49a6b0b","Type":"ContainerDied","Data":"f0370d6d84ba17295ccbdeb20488d6f25d2eb7e79f6cea79772893ab1f095bb3"} Jan 20 17:24:29 crc kubenswrapper[4558]: I0120 17:24:29.984395 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f0370d6d84ba17295ccbdeb20488d6f25d2eb7e79f6cea79772893ab1f095bb3" Jan 20 17:24:30 crc kubenswrapper[4558]: I0120 17:24:30.149911 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/swift-proxy-7459567f99-gx6dr" Jan 20 17:24:30 crc kubenswrapper[4558]: I0120 17:24:30.156568 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/swift-proxy-7459567f99-gx6dr" Jan 20 17:24:30 crc kubenswrapper[4558]: I0120 17:24:30.240629 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-db-create-pxpgt" Jan 20 17:24:30 crc kubenswrapper[4558]: I0120 17:24:30.406662 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z8rpx\" (UniqueName: \"kubernetes.io/projected/ebbaaabe-8492-4108-981c-0c00cf1561f0-kube-api-access-z8rpx\") pod \"ebbaaabe-8492-4108-981c-0c00cf1561f0\" (UID: \"ebbaaabe-8492-4108-981c-0c00cf1561f0\") " Jan 20 17:24:30 crc kubenswrapper[4558]: I0120 17:24:30.407016 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ebbaaabe-8492-4108-981c-0c00cf1561f0-operator-scripts\") pod \"ebbaaabe-8492-4108-981c-0c00cf1561f0\" (UID: \"ebbaaabe-8492-4108-981c-0c00cf1561f0\") " Jan 20 17:24:30 crc kubenswrapper[4558]: I0120 17:24:30.412707 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ebbaaabe-8492-4108-981c-0c00cf1561f0-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ebbaaabe-8492-4108-981c-0c00cf1561f0" (UID: "ebbaaabe-8492-4108-981c-0c00cf1561f0"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:24:30 crc kubenswrapper[4558]: I0120 17:24:30.414903 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ebbaaabe-8492-4108-981c-0c00cf1561f0-kube-api-access-z8rpx" (OuterVolumeSpecName: "kube-api-access-z8rpx") pod "ebbaaabe-8492-4108-981c-0c00cf1561f0" (UID: "ebbaaabe-8492-4108-981c-0c00cf1561f0"). InnerVolumeSpecName "kube-api-access-z8rpx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:24:30 crc kubenswrapper[4558]: I0120 17:24:30.480773 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-a772-account-create-update-sfz2c" Jan 20 17:24:30 crc kubenswrapper[4558]: I0120 17:24:30.486473 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-db-create-2c6lh" Jan 20 17:24:30 crc kubenswrapper[4558]: I0120 17:24:30.493755 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:30 crc kubenswrapper[4558]: I0120 17:24:30.509687 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ebbaaabe-8492-4108-981c-0c00cf1561f0-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:30 crc kubenswrapper[4558]: I0120 17:24:30.509721 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z8rpx\" (UniqueName: \"kubernetes.io/projected/ebbaaabe-8492-4108-981c-0c00cf1561f0-kube-api-access-z8rpx\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:30 crc kubenswrapper[4558]: I0120 17:24:30.610864 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kj4wk\" (UniqueName: \"kubernetes.io/projected/ca42fa74-258c-428a-83e0-0410cd4f2961-kube-api-access-kj4wk\") pod \"ca42fa74-258c-428a-83e0-0410cd4f2961\" (UID: \"ca42fa74-258c-428a-83e0-0410cd4f2961\") " Jan 20 17:24:30 crc kubenswrapper[4558]: I0120 17:24:30.610955 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cglhv\" (UniqueName: \"kubernetes.io/projected/f59de47e-9a81-41d9-bc7c-fd5ff6723291-kube-api-access-cglhv\") pod \"f59de47e-9a81-41d9-bc7c-fd5ff6723291\" (UID: \"f59de47e-9a81-41d9-bc7c-fd5ff6723291\") " Jan 20 17:24:30 crc kubenswrapper[4558]: I0120 17:24:30.610985 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd-config-data\") pod \"d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd\" (UID: \"d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd\") " Jan 20 17:24:30 crc kubenswrapper[4558]: I0120 17:24:30.611202 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-djvrg\" (UniqueName: \"kubernetes.io/projected/d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd-kube-api-access-djvrg\") pod \"d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd\" (UID: \"d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd\") " Jan 20 17:24:30 crc kubenswrapper[4558]: I0120 17:24:30.611276 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd-scripts\") pod \"d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd\" (UID: \"d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd\") " Jan 20 17:24:30 crc kubenswrapper[4558]: I0120 17:24:30.611300 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f59de47e-9a81-41d9-bc7c-fd5ff6723291-operator-scripts\") pod \"f59de47e-9a81-41d9-bc7c-fd5ff6723291\" (UID: \"f59de47e-9a81-41d9-bc7c-fd5ff6723291\") " Jan 20 17:24:30 crc kubenswrapper[4558]: I0120 17:24:30.611338 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd-sg-core-conf-yaml\") pod \"d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd\" (UID: \"d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd\") " Jan 20 17:24:30 crc kubenswrapper[4558]: I0120 17:24:30.611396 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd-log-httpd\") pod \"d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd\" (UID: \"d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd\") " Jan 20 17:24:30 crc kubenswrapper[4558]: I0120 17:24:30.611453 4558 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd-run-httpd\") pod \"d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd\" (UID: \"d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd\") " Jan 20 17:24:30 crc kubenswrapper[4558]: I0120 17:24:30.611544 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ca42fa74-258c-428a-83e0-0410cd4f2961-operator-scripts\") pod \"ca42fa74-258c-428a-83e0-0410cd4f2961\" (UID: \"ca42fa74-258c-428a-83e0-0410cd4f2961\") " Jan 20 17:24:30 crc kubenswrapper[4558]: I0120 17:24:30.611570 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd-combined-ca-bundle\") pod \"d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd\" (UID: \"d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd\") " Jan 20 17:24:30 crc kubenswrapper[4558]: I0120 17:24:30.612198 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd" (UID: "d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:24:30 crc kubenswrapper[4558]: I0120 17:24:30.612359 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ca42fa74-258c-428a-83e0-0410cd4f2961-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ca42fa74-258c-428a-83e0-0410cd4f2961" (UID: "ca42fa74-258c-428a-83e0-0410cd4f2961"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:24:30 crc kubenswrapper[4558]: I0120 17:24:30.612530 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f59de47e-9a81-41d9-bc7c-fd5ff6723291-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f59de47e-9a81-41d9-bc7c-fd5ff6723291" (UID: "f59de47e-9a81-41d9-bc7c-fd5ff6723291"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:24:30 crc kubenswrapper[4558]: I0120 17:24:30.613154 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd" (UID: "d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:24:30 crc kubenswrapper[4558]: I0120 17:24:30.614666 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f59de47e-9a81-41d9-bc7c-fd5ff6723291-kube-api-access-cglhv" (OuterVolumeSpecName: "kube-api-access-cglhv") pod "f59de47e-9a81-41d9-bc7c-fd5ff6723291" (UID: "f59de47e-9a81-41d9-bc7c-fd5ff6723291"). InnerVolumeSpecName "kube-api-access-cglhv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:24:30 crc kubenswrapper[4558]: I0120 17:24:30.615552 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd-scripts" (OuterVolumeSpecName: "scripts") pod "d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd" (UID: "d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd"). 
InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:24:30 crc kubenswrapper[4558]: I0120 17:24:30.616133 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ca42fa74-258c-428a-83e0-0410cd4f2961-kube-api-access-kj4wk" (OuterVolumeSpecName: "kube-api-access-kj4wk") pod "ca42fa74-258c-428a-83e0-0410cd4f2961" (UID: "ca42fa74-258c-428a-83e0-0410cd4f2961"). InnerVolumeSpecName "kube-api-access-kj4wk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:24:30 crc kubenswrapper[4558]: I0120 17:24:30.616977 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd-kube-api-access-djvrg" (OuterVolumeSpecName: "kube-api-access-djvrg") pod "d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd" (UID: "d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd"). InnerVolumeSpecName "kube-api-access-djvrg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:24:30 crc kubenswrapper[4558]: I0120 17:24:30.634968 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd" (UID: "d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:24:30 crc kubenswrapper[4558]: I0120 17:24:30.673457 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd" (UID: "d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:24:30 crc kubenswrapper[4558]: I0120 17:24:30.687728 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd-config-data" (OuterVolumeSpecName: "config-data") pod "d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd" (UID: "d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:24:30 crc kubenswrapper[4558]: I0120 17:24:30.713188 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kj4wk\" (UniqueName: \"kubernetes.io/projected/ca42fa74-258c-428a-83e0-0410cd4f2961-kube-api-access-kj4wk\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:30 crc kubenswrapper[4558]: I0120 17:24:30.713217 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cglhv\" (UniqueName: \"kubernetes.io/projected/f59de47e-9a81-41d9-bc7c-fd5ff6723291-kube-api-access-cglhv\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:30 crc kubenswrapper[4558]: I0120 17:24:30.713229 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:30 crc kubenswrapper[4558]: I0120 17:24:30.713241 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-djvrg\" (UniqueName: \"kubernetes.io/projected/d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd-kube-api-access-djvrg\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:30 crc kubenswrapper[4558]: I0120 17:24:30.713253 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:30 crc kubenswrapper[4558]: I0120 17:24:30.713265 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f59de47e-9a81-41d9-bc7c-fd5ff6723291-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:30 crc kubenswrapper[4558]: I0120 17:24:30.713274 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:30 crc kubenswrapper[4558]: I0120 17:24:30.713282 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:30 crc kubenswrapper[4558]: I0120 17:24:30.713291 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:30 crc kubenswrapper[4558]: I0120 17:24:30.713299 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ca42fa74-258c-428a-83e0-0410cd4f2961-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:30 crc kubenswrapper[4558]: I0120 17:24:30.713308 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:30 crc kubenswrapper[4558]: I0120 17:24:30.874810 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:24:30 crc kubenswrapper[4558]: I0120 17:24:30.875490 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="05e1a302-040a-46ca-a3c2-e4c8c6390091" containerName="glance-httpd" containerID="cri-o://8ba8938da0a00c9308d3eea1def52748e499a64ab544e915a92e8d41b1af332b" 
gracePeriod=30 Jan 20 17:24:30 crc kubenswrapper[4558]: I0120 17:24:30.875326 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="05e1a302-040a-46ca-a3c2-e4c8c6390091" containerName="glance-log" containerID="cri-o://8059066dd8ca082e955bcb29f49441ed9509119295b8ded32faebb8c4410e534" gracePeriod=30 Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.000473 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-a772-account-create-update-sfz2c" event={"ID":"ca42fa74-258c-428a-83e0-0410cd4f2961","Type":"ContainerDied","Data":"4e066d8ac49bf9f6853eee6a02eeb3ae300f6e682255a3d4f212b5a9f92a185b"} Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.000830 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4e066d8ac49bf9f6853eee6a02eeb3ae300f6e682255a3d4f212b5a9f92a185b" Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.000546 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-a772-account-create-update-sfz2c" Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.002789 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-db-create-2c6lh" event={"ID":"f59de47e-9a81-41d9-bc7c-fd5ff6723291","Type":"ContainerDied","Data":"bf789c7ba1fd3d873c553316e077914a3be7e06a3589d12ba5a30d4e51f87b2c"} Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.002844 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bf789c7ba1fd3d873c553316e077914a3be7e06a3589d12ba5a30d4e51f87b2c" Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.002842 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-db-create-2c6lh" Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.006464 4558 generic.go:334] "Generic (PLEG): container finished" podID="d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd" containerID="1b4e6a1e48903d69f46a2a4b2a8193e676013ecda2045e0f0db4923d287957b1" exitCode=0 Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.006540 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd","Type":"ContainerDied","Data":"1b4e6a1e48903d69f46a2a4b2a8193e676013ecda2045e0f0db4923d287957b1"} Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.006563 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd","Type":"ContainerDied","Data":"a13349c6ad853851840538d5939251323a25413ba127237b96c37ec9568d3a1d"} Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.006569 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.006588 4558 scope.go:117] "RemoveContainer" containerID="1f1826fa85536c90dc6a1fb82f634d04594f8e977b10383c34ca6e82e13e96b3" Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.009635 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-db-create-pxpgt" Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.009755 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-db-create-pxpgt" event={"ID":"ebbaaabe-8492-4108-981c-0c00cf1561f0","Type":"ContainerDied","Data":"e25a01e84c13e96b6f12b89ab0df1d4b69ca3fb26e6012a7325dcfd4f68f116a"} Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.009850 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e25a01e84c13e96b6f12b89ab0df1d4b69ca3fb26e6012a7325dcfd4f68f116a" Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.046063 4558 scope.go:117] "RemoveContainer" containerID="3dd48463ebaf0c4cc73e6dd204f9dc91627d23f0e17873f60b0ab36ca889d105" Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.056324 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.068999 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.073765 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:24:31 crc kubenswrapper[4558]: E0120 17:24:31.074261 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd" containerName="ceilometer-notification-agent" Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.074281 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd" containerName="ceilometer-notification-agent" Jan 20 17:24:31 crc kubenswrapper[4558]: E0120 17:24:31.074304 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f59de47e-9a81-41d9-bc7c-fd5ff6723291" containerName="mariadb-database-create" Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.074311 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f59de47e-9a81-41d9-bc7c-fd5ff6723291" containerName="mariadb-database-create" Jan 20 17:24:31 crc kubenswrapper[4558]: E0120 17:24:31.074318 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="730a8fa5-7c66-41ff-be4b-f170d49a6b0b" containerName="mariadb-database-create" Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.074324 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="730a8fa5-7c66-41ff-be4b-f170d49a6b0b" containerName="mariadb-database-create" Jan 20 17:24:31 crc kubenswrapper[4558]: E0120 17:24:31.074351 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd" containerName="sg-core" Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.074367 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd" containerName="sg-core" Jan 20 17:24:31 crc kubenswrapper[4558]: E0120 17:24:31.074381 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca42fa74-258c-428a-83e0-0410cd4f2961" containerName="mariadb-account-create-update" Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.074388 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca42fa74-258c-428a-83e0-0410cd4f2961" containerName="mariadb-account-create-update" Jan 20 17:24:31 crc kubenswrapper[4558]: E0120 17:24:31.074399 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ebbaaabe-8492-4108-981c-0c00cf1561f0" containerName="mariadb-database-create" Jan 20 
17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.074406 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ebbaaabe-8492-4108-981c-0c00cf1561f0" containerName="mariadb-database-create" Jan 20 17:24:31 crc kubenswrapper[4558]: E0120 17:24:31.074420 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd" containerName="ceilometer-central-agent" Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.074431 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd" containerName="ceilometer-central-agent" Jan 20 17:24:31 crc kubenswrapper[4558]: E0120 17:24:31.074442 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd" containerName="proxy-httpd" Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.074447 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd" containerName="proxy-httpd" Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.074638 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd" containerName="ceilometer-notification-agent" Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.074663 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ebbaaabe-8492-4108-981c-0c00cf1561f0" containerName="mariadb-database-create" Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.074674 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd" containerName="proxy-httpd" Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.074684 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd" containerName="sg-core" Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.074693 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="730a8fa5-7c66-41ff-be4b-f170d49a6b0b" containerName="mariadb-database-create" Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.074707 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ca42fa74-258c-428a-83e0-0410cd4f2961" containerName="mariadb-account-create-update" Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.074718 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f59de47e-9a81-41d9-bc7c-fd5ff6723291" containerName="mariadb-database-create" Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.074727 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd" containerName="ceilometer-central-agent" Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.076521 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.078463 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.079472 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.079959 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.099384 4558 scope.go:117] "RemoveContainer" containerID="1b4e6a1e48903d69f46a2a4b2a8193e676013ecda2045e0f0db4923d287957b1" Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.120656 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/901ea0e0-d046-4871-887c-ea58a1d8fcb0-log-httpd\") pod \"ceilometer-0\" (UID: \"901ea0e0-d046-4871-887c-ea58a1d8fcb0\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.120723 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/901ea0e0-d046-4871-887c-ea58a1d8fcb0-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"901ea0e0-d046-4871-887c-ea58a1d8fcb0\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.120804 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f5fmc\" (UniqueName: \"kubernetes.io/projected/901ea0e0-d046-4871-887c-ea58a1d8fcb0-kube-api-access-f5fmc\") pod \"ceilometer-0\" (UID: \"901ea0e0-d046-4871-887c-ea58a1d8fcb0\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.120839 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/901ea0e0-d046-4871-887c-ea58a1d8fcb0-config-data\") pod \"ceilometer-0\" (UID: \"901ea0e0-d046-4871-887c-ea58a1d8fcb0\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.120865 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/901ea0e0-d046-4871-887c-ea58a1d8fcb0-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"901ea0e0-d046-4871-887c-ea58a1d8fcb0\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.120900 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/901ea0e0-d046-4871-887c-ea58a1d8fcb0-scripts\") pod \"ceilometer-0\" (UID: \"901ea0e0-d046-4871-887c-ea58a1d8fcb0\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.120915 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/901ea0e0-d046-4871-887c-ea58a1d8fcb0-run-httpd\") pod \"ceilometer-0\" (UID: \"901ea0e0-d046-4871-887c-ea58a1d8fcb0\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.122457 4558 scope.go:117] "RemoveContainer" 
containerID="ef796dfab67a8244c396cd18509d66bae814af609de469193e0d1ada820bdf4c" Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.141802 4558 scope.go:117] "RemoveContainer" containerID="1f1826fa85536c90dc6a1fb82f634d04594f8e977b10383c34ca6e82e13e96b3" Jan 20 17:24:31 crc kubenswrapper[4558]: E0120 17:24:31.142327 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1f1826fa85536c90dc6a1fb82f634d04594f8e977b10383c34ca6e82e13e96b3\": container with ID starting with 1f1826fa85536c90dc6a1fb82f634d04594f8e977b10383c34ca6e82e13e96b3 not found: ID does not exist" containerID="1f1826fa85536c90dc6a1fb82f634d04594f8e977b10383c34ca6e82e13e96b3" Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.142390 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1f1826fa85536c90dc6a1fb82f634d04594f8e977b10383c34ca6e82e13e96b3"} err="failed to get container status \"1f1826fa85536c90dc6a1fb82f634d04594f8e977b10383c34ca6e82e13e96b3\": rpc error: code = NotFound desc = could not find container \"1f1826fa85536c90dc6a1fb82f634d04594f8e977b10383c34ca6e82e13e96b3\": container with ID starting with 1f1826fa85536c90dc6a1fb82f634d04594f8e977b10383c34ca6e82e13e96b3 not found: ID does not exist" Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.142421 4558 scope.go:117] "RemoveContainer" containerID="3dd48463ebaf0c4cc73e6dd204f9dc91627d23f0e17873f60b0ab36ca889d105" Jan 20 17:24:31 crc kubenswrapper[4558]: E0120 17:24:31.142752 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3dd48463ebaf0c4cc73e6dd204f9dc91627d23f0e17873f60b0ab36ca889d105\": container with ID starting with 3dd48463ebaf0c4cc73e6dd204f9dc91627d23f0e17873f60b0ab36ca889d105 not found: ID does not exist" containerID="3dd48463ebaf0c4cc73e6dd204f9dc91627d23f0e17873f60b0ab36ca889d105" Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.142798 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3dd48463ebaf0c4cc73e6dd204f9dc91627d23f0e17873f60b0ab36ca889d105"} err="failed to get container status \"3dd48463ebaf0c4cc73e6dd204f9dc91627d23f0e17873f60b0ab36ca889d105\": rpc error: code = NotFound desc = could not find container \"3dd48463ebaf0c4cc73e6dd204f9dc91627d23f0e17873f60b0ab36ca889d105\": container with ID starting with 3dd48463ebaf0c4cc73e6dd204f9dc91627d23f0e17873f60b0ab36ca889d105 not found: ID does not exist" Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.142825 4558 scope.go:117] "RemoveContainer" containerID="1b4e6a1e48903d69f46a2a4b2a8193e676013ecda2045e0f0db4923d287957b1" Jan 20 17:24:31 crc kubenswrapper[4558]: E0120 17:24:31.143176 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1b4e6a1e48903d69f46a2a4b2a8193e676013ecda2045e0f0db4923d287957b1\": container with ID starting with 1b4e6a1e48903d69f46a2a4b2a8193e676013ecda2045e0f0db4923d287957b1 not found: ID does not exist" containerID="1b4e6a1e48903d69f46a2a4b2a8193e676013ecda2045e0f0db4923d287957b1" Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.143205 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1b4e6a1e48903d69f46a2a4b2a8193e676013ecda2045e0f0db4923d287957b1"} err="failed to get container status \"1b4e6a1e48903d69f46a2a4b2a8193e676013ecda2045e0f0db4923d287957b1\": rpc error: code = 
NotFound desc = could not find container \"1b4e6a1e48903d69f46a2a4b2a8193e676013ecda2045e0f0db4923d287957b1\": container with ID starting with 1b4e6a1e48903d69f46a2a4b2a8193e676013ecda2045e0f0db4923d287957b1 not found: ID does not exist" Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.143220 4558 scope.go:117] "RemoveContainer" containerID="ef796dfab67a8244c396cd18509d66bae814af609de469193e0d1ada820bdf4c" Jan 20 17:24:31 crc kubenswrapper[4558]: E0120 17:24:31.143473 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ef796dfab67a8244c396cd18509d66bae814af609de469193e0d1ada820bdf4c\": container with ID starting with ef796dfab67a8244c396cd18509d66bae814af609de469193e0d1ada820bdf4c not found: ID does not exist" containerID="ef796dfab67a8244c396cd18509d66bae814af609de469193e0d1ada820bdf4c" Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.143497 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ef796dfab67a8244c396cd18509d66bae814af609de469193e0d1ada820bdf4c"} err="failed to get container status \"ef796dfab67a8244c396cd18509d66bae814af609de469193e0d1ada820bdf4c\": rpc error: code = NotFound desc = could not find container \"ef796dfab67a8244c396cd18509d66bae814af609de469193e0d1ada820bdf4c\": container with ID starting with ef796dfab67a8244c396cd18509d66bae814af609de469193e0d1ada820bdf4c not found: ID does not exist" Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.222724 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/901ea0e0-d046-4871-887c-ea58a1d8fcb0-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"901ea0e0-d046-4871-887c-ea58a1d8fcb0\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.222877 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f5fmc\" (UniqueName: \"kubernetes.io/projected/901ea0e0-d046-4871-887c-ea58a1d8fcb0-kube-api-access-f5fmc\") pod \"ceilometer-0\" (UID: \"901ea0e0-d046-4871-887c-ea58a1d8fcb0\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.222939 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/901ea0e0-d046-4871-887c-ea58a1d8fcb0-config-data\") pod \"ceilometer-0\" (UID: \"901ea0e0-d046-4871-887c-ea58a1d8fcb0\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.222988 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/901ea0e0-d046-4871-887c-ea58a1d8fcb0-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"901ea0e0-d046-4871-887c-ea58a1d8fcb0\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.223048 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/901ea0e0-d046-4871-887c-ea58a1d8fcb0-scripts\") pod \"ceilometer-0\" (UID: \"901ea0e0-d046-4871-887c-ea58a1d8fcb0\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.223073 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/901ea0e0-d046-4871-887c-ea58a1d8fcb0-run-httpd\") pod \"ceilometer-0\" (UID: \"901ea0e0-d046-4871-887c-ea58a1d8fcb0\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.223235 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/901ea0e0-d046-4871-887c-ea58a1d8fcb0-log-httpd\") pod \"ceilometer-0\" (UID: \"901ea0e0-d046-4871-887c-ea58a1d8fcb0\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.224049 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/901ea0e0-d046-4871-887c-ea58a1d8fcb0-run-httpd\") pod \"ceilometer-0\" (UID: \"901ea0e0-d046-4871-887c-ea58a1d8fcb0\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.224316 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/901ea0e0-d046-4871-887c-ea58a1d8fcb0-log-httpd\") pod \"ceilometer-0\" (UID: \"901ea0e0-d046-4871-887c-ea58a1d8fcb0\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.231838 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/901ea0e0-d046-4871-887c-ea58a1d8fcb0-scripts\") pod \"ceilometer-0\" (UID: \"901ea0e0-d046-4871-887c-ea58a1d8fcb0\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.235346 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/901ea0e0-d046-4871-887c-ea58a1d8fcb0-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"901ea0e0-d046-4871-887c-ea58a1d8fcb0\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.240077 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/901ea0e0-d046-4871-887c-ea58a1d8fcb0-config-data\") pod \"ceilometer-0\" (UID: \"901ea0e0-d046-4871-887c-ea58a1d8fcb0\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.240373 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/901ea0e0-d046-4871-887c-ea58a1d8fcb0-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"901ea0e0-d046-4871-887c-ea58a1d8fcb0\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.255768 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f5fmc\" (UniqueName: \"kubernetes.io/projected/901ea0e0-d046-4871-887c-ea58a1d8fcb0-kube-api-access-f5fmc\") pod \"ceilometer-0\" (UID: \"901ea0e0-d046-4871-887c-ea58a1d8fcb0\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.387216 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-af2b-account-create-update-ckc4c" Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.391070 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.445790 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkf47\" (UniqueName: \"kubernetes.io/projected/a3e891d2-b4a9-4486-ae23-c3437ae07e01-kube-api-access-zkf47\") pod \"a3e891d2-b4a9-4486-ae23-c3437ae07e01\" (UID: \"a3e891d2-b4a9-4486-ae23-c3437ae07e01\") " Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.445935 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a3e891d2-b4a9-4486-ae23-c3437ae07e01-operator-scripts\") pod \"a3e891d2-b4a9-4486-ae23-c3437ae07e01\" (UID: \"a3e891d2-b4a9-4486-ae23-c3437ae07e01\") " Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.453186 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-f949-account-create-update-4ndsp" Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.453253 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a3e891d2-b4a9-4486-ae23-c3437ae07e01-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a3e891d2-b4a9-4486-ae23-c3437ae07e01" (UID: "a3e891d2-b4a9-4486-ae23-c3437ae07e01"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.460708 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a3e891d2-b4a9-4486-ae23-c3437ae07e01-kube-api-access-zkf47" (OuterVolumeSpecName: "kube-api-access-zkf47") pod "a3e891d2-b4a9-4486-ae23-c3437ae07e01" (UID: "a3e891d2-b4a9-4486-ae23-c3437ae07e01"). InnerVolumeSpecName "kube-api-access-zkf47". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.558447 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hk6mq\" (UniqueName: \"kubernetes.io/projected/90c87493-4105-4f33-b9a8-863231d6e367-kube-api-access-hk6mq\") pod \"90c87493-4105-4f33-b9a8-863231d6e367\" (UID: \"90c87493-4105-4f33-b9a8-863231d6e367\") " Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.558509 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/90c87493-4105-4f33-b9a8-863231d6e367-operator-scripts\") pod \"90c87493-4105-4f33-b9a8-863231d6e367\" (UID: \"90c87493-4105-4f33-b9a8-863231d6e367\") " Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.559090 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkf47\" (UniqueName: \"kubernetes.io/projected/a3e891d2-b4a9-4486-ae23-c3437ae07e01-kube-api-access-zkf47\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.559111 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a3e891d2-b4a9-4486-ae23-c3437ae07e01-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.559516 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/90c87493-4105-4f33-b9a8-863231d6e367-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "90c87493-4105-4f33-b9a8-863231d6e367" (UID: "90c87493-4105-4f33-b9a8-863231d6e367"). 
InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.562364 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/90c87493-4105-4f33-b9a8-863231d6e367-kube-api-access-hk6mq" (OuterVolumeSpecName: "kube-api-access-hk6mq") pod "90c87493-4105-4f33-b9a8-863231d6e367" (UID: "90c87493-4105-4f33-b9a8-863231d6e367"). InnerVolumeSpecName "kube-api-access-hk6mq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.663000 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hk6mq\" (UniqueName: \"kubernetes.io/projected/90c87493-4105-4f33-b9a8-863231d6e367-kube-api-access-hk6mq\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.663046 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/90c87493-4105-4f33-b9a8-863231d6e367-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.775263 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.775675 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="f2f1744d-79dd-43bc-8bea-93e00672c805" containerName="glance-log" containerID="cri-o://86863d93cda6303d400888f0fe4d36dd71ee9de6e0a32e2a3f3d124d43b93601" gracePeriod=30 Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.775743 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="f2f1744d-79dd-43bc-8bea-93e00672c805" containerName="glance-httpd" containerID="cri-o://929e11abc56a6402a23f271e8df464870a363c2850f358242cc4b22371fc6ec0" gracePeriod=30 Jan 20 17:24:31 crc kubenswrapper[4558]: I0120 17:24:31.900911 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:24:31 crc kubenswrapper[4558]: W0120 17:24:31.902411 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod901ea0e0_d046_4871_887c_ea58a1d8fcb0.slice/crio-9b495b6fd651da68ce9a30d63f307c50fe0a64582bbe3e8cf1ee36888205b947 WatchSource:0}: Error finding container 9b495b6fd651da68ce9a30d63f307c50fe0a64582bbe3e8cf1ee36888205b947: Status 404 returned error can't find the container with id 9b495b6fd651da68ce9a30d63f307c50fe0a64582bbe3e8cf1ee36888205b947 Jan 20 17:24:32 crc kubenswrapper[4558]: I0120 17:24:32.020371 4558 generic.go:334] "Generic (PLEG): container finished" podID="f2f1744d-79dd-43bc-8bea-93e00672c805" containerID="86863d93cda6303d400888f0fe4d36dd71ee9de6e0a32e2a3f3d124d43b93601" exitCode=143 Jan 20 17:24:32 crc kubenswrapper[4558]: I0120 17:24:32.020435 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"f2f1744d-79dd-43bc-8bea-93e00672c805","Type":"ContainerDied","Data":"86863d93cda6303d400888f0fe4d36dd71ee9de6e0a32e2a3f3d124d43b93601"} Jan 20 17:24:32 crc kubenswrapper[4558]: I0120 17:24:32.021537 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" 
event={"ID":"901ea0e0-d046-4871-887c-ea58a1d8fcb0","Type":"ContainerStarted","Data":"9b495b6fd651da68ce9a30d63f307c50fe0a64582bbe3e8cf1ee36888205b947"} Jan 20 17:24:32 crc kubenswrapper[4558]: I0120 17:24:32.023946 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-f949-account-create-update-4ndsp" event={"ID":"90c87493-4105-4f33-b9a8-863231d6e367","Type":"ContainerDied","Data":"40c8020ccafd66e61eeaf6101cbb693d12e5286f5b1e205d3e52e93fc55eaa92"} Jan 20 17:24:32 crc kubenswrapper[4558]: I0120 17:24:32.024002 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="40c8020ccafd66e61eeaf6101cbb693d12e5286f5b1e205d3e52e93fc55eaa92" Jan 20 17:24:32 crc kubenswrapper[4558]: I0120 17:24:32.023949 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-f949-account-create-update-4ndsp" Jan 20 17:24:32 crc kubenswrapper[4558]: I0120 17:24:32.025972 4558 generic.go:334] "Generic (PLEG): container finished" podID="05e1a302-040a-46ca-a3c2-e4c8c6390091" containerID="8059066dd8ca082e955bcb29f49441ed9509119295b8ded32faebb8c4410e534" exitCode=143 Jan 20 17:24:32 crc kubenswrapper[4558]: I0120 17:24:32.026033 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"05e1a302-040a-46ca-a3c2-e4c8c6390091","Type":"ContainerDied","Data":"8059066dd8ca082e955bcb29f49441ed9509119295b8ded32faebb8c4410e534"} Jan 20 17:24:32 crc kubenswrapper[4558]: I0120 17:24:32.027746 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-af2b-account-create-update-ckc4c" event={"ID":"a3e891d2-b4a9-4486-ae23-c3437ae07e01","Type":"ContainerDied","Data":"493d533846f7a31185b53e85b3e4bc17bce5525b55185a1a17516881eec3597d"} Jan 20 17:24:32 crc kubenswrapper[4558]: I0120 17:24:32.027784 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="493d533846f7a31185b53e85b3e4bc17bce5525b55185a1a17516881eec3597d" Jan 20 17:24:32 crc kubenswrapper[4558]: I0120 17:24:32.027842 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-af2b-account-create-update-ckc4c" Jan 20 17:24:32 crc kubenswrapper[4558]: I0120 17:24:32.192754 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:24:32 crc kubenswrapper[4558]: I0120 17:24:32.400680 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-db-sync-sxc6v"] Jan 20 17:24:32 crc kubenswrapper[4558]: E0120 17:24:32.401065 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3e891d2-b4a9-4486-ae23-c3437ae07e01" containerName="mariadb-account-create-update" Jan 20 17:24:32 crc kubenswrapper[4558]: I0120 17:24:32.401087 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3e891d2-b4a9-4486-ae23-c3437ae07e01" containerName="mariadb-account-create-update" Jan 20 17:24:32 crc kubenswrapper[4558]: E0120 17:24:32.401133 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="90c87493-4105-4f33-b9a8-863231d6e367" containerName="mariadb-account-create-update" Jan 20 17:24:32 crc kubenswrapper[4558]: I0120 17:24:32.401140 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="90c87493-4105-4f33-b9a8-863231d6e367" containerName="mariadb-account-create-update" Jan 20 17:24:32 crc kubenswrapper[4558]: I0120 17:24:32.401334 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a3e891d2-b4a9-4486-ae23-c3437ae07e01" containerName="mariadb-account-create-update" Jan 20 17:24:32 crc kubenswrapper[4558]: I0120 17:24:32.401350 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="90c87493-4105-4f33-b9a8-863231d6e367" containerName="mariadb-account-create-update" Jan 20 17:24:32 crc kubenswrapper[4558]: I0120 17:24:32.402048 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-sxc6v" Jan 20 17:24:32 crc kubenswrapper[4558]: I0120 17:24:32.403830 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-conductor-scripts" Jan 20 17:24:32 crc kubenswrapper[4558]: I0120 17:24:32.404012 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-conductor-config-data" Jan 20 17:24:32 crc kubenswrapper[4558]: I0120 17:24:32.404126 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-nova-dockercfg-k8zdz" Jan 20 17:24:32 crc kubenswrapper[4558]: I0120 17:24:32.413108 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-db-sync-sxc6v"] Jan 20 17:24:32 crc kubenswrapper[4558]: I0120 17:24:32.480290 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2760b0e9-ab1f-4fe1-8240-5a638afaef7b-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-sxc6v\" (UID: \"2760b0e9-ab1f-4fe1-8240-5a638afaef7b\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-sxc6v" Jan 20 17:24:32 crc kubenswrapper[4558]: I0120 17:24:32.480379 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2760b0e9-ab1f-4fe1-8240-5a638afaef7b-scripts\") pod \"nova-cell0-conductor-db-sync-sxc6v\" (UID: \"2760b0e9-ab1f-4fe1-8240-5a638afaef7b\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-sxc6v" Jan 20 17:24:32 crc kubenswrapper[4558]: I0120 17:24:32.480521 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-br4dz\" (UniqueName: \"kubernetes.io/projected/2760b0e9-ab1f-4fe1-8240-5a638afaef7b-kube-api-access-br4dz\") pod \"nova-cell0-conductor-db-sync-sxc6v\" (UID: \"2760b0e9-ab1f-4fe1-8240-5a638afaef7b\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-sxc6v" Jan 20 17:24:32 crc kubenswrapper[4558]: I0120 17:24:32.480622 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2760b0e9-ab1f-4fe1-8240-5a638afaef7b-config-data\") pod \"nova-cell0-conductor-db-sync-sxc6v\" (UID: \"2760b0e9-ab1f-4fe1-8240-5a638afaef7b\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-sxc6v" Jan 20 17:24:32 crc kubenswrapper[4558]: I0120 17:24:32.582453 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2760b0e9-ab1f-4fe1-8240-5a638afaef7b-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-sxc6v\" (UID: \"2760b0e9-ab1f-4fe1-8240-5a638afaef7b\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-sxc6v" Jan 20 17:24:32 crc kubenswrapper[4558]: I0120 17:24:32.582496 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd" path="/var/lib/kubelet/pods/d4bf950d-0c4c-41a1-b3c8-a82d3a0f65dd/volumes" Jan 20 17:24:32 crc kubenswrapper[4558]: I0120 17:24:32.582514 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2760b0e9-ab1f-4fe1-8240-5a638afaef7b-scripts\") pod \"nova-cell0-conductor-db-sync-sxc6v\" (UID: \"2760b0e9-ab1f-4fe1-8240-5a638afaef7b\") " 
pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-sxc6v" Jan 20 17:24:32 crc kubenswrapper[4558]: I0120 17:24:32.583357 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-br4dz\" (UniqueName: \"kubernetes.io/projected/2760b0e9-ab1f-4fe1-8240-5a638afaef7b-kube-api-access-br4dz\") pod \"nova-cell0-conductor-db-sync-sxc6v\" (UID: \"2760b0e9-ab1f-4fe1-8240-5a638afaef7b\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-sxc6v" Jan 20 17:24:32 crc kubenswrapper[4558]: I0120 17:24:32.583558 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2760b0e9-ab1f-4fe1-8240-5a638afaef7b-config-data\") pod \"nova-cell0-conductor-db-sync-sxc6v\" (UID: \"2760b0e9-ab1f-4fe1-8240-5a638afaef7b\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-sxc6v" Jan 20 17:24:32 crc kubenswrapper[4558]: I0120 17:24:32.588533 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2760b0e9-ab1f-4fe1-8240-5a638afaef7b-scripts\") pod \"nova-cell0-conductor-db-sync-sxc6v\" (UID: \"2760b0e9-ab1f-4fe1-8240-5a638afaef7b\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-sxc6v" Jan 20 17:24:32 crc kubenswrapper[4558]: I0120 17:24:32.589690 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2760b0e9-ab1f-4fe1-8240-5a638afaef7b-config-data\") pod \"nova-cell0-conductor-db-sync-sxc6v\" (UID: \"2760b0e9-ab1f-4fe1-8240-5a638afaef7b\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-sxc6v" Jan 20 17:24:32 crc kubenswrapper[4558]: I0120 17:24:32.595317 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2760b0e9-ab1f-4fe1-8240-5a638afaef7b-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-sxc6v\" (UID: \"2760b0e9-ab1f-4fe1-8240-5a638afaef7b\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-sxc6v" Jan 20 17:24:32 crc kubenswrapper[4558]: I0120 17:24:32.601049 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-br4dz\" (UniqueName: \"kubernetes.io/projected/2760b0e9-ab1f-4fe1-8240-5a638afaef7b-kube-api-access-br4dz\") pod \"nova-cell0-conductor-db-sync-sxc6v\" (UID: \"2760b0e9-ab1f-4fe1-8240-5a638afaef7b\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-sxc6v" Jan 20 17:24:32 crc kubenswrapper[4558]: I0120 17:24:32.727479 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-sxc6v" Jan 20 17:24:33 crc kubenswrapper[4558]: I0120 17:24:33.041526 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"901ea0e0-d046-4871-887c-ea58a1d8fcb0","Type":"ContainerStarted","Data":"95025487af705f3e41bbff12d0547501461d0af163d7b016973ec88665be1dff"} Jan 20 17:24:33 crc kubenswrapper[4558]: I0120 17:24:33.151366 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-db-sync-sxc6v"] Jan 20 17:24:33 crc kubenswrapper[4558]: W0120 17:24:33.158861 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2760b0e9_ab1f_4fe1_8240_5a638afaef7b.slice/crio-f43be13c2818fa2b75424c3ad3cf8e2be7035c170dc7e8378511c8931a5d1f61 WatchSource:0}: Error finding container f43be13c2818fa2b75424c3ad3cf8e2be7035c170dc7e8378511c8931a5d1f61: Status 404 returned error can't find the container with id f43be13c2818fa2b75424c3ad3cf8e2be7035c170dc7e8378511c8931a5d1f61 Jan 20 17:24:33 crc kubenswrapper[4558]: I0120 17:24:33.865900 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-s4fbf" Jan 20 17:24:33 crc kubenswrapper[4558]: I0120 17:24:33.865950 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-s4fbf" Jan 20 17:24:33 crc kubenswrapper[4558]: I0120 17:24:33.904549 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-s4fbf" Jan 20 17:24:34 crc kubenswrapper[4558]: I0120 17:24:34.054419 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"901ea0e0-d046-4871-887c-ea58a1d8fcb0","Type":"ContainerStarted","Data":"7947aee6092ae17cad35b9b67df7da0268558ba84fafb3187d4167522b7e4507"} Jan 20 17:24:34 crc kubenswrapper[4558]: I0120 17:24:34.058964 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-sxc6v" event={"ID":"2760b0e9-ab1f-4fe1-8240-5a638afaef7b","Type":"ContainerStarted","Data":"9b5faec98795b92df82474a1c5193b401d1ccb9d90b9025c1ac178dc22c1725f"} Jan 20 17:24:34 crc kubenswrapper[4558]: I0120 17:24:34.059067 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-sxc6v" event={"ID":"2760b0e9-ab1f-4fe1-8240-5a638afaef7b","Type":"ContainerStarted","Data":"f43be13c2818fa2b75424c3ad3cf8e2be7035c170dc7e8378511c8931a5d1f61"} Jan 20 17:24:34 crc kubenswrapper[4558]: I0120 17:24:34.081798 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-sxc6v" podStartSLOduration=2.081787129 podStartE2EDuration="2.081787129s" podCreationTimestamp="2026-01-20 17:24:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:24:34.077344912 +0000 UTC m=+2567.837682880" watchObservedRunningTime="2026-01-20 17:24:34.081787129 +0000 UTC m=+2567.842125096" Jan 20 17:24:34 crc kubenswrapper[4558]: I0120 17:24:34.105621 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-s4fbf" Jan 20 17:24:34 crc kubenswrapper[4558]: I0120 17:24:34.162485 4558 kubelet.go:2437] "SyncLoop DELETE" 
source="api" pods=["openshift-marketplace/community-operators-s4fbf"] Jan 20 17:24:34 crc kubenswrapper[4558]: I0120 17:24:34.448853 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:24:34 crc kubenswrapper[4558]: I0120 17:24:34.525386 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"05e1a302-040a-46ca-a3c2-e4c8c6390091\" (UID: \"05e1a302-040a-46ca-a3c2-e4c8c6390091\") " Jan 20 17:24:34 crc kubenswrapper[4558]: I0120 17:24:34.525492 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g568f\" (UniqueName: \"kubernetes.io/projected/05e1a302-040a-46ca-a3c2-e4c8c6390091-kube-api-access-g568f\") pod \"05e1a302-040a-46ca-a3c2-e4c8c6390091\" (UID: \"05e1a302-040a-46ca-a3c2-e4c8c6390091\") " Jan 20 17:24:34 crc kubenswrapper[4558]: I0120 17:24:34.525550 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05e1a302-040a-46ca-a3c2-e4c8c6390091-combined-ca-bundle\") pod \"05e1a302-040a-46ca-a3c2-e4c8c6390091\" (UID: \"05e1a302-040a-46ca-a3c2-e4c8c6390091\") " Jan 20 17:24:34 crc kubenswrapper[4558]: I0120 17:24:34.525594 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/05e1a302-040a-46ca-a3c2-e4c8c6390091-httpd-run\") pod \"05e1a302-040a-46ca-a3c2-e4c8c6390091\" (UID: \"05e1a302-040a-46ca-a3c2-e4c8c6390091\") " Jan 20 17:24:34 crc kubenswrapper[4558]: I0120 17:24:34.525620 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/05e1a302-040a-46ca-a3c2-e4c8c6390091-logs\") pod \"05e1a302-040a-46ca-a3c2-e4c8c6390091\" (UID: \"05e1a302-040a-46ca-a3c2-e4c8c6390091\") " Jan 20 17:24:34 crc kubenswrapper[4558]: I0120 17:24:34.525719 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05e1a302-040a-46ca-a3c2-e4c8c6390091-config-data\") pod \"05e1a302-040a-46ca-a3c2-e4c8c6390091\" (UID: \"05e1a302-040a-46ca-a3c2-e4c8c6390091\") " Jan 20 17:24:34 crc kubenswrapper[4558]: I0120 17:24:34.525822 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/05e1a302-040a-46ca-a3c2-e4c8c6390091-public-tls-certs\") pod \"05e1a302-040a-46ca-a3c2-e4c8c6390091\" (UID: \"05e1a302-040a-46ca-a3c2-e4c8c6390091\") " Jan 20 17:24:34 crc kubenswrapper[4558]: I0120 17:24:34.525843 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/05e1a302-040a-46ca-a3c2-e4c8c6390091-scripts\") pod \"05e1a302-040a-46ca-a3c2-e4c8c6390091\" (UID: \"05e1a302-040a-46ca-a3c2-e4c8c6390091\") " Jan 20 17:24:34 crc kubenswrapper[4558]: I0120 17:24:34.530553 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/05e1a302-040a-46ca-a3c2-e4c8c6390091-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "05e1a302-040a-46ca-a3c2-e4c8c6390091" (UID: "05e1a302-040a-46ca-a3c2-e4c8c6390091"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:24:34 crc kubenswrapper[4558]: I0120 17:24:34.543618 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/05e1a302-040a-46ca-a3c2-e4c8c6390091-logs" (OuterVolumeSpecName: "logs") pod "05e1a302-040a-46ca-a3c2-e4c8c6390091" (UID: "05e1a302-040a-46ca-a3c2-e4c8c6390091"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:24:34 crc kubenswrapper[4558]: I0120 17:24:34.543755 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/05e1a302-040a-46ca-a3c2-e4c8c6390091-scripts" (OuterVolumeSpecName: "scripts") pod "05e1a302-040a-46ca-a3c2-e4c8c6390091" (UID: "05e1a302-040a-46ca-a3c2-e4c8c6390091"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:24:34 crc kubenswrapper[4558]: I0120 17:24:34.549253 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage14-crc" (OuterVolumeSpecName: "glance") pod "05e1a302-040a-46ca-a3c2-e4c8c6390091" (UID: "05e1a302-040a-46ca-a3c2-e4c8c6390091"). InnerVolumeSpecName "local-storage14-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:24:34 crc kubenswrapper[4558]: I0120 17:24:34.550144 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/05e1a302-040a-46ca-a3c2-e4c8c6390091-kube-api-access-g568f" (OuterVolumeSpecName: "kube-api-access-g568f") pod "05e1a302-040a-46ca-a3c2-e4c8c6390091" (UID: "05e1a302-040a-46ca-a3c2-e4c8c6390091"). InnerVolumeSpecName "kube-api-access-g568f". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:24:34 crc kubenswrapper[4558]: I0120 17:24:34.602969 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/05e1a302-040a-46ca-a3c2-e4c8c6390091-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "05e1a302-040a-46ca-a3c2-e4c8c6390091" (UID: "05e1a302-040a-46ca-a3c2-e4c8c6390091"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:24:34 crc kubenswrapper[4558]: I0120 17:24:34.603353 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/05e1a302-040a-46ca-a3c2-e4c8c6390091-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "05e1a302-040a-46ca-a3c2-e4c8c6390091" (UID: "05e1a302-040a-46ca-a3c2-e4c8c6390091"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:24:34 crc kubenswrapper[4558]: I0120 17:24:34.624281 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/05e1a302-040a-46ca-a3c2-e4c8c6390091-config-data" (OuterVolumeSpecName: "config-data") pod "05e1a302-040a-46ca-a3c2-e4c8c6390091" (UID: "05e1a302-040a-46ca-a3c2-e4c8c6390091"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:24:34 crc kubenswrapper[4558]: I0120 17:24:34.629268 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/05e1a302-040a-46ca-a3c2-e4c8c6390091-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:34 crc kubenswrapper[4558]: I0120 17:24:34.629293 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/05e1a302-040a-46ca-a3c2-e4c8c6390091-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:34 crc kubenswrapper[4558]: I0120 17:24:34.629328 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") on node \"crc\" " Jan 20 17:24:34 crc kubenswrapper[4558]: I0120 17:24:34.629343 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g568f\" (UniqueName: \"kubernetes.io/projected/05e1a302-040a-46ca-a3c2-e4c8c6390091-kube-api-access-g568f\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:34 crc kubenswrapper[4558]: I0120 17:24:34.629354 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05e1a302-040a-46ca-a3c2-e4c8c6390091-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:34 crc kubenswrapper[4558]: I0120 17:24:34.629363 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/05e1a302-040a-46ca-a3c2-e4c8c6390091-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:34 crc kubenswrapper[4558]: I0120 17:24:34.629372 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/05e1a302-040a-46ca-a3c2-e4c8c6390091-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:34 crc kubenswrapper[4558]: I0120 17:24:34.629380 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05e1a302-040a-46ca-a3c2-e4c8c6390091-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:34 crc kubenswrapper[4558]: I0120 17:24:34.645359 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage14-crc" (UniqueName: "kubernetes.io/local-volume/local-storage14-crc") on node "crc" Jan 20 17:24:34 crc kubenswrapper[4558]: I0120 17:24:34.732288 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:35 crc kubenswrapper[4558]: I0120 17:24:35.072745 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"901ea0e0-d046-4871-887c-ea58a1d8fcb0","Type":"ContainerStarted","Data":"500da8ecd44bd082f1fc0b940db89a832e557dcbbd211723fa9bb91ca4e746bc"} Jan 20 17:24:35 crc kubenswrapper[4558]: I0120 17:24:35.075024 4558 generic.go:334] "Generic (PLEG): container finished" podID="05e1a302-040a-46ca-a3c2-e4c8c6390091" containerID="8ba8938da0a00c9308d3eea1def52748e499a64ab544e915a92e8d41b1af332b" exitCode=0 Jan 20 17:24:35 crc kubenswrapper[4558]: I0120 17:24:35.075092 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"05e1a302-040a-46ca-a3c2-e4c8c6390091","Type":"ContainerDied","Data":"8ba8938da0a00c9308d3eea1def52748e499a64ab544e915a92e8d41b1af332b"} Jan 20 17:24:35 crc kubenswrapper[4558]: 
I0120 17:24:35.075153 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:24:35 crc kubenswrapper[4558]: I0120 17:24:35.075192 4558 scope.go:117] "RemoveContainer" containerID="8ba8938da0a00c9308d3eea1def52748e499a64ab544e915a92e8d41b1af332b" Jan 20 17:24:35 crc kubenswrapper[4558]: I0120 17:24:35.075175 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"05e1a302-040a-46ca-a3c2-e4c8c6390091","Type":"ContainerDied","Data":"63c772d2033e23df0816d0065fd2885f141742dee74609e5acad19676410ab7d"} Jan 20 17:24:35 crc kubenswrapper[4558]: I0120 17:24:35.105886 4558 scope.go:117] "RemoveContainer" containerID="8059066dd8ca082e955bcb29f49441ed9509119295b8ded32faebb8c4410e534" Jan 20 17:24:35 crc kubenswrapper[4558]: I0120 17:24:35.123550 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:24:35 crc kubenswrapper[4558]: I0120 17:24:35.144261 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:24:35 crc kubenswrapper[4558]: I0120 17:24:35.154002 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:24:35 crc kubenswrapper[4558]: E0120 17:24:35.154463 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05e1a302-040a-46ca-a3c2-e4c8c6390091" containerName="glance-httpd" Jan 20 17:24:35 crc kubenswrapper[4558]: I0120 17:24:35.154483 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="05e1a302-040a-46ca-a3c2-e4c8c6390091" containerName="glance-httpd" Jan 20 17:24:35 crc kubenswrapper[4558]: E0120 17:24:35.154516 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05e1a302-040a-46ca-a3c2-e4c8c6390091" containerName="glance-log" Jan 20 17:24:35 crc kubenswrapper[4558]: I0120 17:24:35.154523 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="05e1a302-040a-46ca-a3c2-e4c8c6390091" containerName="glance-log" Jan 20 17:24:35 crc kubenswrapper[4558]: I0120 17:24:35.154705 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="05e1a302-040a-46ca-a3c2-e4c8c6390091" containerName="glance-log" Jan 20 17:24:35 crc kubenswrapper[4558]: I0120 17:24:35.154729 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="05e1a302-040a-46ca-a3c2-e4c8c6390091" containerName="glance-httpd" Jan 20 17:24:35 crc kubenswrapper[4558]: I0120 17:24:35.155743 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:24:35 crc kubenswrapper[4558]: I0120 17:24:35.157887 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-public-svc" Jan 20 17:24:35 crc kubenswrapper[4558]: I0120 17:24:35.158517 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-external-config-data" Jan 20 17:24:35 crc kubenswrapper[4558]: I0120 17:24:35.159416 4558 scope.go:117] "RemoveContainer" containerID="8ba8938da0a00c9308d3eea1def52748e499a64ab544e915a92e8d41b1af332b" Jan 20 17:24:35 crc kubenswrapper[4558]: I0120 17:24:35.159449 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:24:35 crc kubenswrapper[4558]: E0120 17:24:35.160644 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8ba8938da0a00c9308d3eea1def52748e499a64ab544e915a92e8d41b1af332b\": container with ID starting with 8ba8938da0a00c9308d3eea1def52748e499a64ab544e915a92e8d41b1af332b not found: ID does not exist" containerID="8ba8938da0a00c9308d3eea1def52748e499a64ab544e915a92e8d41b1af332b" Jan 20 17:24:35 crc kubenswrapper[4558]: I0120 17:24:35.160674 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8ba8938da0a00c9308d3eea1def52748e499a64ab544e915a92e8d41b1af332b"} err="failed to get container status \"8ba8938da0a00c9308d3eea1def52748e499a64ab544e915a92e8d41b1af332b\": rpc error: code = NotFound desc = could not find container \"8ba8938da0a00c9308d3eea1def52748e499a64ab544e915a92e8d41b1af332b\": container with ID starting with 8ba8938da0a00c9308d3eea1def52748e499a64ab544e915a92e8d41b1af332b not found: ID does not exist" Jan 20 17:24:35 crc kubenswrapper[4558]: I0120 17:24:35.160694 4558 scope.go:117] "RemoveContainer" containerID="8059066dd8ca082e955bcb29f49441ed9509119295b8ded32faebb8c4410e534" Jan 20 17:24:35 crc kubenswrapper[4558]: E0120 17:24:35.162020 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8059066dd8ca082e955bcb29f49441ed9509119295b8ded32faebb8c4410e534\": container with ID starting with 8059066dd8ca082e955bcb29f49441ed9509119295b8ded32faebb8c4410e534 not found: ID does not exist" containerID="8059066dd8ca082e955bcb29f49441ed9509119295b8ded32faebb8c4410e534" Jan 20 17:24:35 crc kubenswrapper[4558]: I0120 17:24:35.162053 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8059066dd8ca082e955bcb29f49441ed9509119295b8ded32faebb8c4410e534"} err="failed to get container status \"8059066dd8ca082e955bcb29f49441ed9509119295b8ded32faebb8c4410e534\": rpc error: code = NotFound desc = could not find container \"8059066dd8ca082e955bcb29f49441ed9509119295b8ded32faebb8c4410e534\": container with ID starting with 8059066dd8ca082e955bcb29f49441ed9509119295b8ded32faebb8c4410e534 not found: ID does not exist" Jan 20 17:24:35 crc kubenswrapper[4558]: I0120 17:24:35.345362 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:24:35 crc 
kubenswrapper[4558]: I0120 17:24:35.345417 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"glance-default-external-api-0\" (UID: \"c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:24:35 crc kubenswrapper[4558]: I0120 17:24:35.345442 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd-scripts\") pod \"glance-default-external-api-0\" (UID: \"c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:24:35 crc kubenswrapper[4558]: I0120 17:24:35.345969 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:24:35 crc kubenswrapper[4558]: I0120 17:24:35.346030 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5vdgq\" (UniqueName: \"kubernetes.io/projected/c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd-kube-api-access-5vdgq\") pod \"glance-default-external-api-0\" (UID: \"c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:24:35 crc kubenswrapper[4558]: I0120 17:24:35.346052 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd-logs\") pod \"glance-default-external-api-0\" (UID: \"c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:24:35 crc kubenswrapper[4558]: I0120 17:24:35.346082 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:24:35 crc kubenswrapper[4558]: I0120 17:24:35.346131 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd-config-data\") pod \"glance-default-external-api-0\" (UID: \"c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:24:35 crc kubenswrapper[4558]: I0120 17:24:35.448502 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:24:35 crc kubenswrapper[4558]: I0120 17:24:35.448572 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5vdgq\" (UniqueName: 
\"kubernetes.io/projected/c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd-kube-api-access-5vdgq\") pod \"glance-default-external-api-0\" (UID: \"c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:24:35 crc kubenswrapper[4558]: I0120 17:24:35.448596 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd-logs\") pod \"glance-default-external-api-0\" (UID: \"c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:24:35 crc kubenswrapper[4558]: I0120 17:24:35.448620 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:24:35 crc kubenswrapper[4558]: I0120 17:24:35.448657 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd-config-data\") pod \"glance-default-external-api-0\" (UID: \"c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:24:35 crc kubenswrapper[4558]: I0120 17:24:35.448729 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:24:35 crc kubenswrapper[4558]: I0120 17:24:35.448749 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"glance-default-external-api-0\" (UID: \"c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:24:35 crc kubenswrapper[4558]: I0120 17:24:35.448777 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd-scripts\") pod \"glance-default-external-api-0\" (UID: \"c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:24:35 crc kubenswrapper[4558]: I0120 17:24:35.450038 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd-logs\") pod \"glance-default-external-api-0\" (UID: \"c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:24:35 crc kubenswrapper[4558]: I0120 17:24:35.450058 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"glance-default-external-api-0\" (UID: \"c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd\") device mount path \"/mnt/openstack/pv14\"" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:24:35 crc kubenswrapper[4558]: I0120 17:24:35.450051 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: 
\"kubernetes.io/empty-dir/c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:24:35 crc kubenswrapper[4558]: I0120 17:24:35.454183 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:24:35 crc kubenswrapper[4558]: I0120 17:24:35.454579 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd-config-data\") pod \"glance-default-external-api-0\" (UID: \"c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:24:35 crc kubenswrapper[4558]: I0120 17:24:35.457738 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:24:35 crc kubenswrapper[4558]: I0120 17:24:35.462399 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd-scripts\") pod \"glance-default-external-api-0\" (UID: \"c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:24:35 crc kubenswrapper[4558]: I0120 17:24:35.471704 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5vdgq\" (UniqueName: \"kubernetes.io/projected/c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd-kube-api-access-5vdgq\") pod \"glance-default-external-api-0\" (UID: \"c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:24:35 crc kubenswrapper[4558]: I0120 17:24:35.482442 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"glance-default-external-api-0\" (UID: \"c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:24:35 crc kubenswrapper[4558]: I0120 17:24:35.490825 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:24:35 crc kubenswrapper[4558]: I0120 17:24:35.654558 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:24:35 crc kubenswrapper[4558]: I0120 17:24:35.757099 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"f2f1744d-79dd-43bc-8bea-93e00672c805\" (UID: \"f2f1744d-79dd-43bc-8bea-93e00672c805\") " Jan 20 17:24:35 crc kubenswrapper[4558]: I0120 17:24:35.764401 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage15-crc" (OuterVolumeSpecName: "glance") pod "f2f1744d-79dd-43bc-8bea-93e00672c805" (UID: "f2f1744d-79dd-43bc-8bea-93e00672c805"). InnerVolumeSpecName "local-storage15-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:24:35 crc kubenswrapper[4558]: I0120 17:24:35.858564 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f2f1744d-79dd-43bc-8bea-93e00672c805-internal-tls-certs\") pod \"f2f1744d-79dd-43bc-8bea-93e00672c805\" (UID: \"f2f1744d-79dd-43bc-8bea-93e00672c805\") " Jan 20 17:24:35 crc kubenswrapper[4558]: I0120 17:24:35.858821 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f2f1744d-79dd-43bc-8bea-93e00672c805-scripts\") pod \"f2f1744d-79dd-43bc-8bea-93e00672c805\" (UID: \"f2f1744d-79dd-43bc-8bea-93e00672c805\") " Jan 20 17:24:35 crc kubenswrapper[4558]: I0120 17:24:35.858876 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f2f1744d-79dd-43bc-8bea-93e00672c805-httpd-run\") pod \"f2f1744d-79dd-43bc-8bea-93e00672c805\" (UID: \"f2f1744d-79dd-43bc-8bea-93e00672c805\") " Jan 20 17:24:35 crc kubenswrapper[4558]: I0120 17:24:35.858975 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f2f1744d-79dd-43bc-8bea-93e00672c805-logs\") pod \"f2f1744d-79dd-43bc-8bea-93e00672c805\" (UID: \"f2f1744d-79dd-43bc-8bea-93e00672c805\") " Jan 20 17:24:35 crc kubenswrapper[4558]: I0120 17:24:35.858995 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wvv8l\" (UniqueName: \"kubernetes.io/projected/f2f1744d-79dd-43bc-8bea-93e00672c805-kube-api-access-wvv8l\") pod \"f2f1744d-79dd-43bc-8bea-93e00672c805\" (UID: \"f2f1744d-79dd-43bc-8bea-93e00672c805\") " Jan 20 17:24:35 crc kubenswrapper[4558]: I0120 17:24:35.859046 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2f1744d-79dd-43bc-8bea-93e00672c805-config-data\") pod \"f2f1744d-79dd-43bc-8bea-93e00672c805\" (UID: \"f2f1744d-79dd-43bc-8bea-93e00672c805\") " Jan 20 17:24:35 crc kubenswrapper[4558]: I0120 17:24:35.859081 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2f1744d-79dd-43bc-8bea-93e00672c805-combined-ca-bundle\") pod \"f2f1744d-79dd-43bc-8bea-93e00672c805\" (UID: \"f2f1744d-79dd-43bc-8bea-93e00672c805\") " Jan 20 17:24:35 crc kubenswrapper[4558]: I0120 17:24:35.859599 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f2f1744d-79dd-43bc-8bea-93e00672c805-logs" (OuterVolumeSpecName: "logs") pod "f2f1744d-79dd-43bc-8bea-93e00672c805" (UID: 
"f2f1744d-79dd-43bc-8bea-93e00672c805"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:24:35 crc kubenswrapper[4558]: I0120 17:24:35.859904 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") on node \"crc\" " Jan 20 17:24:35 crc kubenswrapper[4558]: I0120 17:24:35.859919 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f2f1744d-79dd-43bc-8bea-93e00672c805-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:35 crc kubenswrapper[4558]: I0120 17:24:35.860306 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f2f1744d-79dd-43bc-8bea-93e00672c805-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "f2f1744d-79dd-43bc-8bea-93e00672c805" (UID: "f2f1744d-79dd-43bc-8bea-93e00672c805"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:24:35 crc kubenswrapper[4558]: I0120 17:24:35.863820 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f2f1744d-79dd-43bc-8bea-93e00672c805-scripts" (OuterVolumeSpecName: "scripts") pod "f2f1744d-79dd-43bc-8bea-93e00672c805" (UID: "f2f1744d-79dd-43bc-8bea-93e00672c805"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:24:35 crc kubenswrapper[4558]: I0120 17:24:35.864571 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f2f1744d-79dd-43bc-8bea-93e00672c805-kube-api-access-wvv8l" (OuterVolumeSpecName: "kube-api-access-wvv8l") pod "f2f1744d-79dd-43bc-8bea-93e00672c805" (UID: "f2f1744d-79dd-43bc-8bea-93e00672c805"). InnerVolumeSpecName "kube-api-access-wvv8l". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:24:35 crc kubenswrapper[4558]: I0120 17:24:35.885028 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage15-crc" (UniqueName: "kubernetes.io/local-volume/local-storage15-crc") on node "crc" Jan 20 17:24:35 crc kubenswrapper[4558]: I0120 17:24:35.892241 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f2f1744d-79dd-43bc-8bea-93e00672c805-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f2f1744d-79dd-43bc-8bea-93e00672c805" (UID: "f2f1744d-79dd-43bc-8bea-93e00672c805"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:24:35 crc kubenswrapper[4558]: E0120 17:24:35.916421 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f2f1744d-79dd-43bc-8bea-93e00672c805-config-data podName:f2f1744d-79dd-43bc-8bea-93e00672c805 nodeName:}" failed. No retries permitted until 2026-01-20 17:24:36.416393188 +0000 UTC m=+2570.176731155 (durationBeforeRetry 500ms). 
Error: error cleaning subPath mounts for volume "config-data" (UniqueName: "kubernetes.io/secret/f2f1744d-79dd-43bc-8bea-93e00672c805-config-data") pod "f2f1744d-79dd-43bc-8bea-93e00672c805" (UID: "f2f1744d-79dd-43bc-8bea-93e00672c805") : error deleting /var/lib/kubelet/pods/f2f1744d-79dd-43bc-8bea-93e00672c805/volume-subpaths: remove /var/lib/kubelet/pods/f2f1744d-79dd-43bc-8bea-93e00672c805/volume-subpaths: no such file or directory Jan 20 17:24:35 crc kubenswrapper[4558]: I0120 17:24:35.922233 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f2f1744d-79dd-43bc-8bea-93e00672c805-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "f2f1744d-79dd-43bc-8bea-93e00672c805" (UID: "f2f1744d-79dd-43bc-8bea-93e00672c805"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:24:35 crc kubenswrapper[4558]: I0120 17:24:35.962534 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:35 crc kubenswrapper[4558]: I0120 17:24:35.962582 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f2f1744d-79dd-43bc-8bea-93e00672c805-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:35 crc kubenswrapper[4558]: I0120 17:24:35.962598 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f2f1744d-79dd-43bc-8bea-93e00672c805-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:35 crc kubenswrapper[4558]: I0120 17:24:35.962610 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f2f1744d-79dd-43bc-8bea-93e00672c805-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:35 crc kubenswrapper[4558]: I0120 17:24:35.962624 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wvv8l\" (UniqueName: \"kubernetes.io/projected/f2f1744d-79dd-43bc-8bea-93e00672c805-kube-api-access-wvv8l\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:35 crc kubenswrapper[4558]: I0120 17:24:35.962635 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2f1744d-79dd-43bc-8bea-93e00672c805-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.044145 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:24:36 crc kubenswrapper[4558]: W0120 17:24:36.045972 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc4caf1aa_77f3_4f79_a1e5_ce8fa944b6cd.slice/crio-e2c7caf788a4795ff5ba8b7bcf6273b8cb3046464010a756d216c9ba55706fac WatchSource:0}: Error finding container e2c7caf788a4795ff5ba8b7bcf6273b8cb3046464010a756d216c9ba55706fac: Status 404 returned error can't find the container with id e2c7caf788a4795ff5ba8b7bcf6273b8cb3046464010a756d216c9ba55706fac Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.083948 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd","Type":"ContainerStarted","Data":"e2c7caf788a4795ff5ba8b7bcf6273b8cb3046464010a756d216c9ba55706fac"} Jan 20 17:24:36 crc kubenswrapper[4558]: 
I0120 17:24:36.086323 4558 generic.go:334] "Generic (PLEG): container finished" podID="f2f1744d-79dd-43bc-8bea-93e00672c805" containerID="929e11abc56a6402a23f271e8df464870a363c2850f358242cc4b22371fc6ec0" exitCode=0 Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.086363 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"f2f1744d-79dd-43bc-8bea-93e00672c805","Type":"ContainerDied","Data":"929e11abc56a6402a23f271e8df464870a363c2850f358242cc4b22371fc6ec0"} Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.086382 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.086415 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"f2f1744d-79dd-43bc-8bea-93e00672c805","Type":"ContainerDied","Data":"d3e06842b937cb58486b7028892a08f5c2149d2f9eeee036b5f7c7b8461615b9"} Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.086438 4558 scope.go:117] "RemoveContainer" containerID="929e11abc56a6402a23f271e8df464870a363c2850f358242cc4b22371fc6ec0" Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.095485 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"901ea0e0-d046-4871-887c-ea58a1d8fcb0","Type":"ContainerStarted","Data":"4cc6b4a1f4213019563ea4e8c7f4f34a8130a2fd3bb9deeeb2a3e0cb36b24814"} Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.095565 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.095567 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="901ea0e0-d046-4871-887c-ea58a1d8fcb0" containerName="ceilometer-central-agent" containerID="cri-o://95025487af705f3e41bbff12d0547501461d0af163d7b016973ec88665be1dff" gracePeriod=30 Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.095647 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="901ea0e0-d046-4871-887c-ea58a1d8fcb0" containerName="ceilometer-notification-agent" containerID="cri-o://7947aee6092ae17cad35b9b67df7da0268558ba84fafb3187d4167522b7e4507" gracePeriod=30 Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.095694 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="901ea0e0-d046-4871-887c-ea58a1d8fcb0" containerName="proxy-httpd" containerID="cri-o://4cc6b4a1f4213019563ea4e8c7f4f34a8130a2fd3bb9deeeb2a3e0cb36b24814" gracePeriod=30 Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.095676 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="901ea0e0-d046-4871-887c-ea58a1d8fcb0" containerName="sg-core" containerID="cri-o://500da8ecd44bd082f1fc0b940db89a832e557dcbbd211723fa9bb91ca4e746bc" gracePeriod=30 Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.101397 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-s4fbf" podUID="11c22b1a-a7cd-40d8-9b6d-cd095b913a81" containerName="registry-server" containerID="cri-o://99dba0a4688df1b240e481e351c05168a3e1900680a0e4f241c80a822442a9a2" gracePeriod=2 Jan 
20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.114254 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=1.259697261 podStartE2EDuration="5.11423893s" podCreationTimestamp="2026-01-20 17:24:31 +0000 UTC" firstStartedPulling="2026-01-20 17:24:31.906298174 +0000 UTC m=+2565.666636141" lastFinishedPulling="2026-01-20 17:24:35.760839843 +0000 UTC m=+2569.521177810" observedRunningTime="2026-01-20 17:24:36.109297706 +0000 UTC m=+2569.869635673" watchObservedRunningTime="2026-01-20 17:24:36.11423893 +0000 UTC m=+2569.874576898" Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.118644 4558 scope.go:117] "RemoveContainer" containerID="86863d93cda6303d400888f0fe4d36dd71ee9de6e0a32e2a3f3d124d43b93601" Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.140957 4558 scope.go:117] "RemoveContainer" containerID="929e11abc56a6402a23f271e8df464870a363c2850f358242cc4b22371fc6ec0" Jan 20 17:24:36 crc kubenswrapper[4558]: E0120 17:24:36.141274 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"929e11abc56a6402a23f271e8df464870a363c2850f358242cc4b22371fc6ec0\": container with ID starting with 929e11abc56a6402a23f271e8df464870a363c2850f358242cc4b22371fc6ec0 not found: ID does not exist" containerID="929e11abc56a6402a23f271e8df464870a363c2850f358242cc4b22371fc6ec0" Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.141299 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"929e11abc56a6402a23f271e8df464870a363c2850f358242cc4b22371fc6ec0"} err="failed to get container status \"929e11abc56a6402a23f271e8df464870a363c2850f358242cc4b22371fc6ec0\": rpc error: code = NotFound desc = could not find container \"929e11abc56a6402a23f271e8df464870a363c2850f358242cc4b22371fc6ec0\": container with ID starting with 929e11abc56a6402a23f271e8df464870a363c2850f358242cc4b22371fc6ec0 not found: ID does not exist" Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.141319 4558 scope.go:117] "RemoveContainer" containerID="86863d93cda6303d400888f0fe4d36dd71ee9de6e0a32e2a3f3d124d43b93601" Jan 20 17:24:36 crc kubenswrapper[4558]: E0120 17:24:36.141532 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"86863d93cda6303d400888f0fe4d36dd71ee9de6e0a32e2a3f3d124d43b93601\": container with ID starting with 86863d93cda6303d400888f0fe4d36dd71ee9de6e0a32e2a3f3d124d43b93601 not found: ID does not exist" containerID="86863d93cda6303d400888f0fe4d36dd71ee9de6e0a32e2a3f3d124d43b93601" Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.141550 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"86863d93cda6303d400888f0fe4d36dd71ee9de6e0a32e2a3f3d124d43b93601"} err="failed to get container status \"86863d93cda6303d400888f0fe4d36dd71ee9de6e0a32e2a3f3d124d43b93601\": rpc error: code = NotFound desc = could not find container \"86863d93cda6303d400888f0fe4d36dd71ee9de6e0a32e2a3f3d124d43b93601\": container with ID starting with 86863d93cda6303d400888f0fe4d36dd71ee9de6e0a32e2a3f3d124d43b93601 not found: ID does not exist" Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.470409 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2f1744d-79dd-43bc-8bea-93e00672c805-config-data\") pod 
\"f2f1744d-79dd-43bc-8bea-93e00672c805\" (UID: \"f2f1744d-79dd-43bc-8bea-93e00672c805\") " Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.476261 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f2f1744d-79dd-43bc-8bea-93e00672c805-config-data" (OuterVolumeSpecName: "config-data") pod "f2f1744d-79dd-43bc-8bea-93e00672c805" (UID: "f2f1744d-79dd-43bc-8bea-93e00672c805"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.573574 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2f1744d-79dd-43bc-8bea-93e00672c805-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.575350 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="05e1a302-040a-46ca-a3c2-e4c8c6390091" path="/var/lib/kubelet/pods/05e1a302-040a-46ca-a3c2-e4c8c6390091/volumes" Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.613983 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-s4fbf" Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.675371 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/11c22b1a-a7cd-40d8-9b6d-cd095b913a81-utilities\") pod \"11c22b1a-a7cd-40d8-9b6d-cd095b913a81\" (UID: \"11c22b1a-a7cd-40d8-9b6d-cd095b913a81\") " Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.675421 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7jx45\" (UniqueName: \"kubernetes.io/projected/11c22b1a-a7cd-40d8-9b6d-cd095b913a81-kube-api-access-7jx45\") pod \"11c22b1a-a7cd-40d8-9b6d-cd095b913a81\" (UID: \"11c22b1a-a7cd-40d8-9b6d-cd095b913a81\") " Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.675537 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/11c22b1a-a7cd-40d8-9b6d-cd095b913a81-catalog-content\") pod \"11c22b1a-a7cd-40d8-9b6d-cd095b913a81\" (UID: \"11c22b1a-a7cd-40d8-9b6d-cd095b913a81\") " Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.679614 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/11c22b1a-a7cd-40d8-9b6d-cd095b913a81-kube-api-access-7jx45" (OuterVolumeSpecName: "kube-api-access-7jx45") pod "11c22b1a-a7cd-40d8-9b6d-cd095b913a81" (UID: "11c22b1a-a7cd-40d8-9b6d-cd095b913a81"). InnerVolumeSpecName "kube-api-access-7jx45". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.687526 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/11c22b1a-a7cd-40d8-9b6d-cd095b913a81-utilities" (OuterVolumeSpecName: "utilities") pod "11c22b1a-a7cd-40d8-9b6d-cd095b913a81" (UID: "11c22b1a-a7cd-40d8-9b6d-cd095b913a81"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.735290 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/11c22b1a-a7cd-40d8-9b6d-cd095b913a81-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "11c22b1a-a7cd-40d8-9b6d-cd095b913a81" (UID: "11c22b1a-a7cd-40d8-9b6d-cd095b913a81"). 
InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.744504 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.757613 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.781055 4558 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/11c22b1a-a7cd-40d8-9b6d-cd095b913a81-utilities\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.781126 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7jx45\" (UniqueName: \"kubernetes.io/projected/11c22b1a-a7cd-40d8-9b6d-cd095b913a81-kube-api-access-7jx45\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.781139 4558 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/11c22b1a-a7cd-40d8-9b6d-cd095b913a81-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.801107 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.806262 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:24:36 crc kubenswrapper[4558]: E0120 17:24:36.806651 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11c22b1a-a7cd-40d8-9b6d-cd095b913a81" containerName="extract-utilities" Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.806673 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="11c22b1a-a7cd-40d8-9b6d-cd095b913a81" containerName="extract-utilities" Jan 20 17:24:36 crc kubenswrapper[4558]: E0120 17:24:36.806686 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11c22b1a-a7cd-40d8-9b6d-cd095b913a81" containerName="registry-server" Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.806694 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="11c22b1a-a7cd-40d8-9b6d-cd095b913a81" containerName="registry-server" Jan 20 17:24:36 crc kubenswrapper[4558]: E0120 17:24:36.806714 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="901ea0e0-d046-4871-887c-ea58a1d8fcb0" containerName="ceilometer-central-agent" Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.806722 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="901ea0e0-d046-4871-887c-ea58a1d8fcb0" containerName="ceilometer-central-agent" Jan 20 17:24:36 crc kubenswrapper[4558]: E0120 17:24:36.806733 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11c22b1a-a7cd-40d8-9b6d-cd095b913a81" containerName="extract-content" Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.806740 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="11c22b1a-a7cd-40d8-9b6d-cd095b913a81" containerName="extract-content" Jan 20 17:24:36 crc kubenswrapper[4558]: E0120 17:24:36.806752 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="901ea0e0-d046-4871-887c-ea58a1d8fcb0" containerName="sg-core" Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.806759 4558 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="901ea0e0-d046-4871-887c-ea58a1d8fcb0" containerName="sg-core" Jan 20 17:24:36 crc kubenswrapper[4558]: E0120 17:24:36.806791 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f2f1744d-79dd-43bc-8bea-93e00672c805" containerName="glance-log" Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.806797 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f2f1744d-79dd-43bc-8bea-93e00672c805" containerName="glance-log" Jan 20 17:24:36 crc kubenswrapper[4558]: E0120 17:24:36.806817 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="901ea0e0-d046-4871-887c-ea58a1d8fcb0" containerName="proxy-httpd" Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.806823 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="901ea0e0-d046-4871-887c-ea58a1d8fcb0" containerName="proxy-httpd" Jan 20 17:24:36 crc kubenswrapper[4558]: E0120 17:24:36.806830 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f2f1744d-79dd-43bc-8bea-93e00672c805" containerName="glance-httpd" Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.806836 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f2f1744d-79dd-43bc-8bea-93e00672c805" containerName="glance-httpd" Jan 20 17:24:36 crc kubenswrapper[4558]: E0120 17:24:36.806847 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="901ea0e0-d046-4871-887c-ea58a1d8fcb0" containerName="ceilometer-notification-agent" Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.806854 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="901ea0e0-d046-4871-887c-ea58a1d8fcb0" containerName="ceilometer-notification-agent" Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.807090 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="901ea0e0-d046-4871-887c-ea58a1d8fcb0" containerName="ceilometer-central-agent" Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.807113 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="11c22b1a-a7cd-40d8-9b6d-cd095b913a81" containerName="registry-server" Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.807122 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="901ea0e0-d046-4871-887c-ea58a1d8fcb0" containerName="proxy-httpd" Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.807131 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f2f1744d-79dd-43bc-8bea-93e00672c805" containerName="glance-log" Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.807144 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="901ea0e0-d046-4871-887c-ea58a1d8fcb0" containerName="sg-core" Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.807154 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f2f1744d-79dd-43bc-8bea-93e00672c805" containerName="glance-httpd" Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.807233 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="901ea0e0-d046-4871-887c-ea58a1d8fcb0" containerName="ceilometer-notification-agent" Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.808623 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.810466 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-internal-svc" Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.810693 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-internal-config-data" Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.825220 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.984025 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/901ea0e0-d046-4871-887c-ea58a1d8fcb0-log-httpd\") pod \"901ea0e0-d046-4871-887c-ea58a1d8fcb0\" (UID: \"901ea0e0-d046-4871-887c-ea58a1d8fcb0\") " Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.984187 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/901ea0e0-d046-4871-887c-ea58a1d8fcb0-combined-ca-bundle\") pod \"901ea0e0-d046-4871-887c-ea58a1d8fcb0\" (UID: \"901ea0e0-d046-4871-887c-ea58a1d8fcb0\") " Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.984219 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f5fmc\" (UniqueName: \"kubernetes.io/projected/901ea0e0-d046-4871-887c-ea58a1d8fcb0-kube-api-access-f5fmc\") pod \"901ea0e0-d046-4871-887c-ea58a1d8fcb0\" (UID: \"901ea0e0-d046-4871-887c-ea58a1d8fcb0\") " Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.984735 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/901ea0e0-d046-4871-887c-ea58a1d8fcb0-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "901ea0e0-d046-4871-887c-ea58a1d8fcb0" (UID: "901ea0e0-d046-4871-887c-ea58a1d8fcb0"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.984939 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/901ea0e0-d046-4871-887c-ea58a1d8fcb0-sg-core-conf-yaml\") pod \"901ea0e0-d046-4871-887c-ea58a1d8fcb0\" (UID: \"901ea0e0-d046-4871-887c-ea58a1d8fcb0\") " Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.984972 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/901ea0e0-d046-4871-887c-ea58a1d8fcb0-scripts\") pod \"901ea0e0-d046-4871-887c-ea58a1d8fcb0\" (UID: \"901ea0e0-d046-4871-887c-ea58a1d8fcb0\") " Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.985154 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/901ea0e0-d046-4871-887c-ea58a1d8fcb0-run-httpd\") pod \"901ea0e0-d046-4871-887c-ea58a1d8fcb0\" (UID: \"901ea0e0-d046-4871-887c-ea58a1d8fcb0\") " Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.985222 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/901ea0e0-d046-4871-887c-ea58a1d8fcb0-config-data\") pod \"901ea0e0-d046-4871-887c-ea58a1d8fcb0\" (UID: \"901ea0e0-d046-4871-887c-ea58a1d8fcb0\") " Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.985705 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/901ea0e0-d046-4871-887c-ea58a1d8fcb0-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "901ea0e0-d046-4871-887c-ea58a1d8fcb0" (UID: "901ea0e0-d046-4871-887c-ea58a1d8fcb0"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.985742 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/269584c1-a4ed-43e5-a1cb-1e5e6952df11-config-data\") pod \"glance-default-internal-api-0\" (UID: \"269584c1-a4ed-43e5-a1cb-1e5e6952df11\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.985783 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/269584c1-a4ed-43e5-a1cb-1e5e6952df11-logs\") pod \"glance-default-internal-api-0\" (UID: \"269584c1-a4ed-43e5-a1cb-1e5e6952df11\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.985920 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/269584c1-a4ed-43e5-a1cb-1e5e6952df11-scripts\") pod \"glance-default-internal-api-0\" (UID: \"269584c1-a4ed-43e5-a1cb-1e5e6952df11\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.985955 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"glance-default-internal-api-0\" (UID: \"269584c1-a4ed-43e5-a1cb-1e5e6952df11\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.986002 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/269584c1-a4ed-43e5-a1cb-1e5e6952df11-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"269584c1-a4ed-43e5-a1cb-1e5e6952df11\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.986104 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/269584c1-a4ed-43e5-a1cb-1e5e6952df11-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"269584c1-a4ed-43e5-a1cb-1e5e6952df11\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.986191 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t9zpz\" (UniqueName: \"kubernetes.io/projected/269584c1-a4ed-43e5-a1cb-1e5e6952df11-kube-api-access-t9zpz\") pod \"glance-default-internal-api-0\" (UID: \"269584c1-a4ed-43e5-a1cb-1e5e6952df11\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.987434 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/269584c1-a4ed-43e5-a1cb-1e5e6952df11-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"269584c1-a4ed-43e5-a1cb-1e5e6952df11\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.987541 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/901ea0e0-d046-4871-887c-ea58a1d8fcb0-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.987554 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/901ea0e0-d046-4871-887c-ea58a1d8fcb0-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.988480 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/901ea0e0-d046-4871-887c-ea58a1d8fcb0-scripts" (OuterVolumeSpecName: "scripts") pod "901ea0e0-d046-4871-887c-ea58a1d8fcb0" (UID: "901ea0e0-d046-4871-887c-ea58a1d8fcb0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:24:36 crc kubenswrapper[4558]: I0120 17:24:36.989707 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/901ea0e0-d046-4871-887c-ea58a1d8fcb0-kube-api-access-f5fmc" (OuterVolumeSpecName: "kube-api-access-f5fmc") pod "901ea0e0-d046-4871-887c-ea58a1d8fcb0" (UID: "901ea0e0-d046-4871-887c-ea58a1d8fcb0"). InnerVolumeSpecName "kube-api-access-f5fmc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.012499 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/901ea0e0-d046-4871-887c-ea58a1d8fcb0-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "901ea0e0-d046-4871-887c-ea58a1d8fcb0" (UID: "901ea0e0-d046-4871-887c-ea58a1d8fcb0"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.067964 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/901ea0e0-d046-4871-887c-ea58a1d8fcb0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "901ea0e0-d046-4871-887c-ea58a1d8fcb0" (UID: "901ea0e0-d046-4871-887c-ea58a1d8fcb0"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.090279 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/269584c1-a4ed-43e5-a1cb-1e5e6952df11-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"269584c1-a4ed-43e5-a1cb-1e5e6952df11\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.090406 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t9zpz\" (UniqueName: \"kubernetes.io/projected/269584c1-a4ed-43e5-a1cb-1e5e6952df11-kube-api-access-t9zpz\") pod \"glance-default-internal-api-0\" (UID: \"269584c1-a4ed-43e5-a1cb-1e5e6952df11\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.090479 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/269584c1-a4ed-43e5-a1cb-1e5e6952df11-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"269584c1-a4ed-43e5-a1cb-1e5e6952df11\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.090587 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/269584c1-a4ed-43e5-a1cb-1e5e6952df11-config-data\") pod \"glance-default-internal-api-0\" (UID: \"269584c1-a4ed-43e5-a1cb-1e5e6952df11\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.090634 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/269584c1-a4ed-43e5-a1cb-1e5e6952df11-logs\") pod \"glance-default-internal-api-0\" (UID: \"269584c1-a4ed-43e5-a1cb-1e5e6952df11\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.090832 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/269584c1-a4ed-43e5-a1cb-1e5e6952df11-scripts\") pod \"glance-default-internal-api-0\" (UID: \"269584c1-a4ed-43e5-a1cb-1e5e6952df11\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.090855 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/269584c1-a4ed-43e5-a1cb-1e5e6952df11-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"269584c1-a4ed-43e5-a1cb-1e5e6952df11\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.090876 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"glance-default-internal-api-0\" (UID: \"269584c1-a4ed-43e5-a1cb-1e5e6952df11\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.090970 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/269584c1-a4ed-43e5-a1cb-1e5e6952df11-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"269584c1-a4ed-43e5-a1cb-1e5e6952df11\") " 
pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.091103 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/901ea0e0-d046-4871-887c-ea58a1d8fcb0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.091119 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f5fmc\" (UniqueName: \"kubernetes.io/projected/901ea0e0-d046-4871-887c-ea58a1d8fcb0-kube-api-access-f5fmc\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.091135 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/901ea0e0-d046-4871-887c-ea58a1d8fcb0-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.091153 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/901ea0e0-d046-4871-887c-ea58a1d8fcb0-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.091689 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/269584c1-a4ed-43e5-a1cb-1e5e6952df11-logs\") pod \"glance-default-internal-api-0\" (UID: \"269584c1-a4ed-43e5-a1cb-1e5e6952df11\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.092017 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"glance-default-internal-api-0\" (UID: \"269584c1-a4ed-43e5-a1cb-1e5e6952df11\") device mount path \"/mnt/openstack/pv15\"" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.094343 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/901ea0e0-d046-4871-887c-ea58a1d8fcb0-config-data" (OuterVolumeSpecName: "config-data") pod "901ea0e0-d046-4871-887c-ea58a1d8fcb0" (UID: "901ea0e0-d046-4871-887c-ea58a1d8fcb0"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.098229 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/269584c1-a4ed-43e5-a1cb-1e5e6952df11-scripts\") pod \"glance-default-internal-api-0\" (UID: \"269584c1-a4ed-43e5-a1cb-1e5e6952df11\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.104205 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/269584c1-a4ed-43e5-a1cb-1e5e6952df11-config-data\") pod \"glance-default-internal-api-0\" (UID: \"269584c1-a4ed-43e5-a1cb-1e5e6952df11\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.104698 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/269584c1-a4ed-43e5-a1cb-1e5e6952df11-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"269584c1-a4ed-43e5-a1cb-1e5e6952df11\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.105530 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/269584c1-a4ed-43e5-a1cb-1e5e6952df11-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"269584c1-a4ed-43e5-a1cb-1e5e6952df11\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.108761 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t9zpz\" (UniqueName: \"kubernetes.io/projected/269584c1-a4ed-43e5-a1cb-1e5e6952df11-kube-api-access-t9zpz\") pod \"glance-default-internal-api-0\" (UID: \"269584c1-a4ed-43e5-a1cb-1e5e6952df11\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.121159 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd","Type":"ContainerStarted","Data":"22a976775c577de36fb22efa39255300a73eb567891d714dec5fe2063f3429ce"} Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.128854 4558 generic.go:334] "Generic (PLEG): container finished" podID="11c22b1a-a7cd-40d8-9b6d-cd095b913a81" containerID="99dba0a4688df1b240e481e351c05168a3e1900680a0e4f241c80a822442a9a2" exitCode=0 Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.128943 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-s4fbf" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.128938 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s4fbf" event={"ID":"11c22b1a-a7cd-40d8-9b6d-cd095b913a81","Type":"ContainerDied","Data":"99dba0a4688df1b240e481e351c05168a3e1900680a0e4f241c80a822442a9a2"} Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.129077 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s4fbf" event={"ID":"11c22b1a-a7cd-40d8-9b6d-cd095b913a81","Type":"ContainerDied","Data":"7713f54b7023b837a5753498dca128d5953ae3496c5c5aa20aacb6f3fea4985c"} Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.129119 4558 scope.go:117] "RemoveContainer" containerID="99dba0a4688df1b240e481e351c05168a3e1900680a0e4f241c80a822442a9a2" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.132296 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"glance-default-internal-api-0\" (UID: \"269584c1-a4ed-43e5-a1cb-1e5e6952df11\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.137482 4558 generic.go:334] "Generic (PLEG): container finished" podID="901ea0e0-d046-4871-887c-ea58a1d8fcb0" containerID="4cc6b4a1f4213019563ea4e8c7f4f34a8130a2fd3bb9deeeb2a3e0cb36b24814" exitCode=0 Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.137513 4558 generic.go:334] "Generic (PLEG): container finished" podID="901ea0e0-d046-4871-887c-ea58a1d8fcb0" containerID="500da8ecd44bd082f1fc0b940db89a832e557dcbbd211723fa9bb91ca4e746bc" exitCode=2 Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.137673 4558 generic.go:334] "Generic (PLEG): container finished" podID="901ea0e0-d046-4871-887c-ea58a1d8fcb0" containerID="7947aee6092ae17cad35b9b67df7da0268558ba84fafb3187d4167522b7e4507" exitCode=0 Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.137685 4558 generic.go:334] "Generic (PLEG): container finished" podID="901ea0e0-d046-4871-887c-ea58a1d8fcb0" containerID="95025487af705f3e41bbff12d0547501461d0af163d7b016973ec88665be1dff" exitCode=0 Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.137710 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"901ea0e0-d046-4871-887c-ea58a1d8fcb0","Type":"ContainerDied","Data":"4cc6b4a1f4213019563ea4e8c7f4f34a8130a2fd3bb9deeeb2a3e0cb36b24814"} Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.137737 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"901ea0e0-d046-4871-887c-ea58a1d8fcb0","Type":"ContainerDied","Data":"500da8ecd44bd082f1fc0b940db89a832e557dcbbd211723fa9bb91ca4e746bc"} Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.137749 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"901ea0e0-d046-4871-887c-ea58a1d8fcb0","Type":"ContainerDied","Data":"7947aee6092ae17cad35b9b67df7da0268558ba84fafb3187d4167522b7e4507"} Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.137761 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"901ea0e0-d046-4871-887c-ea58a1d8fcb0","Type":"ContainerDied","Data":"95025487af705f3e41bbff12d0547501461d0af163d7b016973ec88665be1dff"} Jan 20 17:24:37 crc 
kubenswrapper[4558]: I0120 17:24:37.137826 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"901ea0e0-d046-4871-887c-ea58a1d8fcb0","Type":"ContainerDied","Data":"9b495b6fd651da68ce9a30d63f307c50fe0a64582bbe3e8cf1ee36888205b947"} Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.137894 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.151612 4558 scope.go:117] "RemoveContainer" containerID="cfacd8d412c42b01a9472434d49e531879f6bbd0b69041950da03b4acbed033a" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.178250 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-s4fbf"] Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.202695 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-s4fbf"] Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.205309 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/901ea0e0-d046-4871-887c-ea58a1d8fcb0-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.216592 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.222394 4558 scope.go:117] "RemoveContainer" containerID="e21f77f3f020a87a98aff7a31130f629dc8bbc433cb6252ad83ebf1169f0494c" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.228289 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.235210 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.237681 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.239559 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.239864 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.241698 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.308371 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f98f4f9b-3a31-4719-ba9a-f474fcd9672d-config-data\") pod \"ceilometer-0\" (UID: \"f98f4f9b-3a31-4719-ba9a-f474fcd9672d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.308474 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f98f4f9b-3a31-4719-ba9a-f474fcd9672d-log-httpd\") pod \"ceilometer-0\" (UID: \"f98f4f9b-3a31-4719-ba9a-f474fcd9672d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.308517 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6wcnz\" (UniqueName: \"kubernetes.io/projected/f98f4f9b-3a31-4719-ba9a-f474fcd9672d-kube-api-access-6wcnz\") pod \"ceilometer-0\" (UID: \"f98f4f9b-3a31-4719-ba9a-f474fcd9672d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.308563 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f98f4f9b-3a31-4719-ba9a-f474fcd9672d-scripts\") pod \"ceilometer-0\" (UID: \"f98f4f9b-3a31-4719-ba9a-f474fcd9672d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.308590 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f98f4f9b-3a31-4719-ba9a-f474fcd9672d-run-httpd\") pod \"ceilometer-0\" (UID: \"f98f4f9b-3a31-4719-ba9a-f474fcd9672d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.308638 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f98f4f9b-3a31-4719-ba9a-f474fcd9672d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f98f4f9b-3a31-4719-ba9a-f474fcd9672d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.308670 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f98f4f9b-3a31-4719-ba9a-f474fcd9672d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f98f4f9b-3a31-4719-ba9a-f474fcd9672d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.315497 4558 scope.go:117] "RemoveContainer" containerID="99dba0a4688df1b240e481e351c05168a3e1900680a0e4f241c80a822442a9a2" Jan 20 17:24:37 crc kubenswrapper[4558]: E0120 17:24:37.315988 4558 log.go:32] "ContainerStatus from 
runtime service failed" err="rpc error: code = NotFound desc = could not find container \"99dba0a4688df1b240e481e351c05168a3e1900680a0e4f241c80a822442a9a2\": container with ID starting with 99dba0a4688df1b240e481e351c05168a3e1900680a0e4f241c80a822442a9a2 not found: ID does not exist" containerID="99dba0a4688df1b240e481e351c05168a3e1900680a0e4f241c80a822442a9a2" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.316022 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"99dba0a4688df1b240e481e351c05168a3e1900680a0e4f241c80a822442a9a2"} err="failed to get container status \"99dba0a4688df1b240e481e351c05168a3e1900680a0e4f241c80a822442a9a2\": rpc error: code = NotFound desc = could not find container \"99dba0a4688df1b240e481e351c05168a3e1900680a0e4f241c80a822442a9a2\": container with ID starting with 99dba0a4688df1b240e481e351c05168a3e1900680a0e4f241c80a822442a9a2 not found: ID does not exist" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.316046 4558 scope.go:117] "RemoveContainer" containerID="cfacd8d412c42b01a9472434d49e531879f6bbd0b69041950da03b4acbed033a" Jan 20 17:24:37 crc kubenswrapper[4558]: E0120 17:24:37.316340 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cfacd8d412c42b01a9472434d49e531879f6bbd0b69041950da03b4acbed033a\": container with ID starting with cfacd8d412c42b01a9472434d49e531879f6bbd0b69041950da03b4acbed033a not found: ID does not exist" containerID="cfacd8d412c42b01a9472434d49e531879f6bbd0b69041950da03b4acbed033a" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.316362 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cfacd8d412c42b01a9472434d49e531879f6bbd0b69041950da03b4acbed033a"} err="failed to get container status \"cfacd8d412c42b01a9472434d49e531879f6bbd0b69041950da03b4acbed033a\": rpc error: code = NotFound desc = could not find container \"cfacd8d412c42b01a9472434d49e531879f6bbd0b69041950da03b4acbed033a\": container with ID starting with cfacd8d412c42b01a9472434d49e531879f6bbd0b69041950da03b4acbed033a not found: ID does not exist" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.316376 4558 scope.go:117] "RemoveContainer" containerID="e21f77f3f020a87a98aff7a31130f629dc8bbc433cb6252ad83ebf1169f0494c" Jan 20 17:24:37 crc kubenswrapper[4558]: E0120 17:24:37.316577 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e21f77f3f020a87a98aff7a31130f629dc8bbc433cb6252ad83ebf1169f0494c\": container with ID starting with e21f77f3f020a87a98aff7a31130f629dc8bbc433cb6252ad83ebf1169f0494c not found: ID does not exist" containerID="e21f77f3f020a87a98aff7a31130f629dc8bbc433cb6252ad83ebf1169f0494c" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.316605 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e21f77f3f020a87a98aff7a31130f629dc8bbc433cb6252ad83ebf1169f0494c"} err="failed to get container status \"e21f77f3f020a87a98aff7a31130f629dc8bbc433cb6252ad83ebf1169f0494c\": rpc error: code = NotFound desc = could not find container \"e21f77f3f020a87a98aff7a31130f629dc8bbc433cb6252ad83ebf1169f0494c\": container with ID starting with e21f77f3f020a87a98aff7a31130f629dc8bbc433cb6252ad83ebf1169f0494c not found: ID does not exist" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.316620 4558 scope.go:117] "RemoveContainer" 
containerID="4cc6b4a1f4213019563ea4e8c7f4f34a8130a2fd3bb9deeeb2a3e0cb36b24814" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.360565 4558 scope.go:117] "RemoveContainer" containerID="500da8ecd44bd082f1fc0b940db89a832e557dcbbd211723fa9bb91ca4e746bc" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.388275 4558 scope.go:117] "RemoveContainer" containerID="7947aee6092ae17cad35b9b67df7da0268558ba84fafb3187d4167522b7e4507" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.406250 4558 scope.go:117] "RemoveContainer" containerID="95025487af705f3e41bbff12d0547501461d0af163d7b016973ec88665be1dff" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.411151 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f98f4f9b-3a31-4719-ba9a-f474fcd9672d-scripts\") pod \"ceilometer-0\" (UID: \"f98f4f9b-3a31-4719-ba9a-f474fcd9672d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.411245 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f98f4f9b-3a31-4719-ba9a-f474fcd9672d-run-httpd\") pod \"ceilometer-0\" (UID: \"f98f4f9b-3a31-4719-ba9a-f474fcd9672d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.411339 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f98f4f9b-3a31-4719-ba9a-f474fcd9672d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f98f4f9b-3a31-4719-ba9a-f474fcd9672d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.411640 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f98f4f9b-3a31-4719-ba9a-f474fcd9672d-run-httpd\") pod \"ceilometer-0\" (UID: \"f98f4f9b-3a31-4719-ba9a-f474fcd9672d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.411762 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f98f4f9b-3a31-4719-ba9a-f474fcd9672d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f98f4f9b-3a31-4719-ba9a-f474fcd9672d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.411888 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f98f4f9b-3a31-4719-ba9a-f474fcd9672d-config-data\") pod \"ceilometer-0\" (UID: \"f98f4f9b-3a31-4719-ba9a-f474fcd9672d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.412057 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f98f4f9b-3a31-4719-ba9a-f474fcd9672d-log-httpd\") pod \"ceilometer-0\" (UID: \"f98f4f9b-3a31-4719-ba9a-f474fcd9672d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.412113 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6wcnz\" (UniqueName: \"kubernetes.io/projected/f98f4f9b-3a31-4719-ba9a-f474fcd9672d-kube-api-access-6wcnz\") pod \"ceilometer-0\" (UID: \"f98f4f9b-3a31-4719-ba9a-f474fcd9672d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:37 crc 
kubenswrapper[4558]: I0120 17:24:37.413137 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f98f4f9b-3a31-4719-ba9a-f474fcd9672d-log-httpd\") pod \"ceilometer-0\" (UID: \"f98f4f9b-3a31-4719-ba9a-f474fcd9672d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.416970 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f98f4f9b-3a31-4719-ba9a-f474fcd9672d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f98f4f9b-3a31-4719-ba9a-f474fcd9672d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.417009 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f98f4f9b-3a31-4719-ba9a-f474fcd9672d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f98f4f9b-3a31-4719-ba9a-f474fcd9672d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.419995 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f98f4f9b-3a31-4719-ba9a-f474fcd9672d-scripts\") pod \"ceilometer-0\" (UID: \"f98f4f9b-3a31-4719-ba9a-f474fcd9672d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.421059 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f98f4f9b-3a31-4719-ba9a-f474fcd9672d-config-data\") pod \"ceilometer-0\" (UID: \"f98f4f9b-3a31-4719-ba9a-f474fcd9672d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.431001 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.438834 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6wcnz\" (UniqueName: \"kubernetes.io/projected/f98f4f9b-3a31-4719-ba9a-f474fcd9672d-kube-api-access-6wcnz\") pod \"ceilometer-0\" (UID: \"f98f4f9b-3a31-4719-ba9a-f474fcd9672d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.441449 4558 scope.go:117] "RemoveContainer" containerID="4cc6b4a1f4213019563ea4e8c7f4f34a8130a2fd3bb9deeeb2a3e0cb36b24814" Jan 20 17:24:37 crc kubenswrapper[4558]: E0120 17:24:37.442614 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4cc6b4a1f4213019563ea4e8c7f4f34a8130a2fd3bb9deeeb2a3e0cb36b24814\": container with ID starting with 4cc6b4a1f4213019563ea4e8c7f4f34a8130a2fd3bb9deeeb2a3e0cb36b24814 not found: ID does not exist" containerID="4cc6b4a1f4213019563ea4e8c7f4f34a8130a2fd3bb9deeeb2a3e0cb36b24814" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.442649 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4cc6b4a1f4213019563ea4e8c7f4f34a8130a2fd3bb9deeeb2a3e0cb36b24814"} err="failed to get container status \"4cc6b4a1f4213019563ea4e8c7f4f34a8130a2fd3bb9deeeb2a3e0cb36b24814\": rpc error: code = NotFound desc = could not find container \"4cc6b4a1f4213019563ea4e8c7f4f34a8130a2fd3bb9deeeb2a3e0cb36b24814\": container with ID starting with 4cc6b4a1f4213019563ea4e8c7f4f34a8130a2fd3bb9deeeb2a3e0cb36b24814 not found: ID does not exist" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.442673 4558 scope.go:117] "RemoveContainer" containerID="500da8ecd44bd082f1fc0b940db89a832e557dcbbd211723fa9bb91ca4e746bc" Jan 20 17:24:37 crc kubenswrapper[4558]: E0120 17:24:37.444367 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"500da8ecd44bd082f1fc0b940db89a832e557dcbbd211723fa9bb91ca4e746bc\": container with ID starting with 500da8ecd44bd082f1fc0b940db89a832e557dcbbd211723fa9bb91ca4e746bc not found: ID does not exist" containerID="500da8ecd44bd082f1fc0b940db89a832e557dcbbd211723fa9bb91ca4e746bc" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.444393 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"500da8ecd44bd082f1fc0b940db89a832e557dcbbd211723fa9bb91ca4e746bc"} err="failed to get container status \"500da8ecd44bd082f1fc0b940db89a832e557dcbbd211723fa9bb91ca4e746bc\": rpc error: code = NotFound desc = could not find container \"500da8ecd44bd082f1fc0b940db89a832e557dcbbd211723fa9bb91ca4e746bc\": container with ID starting with 500da8ecd44bd082f1fc0b940db89a832e557dcbbd211723fa9bb91ca4e746bc not found: ID does not exist" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.444412 4558 scope.go:117] "RemoveContainer" containerID="7947aee6092ae17cad35b9b67df7da0268558ba84fafb3187d4167522b7e4507" Jan 20 17:24:37 crc kubenswrapper[4558]: E0120 17:24:37.445578 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7947aee6092ae17cad35b9b67df7da0268558ba84fafb3187d4167522b7e4507\": container with ID starting with 7947aee6092ae17cad35b9b67df7da0268558ba84fafb3187d4167522b7e4507 not found: ID does not exist" 
containerID="7947aee6092ae17cad35b9b67df7da0268558ba84fafb3187d4167522b7e4507" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.445616 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7947aee6092ae17cad35b9b67df7da0268558ba84fafb3187d4167522b7e4507"} err="failed to get container status \"7947aee6092ae17cad35b9b67df7da0268558ba84fafb3187d4167522b7e4507\": rpc error: code = NotFound desc = could not find container \"7947aee6092ae17cad35b9b67df7da0268558ba84fafb3187d4167522b7e4507\": container with ID starting with 7947aee6092ae17cad35b9b67df7da0268558ba84fafb3187d4167522b7e4507 not found: ID does not exist" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.445655 4558 scope.go:117] "RemoveContainer" containerID="95025487af705f3e41bbff12d0547501461d0af163d7b016973ec88665be1dff" Jan 20 17:24:37 crc kubenswrapper[4558]: E0120 17:24:37.446102 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"95025487af705f3e41bbff12d0547501461d0af163d7b016973ec88665be1dff\": container with ID starting with 95025487af705f3e41bbff12d0547501461d0af163d7b016973ec88665be1dff not found: ID does not exist" containerID="95025487af705f3e41bbff12d0547501461d0af163d7b016973ec88665be1dff" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.446129 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"95025487af705f3e41bbff12d0547501461d0af163d7b016973ec88665be1dff"} err="failed to get container status \"95025487af705f3e41bbff12d0547501461d0af163d7b016973ec88665be1dff\": rpc error: code = NotFound desc = could not find container \"95025487af705f3e41bbff12d0547501461d0af163d7b016973ec88665be1dff\": container with ID starting with 95025487af705f3e41bbff12d0547501461d0af163d7b016973ec88665be1dff not found: ID does not exist" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.446145 4558 scope.go:117] "RemoveContainer" containerID="4cc6b4a1f4213019563ea4e8c7f4f34a8130a2fd3bb9deeeb2a3e0cb36b24814" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.448357 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4cc6b4a1f4213019563ea4e8c7f4f34a8130a2fd3bb9deeeb2a3e0cb36b24814"} err="failed to get container status \"4cc6b4a1f4213019563ea4e8c7f4f34a8130a2fd3bb9deeeb2a3e0cb36b24814\": rpc error: code = NotFound desc = could not find container \"4cc6b4a1f4213019563ea4e8c7f4f34a8130a2fd3bb9deeeb2a3e0cb36b24814\": container with ID starting with 4cc6b4a1f4213019563ea4e8c7f4f34a8130a2fd3bb9deeeb2a3e0cb36b24814 not found: ID does not exist" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.448398 4558 scope.go:117] "RemoveContainer" containerID="500da8ecd44bd082f1fc0b940db89a832e557dcbbd211723fa9bb91ca4e746bc" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.448680 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"500da8ecd44bd082f1fc0b940db89a832e557dcbbd211723fa9bb91ca4e746bc"} err="failed to get container status \"500da8ecd44bd082f1fc0b940db89a832e557dcbbd211723fa9bb91ca4e746bc\": rpc error: code = NotFound desc = could not find container \"500da8ecd44bd082f1fc0b940db89a832e557dcbbd211723fa9bb91ca4e746bc\": container with ID starting with 500da8ecd44bd082f1fc0b940db89a832e557dcbbd211723fa9bb91ca4e746bc not found: ID does not exist" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.448709 4558 scope.go:117] "RemoveContainer" 
containerID="7947aee6092ae17cad35b9b67df7da0268558ba84fafb3187d4167522b7e4507" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.449232 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7947aee6092ae17cad35b9b67df7da0268558ba84fafb3187d4167522b7e4507"} err="failed to get container status \"7947aee6092ae17cad35b9b67df7da0268558ba84fafb3187d4167522b7e4507\": rpc error: code = NotFound desc = could not find container \"7947aee6092ae17cad35b9b67df7da0268558ba84fafb3187d4167522b7e4507\": container with ID starting with 7947aee6092ae17cad35b9b67df7da0268558ba84fafb3187d4167522b7e4507 not found: ID does not exist" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.449257 4558 scope.go:117] "RemoveContainer" containerID="95025487af705f3e41bbff12d0547501461d0af163d7b016973ec88665be1dff" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.449509 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"95025487af705f3e41bbff12d0547501461d0af163d7b016973ec88665be1dff"} err="failed to get container status \"95025487af705f3e41bbff12d0547501461d0af163d7b016973ec88665be1dff\": rpc error: code = NotFound desc = could not find container \"95025487af705f3e41bbff12d0547501461d0af163d7b016973ec88665be1dff\": container with ID starting with 95025487af705f3e41bbff12d0547501461d0af163d7b016973ec88665be1dff not found: ID does not exist" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.449534 4558 scope.go:117] "RemoveContainer" containerID="4cc6b4a1f4213019563ea4e8c7f4f34a8130a2fd3bb9deeeb2a3e0cb36b24814" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.449851 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4cc6b4a1f4213019563ea4e8c7f4f34a8130a2fd3bb9deeeb2a3e0cb36b24814"} err="failed to get container status \"4cc6b4a1f4213019563ea4e8c7f4f34a8130a2fd3bb9deeeb2a3e0cb36b24814\": rpc error: code = NotFound desc = could not find container \"4cc6b4a1f4213019563ea4e8c7f4f34a8130a2fd3bb9deeeb2a3e0cb36b24814\": container with ID starting with 4cc6b4a1f4213019563ea4e8c7f4f34a8130a2fd3bb9deeeb2a3e0cb36b24814 not found: ID does not exist" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.449872 4558 scope.go:117] "RemoveContainer" containerID="500da8ecd44bd082f1fc0b940db89a832e557dcbbd211723fa9bb91ca4e746bc" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.450291 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"500da8ecd44bd082f1fc0b940db89a832e557dcbbd211723fa9bb91ca4e746bc"} err="failed to get container status \"500da8ecd44bd082f1fc0b940db89a832e557dcbbd211723fa9bb91ca4e746bc\": rpc error: code = NotFound desc = could not find container \"500da8ecd44bd082f1fc0b940db89a832e557dcbbd211723fa9bb91ca4e746bc\": container with ID starting with 500da8ecd44bd082f1fc0b940db89a832e557dcbbd211723fa9bb91ca4e746bc not found: ID does not exist" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.450320 4558 scope.go:117] "RemoveContainer" containerID="7947aee6092ae17cad35b9b67df7da0268558ba84fafb3187d4167522b7e4507" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.450844 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7947aee6092ae17cad35b9b67df7da0268558ba84fafb3187d4167522b7e4507"} err="failed to get container status \"7947aee6092ae17cad35b9b67df7da0268558ba84fafb3187d4167522b7e4507\": rpc error: code = NotFound desc = could not find 
container \"7947aee6092ae17cad35b9b67df7da0268558ba84fafb3187d4167522b7e4507\": container with ID starting with 7947aee6092ae17cad35b9b67df7da0268558ba84fafb3187d4167522b7e4507 not found: ID does not exist" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.450869 4558 scope.go:117] "RemoveContainer" containerID="95025487af705f3e41bbff12d0547501461d0af163d7b016973ec88665be1dff" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.451521 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"95025487af705f3e41bbff12d0547501461d0af163d7b016973ec88665be1dff"} err="failed to get container status \"95025487af705f3e41bbff12d0547501461d0af163d7b016973ec88665be1dff\": rpc error: code = NotFound desc = could not find container \"95025487af705f3e41bbff12d0547501461d0af163d7b016973ec88665be1dff\": container with ID starting with 95025487af705f3e41bbff12d0547501461d0af163d7b016973ec88665be1dff not found: ID does not exist" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.451543 4558 scope.go:117] "RemoveContainer" containerID="4cc6b4a1f4213019563ea4e8c7f4f34a8130a2fd3bb9deeeb2a3e0cb36b24814" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.451937 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4cc6b4a1f4213019563ea4e8c7f4f34a8130a2fd3bb9deeeb2a3e0cb36b24814"} err="failed to get container status \"4cc6b4a1f4213019563ea4e8c7f4f34a8130a2fd3bb9deeeb2a3e0cb36b24814\": rpc error: code = NotFound desc = could not find container \"4cc6b4a1f4213019563ea4e8c7f4f34a8130a2fd3bb9deeeb2a3e0cb36b24814\": container with ID starting with 4cc6b4a1f4213019563ea4e8c7f4f34a8130a2fd3bb9deeeb2a3e0cb36b24814 not found: ID does not exist" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.451963 4558 scope.go:117] "RemoveContainer" containerID="500da8ecd44bd082f1fc0b940db89a832e557dcbbd211723fa9bb91ca4e746bc" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.452291 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"500da8ecd44bd082f1fc0b940db89a832e557dcbbd211723fa9bb91ca4e746bc"} err="failed to get container status \"500da8ecd44bd082f1fc0b940db89a832e557dcbbd211723fa9bb91ca4e746bc\": rpc error: code = NotFound desc = could not find container \"500da8ecd44bd082f1fc0b940db89a832e557dcbbd211723fa9bb91ca4e746bc\": container with ID starting with 500da8ecd44bd082f1fc0b940db89a832e557dcbbd211723fa9bb91ca4e746bc not found: ID does not exist" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.452317 4558 scope.go:117] "RemoveContainer" containerID="7947aee6092ae17cad35b9b67df7da0268558ba84fafb3187d4167522b7e4507" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.452697 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7947aee6092ae17cad35b9b67df7da0268558ba84fafb3187d4167522b7e4507"} err="failed to get container status \"7947aee6092ae17cad35b9b67df7da0268558ba84fafb3187d4167522b7e4507\": rpc error: code = NotFound desc = could not find container \"7947aee6092ae17cad35b9b67df7da0268558ba84fafb3187d4167522b7e4507\": container with ID starting with 7947aee6092ae17cad35b9b67df7da0268558ba84fafb3187d4167522b7e4507 not found: ID does not exist" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.452726 4558 scope.go:117] "RemoveContainer" containerID="95025487af705f3e41bbff12d0547501461d0af163d7b016973ec88665be1dff" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.453123 4558 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"95025487af705f3e41bbff12d0547501461d0af163d7b016973ec88665be1dff"} err="failed to get container status \"95025487af705f3e41bbff12d0547501461d0af163d7b016973ec88665be1dff\": rpc error: code = NotFound desc = could not find container \"95025487af705f3e41bbff12d0547501461d0af163d7b016973ec88665be1dff\": container with ID starting with 95025487af705f3e41bbff12d0547501461d0af163d7b016973ec88665be1dff not found: ID does not exist" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.653968 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:37 crc kubenswrapper[4558]: I0120 17:24:37.867720 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:24:37 crc kubenswrapper[4558]: W0120 17:24:37.871231 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod269584c1_a4ed_43e5_a1cb_1e5e6952df11.slice/crio-aab62f80f8c125005398693e756fd2c9cf4795cd0187ce218c54a1d92159e226 WatchSource:0}: Error finding container aab62f80f8c125005398693e756fd2c9cf4795cd0187ce218c54a1d92159e226: Status 404 returned error can't find the container with id aab62f80f8c125005398693e756fd2c9cf4795cd0187ce218c54a1d92159e226 Jan 20 17:24:38 crc kubenswrapper[4558]: I0120 17:24:38.098013 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:24:38 crc kubenswrapper[4558]: I0120 17:24:38.106008 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:24:38 crc kubenswrapper[4558]: I0120 17:24:38.149204 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"269584c1-a4ed-43e5-a1cb-1e5e6952df11","Type":"ContainerStarted","Data":"aab62f80f8c125005398693e756fd2c9cf4795cd0187ce218c54a1d92159e226"} Jan 20 17:24:38 crc kubenswrapper[4558]: I0120 17:24:38.151465 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd","Type":"ContainerStarted","Data":"ca56992fbef2886a2ace481786936757fbf282ec4e0bc3e84966541d89eaaf75"} Jan 20 17:24:38 crc kubenswrapper[4558]: I0120 17:24:38.156749 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"f98f4f9b-3a31-4719-ba9a-f474fcd9672d","Type":"ContainerStarted","Data":"15da5698d7bbc1a7926d65eda981ca83958ca11aafa3d27eeb6ad62f2f00744c"} Jan 20 17:24:38 crc kubenswrapper[4558]: I0120 17:24:38.173529 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-external-api-0" podStartSLOduration=3.173517615 podStartE2EDuration="3.173517615s" podCreationTimestamp="2026-01-20 17:24:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:24:38.171666062 +0000 UTC m=+2571.932004030" watchObservedRunningTime="2026-01-20 17:24:38.173517615 +0000 UTC m=+2571.933855583" Jan 20 17:24:38 crc kubenswrapper[4558]: I0120 17:24:38.578882 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="11c22b1a-a7cd-40d8-9b6d-cd095b913a81" path="/var/lib/kubelet/pods/11c22b1a-a7cd-40d8-9b6d-cd095b913a81/volumes" Jan 20 
17:24:38 crc kubenswrapper[4558]: I0120 17:24:38.580093 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="901ea0e0-d046-4871-887c-ea58a1d8fcb0" path="/var/lib/kubelet/pods/901ea0e0-d046-4871-887c-ea58a1d8fcb0/volumes" Jan 20 17:24:38 crc kubenswrapper[4558]: I0120 17:24:38.581484 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f2f1744d-79dd-43bc-8bea-93e00672c805" path="/var/lib/kubelet/pods/f2f1744d-79dd-43bc-8bea-93e00672c805/volumes" Jan 20 17:24:39 crc kubenswrapper[4558]: I0120 17:24:39.171130 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"f98f4f9b-3a31-4719-ba9a-f474fcd9672d","Type":"ContainerStarted","Data":"3e056ccb3cd64b510e92d589d69de6a3a8914d1942184aa389607defc32073c3"} Jan 20 17:24:39 crc kubenswrapper[4558]: I0120 17:24:39.174877 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"269584c1-a4ed-43e5-a1cb-1e5e6952df11","Type":"ContainerStarted","Data":"bc74e5cb26055978318c9f2b43ca67fd1b3e4cc5ce3e22f2e91cfdfd0a3b5a4e"} Jan 20 17:24:39 crc kubenswrapper[4558]: I0120 17:24:39.174904 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"269584c1-a4ed-43e5-a1cb-1e5e6952df11","Type":"ContainerStarted","Data":"da0ca9294c16d1999fa49c21fdd4c31261941cef28b785b5ef05cdd451846e52"} Jan 20 17:24:39 crc kubenswrapper[4558]: I0120 17:24:39.202748 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-internal-api-0" podStartSLOduration=3.202734533 podStartE2EDuration="3.202734533s" podCreationTimestamp="2026-01-20 17:24:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:24:39.195672269 +0000 UTC m=+2572.956010236" watchObservedRunningTime="2026-01-20 17:24:39.202734533 +0000 UTC m=+2572.963072500" Jan 20 17:24:40 crc kubenswrapper[4558]: I0120 17:24:40.186784 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"f98f4f9b-3a31-4719-ba9a-f474fcd9672d","Type":"ContainerStarted","Data":"063df31930f6dbd820fdf262f0dcbafb7b4bfcb41c7ce7617938d8b010f2cf36"} Jan 20 17:24:40 crc kubenswrapper[4558]: I0120 17:24:40.191577 4558 generic.go:334] "Generic (PLEG): container finished" podID="2760b0e9-ab1f-4fe1-8240-5a638afaef7b" containerID="9b5faec98795b92df82474a1c5193b401d1ccb9d90b9025c1ac178dc22c1725f" exitCode=0 Jan 20 17:24:40 crc kubenswrapper[4558]: I0120 17:24:40.191732 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-sxc6v" event={"ID":"2760b0e9-ab1f-4fe1-8240-5a638afaef7b","Type":"ContainerDied","Data":"9b5faec98795b92df82474a1c5193b401d1ccb9d90b9025c1ac178dc22c1725f"} Jan 20 17:24:41 crc kubenswrapper[4558]: I0120 17:24:41.203304 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"f98f4f9b-3a31-4719-ba9a-f474fcd9672d","Type":"ContainerStarted","Data":"cfafbe7816090f51081840df2dc763fe394259b84171ee55ec514a3163dd7b51"} Jan 20 17:24:41 crc kubenswrapper[4558]: I0120 17:24:41.518844 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-sxc6v" Jan 20 17:24:41 crc kubenswrapper[4558]: I0120 17:24:41.618959 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-br4dz\" (UniqueName: \"kubernetes.io/projected/2760b0e9-ab1f-4fe1-8240-5a638afaef7b-kube-api-access-br4dz\") pod \"2760b0e9-ab1f-4fe1-8240-5a638afaef7b\" (UID: \"2760b0e9-ab1f-4fe1-8240-5a638afaef7b\") " Jan 20 17:24:41 crc kubenswrapper[4558]: I0120 17:24:41.619274 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2760b0e9-ab1f-4fe1-8240-5a638afaef7b-combined-ca-bundle\") pod \"2760b0e9-ab1f-4fe1-8240-5a638afaef7b\" (UID: \"2760b0e9-ab1f-4fe1-8240-5a638afaef7b\") " Jan 20 17:24:41 crc kubenswrapper[4558]: I0120 17:24:41.619727 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2760b0e9-ab1f-4fe1-8240-5a638afaef7b-config-data\") pod \"2760b0e9-ab1f-4fe1-8240-5a638afaef7b\" (UID: \"2760b0e9-ab1f-4fe1-8240-5a638afaef7b\") " Jan 20 17:24:41 crc kubenswrapper[4558]: I0120 17:24:41.619892 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2760b0e9-ab1f-4fe1-8240-5a638afaef7b-scripts\") pod \"2760b0e9-ab1f-4fe1-8240-5a638afaef7b\" (UID: \"2760b0e9-ab1f-4fe1-8240-5a638afaef7b\") " Jan 20 17:24:41 crc kubenswrapper[4558]: I0120 17:24:41.625972 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2760b0e9-ab1f-4fe1-8240-5a638afaef7b-scripts" (OuterVolumeSpecName: "scripts") pod "2760b0e9-ab1f-4fe1-8240-5a638afaef7b" (UID: "2760b0e9-ab1f-4fe1-8240-5a638afaef7b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:24:41 crc kubenswrapper[4558]: I0120 17:24:41.638894 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2760b0e9-ab1f-4fe1-8240-5a638afaef7b-kube-api-access-br4dz" (OuterVolumeSpecName: "kube-api-access-br4dz") pod "2760b0e9-ab1f-4fe1-8240-5a638afaef7b" (UID: "2760b0e9-ab1f-4fe1-8240-5a638afaef7b"). InnerVolumeSpecName "kube-api-access-br4dz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:24:41 crc kubenswrapper[4558]: I0120 17:24:41.646463 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2760b0e9-ab1f-4fe1-8240-5a638afaef7b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2760b0e9-ab1f-4fe1-8240-5a638afaef7b" (UID: "2760b0e9-ab1f-4fe1-8240-5a638afaef7b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:24:41 crc kubenswrapper[4558]: I0120 17:24:41.647146 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2760b0e9-ab1f-4fe1-8240-5a638afaef7b-config-data" (OuterVolumeSpecName: "config-data") pod "2760b0e9-ab1f-4fe1-8240-5a638afaef7b" (UID: "2760b0e9-ab1f-4fe1-8240-5a638afaef7b"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:24:41 crc kubenswrapper[4558]: I0120 17:24:41.722934 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-br4dz\" (UniqueName: \"kubernetes.io/projected/2760b0e9-ab1f-4fe1-8240-5a638afaef7b-kube-api-access-br4dz\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:41 crc kubenswrapper[4558]: I0120 17:24:41.722965 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2760b0e9-ab1f-4fe1-8240-5a638afaef7b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:41 crc kubenswrapper[4558]: I0120 17:24:41.722976 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2760b0e9-ab1f-4fe1-8240-5a638afaef7b-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:41 crc kubenswrapper[4558]: I0120 17:24:41.722987 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2760b0e9-ab1f-4fe1-8240-5a638afaef7b-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:42 crc kubenswrapper[4558]: I0120 17:24:42.215955 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-sxc6v" event={"ID":"2760b0e9-ab1f-4fe1-8240-5a638afaef7b","Type":"ContainerDied","Data":"f43be13c2818fa2b75424c3ad3cf8e2be7035c170dc7e8378511c8931a5d1f61"} Jan 20 17:24:42 crc kubenswrapper[4558]: I0120 17:24:42.216029 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f43be13c2818fa2b75424c3ad3cf8e2be7035c170dc7e8378511c8931a5d1f61" Jan 20 17:24:42 crc kubenswrapper[4558]: I0120 17:24:42.217260 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-sxc6v" Jan 20 17:24:42 crc kubenswrapper[4558]: I0120 17:24:42.279717 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:24:42 crc kubenswrapper[4558]: E0120 17:24:42.280628 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2760b0e9-ab1f-4fe1-8240-5a638afaef7b" containerName="nova-cell0-conductor-db-sync" Jan 20 17:24:42 crc kubenswrapper[4558]: I0120 17:24:42.280660 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="2760b0e9-ab1f-4fe1-8240-5a638afaef7b" containerName="nova-cell0-conductor-db-sync" Jan 20 17:24:42 crc kubenswrapper[4558]: I0120 17:24:42.280988 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="2760b0e9-ab1f-4fe1-8240-5a638afaef7b" containerName="nova-cell0-conductor-db-sync" Jan 20 17:24:42 crc kubenswrapper[4558]: I0120 17:24:42.281695 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:24:42 crc kubenswrapper[4558]: I0120 17:24:42.283967 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-nova-dockercfg-k8zdz" Jan 20 17:24:42 crc kubenswrapper[4558]: I0120 17:24:42.286481 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-conductor-config-data" Jan 20 17:24:42 crc kubenswrapper[4558]: I0120 17:24:42.291126 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:24:42 crc kubenswrapper[4558]: I0120 17:24:42.439409 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/165828ca-683d-41f2-a055-7e99870bf131-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"165828ca-683d-41f2-a055-7e99870bf131\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:24:42 crc kubenswrapper[4558]: I0120 17:24:42.439914 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/165828ca-683d-41f2-a055-7e99870bf131-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"165828ca-683d-41f2-a055-7e99870bf131\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:24:42 crc kubenswrapper[4558]: I0120 17:24:42.440129 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vndc8\" (UniqueName: \"kubernetes.io/projected/165828ca-683d-41f2-a055-7e99870bf131-kube-api-access-vndc8\") pod \"nova-cell0-conductor-0\" (UID: \"165828ca-683d-41f2-a055-7e99870bf131\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:24:42 crc kubenswrapper[4558]: I0120 17:24:42.542450 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/165828ca-683d-41f2-a055-7e99870bf131-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"165828ca-683d-41f2-a055-7e99870bf131\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:24:42 crc kubenswrapper[4558]: I0120 17:24:42.542518 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/165828ca-683d-41f2-a055-7e99870bf131-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"165828ca-683d-41f2-a055-7e99870bf131\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:24:42 crc kubenswrapper[4558]: I0120 17:24:42.542601 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vndc8\" (UniqueName: \"kubernetes.io/projected/165828ca-683d-41f2-a055-7e99870bf131-kube-api-access-vndc8\") pod \"nova-cell0-conductor-0\" (UID: \"165828ca-683d-41f2-a055-7e99870bf131\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:24:42 crc kubenswrapper[4558]: I0120 17:24:42.548045 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/165828ca-683d-41f2-a055-7e99870bf131-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"165828ca-683d-41f2-a055-7e99870bf131\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:24:42 crc kubenswrapper[4558]: I0120 17:24:42.548116 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/165828ca-683d-41f2-a055-7e99870bf131-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"165828ca-683d-41f2-a055-7e99870bf131\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:24:42 crc kubenswrapper[4558]: I0120 17:24:42.562041 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vndc8\" (UniqueName: \"kubernetes.io/projected/165828ca-683d-41f2-a055-7e99870bf131-kube-api-access-vndc8\") pod \"nova-cell0-conductor-0\" (UID: \"165828ca-683d-41f2-a055-7e99870bf131\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:24:42 crc kubenswrapper[4558]: I0120 17:24:42.596103 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:24:43 crc kubenswrapper[4558]: I0120 17:24:43.026121 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:24:43 crc kubenswrapper[4558]: I0120 17:24:43.229749 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"f98f4f9b-3a31-4719-ba9a-f474fcd9672d","Type":"ContainerStarted","Data":"281bdbc88305581b571cabddc241447314e71a8e9737c9d19ca08d9adbda0586"} Jan 20 17:24:43 crc kubenswrapper[4558]: I0120 17:24:43.229869 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="f98f4f9b-3a31-4719-ba9a-f474fcd9672d" containerName="ceilometer-central-agent" containerID="cri-o://3e056ccb3cd64b510e92d589d69de6a3a8914d1942184aa389607defc32073c3" gracePeriod=30 Jan 20 17:24:43 crc kubenswrapper[4558]: I0120 17:24:43.229914 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:43 crc kubenswrapper[4558]: I0120 17:24:43.229942 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="f98f4f9b-3a31-4719-ba9a-f474fcd9672d" containerName="proxy-httpd" containerID="cri-o://281bdbc88305581b571cabddc241447314e71a8e9737c9d19ca08d9adbda0586" gracePeriod=30 Jan 20 17:24:43 crc kubenswrapper[4558]: I0120 17:24:43.229974 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="f98f4f9b-3a31-4719-ba9a-f474fcd9672d" containerName="sg-core" containerID="cri-o://cfafbe7816090f51081840df2dc763fe394259b84171ee55ec514a3163dd7b51" gracePeriod=30 Jan 20 17:24:43 crc kubenswrapper[4558]: I0120 17:24:43.229978 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="f98f4f9b-3a31-4719-ba9a-f474fcd9672d" containerName="ceilometer-notification-agent" containerID="cri-o://063df31930f6dbd820fdf262f0dcbafb7b4bfcb41c7ce7617938d8b010f2cf36" gracePeriod=30 Jan 20 17:24:43 crc kubenswrapper[4558]: I0120 17:24:43.231848 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"165828ca-683d-41f2-a055-7e99870bf131","Type":"ContainerStarted","Data":"f338fd1e92045bdfa117d8f2fbbd25aabf07d0176139c21b6ceada6be6da38fa"} Jan 20 17:24:43 crc kubenswrapper[4558]: I0120 17:24:43.253475 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=1.943585858 podStartE2EDuration="6.253454047s" podCreationTimestamp="2026-01-20 17:24:37 +0000 UTC" 
firstStartedPulling="2026-01-20 17:24:38.124012854 +0000 UTC m=+2571.884350821" lastFinishedPulling="2026-01-20 17:24:42.433881043 +0000 UTC m=+2576.194219010" observedRunningTime="2026-01-20 17:24:43.248554951 +0000 UTC m=+2577.008892919" watchObservedRunningTime="2026-01-20 17:24:43.253454047 +0000 UTC m=+2577.013792014" Jan 20 17:24:44 crc kubenswrapper[4558]: I0120 17:24:44.250659 4558 generic.go:334] "Generic (PLEG): container finished" podID="f98f4f9b-3a31-4719-ba9a-f474fcd9672d" containerID="281bdbc88305581b571cabddc241447314e71a8e9737c9d19ca08d9adbda0586" exitCode=0 Jan 20 17:24:44 crc kubenswrapper[4558]: I0120 17:24:44.250701 4558 generic.go:334] "Generic (PLEG): container finished" podID="f98f4f9b-3a31-4719-ba9a-f474fcd9672d" containerID="cfafbe7816090f51081840df2dc763fe394259b84171ee55ec514a3163dd7b51" exitCode=2 Jan 20 17:24:44 crc kubenswrapper[4558]: I0120 17:24:44.250711 4558 generic.go:334] "Generic (PLEG): container finished" podID="f98f4f9b-3a31-4719-ba9a-f474fcd9672d" containerID="063df31930f6dbd820fdf262f0dcbafb7b4bfcb41c7ce7617938d8b010f2cf36" exitCode=0 Jan 20 17:24:44 crc kubenswrapper[4558]: I0120 17:24:44.250668 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"f98f4f9b-3a31-4719-ba9a-f474fcd9672d","Type":"ContainerDied","Data":"281bdbc88305581b571cabddc241447314e71a8e9737c9d19ca08d9adbda0586"} Jan 20 17:24:44 crc kubenswrapper[4558]: I0120 17:24:44.250809 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"f98f4f9b-3a31-4719-ba9a-f474fcd9672d","Type":"ContainerDied","Data":"cfafbe7816090f51081840df2dc763fe394259b84171ee55ec514a3163dd7b51"} Jan 20 17:24:44 crc kubenswrapper[4558]: I0120 17:24:44.250829 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"f98f4f9b-3a31-4719-ba9a-f474fcd9672d","Type":"ContainerDied","Data":"063df31930f6dbd820fdf262f0dcbafb7b4bfcb41c7ce7617938d8b010f2cf36"} Jan 20 17:24:44 crc kubenswrapper[4558]: I0120 17:24:44.254493 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"165828ca-683d-41f2-a055-7e99870bf131","Type":"ContainerStarted","Data":"be8c548bad3d4853a64ebbae913ef2602961735470fbdaf076725d3ca8f5145e"} Jan 20 17:24:44 crc kubenswrapper[4558]: I0120 17:24:44.255988 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:24:44 crc kubenswrapper[4558]: I0120 17:24:44.274781 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podStartSLOduration=2.274750826 podStartE2EDuration="2.274750826s" podCreationTimestamp="2026-01-20 17:24:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:24:44.274131311 +0000 UTC m=+2578.034469278" watchObservedRunningTime="2026-01-20 17:24:44.274750826 +0000 UTC m=+2578.035088784" Jan 20 17:24:45 crc kubenswrapper[4558]: I0120 17:24:45.491602 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:24:45 crc kubenswrapper[4558]: I0120 17:24:45.492185 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:24:45 crc kubenswrapper[4558]: I0120 17:24:45.525411 
4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:24:45 crc kubenswrapper[4558]: I0120 17:24:45.535858 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:24:46 crc kubenswrapper[4558]: I0120 17:24:46.277932 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:46 crc kubenswrapper[4558]: I0120 17:24:46.279719 4558 generic.go:334] "Generic (PLEG): container finished" podID="f98f4f9b-3a31-4719-ba9a-f474fcd9672d" containerID="3e056ccb3cd64b510e92d589d69de6a3a8914d1942184aa389607defc32073c3" exitCode=0 Jan 20 17:24:46 crc kubenswrapper[4558]: I0120 17:24:46.279799 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"f98f4f9b-3a31-4719-ba9a-f474fcd9672d","Type":"ContainerDied","Data":"3e056ccb3cd64b510e92d589d69de6a3a8914d1942184aa389607defc32073c3"} Jan 20 17:24:46 crc kubenswrapper[4558]: I0120 17:24:46.279950 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"f98f4f9b-3a31-4719-ba9a-f474fcd9672d","Type":"ContainerDied","Data":"15da5698d7bbc1a7926d65eda981ca83958ca11aafa3d27eeb6ad62f2f00744c"} Jan 20 17:24:46 crc kubenswrapper[4558]: I0120 17:24:46.280006 4558 scope.go:117] "RemoveContainer" containerID="281bdbc88305581b571cabddc241447314e71a8e9737c9d19ca08d9adbda0586" Jan 20 17:24:46 crc kubenswrapper[4558]: I0120 17:24:46.280213 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:24:46 crc kubenswrapper[4558]: I0120 17:24:46.280543 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:24:46 crc kubenswrapper[4558]: I0120 17:24:46.305797 4558 scope.go:117] "RemoveContainer" containerID="cfafbe7816090f51081840df2dc763fe394259b84171ee55ec514a3163dd7b51" Jan 20 17:24:46 crc kubenswrapper[4558]: I0120 17:24:46.325367 4558 scope.go:117] "RemoveContainer" containerID="063df31930f6dbd820fdf262f0dcbafb7b4bfcb41c7ce7617938d8b010f2cf36" Jan 20 17:24:46 crc kubenswrapper[4558]: I0120 17:24:46.342305 4558 scope.go:117] "RemoveContainer" containerID="3e056ccb3cd64b510e92d589d69de6a3a8914d1942184aa389607defc32073c3" Jan 20 17:24:46 crc kubenswrapper[4558]: I0120 17:24:46.361494 4558 scope.go:117] "RemoveContainer" containerID="281bdbc88305581b571cabddc241447314e71a8e9737c9d19ca08d9adbda0586" Jan 20 17:24:46 crc kubenswrapper[4558]: E0120 17:24:46.361882 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"281bdbc88305581b571cabddc241447314e71a8e9737c9d19ca08d9adbda0586\": container with ID starting with 281bdbc88305581b571cabddc241447314e71a8e9737c9d19ca08d9adbda0586 not found: ID does not exist" containerID="281bdbc88305581b571cabddc241447314e71a8e9737c9d19ca08d9adbda0586" Jan 20 17:24:46 crc kubenswrapper[4558]: I0120 17:24:46.361923 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"281bdbc88305581b571cabddc241447314e71a8e9737c9d19ca08d9adbda0586"} err="failed to get container status \"281bdbc88305581b571cabddc241447314e71a8e9737c9d19ca08d9adbda0586\": rpc error: code = NotFound desc = could not find container 
\"281bdbc88305581b571cabddc241447314e71a8e9737c9d19ca08d9adbda0586\": container with ID starting with 281bdbc88305581b571cabddc241447314e71a8e9737c9d19ca08d9adbda0586 not found: ID does not exist" Jan 20 17:24:46 crc kubenswrapper[4558]: I0120 17:24:46.361950 4558 scope.go:117] "RemoveContainer" containerID="cfafbe7816090f51081840df2dc763fe394259b84171ee55ec514a3163dd7b51" Jan 20 17:24:46 crc kubenswrapper[4558]: E0120 17:24:46.362257 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cfafbe7816090f51081840df2dc763fe394259b84171ee55ec514a3163dd7b51\": container with ID starting with cfafbe7816090f51081840df2dc763fe394259b84171ee55ec514a3163dd7b51 not found: ID does not exist" containerID="cfafbe7816090f51081840df2dc763fe394259b84171ee55ec514a3163dd7b51" Jan 20 17:24:46 crc kubenswrapper[4558]: I0120 17:24:46.362312 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cfafbe7816090f51081840df2dc763fe394259b84171ee55ec514a3163dd7b51"} err="failed to get container status \"cfafbe7816090f51081840df2dc763fe394259b84171ee55ec514a3163dd7b51\": rpc error: code = NotFound desc = could not find container \"cfafbe7816090f51081840df2dc763fe394259b84171ee55ec514a3163dd7b51\": container with ID starting with cfafbe7816090f51081840df2dc763fe394259b84171ee55ec514a3163dd7b51 not found: ID does not exist" Jan 20 17:24:46 crc kubenswrapper[4558]: I0120 17:24:46.362336 4558 scope.go:117] "RemoveContainer" containerID="063df31930f6dbd820fdf262f0dcbafb7b4bfcb41c7ce7617938d8b010f2cf36" Jan 20 17:24:46 crc kubenswrapper[4558]: E0120 17:24:46.362633 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"063df31930f6dbd820fdf262f0dcbafb7b4bfcb41c7ce7617938d8b010f2cf36\": container with ID starting with 063df31930f6dbd820fdf262f0dcbafb7b4bfcb41c7ce7617938d8b010f2cf36 not found: ID does not exist" containerID="063df31930f6dbd820fdf262f0dcbafb7b4bfcb41c7ce7617938d8b010f2cf36" Jan 20 17:24:46 crc kubenswrapper[4558]: I0120 17:24:46.362659 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"063df31930f6dbd820fdf262f0dcbafb7b4bfcb41c7ce7617938d8b010f2cf36"} err="failed to get container status \"063df31930f6dbd820fdf262f0dcbafb7b4bfcb41c7ce7617938d8b010f2cf36\": rpc error: code = NotFound desc = could not find container \"063df31930f6dbd820fdf262f0dcbafb7b4bfcb41c7ce7617938d8b010f2cf36\": container with ID starting with 063df31930f6dbd820fdf262f0dcbafb7b4bfcb41c7ce7617938d8b010f2cf36 not found: ID does not exist" Jan 20 17:24:46 crc kubenswrapper[4558]: I0120 17:24:46.362677 4558 scope.go:117] "RemoveContainer" containerID="3e056ccb3cd64b510e92d589d69de6a3a8914d1942184aa389607defc32073c3" Jan 20 17:24:46 crc kubenswrapper[4558]: E0120 17:24:46.363113 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3e056ccb3cd64b510e92d589d69de6a3a8914d1942184aa389607defc32073c3\": container with ID starting with 3e056ccb3cd64b510e92d589d69de6a3a8914d1942184aa389607defc32073c3 not found: ID does not exist" containerID="3e056ccb3cd64b510e92d589d69de6a3a8914d1942184aa389607defc32073c3" Jan 20 17:24:46 crc kubenswrapper[4558]: I0120 17:24:46.363179 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3e056ccb3cd64b510e92d589d69de6a3a8914d1942184aa389607defc32073c3"} 
err="failed to get container status \"3e056ccb3cd64b510e92d589d69de6a3a8914d1942184aa389607defc32073c3\": rpc error: code = NotFound desc = could not find container \"3e056ccb3cd64b510e92d589d69de6a3a8914d1942184aa389607defc32073c3\": container with ID starting with 3e056ccb3cd64b510e92d589d69de6a3a8914d1942184aa389607defc32073c3 not found: ID does not exist" Jan 20 17:24:46 crc kubenswrapper[4558]: I0120 17:24:46.424562 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f98f4f9b-3a31-4719-ba9a-f474fcd9672d-scripts\") pod \"f98f4f9b-3a31-4719-ba9a-f474fcd9672d\" (UID: \"f98f4f9b-3a31-4719-ba9a-f474fcd9672d\") " Jan 20 17:24:46 crc kubenswrapper[4558]: I0120 17:24:46.424638 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f98f4f9b-3a31-4719-ba9a-f474fcd9672d-run-httpd\") pod \"f98f4f9b-3a31-4719-ba9a-f474fcd9672d\" (UID: \"f98f4f9b-3a31-4719-ba9a-f474fcd9672d\") " Jan 20 17:24:46 crc kubenswrapper[4558]: I0120 17:24:46.424710 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6wcnz\" (UniqueName: \"kubernetes.io/projected/f98f4f9b-3a31-4719-ba9a-f474fcd9672d-kube-api-access-6wcnz\") pod \"f98f4f9b-3a31-4719-ba9a-f474fcd9672d\" (UID: \"f98f4f9b-3a31-4719-ba9a-f474fcd9672d\") " Jan 20 17:24:46 crc kubenswrapper[4558]: I0120 17:24:46.425032 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f98f4f9b-3a31-4719-ba9a-f474fcd9672d-sg-core-conf-yaml\") pod \"f98f4f9b-3a31-4719-ba9a-f474fcd9672d\" (UID: \"f98f4f9b-3a31-4719-ba9a-f474fcd9672d\") " Jan 20 17:24:46 crc kubenswrapper[4558]: I0120 17:24:46.425063 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f98f4f9b-3a31-4719-ba9a-f474fcd9672d-config-data\") pod \"f98f4f9b-3a31-4719-ba9a-f474fcd9672d\" (UID: \"f98f4f9b-3a31-4719-ba9a-f474fcd9672d\") " Jan 20 17:24:46 crc kubenswrapper[4558]: I0120 17:24:46.425110 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f98f4f9b-3a31-4719-ba9a-f474fcd9672d-combined-ca-bundle\") pod \"f98f4f9b-3a31-4719-ba9a-f474fcd9672d\" (UID: \"f98f4f9b-3a31-4719-ba9a-f474fcd9672d\") " Jan 20 17:24:46 crc kubenswrapper[4558]: I0120 17:24:46.425214 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f98f4f9b-3a31-4719-ba9a-f474fcd9672d-log-httpd\") pod \"f98f4f9b-3a31-4719-ba9a-f474fcd9672d\" (UID: \"f98f4f9b-3a31-4719-ba9a-f474fcd9672d\") " Jan 20 17:24:46 crc kubenswrapper[4558]: I0120 17:24:46.425226 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f98f4f9b-3a31-4719-ba9a-f474fcd9672d-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "f98f4f9b-3a31-4719-ba9a-f474fcd9672d" (UID: "f98f4f9b-3a31-4719-ba9a-f474fcd9672d"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:24:46 crc kubenswrapper[4558]: I0120 17:24:46.425616 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f98f4f9b-3a31-4719-ba9a-f474fcd9672d-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "f98f4f9b-3a31-4719-ba9a-f474fcd9672d" (UID: "f98f4f9b-3a31-4719-ba9a-f474fcd9672d"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:24:46 crc kubenswrapper[4558]: I0120 17:24:46.426075 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f98f4f9b-3a31-4719-ba9a-f474fcd9672d-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:46 crc kubenswrapper[4558]: I0120 17:24:46.426098 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f98f4f9b-3a31-4719-ba9a-f474fcd9672d-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:46 crc kubenswrapper[4558]: I0120 17:24:46.432220 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f98f4f9b-3a31-4719-ba9a-f474fcd9672d-scripts" (OuterVolumeSpecName: "scripts") pod "f98f4f9b-3a31-4719-ba9a-f474fcd9672d" (UID: "f98f4f9b-3a31-4719-ba9a-f474fcd9672d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:24:46 crc kubenswrapper[4558]: I0120 17:24:46.446995 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f98f4f9b-3a31-4719-ba9a-f474fcd9672d-kube-api-access-6wcnz" (OuterVolumeSpecName: "kube-api-access-6wcnz") pod "f98f4f9b-3a31-4719-ba9a-f474fcd9672d" (UID: "f98f4f9b-3a31-4719-ba9a-f474fcd9672d"). InnerVolumeSpecName "kube-api-access-6wcnz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:24:46 crc kubenswrapper[4558]: I0120 17:24:46.452252 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f98f4f9b-3a31-4719-ba9a-f474fcd9672d-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "f98f4f9b-3a31-4719-ba9a-f474fcd9672d" (UID: "f98f4f9b-3a31-4719-ba9a-f474fcd9672d"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:24:46 crc kubenswrapper[4558]: I0120 17:24:46.488005 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f98f4f9b-3a31-4719-ba9a-f474fcd9672d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f98f4f9b-3a31-4719-ba9a-f474fcd9672d" (UID: "f98f4f9b-3a31-4719-ba9a-f474fcd9672d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:24:46 crc kubenswrapper[4558]: I0120 17:24:46.500866 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f98f4f9b-3a31-4719-ba9a-f474fcd9672d-config-data" (OuterVolumeSpecName: "config-data") pod "f98f4f9b-3a31-4719-ba9a-f474fcd9672d" (UID: "f98f4f9b-3a31-4719-ba9a-f474fcd9672d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:24:46 crc kubenswrapper[4558]: I0120 17:24:46.527417 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f98f4f9b-3a31-4719-ba9a-f474fcd9672d-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:46 crc kubenswrapper[4558]: I0120 17:24:46.527446 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6wcnz\" (UniqueName: \"kubernetes.io/projected/f98f4f9b-3a31-4719-ba9a-f474fcd9672d-kube-api-access-6wcnz\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:46 crc kubenswrapper[4558]: I0120 17:24:46.527458 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f98f4f9b-3a31-4719-ba9a-f474fcd9672d-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:46 crc kubenswrapper[4558]: I0120 17:24:46.527467 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f98f4f9b-3a31-4719-ba9a-f474fcd9672d-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:46 crc kubenswrapper[4558]: I0120 17:24:46.527476 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f98f4f9b-3a31-4719-ba9a-f474fcd9672d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:47 crc kubenswrapper[4558]: I0120 17:24:47.290502 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:47 crc kubenswrapper[4558]: I0120 17:24:47.314976 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:24:47 crc kubenswrapper[4558]: I0120 17:24:47.325969 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:24:47 crc kubenswrapper[4558]: I0120 17:24:47.341334 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:24:47 crc kubenswrapper[4558]: E0120 17:24:47.341815 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f98f4f9b-3a31-4719-ba9a-f474fcd9672d" containerName="ceilometer-notification-agent" Jan 20 17:24:47 crc kubenswrapper[4558]: I0120 17:24:47.341837 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f98f4f9b-3a31-4719-ba9a-f474fcd9672d" containerName="ceilometer-notification-agent" Jan 20 17:24:47 crc kubenswrapper[4558]: E0120 17:24:47.341850 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f98f4f9b-3a31-4719-ba9a-f474fcd9672d" containerName="ceilometer-central-agent" Jan 20 17:24:47 crc kubenswrapper[4558]: I0120 17:24:47.341859 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f98f4f9b-3a31-4719-ba9a-f474fcd9672d" containerName="ceilometer-central-agent" Jan 20 17:24:47 crc kubenswrapper[4558]: E0120 17:24:47.341879 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f98f4f9b-3a31-4719-ba9a-f474fcd9672d" containerName="sg-core" Jan 20 17:24:47 crc kubenswrapper[4558]: I0120 17:24:47.341885 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f98f4f9b-3a31-4719-ba9a-f474fcd9672d" containerName="sg-core" Jan 20 17:24:47 crc kubenswrapper[4558]: E0120 17:24:47.341896 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f98f4f9b-3a31-4719-ba9a-f474fcd9672d" containerName="proxy-httpd" Jan 20 17:24:47 crc kubenswrapper[4558]: I0120 17:24:47.341901 4558 
state_mem.go:107] "Deleted CPUSet assignment" podUID="f98f4f9b-3a31-4719-ba9a-f474fcd9672d" containerName="proxy-httpd" Jan 20 17:24:47 crc kubenswrapper[4558]: I0120 17:24:47.342069 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f98f4f9b-3a31-4719-ba9a-f474fcd9672d" containerName="sg-core" Jan 20 17:24:47 crc kubenswrapper[4558]: I0120 17:24:47.342089 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f98f4f9b-3a31-4719-ba9a-f474fcd9672d" containerName="ceilometer-notification-agent" Jan 20 17:24:47 crc kubenswrapper[4558]: I0120 17:24:47.342097 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f98f4f9b-3a31-4719-ba9a-f474fcd9672d" containerName="ceilometer-central-agent" Jan 20 17:24:47 crc kubenswrapper[4558]: I0120 17:24:47.342112 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f98f4f9b-3a31-4719-ba9a-f474fcd9672d" containerName="proxy-httpd" Jan 20 17:24:47 crc kubenswrapper[4558]: I0120 17:24:47.343746 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:47 crc kubenswrapper[4558]: I0120 17:24:47.345560 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:24:47 crc kubenswrapper[4558]: I0120 17:24:47.346534 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:24:47 crc kubenswrapper[4558]: I0120 17:24:47.359984 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:24:47 crc kubenswrapper[4558]: I0120 17:24:47.431414 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:24:47 crc kubenswrapper[4558]: I0120 17:24:47.431467 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:24:47 crc kubenswrapper[4558]: I0120 17:24:47.442368 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1364d2e8-742f-45c7-8620-65b25761a446-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"1364d2e8-742f-45c7-8620-65b25761a446\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:47 crc kubenswrapper[4558]: I0120 17:24:47.442415 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1364d2e8-742f-45c7-8620-65b25761a446-run-httpd\") pod \"ceilometer-0\" (UID: \"1364d2e8-742f-45c7-8620-65b25761a446\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:47 crc kubenswrapper[4558]: I0120 17:24:47.442453 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5djjm\" (UniqueName: \"kubernetes.io/projected/1364d2e8-742f-45c7-8620-65b25761a446-kube-api-access-5djjm\") pod \"ceilometer-0\" (UID: \"1364d2e8-742f-45c7-8620-65b25761a446\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:47 crc kubenswrapper[4558]: I0120 17:24:47.442484 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1364d2e8-742f-45c7-8620-65b25761a446-log-httpd\") pod \"ceilometer-0\" (UID: \"1364d2e8-742f-45c7-8620-65b25761a446\") " 
pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:47 crc kubenswrapper[4558]: I0120 17:24:47.442541 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1364d2e8-742f-45c7-8620-65b25761a446-config-data\") pod \"ceilometer-0\" (UID: \"1364d2e8-742f-45c7-8620-65b25761a446\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:47 crc kubenswrapper[4558]: I0120 17:24:47.442569 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1364d2e8-742f-45c7-8620-65b25761a446-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"1364d2e8-742f-45c7-8620-65b25761a446\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:47 crc kubenswrapper[4558]: I0120 17:24:47.442708 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1364d2e8-742f-45c7-8620-65b25761a446-scripts\") pod \"ceilometer-0\" (UID: \"1364d2e8-742f-45c7-8620-65b25761a446\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:47 crc kubenswrapper[4558]: I0120 17:24:47.465283 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:24:47 crc kubenswrapper[4558]: I0120 17:24:47.470333 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:24:47 crc kubenswrapper[4558]: I0120 17:24:47.544508 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1364d2e8-742f-45c7-8620-65b25761a446-scripts\") pod \"ceilometer-0\" (UID: \"1364d2e8-742f-45c7-8620-65b25761a446\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:47 crc kubenswrapper[4558]: I0120 17:24:47.545249 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1364d2e8-742f-45c7-8620-65b25761a446-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"1364d2e8-742f-45c7-8620-65b25761a446\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:47 crc kubenswrapper[4558]: I0120 17:24:47.545284 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1364d2e8-742f-45c7-8620-65b25761a446-run-httpd\") pod \"ceilometer-0\" (UID: \"1364d2e8-742f-45c7-8620-65b25761a446\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:47 crc kubenswrapper[4558]: I0120 17:24:47.545330 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5djjm\" (UniqueName: \"kubernetes.io/projected/1364d2e8-742f-45c7-8620-65b25761a446-kube-api-access-5djjm\") pod \"ceilometer-0\" (UID: \"1364d2e8-742f-45c7-8620-65b25761a446\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:47 crc kubenswrapper[4558]: I0120 17:24:47.545375 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1364d2e8-742f-45c7-8620-65b25761a446-log-httpd\") pod \"ceilometer-0\" (UID: \"1364d2e8-742f-45c7-8620-65b25761a446\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:47 crc kubenswrapper[4558]: I0120 17:24:47.545708 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" 
(UniqueName: \"kubernetes.io/empty-dir/1364d2e8-742f-45c7-8620-65b25761a446-run-httpd\") pod \"ceilometer-0\" (UID: \"1364d2e8-742f-45c7-8620-65b25761a446\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:47 crc kubenswrapper[4558]: I0120 17:24:47.545770 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1364d2e8-742f-45c7-8620-65b25761a446-log-httpd\") pod \"ceilometer-0\" (UID: \"1364d2e8-742f-45c7-8620-65b25761a446\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:47 crc kubenswrapper[4558]: I0120 17:24:47.545815 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1364d2e8-742f-45c7-8620-65b25761a446-config-data\") pod \"ceilometer-0\" (UID: \"1364d2e8-742f-45c7-8620-65b25761a446\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:47 crc kubenswrapper[4558]: I0120 17:24:47.545883 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1364d2e8-742f-45c7-8620-65b25761a446-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"1364d2e8-742f-45c7-8620-65b25761a446\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:47 crc kubenswrapper[4558]: I0120 17:24:47.549556 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1364d2e8-742f-45c7-8620-65b25761a446-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"1364d2e8-742f-45c7-8620-65b25761a446\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:47 crc kubenswrapper[4558]: I0120 17:24:47.549661 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1364d2e8-742f-45c7-8620-65b25761a446-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"1364d2e8-742f-45c7-8620-65b25761a446\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:47 crc kubenswrapper[4558]: I0120 17:24:47.553932 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1364d2e8-742f-45c7-8620-65b25761a446-scripts\") pod \"ceilometer-0\" (UID: \"1364d2e8-742f-45c7-8620-65b25761a446\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:47 crc kubenswrapper[4558]: I0120 17:24:47.554025 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1364d2e8-742f-45c7-8620-65b25761a446-config-data\") pod \"ceilometer-0\" (UID: \"1364d2e8-742f-45c7-8620-65b25761a446\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:47 crc kubenswrapper[4558]: I0120 17:24:47.571631 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5djjm\" (UniqueName: \"kubernetes.io/projected/1364d2e8-742f-45c7-8620-65b25761a446-kube-api-access-5djjm\") pod \"ceilometer-0\" (UID: \"1364d2e8-742f-45c7-8620-65b25761a446\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:47 crc kubenswrapper[4558]: I0120 17:24:47.670563 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:48 crc kubenswrapper[4558]: I0120 17:24:48.011108 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:24:48 crc kubenswrapper[4558]: I0120 17:24:48.014428 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:24:48 crc kubenswrapper[4558]: I0120 17:24:48.100178 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:24:48 crc kubenswrapper[4558]: I0120 17:24:48.300954 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"1364d2e8-742f-45c7-8620-65b25761a446","Type":"ContainerStarted","Data":"9b392c73014eb00d4a04d80619b78c90d5ecea2f6e17cfd221482af945465742"} Jan 20 17:24:48 crc kubenswrapper[4558]: I0120 17:24:48.301636 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:24:48 crc kubenswrapper[4558]: I0120 17:24:48.301799 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:24:48 crc kubenswrapper[4558]: I0120 17:24:48.576451 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f98f4f9b-3a31-4719-ba9a-f474fcd9672d" path="/var/lib/kubelet/pods/f98f4f9b-3a31-4719-ba9a-f474fcd9672d/volumes" Jan 20 17:24:49 crc kubenswrapper[4558]: I0120 17:24:49.323511 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"1364d2e8-742f-45c7-8620-65b25761a446","Type":"ContainerStarted","Data":"c9cb73bb1f963be4a87942f47e85faaa1a63fa1c61b02072bb1026c37a614ad5"} Jan 20 17:24:49 crc kubenswrapper[4558]: I0120 17:24:49.976190 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:24:49 crc kubenswrapper[4558]: I0120 17:24:49.979238 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:24:50 crc kubenswrapper[4558]: I0120 17:24:50.339137 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"1364d2e8-742f-45c7-8620-65b25761a446","Type":"ContainerStarted","Data":"3de6489147bef99f8f1b647aa061be1af0d2fdfaaea6c6c7842738a0772f4741"} Jan 20 17:24:51 crc kubenswrapper[4558]: I0120 17:24:51.352936 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"1364d2e8-742f-45c7-8620-65b25761a446","Type":"ContainerStarted","Data":"ff2fb655d1873e9edafadb8f8dfe720555b1f3ef4341676568142f19f19ef375"} Jan 20 17:24:52 crc kubenswrapper[4558]: I0120 17:24:52.621349 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.230037 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell0-cell-mapping-th49n"] Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.231750 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-th49n" Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.234057 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-manage-config-data" Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.234120 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-manage-scripts" Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.238868 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-cell-mapping-th49n"] Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.269007 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v8g56\" (UniqueName: \"kubernetes.io/projected/acd67900-69b3-485f-b378-80457d604be8-kube-api-access-v8g56\") pod \"nova-cell0-cell-mapping-th49n\" (UID: \"acd67900-69b3-485f-b378-80457d604be8\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-th49n" Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.269137 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/acd67900-69b3-485f-b378-80457d604be8-scripts\") pod \"nova-cell0-cell-mapping-th49n\" (UID: \"acd67900-69b3-485f-b378-80457d604be8\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-th49n" Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.269179 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/acd67900-69b3-485f-b378-80457d604be8-config-data\") pod \"nova-cell0-cell-mapping-th49n\" (UID: \"acd67900-69b3-485f-b378-80457d604be8\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-th49n" Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.269201 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acd67900-69b3-485f-b378-80457d604be8-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-th49n\" (UID: \"acd67900-69b3-485f-b378-80457d604be8\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-th49n" Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.370652 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v8g56\" (UniqueName: \"kubernetes.io/projected/acd67900-69b3-485f-b378-80457d604be8-kube-api-access-v8g56\") pod \"nova-cell0-cell-mapping-th49n\" (UID: \"acd67900-69b3-485f-b378-80457d604be8\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-th49n" Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.370923 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/acd67900-69b3-485f-b378-80457d604be8-scripts\") pod \"nova-cell0-cell-mapping-th49n\" (UID: \"acd67900-69b3-485f-b378-80457d604be8\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-th49n" Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.370973 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/acd67900-69b3-485f-b378-80457d604be8-config-data\") pod \"nova-cell0-cell-mapping-th49n\" (UID: \"acd67900-69b3-485f-b378-80457d604be8\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-th49n" Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 
17:24:53.371001 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acd67900-69b3-485f-b378-80457d604be8-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-th49n\" (UID: \"acd67900-69b3-485f-b378-80457d604be8\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-th49n" Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.375734 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/acd67900-69b3-485f-b378-80457d604be8-scripts\") pod \"nova-cell0-cell-mapping-th49n\" (UID: \"acd67900-69b3-485f-b378-80457d604be8\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-th49n" Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.385699 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/acd67900-69b3-485f-b378-80457d604be8-config-data\") pod \"nova-cell0-cell-mapping-th49n\" (UID: \"acd67900-69b3-485f-b378-80457d604be8\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-th49n" Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.388668 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acd67900-69b3-485f-b378-80457d604be8-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-th49n\" (UID: \"acd67900-69b3-485f-b378-80457d604be8\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-th49n" Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.412854 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"1364d2e8-742f-45c7-8620-65b25761a446","Type":"ContainerStarted","Data":"e1eb2d8ac1c120b5b284b58056545fd48762b4f7439d798ebacdc2d85342480c"} Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.414262 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.422652 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v8g56\" (UniqueName: \"kubernetes.io/projected/acd67900-69b3-485f-b378-80457d604be8-kube-api-access-v8g56\") pod \"nova-cell0-cell-mapping-th49n\" (UID: \"acd67900-69b3-485f-b378-80457d604be8\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-th49n" Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.435873 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.437725 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.442557 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-api-config-data" Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.472736 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/168b4665-9d96-4497-8e29-db3f6e38e3b2-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"168b4665-9d96-4497-8e29-db3f6e38e3b2\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.473075 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/168b4665-9d96-4497-8e29-db3f6e38e3b2-config-data\") pod \"nova-api-0\" (UID: \"168b4665-9d96-4497-8e29-db3f6e38e3b2\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.473174 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/168b4665-9d96-4497-8e29-db3f6e38e3b2-logs\") pod \"nova-api-0\" (UID: \"168b4665-9d96-4497-8e29-db3f6e38e3b2\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.473228 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hb7gx\" (UniqueName: \"kubernetes.io/projected/168b4665-9d96-4497-8e29-db3f6e38e3b2-kube-api-access-hb7gx\") pod \"nova-api-0\" (UID: \"168b4665-9d96-4497-8e29-db3f6e38e3b2\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.475234 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.475396 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=2.087187501 podStartE2EDuration="6.475379292s" podCreationTimestamp="2026-01-20 17:24:47 +0000 UTC" firstStartedPulling="2026-01-20 17:24:48.104504256 +0000 UTC m=+2581.864842223" lastFinishedPulling="2026-01-20 17:24:52.492696048 +0000 UTC m=+2586.253034014" observedRunningTime="2026-01-20 17:24:53.463213187 +0000 UTC m=+2587.223551154" watchObservedRunningTime="2026-01-20 17:24:53.475379292 +0000 UTC m=+2587.235717260" Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.548066 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-th49n" Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.575140 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/168b4665-9d96-4497-8e29-db3f6e38e3b2-config-data\") pod \"nova-api-0\" (UID: \"168b4665-9d96-4497-8e29-db3f6e38e3b2\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.575217 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/168b4665-9d96-4497-8e29-db3f6e38e3b2-logs\") pod \"nova-api-0\" (UID: \"168b4665-9d96-4497-8e29-db3f6e38e3b2\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.575255 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hb7gx\" (UniqueName: \"kubernetes.io/projected/168b4665-9d96-4497-8e29-db3f6e38e3b2-kube-api-access-hb7gx\") pod \"nova-api-0\" (UID: \"168b4665-9d96-4497-8e29-db3f6e38e3b2\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.575302 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/168b4665-9d96-4497-8e29-db3f6e38e3b2-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"168b4665-9d96-4497-8e29-db3f6e38e3b2\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.575767 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/168b4665-9d96-4497-8e29-db3f6e38e3b2-logs\") pod \"nova-api-0\" (UID: \"168b4665-9d96-4497-8e29-db3f6e38e3b2\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.579625 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/168b4665-9d96-4497-8e29-db3f6e38e3b2-config-data\") pod \"nova-api-0\" (UID: \"168b4665-9d96-4497-8e29-db3f6e38e3b2\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.582911 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/168b4665-9d96-4497-8e29-db3f6e38e3b2-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"168b4665-9d96-4497-8e29-db3f6e38e3b2\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.621629 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hb7gx\" (UniqueName: \"kubernetes.io/projected/168b4665-9d96-4497-8e29-db3f6e38e3b2-kube-api-access-hb7gx\") pod \"nova-api-0\" (UID: \"168b4665-9d96-4497-8e29-db3f6e38e3b2\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.676058 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.677114 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.682500 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-scheduler-config-data" Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.704945 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.727210 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.729032 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.740544 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-metadata-config-data" Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.777217 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.778918 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/df4290b9-86af-428a-bb18-68fafc4faac9-logs\") pod \"nova-metadata-0\" (UID: \"df4290b9-86af-428a-bb18-68fafc4faac9\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.779018 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df4290b9-86af-428a-bb18-68fafc4faac9-config-data\") pod \"nova-metadata-0\" (UID: \"df4290b9-86af-428a-bb18-68fafc4faac9\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.779096 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tzsp2\" (UniqueName: \"kubernetes.io/projected/df4290b9-86af-428a-bb18-68fafc4faac9-kube-api-access-tzsp2\") pod \"nova-metadata-0\" (UID: \"df4290b9-86af-428a-bb18-68fafc4faac9\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.779134 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e01ff5c3-46ef-43d1-b1a1-d39e1a756d2f-config-data\") pod \"nova-scheduler-0\" (UID: \"e01ff5c3-46ef-43d1-b1a1-d39e1a756d2f\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.779153 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-64vqf\" (UniqueName: \"kubernetes.io/projected/e01ff5c3-46ef-43d1-b1a1-d39e1a756d2f-kube-api-access-64vqf\") pod \"nova-scheduler-0\" (UID: \"e01ff5c3-46ef-43d1-b1a1-d39e1a756d2f\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.779187 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e01ff5c3-46ef-43d1-b1a1-d39e1a756d2f-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"e01ff5c3-46ef-43d1-b1a1-d39e1a756d2f\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.779224 4558 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df4290b9-86af-428a-bb18-68fafc4faac9-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"df4290b9-86af-428a-bb18-68fafc4faac9\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.800202 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.842647 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.849923 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.855310 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-novncproxy-config-data" Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.882979 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/df4290b9-86af-428a-bb18-68fafc4faac9-logs\") pod \"nova-metadata-0\" (UID: \"df4290b9-86af-428a-bb18-68fafc4faac9\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.883107 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e35b117-b17a-4a9e-a725-a2f251147dad-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"7e35b117-b17a-4a9e-a725-a2f251147dad\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.883182 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df4290b9-86af-428a-bb18-68fafc4faac9-config-data\") pod \"nova-metadata-0\" (UID: \"df4290b9-86af-428a-bb18-68fafc4faac9\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.883257 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e35b117-b17a-4a9e-a725-a2f251147dad-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"7e35b117-b17a-4a9e-a725-a2f251147dad\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.883301 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tzsp2\" (UniqueName: \"kubernetes.io/projected/df4290b9-86af-428a-bb18-68fafc4faac9-kube-api-access-tzsp2\") pod \"nova-metadata-0\" (UID: \"df4290b9-86af-428a-bb18-68fafc4faac9\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.883350 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e01ff5c3-46ef-43d1-b1a1-d39e1a756d2f-config-data\") pod \"nova-scheduler-0\" (UID: \"e01ff5c3-46ef-43d1-b1a1-d39e1a756d2f\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.883369 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-64vqf\" (UniqueName: 
\"kubernetes.io/projected/e01ff5c3-46ef-43d1-b1a1-d39e1a756d2f-kube-api-access-64vqf\") pod \"nova-scheduler-0\" (UID: \"e01ff5c3-46ef-43d1-b1a1-d39e1a756d2f\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.883389 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e01ff5c3-46ef-43d1-b1a1-d39e1a756d2f-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"e01ff5c3-46ef-43d1-b1a1-d39e1a756d2f\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.883432 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5rcz4\" (UniqueName: \"kubernetes.io/projected/7e35b117-b17a-4a9e-a725-a2f251147dad-kube-api-access-5rcz4\") pod \"nova-cell1-novncproxy-0\" (UID: \"7e35b117-b17a-4a9e-a725-a2f251147dad\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.883459 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df4290b9-86af-428a-bb18-68fafc4faac9-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"df4290b9-86af-428a-bb18-68fafc4faac9\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.883870 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/df4290b9-86af-428a-bb18-68fafc4faac9-logs\") pod \"nova-metadata-0\" (UID: \"df4290b9-86af-428a-bb18-68fafc4faac9\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.909916 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tzsp2\" (UniqueName: \"kubernetes.io/projected/df4290b9-86af-428a-bb18-68fafc4faac9-kube-api-access-tzsp2\") pod \"nova-metadata-0\" (UID: \"df4290b9-86af-428a-bb18-68fafc4faac9\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.924496 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e01ff5c3-46ef-43d1-b1a1-d39e1a756d2f-config-data\") pod \"nova-scheduler-0\" (UID: \"e01ff5c3-46ef-43d1-b1a1-d39e1a756d2f\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.924495 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df4290b9-86af-428a-bb18-68fafc4faac9-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"df4290b9-86af-428a-bb18-68fafc4faac9\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.928112 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e01ff5c3-46ef-43d1-b1a1-d39e1a756d2f-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"e01ff5c3-46ef-43d1-b1a1-d39e1a756d2f\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.934010 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-64vqf\" (UniqueName: \"kubernetes.io/projected/e01ff5c3-46ef-43d1-b1a1-d39e1a756d2f-kube-api-access-64vqf\") pod \"nova-scheduler-0\" (UID: 
\"e01ff5c3-46ef-43d1-b1a1-d39e1a756d2f\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.938221 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.941019 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df4290b9-86af-428a-bb18-68fafc4faac9-config-data\") pod \"nova-metadata-0\" (UID: \"df4290b9-86af-428a-bb18-68fafc4faac9\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.990901 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e35b117-b17a-4a9e-a725-a2f251147dad-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"7e35b117-b17a-4a9e-a725-a2f251147dad\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.991419 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5rcz4\" (UniqueName: \"kubernetes.io/projected/7e35b117-b17a-4a9e-a725-a2f251147dad-kube-api-access-5rcz4\") pod \"nova-cell1-novncproxy-0\" (UID: \"7e35b117-b17a-4a9e-a725-a2f251147dad\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.991721 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e35b117-b17a-4a9e-a725-a2f251147dad-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"7e35b117-b17a-4a9e-a725-a2f251147dad\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.997642 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e35b117-b17a-4a9e-a725-a2f251147dad-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"7e35b117-b17a-4a9e-a725-a2f251147dad\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:24:53 crc kubenswrapper[4558]: I0120 17:24:53.998318 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e35b117-b17a-4a9e-a725-a2f251147dad-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"7e35b117-b17a-4a9e-a725-a2f251147dad\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:24:54 crc kubenswrapper[4558]: I0120 17:24:54.018225 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5rcz4\" (UniqueName: \"kubernetes.io/projected/7e35b117-b17a-4a9e-a725-a2f251147dad-kube-api-access-5rcz4\") pod \"nova-cell1-novncproxy-0\" (UID: \"7e35b117-b17a-4a9e-a725-a2f251147dad\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:24:54 crc kubenswrapper[4558]: I0120 17:24:54.026693 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:24:54 crc kubenswrapper[4558]: I0120 17:24:54.119511 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:24:54 crc kubenswrapper[4558]: I0120 17:24:54.223539 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:24:54 crc kubenswrapper[4558]: I0120 17:24:54.297155 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-cell-mapping-th49n"] Jan 20 17:24:54 crc kubenswrapper[4558]: I0120 17:24:54.400752 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:24:54 crc kubenswrapper[4558]: W0120 17:24:54.424637 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod168b4665_9d96_4497_8e29_db3f6e38e3b2.slice/crio-0583f90b6191d9901c9b86882c30910228b0b175e071385511bd31480fad1533 WatchSource:0}: Error finding container 0583f90b6191d9901c9b86882c30910228b0b175e071385511bd31480fad1533: Status 404 returned error can't find the container with id 0583f90b6191d9901c9b86882c30910228b0b175e071385511bd31480fad1533 Jan 20 17:24:54 crc kubenswrapper[4558]: I0120 17:24:54.448174 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-th49n" event={"ID":"acd67900-69b3-485f-b378-80457d604be8","Type":"ContainerStarted","Data":"12d36761612e8a3339bc705d6c7a0ced0db1629320d439f854c82cd965716f70"} Jan 20 17:24:54 crc kubenswrapper[4558]: I0120 17:24:54.482308 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:24:54 crc kubenswrapper[4558]: W0120 17:24:54.635341 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddf4290b9_86af_428a_bb18_68fafc4faac9.slice/crio-7fce53830bc445721ce535048aa9d2fe9d4b95e7133803d541bae2e49de94ae2 WatchSource:0}: Error finding container 7fce53830bc445721ce535048aa9d2fe9d4b95e7133803d541bae2e49de94ae2: Status 404 returned error can't find the container with id 7fce53830bc445721ce535048aa9d2fe9d4b95e7133803d541bae2e49de94ae2 Jan 20 17:24:54 crc kubenswrapper[4558]: I0120 17:24:54.641874 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:24:54 crc kubenswrapper[4558]: I0120 17:24:54.652492 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-db-sync-z28dz"] Jan 20 17:24:54 crc kubenswrapper[4558]: I0120 17:24:54.654085 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-z28dz" Jan 20 17:24:54 crc kubenswrapper[4558]: I0120 17:24:54.657507 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-conductor-config-data" Jan 20 17:24:54 crc kubenswrapper[4558]: I0120 17:24:54.657773 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-conductor-scripts" Jan 20 17:24:54 crc kubenswrapper[4558]: I0120 17:24:54.671600 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-db-sync-z28dz"] Jan 20 17:24:54 crc kubenswrapper[4558]: I0120 17:24:54.699567 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:24:54 crc kubenswrapper[4558]: I0120 17:24:54.718217 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/01e17b5d-e7c8-4751-9f98-524fdc40bbf3-config-data\") pod \"nova-cell1-conductor-db-sync-z28dz\" (UID: \"01e17b5d-e7c8-4751-9f98-524fdc40bbf3\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-z28dz" Jan 20 17:24:54 crc kubenswrapper[4558]: I0120 17:24:54.718323 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/01e17b5d-e7c8-4751-9f98-524fdc40bbf3-scripts\") pod \"nova-cell1-conductor-db-sync-z28dz\" (UID: \"01e17b5d-e7c8-4751-9f98-524fdc40bbf3\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-z28dz" Jan 20 17:24:54 crc kubenswrapper[4558]: I0120 17:24:54.718356 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01e17b5d-e7c8-4751-9f98-524fdc40bbf3-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-z28dz\" (UID: \"01e17b5d-e7c8-4751-9f98-524fdc40bbf3\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-z28dz" Jan 20 17:24:54 crc kubenswrapper[4558]: I0120 17:24:54.718427 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vtl7t\" (UniqueName: \"kubernetes.io/projected/01e17b5d-e7c8-4751-9f98-524fdc40bbf3-kube-api-access-vtl7t\") pod \"nova-cell1-conductor-db-sync-z28dz\" (UID: \"01e17b5d-e7c8-4751-9f98-524fdc40bbf3\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-z28dz" Jan 20 17:24:54 crc kubenswrapper[4558]: I0120 17:24:54.820898 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/01e17b5d-e7c8-4751-9f98-524fdc40bbf3-config-data\") pod \"nova-cell1-conductor-db-sync-z28dz\" (UID: \"01e17b5d-e7c8-4751-9f98-524fdc40bbf3\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-z28dz" Jan 20 17:24:54 crc kubenswrapper[4558]: I0120 17:24:54.820994 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/01e17b5d-e7c8-4751-9f98-524fdc40bbf3-scripts\") pod \"nova-cell1-conductor-db-sync-z28dz\" (UID: \"01e17b5d-e7c8-4751-9f98-524fdc40bbf3\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-z28dz" Jan 20 17:24:54 crc kubenswrapper[4558]: I0120 17:24:54.821027 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/01e17b5d-e7c8-4751-9f98-524fdc40bbf3-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-z28dz\" (UID: \"01e17b5d-e7c8-4751-9f98-524fdc40bbf3\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-z28dz" Jan 20 17:24:54 crc kubenswrapper[4558]: I0120 17:24:54.821077 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vtl7t\" (UniqueName: \"kubernetes.io/projected/01e17b5d-e7c8-4751-9f98-524fdc40bbf3-kube-api-access-vtl7t\") pod \"nova-cell1-conductor-db-sync-z28dz\" (UID: \"01e17b5d-e7c8-4751-9f98-524fdc40bbf3\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-z28dz" Jan 20 17:24:54 crc kubenswrapper[4558]: I0120 17:24:54.825404 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/01e17b5d-e7c8-4751-9f98-524fdc40bbf3-config-data\") pod \"nova-cell1-conductor-db-sync-z28dz\" (UID: \"01e17b5d-e7c8-4751-9f98-524fdc40bbf3\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-z28dz" Jan 20 17:24:54 crc kubenswrapper[4558]: I0120 17:24:54.826246 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01e17b5d-e7c8-4751-9f98-524fdc40bbf3-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-z28dz\" (UID: \"01e17b5d-e7c8-4751-9f98-524fdc40bbf3\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-z28dz" Jan 20 17:24:54 crc kubenswrapper[4558]: I0120 17:24:54.826280 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/01e17b5d-e7c8-4751-9f98-524fdc40bbf3-scripts\") pod \"nova-cell1-conductor-db-sync-z28dz\" (UID: \"01e17b5d-e7c8-4751-9f98-524fdc40bbf3\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-z28dz" Jan 20 17:24:54 crc kubenswrapper[4558]: I0120 17:24:54.847692 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vtl7t\" (UniqueName: \"kubernetes.io/projected/01e17b5d-e7c8-4751-9f98-524fdc40bbf3-kube-api-access-vtl7t\") pod \"nova-cell1-conductor-db-sync-z28dz\" (UID: \"01e17b5d-e7c8-4751-9f98-524fdc40bbf3\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-z28dz" Jan 20 17:24:54 crc kubenswrapper[4558]: I0120 17:24:54.987080 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:24:55 crc kubenswrapper[4558]: I0120 17:24:55.009369 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:24:55 crc kubenswrapper[4558]: I0120 17:24:55.009598 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="165828ca-683d-41f2-a055-7e99870bf131" containerName="nova-cell0-conductor-conductor" containerID="cri-o://be8c548bad3d4853a64ebbae913ef2602961735470fbdaf076725d3ca8f5145e" gracePeriod=30 Jan 20 17:24:55 crc kubenswrapper[4558]: I0120 17:24:55.048022 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:24:55 crc kubenswrapper[4558]: I0120 17:24:55.055080 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:24:55 crc kubenswrapper[4558]: I0120 17:24:55.061078 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:24:55 crc kubenswrapper[4558]: I0120 17:24:55.145558 
4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-z28dz" Jan 20 17:24:55 crc kubenswrapper[4558]: I0120 17:24:55.460325 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-th49n" event={"ID":"acd67900-69b3-485f-b378-80457d604be8","Type":"ContainerStarted","Data":"1bbe4af0bdcc4ee54aef97ab3e8325a227f63771456a354372adfab0b7220202"} Jan 20 17:24:55 crc kubenswrapper[4558]: I0120 17:24:55.462986 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"7e35b117-b17a-4a9e-a725-a2f251147dad","Type":"ContainerStarted","Data":"dd07722809c65d0d49946dd819fc2bcdb222e47a2b85409e724ff95454a5e919"} Jan 20 17:24:55 crc kubenswrapper[4558]: I0120 17:24:55.463033 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"7e35b117-b17a-4a9e-a725-a2f251147dad","Type":"ContainerStarted","Data":"34885a27d726a5eb97a04a2a668a89468f3f09edfccf6d578bb2fb80b31a8309"} Jan 20 17:24:55 crc kubenswrapper[4558]: I0120 17:24:55.465108 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"e01ff5c3-46ef-43d1-b1a1-d39e1a756d2f","Type":"ContainerStarted","Data":"c9b97f96803f44a1b6adf5bf2b148738540c1bff4121e36955d84ece49d2a1a1"} Jan 20 17:24:55 crc kubenswrapper[4558]: I0120 17:24:55.465179 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"e01ff5c3-46ef-43d1-b1a1-d39e1a756d2f","Type":"ContainerStarted","Data":"b5093525b4442b5ea491f7f56b1a2ae6d9fcc0b25848c8f3e04195c49137d9ab"} Jan 20 17:24:55 crc kubenswrapper[4558]: I0120 17:24:55.467453 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"168b4665-9d96-4497-8e29-db3f6e38e3b2","Type":"ContainerStarted","Data":"696221c4db1df9918ddb542ae0b927fec9732f29aac33c16861876b42969ef12"} Jan 20 17:24:55 crc kubenswrapper[4558]: I0120 17:24:55.467503 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"168b4665-9d96-4497-8e29-db3f6e38e3b2","Type":"ContainerStarted","Data":"fef705763856b6dedbc7147592c5b8597de86d2d0e46aae5868b7418bdff49b6"} Jan 20 17:24:55 crc kubenswrapper[4558]: I0120 17:24:55.467517 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"168b4665-9d96-4497-8e29-db3f6e38e3b2","Type":"ContainerStarted","Data":"0583f90b6191d9901c9b86882c30910228b0b175e071385511bd31480fad1533"} Jan 20 17:24:55 crc kubenswrapper[4558]: I0120 17:24:55.470916 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"df4290b9-86af-428a-bb18-68fafc4faac9","Type":"ContainerStarted","Data":"c76a410774504ab797341c1d7f48bc1de18d21cf5e558db39395aadba82c5183"} Jan 20 17:24:55 crc kubenswrapper[4558]: I0120 17:24:55.470952 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"df4290b9-86af-428a-bb18-68fafc4faac9","Type":"ContainerStarted","Data":"bf68c3455e206a3ffeb686a8b32efedb59c0194999e33b19fbaa3a7767cfdfa5"} Jan 20 17:24:55 crc kubenswrapper[4558]: I0120 17:24:55.470967 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" 
event={"ID":"df4290b9-86af-428a-bb18-68fafc4faac9","Type":"ContainerStarted","Data":"7fce53830bc445721ce535048aa9d2fe9d4b95e7133803d541bae2e49de94ae2"} Jan 20 17:24:55 crc kubenswrapper[4558]: I0120 17:24:55.471009 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="df4290b9-86af-428a-bb18-68fafc4faac9" containerName="nova-metadata-log" containerID="cri-o://bf68c3455e206a3ffeb686a8b32efedb59c0194999e33b19fbaa3a7767cfdfa5" gracePeriod=30 Jan 20 17:24:55 crc kubenswrapper[4558]: I0120 17:24:55.471047 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="df4290b9-86af-428a-bb18-68fafc4faac9" containerName="nova-metadata-metadata" containerID="cri-o://c76a410774504ab797341c1d7f48bc1de18d21cf5e558db39395aadba82c5183" gracePeriod=30 Jan 20 17:24:55 crc kubenswrapper[4558]: I0120 17:24:55.485769 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-th49n" podStartSLOduration=2.485757192 podStartE2EDuration="2.485757192s" podCreationTimestamp="2026-01-20 17:24:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:24:55.478643791 +0000 UTC m=+2589.238981758" watchObservedRunningTime="2026-01-20 17:24:55.485757192 +0000 UTC m=+2589.246095159" Jan 20 17:24:55 crc kubenswrapper[4558]: I0120 17:24:55.495769 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-metadata-0" podStartSLOduration=2.495753545 podStartE2EDuration="2.495753545s" podCreationTimestamp="2026-01-20 17:24:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:24:55.493865444 +0000 UTC m=+2589.254203411" watchObservedRunningTime="2026-01-20 17:24:55.495753545 +0000 UTC m=+2589.256091513" Jan 20 17:24:55 crc kubenswrapper[4558]: I0120 17:24:55.512256 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" podStartSLOduration=2.512233305 podStartE2EDuration="2.512233305s" podCreationTimestamp="2026-01-20 17:24:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:24:55.508092806 +0000 UTC m=+2589.268430774" watchObservedRunningTime="2026-01-20 17:24:55.512233305 +0000 UTC m=+2589.272571273" Jan 20 17:24:55 crc kubenswrapper[4558]: I0120 17:24:55.526899 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-scheduler-0" podStartSLOduration=2.526876831 podStartE2EDuration="2.526876831s" podCreationTimestamp="2026-01-20 17:24:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:24:55.525364255 +0000 UTC m=+2589.285702212" watchObservedRunningTime="2026-01-20 17:24:55.526876831 +0000 UTC m=+2589.287214797" Jan 20 17:24:55 crc kubenswrapper[4558]: I0120 17:24:55.549202 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-api-0" podStartSLOduration=2.54918814 podStartE2EDuration="2.54918814s" podCreationTimestamp="2026-01-20 17:24:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:24:55.542417043 +0000 UTC m=+2589.302755010" watchObservedRunningTime="2026-01-20 17:24:55.54918814 +0000 UTC m=+2589.309526107" Jan 20 17:24:55 crc kubenswrapper[4558]: I0120 17:24:55.590314 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-db-sync-z28dz"] Jan 20 17:24:56 crc kubenswrapper[4558]: I0120 17:24:56.077617 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:24:56 crc kubenswrapper[4558]: I0120 17:24:56.167629 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df4290b9-86af-428a-bb18-68fafc4faac9-combined-ca-bundle\") pod \"df4290b9-86af-428a-bb18-68fafc4faac9\" (UID: \"df4290b9-86af-428a-bb18-68fafc4faac9\") " Jan 20 17:24:56 crc kubenswrapper[4558]: I0120 17:24:56.167808 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df4290b9-86af-428a-bb18-68fafc4faac9-config-data\") pod \"df4290b9-86af-428a-bb18-68fafc4faac9\" (UID: \"df4290b9-86af-428a-bb18-68fafc4faac9\") " Jan 20 17:24:56 crc kubenswrapper[4558]: I0120 17:24:56.167839 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tzsp2\" (UniqueName: \"kubernetes.io/projected/df4290b9-86af-428a-bb18-68fafc4faac9-kube-api-access-tzsp2\") pod \"df4290b9-86af-428a-bb18-68fafc4faac9\" (UID: \"df4290b9-86af-428a-bb18-68fafc4faac9\") " Jan 20 17:24:56 crc kubenswrapper[4558]: I0120 17:24:56.167917 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/df4290b9-86af-428a-bb18-68fafc4faac9-logs\") pod \"df4290b9-86af-428a-bb18-68fafc4faac9\" (UID: \"df4290b9-86af-428a-bb18-68fafc4faac9\") " Jan 20 17:24:56 crc kubenswrapper[4558]: I0120 17:24:56.168326 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/df4290b9-86af-428a-bb18-68fafc4faac9-logs" (OuterVolumeSpecName: "logs") pod "df4290b9-86af-428a-bb18-68fafc4faac9" (UID: "df4290b9-86af-428a-bb18-68fafc4faac9"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:24:56 crc kubenswrapper[4558]: I0120 17:24:56.168834 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/df4290b9-86af-428a-bb18-68fafc4faac9-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:56 crc kubenswrapper[4558]: I0120 17:24:56.174352 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/df4290b9-86af-428a-bb18-68fafc4faac9-kube-api-access-tzsp2" (OuterVolumeSpecName: "kube-api-access-tzsp2") pod "df4290b9-86af-428a-bb18-68fafc4faac9" (UID: "df4290b9-86af-428a-bb18-68fafc4faac9"). InnerVolumeSpecName "kube-api-access-tzsp2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:24:56 crc kubenswrapper[4558]: I0120 17:24:56.192901 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/df4290b9-86af-428a-bb18-68fafc4faac9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "df4290b9-86af-428a-bb18-68fafc4faac9" (UID: "df4290b9-86af-428a-bb18-68fafc4faac9"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:24:56 crc kubenswrapper[4558]: I0120 17:24:56.198365 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/df4290b9-86af-428a-bb18-68fafc4faac9-config-data" (OuterVolumeSpecName: "config-data") pod "df4290b9-86af-428a-bb18-68fafc4faac9" (UID: "df4290b9-86af-428a-bb18-68fafc4faac9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:24:56 crc kubenswrapper[4558]: I0120 17:24:56.271757 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df4290b9-86af-428a-bb18-68fafc4faac9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:56 crc kubenswrapper[4558]: I0120 17:24:56.271801 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df4290b9-86af-428a-bb18-68fafc4faac9-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:56 crc kubenswrapper[4558]: I0120 17:24:56.271818 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tzsp2\" (UniqueName: \"kubernetes.io/projected/df4290b9-86af-428a-bb18-68fafc4faac9-kube-api-access-tzsp2\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:56 crc kubenswrapper[4558]: I0120 17:24:56.484901 4558 generic.go:334] "Generic (PLEG): container finished" podID="df4290b9-86af-428a-bb18-68fafc4faac9" containerID="c76a410774504ab797341c1d7f48bc1de18d21cf5e558db39395aadba82c5183" exitCode=0 Jan 20 17:24:56 crc kubenswrapper[4558]: I0120 17:24:56.484954 4558 generic.go:334] "Generic (PLEG): container finished" podID="df4290b9-86af-428a-bb18-68fafc4faac9" containerID="bf68c3455e206a3ffeb686a8b32efedb59c0194999e33b19fbaa3a7767cfdfa5" exitCode=143 Jan 20 17:24:56 crc kubenswrapper[4558]: I0120 17:24:56.485005 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:24:56 crc kubenswrapper[4558]: I0120 17:24:56.485018 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"df4290b9-86af-428a-bb18-68fafc4faac9","Type":"ContainerDied","Data":"c76a410774504ab797341c1d7f48bc1de18d21cf5e558db39395aadba82c5183"} Jan 20 17:24:56 crc kubenswrapper[4558]: I0120 17:24:56.485081 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"df4290b9-86af-428a-bb18-68fafc4faac9","Type":"ContainerDied","Data":"bf68c3455e206a3ffeb686a8b32efedb59c0194999e33b19fbaa3a7767cfdfa5"} Jan 20 17:24:56 crc kubenswrapper[4558]: I0120 17:24:56.485098 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"df4290b9-86af-428a-bb18-68fafc4faac9","Type":"ContainerDied","Data":"7fce53830bc445721ce535048aa9d2fe9d4b95e7133803d541bae2e49de94ae2"} Jan 20 17:24:56 crc kubenswrapper[4558]: I0120 17:24:56.485119 4558 scope.go:117] "RemoveContainer" containerID="c76a410774504ab797341c1d7f48bc1de18d21cf5e558db39395aadba82c5183" Jan 20 17:24:56 crc kubenswrapper[4558]: I0120 17:24:56.492968 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-z28dz" event={"ID":"01e17b5d-e7c8-4751-9f98-524fdc40bbf3","Type":"ContainerStarted","Data":"ad4f89b78d4804b38b89b1fbc52cbdd49b304501413e6e54a707bd56ba2d1e8a"} Jan 20 17:24:56 crc kubenswrapper[4558]: I0120 17:24:56.493076 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-z28dz" event={"ID":"01e17b5d-e7c8-4751-9f98-524fdc40bbf3","Type":"ContainerStarted","Data":"ec984f43d9f631776c32a5974670509776150478821b8fbb5c5ca9e30fcd0550"} Jan 20 17:24:56 crc kubenswrapper[4558]: I0120 17:24:56.495226 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" podUID="7e35b117-b17a-4a9e-a725-a2f251147dad" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://dd07722809c65d0d49946dd819fc2bcdb222e47a2b85409e724ff95454a5e919" gracePeriod=30 Jan 20 17:24:56 crc kubenswrapper[4558]: I0120 17:24:56.495685 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="168b4665-9d96-4497-8e29-db3f6e38e3b2" containerName="nova-api-log" containerID="cri-o://fef705763856b6dedbc7147592c5b8597de86d2d0e46aae5868b7418bdff49b6" gracePeriod=30 Jan 20 17:24:56 crc kubenswrapper[4558]: I0120 17:24:56.496128 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="168b4665-9d96-4497-8e29-db3f6e38e3b2" containerName="nova-api-api" containerID="cri-o://696221c4db1df9918ddb542ae0b927fec9732f29aac33c16861876b42969ef12" gracePeriod=30 Jan 20 17:24:56 crc kubenswrapper[4558]: I0120 17:24:56.500392 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="e01ff5c3-46ef-43d1-b1a1-d39e1a756d2f" containerName="nova-scheduler-scheduler" containerID="cri-o://c9b97f96803f44a1b6adf5bf2b148738540c1bff4121e36955d84ece49d2a1a1" gracePeriod=30 Jan 20 17:24:56 crc kubenswrapper[4558]: I0120 17:24:56.557992 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-z28dz" podStartSLOduration=2.557197086 
podStartE2EDuration="2.557197086s" podCreationTimestamp="2026-01-20 17:24:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:24:56.529268689 +0000 UTC m=+2590.289606655" watchObservedRunningTime="2026-01-20 17:24:56.557197086 +0000 UTC m=+2590.317535053" Jan 20 17:24:56 crc kubenswrapper[4558]: I0120 17:24:56.572331 4558 scope.go:117] "RemoveContainer" containerID="bf68c3455e206a3ffeb686a8b32efedb59c0194999e33b19fbaa3a7767cfdfa5" Jan 20 17:24:56 crc kubenswrapper[4558]: I0120 17:24:56.611882 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:24:56 crc kubenswrapper[4558]: I0120 17:24:56.616245 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:24:56 crc kubenswrapper[4558]: I0120 17:24:56.624232 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:24:56 crc kubenswrapper[4558]: E0120 17:24:56.624723 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df4290b9-86af-428a-bb18-68fafc4faac9" containerName="nova-metadata-metadata" Jan 20 17:24:56 crc kubenswrapper[4558]: I0120 17:24:56.624736 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="df4290b9-86af-428a-bb18-68fafc4faac9" containerName="nova-metadata-metadata" Jan 20 17:24:56 crc kubenswrapper[4558]: E0120 17:24:56.624755 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df4290b9-86af-428a-bb18-68fafc4faac9" containerName="nova-metadata-log" Jan 20 17:24:56 crc kubenswrapper[4558]: I0120 17:24:56.624761 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="df4290b9-86af-428a-bb18-68fafc4faac9" containerName="nova-metadata-log" Jan 20 17:24:56 crc kubenswrapper[4558]: I0120 17:24:56.624961 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="df4290b9-86af-428a-bb18-68fafc4faac9" containerName="nova-metadata-metadata" Jan 20 17:24:56 crc kubenswrapper[4558]: I0120 17:24:56.624978 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="df4290b9-86af-428a-bb18-68fafc4faac9" containerName="nova-metadata-log" Jan 20 17:24:56 crc kubenswrapper[4558]: I0120 17:24:56.626315 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:24:56 crc kubenswrapper[4558]: I0120 17:24:56.629467 4558 scope.go:117] "RemoveContainer" containerID="c76a410774504ab797341c1d7f48bc1de18d21cf5e558db39395aadba82c5183" Jan 20 17:24:56 crc kubenswrapper[4558]: I0120 17:24:56.630187 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:24:56 crc kubenswrapper[4558]: E0120 17:24:56.631048 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c76a410774504ab797341c1d7f48bc1de18d21cf5e558db39395aadba82c5183\": container with ID starting with c76a410774504ab797341c1d7f48bc1de18d21cf5e558db39395aadba82c5183 not found: ID does not exist" containerID="c76a410774504ab797341c1d7f48bc1de18d21cf5e558db39395aadba82c5183" Jan 20 17:24:56 crc kubenswrapper[4558]: I0120 17:24:56.631097 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c76a410774504ab797341c1d7f48bc1de18d21cf5e558db39395aadba82c5183"} err="failed to get container status \"c76a410774504ab797341c1d7f48bc1de18d21cf5e558db39395aadba82c5183\": rpc error: code = NotFound desc = could not find container \"c76a410774504ab797341c1d7f48bc1de18d21cf5e558db39395aadba82c5183\": container with ID starting with c76a410774504ab797341c1d7f48bc1de18d21cf5e558db39395aadba82c5183 not found: ID does not exist" Jan 20 17:24:56 crc kubenswrapper[4558]: I0120 17:24:56.631125 4558 scope.go:117] "RemoveContainer" containerID="bf68c3455e206a3ffeb686a8b32efedb59c0194999e33b19fbaa3a7767cfdfa5" Jan 20 17:24:56 crc kubenswrapper[4558]: I0120 17:24:56.631391 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-metadata-config-data" Jan 20 17:24:56 crc kubenswrapper[4558]: I0120 17:24:56.631572 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-metadata-internal-svc" Jan 20 17:24:56 crc kubenswrapper[4558]: E0120 17:24:56.631808 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bf68c3455e206a3ffeb686a8b32efedb59c0194999e33b19fbaa3a7767cfdfa5\": container with ID starting with bf68c3455e206a3ffeb686a8b32efedb59c0194999e33b19fbaa3a7767cfdfa5 not found: ID does not exist" containerID="bf68c3455e206a3ffeb686a8b32efedb59c0194999e33b19fbaa3a7767cfdfa5" Jan 20 17:24:56 crc kubenswrapper[4558]: I0120 17:24:56.631826 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bf68c3455e206a3ffeb686a8b32efedb59c0194999e33b19fbaa3a7767cfdfa5"} err="failed to get container status \"bf68c3455e206a3ffeb686a8b32efedb59c0194999e33b19fbaa3a7767cfdfa5\": rpc error: code = NotFound desc = could not find container \"bf68c3455e206a3ffeb686a8b32efedb59c0194999e33b19fbaa3a7767cfdfa5\": container with ID starting with bf68c3455e206a3ffeb686a8b32efedb59c0194999e33b19fbaa3a7767cfdfa5 not found: ID does not exist" Jan 20 17:24:56 crc kubenswrapper[4558]: I0120 17:24:56.631860 4558 scope.go:117] "RemoveContainer" containerID="c76a410774504ab797341c1d7f48bc1de18d21cf5e558db39395aadba82c5183" Jan 20 17:24:56 crc kubenswrapper[4558]: I0120 17:24:56.632591 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c76a410774504ab797341c1d7f48bc1de18d21cf5e558db39395aadba82c5183"} err="failed to get container status 
\"c76a410774504ab797341c1d7f48bc1de18d21cf5e558db39395aadba82c5183\": rpc error: code = NotFound desc = could not find container \"c76a410774504ab797341c1d7f48bc1de18d21cf5e558db39395aadba82c5183\": container with ID starting with c76a410774504ab797341c1d7f48bc1de18d21cf5e558db39395aadba82c5183 not found: ID does not exist" Jan 20 17:24:56 crc kubenswrapper[4558]: I0120 17:24:56.632608 4558 scope.go:117] "RemoveContainer" containerID="bf68c3455e206a3ffeb686a8b32efedb59c0194999e33b19fbaa3a7767cfdfa5" Jan 20 17:24:56 crc kubenswrapper[4558]: I0120 17:24:56.633211 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bf68c3455e206a3ffeb686a8b32efedb59c0194999e33b19fbaa3a7767cfdfa5"} err="failed to get container status \"bf68c3455e206a3ffeb686a8b32efedb59c0194999e33b19fbaa3a7767cfdfa5\": rpc error: code = NotFound desc = could not find container \"bf68c3455e206a3ffeb686a8b32efedb59c0194999e33b19fbaa3a7767cfdfa5\": container with ID starting with bf68c3455e206a3ffeb686a8b32efedb59c0194999e33b19fbaa3a7767cfdfa5 not found: ID does not exist" Jan 20 17:24:56 crc kubenswrapper[4558]: I0120 17:24:56.681341 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/feaaf8d6-cd55-4a27-9be8-8680bb156d10-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"feaaf8d6-cd55-4a27-9be8-8680bb156d10\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:24:56 crc kubenswrapper[4558]: I0120 17:24:56.681455 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/feaaf8d6-cd55-4a27-9be8-8680bb156d10-config-data\") pod \"nova-metadata-0\" (UID: \"feaaf8d6-cd55-4a27-9be8-8680bb156d10\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:24:56 crc kubenswrapper[4558]: I0120 17:24:56.681473 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/feaaf8d6-cd55-4a27-9be8-8680bb156d10-logs\") pod \"nova-metadata-0\" (UID: \"feaaf8d6-cd55-4a27-9be8-8680bb156d10\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:24:56 crc kubenswrapper[4558]: I0120 17:24:56.688896 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/feaaf8d6-cd55-4a27-9be8-8680bb156d10-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"feaaf8d6-cd55-4a27-9be8-8680bb156d10\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:24:56 crc kubenswrapper[4558]: I0120 17:24:56.691010 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fnv47\" (UniqueName: \"kubernetes.io/projected/feaaf8d6-cd55-4a27-9be8-8680bb156d10-kube-api-access-fnv47\") pod \"nova-metadata-0\" (UID: \"feaaf8d6-cd55-4a27-9be8-8680bb156d10\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:24:56 crc kubenswrapper[4558]: I0120 17:24:56.792518 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/feaaf8d6-cd55-4a27-9be8-8680bb156d10-config-data\") pod \"nova-metadata-0\" (UID: \"feaaf8d6-cd55-4a27-9be8-8680bb156d10\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:24:56 crc kubenswrapper[4558]: I0120 17:24:56.792549 4558 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/feaaf8d6-cd55-4a27-9be8-8680bb156d10-logs\") pod \"nova-metadata-0\" (UID: \"feaaf8d6-cd55-4a27-9be8-8680bb156d10\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:24:56 crc kubenswrapper[4558]: I0120 17:24:56.792582 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/feaaf8d6-cd55-4a27-9be8-8680bb156d10-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"feaaf8d6-cd55-4a27-9be8-8680bb156d10\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:24:56 crc kubenswrapper[4558]: I0120 17:24:56.792652 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fnv47\" (UniqueName: \"kubernetes.io/projected/feaaf8d6-cd55-4a27-9be8-8680bb156d10-kube-api-access-fnv47\") pod \"nova-metadata-0\" (UID: \"feaaf8d6-cd55-4a27-9be8-8680bb156d10\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:24:56 crc kubenswrapper[4558]: I0120 17:24:56.792686 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/feaaf8d6-cd55-4a27-9be8-8680bb156d10-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"feaaf8d6-cd55-4a27-9be8-8680bb156d10\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:24:56 crc kubenswrapper[4558]: I0120 17:24:56.793234 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/feaaf8d6-cd55-4a27-9be8-8680bb156d10-logs\") pod \"nova-metadata-0\" (UID: \"feaaf8d6-cd55-4a27-9be8-8680bb156d10\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:24:56 crc kubenswrapper[4558]: I0120 17:24:56.797027 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/feaaf8d6-cd55-4a27-9be8-8680bb156d10-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"feaaf8d6-cd55-4a27-9be8-8680bb156d10\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:24:56 crc kubenswrapper[4558]: I0120 17:24:56.800499 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/feaaf8d6-cd55-4a27-9be8-8680bb156d10-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"feaaf8d6-cd55-4a27-9be8-8680bb156d10\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:24:56 crc kubenswrapper[4558]: I0120 17:24:56.800656 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/feaaf8d6-cd55-4a27-9be8-8680bb156d10-config-data\") pod \"nova-metadata-0\" (UID: \"feaaf8d6-cd55-4a27-9be8-8680bb156d10\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:24:56 crc kubenswrapper[4558]: I0120 17:24:56.809337 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fnv47\" (UniqueName: \"kubernetes.io/projected/feaaf8d6-cd55-4a27-9be8-8680bb156d10-kube-api-access-fnv47\") pod \"nova-metadata-0\" (UID: \"feaaf8d6-cd55-4a27-9be8-8680bb156d10\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:24:56 crc kubenswrapper[4558]: I0120 17:24:56.976262 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:24:57 crc kubenswrapper[4558]: I0120 17:24:57.334612 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:24:57 crc kubenswrapper[4558]: I0120 17:24:57.408229 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hb7gx\" (UniqueName: \"kubernetes.io/projected/168b4665-9d96-4497-8e29-db3f6e38e3b2-kube-api-access-hb7gx\") pod \"168b4665-9d96-4497-8e29-db3f6e38e3b2\" (UID: \"168b4665-9d96-4497-8e29-db3f6e38e3b2\") " Jan 20 17:24:57 crc kubenswrapper[4558]: I0120 17:24:57.408442 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/168b4665-9d96-4497-8e29-db3f6e38e3b2-combined-ca-bundle\") pod \"168b4665-9d96-4497-8e29-db3f6e38e3b2\" (UID: \"168b4665-9d96-4497-8e29-db3f6e38e3b2\") " Jan 20 17:24:57 crc kubenswrapper[4558]: I0120 17:24:57.408474 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/168b4665-9d96-4497-8e29-db3f6e38e3b2-config-data\") pod \"168b4665-9d96-4497-8e29-db3f6e38e3b2\" (UID: \"168b4665-9d96-4497-8e29-db3f6e38e3b2\") " Jan 20 17:24:57 crc kubenswrapper[4558]: I0120 17:24:57.408524 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/168b4665-9d96-4497-8e29-db3f6e38e3b2-logs\") pod \"168b4665-9d96-4497-8e29-db3f6e38e3b2\" (UID: \"168b4665-9d96-4497-8e29-db3f6e38e3b2\") " Jan 20 17:24:57 crc kubenswrapper[4558]: I0120 17:24:57.409007 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/168b4665-9d96-4497-8e29-db3f6e38e3b2-logs" (OuterVolumeSpecName: "logs") pod "168b4665-9d96-4497-8e29-db3f6e38e3b2" (UID: "168b4665-9d96-4497-8e29-db3f6e38e3b2"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:24:57 crc kubenswrapper[4558]: I0120 17:24:57.414746 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/168b4665-9d96-4497-8e29-db3f6e38e3b2-kube-api-access-hb7gx" (OuterVolumeSpecName: "kube-api-access-hb7gx") pod "168b4665-9d96-4497-8e29-db3f6e38e3b2" (UID: "168b4665-9d96-4497-8e29-db3f6e38e3b2"). InnerVolumeSpecName "kube-api-access-hb7gx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:24:57 crc kubenswrapper[4558]: I0120 17:24:57.430669 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/168b4665-9d96-4497-8e29-db3f6e38e3b2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "168b4665-9d96-4497-8e29-db3f6e38e3b2" (UID: "168b4665-9d96-4497-8e29-db3f6e38e3b2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:24:57 crc kubenswrapper[4558]: I0120 17:24:57.432443 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/168b4665-9d96-4497-8e29-db3f6e38e3b2-config-data" (OuterVolumeSpecName: "config-data") pod "168b4665-9d96-4497-8e29-db3f6e38e3b2" (UID: "168b4665-9d96-4497-8e29-db3f6e38e3b2"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:24:57 crc kubenswrapper[4558]: I0120 17:24:57.505105 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:24:57 crc kubenswrapper[4558]: I0120 17:24:57.528482 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/168b4665-9d96-4497-8e29-db3f6e38e3b2-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:57 crc kubenswrapper[4558]: I0120 17:24:57.528520 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hb7gx\" (UniqueName: \"kubernetes.io/projected/168b4665-9d96-4497-8e29-db3f6e38e3b2-kube-api-access-hb7gx\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:57 crc kubenswrapper[4558]: I0120 17:24:57.528534 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/168b4665-9d96-4497-8e29-db3f6e38e3b2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:57 crc kubenswrapper[4558]: I0120 17:24:57.528546 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/168b4665-9d96-4497-8e29-db3f6e38e3b2-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:57 crc kubenswrapper[4558]: I0120 17:24:57.530823 4558 generic.go:334] "Generic (PLEG): container finished" podID="168b4665-9d96-4497-8e29-db3f6e38e3b2" containerID="696221c4db1df9918ddb542ae0b927fec9732f29aac33c16861876b42969ef12" exitCode=0 Jan 20 17:24:57 crc kubenswrapper[4558]: I0120 17:24:57.530868 4558 generic.go:334] "Generic (PLEG): container finished" podID="168b4665-9d96-4497-8e29-db3f6e38e3b2" containerID="fef705763856b6dedbc7147592c5b8597de86d2d0e46aae5868b7418bdff49b6" exitCode=143 Jan 20 17:24:57 crc kubenswrapper[4558]: I0120 17:24:57.531014 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:24:57 crc kubenswrapper[4558]: I0120 17:24:57.531002 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"168b4665-9d96-4497-8e29-db3f6e38e3b2","Type":"ContainerDied","Data":"696221c4db1df9918ddb542ae0b927fec9732f29aac33c16861876b42969ef12"} Jan 20 17:24:57 crc kubenswrapper[4558]: I0120 17:24:57.531228 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"168b4665-9d96-4497-8e29-db3f6e38e3b2","Type":"ContainerDied","Data":"fef705763856b6dedbc7147592c5b8597de86d2d0e46aae5868b7418bdff49b6"} Jan 20 17:24:57 crc kubenswrapper[4558]: I0120 17:24:57.531281 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"168b4665-9d96-4497-8e29-db3f6e38e3b2","Type":"ContainerDied","Data":"0583f90b6191d9901c9b86882c30910228b0b175e071385511bd31480fad1533"} Jan 20 17:24:57 crc kubenswrapper[4558]: I0120 17:24:57.531308 4558 scope.go:117] "RemoveContainer" containerID="696221c4db1df9918ddb542ae0b927fec9732f29aac33c16861876b42969ef12" Jan 20 17:24:57 crc kubenswrapper[4558]: I0120 17:24:57.563209 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:24:57 crc kubenswrapper[4558]: I0120 17:24:57.568735 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:24:57 crc kubenswrapper[4558]: I0120 17:24:57.589571 4558 scope.go:117] "RemoveContainer" containerID="fef705763856b6dedbc7147592c5b8597de86d2d0e46aae5868b7418bdff49b6" Jan 20 17:24:57 crc kubenswrapper[4558]: I0120 17:24:57.602281 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:24:57 crc kubenswrapper[4558]: E0120 17:24:57.603122 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="168b4665-9d96-4497-8e29-db3f6e38e3b2" containerName="nova-api-log" Jan 20 17:24:57 crc kubenswrapper[4558]: I0120 17:24:57.603761 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="168b4665-9d96-4497-8e29-db3f6e38e3b2" containerName="nova-api-log" Jan 20 17:24:57 crc kubenswrapper[4558]: E0120 17:24:57.603840 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="168b4665-9d96-4497-8e29-db3f6e38e3b2" containerName="nova-api-api" Jan 20 17:24:57 crc kubenswrapper[4558]: I0120 17:24:57.603854 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="168b4665-9d96-4497-8e29-db3f6e38e3b2" containerName="nova-api-api" Jan 20 17:24:57 crc kubenswrapper[4558]: I0120 17:24:57.604121 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="168b4665-9d96-4497-8e29-db3f6e38e3b2" containerName="nova-api-api" Jan 20 17:24:57 crc kubenswrapper[4558]: I0120 17:24:57.604146 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="168b4665-9d96-4497-8e29-db3f6e38e3b2" containerName="nova-api-log" Jan 20 17:24:57 crc kubenswrapper[4558]: E0120 17:24:57.604262 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="be8c548bad3d4853a64ebbae913ef2602961735470fbdaf076725d3ca8f5145e" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:24:57 crc kubenswrapper[4558]: I0120 17:24:57.605611 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:24:57 crc kubenswrapper[4558]: E0120 17:24:57.606625 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="be8c548bad3d4853a64ebbae913ef2602961735470fbdaf076725d3ca8f5145e" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:24:57 crc kubenswrapper[4558]: I0120 17:24:57.607142 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:24:57 crc kubenswrapper[4558]: E0120 17:24:57.614268 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="be8c548bad3d4853a64ebbae913ef2602961735470fbdaf076725d3ca8f5145e" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:24:57 crc kubenswrapper[4558]: E0120 17:24:57.614333 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="165828ca-683d-41f2-a055-7e99870bf131" containerName="nova-cell0-conductor-conductor" Jan 20 17:24:57 crc kubenswrapper[4558]: I0120 17:24:57.624536 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-api-config-data" Jan 20 17:24:57 crc kubenswrapper[4558]: I0120 17:24:57.662756 4558 scope.go:117] "RemoveContainer" containerID="696221c4db1df9918ddb542ae0b927fec9732f29aac33c16861876b42969ef12" Jan 20 17:24:57 crc kubenswrapper[4558]: E0120 17:24:57.667481 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"696221c4db1df9918ddb542ae0b927fec9732f29aac33c16861876b42969ef12\": container with ID starting with 696221c4db1df9918ddb542ae0b927fec9732f29aac33c16861876b42969ef12 not found: ID does not exist" containerID="696221c4db1df9918ddb542ae0b927fec9732f29aac33c16861876b42969ef12" Jan 20 17:24:57 crc kubenswrapper[4558]: I0120 17:24:57.667521 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"696221c4db1df9918ddb542ae0b927fec9732f29aac33c16861876b42969ef12"} err="failed to get container status \"696221c4db1df9918ddb542ae0b927fec9732f29aac33c16861876b42969ef12\": rpc error: code = NotFound desc = could not find container \"696221c4db1df9918ddb542ae0b927fec9732f29aac33c16861876b42969ef12\": container with ID starting with 696221c4db1df9918ddb542ae0b927fec9732f29aac33c16861876b42969ef12 not found: ID does not exist" Jan 20 17:24:57 crc kubenswrapper[4558]: I0120 17:24:57.667551 4558 scope.go:117] "RemoveContainer" containerID="fef705763856b6dedbc7147592c5b8597de86d2d0e46aae5868b7418bdff49b6" Jan 20 17:24:57 crc kubenswrapper[4558]: E0120 17:24:57.669030 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fef705763856b6dedbc7147592c5b8597de86d2d0e46aae5868b7418bdff49b6\": container with ID starting with fef705763856b6dedbc7147592c5b8597de86d2d0e46aae5868b7418bdff49b6 not found: ID does not exist" containerID="fef705763856b6dedbc7147592c5b8597de86d2d0e46aae5868b7418bdff49b6" Jan 20 17:24:57 crc kubenswrapper[4558]: I0120 17:24:57.669078 4558 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fef705763856b6dedbc7147592c5b8597de86d2d0e46aae5868b7418bdff49b6"} err="failed to get container status \"fef705763856b6dedbc7147592c5b8597de86d2d0e46aae5868b7418bdff49b6\": rpc error: code = NotFound desc = could not find container \"fef705763856b6dedbc7147592c5b8597de86d2d0e46aae5868b7418bdff49b6\": container with ID starting with fef705763856b6dedbc7147592c5b8597de86d2d0e46aae5868b7418bdff49b6 not found: ID does not exist" Jan 20 17:24:57 crc kubenswrapper[4558]: I0120 17:24:57.669100 4558 scope.go:117] "RemoveContainer" containerID="696221c4db1df9918ddb542ae0b927fec9732f29aac33c16861876b42969ef12" Jan 20 17:24:57 crc kubenswrapper[4558]: I0120 17:24:57.670819 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"696221c4db1df9918ddb542ae0b927fec9732f29aac33c16861876b42969ef12"} err="failed to get container status \"696221c4db1df9918ddb542ae0b927fec9732f29aac33c16861876b42969ef12\": rpc error: code = NotFound desc = could not find container \"696221c4db1df9918ddb542ae0b927fec9732f29aac33c16861876b42969ef12\": container with ID starting with 696221c4db1df9918ddb542ae0b927fec9732f29aac33c16861876b42969ef12 not found: ID does not exist" Jan 20 17:24:57 crc kubenswrapper[4558]: I0120 17:24:57.670845 4558 scope.go:117] "RemoveContainer" containerID="fef705763856b6dedbc7147592c5b8597de86d2d0e46aae5868b7418bdff49b6" Jan 20 17:24:57 crc kubenswrapper[4558]: I0120 17:24:57.671549 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fef705763856b6dedbc7147592c5b8597de86d2d0e46aae5868b7418bdff49b6"} err="failed to get container status \"fef705763856b6dedbc7147592c5b8597de86d2d0e46aae5868b7418bdff49b6\": rpc error: code = NotFound desc = could not find container \"fef705763856b6dedbc7147592c5b8597de86d2d0e46aae5868b7418bdff49b6\": container with ID starting with fef705763856b6dedbc7147592c5b8597de86d2d0e46aae5868b7418bdff49b6 not found: ID does not exist" Jan 20 17:24:57 crc kubenswrapper[4558]: I0120 17:24:57.695492 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:24:57 crc kubenswrapper[4558]: I0120 17:24:57.695745 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="1364d2e8-742f-45c7-8620-65b25761a446" containerName="ceilometer-central-agent" containerID="cri-o://c9cb73bb1f963be4a87942f47e85faaa1a63fa1c61b02072bb1026c37a614ad5" gracePeriod=30 Jan 20 17:24:57 crc kubenswrapper[4558]: I0120 17:24:57.696112 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="1364d2e8-742f-45c7-8620-65b25761a446" containerName="proxy-httpd" containerID="cri-o://e1eb2d8ac1c120b5b284b58056545fd48762b4f7439d798ebacdc2d85342480c" gracePeriod=30 Jan 20 17:24:57 crc kubenswrapper[4558]: I0120 17:24:57.696196 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="1364d2e8-742f-45c7-8620-65b25761a446" containerName="sg-core" containerID="cri-o://ff2fb655d1873e9edafadb8f8dfe720555b1f3ef4341676568142f19f19ef375" gracePeriod=30 Jan 20 17:24:57 crc kubenswrapper[4558]: I0120 17:24:57.696234 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="1364d2e8-742f-45c7-8620-65b25761a446" 
containerName="ceilometer-notification-agent" containerID="cri-o://3de6489147bef99f8f1b647aa061be1af0d2fdfaaea6c6c7842738a0772f4741" gracePeriod=30 Jan 20 17:24:57 crc kubenswrapper[4558]: I0120 17:24:57.732948 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9a69275-3884-4ed0-9f8d-f5872c8645f1-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"c9a69275-3884-4ed0-9f8d-f5872c8645f1\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:24:57 crc kubenswrapper[4558]: I0120 17:24:57.733018 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9a69275-3884-4ed0-9f8d-f5872c8645f1-config-data\") pod \"nova-api-0\" (UID: \"c9a69275-3884-4ed0-9f8d-f5872c8645f1\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:24:57 crc kubenswrapper[4558]: I0120 17:24:57.733067 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jc49k\" (UniqueName: \"kubernetes.io/projected/c9a69275-3884-4ed0-9f8d-f5872c8645f1-kube-api-access-jc49k\") pod \"nova-api-0\" (UID: \"c9a69275-3884-4ed0-9f8d-f5872c8645f1\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:24:57 crc kubenswrapper[4558]: I0120 17:24:57.733273 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c9a69275-3884-4ed0-9f8d-f5872c8645f1-logs\") pod \"nova-api-0\" (UID: \"c9a69275-3884-4ed0-9f8d-f5872c8645f1\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:24:57 crc kubenswrapper[4558]: I0120 17:24:57.838538 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c9a69275-3884-4ed0-9f8d-f5872c8645f1-logs\") pod \"nova-api-0\" (UID: \"c9a69275-3884-4ed0-9f8d-f5872c8645f1\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:24:57 crc kubenswrapper[4558]: I0120 17:24:57.838925 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c9a69275-3884-4ed0-9f8d-f5872c8645f1-logs\") pod \"nova-api-0\" (UID: \"c9a69275-3884-4ed0-9f8d-f5872c8645f1\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:24:57 crc kubenswrapper[4558]: I0120 17:24:57.838938 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9a69275-3884-4ed0-9f8d-f5872c8645f1-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"c9a69275-3884-4ed0-9f8d-f5872c8645f1\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:24:57 crc kubenswrapper[4558]: I0120 17:24:57.839054 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9a69275-3884-4ed0-9f8d-f5872c8645f1-config-data\") pod \"nova-api-0\" (UID: \"c9a69275-3884-4ed0-9f8d-f5872c8645f1\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:24:57 crc kubenswrapper[4558]: I0120 17:24:57.839122 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jc49k\" (UniqueName: \"kubernetes.io/projected/c9a69275-3884-4ed0-9f8d-f5872c8645f1-kube-api-access-jc49k\") pod \"nova-api-0\" (UID: \"c9a69275-3884-4ed0-9f8d-f5872c8645f1\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:24:57 crc kubenswrapper[4558]: I0120 17:24:57.842741 4558 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9a69275-3884-4ed0-9f8d-f5872c8645f1-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"c9a69275-3884-4ed0-9f8d-f5872c8645f1\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:24:57 crc kubenswrapper[4558]: I0120 17:24:57.847089 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9a69275-3884-4ed0-9f8d-f5872c8645f1-config-data\") pod \"nova-api-0\" (UID: \"c9a69275-3884-4ed0-9f8d-f5872c8645f1\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:24:57 crc kubenswrapper[4558]: I0120 17:24:57.856447 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jc49k\" (UniqueName: \"kubernetes.io/projected/c9a69275-3884-4ed0-9f8d-f5872c8645f1-kube-api-access-jc49k\") pod \"nova-api-0\" (UID: \"c9a69275-3884-4ed0-9f8d-f5872c8645f1\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:24:57 crc kubenswrapper[4558]: I0120 17:24:57.980937 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.386441 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:24:58 crc kubenswrapper[4558]: W0120 17:24:58.391882 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc9a69275_3884_4ed0_9f8d_f5872c8645f1.slice/crio-31c5e444b70211e77cbdbeec6edba13dd687ee7385d59fb1003c191626bdcffa WatchSource:0}: Error finding container 31c5e444b70211e77cbdbeec6edba13dd687ee7385d59fb1003c191626bdcffa: Status 404 returned error can't find the container with id 31c5e444b70211e77cbdbeec6edba13dd687ee7385d59fb1003c191626bdcffa Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.418003 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.555339 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5djjm\" (UniqueName: \"kubernetes.io/projected/1364d2e8-742f-45c7-8620-65b25761a446-kube-api-access-5djjm\") pod \"1364d2e8-742f-45c7-8620-65b25761a446\" (UID: \"1364d2e8-742f-45c7-8620-65b25761a446\") " Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.555516 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1364d2e8-742f-45c7-8620-65b25761a446-combined-ca-bundle\") pod \"1364d2e8-742f-45c7-8620-65b25761a446\" (UID: \"1364d2e8-742f-45c7-8620-65b25761a446\") " Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.555615 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1364d2e8-742f-45c7-8620-65b25761a446-log-httpd\") pod \"1364d2e8-742f-45c7-8620-65b25761a446\" (UID: \"1364d2e8-742f-45c7-8620-65b25761a446\") " Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.555863 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1364d2e8-742f-45c7-8620-65b25761a446-sg-core-conf-yaml\") pod \"1364d2e8-742f-45c7-8620-65b25761a446\" (UID: \"1364d2e8-742f-45c7-8620-65b25761a446\") " Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.555995 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1364d2e8-742f-45c7-8620-65b25761a446-scripts\") pod \"1364d2e8-742f-45c7-8620-65b25761a446\" (UID: \"1364d2e8-742f-45c7-8620-65b25761a446\") " Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.556034 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1364d2e8-742f-45c7-8620-65b25761a446-run-httpd\") pod \"1364d2e8-742f-45c7-8620-65b25761a446\" (UID: \"1364d2e8-742f-45c7-8620-65b25761a446\") " Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.556069 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1364d2e8-742f-45c7-8620-65b25761a446-config-data\") pod \"1364d2e8-742f-45c7-8620-65b25761a446\" (UID: \"1364d2e8-742f-45c7-8620-65b25761a446\") " Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.558263 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1364d2e8-742f-45c7-8620-65b25761a446-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "1364d2e8-742f-45c7-8620-65b25761a446" (UID: "1364d2e8-742f-45c7-8620-65b25761a446"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.559373 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1364d2e8-742f-45c7-8620-65b25761a446-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "1364d2e8-742f-45c7-8620-65b25761a446" (UID: "1364d2e8-742f-45c7-8620-65b25761a446"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.561860 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"c9a69275-3884-4ed0-9f8d-f5872c8645f1","Type":"ContainerStarted","Data":"31c5e444b70211e77cbdbeec6edba13dd687ee7385d59fb1003c191626bdcffa"} Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.563152 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1364d2e8-742f-45c7-8620-65b25761a446-scripts" (OuterVolumeSpecName: "scripts") pod "1364d2e8-742f-45c7-8620-65b25761a446" (UID: "1364d2e8-742f-45c7-8620-65b25761a446"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.580737 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1364d2e8-742f-45c7-8620-65b25761a446-kube-api-access-5djjm" (OuterVolumeSpecName: "kube-api-access-5djjm") pod "1364d2e8-742f-45c7-8620-65b25761a446" (UID: "1364d2e8-742f-45c7-8620-65b25761a446"). InnerVolumeSpecName "kube-api-access-5djjm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.580894 4558 generic.go:334] "Generic (PLEG): container finished" podID="1364d2e8-742f-45c7-8620-65b25761a446" containerID="e1eb2d8ac1c120b5b284b58056545fd48762b4f7439d798ebacdc2d85342480c" exitCode=0 Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.580939 4558 generic.go:334] "Generic (PLEG): container finished" podID="1364d2e8-742f-45c7-8620-65b25761a446" containerID="ff2fb655d1873e9edafadb8f8dfe720555b1f3ef4341676568142f19f19ef375" exitCode=2 Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.580950 4558 generic.go:334] "Generic (PLEG): container finished" podID="1364d2e8-742f-45c7-8620-65b25761a446" containerID="3de6489147bef99f8f1b647aa061be1af0d2fdfaaea6c6c7842738a0772f4741" exitCode=0 Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.580963 4558 generic.go:334] "Generic (PLEG): container finished" podID="1364d2e8-742f-45c7-8620-65b25761a446" containerID="c9cb73bb1f963be4a87942f47e85faaa1a63fa1c61b02072bb1026c37a614ad5" exitCode=0 Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.581116 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.588419 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-metadata-0" podStartSLOduration=2.588401326 podStartE2EDuration="2.588401326s" podCreationTimestamp="2026-01-20 17:24:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:24:58.584792702 +0000 UTC m=+2592.345130669" watchObservedRunningTime="2026-01-20 17:24:58.588401326 +0000 UTC m=+2592.348739284" Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.588701 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1364d2e8-742f-45c7-8620-65b25761a446-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.594959 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5djjm\" (UniqueName: \"kubernetes.io/projected/1364d2e8-742f-45c7-8620-65b25761a446-kube-api-access-5djjm\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.596090 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1364d2e8-742f-45c7-8620-65b25761a446-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.596107 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1364d2e8-742f-45c7-8620-65b25761a446-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.611962 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="168b4665-9d96-4497-8e29-db3f6e38e3b2" path="/var/lib/kubelet/pods/168b4665-9d96-4497-8e29-db3f6e38e3b2/volumes" Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.612989 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="df4290b9-86af-428a-bb18-68fafc4faac9" path="/var/lib/kubelet/pods/df4290b9-86af-428a-bb18-68fafc4faac9/volumes" Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.621879 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1364d2e8-742f-45c7-8620-65b25761a446-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "1364d2e8-742f-45c7-8620-65b25761a446" (UID: "1364d2e8-742f-45c7-8620-65b25761a446"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.677554 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1364d2e8-742f-45c7-8620-65b25761a446-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1364d2e8-742f-45c7-8620-65b25761a446" (UID: "1364d2e8-742f-45c7-8620-65b25761a446"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.698731 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1364d2e8-742f-45c7-8620-65b25761a446-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.699335 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1364d2e8-742f-45c7-8620-65b25761a446-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.705280 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1364d2e8-742f-45c7-8620-65b25761a446-config-data" (OuterVolumeSpecName: "config-data") pod "1364d2e8-742f-45c7-8620-65b25761a446" (UID: "1364d2e8-742f-45c7-8620-65b25761a446"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.743503 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"feaaf8d6-cd55-4a27-9be8-8680bb156d10","Type":"ContainerStarted","Data":"2bc171abc2782ea95314f69f8dbfa8515200e8d4df02459df56ccc1d4940592f"} Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.743546 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"feaaf8d6-cd55-4a27-9be8-8680bb156d10","Type":"ContainerStarted","Data":"886ae90e3a41affbd09898da8f0a852f6e6f37a4b3b62890323b0310f9d85238"} Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.743562 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"feaaf8d6-cd55-4a27-9be8-8680bb156d10","Type":"ContainerStarted","Data":"61fc00c84fe86ea84f0ff2d55c563c0af82ab786ae6ea69d1088694508acf931"} Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.743581 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"1364d2e8-742f-45c7-8620-65b25761a446","Type":"ContainerDied","Data":"e1eb2d8ac1c120b5b284b58056545fd48762b4f7439d798ebacdc2d85342480c"} Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.743600 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"1364d2e8-742f-45c7-8620-65b25761a446","Type":"ContainerDied","Data":"ff2fb655d1873e9edafadb8f8dfe720555b1f3ef4341676568142f19f19ef375"} Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.743611 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"1364d2e8-742f-45c7-8620-65b25761a446","Type":"ContainerDied","Data":"3de6489147bef99f8f1b647aa061be1af0d2fdfaaea6c6c7842738a0772f4741"} Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.743624 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"1364d2e8-742f-45c7-8620-65b25761a446","Type":"ContainerDied","Data":"c9cb73bb1f963be4a87942f47e85faaa1a63fa1c61b02072bb1026c37a614ad5"} Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.743633 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"1364d2e8-742f-45c7-8620-65b25761a446","Type":"ContainerDied","Data":"9b392c73014eb00d4a04d80619b78c90d5ecea2f6e17cfd221482af945465742"} Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 
17:24:58.743652 4558 scope.go:117] "RemoveContainer" containerID="e1eb2d8ac1c120b5b284b58056545fd48762b4f7439d798ebacdc2d85342480c" Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.784084 4558 scope.go:117] "RemoveContainer" containerID="ff2fb655d1873e9edafadb8f8dfe720555b1f3ef4341676568142f19f19ef375" Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.802802 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1364d2e8-742f-45c7-8620-65b25761a446-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.807722 4558 scope.go:117] "RemoveContainer" containerID="3de6489147bef99f8f1b647aa061be1af0d2fdfaaea6c6c7842738a0772f4741" Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.835502 4558 scope.go:117] "RemoveContainer" containerID="c9cb73bb1f963be4a87942f47e85faaa1a63fa1c61b02072bb1026c37a614ad5" Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.858482 4558 scope.go:117] "RemoveContainer" containerID="e1eb2d8ac1c120b5b284b58056545fd48762b4f7439d798ebacdc2d85342480c" Jan 20 17:24:58 crc kubenswrapper[4558]: E0120 17:24:58.858994 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e1eb2d8ac1c120b5b284b58056545fd48762b4f7439d798ebacdc2d85342480c\": container with ID starting with e1eb2d8ac1c120b5b284b58056545fd48762b4f7439d798ebacdc2d85342480c not found: ID does not exist" containerID="e1eb2d8ac1c120b5b284b58056545fd48762b4f7439d798ebacdc2d85342480c" Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.859038 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e1eb2d8ac1c120b5b284b58056545fd48762b4f7439d798ebacdc2d85342480c"} err="failed to get container status \"e1eb2d8ac1c120b5b284b58056545fd48762b4f7439d798ebacdc2d85342480c\": rpc error: code = NotFound desc = could not find container \"e1eb2d8ac1c120b5b284b58056545fd48762b4f7439d798ebacdc2d85342480c\": container with ID starting with e1eb2d8ac1c120b5b284b58056545fd48762b4f7439d798ebacdc2d85342480c not found: ID does not exist" Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.859069 4558 scope.go:117] "RemoveContainer" containerID="ff2fb655d1873e9edafadb8f8dfe720555b1f3ef4341676568142f19f19ef375" Jan 20 17:24:58 crc kubenswrapper[4558]: E0120 17:24:58.859452 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ff2fb655d1873e9edafadb8f8dfe720555b1f3ef4341676568142f19f19ef375\": container with ID starting with ff2fb655d1873e9edafadb8f8dfe720555b1f3ef4341676568142f19f19ef375 not found: ID does not exist" containerID="ff2fb655d1873e9edafadb8f8dfe720555b1f3ef4341676568142f19f19ef375" Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.859573 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ff2fb655d1873e9edafadb8f8dfe720555b1f3ef4341676568142f19f19ef375"} err="failed to get container status \"ff2fb655d1873e9edafadb8f8dfe720555b1f3ef4341676568142f19f19ef375\": rpc error: code = NotFound desc = could not find container \"ff2fb655d1873e9edafadb8f8dfe720555b1f3ef4341676568142f19f19ef375\": container with ID starting with ff2fb655d1873e9edafadb8f8dfe720555b1f3ef4341676568142f19f19ef375 not found: ID does not exist" Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.859662 4558 scope.go:117] "RemoveContainer" 
containerID="3de6489147bef99f8f1b647aa061be1af0d2fdfaaea6c6c7842738a0772f4741" Jan 20 17:24:58 crc kubenswrapper[4558]: E0120 17:24:58.860146 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3de6489147bef99f8f1b647aa061be1af0d2fdfaaea6c6c7842738a0772f4741\": container with ID starting with 3de6489147bef99f8f1b647aa061be1af0d2fdfaaea6c6c7842738a0772f4741 not found: ID does not exist" containerID="3de6489147bef99f8f1b647aa061be1af0d2fdfaaea6c6c7842738a0772f4741" Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.860273 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3de6489147bef99f8f1b647aa061be1af0d2fdfaaea6c6c7842738a0772f4741"} err="failed to get container status \"3de6489147bef99f8f1b647aa061be1af0d2fdfaaea6c6c7842738a0772f4741\": rpc error: code = NotFound desc = could not find container \"3de6489147bef99f8f1b647aa061be1af0d2fdfaaea6c6c7842738a0772f4741\": container with ID starting with 3de6489147bef99f8f1b647aa061be1af0d2fdfaaea6c6c7842738a0772f4741 not found: ID does not exist" Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.860296 4558 scope.go:117] "RemoveContainer" containerID="c9cb73bb1f963be4a87942f47e85faaa1a63fa1c61b02072bb1026c37a614ad5" Jan 20 17:24:58 crc kubenswrapper[4558]: E0120 17:24:58.860677 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c9cb73bb1f963be4a87942f47e85faaa1a63fa1c61b02072bb1026c37a614ad5\": container with ID starting with c9cb73bb1f963be4a87942f47e85faaa1a63fa1c61b02072bb1026c37a614ad5 not found: ID does not exist" containerID="c9cb73bb1f963be4a87942f47e85faaa1a63fa1c61b02072bb1026c37a614ad5" Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.861037 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c9cb73bb1f963be4a87942f47e85faaa1a63fa1c61b02072bb1026c37a614ad5"} err="failed to get container status \"c9cb73bb1f963be4a87942f47e85faaa1a63fa1c61b02072bb1026c37a614ad5\": rpc error: code = NotFound desc = could not find container \"c9cb73bb1f963be4a87942f47e85faaa1a63fa1c61b02072bb1026c37a614ad5\": container with ID starting with c9cb73bb1f963be4a87942f47e85faaa1a63fa1c61b02072bb1026c37a614ad5 not found: ID does not exist" Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.861119 4558 scope.go:117] "RemoveContainer" containerID="e1eb2d8ac1c120b5b284b58056545fd48762b4f7439d798ebacdc2d85342480c" Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.861587 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e1eb2d8ac1c120b5b284b58056545fd48762b4f7439d798ebacdc2d85342480c"} err="failed to get container status \"e1eb2d8ac1c120b5b284b58056545fd48762b4f7439d798ebacdc2d85342480c\": rpc error: code = NotFound desc = could not find container \"e1eb2d8ac1c120b5b284b58056545fd48762b4f7439d798ebacdc2d85342480c\": container with ID starting with e1eb2d8ac1c120b5b284b58056545fd48762b4f7439d798ebacdc2d85342480c not found: ID does not exist" Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.861614 4558 scope.go:117] "RemoveContainer" containerID="ff2fb655d1873e9edafadb8f8dfe720555b1f3ef4341676568142f19f19ef375" Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.862048 4558 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"ff2fb655d1873e9edafadb8f8dfe720555b1f3ef4341676568142f19f19ef375"} err="failed to get container status \"ff2fb655d1873e9edafadb8f8dfe720555b1f3ef4341676568142f19f19ef375\": rpc error: code = NotFound desc = could not find container \"ff2fb655d1873e9edafadb8f8dfe720555b1f3ef4341676568142f19f19ef375\": container with ID starting with ff2fb655d1873e9edafadb8f8dfe720555b1f3ef4341676568142f19f19ef375 not found: ID does not exist" Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.862103 4558 scope.go:117] "RemoveContainer" containerID="3de6489147bef99f8f1b647aa061be1af0d2fdfaaea6c6c7842738a0772f4741" Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.862376 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3de6489147bef99f8f1b647aa061be1af0d2fdfaaea6c6c7842738a0772f4741"} err="failed to get container status \"3de6489147bef99f8f1b647aa061be1af0d2fdfaaea6c6c7842738a0772f4741\": rpc error: code = NotFound desc = could not find container \"3de6489147bef99f8f1b647aa061be1af0d2fdfaaea6c6c7842738a0772f4741\": container with ID starting with 3de6489147bef99f8f1b647aa061be1af0d2fdfaaea6c6c7842738a0772f4741 not found: ID does not exist" Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.862400 4558 scope.go:117] "RemoveContainer" containerID="c9cb73bb1f963be4a87942f47e85faaa1a63fa1c61b02072bb1026c37a614ad5" Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.862660 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c9cb73bb1f963be4a87942f47e85faaa1a63fa1c61b02072bb1026c37a614ad5"} err="failed to get container status \"c9cb73bb1f963be4a87942f47e85faaa1a63fa1c61b02072bb1026c37a614ad5\": rpc error: code = NotFound desc = could not find container \"c9cb73bb1f963be4a87942f47e85faaa1a63fa1c61b02072bb1026c37a614ad5\": container with ID starting with c9cb73bb1f963be4a87942f47e85faaa1a63fa1c61b02072bb1026c37a614ad5 not found: ID does not exist" Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.862686 4558 scope.go:117] "RemoveContainer" containerID="e1eb2d8ac1c120b5b284b58056545fd48762b4f7439d798ebacdc2d85342480c" Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.862936 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e1eb2d8ac1c120b5b284b58056545fd48762b4f7439d798ebacdc2d85342480c"} err="failed to get container status \"e1eb2d8ac1c120b5b284b58056545fd48762b4f7439d798ebacdc2d85342480c\": rpc error: code = NotFound desc = could not find container \"e1eb2d8ac1c120b5b284b58056545fd48762b4f7439d798ebacdc2d85342480c\": container with ID starting with e1eb2d8ac1c120b5b284b58056545fd48762b4f7439d798ebacdc2d85342480c not found: ID does not exist" Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.862958 4558 scope.go:117] "RemoveContainer" containerID="ff2fb655d1873e9edafadb8f8dfe720555b1f3ef4341676568142f19f19ef375" Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.863223 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ff2fb655d1873e9edafadb8f8dfe720555b1f3ef4341676568142f19f19ef375"} err="failed to get container status \"ff2fb655d1873e9edafadb8f8dfe720555b1f3ef4341676568142f19f19ef375\": rpc error: code = NotFound desc = could not find container \"ff2fb655d1873e9edafadb8f8dfe720555b1f3ef4341676568142f19f19ef375\": container with ID starting with ff2fb655d1873e9edafadb8f8dfe720555b1f3ef4341676568142f19f19ef375 not found: ID does not exist" Jan 
20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.863250 4558 scope.go:117] "RemoveContainer" containerID="3de6489147bef99f8f1b647aa061be1af0d2fdfaaea6c6c7842738a0772f4741" Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.863530 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3de6489147bef99f8f1b647aa061be1af0d2fdfaaea6c6c7842738a0772f4741"} err="failed to get container status \"3de6489147bef99f8f1b647aa061be1af0d2fdfaaea6c6c7842738a0772f4741\": rpc error: code = NotFound desc = could not find container \"3de6489147bef99f8f1b647aa061be1af0d2fdfaaea6c6c7842738a0772f4741\": container with ID starting with 3de6489147bef99f8f1b647aa061be1af0d2fdfaaea6c6c7842738a0772f4741 not found: ID does not exist" Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.863626 4558 scope.go:117] "RemoveContainer" containerID="c9cb73bb1f963be4a87942f47e85faaa1a63fa1c61b02072bb1026c37a614ad5" Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.863966 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c9cb73bb1f963be4a87942f47e85faaa1a63fa1c61b02072bb1026c37a614ad5"} err="failed to get container status \"c9cb73bb1f963be4a87942f47e85faaa1a63fa1c61b02072bb1026c37a614ad5\": rpc error: code = NotFound desc = could not find container \"c9cb73bb1f963be4a87942f47e85faaa1a63fa1c61b02072bb1026c37a614ad5\": container with ID starting with c9cb73bb1f963be4a87942f47e85faaa1a63fa1c61b02072bb1026c37a614ad5 not found: ID does not exist" Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.863994 4558 scope.go:117] "RemoveContainer" containerID="e1eb2d8ac1c120b5b284b58056545fd48762b4f7439d798ebacdc2d85342480c" Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.864272 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e1eb2d8ac1c120b5b284b58056545fd48762b4f7439d798ebacdc2d85342480c"} err="failed to get container status \"e1eb2d8ac1c120b5b284b58056545fd48762b4f7439d798ebacdc2d85342480c\": rpc error: code = NotFound desc = could not find container \"e1eb2d8ac1c120b5b284b58056545fd48762b4f7439d798ebacdc2d85342480c\": container with ID starting with e1eb2d8ac1c120b5b284b58056545fd48762b4f7439d798ebacdc2d85342480c not found: ID does not exist" Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.864294 4558 scope.go:117] "RemoveContainer" containerID="ff2fb655d1873e9edafadb8f8dfe720555b1f3ef4341676568142f19f19ef375" Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.864624 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ff2fb655d1873e9edafadb8f8dfe720555b1f3ef4341676568142f19f19ef375"} err="failed to get container status \"ff2fb655d1873e9edafadb8f8dfe720555b1f3ef4341676568142f19f19ef375\": rpc error: code = NotFound desc = could not find container \"ff2fb655d1873e9edafadb8f8dfe720555b1f3ef4341676568142f19f19ef375\": container with ID starting with ff2fb655d1873e9edafadb8f8dfe720555b1f3ef4341676568142f19f19ef375 not found: ID does not exist" Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.864649 4558 scope.go:117] "RemoveContainer" containerID="3de6489147bef99f8f1b647aa061be1af0d2fdfaaea6c6c7842738a0772f4741" Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.864906 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3de6489147bef99f8f1b647aa061be1af0d2fdfaaea6c6c7842738a0772f4741"} err="failed to get container status 
\"3de6489147bef99f8f1b647aa061be1af0d2fdfaaea6c6c7842738a0772f4741\": rpc error: code = NotFound desc = could not find container \"3de6489147bef99f8f1b647aa061be1af0d2fdfaaea6c6c7842738a0772f4741\": container with ID starting with 3de6489147bef99f8f1b647aa061be1af0d2fdfaaea6c6c7842738a0772f4741 not found: ID does not exist" Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.864930 4558 scope.go:117] "RemoveContainer" containerID="c9cb73bb1f963be4a87942f47e85faaa1a63fa1c61b02072bb1026c37a614ad5" Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.865209 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c9cb73bb1f963be4a87942f47e85faaa1a63fa1c61b02072bb1026c37a614ad5"} err="failed to get container status \"c9cb73bb1f963be4a87942f47e85faaa1a63fa1c61b02072bb1026c37a614ad5\": rpc error: code = NotFound desc = could not find container \"c9cb73bb1f963be4a87942f47e85faaa1a63fa1c61b02072bb1026c37a614ad5\": container with ID starting with c9cb73bb1f963be4a87942f47e85faaa1a63fa1c61b02072bb1026c37a614ad5 not found: ID does not exist" Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.917522 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.931415 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.945099 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:24:58 crc kubenswrapper[4558]: E0120 17:24:58.945594 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1364d2e8-742f-45c7-8620-65b25761a446" containerName="ceilometer-central-agent" Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.945615 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1364d2e8-742f-45c7-8620-65b25761a446" containerName="ceilometer-central-agent" Jan 20 17:24:58 crc kubenswrapper[4558]: E0120 17:24:58.945637 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1364d2e8-742f-45c7-8620-65b25761a446" containerName="proxy-httpd" Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.945645 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1364d2e8-742f-45c7-8620-65b25761a446" containerName="proxy-httpd" Jan 20 17:24:58 crc kubenswrapper[4558]: E0120 17:24:58.945655 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1364d2e8-742f-45c7-8620-65b25761a446" containerName="ceilometer-notification-agent" Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.945661 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1364d2e8-742f-45c7-8620-65b25761a446" containerName="ceilometer-notification-agent" Jan 20 17:24:58 crc kubenswrapper[4558]: E0120 17:24:58.945686 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1364d2e8-742f-45c7-8620-65b25761a446" containerName="sg-core" Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.945694 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1364d2e8-742f-45c7-8620-65b25761a446" containerName="sg-core" Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.945885 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="1364d2e8-742f-45c7-8620-65b25761a446" containerName="sg-core" Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.945905 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="1364d2e8-742f-45c7-8620-65b25761a446" 
containerName="ceilometer-central-agent" Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.945918 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="1364d2e8-742f-45c7-8620-65b25761a446" containerName="proxy-httpd" Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.945935 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="1364d2e8-742f-45c7-8620-65b25761a446" containerName="ceilometer-notification-agent" Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.947683 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.950632 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.950843 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:24:58 crc kubenswrapper[4558]: I0120 17:24:58.953773 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:24:59 crc kubenswrapper[4558]: I0120 17:24:59.006928 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3a0ace5-7ba6-400c-accb-e9934117e5da-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d3a0ace5-7ba6-400c-accb-e9934117e5da\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:59 crc kubenswrapper[4558]: I0120 17:24:59.007149 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3a0ace5-7ba6-400c-accb-e9934117e5da-config-data\") pod \"ceilometer-0\" (UID: \"d3a0ace5-7ba6-400c-accb-e9934117e5da\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:59 crc kubenswrapper[4558]: I0120 17:24:59.007267 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5m4vv\" (UniqueName: \"kubernetes.io/projected/d3a0ace5-7ba6-400c-accb-e9934117e5da-kube-api-access-5m4vv\") pod \"ceilometer-0\" (UID: \"d3a0ace5-7ba6-400c-accb-e9934117e5da\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:59 crc kubenswrapper[4558]: I0120 17:24:59.007409 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d3a0ace5-7ba6-400c-accb-e9934117e5da-scripts\") pod \"ceilometer-0\" (UID: \"d3a0ace5-7ba6-400c-accb-e9934117e5da\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:59 crc kubenswrapper[4558]: I0120 17:24:59.007540 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d3a0ace5-7ba6-400c-accb-e9934117e5da-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d3a0ace5-7ba6-400c-accb-e9934117e5da\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:59 crc kubenswrapper[4558]: I0120 17:24:59.007621 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d3a0ace5-7ba6-400c-accb-e9934117e5da-log-httpd\") pod \"ceilometer-0\" (UID: \"d3a0ace5-7ba6-400c-accb-e9934117e5da\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:59 crc kubenswrapper[4558]: I0120 17:24:59.007715 4558 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d3a0ace5-7ba6-400c-accb-e9934117e5da-run-httpd\") pod \"ceilometer-0\" (UID: \"d3a0ace5-7ba6-400c-accb-e9934117e5da\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:59 crc kubenswrapper[4558]: I0120 17:24:59.027207 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:24:59 crc kubenswrapper[4558]: I0120 17:24:59.109640 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3a0ace5-7ba6-400c-accb-e9934117e5da-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d3a0ace5-7ba6-400c-accb-e9934117e5da\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:59 crc kubenswrapper[4558]: I0120 17:24:59.109922 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3a0ace5-7ba6-400c-accb-e9934117e5da-config-data\") pod \"ceilometer-0\" (UID: \"d3a0ace5-7ba6-400c-accb-e9934117e5da\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:59 crc kubenswrapper[4558]: I0120 17:24:59.109946 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5m4vv\" (UniqueName: \"kubernetes.io/projected/d3a0ace5-7ba6-400c-accb-e9934117e5da-kube-api-access-5m4vv\") pod \"ceilometer-0\" (UID: \"d3a0ace5-7ba6-400c-accb-e9934117e5da\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:59 crc kubenswrapper[4558]: I0120 17:24:59.110016 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d3a0ace5-7ba6-400c-accb-e9934117e5da-scripts\") pod \"ceilometer-0\" (UID: \"d3a0ace5-7ba6-400c-accb-e9934117e5da\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:59 crc kubenswrapper[4558]: I0120 17:24:59.110064 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d3a0ace5-7ba6-400c-accb-e9934117e5da-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d3a0ace5-7ba6-400c-accb-e9934117e5da\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:59 crc kubenswrapper[4558]: I0120 17:24:59.110080 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d3a0ace5-7ba6-400c-accb-e9934117e5da-log-httpd\") pod \"ceilometer-0\" (UID: \"d3a0ace5-7ba6-400c-accb-e9934117e5da\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:59 crc kubenswrapper[4558]: I0120 17:24:59.110113 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d3a0ace5-7ba6-400c-accb-e9934117e5da-run-httpd\") pod \"ceilometer-0\" (UID: \"d3a0ace5-7ba6-400c-accb-e9934117e5da\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:59 crc kubenswrapper[4558]: I0120 17:24:59.110542 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d3a0ace5-7ba6-400c-accb-e9934117e5da-run-httpd\") pod \"ceilometer-0\" (UID: \"d3a0ace5-7ba6-400c-accb-e9934117e5da\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:59 crc kubenswrapper[4558]: I0120 17:24:59.110790 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" 
(UniqueName: \"kubernetes.io/empty-dir/d3a0ace5-7ba6-400c-accb-e9934117e5da-log-httpd\") pod \"ceilometer-0\" (UID: \"d3a0ace5-7ba6-400c-accb-e9934117e5da\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:59 crc kubenswrapper[4558]: I0120 17:24:59.113147 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3a0ace5-7ba6-400c-accb-e9934117e5da-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d3a0ace5-7ba6-400c-accb-e9934117e5da\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:59 crc kubenswrapper[4558]: I0120 17:24:59.113831 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d3a0ace5-7ba6-400c-accb-e9934117e5da-scripts\") pod \"ceilometer-0\" (UID: \"d3a0ace5-7ba6-400c-accb-e9934117e5da\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:59 crc kubenswrapper[4558]: I0120 17:24:59.113920 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3a0ace5-7ba6-400c-accb-e9934117e5da-config-data\") pod \"ceilometer-0\" (UID: \"d3a0ace5-7ba6-400c-accb-e9934117e5da\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:59 crc kubenswrapper[4558]: I0120 17:24:59.116850 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d3a0ace5-7ba6-400c-accb-e9934117e5da-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d3a0ace5-7ba6-400c-accb-e9934117e5da\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:59 crc kubenswrapper[4558]: I0120 17:24:59.125838 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5m4vv\" (UniqueName: \"kubernetes.io/projected/d3a0ace5-7ba6-400c-accb-e9934117e5da-kube-api-access-5m4vv\") pod \"ceilometer-0\" (UID: \"d3a0ace5-7ba6-400c-accb-e9934117e5da\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:59 crc kubenswrapper[4558]: I0120 17:24:59.225010 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:24:59 crc kubenswrapper[4558]: I0120 17:24:59.264250 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:24:59 crc kubenswrapper[4558]: I0120 17:24:59.405878 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:24:59 crc kubenswrapper[4558]: I0120 17:24:59.414589 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/165828ca-683d-41f2-a055-7e99870bf131-config-data\") pod \"165828ca-683d-41f2-a055-7e99870bf131\" (UID: \"165828ca-683d-41f2-a055-7e99870bf131\") " Jan 20 17:24:59 crc kubenswrapper[4558]: I0120 17:24:59.415003 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/165828ca-683d-41f2-a055-7e99870bf131-combined-ca-bundle\") pod \"165828ca-683d-41f2-a055-7e99870bf131\" (UID: \"165828ca-683d-41f2-a055-7e99870bf131\") " Jan 20 17:24:59 crc kubenswrapper[4558]: I0120 17:24:59.415130 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vndc8\" (UniqueName: \"kubernetes.io/projected/165828ca-683d-41f2-a055-7e99870bf131-kube-api-access-vndc8\") pod \"165828ca-683d-41f2-a055-7e99870bf131\" (UID: \"165828ca-683d-41f2-a055-7e99870bf131\") " Jan 20 17:24:59 crc kubenswrapper[4558]: I0120 17:24:59.419224 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/165828ca-683d-41f2-a055-7e99870bf131-kube-api-access-vndc8" (OuterVolumeSpecName: "kube-api-access-vndc8") pod "165828ca-683d-41f2-a055-7e99870bf131" (UID: "165828ca-683d-41f2-a055-7e99870bf131"). InnerVolumeSpecName "kube-api-access-vndc8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:24:59 crc kubenswrapper[4558]: I0120 17:24:59.457880 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/165828ca-683d-41f2-a055-7e99870bf131-config-data" (OuterVolumeSpecName: "config-data") pod "165828ca-683d-41f2-a055-7e99870bf131" (UID: "165828ca-683d-41f2-a055-7e99870bf131"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:24:59 crc kubenswrapper[4558]: I0120 17:24:59.486271 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/165828ca-683d-41f2-a055-7e99870bf131-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "165828ca-683d-41f2-a055-7e99870bf131" (UID: "165828ca-683d-41f2-a055-7e99870bf131"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:24:59 crc kubenswrapper[4558]: I0120 17:24:59.517446 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/165828ca-683d-41f2-a055-7e99870bf131-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:59 crc kubenswrapper[4558]: I0120 17:24:59.517474 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/165828ca-683d-41f2-a055-7e99870bf131-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:59 crc kubenswrapper[4558]: I0120 17:24:59.517488 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vndc8\" (UniqueName: \"kubernetes.io/projected/165828ca-683d-41f2-a055-7e99870bf131-kube-api-access-vndc8\") on node \"crc\" DevicePath \"\"" Jan 20 17:24:59 crc kubenswrapper[4558]: I0120 17:24:59.598023 4558 generic.go:334] "Generic (PLEG): container finished" podID="165828ca-683d-41f2-a055-7e99870bf131" containerID="be8c548bad3d4853a64ebbae913ef2602961735470fbdaf076725d3ca8f5145e" exitCode=0 Jan 20 17:24:59 crc kubenswrapper[4558]: I0120 17:24:59.598100 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"165828ca-683d-41f2-a055-7e99870bf131","Type":"ContainerDied","Data":"be8c548bad3d4853a64ebbae913ef2602961735470fbdaf076725d3ca8f5145e"} Jan 20 17:24:59 crc kubenswrapper[4558]: I0120 17:24:59.598145 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"165828ca-683d-41f2-a055-7e99870bf131","Type":"ContainerDied","Data":"f338fd1e92045bdfa117d8f2fbbd25aabf07d0176139c21b6ceada6be6da38fa"} Jan 20 17:24:59 crc kubenswrapper[4558]: I0120 17:24:59.598197 4558 scope.go:117] "RemoveContainer" containerID="be8c548bad3d4853a64ebbae913ef2602961735470fbdaf076725d3ca8f5145e" Jan 20 17:24:59 crc kubenswrapper[4558]: I0120 17:24:59.598295 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:24:59 crc kubenswrapper[4558]: I0120 17:24:59.604042 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"c9a69275-3884-4ed0-9f8d-f5872c8645f1","Type":"ContainerStarted","Data":"467c5d921903c3ba36bed931ac826b30cbb7da5a335e570e9acb5ddbb4ad3caf"} Jan 20 17:24:59 crc kubenswrapper[4558]: I0120 17:24:59.604096 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"c9a69275-3884-4ed0-9f8d-f5872c8645f1","Type":"ContainerStarted","Data":"b39372a57e0ac9e6c0b3dc139c905ed1fc75c72761b68cd666ec7b02cedfd8e3"} Jan 20 17:24:59 crc kubenswrapper[4558]: I0120 17:24:59.630816 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-api-0" podStartSLOduration=2.630805647 podStartE2EDuration="2.630805647s" podCreationTimestamp="2026-01-20 17:24:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:24:59.618892398 +0000 UTC m=+2593.379230366" watchObservedRunningTime="2026-01-20 17:24:59.630805647 +0000 UTC m=+2593.391143614" Jan 20 17:24:59 crc kubenswrapper[4558]: I0120 17:24:59.661095 4558 scope.go:117] "RemoveContainer" containerID="be8c548bad3d4853a64ebbae913ef2602961735470fbdaf076725d3ca8f5145e" Jan 20 17:24:59 crc kubenswrapper[4558]: E0120 17:24:59.661492 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"be8c548bad3d4853a64ebbae913ef2602961735470fbdaf076725d3ca8f5145e\": container with ID starting with be8c548bad3d4853a64ebbae913ef2602961735470fbdaf076725d3ca8f5145e not found: ID does not exist" containerID="be8c548bad3d4853a64ebbae913ef2602961735470fbdaf076725d3ca8f5145e" Jan 20 17:24:59 crc kubenswrapper[4558]: I0120 17:24:59.661558 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"be8c548bad3d4853a64ebbae913ef2602961735470fbdaf076725d3ca8f5145e"} err="failed to get container status \"be8c548bad3d4853a64ebbae913ef2602961735470fbdaf076725d3ca8f5145e\": rpc error: code = NotFound desc = could not find container \"be8c548bad3d4853a64ebbae913ef2602961735470fbdaf076725d3ca8f5145e\": container with ID starting with be8c548bad3d4853a64ebbae913ef2602961735470fbdaf076725d3ca8f5145e not found: ID does not exist" Jan 20 17:24:59 crc kubenswrapper[4558]: I0120 17:24:59.661944 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:24:59 crc kubenswrapper[4558]: I0120 17:24:59.673749 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:24:59 crc kubenswrapper[4558]: I0120 17:24:59.680738 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:24:59 crc kubenswrapper[4558]: E0120 17:24:59.681150 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="165828ca-683d-41f2-a055-7e99870bf131" containerName="nova-cell0-conductor-conductor" Jan 20 17:24:59 crc kubenswrapper[4558]: I0120 17:24:59.681184 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="165828ca-683d-41f2-a055-7e99870bf131" containerName="nova-cell0-conductor-conductor" Jan 20 17:24:59 crc kubenswrapper[4558]: I0120 17:24:59.681435 4558 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="165828ca-683d-41f2-a055-7e99870bf131" containerName="nova-cell0-conductor-conductor" Jan 20 17:24:59 crc kubenswrapper[4558]: I0120 17:24:59.682041 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:24:59 crc kubenswrapper[4558]: I0120 17:24:59.687596 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-conductor-config-data" Jan 20 17:24:59 crc kubenswrapper[4558]: I0120 17:24:59.688120 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:24:59 crc kubenswrapper[4558]: I0120 17:24:59.730584 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d490bdc-81eb-42cd-9a5d-ae4a8d2b4e51-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"5d490bdc-81eb-42cd-9a5d-ae4a8d2b4e51\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:24:59 crc kubenswrapper[4558]: I0120 17:24:59.730715 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-88wbn\" (UniqueName: \"kubernetes.io/projected/5d490bdc-81eb-42cd-9a5d-ae4a8d2b4e51-kube-api-access-88wbn\") pod \"nova-cell0-conductor-0\" (UID: \"5d490bdc-81eb-42cd-9a5d-ae4a8d2b4e51\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:24:59 crc kubenswrapper[4558]: I0120 17:24:59.730756 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d490bdc-81eb-42cd-9a5d-ae4a8d2b4e51-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"5d490bdc-81eb-42cd-9a5d-ae4a8d2b4e51\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:24:59 crc kubenswrapper[4558]: I0120 17:24:59.761816 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:24:59 crc kubenswrapper[4558]: I0120 17:24:59.831610 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d490bdc-81eb-42cd-9a5d-ae4a8d2b4e51-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"5d490bdc-81eb-42cd-9a5d-ae4a8d2b4e51\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:24:59 crc kubenswrapper[4558]: I0120 17:24:59.831753 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-88wbn\" (UniqueName: \"kubernetes.io/projected/5d490bdc-81eb-42cd-9a5d-ae4a8d2b4e51-kube-api-access-88wbn\") pod \"nova-cell0-conductor-0\" (UID: \"5d490bdc-81eb-42cd-9a5d-ae4a8d2b4e51\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:24:59 crc kubenswrapper[4558]: I0120 17:24:59.831797 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d490bdc-81eb-42cd-9a5d-ae4a8d2b4e51-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"5d490bdc-81eb-42cd-9a5d-ae4a8d2b4e51\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:24:59 crc kubenswrapper[4558]: I0120 17:24:59.835871 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d490bdc-81eb-42cd-9a5d-ae4a8d2b4e51-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"5d490bdc-81eb-42cd-9a5d-ae4a8d2b4e51\") " 
pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:24:59 crc kubenswrapper[4558]: I0120 17:24:59.838600 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d490bdc-81eb-42cd-9a5d-ae4a8d2b4e51-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"5d490bdc-81eb-42cd-9a5d-ae4a8d2b4e51\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:24:59 crc kubenswrapper[4558]: I0120 17:24:59.848240 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-88wbn\" (UniqueName: \"kubernetes.io/projected/5d490bdc-81eb-42cd-9a5d-ae4a8d2b4e51-kube-api-access-88wbn\") pod \"nova-cell0-conductor-0\" (UID: \"5d490bdc-81eb-42cd-9a5d-ae4a8d2b4e51\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:25:00 crc kubenswrapper[4558]: I0120 17:25:00.013715 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:25:00 crc kubenswrapper[4558]: I0120 17:25:00.411011 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:25:00 crc kubenswrapper[4558]: W0120 17:25:00.414694 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5d490bdc_81eb_42cd_9a5d_ae4a8d2b4e51.slice/crio-673ba5dc63634baef44eda303cbf75192417a099a81426f7425b1e8b14b7bf24 WatchSource:0}: Error finding container 673ba5dc63634baef44eda303cbf75192417a099a81426f7425b1e8b14b7bf24: Status 404 returned error can't find the container with id 673ba5dc63634baef44eda303cbf75192417a099a81426f7425b1e8b14b7bf24 Jan 20 17:25:00 crc kubenswrapper[4558]: I0120 17:25:00.584631 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1364d2e8-742f-45c7-8620-65b25761a446" path="/var/lib/kubelet/pods/1364d2e8-742f-45c7-8620-65b25761a446/volumes" Jan 20 17:25:00 crc kubenswrapper[4558]: I0120 17:25:00.585461 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="165828ca-683d-41f2-a055-7e99870bf131" path="/var/lib/kubelet/pods/165828ca-683d-41f2-a055-7e99870bf131/volumes" Jan 20 17:25:00 crc kubenswrapper[4558]: I0120 17:25:00.620666 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"5d490bdc-81eb-42cd-9a5d-ae4a8d2b4e51","Type":"ContainerStarted","Data":"4954099cb46eb467eb55768a743f3ad68bf55146245f1fb2f43a81ea0ff802cc"} Jan 20 17:25:00 crc kubenswrapper[4558]: I0120 17:25:00.620706 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"5d490bdc-81eb-42cd-9a5d-ae4a8d2b4e51","Type":"ContainerStarted","Data":"673ba5dc63634baef44eda303cbf75192417a099a81426f7425b1e8b14b7bf24"} Jan 20 17:25:00 crc kubenswrapper[4558]: I0120 17:25:00.621225 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:25:00 crc kubenswrapper[4558]: I0120 17:25:00.622204 4558 generic.go:334] "Generic (PLEG): container finished" podID="01e17b5d-e7c8-4751-9f98-524fdc40bbf3" containerID="ad4f89b78d4804b38b89b1fbc52cbdd49b304501413e6e54a707bd56ba2d1e8a" exitCode=0 Jan 20 17:25:00 crc kubenswrapper[4558]: I0120 17:25:00.622258 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-z28dz" 
event={"ID":"01e17b5d-e7c8-4751-9f98-524fdc40bbf3","Type":"ContainerDied","Data":"ad4f89b78d4804b38b89b1fbc52cbdd49b304501413e6e54a707bd56ba2d1e8a"} Jan 20 17:25:00 crc kubenswrapper[4558]: I0120 17:25:00.624562 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"d3a0ace5-7ba6-400c-accb-e9934117e5da","Type":"ContainerStarted","Data":"8f83799cf5630ce543cd9870bcb19a5209758941174c49ea050d72c3b9117819"} Jan 20 17:25:00 crc kubenswrapper[4558]: I0120 17:25:00.624602 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"d3a0ace5-7ba6-400c-accb-e9934117e5da","Type":"ContainerStarted","Data":"495e467f44dbe9a0841bbe757638899d42845ca73553e02263a2888b555f0df8"} Jan 20 17:25:00 crc kubenswrapper[4558]: I0120 17:25:00.644362 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podStartSLOduration=1.644341112 podStartE2EDuration="1.644341112s" podCreationTimestamp="2026-01-20 17:24:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:25:00.635697411 +0000 UTC m=+2594.396035378" watchObservedRunningTime="2026-01-20 17:25:00.644341112 +0000 UTC m=+2594.404679079" Jan 20 17:25:01 crc kubenswrapper[4558]: I0120 17:25:01.638778 4558 generic.go:334] "Generic (PLEG): container finished" podID="acd67900-69b3-485f-b378-80457d604be8" containerID="1bbe4af0bdcc4ee54aef97ab3e8325a227f63771456a354372adfab0b7220202" exitCode=0 Jan 20 17:25:01 crc kubenswrapper[4558]: I0120 17:25:01.639113 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-th49n" event={"ID":"acd67900-69b3-485f-b378-80457d604be8","Type":"ContainerDied","Data":"1bbe4af0bdcc4ee54aef97ab3e8325a227f63771456a354372adfab0b7220202"} Jan 20 17:25:01 crc kubenswrapper[4558]: I0120 17:25:01.643290 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"d3a0ace5-7ba6-400c-accb-e9934117e5da","Type":"ContainerStarted","Data":"3952ef2fb2d9911166bfbabed3b129a170e0cfc14d11eef5dad43318dfd2ec4a"} Jan 20 17:25:01 crc kubenswrapper[4558]: I0120 17:25:01.976320 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-z28dz" Jan 20 17:25:01 crc kubenswrapper[4558]: I0120 17:25:01.976750 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:25:01 crc kubenswrapper[4558]: I0120 17:25:01.977429 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:25:02 crc kubenswrapper[4558]: I0120 17:25:02.081295 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/01e17b5d-e7c8-4751-9f98-524fdc40bbf3-scripts\") pod \"01e17b5d-e7c8-4751-9f98-524fdc40bbf3\" (UID: \"01e17b5d-e7c8-4751-9f98-524fdc40bbf3\") " Jan 20 17:25:02 crc kubenswrapper[4558]: I0120 17:25:02.081376 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01e17b5d-e7c8-4751-9f98-524fdc40bbf3-combined-ca-bundle\") pod \"01e17b5d-e7c8-4751-9f98-524fdc40bbf3\" (UID: \"01e17b5d-e7c8-4751-9f98-524fdc40bbf3\") " Jan 20 17:25:02 crc kubenswrapper[4558]: I0120 17:25:02.081409 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vtl7t\" (UniqueName: \"kubernetes.io/projected/01e17b5d-e7c8-4751-9f98-524fdc40bbf3-kube-api-access-vtl7t\") pod \"01e17b5d-e7c8-4751-9f98-524fdc40bbf3\" (UID: \"01e17b5d-e7c8-4751-9f98-524fdc40bbf3\") " Jan 20 17:25:02 crc kubenswrapper[4558]: I0120 17:25:02.081555 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/01e17b5d-e7c8-4751-9f98-524fdc40bbf3-config-data\") pod \"01e17b5d-e7c8-4751-9f98-524fdc40bbf3\" (UID: \"01e17b5d-e7c8-4751-9f98-524fdc40bbf3\") " Jan 20 17:25:02 crc kubenswrapper[4558]: I0120 17:25:02.101388 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01e17b5d-e7c8-4751-9f98-524fdc40bbf3-scripts" (OuterVolumeSpecName: "scripts") pod "01e17b5d-e7c8-4751-9f98-524fdc40bbf3" (UID: "01e17b5d-e7c8-4751-9f98-524fdc40bbf3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:25:02 crc kubenswrapper[4558]: I0120 17:25:02.101447 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01e17b5d-e7c8-4751-9f98-524fdc40bbf3-kube-api-access-vtl7t" (OuterVolumeSpecName: "kube-api-access-vtl7t") pod "01e17b5d-e7c8-4751-9f98-524fdc40bbf3" (UID: "01e17b5d-e7c8-4751-9f98-524fdc40bbf3"). InnerVolumeSpecName "kube-api-access-vtl7t". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:25:02 crc kubenswrapper[4558]: I0120 17:25:02.108384 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01e17b5d-e7c8-4751-9f98-524fdc40bbf3-config-data" (OuterVolumeSpecName: "config-data") pod "01e17b5d-e7c8-4751-9f98-524fdc40bbf3" (UID: "01e17b5d-e7c8-4751-9f98-524fdc40bbf3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:25:02 crc kubenswrapper[4558]: I0120 17:25:02.109741 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01e17b5d-e7c8-4751-9f98-524fdc40bbf3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "01e17b5d-e7c8-4751-9f98-524fdc40bbf3" (UID: "01e17b5d-e7c8-4751-9f98-524fdc40bbf3"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:25:02 crc kubenswrapper[4558]: I0120 17:25:02.184652 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/01e17b5d-e7c8-4751-9f98-524fdc40bbf3-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:25:02 crc kubenswrapper[4558]: I0120 17:25:02.184697 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01e17b5d-e7c8-4751-9f98-524fdc40bbf3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:25:02 crc kubenswrapper[4558]: I0120 17:25:02.184713 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vtl7t\" (UniqueName: \"kubernetes.io/projected/01e17b5d-e7c8-4751-9f98-524fdc40bbf3-kube-api-access-vtl7t\") on node \"crc\" DevicePath \"\"" Jan 20 17:25:02 crc kubenswrapper[4558]: I0120 17:25:02.184725 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/01e17b5d-e7c8-4751-9f98-524fdc40bbf3-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:25:02 crc kubenswrapper[4558]: I0120 17:25:02.656943 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"d3a0ace5-7ba6-400c-accb-e9934117e5da","Type":"ContainerStarted","Data":"5a82e36836d32566689eeb37c86b42a4bed4d9a1a58decc778ba71f69e30da77"} Jan 20 17:25:02 crc kubenswrapper[4558]: I0120 17:25:02.658445 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-z28dz" event={"ID":"01e17b5d-e7c8-4751-9f98-524fdc40bbf3","Type":"ContainerDied","Data":"ec984f43d9f631776c32a5974670509776150478821b8fbb5c5ca9e30fcd0550"} Jan 20 17:25:02 crc kubenswrapper[4558]: I0120 17:25:02.658480 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ec984f43d9f631776c32a5974670509776150478821b8fbb5c5ca9e30fcd0550" Jan 20 17:25:02 crc kubenswrapper[4558]: I0120 17:25:02.658536 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-z28dz" Jan 20 17:25:02 crc kubenswrapper[4558]: I0120 17:25:02.726756 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:25:02 crc kubenswrapper[4558]: E0120 17:25:02.730740 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01e17b5d-e7c8-4751-9f98-524fdc40bbf3" containerName="nova-cell1-conductor-db-sync" Jan 20 17:25:02 crc kubenswrapper[4558]: I0120 17:25:02.730773 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="01e17b5d-e7c8-4751-9f98-524fdc40bbf3" containerName="nova-cell1-conductor-db-sync" Jan 20 17:25:02 crc kubenswrapper[4558]: I0120 17:25:02.730943 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="01e17b5d-e7c8-4751-9f98-524fdc40bbf3" containerName="nova-cell1-conductor-db-sync" Jan 20 17:25:02 crc kubenswrapper[4558]: I0120 17:25:02.731766 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:25:02 crc kubenswrapper[4558]: I0120 17:25:02.734184 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-conductor-config-data" Jan 20 17:25:02 crc kubenswrapper[4558]: I0120 17:25:02.735216 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:25:02 crc kubenswrapper[4558]: I0120 17:25:02.797586 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qm5t5\" (UniqueName: \"kubernetes.io/projected/d7d891f4-8ef8-4a21-9c7f-36088b864f7f-kube-api-access-qm5t5\") pod \"nova-cell1-conductor-0\" (UID: \"d7d891f4-8ef8-4a21-9c7f-36088b864f7f\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:25:02 crc kubenswrapper[4558]: I0120 17:25:02.798060 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7d891f4-8ef8-4a21-9c7f-36088b864f7f-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"d7d891f4-8ef8-4a21-9c7f-36088b864f7f\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:25:02 crc kubenswrapper[4558]: I0120 17:25:02.798184 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7d891f4-8ef8-4a21-9c7f-36088b864f7f-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"d7d891f4-8ef8-4a21-9c7f-36088b864f7f\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:25:02 crc kubenswrapper[4558]: I0120 17:25:02.900490 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7d891f4-8ef8-4a21-9c7f-36088b864f7f-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"d7d891f4-8ef8-4a21-9c7f-36088b864f7f\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:25:02 crc kubenswrapper[4558]: I0120 17:25:02.900560 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7d891f4-8ef8-4a21-9c7f-36088b864f7f-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"d7d891f4-8ef8-4a21-9c7f-36088b864f7f\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:25:02 crc kubenswrapper[4558]: I0120 17:25:02.900635 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qm5t5\" (UniqueName: \"kubernetes.io/projected/d7d891f4-8ef8-4a21-9c7f-36088b864f7f-kube-api-access-qm5t5\") pod \"nova-cell1-conductor-0\" (UID: \"d7d891f4-8ef8-4a21-9c7f-36088b864f7f\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:25:02 crc kubenswrapper[4558]: I0120 17:25:02.906621 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7d891f4-8ef8-4a21-9c7f-36088b864f7f-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"d7d891f4-8ef8-4a21-9c7f-36088b864f7f\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:25:02 crc kubenswrapper[4558]: I0120 17:25:02.908057 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7d891f4-8ef8-4a21-9c7f-36088b864f7f-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"d7d891f4-8ef8-4a21-9c7f-36088b864f7f\") " 
pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:25:02 crc kubenswrapper[4558]: I0120 17:25:02.914803 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qm5t5\" (UniqueName: \"kubernetes.io/projected/d7d891f4-8ef8-4a21-9c7f-36088b864f7f-kube-api-access-qm5t5\") pod \"nova-cell1-conductor-0\" (UID: \"d7d891f4-8ef8-4a21-9c7f-36088b864f7f\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:25:02 crc kubenswrapper[4558]: I0120 17:25:02.991598 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-th49n" Jan 20 17:25:03 crc kubenswrapper[4558]: I0120 17:25:03.060099 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:25:03 crc kubenswrapper[4558]: I0120 17:25:03.102305 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/acd67900-69b3-485f-b378-80457d604be8-config-data\") pod \"acd67900-69b3-485f-b378-80457d604be8\" (UID: \"acd67900-69b3-485f-b378-80457d604be8\") " Jan 20 17:25:03 crc kubenswrapper[4558]: I0120 17:25:03.102442 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acd67900-69b3-485f-b378-80457d604be8-combined-ca-bundle\") pod \"acd67900-69b3-485f-b378-80457d604be8\" (UID: \"acd67900-69b3-485f-b378-80457d604be8\") " Jan 20 17:25:03 crc kubenswrapper[4558]: I0120 17:25:03.102514 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/acd67900-69b3-485f-b378-80457d604be8-scripts\") pod \"acd67900-69b3-485f-b378-80457d604be8\" (UID: \"acd67900-69b3-485f-b378-80457d604be8\") " Jan 20 17:25:03 crc kubenswrapper[4558]: I0120 17:25:03.102590 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v8g56\" (UniqueName: \"kubernetes.io/projected/acd67900-69b3-485f-b378-80457d604be8-kube-api-access-v8g56\") pod \"acd67900-69b3-485f-b378-80457d604be8\" (UID: \"acd67900-69b3-485f-b378-80457d604be8\") " Jan 20 17:25:03 crc kubenswrapper[4558]: I0120 17:25:03.107004 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/acd67900-69b3-485f-b378-80457d604be8-scripts" (OuterVolumeSpecName: "scripts") pod "acd67900-69b3-485f-b378-80457d604be8" (UID: "acd67900-69b3-485f-b378-80457d604be8"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:25:03 crc kubenswrapper[4558]: I0120 17:25:03.107182 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/acd67900-69b3-485f-b378-80457d604be8-kube-api-access-v8g56" (OuterVolumeSpecName: "kube-api-access-v8g56") pod "acd67900-69b3-485f-b378-80457d604be8" (UID: "acd67900-69b3-485f-b378-80457d604be8"). InnerVolumeSpecName "kube-api-access-v8g56". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:25:03 crc kubenswrapper[4558]: I0120 17:25:03.128394 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/acd67900-69b3-485f-b378-80457d604be8-config-data" (OuterVolumeSpecName: "config-data") pod "acd67900-69b3-485f-b378-80457d604be8" (UID: "acd67900-69b3-485f-b378-80457d604be8"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:25:03 crc kubenswrapper[4558]: I0120 17:25:03.128383 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/acd67900-69b3-485f-b378-80457d604be8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "acd67900-69b3-485f-b378-80457d604be8" (UID: "acd67900-69b3-485f-b378-80457d604be8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:25:03 crc kubenswrapper[4558]: I0120 17:25:03.205509 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v8g56\" (UniqueName: \"kubernetes.io/projected/acd67900-69b3-485f-b378-80457d604be8-kube-api-access-v8g56\") on node \"crc\" DevicePath \"\"" Jan 20 17:25:03 crc kubenswrapper[4558]: I0120 17:25:03.205544 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/acd67900-69b3-485f-b378-80457d604be8-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:25:03 crc kubenswrapper[4558]: I0120 17:25:03.205556 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acd67900-69b3-485f-b378-80457d604be8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:25:03 crc kubenswrapper[4558]: I0120 17:25:03.205567 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/acd67900-69b3-485f-b378-80457d604be8-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:25:03 crc kubenswrapper[4558]: I0120 17:25:03.474584 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:25:03 crc kubenswrapper[4558]: I0120 17:25:03.673824 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-th49n" Jan 20 17:25:03 crc kubenswrapper[4558]: I0120 17:25:03.673796 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-th49n" event={"ID":"acd67900-69b3-485f-b378-80457d604be8","Type":"ContainerDied","Data":"12d36761612e8a3339bc705d6c7a0ced0db1629320d439f854c82cd965716f70"} Jan 20 17:25:03 crc kubenswrapper[4558]: I0120 17:25:03.674314 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="12d36761612e8a3339bc705d6c7a0ced0db1629320d439f854c82cd965716f70" Jan 20 17:25:03 crc kubenswrapper[4558]: I0120 17:25:03.676861 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"d7d891f4-8ef8-4a21-9c7f-36088b864f7f","Type":"ContainerStarted","Data":"3dde84a4d4b2d41242c14681a10217ff909436ad4cbf651a6f8291244c16021c"} Jan 20 17:25:03 crc kubenswrapper[4558]: I0120 17:25:03.676915 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"d7d891f4-8ef8-4a21-9c7f-36088b864f7f","Type":"ContainerStarted","Data":"ff4e129c7bc35532eb2e5aef70443b3c3007c86f4af2b71e09bd92a1f84b7881"} Jan 20 17:25:03 crc kubenswrapper[4558]: I0120 17:25:03.677452 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:25:03 crc kubenswrapper[4558]: I0120 17:25:03.679898 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"d3a0ace5-7ba6-400c-accb-e9934117e5da","Type":"ContainerStarted","Data":"3a8a63e1c605d09fa25d3af1eceb1336b0f46de98660562f8ed078f5456366e0"} Jan 20 17:25:03 crc kubenswrapper[4558]: I0120 17:25:03.680533 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:25:03 crc kubenswrapper[4558]: I0120 17:25:03.710720 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podStartSLOduration=1.710692879 podStartE2EDuration="1.710692879s" podCreationTimestamp="2026-01-20 17:25:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:25:03.698428281 +0000 UTC m=+2597.458766248" watchObservedRunningTime="2026-01-20 17:25:03.710692879 +0000 UTC m=+2597.471030846" Jan 20 17:25:03 crc kubenswrapper[4558]: I0120 17:25:03.719494 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=2.009075753 podStartE2EDuration="5.719484267s" podCreationTimestamp="2026-01-20 17:24:58 +0000 UTC" firstStartedPulling="2026-01-20 17:24:59.761444127 +0000 UTC m=+2593.521782094" lastFinishedPulling="2026-01-20 17:25:03.471852642 +0000 UTC m=+2597.232190608" observedRunningTime="2026-01-20 17:25:03.714883658 +0000 UTC m=+2597.475221625" watchObservedRunningTime="2026-01-20 17:25:03.719484267 +0000 UTC m=+2597.479822235" Jan 20 17:25:05 crc kubenswrapper[4558]: I0120 17:25:05.039262 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:25:05 crc kubenswrapper[4558]: I0120 17:25:05.573188 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:25:05 crc kubenswrapper[4558]: I0120 17:25:05.573636 4558 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="c9a69275-3884-4ed0-9f8d-f5872c8645f1" containerName="nova-api-log" containerID="cri-o://467c5d921903c3ba36bed931ac826b30cbb7da5a335e570e9acb5ddbb4ad3caf" gracePeriod=30 Jan 20 17:25:05 crc kubenswrapper[4558]: I0120 17:25:05.574016 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="c9a69275-3884-4ed0-9f8d-f5872c8645f1" containerName="nova-api-api" containerID="cri-o://b39372a57e0ac9e6c0b3dc139c905ed1fc75c72761b68cd666ec7b02cedfd8e3" gracePeriod=30 Jan 20 17:25:05 crc kubenswrapper[4558]: I0120 17:25:05.593490 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:25:05 crc kubenswrapper[4558]: I0120 17:25:05.593706 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="feaaf8d6-cd55-4a27-9be8-8680bb156d10" containerName="nova-metadata-log" containerID="cri-o://886ae90e3a41affbd09898da8f0a852f6e6f37a4b3b62890323b0310f9d85238" gracePeriod=30 Jan 20 17:25:05 crc kubenswrapper[4558]: I0120 17:25:05.593757 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="feaaf8d6-cd55-4a27-9be8-8680bb156d10" containerName="nova-metadata-metadata" containerID="cri-o://2bc171abc2782ea95314f69f8dbfa8515200e8d4df02459df56ccc1d4940592f" gracePeriod=30 Jan 20 17:25:05 crc kubenswrapper[4558]: I0120 17:25:05.731631 4558 generic.go:334] "Generic (PLEG): container finished" podID="c9a69275-3884-4ed0-9f8d-f5872c8645f1" containerID="467c5d921903c3ba36bed931ac826b30cbb7da5a335e570e9acb5ddbb4ad3caf" exitCode=143 Jan 20 17:25:05 crc kubenswrapper[4558]: I0120 17:25:05.731698 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"c9a69275-3884-4ed0-9f8d-f5872c8645f1","Type":"ContainerDied","Data":"467c5d921903c3ba36bed931ac826b30cbb7da5a335e570e9acb5ddbb4ad3caf"} Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.198376 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.265129 4558 util.go:48] "No ready sandbox for pod can be found. 
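
The DELETE handling above kills both nova-api-0 containers with gracePeriod=30, and the subsequent PLEG event shows nova-api-log finishing with exitCode=143. That is the usual 128+signal encoding: 143 = 128 + 15 (SIGTERM), i.e. the container stopped on the TERM signal inside the grace period, whereas 137 (128 + 9, SIGKILL) would have indicated the grace period expired. A few lines of Go to decode such codes, purely as an illustration:

// exitcode.go - decode shell-style "128 + signal" container exit codes such
// as the 143 seen above for the nova-api-log container.
package main

import (
	"fmt"
	"syscall"
)

func describe(code int) string {
	if code > 128 && code < 255 {
		sig := syscall.Signal(code - 128)
		return fmt.Sprintf("exit code %d = 128+%d (%s)", code, code-128, sig)
	}
	return fmt.Sprintf("exit code %d (not a signal exit)", code)
}

func main() {
	fmt.Println(describe(143)) // exit code 143 = 128+15 (terminated)
	fmt.Println(describe(137)) // exit code 137 = 128+9 (killed)
	fmt.Println(describe(0))   // exit code 0 (not a signal exit)
}
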
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.268240 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/feaaf8d6-cd55-4a27-9be8-8680bb156d10-nova-metadata-tls-certs\") pod \"feaaf8d6-cd55-4a27-9be8-8680bb156d10\" (UID: \"feaaf8d6-cd55-4a27-9be8-8680bb156d10\") " Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.268334 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/feaaf8d6-cd55-4a27-9be8-8680bb156d10-logs\") pod \"feaaf8d6-cd55-4a27-9be8-8680bb156d10\" (UID: \"feaaf8d6-cd55-4a27-9be8-8680bb156d10\") " Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.268417 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/feaaf8d6-cd55-4a27-9be8-8680bb156d10-config-data\") pod \"feaaf8d6-cd55-4a27-9be8-8680bb156d10\" (UID: \"feaaf8d6-cd55-4a27-9be8-8680bb156d10\") " Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.268456 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/feaaf8d6-cd55-4a27-9be8-8680bb156d10-combined-ca-bundle\") pod \"feaaf8d6-cd55-4a27-9be8-8680bb156d10\" (UID: \"feaaf8d6-cd55-4a27-9be8-8680bb156d10\") " Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.268484 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fnv47\" (UniqueName: \"kubernetes.io/projected/feaaf8d6-cd55-4a27-9be8-8680bb156d10-kube-api-access-fnv47\") pod \"feaaf8d6-cd55-4a27-9be8-8680bb156d10\" (UID: \"feaaf8d6-cd55-4a27-9be8-8680bb156d10\") " Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.268949 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/feaaf8d6-cd55-4a27-9be8-8680bb156d10-logs" (OuterVolumeSpecName: "logs") pod "feaaf8d6-cd55-4a27-9be8-8680bb156d10" (UID: "feaaf8d6-cd55-4a27-9be8-8680bb156d10"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.277802 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/feaaf8d6-cd55-4a27-9be8-8680bb156d10-kube-api-access-fnv47" (OuterVolumeSpecName: "kube-api-access-fnv47") pod "feaaf8d6-cd55-4a27-9be8-8680bb156d10" (UID: "feaaf8d6-cd55-4a27-9be8-8680bb156d10"). InnerVolumeSpecName "kube-api-access-fnv47". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.306356 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/feaaf8d6-cd55-4a27-9be8-8680bb156d10-config-data" (OuterVolumeSpecName: "config-data") pod "feaaf8d6-cd55-4a27-9be8-8680bb156d10" (UID: "feaaf8d6-cd55-4a27-9be8-8680bb156d10"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.311320 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/feaaf8d6-cd55-4a27-9be8-8680bb156d10-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "feaaf8d6-cd55-4a27-9be8-8680bb156d10" (UID: "feaaf8d6-cd55-4a27-9be8-8680bb156d10"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.325195 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/feaaf8d6-cd55-4a27-9be8-8680bb156d10-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "feaaf8d6-cd55-4a27-9be8-8680bb156d10" (UID: "feaaf8d6-cd55-4a27-9be8-8680bb156d10"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.369843 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9a69275-3884-4ed0-9f8d-f5872c8645f1-config-data\") pod \"c9a69275-3884-4ed0-9f8d-f5872c8645f1\" (UID: \"c9a69275-3884-4ed0-9f8d-f5872c8645f1\") " Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.370034 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jc49k\" (UniqueName: \"kubernetes.io/projected/c9a69275-3884-4ed0-9f8d-f5872c8645f1-kube-api-access-jc49k\") pod \"c9a69275-3884-4ed0-9f8d-f5872c8645f1\" (UID: \"c9a69275-3884-4ed0-9f8d-f5872c8645f1\") " Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.370497 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9a69275-3884-4ed0-9f8d-f5872c8645f1-combined-ca-bundle\") pod \"c9a69275-3884-4ed0-9f8d-f5872c8645f1\" (UID: \"c9a69275-3884-4ed0-9f8d-f5872c8645f1\") " Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.370683 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c9a69275-3884-4ed0-9f8d-f5872c8645f1-logs\") pod \"c9a69275-3884-4ed0-9f8d-f5872c8645f1\" (UID: \"c9a69275-3884-4ed0-9f8d-f5872c8645f1\") " Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.371371 4558 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/feaaf8d6-cd55-4a27-9be8-8680bb156d10-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.371445 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/feaaf8d6-cd55-4a27-9be8-8680bb156d10-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.371512 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/feaaf8d6-cd55-4a27-9be8-8680bb156d10-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.371565 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/feaaf8d6-cd55-4a27-9be8-8680bb156d10-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.371622 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fnv47\" (UniqueName: \"kubernetes.io/projected/feaaf8d6-cd55-4a27-9be8-8680bb156d10-kube-api-access-fnv47\") on node \"crc\" DevicePath \"\"" Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.372078 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c9a69275-3884-4ed0-9f8d-f5872c8645f1-logs" (OuterVolumeSpecName: "logs") pod "c9a69275-3884-4ed0-9f8d-f5872c8645f1" (UID: 
"c9a69275-3884-4ed0-9f8d-f5872c8645f1"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.374348 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c9a69275-3884-4ed0-9f8d-f5872c8645f1-kube-api-access-jc49k" (OuterVolumeSpecName: "kube-api-access-jc49k") pod "c9a69275-3884-4ed0-9f8d-f5872c8645f1" (UID: "c9a69275-3884-4ed0-9f8d-f5872c8645f1"). InnerVolumeSpecName "kube-api-access-jc49k". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.396511 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c9a69275-3884-4ed0-9f8d-f5872c8645f1-config-data" (OuterVolumeSpecName: "config-data") pod "c9a69275-3884-4ed0-9f8d-f5872c8645f1" (UID: "c9a69275-3884-4ed0-9f8d-f5872c8645f1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.397557 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c9a69275-3884-4ed0-9f8d-f5872c8645f1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c9a69275-3884-4ed0-9f8d-f5872c8645f1" (UID: "c9a69275-3884-4ed0-9f8d-f5872c8645f1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.473399 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9a69275-3884-4ed0-9f8d-f5872c8645f1-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.473447 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jc49k\" (UniqueName: \"kubernetes.io/projected/c9a69275-3884-4ed0-9f8d-f5872c8645f1-kube-api-access-jc49k\") on node \"crc\" DevicePath \"\"" Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.473543 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9a69275-3884-4ed0-9f8d-f5872c8645f1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.473553 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c9a69275-3884-4ed0-9f8d-f5872c8645f1-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.748095 4558 generic.go:334] "Generic (PLEG): container finished" podID="feaaf8d6-cd55-4a27-9be8-8680bb156d10" containerID="2bc171abc2782ea95314f69f8dbfa8515200e8d4df02459df56ccc1d4940592f" exitCode=0 Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.748245 4558 generic.go:334] "Generic (PLEG): container finished" podID="feaaf8d6-cd55-4a27-9be8-8680bb156d10" containerID="886ae90e3a41affbd09898da8f0a852f6e6f37a4b3b62890323b0310f9d85238" exitCode=143 Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.748207 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.748199 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"feaaf8d6-cd55-4a27-9be8-8680bb156d10","Type":"ContainerDied","Data":"2bc171abc2782ea95314f69f8dbfa8515200e8d4df02459df56ccc1d4940592f"} Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.748492 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"feaaf8d6-cd55-4a27-9be8-8680bb156d10","Type":"ContainerDied","Data":"886ae90e3a41affbd09898da8f0a852f6e6f37a4b3b62890323b0310f9d85238"} Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.748548 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"feaaf8d6-cd55-4a27-9be8-8680bb156d10","Type":"ContainerDied","Data":"61fc00c84fe86ea84f0ff2d55c563c0af82ab786ae6ea69d1088694508acf931"} Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.748618 4558 scope.go:117] "RemoveContainer" containerID="2bc171abc2782ea95314f69f8dbfa8515200e8d4df02459df56ccc1d4940592f" Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.752711 4558 generic.go:334] "Generic (PLEG): container finished" podID="c9a69275-3884-4ed0-9f8d-f5872c8645f1" containerID="b39372a57e0ac9e6c0b3dc139c905ed1fc75c72761b68cd666ec7b02cedfd8e3" exitCode=0 Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.752800 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"c9a69275-3884-4ed0-9f8d-f5872c8645f1","Type":"ContainerDied","Data":"b39372a57e0ac9e6c0b3dc139c905ed1fc75c72761b68cd666ec7b02cedfd8e3"} Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.752872 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"c9a69275-3884-4ed0-9f8d-f5872c8645f1","Type":"ContainerDied","Data":"31c5e444b70211e77cbdbeec6edba13dd687ee7385d59fb1003c191626bdcffa"} Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.752830 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.777554 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.778475 4558 scope.go:117] "RemoveContainer" containerID="886ae90e3a41affbd09898da8f0a852f6e6f37a4b3b62890323b0310f9d85238" Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.797769 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.818753 4558 scope.go:117] "RemoveContainer" containerID="2bc171abc2782ea95314f69f8dbfa8515200e8d4df02459df56ccc1d4940592f" Jan 20 17:25:06 crc kubenswrapper[4558]: E0120 17:25:06.819091 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2bc171abc2782ea95314f69f8dbfa8515200e8d4df02459df56ccc1d4940592f\": container with ID starting with 2bc171abc2782ea95314f69f8dbfa8515200e8d4df02459df56ccc1d4940592f not found: ID does not exist" containerID="2bc171abc2782ea95314f69f8dbfa8515200e8d4df02459df56ccc1d4940592f" Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.819128 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2bc171abc2782ea95314f69f8dbfa8515200e8d4df02459df56ccc1d4940592f"} err="failed to get container status \"2bc171abc2782ea95314f69f8dbfa8515200e8d4df02459df56ccc1d4940592f\": rpc error: code = NotFound desc = could not find container \"2bc171abc2782ea95314f69f8dbfa8515200e8d4df02459df56ccc1d4940592f\": container with ID starting with 2bc171abc2782ea95314f69f8dbfa8515200e8d4df02459df56ccc1d4940592f not found: ID does not exist" Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.819155 4558 scope.go:117] "RemoveContainer" containerID="886ae90e3a41affbd09898da8f0a852f6e6f37a4b3b62890323b0310f9d85238" Jan 20 17:25:06 crc kubenswrapper[4558]: E0120 17:25:06.819684 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"886ae90e3a41affbd09898da8f0a852f6e6f37a4b3b62890323b0310f9d85238\": container with ID starting with 886ae90e3a41affbd09898da8f0a852f6e6f37a4b3b62890323b0310f9d85238 not found: ID does not exist" containerID="886ae90e3a41affbd09898da8f0a852f6e6f37a4b3b62890323b0310f9d85238" Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.819712 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"886ae90e3a41affbd09898da8f0a852f6e6f37a4b3b62890323b0310f9d85238"} err="failed to get container status \"886ae90e3a41affbd09898da8f0a852f6e6f37a4b3b62890323b0310f9d85238\": rpc error: code = NotFound desc = could not find container \"886ae90e3a41affbd09898da8f0a852f6e6f37a4b3b62890323b0310f9d85238\": container with ID starting with 886ae90e3a41affbd09898da8f0a852f6e6f37a4b3b62890323b0310f9d85238 not found: ID does not exist" Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.819727 4558 scope.go:117] "RemoveContainer" containerID="2bc171abc2782ea95314f69f8dbfa8515200e8d4df02459df56ccc1d4940592f" Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.833409 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2bc171abc2782ea95314f69f8dbfa8515200e8d4df02459df56ccc1d4940592f"} err="failed to get container status 
\"2bc171abc2782ea95314f69f8dbfa8515200e8d4df02459df56ccc1d4940592f\": rpc error: code = NotFound desc = could not find container \"2bc171abc2782ea95314f69f8dbfa8515200e8d4df02459df56ccc1d4940592f\": container with ID starting with 2bc171abc2782ea95314f69f8dbfa8515200e8d4df02459df56ccc1d4940592f not found: ID does not exist" Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.833436 4558 scope.go:117] "RemoveContainer" containerID="886ae90e3a41affbd09898da8f0a852f6e6f37a4b3b62890323b0310f9d85238" Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.834631 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"886ae90e3a41affbd09898da8f0a852f6e6f37a4b3b62890323b0310f9d85238"} err="failed to get container status \"886ae90e3a41affbd09898da8f0a852f6e6f37a4b3b62890323b0310f9d85238\": rpc error: code = NotFound desc = could not find container \"886ae90e3a41affbd09898da8f0a852f6e6f37a4b3b62890323b0310f9d85238\": container with ID starting with 886ae90e3a41affbd09898da8f0a852f6e6f37a4b3b62890323b0310f9d85238 not found: ID does not exist" Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.834677 4558 scope.go:117] "RemoveContainer" containerID="b39372a57e0ac9e6c0b3dc139c905ed1fc75c72761b68cd666ec7b02cedfd8e3" Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.839371 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.852092 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.864606 4558 scope.go:117] "RemoveContainer" containerID="467c5d921903c3ba36bed931ac826b30cbb7da5a335e570e9acb5ddbb4ad3caf" Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.870091 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:25:06 crc kubenswrapper[4558]: E0120 17:25:06.870518 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c9a69275-3884-4ed0-9f8d-f5872c8645f1" containerName="nova-api-log" Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.870536 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c9a69275-3884-4ed0-9f8d-f5872c8645f1" containerName="nova-api-log" Jan 20 17:25:06 crc kubenswrapper[4558]: E0120 17:25:06.870553 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="acd67900-69b3-485f-b378-80457d604be8" containerName="nova-manage" Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.870559 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="acd67900-69b3-485f-b378-80457d604be8" containerName="nova-manage" Jan 20 17:25:06 crc kubenswrapper[4558]: E0120 17:25:06.870579 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="feaaf8d6-cd55-4a27-9be8-8680bb156d10" containerName="nova-metadata-log" Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.870585 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="feaaf8d6-cd55-4a27-9be8-8680bb156d10" containerName="nova-metadata-log" Jan 20 17:25:06 crc kubenswrapper[4558]: E0120 17:25:06.870592 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c9a69275-3884-4ed0-9f8d-f5872c8645f1" containerName="nova-api-api" Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.870598 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c9a69275-3884-4ed0-9f8d-f5872c8645f1" containerName="nova-api-api" Jan 20 17:25:06 crc kubenswrapper[4558]: E0120 
17:25:06.870622 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="feaaf8d6-cd55-4a27-9be8-8680bb156d10" containerName="nova-metadata-metadata" Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.870628 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="feaaf8d6-cd55-4a27-9be8-8680bb156d10" containerName="nova-metadata-metadata" Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.870778 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c9a69275-3884-4ed0-9f8d-f5872c8645f1" containerName="nova-api-log" Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.870798 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="feaaf8d6-cd55-4a27-9be8-8680bb156d10" containerName="nova-metadata-metadata" Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.870809 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="feaaf8d6-cd55-4a27-9be8-8680bb156d10" containerName="nova-metadata-log" Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.870820 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="acd67900-69b3-485f-b378-80457d604be8" containerName="nova-manage" Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.870826 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c9a69275-3884-4ed0-9f8d-f5872c8645f1" containerName="nova-api-api" Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.871801 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.874887 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-metadata-internal-svc" Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.875063 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-metadata-config-data" Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.880897 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.883870 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.886358 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.886733 4558 scope.go:117] "RemoveContainer" containerID="b39372a57e0ac9e6c0b3dc139c905ed1fc75c72761b68cd666ec7b02cedfd8e3" Jan 20 17:25:06 crc kubenswrapper[4558]: E0120 17:25:06.887196 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b39372a57e0ac9e6c0b3dc139c905ed1fc75c72761b68cd666ec7b02cedfd8e3\": container with ID starting with b39372a57e0ac9e6c0b3dc139c905ed1fc75c72761b68cd666ec7b02cedfd8e3 not found: ID does not exist" containerID="b39372a57e0ac9e6c0b3dc139c905ed1fc75c72761b68cd666ec7b02cedfd8e3" Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.890372 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b39372a57e0ac9e6c0b3dc139c905ed1fc75c72761b68cd666ec7b02cedfd8e3"} err="failed to get container status \"b39372a57e0ac9e6c0b3dc139c905ed1fc75c72761b68cd666ec7b02cedfd8e3\": rpc error: code = NotFound desc = could not find container \"b39372a57e0ac9e6c0b3dc139c905ed1fc75c72761b68cd666ec7b02cedfd8e3\": container with ID starting with b39372a57e0ac9e6c0b3dc139c905ed1fc75c72761b68cd666ec7b02cedfd8e3 not found: ID does not exist" Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.890648 4558 scope.go:117] "RemoveContainer" containerID="467c5d921903c3ba36bed931ac826b30cbb7da5a335e570e9acb5ddbb4ad3caf" Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.890673 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-api-config-data" Jan 20 17:25:06 crc kubenswrapper[4558]: E0120 17:25:06.892507 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"467c5d921903c3ba36bed931ac826b30cbb7da5a335e570e9acb5ddbb4ad3caf\": container with ID starting with 467c5d921903c3ba36bed931ac826b30cbb7da5a335e570e9acb5ddbb4ad3caf not found: ID does not exist" containerID="467c5d921903c3ba36bed931ac826b30cbb7da5a335e570e9acb5ddbb4ad3caf" Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.892537 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"467c5d921903c3ba36bed931ac826b30cbb7da5a335e570e9acb5ddbb4ad3caf"} err="failed to get container status \"467c5d921903c3ba36bed931ac826b30cbb7da5a335e570e9acb5ddbb4ad3caf\": rpc error: code = NotFound desc = could not find container \"467c5d921903c3ba36bed931ac826b30cbb7da5a335e570e9acb5ddbb4ad3caf\": container with ID starting with 467c5d921903c3ba36bed931ac826b30cbb7da5a335e570e9acb5ddbb4ad3caf not found: ID does not exist" Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.894351 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.988192 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t4zpc\" (UniqueName: \"kubernetes.io/projected/17b07837-77df-4727-a6cf-b6292274c0d0-kube-api-access-t4zpc\") pod \"nova-metadata-0\" (UID: \"17b07837-77df-4727-a6cf-b6292274c0d0\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.988263 4558 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ef47776-0da5-453d-b79e-0a2f2e96a5ea-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"8ef47776-0da5-453d-b79e-0a2f2e96a5ea\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.988313 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17b07837-77df-4727-a6cf-b6292274c0d0-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"17b07837-77df-4727-a6cf-b6292274c0d0\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.988357 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17b07837-77df-4727-a6cf-b6292274c0d0-config-data\") pod \"nova-metadata-0\" (UID: \"17b07837-77df-4727-a6cf-b6292274c0d0\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.988390 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8ef47776-0da5-453d-b79e-0a2f2e96a5ea-logs\") pod \"nova-api-0\" (UID: \"8ef47776-0da5-453d-b79e-0a2f2e96a5ea\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.988458 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/17b07837-77df-4727-a6cf-b6292274c0d0-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"17b07837-77df-4727-a6cf-b6292274c0d0\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.988509 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ef47776-0da5-453d-b79e-0a2f2e96a5ea-config-data\") pod \"nova-api-0\" (UID: \"8ef47776-0da5-453d-b79e-0a2f2e96a5ea\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.988581 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/17b07837-77df-4727-a6cf-b6292274c0d0-logs\") pod \"nova-metadata-0\" (UID: \"17b07837-77df-4727-a6cf-b6292274c0d0\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:25:06 crc kubenswrapper[4558]: I0120 17:25:06.988608 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5xbr7\" (UniqueName: \"kubernetes.io/projected/8ef47776-0da5-453d-b79e-0a2f2e96a5ea-kube-api-access-5xbr7\") pod \"nova-api-0\" (UID: \"8ef47776-0da5-453d-b79e-0a2f2e96a5ea\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:25:07 crc kubenswrapper[4558]: I0120 17:25:07.090505 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17b07837-77df-4727-a6cf-b6292274c0d0-config-data\") pod \"nova-metadata-0\" (UID: \"17b07837-77df-4727-a6cf-b6292274c0d0\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:25:07 crc kubenswrapper[4558]: I0120 17:25:07.090560 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/8ef47776-0da5-453d-b79e-0a2f2e96a5ea-logs\") pod \"nova-api-0\" (UID: \"8ef47776-0da5-453d-b79e-0a2f2e96a5ea\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:25:07 crc kubenswrapper[4558]: I0120 17:25:07.090614 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/17b07837-77df-4727-a6cf-b6292274c0d0-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"17b07837-77df-4727-a6cf-b6292274c0d0\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:25:07 crc kubenswrapper[4558]: I0120 17:25:07.090638 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ef47776-0da5-453d-b79e-0a2f2e96a5ea-config-data\") pod \"nova-api-0\" (UID: \"8ef47776-0da5-453d-b79e-0a2f2e96a5ea\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:25:07 crc kubenswrapper[4558]: I0120 17:25:07.090672 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/17b07837-77df-4727-a6cf-b6292274c0d0-logs\") pod \"nova-metadata-0\" (UID: \"17b07837-77df-4727-a6cf-b6292274c0d0\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:25:07 crc kubenswrapper[4558]: I0120 17:25:07.090691 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5xbr7\" (UniqueName: \"kubernetes.io/projected/8ef47776-0da5-453d-b79e-0a2f2e96a5ea-kube-api-access-5xbr7\") pod \"nova-api-0\" (UID: \"8ef47776-0da5-453d-b79e-0a2f2e96a5ea\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:25:07 crc kubenswrapper[4558]: I0120 17:25:07.090769 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t4zpc\" (UniqueName: \"kubernetes.io/projected/17b07837-77df-4727-a6cf-b6292274c0d0-kube-api-access-t4zpc\") pod \"nova-metadata-0\" (UID: \"17b07837-77df-4727-a6cf-b6292274c0d0\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:25:07 crc kubenswrapper[4558]: I0120 17:25:07.090823 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ef47776-0da5-453d-b79e-0a2f2e96a5ea-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"8ef47776-0da5-453d-b79e-0a2f2e96a5ea\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:25:07 crc kubenswrapper[4558]: I0120 17:25:07.090852 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17b07837-77df-4727-a6cf-b6292274c0d0-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"17b07837-77df-4727-a6cf-b6292274c0d0\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:25:07 crc kubenswrapper[4558]: I0120 17:25:07.092178 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8ef47776-0da5-453d-b79e-0a2f2e96a5ea-logs\") pod \"nova-api-0\" (UID: \"8ef47776-0da5-453d-b79e-0a2f2e96a5ea\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:25:07 crc kubenswrapper[4558]: I0120 17:25:07.092484 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/17b07837-77df-4727-a6cf-b6292274c0d0-logs\") pod \"nova-metadata-0\" (UID: \"17b07837-77df-4727-a6cf-b6292274c0d0\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:25:07 crc kubenswrapper[4558]: I0120 17:25:07.102602 4558 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/17b07837-77df-4727-a6cf-b6292274c0d0-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"17b07837-77df-4727-a6cf-b6292274c0d0\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:25:07 crc kubenswrapper[4558]: I0120 17:25:07.103372 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ef47776-0da5-453d-b79e-0a2f2e96a5ea-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"8ef47776-0da5-453d-b79e-0a2f2e96a5ea\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:25:07 crc kubenswrapper[4558]: I0120 17:25:07.104462 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17b07837-77df-4727-a6cf-b6292274c0d0-config-data\") pod \"nova-metadata-0\" (UID: \"17b07837-77df-4727-a6cf-b6292274c0d0\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:25:07 crc kubenswrapper[4558]: I0120 17:25:07.104726 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17b07837-77df-4727-a6cf-b6292274c0d0-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"17b07837-77df-4727-a6cf-b6292274c0d0\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:25:07 crc kubenswrapper[4558]: I0120 17:25:07.105224 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ef47776-0da5-453d-b79e-0a2f2e96a5ea-config-data\") pod \"nova-api-0\" (UID: \"8ef47776-0da5-453d-b79e-0a2f2e96a5ea\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:25:07 crc kubenswrapper[4558]: I0120 17:25:07.109516 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t4zpc\" (UniqueName: \"kubernetes.io/projected/17b07837-77df-4727-a6cf-b6292274c0d0-kube-api-access-t4zpc\") pod \"nova-metadata-0\" (UID: \"17b07837-77df-4727-a6cf-b6292274c0d0\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:25:07 crc kubenswrapper[4558]: I0120 17:25:07.113376 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5xbr7\" (UniqueName: \"kubernetes.io/projected/8ef47776-0da5-453d-b79e-0a2f2e96a5ea-kube-api-access-5xbr7\") pod \"nova-api-0\" (UID: \"8ef47776-0da5-453d-b79e-0a2f2e96a5ea\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:25:07 crc kubenswrapper[4558]: I0120 17:25:07.199413 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:25:07 crc kubenswrapper[4558]: I0120 17:25:07.210774 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:25:07 crc kubenswrapper[4558]: I0120 17:25:07.644033 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:25:07 crc kubenswrapper[4558]: W0120 17:25:07.648673 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8ef47776_0da5_453d_b79e_0a2f2e96a5ea.slice/crio-f4725010f8ff7590cdc401e88d9fed15c826d08c1fe314ea9255a98ff6b0a108 WatchSource:0}: Error finding container f4725010f8ff7590cdc401e88d9fed15c826d08c1fe314ea9255a98ff6b0a108: Status 404 returned error can't find the container with id f4725010f8ff7590cdc401e88d9fed15c826d08c1fe314ea9255a98ff6b0a108 Jan 20 17:25:07 crc kubenswrapper[4558]: W0120 17:25:07.726621 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod17b07837_77df_4727_a6cf_b6292274c0d0.slice/crio-79db95a3f173831c627351636fb428bb1da3faf30c53b5b0624bce62e06d9090 WatchSource:0}: Error finding container 79db95a3f173831c627351636fb428bb1da3faf30c53b5b0624bce62e06d9090: Status 404 returned error can't find the container with id 79db95a3f173831c627351636fb428bb1da3faf30c53b5b0624bce62e06d9090 Jan 20 17:25:07 crc kubenswrapper[4558]: I0120 17:25:07.726676 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:25:07 crc kubenswrapper[4558]: I0120 17:25:07.771140 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"17b07837-77df-4727-a6cf-b6292274c0d0","Type":"ContainerStarted","Data":"79db95a3f173831c627351636fb428bb1da3faf30c53b5b0624bce62e06d9090"} Jan 20 17:25:07 crc kubenswrapper[4558]: I0120 17:25:07.775352 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"8ef47776-0da5-453d-b79e-0a2f2e96a5ea","Type":"ContainerStarted","Data":"f4725010f8ff7590cdc401e88d9fed15c826d08c1fe314ea9255a98ff6b0a108"} Jan 20 17:25:08 crc kubenswrapper[4558]: I0120 17:25:08.087653 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:25:08 crc kubenswrapper[4558]: I0120 17:25:08.106150 4558 scope.go:117] "RemoveContainer" containerID="a827e0be6d243551517e150283f3ed864ecaf4a9a6438dd376a0e18717e47a4e" Jan 20 17:25:08 crc kubenswrapper[4558]: I0120 17:25:08.141488 4558 scope.go:117] "RemoveContainer" containerID="9b285fb9c7ca41e6c11eae827e056e3101ae0ca582d9386a36e9e2ea6091713a" Jan 20 17:25:08 crc kubenswrapper[4558]: I0120 17:25:08.162370 4558 scope.go:117] "RemoveContainer" containerID="f4436c3df66467d4db956a7e111480f4d349ef8b096e62dc047b678d6378ae35" Jan 20 17:25:08 crc kubenswrapper[4558]: I0120 17:25:08.183553 4558 scope.go:117] "RemoveContainer" containerID="3798eba8e78b98bed61f098684e8e33e9b75e12fe73a743523793c25884ee006" Jan 20 17:25:08 crc kubenswrapper[4558]: I0120 17:25:08.214034 4558 scope.go:117] "RemoveContainer" containerID="eb9de43bb9c67ceb4188a84f8dae178f1ade4dab62bbd644fa80972fe8028a59" Jan 20 17:25:08 crc kubenswrapper[4558]: I0120 17:25:08.238596 4558 scope.go:117] "RemoveContainer" containerID="606e0dbff67da472a309954e0767fc9f44fcce3c5d73c3e05d4853ba37288e26" Jan 20 17:25:08 crc kubenswrapper[4558]: I0120 17:25:08.274098 4558 scope.go:117] "RemoveContainer" containerID="b5130f999056cd5a292684679ca6cc42bd24f28a031e1d30f86981b402f386cf" Jan 20 17:25:08 crc 
kubenswrapper[4558]: I0120 17:25:08.292585 4558 scope.go:117] "RemoveContainer" containerID="60492583629e26c3c32820e427ab609fadb431fe51ce67c01898e07bd052f7d1" Jan 20 17:25:08 crc kubenswrapper[4558]: I0120 17:25:08.316799 4558 scope.go:117] "RemoveContainer" containerID="3d05fe873d2ff712ba8633c9dc802a7a515585115de35683f8b31b901ea5991d" Jan 20 17:25:08 crc kubenswrapper[4558]: I0120 17:25:08.340430 4558 scope.go:117] "RemoveContainer" containerID="207e164549e3e5e61bf2c71ee34097728114f9cd05bc02c75ea304dc4cbd25f8" Jan 20 17:25:08 crc kubenswrapper[4558]: I0120 17:25:08.384605 4558 scope.go:117] "RemoveContainer" containerID="e7f3fec701ac7c97a615fd06b3109321d456b072a7460d87900d9cdedcaa35e1" Jan 20 17:25:08 crc kubenswrapper[4558]: I0120 17:25:08.426638 4558 scope.go:117] "RemoveContainer" containerID="b0f6f424cc892d0b7647f8fab2e8274ac9636a2d5b6f30acee2ded1f86c9766c" Jan 20 17:25:08 crc kubenswrapper[4558]: I0120 17:25:08.451814 4558 scope.go:117] "RemoveContainer" containerID="4c46659a6361af98b90de1cf221853c404529cfbd89263f8d6864cc71db115a3" Jan 20 17:25:08 crc kubenswrapper[4558]: I0120 17:25:08.578558 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c9a69275-3884-4ed0-9f8d-f5872c8645f1" path="/var/lib/kubelet/pods/c9a69275-3884-4ed0-9f8d-f5872c8645f1/volumes" Jan 20 17:25:08 crc kubenswrapper[4558]: I0120 17:25:08.579956 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="feaaf8d6-cd55-4a27-9be8-8680bb156d10" path="/var/lib/kubelet/pods/feaaf8d6-cd55-4a27-9be8-8680bb156d10/volumes" Jan 20 17:25:08 crc kubenswrapper[4558]: I0120 17:25:08.801373 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"17b07837-77df-4727-a6cf-b6292274c0d0","Type":"ContainerStarted","Data":"7fb288c38075c2b262015368efafc54f281cd8ab4780829fd44abc51f04758b6"} Jan 20 17:25:08 crc kubenswrapper[4558]: I0120 17:25:08.801437 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"17b07837-77df-4727-a6cf-b6292274c0d0","Type":"ContainerStarted","Data":"35798b1c8df375ae98753eb74e01c62741019fc00650915228809d7db4db0d40"} Jan 20 17:25:08 crc kubenswrapper[4558]: I0120 17:25:08.803528 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"8ef47776-0da5-453d-b79e-0a2f2e96a5ea","Type":"ContainerStarted","Data":"1bfe52b1403217ff2d8d27301c496265ead6571b9febf7eaed8d9641fed21809"} Jan 20 17:25:08 crc kubenswrapper[4558]: I0120 17:25:08.803559 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"8ef47776-0da5-453d-b79e-0a2f2e96a5ea","Type":"ContainerStarted","Data":"16b34a3556681b97fbea8309d05f448893bc41363e6d18ac12c841721057ed0b"} Jan 20 17:25:08 crc kubenswrapper[4558]: I0120 17:25:08.822392 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-metadata-0" podStartSLOduration=2.822379905 podStartE2EDuration="2.822379905s" podCreationTimestamp="2026-01-20 17:25:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:25:08.821895014 +0000 UTC m=+2602.582232980" watchObservedRunningTime="2026-01-20 17:25:08.822379905 +0000 UTC m=+2602.582717872" Jan 20 17:25:08 crc kubenswrapper[4558]: I0120 17:25:08.850702 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-api-0" 
podStartSLOduration=2.8506819549999998 podStartE2EDuration="2.850681955s" podCreationTimestamp="2026-01-20 17:25:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:25:08.840259169 +0000 UTC m=+2602.600597136" watchObservedRunningTime="2026-01-20 17:25:08.850681955 +0000 UTC m=+2602.611019922" Jan 20 17:25:12 crc kubenswrapper[4558]: I0120 17:25:12.200659 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:25:12 crc kubenswrapper[4558]: I0120 17:25:12.200727 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:25:17 crc kubenswrapper[4558]: I0120 17:25:17.200912 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:25:17 crc kubenswrapper[4558]: I0120 17:25:17.201417 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:25:17 crc kubenswrapper[4558]: I0120 17:25:17.211761 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:25:17 crc kubenswrapper[4558]: I0120 17:25:17.211827 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:25:18 crc kubenswrapper[4558]: I0120 17:25:18.217373 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-metadata-0" podUID="17b07837-77df-4727-a6cf-b6292274c0d0" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.1.48:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 20 17:25:18 crc kubenswrapper[4558]: I0120 17:25:18.217346 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-metadata-0" podUID="17b07837-77df-4727-a6cf-b6292274c0d0" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.1.48:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:25:18 crc kubenswrapper[4558]: I0120 17:25:18.299318 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" podUID="8ef47776-0da5-453d-b79e-0a2f2e96a5ea" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.1.49:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 20 17:25:18 crc kubenswrapper[4558]: I0120 17:25:18.299371 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" podUID="8ef47776-0da5-453d-b79e-0a2f2e96a5ea" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.1.49:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 20 17:25:26 crc kubenswrapper[4558]: I0120 17:25:26.931011 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:25:26 crc kubenswrapper[4558]: I0120 17:25:26.936847 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:25:26 crc kubenswrapper[4558]: I0120 17:25:26.991224 4558 generic.go:334] "Generic (PLEG): container finished" podID="7e35b117-b17a-4a9e-a725-a2f251147dad" containerID="dd07722809c65d0d49946dd819fc2bcdb222e47a2b85409e724ff95454a5e919" exitCode=137 Jan 20 17:25:26 crc kubenswrapper[4558]: I0120 17:25:26.991354 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"7e35b117-b17a-4a9e-a725-a2f251147dad","Type":"ContainerDied","Data":"dd07722809c65d0d49946dd819fc2bcdb222e47a2b85409e724ff95454a5e919"} Jan 20 17:25:26 crc kubenswrapper[4558]: I0120 17:25:26.991403 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"7e35b117-b17a-4a9e-a725-a2f251147dad","Type":"ContainerDied","Data":"34885a27d726a5eb97a04a2a668a89468f3f09edfccf6d578bb2fb80b31a8309"} Jan 20 17:25:26 crc kubenswrapper[4558]: I0120 17:25:26.991423 4558 scope.go:117] "RemoveContainer" containerID="dd07722809c65d0d49946dd819fc2bcdb222e47a2b85409e724ff95454a5e919" Jan 20 17:25:26 crc kubenswrapper[4558]: I0120 17:25:26.991285 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:25:26 crc kubenswrapper[4558]: I0120 17:25:26.993640 4558 generic.go:334] "Generic (PLEG): container finished" podID="e01ff5c3-46ef-43d1-b1a1-d39e1a756d2f" containerID="c9b97f96803f44a1b6adf5bf2b148738540c1bff4121e36955d84ece49d2a1a1" exitCode=137 Jan 20 17:25:26 crc kubenswrapper[4558]: I0120 17:25:26.993686 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:25:26 crc kubenswrapper[4558]: I0120 17:25:26.993696 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"e01ff5c3-46ef-43d1-b1a1-d39e1a756d2f","Type":"ContainerDied","Data":"c9b97f96803f44a1b6adf5bf2b148738540c1bff4121e36955d84ece49d2a1a1"} Jan 20 17:25:26 crc kubenswrapper[4558]: I0120 17:25:26.993723 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"e01ff5c3-46ef-43d1-b1a1-d39e1a756d2f","Type":"ContainerDied","Data":"b5093525b4442b5ea491f7f56b1a2ae6d9fcc0b25848c8f3e04195c49137d9ab"} Jan 20 17:25:27 crc kubenswrapper[4558]: I0120 17:25:27.010635 4558 scope.go:117] "RemoveContainer" containerID="dd07722809c65d0d49946dd819fc2bcdb222e47a2b85409e724ff95454a5e919" Jan 20 17:25:27 crc kubenswrapper[4558]: E0120 17:25:27.010955 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dd07722809c65d0d49946dd819fc2bcdb222e47a2b85409e724ff95454a5e919\": container with ID starting with dd07722809c65d0d49946dd819fc2bcdb222e47a2b85409e724ff95454a5e919 not found: ID does not exist" containerID="dd07722809c65d0d49946dd819fc2bcdb222e47a2b85409e724ff95454a5e919" Jan 20 17:25:27 crc kubenswrapper[4558]: I0120 17:25:27.010988 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dd07722809c65d0d49946dd819fc2bcdb222e47a2b85409e724ff95454a5e919"} err="failed to get container status \"dd07722809c65d0d49946dd819fc2bcdb222e47a2b85409e724ff95454a5e919\": rpc error: code = NotFound desc = could not find container \"dd07722809c65d0d49946dd819fc2bcdb222e47a2b85409e724ff95454a5e919\": container with ID starting 
with dd07722809c65d0d49946dd819fc2bcdb222e47a2b85409e724ff95454a5e919 not found: ID does not exist" Jan 20 17:25:27 crc kubenswrapper[4558]: I0120 17:25:27.011008 4558 scope.go:117] "RemoveContainer" containerID="c9b97f96803f44a1b6adf5bf2b148738540c1bff4121e36955d84ece49d2a1a1" Jan 20 17:25:27 crc kubenswrapper[4558]: I0120 17:25:27.035595 4558 scope.go:117] "RemoveContainer" containerID="c9b97f96803f44a1b6adf5bf2b148738540c1bff4121e36955d84ece49d2a1a1" Jan 20 17:25:27 crc kubenswrapper[4558]: E0120 17:25:27.036097 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c9b97f96803f44a1b6adf5bf2b148738540c1bff4121e36955d84ece49d2a1a1\": container with ID starting with c9b97f96803f44a1b6adf5bf2b148738540c1bff4121e36955d84ece49d2a1a1 not found: ID does not exist" containerID="c9b97f96803f44a1b6adf5bf2b148738540c1bff4121e36955d84ece49d2a1a1" Jan 20 17:25:27 crc kubenswrapper[4558]: I0120 17:25:27.036198 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c9b97f96803f44a1b6adf5bf2b148738540c1bff4121e36955d84ece49d2a1a1"} err="failed to get container status \"c9b97f96803f44a1b6adf5bf2b148738540c1bff4121e36955d84ece49d2a1a1\": rpc error: code = NotFound desc = could not find container \"c9b97f96803f44a1b6adf5bf2b148738540c1bff4121e36955d84ece49d2a1a1\": container with ID starting with c9b97f96803f44a1b6adf5bf2b148738540c1bff4121e36955d84ece49d2a1a1 not found: ID does not exist" Jan 20 17:25:27 crc kubenswrapper[4558]: I0120 17:25:27.071305 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5rcz4\" (UniqueName: \"kubernetes.io/projected/7e35b117-b17a-4a9e-a725-a2f251147dad-kube-api-access-5rcz4\") pod \"7e35b117-b17a-4a9e-a725-a2f251147dad\" (UID: \"7e35b117-b17a-4a9e-a725-a2f251147dad\") " Jan 20 17:25:27 crc kubenswrapper[4558]: I0120 17:25:27.071637 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-64vqf\" (UniqueName: \"kubernetes.io/projected/e01ff5c3-46ef-43d1-b1a1-d39e1a756d2f-kube-api-access-64vqf\") pod \"e01ff5c3-46ef-43d1-b1a1-d39e1a756d2f\" (UID: \"e01ff5c3-46ef-43d1-b1a1-d39e1a756d2f\") " Jan 20 17:25:27 crc kubenswrapper[4558]: I0120 17:25:27.071871 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e35b117-b17a-4a9e-a725-a2f251147dad-combined-ca-bundle\") pod \"7e35b117-b17a-4a9e-a725-a2f251147dad\" (UID: \"7e35b117-b17a-4a9e-a725-a2f251147dad\") " Jan 20 17:25:27 crc kubenswrapper[4558]: I0120 17:25:27.071908 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e01ff5c3-46ef-43d1-b1a1-d39e1a756d2f-combined-ca-bundle\") pod \"e01ff5c3-46ef-43d1-b1a1-d39e1a756d2f\" (UID: \"e01ff5c3-46ef-43d1-b1a1-d39e1a756d2f\") " Jan 20 17:25:27 crc kubenswrapper[4558]: I0120 17:25:27.071946 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e01ff5c3-46ef-43d1-b1a1-d39e1a756d2f-config-data\") pod \"e01ff5c3-46ef-43d1-b1a1-d39e1a756d2f\" (UID: \"e01ff5c3-46ef-43d1-b1a1-d39e1a756d2f\") " Jan 20 17:25:27 crc kubenswrapper[4558]: I0120 17:25:27.072061 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/7e35b117-b17a-4a9e-a725-a2f251147dad-config-data\") pod \"7e35b117-b17a-4a9e-a725-a2f251147dad\" (UID: \"7e35b117-b17a-4a9e-a725-a2f251147dad\") " Jan 20 17:25:27 crc kubenswrapper[4558]: I0120 17:25:27.078806 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7e35b117-b17a-4a9e-a725-a2f251147dad-kube-api-access-5rcz4" (OuterVolumeSpecName: "kube-api-access-5rcz4") pod "7e35b117-b17a-4a9e-a725-a2f251147dad" (UID: "7e35b117-b17a-4a9e-a725-a2f251147dad"). InnerVolumeSpecName "kube-api-access-5rcz4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:25:27 crc kubenswrapper[4558]: I0120 17:25:27.079811 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e01ff5c3-46ef-43d1-b1a1-d39e1a756d2f-kube-api-access-64vqf" (OuterVolumeSpecName: "kube-api-access-64vqf") pod "e01ff5c3-46ef-43d1-b1a1-d39e1a756d2f" (UID: "e01ff5c3-46ef-43d1-b1a1-d39e1a756d2f"). InnerVolumeSpecName "kube-api-access-64vqf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:25:27 crc kubenswrapper[4558]: I0120 17:25:27.099365 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e01ff5c3-46ef-43d1-b1a1-d39e1a756d2f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e01ff5c3-46ef-43d1-b1a1-d39e1a756d2f" (UID: "e01ff5c3-46ef-43d1-b1a1-d39e1a756d2f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:25:27 crc kubenswrapper[4558]: I0120 17:25:27.101106 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e01ff5c3-46ef-43d1-b1a1-d39e1a756d2f-config-data" (OuterVolumeSpecName: "config-data") pod "e01ff5c3-46ef-43d1-b1a1-d39e1a756d2f" (UID: "e01ff5c3-46ef-43d1-b1a1-d39e1a756d2f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:25:27 crc kubenswrapper[4558]: I0120 17:25:27.101806 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e35b117-b17a-4a9e-a725-a2f251147dad-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7e35b117-b17a-4a9e-a725-a2f251147dad" (UID: "7e35b117-b17a-4a9e-a725-a2f251147dad"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:25:27 crc kubenswrapper[4558]: I0120 17:25:27.103512 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e35b117-b17a-4a9e-a725-a2f251147dad-config-data" (OuterVolumeSpecName: "config-data") pod "7e35b117-b17a-4a9e-a725-a2f251147dad" (UID: "7e35b117-b17a-4a9e-a725-a2f251147dad"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:25:27 crc kubenswrapper[4558]: I0120 17:25:27.176316 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e01ff5c3-46ef-43d1-b1a1-d39e1a756d2f-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:25:27 crc kubenswrapper[4558]: I0120 17:25:27.176352 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e35b117-b17a-4a9e-a725-a2f251147dad-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:25:27 crc kubenswrapper[4558]: I0120 17:25:27.176367 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5rcz4\" (UniqueName: \"kubernetes.io/projected/7e35b117-b17a-4a9e-a725-a2f251147dad-kube-api-access-5rcz4\") on node \"crc\" DevicePath \"\"" Jan 20 17:25:27 crc kubenswrapper[4558]: I0120 17:25:27.176383 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-64vqf\" (UniqueName: \"kubernetes.io/projected/e01ff5c3-46ef-43d1-b1a1-d39e1a756d2f-kube-api-access-64vqf\") on node \"crc\" DevicePath \"\"" Jan 20 17:25:27 crc kubenswrapper[4558]: I0120 17:25:27.176396 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e35b117-b17a-4a9e-a725-a2f251147dad-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:25:27 crc kubenswrapper[4558]: I0120 17:25:27.176408 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e01ff5c3-46ef-43d1-b1a1-d39e1a756d2f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:25:27 crc kubenswrapper[4558]: I0120 17:25:27.205890 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:25:27 crc kubenswrapper[4558]: I0120 17:25:27.206742 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:25:27 crc kubenswrapper[4558]: I0120 17:25:27.210652 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:25:27 crc kubenswrapper[4558]: I0120 17:25:27.216631 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:25:27 crc kubenswrapper[4558]: I0120 17:25:27.217108 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:25:27 crc kubenswrapper[4558]: I0120 17:25:27.219550 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:25:27 crc kubenswrapper[4558]: I0120 17:25:27.219826 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:25:27 crc kubenswrapper[4558]: I0120 17:25:27.330467 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:25:27 crc kubenswrapper[4558]: I0120 17:25:27.330567 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 17:25:27 crc kubenswrapper[4558]: I0120 17:25:27.330610 4558 prober.go:107] "Probe failed" 
probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 17:25:27 crc kubenswrapper[4558]: I0120 17:25:27.334433 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:25:27 crc kubenswrapper[4558]: I0120 17:25:27.340502 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:25:27 crc kubenswrapper[4558]: I0120 17:25:27.349209 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:25:27 crc kubenswrapper[4558]: I0120 17:25:27.352605 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:25:27 crc kubenswrapper[4558]: E0120 17:25:27.353189 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e01ff5c3-46ef-43d1-b1a1-d39e1a756d2f" containerName="nova-scheduler-scheduler" Jan 20 17:25:27 crc kubenswrapper[4558]: I0120 17:25:27.353212 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e01ff5c3-46ef-43d1-b1a1-d39e1a756d2f" containerName="nova-scheduler-scheduler" Jan 20 17:25:27 crc kubenswrapper[4558]: E0120 17:25:27.353248 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e35b117-b17a-4a9e-a725-a2f251147dad" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:25:27 crc kubenswrapper[4558]: I0120 17:25:27.353256 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e35b117-b17a-4a9e-a725-a2f251147dad" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:25:27 crc kubenswrapper[4558]: I0120 17:25:27.353515 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="7e35b117-b17a-4a9e-a725-a2f251147dad" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:25:27 crc kubenswrapper[4558]: I0120 17:25:27.353538 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e01ff5c3-46ef-43d1-b1a1-d39e1a756d2f" containerName="nova-scheduler-scheduler" Jan 20 17:25:27 crc kubenswrapper[4558]: I0120 17:25:27.354374 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:25:27 crc kubenswrapper[4558]: I0120 17:25:27.357706 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-novncproxy-cell1-vencrypt" Jan 20 17:25:27 crc kubenswrapper[4558]: I0120 17:25:27.357795 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-novncproxy-config-data" Jan 20 17:25:27 crc kubenswrapper[4558]: I0120 17:25:27.357868 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-novncproxy-cell1-public-svc" Jan 20 17:25:27 crc kubenswrapper[4558]: I0120 17:25:27.374836 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:25:27 crc kubenswrapper[4558]: I0120 17:25:27.377792 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:25:27 crc kubenswrapper[4558]: I0120 17:25:27.383765 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-scheduler-config-data" Jan 20 17:25:27 crc kubenswrapper[4558]: I0120 17:25:27.395787 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2j8pm\" (UniqueName: \"kubernetes.io/projected/2587f606-361f-4462-bef1-e1ec4e95f012-kube-api-access-2j8pm\") pod \"nova-cell1-novncproxy-0\" (UID: \"2587f606-361f-4462-bef1-e1ec4e95f012\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:25:27 crc kubenswrapper[4558]: I0120 17:25:27.395882 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/2587f606-361f-4462-bef1-e1ec4e95f012-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"2587f606-361f-4462-bef1-e1ec4e95f012\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:25:27 crc kubenswrapper[4558]: I0120 17:25:27.395915 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2587f606-361f-4462-bef1-e1ec4e95f012-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"2587f606-361f-4462-bef1-e1ec4e95f012\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:25:27 crc kubenswrapper[4558]: I0120 17:25:27.396025 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8hrp8\" (UniqueName: \"kubernetes.io/projected/fd2955dc-03c4-4aeb-abb9-354d7ea573d8-kube-api-access-8hrp8\") pod \"nova-scheduler-0\" (UID: \"fd2955dc-03c4-4aeb-abb9-354d7ea573d8\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:25:27 crc kubenswrapper[4558]: I0120 17:25:27.396216 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2587f606-361f-4462-bef1-e1ec4e95f012-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"2587f606-361f-4462-bef1-e1ec4e95f012\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:25:27 crc kubenswrapper[4558]: I0120 17:25:27.396258 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd2955dc-03c4-4aeb-abb9-354d7ea573d8-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"fd2955dc-03c4-4aeb-abb9-354d7ea573d8\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:25:27 crc kubenswrapper[4558]: I0120 17:25:27.396288 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd2955dc-03c4-4aeb-abb9-354d7ea573d8-config-data\") pod \"nova-scheduler-0\" (UID: \"fd2955dc-03c4-4aeb-abb9-354d7ea573d8\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:25:27 crc kubenswrapper[4558]: I0120 17:25:27.396321 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/2587f606-361f-4462-bef1-e1ec4e95f012-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"2587f606-361f-4462-bef1-e1ec4e95f012\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:25:27 crc 
kubenswrapper[4558]: I0120 17:25:27.414547 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:25:27 crc kubenswrapper[4558]: I0120 17:25:27.424222 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:25:27 crc kubenswrapper[4558]: I0120 17:25:27.498455 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2587f606-361f-4462-bef1-e1ec4e95f012-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"2587f606-361f-4462-bef1-e1ec4e95f012\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:25:27 crc kubenswrapper[4558]: I0120 17:25:27.498564 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd2955dc-03c4-4aeb-abb9-354d7ea573d8-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"fd2955dc-03c4-4aeb-abb9-354d7ea573d8\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:25:27 crc kubenswrapper[4558]: I0120 17:25:27.499255 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd2955dc-03c4-4aeb-abb9-354d7ea573d8-config-data\") pod \"nova-scheduler-0\" (UID: \"fd2955dc-03c4-4aeb-abb9-354d7ea573d8\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:25:27 crc kubenswrapper[4558]: I0120 17:25:27.499402 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/2587f606-361f-4462-bef1-e1ec4e95f012-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"2587f606-361f-4462-bef1-e1ec4e95f012\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:25:27 crc kubenswrapper[4558]: I0120 17:25:27.499592 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2j8pm\" (UniqueName: \"kubernetes.io/projected/2587f606-361f-4462-bef1-e1ec4e95f012-kube-api-access-2j8pm\") pod \"nova-cell1-novncproxy-0\" (UID: \"2587f606-361f-4462-bef1-e1ec4e95f012\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:25:27 crc kubenswrapper[4558]: I0120 17:25:27.499705 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/2587f606-361f-4462-bef1-e1ec4e95f012-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"2587f606-361f-4462-bef1-e1ec4e95f012\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:25:27 crc kubenswrapper[4558]: I0120 17:25:27.499744 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2587f606-361f-4462-bef1-e1ec4e95f012-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"2587f606-361f-4462-bef1-e1ec4e95f012\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:25:27 crc kubenswrapper[4558]: I0120 17:25:27.499971 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8hrp8\" (UniqueName: \"kubernetes.io/projected/fd2955dc-03c4-4aeb-abb9-354d7ea573d8-kube-api-access-8hrp8\") pod \"nova-scheduler-0\" (UID: \"fd2955dc-03c4-4aeb-abb9-354d7ea573d8\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:25:27 crc kubenswrapper[4558]: I0120 17:25:27.504196 4558 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/2587f606-361f-4462-bef1-e1ec4e95f012-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"2587f606-361f-4462-bef1-e1ec4e95f012\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:25:27 crc kubenswrapper[4558]: I0120 17:25:27.504454 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/2587f606-361f-4462-bef1-e1ec4e95f012-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"2587f606-361f-4462-bef1-e1ec4e95f012\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:25:27 crc kubenswrapper[4558]: I0120 17:25:27.505146 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2587f606-361f-4462-bef1-e1ec4e95f012-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"2587f606-361f-4462-bef1-e1ec4e95f012\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:25:27 crc kubenswrapper[4558]: I0120 17:25:27.505253 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd2955dc-03c4-4aeb-abb9-354d7ea573d8-config-data\") pod \"nova-scheduler-0\" (UID: \"fd2955dc-03c4-4aeb-abb9-354d7ea573d8\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:25:27 crc kubenswrapper[4558]: I0120 17:25:27.505932 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd2955dc-03c4-4aeb-abb9-354d7ea573d8-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"fd2955dc-03c4-4aeb-abb9-354d7ea573d8\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:25:27 crc kubenswrapper[4558]: I0120 17:25:27.506371 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2587f606-361f-4462-bef1-e1ec4e95f012-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"2587f606-361f-4462-bef1-e1ec4e95f012\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:25:27 crc kubenswrapper[4558]: I0120 17:25:27.514714 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2j8pm\" (UniqueName: \"kubernetes.io/projected/2587f606-361f-4462-bef1-e1ec4e95f012-kube-api-access-2j8pm\") pod \"nova-cell1-novncproxy-0\" (UID: \"2587f606-361f-4462-bef1-e1ec4e95f012\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:25:27 crc kubenswrapper[4558]: I0120 17:25:27.515159 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8hrp8\" (UniqueName: \"kubernetes.io/projected/fd2955dc-03c4-4aeb-abb9-354d7ea573d8-kube-api-access-8hrp8\") pod \"nova-scheduler-0\" (UID: \"fd2955dc-03c4-4aeb-abb9-354d7ea573d8\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:25:27 crc kubenswrapper[4558]: I0120 17:25:27.681628 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:25:27 crc kubenswrapper[4558]: I0120 17:25:27.701147 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:25:28 crc kubenswrapper[4558]: I0120 17:25:28.010979 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:25:28 crc kubenswrapper[4558]: I0120 17:25:28.015748 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:25:28 crc kubenswrapper[4558]: I0120 17:25:28.016118 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:25:28 crc kubenswrapper[4558]: I0120 17:25:28.127929 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:25:28 crc kubenswrapper[4558]: I0120 17:25:28.186062 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:25:28 crc kubenswrapper[4558]: I0120 17:25:28.575699 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7e35b117-b17a-4a9e-a725-a2f251147dad" path="/var/lib/kubelet/pods/7e35b117-b17a-4a9e-a725-a2f251147dad/volumes" Jan 20 17:25:28 crc kubenswrapper[4558]: I0120 17:25:28.576244 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e01ff5c3-46ef-43d1-b1a1-d39e1a756d2f" path="/var/lib/kubelet/pods/e01ff5c3-46ef-43d1-b1a1-d39e1a756d2f/volumes" Jan 20 17:25:29 crc kubenswrapper[4558]: I0120 17:25:29.018689 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"2587f606-361f-4462-bef1-e1ec4e95f012","Type":"ContainerStarted","Data":"fc1cf5d826a15f0f792f4f5bcddbaf158f40dbe461e000bce68e8580a359d758"} Jan 20 17:25:29 crc kubenswrapper[4558]: I0120 17:25:29.019033 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"2587f606-361f-4462-bef1-e1ec4e95f012","Type":"ContainerStarted","Data":"e94fc71558de5951972ae0ec24b9a045519ba50197ac1c002ba634ea2588c91d"} Jan 20 17:25:29 crc kubenswrapper[4558]: I0120 17:25:29.020646 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"fd2955dc-03c4-4aeb-abb9-354d7ea573d8","Type":"ContainerStarted","Data":"b6cff0a928f37d752a1c2df4781f1ea77214201b025c53a47a6c7c72c7398f4e"} Jan 20 17:25:29 crc kubenswrapper[4558]: I0120 17:25:29.020701 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"fd2955dc-03c4-4aeb-abb9-354d7ea573d8","Type":"ContainerStarted","Data":"349ddcabc3471266fa4f8a2d50a9a8d95b144b1ecc812bfc91c45a684b0cf0c1"} Jan 20 17:25:29 crc kubenswrapper[4558]: I0120 17:25:29.046478 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" podStartSLOduration=2.046459453 podStartE2EDuration="2.046459453s" podCreationTimestamp="2026-01-20 17:25:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:25:29.037941008 +0000 UTC m=+2622.798278975" watchObservedRunningTime="2026-01-20 17:25:29.046459453 +0000 UTC m=+2622.806797420" Jan 20 17:25:29 crc kubenswrapper[4558]: I0120 17:25:29.059254 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-scheduler-0" podStartSLOduration=2.059240693 podStartE2EDuration="2.059240693s" 
podCreationTimestamp="2026-01-20 17:25:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:25:29.056650123 +0000 UTC m=+2622.816988090" watchObservedRunningTime="2026-01-20 17:25:29.059240693 +0000 UTC m=+2622.819578660" Jan 20 17:25:29 crc kubenswrapper[4558]: I0120 17:25:29.269813 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:25:30 crc kubenswrapper[4558]: I0120 17:25:30.297484 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:25:30 crc kubenswrapper[4558]: I0120 17:25:30.297745 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="d3a0ace5-7ba6-400c-accb-e9934117e5da" containerName="ceilometer-central-agent" containerID="cri-o://8f83799cf5630ce543cd9870bcb19a5209758941174c49ea050d72c3b9117819" gracePeriod=30 Jan 20 17:25:30 crc kubenswrapper[4558]: I0120 17:25:30.297801 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="d3a0ace5-7ba6-400c-accb-e9934117e5da" containerName="proxy-httpd" containerID="cri-o://3a8a63e1c605d09fa25d3af1eceb1336b0f46de98660562f8ed078f5456366e0" gracePeriod=30 Jan 20 17:25:30 crc kubenswrapper[4558]: I0120 17:25:30.297859 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="d3a0ace5-7ba6-400c-accb-e9934117e5da" containerName="sg-core" containerID="cri-o://5a82e36836d32566689eeb37c86b42a4bed4d9a1a58decc778ba71f69e30da77" gracePeriod=30 Jan 20 17:25:30 crc kubenswrapper[4558]: I0120 17:25:30.297903 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="d3a0ace5-7ba6-400c-accb-e9934117e5da" containerName="ceilometer-notification-agent" containerID="cri-o://3952ef2fb2d9911166bfbabed3b129a170e0cfc14d11eef5dad43318dfd2ec4a" gracePeriod=30 Jan 20 17:25:30 crc kubenswrapper[4558]: I0120 17:25:30.706558 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:25:31 crc kubenswrapper[4558]: I0120 17:25:31.059105 4558 generic.go:334] "Generic (PLEG): container finished" podID="d3a0ace5-7ba6-400c-accb-e9934117e5da" containerID="3a8a63e1c605d09fa25d3af1eceb1336b0f46de98660562f8ed078f5456366e0" exitCode=0 Jan 20 17:25:31 crc kubenswrapper[4558]: I0120 17:25:31.059134 4558 generic.go:334] "Generic (PLEG): container finished" podID="d3a0ace5-7ba6-400c-accb-e9934117e5da" containerID="5a82e36836d32566689eeb37c86b42a4bed4d9a1a58decc778ba71f69e30da77" exitCode=2 Jan 20 17:25:31 crc kubenswrapper[4558]: I0120 17:25:31.059145 4558 generic.go:334] "Generic (PLEG): container finished" podID="d3a0ace5-7ba6-400c-accb-e9934117e5da" containerID="8f83799cf5630ce543cd9870bcb19a5209758941174c49ea050d72c3b9117819" exitCode=0 Jan 20 17:25:31 crc kubenswrapper[4558]: I0120 17:25:31.059190 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"d3a0ace5-7ba6-400c-accb-e9934117e5da","Type":"ContainerDied","Data":"3a8a63e1c605d09fa25d3af1eceb1336b0f46de98660562f8ed078f5456366e0"} Jan 20 17:25:31 crc kubenswrapper[4558]: I0120 17:25:31.059239 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" 
event={"ID":"d3a0ace5-7ba6-400c-accb-e9934117e5da","Type":"ContainerDied","Data":"5a82e36836d32566689eeb37c86b42a4bed4d9a1a58decc778ba71f69e30da77"} Jan 20 17:25:31 crc kubenswrapper[4558]: I0120 17:25:31.059251 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"d3a0ace5-7ba6-400c-accb-e9934117e5da","Type":"ContainerDied","Data":"8f83799cf5630ce543cd9870bcb19a5209758941174c49ea050d72c3b9117819"} Jan 20 17:25:31 crc kubenswrapper[4558]: I0120 17:25:31.059379 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="8ef47776-0da5-453d-b79e-0a2f2e96a5ea" containerName="nova-api-log" containerID="cri-o://16b34a3556681b97fbea8309d05f448893bc41363e6d18ac12c841721057ed0b" gracePeriod=30 Jan 20 17:25:31 crc kubenswrapper[4558]: I0120 17:25:31.059446 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="8ef47776-0da5-453d-b79e-0a2f2e96a5ea" containerName="nova-api-api" containerID="cri-o://1bfe52b1403217ff2d8d27301c496265ead6571b9febf7eaed8d9641fed21809" gracePeriod=30 Jan 20 17:25:31 crc kubenswrapper[4558]: I0120 17:25:31.768822 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:25:31 crc kubenswrapper[4558]: I0120 17:25:31.779064 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3a0ace5-7ba6-400c-accb-e9934117e5da-combined-ca-bundle\") pod \"d3a0ace5-7ba6-400c-accb-e9934117e5da\" (UID: \"d3a0ace5-7ba6-400c-accb-e9934117e5da\") " Jan 20 17:25:31 crc kubenswrapper[4558]: I0120 17:25:31.779099 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d3a0ace5-7ba6-400c-accb-e9934117e5da-sg-core-conf-yaml\") pod \"d3a0ace5-7ba6-400c-accb-e9934117e5da\" (UID: \"d3a0ace5-7ba6-400c-accb-e9934117e5da\") " Jan 20 17:25:31 crc kubenswrapper[4558]: I0120 17:25:31.779210 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d3a0ace5-7ba6-400c-accb-e9934117e5da-log-httpd\") pod \"d3a0ace5-7ba6-400c-accb-e9934117e5da\" (UID: \"d3a0ace5-7ba6-400c-accb-e9934117e5da\") " Jan 20 17:25:31 crc kubenswrapper[4558]: I0120 17:25:31.779265 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3a0ace5-7ba6-400c-accb-e9934117e5da-config-data\") pod \"d3a0ace5-7ba6-400c-accb-e9934117e5da\" (UID: \"d3a0ace5-7ba6-400c-accb-e9934117e5da\") " Jan 20 17:25:31 crc kubenswrapper[4558]: I0120 17:25:31.779410 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d3a0ace5-7ba6-400c-accb-e9934117e5da-scripts\") pod \"d3a0ace5-7ba6-400c-accb-e9934117e5da\" (UID: \"d3a0ace5-7ba6-400c-accb-e9934117e5da\") " Jan 20 17:25:31 crc kubenswrapper[4558]: I0120 17:25:31.779444 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5m4vv\" (UniqueName: \"kubernetes.io/projected/d3a0ace5-7ba6-400c-accb-e9934117e5da-kube-api-access-5m4vv\") pod \"d3a0ace5-7ba6-400c-accb-e9934117e5da\" (UID: \"d3a0ace5-7ba6-400c-accb-e9934117e5da\") " Jan 20 17:25:31 crc kubenswrapper[4558]: I0120 17:25:31.779505 4558 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d3a0ace5-7ba6-400c-accb-e9934117e5da-run-httpd\") pod \"d3a0ace5-7ba6-400c-accb-e9934117e5da\" (UID: \"d3a0ace5-7ba6-400c-accb-e9934117e5da\") " Jan 20 17:25:31 crc kubenswrapper[4558]: I0120 17:25:31.779867 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d3a0ace5-7ba6-400c-accb-e9934117e5da-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "d3a0ace5-7ba6-400c-accb-e9934117e5da" (UID: "d3a0ace5-7ba6-400c-accb-e9934117e5da"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:25:31 crc kubenswrapper[4558]: I0120 17:25:31.779955 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d3a0ace5-7ba6-400c-accb-e9934117e5da-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "d3a0ace5-7ba6-400c-accb-e9934117e5da" (UID: "d3a0ace5-7ba6-400c-accb-e9934117e5da"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:25:31 crc kubenswrapper[4558]: I0120 17:25:31.780480 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d3a0ace5-7ba6-400c-accb-e9934117e5da-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:25:31 crc kubenswrapper[4558]: I0120 17:25:31.780501 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d3a0ace5-7ba6-400c-accb-e9934117e5da-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:25:31 crc kubenswrapper[4558]: I0120 17:25:31.798357 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d3a0ace5-7ba6-400c-accb-e9934117e5da-scripts" (OuterVolumeSpecName: "scripts") pod "d3a0ace5-7ba6-400c-accb-e9934117e5da" (UID: "d3a0ace5-7ba6-400c-accb-e9934117e5da"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:25:31 crc kubenswrapper[4558]: I0120 17:25:31.798398 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d3a0ace5-7ba6-400c-accb-e9934117e5da-kube-api-access-5m4vv" (OuterVolumeSpecName: "kube-api-access-5m4vv") pod "d3a0ace5-7ba6-400c-accb-e9934117e5da" (UID: "d3a0ace5-7ba6-400c-accb-e9934117e5da"). InnerVolumeSpecName "kube-api-access-5m4vv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:25:31 crc kubenswrapper[4558]: I0120 17:25:31.817104 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d3a0ace5-7ba6-400c-accb-e9934117e5da-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "d3a0ace5-7ba6-400c-accb-e9934117e5da" (UID: "d3a0ace5-7ba6-400c-accb-e9934117e5da"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:25:31 crc kubenswrapper[4558]: I0120 17:25:31.854064 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d3a0ace5-7ba6-400c-accb-e9934117e5da-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d3a0ace5-7ba6-400c-accb-e9934117e5da" (UID: "d3a0ace5-7ba6-400c-accb-e9934117e5da"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:25:31 crc kubenswrapper[4558]: I0120 17:25:31.878432 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d3a0ace5-7ba6-400c-accb-e9934117e5da-config-data" (OuterVolumeSpecName: "config-data") pod "d3a0ace5-7ba6-400c-accb-e9934117e5da" (UID: "d3a0ace5-7ba6-400c-accb-e9934117e5da"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:25:31 crc kubenswrapper[4558]: I0120 17:25:31.883277 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3a0ace5-7ba6-400c-accb-e9934117e5da-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:25:31 crc kubenswrapper[4558]: I0120 17:25:31.883306 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d3a0ace5-7ba6-400c-accb-e9934117e5da-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:25:31 crc kubenswrapper[4558]: I0120 17:25:31.883319 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3a0ace5-7ba6-400c-accb-e9934117e5da-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:25:31 crc kubenswrapper[4558]: I0120 17:25:31.883329 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d3a0ace5-7ba6-400c-accb-e9934117e5da-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:25:31 crc kubenswrapper[4558]: I0120 17:25:31.883341 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5m4vv\" (UniqueName: \"kubernetes.io/projected/d3a0ace5-7ba6-400c-accb-e9934117e5da-kube-api-access-5m4vv\") on node \"crc\" DevicePath \"\"" Jan 20 17:25:32 crc kubenswrapper[4558]: I0120 17:25:32.074788 4558 generic.go:334] "Generic (PLEG): container finished" podID="d3a0ace5-7ba6-400c-accb-e9934117e5da" containerID="3952ef2fb2d9911166bfbabed3b129a170e0cfc14d11eef5dad43318dfd2ec4a" exitCode=0 Jan 20 17:25:32 crc kubenswrapper[4558]: I0120 17:25:32.074858 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"d3a0ace5-7ba6-400c-accb-e9934117e5da","Type":"ContainerDied","Data":"3952ef2fb2d9911166bfbabed3b129a170e0cfc14d11eef5dad43318dfd2ec4a"} Jan 20 17:25:32 crc kubenswrapper[4558]: I0120 17:25:32.074892 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"d3a0ace5-7ba6-400c-accb-e9934117e5da","Type":"ContainerDied","Data":"495e467f44dbe9a0841bbe757638899d42845ca73553e02263a2888b555f0df8"} Jan 20 17:25:32 crc kubenswrapper[4558]: I0120 17:25:32.074912 4558 scope.go:117] "RemoveContainer" containerID="3a8a63e1c605d09fa25d3af1eceb1336b0f46de98660562f8ed078f5456366e0" Jan 20 17:25:32 crc kubenswrapper[4558]: I0120 17:25:32.075047 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:25:32 crc kubenswrapper[4558]: I0120 17:25:32.085186 4558 generic.go:334] "Generic (PLEG): container finished" podID="8ef47776-0da5-453d-b79e-0a2f2e96a5ea" containerID="16b34a3556681b97fbea8309d05f448893bc41363e6d18ac12c841721057ed0b" exitCode=143 Jan 20 17:25:32 crc kubenswrapper[4558]: I0120 17:25:32.085230 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"8ef47776-0da5-453d-b79e-0a2f2e96a5ea","Type":"ContainerDied","Data":"16b34a3556681b97fbea8309d05f448893bc41363e6d18ac12c841721057ed0b"} Jan 20 17:25:32 crc kubenswrapper[4558]: I0120 17:25:32.105987 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:25:32 crc kubenswrapper[4558]: I0120 17:25:32.114916 4558 scope.go:117] "RemoveContainer" containerID="5a82e36836d32566689eeb37c86b42a4bed4d9a1a58decc778ba71f69e30da77" Jan 20 17:25:32 crc kubenswrapper[4558]: I0120 17:25:32.119843 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:25:32 crc kubenswrapper[4558]: I0120 17:25:32.138986 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:25:32 crc kubenswrapper[4558]: E0120 17:25:32.139501 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3a0ace5-7ba6-400c-accb-e9934117e5da" containerName="ceilometer-central-agent" Jan 20 17:25:32 crc kubenswrapper[4558]: I0120 17:25:32.139518 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3a0ace5-7ba6-400c-accb-e9934117e5da" containerName="ceilometer-central-agent" Jan 20 17:25:32 crc kubenswrapper[4558]: E0120 17:25:32.139535 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3a0ace5-7ba6-400c-accb-e9934117e5da" containerName="proxy-httpd" Jan 20 17:25:32 crc kubenswrapper[4558]: I0120 17:25:32.139544 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3a0ace5-7ba6-400c-accb-e9934117e5da" containerName="proxy-httpd" Jan 20 17:25:32 crc kubenswrapper[4558]: E0120 17:25:32.139564 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3a0ace5-7ba6-400c-accb-e9934117e5da" containerName="ceilometer-notification-agent" Jan 20 17:25:32 crc kubenswrapper[4558]: I0120 17:25:32.139578 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3a0ace5-7ba6-400c-accb-e9934117e5da" containerName="ceilometer-notification-agent" Jan 20 17:25:32 crc kubenswrapper[4558]: E0120 17:25:32.139597 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3a0ace5-7ba6-400c-accb-e9934117e5da" containerName="sg-core" Jan 20 17:25:32 crc kubenswrapper[4558]: I0120 17:25:32.139603 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3a0ace5-7ba6-400c-accb-e9934117e5da" containerName="sg-core" Jan 20 17:25:32 crc kubenswrapper[4558]: I0120 17:25:32.139791 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d3a0ace5-7ba6-400c-accb-e9934117e5da" containerName="proxy-httpd" Jan 20 17:25:32 crc kubenswrapper[4558]: I0120 17:25:32.139801 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d3a0ace5-7ba6-400c-accb-e9934117e5da" containerName="ceilometer-notification-agent" Jan 20 17:25:32 crc kubenswrapper[4558]: I0120 17:25:32.139824 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d3a0ace5-7ba6-400c-accb-e9934117e5da" containerName="ceilometer-central-agent" Jan 20 17:25:32 crc 
kubenswrapper[4558]: I0120 17:25:32.139834 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d3a0ace5-7ba6-400c-accb-e9934117e5da" containerName="sg-core" Jan 20 17:25:32 crc kubenswrapper[4558]: I0120 17:25:32.141633 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:25:32 crc kubenswrapper[4558]: I0120 17:25:32.146585 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:25:32 crc kubenswrapper[4558]: I0120 17:25:32.146661 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:25:32 crc kubenswrapper[4558]: I0120 17:25:32.146900 4558 scope.go:117] "RemoveContainer" containerID="3952ef2fb2d9911166bfbabed3b129a170e0cfc14d11eef5dad43318dfd2ec4a" Jan 20 17:25:32 crc kubenswrapper[4558]: I0120 17:25:32.151911 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:25:32 crc kubenswrapper[4558]: I0120 17:25:32.177024 4558 scope.go:117] "RemoveContainer" containerID="8f83799cf5630ce543cd9870bcb19a5209758941174c49ea050d72c3b9117819" Jan 20 17:25:32 crc kubenswrapper[4558]: I0120 17:25:32.189322 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dps67\" (UniqueName: \"kubernetes.io/projected/a735e320-0f1f-482a-aacf-4ac8dd798b47-kube-api-access-dps67\") pod \"ceilometer-0\" (UID: \"a735e320-0f1f-482a-aacf-4ac8dd798b47\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:25:32 crc kubenswrapper[4558]: I0120 17:25:32.189464 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a735e320-0f1f-482a-aacf-4ac8dd798b47-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a735e320-0f1f-482a-aacf-4ac8dd798b47\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:25:32 crc kubenswrapper[4558]: I0120 17:25:32.189500 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a735e320-0f1f-482a-aacf-4ac8dd798b47-run-httpd\") pod \"ceilometer-0\" (UID: \"a735e320-0f1f-482a-aacf-4ac8dd798b47\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:25:32 crc kubenswrapper[4558]: I0120 17:25:32.189526 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a735e320-0f1f-482a-aacf-4ac8dd798b47-scripts\") pod \"ceilometer-0\" (UID: \"a735e320-0f1f-482a-aacf-4ac8dd798b47\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:25:32 crc kubenswrapper[4558]: I0120 17:25:32.189550 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a735e320-0f1f-482a-aacf-4ac8dd798b47-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a735e320-0f1f-482a-aacf-4ac8dd798b47\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:25:32 crc kubenswrapper[4558]: I0120 17:25:32.189675 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a735e320-0f1f-482a-aacf-4ac8dd798b47-log-httpd\") pod \"ceilometer-0\" (UID: \"a735e320-0f1f-482a-aacf-4ac8dd798b47\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 
17:25:32 crc kubenswrapper[4558]: I0120 17:25:32.189706 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a735e320-0f1f-482a-aacf-4ac8dd798b47-config-data\") pod \"ceilometer-0\" (UID: \"a735e320-0f1f-482a-aacf-4ac8dd798b47\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:25:32 crc kubenswrapper[4558]: I0120 17:25:32.206001 4558 scope.go:117] "RemoveContainer" containerID="3a8a63e1c605d09fa25d3af1eceb1336b0f46de98660562f8ed078f5456366e0" Jan 20 17:25:32 crc kubenswrapper[4558]: E0120 17:25:32.206893 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3a8a63e1c605d09fa25d3af1eceb1336b0f46de98660562f8ed078f5456366e0\": container with ID starting with 3a8a63e1c605d09fa25d3af1eceb1336b0f46de98660562f8ed078f5456366e0 not found: ID does not exist" containerID="3a8a63e1c605d09fa25d3af1eceb1336b0f46de98660562f8ed078f5456366e0" Jan 20 17:25:32 crc kubenswrapper[4558]: I0120 17:25:32.206929 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3a8a63e1c605d09fa25d3af1eceb1336b0f46de98660562f8ed078f5456366e0"} err="failed to get container status \"3a8a63e1c605d09fa25d3af1eceb1336b0f46de98660562f8ed078f5456366e0\": rpc error: code = NotFound desc = could not find container \"3a8a63e1c605d09fa25d3af1eceb1336b0f46de98660562f8ed078f5456366e0\": container with ID starting with 3a8a63e1c605d09fa25d3af1eceb1336b0f46de98660562f8ed078f5456366e0 not found: ID does not exist" Jan 20 17:25:32 crc kubenswrapper[4558]: I0120 17:25:32.207014 4558 scope.go:117] "RemoveContainer" containerID="5a82e36836d32566689eeb37c86b42a4bed4d9a1a58decc778ba71f69e30da77" Jan 20 17:25:32 crc kubenswrapper[4558]: E0120 17:25:32.208512 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5a82e36836d32566689eeb37c86b42a4bed4d9a1a58decc778ba71f69e30da77\": container with ID starting with 5a82e36836d32566689eeb37c86b42a4bed4d9a1a58decc778ba71f69e30da77 not found: ID does not exist" containerID="5a82e36836d32566689eeb37c86b42a4bed4d9a1a58decc778ba71f69e30da77" Jan 20 17:25:32 crc kubenswrapper[4558]: I0120 17:25:32.208554 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5a82e36836d32566689eeb37c86b42a4bed4d9a1a58decc778ba71f69e30da77"} err="failed to get container status \"5a82e36836d32566689eeb37c86b42a4bed4d9a1a58decc778ba71f69e30da77\": rpc error: code = NotFound desc = could not find container \"5a82e36836d32566689eeb37c86b42a4bed4d9a1a58decc778ba71f69e30da77\": container with ID starting with 5a82e36836d32566689eeb37c86b42a4bed4d9a1a58decc778ba71f69e30da77 not found: ID does not exist" Jan 20 17:25:32 crc kubenswrapper[4558]: I0120 17:25:32.208581 4558 scope.go:117] "RemoveContainer" containerID="3952ef2fb2d9911166bfbabed3b129a170e0cfc14d11eef5dad43318dfd2ec4a" Jan 20 17:25:32 crc kubenswrapper[4558]: E0120 17:25:32.209017 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3952ef2fb2d9911166bfbabed3b129a170e0cfc14d11eef5dad43318dfd2ec4a\": container with ID starting with 3952ef2fb2d9911166bfbabed3b129a170e0cfc14d11eef5dad43318dfd2ec4a not found: ID does not exist" containerID="3952ef2fb2d9911166bfbabed3b129a170e0cfc14d11eef5dad43318dfd2ec4a" Jan 20 17:25:32 crc kubenswrapper[4558]: I0120 17:25:32.209054 
4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3952ef2fb2d9911166bfbabed3b129a170e0cfc14d11eef5dad43318dfd2ec4a"} err="failed to get container status \"3952ef2fb2d9911166bfbabed3b129a170e0cfc14d11eef5dad43318dfd2ec4a\": rpc error: code = NotFound desc = could not find container \"3952ef2fb2d9911166bfbabed3b129a170e0cfc14d11eef5dad43318dfd2ec4a\": container with ID starting with 3952ef2fb2d9911166bfbabed3b129a170e0cfc14d11eef5dad43318dfd2ec4a not found: ID does not exist" Jan 20 17:25:32 crc kubenswrapper[4558]: I0120 17:25:32.209081 4558 scope.go:117] "RemoveContainer" containerID="8f83799cf5630ce543cd9870bcb19a5209758941174c49ea050d72c3b9117819" Jan 20 17:25:32 crc kubenswrapper[4558]: E0120 17:25:32.209390 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8f83799cf5630ce543cd9870bcb19a5209758941174c49ea050d72c3b9117819\": container with ID starting with 8f83799cf5630ce543cd9870bcb19a5209758941174c49ea050d72c3b9117819 not found: ID does not exist" containerID="8f83799cf5630ce543cd9870bcb19a5209758941174c49ea050d72c3b9117819" Jan 20 17:25:32 crc kubenswrapper[4558]: I0120 17:25:32.209417 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8f83799cf5630ce543cd9870bcb19a5209758941174c49ea050d72c3b9117819"} err="failed to get container status \"8f83799cf5630ce543cd9870bcb19a5209758941174c49ea050d72c3b9117819\": rpc error: code = NotFound desc = could not find container \"8f83799cf5630ce543cd9870bcb19a5209758941174c49ea050d72c3b9117819\": container with ID starting with 8f83799cf5630ce543cd9870bcb19a5209758941174c49ea050d72c3b9117819 not found: ID does not exist" Jan 20 17:25:32 crc kubenswrapper[4558]: I0120 17:25:32.262372 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:25:32 crc kubenswrapper[4558]: E0120 17:25:32.263460 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[combined-ca-bundle config-data kube-api-access-dps67 log-httpd run-httpd scripts sg-core-conf-yaml], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack-kuttl-tests/ceilometer-0" podUID="a735e320-0f1f-482a-aacf-4ac8dd798b47" Jan 20 17:25:32 crc kubenswrapper[4558]: I0120 17:25:32.291674 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dps67\" (UniqueName: \"kubernetes.io/projected/a735e320-0f1f-482a-aacf-4ac8dd798b47-kube-api-access-dps67\") pod \"ceilometer-0\" (UID: \"a735e320-0f1f-482a-aacf-4ac8dd798b47\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:25:32 crc kubenswrapper[4558]: I0120 17:25:32.291960 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a735e320-0f1f-482a-aacf-4ac8dd798b47-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a735e320-0f1f-482a-aacf-4ac8dd798b47\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:25:32 crc kubenswrapper[4558]: I0120 17:25:32.292087 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a735e320-0f1f-482a-aacf-4ac8dd798b47-run-httpd\") pod \"ceilometer-0\" (UID: \"a735e320-0f1f-482a-aacf-4ac8dd798b47\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:25:32 crc kubenswrapper[4558]: I0120 17:25:32.292204 4558 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a735e320-0f1f-482a-aacf-4ac8dd798b47-scripts\") pod \"ceilometer-0\" (UID: \"a735e320-0f1f-482a-aacf-4ac8dd798b47\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:25:32 crc kubenswrapper[4558]: I0120 17:25:32.292295 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a735e320-0f1f-482a-aacf-4ac8dd798b47-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a735e320-0f1f-482a-aacf-4ac8dd798b47\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:25:32 crc kubenswrapper[4558]: I0120 17:25:32.292537 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a735e320-0f1f-482a-aacf-4ac8dd798b47-log-httpd\") pod \"ceilometer-0\" (UID: \"a735e320-0f1f-482a-aacf-4ac8dd798b47\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:25:32 crc kubenswrapper[4558]: I0120 17:25:32.292617 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a735e320-0f1f-482a-aacf-4ac8dd798b47-run-httpd\") pod \"ceilometer-0\" (UID: \"a735e320-0f1f-482a-aacf-4ac8dd798b47\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:25:32 crc kubenswrapper[4558]: I0120 17:25:32.292702 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a735e320-0f1f-482a-aacf-4ac8dd798b47-config-data\") pod \"ceilometer-0\" (UID: \"a735e320-0f1f-482a-aacf-4ac8dd798b47\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:25:32 crc kubenswrapper[4558]: I0120 17:25:32.292927 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a735e320-0f1f-482a-aacf-4ac8dd798b47-log-httpd\") pod \"ceilometer-0\" (UID: \"a735e320-0f1f-482a-aacf-4ac8dd798b47\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:25:32 crc kubenswrapper[4558]: I0120 17:25:32.296343 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a735e320-0f1f-482a-aacf-4ac8dd798b47-config-data\") pod \"ceilometer-0\" (UID: \"a735e320-0f1f-482a-aacf-4ac8dd798b47\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:25:32 crc kubenswrapper[4558]: I0120 17:25:32.296772 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a735e320-0f1f-482a-aacf-4ac8dd798b47-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a735e320-0f1f-482a-aacf-4ac8dd798b47\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:25:32 crc kubenswrapper[4558]: I0120 17:25:32.296801 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a735e320-0f1f-482a-aacf-4ac8dd798b47-scripts\") pod \"ceilometer-0\" (UID: \"a735e320-0f1f-482a-aacf-4ac8dd798b47\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:25:32 crc kubenswrapper[4558]: I0120 17:25:32.296978 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a735e320-0f1f-482a-aacf-4ac8dd798b47-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a735e320-0f1f-482a-aacf-4ac8dd798b47\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:25:32 crc kubenswrapper[4558]: I0120 17:25:32.305475 4558 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dps67\" (UniqueName: \"kubernetes.io/projected/a735e320-0f1f-482a-aacf-4ac8dd798b47-kube-api-access-dps67\") pod \"ceilometer-0\" (UID: \"a735e320-0f1f-482a-aacf-4ac8dd798b47\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:25:32 crc kubenswrapper[4558]: I0120 17:25:32.578412 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d3a0ace5-7ba6-400c-accb-e9934117e5da" path="/var/lib/kubelet/pods/d3a0ace5-7ba6-400c-accb-e9934117e5da/volumes" Jan 20 17:25:32 crc kubenswrapper[4558]: I0120 17:25:32.682211 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:25:32 crc kubenswrapper[4558]: I0120 17:25:32.702403 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:25:33 crc kubenswrapper[4558]: I0120 17:25:33.103066 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:25:33 crc kubenswrapper[4558]: I0120 17:25:33.118569 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:25:33 crc kubenswrapper[4558]: I0120 17:25:33.212703 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a735e320-0f1f-482a-aacf-4ac8dd798b47-sg-core-conf-yaml\") pod \"a735e320-0f1f-482a-aacf-4ac8dd798b47\" (UID: \"a735e320-0f1f-482a-aacf-4ac8dd798b47\") " Jan 20 17:25:33 crc kubenswrapper[4558]: I0120 17:25:33.213189 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a735e320-0f1f-482a-aacf-4ac8dd798b47-run-httpd\") pod \"a735e320-0f1f-482a-aacf-4ac8dd798b47\" (UID: \"a735e320-0f1f-482a-aacf-4ac8dd798b47\") " Jan 20 17:25:33 crc kubenswrapper[4558]: I0120 17:25:33.213367 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a735e320-0f1f-482a-aacf-4ac8dd798b47-config-data\") pod \"a735e320-0f1f-482a-aacf-4ac8dd798b47\" (UID: \"a735e320-0f1f-482a-aacf-4ac8dd798b47\") " Jan 20 17:25:33 crc kubenswrapper[4558]: I0120 17:25:33.213503 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a735e320-0f1f-482a-aacf-4ac8dd798b47-combined-ca-bundle\") pod \"a735e320-0f1f-482a-aacf-4ac8dd798b47\" (UID: \"a735e320-0f1f-482a-aacf-4ac8dd798b47\") " Jan 20 17:25:33 crc kubenswrapper[4558]: I0120 17:25:33.213637 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a735e320-0f1f-482a-aacf-4ac8dd798b47-log-httpd\") pod \"a735e320-0f1f-482a-aacf-4ac8dd798b47\" (UID: \"a735e320-0f1f-482a-aacf-4ac8dd798b47\") " Jan 20 17:25:33 crc kubenswrapper[4558]: I0120 17:25:33.213762 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a735e320-0f1f-482a-aacf-4ac8dd798b47-scripts\") pod \"a735e320-0f1f-482a-aacf-4ac8dd798b47\" (UID: \"a735e320-0f1f-482a-aacf-4ac8dd798b47\") " Jan 20 17:25:33 crc kubenswrapper[4558]: I0120 17:25:33.213870 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dps67\" (UniqueName: 
\"kubernetes.io/projected/a735e320-0f1f-482a-aacf-4ac8dd798b47-kube-api-access-dps67\") pod \"a735e320-0f1f-482a-aacf-4ac8dd798b47\" (UID: \"a735e320-0f1f-482a-aacf-4ac8dd798b47\") " Jan 20 17:25:33 crc kubenswrapper[4558]: I0120 17:25:33.213657 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a735e320-0f1f-482a-aacf-4ac8dd798b47-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "a735e320-0f1f-482a-aacf-4ac8dd798b47" (UID: "a735e320-0f1f-482a-aacf-4ac8dd798b47"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:25:33 crc kubenswrapper[4558]: I0120 17:25:33.213951 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a735e320-0f1f-482a-aacf-4ac8dd798b47-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "a735e320-0f1f-482a-aacf-4ac8dd798b47" (UID: "a735e320-0f1f-482a-aacf-4ac8dd798b47"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:25:33 crc kubenswrapper[4558]: I0120 17:25:33.215046 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a735e320-0f1f-482a-aacf-4ac8dd798b47-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:25:33 crc kubenswrapper[4558]: I0120 17:25:33.215126 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a735e320-0f1f-482a-aacf-4ac8dd798b47-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:25:33 crc kubenswrapper[4558]: I0120 17:25:33.220129 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a735e320-0f1f-482a-aacf-4ac8dd798b47-scripts" (OuterVolumeSpecName: "scripts") pod "a735e320-0f1f-482a-aacf-4ac8dd798b47" (UID: "a735e320-0f1f-482a-aacf-4ac8dd798b47"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:25:33 crc kubenswrapper[4558]: I0120 17:25:33.220196 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a735e320-0f1f-482a-aacf-4ac8dd798b47-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "a735e320-0f1f-482a-aacf-4ac8dd798b47" (UID: "a735e320-0f1f-482a-aacf-4ac8dd798b47"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:25:33 crc kubenswrapper[4558]: I0120 17:25:33.220228 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a735e320-0f1f-482a-aacf-4ac8dd798b47-config-data" (OuterVolumeSpecName: "config-data") pod "a735e320-0f1f-482a-aacf-4ac8dd798b47" (UID: "a735e320-0f1f-482a-aacf-4ac8dd798b47"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:25:33 crc kubenswrapper[4558]: I0120 17:25:33.220269 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a735e320-0f1f-482a-aacf-4ac8dd798b47-kube-api-access-dps67" (OuterVolumeSpecName: "kube-api-access-dps67") pod "a735e320-0f1f-482a-aacf-4ac8dd798b47" (UID: "a735e320-0f1f-482a-aacf-4ac8dd798b47"). InnerVolumeSpecName "kube-api-access-dps67". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:25:33 crc kubenswrapper[4558]: I0120 17:25:33.221149 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a735e320-0f1f-482a-aacf-4ac8dd798b47-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a735e320-0f1f-482a-aacf-4ac8dd798b47" (UID: "a735e320-0f1f-482a-aacf-4ac8dd798b47"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:25:33 crc kubenswrapper[4558]: I0120 17:25:33.320981 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a735e320-0f1f-482a-aacf-4ac8dd798b47-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:25:33 crc kubenswrapper[4558]: I0120 17:25:33.328866 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a735e320-0f1f-482a-aacf-4ac8dd798b47-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:25:33 crc kubenswrapper[4558]: I0120 17:25:33.329033 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a735e320-0f1f-482a-aacf-4ac8dd798b47-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:25:33 crc kubenswrapper[4558]: I0120 17:25:33.329112 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dps67\" (UniqueName: \"kubernetes.io/projected/a735e320-0f1f-482a-aacf-4ac8dd798b47-kube-api-access-dps67\") on node \"crc\" DevicePath \"\"" Jan 20 17:25:33 crc kubenswrapper[4558]: I0120 17:25:33.329184 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a735e320-0f1f-482a-aacf-4ac8dd798b47-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:25:34 crc kubenswrapper[4558]: I0120 17:25:34.112454 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:25:34 crc kubenswrapper[4558]: I0120 17:25:34.169398 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:25:34 crc kubenswrapper[4558]: I0120 17:25:34.177477 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:25:34 crc kubenswrapper[4558]: I0120 17:25:34.198923 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:25:34 crc kubenswrapper[4558]: I0120 17:25:34.201262 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:25:34 crc kubenswrapper[4558]: I0120 17:25:34.203516 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:25:34 crc kubenswrapper[4558]: I0120 17:25:34.205327 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:25:34 crc kubenswrapper[4558]: I0120 17:25:34.211001 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:25:34 crc kubenswrapper[4558]: I0120 17:25:34.247957 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a305abbb-1d88-4c6d-b2c7-fd650b2319da-log-httpd\") pod \"ceilometer-0\" (UID: \"a305abbb-1d88-4c6d-b2c7-fd650b2319da\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:25:34 crc kubenswrapper[4558]: I0120 17:25:34.248024 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a305abbb-1d88-4c6d-b2c7-fd650b2319da-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a305abbb-1d88-4c6d-b2c7-fd650b2319da\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:25:34 crc kubenswrapper[4558]: I0120 17:25:34.248056 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a305abbb-1d88-4c6d-b2c7-fd650b2319da-scripts\") pod \"ceilometer-0\" (UID: \"a305abbb-1d88-4c6d-b2c7-fd650b2319da\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:25:34 crc kubenswrapper[4558]: I0120 17:25:34.248187 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cbcss\" (UniqueName: \"kubernetes.io/projected/a305abbb-1d88-4c6d-b2c7-fd650b2319da-kube-api-access-cbcss\") pod \"ceilometer-0\" (UID: \"a305abbb-1d88-4c6d-b2c7-fd650b2319da\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:25:34 crc kubenswrapper[4558]: I0120 17:25:34.248423 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a305abbb-1d88-4c6d-b2c7-fd650b2319da-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a305abbb-1d88-4c6d-b2c7-fd650b2319da\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:25:34 crc kubenswrapper[4558]: I0120 17:25:34.248455 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a305abbb-1d88-4c6d-b2c7-fd650b2319da-config-data\") pod \"ceilometer-0\" (UID: \"a305abbb-1d88-4c6d-b2c7-fd650b2319da\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:25:34 crc kubenswrapper[4558]: I0120 17:25:34.248504 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a305abbb-1d88-4c6d-b2c7-fd650b2319da-run-httpd\") pod \"ceilometer-0\" (UID: \"a305abbb-1d88-4c6d-b2c7-fd650b2319da\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:25:34 crc kubenswrapper[4558]: I0120 17:25:34.350673 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a305abbb-1d88-4c6d-b2c7-fd650b2319da-log-httpd\") pod \"ceilometer-0\" (UID: 
\"a305abbb-1d88-4c6d-b2c7-fd650b2319da\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:25:34 crc kubenswrapper[4558]: I0120 17:25:34.350734 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a305abbb-1d88-4c6d-b2c7-fd650b2319da-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a305abbb-1d88-4c6d-b2c7-fd650b2319da\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:25:34 crc kubenswrapper[4558]: I0120 17:25:34.350761 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a305abbb-1d88-4c6d-b2c7-fd650b2319da-scripts\") pod \"ceilometer-0\" (UID: \"a305abbb-1d88-4c6d-b2c7-fd650b2319da\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:25:34 crc kubenswrapper[4558]: I0120 17:25:34.350822 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cbcss\" (UniqueName: \"kubernetes.io/projected/a305abbb-1d88-4c6d-b2c7-fd650b2319da-kube-api-access-cbcss\") pod \"ceilometer-0\" (UID: \"a305abbb-1d88-4c6d-b2c7-fd650b2319da\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:25:34 crc kubenswrapper[4558]: I0120 17:25:34.350925 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a305abbb-1d88-4c6d-b2c7-fd650b2319da-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a305abbb-1d88-4c6d-b2c7-fd650b2319da\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:25:34 crc kubenswrapper[4558]: I0120 17:25:34.350946 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a305abbb-1d88-4c6d-b2c7-fd650b2319da-config-data\") pod \"ceilometer-0\" (UID: \"a305abbb-1d88-4c6d-b2c7-fd650b2319da\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:25:34 crc kubenswrapper[4558]: I0120 17:25:34.350980 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a305abbb-1d88-4c6d-b2c7-fd650b2319da-run-httpd\") pod \"ceilometer-0\" (UID: \"a305abbb-1d88-4c6d-b2c7-fd650b2319da\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:25:34 crc kubenswrapper[4558]: I0120 17:25:34.351373 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a305abbb-1d88-4c6d-b2c7-fd650b2319da-log-httpd\") pod \"ceilometer-0\" (UID: \"a305abbb-1d88-4c6d-b2c7-fd650b2319da\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:25:34 crc kubenswrapper[4558]: I0120 17:25:34.351409 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a305abbb-1d88-4c6d-b2c7-fd650b2319da-run-httpd\") pod \"ceilometer-0\" (UID: \"a305abbb-1d88-4c6d-b2c7-fd650b2319da\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:25:34 crc kubenswrapper[4558]: I0120 17:25:34.355405 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a305abbb-1d88-4c6d-b2c7-fd650b2319da-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a305abbb-1d88-4c6d-b2c7-fd650b2319da\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:25:34 crc kubenswrapper[4558]: I0120 17:25:34.355930 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/a305abbb-1d88-4c6d-b2c7-fd650b2319da-scripts\") pod \"ceilometer-0\" (UID: \"a305abbb-1d88-4c6d-b2c7-fd650b2319da\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:25:34 crc kubenswrapper[4558]: I0120 17:25:34.356155 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a305abbb-1d88-4c6d-b2c7-fd650b2319da-config-data\") pod \"ceilometer-0\" (UID: \"a305abbb-1d88-4c6d-b2c7-fd650b2319da\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:25:34 crc kubenswrapper[4558]: I0120 17:25:34.357824 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a305abbb-1d88-4c6d-b2c7-fd650b2319da-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a305abbb-1d88-4c6d-b2c7-fd650b2319da\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:25:34 crc kubenswrapper[4558]: I0120 17:25:34.385894 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cbcss\" (UniqueName: \"kubernetes.io/projected/a305abbb-1d88-4c6d-b2c7-fd650b2319da-kube-api-access-cbcss\") pod \"ceilometer-0\" (UID: \"a305abbb-1d88-4c6d-b2c7-fd650b2319da\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:25:34 crc kubenswrapper[4558]: I0120 17:25:34.515412 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:25:34 crc kubenswrapper[4558]: I0120 17:25:34.575089 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a735e320-0f1f-482a-aacf-4ac8dd798b47" path="/var/lib/kubelet/pods/a735e320-0f1f-482a-aacf-4ac8dd798b47/volumes" Jan 20 17:25:34 crc kubenswrapper[4558]: I0120 17:25:34.616408 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:25:34 crc kubenswrapper[4558]: I0120 17:25:34.760191 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ef47776-0da5-453d-b79e-0a2f2e96a5ea-combined-ca-bundle\") pod \"8ef47776-0da5-453d-b79e-0a2f2e96a5ea\" (UID: \"8ef47776-0da5-453d-b79e-0a2f2e96a5ea\") " Jan 20 17:25:34 crc kubenswrapper[4558]: I0120 17:25:34.760287 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5xbr7\" (UniqueName: \"kubernetes.io/projected/8ef47776-0da5-453d-b79e-0a2f2e96a5ea-kube-api-access-5xbr7\") pod \"8ef47776-0da5-453d-b79e-0a2f2e96a5ea\" (UID: \"8ef47776-0da5-453d-b79e-0a2f2e96a5ea\") " Jan 20 17:25:34 crc kubenswrapper[4558]: I0120 17:25:34.760351 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8ef47776-0da5-453d-b79e-0a2f2e96a5ea-logs\") pod \"8ef47776-0da5-453d-b79e-0a2f2e96a5ea\" (UID: \"8ef47776-0da5-453d-b79e-0a2f2e96a5ea\") " Jan 20 17:25:34 crc kubenswrapper[4558]: I0120 17:25:34.760405 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ef47776-0da5-453d-b79e-0a2f2e96a5ea-config-data\") pod \"8ef47776-0da5-453d-b79e-0a2f2e96a5ea\" (UID: \"8ef47776-0da5-453d-b79e-0a2f2e96a5ea\") " Jan 20 17:25:34 crc kubenswrapper[4558]: I0120 17:25:34.760938 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8ef47776-0da5-453d-b79e-0a2f2e96a5ea-logs" (OuterVolumeSpecName: "logs") pod "8ef47776-0da5-453d-b79e-0a2f2e96a5ea" (UID: "8ef47776-0da5-453d-b79e-0a2f2e96a5ea"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:25:34 crc kubenswrapper[4558]: I0120 17:25:34.762550 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8ef47776-0da5-453d-b79e-0a2f2e96a5ea-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:25:34 crc kubenswrapper[4558]: I0120 17:25:34.766632 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8ef47776-0da5-453d-b79e-0a2f2e96a5ea-kube-api-access-5xbr7" (OuterVolumeSpecName: "kube-api-access-5xbr7") pod "8ef47776-0da5-453d-b79e-0a2f2e96a5ea" (UID: "8ef47776-0da5-453d-b79e-0a2f2e96a5ea"). InnerVolumeSpecName "kube-api-access-5xbr7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:25:34 crc kubenswrapper[4558]: I0120 17:25:34.794068 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ef47776-0da5-453d-b79e-0a2f2e96a5ea-config-data" (OuterVolumeSpecName: "config-data") pod "8ef47776-0da5-453d-b79e-0a2f2e96a5ea" (UID: "8ef47776-0da5-453d-b79e-0a2f2e96a5ea"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:25:34 crc kubenswrapper[4558]: I0120 17:25:34.796963 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ef47776-0da5-453d-b79e-0a2f2e96a5ea-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8ef47776-0da5-453d-b79e-0a2f2e96a5ea" (UID: "8ef47776-0da5-453d-b79e-0a2f2e96a5ea"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:25:34 crc kubenswrapper[4558]: I0120 17:25:34.864104 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ef47776-0da5-453d-b79e-0a2f2e96a5ea-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:25:34 crc kubenswrapper[4558]: I0120 17:25:34.864137 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ef47776-0da5-453d-b79e-0a2f2e96a5ea-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:25:34 crc kubenswrapper[4558]: I0120 17:25:34.864152 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5xbr7\" (UniqueName: \"kubernetes.io/projected/8ef47776-0da5-453d-b79e-0a2f2e96a5ea-kube-api-access-5xbr7\") on node \"crc\" DevicePath \"\"" Jan 20 17:25:34 crc kubenswrapper[4558]: I0120 17:25:34.943986 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:25:35 crc kubenswrapper[4558]: I0120 17:25:35.126258 4558 generic.go:334] "Generic (PLEG): container finished" podID="8ef47776-0da5-453d-b79e-0a2f2e96a5ea" containerID="1bfe52b1403217ff2d8d27301c496265ead6571b9febf7eaed8d9641fed21809" exitCode=0 Jan 20 17:25:35 crc kubenswrapper[4558]: I0120 17:25:35.126396 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"8ef47776-0da5-453d-b79e-0a2f2e96a5ea","Type":"ContainerDied","Data":"1bfe52b1403217ff2d8d27301c496265ead6571b9febf7eaed8d9641fed21809"} Jan 20 17:25:35 crc kubenswrapper[4558]: I0120 17:25:35.126423 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:25:35 crc kubenswrapper[4558]: I0120 17:25:35.126469 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"8ef47776-0da5-453d-b79e-0a2f2e96a5ea","Type":"ContainerDied","Data":"f4725010f8ff7590cdc401e88d9fed15c826d08c1fe314ea9255a98ff6b0a108"} Jan 20 17:25:35 crc kubenswrapper[4558]: I0120 17:25:35.126494 4558 scope.go:117] "RemoveContainer" containerID="1bfe52b1403217ff2d8d27301c496265ead6571b9febf7eaed8d9641fed21809" Jan 20 17:25:35 crc kubenswrapper[4558]: I0120 17:25:35.130223 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"a305abbb-1d88-4c6d-b2c7-fd650b2319da","Type":"ContainerStarted","Data":"76271298e96b648d5dda7ec75c83665d0258ca208f018b1dd34d2707abd79502"} Jan 20 17:25:35 crc kubenswrapper[4558]: I0120 17:25:35.156553 4558 scope.go:117] "RemoveContainer" containerID="16b34a3556681b97fbea8309d05f448893bc41363e6d18ac12c841721057ed0b" Jan 20 17:25:35 crc kubenswrapper[4558]: I0120 17:25:35.166089 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:25:35 crc kubenswrapper[4558]: I0120 17:25:35.183053 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:25:35 crc kubenswrapper[4558]: I0120 17:25:35.187562 4558 scope.go:117] "RemoveContainer" containerID="1bfe52b1403217ff2d8d27301c496265ead6571b9febf7eaed8d9641fed21809" Jan 20 17:25:35 crc kubenswrapper[4558]: E0120 17:25:35.187985 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1bfe52b1403217ff2d8d27301c496265ead6571b9febf7eaed8d9641fed21809\": container with ID starting with 
1bfe52b1403217ff2d8d27301c496265ead6571b9febf7eaed8d9641fed21809 not found: ID does not exist" containerID="1bfe52b1403217ff2d8d27301c496265ead6571b9febf7eaed8d9641fed21809" Jan 20 17:25:35 crc kubenswrapper[4558]: I0120 17:25:35.188019 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1bfe52b1403217ff2d8d27301c496265ead6571b9febf7eaed8d9641fed21809"} err="failed to get container status \"1bfe52b1403217ff2d8d27301c496265ead6571b9febf7eaed8d9641fed21809\": rpc error: code = NotFound desc = could not find container \"1bfe52b1403217ff2d8d27301c496265ead6571b9febf7eaed8d9641fed21809\": container with ID starting with 1bfe52b1403217ff2d8d27301c496265ead6571b9febf7eaed8d9641fed21809 not found: ID does not exist" Jan 20 17:25:35 crc kubenswrapper[4558]: I0120 17:25:35.188044 4558 scope.go:117] "RemoveContainer" containerID="16b34a3556681b97fbea8309d05f448893bc41363e6d18ac12c841721057ed0b" Jan 20 17:25:35 crc kubenswrapper[4558]: E0120 17:25:35.188303 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"16b34a3556681b97fbea8309d05f448893bc41363e6d18ac12c841721057ed0b\": container with ID starting with 16b34a3556681b97fbea8309d05f448893bc41363e6d18ac12c841721057ed0b not found: ID does not exist" containerID="16b34a3556681b97fbea8309d05f448893bc41363e6d18ac12c841721057ed0b" Jan 20 17:25:35 crc kubenswrapper[4558]: I0120 17:25:35.188329 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"16b34a3556681b97fbea8309d05f448893bc41363e6d18ac12c841721057ed0b"} err="failed to get container status \"16b34a3556681b97fbea8309d05f448893bc41363e6d18ac12c841721057ed0b\": rpc error: code = NotFound desc = could not find container \"16b34a3556681b97fbea8309d05f448893bc41363e6d18ac12c841721057ed0b\": container with ID starting with 16b34a3556681b97fbea8309d05f448893bc41363e6d18ac12c841721057ed0b not found: ID does not exist" Jan 20 17:25:35 crc kubenswrapper[4558]: I0120 17:25:35.194833 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:25:35 crc kubenswrapper[4558]: E0120 17:25:35.195344 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ef47776-0da5-453d-b79e-0a2f2e96a5ea" containerName="nova-api-api" Jan 20 17:25:35 crc kubenswrapper[4558]: I0120 17:25:35.195364 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ef47776-0da5-453d-b79e-0a2f2e96a5ea" containerName="nova-api-api" Jan 20 17:25:35 crc kubenswrapper[4558]: E0120 17:25:35.195386 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ef47776-0da5-453d-b79e-0a2f2e96a5ea" containerName="nova-api-log" Jan 20 17:25:35 crc kubenswrapper[4558]: I0120 17:25:35.195393 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ef47776-0da5-453d-b79e-0a2f2e96a5ea" containerName="nova-api-log" Jan 20 17:25:35 crc kubenswrapper[4558]: I0120 17:25:35.195551 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ef47776-0da5-453d-b79e-0a2f2e96a5ea" containerName="nova-api-log" Jan 20 17:25:35 crc kubenswrapper[4558]: I0120 17:25:35.195582 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ef47776-0da5-453d-b79e-0a2f2e96a5ea" containerName="nova-api-api" Jan 20 17:25:35 crc kubenswrapper[4558]: I0120 17:25:35.196639 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:25:35 crc kubenswrapper[4558]: I0120 17:25:35.198947 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-internal-svc" Jan 20 17:25:35 crc kubenswrapper[4558]: I0120 17:25:35.199057 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-public-svc" Jan 20 17:25:35 crc kubenswrapper[4558]: I0120 17:25:35.199226 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-api-config-data" Jan 20 17:25:35 crc kubenswrapper[4558]: I0120 17:25:35.208545 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:25:35 crc kubenswrapper[4558]: I0120 17:25:35.379332 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d656c1d3-89ff-4957-9513-eb75a80c1c00-internal-tls-certs\") pod \"nova-api-0\" (UID: \"d656c1d3-89ff-4957-9513-eb75a80c1c00\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:25:35 crc kubenswrapper[4558]: I0120 17:25:35.379395 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d656c1d3-89ff-4957-9513-eb75a80c1c00-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"d656c1d3-89ff-4957-9513-eb75a80c1c00\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:25:35 crc kubenswrapper[4558]: I0120 17:25:35.379504 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d656c1d3-89ff-4957-9513-eb75a80c1c00-logs\") pod \"nova-api-0\" (UID: \"d656c1d3-89ff-4957-9513-eb75a80c1c00\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:25:35 crc kubenswrapper[4558]: I0120 17:25:35.379583 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d656c1d3-89ff-4957-9513-eb75a80c1c00-public-tls-certs\") pod \"nova-api-0\" (UID: \"d656c1d3-89ff-4957-9513-eb75a80c1c00\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:25:35 crc kubenswrapper[4558]: I0120 17:25:35.379609 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d656c1d3-89ff-4957-9513-eb75a80c1c00-config-data\") pod \"nova-api-0\" (UID: \"d656c1d3-89ff-4957-9513-eb75a80c1c00\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:25:35 crc kubenswrapper[4558]: I0120 17:25:35.379752 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zmdvc\" (UniqueName: \"kubernetes.io/projected/d656c1d3-89ff-4957-9513-eb75a80c1c00-kube-api-access-zmdvc\") pod \"nova-api-0\" (UID: \"d656c1d3-89ff-4957-9513-eb75a80c1c00\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:25:35 crc kubenswrapper[4558]: I0120 17:25:35.481743 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zmdvc\" (UniqueName: \"kubernetes.io/projected/d656c1d3-89ff-4957-9513-eb75a80c1c00-kube-api-access-zmdvc\") pod \"nova-api-0\" (UID: \"d656c1d3-89ff-4957-9513-eb75a80c1c00\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:25:35 crc kubenswrapper[4558]: I0120 17:25:35.481979 4558 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d656c1d3-89ff-4957-9513-eb75a80c1c00-internal-tls-certs\") pod \"nova-api-0\" (UID: \"d656c1d3-89ff-4957-9513-eb75a80c1c00\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:25:35 crc kubenswrapper[4558]: I0120 17:25:35.482081 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d656c1d3-89ff-4957-9513-eb75a80c1c00-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"d656c1d3-89ff-4957-9513-eb75a80c1c00\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:25:35 crc kubenswrapper[4558]: I0120 17:25:35.482127 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d656c1d3-89ff-4957-9513-eb75a80c1c00-logs\") pod \"nova-api-0\" (UID: \"d656c1d3-89ff-4957-9513-eb75a80c1c00\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:25:35 crc kubenswrapper[4558]: I0120 17:25:35.482182 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d656c1d3-89ff-4957-9513-eb75a80c1c00-public-tls-certs\") pod \"nova-api-0\" (UID: \"d656c1d3-89ff-4957-9513-eb75a80c1c00\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:25:35 crc kubenswrapper[4558]: I0120 17:25:35.482204 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d656c1d3-89ff-4957-9513-eb75a80c1c00-config-data\") pod \"nova-api-0\" (UID: \"d656c1d3-89ff-4957-9513-eb75a80c1c00\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:25:35 crc kubenswrapper[4558]: I0120 17:25:35.482654 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d656c1d3-89ff-4957-9513-eb75a80c1c00-logs\") pod \"nova-api-0\" (UID: \"d656c1d3-89ff-4957-9513-eb75a80c1c00\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:25:35 crc kubenswrapper[4558]: I0120 17:25:35.487064 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d656c1d3-89ff-4957-9513-eb75a80c1c00-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"d656c1d3-89ff-4957-9513-eb75a80c1c00\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:25:35 crc kubenswrapper[4558]: I0120 17:25:35.487702 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d656c1d3-89ff-4957-9513-eb75a80c1c00-internal-tls-certs\") pod \"nova-api-0\" (UID: \"d656c1d3-89ff-4957-9513-eb75a80c1c00\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:25:35 crc kubenswrapper[4558]: I0120 17:25:35.489389 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d656c1d3-89ff-4957-9513-eb75a80c1c00-public-tls-certs\") pod \"nova-api-0\" (UID: \"d656c1d3-89ff-4957-9513-eb75a80c1c00\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:25:35 crc kubenswrapper[4558]: I0120 17:25:35.506159 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d656c1d3-89ff-4957-9513-eb75a80c1c00-config-data\") pod \"nova-api-0\" (UID: \"d656c1d3-89ff-4957-9513-eb75a80c1c00\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:25:35 crc kubenswrapper[4558]: I0120 17:25:35.514647 4558 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zmdvc\" (UniqueName: \"kubernetes.io/projected/d656c1d3-89ff-4957-9513-eb75a80c1c00-kube-api-access-zmdvc\") pod \"nova-api-0\" (UID: \"d656c1d3-89ff-4957-9513-eb75a80c1c00\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:25:35 crc kubenswrapper[4558]: I0120 17:25:35.810452 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:25:36 crc kubenswrapper[4558]: I0120 17:25:36.297084 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:25:36 crc kubenswrapper[4558]: W0120 17:25:36.297669 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd656c1d3_89ff_4957_9513_eb75a80c1c00.slice/crio-160cbf6b2d88c716c19e8eaf56bfbc422d1af56fa0c3032bdeea49f6486950d6 WatchSource:0}: Error finding container 160cbf6b2d88c716c19e8eaf56bfbc422d1af56fa0c3032bdeea49f6486950d6: Status 404 returned error can't find the container with id 160cbf6b2d88c716c19e8eaf56bfbc422d1af56fa0c3032bdeea49f6486950d6 Jan 20 17:25:36 crc kubenswrapper[4558]: I0120 17:25:36.583009 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8ef47776-0da5-453d-b79e-0a2f2e96a5ea" path="/var/lib/kubelet/pods/8ef47776-0da5-453d-b79e-0a2f2e96a5ea/volumes" Jan 20 17:25:37 crc kubenswrapper[4558]: I0120 17:25:37.154905 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"d656c1d3-89ff-4957-9513-eb75a80c1c00","Type":"ContainerStarted","Data":"59f9862d65c9b50494adb13cb5f61324609c64741954dfc41606a8f27a3df822"} Jan 20 17:25:37 crc kubenswrapper[4558]: I0120 17:25:37.155332 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"d656c1d3-89ff-4957-9513-eb75a80c1c00","Type":"ContainerStarted","Data":"b481ad58fac6f8abfd9b55ac32ee2ff8645a7da54d546b294df2e00aceba5427"} Jan 20 17:25:37 crc kubenswrapper[4558]: I0120 17:25:37.155346 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"d656c1d3-89ff-4957-9513-eb75a80c1c00","Type":"ContainerStarted","Data":"160cbf6b2d88c716c19e8eaf56bfbc422d1af56fa0c3032bdeea49f6486950d6"} Jan 20 17:25:37 crc kubenswrapper[4558]: I0120 17:25:37.159383 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"a305abbb-1d88-4c6d-b2c7-fd650b2319da","Type":"ContainerStarted","Data":"1a94c40b8f00dbb1c00df7d36522e0c283e8c28256ab095351cac69f47b9d73e"} Jan 20 17:25:37 crc kubenswrapper[4558]: I0120 17:25:37.183109 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-api-0" podStartSLOduration=2.183098241 podStartE2EDuration="2.183098241s" podCreationTimestamp="2026-01-20 17:25:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:25:37.178863831 +0000 UTC m=+2630.939201798" watchObservedRunningTime="2026-01-20 17:25:37.183098241 +0000 UTC m=+2630.943436209" Jan 20 17:25:37 crc kubenswrapper[4558]: I0120 17:25:37.682552 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:25:37 crc kubenswrapper[4558]: I0120 17:25:37.702294 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:25:37 crc kubenswrapper[4558]: I0120 17:25:37.703184 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:25:37 crc kubenswrapper[4558]: I0120 17:25:37.737712 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:25:38 crc kubenswrapper[4558]: I0120 17:25:38.171534 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"a305abbb-1d88-4c6d-b2c7-fd650b2319da","Type":"ContainerStarted","Data":"a934572f65febc707d4a14663a61a09ad941c67ab32d4fc8d51ae523ef2a571c"} Jan 20 17:25:38 crc kubenswrapper[4558]: I0120 17:25:38.190771 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:25:38 crc kubenswrapper[4558]: I0120 17:25:38.199412 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:25:38 crc kubenswrapper[4558]: I0120 17:25:38.357047 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-cell-mapping-99khx"] Jan 20 17:25:38 crc kubenswrapper[4558]: I0120 17:25:38.358354 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-99khx" Jan 20 17:25:38 crc kubenswrapper[4558]: I0120 17:25:38.359885 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-manage-config-data" Jan 20 17:25:38 crc kubenswrapper[4558]: I0120 17:25:38.360253 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-manage-scripts" Jan 20 17:25:38 crc kubenswrapper[4558]: I0120 17:25:38.387181 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-cell-mapping-99khx"] Jan 20 17:25:38 crc kubenswrapper[4558]: I0120 17:25:38.555975 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ql5hh\" (UniqueName: \"kubernetes.io/projected/d697297c-4dda-4206-9a85-e393d32bc809-kube-api-access-ql5hh\") pod \"nova-cell1-cell-mapping-99khx\" (UID: \"d697297c-4dda-4206-9a85-e393d32bc809\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-99khx" Jan 20 17:25:38 crc kubenswrapper[4558]: I0120 17:25:38.556660 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d697297c-4dda-4206-9a85-e393d32bc809-scripts\") pod \"nova-cell1-cell-mapping-99khx\" (UID: \"d697297c-4dda-4206-9a85-e393d32bc809\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-99khx" Jan 20 17:25:38 crc kubenswrapper[4558]: I0120 17:25:38.557832 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d697297c-4dda-4206-9a85-e393d32bc809-config-data\") pod \"nova-cell1-cell-mapping-99khx\" (UID: \"d697297c-4dda-4206-9a85-e393d32bc809\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-99khx" Jan 20 17:25:38 crc kubenswrapper[4558]: I0120 17:25:38.558235 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/d697297c-4dda-4206-9a85-e393d32bc809-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-99khx\" (UID: \"d697297c-4dda-4206-9a85-e393d32bc809\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-99khx" Jan 20 17:25:38 crc kubenswrapper[4558]: I0120 17:25:38.659849 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ql5hh\" (UniqueName: \"kubernetes.io/projected/d697297c-4dda-4206-9a85-e393d32bc809-kube-api-access-ql5hh\") pod \"nova-cell1-cell-mapping-99khx\" (UID: \"d697297c-4dda-4206-9a85-e393d32bc809\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-99khx" Jan 20 17:25:38 crc kubenswrapper[4558]: I0120 17:25:38.659921 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d697297c-4dda-4206-9a85-e393d32bc809-scripts\") pod \"nova-cell1-cell-mapping-99khx\" (UID: \"d697297c-4dda-4206-9a85-e393d32bc809\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-99khx" Jan 20 17:25:38 crc kubenswrapper[4558]: I0120 17:25:38.659988 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d697297c-4dda-4206-9a85-e393d32bc809-config-data\") pod \"nova-cell1-cell-mapping-99khx\" (UID: \"d697297c-4dda-4206-9a85-e393d32bc809\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-99khx" Jan 20 17:25:38 crc kubenswrapper[4558]: I0120 17:25:38.660059 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d697297c-4dda-4206-9a85-e393d32bc809-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-99khx\" (UID: \"d697297c-4dda-4206-9a85-e393d32bc809\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-99khx" Jan 20 17:25:38 crc kubenswrapper[4558]: I0120 17:25:38.664521 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d697297c-4dda-4206-9a85-e393d32bc809-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-99khx\" (UID: \"d697297c-4dda-4206-9a85-e393d32bc809\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-99khx" Jan 20 17:25:38 crc kubenswrapper[4558]: I0120 17:25:38.664964 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d697297c-4dda-4206-9a85-e393d32bc809-scripts\") pod \"nova-cell1-cell-mapping-99khx\" (UID: \"d697297c-4dda-4206-9a85-e393d32bc809\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-99khx" Jan 20 17:25:38 crc kubenswrapper[4558]: I0120 17:25:38.673069 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d697297c-4dda-4206-9a85-e393d32bc809-config-data\") pod \"nova-cell1-cell-mapping-99khx\" (UID: \"d697297c-4dda-4206-9a85-e393d32bc809\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-99khx" Jan 20 17:25:38 crc kubenswrapper[4558]: I0120 17:25:38.684643 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ql5hh\" (UniqueName: \"kubernetes.io/projected/d697297c-4dda-4206-9a85-e393d32bc809-kube-api-access-ql5hh\") pod \"nova-cell1-cell-mapping-99khx\" (UID: \"d697297c-4dda-4206-9a85-e393d32bc809\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-99khx" Jan 20 17:25:38 crc kubenswrapper[4558]: I0120 17:25:38.742362 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-99khx" Jan 20 17:25:39 crc kubenswrapper[4558]: I0120 17:25:39.134999 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-cell-mapping-99khx"] Jan 20 17:25:39 crc kubenswrapper[4558]: I0120 17:25:39.182657 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"a305abbb-1d88-4c6d-b2c7-fd650b2319da","Type":"ContainerStarted","Data":"445646e163abbea4a88c24f58aca3cd554259abe3dc7ac102e66e976842c674a"} Jan 20 17:25:39 crc kubenswrapper[4558]: I0120 17:25:39.184719 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-99khx" event={"ID":"d697297c-4dda-4206-9a85-e393d32bc809","Type":"ContainerStarted","Data":"0c213d27b94ba2f06adfcbee5cf03ae669b6b1bd3ba19194d71dd8cef4639705"} Jan 20 17:25:40 crc kubenswrapper[4558]: I0120 17:25:40.207653 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"a305abbb-1d88-4c6d-b2c7-fd650b2319da","Type":"ContainerStarted","Data":"8986b81f80be8479e0600f287270ed55401193d48d2351d656e91a1e1e011bec"} Jan 20 17:25:40 crc kubenswrapper[4558]: I0120 17:25:40.208377 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:25:40 crc kubenswrapper[4558]: I0120 17:25:40.209664 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-99khx" event={"ID":"d697297c-4dda-4206-9a85-e393d32bc809","Type":"ContainerStarted","Data":"83e70e7f692408ccff2187fab95da6e437b09bcb259d31a7fe0858d27643cdca"} Jan 20 17:25:40 crc kubenswrapper[4558]: I0120 17:25:40.232488 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=1.331613387 podStartE2EDuration="6.232464896s" podCreationTimestamp="2026-01-20 17:25:34 +0000 UTC" firstStartedPulling="2026-01-20 17:25:34.949999113 +0000 UTC m=+2628.710337070" lastFinishedPulling="2026-01-20 17:25:39.850850612 +0000 UTC m=+2633.611188579" observedRunningTime="2026-01-20 17:25:40.229363396 +0000 UTC m=+2633.989701362" watchObservedRunningTime="2026-01-20 17:25:40.232464896 +0000 UTC m=+2633.992802863" Jan 20 17:25:40 crc kubenswrapper[4558]: I0120 17:25:40.252291 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-99khx" podStartSLOduration=2.252267657 podStartE2EDuration="2.252267657s" podCreationTimestamp="2026-01-20 17:25:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:25:40.250364097 +0000 UTC m=+2634.010702065" watchObservedRunningTime="2026-01-20 17:25:40.252267657 +0000 UTC m=+2634.012605624" Jan 20 17:25:44 crc kubenswrapper[4558]: I0120 17:25:44.252194 4558 generic.go:334] "Generic (PLEG): container finished" podID="d697297c-4dda-4206-9a85-e393d32bc809" containerID="83e70e7f692408ccff2187fab95da6e437b09bcb259d31a7fe0858d27643cdca" exitCode=0 Jan 20 17:25:44 crc kubenswrapper[4558]: I0120 17:25:44.252284 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-99khx" event={"ID":"d697297c-4dda-4206-9a85-e393d32bc809","Type":"ContainerDied","Data":"83e70e7f692408ccff2187fab95da6e437b09bcb259d31a7fe0858d27643cdca"} Jan 20 17:25:45 crc kubenswrapper[4558]: I0120 
17:25:45.555663 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-99khx" Jan 20 17:25:45 crc kubenswrapper[4558]: I0120 17:25:45.709962 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d697297c-4dda-4206-9a85-e393d32bc809-scripts\") pod \"d697297c-4dda-4206-9a85-e393d32bc809\" (UID: \"d697297c-4dda-4206-9a85-e393d32bc809\") " Jan 20 17:25:45 crc kubenswrapper[4558]: I0120 17:25:45.710012 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d697297c-4dda-4206-9a85-e393d32bc809-config-data\") pod \"d697297c-4dda-4206-9a85-e393d32bc809\" (UID: \"d697297c-4dda-4206-9a85-e393d32bc809\") " Jan 20 17:25:45 crc kubenswrapper[4558]: I0120 17:25:45.710068 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d697297c-4dda-4206-9a85-e393d32bc809-combined-ca-bundle\") pod \"d697297c-4dda-4206-9a85-e393d32bc809\" (UID: \"d697297c-4dda-4206-9a85-e393d32bc809\") " Jan 20 17:25:45 crc kubenswrapper[4558]: I0120 17:25:45.710096 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ql5hh\" (UniqueName: \"kubernetes.io/projected/d697297c-4dda-4206-9a85-e393d32bc809-kube-api-access-ql5hh\") pod \"d697297c-4dda-4206-9a85-e393d32bc809\" (UID: \"d697297c-4dda-4206-9a85-e393d32bc809\") " Jan 20 17:25:45 crc kubenswrapper[4558]: I0120 17:25:45.716981 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d697297c-4dda-4206-9a85-e393d32bc809-scripts" (OuterVolumeSpecName: "scripts") pod "d697297c-4dda-4206-9a85-e393d32bc809" (UID: "d697297c-4dda-4206-9a85-e393d32bc809"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:25:45 crc kubenswrapper[4558]: I0120 17:25:45.717299 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d697297c-4dda-4206-9a85-e393d32bc809-kube-api-access-ql5hh" (OuterVolumeSpecName: "kube-api-access-ql5hh") pod "d697297c-4dda-4206-9a85-e393d32bc809" (UID: "d697297c-4dda-4206-9a85-e393d32bc809"). InnerVolumeSpecName "kube-api-access-ql5hh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:25:45 crc kubenswrapper[4558]: I0120 17:25:45.738065 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d697297c-4dda-4206-9a85-e393d32bc809-config-data" (OuterVolumeSpecName: "config-data") pod "d697297c-4dda-4206-9a85-e393d32bc809" (UID: "d697297c-4dda-4206-9a85-e393d32bc809"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:25:45 crc kubenswrapper[4558]: I0120 17:25:45.738103 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d697297c-4dda-4206-9a85-e393d32bc809-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d697297c-4dda-4206-9a85-e393d32bc809" (UID: "d697297c-4dda-4206-9a85-e393d32bc809"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:25:45 crc kubenswrapper[4558]: I0120 17:25:45.811230 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:25:45 crc kubenswrapper[4558]: I0120 17:25:45.811294 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:25:45 crc kubenswrapper[4558]: I0120 17:25:45.813775 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d697297c-4dda-4206-9a85-e393d32bc809-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:25:45 crc kubenswrapper[4558]: I0120 17:25:45.813895 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d697297c-4dda-4206-9a85-e393d32bc809-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:25:45 crc kubenswrapper[4558]: I0120 17:25:45.813963 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d697297c-4dda-4206-9a85-e393d32bc809-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:25:45 crc kubenswrapper[4558]: I0120 17:25:45.814022 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ql5hh\" (UniqueName: \"kubernetes.io/projected/d697297c-4dda-4206-9a85-e393d32bc809-kube-api-access-ql5hh\") on node \"crc\" DevicePath \"\"" Jan 20 17:25:46 crc kubenswrapper[4558]: I0120 17:25:46.275113 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-99khx" event={"ID":"d697297c-4dda-4206-9a85-e393d32bc809","Type":"ContainerDied","Data":"0c213d27b94ba2f06adfcbee5cf03ae669b6b1bd3ba19194d71dd8cef4639705"} Jan 20 17:25:46 crc kubenswrapper[4558]: I0120 17:25:46.275225 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0c213d27b94ba2f06adfcbee5cf03ae669b6b1bd3ba19194d71dd8cef4639705" Jan 20 17:25:46 crc kubenswrapper[4558]: I0120 17:25:46.275192 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-99khx" Jan 20 17:25:46 crc kubenswrapper[4558]: I0120 17:25:46.455012 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:25:46 crc kubenswrapper[4558]: I0120 17:25:46.455284 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="d656c1d3-89ff-4957-9513-eb75a80c1c00" containerName="nova-api-log" containerID="cri-o://b481ad58fac6f8abfd9b55ac32ee2ff8645a7da54d546b294df2e00aceba5427" gracePeriod=30 Jan 20 17:25:46 crc kubenswrapper[4558]: I0120 17:25:46.455562 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="d656c1d3-89ff-4957-9513-eb75a80c1c00" containerName="nova-api-api" containerID="cri-o://59f9862d65c9b50494adb13cb5f61324609c64741954dfc41606a8f27a3df822" gracePeriod=30 Jan 20 17:25:46 crc kubenswrapper[4558]: I0120 17:25:46.463589 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" podUID="d656c1d3-89ff-4957-9513-eb75a80c1c00" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.1.54:8774/\": EOF" Jan 20 17:25:46 crc kubenswrapper[4558]: I0120 17:25:46.468369 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:25:46 crc kubenswrapper[4558]: I0120 17:25:46.468585 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="fd2955dc-03c4-4aeb-abb9-354d7ea573d8" containerName="nova-scheduler-scheduler" containerID="cri-o://b6cff0a928f37d752a1c2df4781f1ea77214201b025c53a47a6c7c72c7398f4e" gracePeriod=30 Jan 20 17:25:46 crc kubenswrapper[4558]: I0120 17:25:46.469766 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" podUID="d656c1d3-89ff-4957-9513-eb75a80c1c00" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.1.54:8774/\": EOF" Jan 20 17:25:46 crc kubenswrapper[4558]: I0120 17:25:46.526429 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:25:46 crc kubenswrapper[4558]: I0120 17:25:46.527014 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="17b07837-77df-4727-a6cf-b6292274c0d0" containerName="nova-metadata-log" containerID="cri-o://35798b1c8df375ae98753eb74e01c62741019fc00650915228809d7db4db0d40" gracePeriod=30 Jan 20 17:25:46 crc kubenswrapper[4558]: I0120 17:25:46.527641 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="17b07837-77df-4727-a6cf-b6292274c0d0" containerName="nova-metadata-metadata" containerID="cri-o://7fb288c38075c2b262015368efafc54f281cd8ab4780829fd44abc51f04758b6" gracePeriod=30 Jan 20 17:25:47 crc kubenswrapper[4558]: I0120 17:25:47.295380 4558 generic.go:334] "Generic (PLEG): container finished" podID="17b07837-77df-4727-a6cf-b6292274c0d0" containerID="35798b1c8df375ae98753eb74e01c62741019fc00650915228809d7db4db0d40" exitCode=143 Jan 20 17:25:47 crc kubenswrapper[4558]: I0120 17:25:47.296740 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"17b07837-77df-4727-a6cf-b6292274c0d0","Type":"ContainerDied","Data":"35798b1c8df375ae98753eb74e01c62741019fc00650915228809d7db4db0d40"} 
Jan 20 17:25:47 crc kubenswrapper[4558]: I0120 17:25:47.299024 4558 generic.go:334] "Generic (PLEG): container finished" podID="d656c1d3-89ff-4957-9513-eb75a80c1c00" containerID="b481ad58fac6f8abfd9b55ac32ee2ff8645a7da54d546b294df2e00aceba5427" exitCode=143 Jan 20 17:25:47 crc kubenswrapper[4558]: I0120 17:25:47.299176 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"d656c1d3-89ff-4957-9513-eb75a80c1c00","Type":"ContainerDied","Data":"b481ad58fac6f8abfd9b55ac32ee2ff8645a7da54d546b294df2e00aceba5427"} Jan 20 17:25:47 crc kubenswrapper[4558]: E0120 17:25:47.704905 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b6cff0a928f37d752a1c2df4781f1ea77214201b025c53a47a6c7c72c7398f4e" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:25:47 crc kubenswrapper[4558]: E0120 17:25:47.706434 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b6cff0a928f37d752a1c2df4781f1ea77214201b025c53a47a6c7c72c7398f4e" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:25:47 crc kubenswrapper[4558]: E0120 17:25:47.707690 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b6cff0a928f37d752a1c2df4781f1ea77214201b025c53a47a6c7c72c7398f4e" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:25:47 crc kubenswrapper[4558]: E0120 17:25:47.707889 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="fd2955dc-03c4-4aeb-abb9-354d7ea573d8" containerName="nova-scheduler-scheduler" Jan 20 17:25:49 crc kubenswrapper[4558]: I0120 17:25:49.663272 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/nova-metadata-0" podUID="17b07837-77df-4727-a6cf-b6292274c0d0" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.1.48:8775/\": read tcp 10.217.0.2:37008->10.217.1.48:8775: read: connection reset by peer" Jan 20 17:25:49 crc kubenswrapper[4558]: I0120 17:25:49.663305 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/nova-metadata-0" podUID="17b07837-77df-4727-a6cf-b6292274c0d0" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.1.48:8775/\": read tcp 10.217.0.2:36992->10.217.1.48:8775: read: connection reset by peer" Jan 20 17:25:49 crc kubenswrapper[4558]: I0120 17:25:49.922856 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.006912 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd2955dc-03c4-4aeb-abb9-354d7ea573d8-config-data\") pod \"fd2955dc-03c4-4aeb-abb9-354d7ea573d8\" (UID: \"fd2955dc-03c4-4aeb-abb9-354d7ea573d8\") " Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.007053 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8hrp8\" (UniqueName: \"kubernetes.io/projected/fd2955dc-03c4-4aeb-abb9-354d7ea573d8-kube-api-access-8hrp8\") pod \"fd2955dc-03c4-4aeb-abb9-354d7ea573d8\" (UID: \"fd2955dc-03c4-4aeb-abb9-354d7ea573d8\") " Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.007092 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd2955dc-03c4-4aeb-abb9-354d7ea573d8-combined-ca-bundle\") pod \"fd2955dc-03c4-4aeb-abb9-354d7ea573d8\" (UID: \"fd2955dc-03c4-4aeb-abb9-354d7ea573d8\") " Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.014071 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fd2955dc-03c4-4aeb-abb9-354d7ea573d8-kube-api-access-8hrp8" (OuterVolumeSpecName: "kube-api-access-8hrp8") pod "fd2955dc-03c4-4aeb-abb9-354d7ea573d8" (UID: "fd2955dc-03c4-4aeb-abb9-354d7ea573d8"). InnerVolumeSpecName "kube-api-access-8hrp8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.040031 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd2955dc-03c4-4aeb-abb9-354d7ea573d8-config-data" (OuterVolumeSpecName: "config-data") pod "fd2955dc-03c4-4aeb-abb9-354d7ea573d8" (UID: "fd2955dc-03c4-4aeb-abb9-354d7ea573d8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.044299 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd2955dc-03c4-4aeb-abb9-354d7ea573d8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fd2955dc-03c4-4aeb-abb9-354d7ea573d8" (UID: "fd2955dc-03c4-4aeb-abb9-354d7ea573d8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.075484 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.108227 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17b07837-77df-4727-a6cf-b6292274c0d0-config-data\") pod \"17b07837-77df-4727-a6cf-b6292274c0d0\" (UID: \"17b07837-77df-4727-a6cf-b6292274c0d0\") " Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.108456 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/17b07837-77df-4727-a6cf-b6292274c0d0-nova-metadata-tls-certs\") pod \"17b07837-77df-4727-a6cf-b6292274c0d0\" (UID: \"17b07837-77df-4727-a6cf-b6292274c0d0\") " Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.108558 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t4zpc\" (UniqueName: \"kubernetes.io/projected/17b07837-77df-4727-a6cf-b6292274c0d0-kube-api-access-t4zpc\") pod \"17b07837-77df-4727-a6cf-b6292274c0d0\" (UID: \"17b07837-77df-4727-a6cf-b6292274c0d0\") " Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.108634 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17b07837-77df-4727-a6cf-b6292274c0d0-combined-ca-bundle\") pod \"17b07837-77df-4727-a6cf-b6292274c0d0\" (UID: \"17b07837-77df-4727-a6cf-b6292274c0d0\") " Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.108884 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/17b07837-77df-4727-a6cf-b6292274c0d0-logs\") pod \"17b07837-77df-4727-a6cf-b6292274c0d0\" (UID: \"17b07837-77df-4727-a6cf-b6292274c0d0\") " Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.109363 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8hrp8\" (UniqueName: \"kubernetes.io/projected/fd2955dc-03c4-4aeb-abb9-354d7ea573d8-kube-api-access-8hrp8\") on node \"crc\" DevicePath \"\"" Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.109429 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd2955dc-03c4-4aeb-abb9-354d7ea573d8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.109495 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd2955dc-03c4-4aeb-abb9-354d7ea573d8-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.109866 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/17b07837-77df-4727-a6cf-b6292274c0d0-logs" (OuterVolumeSpecName: "logs") pod "17b07837-77df-4727-a6cf-b6292274c0d0" (UID: "17b07837-77df-4727-a6cf-b6292274c0d0"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.112437 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/17b07837-77df-4727-a6cf-b6292274c0d0-kube-api-access-t4zpc" (OuterVolumeSpecName: "kube-api-access-t4zpc") pod "17b07837-77df-4727-a6cf-b6292274c0d0" (UID: "17b07837-77df-4727-a6cf-b6292274c0d0"). InnerVolumeSpecName "kube-api-access-t4zpc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.136329 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/17b07837-77df-4727-a6cf-b6292274c0d0-config-data" (OuterVolumeSpecName: "config-data") pod "17b07837-77df-4727-a6cf-b6292274c0d0" (UID: "17b07837-77df-4727-a6cf-b6292274c0d0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.142242 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/17b07837-77df-4727-a6cf-b6292274c0d0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "17b07837-77df-4727-a6cf-b6292274c0d0" (UID: "17b07837-77df-4727-a6cf-b6292274c0d0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.155927 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/17b07837-77df-4727-a6cf-b6292274c0d0-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "17b07837-77df-4727-a6cf-b6292274c0d0" (UID: "17b07837-77df-4727-a6cf-b6292274c0d0"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.211897 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/17b07837-77df-4727-a6cf-b6292274c0d0-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.211932 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17b07837-77df-4727-a6cf-b6292274c0d0-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.211947 4558 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/17b07837-77df-4727-a6cf-b6292274c0d0-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.211958 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t4zpc\" (UniqueName: \"kubernetes.io/projected/17b07837-77df-4727-a6cf-b6292274c0d0-kube-api-access-t4zpc\") on node \"crc\" DevicePath \"\"" Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.211968 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17b07837-77df-4727-a6cf-b6292274c0d0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.336434 4558 generic.go:334] "Generic (PLEG): container finished" podID="fd2955dc-03c4-4aeb-abb9-354d7ea573d8" containerID="b6cff0a928f37d752a1c2df4781f1ea77214201b025c53a47a6c7c72c7398f4e" exitCode=0 Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.336507 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.336516 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"fd2955dc-03c4-4aeb-abb9-354d7ea573d8","Type":"ContainerDied","Data":"b6cff0a928f37d752a1c2df4781f1ea77214201b025c53a47a6c7c72c7398f4e"} Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.336650 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"fd2955dc-03c4-4aeb-abb9-354d7ea573d8","Type":"ContainerDied","Data":"349ddcabc3471266fa4f8a2d50a9a8d95b144b1ecc812bfc91c45a684b0cf0c1"} Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.336683 4558 scope.go:117] "RemoveContainer" containerID="b6cff0a928f37d752a1c2df4781f1ea77214201b025c53a47a6c7c72c7398f4e" Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.338328 4558 generic.go:334] "Generic (PLEG): container finished" podID="17b07837-77df-4727-a6cf-b6292274c0d0" containerID="7fb288c38075c2b262015368efafc54f281cd8ab4780829fd44abc51f04758b6" exitCode=0 Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.338361 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"17b07837-77df-4727-a6cf-b6292274c0d0","Type":"ContainerDied","Data":"7fb288c38075c2b262015368efafc54f281cd8ab4780829fd44abc51f04758b6"} Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.338386 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"17b07837-77df-4727-a6cf-b6292274c0d0","Type":"ContainerDied","Data":"79db95a3f173831c627351636fb428bb1da3faf30c53b5b0624bce62e06d9090"} Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.338395 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.383537 4558 scope.go:117] "RemoveContainer" containerID="b6cff0a928f37d752a1c2df4781f1ea77214201b025c53a47a6c7c72c7398f4e" Jan 20 17:25:50 crc kubenswrapper[4558]: E0120 17:25:50.383949 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b6cff0a928f37d752a1c2df4781f1ea77214201b025c53a47a6c7c72c7398f4e\": container with ID starting with b6cff0a928f37d752a1c2df4781f1ea77214201b025c53a47a6c7c72c7398f4e not found: ID does not exist" containerID="b6cff0a928f37d752a1c2df4781f1ea77214201b025c53a47a6c7c72c7398f4e" Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.383984 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b6cff0a928f37d752a1c2df4781f1ea77214201b025c53a47a6c7c72c7398f4e"} err="failed to get container status \"b6cff0a928f37d752a1c2df4781f1ea77214201b025c53a47a6c7c72c7398f4e\": rpc error: code = NotFound desc = could not find container \"b6cff0a928f37d752a1c2df4781f1ea77214201b025c53a47a6c7c72c7398f4e\": container with ID starting with b6cff0a928f37d752a1c2df4781f1ea77214201b025c53a47a6c7c72c7398f4e not found: ID does not exist" Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.384008 4558 scope.go:117] "RemoveContainer" containerID="7fb288c38075c2b262015368efafc54f281cd8ab4780829fd44abc51f04758b6" Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.387199 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.398476 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.406215 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.412896 4558 scope.go:117] "RemoveContainer" containerID="35798b1c8df375ae98753eb74e01c62741019fc00650915228809d7db4db0d40" Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.422221 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.426416 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:25:50 crc kubenswrapper[4558]: E0120 17:25:50.426863 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d697297c-4dda-4206-9a85-e393d32bc809" containerName="nova-manage" Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.426881 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d697297c-4dda-4206-9a85-e393d32bc809" containerName="nova-manage" Jan 20 17:25:50 crc kubenswrapper[4558]: E0120 17:25:50.426908 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="17b07837-77df-4727-a6cf-b6292274c0d0" containerName="nova-metadata-log" Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.426914 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="17b07837-77df-4727-a6cf-b6292274c0d0" containerName="nova-metadata-log" Jan 20 17:25:50 crc kubenswrapper[4558]: E0120 17:25:50.426927 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd2955dc-03c4-4aeb-abb9-354d7ea573d8" containerName="nova-scheduler-scheduler" Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.426932 4558 
state_mem.go:107] "Deleted CPUSet assignment" podUID="fd2955dc-03c4-4aeb-abb9-354d7ea573d8" containerName="nova-scheduler-scheduler" Jan 20 17:25:50 crc kubenswrapper[4558]: E0120 17:25:50.426940 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="17b07837-77df-4727-a6cf-b6292274c0d0" containerName="nova-metadata-metadata" Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.426948 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="17b07837-77df-4727-a6cf-b6292274c0d0" containerName="nova-metadata-metadata" Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.427099 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="17b07837-77df-4727-a6cf-b6292274c0d0" containerName="nova-metadata-log" Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.427115 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d697297c-4dda-4206-9a85-e393d32bc809" containerName="nova-manage" Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.427135 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd2955dc-03c4-4aeb-abb9-354d7ea573d8" containerName="nova-scheduler-scheduler" Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.427148 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="17b07837-77df-4727-a6cf-b6292274c0d0" containerName="nova-metadata-metadata" Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.428098 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.442540 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.442561 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-metadata-internal-svc" Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.443032 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-metadata-config-data" Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.452278 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.453120 4558 scope.go:117] "RemoveContainer" containerID="7fb288c38075c2b262015368efafc54f281cd8ab4780829fd44abc51f04758b6" Jan 20 17:25:50 crc kubenswrapper[4558]: E0120 17:25:50.458396 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7fb288c38075c2b262015368efafc54f281cd8ab4780829fd44abc51f04758b6\": container with ID starting with 7fb288c38075c2b262015368efafc54f281cd8ab4780829fd44abc51f04758b6 not found: ID does not exist" containerID="7fb288c38075c2b262015368efafc54f281cd8ab4780829fd44abc51f04758b6" Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.458464 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7fb288c38075c2b262015368efafc54f281cd8ab4780829fd44abc51f04758b6"} err="failed to get container status \"7fb288c38075c2b262015368efafc54f281cd8ab4780829fd44abc51f04758b6\": rpc error: code = NotFound desc = could not find container \"7fb288c38075c2b262015368efafc54f281cd8ab4780829fd44abc51f04758b6\": container with ID starting with 7fb288c38075c2b262015368efafc54f281cd8ab4780829fd44abc51f04758b6 not found: ID does not exist" Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.458494 4558 scope.go:117] "RemoveContainer" containerID="35798b1c8df375ae98753eb74e01c62741019fc00650915228809d7db4db0d40" Jan 20 17:25:50 crc kubenswrapper[4558]: E0120 17:25:50.458797 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"35798b1c8df375ae98753eb74e01c62741019fc00650915228809d7db4db0d40\": container with ID starting with 35798b1c8df375ae98753eb74e01c62741019fc00650915228809d7db4db0d40 not found: ID does not exist" containerID="35798b1c8df375ae98753eb74e01c62741019fc00650915228809d7db4db0d40" Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.458821 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"35798b1c8df375ae98753eb74e01c62741019fc00650915228809d7db4db0d40"} err="failed to get container status \"35798b1c8df375ae98753eb74e01c62741019fc00650915228809d7db4db0d40\": rpc error: code = NotFound desc = could not find container \"35798b1c8df375ae98753eb74e01c62741019fc00650915228809d7db4db0d40\": container with ID starting with 35798b1c8df375ae98753eb74e01c62741019fc00650915228809d7db4db0d40 not found: ID does not exist" Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.464715 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-scheduler-config-data" Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.477309 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.510745 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.519877 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f859dc3a-a5c9-4ddf-bf5e-0cedd762540f-config-data\") pod \"nova-scheduler-0\" (UID: \"f859dc3a-a5c9-4ddf-bf5e-0cedd762540f\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.519919 4558 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/0370f524-0608-45c9-a4d4-efca3a6bbc4a-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"0370f524-0608-45c9-a4d4-efca3a6bbc4a\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.519949 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f859dc3a-a5c9-4ddf-bf5e-0cedd762540f-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"f859dc3a-a5c9-4ddf-bf5e-0cedd762540f\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.519974 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d8shb\" (UniqueName: \"kubernetes.io/projected/f859dc3a-a5c9-4ddf-bf5e-0cedd762540f-kube-api-access-d8shb\") pod \"nova-scheduler-0\" (UID: \"f859dc3a-a5c9-4ddf-bf5e-0cedd762540f\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.520020 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g7d7d\" (UniqueName: \"kubernetes.io/projected/0370f524-0608-45c9-a4d4-efca3a6bbc4a-kube-api-access-g7d7d\") pod \"nova-metadata-0\" (UID: \"0370f524-0608-45c9-a4d4-efca3a6bbc4a\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.520049 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0370f524-0608-45c9-a4d4-efca3a6bbc4a-config-data\") pod \"nova-metadata-0\" (UID: \"0370f524-0608-45c9-a4d4-efca3a6bbc4a\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.520078 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0370f524-0608-45c9-a4d4-efca3a6bbc4a-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"0370f524-0608-45c9-a4d4-efca3a6bbc4a\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.520097 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0370f524-0608-45c9-a4d4-efca3a6bbc4a-logs\") pod \"nova-metadata-0\" (UID: \"0370f524-0608-45c9-a4d4-efca3a6bbc4a\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.578207 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="17b07837-77df-4727-a6cf-b6292274c0d0" path="/var/lib/kubelet/pods/17b07837-77df-4727-a6cf-b6292274c0d0/volumes" Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.578978 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fd2955dc-03c4-4aeb-abb9-354d7ea573d8" path="/var/lib/kubelet/pods/fd2955dc-03c4-4aeb-abb9-354d7ea573d8/volumes" Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.621900 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f859dc3a-a5c9-4ddf-bf5e-0cedd762540f-config-data\") pod \"nova-scheduler-0\" (UID: \"f859dc3a-a5c9-4ddf-bf5e-0cedd762540f\") " 
pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.622104 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/0370f524-0608-45c9-a4d4-efca3a6bbc4a-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"0370f524-0608-45c9-a4d4-efca3a6bbc4a\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.622238 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f859dc3a-a5c9-4ddf-bf5e-0cedd762540f-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"f859dc3a-a5c9-4ddf-bf5e-0cedd762540f\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.622333 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d8shb\" (UniqueName: \"kubernetes.io/projected/f859dc3a-a5c9-4ddf-bf5e-0cedd762540f-kube-api-access-d8shb\") pod \"nova-scheduler-0\" (UID: \"f859dc3a-a5c9-4ddf-bf5e-0cedd762540f\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.622454 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g7d7d\" (UniqueName: \"kubernetes.io/projected/0370f524-0608-45c9-a4d4-efca3a6bbc4a-kube-api-access-g7d7d\") pod \"nova-metadata-0\" (UID: \"0370f524-0608-45c9-a4d4-efca3a6bbc4a\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.622538 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0370f524-0608-45c9-a4d4-efca3a6bbc4a-config-data\") pod \"nova-metadata-0\" (UID: \"0370f524-0608-45c9-a4d4-efca3a6bbc4a\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.622623 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0370f524-0608-45c9-a4d4-efca3a6bbc4a-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"0370f524-0608-45c9-a4d4-efca3a6bbc4a\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.622682 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0370f524-0608-45c9-a4d4-efca3a6bbc4a-logs\") pod \"nova-metadata-0\" (UID: \"0370f524-0608-45c9-a4d4-efca3a6bbc4a\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.623356 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0370f524-0608-45c9-a4d4-efca3a6bbc4a-logs\") pod \"nova-metadata-0\" (UID: \"0370f524-0608-45c9-a4d4-efca3a6bbc4a\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.627712 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f859dc3a-a5c9-4ddf-bf5e-0cedd762540f-config-data\") pod \"nova-scheduler-0\" (UID: \"f859dc3a-a5c9-4ddf-bf5e-0cedd762540f\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.627725 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/0370f524-0608-45c9-a4d4-efca3a6bbc4a-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"0370f524-0608-45c9-a4d4-efca3a6bbc4a\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.628110 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f859dc3a-a5c9-4ddf-bf5e-0cedd762540f-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"f859dc3a-a5c9-4ddf-bf5e-0cedd762540f\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.628568 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0370f524-0608-45c9-a4d4-efca3a6bbc4a-config-data\") pod \"nova-metadata-0\" (UID: \"0370f524-0608-45c9-a4d4-efca3a6bbc4a\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.638706 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0370f524-0608-45c9-a4d4-efca3a6bbc4a-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"0370f524-0608-45c9-a4d4-efca3a6bbc4a\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.641224 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g7d7d\" (UniqueName: \"kubernetes.io/projected/0370f524-0608-45c9-a4d4-efca3a6bbc4a-kube-api-access-g7d7d\") pod \"nova-metadata-0\" (UID: \"0370f524-0608-45c9-a4d4-efca3a6bbc4a\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.641652 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d8shb\" (UniqueName: \"kubernetes.io/projected/f859dc3a-a5c9-4ddf-bf5e-0cedd762540f-kube-api-access-d8shb\") pod \"nova-scheduler-0\" (UID: \"f859dc3a-a5c9-4ddf-bf5e-0cedd762540f\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.759464 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:25:50 crc kubenswrapper[4558]: I0120 17:25:50.768582 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:25:51 crc kubenswrapper[4558]: I0120 17:25:51.190958 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:25:51 crc kubenswrapper[4558]: I0120 17:25:51.251969 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:25:51 crc kubenswrapper[4558]: I0120 17:25:51.354875 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"0370f524-0608-45c9-a4d4-efca3a6bbc4a","Type":"ContainerStarted","Data":"3bd82063429d21886e43702ff2750e30a9368e89bfcdb3e5dfdc7631a7eabf51"} Jan 20 17:25:51 crc kubenswrapper[4558]: I0120 17:25:51.354944 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"0370f524-0608-45c9-a4d4-efca3a6bbc4a","Type":"ContainerStarted","Data":"b1bceb775b9dd183fe674070820367a0fa5f61e96ce3468ee4f2fb8dbcd0d756"} Jan 20 17:25:51 crc kubenswrapper[4558]: I0120 17:25:51.357025 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"f859dc3a-a5c9-4ddf-bf5e-0cedd762540f","Type":"ContainerStarted","Data":"a4a380e21fdb868a97136a44549eb33353441f09b4ed3dcd9f06928932312fb5"} Jan 20 17:25:52 crc kubenswrapper[4558]: I0120 17:25:52.213639 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:25:52 crc kubenswrapper[4558]: I0120 17:25:52.259271 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zmdvc\" (UniqueName: \"kubernetes.io/projected/d656c1d3-89ff-4957-9513-eb75a80c1c00-kube-api-access-zmdvc\") pod \"d656c1d3-89ff-4957-9513-eb75a80c1c00\" (UID: \"d656c1d3-89ff-4957-9513-eb75a80c1c00\") " Jan 20 17:25:52 crc kubenswrapper[4558]: I0120 17:25:52.259336 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d656c1d3-89ff-4957-9513-eb75a80c1c00-combined-ca-bundle\") pod \"d656c1d3-89ff-4957-9513-eb75a80c1c00\" (UID: \"d656c1d3-89ff-4957-9513-eb75a80c1c00\") " Jan 20 17:25:52 crc kubenswrapper[4558]: I0120 17:25:52.259369 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d656c1d3-89ff-4957-9513-eb75a80c1c00-logs\") pod \"d656c1d3-89ff-4957-9513-eb75a80c1c00\" (UID: \"d656c1d3-89ff-4957-9513-eb75a80c1c00\") " Jan 20 17:25:52 crc kubenswrapper[4558]: I0120 17:25:52.259439 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d656c1d3-89ff-4957-9513-eb75a80c1c00-public-tls-certs\") pod \"d656c1d3-89ff-4957-9513-eb75a80c1c00\" (UID: \"d656c1d3-89ff-4957-9513-eb75a80c1c00\") " Jan 20 17:25:52 crc kubenswrapper[4558]: I0120 17:25:52.259490 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d656c1d3-89ff-4957-9513-eb75a80c1c00-internal-tls-certs\") pod \"d656c1d3-89ff-4957-9513-eb75a80c1c00\" (UID: \"d656c1d3-89ff-4957-9513-eb75a80c1c00\") " Jan 20 17:25:52 crc kubenswrapper[4558]: I0120 17:25:52.259652 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d656c1d3-89ff-4957-9513-eb75a80c1c00-config-data\") pod 
\"d656c1d3-89ff-4957-9513-eb75a80c1c00\" (UID: \"d656c1d3-89ff-4957-9513-eb75a80c1c00\") " Jan 20 17:25:52 crc kubenswrapper[4558]: I0120 17:25:52.259969 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d656c1d3-89ff-4957-9513-eb75a80c1c00-logs" (OuterVolumeSpecName: "logs") pod "d656c1d3-89ff-4957-9513-eb75a80c1c00" (UID: "d656c1d3-89ff-4957-9513-eb75a80c1c00"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:25:52 crc kubenswrapper[4558]: I0120 17:25:52.260646 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d656c1d3-89ff-4957-9513-eb75a80c1c00-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:25:52 crc kubenswrapper[4558]: I0120 17:25:52.267348 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d656c1d3-89ff-4957-9513-eb75a80c1c00-kube-api-access-zmdvc" (OuterVolumeSpecName: "kube-api-access-zmdvc") pod "d656c1d3-89ff-4957-9513-eb75a80c1c00" (UID: "d656c1d3-89ff-4957-9513-eb75a80c1c00"). InnerVolumeSpecName "kube-api-access-zmdvc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:25:52 crc kubenswrapper[4558]: I0120 17:25:52.291474 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d656c1d3-89ff-4957-9513-eb75a80c1c00-config-data" (OuterVolumeSpecName: "config-data") pod "d656c1d3-89ff-4957-9513-eb75a80c1c00" (UID: "d656c1d3-89ff-4957-9513-eb75a80c1c00"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:25:52 crc kubenswrapper[4558]: I0120 17:25:52.299323 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d656c1d3-89ff-4957-9513-eb75a80c1c00-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d656c1d3-89ff-4957-9513-eb75a80c1c00" (UID: "d656c1d3-89ff-4957-9513-eb75a80c1c00"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:25:52 crc kubenswrapper[4558]: I0120 17:25:52.304379 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d656c1d3-89ff-4957-9513-eb75a80c1c00-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "d656c1d3-89ff-4957-9513-eb75a80c1c00" (UID: "d656c1d3-89ff-4957-9513-eb75a80c1c00"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:25:52 crc kubenswrapper[4558]: I0120 17:25:52.306125 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d656c1d3-89ff-4957-9513-eb75a80c1c00-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "d656c1d3-89ff-4957-9513-eb75a80c1c00" (UID: "d656c1d3-89ff-4957-9513-eb75a80c1c00"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:25:52 crc kubenswrapper[4558]: I0120 17:25:52.362223 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d656c1d3-89ff-4957-9513-eb75a80c1c00-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:25:52 crc kubenswrapper[4558]: I0120 17:25:52.362263 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d656c1d3-89ff-4957-9513-eb75a80c1c00-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:25:52 crc kubenswrapper[4558]: I0120 17:25:52.362276 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d656c1d3-89ff-4957-9513-eb75a80c1c00-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:25:52 crc kubenswrapper[4558]: I0120 17:25:52.362290 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zmdvc\" (UniqueName: \"kubernetes.io/projected/d656c1d3-89ff-4957-9513-eb75a80c1c00-kube-api-access-zmdvc\") on node \"crc\" DevicePath \"\"" Jan 20 17:25:52 crc kubenswrapper[4558]: I0120 17:25:52.362301 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d656c1d3-89ff-4957-9513-eb75a80c1c00-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:25:52 crc kubenswrapper[4558]: I0120 17:25:52.373736 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"f859dc3a-a5c9-4ddf-bf5e-0cedd762540f","Type":"ContainerStarted","Data":"6348c20e2c41f3b65a4752b2bf21e2c1f01971ffd97f7ef8d1a6820091d10a61"} Jan 20 17:25:52 crc kubenswrapper[4558]: I0120 17:25:52.378542 4558 generic.go:334] "Generic (PLEG): container finished" podID="d656c1d3-89ff-4957-9513-eb75a80c1c00" containerID="59f9862d65c9b50494adb13cb5f61324609c64741954dfc41606a8f27a3df822" exitCode=0 Jan 20 17:25:52 crc kubenswrapper[4558]: I0120 17:25:52.378606 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"d656c1d3-89ff-4957-9513-eb75a80c1c00","Type":"ContainerDied","Data":"59f9862d65c9b50494adb13cb5f61324609c64741954dfc41606a8f27a3df822"} Jan 20 17:25:52 crc kubenswrapper[4558]: I0120 17:25:52.378620 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:25:52 crc kubenswrapper[4558]: I0120 17:25:52.378635 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"d656c1d3-89ff-4957-9513-eb75a80c1c00","Type":"ContainerDied","Data":"160cbf6b2d88c716c19e8eaf56bfbc422d1af56fa0c3032bdeea49f6486950d6"} Jan 20 17:25:52 crc kubenswrapper[4558]: I0120 17:25:52.378655 4558 scope.go:117] "RemoveContainer" containerID="59f9862d65c9b50494adb13cb5f61324609c64741954dfc41606a8f27a3df822" Jan 20 17:25:52 crc kubenswrapper[4558]: I0120 17:25:52.382288 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"0370f524-0608-45c9-a4d4-efca3a6bbc4a","Type":"ContainerStarted","Data":"21f88c9b39c9e0e5b774687833ad6583b7301ae275f1fa63a898d7d06969b445"} Jan 20 17:25:52 crc kubenswrapper[4558]: I0120 17:25:52.392829 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-scheduler-0" podStartSLOduration=2.392815152 podStartE2EDuration="2.392815152s" podCreationTimestamp="2026-01-20 17:25:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:25:52.390272642 +0000 UTC m=+2646.150610609" watchObservedRunningTime="2026-01-20 17:25:52.392815152 +0000 UTC m=+2646.153153120" Jan 20 17:25:52 crc kubenswrapper[4558]: I0120 17:25:52.414060 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-metadata-0" podStartSLOduration=2.414050286 podStartE2EDuration="2.414050286s" podCreationTimestamp="2026-01-20 17:25:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:25:52.411572917 +0000 UTC m=+2646.171910885" watchObservedRunningTime="2026-01-20 17:25:52.414050286 +0000 UTC m=+2646.174388253" Jan 20 17:25:52 crc kubenswrapper[4558]: I0120 17:25:52.416840 4558 scope.go:117] "RemoveContainer" containerID="b481ad58fac6f8abfd9b55ac32ee2ff8645a7da54d546b294df2e00aceba5427" Jan 20 17:25:52 crc kubenswrapper[4558]: I0120 17:25:52.433257 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:25:52 crc kubenswrapper[4558]: I0120 17:25:52.448205 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:25:52 crc kubenswrapper[4558]: I0120 17:25:52.451311 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:25:52 crc kubenswrapper[4558]: E0120 17:25:52.451818 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d656c1d3-89ff-4957-9513-eb75a80c1c00" containerName="nova-api-log" Jan 20 17:25:52 crc kubenswrapper[4558]: I0120 17:25:52.451838 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d656c1d3-89ff-4957-9513-eb75a80c1c00" containerName="nova-api-log" Jan 20 17:25:52 crc kubenswrapper[4558]: E0120 17:25:52.451869 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d656c1d3-89ff-4957-9513-eb75a80c1c00" containerName="nova-api-api" Jan 20 17:25:52 crc kubenswrapper[4558]: I0120 17:25:52.451876 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d656c1d3-89ff-4957-9513-eb75a80c1c00" containerName="nova-api-api" Jan 20 17:25:52 crc kubenswrapper[4558]: I0120 17:25:52.452172 4558 memory_manager.go:354] "RemoveStaleState removing 
state" podUID="d656c1d3-89ff-4957-9513-eb75a80c1c00" containerName="nova-api-log" Jan 20 17:25:52 crc kubenswrapper[4558]: I0120 17:25:52.452208 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d656c1d3-89ff-4957-9513-eb75a80c1c00" containerName="nova-api-api" Jan 20 17:25:52 crc kubenswrapper[4558]: I0120 17:25:52.456823 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:25:52 crc kubenswrapper[4558]: I0120 17:25:52.465720 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f59aa69c-6c42-41df-905e-29428d350637-config-data\") pod \"nova-api-0\" (UID: \"f59aa69c-6c42-41df-905e-29428d350637\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:25:52 crc kubenswrapper[4558]: I0120 17:25:52.465764 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f59aa69c-6c42-41df-905e-29428d350637-internal-tls-certs\") pod \"nova-api-0\" (UID: \"f59aa69c-6c42-41df-905e-29428d350637\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:25:52 crc kubenswrapper[4558]: I0120 17:25:52.465849 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cdstn\" (UniqueName: \"kubernetes.io/projected/f59aa69c-6c42-41df-905e-29428d350637-kube-api-access-cdstn\") pod \"nova-api-0\" (UID: \"f59aa69c-6c42-41df-905e-29428d350637\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:25:52 crc kubenswrapper[4558]: I0120 17:25:52.466083 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f59aa69c-6c42-41df-905e-29428d350637-public-tls-certs\") pod \"nova-api-0\" (UID: \"f59aa69c-6c42-41df-905e-29428d350637\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:25:52 crc kubenswrapper[4558]: I0120 17:25:52.466116 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f59aa69c-6c42-41df-905e-29428d350637-logs\") pod \"nova-api-0\" (UID: \"f59aa69c-6c42-41df-905e-29428d350637\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:25:52 crc kubenswrapper[4558]: I0120 17:25:52.466178 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f59aa69c-6c42-41df-905e-29428d350637-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"f59aa69c-6c42-41df-905e-29428d350637\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:25:52 crc kubenswrapper[4558]: I0120 17:25:52.467909 4558 scope.go:117] "RemoveContainer" containerID="59f9862d65c9b50494adb13cb5f61324609c64741954dfc41606a8f27a3df822" Jan 20 17:25:52 crc kubenswrapper[4558]: I0120 17:25:52.468237 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-public-svc" Jan 20 17:25:52 crc kubenswrapper[4558]: I0120 17:25:52.468490 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-api-config-data" Jan 20 17:25:52 crc kubenswrapper[4558]: I0120 17:25:52.468611 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-internal-svc" Jan 20 17:25:52 crc kubenswrapper[4558]: E0120 17:25:52.468880 4558 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"59f9862d65c9b50494adb13cb5f61324609c64741954dfc41606a8f27a3df822\": container with ID starting with 59f9862d65c9b50494adb13cb5f61324609c64741954dfc41606a8f27a3df822 not found: ID does not exist" containerID="59f9862d65c9b50494adb13cb5f61324609c64741954dfc41606a8f27a3df822" Jan 20 17:25:52 crc kubenswrapper[4558]: I0120 17:25:52.468941 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"59f9862d65c9b50494adb13cb5f61324609c64741954dfc41606a8f27a3df822"} err="failed to get container status \"59f9862d65c9b50494adb13cb5f61324609c64741954dfc41606a8f27a3df822\": rpc error: code = NotFound desc = could not find container \"59f9862d65c9b50494adb13cb5f61324609c64741954dfc41606a8f27a3df822\": container with ID starting with 59f9862d65c9b50494adb13cb5f61324609c64741954dfc41606a8f27a3df822 not found: ID does not exist" Jan 20 17:25:52 crc kubenswrapper[4558]: I0120 17:25:52.468967 4558 scope.go:117] "RemoveContainer" containerID="b481ad58fac6f8abfd9b55ac32ee2ff8645a7da54d546b294df2e00aceba5427" Jan 20 17:25:52 crc kubenswrapper[4558]: E0120 17:25:52.470009 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b481ad58fac6f8abfd9b55ac32ee2ff8645a7da54d546b294df2e00aceba5427\": container with ID starting with b481ad58fac6f8abfd9b55ac32ee2ff8645a7da54d546b294df2e00aceba5427 not found: ID does not exist" containerID="b481ad58fac6f8abfd9b55ac32ee2ff8645a7da54d546b294df2e00aceba5427" Jan 20 17:25:52 crc kubenswrapper[4558]: I0120 17:25:52.470043 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b481ad58fac6f8abfd9b55ac32ee2ff8645a7da54d546b294df2e00aceba5427"} err="failed to get container status \"b481ad58fac6f8abfd9b55ac32ee2ff8645a7da54d546b294df2e00aceba5427\": rpc error: code = NotFound desc = could not find container \"b481ad58fac6f8abfd9b55ac32ee2ff8645a7da54d546b294df2e00aceba5427\": container with ID starting with b481ad58fac6f8abfd9b55ac32ee2ff8645a7da54d546b294df2e00aceba5427 not found: ID does not exist" Jan 20 17:25:52 crc kubenswrapper[4558]: I0120 17:25:52.470338 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:25:52 crc kubenswrapper[4558]: I0120 17:25:52.568295 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cdstn\" (UniqueName: \"kubernetes.io/projected/f59aa69c-6c42-41df-905e-29428d350637-kube-api-access-cdstn\") pod \"nova-api-0\" (UID: \"f59aa69c-6c42-41df-905e-29428d350637\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:25:52 crc kubenswrapper[4558]: I0120 17:25:52.568451 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f59aa69c-6c42-41df-905e-29428d350637-public-tls-certs\") pod \"nova-api-0\" (UID: \"f59aa69c-6c42-41df-905e-29428d350637\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:25:52 crc kubenswrapper[4558]: I0120 17:25:52.568484 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f59aa69c-6c42-41df-905e-29428d350637-logs\") pod \"nova-api-0\" (UID: \"f59aa69c-6c42-41df-905e-29428d350637\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:25:52 crc kubenswrapper[4558]: I0120 17:25:52.568529 4558 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f59aa69c-6c42-41df-905e-29428d350637-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"f59aa69c-6c42-41df-905e-29428d350637\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:25:52 crc kubenswrapper[4558]: I0120 17:25:52.568645 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f59aa69c-6c42-41df-905e-29428d350637-config-data\") pod \"nova-api-0\" (UID: \"f59aa69c-6c42-41df-905e-29428d350637\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:25:52 crc kubenswrapper[4558]: I0120 17:25:52.568676 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f59aa69c-6c42-41df-905e-29428d350637-internal-tls-certs\") pod \"nova-api-0\" (UID: \"f59aa69c-6c42-41df-905e-29428d350637\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:25:52 crc kubenswrapper[4558]: I0120 17:25:52.570863 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f59aa69c-6c42-41df-905e-29428d350637-logs\") pod \"nova-api-0\" (UID: \"f59aa69c-6c42-41df-905e-29428d350637\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:25:52 crc kubenswrapper[4558]: I0120 17:25:52.574485 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f59aa69c-6c42-41df-905e-29428d350637-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"f59aa69c-6c42-41df-905e-29428d350637\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:25:52 crc kubenswrapper[4558]: I0120 17:25:52.574941 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f59aa69c-6c42-41df-905e-29428d350637-public-tls-certs\") pod \"nova-api-0\" (UID: \"f59aa69c-6c42-41df-905e-29428d350637\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:25:52 crc kubenswrapper[4558]: I0120 17:25:52.576222 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f59aa69c-6c42-41df-905e-29428d350637-internal-tls-certs\") pod \"nova-api-0\" (UID: \"f59aa69c-6c42-41df-905e-29428d350637\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:25:52 crc kubenswrapper[4558]: I0120 17:25:52.589393 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d656c1d3-89ff-4957-9513-eb75a80c1c00" path="/var/lib/kubelet/pods/d656c1d3-89ff-4957-9513-eb75a80c1c00/volumes" Jan 20 17:25:52 crc kubenswrapper[4558]: I0120 17:25:52.590113 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cdstn\" (UniqueName: \"kubernetes.io/projected/f59aa69c-6c42-41df-905e-29428d350637-kube-api-access-cdstn\") pod \"nova-api-0\" (UID: \"f59aa69c-6c42-41df-905e-29428d350637\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:25:52 crc kubenswrapper[4558]: I0120 17:25:52.604451 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f59aa69c-6c42-41df-905e-29428d350637-config-data\") pod \"nova-api-0\" (UID: \"f59aa69c-6c42-41df-905e-29428d350637\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:25:52 crc kubenswrapper[4558]: I0120 17:25:52.781097 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:25:53 crc kubenswrapper[4558]: W0120 17:25:53.172420 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf59aa69c_6c42_41df_905e_29428d350637.slice/crio-13fa9f145203a0f248c5638d0ca972e0ba8060df5ceeb7756420262c280d6194 WatchSource:0}: Error finding container 13fa9f145203a0f248c5638d0ca972e0ba8060df5ceeb7756420262c280d6194: Status 404 returned error can't find the container with id 13fa9f145203a0f248c5638d0ca972e0ba8060df5ceeb7756420262c280d6194 Jan 20 17:25:53 crc kubenswrapper[4558]: I0120 17:25:53.175364 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:25:53 crc kubenswrapper[4558]: I0120 17:25:53.408527 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"f59aa69c-6c42-41df-905e-29428d350637","Type":"ContainerStarted","Data":"afb4cc76ebcd4dd7b7fd5a1a99c969b9bf8219683eba7336e506d3069b495f83"} Jan 20 17:25:53 crc kubenswrapper[4558]: I0120 17:25:53.408758 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"f59aa69c-6c42-41df-905e-29428d350637","Type":"ContainerStarted","Data":"13fa9f145203a0f248c5638d0ca972e0ba8060df5ceeb7756420262c280d6194"} Jan 20 17:25:54 crc kubenswrapper[4558]: I0120 17:25:54.421886 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"f59aa69c-6c42-41df-905e-29428d350637","Type":"ContainerStarted","Data":"a58e1fbb49aa21c6ab202f7e097494e1cdb0e9dcdffc1b04bb2b93be7a95a03f"} Jan 20 17:25:54 crc kubenswrapper[4558]: I0120 17:25:54.452682 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-api-0" podStartSLOduration=2.452667693 podStartE2EDuration="2.452667693s" podCreationTimestamp="2026-01-20 17:25:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:25:54.445605936 +0000 UTC m=+2648.205943894" watchObservedRunningTime="2026-01-20 17:25:54.452667693 +0000 UTC m=+2648.213005660" Jan 20 17:25:55 crc kubenswrapper[4558]: I0120 17:25:55.759829 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:25:55 crc kubenswrapper[4558]: I0120 17:25:55.759879 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:25:55 crc kubenswrapper[4558]: I0120 17:25:55.769461 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:25:57 crc kubenswrapper[4558]: I0120 17:25:57.331082 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 17:25:57 crc kubenswrapper[4558]: I0120 17:25:57.331234 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 17:26:00 crc 
kubenswrapper[4558]: I0120 17:26:00.759851 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:26:00 crc kubenswrapper[4558]: I0120 17:26:00.760391 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:26:00 crc kubenswrapper[4558]: I0120 17:26:00.769329 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:26:00 crc kubenswrapper[4558]: I0120 17:26:00.795987 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:26:01 crc kubenswrapper[4558]: I0120 17:26:01.526879 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:26:01 crc kubenswrapper[4558]: I0120 17:26:01.777323 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-metadata-0" podUID="0370f524-0608-45c9-a4d4-efca3a6bbc4a" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.1.56:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:26:01 crc kubenswrapper[4558]: I0120 17:26:01.777372 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-metadata-0" podUID="0370f524-0608-45c9-a4d4-efca3a6bbc4a" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.1.56:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:26:02 crc kubenswrapper[4558]: I0120 17:26:02.781836 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:26:02 crc kubenswrapper[4558]: I0120 17:26:02.782243 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:26:03 crc kubenswrapper[4558]: I0120 17:26:03.800312 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" podUID="f59aa69c-6c42-41df-905e-29428d350637" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.1.58:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:26:03 crc kubenswrapper[4558]: I0120 17:26:03.800316 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" podUID="f59aa69c-6c42-41df-905e-29428d350637" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.1.58:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:26:04 crc kubenswrapper[4558]: I0120 17:26:04.521129 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:26:07 crc kubenswrapper[4558]: I0120 17:26:07.542007 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:26:07 crc kubenswrapper[4558]: I0120 17:26:07.542807 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/kube-state-metrics-0" podUID="f1f8c221-1f45-40d0-8742-38ab3d5c0d2f" containerName="kube-state-metrics" containerID="cri-o://3361ee4e50c10f2a05b4a510cf99025036c5f5dd140fd944a3939bdc7eb53ec8" gracePeriod=30 Jan 20 17:26:07 crc kubenswrapper[4558]: I0120 17:26:07.940256 4558 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:26:08 crc kubenswrapper[4558]: I0120 17:26:08.104962 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mwbjr\" (UniqueName: \"kubernetes.io/projected/f1f8c221-1f45-40d0-8742-38ab3d5c0d2f-kube-api-access-mwbjr\") pod \"f1f8c221-1f45-40d0-8742-38ab3d5c0d2f\" (UID: \"f1f8c221-1f45-40d0-8742-38ab3d5c0d2f\") " Jan 20 17:26:08 crc kubenswrapper[4558]: I0120 17:26:08.116286 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f1f8c221-1f45-40d0-8742-38ab3d5c0d2f-kube-api-access-mwbjr" (OuterVolumeSpecName: "kube-api-access-mwbjr") pod "f1f8c221-1f45-40d0-8742-38ab3d5c0d2f" (UID: "f1f8c221-1f45-40d0-8742-38ab3d5c0d2f"). InnerVolumeSpecName "kube-api-access-mwbjr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:26:08 crc kubenswrapper[4558]: I0120 17:26:08.208600 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mwbjr\" (UniqueName: \"kubernetes.io/projected/f1f8c221-1f45-40d0-8742-38ab3d5c0d2f-kube-api-access-mwbjr\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:08 crc kubenswrapper[4558]: I0120 17:26:08.566984 4558 generic.go:334] "Generic (PLEG): container finished" podID="f1f8c221-1f45-40d0-8742-38ab3d5c0d2f" containerID="3361ee4e50c10f2a05b4a510cf99025036c5f5dd140fd944a3939bdc7eb53ec8" exitCode=2 Jan 20 17:26:08 crc kubenswrapper[4558]: I0120 17:26:08.567109 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:26:08 crc kubenswrapper[4558]: I0120 17:26:08.581028 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"f1f8c221-1f45-40d0-8742-38ab3d5c0d2f","Type":"ContainerDied","Data":"3361ee4e50c10f2a05b4a510cf99025036c5f5dd140fd944a3939bdc7eb53ec8"} Jan 20 17:26:08 crc kubenswrapper[4558]: I0120 17:26:08.581108 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"f1f8c221-1f45-40d0-8742-38ab3d5c0d2f","Type":"ContainerDied","Data":"c66f9fe5d0946bf8a3b4cc1770194beb42716b50d5c67d582af5dd099ce6b6d0"} Jan 20 17:26:08 crc kubenswrapper[4558]: I0120 17:26:08.581144 4558 scope.go:117] "RemoveContainer" containerID="3361ee4e50c10f2a05b4a510cf99025036c5f5dd140fd944a3939bdc7eb53ec8" Jan 20 17:26:08 crc kubenswrapper[4558]: I0120 17:26:08.602830 4558 scope.go:117] "RemoveContainer" containerID="3361ee4e50c10f2a05b4a510cf99025036c5f5dd140fd944a3939bdc7eb53ec8" Jan 20 17:26:08 crc kubenswrapper[4558]: E0120 17:26:08.603462 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3361ee4e50c10f2a05b4a510cf99025036c5f5dd140fd944a3939bdc7eb53ec8\": container with ID starting with 3361ee4e50c10f2a05b4a510cf99025036c5f5dd140fd944a3939bdc7eb53ec8 not found: ID does not exist" containerID="3361ee4e50c10f2a05b4a510cf99025036c5f5dd140fd944a3939bdc7eb53ec8" Jan 20 17:26:08 crc kubenswrapper[4558]: I0120 17:26:08.603520 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3361ee4e50c10f2a05b4a510cf99025036c5f5dd140fd944a3939bdc7eb53ec8"} err="failed to get container status \"3361ee4e50c10f2a05b4a510cf99025036c5f5dd140fd944a3939bdc7eb53ec8\": rpc error: code = NotFound desc = could not find container 
\"3361ee4e50c10f2a05b4a510cf99025036c5f5dd140fd944a3939bdc7eb53ec8\": container with ID starting with 3361ee4e50c10f2a05b4a510cf99025036c5f5dd140fd944a3939bdc7eb53ec8 not found: ID does not exist" Jan 20 17:26:08 crc kubenswrapper[4558]: I0120 17:26:08.619363 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:26:08 crc kubenswrapper[4558]: I0120 17:26:08.638767 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:26:08 crc kubenswrapper[4558]: I0120 17:26:08.646892 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:26:08 crc kubenswrapper[4558]: E0120 17:26:08.647362 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1f8c221-1f45-40d0-8742-38ab3d5c0d2f" containerName="kube-state-metrics" Jan 20 17:26:08 crc kubenswrapper[4558]: I0120 17:26:08.647380 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1f8c221-1f45-40d0-8742-38ab3d5c0d2f" containerName="kube-state-metrics" Jan 20 17:26:08 crc kubenswrapper[4558]: I0120 17:26:08.647606 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f1f8c221-1f45-40d0-8742-38ab3d5c0d2f" containerName="kube-state-metrics" Jan 20 17:26:08 crc kubenswrapper[4558]: I0120 17:26:08.648446 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:26:08 crc kubenswrapper[4558]: I0120 17:26:08.650330 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-kube-state-metrics-svc" Jan 20 17:26:08 crc kubenswrapper[4558]: I0120 17:26:08.650540 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"kube-state-metrics-tls-config" Jan 20 17:26:08 crc kubenswrapper[4558]: I0120 17:26:08.657012 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:26:08 crc kubenswrapper[4558]: I0120 17:26:08.717807 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/91cc01be-13ba-41d9-9060-a2cfacdb2f74-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"91cc01be-13ba-41d9-9060-a2cfacdb2f74\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:26:08 crc kubenswrapper[4558]: I0120 17:26:08.718009 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91cc01be-13ba-41d9-9060-a2cfacdb2f74-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"91cc01be-13ba-41d9-9060-a2cfacdb2f74\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:26:08 crc kubenswrapper[4558]: I0120 17:26:08.718155 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kffjm\" (UniqueName: \"kubernetes.io/projected/91cc01be-13ba-41d9-9060-a2cfacdb2f74-kube-api-access-kffjm\") pod \"kube-state-metrics-0\" (UID: \"91cc01be-13ba-41d9-9060-a2cfacdb2f74\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:26:08 crc kubenswrapper[4558]: I0120 17:26:08.718288 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/91cc01be-13ba-41d9-9060-a2cfacdb2f74-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"91cc01be-13ba-41d9-9060-a2cfacdb2f74\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:26:08 crc kubenswrapper[4558]: I0120 17:26:08.820246 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/91cc01be-13ba-41d9-9060-a2cfacdb2f74-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"91cc01be-13ba-41d9-9060-a2cfacdb2f74\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:26:08 crc kubenswrapper[4558]: I0120 17:26:08.820312 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/91cc01be-13ba-41d9-9060-a2cfacdb2f74-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"91cc01be-13ba-41d9-9060-a2cfacdb2f74\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:26:08 crc kubenswrapper[4558]: I0120 17:26:08.820369 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91cc01be-13ba-41d9-9060-a2cfacdb2f74-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"91cc01be-13ba-41d9-9060-a2cfacdb2f74\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:26:08 crc kubenswrapper[4558]: I0120 17:26:08.820422 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kffjm\" (UniqueName: \"kubernetes.io/projected/91cc01be-13ba-41d9-9060-a2cfacdb2f74-kube-api-access-kffjm\") pod \"kube-state-metrics-0\" (UID: \"91cc01be-13ba-41d9-9060-a2cfacdb2f74\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:26:08 crc kubenswrapper[4558]: I0120 17:26:08.826288 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/91cc01be-13ba-41d9-9060-a2cfacdb2f74-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"91cc01be-13ba-41d9-9060-a2cfacdb2f74\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:26:08 crc kubenswrapper[4558]: I0120 17:26:08.826315 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/91cc01be-13ba-41d9-9060-a2cfacdb2f74-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"91cc01be-13ba-41d9-9060-a2cfacdb2f74\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:26:08 crc kubenswrapper[4558]: I0120 17:26:08.831323 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91cc01be-13ba-41d9-9060-a2cfacdb2f74-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"91cc01be-13ba-41d9-9060-a2cfacdb2f74\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:26:08 crc kubenswrapper[4558]: I0120 17:26:08.837390 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kffjm\" (UniqueName: \"kubernetes.io/projected/91cc01be-13ba-41d9-9060-a2cfacdb2f74-kube-api-access-kffjm\") pod \"kube-state-metrics-0\" (UID: \"91cc01be-13ba-41d9-9060-a2cfacdb2f74\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:26:08 crc kubenswrapper[4558]: I0120 17:26:08.921109 4558 scope.go:117] "RemoveContainer" 
containerID="fca2ff1c6569ac1015ba889896d9030e224df70272a9e9b20388cfb82fa616ca" Jan 20 17:26:08 crc kubenswrapper[4558]: I0120 17:26:08.965280 4558 scope.go:117] "RemoveContainer" containerID="28a7103e4c02a6e13470eb63cb676ead463e061f88b33398c0d422e2998d9f49" Jan 20 17:26:08 crc kubenswrapper[4558]: I0120 17:26:08.970067 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:26:08 crc kubenswrapper[4558]: I0120 17:26:08.991749 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:26:08 crc kubenswrapper[4558]: I0120 17:26:08.992033 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="a305abbb-1d88-4c6d-b2c7-fd650b2319da" containerName="ceilometer-central-agent" containerID="cri-o://1a94c40b8f00dbb1c00df7d36522e0c283e8c28256ab095351cac69f47b9d73e" gracePeriod=30 Jan 20 17:26:08 crc kubenswrapper[4558]: I0120 17:26:08.992100 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="a305abbb-1d88-4c6d-b2c7-fd650b2319da" containerName="ceilometer-notification-agent" containerID="cri-o://a934572f65febc707d4a14663a61a09ad941c67ab32d4fc8d51ae523ef2a571c" gracePeriod=30 Jan 20 17:26:08 crc kubenswrapper[4558]: I0120 17:26:08.992130 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="a305abbb-1d88-4c6d-b2c7-fd650b2319da" containerName="sg-core" containerID="cri-o://445646e163abbea4a88c24f58aca3cd554259abe3dc7ac102e66e976842c674a" gracePeriod=30 Jan 20 17:26:08 crc kubenswrapper[4558]: I0120 17:26:08.992214 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="a305abbb-1d88-4c6d-b2c7-fd650b2319da" containerName="proxy-httpd" containerID="cri-o://8986b81f80be8479e0600f287270ed55401193d48d2351d656e91a1e1e011bec" gracePeriod=30 Jan 20 17:26:09 crc kubenswrapper[4558]: I0120 17:26:09.037477 4558 scope.go:117] "RemoveContainer" containerID="cdc1d8ff5500de6fc7f67919f8800d1bb3a91e560eaa7ae6eb032ebb3cf4ef8f" Jan 20 17:26:09 crc kubenswrapper[4558]: I0120 17:26:09.072346 4558 scope.go:117] "RemoveContainer" containerID="3c17229250a08990f5717d8b2d81a48865a2701c19467dbee26edd7db7249611" Jan 20 17:26:09 crc kubenswrapper[4558]: I0120 17:26:09.119078 4558 scope.go:117] "RemoveContainer" containerID="a3e6795494d00285a308da4e296be3028957ed7dd4cc3ee1924e313b657aaf6d" Jan 20 17:26:09 crc kubenswrapper[4558]: I0120 17:26:09.159036 4558 scope.go:117] "RemoveContainer" containerID="7b5c845f8dfcc82a5614043c05df9578e3d20bc8729179a6b662786a23386251" Jan 20 17:26:09 crc kubenswrapper[4558]: I0120 17:26:09.183856 4558 scope.go:117] "RemoveContainer" containerID="b3a2a6a756812c1e7b932ed4a16bde48b9ef348b603e679ada0e05b62df3feb9" Jan 20 17:26:09 crc kubenswrapper[4558]: I0120 17:26:09.203077 4558 scope.go:117] "RemoveContainer" containerID="138776071c1cd283c04ad3bac5afc2413a017d74f6650ba993ce0dddc48944ab" Jan 20 17:26:09 crc kubenswrapper[4558]: I0120 17:26:09.219851 4558 scope.go:117] "RemoveContainer" containerID="5e4527bd6236a18820049fb582501f32bdd922a11816b5d507d98697a896f50a" Jan 20 17:26:09 crc kubenswrapper[4558]: I0120 17:26:09.455651 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:26:09 crc kubenswrapper[4558]: W0120 17:26:09.467532 4558 
manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod91cc01be_13ba_41d9_9060_a2cfacdb2f74.slice/crio-1f86a59c024a009254b50af1f48aa841486688bd42d1c8fd5e1e0009eb525440 WatchSource:0}: Error finding container 1f86a59c024a009254b50af1f48aa841486688bd42d1c8fd5e1e0009eb525440: Status 404 returned error can't find the container with id 1f86a59c024a009254b50af1f48aa841486688bd42d1c8fd5e1e0009eb525440 Jan 20 17:26:09 crc kubenswrapper[4558]: I0120 17:26:09.580903 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"91cc01be-13ba-41d9-9060-a2cfacdb2f74","Type":"ContainerStarted","Data":"1f86a59c024a009254b50af1f48aa841486688bd42d1c8fd5e1e0009eb525440"} Jan 20 17:26:09 crc kubenswrapper[4558]: I0120 17:26:09.588802 4558 generic.go:334] "Generic (PLEG): container finished" podID="a305abbb-1d88-4c6d-b2c7-fd650b2319da" containerID="8986b81f80be8479e0600f287270ed55401193d48d2351d656e91a1e1e011bec" exitCode=0 Jan 20 17:26:09 crc kubenswrapper[4558]: I0120 17:26:09.588836 4558 generic.go:334] "Generic (PLEG): container finished" podID="a305abbb-1d88-4c6d-b2c7-fd650b2319da" containerID="445646e163abbea4a88c24f58aca3cd554259abe3dc7ac102e66e976842c674a" exitCode=2 Jan 20 17:26:09 crc kubenswrapper[4558]: I0120 17:26:09.588845 4558 generic.go:334] "Generic (PLEG): container finished" podID="a305abbb-1d88-4c6d-b2c7-fd650b2319da" containerID="1a94c40b8f00dbb1c00df7d36522e0c283e8c28256ab095351cac69f47b9d73e" exitCode=0 Jan 20 17:26:09 crc kubenswrapper[4558]: I0120 17:26:09.588877 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"a305abbb-1d88-4c6d-b2c7-fd650b2319da","Type":"ContainerDied","Data":"8986b81f80be8479e0600f287270ed55401193d48d2351d656e91a1e1e011bec"} Jan 20 17:26:09 crc kubenswrapper[4558]: I0120 17:26:09.588912 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"a305abbb-1d88-4c6d-b2c7-fd650b2319da","Type":"ContainerDied","Data":"445646e163abbea4a88c24f58aca3cd554259abe3dc7ac102e66e976842c674a"} Jan 20 17:26:09 crc kubenswrapper[4558]: I0120 17:26:09.588924 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"a305abbb-1d88-4c6d-b2c7-fd650b2319da","Type":"ContainerDied","Data":"1a94c40b8f00dbb1c00df7d36522e0c283e8c28256ab095351cac69f47b9d73e"} Jan 20 17:26:10 crc kubenswrapper[4558]: I0120 17:26:10.578052 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f1f8c221-1f45-40d0-8742-38ab3d5c0d2f" path="/var/lib/kubelet/pods/f1f8c221-1f45-40d0-8742-38ab3d5c0d2f/volumes" Jan 20 17:26:10 crc kubenswrapper[4558]: I0120 17:26:10.601965 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"91cc01be-13ba-41d9-9060-a2cfacdb2f74","Type":"ContainerStarted","Data":"109b1c818a10a94b43307d6a3f5d2e521d77e543f6c41fe1fc8ac78d9590ffb7"} Jan 20 17:26:10 crc kubenswrapper[4558]: I0120 17:26:10.602144 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:26:10 crc kubenswrapper[4558]: I0120 17:26:10.624187 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/kube-state-metrics-0" podStartSLOduration=2.282695196 podStartE2EDuration="2.624158683s" podCreationTimestamp="2026-01-20 17:26:08 +0000 UTC" 
firstStartedPulling="2026-01-20 17:26:09.478286515 +0000 UTC m=+2663.238624482" lastFinishedPulling="2026-01-20 17:26:09.819750003 +0000 UTC m=+2663.580087969" observedRunningTime="2026-01-20 17:26:10.618034268 +0000 UTC m=+2664.378372235" watchObservedRunningTime="2026-01-20 17:26:10.624158683 +0000 UTC m=+2664.384496651" Jan 20 17:26:10 crc kubenswrapper[4558]: I0120 17:26:10.766589 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:26:10 crc kubenswrapper[4558]: I0120 17:26:10.767012 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:26:10 crc kubenswrapper[4558]: I0120 17:26:10.772657 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:26:10 crc kubenswrapper[4558]: I0120 17:26:10.772767 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:26:12 crc kubenswrapper[4558]: I0120 17:26:12.409586 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:26:12 crc kubenswrapper[4558]: I0120 17:26:12.596403 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a305abbb-1d88-4c6d-b2c7-fd650b2319da-sg-core-conf-yaml\") pod \"a305abbb-1d88-4c6d-b2c7-fd650b2319da\" (UID: \"a305abbb-1d88-4c6d-b2c7-fd650b2319da\") " Jan 20 17:26:12 crc kubenswrapper[4558]: I0120 17:26:12.598059 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a305abbb-1d88-4c6d-b2c7-fd650b2319da-log-httpd\") pod \"a305abbb-1d88-4c6d-b2c7-fd650b2319da\" (UID: \"a305abbb-1d88-4c6d-b2c7-fd650b2319da\") " Jan 20 17:26:12 crc kubenswrapper[4558]: I0120 17:26:12.598110 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a305abbb-1d88-4c6d-b2c7-fd650b2319da-scripts\") pod \"a305abbb-1d88-4c6d-b2c7-fd650b2319da\" (UID: \"a305abbb-1d88-4c6d-b2c7-fd650b2319da\") " Jan 20 17:26:12 crc kubenswrapper[4558]: I0120 17:26:12.598152 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a305abbb-1d88-4c6d-b2c7-fd650b2319da-config-data\") pod \"a305abbb-1d88-4c6d-b2c7-fd650b2319da\" (UID: \"a305abbb-1d88-4c6d-b2c7-fd650b2319da\") " Jan 20 17:26:12 crc kubenswrapper[4558]: I0120 17:26:12.598192 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a305abbb-1d88-4c6d-b2c7-fd650b2319da-run-httpd\") pod \"a305abbb-1d88-4c6d-b2c7-fd650b2319da\" (UID: \"a305abbb-1d88-4c6d-b2c7-fd650b2319da\") " Jan 20 17:26:12 crc kubenswrapper[4558]: I0120 17:26:12.598259 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cbcss\" (UniqueName: \"kubernetes.io/projected/a305abbb-1d88-4c6d-b2c7-fd650b2319da-kube-api-access-cbcss\") pod \"a305abbb-1d88-4c6d-b2c7-fd650b2319da\" (UID: \"a305abbb-1d88-4c6d-b2c7-fd650b2319da\") " Jan 20 17:26:12 crc kubenswrapper[4558]: I0120 17:26:12.598280 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/a305abbb-1d88-4c6d-b2c7-fd650b2319da-combined-ca-bundle\") pod \"a305abbb-1d88-4c6d-b2c7-fd650b2319da\" (UID: \"a305abbb-1d88-4c6d-b2c7-fd650b2319da\") " Jan 20 17:26:12 crc kubenswrapper[4558]: I0120 17:26:12.599500 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a305abbb-1d88-4c6d-b2c7-fd650b2319da-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "a305abbb-1d88-4c6d-b2c7-fd650b2319da" (UID: "a305abbb-1d88-4c6d-b2c7-fd650b2319da"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:26:12 crc kubenswrapper[4558]: I0120 17:26:12.599532 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a305abbb-1d88-4c6d-b2c7-fd650b2319da-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "a305abbb-1d88-4c6d-b2c7-fd650b2319da" (UID: "a305abbb-1d88-4c6d-b2c7-fd650b2319da"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:26:12 crc kubenswrapper[4558]: I0120 17:26:12.604854 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a305abbb-1d88-4c6d-b2c7-fd650b2319da-kube-api-access-cbcss" (OuterVolumeSpecName: "kube-api-access-cbcss") pod "a305abbb-1d88-4c6d-b2c7-fd650b2319da" (UID: "a305abbb-1d88-4c6d-b2c7-fd650b2319da"). InnerVolumeSpecName "kube-api-access-cbcss". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:26:12 crc kubenswrapper[4558]: I0120 17:26:12.605250 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a305abbb-1d88-4c6d-b2c7-fd650b2319da-scripts" (OuterVolumeSpecName: "scripts") pod "a305abbb-1d88-4c6d-b2c7-fd650b2319da" (UID: "a305abbb-1d88-4c6d-b2c7-fd650b2319da"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:26:12 crc kubenswrapper[4558]: I0120 17:26:12.624981 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a305abbb-1d88-4c6d-b2c7-fd650b2319da-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "a305abbb-1d88-4c6d-b2c7-fd650b2319da" (UID: "a305abbb-1d88-4c6d-b2c7-fd650b2319da"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:26:12 crc kubenswrapper[4558]: I0120 17:26:12.625885 4558 generic.go:334] "Generic (PLEG): container finished" podID="a305abbb-1d88-4c6d-b2c7-fd650b2319da" containerID="a934572f65febc707d4a14663a61a09ad941c67ab32d4fc8d51ae523ef2a571c" exitCode=0 Jan 20 17:26:12 crc kubenswrapper[4558]: I0120 17:26:12.626034 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:26:12 crc kubenswrapper[4558]: I0120 17:26:12.626126 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"a305abbb-1d88-4c6d-b2c7-fd650b2319da","Type":"ContainerDied","Data":"a934572f65febc707d4a14663a61a09ad941c67ab32d4fc8d51ae523ef2a571c"} Jan 20 17:26:12 crc kubenswrapper[4558]: I0120 17:26:12.626214 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"a305abbb-1d88-4c6d-b2c7-fd650b2319da","Type":"ContainerDied","Data":"76271298e96b648d5dda7ec75c83665d0258ca208f018b1dd34d2707abd79502"} Jan 20 17:26:12 crc kubenswrapper[4558]: I0120 17:26:12.626251 4558 scope.go:117] "RemoveContainer" containerID="8986b81f80be8479e0600f287270ed55401193d48d2351d656e91a1e1e011bec" Jan 20 17:26:12 crc kubenswrapper[4558]: I0120 17:26:12.667743 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a305abbb-1d88-4c6d-b2c7-fd650b2319da-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a305abbb-1d88-4c6d-b2c7-fd650b2319da" (UID: "a305abbb-1d88-4c6d-b2c7-fd650b2319da"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:26:12 crc kubenswrapper[4558]: I0120 17:26:12.686551 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a305abbb-1d88-4c6d-b2c7-fd650b2319da-config-data" (OuterVolumeSpecName: "config-data") pod "a305abbb-1d88-4c6d-b2c7-fd650b2319da" (UID: "a305abbb-1d88-4c6d-b2c7-fd650b2319da"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:26:12 crc kubenswrapper[4558]: I0120 17:26:12.701719 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a305abbb-1d88-4c6d-b2c7-fd650b2319da-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:12 crc kubenswrapper[4558]: I0120 17:26:12.701747 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a305abbb-1d88-4c6d-b2c7-fd650b2319da-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:12 crc kubenswrapper[4558]: I0120 17:26:12.701763 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a305abbb-1d88-4c6d-b2c7-fd650b2319da-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:12 crc kubenswrapper[4558]: I0120 17:26:12.701810 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a305abbb-1d88-4c6d-b2c7-fd650b2319da-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:12 crc kubenswrapper[4558]: I0120 17:26:12.701823 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cbcss\" (UniqueName: \"kubernetes.io/projected/a305abbb-1d88-4c6d-b2c7-fd650b2319da-kube-api-access-cbcss\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:12 crc kubenswrapper[4558]: I0120 17:26:12.701834 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a305abbb-1d88-4c6d-b2c7-fd650b2319da-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:12 crc kubenswrapper[4558]: I0120 17:26:12.701844 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a305abbb-1d88-4c6d-b2c7-fd650b2319da-sg-core-conf-yaml\") 
on node \"crc\" DevicePath \"\"" Jan 20 17:26:12 crc kubenswrapper[4558]: I0120 17:26:12.724994 4558 scope.go:117] "RemoveContainer" containerID="445646e163abbea4a88c24f58aca3cd554259abe3dc7ac102e66e976842c674a" Jan 20 17:26:12 crc kubenswrapper[4558]: I0120 17:26:12.742428 4558 scope.go:117] "RemoveContainer" containerID="a934572f65febc707d4a14663a61a09ad941c67ab32d4fc8d51ae523ef2a571c" Jan 20 17:26:12 crc kubenswrapper[4558]: I0120 17:26:12.757445 4558 scope.go:117] "RemoveContainer" containerID="1a94c40b8f00dbb1c00df7d36522e0c283e8c28256ab095351cac69f47b9d73e" Jan 20 17:26:12 crc kubenswrapper[4558]: I0120 17:26:12.774209 4558 scope.go:117] "RemoveContainer" containerID="8986b81f80be8479e0600f287270ed55401193d48d2351d656e91a1e1e011bec" Jan 20 17:26:12 crc kubenswrapper[4558]: E0120 17:26:12.774518 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8986b81f80be8479e0600f287270ed55401193d48d2351d656e91a1e1e011bec\": container with ID starting with 8986b81f80be8479e0600f287270ed55401193d48d2351d656e91a1e1e011bec not found: ID does not exist" containerID="8986b81f80be8479e0600f287270ed55401193d48d2351d656e91a1e1e011bec" Jan 20 17:26:12 crc kubenswrapper[4558]: I0120 17:26:12.774553 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8986b81f80be8479e0600f287270ed55401193d48d2351d656e91a1e1e011bec"} err="failed to get container status \"8986b81f80be8479e0600f287270ed55401193d48d2351d656e91a1e1e011bec\": rpc error: code = NotFound desc = could not find container \"8986b81f80be8479e0600f287270ed55401193d48d2351d656e91a1e1e011bec\": container with ID starting with 8986b81f80be8479e0600f287270ed55401193d48d2351d656e91a1e1e011bec not found: ID does not exist" Jan 20 17:26:12 crc kubenswrapper[4558]: I0120 17:26:12.774577 4558 scope.go:117] "RemoveContainer" containerID="445646e163abbea4a88c24f58aca3cd554259abe3dc7ac102e66e976842c674a" Jan 20 17:26:12 crc kubenswrapper[4558]: E0120 17:26:12.774870 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"445646e163abbea4a88c24f58aca3cd554259abe3dc7ac102e66e976842c674a\": container with ID starting with 445646e163abbea4a88c24f58aca3cd554259abe3dc7ac102e66e976842c674a not found: ID does not exist" containerID="445646e163abbea4a88c24f58aca3cd554259abe3dc7ac102e66e976842c674a" Jan 20 17:26:12 crc kubenswrapper[4558]: I0120 17:26:12.774890 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"445646e163abbea4a88c24f58aca3cd554259abe3dc7ac102e66e976842c674a"} err="failed to get container status \"445646e163abbea4a88c24f58aca3cd554259abe3dc7ac102e66e976842c674a\": rpc error: code = NotFound desc = could not find container \"445646e163abbea4a88c24f58aca3cd554259abe3dc7ac102e66e976842c674a\": container with ID starting with 445646e163abbea4a88c24f58aca3cd554259abe3dc7ac102e66e976842c674a not found: ID does not exist" Jan 20 17:26:12 crc kubenswrapper[4558]: I0120 17:26:12.774907 4558 scope.go:117] "RemoveContainer" containerID="a934572f65febc707d4a14663a61a09ad941c67ab32d4fc8d51ae523ef2a571c" Jan 20 17:26:12 crc kubenswrapper[4558]: E0120 17:26:12.775212 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a934572f65febc707d4a14663a61a09ad941c67ab32d4fc8d51ae523ef2a571c\": container with ID starting with 
a934572f65febc707d4a14663a61a09ad941c67ab32d4fc8d51ae523ef2a571c not found: ID does not exist" containerID="a934572f65febc707d4a14663a61a09ad941c67ab32d4fc8d51ae523ef2a571c" Jan 20 17:26:12 crc kubenswrapper[4558]: I0120 17:26:12.775235 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a934572f65febc707d4a14663a61a09ad941c67ab32d4fc8d51ae523ef2a571c"} err="failed to get container status \"a934572f65febc707d4a14663a61a09ad941c67ab32d4fc8d51ae523ef2a571c\": rpc error: code = NotFound desc = could not find container \"a934572f65febc707d4a14663a61a09ad941c67ab32d4fc8d51ae523ef2a571c\": container with ID starting with a934572f65febc707d4a14663a61a09ad941c67ab32d4fc8d51ae523ef2a571c not found: ID does not exist" Jan 20 17:26:12 crc kubenswrapper[4558]: I0120 17:26:12.775249 4558 scope.go:117] "RemoveContainer" containerID="1a94c40b8f00dbb1c00df7d36522e0c283e8c28256ab095351cac69f47b9d73e" Jan 20 17:26:12 crc kubenswrapper[4558]: E0120 17:26:12.775569 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1a94c40b8f00dbb1c00df7d36522e0c283e8c28256ab095351cac69f47b9d73e\": container with ID starting with 1a94c40b8f00dbb1c00df7d36522e0c283e8c28256ab095351cac69f47b9d73e not found: ID does not exist" containerID="1a94c40b8f00dbb1c00df7d36522e0c283e8c28256ab095351cac69f47b9d73e" Jan 20 17:26:12 crc kubenswrapper[4558]: I0120 17:26:12.775587 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1a94c40b8f00dbb1c00df7d36522e0c283e8c28256ab095351cac69f47b9d73e"} err="failed to get container status \"1a94c40b8f00dbb1c00df7d36522e0c283e8c28256ab095351cac69f47b9d73e\": rpc error: code = NotFound desc = could not find container \"1a94c40b8f00dbb1c00df7d36522e0c283e8c28256ab095351cac69f47b9d73e\": container with ID starting with 1a94c40b8f00dbb1c00df7d36522e0c283e8c28256ab095351cac69f47b9d73e not found: ID does not exist" Jan 20 17:26:12 crc kubenswrapper[4558]: I0120 17:26:12.787842 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:26:12 crc kubenswrapper[4558]: I0120 17:26:12.788305 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:26:12 crc kubenswrapper[4558]: I0120 17:26:12.788827 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:26:12 crc kubenswrapper[4558]: I0120 17:26:12.792344 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:26:12 crc kubenswrapper[4558]: I0120 17:26:12.968892 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:26:12 crc kubenswrapper[4558]: I0120 17:26:12.976571 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:26:12 crc kubenswrapper[4558]: I0120 17:26:12.982095 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:26:12 crc kubenswrapper[4558]: E0120 17:26:12.982567 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a305abbb-1d88-4c6d-b2c7-fd650b2319da" containerName="ceilometer-notification-agent" Jan 20 17:26:12 crc kubenswrapper[4558]: I0120 17:26:12.982589 4558 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="a305abbb-1d88-4c6d-b2c7-fd650b2319da" containerName="ceilometer-notification-agent" Jan 20 17:26:12 crc kubenswrapper[4558]: E0120 17:26:12.982606 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a305abbb-1d88-4c6d-b2c7-fd650b2319da" containerName="sg-core" Jan 20 17:26:12 crc kubenswrapper[4558]: I0120 17:26:12.982617 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a305abbb-1d88-4c6d-b2c7-fd650b2319da" containerName="sg-core" Jan 20 17:26:12 crc kubenswrapper[4558]: E0120 17:26:12.982637 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a305abbb-1d88-4c6d-b2c7-fd650b2319da" containerName="ceilometer-central-agent" Jan 20 17:26:12 crc kubenswrapper[4558]: I0120 17:26:12.982644 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a305abbb-1d88-4c6d-b2c7-fd650b2319da" containerName="ceilometer-central-agent" Jan 20 17:26:12 crc kubenswrapper[4558]: E0120 17:26:12.982651 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a305abbb-1d88-4c6d-b2c7-fd650b2319da" containerName="proxy-httpd" Jan 20 17:26:12 crc kubenswrapper[4558]: I0120 17:26:12.982657 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a305abbb-1d88-4c6d-b2c7-fd650b2319da" containerName="proxy-httpd" Jan 20 17:26:12 crc kubenswrapper[4558]: I0120 17:26:12.982894 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a305abbb-1d88-4c6d-b2c7-fd650b2319da" containerName="ceilometer-notification-agent" Jan 20 17:26:12 crc kubenswrapper[4558]: I0120 17:26:12.982918 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a305abbb-1d88-4c6d-b2c7-fd650b2319da" containerName="sg-core" Jan 20 17:26:12 crc kubenswrapper[4558]: I0120 17:26:12.982933 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a305abbb-1d88-4c6d-b2c7-fd650b2319da" containerName="ceilometer-central-agent" Jan 20 17:26:12 crc kubenswrapper[4558]: I0120 17:26:12.982952 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a305abbb-1d88-4c6d-b2c7-fd650b2319da" containerName="proxy-httpd" Jan 20 17:26:12 crc kubenswrapper[4558]: I0120 17:26:12.985221 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:26:12 crc kubenswrapper[4558]: I0120 17:26:12.986741 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:26:12 crc kubenswrapper[4558]: I0120 17:26:12.987589 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:26:12 crc kubenswrapper[4558]: I0120 17:26:12.988123 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ceilometer-internal-svc" Jan 20 17:26:13 crc kubenswrapper[4558]: I0120 17:26:13.001176 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:26:13 crc kubenswrapper[4558]: I0120 17:26:13.112387 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hhb8n\" (UniqueName: \"kubernetes.io/projected/70698ad2-165d-46a5-b45e-36c4e4b6df8b-kube-api-access-hhb8n\") pod \"ceilometer-0\" (UID: \"70698ad2-165d-46a5-b45e-36c4e4b6df8b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:26:13 crc kubenswrapper[4558]: I0120 17:26:13.112477 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70698ad2-165d-46a5-b45e-36c4e4b6df8b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"70698ad2-165d-46a5-b45e-36c4e4b6df8b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:26:13 crc kubenswrapper[4558]: I0120 17:26:13.112524 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70698ad2-165d-46a5-b45e-36c4e4b6df8b-config-data\") pod \"ceilometer-0\" (UID: \"70698ad2-165d-46a5-b45e-36c4e4b6df8b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:26:13 crc kubenswrapper[4558]: I0120 17:26:13.112552 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/70698ad2-165d-46a5-b45e-36c4e4b6df8b-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"70698ad2-165d-46a5-b45e-36c4e4b6df8b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:26:13 crc kubenswrapper[4558]: I0120 17:26:13.112577 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/70698ad2-165d-46a5-b45e-36c4e4b6df8b-run-httpd\") pod \"ceilometer-0\" (UID: \"70698ad2-165d-46a5-b45e-36c4e4b6df8b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:26:13 crc kubenswrapper[4558]: I0120 17:26:13.112617 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/70698ad2-165d-46a5-b45e-36c4e4b6df8b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"70698ad2-165d-46a5-b45e-36c4e4b6df8b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:26:13 crc kubenswrapper[4558]: I0120 17:26:13.112689 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/70698ad2-165d-46a5-b45e-36c4e4b6df8b-scripts\") pod \"ceilometer-0\" (UID: \"70698ad2-165d-46a5-b45e-36c4e4b6df8b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:26:13 crc kubenswrapper[4558]: I0120 17:26:13.112716 4558 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/70698ad2-165d-46a5-b45e-36c4e4b6df8b-log-httpd\") pod \"ceilometer-0\" (UID: \"70698ad2-165d-46a5-b45e-36c4e4b6df8b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:26:13 crc kubenswrapper[4558]: I0120 17:26:13.214932 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/70698ad2-165d-46a5-b45e-36c4e4b6df8b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"70698ad2-165d-46a5-b45e-36c4e4b6df8b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:26:13 crc kubenswrapper[4558]: I0120 17:26:13.214992 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/70698ad2-165d-46a5-b45e-36c4e4b6df8b-scripts\") pod \"ceilometer-0\" (UID: \"70698ad2-165d-46a5-b45e-36c4e4b6df8b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:26:13 crc kubenswrapper[4558]: I0120 17:26:13.215032 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/70698ad2-165d-46a5-b45e-36c4e4b6df8b-log-httpd\") pod \"ceilometer-0\" (UID: \"70698ad2-165d-46a5-b45e-36c4e4b6df8b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:26:13 crc kubenswrapper[4558]: I0120 17:26:13.215103 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hhb8n\" (UniqueName: \"kubernetes.io/projected/70698ad2-165d-46a5-b45e-36c4e4b6df8b-kube-api-access-hhb8n\") pod \"ceilometer-0\" (UID: \"70698ad2-165d-46a5-b45e-36c4e4b6df8b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:26:13 crc kubenswrapper[4558]: I0120 17:26:13.215287 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70698ad2-165d-46a5-b45e-36c4e4b6df8b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"70698ad2-165d-46a5-b45e-36c4e4b6df8b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:26:13 crc kubenswrapper[4558]: I0120 17:26:13.215360 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70698ad2-165d-46a5-b45e-36c4e4b6df8b-config-data\") pod \"ceilometer-0\" (UID: \"70698ad2-165d-46a5-b45e-36c4e4b6df8b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:26:13 crc kubenswrapper[4558]: I0120 17:26:13.215420 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/70698ad2-165d-46a5-b45e-36c4e4b6df8b-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"70698ad2-165d-46a5-b45e-36c4e4b6df8b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:26:13 crc kubenswrapper[4558]: I0120 17:26:13.215451 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/70698ad2-165d-46a5-b45e-36c4e4b6df8b-run-httpd\") pod \"ceilometer-0\" (UID: \"70698ad2-165d-46a5-b45e-36c4e4b6df8b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:26:13 crc kubenswrapper[4558]: I0120 17:26:13.215845 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/70698ad2-165d-46a5-b45e-36c4e4b6df8b-log-httpd\") pod \"ceilometer-0\" (UID: \"70698ad2-165d-46a5-b45e-36c4e4b6df8b\") 
" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:26:13 crc kubenswrapper[4558]: I0120 17:26:13.215969 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/70698ad2-165d-46a5-b45e-36c4e4b6df8b-run-httpd\") pod \"ceilometer-0\" (UID: \"70698ad2-165d-46a5-b45e-36c4e4b6df8b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:26:13 crc kubenswrapper[4558]: I0120 17:26:13.219462 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70698ad2-165d-46a5-b45e-36c4e4b6df8b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"70698ad2-165d-46a5-b45e-36c4e4b6df8b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:26:13 crc kubenswrapper[4558]: I0120 17:26:13.220035 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/70698ad2-165d-46a5-b45e-36c4e4b6df8b-scripts\") pod \"ceilometer-0\" (UID: \"70698ad2-165d-46a5-b45e-36c4e4b6df8b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:26:13 crc kubenswrapper[4558]: I0120 17:26:13.220153 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/70698ad2-165d-46a5-b45e-36c4e4b6df8b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"70698ad2-165d-46a5-b45e-36c4e4b6df8b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:26:13 crc kubenswrapper[4558]: I0120 17:26:13.220586 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/70698ad2-165d-46a5-b45e-36c4e4b6df8b-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"70698ad2-165d-46a5-b45e-36c4e4b6df8b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:26:13 crc kubenswrapper[4558]: I0120 17:26:13.223044 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70698ad2-165d-46a5-b45e-36c4e4b6df8b-config-data\") pod \"ceilometer-0\" (UID: \"70698ad2-165d-46a5-b45e-36c4e4b6df8b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:26:13 crc kubenswrapper[4558]: I0120 17:26:13.232868 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hhb8n\" (UniqueName: \"kubernetes.io/projected/70698ad2-165d-46a5-b45e-36c4e4b6df8b-kube-api-access-hhb8n\") pod \"ceilometer-0\" (UID: \"70698ad2-165d-46a5-b45e-36c4e4b6df8b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:26:13 crc kubenswrapper[4558]: I0120 17:26:13.305286 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:26:13 crc kubenswrapper[4558]: I0120 17:26:13.639054 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:26:13 crc kubenswrapper[4558]: I0120 17:26:13.646305 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:26:13 crc kubenswrapper[4558]: I0120 17:26:13.729152 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:26:14 crc kubenswrapper[4558]: I0120 17:26:14.600406 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a305abbb-1d88-4c6d-b2c7-fd650b2319da" path="/var/lib/kubelet/pods/a305abbb-1d88-4c6d-b2c7-fd650b2319da/volumes" Jan 20 17:26:14 crc kubenswrapper[4558]: I0120 17:26:14.659708 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"70698ad2-165d-46a5-b45e-36c4e4b6df8b","Type":"ContainerStarted","Data":"a20ca3fd607ddf7da770b12d270ee3f61e807356eaf73adac4d94547ae33b6e8"} Jan 20 17:26:15 crc kubenswrapper[4558]: I0120 17:26:15.672805 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"70698ad2-165d-46a5-b45e-36c4e4b6df8b","Type":"ContainerStarted","Data":"44fb41a2f181b567d2295b6640d53ad267beb9330b20f34e2b79151e8226e822"} Jan 20 17:26:15 crc kubenswrapper[4558]: I0120 17:26:15.673849 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"70698ad2-165d-46a5-b45e-36c4e4b6df8b","Type":"ContainerStarted","Data":"ea36ed48297a2a2bcb917d936cd3d2d844aa2faeacef821937a3bd537eb2e4ba"} Jan 20 17:26:16 crc kubenswrapper[4558]: I0120 17:26:16.683187 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"70698ad2-165d-46a5-b45e-36c4e4b6df8b","Type":"ContainerStarted","Data":"ebd5e020fe9ccfd450a30b510c61df09172d803a72760f9b96ab98868d87562f"} Jan 20 17:26:18 crc kubenswrapper[4558]: I0120 17:26:18.733638 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"70698ad2-165d-46a5-b45e-36c4e4b6df8b","Type":"ContainerStarted","Data":"b55f709aed6d1e7f660b6aa1c679633da050ee695636dacbeec2bc9d9cacc0ef"} Jan 20 17:26:18 crc kubenswrapper[4558]: I0120 17:26:18.734054 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:26:18 crc kubenswrapper[4558]: I0120 17:26:18.770402 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=2.455225584 podStartE2EDuration="6.770380498s" podCreationTimestamp="2026-01-20 17:26:12 +0000 UTC" firstStartedPulling="2026-01-20 17:26:13.734606214 +0000 UTC m=+2667.494944181" lastFinishedPulling="2026-01-20 17:26:18.049761127 +0000 UTC m=+2671.810099095" observedRunningTime="2026-01-20 17:26:18.760089991 +0000 UTC m=+2672.520427948" watchObservedRunningTime="2026-01-20 17:26:18.770380498 +0000 UTC m=+2672.530718465" Jan 20 17:26:18 crc kubenswrapper[4558]: I0120 17:26:18.978644 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:26:27 crc kubenswrapper[4558]: I0120 17:26:27.329841 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon 
namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 17:26:27 crc kubenswrapper[4558]: I0120 17:26:27.330716 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 17:26:27 crc kubenswrapper[4558]: I0120 17:26:27.330802 4558 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" Jan 20 17:26:27 crc kubenswrapper[4558]: I0120 17:26:27.332046 4558 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"2d144cfe8b47aa78680f1723838f69b34b4c8966312c6a5804d87ce86bc40c18"} pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 20 17:26:27 crc kubenswrapper[4558]: I0120 17:26:27.332124 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" containerID="cri-o://2d144cfe8b47aa78680f1723838f69b34b4c8966312c6a5804d87ce86bc40c18" gracePeriod=600 Jan 20 17:26:27 crc kubenswrapper[4558]: I0120 17:26:27.832864 4558 generic.go:334] "Generic (PLEG): container finished" podID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerID="2d144cfe8b47aa78680f1723838f69b34b4c8966312c6a5804d87ce86bc40c18" exitCode=0 Jan 20 17:26:27 crc kubenswrapper[4558]: I0120 17:26:27.832955 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerDied","Data":"2d144cfe8b47aa78680f1723838f69b34b4c8966312c6a5804d87ce86bc40c18"} Jan 20 17:26:27 crc kubenswrapper[4558]: I0120 17:26:27.833400 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerStarted","Data":"f275e9f5de330b3c79316d5871106c6bcf90b698e0744c5beb28da45c336b36d"} Jan 20 17:26:27 crc kubenswrapper[4558]: I0120 17:26:27.833478 4558 scope.go:117] "RemoveContainer" containerID="0ffce44366a8b4466427b682fb41640425366a0f4449210959148de9aef695c6" Jan 20 17:26:43 crc kubenswrapper[4558]: I0120 17:26:43.314761 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:26:46 crc kubenswrapper[4558]: I0120 17:26:46.695998 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:26:46 crc kubenswrapper[4558]: I0120 17:26:46.723383 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:26:46 crc kubenswrapper[4558]: I0120 17:26:46.733215 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:26:46 crc kubenswrapper[4558]: I0120 17:26:46.733483 4558 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack-kuttl-tests/memcached-0" podUID="987d6a20-4fc9-411e-ae1f-9ff5dd78e80d" containerName="memcached" containerID="cri-o://9e7a56b68f74368ebdccf3a2050688f4660eec04ee70f20c2816ab7ef47f5d08" gracePeriod=30 Jan 20 17:26:46 crc kubenswrapper[4558]: I0120 17:26:46.770664 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:26:46 crc kubenswrapper[4558]: I0120 17:26:46.844751 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-0"] Jan 20 17:26:46 crc kubenswrapper[4558]: I0120 17:26:46.879158 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-db-sync-x2fdq"] Jan 20 17:26:46 crc kubenswrapper[4558]: I0120 17:26:46.889547 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-db-sync-x2fdq"] Jan 20 17:26:46 crc kubenswrapper[4558]: I0120 17:26:46.919620 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-db-sync-tbwhp"] Jan 20 17:26:46 crc kubenswrapper[4558]: I0120 17:26:46.927855 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/placement-db-sync-tbwhp"] Jan 20 17:26:46 crc kubenswrapper[4558]: I0120 17:26:46.933965 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-db-sync-xq5zh"] Jan 20 17:26:46 crc kubenswrapper[4558]: I0120 17:26:46.935440 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-db-sync-xq5zh" Jan 20 17:26:46 crc kubenswrapper[4558]: I0120 17:26:46.943092 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-db-sync-xq5zh"] Jan 20 17:26:46 crc kubenswrapper[4558]: I0120 17:26:46.993674 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29c1b3a9-0bdc-423c-af7f-42f86f3693b5-combined-ca-bundle\") pod \"keystone-db-sync-xq5zh\" (UID: \"29c1b3a9-0bdc-423c-af7f-42f86f3693b5\") " pod="openstack-kuttl-tests/keystone-db-sync-xq5zh" Jan 20 17:26:46 crc kubenswrapper[4558]: I0120 17:26:46.993767 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29c1b3a9-0bdc-423c-af7f-42f86f3693b5-config-data\") pod \"keystone-db-sync-xq5zh\" (UID: \"29c1b3a9-0bdc-423c-af7f-42f86f3693b5\") " pod="openstack-kuttl-tests/keystone-db-sync-xq5zh" Jan 20 17:26:46 crc kubenswrapper[4558]: I0120 17:26:46.993876 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wxn2h\" (UniqueName: \"kubernetes.io/projected/29c1b3a9-0bdc-423c-af7f-42f86f3693b5-kube-api-access-wxn2h\") pod \"keystone-db-sync-xq5zh\" (UID: \"29c1b3a9-0bdc-423c-af7f-42f86f3693b5\") " pod="openstack-kuttl-tests/keystone-db-sync-xq5zh" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.007009 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-db-sync-q78xq"] Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.012863 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-db-sync-q78xq"] Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.038854 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/openstack-cell1-galera-0" 
podUID="d274ba80-2e42-4823-bbf2-02691e791ec9" containerName="galera" containerID="cri-o://96ef199805699173d17182add49a78cb239dafab7eff49108278d5e224dfb51a" gracePeriod=30 Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.040055 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/placement-db-sync-rmm2x"] Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.041842 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-db-sync-rmm2x" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.055185 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-db-sync-rmm2x"] Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.094718 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c7f6b91b-b55e-4af0-b496-46c80b92bad1-logs\") pod \"placement-db-sync-rmm2x\" (UID: \"c7f6b91b-b55e-4af0-b496-46c80b92bad1\") " pod="openstack-kuttl-tests/placement-db-sync-rmm2x" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.105047 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-db-sync-dpr25"] Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.108227 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7f6b91b-b55e-4af0-b496-46c80b92bad1-combined-ca-bundle\") pod \"placement-db-sync-rmm2x\" (UID: \"c7f6b91b-b55e-4af0-b496-46c80b92bad1\") " pod="openstack-kuttl-tests/placement-db-sync-rmm2x" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.108376 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29c1b3a9-0bdc-423c-af7f-42f86f3693b5-combined-ca-bundle\") pod \"keystone-db-sync-xq5zh\" (UID: \"29c1b3a9-0bdc-423c-af7f-42f86f3693b5\") " pod="openstack-kuttl-tests/keystone-db-sync-xq5zh" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.108485 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29c1b3a9-0bdc-423c-af7f-42f86f3693b5-config-data\") pod \"keystone-db-sync-xq5zh\" (UID: \"29c1b3a9-0bdc-423c-af7f-42f86f3693b5\") " pod="openstack-kuttl-tests/keystone-db-sync-xq5zh" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.108678 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zvxtv\" (UniqueName: \"kubernetes.io/projected/c7f6b91b-b55e-4af0-b496-46c80b92bad1-kube-api-access-zvxtv\") pod \"placement-db-sync-rmm2x\" (UID: \"c7f6b91b-b55e-4af0-b496-46c80b92bad1\") " pod="openstack-kuttl-tests/placement-db-sync-rmm2x" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.109184 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wxn2h\" (UniqueName: \"kubernetes.io/projected/29c1b3a9-0bdc-423c-af7f-42f86f3693b5-kube-api-access-wxn2h\") pod \"keystone-db-sync-xq5zh\" (UID: \"29c1b3a9-0bdc-423c-af7f-42f86f3693b5\") " pod="openstack-kuttl-tests/keystone-db-sync-xq5zh" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.109292 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7f6b91b-b55e-4af0-b496-46c80b92bad1-config-data\") pod 
\"placement-db-sync-rmm2x\" (UID: \"c7f6b91b-b55e-4af0-b496-46c80b92bad1\") " pod="openstack-kuttl-tests/placement-db-sync-rmm2x" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.109423 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c7f6b91b-b55e-4af0-b496-46c80b92bad1-scripts\") pod \"placement-db-sync-rmm2x\" (UID: \"c7f6b91b-b55e-4af0-b496-46c80b92bad1\") " pod="openstack-kuttl-tests/placement-db-sync-rmm2x" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.127495 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.140242 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29c1b3a9-0bdc-423c-af7f-42f86f3693b5-combined-ca-bundle\") pod \"keystone-db-sync-xq5zh\" (UID: \"29c1b3a9-0bdc-423c-af7f-42f86f3693b5\") " pod="openstack-kuttl-tests/keystone-db-sync-xq5zh" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.141558 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29c1b3a9-0bdc-423c-af7f-42f86f3693b5-config-data\") pod \"keystone-db-sync-xq5zh\" (UID: \"29c1b3a9-0bdc-423c-af7f-42f86f3693b5\") " pod="openstack-kuttl-tests/keystone-db-sync-xq5zh" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.142305 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovsdbserver-sb-0" podUID="63766878-2a9a-47b6-9209-554b59500f10" containerName="openstack-network-exporter" containerID="cri-o://f69a90baad4062187a2630067e5b5be8fe21f3f7ef14d41a5c46fea09bbfc6d4" gracePeriod=300 Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.142961 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wxn2h\" (UniqueName: \"kubernetes.io/projected/29c1b3a9-0bdc-423c-af7f-42f86f3693b5-kube-api-access-wxn2h\") pod \"keystone-db-sync-xq5zh\" (UID: \"29c1b3a9-0bdc-423c-af7f-42f86f3693b5\") " pod="openstack-kuttl-tests/keystone-db-sync-xq5zh" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.145639 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-db-sync-dpr25"] Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.195079 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/openstack-galera-0" podUID="9db3acdd-184a-4004-a8d3-451673126318" containerName="galera" containerID="cri-o://74db7b040a7e0441aa4205a49b98fee925418b1a898081b635b0efcdf2e08d5a" gracePeriod=30 Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.199567 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.205500 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovn-northd-0" podUID="c08cb973-6eb3-414d-b127-fc146b0fb1f2" containerName="ovn-northd" containerID="cri-o://bdc31a24aa03485670883d3ea8d3795dfe70b5c89196347668c72f2086bf2bea" gracePeriod=30 Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.206303 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovn-northd-0" podUID="c08cb973-6eb3-414d-b127-fc146b0fb1f2" containerName="openstack-network-exporter" 
containerID="cri-o://97eae6723cbae5007ecc1c7e159332138d8c92b6e1ffae4831a2b5b8e46b5a7e" gracePeriod=30 Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.242900 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovsdbserver-sb-0" podUID="63766878-2a9a-47b6-9209-554b59500f10" containerName="ovsdbserver-sb" containerID="cri-o://37f931cc076b0de2ba4bd77c0544f81aec55cb5d8ae2f041d7332ba974499651" gracePeriod=300 Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.245373 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-db-sync-gn4wj"] Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.247006 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7f6b91b-b55e-4af0-b496-46c80b92bad1-config-data\") pod \"placement-db-sync-rmm2x\" (UID: \"c7f6b91b-b55e-4af0-b496-46c80b92bad1\") " pod="openstack-kuttl-tests/placement-db-sync-rmm2x" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.247042 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-db-sync-gn4wj" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.247131 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c7f6b91b-b55e-4af0-b496-46c80b92bad1-scripts\") pod \"placement-db-sync-rmm2x\" (UID: \"c7f6b91b-b55e-4af0-b496-46c80b92bad1\") " pod="openstack-kuttl-tests/placement-db-sync-rmm2x" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.247265 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c7f6b91b-b55e-4af0-b496-46c80b92bad1-logs\") pod \"placement-db-sync-rmm2x\" (UID: \"c7f6b91b-b55e-4af0-b496-46c80b92bad1\") " pod="openstack-kuttl-tests/placement-db-sync-rmm2x" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.247294 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7f6b91b-b55e-4af0-b496-46c80b92bad1-combined-ca-bundle\") pod \"placement-db-sync-rmm2x\" (UID: \"c7f6b91b-b55e-4af0-b496-46c80b92bad1\") " pod="openstack-kuttl-tests/placement-db-sync-rmm2x" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.247447 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zvxtv\" (UniqueName: \"kubernetes.io/projected/c7f6b91b-b55e-4af0-b496-46c80b92bad1-kube-api-access-zvxtv\") pod \"placement-db-sync-rmm2x\" (UID: \"c7f6b91b-b55e-4af0-b496-46c80b92bad1\") " pod="openstack-kuttl-tests/placement-db-sync-rmm2x" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.248148 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c7f6b91b-b55e-4af0-b496-46c80b92bad1-logs\") pod \"placement-db-sync-rmm2x\" (UID: \"c7f6b91b-b55e-4af0-b496-46c80b92bad1\") " pod="openstack-kuttl-tests/placement-db-sync-rmm2x" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.250953 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-config-data" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.251713 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7f6b91b-b55e-4af0-b496-46c80b92bad1-combined-ca-bundle\") pod 
\"placement-db-sync-rmm2x\" (UID: \"c7f6b91b-b55e-4af0-b496-46c80b92bad1\") " pod="openstack-kuttl-tests/placement-db-sync-rmm2x" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.253417 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-db-sync-xq5zh" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.260621 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c7f6b91b-b55e-4af0-b496-46c80b92bad1-scripts\") pod \"placement-db-sync-rmm2x\" (UID: \"c7f6b91b-b55e-4af0-b496-46c80b92bad1\") " pod="openstack-kuttl-tests/placement-db-sync-rmm2x" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.267895 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zvxtv\" (UniqueName: \"kubernetes.io/projected/c7f6b91b-b55e-4af0-b496-46c80b92bad1-kube-api-access-zvxtv\") pod \"placement-db-sync-rmm2x\" (UID: \"c7f6b91b-b55e-4af0-b496-46c80b92bad1\") " pod="openstack-kuttl-tests/placement-db-sync-rmm2x" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.278957 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-db-sync-gn4wj"] Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.281818 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7f6b91b-b55e-4af0-b496-46c80b92bad1-config-data\") pod \"placement-db-sync-rmm2x\" (UID: \"c7f6b91b-b55e-4af0-b496-46c80b92bad1\") " pod="openstack-kuttl-tests/placement-db-sync-rmm2x" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.290003 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-db-sync-92t4c"] Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.291494 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-db-sync-92t4c" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.293149 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-db-sync-92t4c"] Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.297578 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-db-sync-plxqg"] Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.307106 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-db-sync-plxqg"] Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.349107 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hqhkn\" (UniqueName: \"kubernetes.io/projected/5f91fe62-3615-4153-a6c7-1652a5780bb5-kube-api-access-hqhkn\") pod \"glance-db-sync-gn4wj\" (UID: \"5f91fe62-3615-4153-a6c7-1652a5780bb5\") " pod="openstack-kuttl-tests/glance-db-sync-gn4wj" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.349398 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f59d0040-b44b-46e3-bb18-c0ea74722cd3-combined-ca-bundle\") pod \"cinder-db-sync-92t4c\" (UID: \"f59d0040-b44b-46e3-bb18-c0ea74722cd3\") " pod="openstack-kuttl-tests/cinder-db-sync-92t4c" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.349552 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f59d0040-b44b-46e3-bb18-c0ea74722cd3-scripts\") pod \"cinder-db-sync-92t4c\" (UID: \"f59d0040-b44b-46e3-bb18-c0ea74722cd3\") " pod="openstack-kuttl-tests/cinder-db-sync-92t4c" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.349680 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5f91fe62-3615-4153-a6c7-1652a5780bb5-db-sync-config-data\") pod \"glance-db-sync-gn4wj\" (UID: \"5f91fe62-3615-4153-a6c7-1652a5780bb5\") " pod="openstack-kuttl-tests/glance-db-sync-gn4wj" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.349755 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f59d0040-b44b-46e3-bb18-c0ea74722cd3-db-sync-config-data\") pod \"cinder-db-sync-92t4c\" (UID: \"f59d0040-b44b-46e3-bb18-c0ea74722cd3\") " pod="openstack-kuttl-tests/cinder-db-sync-92t4c" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.349944 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f59d0040-b44b-46e3-bb18-c0ea74722cd3-config-data\") pod \"cinder-db-sync-92t4c\" (UID: \"f59d0040-b44b-46e3-bb18-c0ea74722cd3\") " pod="openstack-kuttl-tests/cinder-db-sync-92t4c" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.350045 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f91fe62-3615-4153-a6c7-1652a5780bb5-combined-ca-bundle\") pod \"glance-db-sync-gn4wj\" (UID: \"5f91fe62-3615-4153-a6c7-1652a5780bb5\") " pod="openstack-kuttl-tests/glance-db-sync-gn4wj" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.350131 4558 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sk949\" (UniqueName: \"kubernetes.io/projected/f59d0040-b44b-46e3-bb18-c0ea74722cd3-kube-api-access-sk949\") pod \"cinder-db-sync-92t4c\" (UID: \"f59d0040-b44b-46e3-bb18-c0ea74722cd3\") " pod="openstack-kuttl-tests/cinder-db-sync-92t4c" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.350247 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f59d0040-b44b-46e3-bb18-c0ea74722cd3-etc-machine-id\") pod \"cinder-db-sync-92t4c\" (UID: \"f59d0040-b44b-46e3-bb18-c0ea74722cd3\") " pod="openstack-kuttl-tests/cinder-db-sync-92t4c" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.350373 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f91fe62-3615-4153-a6c7-1652a5780bb5-config-data\") pod \"glance-db-sync-gn4wj\" (UID: \"5f91fe62-3615-4153-a6c7-1652a5780bb5\") " pod="openstack-kuttl-tests/glance-db-sync-gn4wj" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.358214 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.358969 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovsdbserver-nb-0" podUID="aa79d817-2b73-469c-8b09-f2f312835773" containerName="openstack-network-exporter" containerID="cri-o://9902de0f0a19fcf3501305d4c98aa8a835da1fc64164b28742b36af1db06a046" gracePeriod=300 Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.363127 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-db-sync-rmm2x" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.368940 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-db-sync-rq6qr"] Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.370313 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-db-sync-rq6qr" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.376229 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-db-sync-rq6qr"] Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.446025 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovsdbserver-nb-0" podUID="aa79d817-2b73-469c-8b09-f2f312835773" containerName="ovsdbserver-nb" containerID="cri-o://f5e6e1f7dc89862c9157326c6e877a21963b94977d4b4616a286d30936fb463f" gracePeriod=300 Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.452250 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f59d0040-b44b-46e3-bb18-c0ea74722cd3-scripts\") pod \"cinder-db-sync-92t4c\" (UID: \"f59d0040-b44b-46e3-bb18-c0ea74722cd3\") " pod="openstack-kuttl-tests/cinder-db-sync-92t4c" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.452301 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4869db4f-9810-4685-a2a3-a1103e998535-combined-ca-bundle\") pod \"neutron-db-sync-rq6qr\" (UID: \"4869db4f-9810-4685-a2a3-a1103e998535\") " pod="openstack-kuttl-tests/neutron-db-sync-rq6qr" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.452381 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5f91fe62-3615-4153-a6c7-1652a5780bb5-db-sync-config-data\") pod \"glance-db-sync-gn4wj\" (UID: \"5f91fe62-3615-4153-a6c7-1652a5780bb5\") " pod="openstack-kuttl-tests/glance-db-sync-gn4wj" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.452401 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f59d0040-b44b-46e3-bb18-c0ea74722cd3-db-sync-config-data\") pod \"cinder-db-sync-92t4c\" (UID: \"f59d0040-b44b-46e3-bb18-c0ea74722cd3\") " pod="openstack-kuttl-tests/cinder-db-sync-92t4c" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.452469 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/4869db4f-9810-4685-a2a3-a1103e998535-config\") pod \"neutron-db-sync-rq6qr\" (UID: \"4869db4f-9810-4685-a2a3-a1103e998535\") " pod="openstack-kuttl-tests/neutron-db-sync-rq6qr" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.452494 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s58ts\" (UniqueName: \"kubernetes.io/projected/4869db4f-9810-4685-a2a3-a1103e998535-kube-api-access-s58ts\") pod \"neutron-db-sync-rq6qr\" (UID: \"4869db4f-9810-4685-a2a3-a1103e998535\") " pod="openstack-kuttl-tests/neutron-db-sync-rq6qr" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.452561 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f59d0040-b44b-46e3-bb18-c0ea74722cd3-config-data\") pod \"cinder-db-sync-92t4c\" (UID: \"f59d0040-b44b-46e3-bb18-c0ea74722cd3\") " pod="openstack-kuttl-tests/cinder-db-sync-92t4c" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.452614 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/5f91fe62-3615-4153-a6c7-1652a5780bb5-combined-ca-bundle\") pod \"glance-db-sync-gn4wj\" (UID: \"5f91fe62-3615-4153-a6c7-1652a5780bb5\") " pod="openstack-kuttl-tests/glance-db-sync-gn4wj" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.452653 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sk949\" (UniqueName: \"kubernetes.io/projected/f59d0040-b44b-46e3-bb18-c0ea74722cd3-kube-api-access-sk949\") pod \"cinder-db-sync-92t4c\" (UID: \"f59d0040-b44b-46e3-bb18-c0ea74722cd3\") " pod="openstack-kuttl-tests/cinder-db-sync-92t4c" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.452699 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f59d0040-b44b-46e3-bb18-c0ea74722cd3-etc-machine-id\") pod \"cinder-db-sync-92t4c\" (UID: \"f59d0040-b44b-46e3-bb18-c0ea74722cd3\") " pod="openstack-kuttl-tests/cinder-db-sync-92t4c" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.452806 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f91fe62-3615-4153-a6c7-1652a5780bb5-config-data\") pod \"glance-db-sync-gn4wj\" (UID: \"5f91fe62-3615-4153-a6c7-1652a5780bb5\") " pod="openstack-kuttl-tests/glance-db-sync-gn4wj" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.452978 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hqhkn\" (UniqueName: \"kubernetes.io/projected/5f91fe62-3615-4153-a6c7-1652a5780bb5-kube-api-access-hqhkn\") pod \"glance-db-sync-gn4wj\" (UID: \"5f91fe62-3615-4153-a6c7-1652a5780bb5\") " pod="openstack-kuttl-tests/glance-db-sync-gn4wj" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.453102 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f59d0040-b44b-46e3-bb18-c0ea74722cd3-combined-ca-bundle\") pod \"cinder-db-sync-92t4c\" (UID: \"f59d0040-b44b-46e3-bb18-c0ea74722cd3\") " pod="openstack-kuttl-tests/cinder-db-sync-92t4c" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.456813 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f59d0040-b44b-46e3-bb18-c0ea74722cd3-etc-machine-id\") pod \"cinder-db-sync-92t4c\" (UID: \"f59d0040-b44b-46e3-bb18-c0ea74722cd3\") " pod="openstack-kuttl-tests/cinder-db-sync-92t4c" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.461902 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f59d0040-b44b-46e3-bb18-c0ea74722cd3-scripts\") pod \"cinder-db-sync-92t4c\" (UID: \"f59d0040-b44b-46e3-bb18-c0ea74722cd3\") " pod="openstack-kuttl-tests/cinder-db-sync-92t4c" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.464836 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f91fe62-3615-4153-a6c7-1652a5780bb5-config-data\") pod \"glance-db-sync-gn4wj\" (UID: \"5f91fe62-3615-4153-a6c7-1652a5780bb5\") " pod="openstack-kuttl-tests/glance-db-sync-gn4wj" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.465145 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f59d0040-b44b-46e3-bb18-c0ea74722cd3-config-data\") pod \"cinder-db-sync-92t4c\" (UID: 
\"f59d0040-b44b-46e3-bb18-c0ea74722cd3\") " pod="openstack-kuttl-tests/cinder-db-sync-92t4c" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.465220 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f59d0040-b44b-46e3-bb18-c0ea74722cd3-db-sync-config-data\") pod \"cinder-db-sync-92t4c\" (UID: \"f59d0040-b44b-46e3-bb18-c0ea74722cd3\") " pod="openstack-kuttl-tests/cinder-db-sync-92t4c" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.466992 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f91fe62-3615-4153-a6c7-1652a5780bb5-combined-ca-bundle\") pod \"glance-db-sync-gn4wj\" (UID: \"5f91fe62-3615-4153-a6c7-1652a5780bb5\") " pod="openstack-kuttl-tests/glance-db-sync-gn4wj" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.469644 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5f91fe62-3615-4153-a6c7-1652a5780bb5-db-sync-config-data\") pod \"glance-db-sync-gn4wj\" (UID: \"5f91fe62-3615-4153-a6c7-1652a5780bb5\") " pod="openstack-kuttl-tests/glance-db-sync-gn4wj" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.472350 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f59d0040-b44b-46e3-bb18-c0ea74722cd3-combined-ca-bundle\") pod \"cinder-db-sync-92t4c\" (UID: \"f59d0040-b44b-46e3-bb18-c0ea74722cd3\") " pod="openstack-kuttl-tests/cinder-db-sync-92t4c" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.477237 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hqhkn\" (UniqueName: \"kubernetes.io/projected/5f91fe62-3615-4153-a6c7-1652a5780bb5-kube-api-access-hqhkn\") pod \"glance-db-sync-gn4wj\" (UID: \"5f91fe62-3615-4153-a6c7-1652a5780bb5\") " pod="openstack-kuttl-tests/glance-db-sync-gn4wj" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.477728 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sk949\" (UniqueName: \"kubernetes.io/projected/f59d0040-b44b-46e3-bb18-c0ea74722cd3-kube-api-access-sk949\") pod \"cinder-db-sync-92t4c\" (UID: \"f59d0040-b44b-46e3-bb18-c0ea74722cd3\") " pod="openstack-kuttl-tests/cinder-db-sync-92t4c" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.555259 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4869db4f-9810-4685-a2a3-a1103e998535-combined-ca-bundle\") pod \"neutron-db-sync-rq6qr\" (UID: \"4869db4f-9810-4685-a2a3-a1103e998535\") " pod="openstack-kuttl-tests/neutron-db-sync-rq6qr" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.555437 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/4869db4f-9810-4685-a2a3-a1103e998535-config\") pod \"neutron-db-sync-rq6qr\" (UID: \"4869db4f-9810-4685-a2a3-a1103e998535\") " pod="openstack-kuttl-tests/neutron-db-sync-rq6qr" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.555465 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s58ts\" (UniqueName: \"kubernetes.io/projected/4869db4f-9810-4685-a2a3-a1103e998535-kube-api-access-s58ts\") pod \"neutron-db-sync-rq6qr\" (UID: \"4869db4f-9810-4685-a2a3-a1103e998535\") " 
pod="openstack-kuttl-tests/neutron-db-sync-rq6qr" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.560417 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4869db4f-9810-4685-a2a3-a1103e998535-combined-ca-bundle\") pod \"neutron-db-sync-rq6qr\" (UID: \"4869db4f-9810-4685-a2a3-a1103e998535\") " pod="openstack-kuttl-tests/neutron-db-sync-rq6qr" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.561620 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/4869db4f-9810-4685-a2a3-a1103e998535-config\") pod \"neutron-db-sync-rq6qr\" (UID: \"4869db4f-9810-4685-a2a3-a1103e998535\") " pod="openstack-kuttl-tests/neutron-db-sync-rq6qr" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.588627 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s58ts\" (UniqueName: \"kubernetes.io/projected/4869db4f-9810-4685-a2a3-a1103e998535-kube-api-access-s58ts\") pod \"neutron-db-sync-rq6qr\" (UID: \"4869db4f-9810-4685-a2a3-a1103e998535\") " pod="openstack-kuttl-tests/neutron-db-sync-rq6qr" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.618175 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:26:47 crc kubenswrapper[4558]: E0120 17:26:47.677240 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 37f931cc076b0de2ba4bd77c0544f81aec55cb5d8ae2f041d7332ba974499651 is running failed: container process not found" containerID="37f931cc076b0de2ba4bd77c0544f81aec55cb5d8ae2f041d7332ba974499651" cmd=["/usr/bin/pidof","ovsdb-server"] Jan 20 17:26:47 crc kubenswrapper[4558]: E0120 17:26:47.678014 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 37f931cc076b0de2ba4bd77c0544f81aec55cb5d8ae2f041d7332ba974499651 is running failed: container process not found" containerID="37f931cc076b0de2ba4bd77c0544f81aec55cb5d8ae2f041d7332ba974499651" cmd=["/usr/bin/pidof","ovsdb-server"] Jan 20 17:26:47 crc kubenswrapper[4558]: E0120 17:26:47.678448 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 37f931cc076b0de2ba4bd77c0544f81aec55cb5d8ae2f041d7332ba974499651 is running failed: container process not found" containerID="37f931cc076b0de2ba4bd77c0544f81aec55cb5d8ae2f041d7332ba974499651" cmd=["/usr/bin/pidof","ovsdb-server"] Jan 20 17:26:47 crc kubenswrapper[4558]: E0120 17:26:47.678482 4558 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 37f931cc076b0de2ba4bd77c0544f81aec55cb5d8ae2f041d7332ba974499651 is running failed: container process not found" probeType="Readiness" pod="openstack-kuttl-tests/ovsdbserver-sb-0" podUID="63766878-2a9a-47b6-9209-554b59500f10" containerName="ovsdbserver-sb" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.744049 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-db-sync-gn4wj" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.767638 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-db-sync-92t4c" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.779236 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-db-sync-rq6qr" Jan 20 17:26:47 crc kubenswrapper[4558]: E0120 17:26:47.828077 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="74db7b040a7e0441aa4205a49b98fee925418b1a898081b635b0efcdf2e08d5a" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Jan 20 17:26:47 crc kubenswrapper[4558]: E0120 17:26:47.834721 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="74db7b040a7e0441aa4205a49b98fee925418b1a898081b635b0efcdf2e08d5a" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.846005 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-db-sync-z28dz"] Jan 20 17:26:47 crc kubenswrapper[4558]: E0120 17:26:47.846986 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="74db7b040a7e0441aa4205a49b98fee925418b1a898081b635b0efcdf2e08d5a" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Jan 20 17:26:47 crc kubenswrapper[4558]: E0120 17:26:47.847035 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/openstack-galera-0" podUID="9db3acdd-184a-4004-a8d3-451673126318" containerName="galera" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.863462 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-db-sync-z28dz"] Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.885698 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-db-sync-sxc6v"] Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.899857 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-db-sync-sxc6v"] Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.913019 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-db-sync-hbfss"] Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.916345 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-hbfss" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.926719 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-db-sync-hbfss"] Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.931853 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-conductor-scripts" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.938219 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-db-sync-cnn24"] Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.941983 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-db-sync-cnn24"] Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.969202 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04c5c6c7-acc2-4821-a68b-c2ac094e931f-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-hbfss\" (UID: \"04c5c6c7-acc2-4821-a68b-c2ac094e931f\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-hbfss" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.969252 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04c5c6c7-acc2-4821-a68b-c2ac094e931f-config-data\") pod \"nova-cell1-conductor-db-sync-hbfss\" (UID: \"04c5c6c7-acc2-4821-a68b-c2ac094e931f\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-hbfss" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.969297 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4ph6z\" (UniqueName: \"kubernetes.io/projected/04c5c6c7-acc2-4821-a68b-c2ac094e931f-kube-api-access-4ph6z\") pod \"nova-cell1-conductor-db-sync-hbfss\" (UID: \"04c5c6c7-acc2-4821-a68b-c2ac094e931f\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-hbfss" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.969350 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/04c5c6c7-acc2-4821-a68b-c2ac094e931f-scripts\") pod \"nova-cell1-conductor-db-sync-hbfss\" (UID: \"04c5c6c7-acc2-4821-a68b-c2ac094e931f\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-hbfss" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.986882 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-db-sync-h5hmh"] Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.988085 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-h5hmh" Jan 20 17:26:47 crc kubenswrapper[4558]: I0120 17:26:47.990324 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-conductor-scripts" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.048293 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-sb-0_63766878-2a9a-47b6-9209-554b59500f10/ovsdbserver-sb/0.log" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.048360 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.074226 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-db-sync-h5hmh"] Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.075588 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f461d799-0027-4949-96bc-85e80ae9ec47-config-data\") pod \"nova-cell0-conductor-db-sync-h5hmh\" (UID: \"f461d799-0027-4949-96bc-85e80ae9ec47\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-h5hmh" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.075691 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4ph6z\" (UniqueName: \"kubernetes.io/projected/04c5c6c7-acc2-4821-a68b-c2ac094e931f-kube-api-access-4ph6z\") pod \"nova-cell1-conductor-db-sync-hbfss\" (UID: \"04c5c6c7-acc2-4821-a68b-c2ac094e931f\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-hbfss" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.075863 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2cvf4\" (UniqueName: \"kubernetes.io/projected/f461d799-0027-4949-96bc-85e80ae9ec47-kube-api-access-2cvf4\") pod \"nova-cell0-conductor-db-sync-h5hmh\" (UID: \"f461d799-0027-4949-96bc-85e80ae9ec47\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-h5hmh" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.075903 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/04c5c6c7-acc2-4821-a68b-c2ac094e931f-scripts\") pod \"nova-cell1-conductor-db-sync-hbfss\" (UID: \"04c5c6c7-acc2-4821-a68b-c2ac094e931f\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-hbfss" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.076002 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f461d799-0027-4949-96bc-85e80ae9ec47-scripts\") pod \"nova-cell0-conductor-db-sync-h5hmh\" (UID: \"f461d799-0027-4949-96bc-85e80ae9ec47\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-h5hmh" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.076227 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f461d799-0027-4949-96bc-85e80ae9ec47-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-h5hmh\" (UID: \"f461d799-0027-4949-96bc-85e80ae9ec47\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-h5hmh" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.076293 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04c5c6c7-acc2-4821-a68b-c2ac094e931f-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-hbfss\" (UID: \"04c5c6c7-acc2-4821-a68b-c2ac094e931f\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-hbfss" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.076355 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04c5c6c7-acc2-4821-a68b-c2ac094e931f-config-data\") pod \"nova-cell1-conductor-db-sync-hbfss\" (UID: \"04c5c6c7-acc2-4821-a68b-c2ac094e931f\") " 
pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-hbfss" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.086234 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-db-sync-t7b8z"] Jan 20 17:26:48 crc kubenswrapper[4558]: E0120 17:26:48.086962 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="63766878-2a9a-47b6-9209-554b59500f10" containerName="ovsdbserver-sb" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.086979 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="63766878-2a9a-47b6-9209-554b59500f10" containerName="ovsdbserver-sb" Jan 20 17:26:48 crc kubenswrapper[4558]: E0120 17:26:48.087020 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="63766878-2a9a-47b6-9209-554b59500f10" containerName="openstack-network-exporter" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.087027 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="63766878-2a9a-47b6-9209-554b59500f10" containerName="openstack-network-exporter" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.087332 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="63766878-2a9a-47b6-9209-554b59500f10" containerName="ovsdbserver-sb" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.087375 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="63766878-2a9a-47b6-9209-554b59500f10" containerName="openstack-network-exporter" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.093070 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-db-sync-t7b8z" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.098285 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-nb-0_aa79d817-2b73-469c-8b09-f2f312835773/ovsdbserver-nb/0.log" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.098398 4558 generic.go:334] "Generic (PLEG): container finished" podID="aa79d817-2b73-469c-8b09-f2f312835773" containerID="9902de0f0a19fcf3501305d4c98aa8a835da1fc64164b28742b36af1db06a046" exitCode=2 Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.098466 4558 generic.go:334] "Generic (PLEG): container finished" podID="aa79d817-2b73-469c-8b09-f2f312835773" containerID="f5e6e1f7dc89862c9157326c6e877a21963b94977d4b4616a286d30936fb463f" exitCode=143 Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.098477 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"aa79d817-2b73-469c-8b09-f2f312835773","Type":"ContainerDied","Data":"9902de0f0a19fcf3501305d4c98aa8a835da1fc64164b28742b36af1db06a046"} Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.098630 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"aa79d817-2b73-469c-8b09-f2f312835773","Type":"ContainerDied","Data":"f5e6e1f7dc89862c9157326c6e877a21963b94977d4b4616a286d30936fb463f"} Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.102838 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-db-sync-t7b8z"] Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.104988 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/04c5c6c7-acc2-4821-a68b-c2ac094e931f-scripts\") pod \"nova-cell1-conductor-db-sync-hbfss\" (UID: \"04c5c6c7-acc2-4821-a68b-c2ac094e931f\") " 
pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-hbfss" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.107218 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04c5c6c7-acc2-4821-a68b-c2ac094e931f-config-data\") pod \"nova-cell1-conductor-db-sync-hbfss\" (UID: \"04c5c6c7-acc2-4821-a68b-c2ac094e931f\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-hbfss" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.120884 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04c5c6c7-acc2-4821-a68b-c2ac094e931f-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-hbfss\" (UID: \"04c5c6c7-acc2-4821-a68b-c2ac094e931f\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-hbfss" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.120955 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-scr8s"] Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.139546 4558 generic.go:334] "Generic (PLEG): container finished" podID="d274ba80-2e42-4823-bbf2-02691e791ec9" containerID="96ef199805699173d17182add49a78cb239dafab7eff49108278d5e224dfb51a" exitCode=0 Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.139601 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"d274ba80-2e42-4823-bbf2-02691e791ec9","Type":"ContainerDied","Data":"96ef199805699173d17182add49a78cb239dafab7eff49108278d5e224dfb51a"} Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.146544 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-scr8s"] Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.147577 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4ph6z\" (UniqueName: \"kubernetes.io/projected/04c5c6c7-acc2-4821-a68b-c2ac094e931f-kube-api-access-4ph6z\") pod \"nova-cell1-conductor-db-sync-hbfss\" (UID: \"04c5c6c7-acc2-4821-a68b-c2ac094e931f\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-hbfss" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.157228 4558 generic.go:334] "Generic (PLEG): container finished" podID="c08cb973-6eb3-414d-b127-fc146b0fb1f2" containerID="97eae6723cbae5007ecc1c7e159332138d8c92b6e1ffae4831a2b5b8e46b5a7e" exitCode=2 Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.157288 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"c08cb973-6eb3-414d-b127-fc146b0fb1f2","Type":"ContainerDied","Data":"97eae6723cbae5007ecc1c7e159332138d8c92b6e1ffae4831a2b5b8e46b5a7e"} Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.159365 4558 generic.go:334] "Generic (PLEG): container finished" podID="987d6a20-4fc9-411e-ae1f-9ff5dd78e80d" containerID="9e7a56b68f74368ebdccf3a2050688f4660eec04ee70f20c2816ab7ef47f5d08" exitCode=0 Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.159411 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"987d6a20-4fc9-411e-ae1f-9ff5dd78e80d","Type":"ContainerDied","Data":"9e7a56b68f74368ebdccf3a2050688f4660eec04ee70f20c2816ab7ef47f5d08"} Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.162280 4558 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-sb-0_63766878-2a9a-47b6-9209-554b59500f10/ovsdbserver-sb/0.log" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.162313 4558 generic.go:334] "Generic (PLEG): container finished" podID="63766878-2a9a-47b6-9209-554b59500f10" containerID="f69a90baad4062187a2630067e5b5be8fe21f3f7ef14d41a5c46fea09bbfc6d4" exitCode=2 Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.162325 4558 generic.go:334] "Generic (PLEG): container finished" podID="63766878-2a9a-47b6-9209-554b59500f10" containerID="37f931cc076b0de2ba4bd77c0544f81aec55cb5d8ae2f041d7332ba974499651" exitCode=143 Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.162479 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" podUID="2587f606-361f-4462-bef1-e1ec4e95f012" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://fc1cf5d826a15f0f792f4f5bcddbaf158f40dbe461e000bce68e8580a359d758" gracePeriod=30 Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.162745 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.162987 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"63766878-2a9a-47b6-9209-554b59500f10","Type":"ContainerDied","Data":"f69a90baad4062187a2630067e5b5be8fe21f3f7ef14d41a5c46fea09bbfc6d4"} Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.163028 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"63766878-2a9a-47b6-9209-554b59500f10","Type":"ContainerDied","Data":"37f931cc076b0de2ba4bd77c0544f81aec55cb5d8ae2f041d7332ba974499651"} Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.163040 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"63766878-2a9a-47b6-9209-554b59500f10","Type":"ContainerDied","Data":"2492a122f242489df747c3740a3ae369146cc7b6726c89e71751c5fa0bdb91d4"} Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.163057 4558 scope.go:117] "RemoveContainer" containerID="f69a90baad4062187a2630067e5b5be8fe21f3f7ef14d41a5c46fea09bbfc6d4" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.180094 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/63766878-2a9a-47b6-9209-554b59500f10-ovsdb-rundir\") pod \"63766878-2a9a-47b6-9209-554b59500f10\" (UID: \"63766878-2a9a-47b6-9209-554b59500f10\") " Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.180158 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/63766878-2a9a-47b6-9209-554b59500f10-config\") pod \"63766878-2a9a-47b6-9209-554b59500f10\" (UID: \"63766878-2a9a-47b6-9209-554b59500f10\") " Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.180344 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/63766878-2a9a-47b6-9209-554b59500f10-scripts\") pod \"63766878-2a9a-47b6-9209-554b59500f10\" (UID: \"63766878-2a9a-47b6-9209-554b59500f10\") " Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.180386 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gcwf4\" (UniqueName: 
\"kubernetes.io/projected/63766878-2a9a-47b6-9209-554b59500f10-kube-api-access-gcwf4\") pod \"63766878-2a9a-47b6-9209-554b59500f10\" (UID: \"63766878-2a9a-47b6-9209-554b59500f10\") " Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.180425 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/63766878-2a9a-47b6-9209-554b59500f10-ovsdbserver-sb-tls-certs\") pod \"63766878-2a9a-47b6-9209-554b59500f10\" (UID: \"63766878-2a9a-47b6-9209-554b59500f10\") " Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.180523 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-sb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"63766878-2a9a-47b6-9209-554b59500f10\" (UID: \"63766878-2a9a-47b6-9209-554b59500f10\") " Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.180558 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/63766878-2a9a-47b6-9209-554b59500f10-combined-ca-bundle\") pod \"63766878-2a9a-47b6-9209-554b59500f10\" (UID: \"63766878-2a9a-47b6-9209-554b59500f10\") " Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.180598 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/63766878-2a9a-47b6-9209-554b59500f10-metrics-certs-tls-certs\") pod \"63766878-2a9a-47b6-9209-554b59500f10\" (UID: \"63766878-2a9a-47b6-9209-554b59500f10\") " Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.180628 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/63766878-2a9a-47b6-9209-554b59500f10-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "63766878-2a9a-47b6-9209-554b59500f10" (UID: "63766878-2a9a-47b6-9209-554b59500f10"). InnerVolumeSpecName "ovsdb-rundir". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.180885 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2442cca-1f6c-4531-b1f4-2b873ce42964-combined-ca-bundle\") pod \"barbican-db-sync-t7b8z\" (UID: \"c2442cca-1f6c-4531-b1f4-2b873ce42964\") " pod="openstack-kuttl-tests/barbican-db-sync-t7b8z" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.180925 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c2442cca-1f6c-4531-b1f4-2b873ce42964-db-sync-config-data\") pod \"barbican-db-sync-t7b8z\" (UID: \"c2442cca-1f6c-4531-b1f4-2b873ce42964\") " pod="openstack-kuttl-tests/barbican-db-sync-t7b8z" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.184674 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2cvf4\" (UniqueName: \"kubernetes.io/projected/f461d799-0027-4949-96bc-85e80ae9ec47-kube-api-access-2cvf4\") pod \"nova-cell0-conductor-db-sync-h5hmh\" (UID: \"f461d799-0027-4949-96bc-85e80ae9ec47\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-h5hmh" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.184743 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f461d799-0027-4949-96bc-85e80ae9ec47-scripts\") pod \"nova-cell0-conductor-db-sync-h5hmh\" (UID: \"f461d799-0027-4949-96bc-85e80ae9ec47\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-h5hmh" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.184829 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lk77l\" (UniqueName: \"kubernetes.io/projected/c2442cca-1f6c-4531-b1f4-2b873ce42964-kube-api-access-lk77l\") pod \"barbican-db-sync-t7b8z\" (UID: \"c2442cca-1f6c-4531-b1f4-2b873ce42964\") " pod="openstack-kuttl-tests/barbican-db-sync-t7b8z" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.184855 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f461d799-0027-4949-96bc-85e80ae9ec47-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-h5hmh\" (UID: \"f461d799-0027-4949-96bc-85e80ae9ec47\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-h5hmh" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.184923 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f461d799-0027-4949-96bc-85e80ae9ec47-config-data\") pod \"nova-cell0-conductor-db-sync-h5hmh\" (UID: \"f461d799-0027-4949-96bc-85e80ae9ec47\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-h5hmh" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.184996 4558 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/63766878-2a9a-47b6-9209-554b59500f10-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.181145 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/63766878-2a9a-47b6-9209-554b59500f10-scripts" (OuterVolumeSpecName: "scripts") pod "63766878-2a9a-47b6-9209-554b59500f10" (UID: "63766878-2a9a-47b6-9209-554b59500f10"). 
InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.185048 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/63766878-2a9a-47b6-9209-554b59500f10-config" (OuterVolumeSpecName: "config") pod "63766878-2a9a-47b6-9209-554b59500f10" (UID: "63766878-2a9a-47b6-9209-554b59500f10"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.190748 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f461d799-0027-4949-96bc-85e80ae9ec47-scripts\") pod \"nova-cell0-conductor-db-sync-h5hmh\" (UID: \"f461d799-0027-4949-96bc-85e80ae9ec47\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-h5hmh" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.198573 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "ovndbcluster-sb-etc-ovn") pod "63766878-2a9a-47b6-9209-554b59500f10" (UID: "63766878-2a9a-47b6-9209-554b59500f10"). InnerVolumeSpecName "local-storage05-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.198702 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/63766878-2a9a-47b6-9209-554b59500f10-kube-api-access-gcwf4" (OuterVolumeSpecName: "kube-api-access-gcwf4") pod "63766878-2a9a-47b6-9209-554b59500f10" (UID: "63766878-2a9a-47b6-9209-554b59500f10"). InnerVolumeSpecName "kube-api-access-gcwf4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.205817 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f461d799-0027-4949-96bc-85e80ae9ec47-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-h5hmh\" (UID: \"f461d799-0027-4949-96bc-85e80ae9ec47\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-h5hmh" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.206015 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f461d799-0027-4949-96bc-85e80ae9ec47-config-data\") pod \"nova-cell0-conductor-db-sync-h5hmh\" (UID: \"f461d799-0027-4949-96bc-85e80ae9ec47\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-h5hmh" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.211714 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-zn4xn"] Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.213787 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/swift-ring-rebalance-zn4xn" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.217983 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"swift-ring-scripts" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.218443 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"swift-ring-config-data" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.231083 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-storage-0"] Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.232126 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" containerName="account-server" containerID="cri-o://e6440af1d01d3d7e989418e06350468870193b38937d2ae69a49ff7a72d20bf2" gracePeriod=30 Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.232341 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" containerName="swift-recon-cron" containerID="cri-o://2b148f67806ea893dea736f70886bafe16751aa4932ab889670bd2755c1dad06" gracePeriod=30 Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.232473 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" containerName="rsync" containerID="cri-o://9016dbbaf3d5e8bdb4e9556a7b608c196cc888571331abb5670be24de359a45a" gracePeriod=30 Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.232607 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" containerName="object-expirer" containerID="cri-o://68fbcc76c0e2dedc82e6715e0ec8b4d992e77e10e439a698ddd8d943bc08f443" gracePeriod=30 Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.232733 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" containerName="object-updater" containerID="cri-o://50eea846d91051f2dd7105d2a67595fd30c64b90855e0b8b97a766b5024a4194" gracePeriod=30 Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.232902 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" containerName="object-auditor" containerID="cri-o://a9f1e3687db381bd17fdfb1b0e3aaf9c0fa91fd605b0d73ed9095c3bd3bd381f" gracePeriod=30 Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.233023 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" containerName="object-replicator" containerID="cri-o://dc5ce68e3517c196bdee8bba30197f747f505c80c7dfe2ac492a71348d08c4f8" gracePeriod=30 Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.233149 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" containerName="object-server" containerID="cri-o://f471fa142ace1139e497b71d2408b5de3eb8a81c1e52fc0f3e0b7abe127fa420" gracePeriod=30 Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.233280 4558 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" containerName="container-updater" containerID="cri-o://7d429e35b6e23b8fe5da4b2ba40f03361d3954fbabd9c27927cee2dca39904f9" gracePeriod=30 Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.233404 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" containerName="container-auditor" containerID="cri-o://8dfb739a96b0388de7f0bca3f773a09ff87c39b3fc3e077a97ec5ad2b9469fe8" gracePeriod=30 Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.233545 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" containerName="container-replicator" containerID="cri-o://fac08b16dfb922f531dfe4ed284cdec4dfd765c94432b6d5e4237c6ab1063c5f" gracePeriod=30 Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.233688 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" containerName="container-server" containerID="cri-o://9f94ead147cad59ed2c937f2aace48390c6cf7ba843fc0898b176e2475d251b5" gracePeriod=30 Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.233844 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" containerName="account-replicator" containerID="cri-o://36bfb57970925c866cd826c6992c1bb5eb23be201b3fee3a44013164f6bf6c45" gracePeriod=30 Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.233860 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" containerName="account-auditor" containerID="cri-o://33c92b2ec4e98e2de94cfd9d49d4bf673da1f8bed18bac8e9c5007c99a8dc48e" gracePeriod=30 Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.233931 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" containerName="account-reaper" containerID="cri-o://22cdeeaca39d5197ecff3f1352fe3fc3b073ce7ef0565d67854f14d8c72aeba3" gracePeriod=30 Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.234133 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2cvf4\" (UniqueName: \"kubernetes.io/projected/f461d799-0027-4949-96bc-85e80ae9ec47-kube-api-access-2cvf4\") pod \"nova-cell0-conductor-db-sync-h5hmh\" (UID: \"f461d799-0027-4949-96bc-85e80ae9ec47\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-h5hmh" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.270730 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/63766878-2a9a-47b6-9209-554b59500f10-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "63766878-2a9a-47b6-9209-554b59500f10" (UID: "63766878-2a9a-47b6-9209-554b59500f10"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.271345 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-hbfss" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.296070 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/8e043dc4-ede7-4945-adfe-f23ef8b0b313-ring-data-devices\") pod \"swift-ring-rebalance-zn4xn\" (UID: \"8e043dc4-ede7-4945-adfe-f23ef8b0b313\") " pod="openstack-kuttl-tests/swift-ring-rebalance-zn4xn" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.296153 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2442cca-1f6c-4531-b1f4-2b873ce42964-combined-ca-bundle\") pod \"barbican-db-sync-t7b8z\" (UID: \"c2442cca-1f6c-4531-b1f4-2b873ce42964\") " pod="openstack-kuttl-tests/barbican-db-sync-t7b8z" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.296207 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c2442cca-1f6c-4531-b1f4-2b873ce42964-db-sync-config-data\") pod \"barbican-db-sync-t7b8z\" (UID: \"c2442cca-1f6c-4531-b1f4-2b873ce42964\") " pod="openstack-kuttl-tests/barbican-db-sync-t7b8z" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.296685 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8e043dc4-ede7-4945-adfe-f23ef8b0b313-scripts\") pod \"swift-ring-rebalance-zn4xn\" (UID: \"8e043dc4-ede7-4945-adfe-f23ef8b0b313\") " pod="openstack-kuttl-tests/swift-ring-rebalance-zn4xn" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.296898 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e043dc4-ede7-4945-adfe-f23ef8b0b313-combined-ca-bundle\") pod \"swift-ring-rebalance-zn4xn\" (UID: \"8e043dc4-ede7-4945-adfe-f23ef8b0b313\") " pod="openstack-kuttl-tests/swift-ring-rebalance-zn4xn" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.296948 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/8e043dc4-ede7-4945-adfe-f23ef8b0b313-dispersionconf\") pod \"swift-ring-rebalance-zn4xn\" (UID: \"8e043dc4-ede7-4945-adfe-f23ef8b0b313\") " pod="openstack-kuttl-tests/swift-ring-rebalance-zn4xn" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.297263 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lk77l\" (UniqueName: \"kubernetes.io/projected/c2442cca-1f6c-4531-b1f4-2b873ce42964-kube-api-access-lk77l\") pod \"barbican-db-sync-t7b8z\" (UID: \"c2442cca-1f6c-4531-b1f4-2b873ce42964\") " pod="openstack-kuttl-tests/barbican-db-sync-t7b8z" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.297308 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/8e043dc4-ede7-4945-adfe-f23ef8b0b313-swiftconf\") pod \"swift-ring-rebalance-zn4xn\" (UID: \"8e043dc4-ede7-4945-adfe-f23ef8b0b313\") " pod="openstack-kuttl-tests/swift-ring-rebalance-zn4xn" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.297373 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-78pfw\" (UniqueName: 
\"kubernetes.io/projected/8e043dc4-ede7-4945-adfe-f23ef8b0b313-kube-api-access-78pfw\") pod \"swift-ring-rebalance-zn4xn\" (UID: \"8e043dc4-ede7-4945-adfe-f23ef8b0b313\") " pod="openstack-kuttl-tests/swift-ring-rebalance-zn4xn" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.297412 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/8e043dc4-ede7-4945-adfe-f23ef8b0b313-etc-swift\") pod \"swift-ring-rebalance-zn4xn\" (UID: \"8e043dc4-ede7-4945-adfe-f23ef8b0b313\") " pod="openstack-kuttl-tests/swift-ring-rebalance-zn4xn" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.297540 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/63766878-2a9a-47b6-9209-554b59500f10-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.297635 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/63766878-2a9a-47b6-9209-554b59500f10-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.297665 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gcwf4\" (UniqueName: \"kubernetes.io/projected/63766878-2a9a-47b6-9209-554b59500f10-kube-api-access-gcwf4\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.297708 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" " Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.297720 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/63766878-2a9a-47b6-9209-554b59500f10-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.352211 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-h5hmh" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.373302 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2442cca-1f6c-4531-b1f4-2b873ce42964-combined-ca-bundle\") pod \"barbican-db-sync-t7b8z\" (UID: \"c2442cca-1f6c-4531-b1f4-2b873ce42964\") " pod="openstack-kuttl-tests/barbican-db-sync-t7b8z" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.385375 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lk77l\" (UniqueName: \"kubernetes.io/projected/c2442cca-1f6c-4531-b1f4-2b873ce42964-kube-api-access-lk77l\") pod \"barbican-db-sync-t7b8z\" (UID: \"c2442cca-1f6c-4531-b1f4-2b873ce42964\") " pod="openstack-kuttl-tests/barbican-db-sync-t7b8z" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.401100 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-zn4xn"] Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.404905 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/8e043dc4-ede7-4945-adfe-f23ef8b0b313-ring-data-devices\") pod \"swift-ring-rebalance-zn4xn\" (UID: \"8e043dc4-ede7-4945-adfe-f23ef8b0b313\") " pod="openstack-kuttl-tests/swift-ring-rebalance-zn4xn" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.405183 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8e043dc4-ede7-4945-adfe-f23ef8b0b313-scripts\") pod \"swift-ring-rebalance-zn4xn\" (UID: \"8e043dc4-ede7-4945-adfe-f23ef8b0b313\") " pod="openstack-kuttl-tests/swift-ring-rebalance-zn4xn" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.405261 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e043dc4-ede7-4945-adfe-f23ef8b0b313-combined-ca-bundle\") pod \"swift-ring-rebalance-zn4xn\" (UID: \"8e043dc4-ede7-4945-adfe-f23ef8b0b313\") " pod="openstack-kuttl-tests/swift-ring-rebalance-zn4xn" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.405463 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/8e043dc4-ede7-4945-adfe-f23ef8b0b313-dispersionconf\") pod \"swift-ring-rebalance-zn4xn\" (UID: \"8e043dc4-ede7-4945-adfe-f23ef8b0b313\") " pod="openstack-kuttl-tests/swift-ring-rebalance-zn4xn" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.405513 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/8e043dc4-ede7-4945-adfe-f23ef8b0b313-swiftconf\") pod \"swift-ring-rebalance-zn4xn\" (UID: \"8e043dc4-ede7-4945-adfe-f23ef8b0b313\") " pod="openstack-kuttl-tests/swift-ring-rebalance-zn4xn" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.405575 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-78pfw\" (UniqueName: \"kubernetes.io/projected/8e043dc4-ede7-4945-adfe-f23ef8b0b313-kube-api-access-78pfw\") pod \"swift-ring-rebalance-zn4xn\" (UID: \"8e043dc4-ede7-4945-adfe-f23ef8b0b313\") " pod="openstack-kuttl-tests/swift-ring-rebalance-zn4xn" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.405618 4558 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/8e043dc4-ede7-4945-adfe-f23ef8b0b313-etc-swift\") pod \"swift-ring-rebalance-zn4xn\" (UID: \"8e043dc4-ede7-4945-adfe-f23ef8b0b313\") " pod="openstack-kuttl-tests/swift-ring-rebalance-zn4xn" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.406145 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/8e043dc4-ede7-4945-adfe-f23ef8b0b313-etc-swift\") pod \"swift-ring-rebalance-zn4xn\" (UID: \"8e043dc4-ede7-4945-adfe-f23ef8b0b313\") " pod="openstack-kuttl-tests/swift-ring-rebalance-zn4xn" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.407463 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8e043dc4-ede7-4945-adfe-f23ef8b0b313-scripts\") pod \"swift-ring-rebalance-zn4xn\" (UID: \"8e043dc4-ede7-4945-adfe-f23ef8b0b313\") " pod="openstack-kuttl-tests/swift-ring-rebalance-zn4xn" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.408372 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/8e043dc4-ede7-4945-adfe-f23ef8b0b313-ring-data-devices\") pod \"swift-ring-rebalance-zn4xn\" (UID: \"8e043dc4-ede7-4945-adfe-f23ef8b0b313\") " pod="openstack-kuttl-tests/swift-ring-rebalance-zn4xn" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.428583 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/8e043dc4-ede7-4945-adfe-f23ef8b0b313-swiftconf\") pod \"swift-ring-rebalance-zn4xn\" (UID: \"8e043dc4-ede7-4945-adfe-f23ef8b0b313\") " pod="openstack-kuttl-tests/swift-ring-rebalance-zn4xn" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.445038 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c2442cca-1f6c-4531-b1f4-2b873ce42964-db-sync-config-data\") pod \"barbican-db-sync-t7b8z\" (UID: \"c2442cca-1f6c-4531-b1f4-2b873ce42964\") " pod="openstack-kuttl-tests/barbican-db-sync-t7b8z" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.454364 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-db-sync-t7b8z" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.459114 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/8e043dc4-ede7-4945-adfe-f23ef8b0b313-dispersionconf\") pod \"swift-ring-rebalance-zn4xn\" (UID: \"8e043dc4-ede7-4945-adfe-f23ef8b0b313\") " pod="openstack-kuttl-tests/swift-ring-rebalance-zn4xn" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.485733 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-78pfw\" (UniqueName: \"kubernetes.io/projected/8e043dc4-ede7-4945-adfe-f23ef8b0b313-kube-api-access-78pfw\") pod \"swift-ring-rebalance-zn4xn\" (UID: \"8e043dc4-ede7-4945-adfe-f23ef8b0b313\") " pod="openstack-kuttl-tests/swift-ring-rebalance-zn4xn" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.532182 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e043dc4-ede7-4945-adfe-f23ef8b0b313-combined-ca-bundle\") pod \"swift-ring-rebalance-zn4xn\" (UID: \"8e043dc4-ede7-4945-adfe-f23ef8b0b313\") " pod="openstack-kuttl-tests/swift-ring-rebalance-zn4xn" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.549145 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-nb-0_aa79d817-2b73-469c-8b09-f2f312835773/ovsdbserver-nb/0.log" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.549297 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.555353 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/63766878-2a9a-47b6-9209-554b59500f10-ovsdbserver-sb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-sb-tls-certs") pod "63766878-2a9a-47b6-9209-554b59500f10" (UID: "63766878-2a9a-47b6-9209-554b59500f10"). InnerVolumeSpecName "ovsdbserver-sb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.560455 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.607350 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/63766878-2a9a-47b6-9209-554b59500f10-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "63766878-2a9a-47b6-9209-554b59500f10" (UID: "63766878-2a9a-47b6-9209-554b59500f10"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.611432 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01e17b5d-e7c8-4751-9f98-524fdc40bbf3" path="/var/lib/kubelet/pods/01e17b5d-e7c8-4751-9f98-524fdc40bbf3/volumes" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.612011 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="213f0f23-1d2e-456d-9a25-de15571358a2" path="/var/lib/kubelet/pods/213f0f23-1d2e-456d-9a25-de15571358a2/volumes" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.620401 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.633729 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2760b0e9-ab1f-4fe1-8240-5a638afaef7b" path="/var/lib/kubelet/pods/2760b0e9-ab1f-4fe1-8240-5a638afaef7b/volumes" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.634454 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2e492b4c-9f94-4c1e-87e7-5f63dbb9344b" path="/var/lib/kubelet/pods/2e492b4c-9f94-4c1e-87e7-5f63dbb9344b/volumes" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.635350 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="90fa68b7-a335-4dba-b8f1-a9ab54c07786" path="/var/lib/kubelet/pods/90fa68b7-a335-4dba-b8f1-a9ab54c07786/volumes" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.637055 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="acb70f0a-6559-44d8-9182-a43381f314b4" path="/var/lib/kubelet/pods/acb70f0a-6559-44d8-9182-a43381f314b4/volumes" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.637738 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b03348ed-66d6-4076-a5fa-0ccf182e8e3c" path="/var/lib/kubelet/pods/b03348ed-66d6-4076-a5fa-0ccf182e8e3c/volumes" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.639769 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bwjzm\" (UniqueName: \"kubernetes.io/projected/aa79d817-2b73-469c-8b09-f2f312835773-kube-api-access-bwjzm\") pod \"aa79d817-2b73-469c-8b09-f2f312835773\" (UID: \"aa79d817-2b73-469c-8b09-f2f312835773\") " Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.639851 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/aa79d817-2b73-469c-8b09-f2f312835773-scripts\") pod \"aa79d817-2b73-469c-8b09-f2f312835773\" (UID: \"aa79d817-2b73-469c-8b09-f2f312835773\") " Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.639905 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa79d817-2b73-469c-8b09-f2f312835773-ovsdbserver-nb-tls-certs\") pod \"aa79d817-2b73-469c-8b09-f2f312835773\" (UID: \"aa79d817-2b73-469c-8b09-f2f312835773\") " Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.639968 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/aa79d817-2b73-469c-8b09-f2f312835773-ovsdb-rundir\") pod \"aa79d817-2b73-469c-8b09-f2f312835773\" (UID: \"aa79d817-2b73-469c-8b09-f2f312835773\") " Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.640015 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa79d817-2b73-469c-8b09-f2f312835773-combined-ca-bundle\") pod \"aa79d817-2b73-469c-8b09-f2f312835773\" (UID: \"aa79d817-2b73-469c-8b09-f2f312835773\") " Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.640122 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa79d817-2b73-469c-8b09-f2f312835773-metrics-certs-tls-certs\") pod \"aa79d817-2b73-469c-8b09-f2f312835773\" (UID: \"aa79d817-2b73-469c-8b09-f2f312835773\") " Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.640158 4558 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aa79d817-2b73-469c-8b09-f2f312835773-config\") pod \"aa79d817-2b73-469c-8b09-f2f312835773\" (UID: \"aa79d817-2b73-469c-8b09-f2f312835773\") " Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.642948 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c28fa234-6cff-43e9-9cf5-a5cd2f3b67be" path="/var/lib/kubelet/pods/c28fa234-6cff-43e9-9cf5-a5cd2f3b67be/volumes" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.649720 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d1f49886-b3f9-40f5-aa03-731030fbb9fc" path="/var/lib/kubelet/pods/d1f49886-b3f9-40f5-aa03-731030fbb9fc/volumes" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.651440 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-nb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"aa79d817-2b73-469c-8b09-f2f312835773\" (UID: \"aa79d817-2b73-469c-8b09-f2f312835773\") " Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.652270 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aa79d817-2b73-469c-8b09-f2f312835773-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "aa79d817-2b73-469c-8b09-f2f312835773" (UID: "aa79d817-2b73-469c-8b09-f2f312835773"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.654404 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aa79d817-2b73-469c-8b09-f2f312835773-config" (OuterVolumeSpecName: "config") pod "aa79d817-2b73-469c-8b09-f2f312835773" (UID: "aa79d817-2b73-469c-8b09-f2f312835773"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.654505 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aa79d817-2b73-469c-8b09-f2f312835773-scripts" (OuterVolumeSpecName: "scripts") pod "aa79d817-2b73-469c-8b09-f2f312835773" (UID: "aa79d817-2b73-469c-8b09-f2f312835773"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.655701 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aa79d817-2b73-469c-8b09-f2f312835773-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.655725 4558 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/63766878-2a9a-47b6-9209-554b59500f10-ovsdbserver-sb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.655736 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.655748 4558 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/63766878-2a9a-47b6-9209-554b59500f10-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.655757 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/aa79d817-2b73-469c-8b09-f2f312835773-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.655766 4558 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/aa79d817-2b73-469c-8b09-f2f312835773-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.656953 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.657007 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-db-sync-xq5zh"] Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.657361 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="70698ad2-165d-46a5-b45e-36c4e4b6df8b" containerName="ceilometer-central-agent" containerID="cri-o://ea36ed48297a2a2bcb917d936cd3d2d844aa2faeacef821937a3bd537eb2e4ba" gracePeriod=30 Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.658612 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="70698ad2-165d-46a5-b45e-36c4e4b6df8b" containerName="proxy-httpd" containerID="cri-o://b55f709aed6d1e7f660b6aa1c679633da050ee695636dacbeec2bc9d9cacc0ef" gracePeriod=30 Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.658697 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="70698ad2-165d-46a5-b45e-36c4e4b6df8b" containerName="sg-core" containerID="cri-o://ebd5e020fe9ccfd450a30b510c61df09172d803a72760f9b96ab98868d87562f" gracePeriod=30 Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.658737 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="70698ad2-165d-46a5-b45e-36c4e4b6df8b" containerName="ceilometer-notification-agent" containerID="cri-o://44fb41a2f181b567d2295b6640d53ad267beb9330b20f34e2b79151e8226e822" gracePeriod=30 Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.663336 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/projected/aa79d817-2b73-469c-8b09-f2f312835773-kube-api-access-bwjzm" (OuterVolumeSpecName: "kube-api-access-bwjzm") pod "aa79d817-2b73-469c-8b09-f2f312835773" (UID: "aa79d817-2b73-469c-8b09-f2f312835773"). InnerVolumeSpecName "kube-api-access-bwjzm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.666560 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.666746 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/kube-state-metrics-0" podUID="91cc01be-13ba-41d9-9060-a2cfacdb2f74" containerName="kube-state-metrics" containerID="cri-o://109b1c818a10a94b43307d6a3f5d2e521d77e543f6c41fe1fc8ac78d9590ffb7" gracePeriod=30 Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.676955 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "ovndbcluster-nb-etc-ovn") pod "aa79d817-2b73-469c-8b09-f2f312835773" (UID: "aa79d817-2b73-469c-8b09-f2f312835773"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.697806 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-db-sync-rmm2x"] Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.724259 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa79d817-2b73-469c-8b09-f2f312835773-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "aa79d817-2b73-469c-8b09-f2f312835773" (UID: "aa79d817-2b73-469c-8b09-f2f312835773"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.758158 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/987d6a20-4fc9-411e-ae1f-9ff5dd78e80d-config-data\") pod \"987d6a20-4fc9-411e-ae1f-9ff5dd78e80d\" (UID: \"987d6a20-4fc9-411e-ae1f-9ff5dd78e80d\") " Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.759669 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zk5cx\" (UniqueName: \"kubernetes.io/projected/987d6a20-4fc9-411e-ae1f-9ff5dd78e80d-kube-api-access-zk5cx\") pod \"987d6a20-4fc9-411e-ae1f-9ff5dd78e80d\" (UID: \"987d6a20-4fc9-411e-ae1f-9ff5dd78e80d\") " Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.759791 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/987d6a20-4fc9-411e-ae1f-9ff5dd78e80d-kolla-config\") pod \"987d6a20-4fc9-411e-ae1f-9ff5dd78e80d\" (UID: \"987d6a20-4fc9-411e-ae1f-9ff5dd78e80d\") " Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.759848 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/987d6a20-4fc9-411e-ae1f-9ff5dd78e80d-memcached-tls-certs\") pod \"987d6a20-4fc9-411e-ae1f-9ff5dd78e80d\" (UID: \"987d6a20-4fc9-411e-ae1f-9ff5dd78e80d\") " Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.759900 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/987d6a20-4fc9-411e-ae1f-9ff5dd78e80d-combined-ca-bundle\") pod \"987d6a20-4fc9-411e-ae1f-9ff5dd78e80d\" (UID: \"987d6a20-4fc9-411e-ae1f-9ff5dd78e80d\") " Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.760878 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa79d817-2b73-469c-8b09-f2f312835773-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.760902 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.760912 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bwjzm\" (UniqueName: \"kubernetes.io/projected/aa79d817-2b73-469c-8b09-f2f312835773-kube-api-access-bwjzm\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.762826 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/987d6a20-4fc9-411e-ae1f-9ff5dd78e80d-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "987d6a20-4fc9-411e-ae1f-9ff5dd78e80d" (UID: "987d6a20-4fc9-411e-ae1f-9ff5dd78e80d"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.768599 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/987d6a20-4fc9-411e-ae1f-9ff5dd78e80d-config-data" (OuterVolumeSpecName: "config-data") pod "987d6a20-4fc9-411e-ae1f-9ff5dd78e80d" (UID: "987d6a20-4fc9-411e-ae1f-9ff5dd78e80d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.776341 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/987d6a20-4fc9-411e-ae1f-9ff5dd78e80d-kube-api-access-zk5cx" (OuterVolumeSpecName: "kube-api-access-zk5cx") pod "987d6a20-4fc9-411e-ae1f-9ff5dd78e80d" (UID: "987d6a20-4fc9-411e-ae1f-9ff5dd78e80d"). InnerVolumeSpecName "kube-api-access-zk5cx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.778158 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-ring-rebalance-zn4xn" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.832247 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.837659 4558 scope.go:117] "RemoveContainer" containerID="37f931cc076b0de2ba4bd77c0544f81aec55cb5d8ae2f041d7332ba974499651" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.845821 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.862529 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/987d6a20-4fc9-411e-ae1f-9ff5dd78e80d-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.862556 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zk5cx\" (UniqueName: \"kubernetes.io/projected/987d6a20-4fc9-411e-ae1f-9ff5dd78e80d-kube-api-access-zk5cx\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.862566 4558 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/987d6a20-4fc9-411e-ae1f-9ff5dd78e80d-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.862574 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.863076 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa79d817-2b73-469c-8b09-f2f312835773-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "aa79d817-2b73-469c-8b09-f2f312835773" (UID: "aa79d817-2b73-469c-8b09-f2f312835773"). InnerVolumeSpecName "metrics-certs-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.872223 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.893963 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:26:48 crc kubenswrapper[4558]: E0120 17:26:48.894423 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa79d817-2b73-469c-8b09-f2f312835773" containerName="ovsdbserver-nb" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.894438 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa79d817-2b73-469c-8b09-f2f312835773" containerName="ovsdbserver-nb" Jan 20 17:26:48 crc kubenswrapper[4558]: E0120 17:26:48.894455 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="987d6a20-4fc9-411e-ae1f-9ff5dd78e80d" containerName="memcached" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.894461 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="987d6a20-4fc9-411e-ae1f-9ff5dd78e80d" containerName="memcached" Jan 20 17:26:48 crc kubenswrapper[4558]: E0120 17:26:48.894470 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa79d817-2b73-469c-8b09-f2f312835773" containerName="openstack-network-exporter" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.894476 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa79d817-2b73-469c-8b09-f2f312835773" containerName="openstack-network-exporter" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.894673 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa79d817-2b73-469c-8b09-f2f312835773" containerName="openstack-network-exporter" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.894684 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="987d6a20-4fc9-411e-ae1f-9ff5dd78e80d" containerName="memcached" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.894696 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa79d817-2b73-469c-8b09-f2f312835773" containerName="ovsdbserver-nb" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.895705 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.898611 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ovndbcluster-sb-ovndbs" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.898929 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovndbcluster-sb-scripts" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.899054 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ovncluster-ovndbcluster-sb-dockercfg-2c6d4" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.899249 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovndbcluster-sb-config" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.902553 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack-kuttl-tests/cinder-scheduler-0" podUID="7c7138fb-59b8-47c0-a431-023fa79404f1" containerName="cinder-scheduler" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.916416 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/987d6a20-4fc9-411e-ae1f-9ff5dd78e80d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "987d6a20-4fc9-411e-ae1f-9ff5dd78e80d" (UID: "987d6a20-4fc9-411e-ae1f-9ff5dd78e80d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.929699 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.964585 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9pfd4\" (UniqueName: \"kubernetes.io/projected/7b6a0803-befe-4426-9e2a-a04ac12f2d7c-kube-api-access-9pfd4\") pod \"ovsdbserver-sb-0\" (UID: \"7b6a0803-befe-4426-9e2a-a04ac12f2d7c\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.964630 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7b6a0803-befe-4426-9e2a-a04ac12f2d7c-config\") pod \"ovsdbserver-sb-0\" (UID: \"7b6a0803-befe-4426-9e2a-a04ac12f2d7c\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.964717 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b6a0803-befe-4426-9e2a-a04ac12f2d7c-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"7b6a0803-befe-4426-9e2a-a04ac12f2d7c\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.964737 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/7b6a0803-befe-4426-9e2a-a04ac12f2d7c-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"7b6a0803-befe-4426-9e2a-a04ac12f2d7c\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.964785 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: 
\"kubernetes.io/empty-dir/7b6a0803-befe-4426-9e2a-a04ac12f2d7c-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"7b6a0803-befe-4426-9e2a-a04ac12f2d7c\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.964843 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/7b6a0803-befe-4426-9e2a-a04ac12f2d7c-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"7b6a0803-befe-4426-9e2a-a04ac12f2d7c\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.964874 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-sb-0\" (UID: \"7b6a0803-befe-4426-9e2a-a04ac12f2d7c\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.964898 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7b6a0803-befe-4426-9e2a-a04ac12f2d7c-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"7b6a0803-befe-4426-9e2a-a04ac12f2d7c\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.964979 4558 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa79d817-2b73-469c-8b09-f2f312835773-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.964991 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/987d6a20-4fc9-411e-ae1f-9ff5dd78e80d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.970798 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/kube-state-metrics-0" podUID="91cc01be-13ba-41d9-9060-a2cfacdb2f74" containerName="kube-state-metrics" probeResult="failure" output="Get \"https://10.217.1.59:8081/readyz\": dial tcp 10.217.1.59:8081: connect: connection refused" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.975941 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/987d6a20-4fc9-411e-ae1f-9ff5dd78e80d-memcached-tls-certs" (OuterVolumeSpecName: "memcached-tls-certs") pod "987d6a20-4fc9-411e-ae1f-9ff5dd78e80d" (UID: "987d6a20-4fc9-411e-ae1f-9ff5dd78e80d"). InnerVolumeSpecName "memcached-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:26:48 crc kubenswrapper[4558]: I0120 17:26:48.997645 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-db-sync-92t4c"] Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.053541 4558 scope.go:117] "RemoveContainer" containerID="f69a90baad4062187a2630067e5b5be8fe21f3f7ef14d41a5c46fea09bbfc6d4" Jan 20 17:26:49 crc kubenswrapper[4558]: E0120 17:26:49.056960 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f69a90baad4062187a2630067e5b5be8fe21f3f7ef14d41a5c46fea09bbfc6d4\": container with ID starting with f69a90baad4062187a2630067e5b5be8fe21f3f7ef14d41a5c46fea09bbfc6d4 not found: ID does not exist" containerID="f69a90baad4062187a2630067e5b5be8fe21f3f7ef14d41a5c46fea09bbfc6d4" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.056994 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f69a90baad4062187a2630067e5b5be8fe21f3f7ef14d41a5c46fea09bbfc6d4"} err="failed to get container status \"f69a90baad4062187a2630067e5b5be8fe21f3f7ef14d41a5c46fea09bbfc6d4\": rpc error: code = NotFound desc = could not find container \"f69a90baad4062187a2630067e5b5be8fe21f3f7ef14d41a5c46fea09bbfc6d4\": container with ID starting with f69a90baad4062187a2630067e5b5be8fe21f3f7ef14d41a5c46fea09bbfc6d4 not found: ID does not exist" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.057021 4558 scope.go:117] "RemoveContainer" containerID="37f931cc076b0de2ba4bd77c0544f81aec55cb5d8ae2f041d7332ba974499651" Jan 20 17:26:49 crc kubenswrapper[4558]: E0120 17:26:49.063026 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"37f931cc076b0de2ba4bd77c0544f81aec55cb5d8ae2f041d7332ba974499651\": container with ID starting with 37f931cc076b0de2ba4bd77c0544f81aec55cb5d8ae2f041d7332ba974499651 not found: ID does not exist" containerID="37f931cc076b0de2ba4bd77c0544f81aec55cb5d8ae2f041d7332ba974499651" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.063062 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"37f931cc076b0de2ba4bd77c0544f81aec55cb5d8ae2f041d7332ba974499651"} err="failed to get container status \"37f931cc076b0de2ba4bd77c0544f81aec55cb5d8ae2f041d7332ba974499651\": rpc error: code = NotFound desc = could not find container \"37f931cc076b0de2ba4bd77c0544f81aec55cb5d8ae2f041d7332ba974499651\": container with ID starting with 37f931cc076b0de2ba4bd77c0544f81aec55cb5d8ae2f041d7332ba974499651 not found: ID does not exist" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.063087 4558 scope.go:117] "RemoveContainer" containerID="f69a90baad4062187a2630067e5b5be8fe21f3f7ef14d41a5c46fea09bbfc6d4" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.065105 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.065519 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f69a90baad4062187a2630067e5b5be8fe21f3f7ef14d41a5c46fea09bbfc6d4"} err="failed to get container status \"f69a90baad4062187a2630067e5b5be8fe21f3f7ef14d41a5c46fea09bbfc6d4\": rpc error: code = NotFound desc = could not find container \"f69a90baad4062187a2630067e5b5be8fe21f3f7ef14d41a5c46fea09bbfc6d4\": container with ID starting with f69a90baad4062187a2630067e5b5be8fe21f3f7ef14d41a5c46fea09bbfc6d4 not found: ID does not exist" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.065539 4558 scope.go:117] "RemoveContainer" containerID="37f931cc076b0de2ba4bd77c0544f81aec55cb5d8ae2f041d7332ba974499651" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.073912 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"37f931cc076b0de2ba4bd77c0544f81aec55cb5d8ae2f041d7332ba974499651"} err="failed to get container status \"37f931cc076b0de2ba4bd77c0544f81aec55cb5d8ae2f041d7332ba974499651\": rpc error: code = NotFound desc = could not find container \"37f931cc076b0de2ba4bd77c0544f81aec55cb5d8ae2f041d7332ba974499651\": container with ID starting with 37f931cc076b0de2ba4bd77c0544f81aec55cb5d8ae2f041d7332ba974499651 not found: ID does not exist" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.078065 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/7b6a0803-befe-4426-9e2a-a04ac12f2d7c-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"7b6a0803-befe-4426-9e2a-a04ac12f2d7c\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.078148 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/7b6a0803-befe-4426-9e2a-a04ac12f2d7c-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"7b6a0803-befe-4426-9e2a-a04ac12f2d7c\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.078275 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/7b6a0803-befe-4426-9e2a-a04ac12f2d7c-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"7b6a0803-befe-4426-9e2a-a04ac12f2d7c\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.078321 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-sb-0\" (UID: \"7b6a0803-befe-4426-9e2a-a04ac12f2d7c\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.078336 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7b6a0803-befe-4426-9e2a-a04ac12f2d7c-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"7b6a0803-befe-4426-9e2a-a04ac12f2d7c\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.078443 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9pfd4\" (UniqueName: 
\"kubernetes.io/projected/7b6a0803-befe-4426-9e2a-a04ac12f2d7c-kube-api-access-9pfd4\") pod \"ovsdbserver-sb-0\" (UID: \"7b6a0803-befe-4426-9e2a-a04ac12f2d7c\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.078485 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7b6a0803-befe-4426-9e2a-a04ac12f2d7c-config\") pod \"ovsdbserver-sb-0\" (UID: \"7b6a0803-befe-4426-9e2a-a04ac12f2d7c\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.078634 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b6a0803-befe-4426-9e2a-a04ac12f2d7c-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"7b6a0803-befe-4426-9e2a-a04ac12f2d7c\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.078695 4558 reconciler_common.go:293] "Volume detached for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/987d6a20-4fc9-411e-ae1f-9ff5dd78e80d-memcached-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.079347 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-sb-0\" (UID: \"7b6a0803-befe-4426-9e2a-a04ac12f2d7c\") device mount path \"/mnt/openstack/pv05\"" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.080786 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7b6a0803-befe-4426-9e2a-a04ac12f2d7c-config\") pod \"ovsdbserver-sb-0\" (UID: \"7b6a0803-befe-4426-9e2a-a04ac12f2d7c\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.080944 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7b6a0803-befe-4426-9e2a-a04ac12f2d7c-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"7b6a0803-befe-4426-9e2a-a04ac12f2d7c\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.081085 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/7b6a0803-befe-4426-9e2a-a04ac12f2d7c-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"7b6a0803-befe-4426-9e2a-a04ac12f2d7c\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.118673 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/7b6a0803-befe-4426-9e2a-a04ac12f2d7c-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"7b6a0803-befe-4426-9e2a-a04ac12f2d7c\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.131804 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/7b6a0803-befe-4426-9e2a-a04ac12f2d7c-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"7b6a0803-befe-4426-9e2a-a04ac12f2d7c\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.154257 4558 kubelet.go:2428] 
"SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-db-sync-gn4wj"] Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.161908 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b6a0803-befe-4426-9e2a-a04ac12f2d7c-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"7b6a0803-befe-4426-9e2a-a04ac12f2d7c\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.165050 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9pfd4\" (UniqueName: \"kubernetes.io/projected/7b6a0803-befe-4426-9e2a-a04ac12f2d7c-kube-api-access-9pfd4\") pod \"ovsdbserver-sb-0\" (UID: \"7b6a0803-befe-4426-9e2a-a04ac12f2d7c\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.180877 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/d274ba80-2e42-4823-bbf2-02691e791ec9-kolla-config\") pod \"d274ba80-2e42-4823-bbf2-02691e791ec9\" (UID: \"d274ba80-2e42-4823-bbf2-02691e791ec9\") " Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.181040 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d274ba80-2e42-4823-bbf2-02691e791ec9-combined-ca-bundle\") pod \"d274ba80-2e42-4823-bbf2-02691e791ec9\" (UID: \"d274ba80-2e42-4823-bbf2-02691e791ec9\") " Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.181080 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r9wdx\" (UniqueName: \"kubernetes.io/projected/d274ba80-2e42-4823-bbf2-02691e791ec9-kube-api-access-r9wdx\") pod \"d274ba80-2e42-4823-bbf2-02691e791ec9\" (UID: \"d274ba80-2e42-4823-bbf2-02691e791ec9\") " Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.181126 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"d274ba80-2e42-4823-bbf2-02691e791ec9\" (UID: \"d274ba80-2e42-4823-bbf2-02691e791ec9\") " Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.181278 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/d274ba80-2e42-4823-bbf2-02691e791ec9-config-data-default\") pod \"d274ba80-2e42-4823-bbf2-02691e791ec9\" (UID: \"d274ba80-2e42-4823-bbf2-02691e791ec9\") " Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.181966 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/d274ba80-2e42-4823-bbf2-02691e791ec9-galera-tls-certs\") pod \"d274ba80-2e42-4823-bbf2-02691e791ec9\" (UID: \"d274ba80-2e42-4823-bbf2-02691e791ec9\") " Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.182012 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d274ba80-2e42-4823-bbf2-02691e791ec9-operator-scripts\") pod \"d274ba80-2e42-4823-bbf2-02691e791ec9\" (UID: \"d274ba80-2e42-4823-bbf2-02691e791ec9\") " Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.182087 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: 
\"kubernetes.io/empty-dir/d274ba80-2e42-4823-bbf2-02691e791ec9-config-data-generated\") pod \"d274ba80-2e42-4823-bbf2-02691e791ec9\" (UID: \"d274ba80-2e42-4823-bbf2-02691e791ec9\") " Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.182916 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d274ba80-2e42-4823-bbf2-02691e791ec9-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "d274ba80-2e42-4823-bbf2-02691e791ec9" (UID: "d274ba80-2e42-4823-bbf2-02691e791ec9"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.182982 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d274ba80-2e42-4823-bbf2-02691e791ec9-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "d274ba80-2e42-4823-bbf2-02691e791ec9" (UID: "d274ba80-2e42-4823-bbf2-02691e791ec9"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.184277 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d274ba80-2e42-4823-bbf2-02691e791ec9-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "d274ba80-2e42-4823-bbf2-02691e791ec9" (UID: "d274ba80-2e42-4823-bbf2-02691e791ec9"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.184619 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d274ba80-2e42-4823-bbf2-02691e791ec9-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d274ba80-2e42-4823-bbf2-02691e791ec9" (UID: "d274ba80-2e42-4823-bbf2-02691e791ec9"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.191856 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-sb-0\" (UID: \"7b6a0803-befe-4426-9e2a-a04ac12f2d7c\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.208030 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d274ba80-2e42-4823-bbf2-02691e791ec9-kube-api-access-r9wdx" (OuterVolumeSpecName: "kube-api-access-r9wdx") pod "d274ba80-2e42-4823-bbf2-02691e791ec9" (UID: "d274ba80-2e42-4823-bbf2-02691e791ec9"). InnerVolumeSpecName "kube-api-access-r9wdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.234796 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.237585 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "mysql-db") pod "d274ba80-2e42-4823-bbf2-02691e791ec9" (UID: "d274ba80-2e42-4823-bbf2-02691e791ec9"). InnerVolumeSpecName "local-storage10-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.266479 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa79d817-2b73-469c-8b09-f2f312835773-ovsdbserver-nb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-nb-tls-certs") pod "aa79d817-2b73-469c-8b09-f2f312835773" (UID: "aa79d817-2b73-469c-8b09-f2f312835773"). InnerVolumeSpecName "ovsdbserver-nb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:26:49 crc kubenswrapper[4558]: W0120 17:26:49.272316 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5f91fe62_3615_4153_a6c7_1652a5780bb5.slice/crio-80d9543d95ec125bdd2c22a852c98aeaffb38bc4939c5bf2f6dadc8b3e90677f WatchSource:0}: Error finding container 80d9543d95ec125bdd2c22a852c98aeaffb38bc4939c5bf2f6dadc8b3e90677f: Status 404 returned error can't find the container with id 80d9543d95ec125bdd2c22a852c98aeaffb38bc4939c5bf2f6dadc8b3e90677f Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.285655 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d274ba80-2e42-4823-bbf2-02691e791ec9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d274ba80-2e42-4823-bbf2-02691e791ec9" (UID: "d274ba80-2e42-4823-bbf2-02691e791ec9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.287713 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d274ba80-2e42-4823-bbf2-02691e791ec9-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.287733 4558 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa79d817-2b73-469c-8b09-f2f312835773-ovsdbserver-nb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.287746 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/d274ba80-2e42-4823-bbf2-02691e791ec9-config-data-generated\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.287756 4558 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/d274ba80-2e42-4823-bbf2-02691e791ec9-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.287769 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d274ba80-2e42-4823-bbf2-02691e791ec9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.287786 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r9wdx\" (UniqueName: \"kubernetes.io/projected/d274ba80-2e42-4823-bbf2-02691e791ec9-kube-api-access-r9wdx\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.287806 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" " Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.287816 4558 reconciler_common.go:293] "Volume detached for volume 
\"config-data-default\" (UniqueName: \"kubernetes.io/configmap/d274ba80-2e42-4823-bbf2-02691e791ec9-config-data-default\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.294245 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d274ba80-2e42-4823-bbf2-02691e791ec9-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "d274ba80-2e42-4823-bbf2-02691e791ec9" (UID: "d274ba80-2e42-4823-bbf2-02691e791ec9"). InnerVolumeSpecName "galera-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:26:49 crc kubenswrapper[4558]: E0120 17:26:49.347273 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="bdc31a24aa03485670883d3ea8d3795dfe70b5c89196347668c72f2086bf2bea" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.351710 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-db-sync-rq6qr"] Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.359824 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.390456 4558 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/d274ba80-2e42-4823-bbf2-02691e791ec9-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.390486 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:49 crc kubenswrapper[4558]: E0120 17:26:49.425497 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="bdc31a24aa03485670883d3ea8d3795dfe70b5c89196347668c72f2086bf2bea" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Jan 20 17:26:49 crc kubenswrapper[4558]: E0120 17:26:49.451204 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="bdc31a24aa03485670883d3ea8d3795dfe70b5c89196347668c72f2086bf2bea" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Jan 20 17:26:49 crc kubenswrapper[4558]: E0120 17:26:49.451274 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/ovn-northd-0" podUID="c08cb973-6eb3-414d-b127-fc146b0fb1f2" containerName="ovn-northd" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.469538 4558 generic.go:334] "Generic (PLEG): container finished" podID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" containerID="9016dbbaf3d5e8bdb4e9556a7b608c196cc888571331abb5670be24de359a45a" exitCode=0 Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.469564 4558 generic.go:334] "Generic (PLEG): container finished" podID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" 
containerID="68fbcc76c0e2dedc82e6715e0ec8b4d992e77e10e439a698ddd8d943bc08f443" exitCode=0 Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.469571 4558 generic.go:334] "Generic (PLEG): container finished" podID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" containerID="50eea846d91051f2dd7105d2a67595fd30c64b90855e0b8b97a766b5024a4194" exitCode=0 Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.469579 4558 generic.go:334] "Generic (PLEG): container finished" podID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" containerID="a9f1e3687db381bd17fdfb1b0e3aaf9c0fa91fd605b0d73ed9095c3bd3bd381f" exitCode=0 Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.469585 4558 generic.go:334] "Generic (PLEG): container finished" podID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" containerID="dc5ce68e3517c196bdee8bba30197f747f505c80c7dfe2ac492a71348d08c4f8" exitCode=0 Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.469592 4558 generic.go:334] "Generic (PLEG): container finished" podID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" containerID="f471fa142ace1139e497b71d2408b5de3eb8a81c1e52fc0f3e0b7abe127fa420" exitCode=0 Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.469598 4558 generic.go:334] "Generic (PLEG): container finished" podID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" containerID="7d429e35b6e23b8fe5da4b2ba40f03361d3954fbabd9c27927cee2dca39904f9" exitCode=0 Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.469604 4558 generic.go:334] "Generic (PLEG): container finished" podID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" containerID="8dfb739a96b0388de7f0bca3f773a09ff87c39b3fc3e077a97ec5ad2b9469fe8" exitCode=0 Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.469609 4558 generic.go:334] "Generic (PLEG): container finished" podID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" containerID="fac08b16dfb922f531dfe4ed284cdec4dfd765c94432b6d5e4237c6ab1063c5f" exitCode=0 Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.469615 4558 generic.go:334] "Generic (PLEG): container finished" podID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" containerID="9f94ead147cad59ed2c937f2aace48390c6cf7ba843fc0898b176e2475d251b5" exitCode=0 Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.469621 4558 generic.go:334] "Generic (PLEG): container finished" podID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" containerID="22cdeeaca39d5197ecff3f1352fe3fc3b073ce7ef0565d67854f14d8c72aeba3" exitCode=0 Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.469627 4558 generic.go:334] "Generic (PLEG): container finished" podID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" containerID="33c92b2ec4e98e2de94cfd9d49d4bf673da1f8bed18bac8e9c5007c99a8dc48e" exitCode=0 Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.469632 4558 generic.go:334] "Generic (PLEG): container finished" podID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" containerID="36bfb57970925c866cd826c6992c1bb5eb23be201b3fee3a44013164f6bf6c45" exitCode=0 Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.469637 4558 generic.go:334] "Generic (PLEG): container finished" podID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" containerID="e6440af1d01d3d7e989418e06350468870193b38937d2ae69a49ff7a72d20bf2" exitCode=0 Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.469687 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"cbbfff95-9f90-4be2-9ed7-0a1cbbe43542","Type":"ContainerDied","Data":"9016dbbaf3d5e8bdb4e9556a7b608c196cc888571331abb5670be24de359a45a"} Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.469716 4558 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"cbbfff95-9f90-4be2-9ed7-0a1cbbe43542","Type":"ContainerDied","Data":"68fbcc76c0e2dedc82e6715e0ec8b4d992e77e10e439a698ddd8d943bc08f443"} Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.469726 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"cbbfff95-9f90-4be2-9ed7-0a1cbbe43542","Type":"ContainerDied","Data":"50eea846d91051f2dd7105d2a67595fd30c64b90855e0b8b97a766b5024a4194"} Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.469736 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"cbbfff95-9f90-4be2-9ed7-0a1cbbe43542","Type":"ContainerDied","Data":"a9f1e3687db381bd17fdfb1b0e3aaf9c0fa91fd605b0d73ed9095c3bd3bd381f"} Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.469744 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"cbbfff95-9f90-4be2-9ed7-0a1cbbe43542","Type":"ContainerDied","Data":"dc5ce68e3517c196bdee8bba30197f747f505c80c7dfe2ac492a71348d08c4f8"} Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.469752 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"cbbfff95-9f90-4be2-9ed7-0a1cbbe43542","Type":"ContainerDied","Data":"f471fa142ace1139e497b71d2408b5de3eb8a81c1e52fc0f3e0b7abe127fa420"} Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.469760 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"cbbfff95-9f90-4be2-9ed7-0a1cbbe43542","Type":"ContainerDied","Data":"7d429e35b6e23b8fe5da4b2ba40f03361d3954fbabd9c27927cee2dca39904f9"} Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.469768 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"cbbfff95-9f90-4be2-9ed7-0a1cbbe43542","Type":"ContainerDied","Data":"8dfb739a96b0388de7f0bca3f773a09ff87c39b3fc3e077a97ec5ad2b9469fe8"} Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.469786 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"cbbfff95-9f90-4be2-9ed7-0a1cbbe43542","Type":"ContainerDied","Data":"fac08b16dfb922f531dfe4ed284cdec4dfd765c94432b6d5e4237c6ab1063c5f"} Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.469797 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"cbbfff95-9f90-4be2-9ed7-0a1cbbe43542","Type":"ContainerDied","Data":"9f94ead147cad59ed2c937f2aace48390c6cf7ba843fc0898b176e2475d251b5"} Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.469806 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"cbbfff95-9f90-4be2-9ed7-0a1cbbe43542","Type":"ContainerDied","Data":"22cdeeaca39d5197ecff3f1352fe3fc3b073ce7ef0565d67854f14d8c72aeba3"} Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.469815 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"cbbfff95-9f90-4be2-9ed7-0a1cbbe43542","Type":"ContainerDied","Data":"33c92b2ec4e98e2de94cfd9d49d4bf673da1f8bed18bac8e9c5007c99a8dc48e"} Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.469824 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" 
event={"ID":"cbbfff95-9f90-4be2-9ed7-0a1cbbe43542","Type":"ContainerDied","Data":"36bfb57970925c866cd826c6992c1bb5eb23be201b3fee3a44013164f6bf6c45"} Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.469832 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"cbbfff95-9f90-4be2-9ed7-0a1cbbe43542","Type":"ContainerDied","Data":"e6440af1d01d3d7e989418e06350468870193b38937d2ae69a49ff7a72d20bf2"} Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.557709 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"987d6a20-4fc9-411e-ae1f-9ff5dd78e80d","Type":"ContainerDied","Data":"7632ebad1469d2f19951c3a650fdd764e24c47ac23b221cece303c5d595e0540"} Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.557771 4558 scope.go:117] "RemoveContainer" containerID="9e7a56b68f74368ebdccf3a2050688f4660eec04ee70f20c2816ab7ef47f5d08" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.557718 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.561202 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-sync-xq5zh" event={"ID":"29c1b3a9-0bdc-423c-af7f-42f86f3693b5","Type":"ContainerStarted","Data":"872d882ed3397d60cfc57175054f837549bbe824340488a1d438aea0610e5316"} Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.567606 4558 generic.go:334] "Generic (PLEG): container finished" podID="91cc01be-13ba-41d9-9060-a2cfacdb2f74" containerID="109b1c818a10a94b43307d6a3f5d2e521d77e543f6c41fe1fc8ac78d9590ffb7" exitCode=2 Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.567668 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"91cc01be-13ba-41d9-9060-a2cfacdb2f74","Type":"ContainerDied","Data":"109b1c818a10a94b43307d6a3f5d2e521d77e543f6c41fe1fc8ac78d9590ffb7"} Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.578429 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-nb-0_aa79d817-2b73-469c-8b09-f2f312835773/ovsdbserver-nb/0.log" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.578732 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.578925 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"aa79d817-2b73-469c-8b09-f2f312835773","Type":"ContainerDied","Data":"e6da935e2ec40f9f80911536c92ee50f3e6ae508066cd6ccbd070c3710fc353f"} Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.603456 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.604952 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"d274ba80-2e42-4823-bbf2-02691e791ec9","Type":"ContainerDied","Data":"32bab0bb78471eae5e794a30e1a2b093ec27e805a0be0cf942b4b318fcbdef9f"} Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.620562 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-sync-92t4c" event={"ID":"f59d0040-b44b-46e3-bb18-c0ea74722cd3","Type":"ContainerStarted","Data":"d13f731237dd852df88ba7a06c430e35ee4a694754db0eb3ff387233ca07edb3"} Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.626123 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.629281 4558 generic.go:334] "Generic (PLEG): container finished" podID="9db3acdd-184a-4004-a8d3-451673126318" containerID="74db7b040a7e0441aa4205a49b98fee925418b1a898081b635b0efcdf2e08d5a" exitCode=0 Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.629342 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"9db3acdd-184a-4004-a8d3-451673126318","Type":"ContainerDied","Data":"74db7b040a7e0441aa4205a49b98fee925418b1a898081b635b0efcdf2e08d5a"} Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.630220 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-sync-rmm2x" event={"ID":"c7f6b91b-b55e-4af0-b496-46c80b92bad1","Type":"ContainerStarted","Data":"35f91a33cf91d48d54b27ed083811563b85cddfdf68483da24f94953e58b67d4"} Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.646934 4558 generic.go:334] "Generic (PLEG): container finished" podID="70698ad2-165d-46a5-b45e-36c4e4b6df8b" containerID="b55f709aed6d1e7f660b6aa1c679633da050ee695636dacbeec2bc9d9cacc0ef" exitCode=0 Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.647705 4558 generic.go:334] "Generic (PLEG): container finished" podID="70698ad2-165d-46a5-b45e-36c4e4b6df8b" containerID="ebd5e020fe9ccfd450a30b510c61df09172d803a72760f9b96ab98868d87562f" exitCode=2 Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.647259 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"70698ad2-165d-46a5-b45e-36c4e4b6df8b","Type":"ContainerDied","Data":"b55f709aed6d1e7f660b6aa1c679633da050ee695636dacbeec2bc9d9cacc0ef"} Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.647899 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"70698ad2-165d-46a5-b45e-36c4e4b6df8b","Type":"ContainerDied","Data":"ebd5e020fe9ccfd450a30b510c61df09172d803a72760f9b96ab98868d87562f"} Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.650510 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.667587 4558 scope.go:117] "RemoveContainer" containerID="9902de0f0a19fcf3501305d4c98aa8a835da1fc64164b28742b36af1db06a046" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.682738 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:26:49 crc kubenswrapper[4558]: E0120 17:26:49.683618 4558 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="d274ba80-2e42-4823-bbf2-02691e791ec9" containerName="galera" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.683641 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d274ba80-2e42-4823-bbf2-02691e791ec9" containerName="galera" Jan 20 17:26:49 crc kubenswrapper[4558]: E0120 17:26:49.683677 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d274ba80-2e42-4823-bbf2-02691e791ec9" containerName="mysql-bootstrap" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.683684 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d274ba80-2e42-4823-bbf2-02691e791ec9" containerName="mysql-bootstrap" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.735527 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d274ba80-2e42-4823-bbf2-02691e791ec9" containerName="galera" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.748524 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.765903 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-memcached-svc" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.767411 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"memcached-memcached-dockercfg-pp5ft" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.767663 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"memcached-config-data" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.815723 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.825097 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qszsc\" (UniqueName: \"kubernetes.io/projected/3460a75e-3553-47b9-bfc5-39c2c459826e-kube-api-access-qszsc\") pod \"memcached-0\" (UID: \"3460a75e-3553-47b9-bfc5-39c2c459826e\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.825215 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3460a75e-3553-47b9-bfc5-39c2c459826e-config-data\") pod \"memcached-0\" (UID: \"3460a75e-3553-47b9-bfc5-39c2c459826e\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.825268 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/3460a75e-3553-47b9-bfc5-39c2c459826e-kolla-config\") pod \"memcached-0\" (UID: \"3460a75e-3553-47b9-bfc5-39c2c459826e\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.825291 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3460a75e-3553-47b9-bfc5-39c2c459826e-combined-ca-bundle\") pod \"memcached-0\" (UID: \"3460a75e-3553-47b9-bfc5-39c2c459826e\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.825334 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/3460a75e-3553-47b9-bfc5-39c2c459826e-memcached-tls-certs\") pod \"memcached-0\" (UID: \"3460a75e-3553-47b9-bfc5-39c2c459826e\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.837632 4558 scope.go:117] "RemoveContainer" containerID="f5e6e1f7dc89862c9157326c6e877a21963b94977d4b4616a286d30936fb463f" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.839248 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.854584 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.865562 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.867442 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.881677 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovndbcluster-nb-config" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.881783 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovndbcluster-nb-scripts" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.882046 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ovndbcluster-nb-ovndbs" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.882270 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ovncluster-ovndbcluster-nb-dockercfg-6zkzl" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.889406 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.902148 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.925896 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.926930 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3460a75e-3553-47b9-bfc5-39c2c459826e-config-data\") pod \"memcached-0\" (UID: \"3460a75e-3553-47b9-bfc5-39c2c459826e\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.926993 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/3460a75e-3553-47b9-bfc5-39c2c459826e-kolla-config\") pod \"memcached-0\" (UID: \"3460a75e-3553-47b9-bfc5-39c2c459826e\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.927025 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3460a75e-3553-47b9-bfc5-39c2c459826e-combined-ca-bundle\") pod \"memcached-0\" (UID: \"3460a75e-3553-47b9-bfc5-39c2c459826e\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.927072 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/3460a75e-3553-47b9-bfc5-39c2c459826e-memcached-tls-certs\") pod \"memcached-0\" (UID: \"3460a75e-3553-47b9-bfc5-39c2c459826e\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.927111 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qszsc\" (UniqueName: \"kubernetes.io/projected/3460a75e-3553-47b9-bfc5-39c2c459826e-kube-api-access-qszsc\") pod \"memcached-0\" (UID: \"3460a75e-3553-47b9-bfc5-39c2c459826e\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.927862 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/3460a75e-3553-47b9-bfc5-39c2c459826e-kolla-config\") pod \"memcached-0\" (UID: \"3460a75e-3553-47b9-bfc5-39c2c459826e\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.927958 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3460a75e-3553-47b9-bfc5-39c2c459826e-config-data\") pod \"memcached-0\" (UID: \"3460a75e-3553-47b9-bfc5-39c2c459826e\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.933003 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-db-sync-hbfss"] Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.937970 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3460a75e-3553-47b9-bfc5-39c2c459826e-combined-ca-bundle\") pod \"memcached-0\" (UID: \"3460a75e-3553-47b9-bfc5-39c2c459826e\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.940856 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/3460a75e-3553-47b9-bfc5-39c2c459826e-memcached-tls-certs\") pod \"memcached-0\" (UID: \"3460a75e-3553-47b9-bfc5-39c2c459826e\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.947823 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.949785 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.954430 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"galera-openstack-cell1-dockercfg-ct6ph" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.955746 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-galera-openstack-cell1-svc" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.955917 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-cell1-config-data" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.956289 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-cell1-scripts" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.970737 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qszsc\" (UniqueName: \"kubernetes.io/projected/3460a75e-3553-47b9-bfc5-39c2c459826e-kube-api-access-qszsc\") pod \"memcached-0\" (UID: \"3460a75e-3553-47b9-bfc5-39c2c459826e\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:26:49 crc kubenswrapper[4558]: I0120 17:26:49.986389 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.029641 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w4qbq\" (UniqueName: \"kubernetes.io/projected/a2068ccb-0c0d-4b32-9063-082a4c395070-kube-api-access-w4qbq\") pod \"ovsdbserver-nb-0\" (UID: \"a2068ccb-0c0d-4b32-9063-082a4c395070\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.029699 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a2068ccb-0c0d-4b32-9063-082a4c395070-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"a2068ccb-0c0d-4b32-9063-082a4c395070\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.029769 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a2068ccb-0c0d-4b32-9063-082a4c395070-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"a2068ccb-0c0d-4b32-9063-082a4c395070\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.029816 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/a2068ccb-0c0d-4b32-9063-082a4c395070-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"a2068ccb-0c0d-4b32-9063-082a4c395070\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.029838 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/a2068ccb-0c0d-4b32-9063-082a4c395070-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"a2068ccb-0c0d-4b32-9063-082a4c395070\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.029875 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"config\" (UniqueName: \"kubernetes.io/configmap/a2068ccb-0c0d-4b32-9063-082a4c395070-config\") pod \"ovsdbserver-nb-0\" (UID: \"a2068ccb-0c0d-4b32-9063-082a4c395070\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.029905 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-nb-0\" (UID: \"a2068ccb-0c0d-4b32-9063-082a4c395070\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.029925 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2068ccb-0c0d-4b32-9063-082a4c395070-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"a2068ccb-0c0d-4b32-9063-082a4c395070\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.032817 4558 scope.go:117] "RemoveContainer" containerID="96ef199805699173d17182add49a78cb239dafab7eff49108278d5e224dfb51a" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.042341 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.088990 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.126832 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.131230 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9db3acdd-184a-4004-a8d3-451673126318-combined-ca-bundle\") pod \"9db3acdd-184a-4004-a8d3-451673126318\" (UID: \"9db3acdd-184a-4004-a8d3-451673126318\") " Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.131309 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/9db3acdd-184a-4004-a8d3-451673126318-galera-tls-certs\") pod \"9db3acdd-184a-4004-a8d3-451673126318\" (UID: \"9db3acdd-184a-4004-a8d3-451673126318\") " Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.131372 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/9db3acdd-184a-4004-a8d3-451673126318-config-data-generated\") pod \"9db3acdd-184a-4004-a8d3-451673126318\" (UID: \"9db3acdd-184a-4004-a8d3-451673126318\") " Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.131410 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage17-crc\") pod \"9db3acdd-184a-4004-a8d3-451673126318\" (UID: \"9db3acdd-184a-4004-a8d3-451673126318\") " Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.131509 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9db3acdd-184a-4004-a8d3-451673126318-operator-scripts\") pod \"9db3acdd-184a-4004-a8d3-451673126318\" (UID: \"9db3acdd-184a-4004-a8d3-451673126318\") " Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 
17:26:50.131534 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/9db3acdd-184a-4004-a8d3-451673126318-config-data-default\") pod \"9db3acdd-184a-4004-a8d3-451673126318\" (UID: \"9db3acdd-184a-4004-a8d3-451673126318\") " Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.131602 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/9db3acdd-184a-4004-a8d3-451673126318-kolla-config\") pod \"9db3acdd-184a-4004-a8d3-451673126318\" (UID: \"9db3acdd-184a-4004-a8d3-451673126318\") " Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.131690 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfj97\" (UniqueName: \"kubernetes.io/projected/9db3acdd-184a-4004-a8d3-451673126318-kube-api-access-kfj97\") pod \"9db3acdd-184a-4004-a8d3-451673126318\" (UID: \"9db3acdd-184a-4004-a8d3-451673126318\") " Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.132079 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w4qbq\" (UniqueName: \"kubernetes.io/projected/a2068ccb-0c0d-4b32-9063-082a4c395070-kube-api-access-w4qbq\") pod \"ovsdbserver-nb-0\" (UID: \"a2068ccb-0c0d-4b32-9063-082a4c395070\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.132126 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a2068ccb-0c0d-4b32-9063-082a4c395070-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"a2068ccb-0c0d-4b32-9063-082a4c395070\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.132206 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-cell1-galera-0\" (UID: \"f81a00ad-36ce-4383-9e91-fe60de6939d2\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.132265 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/f81a00ad-36ce-4383-9e91-fe60de6939d2-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"f81a00ad-36ce-4383-9e91-fe60de6939d2\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.132290 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a2068ccb-0c0d-4b32-9063-082a4c395070-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"a2068ccb-0c0d-4b32-9063-082a4c395070\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.132308 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f81a00ad-36ce-4383-9e91-fe60de6939d2-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"f81a00ad-36ce-4383-9e91-fe60de6939d2\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.132370 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"kube-api-access-rlk6h\" (UniqueName: \"kubernetes.io/projected/f81a00ad-36ce-4383-9e91-fe60de6939d2-kube-api-access-rlk6h\") pod \"openstack-cell1-galera-0\" (UID: \"f81a00ad-36ce-4383-9e91-fe60de6939d2\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.132396 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/a2068ccb-0c0d-4b32-9063-082a4c395070-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"a2068ccb-0c0d-4b32-9063-082a4c395070\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.132435 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/a2068ccb-0c0d-4b32-9063-082a4c395070-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"a2068ccb-0c0d-4b32-9063-082a4c395070\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.132470 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/f81a00ad-36ce-4383-9e91-fe60de6939d2-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"f81a00ad-36ce-4383-9e91-fe60de6939d2\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.132508 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/f81a00ad-36ce-4383-9e91-fe60de6939d2-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"f81a00ad-36ce-4383-9e91-fe60de6939d2\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.132537 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f81a00ad-36ce-4383-9e91-fe60de6939d2-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"f81a00ad-36ce-4383-9e91-fe60de6939d2\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.132564 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a2068ccb-0c0d-4b32-9063-082a4c395070-config\") pod \"ovsdbserver-nb-0\" (UID: \"a2068ccb-0c0d-4b32-9063-082a4c395070\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.132612 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-nb-0\" (UID: \"a2068ccb-0c0d-4b32-9063-082a4c395070\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.132637 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2068ccb-0c0d-4b32-9063-082a4c395070-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"a2068ccb-0c0d-4b32-9063-082a4c395070\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.132683 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/f81a00ad-36ce-4383-9e91-fe60de6939d2-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"f81a00ad-36ce-4383-9e91-fe60de6939d2\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.140738 4558 scope.go:117] "RemoveContainer" containerID="90717528255ead8997bbaf58b8826534843ca75599c5bbaefead8ea9f5018821" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.148397 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a2068ccb-0c0d-4b32-9063-082a4c395070-config\") pod \"ovsdbserver-nb-0\" (UID: \"a2068ccb-0c0d-4b32-9063-082a4c395070\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.148667 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a2068ccb-0c0d-4b32-9063-082a4c395070-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"a2068ccb-0c0d-4b32-9063-082a4c395070\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.148960 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/a2068ccb-0c0d-4b32-9063-082a4c395070-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"a2068ccb-0c0d-4b32-9063-082a4c395070\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.152414 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9db3acdd-184a-4004-a8d3-451673126318-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "9db3acdd-184a-4004-a8d3-451673126318" (UID: "9db3acdd-184a-4004-a8d3-451673126318"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.155408 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-nb-0\" (UID: \"a2068ccb-0c0d-4b32-9063-082a4c395070\") device mount path \"/mnt/openstack/pv09\"" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.168273 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9db3acdd-184a-4004-a8d3-451673126318-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "9db3acdd-184a-4004-a8d3-451673126318" (UID: "9db3acdd-184a-4004-a8d3-451673126318"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.168760 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9db3acdd-184a-4004-a8d3-451673126318-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "9db3acdd-184a-4004-a8d3-451673126318" (UID: "9db3acdd-184a-4004-a8d3-451673126318"). InnerVolumeSpecName "kolla-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.169226 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a2068ccb-0c0d-4b32-9063-082a4c395070-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"a2068ccb-0c0d-4b32-9063-082a4c395070\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.171113 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9db3acdd-184a-4004-a8d3-451673126318-kube-api-access-kfj97" (OuterVolumeSpecName: "kube-api-access-kfj97") pod "9db3acdd-184a-4004-a8d3-451673126318" (UID: "9db3acdd-184a-4004-a8d3-451673126318"). InnerVolumeSpecName "kube-api-access-kfj97". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.181594 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9db3acdd-184a-4004-a8d3-451673126318-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "9db3acdd-184a-4004-a8d3-451673126318" (UID: "9db3acdd-184a-4004-a8d3-451673126318"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.182701 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-db-sync-h5hmh"] Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.217270 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2068ccb-0c0d-4b32-9063-082a4c395070-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"a2068ccb-0c0d-4b32-9063-082a4c395070\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.219832 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/a2068ccb-0c0d-4b32-9063-082a4c395070-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"a2068ccb-0c0d-4b32-9063-082a4c395070\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.226099 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w4qbq\" (UniqueName: \"kubernetes.io/projected/a2068ccb-0c0d-4b32-9063-082a4c395070-kube-api-access-w4qbq\") pod \"ovsdbserver-nb-0\" (UID: \"a2068ccb-0c0d-4b32-9063-082a4c395070\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.235225 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/91cc01be-13ba-41d9-9060-a2cfacdb2f74-kube-state-metrics-tls-config\") pod \"91cc01be-13ba-41d9-9060-a2cfacdb2f74\" (UID: \"91cc01be-13ba-41d9-9060-a2cfacdb2f74\") " Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.243641 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91cc01be-13ba-41d9-9060-a2cfacdb2f74-combined-ca-bundle\") pod \"91cc01be-13ba-41d9-9060-a2cfacdb2f74\" (UID: \"91cc01be-13ba-41d9-9060-a2cfacdb2f74\") " Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.235933 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/secret/9db3acdd-184a-4004-a8d3-451673126318-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9db3acdd-184a-4004-a8d3-451673126318" (UID: "9db3acdd-184a-4004-a8d3-451673126318"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.243873 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9db3acdd-184a-4004-a8d3-451673126318-combined-ca-bundle\") pod \"9db3acdd-184a-4004-a8d3-451673126318\" (UID: \"9db3acdd-184a-4004-a8d3-451673126318\") " Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.243972 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/91cc01be-13ba-41d9-9060-a2cfacdb2f74-kube-state-metrics-tls-certs\") pod \"91cc01be-13ba-41d9-9060-a2cfacdb2f74\" (UID: \"91cc01be-13ba-41d9-9060-a2cfacdb2f74\") " Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.244088 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kffjm\" (UniqueName: \"kubernetes.io/projected/91cc01be-13ba-41d9-9060-a2cfacdb2f74-kube-api-access-kffjm\") pod \"91cc01be-13ba-41d9-9060-a2cfacdb2f74\" (UID: \"91cc01be-13ba-41d9-9060-a2cfacdb2f74\") " Jan 20 17:26:50 crc kubenswrapper[4558]: W0120 17:26:50.244385 4558 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/9db3acdd-184a-4004-a8d3-451673126318/volumes/kubernetes.io~secret/combined-ca-bundle Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.244431 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9db3acdd-184a-4004-a8d3-451673126318-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9db3acdd-184a-4004-a8d3-451673126318" (UID: "9db3acdd-184a-4004-a8d3-451673126318"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.244795 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-cell1-galera-0\" (UID: \"f81a00ad-36ce-4383-9e91-fe60de6939d2\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.244963 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/f81a00ad-36ce-4383-9e91-fe60de6939d2-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"f81a00ad-36ce-4383-9e91-fe60de6939d2\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.245044 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f81a00ad-36ce-4383-9e91-fe60de6939d2-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"f81a00ad-36ce-4383-9e91-fe60de6939d2\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.245207 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rlk6h\" (UniqueName: \"kubernetes.io/projected/f81a00ad-36ce-4383-9e91-fe60de6939d2-kube-api-access-rlk6h\") pod \"openstack-cell1-galera-0\" (UID: \"f81a00ad-36ce-4383-9e91-fe60de6939d2\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.245362 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/f81a00ad-36ce-4383-9e91-fe60de6939d2-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"f81a00ad-36ce-4383-9e91-fe60de6939d2\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.245507 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/f81a00ad-36ce-4383-9e91-fe60de6939d2-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"f81a00ad-36ce-4383-9e91-fe60de6939d2\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.245592 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f81a00ad-36ce-4383-9e91-fe60de6939d2-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"f81a00ad-36ce-4383-9e91-fe60de6939d2\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.245813 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/f81a00ad-36ce-4383-9e91-fe60de6939d2-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"f81a00ad-36ce-4383-9e91-fe60de6939d2\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.245934 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9db3acdd-184a-4004-a8d3-451673126318-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.245989 
4558 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/9db3acdd-184a-4004-a8d3-451673126318-config-data-generated\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.246049 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9db3acdd-184a-4004-a8d3-451673126318-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.246334 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/9db3acdd-184a-4004-a8d3-451673126318-config-data-default\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.246394 4558 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/9db3acdd-184a-4004-a8d3-451673126318-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.246443 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfj97\" (UniqueName: \"kubernetes.io/projected/9db3acdd-184a-4004-a8d3-451673126318-kube-api-access-kfj97\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.246879 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/f81a00ad-36ce-4383-9e91-fe60de6939d2-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"f81a00ad-36ce-4383-9e91-fe60de6939d2\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.247195 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/f81a00ad-36ce-4383-9e91-fe60de6939d2-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"f81a00ad-36ce-4383-9e91-fe60de6939d2\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.248302 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/f81a00ad-36ce-4383-9e91-fe60de6939d2-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"f81a00ad-36ce-4383-9e91-fe60de6939d2\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.248465 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-cell1-galera-0\" (UID: \"f81a00ad-36ce-4383-9e91-fe60de6939d2\") device mount path \"/mnt/openstack/pv10\"" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.249571 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f81a00ad-36ce-4383-9e91-fe60de6939d2-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"f81a00ad-36ce-4383-9e91-fe60de6939d2\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.256321 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/91cc01be-13ba-41d9-9060-a2cfacdb2f74-kube-api-access-kffjm" (OuterVolumeSpecName: 
"kube-api-access-kffjm") pod "91cc01be-13ba-41d9-9060-a2cfacdb2f74" (UID: "91cc01be-13ba-41d9-9060-a2cfacdb2f74"). InnerVolumeSpecName "kube-api-access-kffjm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.264871 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage17-crc" (OuterVolumeSpecName: "mysql-db") pod "9db3acdd-184a-4004-a8d3-451673126318" (UID: "9db3acdd-184a-4004-a8d3-451673126318"). InnerVolumeSpecName "local-storage17-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.269196 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f81a00ad-36ce-4383-9e91-fe60de6939d2-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"f81a00ad-36ce-4383-9e91-fe60de6939d2\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.273861 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rlk6h\" (UniqueName: \"kubernetes.io/projected/f81a00ad-36ce-4383-9e91-fe60de6939d2-kube-api-access-rlk6h\") pod \"openstack-cell1-galera-0\" (UID: \"f81a00ad-36ce-4383-9e91-fe60de6939d2\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.276446 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/f81a00ad-36ce-4383-9e91-fe60de6939d2-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"f81a00ad-36ce-4383-9e91-fe60de6939d2\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.359966 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kffjm\" (UniqueName: \"kubernetes.io/projected/91cc01be-13ba-41d9-9060-a2cfacdb2f74-kube-api-access-kffjm\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.360015 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage17-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage17-crc\") on node \"crc\" " Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.367803 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-db-sync-t7b8z"] Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.437507 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage17-crc" (UniqueName: "kubernetes.io/local-volume/local-storage17-crc") on node "crc" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.473227 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage17-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage17-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.473418 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-cell1-galera-0\" (UID: \"f81a00ad-36ce-4383-9e91-fe60de6939d2\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.501227 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-nb-0\" (UID: \"a2068ccb-0c0d-4b32-9063-082a4c395070\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.524966 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9db3acdd-184a-4004-a8d3-451673126318-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "9db3acdd-184a-4004-a8d3-451673126318" (UID: "9db3acdd-184a-4004-a8d3-451673126318"). InnerVolumeSpecName "galera-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.576059 4558 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/9db3acdd-184a-4004-a8d3-451673126318-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.588635 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.595425 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="63766878-2a9a-47b6-9209-554b59500f10" path="/var/lib/kubelet/pods/63766878-2a9a-47b6-9209-554b59500f10/volumes" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.596129 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="987d6a20-4fc9-411e-ae1f-9ff5dd78e80d" path="/var/lib/kubelet/pods/987d6a20-4fc9-411e-ae1f-9ff5dd78e80d/volumes" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.596789 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aa79d817-2b73-469c-8b09-f2f312835773" path="/var/lib/kubelet/pods/aa79d817-2b73-469c-8b09-f2f312835773/volumes" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.598008 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d274ba80-2e42-4823-bbf2-02691e791ec9" path="/var/lib/kubelet/pods/d274ba80-2e42-4823-bbf2-02691e791ec9/volumes" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.627545 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91cc01be-13ba-41d9-9060-a2cfacdb2f74-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "91cc01be-13ba-41d9-9060-a2cfacdb2f74" (UID: "91cc01be-13ba-41d9-9060-a2cfacdb2f74"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.645539 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-zn4xn"] Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.651426 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91cc01be-13ba-41d9-9060-a2cfacdb2f74-kube-state-metrics-tls-config" (OuterVolumeSpecName: "kube-state-metrics-tls-config") pod "91cc01be-13ba-41d9-9060-a2cfacdb2f74" (UID: "91cc01be-13ba-41d9-9060-a2cfacdb2f74"). InnerVolumeSpecName "kube-state-metrics-tls-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.678055 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.680735 4558 reconciler_common.go:293] "Volume detached for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/91cc01be-13ba-41d9-9060-a2cfacdb2f74-kube-state-metrics-tls-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.680760 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91cc01be-13ba-41d9-9060-a2cfacdb2f74-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.711180 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91cc01be-13ba-41d9-9060-a2cfacdb2f74-kube-state-metrics-tls-certs" (OuterVolumeSpecName: "kube-state-metrics-tls-certs") pod "91cc01be-13ba-41d9-9060-a2cfacdb2f74" (UID: "91cc01be-13ba-41d9-9060-a2cfacdb2f74"). InnerVolumeSpecName "kube-state-metrics-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.751385 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.754378 4558 generic.go:334] "Generic (PLEG): container finished" podID="70698ad2-165d-46a5-b45e-36c4e4b6df8b" containerID="44fb41a2f181b567d2295b6640d53ad267beb9330b20f34e2b79151e8226e822" exitCode=0 Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.754419 4558 generic.go:334] "Generic (PLEG): container finished" podID="70698ad2-165d-46a5-b45e-36c4e4b6df8b" containerID="ea36ed48297a2a2bcb917d936cd3d2d844aa2faeacef821937a3bd537eb2e4ba" exitCode=0 Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.754470 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"70698ad2-165d-46a5-b45e-36c4e4b6df8b","Type":"ContainerDied","Data":"44fb41a2f181b567d2295b6640d53ad267beb9330b20f34e2b79151e8226e822"} Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.754500 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"70698ad2-165d-46a5-b45e-36c4e4b6df8b","Type":"ContainerDied","Data":"ea36ed48297a2a2bcb917d936cd3d2d844aa2faeacef821937a3bd537eb2e4ba"} Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.757254 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-sync-gn4wj" event={"ID":"5f91fe62-3615-4153-a6c7-1652a5780bb5","Type":"ContainerStarted","Data":"80d9543d95ec125bdd2c22a852c98aeaffb38bc4939c5bf2f6dadc8b3e90677f"} Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.766922 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-sync-xq5zh" event={"ID":"29c1b3a9-0bdc-423c-af7f-42f86f3693b5","Type":"ContainerStarted","Data":"ba1f5590ea277f251775376a8988a4f0e9b00277af3bcaf111020b2f5cccdc43"} Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.827131 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.827979 4558 reconciler_common.go:293] "Volume detached for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/91cc01be-13ba-41d9-9060-a2cfacdb2f74-kube-state-metrics-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.850448 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"9db3acdd-184a-4004-a8d3-451673126318","Type":"ContainerDied","Data":"dab11a458a378a0ff226a6959ddd7ccd20e52fd9484d0d1c4c1489bc148993e6"} Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.850689 4558 scope.go:117] "RemoveContainer" containerID="74db7b040a7e0441aa4205a49b98fee925418b1a898081b635b0efcdf2e08d5a" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.850916 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.862256 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-sync-rq6qr" event={"ID":"4869db4f-9810-4685-a2a3-a1103e998535","Type":"ContainerStarted","Data":"12bf0dfcd5e93ae240822317488ad957dc71e08560168ddf9dbb00c2223c68b3"} Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.862319 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-sync-rq6qr" event={"ID":"4869db4f-9810-4685-a2a3-a1103e998535","Type":"ContainerStarted","Data":"c95e60f455d86ccaf831ae20c373bfba7649b918008b3952256f6e249b18b5e4"} Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.892750 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"91cc01be-13ba-41d9-9060-a2cfacdb2f74","Type":"ContainerDied","Data":"1f86a59c024a009254b50af1f48aa841486688bd42d1c8fd5e1e0009eb525440"} Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.892901 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.909880 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/keystone-db-sync-xq5zh" podStartSLOduration=4.909862314 podStartE2EDuration="4.909862314s" podCreationTimestamp="2026-01-20 17:26:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:26:50.826936999 +0000 UTC m=+2704.587274966" watchObservedRunningTime="2026-01-20 17:26:50.909862314 +0000 UTC m=+2704.670200281" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.930768 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2j8pm\" (UniqueName: \"kubernetes.io/projected/2587f606-361f-4462-bef1-e1ec4e95f012-kube-api-access-2j8pm\") pod \"2587f606-361f-4462-bef1-e1ec4e95f012\" (UID: \"2587f606-361f-4462-bef1-e1ec4e95f012\") " Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.930989 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2587f606-361f-4462-bef1-e1ec4e95f012-combined-ca-bundle\") pod \"2587f606-361f-4462-bef1-e1ec4e95f012\" (UID: \"2587f606-361f-4462-bef1-e1ec4e95f012\") " Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.931022 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/2587f606-361f-4462-bef1-e1ec4e95f012-vencrypt-tls-certs\") pod \"2587f606-361f-4462-bef1-e1ec4e95f012\" (UID: \"2587f606-361f-4462-bef1-e1ec4e95f012\") " Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.931048 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2587f606-361f-4462-bef1-e1ec4e95f012-config-data\") pod \"2587f606-361f-4462-bef1-e1ec4e95f012\" (UID: \"2587f606-361f-4462-bef1-e1ec4e95f012\") " Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.931105 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/2587f606-361f-4462-bef1-e1ec4e95f012-nova-novncproxy-tls-certs\") pod \"2587f606-361f-4462-bef1-e1ec4e95f012\" (UID: \"2587f606-361f-4462-bef1-e1ec4e95f012\") " Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.936211 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.937312 4558 generic.go:334] "Generic (PLEG): container finished" podID="2587f606-361f-4462-bef1-e1ec4e95f012" containerID="fc1cf5d826a15f0f792f4f5bcddbaf158f40dbe461e000bce68e8580a359d758" exitCode=0 Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.937367 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"2587f606-361f-4462-bef1-e1ec4e95f012","Type":"ContainerDied","Data":"fc1cf5d826a15f0f792f4f5bcddbaf158f40dbe461e000bce68e8580a359d758"} Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.937391 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"2587f606-361f-4462-bef1-e1ec4e95f012","Type":"ContainerDied","Data":"e94fc71558de5951972ae0ec24b9a045519ba50197ac1c002ba634ea2588c91d"} Jan 20 17:26:50 crc kubenswrapper[4558]: 
I0120 17:26:50.937462 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.969131 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2587f606-361f-4462-bef1-e1ec4e95f012-kube-api-access-2j8pm" (OuterVolumeSpecName: "kube-api-access-2j8pm") pod "2587f606-361f-4462-bef1-e1ec4e95f012" (UID: "2587f606-361f-4462-bef1-e1ec4e95f012"). InnerVolumeSpecName "kube-api-access-2j8pm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.976686 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-sync-rmm2x" event={"ID":"c7f6b91b-b55e-4af0-b496-46c80b92bad1","Type":"ContainerStarted","Data":"7e009ea8664346031f4001d328d14e88633993579983342f57172300cb2e9d67"} Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.989659 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-h5hmh" event={"ID":"f461d799-0027-4949-96bc-85e80ae9ec47","Type":"ContainerStarted","Data":"2ac8c57e21966494d98f82860a871152827ce4bf713928615d45e9156c21b508"} Jan 20 17:26:50 crc kubenswrapper[4558]: I0120 17:26:50.991943 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2587f606-361f-4462-bef1-e1ec4e95f012-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2587f606-361f-4462-bef1-e1ec4e95f012" (UID: "2587f606-361f-4462-bef1-e1ec4e95f012"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.000366 4558 scope.go:117] "RemoveContainer" containerID="31bf315baedc02ab08496f1a6ef75754e50f376e1ce0ce44b48404a088ee0df2" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.005600 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-hbfss" event={"ID":"04c5c6c7-acc2-4821-a68b-c2ac094e931f","Type":"ContainerStarted","Data":"48b32c6cd59db0df143887c227616ca6d4f68d463ec76e308c787483019e261c"} Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.026275 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.033256 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:26:51 crc kubenswrapper[4558]: E0120 17:26:51.033719 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2587f606-361f-4462-bef1-e1ec4e95f012" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.033739 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="2587f606-361f-4462-bef1-e1ec4e95f012" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:26:51 crc kubenswrapper[4558]: E0120 17:26:51.033748 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9db3acdd-184a-4004-a8d3-451673126318" containerName="mysql-bootstrap" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.033755 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9db3acdd-184a-4004-a8d3-451673126318" containerName="mysql-bootstrap" Jan 20 17:26:51 crc kubenswrapper[4558]: E0120 17:26:51.033769 4558 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="9db3acdd-184a-4004-a8d3-451673126318" containerName="galera" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.033785 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9db3acdd-184a-4004-a8d3-451673126318" containerName="galera" Jan 20 17:26:51 crc kubenswrapper[4558]: E0120 17:26:51.033809 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91cc01be-13ba-41d9-9060-a2cfacdb2f74" containerName="kube-state-metrics" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.033819 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="91cc01be-13ba-41d9-9060-a2cfacdb2f74" containerName="kube-state-metrics" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.033839 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/neutron-db-sync-rq6qr" podStartSLOduration=4.033817099 podStartE2EDuration="4.033817099s" podCreationTimestamp="2026-01-20 17:26:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:26:50.924754172 +0000 UTC m=+2704.685092139" watchObservedRunningTime="2026-01-20 17:26:51.033817099 +0000 UTC m=+2704.794155066" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.034054 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="91cc01be-13ba-41d9-9060-a2cfacdb2f74" containerName="kube-state-metrics" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.034068 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="9db3acdd-184a-4004-a8d3-451673126318" containerName="galera" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.034087 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="2587f606-361f-4462-bef1-e1ec4e95f012" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.035188 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.040456 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-galera-openstack-svc" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.040976 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-config-data" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.041700 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"galera-openstack-dockercfg-82xg7" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.046639 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2587f606-361f-4462-bef1-e1ec4e95f012-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.046670 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2j8pm\" (UniqueName: \"kubernetes.io/projected/2587f606-361f-4462-bef1-e1ec4e95f012-kube-api-access-2j8pm\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.054457 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-scripts" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.059179 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.064006 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.076526 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.083216 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.090400 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.091604 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.099151 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-kube-state-metrics-svc" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.099335 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"kube-state-metrics-tls-config" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.099629 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-sync-t7b8z" event={"ID":"c2442cca-1f6c-4531-b1f4-2b873ce42964","Type":"ContainerStarted","Data":"6feb415f0b29263a8426672fd5863d7ffb5e3949e847ae77533d83e6a4c9dc72"} Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.115312 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/placement-db-sync-rmm2x" podStartSLOduration=4.115290716 podStartE2EDuration="4.115290716s" podCreationTimestamp="2026-01-20 17:26:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:26:51.010544695 +0000 UTC m=+2704.770882662" watchObservedRunningTime="2026-01-20 17:26:51.115290716 +0000 UTC m=+2704.875628682" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.132593 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.138338 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2587f606-361f-4462-bef1-e1ec4e95f012-config-data" (OuterVolumeSpecName: "config-data") pod "2587f606-361f-4462-bef1-e1ec4e95f012" (UID: "2587f606-361f-4462-bef1-e1ec4e95f012"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.148897 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/6bc8ca35-f460-4c4d-9dbe-0012c552371a-config-data-generated\") pod \"openstack-galera-0\" (UID: \"6bc8ca35-f460-4c4d-9dbe-0012c552371a\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.148983 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6bc8ca35-f460-4c4d-9dbe-0012c552371a-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"6bc8ca35-f460-4c4d-9dbe-0012c552371a\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.149076 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage17-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage17-crc\") pod \"openstack-galera-0\" (UID: \"6bc8ca35-f460-4c4d-9dbe-0012c552371a\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.149114 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-swswb\" (UniqueName: \"kubernetes.io/projected/6bc8ca35-f460-4c4d-9dbe-0012c552371a-kube-api-access-swswb\") pod \"openstack-galera-0\" (UID: \"6bc8ca35-f460-4c4d-9dbe-0012c552371a\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.149219 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/6bc8ca35-f460-4c4d-9dbe-0012c552371a-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"6bc8ca35-f460-4c4d-9dbe-0012c552371a\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.149299 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/6bc8ca35-f460-4c4d-9dbe-0012c552371a-kolla-config\") pod \"openstack-galera-0\" (UID: \"6bc8ca35-f460-4c4d-9dbe-0012c552371a\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.149339 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/6bc8ca35-f460-4c4d-9dbe-0012c552371a-config-data-default\") pod \"openstack-galera-0\" (UID: \"6bc8ca35-f460-4c4d-9dbe-0012c552371a\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.149378 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6bc8ca35-f460-4c4d-9dbe-0012c552371a-operator-scripts\") pod \"openstack-galera-0\" (UID: \"6bc8ca35-f460-4c4d-9dbe-0012c552371a\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.149454 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2587f606-361f-4462-bef1-e1ec4e95f012-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:51 crc 
kubenswrapper[4558]: I0120 17:26:51.246522 4558 scope.go:117] "RemoveContainer" containerID="109b1c818a10a94b43307d6a3f5d2e521d77e543f6c41fe1fc8ac78d9590ffb7" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.252462 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/6bc8ca35-f460-4c4d-9dbe-0012c552371a-config-data-generated\") pod \"openstack-galera-0\" (UID: \"6bc8ca35-f460-4c4d-9dbe-0012c552371a\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.252526 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/443f910c-dd2d-4c72-b861-f15de67ac6bb-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"443f910c-dd2d-4c72-b861-f15de67ac6bb\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.252551 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cp2rb\" (UniqueName: \"kubernetes.io/projected/443f910c-dd2d-4c72-b861-f15de67ac6bb-kube-api-access-cp2rb\") pod \"kube-state-metrics-0\" (UID: \"443f910c-dd2d-4c72-b861-f15de67ac6bb\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.252573 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6bc8ca35-f460-4c4d-9dbe-0012c552371a-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"6bc8ca35-f460-4c4d-9dbe-0012c552371a\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.252622 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage17-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage17-crc\") pod \"openstack-galera-0\" (UID: \"6bc8ca35-f460-4c4d-9dbe-0012c552371a\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.252647 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-swswb\" (UniqueName: \"kubernetes.io/projected/6bc8ca35-f460-4c4d-9dbe-0012c552371a-kube-api-access-swswb\") pod \"openstack-galera-0\" (UID: \"6bc8ca35-f460-4c4d-9dbe-0012c552371a\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.252692 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/6bc8ca35-f460-4c4d-9dbe-0012c552371a-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"6bc8ca35-f460-4c4d-9dbe-0012c552371a\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.252718 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/443f910c-dd2d-4c72-b861-f15de67ac6bb-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"443f910c-dd2d-4c72-b861-f15de67ac6bb\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.252750 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: 
\"kubernetes.io/configmap/6bc8ca35-f460-4c4d-9dbe-0012c552371a-kolla-config\") pod \"openstack-galera-0\" (UID: \"6bc8ca35-f460-4c4d-9dbe-0012c552371a\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.252784 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/6bc8ca35-f460-4c4d-9dbe-0012c552371a-config-data-default\") pod \"openstack-galera-0\" (UID: \"6bc8ca35-f460-4c4d-9dbe-0012c552371a\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.252808 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6bc8ca35-f460-4c4d-9dbe-0012c552371a-operator-scripts\") pod \"openstack-galera-0\" (UID: \"6bc8ca35-f460-4c4d-9dbe-0012c552371a\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.252834 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/443f910c-dd2d-4c72-b861-f15de67ac6bb-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"443f910c-dd2d-4c72-b861-f15de67ac6bb\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.252881 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/6bc8ca35-f460-4c4d-9dbe-0012c552371a-config-data-generated\") pod \"openstack-galera-0\" (UID: \"6bc8ca35-f460-4c4d-9dbe-0012c552371a\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.253152 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage17-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage17-crc\") pod \"openstack-galera-0\" (UID: \"6bc8ca35-f460-4c4d-9dbe-0012c552371a\") device mount path \"/mnt/openstack/pv17\"" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.253711 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/6bc8ca35-f460-4c4d-9dbe-0012c552371a-config-data-default\") pod \"openstack-galera-0\" (UID: \"6bc8ca35-f460-4c4d-9dbe-0012c552371a\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.254120 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/6bc8ca35-f460-4c4d-9dbe-0012c552371a-kolla-config\") pod \"openstack-galera-0\" (UID: \"6bc8ca35-f460-4c4d-9dbe-0012c552371a\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.256232 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6bc8ca35-f460-4c4d-9dbe-0012c552371a-operator-scripts\") pod \"openstack-galera-0\" (UID: \"6bc8ca35-f460-4c4d-9dbe-0012c552371a\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.269803 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/6bc8ca35-f460-4c4d-9dbe-0012c552371a-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"6bc8ca35-f460-4c4d-9dbe-0012c552371a\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.285651 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/6bc8ca35-f460-4c4d-9dbe-0012c552371a-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"6bc8ca35-f460-4c4d-9dbe-0012c552371a\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.288286 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-swswb\" (UniqueName: \"kubernetes.io/projected/6bc8ca35-f460-4c4d-9dbe-0012c552371a-kube-api-access-swswb\") pod \"openstack-galera-0\" (UID: \"6bc8ca35-f460-4c4d-9dbe-0012c552371a\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.356065 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/443f910c-dd2d-4c72-b861-f15de67ac6bb-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"443f910c-dd2d-4c72-b861-f15de67ac6bb\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.356150 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/443f910c-dd2d-4c72-b861-f15de67ac6bb-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"443f910c-dd2d-4c72-b861-f15de67ac6bb\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.356187 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cp2rb\" (UniqueName: \"kubernetes.io/projected/443f910c-dd2d-4c72-b861-f15de67ac6bb-kube-api-access-cp2rb\") pod \"kube-state-metrics-0\" (UID: \"443f910c-dd2d-4c72-b861-f15de67ac6bb\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.356285 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/443f910c-dd2d-4c72-b861-f15de67ac6bb-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"443f910c-dd2d-4c72-b861-f15de67ac6bb\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.367138 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/443f910c-dd2d-4c72-b861-f15de67ac6bb-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"443f910c-dd2d-4c72-b861-f15de67ac6bb\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.370657 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/443f910c-dd2d-4c72-b861-f15de67ac6bb-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"443f910c-dd2d-4c72-b861-f15de67ac6bb\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.375305 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/443f910c-dd2d-4c72-b861-f15de67ac6bb-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"443f910c-dd2d-4c72-b861-f15de67ac6bb\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.376258 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cp2rb\" (UniqueName: \"kubernetes.io/projected/443f910c-dd2d-4c72-b861-f15de67ac6bb-kube-api-access-cp2rb\") pod \"kube-state-metrics-0\" (UID: \"443f910c-dd2d-4c72-b861-f15de67ac6bb\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.518225 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage17-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage17-crc\") pod \"openstack-galera-0\" (UID: \"6bc8ca35-f460-4c4d-9dbe-0012c552371a\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.521454 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.559685 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2587f606-361f-4462-bef1-e1ec4e95f012-vencrypt-tls-certs" (OuterVolumeSpecName: "vencrypt-tls-certs") pod "2587f606-361f-4462-bef1-e1ec4e95f012" (UID: "2587f606-361f-4462-bef1-e1ec4e95f012"). InnerVolumeSpecName "vencrypt-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.560128 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2587f606-361f-4462-bef1-e1ec4e95f012-nova-novncproxy-tls-certs" (OuterVolumeSpecName: "nova-novncproxy-tls-certs") pod "2587f606-361f-4462-bef1-e1ec4e95f012" (UID: "2587f606-361f-4462-bef1-e1ec4e95f012"). InnerVolumeSpecName "nova-novncproxy-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.582389 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.632785 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:26:51 crc kubenswrapper[4558]: W0120 17:26:51.650785 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda2068ccb_0c0d_4b32_9063_082a4c395070.slice/crio-febfce8a80eac146400d05676d21dd60a93153f82cbc431cf75d6c5818ed6658 WatchSource:0}: Error finding container febfce8a80eac146400d05676d21dd60a93153f82cbc431cf75d6c5818ed6658: Status 404 returned error can't find the container with id febfce8a80eac146400d05676d21dd60a93153f82cbc431cf75d6c5818ed6658 Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.664672 4558 reconciler_common.go:293] "Volume detached for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/2587f606-361f-4462-bef1-e1ec4e95f012-vencrypt-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.664704 4558 reconciler_common.go:293] "Volume detached for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/2587f606-361f-4462-bef1-e1ec4e95f012-nova-novncproxy-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.682147 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.715812 4558 scope.go:117] "RemoveContainer" containerID="fc1cf5d826a15f0f792f4f5bcddbaf158f40dbe461e000bce68e8580a359d758" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.718632 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.833734 4558 scope.go:117] "RemoveContainer" containerID="fc1cf5d826a15f0f792f4f5bcddbaf158f40dbe461e000bce68e8580a359d758" Jan 20 17:26:51 crc kubenswrapper[4558]: E0120 17:26:51.834718 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fc1cf5d826a15f0f792f4f5bcddbaf158f40dbe461e000bce68e8580a359d758\": container with ID starting with fc1cf5d826a15f0f792f4f5bcddbaf158f40dbe461e000bce68e8580a359d758 not found: ID does not exist" containerID="fc1cf5d826a15f0f792f4f5bcddbaf158f40dbe461e000bce68e8580a359d758" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.834752 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fc1cf5d826a15f0f792f4f5bcddbaf158f40dbe461e000bce68e8580a359d758"} err="failed to get container status \"fc1cf5d826a15f0f792f4f5bcddbaf158f40dbe461e000bce68e8580a359d758\": rpc error: code = NotFound desc = could not find container \"fc1cf5d826a15f0f792f4f5bcddbaf158f40dbe461e000bce68e8580a359d758\": container with ID starting with fc1cf5d826a15f0f792f4f5bcddbaf158f40dbe461e000bce68e8580a359d758 not found: ID does not exist" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.863468 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack-kuttl-tests/cinder-scheduler-0" podUID="7c7138fb-59b8-47c0-a431-023fa79404f1" containerName="cinder-scheduler" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.876907 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.888266 4558 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70698ad2-165d-46a5-b45e-36c4e4b6df8b-config-data\") pod \"70698ad2-165d-46a5-b45e-36c4e4b6df8b\" (UID: \"70698ad2-165d-46a5-b45e-36c4e4b6df8b\") " Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.888429 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/70698ad2-165d-46a5-b45e-36c4e4b6df8b-sg-core-conf-yaml\") pod \"70698ad2-165d-46a5-b45e-36c4e4b6df8b\" (UID: \"70698ad2-165d-46a5-b45e-36c4e4b6df8b\") " Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.888456 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hhb8n\" (UniqueName: \"kubernetes.io/projected/70698ad2-165d-46a5-b45e-36c4e4b6df8b-kube-api-access-hhb8n\") pod \"70698ad2-165d-46a5-b45e-36c4e4b6df8b\" (UID: \"70698ad2-165d-46a5-b45e-36c4e4b6df8b\") " Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.888491 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/70698ad2-165d-46a5-b45e-36c4e4b6df8b-ceilometer-tls-certs\") pod \"70698ad2-165d-46a5-b45e-36c4e4b6df8b\" (UID: \"70698ad2-165d-46a5-b45e-36c4e4b6df8b\") " Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.888515 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/70698ad2-165d-46a5-b45e-36c4e4b6df8b-run-httpd\") pod \"70698ad2-165d-46a5-b45e-36c4e4b6df8b\" (UID: \"70698ad2-165d-46a5-b45e-36c4e4b6df8b\") " Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.888549 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70698ad2-165d-46a5-b45e-36c4e4b6df8b-combined-ca-bundle\") pod \"70698ad2-165d-46a5-b45e-36c4e4b6df8b\" (UID: \"70698ad2-165d-46a5-b45e-36c4e4b6df8b\") " Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.888592 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/70698ad2-165d-46a5-b45e-36c4e4b6df8b-scripts\") pod \"70698ad2-165d-46a5-b45e-36c4e4b6df8b\" (UID: \"70698ad2-165d-46a5-b45e-36c4e4b6df8b\") " Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.888685 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/70698ad2-165d-46a5-b45e-36c4e4b6df8b-log-httpd\") pod \"70698ad2-165d-46a5-b45e-36c4e4b6df8b\" (UID: \"70698ad2-165d-46a5-b45e-36c4e4b6df8b\") " Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.889650 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/70698ad2-165d-46a5-b45e-36c4e4b6df8b-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "70698ad2-165d-46a5-b45e-36c4e4b6df8b" (UID: "70698ad2-165d-46a5-b45e-36c4e4b6df8b"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.900482 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/70698ad2-165d-46a5-b45e-36c4e4b6df8b-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "70698ad2-165d-46a5-b45e-36c4e4b6df8b" (UID: "70698ad2-165d-46a5-b45e-36c4e4b6df8b"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.915915 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.926250 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:26:51 crc kubenswrapper[4558]: E0120 17:26:51.926840 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70698ad2-165d-46a5-b45e-36c4e4b6df8b" containerName="sg-core" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.926857 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="70698ad2-165d-46a5-b45e-36c4e4b6df8b" containerName="sg-core" Jan 20 17:26:51 crc kubenswrapper[4558]: E0120 17:26:51.926880 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70698ad2-165d-46a5-b45e-36c4e4b6df8b" containerName="ceilometer-central-agent" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.926888 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="70698ad2-165d-46a5-b45e-36c4e4b6df8b" containerName="ceilometer-central-agent" Jan 20 17:26:51 crc kubenswrapper[4558]: E0120 17:26:51.926916 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70698ad2-165d-46a5-b45e-36c4e4b6df8b" containerName="proxy-httpd" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.926923 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="70698ad2-165d-46a5-b45e-36c4e4b6df8b" containerName="proxy-httpd" Jan 20 17:26:51 crc kubenswrapper[4558]: E0120 17:26:51.926939 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70698ad2-165d-46a5-b45e-36c4e4b6df8b" containerName="ceilometer-notification-agent" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.926945 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="70698ad2-165d-46a5-b45e-36c4e4b6df8b" containerName="ceilometer-notification-agent" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.927145 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="70698ad2-165d-46a5-b45e-36c4e4b6df8b" containerName="ceilometer-notification-agent" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.927156 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="70698ad2-165d-46a5-b45e-36c4e4b6df8b" containerName="sg-core" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.927181 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="70698ad2-165d-46a5-b45e-36c4e4b6df8b" containerName="ceilometer-central-agent" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.927192 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="70698ad2-165d-46a5-b45e-36c4e4b6df8b" containerName="proxy-httpd" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.927928 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.939185 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-novncproxy-cell1-vencrypt" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.939610 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-novncproxy-cell1-public-svc" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.939733 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-novncproxy-config-data" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.954293 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.956104 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/70698ad2-165d-46a5-b45e-36c4e4b6df8b-kube-api-access-hhb8n" (OuterVolumeSpecName: "kube-api-access-hhb8n") pod "70698ad2-165d-46a5-b45e-36c4e4b6df8b" (UID: "70698ad2-165d-46a5-b45e-36c4e4b6df8b"). InnerVolumeSpecName "kube-api-access-hhb8n". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.958913 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70698ad2-165d-46a5-b45e-36c4e4b6df8b-scripts" (OuterVolumeSpecName: "scripts") pod "70698ad2-165d-46a5-b45e-36c4e4b6df8b" (UID: "70698ad2-165d-46a5-b45e-36c4e4b6df8b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.998960 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hhb8n\" (UniqueName: \"kubernetes.io/projected/70698ad2-165d-46a5-b45e-36c4e4b6df8b-kube-api-access-hhb8n\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.999220 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/70698ad2-165d-46a5-b45e-36c4e4b6df8b-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.999233 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/70698ad2-165d-46a5-b45e-36c4e4b6df8b-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:51 crc kubenswrapper[4558]: I0120 17:26:51.999243 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/70698ad2-165d-46a5-b45e-36c4e4b6df8b-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.114598 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.114742 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996\") " 
pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.114831 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xdmnr\" (UniqueName: \"kubernetes.io/projected/73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996-kube-api-access-xdmnr\") pod \"nova-cell1-novncproxy-0\" (UID: \"73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.115013 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.115059 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.148196 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"7b6a0803-befe-4426-9e2a-a04ac12f2d7c","Type":"ContainerStarted","Data":"add1e8304daefdeb1ed3561478da2142c92e86dfa46d05eedbde1589c7a9ba74"} Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.167600 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.171544 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-sync-t7b8z" event={"ID":"c2442cca-1f6c-4531-b1f4-2b873ce42964","Type":"ContainerStarted","Data":"364d7369bafd0402a20a7f9c1276504839740cce0e4e93df229f18382a27c891"} Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.176939 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-ring-rebalance-zn4xn" event={"ID":"8e043dc4-ede7-4945-adfe-f23ef8b0b313","Type":"ContainerStarted","Data":"16f94b1c99175822e8283a9a842573fbffc683d8ea076954dacc80b3b5591b7f"} Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.186611 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-h5hmh" event={"ID":"f461d799-0027-4949-96bc-85e80ae9ec47","Type":"ContainerStarted","Data":"46211dbcac65c4f79d8afc41ddb2ceb06935611e997fd6f57f19214db5ea0343"} Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.211334 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"a2068ccb-0c0d-4b32-9063-082a4c395070","Type":"ContainerStarted","Data":"febfce8a80eac146400d05676d21dd60a93153f82cbc431cf75d6c5818ed6658"} Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.225428 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-db-sync-t7b8z" podStartSLOduration=5.225413683 podStartE2EDuration="5.225413683s" podCreationTimestamp="2026-01-20 17:26:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:26:52.200480025 +0000 UTC 
m=+2705.960817992" watchObservedRunningTime="2026-01-20 17:26:52.225413683 +0000 UTC m=+2705.985751650" Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.263700 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/rabbitmq-server-0" podUID="033e5f26-b8e8-48f1-affd-3b9e7fba316a" containerName="rabbitmq" containerID="cri-o://a02a42e340dab5cef6984f56ed207a62ce75f87172edeb2b63be0e769779bf17" gracePeriod=604795 Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.271109 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.271205 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.271326 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.271433 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.271518 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xdmnr\" (UniqueName: \"kubernetes.io/projected/73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996-kube-api-access-xdmnr\") pod \"nova-cell1-novncproxy-0\" (UID: \"73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.272300 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"f81a00ad-36ce-4383-9e91-fe60de6939d2","Type":"ContainerStarted","Data":"a796780243dd9747fb2317729a320ad77fb6a4148e29c322d469cddf921fb156"} Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.311841 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.311904 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:26:52 
crc kubenswrapper[4558]: I0120 17:26:52.319146 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xdmnr\" (UniqueName: \"kubernetes.io/projected/73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996-kube-api-access-xdmnr\") pod \"nova-cell1-novncproxy-0\" (UID: \"73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.319764 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-hbfss" event={"ID":"04c5c6c7-acc2-4821-a68b-c2ac094e931f","Type":"ContainerStarted","Data":"8657c2c036a43ada6b1b0d8548a2af3d55c5d81b571839269cbc4e19ec5c4f82"} Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.320326 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.326569 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-sync-gn4wj" event={"ID":"5f91fe62-3615-4153-a6c7-1652a5780bb5","Type":"ContainerStarted","Data":"a9c0c1143b3b33cc02b475f942743f25949d7a8e463523453117829d18bc66f6"} Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.336315 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70698ad2-165d-46a5-b45e-36c4e4b6df8b-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "70698ad2-165d-46a5-b45e-36c4e4b6df8b" (UID: "70698ad2-165d-46a5-b45e-36c4e4b6df8b"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.344624 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.352811 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"70698ad2-165d-46a5-b45e-36c4e4b6df8b","Type":"ContainerDied","Data":"a20ca3fd607ddf7da770b12d270ee3f61e807356eaf73adac4d94547ae33b6e8"} Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.352868 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.352876 4558 scope.go:117] "RemoveContainer" containerID="b55f709aed6d1e7f660b6aa1c679633da050ee695636dacbeec2bc9d9cacc0ef" Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.356183 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-h5hmh" podStartSLOduration=5.35615746 podStartE2EDuration="5.35615746s" podCreationTimestamp="2026-01-20 17:26:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:26:52.263700283 +0000 UTC m=+2706.024038251" watchObservedRunningTime="2026-01-20 17:26:52.35615746 +0000 UTC m=+2706.116495428" Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.357447 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-hbfss" podStartSLOduration=5.357439151 podStartE2EDuration="5.357439151s" podCreationTimestamp="2026-01-20 17:26:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:26:52.342253711 +0000 UTC m=+2706.102591678" watchObservedRunningTime="2026-01-20 17:26:52.357439151 +0000 UTC m=+2706.117777118" Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.370970 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-db-sync-gn4wj" podStartSLOduration=5.370960071 podStartE2EDuration="5.370960071s" podCreationTimestamp="2026-01-20 17:26:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:26:52.370846598 +0000 UTC m=+2706.131184565" watchObservedRunningTime="2026-01-20 17:26:52.370960071 +0000 UTC m=+2706.131298037" Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.374022 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/70698ad2-165d-46a5-b45e-36c4e4b6df8b-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.374408 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"3460a75e-3553-47b9-bfc5-39c2c459826e","Type":"ContainerStarted","Data":"c8a5e9bf9d8e1a8dad3f6f73cdf354545ce12575fe3b33bce155c23769a44f3e"} Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.374445 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"3460a75e-3553-47b9-bfc5-39c2c459826e","Type":"ContainerStarted","Data":"dad15fc6c1dbccd4219a6824cf8ca4a16fb35b4cb0b75bc8ad332e140bc258b8"} Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.376582 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.387286 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-sync-92t4c" event={"ID":"f59d0040-b44b-46e3-bb18-c0ea74722cd3","Type":"ContainerStarted","Data":"dcea6d9bce8f8b365b1e3907b2409a3e2ed23260cd8dc669504ca51f06ab237e"} Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.430882 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/memcached-0" 
podStartSLOduration=3.430855039 podStartE2EDuration="3.430855039s" podCreationTimestamp="2026-01-20 17:26:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:26:52.409494258 +0000 UTC m=+2706.169832225" watchObservedRunningTime="2026-01-20 17:26:52.430855039 +0000 UTC m=+2706.191193005" Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.436888 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/cinder-db-sync-92t4c" podStartSLOduration=5.436879856 podStartE2EDuration="5.436879856s" podCreationTimestamp="2026-01-20 17:26:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:26:52.425879794 +0000 UTC m=+2706.186217761" watchObservedRunningTime="2026-01-20 17:26:52.436879856 +0000 UTC m=+2706.197217823" Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.514546 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70698ad2-165d-46a5-b45e-36c4e4b6df8b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "70698ad2-165d-46a5-b45e-36c4e4b6df8b" (UID: "70698ad2-165d-46a5-b45e-36c4e4b6df8b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.531015 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.565439 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70698ad2-165d-46a5-b45e-36c4e4b6df8b-config-data" (OuterVolumeSpecName: "config-data") pod "70698ad2-165d-46a5-b45e-36c4e4b6df8b" (UID: "70698ad2-165d-46a5-b45e-36c4e4b6df8b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.579825 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70698ad2-165d-46a5-b45e-36c4e4b6df8b-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.579856 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70698ad2-165d-46a5-b45e-36c4e4b6df8b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.588560 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.602307 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70698ad2-165d-46a5-b45e-36c4e4b6df8b-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "70698ad2-165d-46a5-b45e-36c4e4b6df8b" (UID: "70698ad2-165d-46a5-b45e-36c4e4b6df8b"). InnerVolumeSpecName "ceilometer-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.604025 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2587f606-361f-4462-bef1-e1ec4e95f012" path="/var/lib/kubelet/pods/2587f606-361f-4462-bef1-e1ec4e95f012/volumes" Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.604600 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="91cc01be-13ba-41d9-9060-a2cfacdb2f74" path="/var/lib/kubelet/pods/91cc01be-13ba-41d9-9060-a2cfacdb2f74/volumes" Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.605225 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9db3acdd-184a-4004-a8d3-451673126318" path="/var/lib/kubelet/pods/9db3acdd-184a-4004-a8d3-451673126318/volumes" Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.683917 4558 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/70698ad2-165d-46a5-b45e-36c4e4b6df8b-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.723110 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.746577 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.777268 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.779863 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.784550 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ceilometer-internal-svc" Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.784696 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.785099 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.794927 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.797545 4558 scope.go:117] "RemoveContainer" containerID="ebd5e020fe9ccfd450a30b510c61df09172d803a72760f9b96ab98868d87562f" Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.872350 4558 scope.go:117] "RemoveContainer" containerID="44fb41a2f181b567d2295b6640d53ad267beb9330b20f34e2b79151e8226e822" Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.894595 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17054bbc-d1b1-47ae-b42a-bc09c15b2b9b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"17054bbc-d1b1-47ae-b42a-bc09c15b2b9b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.894660 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/17054bbc-d1b1-47ae-b42a-bc09c15b2b9b-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"17054bbc-d1b1-47ae-b42a-bc09c15b2b9b\") " 
pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.894683 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/17054bbc-d1b1-47ae-b42a-bc09c15b2b9b-run-httpd\") pod \"ceilometer-0\" (UID: \"17054bbc-d1b1-47ae-b42a-bc09c15b2b9b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.894703 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/17054bbc-d1b1-47ae-b42a-bc09c15b2b9b-log-httpd\") pod \"ceilometer-0\" (UID: \"17054bbc-d1b1-47ae-b42a-bc09c15b2b9b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.894721 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17054bbc-d1b1-47ae-b42a-bc09c15b2b9b-config-data\") pod \"ceilometer-0\" (UID: \"17054bbc-d1b1-47ae-b42a-bc09c15b2b9b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.894754 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/17054bbc-d1b1-47ae-b42a-bc09c15b2b9b-scripts\") pod \"ceilometer-0\" (UID: \"17054bbc-d1b1-47ae-b42a-bc09c15b2b9b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.894855 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5jp6k\" (UniqueName: \"kubernetes.io/projected/17054bbc-d1b1-47ae-b42a-bc09c15b2b9b-kube-api-access-5jp6k\") pod \"ceilometer-0\" (UID: \"17054bbc-d1b1-47ae-b42a-bc09c15b2b9b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.894870 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/17054bbc-d1b1-47ae-b42a-bc09c15b2b9b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"17054bbc-d1b1-47ae-b42a-bc09c15b2b9b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.924290 4558 scope.go:117] "RemoveContainer" containerID="ea36ed48297a2a2bcb917d936cd3d2d844aa2faeacef821937a3bd537eb2e4ba" Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.996609 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/17054bbc-d1b1-47ae-b42a-bc09c15b2b9b-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"17054bbc-d1b1-47ae-b42a-bc09c15b2b9b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.996662 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/17054bbc-d1b1-47ae-b42a-bc09c15b2b9b-run-httpd\") pod \"ceilometer-0\" (UID: \"17054bbc-d1b1-47ae-b42a-bc09c15b2b9b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.996682 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/17054bbc-d1b1-47ae-b42a-bc09c15b2b9b-log-httpd\") pod \"ceilometer-0\" (UID: 
\"17054bbc-d1b1-47ae-b42a-bc09c15b2b9b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.996698 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17054bbc-d1b1-47ae-b42a-bc09c15b2b9b-config-data\") pod \"ceilometer-0\" (UID: \"17054bbc-d1b1-47ae-b42a-bc09c15b2b9b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.996730 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/17054bbc-d1b1-47ae-b42a-bc09c15b2b9b-scripts\") pod \"ceilometer-0\" (UID: \"17054bbc-d1b1-47ae-b42a-bc09c15b2b9b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.996833 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5jp6k\" (UniqueName: \"kubernetes.io/projected/17054bbc-d1b1-47ae-b42a-bc09c15b2b9b-kube-api-access-5jp6k\") pod \"ceilometer-0\" (UID: \"17054bbc-d1b1-47ae-b42a-bc09c15b2b9b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.996854 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/17054bbc-d1b1-47ae-b42a-bc09c15b2b9b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"17054bbc-d1b1-47ae-b42a-bc09c15b2b9b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:26:52 crc kubenswrapper[4558]: I0120 17:26:52.996880 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17054bbc-d1b1-47ae-b42a-bc09c15b2b9b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"17054bbc-d1b1-47ae-b42a-bc09c15b2b9b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:26:53 crc kubenswrapper[4558]: I0120 17:26:53.000278 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/17054bbc-d1b1-47ae-b42a-bc09c15b2b9b-run-httpd\") pod \"ceilometer-0\" (UID: \"17054bbc-d1b1-47ae-b42a-bc09c15b2b9b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:26:53 crc kubenswrapper[4558]: I0120 17:26:53.003069 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/17054bbc-d1b1-47ae-b42a-bc09c15b2b9b-log-httpd\") pod \"ceilometer-0\" (UID: \"17054bbc-d1b1-47ae-b42a-bc09c15b2b9b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:26:53 crc kubenswrapper[4558]: I0120 17:26:53.005425 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/17054bbc-d1b1-47ae-b42a-bc09c15b2b9b-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"17054bbc-d1b1-47ae-b42a-bc09c15b2b9b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:26:53 crc kubenswrapper[4558]: I0120 17:26:53.006495 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17054bbc-d1b1-47ae-b42a-bc09c15b2b9b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"17054bbc-d1b1-47ae-b42a-bc09c15b2b9b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:26:53 crc kubenswrapper[4558]: I0120 17:26:53.010632 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/17054bbc-d1b1-47ae-b42a-bc09c15b2b9b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"17054bbc-d1b1-47ae-b42a-bc09c15b2b9b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:26:53 crc kubenswrapper[4558]: I0120 17:26:53.010820 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/17054bbc-d1b1-47ae-b42a-bc09c15b2b9b-scripts\") pod \"ceilometer-0\" (UID: \"17054bbc-d1b1-47ae-b42a-bc09c15b2b9b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:26:53 crc kubenswrapper[4558]: I0120 17:26:53.022748 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17054bbc-d1b1-47ae-b42a-bc09c15b2b9b-config-data\") pod \"ceilometer-0\" (UID: \"17054bbc-d1b1-47ae-b42a-bc09c15b2b9b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:26:53 crc kubenswrapper[4558]: I0120 17:26:53.022818 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5jp6k\" (UniqueName: \"kubernetes.io/projected/17054bbc-d1b1-47ae-b42a-bc09c15b2b9b-kube-api-access-5jp6k\") pod \"ceilometer-0\" (UID: \"17054bbc-d1b1-47ae-b42a-bc09c15b2b9b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:26:53 crc kubenswrapper[4558]: I0120 17:26:53.037389 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:26:53 crc kubenswrapper[4558]: I0120 17:26:53.119545 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:26:53 crc kubenswrapper[4558]: I0120 17:26:53.188598 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" podUID="7a2c4722-11cc-426c-8101-496c9ee97ca2" containerName="rabbitmq" containerID="cri-o://d6656270debe6f92e12b0faf0e10cdbadf8fd2e24c19d3019d8548693ab17740" gracePeriod=604794 Jan 20 17:26:53 crc kubenswrapper[4558]: I0120 17:26:53.414086 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-ring-rebalance-zn4xn" event={"ID":"8e043dc4-ede7-4945-adfe-f23ef8b0b313","Type":"ContainerStarted","Data":"fa220b08cefc27ae29c6a4562f29cca0ef746f81fe72e0cd26f3b1e6b453f18e"} Jan 20 17:26:53 crc kubenswrapper[4558]: I0120 17:26:53.424252 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"f81a00ad-36ce-4383-9e91-fe60de6939d2","Type":"ContainerStarted","Data":"758a745a0a805787f036d55713b5a5b3c51cd6e6c91d9d3304c4dcf394fa055b"} Jan 20 17:26:53 crc kubenswrapper[4558]: I0120 17:26:53.437955 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"443f910c-dd2d-4c72-b861-f15de67ac6bb","Type":"ContainerStarted","Data":"ea8201fc9fae9612c7677cb7b1dbff6d7938a87582e7ecc4e74f7875b23122ec"} Jan 20 17:26:53 crc kubenswrapper[4558]: I0120 17:26:53.437990 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"443f910c-dd2d-4c72-b861-f15de67ac6bb","Type":"ContainerStarted","Data":"46a71c4c2fb2aab516d6c7b2bf6318bfdae48b4c7736d08ffd76acd44932b728"} Jan 20 17:26:53 crc kubenswrapper[4558]: I0120 17:26:53.438564 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:26:53 crc kubenswrapper[4558]: I0120 17:26:53.453439 4558 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996","Type":"ContainerStarted","Data":"a7a134a40ad2fdc31d2893d8841b37e2902c99964219b7826a6e91554f6aa448"} Jan 20 17:26:53 crc kubenswrapper[4558]: I0120 17:26:53.455126 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/swift-ring-rebalance-zn4xn" podStartSLOduration=5.455114254 podStartE2EDuration="5.455114254s" podCreationTimestamp="2026-01-20 17:26:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:26:53.429971224 +0000 UTC m=+2707.190309191" watchObservedRunningTime="2026-01-20 17:26:53.455114254 +0000 UTC m=+2707.215452221" Jan 20 17:26:53 crc kubenswrapper[4558]: I0120 17:26:53.471502 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"6bc8ca35-f460-4c4d-9dbe-0012c552371a","Type":"ContainerStarted","Data":"fc043775a1381650eef8618d77c2954b353dcd53aa5ea369049e6ea12a421442"} Jan 20 17:26:53 crc kubenswrapper[4558]: I0120 17:26:53.471537 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"6bc8ca35-f460-4c4d-9dbe-0012c552371a","Type":"ContainerStarted","Data":"1d20563782842dbbaa3d775668dc3bb5fbdd776a62e69568b949ae9296a620d3"} Jan 20 17:26:53 crc kubenswrapper[4558]: I0120 17:26:53.474521 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/kube-state-metrics-0" podStartSLOduration=2.9022224960000003 podStartE2EDuration="3.474512464s" podCreationTimestamp="2026-01-20 17:26:50 +0000 UTC" firstStartedPulling="2026-01-20 17:26:52.352021685 +0000 UTC m=+2706.112359653" lastFinishedPulling="2026-01-20 17:26:52.924311654 +0000 UTC m=+2706.684649621" observedRunningTime="2026-01-20 17:26:53.460651816 +0000 UTC m=+2707.220989782" watchObservedRunningTime="2026-01-20 17:26:53.474512464 +0000 UTC m=+2707.234850431" Jan 20 17:26:53 crc kubenswrapper[4558]: I0120 17:26:53.481502 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"a2068ccb-0c0d-4b32-9063-082a4c395070","Type":"ContainerStarted","Data":"3cb92b2ab723d670d8405be0316a236c7b450aa82c756082192092f8f2f20fad"} Jan 20 17:26:53 crc kubenswrapper[4558]: I0120 17:26:53.497487 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"7b6a0803-befe-4426-9e2a-a04ac12f2d7c","Type":"ContainerStarted","Data":"e14a2ace059c2e1c3be33ab3bdf7264642df45d603b1ef56bea3fa59cbb4787b"} Jan 20 17:26:53 crc kubenswrapper[4558]: I0120 17:26:53.497516 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"7b6a0803-befe-4426-9e2a-a04ac12f2d7c","Type":"ContainerStarted","Data":"dd2c0dcba96a9cb823d300977fee811cbbd5c8ce5dca2787726542941426f5c2"} Jan 20 17:26:53 crc kubenswrapper[4558]: I0120 17:26:53.550056 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ovsdbserver-sb-0" podStartSLOduration=5.550031826 podStartE2EDuration="5.550031826s" podCreationTimestamp="2026-01-20 17:26:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:26:53.515361944 +0000 UTC m=+2707.275699911" watchObservedRunningTime="2026-01-20 
17:26:53.550031826 +0000 UTC m=+2707.310369794" Jan 20 17:26:53 crc kubenswrapper[4558]: I0120 17:26:53.657033 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:26:53 crc kubenswrapper[4558]: W0120 17:26:53.661085 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod17054bbc_d1b1_47ae_b42a_bc09c15b2b9b.slice/crio-7ed30f016afffbf796c3d9a0ae744c831f74000f5a3277f831a1b3ab5170e16d WatchSource:0}: Error finding container 7ed30f016afffbf796c3d9a0ae744c831f74000f5a3277f831a1b3ab5170e16d: Status 404 returned error can't find the container with id 7ed30f016afffbf796c3d9a0ae744c831f74000f5a3277f831a1b3ab5170e16d Jan 20 17:26:54 crc kubenswrapper[4558]: I0120 17:26:54.238245 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:26:54 crc kubenswrapper[4558]: E0120 17:26:54.282080 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="bdc31a24aa03485670883d3ea8d3795dfe70b5c89196347668c72f2086bf2bea" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Jan 20 17:26:54 crc kubenswrapper[4558]: E0120 17:26:54.283433 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="bdc31a24aa03485670883d3ea8d3795dfe70b5c89196347668c72f2086bf2bea" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Jan 20 17:26:54 crc kubenswrapper[4558]: E0120 17:26:54.284809 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="bdc31a24aa03485670883d3ea8d3795dfe70b5c89196347668c72f2086bf2bea" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Jan 20 17:26:54 crc kubenswrapper[4558]: E0120 17:26:54.284861 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/ovn-northd-0" podUID="c08cb973-6eb3-414d-b127-fc146b0fb1f2" containerName="ovn-northd" Jan 20 17:26:54 crc kubenswrapper[4558]: I0120 17:26:54.506740 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"17054bbc-d1b1-47ae-b42a-bc09c15b2b9b","Type":"ContainerStarted","Data":"7ed30f016afffbf796c3d9a0ae744c831f74000f5a3277f831a1b3ab5170e16d"} Jan 20 17:26:54 crc kubenswrapper[4558]: I0120 17:26:54.512381 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"a2068ccb-0c0d-4b32-9063-082a4c395070","Type":"ContainerStarted","Data":"0a200f6996ecf5951d17224c62d14bcea68ab49867b472e280d5c47ecfd50ad7"} Jan 20 17:26:54 crc kubenswrapper[4558]: I0120 17:26:54.516190 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996","Type":"ContainerStarted","Data":"f2931b38008bccdd5209b99b03a91154a5b6e19f660a655b7d1fde4d2990be72"} Jan 20 17:26:54 crc kubenswrapper[4558]: I0120 17:26:54.542401 4558 pod_startup_latency_tracker.go:104] "Observed 
pod startup duration" pod="openstack-kuttl-tests/ovsdbserver-nb-0" podStartSLOduration=5.542383314 podStartE2EDuration="5.542383314s" podCreationTimestamp="2026-01-20 17:26:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:26:54.535417949 +0000 UTC m=+2708.295755916" watchObservedRunningTime="2026-01-20 17:26:54.542383314 +0000 UTC m=+2708.302721281" Jan 20 17:26:54 crc kubenswrapper[4558]: I0120 17:26:54.562624 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" podStartSLOduration=3.562603209 podStartE2EDuration="3.562603209s" podCreationTimestamp="2026-01-20 17:26:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:26:54.553479115 +0000 UTC m=+2708.313817082" watchObservedRunningTime="2026-01-20 17:26:54.562603209 +0000 UTC m=+2708.322941175" Jan 20 17:26:54 crc kubenswrapper[4558]: I0120 17:26:54.579514 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="70698ad2-165d-46a5-b45e-36c4e4b6df8b" path="/var/lib/kubelet/pods/70698ad2-165d-46a5-b45e-36c4e4b6df8b/volumes" Jan 20 17:26:54 crc kubenswrapper[4558]: I0120 17:26:54.849398 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack-kuttl-tests/cinder-scheduler-0" podUID="7c7138fb-59b8-47c0-a431-023fa79404f1" containerName="cinder-scheduler" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 20 17:26:54 crc kubenswrapper[4558]: I0120 17:26:54.849719 4558 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:26:54 crc kubenswrapper[4558]: I0120 17:26:54.851146 4558 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="cinder-scheduler" containerStatusID={"Type":"cri-o","ID":"98d4b6952becbe3b78b67cf6ee7b8b7cc7435eda4e3a11dead4ce7aba086ca2a"} pod="openstack-kuttl-tests/cinder-scheduler-0" containerMessage="Container cinder-scheduler failed liveness probe, will be restarted" Jan 20 17:26:54 crc kubenswrapper[4558]: I0120 17:26:54.851234 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-scheduler-0" podUID="7c7138fb-59b8-47c0-a431-023fa79404f1" containerName="cinder-scheduler" containerID="cri-o://98d4b6952becbe3b78b67cf6ee7b8b7cc7435eda4e3a11dead4ce7aba086ca2a" gracePeriod=30 Jan 20 17:26:54 crc kubenswrapper[4558]: I0120 17:26:54.915943 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovn-northd-0_c08cb973-6eb3-414d-b127-fc146b0fb1f2/ovn-northd/0.log" Jan 20 17:26:54 crc kubenswrapper[4558]: I0120 17:26:54.916034 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:26:55 crc kubenswrapper[4558]: I0120 17:26:55.061202 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xbtgw\" (UniqueName: \"kubernetes.io/projected/c08cb973-6eb3-414d-b127-fc146b0fb1f2-kube-api-access-xbtgw\") pod \"c08cb973-6eb3-414d-b127-fc146b0fb1f2\" (UID: \"c08cb973-6eb3-414d-b127-fc146b0fb1f2\") " Jan 20 17:26:55 crc kubenswrapper[4558]: I0120 17:26:55.061338 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c08cb973-6eb3-414d-b127-fc146b0fb1f2-config\") pod \"c08cb973-6eb3-414d-b127-fc146b0fb1f2\" (UID: \"c08cb973-6eb3-414d-b127-fc146b0fb1f2\") " Jan 20 17:26:55 crc kubenswrapper[4558]: I0120 17:26:55.061378 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/c08cb973-6eb3-414d-b127-fc146b0fb1f2-ovn-rundir\") pod \"c08cb973-6eb3-414d-b127-fc146b0fb1f2\" (UID: \"c08cb973-6eb3-414d-b127-fc146b0fb1f2\") " Jan 20 17:26:55 crc kubenswrapper[4558]: I0120 17:26:55.061753 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c08cb973-6eb3-414d-b127-fc146b0fb1f2-ovn-rundir" (OuterVolumeSpecName: "ovn-rundir") pod "c08cb973-6eb3-414d-b127-fc146b0fb1f2" (UID: "c08cb973-6eb3-414d-b127-fc146b0fb1f2"). InnerVolumeSpecName "ovn-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:26:55 crc kubenswrapper[4558]: I0120 17:26:55.061842 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c08cb973-6eb3-414d-b127-fc146b0fb1f2-combined-ca-bundle\") pod \"c08cb973-6eb3-414d-b127-fc146b0fb1f2\" (UID: \"c08cb973-6eb3-414d-b127-fc146b0fb1f2\") " Jan 20 17:26:55 crc kubenswrapper[4558]: I0120 17:26:55.061865 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c08cb973-6eb3-414d-b127-fc146b0fb1f2-scripts\") pod \"c08cb973-6eb3-414d-b127-fc146b0fb1f2\" (UID: \"c08cb973-6eb3-414d-b127-fc146b0fb1f2\") " Jan 20 17:26:55 crc kubenswrapper[4558]: I0120 17:26:55.061976 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c08cb973-6eb3-414d-b127-fc146b0fb1f2-config" (OuterVolumeSpecName: "config") pod "c08cb973-6eb3-414d-b127-fc146b0fb1f2" (UID: "c08cb973-6eb3-414d-b127-fc146b0fb1f2"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:26:55 crc kubenswrapper[4558]: I0120 17:26:55.062426 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/c08cb973-6eb3-414d-b127-fc146b0fb1f2-ovn-northd-tls-certs\") pod \"c08cb973-6eb3-414d-b127-fc146b0fb1f2\" (UID: \"c08cb973-6eb3-414d-b127-fc146b0fb1f2\") " Jan 20 17:26:55 crc kubenswrapper[4558]: I0120 17:26:55.062430 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c08cb973-6eb3-414d-b127-fc146b0fb1f2-scripts" (OuterVolumeSpecName: "scripts") pod "c08cb973-6eb3-414d-b127-fc146b0fb1f2" (UID: "c08cb973-6eb3-414d-b127-fc146b0fb1f2"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:26:55 crc kubenswrapper[4558]: I0120 17:26:55.062477 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/c08cb973-6eb3-414d-b127-fc146b0fb1f2-metrics-certs-tls-certs\") pod \"c08cb973-6eb3-414d-b127-fc146b0fb1f2\" (UID: \"c08cb973-6eb3-414d-b127-fc146b0fb1f2\") " Jan 20 17:26:55 crc kubenswrapper[4558]: I0120 17:26:55.062972 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c08cb973-6eb3-414d-b127-fc146b0fb1f2-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:55 crc kubenswrapper[4558]: I0120 17:26:55.062989 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c08cb973-6eb3-414d-b127-fc146b0fb1f2-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:55 crc kubenswrapper[4558]: I0120 17:26:55.062999 4558 reconciler_common.go:293] "Volume detached for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/c08cb973-6eb3-414d-b127-fc146b0fb1f2-ovn-rundir\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:55 crc kubenswrapper[4558]: I0120 17:26:55.066965 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c08cb973-6eb3-414d-b127-fc146b0fb1f2-kube-api-access-xbtgw" (OuterVolumeSpecName: "kube-api-access-xbtgw") pod "c08cb973-6eb3-414d-b127-fc146b0fb1f2" (UID: "c08cb973-6eb3-414d-b127-fc146b0fb1f2"). InnerVolumeSpecName "kube-api-access-xbtgw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:26:55 crc kubenswrapper[4558]: I0120 17:26:55.086431 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c08cb973-6eb3-414d-b127-fc146b0fb1f2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c08cb973-6eb3-414d-b127-fc146b0fb1f2" (UID: "c08cb973-6eb3-414d-b127-fc146b0fb1f2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:26:55 crc kubenswrapper[4558]: I0120 17:26:55.122058 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c08cb973-6eb3-414d-b127-fc146b0fb1f2-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "c08cb973-6eb3-414d-b127-fc146b0fb1f2" (UID: "c08cb973-6eb3-414d-b127-fc146b0fb1f2"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:26:55 crc kubenswrapper[4558]: I0120 17:26:55.129212 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c08cb973-6eb3-414d-b127-fc146b0fb1f2-ovn-northd-tls-certs" (OuterVolumeSpecName: "ovn-northd-tls-certs") pod "c08cb973-6eb3-414d-b127-fc146b0fb1f2" (UID: "c08cb973-6eb3-414d-b127-fc146b0fb1f2"). InnerVolumeSpecName "ovn-northd-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:26:55 crc kubenswrapper[4558]: I0120 17:26:55.167323 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xbtgw\" (UniqueName: \"kubernetes.io/projected/c08cb973-6eb3-414d-b127-fc146b0fb1f2-kube-api-access-xbtgw\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:55 crc kubenswrapper[4558]: I0120 17:26:55.167374 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c08cb973-6eb3-414d-b127-fc146b0fb1f2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:55 crc kubenswrapper[4558]: I0120 17:26:55.167387 4558 reconciler_common.go:293] "Volume detached for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/c08cb973-6eb3-414d-b127-fc146b0fb1f2-ovn-northd-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:55 crc kubenswrapper[4558]: I0120 17:26:55.167402 4558 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/c08cb973-6eb3-414d-b127-fc146b0fb1f2-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:55 crc kubenswrapper[4558]: I0120 17:26:55.238685 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:26:55 crc kubenswrapper[4558]: I0120 17:26:55.286321 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:26:55 crc kubenswrapper[4558]: I0120 17:26:55.531940 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovn-northd-0_c08cb973-6eb3-414d-b127-fc146b0fb1f2/ovn-northd/0.log" Jan 20 17:26:55 crc kubenswrapper[4558]: I0120 17:26:55.532037 4558 generic.go:334] "Generic (PLEG): container finished" podID="c08cb973-6eb3-414d-b127-fc146b0fb1f2" containerID="bdc31a24aa03485670883d3ea8d3795dfe70b5c89196347668c72f2086bf2bea" exitCode=139 Jan 20 17:26:55 crc kubenswrapper[4558]: I0120 17:26:55.532127 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:26:55 crc kubenswrapper[4558]: I0120 17:26:55.532140 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"c08cb973-6eb3-414d-b127-fc146b0fb1f2","Type":"ContainerDied","Data":"bdc31a24aa03485670883d3ea8d3795dfe70b5c89196347668c72f2086bf2bea"} Jan 20 17:26:55 crc kubenswrapper[4558]: I0120 17:26:55.532216 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"c08cb973-6eb3-414d-b127-fc146b0fb1f2","Type":"ContainerDied","Data":"07666f608209fe81c7d6bd117e4853ca3c0442f2555801913598516c8e27281a"} Jan 20 17:26:55 crc kubenswrapper[4558]: I0120 17:26:55.532245 4558 scope.go:117] "RemoveContainer" containerID="97eae6723cbae5007ecc1c7e159332138d8c92b6e1ffae4831a2b5b8e46b5a7e" Jan 20 17:26:55 crc kubenswrapper[4558]: I0120 17:26:55.534139 4558 generic.go:334] "Generic (PLEG): container finished" podID="f81a00ad-36ce-4383-9e91-fe60de6939d2" containerID="758a745a0a805787f036d55713b5a5b3c51cd6e6c91d9d3304c4dcf394fa055b" exitCode=0 Jan 20 17:26:55 crc kubenswrapper[4558]: I0120 17:26:55.534227 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"f81a00ad-36ce-4383-9e91-fe60de6939d2","Type":"ContainerDied","Data":"758a745a0a805787f036d55713b5a5b3c51cd6e6c91d9d3304c4dcf394fa055b"} Jan 20 17:26:55 crc kubenswrapper[4558]: I0120 17:26:55.540634 4558 generic.go:334] "Generic (PLEG): container finished" podID="6bc8ca35-f460-4c4d-9dbe-0012c552371a" containerID="fc043775a1381650eef8618d77c2954b353dcd53aa5ea369049e6ea12a421442" exitCode=0 Jan 20 17:26:55 crc kubenswrapper[4558]: I0120 17:26:55.540708 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"6bc8ca35-f460-4c4d-9dbe-0012c552371a","Type":"ContainerDied","Data":"fc043775a1381650eef8618d77c2954b353dcd53aa5ea369049e6ea12a421442"} Jan 20 17:26:55 crc kubenswrapper[4558]: I0120 17:26:55.551923 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"17054bbc-d1b1-47ae-b42a-bc09c15b2b9b","Type":"ContainerStarted","Data":"5c49ce71384981a8c60295f3e437aa3231d0f9a9b4690345fd2a2732983bcc39"} Jan 20 17:26:55 crc kubenswrapper[4558]: I0120 17:26:55.551952 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"17054bbc-d1b1-47ae-b42a-bc09c15b2b9b","Type":"ContainerStarted","Data":"e2e1e1cb6e8a72f68287eff8a5396b012d963913a047795276d785bba4b6a44c"} Jan 20 17:26:55 crc kubenswrapper[4558]: I0120 17:26:55.561643 4558 scope.go:117] "RemoveContainer" containerID="bdc31a24aa03485670883d3ea8d3795dfe70b5c89196347668c72f2086bf2bea" Jan 20 17:26:55 crc kubenswrapper[4558]: I0120 17:26:55.641379 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:26:55 crc kubenswrapper[4558]: I0120 17:26:55.655354 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:26:55 crc kubenswrapper[4558]: I0120 17:26:55.661213 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:26:55 crc kubenswrapper[4558]: E0120 17:26:55.661726 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c08cb973-6eb3-414d-b127-fc146b0fb1f2" containerName="ovn-northd" Jan 20 17:26:55 crc kubenswrapper[4558]: I0120 17:26:55.661749 4558 
state_mem.go:107] "Deleted CPUSet assignment" podUID="c08cb973-6eb3-414d-b127-fc146b0fb1f2" containerName="ovn-northd" Jan 20 17:26:55 crc kubenswrapper[4558]: E0120 17:26:55.661793 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c08cb973-6eb3-414d-b127-fc146b0fb1f2" containerName="openstack-network-exporter" Jan 20 17:26:55 crc kubenswrapper[4558]: I0120 17:26:55.661801 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c08cb973-6eb3-414d-b127-fc146b0fb1f2" containerName="openstack-network-exporter" Jan 20 17:26:55 crc kubenswrapper[4558]: I0120 17:26:55.662054 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c08cb973-6eb3-414d-b127-fc146b0fb1f2" containerName="ovn-northd" Jan 20 17:26:55 crc kubenswrapper[4558]: I0120 17:26:55.662072 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c08cb973-6eb3-414d-b127-fc146b0fb1f2" containerName="openstack-network-exporter" Jan 20 17:26:55 crc kubenswrapper[4558]: I0120 17:26:55.663310 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:26:55 crc kubenswrapper[4558]: I0120 17:26:55.667192 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:26:55 crc kubenswrapper[4558]: I0120 17:26:55.668020 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ovnnorthd-ovndbs" Jan 20 17:26:55 crc kubenswrapper[4558]: I0120 17:26:55.668229 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ovnnorthd-ovnnorthd-dockercfg-wkprl" Jan 20 17:26:55 crc kubenswrapper[4558]: I0120 17:26:55.668515 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovnnorthd-config" Jan 20 17:26:55 crc kubenswrapper[4558]: I0120 17:26:55.669316 4558 scope.go:117] "RemoveContainer" containerID="97eae6723cbae5007ecc1c7e159332138d8c92b6e1ffae4831a2b5b8e46b5a7e" Jan 20 17:26:55 crc kubenswrapper[4558]: E0120 17:26:55.669785 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"97eae6723cbae5007ecc1c7e159332138d8c92b6e1ffae4831a2b5b8e46b5a7e\": container with ID starting with 97eae6723cbae5007ecc1c7e159332138d8c92b6e1ffae4831a2b5b8e46b5a7e not found: ID does not exist" containerID="97eae6723cbae5007ecc1c7e159332138d8c92b6e1ffae4831a2b5b8e46b5a7e" Jan 20 17:26:55 crc kubenswrapper[4558]: I0120 17:26:55.669816 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"97eae6723cbae5007ecc1c7e159332138d8c92b6e1ffae4831a2b5b8e46b5a7e"} err="failed to get container status \"97eae6723cbae5007ecc1c7e159332138d8c92b6e1ffae4831a2b5b8e46b5a7e\": rpc error: code = NotFound desc = could not find container \"97eae6723cbae5007ecc1c7e159332138d8c92b6e1ffae4831a2b5b8e46b5a7e\": container with ID starting with 97eae6723cbae5007ecc1c7e159332138d8c92b6e1ffae4831a2b5b8e46b5a7e not found: ID does not exist" Jan 20 17:26:55 crc kubenswrapper[4558]: I0120 17:26:55.669836 4558 scope.go:117] "RemoveContainer" containerID="bdc31a24aa03485670883d3ea8d3795dfe70b5c89196347668c72f2086bf2bea" Jan 20 17:26:55 crc kubenswrapper[4558]: E0120 17:26:55.670113 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bdc31a24aa03485670883d3ea8d3795dfe70b5c89196347668c72f2086bf2bea\": container with ID starting with 
bdc31a24aa03485670883d3ea8d3795dfe70b5c89196347668c72f2086bf2bea not found: ID does not exist" containerID="bdc31a24aa03485670883d3ea8d3795dfe70b5c89196347668c72f2086bf2bea" Jan 20 17:26:55 crc kubenswrapper[4558]: I0120 17:26:55.670138 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bdc31a24aa03485670883d3ea8d3795dfe70b5c89196347668c72f2086bf2bea"} err="failed to get container status \"bdc31a24aa03485670883d3ea8d3795dfe70b5c89196347668c72f2086bf2bea\": rpc error: code = NotFound desc = could not find container \"bdc31a24aa03485670883d3ea8d3795dfe70b5c89196347668c72f2086bf2bea\": container with ID starting with bdc31a24aa03485670883d3ea8d3795dfe70b5c89196347668c72f2086bf2bea not found: ID does not exist" Jan 20 17:26:55 crc kubenswrapper[4558]: I0120 17:26:55.671128 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovnnorthd-scripts" Jan 20 17:26:55 crc kubenswrapper[4558]: I0120 17:26:55.801560 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/8a111df9-8578-40bb-a672-b5d53305c873-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"8a111df9-8578-40bb-a672-b5d53305c873\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:26:55 crc kubenswrapper[4558]: I0120 17:26:55.801617 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a111df9-8578-40bb-a672-b5d53305c873-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"8a111df9-8578-40bb-a672-b5d53305c873\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:26:55 crc kubenswrapper[4558]: I0120 17:26:55.801823 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/8a111df9-8578-40bb-a672-b5d53305c873-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"8a111df9-8578-40bb-a672-b5d53305c873\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:26:55 crc kubenswrapper[4558]: I0120 17:26:55.802042 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8a111df9-8578-40bb-a672-b5d53305c873-scripts\") pod \"ovn-northd-0\" (UID: \"8a111df9-8578-40bb-a672-b5d53305c873\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:26:55 crc kubenswrapper[4558]: I0120 17:26:55.802093 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8a111df9-8578-40bb-a672-b5d53305c873-config\") pod \"ovn-northd-0\" (UID: \"8a111df9-8578-40bb-a672-b5d53305c873\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:26:55 crc kubenswrapper[4558]: I0120 17:26:55.802205 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r4ll4\" (UniqueName: \"kubernetes.io/projected/8a111df9-8578-40bb-a672-b5d53305c873-kube-api-access-r4ll4\") pod \"ovn-northd-0\" (UID: \"8a111df9-8578-40bb-a672-b5d53305c873\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:26:55 crc kubenswrapper[4558]: I0120 17:26:55.802263 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/8a111df9-8578-40bb-a672-b5d53305c873-ovn-rundir\") 
pod \"ovn-northd-0\" (UID: \"8a111df9-8578-40bb-a672-b5d53305c873\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:26:55 crc kubenswrapper[4558]: I0120 17:26:55.828456 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:26:55 crc kubenswrapper[4558]: I0120 17:26:55.904687 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8a111df9-8578-40bb-a672-b5d53305c873-scripts\") pod \"ovn-northd-0\" (UID: \"8a111df9-8578-40bb-a672-b5d53305c873\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:26:55 crc kubenswrapper[4558]: I0120 17:26:55.904731 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8a111df9-8578-40bb-a672-b5d53305c873-config\") pod \"ovn-northd-0\" (UID: \"8a111df9-8578-40bb-a672-b5d53305c873\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:26:55 crc kubenswrapper[4558]: I0120 17:26:55.904757 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r4ll4\" (UniqueName: \"kubernetes.io/projected/8a111df9-8578-40bb-a672-b5d53305c873-kube-api-access-r4ll4\") pod \"ovn-northd-0\" (UID: \"8a111df9-8578-40bb-a672-b5d53305c873\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:26:55 crc kubenswrapper[4558]: I0120 17:26:55.904811 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/8a111df9-8578-40bb-a672-b5d53305c873-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"8a111df9-8578-40bb-a672-b5d53305c873\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:26:55 crc kubenswrapper[4558]: I0120 17:26:55.905159 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/8a111df9-8578-40bb-a672-b5d53305c873-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"8a111df9-8578-40bb-a672-b5d53305c873\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:26:55 crc kubenswrapper[4558]: I0120 17:26:55.905209 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a111df9-8578-40bb-a672-b5d53305c873-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"8a111df9-8578-40bb-a672-b5d53305c873\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:26:55 crc kubenswrapper[4558]: I0120 17:26:55.905240 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/8a111df9-8578-40bb-a672-b5d53305c873-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"8a111df9-8578-40bb-a672-b5d53305c873\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:26:55 crc kubenswrapper[4558]: I0120 17:26:55.905639 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8a111df9-8578-40bb-a672-b5d53305c873-scripts\") pod \"ovn-northd-0\" (UID: \"8a111df9-8578-40bb-a672-b5d53305c873\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:26:55 crc kubenswrapper[4558]: I0120 17:26:55.905854 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/8a111df9-8578-40bb-a672-b5d53305c873-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"8a111df9-8578-40bb-a672-b5d53305c873\") " 
pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:26:55 crc kubenswrapper[4558]: I0120 17:26:55.906679 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8a111df9-8578-40bb-a672-b5d53305c873-config\") pod \"ovn-northd-0\" (UID: \"8a111df9-8578-40bb-a672-b5d53305c873\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:26:55 crc kubenswrapper[4558]: I0120 17:26:55.911878 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/8a111df9-8578-40bb-a672-b5d53305c873-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"8a111df9-8578-40bb-a672-b5d53305c873\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:26:55 crc kubenswrapper[4558]: I0120 17:26:55.911896 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/8a111df9-8578-40bb-a672-b5d53305c873-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"8a111df9-8578-40bb-a672-b5d53305c873\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:26:55 crc kubenswrapper[4558]: I0120 17:26:55.911936 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a111df9-8578-40bb-a672-b5d53305c873-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"8a111df9-8578-40bb-a672-b5d53305c873\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:26:55 crc kubenswrapper[4558]: I0120 17:26:55.922954 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r4ll4\" (UniqueName: \"kubernetes.io/projected/8a111df9-8578-40bb-a672-b5d53305c873-kube-api-access-r4ll4\") pod \"ovn-northd-0\" (UID: \"8a111df9-8578-40bb-a672-b5d53305c873\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:26:56 crc kubenswrapper[4558]: I0120 17:26:56.077446 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/rabbitmq-server-0" podUID="033e5f26-b8e8-48f1-affd-3b9e7fba316a" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.223:5671: connect: connection refused" Jan 20 17:26:56 crc kubenswrapper[4558]: I0120 17:26:56.090254 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:26:56 crc kubenswrapper[4558]: I0120 17:26:56.458999 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" podUID="7a2c4722-11cc-426c-8101-496c9ee97ca2" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.224:5671: connect: connection refused" Jan 20 17:26:56 crc kubenswrapper[4558]: I0120 17:26:56.503904 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:26:56 crc kubenswrapper[4558]: W0120 17:26:56.506588 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8a111df9_8578_40bb_a672_b5d53305c873.slice/crio-44c1fe01210b1e9e65d4468d7551bc7915ee5edc55a00157013003b1b37c9fd0 WatchSource:0}: Error finding container 44c1fe01210b1e9e65d4468d7551bc7915ee5edc55a00157013003b1b37c9fd0: Status 404 returned error can't find the container with id 44c1fe01210b1e9e65d4468d7551bc7915ee5edc55a00157013003b1b37c9fd0 Jan 20 17:26:56 crc kubenswrapper[4558]: I0120 17:26:56.578044 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c08cb973-6eb3-414d-b127-fc146b0fb1f2" path="/var/lib/kubelet/pods/c08cb973-6eb3-414d-b127-fc146b0fb1f2/volumes" Jan 20 17:26:56 crc kubenswrapper[4558]: I0120 17:26:56.578667 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"8a111df9-8578-40bb-a672-b5d53305c873","Type":"ContainerStarted","Data":"44c1fe01210b1e9e65d4468d7551bc7915ee5edc55a00157013003b1b37c9fd0"} Jan 20 17:26:56 crc kubenswrapper[4558]: I0120 17:26:56.578697 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"f81a00ad-36ce-4383-9e91-fe60de6939d2","Type":"ContainerStarted","Data":"77c882047f115c6a4c56a4e460c8836ce62f0179639cdd46fd3f9af7f7edbd88"} Jan 20 17:26:56 crc kubenswrapper[4558]: I0120 17:26:56.578710 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"6bc8ca35-f460-4c4d-9dbe-0012c552371a","Type":"ContainerStarted","Data":"3d044e6540417b17b7e29b9e6b94aac224f6dc6f6746f710160907736ae8f719"} Jan 20 17:26:56 crc kubenswrapper[4558]: I0120 17:26:56.580138 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"17054bbc-d1b1-47ae-b42a-bc09c15b2b9b","Type":"ContainerStarted","Data":"08372e554acc972018371c794d0195d55e9ca9f63ea522358caad97d33501495"} Jan 20 17:26:56 crc kubenswrapper[4558]: I0120 17:26:56.646746 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/openstack-galera-0" podStartSLOduration=6.646723396 podStartE2EDuration="6.646723396s" podCreationTimestamp="2026-01-20 17:26:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:26:56.632546361 +0000 UTC m=+2710.392884328" watchObservedRunningTime="2026-01-20 17:26:56.646723396 +0000 UTC m=+2710.407061362" Jan 20 17:26:56 crc kubenswrapper[4558]: I0120 17:26:56.657479 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/openstack-cell1-galera-0" podStartSLOduration=7.657458259 podStartE2EDuration="7.657458259s" podCreationTimestamp="2026-01-20 17:26:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:26:56.65195425 +0000 UTC m=+2710.412292217" watchObservedRunningTime="2026-01-20 17:26:56.657458259 +0000 UTC m=+2710.417796225" Jan 20 17:26:56 crc kubenswrapper[4558]: I0120 17:26:56.828023 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:26:56 crc kubenswrapper[4558]: I0120 17:26:56.882957 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:26:57 crc kubenswrapper[4558]: I0120 17:26:57.589767 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:26:57 crc kubenswrapper[4558]: I0120 17:26:57.597394 4558 generic.go:334] "Generic (PLEG): container finished" podID="8e043dc4-ede7-4945-adfe-f23ef8b0b313" containerID="fa220b08cefc27ae29c6a4562f29cca0ef746f81fe72e0cd26f3b1e6b453f18e" exitCode=0 Jan 20 17:26:57 crc kubenswrapper[4558]: I0120 17:26:57.597444 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-ring-rebalance-zn4xn" event={"ID":"8e043dc4-ede7-4945-adfe-f23ef8b0b313","Type":"ContainerDied","Data":"fa220b08cefc27ae29c6a4562f29cca0ef746f81fe72e0cd26f3b1e6b453f18e"} Jan 20 17:26:57 crc kubenswrapper[4558]: I0120 17:26:57.604391 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"8a111df9-8578-40bb-a672-b5d53305c873","Type":"ContainerStarted","Data":"d3ba0972279d7175889b46187fb32615d720f05a63a7bbfedd1ca9dc2eff5075"} Jan 20 17:26:57 crc kubenswrapper[4558]: I0120 17:26:57.604422 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"8a111df9-8578-40bb-a672-b5d53305c873","Type":"ContainerStarted","Data":"c2fa33c63a4c25f68e800c109ef61e283f605e39ebdcd676406536456186262d"} Jan 20 17:26:57 crc kubenswrapper[4558]: I0120 17:26:57.604439 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:26:57 crc kubenswrapper[4558]: I0120 17:26:57.652453 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ovn-northd-0" podStartSLOduration=2.652431063 podStartE2EDuration="2.652431063s" podCreationTimestamp="2026-01-20 17:26:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:26:57.644707683 +0000 UTC m=+2711.405045649" watchObservedRunningTime="2026-01-20 17:26:57.652431063 +0000 UTC m=+2711.412769030" Jan 20 17:26:58 crc kubenswrapper[4558]: I0120 17:26:58.620634 4558 generic.go:334] "Generic (PLEG): container finished" podID="033e5f26-b8e8-48f1-affd-3b9e7fba316a" containerID="a02a42e340dab5cef6984f56ed207a62ce75f87172edeb2b63be0e769779bf17" exitCode=0 Jan 20 17:26:58 crc kubenswrapper[4558]: I0120 17:26:58.620703 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-0" event={"ID":"033e5f26-b8e8-48f1-affd-3b9e7fba316a","Type":"ContainerDied","Data":"a02a42e340dab5cef6984f56ed207a62ce75f87172edeb2b63be0e769779bf17"} Jan 20 17:26:58 crc kubenswrapper[4558]: I0120 17:26:58.642666 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" 
event={"ID":"17054bbc-d1b1-47ae-b42a-bc09c15b2b9b","Type":"ContainerStarted","Data":"d20cdc2ad9b52842bb98fbc066110f969938fc19f05488867c07cc1ed5ad8486"} Jan 20 17:26:58 crc kubenswrapper[4558]: I0120 17:26:58.643662 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:26:58 crc kubenswrapper[4558]: I0120 17:26:58.666860 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=2.844440444 podStartE2EDuration="6.666841984s" podCreationTimestamp="2026-01-20 17:26:52 +0000 UTC" firstStartedPulling="2026-01-20 17:26:53.663962545 +0000 UTC m=+2707.424300502" lastFinishedPulling="2026-01-20 17:26:57.486364074 +0000 UTC m=+2711.246702042" observedRunningTime="2026-01-20 17:26:58.661875878 +0000 UTC m=+2712.422213844" watchObservedRunningTime="2026-01-20 17:26:58.666841984 +0000 UTC m=+2712.427179951" Jan 20 17:26:58 crc kubenswrapper[4558]: I0120 17:26:58.686265 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:26:58 crc kubenswrapper[4558]: I0120 17:26:58.956237 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.028656 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-ring-rebalance-zn4xn" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.069189 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/033e5f26-b8e8-48f1-affd-3b9e7fba316a-plugins-conf\") pod \"033e5f26-b8e8-48f1-affd-3b9e7fba316a\" (UID: \"033e5f26-b8e8-48f1-affd-3b9e7fba316a\") " Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.069246 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/033e5f26-b8e8-48f1-affd-3b9e7fba316a-rabbitmq-erlang-cookie\") pod \"033e5f26-b8e8-48f1-affd-3b9e7fba316a\" (UID: \"033e5f26-b8e8-48f1-affd-3b9e7fba316a\") " Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.069310 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/033e5f26-b8e8-48f1-affd-3b9e7fba316a-pod-info\") pod \"033e5f26-b8e8-48f1-affd-3b9e7fba316a\" (UID: \"033e5f26-b8e8-48f1-affd-3b9e7fba316a\") " Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.069380 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/033e5f26-b8e8-48f1-affd-3b9e7fba316a-erlang-cookie-secret\") pod \"033e5f26-b8e8-48f1-affd-3b9e7fba316a\" (UID: \"033e5f26-b8e8-48f1-affd-3b9e7fba316a\") " Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.069447 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ts2c8\" (UniqueName: \"kubernetes.io/projected/033e5f26-b8e8-48f1-affd-3b9e7fba316a-kube-api-access-ts2c8\") pod \"033e5f26-b8e8-48f1-affd-3b9e7fba316a\" (UID: \"033e5f26-b8e8-48f1-affd-3b9e7fba316a\") " Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.069468 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: 
\"kubernetes.io/configmap/033e5f26-b8e8-48f1-affd-3b9e7fba316a-server-conf\") pod \"033e5f26-b8e8-48f1-affd-3b9e7fba316a\" (UID: \"033e5f26-b8e8-48f1-affd-3b9e7fba316a\") " Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.069525 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/033e5f26-b8e8-48f1-affd-3b9e7fba316a-rabbitmq-confd\") pod \"033e5f26-b8e8-48f1-affd-3b9e7fba316a\" (UID: \"033e5f26-b8e8-48f1-affd-3b9e7fba316a\") " Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.069558 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"033e5f26-b8e8-48f1-affd-3b9e7fba316a\" (UID: \"033e5f26-b8e8-48f1-affd-3b9e7fba316a\") " Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.069575 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/033e5f26-b8e8-48f1-affd-3b9e7fba316a-config-data\") pod \"033e5f26-b8e8-48f1-affd-3b9e7fba316a\" (UID: \"033e5f26-b8e8-48f1-affd-3b9e7fba316a\") " Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.069591 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/033e5f26-b8e8-48f1-affd-3b9e7fba316a-rabbitmq-tls\") pod \"033e5f26-b8e8-48f1-affd-3b9e7fba316a\" (UID: \"033e5f26-b8e8-48f1-affd-3b9e7fba316a\") " Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.069737 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/033e5f26-b8e8-48f1-affd-3b9e7fba316a-rabbitmq-plugins\") pod \"033e5f26-b8e8-48f1-affd-3b9e7fba316a\" (UID: \"033e5f26-b8e8-48f1-affd-3b9e7fba316a\") " Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.071977 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/033e5f26-b8e8-48f1-affd-3b9e7fba316a-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "033e5f26-b8e8-48f1-affd-3b9e7fba316a" (UID: "033e5f26-b8e8-48f1-affd-3b9e7fba316a"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.072020 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/033e5f26-b8e8-48f1-affd-3b9e7fba316a-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "033e5f26-b8e8-48f1-affd-3b9e7fba316a" (UID: "033e5f26-b8e8-48f1-affd-3b9e7fba316a"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.072068 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/033e5f26-b8e8-48f1-affd-3b9e7fba316a-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "033e5f26-b8e8-48f1-affd-3b9e7fba316a" (UID: "033e5f26-b8e8-48f1-affd-3b9e7fba316a"). InnerVolumeSpecName "rabbitmq-plugins". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.084443 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "persistence") pod "033e5f26-b8e8-48f1-affd-3b9e7fba316a" (UID: "033e5f26-b8e8-48f1-affd-3b9e7fba316a"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.084466 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/033e5f26-b8e8-48f1-affd-3b9e7fba316a-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "033e5f26-b8e8-48f1-affd-3b9e7fba316a" (UID: "033e5f26-b8e8-48f1-affd-3b9e7fba316a"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.084628 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/033e5f26-b8e8-48f1-affd-3b9e7fba316a-kube-api-access-ts2c8" (OuterVolumeSpecName: "kube-api-access-ts2c8") pod "033e5f26-b8e8-48f1-affd-3b9e7fba316a" (UID: "033e5f26-b8e8-48f1-affd-3b9e7fba316a"). InnerVolumeSpecName "kube-api-access-ts2c8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.086802 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/033e5f26-b8e8-48f1-affd-3b9e7fba316a-pod-info" (OuterVolumeSpecName: "pod-info") pod "033e5f26-b8e8-48f1-affd-3b9e7fba316a" (UID: "033e5f26-b8e8-48f1-affd-3b9e7fba316a"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.098675 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/033e5f26-b8e8-48f1-affd-3b9e7fba316a-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "033e5f26-b8e8-48f1-affd-3b9e7fba316a" (UID: "033e5f26-b8e8-48f1-affd-3b9e7fba316a"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.111947 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/033e5f26-b8e8-48f1-affd-3b9e7fba316a-config-data" (OuterVolumeSpecName: "config-data") pod "033e5f26-b8e8-48f1-affd-3b9e7fba316a" (UID: "033e5f26-b8e8-48f1-affd-3b9e7fba316a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.134513 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/033e5f26-b8e8-48f1-affd-3b9e7fba316a-server-conf" (OuterVolumeSpecName: "server-conf") pod "033e5f26-b8e8-48f1-affd-3b9e7fba316a" (UID: "033e5f26-b8e8-48f1-affd-3b9e7fba316a"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.170228 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/033e5f26-b8e8-48f1-affd-3b9e7fba316a-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "033e5f26-b8e8-48f1-affd-3b9e7fba316a" (UID: "033e5f26-b8e8-48f1-affd-3b9e7fba316a"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.171954 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/8e043dc4-ede7-4945-adfe-f23ef8b0b313-etc-swift\") pod \"8e043dc4-ede7-4945-adfe-f23ef8b0b313\" (UID: \"8e043dc4-ede7-4945-adfe-f23ef8b0b313\") " Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.172083 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/8e043dc4-ede7-4945-adfe-f23ef8b0b313-dispersionconf\") pod \"8e043dc4-ede7-4945-adfe-f23ef8b0b313\" (UID: \"8e043dc4-ede7-4945-adfe-f23ef8b0b313\") " Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.172246 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/8e043dc4-ede7-4945-adfe-f23ef8b0b313-ring-data-devices\") pod \"8e043dc4-ede7-4945-adfe-f23ef8b0b313\" (UID: \"8e043dc4-ede7-4945-adfe-f23ef8b0b313\") " Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.172512 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/8e043dc4-ede7-4945-adfe-f23ef8b0b313-swiftconf\") pod \"8e043dc4-ede7-4945-adfe-f23ef8b0b313\" (UID: \"8e043dc4-ede7-4945-adfe-f23ef8b0b313\") " Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.172614 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8e043dc4-ede7-4945-adfe-f23ef8b0b313-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "8e043dc4-ede7-4945-adfe-f23ef8b0b313" (UID: "8e043dc4-ede7-4945-adfe-f23ef8b0b313"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.172609 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8e043dc4-ede7-4945-adfe-f23ef8b0b313-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "8e043dc4-ede7-4945-adfe-f23ef8b0b313" (UID: "8e043dc4-ede7-4945-adfe-f23ef8b0b313"). InnerVolumeSpecName "ring-data-devices". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.172729 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e043dc4-ede7-4945-adfe-f23ef8b0b313-combined-ca-bundle\") pod \"8e043dc4-ede7-4945-adfe-f23ef8b0b313\" (UID: \"8e043dc4-ede7-4945-adfe-f23ef8b0b313\") " Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.172865 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8e043dc4-ede7-4945-adfe-f23ef8b0b313-scripts\") pod \"8e043dc4-ede7-4945-adfe-f23ef8b0b313\" (UID: \"8e043dc4-ede7-4945-adfe-f23ef8b0b313\") " Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.172973 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-78pfw\" (UniqueName: \"kubernetes.io/projected/8e043dc4-ede7-4945-adfe-f23ef8b0b313-kube-api-access-78pfw\") pod \"8e043dc4-ede7-4945-adfe-f23ef8b0b313\" (UID: \"8e043dc4-ede7-4945-adfe-f23ef8b0b313\") " Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.173489 4558 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/033e5f26-b8e8-48f1-affd-3b9e7fba316a-pod-info\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.173553 4558 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/033e5f26-b8e8-48f1-affd-3b9e7fba316a-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.173605 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ts2c8\" (UniqueName: \"kubernetes.io/projected/033e5f26-b8e8-48f1-affd-3b9e7fba316a-kube-api-access-ts2c8\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.173654 4558 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/033e5f26-b8e8-48f1-affd-3b9e7fba316a-server-conf\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.173717 4558 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/8e043dc4-ede7-4945-adfe-f23ef8b0b313-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.173790 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/033e5f26-b8e8-48f1-affd-3b9e7fba316a-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.173867 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.173920 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/033e5f26-b8e8-48f1-affd-3b9e7fba316a-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.173975 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/033e5f26-b8e8-48f1-affd-3b9e7fba316a-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 
17:26:59.174031 4558 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/8e043dc4-ede7-4945-adfe-f23ef8b0b313-ring-data-devices\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.174103 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/033e5f26-b8e8-48f1-affd-3b9e7fba316a-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.174179 4558 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/033e5f26-b8e8-48f1-affd-3b9e7fba316a-plugins-conf\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.174238 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/033e5f26-b8e8-48f1-affd-3b9e7fba316a-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.181771 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8e043dc4-ede7-4945-adfe-f23ef8b0b313-kube-api-access-78pfw" (OuterVolumeSpecName: "kube-api-access-78pfw") pod "8e043dc4-ede7-4945-adfe-f23ef8b0b313" (UID: "8e043dc4-ede7-4945-adfe-f23ef8b0b313"). InnerVolumeSpecName "kube-api-access-78pfw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.194690 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.194794 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8e043dc4-ede7-4945-adfe-f23ef8b0b313-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "8e043dc4-ede7-4945-adfe-f23ef8b0b313" (UID: "8e043dc4-ede7-4945-adfe-f23ef8b0b313"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.197573 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8e043dc4-ede7-4945-adfe-f23ef8b0b313-scripts" (OuterVolumeSpecName: "scripts") pod "8e043dc4-ede7-4945-adfe-f23ef8b0b313" (UID: "8e043dc4-ede7-4945-adfe-f23ef8b0b313"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.198698 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8e043dc4-ede7-4945-adfe-f23ef8b0b313-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "8e043dc4-ede7-4945-adfe-f23ef8b0b313" (UID: "8e043dc4-ede7-4945-adfe-f23ef8b0b313"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.205545 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8e043dc4-ede7-4945-adfe-f23ef8b0b313-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8e043dc4-ede7-4945-adfe-f23ef8b0b313" (UID: "8e043dc4-ede7-4945-adfe-f23ef8b0b313"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.278192 4558 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/8e043dc4-ede7-4945-adfe-f23ef8b0b313-swiftconf\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.278230 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e043dc4-ede7-4945-adfe-f23ef8b0b313-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.278243 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8e043dc4-ede7-4945-adfe-f23ef8b0b313-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.278255 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-78pfw\" (UniqueName: \"kubernetes.io/projected/8e043dc4-ede7-4945-adfe-f23ef8b0b313-kube-api-access-78pfw\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.278265 4558 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/8e043dc4-ede7-4945-adfe-f23ef8b0b313-dispersionconf\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.278278 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.286523 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.655243 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-ring-rebalance-zn4xn" event={"ID":"8e043dc4-ede7-4945-adfe-f23ef8b0b313","Type":"ContainerDied","Data":"16f94b1c99175822e8283a9a842573fbffc683d8ea076954dacc80b3b5591b7f"} Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.655288 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="16f94b1c99175822e8283a9a842573fbffc683d8ea076954dacc80b3b5591b7f" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.655354 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-ring-rebalance-zn4xn" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.658104 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-0" event={"ID":"033e5f26-b8e8-48f1-affd-3b9e7fba316a","Type":"ContainerDied","Data":"fcde411c60da965e4e82222fa7f3fa0dab22bb371ca3f9772e2fa456c513c0eb"} Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.658373 4558 scope.go:117] "RemoveContainer" containerID="a02a42e340dab5cef6984f56ed207a62ce75f87172edeb2b63be0e769779bf17" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.658494 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.662673 4558 generic.go:334] "Generic (PLEG): container finished" podID="7a2c4722-11cc-426c-8101-496c9ee97ca2" containerID="d6656270debe6f92e12b0faf0e10cdbadf8fd2e24c19d3019d8548693ab17740" exitCode=0 Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.662853 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" event={"ID":"7a2c4722-11cc-426c-8101-496c9ee97ca2","Type":"ContainerDied","Data":"d6656270debe6f92e12b0faf0e10cdbadf8fd2e24c19d3019d8548693ab17740"} Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.688468 4558 scope.go:117] "RemoveContainer" containerID="b6108373b6dc1c8073ee4649e4bf895e88d86f76fea047217ce0066f6364f215" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.693627 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.701472 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.722645 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:26:59 crc kubenswrapper[4558]: E0120 17:26:59.723208 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="033e5f26-b8e8-48f1-affd-3b9e7fba316a" containerName="rabbitmq" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.723230 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="033e5f26-b8e8-48f1-affd-3b9e7fba316a" containerName="rabbitmq" Jan 20 17:26:59 crc kubenswrapper[4558]: E0120 17:26:59.723241 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e043dc4-ede7-4945-adfe-f23ef8b0b313" containerName="swift-ring-rebalance" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.723248 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e043dc4-ede7-4945-adfe-f23ef8b0b313" containerName="swift-ring-rebalance" Jan 20 17:26:59 crc kubenswrapper[4558]: E0120 17:26:59.723280 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="033e5f26-b8e8-48f1-affd-3b9e7fba316a" containerName="setup-container" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.723287 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="033e5f26-b8e8-48f1-affd-3b9e7fba316a" containerName="setup-container" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.723555 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8e043dc4-ede7-4945-adfe-f23ef8b0b313" containerName="swift-ring-rebalance" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.723589 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="033e5f26-b8e8-48f1-affd-3b9e7fba316a" containerName="rabbitmq" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.725537 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.728221 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"rabbitmq-erlang-cookie" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.728276 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"rabbitmq-config-data" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.728514 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"rabbitmq-plugins-conf" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.728625 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"rabbitmq-server-dockercfg-dnkqh" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.728813 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"rabbitmq-default-user" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.731290 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-rabbitmq-svc" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.731923 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"rabbitmq-server-conf" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.740479 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.800313 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.896742 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zdxrq\" (UniqueName: \"kubernetes.io/projected/7a2c4722-11cc-426c-8101-496c9ee97ca2-kube-api-access-zdxrq\") pod \"7a2c4722-11cc-426c-8101-496c9ee97ca2\" (UID: \"7a2c4722-11cc-426c-8101-496c9ee97ca2\") " Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.896840 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/7a2c4722-11cc-426c-8101-496c9ee97ca2-rabbitmq-tls\") pod \"7a2c4722-11cc-426c-8101-496c9ee97ca2\" (UID: \"7a2c4722-11cc-426c-8101-496c9ee97ca2\") " Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.896899 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7a2c4722-11cc-426c-8101-496c9ee97ca2-rabbitmq-confd\") pod \"7a2c4722-11cc-426c-8101-496c9ee97ca2\" (UID: \"7a2c4722-11cc-426c-8101-496c9ee97ca2\") " Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.896946 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7a2c4722-11cc-426c-8101-496c9ee97ca2-pod-info\") pod \"7a2c4722-11cc-426c-8101-496c9ee97ca2\" (UID: \"7a2c4722-11cc-426c-8101-496c9ee97ca2\") " Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.896980 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7a2c4722-11cc-426c-8101-496c9ee97ca2-rabbitmq-erlang-cookie\") pod \"7a2c4722-11cc-426c-8101-496c9ee97ca2\" (UID: \"7a2c4722-11cc-426c-8101-496c9ee97ca2\") " Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.897073 4558 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7a2c4722-11cc-426c-8101-496c9ee97ca2-erlang-cookie-secret\") pod \"7a2c4722-11cc-426c-8101-496c9ee97ca2\" (UID: \"7a2c4722-11cc-426c-8101-496c9ee97ca2\") " Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.897102 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7a2c4722-11cc-426c-8101-496c9ee97ca2-config-data\") pod \"7a2c4722-11cc-426c-8101-496c9ee97ca2\" (UID: \"7a2c4722-11cc-426c-8101-496c9ee97ca2\") " Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.897215 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7a2c4722-11cc-426c-8101-496c9ee97ca2-plugins-conf\") pod \"7a2c4722-11cc-426c-8101-496c9ee97ca2\" (UID: \"7a2c4722-11cc-426c-8101-496c9ee97ca2\") " Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.897269 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7a2c4722-11cc-426c-8101-496c9ee97ca2-rabbitmq-plugins\") pod \"7a2c4722-11cc-426c-8101-496c9ee97ca2\" (UID: \"7a2c4722-11cc-426c-8101-496c9ee97ca2\") " Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.897294 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7a2c4722-11cc-426c-8101-496c9ee97ca2-server-conf\") pod \"7a2c4722-11cc-426c-8101-496c9ee97ca2\" (UID: \"7a2c4722-11cc-426c-8101-496c9ee97ca2\") " Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.897434 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"7a2c4722-11cc-426c-8101-496c9ee97ca2\" (UID: \"7a2c4722-11cc-426c-8101-496c9ee97ca2\") " Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.898374 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7a2c4722-11cc-426c-8101-496c9ee97ca2-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "7a2c4722-11cc-426c-8101-496c9ee97ca2" (UID: "7a2c4722-11cc-426c-8101-496c9ee97ca2"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.898761 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7a2c4722-11cc-426c-8101-496c9ee97ca2-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "7a2c4722-11cc-426c-8101-496c9ee97ca2" (UID: "7a2c4722-11cc-426c-8101-496c9ee97ca2"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.899029 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7a2c4722-11cc-426c-8101-496c9ee97ca2-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "7a2c4722-11cc-426c-8101-496c9ee97ca2" (UID: "7a2c4722-11cc-426c-8101-496c9ee97ca2"). InnerVolumeSpecName "rabbitmq-erlang-cookie". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.899384 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d692722b-f7fd-447c-8b7a-f56cff940d91-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"d692722b-f7fd-447c-8b7a-f56cff940d91\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.899478 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"rabbitmq-server-0\" (UID: \"d692722b-f7fd-447c-8b7a-f56cff940d91\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.899589 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/d692722b-f7fd-447c-8b7a-f56cff940d91-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"d692722b-f7fd-447c-8b7a-f56cff940d91\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.899675 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d692722b-f7fd-447c-8b7a-f56cff940d91-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"d692722b-f7fd-447c-8b7a-f56cff940d91\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.899726 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d692722b-f7fd-447c-8b7a-f56cff940d91-server-conf\") pod \"rabbitmq-server-0\" (UID: \"d692722b-f7fd-447c-8b7a-f56cff940d91\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.899997 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d692722b-f7fd-447c-8b7a-f56cff940d91-pod-info\") pod \"rabbitmq-server-0\" (UID: \"d692722b-f7fd-447c-8b7a-f56cff940d91\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.900088 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d692722b-f7fd-447c-8b7a-f56cff940d91-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"d692722b-f7fd-447c-8b7a-f56cff940d91\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.900240 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t77q5\" (UniqueName: \"kubernetes.io/projected/d692722b-f7fd-447c-8b7a-f56cff940d91-kube-api-access-t77q5\") pod \"rabbitmq-server-0\" (UID: \"d692722b-f7fd-447c-8b7a-f56cff940d91\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.900277 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d692722b-f7fd-447c-8b7a-f56cff940d91-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: 
\"d692722b-f7fd-447c-8b7a-f56cff940d91\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.900306 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d692722b-f7fd-447c-8b7a-f56cff940d91-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"d692722b-f7fd-447c-8b7a-f56cff940d91\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.900489 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d692722b-f7fd-447c-8b7a-f56cff940d91-config-data\") pod \"rabbitmq-server-0\" (UID: \"d692722b-f7fd-447c-8b7a-f56cff940d91\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.900594 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7a2c4722-11cc-426c-8101-496c9ee97ca2-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.900607 4558 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7a2c4722-11cc-426c-8101-496c9ee97ca2-plugins-conf\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.900618 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7a2c4722-11cc-426c-8101-496c9ee97ca2-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.904016 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7a2c4722-11cc-426c-8101-496c9ee97ca2-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "7a2c4722-11cc-426c-8101-496c9ee97ca2" (UID: "7a2c4722-11cc-426c-8101-496c9ee97ca2"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.904403 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "persistence") pod "7a2c4722-11cc-426c-8101-496c9ee97ca2" (UID: "7a2c4722-11cc-426c-8101-496c9ee97ca2"). InnerVolumeSpecName "local-storage04-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.904590 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7a2c4722-11cc-426c-8101-496c9ee97ca2-kube-api-access-zdxrq" (OuterVolumeSpecName: "kube-api-access-zdxrq") pod "7a2c4722-11cc-426c-8101-496c9ee97ca2" (UID: "7a2c4722-11cc-426c-8101-496c9ee97ca2"). InnerVolumeSpecName "kube-api-access-zdxrq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.906280 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7a2c4722-11cc-426c-8101-496c9ee97ca2-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "7a2c4722-11cc-426c-8101-496c9ee97ca2" (UID: "7a2c4722-11cc-426c-8101-496c9ee97ca2"). InnerVolumeSpecName "rabbitmq-tls". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.908811 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/7a2c4722-11cc-426c-8101-496c9ee97ca2-pod-info" (OuterVolumeSpecName: "pod-info") pod "7a2c4722-11cc-426c-8101-496c9ee97ca2" (UID: "7a2c4722-11cc-426c-8101-496c9ee97ca2"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.944977 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7a2c4722-11cc-426c-8101-496c9ee97ca2-config-data" (OuterVolumeSpecName: "config-data") pod "7a2c4722-11cc-426c-8101-496c9ee97ca2" (UID: "7a2c4722-11cc-426c-8101-496c9ee97ca2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.956319 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7a2c4722-11cc-426c-8101-496c9ee97ca2-server-conf" (OuterVolumeSpecName: "server-conf") pod "7a2c4722-11cc-426c-8101-496c9ee97ca2" (UID: "7a2c4722-11cc-426c-8101-496c9ee97ca2"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:26:59 crc kubenswrapper[4558]: I0120 17:26:59.988148 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7a2c4722-11cc-426c-8101-496c9ee97ca2-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "7a2c4722-11cc-426c-8101-496c9ee97ca2" (UID: "7a2c4722-11cc-426c-8101-496c9ee97ca2"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:27:00 crc kubenswrapper[4558]: I0120 17:27:00.002975 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d692722b-f7fd-447c-8b7a-f56cff940d91-config-data\") pod \"rabbitmq-server-0\" (UID: \"d692722b-f7fd-447c-8b7a-f56cff940d91\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:27:00 crc kubenswrapper[4558]: I0120 17:27:00.003027 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d692722b-f7fd-447c-8b7a-f56cff940d91-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"d692722b-f7fd-447c-8b7a-f56cff940d91\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:27:00 crc kubenswrapper[4558]: I0120 17:27:00.003063 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"rabbitmq-server-0\" (UID: \"d692722b-f7fd-447c-8b7a-f56cff940d91\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:27:00 crc kubenswrapper[4558]: I0120 17:27:00.003086 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/d692722b-f7fd-447c-8b7a-f56cff940d91-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"d692722b-f7fd-447c-8b7a-f56cff940d91\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:27:00 crc kubenswrapper[4558]: I0120 17:27:00.003120 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d692722b-f7fd-447c-8b7a-f56cff940d91-rabbitmq-plugins\") 
pod \"rabbitmq-server-0\" (UID: \"d692722b-f7fd-447c-8b7a-f56cff940d91\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:27:00 crc kubenswrapper[4558]: I0120 17:27:00.003141 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d692722b-f7fd-447c-8b7a-f56cff940d91-server-conf\") pod \"rabbitmq-server-0\" (UID: \"d692722b-f7fd-447c-8b7a-f56cff940d91\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:27:00 crc kubenswrapper[4558]: I0120 17:27:00.003217 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d692722b-f7fd-447c-8b7a-f56cff940d91-pod-info\") pod \"rabbitmq-server-0\" (UID: \"d692722b-f7fd-447c-8b7a-f56cff940d91\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:27:00 crc kubenswrapper[4558]: I0120 17:27:00.003252 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d692722b-f7fd-447c-8b7a-f56cff940d91-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"d692722b-f7fd-447c-8b7a-f56cff940d91\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:27:00 crc kubenswrapper[4558]: I0120 17:27:00.003304 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t77q5\" (UniqueName: \"kubernetes.io/projected/d692722b-f7fd-447c-8b7a-f56cff940d91-kube-api-access-t77q5\") pod \"rabbitmq-server-0\" (UID: \"d692722b-f7fd-447c-8b7a-f56cff940d91\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:27:00 crc kubenswrapper[4558]: I0120 17:27:00.003335 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d692722b-f7fd-447c-8b7a-f56cff940d91-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"d692722b-f7fd-447c-8b7a-f56cff940d91\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:27:00 crc kubenswrapper[4558]: I0120 17:27:00.003354 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d692722b-f7fd-447c-8b7a-f56cff940d91-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"d692722b-f7fd-447c-8b7a-f56cff940d91\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:27:00 crc kubenswrapper[4558]: I0120 17:27:00.003430 4558 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7a2c4722-11cc-426c-8101-496c9ee97ca2-server-conf\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:00 crc kubenswrapper[4558]: I0120 17:27:00.003460 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" " Jan 20 17:27:00 crc kubenswrapper[4558]: I0120 17:27:00.003472 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zdxrq\" (UniqueName: \"kubernetes.io/projected/7a2c4722-11cc-426c-8101-496c9ee97ca2-kube-api-access-zdxrq\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:00 crc kubenswrapper[4558]: I0120 17:27:00.003483 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/7a2c4722-11cc-426c-8101-496c9ee97ca2-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:00 crc kubenswrapper[4558]: I0120 17:27:00.003492 4558 reconciler_common.go:293] 
"Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7a2c4722-11cc-426c-8101-496c9ee97ca2-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:00 crc kubenswrapper[4558]: I0120 17:27:00.003502 4558 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7a2c4722-11cc-426c-8101-496c9ee97ca2-pod-info\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:00 crc kubenswrapper[4558]: I0120 17:27:00.003511 4558 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7a2c4722-11cc-426c-8101-496c9ee97ca2-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:00 crc kubenswrapper[4558]: I0120 17:27:00.003522 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7a2c4722-11cc-426c-8101-496c9ee97ca2-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:00 crc kubenswrapper[4558]: I0120 17:27:00.014677 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"rabbitmq-server-0\" (UID: \"d692722b-f7fd-447c-8b7a-f56cff940d91\") device mount path \"/mnt/openstack/pv07\"" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:27:00 crc kubenswrapper[4558]: I0120 17:27:00.023531 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d692722b-f7fd-447c-8b7a-f56cff940d91-config-data\") pod \"rabbitmq-server-0\" (UID: \"d692722b-f7fd-447c-8b7a-f56cff940d91\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:27:00 crc kubenswrapper[4558]: I0120 17:27:00.024293 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d692722b-f7fd-447c-8b7a-f56cff940d91-server-conf\") pod \"rabbitmq-server-0\" (UID: \"d692722b-f7fd-447c-8b7a-f56cff940d91\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:27:00 crc kubenswrapper[4558]: I0120 17:27:00.025711 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d692722b-f7fd-447c-8b7a-f56cff940d91-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"d692722b-f7fd-447c-8b7a-f56cff940d91\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:27:00 crc kubenswrapper[4558]: I0120 17:27:00.025976 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d692722b-f7fd-447c-8b7a-f56cff940d91-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"d692722b-f7fd-447c-8b7a-f56cff940d91\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:27:00 crc kubenswrapper[4558]: I0120 17:27:00.026601 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d692722b-f7fd-447c-8b7a-f56cff940d91-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"d692722b-f7fd-447c-8b7a-f56cff940d91\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:27:00 crc kubenswrapper[4558]: I0120 17:27:00.027790 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d692722b-f7fd-447c-8b7a-f56cff940d91-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"d692722b-f7fd-447c-8b7a-f56cff940d91\") " 
pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:27:00 crc kubenswrapper[4558]: I0120 17:27:00.028417 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/d692722b-f7fd-447c-8b7a-f56cff940d91-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"d692722b-f7fd-447c-8b7a-f56cff940d91\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:27:00 crc kubenswrapper[4558]: I0120 17:27:00.028788 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc" Jan 20 17:27:00 crc kubenswrapper[4558]: I0120 17:27:00.028884 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d692722b-f7fd-447c-8b7a-f56cff940d91-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"d692722b-f7fd-447c-8b7a-f56cff940d91\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:27:00 crc kubenswrapper[4558]: I0120 17:27:00.029921 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d692722b-f7fd-447c-8b7a-f56cff940d91-pod-info\") pod \"rabbitmq-server-0\" (UID: \"d692722b-f7fd-447c-8b7a-f56cff940d91\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:27:00 crc kubenswrapper[4558]: I0120 17:27:00.030895 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t77q5\" (UniqueName: \"kubernetes.io/projected/d692722b-f7fd-447c-8b7a-f56cff940d91-kube-api-access-t77q5\") pod \"rabbitmq-server-0\" (UID: \"d692722b-f7fd-447c-8b7a-f56cff940d91\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:27:00 crc kubenswrapper[4558]: I0120 17:27:00.059294 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"rabbitmq-server-0\" (UID: \"d692722b-f7fd-447c-8b7a-f56cff940d91\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:27:00 crc kubenswrapper[4558]: I0120 17:27:00.105722 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:00 crc kubenswrapper[4558]: I0120 17:27:00.111685 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:27:00 crc kubenswrapper[4558]: I0120 17:27:00.128339 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:27:00 crc kubenswrapper[4558]: I0120 17:27:00.578531 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="033e5f26-b8e8-48f1-affd-3b9e7fba316a" path="/var/lib/kubelet/pods/033e5f26-b8e8-48f1-affd-3b9e7fba316a/volumes" Jan 20 17:27:00 crc kubenswrapper[4558]: I0120 17:27:00.589664 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:27:00 crc kubenswrapper[4558]: I0120 17:27:00.589759 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:27:00 crc kubenswrapper[4558]: I0120 17:27:00.599513 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:27:00 crc kubenswrapper[4558]: I0120 17:27:00.683835 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-0" event={"ID":"d692722b-f7fd-447c-8b7a-f56cff940d91","Type":"ContainerStarted","Data":"29eb0229195cd308f45313fbfc05f6b3b4a6af8e6f77a17c66a825db11c76f1c"} Jan 20 17:27:00 crc kubenswrapper[4558]: I0120 17:27:00.688728 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" event={"ID":"7a2c4722-11cc-426c-8101-496c9ee97ca2","Type":"ContainerDied","Data":"a404c6b69974a9bb907bc97d8324c893fb2b73341afff70845cd81ca99024ca5"} Jan 20 17:27:00 crc kubenswrapper[4558]: I0120 17:27:00.688850 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:27:00 crc kubenswrapper[4558]: I0120 17:27:00.688851 4558 scope.go:117] "RemoveContainer" containerID="d6656270debe6f92e12b0faf0e10cdbadf8fd2e24c19d3019d8548693ab17740" Jan 20 17:27:00 crc kubenswrapper[4558]: I0120 17:27:00.693195 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:27:00 crc kubenswrapper[4558]: I0120 17:27:00.741812 4558 scope.go:117] "RemoveContainer" containerID="844cb778093e33ece39e19f2cf2d5530f5852b1038b5c9c019b1574903b32025" Jan 20 17:27:00 crc kubenswrapper[4558]: I0120 17:27:00.755712 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-0"] Jan 20 17:27:00 crc kubenswrapper[4558]: I0120 17:27:00.764830 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-0"] Jan 20 17:27:00 crc kubenswrapper[4558]: I0120 17:27:00.781331 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-0"] Jan 20 17:27:00 crc kubenswrapper[4558]: E0120 17:27:00.781762 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a2c4722-11cc-426c-8101-496c9ee97ca2" containerName="rabbitmq" Jan 20 17:27:00 crc kubenswrapper[4558]: I0120 17:27:00.781787 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a2c4722-11cc-426c-8101-496c9ee97ca2" containerName="rabbitmq" Jan 20 17:27:00 crc kubenswrapper[4558]: E0120 17:27:00.781810 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a2c4722-11cc-426c-8101-496c9ee97ca2" containerName="setup-container" Jan 20 17:27:00 crc kubenswrapper[4558]: I0120 17:27:00.781816 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a2c4722-11cc-426c-8101-496c9ee97ca2" containerName="setup-container" Jan 20 17:27:00 crc kubenswrapper[4558]: I0120 17:27:00.782237 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a2c4722-11cc-426c-8101-496c9ee97ca2" containerName="rabbitmq" Jan 20 17:27:00 crc kubenswrapper[4558]: I0120 17:27:00.783838 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:27:00 crc kubenswrapper[4558]: I0120 17:27:00.785545 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"rabbitmq-cell1-default-user" Jan 20 17:27:00 crc kubenswrapper[4558]: I0120 17:27:00.785839 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"rabbitmq-cell1-server-dockercfg-zh5dn" Jan 20 17:27:00 crc kubenswrapper[4558]: I0120 17:27:00.788650 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"rabbitmq-cell1-config-data" Jan 20 17:27:00 crc kubenswrapper[4558]: I0120 17:27:00.788728 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"rabbitmq-cell1-erlang-cookie" Jan 20 17:27:00 crc kubenswrapper[4558]: I0120 17:27:00.791006 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"rabbitmq-cell1-server-conf" Jan 20 17:27:00 crc kubenswrapper[4558]: I0120 17:27:00.791052 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"rabbitmq-cell1-plugins-conf" Jan 20 17:27:00 crc kubenswrapper[4558]: I0120 17:27:00.791076 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-rabbitmq-cell1-svc" Jan 20 17:27:00 crc kubenswrapper[4558]: I0120 17:27:00.797535 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-0"] Jan 20 17:27:00 crc kubenswrapper[4558]: I0120 17:27:00.822420 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:27:00 crc kubenswrapper[4558]: I0120 17:27:00.924619 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/48cfc6e5-774d-4e7d-8103-f6a3260ea14c-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"48cfc6e5-774d-4e7d-8103-f6a3260ea14c\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:27:00 crc kubenswrapper[4558]: I0120 17:27:00.924711 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-788s8\" (UniqueName: \"kubernetes.io/projected/48cfc6e5-774d-4e7d-8103-f6a3260ea14c-kube-api-access-788s8\") pod \"rabbitmq-cell1-server-0\" (UID: \"48cfc6e5-774d-4e7d-8103-f6a3260ea14c\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:27:00 crc kubenswrapper[4558]: I0120 17:27:00.924807 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/48cfc6e5-774d-4e7d-8103-f6a3260ea14c-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"48cfc6e5-774d-4e7d-8103-f6a3260ea14c\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:27:00 crc kubenswrapper[4558]: I0120 17:27:00.924853 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"48cfc6e5-774d-4e7d-8103-f6a3260ea14c\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:27:00 crc kubenswrapper[4558]: I0120 17:27:00.924883 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: 
\"kubernetes.io/downward-api/48cfc6e5-774d-4e7d-8103-f6a3260ea14c-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"48cfc6e5-774d-4e7d-8103-f6a3260ea14c\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:27:00 crc kubenswrapper[4558]: I0120 17:27:00.924908 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/48cfc6e5-774d-4e7d-8103-f6a3260ea14c-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"48cfc6e5-774d-4e7d-8103-f6a3260ea14c\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:27:00 crc kubenswrapper[4558]: I0120 17:27:00.924934 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/48cfc6e5-774d-4e7d-8103-f6a3260ea14c-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"48cfc6e5-774d-4e7d-8103-f6a3260ea14c\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:27:00 crc kubenswrapper[4558]: I0120 17:27:00.925224 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/48cfc6e5-774d-4e7d-8103-f6a3260ea14c-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"48cfc6e5-774d-4e7d-8103-f6a3260ea14c\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:27:00 crc kubenswrapper[4558]: I0120 17:27:00.925362 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/48cfc6e5-774d-4e7d-8103-f6a3260ea14c-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"48cfc6e5-774d-4e7d-8103-f6a3260ea14c\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:27:00 crc kubenswrapper[4558]: I0120 17:27:00.925398 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/48cfc6e5-774d-4e7d-8103-f6a3260ea14c-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"48cfc6e5-774d-4e7d-8103-f6a3260ea14c\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:27:00 crc kubenswrapper[4558]: I0120 17:27:00.925423 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/48cfc6e5-774d-4e7d-8103-f6a3260ea14c-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"48cfc6e5-774d-4e7d-8103-f6a3260ea14c\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:27:01 crc kubenswrapper[4558]: I0120 17:27:01.027519 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/48cfc6e5-774d-4e7d-8103-f6a3260ea14c-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"48cfc6e5-774d-4e7d-8103-f6a3260ea14c\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:27:01 crc kubenswrapper[4558]: I0120 17:27:01.027611 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/48cfc6e5-774d-4e7d-8103-f6a3260ea14c-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"48cfc6e5-774d-4e7d-8103-f6a3260ea14c\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:27:01 crc kubenswrapper[4558]: I0120 17:27:01.027647 4558 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/48cfc6e5-774d-4e7d-8103-f6a3260ea14c-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"48cfc6e5-774d-4e7d-8103-f6a3260ea14c\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:27:01 crc kubenswrapper[4558]: I0120 17:27:01.027667 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/48cfc6e5-774d-4e7d-8103-f6a3260ea14c-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"48cfc6e5-774d-4e7d-8103-f6a3260ea14c\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:27:01 crc kubenswrapper[4558]: I0120 17:27:01.027727 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/48cfc6e5-774d-4e7d-8103-f6a3260ea14c-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"48cfc6e5-774d-4e7d-8103-f6a3260ea14c\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:27:01 crc kubenswrapper[4558]: I0120 17:27:01.027783 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-788s8\" (UniqueName: \"kubernetes.io/projected/48cfc6e5-774d-4e7d-8103-f6a3260ea14c-kube-api-access-788s8\") pod \"rabbitmq-cell1-server-0\" (UID: \"48cfc6e5-774d-4e7d-8103-f6a3260ea14c\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:27:01 crc kubenswrapper[4558]: I0120 17:27:01.027861 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/48cfc6e5-774d-4e7d-8103-f6a3260ea14c-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"48cfc6e5-774d-4e7d-8103-f6a3260ea14c\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:27:01 crc kubenswrapper[4558]: I0120 17:27:01.027902 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"48cfc6e5-774d-4e7d-8103-f6a3260ea14c\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:27:01 crc kubenswrapper[4558]: I0120 17:27:01.027937 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/48cfc6e5-774d-4e7d-8103-f6a3260ea14c-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"48cfc6e5-774d-4e7d-8103-f6a3260ea14c\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:27:01 crc kubenswrapper[4558]: I0120 17:27:01.027957 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/48cfc6e5-774d-4e7d-8103-f6a3260ea14c-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"48cfc6e5-774d-4e7d-8103-f6a3260ea14c\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:27:01 crc kubenswrapper[4558]: I0120 17:27:01.027988 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/48cfc6e5-774d-4e7d-8103-f6a3260ea14c-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"48cfc6e5-774d-4e7d-8103-f6a3260ea14c\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:27:01 crc kubenswrapper[4558]: I0120 17:27:01.028433 4558 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/48cfc6e5-774d-4e7d-8103-f6a3260ea14c-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"48cfc6e5-774d-4e7d-8103-f6a3260ea14c\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:27:01 crc kubenswrapper[4558]: I0120 17:27:01.028447 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"48cfc6e5-774d-4e7d-8103-f6a3260ea14c\") device mount path \"/mnt/openstack/pv04\"" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:27:01 crc kubenswrapper[4558]: I0120 17:27:01.028474 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/48cfc6e5-774d-4e7d-8103-f6a3260ea14c-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"48cfc6e5-774d-4e7d-8103-f6a3260ea14c\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:27:01 crc kubenswrapper[4558]: I0120 17:27:01.028920 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/48cfc6e5-774d-4e7d-8103-f6a3260ea14c-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"48cfc6e5-774d-4e7d-8103-f6a3260ea14c\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:27:01 crc kubenswrapper[4558]: I0120 17:27:01.029256 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/48cfc6e5-774d-4e7d-8103-f6a3260ea14c-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"48cfc6e5-774d-4e7d-8103-f6a3260ea14c\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:27:01 crc kubenswrapper[4558]: I0120 17:27:01.029460 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/48cfc6e5-774d-4e7d-8103-f6a3260ea14c-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"48cfc6e5-774d-4e7d-8103-f6a3260ea14c\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:27:01 crc kubenswrapper[4558]: I0120 17:27:01.032736 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/48cfc6e5-774d-4e7d-8103-f6a3260ea14c-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"48cfc6e5-774d-4e7d-8103-f6a3260ea14c\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:27:01 crc kubenswrapper[4558]: I0120 17:27:01.032957 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/48cfc6e5-774d-4e7d-8103-f6a3260ea14c-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"48cfc6e5-774d-4e7d-8103-f6a3260ea14c\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:27:01 crc kubenswrapper[4558]: I0120 17:27:01.043580 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/48cfc6e5-774d-4e7d-8103-f6a3260ea14c-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"48cfc6e5-774d-4e7d-8103-f6a3260ea14c\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:27:01 crc kubenswrapper[4558]: I0120 17:27:01.043611 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: 
\"kubernetes.io/projected/48cfc6e5-774d-4e7d-8103-f6a3260ea14c-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"48cfc6e5-774d-4e7d-8103-f6a3260ea14c\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:27:01 crc kubenswrapper[4558]: I0120 17:27:01.046035 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-788s8\" (UniqueName: \"kubernetes.io/projected/48cfc6e5-774d-4e7d-8103-f6a3260ea14c-kube-api-access-788s8\") pod \"rabbitmq-cell1-server-0\" (UID: \"48cfc6e5-774d-4e7d-8103-f6a3260ea14c\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:27:01 crc kubenswrapper[4558]: I0120 17:27:01.053420 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"48cfc6e5-774d-4e7d-8103-f6a3260ea14c\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:27:01 crc kubenswrapper[4558]: I0120 17:27:01.124338 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:27:01 crc kubenswrapper[4558]: I0120 17:27:01.544851 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-0"] Jan 20 17:27:01 crc kubenswrapper[4558]: W0120 17:27:01.550908 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod48cfc6e5_774d_4e7d_8103_f6a3260ea14c.slice/crio-57f6281f6524d46ddafe1f044790509fd67d8d7b1d11b6ca7151d5579ff22e21 WatchSource:0}: Error finding container 57f6281f6524d46ddafe1f044790509fd67d8d7b1d11b6ca7151d5579ff22e21: Status 404 returned error can't find the container with id 57f6281f6524d46ddafe1f044790509fd67d8d7b1d11b6ca7151d5579ff22e21 Jan 20 17:27:01 crc kubenswrapper[4558]: I0120 17:27:01.642814 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:27:01 crc kubenswrapper[4558]: I0120 17:27:01.684356 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:27:01 crc kubenswrapper[4558]: I0120 17:27:01.685297 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:27:01 crc kubenswrapper[4558]: I0120 17:27:01.708817 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" event={"ID":"48cfc6e5-774d-4e7d-8103-f6a3260ea14c","Type":"ContainerStarted","Data":"57f6281f6524d46ddafe1f044790509fd67d8d7b1d11b6ca7151d5579ff22e21"} Jan 20 17:27:01 crc kubenswrapper[4558]: I0120 17:27:01.770059 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:27:02 crc kubenswrapper[4558]: I0120 17:27:02.593412 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7a2c4722-11cc-426c-8101-496c9ee97ca2" path="/var/lib/kubelet/pods/7a2c4722-11cc-426c-8101-496c9ee97ca2/volumes" Jan 20 17:27:02 crc kubenswrapper[4558]: I0120 17:27:02.594689 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:27:02 crc kubenswrapper[4558]: I0120 17:27:02.693399 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:27:02 crc kubenswrapper[4558]: I0120 17:27:02.726792 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-0" event={"ID":"d692722b-f7fd-447c-8b7a-f56cff940d91","Type":"ContainerStarted","Data":"dfad3104c55017bd8727f5d89d9c30642f8493fbe621ac636c457844689b6a39"} Jan 20 17:27:02 crc kubenswrapper[4558]: I0120 17:27:02.756853 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:27:02 crc kubenswrapper[4558]: I0120 17:27:02.850438 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:27:03 crc kubenswrapper[4558]: I0120 17:27:03.755647 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" event={"ID":"48cfc6e5-774d-4e7d-8103-f6a3260ea14c","Type":"ContainerStarted","Data":"253c7b1bb698ada295155d4391adcd6900c7ef92966fcf1c968bb5c7832f4326"} Jan 20 17:27:04 crc kubenswrapper[4558]: I0120 17:27:04.764690 4558 generic.go:334] "Generic (PLEG): container finished" podID="4869db4f-9810-4685-a2a3-a1103e998535" containerID="12bf0dfcd5e93ae240822317488ad957dc71e08560168ddf9dbb00c2223c68b3" exitCode=0 Jan 20 17:27:04 crc kubenswrapper[4558]: I0120 17:27:04.764764 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-sync-rq6qr" event={"ID":"4869db4f-9810-4685-a2a3-a1103e998535","Type":"ContainerDied","Data":"12bf0dfcd5e93ae240822317488ad957dc71e08560168ddf9dbb00c2223c68b3"} Jan 20 17:27:04 crc kubenswrapper[4558]: I0120 17:27:04.766636 4558 generic.go:334] "Generic (PLEG): container finished" podID="f59d0040-b44b-46e3-bb18-c0ea74722cd3" containerID="dcea6d9bce8f8b365b1e3907b2409a3e2ed23260cd8dc669504ca51f06ab237e" exitCode=0 Jan 20 17:27:04 crc kubenswrapper[4558]: I0120 17:27:04.767081 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-sync-92t4c" event={"ID":"f59d0040-b44b-46e3-bb18-c0ea74722cd3","Type":"ContainerDied","Data":"dcea6d9bce8f8b365b1e3907b2409a3e2ed23260cd8dc669504ca51f06ab237e"} Jan 20 17:27:05 crc kubenswrapper[4558]: I0120 17:27:05.047630 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:27:05 crc kubenswrapper[4558]: I0120 17:27:05.048278 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="17054bbc-d1b1-47ae-b42a-bc09c15b2b9b" containerName="ceilometer-central-agent" containerID="cri-o://e2e1e1cb6e8a72f68287eff8a5396b012d963913a047795276d785bba4b6a44c" gracePeriod=30 Jan 20 17:27:05 crc kubenswrapper[4558]: I0120 17:27:05.048392 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="17054bbc-d1b1-47ae-b42a-bc09c15b2b9b" containerName="proxy-httpd" containerID="cri-o://d20cdc2ad9b52842bb98fbc066110f969938fc19f05488867c07cc1ed5ad8486" gracePeriod=30 Jan 20 17:27:05 crc kubenswrapper[4558]: I0120 17:27:05.048353 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="17054bbc-d1b1-47ae-b42a-bc09c15b2b9b" containerName="ceilometer-notification-agent" containerID="cri-o://5c49ce71384981a8c60295f3e437aa3231d0f9a9b4690345fd2a2732983bcc39" gracePeriod=30 Jan 20 17:27:05 crc kubenswrapper[4558]: I0120 17:27:05.048309 4558 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="17054bbc-d1b1-47ae-b42a-bc09c15b2b9b" containerName="sg-core" containerID="cri-o://08372e554acc972018371c794d0195d55e9ca9f63ea522358caad97d33501495" gracePeriod=30 Jan 20 17:27:05 crc kubenswrapper[4558]: I0120 17:27:05.781632 4558 generic.go:334] "Generic (PLEG): container finished" podID="5f91fe62-3615-4153-a6c7-1652a5780bb5" containerID="a9c0c1143b3b33cc02b475f942743f25949d7a8e463523453117829d18bc66f6" exitCode=0 Jan 20 17:27:05 crc kubenswrapper[4558]: I0120 17:27:05.782061 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-sync-gn4wj" event={"ID":"5f91fe62-3615-4153-a6c7-1652a5780bb5","Type":"ContainerDied","Data":"a9c0c1143b3b33cc02b475f942743f25949d7a8e463523453117829d18bc66f6"} Jan 20 17:27:05 crc kubenswrapper[4558]: I0120 17:27:05.791862 4558 generic.go:334] "Generic (PLEG): container finished" podID="17054bbc-d1b1-47ae-b42a-bc09c15b2b9b" containerID="d20cdc2ad9b52842bb98fbc066110f969938fc19f05488867c07cc1ed5ad8486" exitCode=0 Jan 20 17:27:05 crc kubenswrapper[4558]: I0120 17:27:05.791899 4558 generic.go:334] "Generic (PLEG): container finished" podID="17054bbc-d1b1-47ae-b42a-bc09c15b2b9b" containerID="08372e554acc972018371c794d0195d55e9ca9f63ea522358caad97d33501495" exitCode=2 Jan 20 17:27:05 crc kubenswrapper[4558]: I0120 17:27:05.791908 4558 generic.go:334] "Generic (PLEG): container finished" podID="17054bbc-d1b1-47ae-b42a-bc09c15b2b9b" containerID="5c49ce71384981a8c60295f3e437aa3231d0f9a9b4690345fd2a2732983bcc39" exitCode=0 Jan 20 17:27:05 crc kubenswrapper[4558]: I0120 17:27:05.791916 4558 generic.go:334] "Generic (PLEG): container finished" podID="17054bbc-d1b1-47ae-b42a-bc09c15b2b9b" containerID="e2e1e1cb6e8a72f68287eff8a5396b012d963913a047795276d785bba4b6a44c" exitCode=0 Jan 20 17:27:05 crc kubenswrapper[4558]: I0120 17:27:05.791999 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"17054bbc-d1b1-47ae-b42a-bc09c15b2b9b","Type":"ContainerDied","Data":"d20cdc2ad9b52842bb98fbc066110f969938fc19f05488867c07cc1ed5ad8486"} Jan 20 17:27:05 crc kubenswrapper[4558]: I0120 17:27:05.792033 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"17054bbc-d1b1-47ae-b42a-bc09c15b2b9b","Type":"ContainerDied","Data":"08372e554acc972018371c794d0195d55e9ca9f63ea522358caad97d33501495"} Jan 20 17:27:05 crc kubenswrapper[4558]: I0120 17:27:05.792043 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"17054bbc-d1b1-47ae-b42a-bc09c15b2b9b","Type":"ContainerDied","Data":"5c49ce71384981a8c60295f3e437aa3231d0f9a9b4690345fd2a2732983bcc39"} Jan 20 17:27:05 crc kubenswrapper[4558]: I0120 17:27:05.792051 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"17054bbc-d1b1-47ae-b42a-bc09c15b2b9b","Type":"ContainerDied","Data":"e2e1e1cb6e8a72f68287eff8a5396b012d963913a047795276d785bba4b6a44c"} Jan 20 17:27:05 crc kubenswrapper[4558]: I0120 17:27:05.792060 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"17054bbc-d1b1-47ae-b42a-bc09c15b2b9b","Type":"ContainerDied","Data":"7ed30f016afffbf796c3d9a0ae744c831f74000f5a3277f831a1b3ab5170e16d"} Jan 20 17:27:05 crc kubenswrapper[4558]: I0120 17:27:05.792072 4558 pod_container_deletor.go:80] "Container not found in pod's 
containers" containerID="7ed30f016afffbf796c3d9a0ae744c831f74000f5a3277f831a1b3ab5170e16d" Jan 20 17:27:05 crc kubenswrapper[4558]: I0120 17:27:05.806574 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:27:05 crc kubenswrapper[4558]: I0120 17:27:05.807896 4558 generic.go:334] "Generic (PLEG): container finished" podID="04c5c6c7-acc2-4821-a68b-c2ac094e931f" containerID="8657c2c036a43ada6b1b0d8548a2af3d55c5d81b571839269cbc4e19ec5c4f82" exitCode=0 Jan 20 17:27:05 crc kubenswrapper[4558]: I0120 17:27:05.808057 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-hbfss" event={"ID":"04c5c6c7-acc2-4821-a68b-c2ac094e931f","Type":"ContainerDied","Data":"8657c2c036a43ada6b1b0d8548a2af3d55c5d81b571839269cbc4e19ec5c4f82"} Jan 20 17:27:05 crc kubenswrapper[4558]: I0120 17:27:05.953136 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5jp6k\" (UniqueName: \"kubernetes.io/projected/17054bbc-d1b1-47ae-b42a-bc09c15b2b9b-kube-api-access-5jp6k\") pod \"17054bbc-d1b1-47ae-b42a-bc09c15b2b9b\" (UID: \"17054bbc-d1b1-47ae-b42a-bc09c15b2b9b\") " Jan 20 17:27:05 crc kubenswrapper[4558]: I0120 17:27:05.953224 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/17054bbc-d1b1-47ae-b42a-bc09c15b2b9b-ceilometer-tls-certs\") pod \"17054bbc-d1b1-47ae-b42a-bc09c15b2b9b\" (UID: \"17054bbc-d1b1-47ae-b42a-bc09c15b2b9b\") " Jan 20 17:27:05 crc kubenswrapper[4558]: I0120 17:27:05.954066 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/17054bbc-d1b1-47ae-b42a-bc09c15b2b9b-sg-core-conf-yaml\") pod \"17054bbc-d1b1-47ae-b42a-bc09c15b2b9b\" (UID: \"17054bbc-d1b1-47ae-b42a-bc09c15b2b9b\") " Jan 20 17:27:05 crc kubenswrapper[4558]: I0120 17:27:05.954118 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/17054bbc-d1b1-47ae-b42a-bc09c15b2b9b-scripts\") pod \"17054bbc-d1b1-47ae-b42a-bc09c15b2b9b\" (UID: \"17054bbc-d1b1-47ae-b42a-bc09c15b2b9b\") " Jan 20 17:27:05 crc kubenswrapper[4558]: I0120 17:27:05.954159 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17054bbc-d1b1-47ae-b42a-bc09c15b2b9b-config-data\") pod \"17054bbc-d1b1-47ae-b42a-bc09c15b2b9b\" (UID: \"17054bbc-d1b1-47ae-b42a-bc09c15b2b9b\") " Jan 20 17:27:05 crc kubenswrapper[4558]: I0120 17:27:05.954213 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/17054bbc-d1b1-47ae-b42a-bc09c15b2b9b-run-httpd\") pod \"17054bbc-d1b1-47ae-b42a-bc09c15b2b9b\" (UID: \"17054bbc-d1b1-47ae-b42a-bc09c15b2b9b\") " Jan 20 17:27:05 crc kubenswrapper[4558]: I0120 17:27:05.954235 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17054bbc-d1b1-47ae-b42a-bc09c15b2b9b-combined-ca-bundle\") pod \"17054bbc-d1b1-47ae-b42a-bc09c15b2b9b\" (UID: \"17054bbc-d1b1-47ae-b42a-bc09c15b2b9b\") " Jan 20 17:27:05 crc kubenswrapper[4558]: I0120 17:27:05.954358 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/17054bbc-d1b1-47ae-b42a-bc09c15b2b9b-log-httpd\") pod \"17054bbc-d1b1-47ae-b42a-bc09c15b2b9b\" (UID: \"17054bbc-d1b1-47ae-b42a-bc09c15b2b9b\") " Jan 20 17:27:05 crc kubenswrapper[4558]: I0120 17:27:05.955316 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/17054bbc-d1b1-47ae-b42a-bc09c15b2b9b-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "17054bbc-d1b1-47ae-b42a-bc09c15b2b9b" (UID: "17054bbc-d1b1-47ae-b42a-bc09c15b2b9b"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:27:05 crc kubenswrapper[4558]: I0120 17:27:05.955608 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/17054bbc-d1b1-47ae-b42a-bc09c15b2b9b-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "17054bbc-d1b1-47ae-b42a-bc09c15b2b9b" (UID: "17054bbc-d1b1-47ae-b42a-bc09c15b2b9b"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:27:05 crc kubenswrapper[4558]: I0120 17:27:05.958843 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/17054bbc-d1b1-47ae-b42a-bc09c15b2b9b-scripts" (OuterVolumeSpecName: "scripts") pod "17054bbc-d1b1-47ae-b42a-bc09c15b2b9b" (UID: "17054bbc-d1b1-47ae-b42a-bc09c15b2b9b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:05 crc kubenswrapper[4558]: I0120 17:27:05.959093 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/17054bbc-d1b1-47ae-b42a-bc09c15b2b9b-kube-api-access-5jp6k" (OuterVolumeSpecName: "kube-api-access-5jp6k") pod "17054bbc-d1b1-47ae-b42a-bc09c15b2b9b" (UID: "17054bbc-d1b1-47ae-b42a-bc09c15b2b9b"). InnerVolumeSpecName "kube-api-access-5jp6k". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:27:05 crc kubenswrapper[4558]: I0120 17:27:05.979591 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/17054bbc-d1b1-47ae-b42a-bc09c15b2b9b-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "17054bbc-d1b1-47ae-b42a-bc09c15b2b9b" (UID: "17054bbc-d1b1-47ae-b42a-bc09c15b2b9b"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:06 crc kubenswrapper[4558]: I0120 17:27:06.000569 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/17054bbc-d1b1-47ae-b42a-bc09c15b2b9b-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "17054bbc-d1b1-47ae-b42a-bc09c15b2b9b" (UID: "17054bbc-d1b1-47ae-b42a-bc09c15b2b9b"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:06 crc kubenswrapper[4558]: I0120 17:27:06.026427 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/17054bbc-d1b1-47ae-b42a-bc09c15b2b9b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "17054bbc-d1b1-47ae-b42a-bc09c15b2b9b" (UID: "17054bbc-d1b1-47ae-b42a-bc09c15b2b9b"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:06 crc kubenswrapper[4558]: I0120 17:27:06.055145 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/17054bbc-d1b1-47ae-b42a-bc09c15b2b9b-config-data" (OuterVolumeSpecName: "config-data") pod "17054bbc-d1b1-47ae-b42a-bc09c15b2b9b" (UID: "17054bbc-d1b1-47ae-b42a-bc09c15b2b9b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:06 crc kubenswrapper[4558]: I0120 17:27:06.057620 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17054bbc-d1b1-47ae-b42a-bc09c15b2b9b-config-data\") pod \"17054bbc-d1b1-47ae-b42a-bc09c15b2b9b\" (UID: \"17054bbc-d1b1-47ae-b42a-bc09c15b2b9b\") " Jan 20 17:27:06 crc kubenswrapper[4558]: W0120 17:27:06.058026 4558 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/17054bbc-d1b1-47ae-b42a-bc09c15b2b9b/volumes/kubernetes.io~secret/config-data Jan 20 17:27:06 crc kubenswrapper[4558]: I0120 17:27:06.058053 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/17054bbc-d1b1-47ae-b42a-bc09c15b2b9b-config-data" (OuterVolumeSpecName: "config-data") pod "17054bbc-d1b1-47ae-b42a-bc09c15b2b9b" (UID: "17054bbc-d1b1-47ae-b42a-bc09c15b2b9b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:06 crc kubenswrapper[4558]: I0120 17:27:06.059219 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17054bbc-d1b1-47ae-b42a-bc09c15b2b9b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:06 crc kubenswrapper[4558]: I0120 17:27:06.059241 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/17054bbc-d1b1-47ae-b42a-bc09c15b2b9b-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:06 crc kubenswrapper[4558]: I0120 17:27:06.059272 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/17054bbc-d1b1-47ae-b42a-bc09c15b2b9b-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:06 crc kubenswrapper[4558]: I0120 17:27:06.059284 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5jp6k\" (UniqueName: \"kubernetes.io/projected/17054bbc-d1b1-47ae-b42a-bc09c15b2b9b-kube-api-access-5jp6k\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:06 crc kubenswrapper[4558]: I0120 17:27:06.059298 4558 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/17054bbc-d1b1-47ae-b42a-bc09c15b2b9b-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:06 crc kubenswrapper[4558]: I0120 17:27:06.059308 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/17054bbc-d1b1-47ae-b42a-bc09c15b2b9b-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:06 crc kubenswrapper[4558]: I0120 17:27:06.059316 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/17054bbc-d1b1-47ae-b42a-bc09c15b2b9b-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:06 crc kubenswrapper[4558]: I0120 17:27:06.059341 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/17054bbc-d1b1-47ae-b42a-bc09c15b2b9b-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:06 crc kubenswrapper[4558]: I0120 17:27:06.149012 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-db-sync-92t4c" Jan 20 17:27:06 crc kubenswrapper[4558]: I0120 17:27:06.160875 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:27:06 crc kubenswrapper[4558]: I0120 17:27:06.252727 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-db-sync-rq6qr" Jan 20 17:27:06 crc kubenswrapper[4558]: I0120 17:27:06.271243 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f59d0040-b44b-46e3-bb18-c0ea74722cd3-etc-machine-id\") pod \"f59d0040-b44b-46e3-bb18-c0ea74722cd3\" (UID: \"f59d0040-b44b-46e3-bb18-c0ea74722cd3\") " Jan 20 17:27:06 crc kubenswrapper[4558]: I0120 17:27:06.271314 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sk949\" (UniqueName: \"kubernetes.io/projected/f59d0040-b44b-46e3-bb18-c0ea74722cd3-kube-api-access-sk949\") pod \"f59d0040-b44b-46e3-bb18-c0ea74722cd3\" (UID: \"f59d0040-b44b-46e3-bb18-c0ea74722cd3\") " Jan 20 17:27:06 crc kubenswrapper[4558]: I0120 17:27:06.271413 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f59d0040-b44b-46e3-bb18-c0ea74722cd3-scripts\") pod \"f59d0040-b44b-46e3-bb18-c0ea74722cd3\" (UID: \"f59d0040-b44b-46e3-bb18-c0ea74722cd3\") " Jan 20 17:27:06 crc kubenswrapper[4558]: I0120 17:27:06.271452 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f59d0040-b44b-46e3-bb18-c0ea74722cd3-db-sync-config-data\") pod \"f59d0040-b44b-46e3-bb18-c0ea74722cd3\" (UID: \"f59d0040-b44b-46e3-bb18-c0ea74722cd3\") " Jan 20 17:27:06 crc kubenswrapper[4558]: I0120 17:27:06.271414 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f59d0040-b44b-46e3-bb18-c0ea74722cd3-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "f59d0040-b44b-46e3-bb18-c0ea74722cd3" (UID: "f59d0040-b44b-46e3-bb18-c0ea74722cd3"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:27:06 crc kubenswrapper[4558]: I0120 17:27:06.271483 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f59d0040-b44b-46e3-bb18-c0ea74722cd3-config-data\") pod \"f59d0040-b44b-46e3-bb18-c0ea74722cd3\" (UID: \"f59d0040-b44b-46e3-bb18-c0ea74722cd3\") " Jan 20 17:27:06 crc kubenswrapper[4558]: I0120 17:27:06.271571 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f59d0040-b44b-46e3-bb18-c0ea74722cd3-combined-ca-bundle\") pod \"f59d0040-b44b-46e3-bb18-c0ea74722cd3\" (UID: \"f59d0040-b44b-46e3-bb18-c0ea74722cd3\") " Jan 20 17:27:06 crc kubenswrapper[4558]: I0120 17:27:06.273043 4558 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f59d0040-b44b-46e3-bb18-c0ea74722cd3-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:06 crc kubenswrapper[4558]: I0120 17:27:06.277256 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f59d0040-b44b-46e3-bb18-c0ea74722cd3-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "f59d0040-b44b-46e3-bb18-c0ea74722cd3" (UID: "f59d0040-b44b-46e3-bb18-c0ea74722cd3"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:06 crc kubenswrapper[4558]: I0120 17:27:06.278105 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f59d0040-b44b-46e3-bb18-c0ea74722cd3-kube-api-access-sk949" (OuterVolumeSpecName: "kube-api-access-sk949") pod "f59d0040-b44b-46e3-bb18-c0ea74722cd3" (UID: "f59d0040-b44b-46e3-bb18-c0ea74722cd3"). InnerVolumeSpecName "kube-api-access-sk949". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:27:06 crc kubenswrapper[4558]: I0120 17:27:06.283956 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f59d0040-b44b-46e3-bb18-c0ea74722cd3-scripts" (OuterVolumeSpecName: "scripts") pod "f59d0040-b44b-46e3-bb18-c0ea74722cd3" (UID: "f59d0040-b44b-46e3-bb18-c0ea74722cd3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:06 crc kubenswrapper[4558]: I0120 17:27:06.304026 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f59d0040-b44b-46e3-bb18-c0ea74722cd3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f59d0040-b44b-46e3-bb18-c0ea74722cd3" (UID: "f59d0040-b44b-46e3-bb18-c0ea74722cd3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:06 crc kubenswrapper[4558]: I0120 17:27:06.316387 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f59d0040-b44b-46e3-bb18-c0ea74722cd3-config-data" (OuterVolumeSpecName: "config-data") pod "f59d0040-b44b-46e3-bb18-c0ea74722cd3" (UID: "f59d0040-b44b-46e3-bb18-c0ea74722cd3"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:06 crc kubenswrapper[4558]: I0120 17:27:06.374152 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/4869db4f-9810-4685-a2a3-a1103e998535-config\") pod \"4869db4f-9810-4685-a2a3-a1103e998535\" (UID: \"4869db4f-9810-4685-a2a3-a1103e998535\") " Jan 20 17:27:06 crc kubenswrapper[4558]: I0120 17:27:06.374396 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s58ts\" (UniqueName: \"kubernetes.io/projected/4869db4f-9810-4685-a2a3-a1103e998535-kube-api-access-s58ts\") pod \"4869db4f-9810-4685-a2a3-a1103e998535\" (UID: \"4869db4f-9810-4685-a2a3-a1103e998535\") " Jan 20 17:27:06 crc kubenswrapper[4558]: I0120 17:27:06.374514 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4869db4f-9810-4685-a2a3-a1103e998535-combined-ca-bundle\") pod \"4869db4f-9810-4685-a2a3-a1103e998535\" (UID: \"4869db4f-9810-4685-a2a3-a1103e998535\") " Jan 20 17:27:06 crc kubenswrapper[4558]: I0120 17:27:06.375257 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f59d0040-b44b-46e3-bb18-c0ea74722cd3-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:06 crc kubenswrapper[4558]: I0120 17:27:06.375281 4558 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f59d0040-b44b-46e3-bb18-c0ea74722cd3-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:06 crc kubenswrapper[4558]: I0120 17:27:06.375295 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f59d0040-b44b-46e3-bb18-c0ea74722cd3-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:06 crc kubenswrapper[4558]: I0120 17:27:06.375305 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f59d0040-b44b-46e3-bb18-c0ea74722cd3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:06 crc kubenswrapper[4558]: I0120 17:27:06.375314 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sk949\" (UniqueName: \"kubernetes.io/projected/f59d0040-b44b-46e3-bb18-c0ea74722cd3-kube-api-access-sk949\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:06 crc kubenswrapper[4558]: I0120 17:27:06.377979 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4869db4f-9810-4685-a2a3-a1103e998535-kube-api-access-s58ts" (OuterVolumeSpecName: "kube-api-access-s58ts") pod "4869db4f-9810-4685-a2a3-a1103e998535" (UID: "4869db4f-9810-4685-a2a3-a1103e998535"). InnerVolumeSpecName "kube-api-access-s58ts". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:27:06 crc kubenswrapper[4558]: I0120 17:27:06.397325 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4869db4f-9810-4685-a2a3-a1103e998535-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4869db4f-9810-4685-a2a3-a1103e998535" (UID: "4869db4f-9810-4685-a2a3-a1103e998535"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:06 crc kubenswrapper[4558]: I0120 17:27:06.408970 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4869db4f-9810-4685-a2a3-a1103e998535-config" (OuterVolumeSpecName: "config") pod "4869db4f-9810-4685-a2a3-a1103e998535" (UID: "4869db4f-9810-4685-a2a3-a1103e998535"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:06 crc kubenswrapper[4558]: I0120 17:27:06.477158 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s58ts\" (UniqueName: \"kubernetes.io/projected/4869db4f-9810-4685-a2a3-a1103e998535-kube-api-access-s58ts\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:06 crc kubenswrapper[4558]: I0120 17:27:06.477205 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4869db4f-9810-4685-a2a3-a1103e998535-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:06 crc kubenswrapper[4558]: I0120 17:27:06.477215 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/4869db4f-9810-4685-a2a3-a1103e998535-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:06 crc kubenswrapper[4558]: I0120 17:27:06.819379 4558 generic.go:334] "Generic (PLEG): container finished" podID="f461d799-0027-4949-96bc-85e80ae9ec47" containerID="46211dbcac65c4f79d8afc41ddb2ceb06935611e997fd6f57f19214db5ea0343" exitCode=0 Jan 20 17:27:06 crc kubenswrapper[4558]: I0120 17:27:06.819468 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-h5hmh" event={"ID":"f461d799-0027-4949-96bc-85e80ae9ec47","Type":"ContainerDied","Data":"46211dbcac65c4f79d8afc41ddb2ceb06935611e997fd6f57f19214db5ea0343"} Jan 20 17:27:06 crc kubenswrapper[4558]: I0120 17:27:06.821387 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-sync-92t4c" event={"ID":"f59d0040-b44b-46e3-bb18-c0ea74722cd3","Type":"ContainerDied","Data":"d13f731237dd852df88ba7a06c430e35ee4a694754db0eb3ff387233ca07edb3"} Jan 20 17:27:06 crc kubenswrapper[4558]: I0120 17:27:06.821441 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d13f731237dd852df88ba7a06c430e35ee4a694754db0eb3ff387233ca07edb3" Jan 20 17:27:06 crc kubenswrapper[4558]: I0120 17:27:06.821404 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-db-sync-92t4c" Jan 20 17:27:06 crc kubenswrapper[4558]: I0120 17:27:06.823444 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:27:06 crc kubenswrapper[4558]: I0120 17:27:06.823490 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-sync-rq6qr" event={"ID":"4869db4f-9810-4685-a2a3-a1103e998535","Type":"ContainerDied","Data":"c95e60f455d86ccaf831ae20c373bfba7649b918008b3952256f6e249b18b5e4"} Jan 20 17:27:06 crc kubenswrapper[4558]: I0120 17:27:06.823565 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-db-sync-rq6qr" Jan 20 17:27:06 crc kubenswrapper[4558]: I0120 17:27:06.824413 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c95e60f455d86ccaf831ae20c373bfba7649b918008b3952256f6e249b18b5e4" Jan 20 17:27:06 crc kubenswrapper[4558]: I0120 17:27:06.869199 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:27:06 crc kubenswrapper[4558]: I0120 17:27:06.878223 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:27:06 crc kubenswrapper[4558]: I0120 17:27:06.890344 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:27:06 crc kubenswrapper[4558]: E0120 17:27:06.890845 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="17054bbc-d1b1-47ae-b42a-bc09c15b2b9b" containerName="proxy-httpd" Jan 20 17:27:06 crc kubenswrapper[4558]: I0120 17:27:06.890866 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="17054bbc-d1b1-47ae-b42a-bc09c15b2b9b" containerName="proxy-httpd" Jan 20 17:27:06 crc kubenswrapper[4558]: E0120 17:27:06.890884 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="17054bbc-d1b1-47ae-b42a-bc09c15b2b9b" containerName="ceilometer-notification-agent" Jan 20 17:27:06 crc kubenswrapper[4558]: I0120 17:27:06.890891 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="17054bbc-d1b1-47ae-b42a-bc09c15b2b9b" containerName="ceilometer-notification-agent" Jan 20 17:27:06 crc kubenswrapper[4558]: E0120 17:27:06.890904 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4869db4f-9810-4685-a2a3-a1103e998535" containerName="neutron-db-sync" Jan 20 17:27:06 crc kubenswrapper[4558]: I0120 17:27:06.890909 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4869db4f-9810-4685-a2a3-a1103e998535" containerName="neutron-db-sync" Jan 20 17:27:06 crc kubenswrapper[4558]: E0120 17:27:06.890932 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="17054bbc-d1b1-47ae-b42a-bc09c15b2b9b" containerName="ceilometer-central-agent" Jan 20 17:27:06 crc kubenswrapper[4558]: I0120 17:27:06.890939 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="17054bbc-d1b1-47ae-b42a-bc09c15b2b9b" containerName="ceilometer-central-agent" Jan 20 17:27:06 crc kubenswrapper[4558]: E0120 17:27:06.890953 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="17054bbc-d1b1-47ae-b42a-bc09c15b2b9b" containerName="sg-core" Jan 20 17:27:06 crc kubenswrapper[4558]: I0120 17:27:06.890958 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="17054bbc-d1b1-47ae-b42a-bc09c15b2b9b" containerName="sg-core" Jan 20 17:27:06 crc kubenswrapper[4558]: E0120 17:27:06.890974 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f59d0040-b44b-46e3-bb18-c0ea74722cd3" containerName="cinder-db-sync" Jan 20 17:27:06 crc kubenswrapper[4558]: I0120 17:27:06.890980 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f59d0040-b44b-46e3-bb18-c0ea74722cd3" containerName="cinder-db-sync" Jan 20 17:27:06 crc kubenswrapper[4558]: I0120 17:27:06.891227 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="17054bbc-d1b1-47ae-b42a-bc09c15b2b9b" containerName="sg-core" Jan 20 17:27:06 crc kubenswrapper[4558]: I0120 17:27:06.891252 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="4869db4f-9810-4685-a2a3-a1103e998535" 
containerName="neutron-db-sync" Jan 20 17:27:06 crc kubenswrapper[4558]: I0120 17:27:06.891263 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="17054bbc-d1b1-47ae-b42a-bc09c15b2b9b" containerName="ceilometer-central-agent" Jan 20 17:27:06 crc kubenswrapper[4558]: I0120 17:27:06.891276 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f59d0040-b44b-46e3-bb18-c0ea74722cd3" containerName="cinder-db-sync" Jan 20 17:27:06 crc kubenswrapper[4558]: I0120 17:27:06.891289 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="17054bbc-d1b1-47ae-b42a-bc09c15b2b9b" containerName="ceilometer-notification-agent" Jan 20 17:27:06 crc kubenswrapper[4558]: I0120 17:27:06.891299 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="17054bbc-d1b1-47ae-b42a-bc09c15b2b9b" containerName="proxy-httpd" Jan 20 17:27:06 crc kubenswrapper[4558]: I0120 17:27:06.893040 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:27:06 crc kubenswrapper[4558]: I0120 17:27:06.895863 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:27:06 crc kubenswrapper[4558]: I0120 17:27:06.896118 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ceilometer-internal-svc" Jan 20 17:27:06 crc kubenswrapper[4558]: I0120 17:27:06.896899 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:27:06 crc kubenswrapper[4558]: I0120 17:27:06.903643 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:27:06 crc kubenswrapper[4558]: I0120 17:27:06.997082 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-665b4c9c8d-9tcx6"] Jan 20 17:27:06 crc kubenswrapper[4558]: I0120 17:27:06.999339 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-665b4c9c8d-9tcx6" Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.026102 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.026407 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-api-0" podUID="174beb08-6b5c-40ef-9809-a4e17c718392" containerName="cinder-api-log" containerID="cri-o://8a7fa99f3021ee9c8829c1eabdc8f6e24a1e1ce1bc1bcf976f8b0fa8e46ef699" gracePeriod=30 Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.026867 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-api-0" podUID="174beb08-6b5c-40ef-9809-a4e17c718392" containerName="cinder-api" containerID="cri-o://025c28c93d5af302d74da36c6dd18d125f4d085beedce3d041db5da919c5c252" gracePeriod=30 Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.036373 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-665b4c9c8d-9tcx6"] Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.049211 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.091424 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/134fc5ef-70be-4c8c-aa72-e4f0440d0afe-log-httpd\") pod \"ceilometer-0\" (UID: \"134fc5ef-70be-4c8c-aa72-e4f0440d0afe\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.091477 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kxbmh\" (UniqueName: \"kubernetes.io/projected/134fc5ef-70be-4c8c-aa72-e4f0440d0afe-kube-api-access-kxbmh\") pod \"ceilometer-0\" (UID: \"134fc5ef-70be-4c8c-aa72-e4f0440d0afe\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.091535 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/134fc5ef-70be-4c8c-aa72-e4f0440d0afe-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"134fc5ef-70be-4c8c-aa72-e4f0440d0afe\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.091597 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/134fc5ef-70be-4c8c-aa72-e4f0440d0afe-run-httpd\") pod \"ceilometer-0\" (UID: \"134fc5ef-70be-4c8c-aa72-e4f0440d0afe\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.091624 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/134fc5ef-70be-4c8c-aa72-e4f0440d0afe-scripts\") pod \"ceilometer-0\" (UID: \"134fc5ef-70be-4c8c-aa72-e4f0440d0afe\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.091647 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/134fc5ef-70be-4c8c-aa72-e4f0440d0afe-config-data\") pod \"ceilometer-0\" (UID: \"134fc5ef-70be-4c8c-aa72-e4f0440d0afe\") " 
pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.091673 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/134fc5ef-70be-4c8c-aa72-e4f0440d0afe-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"134fc5ef-70be-4c8c-aa72-e4f0440d0afe\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.091708 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/134fc5ef-70be-4c8c-aa72-e4f0440d0afe-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"134fc5ef-70be-4c8c-aa72-e4f0440d0afe\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.201380 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/6dc46db7-9948-4c7d-b0a3-c767ffd58b4a-httpd-config\") pod \"neutron-665b4c9c8d-9tcx6\" (UID: \"6dc46db7-9948-4c7d-b0a3-c767ffd58b4a\") " pod="openstack-kuttl-tests/neutron-665b4c9c8d-9tcx6" Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.201421 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/6dc46db7-9948-4c7d-b0a3-c767ffd58b4a-config\") pod \"neutron-665b4c9c8d-9tcx6\" (UID: \"6dc46db7-9948-4c7d-b0a3-c767ffd58b4a\") " pod="openstack-kuttl-tests/neutron-665b4c9c8d-9tcx6" Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.201439 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6dc46db7-9948-4c7d-b0a3-c767ffd58b4a-public-tls-certs\") pod \"neutron-665b4c9c8d-9tcx6\" (UID: \"6dc46db7-9948-4c7d-b0a3-c767ffd58b4a\") " pod="openstack-kuttl-tests/neutron-665b4c9c8d-9tcx6" Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.201505 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/134fc5ef-70be-4c8c-aa72-e4f0440d0afe-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"134fc5ef-70be-4c8c-aa72-e4f0440d0afe\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.201531 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6dc46db7-9948-4c7d-b0a3-c767ffd58b4a-combined-ca-bundle\") pod \"neutron-665b4c9c8d-9tcx6\" (UID: \"6dc46db7-9948-4c7d-b0a3-c767ffd58b4a\") " pod="openstack-kuttl-tests/neutron-665b4c9c8d-9tcx6" Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.201655 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/134fc5ef-70be-4c8c-aa72-e4f0440d0afe-run-httpd\") pod \"ceilometer-0\" (UID: \"134fc5ef-70be-4c8c-aa72-e4f0440d0afe\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.201674 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6dc46db7-9948-4c7d-b0a3-c767ffd58b4a-internal-tls-certs\") pod \"neutron-665b4c9c8d-9tcx6\" (UID: \"6dc46db7-9948-4c7d-b0a3-c767ffd58b4a\") " 
pod="openstack-kuttl-tests/neutron-665b4c9c8d-9tcx6" Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.201708 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/6dc46db7-9948-4c7d-b0a3-c767ffd58b4a-ovndb-tls-certs\") pod \"neutron-665b4c9c8d-9tcx6\" (UID: \"6dc46db7-9948-4c7d-b0a3-c767ffd58b4a\") " pod="openstack-kuttl-tests/neutron-665b4c9c8d-9tcx6" Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.201735 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/134fc5ef-70be-4c8c-aa72-e4f0440d0afe-scripts\") pod \"ceilometer-0\" (UID: \"134fc5ef-70be-4c8c-aa72-e4f0440d0afe\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.201760 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4dh28\" (UniqueName: \"kubernetes.io/projected/6dc46db7-9948-4c7d-b0a3-c767ffd58b4a-kube-api-access-4dh28\") pod \"neutron-665b4c9c8d-9tcx6\" (UID: \"6dc46db7-9948-4c7d-b0a3-c767ffd58b4a\") " pod="openstack-kuttl-tests/neutron-665b4c9c8d-9tcx6" Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.201795 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/134fc5ef-70be-4c8c-aa72-e4f0440d0afe-config-data\") pod \"ceilometer-0\" (UID: \"134fc5ef-70be-4c8c-aa72-e4f0440d0afe\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.201836 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/134fc5ef-70be-4c8c-aa72-e4f0440d0afe-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"134fc5ef-70be-4c8c-aa72-e4f0440d0afe\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.201905 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/134fc5ef-70be-4c8c-aa72-e4f0440d0afe-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"134fc5ef-70be-4c8c-aa72-e4f0440d0afe\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.201983 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/134fc5ef-70be-4c8c-aa72-e4f0440d0afe-log-httpd\") pod \"ceilometer-0\" (UID: \"134fc5ef-70be-4c8c-aa72-e4f0440d0afe\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.202002 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kxbmh\" (UniqueName: \"kubernetes.io/projected/134fc5ef-70be-4c8c-aa72-e4f0440d0afe-kube-api-access-kxbmh\") pod \"ceilometer-0\" (UID: \"134fc5ef-70be-4c8c-aa72-e4f0440d0afe\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.202732 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/134fc5ef-70be-4c8c-aa72-e4f0440d0afe-run-httpd\") pod \"ceilometer-0\" (UID: \"134fc5ef-70be-4c8c-aa72-e4f0440d0afe\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.208875 4558 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/134fc5ef-70be-4c8c-aa72-e4f0440d0afe-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"134fc5ef-70be-4c8c-aa72-e4f0440d0afe\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.209389 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/134fc5ef-70be-4c8c-aa72-e4f0440d0afe-log-httpd\") pod \"ceilometer-0\" (UID: \"134fc5ef-70be-4c8c-aa72-e4f0440d0afe\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.209986 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/134fc5ef-70be-4c8c-aa72-e4f0440d0afe-scripts\") pod \"ceilometer-0\" (UID: \"134fc5ef-70be-4c8c-aa72-e4f0440d0afe\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.210928 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/134fc5ef-70be-4c8c-aa72-e4f0440d0afe-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"134fc5ef-70be-4c8c-aa72-e4f0440d0afe\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.211567 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/134fc5ef-70be-4c8c-aa72-e4f0440d0afe-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"134fc5ef-70be-4c8c-aa72-e4f0440d0afe\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.214321 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/134fc5ef-70be-4c8c-aa72-e4f0440d0afe-config-data\") pod \"ceilometer-0\" (UID: \"134fc5ef-70be-4c8c-aa72-e4f0440d0afe\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.219457 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kxbmh\" (UniqueName: \"kubernetes.io/projected/134fc5ef-70be-4c8c-aa72-e4f0440d0afe-kube-api-access-kxbmh\") pod \"ceilometer-0\" (UID: \"134fc5ef-70be-4c8c-aa72-e4f0440d0afe\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.226272 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.303538 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/6dc46db7-9948-4c7d-b0a3-c767ffd58b4a-ovndb-tls-certs\") pod \"neutron-665b4c9c8d-9tcx6\" (UID: \"6dc46db7-9948-4c7d-b0a3-c767ffd58b4a\") " pod="openstack-kuttl-tests/neutron-665b4c9c8d-9tcx6" Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.303582 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4dh28\" (UniqueName: \"kubernetes.io/projected/6dc46db7-9948-4c7d-b0a3-c767ffd58b4a-kube-api-access-4dh28\") pod \"neutron-665b4c9c8d-9tcx6\" (UID: \"6dc46db7-9948-4c7d-b0a3-c767ffd58b4a\") " pod="openstack-kuttl-tests/neutron-665b4c9c8d-9tcx6" Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.303688 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/6dc46db7-9948-4c7d-b0a3-c767ffd58b4a-httpd-config\") pod \"neutron-665b4c9c8d-9tcx6\" (UID: \"6dc46db7-9948-4c7d-b0a3-c767ffd58b4a\") " pod="openstack-kuttl-tests/neutron-665b4c9c8d-9tcx6" Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.303715 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/6dc46db7-9948-4c7d-b0a3-c767ffd58b4a-config\") pod \"neutron-665b4c9c8d-9tcx6\" (UID: \"6dc46db7-9948-4c7d-b0a3-c767ffd58b4a\") " pod="openstack-kuttl-tests/neutron-665b4c9c8d-9tcx6" Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.303731 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6dc46db7-9948-4c7d-b0a3-c767ffd58b4a-public-tls-certs\") pod \"neutron-665b4c9c8d-9tcx6\" (UID: \"6dc46db7-9948-4c7d-b0a3-c767ffd58b4a\") " pod="openstack-kuttl-tests/neutron-665b4c9c8d-9tcx6" Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.303763 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6dc46db7-9948-4c7d-b0a3-c767ffd58b4a-combined-ca-bundle\") pod \"neutron-665b4c9c8d-9tcx6\" (UID: \"6dc46db7-9948-4c7d-b0a3-c767ffd58b4a\") " pod="openstack-kuttl-tests/neutron-665b4c9c8d-9tcx6" Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.303831 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6dc46db7-9948-4c7d-b0a3-c767ffd58b4a-internal-tls-certs\") pod \"neutron-665b4c9c8d-9tcx6\" (UID: \"6dc46db7-9948-4c7d-b0a3-c767ffd58b4a\") " pod="openstack-kuttl-tests/neutron-665b4c9c8d-9tcx6" Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.308251 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6dc46db7-9948-4c7d-b0a3-c767ffd58b4a-internal-tls-certs\") pod \"neutron-665b4c9c8d-9tcx6\" (UID: \"6dc46db7-9948-4c7d-b0a3-c767ffd58b4a\") " pod="openstack-kuttl-tests/neutron-665b4c9c8d-9tcx6" Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.308897 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6dc46db7-9948-4c7d-b0a3-c767ffd58b4a-public-tls-certs\") pod \"neutron-665b4c9c8d-9tcx6\" (UID: \"6dc46db7-9948-4c7d-b0a3-c767ffd58b4a\") " 
pod="openstack-kuttl-tests/neutron-665b4c9c8d-9tcx6" Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.309620 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/6dc46db7-9948-4c7d-b0a3-c767ffd58b4a-httpd-config\") pod \"neutron-665b4c9c8d-9tcx6\" (UID: \"6dc46db7-9948-4c7d-b0a3-c767ffd58b4a\") " pod="openstack-kuttl-tests/neutron-665b4c9c8d-9tcx6" Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.312893 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/6dc46db7-9948-4c7d-b0a3-c767ffd58b4a-ovndb-tls-certs\") pod \"neutron-665b4c9c8d-9tcx6\" (UID: \"6dc46db7-9948-4c7d-b0a3-c767ffd58b4a\") " pod="openstack-kuttl-tests/neutron-665b4c9c8d-9tcx6" Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.319005 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6dc46db7-9948-4c7d-b0a3-c767ffd58b4a-combined-ca-bundle\") pod \"neutron-665b4c9c8d-9tcx6\" (UID: \"6dc46db7-9948-4c7d-b0a3-c767ffd58b4a\") " pod="openstack-kuttl-tests/neutron-665b4c9c8d-9tcx6" Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.322702 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4dh28\" (UniqueName: \"kubernetes.io/projected/6dc46db7-9948-4c7d-b0a3-c767ffd58b4a-kube-api-access-4dh28\") pod \"neutron-665b4c9c8d-9tcx6\" (UID: \"6dc46db7-9948-4c7d-b0a3-c767ffd58b4a\") " pod="openstack-kuttl-tests/neutron-665b4c9c8d-9tcx6" Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.323314 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/6dc46db7-9948-4c7d-b0a3-c767ffd58b4a-config\") pod \"neutron-665b4c9c8d-9tcx6\" (UID: \"6dc46db7-9948-4c7d-b0a3-c767ffd58b4a\") " pod="openstack-kuttl-tests/neutron-665b4c9c8d-9tcx6" Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.339639 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-665b4c9c8d-9tcx6" Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.375533 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-hbfss" Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.378216 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-db-sync-gn4wj" Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.507174 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hqhkn\" (UniqueName: \"kubernetes.io/projected/5f91fe62-3615-4153-a6c7-1652a5780bb5-kube-api-access-hqhkn\") pod \"5f91fe62-3615-4153-a6c7-1652a5780bb5\" (UID: \"5f91fe62-3615-4153-a6c7-1652a5780bb5\") " Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.507498 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04c5c6c7-acc2-4821-a68b-c2ac094e931f-combined-ca-bundle\") pod \"04c5c6c7-acc2-4821-a68b-c2ac094e931f\" (UID: \"04c5c6c7-acc2-4821-a68b-c2ac094e931f\") " Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.507540 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4ph6z\" (UniqueName: \"kubernetes.io/projected/04c5c6c7-acc2-4821-a68b-c2ac094e931f-kube-api-access-4ph6z\") pod \"04c5c6c7-acc2-4821-a68b-c2ac094e931f\" (UID: \"04c5c6c7-acc2-4821-a68b-c2ac094e931f\") " Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.507620 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04c5c6c7-acc2-4821-a68b-c2ac094e931f-config-data\") pod \"04c5c6c7-acc2-4821-a68b-c2ac094e931f\" (UID: \"04c5c6c7-acc2-4821-a68b-c2ac094e931f\") " Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.507674 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5f91fe62-3615-4153-a6c7-1652a5780bb5-db-sync-config-data\") pod \"5f91fe62-3615-4153-a6c7-1652a5780bb5\" (UID: \"5f91fe62-3615-4153-a6c7-1652a5780bb5\") " Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.507738 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f91fe62-3615-4153-a6c7-1652a5780bb5-combined-ca-bundle\") pod \"5f91fe62-3615-4153-a6c7-1652a5780bb5\" (UID: \"5f91fe62-3615-4153-a6c7-1652a5780bb5\") " Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.507811 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/04c5c6c7-acc2-4821-a68b-c2ac094e931f-scripts\") pod \"04c5c6c7-acc2-4821-a68b-c2ac094e931f\" (UID: \"04c5c6c7-acc2-4821-a68b-c2ac094e931f\") " Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.507827 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f91fe62-3615-4153-a6c7-1652a5780bb5-config-data\") pod \"5f91fe62-3615-4153-a6c7-1652a5780bb5\" (UID: \"5f91fe62-3615-4153-a6c7-1652a5780bb5\") " Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.511702 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f91fe62-3615-4153-a6c7-1652a5780bb5-kube-api-access-hqhkn" (OuterVolumeSpecName: "kube-api-access-hqhkn") pod "5f91fe62-3615-4153-a6c7-1652a5780bb5" (UID: "5f91fe62-3615-4153-a6c7-1652a5780bb5"). InnerVolumeSpecName "kube-api-access-hqhkn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.515244 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f91fe62-3615-4153-a6c7-1652a5780bb5-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "5f91fe62-3615-4153-a6c7-1652a5780bb5" (UID: "5f91fe62-3615-4153-a6c7-1652a5780bb5"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.515270 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/04c5c6c7-acc2-4821-a68b-c2ac094e931f-kube-api-access-4ph6z" (OuterVolumeSpecName: "kube-api-access-4ph6z") pod "04c5c6c7-acc2-4821-a68b-c2ac094e931f" (UID: "04c5c6c7-acc2-4821-a68b-c2ac094e931f"). InnerVolumeSpecName "kube-api-access-4ph6z". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.515387 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04c5c6c7-acc2-4821-a68b-c2ac094e931f-scripts" (OuterVolumeSpecName: "scripts") pod "04c5c6c7-acc2-4821-a68b-c2ac094e931f" (UID: "04c5c6c7-acc2-4821-a68b-c2ac094e931f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.534006 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04c5c6c7-acc2-4821-a68b-c2ac094e931f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "04c5c6c7-acc2-4821-a68b-c2ac094e931f" (UID: "04c5c6c7-acc2-4821-a68b-c2ac094e931f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.539386 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f91fe62-3615-4153-a6c7-1652a5780bb5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5f91fe62-3615-4153-a6c7-1652a5780bb5" (UID: "5f91fe62-3615-4153-a6c7-1652a5780bb5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.541571 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04c5c6c7-acc2-4821-a68b-c2ac094e931f-config-data" (OuterVolumeSpecName: "config-data") pod "04c5c6c7-acc2-4821-a68b-c2ac094e931f" (UID: "04c5c6c7-acc2-4821-a68b-c2ac094e931f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.554432 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f91fe62-3615-4153-a6c7-1652a5780bb5-config-data" (OuterVolumeSpecName: "config-data") pod "5f91fe62-3615-4153-a6c7-1652a5780bb5" (UID: "5f91fe62-3615-4153-a6c7-1652a5780bb5"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.611621 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4ph6z\" (UniqueName: \"kubernetes.io/projected/04c5c6c7-acc2-4821-a68b-c2ac094e931f-kube-api-access-4ph6z\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.611711 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04c5c6c7-acc2-4821-a68b-c2ac094e931f-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.611803 4558 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/5f91fe62-3615-4153-a6c7-1652a5780bb5-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.611868 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f91fe62-3615-4153-a6c7-1652a5780bb5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.611921 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/04c5c6c7-acc2-4821-a68b-c2ac094e931f-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.611945 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f91fe62-3615-4153-a6c7-1652a5780bb5-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.611985 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hqhkn\" (UniqueName: \"kubernetes.io/projected/5f91fe62-3615-4153-a6c7-1652a5780bb5-kube-api-access-hqhkn\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.612068 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04c5c6c7-acc2-4821-a68b-c2ac094e931f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.707535 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:27:07 crc kubenswrapper[4558]: W0120 17:27:07.709935 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod134fc5ef_70be_4c8c_aa72_e4f0440d0afe.slice/crio-db0135b9134d791f9b0b19e85f18556a0c64916735fe2422f1ad1b4a39439c1b WatchSource:0}: Error finding container db0135b9134d791f9b0b19e85f18556a0c64916735fe2422f1ad1b4a39439c1b: Status 404 returned error can't find the container with id db0135b9134d791f9b0b19e85f18556a0c64916735fe2422f1ad1b4a39439c1b Jan 20 17:27:07 crc kubenswrapper[4558]: W0120 17:27:07.780952 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6dc46db7_9948_4c7d_b0a3_c767ffd58b4a.slice/crio-1bdb7fc42551eedc4e540256310d9b189a67291e02eb1547c3e8edcba58be288 WatchSource:0}: Error finding container 1bdb7fc42551eedc4e540256310d9b189a67291e02eb1547c3e8edcba58be288: Status 404 returned error can't find the container with id 1bdb7fc42551eedc4e540256310d9b189a67291e02eb1547c3e8edcba58be288 Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.788442 4558 kubelet.go:2428] "SyncLoop UPDATE" 
source="api" pods=["openstack-kuttl-tests/neutron-665b4c9c8d-9tcx6"] Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.848894 4558 generic.go:334] "Generic (PLEG): container finished" podID="174beb08-6b5c-40ef-9809-a4e17c718392" containerID="8a7fa99f3021ee9c8829c1eabdc8f6e24a1e1ce1bc1bcf976f8b0fa8e46ef699" exitCode=143 Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.848977 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"174beb08-6b5c-40ef-9809-a4e17c718392","Type":"ContainerDied","Data":"8a7fa99f3021ee9c8829c1eabdc8f6e24a1e1ce1bc1bcf976f8b0fa8e46ef699"} Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.850429 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-665b4c9c8d-9tcx6" event={"ID":"6dc46db7-9948-4c7d-b0a3-c767ffd58b4a","Type":"ContainerStarted","Data":"1bdb7fc42551eedc4e540256310d9b189a67291e02eb1547c3e8edcba58be288"} Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.851939 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-hbfss" event={"ID":"04c5c6c7-acc2-4821-a68b-c2ac094e931f","Type":"ContainerDied","Data":"48b32c6cd59db0df143887c227616ca6d4f68d463ec76e308c787483019e261c"} Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.851981 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-hbfss" Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.851984 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="48b32c6cd59db0df143887c227616ca6d4f68d463ec76e308c787483019e261c" Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.854042 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-sync-gn4wj" event={"ID":"5f91fe62-3615-4153-a6c7-1652a5780bb5","Type":"ContainerDied","Data":"80d9543d95ec125bdd2c22a852c98aeaffb38bc4939c5bf2f6dadc8b3e90677f"} Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.854072 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="80d9543d95ec125bdd2c22a852c98aeaffb38bc4939c5bf2f6dadc8b3e90677f" Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.854123 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-db-sync-gn4wj" Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.855151 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"134fc5ef-70be-4c8c-aa72-e4f0440d0afe","Type":"ContainerStarted","Data":"db0135b9134d791f9b0b19e85f18556a0c64916735fe2422f1ad1b4a39439c1b"} Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.906926 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.907176 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podUID="d7d891f4-8ef8-4a21-9c7f-36088b864f7f" containerName="nova-cell1-conductor-conductor" containerID="cri-o://3dde84a4d4b2d41242c14681a10217ff909436ad4cbf651a6f8291244c16021c" gracePeriod=30 Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.937958 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.939598 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd" containerName="glance-log" containerID="cri-o://22a976775c577de36fb22efa39255300a73eb567891d714dec5fe2063f3429ce" gracePeriod=30 Jan 20 17:27:07 crc kubenswrapper[4558]: I0120 17:27:07.940017 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd" containerName="glance-httpd" containerID="cri-o://ca56992fbef2886a2ace481786936757fbf282ec4e0bc3e84966541d89eaaf75" gracePeriod=30 Jan 20 17:27:08 crc kubenswrapper[4558]: I0120 17:27:08.013585 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:27:08 crc kubenswrapper[4558]: I0120 17:27:08.013807 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="269584c1-a4ed-43e5-a1cb-1e5e6952df11" containerName="glance-log" containerID="cri-o://da0ca9294c16d1999fa49c21fdd4c31261941cef28b785b5ef05cdd451846e52" gracePeriod=30 Jan 20 17:27:08 crc kubenswrapper[4558]: I0120 17:27:08.015371 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="269584c1-a4ed-43e5-a1cb-1e5e6952df11" containerName="glance-httpd" containerID="cri-o://bc74e5cb26055978318c9f2b43ca67fd1b3e4cc5ce3e22f2e91cfdfd0a3b5a4e" gracePeriod=30 Jan 20 17:27:08 crc kubenswrapper[4558]: E0120 17:27:08.076078 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="3dde84a4d4b2d41242c14681a10217ff909436ad4cbf651a6f8291244c16021c" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:27:08 crc kubenswrapper[4558]: E0120 17:27:08.082553 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="3dde84a4d4b2d41242c14681a10217ff909436ad4cbf651a6f8291244c16021c" 
cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:27:08 crc kubenswrapper[4558]: E0120 17:27:08.083722 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="3dde84a4d4b2d41242c14681a10217ff909436ad4cbf651a6f8291244c16021c" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:27:08 crc kubenswrapper[4558]: E0120 17:27:08.083758 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podUID="d7d891f4-8ef8-4a21-9c7f-36088b864f7f" containerName="nova-cell1-conductor-conductor" Jan 20 17:27:08 crc kubenswrapper[4558]: I0120 17:27:08.254064 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-h5hmh" Jan 20 17:27:08 crc kubenswrapper[4558]: I0120 17:27:08.434143 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f461d799-0027-4949-96bc-85e80ae9ec47-config-data\") pod \"f461d799-0027-4949-96bc-85e80ae9ec47\" (UID: \"f461d799-0027-4949-96bc-85e80ae9ec47\") " Jan 20 17:27:08 crc kubenswrapper[4558]: I0120 17:27:08.434274 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2cvf4\" (UniqueName: \"kubernetes.io/projected/f461d799-0027-4949-96bc-85e80ae9ec47-kube-api-access-2cvf4\") pod \"f461d799-0027-4949-96bc-85e80ae9ec47\" (UID: \"f461d799-0027-4949-96bc-85e80ae9ec47\") " Jan 20 17:27:08 crc kubenswrapper[4558]: I0120 17:27:08.434409 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f461d799-0027-4949-96bc-85e80ae9ec47-scripts\") pod \"f461d799-0027-4949-96bc-85e80ae9ec47\" (UID: \"f461d799-0027-4949-96bc-85e80ae9ec47\") " Jan 20 17:27:08 crc kubenswrapper[4558]: I0120 17:27:08.434450 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f461d799-0027-4949-96bc-85e80ae9ec47-combined-ca-bundle\") pod \"f461d799-0027-4949-96bc-85e80ae9ec47\" (UID: \"f461d799-0027-4949-96bc-85e80ae9ec47\") " Jan 20 17:27:08 crc kubenswrapper[4558]: I0120 17:27:08.451324 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f461d799-0027-4949-96bc-85e80ae9ec47-kube-api-access-2cvf4" (OuterVolumeSpecName: "kube-api-access-2cvf4") pod "f461d799-0027-4949-96bc-85e80ae9ec47" (UID: "f461d799-0027-4949-96bc-85e80ae9ec47"). InnerVolumeSpecName "kube-api-access-2cvf4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:27:08 crc kubenswrapper[4558]: I0120 17:27:08.451535 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f461d799-0027-4949-96bc-85e80ae9ec47-scripts" (OuterVolumeSpecName: "scripts") pod "f461d799-0027-4949-96bc-85e80ae9ec47" (UID: "f461d799-0027-4949-96bc-85e80ae9ec47"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:08 crc kubenswrapper[4558]: I0120 17:27:08.473835 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f461d799-0027-4949-96bc-85e80ae9ec47-config-data" (OuterVolumeSpecName: "config-data") pod "f461d799-0027-4949-96bc-85e80ae9ec47" (UID: "f461d799-0027-4949-96bc-85e80ae9ec47"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:08 crc kubenswrapper[4558]: I0120 17:27:08.486469 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f461d799-0027-4949-96bc-85e80ae9ec47-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f461d799-0027-4949-96bc-85e80ae9ec47" (UID: "f461d799-0027-4949-96bc-85e80ae9ec47"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:08 crc kubenswrapper[4558]: I0120 17:27:08.538212 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f461d799-0027-4949-96bc-85e80ae9ec47-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:08 crc kubenswrapper[4558]: I0120 17:27:08.538244 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2cvf4\" (UniqueName: \"kubernetes.io/projected/f461d799-0027-4949-96bc-85e80ae9ec47-kube-api-access-2cvf4\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:08 crc kubenswrapper[4558]: I0120 17:27:08.538257 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f461d799-0027-4949-96bc-85e80ae9ec47-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:08 crc kubenswrapper[4558]: I0120 17:27:08.538270 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f461d799-0027-4949-96bc-85e80ae9ec47-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:08 crc kubenswrapper[4558]: I0120 17:27:08.576328 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="17054bbc-d1b1-47ae-b42a-bc09c15b2b9b" path="/var/lib/kubelet/pods/17054bbc-d1b1-47ae-b42a-bc09c15b2b9b/volumes" Jan 20 17:27:08 crc kubenswrapper[4558]: I0120 17:27:08.864814 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"134fc5ef-70be-4c8c-aa72-e4f0440d0afe","Type":"ContainerStarted","Data":"8ac13b7e76f880d4b690e8245e9e454bb0f7d3d01a25e904b05a000a609f936a"} Jan 20 17:27:08 crc kubenswrapper[4558]: I0120 17:27:08.866929 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-665b4c9c8d-9tcx6" event={"ID":"6dc46db7-9948-4c7d-b0a3-c767ffd58b4a","Type":"ContainerStarted","Data":"2b441621415d66b4dd582cca5044009b53d19bc346f62f7de3d1c998664de90b"} Jan 20 17:27:08 crc kubenswrapper[4558]: I0120 17:27:08.866973 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-665b4c9c8d-9tcx6" event={"ID":"6dc46db7-9948-4c7d-b0a3-c767ffd58b4a","Type":"ContainerStarted","Data":"76433ae79aade2e04de13538618917ea1b5001e82383e84d8122e38de55ebbe8"} Jan 20 17:27:08 crc kubenswrapper[4558]: I0120 17:27:08.867941 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/neutron-665b4c9c8d-9tcx6" Jan 20 17:27:08 crc kubenswrapper[4558]: I0120 17:27:08.870881 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-h5hmh" event={"ID":"f461d799-0027-4949-96bc-85e80ae9ec47","Type":"ContainerDied","Data":"2ac8c57e21966494d98f82860a871152827ce4bf713928615d45e9156c21b508"} Jan 20 17:27:08 crc kubenswrapper[4558]: I0120 17:27:08.870916 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2ac8c57e21966494d98f82860a871152827ce4bf713928615d45e9156c21b508" Jan 20 17:27:08 crc kubenswrapper[4558]: I0120 17:27:08.870885 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-h5hmh" Jan 20 17:27:08 crc kubenswrapper[4558]: I0120 17:27:08.873920 4558 generic.go:334] "Generic (PLEG): container finished" podID="269584c1-a4ed-43e5-a1cb-1e5e6952df11" containerID="da0ca9294c16d1999fa49c21fdd4c31261941cef28b785b5ef05cdd451846e52" exitCode=143 Jan 20 17:27:08 crc kubenswrapper[4558]: I0120 17:27:08.874019 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"269584c1-a4ed-43e5-a1cb-1e5e6952df11","Type":"ContainerDied","Data":"da0ca9294c16d1999fa49c21fdd4c31261941cef28b785b5ef05cdd451846e52"} Jan 20 17:27:08 crc kubenswrapper[4558]: I0120 17:27:08.876277 4558 generic.go:334] "Generic (PLEG): container finished" podID="c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd" containerID="22a976775c577de36fb22efa39255300a73eb567891d714dec5fe2063f3429ce" exitCode=143 Jan 20 17:27:08 crc kubenswrapper[4558]: I0120 17:27:08.876314 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd","Type":"ContainerDied","Data":"22a976775c577de36fb22efa39255300a73eb567891d714dec5fe2063f3429ce"} Jan 20 17:27:08 crc kubenswrapper[4558]: I0120 17:27:08.893666 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/neutron-665b4c9c8d-9tcx6" podStartSLOduration=2.893646745 podStartE2EDuration="2.893646745s" podCreationTimestamp="2026-01-20 17:27:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:27:08.885142446 +0000 UTC m=+2722.645480413" watchObservedRunningTime="2026-01-20 17:27:08.893646745 +0000 UTC m=+2722.653984712" Jan 20 17:27:09 crc kubenswrapper[4558]: I0120 17:27:09.264893 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:27:09 crc kubenswrapper[4558]: I0120 17:27:09.265304 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="5d490bdc-81eb-42cd-9a5d-ae4a8d2b4e51" containerName="nova-cell0-conductor-conductor" containerID="cri-o://4954099cb46eb467eb55768a743f3ad68bf55146245f1fb2f43a81ea0ff802cc" gracePeriod=30 Jan 20 17:27:09 crc kubenswrapper[4558]: I0120 17:27:09.532083 4558 scope.go:117] "RemoveContainer" containerID="6bfc7c4eeeabb7764777f76623bd506d3ce621420962d421971008ad73e73f4f" Jan 20 17:27:09 crc kubenswrapper[4558]: I0120 17:27:09.553370 4558 scope.go:117] "RemoveContainer" containerID="b34a6c671f617790fb62ac67cbbca0c103dc87a0f332ae0fb1b00118d75fcf77" Jan 20 17:27:09 crc kubenswrapper[4558]: I0120 17:27:09.889106 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" 
event={"ID":"134fc5ef-70be-4c8c-aa72-e4f0440d0afe","Type":"ContainerStarted","Data":"98bb94af3fa40dc3d1b7b4c99b5349d836b492c3cec3acd61d7cae45c5b34d12"} Jan 20 17:27:10 crc kubenswrapper[4558]: E0120 17:27:10.019363 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="4954099cb46eb467eb55768a743f3ad68bf55146245f1fb2f43a81ea0ff802cc" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:27:10 crc kubenswrapper[4558]: E0120 17:27:10.020993 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="4954099cb46eb467eb55768a743f3ad68bf55146245f1fb2f43a81ea0ff802cc" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:27:10 crc kubenswrapper[4558]: E0120 17:27:10.022835 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="4954099cb46eb467eb55768a743f3ad68bf55146245f1fb2f43a81ea0ff802cc" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:27:10 crc kubenswrapper[4558]: E0120 17:27:10.022897 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="5d490bdc-81eb-42cd-9a5d-ae4a8d2b4e51" containerName="nova-cell0-conductor-conductor" Jan 20 17:27:10 crc kubenswrapper[4558]: I0120 17:27:10.181216 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/cinder-api-0" podUID="174beb08-6b5c-40ef-9809-a4e17c718392" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.1.18:8776/healthcheck\": read tcp 10.217.0.2:40986->10.217.1.18:8776: read: connection reset by peer" Jan 20 17:27:10 crc kubenswrapper[4558]: I0120 17:27:10.590282 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:27:10 crc kubenswrapper[4558]: I0120 17:27:10.684827 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/174beb08-6b5c-40ef-9809-a4e17c718392-public-tls-certs\") pod \"174beb08-6b5c-40ef-9809-a4e17c718392\" (UID: \"174beb08-6b5c-40ef-9809-a4e17c718392\") " Jan 20 17:27:10 crc kubenswrapper[4558]: I0120 17:27:10.684873 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wpps6\" (UniqueName: \"kubernetes.io/projected/174beb08-6b5c-40ef-9809-a4e17c718392-kube-api-access-wpps6\") pod \"174beb08-6b5c-40ef-9809-a4e17c718392\" (UID: \"174beb08-6b5c-40ef-9809-a4e17c718392\") " Jan 20 17:27:10 crc kubenswrapper[4558]: I0120 17:27:10.684944 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/174beb08-6b5c-40ef-9809-a4e17c718392-config-data\") pod \"174beb08-6b5c-40ef-9809-a4e17c718392\" (UID: \"174beb08-6b5c-40ef-9809-a4e17c718392\") " Jan 20 17:27:10 crc kubenswrapper[4558]: I0120 17:27:10.684968 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/174beb08-6b5c-40ef-9809-a4e17c718392-logs\") pod \"174beb08-6b5c-40ef-9809-a4e17c718392\" (UID: \"174beb08-6b5c-40ef-9809-a4e17c718392\") " Jan 20 17:27:10 crc kubenswrapper[4558]: I0120 17:27:10.685025 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/174beb08-6b5c-40ef-9809-a4e17c718392-combined-ca-bundle\") pod \"174beb08-6b5c-40ef-9809-a4e17c718392\" (UID: \"174beb08-6b5c-40ef-9809-a4e17c718392\") " Jan 20 17:27:10 crc kubenswrapper[4558]: I0120 17:27:10.685060 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/174beb08-6b5c-40ef-9809-a4e17c718392-internal-tls-certs\") pod \"174beb08-6b5c-40ef-9809-a4e17c718392\" (UID: \"174beb08-6b5c-40ef-9809-a4e17c718392\") " Jan 20 17:27:10 crc kubenswrapper[4558]: I0120 17:27:10.685094 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/174beb08-6b5c-40ef-9809-a4e17c718392-etc-machine-id\") pod \"174beb08-6b5c-40ef-9809-a4e17c718392\" (UID: \"174beb08-6b5c-40ef-9809-a4e17c718392\") " Jan 20 17:27:10 crc kubenswrapper[4558]: I0120 17:27:10.685147 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/174beb08-6b5c-40ef-9809-a4e17c718392-config-data-custom\") pod \"174beb08-6b5c-40ef-9809-a4e17c718392\" (UID: \"174beb08-6b5c-40ef-9809-a4e17c718392\") " Jan 20 17:27:10 crc kubenswrapper[4558]: I0120 17:27:10.685183 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/174beb08-6b5c-40ef-9809-a4e17c718392-scripts\") pod \"174beb08-6b5c-40ef-9809-a4e17c718392\" (UID: \"174beb08-6b5c-40ef-9809-a4e17c718392\") " Jan 20 17:27:10 crc kubenswrapper[4558]: I0120 17:27:10.686453 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/174beb08-6b5c-40ef-9809-a4e17c718392-logs" (OuterVolumeSpecName: "logs") pod "174beb08-6b5c-40ef-9809-a4e17c718392" (UID: 
"174beb08-6b5c-40ef-9809-a4e17c718392"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:27:10 crc kubenswrapper[4558]: I0120 17:27:10.691438 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/174beb08-6b5c-40ef-9809-a4e17c718392-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "174beb08-6b5c-40ef-9809-a4e17c718392" (UID: "174beb08-6b5c-40ef-9809-a4e17c718392"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:27:10 crc kubenswrapper[4558]: I0120 17:27:10.696338 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/174beb08-6b5c-40ef-9809-a4e17c718392-scripts" (OuterVolumeSpecName: "scripts") pod "174beb08-6b5c-40ef-9809-a4e17c718392" (UID: "174beb08-6b5c-40ef-9809-a4e17c718392"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:10 crc kubenswrapper[4558]: I0120 17:27:10.703405 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/174beb08-6b5c-40ef-9809-a4e17c718392-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "174beb08-6b5c-40ef-9809-a4e17c718392" (UID: "174beb08-6b5c-40ef-9809-a4e17c718392"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:10 crc kubenswrapper[4558]: I0120 17:27:10.705299 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/174beb08-6b5c-40ef-9809-a4e17c718392-kube-api-access-wpps6" (OuterVolumeSpecName: "kube-api-access-wpps6") pod "174beb08-6b5c-40ef-9809-a4e17c718392" (UID: "174beb08-6b5c-40ef-9809-a4e17c718392"). InnerVolumeSpecName "kube-api-access-wpps6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:27:10 crc kubenswrapper[4558]: I0120 17:27:10.745481 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/174beb08-6b5c-40ef-9809-a4e17c718392-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "174beb08-6b5c-40ef-9809-a4e17c718392" (UID: "174beb08-6b5c-40ef-9809-a4e17c718392"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:10 crc kubenswrapper[4558]: I0120 17:27:10.751759 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/174beb08-6b5c-40ef-9809-a4e17c718392-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "174beb08-6b5c-40ef-9809-a4e17c718392" (UID: "174beb08-6b5c-40ef-9809-a4e17c718392"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:10 crc kubenswrapper[4558]: I0120 17:27:10.760648 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/174beb08-6b5c-40ef-9809-a4e17c718392-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "174beb08-6b5c-40ef-9809-a4e17c718392" (UID: "174beb08-6b5c-40ef-9809-a4e17c718392"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:10 crc kubenswrapper[4558]: I0120 17:27:10.769131 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/174beb08-6b5c-40ef-9809-a4e17c718392-config-data" (OuterVolumeSpecName: "config-data") pod "174beb08-6b5c-40ef-9809-a4e17c718392" (UID: "174beb08-6b5c-40ef-9809-a4e17c718392"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:10 crc kubenswrapper[4558]: I0120 17:27:10.791194 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/174beb08-6b5c-40ef-9809-a4e17c718392-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:10 crc kubenswrapper[4558]: I0120 17:27:10.791230 4558 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/174beb08-6b5c-40ef-9809-a4e17c718392-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:10 crc kubenswrapper[4558]: I0120 17:27:10.791243 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/174beb08-6b5c-40ef-9809-a4e17c718392-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:10 crc kubenswrapper[4558]: I0120 17:27:10.791252 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/174beb08-6b5c-40ef-9809-a4e17c718392-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:10 crc kubenswrapper[4558]: I0120 17:27:10.791261 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/174beb08-6b5c-40ef-9809-a4e17c718392-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:10 crc kubenswrapper[4558]: I0120 17:27:10.791271 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wpps6\" (UniqueName: \"kubernetes.io/projected/174beb08-6b5c-40ef-9809-a4e17c718392-kube-api-access-wpps6\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:10 crc kubenswrapper[4558]: I0120 17:27:10.791281 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/174beb08-6b5c-40ef-9809-a4e17c718392-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:10 crc kubenswrapper[4558]: I0120 17:27:10.791290 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/174beb08-6b5c-40ef-9809-a4e17c718392-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:10 crc kubenswrapper[4558]: I0120 17:27:10.791298 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/174beb08-6b5c-40ef-9809-a4e17c718392-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:10 crc kubenswrapper[4558]: I0120 17:27:10.903046 4558 generic.go:334] "Generic (PLEG): container finished" podID="d7d891f4-8ef8-4a21-9c7f-36088b864f7f" containerID="3dde84a4d4b2d41242c14681a10217ff909436ad4cbf651a6f8291244c16021c" exitCode=0 Jan 20 17:27:10 crc kubenswrapper[4558]: I0120 17:27:10.903336 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"d7d891f4-8ef8-4a21-9c7f-36088b864f7f","Type":"ContainerDied","Data":"3dde84a4d4b2d41242c14681a10217ff909436ad4cbf651a6f8291244c16021c"} Jan 20 17:27:10 crc kubenswrapper[4558]: I0120 17:27:10.907346 4558 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"134fc5ef-70be-4c8c-aa72-e4f0440d0afe","Type":"ContainerStarted","Data":"99d707bce7e8a2bd68ad6ab0135f5762171740a9747fee9d19f3dc67af0b93db"} Jan 20 17:27:10 crc kubenswrapper[4558]: I0120 17:27:10.916722 4558 generic.go:334] "Generic (PLEG): container finished" podID="174beb08-6b5c-40ef-9809-a4e17c718392" containerID="025c28c93d5af302d74da36c6dd18d125f4d085beedce3d041db5da919c5c252" exitCode=0 Jan 20 17:27:10 crc kubenswrapper[4558]: I0120 17:27:10.916748 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"174beb08-6b5c-40ef-9809-a4e17c718392","Type":"ContainerDied","Data":"025c28c93d5af302d74da36c6dd18d125f4d085beedce3d041db5da919c5c252"} Jan 20 17:27:10 crc kubenswrapper[4558]: I0120 17:27:10.916784 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"174beb08-6b5c-40ef-9809-a4e17c718392","Type":"ContainerDied","Data":"a1b8ae8a6fb04666cfb0a4f2a9ffa1b84b72f9d6e76dfc7648b09de4c44e719b"} Jan 20 17:27:10 crc kubenswrapper[4558]: I0120 17:27:10.916804 4558 scope.go:117] "RemoveContainer" containerID="025c28c93d5af302d74da36c6dd18d125f4d085beedce3d041db5da919c5c252" Jan 20 17:27:10 crc kubenswrapper[4558]: I0120 17:27:10.916810 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:27:10 crc kubenswrapper[4558]: I0120 17:27:10.942658 4558 scope.go:117] "RemoveContainer" containerID="8a7fa99f3021ee9c8829c1eabdc8f6e24a1e1ce1bc1bcf976f8b0fa8e46ef699" Jan 20 17:27:10 crc kubenswrapper[4558]: I0120 17:27:10.969953 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:27:10 crc kubenswrapper[4558]: I0120 17:27:10.983279 4558 scope.go:117] "RemoveContainer" containerID="025c28c93d5af302d74da36c6dd18d125f4d085beedce3d041db5da919c5c252" Jan 20 17:27:10 crc kubenswrapper[4558]: I0120 17:27:10.983480 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:27:10 crc kubenswrapper[4558]: E0120 17:27:10.990069 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"025c28c93d5af302d74da36c6dd18d125f4d085beedce3d041db5da919c5c252\": container with ID starting with 025c28c93d5af302d74da36c6dd18d125f4d085beedce3d041db5da919c5c252 not found: ID does not exist" containerID="025c28c93d5af302d74da36c6dd18d125f4d085beedce3d041db5da919c5c252" Jan 20 17:27:10 crc kubenswrapper[4558]: I0120 17:27:10.990115 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"025c28c93d5af302d74da36c6dd18d125f4d085beedce3d041db5da919c5c252"} err="failed to get container status \"025c28c93d5af302d74da36c6dd18d125f4d085beedce3d041db5da919c5c252\": rpc error: code = NotFound desc = could not find container \"025c28c93d5af302d74da36c6dd18d125f4d085beedce3d041db5da919c5c252\": container with ID starting with 025c28c93d5af302d74da36c6dd18d125f4d085beedce3d041db5da919c5c252 not found: ID does not exist" Jan 20 17:27:10 crc kubenswrapper[4558]: I0120 17:27:10.990143 4558 scope.go:117] "RemoveContainer" containerID="8a7fa99f3021ee9c8829c1eabdc8f6e24a1e1ce1bc1bcf976f8b0fa8e46ef699" Jan 20 17:27:10 crc kubenswrapper[4558]: E0120 17:27:10.991374 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8a7fa99f3021ee9c8829c1eabdc8f6e24a1e1ce1bc1bcf976f8b0fa8e46ef699\": container with ID starting with 8a7fa99f3021ee9c8829c1eabdc8f6e24a1e1ce1bc1bcf976f8b0fa8e46ef699 not found: ID does not exist" containerID="8a7fa99f3021ee9c8829c1eabdc8f6e24a1e1ce1bc1bcf976f8b0fa8e46ef699" Jan 20 17:27:10 crc kubenswrapper[4558]: I0120 17:27:10.991413 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8a7fa99f3021ee9c8829c1eabdc8f6e24a1e1ce1bc1bcf976f8b0fa8e46ef699"} err="failed to get container status \"8a7fa99f3021ee9c8829c1eabdc8f6e24a1e1ce1bc1bcf976f8b0fa8e46ef699\": rpc error: code = NotFound desc = could not find container \"8a7fa99f3021ee9c8829c1eabdc8f6e24a1e1ce1bc1bcf976f8b0fa8e46ef699\": container with ID starting with 8a7fa99f3021ee9c8829c1eabdc8f6e24a1e1ce1bc1bcf976f8b0fa8e46ef699 not found: ID does not exist" Jan 20 17:27:10 crc kubenswrapper[4558]: I0120 17:27:10.999216 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.001073 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7d891f4-8ef8-4a21-9c7f-36088b864f7f-combined-ca-bundle\") pod \"d7d891f4-8ef8-4a21-9c7f-36088b864f7f\" (UID: \"d7d891f4-8ef8-4a21-9c7f-36088b864f7f\") " Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.001274 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qm5t5\" (UniqueName: \"kubernetes.io/projected/d7d891f4-8ef8-4a21-9c7f-36088b864f7f-kube-api-access-qm5t5\") pod \"d7d891f4-8ef8-4a21-9c7f-36088b864f7f\" (UID: \"d7d891f4-8ef8-4a21-9c7f-36088b864f7f\") " Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.001653 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7d891f4-8ef8-4a21-9c7f-36088b864f7f-config-data\") pod \"d7d891f4-8ef8-4a21-9c7f-36088b864f7f\" 
(UID: \"d7d891f4-8ef8-4a21-9c7f-36088b864f7f\") " Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.019188 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:27:11 crc kubenswrapper[4558]: E0120 17:27:11.019744 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04c5c6c7-acc2-4821-a68b-c2ac094e931f" containerName="nova-cell1-conductor-db-sync" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.019783 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="04c5c6c7-acc2-4821-a68b-c2ac094e931f" containerName="nova-cell1-conductor-db-sync" Jan 20 17:27:11 crc kubenswrapper[4558]: E0120 17:27:11.019816 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="174beb08-6b5c-40ef-9809-a4e17c718392" containerName="cinder-api" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.019823 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="174beb08-6b5c-40ef-9809-a4e17c718392" containerName="cinder-api" Jan 20 17:27:11 crc kubenswrapper[4558]: E0120 17:27:11.019840 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7d891f4-8ef8-4a21-9c7f-36088b864f7f" containerName="nova-cell1-conductor-conductor" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.019846 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7d891f4-8ef8-4a21-9c7f-36088b864f7f" containerName="nova-cell1-conductor-conductor" Jan 20 17:27:11 crc kubenswrapper[4558]: E0120 17:27:11.019863 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f461d799-0027-4949-96bc-85e80ae9ec47" containerName="nova-cell0-conductor-db-sync" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.019871 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f461d799-0027-4949-96bc-85e80ae9ec47" containerName="nova-cell0-conductor-db-sync" Jan 20 17:27:11 crc kubenswrapper[4558]: E0120 17:27:11.019881 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f91fe62-3615-4153-a6c7-1652a5780bb5" containerName="glance-db-sync" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.019888 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f91fe62-3615-4153-a6c7-1652a5780bb5" containerName="glance-db-sync" Jan 20 17:27:11 crc kubenswrapper[4558]: E0120 17:27:11.019897 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="174beb08-6b5c-40ef-9809-a4e17c718392" containerName="cinder-api-log" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.019903 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="174beb08-6b5c-40ef-9809-a4e17c718392" containerName="cinder-api-log" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.020119 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f461d799-0027-4949-96bc-85e80ae9ec47" containerName="nova-cell0-conductor-db-sync" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.020133 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="174beb08-6b5c-40ef-9809-a4e17c718392" containerName="cinder-api-log" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.020146 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="04c5c6c7-acc2-4821-a68b-c2ac094e931f" containerName="nova-cell1-conductor-db-sync" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.020177 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d7d891f4-8ef8-4a21-9c7f-36088b864f7f" containerName="nova-cell1-conductor-conductor" Jan 20 17:27:11 crc kubenswrapper[4558]: 
I0120 17:27:11.020186 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="174beb08-6b5c-40ef-9809-a4e17c718392" containerName="cinder-api" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.020200 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f91fe62-3615-4153-a6c7-1652a5780bb5" containerName="glance-db-sync" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.022530 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.024144 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d7d891f4-8ef8-4a21-9c7f-36088b864f7f-kube-api-access-qm5t5" (OuterVolumeSpecName: "kube-api-access-qm5t5") pod "d7d891f4-8ef8-4a21-9c7f-36088b864f7f" (UID: "d7d891f4-8ef8-4a21-9c7f-36088b864f7f"). InnerVolumeSpecName "kube-api-access-qm5t5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.027833 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-api-config-data" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.028363 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-cinder-public-svc" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.029053 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-cinder-internal-svc" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.031265 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7d891f4-8ef8-4a21-9c7f-36088b864f7f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d7d891f4-8ef8-4a21-9c7f-36088b864f7f" (UID: "d7d891f4-8ef8-4a21-9c7f-36088b864f7f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.039225 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.064609 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7d891f4-8ef8-4a21-9c7f-36088b864f7f-config-data" (OuterVolumeSpecName: "config-data") pod "d7d891f4-8ef8-4a21-9c7f-36088b864f7f" (UID: "d7d891f4-8ef8-4a21-9c7f-36088b864f7f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.105246 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-prdr7\" (UniqueName: \"kubernetes.io/projected/f6a0777b-d04f-451b-9e3d-3f9ab0e19df8-kube-api-access-prdr7\") pod \"cinder-api-0\" (UID: \"f6a0777b-d04f-451b-9e3d-3f9ab0e19df8\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.105297 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f6a0777b-d04f-451b-9e3d-3f9ab0e19df8-scripts\") pod \"cinder-api-0\" (UID: \"f6a0777b-d04f-451b-9e3d-3f9ab0e19df8\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.105373 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f6a0777b-d04f-451b-9e3d-3f9ab0e19df8-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"f6a0777b-d04f-451b-9e3d-3f9ab0e19df8\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.105565 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f6a0777b-d04f-451b-9e3d-3f9ab0e19df8-logs\") pod \"cinder-api-0\" (UID: \"f6a0777b-d04f-451b-9e3d-3f9ab0e19df8\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.105608 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f6a0777b-d04f-451b-9e3d-3f9ab0e19df8-config-data-custom\") pod \"cinder-api-0\" (UID: \"f6a0777b-d04f-451b-9e3d-3f9ab0e19df8\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.105629 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f6a0777b-d04f-451b-9e3d-3f9ab0e19df8-config-data\") pod \"cinder-api-0\" (UID: \"f6a0777b-d04f-451b-9e3d-3f9ab0e19df8\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.105698 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f6a0777b-d04f-451b-9e3d-3f9ab0e19df8-public-tls-certs\") pod \"cinder-api-0\" (UID: \"f6a0777b-d04f-451b-9e3d-3f9ab0e19df8\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.105831 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f6a0777b-d04f-451b-9e3d-3f9ab0e19df8-etc-machine-id\") pod \"cinder-api-0\" (UID: \"f6a0777b-d04f-451b-9e3d-3f9ab0e19df8\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.105919 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f6a0777b-d04f-451b-9e3d-3f9ab0e19df8-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"f6a0777b-d04f-451b-9e3d-3f9ab0e19df8\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:27:11 crc 
kubenswrapper[4558]: I0120 17:27:11.106150 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qm5t5\" (UniqueName: \"kubernetes.io/projected/d7d891f4-8ef8-4a21-9c7f-36088b864f7f-kube-api-access-qm5t5\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.106216 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d7d891f4-8ef8-4a21-9c7f-36088b864f7f-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.106230 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d7d891f4-8ef8-4a21-9c7f-36088b864f7f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.208308 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f6a0777b-d04f-451b-9e3d-3f9ab0e19df8-logs\") pod \"cinder-api-0\" (UID: \"f6a0777b-d04f-451b-9e3d-3f9ab0e19df8\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.208354 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f6a0777b-d04f-451b-9e3d-3f9ab0e19df8-config-data-custom\") pod \"cinder-api-0\" (UID: \"f6a0777b-d04f-451b-9e3d-3f9ab0e19df8\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.208384 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f6a0777b-d04f-451b-9e3d-3f9ab0e19df8-config-data\") pod \"cinder-api-0\" (UID: \"f6a0777b-d04f-451b-9e3d-3f9ab0e19df8\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.208413 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f6a0777b-d04f-451b-9e3d-3f9ab0e19df8-public-tls-certs\") pod \"cinder-api-0\" (UID: \"f6a0777b-d04f-451b-9e3d-3f9ab0e19df8\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.208438 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f6a0777b-d04f-451b-9e3d-3f9ab0e19df8-etc-machine-id\") pod \"cinder-api-0\" (UID: \"f6a0777b-d04f-451b-9e3d-3f9ab0e19df8\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.208467 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f6a0777b-d04f-451b-9e3d-3f9ab0e19df8-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"f6a0777b-d04f-451b-9e3d-3f9ab0e19df8\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.208532 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-prdr7\" (UniqueName: \"kubernetes.io/projected/f6a0777b-d04f-451b-9e3d-3f9ab0e19df8-kube-api-access-prdr7\") pod \"cinder-api-0\" (UID: \"f6a0777b-d04f-451b-9e3d-3f9ab0e19df8\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.208550 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/f6a0777b-d04f-451b-9e3d-3f9ab0e19df8-scripts\") pod \"cinder-api-0\" (UID: \"f6a0777b-d04f-451b-9e3d-3f9ab0e19df8\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.208587 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f6a0777b-d04f-451b-9e3d-3f9ab0e19df8-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"f6a0777b-d04f-451b-9e3d-3f9ab0e19df8\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.209734 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f6a0777b-d04f-451b-9e3d-3f9ab0e19df8-logs\") pod \"cinder-api-0\" (UID: \"f6a0777b-d04f-451b-9e3d-3f9ab0e19df8\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.213112 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f6a0777b-d04f-451b-9e3d-3f9ab0e19df8-config-data-custom\") pod \"cinder-api-0\" (UID: \"f6a0777b-d04f-451b-9e3d-3f9ab0e19df8\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.215314 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f6a0777b-d04f-451b-9e3d-3f9ab0e19df8-etc-machine-id\") pod \"cinder-api-0\" (UID: \"f6a0777b-d04f-451b-9e3d-3f9ab0e19df8\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.218403 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f6a0777b-d04f-451b-9e3d-3f9ab0e19df8-public-tls-certs\") pod \"cinder-api-0\" (UID: \"f6a0777b-d04f-451b-9e3d-3f9ab0e19df8\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.223074 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f6a0777b-d04f-451b-9e3d-3f9ab0e19df8-config-data\") pod \"cinder-api-0\" (UID: \"f6a0777b-d04f-451b-9e3d-3f9ab0e19df8\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.223380 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f6a0777b-d04f-451b-9e3d-3f9ab0e19df8-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"f6a0777b-d04f-451b-9e3d-3f9ab0e19df8\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.233872 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f6a0777b-d04f-451b-9e3d-3f9ab0e19df8-scripts\") pod \"cinder-api-0\" (UID: \"f6a0777b-d04f-451b-9e3d-3f9ab0e19df8\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.235220 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f6a0777b-d04f-451b-9e3d-3f9ab0e19df8-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"f6a0777b-d04f-451b-9e3d-3f9ab0e19df8\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.235903 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-prdr7\" (UniqueName: \"kubernetes.io/projected/f6a0777b-d04f-451b-9e3d-3f9ab0e19df8-kube-api-access-prdr7\") pod \"cinder-api-0\" (UID: \"f6a0777b-d04f-451b-9e3d-3f9ab0e19df8\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.342004 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.480303 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.591899 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.623531 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd-combined-ca-bundle\") pod \"c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd\" (UID: \"c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd\") " Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.623568 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd-config-data\") pod \"c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd\" (UID: \"c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd\") " Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.623612 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd-httpd-run\") pod \"c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd\" (UID: \"c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd\") " Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.623703 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd-scripts\") pod \"c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd\" (UID: \"c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd\") " Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.623783 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd\" (UID: \"c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd\") " Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.623825 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5vdgq\" (UniqueName: \"kubernetes.io/projected/c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd-kube-api-access-5vdgq\") pod \"c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd\" (UID: \"c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd\") " Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.623913 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd-public-tls-certs\") pod \"c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd\" (UID: \"c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd\") " Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.623948 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd-logs\") pod \"c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd\" (UID: 
\"c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd\") " Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.624198 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd" (UID: "c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.624480 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd-logs" (OuterVolumeSpecName: "logs") pod "c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd" (UID: "c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.625219 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.625246 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.629987 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage14-crc" (OuterVolumeSpecName: "glance") pod "c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd" (UID: "c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd"). InnerVolumeSpecName "local-storage14-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.636835 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd-kube-api-access-5vdgq" (OuterVolumeSpecName: "kube-api-access-5vdgq") pod "c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd" (UID: "c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd"). InnerVolumeSpecName "kube-api-access-5vdgq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.645349 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd-scripts" (OuterVolumeSpecName: "scripts") pod "c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd" (UID: "c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.662007 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd" (UID: "c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.673291 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd-config-data" (OuterVolumeSpecName: "config-data") pod "c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd" (UID: "c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.679830 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd" (UID: "c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.727941 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/269584c1-a4ed-43e5-a1cb-1e5e6952df11-scripts\") pod \"269584c1-a4ed-43e5-a1cb-1e5e6952df11\" (UID: \"269584c1-a4ed-43e5-a1cb-1e5e6952df11\") " Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.728064 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t9zpz\" (UniqueName: \"kubernetes.io/projected/269584c1-a4ed-43e5-a1cb-1e5e6952df11-kube-api-access-t9zpz\") pod \"269584c1-a4ed-43e5-a1cb-1e5e6952df11\" (UID: \"269584c1-a4ed-43e5-a1cb-1e5e6952df11\") " Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.728363 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/269584c1-a4ed-43e5-a1cb-1e5e6952df11-internal-tls-certs\") pod \"269584c1-a4ed-43e5-a1cb-1e5e6952df11\" (UID: \"269584c1-a4ed-43e5-a1cb-1e5e6952df11\") " Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.728418 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/269584c1-a4ed-43e5-a1cb-1e5e6952df11-httpd-run\") pod \"269584c1-a4ed-43e5-a1cb-1e5e6952df11\" (UID: \"269584c1-a4ed-43e5-a1cb-1e5e6952df11\") " Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.728486 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/269584c1-a4ed-43e5-a1cb-1e5e6952df11-logs\") pod \"269584c1-a4ed-43e5-a1cb-1e5e6952df11\" (UID: \"269584c1-a4ed-43e5-a1cb-1e5e6952df11\") " Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.728626 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"269584c1-a4ed-43e5-a1cb-1e5e6952df11\" (UID: \"269584c1-a4ed-43e5-a1cb-1e5e6952df11\") " Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.728675 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/269584c1-a4ed-43e5-a1cb-1e5e6952df11-config-data\") pod \"269584c1-a4ed-43e5-a1cb-1e5e6952df11\" (UID: \"269584c1-a4ed-43e5-a1cb-1e5e6952df11\") " Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.728731 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/269584c1-a4ed-43e5-a1cb-1e5e6952df11-combined-ca-bundle\") pod \"269584c1-a4ed-43e5-a1cb-1e5e6952df11\" (UID: \"269584c1-a4ed-43e5-a1cb-1e5e6952df11\") " Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.729351 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/269584c1-a4ed-43e5-a1cb-1e5e6952df11-logs" (OuterVolumeSpecName: "logs") pod "269584c1-a4ed-43e5-a1cb-1e5e6952df11" (UID: 
"269584c1-a4ed-43e5-a1cb-1e5e6952df11"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.729993 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/269584c1-a4ed-43e5-a1cb-1e5e6952df11-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "269584c1-a4ed-43e5-a1cb-1e5e6952df11" (UID: "269584c1-a4ed-43e5-a1cb-1e5e6952df11"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.730628 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.730667 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") on node \"crc\" " Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.730681 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5vdgq\" (UniqueName: \"kubernetes.io/projected/c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd-kube-api-access-5vdgq\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.730695 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.730705 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/269584c1-a4ed-43e5-a1cb-1e5e6952df11-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.730715 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.730739 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.730751 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/269584c1-a4ed-43e5-a1cb-1e5e6952df11-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.732205 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/269584c1-a4ed-43e5-a1cb-1e5e6952df11-scripts" (OuterVolumeSpecName: "scripts") pod "269584c1-a4ed-43e5-a1cb-1e5e6952df11" (UID: "269584c1-a4ed-43e5-a1cb-1e5e6952df11"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.733361 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/269584c1-a4ed-43e5-a1cb-1e5e6952df11-kube-api-access-t9zpz" (OuterVolumeSpecName: "kube-api-access-t9zpz") pod "269584c1-a4ed-43e5-a1cb-1e5e6952df11" (UID: "269584c1-a4ed-43e5-a1cb-1e5e6952df11"). InnerVolumeSpecName "kube-api-access-t9zpz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.738272 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage15-crc" (OuterVolumeSpecName: "glance") pod "269584c1-a4ed-43e5-a1cb-1e5e6952df11" (UID: "269584c1-a4ed-43e5-a1cb-1e5e6952df11"). InnerVolumeSpecName "local-storage15-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.756549 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage14-crc" (UniqueName: "kubernetes.io/local-volume/local-storage14-crc") on node "crc" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.762815 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/269584c1-a4ed-43e5-a1cb-1e5e6952df11-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "269584c1-a4ed-43e5-a1cb-1e5e6952df11" (UID: "269584c1-a4ed-43e5-a1cb-1e5e6952df11"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.778813 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/269584c1-a4ed-43e5-a1cb-1e5e6952df11-config-data" (OuterVolumeSpecName: "config-data") pod "269584c1-a4ed-43e5-a1cb-1e5e6952df11" (UID: "269584c1-a4ed-43e5-a1cb-1e5e6952df11"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.779630 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/269584c1-a4ed-43e5-a1cb-1e5e6952df11-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "269584c1-a4ed-43e5-a1cb-1e5e6952df11" (UID: "269584c1-a4ed-43e5-a1cb-1e5e6952df11"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.816453 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.832505 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/269584c1-a4ed-43e5-a1cb-1e5e6952df11-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.832553 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") on node \"crc\" " Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.832564 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/269584c1-a4ed-43e5-a1cb-1e5e6952df11-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.832574 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/269584c1-a4ed-43e5-a1cb-1e5e6952df11-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.832584 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.832592 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/269584c1-a4ed-43e5-a1cb-1e5e6952df11-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.832602 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t9zpz\" (UniqueName: \"kubernetes.io/projected/269584c1-a4ed-43e5-a1cb-1e5e6952df11-kube-api-access-t9zpz\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.856600 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage15-crc" (UniqueName: "kubernetes.io/local-volume/local-storage15-crc") on node "crc" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.927293 4558 generic.go:334] "Generic (PLEG): container finished" podID="c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd" containerID="ca56992fbef2886a2ace481786936757fbf282ec4e0bc3e84966541d89eaaf75" exitCode=0 Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.927363 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.927394 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd","Type":"ContainerDied","Data":"ca56992fbef2886a2ace481786936757fbf282ec4e0bc3e84966541d89eaaf75"} Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.927465 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd","Type":"ContainerDied","Data":"e2c7caf788a4795ff5ba8b7bcf6273b8cb3046464010a756d216c9ba55706fac"} Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.927488 4558 scope.go:117] "RemoveContainer" containerID="ca56992fbef2886a2ace481786936757fbf282ec4e0bc3e84966541d89eaaf75" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.928983 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"f6a0777b-d04f-451b-9e3d-3f9ab0e19df8","Type":"ContainerStarted","Data":"1a0e63afdcf6b78dbb9d540f12c0b505d916fc6d624ef932399ee4f3c33394cb"} Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.934639 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.935575 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"d7d891f4-8ef8-4a21-9c7f-36088b864f7f","Type":"ContainerDied","Data":"ff4e129c7bc35532eb2e5aef70443b3c3007c86f4af2b71e09bd92a1f84b7881"} Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.935660 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.956863 4558 generic.go:334] "Generic (PLEG): container finished" podID="c7f6b91b-b55e-4af0-b496-46c80b92bad1" containerID="7e009ea8664346031f4001d328d14e88633993579983342f57172300cb2e9d67" exitCode=0 Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.957134 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-sync-rmm2x" event={"ID":"c7f6b91b-b55e-4af0-b496-46c80b92bad1","Type":"ContainerDied","Data":"7e009ea8664346031f4001d328d14e88633993579983342f57172300cb2e9d67"} Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.978047 4558 generic.go:334] "Generic (PLEG): container finished" podID="269584c1-a4ed-43e5-a1cb-1e5e6952df11" containerID="bc74e5cb26055978318c9f2b43ca67fd1b3e4cc5ce3e22f2e91cfdfd0a3b5a4e" exitCode=0 Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.978106 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"269584c1-a4ed-43e5-a1cb-1e5e6952df11","Type":"ContainerDied","Data":"bc74e5cb26055978318c9f2b43ca67fd1b3e4cc5ce3e22f2e91cfdfd0a3b5a4e"} Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.978228 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"269584c1-a4ed-43e5-a1cb-1e5e6952df11","Type":"ContainerDied","Data":"aab62f80f8c125005398693e756fd2c9cf4795cd0187ce218c54a1d92159e226"} Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.978645 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:27:11 crc kubenswrapper[4558]: I0120 17:27:11.996726 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.041485 4558 scope.go:117] "RemoveContainer" containerID="22a976775c577de36fb22efa39255300a73eb567891d714dec5fe2063f3429ce" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.075117 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.102524 4558 scope.go:117] "RemoveContainer" containerID="ca56992fbef2886a2ace481786936757fbf282ec4e0bc3e84966541d89eaaf75" Jan 20 17:27:12 crc kubenswrapper[4558]: E0120 17:27:12.103136 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ca56992fbef2886a2ace481786936757fbf282ec4e0bc3e84966541d89eaaf75\": container with ID starting with ca56992fbef2886a2ace481786936757fbf282ec4e0bc3e84966541d89eaaf75 not found: ID does not exist" containerID="ca56992fbef2886a2ace481786936757fbf282ec4e0bc3e84966541d89eaaf75" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.103232 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ca56992fbef2886a2ace481786936757fbf282ec4e0bc3e84966541d89eaaf75"} err="failed to get container status \"ca56992fbef2886a2ace481786936757fbf282ec4e0bc3e84966541d89eaaf75\": rpc error: code = NotFound desc = could not find container \"ca56992fbef2886a2ace481786936757fbf282ec4e0bc3e84966541d89eaaf75\": container with ID starting with ca56992fbef2886a2ace481786936757fbf282ec4e0bc3e84966541d89eaaf75 not found: ID does not exist" Jan 20 17:27:12 crc 
kubenswrapper[4558]: I0120 17:27:12.103265 4558 scope.go:117] "RemoveContainer" containerID="22a976775c577de36fb22efa39255300a73eb567891d714dec5fe2063f3429ce" Jan 20 17:27:12 crc kubenswrapper[4558]: E0120 17:27:12.103913 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"22a976775c577de36fb22efa39255300a73eb567891d714dec5fe2063f3429ce\": container with ID starting with 22a976775c577de36fb22efa39255300a73eb567891d714dec5fe2063f3429ce not found: ID does not exist" containerID="22a976775c577de36fb22efa39255300a73eb567891d714dec5fe2063f3429ce" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.103961 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"22a976775c577de36fb22efa39255300a73eb567891d714dec5fe2063f3429ce"} err="failed to get container status \"22a976775c577de36fb22efa39255300a73eb567891d714dec5fe2063f3429ce\": rpc error: code = NotFound desc = could not find container \"22a976775c577de36fb22efa39255300a73eb567891d714dec5fe2063f3429ce\": container with ID starting with 22a976775c577de36fb22efa39255300a73eb567891d714dec5fe2063f3429ce not found: ID does not exist" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.103995 4558 scope.go:117] "RemoveContainer" containerID="3dde84a4d4b2d41242c14681a10217ff909436ad4cbf651a6f8291244c16021c" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.116147 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:27:12 crc kubenswrapper[4558]: E0120 17:27:12.116901 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="269584c1-a4ed-43e5-a1cb-1e5e6952df11" containerName="glance-httpd" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.116923 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="269584c1-a4ed-43e5-a1cb-1e5e6952df11" containerName="glance-httpd" Jan 20 17:27:12 crc kubenswrapper[4558]: E0120 17:27:12.116937 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="269584c1-a4ed-43e5-a1cb-1e5e6952df11" containerName="glance-log" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.116943 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="269584c1-a4ed-43e5-a1cb-1e5e6952df11" containerName="glance-log" Jan 20 17:27:12 crc kubenswrapper[4558]: E0120 17:27:12.116965 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd" containerName="glance-httpd" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.116971 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd" containerName="glance-httpd" Jan 20 17:27:12 crc kubenswrapper[4558]: E0120 17:27:12.116986 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd" containerName="glance-log" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.116992 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd" containerName="glance-log" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.117207 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="269584c1-a4ed-43e5-a1cb-1e5e6952df11" containerName="glance-httpd" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.117222 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="269584c1-a4ed-43e5-a1cb-1e5e6952df11" containerName="glance-log" Jan 20 17:27:12 crc 
kubenswrapper[4558]: I0120 17:27:12.117232 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd" containerName="glance-log" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.117246 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd" containerName="glance-httpd" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.118326 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.120655 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-glance-dockercfg-np554" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.121069 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-public-svc" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.121273 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-external-config-data" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.122617 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-scripts" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.122362 4558 scope.go:117] "RemoveContainer" containerID="bc74e5cb26055978318c9f2b43ca67fd1b3e4cc5ce3e22f2e91cfdfd0a3b5a4e" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.128747 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.138916 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.151442 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.157827 4558 scope.go:117] "RemoveContainer" containerID="da0ca9294c16d1999fa49c21fdd4c31261941cef28b785b5ef05cdd451846e52" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.159391 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.165305 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.170239 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.171857 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.173311 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-conductor-config-data" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.175261 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.180227 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.184099 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.188005 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.190391 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-internal-config-data" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.190570 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-internal-svc" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.248253 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/592927c6-9b42-411a-8aec-40cf6183e32a-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"592927c6-9b42-411a-8aec-40cf6183e32a\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.248303 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/592927c6-9b42-411a-8aec-40cf6183e32a-scripts\") pod \"glance-default-external-api-0\" (UID: \"592927c6-9b42-411a-8aec-40cf6183e32a\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.248357 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8g5mb\" (UniqueName: \"kubernetes.io/projected/592927c6-9b42-411a-8aec-40cf6183e32a-kube-api-access-8g5mb\") pod \"glance-default-external-api-0\" (UID: \"592927c6-9b42-411a-8aec-40cf6183e32a\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.248456 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/592927c6-9b42-411a-8aec-40cf6183e32a-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"592927c6-9b42-411a-8aec-40cf6183e32a\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.248515 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/592927c6-9b42-411a-8aec-40cf6183e32a-logs\") pod \"glance-default-external-api-0\" (UID: \"592927c6-9b42-411a-8aec-40cf6183e32a\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.248540 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"glance-default-external-api-0\" (UID: \"592927c6-9b42-411a-8aec-40cf6183e32a\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.248588 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/592927c6-9b42-411a-8aec-40cf6183e32a-config-data\") pod \"glance-default-external-api-0\" (UID: \"592927c6-9b42-411a-8aec-40cf6183e32a\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:27:12 crc 
kubenswrapper[4558]: I0120 17:27:12.248632 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/592927c6-9b42-411a-8aec-40cf6183e32a-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"592927c6-9b42-411a-8aec-40cf6183e32a\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.334335 4558 scope.go:117] "RemoveContainer" containerID="bc74e5cb26055978318c9f2b43ca67fd1b3e4cc5ce3e22f2e91cfdfd0a3b5a4e" Jan 20 17:27:12 crc kubenswrapper[4558]: E0120 17:27:12.334993 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bc74e5cb26055978318c9f2b43ca67fd1b3e4cc5ce3e22f2e91cfdfd0a3b5a4e\": container with ID starting with bc74e5cb26055978318c9f2b43ca67fd1b3e4cc5ce3e22f2e91cfdfd0a3b5a4e not found: ID does not exist" containerID="bc74e5cb26055978318c9f2b43ca67fd1b3e4cc5ce3e22f2e91cfdfd0a3b5a4e" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.335033 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bc74e5cb26055978318c9f2b43ca67fd1b3e4cc5ce3e22f2e91cfdfd0a3b5a4e"} err="failed to get container status \"bc74e5cb26055978318c9f2b43ca67fd1b3e4cc5ce3e22f2e91cfdfd0a3b5a4e\": rpc error: code = NotFound desc = could not find container \"bc74e5cb26055978318c9f2b43ca67fd1b3e4cc5ce3e22f2e91cfdfd0a3b5a4e\": container with ID starting with bc74e5cb26055978318c9f2b43ca67fd1b3e4cc5ce3e22f2e91cfdfd0a3b5a4e not found: ID does not exist" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.335058 4558 scope.go:117] "RemoveContainer" containerID="da0ca9294c16d1999fa49c21fdd4c31261941cef28b785b5ef05cdd451846e52" Jan 20 17:27:12 crc kubenswrapper[4558]: E0120 17:27:12.335282 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"da0ca9294c16d1999fa49c21fdd4c31261941cef28b785b5ef05cdd451846e52\": container with ID starting with da0ca9294c16d1999fa49c21fdd4c31261941cef28b785b5ef05cdd451846e52 not found: ID does not exist" containerID="da0ca9294c16d1999fa49c21fdd4c31261941cef28b785b5ef05cdd451846e52" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.335306 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"da0ca9294c16d1999fa49c21fdd4c31261941cef28b785b5ef05cdd451846e52"} err="failed to get container status \"da0ca9294c16d1999fa49c21fdd4c31261941cef28b785b5ef05cdd451846e52\": rpc error: code = NotFound desc = could not find container \"da0ca9294c16d1999fa49c21fdd4c31261941cef28b785b5ef05cdd451846e52\": container with ID starting with da0ca9294c16d1999fa49c21fdd4c31261941cef28b785b5ef05cdd451846e52 not found: ID does not exist" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.351945 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/592927c6-9b42-411a-8aec-40cf6183e32a-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"592927c6-9b42-411a-8aec-40cf6183e32a\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.351991 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-spmwn\" (UniqueName: 
\"kubernetes.io/projected/e81d2508-e298-463c-9031-b9d8e486d566-kube-api-access-spmwn\") pod \"glance-default-internal-api-0\" (UID: \"e81d2508-e298-463c-9031-b9d8e486d566\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.352017 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e81d2508-e298-463c-9031-b9d8e486d566-logs\") pod \"glance-default-internal-api-0\" (UID: \"e81d2508-e298-463c-9031-b9d8e486d566\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.352055 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/592927c6-9b42-411a-8aec-40cf6183e32a-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"592927c6-9b42-411a-8aec-40cf6183e32a\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.352704 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/592927c6-9b42-411a-8aec-40cf6183e32a-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"592927c6-9b42-411a-8aec-40cf6183e32a\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.352216 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/88b73d7f-ee50-4fdf-9fd7-37296c855d69-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"88b73d7f-ee50-4fdf-9fd7-37296c855d69\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.352791 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e81d2508-e298-463c-9031-b9d8e486d566-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"e81d2508-e298-463c-9031-b9d8e486d566\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.352822 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/592927c6-9b42-411a-8aec-40cf6183e32a-scripts\") pod \"glance-default-external-api-0\" (UID: \"592927c6-9b42-411a-8aec-40cf6183e32a\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.352885 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8g5mb\" (UniqueName: \"kubernetes.io/projected/592927c6-9b42-411a-8aec-40cf6183e32a-kube-api-access-8g5mb\") pod \"glance-default-external-api-0\" (UID: \"592927c6-9b42-411a-8aec-40cf6183e32a\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.352935 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"glance-default-internal-api-0\" (UID: \"e81d2508-e298-463c-9031-b9d8e486d566\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.353030 4558 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e81d2508-e298-463c-9031-b9d8e486d566-scripts\") pod \"glance-default-internal-api-0\" (UID: \"e81d2508-e298-463c-9031-b9d8e486d566\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.353067 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/592927c6-9b42-411a-8aec-40cf6183e32a-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"592927c6-9b42-411a-8aec-40cf6183e32a\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.353157 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88b73d7f-ee50-4fdf-9fd7-37296c855d69-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"88b73d7f-ee50-4fdf-9fd7-37296c855d69\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.353234 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/592927c6-9b42-411a-8aec-40cf6183e32a-logs\") pod \"glance-default-external-api-0\" (UID: \"592927c6-9b42-411a-8aec-40cf6183e32a\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.353275 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"glance-default-external-api-0\" (UID: \"592927c6-9b42-411a-8aec-40cf6183e32a\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.353305 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e81d2508-e298-463c-9031-b9d8e486d566-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"e81d2508-e298-463c-9031-b9d8e486d566\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.353610 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/592927c6-9b42-411a-8aec-40cf6183e32a-config-data\") pod \"glance-default-external-api-0\" (UID: \"592927c6-9b42-411a-8aec-40cf6183e32a\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.353640 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8mwb8\" (UniqueName: \"kubernetes.io/projected/88b73d7f-ee50-4fdf-9fd7-37296c855d69-kube-api-access-8mwb8\") pod \"nova-cell1-conductor-0\" (UID: \"88b73d7f-ee50-4fdf-9fd7-37296c855d69\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.353727 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e81d2508-e298-463c-9031-b9d8e486d566-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"e81d2508-e298-463c-9031-b9d8e486d566\") " 
pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.353755 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e81d2508-e298-463c-9031-b9d8e486d566-config-data\") pod \"glance-default-internal-api-0\" (UID: \"e81d2508-e298-463c-9031-b9d8e486d566\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.354680 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/592927c6-9b42-411a-8aec-40cf6183e32a-logs\") pod \"glance-default-external-api-0\" (UID: \"592927c6-9b42-411a-8aec-40cf6183e32a\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.355560 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"glance-default-external-api-0\" (UID: \"592927c6-9b42-411a-8aec-40cf6183e32a\") device mount path \"/mnt/openstack/pv14\"" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.359880 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/592927c6-9b42-411a-8aec-40cf6183e32a-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"592927c6-9b42-411a-8aec-40cf6183e32a\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.359980 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/592927c6-9b42-411a-8aec-40cf6183e32a-scripts\") pod \"glance-default-external-api-0\" (UID: \"592927c6-9b42-411a-8aec-40cf6183e32a\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.360371 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/592927c6-9b42-411a-8aec-40cf6183e32a-config-data\") pod \"glance-default-external-api-0\" (UID: \"592927c6-9b42-411a-8aec-40cf6183e32a\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.368415 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/592927c6-9b42-411a-8aec-40cf6183e32a-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"592927c6-9b42-411a-8aec-40cf6183e32a\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.370074 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8g5mb\" (UniqueName: \"kubernetes.io/projected/592927c6-9b42-411a-8aec-40cf6183e32a-kube-api-access-8g5mb\") pod \"glance-default-external-api-0\" (UID: \"592927c6-9b42-411a-8aec-40cf6183e32a\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.386517 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"glance-default-external-api-0\" (UID: \"592927c6-9b42-411a-8aec-40cf6183e32a\") " 
pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.440225 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.459001 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"glance-default-internal-api-0\" (UID: \"e81d2508-e298-463c-9031-b9d8e486d566\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.459073 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e81d2508-e298-463c-9031-b9d8e486d566-scripts\") pod \"glance-default-internal-api-0\" (UID: \"e81d2508-e298-463c-9031-b9d8e486d566\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.459130 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88b73d7f-ee50-4fdf-9fd7-37296c855d69-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"88b73d7f-ee50-4fdf-9fd7-37296c855d69\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.459182 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e81d2508-e298-463c-9031-b9d8e486d566-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"e81d2508-e298-463c-9031-b9d8e486d566\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.459234 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8mwb8\" (UniqueName: \"kubernetes.io/projected/88b73d7f-ee50-4fdf-9fd7-37296c855d69-kube-api-access-8mwb8\") pod \"nova-cell1-conductor-0\" (UID: \"88b73d7f-ee50-4fdf-9fd7-37296c855d69\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.459260 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e81d2508-e298-463c-9031-b9d8e486d566-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"e81d2508-e298-463c-9031-b9d8e486d566\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.459262 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"glance-default-internal-api-0\" (UID: \"e81d2508-e298-463c-9031-b9d8e486d566\") device mount path \"/mnt/openstack/pv15\"" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.459277 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e81d2508-e298-463c-9031-b9d8e486d566-config-data\") pod \"glance-default-internal-api-0\" (UID: \"e81d2508-e298-463c-9031-b9d8e486d566\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.462064 4558 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-spmwn\" (UniqueName: \"kubernetes.io/projected/e81d2508-e298-463c-9031-b9d8e486d566-kube-api-access-spmwn\") pod \"glance-default-internal-api-0\" (UID: \"e81d2508-e298-463c-9031-b9d8e486d566\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.462129 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e81d2508-e298-463c-9031-b9d8e486d566-logs\") pod \"glance-default-internal-api-0\" (UID: \"e81d2508-e298-463c-9031-b9d8e486d566\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.462179 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e81d2508-e298-463c-9031-b9d8e486d566-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"e81d2508-e298-463c-9031-b9d8e486d566\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.462202 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/88b73d7f-ee50-4fdf-9fd7-37296c855d69-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"88b73d7f-ee50-4fdf-9fd7-37296c855d69\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.464705 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e81d2508-e298-463c-9031-b9d8e486d566-logs\") pod \"glance-default-internal-api-0\" (UID: \"e81d2508-e298-463c-9031-b9d8e486d566\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.465086 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e81d2508-e298-463c-9031-b9d8e486d566-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"e81d2508-e298-463c-9031-b9d8e486d566\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.466063 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e81d2508-e298-463c-9031-b9d8e486d566-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"e81d2508-e298-463c-9031-b9d8e486d566\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.466695 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88b73d7f-ee50-4fdf-9fd7-37296c855d69-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"88b73d7f-ee50-4fdf-9fd7-37296c855d69\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.469971 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e81d2508-e298-463c-9031-b9d8e486d566-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"e81d2508-e298-463c-9031-b9d8e486d566\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.470603 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"config-data\" (UniqueName: \"kubernetes.io/secret/88b73d7f-ee50-4fdf-9fd7-37296c855d69-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"88b73d7f-ee50-4fdf-9fd7-37296c855d69\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.471659 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e81d2508-e298-463c-9031-b9d8e486d566-scripts\") pod \"glance-default-internal-api-0\" (UID: \"e81d2508-e298-463c-9031-b9d8e486d566\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.472741 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e81d2508-e298-463c-9031-b9d8e486d566-config-data\") pod \"glance-default-internal-api-0\" (UID: \"e81d2508-e298-463c-9031-b9d8e486d566\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.478881 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-spmwn\" (UniqueName: \"kubernetes.io/projected/e81d2508-e298-463c-9031-b9d8e486d566-kube-api-access-spmwn\") pod \"glance-default-internal-api-0\" (UID: \"e81d2508-e298-463c-9031-b9d8e486d566\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.482923 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8mwb8\" (UniqueName: \"kubernetes.io/projected/88b73d7f-ee50-4fdf-9fd7-37296c855d69-kube-api-access-8mwb8\") pod \"nova-cell1-conductor-0\" (UID: \"88b73d7f-ee50-4fdf-9fd7-37296c855d69\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.499810 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"glance-default-internal-api-0\" (UID: \"e81d2508-e298-463c-9031-b9d8e486d566\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.582770 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="174beb08-6b5c-40ef-9809-a4e17c718392" path="/var/lib/kubelet/pods/174beb08-6b5c-40ef-9809-a4e17c718392/volumes" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.583704 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="269584c1-a4ed-43e5-a1cb-1e5e6952df11" path="/var/lib/kubelet/pods/269584c1-a4ed-43e5-a1cb-1e5e6952df11/volumes" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.584377 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd" path="/var/lib/kubelet/pods/c4caf1aa-77f3-4f79-a1e5-ce8fa944b6cd/volumes" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.585529 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d7d891f4-8ef8-4a21-9c7f-36088b864f7f" path="/var/lib/kubelet/pods/d7d891f4-8ef8-4a21-9c7f-36088b864f7f/volumes" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.651397 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.752002 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:27:12 crc kubenswrapper[4558]: I0120 17:27:12.898350 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:27:13 crc kubenswrapper[4558]: I0120 17:27:13.005713 4558 generic.go:334] "Generic (PLEG): container finished" podID="29c1b3a9-0bdc-423c-af7f-42f86f3693b5" containerID="ba1f5590ea277f251775376a8988a4f0e9b00277af3bcaf111020b2f5cccdc43" exitCode=0 Jan 20 17:27:13 crc kubenswrapper[4558]: I0120 17:27:13.005780 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-sync-xq5zh" event={"ID":"29c1b3a9-0bdc-423c-af7f-42f86f3693b5","Type":"ContainerDied","Data":"ba1f5590ea277f251775376a8988a4f0e9b00277af3bcaf111020b2f5cccdc43"} Jan 20 17:27:13 crc kubenswrapper[4558]: I0120 17:27:13.014640 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"134fc5ef-70be-4c8c-aa72-e4f0440d0afe","Type":"ContainerStarted","Data":"39bfb26312eaad0bd344b47e0fab7bc8a68e4339a058a5362c330669eb1593d1"} Jan 20 17:27:13 crc kubenswrapper[4558]: I0120 17:27:13.014841 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:27:13 crc kubenswrapper[4558]: I0120 17:27:13.027140 4558 generic.go:334] "Generic (PLEG): container finished" podID="c2442cca-1f6c-4531-b1f4-2b873ce42964" containerID="364d7369bafd0402a20a7f9c1276504839740cce0e4e93df229f18382a27c891" exitCode=0 Jan 20 17:27:13 crc kubenswrapper[4558]: I0120 17:27:13.027205 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-sync-t7b8z" event={"ID":"c2442cca-1f6c-4531-b1f4-2b873ce42964","Type":"ContainerDied","Data":"364d7369bafd0402a20a7f9c1276504839740cce0e4e93df229f18382a27c891"} Jan 20 17:27:13 crc kubenswrapper[4558]: I0120 17:27:13.028494 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"f6a0777b-d04f-451b-9e3d-3f9ab0e19df8","Type":"ContainerStarted","Data":"516792e38f223a8dda033868458240450dfed4fe467c3a9a1c42b3301734bf44"} Jan 20 17:27:13 crc kubenswrapper[4558]: I0120 17:27:13.029942 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"592927c6-9b42-411a-8aec-40cf6183e32a","Type":"ContainerStarted","Data":"f016674ed49b7a9e05cae1b4024e96013b71fe07e2c51b556bf79a5bf5c220db"} Jan 20 17:27:13 crc kubenswrapper[4558]: I0120 17:27:13.044470 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=2.654063231 podStartE2EDuration="7.044453793s" podCreationTimestamp="2026-01-20 17:27:06 +0000 UTC" firstStartedPulling="2026-01-20 17:27:07.712135884 +0000 UTC m=+2721.472473851" lastFinishedPulling="2026-01-20 17:27:12.102526446 +0000 UTC m=+2725.862864413" observedRunningTime="2026-01-20 17:27:13.034996392 +0000 UTC m=+2726.795334349" watchObservedRunningTime="2026-01-20 17:27:13.044453793 +0000 UTC m=+2726.804791759" Jan 20 17:27:13 crc kubenswrapper[4558]: I0120 17:27:13.108952 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:27:13 crc kubenswrapper[4558]: I0120 17:27:13.190992 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:27:13 crc kubenswrapper[4558]: I0120 
17:27:13.433425 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-db-sync-rmm2x" Jan 20 17:27:13 crc kubenswrapper[4558]: I0120 17:27:13.517047 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7f6b91b-b55e-4af0-b496-46c80b92bad1-config-data\") pod \"c7f6b91b-b55e-4af0-b496-46c80b92bad1\" (UID: \"c7f6b91b-b55e-4af0-b496-46c80b92bad1\") " Jan 20 17:27:13 crc kubenswrapper[4558]: I0120 17:27:13.517228 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c7f6b91b-b55e-4af0-b496-46c80b92bad1-logs\") pod \"c7f6b91b-b55e-4af0-b496-46c80b92bad1\" (UID: \"c7f6b91b-b55e-4af0-b496-46c80b92bad1\") " Jan 20 17:27:13 crc kubenswrapper[4558]: I0120 17:27:13.517377 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7f6b91b-b55e-4af0-b496-46c80b92bad1-combined-ca-bundle\") pod \"c7f6b91b-b55e-4af0-b496-46c80b92bad1\" (UID: \"c7f6b91b-b55e-4af0-b496-46c80b92bad1\") " Jan 20 17:27:13 crc kubenswrapper[4558]: I0120 17:27:13.517401 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zvxtv\" (UniqueName: \"kubernetes.io/projected/c7f6b91b-b55e-4af0-b496-46c80b92bad1-kube-api-access-zvxtv\") pod \"c7f6b91b-b55e-4af0-b496-46c80b92bad1\" (UID: \"c7f6b91b-b55e-4af0-b496-46c80b92bad1\") " Jan 20 17:27:13 crc kubenswrapper[4558]: I0120 17:27:13.517441 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c7f6b91b-b55e-4af0-b496-46c80b92bad1-scripts\") pod \"c7f6b91b-b55e-4af0-b496-46c80b92bad1\" (UID: \"c7f6b91b-b55e-4af0-b496-46c80b92bad1\") " Jan 20 17:27:13 crc kubenswrapper[4558]: I0120 17:27:13.519077 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c7f6b91b-b55e-4af0-b496-46c80b92bad1-logs" (OuterVolumeSpecName: "logs") pod "c7f6b91b-b55e-4af0-b496-46c80b92bad1" (UID: "c7f6b91b-b55e-4af0-b496-46c80b92bad1"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:27:13 crc kubenswrapper[4558]: I0120 17:27:13.526842 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c7f6b91b-b55e-4af0-b496-46c80b92bad1-scripts" (OuterVolumeSpecName: "scripts") pod "c7f6b91b-b55e-4af0-b496-46c80b92bad1" (UID: "c7f6b91b-b55e-4af0-b496-46c80b92bad1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:13 crc kubenswrapper[4558]: I0120 17:27:13.527671 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c7f6b91b-b55e-4af0-b496-46c80b92bad1-kube-api-access-zvxtv" (OuterVolumeSpecName: "kube-api-access-zvxtv") pod "c7f6b91b-b55e-4af0-b496-46c80b92bad1" (UID: "c7f6b91b-b55e-4af0-b496-46c80b92bad1"). InnerVolumeSpecName "kube-api-access-zvxtv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:27:13 crc kubenswrapper[4558]: I0120 17:27:13.544806 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c7f6b91b-b55e-4af0-b496-46c80b92bad1-config-data" (OuterVolumeSpecName: "config-data") pod "c7f6b91b-b55e-4af0-b496-46c80b92bad1" (UID: "c7f6b91b-b55e-4af0-b496-46c80b92bad1"). 
InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:13 crc kubenswrapper[4558]: I0120 17:27:13.545454 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c7f6b91b-b55e-4af0-b496-46c80b92bad1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c7f6b91b-b55e-4af0-b496-46c80b92bad1" (UID: "c7f6b91b-b55e-4af0-b496-46c80b92bad1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:13 crc kubenswrapper[4558]: I0120 17:27:13.621382 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7f6b91b-b55e-4af0-b496-46c80b92bad1-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:13 crc kubenswrapper[4558]: I0120 17:27:13.621527 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c7f6b91b-b55e-4af0-b496-46c80b92bad1-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:13 crc kubenswrapper[4558]: I0120 17:27:13.621588 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7f6b91b-b55e-4af0-b496-46c80b92bad1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:13 crc kubenswrapper[4558]: I0120 17:27:13.621644 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zvxtv\" (UniqueName: \"kubernetes.io/projected/c7f6b91b-b55e-4af0-b496-46c80b92bad1-kube-api-access-zvxtv\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:13 crc kubenswrapper[4558]: I0120 17:27:13.621695 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c7f6b91b-b55e-4af0-b496-46c80b92bad1-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:14 crc kubenswrapper[4558]: I0120 17:27:14.057235 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"592927c6-9b42-411a-8aec-40cf6183e32a","Type":"ContainerStarted","Data":"f60fe058ea93021d1c1c3c3d2912da2aaabf83d63fd9e5d23a8a1c05335b6abd"} Jan 20 17:27:14 crc kubenswrapper[4558]: I0120 17:27:14.062852 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"e81d2508-e298-463c-9031-b9d8e486d566","Type":"ContainerStarted","Data":"d2afe7d8e958d845dee5152deac97265bbf55f309d653220c753d06c2403fd6e"} Jan 20 17:27:14 crc kubenswrapper[4558]: I0120 17:27:14.062906 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"e81d2508-e298-463c-9031-b9d8e486d566","Type":"ContainerStarted","Data":"fd96309ecbf812969dbffa3d6cf2d24d5c1db359e9c99dbad0ee1e4d5d821ec8"} Jan 20 17:27:14 crc kubenswrapper[4558]: I0120 17:27:14.068438 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-db-sync-rmm2x" Jan 20 17:27:14 crc kubenswrapper[4558]: I0120 17:27:14.068525 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-sync-rmm2x" event={"ID":"c7f6b91b-b55e-4af0-b496-46c80b92bad1","Type":"ContainerDied","Data":"35f91a33cf91d48d54b27ed083811563b85cddfdf68483da24f94953e58b67d4"} Jan 20 17:27:14 crc kubenswrapper[4558]: I0120 17:27:14.068636 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="35f91a33cf91d48d54b27ed083811563b85cddfdf68483da24f94953e58b67d4" Jan 20 17:27:14 crc kubenswrapper[4558]: I0120 17:27:14.071538 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"88b73d7f-ee50-4fdf-9fd7-37296c855d69","Type":"ContainerStarted","Data":"a17a87967f20a77dfc9f8a5084d14c3b4b2b006c99d8e894a2ad7bcef9243b71"} Jan 20 17:27:14 crc kubenswrapper[4558]: I0120 17:27:14.071567 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"88b73d7f-ee50-4fdf-9fd7-37296c855d69","Type":"ContainerStarted","Data":"9565d88e0b7a5af370711a97c3d99e03d66f44b360b4c988733b9bcc472c4eef"} Jan 20 17:27:14 crc kubenswrapper[4558]: I0120 17:27:14.073023 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:27:14 crc kubenswrapper[4558]: I0120 17:27:14.079065 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"f6a0777b-d04f-451b-9e3d-3f9ab0e19df8","Type":"ContainerStarted","Data":"5318567d652c11a8d32a83b41eaaa58746106e91b1ac59789f4bda5a8e0f9415"} Jan 20 17:27:14 crc kubenswrapper[4558]: I0120 17:27:14.079869 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:27:14 crc kubenswrapper[4558]: I0120 17:27:14.089062 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podStartSLOduration=2.089049513 podStartE2EDuration="2.089049513s" podCreationTimestamp="2026-01-20 17:27:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:27:14.085354526 +0000 UTC m=+2727.845692492" watchObservedRunningTime="2026-01-20 17:27:14.089049513 +0000 UTC m=+2727.849387480" Jan 20 17:27:14 crc kubenswrapper[4558]: I0120 17:27:14.138329 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/cinder-api-0" podStartSLOduration=4.138302071 podStartE2EDuration="4.138302071s" podCreationTimestamp="2026-01-20 17:27:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:27:14.104879725 +0000 UTC m=+2727.865217692" watchObservedRunningTime="2026-01-20 17:27:14.138302071 +0000 UTC m=+2727.898640028" Jan 20 17:27:14 crc kubenswrapper[4558]: I0120 17:27:14.158581 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/placement-55d4bc664d-k82t9"] Jan 20 17:27:14 crc kubenswrapper[4558]: E0120 17:27:14.159309 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c7f6b91b-b55e-4af0-b496-46c80b92bad1" containerName="placement-db-sync" Jan 20 17:27:14 crc kubenswrapper[4558]: I0120 17:27:14.159322 4558 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="c7f6b91b-b55e-4af0-b496-46c80b92bad1" containerName="placement-db-sync" Jan 20 17:27:14 crc kubenswrapper[4558]: I0120 17:27:14.159487 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c7f6b91b-b55e-4af0-b496-46c80b92bad1" containerName="placement-db-sync" Jan 20 17:27:14 crc kubenswrapper[4558]: I0120 17:27:14.160521 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-55d4bc664d-k82t9" Jan 20 17:27:14 crc kubenswrapper[4558]: I0120 17:27:14.196094 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-55d4bc664d-k82t9"] Jan 20 17:27:14 crc kubenswrapper[4558]: I0120 17:27:14.338908 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66e24f38-a98c-4444-8ee4-352266267985-combined-ca-bundle\") pod \"placement-55d4bc664d-k82t9\" (UID: \"66e24f38-a98c-4444-8ee4-352266267985\") " pod="openstack-kuttl-tests/placement-55d4bc664d-k82t9" Jan 20 17:27:14 crc kubenswrapper[4558]: I0120 17:27:14.338972 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/66e24f38-a98c-4444-8ee4-352266267985-config-data\") pod \"placement-55d4bc664d-k82t9\" (UID: \"66e24f38-a98c-4444-8ee4-352266267985\") " pod="openstack-kuttl-tests/placement-55d4bc664d-k82t9" Jan 20 17:27:14 crc kubenswrapper[4558]: I0120 17:27:14.339029 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/66e24f38-a98c-4444-8ee4-352266267985-scripts\") pod \"placement-55d4bc664d-k82t9\" (UID: \"66e24f38-a98c-4444-8ee4-352266267985\") " pod="openstack-kuttl-tests/placement-55d4bc664d-k82t9" Jan 20 17:27:14 crc kubenswrapper[4558]: I0120 17:27:14.339216 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/66e24f38-a98c-4444-8ee4-352266267985-logs\") pod \"placement-55d4bc664d-k82t9\" (UID: \"66e24f38-a98c-4444-8ee4-352266267985\") " pod="openstack-kuttl-tests/placement-55d4bc664d-k82t9" Jan 20 17:27:14 crc kubenswrapper[4558]: I0120 17:27:14.339371 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/66e24f38-a98c-4444-8ee4-352266267985-public-tls-certs\") pod \"placement-55d4bc664d-k82t9\" (UID: \"66e24f38-a98c-4444-8ee4-352266267985\") " pod="openstack-kuttl-tests/placement-55d4bc664d-k82t9" Jan 20 17:27:14 crc kubenswrapper[4558]: I0120 17:27:14.339966 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/66e24f38-a98c-4444-8ee4-352266267985-internal-tls-certs\") pod \"placement-55d4bc664d-k82t9\" (UID: \"66e24f38-a98c-4444-8ee4-352266267985\") " pod="openstack-kuttl-tests/placement-55d4bc664d-k82t9" Jan 20 17:27:14 crc kubenswrapper[4558]: I0120 17:27:14.340043 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ptc4f\" (UniqueName: \"kubernetes.io/projected/66e24f38-a98c-4444-8ee4-352266267985-kube-api-access-ptc4f\") pod \"placement-55d4bc664d-k82t9\" (UID: \"66e24f38-a98c-4444-8ee4-352266267985\") " 
pod="openstack-kuttl-tests/placement-55d4bc664d-k82t9" Jan 20 17:27:14 crc kubenswrapper[4558]: I0120 17:27:14.444998 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/66e24f38-a98c-4444-8ee4-352266267985-public-tls-certs\") pod \"placement-55d4bc664d-k82t9\" (UID: \"66e24f38-a98c-4444-8ee4-352266267985\") " pod="openstack-kuttl-tests/placement-55d4bc664d-k82t9" Jan 20 17:27:14 crc kubenswrapper[4558]: I0120 17:27:14.445272 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/66e24f38-a98c-4444-8ee4-352266267985-internal-tls-certs\") pod \"placement-55d4bc664d-k82t9\" (UID: \"66e24f38-a98c-4444-8ee4-352266267985\") " pod="openstack-kuttl-tests/placement-55d4bc664d-k82t9" Jan 20 17:27:14 crc kubenswrapper[4558]: I0120 17:27:14.445295 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ptc4f\" (UniqueName: \"kubernetes.io/projected/66e24f38-a98c-4444-8ee4-352266267985-kube-api-access-ptc4f\") pod \"placement-55d4bc664d-k82t9\" (UID: \"66e24f38-a98c-4444-8ee4-352266267985\") " pod="openstack-kuttl-tests/placement-55d4bc664d-k82t9" Jan 20 17:27:14 crc kubenswrapper[4558]: I0120 17:27:14.445356 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66e24f38-a98c-4444-8ee4-352266267985-combined-ca-bundle\") pod \"placement-55d4bc664d-k82t9\" (UID: \"66e24f38-a98c-4444-8ee4-352266267985\") " pod="openstack-kuttl-tests/placement-55d4bc664d-k82t9" Jan 20 17:27:14 crc kubenswrapper[4558]: I0120 17:27:14.445386 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/66e24f38-a98c-4444-8ee4-352266267985-config-data\") pod \"placement-55d4bc664d-k82t9\" (UID: \"66e24f38-a98c-4444-8ee4-352266267985\") " pod="openstack-kuttl-tests/placement-55d4bc664d-k82t9" Jan 20 17:27:14 crc kubenswrapper[4558]: I0120 17:27:14.445418 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/66e24f38-a98c-4444-8ee4-352266267985-scripts\") pod \"placement-55d4bc664d-k82t9\" (UID: \"66e24f38-a98c-4444-8ee4-352266267985\") " pod="openstack-kuttl-tests/placement-55d4bc664d-k82t9" Jan 20 17:27:14 crc kubenswrapper[4558]: I0120 17:27:14.445476 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/66e24f38-a98c-4444-8ee4-352266267985-logs\") pod \"placement-55d4bc664d-k82t9\" (UID: \"66e24f38-a98c-4444-8ee4-352266267985\") " pod="openstack-kuttl-tests/placement-55d4bc664d-k82t9" Jan 20 17:27:14 crc kubenswrapper[4558]: I0120 17:27:14.445852 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/66e24f38-a98c-4444-8ee4-352266267985-logs\") pod \"placement-55d4bc664d-k82t9\" (UID: \"66e24f38-a98c-4444-8ee4-352266267985\") " pod="openstack-kuttl-tests/placement-55d4bc664d-k82t9" Jan 20 17:27:14 crc kubenswrapper[4558]: I0120 17:27:14.450672 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/66e24f38-a98c-4444-8ee4-352266267985-public-tls-certs\") pod \"placement-55d4bc664d-k82t9\" (UID: \"66e24f38-a98c-4444-8ee4-352266267985\") " 
pod="openstack-kuttl-tests/placement-55d4bc664d-k82t9" Jan 20 17:27:14 crc kubenswrapper[4558]: I0120 17:27:14.451863 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/66e24f38-a98c-4444-8ee4-352266267985-scripts\") pod \"placement-55d4bc664d-k82t9\" (UID: \"66e24f38-a98c-4444-8ee4-352266267985\") " pod="openstack-kuttl-tests/placement-55d4bc664d-k82t9" Jan 20 17:27:14 crc kubenswrapper[4558]: I0120 17:27:14.453015 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/66e24f38-a98c-4444-8ee4-352266267985-internal-tls-certs\") pod \"placement-55d4bc664d-k82t9\" (UID: \"66e24f38-a98c-4444-8ee4-352266267985\") " pod="openstack-kuttl-tests/placement-55d4bc664d-k82t9" Jan 20 17:27:14 crc kubenswrapper[4558]: I0120 17:27:14.453896 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/66e24f38-a98c-4444-8ee4-352266267985-config-data\") pod \"placement-55d4bc664d-k82t9\" (UID: \"66e24f38-a98c-4444-8ee4-352266267985\") " pod="openstack-kuttl-tests/placement-55d4bc664d-k82t9" Jan 20 17:27:14 crc kubenswrapper[4558]: I0120 17:27:14.458876 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ptc4f\" (UniqueName: \"kubernetes.io/projected/66e24f38-a98c-4444-8ee4-352266267985-kube-api-access-ptc4f\") pod \"placement-55d4bc664d-k82t9\" (UID: \"66e24f38-a98c-4444-8ee4-352266267985\") " pod="openstack-kuttl-tests/placement-55d4bc664d-k82t9" Jan 20 17:27:14 crc kubenswrapper[4558]: I0120 17:27:14.459548 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66e24f38-a98c-4444-8ee4-352266267985-combined-ca-bundle\") pod \"placement-55d4bc664d-k82t9\" (UID: \"66e24f38-a98c-4444-8ee4-352266267985\") " pod="openstack-kuttl-tests/placement-55d4bc664d-k82t9" Jan 20 17:27:14 crc kubenswrapper[4558]: I0120 17:27:14.541921 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-55d4bc664d-k82t9" Jan 20 17:27:14 crc kubenswrapper[4558]: I0120 17:27:14.626276 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-db-sync-xq5zh" Jan 20 17:27:14 crc kubenswrapper[4558]: I0120 17:27:14.631252 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-db-sync-t7b8z" Jan 20 17:27:14 crc kubenswrapper[4558]: I0120 17:27:14.651049 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxn2h\" (UniqueName: \"kubernetes.io/projected/29c1b3a9-0bdc-423c-af7f-42f86f3693b5-kube-api-access-wxn2h\") pod \"29c1b3a9-0bdc-423c-af7f-42f86f3693b5\" (UID: \"29c1b3a9-0bdc-423c-af7f-42f86f3693b5\") " Jan 20 17:27:14 crc kubenswrapper[4558]: I0120 17:27:14.651153 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29c1b3a9-0bdc-423c-af7f-42f86f3693b5-config-data\") pod \"29c1b3a9-0bdc-423c-af7f-42f86f3693b5\" (UID: \"29c1b3a9-0bdc-423c-af7f-42f86f3693b5\") " Jan 20 17:27:14 crc kubenswrapper[4558]: I0120 17:27:14.651315 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2442cca-1f6c-4531-b1f4-2b873ce42964-combined-ca-bundle\") pod \"c2442cca-1f6c-4531-b1f4-2b873ce42964\" (UID: \"c2442cca-1f6c-4531-b1f4-2b873ce42964\") " Jan 20 17:27:14 crc kubenswrapper[4558]: I0120 17:27:14.651346 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lk77l\" (UniqueName: \"kubernetes.io/projected/c2442cca-1f6c-4531-b1f4-2b873ce42964-kube-api-access-lk77l\") pod \"c2442cca-1f6c-4531-b1f4-2b873ce42964\" (UID: \"c2442cca-1f6c-4531-b1f4-2b873ce42964\") " Jan 20 17:27:14 crc kubenswrapper[4558]: I0120 17:27:14.651432 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c2442cca-1f6c-4531-b1f4-2b873ce42964-db-sync-config-data\") pod \"c2442cca-1f6c-4531-b1f4-2b873ce42964\" (UID: \"c2442cca-1f6c-4531-b1f4-2b873ce42964\") " Jan 20 17:27:14 crc kubenswrapper[4558]: I0120 17:27:14.651516 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29c1b3a9-0bdc-423c-af7f-42f86f3693b5-combined-ca-bundle\") pod \"29c1b3a9-0bdc-423c-af7f-42f86f3693b5\" (UID: \"29c1b3a9-0bdc-423c-af7f-42f86f3693b5\") " Jan 20 17:27:14 crc kubenswrapper[4558]: I0120 17:27:14.659409 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/29c1b3a9-0bdc-423c-af7f-42f86f3693b5-kube-api-access-wxn2h" (OuterVolumeSpecName: "kube-api-access-wxn2h") pod "29c1b3a9-0bdc-423c-af7f-42f86f3693b5" (UID: "29c1b3a9-0bdc-423c-af7f-42f86f3693b5"). InnerVolumeSpecName "kube-api-access-wxn2h". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:27:14 crc kubenswrapper[4558]: I0120 17:27:14.661314 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c2442cca-1f6c-4531-b1f4-2b873ce42964-kube-api-access-lk77l" (OuterVolumeSpecName: "kube-api-access-lk77l") pod "c2442cca-1f6c-4531-b1f4-2b873ce42964" (UID: "c2442cca-1f6c-4531-b1f4-2b873ce42964"). InnerVolumeSpecName "kube-api-access-lk77l". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:27:14 crc kubenswrapper[4558]: I0120 17:27:14.669734 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2442cca-1f6c-4531-b1f4-2b873ce42964-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "c2442cca-1f6c-4531-b1f4-2b873ce42964" (UID: "c2442cca-1f6c-4531-b1f4-2b873ce42964"). 
InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:14 crc kubenswrapper[4558]: I0120 17:27:14.692958 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29c1b3a9-0bdc-423c-af7f-42f86f3693b5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "29c1b3a9-0bdc-423c-af7f-42f86f3693b5" (UID: "29c1b3a9-0bdc-423c-af7f-42f86f3693b5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:14 crc kubenswrapper[4558]: I0120 17:27:14.707318 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2442cca-1f6c-4531-b1f4-2b873ce42964-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c2442cca-1f6c-4531-b1f4-2b873ce42964" (UID: "c2442cca-1f6c-4531-b1f4-2b873ce42964"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:14 crc kubenswrapper[4558]: I0120 17:27:14.732805 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29c1b3a9-0bdc-423c-af7f-42f86f3693b5-config-data" (OuterVolumeSpecName: "config-data") pod "29c1b3a9-0bdc-423c-af7f-42f86f3693b5" (UID: "29c1b3a9-0bdc-423c-af7f-42f86f3693b5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:14 crc kubenswrapper[4558]: I0120 17:27:14.756214 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2442cca-1f6c-4531-b1f4-2b873ce42964-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:14 crc kubenswrapper[4558]: I0120 17:27:14.756243 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lk77l\" (UniqueName: \"kubernetes.io/projected/c2442cca-1f6c-4531-b1f4-2b873ce42964-kube-api-access-lk77l\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:14 crc kubenswrapper[4558]: I0120 17:27:14.756254 4558 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c2442cca-1f6c-4531-b1f4-2b873ce42964-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:14 crc kubenswrapper[4558]: I0120 17:27:14.756264 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29c1b3a9-0bdc-423c-af7f-42f86f3693b5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:14 crc kubenswrapper[4558]: I0120 17:27:14.756274 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxn2h\" (UniqueName: \"kubernetes.io/projected/29c1b3a9-0bdc-423c-af7f-42f86f3693b5-kube-api-access-wxn2h\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:14 crc kubenswrapper[4558]: I0120 17:27:14.756284 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/29c1b3a9-0bdc-423c-af7f-42f86f3693b5-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:15 crc kubenswrapper[4558]: E0120 17:27:15.016419 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4954099cb46eb467eb55768a743f3ad68bf55146245f1fb2f43a81ea0ff802cc is running failed: container process not found" containerID="4954099cb46eb467eb55768a743f3ad68bf55146245f1fb2f43a81ea0ff802cc" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:27:15 crc 
kubenswrapper[4558]: E0120 17:27:15.020484 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4954099cb46eb467eb55768a743f3ad68bf55146245f1fb2f43a81ea0ff802cc is running failed: container process not found" containerID="4954099cb46eb467eb55768a743f3ad68bf55146245f1fb2f43a81ea0ff802cc" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:27:15 crc kubenswrapper[4558]: E0120 17:27:15.022695 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4954099cb46eb467eb55768a743f3ad68bf55146245f1fb2f43a81ea0ff802cc is running failed: container process not found" containerID="4954099cb46eb467eb55768a743f3ad68bf55146245f1fb2f43a81ea0ff802cc" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:27:15 crc kubenswrapper[4558]: E0120 17:27:15.022732 4558 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4954099cb46eb467eb55768a743f3ad68bf55146245f1fb2f43a81ea0ff802cc is running failed: container process not found" probeType="Readiness" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="5d490bdc-81eb-42cd-9a5d-ae4a8d2b4e51" containerName="nova-cell0-conductor-conductor" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.029809 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-55d4bc664d-k82t9"] Jan 20 17:27:15 crc kubenswrapper[4558]: W0120 17:27:15.050859 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod66e24f38_a98c_4444_8ee4_352266267985.slice/crio-935ddac25769761da4ad627f69a5c0a915bcc758bfd33fa74ab5eaa5b85a8844 WatchSource:0}: Error finding container 935ddac25769761da4ad627f69a5c0a915bcc758bfd33fa74ab5eaa5b85a8844: Status 404 returned error can't find the container with id 935ddac25769761da4ad627f69a5c0a915bcc758bfd33fa74ab5eaa5b85a8844 Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.089924 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-55d4bc664d-k82t9" event={"ID":"66e24f38-a98c-4444-8ee4-352266267985","Type":"ContainerStarted","Data":"935ddac25769761da4ad627f69a5c0a915bcc758bfd33fa74ab5eaa5b85a8844"} Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.092474 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-sync-xq5zh" event={"ID":"29c1b3a9-0bdc-423c-af7f-42f86f3693b5","Type":"ContainerDied","Data":"872d882ed3397d60cfc57175054f837549bbe824340488a1d438aea0610e5316"} Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.092511 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="872d882ed3397d60cfc57175054f837549bbe824340488a1d438aea0610e5316" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.092530 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-db-sync-xq5zh" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.098518 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"e81d2508-e298-463c-9031-b9d8e486d566","Type":"ContainerStarted","Data":"c06768c089cc106f807b1405688b9c79fd40a3d43e579f7beb4c7d8ba6ce7297"} Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.100679 4558 generic.go:334] "Generic (PLEG): container finished" podID="5d490bdc-81eb-42cd-9a5d-ae4a8d2b4e51" containerID="4954099cb46eb467eb55768a743f3ad68bf55146245f1fb2f43a81ea0ff802cc" exitCode=0 Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.100750 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"5d490bdc-81eb-42cd-9a5d-ae4a8d2b4e51","Type":"ContainerDied","Data":"4954099cb46eb467eb55768a743f3ad68bf55146245f1fb2f43a81ea0ff802cc"} Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.114857 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-internal-api-0" podStartSLOduration=3.114841882 podStartE2EDuration="3.114841882s" podCreationTimestamp="2026-01-20 17:27:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:27:15.111769535 +0000 UTC m=+2728.872107502" watchObservedRunningTime="2026-01-20 17:27:15.114841882 +0000 UTC m=+2728.875179848" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.138695 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-sync-t7b8z" event={"ID":"c2442cca-1f6c-4531-b1f4-2b873ce42964","Type":"ContainerDied","Data":"6feb415f0b29263a8426672fd5863d7ffb5e3949e847ae77533d83e6a4c9dc72"} Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.138728 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6feb415f0b29263a8426672fd5863d7ffb5e3949e847ae77533d83e6a4c9dc72" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.138788 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-db-sync-t7b8z" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.169137 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"592927c6-9b42-411a-8aec-40cf6183e32a","Type":"ContainerStarted","Data":"981127416a83b8463012a21079019f3fb0205f2a34d9b217c94d3c09ae42bc02"} Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.231671 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-hsd6l"] Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.312234 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-hsd6l"] Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.339851 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-worker-5cbf796c47-9gdgl"] Jan 20 17:27:15 crc kubenswrapper[4558]: E0120 17:27:15.340391 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29c1b3a9-0bdc-423c-af7f-42f86f3693b5" containerName="keystone-db-sync" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.340421 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="29c1b3a9-0bdc-423c-af7f-42f86f3693b5" containerName="keystone-db-sync" Jan 20 17:27:15 crc kubenswrapper[4558]: E0120 17:27:15.340453 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2442cca-1f6c-4531-b1f4-2b873ce42964" containerName="barbican-db-sync" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.340459 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2442cca-1f6c-4531-b1f4-2b873ce42964" containerName="barbican-db-sync" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.340843 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="29c1b3a9-0bdc-423c-af7f-42f86f3693b5" containerName="keystone-db-sync" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.340865 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c2442cca-1f6c-4531-b1f4-2b873ce42964" containerName="barbican-db-sync" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.342926 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-5cbf796c47-9gdgl" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.358097 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-86d4665d84-cndx4"] Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.359407 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-86d4665d84-cndx4" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.387333 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-worker-5cbf796c47-9gdgl"] Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.396158 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-api-84969c976-hhcrx"] Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.397477 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-api-84969c976-hhcrx" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.408757 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-86d4665d84-cndx4"] Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.416344 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-api-84969c976-hhcrx"] Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.417844 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-external-api-0" podStartSLOduration=4.417831886 podStartE2EDuration="4.417831886s" podCreationTimestamp="2026-01-20 17:27:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:27:15.275497947 +0000 UTC m=+2729.035835915" watchObservedRunningTime="2026-01-20 17:27:15.417831886 +0000 UTC m=+2729.178169873" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.427020 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-28nj8"] Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.428753 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-28nj8" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.430364 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"osp-secret" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.432238 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-28nj8"] Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.492232 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3b9858cf-2020-458e-bcf6-407c6853a962-config-data-custom\") pod \"barbican-worker-5cbf796c47-9gdgl\" (UID: \"3b9858cf-2020-458e-bcf6-407c6853a962\") " pod="openstack-kuttl-tests/barbican-worker-5cbf796c47-9gdgl" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.492285 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/184a9f2b-eab7-4221-a900-746e063662a8-internal-tls-certs\") pod \"barbican-api-84969c976-hhcrx\" (UID: \"184a9f2b-eab7-4221-a900-746e063662a8\") " pod="openstack-kuttl-tests/barbican-api-84969c976-hhcrx" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.492321 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mzh8m\" (UniqueName: \"kubernetes.io/projected/184a9f2b-eab7-4221-a900-746e063662a8-kube-api-access-mzh8m\") pod \"barbican-api-84969c976-hhcrx\" (UID: \"184a9f2b-eab7-4221-a900-746e063662a8\") " pod="openstack-kuttl-tests/barbican-api-84969c976-hhcrx" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.492346 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/125597fc-d3b4-44c1-a0a6-7dccbaebe7dd-logs\") pod \"barbican-keystone-listener-86d4665d84-cndx4\" (UID: \"125597fc-d3b4-44c1-a0a6-7dccbaebe7dd\") " pod="openstack-kuttl-tests/barbican-keystone-listener-86d4665d84-cndx4" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.492703 4558 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/184a9f2b-eab7-4221-a900-746e063662a8-config-data\") pod \"barbican-api-84969c976-hhcrx\" (UID: \"184a9f2b-eab7-4221-a900-746e063662a8\") " pod="openstack-kuttl-tests/barbican-api-84969c976-hhcrx" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.492834 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/125597fc-d3b4-44c1-a0a6-7dccbaebe7dd-config-data-custom\") pod \"barbican-keystone-listener-86d4665d84-cndx4\" (UID: \"125597fc-d3b4-44c1-a0a6-7dccbaebe7dd\") " pod="openstack-kuttl-tests/barbican-keystone-listener-86d4665d84-cndx4" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.493108 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/184a9f2b-eab7-4221-a900-746e063662a8-logs\") pod \"barbican-api-84969c976-hhcrx\" (UID: \"184a9f2b-eab7-4221-a900-746e063662a8\") " pod="openstack-kuttl-tests/barbican-api-84969c976-hhcrx" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.493239 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wbw24\" (UniqueName: \"kubernetes.io/projected/3b9858cf-2020-458e-bcf6-407c6853a962-kube-api-access-wbw24\") pod \"barbican-worker-5cbf796c47-9gdgl\" (UID: \"3b9858cf-2020-458e-bcf6-407c6853a962\") " pod="openstack-kuttl-tests/barbican-worker-5cbf796c47-9gdgl" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.493269 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3b9858cf-2020-458e-bcf6-407c6853a962-logs\") pod \"barbican-worker-5cbf796c47-9gdgl\" (UID: \"3b9858cf-2020-458e-bcf6-407c6853a962\") " pod="openstack-kuttl-tests/barbican-worker-5cbf796c47-9gdgl" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.493338 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b9858cf-2020-458e-bcf6-407c6853a962-combined-ca-bundle\") pod \"barbican-worker-5cbf796c47-9gdgl\" (UID: \"3b9858cf-2020-458e-bcf6-407c6853a962\") " pod="openstack-kuttl-tests/barbican-worker-5cbf796c47-9gdgl" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.493390 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/125597fc-d3b4-44c1-a0a6-7dccbaebe7dd-combined-ca-bundle\") pod \"barbican-keystone-listener-86d4665d84-cndx4\" (UID: \"125597fc-d3b4-44c1-a0a6-7dccbaebe7dd\") " pod="openstack-kuttl-tests/barbican-keystone-listener-86d4665d84-cndx4" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.493464 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/184a9f2b-eab7-4221-a900-746e063662a8-public-tls-certs\") pod \"barbican-api-84969c976-hhcrx\" (UID: \"184a9f2b-eab7-4221-a900-746e063662a8\") " pod="openstack-kuttl-tests/barbican-api-84969c976-hhcrx" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.493552 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/184a9f2b-eab7-4221-a900-746e063662a8-config-data-custom\") pod \"barbican-api-84969c976-hhcrx\" (UID: \"184a9f2b-eab7-4221-a900-746e063662a8\") " pod="openstack-kuttl-tests/barbican-api-84969c976-hhcrx" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.493617 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/184a9f2b-eab7-4221-a900-746e063662a8-combined-ca-bundle\") pod \"barbican-api-84969c976-hhcrx\" (UID: \"184a9f2b-eab7-4221-a900-746e063662a8\") " pod="openstack-kuttl-tests/barbican-api-84969c976-hhcrx" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.493649 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h58lj\" (UniqueName: \"kubernetes.io/projected/125597fc-d3b4-44c1-a0a6-7dccbaebe7dd-kube-api-access-h58lj\") pod \"barbican-keystone-listener-86d4665d84-cndx4\" (UID: \"125597fc-d3b4-44c1-a0a6-7dccbaebe7dd\") " pod="openstack-kuttl-tests/barbican-keystone-listener-86d4665d84-cndx4" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.493671 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3b9858cf-2020-458e-bcf6-407c6853a962-config-data\") pod \"barbican-worker-5cbf796c47-9gdgl\" (UID: \"3b9858cf-2020-458e-bcf6-407c6853a962\") " pod="openstack-kuttl-tests/barbican-worker-5cbf796c47-9gdgl" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.493694 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/125597fc-d3b4-44c1-a0a6-7dccbaebe7dd-config-data\") pod \"barbican-keystone-listener-86d4665d84-cndx4\" (UID: \"125597fc-d3b4-44c1-a0a6-7dccbaebe7dd\") " pod="openstack-kuttl-tests/barbican-keystone-listener-86d4665d84-cndx4" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.518704 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.596108 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/125597fc-d3b4-44c1-a0a6-7dccbaebe7dd-config-data-custom\") pod \"barbican-keystone-listener-86d4665d84-cndx4\" (UID: \"125597fc-d3b4-44c1-a0a6-7dccbaebe7dd\") " pod="openstack-kuttl-tests/barbican-keystone-listener-86d4665d84-cndx4" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.596288 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/184a9f2b-eab7-4221-a900-746e063662a8-logs\") pod \"barbican-api-84969c976-hhcrx\" (UID: \"184a9f2b-eab7-4221-a900-746e063662a8\") " pod="openstack-kuttl-tests/barbican-api-84969c976-hhcrx" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.596339 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wbw24\" (UniqueName: \"kubernetes.io/projected/3b9858cf-2020-458e-bcf6-407c6853a962-kube-api-access-wbw24\") pod \"barbican-worker-5cbf796c47-9gdgl\" (UID: \"3b9858cf-2020-458e-bcf6-407c6853a962\") " pod="openstack-kuttl-tests/barbican-worker-5cbf796c47-9gdgl" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.596365 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3b9858cf-2020-458e-bcf6-407c6853a962-logs\") pod \"barbican-worker-5cbf796c47-9gdgl\" (UID: \"3b9858cf-2020-458e-bcf6-407c6853a962\") " pod="openstack-kuttl-tests/barbican-worker-5cbf796c47-9gdgl" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.596410 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j9rvg\" (UniqueName: \"kubernetes.io/projected/005155ac-dadf-4840-9b7c-7eb1b0f3252e-kube-api-access-j9rvg\") pod \"keystone-bootstrap-28nj8\" (UID: \"005155ac-dadf-4840-9b7c-7eb1b0f3252e\") " pod="openstack-kuttl-tests/keystone-bootstrap-28nj8" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.596436 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b9858cf-2020-458e-bcf6-407c6853a962-combined-ca-bundle\") pod \"barbican-worker-5cbf796c47-9gdgl\" (UID: \"3b9858cf-2020-458e-bcf6-407c6853a962\") " pod="openstack-kuttl-tests/barbican-worker-5cbf796c47-9gdgl" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.596464 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/005155ac-dadf-4840-9b7c-7eb1b0f3252e-credential-keys\") pod \"keystone-bootstrap-28nj8\" (UID: \"005155ac-dadf-4840-9b7c-7eb1b0f3252e\") " pod="openstack-kuttl-tests/keystone-bootstrap-28nj8" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.596488 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/125597fc-d3b4-44c1-a0a6-7dccbaebe7dd-combined-ca-bundle\") pod \"barbican-keystone-listener-86d4665d84-cndx4\" (UID: \"125597fc-d3b4-44c1-a0a6-7dccbaebe7dd\") " pod="openstack-kuttl-tests/barbican-keystone-listener-86d4665d84-cndx4" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.596521 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/184a9f2b-eab7-4221-a900-746e063662a8-public-tls-certs\") pod \"barbican-api-84969c976-hhcrx\" (UID: \"184a9f2b-eab7-4221-a900-746e063662a8\") " pod="openstack-kuttl-tests/barbican-api-84969c976-hhcrx" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.596570 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/184a9f2b-eab7-4221-a900-746e063662a8-config-data-custom\") pod \"barbican-api-84969c976-hhcrx\" (UID: \"184a9f2b-eab7-4221-a900-746e063662a8\") " pod="openstack-kuttl-tests/barbican-api-84969c976-hhcrx" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.596611 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h58lj\" (UniqueName: \"kubernetes.io/projected/125597fc-d3b4-44c1-a0a6-7dccbaebe7dd-kube-api-access-h58lj\") pod \"barbican-keystone-listener-86d4665d84-cndx4\" (UID: \"125597fc-d3b4-44c1-a0a6-7dccbaebe7dd\") " pod="openstack-kuttl-tests/barbican-keystone-listener-86d4665d84-cndx4" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.596632 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/184a9f2b-eab7-4221-a900-746e063662a8-combined-ca-bundle\") pod \"barbican-api-84969c976-hhcrx\" (UID: \"184a9f2b-eab7-4221-a900-746e063662a8\") " pod="openstack-kuttl-tests/barbican-api-84969c976-hhcrx" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.596653 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/125597fc-d3b4-44c1-a0a6-7dccbaebe7dd-config-data\") pod \"barbican-keystone-listener-86d4665d84-cndx4\" (UID: \"125597fc-d3b4-44c1-a0a6-7dccbaebe7dd\") " pod="openstack-kuttl-tests/barbican-keystone-listener-86d4665d84-cndx4" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.596671 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3b9858cf-2020-458e-bcf6-407c6853a962-config-data\") pod \"barbican-worker-5cbf796c47-9gdgl\" (UID: \"3b9858cf-2020-458e-bcf6-407c6853a962\") " pod="openstack-kuttl-tests/barbican-worker-5cbf796c47-9gdgl" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.596704 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3b9858cf-2020-458e-bcf6-407c6853a962-config-data-custom\") pod \"barbican-worker-5cbf796c47-9gdgl\" (UID: \"3b9858cf-2020-458e-bcf6-407c6853a962\") " pod="openstack-kuttl-tests/barbican-worker-5cbf796c47-9gdgl" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.596724 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/184a9f2b-eab7-4221-a900-746e063662a8-internal-tls-certs\") pod \"barbican-api-84969c976-hhcrx\" (UID: \"184a9f2b-eab7-4221-a900-746e063662a8\") " pod="openstack-kuttl-tests/barbican-api-84969c976-hhcrx" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.596765 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mzh8m\" (UniqueName: \"kubernetes.io/projected/184a9f2b-eab7-4221-a900-746e063662a8-kube-api-access-mzh8m\") pod \"barbican-api-84969c976-hhcrx\" (UID: \"184a9f2b-eab7-4221-a900-746e063662a8\") " 
pod="openstack-kuttl-tests/barbican-api-84969c976-hhcrx" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.596802 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/005155ac-dadf-4840-9b7c-7eb1b0f3252e-scripts\") pod \"keystone-bootstrap-28nj8\" (UID: \"005155ac-dadf-4840-9b7c-7eb1b0f3252e\") " pod="openstack-kuttl-tests/keystone-bootstrap-28nj8" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.600510 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/125597fc-d3b4-44c1-a0a6-7dccbaebe7dd-logs\") pod \"barbican-keystone-listener-86d4665d84-cndx4\" (UID: \"125597fc-d3b4-44c1-a0a6-7dccbaebe7dd\") " pod="openstack-kuttl-tests/barbican-keystone-listener-86d4665d84-cndx4" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.600612 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/005155ac-dadf-4840-9b7c-7eb1b0f3252e-combined-ca-bundle\") pod \"keystone-bootstrap-28nj8\" (UID: \"005155ac-dadf-4840-9b7c-7eb1b0f3252e\") " pod="openstack-kuttl-tests/keystone-bootstrap-28nj8" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.600647 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/005155ac-dadf-4840-9b7c-7eb1b0f3252e-config-data\") pod \"keystone-bootstrap-28nj8\" (UID: \"005155ac-dadf-4840-9b7c-7eb1b0f3252e\") " pod="openstack-kuttl-tests/keystone-bootstrap-28nj8" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.600680 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/005155ac-dadf-4840-9b7c-7eb1b0f3252e-fernet-keys\") pod \"keystone-bootstrap-28nj8\" (UID: \"005155ac-dadf-4840-9b7c-7eb1b0f3252e\") " pod="openstack-kuttl-tests/keystone-bootstrap-28nj8" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.600813 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/184a9f2b-eab7-4221-a900-746e063662a8-config-data\") pod \"barbican-api-84969c976-hhcrx\" (UID: \"184a9f2b-eab7-4221-a900-746e063662a8\") " pod="openstack-kuttl-tests/barbican-api-84969c976-hhcrx" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.601028 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/125597fc-d3b4-44c1-a0a6-7dccbaebe7dd-logs\") pod \"barbican-keystone-listener-86d4665d84-cndx4\" (UID: \"125597fc-d3b4-44c1-a0a6-7dccbaebe7dd\") " pod="openstack-kuttl-tests/barbican-keystone-listener-86d4665d84-cndx4" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.597608 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3b9858cf-2020-458e-bcf6-407c6853a962-logs\") pod \"barbican-worker-5cbf796c47-9gdgl\" (UID: \"3b9858cf-2020-458e-bcf6-407c6853a962\") " pod="openstack-kuttl-tests/barbican-worker-5cbf796c47-9gdgl" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.600416 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/184a9f2b-eab7-4221-a900-746e063662a8-logs\") pod \"barbican-api-84969c976-hhcrx\" (UID: 
\"184a9f2b-eab7-4221-a900-746e063662a8\") " pod="openstack-kuttl-tests/barbican-api-84969c976-hhcrx" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.607425 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/184a9f2b-eab7-4221-a900-746e063662a8-public-tls-certs\") pod \"barbican-api-84969c976-hhcrx\" (UID: \"184a9f2b-eab7-4221-a900-746e063662a8\") " pod="openstack-kuttl-tests/barbican-api-84969c976-hhcrx" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.607451 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/184a9f2b-eab7-4221-a900-746e063662a8-config-data-custom\") pod \"barbican-api-84969c976-hhcrx\" (UID: \"184a9f2b-eab7-4221-a900-746e063662a8\") " pod="openstack-kuttl-tests/barbican-api-84969c976-hhcrx" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.608077 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/125597fc-d3b4-44c1-a0a6-7dccbaebe7dd-combined-ca-bundle\") pod \"barbican-keystone-listener-86d4665d84-cndx4\" (UID: \"125597fc-d3b4-44c1-a0a6-7dccbaebe7dd\") " pod="openstack-kuttl-tests/barbican-keystone-listener-86d4665d84-cndx4" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.608134 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b9858cf-2020-458e-bcf6-407c6853a962-combined-ca-bundle\") pod \"barbican-worker-5cbf796c47-9gdgl\" (UID: \"3b9858cf-2020-458e-bcf6-407c6853a962\") " pod="openstack-kuttl-tests/barbican-worker-5cbf796c47-9gdgl" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.608522 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/125597fc-d3b4-44c1-a0a6-7dccbaebe7dd-config-data-custom\") pod \"barbican-keystone-listener-86d4665d84-cndx4\" (UID: \"125597fc-d3b4-44c1-a0a6-7dccbaebe7dd\") " pod="openstack-kuttl-tests/barbican-keystone-listener-86d4665d84-cndx4" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.608624 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/184a9f2b-eab7-4221-a900-746e063662a8-internal-tls-certs\") pod \"barbican-api-84969c976-hhcrx\" (UID: \"184a9f2b-eab7-4221-a900-746e063662a8\") " pod="openstack-kuttl-tests/barbican-api-84969c976-hhcrx" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.609190 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/184a9f2b-eab7-4221-a900-746e063662a8-combined-ca-bundle\") pod \"barbican-api-84969c976-hhcrx\" (UID: \"184a9f2b-eab7-4221-a900-746e063662a8\") " pod="openstack-kuttl-tests/barbican-api-84969c976-hhcrx" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.609247 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/125597fc-d3b4-44c1-a0a6-7dccbaebe7dd-config-data\") pod \"barbican-keystone-listener-86d4665d84-cndx4\" (UID: \"125597fc-d3b4-44c1-a0a6-7dccbaebe7dd\") " pod="openstack-kuttl-tests/barbican-keystone-listener-86d4665d84-cndx4" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.609320 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/3b9858cf-2020-458e-bcf6-407c6853a962-config-data-custom\") pod \"barbican-worker-5cbf796c47-9gdgl\" (UID: \"3b9858cf-2020-458e-bcf6-407c6853a962\") " pod="openstack-kuttl-tests/barbican-worker-5cbf796c47-9gdgl" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.610130 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/184a9f2b-eab7-4221-a900-746e063662a8-config-data\") pod \"barbican-api-84969c976-hhcrx\" (UID: \"184a9f2b-eab7-4221-a900-746e063662a8\") " pod="openstack-kuttl-tests/barbican-api-84969c976-hhcrx" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.610744 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3b9858cf-2020-458e-bcf6-407c6853a962-config-data\") pod \"barbican-worker-5cbf796c47-9gdgl\" (UID: \"3b9858cf-2020-458e-bcf6-407c6853a962\") " pod="openstack-kuttl-tests/barbican-worker-5cbf796c47-9gdgl" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.615063 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h58lj\" (UniqueName: \"kubernetes.io/projected/125597fc-d3b4-44c1-a0a6-7dccbaebe7dd-kube-api-access-h58lj\") pod \"barbican-keystone-listener-86d4665d84-cndx4\" (UID: \"125597fc-d3b4-44c1-a0a6-7dccbaebe7dd\") " pod="openstack-kuttl-tests/barbican-keystone-listener-86d4665d84-cndx4" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.615439 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mzh8m\" (UniqueName: \"kubernetes.io/projected/184a9f2b-eab7-4221-a900-746e063662a8-kube-api-access-mzh8m\") pod \"barbican-api-84969c976-hhcrx\" (UID: \"184a9f2b-eab7-4221-a900-746e063662a8\") " pod="openstack-kuttl-tests/barbican-api-84969c976-hhcrx" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.617257 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wbw24\" (UniqueName: \"kubernetes.io/projected/3b9858cf-2020-458e-bcf6-407c6853a962-kube-api-access-wbw24\") pod \"barbican-worker-5cbf796c47-9gdgl\" (UID: \"3b9858cf-2020-458e-bcf6-407c6853a962\") " pod="openstack-kuttl-tests/barbican-worker-5cbf796c47-9gdgl" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.704433 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d490bdc-81eb-42cd-9a5d-ae4a8d2b4e51-combined-ca-bundle\") pod \"5d490bdc-81eb-42cd-9a5d-ae4a8d2b4e51\" (UID: \"5d490bdc-81eb-42cd-9a5d-ae4a8d2b4e51\") " Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.704837 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-88wbn\" (UniqueName: \"kubernetes.io/projected/5d490bdc-81eb-42cd-9a5d-ae4a8d2b4e51-kube-api-access-88wbn\") pod \"5d490bdc-81eb-42cd-9a5d-ae4a8d2b4e51\" (UID: \"5d490bdc-81eb-42cd-9a5d-ae4a8d2b4e51\") " Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.704916 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d490bdc-81eb-42cd-9a5d-ae4a8d2b4e51-config-data\") pod \"5d490bdc-81eb-42cd-9a5d-ae4a8d2b4e51\" (UID: \"5d490bdc-81eb-42cd-9a5d-ae4a8d2b4e51\") " Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.705563 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/005155ac-dadf-4840-9b7c-7eb1b0f3252e-scripts\") pod \"keystone-bootstrap-28nj8\" (UID: \"005155ac-dadf-4840-9b7c-7eb1b0f3252e\") " pod="openstack-kuttl-tests/keystone-bootstrap-28nj8" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.705617 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/005155ac-dadf-4840-9b7c-7eb1b0f3252e-combined-ca-bundle\") pod \"keystone-bootstrap-28nj8\" (UID: \"005155ac-dadf-4840-9b7c-7eb1b0f3252e\") " pod="openstack-kuttl-tests/keystone-bootstrap-28nj8" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.705635 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/005155ac-dadf-4840-9b7c-7eb1b0f3252e-config-data\") pod \"keystone-bootstrap-28nj8\" (UID: \"005155ac-dadf-4840-9b7c-7eb1b0f3252e\") " pod="openstack-kuttl-tests/keystone-bootstrap-28nj8" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.705660 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/005155ac-dadf-4840-9b7c-7eb1b0f3252e-fernet-keys\") pod \"keystone-bootstrap-28nj8\" (UID: \"005155ac-dadf-4840-9b7c-7eb1b0f3252e\") " pod="openstack-kuttl-tests/keystone-bootstrap-28nj8" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.705810 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j9rvg\" (UniqueName: \"kubernetes.io/projected/005155ac-dadf-4840-9b7c-7eb1b0f3252e-kube-api-access-j9rvg\") pod \"keystone-bootstrap-28nj8\" (UID: \"005155ac-dadf-4840-9b7c-7eb1b0f3252e\") " pod="openstack-kuttl-tests/keystone-bootstrap-28nj8" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.705836 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/005155ac-dadf-4840-9b7c-7eb1b0f3252e-credential-keys\") pod \"keystone-bootstrap-28nj8\" (UID: \"005155ac-dadf-4840-9b7c-7eb1b0f3252e\") " pod="openstack-kuttl-tests/keystone-bootstrap-28nj8" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.708469 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5d490bdc-81eb-42cd-9a5d-ae4a8d2b4e51-kube-api-access-88wbn" (OuterVolumeSpecName: "kube-api-access-88wbn") pod "5d490bdc-81eb-42cd-9a5d-ae4a8d2b4e51" (UID: "5d490bdc-81eb-42cd-9a5d-ae4a8d2b4e51"). InnerVolumeSpecName "kube-api-access-88wbn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.710229 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/005155ac-dadf-4840-9b7c-7eb1b0f3252e-fernet-keys\") pod \"keystone-bootstrap-28nj8\" (UID: \"005155ac-dadf-4840-9b7c-7eb1b0f3252e\") " pod="openstack-kuttl-tests/keystone-bootstrap-28nj8" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.711660 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/005155ac-dadf-4840-9b7c-7eb1b0f3252e-credential-keys\") pod \"keystone-bootstrap-28nj8\" (UID: \"005155ac-dadf-4840-9b7c-7eb1b0f3252e\") " pod="openstack-kuttl-tests/keystone-bootstrap-28nj8" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.711900 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/005155ac-dadf-4840-9b7c-7eb1b0f3252e-scripts\") pod \"keystone-bootstrap-28nj8\" (UID: \"005155ac-dadf-4840-9b7c-7eb1b0f3252e\") " pod="openstack-kuttl-tests/keystone-bootstrap-28nj8" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.712395 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/005155ac-dadf-4840-9b7c-7eb1b0f3252e-config-data\") pod \"keystone-bootstrap-28nj8\" (UID: \"005155ac-dadf-4840-9b7c-7eb1b0f3252e\") " pod="openstack-kuttl-tests/keystone-bootstrap-28nj8" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.716382 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/005155ac-dadf-4840-9b7c-7eb1b0f3252e-combined-ca-bundle\") pod \"keystone-bootstrap-28nj8\" (UID: \"005155ac-dadf-4840-9b7c-7eb1b0f3252e\") " pod="openstack-kuttl-tests/keystone-bootstrap-28nj8" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.722802 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j9rvg\" (UniqueName: \"kubernetes.io/projected/005155ac-dadf-4840-9b7c-7eb1b0f3252e-kube-api-access-j9rvg\") pod \"keystone-bootstrap-28nj8\" (UID: \"005155ac-dadf-4840-9b7c-7eb1b0f3252e\") " pod="openstack-kuttl-tests/keystone-bootstrap-28nj8" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.729581 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d490bdc-81eb-42cd-9a5d-ae4a8d2b4e51-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5d490bdc-81eb-42cd-9a5d-ae4a8d2b4e51" (UID: "5d490bdc-81eb-42cd-9a5d-ae4a8d2b4e51"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.731455 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d490bdc-81eb-42cd-9a5d-ae4a8d2b4e51-config-data" (OuterVolumeSpecName: "config-data") pod "5d490bdc-81eb-42cd-9a5d-ae4a8d2b4e51" (UID: "5d490bdc-81eb-42cd-9a5d-ae4a8d2b4e51"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.787402 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-5cbf796c47-9gdgl" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.809668 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d490bdc-81eb-42cd-9a5d-ae4a8d2b4e51-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.809708 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-88wbn\" (UniqueName: \"kubernetes.io/projected/5d490bdc-81eb-42cd-9a5d-ae4a8d2b4e51-kube-api-access-88wbn\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.809725 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d490bdc-81eb-42cd-9a5d-ae4a8d2b4e51-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.812497 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-86d4665d84-cndx4" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.848128 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-api-84969c976-hhcrx" Jan 20 17:27:15 crc kubenswrapper[4558]: I0120 17:27:15.858054 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-28nj8" Jan 20 17:27:16 crc kubenswrapper[4558]: I0120 17:27:16.176890 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"5d490bdc-81eb-42cd-9a5d-ae4a8d2b4e51","Type":"ContainerDied","Data":"673ba5dc63634baef44eda303cbf75192417a099a81426f7425b1e8b14b7bf24"} Jan 20 17:27:16 crc kubenswrapper[4558]: I0120 17:27:16.177338 4558 scope.go:117] "RemoveContainer" containerID="4954099cb46eb467eb55768a743f3ad68bf55146245f1fb2f43a81ea0ff802cc" Jan 20 17:27:16 crc kubenswrapper[4558]: I0120 17:27:16.177457 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:27:16 crc kubenswrapper[4558]: I0120 17:27:16.181741 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-55d4bc664d-k82t9" event={"ID":"66e24f38-a98c-4444-8ee4-352266267985","Type":"ContainerStarted","Data":"41bd033877c5a7cf1ffe2c31818a011adc08dd537bfdb844181969248bd15676"} Jan 20 17:27:16 crc kubenswrapper[4558]: I0120 17:27:16.181784 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-55d4bc664d-k82t9" event={"ID":"66e24f38-a98c-4444-8ee4-352266267985","Type":"ContainerStarted","Data":"2c9fa6b9561c4a984ecb954991ac4f52ef42ab0fbc3c9bb866972b9d6d85f164"} Jan 20 17:27:16 crc kubenswrapper[4558]: I0120 17:27:16.222035 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/placement-55d4bc664d-k82t9" podStartSLOduration=2.222009412 podStartE2EDuration="2.222009412s" podCreationTimestamp="2026-01-20 17:27:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:27:16.199564544 +0000 UTC m=+2729.959902501" watchObservedRunningTime="2026-01-20 17:27:16.222009412 +0000 UTC m=+2729.982347379" Jan 20 17:27:16 crc kubenswrapper[4558]: I0120 17:27:16.269250 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:27:16 crc kubenswrapper[4558]: I0120 17:27:16.275071 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:27:16 crc kubenswrapper[4558]: I0120 17:27:16.281354 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:27:16 crc kubenswrapper[4558]: E0120 17:27:16.282007 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d490bdc-81eb-42cd-9a5d-ae4a8d2b4e51" containerName="nova-cell0-conductor-conductor" Jan 20 17:27:16 crc kubenswrapper[4558]: I0120 17:27:16.282027 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d490bdc-81eb-42cd-9a5d-ae4a8d2b4e51" containerName="nova-cell0-conductor-conductor" Jan 20 17:27:16 crc kubenswrapper[4558]: I0120 17:27:16.283059 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="5d490bdc-81eb-42cd-9a5d-ae4a8d2b4e51" containerName="nova-cell0-conductor-conductor" Jan 20 17:27:16 crc kubenswrapper[4558]: I0120 17:27:16.284065 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:27:16 crc kubenswrapper[4558]: I0120 17:27:16.286697 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-conductor-config-data" Jan 20 17:27:16 crc kubenswrapper[4558]: I0120 17:27:16.287350 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-worker-5cbf796c47-9gdgl"] Jan 20 17:27:16 crc kubenswrapper[4558]: I0120 17:27:16.294950 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-86d4665d84-cndx4"] Jan 20 17:27:16 crc kubenswrapper[4558]: I0120 17:27:16.299336 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:27:16 crc kubenswrapper[4558]: I0120 17:27:16.383386 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-28nj8"] Jan 20 17:27:16 crc kubenswrapper[4558]: W0120 17:27:16.384707 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod005155ac_dadf_4840_9b7c_7eb1b0f3252e.slice/crio-95fbc0b4cb8c6822a2c9bec6334502c8f64b249253c034d443ebf64e911017e7 WatchSource:0}: Error finding container 95fbc0b4cb8c6822a2c9bec6334502c8f64b249253c034d443ebf64e911017e7: Status 404 returned error can't find the container with id 95fbc0b4cb8c6822a2c9bec6334502c8f64b249253c034d443ebf64e911017e7 Jan 20 17:27:16 crc kubenswrapper[4558]: W0120 17:27:16.386302 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod184a9f2b_eab7_4221_a900_746e063662a8.slice/crio-4f2d4d994ed47f3490c772e919b7eb78c4f3d55e887b908c8fad5de2672ad9f2 WatchSource:0}: Error finding container 4f2d4d994ed47f3490c772e919b7eb78c4f3d55e887b908c8fad5de2672ad9f2: Status 404 returned error can't find the container with id 4f2d4d994ed47f3490c772e919b7eb78c4f3d55e887b908c8fad5de2672ad9f2 Jan 20 17:27:16 crc kubenswrapper[4558]: I0120 17:27:16.394601 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-api-84969c976-hhcrx"] Jan 20 17:27:16 crc kubenswrapper[4558]: I0120 17:27:16.425282 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3973df1a-dd5a-417e-80a9-6653fb036470-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"3973df1a-dd5a-417e-80a9-6653fb036470\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:27:16 crc kubenswrapper[4558]: I0120 17:27:16.425568 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sgmkz\" (UniqueName: \"kubernetes.io/projected/3973df1a-dd5a-417e-80a9-6653fb036470-kube-api-access-sgmkz\") pod \"nova-cell0-conductor-0\" (UID: \"3973df1a-dd5a-417e-80a9-6653fb036470\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:27:16 crc kubenswrapper[4558]: I0120 17:27:16.425935 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3973df1a-dd5a-417e-80a9-6653fb036470-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"3973df1a-dd5a-417e-80a9-6653fb036470\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:27:16 crc kubenswrapper[4558]: I0120 17:27:16.527368 4558 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3973df1a-dd5a-417e-80a9-6653fb036470-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"3973df1a-dd5a-417e-80a9-6653fb036470\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:27:16 crc kubenswrapper[4558]: I0120 17:27:16.527494 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sgmkz\" (UniqueName: \"kubernetes.io/projected/3973df1a-dd5a-417e-80a9-6653fb036470-kube-api-access-sgmkz\") pod \"nova-cell0-conductor-0\" (UID: \"3973df1a-dd5a-417e-80a9-6653fb036470\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:27:16 crc kubenswrapper[4558]: I0120 17:27:16.527602 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3973df1a-dd5a-417e-80a9-6653fb036470-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"3973df1a-dd5a-417e-80a9-6653fb036470\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:27:16 crc kubenswrapper[4558]: I0120 17:27:16.530873 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3973df1a-dd5a-417e-80a9-6653fb036470-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"3973df1a-dd5a-417e-80a9-6653fb036470\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:27:16 crc kubenswrapper[4558]: I0120 17:27:16.531968 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3973df1a-dd5a-417e-80a9-6653fb036470-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"3973df1a-dd5a-417e-80a9-6653fb036470\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:27:16 crc kubenswrapper[4558]: I0120 17:27:16.540136 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sgmkz\" (UniqueName: \"kubernetes.io/projected/3973df1a-dd5a-417e-80a9-6653fb036470-kube-api-access-sgmkz\") pod \"nova-cell0-conductor-0\" (UID: \"3973df1a-dd5a-417e-80a9-6653fb036470\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:27:16 crc kubenswrapper[4558]: I0120 17:27:16.562007 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:27:16 crc kubenswrapper[4558]: I0120 17:27:16.590384 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5d490bdc-81eb-42cd-9a5d-ae4a8d2b4e51" path="/var/lib/kubelet/pods/5d490bdc-81eb-42cd-9a5d-ae4a8d2b4e51/volumes" Jan 20 17:27:16 crc kubenswrapper[4558]: I0120 17:27:16.593867 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a9d1ae84-0a1b-4418-a336-39d0313a9857" path="/var/lib/kubelet/pods/a9d1ae84-0a1b-4418-a336-39d0313a9857/volumes" Jan 20 17:27:17 crc kubenswrapper[4558]: I0120 17:27:17.001405 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:27:17 crc kubenswrapper[4558]: W0120 17:27:17.003986 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3973df1a_dd5a_417e_80a9_6653fb036470.slice/crio-b00d8ab246342eb2d06712da991aaec4bc45ecc6ab7b0086d0829fed86dff19d WatchSource:0}: Error finding container b00d8ab246342eb2d06712da991aaec4bc45ecc6ab7b0086d0829fed86dff19d: Status 404 returned error can't find the container with id b00d8ab246342eb2d06712da991aaec4bc45ecc6ab7b0086d0829fed86dff19d Jan 20 17:27:17 crc kubenswrapper[4558]: I0120 17:27:17.194064 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-84969c976-hhcrx" event={"ID":"184a9f2b-eab7-4221-a900-746e063662a8","Type":"ContainerStarted","Data":"1d50c4159354efc26c8a538204ca688c7a0a5b8e2d0ac9d5aef208c1239af23d"} Jan 20 17:27:17 crc kubenswrapper[4558]: I0120 17:27:17.194284 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-84969c976-hhcrx" event={"ID":"184a9f2b-eab7-4221-a900-746e063662a8","Type":"ContainerStarted","Data":"0c2544d6d08a38b60dfd9a1ca62a8c9460a6db9ba8a618726a1b8ad313afe1bf"} Jan 20 17:27:17 crc kubenswrapper[4558]: I0120 17:27:17.194298 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-84969c976-hhcrx" event={"ID":"184a9f2b-eab7-4221-a900-746e063662a8","Type":"ContainerStarted","Data":"4f2d4d994ed47f3490c772e919b7eb78c4f3d55e887b908c8fad5de2672ad9f2"} Jan 20 17:27:17 crc kubenswrapper[4558]: I0120 17:27:17.194344 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/barbican-api-84969c976-hhcrx" Jan 20 17:27:17 crc kubenswrapper[4558]: I0120 17:27:17.194364 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/barbican-api-84969c976-hhcrx" Jan 20 17:27:17 crc kubenswrapper[4558]: I0120 17:27:17.198452 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-5cbf796c47-9gdgl" event={"ID":"3b9858cf-2020-458e-bcf6-407c6853a962","Type":"ContainerStarted","Data":"0bee60e6a9ed04943634730130e635bd656aae9f44611d34a0aa33192c022189"} Jan 20 17:27:17 crc kubenswrapper[4558]: I0120 17:27:17.198505 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-5cbf796c47-9gdgl" event={"ID":"3b9858cf-2020-458e-bcf6-407c6853a962","Type":"ContainerStarted","Data":"b4c792c97b8f3884770e6abef851a252b3788722ba9e9f419f4552e68ff9bd85"} Jan 20 17:27:17 crc kubenswrapper[4558]: I0120 17:27:17.198519 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-5cbf796c47-9gdgl" 
event={"ID":"3b9858cf-2020-458e-bcf6-407c6853a962","Type":"ContainerStarted","Data":"e540e10ebfe59a8afa1a8d521b3270fe329363cfb25a828a0e2dfd0f29bd25f0"} Jan 20 17:27:17 crc kubenswrapper[4558]: I0120 17:27:17.200394 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-86d4665d84-cndx4" event={"ID":"125597fc-d3b4-44c1-a0a6-7dccbaebe7dd","Type":"ContainerStarted","Data":"8299fb12d183c4920c21b1f9def27ecef5343744aedc73fdcd93885a02ce9354"} Jan 20 17:27:17 crc kubenswrapper[4558]: I0120 17:27:17.200423 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-86d4665d84-cndx4" event={"ID":"125597fc-d3b4-44c1-a0a6-7dccbaebe7dd","Type":"ContainerStarted","Data":"2c27863658ecd8a450f932a78dd0d64933a85bf75c1f865bb6dc9d092e3e4a29"} Jan 20 17:27:17 crc kubenswrapper[4558]: I0120 17:27:17.200434 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-86d4665d84-cndx4" event={"ID":"125597fc-d3b4-44c1-a0a6-7dccbaebe7dd","Type":"ContainerStarted","Data":"c43232927c02372c79d83e943e2c358fe1af41bc01b132b3223cbee474bfe408"} Jan 20 17:27:17 crc kubenswrapper[4558]: I0120 17:27:17.205034 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"3973df1a-dd5a-417e-80a9-6653fb036470","Type":"ContainerStarted","Data":"b00d8ab246342eb2d06712da991aaec4bc45ecc6ab7b0086d0829fed86dff19d"} Jan 20 17:27:17 crc kubenswrapper[4558]: I0120 17:27:17.212514 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-28nj8" event={"ID":"005155ac-dadf-4840-9b7c-7eb1b0f3252e","Type":"ContainerStarted","Data":"078d2b2ec0a8bee0701e4f1972ae83126a9c9ca7a4fe3a16d9d7a16978e58d58"} Jan 20 17:27:17 crc kubenswrapper[4558]: I0120 17:27:17.212567 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-28nj8" event={"ID":"005155ac-dadf-4840-9b7c-7eb1b0f3252e","Type":"ContainerStarted","Data":"95fbc0b4cb8c6822a2c9bec6334502c8f64b249253c034d443ebf64e911017e7"} Jan 20 17:27:17 crc kubenswrapper[4558]: I0120 17:27:17.212883 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/placement-55d4bc664d-k82t9" Jan 20 17:27:17 crc kubenswrapper[4558]: I0120 17:27:17.212994 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/placement-55d4bc664d-k82t9" Jan 20 17:27:17 crc kubenswrapper[4558]: I0120 17:27:17.217574 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-api-84969c976-hhcrx" podStartSLOduration=2.21756391 podStartE2EDuration="2.21756391s" podCreationTimestamp="2026-01-20 17:27:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:27:17.213414861 +0000 UTC m=+2730.973752828" watchObservedRunningTime="2026-01-20 17:27:17.21756391 +0000 UTC m=+2730.977901878" Jan 20 17:27:17 crc kubenswrapper[4558]: I0120 17:27:17.231740 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-keystone-listener-86d4665d84-cndx4" podStartSLOduration=2.23172218 podStartE2EDuration="2.23172218s" podCreationTimestamp="2026-01-20 17:27:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2026-01-20 17:27:17.229374395 +0000 UTC m=+2730.989712362" watchObservedRunningTime="2026-01-20 17:27:17.23172218 +0000 UTC m=+2730.992060147" Jan 20 17:27:17 crc kubenswrapper[4558]: I0120 17:27:17.243385 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/keystone-bootstrap-28nj8" podStartSLOduration=2.243367915 podStartE2EDuration="2.243367915s" podCreationTimestamp="2026-01-20 17:27:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:27:17.240789657 +0000 UTC m=+2731.001127624" watchObservedRunningTime="2026-01-20 17:27:17.243367915 +0000 UTC m=+2731.003705881" Jan 20 17:27:17 crc kubenswrapper[4558]: I0120 17:27:17.261850 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-5cbb954895-rgzbf"] Jan 20 17:27:17 crc kubenswrapper[4558]: I0120 17:27:17.262083 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-keystone-listener-5cbb954895-rgzbf" podUID="b8f85165-600b-47f4-b459-e1eb023907a6" containerName="barbican-keystone-listener-log" containerID="cri-o://b1b8dd351e12ca92ea5d56e15dd5ab075ca2eb4ee0213bbd1f5c91319f363239" gracePeriod=30 Jan 20 17:27:17 crc kubenswrapper[4558]: I0120 17:27:17.262101 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-keystone-listener-5cbb954895-rgzbf" podUID="b8f85165-600b-47f4-b459-e1eb023907a6" containerName="barbican-keystone-listener" containerID="cri-o://99a4a1d716eab3ac3b920c0fee4a68d744e9cc640f5f82e54e1934eaf6260207" gracePeriod=30 Jan 20 17:27:17 crc kubenswrapper[4558]: I0120 17:27:17.268509 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-worker-5cbf796c47-9gdgl" podStartSLOduration=2.268495206 podStartE2EDuration="2.268495206s" podCreationTimestamp="2026-01-20 17:27:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:27:17.25895545 +0000 UTC m=+2731.019293418" watchObservedRunningTime="2026-01-20 17:27:17.268495206 +0000 UTC m=+2731.028833173" Jan 20 17:27:17 crc kubenswrapper[4558]: I0120 17:27:17.288263 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-worker-67ccd5cf94-6lkng"] Jan 20 17:27:17 crc kubenswrapper[4558]: I0120 17:27:17.288447 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-worker-67ccd5cf94-6lkng" podUID="d4f8d7f7-b0d0-4643-834d-1a5056d5cf89" containerName="barbican-worker-log" containerID="cri-o://c6c1817123e3f5c8cea565cdf69b0faaf8aa7b2548ca93d325c3dcfe94cc8266" gracePeriod=30 Jan 20 17:27:17 crc kubenswrapper[4558]: I0120 17:27:17.288529 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-worker-67ccd5cf94-6lkng" podUID="d4f8d7f7-b0d0-4643-834d-1a5056d5cf89" containerName="barbican-worker" containerID="cri-o://9370273bf4e481977fef5391473819b681d5b9a82f04da2dbb3c091078de2d23" gracePeriod=30 Jan 20 17:27:18 crc kubenswrapper[4558]: I0120 17:27:18.221743 4558 generic.go:334] "Generic (PLEG): container finished" podID="b8f85165-600b-47f4-b459-e1eb023907a6" containerID="b1b8dd351e12ca92ea5d56e15dd5ab075ca2eb4ee0213bbd1f5c91319f363239" exitCode=143 Jan 20 17:27:18 crc 
kubenswrapper[4558]: I0120 17:27:18.221833 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-5cbb954895-rgzbf" event={"ID":"b8f85165-600b-47f4-b459-e1eb023907a6","Type":"ContainerDied","Data":"b1b8dd351e12ca92ea5d56e15dd5ab075ca2eb4ee0213bbd1f5c91319f363239"} Jan 20 17:27:18 crc kubenswrapper[4558]: I0120 17:27:18.223308 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"3973df1a-dd5a-417e-80a9-6653fb036470","Type":"ContainerStarted","Data":"6d33b2d5d8fe60d82930b4864fabfe53d6a679fc712a4ba67d60a4da40feed96"} Jan 20 17:27:18 crc kubenswrapper[4558]: I0120 17:27:18.224606 4558 generic.go:334] "Generic (PLEG): container finished" podID="d4f8d7f7-b0d0-4643-834d-1a5056d5cf89" containerID="c6c1817123e3f5c8cea565cdf69b0faaf8aa7b2548ca93d325c3dcfe94cc8266" exitCode=143 Jan 20 17:27:18 crc kubenswrapper[4558]: I0120 17:27:18.225607 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-67ccd5cf94-6lkng" event={"ID":"d4f8d7f7-b0d0-4643-834d-1a5056d5cf89","Type":"ContainerDied","Data":"c6c1817123e3f5c8cea565cdf69b0faaf8aa7b2548ca93d325c3dcfe94cc8266"} Jan 20 17:27:18 crc kubenswrapper[4558]: I0120 17:27:18.259587 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podStartSLOduration=2.259570032 podStartE2EDuration="2.259570032s" podCreationTimestamp="2026-01-20 17:27:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:27:18.252050145 +0000 UTC m=+2732.012388112" watchObservedRunningTime="2026-01-20 17:27:18.259570032 +0000 UTC m=+2732.019908000" Jan 20 17:27:18 crc kubenswrapper[4558]: I0120 17:27:18.725615 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:27:18 crc kubenswrapper[4558]: I0120 17:27:18.900468 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/cbbfff95-9f90-4be2-9ed7-0a1cbbe43542-etc-swift\") pod \"cbbfff95-9f90-4be2-9ed7-0a1cbbe43542\" (UID: \"cbbfff95-9f90-4be2-9ed7-0a1cbbe43542\") " Jan 20 17:27:18 crc kubenswrapper[4558]: I0120 17:27:18.901009 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/cbbfff95-9f90-4be2-9ed7-0a1cbbe43542-cache\") pod \"cbbfff95-9f90-4be2-9ed7-0a1cbbe43542\" (UID: \"cbbfff95-9f90-4be2-9ed7-0a1cbbe43542\") " Jan 20 17:27:18 crc kubenswrapper[4558]: I0120 17:27:18.901103 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kxw6w\" (UniqueName: \"kubernetes.io/projected/cbbfff95-9f90-4be2-9ed7-0a1cbbe43542-kube-api-access-kxw6w\") pod \"cbbfff95-9f90-4be2-9ed7-0a1cbbe43542\" (UID: \"cbbfff95-9f90-4be2-9ed7-0a1cbbe43542\") " Jan 20 17:27:18 crc kubenswrapper[4558]: I0120 17:27:18.901230 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swift\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"cbbfff95-9f90-4be2-9ed7-0a1cbbe43542\" (UID: \"cbbfff95-9f90-4be2-9ed7-0a1cbbe43542\") " Jan 20 17:27:18 crc kubenswrapper[4558]: I0120 17:27:18.901312 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/cbbfff95-9f90-4be2-9ed7-0a1cbbe43542-lock\") pod \"cbbfff95-9f90-4be2-9ed7-0a1cbbe43542\" (UID: \"cbbfff95-9f90-4be2-9ed7-0a1cbbe43542\") " Jan 20 17:27:18 crc kubenswrapper[4558]: I0120 17:27:18.902661 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cbbfff95-9f90-4be2-9ed7-0a1cbbe43542-cache" (OuterVolumeSpecName: "cache") pod "cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" (UID: "cbbfff95-9f90-4be2-9ed7-0a1cbbe43542"). InnerVolumeSpecName "cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:27:18 crc kubenswrapper[4558]: I0120 17:27:18.902910 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cbbfff95-9f90-4be2-9ed7-0a1cbbe43542-lock" (OuterVolumeSpecName: "lock") pod "cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" (UID: "cbbfff95-9f90-4be2-9ed7-0a1cbbe43542"). InnerVolumeSpecName "lock". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:27:18 crc kubenswrapper[4558]: I0120 17:27:18.909474 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage12-crc" (OuterVolumeSpecName: "swift") pod "cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" (UID: "cbbfff95-9f90-4be2-9ed7-0a1cbbe43542"). InnerVolumeSpecName "local-storage12-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:27:18 crc kubenswrapper[4558]: I0120 17:27:18.915548 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cbbfff95-9f90-4be2-9ed7-0a1cbbe43542-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" (UID: "cbbfff95-9f90-4be2-9ed7-0a1cbbe43542"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:27:18 crc kubenswrapper[4558]: I0120 17:27:18.917974 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cbbfff95-9f90-4be2-9ed7-0a1cbbe43542-kube-api-access-kxw6w" (OuterVolumeSpecName: "kube-api-access-kxw6w") pod "cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" (UID: "cbbfff95-9f90-4be2-9ed7-0a1cbbe43542"). InnerVolumeSpecName "kube-api-access-kxw6w". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.004340 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kxw6w\" (UniqueName: \"kubernetes.io/projected/cbbfff95-9f90-4be2-9ed7-0a1cbbe43542-kube-api-access-kxw6w\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.004414 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" " Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.004429 4558 reconciler_common.go:293] "Volume detached for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/cbbfff95-9f90-4be2-9ed7-0a1cbbe43542-lock\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.004440 4558 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/cbbfff95-9f90-4be2-9ed7-0a1cbbe43542-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.004449 4558 reconciler_common.go:293] "Volume detached for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/cbbfff95-9f90-4be2-9ed7-0a1cbbe43542-cache\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.022950 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage12-crc" (UniqueName: "kubernetes.io/local-volume/local-storage12-crc") on node "crc" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.107457 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.251664 4558 generic.go:334] "Generic (PLEG): container finished" podID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" containerID="2b148f67806ea893dea736f70886bafe16751aa4932ab889670bd2755c1dad06" exitCode=137 Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.251748 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"cbbfff95-9f90-4be2-9ed7-0a1cbbe43542","Type":"ContainerDied","Data":"2b148f67806ea893dea736f70886bafe16751aa4932ab889670bd2755c1dad06"} Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.251845 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"cbbfff95-9f90-4be2-9ed7-0a1cbbe43542","Type":"ContainerDied","Data":"0381c18e7f0d5c5edd126013c4674e47264581b209587bb0ec30b7d737e157c2"} Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.251860 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.251878 4558 scope.go:117] "RemoveContainer" containerID="2b148f67806ea893dea736f70886bafe16751aa4932ab889670bd2755c1dad06" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.252053 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.288561 4558 scope.go:117] "RemoveContainer" containerID="9016dbbaf3d5e8bdb4e9556a7b608c196cc888571331abb5670be24de359a45a" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.294453 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-storage-0"] Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.303895 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/swift-storage-0"] Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.308950 4558 scope.go:117] "RemoveContainer" containerID="68fbcc76c0e2dedc82e6715e0ec8b4d992e77e10e439a698ddd8d943bc08f443" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.335386 4558 scope.go:117] "RemoveContainer" containerID="50eea846d91051f2dd7105d2a67595fd30c64b90855e0b8b97a766b5024a4194" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.338036 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/swift-storage-0"] Jan 20 17:27:19 crc kubenswrapper[4558]: E0120 17:27:19.338629 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" containerName="account-reaper" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.338648 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" containerName="account-reaper" Jan 20 17:27:19 crc kubenswrapper[4558]: E0120 17:27:19.338660 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" containerName="object-server" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.338668 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" containerName="object-server" Jan 20 17:27:19 crc kubenswrapper[4558]: E0120 17:27:19.338709 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" containerName="object-replicator" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.338715 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" containerName="object-replicator" Jan 20 17:27:19 crc kubenswrapper[4558]: E0120 17:27:19.338733 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" containerName="object-expirer" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.338739 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" containerName="object-expirer" Jan 20 17:27:19 crc kubenswrapper[4558]: E0120 17:27:19.338748 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" containerName="swift-recon-cron" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.338754 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" containerName="swift-recon-cron" Jan 20 17:27:19 crc kubenswrapper[4558]: E0120 17:27:19.338768 4558 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" containerName="object-updater" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.338782 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" containerName="object-updater" Jan 20 17:27:19 crc kubenswrapper[4558]: E0120 17:27:19.338791 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" containerName="container-server" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.338797 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" containerName="container-server" Jan 20 17:27:19 crc kubenswrapper[4558]: E0120 17:27:19.338807 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" containerName="rsync" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.338814 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" containerName="rsync" Jan 20 17:27:19 crc kubenswrapper[4558]: E0120 17:27:19.338823 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" containerName="account-replicator" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.338829 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" containerName="account-replicator" Jan 20 17:27:19 crc kubenswrapper[4558]: E0120 17:27:19.338853 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" containerName="container-updater" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.338859 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" containerName="container-updater" Jan 20 17:27:19 crc kubenswrapper[4558]: E0120 17:27:19.338868 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" containerName="account-server" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.338873 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" containerName="account-server" Jan 20 17:27:19 crc kubenswrapper[4558]: E0120 17:27:19.338893 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" containerName="container-auditor" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.338898 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" containerName="container-auditor" Jan 20 17:27:19 crc kubenswrapper[4558]: E0120 17:27:19.338912 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" containerName="account-auditor" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.338918 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" containerName="account-auditor" Jan 20 17:27:19 crc kubenswrapper[4558]: E0120 17:27:19.338926 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" containerName="container-replicator" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.338932 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" containerName="container-replicator" Jan 20 17:27:19 crc kubenswrapper[4558]: E0120 17:27:19.338944 4558 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" containerName="object-auditor" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.338951 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" containerName="object-auditor" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.339200 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" containerName="account-server" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.339213 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" containerName="rsync" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.339223 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" containerName="swift-recon-cron" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.339236 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" containerName="account-reaper" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.339249 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" containerName="container-auditor" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.339261 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" containerName="object-server" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.339269 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" containerName="object-updater" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.339281 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" containerName="object-auditor" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.339288 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" containerName="account-auditor" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.339298 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" containerName="container-updater" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.339304 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" containerName="container-replicator" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.339312 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" containerName="object-expirer" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.339322 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" containerName="account-replicator" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.339329 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" containerName="container-server" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.339334 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" containerName="object-replicator" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.344947 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.346875 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"swift-storage-config-data" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.348574 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-storage-0"] Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.362197 4558 scope.go:117] "RemoveContainer" containerID="a9f1e3687db381bd17fdfb1b0e3aaf9c0fa91fd605b0d73ed9095c3bd3bd381f" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.388753 4558 scope.go:117] "RemoveContainer" containerID="dc5ce68e3517c196bdee8bba30197f747f505c80c7dfe2ac492a71348d08c4f8" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.406281 4558 scope.go:117] "RemoveContainer" containerID="f471fa142ace1139e497b71d2408b5de3eb8a81c1e52fc0f3e0b7abe127fa420" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.425981 4558 scope.go:117] "RemoveContainer" containerID="7d429e35b6e23b8fe5da4b2ba40f03361d3954fbabd9c27927cee2dca39904f9" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.445841 4558 scope.go:117] "RemoveContainer" containerID="8dfb739a96b0388de7f0bca3f773a09ff87c39b3fc3e077a97ec5ad2b9469fe8" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.461186 4558 scope.go:117] "RemoveContainer" containerID="fac08b16dfb922f531dfe4ed284cdec4dfd765c94432b6d5e4237c6ab1063c5f" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.488496 4558 scope.go:117] "RemoveContainer" containerID="9f94ead147cad59ed2c937f2aace48390c6cf7ba843fc0898b176e2475d251b5" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.507505 4558 scope.go:117] "RemoveContainer" containerID="22cdeeaca39d5197ecff3f1352fe3fc3b073ce7ef0565d67854f14d8c72aeba3" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.516760 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/8804f9f1-60d4-4fa1-9333-9f9e01b3c68a-etc-swift\") pod \"swift-storage-0\" (UID: \"8804f9f1-60d4-4fa1-9333-9f9e01b3c68a\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.516983 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/8804f9f1-60d4-4fa1-9333-9f9e01b3c68a-lock\") pod \"swift-storage-0\" (UID: \"8804f9f1-60d4-4fa1-9333-9f9e01b3c68a\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.517025 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ttrhb\" (UniqueName: \"kubernetes.io/projected/8804f9f1-60d4-4fa1-9333-9f9e01b3c68a-kube-api-access-ttrhb\") pod \"swift-storage-0\" (UID: \"8804f9f1-60d4-4fa1-9333-9f9e01b3c68a\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.517295 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/8804f9f1-60d4-4fa1-9333-9f9e01b3c68a-cache\") pod \"swift-storage-0\" (UID: \"8804f9f1-60d4-4fa1-9333-9f9e01b3c68a\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.517393 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"swift-storage-0\" (UID: \"8804f9f1-60d4-4fa1-9333-9f9e01b3c68a\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.529884 4558 scope.go:117] "RemoveContainer" containerID="33c92b2ec4e98e2de94cfd9d49d4bf673da1f8bed18bac8e9c5007c99a8dc48e" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.588387 4558 scope.go:117] "RemoveContainer" containerID="36bfb57970925c866cd826c6992c1bb5eb23be201b3fee3a44013164f6bf6c45" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.606668 4558 scope.go:117] "RemoveContainer" containerID="e6440af1d01d3d7e989418e06350468870193b38937d2ae69a49ff7a72d20bf2" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.619260 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/8804f9f1-60d4-4fa1-9333-9f9e01b3c68a-etc-swift\") pod \"swift-storage-0\" (UID: \"8804f9f1-60d4-4fa1-9333-9f9e01b3c68a\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.619357 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/8804f9f1-60d4-4fa1-9333-9f9e01b3c68a-lock\") pod \"swift-storage-0\" (UID: \"8804f9f1-60d4-4fa1-9333-9f9e01b3c68a\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.619393 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ttrhb\" (UniqueName: \"kubernetes.io/projected/8804f9f1-60d4-4fa1-9333-9f9e01b3c68a-kube-api-access-ttrhb\") pod \"swift-storage-0\" (UID: \"8804f9f1-60d4-4fa1-9333-9f9e01b3c68a\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.619448 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/8804f9f1-60d4-4fa1-9333-9f9e01b3c68a-cache\") pod \"swift-storage-0\" (UID: \"8804f9f1-60d4-4fa1-9333-9f9e01b3c68a\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.619485 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"swift-storage-0\" (UID: \"8804f9f1-60d4-4fa1-9333-9f9e01b3c68a\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.619652 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"swift-storage-0\" (UID: \"8804f9f1-60d4-4fa1-9333-9f9e01b3c68a\") device mount path \"/mnt/openstack/pv12\"" pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.620014 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/8804f9f1-60d4-4fa1-9333-9f9e01b3c68a-lock\") pod \"swift-storage-0\" (UID: \"8804f9f1-60d4-4fa1-9333-9f9e01b3c68a\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.620199 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/8804f9f1-60d4-4fa1-9333-9f9e01b3c68a-cache\") pod \"swift-storage-0\" 
(UID: \"8804f9f1-60d4-4fa1-9333-9f9e01b3c68a\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.625657 4558 scope.go:117] "RemoveContainer" containerID="2b148f67806ea893dea736f70886bafe16751aa4932ab889670bd2755c1dad06" Jan 20 17:27:19 crc kubenswrapper[4558]: E0120 17:27:19.626329 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2b148f67806ea893dea736f70886bafe16751aa4932ab889670bd2755c1dad06\": container with ID starting with 2b148f67806ea893dea736f70886bafe16751aa4932ab889670bd2755c1dad06 not found: ID does not exist" containerID="2b148f67806ea893dea736f70886bafe16751aa4932ab889670bd2755c1dad06" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.626380 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2b148f67806ea893dea736f70886bafe16751aa4932ab889670bd2755c1dad06"} err="failed to get container status \"2b148f67806ea893dea736f70886bafe16751aa4932ab889670bd2755c1dad06\": rpc error: code = NotFound desc = could not find container \"2b148f67806ea893dea736f70886bafe16751aa4932ab889670bd2755c1dad06\": container with ID starting with 2b148f67806ea893dea736f70886bafe16751aa4932ab889670bd2755c1dad06 not found: ID does not exist" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.626416 4558 scope.go:117] "RemoveContainer" containerID="9016dbbaf3d5e8bdb4e9556a7b608c196cc888571331abb5670be24de359a45a" Jan 20 17:27:19 crc kubenswrapper[4558]: E0120 17:27:19.626907 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9016dbbaf3d5e8bdb4e9556a7b608c196cc888571331abb5670be24de359a45a\": container with ID starting with 9016dbbaf3d5e8bdb4e9556a7b608c196cc888571331abb5670be24de359a45a not found: ID does not exist" containerID="9016dbbaf3d5e8bdb4e9556a7b608c196cc888571331abb5670be24de359a45a" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.626964 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9016dbbaf3d5e8bdb4e9556a7b608c196cc888571331abb5670be24de359a45a"} err="failed to get container status \"9016dbbaf3d5e8bdb4e9556a7b608c196cc888571331abb5670be24de359a45a\": rpc error: code = NotFound desc = could not find container \"9016dbbaf3d5e8bdb4e9556a7b608c196cc888571331abb5670be24de359a45a\": container with ID starting with 9016dbbaf3d5e8bdb4e9556a7b608c196cc888571331abb5670be24de359a45a not found: ID does not exist" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.627003 4558 scope.go:117] "RemoveContainer" containerID="68fbcc76c0e2dedc82e6715e0ec8b4d992e77e10e439a698ddd8d943bc08f443" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.627495 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/8804f9f1-60d4-4fa1-9333-9f9e01b3c68a-etc-swift\") pod \"swift-storage-0\" (UID: \"8804f9f1-60d4-4fa1-9333-9f9e01b3c68a\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:27:19 crc kubenswrapper[4558]: E0120 17:27:19.627510 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"68fbcc76c0e2dedc82e6715e0ec8b4d992e77e10e439a698ddd8d943bc08f443\": container with ID starting with 68fbcc76c0e2dedc82e6715e0ec8b4d992e77e10e439a698ddd8d943bc08f443 not found: ID does not exist" 
containerID="68fbcc76c0e2dedc82e6715e0ec8b4d992e77e10e439a698ddd8d943bc08f443" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.627626 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"68fbcc76c0e2dedc82e6715e0ec8b4d992e77e10e439a698ddd8d943bc08f443"} err="failed to get container status \"68fbcc76c0e2dedc82e6715e0ec8b4d992e77e10e439a698ddd8d943bc08f443\": rpc error: code = NotFound desc = could not find container \"68fbcc76c0e2dedc82e6715e0ec8b4d992e77e10e439a698ddd8d943bc08f443\": container with ID starting with 68fbcc76c0e2dedc82e6715e0ec8b4d992e77e10e439a698ddd8d943bc08f443 not found: ID does not exist" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.627649 4558 scope.go:117] "RemoveContainer" containerID="50eea846d91051f2dd7105d2a67595fd30c64b90855e0b8b97a766b5024a4194" Jan 20 17:27:19 crc kubenswrapper[4558]: E0120 17:27:19.627969 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"50eea846d91051f2dd7105d2a67595fd30c64b90855e0b8b97a766b5024a4194\": container with ID starting with 50eea846d91051f2dd7105d2a67595fd30c64b90855e0b8b97a766b5024a4194 not found: ID does not exist" containerID="50eea846d91051f2dd7105d2a67595fd30c64b90855e0b8b97a766b5024a4194" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.628000 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"50eea846d91051f2dd7105d2a67595fd30c64b90855e0b8b97a766b5024a4194"} err="failed to get container status \"50eea846d91051f2dd7105d2a67595fd30c64b90855e0b8b97a766b5024a4194\": rpc error: code = NotFound desc = could not find container \"50eea846d91051f2dd7105d2a67595fd30c64b90855e0b8b97a766b5024a4194\": container with ID starting with 50eea846d91051f2dd7105d2a67595fd30c64b90855e0b8b97a766b5024a4194 not found: ID does not exist" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.628031 4558 scope.go:117] "RemoveContainer" containerID="a9f1e3687db381bd17fdfb1b0e3aaf9c0fa91fd605b0d73ed9095c3bd3bd381f" Jan 20 17:27:19 crc kubenswrapper[4558]: E0120 17:27:19.628369 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a9f1e3687db381bd17fdfb1b0e3aaf9c0fa91fd605b0d73ed9095c3bd3bd381f\": container with ID starting with a9f1e3687db381bd17fdfb1b0e3aaf9c0fa91fd605b0d73ed9095c3bd3bd381f not found: ID does not exist" containerID="a9f1e3687db381bd17fdfb1b0e3aaf9c0fa91fd605b0d73ed9095c3bd3bd381f" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.628393 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a9f1e3687db381bd17fdfb1b0e3aaf9c0fa91fd605b0d73ed9095c3bd3bd381f"} err="failed to get container status \"a9f1e3687db381bd17fdfb1b0e3aaf9c0fa91fd605b0d73ed9095c3bd3bd381f\": rpc error: code = NotFound desc = could not find container \"a9f1e3687db381bd17fdfb1b0e3aaf9c0fa91fd605b0d73ed9095c3bd3bd381f\": container with ID starting with a9f1e3687db381bd17fdfb1b0e3aaf9c0fa91fd605b0d73ed9095c3bd3bd381f not found: ID does not exist" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.628413 4558 scope.go:117] "RemoveContainer" containerID="dc5ce68e3517c196bdee8bba30197f747f505c80c7dfe2ac492a71348d08c4f8" Jan 20 17:27:19 crc kubenswrapper[4558]: E0120 17:27:19.628916 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"dc5ce68e3517c196bdee8bba30197f747f505c80c7dfe2ac492a71348d08c4f8\": container with ID starting with dc5ce68e3517c196bdee8bba30197f747f505c80c7dfe2ac492a71348d08c4f8 not found: ID does not exist" containerID="dc5ce68e3517c196bdee8bba30197f747f505c80c7dfe2ac492a71348d08c4f8" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.628974 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dc5ce68e3517c196bdee8bba30197f747f505c80c7dfe2ac492a71348d08c4f8"} err="failed to get container status \"dc5ce68e3517c196bdee8bba30197f747f505c80c7dfe2ac492a71348d08c4f8\": rpc error: code = NotFound desc = could not find container \"dc5ce68e3517c196bdee8bba30197f747f505c80c7dfe2ac492a71348d08c4f8\": container with ID starting with dc5ce68e3517c196bdee8bba30197f747f505c80c7dfe2ac492a71348d08c4f8 not found: ID does not exist" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.629015 4558 scope.go:117] "RemoveContainer" containerID="f471fa142ace1139e497b71d2408b5de3eb8a81c1e52fc0f3e0b7abe127fa420" Jan 20 17:27:19 crc kubenswrapper[4558]: E0120 17:27:19.629414 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f471fa142ace1139e497b71d2408b5de3eb8a81c1e52fc0f3e0b7abe127fa420\": container with ID starting with f471fa142ace1139e497b71d2408b5de3eb8a81c1e52fc0f3e0b7abe127fa420 not found: ID does not exist" containerID="f471fa142ace1139e497b71d2408b5de3eb8a81c1e52fc0f3e0b7abe127fa420" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.629450 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f471fa142ace1139e497b71d2408b5de3eb8a81c1e52fc0f3e0b7abe127fa420"} err="failed to get container status \"f471fa142ace1139e497b71d2408b5de3eb8a81c1e52fc0f3e0b7abe127fa420\": rpc error: code = NotFound desc = could not find container \"f471fa142ace1139e497b71d2408b5de3eb8a81c1e52fc0f3e0b7abe127fa420\": container with ID starting with f471fa142ace1139e497b71d2408b5de3eb8a81c1e52fc0f3e0b7abe127fa420 not found: ID does not exist" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.629471 4558 scope.go:117] "RemoveContainer" containerID="7d429e35b6e23b8fe5da4b2ba40f03361d3954fbabd9c27927cee2dca39904f9" Jan 20 17:27:19 crc kubenswrapper[4558]: E0120 17:27:19.629970 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7d429e35b6e23b8fe5da4b2ba40f03361d3954fbabd9c27927cee2dca39904f9\": container with ID starting with 7d429e35b6e23b8fe5da4b2ba40f03361d3954fbabd9c27927cee2dca39904f9 not found: ID does not exist" containerID="7d429e35b6e23b8fe5da4b2ba40f03361d3954fbabd9c27927cee2dca39904f9" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.629996 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7d429e35b6e23b8fe5da4b2ba40f03361d3954fbabd9c27927cee2dca39904f9"} err="failed to get container status \"7d429e35b6e23b8fe5da4b2ba40f03361d3954fbabd9c27927cee2dca39904f9\": rpc error: code = NotFound desc = could not find container \"7d429e35b6e23b8fe5da4b2ba40f03361d3954fbabd9c27927cee2dca39904f9\": container with ID starting with 7d429e35b6e23b8fe5da4b2ba40f03361d3954fbabd9c27927cee2dca39904f9 not found: ID does not exist" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.630012 4558 scope.go:117] "RemoveContainer" containerID="8dfb739a96b0388de7f0bca3f773a09ff87c39b3fc3e077a97ec5ad2b9469fe8" Jan 20 17:27:19 crc 
kubenswrapper[4558]: E0120 17:27:19.630304 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8dfb739a96b0388de7f0bca3f773a09ff87c39b3fc3e077a97ec5ad2b9469fe8\": container with ID starting with 8dfb739a96b0388de7f0bca3f773a09ff87c39b3fc3e077a97ec5ad2b9469fe8 not found: ID does not exist" containerID="8dfb739a96b0388de7f0bca3f773a09ff87c39b3fc3e077a97ec5ad2b9469fe8" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.630321 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8dfb739a96b0388de7f0bca3f773a09ff87c39b3fc3e077a97ec5ad2b9469fe8"} err="failed to get container status \"8dfb739a96b0388de7f0bca3f773a09ff87c39b3fc3e077a97ec5ad2b9469fe8\": rpc error: code = NotFound desc = could not find container \"8dfb739a96b0388de7f0bca3f773a09ff87c39b3fc3e077a97ec5ad2b9469fe8\": container with ID starting with 8dfb739a96b0388de7f0bca3f773a09ff87c39b3fc3e077a97ec5ad2b9469fe8 not found: ID does not exist" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.630341 4558 scope.go:117] "RemoveContainer" containerID="fac08b16dfb922f531dfe4ed284cdec4dfd765c94432b6d5e4237c6ab1063c5f" Jan 20 17:27:19 crc kubenswrapper[4558]: E0120 17:27:19.630855 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fac08b16dfb922f531dfe4ed284cdec4dfd765c94432b6d5e4237c6ab1063c5f\": container with ID starting with fac08b16dfb922f531dfe4ed284cdec4dfd765c94432b6d5e4237c6ab1063c5f not found: ID does not exist" containerID="fac08b16dfb922f531dfe4ed284cdec4dfd765c94432b6d5e4237c6ab1063c5f" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.630992 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fac08b16dfb922f531dfe4ed284cdec4dfd765c94432b6d5e4237c6ab1063c5f"} err="failed to get container status \"fac08b16dfb922f531dfe4ed284cdec4dfd765c94432b6d5e4237c6ab1063c5f\": rpc error: code = NotFound desc = could not find container \"fac08b16dfb922f531dfe4ed284cdec4dfd765c94432b6d5e4237c6ab1063c5f\": container with ID starting with fac08b16dfb922f531dfe4ed284cdec4dfd765c94432b6d5e4237c6ab1063c5f not found: ID does not exist" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.631109 4558 scope.go:117] "RemoveContainer" containerID="9f94ead147cad59ed2c937f2aace48390c6cf7ba843fc0898b176e2475d251b5" Jan 20 17:27:19 crc kubenswrapper[4558]: E0120 17:27:19.631542 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9f94ead147cad59ed2c937f2aace48390c6cf7ba843fc0898b176e2475d251b5\": container with ID starting with 9f94ead147cad59ed2c937f2aace48390c6cf7ba843fc0898b176e2475d251b5 not found: ID does not exist" containerID="9f94ead147cad59ed2c937f2aace48390c6cf7ba843fc0898b176e2475d251b5" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.631576 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9f94ead147cad59ed2c937f2aace48390c6cf7ba843fc0898b176e2475d251b5"} err="failed to get container status \"9f94ead147cad59ed2c937f2aace48390c6cf7ba843fc0898b176e2475d251b5\": rpc error: code = NotFound desc = could not find container \"9f94ead147cad59ed2c937f2aace48390c6cf7ba843fc0898b176e2475d251b5\": container with ID starting with 9f94ead147cad59ed2c937f2aace48390c6cf7ba843fc0898b176e2475d251b5 not found: ID does not exist" Jan 20 17:27:19 crc kubenswrapper[4558]: 
I0120 17:27:19.631597 4558 scope.go:117] "RemoveContainer" containerID="22cdeeaca39d5197ecff3f1352fe3fc3b073ce7ef0565d67854f14d8c72aeba3" Jan 20 17:27:19 crc kubenswrapper[4558]: E0120 17:27:19.631992 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"22cdeeaca39d5197ecff3f1352fe3fc3b073ce7ef0565d67854f14d8c72aeba3\": container with ID starting with 22cdeeaca39d5197ecff3f1352fe3fc3b073ce7ef0565d67854f14d8c72aeba3 not found: ID does not exist" containerID="22cdeeaca39d5197ecff3f1352fe3fc3b073ce7ef0565d67854f14d8c72aeba3" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.632029 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"22cdeeaca39d5197ecff3f1352fe3fc3b073ce7ef0565d67854f14d8c72aeba3"} err="failed to get container status \"22cdeeaca39d5197ecff3f1352fe3fc3b073ce7ef0565d67854f14d8c72aeba3\": rpc error: code = NotFound desc = could not find container \"22cdeeaca39d5197ecff3f1352fe3fc3b073ce7ef0565d67854f14d8c72aeba3\": container with ID starting with 22cdeeaca39d5197ecff3f1352fe3fc3b073ce7ef0565d67854f14d8c72aeba3 not found: ID does not exist" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.632050 4558 scope.go:117] "RemoveContainer" containerID="33c92b2ec4e98e2de94cfd9d49d4bf673da1f8bed18bac8e9c5007c99a8dc48e" Jan 20 17:27:19 crc kubenswrapper[4558]: E0120 17:27:19.632377 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"33c92b2ec4e98e2de94cfd9d49d4bf673da1f8bed18bac8e9c5007c99a8dc48e\": container with ID starting with 33c92b2ec4e98e2de94cfd9d49d4bf673da1f8bed18bac8e9c5007c99a8dc48e not found: ID does not exist" containerID="33c92b2ec4e98e2de94cfd9d49d4bf673da1f8bed18bac8e9c5007c99a8dc48e" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.632511 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"33c92b2ec4e98e2de94cfd9d49d4bf673da1f8bed18bac8e9c5007c99a8dc48e"} err="failed to get container status \"33c92b2ec4e98e2de94cfd9d49d4bf673da1f8bed18bac8e9c5007c99a8dc48e\": rpc error: code = NotFound desc = could not find container \"33c92b2ec4e98e2de94cfd9d49d4bf673da1f8bed18bac8e9c5007c99a8dc48e\": container with ID starting with 33c92b2ec4e98e2de94cfd9d49d4bf673da1f8bed18bac8e9c5007c99a8dc48e not found: ID does not exist" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.632601 4558 scope.go:117] "RemoveContainer" containerID="36bfb57970925c866cd826c6992c1bb5eb23be201b3fee3a44013164f6bf6c45" Jan 20 17:27:19 crc kubenswrapper[4558]: E0120 17:27:19.633009 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"36bfb57970925c866cd826c6992c1bb5eb23be201b3fee3a44013164f6bf6c45\": container with ID starting with 36bfb57970925c866cd826c6992c1bb5eb23be201b3fee3a44013164f6bf6c45 not found: ID does not exist" containerID="36bfb57970925c866cd826c6992c1bb5eb23be201b3fee3a44013164f6bf6c45" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.633037 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"36bfb57970925c866cd826c6992c1bb5eb23be201b3fee3a44013164f6bf6c45"} err="failed to get container status \"36bfb57970925c866cd826c6992c1bb5eb23be201b3fee3a44013164f6bf6c45\": rpc error: code = NotFound desc = could not find container \"36bfb57970925c866cd826c6992c1bb5eb23be201b3fee3a44013164f6bf6c45\": container 
with ID starting with 36bfb57970925c866cd826c6992c1bb5eb23be201b3fee3a44013164f6bf6c45 not found: ID does not exist" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.633052 4558 scope.go:117] "RemoveContainer" containerID="e6440af1d01d3d7e989418e06350468870193b38937d2ae69a49ff7a72d20bf2" Jan 20 17:27:19 crc kubenswrapper[4558]: E0120 17:27:19.633597 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e6440af1d01d3d7e989418e06350468870193b38937d2ae69a49ff7a72d20bf2\": container with ID starting with e6440af1d01d3d7e989418e06350468870193b38937d2ae69a49ff7a72d20bf2 not found: ID does not exist" containerID="e6440af1d01d3d7e989418e06350468870193b38937d2ae69a49ff7a72d20bf2" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.633664 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e6440af1d01d3d7e989418e06350468870193b38937d2ae69a49ff7a72d20bf2"} err="failed to get container status \"e6440af1d01d3d7e989418e06350468870193b38937d2ae69a49ff7a72d20bf2\": rpc error: code = NotFound desc = could not find container \"e6440af1d01d3d7e989418e06350468870193b38937d2ae69a49ff7a72d20bf2\": container with ID starting with e6440af1d01d3d7e989418e06350468870193b38937d2ae69a49ff7a72d20bf2 not found: ID does not exist" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.637273 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ttrhb\" (UniqueName: \"kubernetes.io/projected/8804f9f1-60d4-4fa1-9333-9f9e01b3c68a-kube-api-access-ttrhb\") pod \"swift-storage-0\" (UID: \"8804f9f1-60d4-4fa1-9333-9f9e01b3c68a\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.647262 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"swift-storage-0\" (UID: \"8804f9f1-60d4-4fa1-9333-9f9e01b3c68a\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:27:19 crc kubenswrapper[4558]: I0120 17:27:19.663046 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:27:20 crc kubenswrapper[4558]: I0120 17:27:20.120029 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-storage-0"] Jan 20 17:27:20 crc kubenswrapper[4558]: I0120 17:27:20.267432 4558 generic.go:334] "Generic (PLEG): container finished" podID="005155ac-dadf-4840-9b7c-7eb1b0f3252e" containerID="078d2b2ec0a8bee0701e4f1972ae83126a9c9ca7a4fe3a16d9d7a16978e58d58" exitCode=0 Jan 20 17:27:20 crc kubenswrapper[4558]: I0120 17:27:20.267529 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-28nj8" event={"ID":"005155ac-dadf-4840-9b7c-7eb1b0f3252e","Type":"ContainerDied","Data":"078d2b2ec0a8bee0701e4f1972ae83126a9c9ca7a4fe3a16d9d7a16978e58d58"} Jan 20 17:27:20 crc kubenswrapper[4558]: I0120 17:27:20.270887 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"8804f9f1-60d4-4fa1-9333-9f9e01b3c68a","Type":"ContainerStarted","Data":"cbd1808ad849a969a4b336b424c07db8bc3fabb8d02d150668692d6b2fdabd39"} Jan 20 17:27:20 crc kubenswrapper[4558]: I0120 17:27:20.585396 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cbbfff95-9f90-4be2-9ed7-0a1cbbe43542" path="/var/lib/kubelet/pods/cbbfff95-9f90-4be2-9ed7-0a1cbbe43542/volumes" Jan 20 17:27:21 crc kubenswrapper[4558]: I0120 17:27:21.301390 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"8804f9f1-60d4-4fa1-9333-9f9e01b3c68a","Type":"ContainerStarted","Data":"703117c0c32a1ceba745d2e72e8de7d8ffea3ca4a98e3434eda2bde5c61b0d36"} Jan 20 17:27:21 crc kubenswrapper[4558]: I0120 17:27:21.301719 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"8804f9f1-60d4-4fa1-9333-9f9e01b3c68a","Type":"ContainerStarted","Data":"19bf1b2c84ff2748887d3f5954817882ace006a585e9b1cf3ff55c2286de632f"} Jan 20 17:27:21 crc kubenswrapper[4558]: I0120 17:27:21.301733 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"8804f9f1-60d4-4fa1-9333-9f9e01b3c68a","Type":"ContainerStarted","Data":"70b3bec12ba85f03f2bb9c09bdc640f03db124a8b41fd0886ea7b738171db315"} Jan 20 17:27:21 crc kubenswrapper[4558]: I0120 17:27:21.301742 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"8804f9f1-60d4-4fa1-9333-9f9e01b3c68a","Type":"ContainerStarted","Data":"b9e271e5199c2c5986711fa373c13ad8aade0056ea94afaa4c8bdfa52c131009"} Jan 20 17:27:21 crc kubenswrapper[4558]: I0120 17:27:21.301751 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"8804f9f1-60d4-4fa1-9333-9f9e01b3c68a","Type":"ContainerStarted","Data":"7355d501842ad1b14019ba4bbb3365f8da9a6587b24e3a344f6aeb0cbde4a6bd"} Jan 20 17:27:21 crc kubenswrapper[4558]: I0120 17:27:21.301758 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"8804f9f1-60d4-4fa1-9333-9f9e01b3c68a","Type":"ContainerStarted","Data":"2ee2e3e7ebe2d7413bfe073e0a35c416b50099cfb7c3db764047c0da6077f92e"} Jan 20 17:27:21 crc kubenswrapper[4558]: I0120 17:27:21.301767 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" 
event={"ID":"8804f9f1-60d4-4fa1-9333-9f9e01b3c68a","Type":"ContainerStarted","Data":"8ba1d4c7037c9af7af4ff6c69fdad67f28d557367c30a255c97945adb2a5670d"} Jan 20 17:27:21 crc kubenswrapper[4558]: I0120 17:27:21.587359 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-28nj8" Jan 20 17:27:21 crc kubenswrapper[4558]: I0120 17:27:21.779899 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/005155ac-dadf-4840-9b7c-7eb1b0f3252e-combined-ca-bundle\") pod \"005155ac-dadf-4840-9b7c-7eb1b0f3252e\" (UID: \"005155ac-dadf-4840-9b7c-7eb1b0f3252e\") " Jan 20 17:27:21 crc kubenswrapper[4558]: I0120 17:27:21.780020 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/005155ac-dadf-4840-9b7c-7eb1b0f3252e-scripts\") pod \"005155ac-dadf-4840-9b7c-7eb1b0f3252e\" (UID: \"005155ac-dadf-4840-9b7c-7eb1b0f3252e\") " Jan 20 17:27:21 crc kubenswrapper[4558]: I0120 17:27:21.780157 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/005155ac-dadf-4840-9b7c-7eb1b0f3252e-fernet-keys\") pod \"005155ac-dadf-4840-9b7c-7eb1b0f3252e\" (UID: \"005155ac-dadf-4840-9b7c-7eb1b0f3252e\") " Jan 20 17:27:21 crc kubenswrapper[4558]: I0120 17:27:21.780241 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j9rvg\" (UniqueName: \"kubernetes.io/projected/005155ac-dadf-4840-9b7c-7eb1b0f3252e-kube-api-access-j9rvg\") pod \"005155ac-dadf-4840-9b7c-7eb1b0f3252e\" (UID: \"005155ac-dadf-4840-9b7c-7eb1b0f3252e\") " Jan 20 17:27:21 crc kubenswrapper[4558]: I0120 17:27:21.780339 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/005155ac-dadf-4840-9b7c-7eb1b0f3252e-config-data\") pod \"005155ac-dadf-4840-9b7c-7eb1b0f3252e\" (UID: \"005155ac-dadf-4840-9b7c-7eb1b0f3252e\") " Jan 20 17:27:21 crc kubenswrapper[4558]: I0120 17:27:21.780368 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/005155ac-dadf-4840-9b7c-7eb1b0f3252e-credential-keys\") pod \"005155ac-dadf-4840-9b7c-7eb1b0f3252e\" (UID: \"005155ac-dadf-4840-9b7c-7eb1b0f3252e\") " Jan 20 17:27:21 crc kubenswrapper[4558]: I0120 17:27:21.785919 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/005155ac-dadf-4840-9b7c-7eb1b0f3252e-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "005155ac-dadf-4840-9b7c-7eb1b0f3252e" (UID: "005155ac-dadf-4840-9b7c-7eb1b0f3252e"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:21 crc kubenswrapper[4558]: I0120 17:27:21.786263 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/005155ac-dadf-4840-9b7c-7eb1b0f3252e-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "005155ac-dadf-4840-9b7c-7eb1b0f3252e" (UID: "005155ac-dadf-4840-9b7c-7eb1b0f3252e"). InnerVolumeSpecName "fernet-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:21 crc kubenswrapper[4558]: I0120 17:27:21.787295 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/005155ac-dadf-4840-9b7c-7eb1b0f3252e-scripts" (OuterVolumeSpecName: "scripts") pod "005155ac-dadf-4840-9b7c-7eb1b0f3252e" (UID: "005155ac-dadf-4840-9b7c-7eb1b0f3252e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:21 crc kubenswrapper[4558]: I0120 17:27:21.798312 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/005155ac-dadf-4840-9b7c-7eb1b0f3252e-kube-api-access-j9rvg" (OuterVolumeSpecName: "kube-api-access-j9rvg") pod "005155ac-dadf-4840-9b7c-7eb1b0f3252e" (UID: "005155ac-dadf-4840-9b7c-7eb1b0f3252e"). InnerVolumeSpecName "kube-api-access-j9rvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:27:21 crc kubenswrapper[4558]: I0120 17:27:21.812043 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/005155ac-dadf-4840-9b7c-7eb1b0f3252e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "005155ac-dadf-4840-9b7c-7eb1b0f3252e" (UID: "005155ac-dadf-4840-9b7c-7eb1b0f3252e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:21 crc kubenswrapper[4558]: I0120 17:27:21.815715 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/005155ac-dadf-4840-9b7c-7eb1b0f3252e-config-data" (OuterVolumeSpecName: "config-data") pod "005155ac-dadf-4840-9b7c-7eb1b0f3252e" (UID: "005155ac-dadf-4840-9b7c-7eb1b0f3252e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:21 crc kubenswrapper[4558]: I0120 17:27:21.884403 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/005155ac-dadf-4840-9b7c-7eb1b0f3252e-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:21 crc kubenswrapper[4558]: I0120 17:27:21.884675 4558 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/005155ac-dadf-4840-9b7c-7eb1b0f3252e-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:21 crc kubenswrapper[4558]: I0120 17:27:21.884690 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j9rvg\" (UniqueName: \"kubernetes.io/projected/005155ac-dadf-4840-9b7c-7eb1b0f3252e-kube-api-access-j9rvg\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:21 crc kubenswrapper[4558]: I0120 17:27:21.884701 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/005155ac-dadf-4840-9b7c-7eb1b0f3252e-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:21 crc kubenswrapper[4558]: I0120 17:27:21.884715 4558 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/005155ac-dadf-4840-9b7c-7eb1b0f3252e-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:21 crc kubenswrapper[4558]: I0120 17:27:21.884725 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/005155ac-dadf-4840-9b7c-7eb1b0f3252e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:21 crc kubenswrapper[4558]: I0120 17:27:21.991066 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-67ccd5cf94-6lkng" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.089391 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g5l9d\" (UniqueName: \"kubernetes.io/projected/d4f8d7f7-b0d0-4643-834d-1a5056d5cf89-kube-api-access-g5l9d\") pod \"d4f8d7f7-b0d0-4643-834d-1a5056d5cf89\" (UID: \"d4f8d7f7-b0d0-4643-834d-1a5056d5cf89\") " Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.089495 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4f8d7f7-b0d0-4643-834d-1a5056d5cf89-combined-ca-bundle\") pod \"d4f8d7f7-b0d0-4643-834d-1a5056d5cf89\" (UID: \"d4f8d7f7-b0d0-4643-834d-1a5056d5cf89\") " Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.089574 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d4f8d7f7-b0d0-4643-834d-1a5056d5cf89-config-data-custom\") pod \"d4f8d7f7-b0d0-4643-834d-1a5056d5cf89\" (UID: \"d4f8d7f7-b0d0-4643-834d-1a5056d5cf89\") " Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.095152 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d4f8d7f7-b0d0-4643-834d-1a5056d5cf89-kube-api-access-g5l9d" (OuterVolumeSpecName: "kube-api-access-g5l9d") pod "d4f8d7f7-b0d0-4643-834d-1a5056d5cf89" (UID: "d4f8d7f7-b0d0-4643-834d-1a5056d5cf89"). InnerVolumeSpecName "kube-api-access-g5l9d". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.100769 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4f8d7f7-b0d0-4643-834d-1a5056d5cf89-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "d4f8d7f7-b0d0-4643-834d-1a5056d5cf89" (UID: "d4f8d7f7-b0d0-4643-834d-1a5056d5cf89"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.133369 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4f8d7f7-b0d0-4643-834d-1a5056d5cf89-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d4f8d7f7-b0d0-4643-834d-1a5056d5cf89" (UID: "d4f8d7f7-b0d0-4643-834d-1a5056d5cf89"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.190900 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4f8d7f7-b0d0-4643-834d-1a5056d5cf89-config-data\") pod \"d4f8d7f7-b0d0-4643-834d-1a5056d5cf89\" (UID: \"d4f8d7f7-b0d0-4643-834d-1a5056d5cf89\") " Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.190938 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d4f8d7f7-b0d0-4643-834d-1a5056d5cf89-logs\") pod \"d4f8d7f7-b0d0-4643-834d-1a5056d5cf89\" (UID: \"d4f8d7f7-b0d0-4643-834d-1a5056d5cf89\") " Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.191328 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4f8d7f7-b0d0-4643-834d-1a5056d5cf89-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.191347 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d4f8d7f7-b0d0-4643-834d-1a5056d5cf89-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.191358 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g5l9d\" (UniqueName: \"kubernetes.io/projected/d4f8d7f7-b0d0-4643-834d-1a5056d5cf89-kube-api-access-g5l9d\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.191448 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-5cbb954895-rgzbf" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.191477 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d4f8d7f7-b0d0-4643-834d-1a5056d5cf89-logs" (OuterVolumeSpecName: "logs") pod "d4f8d7f7-b0d0-4643-834d-1a5056d5cf89" (UID: "d4f8d7f7-b0d0-4643-834d-1a5056d5cf89"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.247267 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4f8d7f7-b0d0-4643-834d-1a5056d5cf89-config-data" (OuterVolumeSpecName: "config-data") pod "d4f8d7f7-b0d0-4643-834d-1a5056d5cf89" (UID: "d4f8d7f7-b0d0-4643-834d-1a5056d5cf89"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.293344 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-94lnp\" (UniqueName: \"kubernetes.io/projected/b8f85165-600b-47f4-b459-e1eb023907a6-kube-api-access-94lnp\") pod \"b8f85165-600b-47f4-b459-e1eb023907a6\" (UID: \"b8f85165-600b-47f4-b459-e1eb023907a6\") " Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.293442 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8f85165-600b-47f4-b459-e1eb023907a6-combined-ca-bundle\") pod \"b8f85165-600b-47f4-b459-e1eb023907a6\" (UID: \"b8f85165-600b-47f4-b459-e1eb023907a6\") " Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.293533 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b8f85165-600b-47f4-b459-e1eb023907a6-logs\") pod \"b8f85165-600b-47f4-b459-e1eb023907a6\" (UID: \"b8f85165-600b-47f4-b459-e1eb023907a6\") " Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.294272 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b8f85165-600b-47f4-b459-e1eb023907a6-logs" (OuterVolumeSpecName: "logs") pod "b8f85165-600b-47f4-b459-e1eb023907a6" (UID: "b8f85165-600b-47f4-b459-e1eb023907a6"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.294605 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4f8d7f7-b0d0-4643-834d-1a5056d5cf89-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.294634 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d4f8d7f7-b0d0-4643-834d-1a5056d5cf89-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.294645 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b8f85165-600b-47f4-b459-e1eb023907a6-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.297156 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b8f85165-600b-47f4-b459-e1eb023907a6-kube-api-access-94lnp" (OuterVolumeSpecName: "kube-api-access-94lnp") pod "b8f85165-600b-47f4-b459-e1eb023907a6" (UID: "b8f85165-600b-47f4-b459-e1eb023907a6"). InnerVolumeSpecName "kube-api-access-94lnp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.342149 4558 generic.go:334] "Generic (PLEG): container finished" podID="d4f8d7f7-b0d0-4643-834d-1a5056d5cf89" containerID="9370273bf4e481977fef5391473819b681d5b9a82f04da2dbb3c091078de2d23" exitCode=0 Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.342414 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-67ccd5cf94-6lkng" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.342334 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-67ccd5cf94-6lkng" event={"ID":"d4f8d7f7-b0d0-4643-834d-1a5056d5cf89","Type":"ContainerDied","Data":"9370273bf4e481977fef5391473819b681d5b9a82f04da2dbb3c091078de2d23"} Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.345285 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-67ccd5cf94-6lkng" event={"ID":"d4f8d7f7-b0d0-4643-834d-1a5056d5cf89","Type":"ContainerDied","Data":"8ae9a7b0c7ca5fa6dadabdb9990fde848cdca6d3d61876dba5886a547e136754"} Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.345319 4558 scope.go:117] "RemoveContainer" containerID="9370273bf4e481977fef5391473819b681d5b9a82f04da2dbb3c091078de2d23" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.346968 4558 generic.go:334] "Generic (PLEG): container finished" podID="b8f85165-600b-47f4-b459-e1eb023907a6" containerID="99a4a1d716eab3ac3b920c0fee4a68d744e9cc640f5f82e54e1934eaf6260207" exitCode=0 Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.347024 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-5cbb954895-rgzbf" event={"ID":"b8f85165-600b-47f4-b459-e1eb023907a6","Type":"ContainerDied","Data":"99a4a1d716eab3ac3b920c0fee4a68d744e9cc640f5f82e54e1934eaf6260207"} Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.347044 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-5cbb954895-rgzbf" event={"ID":"b8f85165-600b-47f4-b459-e1eb023907a6","Type":"ContainerDied","Data":"0323677f63509ece02e01ee5f3833ba23b2e1d09a04dc41591cbdaf181c384f9"} Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.347111 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-5cbb954895-rgzbf" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.352187 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-28nj8" event={"ID":"005155ac-dadf-4840-9b7c-7eb1b0f3252e","Type":"ContainerDied","Data":"95fbc0b4cb8c6822a2c9bec6334502c8f64b249253c034d443ebf64e911017e7"} Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.352231 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="95fbc0b4cb8c6822a2c9bec6334502c8f64b249253c034d443ebf64e911017e7" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.352313 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-28nj8" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.369415 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8f85165-600b-47f4-b459-e1eb023907a6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b8f85165-600b-47f4-b459-e1eb023907a6" (UID: "b8f85165-600b-47f4-b459-e1eb023907a6"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.375787 4558 scope.go:117] "RemoveContainer" containerID="c6c1817123e3f5c8cea565cdf69b0faaf8aa7b2548ca93d325c3dcfe94cc8266" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.378200 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"8804f9f1-60d4-4fa1-9333-9f9e01b3c68a","Type":"ContainerStarted","Data":"36e479ddabc13ddf3cd61164acd11174c752edf6961acec9ae67d8951043aee5"} Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.378260 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"8804f9f1-60d4-4fa1-9333-9f9e01b3c68a","Type":"ContainerStarted","Data":"5abed863186c32fde3a40dcffb538fd7112acbbdfd253d93751ff00096a299c5"} Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.378276 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"8804f9f1-60d4-4fa1-9333-9f9e01b3c68a","Type":"ContainerStarted","Data":"d700282b5693986cac00fa3aff076f819077ba4d7e9191db243d0f5c076b2c4d"} Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.378285 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"8804f9f1-60d4-4fa1-9333-9f9e01b3c68a","Type":"ContainerStarted","Data":"4a6334c4fa8ffc04d0aaf20255ac8f9132d2e26680c4b1f11d017cfb5b777eec"} Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.378294 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"8804f9f1-60d4-4fa1-9333-9f9e01b3c68a","Type":"ContainerStarted","Data":"8c8b5915d3326030a79e4f528fb27f011fc914ddc227bbc37139ba81a55e7e83"} Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.379293 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-worker-67ccd5cf94-6lkng"] Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.388027 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-worker-67ccd5cf94-6lkng"] Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.395367 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8f85165-600b-47f4-b459-e1eb023907a6-config-data\") pod \"b8f85165-600b-47f4-b459-e1eb023907a6\" (UID: \"b8f85165-600b-47f4-b459-e1eb023907a6\") " Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.395556 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b8f85165-600b-47f4-b459-e1eb023907a6-config-data-custom\") pod \"b8f85165-600b-47f4-b459-e1eb023907a6\" (UID: \"b8f85165-600b-47f4-b459-e1eb023907a6\") " Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.396350 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-94lnp\" (UniqueName: \"kubernetes.io/projected/b8f85165-600b-47f4-b459-e1eb023907a6-kube-api-access-94lnp\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.396376 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8f85165-600b-47f4-b459-e1eb023907a6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.398260 4558 scope.go:117] "RemoveContainer" 
containerID="9370273bf4e481977fef5391473819b681d5b9a82f04da2dbb3c091078de2d23" Jan 20 17:27:22 crc kubenswrapper[4558]: E0120 17:27:22.398707 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9370273bf4e481977fef5391473819b681d5b9a82f04da2dbb3c091078de2d23\": container with ID starting with 9370273bf4e481977fef5391473819b681d5b9a82f04da2dbb3c091078de2d23 not found: ID does not exist" containerID="9370273bf4e481977fef5391473819b681d5b9a82f04da2dbb3c091078de2d23" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.398737 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9370273bf4e481977fef5391473819b681d5b9a82f04da2dbb3c091078de2d23"} err="failed to get container status \"9370273bf4e481977fef5391473819b681d5b9a82f04da2dbb3c091078de2d23\": rpc error: code = NotFound desc = could not find container \"9370273bf4e481977fef5391473819b681d5b9a82f04da2dbb3c091078de2d23\": container with ID starting with 9370273bf4e481977fef5391473819b681d5b9a82f04da2dbb3c091078de2d23 not found: ID does not exist" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.398785 4558 scope.go:117] "RemoveContainer" containerID="c6c1817123e3f5c8cea565cdf69b0faaf8aa7b2548ca93d325c3dcfe94cc8266" Jan 20 17:27:22 crc kubenswrapper[4558]: E0120 17:27:22.399544 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c6c1817123e3f5c8cea565cdf69b0faaf8aa7b2548ca93d325c3dcfe94cc8266\": container with ID starting with c6c1817123e3f5c8cea565cdf69b0faaf8aa7b2548ca93d325c3dcfe94cc8266 not found: ID does not exist" containerID="c6c1817123e3f5c8cea565cdf69b0faaf8aa7b2548ca93d325c3dcfe94cc8266" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.399660 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c6c1817123e3f5c8cea565cdf69b0faaf8aa7b2548ca93d325c3dcfe94cc8266"} err="failed to get container status \"c6c1817123e3f5c8cea565cdf69b0faaf8aa7b2548ca93d325c3dcfe94cc8266\": rpc error: code = NotFound desc = could not find container \"c6c1817123e3f5c8cea565cdf69b0faaf8aa7b2548ca93d325c3dcfe94cc8266\": container with ID starting with c6c1817123e3f5c8cea565cdf69b0faaf8aa7b2548ca93d325c3dcfe94cc8266 not found: ID does not exist" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.399780 4558 scope.go:117] "RemoveContainer" containerID="99a4a1d716eab3ac3b920c0fee4a68d744e9cc640f5f82e54e1934eaf6260207" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.409559 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8f85165-600b-47f4-b459-e1eb023907a6-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "b8f85165-600b-47f4-b459-e1eb023907a6" (UID: "b8f85165-600b-47f4-b459-e1eb023907a6"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.421662 4558 scope.go:117] "RemoveContainer" containerID="b1b8dd351e12ca92ea5d56e15dd5ab075ca2eb4ee0213bbd1f5c91319f363239" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.440816 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.442040 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.444309 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8f85165-600b-47f4-b459-e1eb023907a6-config-data" (OuterVolumeSpecName: "config-data") pod "b8f85165-600b-47f4-b459-e1eb023907a6" (UID: "b8f85165-600b-47f4-b459-e1eb023907a6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.493605 4558 scope.go:117] "RemoveContainer" containerID="99a4a1d716eab3ac3b920c0fee4a68d744e9cc640f5f82e54e1934eaf6260207" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.497470 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b8f85165-600b-47f4-b459-e1eb023907a6-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.498581 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8f85165-600b-47f4-b459-e1eb023907a6-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.498794 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-74494d9d6c-qv9st"] Jan 20 17:27:22 crc kubenswrapper[4558]: E0120 17:27:22.499186 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"99a4a1d716eab3ac3b920c0fee4a68d744e9cc640f5f82e54e1934eaf6260207\": container with ID starting with 99a4a1d716eab3ac3b920c0fee4a68d744e9cc640f5f82e54e1934eaf6260207 not found: ID does not exist" containerID="99a4a1d716eab3ac3b920c0fee4a68d744e9cc640f5f82e54e1934eaf6260207" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.499243 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"99a4a1d716eab3ac3b920c0fee4a68d744e9cc640f5f82e54e1934eaf6260207"} err="failed to get container status \"99a4a1d716eab3ac3b920c0fee4a68d744e9cc640f5f82e54e1934eaf6260207\": rpc error: code = NotFound desc = could not find container \"99a4a1d716eab3ac3b920c0fee4a68d744e9cc640f5f82e54e1934eaf6260207\": container with ID starting with 99a4a1d716eab3ac3b920c0fee4a68d744e9cc640f5f82e54e1934eaf6260207 not found: ID does not exist" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.499267 4558 scope.go:117] "RemoveContainer" containerID="b1b8dd351e12ca92ea5d56e15dd5ab075ca2eb4ee0213bbd1f5c91319f363239" Jan 20 17:27:22 crc kubenswrapper[4558]: E0120 17:27:22.499564 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8f85165-600b-47f4-b459-e1eb023907a6" containerName="barbican-keystone-listener-log" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.499633 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8f85165-600b-47f4-b459-e1eb023907a6" 
containerName="barbican-keystone-listener-log" Jan 20 17:27:22 crc kubenswrapper[4558]: E0120 17:27:22.499702 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8f85165-600b-47f4-b459-e1eb023907a6" containerName="barbican-keystone-listener" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.499751 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8f85165-600b-47f4-b459-e1eb023907a6" containerName="barbican-keystone-listener" Jan 20 17:27:22 crc kubenswrapper[4558]: E0120 17:27:22.499812 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4f8d7f7-b0d0-4643-834d-1a5056d5cf89" containerName="barbican-worker-log" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.499867 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4f8d7f7-b0d0-4643-834d-1a5056d5cf89" containerName="barbican-worker-log" Jan 20 17:27:22 crc kubenswrapper[4558]: E0120 17:27:22.499998 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4f8d7f7-b0d0-4643-834d-1a5056d5cf89" containerName="barbican-worker" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.500053 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4f8d7f7-b0d0-4643-834d-1a5056d5cf89" containerName="barbican-worker" Jan 20 17:27:22 crc kubenswrapper[4558]: E0120 17:27:22.500118 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="005155ac-dadf-4840-9b7c-7eb1b0f3252e" containerName="keystone-bootstrap" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.500179 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="005155ac-dadf-4840-9b7c-7eb1b0f3252e" containerName="keystone-bootstrap" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.500390 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b8f85165-600b-47f4-b459-e1eb023907a6" containerName="barbican-keystone-listener" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.500458 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="005155ac-dadf-4840-9b7c-7eb1b0f3252e" containerName="keystone-bootstrap" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.500523 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b8f85165-600b-47f4-b459-e1eb023907a6" containerName="barbican-keystone-listener-log" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.500576 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4f8d7f7-b0d0-4643-834d-1a5056d5cf89" containerName="barbican-worker" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.500630 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4f8d7f7-b0d0-4643-834d-1a5056d5cf89" containerName="barbican-worker-log" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.501358 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-74494d9d6c-qv9st" Jan 20 17:27:22 crc kubenswrapper[4558]: E0120 17:27:22.503187 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b1b8dd351e12ca92ea5d56e15dd5ab075ca2eb4ee0213bbd1f5c91319f363239\": container with ID starting with b1b8dd351e12ca92ea5d56e15dd5ab075ca2eb4ee0213bbd1f5c91319f363239 not found: ID does not exist" containerID="b1b8dd351e12ca92ea5d56e15dd5ab075ca2eb4ee0213bbd1f5c91319f363239" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.503280 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b1b8dd351e12ca92ea5d56e15dd5ab075ca2eb4ee0213bbd1f5c91319f363239"} err="failed to get container status \"b1b8dd351e12ca92ea5d56e15dd5ab075ca2eb4ee0213bbd1f5c91319f363239\": rpc error: code = NotFound desc = could not find container \"b1b8dd351e12ca92ea5d56e15dd5ab075ca2eb4ee0213bbd1f5c91319f363239\": container with ID starting with b1b8dd351e12ca92ea5d56e15dd5ab075ca2eb4ee0213bbd1f5c91319f363239 not found: ID does not exist" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.535945 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-74494d9d6c-qv9st"] Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.561654 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.642103 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d4f8d7f7-b0d0-4643-834d-1a5056d5cf89" path="/var/lib/kubelet/pods/d4f8d7f7-b0d0-4643-834d-1a5056d5cf89/volumes" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.651335 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.693793 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.710293 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7d72e21b-f0dd-48d4-9594-bb39aae6fa9d-scripts\") pod \"keystone-74494d9d6c-qv9st\" (UID: \"7d72e21b-f0dd-48d4-9594-bb39aae6fa9d\") " pod="openstack-kuttl-tests/keystone-74494d9d6c-qv9st" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.710550 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/7d72e21b-f0dd-48d4-9594-bb39aae6fa9d-credential-keys\") pod \"keystone-74494d9d6c-qv9st\" (UID: \"7d72e21b-f0dd-48d4-9594-bb39aae6fa9d\") " pod="openstack-kuttl-tests/keystone-74494d9d6c-qv9st" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.710605 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d72e21b-f0dd-48d4-9594-bb39aae6fa9d-config-data\") pod \"keystone-74494d9d6c-qv9st\" (UID: \"7d72e21b-f0dd-48d4-9594-bb39aae6fa9d\") " pod="openstack-kuttl-tests/keystone-74494d9d6c-qv9st" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.710634 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/7d72e21b-f0dd-48d4-9594-bb39aae6fa9d-combined-ca-bundle\") pod \"keystone-74494d9d6c-qv9st\" (UID: \"7d72e21b-f0dd-48d4-9594-bb39aae6fa9d\") " pod="openstack-kuttl-tests/keystone-74494d9d6c-qv9st" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.710686 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7d72e21b-f0dd-48d4-9594-bb39aae6fa9d-public-tls-certs\") pod \"keystone-74494d9d6c-qv9st\" (UID: \"7d72e21b-f0dd-48d4-9594-bb39aae6fa9d\") " pod="openstack-kuttl-tests/keystone-74494d9d6c-qv9st" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.710781 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7d72e21b-f0dd-48d4-9594-bb39aae6fa9d-internal-tls-certs\") pod \"keystone-74494d9d6c-qv9st\" (UID: \"7d72e21b-f0dd-48d4-9594-bb39aae6fa9d\") " pod="openstack-kuttl-tests/keystone-74494d9d6c-qv9st" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.711034 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7d72e21b-f0dd-48d4-9594-bb39aae6fa9d-fernet-keys\") pod \"keystone-74494d9d6c-qv9st\" (UID: \"7d72e21b-f0dd-48d4-9594-bb39aae6fa9d\") " pod="openstack-kuttl-tests/keystone-74494d9d6c-qv9st" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.711281 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kqq8g\" (UniqueName: \"kubernetes.io/projected/7d72e21b-f0dd-48d4-9594-bb39aae6fa9d-kube-api-access-kqq8g\") pod \"keystone-74494d9d6c-qv9st\" (UID: \"7d72e21b-f0dd-48d4-9594-bb39aae6fa9d\") " pod="openstack-kuttl-tests/keystone-74494d9d6c-qv9st" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.750119 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-5cbb954895-rgzbf"] Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.752227 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.752835 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.761532 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-5cbb954895-rgzbf"] Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.797231 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.813591 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kqq8g\" (UniqueName: \"kubernetes.io/projected/7d72e21b-f0dd-48d4-9594-bb39aae6fa9d-kube-api-access-kqq8g\") pod \"keystone-74494d9d6c-qv9st\" (UID: \"7d72e21b-f0dd-48d4-9594-bb39aae6fa9d\") " pod="openstack-kuttl-tests/keystone-74494d9d6c-qv9st" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.813743 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7d72e21b-f0dd-48d4-9594-bb39aae6fa9d-scripts\") pod \"keystone-74494d9d6c-qv9st\" (UID: 
\"7d72e21b-f0dd-48d4-9594-bb39aae6fa9d\") " pod="openstack-kuttl-tests/keystone-74494d9d6c-qv9st" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.813834 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/7d72e21b-f0dd-48d4-9594-bb39aae6fa9d-credential-keys\") pod \"keystone-74494d9d6c-qv9st\" (UID: \"7d72e21b-f0dd-48d4-9594-bb39aae6fa9d\") " pod="openstack-kuttl-tests/keystone-74494d9d6c-qv9st" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.813865 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d72e21b-f0dd-48d4-9594-bb39aae6fa9d-config-data\") pod \"keystone-74494d9d6c-qv9st\" (UID: \"7d72e21b-f0dd-48d4-9594-bb39aae6fa9d\") " pod="openstack-kuttl-tests/keystone-74494d9d6c-qv9st" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.813885 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d72e21b-f0dd-48d4-9594-bb39aae6fa9d-combined-ca-bundle\") pod \"keystone-74494d9d6c-qv9st\" (UID: \"7d72e21b-f0dd-48d4-9594-bb39aae6fa9d\") " pod="openstack-kuttl-tests/keystone-74494d9d6c-qv9st" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.813912 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7d72e21b-f0dd-48d4-9594-bb39aae6fa9d-public-tls-certs\") pod \"keystone-74494d9d6c-qv9st\" (UID: \"7d72e21b-f0dd-48d4-9594-bb39aae6fa9d\") " pod="openstack-kuttl-tests/keystone-74494d9d6c-qv9st" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.813943 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7d72e21b-f0dd-48d4-9594-bb39aae6fa9d-internal-tls-certs\") pod \"keystone-74494d9d6c-qv9st\" (UID: \"7d72e21b-f0dd-48d4-9594-bb39aae6fa9d\") " pod="openstack-kuttl-tests/keystone-74494d9d6c-qv9st" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.814035 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7d72e21b-f0dd-48d4-9594-bb39aae6fa9d-fernet-keys\") pod \"keystone-74494d9d6c-qv9st\" (UID: \"7d72e21b-f0dd-48d4-9594-bb39aae6fa9d\") " pod="openstack-kuttl-tests/keystone-74494d9d6c-qv9st" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.820319 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7d72e21b-f0dd-48d4-9594-bb39aae6fa9d-scripts\") pod \"keystone-74494d9d6c-qv9st\" (UID: \"7d72e21b-f0dd-48d4-9594-bb39aae6fa9d\") " pod="openstack-kuttl-tests/keystone-74494d9d6c-qv9st" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.820795 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7d72e21b-f0dd-48d4-9594-bb39aae6fa9d-internal-tls-certs\") pod \"keystone-74494d9d6c-qv9st\" (UID: \"7d72e21b-f0dd-48d4-9594-bb39aae6fa9d\") " pod="openstack-kuttl-tests/keystone-74494d9d6c-qv9st" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.824697 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7d72e21b-f0dd-48d4-9594-bb39aae6fa9d-fernet-keys\") pod \"keystone-74494d9d6c-qv9st\" (UID: \"7d72e21b-f0dd-48d4-9594-bb39aae6fa9d\") " 
pod="openstack-kuttl-tests/keystone-74494d9d6c-qv9st" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.826659 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d72e21b-f0dd-48d4-9594-bb39aae6fa9d-combined-ca-bundle\") pod \"keystone-74494d9d6c-qv9st\" (UID: \"7d72e21b-f0dd-48d4-9594-bb39aae6fa9d\") " pod="openstack-kuttl-tests/keystone-74494d9d6c-qv9st" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.832894 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.849713 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kqq8g\" (UniqueName: \"kubernetes.io/projected/7d72e21b-f0dd-48d4-9594-bb39aae6fa9d-kube-api-access-kqq8g\") pod \"keystone-74494d9d6c-qv9st\" (UID: \"7d72e21b-f0dd-48d4-9594-bb39aae6fa9d\") " pod="openstack-kuttl-tests/keystone-74494d9d6c-qv9st" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.865526 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/7d72e21b-f0dd-48d4-9594-bb39aae6fa9d-credential-keys\") pod \"keystone-74494d9d6c-qv9st\" (UID: \"7d72e21b-f0dd-48d4-9594-bb39aae6fa9d\") " pod="openstack-kuttl-tests/keystone-74494d9d6c-qv9st" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.865677 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7d72e21b-f0dd-48d4-9594-bb39aae6fa9d-public-tls-certs\") pod \"keystone-74494d9d6c-qv9st\" (UID: \"7d72e21b-f0dd-48d4-9594-bb39aae6fa9d\") " pod="openstack-kuttl-tests/keystone-74494d9d6c-qv9st" Jan 20 17:27:22 crc kubenswrapper[4558]: I0120 17:27:22.869092 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d72e21b-f0dd-48d4-9594-bb39aae6fa9d-config-data\") pod \"keystone-74494d9d6c-qv9st\" (UID: \"7d72e21b-f0dd-48d4-9594-bb39aae6fa9d\") " pod="openstack-kuttl-tests/keystone-74494d9d6c-qv9st" Jan 20 17:27:23 crc kubenswrapper[4558]: I0120 17:27:23.098288 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:27:23 crc kubenswrapper[4558]: I0120 17:27:23.135372 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-74494d9d6c-qv9st" Jan 20 17:27:23 crc kubenswrapper[4558]: I0120 17:27:23.400254 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"8804f9f1-60d4-4fa1-9333-9f9e01b3c68a","Type":"ContainerStarted","Data":"94d135b3e27081e915874cdf29659d51450157f87f3b4f37332d5d83a3c11459"} Jan 20 17:27:23 crc kubenswrapper[4558]: I0120 17:27:23.400688 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"8804f9f1-60d4-4fa1-9333-9f9e01b3c68a","Type":"ContainerStarted","Data":"2953f93c5ea7797971404b822588bdd28383773e127010c836cbd6ce7a1b5450"} Jan 20 17:27:23 crc kubenswrapper[4558]: I0120 17:27:23.400720 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"8804f9f1-60d4-4fa1-9333-9f9e01b3c68a","Type":"ContainerStarted","Data":"33faf2c0a93c93d45813868ee43337a14e962791dad79303fe6a9a0671ec791e"} Jan 20 17:27:23 crc kubenswrapper[4558]: I0120 17:27:23.405494 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:27:23 crc kubenswrapper[4558]: I0120 17:27:23.405560 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:27:23 crc kubenswrapper[4558]: I0120 17:27:23.405577 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:27:23 crc kubenswrapper[4558]: I0120 17:27:23.405586 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:27:23 crc kubenswrapper[4558]: I0120 17:27:23.433994 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/swift-storage-0" podStartSLOduration=4.433975765 podStartE2EDuration="4.433975765s" podCreationTimestamp="2026-01-20 17:27:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:27:23.430602973 +0000 UTC m=+2737.190940941" watchObservedRunningTime="2026-01-20 17:27:23.433975765 +0000 UTC m=+2737.194313732" Jan 20 17:27:23 crc kubenswrapper[4558]: I0120 17:27:23.548838 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-7947c54dc7-w2kxl"] Jan 20 17:27:23 crc kubenswrapper[4558]: I0120 17:27:23.550476 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7947c54dc7-w2kxl" Jan 20 17:27:23 crc kubenswrapper[4558]: I0120 17:27:23.555717 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-74494d9d6c-qv9st"] Jan 20 17:27:23 crc kubenswrapper[4558]: I0120 17:27:23.562026 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-7947c54dc7-w2kxl"] Jan 20 17:27:23 crc kubenswrapper[4558]: I0120 17:27:23.637470 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/43ad40c9-142e-42c2-b46b-e998fb487f42-dns-swift-storage-0\") pod \"dnsmasq-dnsmasq-7947c54dc7-w2kxl\" (UID: \"43ad40c9-142e-42c2-b46b-e998fb487f42\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7947c54dc7-w2kxl" Jan 20 17:27:23 crc kubenswrapper[4558]: I0120 17:27:23.637535 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/43ad40c9-142e-42c2-b46b-e998fb487f42-config\") pod \"dnsmasq-dnsmasq-7947c54dc7-w2kxl\" (UID: \"43ad40c9-142e-42c2-b46b-e998fb487f42\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7947c54dc7-w2kxl" Jan 20 17:27:23 crc kubenswrapper[4558]: I0120 17:27:23.637685 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lbh2l\" (UniqueName: \"kubernetes.io/projected/43ad40c9-142e-42c2-b46b-e998fb487f42-kube-api-access-lbh2l\") pod \"dnsmasq-dnsmasq-7947c54dc7-w2kxl\" (UID: \"43ad40c9-142e-42c2-b46b-e998fb487f42\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7947c54dc7-w2kxl" Jan 20 17:27:23 crc kubenswrapper[4558]: I0120 17:27:23.637827 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/43ad40c9-142e-42c2-b46b-e998fb487f42-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-7947c54dc7-w2kxl\" (UID: \"43ad40c9-142e-42c2-b46b-e998fb487f42\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7947c54dc7-w2kxl" Jan 20 17:27:23 crc kubenswrapper[4558]: I0120 17:27:23.739605 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/43ad40c9-142e-42c2-b46b-e998fb487f42-config\") pod \"dnsmasq-dnsmasq-7947c54dc7-w2kxl\" (UID: \"43ad40c9-142e-42c2-b46b-e998fb487f42\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7947c54dc7-w2kxl" Jan 20 17:27:23 crc kubenswrapper[4558]: I0120 17:27:23.739873 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lbh2l\" (UniqueName: \"kubernetes.io/projected/43ad40c9-142e-42c2-b46b-e998fb487f42-kube-api-access-lbh2l\") pod \"dnsmasq-dnsmasq-7947c54dc7-w2kxl\" (UID: \"43ad40c9-142e-42c2-b46b-e998fb487f42\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7947c54dc7-w2kxl" Jan 20 17:27:23 crc kubenswrapper[4558]: I0120 17:27:23.739964 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/43ad40c9-142e-42c2-b46b-e998fb487f42-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-7947c54dc7-w2kxl\" (UID: \"43ad40c9-142e-42c2-b46b-e998fb487f42\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7947c54dc7-w2kxl" Jan 20 17:27:23 crc kubenswrapper[4558]: I0120 17:27:23.740096 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: 
\"kubernetes.io/configmap/43ad40c9-142e-42c2-b46b-e998fb487f42-dns-swift-storage-0\") pod \"dnsmasq-dnsmasq-7947c54dc7-w2kxl\" (UID: \"43ad40c9-142e-42c2-b46b-e998fb487f42\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7947c54dc7-w2kxl" Jan 20 17:27:23 crc kubenswrapper[4558]: I0120 17:27:23.740478 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/43ad40c9-142e-42c2-b46b-e998fb487f42-config\") pod \"dnsmasq-dnsmasq-7947c54dc7-w2kxl\" (UID: \"43ad40c9-142e-42c2-b46b-e998fb487f42\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7947c54dc7-w2kxl" Jan 20 17:27:23 crc kubenswrapper[4558]: I0120 17:27:23.740590 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/43ad40c9-142e-42c2-b46b-e998fb487f42-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-7947c54dc7-w2kxl\" (UID: \"43ad40c9-142e-42c2-b46b-e998fb487f42\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7947c54dc7-w2kxl" Jan 20 17:27:23 crc kubenswrapper[4558]: I0120 17:27:23.740879 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/43ad40c9-142e-42c2-b46b-e998fb487f42-dns-swift-storage-0\") pod \"dnsmasq-dnsmasq-7947c54dc7-w2kxl\" (UID: \"43ad40c9-142e-42c2-b46b-e998fb487f42\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7947c54dc7-w2kxl" Jan 20 17:27:23 crc kubenswrapper[4558]: I0120 17:27:23.758830 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lbh2l\" (UniqueName: \"kubernetes.io/projected/43ad40c9-142e-42c2-b46b-e998fb487f42-kube-api-access-lbh2l\") pod \"dnsmasq-dnsmasq-7947c54dc7-w2kxl\" (UID: \"43ad40c9-142e-42c2-b46b-e998fb487f42\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7947c54dc7-w2kxl" Jan 20 17:27:23 crc kubenswrapper[4558]: I0120 17:27:23.868193 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7947c54dc7-w2kxl" Jan 20 17:27:24 crc kubenswrapper[4558]: I0120 17:27:24.295967 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-7947c54dc7-w2kxl"] Jan 20 17:27:24 crc kubenswrapper[4558]: W0120 17:27:24.297138 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod43ad40c9_142e_42c2_b46b_e998fb487f42.slice/crio-45097c1a23cd594c174b20abc1a35ba156d33e0f07339e00ac7ed28f0b61e82c WatchSource:0}: Error finding container 45097c1a23cd594c174b20abc1a35ba156d33e0f07339e00ac7ed28f0b61e82c: Status 404 returned error can't find the container with id 45097c1a23cd594c174b20abc1a35ba156d33e0f07339e00ac7ed28f0b61e82c Jan 20 17:27:24 crc kubenswrapper[4558]: I0120 17:27:24.425535 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7947c54dc7-w2kxl" event={"ID":"43ad40c9-142e-42c2-b46b-e998fb487f42","Type":"ContainerStarted","Data":"45097c1a23cd594c174b20abc1a35ba156d33e0f07339e00ac7ed28f0b61e82c"} Jan 20 17:27:24 crc kubenswrapper[4558]: I0120 17:27:24.429294 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-74494d9d6c-qv9st" event={"ID":"7d72e21b-f0dd-48d4-9594-bb39aae6fa9d","Type":"ContainerStarted","Data":"911340d3455407e7d7596ed292889c631409f00406037f4417caa85ad7dc54a6"} Jan 20 17:27:24 crc kubenswrapper[4558]: I0120 17:27:24.429344 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-74494d9d6c-qv9st" event={"ID":"7d72e21b-f0dd-48d4-9594-bb39aae6fa9d","Type":"ContainerStarted","Data":"1daf69cee188fa87bbd078da1bc9c2cd7dceb6bfa47e1fdeb19d4ec98b39ed5a"} Jan 20 17:27:24 crc kubenswrapper[4558]: I0120 17:27:24.485568 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/keystone-74494d9d6c-qv9st" podStartSLOduration=2.485549905 podStartE2EDuration="2.485549905s" podCreationTimestamp="2026-01-20 17:27:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:27:24.480178737 +0000 UTC m=+2738.240516704" watchObservedRunningTime="2026-01-20 17:27:24.485549905 +0000 UTC m=+2738.245887872" Jan 20 17:27:24 crc kubenswrapper[4558]: I0120 17:27:24.583700 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b8f85165-600b-47f4-b459-e1eb023907a6" path="/var/lib/kubelet/pods/b8f85165-600b-47f4-b459-e1eb023907a6/volumes" Jan 20 17:27:25 crc kubenswrapper[4558]: I0120 17:27:25.212450 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:27:25 crc kubenswrapper[4558]: I0120 17:27:25.256675 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:27:25 crc kubenswrapper[4558]: I0120 17:27:25.263506 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:27:25 crc kubenswrapper[4558]: I0120 17:27:25.404921 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:27:25 crc kubenswrapper[4558]: I0120 17:27:25.448508 4558 generic.go:334] "Generic (PLEG): container finished" 
podID="7c7138fb-59b8-47c0-a431-023fa79404f1" containerID="98d4b6952becbe3b78b67cf6ee7b8b7cc7435eda4e3a11dead4ce7aba086ca2a" exitCode=137 Jan 20 17:27:25 crc kubenswrapper[4558]: I0120 17:27:25.448606 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"7c7138fb-59b8-47c0-a431-023fa79404f1","Type":"ContainerDied","Data":"98d4b6952becbe3b78b67cf6ee7b8b7cc7435eda4e3a11dead4ce7aba086ca2a"} Jan 20 17:27:25 crc kubenswrapper[4558]: I0120 17:27:25.451786 4558 generic.go:334] "Generic (PLEG): container finished" podID="43ad40c9-142e-42c2-b46b-e998fb487f42" containerID="f8313c346808cf6301fc20ee128dcf9490dacea0bfd25b44a105e2f69473c4d2" exitCode=0 Jan 20 17:27:25 crc kubenswrapper[4558]: I0120 17:27:25.452957 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7947c54dc7-w2kxl" event={"ID":"43ad40c9-142e-42c2-b46b-e998fb487f42","Type":"ContainerDied","Data":"f8313c346808cf6301fc20ee128dcf9490dacea0bfd25b44a105e2f69473c4d2"} Jan 20 17:27:25 crc kubenswrapper[4558]: I0120 17:27:25.454090 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/keystone-74494d9d6c-qv9st" Jan 20 17:27:26 crc kubenswrapper[4558]: I0120 17:27:26.464139 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7947c54dc7-w2kxl" event={"ID":"43ad40c9-142e-42c2-b46b-e998fb487f42","Type":"ContainerStarted","Data":"3130f3ec22ad881b33c249b0c38e752bf5b2698967894c2673b4174032a505d0"} Jan 20 17:27:26 crc kubenswrapper[4558]: I0120 17:27:26.464676 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7947c54dc7-w2kxl" Jan 20 17:27:26 crc kubenswrapper[4558]: I0120 17:27:26.466001 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"7c7138fb-59b8-47c0-a431-023fa79404f1","Type":"ContainerStarted","Data":"1037b028087ec07b7d9badbde18841aeb9f38ee0fdee885a7163230235716a60"} Jan 20 17:27:26 crc kubenswrapper[4558]: I0120 17:27:26.466335 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-scheduler-0" podUID="7c7138fb-59b8-47c0-a431-023fa79404f1" containerName="probe" containerID="cri-o://b4e76db6b8f078c1dc089ec0784ae087f970f12f50927744b7cf7a30c75eb1e1" gracePeriod=30 Jan 20 17:27:26 crc kubenswrapper[4558]: I0120 17:27:26.466349 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-scheduler-0" podUID="7c7138fb-59b8-47c0-a431-023fa79404f1" containerName="cinder-scheduler" containerID="cri-o://1037b028087ec07b7d9badbde18841aeb9f38ee0fdee885a7163230235716a60" gracePeriod=30 Jan 20 17:27:26 crc kubenswrapper[4558]: I0120 17:27:26.490178 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7947c54dc7-w2kxl" podStartSLOduration=3.490136051 podStartE2EDuration="3.490136051s" podCreationTimestamp="2026-01-20 17:27:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:27:26.484393866 +0000 UTC m=+2740.244731823" watchObservedRunningTime="2026-01-20 17:27:26.490136051 +0000 UTC m=+2740.250474018" Jan 20 17:27:26 crc kubenswrapper[4558]: I0120 17:27:26.592691 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:27:27 crc kubenswrapper[4558]: I0120 17:27:27.115402 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/barbican-api-84969c976-hhcrx" Jan 20 17:27:27 crc kubenswrapper[4558]: I0120 17:27:27.127707 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/barbican-api-84969c976-hhcrx" Jan 20 17:27:27 crc kubenswrapper[4558]: I0120 17:27:27.185242 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-api-666d4cc7c-5vfvn"] Jan 20 17:27:27 crc kubenswrapper[4558]: I0120 17:27:27.185501 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-api-666d4cc7c-5vfvn" podUID="000dbc78-69b3-452b-8d87-9781c58b7b06" containerName="barbican-api-log" containerID="cri-o://da0acfbf374913ebf99cfd01ff5c7b709490f7d87e56e16f3846a5f546e7bbbf" gracePeriod=30 Jan 20 17:27:27 crc kubenswrapper[4558]: I0120 17:27:27.185984 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-api-666d4cc7c-5vfvn" podUID="000dbc78-69b3-452b-8d87-9781c58b7b06" containerName="barbican-api" containerID="cri-o://9c3cde8598228c2fcc2d02a66724249be75d689574a13d9797a4510b0c5f177a" gracePeriod=30 Jan 20 17:27:27 crc kubenswrapper[4558]: I0120 17:27:27.497743 4558 generic.go:334] "Generic (PLEG): container finished" podID="7c7138fb-59b8-47c0-a431-023fa79404f1" containerID="b4e76db6b8f078c1dc089ec0784ae087f970f12f50927744b7cf7a30c75eb1e1" exitCode=0 Jan 20 17:27:27 crc kubenswrapper[4558]: I0120 17:27:27.497832 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"7c7138fb-59b8-47c0-a431-023fa79404f1","Type":"ContainerDied","Data":"b4e76db6b8f078c1dc089ec0784ae087f970f12f50927744b7cf7a30c75eb1e1"} Jan 20 17:27:27 crc kubenswrapper[4558]: I0120 17:27:27.500865 4558 generic.go:334] "Generic (PLEG): container finished" podID="000dbc78-69b3-452b-8d87-9781c58b7b06" containerID="da0acfbf374913ebf99cfd01ff5c7b709490f7d87e56e16f3846a5f546e7bbbf" exitCode=143 Jan 20 17:27:27 crc kubenswrapper[4558]: I0120 17:27:27.501496 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-666d4cc7c-5vfvn" event={"ID":"000dbc78-69b3-452b-8d87-9781c58b7b06","Type":"ContainerDied","Data":"da0acfbf374913ebf99cfd01ff5c7b709490f7d87e56e16f3846a5f546e7bbbf"} Jan 20 17:27:30 crc kubenswrapper[4558]: I0120 17:27:30.336695 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/barbican-api-666d4cc7c-5vfvn" podUID="000dbc78-69b3-452b-8d87-9781c58b7b06" containerName="barbican-api" probeResult="failure" output="Get \"https://10.217.1.17:9311/healthcheck\": read tcp 10.217.0.2:34718->10.217.1.17:9311: read: connection reset by peer" Jan 20 17:27:30 crc kubenswrapper[4558]: I0120 17:27:30.336688 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/barbican-api-666d4cc7c-5vfvn" podUID="000dbc78-69b3-452b-8d87-9781c58b7b06" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.1.17:9311/healthcheck\": read tcp 10.217.0.2:34722->10.217.1.17:9311: read: connection reset by peer" Jan 20 17:27:30 crc kubenswrapper[4558]: I0120 17:27:30.537031 4558 generic.go:334] "Generic (PLEG): container finished" podID="000dbc78-69b3-452b-8d87-9781c58b7b06" 
containerID="9c3cde8598228c2fcc2d02a66724249be75d689574a13d9797a4510b0c5f177a" exitCode=0 Jan 20 17:27:30 crc kubenswrapper[4558]: I0120 17:27:30.537080 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-666d4cc7c-5vfvn" event={"ID":"000dbc78-69b3-452b-8d87-9781c58b7b06","Type":"ContainerDied","Data":"9c3cde8598228c2fcc2d02a66724249be75d689574a13d9797a4510b0c5f177a"} Jan 20 17:27:30 crc kubenswrapper[4558]: I0120 17:27:30.763769 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-api-666d4cc7c-5vfvn" Jan 20 17:27:30 crc kubenswrapper[4558]: I0120 17:27:30.864603 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/000dbc78-69b3-452b-8d87-9781c58b7b06-public-tls-certs\") pod \"000dbc78-69b3-452b-8d87-9781c58b7b06\" (UID: \"000dbc78-69b3-452b-8d87-9781c58b7b06\") " Jan 20 17:27:30 crc kubenswrapper[4558]: I0120 17:27:30.864719 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/000dbc78-69b3-452b-8d87-9781c58b7b06-config-data\") pod \"000dbc78-69b3-452b-8d87-9781c58b7b06\" (UID: \"000dbc78-69b3-452b-8d87-9781c58b7b06\") " Jan 20 17:27:30 crc kubenswrapper[4558]: I0120 17:27:30.864800 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d86wh\" (UniqueName: \"kubernetes.io/projected/000dbc78-69b3-452b-8d87-9781c58b7b06-kube-api-access-d86wh\") pod \"000dbc78-69b3-452b-8d87-9781c58b7b06\" (UID: \"000dbc78-69b3-452b-8d87-9781c58b7b06\") " Jan 20 17:27:30 crc kubenswrapper[4558]: I0120 17:27:30.864824 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/000dbc78-69b3-452b-8d87-9781c58b7b06-config-data-custom\") pod \"000dbc78-69b3-452b-8d87-9781c58b7b06\" (UID: \"000dbc78-69b3-452b-8d87-9781c58b7b06\") " Jan 20 17:27:30 crc kubenswrapper[4558]: I0120 17:27:30.864878 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/000dbc78-69b3-452b-8d87-9781c58b7b06-combined-ca-bundle\") pod \"000dbc78-69b3-452b-8d87-9781c58b7b06\" (UID: \"000dbc78-69b3-452b-8d87-9781c58b7b06\") " Jan 20 17:27:30 crc kubenswrapper[4558]: I0120 17:27:30.864898 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/000dbc78-69b3-452b-8d87-9781c58b7b06-internal-tls-certs\") pod \"000dbc78-69b3-452b-8d87-9781c58b7b06\" (UID: \"000dbc78-69b3-452b-8d87-9781c58b7b06\") " Jan 20 17:27:30 crc kubenswrapper[4558]: I0120 17:27:30.864982 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/000dbc78-69b3-452b-8d87-9781c58b7b06-logs\") pod \"000dbc78-69b3-452b-8d87-9781c58b7b06\" (UID: \"000dbc78-69b3-452b-8d87-9781c58b7b06\") " Jan 20 17:27:30 crc kubenswrapper[4558]: I0120 17:27:30.865662 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/000dbc78-69b3-452b-8d87-9781c58b7b06-logs" (OuterVolumeSpecName: "logs") pod "000dbc78-69b3-452b-8d87-9781c58b7b06" (UID: "000dbc78-69b3-452b-8d87-9781c58b7b06"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:27:30 crc kubenswrapper[4558]: I0120 17:27:30.866080 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/000dbc78-69b3-452b-8d87-9781c58b7b06-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:30 crc kubenswrapper[4558]: I0120 17:27:30.872052 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/000dbc78-69b3-452b-8d87-9781c58b7b06-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "000dbc78-69b3-452b-8d87-9781c58b7b06" (UID: "000dbc78-69b3-452b-8d87-9781c58b7b06"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:30 crc kubenswrapper[4558]: I0120 17:27:30.872829 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/000dbc78-69b3-452b-8d87-9781c58b7b06-kube-api-access-d86wh" (OuterVolumeSpecName: "kube-api-access-d86wh") pod "000dbc78-69b3-452b-8d87-9781c58b7b06" (UID: "000dbc78-69b3-452b-8d87-9781c58b7b06"). InnerVolumeSpecName "kube-api-access-d86wh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:27:30 crc kubenswrapper[4558]: I0120 17:27:30.891610 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/000dbc78-69b3-452b-8d87-9781c58b7b06-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "000dbc78-69b3-452b-8d87-9781c58b7b06" (UID: "000dbc78-69b3-452b-8d87-9781c58b7b06"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:30 crc kubenswrapper[4558]: I0120 17:27:30.913452 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/000dbc78-69b3-452b-8d87-9781c58b7b06-config-data" (OuterVolumeSpecName: "config-data") pod "000dbc78-69b3-452b-8d87-9781c58b7b06" (UID: "000dbc78-69b3-452b-8d87-9781c58b7b06"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:30 crc kubenswrapper[4558]: I0120 17:27:30.915523 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/000dbc78-69b3-452b-8d87-9781c58b7b06-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "000dbc78-69b3-452b-8d87-9781c58b7b06" (UID: "000dbc78-69b3-452b-8d87-9781c58b7b06"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:30 crc kubenswrapper[4558]: I0120 17:27:30.917994 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/000dbc78-69b3-452b-8d87-9781c58b7b06-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "000dbc78-69b3-452b-8d87-9781c58b7b06" (UID: "000dbc78-69b3-452b-8d87-9781c58b7b06"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:30 crc kubenswrapper[4558]: I0120 17:27:30.968633 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/000dbc78-69b3-452b-8d87-9781c58b7b06-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:30 crc kubenswrapper[4558]: I0120 17:27:30.968667 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d86wh\" (UniqueName: \"kubernetes.io/projected/000dbc78-69b3-452b-8d87-9781c58b7b06-kube-api-access-d86wh\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:30 crc kubenswrapper[4558]: I0120 17:27:30.968701 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/000dbc78-69b3-452b-8d87-9781c58b7b06-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:30 crc kubenswrapper[4558]: I0120 17:27:30.968712 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/000dbc78-69b3-452b-8d87-9781c58b7b06-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:30 crc kubenswrapper[4558]: I0120 17:27:30.968722 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/000dbc78-69b3-452b-8d87-9781c58b7b06-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:30 crc kubenswrapper[4558]: I0120 17:27:30.968732 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/000dbc78-69b3-452b-8d87-9781c58b7b06-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:31 crc kubenswrapper[4558]: I0120 17:27:31.550996 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-666d4cc7c-5vfvn" event={"ID":"000dbc78-69b3-452b-8d87-9781c58b7b06","Type":"ContainerDied","Data":"c73bd646b1102096930ce3ac3e0d4246c05222379157d2eddcc65e7d0c3e3f03"} Jan 20 17:27:31 crc kubenswrapper[4558]: I0120 17:27:31.551109 4558 scope.go:117] "RemoveContainer" containerID="9c3cde8598228c2fcc2d02a66724249be75d689574a13d9797a4510b0c5f177a" Jan 20 17:27:31 crc kubenswrapper[4558]: I0120 17:27:31.551434 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-api-666d4cc7c-5vfvn" Jan 20 17:27:31 crc kubenswrapper[4558]: I0120 17:27:31.594310 4558 scope.go:117] "RemoveContainer" containerID="da0acfbf374913ebf99cfd01ff5c7b709490f7d87e56e16f3846a5f546e7bbbf" Jan 20 17:27:31 crc kubenswrapper[4558]: I0120 17:27:31.596206 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-api-666d4cc7c-5vfvn"] Jan 20 17:27:31 crc kubenswrapper[4558]: I0120 17:27:31.604476 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-api-666d4cc7c-5vfvn"] Jan 20 17:27:32 crc kubenswrapper[4558]: I0120 17:27:32.579457 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="000dbc78-69b3-452b-8d87-9781c58b7b06" path="/var/lib/kubelet/pods/000dbc78-69b3-452b-8d87-9781c58b7b06/volumes" Jan 20 17:27:33 crc kubenswrapper[4558]: I0120 17:27:33.870446 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7947c54dc7-w2kxl" Jan 20 17:27:33 crc kubenswrapper[4558]: I0120 17:27:33.946265 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-c5fc7f8f-s5hwr"] Jan 20 17:27:33 crc kubenswrapper[4558]: I0120 17:27:33.946504 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-c5fc7f8f-s5hwr" podUID="e1484eaf-3b1f-4dfa-9413-dc02309dfbbf" containerName="dnsmasq-dns" containerID="cri-o://1c137771e465e33a67364c32486ffdbc563cc7ed837bc1e32b1d407c435a9739" gracePeriod=10 Jan 20 17:27:34 crc kubenswrapper[4558]: I0120 17:27:34.407136 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-c5fc7f8f-s5hwr" Jan 20 17:27:34 crc kubenswrapper[4558]: I0120 17:27:34.544419 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/e1484eaf-3b1f-4dfa-9413-dc02309dfbbf-dnsmasq-svc\") pod \"e1484eaf-3b1f-4dfa-9413-dc02309dfbbf\" (UID: \"e1484eaf-3b1f-4dfa-9413-dc02309dfbbf\") " Jan 20 17:27:34 crc kubenswrapper[4558]: I0120 17:27:34.544736 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e1484eaf-3b1f-4dfa-9413-dc02309dfbbf-config\") pod \"e1484eaf-3b1f-4dfa-9413-dc02309dfbbf\" (UID: \"e1484eaf-3b1f-4dfa-9413-dc02309dfbbf\") " Jan 20 17:27:34 crc kubenswrapper[4558]: I0120 17:27:34.544925 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ltf6c\" (UniqueName: \"kubernetes.io/projected/e1484eaf-3b1f-4dfa-9413-dc02309dfbbf-kube-api-access-ltf6c\") pod \"e1484eaf-3b1f-4dfa-9413-dc02309dfbbf\" (UID: \"e1484eaf-3b1f-4dfa-9413-dc02309dfbbf\") " Jan 20 17:27:34 crc kubenswrapper[4558]: I0120 17:27:34.545155 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e1484eaf-3b1f-4dfa-9413-dc02309dfbbf-dns-swift-storage-0\") pod \"e1484eaf-3b1f-4dfa-9413-dc02309dfbbf\" (UID: \"e1484eaf-3b1f-4dfa-9413-dc02309dfbbf\") " Jan 20 17:27:34 crc kubenswrapper[4558]: I0120 17:27:34.554041 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e1484eaf-3b1f-4dfa-9413-dc02309dfbbf-kube-api-access-ltf6c" (OuterVolumeSpecName: "kube-api-access-ltf6c") pod "e1484eaf-3b1f-4dfa-9413-dc02309dfbbf" (UID: 
"e1484eaf-3b1f-4dfa-9413-dc02309dfbbf"). InnerVolumeSpecName "kube-api-access-ltf6c". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:27:34 crc kubenswrapper[4558]: I0120 17:27:34.593206 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e1484eaf-3b1f-4dfa-9413-dc02309dfbbf-dnsmasq-svc" (OuterVolumeSpecName: "dnsmasq-svc") pod "e1484eaf-3b1f-4dfa-9413-dc02309dfbbf" (UID: "e1484eaf-3b1f-4dfa-9413-dc02309dfbbf"). InnerVolumeSpecName "dnsmasq-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:27:34 crc kubenswrapper[4558]: I0120 17:27:34.599717 4558 generic.go:334] "Generic (PLEG): container finished" podID="d692722b-f7fd-447c-8b7a-f56cff940d91" containerID="dfad3104c55017bd8727f5d89d9c30642f8493fbe621ac636c457844689b6a39" exitCode=0 Jan 20 17:27:34 crc kubenswrapper[4558]: I0120 17:27:34.602251 4558 generic.go:334] "Generic (PLEG): container finished" podID="e1484eaf-3b1f-4dfa-9413-dc02309dfbbf" containerID="1c137771e465e33a67364c32486ffdbc563cc7ed837bc1e32b1d407c435a9739" exitCode=0 Jan 20 17:27:34 crc kubenswrapper[4558]: I0120 17:27:34.602372 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-c5fc7f8f-s5hwr" Jan 20 17:27:34 crc kubenswrapper[4558]: I0120 17:27:34.606356 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e1484eaf-3b1f-4dfa-9413-dc02309dfbbf-config" (OuterVolumeSpecName: "config") pod "e1484eaf-3b1f-4dfa-9413-dc02309dfbbf" (UID: "e1484eaf-3b1f-4dfa-9413-dc02309dfbbf"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:27:34 crc kubenswrapper[4558]: I0120 17:27:34.609444 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e1484eaf-3b1f-4dfa-9413-dc02309dfbbf-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "e1484eaf-3b1f-4dfa-9413-dc02309dfbbf" (UID: "e1484eaf-3b1f-4dfa-9413-dc02309dfbbf"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:27:34 crc kubenswrapper[4558]: I0120 17:27:34.648149 4558 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/e1484eaf-3b1f-4dfa-9413-dc02309dfbbf-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:34 crc kubenswrapper[4558]: I0120 17:27:34.648193 4558 reconciler_common.go:293] "Volume detached for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/e1484eaf-3b1f-4dfa-9413-dc02309dfbbf-dnsmasq-svc\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:34 crc kubenswrapper[4558]: I0120 17:27:34.648203 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e1484eaf-3b1f-4dfa-9413-dc02309dfbbf-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:34 crc kubenswrapper[4558]: I0120 17:27:34.648214 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ltf6c\" (UniqueName: \"kubernetes.io/projected/e1484eaf-3b1f-4dfa-9413-dc02309dfbbf-kube-api-access-ltf6c\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:34 crc kubenswrapper[4558]: I0120 17:27:34.675752 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-0" event={"ID":"d692722b-f7fd-447c-8b7a-f56cff940d91","Type":"ContainerDied","Data":"dfad3104c55017bd8727f5d89d9c30642f8493fbe621ac636c457844689b6a39"} Jan 20 17:27:34 crc kubenswrapper[4558]: I0120 17:27:34.675816 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-c5fc7f8f-s5hwr" event={"ID":"e1484eaf-3b1f-4dfa-9413-dc02309dfbbf","Type":"ContainerDied","Data":"1c137771e465e33a67364c32486ffdbc563cc7ed837bc1e32b1d407c435a9739"} Jan 20 17:27:34 crc kubenswrapper[4558]: I0120 17:27:34.675837 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-c5fc7f8f-s5hwr" event={"ID":"e1484eaf-3b1f-4dfa-9413-dc02309dfbbf","Type":"ContainerDied","Data":"99072e33c1b7ee8af93204acbc7321535e897fa11b45ecebcf70849083a4824d"} Jan 20 17:27:34 crc kubenswrapper[4558]: I0120 17:27:34.675857 4558 scope.go:117] "RemoveContainer" containerID="1c137771e465e33a67364c32486ffdbc563cc7ed837bc1e32b1d407c435a9739" Jan 20 17:27:34 crc kubenswrapper[4558]: I0120 17:27:34.710474 4558 scope.go:117] "RemoveContainer" containerID="44a0737b01ce0cd786f9b99cafcc7751a34c8d80cd152766679f7c00f1f35b84" Jan 20 17:27:34 crc kubenswrapper[4558]: I0120 17:27:34.729951 4558 scope.go:117] "RemoveContainer" containerID="1c137771e465e33a67364c32486ffdbc563cc7ed837bc1e32b1d407c435a9739" Jan 20 17:27:34 crc kubenswrapper[4558]: E0120 17:27:34.730318 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1c137771e465e33a67364c32486ffdbc563cc7ed837bc1e32b1d407c435a9739\": container with ID starting with 1c137771e465e33a67364c32486ffdbc563cc7ed837bc1e32b1d407c435a9739 not found: ID does not exist" containerID="1c137771e465e33a67364c32486ffdbc563cc7ed837bc1e32b1d407c435a9739" Jan 20 17:27:34 crc kubenswrapper[4558]: I0120 17:27:34.730352 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1c137771e465e33a67364c32486ffdbc563cc7ed837bc1e32b1d407c435a9739"} err="failed to get container status \"1c137771e465e33a67364c32486ffdbc563cc7ed837bc1e32b1d407c435a9739\": rpc error: code = NotFound desc = could not find container 
\"1c137771e465e33a67364c32486ffdbc563cc7ed837bc1e32b1d407c435a9739\": container with ID starting with 1c137771e465e33a67364c32486ffdbc563cc7ed837bc1e32b1d407c435a9739 not found: ID does not exist" Jan 20 17:27:34 crc kubenswrapper[4558]: I0120 17:27:34.730374 4558 scope.go:117] "RemoveContainer" containerID="44a0737b01ce0cd786f9b99cafcc7751a34c8d80cd152766679f7c00f1f35b84" Jan 20 17:27:34 crc kubenswrapper[4558]: E0120 17:27:34.730675 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"44a0737b01ce0cd786f9b99cafcc7751a34c8d80cd152766679f7c00f1f35b84\": container with ID starting with 44a0737b01ce0cd786f9b99cafcc7751a34c8d80cd152766679f7c00f1f35b84 not found: ID does not exist" containerID="44a0737b01ce0cd786f9b99cafcc7751a34c8d80cd152766679f7c00f1f35b84" Jan 20 17:27:34 crc kubenswrapper[4558]: I0120 17:27:34.730719 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"44a0737b01ce0cd786f9b99cafcc7751a34c8d80cd152766679f7c00f1f35b84"} err="failed to get container status \"44a0737b01ce0cd786f9b99cafcc7751a34c8d80cd152766679f7c00f1f35b84\": rpc error: code = NotFound desc = could not find container \"44a0737b01ce0cd786f9b99cafcc7751a34c8d80cd152766679f7c00f1f35b84\": container with ID starting with 44a0737b01ce0cd786f9b99cafcc7751a34c8d80cd152766679f7c00f1f35b84 not found: ID does not exist" Jan 20 17:27:34 crc kubenswrapper[4558]: E0120 17:27:34.806043 4558 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod48cfc6e5_774d_4e7d_8103_f6a3260ea14c.slice/crio-253c7b1bb698ada295155d4391adcd6900c7ef92966fcf1c968bb5c7832f4326.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod48cfc6e5_774d_4e7d_8103_f6a3260ea14c.slice/crio-conmon-253c7b1bb698ada295155d4391adcd6900c7ef92966fcf1c968bb5c7832f4326.scope\": RecentStats: unable to find data in memory cache]" Jan 20 17:27:34 crc kubenswrapper[4558]: I0120 17:27:34.939448 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-c5fc7f8f-s5hwr"] Jan 20 17:27:34 crc kubenswrapper[4558]: I0120 17:27:34.949448 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-c5fc7f8f-s5hwr"] Jan 20 17:27:35 crc kubenswrapper[4558]: I0120 17:27:35.616057 4558 generic.go:334] "Generic (PLEG): container finished" podID="48cfc6e5-774d-4e7d-8103-f6a3260ea14c" containerID="253c7b1bb698ada295155d4391adcd6900c7ef92966fcf1c968bb5c7832f4326" exitCode=0 Jan 20 17:27:35 crc kubenswrapper[4558]: I0120 17:27:35.616176 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" event={"ID":"48cfc6e5-774d-4e7d-8103-f6a3260ea14c","Type":"ContainerDied","Data":"253c7b1bb698ada295155d4391adcd6900c7ef92966fcf1c968bb5c7832f4326"} Jan 20 17:27:35 crc kubenswrapper[4558]: I0120 17:27:35.618885 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-0" event={"ID":"d692722b-f7fd-447c-8b7a-f56cff940d91","Type":"ContainerStarted","Data":"e662a87a8dfea3af31619f4746b123264a47d980ca2258670d31ac3490d07672"} Jan 20 17:27:35 crc kubenswrapper[4558]: I0120 17:27:35.619262 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:27:35 crc 
kubenswrapper[4558]: I0120 17:27:35.657970 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/rabbitmq-server-0" podStartSLOduration=36.657952593 podStartE2EDuration="36.657952593s" podCreationTimestamp="2026-01-20 17:26:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:27:35.656666965 +0000 UTC m=+2749.417004932" watchObservedRunningTime="2026-01-20 17:27:35.657952593 +0000 UTC m=+2749.418290560" Jan 20 17:27:36 crc kubenswrapper[4558]: I0120 17:27:36.577661 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e1484eaf-3b1f-4dfa-9413-dc02309dfbbf" path="/var/lib/kubelet/pods/e1484eaf-3b1f-4dfa-9413-dc02309dfbbf/volumes" Jan 20 17:27:36 crc kubenswrapper[4558]: I0120 17:27:36.633923 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" event={"ID":"48cfc6e5-774d-4e7d-8103-f6a3260ea14c","Type":"ContainerStarted","Data":"94d9e6b06e5975fa2b75519bde1be0a449c0ca63e7bbf3340176be25c0d5874c"} Jan 20 17:27:36 crc kubenswrapper[4558]: I0120 17:27:36.634312 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:27:36 crc kubenswrapper[4558]: I0120 17:27:36.665532 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" podStartSLOduration=36.665512994 podStartE2EDuration="36.665512994s" podCreationTimestamp="2026-01-20 17:27:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:27:36.656671662 +0000 UTC m=+2750.417009628" watchObservedRunningTime="2026-01-20 17:27:36.665512994 +0000 UTC m=+2750.425850961" Jan 20 17:27:37 crc kubenswrapper[4558]: I0120 17:27:37.232753 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:27:37 crc kubenswrapper[4558]: I0120 17:27:37.364617 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/neutron-665b4c9c8d-9tcx6" Jan 20 17:27:37 crc kubenswrapper[4558]: I0120 17:27:37.439805 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-5855ff84bf-bczz4"] Jan 20 17:27:37 crc kubenswrapper[4558]: I0120 17:27:37.440101 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-5855ff84bf-bczz4" podUID="bd387ac0-76d7-418c-a34f-19be32ae37f9" containerName="neutron-api" containerID="cri-o://e8b21ab038396136a9090ecf6bdb12372852799fe29e7b256ebbd56bdce61f7b" gracePeriod=30 Jan 20 17:27:37 crc kubenswrapper[4558]: I0120 17:27:37.440261 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-5855ff84bf-bczz4" podUID="bd387ac0-76d7-418c-a34f-19be32ae37f9" containerName="neutron-httpd" containerID="cri-o://976dcb4a79daec7424bb1f359fd90302b7391b2695da9a6003214e35e5292830" gracePeriod=30 Jan 20 17:27:38 crc kubenswrapper[4558]: I0120 17:27:38.658652 4558 generic.go:334] "Generic (PLEG): container finished" podID="bd387ac0-76d7-418c-a34f-19be32ae37f9" containerID="976dcb4a79daec7424bb1f359fd90302b7391b2695da9a6003214e35e5292830" exitCode=0 Jan 20 17:27:38 crc kubenswrapper[4558]: I0120 17:27:38.658701 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-kuttl-tests/neutron-5855ff84bf-bczz4" event={"ID":"bd387ac0-76d7-418c-a34f-19be32ae37f9","Type":"ContainerDied","Data":"976dcb4a79daec7424bb1f359fd90302b7391b2695da9a6003214e35e5292830"} Jan 20 17:27:45 crc kubenswrapper[4558]: I0120 17:27:45.638564 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/placement-55d4bc664d-k82t9" Jan 20 17:27:45 crc kubenswrapper[4558]: I0120 17:27:45.748313 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/placement-55d4bc664d-k82t9" Jan 20 17:27:45 crc kubenswrapper[4558]: I0120 17:27:45.846425 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-66fdf9847-wwwbv"] Jan 20 17:27:45 crc kubenswrapper[4558]: I0120 17:27:45.846750 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/placement-66fdf9847-wwwbv" podUID="ef71f603-5259-468f-b39f-4e726cdcb2f5" containerName="placement-log" containerID="cri-o://f7ce9ab01f40e824acf195a6b4913a12b3fb6a924673bfa550a16e418b812cff" gracePeriod=30 Jan 20 17:27:45 crc kubenswrapper[4558]: I0120 17:27:45.847425 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/placement-66fdf9847-wwwbv" podUID="ef71f603-5259-468f-b39f-4e726cdcb2f5" containerName="placement-api" containerID="cri-o://7ae163fcbc93afed4572da8b9cfe88991120b5aebe851c19ca31f198e3260a32" gracePeriod=30 Jan 20 17:27:46 crc kubenswrapper[4558]: I0120 17:27:46.741605 4558 generic.go:334] "Generic (PLEG): container finished" podID="ef71f603-5259-468f-b39f-4e726cdcb2f5" containerID="f7ce9ab01f40e824acf195a6b4913a12b3fb6a924673bfa550a16e418b812cff" exitCode=143 Jan 20 17:27:46 crc kubenswrapper[4558]: I0120 17:27:46.741683 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-66fdf9847-wwwbv" event={"ID":"ef71f603-5259-468f-b39f-4e726cdcb2f5","Type":"ContainerDied","Data":"f7ce9ab01f40e824acf195a6b4913a12b3fb6a924673bfa550a16e418b812cff"} Jan 20 17:27:49 crc kubenswrapper[4558]: I0120 17:27:49.456184 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-66fdf9847-wwwbv" Jan 20 17:27:49 crc kubenswrapper[4558]: I0120 17:27:49.549945 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ef71f603-5259-468f-b39f-4e726cdcb2f5-scripts\") pod \"ef71f603-5259-468f-b39f-4e726cdcb2f5\" (UID: \"ef71f603-5259-468f-b39f-4e726cdcb2f5\") " Jan 20 17:27:49 crc kubenswrapper[4558]: I0120 17:27:49.549975 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ef71f603-5259-468f-b39f-4e726cdcb2f5-public-tls-certs\") pod \"ef71f603-5259-468f-b39f-4e726cdcb2f5\" (UID: \"ef71f603-5259-468f-b39f-4e726cdcb2f5\") " Jan 20 17:27:49 crc kubenswrapper[4558]: I0120 17:27:49.550047 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef71f603-5259-468f-b39f-4e726cdcb2f5-combined-ca-bundle\") pod \"ef71f603-5259-468f-b39f-4e726cdcb2f5\" (UID: \"ef71f603-5259-468f-b39f-4e726cdcb2f5\") " Jan 20 17:27:49 crc kubenswrapper[4558]: I0120 17:27:49.550069 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ef71f603-5259-468f-b39f-4e726cdcb2f5-internal-tls-certs\") pod \"ef71f603-5259-468f-b39f-4e726cdcb2f5\" (UID: \"ef71f603-5259-468f-b39f-4e726cdcb2f5\") " Jan 20 17:27:49 crc kubenswrapper[4558]: I0120 17:27:49.550086 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ef71f603-5259-468f-b39f-4e726cdcb2f5-logs\") pod \"ef71f603-5259-468f-b39f-4e726cdcb2f5\" (UID: \"ef71f603-5259-468f-b39f-4e726cdcb2f5\") " Jan 20 17:27:49 crc kubenswrapper[4558]: I0120 17:27:49.550138 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef71f603-5259-468f-b39f-4e726cdcb2f5-config-data\") pod \"ef71f603-5259-468f-b39f-4e726cdcb2f5\" (UID: \"ef71f603-5259-468f-b39f-4e726cdcb2f5\") " Jan 20 17:27:49 crc kubenswrapper[4558]: I0120 17:27:49.550156 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lkxmz\" (UniqueName: \"kubernetes.io/projected/ef71f603-5259-468f-b39f-4e726cdcb2f5-kube-api-access-lkxmz\") pod \"ef71f603-5259-468f-b39f-4e726cdcb2f5\" (UID: \"ef71f603-5259-468f-b39f-4e726cdcb2f5\") " Jan 20 17:27:49 crc kubenswrapper[4558]: I0120 17:27:49.550603 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ef71f603-5259-468f-b39f-4e726cdcb2f5-logs" (OuterVolumeSpecName: "logs") pod "ef71f603-5259-468f-b39f-4e726cdcb2f5" (UID: "ef71f603-5259-468f-b39f-4e726cdcb2f5"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:27:49 crc kubenswrapper[4558]: I0120 17:27:49.555690 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef71f603-5259-468f-b39f-4e726cdcb2f5-scripts" (OuterVolumeSpecName: "scripts") pod "ef71f603-5259-468f-b39f-4e726cdcb2f5" (UID: "ef71f603-5259-468f-b39f-4e726cdcb2f5"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:49 crc kubenswrapper[4558]: I0120 17:27:49.568612 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ef71f603-5259-468f-b39f-4e726cdcb2f5-kube-api-access-lkxmz" (OuterVolumeSpecName: "kube-api-access-lkxmz") pod "ef71f603-5259-468f-b39f-4e726cdcb2f5" (UID: "ef71f603-5259-468f-b39f-4e726cdcb2f5"). InnerVolumeSpecName "kube-api-access-lkxmz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:27:49 crc kubenswrapper[4558]: I0120 17:27:49.599796 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef71f603-5259-468f-b39f-4e726cdcb2f5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ef71f603-5259-468f-b39f-4e726cdcb2f5" (UID: "ef71f603-5259-468f-b39f-4e726cdcb2f5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:49 crc kubenswrapper[4558]: I0120 17:27:49.600555 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef71f603-5259-468f-b39f-4e726cdcb2f5-config-data" (OuterVolumeSpecName: "config-data") pod "ef71f603-5259-468f-b39f-4e726cdcb2f5" (UID: "ef71f603-5259-468f-b39f-4e726cdcb2f5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:49 crc kubenswrapper[4558]: I0120 17:27:49.635140 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef71f603-5259-468f-b39f-4e726cdcb2f5-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "ef71f603-5259-468f-b39f-4e726cdcb2f5" (UID: "ef71f603-5259-468f-b39f-4e726cdcb2f5"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:49 crc kubenswrapper[4558]: I0120 17:27:49.650209 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef71f603-5259-468f-b39f-4e726cdcb2f5-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "ef71f603-5259-468f-b39f-4e726cdcb2f5" (UID: "ef71f603-5259-468f-b39f-4e726cdcb2f5"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:49 crc kubenswrapper[4558]: I0120 17:27:49.661256 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ef71f603-5259-468f-b39f-4e726cdcb2f5-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:49 crc kubenswrapper[4558]: I0120 17:27:49.661292 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ef71f603-5259-468f-b39f-4e726cdcb2f5-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:49 crc kubenswrapper[4558]: I0120 17:27:49.661307 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef71f603-5259-468f-b39f-4e726cdcb2f5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:49 crc kubenswrapper[4558]: I0120 17:27:49.661318 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ef71f603-5259-468f-b39f-4e726cdcb2f5-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:49 crc kubenswrapper[4558]: I0120 17:27:49.661331 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ef71f603-5259-468f-b39f-4e726cdcb2f5-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:49 crc kubenswrapper[4558]: I0120 17:27:49.661343 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef71f603-5259-468f-b39f-4e726cdcb2f5-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:49 crc kubenswrapper[4558]: I0120 17:27:49.661355 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lkxmz\" (UniqueName: \"kubernetes.io/projected/ef71f603-5259-468f-b39f-4e726cdcb2f5-kube-api-access-lkxmz\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:49 crc kubenswrapper[4558]: I0120 17:27:49.769894 4558 generic.go:334] "Generic (PLEG): container finished" podID="ef71f603-5259-468f-b39f-4e726cdcb2f5" containerID="7ae163fcbc93afed4572da8b9cfe88991120b5aebe851c19ca31f198e3260a32" exitCode=0 Jan 20 17:27:49 crc kubenswrapper[4558]: I0120 17:27:49.770132 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-66fdf9847-wwwbv" event={"ID":"ef71f603-5259-468f-b39f-4e726cdcb2f5","Type":"ContainerDied","Data":"7ae163fcbc93afed4572da8b9cfe88991120b5aebe851c19ca31f198e3260a32"} Jan 20 17:27:49 crc kubenswrapper[4558]: I0120 17:27:49.770215 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-66fdf9847-wwwbv" Jan 20 17:27:49 crc kubenswrapper[4558]: I0120 17:27:49.770249 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-66fdf9847-wwwbv" event={"ID":"ef71f603-5259-468f-b39f-4e726cdcb2f5","Type":"ContainerDied","Data":"178e67168083a3ca4fd77829a812a47e54bb69b9e9849a55530f849b73d6f201"} Jan 20 17:27:49 crc kubenswrapper[4558]: I0120 17:27:49.770285 4558 scope.go:117] "RemoveContainer" containerID="7ae163fcbc93afed4572da8b9cfe88991120b5aebe851c19ca31f198e3260a32" Jan 20 17:27:49 crc kubenswrapper[4558]: I0120 17:27:49.797789 4558 scope.go:117] "RemoveContainer" containerID="f7ce9ab01f40e824acf195a6b4913a12b3fb6a924673bfa550a16e418b812cff" Jan 20 17:27:49 crc kubenswrapper[4558]: I0120 17:27:49.826222 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-66fdf9847-wwwbv"] Jan 20 17:27:49 crc kubenswrapper[4558]: I0120 17:27:49.833852 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/placement-66fdf9847-wwwbv"] Jan 20 17:27:49 crc kubenswrapper[4558]: I0120 17:27:49.838198 4558 scope.go:117] "RemoveContainer" containerID="7ae163fcbc93afed4572da8b9cfe88991120b5aebe851c19ca31f198e3260a32" Jan 20 17:27:49 crc kubenswrapper[4558]: E0120 17:27:49.839224 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7ae163fcbc93afed4572da8b9cfe88991120b5aebe851c19ca31f198e3260a32\": container with ID starting with 7ae163fcbc93afed4572da8b9cfe88991120b5aebe851c19ca31f198e3260a32 not found: ID does not exist" containerID="7ae163fcbc93afed4572da8b9cfe88991120b5aebe851c19ca31f198e3260a32" Jan 20 17:27:49 crc kubenswrapper[4558]: I0120 17:27:49.839256 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7ae163fcbc93afed4572da8b9cfe88991120b5aebe851c19ca31f198e3260a32"} err="failed to get container status \"7ae163fcbc93afed4572da8b9cfe88991120b5aebe851c19ca31f198e3260a32\": rpc error: code = NotFound desc = could not find container \"7ae163fcbc93afed4572da8b9cfe88991120b5aebe851c19ca31f198e3260a32\": container with ID starting with 7ae163fcbc93afed4572da8b9cfe88991120b5aebe851c19ca31f198e3260a32 not found: ID does not exist" Jan 20 17:27:49 crc kubenswrapper[4558]: I0120 17:27:49.839277 4558 scope.go:117] "RemoveContainer" containerID="f7ce9ab01f40e824acf195a6b4913a12b3fb6a924673bfa550a16e418b812cff" Jan 20 17:27:49 crc kubenswrapper[4558]: E0120 17:27:49.839639 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f7ce9ab01f40e824acf195a6b4913a12b3fb6a924673bfa550a16e418b812cff\": container with ID starting with f7ce9ab01f40e824acf195a6b4913a12b3fb6a924673bfa550a16e418b812cff not found: ID does not exist" containerID="f7ce9ab01f40e824acf195a6b4913a12b3fb6a924673bfa550a16e418b812cff" Jan 20 17:27:49 crc kubenswrapper[4558]: I0120 17:27:49.839666 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f7ce9ab01f40e824acf195a6b4913a12b3fb6a924673bfa550a16e418b812cff"} err="failed to get container status \"f7ce9ab01f40e824acf195a6b4913a12b3fb6a924673bfa550a16e418b812cff\": rpc error: code = NotFound desc = could not find container \"f7ce9ab01f40e824acf195a6b4913a12b3fb6a924673bfa550a16e418b812cff\": container with ID starting with f7ce9ab01f40e824acf195a6b4913a12b3fb6a924673bfa550a16e418b812cff not 
found: ID does not exist" Jan 20 17:27:50 crc kubenswrapper[4558]: I0120 17:27:50.116517 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:27:50 crc kubenswrapper[4558]: I0120 17:27:50.349623 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-cell-mapping-th49n"] Jan 20 17:27:50 crc kubenswrapper[4558]: I0120 17:27:50.360389 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell0-cell-mapping-th49n"] Jan 20 17:27:50 crc kubenswrapper[4558]: I0120 17:27:50.368076 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:27:50 crc kubenswrapper[4558]: I0120 17:27:50.368481 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="f859dc3a-a5c9-4ddf-bf5e-0cedd762540f" containerName="nova-scheduler-scheduler" containerID="cri-o://6348c20e2c41f3b65a4752b2bf21e2c1f01971ffd97f7ef8d1a6820091d10a61" gracePeriod=30 Jan 20 17:27:50 crc kubenswrapper[4558]: I0120 17:27:50.376571 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:27:50 crc kubenswrapper[4558]: I0120 17:27:50.376853 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="f59aa69c-6c42-41df-905e-29428d350637" containerName="nova-api-log" containerID="cri-o://afb4cc76ebcd4dd7b7fd5a1a99c969b9bf8219683eba7336e506d3069b495f83" gracePeriod=30 Jan 20 17:27:50 crc kubenswrapper[4558]: I0120 17:27:50.377024 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="f59aa69c-6c42-41df-905e-29428d350637" containerName="nova-api-api" containerID="cri-o://a58e1fbb49aa21c6ab202f7e097494e1cdb0e9dcdffc1b04bb2b93be7a95a03f" gracePeriod=30 Jan 20 17:27:50 crc kubenswrapper[4558]: I0120 17:27:50.396007 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:27:50 crc kubenswrapper[4558]: I0120 17:27:50.396278 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="0370f524-0608-45c9-a4d4-efca3a6bbc4a" containerName="nova-metadata-log" containerID="cri-o://3bd82063429d21886e43702ff2750e30a9368e89bfcdb3e5dfdc7631a7eabf51" gracePeriod=30 Jan 20 17:27:50 crc kubenswrapper[4558]: I0120 17:27:50.396423 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="0370f524-0608-45c9-a4d4-efca3a6bbc4a" containerName="nova-metadata-metadata" containerID="cri-o://21f88c9b39c9e0e5b774687833ad6583b7301ae275f1fa63a898d7d06969b445" gracePeriod=30 Jan 20 17:27:50 crc kubenswrapper[4558]: I0120 17:27:50.575001 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="acd67900-69b3-485f-b378-80457d604be8" path="/var/lib/kubelet/pods/acd67900-69b3-485f-b378-80457d604be8/volumes" Jan 20 17:27:50 crc kubenswrapper[4558]: I0120 17:27:50.575600 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ef71f603-5259-468f-b39f-4e726cdcb2f5" path="/var/lib/kubelet/pods/ef71f603-5259-468f-b39f-4e726cdcb2f5/volumes" Jan 20 17:27:50 crc kubenswrapper[4558]: I0120 17:27:50.673282 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell0-cell-mapping-5jhz5"] Jan 20 17:27:50 crc 
kubenswrapper[4558]: E0120 17:27:50.673640 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="000dbc78-69b3-452b-8d87-9781c58b7b06" containerName="barbican-api-log" Jan 20 17:27:50 crc kubenswrapper[4558]: I0120 17:27:50.673655 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="000dbc78-69b3-452b-8d87-9781c58b7b06" containerName="barbican-api-log" Jan 20 17:27:50 crc kubenswrapper[4558]: E0120 17:27:50.673671 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1484eaf-3b1f-4dfa-9413-dc02309dfbbf" containerName="init" Jan 20 17:27:50 crc kubenswrapper[4558]: I0120 17:27:50.673677 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1484eaf-3b1f-4dfa-9413-dc02309dfbbf" containerName="init" Jan 20 17:27:50 crc kubenswrapper[4558]: E0120 17:27:50.673695 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="000dbc78-69b3-452b-8d87-9781c58b7b06" containerName="barbican-api" Jan 20 17:27:50 crc kubenswrapper[4558]: I0120 17:27:50.673702 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="000dbc78-69b3-452b-8d87-9781c58b7b06" containerName="barbican-api" Jan 20 17:27:50 crc kubenswrapper[4558]: E0120 17:27:50.673712 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef71f603-5259-468f-b39f-4e726cdcb2f5" containerName="placement-log" Jan 20 17:27:50 crc kubenswrapper[4558]: I0120 17:27:50.673717 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef71f603-5259-468f-b39f-4e726cdcb2f5" containerName="placement-log" Jan 20 17:27:50 crc kubenswrapper[4558]: E0120 17:27:50.673733 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef71f603-5259-468f-b39f-4e726cdcb2f5" containerName="placement-api" Jan 20 17:27:50 crc kubenswrapper[4558]: I0120 17:27:50.673738 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef71f603-5259-468f-b39f-4e726cdcb2f5" containerName="placement-api" Jan 20 17:27:50 crc kubenswrapper[4558]: E0120 17:27:50.673748 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1484eaf-3b1f-4dfa-9413-dc02309dfbbf" containerName="dnsmasq-dns" Jan 20 17:27:50 crc kubenswrapper[4558]: I0120 17:27:50.673754 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1484eaf-3b1f-4dfa-9413-dc02309dfbbf" containerName="dnsmasq-dns" Jan 20 17:27:50 crc kubenswrapper[4558]: I0120 17:27:50.673923 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef71f603-5259-468f-b39f-4e726cdcb2f5" containerName="placement-log" Jan 20 17:27:50 crc kubenswrapper[4558]: I0120 17:27:50.673937 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="000dbc78-69b3-452b-8d87-9781c58b7b06" containerName="barbican-api-log" Jan 20 17:27:50 crc kubenswrapper[4558]: I0120 17:27:50.673944 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="000dbc78-69b3-452b-8d87-9781c58b7b06" containerName="barbican-api" Jan 20 17:27:50 crc kubenswrapper[4558]: I0120 17:27:50.673953 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1484eaf-3b1f-4dfa-9413-dc02309dfbbf" containerName="dnsmasq-dns" Jan 20 17:27:50 crc kubenswrapper[4558]: I0120 17:27:50.673976 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef71f603-5259-468f-b39f-4e726cdcb2f5" containerName="placement-api" Jan 20 17:27:50 crc kubenswrapper[4558]: I0120 17:27:50.677653 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-5jhz5" Jan 20 17:27:50 crc kubenswrapper[4558]: I0120 17:27:50.682204 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-manage-scripts" Jan 20 17:27:50 crc kubenswrapper[4558]: I0120 17:27:50.682344 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-manage-config-data" Jan 20 17:27:50 crc kubenswrapper[4558]: I0120 17:27:50.686836 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-cell-mapping-5jhz5"] Jan 20 17:27:50 crc kubenswrapper[4558]: E0120 17:27:50.770227 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="6348c20e2c41f3b65a4752b2bf21e2c1f01971ffd97f7ef8d1a6820091d10a61" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:27:50 crc kubenswrapper[4558]: E0120 17:27:50.771843 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="6348c20e2c41f3b65a4752b2bf21e2c1f01971ffd97f7ef8d1a6820091d10a61" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:27:50 crc kubenswrapper[4558]: E0120 17:27:50.776462 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="6348c20e2c41f3b65a4752b2bf21e2c1f01971ffd97f7ef8d1a6820091d10a61" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:27:50 crc kubenswrapper[4558]: E0120 17:27:50.776512 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="f859dc3a-a5c9-4ddf-bf5e-0cedd762540f" containerName="nova-scheduler-scheduler" Jan 20 17:27:50 crc kubenswrapper[4558]: I0120 17:27:50.780471 4558 generic.go:334] "Generic (PLEG): container finished" podID="f59aa69c-6c42-41df-905e-29428d350637" containerID="afb4cc76ebcd4dd7b7fd5a1a99c969b9bf8219683eba7336e506d3069b495f83" exitCode=143 Jan 20 17:27:50 crc kubenswrapper[4558]: I0120 17:27:50.780527 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"f59aa69c-6c42-41df-905e-29428d350637","Type":"ContainerDied","Data":"afb4cc76ebcd4dd7b7fd5a1a99c969b9bf8219683eba7336e506d3069b495f83"} Jan 20 17:27:50 crc kubenswrapper[4558]: I0120 17:27:50.782062 4558 generic.go:334] "Generic (PLEG): container finished" podID="0370f524-0608-45c9-a4d4-efca3a6bbc4a" containerID="3bd82063429d21886e43702ff2750e30a9368e89bfcdb3e5dfdc7631a7eabf51" exitCode=143 Jan 20 17:27:50 crc kubenswrapper[4558]: I0120 17:27:50.782090 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"0370f524-0608-45c9-a4d4-efca3a6bbc4a","Type":"ContainerDied","Data":"3bd82063429d21886e43702ff2750e30a9368e89bfcdb3e5dfdc7631a7eabf51"} Jan 20 17:27:50 crc kubenswrapper[4558]: I0120 17:27:50.788953 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/467e28b1-5dde-44cb-9ef4-f81980c3d999-scripts\") pod \"nova-cell0-cell-mapping-5jhz5\" (UID: \"467e28b1-5dde-44cb-9ef4-f81980c3d999\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-5jhz5" Jan 20 17:27:50 crc kubenswrapper[4558]: I0120 17:27:50.789005 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/467e28b1-5dde-44cb-9ef4-f81980c3d999-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-5jhz5\" (UID: \"467e28b1-5dde-44cb-9ef4-f81980c3d999\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-5jhz5" Jan 20 17:27:50 crc kubenswrapper[4558]: I0120 17:27:50.789069 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/467e28b1-5dde-44cb-9ef4-f81980c3d999-config-data\") pod \"nova-cell0-cell-mapping-5jhz5\" (UID: \"467e28b1-5dde-44cb-9ef4-f81980c3d999\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-5jhz5" Jan 20 17:27:50 crc kubenswrapper[4558]: I0120 17:27:50.789121 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zw8pz\" (UniqueName: \"kubernetes.io/projected/467e28b1-5dde-44cb-9ef4-f81980c3d999-kube-api-access-zw8pz\") pod \"nova-cell0-cell-mapping-5jhz5\" (UID: \"467e28b1-5dde-44cb-9ef4-f81980c3d999\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-5jhz5" Jan 20 17:27:50 crc kubenswrapper[4558]: I0120 17:27:50.890818 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/467e28b1-5dde-44cb-9ef4-f81980c3d999-scripts\") pod \"nova-cell0-cell-mapping-5jhz5\" (UID: \"467e28b1-5dde-44cb-9ef4-f81980c3d999\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-5jhz5" Jan 20 17:27:50 crc kubenswrapper[4558]: I0120 17:27:50.890873 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/467e28b1-5dde-44cb-9ef4-f81980c3d999-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-5jhz5\" (UID: \"467e28b1-5dde-44cb-9ef4-f81980c3d999\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-5jhz5" Jan 20 17:27:50 crc kubenswrapper[4558]: I0120 17:27:50.890926 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/467e28b1-5dde-44cb-9ef4-f81980c3d999-config-data\") pod \"nova-cell0-cell-mapping-5jhz5\" (UID: \"467e28b1-5dde-44cb-9ef4-f81980c3d999\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-5jhz5" Jan 20 17:27:50 crc kubenswrapper[4558]: I0120 17:27:50.890957 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zw8pz\" (UniqueName: \"kubernetes.io/projected/467e28b1-5dde-44cb-9ef4-f81980c3d999-kube-api-access-zw8pz\") pod \"nova-cell0-cell-mapping-5jhz5\" (UID: \"467e28b1-5dde-44cb-9ef4-f81980c3d999\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-5jhz5" Jan 20 17:27:50 crc kubenswrapper[4558]: I0120 17:27:50.896398 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/467e28b1-5dde-44cb-9ef4-f81980c3d999-scripts\") pod \"nova-cell0-cell-mapping-5jhz5\" (UID: \"467e28b1-5dde-44cb-9ef4-f81980c3d999\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-5jhz5" Jan 20 17:27:50 crc kubenswrapper[4558]: I0120 17:27:50.896707 4558 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/467e28b1-5dde-44cb-9ef4-f81980c3d999-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-5jhz5\" (UID: \"467e28b1-5dde-44cb-9ef4-f81980c3d999\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-5jhz5" Jan 20 17:27:50 crc kubenswrapper[4558]: I0120 17:27:50.900683 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/467e28b1-5dde-44cb-9ef4-f81980c3d999-config-data\") pod \"nova-cell0-cell-mapping-5jhz5\" (UID: \"467e28b1-5dde-44cb-9ef4-f81980c3d999\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-5jhz5" Jan 20 17:27:50 crc kubenswrapper[4558]: I0120 17:27:50.905683 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zw8pz\" (UniqueName: \"kubernetes.io/projected/467e28b1-5dde-44cb-9ef4-f81980c3d999-kube-api-access-zw8pz\") pod \"nova-cell0-cell-mapping-5jhz5\" (UID: \"467e28b1-5dde-44cb-9ef4-f81980c3d999\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-5jhz5" Jan 20 17:27:50 crc kubenswrapper[4558]: I0120 17:27:50.998024 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-5jhz5" Jan 20 17:27:51 crc kubenswrapper[4558]: I0120 17:27:51.128573 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:27:51 crc kubenswrapper[4558]: I0120 17:27:51.295711 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-cell-mapping-99khx"] Jan 20 17:27:51 crc kubenswrapper[4558]: I0120 17:27:51.301205 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-cell-mapping-99khx"] Jan 20 17:27:51 crc kubenswrapper[4558]: I0120 17:27:51.502822 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-cell-mapping-5jhz5"] Jan 20 17:27:51 crc kubenswrapper[4558]: I0120 17:27:51.510231 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-cell-mapping-4kxm5"] Jan 20 17:27:51 crc kubenswrapper[4558]: I0120 17:27:51.511649 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-4kxm5" Jan 20 17:27:51 crc kubenswrapper[4558]: I0120 17:27:51.514713 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-manage-config-data" Jan 20 17:27:51 crc kubenswrapper[4558]: I0120 17:27:51.514946 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-manage-scripts" Jan 20 17:27:51 crc kubenswrapper[4558]: I0120 17:27:51.525677 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-cell-mapping-4kxm5"] Jan 20 17:27:51 crc kubenswrapper[4558]: I0120 17:27:51.606986 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c8667\" (UniqueName: \"kubernetes.io/projected/aeb478ae-4d88-4f64-be68-ccebfe589ff1-kube-api-access-c8667\") pod \"nova-cell1-cell-mapping-4kxm5\" (UID: \"aeb478ae-4d88-4f64-be68-ccebfe589ff1\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-4kxm5" Jan 20 17:27:51 crc kubenswrapper[4558]: I0120 17:27:51.607135 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aeb478ae-4d88-4f64-be68-ccebfe589ff1-config-data\") pod \"nova-cell1-cell-mapping-4kxm5\" (UID: \"aeb478ae-4d88-4f64-be68-ccebfe589ff1\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-4kxm5" Jan 20 17:27:51 crc kubenswrapper[4558]: I0120 17:27:51.607285 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aeb478ae-4d88-4f64-be68-ccebfe589ff1-scripts\") pod \"nova-cell1-cell-mapping-4kxm5\" (UID: \"aeb478ae-4d88-4f64-be68-ccebfe589ff1\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-4kxm5" Jan 20 17:27:51 crc kubenswrapper[4558]: I0120 17:27:51.607310 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aeb478ae-4d88-4f64-be68-ccebfe589ff1-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-4kxm5\" (UID: \"aeb478ae-4d88-4f64-be68-ccebfe589ff1\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-4kxm5" Jan 20 17:27:51 crc kubenswrapper[4558]: I0120 17:27:51.708979 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aeb478ae-4d88-4f64-be68-ccebfe589ff1-scripts\") pod \"nova-cell1-cell-mapping-4kxm5\" (UID: \"aeb478ae-4d88-4f64-be68-ccebfe589ff1\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-4kxm5" Jan 20 17:27:51 crc kubenswrapper[4558]: I0120 17:27:51.709025 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aeb478ae-4d88-4f64-be68-ccebfe589ff1-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-4kxm5\" (UID: \"aeb478ae-4d88-4f64-be68-ccebfe589ff1\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-4kxm5" Jan 20 17:27:51 crc kubenswrapper[4558]: I0120 17:27:51.709269 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c8667\" (UniqueName: \"kubernetes.io/projected/aeb478ae-4d88-4f64-be68-ccebfe589ff1-kube-api-access-c8667\") pod \"nova-cell1-cell-mapping-4kxm5\" (UID: \"aeb478ae-4d88-4f64-be68-ccebfe589ff1\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-4kxm5" Jan 20 17:27:51 crc 
kubenswrapper[4558]: I0120 17:27:51.709608 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aeb478ae-4d88-4f64-be68-ccebfe589ff1-config-data\") pod \"nova-cell1-cell-mapping-4kxm5\" (UID: \"aeb478ae-4d88-4f64-be68-ccebfe589ff1\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-4kxm5" Jan 20 17:27:51 crc kubenswrapper[4558]: I0120 17:27:51.714053 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aeb478ae-4d88-4f64-be68-ccebfe589ff1-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-4kxm5\" (UID: \"aeb478ae-4d88-4f64-be68-ccebfe589ff1\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-4kxm5" Jan 20 17:27:51 crc kubenswrapper[4558]: I0120 17:27:51.714692 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aeb478ae-4d88-4f64-be68-ccebfe589ff1-config-data\") pod \"nova-cell1-cell-mapping-4kxm5\" (UID: \"aeb478ae-4d88-4f64-be68-ccebfe589ff1\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-4kxm5" Jan 20 17:27:51 crc kubenswrapper[4558]: I0120 17:27:51.717358 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aeb478ae-4d88-4f64-be68-ccebfe589ff1-scripts\") pod \"nova-cell1-cell-mapping-4kxm5\" (UID: \"aeb478ae-4d88-4f64-be68-ccebfe589ff1\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-4kxm5" Jan 20 17:27:51 crc kubenswrapper[4558]: I0120 17:27:51.727607 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c8667\" (UniqueName: \"kubernetes.io/projected/aeb478ae-4d88-4f64-be68-ccebfe589ff1-kube-api-access-c8667\") pod \"nova-cell1-cell-mapping-4kxm5\" (UID: \"aeb478ae-4d88-4f64-be68-ccebfe589ff1\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-4kxm5" Jan 20 17:27:51 crc kubenswrapper[4558]: I0120 17:27:51.795974 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-5jhz5" event={"ID":"467e28b1-5dde-44cb-9ef4-f81980c3d999","Type":"ContainerStarted","Data":"50e04d94313c300a1bbfb5aa67197781874c448647ec0443d12262cb9d1314e4"} Jan 20 17:27:51 crc kubenswrapper[4558]: I0120 17:27:51.796415 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-5jhz5" event={"ID":"467e28b1-5dde-44cb-9ef4-f81980c3d999","Type":"ContainerStarted","Data":"1dc319fbe664663f43eb5091a84dfebd22bd87a2698091de839e309298e64584"} Jan 20 17:27:51 crc kubenswrapper[4558]: I0120 17:27:51.848095 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-5jhz5" podStartSLOduration=1.848078798 podStartE2EDuration="1.848078798s" podCreationTimestamp="2026-01-20 17:27:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:27:51.841579559 +0000 UTC m=+2765.601917526" watchObservedRunningTime="2026-01-20 17:27:51.848078798 +0000 UTC m=+2765.608416765" Jan 20 17:27:51 crc kubenswrapper[4558]: I0120 17:27:51.914713 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-4kxm5" Jan 20 17:27:52 crc kubenswrapper[4558]: I0120 17:27:52.116667 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/neutron-5855ff84bf-bczz4" podUID="bd387ac0-76d7-418c-a34f-19be32ae37f9" containerName="neutron-httpd" probeResult="failure" output="Get \"https://10.217.1.19:9696/\": dial tcp 10.217.1.19:9696: connect: connection refused" Jan 20 17:27:52 crc kubenswrapper[4558]: I0120 17:27:52.356985 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-cell-mapping-4kxm5"] Jan 20 17:27:52 crc kubenswrapper[4558]: I0120 17:27:52.424448 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-5855ff84bf-bczz4" Jan 20 17:27:52 crc kubenswrapper[4558]: I0120 17:27:52.488945 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:27:52 crc kubenswrapper[4558]: I0120 17:27:52.538955 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/bd387ac0-76d7-418c-a34f-19be32ae37f9-internal-tls-certs\") pod \"bd387ac0-76d7-418c-a34f-19be32ae37f9\" (UID: \"bd387ac0-76d7-418c-a34f-19be32ae37f9\") " Jan 20 17:27:52 crc kubenswrapper[4558]: I0120 17:27:52.539061 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/bd387ac0-76d7-418c-a34f-19be32ae37f9-httpd-config\") pod \"bd387ac0-76d7-418c-a34f-19be32ae37f9\" (UID: \"bd387ac0-76d7-418c-a34f-19be32ae37f9\") " Jan 20 17:27:52 crc kubenswrapper[4558]: I0120 17:27:52.539158 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bd387ac0-76d7-418c-a34f-19be32ae37f9-public-tls-certs\") pod \"bd387ac0-76d7-418c-a34f-19be32ae37f9\" (UID: \"bd387ac0-76d7-418c-a34f-19be32ae37f9\") " Jan 20 17:27:52 crc kubenswrapper[4558]: I0120 17:27:52.539223 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/bd387ac0-76d7-418c-a34f-19be32ae37f9-config\") pod \"bd387ac0-76d7-418c-a34f-19be32ae37f9\" (UID: \"bd387ac0-76d7-418c-a34f-19be32ae37f9\") " Jan 20 17:27:52 crc kubenswrapper[4558]: I0120 17:27:52.539304 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/bd387ac0-76d7-418c-a34f-19be32ae37f9-ovndb-tls-certs\") pod \"bd387ac0-76d7-418c-a34f-19be32ae37f9\" (UID: \"bd387ac0-76d7-418c-a34f-19be32ae37f9\") " Jan 20 17:27:52 crc kubenswrapper[4558]: I0120 17:27:52.539350 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d8shb\" (UniqueName: \"kubernetes.io/projected/f859dc3a-a5c9-4ddf-bf5e-0cedd762540f-kube-api-access-d8shb\") pod \"f859dc3a-a5c9-4ddf-bf5e-0cedd762540f\" (UID: \"f859dc3a-a5c9-4ddf-bf5e-0cedd762540f\") " Jan 20 17:27:52 crc kubenswrapper[4558]: I0120 17:27:52.539395 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f859dc3a-a5c9-4ddf-bf5e-0cedd762540f-config-data\") pod \"f859dc3a-a5c9-4ddf-bf5e-0cedd762540f\" (UID: \"f859dc3a-a5c9-4ddf-bf5e-0cedd762540f\") " Jan 20 17:27:52 crc kubenswrapper[4558]: I0120 17:27:52.539422 4558 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd387ac0-76d7-418c-a34f-19be32ae37f9-combined-ca-bundle\") pod \"bd387ac0-76d7-418c-a34f-19be32ae37f9\" (UID: \"bd387ac0-76d7-418c-a34f-19be32ae37f9\") " Jan 20 17:27:52 crc kubenswrapper[4558]: I0120 17:27:52.539478 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xnr96\" (UniqueName: \"kubernetes.io/projected/bd387ac0-76d7-418c-a34f-19be32ae37f9-kube-api-access-xnr96\") pod \"bd387ac0-76d7-418c-a34f-19be32ae37f9\" (UID: \"bd387ac0-76d7-418c-a34f-19be32ae37f9\") " Jan 20 17:27:52 crc kubenswrapper[4558]: I0120 17:27:52.539532 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f859dc3a-a5c9-4ddf-bf5e-0cedd762540f-combined-ca-bundle\") pod \"f859dc3a-a5c9-4ddf-bf5e-0cedd762540f\" (UID: \"f859dc3a-a5c9-4ddf-bf5e-0cedd762540f\") " Jan 20 17:27:52 crc kubenswrapper[4558]: I0120 17:27:52.545072 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd387ac0-76d7-418c-a34f-19be32ae37f9-kube-api-access-xnr96" (OuterVolumeSpecName: "kube-api-access-xnr96") pod "bd387ac0-76d7-418c-a34f-19be32ae37f9" (UID: "bd387ac0-76d7-418c-a34f-19be32ae37f9"). InnerVolumeSpecName "kube-api-access-xnr96". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:27:52 crc kubenswrapper[4558]: I0120 17:27:52.545962 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f859dc3a-a5c9-4ddf-bf5e-0cedd762540f-kube-api-access-d8shb" (OuterVolumeSpecName: "kube-api-access-d8shb") pod "f859dc3a-a5c9-4ddf-bf5e-0cedd762540f" (UID: "f859dc3a-a5c9-4ddf-bf5e-0cedd762540f"). InnerVolumeSpecName "kube-api-access-d8shb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:27:52 crc kubenswrapper[4558]: I0120 17:27:52.550412 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bd387ac0-76d7-418c-a34f-19be32ae37f9-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "bd387ac0-76d7-418c-a34f-19be32ae37f9" (UID: "bd387ac0-76d7-418c-a34f-19be32ae37f9"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:52 crc kubenswrapper[4558]: I0120 17:27:52.575512 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f859dc3a-a5c9-4ddf-bf5e-0cedd762540f-config-data" (OuterVolumeSpecName: "config-data") pod "f859dc3a-a5c9-4ddf-bf5e-0cedd762540f" (UID: "f859dc3a-a5c9-4ddf-bf5e-0cedd762540f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:52 crc kubenswrapper[4558]: I0120 17:27:52.578731 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f859dc3a-a5c9-4ddf-bf5e-0cedd762540f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f859dc3a-a5c9-4ddf-bf5e-0cedd762540f" (UID: "f859dc3a-a5c9-4ddf-bf5e-0cedd762540f"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:52 crc kubenswrapper[4558]: I0120 17:27:52.586155 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d697297c-4dda-4206-9a85-e393d32bc809" path="/var/lib/kubelet/pods/d697297c-4dda-4206-9a85-e393d32bc809/volumes" Jan 20 17:27:52 crc kubenswrapper[4558]: I0120 17:27:52.608491 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bd387ac0-76d7-418c-a34f-19be32ae37f9-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "bd387ac0-76d7-418c-a34f-19be32ae37f9" (UID: "bd387ac0-76d7-418c-a34f-19be32ae37f9"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:52 crc kubenswrapper[4558]: I0120 17:27:52.613668 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bd387ac0-76d7-418c-a34f-19be32ae37f9-config" (OuterVolumeSpecName: "config") pod "bd387ac0-76d7-418c-a34f-19be32ae37f9" (UID: "bd387ac0-76d7-418c-a34f-19be32ae37f9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:52 crc kubenswrapper[4558]: I0120 17:27:52.624636 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bd387ac0-76d7-418c-a34f-19be32ae37f9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bd387ac0-76d7-418c-a34f-19be32ae37f9" (UID: "bd387ac0-76d7-418c-a34f-19be32ae37f9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:52 crc kubenswrapper[4558]: I0120 17:27:52.637594 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bd387ac0-76d7-418c-a34f-19be32ae37f9-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "bd387ac0-76d7-418c-a34f-19be32ae37f9" (UID: "bd387ac0-76d7-418c-a34f-19be32ae37f9"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:52 crc kubenswrapper[4558]: I0120 17:27:52.637751 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bd387ac0-76d7-418c-a34f-19be32ae37f9-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "bd387ac0-76d7-418c-a34f-19be32ae37f9" (UID: "bd387ac0-76d7-418c-a34f-19be32ae37f9"). InnerVolumeSpecName "ovndb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:52 crc kubenswrapper[4558]: I0120 17:27:52.642593 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/bd387ac0-76d7-418c-a34f-19be32ae37f9-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:52 crc kubenswrapper[4558]: I0120 17:27:52.642620 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/bd387ac0-76d7-418c-a34f-19be32ae37f9-httpd-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:52 crc kubenswrapper[4558]: I0120 17:27:52.642630 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bd387ac0-76d7-418c-a34f-19be32ae37f9-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:52 crc kubenswrapper[4558]: I0120 17:27:52.642640 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/bd387ac0-76d7-418c-a34f-19be32ae37f9-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:52 crc kubenswrapper[4558]: I0120 17:27:52.642649 4558 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/bd387ac0-76d7-418c-a34f-19be32ae37f9-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:52 crc kubenswrapper[4558]: I0120 17:27:52.642658 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d8shb\" (UniqueName: \"kubernetes.io/projected/f859dc3a-a5c9-4ddf-bf5e-0cedd762540f-kube-api-access-d8shb\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:52 crc kubenswrapper[4558]: I0120 17:27:52.642669 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f859dc3a-a5c9-4ddf-bf5e-0cedd762540f-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:52 crc kubenswrapper[4558]: I0120 17:27:52.642677 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd387ac0-76d7-418c-a34f-19be32ae37f9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:52 crc kubenswrapper[4558]: I0120 17:27:52.642688 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xnr96\" (UniqueName: \"kubernetes.io/projected/bd387ac0-76d7-418c-a34f-19be32ae37f9-kube-api-access-xnr96\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:52 crc kubenswrapper[4558]: I0120 17:27:52.642697 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f859dc3a-a5c9-4ddf-bf5e-0cedd762540f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:52 crc kubenswrapper[4558]: I0120 17:27:52.827034 4558 generic.go:334] "Generic (PLEG): container finished" podID="f859dc3a-a5c9-4ddf-bf5e-0cedd762540f" containerID="6348c20e2c41f3b65a4752b2bf21e2c1f01971ffd97f7ef8d1a6820091d10a61" exitCode=0 Jan 20 17:27:52 crc kubenswrapper[4558]: I0120 17:27:52.827086 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"f859dc3a-a5c9-4ddf-bf5e-0cedd762540f","Type":"ContainerDied","Data":"6348c20e2c41f3b65a4752b2bf21e2c1f01971ffd97f7ef8d1a6820091d10a61"} Jan 20 17:27:52 crc kubenswrapper[4558]: I0120 17:27:52.827147 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" 
event={"ID":"f859dc3a-a5c9-4ddf-bf5e-0cedd762540f","Type":"ContainerDied","Data":"a4a380e21fdb868a97136a44549eb33353441f09b4ed3dcd9f06928932312fb5"} Jan 20 17:27:52 crc kubenswrapper[4558]: I0120 17:27:52.827190 4558 scope.go:117] "RemoveContainer" containerID="6348c20e2c41f3b65a4752b2bf21e2c1f01971ffd97f7ef8d1a6820091d10a61" Jan 20 17:27:52 crc kubenswrapper[4558]: I0120 17:27:52.827898 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:27:52 crc kubenswrapper[4558]: I0120 17:27:52.830670 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-4kxm5" event={"ID":"aeb478ae-4d88-4f64-be68-ccebfe589ff1","Type":"ContainerStarted","Data":"08b72bd01a759efc57d52868101c30d8254f92693dba81d382bc14dd5867a057"} Jan 20 17:27:52 crc kubenswrapper[4558]: I0120 17:27:52.830703 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-4kxm5" event={"ID":"aeb478ae-4d88-4f64-be68-ccebfe589ff1","Type":"ContainerStarted","Data":"8a98e18ecc83d87e03e736a4ef60a48ac281b0649b3d8eb29fa53ba47d5afcb7"} Jan 20 17:27:52 crc kubenswrapper[4558]: I0120 17:27:52.836278 4558 generic.go:334] "Generic (PLEG): container finished" podID="bd387ac0-76d7-418c-a34f-19be32ae37f9" containerID="e8b21ab038396136a9090ecf6bdb12372852799fe29e7b256ebbd56bdce61f7b" exitCode=0 Jan 20 17:27:52 crc kubenswrapper[4558]: I0120 17:27:52.836342 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-5855ff84bf-bczz4" Jan 20 17:27:52 crc kubenswrapper[4558]: I0120 17:27:52.836388 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-5855ff84bf-bczz4" event={"ID":"bd387ac0-76d7-418c-a34f-19be32ae37f9","Type":"ContainerDied","Data":"e8b21ab038396136a9090ecf6bdb12372852799fe29e7b256ebbd56bdce61f7b"} Jan 20 17:27:52 crc kubenswrapper[4558]: I0120 17:27:52.836470 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-5855ff84bf-bczz4" event={"ID":"bd387ac0-76d7-418c-a34f-19be32ae37f9","Type":"ContainerDied","Data":"21d9c25d51c6ffa8cfbdb4331abc7cf8daae151992e7c397f352af90b1798d2c"} Jan 20 17:27:52 crc kubenswrapper[4558]: I0120 17:27:52.865403 4558 scope.go:117] "RemoveContainer" containerID="6348c20e2c41f3b65a4752b2bf21e2c1f01971ffd97f7ef8d1a6820091d10a61" Jan 20 17:27:52 crc kubenswrapper[4558]: E0120 17:27:52.865925 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6348c20e2c41f3b65a4752b2bf21e2c1f01971ffd97f7ef8d1a6820091d10a61\": container with ID starting with 6348c20e2c41f3b65a4752b2bf21e2c1f01971ffd97f7ef8d1a6820091d10a61 not found: ID does not exist" containerID="6348c20e2c41f3b65a4752b2bf21e2c1f01971ffd97f7ef8d1a6820091d10a61" Jan 20 17:27:52 crc kubenswrapper[4558]: I0120 17:27:52.865963 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6348c20e2c41f3b65a4752b2bf21e2c1f01971ffd97f7ef8d1a6820091d10a61"} err="failed to get container status \"6348c20e2c41f3b65a4752b2bf21e2c1f01971ffd97f7ef8d1a6820091d10a61\": rpc error: code = NotFound desc = could not find container \"6348c20e2c41f3b65a4752b2bf21e2c1f01971ffd97f7ef8d1a6820091d10a61\": container with ID starting with 6348c20e2c41f3b65a4752b2bf21e2c1f01971ffd97f7ef8d1a6820091d10a61 not found: ID does not exist" Jan 20 17:27:52 crc kubenswrapper[4558]: I0120 
17:27:52.865989 4558 scope.go:117] "RemoveContainer" containerID="976dcb4a79daec7424bb1f359fd90302b7391b2695da9a6003214e35e5292830" Jan 20 17:27:52 crc kubenswrapper[4558]: I0120 17:27:52.866591 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-4kxm5" podStartSLOduration=1.866578426 podStartE2EDuration="1.866578426s" podCreationTimestamp="2026-01-20 17:27:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:27:52.859613521 +0000 UTC m=+2766.619951488" watchObservedRunningTime="2026-01-20 17:27:52.866578426 +0000 UTC m=+2766.626916393" Jan 20 17:27:52 crc kubenswrapper[4558]: I0120 17:27:52.886930 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:27:52 crc kubenswrapper[4558]: I0120 17:27:52.894005 4558 scope.go:117] "RemoveContainer" containerID="e8b21ab038396136a9090ecf6bdb12372852799fe29e7b256ebbd56bdce61f7b" Jan 20 17:27:52 crc kubenswrapper[4558]: I0120 17:27:52.896704 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:27:52 crc kubenswrapper[4558]: I0120 17:27:52.905249 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-5855ff84bf-bczz4"] Jan 20 17:27:52 crc kubenswrapper[4558]: I0120 17:27:52.917051 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-5855ff84bf-bczz4"] Jan 20 17:27:52 crc kubenswrapper[4558]: I0120 17:27:52.917083 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:27:52 crc kubenswrapper[4558]: E0120 17:27:52.917569 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd387ac0-76d7-418c-a34f-19be32ae37f9" containerName="neutron-httpd" Jan 20 17:27:52 crc kubenswrapper[4558]: I0120 17:27:52.917590 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd387ac0-76d7-418c-a34f-19be32ae37f9" containerName="neutron-httpd" Jan 20 17:27:52 crc kubenswrapper[4558]: E0120 17:27:52.917611 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f859dc3a-a5c9-4ddf-bf5e-0cedd762540f" containerName="nova-scheduler-scheduler" Jan 20 17:27:52 crc kubenswrapper[4558]: I0120 17:27:52.917619 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f859dc3a-a5c9-4ddf-bf5e-0cedd762540f" containerName="nova-scheduler-scheduler" Jan 20 17:27:52 crc kubenswrapper[4558]: E0120 17:27:52.917634 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd387ac0-76d7-418c-a34f-19be32ae37f9" containerName="neutron-api" Jan 20 17:27:52 crc kubenswrapper[4558]: I0120 17:27:52.917639 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd387ac0-76d7-418c-a34f-19be32ae37f9" containerName="neutron-api" Jan 20 17:27:52 crc kubenswrapper[4558]: I0120 17:27:52.917867 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="bd387ac0-76d7-418c-a34f-19be32ae37f9" containerName="neutron-httpd" Jan 20 17:27:52 crc kubenswrapper[4558]: I0120 17:27:52.917885 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="bd387ac0-76d7-418c-a34f-19be32ae37f9" containerName="neutron-api" Jan 20 17:27:52 crc kubenswrapper[4558]: I0120 17:27:52.917898 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f859dc3a-a5c9-4ddf-bf5e-0cedd762540f" containerName="nova-scheduler-scheduler" Jan 20 17:27:52 
crc kubenswrapper[4558]: I0120 17:27:52.918709 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:27:52 crc kubenswrapper[4558]: I0120 17:27:52.921257 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-scheduler-config-data" Jan 20 17:27:52 crc kubenswrapper[4558]: I0120 17:27:52.921665 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:27:52 crc kubenswrapper[4558]: I0120 17:27:52.925005 4558 scope.go:117] "RemoveContainer" containerID="976dcb4a79daec7424bb1f359fd90302b7391b2695da9a6003214e35e5292830" Jan 20 17:27:52 crc kubenswrapper[4558]: E0120 17:27:52.926407 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"976dcb4a79daec7424bb1f359fd90302b7391b2695da9a6003214e35e5292830\": container with ID starting with 976dcb4a79daec7424bb1f359fd90302b7391b2695da9a6003214e35e5292830 not found: ID does not exist" containerID="976dcb4a79daec7424bb1f359fd90302b7391b2695da9a6003214e35e5292830" Jan 20 17:27:52 crc kubenswrapper[4558]: I0120 17:27:52.928461 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"976dcb4a79daec7424bb1f359fd90302b7391b2695da9a6003214e35e5292830"} err="failed to get container status \"976dcb4a79daec7424bb1f359fd90302b7391b2695da9a6003214e35e5292830\": rpc error: code = NotFound desc = could not find container \"976dcb4a79daec7424bb1f359fd90302b7391b2695da9a6003214e35e5292830\": container with ID starting with 976dcb4a79daec7424bb1f359fd90302b7391b2695da9a6003214e35e5292830 not found: ID does not exist" Jan 20 17:27:52 crc kubenswrapper[4558]: I0120 17:27:52.928528 4558 scope.go:117] "RemoveContainer" containerID="e8b21ab038396136a9090ecf6bdb12372852799fe29e7b256ebbd56bdce61f7b" Jan 20 17:27:52 crc kubenswrapper[4558]: E0120 17:27:52.928921 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e8b21ab038396136a9090ecf6bdb12372852799fe29e7b256ebbd56bdce61f7b\": container with ID starting with e8b21ab038396136a9090ecf6bdb12372852799fe29e7b256ebbd56bdce61f7b not found: ID does not exist" containerID="e8b21ab038396136a9090ecf6bdb12372852799fe29e7b256ebbd56bdce61f7b" Jan 20 17:27:52 crc kubenswrapper[4558]: I0120 17:27:52.929005 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e8b21ab038396136a9090ecf6bdb12372852799fe29e7b256ebbd56bdce61f7b"} err="failed to get container status \"e8b21ab038396136a9090ecf6bdb12372852799fe29e7b256ebbd56bdce61f7b\": rpc error: code = NotFound desc = could not find container \"e8b21ab038396136a9090ecf6bdb12372852799fe29e7b256ebbd56bdce61f7b\": container with ID starting with e8b21ab038396136a9090ecf6bdb12372852799fe29e7b256ebbd56bdce61f7b not found: ID does not exist" Jan 20 17:27:52 crc kubenswrapper[4558]: I0120 17:27:52.949074 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2a7ece8-32ed-4eb3-ab85-27ff4361622d-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"b2a7ece8-32ed-4eb3-ab85-27ff4361622d\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:27:52 crc kubenswrapper[4558]: I0120 17:27:52.949248 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-rcxn7\" (UniqueName: \"kubernetes.io/projected/b2a7ece8-32ed-4eb3-ab85-27ff4361622d-kube-api-access-rcxn7\") pod \"nova-scheduler-0\" (UID: \"b2a7ece8-32ed-4eb3-ab85-27ff4361622d\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:27:52 crc kubenswrapper[4558]: I0120 17:27:52.949282 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b2a7ece8-32ed-4eb3-ab85-27ff4361622d-config-data\") pod \"nova-scheduler-0\" (UID: \"b2a7ece8-32ed-4eb3-ab85-27ff4361622d\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:27:53 crc kubenswrapper[4558]: I0120 17:27:53.050888 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rcxn7\" (UniqueName: \"kubernetes.io/projected/b2a7ece8-32ed-4eb3-ab85-27ff4361622d-kube-api-access-rcxn7\") pod \"nova-scheduler-0\" (UID: \"b2a7ece8-32ed-4eb3-ab85-27ff4361622d\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:27:53 crc kubenswrapper[4558]: I0120 17:27:53.050949 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b2a7ece8-32ed-4eb3-ab85-27ff4361622d-config-data\") pod \"nova-scheduler-0\" (UID: \"b2a7ece8-32ed-4eb3-ab85-27ff4361622d\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:27:53 crc kubenswrapper[4558]: I0120 17:27:53.051020 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2a7ece8-32ed-4eb3-ab85-27ff4361622d-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"b2a7ece8-32ed-4eb3-ab85-27ff4361622d\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:27:53 crc kubenswrapper[4558]: I0120 17:27:53.055940 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2a7ece8-32ed-4eb3-ab85-27ff4361622d-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"b2a7ece8-32ed-4eb3-ab85-27ff4361622d\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:27:53 crc kubenswrapper[4558]: I0120 17:27:53.056279 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b2a7ece8-32ed-4eb3-ab85-27ff4361622d-config-data\") pod \"nova-scheduler-0\" (UID: \"b2a7ece8-32ed-4eb3-ab85-27ff4361622d\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:27:53 crc kubenswrapper[4558]: I0120 17:27:53.066631 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rcxn7\" (UniqueName: \"kubernetes.io/projected/b2a7ece8-32ed-4eb3-ab85-27ff4361622d-kube-api-access-rcxn7\") pod \"nova-scheduler-0\" (UID: \"b2a7ece8-32ed-4eb3-ab85-27ff4361622d\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:27:53 crc kubenswrapper[4558]: I0120 17:27:53.238909 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:27:53 crc kubenswrapper[4558]: I0120 17:27:53.572234 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/nova-metadata-0" podUID="0370f524-0608-45c9-a4d4-efca3a6bbc4a" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.1.56:8775/\": read tcp 10.217.0.2:40414->10.217.1.56:8775: read: connection reset by peer" Jan 20 17:27:53 crc kubenswrapper[4558]: I0120 17:27:53.572571 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/nova-metadata-0" podUID="0370f524-0608-45c9-a4d4-efca3a6bbc4a" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.1.56:8775/\": read tcp 10.217.0.2:40412->10.217.1.56:8775: read: connection reset by peer" Jan 20 17:27:53 crc kubenswrapper[4558]: I0120 17:27:53.689149 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:27:53 crc kubenswrapper[4558]: W0120 17:27:53.689991 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb2a7ece8_32ed_4eb3_ab85_27ff4361622d.slice/crio-4c5b27f057af39120dd18ed3760fd5e5bece44a4920f0051709e8fcb3c409554 WatchSource:0}: Error finding container 4c5b27f057af39120dd18ed3760fd5e5bece44a4920f0051709e8fcb3c409554: Status 404 returned error can't find the container with id 4c5b27f057af39120dd18ed3760fd5e5bece44a4920f0051709e8fcb3c409554 Jan 20 17:27:53 crc kubenswrapper[4558]: I0120 17:27:53.733115 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/nova-api-0" podUID="f59aa69c-6c42-41df-905e-29428d350637" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.1.58:8774/\": read tcp 10.217.0.2:60424->10.217.1.58:8774: read: connection reset by peer" Jan 20 17:27:53 crc kubenswrapper[4558]: I0120 17:27:53.733437 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/nova-api-0" podUID="f59aa69c-6c42-41df-905e-29428d350637" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.1.58:8774/\": read tcp 10.217.0.2:60428->10.217.1.58:8774: read: connection reset by peer" Jan 20 17:27:53 crc kubenswrapper[4558]: I0120 17:27:53.877399 4558 generic.go:334] "Generic (PLEG): container finished" podID="f59aa69c-6c42-41df-905e-29428d350637" containerID="a58e1fbb49aa21c6ab202f7e097494e1cdb0e9dcdffc1b04bb2b93be7a95a03f" exitCode=0 Jan 20 17:27:53 crc kubenswrapper[4558]: I0120 17:27:53.877739 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"f59aa69c-6c42-41df-905e-29428d350637","Type":"ContainerDied","Data":"a58e1fbb49aa21c6ab202f7e097494e1cdb0e9dcdffc1b04bb2b93be7a95a03f"} Jan 20 17:27:53 crc kubenswrapper[4558]: I0120 17:27:53.882369 4558 generic.go:334] "Generic (PLEG): container finished" podID="0370f524-0608-45c9-a4d4-efca3a6bbc4a" containerID="21f88c9b39c9e0e5b774687833ad6583b7301ae275f1fa63a898d7d06969b445" exitCode=0 Jan 20 17:27:53 crc kubenswrapper[4558]: I0120 17:27:53.882428 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"0370f524-0608-45c9-a4d4-efca3a6bbc4a","Type":"ContainerDied","Data":"21f88c9b39c9e0e5b774687833ad6583b7301ae275f1fa63a898d7d06969b445"} Jan 20 17:27:53 crc kubenswrapper[4558]: I0120 17:27:53.889396 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"b2a7ece8-32ed-4eb3-ab85-27ff4361622d","Type":"ContainerStarted","Data":"4c5b27f057af39120dd18ed3760fd5e5bece44a4920f0051709e8fcb3c409554"} Jan 20 17:27:53 crc kubenswrapper[4558]: I0120 17:27:53.920062 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:27:53 crc kubenswrapper[4558]: I0120 17:27:53.974090 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g7d7d\" (UniqueName: \"kubernetes.io/projected/0370f524-0608-45c9-a4d4-efca3a6bbc4a-kube-api-access-g7d7d\") pod \"0370f524-0608-45c9-a4d4-efca3a6bbc4a\" (UID: \"0370f524-0608-45c9-a4d4-efca3a6bbc4a\") " Jan 20 17:27:53 crc kubenswrapper[4558]: I0120 17:27:53.974221 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/0370f524-0608-45c9-a4d4-efca3a6bbc4a-nova-metadata-tls-certs\") pod \"0370f524-0608-45c9-a4d4-efca3a6bbc4a\" (UID: \"0370f524-0608-45c9-a4d4-efca3a6bbc4a\") " Jan 20 17:27:53 crc kubenswrapper[4558]: I0120 17:27:53.974302 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0370f524-0608-45c9-a4d4-efca3a6bbc4a-combined-ca-bundle\") pod \"0370f524-0608-45c9-a4d4-efca3a6bbc4a\" (UID: \"0370f524-0608-45c9-a4d4-efca3a6bbc4a\") " Jan 20 17:27:53 crc kubenswrapper[4558]: I0120 17:27:53.974326 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0370f524-0608-45c9-a4d4-efca3a6bbc4a-config-data\") pod \"0370f524-0608-45c9-a4d4-efca3a6bbc4a\" (UID: \"0370f524-0608-45c9-a4d4-efca3a6bbc4a\") " Jan 20 17:27:53 crc kubenswrapper[4558]: I0120 17:27:53.974507 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0370f524-0608-45c9-a4d4-efca3a6bbc4a-logs\") pod \"0370f524-0608-45c9-a4d4-efca3a6bbc4a\" (UID: \"0370f524-0608-45c9-a4d4-efca3a6bbc4a\") " Jan 20 17:27:53 crc kubenswrapper[4558]: I0120 17:27:53.976594 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0370f524-0608-45c9-a4d4-efca3a6bbc4a-logs" (OuterVolumeSpecName: "logs") pod "0370f524-0608-45c9-a4d4-efca3a6bbc4a" (UID: "0370f524-0608-45c9-a4d4-efca3a6bbc4a"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:27:53 crc kubenswrapper[4558]: I0120 17:27:53.980302 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0370f524-0608-45c9-a4d4-efca3a6bbc4a-kube-api-access-g7d7d" (OuterVolumeSpecName: "kube-api-access-g7d7d") pod "0370f524-0608-45c9-a4d4-efca3a6bbc4a" (UID: "0370f524-0608-45c9-a4d4-efca3a6bbc4a"). InnerVolumeSpecName "kube-api-access-g7d7d". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:27:54 crc kubenswrapper[4558]: I0120 17:27:54.011260 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0370f524-0608-45c9-a4d4-efca3a6bbc4a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0370f524-0608-45c9-a4d4-efca3a6bbc4a" (UID: "0370f524-0608-45c9-a4d4-efca3a6bbc4a"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:54 crc kubenswrapper[4558]: I0120 17:27:54.016239 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0370f524-0608-45c9-a4d4-efca3a6bbc4a-config-data" (OuterVolumeSpecName: "config-data") pod "0370f524-0608-45c9-a4d4-efca3a6bbc4a" (UID: "0370f524-0608-45c9-a4d4-efca3a6bbc4a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:54 crc kubenswrapper[4558]: I0120 17:27:54.024329 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0370f524-0608-45c9-a4d4-efca3a6bbc4a-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "0370f524-0608-45c9-a4d4-efca3a6bbc4a" (UID: "0370f524-0608-45c9-a4d4-efca3a6bbc4a"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:54 crc kubenswrapper[4558]: I0120 17:27:54.078807 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0370f524-0608-45c9-a4d4-efca3a6bbc4a-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:54 crc kubenswrapper[4558]: I0120 17:27:54.078847 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g7d7d\" (UniqueName: \"kubernetes.io/projected/0370f524-0608-45c9-a4d4-efca3a6bbc4a-kube-api-access-g7d7d\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:54 crc kubenswrapper[4558]: I0120 17:27:54.078860 4558 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/0370f524-0608-45c9-a4d4-efca3a6bbc4a-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:54 crc kubenswrapper[4558]: I0120 17:27:54.078870 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0370f524-0608-45c9-a4d4-efca3a6bbc4a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:54 crc kubenswrapper[4558]: I0120 17:27:54.078880 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0370f524-0608-45c9-a4d4-efca3a6bbc4a-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:54 crc kubenswrapper[4558]: I0120 17:27:54.236018 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:27:54 crc kubenswrapper[4558]: I0120 17:27:54.288617 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f59aa69c-6c42-41df-905e-29428d350637-logs\") pod \"f59aa69c-6c42-41df-905e-29428d350637\" (UID: \"f59aa69c-6c42-41df-905e-29428d350637\") " Jan 20 17:27:54 crc kubenswrapper[4558]: I0120 17:27:54.288721 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f59aa69c-6c42-41df-905e-29428d350637-internal-tls-certs\") pod \"f59aa69c-6c42-41df-905e-29428d350637\" (UID: \"f59aa69c-6c42-41df-905e-29428d350637\") " Jan 20 17:27:54 crc kubenswrapper[4558]: I0120 17:27:54.288831 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f59aa69c-6c42-41df-905e-29428d350637-config-data\") pod \"f59aa69c-6c42-41df-905e-29428d350637\" (UID: \"f59aa69c-6c42-41df-905e-29428d350637\") " Jan 20 17:27:54 crc kubenswrapper[4558]: I0120 17:27:54.288864 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f59aa69c-6c42-41df-905e-29428d350637-combined-ca-bundle\") pod \"f59aa69c-6c42-41df-905e-29428d350637\" (UID: \"f59aa69c-6c42-41df-905e-29428d350637\") " Jan 20 17:27:54 crc kubenswrapper[4558]: I0120 17:27:54.288944 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cdstn\" (UniqueName: \"kubernetes.io/projected/f59aa69c-6c42-41df-905e-29428d350637-kube-api-access-cdstn\") pod \"f59aa69c-6c42-41df-905e-29428d350637\" (UID: \"f59aa69c-6c42-41df-905e-29428d350637\") " Jan 20 17:27:54 crc kubenswrapper[4558]: I0120 17:27:54.289066 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f59aa69c-6c42-41df-905e-29428d350637-public-tls-certs\") pod \"f59aa69c-6c42-41df-905e-29428d350637\" (UID: \"f59aa69c-6c42-41df-905e-29428d350637\") " Jan 20 17:27:54 crc kubenswrapper[4558]: I0120 17:27:54.291596 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f59aa69c-6c42-41df-905e-29428d350637-logs" (OuterVolumeSpecName: "logs") pod "f59aa69c-6c42-41df-905e-29428d350637" (UID: "f59aa69c-6c42-41df-905e-29428d350637"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:27:54 crc kubenswrapper[4558]: I0120 17:27:54.302017 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f59aa69c-6c42-41df-905e-29428d350637-kube-api-access-cdstn" (OuterVolumeSpecName: "kube-api-access-cdstn") pod "f59aa69c-6c42-41df-905e-29428d350637" (UID: "f59aa69c-6c42-41df-905e-29428d350637"). InnerVolumeSpecName "kube-api-access-cdstn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:27:54 crc kubenswrapper[4558]: I0120 17:27:54.329674 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f59aa69c-6c42-41df-905e-29428d350637-config-data" (OuterVolumeSpecName: "config-data") pod "f59aa69c-6c42-41df-905e-29428d350637" (UID: "f59aa69c-6c42-41df-905e-29428d350637"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:54 crc kubenswrapper[4558]: I0120 17:27:54.343144 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f59aa69c-6c42-41df-905e-29428d350637-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f59aa69c-6c42-41df-905e-29428d350637" (UID: "f59aa69c-6c42-41df-905e-29428d350637"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:54 crc kubenswrapper[4558]: I0120 17:27:54.349282 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f59aa69c-6c42-41df-905e-29428d350637-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "f59aa69c-6c42-41df-905e-29428d350637" (UID: "f59aa69c-6c42-41df-905e-29428d350637"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:54 crc kubenswrapper[4558]: I0120 17:27:54.362398 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f59aa69c-6c42-41df-905e-29428d350637-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "f59aa69c-6c42-41df-905e-29428d350637" (UID: "f59aa69c-6c42-41df-905e-29428d350637"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:54 crc kubenswrapper[4558]: I0120 17:27:54.393556 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f59aa69c-6c42-41df-905e-29428d350637-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:54 crc kubenswrapper[4558]: I0120 17:27:54.393678 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f59aa69c-6c42-41df-905e-29428d350637-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:54 crc kubenswrapper[4558]: I0120 17:27:54.393737 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f59aa69c-6c42-41df-905e-29428d350637-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:54 crc kubenswrapper[4558]: I0120 17:27:54.393797 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f59aa69c-6c42-41df-905e-29428d350637-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:54 crc kubenswrapper[4558]: I0120 17:27:54.393846 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cdstn\" (UniqueName: \"kubernetes.io/projected/f59aa69c-6c42-41df-905e-29428d350637-kube-api-access-cdstn\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:54 crc kubenswrapper[4558]: I0120 17:27:54.393926 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f59aa69c-6c42-41df-905e-29428d350637-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:54 crc kubenswrapper[4558]: I0120 17:27:54.576610 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd387ac0-76d7-418c-a34f-19be32ae37f9" path="/var/lib/kubelet/pods/bd387ac0-76d7-418c-a34f-19be32ae37f9/volumes" Jan 20 17:27:54 crc kubenswrapper[4558]: I0120 17:27:54.577238 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f859dc3a-a5c9-4ddf-bf5e-0cedd762540f" path="/var/lib/kubelet/pods/f859dc3a-a5c9-4ddf-bf5e-0cedd762540f/volumes" Jan 20 17:27:54 crc kubenswrapper[4558]: I0120 
17:27:54.663377 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/keystone-74494d9d6c-qv9st" Jan 20 17:27:54 crc kubenswrapper[4558]: I0120 17:27:54.730050 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-6866498598-2wxkp"] Jan 20 17:27:54 crc kubenswrapper[4558]: I0120 17:27:54.730326 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/keystone-6866498598-2wxkp" podUID="72394a99-d879-415b-9ae9-21165451ac70" containerName="keystone-api" containerID="cri-o://57a221fa6c7f95ec5c64257c235ed54b2e2bd0a8d13ecc18c3f5f0a4dce5307b" gracePeriod=30 Jan 20 17:27:54 crc kubenswrapper[4558]: I0120 17:27:54.906044 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"b2a7ece8-32ed-4eb3-ab85-27ff4361622d","Type":"ContainerStarted","Data":"15e562b45f89d1f5fb52993145b8f16fbd8c5b35f2f80d2df9e160bf13a36eaf"} Jan 20 17:27:54 crc kubenswrapper[4558]: I0120 17:27:54.908241 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"f59aa69c-6c42-41df-905e-29428d350637","Type":"ContainerDied","Data":"13fa9f145203a0f248c5638d0ca972e0ba8060df5ceeb7756420262c280d6194"} Jan 20 17:27:54 crc kubenswrapper[4558]: I0120 17:27:54.908299 4558 scope.go:117] "RemoveContainer" containerID="a58e1fbb49aa21c6ab202f7e097494e1cdb0e9dcdffc1b04bb2b93be7a95a03f" Jan 20 17:27:54 crc kubenswrapper[4558]: I0120 17:27:54.908465 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:27:54 crc kubenswrapper[4558]: I0120 17:27:54.922522 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"0370f524-0608-45c9-a4d4-efca3a6bbc4a","Type":"ContainerDied","Data":"b1bceb775b9dd183fe674070820367a0fa5f61e96ce3468ee4f2fb8dbcd0d756"} Jan 20 17:27:54 crc kubenswrapper[4558]: I0120 17:27:54.922611 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:27:54 crc kubenswrapper[4558]: I0120 17:27:54.938699 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-scheduler-0" podStartSLOduration=2.938684706 podStartE2EDuration="2.938684706s" podCreationTimestamp="2026-01-20 17:27:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:27:54.934951216 +0000 UTC m=+2768.695289183" watchObservedRunningTime="2026-01-20 17:27:54.938684706 +0000 UTC m=+2768.699022673" Jan 20 17:27:54 crc kubenswrapper[4558]: I0120 17:27:54.956546 4558 scope.go:117] "RemoveContainer" containerID="afb4cc76ebcd4dd7b7fd5a1a99c969b9bf8219683eba7336e506d3069b495f83" Jan 20 17:27:54 crc kubenswrapper[4558]: I0120 17:27:54.973974 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:27:54 crc kubenswrapper[4558]: I0120 17:27:54.983202 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:27:54 crc kubenswrapper[4558]: I0120 17:27:54.992264 4558 scope.go:117] "RemoveContainer" containerID="21f88c9b39c9e0e5b774687833ad6583b7301ae275f1fa63a898d7d06969b445" Jan 20 17:27:54 crc kubenswrapper[4558]: I0120 17:27:54.996679 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:27:54 crc kubenswrapper[4558]: E0120 17:27:54.997114 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f59aa69c-6c42-41df-905e-29428d350637" containerName="nova-api-log" Jan 20 17:27:54 crc kubenswrapper[4558]: I0120 17:27:54.997129 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f59aa69c-6c42-41df-905e-29428d350637" containerName="nova-api-log" Jan 20 17:27:54 crc kubenswrapper[4558]: E0120 17:27:54.997174 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0370f524-0608-45c9-a4d4-efca3a6bbc4a" containerName="nova-metadata-log" Jan 20 17:27:54 crc kubenswrapper[4558]: I0120 17:27:54.997180 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0370f524-0608-45c9-a4d4-efca3a6bbc4a" containerName="nova-metadata-log" Jan 20 17:27:54 crc kubenswrapper[4558]: E0120 17:27:54.997214 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f59aa69c-6c42-41df-905e-29428d350637" containerName="nova-api-api" Jan 20 17:27:54 crc kubenswrapper[4558]: I0120 17:27:54.997220 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f59aa69c-6c42-41df-905e-29428d350637" containerName="nova-api-api" Jan 20 17:27:54 crc kubenswrapper[4558]: E0120 17:27:54.997237 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0370f524-0608-45c9-a4d4-efca3a6bbc4a" containerName="nova-metadata-metadata" Jan 20 17:27:54 crc kubenswrapper[4558]: I0120 17:27:54.997243 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0370f524-0608-45c9-a4d4-efca3a6bbc4a" containerName="nova-metadata-metadata" Jan 20 17:27:55 crc kubenswrapper[4558]: I0120 17:27:54.997437 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="0370f524-0608-45c9-a4d4-efca3a6bbc4a" containerName="nova-metadata-metadata" Jan 20 17:27:55 crc kubenswrapper[4558]: I0120 17:27:54.998518 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f59aa69c-6c42-41df-905e-29428d350637" containerName="nova-api-api" Jan 20 17:27:55 crc kubenswrapper[4558]: I0120 17:27:54.998538 4558 
memory_manager.go:354] "RemoveStaleState removing state" podUID="0370f524-0608-45c9-a4d4-efca3a6bbc4a" containerName="nova-metadata-log" Jan 20 17:27:55 crc kubenswrapper[4558]: I0120 17:27:54.998550 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f59aa69c-6c42-41df-905e-29428d350637" containerName="nova-api-log" Jan 20 17:27:55 crc kubenswrapper[4558]: I0120 17:27:55.003673 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:27:55 crc kubenswrapper[4558]: I0120 17:27:55.014926 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-internal-svc" Jan 20 17:27:55 crc kubenswrapper[4558]: I0120 17:27:55.014959 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-api-config-data" Jan 20 17:27:55 crc kubenswrapper[4558]: I0120 17:27:55.014984 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-public-svc" Jan 20 17:27:55 crc kubenswrapper[4558]: I0120 17:27:55.026114 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:27:55 crc kubenswrapper[4558]: I0120 17:27:55.033866 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:27:55 crc kubenswrapper[4558]: I0120 17:27:55.040849 4558 scope.go:117] "RemoveContainer" containerID="3bd82063429d21886e43702ff2750e30a9368e89bfcdb3e5dfdc7631a7eabf51" Jan 20 17:27:55 crc kubenswrapper[4558]: I0120 17:27:55.053508 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:27:55 crc kubenswrapper[4558]: I0120 17:27:55.061436 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:27:55 crc kubenswrapper[4558]: I0120 17:27:55.063358 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:27:55 crc kubenswrapper[4558]: I0120 17:27:55.066649 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-metadata-config-data" Jan 20 17:27:55 crc kubenswrapper[4558]: I0120 17:27:55.066880 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-metadata-internal-svc" Jan 20 17:27:55 crc kubenswrapper[4558]: I0120 17:27:55.068491 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:27:55 crc kubenswrapper[4558]: I0120 17:27:55.117995 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4430d3cb-0a1c-4266-9f69-cf8e817f1d3e-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"4430d3cb-0a1c-4266-9f69-cf8e817f1d3e\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:27:55 crc kubenswrapper[4558]: I0120 17:27:55.118348 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4430d3cb-0a1c-4266-9f69-cf8e817f1d3e-internal-tls-certs\") pod \"nova-api-0\" (UID: \"4430d3cb-0a1c-4266-9f69-cf8e817f1d3e\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:27:55 crc kubenswrapper[4558]: I0120 17:27:55.118390 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc4089c6-71ea-4503-b54c-18777fcc3c48-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"cc4089c6-71ea-4503-b54c-18777fcc3c48\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:27:55 crc kubenswrapper[4558]: I0120 17:27:55.118468 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc4089c6-71ea-4503-b54c-18777fcc3c48-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"cc4089c6-71ea-4503-b54c-18777fcc3c48\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:27:55 crc kubenswrapper[4558]: I0120 17:27:55.118558 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4430d3cb-0a1c-4266-9f69-cf8e817f1d3e-public-tls-certs\") pod \"nova-api-0\" (UID: \"4430d3cb-0a1c-4266-9f69-cf8e817f1d3e\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:27:55 crc kubenswrapper[4558]: I0120 17:27:55.118730 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cc4089c6-71ea-4503-b54c-18777fcc3c48-config-data\") pod \"nova-metadata-0\" (UID: \"cc4089c6-71ea-4503-b54c-18777fcc3c48\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:27:55 crc kubenswrapper[4558]: I0120 17:27:55.118802 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4430d3cb-0a1c-4266-9f69-cf8e817f1d3e-config-data\") pod \"nova-api-0\" (UID: \"4430d3cb-0a1c-4266-9f69-cf8e817f1d3e\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:27:55 crc kubenswrapper[4558]: I0120 17:27:55.118865 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7g4x5\" (UniqueName: 
\"kubernetes.io/projected/4430d3cb-0a1c-4266-9f69-cf8e817f1d3e-kube-api-access-7g4x5\") pod \"nova-api-0\" (UID: \"4430d3cb-0a1c-4266-9f69-cf8e817f1d3e\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:27:55 crc kubenswrapper[4558]: I0120 17:27:55.118905 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cc4089c6-71ea-4503-b54c-18777fcc3c48-logs\") pod \"nova-metadata-0\" (UID: \"cc4089c6-71ea-4503-b54c-18777fcc3c48\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:27:55 crc kubenswrapper[4558]: I0120 17:27:55.118935 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4430d3cb-0a1c-4266-9f69-cf8e817f1d3e-logs\") pod \"nova-api-0\" (UID: \"4430d3cb-0a1c-4266-9f69-cf8e817f1d3e\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:27:55 crc kubenswrapper[4558]: I0120 17:27:55.118965 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cl297\" (UniqueName: \"kubernetes.io/projected/cc4089c6-71ea-4503-b54c-18777fcc3c48-kube-api-access-cl297\") pod \"nova-metadata-0\" (UID: \"cc4089c6-71ea-4503-b54c-18777fcc3c48\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:27:55 crc kubenswrapper[4558]: I0120 17:27:55.219801 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7g4x5\" (UniqueName: \"kubernetes.io/projected/4430d3cb-0a1c-4266-9f69-cf8e817f1d3e-kube-api-access-7g4x5\") pod \"nova-api-0\" (UID: \"4430d3cb-0a1c-4266-9f69-cf8e817f1d3e\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:27:55 crc kubenswrapper[4558]: I0120 17:27:55.219844 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cc4089c6-71ea-4503-b54c-18777fcc3c48-logs\") pod \"nova-metadata-0\" (UID: \"cc4089c6-71ea-4503-b54c-18777fcc3c48\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:27:55 crc kubenswrapper[4558]: I0120 17:27:55.219871 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4430d3cb-0a1c-4266-9f69-cf8e817f1d3e-logs\") pod \"nova-api-0\" (UID: \"4430d3cb-0a1c-4266-9f69-cf8e817f1d3e\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:27:55 crc kubenswrapper[4558]: I0120 17:27:55.219894 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cl297\" (UniqueName: \"kubernetes.io/projected/cc4089c6-71ea-4503-b54c-18777fcc3c48-kube-api-access-cl297\") pod \"nova-metadata-0\" (UID: \"cc4089c6-71ea-4503-b54c-18777fcc3c48\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:27:55 crc kubenswrapper[4558]: I0120 17:27:55.219934 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4430d3cb-0a1c-4266-9f69-cf8e817f1d3e-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"4430d3cb-0a1c-4266-9f69-cf8e817f1d3e\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:27:55 crc kubenswrapper[4558]: I0120 17:27:55.219959 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4430d3cb-0a1c-4266-9f69-cf8e817f1d3e-internal-tls-certs\") pod \"nova-api-0\" (UID: \"4430d3cb-0a1c-4266-9f69-cf8e817f1d3e\") " pod="openstack-kuttl-tests/nova-api-0" Jan 
20 17:27:55 crc kubenswrapper[4558]: I0120 17:27:55.219983 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc4089c6-71ea-4503-b54c-18777fcc3c48-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"cc4089c6-71ea-4503-b54c-18777fcc3c48\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:27:55 crc kubenswrapper[4558]: I0120 17:27:55.220021 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc4089c6-71ea-4503-b54c-18777fcc3c48-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"cc4089c6-71ea-4503-b54c-18777fcc3c48\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:27:55 crc kubenswrapper[4558]: I0120 17:27:55.220060 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4430d3cb-0a1c-4266-9f69-cf8e817f1d3e-public-tls-certs\") pod \"nova-api-0\" (UID: \"4430d3cb-0a1c-4266-9f69-cf8e817f1d3e\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:27:55 crc kubenswrapper[4558]: I0120 17:27:55.220105 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cc4089c6-71ea-4503-b54c-18777fcc3c48-config-data\") pod \"nova-metadata-0\" (UID: \"cc4089c6-71ea-4503-b54c-18777fcc3c48\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:27:55 crc kubenswrapper[4558]: I0120 17:27:55.220136 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4430d3cb-0a1c-4266-9f69-cf8e817f1d3e-config-data\") pod \"nova-api-0\" (UID: \"4430d3cb-0a1c-4266-9f69-cf8e817f1d3e\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:27:55 crc kubenswrapper[4558]: I0120 17:27:55.222542 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cc4089c6-71ea-4503-b54c-18777fcc3c48-logs\") pod \"nova-metadata-0\" (UID: \"cc4089c6-71ea-4503-b54c-18777fcc3c48\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:27:55 crc kubenswrapper[4558]: I0120 17:27:55.222646 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4430d3cb-0a1c-4266-9f69-cf8e817f1d3e-logs\") pod \"nova-api-0\" (UID: \"4430d3cb-0a1c-4266-9f69-cf8e817f1d3e\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:27:55 crc kubenswrapper[4558]: I0120 17:27:55.225606 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc4089c6-71ea-4503-b54c-18777fcc3c48-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"cc4089c6-71ea-4503-b54c-18777fcc3c48\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:27:55 crc kubenswrapper[4558]: I0120 17:27:55.226826 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4430d3cb-0a1c-4266-9f69-cf8e817f1d3e-internal-tls-certs\") pod \"nova-api-0\" (UID: \"4430d3cb-0a1c-4266-9f69-cf8e817f1d3e\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:27:55 crc kubenswrapper[4558]: I0120 17:27:55.231191 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4430d3cb-0a1c-4266-9f69-cf8e817f1d3e-combined-ca-bundle\") pod \"nova-api-0\" 
(UID: \"4430d3cb-0a1c-4266-9f69-cf8e817f1d3e\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:27:55 crc kubenswrapper[4558]: I0120 17:27:55.231754 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4430d3cb-0a1c-4266-9f69-cf8e817f1d3e-config-data\") pod \"nova-api-0\" (UID: \"4430d3cb-0a1c-4266-9f69-cf8e817f1d3e\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:27:55 crc kubenswrapper[4558]: I0120 17:27:55.232504 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4430d3cb-0a1c-4266-9f69-cf8e817f1d3e-public-tls-certs\") pod \"nova-api-0\" (UID: \"4430d3cb-0a1c-4266-9f69-cf8e817f1d3e\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:27:55 crc kubenswrapper[4558]: I0120 17:27:55.233458 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc4089c6-71ea-4503-b54c-18777fcc3c48-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"cc4089c6-71ea-4503-b54c-18777fcc3c48\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:27:55 crc kubenswrapper[4558]: I0120 17:27:55.234508 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cc4089c6-71ea-4503-b54c-18777fcc3c48-config-data\") pod \"nova-metadata-0\" (UID: \"cc4089c6-71ea-4503-b54c-18777fcc3c48\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:27:55 crc kubenswrapper[4558]: I0120 17:27:55.236079 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cl297\" (UniqueName: \"kubernetes.io/projected/cc4089c6-71ea-4503-b54c-18777fcc3c48-kube-api-access-cl297\") pod \"nova-metadata-0\" (UID: \"cc4089c6-71ea-4503-b54c-18777fcc3c48\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:27:55 crc kubenswrapper[4558]: I0120 17:27:55.236233 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7g4x5\" (UniqueName: \"kubernetes.io/projected/4430d3cb-0a1c-4266-9f69-cf8e817f1d3e-kube-api-access-7g4x5\") pod \"nova-api-0\" (UID: \"4430d3cb-0a1c-4266-9f69-cf8e817f1d3e\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:27:55 crc kubenswrapper[4558]: I0120 17:27:55.342293 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:27:55 crc kubenswrapper[4558]: I0120 17:27:55.416731 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:27:55 crc kubenswrapper[4558]: I0120 17:27:55.813568 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:27:56 crc kubenswrapper[4558]: I0120 17:27:56.001106 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:27:56 crc kubenswrapper[4558]: I0120 17:27:56.008127 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"4430d3cb-0a1c-4266-9f69-cf8e817f1d3e","Type":"ContainerStarted","Data":"37b985b06b28c8541149da36506253933f4d0bb97118cf9caae6c0277577b2c6"} Jan 20 17:27:56 crc kubenswrapper[4558]: I0120 17:27:56.581361 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0370f524-0608-45c9-a4d4-efca3a6bbc4a" path="/var/lib/kubelet/pods/0370f524-0608-45c9-a4d4-efca3a6bbc4a/volumes" Jan 20 17:27:56 crc kubenswrapper[4558]: I0120 17:27:56.582452 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f59aa69c-6c42-41df-905e-29428d350637" path="/var/lib/kubelet/pods/f59aa69c-6c42-41df-905e-29428d350637/volumes" Jan 20 17:27:56 crc kubenswrapper[4558]: I0120 17:27:56.856853 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:27:56 crc kubenswrapper[4558]: I0120 17:27:56.970204 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7c7138fb-59b8-47c0-a431-023fa79404f1-scripts\") pod \"7c7138fb-59b8-47c0-a431-023fa79404f1\" (UID: \"7c7138fb-59b8-47c0-a431-023fa79404f1\") " Jan 20 17:27:56 crc kubenswrapper[4558]: I0120 17:27:56.970335 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9mbjv\" (UniqueName: \"kubernetes.io/projected/7c7138fb-59b8-47c0-a431-023fa79404f1-kube-api-access-9mbjv\") pod \"7c7138fb-59b8-47c0-a431-023fa79404f1\" (UID: \"7c7138fb-59b8-47c0-a431-023fa79404f1\") " Jan 20 17:27:56 crc kubenswrapper[4558]: I0120 17:27:56.970445 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7c7138fb-59b8-47c0-a431-023fa79404f1-config-data-custom\") pod \"7c7138fb-59b8-47c0-a431-023fa79404f1\" (UID: \"7c7138fb-59b8-47c0-a431-023fa79404f1\") " Jan 20 17:27:56 crc kubenswrapper[4558]: I0120 17:27:56.970504 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7c7138fb-59b8-47c0-a431-023fa79404f1-etc-machine-id\") pod \"7c7138fb-59b8-47c0-a431-023fa79404f1\" (UID: \"7c7138fb-59b8-47c0-a431-023fa79404f1\") " Jan 20 17:27:56 crc kubenswrapper[4558]: I0120 17:27:56.970582 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7c7138fb-59b8-47c0-a431-023fa79404f1-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "7c7138fb-59b8-47c0-a431-023fa79404f1" (UID: "7c7138fb-59b8-47c0-a431-023fa79404f1"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:27:56 crc kubenswrapper[4558]: I0120 17:27:56.970651 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c7138fb-59b8-47c0-a431-023fa79404f1-config-data\") pod \"7c7138fb-59b8-47c0-a431-023fa79404f1\" (UID: \"7c7138fb-59b8-47c0-a431-023fa79404f1\") " Jan 20 17:27:56 crc kubenswrapper[4558]: I0120 17:27:56.970971 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c7138fb-59b8-47c0-a431-023fa79404f1-combined-ca-bundle\") pod \"7c7138fb-59b8-47c0-a431-023fa79404f1\" (UID: \"7c7138fb-59b8-47c0-a431-023fa79404f1\") " Jan 20 17:27:56 crc kubenswrapper[4558]: I0120 17:27:56.971975 4558 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7c7138fb-59b8-47c0-a431-023fa79404f1-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:56 crc kubenswrapper[4558]: I0120 17:27:56.975976 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7c7138fb-59b8-47c0-a431-023fa79404f1-scripts" (OuterVolumeSpecName: "scripts") pod "7c7138fb-59b8-47c0-a431-023fa79404f1" (UID: "7c7138fb-59b8-47c0-a431-023fa79404f1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:56 crc kubenswrapper[4558]: I0120 17:27:56.975981 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7c7138fb-59b8-47c0-a431-023fa79404f1-kube-api-access-9mbjv" (OuterVolumeSpecName: "kube-api-access-9mbjv") pod "7c7138fb-59b8-47c0-a431-023fa79404f1" (UID: "7c7138fb-59b8-47c0-a431-023fa79404f1"). InnerVolumeSpecName "kube-api-access-9mbjv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:27:56 crc kubenswrapper[4558]: I0120 17:27:56.976091 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7c7138fb-59b8-47c0-a431-023fa79404f1-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "7c7138fb-59b8-47c0-a431-023fa79404f1" (UID: "7c7138fb-59b8-47c0-a431-023fa79404f1"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.014903 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7c7138fb-59b8-47c0-a431-023fa79404f1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7c7138fb-59b8-47c0-a431-023fa79404f1" (UID: "7c7138fb-59b8-47c0-a431-023fa79404f1"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.036894 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"cc4089c6-71ea-4503-b54c-18777fcc3c48","Type":"ContainerStarted","Data":"ddfb78eb686b10db6949511eb53f1d12111dd9665adfb113b40c64dcc8f80745"} Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.036938 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"cc4089c6-71ea-4503-b54c-18777fcc3c48","Type":"ContainerStarted","Data":"ca2fe5f683448e1b2e71f1f58bff793b5e9a3b0d6f6f324c37f3cdcce1707435"} Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.036949 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"cc4089c6-71ea-4503-b54c-18777fcc3c48","Type":"ContainerStarted","Data":"998152c0b9388437fe908745b3e41450a3c684cee34feea370f0f29a644e8aa7"} Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.042353 4558 generic.go:334] "Generic (PLEG): container finished" podID="7c7138fb-59b8-47c0-a431-023fa79404f1" containerID="1037b028087ec07b7d9badbde18841aeb9f38ee0fdee885a7163230235716a60" exitCode=137 Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.042399 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"7c7138fb-59b8-47c0-a431-023fa79404f1","Type":"ContainerDied","Data":"1037b028087ec07b7d9badbde18841aeb9f38ee0fdee885a7163230235716a60"} Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.042419 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"7c7138fb-59b8-47c0-a431-023fa79404f1","Type":"ContainerDied","Data":"b5d4c7b2fd5aaea450b495d6eb85db94cb27143a6d1814862eae36a4d86ed3f9"} Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.042437 4558 scope.go:117] "RemoveContainer" containerID="1037b028087ec07b7d9badbde18841aeb9f38ee0fdee885a7163230235716a60" Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.042522 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.044305 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"4430d3cb-0a1c-4266-9f69-cf8e817f1d3e","Type":"ContainerStarted","Data":"fccdd43e49b8880beb3f4193e0e2a09b0c9bcb297f472c9a13a4fc7709889c73"} Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.044329 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"4430d3cb-0a1c-4266-9f69-cf8e817f1d3e","Type":"ContainerStarted","Data":"01f067cab301938980d9e1ee9c68462c017ce912a5f70f653badf838dae20a53"} Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.046116 4558 generic.go:334] "Generic (PLEG): container finished" podID="467e28b1-5dde-44cb-9ef4-f81980c3d999" containerID="50e04d94313c300a1bbfb5aa67197781874c448647ec0443d12262cb9d1314e4" exitCode=0 Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.046173 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-5jhz5" event={"ID":"467e28b1-5dde-44cb-9ef4-f81980c3d999","Type":"ContainerDied","Data":"50e04d94313c300a1bbfb5aa67197781874c448647ec0443d12262cb9d1314e4"} Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.050750 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7c7138fb-59b8-47c0-a431-023fa79404f1-config-data" (OuterVolumeSpecName: "config-data") pod "7c7138fb-59b8-47c0-a431-023fa79404f1" (UID: "7c7138fb-59b8-47c0-a431-023fa79404f1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.058191 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-metadata-0" podStartSLOduration=3.058180281 podStartE2EDuration="3.058180281s" podCreationTimestamp="2026-01-20 17:27:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:27:57.055097164 +0000 UTC m=+2770.815435120" watchObservedRunningTime="2026-01-20 17:27:57.058180281 +0000 UTC m=+2770.818518258" Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.072837 4558 scope.go:117] "RemoveContainer" containerID="b4e76db6b8f078c1dc089ec0784ae087f970f12f50927744b7cf7a30c75eb1e1" Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.074198 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c7138fb-59b8-47c0-a431-023fa79404f1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.074242 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7c7138fb-59b8-47c0-a431-023fa79404f1-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.074259 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9mbjv\" (UniqueName: \"kubernetes.io/projected/7c7138fb-59b8-47c0-a431-023fa79404f1-kube-api-access-9mbjv\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.074276 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7c7138fb-59b8-47c0-a431-023fa79404f1-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:57 crc 
kubenswrapper[4558]: I0120 17:27:57.074843 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c7138fb-59b8-47c0-a431-023fa79404f1-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.236717 4558 scope.go:117] "RemoveContainer" containerID="98d4b6952becbe3b78b67cf6ee7b8b7cc7435eda4e3a11dead4ce7aba086ca2a" Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.262705 4558 scope.go:117] "RemoveContainer" containerID="1037b028087ec07b7d9badbde18841aeb9f38ee0fdee885a7163230235716a60" Jan 20 17:27:57 crc kubenswrapper[4558]: E0120 17:27:57.263231 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1037b028087ec07b7d9badbde18841aeb9f38ee0fdee885a7163230235716a60\": container with ID starting with 1037b028087ec07b7d9badbde18841aeb9f38ee0fdee885a7163230235716a60 not found: ID does not exist" containerID="1037b028087ec07b7d9badbde18841aeb9f38ee0fdee885a7163230235716a60" Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.263288 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1037b028087ec07b7d9badbde18841aeb9f38ee0fdee885a7163230235716a60"} err="failed to get container status \"1037b028087ec07b7d9badbde18841aeb9f38ee0fdee885a7163230235716a60\": rpc error: code = NotFound desc = could not find container \"1037b028087ec07b7d9badbde18841aeb9f38ee0fdee885a7163230235716a60\": container with ID starting with 1037b028087ec07b7d9badbde18841aeb9f38ee0fdee885a7163230235716a60 not found: ID does not exist" Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.263330 4558 scope.go:117] "RemoveContainer" containerID="b4e76db6b8f078c1dc089ec0784ae087f970f12f50927744b7cf7a30c75eb1e1" Jan 20 17:27:57 crc kubenswrapper[4558]: E0120 17:27:57.264048 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b4e76db6b8f078c1dc089ec0784ae087f970f12f50927744b7cf7a30c75eb1e1\": container with ID starting with b4e76db6b8f078c1dc089ec0784ae087f970f12f50927744b7cf7a30c75eb1e1 not found: ID does not exist" containerID="b4e76db6b8f078c1dc089ec0784ae087f970f12f50927744b7cf7a30c75eb1e1" Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.264115 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b4e76db6b8f078c1dc089ec0784ae087f970f12f50927744b7cf7a30c75eb1e1"} err="failed to get container status \"b4e76db6b8f078c1dc089ec0784ae087f970f12f50927744b7cf7a30c75eb1e1\": rpc error: code = NotFound desc = could not find container \"b4e76db6b8f078c1dc089ec0784ae087f970f12f50927744b7cf7a30c75eb1e1\": container with ID starting with b4e76db6b8f078c1dc089ec0784ae087f970f12f50927744b7cf7a30c75eb1e1 not found: ID does not exist" Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.264182 4558 scope.go:117] "RemoveContainer" containerID="98d4b6952becbe3b78b67cf6ee7b8b7cc7435eda4e3a11dead4ce7aba086ca2a" Jan 20 17:27:57 crc kubenswrapper[4558]: E0120 17:27:57.264762 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"98d4b6952becbe3b78b67cf6ee7b8b7cc7435eda4e3a11dead4ce7aba086ca2a\": container with ID starting with 98d4b6952becbe3b78b67cf6ee7b8b7cc7435eda4e3a11dead4ce7aba086ca2a not found: ID does not exist" containerID="98d4b6952becbe3b78b67cf6ee7b8b7cc7435eda4e3a11dead4ce7aba086ca2a" Jan 20 17:27:57 
crc kubenswrapper[4558]: I0120 17:27:57.264844 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"98d4b6952becbe3b78b67cf6ee7b8b7cc7435eda4e3a11dead4ce7aba086ca2a"} err="failed to get container status \"98d4b6952becbe3b78b67cf6ee7b8b7cc7435eda4e3a11dead4ce7aba086ca2a\": rpc error: code = NotFound desc = could not find container \"98d4b6952becbe3b78b67cf6ee7b8b7cc7435eda4e3a11dead4ce7aba086ca2a\": container with ID starting with 98d4b6952becbe3b78b67cf6ee7b8b7cc7435eda4e3a11dead4ce7aba086ca2a not found: ID does not exist" Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.371599 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-api-0" podStartSLOduration=3.371585607 podStartE2EDuration="3.371585607s" podCreationTimestamp="2026-01-20 17:27:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:27:57.115037186 +0000 UTC m=+2770.875375143" watchObservedRunningTime="2026-01-20 17:27:57.371585607 +0000 UTC m=+2771.131923574" Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.378069 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.383977 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.392717 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:27:57 crc kubenswrapper[4558]: E0120 17:27:57.393111 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c7138fb-59b8-47c0-a431-023fa79404f1" containerName="cinder-scheduler" Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.393131 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c7138fb-59b8-47c0-a431-023fa79404f1" containerName="cinder-scheduler" Jan 20 17:27:57 crc kubenswrapper[4558]: E0120 17:27:57.393148 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c7138fb-59b8-47c0-a431-023fa79404f1" containerName="cinder-scheduler" Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.393154 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c7138fb-59b8-47c0-a431-023fa79404f1" containerName="cinder-scheduler" Jan 20 17:27:57 crc kubenswrapper[4558]: E0120 17:27:57.393180 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c7138fb-59b8-47c0-a431-023fa79404f1" containerName="probe" Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.393187 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c7138fb-59b8-47c0-a431-023fa79404f1" containerName="probe" Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.393395 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="7c7138fb-59b8-47c0-a431-023fa79404f1" containerName="probe" Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.393432 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="7c7138fb-59b8-47c0-a431-023fa79404f1" containerName="cinder-scheduler" Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.393444 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="7c7138fb-59b8-47c0-a431-023fa79404f1" containerName="cinder-scheduler" Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.394421 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.396262 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-scheduler-config-data" Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.407865 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.481072 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c1afb831-f3e4-4356-ab86-713eb0beca39-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"c1afb831-f3e4-4356-ab86-713eb0beca39\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.481120 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-config-data\") pod \"cinder-scheduler-0\" (UID: \"c1afb831-f3e4-4356-ab86-713eb0beca39\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.481381 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cdj6z\" (UniqueName: \"kubernetes.io/projected/c1afb831-f3e4-4356-ab86-713eb0beca39-kube-api-access-cdj6z\") pod \"cinder-scheduler-0\" (UID: \"c1afb831-f3e4-4356-ab86-713eb0beca39\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.481497 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"c1afb831-f3e4-4356-ab86-713eb0beca39\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.481618 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-scripts\") pod \"cinder-scheduler-0\" (UID: \"c1afb831-f3e4-4356-ab86-713eb0beca39\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.481705 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"c1afb831-f3e4-4356-ab86-713eb0beca39\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.486283 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.486601 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/openstackclient" podUID="4d845835-77d4-4d8a-9fad-0286ad5291ae" containerName="openstackclient" containerID="cri-o://e33ca27067cb626a71e4e70acb048cf75b1fdf4f714c880df6a27aa61155fa45" gracePeriod=2 Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.493875 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 
17:27:57.505805 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:27:57 crc kubenswrapper[4558]: E0120 17:27:57.506191 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d845835-77d4-4d8a-9fad-0286ad5291ae" containerName="openstackclient" Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.506211 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d845835-77d4-4d8a-9fad-0286ad5291ae" containerName="openstackclient" Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.506436 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="4d845835-77d4-4d8a-9fad-0286ad5291ae" containerName="openstackclient" Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.507051 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstackclient" Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.511083 4558 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack-kuttl-tests/openstackclient" oldPodUID="4d845835-77d4-4d8a-9fad-0286ad5291ae" podUID="e383e708-a471-4904-bfc7-eead6a5c76dc" Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.516808 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.583533 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p6hdm\" (UniqueName: \"kubernetes.io/projected/e383e708-a471-4904-bfc7-eead6a5c76dc-kube-api-access-p6hdm\") pod \"openstackclient\" (UID: \"e383e708-a471-4904-bfc7-eead6a5c76dc\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.583592 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c1afb831-f3e4-4356-ab86-713eb0beca39-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"c1afb831-f3e4-4356-ab86-713eb0beca39\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.583630 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-config-data\") pod \"cinder-scheduler-0\" (UID: \"c1afb831-f3e4-4356-ab86-713eb0beca39\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.584096 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cdj6z\" (UniqueName: \"kubernetes.io/projected/c1afb831-f3e4-4356-ab86-713eb0beca39-kube-api-access-cdj6z\") pod \"cinder-scheduler-0\" (UID: \"c1afb831-f3e4-4356-ab86-713eb0beca39\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.584314 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c1afb831-f3e4-4356-ab86-713eb0beca39-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"c1afb831-f3e4-4356-ab86-713eb0beca39\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.584212 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/e383e708-a471-4904-bfc7-eead6a5c76dc-combined-ca-bundle\") pod \"openstackclient\" (UID: \"e383e708-a471-4904-bfc7-eead6a5c76dc\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.584508 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"c1afb831-f3e4-4356-ab86-713eb0beca39\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.586463 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-scripts\") pod \"cinder-scheduler-0\" (UID: \"c1afb831-f3e4-4356-ab86-713eb0beca39\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.586632 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"c1afb831-f3e4-4356-ab86-713eb0beca39\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.586681 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/e383e708-a471-4904-bfc7-eead6a5c76dc-openstack-config-secret\") pod \"openstackclient\" (UID: \"e383e708-a471-4904-bfc7-eead6a5c76dc\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.586716 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/e383e708-a471-4904-bfc7-eead6a5c76dc-openstack-config\") pod \"openstackclient\" (UID: \"e383e708-a471-4904-bfc7-eead6a5c76dc\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.592561 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-scripts\") pod \"cinder-scheduler-0\" (UID: \"c1afb831-f3e4-4356-ab86-713eb0beca39\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.593906 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"c1afb831-f3e4-4356-ab86-713eb0beca39\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.594813 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"c1afb831-f3e4-4356-ab86-713eb0beca39\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.598077 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-config-data\") pod \"cinder-scheduler-0\" (UID: 
\"c1afb831-f3e4-4356-ab86-713eb0beca39\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.603127 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cdj6z\" (UniqueName: \"kubernetes.io/projected/c1afb831-f3e4-4356-ab86-713eb0beca39-kube-api-access-cdj6z\") pod \"cinder-scheduler-0\" (UID: \"c1afb831-f3e4-4356-ab86-713eb0beca39\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.687897 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/e383e708-a471-4904-bfc7-eead6a5c76dc-openstack-config-secret\") pod \"openstackclient\" (UID: \"e383e708-a471-4904-bfc7-eead6a5c76dc\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.687942 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/e383e708-a471-4904-bfc7-eead6a5c76dc-openstack-config\") pod \"openstackclient\" (UID: \"e383e708-a471-4904-bfc7-eead6a5c76dc\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.688014 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p6hdm\" (UniqueName: \"kubernetes.io/projected/e383e708-a471-4904-bfc7-eead6a5c76dc-kube-api-access-p6hdm\") pod \"openstackclient\" (UID: \"e383e708-a471-4904-bfc7-eead6a5c76dc\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.688100 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e383e708-a471-4904-bfc7-eead6a5c76dc-combined-ca-bundle\") pod \"openstackclient\" (UID: \"e383e708-a471-4904-bfc7-eead6a5c76dc\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.689270 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/e383e708-a471-4904-bfc7-eead6a5c76dc-openstack-config\") pod \"openstackclient\" (UID: \"e383e708-a471-4904-bfc7-eead6a5c76dc\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.691396 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e383e708-a471-4904-bfc7-eead6a5c76dc-combined-ca-bundle\") pod \"openstackclient\" (UID: \"e383e708-a471-4904-bfc7-eead6a5c76dc\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.691619 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/e383e708-a471-4904-bfc7-eead6a5c76dc-openstack-config-secret\") pod \"openstackclient\" (UID: \"e383e708-a471-4904-bfc7-eead6a5c76dc\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.702562 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p6hdm\" (UniqueName: \"kubernetes.io/projected/e383e708-a471-4904-bfc7-eead6a5c76dc-kube-api-access-p6hdm\") pod \"openstackclient\" (UID: \"e383e708-a471-4904-bfc7-eead6a5c76dc\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:27:57 crc 
kubenswrapper[4558]: I0120 17:27:57.713514 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:27:57 crc kubenswrapper[4558]: I0120 17:27:57.821971 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstackclient" Jan 20 17:27:58 crc kubenswrapper[4558]: I0120 17:27:58.087229 4558 generic.go:334] "Generic (PLEG): container finished" podID="72394a99-d879-415b-9ae9-21165451ac70" containerID="57a221fa6c7f95ec5c64257c235ed54b2e2bd0a8d13ecc18c3f5f0a4dce5307b" exitCode=0 Jan 20 17:27:58 crc kubenswrapper[4558]: I0120 17:27:58.087354 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-6866498598-2wxkp" event={"ID":"72394a99-d879-415b-9ae9-21165451ac70","Type":"ContainerDied","Data":"57a221fa6c7f95ec5c64257c235ed54b2e2bd0a8d13ecc18c3f5f0a4dce5307b"} Jan 20 17:27:58 crc kubenswrapper[4558]: I0120 17:27:58.096899 4558 generic.go:334] "Generic (PLEG): container finished" podID="aeb478ae-4d88-4f64-be68-ccebfe589ff1" containerID="08b72bd01a759efc57d52868101c30d8254f92693dba81d382bc14dd5867a057" exitCode=0 Jan 20 17:27:58 crc kubenswrapper[4558]: I0120 17:27:58.097018 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-4kxm5" event={"ID":"aeb478ae-4d88-4f64-be68-ccebfe589ff1","Type":"ContainerDied","Data":"08b72bd01a759efc57d52868101c30d8254f92693dba81d382bc14dd5867a057"} Jan 20 17:27:58 crc kubenswrapper[4558]: I0120 17:27:58.177633 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:27:58 crc kubenswrapper[4558]: I0120 17:27:58.239339 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:27:58 crc kubenswrapper[4558]: I0120 17:27:58.414019 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:27:58 crc kubenswrapper[4558]: W0120 17:27:58.430302 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode383e708_a471_4904_bfc7_eead6a5c76dc.slice/crio-bf107416f9c82b93a43f94e999290c66a3ddfb88778eddd62960ea850ad5aecb WatchSource:0}: Error finding container bf107416f9c82b93a43f94e999290c66a3ddfb88778eddd62960ea850ad5aecb: Status 404 returned error can't find the container with id bf107416f9c82b93a43f94e999290c66a3ddfb88778eddd62960ea850ad5aecb Jan 20 17:27:58 crc kubenswrapper[4558]: I0120 17:27:58.439483 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-6866498598-2wxkp" Jan 20 17:27:58 crc kubenswrapper[4558]: I0120 17:27:58.447925 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-5jhz5" Jan 20 17:27:58 crc kubenswrapper[4558]: I0120 17:27:58.507519 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zw8pz\" (UniqueName: \"kubernetes.io/projected/467e28b1-5dde-44cb-9ef4-f81980c3d999-kube-api-access-zw8pz\") pod \"467e28b1-5dde-44cb-9ef4-f81980c3d999\" (UID: \"467e28b1-5dde-44cb-9ef4-f81980c3d999\") " Jan 20 17:27:58 crc kubenswrapper[4558]: I0120 17:27:58.507573 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/467e28b1-5dde-44cb-9ef4-f81980c3d999-config-data\") pod \"467e28b1-5dde-44cb-9ef4-f81980c3d999\" (UID: \"467e28b1-5dde-44cb-9ef4-f81980c3d999\") " Jan 20 17:27:58 crc kubenswrapper[4558]: I0120 17:27:58.507620 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/72394a99-d879-415b-9ae9-21165451ac70-fernet-keys\") pod \"72394a99-d879-415b-9ae9-21165451ac70\" (UID: \"72394a99-d879-415b-9ae9-21165451ac70\") " Jan 20 17:27:58 crc kubenswrapper[4558]: I0120 17:27:58.507735 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/72394a99-d879-415b-9ae9-21165451ac70-credential-keys\") pod \"72394a99-d879-415b-9ae9-21165451ac70\" (UID: \"72394a99-d879-415b-9ae9-21165451ac70\") " Jan 20 17:27:58 crc kubenswrapper[4558]: I0120 17:27:58.507837 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72394a99-d879-415b-9ae9-21165451ac70-combined-ca-bundle\") pod \"72394a99-d879-415b-9ae9-21165451ac70\" (UID: \"72394a99-d879-415b-9ae9-21165451ac70\") " Jan 20 17:27:58 crc kubenswrapper[4558]: I0120 17:27:58.507857 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zlr65\" (UniqueName: \"kubernetes.io/projected/72394a99-d879-415b-9ae9-21165451ac70-kube-api-access-zlr65\") pod \"72394a99-d879-415b-9ae9-21165451ac70\" (UID: \"72394a99-d879-415b-9ae9-21165451ac70\") " Jan 20 17:27:58 crc kubenswrapper[4558]: I0120 17:27:58.507927 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/72394a99-d879-415b-9ae9-21165451ac70-config-data\") pod \"72394a99-d879-415b-9ae9-21165451ac70\" (UID: \"72394a99-d879-415b-9ae9-21165451ac70\") " Jan 20 17:27:58 crc kubenswrapper[4558]: I0120 17:27:58.507948 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/72394a99-d879-415b-9ae9-21165451ac70-internal-tls-certs\") pod \"72394a99-d879-415b-9ae9-21165451ac70\" (UID: \"72394a99-d879-415b-9ae9-21165451ac70\") " Jan 20 17:27:58 crc kubenswrapper[4558]: I0120 17:27:58.507971 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/72394a99-d879-415b-9ae9-21165451ac70-scripts\") pod \"72394a99-d879-415b-9ae9-21165451ac70\" (UID: \"72394a99-d879-415b-9ae9-21165451ac70\") " Jan 20 17:27:58 crc kubenswrapper[4558]: I0120 17:27:58.508030 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/72394a99-d879-415b-9ae9-21165451ac70-public-tls-certs\") pod 
\"72394a99-d879-415b-9ae9-21165451ac70\" (UID: \"72394a99-d879-415b-9ae9-21165451ac70\") " Jan 20 17:27:58 crc kubenswrapper[4558]: I0120 17:27:58.508114 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/467e28b1-5dde-44cb-9ef4-f81980c3d999-scripts\") pod \"467e28b1-5dde-44cb-9ef4-f81980c3d999\" (UID: \"467e28b1-5dde-44cb-9ef4-f81980c3d999\") " Jan 20 17:27:58 crc kubenswrapper[4558]: I0120 17:27:58.508157 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/467e28b1-5dde-44cb-9ef4-f81980c3d999-combined-ca-bundle\") pod \"467e28b1-5dde-44cb-9ef4-f81980c3d999\" (UID: \"467e28b1-5dde-44cb-9ef4-f81980c3d999\") " Jan 20 17:27:58 crc kubenswrapper[4558]: I0120 17:27:58.511861 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/72394a99-d879-415b-9ae9-21165451ac70-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "72394a99-d879-415b-9ae9-21165451ac70" (UID: "72394a99-d879-415b-9ae9-21165451ac70"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:58 crc kubenswrapper[4558]: I0120 17:27:58.514950 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/467e28b1-5dde-44cb-9ef4-f81980c3d999-scripts" (OuterVolumeSpecName: "scripts") pod "467e28b1-5dde-44cb-9ef4-f81980c3d999" (UID: "467e28b1-5dde-44cb-9ef4-f81980c3d999"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:58 crc kubenswrapper[4558]: I0120 17:27:58.515277 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/72394a99-d879-415b-9ae9-21165451ac70-scripts" (OuterVolumeSpecName: "scripts") pod "72394a99-d879-415b-9ae9-21165451ac70" (UID: "72394a99-d879-415b-9ae9-21165451ac70"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:58 crc kubenswrapper[4558]: I0120 17:27:58.517645 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/72394a99-d879-415b-9ae9-21165451ac70-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "72394a99-d879-415b-9ae9-21165451ac70" (UID: "72394a99-d879-415b-9ae9-21165451ac70"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:58 crc kubenswrapper[4558]: I0120 17:27:58.517791 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/72394a99-d879-415b-9ae9-21165451ac70-kube-api-access-zlr65" (OuterVolumeSpecName: "kube-api-access-zlr65") pod "72394a99-d879-415b-9ae9-21165451ac70" (UID: "72394a99-d879-415b-9ae9-21165451ac70"). InnerVolumeSpecName "kube-api-access-zlr65". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:27:58 crc kubenswrapper[4558]: I0120 17:27:58.521253 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/467e28b1-5dde-44cb-9ef4-f81980c3d999-kube-api-access-zw8pz" (OuterVolumeSpecName: "kube-api-access-zw8pz") pod "467e28b1-5dde-44cb-9ef4-f81980c3d999" (UID: "467e28b1-5dde-44cb-9ef4-f81980c3d999"). InnerVolumeSpecName "kube-api-access-zw8pz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:27:58 crc kubenswrapper[4558]: I0120 17:27:58.544040 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/467e28b1-5dde-44cb-9ef4-f81980c3d999-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "467e28b1-5dde-44cb-9ef4-f81980c3d999" (UID: "467e28b1-5dde-44cb-9ef4-f81980c3d999"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:58 crc kubenswrapper[4558]: I0120 17:27:58.545901 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/467e28b1-5dde-44cb-9ef4-f81980c3d999-config-data" (OuterVolumeSpecName: "config-data") pod "467e28b1-5dde-44cb-9ef4-f81980c3d999" (UID: "467e28b1-5dde-44cb-9ef4-f81980c3d999"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:58 crc kubenswrapper[4558]: I0120 17:27:58.548643 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/72394a99-d879-415b-9ae9-21165451ac70-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "72394a99-d879-415b-9ae9-21165451ac70" (UID: "72394a99-d879-415b-9ae9-21165451ac70"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:58 crc kubenswrapper[4558]: I0120 17:27:58.550000 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/72394a99-d879-415b-9ae9-21165451ac70-config-data" (OuterVolumeSpecName: "config-data") pod "72394a99-d879-415b-9ae9-21165451ac70" (UID: "72394a99-d879-415b-9ae9-21165451ac70"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:58 crc kubenswrapper[4558]: I0120 17:27:58.561882 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/72394a99-d879-415b-9ae9-21165451ac70-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "72394a99-d879-415b-9ae9-21165451ac70" (UID: "72394a99-d879-415b-9ae9-21165451ac70"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:58 crc kubenswrapper[4558]: I0120 17:27:58.570122 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/72394a99-d879-415b-9ae9-21165451ac70-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "72394a99-d879-415b-9ae9-21165451ac70" (UID: "72394a99-d879-415b-9ae9-21165451ac70"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:27:58 crc kubenswrapper[4558]: I0120 17:27:58.593102 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7c7138fb-59b8-47c0-a431-023fa79404f1" path="/var/lib/kubelet/pods/7c7138fb-59b8-47c0-a431-023fa79404f1/volumes" Jan 20 17:27:58 crc kubenswrapper[4558]: I0120 17:27:58.610009 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/467e28b1-5dde-44cb-9ef4-f81980c3d999-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:58 crc kubenswrapper[4558]: I0120 17:27:58.610043 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/467e28b1-5dde-44cb-9ef4-f81980c3d999-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:58 crc kubenswrapper[4558]: I0120 17:27:58.610060 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zw8pz\" (UniqueName: \"kubernetes.io/projected/467e28b1-5dde-44cb-9ef4-f81980c3d999-kube-api-access-zw8pz\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:58 crc kubenswrapper[4558]: I0120 17:27:58.610071 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/467e28b1-5dde-44cb-9ef4-f81980c3d999-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:58 crc kubenswrapper[4558]: I0120 17:27:58.610081 4558 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/72394a99-d879-415b-9ae9-21165451ac70-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:58 crc kubenswrapper[4558]: I0120 17:27:58.610090 4558 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/72394a99-d879-415b-9ae9-21165451ac70-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:58 crc kubenswrapper[4558]: I0120 17:27:58.610099 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72394a99-d879-415b-9ae9-21165451ac70-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:58 crc kubenswrapper[4558]: I0120 17:27:58.610109 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zlr65\" (UniqueName: \"kubernetes.io/projected/72394a99-d879-415b-9ae9-21165451ac70-kube-api-access-zlr65\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:58 crc kubenswrapper[4558]: I0120 17:27:58.610118 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/72394a99-d879-415b-9ae9-21165451ac70-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:58 crc kubenswrapper[4558]: I0120 17:27:58.610128 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/72394a99-d879-415b-9ae9-21165451ac70-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:58 crc kubenswrapper[4558]: I0120 17:27:58.610137 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/72394a99-d879-415b-9ae9-21165451ac70-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:58 crc kubenswrapper[4558]: I0120 17:27:58.610145 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/72394a99-d879-415b-9ae9-21165451ac70-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 
17:27:59.011230 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/swift-proxy-5f7fb7b-s2dxn"] Jan 20 17:27:59 crc kubenswrapper[4558]: E0120 17:27:59.011972 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="467e28b1-5dde-44cb-9ef4-f81980c3d999" containerName="nova-manage" Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.011991 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="467e28b1-5dde-44cb-9ef4-f81980c3d999" containerName="nova-manage" Jan 20 17:27:59 crc kubenswrapper[4558]: E0120 17:27:59.012011 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72394a99-d879-415b-9ae9-21165451ac70" containerName="keystone-api" Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.012017 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="72394a99-d879-415b-9ae9-21165451ac70" containerName="keystone-api" Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.012290 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="72394a99-d879-415b-9ae9-21165451ac70" containerName="keystone-api" Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.012314 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="467e28b1-5dde-44cb-9ef4-f81980c3d999" containerName="nova-manage" Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.013330 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-proxy-5f7fb7b-s2dxn" Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.020104 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-combined-ca-bundle\") pod \"swift-proxy-5f7fb7b-s2dxn\" (UID: \"4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5\") " pod="openstack-kuttl-tests/swift-proxy-5f7fb7b-s2dxn" Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.020152 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-internal-tls-certs\") pod \"swift-proxy-5f7fb7b-s2dxn\" (UID: \"4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5\") " pod="openstack-kuttl-tests/swift-proxy-5f7fb7b-s2dxn" Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.020188 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jhww5\" (UniqueName: \"kubernetes.io/projected/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-kube-api-access-jhww5\") pod \"swift-proxy-5f7fb7b-s2dxn\" (UID: \"4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5\") " pod="openstack-kuttl-tests/swift-proxy-5f7fb7b-s2dxn" Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.020203 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-etc-swift\") pod \"swift-proxy-5f7fb7b-s2dxn\" (UID: \"4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5\") " pod="openstack-kuttl-tests/swift-proxy-5f7fb7b-s2dxn" Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.020404 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-config-data\") pod \"swift-proxy-5f7fb7b-s2dxn\" (UID: \"4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5\") " 
pod="openstack-kuttl-tests/swift-proxy-5f7fb7b-s2dxn" Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.020471 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-public-tls-certs\") pod \"swift-proxy-5f7fb7b-s2dxn\" (UID: \"4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5\") " pod="openstack-kuttl-tests/swift-proxy-5f7fb7b-s2dxn" Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.020563 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-log-httpd\") pod \"swift-proxy-5f7fb7b-s2dxn\" (UID: \"4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5\") " pod="openstack-kuttl-tests/swift-proxy-5f7fb7b-s2dxn" Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.020598 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-run-httpd\") pod \"swift-proxy-5f7fb7b-s2dxn\" (UID: \"4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5\") " pod="openstack-kuttl-tests/swift-proxy-5f7fb7b-s2dxn" Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.028701 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-proxy-5f7fb7b-s2dxn"] Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.122172 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-log-httpd\") pod \"swift-proxy-5f7fb7b-s2dxn\" (UID: \"4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5\") " pod="openstack-kuttl-tests/swift-proxy-5f7fb7b-s2dxn" Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.122211 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-run-httpd\") pod \"swift-proxy-5f7fb7b-s2dxn\" (UID: \"4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5\") " pod="openstack-kuttl-tests/swift-proxy-5f7fb7b-s2dxn" Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.123406 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-log-httpd\") pod \"swift-proxy-5f7fb7b-s2dxn\" (UID: \"4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5\") " pod="openstack-kuttl-tests/swift-proxy-5f7fb7b-s2dxn" Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.123455 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-combined-ca-bundle\") pod \"swift-proxy-5f7fb7b-s2dxn\" (UID: \"4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5\") " pod="openstack-kuttl-tests/swift-proxy-5f7fb7b-s2dxn" Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.123495 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-internal-tls-certs\") pod \"swift-proxy-5f7fb7b-s2dxn\" (UID: \"4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5\") " pod="openstack-kuttl-tests/swift-proxy-5f7fb7b-s2dxn" Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.126173 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-jhww5\" (UniqueName: \"kubernetes.io/projected/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-kube-api-access-jhww5\") pod \"swift-proxy-5f7fb7b-s2dxn\" (UID: \"4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5\") " pod="openstack-kuttl-tests/swift-proxy-5f7fb7b-s2dxn" Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.126193 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-etc-swift\") pod \"swift-proxy-5f7fb7b-s2dxn\" (UID: \"4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5\") " pod="openstack-kuttl-tests/swift-proxy-5f7fb7b-s2dxn" Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.126562 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-config-data\") pod \"swift-proxy-5f7fb7b-s2dxn\" (UID: \"4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5\") " pod="openstack-kuttl-tests/swift-proxy-5f7fb7b-s2dxn" Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.126635 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-public-tls-certs\") pod \"swift-proxy-5f7fb7b-s2dxn\" (UID: \"4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5\") " pod="openstack-kuttl-tests/swift-proxy-5f7fb7b-s2dxn" Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.123507 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-run-httpd\") pod \"swift-proxy-5f7fb7b-s2dxn\" (UID: \"4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5\") " pod="openstack-kuttl-tests/swift-proxy-5f7fb7b-s2dxn" Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.131356 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-public-tls-certs\") pod \"swift-proxy-5f7fb7b-s2dxn\" (UID: \"4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5\") " pod="openstack-kuttl-tests/swift-proxy-5f7fb7b-s2dxn" Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.133261 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-internal-tls-certs\") pod \"swift-proxy-5f7fb7b-s2dxn\" (UID: \"4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5\") " pod="openstack-kuttl-tests/swift-proxy-5f7fb7b-s2dxn" Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.135102 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-combined-ca-bundle\") pod \"swift-proxy-5f7fb7b-s2dxn\" (UID: \"4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5\") " pod="openstack-kuttl-tests/swift-proxy-5f7fb7b-s2dxn" Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.140685 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-config-data\") pod \"swift-proxy-5f7fb7b-s2dxn\" (UID: \"4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5\") " pod="openstack-kuttl-tests/swift-proxy-5f7fb7b-s2dxn" Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.143903 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: 
\"kubernetes.io/projected/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-etc-swift\") pod \"swift-proxy-5f7fb7b-s2dxn\" (UID: \"4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5\") " pod="openstack-kuttl-tests/swift-proxy-5f7fb7b-s2dxn" Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.148632 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jhww5\" (UniqueName: \"kubernetes.io/projected/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-kube-api-access-jhww5\") pod \"swift-proxy-5f7fb7b-s2dxn\" (UID: \"4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5\") " pod="openstack-kuttl-tests/swift-proxy-5f7fb7b-s2dxn" Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.153131 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-6866498598-2wxkp" event={"ID":"72394a99-d879-415b-9ae9-21165451ac70","Type":"ContainerDied","Data":"9ad156af82950e8f5e276e76cb19e3ef406261de26707bcdd959116491a9ace8"} Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.153225 4558 scope.go:117] "RemoveContainer" containerID="57a221fa6c7f95ec5c64257c235ed54b2e2bd0a8d13ecc18c3f5f0a4dce5307b" Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.153370 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-6866498598-2wxkp" Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.181250 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstackclient" event={"ID":"e383e708-a471-4904-bfc7-eead6a5c76dc","Type":"ContainerStarted","Data":"fc05c1e78ad7a92e2b78d674adb5592b41e556323420c6070785f62676598f12"} Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.181291 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstackclient" event={"ID":"e383e708-a471-4904-bfc7-eead6a5c76dc","Type":"ContainerStarted","Data":"bf107416f9c82b93a43f94e999290c66a3ddfb88778eddd62960ea850ad5aecb"} Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.201565 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"c1afb831-f3e4-4356-ab86-713eb0beca39","Type":"ContainerStarted","Data":"11baea4ab79cfa0023a448f7d4c2fd060ecbb75ef0a90656b63b54a79a53d8f7"} Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.201599 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"c1afb831-f3e4-4356-ab86-713eb0beca39","Type":"ContainerStarted","Data":"ad602ea750041b8651b16f5dfde26a7e046c18a3d3efca817c0e051aad027231"} Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.218630 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-5jhz5" Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.218814 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-5jhz5" event={"ID":"467e28b1-5dde-44cb-9ef4-f81980c3d999","Type":"ContainerDied","Data":"1dc319fbe664663f43eb5091a84dfebd22bd87a2698091de839e309298e64584"} Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.218850 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1dc319fbe664663f43eb5091a84dfebd22bd87a2698091de839e309298e64584" Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.260605 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/openstackclient" podStartSLOduration=2.260588979 podStartE2EDuration="2.260588979s" podCreationTimestamp="2026-01-20 17:27:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:27:59.24851016 +0000 UTC m=+2773.008848127" watchObservedRunningTime="2026-01-20 17:27:59.260588979 +0000 UTC m=+2773.020926946" Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.306611 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-6866498598-2wxkp"] Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.329545 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-6866498598-2wxkp"] Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.342303 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-proxy-5f7fb7b-s2dxn" Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.344020 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/placement-7ab2-account-create-update-c5qqm"] Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.352466 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-7ab2-account-create-update-c5qqm" Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.361893 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"placement-db-secret" Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.375078 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-7ab2-account-create-update-c5qqm"] Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.393209 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/root-account-create-update-dmtbg"] Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.395338 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-dmtbg" Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.403840 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"openstack-cell1-mariadb-root-db-secret" Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.411952 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/placement-db-create-j7682"] Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.413248 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-db-create-j7682" Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.457119 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c2bfe108-b710-400b-baac-55815b192ee3-operator-scripts\") pod \"root-account-create-update-dmtbg\" (UID: \"c2bfe108-b710-400b-baac-55815b192ee3\") " pod="openstack-kuttl-tests/root-account-create-update-dmtbg" Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.457206 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/df28560c-92e6-4003-a08f-b8691fa43300-operator-scripts\") pod \"placement-db-create-j7682\" (UID: \"df28560c-92e6-4003-a08f-b8691fa43300\") " pod="openstack-kuttl-tests/placement-db-create-j7682" Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.457320 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mp4v6\" (UniqueName: \"kubernetes.io/projected/b201974e-8bb6-412a-95d5-cce7a95e4528-kube-api-access-mp4v6\") pod \"placement-7ab2-account-create-update-c5qqm\" (UID: \"b201974e-8bb6-412a-95d5-cce7a95e4528\") " pod="openstack-kuttl-tests/placement-7ab2-account-create-update-c5qqm" Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.457630 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-28p8x\" (UniqueName: \"kubernetes.io/projected/c2bfe108-b710-400b-baac-55815b192ee3-kube-api-access-28p8x\") pod \"root-account-create-update-dmtbg\" (UID: \"c2bfe108-b710-400b-baac-55815b192ee3\") " pod="openstack-kuttl-tests/root-account-create-update-dmtbg" Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.459141 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b201974e-8bb6-412a-95d5-cce7a95e4528-operator-scripts\") pod \"placement-7ab2-account-create-update-c5qqm\" (UID: \"b201974e-8bb6-412a-95d5-cce7a95e4528\") " pod="openstack-kuttl-tests/placement-7ab2-account-create-update-c5qqm" Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.459318 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qz5rv\" (UniqueName: \"kubernetes.io/projected/df28560c-92e6-4003-a08f-b8691fa43300-kube-api-access-qz5rv\") pod \"placement-db-create-j7682\" (UID: \"df28560c-92e6-4003-a08f-b8691fa43300\") " pod="openstack-kuttl-tests/placement-db-create-j7682" Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.480854 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-dmtbg"] Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.515882 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.533811 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-db-create-j7682"] Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.563739 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-28p8x\" (UniqueName: \"kubernetes.io/projected/c2bfe108-b710-400b-baac-55815b192ee3-kube-api-access-28p8x\") pod \"root-account-create-update-dmtbg\" (UID: 
\"c2bfe108-b710-400b-baac-55815b192ee3\") " pod="openstack-kuttl-tests/root-account-create-update-dmtbg" Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.563894 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b201974e-8bb6-412a-95d5-cce7a95e4528-operator-scripts\") pod \"placement-7ab2-account-create-update-c5qqm\" (UID: \"b201974e-8bb6-412a-95d5-cce7a95e4528\") " pod="openstack-kuttl-tests/placement-7ab2-account-create-update-c5qqm" Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.564018 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qz5rv\" (UniqueName: \"kubernetes.io/projected/df28560c-92e6-4003-a08f-b8691fa43300-kube-api-access-qz5rv\") pod \"placement-db-create-j7682\" (UID: \"df28560c-92e6-4003-a08f-b8691fa43300\") " pod="openstack-kuttl-tests/placement-db-create-j7682" Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.564177 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c2bfe108-b710-400b-baac-55815b192ee3-operator-scripts\") pod \"root-account-create-update-dmtbg\" (UID: \"c2bfe108-b710-400b-baac-55815b192ee3\") " pod="openstack-kuttl-tests/root-account-create-update-dmtbg" Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.564267 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/df28560c-92e6-4003-a08f-b8691fa43300-operator-scripts\") pod \"placement-db-create-j7682\" (UID: \"df28560c-92e6-4003-a08f-b8691fa43300\") " pod="openstack-kuttl-tests/placement-db-create-j7682" Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.564385 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mp4v6\" (UniqueName: \"kubernetes.io/projected/b201974e-8bb6-412a-95d5-cce7a95e4528-kube-api-access-mp4v6\") pod \"placement-7ab2-account-create-update-c5qqm\" (UID: \"b201974e-8bb6-412a-95d5-cce7a95e4528\") " pod="openstack-kuttl-tests/placement-7ab2-account-create-update-c5qqm" Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.565557 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b201974e-8bb6-412a-95d5-cce7a95e4528-operator-scripts\") pod \"placement-7ab2-account-create-update-c5qqm\" (UID: \"b201974e-8bb6-412a-95d5-cce7a95e4528\") " pod="openstack-kuttl-tests/placement-7ab2-account-create-update-c5qqm" Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.565797 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c2bfe108-b710-400b-baac-55815b192ee3-operator-scripts\") pod \"root-account-create-update-dmtbg\" (UID: \"c2bfe108-b710-400b-baac-55815b192ee3\") " pod="openstack-kuttl-tests/root-account-create-update-dmtbg" Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.570843 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/df28560c-92e6-4003-a08f-b8691fa43300-operator-scripts\") pod \"placement-db-create-j7682\" (UID: \"df28560c-92e6-4003-a08f-b8691fa43300\") " pod="openstack-kuttl-tests/placement-db-create-j7682" Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.574376 4558 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack-kuttl-tests/glance-65cd-account-create-update-grtts"] Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.575830 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-65cd-account-create-update-grtts" Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.577917 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-db-secret" Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.650076 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mp4v6\" (UniqueName: \"kubernetes.io/projected/b201974e-8bb6-412a-95d5-cce7a95e4528-kube-api-access-mp4v6\") pod \"placement-7ab2-account-create-update-c5qqm\" (UID: \"b201974e-8bb6-412a-95d5-cce7a95e4528\") " pod="openstack-kuttl-tests/placement-7ab2-account-create-update-c5qqm" Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.655158 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qz5rv\" (UniqueName: \"kubernetes.io/projected/df28560c-92e6-4003-a08f-b8691fa43300-kube-api-access-qz5rv\") pod \"placement-db-create-j7682\" (UID: \"df28560c-92e6-4003-a08f-b8691fa43300\") " pod="openstack-kuttl-tests/placement-db-create-j7682" Jan 20 17:27:59 crc kubenswrapper[4558]: E0120 17:27:59.671472 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 20 17:27:59 crc kubenswrapper[4558]: E0120 17:27:59.671541 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/d692722b-f7fd-447c-8b7a-f56cff940d91-config-data podName:d692722b-f7fd-447c-8b7a-f56cff940d91 nodeName:}" failed. No retries permitted until 2026-01-20 17:28:00.171523167 +0000 UTC m=+2773.931861134 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/d692722b-f7fd-447c-8b7a-f56cff940d91-config-data") pod "rabbitmq-server-0" (UID: "d692722b-f7fd-447c-8b7a-f56cff940d91") : configmap "rabbitmq-config-data" not found Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.673322 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-65cd-account-create-update-grtts"] Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.673709 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-28p8x\" (UniqueName: \"kubernetes.io/projected/c2bfe108-b710-400b-baac-55815b192ee3-kube-api-access-28p8x\") pod \"root-account-create-update-dmtbg\" (UID: \"c2bfe108-b710-400b-baac-55815b192ee3\") " pod="openstack-kuttl-tests/root-account-create-update-dmtbg" Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.677045 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-7ab2-account-create-update-c5qqm" Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.706629 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-dmtbg" Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.717537 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-db-create-j7682" Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.810811 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-7ab2-account-create-update-s556d"] Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.813183 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5f7a0ad9-436f-433c-9a91-cec4ffd3beeb-operator-scripts\") pod \"glance-65cd-account-create-update-grtts\" (UID: \"5f7a0ad9-436f-433c-9a91-cec4ffd3beeb\") " pod="openstack-kuttl-tests/glance-65cd-account-create-update-grtts" Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.813347 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kbpl7\" (UniqueName: \"kubernetes.io/projected/5f7a0ad9-436f-433c-9a91-cec4ffd3beeb-kube-api-access-kbpl7\") pod \"glance-65cd-account-create-update-grtts\" (UID: \"5f7a0ad9-436f-433c-9a91-cec4ffd3beeb\") " pod="openstack-kuttl-tests/glance-65cd-account-create-update-grtts" Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.874442 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/placement-7ab2-account-create-update-s556d"] Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.892171 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-db-create-zk697"] Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.919322 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/placement-db-create-zk697"] Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.923254 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kbpl7\" (UniqueName: \"kubernetes.io/projected/5f7a0ad9-436f-433c-9a91-cec4ffd3beeb-kube-api-access-kbpl7\") pod \"glance-65cd-account-create-update-grtts\" (UID: \"5f7a0ad9-436f-433c-9a91-cec4ffd3beeb\") " pod="openstack-kuttl-tests/glance-65cd-account-create-update-grtts" Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.926452 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5f7a0ad9-436f-433c-9a91-cec4ffd3beeb-operator-scripts\") pod \"glance-65cd-account-create-update-grtts\" (UID: \"5f7a0ad9-436f-433c-9a91-cec4ffd3beeb\") " pod="openstack-kuttl-tests/glance-65cd-account-create-update-grtts" Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.940919 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5f7a0ad9-436f-433c-9a91-cec4ffd3beeb-operator-scripts\") pod \"glance-65cd-account-create-update-grtts\" (UID: \"5f7a0ad9-436f-433c-9a91-cec4ffd3beeb\") " pod="openstack-kuttl-tests/glance-65cd-account-create-update-grtts" Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.954767 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-8f4cc76c4-9qkpn"] Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.956915 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-8f4cc76c4-9qkpn" Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.973760 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-8f4cc76c4-9qkpn"] Jan 20 17:27:59 crc kubenswrapper[4558]: I0120 17:27:59.984997 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.033439 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.044019 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fa57dfc3-11b4-48af-a78c-00463e8894bf-config-data-custom\") pod \"barbican-keystone-listener-8f4cc76c4-9qkpn\" (UID: \"fa57dfc3-11b4-48af-a78c-00463e8894bf\") " pod="openstack-kuttl-tests/barbican-keystone-listener-8f4cc76c4-9qkpn" Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.044146 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa57dfc3-11b4-48af-a78c-00463e8894bf-config-data\") pod \"barbican-keystone-listener-8f4cc76c4-9qkpn\" (UID: \"fa57dfc3-11b4-48af-a78c-00463e8894bf\") " pod="openstack-kuttl-tests/barbican-keystone-listener-8f4cc76c4-9qkpn" Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.045401 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7vmvf\" (UniqueName: \"kubernetes.io/projected/fa57dfc3-11b4-48af-a78c-00463e8894bf-kube-api-access-7vmvf\") pod \"barbican-keystone-listener-8f4cc76c4-9qkpn\" (UID: \"fa57dfc3-11b4-48af-a78c-00463e8894bf\") " pod="openstack-kuttl-tests/barbican-keystone-listener-8f4cc76c4-9qkpn" Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.045490 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fa57dfc3-11b4-48af-a78c-00463e8894bf-logs\") pod \"barbican-keystone-listener-8f4cc76c4-9qkpn\" (UID: \"fa57dfc3-11b4-48af-a78c-00463e8894bf\") " pod="openstack-kuttl-tests/barbican-keystone-listener-8f4cc76c4-9qkpn" Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.045630 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa57dfc3-11b4-48af-a78c-00463e8894bf-combined-ca-bundle\") pod \"barbican-keystone-listener-8f4cc76c4-9qkpn\" (UID: \"fa57dfc3-11b4-48af-a78c-00463e8894bf\") " pod="openstack-kuttl-tests/barbican-keystone-listener-8f4cc76c4-9qkpn" Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.067886 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kbpl7\" (UniqueName: \"kubernetes.io/projected/5f7a0ad9-436f-433c-9a91-cec4ffd3beeb-kube-api-access-kbpl7\") pod \"glance-65cd-account-create-update-grtts\" (UID: \"5f7a0ad9-436f-433c-9a91-cec4ffd3beeb\") " pod="openstack-kuttl-tests/glance-65cd-account-create-update-grtts" Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.091625 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-0"] Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.148654 4558 kubelet.go:2421] "SyncLoop ADD" 
source="api" pods=["openstack-kuttl-tests/barbican-worker-74d54f756c-g9zzk"] Jan 20 17:28:00 crc kubenswrapper[4558]: E0120 17:28:00.149361 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e383e708-a471-4904-bfc7-eead6a5c76dc" containerName="openstackclient" Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.149375 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e383e708-a471-4904-bfc7-eead6a5c76dc" containerName="openstackclient" Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.149602 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e383e708-a471-4904-bfc7-eead6a5c76dc" containerName="openstackclient" Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.150596 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-74d54f756c-g9zzk" Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.150774 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7vmvf\" (UniqueName: \"kubernetes.io/projected/fa57dfc3-11b4-48af-a78c-00463e8894bf-kube-api-access-7vmvf\") pod \"barbican-keystone-listener-8f4cc76c4-9qkpn\" (UID: \"fa57dfc3-11b4-48af-a78c-00463e8894bf\") " pod="openstack-kuttl-tests/barbican-keystone-listener-8f4cc76c4-9qkpn" Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.150840 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fa57dfc3-11b4-48af-a78c-00463e8894bf-logs\") pod \"barbican-keystone-listener-8f4cc76c4-9qkpn\" (UID: \"fa57dfc3-11b4-48af-a78c-00463e8894bf\") " pod="openstack-kuttl-tests/barbican-keystone-listener-8f4cc76c4-9qkpn" Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.150936 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa57dfc3-11b4-48af-a78c-00463e8894bf-combined-ca-bundle\") pod \"barbican-keystone-listener-8f4cc76c4-9qkpn\" (UID: \"fa57dfc3-11b4-48af-a78c-00463e8894bf\") " pod="openstack-kuttl-tests/barbican-keystone-listener-8f4cc76c4-9qkpn" Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.151026 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fa57dfc3-11b4-48af-a78c-00463e8894bf-config-data-custom\") pod \"barbican-keystone-listener-8f4cc76c4-9qkpn\" (UID: \"fa57dfc3-11b4-48af-a78c-00463e8894bf\") " pod="openstack-kuttl-tests/barbican-keystone-listener-8f4cc76c4-9qkpn" Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.151182 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa57dfc3-11b4-48af-a78c-00463e8894bf-config-data\") pod \"barbican-keystone-listener-8f4cc76c4-9qkpn\" (UID: \"fa57dfc3-11b4-48af-a78c-00463e8894bf\") " pod="openstack-kuttl-tests/barbican-keystone-listener-8f4cc76c4-9qkpn" Jan 20 17:28:00 crc kubenswrapper[4558]: E0120 17:28:00.151510 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:28:00 crc kubenswrapper[4558]: E0120 17:28:00.151557 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fa57dfc3-11b4-48af-a78c-00463e8894bf-combined-ca-bundle podName:fa57dfc3-11b4-48af-a78c-00463e8894bf nodeName:}" failed. 
No retries permitted until 2026-01-20 17:28:00.651541478 +0000 UTC m=+2774.411879446 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/fa57dfc3-11b4-48af-a78c-00463e8894bf-combined-ca-bundle") pod "barbican-keystone-listener-8f4cc76c4-9qkpn" (UID: "fa57dfc3-11b4-48af-a78c-00463e8894bf") : secret "combined-ca-bundle" not found Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.151659 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fa57dfc3-11b4-48af-a78c-00463e8894bf-logs\") pod \"barbican-keystone-listener-8f4cc76c4-9qkpn\" (UID: \"fa57dfc3-11b4-48af-a78c-00463e8894bf\") " pod="openstack-kuttl-tests/barbican-keystone-listener-8f4cc76c4-9qkpn" Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.161911 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-b136-account-create-update-n9qdv"] Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.163030 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-b136-account-create-update-n9qdv" Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.197590 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-worker-74d54f756c-g9zzk"] Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.200698 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fa57dfc3-11b4-48af-a78c-00463e8894bf-config-data-custom\") pod \"barbican-keystone-listener-8f4cc76c4-9qkpn\" (UID: \"fa57dfc3-11b4-48af-a78c-00463e8894bf\") " pod="openstack-kuttl-tests/barbican-keystone-listener-8f4cc76c4-9qkpn" Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.207299 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-db-secret" Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.209132 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa57dfc3-11b4-48af-a78c-00463e8894bf-config-data\") pod \"barbican-keystone-listener-8f4cc76c4-9qkpn\" (UID: \"fa57dfc3-11b4-48af-a78c-00463e8894bf\") " pod="openstack-kuttl-tests/barbican-keystone-listener-8f4cc76c4-9qkpn" Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.209782 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.210065 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-api-0" podUID="f6a0777b-d04f-451b-9e3d-3f9ab0e19df8" containerName="cinder-api-log" containerID="cri-o://516792e38f223a8dda033868458240450dfed4fe467c3a9a1c42b3301734bf44" gracePeriod=30 Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.210230 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-api-0" podUID="f6a0777b-d04f-451b-9e3d-3f9ab0e19df8" containerName="cinder-api" containerID="cri-o://5318567d652c11a8d32a83b41eaaa58746106e91b1ac59789f4bda5a8e0f9415" gracePeriod=30 Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.225344 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7vmvf\" (UniqueName: \"kubernetes.io/projected/fa57dfc3-11b4-48af-a78c-00463e8894bf-kube-api-access-7vmvf\") pod 
\"barbican-keystone-listener-8f4cc76c4-9qkpn\" (UID: \"fa57dfc3-11b4-48af-a78c-00463e8894bf\") " pod="openstack-kuttl-tests/barbican-keystone-listener-8f4cc76c4-9qkpn" Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.247985 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-db-create-rt2fh"] Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.249761 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-db-create-rt2fh" Jan 20 17:28:00 crc kubenswrapper[4558]: E0120 17:28:00.258267 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 20 17:28:00 crc kubenswrapper[4558]: E0120 17:28:00.258319 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/d692722b-f7fd-447c-8b7a-f56cff940d91-config-data podName:d692722b-f7fd-447c-8b7a-f56cff940d91 nodeName:}" failed. No retries permitted until 2026-01-20 17:28:01.258303469 +0000 UTC m=+2775.018641436 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/d692722b-f7fd-447c-8b7a-f56cff940d91-config-data") pod "rabbitmq-server-0" (UID: "d692722b-f7fd-447c-8b7a-f56cff940d91") : configmap "rabbitmq-config-data" not found Jan 20 17:28:00 crc kubenswrapper[4558]: E0120 17:28:00.258600 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Jan 20 17:28:00 crc kubenswrapper[4558]: E0120 17:28:00.258633 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/48cfc6e5-774d-4e7d-8103-f6a3260ea14c-config-data podName:48cfc6e5-774d-4e7d-8103-f6a3260ea14c nodeName:}" failed. No retries permitted until 2026-01-20 17:28:00.758626216 +0000 UTC m=+2774.518964173 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/48cfc6e5-774d-4e7d-8103-f6a3260ea14c-config-data") pod "rabbitmq-cell1-server-0" (UID: "48cfc6e5-774d-4e7d-8103-f6a3260ea14c") : configmap "rabbitmq-cell1-config-data" not found Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.283222 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-b136-account-create-update-n9qdv"] Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.288633 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-zbfxs"] Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.290477 4558 generic.go:334] "Generic (PLEG): container finished" podID="4d845835-77d4-4d8a-9fad-0286ad5291ae" containerID="e33ca27067cb626a71e4e70acb048cf75b1fdf4f714c880df6a27aa61155fa45" exitCode=137 Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.299221 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-zbfxs"] Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.306741 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-e949-account-create-update-qd847"] Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.308100 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-e949-account-create-update-qd847" Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.336897 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-db-secret" Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.350875 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-65cd-account-create-update-grtts" Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.369902 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-db-create-rt2fh"] Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.382917 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c8k9g\" (UniqueName: \"kubernetes.io/projected/1bcd505c-4754-4f41-b91d-6e488a669c93-kube-api-access-c8k9g\") pod \"neutron-b136-account-create-update-n9qdv\" (UID: \"1bcd505c-4754-4f41-b91d-6e488a669c93\") " pod="openstack-kuttl-tests/neutron-b136-account-create-update-n9qdv" Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.382965 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1bcd505c-4754-4f41-b91d-6e488a669c93-operator-scripts\") pod \"neutron-b136-account-create-update-n9qdv\" (UID: \"1bcd505c-4754-4f41-b91d-6e488a669c93\") " pod="openstack-kuttl-tests/neutron-b136-account-create-update-n9qdv" Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.382989 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9b28c41-c054-49f3-87d2-a8c15e6124de-config-data\") pod \"barbican-worker-74d54f756c-g9zzk\" (UID: \"b9b28c41-c054-49f3-87d2-a8c15e6124de\") " pod="openstack-kuttl-tests/barbican-worker-74d54f756c-g9zzk" Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.383034 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1b7c6fb3-f30d-4dde-8502-e7d840719520-operator-scripts\") pod \"glance-db-create-rt2fh\" (UID: \"1b7c6fb3-f30d-4dde-8502-e7d840719520\") " pod="openstack-kuttl-tests/glance-db-create-rt2fh" Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.383145 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6vfpf\" (UniqueName: \"kubernetes.io/projected/1b7c6fb3-f30d-4dde-8502-e7d840719520-kube-api-access-6vfpf\") pod \"glance-db-create-rt2fh\" (UID: \"1b7c6fb3-f30d-4dde-8502-e7d840719520\") " pod="openstack-kuttl-tests/glance-db-create-rt2fh" Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.383195 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/54457478-e4f4-4088-bd18-e427314e1fb2-operator-scripts\") pod \"barbican-e949-account-create-update-qd847\" (UID: \"54457478-e4f4-4088-bd18-e427314e1fb2\") " pod="openstack-kuttl-tests/barbican-e949-account-create-update-qd847" Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.383276 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b9b28c41-c054-49f3-87d2-a8c15e6124de-logs\") pod \"barbican-worker-74d54f756c-g9zzk\" 
(UID: \"b9b28c41-c054-49f3-87d2-a8c15e6124de\") " pod="openstack-kuttl-tests/barbican-worker-74d54f756c-g9zzk" Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.383296 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9b28c41-c054-49f3-87d2-a8c15e6124de-combined-ca-bundle\") pod \"barbican-worker-74d54f756c-g9zzk\" (UID: \"b9b28c41-c054-49f3-87d2-a8c15e6124de\") " pod="openstack-kuttl-tests/barbican-worker-74d54f756c-g9zzk" Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.383318 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q7dzv\" (UniqueName: \"kubernetes.io/projected/b9b28c41-c054-49f3-87d2-a8c15e6124de-kube-api-access-q7dzv\") pod \"barbican-worker-74d54f756c-g9zzk\" (UID: \"b9b28c41-c054-49f3-87d2-a8c15e6124de\") " pod="openstack-kuttl-tests/barbican-worker-74d54f756c-g9zzk" Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.383360 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zz9r4\" (UniqueName: \"kubernetes.io/projected/54457478-e4f4-4088-bd18-e427314e1fb2-kube-api-access-zz9r4\") pod \"barbican-e949-account-create-update-qd847\" (UID: \"54457478-e4f4-4088-bd18-e427314e1fb2\") " pod="openstack-kuttl-tests/barbican-e949-account-create-update-qd847" Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.383423 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b9b28c41-c054-49f3-87d2-a8c15e6124de-config-data-custom\") pod \"barbican-worker-74d54f756c-g9zzk\" (UID: \"b9b28c41-c054-49f3-87d2-a8c15e6124de\") " pod="openstack-kuttl-tests/barbican-worker-74d54f756c-g9zzk" Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.421933 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.422498 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.470969 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-e949-account-create-update-qd847"] Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.485975 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b9b28c41-c054-49f3-87d2-a8c15e6124de-config-data-custom\") pod \"barbican-worker-74d54f756c-g9zzk\" (UID: \"b9b28c41-c054-49f3-87d2-a8c15e6124de\") " pod="openstack-kuttl-tests/barbican-worker-74d54f756c-g9zzk" Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.486034 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c8k9g\" (UniqueName: \"kubernetes.io/projected/1bcd505c-4754-4f41-b91d-6e488a669c93-kube-api-access-c8k9g\") pod \"neutron-b136-account-create-update-n9qdv\" (UID: \"1bcd505c-4754-4f41-b91d-6e488a669c93\") " pod="openstack-kuttl-tests/neutron-b136-account-create-update-n9qdv" Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.486061 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1bcd505c-4754-4f41-b91d-6e488a669c93-operator-scripts\") pod 
\"neutron-b136-account-create-update-n9qdv\" (UID: \"1bcd505c-4754-4f41-b91d-6e488a669c93\") " pod="openstack-kuttl-tests/neutron-b136-account-create-update-n9qdv" Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.486082 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9b28c41-c054-49f3-87d2-a8c15e6124de-config-data\") pod \"barbican-worker-74d54f756c-g9zzk\" (UID: \"b9b28c41-c054-49f3-87d2-a8c15e6124de\") " pod="openstack-kuttl-tests/barbican-worker-74d54f756c-g9zzk" Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.486119 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1b7c6fb3-f30d-4dde-8502-e7d840719520-operator-scripts\") pod \"glance-db-create-rt2fh\" (UID: \"1b7c6fb3-f30d-4dde-8502-e7d840719520\") " pod="openstack-kuttl-tests/glance-db-create-rt2fh" Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.486852 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6vfpf\" (UniqueName: \"kubernetes.io/projected/1b7c6fb3-f30d-4dde-8502-e7d840719520-kube-api-access-6vfpf\") pod \"glance-db-create-rt2fh\" (UID: \"1b7c6fb3-f30d-4dde-8502-e7d840719520\") " pod="openstack-kuttl-tests/glance-db-create-rt2fh" Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.487538 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/54457478-e4f4-4088-bd18-e427314e1fb2-operator-scripts\") pod \"barbican-e949-account-create-update-qd847\" (UID: \"54457478-e4f4-4088-bd18-e427314e1fb2\") " pod="openstack-kuttl-tests/barbican-e949-account-create-update-qd847" Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.487761 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b9b28c41-c054-49f3-87d2-a8c15e6124de-logs\") pod \"barbican-worker-74d54f756c-g9zzk\" (UID: \"b9b28c41-c054-49f3-87d2-a8c15e6124de\") " pod="openstack-kuttl-tests/barbican-worker-74d54f756c-g9zzk" Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.487786 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9b28c41-c054-49f3-87d2-a8c15e6124de-combined-ca-bundle\") pod \"barbican-worker-74d54f756c-g9zzk\" (UID: \"b9b28c41-c054-49f3-87d2-a8c15e6124de\") " pod="openstack-kuttl-tests/barbican-worker-74d54f756c-g9zzk" Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.487805 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q7dzv\" (UniqueName: \"kubernetes.io/projected/b9b28c41-c054-49f3-87d2-a8c15e6124de-kube-api-access-q7dzv\") pod \"barbican-worker-74d54f756c-g9zzk\" (UID: \"b9b28c41-c054-49f3-87d2-a8c15e6124de\") " pod="openstack-kuttl-tests/barbican-worker-74d54f756c-g9zzk" Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.487841 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zz9r4\" (UniqueName: \"kubernetes.io/projected/54457478-e4f4-4088-bd18-e427314e1fb2-kube-api-access-zz9r4\") pod \"barbican-e949-account-create-update-qd847\" (UID: \"54457478-e4f4-4088-bd18-e427314e1fb2\") " pod="openstack-kuttl-tests/barbican-e949-account-create-update-qd847" Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.489923 4558 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1b7c6fb3-f30d-4dde-8502-e7d840719520-operator-scripts\") pod \"glance-db-create-rt2fh\" (UID: \"1b7c6fb3-f30d-4dde-8502-e7d840719520\") " pod="openstack-kuttl-tests/glance-db-create-rt2fh" Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.490305 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/54457478-e4f4-4088-bd18-e427314e1fb2-operator-scripts\") pod \"barbican-e949-account-create-update-qd847\" (UID: \"54457478-e4f4-4088-bd18-e427314e1fb2\") " pod="openstack-kuttl-tests/barbican-e949-account-create-update-qd847" Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.490578 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b9b28c41-c054-49f3-87d2-a8c15e6124de-logs\") pod \"barbican-worker-74d54f756c-g9zzk\" (UID: \"b9b28c41-c054-49f3-87d2-a8c15e6124de\") " pod="openstack-kuttl-tests/barbican-worker-74d54f756c-g9zzk" Jan 20 17:28:00 crc kubenswrapper[4558]: E0120 17:28:00.490653 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:28:00 crc kubenswrapper[4558]: E0120 17:28:00.490692 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b9b28c41-c054-49f3-87d2-a8c15e6124de-combined-ca-bundle podName:b9b28c41-c054-49f3-87d2-a8c15e6124de nodeName:}" failed. No retries permitted until 2026-01-20 17:28:00.990678201 +0000 UTC m=+2774.751016168 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/b9b28c41-c054-49f3-87d2-a8c15e6124de-combined-ca-bundle") pod "barbican-worker-74d54f756c-g9zzk" (UID: "b9b28c41-c054-49f3-87d2-a8c15e6124de") : secret "combined-ca-bundle" not found Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.496572 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1bcd505c-4754-4f41-b91d-6e488a669c93-operator-scripts\") pod \"neutron-b136-account-create-update-n9qdv\" (UID: \"1bcd505c-4754-4f41-b91d-6e488a669c93\") " pod="openstack-kuttl-tests/neutron-b136-account-create-update-n9qdv" Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.527065 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b9b28c41-c054-49f3-87d2-a8c15e6124de-config-data-custom\") pod \"barbican-worker-74d54f756c-g9zzk\" (UID: \"b9b28c41-c054-49f3-87d2-a8c15e6124de\") " pod="openstack-kuttl-tests/barbican-worker-74d54f756c-g9zzk" Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.527934 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9b28c41-c054-49f3-87d2-a8c15e6124de-config-data\") pod \"barbican-worker-74d54f756c-g9zzk\" (UID: \"b9b28c41-c054-49f3-87d2-a8c15e6124de\") " pod="openstack-kuttl-tests/barbican-worker-74d54f756c-g9zzk" Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.554893 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c8k9g\" (UniqueName: \"kubernetes.io/projected/1bcd505c-4754-4f41-b91d-6e488a669c93-kube-api-access-c8k9g\") pod \"neutron-b136-account-create-update-n9qdv\" (UID: \"1bcd505c-4754-4f41-b91d-6e488a669c93\") " 
pod="openstack-kuttl-tests/neutron-b136-account-create-update-n9qdv" Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.557858 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zz9r4\" (UniqueName: \"kubernetes.io/projected/54457478-e4f4-4088-bd18-e427314e1fb2-kube-api-access-zz9r4\") pod \"barbican-e949-account-create-update-qd847\" (UID: \"54457478-e4f4-4088-bd18-e427314e1fb2\") " pod="openstack-kuttl-tests/barbican-e949-account-create-update-qd847" Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.611804 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6vfpf\" (UniqueName: \"kubernetes.io/projected/1b7c6fb3-f30d-4dde-8502-e7d840719520-kube-api-access-6vfpf\") pod \"glance-db-create-rt2fh\" (UID: \"1b7c6fb3-f30d-4dde-8502-e7d840719520\") " pod="openstack-kuttl-tests/glance-db-create-rt2fh" Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.626478 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="72394a99-d879-415b-9ae9-21165451ac70" path="/var/lib/kubelet/pods/72394a99-d879-415b-9ae9-21165451ac70/volumes" Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.627354 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="979d210b-e852-4deb-bef1-345fbd8ccf52" path="/var/lib/kubelet/pods/979d210b-e852-4deb-bef1-345fbd8ccf52/volumes" Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.629000 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b7b28a1d-af79-4ed2-aa96-15e39e665994" path="/var/lib/kubelet/pods/b7b28a1d-af79-4ed2-aa96-15e39e665994/volumes" Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.629560 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ffd4b518-58dc-44f5-a3f9-614a77d06ca2" path="/var/lib/kubelet/pods/ffd4b518-58dc-44f5-a3f9-614a77d06ca2/volumes" Jan 20 17:28:00 crc kubenswrapper[4558]: E0120 17:28:00.630675 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:28:00 crc kubenswrapper[4558]: E0120 17:28:00.630743 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cc4089c6-71ea-4503-b54c-18777fcc3c48-combined-ca-bundle podName:cc4089c6-71ea-4503-b54c-18777fcc3c48 nodeName:}" failed. No retries permitted until 2026-01-20 17:28:01.130721312 +0000 UTC m=+2774.891059270 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/cc4089c6-71ea-4503-b54c-18777fcc3c48-combined-ca-bundle") pod "nova-metadata-0" (UID: "cc4089c6-71ea-4503-b54c-18777fcc3c48") : secret "combined-ca-bundle" not found Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.621642 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q7dzv\" (UniqueName: \"kubernetes.io/projected/b9b28c41-c054-49f3-87d2-a8c15e6124de-kube-api-access-q7dzv\") pod \"barbican-worker-74d54f756c-g9zzk\" (UID: \"b9b28c41-c054-49f3-87d2-a8c15e6124de\") " pod="openstack-kuttl-tests/barbican-worker-74d54f756c-g9zzk" Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.642264 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-43e3-account-create-update-fdlrr"] Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.646881 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-43e3-account-create-update-fdlrr" Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.656117 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-db-create-2kmst"] Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.661847 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-db-create-2kmst" Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.669614 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-db-create-rt2fh" Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.680096 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-db-secret" Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.702651 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-65cd-account-create-update-dr789"] Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.729744 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-db-create-2kmst"] Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.731415 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-e949-account-create-update-qd847" Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.737703 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7ce0109d-f162-4ce7-b957-34ba84e8e377-operator-scripts\") pod \"cinder-43e3-account-create-update-fdlrr\" (UID: \"7ce0109d-f162-4ce7-b957-34ba84e8e377\") " pod="openstack-kuttl-tests/cinder-43e3-account-create-update-fdlrr" Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.737808 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8lfjc\" (UniqueName: \"kubernetes.io/projected/7ce0109d-f162-4ce7-b957-34ba84e8e377-kube-api-access-8lfjc\") pod \"cinder-43e3-account-create-update-fdlrr\" (UID: \"7ce0109d-f162-4ce7-b957-34ba84e8e377\") " pod="openstack-kuttl-tests/cinder-43e3-account-create-update-fdlrr" Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.737841 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa57dfc3-11b4-48af-a78c-00463e8894bf-combined-ca-bundle\") pod \"barbican-keystone-listener-8f4cc76c4-9qkpn\" (UID: \"fa57dfc3-11b4-48af-a78c-00463e8894bf\") " pod="openstack-kuttl-tests/barbican-keystone-listener-8f4cc76c4-9qkpn" Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.737857 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bfb2b456-ae0e-42ca-a227-428a626f1e3e-operator-scripts\") pod \"neutron-db-create-2kmst\" (UID: \"bfb2b456-ae0e-42ca-a227-428a626f1e3e\") " pod="openstack-kuttl-tests/neutron-db-create-2kmst" Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.737920 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-95lbq\" (UniqueName: \"kubernetes.io/projected/bfb2b456-ae0e-42ca-a227-428a626f1e3e-kube-api-access-95lbq\") pod \"neutron-db-create-2kmst\" (UID: \"bfb2b456-ae0e-42ca-a227-428a626f1e3e\") " 
pod="openstack-kuttl-tests/neutron-db-create-2kmst" Jan 20 17:28:00 crc kubenswrapper[4558]: E0120 17:28:00.738147 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:28:00 crc kubenswrapper[4558]: E0120 17:28:00.738211 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fa57dfc3-11b4-48af-a78c-00463e8894bf-combined-ca-bundle podName:fa57dfc3-11b4-48af-a78c-00463e8894bf nodeName:}" failed. No retries permitted until 2026-01-20 17:28:01.738196194 +0000 UTC m=+2775.498534161 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/fa57dfc3-11b4-48af-a78c-00463e8894bf-combined-ca-bundle") pod "barbican-keystone-listener-8f4cc76c4-9qkpn" (UID: "fa57dfc3-11b4-48af-a78c-00463e8894bf") : secret "combined-ca-bundle" not found Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.750301 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-43e3-account-create-update-fdlrr"] Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.761459 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-65cd-account-create-update-dr789"] Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.772151 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-b136-account-create-update-n9qdv" Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.935090 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7ce0109d-f162-4ce7-b957-34ba84e8e377-operator-scripts\") pod \"cinder-43e3-account-create-update-fdlrr\" (UID: \"7ce0109d-f162-4ce7-b957-34ba84e8e377\") " pod="openstack-kuttl-tests/cinder-43e3-account-create-update-fdlrr" Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.935721 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8lfjc\" (UniqueName: \"kubernetes.io/projected/7ce0109d-f162-4ce7-b957-34ba84e8e377-kube-api-access-8lfjc\") pod \"cinder-43e3-account-create-update-fdlrr\" (UID: \"7ce0109d-f162-4ce7-b957-34ba84e8e377\") " pod="openstack-kuttl-tests/cinder-43e3-account-create-update-fdlrr" Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.935817 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bfb2b456-ae0e-42ca-a227-428a626f1e3e-operator-scripts\") pod \"neutron-db-create-2kmst\" (UID: \"bfb2b456-ae0e-42ca-a227-428a626f1e3e\") " pod="openstack-kuttl-tests/neutron-db-create-2kmst" Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.935918 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-95lbq\" (UniqueName: \"kubernetes.io/projected/bfb2b456-ae0e-42ca-a227-428a626f1e3e-kube-api-access-95lbq\") pod \"neutron-db-create-2kmst\" (UID: \"bfb2b456-ae0e-42ca-a227-428a626f1e3e\") " pod="openstack-kuttl-tests/neutron-db-create-2kmst" Jan 20 17:28:00 crc kubenswrapper[4558]: E0120 17:28:00.936727 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Jan 20 17:28:00 crc kubenswrapper[4558]: E0120 17:28:00.936822 4558 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/configmap/48cfc6e5-774d-4e7d-8103-f6a3260ea14c-config-data podName:48cfc6e5-774d-4e7d-8103-f6a3260ea14c nodeName:}" failed. No retries permitted until 2026-01-20 17:28:01.936799233 +0000 UTC m=+2775.697137200 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/48cfc6e5-774d-4e7d-8103-f6a3260ea14c-config-data") pod "rabbitmq-cell1-server-0" (UID: "48cfc6e5-774d-4e7d-8103-f6a3260ea14c") : configmap "rabbitmq-cell1-config-data" not found Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.937619 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bfb2b456-ae0e-42ca-a227-428a626f1e3e-operator-scripts\") pod \"neutron-db-create-2kmst\" (UID: \"bfb2b456-ae0e-42ca-a227-428a626f1e3e\") " pod="openstack-kuttl-tests/neutron-db-create-2kmst" Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.939841 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7ce0109d-f162-4ce7-b957-34ba84e8e377-operator-scripts\") pod \"cinder-43e3-account-create-update-fdlrr\" (UID: \"7ce0109d-f162-4ce7-b957-34ba84e8e377\") " pod="openstack-kuttl-tests/cinder-43e3-account-create-update-fdlrr" Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.961972 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-b136-account-create-update-x288d"] Jan 20 17:28:00 crc kubenswrapper[4558]: I0120 17:28:00.990744 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8lfjc\" (UniqueName: \"kubernetes.io/projected/7ce0109d-f162-4ce7-b957-34ba84e8e377-kube-api-access-8lfjc\") pod \"cinder-43e3-account-create-update-fdlrr\" (UID: \"7ce0109d-f162-4ce7-b957-34ba84e8e377\") " pod="openstack-kuttl-tests/cinder-43e3-account-create-update-fdlrr" Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.030625 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstackclient" Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.059822 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-95lbq\" (UniqueName: \"kubernetes.io/projected/bfb2b456-ae0e-42ca-a227-428a626f1e3e-kube-api-access-95lbq\") pod \"neutron-db-create-2kmst\" (UID: \"bfb2b456-ae0e-42ca-a227-428a626f1e3e\") " pod="openstack-kuttl-tests/neutron-db-create-2kmst" Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.070130 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/4d845835-77d4-4d8a-9fad-0286ad5291ae-openstack-config-secret\") pod \"4d845835-77d4-4d8a-9fad-0286ad5291ae\" (UID: \"4d845835-77d4-4d8a-9fad-0286ad5291ae\") " Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.070366 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d845835-77d4-4d8a-9fad-0286ad5291ae-combined-ca-bundle\") pod \"4d845835-77d4-4d8a-9fad-0286ad5291ae\" (UID: \"4d845835-77d4-4d8a-9fad-0286ad5291ae\") " Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.070429 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4lw8x\" (UniqueName: \"kubernetes.io/projected/4d845835-77d4-4d8a-9fad-0286ad5291ae-kube-api-access-4lw8x\") pod \"4d845835-77d4-4d8a-9fad-0286ad5291ae\" (UID: \"4d845835-77d4-4d8a-9fad-0286ad5291ae\") " Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.083144 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9b28c41-c054-49f3-87d2-a8c15e6124de-combined-ca-bundle\") pod \"barbican-worker-74d54f756c-g9zzk\" (UID: \"b9b28c41-c054-49f3-87d2-a8c15e6124de\") " pod="openstack-kuttl-tests/barbican-worker-74d54f756c-g9zzk" Jan 20 17:28:01 crc kubenswrapper[4558]: E0120 17:28:01.083467 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:28:01 crc kubenswrapper[4558]: E0120 17:28:01.083536 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b9b28c41-c054-49f3-87d2-a8c15e6124de-combined-ca-bundle podName:b9b28c41-c054-49f3-87d2-a8c15e6124de nodeName:}" failed. No retries permitted until 2026-01-20 17:28:02.083512675 +0000 UTC m=+2775.843850642 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/b9b28c41-c054-49f3-87d2-a8c15e6124de-combined-ca-bundle") pod "barbican-worker-74d54f756c-g9zzk" (UID: "b9b28c41-c054-49f3-87d2-a8c15e6124de") : secret "combined-ca-bundle" not found Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.111847 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4d845835-77d4-4d8a-9fad-0286ad5291ae-kube-api-access-4lw8x" (OuterVolumeSpecName: "kube-api-access-4lw8x") pod "4d845835-77d4-4d8a-9fad-0286ad5291ae" (UID: "4d845835-77d4-4d8a-9fad-0286ad5291ae"). InnerVolumeSpecName "kube-api-access-4lw8x". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.140066 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-43e3-account-create-update-fdlrr" Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.165330 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4d845835-77d4-4d8a-9fad-0286ad5291ae-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4d845835-77d4-4d8a-9fad-0286ad5291ae" (UID: "4d845835-77d4-4d8a-9fad-0286ad5291ae"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.209043 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/4d845835-77d4-4d8a-9fad-0286ad5291ae-openstack-config\") pod \"4d845835-77d4-4d8a-9fad-0286ad5291ae\" (UID: \"4d845835-77d4-4d8a-9fad-0286ad5291ae\") " Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.210089 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d845835-77d4-4d8a-9fad-0286ad5291ae-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.210105 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4lw8x\" (UniqueName: \"kubernetes.io/projected/4d845835-77d4-4d8a-9fad-0286ad5291ae-kube-api-access-4lw8x\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:01 crc kubenswrapper[4558]: E0120 17:28:01.210226 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:28:01 crc kubenswrapper[4558]: E0120 17:28:01.210274 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cc4089c6-71ea-4503-b54c-18777fcc3c48-combined-ca-bundle podName:cc4089c6-71ea-4503-b54c-18777fcc3c48 nodeName:}" failed. No retries permitted until 2026-01-20 17:28:02.21026009 +0000 UTC m=+2775.970598057 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/cc4089c6-71ea-4503-b54c-18777fcc3c48-combined-ca-bundle") pod "nova-metadata-0" (UID: "cc4089c6-71ea-4503-b54c-18777fcc3c48") : secret "combined-ca-bundle" not found Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.211715 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-b136-account-create-update-x288d"] Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.238158 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-db-create-2kmst" Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.276245 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4d845835-77d4-4d8a-9fad-0286ad5291ae-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "4d845835-77d4-4d8a-9fad-0286ad5291ae" (UID: "4d845835-77d4-4d8a-9fad-0286ad5291ae"). InnerVolumeSpecName "openstack-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.300673 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-db-create-thhtc"] Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.314262 4558 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/4d845835-77d4-4d8a-9fad-0286ad5291ae-openstack-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:01 crc kubenswrapper[4558]: E0120 17:28:01.314340 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 20 17:28:01 crc kubenswrapper[4558]: E0120 17:28:01.314381 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/d692722b-f7fd-447c-8b7a-f56cff940d91-config-data podName:d692722b-f7fd-447c-8b7a-f56cff940d91 nodeName:}" failed. No retries permitted until 2026-01-20 17:28:03.314367931 +0000 UTC m=+2777.074705899 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/d692722b-f7fd-447c-8b7a-f56cff940d91-config-data") pod "rabbitmq-server-0" (UID: "d692722b-f7fd-447c-8b7a-f56cff940d91") : configmap "rabbitmq-config-data" not found Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.318225 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4d845835-77d4-4d8a-9fad-0286ad5291ae-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "4d845835-77d4-4d8a-9fad-0286ad5291ae" (UID: "4d845835-77d4-4d8a-9fad-0286ad5291ae"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.324704 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-db-create-thhtc"] Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.401965 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-e949-account-create-update-swzjb"] Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.413871 4558 scope.go:117] "RemoveContainer" containerID="e33ca27067cb626a71e4e70acb048cf75b1fdf4f714c880df6a27aa61155fa45" Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.414060 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstackclient" Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.416619 4558 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/4d845835-77d4-4d8a-9fad-0286ad5291ae-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.444397 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.444906 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovn-northd-0" podUID="8a111df9-8578-40bb-a672-b5d53305c873" containerName="ovn-northd" containerID="cri-o://c2fa33c63a4c25f68e800c109ef61e283f605e39ebdcd676406536456186262d" gracePeriod=30 Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.445647 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovn-northd-0" podUID="8a111df9-8578-40bb-a672-b5d53305c873" containerName="openstack-network-exporter" containerID="cri-o://d3ba0972279d7175889b46187fb32615d720f05a63a7bbfedd1ca9dc2eff5075" gracePeriod=30 Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.483356 4558 generic.go:334] "Generic (PLEG): container finished" podID="f6a0777b-d04f-451b-9e3d-3f9ab0e19df8" containerID="516792e38f223a8dda033868458240450dfed4fe467c3a9a1c42b3301734bf44" exitCode=143 Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.484821 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"f6a0777b-d04f-451b-9e3d-3f9ab0e19df8","Type":"ContainerDied","Data":"516792e38f223a8dda033868458240450dfed4fe467c3a9a1c42b3301734bf44"} Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.497227 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-e949-account-create-update-swzjb"] Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.505265 4558 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openstack-kuttl-tests/nova-metadata-0" secret="" err="secret \"nova-nova-dockercfg-k8zdz\" not found" Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.506927 4558 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openstack-kuttl-tests/cinder-scheduler-0" secret="" err="secret \"cinder-cinder-dockercfg-msjfn\" not found" Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.507651 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"c1afb831-f3e4-4356-ab86-713eb0beca39","Type":"ContainerStarted","Data":"4073b49237446b10090528b017b317faf894679a9662161b1fb6fedebcfc14ed"} Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.507785 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/openstackclient" podUID="e383e708-a471-4904-bfc7-eead6a5c76dc" containerName="openstackclient" containerID="cri-o://fc05c1e78ad7a92e2b78d674adb5592b41e556323420c6070785f62676598f12" gracePeriod=2 Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.512438 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-db-create-pxxqs"] Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.523461 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-db-create-pxxqs" Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.523732 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-db-create-sz4kq"] Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.530184 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-db-create-sz4kq"] Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.542343 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-a772-account-create-update-27cpc"] Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.543827 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-a772-account-create-update-27cpc" Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.545103 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-api-db-secret" Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.546064 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-4kxm5" Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.549542 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-db-create-pxxqs"] Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.556982 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack-kuttl-tests/ovn-northd-0" podUID="8a111df9-8578-40bb-a672-b5d53305c873" containerName="ovn-northd" probeResult="failure" output="" Jan 20 17:28:01 crc kubenswrapper[4558]: E0120 17:28:01.594135 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:28:01 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:28:01 crc kubenswrapper[4558]: Jan 20 17:28:01 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:28:01 crc kubenswrapper[4558]: Jan 20 17:28:01 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:28:01 crc kubenswrapper[4558]: Jan 20 17:28:01 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:28:01 crc kubenswrapper[4558]: Jan 20 17:28:01 crc kubenswrapper[4558]: if [ -n "placement" ]; then Jan 20 17:28:01 crc kubenswrapper[4558]: GRANT_DATABASE="placement" Jan 20 17:28:01 crc kubenswrapper[4558]: else Jan 20 17:28:01 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:28:01 crc kubenswrapper[4558]: fi Jan 20 17:28:01 crc kubenswrapper[4558]: Jan 20 17:28:01 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:28:01 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:28:01 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:28:01 crc kubenswrapper[4558]: # 3. 
create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:28:01 crc kubenswrapper[4558]: # support updates Jan 20 17:28:01 crc kubenswrapper[4558]: Jan 20 17:28:01 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:28:01 crc kubenswrapper[4558]: E0120 17:28:01.595420 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"placement-db-secret\\\" not found\"" pod="openstack-kuttl-tests/placement-7ab2-account-create-update-c5qqm" podUID="b201974e-8bb6-412a-95d5-cce7a95e4528" Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.624427 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aeb478ae-4d88-4f64-be68-ccebfe589ff1-scripts\") pod \"aeb478ae-4d88-4f64-be68-ccebfe589ff1\" (UID: \"aeb478ae-4d88-4f64-be68-ccebfe589ff1\") " Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.624502 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aeb478ae-4d88-4f64-be68-ccebfe589ff1-combined-ca-bundle\") pod \"aeb478ae-4d88-4f64-be68-ccebfe589ff1\" (UID: \"aeb478ae-4d88-4f64-be68-ccebfe589ff1\") " Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.624632 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c8667\" (UniqueName: \"kubernetes.io/projected/aeb478ae-4d88-4f64-be68-ccebfe589ff1-kube-api-access-c8667\") pod \"aeb478ae-4d88-4f64-be68-ccebfe589ff1\" (UID: \"aeb478ae-4d88-4f64-be68-ccebfe589ff1\") " Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.624691 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aeb478ae-4d88-4f64-be68-ccebfe589ff1-config-data\") pod \"aeb478ae-4d88-4f64-be68-ccebfe589ff1\" (UID: \"aeb478ae-4d88-4f64-be68-ccebfe589ff1\") " Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.642370 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e0d8717e-1693-4222-aebb-024d291cb0d5-operator-scripts\") pod \"nova-api-a772-account-create-update-27cpc\" (UID: \"e0d8717e-1693-4222-aebb-024d291cb0d5\") " pod="openstack-kuttl-tests/nova-api-a772-account-create-update-27cpc" Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.642578 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mxxtr\" (UniqueName: \"kubernetes.io/projected/e0d8717e-1693-4222-aebb-024d291cb0d5-kube-api-access-mxxtr\") pod \"nova-api-a772-account-create-update-27cpc\" (UID: \"e0d8717e-1693-4222-aebb-024d291cb0d5\") " pod="openstack-kuttl-tests/nova-api-a772-account-create-update-27cpc" Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.642800 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4sp5s\" (UniqueName: \"kubernetes.io/projected/ed3c56f1-4bbb-4590-8b19-a0de467537ad-kube-api-access-4sp5s\") pod \"barbican-db-create-pxxqs\" (UID: \"ed3c56f1-4bbb-4590-8b19-a0de467537ad\") " pod="openstack-kuttl-tests/barbican-db-create-pxxqs" Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.642879 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ed3c56f1-4bbb-4590-8b19-a0de467537ad-operator-scripts\") pod \"barbican-db-create-pxxqs\" (UID: \"ed3c56f1-4bbb-4590-8b19-a0de467537ad\") " pod="openstack-kuttl-tests/barbican-db-create-pxxqs" Jan 20 17:28:01 crc kubenswrapper[4558]: E0120 17:28:01.643325 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cinder-scripts: secret "cinder-scripts" not found Jan 20 17:28:01 crc kubenswrapper[4558]: E0120 17:28:01.643352 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cinder-scheduler-config-data: secret "cinder-scheduler-config-data" not found Jan 20 17:28:01 crc kubenswrapper[4558]: E0120 17:28:01.643378 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-scripts podName:c1afb831-f3e4-4356-ab86-713eb0beca39 nodeName:}" failed. No retries permitted until 2026-01-20 17:28:02.143361074 +0000 UTC m=+2775.903699041 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "scripts" (UniqueName: "kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-scripts") pod "cinder-scheduler-0" (UID: "c1afb831-f3e4-4356-ab86-713eb0beca39") : secret "cinder-scripts" not found Jan 20 17:28:01 crc kubenswrapper[4558]: E0120 17:28:01.643998 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-config-data-custom podName:c1afb831-f3e4-4356-ab86-713eb0beca39 nodeName:}" failed. No retries permitted until 2026-01-20 17:28:02.143405968 +0000 UTC m=+2775.903743935 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data-custom" (UniqueName: "kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-config-data-custom") pod "cinder-scheduler-0" (UID: "c1afb831-f3e4-4356-ab86-713eb0beca39") : secret "cinder-scheduler-config-data" not found Jan 20 17:28:01 crc kubenswrapper[4558]: E0120 17:28:01.644329 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cinder-config-data: secret "cinder-config-data" not found Jan 20 17:28:01 crc kubenswrapper[4558]: E0120 17:28:01.644387 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-config-data podName:c1afb831-f3e4-4356-ab86-713eb0beca39 nodeName:}" failed. No retries permitted until 2026-01-20 17:28:02.144371604 +0000 UTC m=+2775.904709571 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-config-data") pod "cinder-scheduler-0" (UID: "c1afb831-f3e4-4356-ab86-713eb0beca39") : secret "cinder-config-data" not found Jan 20 17:28:01 crc kubenswrapper[4558]: E0120 17:28:01.644431 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:28:01 crc kubenswrapper[4558]: E0120 17:28:01.644455 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-combined-ca-bundle podName:c1afb831-f3e4-4356-ab86-713eb0beca39 nodeName:}" failed. No retries permitted until 2026-01-20 17:28:02.144447366 +0000 UTC m=+2775.904785323 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-combined-ca-bundle") pod "cinder-scheduler-0" (UID: "c1afb831-f3e4-4356-ab86-713eb0beca39") : secret "combined-ca-bundle" not found Jan 20 17:28:01 crc kubenswrapper[4558]: E0120 17:28:01.644702 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/nova-metadata-config-data: secret "nova-metadata-config-data" not found Jan 20 17:28:01 crc kubenswrapper[4558]: E0120 17:28:01.644734 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cc4089c6-71ea-4503-b54c-18777fcc3c48-config-data podName:cc4089c6-71ea-4503-b54c-18777fcc3c48 nodeName:}" failed. No retries permitted until 2026-01-20 17:28:02.144723836 +0000 UTC m=+2775.905061803 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/cc4089c6-71ea-4503-b54c-18777fcc3c48-config-data") pod "nova-metadata-0" (UID: "cc4089c6-71ea-4503-b54c-18777fcc3c48") : secret "nova-metadata-config-data" not found Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.654366 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-db-sync-rmm2x"] Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.661605 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aeb478ae-4d88-4f64-be68-ccebfe589ff1-scripts" (OuterVolumeSpecName: "scripts") pod "aeb478ae-4d88-4f64-be68-ccebfe589ff1" (UID: "aeb478ae-4d88-4f64-be68-ccebfe589ff1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.662541 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/placement-db-sync-rmm2x"] Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.675093 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aeb478ae-4d88-4f64-be68-ccebfe589ff1-kube-api-access-c8667" (OuterVolumeSpecName: "kube-api-access-c8667") pod "aeb478ae-4d88-4f64-be68-ccebfe589ff1" (UID: "aeb478ae-4d88-4f64-be68-ccebfe589ff1"). InnerVolumeSpecName "kube-api-access-c8667". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.685342 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-a772-account-create-update-27cpc"] Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.700758 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-db-create-4t4nh"] Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.721124 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-db-create-4t4nh"] Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.730007 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-43e3-account-create-update-l4gms"] Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.736272 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aeb478ae-4d88-4f64-be68-ccebfe589ff1-config-data" (OuterVolumeSpecName: "config-data") pod "aeb478ae-4d88-4f64-be68-ccebfe589ff1" (UID: "aeb478ae-4d88-4f64-be68-ccebfe589ff1"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.744894 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e0d8717e-1693-4222-aebb-024d291cb0d5-operator-scripts\") pod \"nova-api-a772-account-create-update-27cpc\" (UID: \"e0d8717e-1693-4222-aebb-024d291cb0d5\") " pod="openstack-kuttl-tests/nova-api-a772-account-create-update-27cpc" Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.744981 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa57dfc3-11b4-48af-a78c-00463e8894bf-combined-ca-bundle\") pod \"barbican-keystone-listener-8f4cc76c4-9qkpn\" (UID: \"fa57dfc3-11b4-48af-a78c-00463e8894bf\") " pod="openstack-kuttl-tests/barbican-keystone-listener-8f4cc76c4-9qkpn" Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.745015 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mxxtr\" (UniqueName: \"kubernetes.io/projected/e0d8717e-1693-4222-aebb-024d291cb0d5-kube-api-access-mxxtr\") pod \"nova-api-a772-account-create-update-27cpc\" (UID: \"e0d8717e-1693-4222-aebb-024d291cb0d5\") " pod="openstack-kuttl-tests/nova-api-a772-account-create-update-27cpc" Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.745097 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4sp5s\" (UniqueName: \"kubernetes.io/projected/ed3c56f1-4bbb-4590-8b19-a0de467537ad-kube-api-access-4sp5s\") pod \"barbican-db-create-pxxqs\" (UID: \"ed3c56f1-4bbb-4590-8b19-a0de467537ad\") " pod="openstack-kuttl-tests/barbican-db-create-pxxqs" Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.745142 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ed3c56f1-4bbb-4590-8b19-a0de467537ad-operator-scripts\") pod \"barbican-db-create-pxxqs\" (UID: \"ed3c56f1-4bbb-4590-8b19-a0de467537ad\") " pod="openstack-kuttl-tests/barbican-db-create-pxxqs" Jan 20 17:28:01 crc kubenswrapper[4558]: E0120 17:28:01.747981 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:28:01 crc kubenswrapper[4558]: E0120 17:28:01.748041 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fa57dfc3-11b4-48af-a78c-00463e8894bf-combined-ca-bundle podName:fa57dfc3-11b4-48af-a78c-00463e8894bf nodeName:}" failed. No retries permitted until 2026-01-20 17:28:03.748022766 +0000 UTC m=+2777.508360733 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/fa57dfc3-11b4-48af-a78c-00463e8894bf-combined-ca-bundle") pod "barbican-keystone-listener-8f4cc76c4-9qkpn" (UID: "fa57dfc3-11b4-48af-a78c-00463e8894bf") : secret "combined-ca-bundle" not found Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.748552 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e0d8717e-1693-4222-aebb-024d291cb0d5-operator-scripts\") pod \"nova-api-a772-account-create-update-27cpc\" (UID: \"e0d8717e-1693-4222-aebb-024d291cb0d5\") " pod="openstack-kuttl-tests/nova-api-a772-account-create-update-27cpc" Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.748888 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-43e3-account-create-update-l4gms"] Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.763420 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aeb478ae-4d88-4f64-be68-ccebfe589ff1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "aeb478ae-4d88-4f64-be68-ccebfe589ff1" (UID: "aeb478ae-4d88-4f64-be68-ccebfe589ff1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.771412 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell0-f949-account-create-update-v2zng"] Jan 20 17:28:01 crc kubenswrapper[4558]: E0120 17:28:01.772004 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aeb478ae-4d88-4f64-be68-ccebfe589ff1" containerName="nova-manage" Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.772021 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="aeb478ae-4d88-4f64-be68-ccebfe589ff1" containerName="nova-manage" Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.772237 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="aeb478ae-4d88-4f64-be68-ccebfe589ff1" containerName="nova-manage" Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.775366 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-f949-account-create-update-v2zng" Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.777982 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-db-secret" Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.779231 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aeb478ae-4d88-4f64-be68-ccebfe589ff1-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.779277 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c8667\" (UniqueName: \"kubernetes.io/projected/aeb478ae-4d88-4f64-be68-ccebfe589ff1-kube-api-access-c8667\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.779292 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aeb478ae-4d88-4f64-be68-ccebfe589ff1-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.779783 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ed3c56f1-4bbb-4590-8b19-a0de467537ad-operator-scripts\") pod \"barbican-db-create-pxxqs\" (UID: \"ed3c56f1-4bbb-4590-8b19-a0de467537ad\") " pod="openstack-kuttl-tests/barbican-db-create-pxxqs" Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.792049 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-f949-account-create-update-v2zng"] Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.804855 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mxxtr\" (UniqueName: \"kubernetes.io/projected/e0d8717e-1693-4222-aebb-024d291cb0d5-kube-api-access-mxxtr\") pod \"nova-api-a772-account-create-update-27cpc\" (UID: \"e0d8717e-1693-4222-aebb-024d291cb0d5\") " pod="openstack-kuttl-tests/nova-api-a772-account-create-update-27cpc" Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.805939 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4sp5s\" (UniqueName: \"kubernetes.io/projected/ed3c56f1-4bbb-4590-8b19-a0de467537ad-kube-api-access-4sp5s\") pod \"barbican-db-create-pxxqs\" (UID: \"ed3c56f1-4bbb-4590-8b19-a0de467537ad\") " pod="openstack-kuttl-tests/barbican-db-create-pxxqs" Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.806010 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.806781 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovsdbserver-nb-0" podUID="a2068ccb-0c0d-4b32-9063-082a4c395070" containerName="openstack-network-exporter" containerID="cri-o://0a200f6996ecf5951d17224c62d14bcea68ab49867b472e280d5c47ecfd50ad7" gracePeriod=300 Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.816999 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-db-create-gh87k"] Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.824610 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-a772-account-create-update-sfz2c"] Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.833449 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-db-create-gh87k"] Jan 20 
17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.838799 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-a772-account-create-update-sfz2c"] Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.849229 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-af2b-account-create-update-x6tw5"] Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.850639 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-af2b-account-create-update-x6tw5" Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.853858 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-db-secret" Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.856558 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.857284 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovsdbserver-sb-0" podUID="7b6a0803-befe-4426-9e2a-a04ac12f2d7c" containerName="openstack-network-exporter" containerID="cri-o://e14a2ace059c2e1c3be33ab3bdf7264642df45d603b1ef56bea3fa59cbb4787b" gracePeriod=300 Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.882991 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3ac6a4e2-150b-4137-b547-33cbdd1137b7-operator-scripts\") pod \"nova-cell0-f949-account-create-update-v2zng\" (UID: \"3ac6a4e2-150b-4137-b547-33cbdd1137b7\") " pod="openstack-kuttl-tests/nova-cell0-f949-account-create-update-v2zng" Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.883129 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rf6ql\" (UniqueName: \"kubernetes.io/projected/3ac6a4e2-150b-4137-b547-33cbdd1137b7-kube-api-access-rf6ql\") pod \"nova-cell0-f949-account-create-update-v2zng\" (UID: \"3ac6a4e2-150b-4137-b547-33cbdd1137b7\") " pod="openstack-kuttl-tests/nova-cell0-f949-account-create-update-v2zng" Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.883201 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aeb478ae-4d88-4f64-be68-ccebfe589ff1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.891892 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-af2b-account-create-update-x6tw5"] Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.929287 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-f949-account-create-update-4ndsp"] Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.935825 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-af2b-account-create-update-ckc4c"] Jan 20 17:28:01 crc kubenswrapper[4558]: W0120 17:28:01.946326 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddf28560c_92e6_4003_a08f_b8691fa43300.slice/crio-bb6338053e49720b1e74bb1a48e7513dde37dd08f6b58323abe135e98b714799 WatchSource:0}: Error finding container bb6338053e49720b1e74bb1a48e7513dde37dd08f6b58323abe135e98b714799: Status 404 returned error can't find the container with id 
bb6338053e49720b1e74bb1a48e7513dde37dd08f6b58323abe135e98b714799 Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.951760 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovsdbserver-sb-0" podUID="7b6a0803-befe-4426-9e2a-a04ac12f2d7c" containerName="ovsdbserver-sb" containerID="cri-o://dd2c0dcba96a9cb823d300977fee811cbbd5c8ce5dca2787726542941426f5c2" gracePeriod=300 Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.970949 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-proxy-5f7fb7b-s2dxn"] Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.985593 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rf6ql\" (UniqueName: \"kubernetes.io/projected/3ac6a4e2-150b-4137-b547-33cbdd1137b7-kube-api-access-rf6ql\") pod \"nova-cell0-f949-account-create-update-v2zng\" (UID: \"3ac6a4e2-150b-4137-b547-33cbdd1137b7\") " pod="openstack-kuttl-tests/nova-cell0-f949-account-create-update-v2zng" Jan 20 17:28:01 crc kubenswrapper[4558]: E0120 17:28:01.985725 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Jan 20 17:28:01 crc kubenswrapper[4558]: E0120 17:28:01.985799 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/48cfc6e5-774d-4e7d-8103-f6a3260ea14c-config-data podName:48cfc6e5-774d-4e7d-8103-f6a3260ea14c nodeName:}" failed. No retries permitted until 2026-01-20 17:28:03.985765712 +0000 UTC m=+2777.746103678 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/48cfc6e5-774d-4e7d-8103-f6a3260ea14c-config-data") pod "rabbitmq-cell1-server-0" (UID: "48cfc6e5-774d-4e7d-8103-f6a3260ea14c") : configmap "rabbitmq-cell1-config-data" not found Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.986009 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b69d0136-9748-45bf-a9ed-d23e0180f1b4-operator-scripts\") pod \"nova-cell1-af2b-account-create-update-x6tw5\" (UID: \"b69d0136-9748-45bf-a9ed-d23e0180f1b4\") " pod="openstack-kuttl-tests/nova-cell1-af2b-account-create-update-x6tw5" Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.986093 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4j88m\" (UniqueName: \"kubernetes.io/projected/b69d0136-9748-45bf-a9ed-d23e0180f1b4-kube-api-access-4j88m\") pod \"nova-cell1-af2b-account-create-update-x6tw5\" (UID: \"b69d0136-9748-45bf-a9ed-d23e0180f1b4\") " pod="openstack-kuttl-tests/nova-cell1-af2b-account-create-update-x6tw5" Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.986151 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3ac6a4e2-150b-4137-b547-33cbdd1137b7-operator-scripts\") pod \"nova-cell0-f949-account-create-update-v2zng\" (UID: \"3ac6a4e2-150b-4137-b547-33cbdd1137b7\") " pod="openstack-kuttl-tests/nova-cell0-f949-account-create-update-v2zng" Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.986960 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3ac6a4e2-150b-4137-b547-33cbdd1137b7-operator-scripts\") pod 
\"nova-cell0-f949-account-create-update-v2zng\" (UID: \"3ac6a4e2-150b-4137-b547-33cbdd1137b7\") " pod="openstack-kuttl-tests/nova-cell0-f949-account-create-update-v2zng" Jan 20 17:28:01 crc kubenswrapper[4558]: I0120 17:28:01.995275 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-af2b-account-create-update-ckc4c"] Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.011101 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-db-create-pxxqs" Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.011464 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rf6ql\" (UniqueName: \"kubernetes.io/projected/3ac6a4e2-150b-4137-b547-33cbdd1137b7-kube-api-access-rf6ql\") pod \"nova-cell0-f949-account-create-update-v2zng\" (UID: \"3ac6a4e2-150b-4137-b547-33cbdd1137b7\") " pod="openstack-kuttl-tests/nova-cell0-f949-account-create-update-v2zng" Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.014532 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovsdbserver-nb-0" podUID="a2068ccb-0c0d-4b32-9063-082a4c395070" containerName="ovsdbserver-nb" containerID="cri-o://3cb92b2ab723d670d8405be0316a236c7b450aa82c756082192092f8f2f20fad" gracePeriod=300 Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.030540 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-a772-account-create-update-27cpc" Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.036197 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-db-sync-gn4wj"] Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.089512 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-db-sync-gn4wj"] Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.091064 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b69d0136-9748-45bf-a9ed-d23e0180f1b4-operator-scripts\") pod \"nova-cell1-af2b-account-create-update-x6tw5\" (UID: \"b69d0136-9748-45bf-a9ed-d23e0180f1b4\") " pod="openstack-kuttl-tests/nova-cell1-af2b-account-create-update-x6tw5" Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.091117 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9b28c41-c054-49f3-87d2-a8c15e6124de-combined-ca-bundle\") pod \"barbican-worker-74d54f756c-g9zzk\" (UID: \"b9b28c41-c054-49f3-87d2-a8c15e6124de\") " pod="openstack-kuttl-tests/barbican-worker-74d54f756c-g9zzk" Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.091145 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4j88m\" (UniqueName: \"kubernetes.io/projected/b69d0136-9748-45bf-a9ed-d23e0180f1b4-kube-api-access-4j88m\") pod \"nova-cell1-af2b-account-create-update-x6tw5\" (UID: \"b69d0136-9748-45bf-a9ed-d23e0180f1b4\") " pod="openstack-kuttl-tests/nova-cell1-af2b-account-create-update-x6tw5" Jan 20 17:28:02 crc kubenswrapper[4558]: E0120 17:28:02.091232 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-cell1-scripts: configmap "openstack-cell1-scripts" not found Jan 20 17:28:02 crc kubenswrapper[4558]: E0120 17:28:02.091301 4558 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/configmap/b69d0136-9748-45bf-a9ed-d23e0180f1b4-operator-scripts podName:b69d0136-9748-45bf-a9ed-d23e0180f1b4 nodeName:}" failed. No retries permitted until 2026-01-20 17:28:02.591280087 +0000 UTC m=+2776.351618053 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/b69d0136-9748-45bf-a9ed-d23e0180f1b4-operator-scripts") pod "nova-cell1-af2b-account-create-update-x6tw5" (UID: "b69d0136-9748-45bf-a9ed-d23e0180f1b4") : configmap "openstack-cell1-scripts" not found Jan 20 17:28:02 crc kubenswrapper[4558]: E0120 17:28:02.091465 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:28:02 crc kubenswrapper[4558]: E0120 17:28:02.091513 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b9b28c41-c054-49f3-87d2-a8c15e6124de-combined-ca-bundle podName:b9b28c41-c054-49f3-87d2-a8c15e6124de nodeName:}" failed. No retries permitted until 2026-01-20 17:28:04.091497926 +0000 UTC m=+2777.851835893 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/b9b28c41-c054-49f3-87d2-a8c15e6124de-combined-ca-bundle") pod "barbican-worker-74d54f756c-g9zzk" (UID: "b9b28c41-c054-49f3-87d2-a8c15e6124de") : secret "combined-ca-bundle" not found Jan 20 17:28:02 crc kubenswrapper[4558]: E0120 17:28:02.098103 4558 projected.go:194] Error preparing data for projected volume kube-api-access-4j88m for pod openstack-kuttl-tests/nova-cell1-af2b-account-create-update-x6tw5: failed to fetch token: serviceaccounts "galera-openstack-cell1" not found Jan 20 17:28:02 crc kubenswrapper[4558]: E0120 17:28:02.098138 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/b69d0136-9748-45bf-a9ed-d23e0180f1b4-kube-api-access-4j88m podName:b69d0136-9748-45bf-a9ed-d23e0180f1b4 nodeName:}" failed. No retries permitted until 2026-01-20 17:28:02.598128332 +0000 UTC m=+2776.358466299 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-4j88m" (UniqueName: "kubernetes.io/projected/b69d0136-9748-45bf-a9ed-d23e0180f1b4-kube-api-access-4j88m") pod "nova-cell1-af2b-account-create-update-x6tw5" (UID: "b69d0136-9748-45bf-a9ed-d23e0180f1b4") : failed to fetch token: serviceaccounts "galera-openstack-cell1" not found Jan 20 17:28:02 crc kubenswrapper[4558]: E0120 17:28:02.106095 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:28:02 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:28:02 crc kubenswrapper[4558]: Jan 20 17:28:02 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:28:02 crc kubenswrapper[4558]: Jan 20 17:28:02 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:28:02 crc kubenswrapper[4558]: Jan 20 17:28:02 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:28:02 crc kubenswrapper[4558]: Jan 20 17:28:02 crc kubenswrapper[4558]: if [ -n "glance" ]; then Jan 20 17:28:02 crc kubenswrapper[4558]: GRANT_DATABASE="glance" Jan 20 17:28:02 crc kubenswrapper[4558]: else Jan 20 17:28:02 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:28:02 crc kubenswrapper[4558]: fi Jan 20 17:28:02 crc kubenswrapper[4558]: Jan 20 17:28:02 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:28:02 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:28:02 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:28:02 crc kubenswrapper[4558]: # 3. create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:28:02 crc kubenswrapper[4558]: # support updates Jan 20 17:28:02 crc kubenswrapper[4558]: Jan 20 17:28:02 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:28:02 crc kubenswrapper[4558]: E0120 17:28:02.109712 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"glance-db-secret\\\" not found\"" pod="openstack-kuttl-tests/glance-65cd-account-create-update-grtts" podUID="5f7a0ad9-436f-433c-9a91-cec4ffd3beeb" Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.124206 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell0-f949-account-create-update-4ndsp"] Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.127424 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-f949-account-create-update-v2zng" Jan 20 17:28:02 crc kubenswrapper[4558]: E0120 17:28:02.195710 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:28:02 crc kubenswrapper[4558]: E0120 17:28:02.195774 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-combined-ca-bundle podName:c1afb831-f3e4-4356-ab86-713eb0beca39 nodeName:}" failed. No retries permitted until 2026-01-20 17:28:03.195752712 +0000 UTC m=+2776.956090680 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-combined-ca-bundle") pod "cinder-scheduler-0" (UID: "c1afb831-f3e4-4356-ab86-713eb0beca39") : secret "combined-ca-bundle" not found Jan 20 17:28:02 crc kubenswrapper[4558]: E0120 17:28:02.196080 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cinder-config-data: secret "cinder-config-data" not found Jan 20 17:28:02 crc kubenswrapper[4558]: E0120 17:28:02.196102 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-config-data podName:c1afb831-f3e4-4356-ab86-713eb0beca39 nodeName:}" failed. No retries permitted until 2026-01-20 17:28:03.196095878 +0000 UTC m=+2776.956433835 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-config-data") pod "cinder-scheduler-0" (UID: "c1afb831-f3e4-4356-ab86-713eb0beca39") : secret "cinder-config-data" not found Jan 20 17:28:02 crc kubenswrapper[4558]: E0120 17:28:02.196134 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/nova-metadata-config-data: secret "nova-metadata-config-data" not found Jan 20 17:28:02 crc kubenswrapper[4558]: E0120 17:28:02.196149 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cc4089c6-71ea-4503-b54c-18777fcc3c48-config-data podName:cc4089c6-71ea-4503-b54c-18777fcc3c48 nodeName:}" failed. No retries permitted until 2026-01-20 17:28:03.196144539 +0000 UTC m=+2776.956482506 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/cc4089c6-71ea-4503-b54c-18777fcc3c48-config-data") pod "nova-metadata-0" (UID: "cc4089c6-71ea-4503-b54c-18777fcc3c48") : secret "nova-metadata-config-data" not found Jan 20 17:28:02 crc kubenswrapper[4558]: E0120 17:28:02.197296 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cinder-scheduler-config-data: secret "cinder-scheduler-config-data" not found Jan 20 17:28:02 crc kubenswrapper[4558]: E0120 17:28:02.197315 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-config-data-custom podName:c1afb831-f3e4-4356-ab86-713eb0beca39 nodeName:}" failed. No retries permitted until 2026-01-20 17:28:03.19730977 +0000 UTC m=+2776.957647737 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data-custom" (UniqueName: "kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-config-data-custom") pod "cinder-scheduler-0" (UID: "c1afb831-f3e4-4356-ab86-713eb0beca39") : secret "cinder-scheduler-config-data" not found Jan 20 17:28:02 crc kubenswrapper[4558]: E0120 17:28:02.197343 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cinder-scripts: secret "cinder-scripts" not found Jan 20 17:28:02 crc kubenswrapper[4558]: E0120 17:28:02.197360 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-scripts podName:c1afb831-f3e4-4356-ab86-713eb0beca39 nodeName:}" failed. No retries permitted until 2026-01-20 17:28:03.197355526 +0000 UTC m=+2776.957693482 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "scripts" (UniqueName: "kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-scripts") pod "cinder-scheduler-0" (UID: "c1afb831-f3e4-4356-ab86-713eb0beca39") : secret "cinder-scripts" not found Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.199219 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-db-sync-rq6qr"] Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.213139 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-db-sync-rq6qr"] Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.219281 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-db-create-f6jhs"] Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.235529 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-db-sync-92t4c"] Jan 20 17:28:02 crc kubenswrapper[4558]: W0120 17:28:02.270477 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1bcd505c_4754_4f41_b91d_6e488a669c93.slice/crio-84e60af078ecb3aa4c7a8e920a31923950c7f5a488c23a929fc9e13a11f6efe7 WatchSource:0}: Error finding container 84e60af078ecb3aa4c7a8e920a31923950c7f5a488c23a929fc9e13a11f6efe7: Status 404 returned error can't find the container with id 84e60af078ecb3aa4c7a8e920a31923950c7f5a488c23a929fc9e13a11f6efe7 Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.273481 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-zn4xn"] Jan 20 17:28:02 crc kubenswrapper[4558]: E0120 17:28:02.297816 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:28:02 crc kubenswrapper[4558]: E0120 17:28:02.297889 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cc4089c6-71ea-4503-b54c-18777fcc3c48-combined-ca-bundle podName:cc4089c6-71ea-4503-b54c-18777fcc3c48 nodeName:}" failed. No retries permitted until 2026-01-20 17:28:04.297872866 +0000 UTC m=+2778.058210823 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/cc4089c6-71ea-4503-b54c-18777fcc3c48-combined-ca-bundle") pod "nova-metadata-0" (UID: "cc4089c6-71ea-4503-b54c-18777fcc3c48") : secret "combined-ca-bundle" not found Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.308470 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-db-create-2c6lh"] Jan 20 17:28:02 crc kubenswrapper[4558]: E0120 17:28:02.328998 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:28:02 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:28:02 crc kubenswrapper[4558]: Jan 20 17:28:02 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:28:02 crc kubenswrapper[4558]: Jan 20 17:28:02 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:28:02 crc kubenswrapper[4558]: Jan 20 17:28:02 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:28:02 crc kubenswrapper[4558]: Jan 20 17:28:02 crc kubenswrapper[4558]: if [ -n "neutron" ]; then Jan 20 17:28:02 crc kubenswrapper[4558]: GRANT_DATABASE="neutron" Jan 20 17:28:02 crc kubenswrapper[4558]: else Jan 20 17:28:02 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:28:02 crc kubenswrapper[4558]: fi Jan 20 17:28:02 crc kubenswrapper[4558]: Jan 20 17:28:02 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:28:02 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:28:02 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:28:02 crc kubenswrapper[4558]: # 3. 
create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:28:02 crc kubenswrapper[4558]: # support updates Jan 20 17:28:02 crc kubenswrapper[4558]: Jan 20 17:28:02 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:28:02 crc kubenswrapper[4558]: E0120 17:28:02.332534 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"neutron-db-secret\\\" not found\"" pod="openstack-kuttl-tests/neutron-b136-account-create-update-n9qdv" podUID="1bcd505c-4754-4f41-b91d-6e488a669c93" Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.355669 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-storage-0"] Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.368349 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" containerName="account-server" containerID="cri-o://8ba1d4c7037c9af7af4ff6c69fdad67f28d557367c30a255c97945adb2a5670d" gracePeriod=30 Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.371261 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" containerName="object-server" containerID="cri-o://4a6334c4fa8ffc04d0aaf20255ac8f9132d2e26680c4b1f11d017cfb5b777eec" gracePeriod=30 Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.371520 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" containerName="container-updater" containerID="cri-o://8c8b5915d3326030a79e4f528fb27f011fc914ddc227bbc37139ba81a55e7e83" gracePeriod=30 Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.371652 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" containerName="container-auditor" containerID="cri-o://703117c0c32a1ceba745d2e72e8de7d8ffea3ca4a98e3434eda2bde5c61b0d36" gracePeriod=30 Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.371717 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" containerName="container-replicator" containerID="cri-o://19bf1b2c84ff2748887d3f5954817882ace006a585e9b1cf3ff55c2286de632f" gracePeriod=30 Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.371740 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" containerName="rsync" containerID="cri-o://2953f93c5ea7797971404b822588bdd28383773e127010c836cbd6ce7a1b5450" gracePeriod=30 Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.371719 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" containerName="swift-recon-cron" containerID="cri-o://94d135b3e27081e915874cdf29659d51450157f87f3b4f37332d5d83a3c11459" gracePeriod=30 Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.371811 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" 
podUID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" containerName="container-server" containerID="cri-o://70b3bec12ba85f03f2bb9c09bdc640f03db124a8b41fd0886ea7b738171db315" gracePeriod=30 Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.371826 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" containerName="object-expirer" containerID="cri-o://33faf2c0a93c93d45813868ee43337a14e962791dad79303fe6a9a0671ec791e" gracePeriod=30 Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.371863 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" containerName="account-reaper" containerID="cri-o://b9e271e5199c2c5986711fa373c13ad8aade0056ea94afaa4c8bdfa52c131009" gracePeriod=30 Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.371874 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" containerName="object-updater" containerID="cri-o://36e479ddabc13ddf3cd61164acd11174c752edf6961acec9ae67d8951043aee5" gracePeriod=30 Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.371899 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" containerName="account-auditor" containerID="cri-o://7355d501842ad1b14019ba4bbb3365f8da9a6587b24e3a344f6aeb0cbde4a6bd" gracePeriod=30 Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.371908 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" containerName="object-auditor" containerID="cri-o://5abed863186c32fde3a40dcffb538fd7112acbbdfd253d93751ff00096a299c5" gracePeriod=30 Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.371933 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" containerName="account-replicator" containerID="cri-o://2ee2e3e7ebe2d7413bfe073e0a35c416b50099cfb7c3db764047c0da6077f92e" gracePeriod=30 Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.371945 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" containerName="object-replicator" containerID="cri-o://d700282b5693986cac00fa3aff076f819077ba4d7e9191db243d0f5c076b2c4d" gracePeriod=30 Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.385579 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-db-create-f6jhs"] Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.411583 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-db-sync-92t4c"] Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.439657 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell0-db-create-2c6lh"] Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.475563 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-zn4xn"] Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.489963 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack-kuttl-tests/nova-cell1-db-create-pxpgt"] Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.495594 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-db-create-pxpgt"] Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.504572 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.509498 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-db-sync-t7b8z"] Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.519399 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-db-sync-t7b8z"] Jan 20 17:28:02 crc kubenswrapper[4558]: W0120 17:28:02.519992 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod54457478_e4f4_4088_bd18_e427314e1fb2.slice/crio-c125669d0d4932ece229fc505a1e8ff7c92a49f2cc4899179e3895b79383b019 WatchSource:0}: Error finding container c125669d0d4932ece229fc505a1e8ff7c92a49f2cc4899179e3895b79383b019: Status 404 returned error can't find the container with id c125669d0d4932ece229fc505a1e8ff7c92a49f2cc4899179e3895b79383b019 Jan 20 17:28:02 crc kubenswrapper[4558]: E0120 17:28:02.530348 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:28:02 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:28:02 crc kubenswrapper[4558]: Jan 20 17:28:02 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:28:02 crc kubenswrapper[4558]: Jan 20 17:28:02 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:28:02 crc kubenswrapper[4558]: Jan 20 17:28:02 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:28:02 crc kubenswrapper[4558]: Jan 20 17:28:02 crc kubenswrapper[4558]: if [ -n "barbican" ]; then Jan 20 17:28:02 crc kubenswrapper[4558]: GRANT_DATABASE="barbican" Jan 20 17:28:02 crc kubenswrapper[4558]: else Jan 20 17:28:02 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:28:02 crc kubenswrapper[4558]: fi Jan 20 17:28:02 crc kubenswrapper[4558]: Jan 20 17:28:02 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:28:02 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:28:02 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:28:02 crc kubenswrapper[4558]: # 3. 
create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:28:02 crc kubenswrapper[4558]: # support updates Jan 20 17:28:02 crc kubenswrapper[4558]: Jan 20 17:28:02 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:28:02 crc kubenswrapper[4558]: E0120 17:28:02.532563 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"barbican-db-secret\\\" not found\"" pod="openstack-kuttl-tests/barbican-e949-account-create-update-qd847" podUID="54457478-e4f4-4088-bd18-e427314e1fb2" Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.536871 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-4kxm5" Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.536915 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-cell-mapping-4kxm5"] Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.536952 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-4kxm5" event={"ID":"aeb478ae-4d88-4f64-be68-ccebfe589ff1","Type":"ContainerDied","Data":"8a98e18ecc83d87e03e736a4ef60a48ac281b0649b3d8eb29fa53ba47d5afcb7"} Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.536986 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8a98e18ecc83d87e03e736a4ef60a48ac281b0649b3d8eb29fa53ba47d5afcb7" Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.544052 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-55d4bc664d-k82t9"] Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.544413 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/placement-55d4bc664d-k82t9" podUID="66e24f38-a98c-4444-8ee4-352266267985" containerName="placement-log" containerID="cri-o://2c9fa6b9561c4a984ecb954991ac4f52ef42ab0fbc3c9bb866972b9d6d85f164" gracePeriod=30 Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.544550 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/placement-55d4bc664d-k82t9" podUID="66e24f38-a98c-4444-8ee4-352266267985" containerName="placement-api" containerID="cri-o://41bd033877c5a7cf1ffe2c31818a011adc08dd537bfdb844181969248bd15676" gracePeriod=30 Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.549219 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-7ab2-account-create-update-c5qqm"] Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.568141 4558 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." 
pod="openstack-kuttl-tests/kube-state-metrics-0" secret="" err="secret \"telemetry-ceilometer-dockercfg-5nz7c\" not found" Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.571200 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/rabbitmq-server-0" podUID="d692722b-f7fd-447c-8b7a-f56cff940d91" containerName="rabbitmq" containerID="cri-o://e662a87a8dfea3af31619f4746b123264a47d980ca2258670d31ac3490d07672" gracePeriod=604800 Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.598670 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovn-northd-0_8a111df9-8578-40bb-a672-b5d53305c873/ovn-northd/0.log" Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.599149 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.605553 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0e4d0567-44de-45db-977c-50fc9725b092" path="/var/lib/kubelet/pods/0e4d0567-44de-45db-977c-50fc9725b092/volumes" Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.613369 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b69d0136-9748-45bf-a9ed-d23e0180f1b4-operator-scripts\") pod \"nova-cell1-af2b-account-create-update-x6tw5\" (UID: \"b69d0136-9748-45bf-a9ed-d23e0180f1b4\") " pod="openstack-kuttl-tests/nova-cell1-af2b-account-create-update-x6tw5" Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.613439 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4j88m\" (UniqueName: \"kubernetes.io/projected/b69d0136-9748-45bf-a9ed-d23e0180f1b4-kube-api-access-4j88m\") pod \"nova-cell1-af2b-account-create-update-x6tw5\" (UID: \"b69d0136-9748-45bf-a9ed-d23e0180f1b4\") " pod="openstack-kuttl-tests/nova-cell1-af2b-account-create-update-x6tw5" Jan 20 17:28:02 crc kubenswrapper[4558]: E0120 17:28:02.613720 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-cell1-scripts: configmap "openstack-cell1-scripts" not found Jan 20 17:28:02 crc kubenswrapper[4558]: E0120 17:28:02.613764 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/b69d0136-9748-45bf-a9ed-d23e0180f1b4-operator-scripts podName:b69d0136-9748-45bf-a9ed-d23e0180f1b4 nodeName:}" failed. No retries permitted until 2026-01-20 17:28:03.613749799 +0000 UTC m=+2777.374087766 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/b69d0136-9748-45bf-a9ed-d23e0180f1b4-operator-scripts") pod "nova-cell1-af2b-account-create-update-x6tw5" (UID: "b69d0136-9748-45bf-a9ed-d23e0180f1b4") : configmap "openstack-cell1-scripts" not found Jan 20 17:28:02 crc kubenswrapper[4558]: E0120 17:28:02.617133 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:28:02 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:28:02 crc kubenswrapper[4558]: Jan 20 17:28:02 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:28:02 crc kubenswrapper[4558]: Jan 20 17:28:02 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:28:02 crc kubenswrapper[4558]: Jan 20 17:28:02 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:28:02 crc kubenswrapper[4558]: Jan 20 17:28:02 crc kubenswrapper[4558]: if [ -n "glance" ]; then Jan 20 17:28:02 crc kubenswrapper[4558]: GRANT_DATABASE="glance" Jan 20 17:28:02 crc kubenswrapper[4558]: else Jan 20 17:28:02 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:28:02 crc kubenswrapper[4558]: fi Jan 20 17:28:02 crc kubenswrapper[4558]: Jan 20 17:28:02 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:28:02 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:28:02 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:28:02 crc kubenswrapper[4558]: # 3. 
create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:28:02 crc kubenswrapper[4558]: # support updates Jan 20 17:28:02 crc kubenswrapper[4558]: Jan 20 17:28:02 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:28:02 crc kubenswrapper[4558]: E0120 17:28:02.619476 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"glance-db-secret\\\" not found\"" pod="openstack-kuttl-tests/glance-65cd-account-create-update-grtts" podUID="5f7a0ad9-436f-433c-9a91-cec4ffd3beeb" Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.621146 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2a434215-321c-46c8-940b-7341b5c2a8c1" path="/var/lib/kubelet/pods/2a434215-321c-46c8-940b-7341b5c2a8c1/volumes" Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.621665 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="33a5b64d-fca0-4a5f-92df-908c67df28cc" path="/var/lib/kubelet/pods/33a5b64d-fca0-4a5f-92df-908c67df28cc/volumes" Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.622213 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3c810cd2-bc0a-407e-8626-e52eb0ba5296" path="/var/lib/kubelet/pods/3c810cd2-bc0a-407e-8626-e52eb0ba5296/volumes" Jan 20 17:28:02 crc kubenswrapper[4558]: E0120 17:28:02.624938 4558 projected.go:194] Error preparing data for projected volume kube-api-access-4j88m for pod openstack-kuttl-tests/nova-cell1-af2b-account-create-update-x6tw5: failed to fetch token: serviceaccounts "galera-openstack-cell1" not found Jan 20 17:28:02 crc kubenswrapper[4558]: E0120 17:28:02.625001 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/b69d0136-9748-45bf-a9ed-d23e0180f1b4-kube-api-access-4j88m podName:b69d0136-9748-45bf-a9ed-d23e0180f1b4 nodeName:}" failed. No retries permitted until 2026-01-20 17:28:03.624979913 +0000 UTC m=+2777.385317880 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-4j88m" (UniqueName: "kubernetes.io/projected/b69d0136-9748-45bf-a9ed-d23e0180f1b4-kube-api-access-4j88m") pod "nova-cell1-af2b-account-create-update-x6tw5" (UID: "b69d0136-9748-45bf-a9ed-d23e0180f1b4") : failed to fetch token: serviceaccounts "galera-openstack-cell1" not found Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.626199 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4869db4f-9810-4685-a2a3-a1103e998535" path="/var/lib/kubelet/pods/4869db4f-9810-4685-a2a3-a1103e998535/volumes" Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.626743 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4d845835-77d4-4d8a-9fad-0286ad5291ae" path="/var/lib/kubelet/pods/4d845835-77d4-4d8a-9fad-0286ad5291ae/volumes" Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.629893 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5f91fe62-3615-4153-a6c7-1652a5780bb5" path="/var/lib/kubelet/pods/5f91fe62-3615-4153-a6c7-1652a5780bb5/volumes" Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.631712 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="72189413-de76-4a83-87ce-441c69a4e319" path="/var/lib/kubelet/pods/72189413-de76-4a83-87ce-441c69a4e319/volumes" Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.632647 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="730a8fa5-7c66-41ff-be4b-f170d49a6b0b" path="/var/lib/kubelet/pods/730a8fa5-7c66-41ff-be4b-f170d49a6b0b/volumes" Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.633698 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8e043dc4-ede7-4945-adfe-f23ef8b0b313" path="/var/lib/kubelet/pods/8e043dc4-ede7-4945-adfe-f23ef8b0b313/volumes" Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.639091 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="90c87493-4105-4f33-b9a8-863231d6e367" path="/var/lib/kubelet/pods/90c87493-4105-4f33-b9a8-863231d6e367/volumes" Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.644506 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9c26bd33-30c8-48f1-b6f5-2a6cbe8cc92b" path="/var/lib/kubelet/pods/9c26bd33-30c8-48f1-b6f5-2a6cbe8cc92b/volumes" Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.645018 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a3e891d2-b4a9-4486-ae23-c3437ae07e01" path="/var/lib/kubelet/pods/a3e891d2-b4a9-4486-ae23-c3437ae07e01/volumes" Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.647835 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a7cbf118-c70e-4058-b7b8-e14883e1fdc2" path="/var/lib/kubelet/pods/a7cbf118-c70e-4058-b7b8-e14883e1fdc2/volumes" Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.648320 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c2442cca-1f6c-4531-b1f4-2b873ce42964" path="/var/lib/kubelet/pods/c2442cca-1f6c-4531-b1f4-2b873ce42964/volumes" Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.663234 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c7f6b91b-b55e-4af0-b496-46c80b92bad1" path="/var/lib/kubelet/pods/c7f6b91b-b55e-4af0-b496-46c80b92bad1/volumes" Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.663777 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ca42fa74-258c-428a-83e0-0410cd4f2961" 
path="/var/lib/kubelet/pods/ca42fa74-258c-428a-83e0-0410cd4f2961/volumes" Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.664310 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d39d453b-abd1-42ef-8b7c-0cf6d9404ad6" path="/var/lib/kubelet/pods/d39d453b-abd1-42ef-8b7c-0cf6d9404ad6/volumes" Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.664825 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ebbaaabe-8492-4108-981c-0c00cf1561f0" path="/var/lib/kubelet/pods/ebbaaabe-8492-4108-981c-0c00cf1561f0/volumes" Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.668998 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-sb-0_7b6a0803-befe-4426-9e2a-a04ac12f2d7c/ovsdbserver-sb/0.log" Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.669047 4558 generic.go:334] "Generic (PLEG): container finished" podID="7b6a0803-befe-4426-9e2a-a04ac12f2d7c" containerID="e14a2ace059c2e1c3be33ab3bdf7264642df45d603b1ef56bea3fa59cbb4787b" exitCode=2 Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.669062 4558 generic.go:334] "Generic (PLEG): container finished" podID="7b6a0803-befe-4426-9e2a-a04ac12f2d7c" containerID="dd2c0dcba96a9cb823d300977fee811cbbd5c8ce5dca2787726542941426f5c2" exitCode=143 Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.681217 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f59d0040-b44b-46e3-bb18-c0ea74722cd3" path="/var/lib/kubelet/pods/f59d0040-b44b-46e3-bb18-c0ea74722cd3/volumes" Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.682281 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f59de47e-9a81-41d9-bc7c-fd5ff6723291" path="/var/lib/kubelet/pods/f59de47e-9a81-41d9-bc7c-fd5ff6723291/volumes" Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.682896 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-dmtbg"] Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.682919 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-cell-mapping-5jhz5"] Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.682931 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-create-rt2fh" event={"ID":"1b7c6fb3-f30d-4dde-8502-e7d840719520","Type":"ContainerStarted","Data":"6e67b06995c037b7fe9c89c79183b501b9088714a4aa0c041dd1cc3af13b30c6"} Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.682947 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell0-cell-mapping-5jhz5"] Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.682963 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-65cd-account-create-update-grtts" event={"ID":"5f7a0ad9-436f-433c-9a91-cec4ffd3beeb","Type":"ContainerStarted","Data":"7da3bfbb5909f35367f1490c833f509488197456e90229fef8bc430474608c7a"} Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.682974 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-5f7fb7b-s2dxn" event={"ID":"4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5","Type":"ContainerStarted","Data":"bcc4b197f2fa6bbcecefb3da95a4d96c30060a9d1504ae2efef0a52dc6bcf098"} Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.682984 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-7ab2-account-create-update-c5qqm"] Jan 20 17:28:02 crc 
kubenswrapper[4558]: I0120 17:28:02.682998 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-5f7fb7b-s2dxn" event={"ID":"4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5","Type":"ContainerStarted","Data":"a2a700bf0185bdf7350481166679a9ad59dffb201ae25abadc2103dd55e220aa"} Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.683030 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"7b6a0803-befe-4426-9e2a-a04ac12f2d7c","Type":"ContainerDied","Data":"e14a2ace059c2e1c3be33ab3bdf7264642df45d603b1ef56bea3fa59cbb4787b"} Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.683044 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"7b6a0803-befe-4426-9e2a-a04ac12f2d7c","Type":"ContainerDied","Data":"dd2c0dcba96a9cb823d300977fee811cbbd5c8ce5dca2787726542941426f5c2"} Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.686700 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-0"] Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.699609 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-db-create-j7682"] Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.709465 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovn-northd-0_8a111df9-8578-40bb-a672-b5d53305c873/ovn-northd/0.log" Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.709501 4558 generic.go:334] "Generic (PLEG): container finished" podID="8a111df9-8578-40bb-a672-b5d53305c873" containerID="d3ba0972279d7175889b46187fb32615d720f05a63a7bbfedd1ca9dc2eff5075" exitCode=2 Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.709514 4558 generic.go:334] "Generic (PLEG): container finished" podID="8a111df9-8578-40bb-a672-b5d53305c873" containerID="c2fa33c63a4c25f68e800c109ef61e283f605e39ebdcd676406536456186262d" exitCode=143 Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.709574 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"8a111df9-8578-40bb-a672-b5d53305c873","Type":"ContainerDied","Data":"d3ba0972279d7175889b46187fb32615d720f05a63a7bbfedd1ca9dc2eff5075"} Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.709604 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.709618 4558 scope.go:117] "RemoveContainer" containerID="d3ba0972279d7175889b46187fb32615d720f05a63a7bbfedd1ca9dc2eff5075" Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.709605 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"8a111df9-8578-40bb-a672-b5d53305c873","Type":"ContainerDied","Data":"c2fa33c63a4c25f68e800c109ef61e283f605e39ebdcd676406536456186262d"} Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.715812 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.716018 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.716233 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="592927c6-9b42-411a-8aec-40cf6183e32a" containerName="glance-log" containerID="cri-o://f60fe058ea93021d1c1c3c3d2912da2aaabf83d63fd9e5d23a8a1c05335b6abd" gracePeriod=30 Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.716367 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="592927c6-9b42-411a-8aec-40cf6183e32a" containerName="glance-httpd" containerID="cri-o://981127416a83b8463012a21079019f3fb0205f2a34d9b217c94d3c09ae42bc02" gracePeriod=30 Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.716602 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/8a111df9-8578-40bb-a672-b5d53305c873-metrics-certs-tls-certs\") pod \"8a111df9-8578-40bb-a672-b5d53305c873\" (UID: \"8a111df9-8578-40bb-a672-b5d53305c873\") " Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.716632 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8a111df9-8578-40bb-a672-b5d53305c873-scripts\") pod \"8a111df9-8578-40bb-a672-b5d53305c873\" (UID: \"8a111df9-8578-40bb-a672-b5d53305c873\") " Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.716695 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/8a111df9-8578-40bb-a672-b5d53305c873-ovn-rundir\") pod \"8a111df9-8578-40bb-a672-b5d53305c873\" (UID: \"8a111df9-8578-40bb-a672-b5d53305c873\") " Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.716750 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/8a111df9-8578-40bb-a672-b5d53305c873-ovn-northd-tls-certs\") pod \"8a111df9-8578-40bb-a672-b5d53305c873\" (UID: \"8a111df9-8578-40bb-a672-b5d53305c873\") " Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.716861 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8a111df9-8578-40bb-a672-b5d53305c873-config\") pod \"8a111df9-8578-40bb-a672-b5d53305c873\" (UID: \"8a111df9-8578-40bb-a672-b5d53305c873\") " Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.716972 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a111df9-8578-40bb-a672-b5d53305c873-combined-ca-bundle\") pod \"8a111df9-8578-40bb-a672-b5d53305c873\" (UID: \"8a111df9-8578-40bb-a672-b5d53305c873\") " Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.717030 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r4ll4\" (UniqueName: \"kubernetes.io/projected/8a111df9-8578-40bb-a672-b5d53305c873-kube-api-access-r4ll4\") pod \"8a111df9-8578-40bb-a672-b5d53305c873\" (UID: \"8a111df9-8578-40bb-a672-b5d53305c873\") " Jan 20 17:28:02 crc kubenswrapper[4558]: E0120 17:28:02.718303 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-kube-state-metrics-svc: secret "cert-kube-state-metrics-svc" not found Jan 20 17:28:02 crc kubenswrapper[4558]: E0120 17:28:02.718348 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/443f910c-dd2d-4c72-b861-f15de67ac6bb-kube-state-metrics-tls-certs podName:443f910c-dd2d-4c72-b861-f15de67ac6bb nodeName:}" failed. No retries permitted until 2026-01-20 17:28:03.218334767 +0000 UTC m=+2776.978672734 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-state-metrics-tls-certs" (UniqueName: "kubernetes.io/secret/443f910c-dd2d-4c72-b861-f15de67ac6bb-kube-state-metrics-tls-certs") pod "kube-state-metrics-0" (UID: "443f910c-dd2d-4c72-b861-f15de67ac6bb") : secret "cert-kube-state-metrics-svc" not found Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.718572 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8a111df9-8578-40bb-a672-b5d53305c873-ovn-rundir" (OuterVolumeSpecName: "ovn-rundir") pod "8a111df9-8578-40bb-a672-b5d53305c873" (UID: "8a111df9-8578-40bb-a672-b5d53305c873"). InnerVolumeSpecName "ovn-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:28:02 crc kubenswrapper[4558]: E0120 17:28:02.719813 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/kube-state-metrics-tls-config: secret "kube-state-metrics-tls-config" not found Jan 20 17:28:02 crc kubenswrapper[4558]: E0120 17:28:02.726035 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/443f910c-dd2d-4c72-b861-f15de67ac6bb-kube-state-metrics-tls-config podName:443f910c-dd2d-4c72-b861-f15de67ac6bb nodeName:}" failed. No retries permitted until 2026-01-20 17:28:03.226018053 +0000 UTC m=+2776.986356020 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-state-metrics-tls-config" (UniqueName: "kubernetes.io/secret/443f910c-dd2d-4c72-b861-f15de67ac6bb-kube-state-metrics-tls-config") pod "kube-state-metrics-0" (UID: "443f910c-dd2d-4c72-b861-f15de67ac6bb") : secret "kube-state-metrics-tls-config" not found Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.721052 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8a111df9-8578-40bb-a672-b5d53305c873-config" (OuterVolumeSpecName: "config") pod "8a111df9-8578-40bb-a672-b5d53305c873" (UID: "8a111df9-8578-40bb-a672-b5d53305c873"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.722384 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8a111df9-8578-40bb-a672-b5d53305c873-scripts" (OuterVolumeSpecName: "scripts") pod "8a111df9-8578-40bb-a672-b5d53305c873" (UID: "8a111df9-8578-40bb-a672-b5d53305c873"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:28:02 crc kubenswrapper[4558]: E0120 17:28:02.722969 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:28:02 crc kubenswrapper[4558]: E0120 17:28:02.726582 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/443f910c-dd2d-4c72-b861-f15de67ac6bb-combined-ca-bundle podName:443f910c-dd2d-4c72-b861-f15de67ac6bb nodeName:}" failed. No retries permitted until 2026-01-20 17:28:03.226566734 +0000 UTC m=+2776.986904701 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/443f910c-dd2d-4c72-b861-f15de67ac6bb-combined-ca-bundle") pod "kube-state-metrics-0" (UID: "443f910c-dd2d-4c72-b861-f15de67ac6bb") : secret "combined-ca-bundle" not found Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.738218 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-lgbfw"] Jan 20 17:28:02 crc kubenswrapper[4558]: E0120 17:28:02.738765 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a111df9-8578-40bb-a672-b5d53305c873" containerName="openstack-network-exporter" Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.738838 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a111df9-8578-40bb-a672-b5d53305c873" containerName="openstack-network-exporter" Jan 20 17:28:02 crc kubenswrapper[4558]: E0120 17:28:02.738908 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a111df9-8578-40bb-a672-b5d53305c873" containerName="ovn-northd" Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.738956 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a111df9-8578-40bb-a672-b5d53305c873" containerName="ovn-northd" Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.740927 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8a111df9-8578-40bb-a672-b5d53305c873" containerName="openstack-network-exporter" Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.741119 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8a111df9-8578-40bb-a672-b5d53305c873" containerName="ovn-northd" Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.744101 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-lgbfw" Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.747665 4558 generic.go:334] "Generic (PLEG): container finished" podID="c2bfe108-b710-400b-baac-55815b192ee3" containerID="a9c03d94fb53b93aaaeb1d783b464528820c872e9dc48826f7156866d1723e5e" exitCode=1 Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.748144 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-dmtbg" event={"ID":"c2bfe108-b710-400b-baac-55815b192ee3","Type":"ContainerDied","Data":"a9c03d94fb53b93aaaeb1d783b464528820c872e9dc48826f7156866d1723e5e"} Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.748222 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-dmtbg" event={"ID":"c2bfe108-b710-400b-baac-55815b192ee3","Type":"ContainerStarted","Data":"cd5e3f650786565f023c1017ec96f62e35bc2e5721e6093ac2aec847b1e311a1"} Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.748399 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8a111df9-8578-40bb-a672-b5d53305c873-kube-api-access-r4ll4" (OuterVolumeSpecName: "kube-api-access-r4ll4") pod "8a111df9-8578-40bb-a672-b5d53305c873" (UID: "8a111df9-8578-40bb-a672-b5d53305c873"). InnerVolumeSpecName "kube-api-access-r4ll4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.779817 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-7ab2-account-create-update-c5qqm" event={"ID":"b201974e-8bb6-412a-95d5-cce7a95e4528","Type":"ContainerStarted","Data":"cc78df1605b23b363d8c1c147f3989b9a5c85c29cc42aca19c525730f4cb40eb"} Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.813013 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-create-j7682" event={"ID":"df28560c-92e6-4003-a08f-b8691fa43300","Type":"ContainerStarted","Data":"405eba50b6723c4bbfd326989331668cadd5d5aa3aa6d3f082fbf7d791dda0f0"} Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.828512 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-create-j7682" event={"ID":"df28560c-92e6-4003-a08f-b8691fa43300","Type":"ContainerStarted","Data":"bb6338053e49720b1e74bb1a48e7513dde37dd08f6b58323abe135e98b714799"} Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.824444 4558 reconciler_common.go:293] "Volume detached for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/8a111df9-8578-40bb-a672-b5d53305c873-ovn-rundir\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.832556 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8a111df9-8578-40bb-a672-b5d53305c873-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.832591 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r4ll4\" (UniqueName: \"kubernetes.io/projected/8a111df9-8578-40bb-a672-b5d53305c873-kube-api-access-r4ll4\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.832606 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8a111df9-8578-40bb-a672-b5d53305c873-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:02 crc kubenswrapper[4558]: E0120 
17:28:02.833814 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:28:02 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:28:02 crc kubenswrapper[4558]: Jan 20 17:28:02 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:28:02 crc kubenswrapper[4558]: Jan 20 17:28:02 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:28:02 crc kubenswrapper[4558]: Jan 20 17:28:02 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:28:02 crc kubenswrapper[4558]: Jan 20 17:28:02 crc kubenswrapper[4558]: if [ -n "placement" ]; then Jan 20 17:28:02 crc kubenswrapper[4558]: GRANT_DATABASE="placement" Jan 20 17:28:02 crc kubenswrapper[4558]: else Jan 20 17:28:02 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:28:02 crc kubenswrapper[4558]: fi Jan 20 17:28:02 crc kubenswrapper[4558]: Jan 20 17:28:02 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:28:02 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:28:02 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:28:02 crc kubenswrapper[4558]: # 3. create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:28:02 crc kubenswrapper[4558]: # support updates Jan 20 17:28:02 crc kubenswrapper[4558]: Jan 20 17:28:02 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:28:02 crc kubenswrapper[4558]: E0120 17:28:02.835386 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"placement-db-secret\\\" not found\"" pod="openstack-kuttl-tests/placement-7ab2-account-create-update-c5qqm" podUID="b201974e-8bb6-412a-95d5-cce7a95e4528" Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.837395 4558 scope.go:117] "RemoveContainer" containerID="c2fa33c63a4c25f68e800c109ef61e283f605e39ebdcd676406536456186262d" Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.844108 4558 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." 
pod="openstack-kuttl-tests/root-account-create-update-dmtbg" secret="" err="secret \"galera-openstack-cell1-dockercfg-ct6ph\" not found" Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.844215 4558 scope.go:117] "RemoveContainer" containerID="a9c03d94fb53b93aaaeb1d783b464528820c872e9dc48826f7156866d1723e5e" Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.848734 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-b136-account-create-update-n9qdv" event={"ID":"1bcd505c-4754-4f41-b91d-6e488a669c93","Type":"ContainerStarted","Data":"84e60af078ecb3aa4c7a8e920a31923950c7f5a488c23a929fc9e13a11f6efe7"} Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.875686 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.876181 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="e81d2508-e298-463c-9031-b9d8e486d566" containerName="glance-log" containerID="cri-o://d2afe7d8e958d845dee5152deac97265bbf55f309d653220c753d06c2403fd6e" gracePeriod=30 Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.876413 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="e81d2508-e298-463c-9031-b9d8e486d566" containerName="glance-httpd" containerID="cri-o://c06768c089cc106f807b1405688b9c79fd40a3d43e579f7beb4c7d8ba6ce7297" gracePeriod=30 Jan 20 17:28:02 crc kubenswrapper[4558]: E0120 17:28:02.902135 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:28:02 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:28:02 crc kubenswrapper[4558]: Jan 20 17:28:02 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:28:02 crc kubenswrapper[4558]: Jan 20 17:28:02 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:28:02 crc kubenswrapper[4558]: Jan 20 17:28:02 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:28:02 crc kubenswrapper[4558]: Jan 20 17:28:02 crc kubenswrapper[4558]: if [ -n "neutron" ]; then Jan 20 17:28:02 crc kubenswrapper[4558]: GRANT_DATABASE="neutron" Jan 20 17:28:02 crc kubenswrapper[4558]: else Jan 20 17:28:02 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:28:02 crc kubenswrapper[4558]: fi Jan 20 17:28:02 crc kubenswrapper[4558]: Jan 20 17:28:02 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:28:02 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:28:02 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:28:02 crc kubenswrapper[4558]: # 3. 
create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:28:02 crc kubenswrapper[4558]: # support updates Jan 20 17:28:02 crc kubenswrapper[4558]: Jan 20 17:28:02 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:28:02 crc kubenswrapper[4558]: E0120 17:28:02.903292 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"neutron-db-secret\\\" not found\"" pod="openstack-kuttl-tests/neutron-b136-account-create-update-n9qdv" podUID="1bcd505c-4754-4f41-b91d-6e488a669c93" Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.906391 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-nb-0_a2068ccb-0c0d-4b32-9063-082a4c395070/ovsdbserver-nb/0.log" Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.906439 4558 generic.go:334] "Generic (PLEG): container finished" podID="a2068ccb-0c0d-4b32-9063-082a4c395070" containerID="0a200f6996ecf5951d17224c62d14bcea68ab49867b472e280d5c47ecfd50ad7" exitCode=2 Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.906456 4558 generic.go:334] "Generic (PLEG): container finished" podID="a2068ccb-0c0d-4b32-9063-082a4c395070" containerID="3cb92b2ab723d670d8405be0316a236c7b450aa82c756082192092f8f2f20fad" exitCode=143 Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.906539 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"a2068ccb-0c0d-4b32-9063-082a4c395070","Type":"ContainerDied","Data":"0a200f6996ecf5951d17224c62d14bcea68ab49867b472e280d5c47ecfd50ad7"} Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.906573 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"a2068ccb-0c0d-4b32-9063-082a4c395070","Type":"ContainerDied","Data":"3cb92b2ab723d670d8405be0316a236c7b450aa82c756082192092f8f2f20fad"} Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.918381 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a111df9-8578-40bb-a672-b5d53305c873-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8a111df9-8578-40bb-a672-b5d53305c873" (UID: "8a111df9-8578-40bb-a672-b5d53305c873"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.918393 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" podUID="48cfc6e5-774d-4e7d-8103-f6a3260ea14c" containerName="rabbitmq" containerID="cri-o://94d9e6b06e5975fa2b75519bde1be0a449c0ca63e7bbf3340176be25c0d5874c" gracePeriod=604800 Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.931359 4558 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." 
pod="openstack-kuttl-tests/cinder-scheduler-0" secret="" err="secret \"cinder-cinder-dockercfg-msjfn\" not found" Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.949112 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-665b4c9c8d-9tcx6"] Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.949531 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-665b4c9c8d-9tcx6" podUID="6dc46db7-9948-4c7d-b0a3-c767ffd58b4a" containerName="neutron-api" containerID="cri-o://76433ae79aade2e04de13538618917ea1b5001e82383e84d8122e38de55ebbe8" gracePeriod=30 Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.950044 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-665b4c9c8d-9tcx6" podUID="6dc46db7-9948-4c7d-b0a3-c767ffd58b4a" containerName="neutron-httpd" containerID="cri-o://2b441621415d66b4dd582cca5044009b53d19bc346f62f7de3d1c998664de90b" gracePeriod=30 Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.950225 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w9qtf\" (UniqueName: \"kubernetes.io/projected/318ab596-0189-4f84-b805-f28bd05824e1-kube-api-access-w9qtf\") pod \"dnsmasq-dnsmasq-84b9f45d47-lgbfw\" (UID: \"318ab596-0189-4f84-b805-f28bd05824e1\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-lgbfw" Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.950313 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/318ab596-0189-4f84-b805-f28bd05824e1-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-84b9f45d47-lgbfw\" (UID: \"318ab596-0189-4f84-b805-f28bd05824e1\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-lgbfw" Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.950427 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/318ab596-0189-4f84-b805-f28bd05824e1-config\") pod \"dnsmasq-dnsmasq-84b9f45d47-lgbfw\" (UID: \"318ab596-0189-4f84-b805-f28bd05824e1\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-lgbfw" Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.951461 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a111df9-8578-40bb-a672-b5d53305c873-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:02 crc kubenswrapper[4558]: I0120 17:28:02.976352 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-65cd-account-create-update-grtts"] Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.015339 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-lgbfw"] Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.021272 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.039395 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/cinder-scheduler-0" podStartSLOduration=6.039374356 podStartE2EDuration="6.039374356s" podCreationTimestamp="2026-01-20 17:27:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 
17:28:01.56058041 +0000 UTC m=+2775.320918376" watchObservedRunningTime="2026-01-20 17:28:03.039374356 +0000 UTC m=+2776.799712323" Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.042317 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a111df9-8578-40bb-a672-b5d53305c873-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "8a111df9-8578-40bb-a672-b5d53305c873" (UID: "8a111df9-8578-40bb-a672-b5d53305c873"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.058050 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-b136-account-create-update-n9qdv"] Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.059541 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w9qtf\" (UniqueName: \"kubernetes.io/projected/318ab596-0189-4f84-b805-f28bd05824e1-kube-api-access-w9qtf\") pod \"dnsmasq-dnsmasq-84b9f45d47-lgbfw\" (UID: \"318ab596-0189-4f84-b805-f28bd05824e1\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-lgbfw" Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.059614 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/318ab596-0189-4f84-b805-f28bd05824e1-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-84b9f45d47-lgbfw\" (UID: \"318ab596-0189-4f84-b805-f28bd05824e1\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-lgbfw" Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.059721 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/318ab596-0189-4f84-b805-f28bd05824e1-config\") pod \"dnsmasq-dnsmasq-84b9f45d47-lgbfw\" (UID: \"318ab596-0189-4f84-b805-f28bd05824e1\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-lgbfw" Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.059820 4558 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/8a111df9-8578-40bb-a672-b5d53305c873-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:03 crc kubenswrapper[4558]: E0120 17:28:03.059882 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-cell1-scripts: configmap "openstack-cell1-scripts" not found Jan 20 17:28:03 crc kubenswrapper[4558]: E0120 17:28:03.059918 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/c2bfe108-b710-400b-baac-55815b192ee3-operator-scripts podName:c2bfe108-b710-400b-baac-55815b192ee3 nodeName:}" failed. No retries permitted until 2026-01-20 17:28:03.559905497 +0000 UTC m=+2777.320243463 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/c2bfe108-b710-400b-baac-55815b192ee3-operator-scripts") pod "root-account-create-update-dmtbg" (UID: "c2bfe108-b710-400b-baac-55815b192ee3") : configmap "openstack-cell1-scripts" not found Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.061488 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/318ab596-0189-4f84-b805-f28bd05824e1-config\") pod \"dnsmasq-dnsmasq-84b9f45d47-lgbfw\" (UID: \"318ab596-0189-4f84-b805-f28bd05824e1\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-lgbfw" Jan 20 17:28:03 crc kubenswrapper[4558]: E0120 17:28:03.061552 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 17:28:03 crc kubenswrapper[4558]: E0120 17:28:03.061590 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/1bcd505c-4754-4f41-b91d-6e488a669c93-operator-scripts podName:1bcd505c-4754-4f41-b91d-6e488a669c93 nodeName:}" failed. No retries permitted until 2026-01-20 17:28:03.561576979 +0000 UTC m=+2777.321914946 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/1bcd505c-4754-4f41-b91d-6e488a669c93-operator-scripts") pod "neutron-b136-account-create-update-n9qdv" (UID: "1bcd505c-4754-4f41-b91d-6e488a669c93") : configmap "openstack-scripts" not found Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.061831 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/318ab596-0189-4f84-b805-f28bd05824e1-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-84b9f45d47-lgbfw\" (UID: \"318ab596-0189-4f84-b805-f28bd05824e1\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-lgbfw" Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.081702 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a111df9-8578-40bb-a672-b5d53305c873-ovn-northd-tls-certs" (OuterVolumeSpecName: "ovn-northd-tls-certs") pod "8a111df9-8578-40bb-a672-b5d53305c873" (UID: "8a111df9-8578-40bb-a672-b5d53305c873"). InnerVolumeSpecName "ovn-northd-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.083458 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w9qtf\" (UniqueName: \"kubernetes.io/projected/318ab596-0189-4f84-b805-f28bd05824e1-kube-api-access-w9qtf\") pod \"dnsmasq-dnsmasq-84b9f45d47-lgbfw\" (UID: \"318ab596-0189-4f84-b805-f28bd05824e1\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-lgbfw" Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.106564 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-e949-account-create-update-qd847"] Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.113222 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-43e3-account-create-update-fdlrr"] Jan 20 17:28:03 crc kubenswrapper[4558]: E0120 17:28:03.115614 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:28:03 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:28:03 crc kubenswrapper[4558]: Jan 20 17:28:03 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:28:03 crc kubenswrapper[4558]: Jan 20 17:28:03 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:28:03 crc kubenswrapper[4558]: Jan 20 17:28:03 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:28:03 crc kubenswrapper[4558]: Jan 20 17:28:03 crc kubenswrapper[4558]: if [ -n "cinder" ]; then Jan 20 17:28:03 crc kubenswrapper[4558]: GRANT_DATABASE="cinder" Jan 20 17:28:03 crc kubenswrapper[4558]: else Jan 20 17:28:03 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:28:03 crc kubenswrapper[4558]: fi Jan 20 17:28:03 crc kubenswrapper[4558]: Jan 20 17:28:03 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:28:03 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:28:03 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:28:03 crc kubenswrapper[4558]: # 3. 
create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:28:03 crc kubenswrapper[4558]: # support updates Jan 20 17:28:03 crc kubenswrapper[4558]: Jan 20 17:28:03 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.116363 4558 scope.go:117] "RemoveContainer" containerID="d3ba0972279d7175889b46187fb32615d720f05a63a7bbfedd1ca9dc2eff5075" Jan 20 17:28:03 crc kubenswrapper[4558]: E0120 17:28:03.117293 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"cinder-db-secret\\\" not found\"" pod="openstack-kuttl-tests/cinder-43e3-account-create-update-fdlrr" podUID="7ce0109d-f162-4ce7-b957-34ba84e8e377" Jan 20 17:28:03 crc kubenswrapper[4558]: E0120 17:28:03.117911 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d3ba0972279d7175889b46187fb32615d720f05a63a7bbfedd1ca9dc2eff5075\": container with ID starting with d3ba0972279d7175889b46187fb32615d720f05a63a7bbfedd1ca9dc2eff5075 not found: ID does not exist" containerID="d3ba0972279d7175889b46187fb32615d720f05a63a7bbfedd1ca9dc2eff5075" Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.117944 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d3ba0972279d7175889b46187fb32615d720f05a63a7bbfedd1ca9dc2eff5075"} err="failed to get container status \"d3ba0972279d7175889b46187fb32615d720f05a63a7bbfedd1ca9dc2eff5075\": rpc error: code = NotFound desc = could not find container \"d3ba0972279d7175889b46187fb32615d720f05a63a7bbfedd1ca9dc2eff5075\": container with ID starting with d3ba0972279d7175889b46187fb32615d720f05a63a7bbfedd1ca9dc2eff5075 not found: ID does not exist" Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.117968 4558 scope.go:117] "RemoveContainer" containerID="c2fa33c63a4c25f68e800c109ef61e283f605e39ebdcd676406536456186262d" Jan 20 17:28:03 crc kubenswrapper[4558]: E0120 17:28:03.118369 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c2fa33c63a4c25f68e800c109ef61e283f605e39ebdcd676406536456186262d\": container with ID starting with c2fa33c63a4c25f68e800c109ef61e283f605e39ebdcd676406536456186262d not found: ID does not exist" containerID="c2fa33c63a4c25f68e800c109ef61e283f605e39ebdcd676406536456186262d" Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.118386 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c2fa33c63a4c25f68e800c109ef61e283f605e39ebdcd676406536456186262d"} err="failed to get container status \"c2fa33c63a4c25f68e800c109ef61e283f605e39ebdcd676406536456186262d\": rpc error: code = NotFound desc = could not find container \"c2fa33c63a4c25f68e800c109ef61e283f605e39ebdcd676406536456186262d\": container with ID starting with c2fa33c63a4c25f68e800c109ef61e283f605e39ebdcd676406536456186262d not found: ID does not exist" Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.140985 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-db-create-2kmst"] Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.152384 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.158086 4558 kuberuntime_container.go:808] "Killing 
container with a grace period" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="b2a7ece8-32ed-4eb3-ab85-27ff4361622d" containerName="nova-scheduler-scheduler" containerID="cri-o://15e562b45f89d1f5fb52993145b8f16fbd8c5b35f2f80d2df9e160bf13a36eaf" gracePeriod=30 Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.162524 4558 reconciler_common.go:293] "Volume detached for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/8a111df9-8578-40bb-a672-b5d53305c873-ovn-northd-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.164467 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-db-create-rt2fh"] Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.176036 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.176336 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="cc4089c6-71ea-4503-b54c-18777fcc3c48" containerName="nova-metadata-log" containerID="cri-o://ca2fe5f683448e1b2e71f1f58bff793b5e9a3b0d6f6f324c37f3cdcce1707435" gracePeriod=30 Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.176705 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="cc4089c6-71ea-4503-b54c-18777fcc3c48" containerName="nova-metadata-metadata" containerID="cri-o://ddfb78eb686b10db6949511eb53f1d12111dd9665adfb113b40c64dcc8f80745" gracePeriod=30 Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.189250 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-db-create-pxxqs"] Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.200221 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.200704 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="4430d3cb-0a1c-4266-9f69-cf8e817f1d3e" containerName="nova-api-log" containerID="cri-o://01f067cab301938980d9e1ee9c68462c017ce912a5f70f653badf838dae20a53" gracePeriod=30 Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.201003 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="4430d3cb-0a1c-4266-9f69-cf8e817f1d3e" containerName="nova-api-api" containerID="cri-o://fccdd43e49b8880beb3f4193e0e2a09b0c9bcb297f472c9a13a4fc7709889c73" gracePeriod=30 Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.219394 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-af2b-account-create-update-x6tw5"] Jan 20 17:28:03 crc kubenswrapper[4558]: E0120 17:28:03.220443 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[kube-api-access-4j88m operator-scripts], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack-kuttl-tests/nova-cell1-af2b-account-create-update-x6tw5" podUID="b69d0136-9748-45bf-a9ed-d23e0180f1b4" Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.224949 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-worker-5cbf796c47-9gdgl"] Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.225330 4558 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack-kuttl-tests/barbican-worker-5cbf796c47-9gdgl" podUID="3b9858cf-2020-458e-bcf6-407c6853a962" containerName="barbican-worker-log" containerID="cri-o://b4c792c97b8f3884770e6abef851a252b3788722ba9e9f419f4552e68ff9bd85" gracePeriod=30 Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.225382 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-worker-5cbf796c47-9gdgl" podUID="3b9858cf-2020-458e-bcf6-407c6853a962" containerName="barbican-worker" containerID="cri-o://0bee60e6a9ed04943634730130e635bd656aae9f44611d34a0aa33192c022189" gracePeriod=30 Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.233360 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-worker-74d54f756c-g9zzk"] Jan 20 17:28:03 crc kubenswrapper[4558]: E0120 17:28:03.235521 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[combined-ca-bundle], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack-kuttl-tests/barbican-worker-74d54f756c-g9zzk" podUID="b9b28c41-c054-49f3-87d2-a8c15e6124de" Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.239484 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-a772-account-create-update-27cpc"] Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.244294 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-86d4665d84-cndx4"] Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.244509 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-keystone-listener-86d4665d84-cndx4" podUID="125597fc-d3b4-44c1-a0a6-7dccbaebe7dd" containerName="barbican-keystone-listener-log" containerID="cri-o://2c27863658ecd8a450f932a78dd0d64933a85bf75c1f865bb6dc9d092e3e4a29" gracePeriod=30 Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.244881 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-keystone-listener-86d4665d84-cndx4" podUID="125597fc-d3b4-44c1-a0a6-7dccbaebe7dd" containerName="barbican-keystone-listener" containerID="cri-o://8299fb12d183c4920c21b1f9def27ecef5343744aedc73fdcd93885a02ce9354" gracePeriod=30 Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.245473 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-nb-0_a2068ccb-0c0d-4b32-9063-082a4c395070/ovsdbserver-nb/0.log" Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.245545 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.250661 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-8f4cc76c4-9qkpn"] Jan 20 17:28:03 crc kubenswrapper[4558]: E0120 17:28:03.257616 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[combined-ca-bundle], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack-kuttl-tests/barbican-keystone-listener-8f4cc76c4-9qkpn" podUID="fa57dfc3-11b4-48af-a78c-00463e8894bf" Jan 20 17:28:03 crc kubenswrapper[4558]: E0120 17:28:03.265197 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cinder-config-data: secret "cinder-config-data" not found Jan 20 17:28:03 crc kubenswrapper[4558]: E0120 17:28:03.265243 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-config-data podName:c1afb831-f3e4-4356-ab86-713eb0beca39 nodeName:}" failed. No retries permitted until 2026-01-20 17:28:05.265228018 +0000 UTC m=+2779.025565986 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-config-data") pod "cinder-scheduler-0" (UID: "c1afb831-f3e4-4356-ab86-713eb0beca39") : secret "cinder-config-data" not found Jan 20 17:28:03 crc kubenswrapper[4558]: E0120 17:28:03.265509 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/kube-state-metrics-tls-config: secret "kube-state-metrics-tls-config" not found Jan 20 17:28:03 crc kubenswrapper[4558]: E0120 17:28:03.265537 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/443f910c-dd2d-4c72-b861-f15de67ac6bb-kube-state-metrics-tls-config podName:443f910c-dd2d-4c72-b861-f15de67ac6bb nodeName:}" failed. No retries permitted until 2026-01-20 17:28:04.265527162 +0000 UTC m=+2778.025865128 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-state-metrics-tls-config" (UniqueName: "kubernetes.io/secret/443f910c-dd2d-4c72-b861-f15de67ac6bb-kube-state-metrics-tls-config") pod "kube-state-metrics-0" (UID: "443f910c-dd2d-4c72-b861-f15de67ac6bb") : secret "kube-state-metrics-tls-config" not found Jan 20 17:28:03 crc kubenswrapper[4558]: E0120 17:28:03.265575 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/nova-metadata-config-data: secret "nova-metadata-config-data" not found Jan 20 17:28:03 crc kubenswrapper[4558]: E0120 17:28:03.265594 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cc4089c6-71ea-4503-b54c-18777fcc3c48-config-data podName:cc4089c6-71ea-4503-b54c-18777fcc3c48 nodeName:}" failed. No retries permitted until 2026-01-20 17:28:05.265587584 +0000 UTC m=+2779.025925552 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/cc4089c6-71ea-4503-b54c-18777fcc3c48-config-data") pod "nova-metadata-0" (UID: "cc4089c6-71ea-4503-b54c-18777fcc3c48") : secret "nova-metadata-config-data" not found Jan 20 17:28:03 crc kubenswrapper[4558]: E0120 17:28:03.265624 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cinder-scheduler-config-data: secret "cinder-scheduler-config-data" not found Jan 20 17:28:03 crc kubenswrapper[4558]: E0120 17:28:03.265641 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-config-data-custom podName:c1afb831-f3e4-4356-ab86-713eb0beca39 nodeName:}" failed. No retries permitted until 2026-01-20 17:28:05.265635464 +0000 UTC m=+2779.025973432 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "config-data-custom" (UniqueName: "kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-config-data-custom") pod "cinder-scheduler-0" (UID: "c1afb831-f3e4-4356-ab86-713eb0beca39") : secret "cinder-scheduler-config-data" not found Jan 20 17:28:03 crc kubenswrapper[4558]: E0120 17:28:03.265670 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:28:03 crc kubenswrapper[4558]: E0120 17:28:03.265686 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/443f910c-dd2d-4c72-b861-f15de67ac6bb-combined-ca-bundle podName:443f910c-dd2d-4c72-b861-f15de67ac6bb nodeName:}" failed. No retries permitted until 2026-01-20 17:28:04.265680731 +0000 UTC m=+2778.026018697 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/443f910c-dd2d-4c72-b861-f15de67ac6bb-combined-ca-bundle") pod "kube-state-metrics-0" (UID: "443f910c-dd2d-4c72-b861-f15de67ac6bb") : secret "combined-ca-bundle" not found Jan 20 17:28:03 crc kubenswrapper[4558]: E0120 17:28:03.265712 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cinder-scripts: secret "cinder-scripts" not found Jan 20 17:28:03 crc kubenswrapper[4558]: E0120 17:28:03.265731 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-scripts podName:c1afb831-f3e4-4356-ab86-713eb0beca39 nodeName:}" failed. No retries permitted until 2026-01-20 17:28:05.26572326 +0000 UTC m=+2779.026061227 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "scripts" (UniqueName: "kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-scripts") pod "cinder-scheduler-0" (UID: "c1afb831-f3e4-4356-ab86-713eb0beca39") : secret "cinder-scripts" not found Jan 20 17:28:03 crc kubenswrapper[4558]: E0120 17:28:03.265757 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:28:03 crc kubenswrapper[4558]: E0120 17:28:03.265783 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-combined-ca-bundle podName:c1afb831-f3e4-4356-ab86-713eb0beca39 nodeName:}" failed. No retries permitted until 2026-01-20 17:28:05.265777411 +0000 UTC m=+2779.026115379 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-combined-ca-bundle") pod "cinder-scheduler-0" (UID: "c1afb831-f3e4-4356-ab86-713eb0beca39") : secret "combined-ca-bundle" not found Jan 20 17:28:03 crc kubenswrapper[4558]: E0120 17:28:03.265814 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-kube-state-metrics-svc: secret "cert-kube-state-metrics-svc" not found Jan 20 17:28:03 crc kubenswrapper[4558]: E0120 17:28:03.265831 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/443f910c-dd2d-4c72-b861-f15de67ac6bb-kube-state-metrics-tls-certs podName:443f910c-dd2d-4c72-b861-f15de67ac6bb nodeName:}" failed. No retries permitted until 2026-01-20 17:28:04.265826984 +0000 UTC m=+2778.026164952 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-state-metrics-tls-certs" (UniqueName: "kubernetes.io/secret/443f910c-dd2d-4c72-b861-f15de67ac6bb-kube-state-metrics-tls-certs") pod "kube-state-metrics-0" (UID: "443f910c-dd2d-4c72-b861-f15de67ac6bb") : secret "cert-kube-state-metrics-svc" not found Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.272207 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.276988 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-f949-account-create-update-v2zng"] Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.282063 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-api-84969c976-hhcrx"] Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.282243 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-api-84969c976-hhcrx" podUID="184a9f2b-eab7-4221-a900-746e063662a8" containerName="barbican-api-log" containerID="cri-o://0c2544d6d08a38b60dfd9a1ca62a8c9460a6db9ba8a618726a1b8ad313afe1bf" gracePeriod=30 Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.282657 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-api-84969c976-hhcrx" podUID="184a9f2b-eab7-4221-a900-746e063662a8" containerName="barbican-api" containerID="cri-o://1d50c4159354efc26c8a538204ca688c7a0a5b8e2d0ac9d5aef208c1239af23d" gracePeriod=30 Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.289127 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.289390 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" podUID="73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://f2931b38008bccdd5209b99b03a91154a5b6e19f660a655b7d1fde4d2990be72" gracePeriod=30 Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.306548 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-dmtbg"] Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.311606 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-lgbfw" Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.317454 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-sb-0_7b6a0803-befe-4426-9e2a-a04ac12f2d7c/ovsdbserver-sb/0.log" Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.317529 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.318263 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-db-create-j7682"] Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.367307 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/a2068ccb-0c0d-4b32-9063-082a4c395070-metrics-certs-tls-certs\") pod \"a2068ccb-0c0d-4b32-9063-082a4c395070\" (UID: \"a2068ccb-0c0d-4b32-9063-082a4c395070\") " Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.367496 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4qbq\" (UniqueName: \"kubernetes.io/projected/a2068ccb-0c0d-4b32-9063-082a4c395070-kube-api-access-w4qbq\") pod \"a2068ccb-0c0d-4b32-9063-082a4c395070\" (UID: \"a2068ccb-0c0d-4b32-9063-082a4c395070\") " Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.367532 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a2068ccb-0c0d-4b32-9063-082a4c395070-config\") pod \"a2068ccb-0c0d-4b32-9063-082a4c395070\" (UID: \"a2068ccb-0c0d-4b32-9063-082a4c395070\") " Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.367560 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a2068ccb-0c0d-4b32-9063-082a4c395070-ovsdbserver-nb-tls-certs\") pod \"a2068ccb-0c0d-4b32-9063-082a4c395070\" (UID: \"a2068ccb-0c0d-4b32-9063-082a4c395070\") " Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.367687 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/a2068ccb-0c0d-4b32-9063-082a4c395070-ovsdb-rundir\") pod \"a2068ccb-0c0d-4b32-9063-082a4c395070\" (UID: \"a2068ccb-0c0d-4b32-9063-082a4c395070\") " Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.367718 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-nb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"a2068ccb-0c0d-4b32-9063-082a4c395070\" (UID: \"a2068ccb-0c0d-4b32-9063-082a4c395070\") " Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.367795 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2068ccb-0c0d-4b32-9063-082a4c395070-combined-ca-bundle\") pod \"a2068ccb-0c0d-4b32-9063-082a4c395070\" (UID: \"a2068ccb-0c0d-4b32-9063-082a4c395070\") " Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.367861 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a2068ccb-0c0d-4b32-9063-082a4c395070-scripts\") pod \"a2068ccb-0c0d-4b32-9063-082a4c395070\" (UID: \"a2068ccb-0c0d-4b32-9063-082a4c395070\") " Jan 20 17:28:03 crc kubenswrapper[4558]: E0120 
17:28:03.368830 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 20 17:28:03 crc kubenswrapper[4558]: E0120 17:28:03.368874 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/d692722b-f7fd-447c-8b7a-f56cff940d91-config-data podName:d692722b-f7fd-447c-8b7a-f56cff940d91 nodeName:}" failed. No retries permitted until 2026-01-20 17:28:07.368860786 +0000 UTC m=+2781.129198753 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/d692722b-f7fd-447c-8b7a-f56cff940d91-config-data") pod "rabbitmq-server-0" (UID: "d692722b-f7fd-447c-8b7a-f56cff940d91") : configmap "rabbitmq-config-data" not found Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.369361 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a2068ccb-0c0d-4b32-9063-082a4c395070-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "a2068ccb-0c0d-4b32-9063-082a4c395070" (UID: "a2068ccb-0c0d-4b32-9063-082a4c395070"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.369669 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a2068ccb-0c0d-4b32-9063-082a4c395070-scripts" (OuterVolumeSpecName: "scripts") pod "a2068ccb-0c0d-4b32-9063-082a4c395070" (UID: "a2068ccb-0c0d-4b32-9063-082a4c395070"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.371869 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a2068ccb-0c0d-4b32-9063-082a4c395070-config" (OuterVolumeSpecName: "config") pod "a2068ccb-0c0d-4b32-9063-082a4c395070" (UID: "a2068ccb-0c0d-4b32-9063-082a4c395070"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.375698 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a2068ccb-0c0d-4b32-9063-082a4c395070-kube-api-access-w4qbq" (OuterVolumeSpecName: "kube-api-access-w4qbq") pod "a2068ccb-0c0d-4b32-9063-082a4c395070" (UID: "a2068ccb-0c0d-4b32-9063-082a4c395070"). InnerVolumeSpecName "kube-api-access-w4qbq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.390339 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "ovndbcluster-nb-etc-ovn") pod "a2068ccb-0c0d-4b32-9063-082a4c395070" (UID: "a2068ccb-0c0d-4b32-9063-082a4c395070"). InnerVolumeSpecName "local-storage09-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.404246 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-db-sync-h5hmh"] Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.416227 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-db-sync-h5hmh"] Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.436626 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.436838 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="3973df1a-dd5a-417e-80a9-6653fb036470" containerName="nova-cell0-conductor-conductor" containerID="cri-o://6d33b2d5d8fe60d82930b4864fabfe53d6a679fc712a4ba67d60a4da40feed96" gracePeriod=30 Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.442711 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-db-sync-hbfss"] Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.445779 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-db-sync-hbfss"] Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.480677 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.480991 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podUID="88b73d7f-ee50-4fdf-9fd7-37296c855d69" containerName="nova-cell1-conductor-conductor" containerID="cri-o://a17a87967f20a77dfc9f8a5084d14c3b4b2b006c99d8e894a2ad7bcef9243b71" gracePeriod=30 Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.488911 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b6a0803-befe-4426-9e2a-a04ac12f2d7c-combined-ca-bundle\") pod \"7b6a0803-befe-4426-9e2a-a04ac12f2d7c\" (UID: \"7b6a0803-befe-4426-9e2a-a04ac12f2d7c\") " Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.489002 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/7b6a0803-befe-4426-9e2a-a04ac12f2d7c-ovsdb-rundir\") pod \"7b6a0803-befe-4426-9e2a-a04ac12f2d7c\" (UID: \"7b6a0803-befe-4426-9e2a-a04ac12f2d7c\") " Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.489096 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/7b6a0803-befe-4426-9e2a-a04ac12f2d7c-ovsdbserver-sb-tls-certs\") pod \"7b6a0803-befe-4426-9e2a-a04ac12f2d7c\" (UID: \"7b6a0803-befe-4426-9e2a-a04ac12f2d7c\") " Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.489223 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/7b6a0803-befe-4426-9e2a-a04ac12f2d7c-metrics-certs-tls-certs\") pod \"7b6a0803-befe-4426-9e2a-a04ac12f2d7c\" (UID: \"7b6a0803-befe-4426-9e2a-a04ac12f2d7c\") " Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.489253 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9pfd4\" (UniqueName: 
\"kubernetes.io/projected/7b6a0803-befe-4426-9e2a-a04ac12f2d7c-kube-api-access-9pfd4\") pod \"7b6a0803-befe-4426-9e2a-a04ac12f2d7c\" (UID: \"7b6a0803-befe-4426-9e2a-a04ac12f2d7c\") " Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.489377 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7b6a0803-befe-4426-9e2a-a04ac12f2d7c-scripts\") pod \"7b6a0803-befe-4426-9e2a-a04ac12f2d7c\" (UID: \"7b6a0803-befe-4426-9e2a-a04ac12f2d7c\") " Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.489441 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-sb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"7b6a0803-befe-4426-9e2a-a04ac12f2d7c\" (UID: \"7b6a0803-befe-4426-9e2a-a04ac12f2d7c\") " Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.489552 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7b6a0803-befe-4426-9e2a-a04ac12f2d7c-config\") pod \"7b6a0803-befe-4426-9e2a-a04ac12f2d7c\" (UID: \"7b6a0803-befe-4426-9e2a-a04ac12f2d7c\") " Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.494705 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7b6a0803-befe-4426-9e2a-a04ac12f2d7c-scripts" (OuterVolumeSpecName: "scripts") pod "7b6a0803-befe-4426-9e2a-a04ac12f2d7c" (UID: "7b6a0803-befe-4426-9e2a-a04ac12f2d7c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.503378 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7b6a0803-befe-4426-9e2a-a04ac12f2d7c-config" (OuterVolumeSpecName: "config") pod "7b6a0803-befe-4426-9e2a-a04ac12f2d7c" (UID: "7b6a0803-befe-4426-9e2a-a04ac12f2d7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.504919 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-65cd-account-create-update-grtts"] Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.505238 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7b6a0803-befe-4426-9e2a-a04ac12f2d7c-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "7b6a0803-befe-4426-9e2a-a04ac12f2d7c" (UID: "7b6a0803-befe-4426-9e2a-a04ac12f2d7c"). InnerVolumeSpecName "ovsdb-rundir". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.527271 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-db-create-rt2fh"] Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.531299 4558 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/a2068ccb-0c0d-4b32-9063-082a4c395070-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.531350 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.538351 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a2068ccb-0c0d-4b32-9063-082a4c395070-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.538488 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4qbq\" (UniqueName: \"kubernetes.io/projected/a2068ccb-0c0d-4b32-9063-082a4c395070-kube-api-access-w4qbq\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.538551 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a2068ccb-0c0d-4b32-9063-082a4c395070-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.564090 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "ovndbcluster-sb-etc-ovn") pod "7b6a0803-befe-4426-9e2a-a04ac12f2d7c" (UID: "7b6a0803-befe-4426-9e2a-a04ac12f2d7c"). InnerVolumeSpecName "local-storage05-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.564378 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7b6a0803-befe-4426-9e2a-a04ac12f2d7c-kube-api-access-9pfd4" (OuterVolumeSpecName: "kube-api-access-9pfd4") pod "7b6a0803-befe-4426-9e2a-a04ac12f2d7c" (UID: "7b6a0803-befe-4426-9e2a-a04ac12f2d7c"). InnerVolumeSpecName "kube-api-access-9pfd4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.576616 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-b136-account-create-update-n9qdv"] Jan 20 17:28:03 crc kubenswrapper[4558]: E0120 17:28:03.584676 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:28:03 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:28:03 crc kubenswrapper[4558]: Jan 20 17:28:03 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:28:03 crc kubenswrapper[4558]: Jan 20 17:28:03 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:28:03 crc kubenswrapper[4558]: Jan 20 17:28:03 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:28:03 crc kubenswrapper[4558]: Jan 20 17:28:03 crc kubenswrapper[4558]: if [ -n "nova_cell0" ]; then Jan 20 17:28:03 crc kubenswrapper[4558]: GRANT_DATABASE="nova_cell0" Jan 20 17:28:03 crc kubenswrapper[4558]: else Jan 20 17:28:03 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:28:03 crc kubenswrapper[4558]: fi Jan 20 17:28:03 crc kubenswrapper[4558]: Jan 20 17:28:03 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:28:03 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:28:03 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:28:03 crc kubenswrapper[4558]: # 3. create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:28:03 crc kubenswrapper[4558]: # support updates Jan 20 17:28:03 crc kubenswrapper[4558]: Jan 20 17:28:03 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:28:03 crc kubenswrapper[4558]: E0120 17:28:03.584995 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:28:03 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:28:03 crc kubenswrapper[4558]: Jan 20 17:28:03 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:28:03 crc kubenswrapper[4558]: Jan 20 17:28:03 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:28:03 crc kubenswrapper[4558]: Jan 20 17:28:03 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:28:03 crc kubenswrapper[4558]: Jan 20 17:28:03 crc kubenswrapper[4558]: if [ -n "nova_api" ]; then Jan 20 17:28:03 crc kubenswrapper[4558]: GRANT_DATABASE="nova_api" Jan 20 17:28:03 crc kubenswrapper[4558]: else Jan 20 17:28:03 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:28:03 crc kubenswrapper[4558]: fi Jan 20 17:28:03 crc kubenswrapper[4558]: Jan 20 17:28:03 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:28:03 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:28:03 crc kubenswrapper[4558]: # 2. 
MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:28:03 crc kubenswrapper[4558]: # 3. create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:28:03 crc kubenswrapper[4558]: # support updates Jan 20 17:28:03 crc kubenswrapper[4558]: Jan 20 17:28:03 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:28:03 crc kubenswrapper[4558]: E0120 17:28:03.586894 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"nova-cell0-db-secret\\\" not found\"" pod="openstack-kuttl-tests/nova-cell0-f949-account-create-update-v2zng" podUID="3ac6a4e2-150b-4137-b547-33cbdd1137b7" Jan 20 17:28:03 crc kubenswrapper[4558]: E0120 17:28:03.588440 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"nova-api-db-secret\\\" not found\"" pod="openstack-kuttl-tests/nova-api-a772-account-create-update-27cpc" podUID="e0d8717e-1693-4222-aebb-024d291cb0d5" Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.590400 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.590784 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="134fc5ef-70be-4c8c-aa72-e4f0440d0afe" containerName="ceilometer-central-agent" containerID="cri-o://8ac13b7e76f880d4b690e8245e9e454bb0f7d3d01a25e904b05a000a609f936a" gracePeriod=30 Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.591776 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="134fc5ef-70be-4c8c-aa72-e4f0440d0afe" containerName="sg-core" containerID="cri-o://99d707bce7e8a2bd68ad6ab0135f5762171740a9747fee9d19f3dc67af0b93db" gracePeriod=30 Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.591802 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="134fc5ef-70be-4c8c-aa72-e4f0440d0afe" containerName="proxy-httpd" containerID="cri-o://39bfb26312eaad0bd344b47e0fab7bc8a68e4339a058a5362c330669eb1593d1" gracePeriod=30 Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.591821 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="134fc5ef-70be-4c8c-aa72-e4f0440d0afe" containerName="ceilometer-notification-agent" containerID="cri-o://98bb94af3fa40dc3d1b7b4c99b5349d836b492c3cec3acd61d7cae45c5b34d12" gracePeriod=30 Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.618692 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a2068ccb-0c0d-4b32-9063-082a4c395070-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a2068ccb-0c0d-4b32-9063-082a4c395070" (UID: "a2068ccb-0c0d-4b32-9063-082a4c395070"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.620564 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.620631 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.641049 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b69d0136-9748-45bf-a9ed-d23e0180f1b4-operator-scripts\") pod \"nova-cell1-af2b-account-create-update-x6tw5\" (UID: \"b69d0136-9748-45bf-a9ed-d23e0180f1b4\") " pod="openstack-kuttl-tests/nova-cell1-af2b-account-create-update-x6tw5" Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.641120 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4j88m\" (UniqueName: \"kubernetes.io/projected/b69d0136-9748-45bf-a9ed-d23e0180f1b4-kube-api-access-4j88m\") pod \"nova-cell1-af2b-account-create-update-x6tw5\" (UID: \"b69d0136-9748-45bf-a9ed-d23e0180f1b4\") " pod="openstack-kuttl-tests/nova-cell1-af2b-account-create-update-x6tw5" Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.641198 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9pfd4\" (UniqueName: \"kubernetes.io/projected/7b6a0803-befe-4426-9e2a-a04ac12f2d7c-kube-api-access-9pfd4\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.641215 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7b6a0803-befe-4426-9e2a-a04ac12f2d7c-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.641234 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" " Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.641245 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.641255 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7b6a0803-befe-4426-9e2a-a04ac12f2d7c-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.641263 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2068ccb-0c0d-4b32-9063-082a4c395070-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.641272 4558 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/7b6a0803-befe-4426-9e2a-a04ac12f2d7c-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.644590 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-e949-account-create-update-qd847"] Jan 20 17:28:03 crc kubenswrapper[4558]: E0120 17:28:03.644691 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-cell1-scripts: configmap "openstack-cell1-scripts" not found Jan 20 
17:28:03 crc kubenswrapper[4558]: E0120 17:28:03.644739 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/c2bfe108-b710-400b-baac-55815b192ee3-operator-scripts podName:c2bfe108-b710-400b-baac-55815b192ee3 nodeName:}" failed. No retries permitted until 2026-01-20 17:28:04.64472354 +0000 UTC m=+2778.405061507 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/c2bfe108-b710-400b-baac-55815b192ee3-operator-scripts") pod "root-account-create-update-dmtbg" (UID: "c2bfe108-b710-400b-baac-55815b192ee3") : configmap "openstack-cell1-scripts" not found Jan 20 17:28:03 crc kubenswrapper[4558]: E0120 17:28:03.644783 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 17:28:03 crc kubenswrapper[4558]: E0120 17:28:03.644802 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/1bcd505c-4754-4f41-b91d-6e488a669c93-operator-scripts podName:1bcd505c-4754-4f41-b91d-6e488a669c93 nodeName:}" failed. No retries permitted until 2026-01-20 17:28:04.644796697 +0000 UTC m=+2778.405134664 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/1bcd505c-4754-4f41-b91d-6e488a669c93-operator-scripts") pod "neutron-b136-account-create-update-n9qdv" (UID: "1bcd505c-4754-4f41-b91d-6e488a669c93") : configmap "openstack-scripts" not found Jan 20 17:28:03 crc kubenswrapper[4558]: E0120 17:28:03.644827 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-cell1-scripts: configmap "openstack-cell1-scripts" not found Jan 20 17:28:03 crc kubenswrapper[4558]: E0120 17:28:03.644844 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/b69d0136-9748-45bf-a9ed-d23e0180f1b4-operator-scripts podName:b69d0136-9748-45bf-a9ed-d23e0180f1b4 nodeName:}" failed. No retries permitted until 2026-01-20 17:28:05.644839718 +0000 UTC m=+2779.405177686 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/b69d0136-9748-45bf-a9ed-d23e0180f1b4-operator-scripts") pod "nova-cell1-af2b-account-create-update-x6tw5" (UID: "b69d0136-9748-45bf-a9ed-d23e0180f1b4") : configmap "openstack-cell1-scripts" not found Jan 20 17:28:03 crc kubenswrapper[4558]: E0120 17:28:03.650594 4558 projected.go:194] Error preparing data for projected volume kube-api-access-4j88m for pod openstack-kuttl-tests/nova-cell1-af2b-account-create-update-x6tw5: failed to fetch token: serviceaccounts "galera-openstack-cell1" not found Jan 20 17:28:03 crc kubenswrapper[4558]: E0120 17:28:03.650658 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/b69d0136-9748-45bf-a9ed-d23e0180f1b4-kube-api-access-4j88m podName:b69d0136-9748-45bf-a9ed-d23e0180f1b4 nodeName:}" failed. No retries permitted until 2026-01-20 17:28:05.650642739 +0000 UTC m=+2779.410980695 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-4j88m" (UniqueName: "kubernetes.io/projected/b69d0136-9748-45bf-a9ed-d23e0180f1b4-kube-api-access-4j88m") pod "nova-cell1-af2b-account-create-update-x6tw5" (UID: "b69d0136-9748-45bf-a9ed-d23e0180f1b4") : failed to fetch token: serviceaccounts "galera-openstack-cell1" not found Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.653510 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-cell-mapping-4kxm5"] Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.661539 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-cell-mapping-4kxm5"] Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.665500 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc" Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.743723 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.754212 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-proxy-5f7fb7b-s2dxn"] Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.788147 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-proxy-7459567f99-gx6dr"] Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.788649 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-proxy-7459567f99-gx6dr" podUID="2bee9ff9-52bb-4dae-971c-8c6236a4e563" containerName="proxy-httpd" containerID="cri-o://95487ba24ea1b5e70725b0d792dbc8653b76f5c38eac7b039714b3d9fd0a7854" gracePeriod=30 Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.789132 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-proxy-7459567f99-gx6dr" podUID="2bee9ff9-52bb-4dae-971c-8c6236a4e563" containerName="proxy-server" containerID="cri-o://6a0521946798f9e4098ead7f6a1d8d950d13146e8392b370d0b39a8082a8515a" gracePeriod=30 Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.827135 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.827716 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/memcached-0" podUID="3460a75e-3553-47b9-bfc5-39c2c459826e" containerName="memcached" containerID="cri-o://c8a5e9bf9d8e1a8dad3f6f73cdf354545ce12575fe3b33bce155c23769a44f3e" gracePeriod=30 Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.843554 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-64c1-account-create-update-h6gct"] Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.846801 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-64c1-account-create-update-h6gct"] Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.848609 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa57dfc3-11b4-48af-a78c-00463e8894bf-combined-ca-bundle\") pod \"barbican-keystone-listener-8f4cc76c4-9qkpn\" (UID: \"fa57dfc3-11b4-48af-a78c-00463e8894bf\") " 
pod="openstack-kuttl-tests/barbican-keystone-listener-8f4cc76c4-9qkpn" Jan 20 17:28:03 crc kubenswrapper[4558]: E0120 17:28:03.848941 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:28:03 crc kubenswrapper[4558]: E0120 17:28:03.848991 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fa57dfc3-11b4-48af-a78c-00463e8894bf-combined-ca-bundle podName:fa57dfc3-11b4-48af-a78c-00463e8894bf nodeName:}" failed. No retries permitted until 2026-01-20 17:28:07.848978695 +0000 UTC m=+2781.609316662 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/fa57dfc3-11b4-48af-a78c-00463e8894bf-combined-ca-bundle") pod "barbican-keystone-listener-8f4cc76c4-9qkpn" (UID: "fa57dfc3-11b4-48af-a78c-00463e8894bf") : secret "combined-ca-bundle" not found Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.876195 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-db-create-c6v64"] Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.889209 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-db-create-c6v64"] Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.900057 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-64c1-account-create-update-wsq7z"] Jan 20 17:28:03 crc kubenswrapper[4558]: E0120 17:28:03.900941 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b6a0803-befe-4426-9e2a-a04ac12f2d7c" containerName="openstack-network-exporter" Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.900967 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b6a0803-befe-4426-9e2a-a04ac12f2d7c" containerName="openstack-network-exporter" Jan 20 17:28:03 crc kubenswrapper[4558]: E0120 17:28:03.900986 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b6a0803-befe-4426-9e2a-a04ac12f2d7c" containerName="ovsdbserver-sb" Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.900993 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b6a0803-befe-4426-9e2a-a04ac12f2d7c" containerName="ovsdbserver-sb" Jan 20 17:28:03 crc kubenswrapper[4558]: E0120 17:28:03.901019 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2068ccb-0c0d-4b32-9063-082a4c395070" containerName="ovsdbserver-nb" Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.901025 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2068ccb-0c0d-4b32-9063-082a4c395070" containerName="ovsdbserver-nb" Jan 20 17:28:03 crc kubenswrapper[4558]: E0120 17:28:03.901037 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2068ccb-0c0d-4b32-9063-082a4c395070" containerName="openstack-network-exporter" Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.901043 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2068ccb-0c0d-4b32-9063-082a4c395070" containerName="openstack-network-exporter" Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.901261 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a2068ccb-0c0d-4b32-9063-082a4c395070" containerName="openstack-network-exporter" Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.901287 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b6a0803-befe-4426-9e2a-a04ac12f2d7c" containerName="openstack-network-exporter" Jan 20 17:28:03 crc kubenswrapper[4558]: 
I0120 17:28:03.901299 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b6a0803-befe-4426-9e2a-a04ac12f2d7c" containerName="ovsdbserver-sb" Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.901310 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a2068ccb-0c0d-4b32-9063-082a4c395070" containerName="ovsdbserver-nb" Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.902068 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-64c1-account-create-update-wsq7z" Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.906424 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-db-secret" Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.924342 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-43e3-account-create-update-fdlrr"] Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.964917 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-64c1-account-create-update-wsq7z"] Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.975274 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-db-sync-xq5zh"] Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.979712 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-db-sync-xq5zh"] Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.987088 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-sb-0_7b6a0803-befe-4426-9e2a-a04ac12f2d7c/ovsdbserver-sb/0.log" Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.987223 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.987875 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-28nj8"] Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.987902 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"7b6a0803-befe-4426-9e2a-a04ac12f2d7c","Type":"ContainerDied","Data":"add1e8304daefdeb1ed3561478da2142c92e86dfa46d05eedbde1589c7a9ba74"} Jan 20 17:28:03 crc kubenswrapper[4558]: I0120 17:28:03.987934 4558 scope.go:117] "RemoveContainer" containerID="e14a2ace059c2e1c3be33ab3bdf7264642df45d603b1ef56bea3fa59cbb4787b" Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.001981 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-28nj8"] Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.007623 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-74494d9d6c-qv9st"] Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.007847 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/keystone-74494d9d6c-qv9st" podUID="7d72e21b-f0dd-48d4-9594-bb39aae6fa9d" containerName="keystone-api" containerID="cri-o://911340d3455407e7d7596ed292889c631409f00406037f4417caa85ad7dc54a6" gracePeriod=30 Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.010809 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-create-2kmst" event={"ID":"bfb2b456-ae0e-42ca-a227-428a626f1e3e","Type":"ContainerStarted","Data":"7ca25b484d83aa301dba93a3c721ced69437a083aca71309d616a6dc5b6b8f42"} Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.018474 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"8a111df9-8578-40bb-a672-b5d53305c873","Type":"ContainerDied","Data":"44c1fe01210b1e9e65d4468d7551bc7915ee5edc55a00157013003b1b37c9fd0"} Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.020192 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-a772-account-create-update-27cpc" event={"ID":"e0d8717e-1693-4222-aebb-024d291cb0d5","Type":"ContainerStarted","Data":"25792754d41cc5f42b33b0c612735ff2ff78d5fbc10c4077212bc9154f35d4e0"} Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.027524 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-nb-0_a2068ccb-0c0d-4b32-9063-082a4c395070/ovsdbserver-nb/0.log" Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.027596 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"a2068ccb-0c0d-4b32-9063-082a4c395070","Type":"ContainerDied","Data":"febfce8a80eac146400d05676d21dd60a93153f82cbc431cf75d6c5818ed6658"} Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.027679 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.033779 4558 generic.go:334] "Generic (PLEG): container finished" podID="e383e708-a471-4904-bfc7-eead6a5c76dc" containerID="fc05c1e78ad7a92e2b78d674adb5592b41e556323420c6070785f62676598f12" exitCode=137 Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.053428 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/012246ba-362c-48f3-bbb1-913d57f1f9ed-operator-scripts\") pod \"keystone-64c1-account-create-update-wsq7z\" (UID: \"012246ba-362c-48f3-bbb1-913d57f1f9ed\") " pod="openstack-kuttl-tests/keystone-64c1-account-create-update-wsq7z" Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.053776 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xfskp\" (UniqueName: \"kubernetes.io/projected/012246ba-362c-48f3-bbb1-913d57f1f9ed-kube-api-access-xfskp\") pod \"keystone-64c1-account-create-update-wsq7z\" (UID: \"012246ba-362c-48f3-bbb1-913d57f1f9ed\") " pod="openstack-kuttl-tests/keystone-64c1-account-create-update-wsq7z" Jan 20 17:28:04 crc kubenswrapper[4558]: E0120 17:28:04.054226 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Jan 20 17:28:04 crc kubenswrapper[4558]: E0120 17:28:04.054277 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/48cfc6e5-774d-4e7d-8103-f6a3260ea14c-config-data podName:48cfc6e5-774d-4e7d-8103-f6a3260ea14c nodeName:}" failed. No retries permitted until 2026-01-20 17:28:08.054262474 +0000 UTC m=+2781.814600441 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/48cfc6e5-774d-4e7d-8103-f6a3260ea14c-config-data") pod "rabbitmq-cell1-server-0" (UID: "48cfc6e5-774d-4e7d-8103-f6a3260ea14c") : configmap "rabbitmq-cell1-config-data" not found Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.054378 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-43e3-account-create-update-fdlrr" event={"ID":"7ce0109d-f162-4ce7-b957-34ba84e8e377","Type":"ContainerStarted","Data":"75d61c2e2226f7f863261986f80b02b7ed8ec3398ab59f519f4737c3e2faf21d"} Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.073187 4558 generic.go:334] "Generic (PLEG): container finished" podID="125597fc-d3b4-44c1-a0a6-7dccbaebe7dd" containerID="2c27863658ecd8a450f932a78dd0d64933a85bf75c1f865bb6dc9d092e3e4a29" exitCode=143 Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.073250 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-86d4665d84-cndx4" event={"ID":"125597fc-d3b4-44c1-a0a6-7dccbaebe7dd","Type":"ContainerDied","Data":"2c27863658ecd8a450f932a78dd0d64933a85bf75c1f865bb6dc9d092e3e4a29"} Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.080234 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-f949-account-create-update-v2zng" event={"ID":"3ac6a4e2-150b-4137-b547-33cbdd1137b7","Type":"ContainerStarted","Data":"9b296b8e490374239f893d021d6f999f502562a3fa2954ca8fa50525f672a8ea"} Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.084465 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/placement-db-create-j7682" podStartSLOduration=5.084449569 podStartE2EDuration="5.084449569s" podCreationTimestamp="2026-01-20 17:27:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:28:02.918621171 +0000 UTC m=+2776.678959139" watchObservedRunningTime="2026-01-20 17:28:04.084449569 +0000 UTC m=+2777.844787536" Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.084715 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-db-create-rztt2"] Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.086063 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-db-create-rztt2" Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.099589 4558 generic.go:334] "Generic (PLEG): container finished" podID="592927c6-9b42-411a-8aec-40cf6183e32a" containerID="f60fe058ea93021d1c1c3c3d2912da2aaabf83d63fd9e5d23a8a1c05335b6abd" exitCode=143 Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.099679 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"592927c6-9b42-411a-8aec-40cf6183e32a","Type":"ContainerDied","Data":"f60fe058ea93021d1c1c3c3d2912da2aaabf83d63fd9e5d23a8a1c05335b6abd"} Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.103363 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-64c1-account-create-update-wsq7z"] Jan 20 17:28:04 crc kubenswrapper[4558]: E0120 17:28:04.104498 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[kube-api-access-xfskp operator-scripts], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack-kuttl-tests/keystone-64c1-account-create-update-wsq7z" podUID="012246ba-362c-48f3-bbb1-913d57f1f9ed" Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.118045 4558 generic.go:334] "Generic (PLEG): container finished" podID="66e24f38-a98c-4444-8ee4-352266267985" containerID="2c9fa6b9561c4a984ecb954991ac4f52ef42ab0fbc3c9bb866972b9d6d85f164" exitCode=143 Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.118106 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-55d4bc664d-k82t9" event={"ID":"66e24f38-a98c-4444-8ee4-352266267985","Type":"ContainerDied","Data":"2c9fa6b9561c4a984ecb954991ac4f52ef42ab0fbc3c9bb866972b9d6d85f164"} Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.119409 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-db-create-rztt2"] Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.129745 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-create-pxxqs" event={"ID":"ed3c56f1-4bbb-4590-8b19-a0de467537ad","Type":"ContainerStarted","Data":"339ec500ad84a2e36b862d84cdd2a820a026b14a6aad6e3de83dfd5ca99a848f"} Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.144084 4558 generic.go:334] "Generic (PLEG): container finished" podID="cc4089c6-71ea-4503-b54c-18777fcc3c48" containerID="ddfb78eb686b10db6949511eb53f1d12111dd9665adfb113b40c64dcc8f80745" exitCode=0 Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.144106 4558 generic.go:334] "Generic (PLEG): container finished" podID="cc4089c6-71ea-4503-b54c-18777fcc3c48" containerID="ca2fe5f683448e1b2e71f1f58bff793b5e9a3b0d6f6f324c37f3cdcce1707435" exitCode=143 Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.144142 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"cc4089c6-71ea-4503-b54c-18777fcc3c48","Type":"ContainerDied","Data":"ddfb78eb686b10db6949511eb53f1d12111dd9665adfb113b40c64dcc8f80745"} Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.144182 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"cc4089c6-71ea-4503-b54c-18777fcc3c48","Type":"ContainerDied","Data":"ca2fe5f683448e1b2e71f1f58bff793b5e9a3b0d6f6f324c37f3cdcce1707435"} Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.152670 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-kuttl-tests/barbican-e949-account-create-update-qd847" event={"ID":"54457478-e4f4-4088-bd18-e427314e1fb2","Type":"ContainerStarted","Data":"c125669d0d4932ece229fc505a1e8ff7c92a49f2cc4899179e3895b79383b019"} Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.153227 4558 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openstack-kuttl-tests/barbican-e949-account-create-update-qd847" secret="" err="secret \"galera-openstack-dockercfg-82xg7\" not found" Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.155867 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xfskp\" (UniqueName: \"kubernetes.io/projected/012246ba-362c-48f3-bbb1-913d57f1f9ed-kube-api-access-xfskp\") pod \"keystone-64c1-account-create-update-wsq7z\" (UID: \"012246ba-362c-48f3-bbb1-913d57f1f9ed\") " pod="openstack-kuttl-tests/keystone-64c1-account-create-update-wsq7z" Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.157565 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/012246ba-362c-48f3-bbb1-913d57f1f9ed-operator-scripts\") pod \"keystone-64c1-account-create-update-wsq7z\" (UID: \"012246ba-362c-48f3-bbb1-913d57f1f9ed\") " pod="openstack-kuttl-tests/keystone-64c1-account-create-update-wsq7z" Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.157650 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9b28c41-c054-49f3-87d2-a8c15e6124de-combined-ca-bundle\") pod \"barbican-worker-74d54f756c-g9zzk\" (UID: \"b9b28c41-c054-49f3-87d2-a8c15e6124de\") " pod="openstack-kuttl-tests/barbican-worker-74d54f756c-g9zzk" Jan 20 17:28:04 crc kubenswrapper[4558]: E0120 17:28:04.158943 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 17:28:04 crc kubenswrapper[4558]: E0120 17:28:04.158992 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/012246ba-362c-48f3-bbb1-913d57f1f9ed-operator-scripts podName:012246ba-362c-48f3-bbb1-913d57f1f9ed nodeName:}" failed. No retries permitted until 2026-01-20 17:28:04.658975673 +0000 UTC m=+2778.419313640 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/012246ba-362c-48f3-bbb1-913d57f1f9ed-operator-scripts") pod "keystone-64c1-account-create-update-wsq7z" (UID: "012246ba-362c-48f3-bbb1-913d57f1f9ed") : configmap "openstack-scripts" not found Jan 20 17:28:04 crc kubenswrapper[4558]: E0120 17:28:04.159116 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:28:04 crc kubenswrapper[4558]: E0120 17:28:04.159143 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b9b28c41-c054-49f3-87d2-a8c15e6124de-combined-ca-bundle podName:b9b28c41-c054-49f3-87d2-a8c15e6124de nodeName:}" failed. No retries permitted until 2026-01-20 17:28:08.159136245 +0000 UTC m=+2781.919474212 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/b9b28c41-c054-49f3-87d2-a8c15e6124de-combined-ca-bundle") pod "barbican-worker-74d54f756c-g9zzk" (UID: "b9b28c41-c054-49f3-87d2-a8c15e6124de") : secret "combined-ca-bundle" not found Jan 20 17:28:04 crc kubenswrapper[4558]: E0120 17:28:04.167534 4558 projected.go:194] Error preparing data for projected volume kube-api-access-xfskp for pod openstack-kuttl-tests/keystone-64c1-account-create-update-wsq7z: failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 17:28:04 crc kubenswrapper[4558]: E0120 17:28:04.167574 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/012246ba-362c-48f3-bbb1-913d57f1f9ed-kube-api-access-xfskp podName:012246ba-362c-48f3-bbb1-913d57f1f9ed nodeName:}" failed. No retries permitted until 2026-01-20 17:28:04.667563609 +0000 UTC m=+2778.427901576 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-xfskp" (UniqueName: "kubernetes.io/projected/012246ba-362c-48f3-bbb1-913d57f1f9ed-kube-api-access-xfskp") pod "keystone-64c1-account-create-update-wsq7z" (UID: "012246ba-362c-48f3-bbb1-913d57f1f9ed") : failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.167915 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-db-create-rztt2"] Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.173994 4558 generic.go:334] "Generic (PLEG): container finished" podID="184a9f2b-eab7-4221-a900-746e063662a8" containerID="0c2544d6d08a38b60dfd9a1ca62a8c9460a6db9ba8a618726a1b8ad313afe1bf" exitCode=143 Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.174066 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-84969c976-hhcrx" event={"ID":"184a9f2b-eab7-4221-a900-746e063662a8","Type":"ContainerDied","Data":"0c2544d6d08a38b60dfd9a1ca62a8c9460a6db9ba8a618726a1b8ad313afe1bf"} Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.188450 4558 generic.go:334] "Generic (PLEG): container finished" podID="6dc46db7-9948-4c7d-b0a3-c767ffd58b4a" containerID="2b441621415d66b4dd582cca5044009b53d19bc346f62f7de3d1c998664de90b" exitCode=0 Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.188510 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-665b4c9c8d-9tcx6" event={"ID":"6dc46db7-9948-4c7d-b0a3-c767ffd58b4a","Type":"ContainerDied","Data":"2b441621415d66b4dd582cca5044009b53d19bc346f62f7de3d1c998664de90b"} Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.192673 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.195447 4558 generic.go:334] "Generic (PLEG): container finished" podID="4430d3cb-0a1c-4266-9f69-cf8e817f1d3e" containerID="01f067cab301938980d9e1ee9c68462c017ce912a5f70f653badf838dae20a53" exitCode=143 Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.195506 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"4430d3cb-0a1c-4266-9f69-cf8e817f1d3e","Type":"ContainerDied","Data":"01f067cab301938980d9e1ee9c68462c017ce912a5f70f653badf838dae20a53"} Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.203748 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-db-create-2kmst"] Jan 20 17:28:04 crc 
kubenswrapper[4558]: I0120 17:28:04.208410 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-db-create-pxxqs"] Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.225418 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-f949-account-create-update-v2zng"] Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.227175 4558 generic.go:334] "Generic (PLEG): container finished" podID="134fc5ef-70be-4c8c-aa72-e4f0440d0afe" containerID="99d707bce7e8a2bd68ad6ab0135f5762171740a9747fee9d19f3dc67af0b93db" exitCode=2 Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.227228 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"134fc5ef-70be-4c8c-aa72-e4f0440d0afe","Type":"ContainerDied","Data":"99d707bce7e8a2bd68ad6ab0135f5762171740a9747fee9d19f3dc67af0b93db"} Jan 20 17:28:04 crc kubenswrapper[4558]: E0120 17:28:04.230564 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:28:04 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:28:04 crc kubenswrapper[4558]: Jan 20 17:28:04 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:28:04 crc kubenswrapper[4558]: Jan 20 17:28:04 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:28:04 crc kubenswrapper[4558]: Jan 20 17:28:04 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:28:04 crc kubenswrapper[4558]: Jan 20 17:28:04 crc kubenswrapper[4558]: if [ -n "barbican" ]; then Jan 20 17:28:04 crc kubenswrapper[4558]: GRANT_DATABASE="barbican" Jan 20 17:28:04 crc kubenswrapper[4558]: else Jan 20 17:28:04 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:28:04 crc kubenswrapper[4558]: fi Jan 20 17:28:04 crc kubenswrapper[4558]: Jan 20 17:28:04 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:28:04 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:28:04 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:28:04 crc kubenswrapper[4558]: # 3. 
create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:28:04 crc kubenswrapper[4558]: # support updates Jan 20 17:28:04 crc kubenswrapper[4558]: Jan 20 17:28:04 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.231814 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-a772-account-create-update-27cpc"] Jan 20 17:28:04 crc kubenswrapper[4558]: E0120 17:28:04.234029 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"barbican-db-secret\\\" not found\"" pod="openstack-kuttl-tests/barbican-e949-account-create-update-qd847" podUID="54457478-e4f4-4088-bd18-e427314e1fb2" Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.234236 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.240040 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.265450 4558 generic.go:334] "Generic (PLEG): container finished" podID="df28560c-92e6-4003-a08f-b8691fa43300" containerID="405eba50b6723c4bbfd326989331668cadd5d5aa3aa6d3f082fbf7d791dda0f0" exitCode=0 Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.265518 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-create-j7682" event={"ID":"df28560c-92e6-4003-a08f-b8691fa43300","Type":"ContainerDied","Data":"405eba50b6723c4bbfd326989331668cadd5d5aa3aa6d3f082fbf7d791dda0f0"} Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.280673 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b6a0803-befe-4426-9e2a-a04ac12f2d7c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7b6a0803-befe-4426-9e2a-a04ac12f2d7c" (UID: "7b6a0803-befe-4426-9e2a-a04ac12f2d7c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.305097 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tp4lp\" (UniqueName: \"kubernetes.io/projected/c67fda0a-394c-424a-ad72-f0e3ebd77f1b-kube-api-access-tp4lp\") pod \"keystone-db-create-rztt2\" (UID: \"c67fda0a-394c-424a-ad72-f0e3ebd77f1b\") " pod="openstack-kuttl-tests/keystone-db-create-rztt2" Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.305481 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c67fda0a-394c-424a-ad72-f0e3ebd77f1b-operator-scripts\") pod \"keystone-db-create-rztt2\" (UID: \"c67fda0a-394c-424a-ad72-f0e3ebd77f1b\") " pod="openstack-kuttl-tests/keystone-db-create-rztt2" Jan 20 17:28:04 crc kubenswrapper[4558]: E0120 17:28:04.305780 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-kube-state-metrics-svc: secret "cert-kube-state-metrics-svc" not found Jan 20 17:28:04 crc kubenswrapper[4558]: E0120 17:28:04.305835 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/443f910c-dd2d-4c72-b861-f15de67ac6bb-kube-state-metrics-tls-certs podName:443f910c-dd2d-4c72-b861-f15de67ac6bb nodeName:}" failed. 
No retries permitted until 2026-01-20 17:28:06.30581907 +0000 UTC m=+2780.066157037 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-state-metrics-tls-certs" (UniqueName: "kubernetes.io/secret/443f910c-dd2d-4c72-b861-f15de67ac6bb-kube-state-metrics-tls-certs") pod "kube-state-metrics-0" (UID: "443f910c-dd2d-4c72-b861-f15de67ac6bb") : secret "cert-kube-state-metrics-svc" not found Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.306467 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7b6a0803-befe-4426-9e2a-a04ac12f2d7c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:04 crc kubenswrapper[4558]: E0120 17:28:04.306544 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/kube-state-metrics-tls-config: secret "kube-state-metrics-tls-config" not found Jan 20 17:28:04 crc kubenswrapper[4558]: E0120 17:28:04.306583 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/443f910c-dd2d-4c72-b861-f15de67ac6bb-kube-state-metrics-tls-config podName:443f910c-dd2d-4c72-b861-f15de67ac6bb nodeName:}" failed. No retries permitted until 2026-01-20 17:28:06.306568289 +0000 UTC m=+2780.066906245 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-state-metrics-tls-config" (UniqueName: "kubernetes.io/secret/443f910c-dd2d-4c72-b861-f15de67ac6bb-kube-state-metrics-tls-config") pod "kube-state-metrics-0" (UID: "443f910c-dd2d-4c72-b861-f15de67ac6bb") : secret "kube-state-metrics-tls-config" not found Jan 20 17:28:04 crc kubenswrapper[4558]: E0120 17:28:04.306640 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 17:28:04 crc kubenswrapper[4558]: E0120 17:28:04.306666 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/54457478-e4f4-4088-bd18-e427314e1fb2-operator-scripts podName:54457478-e4f4-4088-bd18-e427314e1fb2 nodeName:}" failed. No retries permitted until 2026-01-20 17:28:04.806656875 +0000 UTC m=+2778.566994832 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/54457478-e4f4-4088-bd18-e427314e1fb2-operator-scripts") pod "barbican-e949-account-create-update-qd847" (UID: "54457478-e4f4-4088-bd18-e427314e1fb2") : configmap "openstack-scripts" not found Jan 20 17:28:04 crc kubenswrapper[4558]: E0120 17:28:04.306704 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:28:04 crc kubenswrapper[4558]: E0120 17:28:04.306727 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/443f910c-dd2d-4c72-b861-f15de67ac6bb-combined-ca-bundle podName:443f910c-dd2d-4c72-b861-f15de67ac6bb nodeName:}" failed. No retries permitted until 2026-01-20 17:28:06.306720525 +0000 UTC m=+2780.067058492 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/443f910c-dd2d-4c72-b861-f15de67ac6bb-combined-ca-bundle") pod "kube-state-metrics-0" (UID: "443f910c-dd2d-4c72-b861-f15de67ac6bb") : secret "combined-ca-bundle" not found Jan 20 17:28:04 crc kubenswrapper[4558]: E0120 17:28:04.307223 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:28:04 crc kubenswrapper[4558]: E0120 17:28:04.307265 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cc4089c6-71ea-4503-b54c-18777fcc3c48-combined-ca-bundle podName:cc4089c6-71ea-4503-b54c-18777fcc3c48 nodeName:}" failed. No retries permitted until 2026-01-20 17:28:08.307245842 +0000 UTC m=+2782.067583810 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/cc4089c6-71ea-4503-b54c-18777fcc3c48-combined-ca-bundle") pod "nova-metadata-0" (UID: "cc4089c6-71ea-4503-b54c-18777fcc3c48") : secret "combined-ca-bundle" not found Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.364449 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-lgbfw"] Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.392460 4558 generic.go:334] "Generic (PLEG): container finished" podID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" containerID="2953f93c5ea7797971404b822588bdd28383773e127010c836cbd6ce7a1b5450" exitCode=0 Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.392514 4558 generic.go:334] "Generic (PLEG): container finished" podID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" containerID="33faf2c0a93c93d45813868ee43337a14e962791dad79303fe6a9a0671ec791e" exitCode=0 Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.392527 4558 generic.go:334] "Generic (PLEG): container finished" podID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" containerID="36e479ddabc13ddf3cd61164acd11174c752edf6961acec9ae67d8951043aee5" exitCode=0 Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.392539 4558 generic.go:334] "Generic (PLEG): container finished" podID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" containerID="5abed863186c32fde3a40dcffb538fd7112acbbdfd253d93751ff00096a299c5" exitCode=0 Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.392546 4558 generic.go:334] "Generic (PLEG): container finished" podID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" containerID="d700282b5693986cac00fa3aff076f819077ba4d7e9191db243d0f5c076b2c4d" exitCode=0 Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.392555 4558 generic.go:334] "Generic (PLEG): container finished" podID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" containerID="4a6334c4fa8ffc04d0aaf20255ac8f9132d2e26680c4b1f11d017cfb5b777eec" exitCode=0 Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.392566 4558 generic.go:334] "Generic (PLEG): container finished" podID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" containerID="8c8b5915d3326030a79e4f528fb27f011fc914ddc227bbc37139ba81a55e7e83" exitCode=0 Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.392576 4558 generic.go:334] "Generic (PLEG): container finished" podID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" containerID="703117c0c32a1ceba745d2e72e8de7d8ffea3ca4a98e3434eda2bde5c61b0d36" exitCode=0 Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.392586 4558 generic.go:334] "Generic (PLEG): container finished" podID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" 
containerID="19bf1b2c84ff2748887d3f5954817882ace006a585e9b1cf3ff55c2286de632f" exitCode=0 Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.392597 4558 generic.go:334] "Generic (PLEG): container finished" podID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" containerID="70b3bec12ba85f03f2bb9c09bdc640f03db124a8b41fd0886ea7b738171db315" exitCode=0 Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.392608 4558 generic.go:334] "Generic (PLEG): container finished" podID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" containerID="b9e271e5199c2c5986711fa373c13ad8aade0056ea94afaa4c8bdfa52c131009" exitCode=0 Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.392618 4558 generic.go:334] "Generic (PLEG): container finished" podID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" containerID="7355d501842ad1b14019ba4bbb3365f8da9a6587b24e3a344f6aeb0cbde4a6bd" exitCode=0 Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.392623 4558 generic.go:334] "Generic (PLEG): container finished" podID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" containerID="2ee2e3e7ebe2d7413bfe073e0a35c416b50099cfb7c3db764047c0da6077f92e" exitCode=0 Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.392631 4558 generic.go:334] "Generic (PLEG): container finished" podID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" containerID="8ba1d4c7037c9af7af4ff6c69fdad67f28d557367c30a255c97945adb2a5670d" exitCode=0 Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.392700 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"8804f9f1-60d4-4fa1-9333-9f9e01b3c68a","Type":"ContainerDied","Data":"2953f93c5ea7797971404b822588bdd28383773e127010c836cbd6ce7a1b5450"} Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.392736 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"8804f9f1-60d4-4fa1-9333-9f9e01b3c68a","Type":"ContainerDied","Data":"33faf2c0a93c93d45813868ee43337a14e962791dad79303fe6a9a0671ec791e"} Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.392747 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"8804f9f1-60d4-4fa1-9333-9f9e01b3c68a","Type":"ContainerDied","Data":"36e479ddabc13ddf3cd61164acd11174c752edf6961acec9ae67d8951043aee5"} Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.392758 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"8804f9f1-60d4-4fa1-9333-9f9e01b3c68a","Type":"ContainerDied","Data":"5abed863186c32fde3a40dcffb538fd7112acbbdfd253d93751ff00096a299c5"} Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.392767 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"8804f9f1-60d4-4fa1-9333-9f9e01b3c68a","Type":"ContainerDied","Data":"d700282b5693986cac00fa3aff076f819077ba4d7e9191db243d0f5c076b2c4d"} Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.392790 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"8804f9f1-60d4-4fa1-9333-9f9e01b3c68a","Type":"ContainerDied","Data":"4a6334c4fa8ffc04d0aaf20255ac8f9132d2e26680c4b1f11d017cfb5b777eec"} Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.392798 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"8804f9f1-60d4-4fa1-9333-9f9e01b3c68a","Type":"ContainerDied","Data":"8c8b5915d3326030a79e4f528fb27f011fc914ddc227bbc37139ba81a55e7e83"} Jan 20 
17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.392806 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"8804f9f1-60d4-4fa1-9333-9f9e01b3c68a","Type":"ContainerDied","Data":"703117c0c32a1ceba745d2e72e8de7d8ffea3ca4a98e3434eda2bde5c61b0d36"} Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.392814 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"8804f9f1-60d4-4fa1-9333-9f9e01b3c68a","Type":"ContainerDied","Data":"19bf1b2c84ff2748887d3f5954817882ace006a585e9b1cf3ff55c2286de632f"} Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.392825 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"8804f9f1-60d4-4fa1-9333-9f9e01b3c68a","Type":"ContainerDied","Data":"70b3bec12ba85f03f2bb9c09bdc640f03db124a8b41fd0886ea7b738171db315"} Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.392835 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"8804f9f1-60d4-4fa1-9333-9f9e01b3c68a","Type":"ContainerDied","Data":"b9e271e5199c2c5986711fa373c13ad8aade0056ea94afaa4c8bdfa52c131009"} Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.392844 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"8804f9f1-60d4-4fa1-9333-9f9e01b3c68a","Type":"ContainerDied","Data":"7355d501842ad1b14019ba4bbb3365f8da9a6587b24e3a344f6aeb0cbde4a6bd"} Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.392852 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"8804f9f1-60d4-4fa1-9333-9f9e01b3c68a","Type":"ContainerDied","Data":"2ee2e3e7ebe2d7413bfe073e0a35c416b50099cfb7c3db764047c0da6077f92e"} Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.392860 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"8804f9f1-60d4-4fa1-9333-9f9e01b3c68a","Type":"ContainerDied","Data":"8ba1d4c7037c9af7af4ff6c69fdad67f28d557367c30a255c97945adb2a5670d"} Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.398933 4558 generic.go:334] "Generic (PLEG): container finished" podID="e81d2508-e298-463c-9031-b9d8e486d566" containerID="d2afe7d8e958d845dee5152deac97265bbf55f309d653220c753d06c2403fd6e" exitCode=143 Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.398983 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"e81d2508-e298-463c-9031-b9d8e486d566","Type":"ContainerDied","Data":"d2afe7d8e958d845dee5152deac97265bbf55f309d653220c753d06c2403fd6e"} Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.402911 4558 generic.go:334] "Generic (PLEG): container finished" podID="3b9858cf-2020-458e-bcf6-407c6853a962" containerID="b4c792c97b8f3884770e6abef851a252b3788722ba9e9f419f4552e68ff9bd85" exitCode=143 Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.402984 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-5cbf796c47-9gdgl" event={"ID":"3b9858cf-2020-458e-bcf6-407c6853a962","Type":"ContainerDied","Data":"b4c792c97b8f3884770e6abef851a252b3788722ba9e9f419f4552e68ff9bd85"} Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.413682 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/c67fda0a-394c-424a-ad72-f0e3ebd77f1b-operator-scripts\") pod \"keystone-db-create-rztt2\" (UID: \"c67fda0a-394c-424a-ad72-f0e3ebd77f1b\") " pod="openstack-kuttl-tests/keystone-db-create-rztt2" Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.413859 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tp4lp\" (UniqueName: \"kubernetes.io/projected/c67fda0a-394c-424a-ad72-f0e3ebd77f1b-kube-api-access-tp4lp\") pod \"keystone-db-create-rztt2\" (UID: \"c67fda0a-394c-424a-ad72-f0e3ebd77f1b\") " pod="openstack-kuttl-tests/keystone-db-create-rztt2" Jan 20 17:28:04 crc kubenswrapper[4558]: E0120 17:28:04.414195 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 17:28:04 crc kubenswrapper[4558]: E0120 17:28:04.414236 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/c67fda0a-394c-424a-ad72-f0e3ebd77f1b-operator-scripts podName:c67fda0a-394c-424a-ad72-f0e3ebd77f1b nodeName:}" failed. No retries permitted until 2026-01-20 17:28:04.914222938 +0000 UTC m=+2778.674560905 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/c67fda0a-394c-424a-ad72-f0e3ebd77f1b-operator-scripts") pod "keystone-db-create-rztt2" (UID: "c67fda0a-394c-424a-ad72-f0e3ebd77f1b") : configmap "openstack-scripts" not found Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.415266 4558 scope.go:117] "RemoveContainer" containerID="dd2c0dcba96a9cb823d300977fee811cbbd5c8ce5dca2787726542941426f5c2" Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.433276 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/openstack-cell1-galera-0" podUID="f81a00ad-36ce-4383-9e91-fe60de6939d2" containerName="galera" containerID="cri-o://77c882047f115c6a4c56a4e460c8836ce62f0179639cdd46fd3f9af7f7edbd88" gracePeriod=29 Jan 20 17:28:04 crc kubenswrapper[4558]: E0120 17:28:04.433593 4558 projected.go:194] Error preparing data for projected volume kube-api-access-tp4lp for pod openstack-kuttl-tests/keystone-db-create-rztt2: failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 17:28:04 crc kubenswrapper[4558]: E0120 17:28:04.433663 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/c67fda0a-394c-424a-ad72-f0e3ebd77f1b-kube-api-access-tp4lp podName:c67fda0a-394c-424a-ad72-f0e3ebd77f1b nodeName:}" failed. No retries permitted until 2026-01-20 17:28:04.933641476 +0000 UTC m=+2778.693979443 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-tp4lp" (UniqueName: "kubernetes.io/projected/c67fda0a-394c-424a-ad72-f0e3ebd77f1b-kube-api-access-tp4lp") pod "keystone-db-create-rztt2" (UID: "c67fda0a-394c-424a-ad72-f0e3ebd77f1b") : failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.444641 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-create-rt2fh" event={"ID":"1b7c6fb3-f30d-4dde-8502-e7d840719520","Type":"ContainerStarted","Data":"a986efaf418f6a3e2abd8d9f06b0fe8df99833bf6b1e6f9c131ce1990c94b418"} Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.445389 4558 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." 
pod="openstack-kuttl-tests/glance-db-create-rt2fh" secret="" err="secret \"galera-openstack-dockercfg-82xg7\" not found" Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.485186 4558 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openstack-kuttl-tests/neutron-b136-account-create-update-n9qdv" secret="" err="secret \"galera-openstack-dockercfg-82xg7\" not found" Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.486063 4558 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openstack-kuttl-tests/swift-proxy-5f7fb7b-s2dxn" secret="" err="secret \"swift-swift-dockercfg-vgvcg\" not found" Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.487443 4558 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openstack-kuttl-tests/cinder-scheduler-0" secret="" err="secret \"cinder-cinder-dockercfg-msjfn\" not found" Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.487574 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-74d54f756c-g9zzk" Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.487662 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-5f7fb7b-s2dxn" event={"ID":"4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5","Type":"ContainerStarted","Data":"5a07cb607c001f9ab3a834dea57d84934f1fe9dff82a41e28812e0217fe09cff"} Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.487758 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-af2b-account-create-update-x6tw5" Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.487825 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/kube-state-metrics-0" podUID="443f910c-dd2d-4c72-b861-f15de67ac6bb" containerName="kube-state-metrics" containerID="cri-o://ea8201fc9fae9612c7677cb7b1dbff6d7938a87582e7ecc4e74f7875b23122ec" gracePeriod=30 Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.487862 4558 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openstack-kuttl-tests/glance-65cd-account-create-update-grtts" secret="" err="secret \"galera-openstack-dockercfg-82xg7\" not found" Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.487902 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-8f4cc76c4-9qkpn" Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.487955 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/swift-proxy-5f7fb7b-s2dxn" Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.487967 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/swift-proxy-5f7fb7b-s2dxn" Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.488732 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-db-create-rt2fh" podStartSLOduration=4.488709779 podStartE2EDuration="4.488709779s" podCreationTimestamp="2026-01-20 17:28:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:28:04.475020933 +0000 UTC m=+2778.235358900" watchObservedRunningTime="2026-01-20 17:28:04.488709779 +0000 UTC m=+2778.249047736" Jan 20 17:28:04 crc kubenswrapper[4558]: E0120 17:28:04.507578 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:28:04 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:28:04 crc kubenswrapper[4558]: Jan 20 17:28:04 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:28:04 crc kubenswrapper[4558]: Jan 20 17:28:04 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:28:04 crc kubenswrapper[4558]: Jan 20 17:28:04 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:28:04 crc kubenswrapper[4558]: Jan 20 17:28:04 crc kubenswrapper[4558]: if [ -n "glance" ]; then Jan 20 17:28:04 crc kubenswrapper[4558]: GRANT_DATABASE="glance" Jan 20 17:28:04 crc kubenswrapper[4558]: else Jan 20 17:28:04 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:28:04 crc kubenswrapper[4558]: fi Jan 20 17:28:04 crc kubenswrapper[4558]: Jan 20 17:28:04 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:28:04 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:28:04 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:28:04 crc kubenswrapper[4558]: # 3. 
create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:28:04 crc kubenswrapper[4558]: # support updates Jan 20 17:28:04 crc kubenswrapper[4558]: Jan 20 17:28:04 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:28:04 crc kubenswrapper[4558]: E0120 17:28:04.509018 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"glance-db-secret\\\" not found\"" pod="openstack-kuttl-tests/glance-65cd-account-create-update-grtts" podUID="5f7a0ad9-436f-433c-9a91-cec4ffd3beeb" Jan 20 17:28:04 crc kubenswrapper[4558]: E0120 17:28:04.532830 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:28:04 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:28:04 crc kubenswrapper[4558]: Jan 20 17:28:04 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:28:04 crc kubenswrapper[4558]: Jan 20 17:28:04 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:28:04 crc kubenswrapper[4558]: Jan 20 17:28:04 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:28:04 crc kubenswrapper[4558]: Jan 20 17:28:04 crc kubenswrapper[4558]: if [ -n "neutron" ]; then Jan 20 17:28:04 crc kubenswrapper[4558]: GRANT_DATABASE="neutron" Jan 20 17:28:04 crc kubenswrapper[4558]: else Jan 20 17:28:04 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:28:04 crc kubenswrapper[4558]: fi Jan 20 17:28:04 crc kubenswrapper[4558]: Jan 20 17:28:04 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:28:04 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:28:04 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:28:04 crc kubenswrapper[4558]: # 3. create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:28:04 crc kubenswrapper[4558]: # support updates Jan 20 17:28:04 crc kubenswrapper[4558]: Jan 20 17:28:04 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:28:04 crc kubenswrapper[4558]: E0120 17:28:04.537047 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 17:28:04 crc kubenswrapper[4558]: E0120 17:28:04.537096 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/1b7c6fb3-f30d-4dde-8502-e7d840719520-operator-scripts podName:1b7c6fb3-f30d-4dde-8502-e7d840719520 nodeName:}" failed. No retries permitted until 2026-01-20 17:28:05.03708054 +0000 UTC m=+2778.797418506 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/1b7c6fb3-f30d-4dde-8502-e7d840719520-operator-scripts") pod "glance-db-create-rt2fh" (UID: "1b7c6fb3-f30d-4dde-8502-e7d840719520") : configmap "openstack-scripts" not found Jan 20 17:28:04 crc kubenswrapper[4558]: E0120 17:28:04.537144 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:28:04 crc kubenswrapper[4558]: E0120 17:28:04.537184 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-combined-ca-bundle podName:4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5 nodeName:}" failed. No retries permitted until 2026-01-20 17:28:05.037177132 +0000 UTC m=+2778.797515099 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-combined-ca-bundle") pod "swift-proxy-5f7fb7b-s2dxn" (UID: "4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5") : secret "combined-ca-bundle" not found Jan 20 17:28:04 crc kubenswrapper[4558]: E0120 17:28:04.537218 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-swift-internal-svc: secret "cert-swift-internal-svc" not found Jan 20 17:28:04 crc kubenswrapper[4558]: E0120 17:28:04.537236 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-internal-tls-certs podName:4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5 nodeName:}" failed. No retries permitted until 2026-01-20 17:28:05.037230582 +0000 UTC m=+2778.797568549 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-internal-tls-certs") pod "swift-proxy-5f7fb7b-s2dxn" (UID: "4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5") : secret "cert-swift-internal-svc" not found Jan 20 17:28:04 crc kubenswrapper[4558]: E0120 17:28:04.537688 4558 projected.go:263] Couldn't get secret openstack-kuttl-tests/swift-proxy-config-data: secret "swift-proxy-config-data" not found Jan 20 17:28:04 crc kubenswrapper[4558]: E0120 17:28:04.537699 4558 projected.go:263] Couldn't get secret openstack-kuttl-tests/swift-conf: secret "swift-conf" not found Jan 20 17:28:04 crc kubenswrapper[4558]: E0120 17:28:04.537711 4558 projected.go:288] Couldn't get configMap openstack-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 20 17:28:04 crc kubenswrapper[4558]: E0120 17:28:04.537725 4558 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack-kuttl-tests/swift-proxy-5f7fb7b-s2dxn: [secret "swift-proxy-config-data" not found, secret "swift-conf" not found, configmap "swift-ring-files" not found] Jan 20 17:28:04 crc kubenswrapper[4558]: E0120 17:28:04.537749 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-etc-swift podName:4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5 nodeName:}" failed. No retries permitted until 2026-01-20 17:28:05.037741692 +0000 UTC m=+2778.798079659 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-etc-swift") pod "swift-proxy-5f7fb7b-s2dxn" (UID: "4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5") : [secret "swift-proxy-config-data" not found, secret "swift-conf" not found, configmap "swift-ring-files" not found] Jan 20 17:28:04 crc kubenswrapper[4558]: E0120 17:28:04.537790 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 17:28:04 crc kubenswrapper[4558]: E0120 17:28:04.537808 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5f7a0ad9-436f-433c-9a91-cec4ffd3beeb-operator-scripts podName:5f7a0ad9-436f-433c-9a91-cec4ffd3beeb nodeName:}" failed. No retries permitted until 2026-01-20 17:28:05.037802727 +0000 UTC m=+2778.798140695 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/5f7a0ad9-436f-433c-9a91-cec4ffd3beeb-operator-scripts") pod "glance-65cd-account-create-update-grtts" (UID: "5f7a0ad9-436f-433c-9a91-cec4ffd3beeb") : configmap "openstack-scripts" not found Jan 20 17:28:04 crc kubenswrapper[4558]: E0120 17:28:04.538495 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/swift-proxy-config-data: secret "swift-proxy-config-data" not found Jan 20 17:28:04 crc kubenswrapper[4558]: E0120 17:28:04.538519 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-config-data podName:4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5 nodeName:}" failed. No retries permitted until 2026-01-20 17:28:05.038513142 +0000 UTC m=+2778.798851110 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-config-data") pod "swift-proxy-5f7fb7b-s2dxn" (UID: "4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5") : secret "swift-proxy-config-data" not found Jan 20 17:28:04 crc kubenswrapper[4558]: E0120 17:28:04.539136 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-swift-public-svc: secret "cert-swift-public-svc" not found Jan 20 17:28:04 crc kubenswrapper[4558]: E0120 17:28:04.539178 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-public-tls-certs podName:4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5 nodeName:}" failed. No retries permitted until 2026-01-20 17:28:05.039153286 +0000 UTC m=+2778.799491253 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-public-tls-certs") pod "swift-proxy-5f7fb7b-s2dxn" (UID: "4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5") : secret "cert-swift-public-svc" not found Jan 20 17:28:04 crc kubenswrapper[4558]: E0120 17:28:04.539213 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"neutron-db-secret\\\" not found\"" pod="openstack-kuttl-tests/neutron-b136-account-create-update-n9qdv" podUID="1bcd505c-4754-4f41-b91d-6e488a669c93" Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.549431 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/swift-proxy-5f7fb7b-s2dxn" podStartSLOduration=6.549413688 podStartE2EDuration="6.549413688s" podCreationTimestamp="2026-01-20 17:27:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:28:04.538704332 +0000 UTC m=+2778.299042299" watchObservedRunningTime="2026-01-20 17:28:04.549413688 +0000 UTC m=+2778.309751654" Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.585569 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b6a0803-befe-4426-9e2a-a04ac12f2d7c-ovsdbserver-sb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-sb-tls-certs") pod "7b6a0803-befe-4426-9e2a-a04ac12f2d7c" (UID: "7b6a0803-befe-4426-9e2a-a04ac12f2d7c"). InnerVolumeSpecName "ovsdbserver-sb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.598580 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a2068ccb-0c0d-4b32-9063-082a4c395070-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "a2068ccb-0c0d-4b32-9063-082a4c395070" (UID: "a2068ccb-0c0d-4b32-9063-082a4c395070"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.605142 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b6a0803-befe-4426-9e2a-a04ac12f2d7c-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "7b6a0803-befe-4426-9e2a-a04ac12f2d7c" (UID: "7b6a0803-befe-4426-9e2a-a04ac12f2d7c"). InnerVolumeSpecName "metrics-certs-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.613920 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="005155ac-dadf-4840-9b7c-7eb1b0f3252e" path="/var/lib/kubelet/pods/005155ac-dadf-4840-9b7c-7eb1b0f3252e/volumes" Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.614589 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="04c5c6c7-acc2-4821-a68b-c2ac094e931f" path="/var/lib/kubelet/pods/04c5c6c7-acc2-4821-a68b-c2ac094e931f/volumes" Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.615153 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="29c1b3a9-0bdc-423c-af7f-42f86f3693b5" path="/var/lib/kubelet/pods/29c1b3a9-0bdc-423c-af7f-42f86f3693b5/volumes" Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.615739 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2f0bbcf9-0bf8-43ec-bd9c-220f67300878" path="/var/lib/kubelet/pods/2f0bbcf9-0bf8-43ec-bd9c-220f67300878/volumes" Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.620106 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="467e28b1-5dde-44cb-9ef4-f81980c3d999" path="/var/lib/kubelet/pods/467e28b1-5dde-44cb-9ef4-f81980c3d999/volumes" Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.620667 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8a111df9-8578-40bb-a672-b5d53305c873" path="/var/lib/kubelet/pods/8a111df9-8578-40bb-a672-b5d53305c873/volumes" Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.621289 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9e043cfa-4b67-4f6c-a46b-626a8ab6d797" path="/var/lib/kubelet/pods/9e043cfa-4b67-4f6c-a46b-626a8ab6d797/volumes" Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.622258 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aeb478ae-4d88-4f64-be68-ccebfe589ff1" path="/var/lib/kubelet/pods/aeb478ae-4d88-4f64-be68-ccebfe589ff1/volumes" Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.622815 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f461d799-0027-4949-96bc-85e80ae9ec47" path="/var/lib/kubelet/pods/f461d799-0027-4949-96bc-85e80ae9ec47/volumes" Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.637411 4558 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/a2068ccb-0c0d-4b32-9063-082a4c395070-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.637433 4558 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/7b6a0803-befe-4426-9e2a-a04ac12f2d7c-ovsdbserver-sb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.637443 4558 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/7b6a0803-befe-4426-9e2a-a04ac12f2d7c-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.663081 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a2068ccb-0c0d-4b32-9063-082a4c395070-ovsdbserver-nb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-nb-tls-certs") pod "a2068ccb-0c0d-4b32-9063-082a4c395070" (UID: "a2068ccb-0c0d-4b32-9063-082a4c395070"). 
InnerVolumeSpecName "ovsdbserver-nb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.739621 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xfskp\" (UniqueName: \"kubernetes.io/projected/012246ba-362c-48f3-bbb1-913d57f1f9ed-kube-api-access-xfskp\") pod \"keystone-64c1-account-create-update-wsq7z\" (UID: \"012246ba-362c-48f3-bbb1-913d57f1f9ed\") " pod="openstack-kuttl-tests/keystone-64c1-account-create-update-wsq7z" Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.739953 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/012246ba-362c-48f3-bbb1-913d57f1f9ed-operator-scripts\") pod \"keystone-64c1-account-create-update-wsq7z\" (UID: \"012246ba-362c-48f3-bbb1-913d57f1f9ed\") " pod="openstack-kuttl-tests/keystone-64c1-account-create-update-wsq7z" Jan 20 17:28:04 crc kubenswrapper[4558]: E0120 17:28:04.740205 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 17:28:04 crc kubenswrapper[4558]: E0120 17:28:04.740273 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/012246ba-362c-48f3-bbb1-913d57f1f9ed-operator-scripts podName:012246ba-362c-48f3-bbb1-913d57f1f9ed nodeName:}" failed. No retries permitted until 2026-01-20 17:28:05.740257829 +0000 UTC m=+2779.500595796 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/012246ba-362c-48f3-bbb1-913d57f1f9ed-operator-scripts") pod "keystone-64c1-account-create-update-wsq7z" (UID: "012246ba-362c-48f3-bbb1-913d57f1f9ed") : configmap "openstack-scripts" not found Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.740371 4558 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a2068ccb-0c0d-4b32-9063-082a4c395070-ovsdbserver-nb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:04 crc kubenswrapper[4558]: E0120 17:28:04.740485 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-cell1-scripts: configmap "openstack-cell1-scripts" not found Jan 20 17:28:04 crc kubenswrapper[4558]: E0120 17:28:04.740580 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/c2bfe108-b710-400b-baac-55815b192ee3-operator-scripts podName:c2bfe108-b710-400b-baac-55815b192ee3 nodeName:}" failed. No retries permitted until 2026-01-20 17:28:06.740566448 +0000 UTC m=+2780.500904416 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/c2bfe108-b710-400b-baac-55815b192ee3-operator-scripts") pod "root-account-create-update-dmtbg" (UID: "c2bfe108-b710-400b-baac-55815b192ee3") : configmap "openstack-cell1-scripts" not found Jan 20 17:28:04 crc kubenswrapper[4558]: E0120 17:28:04.740508 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 17:28:04 crc kubenswrapper[4558]: E0120 17:28:04.740706 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/1bcd505c-4754-4f41-b91d-6e488a669c93-operator-scripts podName:1bcd505c-4754-4f41-b91d-6e488a669c93 nodeName:}" failed. 
No retries permitted until 2026-01-20 17:28:06.740699428 +0000 UTC m=+2780.501037396 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/1bcd505c-4754-4f41-b91d-6e488a669c93-operator-scripts") pod "neutron-b136-account-create-update-n9qdv" (UID: "1bcd505c-4754-4f41-b91d-6e488a669c93") : configmap "openstack-scripts" not found Jan 20 17:28:04 crc kubenswrapper[4558]: E0120 17:28:04.747760 4558 projected.go:194] Error preparing data for projected volume kube-api-access-xfskp for pod openstack-kuttl-tests/keystone-64c1-account-create-update-wsq7z: failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 17:28:04 crc kubenswrapper[4558]: E0120 17:28:04.747860 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/012246ba-362c-48f3-bbb1-913d57f1f9ed-kube-api-access-xfskp podName:012246ba-362c-48f3-bbb1-913d57f1f9ed nodeName:}" failed. No retries permitted until 2026-01-20 17:28:05.747829885 +0000 UTC m=+2779.508167842 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-xfskp" (UniqueName: "kubernetes.io/projected/012246ba-362c-48f3-bbb1-913d57f1f9ed-kube-api-access-xfskp") pod "keystone-64c1-account-create-update-wsq7z" (UID: "012246ba-362c-48f3-bbb1-913d57f1f9ed") : failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.785750 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/openstack-galera-0" podUID="6bc8ca35-f460-4c4d-9dbe-0012c552371a" containerName="galera" containerID="cri-o://3d044e6540417b17b7e29b9e6b94aac224f6dc6f6746f710160907736ae8f719" gracePeriod=30 Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.800397 4558 scope.go:117] "RemoveContainer" containerID="0a200f6996ecf5951d17224c62d14bcea68ab49867b472e280d5c47ecfd50ad7" Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.802012 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstackclient" Jan 20 17:28:04 crc kubenswrapper[4558]: E0120 17:28:04.807397 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[kube-api-access-tp4lp operator-scripts], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack-kuttl-tests/keystone-db-create-rztt2" podUID="c67fda0a-394c-424a-ad72-f0e3ebd77f1b" Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.811785 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-74d54f756c-g9zzk" Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.824969 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-8f4cc76c4-9qkpn" Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.836278 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/cinder-api-0" podUID="f6a0777b-d04f-451b-9e3d-3f9ab0e19df8" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.1.83:8776/healthcheck\": read tcp 10.217.0.2:43346->10.217.1.83:8776: read: connection reset by peer" Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.836962 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-af2b-account-create-update-x6tw5" Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.838188 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:28:04 crc kubenswrapper[4558]: E0120 17:28:04.843006 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 17:28:04 crc kubenswrapper[4558]: E0120 17:28:04.843067 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/54457478-e4f4-4088-bd18-e427314e1fb2-operator-scripts podName:54457478-e4f4-4088-bd18-e427314e1fb2 nodeName:}" failed. No retries permitted until 2026-01-20 17:28:05.84305221 +0000 UTC m=+2779.603390177 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/54457478-e4f4-4088-bd18-e427314e1fb2-operator-scripts") pod "barbican-e949-account-create-update-qd847" (UID: "54457478-e4f4-4088-bd18-e427314e1fb2") : configmap "openstack-scripts" not found Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.876509 4558 scope.go:117] "RemoveContainer" containerID="3cb92b2ab723d670d8405be0316a236c7b450aa82c756082192092f8f2f20fad" Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.941233 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.943429 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fa57dfc3-11b4-48af-a78c-00463e8894bf-config-data-custom\") pod \"fa57dfc3-11b4-48af-a78c-00463e8894bf\" (UID: \"fa57dfc3-11b4-48af-a78c-00463e8894bf\") " Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.943479 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cc4089c6-71ea-4503-b54c-18777fcc3c48-logs\") pod \"cc4089c6-71ea-4503-b54c-18777fcc3c48\" (UID: \"cc4089c6-71ea-4503-b54c-18777fcc3c48\") " Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.943525 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7vmvf\" (UniqueName: \"kubernetes.io/projected/fa57dfc3-11b4-48af-a78c-00463e8894bf-kube-api-access-7vmvf\") pod \"fa57dfc3-11b4-48af-a78c-00463e8894bf\" (UID: \"fa57dfc3-11b4-48af-a78c-00463e8894bf\") " Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.943543 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc4089c6-71ea-4503-b54c-18777fcc3c48-nova-metadata-tls-certs\") pod \"cc4089c6-71ea-4503-b54c-18777fcc3c48\" (UID: \"cc4089c6-71ea-4503-b54c-18777fcc3c48\") " Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.943586 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/e383e708-a471-4904-bfc7-eead6a5c76dc-openstack-config-secret\") pod \"e383e708-a471-4904-bfc7-eead6a5c76dc\" (UID: \"e383e708-a471-4904-bfc7-eead6a5c76dc\") " Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.943601 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9b28c41-c054-49f3-87d2-a8c15e6124de-config-data\") pod 
\"b9b28c41-c054-49f3-87d2-a8c15e6124de\" (UID: \"b9b28c41-c054-49f3-87d2-a8c15e6124de\") " Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.943627 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fa57dfc3-11b4-48af-a78c-00463e8894bf-logs\") pod \"fa57dfc3-11b4-48af-a78c-00463e8894bf\" (UID: \"fa57dfc3-11b4-48af-a78c-00463e8894bf\") " Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.943653 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b9b28c41-c054-49f3-87d2-a8c15e6124de-logs\") pod \"b9b28c41-c054-49f3-87d2-a8c15e6124de\" (UID: \"b9b28c41-c054-49f3-87d2-a8c15e6124de\") " Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.943692 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc4089c6-71ea-4503-b54c-18777fcc3c48-combined-ca-bundle\") pod \"cc4089c6-71ea-4503-b54c-18777fcc3c48\" (UID: \"cc4089c6-71ea-4503-b54c-18777fcc3c48\") " Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.943710 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/e383e708-a471-4904-bfc7-eead6a5c76dc-openstack-config\") pod \"e383e708-a471-4904-bfc7-eead6a5c76dc\" (UID: \"e383e708-a471-4904-bfc7-eead6a5c76dc\") " Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.943804 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p6hdm\" (UniqueName: \"kubernetes.io/projected/e383e708-a471-4904-bfc7-eead6a5c76dc-kube-api-access-p6hdm\") pod \"e383e708-a471-4904-bfc7-eead6a5c76dc\" (UID: \"e383e708-a471-4904-bfc7-eead6a5c76dc\") " Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.943833 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q7dzv\" (UniqueName: \"kubernetes.io/projected/b9b28c41-c054-49f3-87d2-a8c15e6124de-kube-api-access-q7dzv\") pod \"b9b28c41-c054-49f3-87d2-a8c15e6124de\" (UID: \"b9b28c41-c054-49f3-87d2-a8c15e6124de\") " Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.943896 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e383e708-a471-4904-bfc7-eead6a5c76dc-combined-ca-bundle\") pod \"e383e708-a471-4904-bfc7-eead6a5c76dc\" (UID: \"e383e708-a471-4904-bfc7-eead6a5c76dc\") " Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.943920 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cl297\" (UniqueName: \"kubernetes.io/projected/cc4089c6-71ea-4503-b54c-18777fcc3c48-kube-api-access-cl297\") pod \"cc4089c6-71ea-4503-b54c-18777fcc3c48\" (UID: \"cc4089c6-71ea-4503-b54c-18777fcc3c48\") " Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.943937 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cc4089c6-71ea-4503-b54c-18777fcc3c48-config-data\") pod \"cc4089c6-71ea-4503-b54c-18777fcc3c48\" (UID: \"cc4089c6-71ea-4503-b54c-18777fcc3c48\") " Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.943961 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa57dfc3-11b4-48af-a78c-00463e8894bf-config-data\") pod 
\"fa57dfc3-11b4-48af-a78c-00463e8894bf\" (UID: \"fa57dfc3-11b4-48af-a78c-00463e8894bf\") " Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.944008 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b9b28c41-c054-49f3-87d2-a8c15e6124de-config-data-custom\") pod \"b9b28c41-c054-49f3-87d2-a8c15e6124de\" (UID: \"b9b28c41-c054-49f3-87d2-a8c15e6124de\") " Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.944636 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tp4lp\" (UniqueName: \"kubernetes.io/projected/c67fda0a-394c-424a-ad72-f0e3ebd77f1b-kube-api-access-tp4lp\") pod \"keystone-db-create-rztt2\" (UID: \"c67fda0a-394c-424a-ad72-f0e3ebd77f1b\") " pod="openstack-kuttl-tests/keystone-db-create-rztt2" Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.944792 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c67fda0a-394c-424a-ad72-f0e3ebd77f1b-operator-scripts\") pod \"keystone-db-create-rztt2\" (UID: \"c67fda0a-394c-424a-ad72-f0e3ebd77f1b\") " pod="openstack-kuttl-tests/keystone-db-create-rztt2" Jan 20 17:28:04 crc kubenswrapper[4558]: E0120 17:28:04.944962 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 17:28:04 crc kubenswrapper[4558]: E0120 17:28:04.945004 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/c67fda0a-394c-424a-ad72-f0e3ebd77f1b-operator-scripts podName:c67fda0a-394c-424a-ad72-f0e3ebd77f1b nodeName:}" failed. No retries permitted until 2026-01-20 17:28:05.944993006 +0000 UTC m=+2779.705330973 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/c67fda0a-394c-424a-ad72-f0e3ebd77f1b-operator-scripts") pod "keystone-db-create-rztt2" (UID: "c67fda0a-394c-424a-ad72-f0e3ebd77f1b") : configmap "openstack-scripts" not found Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.950318 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa57dfc3-11b4-48af-a78c-00463e8894bf-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "fa57dfc3-11b4-48af-a78c-00463e8894bf" (UID: "fa57dfc3-11b4-48af-a78c-00463e8894bf"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:04 crc kubenswrapper[4558]: E0120 17:28:04.954064 4558 projected.go:194] Error preparing data for projected volume kube-api-access-tp4lp for pod openstack-kuttl-tests/keystone-db-create-rztt2: failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 17:28:04 crc kubenswrapper[4558]: E0120 17:28:04.954139 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/c67fda0a-394c-424a-ad72-f0e3ebd77f1b-kube-api-access-tp4lp podName:c67fda0a-394c-424a-ad72-f0e3ebd77f1b nodeName:}" failed. No retries permitted until 2026-01-20 17:28:05.954120177 +0000 UTC m=+2779.714458144 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-tp4lp" (UniqueName: "kubernetes.io/projected/c67fda0a-394c-424a-ad72-f0e3ebd77f1b-kube-api-access-tp4lp") pod "keystone-db-create-rztt2" (UID: "c67fda0a-394c-424a-ad72-f0e3ebd77f1b") : failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.955976 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fa57dfc3-11b4-48af-a78c-00463e8894bf-kube-api-access-7vmvf" (OuterVolumeSpecName: "kube-api-access-7vmvf") pod "fa57dfc3-11b4-48af-a78c-00463e8894bf" (UID: "fa57dfc3-11b4-48af-a78c-00463e8894bf"). InnerVolumeSpecName "kube-api-access-7vmvf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.956363 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cc4089c6-71ea-4503-b54c-18777fcc3c48-logs" (OuterVolumeSpecName: "logs") pod "cc4089c6-71ea-4503-b54c-18777fcc3c48" (UID: "cc4089c6-71ea-4503-b54c-18777fcc3c48"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.956603 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fa57dfc3-11b4-48af-a78c-00463e8894bf-logs" (OuterVolumeSpecName: "logs") pod "fa57dfc3-11b4-48af-a78c-00463e8894bf" (UID: "fa57dfc3-11b4-48af-a78c-00463e8894bf"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.959947 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.961297 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b9b28c41-c054-49f3-87d2-a8c15e6124de-logs" (OuterVolumeSpecName: "logs") pod "b9b28c41-c054-49f3-87d2-a8c15e6124de" (UID: "b9b28c41-c054-49f3-87d2-a8c15e6124de"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.965628 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b9b28c41-c054-49f3-87d2-a8c15e6124de-kube-api-access-q7dzv" (OuterVolumeSpecName: "kube-api-access-q7dzv") pod "b9b28c41-c054-49f3-87d2-a8c15e6124de" (UID: "b9b28c41-c054-49f3-87d2-a8c15e6124de"). InnerVolumeSpecName "kube-api-access-q7dzv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.966244 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa57dfc3-11b4-48af-a78c-00463e8894bf-config-data" (OuterVolumeSpecName: "config-data") pod "fa57dfc3-11b4-48af-a78c-00463e8894bf" (UID: "fa57dfc3-11b4-48af-a78c-00463e8894bf"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.973264 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9b28c41-c054-49f3-87d2-a8c15e6124de-config-data" (OuterVolumeSpecName: "config-data") pod "b9b28c41-c054-49f3-87d2-a8c15e6124de" (UID: "b9b28c41-c054-49f3-87d2-a8c15e6124de"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.978614 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e383e708-a471-4904-bfc7-eead6a5c76dc-kube-api-access-p6hdm" (OuterVolumeSpecName: "kube-api-access-p6hdm") pod "e383e708-a471-4904-bfc7-eead6a5c76dc" (UID: "e383e708-a471-4904-bfc7-eead6a5c76dc"). InnerVolumeSpecName "kube-api-access-p6hdm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.988528 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:28:04 crc kubenswrapper[4558]: I0120 17:28:04.991206 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.015034 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cc4089c6-71ea-4503-b54c-18777fcc3c48-kube-api-access-cl297" (OuterVolumeSpecName: "kube-api-access-cl297") pod "cc4089c6-71ea-4503-b54c-18777fcc3c48" (UID: "cc4089c6-71ea-4503-b54c-18777fcc3c48"). InnerVolumeSpecName "kube-api-access-cl297". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.022374 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9b28c41-c054-49f3-87d2-a8c15e6124de-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "b9b28c41-c054-49f3-87d2-a8c15e6124de" (UID: "b9b28c41-c054-49f3-87d2-a8c15e6124de"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.037685 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e383e708-a471-4904-bfc7-eead6a5c76dc-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "e383e708-a471-4904-bfc7-eead6a5c76dc" (UID: "e383e708-a471-4904-bfc7-eead6a5c76dc"). InnerVolumeSpecName "openstack-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.048398 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b9b28c41-c054-49f3-87d2-a8c15e6124de-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.048424 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fa57dfc3-11b4-48af-a78c-00463e8894bf-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.048435 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cc4089c6-71ea-4503-b54c-18777fcc3c48-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.048445 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7vmvf\" (UniqueName: \"kubernetes.io/projected/fa57dfc3-11b4-48af-a78c-00463e8894bf-kube-api-access-7vmvf\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.048456 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9b28c41-c054-49f3-87d2-a8c15e6124de-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.048464 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fa57dfc3-11b4-48af-a78c-00463e8894bf-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.048474 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b9b28c41-c054-49f3-87d2-a8c15e6124de-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.048481 4558 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/e383e708-a471-4904-bfc7-eead6a5c76dc-openstack-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.048493 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p6hdm\" (UniqueName: \"kubernetes.io/projected/e383e708-a471-4904-bfc7-eead6a5c76dc-kube-api-access-p6hdm\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.048501 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q7dzv\" (UniqueName: \"kubernetes.io/projected/b9b28c41-c054-49f3-87d2-a8c15e6124de-kube-api-access-q7dzv\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.048511 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cl297\" (UniqueName: \"kubernetes.io/projected/cc4089c6-71ea-4503-b54c-18777fcc3c48-kube-api-access-cl297\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.048521 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa57dfc3-11b4-48af-a78c-00463e8894bf-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:05 crc kubenswrapper[4558]: E0120 17:28:05.048795 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-swift-internal-svc: secret "cert-swift-internal-svc" not found Jan 20 17:28:05 crc kubenswrapper[4558]: E0120 17:28:05.048869 4558 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-internal-tls-certs podName:4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5 nodeName:}" failed. No retries permitted until 2026-01-20 17:28:06.04885249 +0000 UTC m=+2779.809190457 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-internal-tls-certs") pod "swift-proxy-5f7fb7b-s2dxn" (UID: "4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5") : secret "cert-swift-internal-svc" not found Jan 20 17:28:05 crc kubenswrapper[4558]: E0120 17:28:05.048873 4558 projected.go:263] Couldn't get secret openstack-kuttl-tests/swift-proxy-config-data: secret "swift-proxy-config-data" not found Jan 20 17:28:05 crc kubenswrapper[4558]: E0120 17:28:05.048908 4558 projected.go:263] Couldn't get secret openstack-kuttl-tests/swift-conf: secret "swift-conf" not found Jan 20 17:28:05 crc kubenswrapper[4558]: E0120 17:28:05.048920 4558 projected.go:288] Couldn't get configMap openstack-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 20 17:28:05 crc kubenswrapper[4558]: E0120 17:28:05.048936 4558 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack-kuttl-tests/swift-proxy-5f7fb7b-s2dxn: [secret "swift-proxy-config-data" not found, secret "swift-conf" not found, configmap "swift-ring-files" not found] Jan 20 17:28:05 crc kubenswrapper[4558]: E0120 17:28:05.048932 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:28:05 crc kubenswrapper[4558]: E0120 17:28:05.049001 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-etc-swift podName:4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5 nodeName:}" failed. No retries permitted until 2026-01-20 17:28:06.048966044 +0000 UTC m=+2779.809304011 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-etc-swift") pod "swift-proxy-5f7fb7b-s2dxn" (UID: "4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5") : [secret "swift-proxy-config-data" not found, secret "swift-conf" not found, configmap "swift-ring-files" not found] Jan 20 17:28:05 crc kubenswrapper[4558]: E0120 17:28:05.049020 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-combined-ca-bundle podName:4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5 nodeName:}" failed. No retries permitted until 2026-01-20 17:28:06.04901175 +0000 UTC m=+2779.809349717 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-combined-ca-bundle") pod "swift-proxy-5f7fb7b-s2dxn" (UID: "4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5") : secret "combined-ca-bundle" not found Jan 20 17:28:05 crc kubenswrapper[4558]: E0120 17:28:05.049047 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-swift-public-svc: secret "cert-swift-public-svc" not found Jan 20 17:28:05 crc kubenswrapper[4558]: E0120 17:28:05.049085 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-public-tls-certs podName:4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5 nodeName:}" failed. 
No retries permitted until 2026-01-20 17:28:06.049064859 +0000 UTC m=+2779.809402817 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-public-tls-certs") pod "swift-proxy-5f7fb7b-s2dxn" (UID: "4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5") : secret "cert-swift-public-svc" not found Jan 20 17:28:05 crc kubenswrapper[4558]: E0120 17:28:05.049093 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 17:28:05 crc kubenswrapper[4558]: E0120 17:28:05.049119 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5f7a0ad9-436f-433c-9a91-cec4ffd3beeb-operator-scripts podName:5f7a0ad9-436f-433c-9a91-cec4ffd3beeb nodeName:}" failed. No retries permitted until 2026-01-20 17:28:06.049113802 +0000 UTC m=+2779.809451758 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/5f7a0ad9-436f-433c-9a91-cec4ffd3beeb-operator-scripts") pod "glance-65cd-account-create-update-grtts" (UID: "5f7a0ad9-436f-433c-9a91-cec4ffd3beeb") : configmap "openstack-scripts" not found Jan 20 17:28:05 crc kubenswrapper[4558]: E0120 17:28:05.049147 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 17:28:05 crc kubenswrapper[4558]: E0120 17:28:05.049173 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/swift-proxy-config-data: secret "swift-proxy-config-data" not found Jan 20 17:28:05 crc kubenswrapper[4558]: E0120 17:28:05.049182 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/1b7c6fb3-f30d-4dde-8502-e7d840719520-operator-scripts podName:1b7c6fb3-f30d-4dde-8502-e7d840719520 nodeName:}" failed. No retries permitted until 2026-01-20 17:28:06.049175999 +0000 UTC m=+2779.809513966 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/1b7c6fb3-f30d-4dde-8502-e7d840719520-operator-scripts") pod "glance-db-create-rt2fh" (UID: "1b7c6fb3-f30d-4dde-8502-e7d840719520") : configmap "openstack-scripts" not found Jan 20 17:28:05 crc kubenswrapper[4558]: E0120 17:28:05.049251 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-config-data podName:4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5 nodeName:}" failed. No retries permitted until 2026-01-20 17:28:06.049233627 +0000 UTC m=+2779.809571594 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-config-data") pod "swift-proxy-5f7fb7b-s2dxn" (UID: "4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5") : secret "swift-proxy-config-data" not found Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.101008 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e383e708-a471-4904-bfc7-eead6a5c76dc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e383e708-a471-4904-bfc7-eead6a5c76dc" (UID: "e383e708-a471-4904-bfc7-eead6a5c76dc"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.109100 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc4089c6-71ea-4503-b54c-18777fcc3c48-config-data" (OuterVolumeSpecName: "config-data") pod "cc4089c6-71ea-4503-b54c-18777fcc3c48" (UID: "cc4089c6-71ea-4503-b54c-18777fcc3c48"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.114341 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e383e708-a471-4904-bfc7-eead6a5c76dc-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "e383e708-a471-4904-bfc7-eead6a5c76dc" (UID: "e383e708-a471-4904-bfc7-eead6a5c76dc"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.115017 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc4089c6-71ea-4503-b54c-18777fcc3c48-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cc4089c6-71ea-4503-b54c-18777fcc3c48" (UID: "cc4089c6-71ea-4503-b54c-18777fcc3c48"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.126067 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc4089c6-71ea-4503-b54c-18777fcc3c48-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "cc4089c6-71ea-4503-b54c-18777fcc3c48" (UID: "cc4089c6-71ea-4503-b54c-18777fcc3c48"). InnerVolumeSpecName "nova-metadata-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.150463 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc4089c6-71ea-4503-b54c-18777fcc3c48-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.150483 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e383e708-a471-4904-bfc7-eead6a5c76dc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.150494 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cc4089c6-71ea-4503-b54c-18777fcc3c48-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.150503 4558 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc4089c6-71ea-4503-b54c-18777fcc3c48-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.150514 4558 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/e383e708-a471-4904-bfc7-eead6a5c76dc-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:05 crc kubenswrapper[4558]: E0120 17:28:05.358223 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cinder-scheduler-config-data: secret "cinder-scheduler-config-data" not found Jan 20 17:28:05 crc kubenswrapper[4558]: E0120 17:28:05.358510 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:28:05 crc kubenswrapper[4558]: E0120 17:28:05.358449 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cinder-config-data: secret "cinder-config-data" not found Jan 20 17:28:05 crc kubenswrapper[4558]: E0120 17:28:05.358570 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-config-data-custom podName:c1afb831-f3e4-4356-ab86-713eb0beca39 nodeName:}" failed. No retries permitted until 2026-01-20 17:28:09.358548964 +0000 UTC m=+2783.118886921 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "config-data-custom" (UniqueName: "kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-config-data-custom") pod "cinder-scheduler-0" (UID: "c1afb831-f3e4-4356-ab86-713eb0beca39") : secret "cinder-scheduler-config-data" not found Jan 20 17:28:05 crc kubenswrapper[4558]: E0120 17:28:05.358591 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-combined-ca-bundle podName:c1afb831-f3e4-4356-ab86-713eb0beca39 nodeName:}" failed. No retries permitted until 2026-01-20 17:28:09.358584831 +0000 UTC m=+2783.118922798 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-combined-ca-bundle") pod "cinder-scheduler-0" (UID: "c1afb831-f3e4-4356-ab86-713eb0beca39") : secret "combined-ca-bundle" not found Jan 20 17:28:05 crc kubenswrapper[4558]: E0120 17:28:05.358605 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-config-data podName:c1afb831-f3e4-4356-ab86-713eb0beca39 nodeName:}" failed. No retries permitted until 2026-01-20 17:28:09.358599429 +0000 UTC m=+2783.118937396 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-config-data") pod "cinder-scheduler-0" (UID: "c1afb831-f3e4-4356-ab86-713eb0beca39") : secret "cinder-config-data" not found Jan 20 17:28:05 crc kubenswrapper[4558]: E0120 17:28:05.358484 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cinder-scripts: secret "cinder-scripts" not found Jan 20 17:28:05 crc kubenswrapper[4558]: E0120 17:28:05.358839 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-scripts podName:c1afb831-f3e4-4356-ab86-713eb0beca39 nodeName:}" failed. No retries permitted until 2026-01-20 17:28:09.358822498 +0000 UTC m=+2783.119160465 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "scripts" (UniqueName: "kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-scripts") pod "cinder-scheduler-0" (UID: "c1afb831-f3e4-4356-ab86-713eb0beca39") : secret "cinder-scripts" not found Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.505156 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-f949-account-create-update-v2zng" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.507264 4558 generic.go:334] "Generic (PLEG): container finished" podID="134fc5ef-70be-4c8c-aa72-e4f0440d0afe" containerID="39bfb26312eaad0bd344b47e0fab7bc8a68e4339a058a5362c330669eb1593d1" exitCode=0 Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.507292 4558 generic.go:334] "Generic (PLEG): container finished" podID="134fc5ef-70be-4c8c-aa72-e4f0440d0afe" containerID="8ac13b7e76f880d4b690e8245e9e454bb0f7d3d01a25e904b05a000a609f936a" exitCode=0 Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.507360 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"134fc5ef-70be-4c8c-aa72-e4f0440d0afe","Type":"ContainerDied","Data":"39bfb26312eaad0bd344b47e0fab7bc8a68e4339a058a5362c330669eb1593d1"} Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.507410 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"134fc5ef-70be-4c8c-aa72-e4f0440d0afe","Type":"ContainerDied","Data":"8ac13b7e76f880d4b690e8245e9e454bb0f7d3d01a25e904b05a000a609f936a"} Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.510683 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-a772-account-create-update-27cpc" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.524778 4558 generic.go:334] "Generic (PLEG): container finished" podID="c2bfe108-b710-400b-baac-55815b192ee3" containerID="b9a71508666d31c13efe2453ce42664d694e3c0db82c6d7b70071f319d25a051" exitCode=1 Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.526880 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-dmtbg" event={"ID":"c2bfe108-b710-400b-baac-55815b192ee3","Type":"ContainerDied","Data":"b9a71508666d31c13efe2453ce42664d694e3c0db82c6d7b70071f319d25a051"} Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.526955 4558 scope.go:117] "RemoveContainer" containerID="a9c03d94fb53b93aaaeb1d783b464528820c872e9dc48826f7156866d1723e5e" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.528616 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.532409 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-7ab2-account-create-update-c5qqm" event={"ID":"b201974e-8bb6-412a-95d5-cce7a95e4528","Type":"ContainerDied","Data":"cc78df1605b23b363d8c1c147f3989b9a5c85c29cc42aca19c525730f4cb40eb"} Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.532482 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cc78df1605b23b363d8c1c147f3989b9a5c85c29cc42aca19c525730f4cb40eb" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.538439 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.542265 4558 generic.go:334] "Generic (PLEG): container finished" podID="4430d3cb-0a1c-4266-9f69-cf8e817f1d3e" containerID="fccdd43e49b8880beb3f4193e0e2a09b0c9bcb297f472c9a13a4fc7709889c73" exitCode=0 Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.542327 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"4430d3cb-0a1c-4266-9f69-cf8e817f1d3e","Type":"ContainerDied","Data":"fccdd43e49b8880beb3f4193e0e2a09b0c9bcb297f472c9a13a4fc7709889c73"} Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.542352 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"4430d3cb-0a1c-4266-9f69-cf8e817f1d3e","Type":"ContainerDied","Data":"37b985b06b28c8541149da36506253933f4d0bb97118cf9caae6c0277577b2c6"} Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.542434 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.545980 4558 generic.go:334] "Generic (PLEG): container finished" podID="2bee9ff9-52bb-4dae-971c-8c6236a4e563" containerID="6a0521946798f9e4098ead7f6a1d8d950d13146e8392b370d0b39a8082a8515a" exitCode=0 Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.546028 4558 generic.go:334] "Generic (PLEG): container finished" podID="2bee9ff9-52bb-4dae-971c-8c6236a4e563" containerID="95487ba24ea1b5e70725b0d792dbc8653b76f5c38eac7b039714b3d9fd0a7854" exitCode=0 Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.546078 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-7459567f99-gx6dr" event={"ID":"2bee9ff9-52bb-4dae-971c-8c6236a4e563","Type":"ContainerDied","Data":"6a0521946798f9e4098ead7f6a1d8d950d13146e8392b370d0b39a8082a8515a"} Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.546097 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-7459567f99-gx6dr" event={"ID":"2bee9ff9-52bb-4dae-971c-8c6236a4e563","Type":"ContainerDied","Data":"95487ba24ea1b5e70725b0d792dbc8653b76f5c38eac7b039714b3d9fd0a7854"} Jan 20 17:28:05 crc kubenswrapper[4558]: E0120 17:28:05.546076 4558 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3460a75e_3553_47b9_bfc5_39c2c459826e.slice/crio-conmon-c8a5e9bf9d8e1a8dad3f6f73cdf354545ce12575fe3b33bce155c23769a44f3e.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3460a75e_3553_47b9_bfc5_39c2c459826e.slice/crio-c8a5e9bf9d8e1a8dad3f6f73cdf354545ce12575fe3b33bce155c23769a44f3e.scope\": RecentStats: unable to find data in memory cache]" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.546109 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-7459567f99-gx6dr" event={"ID":"2bee9ff9-52bb-4dae-971c-8c6236a4e563","Type":"ContainerDied","Data":"b78f2da417e81673e98774d9598a50961cb7fbede4d984710706cd12f7cb03b8"} Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.546568 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b78f2da417e81673e98774d9598a50961cb7fbede4d984710706cd12f7cb03b8" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.548306 4558 generic.go:334] "Generic (PLEG): container finished" podID="f6a0777b-d04f-451b-9e3d-3f9ab0e19df8" containerID="5318567d652c11a8d32a83b41eaaa58746106e91b1ac59789f4bda5a8e0f9415" exitCode=0 Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.548367 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"f6a0777b-d04f-451b-9e3d-3f9ab0e19df8","Type":"ContainerDied","Data":"5318567d652c11a8d32a83b41eaaa58746106e91b1ac59789f4bda5a8e0f9415"} Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.551295 4558 generic.go:334] "Generic (PLEG): container finished" podID="f81a00ad-36ce-4383-9e91-fe60de6939d2" containerID="77c882047f115c6a4c56a4e460c8836ce62f0179639cdd46fd3f9af7f7edbd88" exitCode=0 Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.551351 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" 
event={"ID":"f81a00ad-36ce-4383-9e91-fe60de6939d2","Type":"ContainerDied","Data":"77c882047f115c6a4c56a4e460c8836ce62f0179639cdd46fd3f9af7f7edbd88"} Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.562333 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4430d3cb-0a1c-4266-9f69-cf8e817f1d3e-config-data\") pod \"4430d3cb-0a1c-4266-9f69-cf8e817f1d3e\" (UID: \"4430d3cb-0a1c-4266-9f69-cf8e817f1d3e\") " Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.562386 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4430d3cb-0a1c-4266-9f69-cf8e817f1d3e-public-tls-certs\") pod \"4430d3cb-0a1c-4266-9f69-cf8e817f1d3e\" (UID: \"4430d3cb-0a1c-4266-9f69-cf8e817f1d3e\") " Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.562411 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4430d3cb-0a1c-4266-9f69-cf8e817f1d3e-internal-tls-certs\") pod \"4430d3cb-0a1c-4266-9f69-cf8e817f1d3e\" (UID: \"4430d3cb-0a1c-4266-9f69-cf8e817f1d3e\") " Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.562472 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3ac6a4e2-150b-4137-b547-33cbdd1137b7-operator-scripts\") pod \"3ac6a4e2-150b-4137-b547-33cbdd1137b7\" (UID: \"3ac6a4e2-150b-4137-b547-33cbdd1137b7\") " Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.562496 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7g4x5\" (UniqueName: \"kubernetes.io/projected/4430d3cb-0a1c-4266-9f69-cf8e817f1d3e-kube-api-access-7g4x5\") pod \"4430d3cb-0a1c-4266-9f69-cf8e817f1d3e\" (UID: \"4430d3cb-0a1c-4266-9f69-cf8e817f1d3e\") " Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.562520 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e0d8717e-1693-4222-aebb-024d291cb0d5-operator-scripts\") pod \"e0d8717e-1693-4222-aebb-024d291cb0d5\" (UID: \"e0d8717e-1693-4222-aebb-024d291cb0d5\") " Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.562552 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4430d3cb-0a1c-4266-9f69-cf8e817f1d3e-combined-ca-bundle\") pod \"4430d3cb-0a1c-4266-9f69-cf8e817f1d3e\" (UID: \"4430d3cb-0a1c-4266-9f69-cf8e817f1d3e\") " Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.562629 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rf6ql\" (UniqueName: \"kubernetes.io/projected/3ac6a4e2-150b-4137-b547-33cbdd1137b7-kube-api-access-rf6ql\") pod \"3ac6a4e2-150b-4137-b547-33cbdd1137b7\" (UID: \"3ac6a4e2-150b-4137-b547-33cbdd1137b7\") " Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.562681 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4430d3cb-0a1c-4266-9f69-cf8e817f1d3e-logs\") pod \"4430d3cb-0a1c-4266-9f69-cf8e817f1d3e\" (UID: \"4430d3cb-0a1c-4266-9f69-cf8e817f1d3e\") " Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.562713 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mxxtr\" (UniqueName: 
\"kubernetes.io/projected/e0d8717e-1693-4222-aebb-024d291cb0d5-kube-api-access-mxxtr\") pod \"e0d8717e-1693-4222-aebb-024d291cb0d5\" (UID: \"e0d8717e-1693-4222-aebb-024d291cb0d5\") " Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.568387 4558 generic.go:334] "Generic (PLEG): container finished" podID="443f910c-dd2d-4c72-b861-f15de67ac6bb" containerID="ea8201fc9fae9612c7677cb7b1dbff6d7938a87582e7ecc4e74f7875b23122ec" exitCode=2 Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.568394 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e0d8717e-1693-4222-aebb-024d291cb0d5-kube-api-access-mxxtr" (OuterVolumeSpecName: "kube-api-access-mxxtr") pod "e0d8717e-1693-4222-aebb-024d291cb0d5" (UID: "e0d8717e-1693-4222-aebb-024d291cb0d5"). InnerVolumeSpecName "kube-api-access-mxxtr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.568452 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"443f910c-dd2d-4c72-b861-f15de67ac6bb","Type":"ContainerDied","Data":"ea8201fc9fae9612c7677cb7b1dbff6d7938a87582e7ecc4e74f7875b23122ec"} Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.568472 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"443f910c-dd2d-4c72-b861-f15de67ac6bb","Type":"ContainerDied","Data":"46a71c4c2fb2aab516d6c7b2bf6318bfdae48b4c7736d08ffd76acd44932b728"} Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.568484 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="46a71c4c2fb2aab516d6c7b2bf6318bfdae48b4c7736d08ffd76acd44932b728" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.569743 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e0d8717e-1693-4222-aebb-024d291cb0d5-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e0d8717e-1693-4222-aebb-024d291cb0d5" (UID: "e0d8717e-1693-4222-aebb-024d291cb0d5"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.570109 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4430d3cb-0a1c-4266-9f69-cf8e817f1d3e-logs" (OuterVolumeSpecName: "logs") pod "4430d3cb-0a1c-4266-9f69-cf8e817f1d3e" (UID: "4430d3cb-0a1c-4266-9f69-cf8e817f1d3e"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.570403 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3ac6a4e2-150b-4137-b547-33cbdd1137b7-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "3ac6a4e2-150b-4137-b547-33cbdd1137b7" (UID: "3ac6a4e2-150b-4137-b547-33cbdd1137b7"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.570531 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4430d3cb-0a1c-4266-9f69-cf8e817f1d3e-kube-api-access-7g4x5" (OuterVolumeSpecName: "kube-api-access-7g4x5") pod "4430d3cb-0a1c-4266-9f69-cf8e817f1d3e" (UID: "4430d3cb-0a1c-4266-9f69-cf8e817f1d3e"). InnerVolumeSpecName "kube-api-access-7g4x5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.572900 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstackclient" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.577513 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ac6a4e2-150b-4137-b547-33cbdd1137b7-kube-api-access-rf6ql" (OuterVolumeSpecName: "kube-api-access-rf6ql") pod "3ac6a4e2-150b-4137-b547-33cbdd1137b7" (UID: "3ac6a4e2-150b-4137-b547-33cbdd1137b7"). InnerVolumeSpecName "kube-api-access-rf6ql". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.583896 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-f949-account-create-update-v2zng" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.583930 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-f949-account-create-update-v2zng" event={"ID":"3ac6a4e2-150b-4137-b547-33cbdd1137b7","Type":"ContainerDied","Data":"9b296b8e490374239f893d021d6f999f502562a3fa2954ca8fa50525f672a8ea"} Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.589206 4558 generic.go:334] "Generic (PLEG): container finished" podID="ed3c56f1-4bbb-4590-8b19-a0de467537ad" containerID="c3560a1d46ef53613df33e490314cf902b5ef6e4850d07cac0cdb608195a48df" exitCode=1 Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.589262 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-create-pxxqs" event={"ID":"ed3c56f1-4bbb-4590-8b19-a0de467537ad","Type":"ContainerDied","Data":"c3560a1d46ef53613df33e490314cf902b5ef6e4850d07cac0cdb608195a48df"} Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.613085 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-43e3-account-create-update-fdlrr" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.613775 4558 scope.go:117] "RemoveContainer" containerID="fccdd43e49b8880beb3f4193e0e2a09b0c9bcb297f472c9a13a4fc7709889c73" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.615884 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"cc4089c6-71ea-4503-b54c-18777fcc3c48","Type":"ContainerDied","Data":"998152c0b9388437fe908745b3e41450a3c684cee34feea370f0f29a644e8aa7"} Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.615961 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4430d3cb-0a1c-4266-9f69-cf8e817f1d3e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4430d3cb-0a1c-4266-9f69-cf8e817f1d3e" (UID: "4430d3cb-0a1c-4266-9f69-cf8e817f1d3e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.616224 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.617199 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4430d3cb-0a1c-4266-9f69-cf8e817f1d3e-config-data" (OuterVolumeSpecName: "config-data") pod "4430d3cb-0a1c-4266-9f69-cf8e817f1d3e" (UID: "4430d3cb-0a1c-4266-9f69-cf8e817f1d3e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.625666 4558 generic.go:334] "Generic (PLEG): container finished" podID="73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996" containerID="f2931b38008bccdd5209b99b03a91154a5b6e19f660a655b7d1fde4d2990be72" exitCode=0 Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.625762 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996","Type":"ContainerDied","Data":"f2931b38008bccdd5209b99b03a91154a5b6e19f660a655b7d1fde4d2990be72"} Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.625804 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996","Type":"ContainerDied","Data":"a7a134a40ad2fdc31d2893d8841b37e2902c99964219b7826a6e91554f6aa448"} Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.625950 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.631809 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-43e3-account-create-update-fdlrr" event={"ID":"7ce0109d-f162-4ce7-b957-34ba84e8e377","Type":"ContainerDied","Data":"75d61c2e2226f7f863261986f80b02b7ed8ec3398ab59f519f4737c3e2faf21d"} Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.632014 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-43e3-account-create-update-fdlrr" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.633876 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.638692 4558 generic.go:334] "Generic (PLEG): container finished" podID="318ab596-0189-4f84-b805-f28bd05824e1" containerID="17815dd01664011f7b21291548668facb5de910395c1405e050881ec910e03a5" exitCode=0 Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.638775 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-lgbfw" event={"ID":"318ab596-0189-4f84-b805-f28bd05824e1","Type":"ContainerDied","Data":"17815dd01664011f7b21291548668facb5de910395c1405e050881ec910e03a5"} Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.638797 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-lgbfw" event={"ID":"318ab596-0189-4f84-b805-f28bd05824e1","Type":"ContainerStarted","Data":"03355da3f4b4d317b1d947bab5eed6dc4d42c04598faa1d7ffc1b2a3128947a4"} Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.644330 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-a772-account-create-update-27cpc" event={"ID":"e0d8717e-1693-4222-aebb-024d291cb0d5","Type":"ContainerDied","Data":"25792754d41cc5f42b33b0c612735ff2ff78d5fbc10c4077212bc9154f35d4e0"} Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.644425 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-a772-account-create-update-27cpc" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.653324 4558 generic.go:334] "Generic (PLEG): container finished" podID="3460a75e-3553-47b9-bfc5-39c2c459826e" containerID="c8a5e9bf9d8e1a8dad3f6f73cdf354545ce12575fe3b33bce155c23769a44f3e" exitCode=0 Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.653444 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"3460a75e-3553-47b9-bfc5-39c2c459826e","Type":"ContainerDied","Data":"c8a5e9bf9d8e1a8dad3f6f73cdf354545ce12575fe3b33bce155c23769a44f3e"} Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.655335 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-db-create-j7682" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.658958 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4430d3cb-0a1c-4266-9f69-cf8e817f1d3e-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "4430d3cb-0a1c-4266-9f69-cf8e817f1d3e" (UID: "4430d3cb-0a1c-4266-9f69-cf8e817f1d3e"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.665055 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996-combined-ca-bundle\") pod \"73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996\" (UID: \"73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996\") " Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.665323 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996-nova-novncproxy-tls-certs\") pod \"73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996\" (UID: \"73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996\") " Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.665402 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7ce0109d-f162-4ce7-b957-34ba84e8e377-operator-scripts\") pod \"7ce0109d-f162-4ce7-b957-34ba84e8e377\" (UID: \"7ce0109d-f162-4ce7-b957-34ba84e8e377\") " Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.665490 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/443f910c-dd2d-4c72-b861-f15de67ac6bb-combined-ca-bundle\") pod \"443f910c-dd2d-4c72-b861-f15de67ac6bb\" (UID: \"443f910c-dd2d-4c72-b861-f15de67ac6bb\") " Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.665588 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8lfjc\" (UniqueName: \"kubernetes.io/projected/7ce0109d-f162-4ce7-b957-34ba84e8e377-kube-api-access-8lfjc\") pod \"7ce0109d-f162-4ce7-b957-34ba84e8e377\" (UID: \"7ce0109d-f162-4ce7-b957-34ba84e8e377\") " Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.665662 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996-vencrypt-tls-certs\") pod \"73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996\" (UID: \"73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996\") " Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.665725 4558 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xdmnr\" (UniqueName: \"kubernetes.io/projected/73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996-kube-api-access-xdmnr\") pod \"73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996\" (UID: \"73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996\") " Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.665893 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cp2rb\" (UniqueName: \"kubernetes.io/projected/443f910c-dd2d-4c72-b861-f15de67ac6bb-kube-api-access-cp2rb\") pod \"443f910c-dd2d-4c72-b861-f15de67ac6bb\" (UID: \"443f910c-dd2d-4c72-b861-f15de67ac6bb\") " Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.665959 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996-config-data\") pod \"73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996\" (UID: \"73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996\") " Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.665987 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/443f910c-dd2d-4c72-b861-f15de67ac6bb-kube-state-metrics-tls-certs\") pod \"443f910c-dd2d-4c72-b861-f15de67ac6bb\" (UID: \"443f910c-dd2d-4c72-b861-f15de67ac6bb\") " Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.666051 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/443f910c-dd2d-4c72-b861-f15de67ac6bb-kube-state-metrics-tls-config\") pod \"443f910c-dd2d-4c72-b861-f15de67ac6bb\" (UID: \"443f910c-dd2d-4c72-b861-f15de67ac6bb\") " Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.666100 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7ce0109d-f162-4ce7-b957-34ba84e8e377-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7ce0109d-f162-4ce7-b957-34ba84e8e377" (UID: "7ce0109d-f162-4ce7-b957-34ba84e8e377"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.667060 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b69d0136-9748-45bf-a9ed-d23e0180f1b4-operator-scripts\") pod \"nova-cell1-af2b-account-create-update-x6tw5\" (UID: \"b69d0136-9748-45bf-a9ed-d23e0180f1b4\") " pod="openstack-kuttl-tests/nova-cell1-af2b-account-create-update-x6tw5" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.667237 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4j88m\" (UniqueName: \"kubernetes.io/projected/b69d0136-9748-45bf-a9ed-d23e0180f1b4-kube-api-access-4j88m\") pod \"nova-cell1-af2b-account-create-update-x6tw5\" (UID: \"b69d0136-9748-45bf-a9ed-d23e0180f1b4\") " pod="openstack-kuttl-tests/nova-cell1-af2b-account-create-update-x6tw5" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.667744 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4430d3cb-0a1c-4266-9f69-cf8e817f1d3e-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.667760 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4430d3cb-0a1c-4266-9f69-cf8e817f1d3e-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.667783 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3ac6a4e2-150b-4137-b547-33cbdd1137b7-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.667794 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7g4x5\" (UniqueName: \"kubernetes.io/projected/4430d3cb-0a1c-4266-9f69-cf8e817f1d3e-kube-api-access-7g4x5\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.667826 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e0d8717e-1693-4222-aebb-024d291cb0d5-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.667836 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4430d3cb-0a1c-4266-9f69-cf8e817f1d3e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.667849 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7ce0109d-f162-4ce7-b957-34ba84e8e377-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.667861 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rf6ql\" (UniqueName: \"kubernetes.io/projected/3ac6a4e2-150b-4137-b547-33cbdd1137b7-kube-api-access-rf6ql\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.667899 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4430d3cb-0a1c-4266-9f69-cf8e817f1d3e-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.667912 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mxxtr\" (UniqueName: 
\"kubernetes.io/projected/e0d8717e-1693-4222-aebb-024d291cb0d5-kube-api-access-mxxtr\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:05 crc kubenswrapper[4558]: E0120 17:28:05.674614 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-cell1-scripts: configmap "openstack-cell1-scripts" not found Jan 20 17:28:05 crc kubenswrapper[4558]: E0120 17:28:05.674699 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/b69d0136-9748-45bf-a9ed-d23e0180f1b4-operator-scripts podName:b69d0136-9748-45bf-a9ed-d23e0180f1b4 nodeName:}" failed. No retries permitted until 2026-01-20 17:28:09.674677791 +0000 UTC m=+2783.435015757 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/b69d0136-9748-45bf-a9ed-d23e0180f1b4-operator-scripts") pod "nova-cell1-af2b-account-create-update-x6tw5" (UID: "b69d0136-9748-45bf-a9ed-d23e0180f1b4") : configmap "openstack-cell1-scripts" not found Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.676949 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/443f910c-dd2d-4c72-b861-f15de67ac6bb-kube-api-access-cp2rb" (OuterVolumeSpecName: "kube-api-access-cp2rb") pod "443f910c-dd2d-4c72-b861-f15de67ac6bb" (UID: "443f910c-dd2d-4c72-b861-f15de67ac6bb"). InnerVolumeSpecName "kube-api-access-cp2rb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:28:05 crc kubenswrapper[4558]: E0120 17:28:05.677041 4558 projected.go:194] Error preparing data for projected volume kube-api-access-4j88m for pod openstack-kuttl-tests/nova-cell1-af2b-account-create-update-x6tw5: failed to fetch token: serviceaccounts "galera-openstack-cell1" not found Jan 20 17:28:05 crc kubenswrapper[4558]: E0120 17:28:05.677098 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/b69d0136-9748-45bf-a9ed-d23e0180f1b4-kube-api-access-4j88m podName:b69d0136-9748-45bf-a9ed-d23e0180f1b4 nodeName:}" failed. No retries permitted until 2026-01-20 17:28:09.677086148 +0000 UTC m=+2783.437424115 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-4j88m" (UniqueName: "kubernetes.io/projected/b69d0136-9748-45bf-a9ed-d23e0180f1b4-kube-api-access-4j88m") pod "nova-cell1-af2b-account-create-update-x6tw5" (UID: "b69d0136-9748-45bf-a9ed-d23e0180f1b4") : failed to fetch token: serviceaccounts "galera-openstack-cell1" not found Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.689780 4558 generic.go:334] "Generic (PLEG): container finished" podID="bfb2b456-ae0e-42ca-a227-428a626f1e3e" containerID="0988bc018c883e0a93655e8a6aa66b272e95530ac909536051e552811ce17949" exitCode=1 Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.689961 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-create-2kmst" event={"ID":"bfb2b456-ae0e-42ca-a227-428a626f1e3e","Type":"ContainerDied","Data":"0988bc018c883e0a93655e8a6aa66b272e95530ac909536051e552811ce17949"} Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.695621 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-7ab2-account-create-update-c5qqm" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.695934 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996-kube-api-access-xdmnr" (OuterVolumeSpecName: "kube-api-access-xdmnr") pod "73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996" (UID: "73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996"). InnerVolumeSpecName "kube-api-access-xdmnr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.703297 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7ce0109d-f162-4ce7-b957-34ba84e8e377-kube-api-access-8lfjc" (OuterVolumeSpecName: "kube-api-access-8lfjc") pod "7ce0109d-f162-4ce7-b957-34ba84e8e377" (UID: "7ce0109d-f162-4ce7-b957-34ba84e8e377"). InnerVolumeSpecName "kube-api-access-8lfjc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.703589 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4430d3cb-0a1c-4266-9f69-cf8e817f1d3e-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "4430d3cb-0a1c-4266-9f69-cf8e817f1d3e" (UID: "4430d3cb-0a1c-4266-9f69-cf8e817f1d3e"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.703790 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/443f910c-dd2d-4c72-b861-f15de67ac6bb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "443f910c-dd2d-4c72-b861-f15de67ac6bb" (UID: "443f910c-dd2d-4c72-b861-f15de67ac6bb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.704189 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-proxy-7459567f99-gx6dr" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.708924 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.711983 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-proxy-5f7fb7b-s2dxn" podUID="4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5" containerName="proxy-httpd" containerID="cri-o://bcc4b197f2fa6bbcecefb3da95a4d96c30060a9d1504ae2efef0a52dc6bcf098" gracePeriod=30 Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.713637 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-64c1-account-create-update-wsq7z" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.714552 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-db-create-rztt2" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.714630 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-create-j7682" event={"ID":"df28560c-92e6-4003-a08f-b8691fa43300","Type":"ContainerDied","Data":"bb6338053e49720b1e74bb1a48e7513dde37dd08f6b58323abe135e98b714799"} Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.714659 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-scheduler-0" podUID="c1afb831-f3e4-4356-ab86-713eb0beca39" containerName="cinder-scheduler" containerID="cri-o://11baea4ab79cfa0023a448f7d4c2fd060ecbb75ef0a90656b63b54a79a53d8f7" gracePeriod=30 Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.714745 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-db-create-j7682" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.714760 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-74d54f756c-g9zzk" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.714842 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-scheduler-0" podUID="c1afb831-f3e4-4356-ab86-713eb0beca39" containerName="probe" containerID="cri-o://4073b49237446b10090528b017b317faf894679a9662161b1fb6fedebcfc14ed" gracePeriod=30 Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.714930 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-8f4cc76c4-9qkpn" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.714974 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-proxy-5f7fb7b-s2dxn" podUID="4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5" containerName="proxy-server" containerID="cri-o://5a07cb607c001f9ab3a834dea57d84934f1fe9dff82a41e28812e0217fe09cff" gracePeriod=30 Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.715534 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-af2b-account-create-update-x6tw5" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.715744 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.716135 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-db-create-rt2fh" podUID="1b7c6fb3-f30d-4dde-8502-e7d840719520" containerName="mariadb-database-create" containerID="cri-o://a986efaf418f6a3e2abd8d9f06b0fe8df99833bf6b1e6f9c131ce1990c94b418" gracePeriod=30 Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.718510 4558 scope.go:117] "RemoveContainer" containerID="01f067cab301938980d9e1ee9c68462c017ce912a5f70f653badf838dae20a53" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.719923 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/443f910c-dd2d-4c72-b861-f15de67ac6bb-kube-state-metrics-tls-config" (OuterVolumeSpecName: "kube-state-metrics-tls-config") pod "443f910c-dd2d-4c72-b861-f15de67ac6bb" (UID: "443f910c-dd2d-4c72-b861-f15de67ac6bb"). InnerVolumeSpecName "kube-state-metrics-tls-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.727238 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996-config-data" (OuterVolumeSpecName: "config-data") pod "73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996" (UID: "73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.730836 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-64c1-account-create-update-wsq7z" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.737553 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-db-create-rztt2" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.765270 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/443f910c-dd2d-4c72-b861-f15de67ac6bb-kube-state-metrics-tls-certs" (OuterVolumeSpecName: "kube-state-metrics-tls-certs") pod "443f910c-dd2d-4c72-b861-f15de67ac6bb" (UID: "443f910c-dd2d-4c72-b861-f15de67ac6bb"). InnerVolumeSpecName "kube-state-metrics-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.772566 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2bee9ff9-52bb-4dae-971c-8c6236a4e563-combined-ca-bundle\") pod \"2bee9ff9-52bb-4dae-971c-8c6236a4e563\" (UID: \"2bee9ff9-52bb-4dae-971c-8c6236a4e563\") " Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.772684 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-prdr7\" (UniqueName: \"kubernetes.io/projected/f6a0777b-d04f-451b-9e3d-3f9ab0e19df8-kube-api-access-prdr7\") pod \"f6a0777b-d04f-451b-9e3d-3f9ab0e19df8\" (UID: \"f6a0777b-d04f-451b-9e3d-3f9ab0e19df8\") " Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.772726 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f6a0777b-d04f-451b-9e3d-3f9ab0e19df8-config-data-custom\") pod \"f6a0777b-d04f-451b-9e3d-3f9ab0e19df8\" (UID: \"f6a0777b-d04f-451b-9e3d-3f9ab0e19df8\") " Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.772757 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mp4v6\" (UniqueName: \"kubernetes.io/projected/b201974e-8bb6-412a-95d5-cce7a95e4528-kube-api-access-mp4v6\") pod \"b201974e-8bb6-412a-95d5-cce7a95e4528\" (UID: \"b201974e-8bb6-412a-95d5-cce7a95e4528\") " Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.772844 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f81a00ad-36ce-4383-9e91-fe60de6939d2-operator-scripts\") pod \"f81a00ad-36ce-4383-9e91-fe60de6939d2\" (UID: \"f81a00ad-36ce-4383-9e91-fe60de6939d2\") " Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.772894 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2bee9ff9-52bb-4dae-971c-8c6236a4e563-config-data\") pod \"2bee9ff9-52bb-4dae-971c-8c6236a4e563\" (UID: \"2bee9ff9-52bb-4dae-971c-8c6236a4e563\") " Jan 20 17:28:05 crc 
kubenswrapper[4558]: I0120 17:28:05.772931 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/f81a00ad-36ce-4383-9e91-fe60de6939d2-kolla-config\") pod \"f81a00ad-36ce-4383-9e91-fe60de6939d2\" (UID: \"f81a00ad-36ce-4383-9e91-fe60de6939d2\") " Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.772971 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"f81a00ad-36ce-4383-9e91-fe60de6939d2\" (UID: \"f81a00ad-36ce-4383-9e91-fe60de6939d2\") " Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.773048 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f6a0777b-d04f-451b-9e3d-3f9ab0e19df8-etc-machine-id\") pod \"f6a0777b-d04f-451b-9e3d-3f9ab0e19df8\" (UID: \"f6a0777b-d04f-451b-9e3d-3f9ab0e19df8\") " Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.773101 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2bee9ff9-52bb-4dae-971c-8c6236a4e563-run-httpd\") pod \"2bee9ff9-52bb-4dae-971c-8c6236a4e563\" (UID: \"2bee9ff9-52bb-4dae-971c-8c6236a4e563\") " Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.773132 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/2bee9ff9-52bb-4dae-971c-8c6236a4e563-etc-swift\") pod \"2bee9ff9-52bb-4dae-971c-8c6236a4e563\" (UID: \"2bee9ff9-52bb-4dae-971c-8c6236a4e563\") " Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.773157 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f6a0777b-d04f-451b-9e3d-3f9ab0e19df8-public-tls-certs\") pod \"f6a0777b-d04f-451b-9e3d-3f9ab0e19df8\" (UID: \"f6a0777b-d04f-451b-9e3d-3f9ab0e19df8\") " Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.773200 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b201974e-8bb6-412a-95d5-cce7a95e4528-operator-scripts\") pod \"b201974e-8bb6-412a-95d5-cce7a95e4528\" (UID: \"b201974e-8bb6-412a-95d5-cce7a95e4528\") " Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.773219 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/df28560c-92e6-4003-a08f-b8691fa43300-operator-scripts\") pod \"df28560c-92e6-4003-a08f-b8691fa43300\" (UID: \"df28560c-92e6-4003-a08f-b8691fa43300\") " Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.773235 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f6a0777b-d04f-451b-9e3d-3f9ab0e19df8-scripts\") pod \"f6a0777b-d04f-451b-9e3d-3f9ab0e19df8\" (UID: \"f6a0777b-d04f-451b-9e3d-3f9ab0e19df8\") " Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.773265 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2bee9ff9-52bb-4dae-971c-8c6236a4e563-log-httpd\") pod \"2bee9ff9-52bb-4dae-971c-8c6236a4e563\" (UID: \"2bee9ff9-52bb-4dae-971c-8c6236a4e563\") " Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.773284 4558 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-qz5rv\" (UniqueName: \"kubernetes.io/projected/df28560c-92e6-4003-a08f-b8691fa43300-kube-api-access-qz5rv\") pod \"df28560c-92e6-4003-a08f-b8691fa43300\" (UID: \"df28560c-92e6-4003-a08f-b8691fa43300\") " Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.773310 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f81a00ad-36ce-4383-9e91-fe60de6939d2-combined-ca-bundle\") pod \"f81a00ad-36ce-4383-9e91-fe60de6939d2\" (UID: \"f81a00ad-36ce-4383-9e91-fe60de6939d2\") " Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.773344 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f6a0777b-d04f-451b-9e3d-3f9ab0e19df8-internal-tls-certs\") pod \"f6a0777b-d04f-451b-9e3d-3f9ab0e19df8\" (UID: \"f6a0777b-d04f-451b-9e3d-3f9ab0e19df8\") " Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.773406 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/f81a00ad-36ce-4383-9e91-fe60de6939d2-galera-tls-certs\") pod \"f81a00ad-36ce-4383-9e91-fe60de6939d2\" (UID: \"f81a00ad-36ce-4383-9e91-fe60de6939d2\") " Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.773584 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hcqdx\" (UniqueName: \"kubernetes.io/projected/2bee9ff9-52bb-4dae-971c-8c6236a4e563-kube-api-access-hcqdx\") pod \"2bee9ff9-52bb-4dae-971c-8c6236a4e563\" (UID: \"2bee9ff9-52bb-4dae-971c-8c6236a4e563\") " Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.773649 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f6a0777b-d04f-451b-9e3d-3f9ab0e19df8-logs\") pod \"f6a0777b-d04f-451b-9e3d-3f9ab0e19df8\" (UID: \"f6a0777b-d04f-451b-9e3d-3f9ab0e19df8\") " Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.773674 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/f81a00ad-36ce-4383-9e91-fe60de6939d2-config-data-default\") pod \"f81a00ad-36ce-4383-9e91-fe60de6939d2\" (UID: \"f81a00ad-36ce-4383-9e91-fe60de6939d2\") " Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.773750 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f6a0777b-d04f-451b-9e3d-3f9ab0e19df8-combined-ca-bundle\") pod \"f6a0777b-d04f-451b-9e3d-3f9ab0e19df8\" (UID: \"f6a0777b-d04f-451b-9e3d-3f9ab0e19df8\") " Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.773811 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/f81a00ad-36ce-4383-9e91-fe60de6939d2-config-data-generated\") pod \"f81a00ad-36ce-4383-9e91-fe60de6939d2\" (UID: \"f81a00ad-36ce-4383-9e91-fe60de6939d2\") " Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.773843 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2bee9ff9-52bb-4dae-971c-8c6236a4e563-internal-tls-certs\") pod \"2bee9ff9-52bb-4dae-971c-8c6236a4e563\" (UID: \"2bee9ff9-52bb-4dae-971c-8c6236a4e563\") " Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 
17:28:05.773880 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rlk6h\" (UniqueName: \"kubernetes.io/projected/f81a00ad-36ce-4383-9e91-fe60de6939d2-kube-api-access-rlk6h\") pod \"f81a00ad-36ce-4383-9e91-fe60de6939d2\" (UID: \"f81a00ad-36ce-4383-9e91-fe60de6939d2\") " Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.773904 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f6a0777b-d04f-451b-9e3d-3f9ab0e19df8-config-data\") pod \"f6a0777b-d04f-451b-9e3d-3f9ab0e19df8\" (UID: \"f6a0777b-d04f-451b-9e3d-3f9ab0e19df8\") " Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.773921 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2bee9ff9-52bb-4dae-971c-8c6236a4e563-public-tls-certs\") pod \"2bee9ff9-52bb-4dae-971c-8c6236a4e563\" (UID: \"2bee9ff9-52bb-4dae-971c-8c6236a4e563\") " Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.774416 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/012246ba-362c-48f3-bbb1-913d57f1f9ed-operator-scripts\") pod \"keystone-64c1-account-create-update-wsq7z\" (UID: \"012246ba-362c-48f3-bbb1-913d57f1f9ed\") " pod="openstack-kuttl-tests/keystone-64c1-account-create-update-wsq7z" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.774808 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xfskp\" (UniqueName: \"kubernetes.io/projected/012246ba-362c-48f3-bbb1-913d57f1f9ed-kube-api-access-xfskp\") pod \"keystone-64c1-account-create-update-wsq7z\" (UID: \"012246ba-362c-48f3-bbb1-913d57f1f9ed\") " pod="openstack-kuttl-tests/keystone-64c1-account-create-update-wsq7z" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.774939 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/443f910c-dd2d-4c72-b861-f15de67ac6bb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.774958 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8lfjc\" (UniqueName: \"kubernetes.io/projected/7ce0109d-f162-4ce7-b957-34ba84e8e377-kube-api-access-8lfjc\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.774972 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xdmnr\" (UniqueName: \"kubernetes.io/projected/73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996-kube-api-access-xdmnr\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.774983 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cp2rb\" (UniqueName: \"kubernetes.io/projected/443f910c-dd2d-4c72-b861-f15de67ac6bb-kube-api-access-cp2rb\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.774993 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.775002 4558 reconciler_common.go:293] "Volume detached for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/443f910c-dd2d-4c72-b861-f15de67ac6bb-kube-state-metrics-tls-certs\") on node \"crc\" 
DevicePath \"\"" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.775014 4558 reconciler_common.go:293] "Volume detached for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/443f910c-dd2d-4c72-b861-f15de67ac6bb-kube-state-metrics-tls-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.775024 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4430d3cb-0a1c-4266-9f69-cf8e817f1d3e-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.775555 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/df28560c-92e6-4003-a08f-b8691fa43300-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "df28560c-92e6-4003-a08f-b8691fa43300" (UID: "df28560c-92e6-4003-a08f-b8691fa43300"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.775681 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f81a00ad-36ce-4383-9e91-fe60de6939d2-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "f81a00ad-36ce-4383-9e91-fe60de6939d2" (UID: "f81a00ad-36ce-4383-9e91-fe60de6939d2"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:28:05 crc kubenswrapper[4558]: E0120 17:28:05.776232 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 17:28:05 crc kubenswrapper[4558]: E0120 17:28:05.776308 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/012246ba-362c-48f3-bbb1-913d57f1f9ed-operator-scripts podName:012246ba-362c-48f3-bbb1-913d57f1f9ed nodeName:}" failed. No retries permitted until 2026-01-20 17:28:07.776293005 +0000 UTC m=+2781.536630972 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/012246ba-362c-48f3-bbb1-913d57f1f9ed-operator-scripts") pod "keystone-64c1-account-create-update-wsq7z" (UID: "012246ba-362c-48f3-bbb1-913d57f1f9ed") : configmap "openstack-scripts" not found Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.777255 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996" (UID: "73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.782942 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2bee9ff9-52bb-4dae-971c-8c6236a4e563-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "2bee9ff9-52bb-4dae-971c-8c6236a4e563" (UID: "2bee9ff9-52bb-4dae-971c-8c6236a4e563"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.783093 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f6a0777b-d04f-451b-9e3d-3f9ab0e19df8-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "f6a0777b-d04f-451b-9e3d-3f9ab0e19df8" (UID: "f6a0777b-d04f-451b-9e3d-3f9ab0e19df8"). 
InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.783428 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2bee9ff9-52bb-4dae-971c-8c6236a4e563-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "2bee9ff9-52bb-4dae-971c-8c6236a4e563" (UID: "2bee9ff9-52bb-4dae-971c-8c6236a4e563"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.783453 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996-nova-novncproxy-tls-certs" (OuterVolumeSpecName: "nova-novncproxy-tls-certs") pod "73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996" (UID: "73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996"). InnerVolumeSpecName "nova-novncproxy-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.783844 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f81a00ad-36ce-4383-9e91-fe60de6939d2-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f81a00ad-36ce-4383-9e91-fe60de6939d2" (UID: "f81a00ad-36ce-4383-9e91-fe60de6939d2"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:28:05 crc kubenswrapper[4558]: E0120 17:28:05.784119 4558 projected.go:194] Error preparing data for projected volume kube-api-access-xfskp for pod openstack-kuttl-tests/keystone-64c1-account-create-update-wsq7z: failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.784965 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b201974e-8bb6-412a-95d5-cce7a95e4528-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b201974e-8bb6-412a-95d5-cce7a95e4528" (UID: "b201974e-8bb6-412a-95d5-cce7a95e4528"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.785904 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f81a00ad-36ce-4383-9e91-fe60de6939d2-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "f81a00ad-36ce-4383-9e91-fe60de6939d2" (UID: "f81a00ad-36ce-4383-9e91-fe60de6939d2"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.786333 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f6a0777b-d04f-451b-9e3d-3f9ab0e19df8-logs" (OuterVolumeSpecName: "logs") pod "f6a0777b-d04f-451b-9e3d-3f9ab0e19df8" (UID: "f6a0777b-d04f-451b-9e3d-3f9ab0e19df8"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.790377 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f81a00ad-36ce-4383-9e91-fe60de6939d2-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "f81a00ad-36ce-4383-9e91-fe60de6939d2" (UID: "f81a00ad-36ce-4383-9e91-fe60de6939d2"). InnerVolumeSpecName "config-data-generated". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.794246 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996-vencrypt-tls-certs" (OuterVolumeSpecName: "vencrypt-tls-certs") pod "73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996" (UID: "73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996"). InnerVolumeSpecName "vencrypt-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.794408 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2bee9ff9-52bb-4dae-971c-8c6236a4e563-kube-api-access-hcqdx" (OuterVolumeSpecName: "kube-api-access-hcqdx") pod "2bee9ff9-52bb-4dae-971c-8c6236a4e563" (UID: "2bee9ff9-52bb-4dae-971c-8c6236a4e563"). InnerVolumeSpecName "kube-api-access-hcqdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:28:05 crc kubenswrapper[4558]: E0120 17:28:05.801343 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/012246ba-362c-48f3-bbb1-913d57f1f9ed-kube-api-access-xfskp podName:012246ba-362c-48f3-bbb1-913d57f1f9ed nodeName:}" failed. No retries permitted until 2026-01-20 17:28:07.801311371 +0000 UTC m=+2781.561649338 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-xfskp" (UniqueName: "kubernetes.io/projected/012246ba-362c-48f3-bbb1-913d57f1f9ed-kube-api-access-xfskp") pod "keystone-64c1-account-create-update-wsq7z" (UID: "012246ba-362c-48f3-bbb1-913d57f1f9ed") : failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.801700 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-f949-account-create-update-v2zng"] Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.814354 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f6a0777b-d04f-451b-9e3d-3f9ab0e19df8-scripts" (OuterVolumeSpecName: "scripts") pod "f6a0777b-d04f-451b-9e3d-3f9ab0e19df8" (UID: "f6a0777b-d04f-451b-9e3d-3f9ab0e19df8"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.814383 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f6a0777b-d04f-451b-9e3d-3f9ab0e19df8-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "f6a0777b-d04f-451b-9e3d-3f9ab0e19df8" (UID: "f6a0777b-d04f-451b-9e3d-3f9ab0e19df8"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.814429 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell0-f949-account-create-update-v2zng"] Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.815281 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2bee9ff9-52bb-4dae-971c-8c6236a4e563-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "2bee9ff9-52bb-4dae-971c-8c6236a4e563" (UID: "2bee9ff9-52bb-4dae-971c-8c6236a4e563"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.816549 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f81a00ad-36ce-4383-9e91-fe60de6939d2-kube-api-access-rlk6h" (OuterVolumeSpecName: "kube-api-access-rlk6h") pod "f81a00ad-36ce-4383-9e91-fe60de6939d2" (UID: "f81a00ad-36ce-4383-9e91-fe60de6939d2"). InnerVolumeSpecName "kube-api-access-rlk6h". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.816582 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f6a0777b-d04f-451b-9e3d-3f9ab0e19df8-kube-api-access-prdr7" (OuterVolumeSpecName: "kube-api-access-prdr7") pod "f6a0777b-d04f-451b-9e3d-3f9ab0e19df8" (UID: "f6a0777b-d04f-451b-9e3d-3f9ab0e19df8"). InnerVolumeSpecName "kube-api-access-prdr7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.816615 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/df28560c-92e6-4003-a08f-b8691fa43300-kube-api-access-qz5rv" (OuterVolumeSpecName: "kube-api-access-qz5rv") pod "df28560c-92e6-4003-a08f-b8691fa43300" (UID: "df28560c-92e6-4003-a08f-b8691fa43300"). InnerVolumeSpecName "kube-api-access-qz5rv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.816709 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b201974e-8bb6-412a-95d5-cce7a95e4528-kube-api-access-mp4v6" (OuterVolumeSpecName: "kube-api-access-mp4v6") pod "b201974e-8bb6-412a-95d5-cce7a95e4528" (UID: "b201974e-8bb6-412a-95d5-cce7a95e4528"). InnerVolumeSpecName "kube-api-access-mp4v6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.816867 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.848732 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.850650 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f81a00ad-36ce-4383-9e91-fe60de6939d2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f81a00ad-36ce-4383-9e91-fe60de6939d2" (UID: "f81a00ad-36ce-4383-9e91-fe60de6939d2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.853381 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.853957 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "mysql-db") pod "f81a00ad-36ce-4383-9e91-fe60de6939d2" (UID: "f81a00ad-36ce-4383-9e91-fe60de6939d2"). InnerVolumeSpecName "local-storage10-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.859575 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2bee9ff9-52bb-4dae-971c-8c6236a4e563-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "2bee9ff9-52bb-4dae-971c-8c6236a4e563" (UID: "2bee9ff9-52bb-4dae-971c-8c6236a4e563"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.866137 4558 scope.go:117] "RemoveContainer" containerID="fccdd43e49b8880beb3f4193e0e2a09b0c9bcb297f472c9a13a4fc7709889c73" Jan 20 17:28:05 crc kubenswrapper[4558]: E0120 17:28:05.867794 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fccdd43e49b8880beb3f4193e0e2a09b0c9bcb297f472c9a13a4fc7709889c73\": container with ID starting with fccdd43e49b8880beb3f4193e0e2a09b0c9bcb297f472c9a13a4fc7709889c73 not found: ID does not exist" containerID="fccdd43e49b8880beb3f4193e0e2a09b0c9bcb297f472c9a13a4fc7709889c73" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.867851 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fccdd43e49b8880beb3f4193e0e2a09b0c9bcb297f472c9a13a4fc7709889c73"} err="failed to get container status \"fccdd43e49b8880beb3f4193e0e2a09b0c9bcb297f472c9a13a4fc7709889c73\": rpc error: code = NotFound desc = could not find container \"fccdd43e49b8880beb3f4193e0e2a09b0c9bcb297f472c9a13a4fc7709889c73\": container with ID starting with fccdd43e49b8880beb3f4193e0e2a09b0c9bcb297f472c9a13a4fc7709889c73 not found: ID does not exist" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.868201 4558 scope.go:117] "RemoveContainer" containerID="01f067cab301938980d9e1ee9c68462c017ce912a5f70f653badf838dae20a53" Jan 20 17:28:05 crc kubenswrapper[4558]: E0120 17:28:05.870021 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"01f067cab301938980d9e1ee9c68462c017ce912a5f70f653badf838dae20a53\": container with ID starting with 01f067cab301938980d9e1ee9c68462c017ce912a5f70f653badf838dae20a53 not found: ID does not exist" containerID="01f067cab301938980d9e1ee9c68462c017ce912a5f70f653badf838dae20a53" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.870068 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"01f067cab301938980d9e1ee9c68462c017ce912a5f70f653badf838dae20a53"} err="failed to get container status \"01f067cab301938980d9e1ee9c68462c017ce912a5f70f653badf838dae20a53\": rpc error: code = NotFound desc = could not find container \"01f067cab301938980d9e1ee9c68462c017ce912a5f70f653badf838dae20a53\": container with ID starting with 01f067cab301938980d9e1ee9c68462c017ce912a5f70f653badf838dae20a53 not found: ID does not exist" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.870092 4558 scope.go:117] "RemoveContainer" containerID="fc05c1e78ad7a92e2b78d674adb5592b41e556323420c6070785f62676598f12" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.875918 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3460a75e-3553-47b9-bfc5-39c2c459826e-combined-ca-bundle\") pod \"3460a75e-3553-47b9-bfc5-39c2c459826e\" (UID: \"3460a75e-3553-47b9-bfc5-39c2c459826e\") " Jan 20 17:28:05 crc kubenswrapper[4558]: 
I0120 17:28:05.876010 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qszsc\" (UniqueName: \"kubernetes.io/projected/3460a75e-3553-47b9-bfc5-39c2c459826e-kube-api-access-qszsc\") pod \"3460a75e-3553-47b9-bfc5-39c2c459826e\" (UID: \"3460a75e-3553-47b9-bfc5-39c2c459826e\") " Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.876142 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/3460a75e-3553-47b9-bfc5-39c2c459826e-memcached-tls-certs\") pod \"3460a75e-3553-47b9-bfc5-39c2c459826e\" (UID: \"3460a75e-3553-47b9-bfc5-39c2c459826e\") " Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.876185 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/3460a75e-3553-47b9-bfc5-39c2c459826e-kolla-config\") pod \"3460a75e-3553-47b9-bfc5-39c2c459826e\" (UID: \"3460a75e-3553-47b9-bfc5-39c2c459826e\") " Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.876250 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3460a75e-3553-47b9-bfc5-39c2c459826e-config-data\") pod \"3460a75e-3553-47b9-bfc5-39c2c459826e\" (UID: \"3460a75e-3553-47b9-bfc5-39c2c459826e\") " Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.876733 4558 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/2bee9ff9-52bb-4dae-971c-8c6236a4e563-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.876758 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/df28560c-92e6-4003-a08f-b8691fa43300-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.876784 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f6a0777b-d04f-451b-9e3d-3f9ab0e19df8-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.876794 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b201974e-8bb6-412a-95d5-cce7a95e4528-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.876805 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.876816 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2bee9ff9-52bb-4dae-971c-8c6236a4e563-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.876826 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qz5rv\" (UniqueName: \"kubernetes.io/projected/df28560c-92e6-4003-a08f-b8691fa43300-kube-api-access-qz5rv\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.876835 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f81a00ad-36ce-4383-9e91-fe60de6939d2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:05 crc 
kubenswrapper[4558]: I0120 17:28:05.876846 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hcqdx\" (UniqueName: \"kubernetes.io/projected/2bee9ff9-52bb-4dae-971c-8c6236a4e563-kube-api-access-hcqdx\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.876855 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f6a0777b-d04f-451b-9e3d-3f9ab0e19df8-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.876864 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/f81a00ad-36ce-4383-9e91-fe60de6939d2-config-data-default\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.876873 4558 reconciler_common.go:293] "Volume detached for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996-nova-novncproxy-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.876882 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/f81a00ad-36ce-4383-9e91-fe60de6939d2-config-data-generated\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.876894 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rlk6h\" (UniqueName: \"kubernetes.io/projected/f81a00ad-36ce-4383-9e91-fe60de6939d2-kube-api-access-rlk6h\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.876905 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2bee9ff9-52bb-4dae-971c-8c6236a4e563-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.876915 4558 reconciler_common.go:293] "Volume detached for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996-vencrypt-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.876926 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-prdr7\" (UniqueName: \"kubernetes.io/projected/f6a0777b-d04f-451b-9e3d-3f9ab0e19df8-kube-api-access-prdr7\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.876936 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f6a0777b-d04f-451b-9e3d-3f9ab0e19df8-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.876947 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mp4v6\" (UniqueName: \"kubernetes.io/projected/b201974e-8bb6-412a-95d5-cce7a95e4528-kube-api-access-mp4v6\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.876956 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f81a00ad-36ce-4383-9e91-fe60de6939d2-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.876965 4558 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/f81a00ad-36ce-4383-9e91-fe60de6939d2-kolla-config\") on node \"crc\" DevicePath \"\"" 
Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.876988 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" " Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.876997 4558 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f6a0777b-d04f-451b-9e3d-3f9ab0e19df8-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.877008 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2bee9ff9-52bb-4dae-971c-8c6236a4e563-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:05 crc kubenswrapper[4558]: E0120 17:28:05.880414 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 17:28:05 crc kubenswrapper[4558]: E0120 17:28:05.880483 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/54457478-e4f4-4088-bd18-e427314e1fb2-operator-scripts podName:54457478-e4f4-4088-bd18-e427314e1fb2 nodeName:}" failed. No retries permitted until 2026-01-20 17:28:07.880463503 +0000 UTC m=+2781.640801470 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/54457478-e4f4-4088-bd18-e427314e1fb2-operator-scripts") pod "barbican-e949-account-create-update-qd847" (UID: "54457478-e4f4-4088-bd18-e427314e1fb2") : configmap "openstack-scripts" not found Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.880731 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3460a75e-3553-47b9-bfc5-39c2c459826e-config-data" (OuterVolumeSpecName: "config-data") pod "3460a75e-3553-47b9-bfc5-39c2c459826e" (UID: "3460a75e-3553-47b9-bfc5-39c2c459826e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.881696 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3460a75e-3553-47b9-bfc5-39c2c459826e-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "3460a75e-3553-47b9-bfc5-39c2c459826e" (UID: "3460a75e-3553-47b9-bfc5-39c2c459826e"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.884557 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3460a75e-3553-47b9-bfc5-39c2c459826e-kube-api-access-qszsc" (OuterVolumeSpecName: "kube-api-access-qszsc") pod "3460a75e-3553-47b9-bfc5-39c2c459826e" (UID: "3460a75e-3553-47b9-bfc5-39c2c459826e"). InnerVolumeSpecName "kube-api-access-qszsc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.895911 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f6a0777b-d04f-451b-9e3d-3f9ab0e19df8-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "f6a0777b-d04f-451b-9e3d-3f9ab0e19df8" (UID: "f6a0777b-d04f-451b-9e3d-3f9ab0e19df8"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.899716 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f6a0777b-d04f-451b-9e3d-3f9ab0e19df8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f6a0777b-d04f-451b-9e3d-3f9ab0e19df8" (UID: "f6a0777b-d04f-451b-9e3d-3f9ab0e19df8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.923675 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.929390 4558 scope.go:117] "RemoveContainer" containerID="ddfb78eb686b10db6949511eb53f1d12111dd9665adfb113b40c64dcc8f80745" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.929695 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f6a0777b-d04f-451b-9e3d-3f9ab0e19df8-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "f6a0777b-d04f-451b-9e3d-3f9ab0e19df8" (UID: "f6a0777b-d04f-451b-9e3d-3f9ab0e19df8"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.930838 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2bee9ff9-52bb-4dae-971c-8c6236a4e563-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2bee9ff9-52bb-4dae-971c-8c6236a4e563" (UID: "2bee9ff9-52bb-4dae-971c-8c6236a4e563"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.939226 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f81a00ad-36ce-4383-9e91-fe60de6939d2-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "f81a00ad-36ce-4383-9e91-fe60de6939d2" (UID: "f81a00ad-36ce-4383-9e91-fe60de6939d2"). InnerVolumeSpecName "galera-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.943351 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3460a75e-3553-47b9-bfc5-39c2c459826e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3460a75e-3553-47b9-bfc5-39c2c459826e" (UID: "3460a75e-3553-47b9-bfc5-39c2c459826e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.944880 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f6a0777b-d04f-451b-9e3d-3f9ab0e19df8-config-data" (OuterVolumeSpecName: "config-data") pod "f6a0777b-d04f-451b-9e3d-3f9ab0e19df8" (UID: "f6a0777b-d04f-451b-9e3d-3f9ab0e19df8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.954628 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2bee9ff9-52bb-4dae-971c-8c6236a4e563-config-data" (OuterVolumeSpecName: "config-data") pod "2bee9ff9-52bb-4dae-971c-8c6236a4e563" (UID: "2bee9ff9-52bb-4dae-971c-8c6236a4e563"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.954851 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2bee9ff9-52bb-4dae-971c-8c6236a4e563-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "2bee9ff9-52bb-4dae-971c-8c6236a4e563" (UID: "2bee9ff9-52bb-4dae-971c-8c6236a4e563"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.981354 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tp4lp\" (UniqueName: \"kubernetes.io/projected/c67fda0a-394c-424a-ad72-f0e3ebd77f1b-kube-api-access-tp4lp\") pod \"keystone-db-create-rztt2\" (UID: \"c67fda0a-394c-424a-ad72-f0e3ebd77f1b\") " pod="openstack-kuttl-tests/keystone-db-create-rztt2" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.981925 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c67fda0a-394c-424a-ad72-f0e3ebd77f1b-operator-scripts\") pod \"keystone-db-create-rztt2\" (UID: \"c67fda0a-394c-424a-ad72-f0e3ebd77f1b\") " pod="openstack-kuttl-tests/keystone-db-create-rztt2" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.982357 4558 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/f81a00ad-36ce-4383-9e91-fe60de6939d2-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.982373 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f6a0777b-d04f-451b-9e3d-3f9ab0e19df8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.982384 4558 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/3460a75e-3553-47b9-bfc5-39c2c459826e-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.982393 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2bee9ff9-52bb-4dae-971c-8c6236a4e563-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.982401 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f6a0777b-d04f-451b-9e3d-3f9ab0e19df8-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.982409 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2bee9ff9-52bb-4dae-971c-8c6236a4e563-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.982417 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3460a75e-3553-47b9-bfc5-39c2c459826e-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.982427 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3460a75e-3553-47b9-bfc5-39c2c459826e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.982435 4558 reconciler_common.go:293] "Volume 
detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2bee9ff9-52bb-4dae-971c-8c6236a4e563-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.982443 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.982453 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qszsc\" (UniqueName: \"kubernetes.io/projected/3460a75e-3553-47b9-bfc5-39c2c459826e-kube-api-access-qszsc\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.982464 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f6a0777b-d04f-451b-9e3d-3f9ab0e19df8-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:05 crc kubenswrapper[4558]: I0120 17:28:05.982473 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f6a0777b-d04f-451b-9e3d-3f9ab0e19df8-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:05 crc kubenswrapper[4558]: E0120 17:28:05.982560 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 17:28:05 crc kubenswrapper[4558]: E0120 17:28:05.982602 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/c67fda0a-394c-424a-ad72-f0e3ebd77f1b-operator-scripts podName:c67fda0a-394c-424a-ad72-f0e3ebd77f1b nodeName:}" failed. No retries permitted until 2026-01-20 17:28:07.982588506 +0000 UTC m=+2781.742926473 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/c67fda0a-394c-424a-ad72-f0e3ebd77f1b-operator-scripts") pod "keystone-db-create-rztt2" (UID: "c67fda0a-394c-424a-ad72-f0e3ebd77f1b") : configmap "openstack-scripts" not found Jan 20 17:28:05 crc kubenswrapper[4558]: E0120 17:28:05.985105 4558 projected.go:194] Error preparing data for projected volume kube-api-access-tp4lp for pod openstack-kuttl-tests/keystone-db-create-rztt2: failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 17:28:05 crc kubenswrapper[4558]: E0120 17:28:05.985149 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/c67fda0a-394c-424a-ad72-f0e3ebd77f1b-kube-api-access-tp4lp podName:c67fda0a-394c-424a-ad72-f0e3ebd77f1b nodeName:}" failed. No retries permitted until 2026-01-20 17:28:07.985136076 +0000 UTC m=+2781.745474042 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-tp4lp" (UniqueName: "kubernetes.io/projected/c67fda0a-394c-424a-ad72-f0e3ebd77f1b-kube-api-access-tp4lp") pod "keystone-db-create-rztt2" (UID: "c67fda0a-394c-424a-ad72-f0e3ebd77f1b") : failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:05.999364 4558 scope.go:117] "RemoveContainer" containerID="ca2fe5f683448e1b2e71f1f58bff793b5e9a3b0d6f6f324c37f3cdcce1707435" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.028505 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-a772-account-create-update-27cpc"] Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.028987 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-a772-account-create-update-27cpc"] Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.036222 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.043503 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.051183 4558 scope.go:117] "RemoveContainer" containerID="f2931b38008bccdd5209b99b03a91154a5b6e19f660a655b7d1fde4d2990be72" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.052285 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-af2b-account-create-update-x6tw5"] Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.054096 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3460a75e-3553-47b9-bfc5-39c2c459826e-memcached-tls-certs" (OuterVolumeSpecName: "memcached-tls-certs") pod "3460a75e-3553-47b9-bfc5-39c2c459826e" (UID: "3460a75e-3553-47b9-bfc5-39c2c459826e"). InnerVolumeSpecName "memcached-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.058914 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-af2b-account-create-update-x6tw5"] Jan 20 17:28:06 crc kubenswrapper[4558]: E0120 17:28:06.085026 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 17:28:06 crc kubenswrapper[4558]: E0120 17:28:06.085106 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/swift-proxy-config-data: secret "swift-proxy-config-data" not found Jan 20 17:28:06 crc kubenswrapper[4558]: E0120 17:28:06.085118 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5f7a0ad9-436f-433c-9a91-cec4ffd3beeb-operator-scripts podName:5f7a0ad9-436f-433c-9a91-cec4ffd3beeb nodeName:}" failed. No retries permitted until 2026-01-20 17:28:08.085095407 +0000 UTC m=+2781.845433373 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/5f7a0ad9-436f-433c-9a91-cec4ffd3beeb-operator-scripts") pod "glance-65cd-account-create-update-grtts" (UID: "5f7a0ad9-436f-433c-9a91-cec4ffd3beeb") : configmap "openstack-scripts" not found Jan 20 17:28:06 crc kubenswrapper[4558]: E0120 17:28:06.085200 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-swift-internal-svc: secret "cert-swift-internal-svc" not found Jan 20 17:28:06 crc kubenswrapper[4558]: E0120 17:28:06.085255 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-internal-tls-certs podName:4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5 nodeName:}" failed. No retries permitted until 2026-01-20 17:28:08.085240049 +0000 UTC m=+2781.845578016 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-internal-tls-certs") pod "swift-proxy-5f7fb7b-s2dxn" (UID: "4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5") : secret "cert-swift-internal-svc" not found Jan 20 17:28:06 crc kubenswrapper[4558]: E0120 17:28:06.085297 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:28:06 crc kubenswrapper[4558]: E0120 17:28:06.085329 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-combined-ca-bundle podName:4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5 nodeName:}" failed. No retries permitted until 2026-01-20 17:28:08.08531552 +0000 UTC m=+2781.845653487 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-combined-ca-bundle") pod "swift-proxy-5f7fb7b-s2dxn" (UID: "4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5") : secret "combined-ca-bundle" not found Jan 20 17:28:06 crc kubenswrapper[4558]: E0120 17:28:06.085359 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-swift-public-svc: secret "cert-swift-public-svc" not found Jan 20 17:28:06 crc kubenswrapper[4558]: E0120 17:28:06.085371 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-config-data podName:4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5 nodeName:}" failed. No retries permitted until 2026-01-20 17:28:08.085364663 +0000 UTC m=+2781.845702630 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-config-data") pod "swift-proxy-5f7fb7b-s2dxn" (UID: "4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5") : secret "swift-proxy-config-data" not found Jan 20 17:28:06 crc kubenswrapper[4558]: E0120 17:28:06.085386 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-public-tls-certs podName:4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5 nodeName:}" failed. No retries permitted until 2026-01-20 17:28:08.085380563 +0000 UTC m=+2781.845718530 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-public-tls-certs") pod "swift-proxy-5f7fb7b-s2dxn" (UID: "4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5") : secret "cert-swift-public-svc" not found Jan 20 17:28:06 crc kubenswrapper[4558]: E0120 17:28:06.085398 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 17:28:06 crc kubenswrapper[4558]: E0120 17:28:06.085419 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/1b7c6fb3-f30d-4dde-8502-e7d840719520-operator-scripts podName:1b7c6fb3-f30d-4dde-8502-e7d840719520 nodeName:}" failed. No retries permitted until 2026-01-20 17:28:08.08541089 +0000 UTC m=+2781.845748857 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/1b7c6fb3-f30d-4dde-8502-e7d840719520-operator-scripts") pod "glance-db-create-rt2fh" (UID: "1b7c6fb3-f30d-4dde-8502-e7d840719520") : configmap "openstack-scripts" not found Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.085359 4558 reconciler_common.go:293] "Volume detached for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/3460a75e-3553-47b9-bfc5-39c2c459826e-memcached-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:06 crc kubenswrapper[4558]: E0120 17:28:06.085431 4558 projected.go:263] Couldn't get secret openstack-kuttl-tests/swift-proxy-config-data: secret "swift-proxy-config-data" not found Jan 20 17:28:06 crc kubenswrapper[4558]: E0120 17:28:06.085441 4558 projected.go:263] Couldn't get secret openstack-kuttl-tests/swift-conf: secret "swift-conf" not found Jan 20 17:28:06 crc kubenswrapper[4558]: E0120 17:28:06.085450 4558 projected.go:288] Couldn't get configMap openstack-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 20 17:28:06 crc kubenswrapper[4558]: E0120 17:28:06.085461 4558 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack-kuttl-tests/swift-proxy-5f7fb7b-s2dxn: [secret "swift-proxy-config-data" not found, secret "swift-conf" not found, configmap "swift-ring-files" not found] Jan 20 17:28:06 crc kubenswrapper[4558]: E0120 17:28:06.085487 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-etc-swift podName:4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5 nodeName:}" failed. No retries permitted until 2026-01-20 17:28:08.085481453 +0000 UTC m=+2781.845819419 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-etc-swift") pod "swift-proxy-5f7fb7b-s2dxn" (UID: "4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5") : [secret "swift-proxy-config-data" not found, secret "swift-conf" not found, configmap "swift-ring-files" not found] Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.122500 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-worker-74d54f756c-g9zzk"] Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.135901 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-worker-74d54f756c-g9zzk"] Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.157638 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-8f4cc76c4-9qkpn"] Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.164218 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-8f4cc76c4-9qkpn"] Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.168992 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.177018 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.184284 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-43e3-account-create-update-fdlrr"] Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.187663 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b69d0136-9748-45bf-a9ed-d23e0180f1b4-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.187692 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4j88m\" (UniqueName: \"kubernetes.io/projected/b69d0136-9748-45bf-a9ed-d23e0180f1b4-kube-api-access-4j88m\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.188526 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-43e3-account-create-update-fdlrr"] Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.238388 4558 scope.go:117] "RemoveContainer" containerID="f2931b38008bccdd5209b99b03a91154a5b6e19f660a655b7d1fde4d2990be72" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.240887 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-dmtbg" Jan 20 17:28:06 crc kubenswrapper[4558]: E0120 17:28:06.240995 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f2931b38008bccdd5209b99b03a91154a5b6e19f660a655b7d1fde4d2990be72\": container with ID starting with f2931b38008bccdd5209b99b03a91154a5b6e19f660a655b7d1fde4d2990be72 not found: ID does not exist" containerID="f2931b38008bccdd5209b99b03a91154a5b6e19f660a655b7d1fde4d2990be72" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.241022 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f2931b38008bccdd5209b99b03a91154a5b6e19f660a655b7d1fde4d2990be72"} err="failed to get container status \"f2931b38008bccdd5209b99b03a91154a5b6e19f660a655b7d1fde4d2990be72\": rpc error: code = NotFound desc = could not find container \"f2931b38008bccdd5209b99b03a91154a5b6e19f660a655b7d1fde4d2990be72\": container with ID starting with f2931b38008bccdd5209b99b03a91154a5b6e19f660a655b7d1fde4d2990be72 not found: ID does not exist" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.241042 4558 scope.go:117] "RemoveContainer" containerID="405eba50b6723c4bbfd326989331668cadd5d5aa3aa6d3f082fbf7d791dda0f0" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.253022 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-db-create-j7682"] Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.275553 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/placement-db-create-j7682"] Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.294506 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9b28c41-c054-49f3-87d2-a8c15e6124de-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.294551 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa57dfc3-11b4-48af-a78c-00463e8894bf-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.396269 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-28p8x\" (UniqueName: \"kubernetes.io/projected/c2bfe108-b710-400b-baac-55815b192ee3-kube-api-access-28p8x\") pod \"c2bfe108-b710-400b-baac-55815b192ee3\" (UID: \"c2bfe108-b710-400b-baac-55815b192ee3\") " Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.396315 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c2bfe108-b710-400b-baac-55815b192ee3-operator-scripts\") pod \"c2bfe108-b710-400b-baac-55815b192ee3\" (UID: \"c2bfe108-b710-400b-baac-55815b192ee3\") " Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.397802 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c2bfe108-b710-400b-baac-55815b192ee3-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c2bfe108-b710-400b-baac-55815b192ee3" (UID: "c2bfe108-b710-400b-baac-55815b192ee3"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.398628 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c2bfe108-b710-400b-baac-55815b192ee3-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.418047 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c2bfe108-b710-400b-baac-55815b192ee3-kube-api-access-28p8x" (OuterVolumeSpecName: "kube-api-access-28p8x") pod "c2bfe108-b710-400b-baac-55815b192ee3" (UID: "c2bfe108-b710-400b-baac-55815b192ee3"). InnerVolumeSpecName "kube-api-access-28p8x". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.500823 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-28p8x\" (UniqueName: \"kubernetes.io/projected/c2bfe108-b710-400b-baac-55815b192ee3-kube-api-access-28p8x\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:06 crc kubenswrapper[4558]: E0120 17:28:06.570470 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="6d33b2d5d8fe60d82930b4864fabfe53d6a679fc712a4ba67d60a4da40feed96" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:28:06 crc kubenswrapper[4558]: E0120 17:28:06.571804 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="6d33b2d5d8fe60d82930b4864fabfe53d6a679fc712a4ba67d60a4da40feed96" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.572572 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-e949-account-create-update-qd847" Jan 20 17:28:06 crc kubenswrapper[4558]: E0120 17:28:06.573706 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="6d33b2d5d8fe60d82930b4864fabfe53d6a679fc712a4ba67d60a4da40feed96" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:28:06 crc kubenswrapper[4558]: E0120 17:28:06.573748 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="3973df1a-dd5a-417e-80a9-6653fb036470" containerName="nova-cell0-conductor-conductor" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.584060 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ac6a4e2-150b-4137-b547-33cbdd1137b7" path="/var/lib/kubelet/pods/3ac6a4e2-150b-4137-b547-33cbdd1137b7/volumes" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.584108 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-65cd-account-create-update-grtts" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.585068 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4430d3cb-0a1c-4266-9f69-cf8e817f1d3e" path="/var/lib/kubelet/pods/4430d3cb-0a1c-4266-9f69-cf8e817f1d3e/volumes" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.588123 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996" path="/var/lib/kubelet/pods/73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996/volumes" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.588944 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7b6a0803-befe-4426-9e2a-a04ac12f2d7c" path="/var/lib/kubelet/pods/7b6a0803-befe-4426-9e2a-a04ac12f2d7c/volumes" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.589508 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7ce0109d-f162-4ce7-b957-34ba84e8e377" path="/var/lib/kubelet/pods/7ce0109d-f162-4ce7-b957-34ba84e8e377/volumes" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.590485 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a2068ccb-0c0d-4b32-9063-082a4c395070" path="/var/lib/kubelet/pods/a2068ccb-0c0d-4b32-9063-082a4c395070/volumes" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.590930 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b69d0136-9748-45bf-a9ed-d23e0180f1b4" path="/var/lib/kubelet/pods/b69d0136-9748-45bf-a9ed-d23e0180f1b4/volumes" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.591479 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b9b28c41-c054-49f3-87d2-a8c15e6124de" path="/var/lib/kubelet/pods/b9b28c41-c054-49f3-87d2-a8c15e6124de/volumes" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.591851 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cc4089c6-71ea-4503-b54c-18777fcc3c48" path="/var/lib/kubelet/pods/cc4089c6-71ea-4503-b54c-18777fcc3c48/volumes" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.592854 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="df28560c-92e6-4003-a08f-b8691fa43300" path="/var/lib/kubelet/pods/df28560c-92e6-4003-a08f-b8691fa43300/volumes" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.594233 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e0d8717e-1693-4222-aebb-024d291cb0d5" path="/var/lib/kubelet/pods/e0d8717e-1693-4222-aebb-024d291cb0d5/volumes" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.594642 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e383e708-a471-4904-bfc7-eead6a5c76dc" path="/var/lib/kubelet/pods/e383e708-a471-4904-bfc7-eead6a5c76dc/volumes" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.595733 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fa57dfc3-11b4-48af-a78c-00463e8894bf" path="/var/lib/kubelet/pods/fa57dfc3-11b4-48af-a78c-00463e8894bf/volumes" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.633546 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-db-create-2kmst" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.707652 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zz9r4\" (UniqueName: \"kubernetes.io/projected/54457478-e4f4-4088-bd18-e427314e1fb2-kube-api-access-zz9r4\") pod \"54457478-e4f4-4088-bd18-e427314e1fb2\" (UID: \"54457478-e4f4-4088-bd18-e427314e1fb2\") " Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.707749 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/54457478-e4f4-4088-bd18-e427314e1fb2-operator-scripts\") pod \"54457478-e4f4-4088-bd18-e427314e1fb2\" (UID: \"54457478-e4f4-4088-bd18-e427314e1fb2\") " Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.707868 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kbpl7\" (UniqueName: \"kubernetes.io/projected/5f7a0ad9-436f-433c-9a91-cec4ffd3beeb-kube-api-access-kbpl7\") pod \"5f7a0ad9-436f-433c-9a91-cec4ffd3beeb\" (UID: \"5f7a0ad9-436f-433c-9a91-cec4ffd3beeb\") " Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.707971 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-95lbq\" (UniqueName: \"kubernetes.io/projected/bfb2b456-ae0e-42ca-a227-428a626f1e3e-kube-api-access-95lbq\") pod \"bfb2b456-ae0e-42ca-a227-428a626f1e3e\" (UID: \"bfb2b456-ae0e-42ca-a227-428a626f1e3e\") " Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.708023 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bfb2b456-ae0e-42ca-a227-428a626f1e3e-operator-scripts\") pod \"bfb2b456-ae0e-42ca-a227-428a626f1e3e\" (UID: \"bfb2b456-ae0e-42ca-a227-428a626f1e3e\") " Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.708075 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5f7a0ad9-436f-433c-9a91-cec4ffd3beeb-operator-scripts\") pod \"5f7a0ad9-436f-433c-9a91-cec4ffd3beeb\" (UID: \"5f7a0ad9-436f-433c-9a91-cec4ffd3beeb\") " Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.710881 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5f7a0ad9-436f-433c-9a91-cec4ffd3beeb-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "5f7a0ad9-436f-433c-9a91-cec4ffd3beeb" (UID: "5f7a0ad9-436f-433c-9a91-cec4ffd3beeb"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.710974 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bfb2b456-ae0e-42ca-a227-428a626f1e3e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "bfb2b456-ae0e-42ca-a227-428a626f1e3e" (UID: "bfb2b456-ae0e-42ca-a227-428a626f1e3e"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.711549 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/54457478-e4f4-4088-bd18-e427314e1fb2-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "54457478-e4f4-4088-bd18-e427314e1fb2" (UID: "54457478-e4f4-4088-bd18-e427314e1fb2"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.713326 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bfb2b456-ae0e-42ca-a227-428a626f1e3e-kube-api-access-95lbq" (OuterVolumeSpecName: "kube-api-access-95lbq") pod "bfb2b456-ae0e-42ca-a227-428a626f1e3e" (UID: "bfb2b456-ae0e-42ca-a227-428a626f1e3e"). InnerVolumeSpecName "kube-api-access-95lbq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.714527 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-db-create-pxxqs" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.714767 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f7a0ad9-436f-433c-9a91-cec4ffd3beeb-kube-api-access-kbpl7" (OuterVolumeSpecName: "kube-api-access-kbpl7") pod "5f7a0ad9-436f-433c-9a91-cec4ffd3beeb" (UID: "5f7a0ad9-436f-433c-9a91-cec4ffd3beeb"). InnerVolumeSpecName "kube-api-access-kbpl7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.720871 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/54457478-e4f4-4088-bd18-e427314e1fb2-kube-api-access-zz9r4" (OuterVolumeSpecName: "kube-api-access-zz9r4") pod "54457478-e4f4-4088-bd18-e427314e1fb2" (UID: "54457478-e4f4-4088-bd18-e427314e1fb2"). InnerVolumeSpecName "kube-api-access-zz9r4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.720985 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-b136-account-create-update-n9qdv" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.729448 4558 generic.go:334] "Generic (PLEG): container finished" podID="6bc8ca35-f460-4c4d-9dbe-0012c552371a" containerID="3d044e6540417b17b7e29b9e6b94aac224f6dc6f6746f710160907736ae8f719" exitCode=0 Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.729506 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"6bc8ca35-f460-4c4d-9dbe-0012c552371a","Type":"ContainerDied","Data":"3d044e6540417b17b7e29b9e6b94aac224f6dc6f6746f710160907736ae8f719"} Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.736678 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-65cd-account-create-update-grtts" event={"ID":"5f7a0ad9-436f-433c-9a91-cec4ffd3beeb","Type":"ContainerDied","Data":"7da3bfbb5909f35367f1490c833f509488197456e90229fef8bc430474608c7a"} Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.737395 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-65cd-account-create-update-grtts" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.756325 4558 generic.go:334] "Generic (PLEG): container finished" podID="e81d2508-e298-463c-9031-b9d8e486d566" containerID="c06768c089cc106f807b1405688b9c79fd40a3d43e579f7beb4c7d8ba6ce7297" exitCode=0 Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.756619 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"e81d2508-e298-463c-9031-b9d8e486d566","Type":"ContainerDied","Data":"c06768c089cc106f807b1405688b9c79fd40a3d43e579f7beb4c7d8ba6ce7297"} Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.761556 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-dmtbg" event={"ID":"c2bfe108-b710-400b-baac-55815b192ee3","Type":"ContainerDied","Data":"cd5e3f650786565f023c1017ec96f62e35bc2e5721e6093ac2aec847b1e311a1"} Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.761671 4558 scope.go:117] "RemoveContainer" containerID="b9a71508666d31c13efe2453ce42664d694e3c0db82c6d7b70071f319d25a051" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.761930 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-dmtbg" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.787974 4558 generic.go:334] "Generic (PLEG): container finished" podID="66e24f38-a98c-4444-8ee4-352266267985" containerID="41bd033877c5a7cf1ffe2c31818a011adc08dd537bfdb844181969248bd15676" exitCode=0 Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.788037 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-55d4bc664d-k82t9" event={"ID":"66e24f38-a98c-4444-8ee4-352266267985","Type":"ContainerDied","Data":"41bd033877c5a7cf1ffe2c31818a011adc08dd537bfdb844181969248bd15676"} Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.788060 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-55d4bc664d-k82t9" event={"ID":"66e24f38-a98c-4444-8ee4-352266267985","Type":"ContainerDied","Data":"935ddac25769761da4ad627f69a5c0a915bcc758bfd33fa74ab5eaa5b85a8844"} Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.788073 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="935ddac25769761da4ad627f69a5c0a915bcc758bfd33fa74ab5eaa5b85a8844" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.790300 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-create-pxxqs" event={"ID":"ed3c56f1-4bbb-4590-8b19-a0de467537ad","Type":"ContainerDied","Data":"339ec500ad84a2e36b862d84cdd2a820a026b14a6aad6e3de83dfd5ca99a848f"} Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.790401 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-db-create-pxxqs" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.793481 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/swift-proxy-5f7fb7b-s2dxn" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.794482 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-dmtbg"] Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.797205 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"f81a00ad-36ce-4383-9e91-fe60de6939d2","Type":"ContainerDied","Data":"a796780243dd9747fb2317729a320ad77fb6a4148e29c322d469cddf921fb156"} Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.797232 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.808904 4558 scope.go:117] "RemoveContainer" containerID="c3560a1d46ef53613df33e490314cf902b5ef6e4850d07cac0cdb608195a48df" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.809145 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-55d4bc664d-k82t9" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.813815 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/66e24f38-a98c-4444-8ee4-352266267985-public-tls-certs\") pod \"66e24f38-a98c-4444-8ee4-352266267985\" (UID: \"66e24f38-a98c-4444-8ee4-352266267985\") " Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.813889 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-log-httpd\") pod \"4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5\" (UID: \"4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5\") " Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.814052 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ptc4f\" (UniqueName: \"kubernetes.io/projected/66e24f38-a98c-4444-8ee4-352266267985-kube-api-access-ptc4f\") pod \"66e24f38-a98c-4444-8ee4-352266267985\" (UID: \"66e24f38-a98c-4444-8ee4-352266267985\") " Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.815215 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5" (UID: "4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.816642 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-public-tls-certs\") pod \"4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5\" (UID: \"4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5\") " Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.816683 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/66e24f38-a98c-4444-8ee4-352266267985-config-data\") pod \"66e24f38-a98c-4444-8ee4-352266267985\" (UID: \"66e24f38-a98c-4444-8ee4-352266267985\") " Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.816732 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4sp5s\" (UniqueName: \"kubernetes.io/projected/ed3c56f1-4bbb-4590-8b19-a0de467537ad-kube-api-access-4sp5s\") pod \"ed3c56f1-4bbb-4590-8b19-a0de467537ad\" (UID: \"ed3c56f1-4bbb-4590-8b19-a0de467537ad\") " Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.816781 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-combined-ca-bundle\") pod \"4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5\" (UID: \"4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5\") " Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.818650 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ed3c56f1-4bbb-4590-8b19-a0de467537ad-operator-scripts\") pod \"ed3c56f1-4bbb-4590-8b19-a0de467537ad\" (UID: \"ed3c56f1-4bbb-4590-8b19-a0de467537ad\") " Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.818715 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1bcd505c-4754-4f41-b91d-6e488a669c93-operator-scripts\") pod \"1bcd505c-4754-4f41-b91d-6e488a669c93\" (UID: \"1bcd505c-4754-4f41-b91d-6e488a669c93\") " Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.818796 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/66e24f38-a98c-4444-8ee4-352266267985-internal-tls-certs\") pod \"66e24f38-a98c-4444-8ee4-352266267985\" (UID: \"66e24f38-a98c-4444-8ee4-352266267985\") " Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.818818 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-run-httpd\") pod \"4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5\" (UID: \"4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5\") " Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.818864 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhww5\" (UniqueName: \"kubernetes.io/projected/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-kube-api-access-jhww5\") pod \"4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5\" (UID: \"4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5\") " Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.818906 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-internal-tls-certs\") pod 
\"4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5\" (UID: \"4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5\") " Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.818932 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/66e24f38-a98c-4444-8ee4-352266267985-logs\") pod \"66e24f38-a98c-4444-8ee4-352266267985\" (UID: \"66e24f38-a98c-4444-8ee4-352266267985\") " Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.818951 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/66e24f38-a98c-4444-8ee4-352266267985-scripts\") pod \"66e24f38-a98c-4444-8ee4-352266267985\" (UID: \"66e24f38-a98c-4444-8ee4-352266267985\") " Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.818969 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c8k9g\" (UniqueName: \"kubernetes.io/projected/1bcd505c-4754-4f41-b91d-6e488a669c93-kube-api-access-c8k9g\") pod \"1bcd505c-4754-4f41-b91d-6e488a669c93\" (UID: \"1bcd505c-4754-4f41-b91d-6e488a669c93\") " Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.818998 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-etc-swift\") pod \"4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5\" (UID: \"4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5\") " Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.819744 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/54457478-e4f4-4088-bd18-e427314e1fb2-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.819760 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kbpl7\" (UniqueName: \"kubernetes.io/projected/5f7a0ad9-436f-433c-9a91-cec4ffd3beeb-kube-api-access-kbpl7\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.819783 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.819792 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-95lbq\" (UniqueName: \"kubernetes.io/projected/bfb2b456-ae0e-42ca-a227-428a626f1e3e-kube-api-access-95lbq\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.819802 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/bfb2b456-ae0e-42ca-a227-428a626f1e3e-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.819811 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5f7a0ad9-436f-433c-9a91-cec4ffd3beeb-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.819819 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zz9r4\" (UniqueName: \"kubernetes.io/projected/54457478-e4f4-4088-bd18-e427314e1fb2-kube-api-access-zz9r4\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.822742 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/projected/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5" (UID: "4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.834329 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/66e24f38-a98c-4444-8ee4-352266267985-kube-api-access-ptc4f" (OuterVolumeSpecName: "kube-api-access-ptc4f") pod "66e24f38-a98c-4444-8ee4-352266267985" (UID: "66e24f38-a98c-4444-8ee4-352266267985"). InnerVolumeSpecName "kube-api-access-ptc4f". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.837264 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bcd505c-4754-4f41-b91d-6e488a669c93-kube-api-access-c8k9g" (OuterVolumeSpecName: "kube-api-access-c8k9g") pod "1bcd505c-4754-4f41-b91d-6e488a669c93" (UID: "1bcd505c-4754-4f41-b91d-6e488a669c93"). InnerVolumeSpecName "kube-api-access-c8k9g". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.837650 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-dmtbg"] Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.837890 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5" (UID: "4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.839619 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66e24f38-a98c-4444-8ee4-352266267985-scripts" (OuterVolumeSpecName: "scripts") pod "66e24f38-a98c-4444-8ee4-352266267985" (UID: "66e24f38-a98c-4444-8ee4-352266267985"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.839976 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/66e24f38-a98c-4444-8ee4-352266267985-logs" (OuterVolumeSpecName: "logs") pod "66e24f38-a98c-4444-8ee4-352266267985" (UID: "66e24f38-a98c-4444-8ee4-352266267985"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.840427 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ed3c56f1-4bbb-4590-8b19-a0de467537ad-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ed3c56f1-4bbb-4590-8b19-a0de467537ad" (UID: "ed3c56f1-4bbb-4590-8b19-a0de467537ad"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.843810 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bcd505c-4754-4f41-b91d-6e488a669c93-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "1bcd505c-4754-4f41-b91d-6e488a669c93" (UID: "1bcd505c-4754-4f41-b91d-6e488a669c93"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.844569 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-65cd-account-create-update-grtts"] Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.845850 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ed3c56f1-4bbb-4590-8b19-a0de467537ad-kube-api-access-4sp5s" (OuterVolumeSpecName: "kube-api-access-4sp5s") pod "ed3c56f1-4bbb-4590-8b19-a0de467537ad" (UID: "ed3c56f1-4bbb-4590-8b19-a0de467537ad"). InnerVolumeSpecName "kube-api-access-4sp5s". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.852021 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-65cd-account-create-update-grtts"] Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.856698 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.857060 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-kube-api-access-jhww5" (OuterVolumeSpecName: "kube-api-access-jhww5") pod "4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5" (UID: "4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5"). InnerVolumeSpecName "kube-api-access-jhww5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.862843 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.875389 4558 generic.go:334] "Generic (PLEG): container finished" podID="88b73d7f-ee50-4fdf-9fd7-37296c855d69" containerID="a17a87967f20a77dfc9f8a5084d14c3b4b2b006c99d8e894a2ad7bcef9243b71" exitCode=0 Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.875481 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"88b73d7f-ee50-4fdf-9fd7-37296c855d69","Type":"ContainerDied","Data":"a17a87967f20a77dfc9f8a5084d14c3b4b2b006c99d8e894a2ad7bcef9243b71"} Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.889747 4558 scope.go:117] "RemoveContainer" containerID="77c882047f115c6a4c56a4e460c8836ce62f0179639cdd46fd3f9af7f7edbd88" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.893851 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-e949-account-create-update-qd847" event={"ID":"54457478-e4f4-4088-bd18-e427314e1fb2","Type":"ContainerDied","Data":"c125669d0d4932ece229fc505a1e8ff7c92a49f2cc4899179e3895b79383b019"} Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.893873 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-e949-account-create-update-qd847" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.896551 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.896591 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.897574 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"3460a75e-3553-47b9-bfc5-39c2c459826e","Type":"ContainerDied","Data":"dad15fc6c1dbccd4219a6824cf8ca4a16fb35b4cb0b75bc8ad332e140bc258b8"} Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.897591 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.905927 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-create-2kmst" event={"ID":"bfb2b456-ae0e-42ca-a227-428a626f1e3e","Type":"ContainerDied","Data":"7ca25b484d83aa301dba93a3c721ced69437a083aca71309d616a6dc5b6b8f42"} Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.906002 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-db-create-2kmst" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.915778 4558 generic.go:334] "Generic (PLEG): container finished" podID="592927c6-9b42-411a-8aec-40cf6183e32a" containerID="981127416a83b8463012a21079019f3fb0205f2a34d9b217c94d3c09ae42bc02" exitCode=0 Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.915930 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.916146 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"592927c6-9b42-411a-8aec-40cf6183e32a","Type":"ContainerDied","Data":"981127416a83b8463012a21079019f3fb0205f2a34d9b217c94d3c09ae42bc02"} Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.917894 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/barbican-api-84969c976-hhcrx" podUID="184a9f2b-eab7-4221-a900-746e063662a8" containerName="barbican-api" probeResult="failure" output="Get \"https://10.217.1.90:9311/healthcheck\": read tcp 10.217.0.2:53986->10.217.1.90:9311: read: connection reset by peer" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.917917 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/barbican-api-84969c976-hhcrx" podUID="184a9f2b-eab7-4221-a900-746e063662a8" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.1.90:9311/healthcheck\": read tcp 10.217.0.2:53978->10.217.1.90:9311: read: connection reset by peer" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.920495 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/592927c6-9b42-411a-8aec-40cf6183e32a-public-tls-certs\") pod \"592927c6-9b42-411a-8aec-40cf6183e32a\" (UID: \"592927c6-9b42-411a-8aec-40cf6183e32a\") " Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.920616 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"592927c6-9b42-411a-8aec-40cf6183e32a\" (UID: \"592927c6-9b42-411a-8aec-40cf6183e32a\") " Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.920659 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8g5mb\" (UniqueName: 
\"kubernetes.io/projected/592927c6-9b42-411a-8aec-40cf6183e32a-kube-api-access-8g5mb\") pod \"592927c6-9b42-411a-8aec-40cf6183e32a\" (UID: \"592927c6-9b42-411a-8aec-40cf6183e32a\") " Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.920880 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6bc8ca35-f460-4c4d-9dbe-0012c552371a-combined-ca-bundle\") pod \"6bc8ca35-f460-4c4d-9dbe-0012c552371a\" (UID: \"6bc8ca35-f460-4c4d-9dbe-0012c552371a\") " Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.920910 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/6bc8ca35-f460-4c4d-9dbe-0012c552371a-config-data-generated\") pod \"6bc8ca35-f460-4c4d-9dbe-0012c552371a\" (UID: \"6bc8ca35-f460-4c4d-9dbe-0012c552371a\") " Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.920934 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66e24f38-a98c-4444-8ee4-352266267985-combined-ca-bundle\") pod \"66e24f38-a98c-4444-8ee4-352266267985\" (UID: \"66e24f38-a98c-4444-8ee4-352266267985\") " Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.920961 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-config-data\") pod \"4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5\" (UID: \"4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5\") " Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.921011 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-swswb\" (UniqueName: \"kubernetes.io/projected/6bc8ca35-f460-4c4d-9dbe-0012c552371a-kube-api-access-swswb\") pod \"6bc8ca35-f460-4c4d-9dbe-0012c552371a\" (UID: \"6bc8ca35-f460-4c4d-9dbe-0012c552371a\") " Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.921041 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/592927c6-9b42-411a-8aec-40cf6183e32a-httpd-run\") pod \"592927c6-9b42-411a-8aec-40cf6183e32a\" (UID: \"592927c6-9b42-411a-8aec-40cf6183e32a\") " Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.921056 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage17-crc\") pod \"6bc8ca35-f460-4c4d-9dbe-0012c552371a\" (UID: \"6bc8ca35-f460-4c4d-9dbe-0012c552371a\") " Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.921078 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/6bc8ca35-f460-4c4d-9dbe-0012c552371a-kolla-config\") pod \"6bc8ca35-f460-4c4d-9dbe-0012c552371a\" (UID: \"6bc8ca35-f460-4c4d-9dbe-0012c552371a\") " Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.921104 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/592927c6-9b42-411a-8aec-40cf6183e32a-combined-ca-bundle\") pod \"592927c6-9b42-411a-8aec-40cf6183e32a\" (UID: \"592927c6-9b42-411a-8aec-40cf6183e32a\") " Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.921140 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/6bc8ca35-f460-4c4d-9dbe-0012c552371a-galera-tls-certs\") pod \"6bc8ca35-f460-4c4d-9dbe-0012c552371a\" (UID: \"6bc8ca35-f460-4c4d-9dbe-0012c552371a\") " Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.921208 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/592927c6-9b42-411a-8aec-40cf6183e32a-scripts\") pod \"592927c6-9b42-411a-8aec-40cf6183e32a\" (UID: \"592927c6-9b42-411a-8aec-40cf6183e32a\") " Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.921233 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/6bc8ca35-f460-4c4d-9dbe-0012c552371a-config-data-default\") pod \"6bc8ca35-f460-4c4d-9dbe-0012c552371a\" (UID: \"6bc8ca35-f460-4c4d-9dbe-0012c552371a\") " Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.921265 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/592927c6-9b42-411a-8aec-40cf6183e32a-config-data\") pod \"592927c6-9b42-411a-8aec-40cf6183e32a\" (UID: \"592927c6-9b42-411a-8aec-40cf6183e32a\") " Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.921284 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6bc8ca35-f460-4c4d-9dbe-0012c552371a-operator-scripts\") pod \"6bc8ca35-f460-4c4d-9dbe-0012c552371a\" (UID: \"6bc8ca35-f460-4c4d-9dbe-0012c552371a\") " Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.921335 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/592927c6-9b42-411a-8aec-40cf6183e32a-logs\") pod \"592927c6-9b42-411a-8aec-40cf6183e32a\" (UID: \"592927c6-9b42-411a-8aec-40cf6183e32a\") " Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.921447 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6bc8ca35-f460-4c4d-9dbe-0012c552371a-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "6bc8ca35-f460-4c4d-9dbe-0012c552371a" (UID: "6bc8ca35-f460-4c4d-9dbe-0012c552371a"). InnerVolumeSpecName "config-data-generated". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.921790 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ed3c56f1-4bbb-4590-8b19-a0de467537ad-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.921826 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1bcd505c-4754-4f41-b91d-6e488a669c93-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.921838 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.921849 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhww5\" (UniqueName: \"kubernetes.io/projected/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-kube-api-access-jhww5\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.921859 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/66e24f38-a98c-4444-8ee4-352266267985-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.921868 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/66e24f38-a98c-4444-8ee4-352266267985-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.921876 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c8k9g\" (UniqueName: \"kubernetes.io/projected/1bcd505c-4754-4f41-b91d-6e488a669c93-kube-api-access-c8k9g\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.921909 4558 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.921920 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/6bc8ca35-f460-4c4d-9dbe-0012c552371a-config-data-generated\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.921930 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ptc4f\" (UniqueName: \"kubernetes.io/projected/66e24f38-a98c-4444-8ee4-352266267985-kube-api-access-ptc4f\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.921940 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4sp5s\" (UniqueName: \"kubernetes.io/projected/ed3c56f1-4bbb-4590-8b19-a0de467537ad-kube-api-access-4sp5s\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.921991 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5" (UID: "4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.922322 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/592927c6-9b42-411a-8aec-40cf6183e32a-logs" (OuterVolumeSpecName: "logs") pod "592927c6-9b42-411a-8aec-40cf6183e32a" (UID: "592927c6-9b42-411a-8aec-40cf6183e32a"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.922975 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5" (UID: "4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.923297 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/592927c6-9b42-411a-8aec-40cf6183e32a-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "592927c6-9b42-411a-8aec-40cf6183e32a" (UID: "592927c6-9b42-411a-8aec-40cf6183e32a"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.923739 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6bc8ca35-f460-4c4d-9dbe-0012c552371a-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "6bc8ca35-f460-4c4d-9dbe-0012c552371a" (UID: "6bc8ca35-f460-4c4d-9dbe-0012c552371a"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.923787 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6bc8ca35-f460-4c4d-9dbe-0012c552371a-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "6bc8ca35-f460-4c4d-9dbe-0012c552371a" (UID: "6bc8ca35-f460-4c4d-9dbe-0012c552371a"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.928761 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6bc8ca35-f460-4c4d-9dbe-0012c552371a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "6bc8ca35-f460-4c4d-9dbe-0012c552371a" (UID: "6bc8ca35-f460-4c4d-9dbe-0012c552371a"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.928874 4558 scope.go:117] "RemoveContainer" containerID="758a745a0a805787f036d55713b5a5b3c51cd6e6c91d9d3304c4dcf394fa055b" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.938987 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6bc8ca35-f460-4c4d-9dbe-0012c552371a-kube-api-access-swswb" (OuterVolumeSpecName: "kube-api-access-swswb") pod "6bc8ca35-f460-4c4d-9dbe-0012c552371a" (UID: "6bc8ca35-f460-4c4d-9dbe-0012c552371a"). InnerVolumeSpecName "kube-api-access-swswb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.940715 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-lgbfw" event={"ID":"318ab596-0189-4f84-b805-f28bd05824e1","Type":"ContainerStarted","Data":"1fd793f5ed701b177f6d6abcb3b5a69703e396d894ac6e3369bf9b47b38eea48"} Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.942268 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-lgbfw" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.944046 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage14-crc" (OuterVolumeSpecName: "glance") pod "592927c6-9b42-411a-8aec-40cf6183e32a" (UID: "592927c6-9b42-411a-8aec-40cf6183e32a"). InnerVolumeSpecName "local-storage14-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.948626 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/592927c6-9b42-411a-8aec-40cf6183e32a-kube-api-access-8g5mb" (OuterVolumeSpecName: "kube-api-access-8g5mb") pod "592927c6-9b42-411a-8aec-40cf6183e32a" (UID: "592927c6-9b42-411a-8aec-40cf6183e32a"). InnerVolumeSpecName "kube-api-access-8g5mb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.957280 4558 generic.go:334] "Generic (PLEG): container finished" podID="4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5" containerID="5a07cb607c001f9ab3a834dea57d84934f1fe9dff82a41e28812e0217fe09cff" exitCode=0 Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.957295 4558 generic.go:334] "Generic (PLEG): container finished" podID="4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5" containerID="bcc4b197f2fa6bbcecefb3da95a4d96c30060a9d1504ae2efef0a52dc6bcf098" exitCode=0 Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.957387 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/swift-proxy-5f7fb7b-s2dxn" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.957597 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-5f7fb7b-s2dxn" event={"ID":"4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5","Type":"ContainerDied","Data":"5a07cb607c001f9ab3a834dea57d84934f1fe9dff82a41e28812e0217fe09cff"} Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.957613 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-5f7fb7b-s2dxn" event={"ID":"4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5","Type":"ContainerDied","Data":"bcc4b197f2fa6bbcecefb3da95a4d96c30060a9d1504ae2efef0a52dc6bcf098"} Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.957623 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-5f7fb7b-s2dxn" event={"ID":"4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5","Type":"ContainerDied","Data":"a2a700bf0185bdf7350481166679a9ad59dffb201ae25abadc2103dd55e220aa"} Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.960299 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"f6a0777b-d04f-451b-9e3d-3f9ab0e19df8","Type":"ContainerDied","Data":"1a0e63afdcf6b78dbb9d540f12c0b505d916fc6d624ef932399ee4f3c33394cb"} Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.960407 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.960574 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/592927c6-9b42-411a-8aec-40cf6183e32a-scripts" (OuterVolumeSpecName: "scripts") pod "592927c6-9b42-411a-8aec-40cf6183e32a" (UID: "592927c6-9b42-411a-8aec-40cf6183e32a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.962405 4558 generic.go:334] "Generic (PLEG): container finished" podID="c1afb831-f3e4-4356-ab86-713eb0beca39" containerID="4073b49237446b10090528b017b317faf894679a9662161b1fb6fedebcfc14ed" exitCode=0 Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.962495 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"c1afb831-f3e4-4356-ab86-713eb0beca39","Type":"ContainerDied","Data":"4073b49237446b10090528b017b317faf894679a9662161b1fb6fedebcfc14ed"} Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.964892 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.965535 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-b136-account-create-update-n9qdv" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.966190 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-64c1-account-create-update-wsq7z" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.968485 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-b136-account-create-update-n9qdv" event={"ID":"1bcd505c-4754-4f41-b91d-6e488a669c93","Type":"ContainerDied","Data":"84e60af078ecb3aa4c7a8e920a31923950c7f5a488c23a929fc9e13a11f6efe7"} Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.968497 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-7ab2-account-create-update-c5qqm" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.968544 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-proxy-7459567f99-gx6dr" Jan 20 17:28:06 crc kubenswrapper[4558]: I0120 17:28:06.968733 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-db-create-rztt2" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.009340 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5" (UID: "4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.032482 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-swswb\" (UniqueName: \"kubernetes.io/projected/6bc8ca35-f460-4c4d-9dbe-0012c552371a-kube-api-access-swswb\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.032589 4558 scope.go:117] "RemoveContainer" containerID="c8a5e9bf9d8e1a8dad3f6f73cdf354545ce12575fe3b33bce155c23769a44f3e" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.032597 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/592927c6-9b42-411a-8aec-40cf6183e32a-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.032786 4558 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/6bc8ca35-f460-4c4d-9dbe-0012c552371a-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.032808 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.032821 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/592927c6-9b42-411a-8aec-40cf6183e32a-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.032834 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/6bc8ca35-f460-4c4d-9dbe-0012c552371a-config-data-default\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.032852 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 
17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.032867 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6bc8ca35-f460-4c4d-9dbe-0012c552371a-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.032880 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/592927c6-9b42-411a-8aec-40cf6183e32a-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.032916 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") on node \"crc\" " Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.032928 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8g5mb\" (UniqueName: \"kubernetes.io/projected/592927c6-9b42-411a-8aec-40cf6183e32a-kube-api-access-8g5mb\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.032941 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.033317 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-db-create-2kmst"] Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.048446 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-db-create-2kmst"] Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.079527 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage17-crc" (OuterVolumeSpecName: "mysql-db") pod "6bc8ca35-f460-4c4d-9dbe-0012c552371a" (UID: "6bc8ca35-f460-4c4d-9dbe-0012c552371a"). InnerVolumeSpecName "local-storage17-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.133956 4558 scope.go:117] "RemoveContainer" containerID="0988bc018c883e0a93655e8a6aa66b272e95530ac909536051e552811ce17949" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.134582 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage17-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage17-crc\") on node \"crc\" " Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.151351 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6bc8ca35-f460-4c4d-9dbe-0012c552371a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6bc8ca35-f460-4c4d-9dbe-0012c552371a" (UID: "6bc8ca35-f460-4c4d-9dbe-0012c552371a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.154783 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-e949-account-create-update-qd847"] Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.161090 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66e24f38-a98c-4444-8ee4-352266267985-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "66e24f38-a98c-4444-8ee4-352266267985" (UID: "66e24f38-a98c-4444-8ee4-352266267985"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.163303 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-e949-account-create-update-qd847"] Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.170470 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.185542 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-lgbfw" podStartSLOduration=6.185526292 podStartE2EDuration="6.185526292s" podCreationTimestamp="2026-01-20 17:28:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:28:07.024118934 +0000 UTC m=+2780.784456901" watchObservedRunningTime="2026-01-20 17:28:07.185526292 +0000 UTC m=+2780.945864259" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.188993 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.206762 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-7ab2-account-create-update-c5qqm"] Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.211257 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/placement-7ab2-account-create-update-c5qqm"] Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.223471 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66e24f38-a98c-4444-8ee4-352266267985-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "66e24f38-a98c-4444-8ee4-352266267985" (UID: "66e24f38-a98c-4444-8ee4-352266267985"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.229570 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-64c1-account-create-update-wsq7z"] Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.231066 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/ceilometer-0" podUID="134fc5ef-70be-4c8c-aa72-e4f0440d0afe" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.1.81:3000/\": dial tcp 10.217.1.81:3000: connect: connection refused" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.232651 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-64c1-account-create-update-wsq7z"] Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.236470 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage14-crc" (UniqueName: "kubernetes.io/local-volume/local-storage14-crc") on node "crc" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.236519 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6bc8ca35-f460-4c4d-9dbe-0012c552371a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.236541 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66e24f38-a98c-4444-8ee4-352266267985-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.236554 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.236628 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/66e24f38-a98c-4444-8ee4-352266267985-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.237896 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/592927c6-9b42-411a-8aec-40cf6183e32a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "592927c6-9b42-411a-8aec-40cf6183e32a" (UID: "592927c6-9b42-411a-8aec-40cf6183e32a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.238359 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.245438 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.249869 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/592927c6-9b42-411a-8aec-40cf6183e32a-config-data" (OuterVolumeSpecName: "config-data") pod "592927c6-9b42-411a-8aec-40cf6183e32a" (UID: "592927c6-9b42-411a-8aec-40cf6183e32a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.251705 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-proxy-7459567f99-gx6dr"] Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.257510 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/swift-proxy-7459567f99-gx6dr"] Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.260599 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/592927c6-9b42-411a-8aec-40cf6183e32a-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "592927c6-9b42-411a-8aec-40cf6183e32a" (UID: "592927c6-9b42-411a-8aec-40cf6183e32a"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.276498 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-db-create-rztt2"] Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.276538 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-db-create-rztt2"] Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.278706 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.278980 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66e24f38-a98c-4444-8ee4-352266267985-config-data" (OuterVolumeSpecName: "config-data") pod "66e24f38-a98c-4444-8ee4-352266267985" (UID: "66e24f38-a98c-4444-8ee4-352266267985"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.282900 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.287100 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66e24f38-a98c-4444-8ee4-352266267985-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "66e24f38-a98c-4444-8ee4-352266267985" (UID: "66e24f38-a98c-4444-8ee4-352266267985"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.290247 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6bc8ca35-f460-4c4d-9dbe-0012c552371a-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "6bc8ca35-f460-4c4d-9dbe-0012c552371a" (UID: "6bc8ca35-f460-4c4d-9dbe-0012c552371a"). InnerVolumeSpecName "galera-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.291766 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage17-crc" (UniqueName: "kubernetes.io/local-volume/local-storage17-crc") on node "crc" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.296964 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-b136-account-create-update-n9qdv"] Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.301570 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-b136-account-create-update-n9qdv"] Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.312765 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-config-data" (OuterVolumeSpecName: "config-data") pod "4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5" (UID: "4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.342728 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage17-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage17-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.342754 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/592927c6-9b42-411a-8aec-40cf6183e32a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.342766 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/66e24f38-a98c-4444-8ee4-352266267985-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.342787 4558 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/6bc8ca35-f460-4c4d-9dbe-0012c552371a-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.342797 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/012246ba-362c-48f3-bbb1-913d57f1f9ed-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.342806 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/592927c6-9b42-411a-8aec-40cf6183e32a-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.342815 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/592927c6-9b42-411a-8aec-40cf6183e32a-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.342824 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/66e24f38-a98c-4444-8ee4-352266267985-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.342834 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tp4lp\" (UniqueName: \"kubernetes.io/projected/c67fda0a-394c-424a-ad72-f0e3ebd77f1b-kube-api-access-tp4lp\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.342843 4558 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xfskp\" (UniqueName: \"kubernetes.io/projected/012246ba-362c-48f3-bbb1-913d57f1f9ed-kube-api-access-xfskp\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.342853 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c67fda0a-394c-424a-ad72-f0e3ebd77f1b-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.342861 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.351024 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/neutron-665b4c9c8d-9tcx6" podUID="6dc46db7-9948-4c7d-b0a3-c767ffd58b4a" containerName="neutron-httpd" probeResult="failure" output="Get \"https://10.217.1.82:9696/\": dial tcp 10.217.1.82:9696: connect: connection refused" Jan 20 17:28:07 crc kubenswrapper[4558]: E0120 17:28:07.444705 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 20 17:28:07 crc kubenswrapper[4558]: E0120 17:28:07.444789 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/d692722b-f7fd-447c-8b7a-f56cff940d91-config-data podName:d692722b-f7fd-447c-8b7a-f56cff940d91 nodeName:}" failed. No retries permitted until 2026-01-20 17:28:15.444761464 +0000 UTC m=+2789.205099431 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/d692722b-f7fd-447c-8b7a-f56cff940d91-config-data") pod "rabbitmq-server-0" (UID: "d692722b-f7fd-447c-8b7a-f56cff940d91") : configmap "rabbitmq-config-data" not found Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.467797 4558 scope.go:117] "RemoveContainer" containerID="981127416a83b8463012a21079019f3fb0205f2a34d9b217c94d3c09ae42bc02" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.493468 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.499521 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.506023 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-api-84969c976-hhcrx" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.512432 4558 scope.go:117] "RemoveContainer" containerID="f60fe058ea93021d1c1c3c3d2912da2aaabf83d63fd9e5d23a8a1c05335b6abd" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.548131 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8mwb8\" (UniqueName: \"kubernetes.io/projected/88b73d7f-ee50-4fdf-9fd7-37296c855d69-kube-api-access-8mwb8\") pod \"88b73d7f-ee50-4fdf-9fd7-37296c855d69\" (UID: \"88b73d7f-ee50-4fdf-9fd7-37296c855d69\") " Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.548216 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/184a9f2b-eab7-4221-a900-746e063662a8-combined-ca-bundle\") pod \"184a9f2b-eab7-4221-a900-746e063662a8\" (UID: \"184a9f2b-eab7-4221-a900-746e063662a8\") " Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.548327 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"e81d2508-e298-463c-9031-b9d8e486d566\" (UID: \"e81d2508-e298-463c-9031-b9d8e486d566\") " Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.548345 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-spmwn\" (UniqueName: \"kubernetes.io/projected/e81d2508-e298-463c-9031-b9d8e486d566-kube-api-access-spmwn\") pod \"e81d2508-e298-463c-9031-b9d8e486d566\" (UID: \"e81d2508-e298-463c-9031-b9d8e486d566\") " Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.548402 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/184a9f2b-eab7-4221-a900-746e063662a8-config-data\") pod \"184a9f2b-eab7-4221-a900-746e063662a8\" (UID: \"184a9f2b-eab7-4221-a900-746e063662a8\") " Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.548427 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/184a9f2b-eab7-4221-a900-746e063662a8-internal-tls-certs\") pod \"184a9f2b-eab7-4221-a900-746e063662a8\" (UID: \"184a9f2b-eab7-4221-a900-746e063662a8\") " Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.548458 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mzh8m\" (UniqueName: \"kubernetes.io/projected/184a9f2b-eab7-4221-a900-746e063662a8-kube-api-access-mzh8m\") pod \"184a9f2b-eab7-4221-a900-746e063662a8\" (UID: \"184a9f2b-eab7-4221-a900-746e063662a8\") " Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.548491 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88b73d7f-ee50-4fdf-9fd7-37296c855d69-combined-ca-bundle\") pod \"88b73d7f-ee50-4fdf-9fd7-37296c855d69\" (UID: \"88b73d7f-ee50-4fdf-9fd7-37296c855d69\") " Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.548557 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e81d2508-e298-463c-9031-b9d8e486d566-scripts\") pod \"e81d2508-e298-463c-9031-b9d8e486d566\" (UID: \"e81d2508-e298-463c-9031-b9d8e486d566\") " Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.548579 4558 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e81d2508-e298-463c-9031-b9d8e486d566-combined-ca-bundle\") pod \"e81d2508-e298-463c-9031-b9d8e486d566\" (UID: \"e81d2508-e298-463c-9031-b9d8e486d566\") " Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.548602 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e81d2508-e298-463c-9031-b9d8e486d566-internal-tls-certs\") pod \"e81d2508-e298-463c-9031-b9d8e486d566\" (UID: \"e81d2508-e298-463c-9031-b9d8e486d566\") " Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.548659 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e81d2508-e298-463c-9031-b9d8e486d566-config-data\") pod \"e81d2508-e298-463c-9031-b9d8e486d566\" (UID: \"e81d2508-e298-463c-9031-b9d8e486d566\") " Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.548734 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e81d2508-e298-463c-9031-b9d8e486d566-logs\") pod \"e81d2508-e298-463c-9031-b9d8e486d566\" (UID: \"e81d2508-e298-463c-9031-b9d8e486d566\") " Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.548760 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/184a9f2b-eab7-4221-a900-746e063662a8-logs\") pod \"184a9f2b-eab7-4221-a900-746e063662a8\" (UID: \"184a9f2b-eab7-4221-a900-746e063662a8\") " Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.548802 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/184a9f2b-eab7-4221-a900-746e063662a8-public-tls-certs\") pod \"184a9f2b-eab7-4221-a900-746e063662a8\" (UID: \"184a9f2b-eab7-4221-a900-746e063662a8\") " Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.548863 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/184a9f2b-eab7-4221-a900-746e063662a8-config-data-custom\") pod \"184a9f2b-eab7-4221-a900-746e063662a8\" (UID: \"184a9f2b-eab7-4221-a900-746e063662a8\") " Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.548882 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e81d2508-e298-463c-9031-b9d8e486d566-httpd-run\") pod \"e81d2508-e298-463c-9031-b9d8e486d566\" (UID: \"e81d2508-e298-463c-9031-b9d8e486d566\") " Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.548911 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/88b73d7f-ee50-4fdf-9fd7-37296c855d69-config-data\") pod \"88b73d7f-ee50-4fdf-9fd7-37296c855d69\" (UID: \"88b73d7f-ee50-4fdf-9fd7-37296c855d69\") " Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.549990 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e81d2508-e298-463c-9031-b9d8e486d566-logs" (OuterVolumeSpecName: "logs") pod "e81d2508-e298-463c-9031-b9d8e486d566" (UID: "e81d2508-e298-463c-9031-b9d8e486d566"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.557560 4558 scope.go:117] "RemoveContainer" containerID="5a07cb607c001f9ab3a834dea57d84934f1fe9dff82a41e28812e0217fe09cff" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.558621 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e81d2508-e298-463c-9031-b9d8e486d566-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "e81d2508-e298-463c-9031-b9d8e486d566" (UID: "e81d2508-e298-463c-9031-b9d8e486d566"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.559038 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/184a9f2b-eab7-4221-a900-746e063662a8-logs" (OuterVolumeSpecName: "logs") pod "184a9f2b-eab7-4221-a900-746e063662a8" (UID: "184a9f2b-eab7-4221-a900-746e063662a8"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.563625 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/184a9f2b-eab7-4221-a900-746e063662a8-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "184a9f2b-eab7-4221-a900-746e063662a8" (UID: "184a9f2b-eab7-4221-a900-746e063662a8"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.564492 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/184a9f2b-eab7-4221-a900-746e063662a8-kube-api-access-mzh8m" (OuterVolumeSpecName: "kube-api-access-mzh8m") pod "184a9f2b-eab7-4221-a900-746e063662a8" (UID: "184a9f2b-eab7-4221-a900-746e063662a8"). InnerVolumeSpecName "kube-api-access-mzh8m". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.564670 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e81d2508-e298-463c-9031-b9d8e486d566-kube-api-access-spmwn" (OuterVolumeSpecName: "kube-api-access-spmwn") pod "e81d2508-e298-463c-9031-b9d8e486d566" (UID: "e81d2508-e298-463c-9031-b9d8e486d566"). InnerVolumeSpecName "kube-api-access-spmwn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.566740 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e81d2508-e298-463c-9031-b9d8e486d566-scripts" (OuterVolumeSpecName: "scripts") pod "e81d2508-e298-463c-9031-b9d8e486d566" (UID: "e81d2508-e298-463c-9031-b9d8e486d566"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.573517 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage15-crc" (OuterVolumeSpecName: "glance") pod "e81d2508-e298-463c-9031-b9d8e486d566" (UID: "e81d2508-e298-463c-9031-b9d8e486d566"). InnerVolumeSpecName "local-storage15-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.585288 4558 scope.go:117] "RemoveContainer" containerID="bcc4b197f2fa6bbcecefb3da95a4d96c30060a9d1504ae2efef0a52dc6bcf098" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.595281 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-db-create-pxxqs"] Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.621888 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/88b73d7f-ee50-4fdf-9fd7-37296c855d69-kube-api-access-8mwb8" (OuterVolumeSpecName: "kube-api-access-8mwb8") pod "88b73d7f-ee50-4fdf-9fd7-37296c855d69" (UID: "88b73d7f-ee50-4fdf-9fd7-37296c855d69"). InnerVolumeSpecName "kube-api-access-8mwb8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.623516 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-db-create-pxxqs"] Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.624795 4558 scope.go:117] "RemoveContainer" containerID="5a07cb607c001f9ab3a834dea57d84934f1fe9dff82a41e28812e0217fe09cff" Jan 20 17:28:07 crc kubenswrapper[4558]: E0120 17:28:07.625622 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5a07cb607c001f9ab3a834dea57d84934f1fe9dff82a41e28812e0217fe09cff\": container with ID starting with 5a07cb607c001f9ab3a834dea57d84934f1fe9dff82a41e28812e0217fe09cff not found: ID does not exist" containerID="5a07cb607c001f9ab3a834dea57d84934f1fe9dff82a41e28812e0217fe09cff" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.625654 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5a07cb607c001f9ab3a834dea57d84934f1fe9dff82a41e28812e0217fe09cff"} err="failed to get container status \"5a07cb607c001f9ab3a834dea57d84934f1fe9dff82a41e28812e0217fe09cff\": rpc error: code = NotFound desc = could not find container \"5a07cb607c001f9ab3a834dea57d84934f1fe9dff82a41e28812e0217fe09cff\": container with ID starting with 5a07cb607c001f9ab3a834dea57d84934f1fe9dff82a41e28812e0217fe09cff not found: ID does not exist" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.625702 4558 scope.go:117] "RemoveContainer" containerID="bcc4b197f2fa6bbcecefb3da95a4d96c30060a9d1504ae2efef0a52dc6bcf098" Jan 20 17:28:07 crc kubenswrapper[4558]: E0120 17:28:07.626143 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bcc4b197f2fa6bbcecefb3da95a4d96c30060a9d1504ae2efef0a52dc6bcf098\": container with ID starting with bcc4b197f2fa6bbcecefb3da95a4d96c30060a9d1504ae2efef0a52dc6bcf098 not found: ID does not exist" containerID="bcc4b197f2fa6bbcecefb3da95a4d96c30060a9d1504ae2efef0a52dc6bcf098" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.626208 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bcc4b197f2fa6bbcecefb3da95a4d96c30060a9d1504ae2efef0a52dc6bcf098"} err="failed to get container status \"bcc4b197f2fa6bbcecefb3da95a4d96c30060a9d1504ae2efef0a52dc6bcf098\": rpc error: code = NotFound desc = could not find container \"bcc4b197f2fa6bbcecefb3da95a4d96c30060a9d1504ae2efef0a52dc6bcf098\": container with ID starting with bcc4b197f2fa6bbcecefb3da95a4d96c30060a9d1504ae2efef0a52dc6bcf098 not found: ID does not exist" Jan 20 17:28:07 crc 
kubenswrapper[4558]: I0120 17:28:07.626237 4558 scope.go:117] "RemoveContainer" containerID="5a07cb607c001f9ab3a834dea57d84934f1fe9dff82a41e28812e0217fe09cff" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.626549 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5a07cb607c001f9ab3a834dea57d84934f1fe9dff82a41e28812e0217fe09cff"} err="failed to get container status \"5a07cb607c001f9ab3a834dea57d84934f1fe9dff82a41e28812e0217fe09cff\": rpc error: code = NotFound desc = could not find container \"5a07cb607c001f9ab3a834dea57d84934f1fe9dff82a41e28812e0217fe09cff\": container with ID starting with 5a07cb607c001f9ab3a834dea57d84934f1fe9dff82a41e28812e0217fe09cff not found: ID does not exist" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.626576 4558 scope.go:117] "RemoveContainer" containerID="bcc4b197f2fa6bbcecefb3da95a4d96c30060a9d1504ae2efef0a52dc6bcf098" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.627001 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bcc4b197f2fa6bbcecefb3da95a4d96c30060a9d1504ae2efef0a52dc6bcf098"} err="failed to get container status \"bcc4b197f2fa6bbcecefb3da95a4d96c30060a9d1504ae2efef0a52dc6bcf098\": rpc error: code = NotFound desc = could not find container \"bcc4b197f2fa6bbcecefb3da95a4d96c30060a9d1504ae2efef0a52dc6bcf098\": container with ID starting with bcc4b197f2fa6bbcecefb3da95a4d96c30060a9d1504ae2efef0a52dc6bcf098 not found: ID does not exist" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.627025 4558 scope.go:117] "RemoveContainer" containerID="5318567d652c11a8d32a83b41eaaa58746106e91b1ac59789f4bda5a8e0f9415" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.630464 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.636984 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.641712 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-74494d9d6c-qv9st" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.643248 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-proxy-5f7fb7b-s2dxn"] Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.648342 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e81d2508-e298-463c-9031-b9d8e486d566-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e81d2508-e298-463c-9031-b9d8e486d566" (UID: "e81d2508-e298-463c-9031-b9d8e486d566"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.650995 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/88b73d7f-ee50-4fdf-9fd7-37296c855d69-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "88b73d7f-ee50-4fdf-9fd7-37296c855d69" (UID: "88b73d7f-ee50-4fdf-9fd7-37296c855d69"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.651578 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e81d2508-e298-463c-9031-b9d8e486d566-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.651603 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e81d2508-e298-463c-9031-b9d8e486d566-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.651611 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e81d2508-e298-463c-9031-b9d8e486d566-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.651621 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/184a9f2b-eab7-4221-a900-746e063662a8-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.651631 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/184a9f2b-eab7-4221-a900-746e063662a8-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.651642 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e81d2508-e298-463c-9031-b9d8e486d566-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.651653 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8mwb8\" (UniqueName: \"kubernetes.io/projected/88b73d7f-ee50-4fdf-9fd7-37296c855d69-kube-api-access-8mwb8\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.651673 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") on node \"crc\" " Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.651683 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-spmwn\" (UniqueName: \"kubernetes.io/projected/e81d2508-e298-463c-9031-b9d8e486d566-kube-api-access-spmwn\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.651693 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mzh8m\" (UniqueName: \"kubernetes.io/projected/184a9f2b-eab7-4221-a900-746e063662a8-kube-api-access-mzh8m\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.651702 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88b73d7f-ee50-4fdf-9fd7-37296c855d69-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.654031 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/88b73d7f-ee50-4fdf-9fd7-37296c855d69-config-data" (OuterVolumeSpecName: "config-data") pod "88b73d7f-ee50-4fdf-9fd7-37296c855d69" (UID: "88b73d7f-ee50-4fdf-9fd7-37296c855d69"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.654079 4558 scope.go:117] "RemoveContainer" containerID="516792e38f223a8dda033868458240450dfed4fe467c3a9a1c42b3301734bf44" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.687546 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/swift-proxy-5f7fb7b-s2dxn"] Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.706404 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage15-crc" (UniqueName: "kubernetes.io/local-volume/local-storage15-crc") on node "crc" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.706998 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/184a9f2b-eab7-4221-a900-746e063662a8-config-data" (OuterVolumeSpecName: "config-data") pod "184a9f2b-eab7-4221-a900-746e063662a8" (UID: "184a9f2b-eab7-4221-a900-746e063662a8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.711305 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/184a9f2b-eab7-4221-a900-746e063662a8-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "184a9f2b-eab7-4221-a900-746e063662a8" (UID: "184a9f2b-eab7-4221-a900-746e063662a8"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.711423 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/184a9f2b-eab7-4221-a900-746e063662a8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "184a9f2b-eab7-4221-a900-746e063662a8" (UID: "184a9f2b-eab7-4221-a900-746e063662a8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.713421 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e81d2508-e298-463c-9031-b9d8e486d566-config-data" (OuterVolumeSpecName: "config-data") pod "e81d2508-e298-463c-9031-b9d8e486d566" (UID: "e81d2508-e298-463c-9031-b9d8e486d566"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.717344 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/184a9f2b-eab7-4221-a900-746e063662a8-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "184a9f2b-eab7-4221-a900-746e063662a8" (UID: "184a9f2b-eab7-4221-a900-746e063662a8"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.721268 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e81d2508-e298-463c-9031-b9d8e486d566-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "e81d2508-e298-463c-9031-b9d8e486d566" (UID: "e81d2508-e298-463c-9031-b9d8e486d566"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.752983 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7d72e21b-f0dd-48d4-9594-bb39aae6fa9d-public-tls-certs\") pod \"7d72e21b-f0dd-48d4-9594-bb39aae6fa9d\" (UID: \"7d72e21b-f0dd-48d4-9594-bb39aae6fa9d\") " Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.753060 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7d72e21b-f0dd-48d4-9594-bb39aae6fa9d-fernet-keys\") pod \"7d72e21b-f0dd-48d4-9594-bb39aae6fa9d\" (UID: \"7d72e21b-f0dd-48d4-9594-bb39aae6fa9d\") " Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.753120 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/7d72e21b-f0dd-48d4-9594-bb39aae6fa9d-credential-keys\") pod \"7d72e21b-f0dd-48d4-9594-bb39aae6fa9d\" (UID: \"7d72e21b-f0dd-48d4-9594-bb39aae6fa9d\") " Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.753142 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d72e21b-f0dd-48d4-9594-bb39aae6fa9d-config-data\") pod \"7d72e21b-f0dd-48d4-9594-bb39aae6fa9d\" (UID: \"7d72e21b-f0dd-48d4-9594-bb39aae6fa9d\") " Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.753199 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7d72e21b-f0dd-48d4-9594-bb39aae6fa9d-scripts\") pod \"7d72e21b-f0dd-48d4-9594-bb39aae6fa9d\" (UID: \"7d72e21b-f0dd-48d4-9594-bb39aae6fa9d\") " Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.753237 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d72e21b-f0dd-48d4-9594-bb39aae6fa9d-combined-ca-bundle\") pod \"7d72e21b-f0dd-48d4-9594-bb39aae6fa9d\" (UID: \"7d72e21b-f0dd-48d4-9594-bb39aae6fa9d\") " Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.753338 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7d72e21b-f0dd-48d4-9594-bb39aae6fa9d-internal-tls-certs\") pod \"7d72e21b-f0dd-48d4-9594-bb39aae6fa9d\" (UID: \"7d72e21b-f0dd-48d4-9594-bb39aae6fa9d\") " Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.753373 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kqq8g\" (UniqueName: \"kubernetes.io/projected/7d72e21b-f0dd-48d4-9594-bb39aae6fa9d-kube-api-access-kqq8g\") pod \"7d72e21b-f0dd-48d4-9594-bb39aae6fa9d\" (UID: \"7d72e21b-f0dd-48d4-9594-bb39aae6fa9d\") " Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.753817 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/184a9f2b-eab7-4221-a900-746e063662a8-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.753837 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/88b73d7f-ee50-4fdf-9fd7-37296c855d69-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.753848 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/184a9f2b-eab7-4221-a900-746e063662a8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.753857 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.753867 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/184a9f2b-eab7-4221-a900-746e063662a8-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.753875 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/184a9f2b-eab7-4221-a900-746e063662a8-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.753884 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e81d2508-e298-463c-9031-b9d8e486d566-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.753892 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e81d2508-e298-463c-9031-b9d8e486d566-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.756689 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d72e21b-f0dd-48d4-9594-bb39aae6fa9d-scripts" (OuterVolumeSpecName: "scripts") pod "7d72e21b-f0dd-48d4-9594-bb39aae6fa9d" (UID: "7d72e21b-f0dd-48d4-9594-bb39aae6fa9d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.756906 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d72e21b-f0dd-48d4-9594-bb39aae6fa9d-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "7d72e21b-f0dd-48d4-9594-bb39aae6fa9d" (UID: "7d72e21b-f0dd-48d4-9594-bb39aae6fa9d"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.757016 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7d72e21b-f0dd-48d4-9594-bb39aae6fa9d-kube-api-access-kqq8g" (OuterVolumeSpecName: "kube-api-access-kqq8g") pod "7d72e21b-f0dd-48d4-9594-bb39aae6fa9d" (UID: "7d72e21b-f0dd-48d4-9594-bb39aae6fa9d"). InnerVolumeSpecName "kube-api-access-kqq8g". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.757508 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d72e21b-f0dd-48d4-9594-bb39aae6fa9d-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "7d72e21b-f0dd-48d4-9594-bb39aae6fa9d" (UID: "7d72e21b-f0dd-48d4-9594-bb39aae6fa9d"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.773103 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d72e21b-f0dd-48d4-9594-bb39aae6fa9d-config-data" (OuterVolumeSpecName: "config-data") pod "7d72e21b-f0dd-48d4-9594-bb39aae6fa9d" (UID: "7d72e21b-f0dd-48d4-9594-bb39aae6fa9d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.774625 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d72e21b-f0dd-48d4-9594-bb39aae6fa9d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7d72e21b-f0dd-48d4-9594-bb39aae6fa9d" (UID: "7d72e21b-f0dd-48d4-9594-bb39aae6fa9d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.792934 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d72e21b-f0dd-48d4-9594-bb39aae6fa9d-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "7d72e21b-f0dd-48d4-9594-bb39aae6fa9d" (UID: "7d72e21b-f0dd-48d4-9594-bb39aae6fa9d"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.795630 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d72e21b-f0dd-48d4-9594-bb39aae6fa9d-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "7d72e21b-f0dd-48d4-9594-bb39aae6fa9d" (UID: "7d72e21b-f0dd-48d4-9594-bb39aae6fa9d"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.855276 4558 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/7d72e21b-f0dd-48d4-9594-bb39aae6fa9d-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.855305 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d72e21b-f0dd-48d4-9594-bb39aae6fa9d-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.855317 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7d72e21b-f0dd-48d4-9594-bb39aae6fa9d-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.855328 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d72e21b-f0dd-48d4-9594-bb39aae6fa9d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.855339 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7d72e21b-f0dd-48d4-9594-bb39aae6fa9d-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.855349 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kqq8g\" (UniqueName: \"kubernetes.io/projected/7d72e21b-f0dd-48d4-9594-bb39aae6fa9d-kube-api-access-kqq8g\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.855360 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7d72e21b-f0dd-48d4-9594-bb39aae6fa9d-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:07 crc kubenswrapper[4558]: I0120 17:28:07.855370 4558 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7d72e21b-f0dd-48d4-9594-bb39aae6fa9d-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:08 crc 
kubenswrapper[4558]: I0120 17:28:08.008822 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"e81d2508-e298-463c-9031-b9d8e486d566","Type":"ContainerDied","Data":"fd96309ecbf812969dbffa3d6cf2d24d5c1db359e9c99dbad0ee1e4d5d821ec8"} Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.008883 4558 scope.go:117] "RemoveContainer" containerID="c06768c089cc106f807b1405688b9c79fd40a3d43e579f7beb4c7d8ba6ce7297" Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.009064 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.013922 4558 generic.go:334] "Generic (PLEG): container finished" podID="184a9f2b-eab7-4221-a900-746e063662a8" containerID="1d50c4159354efc26c8a538204ca688c7a0a5b8e2d0ac9d5aef208c1239af23d" exitCode=0 Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.014002 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-84969c976-hhcrx" event={"ID":"184a9f2b-eab7-4221-a900-746e063662a8","Type":"ContainerDied","Data":"1d50c4159354efc26c8a538204ca688c7a0a5b8e2d0ac9d5aef208c1239af23d"} Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.014035 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-84969c976-hhcrx" event={"ID":"184a9f2b-eab7-4221-a900-746e063662a8","Type":"ContainerDied","Data":"4f2d4d994ed47f3490c772e919b7eb78c4f3d55e887b908c8fad5de2672ad9f2"} Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.014227 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-api-84969c976-hhcrx" Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.022577 4558 generic.go:334] "Generic (PLEG): container finished" podID="125597fc-d3b4-44c1-a0a6-7dccbaebe7dd" containerID="8299fb12d183c4920c21b1f9def27ecef5343744aedc73fdcd93885a02ce9354" exitCode=0 Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.022631 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-86d4665d84-cndx4" event={"ID":"125597fc-d3b4-44c1-a0a6-7dccbaebe7dd","Type":"ContainerDied","Data":"8299fb12d183c4920c21b1f9def27ecef5343744aedc73fdcd93885a02ce9354"} Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.045632 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"6bc8ca35-f460-4c4d-9dbe-0012c552371a","Type":"ContainerDied","Data":"1d20563782842dbbaa3d775668dc3bb5fbdd776a62e69568b949ae9296a620d3"} Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.045953 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.050857 4558 scope.go:117] "RemoveContainer" containerID="d2afe7d8e958d845dee5152deac97265bbf55f309d653220c753d06c2403fd6e" Jan 20 17:28:08 crc kubenswrapper[4558]: E0120 17:28:08.057786 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Jan 20 17:28:08 crc kubenswrapper[4558]: E0120 17:28:08.057851 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/48cfc6e5-774d-4e7d-8103-f6a3260ea14c-config-data podName:48cfc6e5-774d-4e7d-8103-f6a3260ea14c nodeName:}" failed. 
No retries permitted until 2026-01-20 17:28:16.057837174 +0000 UTC m=+2789.818175141 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/48cfc6e5-774d-4e7d-8103-f6a3260ea14c-config-data") pod "rabbitmq-cell1-server-0" (UID: "48cfc6e5-774d-4e7d-8103-f6a3260ea14c") : configmap "rabbitmq-cell1-config-data" not found Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.060277 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.064147 4558 generic.go:334] "Generic (PLEG): container finished" podID="3b9858cf-2020-458e-bcf6-407c6853a962" containerID="0bee60e6a9ed04943634730130e635bd656aae9f44611d34a0aa33192c022189" exitCode=0 Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.064208 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-5cbf796c47-9gdgl" event={"ID":"3b9858cf-2020-458e-bcf6-407c6853a962","Type":"ContainerDied","Data":"0bee60e6a9ed04943634730130e635bd656aae9f44611d34a0aa33192c022189"} Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.066982 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"88b73d7f-ee50-4fdf-9fd7-37296c855d69","Type":"ContainerDied","Data":"9565d88e0b7a5af370711a97c3d99e03d66f44b360b4c988733b9bcc472c4eef"} Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.067011 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.069152 4558 generic.go:334] "Generic (PLEG): container finished" podID="7d72e21b-f0dd-48d4-9594-bb39aae6fa9d" containerID="911340d3455407e7d7596ed292889c631409f00406037f4417caa85ad7dc54a6" exitCode=0 Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.069284 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-55d4bc664d-k82t9" Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.075360 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-74494d9d6c-qv9st" Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.075723 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-74494d9d6c-qv9st" event={"ID":"7d72e21b-f0dd-48d4-9594-bb39aae6fa9d","Type":"ContainerDied","Data":"911340d3455407e7d7596ed292889c631409f00406037f4417caa85ad7dc54a6"} Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.075754 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-74494d9d6c-qv9st" event={"ID":"7d72e21b-f0dd-48d4-9594-bb39aae6fa9d","Type":"ContainerDied","Data":"1daf69cee188fa87bbd078da1bc9c2cd7dceb6bfa47e1fdeb19d4ec98b39ed5a"} Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.085878 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.108293 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-api-84969c976-hhcrx"] Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.134278 4558 scope.go:117] "RemoveContainer" containerID="1d50c4159354efc26c8a538204ca688c7a0a5b8e2d0ac9d5aef208c1239af23d" Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.144537 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-api-84969c976-hhcrx"] Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.152354 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.158189 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:28:08 crc kubenswrapper[4558]: E0120 17:28:08.158720 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 17:28:08 crc kubenswrapper[4558]: E0120 17:28:08.159679 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/1b7c6fb3-f30d-4dde-8502-e7d840719520-operator-scripts podName:1b7c6fb3-f30d-4dde-8502-e7d840719520 nodeName:}" failed. No retries permitted until 2026-01-20 17:28:12.159654638 +0000 UTC m=+2785.919992605 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/1b7c6fb3-f30d-4dde-8502-e7d840719520-operator-scripts") pod "glance-db-create-rt2fh" (UID: "1b7c6fb3-f30d-4dde-8502-e7d840719520") : configmap "openstack-scripts" not found Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.209106 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-5cbf796c47-9gdgl" Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.236304 4558 scope.go:117] "RemoveContainer" containerID="0c2544d6d08a38b60dfd9a1ca62a8c9460a6db9ba8a618726a1b8ad313afe1bf" Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.242376 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-55d4bc664d-k82t9"] Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.251396 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/placement-55d4bc664d-k82t9"] Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.258028 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-74494d9d6c-qv9st"] Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.260547 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wbw24\" (UniqueName: \"kubernetes.io/projected/3b9858cf-2020-458e-bcf6-407c6853a962-kube-api-access-wbw24\") pod \"3b9858cf-2020-458e-bcf6-407c6853a962\" (UID: \"3b9858cf-2020-458e-bcf6-407c6853a962\") " Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.260667 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b9858cf-2020-458e-bcf6-407c6853a962-combined-ca-bundle\") pod \"3b9858cf-2020-458e-bcf6-407c6853a962\" (UID: \"3b9858cf-2020-458e-bcf6-407c6853a962\") " Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.260725 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3b9858cf-2020-458e-bcf6-407c6853a962-logs\") pod \"3b9858cf-2020-458e-bcf6-407c6853a962\" (UID: \"3b9858cf-2020-458e-bcf6-407c6853a962\") " Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.260863 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3b9858cf-2020-458e-bcf6-407c6853a962-config-data-custom\") pod \"3b9858cf-2020-458e-bcf6-407c6853a962\" (UID: \"3b9858cf-2020-458e-bcf6-407c6853a962\") " Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.260924 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3b9858cf-2020-458e-bcf6-407c6853a962-config-data\") pod \"3b9858cf-2020-458e-bcf6-407c6853a962\" (UID: \"3b9858cf-2020-458e-bcf6-407c6853a962\") " Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.262963 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3b9858cf-2020-458e-bcf6-407c6853a962-logs" (OuterVolumeSpecName: "logs") pod "3b9858cf-2020-458e-bcf6-407c6853a962" (UID: "3b9858cf-2020-458e-bcf6-407c6853a962"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.270221 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-74494d9d6c-qv9st"] Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.272407 4558 scope.go:117] "RemoveContainer" containerID="1d50c4159354efc26c8a538204ca688c7a0a5b8e2d0ac9d5aef208c1239af23d" Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.277092 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:28:08 crc kubenswrapper[4558]: E0120 17:28:08.280257 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1d50c4159354efc26c8a538204ca688c7a0a5b8e2d0ac9d5aef208c1239af23d\": container with ID starting with 1d50c4159354efc26c8a538204ca688c7a0a5b8e2d0ac9d5aef208c1239af23d not found: ID does not exist" containerID="1d50c4159354efc26c8a538204ca688c7a0a5b8e2d0ac9d5aef208c1239af23d" Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.280318 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d50c4159354efc26c8a538204ca688c7a0a5b8e2d0ac9d5aef208c1239af23d"} err="failed to get container status \"1d50c4159354efc26c8a538204ca688c7a0a5b8e2d0ac9d5aef208c1239af23d\": rpc error: code = NotFound desc = could not find container \"1d50c4159354efc26c8a538204ca688c7a0a5b8e2d0ac9d5aef208c1239af23d\": container with ID starting with 1d50c4159354efc26c8a538204ca688c7a0a5b8e2d0ac9d5aef208c1239af23d not found: ID does not exist" Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.280359 4558 scope.go:117] "RemoveContainer" containerID="0c2544d6d08a38b60dfd9a1ca62a8c9460a6db9ba8a618726a1b8ad313afe1bf" Jan 20 17:28:08 crc kubenswrapper[4558]: E0120 17:28:08.281551 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0c2544d6d08a38b60dfd9a1ca62a8c9460a6db9ba8a618726a1b8ad313afe1bf\": container with ID starting with 0c2544d6d08a38b60dfd9a1ca62a8c9460a6db9ba8a618726a1b8ad313afe1bf not found: ID does not exist" containerID="0c2544d6d08a38b60dfd9a1ca62a8c9460a6db9ba8a618726a1b8ad313afe1bf" Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.281593 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0c2544d6d08a38b60dfd9a1ca62a8c9460a6db9ba8a618726a1b8ad313afe1bf"} err="failed to get container status \"0c2544d6d08a38b60dfd9a1ca62a8c9460a6db9ba8a618726a1b8ad313afe1bf\": rpc error: code = NotFound desc = could not find container \"0c2544d6d08a38b60dfd9a1ca62a8c9460a6db9ba8a618726a1b8ad313afe1bf\": container with ID starting with 0c2544d6d08a38b60dfd9a1ca62a8c9460a6db9ba8a618726a1b8ad313afe1bf not found: ID does not exist" Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.281626 4558 scope.go:117] "RemoveContainer" containerID="3d044e6540417b17b7e29b9e6b94aac224f6dc6f6746f710160907736ae8f719" Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.283305 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3b9858cf-2020-458e-bcf6-407c6853a962-kube-api-access-wbw24" (OuterVolumeSpecName: "kube-api-access-wbw24") pod "3b9858cf-2020-458e-bcf6-407c6853a962" (UID: "3b9858cf-2020-458e-bcf6-407c6853a962"). InnerVolumeSpecName "kube-api-access-wbw24". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.284369 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.286548 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b9858cf-2020-458e-bcf6-407c6853a962-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "3b9858cf-2020-458e-bcf6-407c6853a962" (UID: "3b9858cf-2020-458e-bcf6-407c6853a962"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.296402 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b9858cf-2020-458e-bcf6-407c6853a962-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3b9858cf-2020-458e-bcf6-407c6853a962" (UID: "3b9858cf-2020-458e-bcf6-407c6853a962"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.325657 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b9858cf-2020-458e-bcf6-407c6853a962-config-data" (OuterVolumeSpecName: "config-data") pod "3b9858cf-2020-458e-bcf6-407c6853a962" (UID: "3b9858cf-2020-458e-bcf6-407c6853a962"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.346179 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-86d4665d84-cndx4" Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.366428 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/125597fc-d3b4-44c1-a0a6-7dccbaebe7dd-config-data\") pod \"125597fc-d3b4-44c1-a0a6-7dccbaebe7dd\" (UID: \"125597fc-d3b4-44c1-a0a6-7dccbaebe7dd\") " Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.366565 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h58lj\" (UniqueName: \"kubernetes.io/projected/125597fc-d3b4-44c1-a0a6-7dccbaebe7dd-kube-api-access-h58lj\") pod \"125597fc-d3b4-44c1-a0a6-7dccbaebe7dd\" (UID: \"125597fc-d3b4-44c1-a0a6-7dccbaebe7dd\") " Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.366652 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/125597fc-d3b4-44c1-a0a6-7dccbaebe7dd-combined-ca-bundle\") pod \"125597fc-d3b4-44c1-a0a6-7dccbaebe7dd\" (UID: \"125597fc-d3b4-44c1-a0a6-7dccbaebe7dd\") " Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.366707 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/125597fc-d3b4-44c1-a0a6-7dccbaebe7dd-config-data-custom\") pod \"125597fc-d3b4-44c1-a0a6-7dccbaebe7dd\" (UID: \"125597fc-d3b4-44c1-a0a6-7dccbaebe7dd\") " Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.366826 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/125597fc-d3b4-44c1-a0a6-7dccbaebe7dd-logs\") pod \"125597fc-d3b4-44c1-a0a6-7dccbaebe7dd\" (UID: \"125597fc-d3b4-44c1-a0a6-7dccbaebe7dd\") " Jan 20 17:28:08 crc 
kubenswrapper[4558]: I0120 17:28:08.367282 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3b9858cf-2020-458e-bcf6-407c6853a962-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.367301 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3b9858cf-2020-458e-bcf6-407c6853a962-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.367335 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3b9858cf-2020-458e-bcf6-407c6853a962-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.367345 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3b9858cf-2020-458e-bcf6-407c6853a962-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.367354 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wbw24\" (UniqueName: \"kubernetes.io/projected/3b9858cf-2020-458e-bcf6-407c6853a962-kube-api-access-wbw24\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.367716 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/125597fc-d3b4-44c1-a0a6-7dccbaebe7dd-logs" (OuterVolumeSpecName: "logs") pod "125597fc-d3b4-44c1-a0a6-7dccbaebe7dd" (UID: "125597fc-d3b4-44c1-a0a6-7dccbaebe7dd"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.369843 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/125597fc-d3b4-44c1-a0a6-7dccbaebe7dd-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "125597fc-d3b4-44c1-a0a6-7dccbaebe7dd" (UID: "125597fc-d3b4-44c1-a0a6-7dccbaebe7dd"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.373913 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/125597fc-d3b4-44c1-a0a6-7dccbaebe7dd-kube-api-access-h58lj" (OuterVolumeSpecName: "kube-api-access-h58lj") pod "125597fc-d3b4-44c1-a0a6-7dccbaebe7dd" (UID: "125597fc-d3b4-44c1-a0a6-7dccbaebe7dd"). InnerVolumeSpecName "kube-api-access-h58lj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.376708 4558 scope.go:117] "RemoveContainer" containerID="fc043775a1381650eef8618d77c2954b353dcd53aa5ea369049e6ea12a421442" Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.394793 4558 scope.go:117] "RemoveContainer" containerID="a17a87967f20a77dfc9f8a5084d14c3b4b2b006c99d8e894a2ad7bcef9243b71" Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.396579 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/125597fc-d3b4-44c1-a0a6-7dccbaebe7dd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "125597fc-d3b4-44c1-a0a6-7dccbaebe7dd" (UID: "125597fc-d3b4-44c1-a0a6-7dccbaebe7dd"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.404845 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/125597fc-d3b4-44c1-a0a6-7dccbaebe7dd-config-data" (OuterVolumeSpecName: "config-data") pod "125597fc-d3b4-44c1-a0a6-7dccbaebe7dd" (UID: "125597fc-d3b4-44c1-a0a6-7dccbaebe7dd"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.425041 4558 scope.go:117] "RemoveContainer" containerID="911340d3455407e7d7596ed292889c631409f00406037f4417caa85ad7dc54a6" Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.444782 4558 scope.go:117] "RemoveContainer" containerID="911340d3455407e7d7596ed292889c631409f00406037f4417caa85ad7dc54a6" Jan 20 17:28:08 crc kubenswrapper[4558]: E0120 17:28:08.445616 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"911340d3455407e7d7596ed292889c631409f00406037f4417caa85ad7dc54a6\": container with ID starting with 911340d3455407e7d7596ed292889c631409f00406037f4417caa85ad7dc54a6 not found: ID does not exist" containerID="911340d3455407e7d7596ed292889c631409f00406037f4417caa85ad7dc54a6" Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.445652 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"911340d3455407e7d7596ed292889c631409f00406037f4417caa85ad7dc54a6"} err="failed to get container status \"911340d3455407e7d7596ed292889c631409f00406037f4417caa85ad7dc54a6\": rpc error: code = NotFound desc = could not find container \"911340d3455407e7d7596ed292889c631409f00406037f4417caa85ad7dc54a6\": container with ID starting with 911340d3455407e7d7596ed292889c631409f00406037f4417caa85ad7dc54a6 not found: ID does not exist" Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.468321 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/125597fc-d3b4-44c1-a0a6-7dccbaebe7dd-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.468352 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h58lj\" (UniqueName: \"kubernetes.io/projected/125597fc-d3b4-44c1-a0a6-7dccbaebe7dd-kube-api-access-h58lj\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.468365 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/125597fc-d3b4-44c1-a0a6-7dccbaebe7dd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.468378 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/125597fc-d3b4-44c1-a0a6-7dccbaebe7dd-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.468389 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/125597fc-d3b4-44c1-a0a6-7dccbaebe7dd-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.576475 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="012246ba-362c-48f3-bbb1-913d57f1f9ed" path="/var/lib/kubelet/pods/012246ba-362c-48f3-bbb1-913d57f1f9ed/volumes" Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.576893 4558 
kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="184a9f2b-eab7-4221-a900-746e063662a8" path="/var/lib/kubelet/pods/184a9f2b-eab7-4221-a900-746e063662a8/volumes" Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.577491 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bcd505c-4754-4f41-b91d-6e488a669c93" path="/var/lib/kubelet/pods/1bcd505c-4754-4f41-b91d-6e488a669c93/volumes" Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.577905 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2bee9ff9-52bb-4dae-971c-8c6236a4e563" path="/var/lib/kubelet/pods/2bee9ff9-52bb-4dae-971c-8c6236a4e563/volumes" Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.579057 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3460a75e-3553-47b9-bfc5-39c2c459826e" path="/var/lib/kubelet/pods/3460a75e-3553-47b9-bfc5-39c2c459826e/volumes" Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.579564 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="443f910c-dd2d-4c72-b861-f15de67ac6bb" path="/var/lib/kubelet/pods/443f910c-dd2d-4c72-b861-f15de67ac6bb/volumes" Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.580282 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5" path="/var/lib/kubelet/pods/4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5/volumes" Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.581389 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="54457478-e4f4-4088-bd18-e427314e1fb2" path="/var/lib/kubelet/pods/54457478-e4f4-4088-bd18-e427314e1fb2/volumes" Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.581837 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="592927c6-9b42-411a-8aec-40cf6183e32a" path="/var/lib/kubelet/pods/592927c6-9b42-411a-8aec-40cf6183e32a/volumes" Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.582540 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5f7a0ad9-436f-433c-9a91-cec4ffd3beeb" path="/var/lib/kubelet/pods/5f7a0ad9-436f-433c-9a91-cec4ffd3beeb/volumes" Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.582958 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="66e24f38-a98c-4444-8ee4-352266267985" path="/var/lib/kubelet/pods/66e24f38-a98c-4444-8ee4-352266267985/volumes" Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.584239 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6bc8ca35-f460-4c4d-9dbe-0012c552371a" path="/var/lib/kubelet/pods/6bc8ca35-f460-4c4d-9dbe-0012c552371a/volumes" Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.584899 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7d72e21b-f0dd-48d4-9594-bb39aae6fa9d" path="/var/lib/kubelet/pods/7d72e21b-f0dd-48d4-9594-bb39aae6fa9d/volumes" Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.585758 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="88b73d7f-ee50-4fdf-9fd7-37296c855d69" path="/var/lib/kubelet/pods/88b73d7f-ee50-4fdf-9fd7-37296c855d69/volumes" Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.586231 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b201974e-8bb6-412a-95d5-cce7a95e4528" path="/var/lib/kubelet/pods/b201974e-8bb6-412a-95d5-cce7a95e4528/volumes" Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.586564 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod 
volumes dir" podUID="bfb2b456-ae0e-42ca-a227-428a626f1e3e" path="/var/lib/kubelet/pods/bfb2b456-ae0e-42ca-a227-428a626f1e3e/volumes" Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.586999 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c2bfe108-b710-400b-baac-55815b192ee3" path="/var/lib/kubelet/pods/c2bfe108-b710-400b-baac-55815b192ee3/volumes" Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.587807 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c67fda0a-394c-424a-ad72-f0e3ebd77f1b" path="/var/lib/kubelet/pods/c67fda0a-394c-424a-ad72-f0e3ebd77f1b/volumes" Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.588201 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e81d2508-e298-463c-9031-b9d8e486d566" path="/var/lib/kubelet/pods/e81d2508-e298-463c-9031-b9d8e486d566/volumes" Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.588804 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ed3c56f1-4bbb-4590-8b19-a0de467537ad" path="/var/lib/kubelet/pods/ed3c56f1-4bbb-4590-8b19-a0de467537ad/volumes" Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.589318 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f6a0777b-d04f-451b-9e3d-3f9ab0e19df8" path="/var/lib/kubelet/pods/f6a0777b-d04f-451b-9e3d-3f9ab0e19df8/volumes" Jan 20 17:28:08 crc kubenswrapper[4558]: I0120 17:28:08.590452 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f81a00ad-36ce-4383-9e91-fe60de6939d2" path="/var/lib/kubelet/pods/f81a00ad-36ce-4383-9e91-fe60de6939d2/volumes" Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.081766 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-86d4665d84-cndx4" event={"ID":"125597fc-d3b4-44c1-a0a6-7dccbaebe7dd","Type":"ContainerDied","Data":"c43232927c02372c79d83e943e2c358fe1af41bc01b132b3223cbee474bfe408"} Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.081832 4558 scope.go:117] "RemoveContainer" containerID="8299fb12d183c4920c21b1f9def27ecef5343744aedc73fdcd93885a02ce9354" Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.081925 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-86d4665d84-cndx4" Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.087154 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-5cbf796c47-9gdgl" event={"ID":"3b9858cf-2020-458e-bcf6-407c6853a962","Type":"ContainerDied","Data":"e540e10ebfe59a8afa1a8d521b3270fe329363cfb25a828a0e2dfd0f29bd25f0"} Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.087201 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-5cbf796c47-9gdgl" Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.089583 4558 generic.go:334] "Generic (PLEG): container finished" podID="d692722b-f7fd-447c-8b7a-f56cff940d91" containerID="e662a87a8dfea3af31619f4746b123264a47d980ca2258670d31ac3490d07672" exitCode=0 Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.089661 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-0" event={"ID":"d692722b-f7fd-447c-8b7a-f56cff940d91","Type":"ContainerDied","Data":"e662a87a8dfea3af31619f4746b123264a47d980ca2258670d31ac3490d07672"} Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.192965 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.223191 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-86d4665d84-cndx4"] Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.236114 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-86d4665d84-cndx4"] Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.236515 4558 scope.go:117] "RemoveContainer" containerID="2c27863658ecd8a450f932a78dd0d64933a85bf75c1f865bb6dc9d092e3e4a29" Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.246783 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-worker-5cbf796c47-9gdgl"] Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.260458 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-worker-5cbf796c47-9gdgl"] Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.275211 4558 scope.go:117] "RemoveContainer" containerID="0bee60e6a9ed04943634730130e635bd656aae9f44611d34a0aa33192c022189" Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.288795 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d692722b-f7fd-447c-8b7a-f56cff940d91-rabbitmq-erlang-cookie\") pod \"d692722b-f7fd-447c-8b7a-f56cff940d91\" (UID: \"d692722b-f7fd-447c-8b7a-f56cff940d91\") " Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.289149 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d692722b-f7fd-447c-8b7a-f56cff940d91-rabbitmq-plugins\") pod \"d692722b-f7fd-447c-8b7a-f56cff940d91\" (UID: \"d692722b-f7fd-447c-8b7a-f56cff940d91\") " Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.289197 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/d692722b-f7fd-447c-8b7a-f56cff940d91-rabbitmq-tls\") pod \"d692722b-f7fd-447c-8b7a-f56cff940d91\" (UID: \"d692722b-f7fd-447c-8b7a-f56cff940d91\") " Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.289225 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"d692722b-f7fd-447c-8b7a-f56cff940d91\" (UID: \"d692722b-f7fd-447c-8b7a-f56cff940d91\") " Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.289746 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/empty-dir/d692722b-f7fd-447c-8b7a-f56cff940d91-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "d692722b-f7fd-447c-8b7a-f56cff940d91" (UID: "d692722b-f7fd-447c-8b7a-f56cff940d91"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.289808 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d692722b-f7fd-447c-8b7a-f56cff940d91-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "d692722b-f7fd-447c-8b7a-f56cff940d91" (UID: "d692722b-f7fd-447c-8b7a-f56cff940d91"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.290602 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d692722b-f7fd-447c-8b7a-f56cff940d91-pod-info\") pod \"d692722b-f7fd-447c-8b7a-f56cff940d91\" (UID: \"d692722b-f7fd-447c-8b7a-f56cff940d91\") " Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.290678 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t77q5\" (UniqueName: \"kubernetes.io/projected/d692722b-f7fd-447c-8b7a-f56cff940d91-kube-api-access-t77q5\") pod \"d692722b-f7fd-447c-8b7a-f56cff940d91\" (UID: \"d692722b-f7fd-447c-8b7a-f56cff940d91\") " Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.290747 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d692722b-f7fd-447c-8b7a-f56cff940d91-config-data\") pod \"d692722b-f7fd-447c-8b7a-f56cff940d91\" (UID: \"d692722b-f7fd-447c-8b7a-f56cff940d91\") " Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.290784 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d692722b-f7fd-447c-8b7a-f56cff940d91-plugins-conf\") pod \"d692722b-f7fd-447c-8b7a-f56cff940d91\" (UID: \"d692722b-f7fd-447c-8b7a-f56cff940d91\") " Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.290811 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d692722b-f7fd-447c-8b7a-f56cff940d91-server-conf\") pod \"d692722b-f7fd-447c-8b7a-f56cff940d91\" (UID: \"d692722b-f7fd-447c-8b7a-f56cff940d91\") " Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.290832 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d692722b-f7fd-447c-8b7a-f56cff940d91-erlang-cookie-secret\") pod \"d692722b-f7fd-447c-8b7a-f56cff940d91\" (UID: \"d692722b-f7fd-447c-8b7a-f56cff940d91\") " Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.290860 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d692722b-f7fd-447c-8b7a-f56cff940d91-rabbitmq-confd\") pod \"d692722b-f7fd-447c-8b7a-f56cff940d91\" (UID: \"d692722b-f7fd-447c-8b7a-f56cff940d91\") " Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.291440 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d692722b-f7fd-447c-8b7a-f56cff940d91-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:09 crc 
kubenswrapper[4558]: I0120 17:28:09.291458 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d692722b-f7fd-447c-8b7a-f56cff940d91-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.292055 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d692722b-f7fd-447c-8b7a-f56cff940d91-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "d692722b-f7fd-447c-8b7a-f56cff940d91" (UID: "d692722b-f7fd-447c-8b7a-f56cff940d91"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.300454 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d692722b-f7fd-447c-8b7a-f56cff940d91-kube-api-access-t77q5" (OuterVolumeSpecName: "kube-api-access-t77q5") pod "d692722b-f7fd-447c-8b7a-f56cff940d91" (UID: "d692722b-f7fd-447c-8b7a-f56cff940d91"). InnerVolumeSpecName "kube-api-access-t77q5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.300465 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/d692722b-f7fd-447c-8b7a-f56cff940d91-pod-info" (OuterVolumeSpecName: "pod-info") pod "d692722b-f7fd-447c-8b7a-f56cff940d91" (UID: "d692722b-f7fd-447c-8b7a-f56cff940d91"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.300883 4558 scope.go:117] "RemoveContainer" containerID="b4c792c97b8f3884770e6abef851a252b3788722ba9e9f419f4552e68ff9bd85" Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.313352 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d692722b-f7fd-447c-8b7a-f56cff940d91-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "d692722b-f7fd-447c-8b7a-f56cff940d91" (UID: "d692722b-f7fd-447c-8b7a-f56cff940d91"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.313357 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d692722b-f7fd-447c-8b7a-f56cff940d91-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "d692722b-f7fd-447c-8b7a-f56cff940d91" (UID: "d692722b-f7fd-447c-8b7a-f56cff940d91"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.313610 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "persistence") pod "d692722b-f7fd-447c-8b7a-f56cff940d91" (UID: "d692722b-f7fd-447c-8b7a-f56cff940d91"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.316847 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d692722b-f7fd-447c-8b7a-f56cff940d91-config-data" (OuterVolumeSpecName: "config-data") pod "d692722b-f7fd-447c-8b7a-f56cff940d91" (UID: "d692722b-f7fd-447c-8b7a-f56cff940d91"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.347661 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d692722b-f7fd-447c-8b7a-f56cff940d91-server-conf" (OuterVolumeSpecName: "server-conf") pod "d692722b-f7fd-447c-8b7a-f56cff940d91" (UID: "d692722b-f7fd-447c-8b7a-f56cff940d91"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.380229 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d692722b-f7fd-447c-8b7a-f56cff940d91-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "d692722b-f7fd-447c-8b7a-f56cff940d91" (UID: "d692722b-f7fd-447c-8b7a-f56cff940d91"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:28:09 crc kubenswrapper[4558]: E0120 17:28:09.396209 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cinder-config-data: secret "cinder-config-data" not found Jan 20 17:28:09 crc kubenswrapper[4558]: E0120 17:28:09.396272 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-config-data podName:c1afb831-f3e4-4356-ab86-713eb0beca39 nodeName:}" failed. No retries permitted until 2026-01-20 17:28:17.396255102 +0000 UTC m=+2791.156593069 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-config-data") pod "cinder-scheduler-0" (UID: "c1afb831-f3e4-4356-ab86-713eb0beca39") : secret "cinder-config-data" not found Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.396345 4558 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d692722b-f7fd-447c-8b7a-f56cff940d91-pod-info\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:09 crc kubenswrapper[4558]: E0120 17:28:09.396367 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cinder-scripts: secret "cinder-scripts" not found Jan 20 17:28:09 crc kubenswrapper[4558]: E0120 17:28:09.396395 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cinder-scheduler-config-data: secret "cinder-scheduler-config-data" not found Jan 20 17:28:09 crc kubenswrapper[4558]: E0120 17:28:09.396425 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-scripts podName:c1afb831-f3e4-4356-ab86-713eb0beca39 nodeName:}" failed. No retries permitted until 2026-01-20 17:28:17.396408761 +0000 UTC m=+2791.156746728 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "scripts" (UniqueName: "kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-scripts") pod "cinder-scheduler-0" (UID: "c1afb831-f3e4-4356-ab86-713eb0beca39") : secret "cinder-scripts" not found Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.396371 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t77q5\" (UniqueName: \"kubernetes.io/projected/d692722b-f7fd-447c-8b7a-f56cff940d91-kube-api-access-t77q5\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.396461 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d692722b-f7fd-447c-8b7a-f56cff940d91-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.396473 4558 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d692722b-f7fd-447c-8b7a-f56cff940d91-plugins-conf\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:09 crc kubenswrapper[4558]: E0120 17:28:09.396473 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.396483 4558 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d692722b-f7fd-447c-8b7a-f56cff940d91-server-conf\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.396495 4558 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d692722b-f7fd-447c-8b7a-f56cff940d91-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.396505 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d692722b-f7fd-447c-8b7a-f56cff940d91-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.396515 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/d692722b-f7fd-447c-8b7a-f56cff940d91-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:09 crc kubenswrapper[4558]: E0120 17:28:09.396526 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-combined-ca-bundle podName:c1afb831-f3e4-4356-ab86-713eb0beca39 nodeName:}" failed. No retries permitted until 2026-01-20 17:28:17.396511414 +0000 UTC m=+2791.156849381 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-combined-ca-bundle") pod "cinder-scheduler-0" (UID: "c1afb831-f3e4-4356-ab86-713eb0beca39") : secret "combined-ca-bundle" not found Jan 20 17:28:09 crc kubenswrapper[4558]: E0120 17:28:09.396567 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-config-data-custom podName:c1afb831-f3e4-4356-ab86-713eb0beca39 nodeName:}" failed. No retries permitted until 2026-01-20 17:28:17.396542773 +0000 UTC m=+2791.156880741 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "config-data-custom" (UniqueName: "kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-config-data-custom") pod "cinder-scheduler-0" (UID: "c1afb831-f3e4-4356-ab86-713eb0beca39") : secret "cinder-scheduler-config-data" not found Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.396602 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.408927 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.452710 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.506839 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-788s8\" (UniqueName: \"kubernetes.io/projected/48cfc6e5-774d-4e7d-8103-f6a3260ea14c-kube-api-access-788s8\") pod \"48cfc6e5-774d-4e7d-8103-f6a3260ea14c\" (UID: \"48cfc6e5-774d-4e7d-8103-f6a3260ea14c\") " Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.506897 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/48cfc6e5-774d-4e7d-8103-f6a3260ea14c-plugins-conf\") pod \"48cfc6e5-774d-4e7d-8103-f6a3260ea14c\" (UID: \"48cfc6e5-774d-4e7d-8103-f6a3260ea14c\") " Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.506992 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/48cfc6e5-774d-4e7d-8103-f6a3260ea14c-server-conf\") pod \"48cfc6e5-774d-4e7d-8103-f6a3260ea14c\" (UID: \"48cfc6e5-774d-4e7d-8103-f6a3260ea14c\") " Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.507036 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/48cfc6e5-774d-4e7d-8103-f6a3260ea14c-config-data\") pod \"48cfc6e5-774d-4e7d-8103-f6a3260ea14c\" (UID: \"48cfc6e5-774d-4e7d-8103-f6a3260ea14c\") " Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.507069 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/48cfc6e5-774d-4e7d-8103-f6a3260ea14c-rabbitmq-plugins\") pod \"48cfc6e5-774d-4e7d-8103-f6a3260ea14c\" (UID: \"48cfc6e5-774d-4e7d-8103-f6a3260ea14c\") " Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.507126 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/48cfc6e5-774d-4e7d-8103-f6a3260ea14c-rabbitmq-erlang-cookie\") pod \"48cfc6e5-774d-4e7d-8103-f6a3260ea14c\" (UID: \"48cfc6e5-774d-4e7d-8103-f6a3260ea14c\") " Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.507188 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"48cfc6e5-774d-4e7d-8103-f6a3260ea14c\" (UID: \"48cfc6e5-774d-4e7d-8103-f6a3260ea14c\") " Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.507228 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/48cfc6e5-774d-4e7d-8103-f6a3260ea14c-pod-info\") pod \"48cfc6e5-774d-4e7d-8103-f6a3260ea14c\" (UID: \"48cfc6e5-774d-4e7d-8103-f6a3260ea14c\") " Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.507276 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/48cfc6e5-774d-4e7d-8103-f6a3260ea14c-erlang-cookie-secret\") pod \"48cfc6e5-774d-4e7d-8103-f6a3260ea14c\" (UID: \"48cfc6e5-774d-4e7d-8103-f6a3260ea14c\") " Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.507309 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/48cfc6e5-774d-4e7d-8103-f6a3260ea14c-rabbitmq-tls\") pod \"48cfc6e5-774d-4e7d-8103-f6a3260ea14c\" (UID: \"48cfc6e5-774d-4e7d-8103-f6a3260ea14c\") " Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.507352 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/48cfc6e5-774d-4e7d-8103-f6a3260ea14c-rabbitmq-confd\") pod \"48cfc6e5-774d-4e7d-8103-f6a3260ea14c\" (UID: \"48cfc6e5-774d-4e7d-8103-f6a3260ea14c\") " Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.507901 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.508640 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/48cfc6e5-774d-4e7d-8103-f6a3260ea14c-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "48cfc6e5-774d-4e7d-8103-f6a3260ea14c" (UID: "48cfc6e5-774d-4e7d-8103-f6a3260ea14c"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.508721 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/48cfc6e5-774d-4e7d-8103-f6a3260ea14c-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "48cfc6e5-774d-4e7d-8103-f6a3260ea14c" (UID: "48cfc6e5-774d-4e7d-8103-f6a3260ea14c"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.508693 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/48cfc6e5-774d-4e7d-8103-f6a3260ea14c-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "48cfc6e5-774d-4e7d-8103-f6a3260ea14c" (UID: "48cfc6e5-774d-4e7d-8103-f6a3260ea14c"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.512254 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/48cfc6e5-774d-4e7d-8103-f6a3260ea14c-kube-api-access-788s8" (OuterVolumeSpecName: "kube-api-access-788s8") pod "48cfc6e5-774d-4e7d-8103-f6a3260ea14c" (UID: "48cfc6e5-774d-4e7d-8103-f6a3260ea14c"). InnerVolumeSpecName "kube-api-access-788s8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.512881 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/48cfc6e5-774d-4e7d-8103-f6a3260ea14c-pod-info" (OuterVolumeSpecName: "pod-info") pod "48cfc6e5-774d-4e7d-8103-f6a3260ea14c" (UID: "48cfc6e5-774d-4e7d-8103-f6a3260ea14c"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.512891 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "persistence") pod "48cfc6e5-774d-4e7d-8103-f6a3260ea14c" (UID: "48cfc6e5-774d-4e7d-8103-f6a3260ea14c"). InnerVolumeSpecName "local-storage04-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.515452 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/48cfc6e5-774d-4e7d-8103-f6a3260ea14c-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "48cfc6e5-774d-4e7d-8103-f6a3260ea14c" (UID: "48cfc6e5-774d-4e7d-8103-f6a3260ea14c"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.515663 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/48cfc6e5-774d-4e7d-8103-f6a3260ea14c-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "48cfc6e5-774d-4e7d-8103-f6a3260ea14c" (UID: "48cfc6e5-774d-4e7d-8103-f6a3260ea14c"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.528454 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/48cfc6e5-774d-4e7d-8103-f6a3260ea14c-config-data" (OuterVolumeSpecName: "config-data") pod "48cfc6e5-774d-4e7d-8103-f6a3260ea14c" (UID: "48cfc6e5-774d-4e7d-8103-f6a3260ea14c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.543188 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/48cfc6e5-774d-4e7d-8103-f6a3260ea14c-server-conf" (OuterVolumeSpecName: "server-conf") pod "48cfc6e5-774d-4e7d-8103-f6a3260ea14c" (UID: "48cfc6e5-774d-4e7d-8103-f6a3260ea14c"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.568642 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/48cfc6e5-774d-4e7d-8103-f6a3260ea14c-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "48cfc6e5-774d-4e7d-8103-f6a3260ea14c" (UID: "48cfc6e5-774d-4e7d-8103-f6a3260ea14c"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.610328 4558 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/48cfc6e5-774d-4e7d-8103-f6a3260ea14c-server-conf\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.611291 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/48cfc6e5-774d-4e7d-8103-f6a3260ea14c-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.611376 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/48cfc6e5-774d-4e7d-8103-f6a3260ea14c-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.611464 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/48cfc6e5-774d-4e7d-8103-f6a3260ea14c-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.611534 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" " Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.611606 4558 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/48cfc6e5-774d-4e7d-8103-f6a3260ea14c-pod-info\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.611668 4558 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/48cfc6e5-774d-4e7d-8103-f6a3260ea14c-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.611718 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/48cfc6e5-774d-4e7d-8103-f6a3260ea14c-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.611764 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/48cfc6e5-774d-4e7d-8103-f6a3260ea14c-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.611830 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-788s8\" (UniqueName: \"kubernetes.io/projected/48cfc6e5-774d-4e7d-8103-f6a3260ea14c-kube-api-access-788s8\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.611908 4558 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/48cfc6e5-774d-4e7d-8103-f6a3260ea14c-plugins-conf\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.631445 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc" Jan 20 17:28:09 crc kubenswrapper[4558]: I0120 17:28:09.713268 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:10 crc kubenswrapper[4558]: I0120 17:28:10.118749 4558 
generic.go:334] "Generic (PLEG): container finished" podID="48cfc6e5-774d-4e7d-8103-f6a3260ea14c" containerID="94d9e6b06e5975fa2b75519bde1be0a449c0ca63e7bbf3340176be25c0d5874c" exitCode=0 Jan 20 17:28:10 crc kubenswrapper[4558]: I0120 17:28:10.118833 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" event={"ID":"48cfc6e5-774d-4e7d-8103-f6a3260ea14c","Type":"ContainerDied","Data":"94d9e6b06e5975fa2b75519bde1be0a449c0ca63e7bbf3340176be25c0d5874c"} Jan 20 17:28:10 crc kubenswrapper[4558]: I0120 17:28:10.118906 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:28:10 crc kubenswrapper[4558]: I0120 17:28:10.119141 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" event={"ID":"48cfc6e5-774d-4e7d-8103-f6a3260ea14c","Type":"ContainerDied","Data":"57f6281f6524d46ddafe1f044790509fd67d8d7b1d11b6ca7151d5579ff22e21"} Jan 20 17:28:10 crc kubenswrapper[4558]: I0120 17:28:10.119202 4558 scope.go:117] "RemoveContainer" containerID="94d9e6b06e5975fa2b75519bde1be0a449c0ca63e7bbf3340176be25c0d5874c" Jan 20 17:28:10 crc kubenswrapper[4558]: I0120 17:28:10.121822 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-0" event={"ID":"d692722b-f7fd-447c-8b7a-f56cff940d91","Type":"ContainerDied","Data":"29eb0229195cd308f45313fbfc05f6b3b4a6af8e6f77a17c66a825db11c76f1c"} Jan 20 17:28:10 crc kubenswrapper[4558]: I0120 17:28:10.121893 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:28:10 crc kubenswrapper[4558]: I0120 17:28:10.137186 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/swift-proxy-7459567f99-gx6dr" podUID="2bee9ff9-52bb-4dae-971c-8c6236a4e563" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.1.23:8080/healthcheck\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Jan 20 17:28:10 crc kubenswrapper[4558]: I0120 17:28:10.137343 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/swift-proxy-7459567f99-gx6dr" podUID="2bee9ff9-52bb-4dae-971c-8c6236a4e563" containerName="proxy-server" probeResult="failure" output="Get \"https://10.217.1.23:8080/healthcheck\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Jan 20 17:28:10 crc kubenswrapper[4558]: I0120 17:28:10.163081 4558 scope.go:117] "RemoveContainer" containerID="253c7b1bb698ada295155d4391adcd6900c7ef92966fcf1c968bb5c7832f4326" Jan 20 17:28:10 crc kubenswrapper[4558]: I0120 17:28:10.171572 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:28:10 crc kubenswrapper[4558]: I0120 17:28:10.182873 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:28:10 crc kubenswrapper[4558]: I0120 17:28:10.188298 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-0"] Jan 20 17:28:10 crc kubenswrapper[4558]: I0120 17:28:10.192867 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-0"] Jan 20 17:28:10 crc kubenswrapper[4558]: I0120 17:28:10.194493 4558 scope.go:117] "RemoveContainer" 
containerID="94d9e6b06e5975fa2b75519bde1be0a449c0ca63e7bbf3340176be25c0d5874c" Jan 20 17:28:10 crc kubenswrapper[4558]: E0120 17:28:10.195340 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"94d9e6b06e5975fa2b75519bde1be0a449c0ca63e7bbf3340176be25c0d5874c\": container with ID starting with 94d9e6b06e5975fa2b75519bde1be0a449c0ca63e7bbf3340176be25c0d5874c not found: ID does not exist" containerID="94d9e6b06e5975fa2b75519bde1be0a449c0ca63e7bbf3340176be25c0d5874c" Jan 20 17:28:10 crc kubenswrapper[4558]: I0120 17:28:10.195379 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"94d9e6b06e5975fa2b75519bde1be0a449c0ca63e7bbf3340176be25c0d5874c"} err="failed to get container status \"94d9e6b06e5975fa2b75519bde1be0a449c0ca63e7bbf3340176be25c0d5874c\": rpc error: code = NotFound desc = could not find container \"94d9e6b06e5975fa2b75519bde1be0a449c0ca63e7bbf3340176be25c0d5874c\": container with ID starting with 94d9e6b06e5975fa2b75519bde1be0a449c0ca63e7bbf3340176be25c0d5874c not found: ID does not exist" Jan 20 17:28:10 crc kubenswrapper[4558]: I0120 17:28:10.195406 4558 scope.go:117] "RemoveContainer" containerID="253c7b1bb698ada295155d4391adcd6900c7ef92966fcf1c968bb5c7832f4326" Jan 20 17:28:10 crc kubenswrapper[4558]: E0120 17:28:10.195798 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"253c7b1bb698ada295155d4391adcd6900c7ef92966fcf1c968bb5c7832f4326\": container with ID starting with 253c7b1bb698ada295155d4391adcd6900c7ef92966fcf1c968bb5c7832f4326 not found: ID does not exist" containerID="253c7b1bb698ada295155d4391adcd6900c7ef92966fcf1c968bb5c7832f4326" Jan 20 17:28:10 crc kubenswrapper[4558]: I0120 17:28:10.195830 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"253c7b1bb698ada295155d4391adcd6900c7ef92966fcf1c968bb5c7832f4326"} err="failed to get container status \"253c7b1bb698ada295155d4391adcd6900c7ef92966fcf1c968bb5c7832f4326\": rpc error: code = NotFound desc = could not find container \"253c7b1bb698ada295155d4391adcd6900c7ef92966fcf1c968bb5c7832f4326\": container with ID starting with 253c7b1bb698ada295155d4391adcd6900c7ef92966fcf1c968bb5c7832f4326 not found: ID does not exist" Jan 20 17:28:10 crc kubenswrapper[4558]: I0120 17:28:10.195853 4558 scope.go:117] "RemoveContainer" containerID="e662a87a8dfea3af31619f4746b123264a47d980ca2258670d31ac3490d07672" Jan 20 17:28:10 crc kubenswrapper[4558]: I0120 17:28:10.226190 4558 scope.go:117] "RemoveContainer" containerID="dfad3104c55017bd8727f5d89d9c30642f8493fbe621ac636c457844689b6a39" Jan 20 17:28:10 crc kubenswrapper[4558]: I0120 17:28:10.579432 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="125597fc-d3b4-44c1-a0a6-7dccbaebe7dd" path="/var/lib/kubelet/pods/125597fc-d3b4-44c1-a0a6-7dccbaebe7dd/volumes" Jan 20 17:28:10 crc kubenswrapper[4558]: I0120 17:28:10.580469 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3b9858cf-2020-458e-bcf6-407c6853a962" path="/var/lib/kubelet/pods/3b9858cf-2020-458e-bcf6-407c6853a962/volumes" Jan 20 17:28:10 crc kubenswrapper[4558]: I0120 17:28:10.581446 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="48cfc6e5-774d-4e7d-8103-f6a3260ea14c" path="/var/lib/kubelet/pods/48cfc6e5-774d-4e7d-8103-f6a3260ea14c/volumes" Jan 20 17:28:10 crc kubenswrapper[4558]: I0120 17:28:10.583045 4558 
kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d692722b-f7fd-447c-8b7a-f56cff940d91" path="/var/lib/kubelet/pods/d692722b-f7fd-447c-8b7a-f56cff940d91/volumes" Jan 20 17:28:11 crc kubenswrapper[4558]: E0120 17:28:11.566365 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="6d33b2d5d8fe60d82930b4864fabfe53d6a679fc712a4ba67d60a4da40feed96" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:28:11 crc kubenswrapper[4558]: E0120 17:28:11.567646 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="6d33b2d5d8fe60d82930b4864fabfe53d6a679fc712a4ba67d60a4da40feed96" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:28:11 crc kubenswrapper[4558]: E0120 17:28:11.568720 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="6d33b2d5d8fe60d82930b4864fabfe53d6a679fc712a4ba67d60a4da40feed96" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:28:11 crc kubenswrapper[4558]: E0120 17:28:11.568750 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="3973df1a-dd5a-417e-80a9-6653fb036470" containerName="nova-cell0-conductor-conductor" Jan 20 17:28:12 crc kubenswrapper[4558]: E0120 17:28:12.161528 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 17:28:12 crc kubenswrapper[4558]: E0120 17:28:12.161626 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/1b7c6fb3-f30d-4dde-8502-e7d840719520-operator-scripts podName:1b7c6fb3-f30d-4dde-8502-e7d840719520 nodeName:}" failed. No retries permitted until 2026-01-20 17:28:20.161606175 +0000 UTC m=+2793.921944142 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/1b7c6fb3-f30d-4dde-8502-e7d840719520-operator-scripts") pod "glance-db-create-rt2fh" (UID: "1b7c6fb3-f30d-4dde-8502-e7d840719520") : configmap "openstack-scripts" not found Jan 20 17:28:13 crc kubenswrapper[4558]: I0120 17:28:13.314124 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-lgbfw" Jan 20 17:28:13 crc kubenswrapper[4558]: I0120 17:28:13.369805 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-7947c54dc7-w2kxl"] Jan 20 17:28:13 crc kubenswrapper[4558]: I0120 17:28:13.370087 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7947c54dc7-w2kxl" podUID="43ad40c9-142e-42c2-b46b-e998fb487f42" containerName="dnsmasq-dns" containerID="cri-o://3130f3ec22ad881b33c249b0c38e752bf5b2698967894c2673b4174032a505d0" gracePeriod=10 Jan 20 17:28:13 crc kubenswrapper[4558]: I0120 17:28:13.766745 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7947c54dc7-w2kxl" Jan 20 17:28:13 crc kubenswrapper[4558]: I0120 17:28:13.883150 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/43ad40c9-142e-42c2-b46b-e998fb487f42-config\") pod \"43ad40c9-142e-42c2-b46b-e998fb487f42\" (UID: \"43ad40c9-142e-42c2-b46b-e998fb487f42\") " Jan 20 17:28:13 crc kubenswrapper[4558]: I0120 17:28:13.883311 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/43ad40c9-142e-42c2-b46b-e998fb487f42-dns-swift-storage-0\") pod \"43ad40c9-142e-42c2-b46b-e998fb487f42\" (UID: \"43ad40c9-142e-42c2-b46b-e998fb487f42\") " Jan 20 17:28:13 crc kubenswrapper[4558]: I0120 17:28:13.883383 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/43ad40c9-142e-42c2-b46b-e998fb487f42-dnsmasq-svc\") pod \"43ad40c9-142e-42c2-b46b-e998fb487f42\" (UID: \"43ad40c9-142e-42c2-b46b-e998fb487f42\") " Jan 20 17:28:13 crc kubenswrapper[4558]: I0120 17:28:13.883405 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lbh2l\" (UniqueName: \"kubernetes.io/projected/43ad40c9-142e-42c2-b46b-e998fb487f42-kube-api-access-lbh2l\") pod \"43ad40c9-142e-42c2-b46b-e998fb487f42\" (UID: \"43ad40c9-142e-42c2-b46b-e998fb487f42\") " Jan 20 17:28:13 crc kubenswrapper[4558]: I0120 17:28:13.890049 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43ad40c9-142e-42c2-b46b-e998fb487f42-kube-api-access-lbh2l" (OuterVolumeSpecName: "kube-api-access-lbh2l") pod "43ad40c9-142e-42c2-b46b-e998fb487f42" (UID: "43ad40c9-142e-42c2-b46b-e998fb487f42"). InnerVolumeSpecName "kube-api-access-lbh2l". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:28:13 crc kubenswrapper[4558]: I0120 17:28:13.914959 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43ad40c9-142e-42c2-b46b-e998fb487f42-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "43ad40c9-142e-42c2-b46b-e998fb487f42" (UID: "43ad40c9-142e-42c2-b46b-e998fb487f42"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:28:13 crc kubenswrapper[4558]: I0120 17:28:13.915139 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43ad40c9-142e-42c2-b46b-e998fb487f42-dnsmasq-svc" (OuterVolumeSpecName: "dnsmasq-svc") pod "43ad40c9-142e-42c2-b46b-e998fb487f42" (UID: "43ad40c9-142e-42c2-b46b-e998fb487f42"). InnerVolumeSpecName "dnsmasq-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:28:13 crc kubenswrapper[4558]: I0120 17:28:13.918150 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43ad40c9-142e-42c2-b46b-e998fb487f42-config" (OuterVolumeSpecName: "config") pod "43ad40c9-142e-42c2-b46b-e998fb487f42" (UID: "43ad40c9-142e-42c2-b46b-e998fb487f42"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:28:13 crc kubenswrapper[4558]: I0120 17:28:13.985561 4558 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/43ad40c9-142e-42c2-b46b-e998fb487f42-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:13 crc kubenswrapper[4558]: I0120 17:28:13.985597 4558 reconciler_common.go:293] "Volume detached for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/43ad40c9-142e-42c2-b46b-e998fb487f42-dnsmasq-svc\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:13 crc kubenswrapper[4558]: I0120 17:28:13.985626 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lbh2l\" (UniqueName: \"kubernetes.io/projected/43ad40c9-142e-42c2-b46b-e998fb487f42-kube-api-access-lbh2l\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:13 crc kubenswrapper[4558]: I0120 17:28:13.985636 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/43ad40c9-142e-42c2-b46b-e998fb487f42-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:14 crc kubenswrapper[4558]: I0120 17:28:14.179086 4558 generic.go:334] "Generic (PLEG): container finished" podID="43ad40c9-142e-42c2-b46b-e998fb487f42" containerID="3130f3ec22ad881b33c249b0c38e752bf5b2698967894c2673b4174032a505d0" exitCode=0 Jan 20 17:28:14 crc kubenswrapper[4558]: I0120 17:28:14.179142 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7947c54dc7-w2kxl" event={"ID":"43ad40c9-142e-42c2-b46b-e998fb487f42","Type":"ContainerDied","Data":"3130f3ec22ad881b33c249b0c38e752bf5b2698967894c2673b4174032a505d0"} Jan 20 17:28:14 crc kubenswrapper[4558]: I0120 17:28:14.179198 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7947c54dc7-w2kxl" event={"ID":"43ad40c9-142e-42c2-b46b-e998fb487f42","Type":"ContainerDied","Data":"45097c1a23cd594c174b20abc1a35ba156d33e0f07339e00ac7ed28f0b61e82c"} Jan 20 17:28:14 crc kubenswrapper[4558]: I0120 17:28:14.179226 4558 scope.go:117] "RemoveContainer" containerID="3130f3ec22ad881b33c249b0c38e752bf5b2698967894c2673b4174032a505d0" Jan 20 17:28:14 crc kubenswrapper[4558]: I0120 17:28:14.179250 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7947c54dc7-w2kxl" Jan 20 17:28:14 crc kubenswrapper[4558]: I0120 17:28:14.209675 4558 scope.go:117] "RemoveContainer" containerID="f8313c346808cf6301fc20ee128dcf9490dacea0bfd25b44a105e2f69473c4d2" Jan 20 17:28:14 crc kubenswrapper[4558]: I0120 17:28:14.215189 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-7947c54dc7-w2kxl"] Jan 20 17:28:14 crc kubenswrapper[4558]: I0120 17:28:14.219595 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-7947c54dc7-w2kxl"] Jan 20 17:28:14 crc kubenswrapper[4558]: I0120 17:28:14.246357 4558 scope.go:117] "RemoveContainer" containerID="3130f3ec22ad881b33c249b0c38e752bf5b2698967894c2673b4174032a505d0" Jan 20 17:28:14 crc kubenswrapper[4558]: E0120 17:28:14.246900 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3130f3ec22ad881b33c249b0c38e752bf5b2698967894c2673b4174032a505d0\": container with ID starting with 3130f3ec22ad881b33c249b0c38e752bf5b2698967894c2673b4174032a505d0 not found: ID does not exist" containerID="3130f3ec22ad881b33c249b0c38e752bf5b2698967894c2673b4174032a505d0" Jan 20 17:28:14 crc kubenswrapper[4558]: I0120 17:28:14.246937 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3130f3ec22ad881b33c249b0c38e752bf5b2698967894c2673b4174032a505d0"} err="failed to get container status \"3130f3ec22ad881b33c249b0c38e752bf5b2698967894c2673b4174032a505d0\": rpc error: code = NotFound desc = could not find container \"3130f3ec22ad881b33c249b0c38e752bf5b2698967894c2673b4174032a505d0\": container with ID starting with 3130f3ec22ad881b33c249b0c38e752bf5b2698967894c2673b4174032a505d0 not found: ID does not exist" Jan 20 17:28:14 crc kubenswrapper[4558]: I0120 17:28:14.246964 4558 scope.go:117] "RemoveContainer" containerID="f8313c346808cf6301fc20ee128dcf9490dacea0bfd25b44a105e2f69473c4d2" Jan 20 17:28:14 crc kubenswrapper[4558]: E0120 17:28:14.247350 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f8313c346808cf6301fc20ee128dcf9490dacea0bfd25b44a105e2f69473c4d2\": container with ID starting with f8313c346808cf6301fc20ee128dcf9490dacea0bfd25b44a105e2f69473c4d2 not found: ID does not exist" containerID="f8313c346808cf6301fc20ee128dcf9490dacea0bfd25b44a105e2f69473c4d2" Jan 20 17:28:14 crc kubenswrapper[4558]: I0120 17:28:14.247376 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f8313c346808cf6301fc20ee128dcf9490dacea0bfd25b44a105e2f69473c4d2"} err="failed to get container status \"f8313c346808cf6301fc20ee128dcf9490dacea0bfd25b44a105e2f69473c4d2\": rpc error: code = NotFound desc = could not find container \"f8313c346808cf6301fc20ee128dcf9490dacea0bfd25b44a105e2f69473c4d2\": container with ID starting with f8313c346808cf6301fc20ee128dcf9490dacea0bfd25b44a105e2f69473c4d2 not found: ID does not exist" Jan 20 17:28:14 crc kubenswrapper[4558]: I0120 17:28:14.575005 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43ad40c9-142e-42c2-b46b-e998fb487f42" path="/var/lib/kubelet/pods/43ad40c9-142e-42c2-b46b-e998fb487f42/volumes" Jan 20 17:28:16 crc kubenswrapper[4558]: E0120 17:28:16.565688 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: 
container is stopping, stdout: , stderr: , exit code -1" containerID="6d33b2d5d8fe60d82930b4864fabfe53d6a679fc712a4ba67d60a4da40feed96" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:28:16 crc kubenswrapper[4558]: E0120 17:28:16.568490 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="6d33b2d5d8fe60d82930b4864fabfe53d6a679fc712a4ba67d60a4da40feed96" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:28:16 crc kubenswrapper[4558]: E0120 17:28:16.569892 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="6d33b2d5d8fe60d82930b4864fabfe53d6a679fc712a4ba67d60a4da40feed96" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:28:16 crc kubenswrapper[4558]: E0120 17:28:16.569941 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="3973df1a-dd5a-417e-80a9-6653fb036470" containerName="nova-cell0-conductor-conductor" Jan 20 17:28:17 crc kubenswrapper[4558]: E0120 17:28:17.467618 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cinder-config-data: secret "cinder-config-data" not found Jan 20 17:28:17 crc kubenswrapper[4558]: E0120 17:28:17.468083 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-config-data podName:c1afb831-f3e4-4356-ab86-713eb0beca39 nodeName:}" failed. No retries permitted until 2026-01-20 17:28:33.468060368 +0000 UTC m=+2807.228398336 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-config-data") pod "cinder-scheduler-0" (UID: "c1afb831-f3e4-4356-ab86-713eb0beca39") : secret "cinder-config-data" not found Jan 20 17:28:17 crc kubenswrapper[4558]: E0120 17:28:17.467624 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:28:17 crc kubenswrapper[4558]: E0120 17:28:17.467673 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cinder-scheduler-config-data: secret "cinder-scheduler-config-data" not found Jan 20 17:28:17 crc kubenswrapper[4558]: E0120 17:28:17.468200 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-combined-ca-bundle podName:c1afb831-f3e4-4356-ab86-713eb0beca39 nodeName:}" failed. No retries permitted until 2026-01-20 17:28:33.468179062 +0000 UTC m=+2807.228517029 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-combined-ca-bundle") pod "cinder-scheduler-0" (UID: "c1afb831-f3e4-4356-ab86-713eb0beca39") : secret "combined-ca-bundle" not found Jan 20 17:28:17 crc kubenswrapper[4558]: E0120 17:28:17.467679 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cinder-scripts: secret "cinder-scripts" not found Jan 20 17:28:17 crc kubenswrapper[4558]: E0120 17:28:17.468248 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-config-data-custom podName:c1afb831-f3e4-4356-ab86-713eb0beca39 nodeName:}" failed. No retries permitted until 2026-01-20 17:28:33.468228044 +0000 UTC m=+2807.228566011 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "config-data-custom" (UniqueName: "kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-config-data-custom") pod "cinder-scheduler-0" (UID: "c1afb831-f3e4-4356-ab86-713eb0beca39") : secret "cinder-scheduler-config-data" not found Jan 20 17:28:17 crc kubenswrapper[4558]: E0120 17:28:17.468268 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-scripts podName:c1afb831-f3e4-4356-ab86-713eb0beca39 nodeName:}" failed. No retries permitted until 2026-01-20 17:28:33.468259914 +0000 UTC m=+2807.228597880 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "scripts" (UniqueName: "kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-scripts") pod "cinder-scheduler-0" (UID: "c1afb831-f3e4-4356-ab86-713eb0beca39") : secret "cinder-scripts" not found Jan 20 17:28:20 crc kubenswrapper[4558]: E0120 17:28:20.216279 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 17:28:20 crc kubenswrapper[4558]: E0120 17:28:20.216639 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/1b7c6fb3-f30d-4dde-8502-e7d840719520-operator-scripts podName:1b7c6fb3-f30d-4dde-8502-e7d840719520 nodeName:}" failed. No retries permitted until 2026-01-20 17:28:36.216623088 +0000 UTC m=+2809.976961055 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/1b7c6fb3-f30d-4dde-8502-e7d840719520-operator-scripts") pod "glance-db-create-rt2fh" (UID: "1b7c6fb3-f30d-4dde-8502-e7d840719520") : configmap "openstack-scripts" not found Jan 20 17:28:21 crc kubenswrapper[4558]: E0120 17:28:21.564630 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="6d33b2d5d8fe60d82930b4864fabfe53d6a679fc712a4ba67d60a4da40feed96" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:28:21 crc kubenswrapper[4558]: E0120 17:28:21.567389 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="6d33b2d5d8fe60d82930b4864fabfe53d6a679fc712a4ba67d60a4da40feed96" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:28:21 crc kubenswrapper[4558]: E0120 17:28:21.568803 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="6d33b2d5d8fe60d82930b4864fabfe53d6a679fc712a4ba67d60a4da40feed96" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:28:21 crc kubenswrapper[4558]: E0120 17:28:21.568895 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="3973df1a-dd5a-417e-80a9-6653fb036470" containerName="nova-cell0-conductor-conductor" Jan 20 17:28:26 crc kubenswrapper[4558]: E0120 17:28:26.578840 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="6d33b2d5d8fe60d82930b4864fabfe53d6a679fc712a4ba67d60a4da40feed96" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:28:26 crc kubenswrapper[4558]: E0120 17:28:26.583102 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="6d33b2d5d8fe60d82930b4864fabfe53d6a679fc712a4ba67d60a4da40feed96" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:28:26 crc kubenswrapper[4558]: E0120 17:28:26.585374 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="6d33b2d5d8fe60d82930b4864fabfe53d6a679fc712a4ba67d60a4da40feed96" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:28:26 crc kubenswrapper[4558]: E0120 17:28:26.585441 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="3973df1a-dd5a-417e-80a9-6653fb036470" containerName="nova-cell0-conductor-conductor" Jan 20 17:28:27 crc kubenswrapper[4558]: I0120 17:28:27.330381 4558 patch_prober.go:28] interesting 
pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 17:28:27 crc kubenswrapper[4558]: I0120 17:28:27.330455 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 17:28:31 crc kubenswrapper[4558]: E0120 17:28:31.564212 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="6d33b2d5d8fe60d82930b4864fabfe53d6a679fc712a4ba67d60a4da40feed96" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:28:31 crc kubenswrapper[4558]: E0120 17:28:31.566445 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="6d33b2d5d8fe60d82930b4864fabfe53d6a679fc712a4ba67d60a4da40feed96" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:28:31 crc kubenswrapper[4558]: E0120 17:28:31.567682 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="6d33b2d5d8fe60d82930b4864fabfe53d6a679fc712a4ba67d60a4da40feed96" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:28:31 crc kubenswrapper[4558]: E0120 17:28:31.567721 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="3973df1a-dd5a-417e-80a9-6653fb036470" containerName="nova-cell0-conductor-conductor" Jan 20 17:28:32 crc kubenswrapper[4558]: I0120 17:28:32.860451 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.030834 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swift\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"8804f9f1-60d4-4fa1-9333-9f9e01b3c68a\" (UID: \"8804f9f1-60d4-4fa1-9333-9f9e01b3c68a\") " Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.030915 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/8804f9f1-60d4-4fa1-9333-9f9e01b3c68a-cache\") pod \"8804f9f1-60d4-4fa1-9333-9f9e01b3c68a\" (UID: \"8804f9f1-60d4-4fa1-9333-9f9e01b3c68a\") " Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.031080 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ttrhb\" (UniqueName: \"kubernetes.io/projected/8804f9f1-60d4-4fa1-9333-9f9e01b3c68a-kube-api-access-ttrhb\") pod \"8804f9f1-60d4-4fa1-9333-9f9e01b3c68a\" (UID: \"8804f9f1-60d4-4fa1-9333-9f9e01b3c68a\") " Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.031203 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/8804f9f1-60d4-4fa1-9333-9f9e01b3c68a-lock\") pod \"8804f9f1-60d4-4fa1-9333-9f9e01b3c68a\" (UID: \"8804f9f1-60d4-4fa1-9333-9f9e01b3c68a\") " Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.031247 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/8804f9f1-60d4-4fa1-9333-9f9e01b3c68a-etc-swift\") pod \"8804f9f1-60d4-4fa1-9333-9f9e01b3c68a\" (UID: \"8804f9f1-60d4-4fa1-9333-9f9e01b3c68a\") " Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.031810 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8804f9f1-60d4-4fa1-9333-9f9e01b3c68a-cache" (OuterVolumeSpecName: "cache") pod "8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" (UID: "8804f9f1-60d4-4fa1-9333-9f9e01b3c68a"). InnerVolumeSpecName "cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.032069 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8804f9f1-60d4-4fa1-9333-9f9e01b3c68a-lock" (OuterVolumeSpecName: "lock") pod "8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" (UID: "8804f9f1-60d4-4fa1-9333-9f9e01b3c68a"). InnerVolumeSpecName "lock". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.032384 4558 reconciler_common.go:293] "Volume detached for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/8804f9f1-60d4-4fa1-9333-9f9e01b3c68a-cache\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.032406 4558 reconciler_common.go:293] "Volume detached for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/8804f9f1-60d4-4fa1-9333-9f9e01b3c68a-lock\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.039013 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage12-crc" (OuterVolumeSpecName: "swift") pod "8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" (UID: "8804f9f1-60d4-4fa1-9333-9f9e01b3c68a"). InnerVolumeSpecName "local-storage12-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.048764 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8804f9f1-60d4-4fa1-9333-9f9e01b3c68a-kube-api-access-ttrhb" (OuterVolumeSpecName: "kube-api-access-ttrhb") pod "8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" (UID: "8804f9f1-60d4-4fa1-9333-9f9e01b3c68a"). InnerVolumeSpecName "kube-api-access-ttrhb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.063646 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8804f9f1-60d4-4fa1-9333-9f9e01b3c68a-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" (UID: "8804f9f1-60d4-4fa1-9333-9f9e01b3c68a"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.134381 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" " Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.134415 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ttrhb\" (UniqueName: \"kubernetes.io/projected/8804f9f1-60d4-4fa1-9333-9f9e01b3c68a-kube-api-access-ttrhb\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.134426 4558 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/8804f9f1-60d4-4fa1-9333-9f9e01b3c68a-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.148652 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage12-crc" (UniqueName: "kubernetes.io/local-volume/local-storage12-crc") on node "crc" Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.236971 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.301229 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_neutron-665b4c9c8d-9tcx6_6dc46db7-9948-4c7d-b0a3-c767ffd58b4a/neutron-api/0.log" Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.301364 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-665b4c9c8d-9tcx6" Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.337532 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/6dc46db7-9948-4c7d-b0a3-c767ffd58b4a-ovndb-tls-certs\") pod \"6dc46db7-9948-4c7d-b0a3-c767ffd58b4a\" (UID: \"6dc46db7-9948-4c7d-b0a3-c767ffd58b4a\") " Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.337572 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/6dc46db7-9948-4c7d-b0a3-c767ffd58b4a-config\") pod \"6dc46db7-9948-4c7d-b0a3-c767ffd58b4a\" (UID: \"6dc46db7-9948-4c7d-b0a3-c767ffd58b4a\") " Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.337665 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6dc46db7-9948-4c7d-b0a3-c767ffd58b4a-internal-tls-certs\") pod \"6dc46db7-9948-4c7d-b0a3-c767ffd58b4a\" (UID: \"6dc46db7-9948-4c7d-b0a3-c767ffd58b4a\") " Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.337683 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6dc46db7-9948-4c7d-b0a3-c767ffd58b4a-public-tls-certs\") pod \"6dc46db7-9948-4c7d-b0a3-c767ffd58b4a\" (UID: \"6dc46db7-9948-4c7d-b0a3-c767ffd58b4a\") " Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.337721 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/6dc46db7-9948-4c7d-b0a3-c767ffd58b4a-httpd-config\") pod \"6dc46db7-9948-4c7d-b0a3-c767ffd58b4a\" (UID: \"6dc46db7-9948-4c7d-b0a3-c767ffd58b4a\") " Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.337753 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4dh28\" (UniqueName: \"kubernetes.io/projected/6dc46db7-9948-4c7d-b0a3-c767ffd58b4a-kube-api-access-4dh28\") pod \"6dc46db7-9948-4c7d-b0a3-c767ffd58b4a\" (UID: \"6dc46db7-9948-4c7d-b0a3-c767ffd58b4a\") " Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.337843 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6dc46db7-9948-4c7d-b0a3-c767ffd58b4a-combined-ca-bundle\") pod \"6dc46db7-9948-4c7d-b0a3-c767ffd58b4a\" (UID: \"6dc46db7-9948-4c7d-b0a3-c767ffd58b4a\") " Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.342360 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6dc46db7-9948-4c7d-b0a3-c767ffd58b4a-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "6dc46db7-9948-4c7d-b0a3-c767ffd58b4a" (UID: "6dc46db7-9948-4c7d-b0a3-c767ffd58b4a"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.354995 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6dc46db7-9948-4c7d-b0a3-c767ffd58b4a-kube-api-access-4dh28" (OuterVolumeSpecName: "kube-api-access-4dh28") pod "6dc46db7-9948-4c7d-b0a3-c767ffd58b4a" (UID: "6dc46db7-9948-4c7d-b0a3-c767ffd58b4a"). InnerVolumeSpecName "kube-api-access-4dh28". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.374957 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_neutron-665b4c9c8d-9tcx6_6dc46db7-9948-4c7d-b0a3-c767ffd58b4a/neutron-api/0.log" Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.375009 4558 generic.go:334] "Generic (PLEG): container finished" podID="6dc46db7-9948-4c7d-b0a3-c767ffd58b4a" containerID="76433ae79aade2e04de13538618917ea1b5001e82383e84d8122e38de55ebbe8" exitCode=137 Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.375083 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-665b4c9c8d-9tcx6" event={"ID":"6dc46db7-9948-4c7d-b0a3-c767ffd58b4a","Type":"ContainerDied","Data":"76433ae79aade2e04de13538618917ea1b5001e82383e84d8122e38de55ebbe8"} Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.375116 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-665b4c9c8d-9tcx6" event={"ID":"6dc46db7-9948-4c7d-b0a3-c767ffd58b4a","Type":"ContainerDied","Data":"1bdb7fc42551eedc4e540256310d9b189a67291e02eb1547c3e8edcba58be288"} Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.375138 4558 scope.go:117] "RemoveContainer" containerID="2b441621415d66b4dd582cca5044009b53d19bc346f62f7de3d1c998664de90b" Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.375443 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6dc46db7-9948-4c7d-b0a3-c767ffd58b4a-config" (OuterVolumeSpecName: "config") pod "6dc46db7-9948-4c7d-b0a3-c767ffd58b4a" (UID: "6dc46db7-9948-4c7d-b0a3-c767ffd58b4a"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.375932 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-665b4c9c8d-9tcx6" Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.378451 4558 generic.go:334] "Generic (PLEG): container finished" podID="b2a7ece8-32ed-4eb3-ab85-27ff4361622d" containerID="15e562b45f89d1f5fb52993145b8f16fbd8c5b35f2f80d2df9e160bf13a36eaf" exitCode=137 Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.378488 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"b2a7ece8-32ed-4eb3-ab85-27ff4361622d","Type":"ContainerDied","Data":"15e562b45f89d1f5fb52993145b8f16fbd8c5b35f2f80d2df9e160bf13a36eaf"} Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.383433 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6dc46db7-9948-4c7d-b0a3-c767ffd58b4a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6dc46db7-9948-4c7d-b0a3-c767ffd58b4a" (UID: "6dc46db7-9948-4c7d-b0a3-c767ffd58b4a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.387647 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6dc46db7-9948-4c7d-b0a3-c767ffd58b4a-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "6dc46db7-9948-4c7d-b0a3-c767ffd58b4a" (UID: "6dc46db7-9948-4c7d-b0a3-c767ffd58b4a"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.389461 4558 generic.go:334] "Generic (PLEG): container finished" podID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" containerID="94d135b3e27081e915874cdf29659d51450157f87f3b4f37332d5d83a3c11459" exitCode=137 Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.389521 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6dc46db7-9948-4c7d-b0a3-c767ffd58b4a-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "6dc46db7-9948-4c7d-b0a3-c767ffd58b4a" (UID: "6dc46db7-9948-4c7d-b0a3-c767ffd58b4a"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.389550 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.389555 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"8804f9f1-60d4-4fa1-9333-9f9e01b3c68a","Type":"ContainerDied","Data":"94d135b3e27081e915874cdf29659d51450157f87f3b4f37332d5d83a3c11459"} Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.390300 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"8804f9f1-60d4-4fa1-9333-9f9e01b3c68a","Type":"ContainerDied","Data":"cbd1808ad849a969a4b336b424c07db8bc3fabb8d02d150668692d6b2fdabd39"} Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.404741 4558 scope.go:117] "RemoveContainer" containerID="76433ae79aade2e04de13538618917ea1b5001e82383e84d8122e38de55ebbe8" Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.405645 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6dc46db7-9948-4c7d-b0a3-c767ffd58b4a-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "6dc46db7-9948-4c7d-b0a3-c767ffd58b4a" (UID: "6dc46db7-9948-4c7d-b0a3-c767ffd58b4a"). InnerVolumeSpecName "ovndb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.426344 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-storage-0"] Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.431909 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/swift-storage-0"] Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.436611 4558 scope.go:117] "RemoveContainer" containerID="2b441621415d66b4dd582cca5044009b53d19bc346f62f7de3d1c998664de90b" Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.439609 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6dc46db7-9948-4c7d-b0a3-c767ffd58b4a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.439638 4558 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/6dc46db7-9948-4c7d-b0a3-c767ffd58b4a-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.439649 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/6dc46db7-9948-4c7d-b0a3-c767ffd58b4a-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.439661 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6dc46db7-9948-4c7d-b0a3-c767ffd58b4a-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.439671 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6dc46db7-9948-4c7d-b0a3-c767ffd58b4a-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.439680 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/6dc46db7-9948-4c7d-b0a3-c767ffd58b4a-httpd-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.439691 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4dh28\" (UniqueName: \"kubernetes.io/projected/6dc46db7-9948-4c7d-b0a3-c767ffd58b4a-kube-api-access-4dh28\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:33 crc kubenswrapper[4558]: E0120 17:28:33.440115 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2b441621415d66b4dd582cca5044009b53d19bc346f62f7de3d1c998664de90b\": container with ID starting with 2b441621415d66b4dd582cca5044009b53d19bc346f62f7de3d1c998664de90b not found: ID does not exist" containerID="2b441621415d66b4dd582cca5044009b53d19bc346f62f7de3d1c998664de90b" Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.440150 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2b441621415d66b4dd582cca5044009b53d19bc346f62f7de3d1c998664de90b"} err="failed to get container status \"2b441621415d66b4dd582cca5044009b53d19bc346f62f7de3d1c998664de90b\": rpc error: code = NotFound desc = could not find container \"2b441621415d66b4dd582cca5044009b53d19bc346f62f7de3d1c998664de90b\": container with ID starting with 2b441621415d66b4dd582cca5044009b53d19bc346f62f7de3d1c998664de90b not found: ID does not exist" Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.440185 
4558 scope.go:117] "RemoveContainer" containerID="76433ae79aade2e04de13538618917ea1b5001e82383e84d8122e38de55ebbe8" Jan 20 17:28:33 crc kubenswrapper[4558]: E0120 17:28:33.440638 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"76433ae79aade2e04de13538618917ea1b5001e82383e84d8122e38de55ebbe8\": container with ID starting with 76433ae79aade2e04de13538618917ea1b5001e82383e84d8122e38de55ebbe8 not found: ID does not exist" containerID="76433ae79aade2e04de13538618917ea1b5001e82383e84d8122e38de55ebbe8" Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.440661 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"76433ae79aade2e04de13538618917ea1b5001e82383e84d8122e38de55ebbe8"} err="failed to get container status \"76433ae79aade2e04de13538618917ea1b5001e82383e84d8122e38de55ebbe8\": rpc error: code = NotFound desc = could not find container \"76433ae79aade2e04de13538618917ea1b5001e82383e84d8122e38de55ebbe8\": container with ID starting with 76433ae79aade2e04de13538618917ea1b5001e82383e84d8122e38de55ebbe8 not found: ID does not exist" Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.440674 4558 scope.go:117] "RemoveContainer" containerID="94d135b3e27081e915874cdf29659d51450157f87f3b4f37332d5d83a3c11459" Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.454443 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.458295 4558 scope.go:117] "RemoveContainer" containerID="2953f93c5ea7797971404b822588bdd28383773e127010c836cbd6ce7a1b5450" Jan 20 17:28:33 crc kubenswrapper[4558]: E0120 17:28:33.541730 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cinder-config-data: secret "cinder-config-data" not found Jan 20 17:28:33 crc kubenswrapper[4558]: E0120 17:28:33.542058 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-config-data podName:c1afb831-f3e4-4356-ab86-713eb0beca39 nodeName:}" failed. No retries permitted until 2026-01-20 17:29:05.542038787 +0000 UTC m=+2839.302376754 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-config-data") pod "cinder-scheduler-0" (UID: "c1afb831-f3e4-4356-ab86-713eb0beca39") : secret "cinder-config-data" not found Jan 20 17:28:33 crc kubenswrapper[4558]: E0120 17:28:33.542265 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cinder-scripts: secret "cinder-scripts" not found Jan 20 17:28:33 crc kubenswrapper[4558]: E0120 17:28:33.542295 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-scripts podName:c1afb831-f3e4-4356-ab86-713eb0beca39 nodeName:}" failed. No retries permitted until 2026-01-20 17:29:05.542287445 +0000 UTC m=+2839.302625402 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "scripts" (UniqueName: "kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-scripts") pod "cinder-scheduler-0" (UID: "c1afb831-f3e4-4356-ab86-713eb0beca39") : secret "cinder-scripts" not found Jan 20 17:28:33 crc kubenswrapper[4558]: E0120 17:28:33.542329 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cinder-scheduler-config-data: secret "cinder-scheduler-config-data" not found Jan 20 17:28:33 crc kubenswrapper[4558]: E0120 17:28:33.542336 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:28:33 crc kubenswrapper[4558]: E0120 17:28:33.542408 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-config-data-custom podName:c1afb831-f3e4-4356-ab86-713eb0beca39 nodeName:}" failed. No retries permitted until 2026-01-20 17:29:05.542389436 +0000 UTC m=+2839.302727403 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "config-data-custom" (UniqueName: "kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-config-data-custom") pod "cinder-scheduler-0" (UID: "c1afb831-f3e4-4356-ab86-713eb0beca39") : secret "cinder-scheduler-config-data" not found Jan 20 17:28:33 crc kubenswrapper[4558]: E0120 17:28:33.542428 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-combined-ca-bundle podName:c1afb831-f3e4-4356-ab86-713eb0beca39 nodeName:}" failed. No retries permitted until 2026-01-20 17:29:05.542421927 +0000 UTC m=+2839.302759894 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-combined-ca-bundle") pod "cinder-scheduler-0" (UID: "c1afb831-f3e4-4356-ab86-713eb0beca39") : secret "combined-ca-bundle" not found Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.593374 4558 scope.go:117] "RemoveContainer" containerID="33faf2c0a93c93d45813868ee43337a14e962791dad79303fe6a9a0671ec791e" Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.611486 4558 scope.go:117] "RemoveContainer" containerID="36e479ddabc13ddf3cd61164acd11174c752edf6961acec9ae67d8951043aee5" Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.633688 4558 scope.go:117] "RemoveContainer" containerID="5abed863186c32fde3a40dcffb538fd7112acbbdfd253d93751ff00096a299c5" Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.644571 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b2a7ece8-32ed-4eb3-ab85-27ff4361622d-config-data\") pod \"b2a7ece8-32ed-4eb3-ab85-27ff4361622d\" (UID: \"b2a7ece8-32ed-4eb3-ab85-27ff4361622d\") " Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.644617 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2a7ece8-32ed-4eb3-ab85-27ff4361622d-combined-ca-bundle\") pod \"b2a7ece8-32ed-4eb3-ab85-27ff4361622d\" (UID: \"b2a7ece8-32ed-4eb3-ab85-27ff4361622d\") " Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.644673 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rcxn7\" (UniqueName: \"kubernetes.io/projected/b2a7ece8-32ed-4eb3-ab85-27ff4361622d-kube-api-access-rcxn7\") pod \"b2a7ece8-32ed-4eb3-ab85-27ff4361622d\" (UID: \"b2a7ece8-32ed-4eb3-ab85-27ff4361622d\") " Jan 20 
17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.653602 4558 scope.go:117] "RemoveContainer" containerID="d700282b5693986cac00fa3aff076f819077ba4d7e9191db243d0f5c076b2c4d" Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.657363 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b2a7ece8-32ed-4eb3-ab85-27ff4361622d-kube-api-access-rcxn7" (OuterVolumeSpecName: "kube-api-access-rcxn7") pod "b2a7ece8-32ed-4eb3-ab85-27ff4361622d" (UID: "b2a7ece8-32ed-4eb3-ab85-27ff4361622d"). InnerVolumeSpecName "kube-api-access-rcxn7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.687891 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b2a7ece8-32ed-4eb3-ab85-27ff4361622d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b2a7ece8-32ed-4eb3-ab85-27ff4361622d" (UID: "b2a7ece8-32ed-4eb3-ab85-27ff4361622d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.688080 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b2a7ece8-32ed-4eb3-ab85-27ff4361622d-config-data" (OuterVolumeSpecName: "config-data") pod "b2a7ece8-32ed-4eb3-ab85-27ff4361622d" (UID: "b2a7ece8-32ed-4eb3-ab85-27ff4361622d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.748415 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b2a7ece8-32ed-4eb3-ab85-27ff4361622d-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.748738 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2a7ece8-32ed-4eb3-ab85-27ff4361622d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.748832 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rcxn7\" (UniqueName: \"kubernetes.io/projected/b2a7ece8-32ed-4eb3-ab85-27ff4361622d-kube-api-access-rcxn7\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.794366 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-665b4c9c8d-9tcx6"] Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.794831 4558 scope.go:117] "RemoveContainer" containerID="4a6334c4fa8ffc04d0aaf20255ac8f9132d2e26680c4b1f11d017cfb5b777eec" Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.797153 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.800898 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-665b4c9c8d-9tcx6"] Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.818543 4558 scope.go:117] "RemoveContainer" containerID="8c8b5915d3326030a79e4f528fb27f011fc914ddc227bbc37139ba81a55e7e83" Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.844455 4558 scope.go:117] "RemoveContainer" containerID="703117c0c32a1ceba745d2e72e8de7d8ffea3ca4a98e3434eda2bde5c61b0d36" Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.849717 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sgmkz\" (UniqueName: \"kubernetes.io/projected/3973df1a-dd5a-417e-80a9-6653fb036470-kube-api-access-sgmkz\") pod \"3973df1a-dd5a-417e-80a9-6653fb036470\" (UID: \"3973df1a-dd5a-417e-80a9-6653fb036470\") " Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.849877 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3973df1a-dd5a-417e-80a9-6653fb036470-combined-ca-bundle\") pod \"3973df1a-dd5a-417e-80a9-6653fb036470\" (UID: \"3973df1a-dd5a-417e-80a9-6653fb036470\") " Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.850059 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3973df1a-dd5a-417e-80a9-6653fb036470-config-data\") pod \"3973df1a-dd5a-417e-80a9-6653fb036470\" (UID: \"3973df1a-dd5a-417e-80a9-6653fb036470\") " Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.853589 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3973df1a-dd5a-417e-80a9-6653fb036470-kube-api-access-sgmkz" (OuterVolumeSpecName: "kube-api-access-sgmkz") pod "3973df1a-dd5a-417e-80a9-6653fb036470" (UID: "3973df1a-dd5a-417e-80a9-6653fb036470"). InnerVolumeSpecName "kube-api-access-sgmkz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.860496 4558 scope.go:117] "RemoveContainer" containerID="19bf1b2c84ff2748887d3f5954817882ace006a585e9b1cf3ff55c2286de632f" Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.892556 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3973df1a-dd5a-417e-80a9-6653fb036470-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3973df1a-dd5a-417e-80a9-6653fb036470" (UID: "3973df1a-dd5a-417e-80a9-6653fb036470"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.898602 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3973df1a-dd5a-417e-80a9-6653fb036470-config-data" (OuterVolumeSpecName: "config-data") pod "3973df1a-dd5a-417e-80a9-6653fb036470" (UID: "3973df1a-dd5a-417e-80a9-6653fb036470"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.908691 4558 scope.go:117] "RemoveContainer" containerID="70b3bec12ba85f03f2bb9c09bdc640f03db124a8b41fd0886ea7b738171db315" Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.926820 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.932258 4558 scope.go:117] "RemoveContainer" containerID="b9e271e5199c2c5986711fa373c13ad8aade0056ea94afaa4c8bdfa52c131009" Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.950566 4558 scope.go:117] "RemoveContainer" containerID="7355d501842ad1b14019ba4bbb3365f8da9a6587b24e3a344f6aeb0cbde4a6bd" Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.951458 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/134fc5ef-70be-4c8c-aa72-e4f0440d0afe-ceilometer-tls-certs\") pod \"134fc5ef-70be-4c8c-aa72-e4f0440d0afe\" (UID: \"134fc5ef-70be-4c8c-aa72-e4f0440d0afe\") " Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.951577 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/134fc5ef-70be-4c8c-aa72-e4f0440d0afe-config-data\") pod \"134fc5ef-70be-4c8c-aa72-e4f0440d0afe\" (UID: \"134fc5ef-70be-4c8c-aa72-e4f0440d0afe\") " Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.952358 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/134fc5ef-70be-4c8c-aa72-e4f0440d0afe-run-httpd\") pod \"134fc5ef-70be-4c8c-aa72-e4f0440d0afe\" (UID: \"134fc5ef-70be-4c8c-aa72-e4f0440d0afe\") " Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.952408 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/134fc5ef-70be-4c8c-aa72-e4f0440d0afe-log-httpd\") pod \"134fc5ef-70be-4c8c-aa72-e4f0440d0afe\" (UID: \"134fc5ef-70be-4c8c-aa72-e4f0440d0afe\") " Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.952445 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/134fc5ef-70be-4c8c-aa72-e4f0440d0afe-sg-core-conf-yaml\") pod \"134fc5ef-70be-4c8c-aa72-e4f0440d0afe\" (UID: \"134fc5ef-70be-4c8c-aa72-e4f0440d0afe\") " Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.952482 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/134fc5ef-70be-4c8c-aa72-e4f0440d0afe-combined-ca-bundle\") pod \"134fc5ef-70be-4c8c-aa72-e4f0440d0afe\" (UID: \"134fc5ef-70be-4c8c-aa72-e4f0440d0afe\") " Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.952512 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kxbmh\" (UniqueName: \"kubernetes.io/projected/134fc5ef-70be-4c8c-aa72-e4f0440d0afe-kube-api-access-kxbmh\") pod \"134fc5ef-70be-4c8c-aa72-e4f0440d0afe\" (UID: \"134fc5ef-70be-4c8c-aa72-e4f0440d0afe\") " Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.952562 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/134fc5ef-70be-4c8c-aa72-e4f0440d0afe-scripts\") pod \"134fc5ef-70be-4c8c-aa72-e4f0440d0afe\" (UID: \"134fc5ef-70be-4c8c-aa72-e4f0440d0afe\") " Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.953054 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/134fc5ef-70be-4c8c-aa72-e4f0440d0afe-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "134fc5ef-70be-4c8c-aa72-e4f0440d0afe" (UID: 
"134fc5ef-70be-4c8c-aa72-e4f0440d0afe"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.953404 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/134fc5ef-70be-4c8c-aa72-e4f0440d0afe-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.953429 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3973df1a-dd5a-417e-80a9-6653fb036470-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.953442 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sgmkz\" (UniqueName: \"kubernetes.io/projected/3973df1a-dd5a-417e-80a9-6653fb036470-kube-api-access-sgmkz\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.953454 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3973df1a-dd5a-417e-80a9-6653fb036470-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.953604 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/134fc5ef-70be-4c8c-aa72-e4f0440d0afe-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "134fc5ef-70be-4c8c-aa72-e4f0440d0afe" (UID: "134fc5ef-70be-4c8c-aa72-e4f0440d0afe"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.955548 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/134fc5ef-70be-4c8c-aa72-e4f0440d0afe-scripts" (OuterVolumeSpecName: "scripts") pod "134fc5ef-70be-4c8c-aa72-e4f0440d0afe" (UID: "134fc5ef-70be-4c8c-aa72-e4f0440d0afe"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.956398 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/134fc5ef-70be-4c8c-aa72-e4f0440d0afe-kube-api-access-kxbmh" (OuterVolumeSpecName: "kube-api-access-kxbmh") pod "134fc5ef-70be-4c8c-aa72-e4f0440d0afe" (UID: "134fc5ef-70be-4c8c-aa72-e4f0440d0afe"). InnerVolumeSpecName "kube-api-access-kxbmh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.968571 4558 scope.go:117] "RemoveContainer" containerID="2ee2e3e7ebe2d7413bfe073e0a35c416b50099cfb7c3db764047c0da6077f92e" Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.971433 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/134fc5ef-70be-4c8c-aa72-e4f0440d0afe-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "134fc5ef-70be-4c8c-aa72-e4f0440d0afe" (UID: "134fc5ef-70be-4c8c-aa72-e4f0440d0afe"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.985699 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/134fc5ef-70be-4c8c-aa72-e4f0440d0afe-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "134fc5ef-70be-4c8c-aa72-e4f0440d0afe" (UID: "134fc5ef-70be-4c8c-aa72-e4f0440d0afe"). InnerVolumeSpecName "ceilometer-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:33 crc kubenswrapper[4558]: I0120 17:28:33.989246 4558 scope.go:117] "RemoveContainer" containerID="8ba1d4c7037c9af7af4ff6c69fdad67f28d557367c30a255c97945adb2a5670d" Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.004278 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/134fc5ef-70be-4c8c-aa72-e4f0440d0afe-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "134fc5ef-70be-4c8c-aa72-e4f0440d0afe" (UID: "134fc5ef-70be-4c8c-aa72-e4f0440d0afe"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.006176 4558 scope.go:117] "RemoveContainer" containerID="94d135b3e27081e915874cdf29659d51450157f87f3b4f37332d5d83a3c11459" Jan 20 17:28:34 crc kubenswrapper[4558]: E0120 17:28:34.006755 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"94d135b3e27081e915874cdf29659d51450157f87f3b4f37332d5d83a3c11459\": container with ID starting with 94d135b3e27081e915874cdf29659d51450157f87f3b4f37332d5d83a3c11459 not found: ID does not exist" containerID="94d135b3e27081e915874cdf29659d51450157f87f3b4f37332d5d83a3c11459" Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.006851 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"94d135b3e27081e915874cdf29659d51450157f87f3b4f37332d5d83a3c11459"} err="failed to get container status \"94d135b3e27081e915874cdf29659d51450157f87f3b4f37332d5d83a3c11459\": rpc error: code = NotFound desc = could not find container \"94d135b3e27081e915874cdf29659d51450157f87f3b4f37332d5d83a3c11459\": container with ID starting with 94d135b3e27081e915874cdf29659d51450157f87f3b4f37332d5d83a3c11459 not found: ID does not exist" Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.006938 4558 scope.go:117] "RemoveContainer" containerID="2953f93c5ea7797971404b822588bdd28383773e127010c836cbd6ce7a1b5450" Jan 20 17:28:34 crc kubenswrapper[4558]: E0120 17:28:34.007330 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2953f93c5ea7797971404b822588bdd28383773e127010c836cbd6ce7a1b5450\": container with ID starting with 2953f93c5ea7797971404b822588bdd28383773e127010c836cbd6ce7a1b5450 not found: ID does not exist" containerID="2953f93c5ea7797971404b822588bdd28383773e127010c836cbd6ce7a1b5450" Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.007363 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2953f93c5ea7797971404b822588bdd28383773e127010c836cbd6ce7a1b5450"} err="failed to get container status \"2953f93c5ea7797971404b822588bdd28383773e127010c836cbd6ce7a1b5450\": rpc error: code = NotFound desc = could not find container \"2953f93c5ea7797971404b822588bdd28383773e127010c836cbd6ce7a1b5450\": container with ID starting with 2953f93c5ea7797971404b822588bdd28383773e127010c836cbd6ce7a1b5450 not found: ID does not exist" Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.007390 4558 scope.go:117] "RemoveContainer" containerID="33faf2c0a93c93d45813868ee43337a14e962791dad79303fe6a9a0671ec791e" Jan 20 17:28:34 crc kubenswrapper[4558]: E0120 17:28:34.007760 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"33faf2c0a93c93d45813868ee43337a14e962791dad79303fe6a9a0671ec791e\": container with ID starting with 33faf2c0a93c93d45813868ee43337a14e962791dad79303fe6a9a0671ec791e not found: ID does not exist" containerID="33faf2c0a93c93d45813868ee43337a14e962791dad79303fe6a9a0671ec791e" Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.007818 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"33faf2c0a93c93d45813868ee43337a14e962791dad79303fe6a9a0671ec791e"} err="failed to get container status \"33faf2c0a93c93d45813868ee43337a14e962791dad79303fe6a9a0671ec791e\": rpc error: code = NotFound desc = could not find container \"33faf2c0a93c93d45813868ee43337a14e962791dad79303fe6a9a0671ec791e\": container with ID starting with 33faf2c0a93c93d45813868ee43337a14e962791dad79303fe6a9a0671ec791e not found: ID does not exist" Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.007855 4558 scope.go:117] "RemoveContainer" containerID="36e479ddabc13ddf3cd61164acd11174c752edf6961acec9ae67d8951043aee5" Jan 20 17:28:34 crc kubenswrapper[4558]: E0120 17:28:34.008678 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"36e479ddabc13ddf3cd61164acd11174c752edf6961acec9ae67d8951043aee5\": container with ID starting with 36e479ddabc13ddf3cd61164acd11174c752edf6961acec9ae67d8951043aee5 not found: ID does not exist" containerID="36e479ddabc13ddf3cd61164acd11174c752edf6961acec9ae67d8951043aee5" Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.008729 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"36e479ddabc13ddf3cd61164acd11174c752edf6961acec9ae67d8951043aee5"} err="failed to get container status \"36e479ddabc13ddf3cd61164acd11174c752edf6961acec9ae67d8951043aee5\": rpc error: code = NotFound desc = could not find container \"36e479ddabc13ddf3cd61164acd11174c752edf6961acec9ae67d8951043aee5\": container with ID starting with 36e479ddabc13ddf3cd61164acd11174c752edf6961acec9ae67d8951043aee5 not found: ID does not exist" Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.008762 4558 scope.go:117] "RemoveContainer" containerID="5abed863186c32fde3a40dcffb538fd7112acbbdfd253d93751ff00096a299c5" Jan 20 17:28:34 crc kubenswrapper[4558]: E0120 17:28:34.009187 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5abed863186c32fde3a40dcffb538fd7112acbbdfd253d93751ff00096a299c5\": container with ID starting with 5abed863186c32fde3a40dcffb538fd7112acbbdfd253d93751ff00096a299c5 not found: ID does not exist" containerID="5abed863186c32fde3a40dcffb538fd7112acbbdfd253d93751ff00096a299c5" Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.009276 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5abed863186c32fde3a40dcffb538fd7112acbbdfd253d93751ff00096a299c5"} err="failed to get container status \"5abed863186c32fde3a40dcffb538fd7112acbbdfd253d93751ff00096a299c5\": rpc error: code = NotFound desc = could not find container \"5abed863186c32fde3a40dcffb538fd7112acbbdfd253d93751ff00096a299c5\": container with ID starting with 5abed863186c32fde3a40dcffb538fd7112acbbdfd253d93751ff00096a299c5 not found: ID does not exist" Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.009348 4558 scope.go:117] "RemoveContainer" containerID="d700282b5693986cac00fa3aff076f819077ba4d7e9191db243d0f5c076b2c4d" Jan 20 17:28:34 crc 
kubenswrapper[4558]: E0120 17:28:34.009815 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d700282b5693986cac00fa3aff076f819077ba4d7e9191db243d0f5c076b2c4d\": container with ID starting with d700282b5693986cac00fa3aff076f819077ba4d7e9191db243d0f5c076b2c4d not found: ID does not exist" containerID="d700282b5693986cac00fa3aff076f819077ba4d7e9191db243d0f5c076b2c4d" Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.009852 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d700282b5693986cac00fa3aff076f819077ba4d7e9191db243d0f5c076b2c4d"} err="failed to get container status \"d700282b5693986cac00fa3aff076f819077ba4d7e9191db243d0f5c076b2c4d\": rpc error: code = NotFound desc = could not find container \"d700282b5693986cac00fa3aff076f819077ba4d7e9191db243d0f5c076b2c4d\": container with ID starting with d700282b5693986cac00fa3aff076f819077ba4d7e9191db243d0f5c076b2c4d not found: ID does not exist" Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.009873 4558 scope.go:117] "RemoveContainer" containerID="4a6334c4fa8ffc04d0aaf20255ac8f9132d2e26680c4b1f11d017cfb5b777eec" Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.009882 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/134fc5ef-70be-4c8c-aa72-e4f0440d0afe-config-data" (OuterVolumeSpecName: "config-data") pod "134fc5ef-70be-4c8c-aa72-e4f0440d0afe" (UID: "134fc5ef-70be-4c8c-aa72-e4f0440d0afe"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:34 crc kubenswrapper[4558]: E0120 17:28:34.010252 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4a6334c4fa8ffc04d0aaf20255ac8f9132d2e26680c4b1f11d017cfb5b777eec\": container with ID starting with 4a6334c4fa8ffc04d0aaf20255ac8f9132d2e26680c4b1f11d017cfb5b777eec not found: ID does not exist" containerID="4a6334c4fa8ffc04d0aaf20255ac8f9132d2e26680c4b1f11d017cfb5b777eec" Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.010290 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4a6334c4fa8ffc04d0aaf20255ac8f9132d2e26680c4b1f11d017cfb5b777eec"} err="failed to get container status \"4a6334c4fa8ffc04d0aaf20255ac8f9132d2e26680c4b1f11d017cfb5b777eec\": rpc error: code = NotFound desc = could not find container \"4a6334c4fa8ffc04d0aaf20255ac8f9132d2e26680c4b1f11d017cfb5b777eec\": container with ID starting with 4a6334c4fa8ffc04d0aaf20255ac8f9132d2e26680c4b1f11d017cfb5b777eec not found: ID does not exist" Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.010320 4558 scope.go:117] "RemoveContainer" containerID="8c8b5915d3326030a79e4f528fb27f011fc914ddc227bbc37139ba81a55e7e83" Jan 20 17:28:34 crc kubenswrapper[4558]: E0120 17:28:34.010694 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8c8b5915d3326030a79e4f528fb27f011fc914ddc227bbc37139ba81a55e7e83\": container with ID starting with 8c8b5915d3326030a79e4f528fb27f011fc914ddc227bbc37139ba81a55e7e83 not found: ID does not exist" containerID="8c8b5915d3326030a79e4f528fb27f011fc914ddc227bbc37139ba81a55e7e83" Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.010724 4558 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"8c8b5915d3326030a79e4f528fb27f011fc914ddc227bbc37139ba81a55e7e83"} err="failed to get container status \"8c8b5915d3326030a79e4f528fb27f011fc914ddc227bbc37139ba81a55e7e83\": rpc error: code = NotFound desc = could not find container \"8c8b5915d3326030a79e4f528fb27f011fc914ddc227bbc37139ba81a55e7e83\": container with ID starting with 8c8b5915d3326030a79e4f528fb27f011fc914ddc227bbc37139ba81a55e7e83 not found: ID does not exist" Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.010740 4558 scope.go:117] "RemoveContainer" containerID="703117c0c32a1ceba745d2e72e8de7d8ffea3ca4a98e3434eda2bde5c61b0d36" Jan 20 17:28:34 crc kubenswrapper[4558]: E0120 17:28:34.011052 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"703117c0c32a1ceba745d2e72e8de7d8ffea3ca4a98e3434eda2bde5c61b0d36\": container with ID starting with 703117c0c32a1ceba745d2e72e8de7d8ffea3ca4a98e3434eda2bde5c61b0d36 not found: ID does not exist" containerID="703117c0c32a1ceba745d2e72e8de7d8ffea3ca4a98e3434eda2bde5c61b0d36" Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.011077 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"703117c0c32a1ceba745d2e72e8de7d8ffea3ca4a98e3434eda2bde5c61b0d36"} err="failed to get container status \"703117c0c32a1ceba745d2e72e8de7d8ffea3ca4a98e3434eda2bde5c61b0d36\": rpc error: code = NotFound desc = could not find container \"703117c0c32a1ceba745d2e72e8de7d8ffea3ca4a98e3434eda2bde5c61b0d36\": container with ID starting with 703117c0c32a1ceba745d2e72e8de7d8ffea3ca4a98e3434eda2bde5c61b0d36 not found: ID does not exist" Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.011095 4558 scope.go:117] "RemoveContainer" containerID="19bf1b2c84ff2748887d3f5954817882ace006a585e9b1cf3ff55c2286de632f" Jan 20 17:28:34 crc kubenswrapper[4558]: E0120 17:28:34.011416 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"19bf1b2c84ff2748887d3f5954817882ace006a585e9b1cf3ff55c2286de632f\": container with ID starting with 19bf1b2c84ff2748887d3f5954817882ace006a585e9b1cf3ff55c2286de632f not found: ID does not exist" containerID="19bf1b2c84ff2748887d3f5954817882ace006a585e9b1cf3ff55c2286de632f" Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.011440 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"19bf1b2c84ff2748887d3f5954817882ace006a585e9b1cf3ff55c2286de632f"} err="failed to get container status \"19bf1b2c84ff2748887d3f5954817882ace006a585e9b1cf3ff55c2286de632f\": rpc error: code = NotFound desc = could not find container \"19bf1b2c84ff2748887d3f5954817882ace006a585e9b1cf3ff55c2286de632f\": container with ID starting with 19bf1b2c84ff2748887d3f5954817882ace006a585e9b1cf3ff55c2286de632f not found: ID does not exist" Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.011455 4558 scope.go:117] "RemoveContainer" containerID="70b3bec12ba85f03f2bb9c09bdc640f03db124a8b41fd0886ea7b738171db315" Jan 20 17:28:34 crc kubenswrapper[4558]: E0120 17:28:34.011759 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"70b3bec12ba85f03f2bb9c09bdc640f03db124a8b41fd0886ea7b738171db315\": container with ID starting with 70b3bec12ba85f03f2bb9c09bdc640f03db124a8b41fd0886ea7b738171db315 not found: ID does not exist" 
containerID="70b3bec12ba85f03f2bb9c09bdc640f03db124a8b41fd0886ea7b738171db315" Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.011800 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"70b3bec12ba85f03f2bb9c09bdc640f03db124a8b41fd0886ea7b738171db315"} err="failed to get container status \"70b3bec12ba85f03f2bb9c09bdc640f03db124a8b41fd0886ea7b738171db315\": rpc error: code = NotFound desc = could not find container \"70b3bec12ba85f03f2bb9c09bdc640f03db124a8b41fd0886ea7b738171db315\": container with ID starting with 70b3bec12ba85f03f2bb9c09bdc640f03db124a8b41fd0886ea7b738171db315 not found: ID does not exist" Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.011826 4558 scope.go:117] "RemoveContainer" containerID="b9e271e5199c2c5986711fa373c13ad8aade0056ea94afaa4c8bdfa52c131009" Jan 20 17:28:34 crc kubenswrapper[4558]: E0120 17:28:34.012196 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b9e271e5199c2c5986711fa373c13ad8aade0056ea94afaa4c8bdfa52c131009\": container with ID starting with b9e271e5199c2c5986711fa373c13ad8aade0056ea94afaa4c8bdfa52c131009 not found: ID does not exist" containerID="b9e271e5199c2c5986711fa373c13ad8aade0056ea94afaa4c8bdfa52c131009" Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.012223 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b9e271e5199c2c5986711fa373c13ad8aade0056ea94afaa4c8bdfa52c131009"} err="failed to get container status \"b9e271e5199c2c5986711fa373c13ad8aade0056ea94afaa4c8bdfa52c131009\": rpc error: code = NotFound desc = could not find container \"b9e271e5199c2c5986711fa373c13ad8aade0056ea94afaa4c8bdfa52c131009\": container with ID starting with b9e271e5199c2c5986711fa373c13ad8aade0056ea94afaa4c8bdfa52c131009 not found: ID does not exist" Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.012241 4558 scope.go:117] "RemoveContainer" containerID="7355d501842ad1b14019ba4bbb3365f8da9a6587b24e3a344f6aeb0cbde4a6bd" Jan 20 17:28:34 crc kubenswrapper[4558]: E0120 17:28:34.012570 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7355d501842ad1b14019ba4bbb3365f8da9a6587b24e3a344f6aeb0cbde4a6bd\": container with ID starting with 7355d501842ad1b14019ba4bbb3365f8da9a6587b24e3a344f6aeb0cbde4a6bd not found: ID does not exist" containerID="7355d501842ad1b14019ba4bbb3365f8da9a6587b24e3a344f6aeb0cbde4a6bd" Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.012602 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7355d501842ad1b14019ba4bbb3365f8da9a6587b24e3a344f6aeb0cbde4a6bd"} err="failed to get container status \"7355d501842ad1b14019ba4bbb3365f8da9a6587b24e3a344f6aeb0cbde4a6bd\": rpc error: code = NotFound desc = could not find container \"7355d501842ad1b14019ba4bbb3365f8da9a6587b24e3a344f6aeb0cbde4a6bd\": container with ID starting with 7355d501842ad1b14019ba4bbb3365f8da9a6587b24e3a344f6aeb0cbde4a6bd not found: ID does not exist" Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.012618 4558 scope.go:117] "RemoveContainer" containerID="2ee2e3e7ebe2d7413bfe073e0a35c416b50099cfb7c3db764047c0da6077f92e" Jan 20 17:28:34 crc kubenswrapper[4558]: E0120 17:28:34.012976 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"2ee2e3e7ebe2d7413bfe073e0a35c416b50099cfb7c3db764047c0da6077f92e\": container with ID starting with 2ee2e3e7ebe2d7413bfe073e0a35c416b50099cfb7c3db764047c0da6077f92e not found: ID does not exist" containerID="2ee2e3e7ebe2d7413bfe073e0a35c416b50099cfb7c3db764047c0da6077f92e" Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.013007 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ee2e3e7ebe2d7413bfe073e0a35c416b50099cfb7c3db764047c0da6077f92e"} err="failed to get container status \"2ee2e3e7ebe2d7413bfe073e0a35c416b50099cfb7c3db764047c0da6077f92e\": rpc error: code = NotFound desc = could not find container \"2ee2e3e7ebe2d7413bfe073e0a35c416b50099cfb7c3db764047c0da6077f92e\": container with ID starting with 2ee2e3e7ebe2d7413bfe073e0a35c416b50099cfb7c3db764047c0da6077f92e not found: ID does not exist" Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.013028 4558 scope.go:117] "RemoveContainer" containerID="8ba1d4c7037c9af7af4ff6c69fdad67f28d557367c30a255c97945adb2a5670d" Jan 20 17:28:34 crc kubenswrapper[4558]: E0120 17:28:34.013289 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8ba1d4c7037c9af7af4ff6c69fdad67f28d557367c30a255c97945adb2a5670d\": container with ID starting with 8ba1d4c7037c9af7af4ff6c69fdad67f28d557367c30a255c97945adb2a5670d not found: ID does not exist" containerID="8ba1d4c7037c9af7af4ff6c69fdad67f28d557367c30a255c97945adb2a5670d" Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.013317 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8ba1d4c7037c9af7af4ff6c69fdad67f28d557367c30a255c97945adb2a5670d"} err="failed to get container status \"8ba1d4c7037c9af7af4ff6c69fdad67f28d557367c30a255c97945adb2a5670d\": rpc error: code = NotFound desc = could not find container \"8ba1d4c7037c9af7af4ff6c69fdad67f28d557367c30a255c97945adb2a5670d\": container with ID starting with 8ba1d4c7037c9af7af4ff6c69fdad67f28d557367c30a255c97945adb2a5670d not found: ID does not exist" Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.055050 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/134fc5ef-70be-4c8c-aa72-e4f0440d0afe-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.055091 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/134fc5ef-70be-4c8c-aa72-e4f0440d0afe-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.055104 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/134fc5ef-70be-4c8c-aa72-e4f0440d0afe-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.055121 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kxbmh\" (UniqueName: \"kubernetes.io/projected/134fc5ef-70be-4c8c-aa72-e4f0440d0afe-kube-api-access-kxbmh\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.055133 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/134fc5ef-70be-4c8c-aa72-e4f0440d0afe-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.055143 4558 reconciler_common.go:293] "Volume detached 
for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/134fc5ef-70be-4c8c-aa72-e4f0440d0afe-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.055153 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/134fc5ef-70be-4c8c-aa72-e4f0440d0afe-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.407907 4558 generic.go:334] "Generic (PLEG): container finished" podID="134fc5ef-70be-4c8c-aa72-e4f0440d0afe" containerID="98bb94af3fa40dc3d1b7b4c99b5349d836b492c3cec3acd61d7cae45c5b34d12" exitCode=137 Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.407979 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"134fc5ef-70be-4c8c-aa72-e4f0440d0afe","Type":"ContainerDied","Data":"98bb94af3fa40dc3d1b7b4c99b5349d836b492c3cec3acd61d7cae45c5b34d12"} Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.408453 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"134fc5ef-70be-4c8c-aa72-e4f0440d0afe","Type":"ContainerDied","Data":"db0135b9134d791f9b0b19e85f18556a0c64916735fe2422f1ad1b4a39439c1b"} Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.408490 4558 scope.go:117] "RemoveContainer" containerID="39bfb26312eaad0bd344b47e0fab7bc8a68e4339a058a5362c330669eb1593d1" Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.408035 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.410789 4558 generic.go:334] "Generic (PLEG): container finished" podID="3973df1a-dd5a-417e-80a9-6653fb036470" containerID="6d33b2d5d8fe60d82930b4864fabfe53d6a679fc712a4ba67d60a4da40feed96" exitCode=137 Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.410871 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"3973df1a-dd5a-417e-80a9-6653fb036470","Type":"ContainerDied","Data":"6d33b2d5d8fe60d82930b4864fabfe53d6a679fc712a4ba67d60a4da40feed96"} Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.410921 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"3973df1a-dd5a-417e-80a9-6653fb036470","Type":"ContainerDied","Data":"b00d8ab246342eb2d06712da991aaec4bc45ecc6ab7b0086d0829fed86dff19d"} Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.411009 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.414063 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"b2a7ece8-32ed-4eb3-ab85-27ff4361622d","Type":"ContainerDied","Data":"4c5b27f057af39120dd18ed3760fd5e5bece44a4920f0051709e8fcb3c409554"} Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.414233 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.433927 4558 scope.go:117] "RemoveContainer" containerID="99d707bce7e8a2bd68ad6ab0135f5762171740a9747fee9d19f3dc67af0b93db" Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.446736 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.450837 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.461989 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.466827 4558 scope.go:117] "RemoveContainer" containerID="98bb94af3fa40dc3d1b7b4c99b5349d836b492c3cec3acd61d7cae45c5b34d12" Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.470459 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.475596 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.479489 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.522606 4558 scope.go:117] "RemoveContainer" containerID="8ac13b7e76f880d4b690e8245e9e454bb0f7d3d01a25e904b05a000a609f936a" Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.551685 4558 scope.go:117] "RemoveContainer" containerID="39bfb26312eaad0bd344b47e0fab7bc8a68e4339a058a5362c330669eb1593d1" Jan 20 17:28:34 crc kubenswrapper[4558]: E0120 17:28:34.553493 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"39bfb26312eaad0bd344b47e0fab7bc8a68e4339a058a5362c330669eb1593d1\": container with ID starting with 39bfb26312eaad0bd344b47e0fab7bc8a68e4339a058a5362c330669eb1593d1 not found: ID does not exist" containerID="39bfb26312eaad0bd344b47e0fab7bc8a68e4339a058a5362c330669eb1593d1" Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.553539 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"39bfb26312eaad0bd344b47e0fab7bc8a68e4339a058a5362c330669eb1593d1"} err="failed to get container status \"39bfb26312eaad0bd344b47e0fab7bc8a68e4339a058a5362c330669eb1593d1\": rpc error: code = NotFound desc = could not find container \"39bfb26312eaad0bd344b47e0fab7bc8a68e4339a058a5362c330669eb1593d1\": container with ID starting with 39bfb26312eaad0bd344b47e0fab7bc8a68e4339a058a5362c330669eb1593d1 not found: ID does not exist" Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.553578 4558 scope.go:117] "RemoveContainer" containerID="99d707bce7e8a2bd68ad6ab0135f5762171740a9747fee9d19f3dc67af0b93db" Jan 20 17:28:34 crc kubenswrapper[4558]: E0120 17:28:34.554546 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"99d707bce7e8a2bd68ad6ab0135f5762171740a9747fee9d19f3dc67af0b93db\": container with ID starting with 99d707bce7e8a2bd68ad6ab0135f5762171740a9747fee9d19f3dc67af0b93db not found: ID does not exist" containerID="99d707bce7e8a2bd68ad6ab0135f5762171740a9747fee9d19f3dc67af0b93db" Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.554580 4558 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"99d707bce7e8a2bd68ad6ab0135f5762171740a9747fee9d19f3dc67af0b93db"} err="failed to get container status \"99d707bce7e8a2bd68ad6ab0135f5762171740a9747fee9d19f3dc67af0b93db\": rpc error: code = NotFound desc = could not find container \"99d707bce7e8a2bd68ad6ab0135f5762171740a9747fee9d19f3dc67af0b93db\": container with ID starting with 99d707bce7e8a2bd68ad6ab0135f5762171740a9747fee9d19f3dc67af0b93db not found: ID does not exist" Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.554597 4558 scope.go:117] "RemoveContainer" containerID="98bb94af3fa40dc3d1b7b4c99b5349d836b492c3cec3acd61d7cae45c5b34d12" Jan 20 17:28:34 crc kubenswrapper[4558]: E0120 17:28:34.555005 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"98bb94af3fa40dc3d1b7b4c99b5349d836b492c3cec3acd61d7cae45c5b34d12\": container with ID starting with 98bb94af3fa40dc3d1b7b4c99b5349d836b492c3cec3acd61d7cae45c5b34d12 not found: ID does not exist" containerID="98bb94af3fa40dc3d1b7b4c99b5349d836b492c3cec3acd61d7cae45c5b34d12" Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.555034 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"98bb94af3fa40dc3d1b7b4c99b5349d836b492c3cec3acd61d7cae45c5b34d12"} err="failed to get container status \"98bb94af3fa40dc3d1b7b4c99b5349d836b492c3cec3acd61d7cae45c5b34d12\": rpc error: code = NotFound desc = could not find container \"98bb94af3fa40dc3d1b7b4c99b5349d836b492c3cec3acd61d7cae45c5b34d12\": container with ID starting with 98bb94af3fa40dc3d1b7b4c99b5349d836b492c3cec3acd61d7cae45c5b34d12 not found: ID does not exist" Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.555048 4558 scope.go:117] "RemoveContainer" containerID="8ac13b7e76f880d4b690e8245e9e454bb0f7d3d01a25e904b05a000a609f936a" Jan 20 17:28:34 crc kubenswrapper[4558]: E0120 17:28:34.555456 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8ac13b7e76f880d4b690e8245e9e454bb0f7d3d01a25e904b05a000a609f936a\": container with ID starting with 8ac13b7e76f880d4b690e8245e9e454bb0f7d3d01a25e904b05a000a609f936a not found: ID does not exist" containerID="8ac13b7e76f880d4b690e8245e9e454bb0f7d3d01a25e904b05a000a609f936a" Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.555498 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8ac13b7e76f880d4b690e8245e9e454bb0f7d3d01a25e904b05a000a609f936a"} err="failed to get container status \"8ac13b7e76f880d4b690e8245e9e454bb0f7d3d01a25e904b05a000a609f936a\": rpc error: code = NotFound desc = could not find container \"8ac13b7e76f880d4b690e8245e9e454bb0f7d3d01a25e904b05a000a609f936a\": container with ID starting with 8ac13b7e76f880d4b690e8245e9e454bb0f7d3d01a25e904b05a000a609f936a not found: ID does not exist" Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.555512 4558 scope.go:117] "RemoveContainer" containerID="6d33b2d5d8fe60d82930b4864fabfe53d6a679fc712a4ba67d60a4da40feed96" Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.573483 4558 scope.go:117] "RemoveContainer" containerID="6d33b2d5d8fe60d82930b4864fabfe53d6a679fc712a4ba67d60a4da40feed96" Jan 20 17:28:34 crc kubenswrapper[4558]: E0120 17:28:34.574013 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"6d33b2d5d8fe60d82930b4864fabfe53d6a679fc712a4ba67d60a4da40feed96\": container with ID starting with 6d33b2d5d8fe60d82930b4864fabfe53d6a679fc712a4ba67d60a4da40feed96 not found: ID does not exist" containerID="6d33b2d5d8fe60d82930b4864fabfe53d6a679fc712a4ba67d60a4da40feed96" Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.574064 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d33b2d5d8fe60d82930b4864fabfe53d6a679fc712a4ba67d60a4da40feed96"} err="failed to get container status \"6d33b2d5d8fe60d82930b4864fabfe53d6a679fc712a4ba67d60a4da40feed96\": rpc error: code = NotFound desc = could not find container \"6d33b2d5d8fe60d82930b4864fabfe53d6a679fc712a4ba67d60a4da40feed96\": container with ID starting with 6d33b2d5d8fe60d82930b4864fabfe53d6a679fc712a4ba67d60a4da40feed96 not found: ID does not exist" Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.574093 4558 scope.go:117] "RemoveContainer" containerID="15e562b45f89d1f5fb52993145b8f16fbd8c5b35f2f80d2df9e160bf13a36eaf" Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.575441 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="134fc5ef-70be-4c8c-aa72-e4f0440d0afe" path="/var/lib/kubelet/pods/134fc5ef-70be-4c8c-aa72-e4f0440d0afe/volumes" Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.576225 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3973df1a-dd5a-417e-80a9-6653fb036470" path="/var/lib/kubelet/pods/3973df1a-dd5a-417e-80a9-6653fb036470/volumes" Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.577985 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6dc46db7-9948-4c7d-b0a3-c767ffd58b4a" path="/var/lib/kubelet/pods/6dc46db7-9948-4c7d-b0a3-c767ffd58b4a/volumes" Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.579233 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" path="/var/lib/kubelet/pods/8804f9f1-60d4-4fa1-9333-9f9e01b3c68a/volumes" Jan 20 17:28:34 crc kubenswrapper[4558]: I0120 17:28:34.581789 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b2a7ece8-32ed-4eb3-ab85-27ff4361622d" path="/var/lib/kubelet/pods/b2a7ece8-32ed-4eb3-ab85-27ff4361622d/volumes" Jan 20 17:28:36 crc kubenswrapper[4558]: I0120 17:28:36.041158 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-db-create-rt2fh" Jan 20 17:28:36 crc kubenswrapper[4558]: I0120 17:28:36.088939 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1b7c6fb3-f30d-4dde-8502-e7d840719520-operator-scripts\") pod \"1b7c6fb3-f30d-4dde-8502-e7d840719520\" (UID: \"1b7c6fb3-f30d-4dde-8502-e7d840719520\") " Jan 20 17:28:36 crc kubenswrapper[4558]: I0120 17:28:36.089990 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1b7c6fb3-f30d-4dde-8502-e7d840719520-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "1b7c6fb3-f30d-4dde-8502-e7d840719520" (UID: "1b7c6fb3-f30d-4dde-8502-e7d840719520"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:28:36 crc kubenswrapper[4558]: I0120 17:28:36.091056 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6vfpf\" (UniqueName: \"kubernetes.io/projected/1b7c6fb3-f30d-4dde-8502-e7d840719520-kube-api-access-6vfpf\") pod \"1b7c6fb3-f30d-4dde-8502-e7d840719520\" (UID: \"1b7c6fb3-f30d-4dde-8502-e7d840719520\") " Jan 20 17:28:36 crc kubenswrapper[4558]: I0120 17:28:36.095424 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1b7c6fb3-f30d-4dde-8502-e7d840719520-kube-api-access-6vfpf" (OuterVolumeSpecName: "kube-api-access-6vfpf") pod "1b7c6fb3-f30d-4dde-8502-e7d840719520" (UID: "1b7c6fb3-f30d-4dde-8502-e7d840719520"). InnerVolumeSpecName "kube-api-access-6vfpf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:28:36 crc kubenswrapper[4558]: I0120 17:28:36.104439 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6vfpf\" (UniqueName: \"kubernetes.io/projected/1b7c6fb3-f30d-4dde-8502-e7d840719520-kube-api-access-6vfpf\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:36 crc kubenswrapper[4558]: I0120 17:28:36.104468 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1b7c6fb3-f30d-4dde-8502-e7d840719520-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:36 crc kubenswrapper[4558]: I0120 17:28:36.151239 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:28:36 crc kubenswrapper[4558]: I0120 17:28:36.205252 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-config-data\") pod \"c1afb831-f3e4-4356-ab86-713eb0beca39\" (UID: \"c1afb831-f3e4-4356-ab86-713eb0beca39\") " Jan 20 17:28:36 crc kubenswrapper[4558]: I0120 17:28:36.205381 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-scripts\") pod \"c1afb831-f3e4-4356-ab86-713eb0beca39\" (UID: \"c1afb831-f3e4-4356-ab86-713eb0beca39\") " Jan 20 17:28:36 crc kubenswrapper[4558]: I0120 17:28:36.205540 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-config-data-custom\") pod \"c1afb831-f3e4-4356-ab86-713eb0beca39\" (UID: \"c1afb831-f3e4-4356-ab86-713eb0beca39\") " Jan 20 17:28:36 crc kubenswrapper[4558]: I0120 17:28:36.205572 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c1afb831-f3e4-4356-ab86-713eb0beca39-etc-machine-id\") pod \"c1afb831-f3e4-4356-ab86-713eb0beca39\" (UID: \"c1afb831-f3e4-4356-ab86-713eb0beca39\") " Jan 20 17:28:36 crc kubenswrapper[4558]: I0120 17:28:36.205668 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-combined-ca-bundle\") pod \"c1afb831-f3e4-4356-ab86-713eb0beca39\" (UID: \"c1afb831-f3e4-4356-ab86-713eb0beca39\") " Jan 20 17:28:36 crc kubenswrapper[4558]: I0120 17:28:36.205712 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"kube-api-access-cdj6z\" (UniqueName: \"kubernetes.io/projected/c1afb831-f3e4-4356-ab86-713eb0beca39-kube-api-access-cdj6z\") pod \"c1afb831-f3e4-4356-ab86-713eb0beca39\" (UID: \"c1afb831-f3e4-4356-ab86-713eb0beca39\") " Jan 20 17:28:36 crc kubenswrapper[4558]: I0120 17:28:36.205762 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c1afb831-f3e4-4356-ab86-713eb0beca39-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "c1afb831-f3e4-4356-ab86-713eb0beca39" (UID: "c1afb831-f3e4-4356-ab86-713eb0beca39"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:28:36 crc kubenswrapper[4558]: I0120 17:28:36.206276 4558 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c1afb831-f3e4-4356-ab86-713eb0beca39-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:36 crc kubenswrapper[4558]: I0120 17:28:36.210058 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c1afb831-f3e4-4356-ab86-713eb0beca39-kube-api-access-cdj6z" (OuterVolumeSpecName: "kube-api-access-cdj6z") pod "c1afb831-f3e4-4356-ab86-713eb0beca39" (UID: "c1afb831-f3e4-4356-ab86-713eb0beca39"). InnerVolumeSpecName "kube-api-access-cdj6z". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:28:36 crc kubenswrapper[4558]: I0120 17:28:36.210282 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-scripts" (OuterVolumeSpecName: "scripts") pod "c1afb831-f3e4-4356-ab86-713eb0beca39" (UID: "c1afb831-f3e4-4356-ab86-713eb0beca39"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:36 crc kubenswrapper[4558]: I0120 17:28:36.210761 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "c1afb831-f3e4-4356-ab86-713eb0beca39" (UID: "c1afb831-f3e4-4356-ab86-713eb0beca39"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:36 crc kubenswrapper[4558]: I0120 17:28:36.247953 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c1afb831-f3e4-4356-ab86-713eb0beca39" (UID: "c1afb831-f3e4-4356-ab86-713eb0beca39"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:36 crc kubenswrapper[4558]: I0120 17:28:36.277350 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-config-data" (OuterVolumeSpecName: "config-data") pod "c1afb831-f3e4-4356-ab86-713eb0beca39" (UID: "c1afb831-f3e4-4356-ab86-713eb0beca39"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:28:36 crc kubenswrapper[4558]: I0120 17:28:36.308805 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:36 crc kubenswrapper[4558]: I0120 17:28:36.308843 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:36 crc kubenswrapper[4558]: I0120 17:28:36.308855 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:36 crc kubenswrapper[4558]: I0120 17:28:36.308872 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1afb831-f3e4-4356-ab86-713eb0beca39-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:36 crc kubenswrapper[4558]: I0120 17:28:36.308885 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cdj6z\" (UniqueName: \"kubernetes.io/projected/c1afb831-f3e4-4356-ab86-713eb0beca39-kube-api-access-cdj6z\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:36 crc kubenswrapper[4558]: I0120 17:28:36.449676 4558 generic.go:334] "Generic (PLEG): container finished" podID="1b7c6fb3-f30d-4dde-8502-e7d840719520" containerID="a986efaf418f6a3e2abd8d9f06b0fe8df99833bf6b1e6f9c131ce1990c94b418" exitCode=137 Jan 20 17:28:36 crc kubenswrapper[4558]: I0120 17:28:36.449764 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-db-create-rt2fh" Jan 20 17:28:36 crc kubenswrapper[4558]: I0120 17:28:36.449758 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-create-rt2fh" event={"ID":"1b7c6fb3-f30d-4dde-8502-e7d840719520","Type":"ContainerDied","Data":"a986efaf418f6a3e2abd8d9f06b0fe8df99833bf6b1e6f9c131ce1990c94b418"} Jan 20 17:28:36 crc kubenswrapper[4558]: I0120 17:28:36.449842 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-create-rt2fh" event={"ID":"1b7c6fb3-f30d-4dde-8502-e7d840719520","Type":"ContainerDied","Data":"6e67b06995c037b7fe9c89c79183b501b9088714a4aa0c041dd1cc3af13b30c6"} Jan 20 17:28:36 crc kubenswrapper[4558]: I0120 17:28:36.449876 4558 scope.go:117] "RemoveContainer" containerID="a986efaf418f6a3e2abd8d9f06b0fe8df99833bf6b1e6f9c131ce1990c94b418" Jan 20 17:28:36 crc kubenswrapper[4558]: I0120 17:28:36.453520 4558 generic.go:334] "Generic (PLEG): container finished" podID="c1afb831-f3e4-4356-ab86-713eb0beca39" containerID="11baea4ab79cfa0023a448f7d4c2fd060ecbb75ef0a90656b63b54a79a53d8f7" exitCode=137 Jan 20 17:28:36 crc kubenswrapper[4558]: I0120 17:28:36.453607 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:28:36 crc kubenswrapper[4558]: I0120 17:28:36.453600 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"c1afb831-f3e4-4356-ab86-713eb0beca39","Type":"ContainerDied","Data":"11baea4ab79cfa0023a448f7d4c2fd060ecbb75ef0a90656b63b54a79a53d8f7"} Jan 20 17:28:36 crc kubenswrapper[4558]: I0120 17:28:36.453667 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"c1afb831-f3e4-4356-ab86-713eb0beca39","Type":"ContainerDied","Data":"ad602ea750041b8651b16f5dfde26a7e046c18a3d3efca817c0e051aad027231"} Jan 20 17:28:36 crc kubenswrapper[4558]: I0120 17:28:36.483709 4558 scope.go:117] "RemoveContainer" containerID="a986efaf418f6a3e2abd8d9f06b0fe8df99833bf6b1e6f9c131ce1990c94b418" Jan 20 17:28:36 crc kubenswrapper[4558]: E0120 17:28:36.484322 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a986efaf418f6a3e2abd8d9f06b0fe8df99833bf6b1e6f9c131ce1990c94b418\": container with ID starting with a986efaf418f6a3e2abd8d9f06b0fe8df99833bf6b1e6f9c131ce1990c94b418 not found: ID does not exist" containerID="a986efaf418f6a3e2abd8d9f06b0fe8df99833bf6b1e6f9c131ce1990c94b418" Jan 20 17:28:36 crc kubenswrapper[4558]: I0120 17:28:36.484414 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a986efaf418f6a3e2abd8d9f06b0fe8df99833bf6b1e6f9c131ce1990c94b418"} err="failed to get container status \"a986efaf418f6a3e2abd8d9f06b0fe8df99833bf6b1e6f9c131ce1990c94b418\": rpc error: code = NotFound desc = could not find container \"a986efaf418f6a3e2abd8d9f06b0fe8df99833bf6b1e6f9c131ce1990c94b418\": container with ID starting with a986efaf418f6a3e2abd8d9f06b0fe8df99833bf6b1e6f9c131ce1990c94b418 not found: ID does not exist" Jan 20 17:28:36 crc kubenswrapper[4558]: I0120 17:28:36.484471 4558 scope.go:117] "RemoveContainer" containerID="4073b49237446b10090528b017b317faf894679a9662161b1fb6fedebcfc14ed" Jan 20 17:28:36 crc kubenswrapper[4558]: I0120 17:28:36.489122 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-db-create-rt2fh"] Jan 20 17:28:36 crc kubenswrapper[4558]: I0120 17:28:36.498662 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-db-create-rt2fh"] Jan 20 17:28:36 crc kubenswrapper[4558]: I0120 17:28:36.502762 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:28:36 crc kubenswrapper[4558]: I0120 17:28:36.506441 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:28:36 crc kubenswrapper[4558]: I0120 17:28:36.511133 4558 scope.go:117] "RemoveContainer" containerID="11baea4ab79cfa0023a448f7d4c2fd060ecbb75ef0a90656b63b54a79a53d8f7" Jan 20 17:28:36 crc kubenswrapper[4558]: I0120 17:28:36.533333 4558 scope.go:117] "RemoveContainer" containerID="4073b49237446b10090528b017b317faf894679a9662161b1fb6fedebcfc14ed" Jan 20 17:28:36 crc kubenswrapper[4558]: E0120 17:28:36.533978 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4073b49237446b10090528b017b317faf894679a9662161b1fb6fedebcfc14ed\": container with ID starting with 4073b49237446b10090528b017b317faf894679a9662161b1fb6fedebcfc14ed not found: ID does not exist" 
containerID="4073b49237446b10090528b017b317faf894679a9662161b1fb6fedebcfc14ed" Jan 20 17:28:36 crc kubenswrapper[4558]: I0120 17:28:36.534033 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4073b49237446b10090528b017b317faf894679a9662161b1fb6fedebcfc14ed"} err="failed to get container status \"4073b49237446b10090528b017b317faf894679a9662161b1fb6fedebcfc14ed\": rpc error: code = NotFound desc = could not find container \"4073b49237446b10090528b017b317faf894679a9662161b1fb6fedebcfc14ed\": container with ID starting with 4073b49237446b10090528b017b317faf894679a9662161b1fb6fedebcfc14ed not found: ID does not exist" Jan 20 17:28:36 crc kubenswrapper[4558]: I0120 17:28:36.534063 4558 scope.go:117] "RemoveContainer" containerID="11baea4ab79cfa0023a448f7d4c2fd060ecbb75ef0a90656b63b54a79a53d8f7" Jan 20 17:28:36 crc kubenswrapper[4558]: E0120 17:28:36.534469 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"11baea4ab79cfa0023a448f7d4c2fd060ecbb75ef0a90656b63b54a79a53d8f7\": container with ID starting with 11baea4ab79cfa0023a448f7d4c2fd060ecbb75ef0a90656b63b54a79a53d8f7 not found: ID does not exist" containerID="11baea4ab79cfa0023a448f7d4c2fd060ecbb75ef0a90656b63b54a79a53d8f7" Jan 20 17:28:36 crc kubenswrapper[4558]: I0120 17:28:36.534502 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"11baea4ab79cfa0023a448f7d4c2fd060ecbb75ef0a90656b63b54a79a53d8f7"} err="failed to get container status \"11baea4ab79cfa0023a448f7d4c2fd060ecbb75ef0a90656b63b54a79a53d8f7\": rpc error: code = NotFound desc = could not find container \"11baea4ab79cfa0023a448f7d4c2fd060ecbb75ef0a90656b63b54a79a53d8f7\": container with ID starting with 11baea4ab79cfa0023a448f7d4c2fd060ecbb75ef0a90656b63b54a79a53d8f7 not found: ID does not exist" Jan 20 17:28:36 crc kubenswrapper[4558]: I0120 17:28:36.579423 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1b7c6fb3-f30d-4dde-8502-e7d840719520" path="/var/lib/kubelet/pods/1b7c6fb3-f30d-4dde-8502-e7d840719520/volumes" Jan 20 17:28:36 crc kubenswrapper[4558]: I0120 17:28:36.580592 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c1afb831-f3e4-4356-ab86-713eb0beca39" path="/var/lib/kubelet/pods/c1afb831-f3e4-4356-ab86-713eb0beca39/volumes" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.347521 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-6flbs"] Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.354753 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-6flbs"] Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.443488 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-skfk2"] Jan 20 17:28:42 crc kubenswrapper[4558]: E0120 17:28:42.443906 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="134fc5ef-70be-4c8c-aa72-e4f0440d0afe" containerName="ceilometer-central-agent" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.443928 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="134fc5ef-70be-4c8c-aa72-e4f0440d0afe" containerName="ceilometer-central-agent" Jan 20 17:28:42 crc kubenswrapper[4558]: E0120 17:28:42.443952 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="184a9f2b-eab7-4221-a900-746e063662a8" containerName="barbican-api-log" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 
17:28:42.443960 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="184a9f2b-eab7-4221-a900-746e063662a8" containerName="barbican-api-log" Jan 20 17:28:42 crc kubenswrapper[4558]: E0120 17:28:42.443968 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" containerName="object-replicator" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.443974 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" containerName="object-replicator" Jan 20 17:28:42 crc kubenswrapper[4558]: E0120 17:28:42.443989 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" containerName="object-updater" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.443995 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" containerName="object-updater" Jan 20 17:28:42 crc kubenswrapper[4558]: E0120 17:28:42.444008 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" containerName="container-replicator" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.444014 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" containerName="container-replicator" Jan 20 17:28:42 crc kubenswrapper[4558]: E0120 17:28:42.444023 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="134fc5ef-70be-4c8c-aa72-e4f0440d0afe" containerName="sg-core" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.444032 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="134fc5ef-70be-4c8c-aa72-e4f0440d0afe" containerName="sg-core" Jan 20 17:28:42 crc kubenswrapper[4558]: E0120 17:28:42.444044 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" containerName="container-server" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.444050 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" containerName="container-server" Jan 20 17:28:42 crc kubenswrapper[4558]: E0120 17:28:42.444060 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" containerName="rsync" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.444066 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" containerName="rsync" Jan 20 17:28:42 crc kubenswrapper[4558]: E0120 17:28:42.444076 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e81d2508-e298-463c-9031-b9d8e486d566" containerName="glance-httpd" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.444082 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e81d2508-e298-463c-9031-b9d8e486d566" containerName="glance-httpd" Jan 20 17:28:42 crc kubenswrapper[4558]: E0120 17:28:42.444092 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b9858cf-2020-458e-bcf6-407c6853a962" containerName="barbican-worker" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.444098 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b9858cf-2020-458e-bcf6-407c6853a962" containerName="barbican-worker" Jan 20 17:28:42 crc kubenswrapper[4558]: E0120 17:28:42.444109 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" containerName="account-auditor" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 
17:28:42.444115 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" containerName="account-auditor" Jan 20 17:28:42 crc kubenswrapper[4558]: E0120 17:28:42.444121 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6a0777b-d04f-451b-9e3d-3f9ab0e19df8" containerName="cinder-api-log" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.444127 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6a0777b-d04f-451b-9e3d-3f9ab0e19df8" containerName="cinder-api-log" Jan 20 17:28:42 crc kubenswrapper[4558]: E0120 17:28:42.444136 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5" containerName="proxy-httpd" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.444142 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5" containerName="proxy-httpd" Jan 20 17:28:42 crc kubenswrapper[4558]: E0120 17:28:42.444151 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6bc8ca35-f460-4c4d-9dbe-0012c552371a" containerName="mysql-bootstrap" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.444157 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6bc8ca35-f460-4c4d-9dbe-0012c552371a" containerName="mysql-bootstrap" Jan 20 17:28:42 crc kubenswrapper[4558]: E0120 17:28:42.444179 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d692722b-f7fd-447c-8b7a-f56cff940d91" containerName="setup-container" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.444186 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d692722b-f7fd-447c-8b7a-f56cff940d91" containerName="setup-container" Jan 20 17:28:42 crc kubenswrapper[4558]: E0120 17:28:42.444196 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="125597fc-d3b4-44c1-a0a6-7dccbaebe7dd" containerName="barbican-keystone-listener" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.444204 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="125597fc-d3b4-44c1-a0a6-7dccbaebe7dd" containerName="barbican-keystone-listener" Jan 20 17:28:42 crc kubenswrapper[4558]: E0120 17:28:42.444214 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" containerName="container-auditor" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.444220 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" containerName="container-auditor" Jan 20 17:28:42 crc kubenswrapper[4558]: E0120 17:28:42.444227 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f81a00ad-36ce-4383-9e91-fe60de6939d2" containerName="galera" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.444233 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f81a00ad-36ce-4383-9e91-fe60de6939d2" containerName="galera" Jan 20 17:28:42 crc kubenswrapper[4558]: E0120 17:28:42.444244 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6dc46db7-9948-4c7d-b0a3-c767ffd58b4a" containerName="neutron-api" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.444249 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6dc46db7-9948-4c7d-b0a3-c767ffd58b4a" containerName="neutron-api" Jan 20 17:28:42 crc kubenswrapper[4558]: E0120 17:28:42.444260 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6dc46db7-9948-4c7d-b0a3-c767ffd58b4a" containerName="neutron-httpd" Jan 20 17:28:42 crc kubenswrapper[4558]: 
I0120 17:28:42.444266 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6dc46db7-9948-4c7d-b0a3-c767ffd58b4a" containerName="neutron-httpd" Jan 20 17:28:42 crc kubenswrapper[4558]: E0120 17:28:42.444275 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" containerName="account-reaper" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.444281 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" containerName="account-reaper" Jan 20 17:28:42 crc kubenswrapper[4558]: E0120 17:28:42.444289 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2a7ece8-32ed-4eb3-ab85-27ff4361622d" containerName="nova-scheduler-scheduler" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.444296 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2a7ece8-32ed-4eb3-ab85-27ff4361622d" containerName="nova-scheduler-scheduler" Jan 20 17:28:42 crc kubenswrapper[4558]: E0120 17:28:42.444308 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48cfc6e5-774d-4e7d-8103-f6a3260ea14c" containerName="setup-container" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.444315 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="48cfc6e5-774d-4e7d-8103-f6a3260ea14c" containerName="setup-container" Jan 20 17:28:42 crc kubenswrapper[4558]: E0120 17:28:42.444325 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66e24f38-a98c-4444-8ee4-352266267985" containerName="placement-log" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.444331 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="66e24f38-a98c-4444-8ee4-352266267985" containerName="placement-log" Jan 20 17:28:42 crc kubenswrapper[4558]: E0120 17:28:42.444340 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" containerName="container-updater" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.444346 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" containerName="container-updater" Jan 20 17:28:42 crc kubenswrapper[4558]: E0120 17:28:42.444352 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc4089c6-71ea-4503-b54c-18777fcc3c48" containerName="nova-metadata-log" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.444358 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc4089c6-71ea-4503-b54c-18777fcc3c48" containerName="nova-metadata-log" Jan 20 17:28:42 crc kubenswrapper[4558]: E0120 17:28:42.444365 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed3c56f1-4bbb-4590-8b19-a0de467537ad" containerName="mariadb-database-create" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.444371 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed3c56f1-4bbb-4590-8b19-a0de467537ad" containerName="mariadb-database-create" Jan 20 17:28:42 crc kubenswrapper[4558]: E0120 17:28:42.444377 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.444383 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:28:42 crc kubenswrapper[4558]: E0120 17:28:42.444389 4558 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="43ad40c9-142e-42c2-b46b-e998fb487f42" containerName="dnsmasq-dns" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.444395 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="43ad40c9-142e-42c2-b46b-e998fb487f42" containerName="dnsmasq-dns" Jan 20 17:28:42 crc kubenswrapper[4558]: E0120 17:28:42.444402 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b9858cf-2020-458e-bcf6-407c6853a962" containerName="barbican-worker-log" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.444408 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b9858cf-2020-458e-bcf6-407c6853a962" containerName="barbican-worker-log" Jan 20 17:28:42 crc kubenswrapper[4558]: E0120 17:28:42.444416 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" containerName="swift-recon-cron" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.444421 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" containerName="swift-recon-cron" Jan 20 17:28:42 crc kubenswrapper[4558]: E0120 17:28:42.444427 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f81a00ad-36ce-4383-9e91-fe60de6939d2" containerName="mysql-bootstrap" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.444433 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f81a00ad-36ce-4383-9e91-fe60de6939d2" containerName="mysql-bootstrap" Jan 20 17:28:42 crc kubenswrapper[4558]: E0120 17:28:42.444441 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" containerName="object-auditor" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.444445 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" containerName="object-auditor" Jan 20 17:28:42 crc kubenswrapper[4558]: E0120 17:28:42.444452 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="592927c6-9b42-411a-8aec-40cf6183e32a" containerName="glance-log" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.444457 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="592927c6-9b42-411a-8aec-40cf6183e32a" containerName="glance-log" Jan 20 17:28:42 crc kubenswrapper[4558]: E0120 17:28:42.444463 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e81d2508-e298-463c-9031-b9d8e486d566" containerName="glance-log" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.444469 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e81d2508-e298-463c-9031-b9d8e486d566" containerName="glance-log" Jan 20 17:28:42 crc kubenswrapper[4558]: E0120 17:28:42.444478 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="184a9f2b-eab7-4221-a900-746e063662a8" containerName="barbican-api" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.444483 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="184a9f2b-eab7-4221-a900-746e063662a8" containerName="barbican-api" Jan 20 17:28:42 crc kubenswrapper[4558]: E0120 17:28:42.444488 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2bfe108-b710-400b-baac-55815b192ee3" containerName="mariadb-account-create-update" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.444494 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2bfe108-b710-400b-baac-55815b192ee3" containerName="mariadb-account-create-update" Jan 20 17:28:42 crc kubenswrapper[4558]: E0120 17:28:42.444502 4558 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="4430d3cb-0a1c-4266-9f69-cf8e817f1d3e" containerName="nova-api-api" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.444507 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4430d3cb-0a1c-4266-9f69-cf8e817f1d3e" containerName="nova-api-api" Jan 20 17:28:42 crc kubenswrapper[4558]: E0120 17:28:42.444514 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66e24f38-a98c-4444-8ee4-352266267985" containerName="placement-api" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.444518 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="66e24f38-a98c-4444-8ee4-352266267985" containerName="placement-api" Jan 20 17:28:42 crc kubenswrapper[4558]: E0120 17:28:42.444528 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4430d3cb-0a1c-4266-9f69-cf8e817f1d3e" containerName="nova-api-log" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.444533 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4430d3cb-0a1c-4266-9f69-cf8e817f1d3e" containerName="nova-api-log" Jan 20 17:28:42 crc kubenswrapper[4558]: E0120 17:28:42.444540 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="134fc5ef-70be-4c8c-aa72-e4f0440d0afe" containerName="ceilometer-notification-agent" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.444546 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="134fc5ef-70be-4c8c-aa72-e4f0440d0afe" containerName="ceilometer-notification-agent" Jan 20 17:28:42 crc kubenswrapper[4558]: E0120 17:28:42.444554 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3973df1a-dd5a-417e-80a9-6653fb036470" containerName="nova-cell0-conductor-conductor" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.444560 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="3973df1a-dd5a-417e-80a9-6653fb036470" containerName="nova-cell0-conductor-conductor" Jan 20 17:28:42 crc kubenswrapper[4558]: E0120 17:28:42.444567 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6a0777b-d04f-451b-9e3d-3f9ab0e19df8" containerName="cinder-api" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.444572 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6a0777b-d04f-451b-9e3d-3f9ab0e19df8" containerName="cinder-api" Jan 20 17:28:42 crc kubenswrapper[4558]: E0120 17:28:42.444579 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2bfe108-b710-400b-baac-55815b192ee3" containerName="mariadb-account-create-update" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.444584 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2bfe108-b710-400b-baac-55815b192ee3" containerName="mariadb-account-create-update" Jan 20 17:28:42 crc kubenswrapper[4558]: E0120 17:28:42.444592 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48cfc6e5-774d-4e7d-8103-f6a3260ea14c" containerName="rabbitmq" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.444597 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="48cfc6e5-774d-4e7d-8103-f6a3260ea14c" containerName="rabbitmq" Jan 20 17:28:42 crc kubenswrapper[4558]: E0120 17:28:42.444602 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43ad40c9-142e-42c2-b46b-e998fb487f42" containerName="init" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.444607 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="43ad40c9-142e-42c2-b46b-e998fb487f42" containerName="init" Jan 20 17:28:42 crc kubenswrapper[4558]: E0120 17:28:42.444615 
4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df28560c-92e6-4003-a08f-b8691fa43300" containerName="mariadb-database-create" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.444621 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="df28560c-92e6-4003-a08f-b8691fa43300" containerName="mariadb-database-create" Jan 20 17:28:42 crc kubenswrapper[4558]: E0120 17:28:42.444630 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="88b73d7f-ee50-4fdf-9fd7-37296c855d69" containerName="nova-cell1-conductor-conductor" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.444635 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="88b73d7f-ee50-4fdf-9fd7-37296c855d69" containerName="nova-cell1-conductor-conductor" Jan 20 17:28:42 crc kubenswrapper[4558]: E0120 17:28:42.444643 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc4089c6-71ea-4503-b54c-18777fcc3c48" containerName="nova-metadata-metadata" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.444648 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc4089c6-71ea-4503-b54c-18777fcc3c48" containerName="nova-metadata-metadata" Jan 20 17:28:42 crc kubenswrapper[4558]: E0120 17:28:42.444656 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bfb2b456-ae0e-42ca-a227-428a626f1e3e" containerName="mariadb-database-create" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.444661 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="bfb2b456-ae0e-42ca-a227-428a626f1e3e" containerName="mariadb-database-create" Jan 20 17:28:42 crc kubenswrapper[4558]: E0120 17:28:42.444668 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" containerName="object-server" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.444673 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" containerName="object-server" Jan 20 17:28:42 crc kubenswrapper[4558]: E0120 17:28:42.444679 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2bee9ff9-52bb-4dae-971c-8c6236a4e563" containerName="proxy-httpd" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.444684 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="2bee9ff9-52bb-4dae-971c-8c6236a4e563" containerName="proxy-httpd" Jan 20 17:28:42 crc kubenswrapper[4558]: E0120 17:28:42.444692 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3460a75e-3553-47b9-bfc5-39c2c459826e" containerName="memcached" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.444697 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="3460a75e-3553-47b9-bfc5-39c2c459826e" containerName="memcached" Jan 20 17:28:42 crc kubenswrapper[4558]: E0120 17:28:42.444704 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" containerName="account-replicator" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.444710 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" containerName="account-replicator" Jan 20 17:28:42 crc kubenswrapper[4558]: E0120 17:28:42.444720 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d692722b-f7fd-447c-8b7a-f56cff940d91" containerName="rabbitmq" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.444725 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d692722b-f7fd-447c-8b7a-f56cff940d91" containerName="rabbitmq" 
Jan 20 17:28:42 crc kubenswrapper[4558]: E0120 17:28:42.444734 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1afb831-f3e4-4356-ab86-713eb0beca39" containerName="cinder-scheduler" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.444740 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1afb831-f3e4-4356-ab86-713eb0beca39" containerName="cinder-scheduler" Jan 20 17:28:42 crc kubenswrapper[4558]: E0120 17:28:42.444748 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2bee9ff9-52bb-4dae-971c-8c6236a4e563" containerName="proxy-server" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.444754 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="2bee9ff9-52bb-4dae-971c-8c6236a4e563" containerName="proxy-server" Jan 20 17:28:42 crc kubenswrapper[4558]: E0120 17:28:42.444761 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" containerName="account-server" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.444779 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" containerName="account-server" Jan 20 17:28:42 crc kubenswrapper[4558]: E0120 17:28:42.444788 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="443f910c-dd2d-4c72-b861-f15de67ac6bb" containerName="kube-state-metrics" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.444793 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="443f910c-dd2d-4c72-b861-f15de67ac6bb" containerName="kube-state-metrics" Jan 20 17:28:42 crc kubenswrapper[4558]: E0120 17:28:42.444800 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6bc8ca35-f460-4c4d-9dbe-0012c552371a" containerName="galera" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.444806 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6bc8ca35-f460-4c4d-9dbe-0012c552371a" containerName="galera" Jan 20 17:28:42 crc kubenswrapper[4558]: E0120 17:28:42.444814 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d72e21b-f0dd-48d4-9594-bb39aae6fa9d" containerName="keystone-api" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.444819 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d72e21b-f0dd-48d4-9594-bb39aae6fa9d" containerName="keystone-api" Jan 20 17:28:42 crc kubenswrapper[4558]: E0120 17:28:42.444828 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5" containerName="proxy-server" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.444833 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5" containerName="proxy-server" Jan 20 17:28:42 crc kubenswrapper[4558]: E0120 17:28:42.444838 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="134fc5ef-70be-4c8c-aa72-e4f0440d0afe" containerName="proxy-httpd" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.444843 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="134fc5ef-70be-4c8c-aa72-e4f0440d0afe" containerName="proxy-httpd" Jan 20 17:28:42 crc kubenswrapper[4558]: E0120 17:28:42.444848 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="125597fc-d3b4-44c1-a0a6-7dccbaebe7dd" containerName="barbican-keystone-listener-log" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.444853 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="125597fc-d3b4-44c1-a0a6-7dccbaebe7dd" 
containerName="barbican-keystone-listener-log" Jan 20 17:28:42 crc kubenswrapper[4558]: E0120 17:28:42.444861 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1afb831-f3e4-4356-ab86-713eb0beca39" containerName="probe" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.444866 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1afb831-f3e4-4356-ab86-713eb0beca39" containerName="probe" Jan 20 17:28:42 crc kubenswrapper[4558]: E0120 17:28:42.444874 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b7c6fb3-f30d-4dde-8502-e7d840719520" containerName="mariadb-database-create" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.444879 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b7c6fb3-f30d-4dde-8502-e7d840719520" containerName="mariadb-database-create" Jan 20 17:28:42 crc kubenswrapper[4558]: E0120 17:28:42.444888 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="592927c6-9b42-411a-8aec-40cf6183e32a" containerName="glance-httpd" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.444893 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="592927c6-9b42-411a-8aec-40cf6183e32a" containerName="glance-httpd" Jan 20 17:28:42 crc kubenswrapper[4558]: E0120 17:28:42.444900 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" containerName="object-expirer" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.444906 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" containerName="object-expirer" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.445047 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="3973df1a-dd5a-417e-80a9-6653fb036470" containerName="nova-cell0-conductor-conductor" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.445057 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d692722b-f7fd-447c-8b7a-f56cff940d91" containerName="rabbitmq" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.445067 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="bfb2b456-ae0e-42ca-a227-428a626f1e3e" containerName="mariadb-database-create" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.445074 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="134fc5ef-70be-4c8c-aa72-e4f0440d0afe" containerName="proxy-httpd" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.445080 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c1afb831-f3e4-4356-ab86-713eb0beca39" containerName="probe" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.445087 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="73ed8a8e-b7b8-4d71-9a2b-d29c60ffd996" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.445093 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="cc4089c6-71ea-4503-b54c-18777fcc3c48" containerName="nova-metadata-log" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.445100 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="3b9858cf-2020-458e-bcf6-407c6853a962" containerName="barbican-worker-log" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.445109 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" containerName="account-reaper" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.445115 4558 
memory_manager.go:354] "RemoveStaleState removing state" podUID="2bee9ff9-52bb-4dae-971c-8c6236a4e563" containerName="proxy-server" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.445121 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f81a00ad-36ce-4383-9e91-fe60de6939d2" containerName="galera" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.445129 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="df28560c-92e6-4003-a08f-b8691fa43300" containerName="mariadb-database-create" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.445137 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" containerName="swift-recon-cron" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.445147 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" containerName="object-replicator" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.445154 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="3460a75e-3553-47b9-bfc5-39c2c459826e" containerName="memcached" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.445176 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="134fc5ef-70be-4c8c-aa72-e4f0440d0afe" containerName="sg-core" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.445185 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" containerName="container-updater" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.445190 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" containerName="rsync" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.445198 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5" containerName="proxy-httpd" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.445204 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="4430d3cb-0a1c-4266-9f69-cf8e817f1d3e" containerName="nova-api-log" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.445209 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e81d2508-e298-463c-9031-b9d8e486d566" containerName="glance-log" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.445217 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="184a9f2b-eab7-4221-a900-746e063662a8" containerName="barbican-api-log" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.445221 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="cc4089c6-71ea-4503-b54c-18777fcc3c48" containerName="nova-metadata-metadata" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.445230 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="4430d3cb-0a1c-4266-9f69-cf8e817f1d3e" containerName="nova-api-api" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.445236 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="125597fc-d3b4-44c1-a0a6-7dccbaebe7dd" containerName="barbican-keystone-listener" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.445243 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="4570342e-2e9d-41ec-9bbd-ad4b8ed75fa5" containerName="proxy-server" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.445249 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f6a0777b-d04f-451b-9e3d-3f9ab0e19df8" 
containerName="cinder-api-log" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.445254 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="2bee9ff9-52bb-4dae-971c-8c6236a4e563" containerName="proxy-httpd" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.445261 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" containerName="object-auditor" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.445270 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="7d72e21b-f0dd-48d4-9594-bb39aae6fa9d" containerName="keystone-api" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.445277 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="3b9858cf-2020-458e-bcf6-407c6853a962" containerName="barbican-worker" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.445284 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="134fc5ef-70be-4c8c-aa72-e4f0440d0afe" containerName="ceilometer-central-agent" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.445292 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" containerName="account-server" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.445302 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" containerName="account-auditor" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.445308 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e81d2508-e298-463c-9031-b9d8e486d566" containerName="glance-httpd" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.445316 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="66e24f38-a98c-4444-8ee4-352266267985" containerName="placement-api" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.445324 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="134fc5ef-70be-4c8c-aa72-e4f0440d0afe" containerName="ceilometer-notification-agent" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.445331 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="592927c6-9b42-411a-8aec-40cf6183e32a" containerName="glance-httpd" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.445338 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b2a7ece8-32ed-4eb3-ab85-27ff4361622d" containerName="nova-scheduler-scheduler" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.445345 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c2bfe108-b710-400b-baac-55815b192ee3" containerName="mariadb-account-create-update" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.445352 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="43ad40c9-142e-42c2-b46b-e998fb487f42" containerName="dnsmasq-dns" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.445359 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" containerName="object-server" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.445366 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" containerName="container-auditor" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.445372 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed3c56f1-4bbb-4590-8b19-a0de467537ad" containerName="mariadb-database-create" Jan 20 17:28:42 crc 
kubenswrapper[4558]: I0120 17:28:42.445378 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="88b73d7f-ee50-4fdf-9fd7-37296c855d69" containerName="nova-cell1-conductor-conductor" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.445386 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" containerName="account-replicator" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.445393 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" containerName="container-server" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.445400 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="443f910c-dd2d-4c72-b861-f15de67ac6bb" containerName="kube-state-metrics" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.445408 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" containerName="object-updater" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.445415 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="48cfc6e5-774d-4e7d-8103-f6a3260ea14c" containerName="rabbitmq" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.445422 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c2bfe108-b710-400b-baac-55815b192ee3" containerName="mariadb-account-create-update" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.445431 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" containerName="object-expirer" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.445437 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8804f9f1-60d4-4fa1-9333-9f9e01b3c68a" containerName="container-replicator" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.445444 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="592927c6-9b42-411a-8aec-40cf6183e32a" containerName="glance-log" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.445451 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="6bc8ca35-f460-4c4d-9dbe-0012c552371a" containerName="galera" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.445458 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="6dc46db7-9948-4c7d-b0a3-c767ffd58b4a" containerName="neutron-api" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.445466 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f6a0777b-d04f-451b-9e3d-3f9ab0e19df8" containerName="cinder-api" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.445472 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="125597fc-d3b4-44c1-a0a6-7dccbaebe7dd" containerName="barbican-keystone-listener-log" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.445481 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="1b7c6fb3-f30d-4dde-8502-e7d840719520" containerName="mariadb-database-create" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.445490 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="184a9f2b-eab7-4221-a900-746e063662a8" containerName="barbican-api" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.445496 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c1afb831-f3e4-4356-ab86-713eb0beca39" containerName="cinder-scheduler" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.445503 4558 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="66e24f38-a98c-4444-8ee4-352266267985" containerName="placement-log" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.445511 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="6dc46db7-9948-4c7d-b0a3-c767ffd58b4a" containerName="neutron-httpd" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.446120 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-skfk2" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.448896 4558 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-k9ht8" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.448944 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.449700 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.450945 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.451004 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-skfk2"] Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.499780 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7d5xd\" (UniqueName: \"kubernetes.io/projected/131f0fe2-7d18-475f-86b5-541ca7747b94-kube-api-access-7d5xd\") pod \"crc-storage-crc-skfk2\" (UID: \"131f0fe2-7d18-475f-86b5-541ca7747b94\") " pod="crc-storage/crc-storage-crc-skfk2" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.499832 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/131f0fe2-7d18-475f-86b5-541ca7747b94-node-mnt\") pod \"crc-storage-crc-skfk2\" (UID: \"131f0fe2-7d18-475f-86b5-541ca7747b94\") " pod="crc-storage/crc-storage-crc-skfk2" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.499866 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/131f0fe2-7d18-475f-86b5-541ca7747b94-crc-storage\") pod \"crc-storage-crc-skfk2\" (UID: \"131f0fe2-7d18-475f-86b5-541ca7747b94\") " pod="crc-storage/crc-storage-crc-skfk2" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.575875 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="703ab252-bbd5-40ba-a904-6031bc2a1c1f" path="/var/lib/kubelet/pods/703ab252-bbd5-40ba-a904-6031bc2a1c1f/volumes" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.601793 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7d5xd\" (UniqueName: \"kubernetes.io/projected/131f0fe2-7d18-475f-86b5-541ca7747b94-kube-api-access-7d5xd\") pod \"crc-storage-crc-skfk2\" (UID: \"131f0fe2-7d18-475f-86b5-541ca7747b94\") " pod="crc-storage/crc-storage-crc-skfk2" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.601861 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/131f0fe2-7d18-475f-86b5-541ca7747b94-node-mnt\") pod \"crc-storage-crc-skfk2\" (UID: \"131f0fe2-7d18-475f-86b5-541ca7747b94\") " pod="crc-storage/crc-storage-crc-skfk2" Jan 20 17:28:42 crc kubenswrapper[4558]: 
I0120 17:28:42.601893 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/131f0fe2-7d18-475f-86b5-541ca7747b94-crc-storage\") pod \"crc-storage-crc-skfk2\" (UID: \"131f0fe2-7d18-475f-86b5-541ca7747b94\") " pod="crc-storage/crc-storage-crc-skfk2" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.602046 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/131f0fe2-7d18-475f-86b5-541ca7747b94-node-mnt\") pod \"crc-storage-crc-skfk2\" (UID: \"131f0fe2-7d18-475f-86b5-541ca7747b94\") " pod="crc-storage/crc-storage-crc-skfk2" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.602890 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/131f0fe2-7d18-475f-86b5-541ca7747b94-crc-storage\") pod \"crc-storage-crc-skfk2\" (UID: \"131f0fe2-7d18-475f-86b5-541ca7747b94\") " pod="crc-storage/crc-storage-crc-skfk2" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.626736 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7d5xd\" (UniqueName: \"kubernetes.io/projected/131f0fe2-7d18-475f-86b5-541ca7747b94-kube-api-access-7d5xd\") pod \"crc-storage-crc-skfk2\" (UID: \"131f0fe2-7d18-475f-86b5-541ca7747b94\") " pod="crc-storage/crc-storage-crc-skfk2" Jan 20 17:28:42 crc kubenswrapper[4558]: I0120 17:28:42.771986 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-skfk2" Jan 20 17:28:43 crc kubenswrapper[4558]: I0120 17:28:43.149218 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-skfk2"] Jan 20 17:28:43 crc kubenswrapper[4558]: I0120 17:28:43.157872 4558 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 20 17:28:43 crc kubenswrapper[4558]: I0120 17:28:43.519235 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-skfk2" event={"ID":"131f0fe2-7d18-475f-86b5-541ca7747b94","Type":"ContainerStarted","Data":"a5e7bfd243c0c23154ca5e2ee8f502e089d944b0a74fc6293b58d2066b722ce6"} Jan 20 17:28:44 crc kubenswrapper[4558]: I0120 17:28:44.527837 4558 generic.go:334] "Generic (PLEG): container finished" podID="131f0fe2-7d18-475f-86b5-541ca7747b94" containerID="9277589ed5b4239b74d19c171dcaaa83b3f9fc262243f6f1c2b59156f314f5d8" exitCode=0 Jan 20 17:28:44 crc kubenswrapper[4558]: I0120 17:28:44.527883 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-skfk2" event={"ID":"131f0fe2-7d18-475f-86b5-541ca7747b94","Type":"ContainerDied","Data":"9277589ed5b4239b74d19c171dcaaa83b3f9fc262243f6f1c2b59156f314f5d8"} Jan 20 17:28:45 crc kubenswrapper[4558]: I0120 17:28:45.801088 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-skfk2" Jan 20 17:28:45 crc kubenswrapper[4558]: I0120 17:28:45.847036 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7d5xd\" (UniqueName: \"kubernetes.io/projected/131f0fe2-7d18-475f-86b5-541ca7747b94-kube-api-access-7d5xd\") pod \"131f0fe2-7d18-475f-86b5-541ca7747b94\" (UID: \"131f0fe2-7d18-475f-86b5-541ca7747b94\") " Jan 20 17:28:45 crc kubenswrapper[4558]: I0120 17:28:45.847464 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/131f0fe2-7d18-475f-86b5-541ca7747b94-crc-storage\") pod \"131f0fe2-7d18-475f-86b5-541ca7747b94\" (UID: \"131f0fe2-7d18-475f-86b5-541ca7747b94\") " Jan 20 17:28:45 crc kubenswrapper[4558]: I0120 17:28:45.847570 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/131f0fe2-7d18-475f-86b5-541ca7747b94-node-mnt\") pod \"131f0fe2-7d18-475f-86b5-541ca7747b94\" (UID: \"131f0fe2-7d18-475f-86b5-541ca7747b94\") " Jan 20 17:28:45 crc kubenswrapper[4558]: I0120 17:28:45.847887 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/131f0fe2-7d18-475f-86b5-541ca7747b94-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "131f0fe2-7d18-475f-86b5-541ca7747b94" (UID: "131f0fe2-7d18-475f-86b5-541ca7747b94"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:28:45 crc kubenswrapper[4558]: I0120 17:28:45.853553 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/131f0fe2-7d18-475f-86b5-541ca7747b94-kube-api-access-7d5xd" (OuterVolumeSpecName: "kube-api-access-7d5xd") pod "131f0fe2-7d18-475f-86b5-541ca7747b94" (UID: "131f0fe2-7d18-475f-86b5-541ca7747b94"). InnerVolumeSpecName "kube-api-access-7d5xd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:28:45 crc kubenswrapper[4558]: I0120 17:28:45.867599 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/131f0fe2-7d18-475f-86b5-541ca7747b94-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "131f0fe2-7d18-475f-86b5-541ca7747b94" (UID: "131f0fe2-7d18-475f-86b5-541ca7747b94"). InnerVolumeSpecName "crc-storage". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:28:45 crc kubenswrapper[4558]: I0120 17:28:45.950384 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7d5xd\" (UniqueName: \"kubernetes.io/projected/131f0fe2-7d18-475f-86b5-541ca7747b94-kube-api-access-7d5xd\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:45 crc kubenswrapper[4558]: I0120 17:28:45.950436 4558 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/131f0fe2-7d18-475f-86b5-541ca7747b94-crc-storage\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:45 crc kubenswrapper[4558]: I0120 17:28:45.950450 4558 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/131f0fe2-7d18-475f-86b5-541ca7747b94-node-mnt\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:46 crc kubenswrapper[4558]: I0120 17:28:46.548663 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-skfk2" event={"ID":"131f0fe2-7d18-475f-86b5-541ca7747b94","Type":"ContainerDied","Data":"a5e7bfd243c0c23154ca5e2ee8f502e089d944b0a74fc6293b58d2066b722ce6"} Jan 20 17:28:46 crc kubenswrapper[4558]: I0120 17:28:46.548711 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a5e7bfd243c0c23154ca5e2ee8f502e089d944b0a74fc6293b58d2066b722ce6" Jan 20 17:28:46 crc kubenswrapper[4558]: I0120 17:28:46.549074 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-skfk2" Jan 20 17:28:48 crc kubenswrapper[4558]: I0120 17:28:48.726059 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-skfk2"] Jan 20 17:28:48 crc kubenswrapper[4558]: I0120 17:28:48.732972 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-skfk2"] Jan 20 17:28:48 crc kubenswrapper[4558]: I0120 17:28:48.819301 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-mhzjj"] Jan 20 17:28:48 crc kubenswrapper[4558]: E0120 17:28:48.819857 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="131f0fe2-7d18-475f-86b5-541ca7747b94" containerName="storage" Jan 20 17:28:48 crc kubenswrapper[4558]: I0120 17:28:48.819886 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="131f0fe2-7d18-475f-86b5-541ca7747b94" containerName="storage" Jan 20 17:28:48 crc kubenswrapper[4558]: I0120 17:28:48.820198 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="131f0fe2-7d18-475f-86b5-541ca7747b94" containerName="storage" Jan 20 17:28:48 crc kubenswrapper[4558]: I0120 17:28:48.821116 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-mhzjj" Jan 20 17:28:48 crc kubenswrapper[4558]: I0120 17:28:48.823421 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Jan 20 17:28:48 crc kubenswrapper[4558]: I0120 17:28:48.824210 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Jan 20 17:28:48 crc kubenswrapper[4558]: I0120 17:28:48.826237 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Jan 20 17:28:48 crc kubenswrapper[4558]: I0120 17:28:48.830349 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-mhzjj"] Jan 20 17:28:48 crc kubenswrapper[4558]: I0120 17:28:48.832487 4558 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-k9ht8" Jan 20 17:28:48 crc kubenswrapper[4558]: I0120 17:28:48.904085 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/996db13b-869c-4880-a4ad-d9d22e5a493b-crc-storage\") pod \"crc-storage-crc-mhzjj\" (UID: \"996db13b-869c-4880-a4ad-d9d22e5a493b\") " pod="crc-storage/crc-storage-crc-mhzjj" Jan 20 17:28:48 crc kubenswrapper[4558]: I0120 17:28:48.904287 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6s8rp\" (UniqueName: \"kubernetes.io/projected/996db13b-869c-4880-a4ad-d9d22e5a493b-kube-api-access-6s8rp\") pod \"crc-storage-crc-mhzjj\" (UID: \"996db13b-869c-4880-a4ad-d9d22e5a493b\") " pod="crc-storage/crc-storage-crc-mhzjj" Jan 20 17:28:48 crc kubenswrapper[4558]: I0120 17:28:48.904341 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/996db13b-869c-4880-a4ad-d9d22e5a493b-node-mnt\") pod \"crc-storage-crc-mhzjj\" (UID: \"996db13b-869c-4880-a4ad-d9d22e5a493b\") " pod="crc-storage/crc-storage-crc-mhzjj" Jan 20 17:28:49 crc kubenswrapper[4558]: I0120 17:28:49.005394 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6s8rp\" (UniqueName: \"kubernetes.io/projected/996db13b-869c-4880-a4ad-d9d22e5a493b-kube-api-access-6s8rp\") pod \"crc-storage-crc-mhzjj\" (UID: \"996db13b-869c-4880-a4ad-d9d22e5a493b\") " pod="crc-storage/crc-storage-crc-mhzjj" Jan 20 17:28:49 crc kubenswrapper[4558]: I0120 17:28:49.005455 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/996db13b-869c-4880-a4ad-d9d22e5a493b-node-mnt\") pod \"crc-storage-crc-mhzjj\" (UID: \"996db13b-869c-4880-a4ad-d9d22e5a493b\") " pod="crc-storage/crc-storage-crc-mhzjj" Jan 20 17:28:49 crc kubenswrapper[4558]: I0120 17:28:49.005493 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/996db13b-869c-4880-a4ad-d9d22e5a493b-crc-storage\") pod \"crc-storage-crc-mhzjj\" (UID: \"996db13b-869c-4880-a4ad-d9d22e5a493b\") " pod="crc-storage/crc-storage-crc-mhzjj" Jan 20 17:28:49 crc kubenswrapper[4558]: I0120 17:28:49.005847 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/996db13b-869c-4880-a4ad-d9d22e5a493b-node-mnt\") pod \"crc-storage-crc-mhzjj\" (UID: \"996db13b-869c-4880-a4ad-d9d22e5a493b\") " 
pod="crc-storage/crc-storage-crc-mhzjj" Jan 20 17:28:49 crc kubenswrapper[4558]: I0120 17:28:49.006255 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/996db13b-869c-4880-a4ad-d9d22e5a493b-crc-storage\") pod \"crc-storage-crc-mhzjj\" (UID: \"996db13b-869c-4880-a4ad-d9d22e5a493b\") " pod="crc-storage/crc-storage-crc-mhzjj" Jan 20 17:28:49 crc kubenswrapper[4558]: I0120 17:28:49.023679 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6s8rp\" (UniqueName: \"kubernetes.io/projected/996db13b-869c-4880-a4ad-d9d22e5a493b-kube-api-access-6s8rp\") pod \"crc-storage-crc-mhzjj\" (UID: \"996db13b-869c-4880-a4ad-d9d22e5a493b\") " pod="crc-storage/crc-storage-crc-mhzjj" Jan 20 17:28:49 crc kubenswrapper[4558]: I0120 17:28:49.138720 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-mhzjj" Jan 20 17:28:49 crc kubenswrapper[4558]: I0120 17:28:49.545966 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-mhzjj"] Jan 20 17:28:49 crc kubenswrapper[4558]: I0120 17:28:49.579141 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-mhzjj" event={"ID":"996db13b-869c-4880-a4ad-d9d22e5a493b","Type":"ContainerStarted","Data":"a9dff8f8408e766b9a4260118b9cf86bc1c437597100f7dfc5bfabec5e79b61a"} Jan 20 17:28:50 crc kubenswrapper[4558]: I0120 17:28:50.576949 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="131f0fe2-7d18-475f-86b5-541ca7747b94" path="/var/lib/kubelet/pods/131f0fe2-7d18-475f-86b5-541ca7747b94/volumes" Jan 20 17:28:50 crc kubenswrapper[4558]: I0120 17:28:50.589335 4558 generic.go:334] "Generic (PLEG): container finished" podID="996db13b-869c-4880-a4ad-d9d22e5a493b" containerID="bd9bc91b84b06a59b8b191189a3f6d1dde72408f2e0800a8c07dc2c238aeebb3" exitCode=0 Jan 20 17:28:50 crc kubenswrapper[4558]: I0120 17:28:50.589385 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-mhzjj" event={"ID":"996db13b-869c-4880-a4ad-d9d22e5a493b","Type":"ContainerDied","Data":"bd9bc91b84b06a59b8b191189a3f6d1dde72408f2e0800a8c07dc2c238aeebb3"} Jan 20 17:28:51 crc kubenswrapper[4558]: I0120 17:28:51.860829 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-mhzjj" Jan 20 17:28:51 crc kubenswrapper[4558]: I0120 17:28:51.946837 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/996db13b-869c-4880-a4ad-d9d22e5a493b-crc-storage\") pod \"996db13b-869c-4880-a4ad-d9d22e5a493b\" (UID: \"996db13b-869c-4880-a4ad-d9d22e5a493b\") " Jan 20 17:28:51 crc kubenswrapper[4558]: I0120 17:28:51.946898 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6s8rp\" (UniqueName: \"kubernetes.io/projected/996db13b-869c-4880-a4ad-d9d22e5a493b-kube-api-access-6s8rp\") pod \"996db13b-869c-4880-a4ad-d9d22e5a493b\" (UID: \"996db13b-869c-4880-a4ad-d9d22e5a493b\") " Jan 20 17:28:51 crc kubenswrapper[4558]: I0120 17:28:51.946974 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/996db13b-869c-4880-a4ad-d9d22e5a493b-node-mnt\") pod \"996db13b-869c-4880-a4ad-d9d22e5a493b\" (UID: \"996db13b-869c-4880-a4ad-d9d22e5a493b\") " Jan 20 17:28:51 crc kubenswrapper[4558]: I0120 17:28:51.947204 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/996db13b-869c-4880-a4ad-d9d22e5a493b-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "996db13b-869c-4880-a4ad-d9d22e5a493b" (UID: "996db13b-869c-4880-a4ad-d9d22e5a493b"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:28:51 crc kubenswrapper[4558]: I0120 17:28:51.947628 4558 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/996db13b-869c-4880-a4ad-d9d22e5a493b-node-mnt\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:51 crc kubenswrapper[4558]: I0120 17:28:51.951880 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/996db13b-869c-4880-a4ad-d9d22e5a493b-kube-api-access-6s8rp" (OuterVolumeSpecName: "kube-api-access-6s8rp") pod "996db13b-869c-4880-a4ad-d9d22e5a493b" (UID: "996db13b-869c-4880-a4ad-d9d22e5a493b"). InnerVolumeSpecName "kube-api-access-6s8rp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:28:51 crc kubenswrapper[4558]: I0120 17:28:51.963548 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/996db13b-869c-4880-a4ad-d9d22e5a493b-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "996db13b-869c-4880-a4ad-d9d22e5a493b" (UID: "996db13b-869c-4880-a4ad-d9d22e5a493b"). InnerVolumeSpecName "crc-storage". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:28:52 crc kubenswrapper[4558]: I0120 17:28:52.049447 4558 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/996db13b-869c-4880-a4ad-d9d22e5a493b-crc-storage\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:52 crc kubenswrapper[4558]: I0120 17:28:52.049477 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6s8rp\" (UniqueName: \"kubernetes.io/projected/996db13b-869c-4880-a4ad-d9d22e5a493b-kube-api-access-6s8rp\") on node \"crc\" DevicePath \"\"" Jan 20 17:28:52 crc kubenswrapper[4558]: I0120 17:28:52.605962 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-mhzjj" event={"ID":"996db13b-869c-4880-a4ad-d9d22e5a493b","Type":"ContainerDied","Data":"a9dff8f8408e766b9a4260118b9cf86bc1c437597100f7dfc5bfabec5e79b61a"} Jan 20 17:28:52 crc kubenswrapper[4558]: I0120 17:28:52.606021 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-mhzjj" Jan 20 17:28:52 crc kubenswrapper[4558]: I0120 17:28:52.606043 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a9dff8f8408e766b9a4260118b9cf86bc1c437597100f7dfc5bfabec5e79b61a" Jan 20 17:28:55 crc kubenswrapper[4558]: I0120 17:28:55.569211 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-mn9jr"] Jan 20 17:28:55 crc kubenswrapper[4558]: E0120 17:28:55.569600 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="996db13b-869c-4880-a4ad-d9d22e5a493b" containerName="storage" Jan 20 17:28:55 crc kubenswrapper[4558]: I0120 17:28:55.569616 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="996db13b-869c-4880-a4ad-d9d22e5a493b" containerName="storage" Jan 20 17:28:55 crc kubenswrapper[4558]: I0120 17:28:55.569774 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="996db13b-869c-4880-a4ad-d9d22e5a493b" containerName="storage" Jan 20 17:28:55 crc kubenswrapper[4558]: I0120 17:28:55.570828 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-mn9jr" Jan 20 17:28:55 crc kubenswrapper[4558]: I0120 17:28:55.585201 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mn9jr"] Jan 20 17:28:55 crc kubenswrapper[4558]: I0120 17:28:55.608382 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/86b88605-5219-482f-803e-edae5ea5d319-catalog-content\") pod \"redhat-operators-mn9jr\" (UID: \"86b88605-5219-482f-803e-edae5ea5d319\") " pod="openshift-marketplace/redhat-operators-mn9jr" Jan 20 17:28:55 crc kubenswrapper[4558]: I0120 17:28:55.608486 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/86b88605-5219-482f-803e-edae5ea5d319-utilities\") pod \"redhat-operators-mn9jr\" (UID: \"86b88605-5219-482f-803e-edae5ea5d319\") " pod="openshift-marketplace/redhat-operators-mn9jr" Jan 20 17:28:55 crc kubenswrapper[4558]: I0120 17:28:55.608665 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rvkm5\" (UniqueName: \"kubernetes.io/projected/86b88605-5219-482f-803e-edae5ea5d319-kube-api-access-rvkm5\") pod \"redhat-operators-mn9jr\" (UID: \"86b88605-5219-482f-803e-edae5ea5d319\") " pod="openshift-marketplace/redhat-operators-mn9jr" Jan 20 17:28:55 crc kubenswrapper[4558]: I0120 17:28:55.710669 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rvkm5\" (UniqueName: \"kubernetes.io/projected/86b88605-5219-482f-803e-edae5ea5d319-kube-api-access-rvkm5\") pod \"redhat-operators-mn9jr\" (UID: \"86b88605-5219-482f-803e-edae5ea5d319\") " pod="openshift-marketplace/redhat-operators-mn9jr" Jan 20 17:28:55 crc kubenswrapper[4558]: I0120 17:28:55.710786 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/86b88605-5219-482f-803e-edae5ea5d319-catalog-content\") pod \"redhat-operators-mn9jr\" (UID: \"86b88605-5219-482f-803e-edae5ea5d319\") " pod="openshift-marketplace/redhat-operators-mn9jr" Jan 20 17:28:55 crc kubenswrapper[4558]: I0120 17:28:55.711244 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/86b88605-5219-482f-803e-edae5ea5d319-catalog-content\") pod \"redhat-operators-mn9jr\" (UID: \"86b88605-5219-482f-803e-edae5ea5d319\") " pod="openshift-marketplace/redhat-operators-mn9jr" Jan 20 17:28:55 crc kubenswrapper[4558]: I0120 17:28:55.711288 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/86b88605-5219-482f-803e-edae5ea5d319-utilities\") pod \"redhat-operators-mn9jr\" (UID: \"86b88605-5219-482f-803e-edae5ea5d319\") " pod="openshift-marketplace/redhat-operators-mn9jr" Jan 20 17:28:55 crc kubenswrapper[4558]: I0120 17:28:55.711300 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/86b88605-5219-482f-803e-edae5ea5d319-utilities\") pod \"redhat-operators-mn9jr\" (UID: \"86b88605-5219-482f-803e-edae5ea5d319\") " pod="openshift-marketplace/redhat-operators-mn9jr" Jan 20 17:28:55 crc kubenswrapper[4558]: I0120 17:28:55.726211 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-rvkm5\" (UniqueName: \"kubernetes.io/projected/86b88605-5219-482f-803e-edae5ea5d319-kube-api-access-rvkm5\") pod \"redhat-operators-mn9jr\" (UID: \"86b88605-5219-482f-803e-edae5ea5d319\") " pod="openshift-marketplace/redhat-operators-mn9jr" Jan 20 17:28:55 crc kubenswrapper[4558]: I0120 17:28:55.887773 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mn9jr" Jan 20 17:28:56 crc kubenswrapper[4558]: I0120 17:28:56.308364 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mn9jr"] Jan 20 17:28:56 crc kubenswrapper[4558]: I0120 17:28:56.661574 4558 generic.go:334] "Generic (PLEG): container finished" podID="86b88605-5219-482f-803e-edae5ea5d319" containerID="94e8d5e6a044a2bc19b0c7a23c0dadff21b1814df4e2d558c68b7f6c9c9621ac" exitCode=0 Jan 20 17:28:56 crc kubenswrapper[4558]: I0120 17:28:56.661872 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mn9jr" event={"ID":"86b88605-5219-482f-803e-edae5ea5d319","Type":"ContainerDied","Data":"94e8d5e6a044a2bc19b0c7a23c0dadff21b1814df4e2d558c68b7f6c9c9621ac"} Jan 20 17:28:56 crc kubenswrapper[4558]: I0120 17:28:56.661938 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mn9jr" event={"ID":"86b88605-5219-482f-803e-edae5ea5d319","Type":"ContainerStarted","Data":"e054cf99c703307392344cee65707bf2149a5df79632e3cb8462d22b28db6d41"} Jan 20 17:28:57 crc kubenswrapper[4558]: I0120 17:28:57.329713 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 17:28:57 crc kubenswrapper[4558]: I0120 17:28:57.330047 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 17:28:58 crc kubenswrapper[4558]: I0120 17:28:58.687471 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mn9jr" event={"ID":"86b88605-5219-482f-803e-edae5ea5d319","Type":"ContainerStarted","Data":"9b5a8a676324fb1e8f2b5d27898efb15271fce6ab775bd87dfff07b977d04682"} Jan 20 17:28:59 crc kubenswrapper[4558]: I0120 17:28:59.700137 4558 generic.go:334] "Generic (PLEG): container finished" podID="86b88605-5219-482f-803e-edae5ea5d319" containerID="9b5a8a676324fb1e8f2b5d27898efb15271fce6ab775bd87dfff07b977d04682" exitCode=0 Jan 20 17:28:59 crc kubenswrapper[4558]: I0120 17:28:59.700218 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mn9jr" event={"ID":"86b88605-5219-482f-803e-edae5ea5d319","Type":"ContainerDied","Data":"9b5a8a676324fb1e8f2b5d27898efb15271fce6ab775bd87dfff07b977d04682"} Jan 20 17:29:00 crc kubenswrapper[4558]: I0120 17:29:00.711044 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mn9jr" event={"ID":"86b88605-5219-482f-803e-edae5ea5d319","Type":"ContainerStarted","Data":"a2da0cc196cd077af001802b735ebf8ad80ebc52741d9d22cbe7e660fe3bf3ef"} Jan 20 17:29:00 crc kubenswrapper[4558]: I0120 17:29:00.730789 4558 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-mn9jr" podStartSLOduration=2.151667237 podStartE2EDuration="5.730762634s" podCreationTimestamp="2026-01-20 17:28:55 +0000 UTC" firstStartedPulling="2026-01-20 17:28:56.664887605 +0000 UTC m=+2830.425225561" lastFinishedPulling="2026-01-20 17:29:00.24398299 +0000 UTC m=+2834.004320958" observedRunningTime="2026-01-20 17:29:00.725811314 +0000 UTC m=+2834.486149281" watchObservedRunningTime="2026-01-20 17:29:00.730762634 +0000 UTC m=+2834.491100600" Jan 20 17:29:02 crc kubenswrapper[4558]: I0120 17:29:02.305660 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:29:02 crc kubenswrapper[4558]: I0120 17:29:02.307896 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:29:02 crc kubenswrapper[4558]: I0120 17:29:02.310654 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"rabbitmq-server-dockercfg-pqnkt" Jan 20 17:29:02 crc kubenswrapper[4558]: I0120 17:29:02.310917 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"rabbitmq-erlang-cookie" Jan 20 17:29:02 crc kubenswrapper[4558]: I0120 17:29:02.310945 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"rabbitmq-plugins-conf" Jan 20 17:29:02 crc kubenswrapper[4558]: I0120 17:29:02.311059 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"rabbitmq-default-user" Jan 20 17:29:02 crc kubenswrapper[4558]: I0120 17:29:02.311347 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"rabbitmq-server-conf" Jan 20 17:29:02 crc kubenswrapper[4558]: I0120 17:29:02.320723 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:29:02 crc kubenswrapper[4558]: I0120 17:29:02.422721 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/35b2371f-57f2-4728-b32f-1ba587b4532e-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"35b2371f-57f2-4728-b32f-1ba587b4532e\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:29:02 crc kubenswrapper[4558]: I0120 17:29:02.422772 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/35b2371f-57f2-4728-b32f-1ba587b4532e-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"35b2371f-57f2-4728-b32f-1ba587b4532e\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:29:02 crc kubenswrapper[4558]: I0120 17:29:02.422924 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/35b2371f-57f2-4728-b32f-1ba587b4532e-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"35b2371f-57f2-4728-b32f-1ba587b4532e\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:29:02 crc kubenswrapper[4558]: I0120 17:29:02.423005 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-server-0\" (UID: \"35b2371f-57f2-4728-b32f-1ba587b4532e\") " 
pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:29:02 crc kubenswrapper[4558]: I0120 17:29:02.423099 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-slnl6\" (UniqueName: \"kubernetes.io/projected/35b2371f-57f2-4728-b32f-1ba587b4532e-kube-api-access-slnl6\") pod \"rabbitmq-server-0\" (UID: \"35b2371f-57f2-4728-b32f-1ba587b4532e\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:29:02 crc kubenswrapper[4558]: I0120 17:29:02.423140 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/35b2371f-57f2-4728-b32f-1ba587b4532e-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"35b2371f-57f2-4728-b32f-1ba587b4532e\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:29:02 crc kubenswrapper[4558]: I0120 17:29:02.423159 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/35b2371f-57f2-4728-b32f-1ba587b4532e-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"35b2371f-57f2-4728-b32f-1ba587b4532e\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:29:02 crc kubenswrapper[4558]: I0120 17:29:02.423223 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/35b2371f-57f2-4728-b32f-1ba587b4532e-pod-info\") pod \"rabbitmq-server-0\" (UID: \"35b2371f-57f2-4728-b32f-1ba587b4532e\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:29:02 crc kubenswrapper[4558]: I0120 17:29:02.423340 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/35b2371f-57f2-4728-b32f-1ba587b4532e-server-conf\") pod \"rabbitmq-server-0\" (UID: \"35b2371f-57f2-4728-b32f-1ba587b4532e\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:29:02 crc kubenswrapper[4558]: I0120 17:29:02.524922 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-server-0\" (UID: \"35b2371f-57f2-4728-b32f-1ba587b4532e\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:29:02 crc kubenswrapper[4558]: I0120 17:29:02.525002 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-slnl6\" (UniqueName: \"kubernetes.io/projected/35b2371f-57f2-4728-b32f-1ba587b4532e-kube-api-access-slnl6\") pod \"rabbitmq-server-0\" (UID: \"35b2371f-57f2-4728-b32f-1ba587b4532e\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:29:02 crc kubenswrapper[4558]: I0120 17:29:02.525033 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/35b2371f-57f2-4728-b32f-1ba587b4532e-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"35b2371f-57f2-4728-b32f-1ba587b4532e\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:29:02 crc kubenswrapper[4558]: I0120 17:29:02.525053 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/35b2371f-57f2-4728-b32f-1ba587b4532e-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"35b2371f-57f2-4728-b32f-1ba587b4532e\") " 
pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:29:02 crc kubenswrapper[4558]: I0120 17:29:02.525081 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/35b2371f-57f2-4728-b32f-1ba587b4532e-pod-info\") pod \"rabbitmq-server-0\" (UID: \"35b2371f-57f2-4728-b32f-1ba587b4532e\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:29:02 crc kubenswrapper[4558]: I0120 17:29:02.525141 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/35b2371f-57f2-4728-b32f-1ba587b4532e-server-conf\") pod \"rabbitmq-server-0\" (UID: \"35b2371f-57f2-4728-b32f-1ba587b4532e\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:29:02 crc kubenswrapper[4558]: I0120 17:29:02.525192 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/35b2371f-57f2-4728-b32f-1ba587b4532e-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"35b2371f-57f2-4728-b32f-1ba587b4532e\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:29:02 crc kubenswrapper[4558]: I0120 17:29:02.525220 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/35b2371f-57f2-4728-b32f-1ba587b4532e-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"35b2371f-57f2-4728-b32f-1ba587b4532e\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:29:02 crc kubenswrapper[4558]: I0120 17:29:02.525261 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/35b2371f-57f2-4728-b32f-1ba587b4532e-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"35b2371f-57f2-4728-b32f-1ba587b4532e\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:29:02 crc kubenswrapper[4558]: I0120 17:29:02.525365 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-server-0\" (UID: \"35b2371f-57f2-4728-b32f-1ba587b4532e\") device mount path \"/mnt/openstack/pv12\"" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:29:02 crc kubenswrapper[4558]: I0120 17:29:02.525941 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/35b2371f-57f2-4728-b32f-1ba587b4532e-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"35b2371f-57f2-4728-b32f-1ba587b4532e\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:29:02 crc kubenswrapper[4558]: I0120 17:29:02.526865 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/35b2371f-57f2-4728-b32f-1ba587b4532e-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"35b2371f-57f2-4728-b32f-1ba587b4532e\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:29:02 crc kubenswrapper[4558]: I0120 17:29:02.526981 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/35b2371f-57f2-4728-b32f-1ba587b4532e-server-conf\") pod \"rabbitmq-server-0\" (UID: \"35b2371f-57f2-4728-b32f-1ba587b4532e\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:29:02 crc kubenswrapper[4558]: I0120 17:29:02.527342 4558 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/35b2371f-57f2-4728-b32f-1ba587b4532e-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"35b2371f-57f2-4728-b32f-1ba587b4532e\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:29:02 crc kubenswrapper[4558]: I0120 17:29:02.538522 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/35b2371f-57f2-4728-b32f-1ba587b4532e-pod-info\") pod \"rabbitmq-server-0\" (UID: \"35b2371f-57f2-4728-b32f-1ba587b4532e\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:29:02 crc kubenswrapper[4558]: I0120 17:29:02.538557 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/35b2371f-57f2-4728-b32f-1ba587b4532e-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"35b2371f-57f2-4728-b32f-1ba587b4532e\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:29:02 crc kubenswrapper[4558]: I0120 17:29:02.538675 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/35b2371f-57f2-4728-b32f-1ba587b4532e-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"35b2371f-57f2-4728-b32f-1ba587b4532e\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:29:02 crc kubenswrapper[4558]: I0120 17:29:02.543507 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-0"] Jan 20 17:29:02 crc kubenswrapper[4558]: I0120 17:29:02.545860 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:29:02 crc kubenswrapper[4558]: I0120 17:29:02.548312 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-server-0\" (UID: \"35b2371f-57f2-4728-b32f-1ba587b4532e\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:29:02 crc kubenswrapper[4558]: I0120 17:29:02.550373 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"rabbitmq-cell1-plugins-conf" Jan 20 17:29:02 crc kubenswrapper[4558]: I0120 17:29:02.550580 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-slnl6\" (UniqueName: \"kubernetes.io/projected/35b2371f-57f2-4728-b32f-1ba587b4532e-kube-api-access-slnl6\") pod \"rabbitmq-server-0\" (UID: \"35b2371f-57f2-4728-b32f-1ba587b4532e\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:29:02 crc kubenswrapper[4558]: I0120 17:29:02.550688 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"rabbitmq-cell1-default-user" Jan 20 17:29:02 crc kubenswrapper[4558]: I0120 17:29:02.550854 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"rabbitmq-cell1-erlang-cookie" Jan 20 17:29:02 crc kubenswrapper[4558]: I0120 17:29:02.550892 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"rabbitmq-cell1-server-dockercfg-t8m5j" Jan 20 17:29:02 crc kubenswrapper[4558]: I0120 17:29:02.550925 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"rabbitmq-cell1-server-conf" Jan 20 17:29:02 crc kubenswrapper[4558]: I0120 17:29:02.555693 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack-kuttl-tests/rabbitmq-cell1-server-0"] Jan 20 17:29:02 crc kubenswrapper[4558]: I0120 17:29:02.627065 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/49dc00d4-0563-4324-a98b-e42b24b4223b-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"49dc00d4-0563-4324-a98b-e42b24b4223b\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:29:02 crc kubenswrapper[4558]: I0120 17:29:02.627125 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/49dc00d4-0563-4324-a98b-e42b24b4223b-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"49dc00d4-0563-4324-a98b-e42b24b4223b\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:29:02 crc kubenswrapper[4558]: I0120 17:29:02.627266 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/49dc00d4-0563-4324-a98b-e42b24b4223b-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"49dc00d4-0563-4324-a98b-e42b24b4223b\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:29:02 crc kubenswrapper[4558]: I0120 17:29:02.627294 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"49dc00d4-0563-4324-a98b-e42b24b4223b\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:29:02 crc kubenswrapper[4558]: I0120 17:29:02.627325 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/49dc00d4-0563-4324-a98b-e42b24b4223b-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"49dc00d4-0563-4324-a98b-e42b24b4223b\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:29:02 crc kubenswrapper[4558]: I0120 17:29:02.627443 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wxbsn\" (UniqueName: \"kubernetes.io/projected/49dc00d4-0563-4324-a98b-e42b24b4223b-kube-api-access-wxbsn\") pod \"rabbitmq-cell1-server-0\" (UID: \"49dc00d4-0563-4324-a98b-e42b24b4223b\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:29:02 crc kubenswrapper[4558]: I0120 17:29:02.627548 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/49dc00d4-0563-4324-a98b-e42b24b4223b-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"49dc00d4-0563-4324-a98b-e42b24b4223b\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:29:02 crc kubenswrapper[4558]: I0120 17:29:02.627580 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/49dc00d4-0563-4324-a98b-e42b24b4223b-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"49dc00d4-0563-4324-a98b-e42b24b4223b\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:29:02 crc kubenswrapper[4558]: I0120 17:29:02.627619 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: 
\"kubernetes.io/empty-dir/49dc00d4-0563-4324-a98b-e42b24b4223b-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"49dc00d4-0563-4324-a98b-e42b24b4223b\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:29:02 crc kubenswrapper[4558]: I0120 17:29:02.629435 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:29:02 crc kubenswrapper[4558]: I0120 17:29:02.729003 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/49dc00d4-0563-4324-a98b-e42b24b4223b-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"49dc00d4-0563-4324-a98b-e42b24b4223b\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:29:02 crc kubenswrapper[4558]: I0120 17:29:02.729269 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"49dc00d4-0563-4324-a98b-e42b24b4223b\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:29:02 crc kubenswrapper[4558]: I0120 17:29:02.729293 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/49dc00d4-0563-4324-a98b-e42b24b4223b-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"49dc00d4-0563-4324-a98b-e42b24b4223b\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:29:02 crc kubenswrapper[4558]: I0120 17:29:02.729326 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wxbsn\" (UniqueName: \"kubernetes.io/projected/49dc00d4-0563-4324-a98b-e42b24b4223b-kube-api-access-wxbsn\") pod \"rabbitmq-cell1-server-0\" (UID: \"49dc00d4-0563-4324-a98b-e42b24b4223b\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:29:02 crc kubenswrapper[4558]: I0120 17:29:02.729363 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/49dc00d4-0563-4324-a98b-e42b24b4223b-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"49dc00d4-0563-4324-a98b-e42b24b4223b\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:29:02 crc kubenswrapper[4558]: I0120 17:29:02.729379 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/49dc00d4-0563-4324-a98b-e42b24b4223b-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"49dc00d4-0563-4324-a98b-e42b24b4223b\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:29:02 crc kubenswrapper[4558]: I0120 17:29:02.729399 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/49dc00d4-0563-4324-a98b-e42b24b4223b-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"49dc00d4-0563-4324-a98b-e42b24b4223b\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:29:02 crc kubenswrapper[4558]: I0120 17:29:02.729422 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/49dc00d4-0563-4324-a98b-e42b24b4223b-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"49dc00d4-0563-4324-a98b-e42b24b4223b\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:29:02 crc 
kubenswrapper[4558]: I0120 17:29:02.729448 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/49dc00d4-0563-4324-a98b-e42b24b4223b-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"49dc00d4-0563-4324-a98b-e42b24b4223b\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:29:02 crc kubenswrapper[4558]: I0120 17:29:02.731108 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/49dc00d4-0563-4324-a98b-e42b24b4223b-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"49dc00d4-0563-4324-a98b-e42b24b4223b\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:29:02 crc kubenswrapper[4558]: I0120 17:29:02.732346 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/49dc00d4-0563-4324-a98b-e42b24b4223b-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"49dc00d4-0563-4324-a98b-e42b24b4223b\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:29:02 crc kubenswrapper[4558]: I0120 17:29:02.732527 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"49dc00d4-0563-4324-a98b-e42b24b4223b\") device mount path \"/mnt/openstack/pv01\"" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:29:02 crc kubenswrapper[4558]: I0120 17:29:02.733086 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/49dc00d4-0563-4324-a98b-e42b24b4223b-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"49dc00d4-0563-4324-a98b-e42b24b4223b\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:29:02 crc kubenswrapper[4558]: I0120 17:29:02.735658 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/49dc00d4-0563-4324-a98b-e42b24b4223b-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"49dc00d4-0563-4324-a98b-e42b24b4223b\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:29:02 crc kubenswrapper[4558]: I0120 17:29:02.741522 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/49dc00d4-0563-4324-a98b-e42b24b4223b-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"49dc00d4-0563-4324-a98b-e42b24b4223b\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:29:02 crc kubenswrapper[4558]: I0120 17:29:02.741601 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/49dc00d4-0563-4324-a98b-e42b24b4223b-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"49dc00d4-0563-4324-a98b-e42b24b4223b\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:29:02 crc kubenswrapper[4558]: I0120 17:29:02.750039 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wxbsn\" (UniqueName: \"kubernetes.io/projected/49dc00d4-0563-4324-a98b-e42b24b4223b-kube-api-access-wxbsn\") pod \"rabbitmq-cell1-server-0\" (UID: \"49dc00d4-0563-4324-a98b-e42b24b4223b\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:29:02 crc kubenswrapper[4558]: I0120 
17:29:02.750484 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/49dc00d4-0563-4324-a98b-e42b24b4223b-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"49dc00d4-0563-4324-a98b-e42b24b4223b\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:29:02 crc kubenswrapper[4558]: I0120 17:29:02.751597 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"49dc00d4-0563-4324-a98b-e42b24b4223b\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:29:02 crc kubenswrapper[4558]: I0120 17:29:02.900455 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:29:03 crc kubenswrapper[4558]: I0120 17:29:03.050711 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:29:03 crc kubenswrapper[4558]: W0120 17:29:03.060846 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod35b2371f_57f2_4728_b32f_1ba587b4532e.slice/crio-6ca2725584caac477b29e00556aa6daeb9f8347980274d9c02390fd9c1f3473f WatchSource:0}: Error finding container 6ca2725584caac477b29e00556aa6daeb9f8347980274d9c02390fd9c1f3473f: Status 404 returned error can't find the container with id 6ca2725584caac477b29e00556aa6daeb9f8347980274d9c02390fd9c1f3473f Jan 20 17:29:03 crc kubenswrapper[4558]: I0120 17:29:03.107908 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-0"] Jan 20 17:29:03 crc kubenswrapper[4558]: W0120 17:29:03.113491 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod49dc00d4_0563_4324_a98b_e42b24b4223b.slice/crio-33b204fb796375ab9ea199c3fa8c4edd2c70b3a454e49f2bebb05a610516fa5b WatchSource:0}: Error finding container 33b204fb796375ab9ea199c3fa8c4edd2c70b3a454e49f2bebb05a610516fa5b: Status 404 returned error can't find the container with id 33b204fb796375ab9ea199c3fa8c4edd2c70b3a454e49f2bebb05a610516fa5b Jan 20 17:29:03 crc kubenswrapper[4558]: I0120 17:29:03.443794 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:29:03 crc kubenswrapper[4558]: I0120 17:29:03.445257 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:29:03 crc kubenswrapper[4558]: I0120 17:29:03.451611 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-config-data" Jan 20 17:29:03 crc kubenswrapper[4558]: I0120 17:29:03.451614 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"galera-openstack-dockercfg-7ct58" Jan 20 17:29:03 crc kubenswrapper[4558]: I0120 17:29:03.451887 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-galera-openstack-svc" Jan 20 17:29:03 crc kubenswrapper[4558]: I0120 17:29:03.451985 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-scripts" Jan 20 17:29:03 crc kubenswrapper[4558]: I0120 17:29:03.455673 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"combined-ca-bundle" Jan 20 17:29:03 crc kubenswrapper[4558]: I0120 17:29:03.455932 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:29:03 crc kubenswrapper[4558]: I0120 17:29:03.541310 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-0\" (UID: \"5e0a76a1-125c-47f2-a903-b5c8ce8e5277\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:29:03 crc kubenswrapper[4558]: I0120 17:29:03.541485 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/5e0a76a1-125c-47f2-a903-b5c8ce8e5277-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"5e0a76a1-125c-47f2-a903-b5c8ce8e5277\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:29:03 crc kubenswrapper[4558]: I0120 17:29:03.541584 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5jck4\" (UniqueName: \"kubernetes.io/projected/5e0a76a1-125c-47f2-a903-b5c8ce8e5277-kube-api-access-5jck4\") pod \"openstack-galera-0\" (UID: \"5e0a76a1-125c-47f2-a903-b5c8ce8e5277\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:29:03 crc kubenswrapper[4558]: I0120 17:29:03.541688 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5e0a76a1-125c-47f2-a903-b5c8ce8e5277-operator-scripts\") pod \"openstack-galera-0\" (UID: \"5e0a76a1-125c-47f2-a903-b5c8ce8e5277\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:29:03 crc kubenswrapper[4558]: I0120 17:29:03.541793 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e0a76a1-125c-47f2-a903-b5c8ce8e5277-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"5e0a76a1-125c-47f2-a903-b5c8ce8e5277\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:29:03 crc kubenswrapper[4558]: I0120 17:29:03.541892 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/5e0a76a1-125c-47f2-a903-b5c8ce8e5277-config-data-default\") pod \"openstack-galera-0\" (UID: \"5e0a76a1-125c-47f2-a903-b5c8ce8e5277\") " pod="openstack-kuttl-tests/openstack-galera-0" 
Jan 20 17:29:03 crc kubenswrapper[4558]: I0120 17:29:03.541967 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/5e0a76a1-125c-47f2-a903-b5c8ce8e5277-config-data-generated\") pod \"openstack-galera-0\" (UID: \"5e0a76a1-125c-47f2-a903-b5c8ce8e5277\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:29:03 crc kubenswrapper[4558]: I0120 17:29:03.542757 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5e0a76a1-125c-47f2-a903-b5c8ce8e5277-kolla-config\") pod \"openstack-galera-0\" (UID: \"5e0a76a1-125c-47f2-a903-b5c8ce8e5277\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:29:03 crc kubenswrapper[4558]: I0120 17:29:03.643683 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5e0a76a1-125c-47f2-a903-b5c8ce8e5277-kolla-config\") pod \"openstack-galera-0\" (UID: \"5e0a76a1-125c-47f2-a903-b5c8ce8e5277\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:29:03 crc kubenswrapper[4558]: I0120 17:29:03.643910 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-0\" (UID: \"5e0a76a1-125c-47f2-a903-b5c8ce8e5277\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:29:03 crc kubenswrapper[4558]: I0120 17:29:03.644479 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/5e0a76a1-125c-47f2-a903-b5c8ce8e5277-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"5e0a76a1-125c-47f2-a903-b5c8ce8e5277\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:29:03 crc kubenswrapper[4558]: I0120 17:29:03.644635 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5jck4\" (UniqueName: \"kubernetes.io/projected/5e0a76a1-125c-47f2-a903-b5c8ce8e5277-kube-api-access-5jck4\") pod \"openstack-galera-0\" (UID: \"5e0a76a1-125c-47f2-a903-b5c8ce8e5277\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:29:03 crc kubenswrapper[4558]: I0120 17:29:03.644223 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-0\" (UID: \"5e0a76a1-125c-47f2-a903-b5c8ce8e5277\") device mount path \"/mnt/openstack/pv02\"" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:29:03 crc kubenswrapper[4558]: I0120 17:29:03.644553 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5e0a76a1-125c-47f2-a903-b5c8ce8e5277-kolla-config\") pod \"openstack-galera-0\" (UID: \"5e0a76a1-125c-47f2-a903-b5c8ce8e5277\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:29:03 crc kubenswrapper[4558]: I0120 17:29:03.645588 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5e0a76a1-125c-47f2-a903-b5c8ce8e5277-operator-scripts\") pod \"openstack-galera-0\" (UID: \"5e0a76a1-125c-47f2-a903-b5c8ce8e5277\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:29:03 crc kubenswrapper[4558]: I0120 17:29:03.645719 4558 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e0a76a1-125c-47f2-a903-b5c8ce8e5277-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"5e0a76a1-125c-47f2-a903-b5c8ce8e5277\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:29:03 crc kubenswrapper[4558]: I0120 17:29:03.646157 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/5e0a76a1-125c-47f2-a903-b5c8ce8e5277-config-data-default\") pod \"openstack-galera-0\" (UID: \"5e0a76a1-125c-47f2-a903-b5c8ce8e5277\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:29:03 crc kubenswrapper[4558]: I0120 17:29:03.646260 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/5e0a76a1-125c-47f2-a903-b5c8ce8e5277-config-data-generated\") pod \"openstack-galera-0\" (UID: \"5e0a76a1-125c-47f2-a903-b5c8ce8e5277\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:29:03 crc kubenswrapper[4558]: I0120 17:29:03.646750 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5e0a76a1-125c-47f2-a903-b5c8ce8e5277-operator-scripts\") pod \"openstack-galera-0\" (UID: \"5e0a76a1-125c-47f2-a903-b5c8ce8e5277\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:29:03 crc kubenswrapper[4558]: I0120 17:29:03.646996 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/5e0a76a1-125c-47f2-a903-b5c8ce8e5277-config-data-generated\") pod \"openstack-galera-0\" (UID: \"5e0a76a1-125c-47f2-a903-b5c8ce8e5277\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:29:03 crc kubenswrapper[4558]: I0120 17:29:03.647483 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/5e0a76a1-125c-47f2-a903-b5c8ce8e5277-config-data-default\") pod \"openstack-galera-0\" (UID: \"5e0a76a1-125c-47f2-a903-b5c8ce8e5277\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:29:03 crc kubenswrapper[4558]: I0120 17:29:03.666746 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5jck4\" (UniqueName: \"kubernetes.io/projected/5e0a76a1-125c-47f2-a903-b5c8ce8e5277-kube-api-access-5jck4\") pod \"openstack-galera-0\" (UID: \"5e0a76a1-125c-47f2-a903-b5c8ce8e5277\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:29:03 crc kubenswrapper[4558]: I0120 17:29:03.690752 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-0\" (UID: \"5e0a76a1-125c-47f2-a903-b5c8ce8e5277\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:29:03 crc kubenswrapper[4558]: I0120 17:29:03.696201 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e0a76a1-125c-47f2-a903-b5c8ce8e5277-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"5e0a76a1-125c-47f2-a903-b5c8ce8e5277\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:29:03 crc kubenswrapper[4558]: I0120 17:29:03.696360 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/5e0a76a1-125c-47f2-a903-b5c8ce8e5277-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"5e0a76a1-125c-47f2-a903-b5c8ce8e5277\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:29:03 crc kubenswrapper[4558]: I0120 17:29:03.752663 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-0" event={"ID":"35b2371f-57f2-4728-b32f-1ba587b4532e","Type":"ContainerStarted","Data":"6ca2725584caac477b29e00556aa6daeb9f8347980274d9c02390fd9c1f3473f"} Jan 20 17:29:03 crc kubenswrapper[4558]: I0120 17:29:03.760692 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:29:03 crc kubenswrapper[4558]: I0120 17:29:03.786463 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" event={"ID":"49dc00d4-0563-4324-a98b-e42b24b4223b","Type":"ContainerStarted","Data":"33b204fb796375ab9ea199c3fa8c4edd2c70b3a454e49f2bebb05a610516fa5b"} Jan 20 17:29:03 crc kubenswrapper[4558]: I0120 17:29:03.835561 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:29:03 crc kubenswrapper[4558]: I0120 17:29:03.836633 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:29:03 crc kubenswrapper[4558]: I0120 17:29:03.841191 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"memcached-config-data" Jan 20 17:29:03 crc kubenswrapper[4558]: I0120 17:29:03.841498 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"memcached-memcached-dockercfg-8bzhf" Jan 20 17:29:03 crc kubenswrapper[4558]: I0120 17:29:03.858343 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-846cj\" (UniqueName: \"kubernetes.io/projected/39e9c81e-2923-472a-b892-5404d32842bb-kube-api-access-846cj\") pod \"memcached-0\" (UID: \"39e9c81e-2923-472a-b892-5404d32842bb\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:29:03 crc kubenswrapper[4558]: I0120 17:29:03.858439 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/39e9c81e-2923-472a-b892-5404d32842bb-config-data\") pod \"memcached-0\" (UID: \"39e9c81e-2923-472a-b892-5404d32842bb\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:29:03 crc kubenswrapper[4558]: I0120 17:29:03.858551 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/39e9c81e-2923-472a-b892-5404d32842bb-kolla-config\") pod \"memcached-0\" (UID: \"39e9c81e-2923-472a-b892-5404d32842bb\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:29:03 crc kubenswrapper[4558]: I0120 17:29:03.892698 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:29:03 crc kubenswrapper[4558]: I0120 17:29:03.960267 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-846cj\" (UniqueName: \"kubernetes.io/projected/39e9c81e-2923-472a-b892-5404d32842bb-kube-api-access-846cj\") pod \"memcached-0\" (UID: \"39e9c81e-2923-472a-b892-5404d32842bb\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:29:03 crc kubenswrapper[4558]: I0120 17:29:03.960317 4558 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/39e9c81e-2923-472a-b892-5404d32842bb-config-data\") pod \"memcached-0\" (UID: \"39e9c81e-2923-472a-b892-5404d32842bb\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:29:03 crc kubenswrapper[4558]: I0120 17:29:03.960431 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/39e9c81e-2923-472a-b892-5404d32842bb-kolla-config\") pod \"memcached-0\" (UID: \"39e9c81e-2923-472a-b892-5404d32842bb\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:29:03 crc kubenswrapper[4558]: I0120 17:29:03.961410 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/39e9c81e-2923-472a-b892-5404d32842bb-kolla-config\") pod \"memcached-0\" (UID: \"39e9c81e-2923-472a-b892-5404d32842bb\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:29:03 crc kubenswrapper[4558]: I0120 17:29:03.961639 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/39e9c81e-2923-472a-b892-5404d32842bb-config-data\") pod \"memcached-0\" (UID: \"39e9c81e-2923-472a-b892-5404d32842bb\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:29:04 crc kubenswrapper[4558]: I0120 17:29:04.014753 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-846cj\" (UniqueName: \"kubernetes.io/projected/39e9c81e-2923-472a-b892-5404d32842bb-kube-api-access-846cj\") pod \"memcached-0\" (UID: \"39e9c81e-2923-472a-b892-5404d32842bb\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:29:04 crc kubenswrapper[4558]: I0120 17:29:04.281546 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:29:04 crc kubenswrapper[4558]: I0120 17:29:04.520229 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:29:04 crc kubenswrapper[4558]: W0120 17:29:04.523245 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5e0a76a1_125c_47f2_a903_b5c8ce8e5277.slice/crio-5cb005d56635787d08a647fb641db00f33241027dae1261587298498379f014b WatchSource:0}: Error finding container 5cb005d56635787d08a647fb641db00f33241027dae1261587298498379f014b: Status 404 returned error can't find the container with id 5cb005d56635787d08a647fb641db00f33241027dae1261587298498379f014b Jan 20 17:29:04 crc kubenswrapper[4558]: I0120 17:29:04.709648 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:29:04 crc kubenswrapper[4558]: I0120 17:29:04.768457 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:29:04 crc kubenswrapper[4558]: I0120 17:29:04.769561 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:29:04 crc kubenswrapper[4558]: I0120 17:29:04.773519 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"telemetry-ceilometer-dockercfg-hzfmh" Jan 20 17:29:04 crc kubenswrapper[4558]: I0120 17:29:04.785488 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:29:04 crc kubenswrapper[4558]: I0120 17:29:04.809227 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"39e9c81e-2923-472a-b892-5404d32842bb","Type":"ContainerStarted","Data":"e926dceaefe7d08030725450daa3bbe85b1bfb6ac0fcfaf97459f9b11814b1e3"} Jan 20 17:29:04 crc kubenswrapper[4558]: I0120 17:29:04.829591 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"5e0a76a1-125c-47f2-a903-b5c8ce8e5277","Type":"ContainerStarted","Data":"74a82b2cb87f85950d65db39375572be31ea2b3a88bbbb153f941cdc471461ac"} Jan 20 17:29:04 crc kubenswrapper[4558]: I0120 17:29:04.829656 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"5e0a76a1-125c-47f2-a903-b5c8ce8e5277","Type":"ContainerStarted","Data":"5cb005d56635787d08a647fb641db00f33241027dae1261587298498379f014b"} Jan 20 17:29:04 crc kubenswrapper[4558]: I0120 17:29:04.836733 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-0" event={"ID":"35b2371f-57f2-4728-b32f-1ba587b4532e","Type":"ContainerStarted","Data":"470d1df81fd770d7b974c08649b44b1ac9d508cc4fcfb3aae910123fc7a03c96"} Jan 20 17:29:04 crc kubenswrapper[4558]: I0120 17:29:04.852184 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" event={"ID":"49dc00d4-0563-4324-a98b-e42b24b4223b","Type":"ContainerStarted","Data":"a216ad33befa8b30bde5bc44c084e7e37be4198c817fabd4c86a26d9201627a7"} Jan 20 17:29:04 crc kubenswrapper[4558]: I0120 17:29:04.890386 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6glq8\" (UniqueName: \"kubernetes.io/projected/7505236e-4372-4f2b-9b7c-172a1348f818-kube-api-access-6glq8\") pod \"kube-state-metrics-0\" (UID: \"7505236e-4372-4f2b-9b7c-172a1348f818\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:29:04 crc kubenswrapper[4558]: I0120 17:29:04.992265 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6glq8\" (UniqueName: \"kubernetes.io/projected/7505236e-4372-4f2b-9b7c-172a1348f818-kube-api-access-6glq8\") pod \"kube-state-metrics-0\" (UID: \"7505236e-4372-4f2b-9b7c-172a1348f818\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.009613 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6glq8\" (UniqueName: \"kubernetes.io/projected/7505236e-4372-4f2b-9b7c-172a1348f818-kube-api-access-6glq8\") pod \"kube-state-metrics-0\" (UID: \"7505236e-4372-4f2b-9b7c-172a1348f818\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.123158 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.124572 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.134301 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.135097 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-galera-openstack-cell1-svc" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.135339 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"galera-openstack-cell1-dockercfg-ftflb" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.135486 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-cell1-config-data" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.135647 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-cell1-scripts" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.138287 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.205152 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.206674 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.212012 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ovncluster-ovndbcluster-nb-dockercfg-djpzb" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.212091 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovndbcluster-nb-config" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.212389 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovndbcluster-nb-scripts" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.229524 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.297896 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/57872fb7-1988-4da4-b8b9-eee9c5e8c827-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"57872fb7-1988-4da4-b8b9-eee9c5e8c827\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.297953 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/57872fb7-1988-4da4-b8b9-eee9c5e8c827-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"57872fb7-1988-4da4-b8b9-eee9c5e8c827\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.297976 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57872fb7-1988-4da4-b8b9-eee9c5e8c827-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"57872fb7-1988-4da4-b8b9-eee9c5e8c827\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 
17:29:05.298001 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"ovsdbserver-nb-0\" (UID: \"581cf384-3caa-4074-a6aa-526e8a65e9b2\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.298021 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/581cf384-3caa-4074-a6aa-526e8a65e9b2-config\") pod \"ovsdbserver-nb-0\" (UID: \"581cf384-3caa-4074-a6aa-526e8a65e9b2\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.298036 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/581cf384-3caa-4074-a6aa-526e8a65e9b2-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"581cf384-3caa-4074-a6aa-526e8a65e9b2\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.298058 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/57872fb7-1988-4da4-b8b9-eee9c5e8c827-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"57872fb7-1988-4da4-b8b9-eee9c5e8c827\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.298086 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wtxm4\" (UniqueName: \"kubernetes.io/projected/57872fb7-1988-4da4-b8b9-eee9c5e8c827-kube-api-access-wtxm4\") pod \"openstack-cell1-galera-0\" (UID: \"57872fb7-1988-4da4-b8b9-eee9c5e8c827\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.298103 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bwt2l\" (UniqueName: \"kubernetes.io/projected/581cf384-3caa-4074-a6aa-526e8a65e9b2-kube-api-access-bwt2l\") pod \"ovsdbserver-nb-0\" (UID: \"581cf384-3caa-4074-a6aa-526e8a65e9b2\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.298145 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"openstack-cell1-galera-0\" (UID: \"57872fb7-1988-4da4-b8b9-eee9c5e8c827\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.298203 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/57872fb7-1988-4da4-b8b9-eee9c5e8c827-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"57872fb7-1988-4da4-b8b9-eee9c5e8c827\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.298229 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/57872fb7-1988-4da4-b8b9-eee9c5e8c827-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"57872fb7-1988-4da4-b8b9-eee9c5e8c827\") " 
pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.298296 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/581cf384-3caa-4074-a6aa-526e8a65e9b2-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"581cf384-3caa-4074-a6aa-526e8a65e9b2\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.298318 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/581cf384-3caa-4074-a6aa-526e8a65e9b2-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"581cf384-3caa-4074-a6aa-526e8a65e9b2\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.399579 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"openstack-cell1-galera-0\" (UID: \"57872fb7-1988-4da4-b8b9-eee9c5e8c827\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.399627 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"openstack-cell1-galera-0\" (UID: \"57872fb7-1988-4da4-b8b9-eee9c5e8c827\") device mount path \"/mnt/openstack/pv06\"" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.399655 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/57872fb7-1988-4da4-b8b9-eee9c5e8c827-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"57872fb7-1988-4da4-b8b9-eee9c5e8c827\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.399690 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/57872fb7-1988-4da4-b8b9-eee9c5e8c827-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"57872fb7-1988-4da4-b8b9-eee9c5e8c827\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.399776 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/581cf384-3caa-4074-a6aa-526e8a65e9b2-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"581cf384-3caa-4074-a6aa-526e8a65e9b2\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.399800 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/581cf384-3caa-4074-a6aa-526e8a65e9b2-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"581cf384-3caa-4074-a6aa-526e8a65e9b2\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.399839 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/57872fb7-1988-4da4-b8b9-eee9c5e8c827-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"57872fb7-1988-4da4-b8b9-eee9c5e8c827\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:29:05 crc 
kubenswrapper[4558]: I0120 17:29:05.399874 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/57872fb7-1988-4da4-b8b9-eee9c5e8c827-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"57872fb7-1988-4da4-b8b9-eee9c5e8c827\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.399892 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57872fb7-1988-4da4-b8b9-eee9c5e8c827-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"57872fb7-1988-4da4-b8b9-eee9c5e8c827\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.399913 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"ovsdbserver-nb-0\" (UID: \"581cf384-3caa-4074-a6aa-526e8a65e9b2\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.399931 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/581cf384-3caa-4074-a6aa-526e8a65e9b2-config\") pod \"ovsdbserver-nb-0\" (UID: \"581cf384-3caa-4074-a6aa-526e8a65e9b2\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.399947 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/581cf384-3caa-4074-a6aa-526e8a65e9b2-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"581cf384-3caa-4074-a6aa-526e8a65e9b2\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.399970 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/57872fb7-1988-4da4-b8b9-eee9c5e8c827-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"57872fb7-1988-4da4-b8b9-eee9c5e8c827\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.399998 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wtxm4\" (UniqueName: \"kubernetes.io/projected/57872fb7-1988-4da4-b8b9-eee9c5e8c827-kube-api-access-wtxm4\") pod \"openstack-cell1-galera-0\" (UID: \"57872fb7-1988-4da4-b8b9-eee9c5e8c827\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.400016 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bwt2l\" (UniqueName: \"kubernetes.io/projected/581cf384-3caa-4074-a6aa-526e8a65e9b2-kube-api-access-bwt2l\") pod \"ovsdbserver-nb-0\" (UID: \"581cf384-3caa-4074-a6aa-526e8a65e9b2\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.400481 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"ovsdbserver-nb-0\" (UID: \"581cf384-3caa-4074-a6aa-526e8a65e9b2\") device mount path \"/mnt/openstack/pv13\"" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.401894 4558 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/57872fb7-1988-4da4-b8b9-eee9c5e8c827-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"57872fb7-1988-4da4-b8b9-eee9c5e8c827\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.402477 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/581cf384-3caa-4074-a6aa-526e8a65e9b2-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"581cf384-3caa-4074-a6aa-526e8a65e9b2\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.402687 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/57872fb7-1988-4da4-b8b9-eee9c5e8c827-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"57872fb7-1988-4da4-b8b9-eee9c5e8c827\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.402952 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/57872fb7-1988-4da4-b8b9-eee9c5e8c827-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"57872fb7-1988-4da4-b8b9-eee9c5e8c827\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.403123 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/57872fb7-1988-4da4-b8b9-eee9c5e8c827-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"57872fb7-1988-4da4-b8b9-eee9c5e8c827\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.403655 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/581cf384-3caa-4074-a6aa-526e8a65e9b2-config\") pod \"ovsdbserver-nb-0\" (UID: \"581cf384-3caa-4074-a6aa-526e8a65e9b2\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.403972 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/57872fb7-1988-4da4-b8b9-eee9c5e8c827-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"57872fb7-1988-4da4-b8b9-eee9c5e8c827\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.404182 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/581cf384-3caa-4074-a6aa-526e8a65e9b2-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"581cf384-3caa-4074-a6aa-526e8a65e9b2\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.405073 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57872fb7-1988-4da4-b8b9-eee9c5e8c827-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"57872fb7-1988-4da4-b8b9-eee9c5e8c827\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.408685 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/581cf384-3caa-4074-a6aa-526e8a65e9b2-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"581cf384-3caa-4074-a6aa-526e8a65e9b2\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.415041 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bwt2l\" (UniqueName: \"kubernetes.io/projected/581cf384-3caa-4074-a6aa-526e8a65e9b2-kube-api-access-bwt2l\") pod \"ovsdbserver-nb-0\" (UID: \"581cf384-3caa-4074-a6aa-526e8a65e9b2\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.417630 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wtxm4\" (UniqueName: \"kubernetes.io/projected/57872fb7-1988-4da4-b8b9-eee9c5e8c827-kube-api-access-wtxm4\") pod \"openstack-cell1-galera-0\" (UID: \"57872fb7-1988-4da4-b8b9-eee9c5e8c827\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.418349 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"openstack-cell1-galera-0\" (UID: \"57872fb7-1988-4da4-b8b9-eee9c5e8c827\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.418416 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"ovsdbserver-nb-0\" (UID: \"581cf384-3caa-4074-a6aa-526e8a65e9b2\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.440218 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.489453 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.490846 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.494584 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovndbcluster-sb-config" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.494962 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ovncluster-ovndbcluster-sb-dockercfg-g95pp" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.495105 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovndbcluster-sb-scripts" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.513246 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.522459 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.582007 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:29:05 crc kubenswrapper[4558]: W0120 17:29:05.587609 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7505236e_4372_4f2b_9b7c_172a1348f818.slice/crio-d17e9607c701acd713b927e4018f02455e070b56378384c2b40bb6ab6b925287 WatchSource:0}: Error finding container d17e9607c701acd713b927e4018f02455e070b56378384c2b40bb6ab6b925287: Status 404 returned error can't find the container with id d17e9607c701acd713b927e4018f02455e070b56378384c2b40bb6ab6b925287 Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.606597 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gt5pj\" (UniqueName: \"kubernetes.io/projected/31c9af7b-b151-4eaa-b090-efecddd08c46-kube-api-access-gt5pj\") pod \"ovsdbserver-sb-0\" (UID: \"31c9af7b-b151-4eaa-b090-efecddd08c46\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.606710 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/31c9af7b-b151-4eaa-b090-efecddd08c46-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"31c9af7b-b151-4eaa-b090-efecddd08c46\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.606739 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31c9af7b-b151-4eaa-b090-efecddd08c46-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"31c9af7b-b151-4eaa-b090-efecddd08c46\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.606761 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"ovsdbserver-sb-0\" (UID: \"31c9af7b-b151-4eaa-b090-efecddd08c46\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.606795 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/31c9af7b-b151-4eaa-b090-efecddd08c46-config\") pod \"ovsdbserver-sb-0\" (UID: \"31c9af7b-b151-4eaa-b090-efecddd08c46\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.606841 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/31c9af7b-b151-4eaa-b090-efecddd08c46-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"31c9af7b-b151-4eaa-b090-efecddd08c46\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.708523 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/31c9af7b-b151-4eaa-b090-efecddd08c46-config\") pod \"ovsdbserver-sb-0\" (UID: \"31c9af7b-b151-4eaa-b090-efecddd08c46\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.708630 4558 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/31c9af7b-b151-4eaa-b090-efecddd08c46-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"31c9af7b-b151-4eaa-b090-efecddd08c46\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.708710 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gt5pj\" (UniqueName: \"kubernetes.io/projected/31c9af7b-b151-4eaa-b090-efecddd08c46-kube-api-access-gt5pj\") pod \"ovsdbserver-sb-0\" (UID: \"31c9af7b-b151-4eaa-b090-efecddd08c46\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.708855 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/31c9af7b-b151-4eaa-b090-efecddd08c46-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"31c9af7b-b151-4eaa-b090-efecddd08c46\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.708890 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31c9af7b-b151-4eaa-b090-efecddd08c46-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"31c9af7b-b151-4eaa-b090-efecddd08c46\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.708912 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"ovsdbserver-sb-0\" (UID: \"31c9af7b-b151-4eaa-b090-efecddd08c46\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.710024 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/31c9af7b-b151-4eaa-b090-efecddd08c46-config\") pod \"ovsdbserver-sb-0\" (UID: \"31c9af7b-b151-4eaa-b090-efecddd08c46\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.710239 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/31c9af7b-b151-4eaa-b090-efecddd08c46-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"31c9af7b-b151-4eaa-b090-efecddd08c46\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.711187 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"ovsdbserver-sb-0\" (UID: \"31c9af7b-b151-4eaa-b090-efecddd08c46\") device mount path \"/mnt/openstack/pv04\"" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.711259 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/31c9af7b-b151-4eaa-b090-efecddd08c46-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"31c9af7b-b151-4eaa-b090-efecddd08c46\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.716349 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31c9af7b-b151-4eaa-b090-efecddd08c46-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: 
\"31c9af7b-b151-4eaa-b090-efecddd08c46\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.726842 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gt5pj\" (UniqueName: \"kubernetes.io/projected/31c9af7b-b151-4eaa-b090-efecddd08c46-kube-api-access-gt5pj\") pod \"ovsdbserver-sb-0\" (UID: \"31c9af7b-b151-4eaa-b090-efecddd08c46\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.737907 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"ovsdbserver-sb-0\" (UID: \"31c9af7b-b151-4eaa-b090-efecddd08c46\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.808059 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.862065 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"39e9c81e-2923-472a-b892-5404d32842bb","Type":"ContainerStarted","Data":"2125c4c8f812531a8911dde13a1a8e1cc94d924088d9043e03d8a2554aea8fba"} Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.862421 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.864244 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"7505236e-4372-4f2b-9b7c-172a1348f818","Type":"ContainerStarted","Data":"d17e9607c701acd713b927e4018f02455e070b56378384c2b40bb6ab6b925287"} Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.879124 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/memcached-0" podStartSLOduration=2.879103942 podStartE2EDuration="2.879103942s" podCreationTimestamp="2026-01-20 17:29:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:29:05.875160367 +0000 UTC m=+2839.635498335" watchObservedRunningTime="2026-01-20 17:29:05.879103942 +0000 UTC m=+2839.639441909" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.897718 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-mn9jr" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.897752 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-mn9jr" Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.913592 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:29:05 crc kubenswrapper[4558]: I0120 17:29:05.985259 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:29:06 crc kubenswrapper[4558]: I0120 17:29:06.250429 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:29:06 crc kubenswrapper[4558]: I0120 17:29:06.873068 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" 
event={"ID":"57872fb7-1988-4da4-b8b9-eee9c5e8c827","Type":"ContainerStarted","Data":"bf2b91504ae946bbd07d60c83c5fc451a51026e889c1d6b6cb64618f1bb1865a"} Jan 20 17:29:06 crc kubenswrapper[4558]: I0120 17:29:06.873420 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"57872fb7-1988-4da4-b8b9-eee9c5e8c827","Type":"ContainerStarted","Data":"18c8cf1d30438d409b1f52f4aebc69b25a65d800d6cdaa24d524a6e89415459d"} Jan 20 17:29:06 crc kubenswrapper[4558]: I0120 17:29:06.875193 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"581cf384-3caa-4074-a6aa-526e8a65e9b2","Type":"ContainerStarted","Data":"ce4c4a8dc51cc8a0747c9a283ff63624944b94308dde57af4f43cc0ee489bfaa"} Jan 20 17:29:06 crc kubenswrapper[4558]: I0120 17:29:06.875245 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"581cf384-3caa-4074-a6aa-526e8a65e9b2","Type":"ContainerStarted","Data":"f2a87a3274b85da1b912ddda9d9c24218552ae2474139c4df992c4937f8ad8bb"} Jan 20 17:29:06 crc kubenswrapper[4558]: I0120 17:29:06.875264 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"581cf384-3caa-4074-a6aa-526e8a65e9b2","Type":"ContainerStarted","Data":"090f42e80e9e937d110de0492f6d7685db4909c82b2ccfaeb8fc3770dbee0d55"} Jan 20 17:29:06 crc kubenswrapper[4558]: I0120 17:29:06.876807 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"7505236e-4372-4f2b-9b7c-172a1348f818","Type":"ContainerStarted","Data":"d7c483fdbe053d7af01b1d39038ffcbb4a12a0b71bb1b88b7d45655675034466"} Jan 20 17:29:06 crc kubenswrapper[4558]: I0120 17:29:06.876925 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:29:06 crc kubenswrapper[4558]: I0120 17:29:06.878820 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"31c9af7b-b151-4eaa-b090-efecddd08c46","Type":"ContainerStarted","Data":"309c8fa54d3864dc5b564b635ee338b83b4c559bc90397337a276c3ed181323e"} Jan 20 17:29:06 crc kubenswrapper[4558]: I0120 17:29:06.878855 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"31c9af7b-b151-4eaa-b090-efecddd08c46","Type":"ContainerStarted","Data":"cddbf5e0f0aa3f8140abd8d8060e08f69d0761eab29715426c1715bdc2833585"} Jan 20 17:29:06 crc kubenswrapper[4558]: I0120 17:29:06.878866 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"31c9af7b-b151-4eaa-b090-efecddd08c46","Type":"ContainerStarted","Data":"7b131344a595bbce78d90b53b0bc828a5fbd683845a9c9535a0ee9a6a24128e7"} Jan 20 17:29:06 crc kubenswrapper[4558]: I0120 17:29:06.909342 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/kube-state-metrics-0" podStartSLOduration=2.5962665940000003 podStartE2EDuration="2.909330147s" podCreationTimestamp="2026-01-20 17:29:04 +0000 UTC" firstStartedPulling="2026-01-20 17:29:05.591288627 +0000 UTC m=+2839.351626594" lastFinishedPulling="2026-01-20 17:29:05.90435218 +0000 UTC m=+2839.664690147" observedRunningTime="2026-01-20 17:29:06.903257811 +0000 UTC m=+2840.663595778" watchObservedRunningTime="2026-01-20 17:29:06.909330147 +0000 UTC m=+2840.669668114" Jan 20 17:29:06 crc kubenswrapper[4558]: I0120 
17:29:06.923858 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ovsdbserver-nb-0" podStartSLOduration=2.923845236 podStartE2EDuration="2.923845236s" podCreationTimestamp="2026-01-20 17:29:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:29:06.915669325 +0000 UTC m=+2840.676007292" watchObservedRunningTime="2026-01-20 17:29:06.923845236 +0000 UTC m=+2840.684183203" Jan 20 17:29:06 crc kubenswrapper[4558]: I0120 17:29:06.933565 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ovsdbserver-sb-0" podStartSLOduration=2.9335541000000003 podStartE2EDuration="2.9335541s" podCreationTimestamp="2026-01-20 17:29:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:29:06.930237223 +0000 UTC m=+2840.690575191" watchObservedRunningTime="2026-01-20 17:29:06.9335541 +0000 UTC m=+2840.693892067" Jan 20 17:29:06 crc kubenswrapper[4558]: I0120 17:29:06.945302 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-mn9jr" podUID="86b88605-5219-482f-803e-edae5ea5d319" containerName="registry-server" probeResult="failure" output=< Jan 20 17:29:06 crc kubenswrapper[4558]: timeout: failed to connect service ":50051" within 1s Jan 20 17:29:06 crc kubenswrapper[4558]: > Jan 20 17:29:08 crc kubenswrapper[4558]: I0120 17:29:08.522698 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:29:08 crc kubenswrapper[4558]: I0120 17:29:08.808509 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:29:08 crc kubenswrapper[4558]: I0120 17:29:08.936863 4558 generic.go:334] "Generic (PLEG): container finished" podID="5e0a76a1-125c-47f2-a903-b5c8ce8e5277" containerID="74a82b2cb87f85950d65db39375572be31ea2b3a88bbbb153f941cdc471461ac" exitCode=0 Jan 20 17:29:08 crc kubenswrapper[4558]: I0120 17:29:08.936956 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"5e0a76a1-125c-47f2-a903-b5c8ce8e5277","Type":"ContainerDied","Data":"74a82b2cb87f85950d65db39375572be31ea2b3a88bbbb153f941cdc471461ac"} Jan 20 17:29:09 crc kubenswrapper[4558]: I0120 17:29:09.283388 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:29:09 crc kubenswrapper[4558]: I0120 17:29:09.951574 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"5e0a76a1-125c-47f2-a903-b5c8ce8e5277","Type":"ContainerStarted","Data":"aaa3870751af8f0c4b7878382a9021d1b0d86ba2fbbf69032fcd3bacd83d2d92"} Jan 20 17:29:09 crc kubenswrapper[4558]: I0120 17:29:09.975922 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/openstack-galera-0" podStartSLOduration=7.975896728 podStartE2EDuration="7.975896728s" podCreationTimestamp="2026-01-20 17:29:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:29:09.970243599 +0000 UTC m=+2843.730581566" watchObservedRunningTime="2026-01-20 17:29:09.975896728 +0000 UTC m=+2843.736234695" Jan 20 17:29:10 crc 
kubenswrapper[4558]: I0120 17:29:10.522604 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:29:10 crc kubenswrapper[4558]: I0120 17:29:10.598711 4558 scope.go:117] "RemoveContainer" containerID="888ebccf523afdd2da82f932d8088d8e28c16e7be4d09466d29dbfe4ed5235aa" Jan 20 17:29:10 crc kubenswrapper[4558]: I0120 17:29:10.621595 4558 scope.go:117] "RemoveContainer" containerID="5e28de7a7ee3d35b6b18ea4eac47cc3ed5820910b779402f078366abc8119fcd" Jan 20 17:29:10 crc kubenswrapper[4558]: I0120 17:29:10.644360 4558 scope.go:117] "RemoveContainer" containerID="96a6b2b57e1f805d5d79906202bc8db8cfd9f39f17f625dbff6ed62ab2634b68" Jan 20 17:29:10 crc kubenswrapper[4558]: I0120 17:29:10.668421 4558 scope.go:117] "RemoveContainer" containerID="f51941633ec981d615e3f81dbf2603d9890a9ed4aa990ee310afe05d0cc363d5" Jan 20 17:29:10 crc kubenswrapper[4558]: I0120 17:29:10.686421 4558 scope.go:117] "RemoveContainer" containerID="19d2eb91531f091cee0633f6be93c0f1072901e75d5c21b701c2426abfc50181" Jan 20 17:29:10 crc kubenswrapper[4558]: I0120 17:29:10.702737 4558 scope.go:117] "RemoveContainer" containerID="3b5dee10562c22a67048b6fc6c3b8f6768b24f57a85fa0e1abeee9c3745fe3ec" Jan 20 17:29:10 crc kubenswrapper[4558]: I0120 17:29:10.723429 4558 scope.go:117] "RemoveContainer" containerID="51c4471e615f0278850f1aad746f15c73d4493f0a751b60dad03afce53c2f92a" Jan 20 17:29:10 crc kubenswrapper[4558]: I0120 17:29:10.770642 4558 scope.go:117] "RemoveContainer" containerID="4ea077acc81e2f99881f558d59d1f75afc856a3ed13d145c17a81d4ad3d3ce4c" Jan 20 17:29:10 crc kubenswrapper[4558]: I0120 17:29:10.791063 4558 scope.go:117] "RemoveContainer" containerID="2ce6a52b519e78433be47bb493bd9e81226e10f0796bf02066e39b30593b9275" Jan 20 17:29:10 crc kubenswrapper[4558]: I0120 17:29:10.806212 4558 scope.go:117] "RemoveContainer" containerID="d4528f4868b3df3d0078aaff6c6ad31ee69b350696bbce4aef20138bc1d0e447" Jan 20 17:29:10 crc kubenswrapper[4558]: I0120 17:29:10.808319 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:29:10 crc kubenswrapper[4558]: I0120 17:29:10.821646 4558 scope.go:117] "RemoveContainer" containerID="99f840cfec6f5fd24affe278783ce784e3dc668ee377d2172d230ab73c6e60dc" Jan 20 17:29:10 crc kubenswrapper[4558]: I0120 17:29:10.839845 4558 scope.go:117] "RemoveContainer" containerID="3ab396d4ddc6d93934c5b2cdd59f60c8abc4faa48b09f179bfba616f9732c273" Jan 20 17:29:10 crc kubenswrapper[4558]: I0120 17:29:10.967067 4558 generic.go:334] "Generic (PLEG): container finished" podID="57872fb7-1988-4da4-b8b9-eee9c5e8c827" containerID="bf2b91504ae946bbd07d60c83c5fc451a51026e889c1d6b6cb64618f1bb1865a" exitCode=0 Jan 20 17:29:10 crc kubenswrapper[4558]: I0120 17:29:10.967143 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"57872fb7-1988-4da4-b8b9-eee9c5e8c827","Type":"ContainerDied","Data":"bf2b91504ae946bbd07d60c83c5fc451a51026e889c1d6b6cb64618f1bb1865a"} Jan 20 17:29:11 crc kubenswrapper[4558]: I0120 17:29:11.555895 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:29:11 crc kubenswrapper[4558]: I0120 17:29:11.590427 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:29:11 crc kubenswrapper[4558]: I0120 17:29:11.840454 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="started" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:29:11 crc kubenswrapper[4558]: I0120 17:29:11.876018 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:29:11 crc kubenswrapper[4558]: I0120 17:29:11.984329 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"57872fb7-1988-4da4-b8b9-eee9c5e8c827","Type":"ContainerStarted","Data":"d8e9ed0ae69bd19b53807e767c4bcd9fcd074682e879fd040455a50aac23dd93"} Jan 20 17:29:12 crc kubenswrapper[4558]: I0120 17:29:12.006631 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:29:12 crc kubenswrapper[4558]: I0120 17:29:12.007755 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:29:12 crc kubenswrapper[4558]: I0120 17:29:12.009462 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovnnorthd-config" Jan 20 17:29:12 crc kubenswrapper[4558]: I0120 17:29:12.009888 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovnnorthd-scripts" Jan 20 17:29:12 crc kubenswrapper[4558]: I0120 17:29:12.010730 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ovnnorthd-ovnnorthd-dockercfg-dj5lx" Jan 20 17:29:12 crc kubenswrapper[4558]: I0120 17:29:12.020270 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:29:12 crc kubenswrapper[4558]: I0120 17:29:12.031498 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/openstack-cell1-galera-0" podStartSLOduration=8.031476096 podStartE2EDuration="8.031476096s" podCreationTimestamp="2026-01-20 17:29:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:29:12.028479663 +0000 UTC m=+2845.788817630" watchObservedRunningTime="2026-01-20 17:29:12.031476096 +0000 UTC m=+2845.791814063" Jan 20 17:29:12 crc kubenswrapper[4558]: I0120 17:29:12.119247 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6ecac33c-30fa-4996-b271-318148e62416-config\") pod \"ovn-northd-0\" (UID: \"6ecac33c-30fa-4996-b271-318148e62416\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:29:12 crc kubenswrapper[4558]: I0120 17:29:12.119356 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ecac33c-30fa-4996-b271-318148e62416-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"6ecac33c-30fa-4996-b271-318148e62416\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:29:12 crc kubenswrapper[4558]: I0120 17:29:12.119384 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tphf6\" (UniqueName: \"kubernetes.io/projected/6ecac33c-30fa-4996-b271-318148e62416-kube-api-access-tphf6\") pod \"ovn-northd-0\" (UID: \"6ecac33c-30fa-4996-b271-318148e62416\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:29:12 crc kubenswrapper[4558]: I0120 17:29:12.119498 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/6ecac33c-30fa-4996-b271-318148e62416-scripts\") pod \"ovn-northd-0\" (UID: \"6ecac33c-30fa-4996-b271-318148e62416\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:29:12 crc kubenswrapper[4558]: I0120 17:29:12.119532 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/6ecac33c-30fa-4996-b271-318148e62416-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"6ecac33c-30fa-4996-b271-318148e62416\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:29:12 crc kubenswrapper[4558]: I0120 17:29:12.221091 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6ecac33c-30fa-4996-b271-318148e62416-config\") pod \"ovn-northd-0\" (UID: \"6ecac33c-30fa-4996-b271-318148e62416\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:29:12 crc kubenswrapper[4558]: I0120 17:29:12.221196 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ecac33c-30fa-4996-b271-318148e62416-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"6ecac33c-30fa-4996-b271-318148e62416\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:29:12 crc kubenswrapper[4558]: I0120 17:29:12.221236 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tphf6\" (UniqueName: \"kubernetes.io/projected/6ecac33c-30fa-4996-b271-318148e62416-kube-api-access-tphf6\") pod \"ovn-northd-0\" (UID: \"6ecac33c-30fa-4996-b271-318148e62416\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:29:12 crc kubenswrapper[4558]: I0120 17:29:12.221298 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6ecac33c-30fa-4996-b271-318148e62416-scripts\") pod \"ovn-northd-0\" (UID: \"6ecac33c-30fa-4996-b271-318148e62416\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:29:12 crc kubenswrapper[4558]: I0120 17:29:12.221331 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/6ecac33c-30fa-4996-b271-318148e62416-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"6ecac33c-30fa-4996-b271-318148e62416\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:29:12 crc kubenswrapper[4558]: I0120 17:29:12.221861 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/6ecac33c-30fa-4996-b271-318148e62416-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"6ecac33c-30fa-4996-b271-318148e62416\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:29:12 crc kubenswrapper[4558]: I0120 17:29:12.222207 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6ecac33c-30fa-4996-b271-318148e62416-scripts\") pod \"ovn-northd-0\" (UID: \"6ecac33c-30fa-4996-b271-318148e62416\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:29:12 crc kubenswrapper[4558]: I0120 17:29:12.222242 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6ecac33c-30fa-4996-b271-318148e62416-config\") pod \"ovn-northd-0\" (UID: \"6ecac33c-30fa-4996-b271-318148e62416\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:29:12 crc kubenswrapper[4558]: I0120 17:29:12.227942 4558 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ecac33c-30fa-4996-b271-318148e62416-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"6ecac33c-30fa-4996-b271-318148e62416\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:29:12 crc kubenswrapper[4558]: I0120 17:29:12.237271 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tphf6\" (UniqueName: \"kubernetes.io/projected/6ecac33c-30fa-4996-b271-318148e62416-kube-api-access-tphf6\") pod \"ovn-northd-0\" (UID: \"6ecac33c-30fa-4996-b271-318148e62416\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:29:12 crc kubenswrapper[4558]: I0120 17:29:12.323034 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:29:12 crc kubenswrapper[4558]: I0120 17:29:12.712032 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:29:12 crc kubenswrapper[4558]: W0120 17:29:12.713870 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6ecac33c_30fa_4996_b271_318148e62416.slice/crio-b71ca26e6ba3729271cc4ed50b7c54870c0894291866c0187adee148967bb4e1 WatchSource:0}: Error finding container b71ca26e6ba3729271cc4ed50b7c54870c0894291866c0187adee148967bb4e1: Status 404 returned error can't find the container with id b71ca26e6ba3729271cc4ed50b7c54870c0894291866c0187adee148967bb4e1 Jan 20 17:29:12 crc kubenswrapper[4558]: I0120 17:29:12.996455 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"6ecac33c-30fa-4996-b271-318148e62416","Type":"ContainerStarted","Data":"a333ee4a53687d89af384ce1c1e7dd6ab814cf37eae12fca495be4bf694c8f99"} Jan 20 17:29:12 crc kubenswrapper[4558]: I0120 17:29:12.996811 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"6ecac33c-30fa-4996-b271-318148e62416","Type":"ContainerStarted","Data":"b71ca26e6ba3729271cc4ed50b7c54870c0894291866c0187adee148967bb4e1"} Jan 20 17:29:13 crc kubenswrapper[4558]: I0120 17:29:13.763131 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:29:13 crc kubenswrapper[4558]: I0120 17:29:13.763212 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:29:13 crc kubenswrapper[4558]: I0120 17:29:13.821548 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:29:14 crc kubenswrapper[4558]: I0120 17:29:14.007391 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"6ecac33c-30fa-4996-b271-318148e62416","Type":"ContainerStarted","Data":"ae89028fc849f2ea980139f852b630c0eb1659da3362f8ae021fcba4d043bb4d"} Jan 20 17:29:14 crc kubenswrapper[4558]: I0120 17:29:14.034585 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ovn-northd-0" podStartSLOduration=3.034558045 podStartE2EDuration="3.034558045s" podCreationTimestamp="2026-01-20 17:29:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:29:14.024321088 +0000 UTC m=+2847.784659045" watchObservedRunningTime="2026-01-20 17:29:14.034558045 +0000 UTC m=+2847.794896012" 
Jan 20 17:29:14 crc kubenswrapper[4558]: I0120 17:29:14.075567 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:29:15 crc kubenswrapper[4558]: I0120 17:29:15.015910 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:29:15 crc kubenswrapper[4558]: I0120 17:29:15.142569 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:29:15 crc kubenswrapper[4558]: I0120 17:29:15.440853 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:29:15 crc kubenswrapper[4558]: I0120 17:29:15.440925 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:29:15 crc kubenswrapper[4558]: I0120 17:29:15.519328 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:29:15 crc kubenswrapper[4558]: I0120 17:29:15.934063 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-mn9jr" Jan 20 17:29:15 crc kubenswrapper[4558]: I0120 17:29:15.975852 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-mn9jr" Jan 20 17:29:16 crc kubenswrapper[4558]: I0120 17:29:16.074083 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/swift-storage-0"] Jan 20 17:29:16 crc kubenswrapper[4558]: I0120 17:29:16.086983 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:29:16 crc kubenswrapper[4558]: I0120 17:29:16.089561 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"swift-swift-dockercfg-sshvs" Jan 20 17:29:16 crc kubenswrapper[4558]: I0120 17:29:16.092300 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"swift-ring-files" Jan 20 17:29:16 crc kubenswrapper[4558]: I0120 17:29:16.092554 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"swift-storage-config-data" Jan 20 17:29:16 crc kubenswrapper[4558]: I0120 17:29:16.092747 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"swift-conf" Jan 20 17:29:16 crc kubenswrapper[4558]: I0120 17:29:16.109583 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-storage-0"] Jan 20 17:29:16 crc kubenswrapper[4558]: I0120 17:29:16.129876 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:29:16 crc kubenswrapper[4558]: I0120 17:29:16.172335 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-mn9jr"] Jan 20 17:29:16 crc kubenswrapper[4558]: I0120 17:29:16.292057 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/8c74a8f5-be12-4ac4-b45c-d459801927ea-cache\") pod \"swift-storage-0\" (UID: \"8c74a8f5-be12-4ac4-b45c-d459801927ea\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:29:16 crc kubenswrapper[4558]: I0120 17:29:16.292180 4558 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/8c74a8f5-be12-4ac4-b45c-d459801927ea-etc-swift\") pod \"swift-storage-0\" (UID: \"8c74a8f5-be12-4ac4-b45c-d459801927ea\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:29:16 crc kubenswrapper[4558]: I0120 17:29:16.292245 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"swift-storage-0\" (UID: \"8c74a8f5-be12-4ac4-b45c-d459801927ea\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:29:16 crc kubenswrapper[4558]: I0120 17:29:16.292436 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/8c74a8f5-be12-4ac4-b45c-d459801927ea-lock\") pod \"swift-storage-0\" (UID: \"8c74a8f5-be12-4ac4-b45c-d459801927ea\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:29:16 crc kubenswrapper[4558]: I0120 17:29:16.293155 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x64sj\" (UniqueName: \"kubernetes.io/projected/8c74a8f5-be12-4ac4-b45c-d459801927ea-kube-api-access-x64sj\") pod \"swift-storage-0\" (UID: \"8c74a8f5-be12-4ac4-b45c-d459801927ea\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:29:16 crc kubenswrapper[4558]: I0120 17:29:16.397753 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/8c74a8f5-be12-4ac4-b45c-d459801927ea-lock\") pod \"swift-storage-0\" (UID: \"8c74a8f5-be12-4ac4-b45c-d459801927ea\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:29:16 crc kubenswrapper[4558]: I0120 17:29:16.397858 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x64sj\" (UniqueName: \"kubernetes.io/projected/8c74a8f5-be12-4ac4-b45c-d459801927ea-kube-api-access-x64sj\") pod \"swift-storage-0\" (UID: \"8c74a8f5-be12-4ac4-b45c-d459801927ea\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:29:16 crc kubenswrapper[4558]: I0120 17:29:16.398029 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/8c74a8f5-be12-4ac4-b45c-d459801927ea-cache\") pod \"swift-storage-0\" (UID: \"8c74a8f5-be12-4ac4-b45c-d459801927ea\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:29:16 crc kubenswrapper[4558]: I0120 17:29:16.398091 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/8c74a8f5-be12-4ac4-b45c-d459801927ea-etc-swift\") pod \"swift-storage-0\" (UID: \"8c74a8f5-be12-4ac4-b45c-d459801927ea\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:29:16 crc kubenswrapper[4558]: I0120 17:29:16.398138 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"swift-storage-0\" (UID: \"8c74a8f5-be12-4ac4-b45c-d459801927ea\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:29:16 crc kubenswrapper[4558]: E0120 17:29:16.398387 4558 projected.go:288] Couldn't get configMap openstack-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 20 17:29:16 crc kubenswrapper[4558]: E0120 17:29:16.398446 4558 projected.go:194] Error preparing data 
for projected volume etc-swift for pod openstack-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Jan 20 17:29:16 crc kubenswrapper[4558]: E0120 17:29:16.398585 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/8c74a8f5-be12-4ac4-b45c-d459801927ea-etc-swift podName:8c74a8f5-be12-4ac4-b45c-d459801927ea nodeName:}" failed. No retries permitted until 2026-01-20 17:29:16.898535655 +0000 UTC m=+2850.658873621 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/8c74a8f5-be12-4ac4-b45c-d459801927ea-etc-swift") pod "swift-storage-0" (UID: "8c74a8f5-be12-4ac4-b45c-d459801927ea") : configmap "swift-ring-files" not found Jan 20 17:29:16 crc kubenswrapper[4558]: I0120 17:29:16.398570 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/8c74a8f5-be12-4ac4-b45c-d459801927ea-lock\") pod \"swift-storage-0\" (UID: \"8c74a8f5-be12-4ac4-b45c-d459801927ea\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:29:16 crc kubenswrapper[4558]: I0120 17:29:16.399422 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/8c74a8f5-be12-4ac4-b45c-d459801927ea-cache\") pod \"swift-storage-0\" (UID: \"8c74a8f5-be12-4ac4-b45c-d459801927ea\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:29:16 crc kubenswrapper[4558]: I0120 17:29:16.399465 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"swift-storage-0\" (UID: \"8c74a8f5-be12-4ac4-b45c-d459801927ea\") device mount path \"/mnt/openstack/pv14\"" pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:29:16 crc kubenswrapper[4558]: I0120 17:29:16.418675 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x64sj\" (UniqueName: \"kubernetes.io/projected/8c74a8f5-be12-4ac4-b45c-d459801927ea-kube-api-access-x64sj\") pod \"swift-storage-0\" (UID: \"8c74a8f5-be12-4ac4-b45c-d459801927ea\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:29:16 crc kubenswrapper[4558]: I0120 17:29:16.420257 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"swift-storage-0\" (UID: \"8c74a8f5-be12-4ac4-b45c-d459801927ea\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:29:16 crc kubenswrapper[4558]: I0120 17:29:16.905807 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/8c74a8f5-be12-4ac4-b45c-d459801927ea-etc-swift\") pod \"swift-storage-0\" (UID: \"8c74a8f5-be12-4ac4-b45c-d459801927ea\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:29:16 crc kubenswrapper[4558]: E0120 17:29:16.906043 4558 projected.go:288] Couldn't get configMap openstack-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 20 17:29:16 crc kubenswrapper[4558]: E0120 17:29:16.906362 4558 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Jan 20 17:29:16 crc kubenswrapper[4558]: E0120 17:29:16.906438 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/8c74a8f5-be12-4ac4-b45c-d459801927ea-etc-swift 
podName:8c74a8f5-be12-4ac4-b45c-d459801927ea nodeName:}" failed. No retries permitted until 2026-01-20 17:29:17.906414888 +0000 UTC m=+2851.666752845 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/8c74a8f5-be12-4ac4-b45c-d459801927ea-etc-swift") pod "swift-storage-0" (UID: "8c74a8f5-be12-4ac4-b45c-d459801927ea") : configmap "swift-ring-files" not found Jan 20 17:29:17 crc kubenswrapper[4558]: I0120 17:29:17.032612 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-mn9jr" podUID="86b88605-5219-482f-803e-edae5ea5d319" containerName="registry-server" containerID="cri-o://a2da0cc196cd077af001802b735ebf8ad80ebc52741d9d22cbe7e660fe3bf3ef" gracePeriod=2 Jan 20 17:29:17 crc kubenswrapper[4558]: I0120 17:29:17.456437 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mn9jr" Jan 20 17:29:17 crc kubenswrapper[4558]: I0120 17:29:17.530233 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rvkm5\" (UniqueName: \"kubernetes.io/projected/86b88605-5219-482f-803e-edae5ea5d319-kube-api-access-rvkm5\") pod \"86b88605-5219-482f-803e-edae5ea5d319\" (UID: \"86b88605-5219-482f-803e-edae5ea5d319\") " Jan 20 17:29:17 crc kubenswrapper[4558]: I0120 17:29:17.530295 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/86b88605-5219-482f-803e-edae5ea5d319-utilities\") pod \"86b88605-5219-482f-803e-edae5ea5d319\" (UID: \"86b88605-5219-482f-803e-edae5ea5d319\") " Jan 20 17:29:17 crc kubenswrapper[4558]: I0120 17:29:17.530449 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/86b88605-5219-482f-803e-edae5ea5d319-catalog-content\") pod \"86b88605-5219-482f-803e-edae5ea5d319\" (UID: \"86b88605-5219-482f-803e-edae5ea5d319\") " Jan 20 17:29:17 crc kubenswrapper[4558]: I0120 17:29:17.531578 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/86b88605-5219-482f-803e-edae5ea5d319-utilities" (OuterVolumeSpecName: "utilities") pod "86b88605-5219-482f-803e-edae5ea5d319" (UID: "86b88605-5219-482f-803e-edae5ea5d319"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:29:17 crc kubenswrapper[4558]: I0120 17:29:17.538459 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/86b88605-5219-482f-803e-edae5ea5d319-kube-api-access-rvkm5" (OuterVolumeSpecName: "kube-api-access-rvkm5") pod "86b88605-5219-482f-803e-edae5ea5d319" (UID: "86b88605-5219-482f-803e-edae5ea5d319"). InnerVolumeSpecName "kube-api-access-rvkm5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:29:17 crc kubenswrapper[4558]: I0120 17:29:17.634855 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rvkm5\" (UniqueName: \"kubernetes.io/projected/86b88605-5219-482f-803e-edae5ea5d319-kube-api-access-rvkm5\") on node \"crc\" DevicePath \"\"" Jan 20 17:29:17 crc kubenswrapper[4558]: I0120 17:29:17.634898 4558 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/86b88605-5219-482f-803e-edae5ea5d319-utilities\") on node \"crc\" DevicePath \"\"" Jan 20 17:29:17 crc kubenswrapper[4558]: I0120 17:29:17.685368 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/86b88605-5219-482f-803e-edae5ea5d319-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "86b88605-5219-482f-803e-edae5ea5d319" (UID: "86b88605-5219-482f-803e-edae5ea5d319"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:29:17 crc kubenswrapper[4558]: I0120 17:29:17.736364 4558 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/86b88605-5219-482f-803e-edae5ea5d319-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 20 17:29:17 crc kubenswrapper[4558]: I0120 17:29:17.940501 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/8c74a8f5-be12-4ac4-b45c-d459801927ea-etc-swift\") pod \"swift-storage-0\" (UID: \"8c74a8f5-be12-4ac4-b45c-d459801927ea\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:29:17 crc kubenswrapper[4558]: E0120 17:29:17.940747 4558 projected.go:288] Couldn't get configMap openstack-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 20 17:29:17 crc kubenswrapper[4558]: E0120 17:29:17.940796 4558 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Jan 20 17:29:17 crc kubenswrapper[4558]: E0120 17:29:17.940860 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/8c74a8f5-be12-4ac4-b45c-d459801927ea-etc-swift podName:8c74a8f5-be12-4ac4-b45c-d459801927ea nodeName:}" failed. No retries permitted until 2026-01-20 17:29:19.940839414 +0000 UTC m=+2853.701177381 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/8c74a8f5-be12-4ac4-b45c-d459801927ea-etc-swift") pod "swift-storage-0" (UID: "8c74a8f5-be12-4ac4-b45c-d459801927ea") : configmap "swift-ring-files" not found Jan 20 17:29:18 crc kubenswrapper[4558]: I0120 17:29:18.055855 4558 generic.go:334] "Generic (PLEG): container finished" podID="86b88605-5219-482f-803e-edae5ea5d319" containerID="a2da0cc196cd077af001802b735ebf8ad80ebc52741d9d22cbe7e660fe3bf3ef" exitCode=0 Jan 20 17:29:18 crc kubenswrapper[4558]: I0120 17:29:18.055903 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mn9jr" event={"ID":"86b88605-5219-482f-803e-edae5ea5d319","Type":"ContainerDied","Data":"a2da0cc196cd077af001802b735ebf8ad80ebc52741d9d22cbe7e660fe3bf3ef"} Jan 20 17:29:18 crc kubenswrapper[4558]: I0120 17:29:18.055940 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mn9jr" event={"ID":"86b88605-5219-482f-803e-edae5ea5d319","Type":"ContainerDied","Data":"e054cf99c703307392344cee65707bf2149a5df79632e3cb8462d22b28db6d41"} Jan 20 17:29:18 crc kubenswrapper[4558]: I0120 17:29:18.055960 4558 scope.go:117] "RemoveContainer" containerID="a2da0cc196cd077af001802b735ebf8ad80ebc52741d9d22cbe7e660fe3bf3ef" Jan 20 17:29:18 crc kubenswrapper[4558]: I0120 17:29:18.056119 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mn9jr" Jan 20 17:29:18 crc kubenswrapper[4558]: I0120 17:29:18.086229 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-mn9jr"] Jan 20 17:29:18 crc kubenswrapper[4558]: I0120 17:29:18.086866 4558 scope.go:117] "RemoveContainer" containerID="9b5a8a676324fb1e8f2b5d27898efb15271fce6ab775bd87dfff07b977d04682" Jan 20 17:29:18 crc kubenswrapper[4558]: I0120 17:29:18.090544 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-mn9jr"] Jan 20 17:29:18 crc kubenswrapper[4558]: I0120 17:29:18.113253 4558 scope.go:117] "RemoveContainer" containerID="94e8d5e6a044a2bc19b0c7a23c0dadff21b1814df4e2d558c68b7f6c9c9621ac" Jan 20 17:29:18 crc kubenswrapper[4558]: I0120 17:29:18.130035 4558 scope.go:117] "RemoveContainer" containerID="a2da0cc196cd077af001802b735ebf8ad80ebc52741d9d22cbe7e660fe3bf3ef" Jan 20 17:29:18 crc kubenswrapper[4558]: E0120 17:29:18.130410 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a2da0cc196cd077af001802b735ebf8ad80ebc52741d9d22cbe7e660fe3bf3ef\": container with ID starting with a2da0cc196cd077af001802b735ebf8ad80ebc52741d9d22cbe7e660fe3bf3ef not found: ID does not exist" containerID="a2da0cc196cd077af001802b735ebf8ad80ebc52741d9d22cbe7e660fe3bf3ef" Jan 20 17:29:18 crc kubenswrapper[4558]: I0120 17:29:18.130451 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a2da0cc196cd077af001802b735ebf8ad80ebc52741d9d22cbe7e660fe3bf3ef"} err="failed to get container status \"a2da0cc196cd077af001802b735ebf8ad80ebc52741d9d22cbe7e660fe3bf3ef\": rpc error: code = NotFound desc = could not find container \"a2da0cc196cd077af001802b735ebf8ad80ebc52741d9d22cbe7e660fe3bf3ef\": container with ID starting with a2da0cc196cd077af001802b735ebf8ad80ebc52741d9d22cbe7e660fe3bf3ef not found: ID does not exist" Jan 20 17:29:18 crc kubenswrapper[4558]: I0120 17:29:18.130479 4558 scope.go:117] "RemoveContainer" 
containerID="9b5a8a676324fb1e8f2b5d27898efb15271fce6ab775bd87dfff07b977d04682" Jan 20 17:29:18 crc kubenswrapper[4558]: E0120 17:29:18.130833 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9b5a8a676324fb1e8f2b5d27898efb15271fce6ab775bd87dfff07b977d04682\": container with ID starting with 9b5a8a676324fb1e8f2b5d27898efb15271fce6ab775bd87dfff07b977d04682 not found: ID does not exist" containerID="9b5a8a676324fb1e8f2b5d27898efb15271fce6ab775bd87dfff07b977d04682" Jan 20 17:29:18 crc kubenswrapper[4558]: I0120 17:29:18.130863 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9b5a8a676324fb1e8f2b5d27898efb15271fce6ab775bd87dfff07b977d04682"} err="failed to get container status \"9b5a8a676324fb1e8f2b5d27898efb15271fce6ab775bd87dfff07b977d04682\": rpc error: code = NotFound desc = could not find container \"9b5a8a676324fb1e8f2b5d27898efb15271fce6ab775bd87dfff07b977d04682\": container with ID starting with 9b5a8a676324fb1e8f2b5d27898efb15271fce6ab775bd87dfff07b977d04682 not found: ID does not exist" Jan 20 17:29:18 crc kubenswrapper[4558]: I0120 17:29:18.130890 4558 scope.go:117] "RemoveContainer" containerID="94e8d5e6a044a2bc19b0c7a23c0dadff21b1814df4e2d558c68b7f6c9c9621ac" Jan 20 17:29:18 crc kubenswrapper[4558]: E0120 17:29:18.131143 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"94e8d5e6a044a2bc19b0c7a23c0dadff21b1814df4e2d558c68b7f6c9c9621ac\": container with ID starting with 94e8d5e6a044a2bc19b0c7a23c0dadff21b1814df4e2d558c68b7f6c9c9621ac not found: ID does not exist" containerID="94e8d5e6a044a2bc19b0c7a23c0dadff21b1814df4e2d558c68b7f6c9c9621ac" Jan 20 17:29:18 crc kubenswrapper[4558]: I0120 17:29:18.131195 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"94e8d5e6a044a2bc19b0c7a23c0dadff21b1814df4e2d558c68b7f6c9c9621ac"} err="failed to get container status \"94e8d5e6a044a2bc19b0c7a23c0dadff21b1814df4e2d558c68b7f6c9c9621ac\": rpc error: code = NotFound desc = could not find container \"94e8d5e6a044a2bc19b0c7a23c0dadff21b1814df4e2d558c68b7f6c9c9621ac\": container with ID starting with 94e8d5e6a044a2bc19b0c7a23c0dadff21b1814df4e2d558c68b7f6c9c9621ac not found: ID does not exist" Jan 20 17:29:18 crc kubenswrapper[4558]: I0120 17:29:18.576204 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="86b88605-5219-482f-803e-edae5ea5d319" path="/var/lib/kubelet/pods/86b88605-5219-482f-803e-edae5ea5d319/volumes" Jan 20 17:29:19 crc kubenswrapper[4558]: I0120 17:29:19.421198 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-db-create-5jmnq"] Jan 20 17:29:19 crc kubenswrapper[4558]: E0120 17:29:19.421671 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86b88605-5219-482f-803e-edae5ea5d319" containerName="extract-utilities" Jan 20 17:29:19 crc kubenswrapper[4558]: I0120 17:29:19.421695 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="86b88605-5219-482f-803e-edae5ea5d319" containerName="extract-utilities" Jan 20 17:29:19 crc kubenswrapper[4558]: E0120 17:29:19.421724 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86b88605-5219-482f-803e-edae5ea5d319" containerName="registry-server" Jan 20 17:29:19 crc kubenswrapper[4558]: I0120 17:29:19.421731 4558 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="86b88605-5219-482f-803e-edae5ea5d319" containerName="registry-server" Jan 20 17:29:19 crc kubenswrapper[4558]: E0120 17:29:19.421743 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86b88605-5219-482f-803e-edae5ea5d319" containerName="extract-content" Jan 20 17:29:19 crc kubenswrapper[4558]: I0120 17:29:19.421749 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="86b88605-5219-482f-803e-edae5ea5d319" containerName="extract-content" Jan 20 17:29:19 crc kubenswrapper[4558]: I0120 17:29:19.421981 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="86b88605-5219-482f-803e-edae5ea5d319" containerName="registry-server" Jan 20 17:29:19 crc kubenswrapper[4558]: I0120 17:29:19.422686 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-db-create-5jmnq" Jan 20 17:29:19 crc kubenswrapper[4558]: I0120 17:29:19.428823 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-db-create-5jmnq"] Jan 20 17:29:19 crc kubenswrapper[4558]: I0120 17:29:19.477626 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eaa89d12-4192-4a41-b6f5-fffc5e1121e0-operator-scripts\") pod \"glance-db-create-5jmnq\" (UID: \"eaa89d12-4192-4a41-b6f5-fffc5e1121e0\") " pod="openstack-kuttl-tests/glance-db-create-5jmnq" Jan 20 17:29:19 crc kubenswrapper[4558]: I0120 17:29:19.477697 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7c7z8\" (UniqueName: \"kubernetes.io/projected/eaa89d12-4192-4a41-b6f5-fffc5e1121e0-kube-api-access-7c7z8\") pod \"glance-db-create-5jmnq\" (UID: \"eaa89d12-4192-4a41-b6f5-fffc5e1121e0\") " pod="openstack-kuttl-tests/glance-db-create-5jmnq" Jan 20 17:29:19 crc kubenswrapper[4558]: I0120 17:29:19.527375 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-19e0-account-create-update-x5cqg"] Jan 20 17:29:19 crc kubenswrapper[4558]: I0120 17:29:19.528919 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-19e0-account-create-update-x5cqg" Jan 20 17:29:19 crc kubenswrapper[4558]: I0120 17:29:19.530868 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-db-secret" Jan 20 17:29:19 crc kubenswrapper[4558]: I0120 17:29:19.532819 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-19e0-account-create-update-x5cqg"] Jan 20 17:29:19 crc kubenswrapper[4558]: I0120 17:29:19.579428 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eaa89d12-4192-4a41-b6f5-fffc5e1121e0-operator-scripts\") pod \"glance-db-create-5jmnq\" (UID: \"eaa89d12-4192-4a41-b6f5-fffc5e1121e0\") " pod="openstack-kuttl-tests/glance-db-create-5jmnq" Jan 20 17:29:19 crc kubenswrapper[4558]: I0120 17:29:19.579489 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7c7z8\" (UniqueName: \"kubernetes.io/projected/eaa89d12-4192-4a41-b6f5-fffc5e1121e0-kube-api-access-7c7z8\") pod \"glance-db-create-5jmnq\" (UID: \"eaa89d12-4192-4a41-b6f5-fffc5e1121e0\") " pod="openstack-kuttl-tests/glance-db-create-5jmnq" Jan 20 17:29:19 crc kubenswrapper[4558]: I0120 17:29:19.579537 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3850786b-f9ee-4ecb-a15e-ccad25577f01-operator-scripts\") pod \"glance-19e0-account-create-update-x5cqg\" (UID: \"3850786b-f9ee-4ecb-a15e-ccad25577f01\") " pod="openstack-kuttl-tests/glance-19e0-account-create-update-x5cqg" Jan 20 17:29:19 crc kubenswrapper[4558]: I0120 17:29:19.579587 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9m56q\" (UniqueName: \"kubernetes.io/projected/3850786b-f9ee-4ecb-a15e-ccad25577f01-kube-api-access-9m56q\") pod \"glance-19e0-account-create-update-x5cqg\" (UID: \"3850786b-f9ee-4ecb-a15e-ccad25577f01\") " pod="openstack-kuttl-tests/glance-19e0-account-create-update-x5cqg" Jan 20 17:29:19 crc kubenswrapper[4558]: I0120 17:29:19.580131 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eaa89d12-4192-4a41-b6f5-fffc5e1121e0-operator-scripts\") pod \"glance-db-create-5jmnq\" (UID: \"eaa89d12-4192-4a41-b6f5-fffc5e1121e0\") " pod="openstack-kuttl-tests/glance-db-create-5jmnq" Jan 20 17:29:19 crc kubenswrapper[4558]: I0120 17:29:19.595897 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7c7z8\" (UniqueName: \"kubernetes.io/projected/eaa89d12-4192-4a41-b6f5-fffc5e1121e0-kube-api-access-7c7z8\") pod \"glance-db-create-5jmnq\" (UID: \"eaa89d12-4192-4a41-b6f5-fffc5e1121e0\") " pod="openstack-kuttl-tests/glance-db-create-5jmnq" Jan 20 17:29:19 crc kubenswrapper[4558]: I0120 17:29:19.680417 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3850786b-f9ee-4ecb-a15e-ccad25577f01-operator-scripts\") pod \"glance-19e0-account-create-update-x5cqg\" (UID: \"3850786b-f9ee-4ecb-a15e-ccad25577f01\") " pod="openstack-kuttl-tests/glance-19e0-account-create-update-x5cqg" Jan 20 17:29:19 crc kubenswrapper[4558]: I0120 17:29:19.680488 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9m56q\" (UniqueName: 
\"kubernetes.io/projected/3850786b-f9ee-4ecb-a15e-ccad25577f01-kube-api-access-9m56q\") pod \"glance-19e0-account-create-update-x5cqg\" (UID: \"3850786b-f9ee-4ecb-a15e-ccad25577f01\") " pod="openstack-kuttl-tests/glance-19e0-account-create-update-x5cqg" Jan 20 17:29:19 crc kubenswrapper[4558]: I0120 17:29:19.681560 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3850786b-f9ee-4ecb-a15e-ccad25577f01-operator-scripts\") pod \"glance-19e0-account-create-update-x5cqg\" (UID: \"3850786b-f9ee-4ecb-a15e-ccad25577f01\") " pod="openstack-kuttl-tests/glance-19e0-account-create-update-x5cqg" Jan 20 17:29:19 crc kubenswrapper[4558]: I0120 17:29:19.695213 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9m56q\" (UniqueName: \"kubernetes.io/projected/3850786b-f9ee-4ecb-a15e-ccad25577f01-kube-api-access-9m56q\") pod \"glance-19e0-account-create-update-x5cqg\" (UID: \"3850786b-f9ee-4ecb-a15e-ccad25577f01\") " pod="openstack-kuttl-tests/glance-19e0-account-create-update-x5cqg" Jan 20 17:29:19 crc kubenswrapper[4558]: I0120 17:29:19.738738 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-db-create-5jmnq" Jan 20 17:29:19 crc kubenswrapper[4558]: I0120 17:29:19.842990 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-19e0-account-create-update-x5cqg" Jan 20 17:29:19 crc kubenswrapper[4558]: I0120 17:29:19.963750 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-zzttm"] Jan 20 17:29:19 crc kubenswrapper[4558]: I0120 17:29:19.964796 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/swift-ring-rebalance-zzttm" Jan 20 17:29:19 crc kubenswrapper[4558]: I0120 17:29:19.966418 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"swift-proxy-config-data" Jan 20 17:29:19 crc kubenswrapper[4558]: I0120 17:29:19.969135 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"swift-ring-scripts" Jan 20 17:29:19 crc kubenswrapper[4558]: I0120 17:29:19.969333 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"swift-ring-config-data" Jan 20 17:29:19 crc kubenswrapper[4558]: I0120 17:29:19.985515 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/8c74a8f5-be12-4ac4-b45c-d459801927ea-etc-swift\") pod \"swift-storage-0\" (UID: \"8c74a8f5-be12-4ac4-b45c-d459801927ea\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:29:19 crc kubenswrapper[4558]: I0120 17:29:19.985569 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/31071f28-1020-4bbd-b122-c1dcf9a67a14-swiftconf\") pod \"swift-ring-rebalance-zzttm\" (UID: \"31071f28-1020-4bbd-b122-c1dcf9a67a14\") " pod="openstack-kuttl-tests/swift-ring-rebalance-zzttm" Jan 20 17:29:19 crc kubenswrapper[4558]: I0120 17:29:19.985615 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/31071f28-1020-4bbd-b122-c1dcf9a67a14-scripts\") pod \"swift-ring-rebalance-zzttm\" (UID: \"31071f28-1020-4bbd-b122-c1dcf9a67a14\") " pod="openstack-kuttl-tests/swift-ring-rebalance-zzttm" Jan 20 17:29:19 crc kubenswrapper[4558]: I0120 17:29:19.985676 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/31071f28-1020-4bbd-b122-c1dcf9a67a14-ring-data-devices\") pod \"swift-ring-rebalance-zzttm\" (UID: \"31071f28-1020-4bbd-b122-c1dcf9a67a14\") " pod="openstack-kuttl-tests/swift-ring-rebalance-zzttm" Jan 20 17:29:19 crc kubenswrapper[4558]: I0120 17:29:19.985693 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/31071f28-1020-4bbd-b122-c1dcf9a67a14-dispersionconf\") pod \"swift-ring-rebalance-zzttm\" (UID: \"31071f28-1020-4bbd-b122-c1dcf9a67a14\") " pod="openstack-kuttl-tests/swift-ring-rebalance-zzttm" Jan 20 17:29:19 crc kubenswrapper[4558]: I0120 17:29:19.985713 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-zzttm"] Jan 20 17:29:19 crc kubenswrapper[4558]: I0120 17:29:19.985721 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l874w\" (UniqueName: \"kubernetes.io/projected/31071f28-1020-4bbd-b122-c1dcf9a67a14-kube-api-access-l874w\") pod \"swift-ring-rebalance-zzttm\" (UID: \"31071f28-1020-4bbd-b122-c1dcf9a67a14\") " pod="openstack-kuttl-tests/swift-ring-rebalance-zzttm" Jan 20 17:29:19 crc kubenswrapper[4558]: I0120 17:29:19.985837 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31071f28-1020-4bbd-b122-c1dcf9a67a14-combined-ca-bundle\") pod \"swift-ring-rebalance-zzttm\" (UID: 
\"31071f28-1020-4bbd-b122-c1dcf9a67a14\") " pod="openstack-kuttl-tests/swift-ring-rebalance-zzttm" Jan 20 17:29:19 crc kubenswrapper[4558]: I0120 17:29:19.985923 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/31071f28-1020-4bbd-b122-c1dcf9a67a14-etc-swift\") pod \"swift-ring-rebalance-zzttm\" (UID: \"31071f28-1020-4bbd-b122-c1dcf9a67a14\") " pod="openstack-kuttl-tests/swift-ring-rebalance-zzttm" Jan 20 17:29:19 crc kubenswrapper[4558]: E0120 17:29:19.986104 4558 projected.go:288] Couldn't get configMap openstack-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 20 17:29:19 crc kubenswrapper[4558]: E0120 17:29:19.986119 4558 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Jan 20 17:29:19 crc kubenswrapper[4558]: E0120 17:29:19.986157 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/8c74a8f5-be12-4ac4-b45c-d459801927ea-etc-swift podName:8c74a8f5-be12-4ac4-b45c-d459801927ea nodeName:}" failed. No retries permitted until 2026-01-20 17:29:23.986142543 +0000 UTC m=+2857.746480510 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/8c74a8f5-be12-4ac4-b45c-d459801927ea-etc-swift") pod "swift-storage-0" (UID: "8c74a8f5-be12-4ac4-b45c-d459801927ea") : configmap "swift-ring-files" not found Jan 20 17:29:20 crc kubenswrapper[4558]: I0120 17:29:20.087503 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/31071f28-1020-4bbd-b122-c1dcf9a67a14-etc-swift\") pod \"swift-ring-rebalance-zzttm\" (UID: \"31071f28-1020-4bbd-b122-c1dcf9a67a14\") " pod="openstack-kuttl-tests/swift-ring-rebalance-zzttm" Jan 20 17:29:20 crc kubenswrapper[4558]: I0120 17:29:20.087633 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/31071f28-1020-4bbd-b122-c1dcf9a67a14-swiftconf\") pod \"swift-ring-rebalance-zzttm\" (UID: \"31071f28-1020-4bbd-b122-c1dcf9a67a14\") " pod="openstack-kuttl-tests/swift-ring-rebalance-zzttm" Jan 20 17:29:20 crc kubenswrapper[4558]: I0120 17:29:20.087701 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/31071f28-1020-4bbd-b122-c1dcf9a67a14-scripts\") pod \"swift-ring-rebalance-zzttm\" (UID: \"31071f28-1020-4bbd-b122-c1dcf9a67a14\") " pod="openstack-kuttl-tests/swift-ring-rebalance-zzttm" Jan 20 17:29:20 crc kubenswrapper[4558]: I0120 17:29:20.087800 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/31071f28-1020-4bbd-b122-c1dcf9a67a14-ring-data-devices\") pod \"swift-ring-rebalance-zzttm\" (UID: \"31071f28-1020-4bbd-b122-c1dcf9a67a14\") " pod="openstack-kuttl-tests/swift-ring-rebalance-zzttm" Jan 20 17:29:20 crc kubenswrapper[4558]: I0120 17:29:20.087826 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/31071f28-1020-4bbd-b122-c1dcf9a67a14-dispersionconf\") pod \"swift-ring-rebalance-zzttm\" (UID: \"31071f28-1020-4bbd-b122-c1dcf9a67a14\") " pod="openstack-kuttl-tests/swift-ring-rebalance-zzttm" Jan 20 17:29:20 crc kubenswrapper[4558]: I0120 
17:29:20.087856 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l874w\" (UniqueName: \"kubernetes.io/projected/31071f28-1020-4bbd-b122-c1dcf9a67a14-kube-api-access-l874w\") pod \"swift-ring-rebalance-zzttm\" (UID: \"31071f28-1020-4bbd-b122-c1dcf9a67a14\") " pod="openstack-kuttl-tests/swift-ring-rebalance-zzttm" Jan 20 17:29:20 crc kubenswrapper[4558]: I0120 17:29:20.087889 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31071f28-1020-4bbd-b122-c1dcf9a67a14-combined-ca-bundle\") pod \"swift-ring-rebalance-zzttm\" (UID: \"31071f28-1020-4bbd-b122-c1dcf9a67a14\") " pod="openstack-kuttl-tests/swift-ring-rebalance-zzttm" Jan 20 17:29:20 crc kubenswrapper[4558]: I0120 17:29:20.088491 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/31071f28-1020-4bbd-b122-c1dcf9a67a14-scripts\") pod \"swift-ring-rebalance-zzttm\" (UID: \"31071f28-1020-4bbd-b122-c1dcf9a67a14\") " pod="openstack-kuttl-tests/swift-ring-rebalance-zzttm" Jan 20 17:29:20 crc kubenswrapper[4558]: I0120 17:29:20.088594 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/31071f28-1020-4bbd-b122-c1dcf9a67a14-ring-data-devices\") pod \"swift-ring-rebalance-zzttm\" (UID: \"31071f28-1020-4bbd-b122-c1dcf9a67a14\") " pod="openstack-kuttl-tests/swift-ring-rebalance-zzttm" Jan 20 17:29:20 crc kubenswrapper[4558]: I0120 17:29:20.088733 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/31071f28-1020-4bbd-b122-c1dcf9a67a14-etc-swift\") pod \"swift-ring-rebalance-zzttm\" (UID: \"31071f28-1020-4bbd-b122-c1dcf9a67a14\") " pod="openstack-kuttl-tests/swift-ring-rebalance-zzttm" Jan 20 17:29:20 crc kubenswrapper[4558]: I0120 17:29:20.093192 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/31071f28-1020-4bbd-b122-c1dcf9a67a14-dispersionconf\") pod \"swift-ring-rebalance-zzttm\" (UID: \"31071f28-1020-4bbd-b122-c1dcf9a67a14\") " pod="openstack-kuttl-tests/swift-ring-rebalance-zzttm" Jan 20 17:29:20 crc kubenswrapper[4558]: I0120 17:29:20.093299 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/31071f28-1020-4bbd-b122-c1dcf9a67a14-swiftconf\") pod \"swift-ring-rebalance-zzttm\" (UID: \"31071f28-1020-4bbd-b122-c1dcf9a67a14\") " pod="openstack-kuttl-tests/swift-ring-rebalance-zzttm" Jan 20 17:29:20 crc kubenswrapper[4558]: I0120 17:29:20.093905 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31071f28-1020-4bbd-b122-c1dcf9a67a14-combined-ca-bundle\") pod \"swift-ring-rebalance-zzttm\" (UID: \"31071f28-1020-4bbd-b122-c1dcf9a67a14\") " pod="openstack-kuttl-tests/swift-ring-rebalance-zzttm" Jan 20 17:29:20 crc kubenswrapper[4558]: I0120 17:29:20.101651 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l874w\" (UniqueName: \"kubernetes.io/projected/31071f28-1020-4bbd-b122-c1dcf9a67a14-kube-api-access-l874w\") pod \"swift-ring-rebalance-zzttm\" (UID: \"31071f28-1020-4bbd-b122-c1dcf9a67a14\") " pod="openstack-kuttl-tests/swift-ring-rebalance-zzttm" Jan 20 17:29:20 crc kubenswrapper[4558]: I0120 17:29:20.142213 4558 
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-db-create-5jmnq"] Jan 20 17:29:20 crc kubenswrapper[4558]: W0120 17:29:20.145828 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podeaa89d12_4192_4a41_b6f5_fffc5e1121e0.slice/crio-c8dd0a4a2a29182f5ae0438e0020594997238d6a9731972ef5852237fd149568 WatchSource:0}: Error finding container c8dd0a4a2a29182f5ae0438e0020594997238d6a9731972ef5852237fd149568: Status 404 returned error can't find the container with id c8dd0a4a2a29182f5ae0438e0020594997238d6a9731972ef5852237fd149568 Jan 20 17:29:20 crc kubenswrapper[4558]: I0120 17:29:20.260831 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-19e0-account-create-update-x5cqg"] Jan 20 17:29:20 crc kubenswrapper[4558]: W0120 17:29:20.264295 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3850786b_f9ee_4ecb_a15e_ccad25577f01.slice/crio-f3f9d6f99fd702a3664102c557f0f867420fbff37d9ed9bd08e20deafa27bc96 WatchSource:0}: Error finding container f3f9d6f99fd702a3664102c557f0f867420fbff37d9ed9bd08e20deafa27bc96: Status 404 returned error can't find the container with id f3f9d6f99fd702a3664102c557f0f867420fbff37d9ed9bd08e20deafa27bc96 Jan 20 17:29:20 crc kubenswrapper[4558]: I0120 17:29:20.289230 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-ring-rebalance-zzttm" Jan 20 17:29:20 crc kubenswrapper[4558]: I0120 17:29:20.700386 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-zzttm"] Jan 20 17:29:20 crc kubenswrapper[4558]: W0120 17:29:20.710069 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod31071f28_1020_4bbd_b122_c1dcf9a67a14.slice/crio-01ba4fc520f8548af793bef9802b7737bd2cad5e86c912ac32db4108c59fcbfe WatchSource:0}: Error finding container 01ba4fc520f8548af793bef9802b7737bd2cad5e86c912ac32db4108c59fcbfe: Status 404 returned error can't find the container with id 01ba4fc520f8548af793bef9802b7737bd2cad5e86c912ac32db4108c59fcbfe Jan 20 17:29:21 crc kubenswrapper[4558]: I0120 17:29:21.089942 4558 generic.go:334] "Generic (PLEG): container finished" podID="3850786b-f9ee-4ecb-a15e-ccad25577f01" containerID="a7601c725675bc7987095de4d5932e6397cc2d621e319d01c58442eee53836e0" exitCode=0 Jan 20 17:29:21 crc kubenswrapper[4558]: I0120 17:29:21.090044 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-19e0-account-create-update-x5cqg" event={"ID":"3850786b-f9ee-4ecb-a15e-ccad25577f01","Type":"ContainerDied","Data":"a7601c725675bc7987095de4d5932e6397cc2d621e319d01c58442eee53836e0"} Jan 20 17:29:21 crc kubenswrapper[4558]: I0120 17:29:21.090312 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-19e0-account-create-update-x5cqg" event={"ID":"3850786b-f9ee-4ecb-a15e-ccad25577f01","Type":"ContainerStarted","Data":"f3f9d6f99fd702a3664102c557f0f867420fbff37d9ed9bd08e20deafa27bc96"} Jan 20 17:29:21 crc kubenswrapper[4558]: I0120 17:29:21.092396 4558 generic.go:334] "Generic (PLEG): container finished" podID="eaa89d12-4192-4a41-b6f5-fffc5e1121e0" containerID="75ebc41ab16927c48f8afc5adcf73afda1095d0f103558bf36ce7dae6248c5c7" exitCode=0 Jan 20 17:29:21 crc kubenswrapper[4558]: I0120 17:29:21.092547 4558 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-create-5jmnq" event={"ID":"eaa89d12-4192-4a41-b6f5-fffc5e1121e0","Type":"ContainerDied","Data":"75ebc41ab16927c48f8afc5adcf73afda1095d0f103558bf36ce7dae6248c5c7"} Jan 20 17:29:21 crc kubenswrapper[4558]: I0120 17:29:21.092625 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-create-5jmnq" event={"ID":"eaa89d12-4192-4a41-b6f5-fffc5e1121e0","Type":"ContainerStarted","Data":"c8dd0a4a2a29182f5ae0438e0020594997238d6a9731972ef5852237fd149568"} Jan 20 17:29:21 crc kubenswrapper[4558]: I0120 17:29:21.094013 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-ring-rebalance-zzttm" event={"ID":"31071f28-1020-4bbd-b122-c1dcf9a67a14","Type":"ContainerStarted","Data":"3a3e340167ccce72bbe411439f7909473c7e135797ac4b0db65fe1584f2e6946"} Jan 20 17:29:21 crc kubenswrapper[4558]: I0120 17:29:21.094088 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-ring-rebalance-zzttm" event={"ID":"31071f28-1020-4bbd-b122-c1dcf9a67a14","Type":"ContainerStarted","Data":"01ba4fc520f8548af793bef9802b7737bd2cad5e86c912ac32db4108c59fcbfe"} Jan 20 17:29:21 crc kubenswrapper[4558]: I0120 17:29:21.126591 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/swift-ring-rebalance-zzttm" podStartSLOduration=2.126575979 podStartE2EDuration="2.126575979s" podCreationTimestamp="2026-01-20 17:29:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:29:21.121451594 +0000 UTC m=+2854.881789560" watchObservedRunningTime="2026-01-20 17:29:21.126575979 +0000 UTC m=+2854.886913946" Jan 20 17:29:22 crc kubenswrapper[4558]: I0120 17:29:22.391981 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:29:22 crc kubenswrapper[4558]: I0120 17:29:22.441614 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/root-account-create-update-w6cqr"] Jan 20 17:29:22 crc kubenswrapper[4558]: I0120 17:29:22.442932 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-w6cqr" Jan 20 17:29:22 crc kubenswrapper[4558]: I0120 17:29:22.446705 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"openstack-mariadb-root-db-secret" Jan 20 17:29:22 crc kubenswrapper[4558]: I0120 17:29:22.460900 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-w6cqr"] Jan 20 17:29:22 crc kubenswrapper[4558]: I0120 17:29:22.468108 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-db-create-5jmnq" Jan 20 17:29:22 crc kubenswrapper[4558]: I0120 17:29:22.533293 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c7z8\" (UniqueName: \"kubernetes.io/projected/eaa89d12-4192-4a41-b6f5-fffc5e1121e0-kube-api-access-7c7z8\") pod \"eaa89d12-4192-4a41-b6f5-fffc5e1121e0\" (UID: \"eaa89d12-4192-4a41-b6f5-fffc5e1121e0\") " Jan 20 17:29:22 crc kubenswrapper[4558]: I0120 17:29:22.533350 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eaa89d12-4192-4a41-b6f5-fffc5e1121e0-operator-scripts\") pod \"eaa89d12-4192-4a41-b6f5-fffc5e1121e0\" (UID: \"eaa89d12-4192-4a41-b6f5-fffc5e1121e0\") " Jan 20 17:29:22 crc kubenswrapper[4558]: I0120 17:29:22.533552 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v82js\" (UniqueName: \"kubernetes.io/projected/854a193a-de08-44cb-967f-079392fa6680-kube-api-access-v82js\") pod \"root-account-create-update-w6cqr\" (UID: \"854a193a-de08-44cb-967f-079392fa6680\") " pod="openstack-kuttl-tests/root-account-create-update-w6cqr" Jan 20 17:29:22 crc kubenswrapper[4558]: I0120 17:29:22.533678 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/854a193a-de08-44cb-967f-079392fa6680-operator-scripts\") pod \"root-account-create-update-w6cqr\" (UID: \"854a193a-de08-44cb-967f-079392fa6680\") " pod="openstack-kuttl-tests/root-account-create-update-w6cqr" Jan 20 17:29:22 crc kubenswrapper[4558]: I0120 17:29:22.533787 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eaa89d12-4192-4a41-b6f5-fffc5e1121e0-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "eaa89d12-4192-4a41-b6f5-fffc5e1121e0" (UID: "eaa89d12-4192-4a41-b6f5-fffc5e1121e0"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:29:22 crc kubenswrapper[4558]: I0120 17:29:22.538503 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eaa89d12-4192-4a41-b6f5-fffc5e1121e0-kube-api-access-7c7z8" (OuterVolumeSpecName: "kube-api-access-7c7z8") pod "eaa89d12-4192-4a41-b6f5-fffc5e1121e0" (UID: "eaa89d12-4192-4a41-b6f5-fffc5e1121e0"). InnerVolumeSpecName "kube-api-access-7c7z8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:29:22 crc kubenswrapper[4558]: I0120 17:29:22.541189 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-19e0-account-create-update-x5cqg" Jan 20 17:29:22 crc kubenswrapper[4558]: I0120 17:29:22.634713 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9m56q\" (UniqueName: \"kubernetes.io/projected/3850786b-f9ee-4ecb-a15e-ccad25577f01-kube-api-access-9m56q\") pod \"3850786b-f9ee-4ecb-a15e-ccad25577f01\" (UID: \"3850786b-f9ee-4ecb-a15e-ccad25577f01\") " Jan 20 17:29:22 crc kubenswrapper[4558]: I0120 17:29:22.634858 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3850786b-f9ee-4ecb-a15e-ccad25577f01-operator-scripts\") pod \"3850786b-f9ee-4ecb-a15e-ccad25577f01\" (UID: \"3850786b-f9ee-4ecb-a15e-ccad25577f01\") " Jan 20 17:29:22 crc kubenswrapper[4558]: I0120 17:29:22.635053 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v82js\" (UniqueName: \"kubernetes.io/projected/854a193a-de08-44cb-967f-079392fa6680-kube-api-access-v82js\") pod \"root-account-create-update-w6cqr\" (UID: \"854a193a-de08-44cb-967f-079392fa6680\") " pod="openstack-kuttl-tests/root-account-create-update-w6cqr" Jan 20 17:29:22 crc kubenswrapper[4558]: I0120 17:29:22.635183 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/854a193a-de08-44cb-967f-079392fa6680-operator-scripts\") pod \"root-account-create-update-w6cqr\" (UID: \"854a193a-de08-44cb-967f-079392fa6680\") " pod="openstack-kuttl-tests/root-account-create-update-w6cqr" Jan 20 17:29:22 crc kubenswrapper[4558]: I0120 17:29:22.635263 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c7z8\" (UniqueName: \"kubernetes.io/projected/eaa89d12-4192-4a41-b6f5-fffc5e1121e0-kube-api-access-7c7z8\") on node \"crc\" DevicePath \"\"" Jan 20 17:29:22 crc kubenswrapper[4558]: I0120 17:29:22.635281 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eaa89d12-4192-4a41-b6f5-fffc5e1121e0-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:29:22 crc kubenswrapper[4558]: I0120 17:29:22.635868 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/854a193a-de08-44cb-967f-079392fa6680-operator-scripts\") pod \"root-account-create-update-w6cqr\" (UID: \"854a193a-de08-44cb-967f-079392fa6680\") " pod="openstack-kuttl-tests/root-account-create-update-w6cqr" Jan 20 17:29:22 crc kubenswrapper[4558]: I0120 17:29:22.636546 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3850786b-f9ee-4ecb-a15e-ccad25577f01-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "3850786b-f9ee-4ecb-a15e-ccad25577f01" (UID: "3850786b-f9ee-4ecb-a15e-ccad25577f01"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:29:22 crc kubenswrapper[4558]: I0120 17:29:22.639019 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3850786b-f9ee-4ecb-a15e-ccad25577f01-kube-api-access-9m56q" (OuterVolumeSpecName: "kube-api-access-9m56q") pod "3850786b-f9ee-4ecb-a15e-ccad25577f01" (UID: "3850786b-f9ee-4ecb-a15e-ccad25577f01"). InnerVolumeSpecName "kube-api-access-9m56q". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:29:22 crc kubenswrapper[4558]: I0120 17:29:22.650719 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v82js\" (UniqueName: \"kubernetes.io/projected/854a193a-de08-44cb-967f-079392fa6680-kube-api-access-v82js\") pod \"root-account-create-update-w6cqr\" (UID: \"854a193a-de08-44cb-967f-079392fa6680\") " pod="openstack-kuttl-tests/root-account-create-update-w6cqr" Jan 20 17:29:22 crc kubenswrapper[4558]: I0120 17:29:22.736339 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3850786b-f9ee-4ecb-a15e-ccad25577f01-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:29:22 crc kubenswrapper[4558]: I0120 17:29:22.736363 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9m56q\" (UniqueName: \"kubernetes.io/projected/3850786b-f9ee-4ecb-a15e-ccad25577f01-kube-api-access-9m56q\") on node \"crc\" DevicePath \"\"" Jan 20 17:29:22 crc kubenswrapper[4558]: I0120 17:29:22.776369 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-w6cqr" Jan 20 17:29:23 crc kubenswrapper[4558]: I0120 17:29:23.113657 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-19e0-account-create-update-x5cqg" event={"ID":"3850786b-f9ee-4ecb-a15e-ccad25577f01","Type":"ContainerDied","Data":"f3f9d6f99fd702a3664102c557f0f867420fbff37d9ed9bd08e20deafa27bc96"} Jan 20 17:29:23 crc kubenswrapper[4558]: I0120 17:29:23.113900 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f3f9d6f99fd702a3664102c557f0f867420fbff37d9ed9bd08e20deafa27bc96" Jan 20 17:29:23 crc kubenswrapper[4558]: I0120 17:29:23.113692 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-19e0-account-create-update-x5cqg" Jan 20 17:29:23 crc kubenswrapper[4558]: I0120 17:29:23.115228 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-create-5jmnq" event={"ID":"eaa89d12-4192-4a41-b6f5-fffc5e1121e0","Type":"ContainerDied","Data":"c8dd0a4a2a29182f5ae0438e0020594997238d6a9731972ef5852237fd149568"} Jan 20 17:29:23 crc kubenswrapper[4558]: I0120 17:29:23.115288 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-db-create-5jmnq" Jan 20 17:29:23 crc kubenswrapper[4558]: I0120 17:29:23.115292 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c8dd0a4a2a29182f5ae0438e0020594997238d6a9731972ef5852237fd149568" Jan 20 17:29:23 crc kubenswrapper[4558]: W0120 17:29:23.178101 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod854a193a_de08_44cb_967f_079392fa6680.slice/crio-a835e5d8972d1be17133a9a8c2182b4850ce3cca797b34c6dda193bf8204f49e WatchSource:0}: Error finding container a835e5d8972d1be17133a9a8c2182b4850ce3cca797b34c6dda193bf8204f49e: Status 404 returned error can't find the container with id a835e5d8972d1be17133a9a8c2182b4850ce3cca797b34c6dda193bf8204f49e Jan 20 17:29:23 crc kubenswrapper[4558]: I0120 17:29:23.179613 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-w6cqr"] Jan 20 17:29:23 crc kubenswrapper[4558]: I0120 17:29:23.663923 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-db-create-dfmsf"] Jan 20 17:29:23 crc kubenswrapper[4558]: E0120 17:29:23.664650 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3850786b-f9ee-4ecb-a15e-ccad25577f01" containerName="mariadb-account-create-update" Jan 20 17:29:23 crc kubenswrapper[4558]: I0120 17:29:23.664665 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="3850786b-f9ee-4ecb-a15e-ccad25577f01" containerName="mariadb-account-create-update" Jan 20 17:29:23 crc kubenswrapper[4558]: E0120 17:29:23.664686 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eaa89d12-4192-4a41-b6f5-fffc5e1121e0" containerName="mariadb-database-create" Jan 20 17:29:23 crc kubenswrapper[4558]: I0120 17:29:23.664693 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="eaa89d12-4192-4a41-b6f5-fffc5e1121e0" containerName="mariadb-database-create" Jan 20 17:29:23 crc kubenswrapper[4558]: I0120 17:29:23.664903 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="eaa89d12-4192-4a41-b6f5-fffc5e1121e0" containerName="mariadb-database-create" Jan 20 17:29:23 crc kubenswrapper[4558]: I0120 17:29:23.664927 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="3850786b-f9ee-4ecb-a15e-ccad25577f01" containerName="mariadb-account-create-update" Jan 20 17:29:23 crc kubenswrapper[4558]: I0120 17:29:23.665551 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-db-create-dfmsf" Jan 20 17:29:23 crc kubenswrapper[4558]: I0120 17:29:23.672106 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-db-create-dfmsf"] Jan 20 17:29:23 crc kubenswrapper[4558]: I0120 17:29:23.756398 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/adf0bec9-f1a7-41d3-a4ef-28573981892f-operator-scripts\") pod \"keystone-db-create-dfmsf\" (UID: \"adf0bec9-f1a7-41d3-a4ef-28573981892f\") " pod="openstack-kuttl-tests/keystone-db-create-dfmsf" Jan 20 17:29:23 crc kubenswrapper[4558]: I0120 17:29:23.756455 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m9nmj\" (UniqueName: \"kubernetes.io/projected/adf0bec9-f1a7-41d3-a4ef-28573981892f-kube-api-access-m9nmj\") pod \"keystone-db-create-dfmsf\" (UID: \"adf0bec9-f1a7-41d3-a4ef-28573981892f\") " pod="openstack-kuttl-tests/keystone-db-create-dfmsf" Jan 20 17:29:23 crc kubenswrapper[4558]: I0120 17:29:23.766838 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-2d06-account-create-update-ht2vp"] Jan 20 17:29:23 crc kubenswrapper[4558]: I0120 17:29:23.767881 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-2d06-account-create-update-ht2vp" Jan 20 17:29:23 crc kubenswrapper[4558]: I0120 17:29:23.769773 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-db-secret" Jan 20 17:29:23 crc kubenswrapper[4558]: I0120 17:29:23.772931 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-2d06-account-create-update-ht2vp"] Jan 20 17:29:23 crc kubenswrapper[4558]: I0120 17:29:23.858251 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/adf0bec9-f1a7-41d3-a4ef-28573981892f-operator-scripts\") pod \"keystone-db-create-dfmsf\" (UID: \"adf0bec9-f1a7-41d3-a4ef-28573981892f\") " pod="openstack-kuttl-tests/keystone-db-create-dfmsf" Jan 20 17:29:23 crc kubenswrapper[4558]: I0120 17:29:23.858337 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m9nmj\" (UniqueName: \"kubernetes.io/projected/adf0bec9-f1a7-41d3-a4ef-28573981892f-kube-api-access-m9nmj\") pod \"keystone-db-create-dfmsf\" (UID: \"adf0bec9-f1a7-41d3-a4ef-28573981892f\") " pod="openstack-kuttl-tests/keystone-db-create-dfmsf" Jan 20 17:29:23 crc kubenswrapper[4558]: I0120 17:29:23.858432 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ed722c75-6f59-4f89-a7fd-c9e2e6c402b3-operator-scripts\") pod \"keystone-2d06-account-create-update-ht2vp\" (UID: \"ed722c75-6f59-4f89-a7fd-c9e2e6c402b3\") " pod="openstack-kuttl-tests/keystone-2d06-account-create-update-ht2vp" Jan 20 17:29:23 crc kubenswrapper[4558]: I0120 17:29:23.858511 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kk9bq\" (UniqueName: \"kubernetes.io/projected/ed722c75-6f59-4f89-a7fd-c9e2e6c402b3-kube-api-access-kk9bq\") pod \"keystone-2d06-account-create-update-ht2vp\" (UID: \"ed722c75-6f59-4f89-a7fd-c9e2e6c402b3\") " 
pod="openstack-kuttl-tests/keystone-2d06-account-create-update-ht2vp" Jan 20 17:29:23 crc kubenswrapper[4558]: I0120 17:29:23.859508 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/adf0bec9-f1a7-41d3-a4ef-28573981892f-operator-scripts\") pod \"keystone-db-create-dfmsf\" (UID: \"adf0bec9-f1a7-41d3-a4ef-28573981892f\") " pod="openstack-kuttl-tests/keystone-db-create-dfmsf" Jan 20 17:29:23 crc kubenswrapper[4558]: I0120 17:29:23.880502 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m9nmj\" (UniqueName: \"kubernetes.io/projected/adf0bec9-f1a7-41d3-a4ef-28573981892f-kube-api-access-m9nmj\") pod \"keystone-db-create-dfmsf\" (UID: \"adf0bec9-f1a7-41d3-a4ef-28573981892f\") " pod="openstack-kuttl-tests/keystone-db-create-dfmsf" Jan 20 17:29:23 crc kubenswrapper[4558]: I0120 17:29:23.960234 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kk9bq\" (UniqueName: \"kubernetes.io/projected/ed722c75-6f59-4f89-a7fd-c9e2e6c402b3-kube-api-access-kk9bq\") pod \"keystone-2d06-account-create-update-ht2vp\" (UID: \"ed722c75-6f59-4f89-a7fd-c9e2e6c402b3\") " pod="openstack-kuttl-tests/keystone-2d06-account-create-update-ht2vp" Jan 20 17:29:23 crc kubenswrapper[4558]: I0120 17:29:23.960416 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ed722c75-6f59-4f89-a7fd-c9e2e6c402b3-operator-scripts\") pod \"keystone-2d06-account-create-update-ht2vp\" (UID: \"ed722c75-6f59-4f89-a7fd-c9e2e6c402b3\") " pod="openstack-kuttl-tests/keystone-2d06-account-create-update-ht2vp" Jan 20 17:29:23 crc kubenswrapper[4558]: I0120 17:29:23.961076 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ed722c75-6f59-4f89-a7fd-c9e2e6c402b3-operator-scripts\") pod \"keystone-2d06-account-create-update-ht2vp\" (UID: \"ed722c75-6f59-4f89-a7fd-c9e2e6c402b3\") " pod="openstack-kuttl-tests/keystone-2d06-account-create-update-ht2vp" Jan 20 17:29:23 crc kubenswrapper[4558]: I0120 17:29:23.975789 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kk9bq\" (UniqueName: \"kubernetes.io/projected/ed722c75-6f59-4f89-a7fd-c9e2e6c402b3-kube-api-access-kk9bq\") pod \"keystone-2d06-account-create-update-ht2vp\" (UID: \"ed722c75-6f59-4f89-a7fd-c9e2e6c402b3\") " pod="openstack-kuttl-tests/keystone-2d06-account-create-update-ht2vp" Jan 20 17:29:23 crc kubenswrapper[4558]: I0120 17:29:23.985683 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-db-create-dfmsf" Jan 20 17:29:24 crc kubenswrapper[4558]: I0120 17:29:24.061732 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/8c74a8f5-be12-4ac4-b45c-d459801927ea-etc-swift\") pod \"swift-storage-0\" (UID: \"8c74a8f5-be12-4ac4-b45c-d459801927ea\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:29:24 crc kubenswrapper[4558]: E0120 17:29:24.062594 4558 projected.go:288] Couldn't get configMap openstack-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 20 17:29:24 crc kubenswrapper[4558]: E0120 17:29:24.062617 4558 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Jan 20 17:29:24 crc kubenswrapper[4558]: E0120 17:29:24.062660 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/8c74a8f5-be12-4ac4-b45c-d459801927ea-etc-swift podName:8c74a8f5-be12-4ac4-b45c-d459801927ea nodeName:}" failed. No retries permitted until 2026-01-20 17:29:32.062647244 +0000 UTC m=+2865.822985211 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/8c74a8f5-be12-4ac4-b45c-d459801927ea-etc-swift") pod "swift-storage-0" (UID: "8c74a8f5-be12-4ac4-b45c-d459801927ea") : configmap "swift-ring-files" not found Jan 20 17:29:24 crc kubenswrapper[4558]: I0120 17:29:24.096100 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-2d06-account-create-update-ht2vp" Jan 20 17:29:24 crc kubenswrapper[4558]: I0120 17:29:24.102950 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/placement-db-create-b82kr"] Jan 20 17:29:24 crc kubenswrapper[4558]: I0120 17:29:24.105730 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-db-create-b82kr" Jan 20 17:29:24 crc kubenswrapper[4558]: I0120 17:29:24.115968 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-db-create-b82kr"] Jan 20 17:29:24 crc kubenswrapper[4558]: I0120 17:29:24.127961 4558 generic.go:334] "Generic (PLEG): container finished" podID="854a193a-de08-44cb-967f-079392fa6680" containerID="6a2e345da9c0d3bd2685dcab70d32403d6708298dcd5c04143e21d6e1aaaf4d5" exitCode=0 Jan 20 17:29:24 crc kubenswrapper[4558]: I0120 17:29:24.128005 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-w6cqr" event={"ID":"854a193a-de08-44cb-967f-079392fa6680","Type":"ContainerDied","Data":"6a2e345da9c0d3bd2685dcab70d32403d6708298dcd5c04143e21d6e1aaaf4d5"} Jan 20 17:29:24 crc kubenswrapper[4558]: I0120 17:29:24.128037 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-w6cqr" event={"ID":"854a193a-de08-44cb-967f-079392fa6680","Type":"ContainerStarted","Data":"a835e5d8972d1be17133a9a8c2182b4850ce3cca797b34c6dda193bf8204f49e"} Jan 20 17:29:24 crc kubenswrapper[4558]: I0120 17:29:24.163038 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dnpth\" (UniqueName: \"kubernetes.io/projected/eb025a4d-55a2-4b4d-becb-6250cdd0055b-kube-api-access-dnpth\") pod \"placement-db-create-b82kr\" (UID: \"eb025a4d-55a2-4b4d-becb-6250cdd0055b\") " pod="openstack-kuttl-tests/placement-db-create-b82kr" Jan 20 17:29:24 crc kubenswrapper[4558]: I0120 17:29:24.163101 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eb025a4d-55a2-4b4d-becb-6250cdd0055b-operator-scripts\") pod \"placement-db-create-b82kr\" (UID: \"eb025a4d-55a2-4b4d-becb-6250cdd0055b\") " pod="openstack-kuttl-tests/placement-db-create-b82kr" Jan 20 17:29:24 crc kubenswrapper[4558]: I0120 17:29:24.212475 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/placement-61de-account-create-update-467z8"] Jan 20 17:29:24 crc kubenswrapper[4558]: I0120 17:29:24.213905 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-61de-account-create-update-467z8" Jan 20 17:29:24 crc kubenswrapper[4558]: I0120 17:29:24.216887 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"placement-db-secret" Jan 20 17:29:24 crc kubenswrapper[4558]: I0120 17:29:24.232273 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-61de-account-create-update-467z8"] Jan 20 17:29:24 crc kubenswrapper[4558]: I0120 17:29:24.264994 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5klm6\" (UniqueName: \"kubernetes.io/projected/6c4c8564-f383-46e0-8bc9-d7b68aaa5bcc-kube-api-access-5klm6\") pod \"placement-61de-account-create-update-467z8\" (UID: \"6c4c8564-f383-46e0-8bc9-d7b68aaa5bcc\") " pod="openstack-kuttl-tests/placement-61de-account-create-update-467z8" Jan 20 17:29:24 crc kubenswrapper[4558]: I0120 17:29:24.265141 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dnpth\" (UniqueName: \"kubernetes.io/projected/eb025a4d-55a2-4b4d-becb-6250cdd0055b-kube-api-access-dnpth\") pod \"placement-db-create-b82kr\" (UID: \"eb025a4d-55a2-4b4d-becb-6250cdd0055b\") " pod="openstack-kuttl-tests/placement-db-create-b82kr" Jan 20 17:29:24 crc kubenswrapper[4558]: I0120 17:29:24.265251 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eb025a4d-55a2-4b4d-becb-6250cdd0055b-operator-scripts\") pod \"placement-db-create-b82kr\" (UID: \"eb025a4d-55a2-4b4d-becb-6250cdd0055b\") " pod="openstack-kuttl-tests/placement-db-create-b82kr" Jan 20 17:29:24 crc kubenswrapper[4558]: I0120 17:29:24.265290 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6c4c8564-f383-46e0-8bc9-d7b68aaa5bcc-operator-scripts\") pod \"placement-61de-account-create-update-467z8\" (UID: \"6c4c8564-f383-46e0-8bc9-d7b68aaa5bcc\") " pod="openstack-kuttl-tests/placement-61de-account-create-update-467z8" Jan 20 17:29:24 crc kubenswrapper[4558]: I0120 17:29:24.266186 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eb025a4d-55a2-4b4d-becb-6250cdd0055b-operator-scripts\") pod \"placement-db-create-b82kr\" (UID: \"eb025a4d-55a2-4b4d-becb-6250cdd0055b\") " pod="openstack-kuttl-tests/placement-db-create-b82kr" Jan 20 17:29:24 crc kubenswrapper[4558]: I0120 17:29:24.282274 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dnpth\" (UniqueName: \"kubernetes.io/projected/eb025a4d-55a2-4b4d-becb-6250cdd0055b-kube-api-access-dnpth\") pod \"placement-db-create-b82kr\" (UID: \"eb025a4d-55a2-4b4d-becb-6250cdd0055b\") " pod="openstack-kuttl-tests/placement-db-create-b82kr" Jan 20 17:29:24 crc kubenswrapper[4558]: I0120 17:29:24.366680 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6c4c8564-f383-46e0-8bc9-d7b68aaa5bcc-operator-scripts\") pod \"placement-61de-account-create-update-467z8\" (UID: \"6c4c8564-f383-46e0-8bc9-d7b68aaa5bcc\") " pod="openstack-kuttl-tests/placement-61de-account-create-update-467z8" Jan 20 17:29:24 crc kubenswrapper[4558]: I0120 17:29:24.366774 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-5klm6\" (UniqueName: \"kubernetes.io/projected/6c4c8564-f383-46e0-8bc9-d7b68aaa5bcc-kube-api-access-5klm6\") pod \"placement-61de-account-create-update-467z8\" (UID: \"6c4c8564-f383-46e0-8bc9-d7b68aaa5bcc\") " pod="openstack-kuttl-tests/placement-61de-account-create-update-467z8" Jan 20 17:29:24 crc kubenswrapper[4558]: I0120 17:29:24.367600 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6c4c8564-f383-46e0-8bc9-d7b68aaa5bcc-operator-scripts\") pod \"placement-61de-account-create-update-467z8\" (UID: \"6c4c8564-f383-46e0-8bc9-d7b68aaa5bcc\") " pod="openstack-kuttl-tests/placement-61de-account-create-update-467z8" Jan 20 17:29:24 crc kubenswrapper[4558]: I0120 17:29:24.390046 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5klm6\" (UniqueName: \"kubernetes.io/projected/6c4c8564-f383-46e0-8bc9-d7b68aaa5bcc-kube-api-access-5klm6\") pod \"placement-61de-account-create-update-467z8\" (UID: \"6c4c8564-f383-46e0-8bc9-d7b68aaa5bcc\") " pod="openstack-kuttl-tests/placement-61de-account-create-update-467z8" Jan 20 17:29:24 crc kubenswrapper[4558]: I0120 17:29:24.419833 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-db-create-dfmsf"] Jan 20 17:29:24 crc kubenswrapper[4558]: W0120 17:29:24.424914 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podadf0bec9_f1a7_41d3_a4ef_28573981892f.slice/crio-5feb49fd4ece989adf865c405a855b52a9db4528b64f4a3082b761cd18ec83e3 WatchSource:0}: Error finding container 5feb49fd4ece989adf865c405a855b52a9db4528b64f4a3082b761cd18ec83e3: Status 404 returned error can't find the container with id 5feb49fd4ece989adf865c405a855b52a9db4528b64f4a3082b761cd18ec83e3 Jan 20 17:29:24 crc kubenswrapper[4558]: I0120 17:29:24.462865 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-db-create-b82kr" Jan 20 17:29:24 crc kubenswrapper[4558]: I0120 17:29:24.522550 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-2d06-account-create-update-ht2vp"] Jan 20 17:29:24 crc kubenswrapper[4558]: W0120 17:29:24.529996 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poded722c75_6f59_4f89_a7fd_c9e2e6c402b3.slice/crio-b6044f358cdff7a9239b2e0cb826d4acc89d701a50bed7c56690ca0925ab5b0e WatchSource:0}: Error finding container b6044f358cdff7a9239b2e0cb826d4acc89d701a50bed7c56690ca0925ab5b0e: Status 404 returned error can't find the container with id b6044f358cdff7a9239b2e0cb826d4acc89d701a50bed7c56690ca0925ab5b0e Jan 20 17:29:24 crc kubenswrapper[4558]: I0120 17:29:24.537673 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-61de-account-create-update-467z8" Jan 20 17:29:24 crc kubenswrapper[4558]: I0120 17:29:24.711709 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-db-sync-cmhmv"] Jan 20 17:29:24 crc kubenswrapper[4558]: I0120 17:29:24.713103 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-db-sync-cmhmv" Jan 20 17:29:24 crc kubenswrapper[4558]: I0120 17:29:24.716153 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-config-data" Jan 20 17:29:24 crc kubenswrapper[4558]: I0120 17:29:24.716649 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-glance-dockercfg-bsssw" Jan 20 17:29:24 crc kubenswrapper[4558]: I0120 17:29:24.722420 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-db-sync-cmhmv"] Jan 20 17:29:24 crc kubenswrapper[4558]: I0120 17:29:24.775781 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/40b11e15-3f6e-4fb0-9a1f-e4f145f9c5b9-db-sync-config-data\") pod \"glance-db-sync-cmhmv\" (UID: \"40b11e15-3f6e-4fb0-9a1f-e4f145f9c5b9\") " pod="openstack-kuttl-tests/glance-db-sync-cmhmv" Jan 20 17:29:24 crc kubenswrapper[4558]: I0120 17:29:24.776220 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40b11e15-3f6e-4fb0-9a1f-e4f145f9c5b9-combined-ca-bundle\") pod \"glance-db-sync-cmhmv\" (UID: \"40b11e15-3f6e-4fb0-9a1f-e4f145f9c5b9\") " pod="openstack-kuttl-tests/glance-db-sync-cmhmv" Jan 20 17:29:24 crc kubenswrapper[4558]: I0120 17:29:24.776316 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40b11e15-3f6e-4fb0-9a1f-e4f145f9c5b9-config-data\") pod \"glance-db-sync-cmhmv\" (UID: \"40b11e15-3f6e-4fb0-9a1f-e4f145f9c5b9\") " pod="openstack-kuttl-tests/glance-db-sync-cmhmv" Jan 20 17:29:24 crc kubenswrapper[4558]: I0120 17:29:24.776352 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qgqqt\" (UniqueName: \"kubernetes.io/projected/40b11e15-3f6e-4fb0-9a1f-e4f145f9c5b9-kube-api-access-qgqqt\") pod \"glance-db-sync-cmhmv\" (UID: \"40b11e15-3f6e-4fb0-9a1f-e4f145f9c5b9\") " pod="openstack-kuttl-tests/glance-db-sync-cmhmv" Jan 20 17:29:24 crc kubenswrapper[4558]: I0120 17:29:24.879017 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40b11e15-3f6e-4fb0-9a1f-e4f145f9c5b9-config-data\") pod \"glance-db-sync-cmhmv\" (UID: \"40b11e15-3f6e-4fb0-9a1f-e4f145f9c5b9\") " pod="openstack-kuttl-tests/glance-db-sync-cmhmv" Jan 20 17:29:24 crc kubenswrapper[4558]: I0120 17:29:24.879130 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qgqqt\" (UniqueName: \"kubernetes.io/projected/40b11e15-3f6e-4fb0-9a1f-e4f145f9c5b9-kube-api-access-qgqqt\") pod \"glance-db-sync-cmhmv\" (UID: \"40b11e15-3f6e-4fb0-9a1f-e4f145f9c5b9\") " pod="openstack-kuttl-tests/glance-db-sync-cmhmv" Jan 20 17:29:24 crc kubenswrapper[4558]: I0120 17:29:24.879406 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/40b11e15-3f6e-4fb0-9a1f-e4f145f9c5b9-db-sync-config-data\") pod \"glance-db-sync-cmhmv\" (UID: \"40b11e15-3f6e-4fb0-9a1f-e4f145f9c5b9\") " pod="openstack-kuttl-tests/glance-db-sync-cmhmv" Jan 20 17:29:24 crc kubenswrapper[4558]: I0120 17:29:24.879622 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40b11e15-3f6e-4fb0-9a1f-e4f145f9c5b9-combined-ca-bundle\") pod \"glance-db-sync-cmhmv\" (UID: \"40b11e15-3f6e-4fb0-9a1f-e4f145f9c5b9\") " pod="openstack-kuttl-tests/glance-db-sync-cmhmv" Jan 20 17:29:24 crc kubenswrapper[4558]: I0120 17:29:24.880850 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-db-create-b82kr"] Jan 20 17:29:24 crc kubenswrapper[4558]: I0120 17:29:24.887944 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/40b11e15-3f6e-4fb0-9a1f-e4f145f9c5b9-db-sync-config-data\") pod \"glance-db-sync-cmhmv\" (UID: \"40b11e15-3f6e-4fb0-9a1f-e4f145f9c5b9\") " pod="openstack-kuttl-tests/glance-db-sync-cmhmv" Jan 20 17:29:24 crc kubenswrapper[4558]: I0120 17:29:24.888054 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40b11e15-3f6e-4fb0-9a1f-e4f145f9c5b9-config-data\") pod \"glance-db-sync-cmhmv\" (UID: \"40b11e15-3f6e-4fb0-9a1f-e4f145f9c5b9\") " pod="openstack-kuttl-tests/glance-db-sync-cmhmv" Jan 20 17:29:24 crc kubenswrapper[4558]: I0120 17:29:24.888181 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40b11e15-3f6e-4fb0-9a1f-e4f145f9c5b9-combined-ca-bundle\") pod \"glance-db-sync-cmhmv\" (UID: \"40b11e15-3f6e-4fb0-9a1f-e4f145f9c5b9\") " pod="openstack-kuttl-tests/glance-db-sync-cmhmv" Jan 20 17:29:24 crc kubenswrapper[4558]: I0120 17:29:24.894107 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qgqqt\" (UniqueName: \"kubernetes.io/projected/40b11e15-3f6e-4fb0-9a1f-e4f145f9c5b9-kube-api-access-qgqqt\") pod \"glance-db-sync-cmhmv\" (UID: \"40b11e15-3f6e-4fb0-9a1f-e4f145f9c5b9\") " pod="openstack-kuttl-tests/glance-db-sync-cmhmv" Jan 20 17:29:24 crc kubenswrapper[4558]: I0120 17:29:24.982427 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-61de-account-create-update-467z8"] Jan 20 17:29:24 crc kubenswrapper[4558]: W0120 17:29:24.983138 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6c4c8564_f383_46e0_8bc9_d7b68aaa5bcc.slice/crio-29376bf96e8bf45d274c626105fde24404c3b607d48df6e806d134558b5bd500 WatchSource:0}: Error finding container 29376bf96e8bf45d274c626105fde24404c3b607d48df6e806d134558b5bd500: Status 404 returned error can't find the container with id 29376bf96e8bf45d274c626105fde24404c3b607d48df6e806d134558b5bd500 Jan 20 17:29:25 crc kubenswrapper[4558]: I0120 17:29:25.033081 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-db-sync-cmhmv" Jan 20 17:29:25 crc kubenswrapper[4558]: I0120 17:29:25.139956 4558 generic.go:334] "Generic (PLEG): container finished" podID="ed722c75-6f59-4f89-a7fd-c9e2e6c402b3" containerID="a8e3fc9bf9c4e7346570c5f357e43cdd9269a735e12bf5a5218a0b8161bcbfa1" exitCode=0 Jan 20 17:29:25 crc kubenswrapper[4558]: I0120 17:29:25.140032 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-2d06-account-create-update-ht2vp" event={"ID":"ed722c75-6f59-4f89-a7fd-c9e2e6c402b3","Type":"ContainerDied","Data":"a8e3fc9bf9c4e7346570c5f357e43cdd9269a735e12bf5a5218a0b8161bcbfa1"} Jan 20 17:29:25 crc kubenswrapper[4558]: I0120 17:29:25.140445 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-2d06-account-create-update-ht2vp" event={"ID":"ed722c75-6f59-4f89-a7fd-c9e2e6c402b3","Type":"ContainerStarted","Data":"b6044f358cdff7a9239b2e0cb826d4acc89d701a50bed7c56690ca0925ab5b0e"} Jan 20 17:29:25 crc kubenswrapper[4558]: I0120 17:29:25.145941 4558 generic.go:334] "Generic (PLEG): container finished" podID="adf0bec9-f1a7-41d3-a4ef-28573981892f" containerID="e6dbfcfe37cc1fc1b51a5b0ceb80a37987d1128a5d6564f93754a47973a5a2d5" exitCode=0 Jan 20 17:29:25 crc kubenswrapper[4558]: I0120 17:29:25.145996 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-create-dfmsf" event={"ID":"adf0bec9-f1a7-41d3-a4ef-28573981892f","Type":"ContainerDied","Data":"e6dbfcfe37cc1fc1b51a5b0ceb80a37987d1128a5d6564f93754a47973a5a2d5"} Jan 20 17:29:25 crc kubenswrapper[4558]: I0120 17:29:25.146020 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-create-dfmsf" event={"ID":"adf0bec9-f1a7-41d3-a4ef-28573981892f","Type":"ContainerStarted","Data":"5feb49fd4ece989adf865c405a855b52a9db4528b64f4a3082b761cd18ec83e3"} Jan 20 17:29:25 crc kubenswrapper[4558]: I0120 17:29:25.150629 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-create-b82kr" event={"ID":"eb025a4d-55a2-4b4d-becb-6250cdd0055b","Type":"ContainerStarted","Data":"ed22a8f15040fdb70a4928a92fa242110b06a6c1df1e015b23c8cb0337987b61"} Jan 20 17:29:25 crc kubenswrapper[4558]: I0120 17:29:25.150670 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-create-b82kr" event={"ID":"eb025a4d-55a2-4b4d-becb-6250cdd0055b","Type":"ContainerStarted","Data":"b374b534d52c45272b8ab1fe4745658a8f0534c8f5cfa7dcdda2d401d4f56266"} Jan 20 17:29:25 crc kubenswrapper[4558]: I0120 17:29:25.153406 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-61de-account-create-update-467z8" event={"ID":"6c4c8564-f383-46e0-8bc9-d7b68aaa5bcc","Type":"ContainerStarted","Data":"984dea855e8e7fe42eeed165d3a29ee0a673310ad202b53132d6f313fe890e3d"} Jan 20 17:29:25 crc kubenswrapper[4558]: I0120 17:29:25.153445 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-61de-account-create-update-467z8" event={"ID":"6c4c8564-f383-46e0-8bc9-d7b68aaa5bcc","Type":"ContainerStarted","Data":"29376bf96e8bf45d274c626105fde24404c3b607d48df6e806d134558b5bd500"} Jan 20 17:29:25 crc kubenswrapper[4558]: I0120 17:29:25.192713 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/placement-db-create-b82kr" podStartSLOduration=1.1926954730000001 podStartE2EDuration="1.192695473s" podCreationTimestamp="2026-01-20 17:29:24 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:29:25.185609769 +0000 UTC m=+2858.945947736" watchObservedRunningTime="2026-01-20 17:29:25.192695473 +0000 UTC m=+2858.953033440" Jan 20 17:29:25 crc kubenswrapper[4558]: I0120 17:29:25.208207 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/placement-61de-account-create-update-467z8" podStartSLOduration=1.2081906390000001 podStartE2EDuration="1.208190639s" podCreationTimestamp="2026-01-20 17:29:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:29:25.197925066 +0000 UTC m=+2858.958263023" watchObservedRunningTime="2026-01-20 17:29:25.208190639 +0000 UTC m=+2858.968528605" Jan 20 17:29:25 crc kubenswrapper[4558]: I0120 17:29:25.462612 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-db-sync-cmhmv"] Jan 20 17:29:25 crc kubenswrapper[4558]: I0120 17:29:25.466694 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-w6cqr" Jan 20 17:29:25 crc kubenswrapper[4558]: W0120 17:29:25.472376 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod40b11e15_3f6e_4fb0_9a1f_e4f145f9c5b9.slice/crio-3b3677a3436b6da93bb9635c06bb4a6df14154ccdba374fe4807179331c569a2 WatchSource:0}: Error finding container 3b3677a3436b6da93bb9635c06bb4a6df14154ccdba374fe4807179331c569a2: Status 404 returned error can't find the container with id 3b3677a3436b6da93bb9635c06bb4a6df14154ccdba374fe4807179331c569a2 Jan 20 17:29:25 crc kubenswrapper[4558]: I0120 17:29:25.488633 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v82js\" (UniqueName: \"kubernetes.io/projected/854a193a-de08-44cb-967f-079392fa6680-kube-api-access-v82js\") pod \"854a193a-de08-44cb-967f-079392fa6680\" (UID: \"854a193a-de08-44cb-967f-079392fa6680\") " Jan 20 17:29:25 crc kubenswrapper[4558]: I0120 17:29:25.489116 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/854a193a-de08-44cb-967f-079392fa6680-operator-scripts\") pod \"854a193a-de08-44cb-967f-079392fa6680\" (UID: \"854a193a-de08-44cb-967f-079392fa6680\") " Jan 20 17:29:25 crc kubenswrapper[4558]: I0120 17:29:25.490072 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/854a193a-de08-44cb-967f-079392fa6680-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "854a193a-de08-44cb-967f-079392fa6680" (UID: "854a193a-de08-44cb-967f-079392fa6680"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:29:25 crc kubenswrapper[4558]: I0120 17:29:25.490294 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/854a193a-de08-44cb-967f-079392fa6680-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:29:25 crc kubenswrapper[4558]: I0120 17:29:25.492367 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/854a193a-de08-44cb-967f-079392fa6680-kube-api-access-v82js" (OuterVolumeSpecName: "kube-api-access-v82js") pod "854a193a-de08-44cb-967f-079392fa6680" (UID: "854a193a-de08-44cb-967f-079392fa6680"). InnerVolumeSpecName "kube-api-access-v82js". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:29:25 crc kubenswrapper[4558]: I0120 17:29:25.592451 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v82js\" (UniqueName: \"kubernetes.io/projected/854a193a-de08-44cb-967f-079392fa6680-kube-api-access-v82js\") on node \"crc\" DevicePath \"\"" Jan 20 17:29:26 crc kubenswrapper[4558]: I0120 17:29:26.164966 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-w6cqr" event={"ID":"854a193a-de08-44cb-967f-079392fa6680","Type":"ContainerDied","Data":"a835e5d8972d1be17133a9a8c2182b4850ce3cca797b34c6dda193bf8204f49e"} Jan 20 17:29:26 crc kubenswrapper[4558]: I0120 17:29:26.165445 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a835e5d8972d1be17133a9a8c2182b4850ce3cca797b34c6dda193bf8204f49e" Jan 20 17:29:26 crc kubenswrapper[4558]: I0120 17:29:26.165040 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-w6cqr" Jan 20 17:29:26 crc kubenswrapper[4558]: I0120 17:29:26.167044 4558 generic.go:334] "Generic (PLEG): container finished" podID="eb025a4d-55a2-4b4d-becb-6250cdd0055b" containerID="ed22a8f15040fdb70a4928a92fa242110b06a6c1df1e015b23c8cb0337987b61" exitCode=0 Jan 20 17:29:26 crc kubenswrapper[4558]: I0120 17:29:26.167109 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-create-b82kr" event={"ID":"eb025a4d-55a2-4b4d-becb-6250cdd0055b","Type":"ContainerDied","Data":"ed22a8f15040fdb70a4928a92fa242110b06a6c1df1e015b23c8cb0337987b61"} Jan 20 17:29:26 crc kubenswrapper[4558]: I0120 17:29:26.169092 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-sync-cmhmv" event={"ID":"40b11e15-3f6e-4fb0-9a1f-e4f145f9c5b9","Type":"ContainerStarted","Data":"db8e8828d641c74a4518b290f587aa376715ea2305d752194da19a7d7e76d365"} Jan 20 17:29:26 crc kubenswrapper[4558]: I0120 17:29:26.169138 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-sync-cmhmv" event={"ID":"40b11e15-3f6e-4fb0-9a1f-e4f145f9c5b9","Type":"ContainerStarted","Data":"3b3677a3436b6da93bb9635c06bb4a6df14154ccdba374fe4807179331c569a2"} Jan 20 17:29:26 crc kubenswrapper[4558]: I0120 17:29:26.171575 4558 generic.go:334] "Generic (PLEG): container finished" podID="6c4c8564-f383-46e0-8bc9-d7b68aaa5bcc" containerID="984dea855e8e7fe42eeed165d3a29ee0a673310ad202b53132d6f313fe890e3d" exitCode=0 Jan 20 17:29:26 crc kubenswrapper[4558]: I0120 17:29:26.171677 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-61de-account-create-update-467z8" 
event={"ID":"6c4c8564-f383-46e0-8bc9-d7b68aaa5bcc","Type":"ContainerDied","Data":"984dea855e8e7fe42eeed165d3a29ee0a673310ad202b53132d6f313fe890e3d"} Jan 20 17:29:26 crc kubenswrapper[4558]: I0120 17:29:26.241267 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-db-sync-cmhmv" podStartSLOduration=2.241242734 podStartE2EDuration="2.241242734s" podCreationTimestamp="2026-01-20 17:29:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:29:26.22021212 +0000 UTC m=+2859.980550086" watchObservedRunningTime="2026-01-20 17:29:26.241242734 +0000 UTC m=+2860.001580701" Jan 20 17:29:26 crc kubenswrapper[4558]: I0120 17:29:26.577739 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-2d06-account-create-update-ht2vp" Jan 20 17:29:26 crc kubenswrapper[4558]: I0120 17:29:26.578289 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-db-create-dfmsf" Jan 20 17:29:26 crc kubenswrapper[4558]: I0120 17:29:26.621370 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m9nmj\" (UniqueName: \"kubernetes.io/projected/adf0bec9-f1a7-41d3-a4ef-28573981892f-kube-api-access-m9nmj\") pod \"adf0bec9-f1a7-41d3-a4ef-28573981892f\" (UID: \"adf0bec9-f1a7-41d3-a4ef-28573981892f\") " Jan 20 17:29:26 crc kubenswrapper[4558]: I0120 17:29:26.622105 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kk9bq\" (UniqueName: \"kubernetes.io/projected/ed722c75-6f59-4f89-a7fd-c9e2e6c402b3-kube-api-access-kk9bq\") pod \"ed722c75-6f59-4f89-a7fd-c9e2e6c402b3\" (UID: \"ed722c75-6f59-4f89-a7fd-c9e2e6c402b3\") " Jan 20 17:29:26 crc kubenswrapper[4558]: I0120 17:29:26.622357 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ed722c75-6f59-4f89-a7fd-c9e2e6c402b3-operator-scripts\") pod \"ed722c75-6f59-4f89-a7fd-c9e2e6c402b3\" (UID: \"ed722c75-6f59-4f89-a7fd-c9e2e6c402b3\") " Jan 20 17:29:26 crc kubenswrapper[4558]: I0120 17:29:26.622452 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/adf0bec9-f1a7-41d3-a4ef-28573981892f-operator-scripts\") pod \"adf0bec9-f1a7-41d3-a4ef-28573981892f\" (UID: \"adf0bec9-f1a7-41d3-a4ef-28573981892f\") " Jan 20 17:29:26 crc kubenswrapper[4558]: I0120 17:29:26.624576 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ed722c75-6f59-4f89-a7fd-c9e2e6c402b3-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ed722c75-6f59-4f89-a7fd-c9e2e6c402b3" (UID: "ed722c75-6f59-4f89-a7fd-c9e2e6c402b3"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:29:26 crc kubenswrapper[4558]: I0120 17:29:26.625379 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/adf0bec9-f1a7-41d3-a4ef-28573981892f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "adf0bec9-f1a7-41d3-a4ef-28573981892f" (UID: "adf0bec9-f1a7-41d3-a4ef-28573981892f"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:29:26 crc kubenswrapper[4558]: I0120 17:29:26.634993 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/adf0bec9-f1a7-41d3-a4ef-28573981892f-kube-api-access-m9nmj" (OuterVolumeSpecName: "kube-api-access-m9nmj") pod "adf0bec9-f1a7-41d3-a4ef-28573981892f" (UID: "adf0bec9-f1a7-41d3-a4ef-28573981892f"). InnerVolumeSpecName "kube-api-access-m9nmj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:29:26 crc kubenswrapper[4558]: I0120 17:29:26.636011 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ed722c75-6f59-4f89-a7fd-c9e2e6c402b3-kube-api-access-kk9bq" (OuterVolumeSpecName: "kube-api-access-kk9bq") pod "ed722c75-6f59-4f89-a7fd-c9e2e6c402b3" (UID: "ed722c75-6f59-4f89-a7fd-c9e2e6c402b3"). InnerVolumeSpecName "kube-api-access-kk9bq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:29:26 crc kubenswrapper[4558]: I0120 17:29:26.724974 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ed722c75-6f59-4f89-a7fd-c9e2e6c402b3-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:29:26 crc kubenswrapper[4558]: I0120 17:29:26.725011 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/adf0bec9-f1a7-41d3-a4ef-28573981892f-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:29:26 crc kubenswrapper[4558]: I0120 17:29:26.725024 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m9nmj\" (UniqueName: \"kubernetes.io/projected/adf0bec9-f1a7-41d3-a4ef-28573981892f-kube-api-access-m9nmj\") on node \"crc\" DevicePath \"\"" Jan 20 17:29:26 crc kubenswrapper[4558]: I0120 17:29:26.725038 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kk9bq\" (UniqueName: \"kubernetes.io/projected/ed722c75-6f59-4f89-a7fd-c9e2e6c402b3-kube-api-access-kk9bq\") on node \"crc\" DevicePath \"\"" Jan 20 17:29:27 crc kubenswrapper[4558]: I0120 17:29:27.185184 4558 generic.go:334] "Generic (PLEG): container finished" podID="31071f28-1020-4bbd-b122-c1dcf9a67a14" containerID="3a3e340167ccce72bbe411439f7909473c7e135797ac4b0db65fe1584f2e6946" exitCode=0 Jan 20 17:29:27 crc kubenswrapper[4558]: I0120 17:29:27.185204 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-ring-rebalance-zzttm" event={"ID":"31071f28-1020-4bbd-b122-c1dcf9a67a14","Type":"ContainerDied","Data":"3a3e340167ccce72bbe411439f7909473c7e135797ac4b0db65fe1584f2e6946"} Jan 20 17:29:27 crc kubenswrapper[4558]: I0120 17:29:27.188498 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-2d06-account-create-update-ht2vp" event={"ID":"ed722c75-6f59-4f89-a7fd-c9e2e6c402b3","Type":"ContainerDied","Data":"b6044f358cdff7a9239b2e0cb826d4acc89d701a50bed7c56690ca0925ab5b0e"} Jan 20 17:29:27 crc kubenswrapper[4558]: I0120 17:29:27.188566 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b6044f358cdff7a9239b2e0cb826d4acc89d701a50bed7c56690ca0925ab5b0e" Jan 20 17:29:27 crc kubenswrapper[4558]: I0120 17:29:27.188621 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-2d06-account-create-update-ht2vp" Jan 20 17:29:27 crc kubenswrapper[4558]: I0120 17:29:27.190484 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-db-create-dfmsf" Jan 20 17:29:27 crc kubenswrapper[4558]: I0120 17:29:27.190517 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-create-dfmsf" event={"ID":"adf0bec9-f1a7-41d3-a4ef-28573981892f","Type":"ContainerDied","Data":"5feb49fd4ece989adf865c405a855b52a9db4528b64f4a3082b761cd18ec83e3"} Jan 20 17:29:27 crc kubenswrapper[4558]: I0120 17:29:27.190573 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5feb49fd4ece989adf865c405a855b52a9db4528b64f4a3082b761cd18ec83e3" Jan 20 17:29:27 crc kubenswrapper[4558]: I0120 17:29:27.330039 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 17:29:27 crc kubenswrapper[4558]: I0120 17:29:27.330152 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 17:29:27 crc kubenswrapper[4558]: I0120 17:29:27.330527 4558 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" Jan 20 17:29:27 crc kubenswrapper[4558]: I0120 17:29:27.331057 4558 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f275e9f5de330b3c79316d5871106c6bcf90b698e0744c5beb28da45c336b36d"} pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 20 17:29:27 crc kubenswrapper[4558]: I0120 17:29:27.331125 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" containerID="cri-o://f275e9f5de330b3c79316d5871106c6bcf90b698e0744c5beb28da45c336b36d" gracePeriod=600 Jan 20 17:29:27 crc kubenswrapper[4558]: E0120 17:29:27.452444 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:29:27 crc kubenswrapper[4558]: I0120 17:29:27.568534 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-61de-account-create-update-467z8" Jan 20 17:29:27 crc kubenswrapper[4558]: I0120 17:29:27.573952 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-db-create-b82kr" Jan 20 17:29:27 crc kubenswrapper[4558]: I0120 17:29:27.639717 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6c4c8564-f383-46e0-8bc9-d7b68aaa5bcc-operator-scripts\") pod \"6c4c8564-f383-46e0-8bc9-d7b68aaa5bcc\" (UID: \"6c4c8564-f383-46e0-8bc9-d7b68aaa5bcc\") " Jan 20 17:29:27 crc kubenswrapper[4558]: I0120 17:29:27.639851 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eb025a4d-55a2-4b4d-becb-6250cdd0055b-operator-scripts\") pod \"eb025a4d-55a2-4b4d-becb-6250cdd0055b\" (UID: \"eb025a4d-55a2-4b4d-becb-6250cdd0055b\") " Jan 20 17:29:27 crc kubenswrapper[4558]: I0120 17:29:27.639880 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dnpth\" (UniqueName: \"kubernetes.io/projected/eb025a4d-55a2-4b4d-becb-6250cdd0055b-kube-api-access-dnpth\") pod \"eb025a4d-55a2-4b4d-becb-6250cdd0055b\" (UID: \"eb025a4d-55a2-4b4d-becb-6250cdd0055b\") " Jan 20 17:29:27 crc kubenswrapper[4558]: I0120 17:29:27.640043 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5klm6\" (UniqueName: \"kubernetes.io/projected/6c4c8564-f383-46e0-8bc9-d7b68aaa5bcc-kube-api-access-5klm6\") pod \"6c4c8564-f383-46e0-8bc9-d7b68aaa5bcc\" (UID: \"6c4c8564-f383-46e0-8bc9-d7b68aaa5bcc\") " Jan 20 17:29:27 crc kubenswrapper[4558]: I0120 17:29:27.640723 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eb025a4d-55a2-4b4d-becb-6250cdd0055b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "eb025a4d-55a2-4b4d-becb-6250cdd0055b" (UID: "eb025a4d-55a2-4b4d-becb-6250cdd0055b"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:29:27 crc kubenswrapper[4558]: I0120 17:29:27.640733 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6c4c8564-f383-46e0-8bc9-d7b68aaa5bcc-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "6c4c8564-f383-46e0-8bc9-d7b68aaa5bcc" (UID: "6c4c8564-f383-46e0-8bc9-d7b68aaa5bcc"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:29:27 crc kubenswrapper[4558]: I0120 17:29:27.642342 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eb025a4d-55a2-4b4d-becb-6250cdd0055b-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:29:27 crc kubenswrapper[4558]: I0120 17:29:27.642368 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6c4c8564-f383-46e0-8bc9-d7b68aaa5bcc-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:29:27 crc kubenswrapper[4558]: I0120 17:29:27.645454 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6c4c8564-f383-46e0-8bc9-d7b68aaa5bcc-kube-api-access-5klm6" (OuterVolumeSpecName: "kube-api-access-5klm6") pod "6c4c8564-f383-46e0-8bc9-d7b68aaa5bcc" (UID: "6c4c8564-f383-46e0-8bc9-d7b68aaa5bcc"). InnerVolumeSpecName "kube-api-access-5klm6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:29:27 crc kubenswrapper[4558]: I0120 17:29:27.645862 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eb025a4d-55a2-4b4d-becb-6250cdd0055b-kube-api-access-dnpth" (OuterVolumeSpecName: "kube-api-access-dnpth") pod "eb025a4d-55a2-4b4d-becb-6250cdd0055b" (UID: "eb025a4d-55a2-4b4d-becb-6250cdd0055b"). InnerVolumeSpecName "kube-api-access-dnpth". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:29:27 crc kubenswrapper[4558]: I0120 17:29:27.744412 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dnpth\" (UniqueName: \"kubernetes.io/projected/eb025a4d-55a2-4b4d-becb-6250cdd0055b-kube-api-access-dnpth\") on node \"crc\" DevicePath \"\"" Jan 20 17:29:27 crc kubenswrapper[4558]: I0120 17:29:27.744454 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5klm6\" (UniqueName: \"kubernetes.io/projected/6c4c8564-f383-46e0-8bc9-d7b68aaa5bcc-kube-api-access-5klm6\") on node \"crc\" DevicePath \"\"" Jan 20 17:29:28 crc kubenswrapper[4558]: I0120 17:29:28.219186 4558 generic.go:334] "Generic (PLEG): container finished" podID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerID="f275e9f5de330b3c79316d5871106c6bcf90b698e0744c5beb28da45c336b36d" exitCode=0 Jan 20 17:29:28 crc kubenswrapper[4558]: I0120 17:29:28.219246 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerDied","Data":"f275e9f5de330b3c79316d5871106c6bcf90b698e0744c5beb28da45c336b36d"} Jan 20 17:29:28 crc kubenswrapper[4558]: I0120 17:29:28.220529 4558 scope.go:117] "RemoveContainer" containerID="2d144cfe8b47aa78680f1723838f69b34b4c8966312c6a5804d87ce86bc40c18" Jan 20 17:29:28 crc kubenswrapper[4558]: I0120 17:29:28.223534 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-db-create-b82kr" Jan 20 17:29:28 crc kubenswrapper[4558]: I0120 17:29:28.225576 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-61de-account-create-update-467z8" Jan 20 17:29:28 crc kubenswrapper[4558]: I0120 17:29:28.225629 4558 scope.go:117] "RemoveContainer" containerID="f275e9f5de330b3c79316d5871106c6bcf90b698e0744c5beb28da45c336b36d" Jan 20 17:29:28 crc kubenswrapper[4558]: I0120 17:29:28.223555 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-create-b82kr" event={"ID":"eb025a4d-55a2-4b4d-becb-6250cdd0055b","Type":"ContainerDied","Data":"b374b534d52c45272b8ab1fe4745658a8f0534c8f5cfa7dcdda2d401d4f56266"} Jan 20 17:29:28 crc kubenswrapper[4558]: I0120 17:29:28.225691 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b374b534d52c45272b8ab1fe4745658a8f0534c8f5cfa7dcdda2d401d4f56266" Jan 20 17:29:28 crc kubenswrapper[4558]: I0120 17:29:28.225709 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-61de-account-create-update-467z8" event={"ID":"6c4c8564-f383-46e0-8bc9-d7b68aaa5bcc","Type":"ContainerDied","Data":"29376bf96e8bf45d274c626105fde24404c3b607d48df6e806d134558b5bd500"} Jan 20 17:29:28 crc kubenswrapper[4558]: I0120 17:29:28.225728 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="29376bf96e8bf45d274c626105fde24404c3b607d48df6e806d134558b5bd500" Jan 20 17:29:28 crc kubenswrapper[4558]: E0120 17:29:28.226302 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:29:28 crc kubenswrapper[4558]: I0120 17:29:28.531294 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/swift-ring-rebalance-zzttm" Jan 20 17:29:28 crc kubenswrapper[4558]: I0120 17:29:28.556280 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l874w\" (UniqueName: \"kubernetes.io/projected/31071f28-1020-4bbd-b122-c1dcf9a67a14-kube-api-access-l874w\") pod \"31071f28-1020-4bbd-b122-c1dcf9a67a14\" (UID: \"31071f28-1020-4bbd-b122-c1dcf9a67a14\") " Jan 20 17:29:28 crc kubenswrapper[4558]: I0120 17:29:28.556376 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/31071f28-1020-4bbd-b122-c1dcf9a67a14-scripts\") pod \"31071f28-1020-4bbd-b122-c1dcf9a67a14\" (UID: \"31071f28-1020-4bbd-b122-c1dcf9a67a14\") " Jan 20 17:29:28 crc kubenswrapper[4558]: I0120 17:29:28.556433 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31071f28-1020-4bbd-b122-c1dcf9a67a14-combined-ca-bundle\") pod \"31071f28-1020-4bbd-b122-c1dcf9a67a14\" (UID: \"31071f28-1020-4bbd-b122-c1dcf9a67a14\") " Jan 20 17:29:28 crc kubenswrapper[4558]: I0120 17:29:28.556483 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/31071f28-1020-4bbd-b122-c1dcf9a67a14-ring-data-devices\") pod \"31071f28-1020-4bbd-b122-c1dcf9a67a14\" (UID: \"31071f28-1020-4bbd-b122-c1dcf9a67a14\") " Jan 20 17:29:28 crc kubenswrapper[4558]: I0120 17:29:28.556596 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/31071f28-1020-4bbd-b122-c1dcf9a67a14-dispersionconf\") pod \"31071f28-1020-4bbd-b122-c1dcf9a67a14\" (UID: \"31071f28-1020-4bbd-b122-c1dcf9a67a14\") " Jan 20 17:29:28 crc kubenswrapper[4558]: I0120 17:29:28.556704 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/31071f28-1020-4bbd-b122-c1dcf9a67a14-etc-swift\") pod \"31071f28-1020-4bbd-b122-c1dcf9a67a14\" (UID: \"31071f28-1020-4bbd-b122-c1dcf9a67a14\") " Jan 20 17:29:28 crc kubenswrapper[4558]: I0120 17:29:28.556730 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/31071f28-1020-4bbd-b122-c1dcf9a67a14-swiftconf\") pod \"31071f28-1020-4bbd-b122-c1dcf9a67a14\" (UID: \"31071f28-1020-4bbd-b122-c1dcf9a67a14\") " Jan 20 17:29:28 crc kubenswrapper[4558]: I0120 17:29:28.558663 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31071f28-1020-4bbd-b122-c1dcf9a67a14-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "31071f28-1020-4bbd-b122-c1dcf9a67a14" (UID: "31071f28-1020-4bbd-b122-c1dcf9a67a14"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:29:28 crc kubenswrapper[4558]: I0120 17:29:28.561021 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/31071f28-1020-4bbd-b122-c1dcf9a67a14-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "31071f28-1020-4bbd-b122-c1dcf9a67a14" (UID: "31071f28-1020-4bbd-b122-c1dcf9a67a14"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:29:28 crc kubenswrapper[4558]: I0120 17:29:28.561078 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31071f28-1020-4bbd-b122-c1dcf9a67a14-kube-api-access-l874w" (OuterVolumeSpecName: "kube-api-access-l874w") pod "31071f28-1020-4bbd-b122-c1dcf9a67a14" (UID: "31071f28-1020-4bbd-b122-c1dcf9a67a14"). InnerVolumeSpecName "kube-api-access-l874w". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:29:28 crc kubenswrapper[4558]: I0120 17:29:28.571312 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31071f28-1020-4bbd-b122-c1dcf9a67a14-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "31071f28-1020-4bbd-b122-c1dcf9a67a14" (UID: "31071f28-1020-4bbd-b122-c1dcf9a67a14"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:29:28 crc kubenswrapper[4558]: I0120 17:29:28.578269 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31071f28-1020-4bbd-b122-c1dcf9a67a14-scripts" (OuterVolumeSpecName: "scripts") pod "31071f28-1020-4bbd-b122-c1dcf9a67a14" (UID: "31071f28-1020-4bbd-b122-c1dcf9a67a14"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:29:28 crc kubenswrapper[4558]: I0120 17:29:28.582578 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31071f28-1020-4bbd-b122-c1dcf9a67a14-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "31071f28-1020-4bbd-b122-c1dcf9a67a14" (UID: "31071f28-1020-4bbd-b122-c1dcf9a67a14"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:29:28 crc kubenswrapper[4558]: I0120 17:29:28.584289 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31071f28-1020-4bbd-b122-c1dcf9a67a14-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "31071f28-1020-4bbd-b122-c1dcf9a67a14" (UID: "31071f28-1020-4bbd-b122-c1dcf9a67a14"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:29:28 crc kubenswrapper[4558]: I0120 17:29:28.659328 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l874w\" (UniqueName: \"kubernetes.io/projected/31071f28-1020-4bbd-b122-c1dcf9a67a14-kube-api-access-l874w\") on node \"crc\" DevicePath \"\"" Jan 20 17:29:28 crc kubenswrapper[4558]: I0120 17:29:28.659366 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/31071f28-1020-4bbd-b122-c1dcf9a67a14-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:29:28 crc kubenswrapper[4558]: I0120 17:29:28.659586 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31071f28-1020-4bbd-b122-c1dcf9a67a14-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:29:28 crc kubenswrapper[4558]: I0120 17:29:28.659597 4558 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/31071f28-1020-4bbd-b122-c1dcf9a67a14-ring-data-devices\") on node \"crc\" DevicePath \"\"" Jan 20 17:29:28 crc kubenswrapper[4558]: I0120 17:29:28.659607 4558 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/31071f28-1020-4bbd-b122-c1dcf9a67a14-dispersionconf\") on node \"crc\" DevicePath \"\"" Jan 20 17:29:28 crc kubenswrapper[4558]: I0120 17:29:28.659617 4558 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/31071f28-1020-4bbd-b122-c1dcf9a67a14-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 20 17:29:28 crc kubenswrapper[4558]: I0120 17:29:28.659629 4558 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/31071f28-1020-4bbd-b122-c1dcf9a67a14-swiftconf\") on node \"crc\" DevicePath \"\"" Jan 20 17:29:29 crc kubenswrapper[4558]: I0120 17:29:29.077202 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-w6cqr"] Jan 20 17:29:29 crc kubenswrapper[4558]: I0120 17:29:29.086412 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-w6cqr"] Jan 20 17:29:29 crc kubenswrapper[4558]: I0120 17:29:29.242573 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-ring-rebalance-zzttm" event={"ID":"31071f28-1020-4bbd-b122-c1dcf9a67a14","Type":"ContainerDied","Data":"01ba4fc520f8548af793bef9802b7737bd2cad5e86c912ac32db4108c59fcbfe"} Jan 20 17:29:29 crc kubenswrapper[4558]: I0120 17:29:29.242617 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="01ba4fc520f8548af793bef9802b7737bd2cad5e86c912ac32db4108c59fcbfe" Jan 20 17:29:29 crc kubenswrapper[4558]: I0120 17:29:29.242693 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/swift-ring-rebalance-zzttm" Jan 20 17:29:30 crc kubenswrapper[4558]: I0120 17:29:30.261751 4558 generic.go:334] "Generic (PLEG): container finished" podID="40b11e15-3f6e-4fb0-9a1f-e4f145f9c5b9" containerID="db8e8828d641c74a4518b290f587aa376715ea2305d752194da19a7d7e76d365" exitCode=0 Jan 20 17:29:30 crc kubenswrapper[4558]: I0120 17:29:30.261805 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-sync-cmhmv" event={"ID":"40b11e15-3f6e-4fb0-9a1f-e4f145f9c5b9","Type":"ContainerDied","Data":"db8e8828d641c74a4518b290f587aa376715ea2305d752194da19a7d7e76d365"} Jan 20 17:29:30 crc kubenswrapper[4558]: I0120 17:29:30.576503 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="854a193a-de08-44cb-967f-079392fa6680" path="/var/lib/kubelet/pods/854a193a-de08-44cb-967f-079392fa6680/volumes" Jan 20 17:29:31 crc kubenswrapper[4558]: I0120 17:29:31.549301 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-db-sync-cmhmv" Jan 20 17:29:31 crc kubenswrapper[4558]: I0120 17:29:31.613278 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40b11e15-3f6e-4fb0-9a1f-e4f145f9c5b9-config-data\") pod \"40b11e15-3f6e-4fb0-9a1f-e4f145f9c5b9\" (UID: \"40b11e15-3f6e-4fb0-9a1f-e4f145f9c5b9\") " Jan 20 17:29:31 crc kubenswrapper[4558]: I0120 17:29:31.613385 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/40b11e15-3f6e-4fb0-9a1f-e4f145f9c5b9-db-sync-config-data\") pod \"40b11e15-3f6e-4fb0-9a1f-e4f145f9c5b9\" (UID: \"40b11e15-3f6e-4fb0-9a1f-e4f145f9c5b9\") " Jan 20 17:29:31 crc kubenswrapper[4558]: I0120 17:29:31.613407 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40b11e15-3f6e-4fb0-9a1f-e4f145f9c5b9-combined-ca-bundle\") pod \"40b11e15-3f6e-4fb0-9a1f-e4f145f9c5b9\" (UID: \"40b11e15-3f6e-4fb0-9a1f-e4f145f9c5b9\") " Jan 20 17:29:31 crc kubenswrapper[4558]: I0120 17:29:31.613490 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qgqqt\" (UniqueName: \"kubernetes.io/projected/40b11e15-3f6e-4fb0-9a1f-e4f145f9c5b9-kube-api-access-qgqqt\") pod \"40b11e15-3f6e-4fb0-9a1f-e4f145f9c5b9\" (UID: \"40b11e15-3f6e-4fb0-9a1f-e4f145f9c5b9\") " Jan 20 17:29:31 crc kubenswrapper[4558]: I0120 17:29:31.618982 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40b11e15-3f6e-4fb0-9a1f-e4f145f9c5b9-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "40b11e15-3f6e-4fb0-9a1f-e4f145f9c5b9" (UID: "40b11e15-3f6e-4fb0-9a1f-e4f145f9c5b9"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:29:31 crc kubenswrapper[4558]: I0120 17:29:31.619339 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/40b11e15-3f6e-4fb0-9a1f-e4f145f9c5b9-kube-api-access-qgqqt" (OuterVolumeSpecName: "kube-api-access-qgqqt") pod "40b11e15-3f6e-4fb0-9a1f-e4f145f9c5b9" (UID: "40b11e15-3f6e-4fb0-9a1f-e4f145f9c5b9"). InnerVolumeSpecName "kube-api-access-qgqqt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:29:31 crc kubenswrapper[4558]: I0120 17:29:31.633197 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40b11e15-3f6e-4fb0-9a1f-e4f145f9c5b9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "40b11e15-3f6e-4fb0-9a1f-e4f145f9c5b9" (UID: "40b11e15-3f6e-4fb0-9a1f-e4f145f9c5b9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:29:31 crc kubenswrapper[4558]: I0120 17:29:31.647598 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40b11e15-3f6e-4fb0-9a1f-e4f145f9c5b9-config-data" (OuterVolumeSpecName: "config-data") pod "40b11e15-3f6e-4fb0-9a1f-e4f145f9c5b9" (UID: "40b11e15-3f6e-4fb0-9a1f-e4f145f9c5b9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:29:31 crc kubenswrapper[4558]: I0120 17:29:31.715712 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40b11e15-3f6e-4fb0-9a1f-e4f145f9c5b9-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:29:31 crc kubenswrapper[4558]: I0120 17:29:31.715758 4558 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/40b11e15-3f6e-4fb0-9a1f-e4f145f9c5b9-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:29:31 crc kubenswrapper[4558]: I0120 17:29:31.715784 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40b11e15-3f6e-4fb0-9a1f-e4f145f9c5b9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:29:31 crc kubenswrapper[4558]: I0120 17:29:31.715798 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qgqqt\" (UniqueName: \"kubernetes.io/projected/40b11e15-3f6e-4fb0-9a1f-e4f145f9c5b9-kube-api-access-qgqqt\") on node \"crc\" DevicePath \"\"" Jan 20 17:29:32 crc kubenswrapper[4558]: I0120 17:29:32.122319 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/8c74a8f5-be12-4ac4-b45c-d459801927ea-etc-swift\") pod \"swift-storage-0\" (UID: \"8c74a8f5-be12-4ac4-b45c-d459801927ea\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:29:32 crc kubenswrapper[4558]: I0120 17:29:32.126649 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/8c74a8f5-be12-4ac4-b45c-d459801927ea-etc-swift\") pod \"swift-storage-0\" (UID: \"8c74a8f5-be12-4ac4-b45c-d459801927ea\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:29:32 crc kubenswrapper[4558]: I0120 17:29:32.284969 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-sync-cmhmv" event={"ID":"40b11e15-3f6e-4fb0-9a1f-e4f145f9c5b9","Type":"ContainerDied","Data":"3b3677a3436b6da93bb9635c06bb4a6df14154ccdba374fe4807179331c569a2"} Jan 20 17:29:32 crc kubenswrapper[4558]: I0120 17:29:32.285031 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3b3677a3436b6da93bb9635c06bb4a6df14154ccdba374fe4807179331c569a2" Jan 20 17:29:32 crc kubenswrapper[4558]: I0120 17:29:32.285034 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-db-sync-cmhmv" Jan 20 17:29:32 crc kubenswrapper[4558]: I0120 17:29:32.318508 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:29:32 crc kubenswrapper[4558]: I0120 17:29:32.761352 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-storage-0"] Jan 20 17:29:32 crc kubenswrapper[4558]: W0120 17:29:32.767198 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8c74a8f5_be12_4ac4_b45c_d459801927ea.slice/crio-fcaba0c4f99da6b8b205133f8184debe1ce8d6bc61b3c4f4ee8f189ec592ace1 WatchSource:0}: Error finding container fcaba0c4f99da6b8b205133f8184debe1ce8d6bc61b3c4f4ee8f189ec592ace1: Status 404 returned error can't find the container with id fcaba0c4f99da6b8b205133f8184debe1ce8d6bc61b3c4f4ee8f189ec592ace1 Jan 20 17:29:33 crc kubenswrapper[4558]: I0120 17:29:33.296076 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"8c74a8f5-be12-4ac4-b45c-d459801927ea","Type":"ContainerStarted","Data":"c51ba6d01533031c51006f14e866bc5bdedd26de8afcbe43c5bcf1e7025e0dd3"} Jan 20 17:29:33 crc kubenswrapper[4558]: I0120 17:29:33.296133 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"8c74a8f5-be12-4ac4-b45c-d459801927ea","Type":"ContainerStarted","Data":"abb482af37b6f6a3f13af0986ae717f0f32b8467e1976fdbb59f0c73b8240e46"} Jan 20 17:29:33 crc kubenswrapper[4558]: I0120 17:29:33.296145 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"8c74a8f5-be12-4ac4-b45c-d459801927ea","Type":"ContainerStarted","Data":"33d41f8923ec28aa7b94229f05d825972526de6642700e3a9555dbc7f7a6e7ce"} Jan 20 17:29:33 crc kubenswrapper[4558]: I0120 17:29:33.296155 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"8c74a8f5-be12-4ac4-b45c-d459801927ea","Type":"ContainerStarted","Data":"fcaba0c4f99da6b8b205133f8184debe1ce8d6bc61b3c4f4ee8f189ec592ace1"} Jan 20 17:29:34 crc kubenswrapper[4558]: I0120 17:29:34.083756 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/root-account-create-update-7s2th"] Jan 20 17:29:34 crc kubenswrapper[4558]: E0120 17:29:34.084347 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="adf0bec9-f1a7-41d3-a4ef-28573981892f" containerName="mariadb-database-create" Jan 20 17:29:34 crc kubenswrapper[4558]: I0120 17:29:34.084361 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="adf0bec9-f1a7-41d3-a4ef-28573981892f" containerName="mariadb-database-create" Jan 20 17:29:34 crc kubenswrapper[4558]: E0120 17:29:34.084380 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="854a193a-de08-44cb-967f-079392fa6680" containerName="mariadb-account-create-update" Jan 20 17:29:34 crc kubenswrapper[4558]: I0120 17:29:34.084386 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="854a193a-de08-44cb-967f-079392fa6680" containerName="mariadb-account-create-update" Jan 20 17:29:34 crc kubenswrapper[4558]: E0120 17:29:34.084400 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31071f28-1020-4bbd-b122-c1dcf9a67a14" containerName="swift-ring-rebalance" Jan 20 17:29:34 crc kubenswrapper[4558]: I0120 17:29:34.084406 4558 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="31071f28-1020-4bbd-b122-c1dcf9a67a14" containerName="swift-ring-rebalance" Jan 20 17:29:34 crc kubenswrapper[4558]: E0120 17:29:34.084412 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed722c75-6f59-4f89-a7fd-c9e2e6c402b3" containerName="mariadb-account-create-update" Jan 20 17:29:34 crc kubenswrapper[4558]: I0120 17:29:34.084418 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed722c75-6f59-4f89-a7fd-c9e2e6c402b3" containerName="mariadb-account-create-update" Jan 20 17:29:34 crc kubenswrapper[4558]: E0120 17:29:34.084426 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c4c8564-f383-46e0-8bc9-d7b68aaa5bcc" containerName="mariadb-account-create-update" Jan 20 17:29:34 crc kubenswrapper[4558]: I0120 17:29:34.084431 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c4c8564-f383-46e0-8bc9-d7b68aaa5bcc" containerName="mariadb-account-create-update" Jan 20 17:29:34 crc kubenswrapper[4558]: E0120 17:29:34.084442 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40b11e15-3f6e-4fb0-9a1f-e4f145f9c5b9" containerName="glance-db-sync" Jan 20 17:29:34 crc kubenswrapper[4558]: I0120 17:29:34.084447 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="40b11e15-3f6e-4fb0-9a1f-e4f145f9c5b9" containerName="glance-db-sync" Jan 20 17:29:34 crc kubenswrapper[4558]: E0120 17:29:34.084460 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb025a4d-55a2-4b4d-becb-6250cdd0055b" containerName="mariadb-database-create" Jan 20 17:29:34 crc kubenswrapper[4558]: I0120 17:29:34.084465 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb025a4d-55a2-4b4d-becb-6250cdd0055b" containerName="mariadb-database-create" Jan 20 17:29:34 crc kubenswrapper[4558]: I0120 17:29:34.084611 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c4c8564-f383-46e0-8bc9-d7b68aaa5bcc" containerName="mariadb-account-create-update" Jan 20 17:29:34 crc kubenswrapper[4558]: I0120 17:29:34.084623 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed722c75-6f59-4f89-a7fd-c9e2e6c402b3" containerName="mariadb-account-create-update" Jan 20 17:29:34 crc kubenswrapper[4558]: I0120 17:29:34.084638 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="854a193a-de08-44cb-967f-079392fa6680" containerName="mariadb-account-create-update" Jan 20 17:29:34 crc kubenswrapper[4558]: I0120 17:29:34.084647 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="eb025a4d-55a2-4b4d-becb-6250cdd0055b" containerName="mariadb-database-create" Jan 20 17:29:34 crc kubenswrapper[4558]: I0120 17:29:34.084657 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="40b11e15-3f6e-4fb0-9a1f-e4f145f9c5b9" containerName="glance-db-sync" Jan 20 17:29:34 crc kubenswrapper[4558]: I0120 17:29:34.084667 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="31071f28-1020-4bbd-b122-c1dcf9a67a14" containerName="swift-ring-rebalance" Jan 20 17:29:34 crc kubenswrapper[4558]: I0120 17:29:34.084677 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="adf0bec9-f1a7-41d3-a4ef-28573981892f" containerName="mariadb-database-create" Jan 20 17:29:34 crc kubenswrapper[4558]: I0120 17:29:34.085149 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-7s2th" Jan 20 17:29:34 crc kubenswrapper[4558]: I0120 17:29:34.087026 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"openstack-cell1-mariadb-root-db-secret" Jan 20 17:29:34 crc kubenswrapper[4558]: I0120 17:29:34.091318 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-7s2th"] Jan 20 17:29:34 crc kubenswrapper[4558]: I0120 17:29:34.156247 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4csnk\" (UniqueName: \"kubernetes.io/projected/48a0dae5-34e3-490e-9cc5-f474f6cd2fad-kube-api-access-4csnk\") pod \"root-account-create-update-7s2th\" (UID: \"48a0dae5-34e3-490e-9cc5-f474f6cd2fad\") " pod="openstack-kuttl-tests/root-account-create-update-7s2th" Jan 20 17:29:34 crc kubenswrapper[4558]: I0120 17:29:34.156442 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/48a0dae5-34e3-490e-9cc5-f474f6cd2fad-operator-scripts\") pod \"root-account-create-update-7s2th\" (UID: \"48a0dae5-34e3-490e-9cc5-f474f6cd2fad\") " pod="openstack-kuttl-tests/root-account-create-update-7s2th" Jan 20 17:29:34 crc kubenswrapper[4558]: I0120 17:29:34.258874 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4csnk\" (UniqueName: \"kubernetes.io/projected/48a0dae5-34e3-490e-9cc5-f474f6cd2fad-kube-api-access-4csnk\") pod \"root-account-create-update-7s2th\" (UID: \"48a0dae5-34e3-490e-9cc5-f474f6cd2fad\") " pod="openstack-kuttl-tests/root-account-create-update-7s2th" Jan 20 17:29:34 crc kubenswrapper[4558]: I0120 17:29:34.259019 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/48a0dae5-34e3-490e-9cc5-f474f6cd2fad-operator-scripts\") pod \"root-account-create-update-7s2th\" (UID: \"48a0dae5-34e3-490e-9cc5-f474f6cd2fad\") " pod="openstack-kuttl-tests/root-account-create-update-7s2th" Jan 20 17:29:34 crc kubenswrapper[4558]: I0120 17:29:34.259691 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/48a0dae5-34e3-490e-9cc5-f474f6cd2fad-operator-scripts\") pod \"root-account-create-update-7s2th\" (UID: \"48a0dae5-34e3-490e-9cc5-f474f6cd2fad\") " pod="openstack-kuttl-tests/root-account-create-update-7s2th" Jan 20 17:29:34 crc kubenswrapper[4558]: I0120 17:29:34.275844 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4csnk\" (UniqueName: \"kubernetes.io/projected/48a0dae5-34e3-490e-9cc5-f474f6cd2fad-kube-api-access-4csnk\") pod \"root-account-create-update-7s2th\" (UID: \"48a0dae5-34e3-490e-9cc5-f474f6cd2fad\") " pod="openstack-kuttl-tests/root-account-create-update-7s2th" Jan 20 17:29:34 crc kubenswrapper[4558]: I0120 17:29:34.318645 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"8c74a8f5-be12-4ac4-b45c-d459801927ea","Type":"ContainerStarted","Data":"49db71c25f4101de9d35070da8e88df17b13463d85f4085b268b15d6f2fd0010"} Jan 20 17:29:34 crc kubenswrapper[4558]: I0120 17:29:34.318710 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" 
event={"ID":"8c74a8f5-be12-4ac4-b45c-d459801927ea","Type":"ContainerStarted","Data":"b1a440452721f6300aa80e2b85895305c5e163dc53fb6fedc65d4a4ad30c530d"} Jan 20 17:29:34 crc kubenswrapper[4558]: I0120 17:29:34.318722 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"8c74a8f5-be12-4ac4-b45c-d459801927ea","Type":"ContainerStarted","Data":"5b64dc2431311cf052b4ff6df6c6144511a386c51c5291266eaa5ffde8e35a57"} Jan 20 17:29:34 crc kubenswrapper[4558]: I0120 17:29:34.318732 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"8c74a8f5-be12-4ac4-b45c-d459801927ea","Type":"ContainerStarted","Data":"6750a373d500a14c8bb8f71b32edf18286f29379d3c4862db6be084c829808ed"} Jan 20 17:29:34 crc kubenswrapper[4558]: I0120 17:29:34.318741 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"8c74a8f5-be12-4ac4-b45c-d459801927ea","Type":"ContainerStarted","Data":"7312f6e5f2e6499af9f3601374373df6aa29dfce310cc2421bf1cf529c300a02"} Jan 20 17:29:34 crc kubenswrapper[4558]: I0120 17:29:34.318750 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"8c74a8f5-be12-4ac4-b45c-d459801927ea","Type":"ContainerStarted","Data":"b854ccf3f0261f0809cff51e86b924877af436c76a3e8bb217d1f85c3dda4221"} Jan 20 17:29:34 crc kubenswrapper[4558]: I0120 17:29:34.318759 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"8c74a8f5-be12-4ac4-b45c-d459801927ea","Type":"ContainerStarted","Data":"e2141ae9ecf7ffabbc3fdb13e8e88e4a3ea99534f37f0efd0752308e25d02c42"} Jan 20 17:29:34 crc kubenswrapper[4558]: I0120 17:29:34.318779 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"8c74a8f5-be12-4ac4-b45c-d459801927ea","Type":"ContainerStarted","Data":"b70b64e3b3bd252db5a3a5cd9a2f046dc46575ddcb991f7fbfb5eae9caa6a55b"} Jan 20 17:29:34 crc kubenswrapper[4558]: I0120 17:29:34.420451 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-7s2th" Jan 20 17:29:34 crc kubenswrapper[4558]: I0120 17:29:34.833362 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-7s2th"] Jan 20 17:29:34 crc kubenswrapper[4558]: W0120 17:29:34.838617 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod48a0dae5_34e3_490e_9cc5_f474f6cd2fad.slice/crio-d5eb5034577ba051e9c89b52c115de32c0013fac68b122231b9a4d153a108529 WatchSource:0}: Error finding container d5eb5034577ba051e9c89b52c115de32c0013fac68b122231b9a4d153a108529: Status 404 returned error can't find the container with id d5eb5034577ba051e9c89b52c115de32c0013fac68b122231b9a4d153a108529 Jan 20 17:29:35 crc kubenswrapper[4558]: I0120 17:29:35.332242 4558 generic.go:334] "Generic (PLEG): container finished" podID="48a0dae5-34e3-490e-9cc5-f474f6cd2fad" containerID="182441449b8343f15df8dd6b8d344ab02e993a5dadb813be2e93069d80443bbe" exitCode=0 Jan 20 17:29:35 crc kubenswrapper[4558]: I0120 17:29:35.332291 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-7s2th" event={"ID":"48a0dae5-34e3-490e-9cc5-f474f6cd2fad","Type":"ContainerDied","Data":"182441449b8343f15df8dd6b8d344ab02e993a5dadb813be2e93069d80443bbe"} Jan 20 17:29:35 crc kubenswrapper[4558]: I0120 17:29:35.332660 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-7s2th" event={"ID":"48a0dae5-34e3-490e-9cc5-f474f6cd2fad","Type":"ContainerStarted","Data":"d5eb5034577ba051e9c89b52c115de32c0013fac68b122231b9a4d153a108529"} Jan 20 17:29:35 crc kubenswrapper[4558]: I0120 17:29:35.339992 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"8c74a8f5-be12-4ac4-b45c-d459801927ea","Type":"ContainerStarted","Data":"440ae9e1e2d1bfffd629b66e5f2b1ad26abca913a264c8ed449f65a224346b30"} Jan 20 17:29:35 crc kubenswrapper[4558]: I0120 17:29:35.340062 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"8c74a8f5-be12-4ac4-b45c-d459801927ea","Type":"ContainerStarted","Data":"e84993808b2bc5f85b1ae50493298e6a10eb7c3fcbd64da879a0e7a937ba17e1"} Jan 20 17:29:35 crc kubenswrapper[4558]: I0120 17:29:35.340076 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"8c74a8f5-be12-4ac4-b45c-d459801927ea","Type":"ContainerStarted","Data":"6bd5c4f64f7527d8728149bf64ddbf3f8288b598bd1840a6821fe48789fce821"} Jan 20 17:29:35 crc kubenswrapper[4558]: I0120 17:29:35.340088 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"8c74a8f5-be12-4ac4-b45c-d459801927ea","Type":"ContainerStarted","Data":"642fe8d6d1e22629c1ee44fe92b12d3105252f0e570aa04c45e104ebe419f7ad"} Jan 20 17:29:35 crc kubenswrapper[4558]: I0120 17:29:35.384210 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/swift-storage-0" podStartSLOduration=20.384158966 podStartE2EDuration="20.384158966s" podCreationTimestamp="2026-01-20 17:29:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:29:35.375430023 +0000 UTC m=+2869.135767990" watchObservedRunningTime="2026-01-20 17:29:35.384158966 +0000 UTC m=+2869.144496932" Jan 
20 17:29:35 crc kubenswrapper[4558]: I0120 17:29:35.543287 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-59d597f5bf-w7957"] Jan 20 17:29:35 crc kubenswrapper[4558]: I0120 17:29:35.554545 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-59d597f5bf-w7957" Jan 20 17:29:35 crc kubenswrapper[4558]: I0120 17:29:35.558344 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-59d597f5bf-w7957"] Jan 20 17:29:35 crc kubenswrapper[4558]: I0120 17:29:35.561408 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"dns-swift-storage-0" Jan 20 17:29:35 crc kubenswrapper[4558]: I0120 17:29:35.596388 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tx84d\" (UniqueName: \"kubernetes.io/projected/d41575b2-4a6b-4d95-b15d-4699f0193d4f-kube-api-access-tx84d\") pod \"dnsmasq-dnsmasq-59d597f5bf-w7957\" (UID: \"d41575b2-4a6b-4d95-b15d-4699f0193d4f\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-59d597f5bf-w7957" Jan 20 17:29:35 crc kubenswrapper[4558]: I0120 17:29:35.596465 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/d41575b2-4a6b-4d95-b15d-4699f0193d4f-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-59d597f5bf-w7957\" (UID: \"d41575b2-4a6b-4d95-b15d-4699f0193d4f\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-59d597f5bf-w7957" Jan 20 17:29:35 crc kubenswrapper[4558]: I0120 17:29:35.596514 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d41575b2-4a6b-4d95-b15d-4699f0193d4f-config\") pod \"dnsmasq-dnsmasq-59d597f5bf-w7957\" (UID: \"d41575b2-4a6b-4d95-b15d-4699f0193d4f\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-59d597f5bf-w7957" Jan 20 17:29:35 crc kubenswrapper[4558]: I0120 17:29:35.596580 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d41575b2-4a6b-4d95-b15d-4699f0193d4f-dns-swift-storage-0\") pod \"dnsmasq-dnsmasq-59d597f5bf-w7957\" (UID: \"d41575b2-4a6b-4d95-b15d-4699f0193d4f\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-59d597f5bf-w7957" Jan 20 17:29:35 crc kubenswrapper[4558]: I0120 17:29:35.698416 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d41575b2-4a6b-4d95-b15d-4699f0193d4f-config\") pod \"dnsmasq-dnsmasq-59d597f5bf-w7957\" (UID: \"d41575b2-4a6b-4d95-b15d-4699f0193d4f\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-59d597f5bf-w7957" Jan 20 17:29:35 crc kubenswrapper[4558]: I0120 17:29:35.698479 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d41575b2-4a6b-4d95-b15d-4699f0193d4f-dns-swift-storage-0\") pod \"dnsmasq-dnsmasq-59d597f5bf-w7957\" (UID: \"d41575b2-4a6b-4d95-b15d-4699f0193d4f\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-59d597f5bf-w7957" Jan 20 17:29:35 crc kubenswrapper[4558]: I0120 17:29:35.698589 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tx84d\" (UniqueName: \"kubernetes.io/projected/d41575b2-4a6b-4d95-b15d-4699f0193d4f-kube-api-access-tx84d\") pod 
\"dnsmasq-dnsmasq-59d597f5bf-w7957\" (UID: \"d41575b2-4a6b-4d95-b15d-4699f0193d4f\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-59d597f5bf-w7957" Jan 20 17:29:35 crc kubenswrapper[4558]: I0120 17:29:35.698613 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/d41575b2-4a6b-4d95-b15d-4699f0193d4f-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-59d597f5bf-w7957\" (UID: \"d41575b2-4a6b-4d95-b15d-4699f0193d4f\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-59d597f5bf-w7957" Jan 20 17:29:35 crc kubenswrapper[4558]: I0120 17:29:35.699573 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d41575b2-4a6b-4d95-b15d-4699f0193d4f-dns-swift-storage-0\") pod \"dnsmasq-dnsmasq-59d597f5bf-w7957\" (UID: \"d41575b2-4a6b-4d95-b15d-4699f0193d4f\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-59d597f5bf-w7957" Jan 20 17:29:35 crc kubenswrapper[4558]: I0120 17:29:35.699583 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d41575b2-4a6b-4d95-b15d-4699f0193d4f-config\") pod \"dnsmasq-dnsmasq-59d597f5bf-w7957\" (UID: \"d41575b2-4a6b-4d95-b15d-4699f0193d4f\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-59d597f5bf-w7957" Jan 20 17:29:35 crc kubenswrapper[4558]: I0120 17:29:35.699621 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/d41575b2-4a6b-4d95-b15d-4699f0193d4f-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-59d597f5bf-w7957\" (UID: \"d41575b2-4a6b-4d95-b15d-4699f0193d4f\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-59d597f5bf-w7957" Jan 20 17:29:35 crc kubenswrapper[4558]: I0120 17:29:35.717850 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tx84d\" (UniqueName: \"kubernetes.io/projected/d41575b2-4a6b-4d95-b15d-4699f0193d4f-kube-api-access-tx84d\") pod \"dnsmasq-dnsmasq-59d597f5bf-w7957\" (UID: \"d41575b2-4a6b-4d95-b15d-4699f0193d4f\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-59d597f5bf-w7957" Jan 20 17:29:36 crc kubenswrapper[4558]: I0120 17:29:36.002969 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-59d597f5bf-w7957" Jan 20 17:29:36 crc kubenswrapper[4558]: I0120 17:29:36.351938 4558 generic.go:334] "Generic (PLEG): container finished" podID="35b2371f-57f2-4728-b32f-1ba587b4532e" containerID="470d1df81fd770d7b974c08649b44b1ac9d508cc4fcfb3aae910123fc7a03c96" exitCode=0 Jan 20 17:29:36 crc kubenswrapper[4558]: I0120 17:29:36.352030 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-0" event={"ID":"35b2371f-57f2-4728-b32f-1ba587b4532e","Type":"ContainerDied","Data":"470d1df81fd770d7b974c08649b44b1ac9d508cc4fcfb3aae910123fc7a03c96"} Jan 20 17:29:36 crc kubenswrapper[4558]: I0120 17:29:36.354237 4558 generic.go:334] "Generic (PLEG): container finished" podID="49dc00d4-0563-4324-a98b-e42b24b4223b" containerID="a216ad33befa8b30bde5bc44c084e7e37be4198c817fabd4c86a26d9201627a7" exitCode=0 Jan 20 17:29:36 crc kubenswrapper[4558]: I0120 17:29:36.354365 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" event={"ID":"49dc00d4-0563-4324-a98b-e42b24b4223b","Type":"ContainerDied","Data":"a216ad33befa8b30bde5bc44c084e7e37be4198c817fabd4c86a26d9201627a7"} Jan 20 17:29:36 crc kubenswrapper[4558]: I0120 17:29:36.416229 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-59d597f5bf-w7957"] Jan 20 17:29:36 crc kubenswrapper[4558]: I0120 17:29:36.677589 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-7s2th" Jan 20 17:29:36 crc kubenswrapper[4558]: I0120 17:29:36.820682 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4csnk\" (UniqueName: \"kubernetes.io/projected/48a0dae5-34e3-490e-9cc5-f474f6cd2fad-kube-api-access-4csnk\") pod \"48a0dae5-34e3-490e-9cc5-f474f6cd2fad\" (UID: \"48a0dae5-34e3-490e-9cc5-f474f6cd2fad\") " Jan 20 17:29:36 crc kubenswrapper[4558]: I0120 17:29:36.820751 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/48a0dae5-34e3-490e-9cc5-f474f6cd2fad-operator-scripts\") pod \"48a0dae5-34e3-490e-9cc5-f474f6cd2fad\" (UID: \"48a0dae5-34e3-490e-9cc5-f474f6cd2fad\") " Jan 20 17:29:36 crc kubenswrapper[4558]: I0120 17:29:36.821227 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/48a0dae5-34e3-490e-9cc5-f474f6cd2fad-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "48a0dae5-34e3-490e-9cc5-f474f6cd2fad" (UID: "48a0dae5-34e3-490e-9cc5-f474f6cd2fad"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:29:36 crc kubenswrapper[4558]: I0120 17:29:36.821493 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/48a0dae5-34e3-490e-9cc5-f474f6cd2fad-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:29:36 crc kubenswrapper[4558]: I0120 17:29:36.824257 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/48a0dae5-34e3-490e-9cc5-f474f6cd2fad-kube-api-access-4csnk" (OuterVolumeSpecName: "kube-api-access-4csnk") pod "48a0dae5-34e3-490e-9cc5-f474f6cd2fad" (UID: "48a0dae5-34e3-490e-9cc5-f474f6cd2fad"). InnerVolumeSpecName "kube-api-access-4csnk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:29:36 crc kubenswrapper[4558]: I0120 17:29:36.922282 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4csnk\" (UniqueName: \"kubernetes.io/projected/48a0dae5-34e3-490e-9cc5-f474f6cd2fad-kube-api-access-4csnk\") on node \"crc\" DevicePath \"\"" Jan 20 17:29:37 crc kubenswrapper[4558]: I0120 17:29:37.364788 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" event={"ID":"49dc00d4-0563-4324-a98b-e42b24b4223b","Type":"ContainerStarted","Data":"4008537411d2d6de7def09cb45d06dbf9acb1876b14e155e5f8a007aaeffe4df"} Jan 20 17:29:37 crc kubenswrapper[4558]: I0120 17:29:37.365777 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:29:37 crc kubenswrapper[4558]: I0120 17:29:37.366358 4558 generic.go:334] "Generic (PLEG): container finished" podID="d41575b2-4a6b-4d95-b15d-4699f0193d4f" containerID="8ca32d67a64ed7ab458dc4f478ee11665eebc4e2687ca7e2c89888f1694106fd" exitCode=0 Jan 20 17:29:37 crc kubenswrapper[4558]: I0120 17:29:37.366435 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-59d597f5bf-w7957" event={"ID":"d41575b2-4a6b-4d95-b15d-4699f0193d4f","Type":"ContainerDied","Data":"8ca32d67a64ed7ab458dc4f478ee11665eebc4e2687ca7e2c89888f1694106fd"} Jan 20 17:29:37 crc kubenswrapper[4558]: I0120 17:29:37.366487 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-59d597f5bf-w7957" event={"ID":"d41575b2-4a6b-4d95-b15d-4699f0193d4f","Type":"ContainerStarted","Data":"cbeefef59a12b3553955af3e7b1c8cdf9e902a35bb8b9b86902ec7460f4e1b10"} Jan 20 17:29:37 crc kubenswrapper[4558]: I0120 17:29:37.368505 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-7s2th" Jan 20 17:29:37 crc kubenswrapper[4558]: I0120 17:29:37.368518 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-7s2th" event={"ID":"48a0dae5-34e3-490e-9cc5-f474f6cd2fad","Type":"ContainerDied","Data":"d5eb5034577ba051e9c89b52c115de32c0013fac68b122231b9a4d153a108529"} Jan 20 17:29:37 crc kubenswrapper[4558]: I0120 17:29:37.368559 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d5eb5034577ba051e9c89b52c115de32c0013fac68b122231b9a4d153a108529" Jan 20 17:29:37 crc kubenswrapper[4558]: I0120 17:29:37.370739 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-0" event={"ID":"35b2371f-57f2-4728-b32f-1ba587b4532e","Type":"ContainerStarted","Data":"7e836c85569914a31734b34020c760d2ba17ca6f5821aecaa0763835a0e87863"} Jan 20 17:29:37 crc kubenswrapper[4558]: I0120 17:29:37.370958 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:29:37 crc kubenswrapper[4558]: I0120 17:29:37.398934 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" podStartSLOduration=36.39891159 podStartE2EDuration="36.39891159s" podCreationTimestamp="2026-01-20 17:29:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:29:37.39072696 +0000 UTC m=+2871.151064927" watchObservedRunningTime="2026-01-20 17:29:37.39891159 +0000 UTC m=+2871.159249557" Jan 20 17:29:37 crc kubenswrapper[4558]: I0120 17:29:37.426743 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/rabbitmq-server-0" podStartSLOduration=36.426722623 podStartE2EDuration="36.426722623s" podCreationTimestamp="2026-01-20 17:29:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:29:37.420654082 +0000 UTC m=+2871.180992049" watchObservedRunningTime="2026-01-20 17:29:37.426722623 +0000 UTC m=+2871.187060590" Jan 20 17:29:38 crc kubenswrapper[4558]: I0120 17:29:38.381255 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-59d597f5bf-w7957" event={"ID":"d41575b2-4a6b-4d95-b15d-4699f0193d4f","Type":"ContainerStarted","Data":"a20edd87246e6f47b5bac9aeb82f24c7d1f1dec5608b67514645643cdf25e8de"} Jan 20 17:29:38 crc kubenswrapper[4558]: I0120 17:29:38.399866 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-59d597f5bf-w7957" podStartSLOduration=3.399845081 podStartE2EDuration="3.399845081s" podCreationTimestamp="2026-01-20 17:29:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:29:38.397799165 +0000 UTC m=+2872.158137132" watchObservedRunningTime="2026-01-20 17:29:38.399845081 +0000 UTC m=+2872.160183039" Jan 20 17:29:39 crc kubenswrapper[4558]: I0120 17:29:39.387979 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-59d597f5bf-w7957" Jan 20 17:29:41 crc kubenswrapper[4558]: I0120 17:29:41.565853 4558 scope.go:117] "RemoveContainer" 
containerID="f275e9f5de330b3c79316d5871106c6bcf90b698e0744c5beb28da45c336b36d" Jan 20 17:29:41 crc kubenswrapper[4558]: E0120 17:29:41.566527 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:29:46 crc kubenswrapper[4558]: I0120 17:29:46.004401 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-59d597f5bf-w7957" Jan 20 17:29:46 crc kubenswrapper[4558]: I0120 17:29:46.049656 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-lgbfw"] Jan 20 17:29:46 crc kubenswrapper[4558]: I0120 17:29:46.049913 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-lgbfw" podUID="318ab596-0189-4f84-b805-f28bd05824e1" containerName="dnsmasq-dns" containerID="cri-o://1fd793f5ed701b177f6d6abcb3b5a69703e396d894ac6e3369bf9b47b38eea48" gracePeriod=10 Jan 20 17:29:46 crc kubenswrapper[4558]: I0120 17:29:46.445121 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-lgbfw" Jan 20 17:29:46 crc kubenswrapper[4558]: I0120 17:29:46.453271 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-lgbfw" event={"ID":"318ab596-0189-4f84-b805-f28bd05824e1","Type":"ContainerDied","Data":"1fd793f5ed701b177f6d6abcb3b5a69703e396d894ac6e3369bf9b47b38eea48"} Jan 20 17:29:46 crc kubenswrapper[4558]: I0120 17:29:46.453324 4558 scope.go:117] "RemoveContainer" containerID="1fd793f5ed701b177f6d6abcb3b5a69703e396d894ac6e3369bf9b47b38eea48" Jan 20 17:29:46 crc kubenswrapper[4558]: I0120 17:29:46.453292 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-lgbfw" Jan 20 17:29:46 crc kubenswrapper[4558]: I0120 17:29:46.453236 4558 generic.go:334] "Generic (PLEG): container finished" podID="318ab596-0189-4f84-b805-f28bd05824e1" containerID="1fd793f5ed701b177f6d6abcb3b5a69703e396d894ac6e3369bf9b47b38eea48" exitCode=0 Jan 20 17:29:46 crc kubenswrapper[4558]: I0120 17:29:46.453504 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-lgbfw" event={"ID":"318ab596-0189-4f84-b805-f28bd05824e1","Type":"ContainerDied","Data":"03355da3f4b4d317b1d947bab5eed6dc4d42c04598faa1d7ffc1b2a3128947a4"} Jan 20 17:29:46 crc kubenswrapper[4558]: I0120 17:29:46.477260 4558 scope.go:117] "RemoveContainer" containerID="17815dd01664011f7b21291548668facb5de910395c1405e050881ec910e03a5" Jan 20 17:29:46 crc kubenswrapper[4558]: I0120 17:29:46.499370 4558 scope.go:117] "RemoveContainer" containerID="1fd793f5ed701b177f6d6abcb3b5a69703e396d894ac6e3369bf9b47b38eea48" Jan 20 17:29:46 crc kubenswrapper[4558]: E0120 17:29:46.499792 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1fd793f5ed701b177f6d6abcb3b5a69703e396d894ac6e3369bf9b47b38eea48\": container with ID starting with 1fd793f5ed701b177f6d6abcb3b5a69703e396d894ac6e3369bf9b47b38eea48 not found: ID does not exist" containerID="1fd793f5ed701b177f6d6abcb3b5a69703e396d894ac6e3369bf9b47b38eea48" Jan 20 17:29:46 crc kubenswrapper[4558]: I0120 17:29:46.499868 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1fd793f5ed701b177f6d6abcb3b5a69703e396d894ac6e3369bf9b47b38eea48"} err="failed to get container status \"1fd793f5ed701b177f6d6abcb3b5a69703e396d894ac6e3369bf9b47b38eea48\": rpc error: code = NotFound desc = could not find container \"1fd793f5ed701b177f6d6abcb3b5a69703e396d894ac6e3369bf9b47b38eea48\": container with ID starting with 1fd793f5ed701b177f6d6abcb3b5a69703e396d894ac6e3369bf9b47b38eea48 not found: ID does not exist" Jan 20 17:29:46 crc kubenswrapper[4558]: I0120 17:29:46.499912 4558 scope.go:117] "RemoveContainer" containerID="17815dd01664011f7b21291548668facb5de910395c1405e050881ec910e03a5" Jan 20 17:29:46 crc kubenswrapper[4558]: E0120 17:29:46.500420 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"17815dd01664011f7b21291548668facb5de910395c1405e050881ec910e03a5\": container with ID starting with 17815dd01664011f7b21291548668facb5de910395c1405e050881ec910e03a5 not found: ID does not exist" containerID="17815dd01664011f7b21291548668facb5de910395c1405e050881ec910e03a5" Jan 20 17:29:46 crc kubenswrapper[4558]: I0120 17:29:46.500464 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"17815dd01664011f7b21291548668facb5de910395c1405e050881ec910e03a5"} err="failed to get container status \"17815dd01664011f7b21291548668facb5de910395c1405e050881ec910e03a5\": rpc error: code = NotFound desc = could not find container \"17815dd01664011f7b21291548668facb5de910395c1405e050881ec910e03a5\": container with ID starting with 17815dd01664011f7b21291548668facb5de910395c1405e050881ec910e03a5 not found: ID does not exist" Jan 20 17:29:46 crc kubenswrapper[4558]: I0120 17:29:46.580151 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/318ab596-0189-4f84-b805-f28bd05824e1-config\") pod \"318ab596-0189-4f84-b805-f28bd05824e1\" (UID: \"318ab596-0189-4f84-b805-f28bd05824e1\") " Jan 20 17:29:46 crc kubenswrapper[4558]: I0120 17:29:46.580239 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/318ab596-0189-4f84-b805-f28bd05824e1-dnsmasq-svc\") pod \"318ab596-0189-4f84-b805-f28bd05824e1\" (UID: \"318ab596-0189-4f84-b805-f28bd05824e1\") " Jan 20 17:29:46 crc kubenswrapper[4558]: I0120 17:29:46.580349 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9qtf\" (UniqueName: \"kubernetes.io/projected/318ab596-0189-4f84-b805-f28bd05824e1-kube-api-access-w9qtf\") pod \"318ab596-0189-4f84-b805-f28bd05824e1\" (UID: \"318ab596-0189-4f84-b805-f28bd05824e1\") " Jan 20 17:29:46 crc kubenswrapper[4558]: I0120 17:29:46.597973 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/318ab596-0189-4f84-b805-f28bd05824e1-kube-api-access-w9qtf" (OuterVolumeSpecName: "kube-api-access-w9qtf") pod "318ab596-0189-4f84-b805-f28bd05824e1" (UID: "318ab596-0189-4f84-b805-f28bd05824e1"). InnerVolumeSpecName "kube-api-access-w9qtf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:29:46 crc kubenswrapper[4558]: I0120 17:29:46.616840 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/318ab596-0189-4f84-b805-f28bd05824e1-dnsmasq-svc" (OuterVolumeSpecName: "dnsmasq-svc") pod "318ab596-0189-4f84-b805-f28bd05824e1" (UID: "318ab596-0189-4f84-b805-f28bd05824e1"). InnerVolumeSpecName "dnsmasq-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:29:46 crc kubenswrapper[4558]: I0120 17:29:46.624964 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/318ab596-0189-4f84-b805-f28bd05824e1-config" (OuterVolumeSpecName: "config") pod "318ab596-0189-4f84-b805-f28bd05824e1" (UID: "318ab596-0189-4f84-b805-f28bd05824e1"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:29:46 crc kubenswrapper[4558]: I0120 17:29:46.682379 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/318ab596-0189-4f84-b805-f28bd05824e1-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:29:46 crc kubenswrapper[4558]: I0120 17:29:46.682406 4558 reconciler_common.go:293] "Volume detached for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/318ab596-0189-4f84-b805-f28bd05824e1-dnsmasq-svc\") on node \"crc\" DevicePath \"\"" Jan 20 17:29:46 crc kubenswrapper[4558]: I0120 17:29:46.682420 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9qtf\" (UniqueName: \"kubernetes.io/projected/318ab596-0189-4f84-b805-f28bd05824e1-kube-api-access-w9qtf\") on node \"crc\" DevicePath \"\"" Jan 20 17:29:46 crc kubenswrapper[4558]: I0120 17:29:46.785231 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-lgbfw"] Jan 20 17:29:46 crc kubenswrapper[4558]: I0120 17:29:46.795325 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-lgbfw"] Jan 20 17:29:48 crc kubenswrapper[4558]: I0120 17:29:48.574002 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="318ab596-0189-4f84-b805-f28bd05824e1" path="/var/lib/kubelet/pods/318ab596-0189-4f84-b805-f28bd05824e1/volumes" Jan 20 17:29:52 crc kubenswrapper[4558]: I0120 17:29:52.632946 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:29:52 crc kubenswrapper[4558]: I0120 17:29:52.903545 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:29:52 crc kubenswrapper[4558]: I0120 17:29:52.985483 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-db-create-2rxk6"] Jan 20 17:29:52 crc kubenswrapper[4558]: E0120 17:29:52.985836 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="318ab596-0189-4f84-b805-f28bd05824e1" containerName="dnsmasq-dns" Jan 20 17:29:52 crc kubenswrapper[4558]: I0120 17:29:52.985853 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="318ab596-0189-4f84-b805-f28bd05824e1" containerName="dnsmasq-dns" Jan 20 17:29:52 crc kubenswrapper[4558]: E0120 17:29:52.985868 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48a0dae5-34e3-490e-9cc5-f474f6cd2fad" containerName="mariadb-account-create-update" Jan 20 17:29:52 crc kubenswrapper[4558]: I0120 17:29:52.985874 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="48a0dae5-34e3-490e-9cc5-f474f6cd2fad" containerName="mariadb-account-create-update" Jan 20 17:29:52 crc kubenswrapper[4558]: E0120 17:29:52.985887 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="318ab596-0189-4f84-b805-f28bd05824e1" containerName="init" Jan 20 17:29:52 crc kubenswrapper[4558]: I0120 17:29:52.985892 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="318ab596-0189-4f84-b805-f28bd05824e1" containerName="init" Jan 20 17:29:52 crc kubenswrapper[4558]: I0120 17:29:52.986014 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="48a0dae5-34e3-490e-9cc5-f474f6cd2fad" containerName="mariadb-account-create-update" Jan 20 17:29:52 crc kubenswrapper[4558]: I0120 17:29:52.986029 4558 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="318ab596-0189-4f84-b805-f28bd05824e1" containerName="dnsmasq-dns" Jan 20 17:29:52 crc kubenswrapper[4558]: I0120 17:29:52.986597 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-db-create-2rxk6" Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.000963 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-db-create-2rxk6"] Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.022174 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-b147-account-create-update-jnh4d"] Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.023665 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-b147-account-create-update-jnh4d" Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.025327 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-db-secret" Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.048197 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-b147-account-create-update-jnh4d"] Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.079587 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-db-create-cvpdw"] Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.080887 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-db-create-cvpdw" Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.095083 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-db-create-cvpdw"] Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.096673 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nrb2p\" (UniqueName: \"kubernetes.io/projected/70a900e8-163c-4d0e-8273-1697de2dba32-kube-api-access-nrb2p\") pod \"barbican-db-create-2rxk6\" (UID: \"70a900e8-163c-4d0e-8273-1697de2dba32\") " pod="openstack-kuttl-tests/barbican-db-create-2rxk6" Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.103666 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/70a900e8-163c-4d0e-8273-1697de2dba32-operator-scripts\") pod \"barbican-db-create-2rxk6\" (UID: \"70a900e8-163c-4d0e-8273-1697de2dba32\") " pod="openstack-kuttl-tests/barbican-db-create-2rxk6" Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.117115 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-bf60-account-create-update-v2bzq"] Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.118294 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-bf60-account-create-update-v2bzq" Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.133679 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-db-secret" Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.147442 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-bf60-account-create-update-v2bzq"] Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.205603 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fjdsc\" (UniqueName: \"kubernetes.io/projected/3d6a4075-3af8-4980-adbc-cbf55fcaf10d-kube-api-access-fjdsc\") pod \"barbican-b147-account-create-update-jnh4d\" (UID: \"3d6a4075-3af8-4980-adbc-cbf55fcaf10d\") " pod="openstack-kuttl-tests/barbican-b147-account-create-update-jnh4d" Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.205735 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nrb2p\" (UniqueName: \"kubernetes.io/projected/70a900e8-163c-4d0e-8273-1697de2dba32-kube-api-access-nrb2p\") pod \"barbican-db-create-2rxk6\" (UID: \"70a900e8-163c-4d0e-8273-1697de2dba32\") " pod="openstack-kuttl-tests/barbican-db-create-2rxk6" Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.205759 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-45cxn\" (UniqueName: \"kubernetes.io/projected/957aa771-87eb-4421-b9bd-42f510ce1f8b-kube-api-access-45cxn\") pod \"neutron-db-create-cvpdw\" (UID: \"957aa771-87eb-4421-b9bd-42f510ce1f8b\") " pod="openstack-kuttl-tests/neutron-db-create-cvpdw" Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.205811 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3d6a4075-3af8-4980-adbc-cbf55fcaf10d-operator-scripts\") pod \"barbican-b147-account-create-update-jnh4d\" (UID: \"3d6a4075-3af8-4980-adbc-cbf55fcaf10d\") " pod="openstack-kuttl-tests/barbican-b147-account-create-update-jnh4d" Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.205846 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/70a900e8-163c-4d0e-8273-1697de2dba32-operator-scripts\") pod \"barbican-db-create-2rxk6\" (UID: \"70a900e8-163c-4d0e-8273-1697de2dba32\") " pod="openstack-kuttl-tests/barbican-db-create-2rxk6" Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.205888 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/957aa771-87eb-4421-b9bd-42f510ce1f8b-operator-scripts\") pod \"neutron-db-create-cvpdw\" (UID: \"957aa771-87eb-4421-b9bd-42f510ce1f8b\") " pod="openstack-kuttl-tests/neutron-db-create-cvpdw" Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.206688 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/70a900e8-163c-4d0e-8273-1697de2dba32-operator-scripts\") pod \"barbican-db-create-2rxk6\" (UID: \"70a900e8-163c-4d0e-8273-1697de2dba32\") " pod="openstack-kuttl-tests/barbican-db-create-2rxk6" Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.211583 4558 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack-kuttl-tests/cinder-db-create-55tnd"] Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.212586 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-db-create-55tnd" Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.242219 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-f7c7-account-create-update-wldkt"] Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.260302 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-f7c7-account-create-update-wldkt" Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.274358 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-db-secret" Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.274451 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nrb2p\" (UniqueName: \"kubernetes.io/projected/70a900e8-163c-4d0e-8273-1697de2dba32-kube-api-access-nrb2p\") pod \"barbican-db-create-2rxk6\" (UID: \"70a900e8-163c-4d0e-8273-1697de2dba32\") " pod="openstack-kuttl-tests/barbican-db-create-2rxk6" Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.304132 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-db-create-55tnd"] Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.310365 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3d6a4075-3af8-4980-adbc-cbf55fcaf10d-operator-scripts\") pod \"barbican-b147-account-create-update-jnh4d\" (UID: \"3d6a4075-3af8-4980-adbc-cbf55fcaf10d\") " pod="openstack-kuttl-tests/barbican-b147-account-create-update-jnh4d" Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.310449 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a26d1360-434d-4bc3-b02c-7158daac68d8-operator-scripts\") pod \"cinder-db-create-55tnd\" (UID: \"a26d1360-434d-4bc3-b02c-7158daac68d8\") " pod="openstack-kuttl-tests/cinder-db-create-55tnd" Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.312223 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-db-create-2rxk6" Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.313261 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/957aa771-87eb-4421-b9bd-42f510ce1f8b-operator-scripts\") pod \"neutron-db-create-cvpdw\" (UID: \"957aa771-87eb-4421-b9bd-42f510ce1f8b\") " pod="openstack-kuttl-tests/neutron-db-create-cvpdw" Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.313352 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3d6a4075-3af8-4980-adbc-cbf55fcaf10d-operator-scripts\") pod \"barbican-b147-account-create-update-jnh4d\" (UID: \"3d6a4075-3af8-4980-adbc-cbf55fcaf10d\") " pod="openstack-kuttl-tests/barbican-b147-account-create-update-jnh4d" Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.313385 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jl52s\" (UniqueName: \"kubernetes.io/projected/a26d1360-434d-4bc3-b02c-7158daac68d8-kube-api-access-jl52s\") pod \"cinder-db-create-55tnd\" (UID: \"a26d1360-434d-4bc3-b02c-7158daac68d8\") " pod="openstack-kuttl-tests/cinder-db-create-55tnd" Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.313496 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fjdsc\" (UniqueName: \"kubernetes.io/projected/3d6a4075-3af8-4980-adbc-cbf55fcaf10d-kube-api-access-fjdsc\") pod \"barbican-b147-account-create-update-jnh4d\" (UID: \"3d6a4075-3af8-4980-adbc-cbf55fcaf10d\") " pod="openstack-kuttl-tests/barbican-b147-account-create-update-jnh4d" Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.313558 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7d991da1-52c2-4010-b467-8341490801b3-operator-scripts\") pod \"cinder-bf60-account-create-update-v2bzq\" (UID: \"7d991da1-52c2-4010-b467-8341490801b3\") " pod="openstack-kuttl-tests/cinder-bf60-account-create-update-v2bzq" Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.313649 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7kkcv\" (UniqueName: \"kubernetes.io/projected/7d991da1-52c2-4010-b467-8341490801b3-kube-api-access-7kkcv\") pod \"cinder-bf60-account-create-update-v2bzq\" (UID: \"7d991da1-52c2-4010-b467-8341490801b3\") " pod="openstack-kuttl-tests/cinder-bf60-account-create-update-v2bzq" Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.313747 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-45cxn\" (UniqueName: \"kubernetes.io/projected/957aa771-87eb-4421-b9bd-42f510ce1f8b-kube-api-access-45cxn\") pod \"neutron-db-create-cvpdw\" (UID: \"957aa771-87eb-4421-b9bd-42f510ce1f8b\") " pod="openstack-kuttl-tests/neutron-db-create-cvpdw" Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.314720 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/957aa771-87eb-4421-b9bd-42f510ce1f8b-operator-scripts\") pod \"neutron-db-create-cvpdw\" (UID: \"957aa771-87eb-4421-b9bd-42f510ce1f8b\") " pod="openstack-kuttl-tests/neutron-db-create-cvpdw" Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.318264 4558 kubelet.go:2428] "SyncLoop 
UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-f7c7-account-create-update-wldkt"] Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.330335 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fjdsc\" (UniqueName: \"kubernetes.io/projected/3d6a4075-3af8-4980-adbc-cbf55fcaf10d-kube-api-access-fjdsc\") pod \"barbican-b147-account-create-update-jnh4d\" (UID: \"3d6a4075-3af8-4980-adbc-cbf55fcaf10d\") " pod="openstack-kuttl-tests/barbican-b147-account-create-update-jnh4d" Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.331994 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-45cxn\" (UniqueName: \"kubernetes.io/projected/957aa771-87eb-4421-b9bd-42f510ce1f8b-kube-api-access-45cxn\") pod \"neutron-db-create-cvpdw\" (UID: \"957aa771-87eb-4421-b9bd-42f510ce1f8b\") " pod="openstack-kuttl-tests/neutron-db-create-cvpdw" Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.343013 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-b147-account-create-update-jnh4d" Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.383059 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-db-sync-fgdtt"] Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.384332 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-db-sync-fgdtt" Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.387682 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone" Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.387927 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-keystone-dockercfg-8n4lx" Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.387980 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-scripts" Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.388132 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-config-data" Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.389714 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-db-sync-fgdtt"] Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.404961 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-db-create-cvpdw" Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.422652 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3cf7d899-0457-4a78-9054-56101bd4963a-combined-ca-bundle\") pod \"keystone-db-sync-fgdtt\" (UID: \"3cf7d899-0457-4a78-9054-56101bd4963a\") " pod="openstack-kuttl-tests/keystone-db-sync-fgdtt" Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.422697 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7kkcv\" (UniqueName: \"kubernetes.io/projected/7d991da1-52c2-4010-b467-8341490801b3-kube-api-access-7kkcv\") pod \"cinder-bf60-account-create-update-v2bzq\" (UID: \"7d991da1-52c2-4010-b467-8341490801b3\") " pod="openstack-kuttl-tests/cinder-bf60-account-create-update-v2bzq" Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.422723 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rthxj\" (UniqueName: \"kubernetes.io/projected/9219712c-6093-4d73-b176-86aa131bb6d2-kube-api-access-rthxj\") pod \"neutron-f7c7-account-create-update-wldkt\" (UID: \"9219712c-6093-4d73-b176-86aa131bb6d2\") " pod="openstack-kuttl-tests/neutron-f7c7-account-create-update-wldkt" Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.422756 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9219712c-6093-4d73-b176-86aa131bb6d2-operator-scripts\") pod \"neutron-f7c7-account-create-update-wldkt\" (UID: \"9219712c-6093-4d73-b176-86aa131bb6d2\") " pod="openstack-kuttl-tests/neutron-f7c7-account-create-update-wldkt" Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.422870 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3cf7d899-0457-4a78-9054-56101bd4963a-config-data\") pod \"keystone-db-sync-fgdtt\" (UID: \"3cf7d899-0457-4a78-9054-56101bd4963a\") " pod="openstack-kuttl-tests/keystone-db-sync-fgdtt" Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.422960 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a26d1360-434d-4bc3-b02c-7158daac68d8-operator-scripts\") pod \"cinder-db-create-55tnd\" (UID: \"a26d1360-434d-4bc3-b02c-7158daac68d8\") " pod="openstack-kuttl-tests/cinder-db-create-55tnd" Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.423017 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f26nr\" (UniqueName: \"kubernetes.io/projected/3cf7d899-0457-4a78-9054-56101bd4963a-kube-api-access-f26nr\") pod \"keystone-db-sync-fgdtt\" (UID: \"3cf7d899-0457-4a78-9054-56101bd4963a\") " pod="openstack-kuttl-tests/keystone-db-sync-fgdtt" Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.423091 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jl52s\" (UniqueName: \"kubernetes.io/projected/a26d1360-434d-4bc3-b02c-7158daac68d8-kube-api-access-jl52s\") pod \"cinder-db-create-55tnd\" (UID: \"a26d1360-434d-4bc3-b02c-7158daac68d8\") " pod="openstack-kuttl-tests/cinder-db-create-55tnd" Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.423114 4558 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7d991da1-52c2-4010-b467-8341490801b3-operator-scripts\") pod \"cinder-bf60-account-create-update-v2bzq\" (UID: \"7d991da1-52c2-4010-b467-8341490801b3\") " pod="openstack-kuttl-tests/cinder-bf60-account-create-update-v2bzq" Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.423667 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a26d1360-434d-4bc3-b02c-7158daac68d8-operator-scripts\") pod \"cinder-db-create-55tnd\" (UID: \"a26d1360-434d-4bc3-b02c-7158daac68d8\") " pod="openstack-kuttl-tests/cinder-db-create-55tnd" Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.423721 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7d991da1-52c2-4010-b467-8341490801b3-operator-scripts\") pod \"cinder-bf60-account-create-update-v2bzq\" (UID: \"7d991da1-52c2-4010-b467-8341490801b3\") " pod="openstack-kuttl-tests/cinder-bf60-account-create-update-v2bzq" Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.440502 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7kkcv\" (UniqueName: \"kubernetes.io/projected/7d991da1-52c2-4010-b467-8341490801b3-kube-api-access-7kkcv\") pod \"cinder-bf60-account-create-update-v2bzq\" (UID: \"7d991da1-52c2-4010-b467-8341490801b3\") " pod="openstack-kuttl-tests/cinder-bf60-account-create-update-v2bzq" Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.447722 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jl52s\" (UniqueName: \"kubernetes.io/projected/a26d1360-434d-4bc3-b02c-7158daac68d8-kube-api-access-jl52s\") pod \"cinder-db-create-55tnd\" (UID: \"a26d1360-434d-4bc3-b02c-7158daac68d8\") " pod="openstack-kuttl-tests/cinder-db-create-55tnd" Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.469417 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-bf60-account-create-update-v2bzq" Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.527431 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f26nr\" (UniqueName: \"kubernetes.io/projected/3cf7d899-0457-4a78-9054-56101bd4963a-kube-api-access-f26nr\") pod \"keystone-db-sync-fgdtt\" (UID: \"3cf7d899-0457-4a78-9054-56101bd4963a\") " pod="openstack-kuttl-tests/keystone-db-sync-fgdtt" Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.527581 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3cf7d899-0457-4a78-9054-56101bd4963a-combined-ca-bundle\") pod \"keystone-db-sync-fgdtt\" (UID: \"3cf7d899-0457-4a78-9054-56101bd4963a\") " pod="openstack-kuttl-tests/keystone-db-sync-fgdtt" Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.527610 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rthxj\" (UniqueName: \"kubernetes.io/projected/9219712c-6093-4d73-b176-86aa131bb6d2-kube-api-access-rthxj\") pod \"neutron-f7c7-account-create-update-wldkt\" (UID: \"9219712c-6093-4d73-b176-86aa131bb6d2\") " pod="openstack-kuttl-tests/neutron-f7c7-account-create-update-wldkt" Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.527652 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9219712c-6093-4d73-b176-86aa131bb6d2-operator-scripts\") pod \"neutron-f7c7-account-create-update-wldkt\" (UID: \"9219712c-6093-4d73-b176-86aa131bb6d2\") " pod="openstack-kuttl-tests/neutron-f7c7-account-create-update-wldkt" Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.527703 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3cf7d899-0457-4a78-9054-56101bd4963a-config-data\") pod \"keystone-db-sync-fgdtt\" (UID: \"3cf7d899-0457-4a78-9054-56101bd4963a\") " pod="openstack-kuttl-tests/keystone-db-sync-fgdtt" Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.530736 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9219712c-6093-4d73-b176-86aa131bb6d2-operator-scripts\") pod \"neutron-f7c7-account-create-update-wldkt\" (UID: \"9219712c-6093-4d73-b176-86aa131bb6d2\") " pod="openstack-kuttl-tests/neutron-f7c7-account-create-update-wldkt" Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.533223 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3cf7d899-0457-4a78-9054-56101bd4963a-config-data\") pod \"keystone-db-sync-fgdtt\" (UID: \"3cf7d899-0457-4a78-9054-56101bd4963a\") " pod="openstack-kuttl-tests/keystone-db-sync-fgdtt" Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.534618 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3cf7d899-0457-4a78-9054-56101bd4963a-combined-ca-bundle\") pod \"keystone-db-sync-fgdtt\" (UID: \"3cf7d899-0457-4a78-9054-56101bd4963a\") " pod="openstack-kuttl-tests/keystone-db-sync-fgdtt" Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.545378 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f26nr\" (UniqueName: 
\"kubernetes.io/projected/3cf7d899-0457-4a78-9054-56101bd4963a-kube-api-access-f26nr\") pod \"keystone-db-sync-fgdtt\" (UID: \"3cf7d899-0457-4a78-9054-56101bd4963a\") " pod="openstack-kuttl-tests/keystone-db-sync-fgdtt" Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.547524 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rthxj\" (UniqueName: \"kubernetes.io/projected/9219712c-6093-4d73-b176-86aa131bb6d2-kube-api-access-rthxj\") pod \"neutron-f7c7-account-create-update-wldkt\" (UID: \"9219712c-6093-4d73-b176-86aa131bb6d2\") " pod="openstack-kuttl-tests/neutron-f7c7-account-create-update-wldkt" Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.561316 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-db-create-55tnd" Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.587962 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-f7c7-account-create-update-wldkt" Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.745302 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-db-sync-fgdtt" Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.793977 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-db-create-2rxk6"] Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.864564 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-b147-account-create-update-jnh4d"] Jan 20 17:29:53 crc kubenswrapper[4558]: W0120 17:29:53.893292 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3d6a4075_3af8_4980_adbc_cbf55fcaf10d.slice/crio-9217bf241dbe9e8f0a85032cc3146045c19e20f9e27445faf56fcdaa087918c8 WatchSource:0}: Error finding container 9217bf241dbe9e8f0a85032cc3146045c19e20f9e27445faf56fcdaa087918c8: Status 404 returned error can't find the container with id 9217bf241dbe9e8f0a85032cc3146045c19e20f9e27445faf56fcdaa087918c8 Jan 20 17:29:53 crc kubenswrapper[4558]: I0120 17:29:53.969017 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-db-create-cvpdw"] Jan 20 17:29:53 crc kubenswrapper[4558]: W0120 17:29:53.973733 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod957aa771_87eb_4421_b9bd_42f510ce1f8b.slice/crio-5b1ff5c1003efbd323cb87a74566d6f172cfdc8c93d1e7b7c1a9ae8b6dbbd221 WatchSource:0}: Error finding container 5b1ff5c1003efbd323cb87a74566d6f172cfdc8c93d1e7b7c1a9ae8b6dbbd221: Status 404 returned error can't find the container with id 5b1ff5c1003efbd323cb87a74566d6f172cfdc8c93d1e7b7c1a9ae8b6dbbd221 Jan 20 17:29:54 crc kubenswrapper[4558]: I0120 17:29:54.009243 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-bf60-account-create-update-v2bzq"] Jan 20 17:29:54 crc kubenswrapper[4558]: W0120 17:29:54.014051 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7d991da1_52c2_4010_b467_8341490801b3.slice/crio-b8f84ebf5ba5eba75bb49c3e342438e7acd14eb9d85041d3c088ede9e471384d WatchSource:0}: Error finding container b8f84ebf5ba5eba75bb49c3e342438e7acd14eb9d85041d3c088ede9e471384d: Status 404 returned error can't find the container with id 
b8f84ebf5ba5eba75bb49c3e342438e7acd14eb9d85041d3c088ede9e471384d Jan 20 17:29:54 crc kubenswrapper[4558]: I0120 17:29:54.062751 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-db-create-55tnd"] Jan 20 17:29:54 crc kubenswrapper[4558]: W0120 17:29:54.066794 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda26d1360_434d_4bc3_b02c_7158daac68d8.slice/crio-72960c5d7ff2d97c282d06d4909da20454c307482ec008fdf44560504198639e WatchSource:0}: Error finding container 72960c5d7ff2d97c282d06d4909da20454c307482ec008fdf44560504198639e: Status 404 returned error can't find the container with id 72960c5d7ff2d97c282d06d4909da20454c307482ec008fdf44560504198639e Jan 20 17:29:54 crc kubenswrapper[4558]: I0120 17:29:54.132221 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-f7c7-account-create-update-wldkt"] Jan 20 17:29:54 crc kubenswrapper[4558]: W0120 17:29:54.150622 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9219712c_6093_4d73_b176_86aa131bb6d2.slice/crio-e573b1b191e8908a51984d9ad46e1fd6975a418ed15ee6882485401707350dc2 WatchSource:0}: Error finding container e573b1b191e8908a51984d9ad46e1fd6975a418ed15ee6882485401707350dc2: Status 404 returned error can't find the container with id e573b1b191e8908a51984d9ad46e1fd6975a418ed15ee6882485401707350dc2 Jan 20 17:29:54 crc kubenswrapper[4558]: I0120 17:29:54.213428 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-db-sync-fgdtt"] Jan 20 17:29:54 crc kubenswrapper[4558]: W0120 17:29:54.266283 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3cf7d899_0457_4a78_9054_56101bd4963a.slice/crio-39cf2d397ba59549ba0c501691eee518ee994a4e54ceac09d6d3ab2be8cd76aa WatchSource:0}: Error finding container 39cf2d397ba59549ba0c501691eee518ee994a4e54ceac09d6d3ab2be8cd76aa: Status 404 returned error can't find the container with id 39cf2d397ba59549ba0c501691eee518ee994a4e54ceac09d6d3ab2be8cd76aa Jan 20 17:29:54 crc kubenswrapper[4558]: I0120 17:29:54.527863 4558 generic.go:334] "Generic (PLEG): container finished" podID="a26d1360-434d-4bc3-b02c-7158daac68d8" containerID="22b0bb6d8dfc540e08d68aafb7f363e65e6bf28ecef8a61519f3bd8c6294ee98" exitCode=0 Jan 20 17:29:54 crc kubenswrapper[4558]: I0120 17:29:54.527933 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-create-55tnd" event={"ID":"a26d1360-434d-4bc3-b02c-7158daac68d8","Type":"ContainerDied","Data":"22b0bb6d8dfc540e08d68aafb7f363e65e6bf28ecef8a61519f3bd8c6294ee98"} Jan 20 17:29:54 crc kubenswrapper[4558]: I0120 17:29:54.527999 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-create-55tnd" event={"ID":"a26d1360-434d-4bc3-b02c-7158daac68d8","Type":"ContainerStarted","Data":"72960c5d7ff2d97c282d06d4909da20454c307482ec008fdf44560504198639e"} Jan 20 17:29:54 crc kubenswrapper[4558]: I0120 17:29:54.529495 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-f7c7-account-create-update-wldkt" event={"ID":"9219712c-6093-4d73-b176-86aa131bb6d2","Type":"ContainerStarted","Data":"e573b1b191e8908a51984d9ad46e1fd6975a418ed15ee6882485401707350dc2"} Jan 20 17:29:54 crc kubenswrapper[4558]: I0120 17:29:54.530945 4558 generic.go:334] "Generic (PLEG): 
container finished" podID="7d991da1-52c2-4010-b467-8341490801b3" containerID="8bf3869dd26a4d9967ff8e46c95c2f1f103bd6f215f6faf970d4d2330579f3f1" exitCode=0 Jan 20 17:29:54 crc kubenswrapper[4558]: I0120 17:29:54.531025 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-bf60-account-create-update-v2bzq" event={"ID":"7d991da1-52c2-4010-b467-8341490801b3","Type":"ContainerDied","Data":"8bf3869dd26a4d9967ff8e46c95c2f1f103bd6f215f6faf970d4d2330579f3f1"} Jan 20 17:29:54 crc kubenswrapper[4558]: I0120 17:29:54.531049 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-bf60-account-create-update-v2bzq" event={"ID":"7d991da1-52c2-4010-b467-8341490801b3","Type":"ContainerStarted","Data":"b8f84ebf5ba5eba75bb49c3e342438e7acd14eb9d85041d3c088ede9e471384d"} Jan 20 17:29:54 crc kubenswrapper[4558]: I0120 17:29:54.532443 4558 generic.go:334] "Generic (PLEG): container finished" podID="3d6a4075-3af8-4980-adbc-cbf55fcaf10d" containerID="dd7569b8ff01533e3501e65ae5d5c255b180a086f14b419b53f83fd01756e7f2" exitCode=0 Jan 20 17:29:54 crc kubenswrapper[4558]: I0120 17:29:54.532523 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-b147-account-create-update-jnh4d" event={"ID":"3d6a4075-3af8-4980-adbc-cbf55fcaf10d","Type":"ContainerDied","Data":"dd7569b8ff01533e3501e65ae5d5c255b180a086f14b419b53f83fd01756e7f2"} Jan 20 17:29:54 crc kubenswrapper[4558]: I0120 17:29:54.532559 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-b147-account-create-update-jnh4d" event={"ID":"3d6a4075-3af8-4980-adbc-cbf55fcaf10d","Type":"ContainerStarted","Data":"9217bf241dbe9e8f0a85032cc3146045c19e20f9e27445faf56fcdaa087918c8"} Jan 20 17:29:54 crc kubenswrapper[4558]: I0120 17:29:54.534061 4558 generic.go:334] "Generic (PLEG): container finished" podID="70a900e8-163c-4d0e-8273-1697de2dba32" containerID="29d4e80472a2976b5555e6b99ba389153a2a5584bf4c4181c2a9d783faf098b2" exitCode=0 Jan 20 17:29:54 crc kubenswrapper[4558]: I0120 17:29:54.534140 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-create-2rxk6" event={"ID":"70a900e8-163c-4d0e-8273-1697de2dba32","Type":"ContainerDied","Data":"29d4e80472a2976b5555e6b99ba389153a2a5584bf4c4181c2a9d783faf098b2"} Jan 20 17:29:54 crc kubenswrapper[4558]: I0120 17:29:54.534208 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-create-2rxk6" event={"ID":"70a900e8-163c-4d0e-8273-1697de2dba32","Type":"ContainerStarted","Data":"380ef9cf54ae2e78def797025ada296023bc64224de3ca863c548030c694be86"} Jan 20 17:29:54 crc kubenswrapper[4558]: I0120 17:29:54.535989 4558 generic.go:334] "Generic (PLEG): container finished" podID="957aa771-87eb-4421-b9bd-42f510ce1f8b" containerID="c4b9cfb0a2514a56eb0849ff86a8a6cd56fb0b2cf35efee93f6ec0f03d4c6cfa" exitCode=0 Jan 20 17:29:54 crc kubenswrapper[4558]: I0120 17:29:54.536059 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-create-cvpdw" event={"ID":"957aa771-87eb-4421-b9bd-42f510ce1f8b","Type":"ContainerDied","Data":"c4b9cfb0a2514a56eb0849ff86a8a6cd56fb0b2cf35efee93f6ec0f03d4c6cfa"} Jan 20 17:29:54 crc kubenswrapper[4558]: I0120 17:29:54.536089 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-create-cvpdw" 
event={"ID":"957aa771-87eb-4421-b9bd-42f510ce1f8b","Type":"ContainerStarted","Data":"5b1ff5c1003efbd323cb87a74566d6f172cfdc8c93d1e7b7c1a9ae8b6dbbd221"} Jan 20 17:29:54 crc kubenswrapper[4558]: I0120 17:29:54.537362 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-sync-fgdtt" event={"ID":"3cf7d899-0457-4a78-9054-56101bd4963a","Type":"ContainerStarted","Data":"39cf2d397ba59549ba0c501691eee518ee994a4e54ceac09d6d3ab2be8cd76aa"} Jan 20 17:29:55 crc kubenswrapper[4558]: I0120 17:29:55.546327 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-sync-fgdtt" event={"ID":"3cf7d899-0457-4a78-9054-56101bd4963a","Type":"ContainerStarted","Data":"8e5c536c7a93e02e95f308947a5b32600cda6ec3942625a79d8f934630ae1c60"} Jan 20 17:29:55 crc kubenswrapper[4558]: I0120 17:29:55.547852 4558 generic.go:334] "Generic (PLEG): container finished" podID="9219712c-6093-4d73-b176-86aa131bb6d2" containerID="c1062c77fd9698ee03c8343d7ee7f40ade58e38052555615881b886f6a23d884" exitCode=0 Jan 20 17:29:55 crc kubenswrapper[4558]: I0120 17:29:55.548065 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-f7c7-account-create-update-wldkt" event={"ID":"9219712c-6093-4d73-b176-86aa131bb6d2","Type":"ContainerDied","Data":"c1062c77fd9698ee03c8343d7ee7f40ade58e38052555615881b886f6a23d884"} Jan 20 17:29:55 crc kubenswrapper[4558]: I0120 17:29:55.573539 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/keystone-db-sync-fgdtt" podStartSLOduration=2.573524204 podStartE2EDuration="2.573524204s" podCreationTimestamp="2026-01-20 17:29:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:29:55.562095114 +0000 UTC m=+2889.322433082" watchObservedRunningTime="2026-01-20 17:29:55.573524204 +0000 UTC m=+2889.333862171" Jan 20 17:29:55 crc kubenswrapper[4558]: I0120 17:29:55.873478 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-db-create-cvpdw" Jan 20 17:29:55 crc kubenswrapper[4558]: I0120 17:29:55.977327 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/957aa771-87eb-4421-b9bd-42f510ce1f8b-operator-scripts\") pod \"957aa771-87eb-4421-b9bd-42f510ce1f8b\" (UID: \"957aa771-87eb-4421-b9bd-42f510ce1f8b\") " Jan 20 17:29:55 crc kubenswrapper[4558]: I0120 17:29:55.977486 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-45cxn\" (UniqueName: \"kubernetes.io/projected/957aa771-87eb-4421-b9bd-42f510ce1f8b-kube-api-access-45cxn\") pod \"957aa771-87eb-4421-b9bd-42f510ce1f8b\" (UID: \"957aa771-87eb-4421-b9bd-42f510ce1f8b\") " Jan 20 17:29:55 crc kubenswrapper[4558]: I0120 17:29:55.978097 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/957aa771-87eb-4421-b9bd-42f510ce1f8b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "957aa771-87eb-4421-b9bd-42f510ce1f8b" (UID: "957aa771-87eb-4421-b9bd-42f510ce1f8b"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:29:55 crc kubenswrapper[4558]: I0120 17:29:55.984873 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/957aa771-87eb-4421-b9bd-42f510ce1f8b-kube-api-access-45cxn" (OuterVolumeSpecName: "kube-api-access-45cxn") pod "957aa771-87eb-4421-b9bd-42f510ce1f8b" (UID: "957aa771-87eb-4421-b9bd-42f510ce1f8b"). InnerVolumeSpecName "kube-api-access-45cxn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:29:56 crc kubenswrapper[4558]: I0120 17:29:56.040484 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-b147-account-create-update-jnh4d" Jan 20 17:29:56 crc kubenswrapper[4558]: I0120 17:29:56.044577 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-bf60-account-create-update-v2bzq" Jan 20 17:29:56 crc kubenswrapper[4558]: I0120 17:29:56.049518 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-db-create-55tnd" Jan 20 17:29:56 crc kubenswrapper[4558]: I0120 17:29:56.060062 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-db-create-2rxk6" Jan 20 17:29:56 crc kubenswrapper[4558]: I0120 17:29:56.094111 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/957aa771-87eb-4421-b9bd-42f510ce1f8b-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:29:56 crc kubenswrapper[4558]: I0120 17:29:56.094179 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-45cxn\" (UniqueName: \"kubernetes.io/projected/957aa771-87eb-4421-b9bd-42f510ce1f8b-kube-api-access-45cxn\") on node \"crc\" DevicePath \"\"" Jan 20 17:29:56 crc kubenswrapper[4558]: I0120 17:29:56.195334 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3d6a4075-3af8-4980-adbc-cbf55fcaf10d-operator-scripts\") pod \"3d6a4075-3af8-4980-adbc-cbf55fcaf10d\" (UID: \"3d6a4075-3af8-4980-adbc-cbf55fcaf10d\") " Jan 20 17:29:56 crc kubenswrapper[4558]: I0120 17:29:56.195482 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jl52s\" (UniqueName: \"kubernetes.io/projected/a26d1360-434d-4bc3-b02c-7158daac68d8-kube-api-access-jl52s\") pod \"a26d1360-434d-4bc3-b02c-7158daac68d8\" (UID: \"a26d1360-434d-4bc3-b02c-7158daac68d8\") " Jan 20 17:29:56 crc kubenswrapper[4558]: I0120 17:29:56.195623 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fjdsc\" (UniqueName: \"kubernetes.io/projected/3d6a4075-3af8-4980-adbc-cbf55fcaf10d-kube-api-access-fjdsc\") pod \"3d6a4075-3af8-4980-adbc-cbf55fcaf10d\" (UID: \"3d6a4075-3af8-4980-adbc-cbf55fcaf10d\") " Jan 20 17:29:56 crc kubenswrapper[4558]: I0120 17:29:56.195644 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7d991da1-52c2-4010-b467-8341490801b3-operator-scripts\") pod \"7d991da1-52c2-4010-b467-8341490801b3\" (UID: \"7d991da1-52c2-4010-b467-8341490801b3\") " Jan 20 17:29:56 crc kubenswrapper[4558]: I0120 17:29:56.195673 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7kkcv\" (UniqueName: 
\"kubernetes.io/projected/7d991da1-52c2-4010-b467-8341490801b3-kube-api-access-7kkcv\") pod \"7d991da1-52c2-4010-b467-8341490801b3\" (UID: \"7d991da1-52c2-4010-b467-8341490801b3\") " Jan 20 17:29:56 crc kubenswrapper[4558]: I0120 17:29:56.195698 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a26d1360-434d-4bc3-b02c-7158daac68d8-operator-scripts\") pod \"a26d1360-434d-4bc3-b02c-7158daac68d8\" (UID: \"a26d1360-434d-4bc3-b02c-7158daac68d8\") " Jan 20 17:29:56 crc kubenswrapper[4558]: I0120 17:29:56.195742 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nrb2p\" (UniqueName: \"kubernetes.io/projected/70a900e8-163c-4d0e-8273-1697de2dba32-kube-api-access-nrb2p\") pod \"70a900e8-163c-4d0e-8273-1697de2dba32\" (UID: \"70a900e8-163c-4d0e-8273-1697de2dba32\") " Jan 20 17:29:56 crc kubenswrapper[4558]: I0120 17:29:56.195788 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/70a900e8-163c-4d0e-8273-1697de2dba32-operator-scripts\") pod \"70a900e8-163c-4d0e-8273-1697de2dba32\" (UID: \"70a900e8-163c-4d0e-8273-1697de2dba32\") " Jan 20 17:29:56 crc kubenswrapper[4558]: I0120 17:29:56.196152 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3d6a4075-3af8-4980-adbc-cbf55fcaf10d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "3d6a4075-3af8-4980-adbc-cbf55fcaf10d" (UID: "3d6a4075-3af8-4980-adbc-cbf55fcaf10d"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:29:56 crc kubenswrapper[4558]: I0120 17:29:56.196878 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a26d1360-434d-4bc3-b02c-7158daac68d8-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a26d1360-434d-4bc3-b02c-7158daac68d8" (UID: "a26d1360-434d-4bc3-b02c-7158daac68d8"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:29:56 crc kubenswrapper[4558]: I0120 17:29:56.197283 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/70a900e8-163c-4d0e-8273-1697de2dba32-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "70a900e8-163c-4d0e-8273-1697de2dba32" (UID: "70a900e8-163c-4d0e-8273-1697de2dba32"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:29:56 crc kubenswrapper[4558]: I0120 17:29:56.197539 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7d991da1-52c2-4010-b467-8341490801b3-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7d991da1-52c2-4010-b467-8341490801b3" (UID: "7d991da1-52c2-4010-b467-8341490801b3"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:29:56 crc kubenswrapper[4558]: I0120 17:29:56.204018 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3d6a4075-3af8-4980-adbc-cbf55fcaf10d-kube-api-access-fjdsc" (OuterVolumeSpecName: "kube-api-access-fjdsc") pod "3d6a4075-3af8-4980-adbc-cbf55fcaf10d" (UID: "3d6a4075-3af8-4980-adbc-cbf55fcaf10d"). InnerVolumeSpecName "kube-api-access-fjdsc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:29:56 crc kubenswrapper[4558]: I0120 17:29:56.204294 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7d991da1-52c2-4010-b467-8341490801b3-kube-api-access-7kkcv" (OuterVolumeSpecName: "kube-api-access-7kkcv") pod "7d991da1-52c2-4010-b467-8341490801b3" (UID: "7d991da1-52c2-4010-b467-8341490801b3"). InnerVolumeSpecName "kube-api-access-7kkcv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:29:56 crc kubenswrapper[4558]: I0120 17:29:56.204719 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/70a900e8-163c-4d0e-8273-1697de2dba32-kube-api-access-nrb2p" (OuterVolumeSpecName: "kube-api-access-nrb2p") pod "70a900e8-163c-4d0e-8273-1697de2dba32" (UID: "70a900e8-163c-4d0e-8273-1697de2dba32"). InnerVolumeSpecName "kube-api-access-nrb2p". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:29:56 crc kubenswrapper[4558]: I0120 17:29:56.216959 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a26d1360-434d-4bc3-b02c-7158daac68d8-kube-api-access-jl52s" (OuterVolumeSpecName: "kube-api-access-jl52s") pod "a26d1360-434d-4bc3-b02c-7158daac68d8" (UID: "a26d1360-434d-4bc3-b02c-7158daac68d8"). InnerVolumeSpecName "kube-api-access-jl52s". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:29:56 crc kubenswrapper[4558]: I0120 17:29:56.298098 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fjdsc\" (UniqueName: \"kubernetes.io/projected/3d6a4075-3af8-4980-adbc-cbf55fcaf10d-kube-api-access-fjdsc\") on node \"crc\" DevicePath \"\"" Jan 20 17:29:56 crc kubenswrapper[4558]: I0120 17:29:56.298133 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7d991da1-52c2-4010-b467-8341490801b3-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:29:56 crc kubenswrapper[4558]: I0120 17:29:56.298148 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7kkcv\" (UniqueName: \"kubernetes.io/projected/7d991da1-52c2-4010-b467-8341490801b3-kube-api-access-7kkcv\") on node \"crc\" DevicePath \"\"" Jan 20 17:29:56 crc kubenswrapper[4558]: I0120 17:29:56.298160 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a26d1360-434d-4bc3-b02c-7158daac68d8-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:29:56 crc kubenswrapper[4558]: I0120 17:29:56.298188 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nrb2p\" (UniqueName: \"kubernetes.io/projected/70a900e8-163c-4d0e-8273-1697de2dba32-kube-api-access-nrb2p\") on node \"crc\" DevicePath \"\"" Jan 20 17:29:56 crc kubenswrapper[4558]: I0120 17:29:56.298199 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/70a900e8-163c-4d0e-8273-1697de2dba32-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:29:56 crc kubenswrapper[4558]: I0120 17:29:56.298209 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3d6a4075-3af8-4980-adbc-cbf55fcaf10d-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:29:56 crc kubenswrapper[4558]: I0120 17:29:56.298219 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jl52s\" (UniqueName: 
\"kubernetes.io/projected/a26d1360-434d-4bc3-b02c-7158daac68d8-kube-api-access-jl52s\") on node \"crc\" DevicePath \"\"" Jan 20 17:29:56 crc kubenswrapper[4558]: I0120 17:29:56.557937 4558 generic.go:334] "Generic (PLEG): container finished" podID="3cf7d899-0457-4a78-9054-56101bd4963a" containerID="8e5c536c7a93e02e95f308947a5b32600cda6ec3942625a79d8f934630ae1c60" exitCode=0 Jan 20 17:29:56 crc kubenswrapper[4558]: I0120 17:29:56.558037 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-sync-fgdtt" event={"ID":"3cf7d899-0457-4a78-9054-56101bd4963a","Type":"ContainerDied","Data":"8e5c536c7a93e02e95f308947a5b32600cda6ec3942625a79d8f934630ae1c60"} Jan 20 17:29:56 crc kubenswrapper[4558]: I0120 17:29:56.559850 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-create-55tnd" event={"ID":"a26d1360-434d-4bc3-b02c-7158daac68d8","Type":"ContainerDied","Data":"72960c5d7ff2d97c282d06d4909da20454c307482ec008fdf44560504198639e"} Jan 20 17:29:56 crc kubenswrapper[4558]: I0120 17:29:56.559880 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-db-create-55tnd" Jan 20 17:29:56 crc kubenswrapper[4558]: I0120 17:29:56.559888 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="72960c5d7ff2d97c282d06d4909da20454c307482ec008fdf44560504198639e" Jan 20 17:29:56 crc kubenswrapper[4558]: I0120 17:29:56.563199 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-bf60-account-create-update-v2bzq" event={"ID":"7d991da1-52c2-4010-b467-8341490801b3","Type":"ContainerDied","Data":"b8f84ebf5ba5eba75bb49c3e342438e7acd14eb9d85041d3c088ede9e471384d"} Jan 20 17:29:56 crc kubenswrapper[4558]: I0120 17:29:56.563234 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-bf60-account-create-update-v2bzq" Jan 20 17:29:56 crc kubenswrapper[4558]: I0120 17:29:56.563248 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b8f84ebf5ba5eba75bb49c3e342438e7acd14eb9d85041d3c088ede9e471384d" Jan 20 17:29:56 crc kubenswrapper[4558]: I0120 17:29:56.565291 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-b147-account-create-update-jnh4d" Jan 20 17:29:56 crc kubenswrapper[4558]: I0120 17:29:56.565753 4558 scope.go:117] "RemoveContainer" containerID="f275e9f5de330b3c79316d5871106c6bcf90b698e0744c5beb28da45c336b36d" Jan 20 17:29:56 crc kubenswrapper[4558]: E0120 17:29:56.566143 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:29:56 crc kubenswrapper[4558]: I0120 17:29:56.567491 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-db-create-2rxk6" Jan 20 17:29:56 crc kubenswrapper[4558]: I0120 17:29:56.569571 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-db-create-cvpdw" Jan 20 17:29:56 crc kubenswrapper[4558]: I0120 17:29:56.577965 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-b147-account-create-update-jnh4d" event={"ID":"3d6a4075-3af8-4980-adbc-cbf55fcaf10d","Type":"ContainerDied","Data":"9217bf241dbe9e8f0a85032cc3146045c19e20f9e27445faf56fcdaa087918c8"} Jan 20 17:29:56 crc kubenswrapper[4558]: I0120 17:29:56.578681 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9217bf241dbe9e8f0a85032cc3146045c19e20f9e27445faf56fcdaa087918c8" Jan 20 17:29:56 crc kubenswrapper[4558]: I0120 17:29:56.578711 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-create-2rxk6" event={"ID":"70a900e8-163c-4d0e-8273-1697de2dba32","Type":"ContainerDied","Data":"380ef9cf54ae2e78def797025ada296023bc64224de3ca863c548030c694be86"} Jan 20 17:29:56 crc kubenswrapper[4558]: I0120 17:29:56.578735 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="380ef9cf54ae2e78def797025ada296023bc64224de3ca863c548030c694be86" Jan 20 17:29:56 crc kubenswrapper[4558]: I0120 17:29:56.579334 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-create-cvpdw" event={"ID":"957aa771-87eb-4421-b9bd-42f510ce1f8b","Type":"ContainerDied","Data":"5b1ff5c1003efbd323cb87a74566d6f172cfdc8c93d1e7b7c1a9ae8b6dbbd221"} Jan 20 17:29:56 crc kubenswrapper[4558]: I0120 17:29:56.579354 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5b1ff5c1003efbd323cb87a74566d6f172cfdc8c93d1e7b7c1a9ae8b6dbbd221" Jan 20 17:29:56 crc kubenswrapper[4558]: I0120 17:29:56.797015 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-f7c7-account-create-update-wldkt" Jan 20 17:29:56 crc kubenswrapper[4558]: I0120 17:29:56.808627 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rthxj\" (UniqueName: \"kubernetes.io/projected/9219712c-6093-4d73-b176-86aa131bb6d2-kube-api-access-rthxj\") pod \"9219712c-6093-4d73-b176-86aa131bb6d2\" (UID: \"9219712c-6093-4d73-b176-86aa131bb6d2\") " Jan 20 17:29:56 crc kubenswrapper[4558]: I0120 17:29:56.809006 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9219712c-6093-4d73-b176-86aa131bb6d2-operator-scripts\") pod \"9219712c-6093-4d73-b176-86aa131bb6d2\" (UID: \"9219712c-6093-4d73-b176-86aa131bb6d2\") " Jan 20 17:29:56 crc kubenswrapper[4558]: I0120 17:29:56.809707 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9219712c-6093-4d73-b176-86aa131bb6d2-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "9219712c-6093-4d73-b176-86aa131bb6d2" (UID: "9219712c-6093-4d73-b176-86aa131bb6d2"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:29:56 crc kubenswrapper[4558]: I0120 17:29:56.810123 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9219712c-6093-4d73-b176-86aa131bb6d2-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:29:56 crc kubenswrapper[4558]: I0120 17:29:56.816745 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9219712c-6093-4d73-b176-86aa131bb6d2-kube-api-access-rthxj" (OuterVolumeSpecName: "kube-api-access-rthxj") pod "9219712c-6093-4d73-b176-86aa131bb6d2" (UID: "9219712c-6093-4d73-b176-86aa131bb6d2"). InnerVolumeSpecName "kube-api-access-rthxj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:29:56 crc kubenswrapper[4558]: I0120 17:29:56.912261 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rthxj\" (UniqueName: \"kubernetes.io/projected/9219712c-6093-4d73-b176-86aa131bb6d2-kube-api-access-rthxj\") on node \"crc\" DevicePath \"\"" Jan 20 17:29:57 crc kubenswrapper[4558]: I0120 17:29:57.582024 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-f7c7-account-create-update-wldkt" Jan 20 17:29:57 crc kubenswrapper[4558]: I0120 17:29:57.582125 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-f7c7-account-create-update-wldkt" event={"ID":"9219712c-6093-4d73-b176-86aa131bb6d2","Type":"ContainerDied","Data":"e573b1b191e8908a51984d9ad46e1fd6975a418ed15ee6882485401707350dc2"} Jan 20 17:29:57 crc kubenswrapper[4558]: I0120 17:29:57.582526 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e573b1b191e8908a51984d9ad46e1fd6975a418ed15ee6882485401707350dc2" Jan 20 17:29:57 crc kubenswrapper[4558]: I0120 17:29:57.906679 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-db-sync-fgdtt" Jan 20 17:29:58 crc kubenswrapper[4558]: I0120 17:29:58.028948 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f26nr\" (UniqueName: \"kubernetes.io/projected/3cf7d899-0457-4a78-9054-56101bd4963a-kube-api-access-f26nr\") pod \"3cf7d899-0457-4a78-9054-56101bd4963a\" (UID: \"3cf7d899-0457-4a78-9054-56101bd4963a\") " Jan 20 17:29:58 crc kubenswrapper[4558]: I0120 17:29:58.029181 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3cf7d899-0457-4a78-9054-56101bd4963a-combined-ca-bundle\") pod \"3cf7d899-0457-4a78-9054-56101bd4963a\" (UID: \"3cf7d899-0457-4a78-9054-56101bd4963a\") " Jan 20 17:29:58 crc kubenswrapper[4558]: I0120 17:29:58.029219 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3cf7d899-0457-4a78-9054-56101bd4963a-config-data\") pod \"3cf7d899-0457-4a78-9054-56101bd4963a\" (UID: \"3cf7d899-0457-4a78-9054-56101bd4963a\") " Jan 20 17:29:58 crc kubenswrapper[4558]: I0120 17:29:58.035233 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cf7d899-0457-4a78-9054-56101bd4963a-kube-api-access-f26nr" (OuterVolumeSpecName: "kube-api-access-f26nr") pod "3cf7d899-0457-4a78-9054-56101bd4963a" (UID: "3cf7d899-0457-4a78-9054-56101bd4963a"). InnerVolumeSpecName "kube-api-access-f26nr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:29:58 crc kubenswrapper[4558]: I0120 17:29:58.058589 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3cf7d899-0457-4a78-9054-56101bd4963a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3cf7d899-0457-4a78-9054-56101bd4963a" (UID: "3cf7d899-0457-4a78-9054-56101bd4963a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:29:58 crc kubenswrapper[4558]: I0120 17:29:58.075544 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3cf7d899-0457-4a78-9054-56101bd4963a-config-data" (OuterVolumeSpecName: "config-data") pod "3cf7d899-0457-4a78-9054-56101bd4963a" (UID: "3cf7d899-0457-4a78-9054-56101bd4963a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:29:58 crc kubenswrapper[4558]: I0120 17:29:58.131215 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f26nr\" (UniqueName: \"kubernetes.io/projected/3cf7d899-0457-4a78-9054-56101bd4963a-kube-api-access-f26nr\") on node \"crc\" DevicePath \"\"" Jan 20 17:29:58 crc kubenswrapper[4558]: I0120 17:29:58.131250 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3cf7d899-0457-4a78-9054-56101bd4963a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:29:58 crc kubenswrapper[4558]: I0120 17:29:58.131261 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3cf7d899-0457-4a78-9054-56101bd4963a-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:29:58 crc kubenswrapper[4558]: I0120 17:29:58.592210 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-sync-fgdtt" event={"ID":"3cf7d899-0457-4a78-9054-56101bd4963a","Type":"ContainerDied","Data":"39cf2d397ba59549ba0c501691eee518ee994a4e54ceac09d6d3ab2be8cd76aa"} Jan 20 17:29:58 crc kubenswrapper[4558]: I0120 17:29:58.592265 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="39cf2d397ba59549ba0c501691eee518ee994a4e54ceac09d6d3ab2be8cd76aa" Jan 20 17:29:58 crc kubenswrapper[4558]: I0120 17:29:58.592309 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-db-sync-fgdtt" Jan 20 17:29:58 crc kubenswrapper[4558]: I0120 17:29:58.735836 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-vh8wg"] Jan 20 17:29:58 crc kubenswrapper[4558]: E0120 17:29:58.736178 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="957aa771-87eb-4421-b9bd-42f510ce1f8b" containerName="mariadb-database-create" Jan 20 17:29:58 crc kubenswrapper[4558]: I0120 17:29:58.736196 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="957aa771-87eb-4421-b9bd-42f510ce1f8b" containerName="mariadb-database-create" Jan 20 17:29:58 crc kubenswrapper[4558]: E0120 17:29:58.736215 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a26d1360-434d-4bc3-b02c-7158daac68d8" containerName="mariadb-database-create" Jan 20 17:29:58 crc kubenswrapper[4558]: I0120 17:29:58.736221 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a26d1360-434d-4bc3-b02c-7158daac68d8" containerName="mariadb-database-create" Jan 20 17:29:58 crc kubenswrapper[4558]: E0120 17:29:58.736234 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9219712c-6093-4d73-b176-86aa131bb6d2" containerName="mariadb-account-create-update" Jan 20 17:29:58 crc kubenswrapper[4558]: I0120 17:29:58.736240 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9219712c-6093-4d73-b176-86aa131bb6d2" containerName="mariadb-account-create-update" Jan 20 17:29:58 crc kubenswrapper[4558]: E0120 17:29:58.736253 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70a900e8-163c-4d0e-8273-1697de2dba32" containerName="mariadb-database-create" Jan 20 17:29:58 crc kubenswrapper[4558]: I0120 17:29:58.736259 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="70a900e8-163c-4d0e-8273-1697de2dba32" containerName="mariadb-database-create" Jan 20 17:29:58 crc kubenswrapper[4558]: E0120 17:29:58.736279 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d6a4075-3af8-4980-adbc-cbf55fcaf10d" containerName="mariadb-account-create-update" Jan 20 17:29:58 crc kubenswrapper[4558]: I0120 17:29:58.736285 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d6a4075-3af8-4980-adbc-cbf55fcaf10d" containerName="mariadb-account-create-update" Jan 20 17:29:58 crc kubenswrapper[4558]: E0120 17:29:58.736295 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3cf7d899-0457-4a78-9054-56101bd4963a" containerName="keystone-db-sync" Jan 20 17:29:58 crc kubenswrapper[4558]: I0120 17:29:58.736300 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="3cf7d899-0457-4a78-9054-56101bd4963a" containerName="keystone-db-sync" Jan 20 17:29:58 crc kubenswrapper[4558]: E0120 17:29:58.736308 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d991da1-52c2-4010-b467-8341490801b3" containerName="mariadb-account-create-update" Jan 20 17:29:58 crc kubenswrapper[4558]: I0120 17:29:58.736314 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d991da1-52c2-4010-b467-8341490801b3" containerName="mariadb-account-create-update" Jan 20 17:29:58 crc kubenswrapper[4558]: I0120 17:29:58.736449 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="70a900e8-163c-4d0e-8273-1697de2dba32" containerName="mariadb-database-create" Jan 20 17:29:58 crc kubenswrapper[4558]: I0120 17:29:58.736462 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d6a4075-3af8-4980-adbc-cbf55fcaf10d" 
containerName="mariadb-account-create-update" Jan 20 17:29:58 crc kubenswrapper[4558]: I0120 17:29:58.736475 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="9219712c-6093-4d73-b176-86aa131bb6d2" containerName="mariadb-account-create-update" Jan 20 17:29:58 crc kubenswrapper[4558]: I0120 17:29:58.736485 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="3cf7d899-0457-4a78-9054-56101bd4963a" containerName="keystone-db-sync" Jan 20 17:29:58 crc kubenswrapper[4558]: I0120 17:29:58.736495 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="957aa771-87eb-4421-b9bd-42f510ce1f8b" containerName="mariadb-database-create" Jan 20 17:29:58 crc kubenswrapper[4558]: I0120 17:29:58.736504 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a26d1360-434d-4bc3-b02c-7158daac68d8" containerName="mariadb-database-create" Jan 20 17:29:58 crc kubenswrapper[4558]: I0120 17:29:58.736514 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="7d991da1-52c2-4010-b467-8341490801b3" containerName="mariadb-account-create-update" Jan 20 17:29:58 crc kubenswrapper[4558]: I0120 17:29:58.737018 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-vh8wg" Jan 20 17:29:58 crc kubenswrapper[4558]: I0120 17:29:58.742641 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dbbead9c-ffa4-448a-81a0-d1db96d7b5e3-combined-ca-bundle\") pod \"keystone-bootstrap-vh8wg\" (UID: \"dbbead9c-ffa4-448a-81a0-d1db96d7b5e3\") " pod="openstack-kuttl-tests/keystone-bootstrap-vh8wg" Jan 20 17:29:58 crc kubenswrapper[4558]: I0120 17:29:58.742693 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/dbbead9c-ffa4-448a-81a0-d1db96d7b5e3-fernet-keys\") pod \"keystone-bootstrap-vh8wg\" (UID: \"dbbead9c-ffa4-448a-81a0-d1db96d7b5e3\") " pod="openstack-kuttl-tests/keystone-bootstrap-vh8wg" Jan 20 17:29:58 crc kubenswrapper[4558]: I0120 17:29:58.742739 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/dbbead9c-ffa4-448a-81a0-d1db96d7b5e3-credential-keys\") pod \"keystone-bootstrap-vh8wg\" (UID: \"dbbead9c-ffa4-448a-81a0-d1db96d7b5e3\") " pod="openstack-kuttl-tests/keystone-bootstrap-vh8wg" Jan 20 17:29:58 crc kubenswrapper[4558]: I0120 17:29:58.742761 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dbbead9c-ffa4-448a-81a0-d1db96d7b5e3-config-data\") pod \"keystone-bootstrap-vh8wg\" (UID: \"dbbead9c-ffa4-448a-81a0-d1db96d7b5e3\") " pod="openstack-kuttl-tests/keystone-bootstrap-vh8wg" Jan 20 17:29:58 crc kubenswrapper[4558]: I0120 17:29:58.742882 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bnzln\" (UniqueName: \"kubernetes.io/projected/dbbead9c-ffa4-448a-81a0-d1db96d7b5e3-kube-api-access-bnzln\") pod \"keystone-bootstrap-vh8wg\" (UID: \"dbbead9c-ffa4-448a-81a0-d1db96d7b5e3\") " pod="openstack-kuttl-tests/keystone-bootstrap-vh8wg" Jan 20 17:29:58 crc kubenswrapper[4558]: I0120 17:29:58.743029 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/dbbead9c-ffa4-448a-81a0-d1db96d7b5e3-scripts\") pod \"keystone-bootstrap-vh8wg\" (UID: \"dbbead9c-ffa4-448a-81a0-d1db96d7b5e3\") " pod="openstack-kuttl-tests/keystone-bootstrap-vh8wg" Jan 20 17:29:58 crc kubenswrapper[4558]: I0120 17:29:58.745139 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-config-data" Jan 20 17:29:58 crc kubenswrapper[4558]: I0120 17:29:58.745232 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-keystone-dockercfg-8n4lx" Jan 20 17:29:58 crc kubenswrapper[4558]: I0120 17:29:58.745329 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-scripts" Jan 20 17:29:58 crc kubenswrapper[4558]: I0120 17:29:58.745433 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone" Jan 20 17:29:58 crc kubenswrapper[4558]: I0120 17:29:58.745478 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"osp-secret" Jan 20 17:29:58 crc kubenswrapper[4558]: I0120 17:29:58.762876 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-vh8wg"] Jan 20 17:29:58 crc kubenswrapper[4558]: I0120 17:29:58.844356 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dbbead9c-ffa4-448a-81a0-d1db96d7b5e3-combined-ca-bundle\") pod \"keystone-bootstrap-vh8wg\" (UID: \"dbbead9c-ffa4-448a-81a0-d1db96d7b5e3\") " pod="openstack-kuttl-tests/keystone-bootstrap-vh8wg" Jan 20 17:29:58 crc kubenswrapper[4558]: I0120 17:29:58.844412 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/dbbead9c-ffa4-448a-81a0-d1db96d7b5e3-fernet-keys\") pod \"keystone-bootstrap-vh8wg\" (UID: \"dbbead9c-ffa4-448a-81a0-d1db96d7b5e3\") " pod="openstack-kuttl-tests/keystone-bootstrap-vh8wg" Jan 20 17:29:58 crc kubenswrapper[4558]: I0120 17:29:58.844454 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/dbbead9c-ffa4-448a-81a0-d1db96d7b5e3-credential-keys\") pod \"keystone-bootstrap-vh8wg\" (UID: \"dbbead9c-ffa4-448a-81a0-d1db96d7b5e3\") " pod="openstack-kuttl-tests/keystone-bootstrap-vh8wg" Jan 20 17:29:58 crc kubenswrapper[4558]: I0120 17:29:58.844598 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dbbead9c-ffa4-448a-81a0-d1db96d7b5e3-config-data\") pod \"keystone-bootstrap-vh8wg\" (UID: \"dbbead9c-ffa4-448a-81a0-d1db96d7b5e3\") " pod="openstack-kuttl-tests/keystone-bootstrap-vh8wg" Jan 20 17:29:58 crc kubenswrapper[4558]: I0120 17:29:58.844653 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bnzln\" (UniqueName: \"kubernetes.io/projected/dbbead9c-ffa4-448a-81a0-d1db96d7b5e3-kube-api-access-bnzln\") pod \"keystone-bootstrap-vh8wg\" (UID: \"dbbead9c-ffa4-448a-81a0-d1db96d7b5e3\") " pod="openstack-kuttl-tests/keystone-bootstrap-vh8wg" Jan 20 17:29:58 crc kubenswrapper[4558]: I0120 17:29:58.844706 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dbbead9c-ffa4-448a-81a0-d1db96d7b5e3-scripts\") pod \"keystone-bootstrap-vh8wg\" (UID: \"dbbead9c-ffa4-448a-81a0-d1db96d7b5e3\") " 
pod="openstack-kuttl-tests/keystone-bootstrap-vh8wg" Jan 20 17:29:58 crc kubenswrapper[4558]: I0120 17:29:58.850893 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/dbbead9c-ffa4-448a-81a0-d1db96d7b5e3-fernet-keys\") pod \"keystone-bootstrap-vh8wg\" (UID: \"dbbead9c-ffa4-448a-81a0-d1db96d7b5e3\") " pod="openstack-kuttl-tests/keystone-bootstrap-vh8wg" Jan 20 17:29:58 crc kubenswrapper[4558]: I0120 17:29:58.851495 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/dbbead9c-ffa4-448a-81a0-d1db96d7b5e3-credential-keys\") pod \"keystone-bootstrap-vh8wg\" (UID: \"dbbead9c-ffa4-448a-81a0-d1db96d7b5e3\") " pod="openstack-kuttl-tests/keystone-bootstrap-vh8wg" Jan 20 17:29:58 crc kubenswrapper[4558]: I0120 17:29:58.853435 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dbbead9c-ffa4-448a-81a0-d1db96d7b5e3-scripts\") pod \"keystone-bootstrap-vh8wg\" (UID: \"dbbead9c-ffa4-448a-81a0-d1db96d7b5e3\") " pod="openstack-kuttl-tests/keystone-bootstrap-vh8wg" Jan 20 17:29:58 crc kubenswrapper[4558]: I0120 17:29:58.853644 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dbbead9c-ffa4-448a-81a0-d1db96d7b5e3-combined-ca-bundle\") pod \"keystone-bootstrap-vh8wg\" (UID: \"dbbead9c-ffa4-448a-81a0-d1db96d7b5e3\") " pod="openstack-kuttl-tests/keystone-bootstrap-vh8wg" Jan 20 17:29:58 crc kubenswrapper[4558]: I0120 17:29:58.853855 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dbbead9c-ffa4-448a-81a0-d1db96d7b5e3-config-data\") pod \"keystone-bootstrap-vh8wg\" (UID: \"dbbead9c-ffa4-448a-81a0-d1db96d7b5e3\") " pod="openstack-kuttl-tests/keystone-bootstrap-vh8wg" Jan 20 17:29:58 crc kubenswrapper[4558]: I0120 17:29:58.864444 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bnzln\" (UniqueName: \"kubernetes.io/projected/dbbead9c-ffa4-448a-81a0-d1db96d7b5e3-kube-api-access-bnzln\") pod \"keystone-bootstrap-vh8wg\" (UID: \"dbbead9c-ffa4-448a-81a0-d1db96d7b5e3\") " pod="openstack-kuttl-tests/keystone-bootstrap-vh8wg" Jan 20 17:29:58 crc kubenswrapper[4558]: I0120 17:29:58.902936 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-db-sync-hbj94"] Jan 20 17:29:58 crc kubenswrapper[4558]: I0120 17:29:58.904218 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-db-sync-hbj94" Jan 20 17:29:58 crc kubenswrapper[4558]: I0120 17:29:58.906675 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-neutron-dockercfg-6d57m" Jan 20 17:29:58 crc kubenswrapper[4558]: I0120 17:29:58.906719 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-httpd-config" Jan 20 17:29:58 crc kubenswrapper[4558]: I0120 17:29:58.906862 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-config" Jan 20 17:29:58 crc kubenswrapper[4558]: I0120 17:29:58.919868 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-db-sync-hbj94"] Jan 20 17:29:58 crc kubenswrapper[4558]: I0120 17:29:58.925670 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:29:58 crc kubenswrapper[4558]: I0120 17:29:58.927877 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:29:58 crc kubenswrapper[4558]: I0120 17:29:58.930337 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:29:58 crc kubenswrapper[4558]: I0120 17:29:58.932577 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:29:58 crc kubenswrapper[4558]: I0120 17:29:58.942685 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.002336 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-db-sync-8rp7v"] Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.003424 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-db-sync-8rp7v" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.007817 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-config-data" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.012109 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-barbican-dockercfg-nww48" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.027587 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-db-sync-8rp7v"] Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.044112 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-db-sync-kd8pp"] Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.045571 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-db-sync-kd8pp" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.048034 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54807f10-de72-4664-a757-a1c048e4d5d7-combined-ca-bundle\") pod \"cinder-db-sync-kd8pp\" (UID: \"54807f10-de72-4664-a757-a1c048e4d5d7\") " pod="openstack-kuttl-tests/cinder-db-sync-kd8pp" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.048090 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9f98dae-f452-470a-b189-73bf68f60a83-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b9f98dae-f452-470a-b189-73bf68f60a83\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.048116 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/323fa0e2-7555-4fd5-bd80-65474fa83b69-config\") pod \"neutron-db-sync-hbj94\" (UID: \"323fa0e2-7555-4fd5-bd80-65474fa83b69\") " pod="openstack-kuttl-tests/neutron-db-sync-hbj94" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.048138 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b9f98dae-f452-470a-b189-73bf68f60a83-scripts\") pod \"ceilometer-0\" (UID: \"b9f98dae-f452-470a-b189-73bf68f60a83\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.048157 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9c95d35-61c0-4abb-92a1-8b33d73139ab-combined-ca-bundle\") pod \"barbican-db-sync-8rp7v\" (UID: \"a9c95d35-61c0-4abb-92a1-8b33d73139ab\") " pod="openstack-kuttl-tests/barbican-db-sync-8rp7v" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.048220 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/54807f10-de72-4664-a757-a1c048e4d5d7-db-sync-config-data\") pod \"cinder-db-sync-kd8pp\" (UID: \"54807f10-de72-4664-a757-a1c048e4d5d7\") " pod="openstack-kuttl-tests/cinder-db-sync-kd8pp" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.048253 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-cinder-dockercfg-zxp27" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.048392 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-scripts" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.048257 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/54807f10-de72-4664-a757-a1c048e4d5d7-etc-machine-id\") pod \"cinder-db-sync-kd8pp\" (UID: \"54807f10-de72-4664-a757-a1c048e4d5d7\") " pod="openstack-kuttl-tests/cinder-db-sync-kd8pp" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.048531 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a9c95d35-61c0-4abb-92a1-8b33d73139ab-db-sync-config-data\") pod \"barbican-db-sync-8rp7v\" (UID: 
\"a9c95d35-61c0-4abb-92a1-8b33d73139ab\") " pod="openstack-kuttl-tests/barbican-db-sync-8rp7v" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.048574 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/323fa0e2-7555-4fd5-bd80-65474fa83b69-combined-ca-bundle\") pod \"neutron-db-sync-hbj94\" (UID: \"323fa0e2-7555-4fd5-bd80-65474fa83b69\") " pod="openstack-kuttl-tests/neutron-db-sync-hbj94" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.048600 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b9f98dae-f452-470a-b189-73bf68f60a83-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b9f98dae-f452-470a-b189-73bf68f60a83\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.048627 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9f98dae-f452-470a-b189-73bf68f60a83-config-data\") pod \"ceilometer-0\" (UID: \"b9f98dae-f452-470a-b189-73bf68f60a83\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.048668 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l252s\" (UniqueName: \"kubernetes.io/projected/54807f10-de72-4664-a757-a1c048e4d5d7-kube-api-access-l252s\") pod \"cinder-db-sync-kd8pp\" (UID: \"54807f10-de72-4664-a757-a1c048e4d5d7\") " pod="openstack-kuttl-tests/cinder-db-sync-kd8pp" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.048715 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b9f98dae-f452-470a-b189-73bf68f60a83-run-httpd\") pod \"ceilometer-0\" (UID: \"b9f98dae-f452-470a-b189-73bf68f60a83\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.048744 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ww4bl\" (UniqueName: \"kubernetes.io/projected/b9f98dae-f452-470a-b189-73bf68f60a83-kube-api-access-ww4bl\") pod \"ceilometer-0\" (UID: \"b9f98dae-f452-470a-b189-73bf68f60a83\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.048807 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54807f10-de72-4664-a757-a1c048e4d5d7-config-data\") pod \"cinder-db-sync-kd8pp\" (UID: \"54807f10-de72-4664-a757-a1c048e4d5d7\") " pod="openstack-kuttl-tests/cinder-db-sync-kd8pp" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.048841 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-56tdf\" (UniqueName: \"kubernetes.io/projected/323fa0e2-7555-4fd5-bd80-65474fa83b69-kube-api-access-56tdf\") pod \"neutron-db-sync-hbj94\" (UID: \"323fa0e2-7555-4fd5-bd80-65474fa83b69\") " pod="openstack-kuttl-tests/neutron-db-sync-hbj94" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.048857 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/54807f10-de72-4664-a757-a1c048e4d5d7-scripts\") pod 
\"cinder-db-sync-kd8pp\" (UID: \"54807f10-de72-4664-a757-a1c048e4d5d7\") " pod="openstack-kuttl-tests/cinder-db-sync-kd8pp" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.048877 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f788v\" (UniqueName: \"kubernetes.io/projected/a9c95d35-61c0-4abb-92a1-8b33d73139ab-kube-api-access-f788v\") pod \"barbican-db-sync-8rp7v\" (UID: \"a9c95d35-61c0-4abb-92a1-8b33d73139ab\") " pod="openstack-kuttl-tests/barbican-db-sync-8rp7v" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.048899 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b9f98dae-f452-470a-b189-73bf68f60a83-log-httpd\") pod \"ceilometer-0\" (UID: \"b9f98dae-f452-470a-b189-73bf68f60a83\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.050957 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-config-data" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.054061 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-vh8wg" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.063725 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-db-sync-kd8pp"] Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.069245 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/placement-db-sync-b88r8"] Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.070188 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-db-sync-b88r8" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.073941 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"placement-config-data" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.074088 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"placement-scripts" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.074522 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"placement-placement-dockercfg-j7r59" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.077048 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-db-sync-b88r8"] Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.151303 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9f98dae-f452-470a-b189-73bf68f60a83-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b9f98dae-f452-470a-b189-73bf68f60a83\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.151366 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/838617e6-6285-4b18-9fc1-022ec5989dd2-config-data\") pod \"placement-db-sync-b88r8\" (UID: \"838617e6-6285-4b18-9fc1-022ec5989dd2\") " pod="openstack-kuttl-tests/placement-db-sync-b88r8" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.151404 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/323fa0e2-7555-4fd5-bd80-65474fa83b69-config\") pod 
\"neutron-db-sync-hbj94\" (UID: \"323fa0e2-7555-4fd5-bd80-65474fa83b69\") " pod="openstack-kuttl-tests/neutron-db-sync-hbj94" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.151431 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b9f98dae-f452-470a-b189-73bf68f60a83-scripts\") pod \"ceilometer-0\" (UID: \"b9f98dae-f452-470a-b189-73bf68f60a83\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.151453 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9c95d35-61c0-4abb-92a1-8b33d73139ab-combined-ca-bundle\") pod \"barbican-db-sync-8rp7v\" (UID: \"a9c95d35-61c0-4abb-92a1-8b33d73139ab\") " pod="openstack-kuttl-tests/barbican-db-sync-8rp7v" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.151477 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pzrkq\" (UniqueName: \"kubernetes.io/projected/838617e6-6285-4b18-9fc1-022ec5989dd2-kube-api-access-pzrkq\") pod \"placement-db-sync-b88r8\" (UID: \"838617e6-6285-4b18-9fc1-022ec5989dd2\") " pod="openstack-kuttl-tests/placement-db-sync-b88r8" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.151519 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/54807f10-de72-4664-a757-a1c048e4d5d7-db-sync-config-data\") pod \"cinder-db-sync-kd8pp\" (UID: \"54807f10-de72-4664-a757-a1c048e4d5d7\") " pod="openstack-kuttl-tests/cinder-db-sync-kd8pp" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.151558 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/54807f10-de72-4664-a757-a1c048e4d5d7-etc-machine-id\") pod \"cinder-db-sync-kd8pp\" (UID: \"54807f10-de72-4664-a757-a1c048e4d5d7\") " pod="openstack-kuttl-tests/cinder-db-sync-kd8pp" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.151580 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a9c95d35-61c0-4abb-92a1-8b33d73139ab-db-sync-config-data\") pod \"barbican-db-sync-8rp7v\" (UID: \"a9c95d35-61c0-4abb-92a1-8b33d73139ab\") " pod="openstack-kuttl-tests/barbican-db-sync-8rp7v" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.151604 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/323fa0e2-7555-4fd5-bd80-65474fa83b69-combined-ca-bundle\") pod \"neutron-db-sync-hbj94\" (UID: \"323fa0e2-7555-4fd5-bd80-65474fa83b69\") " pod="openstack-kuttl-tests/neutron-db-sync-hbj94" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.151623 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b9f98dae-f452-470a-b189-73bf68f60a83-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b9f98dae-f452-470a-b189-73bf68f60a83\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.151647 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9f98dae-f452-470a-b189-73bf68f60a83-config-data\") pod \"ceilometer-0\" (UID: \"b9f98dae-f452-470a-b189-73bf68f60a83\") " 
pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.151670 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l252s\" (UniqueName: \"kubernetes.io/projected/54807f10-de72-4664-a757-a1c048e4d5d7-kube-api-access-l252s\") pod \"cinder-db-sync-kd8pp\" (UID: \"54807f10-de72-4664-a757-a1c048e4d5d7\") " pod="openstack-kuttl-tests/cinder-db-sync-kd8pp" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.151686 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/838617e6-6285-4b18-9fc1-022ec5989dd2-scripts\") pod \"placement-db-sync-b88r8\" (UID: \"838617e6-6285-4b18-9fc1-022ec5989dd2\") " pod="openstack-kuttl-tests/placement-db-sync-b88r8" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.151711 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b9f98dae-f452-470a-b189-73bf68f60a83-run-httpd\") pod \"ceilometer-0\" (UID: \"b9f98dae-f452-470a-b189-73bf68f60a83\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.151731 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/838617e6-6285-4b18-9fc1-022ec5989dd2-logs\") pod \"placement-db-sync-b88r8\" (UID: \"838617e6-6285-4b18-9fc1-022ec5989dd2\") " pod="openstack-kuttl-tests/placement-db-sync-b88r8" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.151751 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ww4bl\" (UniqueName: \"kubernetes.io/projected/b9f98dae-f452-470a-b189-73bf68f60a83-kube-api-access-ww4bl\") pod \"ceilometer-0\" (UID: \"b9f98dae-f452-470a-b189-73bf68f60a83\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.151779 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54807f10-de72-4664-a757-a1c048e4d5d7-config-data\") pod \"cinder-db-sync-kd8pp\" (UID: \"54807f10-de72-4664-a757-a1c048e4d5d7\") " pod="openstack-kuttl-tests/cinder-db-sync-kd8pp" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.151800 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-56tdf\" (UniqueName: \"kubernetes.io/projected/323fa0e2-7555-4fd5-bd80-65474fa83b69-kube-api-access-56tdf\") pod \"neutron-db-sync-hbj94\" (UID: \"323fa0e2-7555-4fd5-bd80-65474fa83b69\") " pod="openstack-kuttl-tests/neutron-db-sync-hbj94" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.151818 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/54807f10-de72-4664-a757-a1c048e4d5d7-scripts\") pod \"cinder-db-sync-kd8pp\" (UID: \"54807f10-de72-4664-a757-a1c048e4d5d7\") " pod="openstack-kuttl-tests/cinder-db-sync-kd8pp" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.151840 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/838617e6-6285-4b18-9fc1-022ec5989dd2-combined-ca-bundle\") pod \"placement-db-sync-b88r8\" (UID: \"838617e6-6285-4b18-9fc1-022ec5989dd2\") " pod="openstack-kuttl-tests/placement-db-sync-b88r8" Jan 20 17:29:59 crc 
kubenswrapper[4558]: I0120 17:29:59.151861 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b9f98dae-f452-470a-b189-73bf68f60a83-log-httpd\") pod \"ceilometer-0\" (UID: \"b9f98dae-f452-470a-b189-73bf68f60a83\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.151881 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f788v\" (UniqueName: \"kubernetes.io/projected/a9c95d35-61c0-4abb-92a1-8b33d73139ab-kube-api-access-f788v\") pod \"barbican-db-sync-8rp7v\" (UID: \"a9c95d35-61c0-4abb-92a1-8b33d73139ab\") " pod="openstack-kuttl-tests/barbican-db-sync-8rp7v" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.151906 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54807f10-de72-4664-a757-a1c048e4d5d7-combined-ca-bundle\") pod \"cinder-db-sync-kd8pp\" (UID: \"54807f10-de72-4664-a757-a1c048e4d5d7\") " pod="openstack-kuttl-tests/cinder-db-sync-kd8pp" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.153713 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/54807f10-de72-4664-a757-a1c048e4d5d7-etc-machine-id\") pod \"cinder-db-sync-kd8pp\" (UID: \"54807f10-de72-4664-a757-a1c048e4d5d7\") " pod="openstack-kuttl-tests/cinder-db-sync-kd8pp" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.155609 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b9f98dae-f452-470a-b189-73bf68f60a83-log-httpd\") pod \"ceilometer-0\" (UID: \"b9f98dae-f452-470a-b189-73bf68f60a83\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.155624 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b9f98dae-f452-470a-b189-73bf68f60a83-run-httpd\") pod \"ceilometer-0\" (UID: \"b9f98dae-f452-470a-b189-73bf68f60a83\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.157019 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9f98dae-f452-470a-b189-73bf68f60a83-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b9f98dae-f452-470a-b189-73bf68f60a83\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.157789 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/323fa0e2-7555-4fd5-bd80-65474fa83b69-combined-ca-bundle\") pod \"neutron-db-sync-hbj94\" (UID: \"323fa0e2-7555-4fd5-bd80-65474fa83b69\") " pod="openstack-kuttl-tests/neutron-db-sync-hbj94" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.159039 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b9f98dae-f452-470a-b189-73bf68f60a83-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b9f98dae-f452-470a-b189-73bf68f60a83\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.159751 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/b9f98dae-f452-470a-b189-73bf68f60a83-scripts\") pod \"ceilometer-0\" (UID: \"b9f98dae-f452-470a-b189-73bf68f60a83\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.161094 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a9c95d35-61c0-4abb-92a1-8b33d73139ab-db-sync-config-data\") pod \"barbican-db-sync-8rp7v\" (UID: \"a9c95d35-61c0-4abb-92a1-8b33d73139ab\") " pod="openstack-kuttl-tests/barbican-db-sync-8rp7v" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.161228 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9c95d35-61c0-4abb-92a1-8b33d73139ab-combined-ca-bundle\") pod \"barbican-db-sync-8rp7v\" (UID: \"a9c95d35-61c0-4abb-92a1-8b33d73139ab\") " pod="openstack-kuttl-tests/barbican-db-sync-8rp7v" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.161259 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54807f10-de72-4664-a757-a1c048e4d5d7-combined-ca-bundle\") pod \"cinder-db-sync-kd8pp\" (UID: \"54807f10-de72-4664-a757-a1c048e4d5d7\") " pod="openstack-kuttl-tests/cinder-db-sync-kd8pp" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.161600 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/323fa0e2-7555-4fd5-bd80-65474fa83b69-config\") pod \"neutron-db-sync-hbj94\" (UID: \"323fa0e2-7555-4fd5-bd80-65474fa83b69\") " pod="openstack-kuttl-tests/neutron-db-sync-hbj94" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.162288 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9f98dae-f452-470a-b189-73bf68f60a83-config-data\") pod \"ceilometer-0\" (UID: \"b9f98dae-f452-470a-b189-73bf68f60a83\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.163053 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54807f10-de72-4664-a757-a1c048e4d5d7-config-data\") pod \"cinder-db-sync-kd8pp\" (UID: \"54807f10-de72-4664-a757-a1c048e4d5d7\") " pod="openstack-kuttl-tests/cinder-db-sync-kd8pp" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.164000 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/54807f10-de72-4664-a757-a1c048e4d5d7-scripts\") pod \"cinder-db-sync-kd8pp\" (UID: \"54807f10-de72-4664-a757-a1c048e4d5d7\") " pod="openstack-kuttl-tests/cinder-db-sync-kd8pp" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.166104 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/54807f10-de72-4664-a757-a1c048e4d5d7-db-sync-config-data\") pod \"cinder-db-sync-kd8pp\" (UID: \"54807f10-de72-4664-a757-a1c048e4d5d7\") " pod="openstack-kuttl-tests/cinder-db-sync-kd8pp" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.169654 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-56tdf\" (UniqueName: \"kubernetes.io/projected/323fa0e2-7555-4fd5-bd80-65474fa83b69-kube-api-access-56tdf\") pod \"neutron-db-sync-hbj94\" (UID: \"323fa0e2-7555-4fd5-bd80-65474fa83b69\") " 
pod="openstack-kuttl-tests/neutron-db-sync-hbj94" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.169808 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f788v\" (UniqueName: \"kubernetes.io/projected/a9c95d35-61c0-4abb-92a1-8b33d73139ab-kube-api-access-f788v\") pod \"barbican-db-sync-8rp7v\" (UID: \"a9c95d35-61c0-4abb-92a1-8b33d73139ab\") " pod="openstack-kuttl-tests/barbican-db-sync-8rp7v" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.170266 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l252s\" (UniqueName: \"kubernetes.io/projected/54807f10-de72-4664-a757-a1c048e4d5d7-kube-api-access-l252s\") pod \"cinder-db-sync-kd8pp\" (UID: \"54807f10-de72-4664-a757-a1c048e4d5d7\") " pod="openstack-kuttl-tests/cinder-db-sync-kd8pp" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.175076 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ww4bl\" (UniqueName: \"kubernetes.io/projected/b9f98dae-f452-470a-b189-73bf68f60a83-kube-api-access-ww4bl\") pod \"ceilometer-0\" (UID: \"b9f98dae-f452-470a-b189-73bf68f60a83\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.224132 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-db-sync-hbj94" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.244736 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.252604 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/838617e6-6285-4b18-9fc1-022ec5989dd2-config-data\") pod \"placement-db-sync-b88r8\" (UID: \"838617e6-6285-4b18-9fc1-022ec5989dd2\") " pod="openstack-kuttl-tests/placement-db-sync-b88r8" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.252654 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pzrkq\" (UniqueName: \"kubernetes.io/projected/838617e6-6285-4b18-9fc1-022ec5989dd2-kube-api-access-pzrkq\") pod \"placement-db-sync-b88r8\" (UID: \"838617e6-6285-4b18-9fc1-022ec5989dd2\") " pod="openstack-kuttl-tests/placement-db-sync-b88r8" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.252715 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/838617e6-6285-4b18-9fc1-022ec5989dd2-scripts\") pod \"placement-db-sync-b88r8\" (UID: \"838617e6-6285-4b18-9fc1-022ec5989dd2\") " pod="openstack-kuttl-tests/placement-db-sync-b88r8" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.252740 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/838617e6-6285-4b18-9fc1-022ec5989dd2-logs\") pod \"placement-db-sync-b88r8\" (UID: \"838617e6-6285-4b18-9fc1-022ec5989dd2\") " pod="openstack-kuttl-tests/placement-db-sync-b88r8" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.252767 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/838617e6-6285-4b18-9fc1-022ec5989dd2-combined-ca-bundle\") pod \"placement-db-sync-b88r8\" (UID: \"838617e6-6285-4b18-9fc1-022ec5989dd2\") " pod="openstack-kuttl-tests/placement-db-sync-b88r8" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 
17:29:59.259717 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/838617e6-6285-4b18-9fc1-022ec5989dd2-combined-ca-bundle\") pod \"placement-db-sync-b88r8\" (UID: \"838617e6-6285-4b18-9fc1-022ec5989dd2\") " pod="openstack-kuttl-tests/placement-db-sync-b88r8" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.259896 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/838617e6-6285-4b18-9fc1-022ec5989dd2-config-data\") pod \"placement-db-sync-b88r8\" (UID: \"838617e6-6285-4b18-9fc1-022ec5989dd2\") " pod="openstack-kuttl-tests/placement-db-sync-b88r8" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.259982 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/838617e6-6285-4b18-9fc1-022ec5989dd2-logs\") pod \"placement-db-sync-b88r8\" (UID: \"838617e6-6285-4b18-9fc1-022ec5989dd2\") " pod="openstack-kuttl-tests/placement-db-sync-b88r8" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.263662 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/838617e6-6285-4b18-9fc1-022ec5989dd2-scripts\") pod \"placement-db-sync-b88r8\" (UID: \"838617e6-6285-4b18-9fc1-022ec5989dd2\") " pod="openstack-kuttl-tests/placement-db-sync-b88r8" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.268895 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pzrkq\" (UniqueName: \"kubernetes.io/projected/838617e6-6285-4b18-9fc1-022ec5989dd2-kube-api-access-pzrkq\") pod \"placement-db-sync-b88r8\" (UID: \"838617e6-6285-4b18-9fc1-022ec5989dd2\") " pod="openstack-kuttl-tests/placement-db-sync-b88r8" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.335115 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-db-sync-8rp7v" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.364747 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-db-sync-kd8pp" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.437733 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-db-sync-b88r8" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.562754 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-vh8wg"] Jan 20 17:29:59 crc kubenswrapper[4558]: W0120 17:29:59.584285 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddbbead9c_ffa4_448a_81a0_d1db96d7b5e3.slice/crio-e6eddfdebe67de2e2a1df02569126c3a804d051d83edeb95725c6cba748d7022 WatchSource:0}: Error finding container e6eddfdebe67de2e2a1df02569126c3a804d051d83edeb95725c6cba748d7022: Status 404 returned error can't find the container with id e6eddfdebe67de2e2a1df02569126c3a804d051d83edeb95725c6cba748d7022 Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.617356 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-vh8wg" event={"ID":"dbbead9c-ffa4-448a-81a0-d1db96d7b5e3","Type":"ContainerStarted","Data":"e6eddfdebe67de2e2a1df02569126c3a804d051d83edeb95725c6cba748d7022"} Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.671404 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.675780 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.679602 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-external-config-data" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.679879 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-glance-dockercfg-bsssw" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.686702 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-scripts" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.728742 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.787058 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-db-sync-hbj94"] Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.804403 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.806490 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.812207 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-internal-config-data" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.820371 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.832711 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.844363 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-db-sync-b88r8"] Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.860156 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-db-sync-8rp7v"] Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.877320 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2f28be37-ef13-48a6-9b8f-84544328e73b-scripts\") pod \"glance-default-external-api-0\" (UID: \"2f28be37-ef13-48a6-9b8f-84544328e73b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.877369 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qv6tt\" (UniqueName: \"kubernetes.io/projected/2f28be37-ef13-48a6-9b8f-84544328e73b-kube-api-access-qv6tt\") pod \"glance-default-external-api-0\" (UID: \"2f28be37-ef13-48a6-9b8f-84544328e73b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.877445 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2f28be37-ef13-48a6-9b8f-84544328e73b-config-data\") pod \"glance-default-external-api-0\" (UID: \"2f28be37-ef13-48a6-9b8f-84544328e73b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.877515 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f28be37-ef13-48a6-9b8f-84544328e73b-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"2f28be37-ef13-48a6-9b8f-84544328e73b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.877602 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/2f28be37-ef13-48a6-9b8f-84544328e73b-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"2f28be37-ef13-48a6-9b8f-84544328e73b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.877660 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2f28be37-ef13-48a6-9b8f-84544328e73b-logs\") pod \"glance-default-external-api-0\" (UID: \"2f28be37-ef13-48a6-9b8f-84544328e73b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.877693 4558 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage16-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage16-crc\") pod \"glance-default-external-api-0\" (UID: \"2f28be37-ef13-48a6-9b8f-84544328e73b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.914100 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-db-sync-kd8pp"] Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.979598 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.979655 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522-logs\") pod \"glance-default-internal-api-0\" (UID: \"1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.979691 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2f28be37-ef13-48a6-9b8f-84544328e73b-logs\") pod \"glance-default-external-api-0\" (UID: \"2f28be37-ef13-48a6-9b8f-84544328e73b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.979742 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage16-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage16-crc\") pod \"glance-default-external-api-0\" (UID: \"2f28be37-ef13-48a6-9b8f-84544328e73b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.979797 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522-config-data\") pod \"glance-default-internal-api-0\" (UID: \"1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.979816 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2f28be37-ef13-48a6-9b8f-84544328e73b-scripts\") pod \"glance-default-external-api-0\" (UID: \"2f28be37-ef13-48a6-9b8f-84544328e73b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.979842 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qv6tt\" (UniqueName: \"kubernetes.io/projected/2f28be37-ef13-48a6-9b8f-84544328e73b-kube-api-access-qv6tt\") pod \"glance-default-external-api-0\" (UID: \"2f28be37-ef13-48a6-9b8f-84544328e73b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.979872 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-47znw\" (UniqueName: 
\"kubernetes.io/projected/1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522-kube-api-access-47znw\") pod \"glance-default-internal-api-0\" (UID: \"1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.979899 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2f28be37-ef13-48a6-9b8f-84544328e73b-config-data\") pod \"glance-default-external-api-0\" (UID: \"2f28be37-ef13-48a6-9b8f-84544328e73b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.979919 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522-scripts\") pod \"glance-default-internal-api-0\" (UID: \"1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.979951 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.980072 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f28be37-ef13-48a6-9b8f-84544328e73b-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"2f28be37-ef13-48a6-9b8f-84544328e73b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.980118 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"glance-default-internal-api-0\" (UID: \"1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.980077 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2f28be37-ef13-48a6-9b8f-84544328e73b-logs\") pod \"glance-default-external-api-0\" (UID: \"2f28be37-ef13-48a6-9b8f-84544328e73b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.980137 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage16-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage16-crc\") pod \"glance-default-external-api-0\" (UID: \"2f28be37-ef13-48a6-9b8f-84544328e73b\") device mount path \"/mnt/openstack/pv16\"" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.980145 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/2f28be37-ef13-48a6-9b8f-84544328e73b-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"2f28be37-ef13-48a6-9b8f-84544328e73b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.980462 4558 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/2f28be37-ef13-48a6-9b8f-84544328e73b-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"2f28be37-ef13-48a6-9b8f-84544328e73b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.992276 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2f28be37-ef13-48a6-9b8f-84544328e73b-config-data\") pod \"glance-default-external-api-0\" (UID: \"2f28be37-ef13-48a6-9b8f-84544328e73b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.992620 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2f28be37-ef13-48a6-9b8f-84544328e73b-scripts\") pod \"glance-default-external-api-0\" (UID: \"2f28be37-ef13-48a6-9b8f-84544328e73b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.994255 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f28be37-ef13-48a6-9b8f-84544328e73b-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"2f28be37-ef13-48a6-9b8f-84544328e73b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:29:59 crc kubenswrapper[4558]: I0120 17:29:59.994821 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qv6tt\" (UniqueName: \"kubernetes.io/projected/2f28be37-ef13-48a6-9b8f-84544328e73b-kube-api-access-qv6tt\") pod \"glance-default-external-api-0\" (UID: \"2f28be37-ef13-48a6-9b8f-84544328e73b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:30:00 crc kubenswrapper[4558]: I0120 17:30:00.012573 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage16-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage16-crc\") pod \"glance-default-external-api-0\" (UID: \"2f28be37-ef13-48a6-9b8f-84544328e73b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:30:00 crc kubenswrapper[4558]: I0120 17:30:00.056736 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:30:00 crc kubenswrapper[4558]: I0120 17:30:00.083684 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:30:00 crc kubenswrapper[4558]: I0120 17:30:00.083884 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"glance-default-internal-api-0\" (UID: \"1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:30:00 crc kubenswrapper[4558]: I0120 17:30:00.084013 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:30:00 crc kubenswrapper[4558]: I0120 17:30:00.084102 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522-logs\") pod \"glance-default-internal-api-0\" (UID: \"1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:30:00 crc kubenswrapper[4558]: I0120 17:30:00.084279 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522-config-data\") pod \"glance-default-internal-api-0\" (UID: \"1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:30:00 crc kubenswrapper[4558]: I0120 17:30:00.084399 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-47znw\" (UniqueName: \"kubernetes.io/projected/1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522-kube-api-access-47znw\") pod \"glance-default-internal-api-0\" (UID: \"1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:30:00 crc kubenswrapper[4558]: I0120 17:30:00.084500 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522-scripts\") pod \"glance-default-internal-api-0\" (UID: \"1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:30:00 crc kubenswrapper[4558]: I0120 17:30:00.085179 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522-logs\") pod \"glance-default-internal-api-0\" (UID: \"1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:30:00 crc kubenswrapper[4558]: I0120 17:30:00.085825 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522\") " 
pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:30:00 crc kubenswrapper[4558]: I0120 17:30:00.086052 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"glance-default-internal-api-0\" (UID: \"1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522\") device mount path \"/mnt/openstack/pv15\"" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:30:00 crc kubenswrapper[4558]: I0120 17:30:00.090561 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:30:00 crc kubenswrapper[4558]: I0120 17:30:00.100782 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522-scripts\") pod \"glance-default-internal-api-0\" (UID: \"1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:30:00 crc kubenswrapper[4558]: I0120 17:30:00.105220 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522-config-data\") pod \"glance-default-internal-api-0\" (UID: \"1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:30:00 crc kubenswrapper[4558]: I0120 17:30:00.106080 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-47znw\" (UniqueName: \"kubernetes.io/projected/1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522-kube-api-access-47znw\") pod \"glance-default-internal-api-0\" (UID: \"1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:30:00 crc kubenswrapper[4558]: I0120 17:30:00.135768 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29482170-8mhr7"] Jan 20 17:30:00 crc kubenswrapper[4558]: I0120 17:30:00.136769 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29482170-8mhr7" Jan 20 17:30:00 crc kubenswrapper[4558]: I0120 17:30:00.141743 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 20 17:30:00 crc kubenswrapper[4558]: I0120 17:30:00.142753 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 20 17:30:00 crc kubenswrapper[4558]: I0120 17:30:00.156969 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29482170-8mhr7"] Jan 20 17:30:00 crc kubenswrapper[4558]: I0120 17:30:00.162266 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"glance-default-internal-api-0\" (UID: \"1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:30:00 crc kubenswrapper[4558]: I0120 17:30:00.290966 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c3f46696-aea2-4531-9e00-687b70338139-config-volume\") pod \"collect-profiles-29482170-8mhr7\" (UID: \"c3f46696-aea2-4531-9e00-687b70338139\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482170-8mhr7" Jan 20 17:30:00 crc kubenswrapper[4558]: I0120 17:30:00.291076 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sbt75\" (UniqueName: \"kubernetes.io/projected/c3f46696-aea2-4531-9e00-687b70338139-kube-api-access-sbt75\") pod \"collect-profiles-29482170-8mhr7\" (UID: \"c3f46696-aea2-4531-9e00-687b70338139\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482170-8mhr7" Jan 20 17:30:00 crc kubenswrapper[4558]: I0120 17:30:00.291132 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c3f46696-aea2-4531-9e00-687b70338139-secret-volume\") pod \"collect-profiles-29482170-8mhr7\" (UID: \"c3f46696-aea2-4531-9e00-687b70338139\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482170-8mhr7" Jan 20 17:30:00 crc kubenswrapper[4558]: I0120 17:30:00.393023 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c3f46696-aea2-4531-9e00-687b70338139-secret-volume\") pod \"collect-profiles-29482170-8mhr7\" (UID: \"c3f46696-aea2-4531-9e00-687b70338139\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482170-8mhr7" Jan 20 17:30:00 crc kubenswrapper[4558]: I0120 17:30:00.393261 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c3f46696-aea2-4531-9e00-687b70338139-config-volume\") pod \"collect-profiles-29482170-8mhr7\" (UID: \"c3f46696-aea2-4531-9e00-687b70338139\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482170-8mhr7" Jan 20 17:30:00 crc kubenswrapper[4558]: I0120 17:30:00.393345 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sbt75\" (UniqueName: \"kubernetes.io/projected/c3f46696-aea2-4531-9e00-687b70338139-kube-api-access-sbt75\") pod \"collect-profiles-29482170-8mhr7\" (UID: 
\"c3f46696-aea2-4531-9e00-687b70338139\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482170-8mhr7" Jan 20 17:30:00 crc kubenswrapper[4558]: I0120 17:30:00.394533 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c3f46696-aea2-4531-9e00-687b70338139-config-volume\") pod \"collect-profiles-29482170-8mhr7\" (UID: \"c3f46696-aea2-4531-9e00-687b70338139\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482170-8mhr7" Jan 20 17:30:00 crc kubenswrapper[4558]: I0120 17:30:00.398471 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c3f46696-aea2-4531-9e00-687b70338139-secret-volume\") pod \"collect-profiles-29482170-8mhr7\" (UID: \"c3f46696-aea2-4531-9e00-687b70338139\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482170-8mhr7" Jan 20 17:30:00 crc kubenswrapper[4558]: I0120 17:30:00.407439 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sbt75\" (UniqueName: \"kubernetes.io/projected/c3f46696-aea2-4531-9e00-687b70338139-kube-api-access-sbt75\") pod \"collect-profiles-29482170-8mhr7\" (UID: \"c3f46696-aea2-4531-9e00-687b70338139\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482170-8mhr7" Jan 20 17:30:00 crc kubenswrapper[4558]: I0120 17:30:00.425813 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:30:00 crc kubenswrapper[4558]: I0120 17:30:00.460606 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29482170-8mhr7" Jan 20 17:30:00 crc kubenswrapper[4558]: I0120 17:30:00.621301 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:30:00 crc kubenswrapper[4558]: I0120 17:30:00.667058 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-vh8wg" event={"ID":"dbbead9c-ffa4-448a-81a0-d1db96d7b5e3","Type":"ContainerStarted","Data":"30636a3c4260263ef23765c51ceb13ef78993f311ae6ee434f35b1a095aaf86e"} Jan 20 17:30:00 crc kubenswrapper[4558]: I0120 17:30:00.689521 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-sync-hbj94" event={"ID":"323fa0e2-7555-4fd5-bd80-65474fa83b69","Type":"ContainerStarted","Data":"a43d1649fd01e64f0e6f6c358fe3b57e49f79cb49bee9b16b8082898e6121cb8"} Jan 20 17:30:00 crc kubenswrapper[4558]: I0120 17:30:00.689646 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-sync-hbj94" event={"ID":"323fa0e2-7555-4fd5-bd80-65474fa83b69","Type":"ContainerStarted","Data":"5a579917f2efdfef0835625ce33001038846fa8ee06077b4f6daadfc801944f0"} Jan 20 17:30:00 crc kubenswrapper[4558]: I0120 17:30:00.705308 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/keystone-bootstrap-vh8wg" podStartSLOduration=2.705291004 podStartE2EDuration="2.705291004s" podCreationTimestamp="2026-01-20 17:29:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:30:00.702463627 +0000 UTC m=+2894.462801604" watchObservedRunningTime="2026-01-20 17:30:00.705291004 +0000 UTC m=+2894.465628970" Jan 20 17:30:00 crc kubenswrapper[4558]: I0120 
17:30:00.712315 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"b9f98dae-f452-470a-b189-73bf68f60a83","Type":"ContainerStarted","Data":"de2595c1be910c8baab53452ee69cdb3f1c2c8f4a017701c9c57a3c424643584"} Jan 20 17:30:00 crc kubenswrapper[4558]: I0120 17:30:00.716518 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-sync-8rp7v" event={"ID":"a9c95d35-61c0-4abb-92a1-8b33d73139ab","Type":"ContainerStarted","Data":"aba10c7094d84cccbde149d948a3e60389448792ee9a5d96fd2796f7883a64b4"} Jan 20 17:30:00 crc kubenswrapper[4558]: I0120 17:30:00.716559 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-sync-8rp7v" event={"ID":"a9c95d35-61c0-4abb-92a1-8b33d73139ab","Type":"ContainerStarted","Data":"6f01eb42ffffb39d62072d3631f0125b9bc4fa9080cb73705ac40777ab5f29f6"} Jan 20 17:30:00 crc kubenswrapper[4558]: I0120 17:30:00.717805 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-sync-kd8pp" event={"ID":"54807f10-de72-4664-a757-a1c048e4d5d7","Type":"ContainerStarted","Data":"875266bd4148c109949f5a13dd6c799264df1f183288f54c66a32c7496e55160"} Jan 20 17:30:00 crc kubenswrapper[4558]: I0120 17:30:00.719358 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-sync-b88r8" event={"ID":"838617e6-6285-4b18-9fc1-022ec5989dd2","Type":"ContainerStarted","Data":"76b128f3cb64faa63f2cd117e4ba208a86aca8bf3e631936793f60b7e1161a97"} Jan 20 17:30:00 crc kubenswrapper[4558]: I0120 17:30:00.719383 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-sync-b88r8" event={"ID":"838617e6-6285-4b18-9fc1-022ec5989dd2","Type":"ContainerStarted","Data":"c3f6c16445b5135d6982612eae3d089c67687884468683454427ddc78dd48630"} Jan 20 17:30:00 crc kubenswrapper[4558]: I0120 17:30:00.763644 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/neutron-db-sync-hbj94" podStartSLOduration=2.763624719 podStartE2EDuration="2.763624719s" podCreationTimestamp="2026-01-20 17:29:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:30:00.741665869 +0000 UTC m=+2894.502003827" watchObservedRunningTime="2026-01-20 17:30:00.763624719 +0000 UTC m=+2894.523962676" Jan 20 17:30:00 crc kubenswrapper[4558]: I0120 17:30:00.771250 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/placement-db-sync-b88r8" podStartSLOduration=1.7712426140000002 podStartE2EDuration="1.771242614s" podCreationTimestamp="2026-01-20 17:29:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:30:00.761560628 +0000 UTC m=+2894.521898595" watchObservedRunningTime="2026-01-20 17:30:00.771242614 +0000 UTC m=+2894.531580580" Jan 20 17:30:00 crc kubenswrapper[4558]: I0120 17:30:00.779117 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-db-sync-8rp7v" podStartSLOduration=2.7790956700000002 podStartE2EDuration="2.77909567s" podCreationTimestamp="2026-01-20 17:29:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:30:00.778887297 +0000 UTC m=+2894.539225264" 
watchObservedRunningTime="2026-01-20 17:30:00.77909567 +0000 UTC m=+2894.539433637" Jan 20 17:30:01 crc kubenswrapper[4558]: I0120 17:30:01.175722 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:30:01 crc kubenswrapper[4558]: I0120 17:30:01.205547 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29482170-8mhr7"] Jan 20 17:30:01 crc kubenswrapper[4558]: W0120 17:30:01.218099 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc3f46696_aea2_4531_9e00_687b70338139.slice/crio-4d19c40550d7351847bd41729bf04a1b9990d05ef342b0520484c8a49a684fbb WatchSource:0}: Error finding container 4d19c40550d7351847bd41729bf04a1b9990d05ef342b0520484c8a49a684fbb: Status 404 returned error can't find the container with id 4d19c40550d7351847bd41729bf04a1b9990d05ef342b0520484c8a49a684fbb Jan 20 17:30:01 crc kubenswrapper[4558]: I0120 17:30:01.735551 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29482170-8mhr7" event={"ID":"c3f46696-aea2-4531-9e00-687b70338139","Type":"ContainerStarted","Data":"10177ca11b34dc2db0f8a49d2aa24a6836504e49e9461d83bf7e65d004666d86"} Jan 20 17:30:01 crc kubenswrapper[4558]: I0120 17:30:01.735888 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29482170-8mhr7" event={"ID":"c3f46696-aea2-4531-9e00-687b70338139","Type":"ContainerStarted","Data":"4d19c40550d7351847bd41729bf04a1b9990d05ef342b0520484c8a49a684fbb"} Jan 20 17:30:01 crc kubenswrapper[4558]: I0120 17:30:01.750348 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522","Type":"ContainerStarted","Data":"ab755648fa19e7361557f82ad97ffdfe31f54dd1bc8bbb6e6e60547c52f87841"} Jan 20 17:30:01 crc kubenswrapper[4558]: I0120 17:30:01.761754 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-sync-kd8pp" event={"ID":"54807f10-de72-4664-a757-a1c048e4d5d7","Type":"ContainerStarted","Data":"6c4463c91500e6f60b0e412648172b7bc947cb8a2b79b8f1016e559b589b2165"} Jan 20 17:30:01 crc kubenswrapper[4558]: I0120 17:30:01.764181 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29482170-8mhr7" podStartSLOduration=1.764144994 podStartE2EDuration="1.764144994s" podCreationTimestamp="2026-01-20 17:30:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:30:01.756288962 +0000 UTC m=+2895.516626929" watchObservedRunningTime="2026-01-20 17:30:01.764144994 +0000 UTC m=+2895.524482961" Jan 20 17:30:01 crc kubenswrapper[4558]: I0120 17:30:01.765934 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"2f28be37-ef13-48a6-9b8f-84544328e73b","Type":"ContainerStarted","Data":"447c2dafa20acb9a88ee521cc25e4c2c064494b9d4033020a8d42dfd079675ac"} Jan 20 17:30:01 crc kubenswrapper[4558]: I0120 17:30:01.765965 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" 
event={"ID":"2f28be37-ef13-48a6-9b8f-84544328e73b","Type":"ContainerStarted","Data":"dadae09c02e20d14661a9b505da725dc17d4da7261642d58bdf65a0c5cfbe1d8"} Jan 20 17:30:01 crc kubenswrapper[4558]: I0120 17:30:01.772111 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"b9f98dae-f452-470a-b189-73bf68f60a83","Type":"ContainerStarted","Data":"6428db4c2b901272e1858dd44864d09cb610d55279efa36a870c1deaa8640299"} Jan 20 17:30:01 crc kubenswrapper[4558]: I0120 17:30:01.780401 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/cinder-db-sync-kd8pp" podStartSLOduration=3.780388377 podStartE2EDuration="3.780388377s" podCreationTimestamp="2026-01-20 17:29:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:30:01.778124441 +0000 UTC m=+2895.538462408" watchObservedRunningTime="2026-01-20 17:30:01.780388377 +0000 UTC m=+2895.540726344" Jan 20 17:30:02 crc kubenswrapper[4558]: I0120 17:30:02.784835 4558 generic.go:334] "Generic (PLEG): container finished" podID="a9c95d35-61c0-4abb-92a1-8b33d73139ab" containerID="aba10c7094d84cccbde149d948a3e60389448792ee9a5d96fd2796f7883a64b4" exitCode=0 Jan 20 17:30:02 crc kubenswrapper[4558]: I0120 17:30:02.784953 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-sync-8rp7v" event={"ID":"a9c95d35-61c0-4abb-92a1-8b33d73139ab","Type":"ContainerDied","Data":"aba10c7094d84cccbde149d948a3e60389448792ee9a5d96fd2796f7883a64b4"} Jan 20 17:30:02 crc kubenswrapper[4558]: I0120 17:30:02.788023 4558 generic.go:334] "Generic (PLEG): container finished" podID="838617e6-6285-4b18-9fc1-022ec5989dd2" containerID="76b128f3cb64faa63f2cd117e4ba208a86aca8bf3e631936793f60b7e1161a97" exitCode=0 Jan 20 17:30:02 crc kubenswrapper[4558]: I0120 17:30:02.788131 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-sync-b88r8" event={"ID":"838617e6-6285-4b18-9fc1-022ec5989dd2","Type":"ContainerDied","Data":"76b128f3cb64faa63f2cd117e4ba208a86aca8bf3e631936793f60b7e1161a97"} Jan 20 17:30:02 crc kubenswrapper[4558]: I0120 17:30:02.790212 4558 generic.go:334] "Generic (PLEG): container finished" podID="dbbead9c-ffa4-448a-81a0-d1db96d7b5e3" containerID="30636a3c4260263ef23765c51ceb13ef78993f311ae6ee434f35b1a095aaf86e" exitCode=0 Jan 20 17:30:02 crc kubenswrapper[4558]: I0120 17:30:02.790291 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-vh8wg" event={"ID":"dbbead9c-ffa4-448a-81a0-d1db96d7b5e3","Type":"ContainerDied","Data":"30636a3c4260263ef23765c51ceb13ef78993f311ae6ee434f35b1a095aaf86e"} Jan 20 17:30:02 crc kubenswrapper[4558]: I0120 17:30:02.793973 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"2f28be37-ef13-48a6-9b8f-84544328e73b","Type":"ContainerStarted","Data":"3abd92f8f694eb696e46d0fae2d93d85bd2a28f5cba4d7f01af8a85470a13be8"} Jan 20 17:30:02 crc kubenswrapper[4558]: I0120 17:30:02.798007 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"b9f98dae-f452-470a-b189-73bf68f60a83","Type":"ContainerStarted","Data":"a4534babe827dc538ef40509c4b12b441fb5d5753bbac7c36e30efffc9ded647"} Jan 20 17:30:02 crc kubenswrapper[4558]: I0120 17:30:02.800118 4558 generic.go:334] "Generic (PLEG): container finished" 
podID="c3f46696-aea2-4531-9e00-687b70338139" containerID="10177ca11b34dc2db0f8a49d2aa24a6836504e49e9461d83bf7e65d004666d86" exitCode=0 Jan 20 17:30:02 crc kubenswrapper[4558]: I0120 17:30:02.800197 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29482170-8mhr7" event={"ID":"c3f46696-aea2-4531-9e00-687b70338139","Type":"ContainerDied","Data":"10177ca11b34dc2db0f8a49d2aa24a6836504e49e9461d83bf7e65d004666d86"} Jan 20 17:30:02 crc kubenswrapper[4558]: I0120 17:30:02.803424 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522","Type":"ContainerStarted","Data":"2ce860fea0b9ccd065e20871bc948a224bcf38e35dee37e2a55a51a09f5622b4"} Jan 20 17:30:02 crc kubenswrapper[4558]: I0120 17:30:02.803490 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522","Type":"ContainerStarted","Data":"68350012dda08fc4af071c135d146ae203cfd2cb7af5e9c8dabdd6c9d0241c95"} Jan 20 17:30:02 crc kubenswrapper[4558]: I0120 17:30:02.833114 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-internal-api-0" podStartSLOduration=3.833098436 podStartE2EDuration="3.833098436s" podCreationTimestamp="2026-01-20 17:29:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:30:02.825766749 +0000 UTC m=+2896.586104717" watchObservedRunningTime="2026-01-20 17:30:02.833098436 +0000 UTC m=+2896.593436403" Jan 20 17:30:02 crc kubenswrapper[4558]: I0120 17:30:02.888359 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-external-api-0" podStartSLOduration=3.888340247 podStartE2EDuration="3.888340247s" podCreationTimestamp="2026-01-20 17:29:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:30:02.87722113 +0000 UTC m=+2896.637559097" watchObservedRunningTime="2026-01-20 17:30:02.888340247 +0000 UTC m=+2896.648678215" Jan 20 17:30:03 crc kubenswrapper[4558]: I0120 17:30:03.815447 4558 generic.go:334] "Generic (PLEG): container finished" podID="54807f10-de72-4664-a757-a1c048e4d5d7" containerID="6c4463c91500e6f60b0e412648172b7bc947cb8a2b79b8f1016e559b589b2165" exitCode=0 Jan 20 17:30:03 crc kubenswrapper[4558]: I0120 17:30:03.815531 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-sync-kd8pp" event={"ID":"54807f10-de72-4664-a757-a1c048e4d5d7","Type":"ContainerDied","Data":"6c4463c91500e6f60b0e412648172b7bc947cb8a2b79b8f1016e559b589b2165"} Jan 20 17:30:03 crc kubenswrapper[4558]: I0120 17:30:03.820285 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"b9f98dae-f452-470a-b189-73bf68f60a83","Type":"ContainerStarted","Data":"0170e31fb16386b6d672e9aa3018bdf8494fb2367332d154bae35a48b701ece9"} Jan 20 17:30:04 crc kubenswrapper[4558]: I0120 17:30:04.172316 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29482170-8mhr7" Jan 20 17:30:04 crc kubenswrapper[4558]: I0120 17:30:04.281573 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29482125-cn6z6"] Jan 20 17:30:04 crc kubenswrapper[4558]: I0120 17:30:04.289218 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29482125-cn6z6"] Jan 20 17:30:04 crc kubenswrapper[4558]: I0120 17:30:04.305218 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sbt75\" (UniqueName: \"kubernetes.io/projected/c3f46696-aea2-4531-9e00-687b70338139-kube-api-access-sbt75\") pod \"c3f46696-aea2-4531-9e00-687b70338139\" (UID: \"c3f46696-aea2-4531-9e00-687b70338139\") " Jan 20 17:30:04 crc kubenswrapper[4558]: I0120 17:30:04.305471 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c3f46696-aea2-4531-9e00-687b70338139-secret-volume\") pod \"c3f46696-aea2-4531-9e00-687b70338139\" (UID: \"c3f46696-aea2-4531-9e00-687b70338139\") " Jan 20 17:30:04 crc kubenswrapper[4558]: I0120 17:30:04.305549 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c3f46696-aea2-4531-9e00-687b70338139-config-volume\") pod \"c3f46696-aea2-4531-9e00-687b70338139\" (UID: \"c3f46696-aea2-4531-9e00-687b70338139\") " Jan 20 17:30:04 crc kubenswrapper[4558]: I0120 17:30:04.306642 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c3f46696-aea2-4531-9e00-687b70338139-config-volume" (OuterVolumeSpecName: "config-volume") pod "c3f46696-aea2-4531-9e00-687b70338139" (UID: "c3f46696-aea2-4531-9e00-687b70338139"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:30:04 crc kubenswrapper[4558]: I0120 17:30:04.310769 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c3f46696-aea2-4531-9e00-687b70338139-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "c3f46696-aea2-4531-9e00-687b70338139" (UID: "c3f46696-aea2-4531-9e00-687b70338139"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:30:04 crc kubenswrapper[4558]: I0120 17:30:04.310827 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c3f46696-aea2-4531-9e00-687b70338139-kube-api-access-sbt75" (OuterVolumeSpecName: "kube-api-access-sbt75") pod "c3f46696-aea2-4531-9e00-687b70338139" (UID: "c3f46696-aea2-4531-9e00-687b70338139"). InnerVolumeSpecName "kube-api-access-sbt75". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:30:04 crc kubenswrapper[4558]: I0120 17:30:04.335075 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-db-sync-8rp7v" Jan 20 17:30:04 crc kubenswrapper[4558]: I0120 17:30:04.347636 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-db-sync-b88r8" Jan 20 17:30:04 crc kubenswrapper[4558]: I0120 17:30:04.354857 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-vh8wg" Jan 20 17:30:04 crc kubenswrapper[4558]: I0120 17:30:04.409205 4558 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c3f46696-aea2-4531-9e00-687b70338139-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 20 17:30:04 crc kubenswrapper[4558]: I0120 17:30:04.409239 4558 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c3f46696-aea2-4531-9e00-687b70338139-config-volume\") on node \"crc\" DevicePath \"\"" Jan 20 17:30:04 crc kubenswrapper[4558]: I0120 17:30:04.409258 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sbt75\" (UniqueName: \"kubernetes.io/projected/c3f46696-aea2-4531-9e00-687b70338139-kube-api-access-sbt75\") on node \"crc\" DevicePath \"\"" Jan 20 17:30:04 crc kubenswrapper[4558]: I0120 17:30:04.511356 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/838617e6-6285-4b18-9fc1-022ec5989dd2-logs\") pod \"838617e6-6285-4b18-9fc1-022ec5989dd2\" (UID: \"838617e6-6285-4b18-9fc1-022ec5989dd2\") " Jan 20 17:30:04 crc kubenswrapper[4558]: I0120 17:30:04.511499 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bnzln\" (UniqueName: \"kubernetes.io/projected/dbbead9c-ffa4-448a-81a0-d1db96d7b5e3-kube-api-access-bnzln\") pod \"dbbead9c-ffa4-448a-81a0-d1db96d7b5e3\" (UID: \"dbbead9c-ffa4-448a-81a0-d1db96d7b5e3\") " Jan 20 17:30:04 crc kubenswrapper[4558]: I0120 17:30:04.511553 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dbbead9c-ffa4-448a-81a0-d1db96d7b5e3-scripts\") pod \"dbbead9c-ffa4-448a-81a0-d1db96d7b5e3\" (UID: \"dbbead9c-ffa4-448a-81a0-d1db96d7b5e3\") " Jan 20 17:30:04 crc kubenswrapper[4558]: I0120 17:30:04.511579 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dbbead9c-ffa4-448a-81a0-d1db96d7b5e3-combined-ca-bundle\") pod \"dbbead9c-ffa4-448a-81a0-d1db96d7b5e3\" (UID: \"dbbead9c-ffa4-448a-81a0-d1db96d7b5e3\") " Jan 20 17:30:04 crc kubenswrapper[4558]: I0120 17:30:04.511623 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/838617e6-6285-4b18-9fc1-022ec5989dd2-combined-ca-bundle\") pod \"838617e6-6285-4b18-9fc1-022ec5989dd2\" (UID: \"838617e6-6285-4b18-9fc1-022ec5989dd2\") " Jan 20 17:30:04 crc kubenswrapper[4558]: I0120 17:30:04.511654 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/838617e6-6285-4b18-9fc1-022ec5989dd2-logs" (OuterVolumeSpecName: "logs") pod "838617e6-6285-4b18-9fc1-022ec5989dd2" (UID: "838617e6-6285-4b18-9fc1-022ec5989dd2"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:30:04 crc kubenswrapper[4558]: I0120 17:30:04.512470 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pzrkq\" (UniqueName: \"kubernetes.io/projected/838617e6-6285-4b18-9fc1-022ec5989dd2-kube-api-access-pzrkq\") pod \"838617e6-6285-4b18-9fc1-022ec5989dd2\" (UID: \"838617e6-6285-4b18-9fc1-022ec5989dd2\") " Jan 20 17:30:04 crc kubenswrapper[4558]: I0120 17:30:04.512523 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9c95d35-61c0-4abb-92a1-8b33d73139ab-combined-ca-bundle\") pod \"a9c95d35-61c0-4abb-92a1-8b33d73139ab\" (UID: \"a9c95d35-61c0-4abb-92a1-8b33d73139ab\") " Jan 20 17:30:04 crc kubenswrapper[4558]: I0120 17:30:04.512557 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/dbbead9c-ffa4-448a-81a0-d1db96d7b5e3-fernet-keys\") pod \"dbbead9c-ffa4-448a-81a0-d1db96d7b5e3\" (UID: \"dbbead9c-ffa4-448a-81a0-d1db96d7b5e3\") " Jan 20 17:30:04 crc kubenswrapper[4558]: I0120 17:30:04.512586 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/dbbead9c-ffa4-448a-81a0-d1db96d7b5e3-credential-keys\") pod \"dbbead9c-ffa4-448a-81a0-d1db96d7b5e3\" (UID: \"dbbead9c-ffa4-448a-81a0-d1db96d7b5e3\") " Jan 20 17:30:04 crc kubenswrapper[4558]: I0120 17:30:04.512653 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dbbead9c-ffa4-448a-81a0-d1db96d7b5e3-config-data\") pod \"dbbead9c-ffa4-448a-81a0-d1db96d7b5e3\" (UID: \"dbbead9c-ffa4-448a-81a0-d1db96d7b5e3\") " Jan 20 17:30:04 crc kubenswrapper[4558]: I0120 17:30:04.512758 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a9c95d35-61c0-4abb-92a1-8b33d73139ab-db-sync-config-data\") pod \"a9c95d35-61c0-4abb-92a1-8b33d73139ab\" (UID: \"a9c95d35-61c0-4abb-92a1-8b33d73139ab\") " Jan 20 17:30:04 crc kubenswrapper[4558]: I0120 17:30:04.512840 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/838617e6-6285-4b18-9fc1-022ec5989dd2-scripts\") pod \"838617e6-6285-4b18-9fc1-022ec5989dd2\" (UID: \"838617e6-6285-4b18-9fc1-022ec5989dd2\") " Jan 20 17:30:04 crc kubenswrapper[4558]: I0120 17:30:04.512879 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/838617e6-6285-4b18-9fc1-022ec5989dd2-config-data\") pod \"838617e6-6285-4b18-9fc1-022ec5989dd2\" (UID: \"838617e6-6285-4b18-9fc1-022ec5989dd2\") " Jan 20 17:30:04 crc kubenswrapper[4558]: I0120 17:30:04.512937 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f788v\" (UniqueName: \"kubernetes.io/projected/a9c95d35-61c0-4abb-92a1-8b33d73139ab-kube-api-access-f788v\") pod \"a9c95d35-61c0-4abb-92a1-8b33d73139ab\" (UID: \"a9c95d35-61c0-4abb-92a1-8b33d73139ab\") " Jan 20 17:30:04 crc kubenswrapper[4558]: I0120 17:30:04.514121 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/838617e6-6285-4b18-9fc1-022ec5989dd2-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:30:04 crc kubenswrapper[4558]: I0120 17:30:04.516915 4558 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dbbead9c-ffa4-448a-81a0-d1db96d7b5e3-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "dbbead9c-ffa4-448a-81a0-d1db96d7b5e3" (UID: "dbbead9c-ffa4-448a-81a0-d1db96d7b5e3"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:30:04 crc kubenswrapper[4558]: I0120 17:30:04.517282 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/838617e6-6285-4b18-9fc1-022ec5989dd2-kube-api-access-pzrkq" (OuterVolumeSpecName: "kube-api-access-pzrkq") pod "838617e6-6285-4b18-9fc1-022ec5989dd2" (UID: "838617e6-6285-4b18-9fc1-022ec5989dd2"). InnerVolumeSpecName "kube-api-access-pzrkq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:30:04 crc kubenswrapper[4558]: I0120 17:30:04.517582 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dbbead9c-ffa4-448a-81a0-d1db96d7b5e3-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "dbbead9c-ffa4-448a-81a0-d1db96d7b5e3" (UID: "dbbead9c-ffa4-448a-81a0-d1db96d7b5e3"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:30:04 crc kubenswrapper[4558]: I0120 17:30:04.518084 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dbbead9c-ffa4-448a-81a0-d1db96d7b5e3-scripts" (OuterVolumeSpecName: "scripts") pod "dbbead9c-ffa4-448a-81a0-d1db96d7b5e3" (UID: "dbbead9c-ffa4-448a-81a0-d1db96d7b5e3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:30:04 crc kubenswrapper[4558]: I0120 17:30:04.518418 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/838617e6-6285-4b18-9fc1-022ec5989dd2-scripts" (OuterVolumeSpecName: "scripts") pod "838617e6-6285-4b18-9fc1-022ec5989dd2" (UID: "838617e6-6285-4b18-9fc1-022ec5989dd2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:30:04 crc kubenswrapper[4558]: I0120 17:30:04.518478 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dbbead9c-ffa4-448a-81a0-d1db96d7b5e3-kube-api-access-bnzln" (OuterVolumeSpecName: "kube-api-access-bnzln") pod "dbbead9c-ffa4-448a-81a0-d1db96d7b5e3" (UID: "dbbead9c-ffa4-448a-81a0-d1db96d7b5e3"). InnerVolumeSpecName "kube-api-access-bnzln". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:30:04 crc kubenswrapper[4558]: I0120 17:30:04.522055 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9c95d35-61c0-4abb-92a1-8b33d73139ab-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "a9c95d35-61c0-4abb-92a1-8b33d73139ab" (UID: "a9c95d35-61c0-4abb-92a1-8b33d73139ab"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:30:04 crc kubenswrapper[4558]: I0120 17:30:04.524985 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a9c95d35-61c0-4abb-92a1-8b33d73139ab-kube-api-access-f788v" (OuterVolumeSpecName: "kube-api-access-f788v") pod "a9c95d35-61c0-4abb-92a1-8b33d73139ab" (UID: "a9c95d35-61c0-4abb-92a1-8b33d73139ab"). InnerVolumeSpecName "kube-api-access-f788v". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:30:04 crc kubenswrapper[4558]: I0120 17:30:04.539807 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/838617e6-6285-4b18-9fc1-022ec5989dd2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "838617e6-6285-4b18-9fc1-022ec5989dd2" (UID: "838617e6-6285-4b18-9fc1-022ec5989dd2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:30:04 crc kubenswrapper[4558]: I0120 17:30:04.541919 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dbbead9c-ffa4-448a-81a0-d1db96d7b5e3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dbbead9c-ffa4-448a-81a0-d1db96d7b5e3" (UID: "dbbead9c-ffa4-448a-81a0-d1db96d7b5e3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:30:04 crc kubenswrapper[4558]: I0120 17:30:04.544545 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dbbead9c-ffa4-448a-81a0-d1db96d7b5e3-config-data" (OuterVolumeSpecName: "config-data") pod "dbbead9c-ffa4-448a-81a0-d1db96d7b5e3" (UID: "dbbead9c-ffa4-448a-81a0-d1db96d7b5e3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:30:04 crc kubenswrapper[4558]: I0120 17:30:04.546522 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9c95d35-61c0-4abb-92a1-8b33d73139ab-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a9c95d35-61c0-4abb-92a1-8b33d73139ab" (UID: "a9c95d35-61c0-4abb-92a1-8b33d73139ab"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:30:04 crc kubenswrapper[4558]: I0120 17:30:04.548100 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/838617e6-6285-4b18-9fc1-022ec5989dd2-config-data" (OuterVolumeSpecName: "config-data") pod "838617e6-6285-4b18-9fc1-022ec5989dd2" (UID: "838617e6-6285-4b18-9fc1-022ec5989dd2"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:30:04 crc kubenswrapper[4558]: I0120 17:30:04.581810 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1042596b-4f2c-4da7-9bfb-05d2e8be6b80" path="/var/lib/kubelet/pods/1042596b-4f2c-4da7-9bfb-05d2e8be6b80/volumes" Jan 20 17:30:04 crc kubenswrapper[4558]: I0120 17:30:04.615620 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pzrkq\" (UniqueName: \"kubernetes.io/projected/838617e6-6285-4b18-9fc1-022ec5989dd2-kube-api-access-pzrkq\") on node \"crc\" DevicePath \"\"" Jan 20 17:30:04 crc kubenswrapper[4558]: I0120 17:30:04.615659 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9c95d35-61c0-4abb-92a1-8b33d73139ab-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:30:04 crc kubenswrapper[4558]: I0120 17:30:04.615671 4558 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/dbbead9c-ffa4-448a-81a0-d1db96d7b5e3-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:30:04 crc kubenswrapper[4558]: I0120 17:30:04.615683 4558 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/dbbead9c-ffa4-448a-81a0-d1db96d7b5e3-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:30:04 crc kubenswrapper[4558]: I0120 17:30:04.615693 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dbbead9c-ffa4-448a-81a0-d1db96d7b5e3-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:30:04 crc kubenswrapper[4558]: I0120 17:30:04.615706 4558 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a9c95d35-61c0-4abb-92a1-8b33d73139ab-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:30:04 crc kubenswrapper[4558]: I0120 17:30:04.615719 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/838617e6-6285-4b18-9fc1-022ec5989dd2-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:30:04 crc kubenswrapper[4558]: I0120 17:30:04.615728 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/838617e6-6285-4b18-9fc1-022ec5989dd2-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:30:04 crc kubenswrapper[4558]: I0120 17:30:04.615736 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f788v\" (UniqueName: \"kubernetes.io/projected/a9c95d35-61c0-4abb-92a1-8b33d73139ab-kube-api-access-f788v\") on node \"crc\" DevicePath \"\"" Jan 20 17:30:04 crc kubenswrapper[4558]: I0120 17:30:04.615748 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bnzln\" (UniqueName: \"kubernetes.io/projected/dbbead9c-ffa4-448a-81a0-d1db96d7b5e3-kube-api-access-bnzln\") on node \"crc\" DevicePath \"\"" Jan 20 17:30:04 crc kubenswrapper[4558]: I0120 17:30:04.615756 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dbbead9c-ffa4-448a-81a0-d1db96d7b5e3-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:30:04 crc kubenswrapper[4558]: I0120 17:30:04.615765 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dbbead9c-ffa4-448a-81a0-d1db96d7b5e3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:30:04 crc 
kubenswrapper[4558]: I0120 17:30:04.615781 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/838617e6-6285-4b18-9fc1-022ec5989dd2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:30:04 crc kubenswrapper[4558]: I0120 17:30:04.835940 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-sync-8rp7v" event={"ID":"a9c95d35-61c0-4abb-92a1-8b33d73139ab","Type":"ContainerDied","Data":"6f01eb42ffffb39d62072d3631f0125b9bc4fa9080cb73705ac40777ab5f29f6"} Jan 20 17:30:04 crc kubenswrapper[4558]: I0120 17:30:04.836003 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6f01eb42ffffb39d62072d3631f0125b9bc4fa9080cb73705ac40777ab5f29f6" Jan 20 17:30:04 crc kubenswrapper[4558]: I0120 17:30:04.836540 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-db-sync-8rp7v" Jan 20 17:30:04 crc kubenswrapper[4558]: I0120 17:30:04.838394 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-db-sync-b88r8" Jan 20 17:30:04 crc kubenswrapper[4558]: I0120 17:30:04.838415 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-sync-b88r8" event={"ID":"838617e6-6285-4b18-9fc1-022ec5989dd2","Type":"ContainerDied","Data":"c3f6c16445b5135d6982612eae3d089c67687884468683454427ddc78dd48630"} Jan 20 17:30:04 crc kubenswrapper[4558]: I0120 17:30:04.838453 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c3f6c16445b5135d6982612eae3d089c67687884468683454427ddc78dd48630" Jan 20 17:30:04 crc kubenswrapper[4558]: I0120 17:30:04.840348 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-vh8wg" event={"ID":"dbbead9c-ffa4-448a-81a0-d1db96d7b5e3","Type":"ContainerDied","Data":"e6eddfdebe67de2e2a1df02569126c3a804d051d83edeb95725c6cba748d7022"} Jan 20 17:30:04 crc kubenswrapper[4558]: I0120 17:30:04.840388 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e6eddfdebe67de2e2a1df02569126c3a804d051d83edeb95725c6cba748d7022" Jan 20 17:30:04 crc kubenswrapper[4558]: I0120 17:30:04.840447 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-vh8wg" Jan 20 17:30:04 crc kubenswrapper[4558]: I0120 17:30:04.843991 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29482170-8mhr7" Jan 20 17:30:04 crc kubenswrapper[4558]: I0120 17:30:04.844699 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29482170-8mhr7" event={"ID":"c3f46696-aea2-4531-9e00-687b70338139","Type":"ContainerDied","Data":"4d19c40550d7351847bd41729bf04a1b9990d05ef342b0520484c8a49a684fbb"} Jan 20 17:30:04 crc kubenswrapper[4558]: I0120 17:30:04.844735 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4d19c40550d7351847bd41729bf04a1b9990d05ef342b0520484c8a49a684fbb" Jan 20 17:30:04 crc kubenswrapper[4558]: I0120 17:30:04.927221 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-vh8wg"] Jan 20 17:30:04 crc kubenswrapper[4558]: I0120 17:30:04.940026 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-vh8wg"] Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.044183 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-worker-5bb9c47bc7-ct6jf"] Jan 20 17:30:05 crc kubenswrapper[4558]: E0120 17:30:05.044552 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9c95d35-61c0-4abb-92a1-8b33d73139ab" containerName="barbican-db-sync" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.044571 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9c95d35-61c0-4abb-92a1-8b33d73139ab" containerName="barbican-db-sync" Jan 20 17:30:05 crc kubenswrapper[4558]: E0120 17:30:05.044589 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c3f46696-aea2-4531-9e00-687b70338139" containerName="collect-profiles" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.044596 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c3f46696-aea2-4531-9e00-687b70338139" containerName="collect-profiles" Jan 20 17:30:05 crc kubenswrapper[4558]: E0120 17:30:05.044607 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dbbead9c-ffa4-448a-81a0-d1db96d7b5e3" containerName="keystone-bootstrap" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.044613 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="dbbead9c-ffa4-448a-81a0-d1db96d7b5e3" containerName="keystone-bootstrap" Jan 20 17:30:05 crc kubenswrapper[4558]: E0120 17:30:05.044628 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="838617e6-6285-4b18-9fc1-022ec5989dd2" containerName="placement-db-sync" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.044633 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="838617e6-6285-4b18-9fc1-022ec5989dd2" containerName="placement-db-sync" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.044801 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="dbbead9c-ffa4-448a-81a0-d1db96d7b5e3" containerName="keystone-bootstrap" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.044812 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c3f46696-aea2-4531-9e00-687b70338139" containerName="collect-profiles" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.044821 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a9c95d35-61c0-4abb-92a1-8b33d73139ab" containerName="barbican-db-sync" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.044833 4558 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="838617e6-6285-4b18-9fc1-022ec5989dd2" containerName="placement-db-sync" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.045652 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-5bb9c47bc7-ct6jf" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.055937 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-worker-config-data" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.056085 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-config-data" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.056269 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-barbican-dockercfg-nww48" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.077241 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-zwldh"] Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.078627 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-zwldh" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.084448 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-config-data" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.088841 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.089034 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"osp-secret" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.089213 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-keystone-dockercfg-8n4lx" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.091585 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-scripts" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.108375 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/placement-76cff77fc4-bkwrr"] Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.110201 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-65b4ff8654-bj52d"] Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.110783 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-76cff77fc4-bkwrr" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.113753 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-65b4ff8654-bj52d" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.119224 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"placement-scripts" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.119329 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"placement-config-data" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.119374 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"placement-placement-dockercfg-j7r59" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.123226 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-worker-5bb9c47bc7-ct6jf"] Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.126512 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-keystone-listener-config-data" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.205326 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-zwldh"] Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.216879 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-76cff77fc4-bkwrr"] Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.232144 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7c82345-58f6-4542-9483-604f1ad2b5f4-config-data\") pod \"placement-76cff77fc4-bkwrr\" (UID: \"e7c82345-58f6-4542-9483-604f1ad2b5f4\") " pod="openstack-kuttl-tests/placement-76cff77fc4-bkwrr" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.232218 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fbba86e7-87c6-4350-8bbd-565ba9abfe68-logs\") pod \"barbican-worker-5bb9c47bc7-ct6jf\" (UID: \"fbba86e7-87c6-4350-8bbd-565ba9abfe68\") " pod="openstack-kuttl-tests/barbican-worker-5bb9c47bc7-ct6jf" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.232270 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fce7f5af-9139-4f50-a1c9-b74477ed188f-combined-ca-bundle\") pod \"keystone-bootstrap-zwldh\" (UID: \"fce7f5af-9139-4f50-a1c9-b74477ed188f\") " pod="openstack-kuttl-tests/keystone-bootstrap-zwldh" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.232303 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbba86e7-87c6-4350-8bbd-565ba9abfe68-combined-ca-bundle\") pod \"barbican-worker-5bb9c47bc7-ct6jf\" (UID: \"fbba86e7-87c6-4350-8bbd-565ba9abfe68\") " pod="openstack-kuttl-tests/barbican-worker-5bb9c47bc7-ct6jf" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.232323 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/113d7999-28c3-4738-aa99-bd09c1880299-config-data\") pod \"barbican-keystone-listener-65b4ff8654-bj52d\" (UID: \"113d7999-28c3-4738-aa99-bd09c1880299\") " pod="openstack-kuttl-tests/barbican-keystone-listener-65b4ff8654-bj52d" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.232344 4558 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fce7f5af-9139-4f50-a1c9-b74477ed188f-scripts\") pod \"keystone-bootstrap-zwldh\" (UID: \"fce7f5af-9139-4f50-a1c9-b74477ed188f\") " pod="openstack-kuttl-tests/keystone-bootstrap-zwldh" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.232364 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/113d7999-28c3-4738-aa99-bd09c1880299-combined-ca-bundle\") pod \"barbican-keystone-listener-65b4ff8654-bj52d\" (UID: \"113d7999-28c3-4738-aa99-bd09c1880299\") " pod="openstack-kuttl-tests/barbican-keystone-listener-65b4ff8654-bj52d" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.232391 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gmsqz\" (UniqueName: \"kubernetes.io/projected/fbba86e7-87c6-4350-8bbd-565ba9abfe68-kube-api-access-gmsqz\") pod \"barbican-worker-5bb9c47bc7-ct6jf\" (UID: \"fbba86e7-87c6-4350-8bbd-565ba9abfe68\") " pod="openstack-kuttl-tests/barbican-worker-5bb9c47bc7-ct6jf" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.232407 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7c82345-58f6-4542-9483-604f1ad2b5f4-combined-ca-bundle\") pod \"placement-76cff77fc4-bkwrr\" (UID: \"e7c82345-58f6-4542-9483-604f1ad2b5f4\") " pod="openstack-kuttl-tests/placement-76cff77fc4-bkwrr" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.232433 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/fce7f5af-9139-4f50-a1c9-b74477ed188f-credential-keys\") pod \"keystone-bootstrap-zwldh\" (UID: \"fce7f5af-9139-4f50-a1c9-b74477ed188f\") " pod="openstack-kuttl-tests/keystone-bootstrap-zwldh" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.232456 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e7c82345-58f6-4542-9483-604f1ad2b5f4-logs\") pod \"placement-76cff77fc4-bkwrr\" (UID: \"e7c82345-58f6-4542-9483-604f1ad2b5f4\") " pod="openstack-kuttl-tests/placement-76cff77fc4-bkwrr" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.232470 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wfb2k\" (UniqueName: \"kubernetes.io/projected/e7c82345-58f6-4542-9483-604f1ad2b5f4-kube-api-access-wfb2k\") pod \"placement-76cff77fc4-bkwrr\" (UID: \"e7c82345-58f6-4542-9483-604f1ad2b5f4\") " pod="openstack-kuttl-tests/placement-76cff77fc4-bkwrr" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.232501 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e7c82345-58f6-4542-9483-604f1ad2b5f4-scripts\") pod \"placement-76cff77fc4-bkwrr\" (UID: \"e7c82345-58f6-4542-9483-604f1ad2b5f4\") " pod="openstack-kuttl-tests/placement-76cff77fc4-bkwrr" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.232518 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z6k25\" (UniqueName: 
\"kubernetes.io/projected/113d7999-28c3-4738-aa99-bd09c1880299-kube-api-access-z6k25\") pod \"barbican-keystone-listener-65b4ff8654-bj52d\" (UID: \"113d7999-28c3-4738-aa99-bd09c1880299\") " pod="openstack-kuttl-tests/barbican-keystone-listener-65b4ff8654-bj52d" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.232544 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rwj9v\" (UniqueName: \"kubernetes.io/projected/fce7f5af-9139-4f50-a1c9-b74477ed188f-kube-api-access-rwj9v\") pod \"keystone-bootstrap-zwldh\" (UID: \"fce7f5af-9139-4f50-a1c9-b74477ed188f\") " pod="openstack-kuttl-tests/keystone-bootstrap-zwldh" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.232562 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fce7f5af-9139-4f50-a1c9-b74477ed188f-config-data\") pod \"keystone-bootstrap-zwldh\" (UID: \"fce7f5af-9139-4f50-a1c9-b74477ed188f\") " pod="openstack-kuttl-tests/keystone-bootstrap-zwldh" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.232579 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/113d7999-28c3-4738-aa99-bd09c1880299-logs\") pod \"barbican-keystone-listener-65b4ff8654-bj52d\" (UID: \"113d7999-28c3-4738-aa99-bd09c1880299\") " pod="openstack-kuttl-tests/barbican-keystone-listener-65b4ff8654-bj52d" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.232599 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fbba86e7-87c6-4350-8bbd-565ba9abfe68-config-data-custom\") pod \"barbican-worker-5bb9c47bc7-ct6jf\" (UID: \"fbba86e7-87c6-4350-8bbd-565ba9abfe68\") " pod="openstack-kuttl-tests/barbican-worker-5bb9c47bc7-ct6jf" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.232613 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/fce7f5af-9139-4f50-a1c9-b74477ed188f-fernet-keys\") pod \"keystone-bootstrap-zwldh\" (UID: \"fce7f5af-9139-4f50-a1c9-b74477ed188f\") " pod="openstack-kuttl-tests/keystone-bootstrap-zwldh" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.232641 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/113d7999-28c3-4738-aa99-bd09c1880299-config-data-custom\") pod \"barbican-keystone-listener-65b4ff8654-bj52d\" (UID: \"113d7999-28c3-4738-aa99-bd09c1880299\") " pod="openstack-kuttl-tests/barbican-keystone-listener-65b4ff8654-bj52d" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.232656 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fbba86e7-87c6-4350-8bbd-565ba9abfe68-config-data\") pod \"barbican-worker-5bb9c47bc7-ct6jf\" (UID: \"fbba86e7-87c6-4350-8bbd-565ba9abfe68\") " pod="openstack-kuttl-tests/barbican-worker-5bb9c47bc7-ct6jf" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.259513 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-65b4ff8654-bj52d"] Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.333660 4558 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fbba86e7-87c6-4350-8bbd-565ba9abfe68-config-data-custom\") pod \"barbican-worker-5bb9c47bc7-ct6jf\" (UID: \"fbba86e7-87c6-4350-8bbd-565ba9abfe68\") " pod="openstack-kuttl-tests/barbican-worker-5bb9c47bc7-ct6jf" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.333707 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/fce7f5af-9139-4f50-a1c9-b74477ed188f-fernet-keys\") pod \"keystone-bootstrap-zwldh\" (UID: \"fce7f5af-9139-4f50-a1c9-b74477ed188f\") " pod="openstack-kuttl-tests/keystone-bootstrap-zwldh" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.333738 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/113d7999-28c3-4738-aa99-bd09c1880299-config-data-custom\") pod \"barbican-keystone-listener-65b4ff8654-bj52d\" (UID: \"113d7999-28c3-4738-aa99-bd09c1880299\") " pod="openstack-kuttl-tests/barbican-keystone-listener-65b4ff8654-bj52d" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.333758 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fbba86e7-87c6-4350-8bbd-565ba9abfe68-config-data\") pod \"barbican-worker-5bb9c47bc7-ct6jf\" (UID: \"fbba86e7-87c6-4350-8bbd-565ba9abfe68\") " pod="openstack-kuttl-tests/barbican-worker-5bb9c47bc7-ct6jf" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.333787 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7c82345-58f6-4542-9483-604f1ad2b5f4-config-data\") pod \"placement-76cff77fc4-bkwrr\" (UID: \"e7c82345-58f6-4542-9483-604f1ad2b5f4\") " pod="openstack-kuttl-tests/placement-76cff77fc4-bkwrr" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.333804 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fbba86e7-87c6-4350-8bbd-565ba9abfe68-logs\") pod \"barbican-worker-5bb9c47bc7-ct6jf\" (UID: \"fbba86e7-87c6-4350-8bbd-565ba9abfe68\") " pod="openstack-kuttl-tests/barbican-worker-5bb9c47bc7-ct6jf" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.333834 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fce7f5af-9139-4f50-a1c9-b74477ed188f-combined-ca-bundle\") pod \"keystone-bootstrap-zwldh\" (UID: \"fce7f5af-9139-4f50-a1c9-b74477ed188f\") " pod="openstack-kuttl-tests/keystone-bootstrap-zwldh" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.333859 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbba86e7-87c6-4350-8bbd-565ba9abfe68-combined-ca-bundle\") pod \"barbican-worker-5bb9c47bc7-ct6jf\" (UID: \"fbba86e7-87c6-4350-8bbd-565ba9abfe68\") " pod="openstack-kuttl-tests/barbican-worker-5bb9c47bc7-ct6jf" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.333875 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/113d7999-28c3-4738-aa99-bd09c1880299-config-data\") pod \"barbican-keystone-listener-65b4ff8654-bj52d\" (UID: \"113d7999-28c3-4738-aa99-bd09c1880299\") " pod="openstack-kuttl-tests/barbican-keystone-listener-65b4ff8654-bj52d" Jan 20 17:30:05 crc kubenswrapper[4558]: 
I0120 17:30:05.333897 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fce7f5af-9139-4f50-a1c9-b74477ed188f-scripts\") pod \"keystone-bootstrap-zwldh\" (UID: \"fce7f5af-9139-4f50-a1c9-b74477ed188f\") " pod="openstack-kuttl-tests/keystone-bootstrap-zwldh" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.333916 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/113d7999-28c3-4738-aa99-bd09c1880299-combined-ca-bundle\") pod \"barbican-keystone-listener-65b4ff8654-bj52d\" (UID: \"113d7999-28c3-4738-aa99-bd09c1880299\") " pod="openstack-kuttl-tests/barbican-keystone-listener-65b4ff8654-bj52d" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.333939 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gmsqz\" (UniqueName: \"kubernetes.io/projected/fbba86e7-87c6-4350-8bbd-565ba9abfe68-kube-api-access-gmsqz\") pod \"barbican-worker-5bb9c47bc7-ct6jf\" (UID: \"fbba86e7-87c6-4350-8bbd-565ba9abfe68\") " pod="openstack-kuttl-tests/barbican-worker-5bb9c47bc7-ct6jf" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.333961 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7c82345-58f6-4542-9483-604f1ad2b5f4-combined-ca-bundle\") pod \"placement-76cff77fc4-bkwrr\" (UID: \"e7c82345-58f6-4542-9483-604f1ad2b5f4\") " pod="openstack-kuttl-tests/placement-76cff77fc4-bkwrr" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.333982 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/fce7f5af-9139-4f50-a1c9-b74477ed188f-credential-keys\") pod \"keystone-bootstrap-zwldh\" (UID: \"fce7f5af-9139-4f50-a1c9-b74477ed188f\") " pod="openstack-kuttl-tests/keystone-bootstrap-zwldh" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.333999 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e7c82345-58f6-4542-9483-604f1ad2b5f4-logs\") pod \"placement-76cff77fc4-bkwrr\" (UID: \"e7c82345-58f6-4542-9483-604f1ad2b5f4\") " pod="openstack-kuttl-tests/placement-76cff77fc4-bkwrr" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.334015 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wfb2k\" (UniqueName: \"kubernetes.io/projected/e7c82345-58f6-4542-9483-604f1ad2b5f4-kube-api-access-wfb2k\") pod \"placement-76cff77fc4-bkwrr\" (UID: \"e7c82345-58f6-4542-9483-604f1ad2b5f4\") " pod="openstack-kuttl-tests/placement-76cff77fc4-bkwrr" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.334039 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e7c82345-58f6-4542-9483-604f1ad2b5f4-scripts\") pod \"placement-76cff77fc4-bkwrr\" (UID: \"e7c82345-58f6-4542-9483-604f1ad2b5f4\") " pod="openstack-kuttl-tests/placement-76cff77fc4-bkwrr" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.334062 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z6k25\" (UniqueName: \"kubernetes.io/projected/113d7999-28c3-4738-aa99-bd09c1880299-kube-api-access-z6k25\") pod \"barbican-keystone-listener-65b4ff8654-bj52d\" (UID: \"113d7999-28c3-4738-aa99-bd09c1880299\") " 
pod="openstack-kuttl-tests/barbican-keystone-listener-65b4ff8654-bj52d" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.334084 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rwj9v\" (UniqueName: \"kubernetes.io/projected/fce7f5af-9139-4f50-a1c9-b74477ed188f-kube-api-access-rwj9v\") pod \"keystone-bootstrap-zwldh\" (UID: \"fce7f5af-9139-4f50-a1c9-b74477ed188f\") " pod="openstack-kuttl-tests/keystone-bootstrap-zwldh" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.334100 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fce7f5af-9139-4f50-a1c9-b74477ed188f-config-data\") pod \"keystone-bootstrap-zwldh\" (UID: \"fce7f5af-9139-4f50-a1c9-b74477ed188f\") " pod="openstack-kuttl-tests/keystone-bootstrap-zwldh" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.334117 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/113d7999-28c3-4738-aa99-bd09c1880299-logs\") pod \"barbican-keystone-listener-65b4ff8654-bj52d\" (UID: \"113d7999-28c3-4738-aa99-bd09c1880299\") " pod="openstack-kuttl-tests/barbican-keystone-listener-65b4ff8654-bj52d" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.334607 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/113d7999-28c3-4738-aa99-bd09c1880299-logs\") pod \"barbican-keystone-listener-65b4ff8654-bj52d\" (UID: \"113d7999-28c3-4738-aa99-bd09c1880299\") " pod="openstack-kuttl-tests/barbican-keystone-listener-65b4ff8654-bj52d" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.345539 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/113d7999-28c3-4738-aa99-bd09c1880299-combined-ca-bundle\") pod \"barbican-keystone-listener-65b4ff8654-bj52d\" (UID: \"113d7999-28c3-4738-aa99-bd09c1880299\") " pod="openstack-kuttl-tests/barbican-keystone-listener-65b4ff8654-bj52d" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.345594 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-api-74c98446fb-d49s5"] Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.346994 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-api-74c98446fb-d49s5" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.348522 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e7c82345-58f6-4542-9483-604f1ad2b5f4-scripts\") pod \"placement-76cff77fc4-bkwrr\" (UID: \"e7c82345-58f6-4542-9483-604f1ad2b5f4\") " pod="openstack-kuttl-tests/placement-76cff77fc4-bkwrr" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.348783 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e7c82345-58f6-4542-9483-604f1ad2b5f4-logs\") pod \"placement-76cff77fc4-bkwrr\" (UID: \"e7c82345-58f6-4542-9483-604f1ad2b5f4\") " pod="openstack-kuttl-tests/placement-76cff77fc4-bkwrr" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.351413 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fbba86e7-87c6-4350-8bbd-565ba9abfe68-logs\") pod \"barbican-worker-5bb9c47bc7-ct6jf\" (UID: \"fbba86e7-87c6-4350-8bbd-565ba9abfe68\") " pod="openstack-kuttl-tests/barbican-worker-5bb9c47bc7-ct6jf" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.351562 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-api-config-data" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.353761 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/fce7f5af-9139-4f50-a1c9-b74477ed188f-credential-keys\") pod \"keystone-bootstrap-zwldh\" (UID: \"fce7f5af-9139-4f50-a1c9-b74477ed188f\") " pod="openstack-kuttl-tests/keystone-bootstrap-zwldh" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.356239 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-api-74c98446fb-d49s5"] Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.360915 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fbba86e7-87c6-4350-8bbd-565ba9abfe68-config-data-custom\") pod \"barbican-worker-5bb9c47bc7-ct6jf\" (UID: \"fbba86e7-87c6-4350-8bbd-565ba9abfe68\") " pod="openstack-kuttl-tests/barbican-worker-5bb9c47bc7-ct6jf" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.361235 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/fce7f5af-9139-4f50-a1c9-b74477ed188f-fernet-keys\") pod \"keystone-bootstrap-zwldh\" (UID: \"fce7f5af-9139-4f50-a1c9-b74477ed188f\") " pod="openstack-kuttl-tests/keystone-bootstrap-zwldh" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.361466 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7c82345-58f6-4542-9483-604f1ad2b5f4-combined-ca-bundle\") pod \"placement-76cff77fc4-bkwrr\" (UID: \"e7c82345-58f6-4542-9483-604f1ad2b5f4\") " pod="openstack-kuttl-tests/placement-76cff77fc4-bkwrr" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.362467 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fce7f5af-9139-4f50-a1c9-b74477ed188f-combined-ca-bundle\") pod \"keystone-bootstrap-zwldh\" (UID: \"fce7f5af-9139-4f50-a1c9-b74477ed188f\") " pod="openstack-kuttl-tests/keystone-bootstrap-zwldh" Jan 20 17:30:05 crc kubenswrapper[4558]: 
I0120 17:30:05.368115 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fbba86e7-87c6-4350-8bbd-565ba9abfe68-config-data\") pod \"barbican-worker-5bb9c47bc7-ct6jf\" (UID: \"fbba86e7-87c6-4350-8bbd-565ba9abfe68\") " pod="openstack-kuttl-tests/barbican-worker-5bb9c47bc7-ct6jf" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.368757 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z6k25\" (UniqueName: \"kubernetes.io/projected/113d7999-28c3-4738-aa99-bd09c1880299-kube-api-access-z6k25\") pod \"barbican-keystone-listener-65b4ff8654-bj52d\" (UID: \"113d7999-28c3-4738-aa99-bd09c1880299\") " pod="openstack-kuttl-tests/barbican-keystone-listener-65b4ff8654-bj52d" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.368935 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fce7f5af-9139-4f50-a1c9-b74477ed188f-config-data\") pod \"keystone-bootstrap-zwldh\" (UID: \"fce7f5af-9139-4f50-a1c9-b74477ed188f\") " pod="openstack-kuttl-tests/keystone-bootstrap-zwldh" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.373762 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7c82345-58f6-4542-9483-604f1ad2b5f4-config-data\") pod \"placement-76cff77fc4-bkwrr\" (UID: \"e7c82345-58f6-4542-9483-604f1ad2b5f4\") " pod="openstack-kuttl-tests/placement-76cff77fc4-bkwrr" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.374226 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gmsqz\" (UniqueName: \"kubernetes.io/projected/fbba86e7-87c6-4350-8bbd-565ba9abfe68-kube-api-access-gmsqz\") pod \"barbican-worker-5bb9c47bc7-ct6jf\" (UID: \"fbba86e7-87c6-4350-8bbd-565ba9abfe68\") " pod="openstack-kuttl-tests/barbican-worker-5bb9c47bc7-ct6jf" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.381512 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fce7f5af-9139-4f50-a1c9-b74477ed188f-scripts\") pod \"keystone-bootstrap-zwldh\" (UID: \"fce7f5af-9139-4f50-a1c9-b74477ed188f\") " pod="openstack-kuttl-tests/keystone-bootstrap-zwldh" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.381716 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/113d7999-28c3-4738-aa99-bd09c1880299-config-data-custom\") pod \"barbican-keystone-listener-65b4ff8654-bj52d\" (UID: \"113d7999-28c3-4738-aa99-bd09c1880299\") " pod="openstack-kuttl-tests/barbican-keystone-listener-65b4ff8654-bj52d" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.381785 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbba86e7-87c6-4350-8bbd-565ba9abfe68-combined-ca-bundle\") pod \"barbican-worker-5bb9c47bc7-ct6jf\" (UID: \"fbba86e7-87c6-4350-8bbd-565ba9abfe68\") " pod="openstack-kuttl-tests/barbican-worker-5bb9c47bc7-ct6jf" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.382146 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wfb2k\" (UniqueName: \"kubernetes.io/projected/e7c82345-58f6-4542-9483-604f1ad2b5f4-kube-api-access-wfb2k\") pod \"placement-76cff77fc4-bkwrr\" (UID: \"e7c82345-58f6-4542-9483-604f1ad2b5f4\") " 
pod="openstack-kuttl-tests/placement-76cff77fc4-bkwrr" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.382421 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/113d7999-28c3-4738-aa99-bd09c1880299-config-data\") pod \"barbican-keystone-listener-65b4ff8654-bj52d\" (UID: \"113d7999-28c3-4738-aa99-bd09c1880299\") " pod="openstack-kuttl-tests/barbican-keystone-listener-65b4ff8654-bj52d" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.388381 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rwj9v\" (UniqueName: \"kubernetes.io/projected/fce7f5af-9139-4f50-a1c9-b74477ed188f-kube-api-access-rwj9v\") pod \"keystone-bootstrap-zwldh\" (UID: \"fce7f5af-9139-4f50-a1c9-b74477ed188f\") " pod="openstack-kuttl-tests/keystone-bootstrap-zwldh" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.439360 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e83a55be-4002-403a-911f-8f33c234b7e4-config-data-custom\") pod \"barbican-api-74c98446fb-d49s5\" (UID: \"e83a55be-4002-403a-911f-8f33c234b7e4\") " pod="openstack-kuttl-tests/barbican-api-74c98446fb-d49s5" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.439422 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l7zlc\" (UniqueName: \"kubernetes.io/projected/e83a55be-4002-403a-911f-8f33c234b7e4-kube-api-access-l7zlc\") pod \"barbican-api-74c98446fb-d49s5\" (UID: \"e83a55be-4002-403a-911f-8f33c234b7e4\") " pod="openstack-kuttl-tests/barbican-api-74c98446fb-d49s5" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.439451 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e83a55be-4002-403a-911f-8f33c234b7e4-config-data\") pod \"barbican-api-74c98446fb-d49s5\" (UID: \"e83a55be-4002-403a-911f-8f33c234b7e4\") " pod="openstack-kuttl-tests/barbican-api-74c98446fb-d49s5" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.439502 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e83a55be-4002-403a-911f-8f33c234b7e4-combined-ca-bundle\") pod \"barbican-api-74c98446fb-d49s5\" (UID: \"e83a55be-4002-403a-911f-8f33c234b7e4\") " pod="openstack-kuttl-tests/barbican-api-74c98446fb-d49s5" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.439573 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e83a55be-4002-403a-911f-8f33c234b7e4-logs\") pod \"barbican-api-74c98446fb-d49s5\" (UID: \"e83a55be-4002-403a-911f-8f33c234b7e4\") " pod="openstack-kuttl-tests/barbican-api-74c98446fb-d49s5" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.486429 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-zwldh" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.510246 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-db-sync-kd8pp" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.541734 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54807f10-de72-4664-a757-a1c048e4d5d7-combined-ca-bundle\") pod \"54807f10-de72-4664-a757-a1c048e4d5d7\" (UID: \"54807f10-de72-4664-a757-a1c048e4d5d7\") " Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.542043 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54807f10-de72-4664-a757-a1c048e4d5d7-config-data\") pod \"54807f10-de72-4664-a757-a1c048e4d5d7\" (UID: \"54807f10-de72-4664-a757-a1c048e4d5d7\") " Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.542064 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/54807f10-de72-4664-a757-a1c048e4d5d7-db-sync-config-data\") pod \"54807f10-de72-4664-a757-a1c048e4d5d7\" (UID: \"54807f10-de72-4664-a757-a1c048e4d5d7\") " Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.542091 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/54807f10-de72-4664-a757-a1c048e4d5d7-etc-machine-id\") pod \"54807f10-de72-4664-a757-a1c048e4d5d7\" (UID: \"54807f10-de72-4664-a757-a1c048e4d5d7\") " Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.542140 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/54807f10-de72-4664-a757-a1c048e4d5d7-scripts\") pod \"54807f10-de72-4664-a757-a1c048e4d5d7\" (UID: \"54807f10-de72-4664-a757-a1c048e4d5d7\") " Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.542383 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e83a55be-4002-403a-911f-8f33c234b7e4-config-data\") pod \"barbican-api-74c98446fb-d49s5\" (UID: \"e83a55be-4002-403a-911f-8f33c234b7e4\") " pod="openstack-kuttl-tests/barbican-api-74c98446fb-d49s5" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.542455 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e83a55be-4002-403a-911f-8f33c234b7e4-combined-ca-bundle\") pod \"barbican-api-74c98446fb-d49s5\" (UID: \"e83a55be-4002-403a-911f-8f33c234b7e4\") " pod="openstack-kuttl-tests/barbican-api-74c98446fb-d49s5" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.542572 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e83a55be-4002-403a-911f-8f33c234b7e4-logs\") pod \"barbican-api-74c98446fb-d49s5\" (UID: \"e83a55be-4002-403a-911f-8f33c234b7e4\") " pod="openstack-kuttl-tests/barbican-api-74c98446fb-d49s5" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.542679 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e83a55be-4002-403a-911f-8f33c234b7e4-config-data-custom\") pod \"barbican-api-74c98446fb-d49s5\" (UID: \"e83a55be-4002-403a-911f-8f33c234b7e4\") " pod="openstack-kuttl-tests/barbican-api-74c98446fb-d49s5" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.542738 4558 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"kube-api-access-l7zlc\" (UniqueName: \"kubernetes.io/projected/e83a55be-4002-403a-911f-8f33c234b7e4-kube-api-access-l7zlc\") pod \"barbican-api-74c98446fb-d49s5\" (UID: \"e83a55be-4002-403a-911f-8f33c234b7e4\") " pod="openstack-kuttl-tests/barbican-api-74c98446fb-d49s5" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.543528 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/54807f10-de72-4664-a757-a1c048e4d5d7-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "54807f10-de72-4664-a757-a1c048e4d5d7" (UID: "54807f10-de72-4664-a757-a1c048e4d5d7"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.543623 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e83a55be-4002-403a-911f-8f33c234b7e4-logs\") pod \"barbican-api-74c98446fb-d49s5\" (UID: \"e83a55be-4002-403a-911f-8f33c234b7e4\") " pod="openstack-kuttl-tests/barbican-api-74c98446fb-d49s5" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.548632 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54807f10-de72-4664-a757-a1c048e4d5d7-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "54807f10-de72-4664-a757-a1c048e4d5d7" (UID: "54807f10-de72-4664-a757-a1c048e4d5d7"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.555181 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e83a55be-4002-403a-911f-8f33c234b7e4-combined-ca-bundle\") pod \"barbican-api-74c98446fb-d49s5\" (UID: \"e83a55be-4002-403a-911f-8f33c234b7e4\") " pod="openstack-kuttl-tests/barbican-api-74c98446fb-d49s5" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.573325 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54807f10-de72-4664-a757-a1c048e4d5d7-scripts" (OuterVolumeSpecName: "scripts") pod "54807f10-de72-4664-a757-a1c048e4d5d7" (UID: "54807f10-de72-4664-a757-a1c048e4d5d7"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.576768 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e83a55be-4002-403a-911f-8f33c234b7e4-config-data\") pod \"barbican-api-74c98446fb-d49s5\" (UID: \"e83a55be-4002-403a-911f-8f33c234b7e4\") " pod="openstack-kuttl-tests/barbican-api-74c98446fb-d49s5" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.577618 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e83a55be-4002-403a-911f-8f33c234b7e4-config-data-custom\") pod \"barbican-api-74c98446fb-d49s5\" (UID: \"e83a55be-4002-403a-911f-8f33c234b7e4\") " pod="openstack-kuttl-tests/barbican-api-74c98446fb-d49s5" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.585741 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l7zlc\" (UniqueName: \"kubernetes.io/projected/e83a55be-4002-403a-911f-8f33c234b7e4-kube-api-access-l7zlc\") pod \"barbican-api-74c98446fb-d49s5\" (UID: \"e83a55be-4002-403a-911f-8f33c234b7e4\") " pod="openstack-kuttl-tests/barbican-api-74c98446fb-d49s5" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.599942 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54807f10-de72-4664-a757-a1c048e4d5d7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "54807f10-de72-4664-a757-a1c048e4d5d7" (UID: "54807f10-de72-4664-a757-a1c048e4d5d7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.621054 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54807f10-de72-4664-a757-a1c048e4d5d7-config-data" (OuterVolumeSpecName: "config-data") pod "54807f10-de72-4664-a757-a1c048e4d5d7" (UID: "54807f10-de72-4664-a757-a1c048e4d5d7"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.646277 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l252s\" (UniqueName: \"kubernetes.io/projected/54807f10-de72-4664-a757-a1c048e4d5d7-kube-api-access-l252s\") pod \"54807f10-de72-4664-a757-a1c048e4d5d7\" (UID: \"54807f10-de72-4664-a757-a1c048e4d5d7\") " Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.646634 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54807f10-de72-4664-a757-a1c048e4d5d7-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.646652 4558 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/54807f10-de72-4664-a757-a1c048e4d5d7-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.646662 4558 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/54807f10-de72-4664-a757-a1c048e4d5d7-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.646670 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/54807f10-de72-4664-a757-a1c048e4d5d7-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.646679 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54807f10-de72-4664-a757-a1c048e4d5d7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.649307 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/54807f10-de72-4664-a757-a1c048e4d5d7-kube-api-access-l252s" (OuterVolumeSpecName: "kube-api-access-l252s") pod "54807f10-de72-4664-a757-a1c048e4d5d7" (UID: "54807f10-de72-4664-a757-a1c048e4d5d7"). InnerVolumeSpecName "kube-api-access-l252s". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.672475 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-5bb9c47bc7-ct6jf" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.704052 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-76cff77fc4-bkwrr" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.712605 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-65b4ff8654-bj52d" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.719616 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-api-74c98446fb-d49s5" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.749794 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l252s\" (UniqueName: \"kubernetes.io/projected/54807f10-de72-4664-a757-a1c048e4d5d7-kube-api-access-l252s\") on node \"crc\" DevicePath \"\"" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.889435 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-sync-kd8pp" event={"ID":"54807f10-de72-4664-a757-a1c048e4d5d7","Type":"ContainerDied","Data":"875266bd4148c109949f5a13dd6c799264df1f183288f54c66a32c7496e55160"} Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.889686 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="875266bd4148c109949f5a13dd6c799264df1f183288f54c66a32c7496e55160" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.889694 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-db-sync-kd8pp" Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.943430 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"b9f98dae-f452-470a-b189-73bf68f60a83","Type":"ContainerStarted","Data":"289b514a790a35157db641c744953dda67cf07a71d5bd4f2955ba6a0e7198bb1"} Jan 20 17:30:05 crc kubenswrapper[4558]: I0120 17:30:05.944686 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:30:06 crc kubenswrapper[4558]: I0120 17:30:05.997995 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-zwldh"] Jan 20 17:30:06 crc kubenswrapper[4558]: I0120 17:30:06.006075 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=2.794583068 podStartE2EDuration="8.006045962s" podCreationTimestamp="2026-01-20 17:29:58 +0000 UTC" firstStartedPulling="2026-01-20 17:29:59.783785531 +0000 UTC m=+2893.544123487" lastFinishedPulling="2026-01-20 17:30:04.995248413 +0000 UTC m=+2898.755586381" observedRunningTime="2026-01-20 17:30:05.998060609 +0000 UTC m=+2899.758398575" watchObservedRunningTime="2026-01-20 17:30:06.006045962 +0000 UTC m=+2899.766383930" Jan 20 17:30:06 crc kubenswrapper[4558]: W0120 17:30:06.023630 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfce7f5af_9139_4f50_a1c9_b74477ed188f.slice/crio-d2b7e4c217f1fc4ea05d59cbb27568b7434e7093e652affe0cf68c0c7e32c41f WatchSource:0}: Error finding container d2b7e4c217f1fc4ea05d59cbb27568b7434e7093e652affe0cf68c0c7e32c41f: Status 404 returned error can't find the container with id d2b7e4c217f1fc4ea05d59cbb27568b7434e7093e652affe0cf68c0c7e32c41f Jan 20 17:30:06 crc kubenswrapper[4558]: I0120 17:30:06.055058 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:30:06 crc kubenswrapper[4558]: E0120 17:30:06.056407 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54807f10-de72-4664-a757-a1c048e4d5d7" containerName="cinder-db-sync" Jan 20 17:30:06 crc kubenswrapper[4558]: I0120 17:30:06.056481 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="54807f10-de72-4664-a757-a1c048e4d5d7" containerName="cinder-db-sync" Jan 20 17:30:06 crc kubenswrapper[4558]: I0120 17:30:06.056698 4558 
memory_manager.go:354] "RemoveStaleState removing state" podUID="54807f10-de72-4664-a757-a1c048e4d5d7" containerName="cinder-db-sync" Jan 20 17:30:06 crc kubenswrapper[4558]: I0120 17:30:06.060026 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:30:06 crc kubenswrapper[4558]: I0120 17:30:06.064429 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/568979fa-537c-45c5-952c-f87a40b194ef-scripts\") pod \"cinder-scheduler-0\" (UID: \"568979fa-537c-45c5-952c-f87a40b194ef\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:30:06 crc kubenswrapper[4558]: I0120 17:30:06.064585 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/568979fa-537c-45c5-952c-f87a40b194ef-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"568979fa-537c-45c5-952c-f87a40b194ef\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:30:06 crc kubenswrapper[4558]: I0120 17:30:06.064645 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xsjnm\" (UniqueName: \"kubernetes.io/projected/568979fa-537c-45c5-952c-f87a40b194ef-kube-api-access-xsjnm\") pod \"cinder-scheduler-0\" (UID: \"568979fa-537c-45c5-952c-f87a40b194ef\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:30:06 crc kubenswrapper[4558]: I0120 17:30:06.064723 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/568979fa-537c-45c5-952c-f87a40b194ef-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"568979fa-537c-45c5-952c-f87a40b194ef\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:30:06 crc kubenswrapper[4558]: I0120 17:30:06.064813 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-cinder-dockercfg-zxp27" Jan 20 17:30:06 crc kubenswrapper[4558]: I0120 17:30:06.065027 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-scripts" Jan 20 17:30:06 crc kubenswrapper[4558]: I0120 17:30:06.064823 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/568979fa-537c-45c5-952c-f87a40b194ef-config-data\") pod \"cinder-scheduler-0\" (UID: \"568979fa-537c-45c5-952c-f87a40b194ef\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:30:06 crc kubenswrapper[4558]: I0120 17:30:06.065176 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/568979fa-537c-45c5-952c-f87a40b194ef-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"568979fa-537c-45c5-952c-f87a40b194ef\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:30:06 crc kubenswrapper[4558]: I0120 17:30:06.065051 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-config-data" Jan 20 17:30:06 crc kubenswrapper[4558]: I0120 17:30:06.067669 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-scheduler-config-data" Jan 20 17:30:06 crc kubenswrapper[4558]: I0120 17:30:06.089575 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:30:06 crc kubenswrapper[4558]: I0120 17:30:06.130803 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-worker-5bb9c47bc7-ct6jf"] Jan 20 17:30:06 crc kubenswrapper[4558]: I0120 17:30:06.165042 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:30:06 crc kubenswrapper[4558]: I0120 17:30:06.166778 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:30:06 crc kubenswrapper[4558]: I0120 17:30:06.167091 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/568979fa-537c-45c5-952c-f87a40b194ef-config-data\") pod \"cinder-scheduler-0\" (UID: \"568979fa-537c-45c5-952c-f87a40b194ef\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:30:06 crc kubenswrapper[4558]: I0120 17:30:06.167180 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/568979fa-537c-45c5-952c-f87a40b194ef-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"568979fa-537c-45c5-952c-f87a40b194ef\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:30:06 crc kubenswrapper[4558]: I0120 17:30:06.167212 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/568979fa-537c-45c5-952c-f87a40b194ef-scripts\") pod \"cinder-scheduler-0\" (UID: \"568979fa-537c-45c5-952c-f87a40b194ef\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:30:06 crc kubenswrapper[4558]: I0120 17:30:06.167250 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/568979fa-537c-45c5-952c-f87a40b194ef-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"568979fa-537c-45c5-952c-f87a40b194ef\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:30:06 crc kubenswrapper[4558]: I0120 17:30:06.167275 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xsjnm\" (UniqueName: \"kubernetes.io/projected/568979fa-537c-45c5-952c-f87a40b194ef-kube-api-access-xsjnm\") pod \"cinder-scheduler-0\" (UID: \"568979fa-537c-45c5-952c-f87a40b194ef\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:30:06 crc kubenswrapper[4558]: I0120 17:30:06.167309 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/568979fa-537c-45c5-952c-f87a40b194ef-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"568979fa-537c-45c5-952c-f87a40b194ef\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:30:06 crc kubenswrapper[4558]: I0120 17:30:06.167703 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/568979fa-537c-45c5-952c-f87a40b194ef-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"568979fa-537c-45c5-952c-f87a40b194ef\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:30:06 crc kubenswrapper[4558]: I0120 17:30:06.171892 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/568979fa-537c-45c5-952c-f87a40b194ef-scripts\") pod \"cinder-scheduler-0\" (UID: \"568979fa-537c-45c5-952c-f87a40b194ef\") " 
pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:30:06 crc kubenswrapper[4558]: I0120 17:30:06.177297 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-api-config-data" Jan 20 17:30:06 crc kubenswrapper[4558]: I0120 17:30:06.191105 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/568979fa-537c-45c5-952c-f87a40b194ef-config-data\") pod \"cinder-scheduler-0\" (UID: \"568979fa-537c-45c5-952c-f87a40b194ef\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:30:06 crc kubenswrapper[4558]: I0120 17:30:06.191121 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/568979fa-537c-45c5-952c-f87a40b194ef-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"568979fa-537c-45c5-952c-f87a40b194ef\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:30:06 crc kubenswrapper[4558]: I0120 17:30:06.191213 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:30:06 crc kubenswrapper[4558]: I0120 17:30:06.191962 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/568979fa-537c-45c5-952c-f87a40b194ef-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"568979fa-537c-45c5-952c-f87a40b194ef\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:30:06 crc kubenswrapper[4558]: I0120 17:30:06.204823 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xsjnm\" (UniqueName: \"kubernetes.io/projected/568979fa-537c-45c5-952c-f87a40b194ef-kube-api-access-xsjnm\") pod \"cinder-scheduler-0\" (UID: \"568979fa-537c-45c5-952c-f87a40b194ef\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:30:06 crc kubenswrapper[4558]: I0120 17:30:06.269247 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/63df8460-ab49-4787-92a7-54a14c0179ca-etc-machine-id\") pod \"cinder-api-0\" (UID: \"63df8460-ab49-4787-92a7-54a14c0179ca\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:30:06 crc kubenswrapper[4558]: I0120 17:30:06.269411 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/63df8460-ab49-4787-92a7-54a14c0179ca-scripts\") pod \"cinder-api-0\" (UID: \"63df8460-ab49-4787-92a7-54a14c0179ca\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:30:06 crc kubenswrapper[4558]: I0120 17:30:06.269439 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/63df8460-ab49-4787-92a7-54a14c0179ca-logs\") pod \"cinder-api-0\" (UID: \"63df8460-ab49-4787-92a7-54a14c0179ca\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:30:06 crc kubenswrapper[4558]: I0120 17:30:06.269485 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l88ft\" (UniqueName: \"kubernetes.io/projected/63df8460-ab49-4787-92a7-54a14c0179ca-kube-api-access-l88ft\") pod \"cinder-api-0\" (UID: \"63df8460-ab49-4787-92a7-54a14c0179ca\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:30:06 crc kubenswrapper[4558]: I0120 17:30:06.269522 4558 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/63df8460-ab49-4787-92a7-54a14c0179ca-config-data\") pod \"cinder-api-0\" (UID: \"63df8460-ab49-4787-92a7-54a14c0179ca\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:30:06 crc kubenswrapper[4558]: I0120 17:30:06.269582 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/63df8460-ab49-4787-92a7-54a14c0179ca-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"63df8460-ab49-4787-92a7-54a14c0179ca\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:30:06 crc kubenswrapper[4558]: I0120 17:30:06.269642 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/63df8460-ab49-4787-92a7-54a14c0179ca-config-data-custom\") pod \"cinder-api-0\" (UID: \"63df8460-ab49-4787-92a7-54a14c0179ca\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:30:06 crc kubenswrapper[4558]: I0120 17:30:06.371216 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l88ft\" (UniqueName: \"kubernetes.io/projected/63df8460-ab49-4787-92a7-54a14c0179ca-kube-api-access-l88ft\") pod \"cinder-api-0\" (UID: \"63df8460-ab49-4787-92a7-54a14c0179ca\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:30:06 crc kubenswrapper[4558]: I0120 17:30:06.371558 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/63df8460-ab49-4787-92a7-54a14c0179ca-config-data\") pod \"cinder-api-0\" (UID: \"63df8460-ab49-4787-92a7-54a14c0179ca\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:30:06 crc kubenswrapper[4558]: I0120 17:30:06.371719 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/63df8460-ab49-4787-92a7-54a14c0179ca-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"63df8460-ab49-4787-92a7-54a14c0179ca\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:30:06 crc kubenswrapper[4558]: I0120 17:30:06.371844 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/63df8460-ab49-4787-92a7-54a14c0179ca-config-data-custom\") pod \"cinder-api-0\" (UID: \"63df8460-ab49-4787-92a7-54a14c0179ca\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:30:06 crc kubenswrapper[4558]: I0120 17:30:06.371906 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/63df8460-ab49-4787-92a7-54a14c0179ca-etc-machine-id\") pod \"cinder-api-0\" (UID: \"63df8460-ab49-4787-92a7-54a14c0179ca\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:30:06 crc kubenswrapper[4558]: I0120 17:30:06.371972 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/63df8460-ab49-4787-92a7-54a14c0179ca-scripts\") pod \"cinder-api-0\" (UID: \"63df8460-ab49-4787-92a7-54a14c0179ca\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:30:06 crc kubenswrapper[4558]: I0120 17:30:06.371992 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/63df8460-ab49-4787-92a7-54a14c0179ca-logs\") pod \"cinder-api-0\" (UID: 
\"63df8460-ab49-4787-92a7-54a14c0179ca\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:30:06 crc kubenswrapper[4558]: I0120 17:30:06.372318 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/63df8460-ab49-4787-92a7-54a14c0179ca-etc-machine-id\") pod \"cinder-api-0\" (UID: \"63df8460-ab49-4787-92a7-54a14c0179ca\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:30:06 crc kubenswrapper[4558]: I0120 17:30:06.372789 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/63df8460-ab49-4787-92a7-54a14c0179ca-logs\") pod \"cinder-api-0\" (UID: \"63df8460-ab49-4787-92a7-54a14c0179ca\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:30:06 crc kubenswrapper[4558]: I0120 17:30:06.377055 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/63df8460-ab49-4787-92a7-54a14c0179ca-config-data\") pod \"cinder-api-0\" (UID: \"63df8460-ab49-4787-92a7-54a14c0179ca\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:30:06 crc kubenswrapper[4558]: I0120 17:30:06.377806 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/63df8460-ab49-4787-92a7-54a14c0179ca-config-data-custom\") pod \"cinder-api-0\" (UID: \"63df8460-ab49-4787-92a7-54a14c0179ca\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:30:06 crc kubenswrapper[4558]: I0120 17:30:06.378106 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/63df8460-ab49-4787-92a7-54a14c0179ca-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"63df8460-ab49-4787-92a7-54a14c0179ca\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:30:06 crc kubenswrapper[4558]: I0120 17:30:06.378238 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/63df8460-ab49-4787-92a7-54a14c0179ca-scripts\") pod \"cinder-api-0\" (UID: \"63df8460-ab49-4787-92a7-54a14c0179ca\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:30:06 crc kubenswrapper[4558]: I0120 17:30:06.386519 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-76cff77fc4-bkwrr"] Jan 20 17:30:06 crc kubenswrapper[4558]: I0120 17:30:06.387607 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:30:06 crc kubenswrapper[4558]: I0120 17:30:06.390780 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l88ft\" (UniqueName: \"kubernetes.io/projected/63df8460-ab49-4787-92a7-54a14c0179ca-kube-api-access-l88ft\") pod \"cinder-api-0\" (UID: \"63df8460-ab49-4787-92a7-54a14c0179ca\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:30:06 crc kubenswrapper[4558]: I0120 17:30:06.404381 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-api-74c98446fb-d49s5"] Jan 20 17:30:06 crc kubenswrapper[4558]: I0120 17:30:06.542596 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-65b4ff8654-bj52d"] Jan 20 17:30:06 crc kubenswrapper[4558]: I0120 17:30:06.565853 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:30:06 crc kubenswrapper[4558]: I0120 17:30:06.579298 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dbbead9c-ffa4-448a-81a0-d1db96d7b5e3" path="/var/lib/kubelet/pods/dbbead9c-ffa4-448a-81a0-d1db96d7b5e3/volumes" Jan 20 17:30:06 crc kubenswrapper[4558]: W0120 17:30:06.856590 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod568979fa_537c_45c5_952c_f87a40b194ef.slice/crio-881644d23f93cffca43d14cbda0bbbea6ddba271b723e6f35ce42103ef988265 WatchSource:0}: Error finding container 881644d23f93cffca43d14cbda0bbbea6ddba271b723e6f35ce42103ef988265: Status 404 returned error can't find the container with id 881644d23f93cffca43d14cbda0bbbea6ddba271b723e6f35ce42103ef988265 Jan 20 17:30:06 crc kubenswrapper[4558]: I0120 17:30:06.858123 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:30:06 crc kubenswrapper[4558]: I0120 17:30:06.958988 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-74c98446fb-d49s5" event={"ID":"e83a55be-4002-403a-911f-8f33c234b7e4","Type":"ContainerStarted","Data":"33cd821346a573aa7c60d888955a767917f3516b07634b6ccea35a0b1b0543af"} Jan 20 17:30:06 crc kubenswrapper[4558]: I0120 17:30:06.959036 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-74c98446fb-d49s5" event={"ID":"e83a55be-4002-403a-911f-8f33c234b7e4","Type":"ContainerStarted","Data":"c59e4b15090f012b517b5ebb751bc8a094a2cfae61addd70eed726c4b9071598"} Jan 20 17:30:06 crc kubenswrapper[4558]: I0120 17:30:06.964618 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-zwldh" event={"ID":"fce7f5af-9139-4f50-a1c9-b74477ed188f","Type":"ContainerStarted","Data":"4899b425b0055fe954b8d69c9913e490586788a42bae1052bff5b0f5accd9abb"} Jan 20 17:30:06 crc kubenswrapper[4558]: I0120 17:30:06.964646 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-zwldh" event={"ID":"fce7f5af-9139-4f50-a1c9-b74477ed188f","Type":"ContainerStarted","Data":"d2b7e4c217f1fc4ea05d59cbb27568b7434e7093e652affe0cf68c0c7e32c41f"} Jan 20 17:30:06 crc kubenswrapper[4558]: I0120 17:30:06.968742 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"568979fa-537c-45c5-952c-f87a40b194ef","Type":"ContainerStarted","Data":"881644d23f93cffca43d14cbda0bbbea6ddba271b723e6f35ce42103ef988265"} Jan 20 17:30:06 crc kubenswrapper[4558]: I0120 17:30:06.971408 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-76cff77fc4-bkwrr" event={"ID":"e7c82345-58f6-4542-9483-604f1ad2b5f4","Type":"ContainerStarted","Data":"3907acb149bd72bc7bc6e07f835a9cc0d239aacbc88c4c4e9b53259322e14cc9"} Jan 20 17:30:06 crc kubenswrapper[4558]: I0120 17:30:06.971438 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-76cff77fc4-bkwrr" event={"ID":"e7c82345-58f6-4542-9483-604f1ad2b5f4","Type":"ContainerStarted","Data":"22d159542849bbd0a9f6cc94b62f9e83aec785861d4e1104bbfeace836675a08"} Jan 20 17:30:06 crc kubenswrapper[4558]: I0120 17:30:06.981507 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/keystone-bootstrap-zwldh" podStartSLOduration=1.981495197 podStartE2EDuration="1.981495197s" 
podCreationTimestamp="2026-01-20 17:30:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:30:06.978194711 +0000 UTC m=+2900.738532678" watchObservedRunningTime="2026-01-20 17:30:06.981495197 +0000 UTC m=+2900.741833164" Jan 20 17:30:06 crc kubenswrapper[4558]: I0120 17:30:06.990116 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-65b4ff8654-bj52d" event={"ID":"113d7999-28c3-4738-aa99-bd09c1880299","Type":"ContainerStarted","Data":"d981d2b9fdcc50a6990df32e78be80ae43eebfa1b705595c1292a8f9a9e81ae5"} Jan 20 17:30:06 crc kubenswrapper[4558]: I0120 17:30:06.990153 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-65b4ff8654-bj52d" event={"ID":"113d7999-28c3-4738-aa99-bd09c1880299","Type":"ContainerStarted","Data":"ceabbb81be2bef2501c32958390209c116353c7c402919013de9b183b48d54f8"} Jan 20 17:30:06 crc kubenswrapper[4558]: I0120 17:30:06.993288 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-5bb9c47bc7-ct6jf" event={"ID":"fbba86e7-87c6-4350-8bbd-565ba9abfe68","Type":"ContainerStarted","Data":"8315184077e8a32f5cae764089867c8adc47f7cd18db8e5c92a42400d2b15103"} Jan 20 17:30:06 crc kubenswrapper[4558]: I0120 17:30:06.993336 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-5bb9c47bc7-ct6jf" event={"ID":"fbba86e7-87c6-4350-8bbd-565ba9abfe68","Type":"ContainerStarted","Data":"c03486a35df252536c36f85a6561b27a72947105aeca886fbecad26ee9903d17"} Jan 20 17:30:06 crc kubenswrapper[4558]: I0120 17:30:06.993352 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-5bb9c47bc7-ct6jf" event={"ID":"fbba86e7-87c6-4350-8bbd-565ba9abfe68","Type":"ContainerStarted","Data":"b4e3b274173b767600258b538c6ec79b6fad8e999a9177f1eb117b98219db0aa"} Jan 20 17:30:07 crc kubenswrapper[4558]: I0120 17:30:07.008349 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-worker-5bb9c47bc7-ct6jf" podStartSLOduration=2.008339121 podStartE2EDuration="2.008339121s" podCreationTimestamp="2026-01-20 17:30:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:30:07.006149475 +0000 UTC m=+2900.766487442" watchObservedRunningTime="2026-01-20 17:30:07.008339121 +0000 UTC m=+2900.768677088" Jan 20 17:30:07 crc kubenswrapper[4558]: I0120 17:30:07.037787 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:30:07 crc kubenswrapper[4558]: I0120 17:30:07.567886 4558 scope.go:117] "RemoveContainer" containerID="f275e9f5de330b3c79316d5871106c6bcf90b698e0744c5beb28da45c336b36d" Jan 20 17:30:07 crc kubenswrapper[4558]: E0120 17:30:07.568150 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:30:08 crc kubenswrapper[4558]: I0120 17:30:08.022138 4558 generic.go:334] "Generic (PLEG): container finished" 
podID="323fa0e2-7555-4fd5-bd80-65474fa83b69" containerID="a43d1649fd01e64f0e6f6c358fe3b57e49f79cb49bee9b16b8082898e6121cb8" exitCode=0 Jan 20 17:30:08 crc kubenswrapper[4558]: I0120 17:30:08.022296 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-sync-hbj94" event={"ID":"323fa0e2-7555-4fd5-bd80-65474fa83b69","Type":"ContainerDied","Data":"a43d1649fd01e64f0e6f6c358fe3b57e49f79cb49bee9b16b8082898e6121cb8"} Jan 20 17:30:08 crc kubenswrapper[4558]: I0120 17:30:08.029759 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-74c98446fb-d49s5" event={"ID":"e83a55be-4002-403a-911f-8f33c234b7e4","Type":"ContainerStarted","Data":"9d39db634c3fa17e206e8d37ce291551fd4325bd609316a24c87cd44c0b5f95d"} Jan 20 17:30:08 crc kubenswrapper[4558]: I0120 17:30:08.030151 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/barbican-api-74c98446fb-d49s5" Jan 20 17:30:08 crc kubenswrapper[4558]: I0120 17:30:08.030215 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/barbican-api-74c98446fb-d49s5" Jan 20 17:30:08 crc kubenswrapper[4558]: I0120 17:30:08.041562 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"568979fa-537c-45c5-952c-f87a40b194ef","Type":"ContainerStarted","Data":"1395db81401be1f584e96686dac08b81079dc9377f0f67ac885fa17b47a87685"} Jan 20 17:30:08 crc kubenswrapper[4558]: I0120 17:30:08.045899 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-76cff77fc4-bkwrr" event={"ID":"e7c82345-58f6-4542-9483-604f1ad2b5f4","Type":"ContainerStarted","Data":"61313214dcb3a55bfb055d2bef3c402cad0651369d70f00a0842a2ed0b39da06"} Jan 20 17:30:08 crc kubenswrapper[4558]: I0120 17:30:08.045977 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/placement-76cff77fc4-bkwrr" Jan 20 17:30:08 crc kubenswrapper[4558]: I0120 17:30:08.046041 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/placement-76cff77fc4-bkwrr" Jan 20 17:30:08 crc kubenswrapper[4558]: I0120 17:30:08.053016 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-api-74c98446fb-d49s5" podStartSLOduration=3.053006418 podStartE2EDuration="3.053006418s" podCreationTimestamp="2026-01-20 17:30:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:30:08.050302173 +0000 UTC m=+2901.810640140" watchObservedRunningTime="2026-01-20 17:30:08.053006418 +0000 UTC m=+2901.813344375" Jan 20 17:30:08 crc kubenswrapper[4558]: I0120 17:30:08.057260 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-65b4ff8654-bj52d" event={"ID":"113d7999-28c3-4738-aa99-bd09c1880299","Type":"ContainerStarted","Data":"9bbacbd57db249ab8218348788165000873acf1f94027640d5d33d96e3684631"} Jan 20 17:30:08 crc kubenswrapper[4558]: I0120 17:30:08.060643 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"63df8460-ab49-4787-92a7-54a14c0179ca","Type":"ContainerStarted","Data":"5f3ae803d524ce3cd6aaa39d2f91f748bd07e8465458e550a0da0000266138f6"} Jan 20 17:30:08 crc kubenswrapper[4558]: I0120 17:30:08.060689 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"63df8460-ab49-4787-92a7-54a14c0179ca","Type":"ContainerStarted","Data":"c7aed5c14d46c61c407b439488d836d230e8284647781ff979fb14718686f4f9"} Jan 20 17:30:08 crc kubenswrapper[4558]: I0120 17:30:08.075884 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/placement-76cff77fc4-bkwrr" podStartSLOduration=3.075870779 podStartE2EDuration="3.075870779s" podCreationTimestamp="2026-01-20 17:30:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:30:08.073185471 +0000 UTC m=+2901.833523438" watchObservedRunningTime="2026-01-20 17:30:08.075870779 +0000 UTC m=+2901.836208747" Jan 20 17:30:08 crc kubenswrapper[4558]: I0120 17:30:08.111131 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-keystone-listener-65b4ff8654-bj52d" podStartSLOduration=3.111111314 podStartE2EDuration="3.111111314s" podCreationTimestamp="2026-01-20 17:30:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:30:08.092256641 +0000 UTC m=+2901.852594607" watchObservedRunningTime="2026-01-20 17:30:08.111111314 +0000 UTC m=+2901.871449281" Jan 20 17:30:09 crc kubenswrapper[4558]: I0120 17:30:09.071726 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"63df8460-ab49-4787-92a7-54a14c0179ca","Type":"ContainerStarted","Data":"226374bee93280652306e72191bcd834be88cb52e519757b561baa0a32c4b976"} Jan 20 17:30:09 crc kubenswrapper[4558]: I0120 17:30:09.072279 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:30:09 crc kubenswrapper[4558]: I0120 17:30:09.074896 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"568979fa-537c-45c5-952c-f87a40b194ef","Type":"ContainerStarted","Data":"b2cc3eb7df47c10e18cf97ab2f8a7780ca8a8e35c8a8c749aa1b4a85dd0a58ad"} Jan 20 17:30:09 crc kubenswrapper[4558]: I0120 17:30:09.089844 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/cinder-api-0" podStartSLOduration=3.089828913 podStartE2EDuration="3.089828913s" podCreationTimestamp="2026-01-20 17:30:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:30:09.085313002 +0000 UTC m=+2902.845650969" watchObservedRunningTime="2026-01-20 17:30:09.089828913 +0000 UTC m=+2902.850166879" Jan 20 17:30:09 crc kubenswrapper[4558]: I0120 17:30:09.110568 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/cinder-scheduler-0" podStartSLOduration=3.110547901 podStartE2EDuration="3.110547901s" podCreationTimestamp="2026-01-20 17:30:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:30:09.108559693 +0000 UTC m=+2902.868897659" watchObservedRunningTime="2026-01-20 17:30:09.110547901 +0000 UTC m=+2902.870885859" Jan 20 17:30:09 crc kubenswrapper[4558]: I0120 17:30:09.389342 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-db-sync-hbj94" Jan 20 17:30:09 crc kubenswrapper[4558]: I0120 17:30:09.546188 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/323fa0e2-7555-4fd5-bd80-65474fa83b69-config\") pod \"323fa0e2-7555-4fd5-bd80-65474fa83b69\" (UID: \"323fa0e2-7555-4fd5-bd80-65474fa83b69\") " Jan 20 17:30:09 crc kubenswrapper[4558]: I0120 17:30:09.546515 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/323fa0e2-7555-4fd5-bd80-65474fa83b69-combined-ca-bundle\") pod \"323fa0e2-7555-4fd5-bd80-65474fa83b69\" (UID: \"323fa0e2-7555-4fd5-bd80-65474fa83b69\") " Jan 20 17:30:09 crc kubenswrapper[4558]: I0120 17:30:09.546624 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-56tdf\" (UniqueName: \"kubernetes.io/projected/323fa0e2-7555-4fd5-bd80-65474fa83b69-kube-api-access-56tdf\") pod \"323fa0e2-7555-4fd5-bd80-65474fa83b69\" (UID: \"323fa0e2-7555-4fd5-bd80-65474fa83b69\") " Jan 20 17:30:09 crc kubenswrapper[4558]: I0120 17:30:09.552137 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/323fa0e2-7555-4fd5-bd80-65474fa83b69-kube-api-access-56tdf" (OuterVolumeSpecName: "kube-api-access-56tdf") pod "323fa0e2-7555-4fd5-bd80-65474fa83b69" (UID: "323fa0e2-7555-4fd5-bd80-65474fa83b69"). InnerVolumeSpecName "kube-api-access-56tdf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:30:09 crc kubenswrapper[4558]: I0120 17:30:09.570403 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/323fa0e2-7555-4fd5-bd80-65474fa83b69-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "323fa0e2-7555-4fd5-bd80-65474fa83b69" (UID: "323fa0e2-7555-4fd5-bd80-65474fa83b69"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:30:09 crc kubenswrapper[4558]: I0120 17:30:09.573461 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/323fa0e2-7555-4fd5-bd80-65474fa83b69-config" (OuterVolumeSpecName: "config") pod "323fa0e2-7555-4fd5-bd80-65474fa83b69" (UID: "323fa0e2-7555-4fd5-bd80-65474fa83b69"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:30:09 crc kubenswrapper[4558]: I0120 17:30:09.649027 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-56tdf\" (UniqueName: \"kubernetes.io/projected/323fa0e2-7555-4fd5-bd80-65474fa83b69-kube-api-access-56tdf\") on node \"crc\" DevicePath \"\"" Jan 20 17:30:09 crc kubenswrapper[4558]: I0120 17:30:09.649062 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/323fa0e2-7555-4fd5-bd80-65474fa83b69-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:30:09 crc kubenswrapper[4558]: I0120 17:30:09.649079 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/323fa0e2-7555-4fd5-bd80-65474fa83b69-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:30:10 crc kubenswrapper[4558]: I0120 17:30:10.057344 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:30:10 crc kubenswrapper[4558]: I0120 17:30:10.057397 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:30:10 crc kubenswrapper[4558]: I0120 17:30:10.085323 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:30:10 crc kubenswrapper[4558]: I0120 17:30:10.085825 4558 generic.go:334] "Generic (PLEG): container finished" podID="fce7f5af-9139-4f50-a1c9-b74477ed188f" containerID="4899b425b0055fe954b8d69c9913e490586788a42bae1052bff5b0f5accd9abb" exitCode=0 Jan 20 17:30:10 crc kubenswrapper[4558]: I0120 17:30:10.085992 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-zwldh" event={"ID":"fce7f5af-9139-4f50-a1c9-b74477ed188f","Type":"ContainerDied","Data":"4899b425b0055fe954b8d69c9913e490586788a42bae1052bff5b0f5accd9abb"} Jan 20 17:30:10 crc kubenswrapper[4558]: I0120 17:30:10.088250 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-sync-hbj94" event={"ID":"323fa0e2-7555-4fd5-bd80-65474fa83b69","Type":"ContainerDied","Data":"5a579917f2efdfef0835625ce33001038846fa8ee06077b4f6daadfc801944f0"} Jan 20 17:30:10 crc kubenswrapper[4558]: I0120 17:30:10.088301 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5a579917f2efdfef0835625ce33001038846fa8ee06077b4f6daadfc801944f0" Jan 20 17:30:10 crc kubenswrapper[4558]: I0120 17:30:10.088444 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-db-sync-hbj94" Jan 20 17:30:10 crc kubenswrapper[4558]: I0120 17:30:10.089109 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:30:10 crc kubenswrapper[4558]: I0120 17:30:10.089599 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:30:10 crc kubenswrapper[4558]: I0120 17:30:10.296893 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-5878f88784-4j5nq"] Jan 20 17:30:10 crc kubenswrapper[4558]: E0120 17:30:10.297515 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="323fa0e2-7555-4fd5-bd80-65474fa83b69" containerName="neutron-db-sync" Jan 20 17:30:10 crc kubenswrapper[4558]: I0120 17:30:10.297534 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="323fa0e2-7555-4fd5-bd80-65474fa83b69" containerName="neutron-db-sync" Jan 20 17:30:10 crc kubenswrapper[4558]: I0120 17:30:10.297742 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="323fa0e2-7555-4fd5-bd80-65474fa83b69" containerName="neutron-db-sync" Jan 20 17:30:10 crc kubenswrapper[4558]: I0120 17:30:10.298614 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-5878f88784-4j5nq" Jan 20 17:30:10 crc kubenswrapper[4558]: I0120 17:30:10.300233 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-config" Jan 20 17:30:10 crc kubenswrapper[4558]: I0120 17:30:10.300619 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-httpd-config" Jan 20 17:30:10 crc kubenswrapper[4558]: I0120 17:30:10.300831 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-neutron-dockercfg-6d57m" Jan 20 17:30:10 crc kubenswrapper[4558]: I0120 17:30:10.311872 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-5878f88784-4j5nq"] Jan 20 17:30:10 crc kubenswrapper[4558]: I0120 17:30:10.426847 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:30:10 crc kubenswrapper[4558]: I0120 17:30:10.426936 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:30:10 crc kubenswrapper[4558]: I0120 17:30:10.460350 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:30:10 crc kubenswrapper[4558]: I0120 17:30:10.464494 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c2jvr\" (UniqueName: \"kubernetes.io/projected/a289616c-ea44-447c-a263-4744c01d5b5e-kube-api-access-c2jvr\") pod \"neutron-5878f88784-4j5nq\" (UID: \"a289616c-ea44-447c-a263-4744c01d5b5e\") " pod="openstack-kuttl-tests/neutron-5878f88784-4j5nq" Jan 20 17:30:10 crc kubenswrapper[4558]: I0120 17:30:10.464657 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/a289616c-ea44-447c-a263-4744c01d5b5e-config\") pod \"neutron-5878f88784-4j5nq\" (UID: \"a289616c-ea44-447c-a263-4744c01d5b5e\") " pod="openstack-kuttl-tests/neutron-5878f88784-4j5nq" Jan 20 17:30:10 
crc kubenswrapper[4558]: I0120 17:30:10.464802 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a289616c-ea44-447c-a263-4744c01d5b5e-combined-ca-bundle\") pod \"neutron-5878f88784-4j5nq\" (UID: \"a289616c-ea44-447c-a263-4744c01d5b5e\") " pod="openstack-kuttl-tests/neutron-5878f88784-4j5nq" Jan 20 17:30:10 crc kubenswrapper[4558]: I0120 17:30:10.464865 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/a289616c-ea44-447c-a263-4744c01d5b5e-httpd-config\") pod \"neutron-5878f88784-4j5nq\" (UID: \"a289616c-ea44-447c-a263-4744c01d5b5e\") " pod="openstack-kuttl-tests/neutron-5878f88784-4j5nq" Jan 20 17:30:10 crc kubenswrapper[4558]: I0120 17:30:10.466334 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:30:10 crc kubenswrapper[4558]: I0120 17:30:10.566697 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/a289616c-ea44-447c-a263-4744c01d5b5e-config\") pod \"neutron-5878f88784-4j5nq\" (UID: \"a289616c-ea44-447c-a263-4744c01d5b5e\") " pod="openstack-kuttl-tests/neutron-5878f88784-4j5nq" Jan 20 17:30:10 crc kubenswrapper[4558]: I0120 17:30:10.566816 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a289616c-ea44-447c-a263-4744c01d5b5e-combined-ca-bundle\") pod \"neutron-5878f88784-4j5nq\" (UID: \"a289616c-ea44-447c-a263-4744c01d5b5e\") " pod="openstack-kuttl-tests/neutron-5878f88784-4j5nq" Jan 20 17:30:10 crc kubenswrapper[4558]: I0120 17:30:10.566863 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/a289616c-ea44-447c-a263-4744c01d5b5e-httpd-config\") pod \"neutron-5878f88784-4j5nq\" (UID: \"a289616c-ea44-447c-a263-4744c01d5b5e\") " pod="openstack-kuttl-tests/neutron-5878f88784-4j5nq" Jan 20 17:30:10 crc kubenswrapper[4558]: I0120 17:30:10.566951 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c2jvr\" (UniqueName: \"kubernetes.io/projected/a289616c-ea44-447c-a263-4744c01d5b5e-kube-api-access-c2jvr\") pod \"neutron-5878f88784-4j5nq\" (UID: \"a289616c-ea44-447c-a263-4744c01d5b5e\") " pod="openstack-kuttl-tests/neutron-5878f88784-4j5nq" Jan 20 17:30:10 crc kubenswrapper[4558]: I0120 17:30:10.578927 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a289616c-ea44-447c-a263-4744c01d5b5e-combined-ca-bundle\") pod \"neutron-5878f88784-4j5nq\" (UID: \"a289616c-ea44-447c-a263-4744c01d5b5e\") " pod="openstack-kuttl-tests/neutron-5878f88784-4j5nq" Jan 20 17:30:10 crc kubenswrapper[4558]: I0120 17:30:10.579023 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/a289616c-ea44-447c-a263-4744c01d5b5e-config\") pod \"neutron-5878f88784-4j5nq\" (UID: \"a289616c-ea44-447c-a263-4744c01d5b5e\") " pod="openstack-kuttl-tests/neutron-5878f88784-4j5nq" Jan 20 17:30:10 crc kubenswrapper[4558]: I0120 17:30:10.586893 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: 
\"kubernetes.io/secret/a289616c-ea44-447c-a263-4744c01d5b5e-httpd-config\") pod \"neutron-5878f88784-4j5nq\" (UID: \"a289616c-ea44-447c-a263-4744c01d5b5e\") " pod="openstack-kuttl-tests/neutron-5878f88784-4j5nq" Jan 20 17:30:10 crc kubenswrapper[4558]: I0120 17:30:10.588407 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c2jvr\" (UniqueName: \"kubernetes.io/projected/a289616c-ea44-447c-a263-4744c01d5b5e-kube-api-access-c2jvr\") pod \"neutron-5878f88784-4j5nq\" (UID: \"a289616c-ea44-447c-a263-4744c01d5b5e\") " pod="openstack-kuttl-tests/neutron-5878f88784-4j5nq" Jan 20 17:30:10 crc kubenswrapper[4558]: I0120 17:30:10.615451 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-5878f88784-4j5nq" Jan 20 17:30:11 crc kubenswrapper[4558]: I0120 17:30:11.035340 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-5878f88784-4j5nq"] Jan 20 17:30:11 crc kubenswrapper[4558]: W0120 17:30:11.041990 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda289616c_ea44_447c_a263_4744c01d5b5e.slice/crio-477eb853f8cade9de5067dc4ac34643d4e443f00f7b1f52ebd367a76fb0b8ca2 WatchSource:0}: Error finding container 477eb853f8cade9de5067dc4ac34643d4e443f00f7b1f52ebd367a76fb0b8ca2: Status 404 returned error can't find the container with id 477eb853f8cade9de5067dc4ac34643d4e443f00f7b1f52ebd367a76fb0b8ca2 Jan 20 17:30:11 crc kubenswrapper[4558]: I0120 17:30:11.100592 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-5878f88784-4j5nq" event={"ID":"a289616c-ea44-447c-a263-4744c01d5b5e","Type":"ContainerStarted","Data":"477eb853f8cade9de5067dc4ac34643d4e443f00f7b1f52ebd367a76fb0b8ca2"} Jan 20 17:30:11 crc kubenswrapper[4558]: I0120 17:30:11.100866 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:30:11 crc kubenswrapper[4558]: I0120 17:30:11.100925 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:30:11 crc kubenswrapper[4558]: I0120 17:30:11.100939 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:30:11 crc kubenswrapper[4558]: I0120 17:30:11.149683 4558 scope.go:117] "RemoveContainer" containerID="30ed7aba992e1e08f5fc61d9f1994bef19246447cd45bbdb4562c998c6ff53d3" Jan 20 17:30:11 crc kubenswrapper[4558]: I0120 17:30:11.182964 4558 scope.go:117] "RemoveContainer" containerID="0af8204595f0ad0b8bd69669cc56303c864bdc64f7220c28eb66afc83033aa7c" Jan 20 17:30:11 crc kubenswrapper[4558]: I0120 17:30:11.225890 4558 scope.go:117] "RemoveContainer" containerID="ff11f5de1d67fe6865fff0f4a7d6166b294cda32601d7f50b6f2472fca230f0f" Jan 20 17:30:11 crc kubenswrapper[4558]: I0120 17:30:11.265457 4558 scope.go:117] "RemoveContainer" containerID="663137f7885e57e3788c4f07743e4e3a3ed295d4d32787f249df7daefd8a4ea6" Jan 20 17:30:11 crc kubenswrapper[4558]: I0120 17:30:11.328251 4558 scope.go:117] "RemoveContainer" containerID="27473c2ff83e868f723f6402e1bd91d3230ab7a18cd06577ec40b96f27a8676a" Jan 20 17:30:11 crc kubenswrapper[4558]: I0120 17:30:11.361831 4558 scope.go:117] "RemoveContainer" containerID="2b330ecbee7bbb3b71ca63bcecf70606762e1f81d67e6e0a6cfab6537f33a7a4" Jan 20 17:30:11 crc kubenswrapper[4558]: I0120 17:30:11.375411 4558 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-zwldh" Jan 20 17:30:11 crc kubenswrapper[4558]: I0120 17:30:11.388983 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:30:11 crc kubenswrapper[4558]: I0120 17:30:11.404450 4558 scope.go:117] "RemoveContainer" containerID="a14d46445c620e36992101173d2a0d80fcb77d671533a2ad4c05704ae7e3dd72" Jan 20 17:30:11 crc kubenswrapper[4558]: I0120 17:30:11.434579 4558 scope.go:117] "RemoveContainer" containerID="1d239ea1d0bf395c205ee9453448d581f75ec979383afda6ed0ada4d6982504a" Jan 20 17:30:11 crc kubenswrapper[4558]: I0120 17:30:11.459372 4558 scope.go:117] "RemoveContainer" containerID="ff1a9f56eba4d6a58e189c76b9c17f98147866827a5459b702643795452019b3" Jan 20 17:30:11 crc kubenswrapper[4558]: I0120 17:30:11.487890 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/fce7f5af-9139-4f50-a1c9-b74477ed188f-fernet-keys\") pod \"fce7f5af-9139-4f50-a1c9-b74477ed188f\" (UID: \"fce7f5af-9139-4f50-a1c9-b74477ed188f\") " Jan 20 17:30:11 crc kubenswrapper[4558]: I0120 17:30:11.487942 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rwj9v\" (UniqueName: \"kubernetes.io/projected/fce7f5af-9139-4f50-a1c9-b74477ed188f-kube-api-access-rwj9v\") pod \"fce7f5af-9139-4f50-a1c9-b74477ed188f\" (UID: \"fce7f5af-9139-4f50-a1c9-b74477ed188f\") " Jan 20 17:30:11 crc kubenswrapper[4558]: I0120 17:30:11.488022 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/fce7f5af-9139-4f50-a1c9-b74477ed188f-credential-keys\") pod \"fce7f5af-9139-4f50-a1c9-b74477ed188f\" (UID: \"fce7f5af-9139-4f50-a1c9-b74477ed188f\") " Jan 20 17:30:11 crc kubenswrapper[4558]: I0120 17:30:11.488042 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fce7f5af-9139-4f50-a1c9-b74477ed188f-config-data\") pod \"fce7f5af-9139-4f50-a1c9-b74477ed188f\" (UID: \"fce7f5af-9139-4f50-a1c9-b74477ed188f\") " Jan 20 17:30:11 crc kubenswrapper[4558]: I0120 17:30:11.488259 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fce7f5af-9139-4f50-a1c9-b74477ed188f-combined-ca-bundle\") pod \"fce7f5af-9139-4f50-a1c9-b74477ed188f\" (UID: \"fce7f5af-9139-4f50-a1c9-b74477ed188f\") " Jan 20 17:30:11 crc kubenswrapper[4558]: I0120 17:30:11.488327 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fce7f5af-9139-4f50-a1c9-b74477ed188f-scripts\") pod \"fce7f5af-9139-4f50-a1c9-b74477ed188f\" (UID: \"fce7f5af-9139-4f50-a1c9-b74477ed188f\") " Jan 20 17:30:11 crc kubenswrapper[4558]: I0120 17:30:11.493472 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fce7f5af-9139-4f50-a1c9-b74477ed188f-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "fce7f5af-9139-4f50-a1c9-b74477ed188f" (UID: "fce7f5af-9139-4f50-a1c9-b74477ed188f"). InnerVolumeSpecName "fernet-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:30:11 crc kubenswrapper[4558]: I0120 17:30:11.493815 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fce7f5af-9139-4f50-a1c9-b74477ed188f-scripts" (OuterVolumeSpecName: "scripts") pod "fce7f5af-9139-4f50-a1c9-b74477ed188f" (UID: "fce7f5af-9139-4f50-a1c9-b74477ed188f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:30:11 crc kubenswrapper[4558]: I0120 17:30:11.493956 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fce7f5af-9139-4f50-a1c9-b74477ed188f-kube-api-access-rwj9v" (OuterVolumeSpecName: "kube-api-access-rwj9v") pod "fce7f5af-9139-4f50-a1c9-b74477ed188f" (UID: "fce7f5af-9139-4f50-a1c9-b74477ed188f"). InnerVolumeSpecName "kube-api-access-rwj9v". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:30:11 crc kubenswrapper[4558]: I0120 17:30:11.494108 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fce7f5af-9139-4f50-a1c9-b74477ed188f-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "fce7f5af-9139-4f50-a1c9-b74477ed188f" (UID: "fce7f5af-9139-4f50-a1c9-b74477ed188f"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:30:11 crc kubenswrapper[4558]: I0120 17:30:11.516929 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fce7f5af-9139-4f50-a1c9-b74477ed188f-config-data" (OuterVolumeSpecName: "config-data") pod "fce7f5af-9139-4f50-a1c9-b74477ed188f" (UID: "fce7f5af-9139-4f50-a1c9-b74477ed188f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:30:11 crc kubenswrapper[4558]: I0120 17:30:11.520904 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fce7f5af-9139-4f50-a1c9-b74477ed188f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fce7f5af-9139-4f50-a1c9-b74477ed188f" (UID: "fce7f5af-9139-4f50-a1c9-b74477ed188f"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:30:11 crc kubenswrapper[4558]: I0120 17:30:11.524244 4558 scope.go:117] "RemoveContainer" containerID="f11848ac2fc339da2a4334199ad841e032de939b5925b9d1a2ce800535efd8c2" Jan 20 17:30:11 crc kubenswrapper[4558]: I0120 17:30:11.545680 4558 scope.go:117] "RemoveContainer" containerID="c3f90e45f10a2d4bedae0abbbf6303211fbf4e5ffaad8f639762a180b6dadc32" Jan 20 17:30:11 crc kubenswrapper[4558]: I0120 17:30:11.588854 4558 scope.go:117] "RemoveContainer" containerID="bf0dac54002843637dcb1d66cd90c6114eab7b2d37bfe7b7235714925a431b60" Jan 20 17:30:11 crc kubenswrapper[4558]: I0120 17:30:11.591095 4558 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/fce7f5af-9139-4f50-a1c9-b74477ed188f-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:30:11 crc kubenswrapper[4558]: I0120 17:30:11.591127 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rwj9v\" (UniqueName: \"kubernetes.io/projected/fce7f5af-9139-4f50-a1c9-b74477ed188f-kube-api-access-rwj9v\") on node \"crc\" DevicePath \"\"" Jan 20 17:30:11 crc kubenswrapper[4558]: I0120 17:30:11.591154 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fce7f5af-9139-4f50-a1c9-b74477ed188f-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:30:11 crc kubenswrapper[4558]: I0120 17:30:11.591235 4558 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/fce7f5af-9139-4f50-a1c9-b74477ed188f-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:30:11 crc kubenswrapper[4558]: I0120 17:30:11.591249 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fce7f5af-9139-4f50-a1c9-b74477ed188f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:30:11 crc kubenswrapper[4558]: I0120 17:30:11.591257 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fce7f5af-9139-4f50-a1c9-b74477ed188f-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:30:11 crc kubenswrapper[4558]: I0120 17:30:11.653361 4558 scope.go:117] "RemoveContainer" containerID="a48e3d27977f717a8a67a15fc538b85cc155b19326c26e2228f720ee64ecbce3" Jan 20 17:30:11 crc kubenswrapper[4558]: I0120 17:30:11.681898 4558 scope.go:117] "RemoveContainer" containerID="bc4cc07eb90d7d3d3906a3d0cffea06fd114245a87ecda09cca341976ee3130b" Jan 20 17:30:11 crc kubenswrapper[4558]: I0120 17:30:11.732803 4558 scope.go:117] "RemoveContainer" containerID="17c1bc185af60a311f104555bef3f1cc5c63333bd26c434a782630b2668d6f84" Jan 20 17:30:11 crc kubenswrapper[4558]: I0120 17:30:11.765988 4558 scope.go:117] "RemoveContainer" containerID="735dcde94cbdc51444810e74df6806f075f4ccbace4e341d82a534d321b0a02d" Jan 20 17:30:11 crc kubenswrapper[4558]: I0120 17:30:11.845722 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:30:11 crc kubenswrapper[4558]: I0120 17:30:11.849974 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:30:12 crc kubenswrapper[4558]: I0120 17:30:12.111556 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-5878f88784-4j5nq" 
event={"ID":"a289616c-ea44-447c-a263-4744c01d5b5e","Type":"ContainerStarted","Data":"f8d054c8c4c51555f3cda668c1ab5da9f1dcb5f7d87997a3ed3c22db03630067"} Jan 20 17:30:12 crc kubenswrapper[4558]: I0120 17:30:12.111902 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-5878f88784-4j5nq" event={"ID":"a289616c-ea44-447c-a263-4744c01d5b5e","Type":"ContainerStarted","Data":"2eba8336553f4bec8ad0739564c6671e683452b7d048a156e1a6e3cf92ae581f"} Jan 20 17:30:12 crc kubenswrapper[4558]: I0120 17:30:12.112182 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/neutron-5878f88784-4j5nq" Jan 20 17:30:12 crc kubenswrapper[4558]: I0120 17:30:12.114768 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-zwldh" event={"ID":"fce7f5af-9139-4f50-a1c9-b74477ed188f","Type":"ContainerDied","Data":"d2b7e4c217f1fc4ea05d59cbb27568b7434e7093e652affe0cf68c0c7e32c41f"} Jan 20 17:30:12 crc kubenswrapper[4558]: I0120 17:30:12.114823 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d2b7e4c217f1fc4ea05d59cbb27568b7434e7093e652affe0cf68c0c7e32c41f" Jan 20 17:30:12 crc kubenswrapper[4558]: I0120 17:30:12.115023 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-zwldh" Jan 20 17:30:12 crc kubenswrapper[4558]: I0120 17:30:12.136618 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/neutron-5878f88784-4j5nq" podStartSLOduration=2.136600679 podStartE2EDuration="2.136600679s" podCreationTimestamp="2026-01-20 17:30:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:30:12.130489598 +0000 UTC m=+2905.890827565" watchObservedRunningTime="2026-01-20 17:30:12.136600679 +0000 UTC m=+2905.896938646" Jan 20 17:30:12 crc kubenswrapper[4558]: I0120 17:30:12.296291 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-55b7797494-6bq45"] Jan 20 17:30:12 crc kubenswrapper[4558]: E0120 17:30:12.296818 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fce7f5af-9139-4f50-a1c9-b74477ed188f" containerName="keystone-bootstrap" Jan 20 17:30:12 crc kubenswrapper[4558]: I0120 17:30:12.296841 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="fce7f5af-9139-4f50-a1c9-b74477ed188f" containerName="keystone-bootstrap" Jan 20 17:30:12 crc kubenswrapper[4558]: I0120 17:30:12.297020 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="fce7f5af-9139-4f50-a1c9-b74477ed188f" containerName="keystone-bootstrap" Jan 20 17:30:12 crc kubenswrapper[4558]: I0120 17:30:12.297818 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-55b7797494-6bq45" Jan 20 17:30:12 crc kubenswrapper[4558]: I0120 17:30:12.300014 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone" Jan 20 17:30:12 crc kubenswrapper[4558]: I0120 17:30:12.300311 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-scripts" Jan 20 17:30:12 crc kubenswrapper[4558]: I0120 17:30:12.301375 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-keystone-dockercfg-8n4lx" Jan 20 17:30:12 crc kubenswrapper[4558]: I0120 17:30:12.301522 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-config-data" Jan 20 17:30:12 crc kubenswrapper[4558]: I0120 17:30:12.302935 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-55b7797494-6bq45"] Jan 20 17:30:12 crc kubenswrapper[4558]: I0120 17:30:12.411503 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bb135ff5-5284-4558-b549-c59fee8cc0e9-scripts\") pod \"keystone-55b7797494-6bq45\" (UID: \"bb135ff5-5284-4558-b549-c59fee8cc0e9\") " pod="openstack-kuttl-tests/keystone-55b7797494-6bq45" Jan 20 17:30:12 crc kubenswrapper[4558]: I0120 17:30:12.411621 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bb135ff5-5284-4558-b549-c59fee8cc0e9-combined-ca-bundle\") pod \"keystone-55b7797494-6bq45\" (UID: \"bb135ff5-5284-4558-b549-c59fee8cc0e9\") " pod="openstack-kuttl-tests/keystone-55b7797494-6bq45" Jan 20 17:30:12 crc kubenswrapper[4558]: I0120 17:30:12.411719 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2kxc2\" (UniqueName: \"kubernetes.io/projected/bb135ff5-5284-4558-b549-c59fee8cc0e9-kube-api-access-2kxc2\") pod \"keystone-55b7797494-6bq45\" (UID: \"bb135ff5-5284-4558-b549-c59fee8cc0e9\") " pod="openstack-kuttl-tests/keystone-55b7797494-6bq45" Jan 20 17:30:12 crc kubenswrapper[4558]: I0120 17:30:12.411782 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/bb135ff5-5284-4558-b549-c59fee8cc0e9-credential-keys\") pod \"keystone-55b7797494-6bq45\" (UID: \"bb135ff5-5284-4558-b549-c59fee8cc0e9\") " pod="openstack-kuttl-tests/keystone-55b7797494-6bq45" Jan 20 17:30:12 crc kubenswrapper[4558]: I0120 17:30:12.411853 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/bb135ff5-5284-4558-b549-c59fee8cc0e9-fernet-keys\") pod \"keystone-55b7797494-6bq45\" (UID: \"bb135ff5-5284-4558-b549-c59fee8cc0e9\") " pod="openstack-kuttl-tests/keystone-55b7797494-6bq45" Jan 20 17:30:12 crc kubenswrapper[4558]: I0120 17:30:12.411919 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bb135ff5-5284-4558-b549-c59fee8cc0e9-config-data\") pod \"keystone-55b7797494-6bq45\" (UID: \"bb135ff5-5284-4558-b549-c59fee8cc0e9\") " pod="openstack-kuttl-tests/keystone-55b7797494-6bq45" Jan 20 17:30:12 crc kubenswrapper[4558]: I0120 17:30:12.513378 4558 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"kube-api-access-2kxc2\" (UniqueName: \"kubernetes.io/projected/bb135ff5-5284-4558-b549-c59fee8cc0e9-kube-api-access-2kxc2\") pod \"keystone-55b7797494-6bq45\" (UID: \"bb135ff5-5284-4558-b549-c59fee8cc0e9\") " pod="openstack-kuttl-tests/keystone-55b7797494-6bq45" Jan 20 17:30:12 crc kubenswrapper[4558]: I0120 17:30:12.513448 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/bb135ff5-5284-4558-b549-c59fee8cc0e9-credential-keys\") pod \"keystone-55b7797494-6bq45\" (UID: \"bb135ff5-5284-4558-b549-c59fee8cc0e9\") " pod="openstack-kuttl-tests/keystone-55b7797494-6bq45" Jan 20 17:30:12 crc kubenswrapper[4558]: I0120 17:30:12.513512 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/bb135ff5-5284-4558-b549-c59fee8cc0e9-fernet-keys\") pod \"keystone-55b7797494-6bq45\" (UID: \"bb135ff5-5284-4558-b549-c59fee8cc0e9\") " pod="openstack-kuttl-tests/keystone-55b7797494-6bq45" Jan 20 17:30:12 crc kubenswrapper[4558]: I0120 17:30:12.513580 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bb135ff5-5284-4558-b549-c59fee8cc0e9-config-data\") pod \"keystone-55b7797494-6bq45\" (UID: \"bb135ff5-5284-4558-b549-c59fee8cc0e9\") " pod="openstack-kuttl-tests/keystone-55b7797494-6bq45" Jan 20 17:30:12 crc kubenswrapper[4558]: I0120 17:30:12.513659 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bb135ff5-5284-4558-b549-c59fee8cc0e9-scripts\") pod \"keystone-55b7797494-6bq45\" (UID: \"bb135ff5-5284-4558-b549-c59fee8cc0e9\") " pod="openstack-kuttl-tests/keystone-55b7797494-6bq45" Jan 20 17:30:12 crc kubenswrapper[4558]: I0120 17:30:12.513710 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bb135ff5-5284-4558-b549-c59fee8cc0e9-combined-ca-bundle\") pod \"keystone-55b7797494-6bq45\" (UID: \"bb135ff5-5284-4558-b549-c59fee8cc0e9\") " pod="openstack-kuttl-tests/keystone-55b7797494-6bq45" Jan 20 17:30:12 crc kubenswrapper[4558]: I0120 17:30:12.518306 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/bb135ff5-5284-4558-b549-c59fee8cc0e9-fernet-keys\") pod \"keystone-55b7797494-6bq45\" (UID: \"bb135ff5-5284-4558-b549-c59fee8cc0e9\") " pod="openstack-kuttl-tests/keystone-55b7797494-6bq45" Jan 20 17:30:12 crc kubenswrapper[4558]: I0120 17:30:12.518665 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/bb135ff5-5284-4558-b549-c59fee8cc0e9-credential-keys\") pod \"keystone-55b7797494-6bq45\" (UID: \"bb135ff5-5284-4558-b549-c59fee8cc0e9\") " pod="openstack-kuttl-tests/keystone-55b7797494-6bq45" Jan 20 17:30:12 crc kubenswrapper[4558]: I0120 17:30:12.518877 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bb135ff5-5284-4558-b549-c59fee8cc0e9-config-data\") pod \"keystone-55b7797494-6bq45\" (UID: \"bb135ff5-5284-4558-b549-c59fee8cc0e9\") " pod="openstack-kuttl-tests/keystone-55b7797494-6bq45" Jan 20 17:30:12 crc kubenswrapper[4558]: I0120 17:30:12.519845 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/bb135ff5-5284-4558-b549-c59fee8cc0e9-scripts\") pod \"keystone-55b7797494-6bq45\" (UID: \"bb135ff5-5284-4558-b549-c59fee8cc0e9\") " pod="openstack-kuttl-tests/keystone-55b7797494-6bq45" Jan 20 17:30:12 crc kubenswrapper[4558]: I0120 17:30:12.520333 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bb135ff5-5284-4558-b549-c59fee8cc0e9-combined-ca-bundle\") pod \"keystone-55b7797494-6bq45\" (UID: \"bb135ff5-5284-4558-b549-c59fee8cc0e9\") " pod="openstack-kuttl-tests/keystone-55b7797494-6bq45" Jan 20 17:30:12 crc kubenswrapper[4558]: I0120 17:30:12.530458 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2kxc2\" (UniqueName: \"kubernetes.io/projected/bb135ff5-5284-4558-b549-c59fee8cc0e9-kube-api-access-2kxc2\") pod \"keystone-55b7797494-6bq45\" (UID: \"bb135ff5-5284-4558-b549-c59fee8cc0e9\") " pod="openstack-kuttl-tests/keystone-55b7797494-6bq45" Jan 20 17:30:12 crc kubenswrapper[4558]: I0120 17:30:12.613197 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-55b7797494-6bq45" Jan 20 17:30:12 crc kubenswrapper[4558]: I0120 17:30:12.798271 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:30:12 crc kubenswrapper[4558]: I0120 17:30:12.926702 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:30:13 crc kubenswrapper[4558]: I0120 17:30:13.042620 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-55b7797494-6bq45"] Jan 20 17:30:13 crc kubenswrapper[4558]: I0120 17:30:13.125134 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-55b7797494-6bq45" event={"ID":"bb135ff5-5284-4558-b549-c59fee8cc0e9","Type":"ContainerStarted","Data":"2fbc5b4b4ea247202660c1ce798e4d3959684966521bbd40599e4512a87b0f52"} Jan 20 17:30:14 crc kubenswrapper[4558]: I0120 17:30:14.144136 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-55b7797494-6bq45" event={"ID":"bb135ff5-5284-4558-b549-c59fee8cc0e9","Type":"ContainerStarted","Data":"9842d6850ee563852429054c791f3062cb318e3f1aae2137f5ef5dfa0598575e"} Jan 20 17:30:14 crc kubenswrapper[4558]: I0120 17:30:14.163902 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/keystone-55b7797494-6bq45" podStartSLOduration=2.163883224 podStartE2EDuration="2.163883224s" podCreationTimestamp="2026-01-20 17:30:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:30:14.157787491 +0000 UTC m=+2907.918125459" watchObservedRunningTime="2026-01-20 17:30:14.163883224 +0000 UTC m=+2907.924221190" Jan 20 17:30:15 crc kubenswrapper[4558]: I0120 17:30:15.155699 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/keystone-55b7797494-6bq45" Jan 20 17:30:16 crc kubenswrapper[4558]: I0120 17:30:16.582048 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:30:16 crc kubenswrapper[4558]: I0120 17:30:16.985553 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/barbican-api-74c98446fb-d49s5" Jan 
20 17:30:17 crc kubenswrapper[4558]: I0120 17:30:17.013019 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/barbican-api-74c98446fb-d49s5" Jan 20 17:30:18 crc kubenswrapper[4558]: I0120 17:30:18.190876 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:30:22 crc kubenswrapper[4558]: I0120 17:30:22.565928 4558 scope.go:117] "RemoveContainer" containerID="f275e9f5de330b3c79316d5871106c6bcf90b698e0744c5beb28da45c336b36d" Jan 20 17:30:22 crc kubenswrapper[4558]: E0120 17:30:22.566525 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:30:29 crc kubenswrapper[4558]: I0120 17:30:29.252491 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:30:36 crc kubenswrapper[4558]: I0120 17:30:36.576534 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/placement-76cff77fc4-bkwrr" Jan 20 17:30:36 crc kubenswrapper[4558]: I0120 17:30:36.577046 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/placement-76cff77fc4-bkwrr" Jan 20 17:30:37 crc kubenswrapper[4558]: I0120 17:30:37.565998 4558 scope.go:117] "RemoveContainer" containerID="f275e9f5de330b3c79316d5871106c6bcf90b698e0744c5beb28da45c336b36d" Jan 20 17:30:37 crc kubenswrapper[4558]: E0120 17:30:37.566874 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:30:40 crc kubenswrapper[4558]: I0120 17:30:40.624544 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/neutron-5878f88784-4j5nq" Jan 20 17:30:43 crc kubenswrapper[4558]: I0120 17:30:43.884124 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/keystone-55b7797494-6bq45" Jan 20 17:30:44 crc kubenswrapper[4558]: I0120 17:30:44.183986 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:30:44 crc kubenswrapper[4558]: I0120 17:30:44.185317 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstackclient" Jan 20 17:30:44 crc kubenswrapper[4558]: I0120 17:30:44.186970 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-config" Jan 20 17:30:44 crc kubenswrapper[4558]: I0120 17:30:44.187329 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"openstack-config-secret" Jan 20 17:30:44 crc kubenswrapper[4558]: I0120 17:30:44.187516 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"openstackclient-openstackclient-dockercfg-hlmz9" Jan 20 17:30:44 crc kubenswrapper[4558]: I0120 17:30:44.188841 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:30:44 crc kubenswrapper[4558]: I0120 17:30:44.291757 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3058bcc-fe5c-4662-ad3c-982e823eb04a-combined-ca-bundle\") pod \"openstackclient\" (UID: \"a3058bcc-fe5c-4662-ad3c-982e823eb04a\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:30:44 crc kubenswrapper[4558]: I0120 17:30:44.292184 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bcth7\" (UniqueName: \"kubernetes.io/projected/a3058bcc-fe5c-4662-ad3c-982e823eb04a-kube-api-access-bcth7\") pod \"openstackclient\" (UID: \"a3058bcc-fe5c-4662-ad3c-982e823eb04a\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:30:44 crc kubenswrapper[4558]: I0120 17:30:44.292386 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/a3058bcc-fe5c-4662-ad3c-982e823eb04a-openstack-config-secret\") pod \"openstackclient\" (UID: \"a3058bcc-fe5c-4662-ad3c-982e823eb04a\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:30:44 crc kubenswrapper[4558]: I0120 17:30:44.292465 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/a3058bcc-fe5c-4662-ad3c-982e823eb04a-openstack-config\") pod \"openstackclient\" (UID: \"a3058bcc-fe5c-4662-ad3c-982e823eb04a\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:30:44 crc kubenswrapper[4558]: I0120 17:30:44.394638 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3058bcc-fe5c-4662-ad3c-982e823eb04a-combined-ca-bundle\") pod \"openstackclient\" (UID: \"a3058bcc-fe5c-4662-ad3c-982e823eb04a\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:30:44 crc kubenswrapper[4558]: I0120 17:30:44.394701 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bcth7\" (UniqueName: \"kubernetes.io/projected/a3058bcc-fe5c-4662-ad3c-982e823eb04a-kube-api-access-bcth7\") pod \"openstackclient\" (UID: \"a3058bcc-fe5c-4662-ad3c-982e823eb04a\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:30:44 crc kubenswrapper[4558]: I0120 17:30:44.394827 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/a3058bcc-fe5c-4662-ad3c-982e823eb04a-openstack-config-secret\") pod \"openstackclient\" (UID: \"a3058bcc-fe5c-4662-ad3c-982e823eb04a\") " 
pod="openstack-kuttl-tests/openstackclient" Jan 20 17:30:44 crc kubenswrapper[4558]: I0120 17:30:44.394894 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/a3058bcc-fe5c-4662-ad3c-982e823eb04a-openstack-config\") pod \"openstackclient\" (UID: \"a3058bcc-fe5c-4662-ad3c-982e823eb04a\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:30:44 crc kubenswrapper[4558]: I0120 17:30:44.395819 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/a3058bcc-fe5c-4662-ad3c-982e823eb04a-openstack-config\") pod \"openstackclient\" (UID: \"a3058bcc-fe5c-4662-ad3c-982e823eb04a\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:30:44 crc kubenswrapper[4558]: I0120 17:30:44.403072 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/a3058bcc-fe5c-4662-ad3c-982e823eb04a-openstack-config-secret\") pod \"openstackclient\" (UID: \"a3058bcc-fe5c-4662-ad3c-982e823eb04a\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:30:44 crc kubenswrapper[4558]: I0120 17:30:44.403705 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3058bcc-fe5c-4662-ad3c-982e823eb04a-combined-ca-bundle\") pod \"openstackclient\" (UID: \"a3058bcc-fe5c-4662-ad3c-982e823eb04a\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:30:44 crc kubenswrapper[4558]: I0120 17:30:44.411812 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bcth7\" (UniqueName: \"kubernetes.io/projected/a3058bcc-fe5c-4662-ad3c-982e823eb04a-kube-api-access-bcth7\") pod \"openstackclient\" (UID: \"a3058bcc-fe5c-4662-ad3c-982e823eb04a\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:30:44 crc kubenswrapper[4558]: I0120 17:30:44.514123 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstackclient" Jan 20 17:30:44 crc kubenswrapper[4558]: I0120 17:30:44.926904 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:30:45 crc kubenswrapper[4558]: I0120 17:30:45.477699 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstackclient" event={"ID":"a3058bcc-fe5c-4662-ad3c-982e823eb04a","Type":"ContainerStarted","Data":"83a869e63968cef48fe6ad2503082138083707d2b97eb35d862c937912bd1d3d"} Jan 20 17:30:45 crc kubenswrapper[4558]: I0120 17:30:45.478076 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstackclient" event={"ID":"a3058bcc-fe5c-4662-ad3c-982e823eb04a","Type":"ContainerStarted","Data":"5c54f5f5a93f36a3706aabe4cdbcbfdb5e2ea0d19795a3f0980ef326205937af"} Jan 20 17:30:45 crc kubenswrapper[4558]: I0120 17:30:45.507710 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/openstackclient" podStartSLOduration=1.507677361 podStartE2EDuration="1.507677361s" podCreationTimestamp="2026-01-20 17:30:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:30:45.506539923 +0000 UTC m=+2939.266877890" watchObservedRunningTime="2026-01-20 17:30:45.507677361 +0000 UTC m=+2939.268015329" Jan 20 17:30:47 crc kubenswrapper[4558]: I0120 17:30:47.320804 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/swift-proxy-6b8d9c59b6-z8fqn"] Jan 20 17:30:47 crc kubenswrapper[4558]: I0120 17:30:47.322665 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-proxy-6b8d9c59b6-z8fqn" Jan 20 17:30:47 crc kubenswrapper[4558]: I0120 17:30:47.326110 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"swift-proxy-config-data" Jan 20 17:30:47 crc kubenswrapper[4558]: I0120 17:30:47.338736 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-proxy-6b8d9c59b6-z8fqn"] Jan 20 17:30:47 crc kubenswrapper[4558]: I0120 17:30:47.357140 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d-combined-ca-bundle\") pod \"swift-proxy-6b8d9c59b6-z8fqn\" (UID: \"fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d\") " pod="openstack-kuttl-tests/swift-proxy-6b8d9c59b6-z8fqn" Jan 20 17:30:47 crc kubenswrapper[4558]: I0120 17:30:47.357310 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d-log-httpd\") pod \"swift-proxy-6b8d9c59b6-z8fqn\" (UID: \"fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d\") " pod="openstack-kuttl-tests/swift-proxy-6b8d9c59b6-z8fqn" Jan 20 17:30:47 crc kubenswrapper[4558]: I0120 17:30:47.357414 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d-config-data\") pod \"swift-proxy-6b8d9c59b6-z8fqn\" (UID: \"fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d\") " pod="openstack-kuttl-tests/swift-proxy-6b8d9c59b6-z8fqn" Jan 20 17:30:47 crc kubenswrapper[4558]: I0120 17:30:47.357492 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"kube-api-access-qlcrt\" (UniqueName: \"kubernetes.io/projected/fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d-kube-api-access-qlcrt\") pod \"swift-proxy-6b8d9c59b6-z8fqn\" (UID: \"fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d\") " pod="openstack-kuttl-tests/swift-proxy-6b8d9c59b6-z8fqn" Jan 20 17:30:47 crc kubenswrapper[4558]: I0120 17:30:47.357559 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d-etc-swift\") pod \"swift-proxy-6b8d9c59b6-z8fqn\" (UID: \"fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d\") " pod="openstack-kuttl-tests/swift-proxy-6b8d9c59b6-z8fqn" Jan 20 17:30:47 crc kubenswrapper[4558]: I0120 17:30:47.357635 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d-run-httpd\") pod \"swift-proxy-6b8d9c59b6-z8fqn\" (UID: \"fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d\") " pod="openstack-kuttl-tests/swift-proxy-6b8d9c59b6-z8fqn" Jan 20 17:30:47 crc kubenswrapper[4558]: I0120 17:30:47.459478 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d-combined-ca-bundle\") pod \"swift-proxy-6b8d9c59b6-z8fqn\" (UID: \"fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d\") " pod="openstack-kuttl-tests/swift-proxy-6b8d9c59b6-z8fqn" Jan 20 17:30:47 crc kubenswrapper[4558]: I0120 17:30:47.459600 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d-log-httpd\") pod \"swift-proxy-6b8d9c59b6-z8fqn\" (UID: \"fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d\") " pod="openstack-kuttl-tests/swift-proxy-6b8d9c59b6-z8fqn" Jan 20 17:30:47 crc kubenswrapper[4558]: I0120 17:30:47.459656 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d-config-data\") pod \"swift-proxy-6b8d9c59b6-z8fqn\" (UID: \"fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d\") " pod="openstack-kuttl-tests/swift-proxy-6b8d9c59b6-z8fqn" Jan 20 17:30:47 crc kubenswrapper[4558]: I0120 17:30:47.459690 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qlcrt\" (UniqueName: \"kubernetes.io/projected/fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d-kube-api-access-qlcrt\") pod \"swift-proxy-6b8d9c59b6-z8fqn\" (UID: \"fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d\") " pod="openstack-kuttl-tests/swift-proxy-6b8d9c59b6-z8fqn" Jan 20 17:30:47 crc kubenswrapper[4558]: I0120 17:30:47.459714 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d-etc-swift\") pod \"swift-proxy-6b8d9c59b6-z8fqn\" (UID: \"fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d\") " pod="openstack-kuttl-tests/swift-proxy-6b8d9c59b6-z8fqn" Jan 20 17:30:47 crc kubenswrapper[4558]: I0120 17:30:47.459756 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d-run-httpd\") pod \"swift-proxy-6b8d9c59b6-z8fqn\" (UID: \"fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d\") " pod="openstack-kuttl-tests/swift-proxy-6b8d9c59b6-z8fqn" Jan 20 17:30:47 crc kubenswrapper[4558]: I0120 17:30:47.460195 
4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d-log-httpd\") pod \"swift-proxy-6b8d9c59b6-z8fqn\" (UID: \"fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d\") " pod="openstack-kuttl-tests/swift-proxy-6b8d9c59b6-z8fqn" Jan 20 17:30:47 crc kubenswrapper[4558]: I0120 17:30:47.460321 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d-run-httpd\") pod \"swift-proxy-6b8d9c59b6-z8fqn\" (UID: \"fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d\") " pod="openstack-kuttl-tests/swift-proxy-6b8d9c59b6-z8fqn" Jan 20 17:30:47 crc kubenswrapper[4558]: I0120 17:30:47.469515 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d-combined-ca-bundle\") pod \"swift-proxy-6b8d9c59b6-z8fqn\" (UID: \"fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d\") " pod="openstack-kuttl-tests/swift-proxy-6b8d9c59b6-z8fqn" Jan 20 17:30:47 crc kubenswrapper[4558]: I0120 17:30:47.472380 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d-config-data\") pod \"swift-proxy-6b8d9c59b6-z8fqn\" (UID: \"fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d\") " pod="openstack-kuttl-tests/swift-proxy-6b8d9c59b6-z8fqn" Jan 20 17:30:47 crc kubenswrapper[4558]: I0120 17:30:47.472472 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d-etc-swift\") pod \"swift-proxy-6b8d9c59b6-z8fqn\" (UID: \"fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d\") " pod="openstack-kuttl-tests/swift-proxy-6b8d9c59b6-z8fqn" Jan 20 17:30:47 crc kubenswrapper[4558]: I0120 17:30:47.479670 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qlcrt\" (UniqueName: \"kubernetes.io/projected/fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d-kube-api-access-qlcrt\") pod \"swift-proxy-6b8d9c59b6-z8fqn\" (UID: \"fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d\") " pod="openstack-kuttl-tests/swift-proxy-6b8d9c59b6-z8fqn" Jan 20 17:30:47 crc kubenswrapper[4558]: I0120 17:30:47.647975 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/swift-proxy-6b8d9c59b6-z8fqn" Jan 20 17:30:48 crc kubenswrapper[4558]: I0120 17:30:48.098121 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-proxy-6b8d9c59b6-z8fqn"] Jan 20 17:30:48 crc kubenswrapper[4558]: I0120 17:30:48.513302 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-6b8d9c59b6-z8fqn" event={"ID":"fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d","Type":"ContainerStarted","Data":"1990d0bdef6da51da465520b09ecd48fce8e28b11c4163d2bf1d75f432ef1a20"} Jan 20 17:30:48 crc kubenswrapper[4558]: I0120 17:30:48.513557 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-6b8d9c59b6-z8fqn" event={"ID":"fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d","Type":"ContainerStarted","Data":"157e978c4ecc8e76d59459dde878219563637e712816d504a6cc4d4c37722f06"} Jan 20 17:30:48 crc kubenswrapper[4558]: I0120 17:30:48.513570 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-6b8d9c59b6-z8fqn" event={"ID":"fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d","Type":"ContainerStarted","Data":"4088b38ae9786f8c6b4ef5c81e601b056820aaab42a1dacb13b196d0e2754e1c"} Jan 20 17:30:48 crc kubenswrapper[4558]: I0120 17:30:48.514818 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/swift-proxy-6b8d9c59b6-z8fqn" Jan 20 17:30:48 crc kubenswrapper[4558]: I0120 17:30:48.514851 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/swift-proxy-6b8d9c59b6-z8fqn" Jan 20 17:30:48 crc kubenswrapper[4558]: I0120 17:30:48.541256 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/swift-proxy-6b8d9c59b6-z8fqn" podStartSLOduration=1.54124409 podStartE2EDuration="1.54124409s" podCreationTimestamp="2026-01-20 17:30:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:30:48.534855055 +0000 UTC m=+2942.295193013" watchObservedRunningTime="2026-01-20 17:30:48.54124409 +0000 UTC m=+2942.301582057" Jan 20 17:30:48 crc kubenswrapper[4558]: I0120 17:30:48.712838 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:30:48 crc kubenswrapper[4558]: I0120 17:30:48.713717 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="b9f98dae-f452-470a-b189-73bf68f60a83" containerName="ceilometer-central-agent" containerID="cri-o://6428db4c2b901272e1858dd44864d09cb610d55279efa36a870c1deaa8640299" gracePeriod=30 Jan 20 17:30:48 crc kubenswrapper[4558]: I0120 17:30:48.713828 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="b9f98dae-f452-470a-b189-73bf68f60a83" containerName="ceilometer-notification-agent" containerID="cri-o://a4534babe827dc538ef40509c4b12b441fb5d5753bbac7c36e30efffc9ded647" gracePeriod=30 Jan 20 17:30:48 crc kubenswrapper[4558]: I0120 17:30:48.713760 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="b9f98dae-f452-470a-b189-73bf68f60a83" containerName="proxy-httpd" containerID="cri-o://289b514a790a35157db641c744953dda67cf07a71d5bd4f2955ba6a0e7198bb1" gracePeriod=30 Jan 20 17:30:48 crc kubenswrapper[4558]: I0120 17:30:48.713743 4558 kuberuntime_container.go:808] 
"Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="b9f98dae-f452-470a-b189-73bf68f60a83" containerName="sg-core" containerID="cri-o://0170e31fb16386b6d672e9aa3018bdf8494fb2367332d154bae35a48b701ece9" gracePeriod=30 Jan 20 17:30:49 crc kubenswrapper[4558]: I0120 17:30:49.522904 4558 generic.go:334] "Generic (PLEG): container finished" podID="b9f98dae-f452-470a-b189-73bf68f60a83" containerID="289b514a790a35157db641c744953dda67cf07a71d5bd4f2955ba6a0e7198bb1" exitCode=0 Jan 20 17:30:49 crc kubenswrapper[4558]: I0120 17:30:49.522940 4558 generic.go:334] "Generic (PLEG): container finished" podID="b9f98dae-f452-470a-b189-73bf68f60a83" containerID="0170e31fb16386b6d672e9aa3018bdf8494fb2367332d154bae35a48b701ece9" exitCode=2 Jan 20 17:30:49 crc kubenswrapper[4558]: I0120 17:30:49.522949 4558 generic.go:334] "Generic (PLEG): container finished" podID="b9f98dae-f452-470a-b189-73bf68f60a83" containerID="6428db4c2b901272e1858dd44864d09cb610d55279efa36a870c1deaa8640299" exitCode=0 Jan 20 17:30:49 crc kubenswrapper[4558]: I0120 17:30:49.522990 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"b9f98dae-f452-470a-b189-73bf68f60a83","Type":"ContainerDied","Data":"289b514a790a35157db641c744953dda67cf07a71d5bd4f2955ba6a0e7198bb1"} Jan 20 17:30:49 crc kubenswrapper[4558]: I0120 17:30:49.523047 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"b9f98dae-f452-470a-b189-73bf68f60a83","Type":"ContainerDied","Data":"0170e31fb16386b6d672e9aa3018bdf8494fb2367332d154bae35a48b701ece9"} Jan 20 17:30:49 crc kubenswrapper[4558]: I0120 17:30:49.523060 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"b9f98dae-f452-470a-b189-73bf68f60a83","Type":"ContainerDied","Data":"6428db4c2b901272e1858dd44864d09cb610d55279efa36a870c1deaa8640299"} Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.039442 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.112577 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b9f98dae-f452-470a-b189-73bf68f60a83-sg-core-conf-yaml\") pod \"b9f98dae-f452-470a-b189-73bf68f60a83\" (UID: \"b9f98dae-f452-470a-b189-73bf68f60a83\") " Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.112708 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b9f98dae-f452-470a-b189-73bf68f60a83-run-httpd\") pod \"b9f98dae-f452-470a-b189-73bf68f60a83\" (UID: \"b9f98dae-f452-470a-b189-73bf68f60a83\") " Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.112743 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b9f98dae-f452-470a-b189-73bf68f60a83-scripts\") pod \"b9f98dae-f452-470a-b189-73bf68f60a83\" (UID: \"b9f98dae-f452-470a-b189-73bf68f60a83\") " Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.112767 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b9f98dae-f452-470a-b189-73bf68f60a83-log-httpd\") pod \"b9f98dae-f452-470a-b189-73bf68f60a83\" (UID: \"b9f98dae-f452-470a-b189-73bf68f60a83\") " Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.112793 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ww4bl\" (UniqueName: \"kubernetes.io/projected/b9f98dae-f452-470a-b189-73bf68f60a83-kube-api-access-ww4bl\") pod \"b9f98dae-f452-470a-b189-73bf68f60a83\" (UID: \"b9f98dae-f452-470a-b189-73bf68f60a83\") " Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.112853 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9f98dae-f452-470a-b189-73bf68f60a83-combined-ca-bundle\") pod \"b9f98dae-f452-470a-b189-73bf68f60a83\" (UID: \"b9f98dae-f452-470a-b189-73bf68f60a83\") " Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.112878 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9f98dae-f452-470a-b189-73bf68f60a83-config-data\") pod \"b9f98dae-f452-470a-b189-73bf68f60a83\" (UID: \"b9f98dae-f452-470a-b189-73bf68f60a83\") " Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.113066 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b9f98dae-f452-470a-b189-73bf68f60a83-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "b9f98dae-f452-470a-b189-73bf68f60a83" (UID: "b9f98dae-f452-470a-b189-73bf68f60a83"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.113486 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b9f98dae-f452-470a-b189-73bf68f60a83-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "b9f98dae-f452-470a-b189-73bf68f60a83" (UID: "b9f98dae-f452-470a-b189-73bf68f60a83"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.113896 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b9f98dae-f452-470a-b189-73bf68f60a83-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.113920 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b9f98dae-f452-470a-b189-73bf68f60a83-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.118671 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b9f98dae-f452-470a-b189-73bf68f60a83-kube-api-access-ww4bl" (OuterVolumeSpecName: "kube-api-access-ww4bl") pod "b9f98dae-f452-470a-b189-73bf68f60a83" (UID: "b9f98dae-f452-470a-b189-73bf68f60a83"). InnerVolumeSpecName "kube-api-access-ww4bl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.119924 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9f98dae-f452-470a-b189-73bf68f60a83-scripts" (OuterVolumeSpecName: "scripts") pod "b9f98dae-f452-470a-b189-73bf68f60a83" (UID: "b9f98dae-f452-470a-b189-73bf68f60a83"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.135322 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9f98dae-f452-470a-b189-73bf68f60a83-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "b9f98dae-f452-470a-b189-73bf68f60a83" (UID: "b9f98dae-f452-470a-b189-73bf68f60a83"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.161564 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9f98dae-f452-470a-b189-73bf68f60a83-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b9f98dae-f452-470a-b189-73bf68f60a83" (UID: "b9f98dae-f452-470a-b189-73bf68f60a83"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.175971 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9f98dae-f452-470a-b189-73bf68f60a83-config-data" (OuterVolumeSpecName: "config-data") pod "b9f98dae-f452-470a-b189-73bf68f60a83" (UID: "b9f98dae-f452-470a-b189-73bf68f60a83"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.214919 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b9f98dae-f452-470a-b189-73bf68f60a83-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.214958 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ww4bl\" (UniqueName: \"kubernetes.io/projected/b9f98dae-f452-470a-b189-73bf68f60a83-kube-api-access-ww4bl\") on node \"crc\" DevicePath \"\"" Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.214976 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9f98dae-f452-470a-b189-73bf68f60a83-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.214987 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9f98dae-f452-470a-b189-73bf68f60a83-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.214998 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b9f98dae-f452-470a-b189-73bf68f60a83-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.467881 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-db-create-ph8wn"] Jan 20 17:30:50 crc kubenswrapper[4558]: E0120 17:30:50.468421 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9f98dae-f452-470a-b189-73bf68f60a83" containerName="ceilometer-notification-agent" Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.468443 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9f98dae-f452-470a-b189-73bf68f60a83" containerName="ceilometer-notification-agent" Jan 20 17:30:50 crc kubenswrapper[4558]: E0120 17:30:50.468458 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9f98dae-f452-470a-b189-73bf68f60a83" containerName="ceilometer-central-agent" Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.468464 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9f98dae-f452-470a-b189-73bf68f60a83" containerName="ceilometer-central-agent" Jan 20 17:30:50 crc kubenswrapper[4558]: E0120 17:30:50.468480 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9f98dae-f452-470a-b189-73bf68f60a83" containerName="proxy-httpd" Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.468487 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9f98dae-f452-470a-b189-73bf68f60a83" containerName="proxy-httpd" Jan 20 17:30:50 crc kubenswrapper[4558]: E0120 17:30:50.468530 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9f98dae-f452-470a-b189-73bf68f60a83" containerName="sg-core" Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.468536 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9f98dae-f452-470a-b189-73bf68f60a83" containerName="sg-core" Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.468766 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9f98dae-f452-470a-b189-73bf68f60a83" containerName="sg-core" Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.468795 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9f98dae-f452-470a-b189-73bf68f60a83" 
containerName="ceilometer-central-agent" Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.468805 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9f98dae-f452-470a-b189-73bf68f60a83" containerName="ceilometer-notification-agent" Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.468819 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9f98dae-f452-470a-b189-73bf68f60a83" containerName="proxy-httpd" Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.469534 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-db-create-ph8wn" Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.477149 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-2fe2-account-create-update-tmfpb"] Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.478939 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-2fe2-account-create-update-tmfpb" Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.481800 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-api-db-secret" Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.499416 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-db-create-ph8wn"] Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.506236 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-2fe2-account-create-update-tmfpb"] Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.523336 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p2qz7\" (UniqueName: \"kubernetes.io/projected/ac18e294-d27c-4012-91e4-16b6b875b781-kube-api-access-p2qz7\") pod \"nova-api-2fe2-account-create-update-tmfpb\" (UID: \"ac18e294-d27c-4012-91e4-16b6b875b781\") " pod="openstack-kuttl-tests/nova-api-2fe2-account-create-update-tmfpb" Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.523749 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-652wv\" (UniqueName: \"kubernetes.io/projected/ce4270c3-42ac-4529-8dad-a9da8b758d43-kube-api-access-652wv\") pod \"nova-api-db-create-ph8wn\" (UID: \"ce4270c3-42ac-4529-8dad-a9da8b758d43\") " pod="openstack-kuttl-tests/nova-api-db-create-ph8wn" Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.523811 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ac18e294-d27c-4012-91e4-16b6b875b781-operator-scripts\") pod \"nova-api-2fe2-account-create-update-tmfpb\" (UID: \"ac18e294-d27c-4012-91e4-16b6b875b781\") " pod="openstack-kuttl-tests/nova-api-2fe2-account-create-update-tmfpb" Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.524111 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ce4270c3-42ac-4529-8dad-a9da8b758d43-operator-scripts\") pod \"nova-api-db-create-ph8wn\" (UID: \"ce4270c3-42ac-4529-8dad-a9da8b758d43\") " pod="openstack-kuttl-tests/nova-api-db-create-ph8wn" Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.538983 4558 generic.go:334] "Generic (PLEG): container finished" podID="b9f98dae-f452-470a-b189-73bf68f60a83" 
containerID="a4534babe827dc538ef40509c4b12b441fb5d5753bbac7c36e30efffc9ded647" exitCode=0 Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.539044 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"b9f98dae-f452-470a-b189-73bf68f60a83","Type":"ContainerDied","Data":"a4534babe827dc538ef40509c4b12b441fb5d5753bbac7c36e30efffc9ded647"} Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.540129 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"b9f98dae-f452-470a-b189-73bf68f60a83","Type":"ContainerDied","Data":"de2595c1be910c8baab53452ee69cdb3f1c2c8f4a017701c9c57a3c424643584"} Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.540154 4558 scope.go:117] "RemoveContainer" containerID="289b514a790a35157db641c744953dda67cf07a71d5bd4f2955ba6a0e7198bb1" Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.539060 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.580007 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell0-db-create-ngdbp"] Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.581355 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-db-create-ngdbp" Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.588549 4558 scope.go:117] "RemoveContainer" containerID="0170e31fb16386b6d672e9aa3018bdf8494fb2367332d154bae35a48b701ece9" Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.612670 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-db-create-ngdbp"] Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.622051 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.626110 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/91098276-3b20-48eb-a0c6-0126a50d19af-operator-scripts\") pod \"nova-cell0-db-create-ngdbp\" (UID: \"91098276-3b20-48eb-a0c6-0126a50d19af\") " pod="openstack-kuttl-tests/nova-cell0-db-create-ngdbp" Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.626209 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xbrxx\" (UniqueName: \"kubernetes.io/projected/91098276-3b20-48eb-a0c6-0126a50d19af-kube-api-access-xbrxx\") pod \"nova-cell0-db-create-ngdbp\" (UID: \"91098276-3b20-48eb-a0c6-0126a50d19af\") " pod="openstack-kuttl-tests/nova-cell0-db-create-ngdbp" Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.626304 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-652wv\" (UniqueName: \"kubernetes.io/projected/ce4270c3-42ac-4529-8dad-a9da8b758d43-kube-api-access-652wv\") pod \"nova-api-db-create-ph8wn\" (UID: \"ce4270c3-42ac-4529-8dad-a9da8b758d43\") " pod="openstack-kuttl-tests/nova-api-db-create-ph8wn" Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.626347 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ac18e294-d27c-4012-91e4-16b6b875b781-operator-scripts\") pod \"nova-api-2fe2-account-create-update-tmfpb\" (UID: 
\"ac18e294-d27c-4012-91e4-16b6b875b781\") " pod="openstack-kuttl-tests/nova-api-2fe2-account-create-update-tmfpb" Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.626475 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ce4270c3-42ac-4529-8dad-a9da8b758d43-operator-scripts\") pod \"nova-api-db-create-ph8wn\" (UID: \"ce4270c3-42ac-4529-8dad-a9da8b758d43\") " pod="openstack-kuttl-tests/nova-api-db-create-ph8wn" Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.626523 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p2qz7\" (UniqueName: \"kubernetes.io/projected/ac18e294-d27c-4012-91e4-16b6b875b781-kube-api-access-p2qz7\") pod \"nova-api-2fe2-account-create-update-tmfpb\" (UID: \"ac18e294-d27c-4012-91e4-16b6b875b781\") " pod="openstack-kuttl-tests/nova-api-2fe2-account-create-update-tmfpb" Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.627328 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ac18e294-d27c-4012-91e4-16b6b875b781-operator-scripts\") pod \"nova-api-2fe2-account-create-update-tmfpb\" (UID: \"ac18e294-d27c-4012-91e4-16b6b875b781\") " pod="openstack-kuttl-tests/nova-api-2fe2-account-create-update-tmfpb" Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.627938 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ce4270c3-42ac-4529-8dad-a9da8b758d43-operator-scripts\") pod \"nova-api-db-create-ph8wn\" (UID: \"ce4270c3-42ac-4529-8dad-a9da8b758d43\") " pod="openstack-kuttl-tests/nova-api-db-create-ph8wn" Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.630703 4558 scope.go:117] "RemoveContainer" containerID="a4534babe827dc538ef40509c4b12b441fb5d5753bbac7c36e30efffc9ded647" Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.636775 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.642406 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-652wv\" (UniqueName: \"kubernetes.io/projected/ce4270c3-42ac-4529-8dad-a9da8b758d43-kube-api-access-652wv\") pod \"nova-api-db-create-ph8wn\" (UID: \"ce4270c3-42ac-4529-8dad-a9da8b758d43\") " pod="openstack-kuttl-tests/nova-api-db-create-ph8wn" Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.651610 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.655201 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p2qz7\" (UniqueName: \"kubernetes.io/projected/ac18e294-d27c-4012-91e4-16b6b875b781-kube-api-access-p2qz7\") pod \"nova-api-2fe2-account-create-update-tmfpb\" (UID: \"ac18e294-d27c-4012-91e4-16b6b875b781\") " pod="openstack-kuttl-tests/nova-api-2fe2-account-create-update-tmfpb" Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.664367 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.664764 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.666892 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.669803 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.671894 4558 scope.go:117] "RemoveContainer" containerID="6428db4c2b901272e1858dd44864d09cb610d55279efa36a870c1deaa8640299" Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.688897 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-db-create-45f8v"] Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.690187 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-db-create-45f8v" Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.707074 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell0-41bf-account-create-update-9f8d4"] Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.708523 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-41bf-account-create-update-9f8d4" Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.710717 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-db-secret" Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.711697 4558 scope.go:117] "RemoveContainer" containerID="289b514a790a35157db641c744953dda67cf07a71d5bd4f2955ba6a0e7198bb1" Jan 20 17:30:50 crc kubenswrapper[4558]: E0120 17:30:50.712051 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"289b514a790a35157db641c744953dda67cf07a71d5bd4f2955ba6a0e7198bb1\": container with ID starting with 289b514a790a35157db641c744953dda67cf07a71d5bd4f2955ba6a0e7198bb1 not found: ID does not exist" containerID="289b514a790a35157db641c744953dda67cf07a71d5bd4f2955ba6a0e7198bb1" Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.712108 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"289b514a790a35157db641c744953dda67cf07a71d5bd4f2955ba6a0e7198bb1"} err="failed to get container status \"289b514a790a35157db641c744953dda67cf07a71d5bd4f2955ba6a0e7198bb1\": rpc error: code = NotFound desc = could not find container \"289b514a790a35157db641c744953dda67cf07a71d5bd4f2955ba6a0e7198bb1\": container with ID starting with 289b514a790a35157db641c744953dda67cf07a71d5bd4f2955ba6a0e7198bb1 not found: ID does not exist" Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.712193 4558 scope.go:117] "RemoveContainer" containerID="0170e31fb16386b6d672e9aa3018bdf8494fb2367332d154bae35a48b701ece9" Jan 20 17:30:50 crc kubenswrapper[4558]: E0120 17:30:50.712471 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0170e31fb16386b6d672e9aa3018bdf8494fb2367332d154bae35a48b701ece9\": container with ID starting with 0170e31fb16386b6d672e9aa3018bdf8494fb2367332d154bae35a48b701ece9 not found: ID does not exist" 
containerID="0170e31fb16386b6d672e9aa3018bdf8494fb2367332d154bae35a48b701ece9" Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.712495 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0170e31fb16386b6d672e9aa3018bdf8494fb2367332d154bae35a48b701ece9"} err="failed to get container status \"0170e31fb16386b6d672e9aa3018bdf8494fb2367332d154bae35a48b701ece9\": rpc error: code = NotFound desc = could not find container \"0170e31fb16386b6d672e9aa3018bdf8494fb2367332d154bae35a48b701ece9\": container with ID starting with 0170e31fb16386b6d672e9aa3018bdf8494fb2367332d154bae35a48b701ece9 not found: ID does not exist" Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.712514 4558 scope.go:117] "RemoveContainer" containerID="a4534babe827dc538ef40509c4b12b441fb5d5753bbac7c36e30efffc9ded647" Jan 20 17:30:50 crc kubenswrapper[4558]: E0120 17:30:50.712768 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a4534babe827dc538ef40509c4b12b441fb5d5753bbac7c36e30efffc9ded647\": container with ID starting with a4534babe827dc538ef40509c4b12b441fb5d5753bbac7c36e30efffc9ded647 not found: ID does not exist" containerID="a4534babe827dc538ef40509c4b12b441fb5d5753bbac7c36e30efffc9ded647" Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.712798 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a4534babe827dc538ef40509c4b12b441fb5d5753bbac7c36e30efffc9ded647"} err="failed to get container status \"a4534babe827dc538ef40509c4b12b441fb5d5753bbac7c36e30efffc9ded647\": rpc error: code = NotFound desc = could not find container \"a4534babe827dc538ef40509c4b12b441fb5d5753bbac7c36e30efffc9ded647\": container with ID starting with a4534babe827dc538ef40509c4b12b441fb5d5753bbac7c36e30efffc9ded647 not found: ID does not exist" Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.712811 4558 scope.go:117] "RemoveContainer" containerID="6428db4c2b901272e1858dd44864d09cb610d55279efa36a870c1deaa8640299" Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.712943 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-db-create-45f8v"] Jan 20 17:30:50 crc kubenswrapper[4558]: E0120 17:30:50.713206 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6428db4c2b901272e1858dd44864d09cb610d55279efa36a870c1deaa8640299\": container with ID starting with 6428db4c2b901272e1858dd44864d09cb610d55279efa36a870c1deaa8640299 not found: ID does not exist" containerID="6428db4c2b901272e1858dd44864d09cb610d55279efa36a870c1deaa8640299" Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.713239 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6428db4c2b901272e1858dd44864d09cb610d55279efa36a870c1deaa8640299"} err="failed to get container status \"6428db4c2b901272e1858dd44864d09cb610d55279efa36a870c1deaa8640299\": rpc error: code = NotFound desc = could not find container \"6428db4c2b901272e1858dd44864d09cb610d55279efa36a870c1deaa8640299\": container with ID starting with 6428db4c2b901272e1858dd44864d09cb610d55279efa36a870c1deaa8640299 not found: ID does not exist" Jan 20 17:30:50 crc kubenswrapper[4558]: I0120 17:30:50.719875 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-41bf-account-create-update-9f8d4"] Jan 20 17:30:51 crc kubenswrapper[4558]: 
I0120 17:30:50.728052 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dzbvl\" (UniqueName: \"kubernetes.io/projected/95b44804-20a9-4636-bd47-551f819b8d53-kube-api-access-dzbvl\") pod \"nova-cell1-db-create-45f8v\" (UID: \"95b44804-20a9-4636-bd47-551f819b8d53\") " pod="openstack-kuttl-tests/nova-cell1-db-create-45f8v" Jan 20 17:30:51 crc kubenswrapper[4558]: I0120 17:30:50.728095 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6ef82b34-46f8-49ec-903d-ca23dc3c1772-log-httpd\") pod \"ceilometer-0\" (UID: \"6ef82b34-46f8-49ec-903d-ca23dc3c1772\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:30:51 crc kubenswrapper[4558]: I0120 17:30:50.728120 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ef82b34-46f8-49ec-903d-ca23dc3c1772-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"6ef82b34-46f8-49ec-903d-ca23dc3c1772\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:30:51 crc kubenswrapper[4558]: I0120 17:30:50.728219 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6ef82b34-46f8-49ec-903d-ca23dc3c1772-run-httpd\") pod \"ceilometer-0\" (UID: \"6ef82b34-46f8-49ec-903d-ca23dc3c1772\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:30:51 crc kubenswrapper[4558]: I0120 17:30:50.728259 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sxrxv\" (UniqueName: \"kubernetes.io/projected/cf3515ce-a794-4635-8476-d84e98e393b8-kube-api-access-sxrxv\") pod \"nova-cell0-41bf-account-create-update-9f8d4\" (UID: \"cf3515ce-a794-4635-8476-d84e98e393b8\") " pod="openstack-kuttl-tests/nova-cell0-41bf-account-create-update-9f8d4" Jan 20 17:30:51 crc kubenswrapper[4558]: I0120 17:30:50.728281 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/95b44804-20a9-4636-bd47-551f819b8d53-operator-scripts\") pod \"nova-cell1-db-create-45f8v\" (UID: \"95b44804-20a9-4636-bd47-551f819b8d53\") " pod="openstack-kuttl-tests/nova-cell1-db-create-45f8v" Jan 20 17:30:51 crc kubenswrapper[4558]: I0120 17:30:50.728329 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6ef82b34-46f8-49ec-903d-ca23dc3c1772-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"6ef82b34-46f8-49ec-903d-ca23dc3c1772\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:30:51 crc kubenswrapper[4558]: I0120 17:30:50.728396 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cf3515ce-a794-4635-8476-d84e98e393b8-operator-scripts\") pod \"nova-cell0-41bf-account-create-update-9f8d4\" (UID: \"cf3515ce-a794-4635-8476-d84e98e393b8\") " pod="openstack-kuttl-tests/nova-cell0-41bf-account-create-update-9f8d4" Jan 20 17:30:51 crc kubenswrapper[4558]: I0120 17:30:50.728453 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ncz9t\" (UniqueName: 
\"kubernetes.io/projected/6ef82b34-46f8-49ec-903d-ca23dc3c1772-kube-api-access-ncz9t\") pod \"ceilometer-0\" (UID: \"6ef82b34-46f8-49ec-903d-ca23dc3c1772\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:30:51 crc kubenswrapper[4558]: I0120 17:30:50.728566 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/91098276-3b20-48eb-a0c6-0126a50d19af-operator-scripts\") pod \"nova-cell0-db-create-ngdbp\" (UID: \"91098276-3b20-48eb-a0c6-0126a50d19af\") " pod="openstack-kuttl-tests/nova-cell0-db-create-ngdbp" Jan 20 17:30:51 crc kubenswrapper[4558]: I0120 17:30:50.728598 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xbrxx\" (UniqueName: \"kubernetes.io/projected/91098276-3b20-48eb-a0c6-0126a50d19af-kube-api-access-xbrxx\") pod \"nova-cell0-db-create-ngdbp\" (UID: \"91098276-3b20-48eb-a0c6-0126a50d19af\") " pod="openstack-kuttl-tests/nova-cell0-db-create-ngdbp" Jan 20 17:30:51 crc kubenswrapper[4558]: I0120 17:30:50.728629 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6ef82b34-46f8-49ec-903d-ca23dc3c1772-scripts\") pod \"ceilometer-0\" (UID: \"6ef82b34-46f8-49ec-903d-ca23dc3c1772\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:30:51 crc kubenswrapper[4558]: I0120 17:30:50.728644 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6ef82b34-46f8-49ec-903d-ca23dc3c1772-config-data\") pod \"ceilometer-0\" (UID: \"6ef82b34-46f8-49ec-903d-ca23dc3c1772\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:30:51 crc kubenswrapper[4558]: I0120 17:30:50.729405 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/91098276-3b20-48eb-a0c6-0126a50d19af-operator-scripts\") pod \"nova-cell0-db-create-ngdbp\" (UID: \"91098276-3b20-48eb-a0c6-0126a50d19af\") " pod="openstack-kuttl-tests/nova-cell0-db-create-ngdbp" Jan 20 17:30:51 crc kubenswrapper[4558]: I0120 17:30:50.747725 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xbrxx\" (UniqueName: \"kubernetes.io/projected/91098276-3b20-48eb-a0c6-0126a50d19af-kube-api-access-xbrxx\") pod \"nova-cell0-db-create-ngdbp\" (UID: \"91098276-3b20-48eb-a0c6-0126a50d19af\") " pod="openstack-kuttl-tests/nova-cell0-db-create-ngdbp" Jan 20 17:30:51 crc kubenswrapper[4558]: I0120 17:30:50.784909 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-db-create-ph8wn" Jan 20 17:30:51 crc kubenswrapper[4558]: I0120 17:30:50.826500 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-2fe2-account-create-update-tmfpb" Jan 20 17:30:51 crc kubenswrapper[4558]: I0120 17:30:50.830655 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cf3515ce-a794-4635-8476-d84e98e393b8-operator-scripts\") pod \"nova-cell0-41bf-account-create-update-9f8d4\" (UID: \"cf3515ce-a794-4635-8476-d84e98e393b8\") " pod="openstack-kuttl-tests/nova-cell0-41bf-account-create-update-9f8d4" Jan 20 17:30:51 crc kubenswrapper[4558]: I0120 17:30:50.830703 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ncz9t\" (UniqueName: \"kubernetes.io/projected/6ef82b34-46f8-49ec-903d-ca23dc3c1772-kube-api-access-ncz9t\") pod \"ceilometer-0\" (UID: \"6ef82b34-46f8-49ec-903d-ca23dc3c1772\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:30:51 crc kubenswrapper[4558]: I0120 17:30:50.830815 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6ef82b34-46f8-49ec-903d-ca23dc3c1772-scripts\") pod \"ceilometer-0\" (UID: \"6ef82b34-46f8-49ec-903d-ca23dc3c1772\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:30:51 crc kubenswrapper[4558]: I0120 17:30:50.830834 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6ef82b34-46f8-49ec-903d-ca23dc3c1772-config-data\") pod \"ceilometer-0\" (UID: \"6ef82b34-46f8-49ec-903d-ca23dc3c1772\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:30:51 crc kubenswrapper[4558]: I0120 17:30:50.830857 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dzbvl\" (UniqueName: \"kubernetes.io/projected/95b44804-20a9-4636-bd47-551f819b8d53-kube-api-access-dzbvl\") pod \"nova-cell1-db-create-45f8v\" (UID: \"95b44804-20a9-4636-bd47-551f819b8d53\") " pod="openstack-kuttl-tests/nova-cell1-db-create-45f8v" Jan 20 17:30:51 crc kubenswrapper[4558]: I0120 17:30:50.830882 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6ef82b34-46f8-49ec-903d-ca23dc3c1772-log-httpd\") pod \"ceilometer-0\" (UID: \"6ef82b34-46f8-49ec-903d-ca23dc3c1772\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:30:51 crc kubenswrapper[4558]: I0120 17:30:50.830900 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ef82b34-46f8-49ec-903d-ca23dc3c1772-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"6ef82b34-46f8-49ec-903d-ca23dc3c1772\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:30:51 crc kubenswrapper[4558]: I0120 17:30:50.830948 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6ef82b34-46f8-49ec-903d-ca23dc3c1772-run-httpd\") pod \"ceilometer-0\" (UID: \"6ef82b34-46f8-49ec-903d-ca23dc3c1772\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:30:51 crc kubenswrapper[4558]: I0120 17:30:50.830978 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sxrxv\" (UniqueName: \"kubernetes.io/projected/cf3515ce-a794-4635-8476-d84e98e393b8-kube-api-access-sxrxv\") pod \"nova-cell0-41bf-account-create-update-9f8d4\" (UID: \"cf3515ce-a794-4635-8476-d84e98e393b8\") " 
pod="openstack-kuttl-tests/nova-cell0-41bf-account-create-update-9f8d4" Jan 20 17:30:51 crc kubenswrapper[4558]: I0120 17:30:50.831001 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/95b44804-20a9-4636-bd47-551f819b8d53-operator-scripts\") pod \"nova-cell1-db-create-45f8v\" (UID: \"95b44804-20a9-4636-bd47-551f819b8d53\") " pod="openstack-kuttl-tests/nova-cell1-db-create-45f8v" Jan 20 17:30:51 crc kubenswrapper[4558]: I0120 17:30:50.831038 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6ef82b34-46f8-49ec-903d-ca23dc3c1772-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"6ef82b34-46f8-49ec-903d-ca23dc3c1772\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:30:51 crc kubenswrapper[4558]: I0120 17:30:50.832491 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6ef82b34-46f8-49ec-903d-ca23dc3c1772-log-httpd\") pod \"ceilometer-0\" (UID: \"6ef82b34-46f8-49ec-903d-ca23dc3c1772\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:30:51 crc kubenswrapper[4558]: I0120 17:30:50.832725 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6ef82b34-46f8-49ec-903d-ca23dc3c1772-run-httpd\") pod \"ceilometer-0\" (UID: \"6ef82b34-46f8-49ec-903d-ca23dc3c1772\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:30:51 crc kubenswrapper[4558]: I0120 17:30:50.833354 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/95b44804-20a9-4636-bd47-551f819b8d53-operator-scripts\") pod \"nova-cell1-db-create-45f8v\" (UID: \"95b44804-20a9-4636-bd47-551f819b8d53\") " pod="openstack-kuttl-tests/nova-cell1-db-create-45f8v" Jan 20 17:30:51 crc kubenswrapper[4558]: I0120 17:30:50.834588 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ef82b34-46f8-49ec-903d-ca23dc3c1772-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"6ef82b34-46f8-49ec-903d-ca23dc3c1772\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:30:51 crc kubenswrapper[4558]: I0120 17:30:50.837182 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6ef82b34-46f8-49ec-903d-ca23dc3c1772-config-data\") pod \"ceilometer-0\" (UID: \"6ef82b34-46f8-49ec-903d-ca23dc3c1772\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:30:51 crc kubenswrapper[4558]: I0120 17:30:50.838965 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6ef82b34-46f8-49ec-903d-ca23dc3c1772-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"6ef82b34-46f8-49ec-903d-ca23dc3c1772\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:30:51 crc kubenswrapper[4558]: I0120 17:30:50.847066 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cf3515ce-a794-4635-8476-d84e98e393b8-operator-scripts\") pod \"nova-cell0-41bf-account-create-update-9f8d4\" (UID: \"cf3515ce-a794-4635-8476-d84e98e393b8\") " pod="openstack-kuttl-tests/nova-cell0-41bf-account-create-update-9f8d4" Jan 20 17:30:51 crc kubenswrapper[4558]: I0120 17:30:50.850725 4558 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6ef82b34-46f8-49ec-903d-ca23dc3c1772-scripts\") pod \"ceilometer-0\" (UID: \"6ef82b34-46f8-49ec-903d-ca23dc3c1772\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:30:51 crc kubenswrapper[4558]: I0120 17:30:50.854943 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sxrxv\" (UniqueName: \"kubernetes.io/projected/cf3515ce-a794-4635-8476-d84e98e393b8-kube-api-access-sxrxv\") pod \"nova-cell0-41bf-account-create-update-9f8d4\" (UID: \"cf3515ce-a794-4635-8476-d84e98e393b8\") " pod="openstack-kuttl-tests/nova-cell0-41bf-account-create-update-9f8d4" Jan 20 17:30:51 crc kubenswrapper[4558]: I0120 17:30:50.864922 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ncz9t\" (UniqueName: \"kubernetes.io/projected/6ef82b34-46f8-49ec-903d-ca23dc3c1772-kube-api-access-ncz9t\") pod \"ceilometer-0\" (UID: \"6ef82b34-46f8-49ec-903d-ca23dc3c1772\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:30:51 crc kubenswrapper[4558]: I0120 17:30:50.865381 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dzbvl\" (UniqueName: \"kubernetes.io/projected/95b44804-20a9-4636-bd47-551f819b8d53-kube-api-access-dzbvl\") pod \"nova-cell1-db-create-45f8v\" (UID: \"95b44804-20a9-4636-bd47-551f819b8d53\") " pod="openstack-kuttl-tests/nova-cell1-db-create-45f8v" Jan 20 17:30:51 crc kubenswrapper[4558]: I0120 17:30:50.882598 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-c055-account-create-update-4rstr"] Jan 20 17:30:51 crc kubenswrapper[4558]: I0120 17:30:50.884803 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-c055-account-create-update-4rstr" Jan 20 17:30:51 crc kubenswrapper[4558]: I0120 17:30:50.887832 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-db-secret" Jan 20 17:30:51 crc kubenswrapper[4558]: I0120 17:30:50.903656 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-db-create-ngdbp" Jan 20 17:30:51 crc kubenswrapper[4558]: I0120 17:30:50.914741 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-c055-account-create-update-4rstr"] Jan 20 17:30:51 crc kubenswrapper[4558]: I0120 17:30:50.931965 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c58c8dea-46da-43de-801f-68b8700c27e1-operator-scripts\") pod \"nova-cell1-c055-account-create-update-4rstr\" (UID: \"c58c8dea-46da-43de-801f-68b8700c27e1\") " pod="openstack-kuttl-tests/nova-cell1-c055-account-create-update-4rstr" Jan 20 17:30:51 crc kubenswrapper[4558]: I0120 17:30:50.932070 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-25g44\" (UniqueName: \"kubernetes.io/projected/c58c8dea-46da-43de-801f-68b8700c27e1-kube-api-access-25g44\") pod \"nova-cell1-c055-account-create-update-4rstr\" (UID: \"c58c8dea-46da-43de-801f-68b8700c27e1\") " pod="openstack-kuttl-tests/nova-cell1-c055-account-create-update-4rstr" Jan 20 17:30:51 crc kubenswrapper[4558]: I0120 17:30:50.985347 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:30:51 crc kubenswrapper[4558]: I0120 17:30:51.006417 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-db-create-45f8v" Jan 20 17:30:51 crc kubenswrapper[4558]: I0120 17:30:51.026371 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-41bf-account-create-update-9f8d4" Jan 20 17:30:51 crc kubenswrapper[4558]: I0120 17:30:51.034581 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c58c8dea-46da-43de-801f-68b8700c27e1-operator-scripts\") pod \"nova-cell1-c055-account-create-update-4rstr\" (UID: \"c58c8dea-46da-43de-801f-68b8700c27e1\") " pod="openstack-kuttl-tests/nova-cell1-c055-account-create-update-4rstr" Jan 20 17:30:51 crc kubenswrapper[4558]: I0120 17:30:51.034687 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-25g44\" (UniqueName: \"kubernetes.io/projected/c58c8dea-46da-43de-801f-68b8700c27e1-kube-api-access-25g44\") pod \"nova-cell1-c055-account-create-update-4rstr\" (UID: \"c58c8dea-46da-43de-801f-68b8700c27e1\") " pod="openstack-kuttl-tests/nova-cell1-c055-account-create-update-4rstr" Jan 20 17:30:51 crc kubenswrapper[4558]: I0120 17:30:51.035629 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c58c8dea-46da-43de-801f-68b8700c27e1-operator-scripts\") pod \"nova-cell1-c055-account-create-update-4rstr\" (UID: \"c58c8dea-46da-43de-801f-68b8700c27e1\") " pod="openstack-kuttl-tests/nova-cell1-c055-account-create-update-4rstr" Jan 20 17:30:51 crc kubenswrapper[4558]: I0120 17:30:51.056911 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-25g44\" (UniqueName: \"kubernetes.io/projected/c58c8dea-46da-43de-801f-68b8700c27e1-kube-api-access-25g44\") pod \"nova-cell1-c055-account-create-update-4rstr\" (UID: \"c58c8dea-46da-43de-801f-68b8700c27e1\") " pod="openstack-kuttl-tests/nova-cell1-c055-account-create-update-4rstr" Jan 20 17:30:51 crc kubenswrapper[4558]: I0120 17:30:51.213410 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-c055-account-create-update-4rstr" Jan 20 17:30:51 crc kubenswrapper[4558]: I0120 17:30:51.565900 4558 scope.go:117] "RemoveContainer" containerID="f275e9f5de330b3c79316d5871106c6bcf90b698e0744c5beb28da45c336b36d" Jan 20 17:30:51 crc kubenswrapper[4558]: E0120 17:30:51.566239 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:30:51 crc kubenswrapper[4558]: I0120 17:30:51.740441 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-db-create-45f8v"] Jan 20 17:30:51 crc kubenswrapper[4558]: I0120 17:30:51.754686 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-db-create-ngdbp"] Jan 20 17:30:51 crc kubenswrapper[4558]: I0120 17:30:51.766151 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-db-create-ph8wn"] Jan 20 17:30:51 crc kubenswrapper[4558]: W0120 17:30:51.773573 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc58c8dea_46da_43de_801f_68b8700c27e1.slice/crio-c0ce187053eb66e28defb351b24b7c9eb2c559e9c2e127bdf8e4c40cdb1744ac WatchSource:0}: Error finding container c0ce187053eb66e28defb351b24b7c9eb2c559e9c2e127bdf8e4c40cdb1744ac: Status 404 returned error can't find the container with id c0ce187053eb66e28defb351b24b7c9eb2c559e9c2e127bdf8e4c40cdb1744ac Jan 20 17:30:51 crc kubenswrapper[4558]: I0120 17:30:51.786152 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-c055-account-create-update-4rstr"] Jan 20 17:30:51 crc kubenswrapper[4558]: I0120 17:30:51.796460 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-2fe2-account-create-update-tmfpb"] Jan 20 17:30:51 crc kubenswrapper[4558]: I0120 17:30:51.807892 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:30:51 crc kubenswrapper[4558]: I0120 17:30:51.843620 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-41bf-account-create-update-9f8d4"] Jan 20 17:30:52 crc kubenswrapper[4558]: I0120 17:30:52.564564 4558 generic.go:334] "Generic (PLEG): container finished" podID="c58c8dea-46da-43de-801f-68b8700c27e1" containerID="9f9e4dd6e557284ebc769ce80f9deed9621afea1ebb690cdf6335f93f3210c7d" exitCode=0 Jan 20 17:30:52 crc kubenswrapper[4558]: I0120 17:30:52.564741 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-c055-account-create-update-4rstr" event={"ID":"c58c8dea-46da-43de-801f-68b8700c27e1","Type":"ContainerDied","Data":"9f9e4dd6e557284ebc769ce80f9deed9621afea1ebb690cdf6335f93f3210c7d"} Jan 20 17:30:52 crc kubenswrapper[4558]: I0120 17:30:52.565021 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-c055-account-create-update-4rstr" event={"ID":"c58c8dea-46da-43de-801f-68b8700c27e1","Type":"ContainerStarted","Data":"c0ce187053eb66e28defb351b24b7c9eb2c559e9c2e127bdf8e4c40cdb1744ac"} Jan 20 17:30:52 crc kubenswrapper[4558]: I0120 
17:30:52.578210 4558 generic.go:334] "Generic (PLEG): container finished" podID="91098276-3b20-48eb-a0c6-0126a50d19af" containerID="86faabac2df50963bf2c0ee255197a15360640acc9dad8b29910c78ef8f18691" exitCode=0 Jan 20 17:30:52 crc kubenswrapper[4558]: I0120 17:30:52.587483 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b9f98dae-f452-470a-b189-73bf68f60a83" path="/var/lib/kubelet/pods/b9f98dae-f452-470a-b189-73bf68f60a83/volumes" Jan 20 17:30:52 crc kubenswrapper[4558]: I0120 17:30:52.588565 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-db-create-ngdbp" event={"ID":"91098276-3b20-48eb-a0c6-0126a50d19af","Type":"ContainerDied","Data":"86faabac2df50963bf2c0ee255197a15360640acc9dad8b29910c78ef8f18691"} Jan 20 17:30:52 crc kubenswrapper[4558]: I0120 17:30:52.588601 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-db-create-ngdbp" event={"ID":"91098276-3b20-48eb-a0c6-0126a50d19af","Type":"ContainerStarted","Data":"5d3c398635199ae48a32c6a4cd5103e369ea6ffac19bef6a17807f651356c209"} Jan 20 17:30:52 crc kubenswrapper[4558]: I0120 17:30:52.589713 4558 generic.go:334] "Generic (PLEG): container finished" podID="95b44804-20a9-4636-bd47-551f819b8d53" containerID="f04b8f0579435de2c14d1baa0b1a6029323eaa4b69328f14b70cb3236804875c" exitCode=0 Jan 20 17:30:52 crc kubenswrapper[4558]: I0120 17:30:52.589834 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-db-create-45f8v" event={"ID":"95b44804-20a9-4636-bd47-551f819b8d53","Type":"ContainerDied","Data":"f04b8f0579435de2c14d1baa0b1a6029323eaa4b69328f14b70cb3236804875c"} Jan 20 17:30:52 crc kubenswrapper[4558]: I0120 17:30:52.589878 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-db-create-45f8v" event={"ID":"95b44804-20a9-4636-bd47-551f819b8d53","Type":"ContainerStarted","Data":"8d421f8c83333c296e55c7f53a9acc5a7bb6cce193c35e53b968e519ae0e06f5"} Jan 20 17:30:52 crc kubenswrapper[4558]: I0120 17:30:52.591819 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"6ef82b34-46f8-49ec-903d-ca23dc3c1772","Type":"ContainerStarted","Data":"7cc350e8d3762f24a182153c3042cd065a4a35978f3917cac9b128f8c8865844"} Jan 20 17:30:52 crc kubenswrapper[4558]: I0120 17:30:52.593613 4558 generic.go:334] "Generic (PLEG): container finished" podID="ac18e294-d27c-4012-91e4-16b6b875b781" containerID="10570933c613aadb10b453629851c02d74b4bcbbc5d9a50e439b2b15473a74fc" exitCode=0 Jan 20 17:30:52 crc kubenswrapper[4558]: I0120 17:30:52.593714 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-2fe2-account-create-update-tmfpb" event={"ID":"ac18e294-d27c-4012-91e4-16b6b875b781","Type":"ContainerDied","Data":"10570933c613aadb10b453629851c02d74b4bcbbc5d9a50e439b2b15473a74fc"} Jan 20 17:30:52 crc kubenswrapper[4558]: I0120 17:30:52.593749 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-2fe2-account-create-update-tmfpb" event={"ID":"ac18e294-d27c-4012-91e4-16b6b875b781","Type":"ContainerStarted","Data":"356cb0f91573c02ed25a905f41adc1a94a333e2d9cf55a3d41350d6e809a65b7"} Jan 20 17:30:52 crc kubenswrapper[4558]: I0120 17:30:52.595852 4558 generic.go:334] "Generic (PLEG): container finished" podID="ce4270c3-42ac-4529-8dad-a9da8b758d43" containerID="62fe0620a925c3f6194650b38fc772a458aef217a8b081fff99c16e4e4ed75bf" exitCode=0 Jan 20 17:30:52 crc kubenswrapper[4558]: 
I0120 17:30:52.595918 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-db-create-ph8wn" event={"ID":"ce4270c3-42ac-4529-8dad-a9da8b758d43","Type":"ContainerDied","Data":"62fe0620a925c3f6194650b38fc772a458aef217a8b081fff99c16e4e4ed75bf"} Jan 20 17:30:52 crc kubenswrapper[4558]: I0120 17:30:52.595937 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-db-create-ph8wn" event={"ID":"ce4270c3-42ac-4529-8dad-a9da8b758d43","Type":"ContainerStarted","Data":"1851f87d7773a5bb45b43e968f2eaa0ad955742afec6106a9af34b91d68498dd"} Jan 20 17:30:52 crc kubenswrapper[4558]: I0120 17:30:52.600587 4558 generic.go:334] "Generic (PLEG): container finished" podID="cf3515ce-a794-4635-8476-d84e98e393b8" containerID="65629aa894ee7068d5ea871d1a1a3d97a8a4dd768f01f9781ead6f9a6b82bfb2" exitCode=0 Jan 20 17:30:52 crc kubenswrapper[4558]: I0120 17:30:52.600620 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-41bf-account-create-update-9f8d4" event={"ID":"cf3515ce-a794-4635-8476-d84e98e393b8","Type":"ContainerDied","Data":"65629aa894ee7068d5ea871d1a1a3d97a8a4dd768f01f9781ead6f9a6b82bfb2"} Jan 20 17:30:52 crc kubenswrapper[4558]: I0120 17:30:52.600636 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-41bf-account-create-update-9f8d4" event={"ID":"cf3515ce-a794-4635-8476-d84e98e393b8","Type":"ContainerStarted","Data":"1d6a4a49c23cda982386459aa37abed8ac475b7370a1216fc91394983f56b7f0"} Jan 20 17:30:53 crc kubenswrapper[4558]: I0120 17:30:53.617942 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"6ef82b34-46f8-49ec-903d-ca23dc3c1772","Type":"ContainerStarted","Data":"71160c848af9886030158c2e0b4bb5723f31572ae93243d81b2efd3fdbeaaca1"} Jan 20 17:30:53 crc kubenswrapper[4558]: I0120 17:30:53.619057 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"6ef82b34-46f8-49ec-903d-ca23dc3c1772","Type":"ContainerStarted","Data":"56a82d3f9d2cfc2b2c9d587a9ce1e44099c72354572719bfe519c5baae4b77a1"} Jan 20 17:30:54 crc kubenswrapper[4558]: I0120 17:30:54.005138 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-db-create-ngdbp" Jan 20 17:30:54 crc kubenswrapper[4558]: I0120 17:30:54.102222 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-db-create-45f8v" Jan 20 17:30:54 crc kubenswrapper[4558]: I0120 17:30:54.107850 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-db-create-ph8wn" Jan 20 17:30:54 crc kubenswrapper[4558]: I0120 17:30:54.111051 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xbrxx\" (UniqueName: \"kubernetes.io/projected/91098276-3b20-48eb-a0c6-0126a50d19af-kube-api-access-xbrxx\") pod \"91098276-3b20-48eb-a0c6-0126a50d19af\" (UID: \"91098276-3b20-48eb-a0c6-0126a50d19af\") " Jan 20 17:30:54 crc kubenswrapper[4558]: I0120 17:30:54.111115 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dzbvl\" (UniqueName: \"kubernetes.io/projected/95b44804-20a9-4636-bd47-551f819b8d53-kube-api-access-dzbvl\") pod \"95b44804-20a9-4636-bd47-551f819b8d53\" (UID: \"95b44804-20a9-4636-bd47-551f819b8d53\") " Jan 20 17:30:54 crc kubenswrapper[4558]: I0120 17:30:54.111438 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/91098276-3b20-48eb-a0c6-0126a50d19af-operator-scripts\") pod \"91098276-3b20-48eb-a0c6-0126a50d19af\" (UID: \"91098276-3b20-48eb-a0c6-0126a50d19af\") " Jan 20 17:30:54 crc kubenswrapper[4558]: I0120 17:30:54.111509 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/95b44804-20a9-4636-bd47-551f819b8d53-operator-scripts\") pod \"95b44804-20a9-4636-bd47-551f819b8d53\" (UID: \"95b44804-20a9-4636-bd47-551f819b8d53\") " Jan 20 17:30:54 crc kubenswrapper[4558]: I0120 17:30:54.112261 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/95b44804-20a9-4636-bd47-551f819b8d53-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "95b44804-20a9-4636-bd47-551f819b8d53" (UID: "95b44804-20a9-4636-bd47-551f819b8d53"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:30:54 crc kubenswrapper[4558]: I0120 17:30:54.112519 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/95b44804-20a9-4636-bd47-551f819b8d53-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:30:54 crc kubenswrapper[4558]: I0120 17:30:54.115743 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/91098276-3b20-48eb-a0c6-0126a50d19af-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "91098276-3b20-48eb-a0c6-0126a50d19af" (UID: "91098276-3b20-48eb-a0c6-0126a50d19af"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:30:54 crc kubenswrapper[4558]: I0120 17:30:54.118152 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/95b44804-20a9-4636-bd47-551f819b8d53-kube-api-access-dzbvl" (OuterVolumeSpecName: "kube-api-access-dzbvl") pod "95b44804-20a9-4636-bd47-551f819b8d53" (UID: "95b44804-20a9-4636-bd47-551f819b8d53"). InnerVolumeSpecName "kube-api-access-dzbvl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:30:54 crc kubenswrapper[4558]: I0120 17:30:54.119131 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/91098276-3b20-48eb-a0c6-0126a50d19af-kube-api-access-xbrxx" (OuterVolumeSpecName: "kube-api-access-xbrxx") pod "91098276-3b20-48eb-a0c6-0126a50d19af" (UID: "91098276-3b20-48eb-a0c6-0126a50d19af"). 
InnerVolumeSpecName "kube-api-access-xbrxx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:30:54 crc kubenswrapper[4558]: I0120 17:30:54.131121 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-c055-account-create-update-4rstr" Jan 20 17:30:54 crc kubenswrapper[4558]: I0120 17:30:54.146250 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-41bf-account-create-update-9f8d4" Jan 20 17:30:54 crc kubenswrapper[4558]: I0120 17:30:54.151699 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-2fe2-account-create-update-tmfpb" Jan 20 17:30:54 crc kubenswrapper[4558]: I0120 17:30:54.213348 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cf3515ce-a794-4635-8476-d84e98e393b8-operator-scripts\") pod \"cf3515ce-a794-4635-8476-d84e98e393b8\" (UID: \"cf3515ce-a794-4635-8476-d84e98e393b8\") " Jan 20 17:30:54 crc kubenswrapper[4558]: I0120 17:30:54.213555 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ce4270c3-42ac-4529-8dad-a9da8b758d43-operator-scripts\") pod \"ce4270c3-42ac-4529-8dad-a9da8b758d43\" (UID: \"ce4270c3-42ac-4529-8dad-a9da8b758d43\") " Jan 20 17:30:54 crc kubenswrapper[4558]: I0120 17:30:54.213599 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sxrxv\" (UniqueName: \"kubernetes.io/projected/cf3515ce-a794-4635-8476-d84e98e393b8-kube-api-access-sxrxv\") pod \"cf3515ce-a794-4635-8476-d84e98e393b8\" (UID: \"cf3515ce-a794-4635-8476-d84e98e393b8\") " Jan 20 17:30:54 crc kubenswrapper[4558]: I0120 17:30:54.213705 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-652wv\" (UniqueName: \"kubernetes.io/projected/ce4270c3-42ac-4529-8dad-a9da8b758d43-kube-api-access-652wv\") pod \"ce4270c3-42ac-4529-8dad-a9da8b758d43\" (UID: \"ce4270c3-42ac-4529-8dad-a9da8b758d43\") " Jan 20 17:30:54 crc kubenswrapper[4558]: I0120 17:30:54.213808 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-25g44\" (UniqueName: \"kubernetes.io/projected/c58c8dea-46da-43de-801f-68b8700c27e1-kube-api-access-25g44\") pod \"c58c8dea-46da-43de-801f-68b8700c27e1\" (UID: \"c58c8dea-46da-43de-801f-68b8700c27e1\") " Jan 20 17:30:54 crc kubenswrapper[4558]: I0120 17:30:54.213851 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p2qz7\" (UniqueName: \"kubernetes.io/projected/ac18e294-d27c-4012-91e4-16b6b875b781-kube-api-access-p2qz7\") pod \"ac18e294-d27c-4012-91e4-16b6b875b781\" (UID: \"ac18e294-d27c-4012-91e4-16b6b875b781\") " Jan 20 17:30:54 crc kubenswrapper[4558]: I0120 17:30:54.213883 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ac18e294-d27c-4012-91e4-16b6b875b781-operator-scripts\") pod \"ac18e294-d27c-4012-91e4-16b6b875b781\" (UID: \"ac18e294-d27c-4012-91e4-16b6b875b781\") " Jan 20 17:30:54 crc kubenswrapper[4558]: I0120 17:30:54.213963 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/c58c8dea-46da-43de-801f-68b8700c27e1-operator-scripts\") pod \"c58c8dea-46da-43de-801f-68b8700c27e1\" (UID: \"c58c8dea-46da-43de-801f-68b8700c27e1\") " Jan 20 17:30:54 crc kubenswrapper[4558]: I0120 17:30:54.214333 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cf3515ce-a794-4635-8476-d84e98e393b8-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "cf3515ce-a794-4635-8476-d84e98e393b8" (UID: "cf3515ce-a794-4635-8476-d84e98e393b8"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:30:54 crc kubenswrapper[4558]: I0120 17:30:54.214424 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xbrxx\" (UniqueName: \"kubernetes.io/projected/91098276-3b20-48eb-a0c6-0126a50d19af-kube-api-access-xbrxx\") on node \"crc\" DevicePath \"\"" Jan 20 17:30:54 crc kubenswrapper[4558]: I0120 17:30:54.214441 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dzbvl\" (UniqueName: \"kubernetes.io/projected/95b44804-20a9-4636-bd47-551f819b8d53-kube-api-access-dzbvl\") on node \"crc\" DevicePath \"\"" Jan 20 17:30:54 crc kubenswrapper[4558]: I0120 17:30:54.214454 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/91098276-3b20-48eb-a0c6-0126a50d19af-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:30:54 crc kubenswrapper[4558]: I0120 17:30:54.214445 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ce4270c3-42ac-4529-8dad-a9da8b758d43-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ce4270c3-42ac-4529-8dad-a9da8b758d43" (UID: "ce4270c3-42ac-4529-8dad-a9da8b758d43"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:30:54 crc kubenswrapper[4558]: I0120 17:30:54.215308 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ac18e294-d27c-4012-91e4-16b6b875b781-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ac18e294-d27c-4012-91e4-16b6b875b781" (UID: "ac18e294-d27c-4012-91e4-16b6b875b781"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:30:54 crc kubenswrapper[4558]: I0120 17:30:54.215759 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c58c8dea-46da-43de-801f-68b8700c27e1-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c58c8dea-46da-43de-801f-68b8700c27e1" (UID: "c58c8dea-46da-43de-801f-68b8700c27e1"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:30:54 crc kubenswrapper[4558]: I0120 17:30:54.220236 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c58c8dea-46da-43de-801f-68b8700c27e1-kube-api-access-25g44" (OuterVolumeSpecName: "kube-api-access-25g44") pod "c58c8dea-46da-43de-801f-68b8700c27e1" (UID: "c58c8dea-46da-43de-801f-68b8700c27e1"). InnerVolumeSpecName "kube-api-access-25g44". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:30:54 crc kubenswrapper[4558]: I0120 17:30:54.220374 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ce4270c3-42ac-4529-8dad-a9da8b758d43-kube-api-access-652wv" (OuterVolumeSpecName: "kube-api-access-652wv") pod "ce4270c3-42ac-4529-8dad-a9da8b758d43" (UID: "ce4270c3-42ac-4529-8dad-a9da8b758d43"). InnerVolumeSpecName "kube-api-access-652wv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:30:54 crc kubenswrapper[4558]: I0120 17:30:54.220638 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ac18e294-d27c-4012-91e4-16b6b875b781-kube-api-access-p2qz7" (OuterVolumeSpecName: "kube-api-access-p2qz7") pod "ac18e294-d27c-4012-91e4-16b6b875b781" (UID: "ac18e294-d27c-4012-91e4-16b6b875b781"). InnerVolumeSpecName "kube-api-access-p2qz7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:30:54 crc kubenswrapper[4558]: I0120 17:30:54.220823 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cf3515ce-a794-4635-8476-d84e98e393b8-kube-api-access-sxrxv" (OuterVolumeSpecName: "kube-api-access-sxrxv") pod "cf3515ce-a794-4635-8476-d84e98e393b8" (UID: "cf3515ce-a794-4635-8476-d84e98e393b8"). InnerVolumeSpecName "kube-api-access-sxrxv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:30:54 crc kubenswrapper[4558]: I0120 17:30:54.316433 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cf3515ce-a794-4635-8476-d84e98e393b8-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:30:54 crc kubenswrapper[4558]: I0120 17:30:54.316470 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ce4270c3-42ac-4529-8dad-a9da8b758d43-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:30:54 crc kubenswrapper[4558]: I0120 17:30:54.316483 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sxrxv\" (UniqueName: \"kubernetes.io/projected/cf3515ce-a794-4635-8476-d84e98e393b8-kube-api-access-sxrxv\") on node \"crc\" DevicePath \"\"" Jan 20 17:30:54 crc kubenswrapper[4558]: I0120 17:30:54.316499 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-652wv\" (UniqueName: \"kubernetes.io/projected/ce4270c3-42ac-4529-8dad-a9da8b758d43-kube-api-access-652wv\") on node \"crc\" DevicePath \"\"" Jan 20 17:30:54 crc kubenswrapper[4558]: I0120 17:30:54.316511 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-25g44\" (UniqueName: \"kubernetes.io/projected/c58c8dea-46da-43de-801f-68b8700c27e1-kube-api-access-25g44\") on node \"crc\" DevicePath \"\"" Jan 20 17:30:54 crc kubenswrapper[4558]: I0120 17:30:54.316521 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p2qz7\" (UniqueName: \"kubernetes.io/projected/ac18e294-d27c-4012-91e4-16b6b875b781-kube-api-access-p2qz7\") on node \"crc\" DevicePath \"\"" Jan 20 17:30:54 crc kubenswrapper[4558]: I0120 17:30:54.316532 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ac18e294-d27c-4012-91e4-16b6b875b781-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:30:54 crc kubenswrapper[4558]: I0120 17:30:54.316541 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/c58c8dea-46da-43de-801f-68b8700c27e1-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:30:54 crc kubenswrapper[4558]: I0120 17:30:54.648229 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-2fe2-account-create-update-tmfpb" event={"ID":"ac18e294-d27c-4012-91e4-16b6b875b781","Type":"ContainerDied","Data":"356cb0f91573c02ed25a905f41adc1a94a333e2d9cf55a3d41350d6e809a65b7"} Jan 20 17:30:54 crc kubenswrapper[4558]: I0120 17:30:54.648508 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="356cb0f91573c02ed25a905f41adc1a94a333e2d9cf55a3d41350d6e809a65b7" Jan 20 17:30:54 crc kubenswrapper[4558]: I0120 17:30:54.648566 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-2fe2-account-create-update-tmfpb" Jan 20 17:30:54 crc kubenswrapper[4558]: I0120 17:30:54.650872 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-db-create-ph8wn" event={"ID":"ce4270c3-42ac-4529-8dad-a9da8b758d43","Type":"ContainerDied","Data":"1851f87d7773a5bb45b43e968f2eaa0ad955742afec6106a9af34b91d68498dd"} Jan 20 17:30:54 crc kubenswrapper[4558]: I0120 17:30:54.650911 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1851f87d7773a5bb45b43e968f2eaa0ad955742afec6106a9af34b91d68498dd" Jan 20 17:30:54 crc kubenswrapper[4558]: I0120 17:30:54.650970 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-db-create-ph8wn" Jan 20 17:30:54 crc kubenswrapper[4558]: I0120 17:30:54.654398 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-41bf-account-create-update-9f8d4" event={"ID":"cf3515ce-a794-4635-8476-d84e98e393b8","Type":"ContainerDied","Data":"1d6a4a49c23cda982386459aa37abed8ac475b7370a1216fc91394983f56b7f0"} Jan 20 17:30:54 crc kubenswrapper[4558]: I0120 17:30:54.654436 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1d6a4a49c23cda982386459aa37abed8ac475b7370a1216fc91394983f56b7f0" Jan 20 17:30:54 crc kubenswrapper[4558]: I0120 17:30:54.654745 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-41bf-account-create-update-9f8d4" Jan 20 17:30:54 crc kubenswrapper[4558]: I0120 17:30:54.658489 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-c055-account-create-update-4rstr" event={"ID":"c58c8dea-46da-43de-801f-68b8700c27e1","Type":"ContainerDied","Data":"c0ce187053eb66e28defb351b24b7c9eb2c559e9c2e127bdf8e4c40cdb1744ac"} Jan 20 17:30:54 crc kubenswrapper[4558]: I0120 17:30:54.658519 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c0ce187053eb66e28defb351b24b7c9eb2c559e9c2e127bdf8e4c40cdb1744ac" Jan 20 17:30:54 crc kubenswrapper[4558]: I0120 17:30:54.658562 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-c055-account-create-update-4rstr" Jan 20 17:30:54 crc kubenswrapper[4558]: I0120 17:30:54.661190 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-db-create-ngdbp" Jan 20 17:30:54 crc kubenswrapper[4558]: I0120 17:30:54.661274 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-db-create-ngdbp" event={"ID":"91098276-3b20-48eb-a0c6-0126a50d19af","Type":"ContainerDied","Data":"5d3c398635199ae48a32c6a4cd5103e369ea6ffac19bef6a17807f651356c209"} Jan 20 17:30:54 crc kubenswrapper[4558]: I0120 17:30:54.661454 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5d3c398635199ae48a32c6a4cd5103e369ea6ffac19bef6a17807f651356c209" Jan 20 17:30:54 crc kubenswrapper[4558]: I0120 17:30:54.664585 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-db-create-45f8v" event={"ID":"95b44804-20a9-4636-bd47-551f819b8d53","Type":"ContainerDied","Data":"8d421f8c83333c296e55c7f53a9acc5a7bb6cce193c35e53b968e519ae0e06f5"} Jan 20 17:30:54 crc kubenswrapper[4558]: I0120 17:30:54.664639 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8d421f8c83333c296e55c7f53a9acc5a7bb6cce193c35e53b968e519ae0e06f5" Jan 20 17:30:54 crc kubenswrapper[4558]: I0120 17:30:54.664710 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-db-create-45f8v" Jan 20 17:30:54 crc kubenswrapper[4558]: I0120 17:30:54.667965 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"6ef82b34-46f8-49ec-903d-ca23dc3c1772","Type":"ContainerStarted","Data":"a588daffe5917b9bbbff4a26aff1bb4ec87d7cda8c14af7d280bf9688ee2f19f"} Jan 20 17:30:55 crc kubenswrapper[4558]: I0120 17:30:55.687518 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"6ef82b34-46f8-49ec-903d-ca23dc3c1772","Type":"ContainerStarted","Data":"27d73cc55a230c02cd4a68812a64ff2aec734681b12d1528600971812141a10f"} Jan 20 17:30:55 crc kubenswrapper[4558]: I0120 17:30:55.687867 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:30:55 crc kubenswrapper[4558]: I0120 17:30:55.717684 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=2.058252871 podStartE2EDuration="5.717671348s" podCreationTimestamp="2026-01-20 17:30:50 +0000 UTC" firstStartedPulling="2026-01-20 17:30:51.813888037 +0000 UTC m=+2945.574225994" lastFinishedPulling="2026-01-20 17:30:55.473306504 +0000 UTC m=+2949.233644471" observedRunningTime="2026-01-20 17:30:55.70263793 +0000 UTC m=+2949.462975897" watchObservedRunningTime="2026-01-20 17:30:55.717671348 +0000 UTC m=+2949.478009315" Jan 20 17:30:55 crc kubenswrapper[4558]: I0120 17:30:55.964969 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-db-sync-qwwkb"] Jan 20 17:30:55 crc kubenswrapper[4558]: E0120 17:30:55.965396 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c58c8dea-46da-43de-801f-68b8700c27e1" containerName="mariadb-account-create-update" Jan 20 17:30:55 crc kubenswrapper[4558]: I0120 17:30:55.965415 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c58c8dea-46da-43de-801f-68b8700c27e1" containerName="mariadb-account-create-update" Jan 20 17:30:55 crc kubenswrapper[4558]: E0120 17:30:55.965429 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91098276-3b20-48eb-a0c6-0126a50d19af" 
containerName="mariadb-database-create" Jan 20 17:30:55 crc kubenswrapper[4558]: I0120 17:30:55.965435 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="91098276-3b20-48eb-a0c6-0126a50d19af" containerName="mariadb-database-create" Jan 20 17:30:55 crc kubenswrapper[4558]: E0120 17:30:55.965454 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac18e294-d27c-4012-91e4-16b6b875b781" containerName="mariadb-account-create-update" Jan 20 17:30:55 crc kubenswrapper[4558]: I0120 17:30:55.965459 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac18e294-d27c-4012-91e4-16b6b875b781" containerName="mariadb-account-create-update" Jan 20 17:30:55 crc kubenswrapper[4558]: E0120 17:30:55.965466 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="95b44804-20a9-4636-bd47-551f819b8d53" containerName="mariadb-database-create" Jan 20 17:30:55 crc kubenswrapper[4558]: I0120 17:30:55.965472 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="95b44804-20a9-4636-bd47-551f819b8d53" containerName="mariadb-database-create" Jan 20 17:30:55 crc kubenswrapper[4558]: E0120 17:30:55.965484 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf3515ce-a794-4635-8476-d84e98e393b8" containerName="mariadb-account-create-update" Jan 20 17:30:55 crc kubenswrapper[4558]: I0120 17:30:55.965492 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf3515ce-a794-4635-8476-d84e98e393b8" containerName="mariadb-account-create-update" Jan 20 17:30:55 crc kubenswrapper[4558]: E0120 17:30:55.965510 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce4270c3-42ac-4529-8dad-a9da8b758d43" containerName="mariadb-database-create" Jan 20 17:30:55 crc kubenswrapper[4558]: I0120 17:30:55.965517 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce4270c3-42ac-4529-8dad-a9da8b758d43" containerName="mariadb-database-create" Jan 20 17:30:55 crc kubenswrapper[4558]: I0120 17:30:55.965693 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac18e294-d27c-4012-91e4-16b6b875b781" containerName="mariadb-account-create-update" Jan 20 17:30:55 crc kubenswrapper[4558]: I0120 17:30:55.965713 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="91098276-3b20-48eb-a0c6-0126a50d19af" containerName="mariadb-database-create" Jan 20 17:30:55 crc kubenswrapper[4558]: I0120 17:30:55.965721 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c58c8dea-46da-43de-801f-68b8700c27e1" containerName="mariadb-account-create-update" Jan 20 17:30:55 crc kubenswrapper[4558]: I0120 17:30:55.965733 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf3515ce-a794-4635-8476-d84e98e393b8" containerName="mariadb-account-create-update" Jan 20 17:30:55 crc kubenswrapper[4558]: I0120 17:30:55.965743 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ce4270c3-42ac-4529-8dad-a9da8b758d43" containerName="mariadb-database-create" Jan 20 17:30:55 crc kubenswrapper[4558]: I0120 17:30:55.965755 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="95b44804-20a9-4636-bd47-551f819b8d53" containerName="mariadb-database-create" Jan 20 17:30:55 crc kubenswrapper[4558]: I0120 17:30:55.966403 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-qwwkb" Jan 20 17:30:55 crc kubenswrapper[4558]: I0120 17:30:55.968761 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-conductor-scripts" Jan 20 17:30:55 crc kubenswrapper[4558]: I0120 17:30:55.969088 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-nova-dockercfg-24bfk" Jan 20 17:30:55 crc kubenswrapper[4558]: I0120 17:30:55.969386 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-conductor-config-data" Jan 20 17:30:55 crc kubenswrapper[4558]: I0120 17:30:55.973935 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-db-sync-qwwkb"] Jan 20 17:30:56 crc kubenswrapper[4558]: I0120 17:30:56.053174 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lhb8r\" (UniqueName: \"kubernetes.io/projected/8d3adcfe-4948-4e18-910b-bb3380e977a0-kube-api-access-lhb8r\") pod \"nova-cell0-conductor-db-sync-qwwkb\" (UID: \"8d3adcfe-4948-4e18-910b-bb3380e977a0\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-qwwkb" Jan 20 17:30:56 crc kubenswrapper[4558]: I0120 17:30:56.053308 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d3adcfe-4948-4e18-910b-bb3380e977a0-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-qwwkb\" (UID: \"8d3adcfe-4948-4e18-910b-bb3380e977a0\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-qwwkb" Jan 20 17:30:56 crc kubenswrapper[4558]: I0120 17:30:56.053505 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d3adcfe-4948-4e18-910b-bb3380e977a0-config-data\") pod \"nova-cell0-conductor-db-sync-qwwkb\" (UID: \"8d3adcfe-4948-4e18-910b-bb3380e977a0\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-qwwkb" Jan 20 17:30:56 crc kubenswrapper[4558]: I0120 17:30:56.053545 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8d3adcfe-4948-4e18-910b-bb3380e977a0-scripts\") pod \"nova-cell0-conductor-db-sync-qwwkb\" (UID: \"8d3adcfe-4948-4e18-910b-bb3380e977a0\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-qwwkb" Jan 20 17:30:56 crc kubenswrapper[4558]: I0120 17:30:56.155660 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8d3adcfe-4948-4e18-910b-bb3380e977a0-scripts\") pod \"nova-cell0-conductor-db-sync-qwwkb\" (UID: \"8d3adcfe-4948-4e18-910b-bb3380e977a0\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-qwwkb" Jan 20 17:30:56 crc kubenswrapper[4558]: I0120 17:30:56.155773 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lhb8r\" (UniqueName: \"kubernetes.io/projected/8d3adcfe-4948-4e18-910b-bb3380e977a0-kube-api-access-lhb8r\") pod \"nova-cell0-conductor-db-sync-qwwkb\" (UID: \"8d3adcfe-4948-4e18-910b-bb3380e977a0\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-qwwkb" Jan 20 17:30:56 crc kubenswrapper[4558]: I0120 17:30:56.155851 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/8d3adcfe-4948-4e18-910b-bb3380e977a0-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-qwwkb\" (UID: \"8d3adcfe-4948-4e18-910b-bb3380e977a0\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-qwwkb" Jan 20 17:30:56 crc kubenswrapper[4558]: I0120 17:30:56.155940 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d3adcfe-4948-4e18-910b-bb3380e977a0-config-data\") pod \"nova-cell0-conductor-db-sync-qwwkb\" (UID: \"8d3adcfe-4948-4e18-910b-bb3380e977a0\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-qwwkb" Jan 20 17:30:56 crc kubenswrapper[4558]: I0120 17:30:56.160949 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d3adcfe-4948-4e18-910b-bb3380e977a0-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-qwwkb\" (UID: \"8d3adcfe-4948-4e18-910b-bb3380e977a0\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-qwwkb" Jan 20 17:30:56 crc kubenswrapper[4558]: I0120 17:30:56.161065 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d3adcfe-4948-4e18-910b-bb3380e977a0-config-data\") pod \"nova-cell0-conductor-db-sync-qwwkb\" (UID: \"8d3adcfe-4948-4e18-910b-bb3380e977a0\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-qwwkb" Jan 20 17:30:56 crc kubenswrapper[4558]: I0120 17:30:56.161304 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8d3adcfe-4948-4e18-910b-bb3380e977a0-scripts\") pod \"nova-cell0-conductor-db-sync-qwwkb\" (UID: \"8d3adcfe-4948-4e18-910b-bb3380e977a0\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-qwwkb" Jan 20 17:30:56 crc kubenswrapper[4558]: I0120 17:30:56.170931 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lhb8r\" (UniqueName: \"kubernetes.io/projected/8d3adcfe-4948-4e18-910b-bb3380e977a0-kube-api-access-lhb8r\") pod \"nova-cell0-conductor-db-sync-qwwkb\" (UID: \"8d3adcfe-4948-4e18-910b-bb3380e977a0\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-qwwkb" Jan 20 17:30:56 crc kubenswrapper[4558]: I0120 17:30:56.281111 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-qwwkb" Jan 20 17:30:56 crc kubenswrapper[4558]: I0120 17:30:56.741447 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-db-sync-qwwkb"] Jan 20 17:30:57 crc kubenswrapper[4558]: I0120 17:30:57.654244 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/swift-proxy-6b8d9c59b6-z8fqn" Jan 20 17:30:57 crc kubenswrapper[4558]: I0120 17:30:57.656914 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/swift-proxy-6b8d9c59b6-z8fqn" Jan 20 17:30:57 crc kubenswrapper[4558]: I0120 17:30:57.731206 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-qwwkb" event={"ID":"8d3adcfe-4948-4e18-910b-bb3380e977a0","Type":"ContainerStarted","Data":"5bd5fedc19895ba71e1b6d3077a41a79fa078729e40ca74ac886956eafb22003"} Jan 20 17:30:57 crc kubenswrapper[4558]: I0120 17:30:57.731279 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-qwwkb" event={"ID":"8d3adcfe-4948-4e18-910b-bb3380e977a0","Type":"ContainerStarted","Data":"86b127d3fcd23311598f95c16f4ebdd1257576cbcb6c88cebb1fb1a602c10728"} Jan 20 17:30:57 crc kubenswrapper[4558]: I0120 17:30:57.748500 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-qwwkb" podStartSLOduration=2.748486745 podStartE2EDuration="2.748486745s" podCreationTimestamp="2026-01-20 17:30:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:30:57.748224311 +0000 UTC m=+2951.508562279" watchObservedRunningTime="2026-01-20 17:30:57.748486745 +0000 UTC m=+2951.508824712" Jan 20 17:30:58 crc kubenswrapper[4558]: I0120 17:30:58.314347 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:30:58 crc kubenswrapper[4558]: I0120 17:30:58.314595 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="6ef82b34-46f8-49ec-903d-ca23dc3c1772" containerName="ceilometer-central-agent" containerID="cri-o://56a82d3f9d2cfc2b2c9d587a9ce1e44099c72354572719bfe519c5baae4b77a1" gracePeriod=30 Jan 20 17:30:58 crc kubenswrapper[4558]: I0120 17:30:58.315027 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="6ef82b34-46f8-49ec-903d-ca23dc3c1772" containerName="proxy-httpd" containerID="cri-o://27d73cc55a230c02cd4a68812a64ff2aec734681b12d1528600971812141a10f" gracePeriod=30 Jan 20 17:30:58 crc kubenswrapper[4558]: I0120 17:30:58.315098 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="6ef82b34-46f8-49ec-903d-ca23dc3c1772" containerName="sg-core" containerID="cri-o://a588daffe5917b9bbbff4a26aff1bb4ec87d7cda8c14af7d280bf9688ee2f19f" gracePeriod=30 Jan 20 17:30:58 crc kubenswrapper[4558]: I0120 17:30:58.315142 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="6ef82b34-46f8-49ec-903d-ca23dc3c1772" containerName="ceilometer-notification-agent" containerID="cri-o://71160c848af9886030158c2e0b4bb5723f31572ae93243d81b2efd3fdbeaaca1" gracePeriod=30 Jan 20 17:30:58 crc 
kubenswrapper[4558]: I0120 17:30:58.753923 4558 generic.go:334] "Generic (PLEG): container finished" podID="6ef82b34-46f8-49ec-903d-ca23dc3c1772" containerID="27d73cc55a230c02cd4a68812a64ff2aec734681b12d1528600971812141a10f" exitCode=0 Jan 20 17:30:58 crc kubenswrapper[4558]: I0120 17:30:58.753962 4558 generic.go:334] "Generic (PLEG): container finished" podID="6ef82b34-46f8-49ec-903d-ca23dc3c1772" containerID="a588daffe5917b9bbbff4a26aff1bb4ec87d7cda8c14af7d280bf9688ee2f19f" exitCode=2 Jan 20 17:30:58 crc kubenswrapper[4558]: I0120 17:30:58.753972 4558 generic.go:334] "Generic (PLEG): container finished" podID="6ef82b34-46f8-49ec-903d-ca23dc3c1772" containerID="71160c848af9886030158c2e0b4bb5723f31572ae93243d81b2efd3fdbeaaca1" exitCode=0 Jan 20 17:30:58 crc kubenswrapper[4558]: I0120 17:30:58.753979 4558 generic.go:334] "Generic (PLEG): container finished" podID="6ef82b34-46f8-49ec-903d-ca23dc3c1772" containerID="56a82d3f9d2cfc2b2c9d587a9ce1e44099c72354572719bfe519c5baae4b77a1" exitCode=0 Jan 20 17:30:58 crc kubenswrapper[4558]: I0120 17:30:58.753965 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"6ef82b34-46f8-49ec-903d-ca23dc3c1772","Type":"ContainerDied","Data":"27d73cc55a230c02cd4a68812a64ff2aec734681b12d1528600971812141a10f"} Jan 20 17:30:58 crc kubenswrapper[4558]: I0120 17:30:58.754030 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"6ef82b34-46f8-49ec-903d-ca23dc3c1772","Type":"ContainerDied","Data":"a588daffe5917b9bbbff4a26aff1bb4ec87d7cda8c14af7d280bf9688ee2f19f"} Jan 20 17:30:58 crc kubenswrapper[4558]: I0120 17:30:58.754044 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"6ef82b34-46f8-49ec-903d-ca23dc3c1772","Type":"ContainerDied","Data":"71160c848af9886030158c2e0b4bb5723f31572ae93243d81b2efd3fdbeaaca1"} Jan 20 17:30:58 crc kubenswrapper[4558]: I0120 17:30:58.754056 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"6ef82b34-46f8-49ec-903d-ca23dc3c1772","Type":"ContainerDied","Data":"56a82d3f9d2cfc2b2c9d587a9ce1e44099c72354572719bfe519c5baae4b77a1"} Jan 20 17:30:58 crc kubenswrapper[4558]: I0120 17:30:58.996122 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:30:59 crc kubenswrapper[4558]: I0120 17:30:59.041505 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6ef82b34-46f8-49ec-903d-ca23dc3c1772-config-data\") pod \"6ef82b34-46f8-49ec-903d-ca23dc3c1772\" (UID: \"6ef82b34-46f8-49ec-903d-ca23dc3c1772\") " Jan 20 17:30:59 crc kubenswrapper[4558]: I0120 17:30:59.041582 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6ef82b34-46f8-49ec-903d-ca23dc3c1772-sg-core-conf-yaml\") pod \"6ef82b34-46f8-49ec-903d-ca23dc3c1772\" (UID: \"6ef82b34-46f8-49ec-903d-ca23dc3c1772\") " Jan 20 17:30:59 crc kubenswrapper[4558]: I0120 17:30:59.041692 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ncz9t\" (UniqueName: \"kubernetes.io/projected/6ef82b34-46f8-49ec-903d-ca23dc3c1772-kube-api-access-ncz9t\") pod \"6ef82b34-46f8-49ec-903d-ca23dc3c1772\" (UID: \"6ef82b34-46f8-49ec-903d-ca23dc3c1772\") " Jan 20 17:30:59 crc kubenswrapper[4558]: I0120 17:30:59.041840 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6ef82b34-46f8-49ec-903d-ca23dc3c1772-run-httpd\") pod \"6ef82b34-46f8-49ec-903d-ca23dc3c1772\" (UID: \"6ef82b34-46f8-49ec-903d-ca23dc3c1772\") " Jan 20 17:30:59 crc kubenswrapper[4558]: I0120 17:30:59.041872 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6ef82b34-46f8-49ec-903d-ca23dc3c1772-scripts\") pod \"6ef82b34-46f8-49ec-903d-ca23dc3c1772\" (UID: \"6ef82b34-46f8-49ec-903d-ca23dc3c1772\") " Jan 20 17:30:59 crc kubenswrapper[4558]: I0120 17:30:59.041913 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ef82b34-46f8-49ec-903d-ca23dc3c1772-combined-ca-bundle\") pod \"6ef82b34-46f8-49ec-903d-ca23dc3c1772\" (UID: \"6ef82b34-46f8-49ec-903d-ca23dc3c1772\") " Jan 20 17:30:59 crc kubenswrapper[4558]: I0120 17:30:59.041973 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6ef82b34-46f8-49ec-903d-ca23dc3c1772-log-httpd\") pod \"6ef82b34-46f8-49ec-903d-ca23dc3c1772\" (UID: \"6ef82b34-46f8-49ec-903d-ca23dc3c1772\") " Jan 20 17:30:59 crc kubenswrapper[4558]: I0120 17:30:59.042808 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6ef82b34-46f8-49ec-903d-ca23dc3c1772-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "6ef82b34-46f8-49ec-903d-ca23dc3c1772" (UID: "6ef82b34-46f8-49ec-903d-ca23dc3c1772"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:30:59 crc kubenswrapper[4558]: I0120 17:30:59.042943 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6ef82b34-46f8-49ec-903d-ca23dc3c1772-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "6ef82b34-46f8-49ec-903d-ca23dc3c1772" (UID: "6ef82b34-46f8-49ec-903d-ca23dc3c1772"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:30:59 crc kubenswrapper[4558]: I0120 17:30:59.054413 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ef82b34-46f8-49ec-903d-ca23dc3c1772-scripts" (OuterVolumeSpecName: "scripts") pod "6ef82b34-46f8-49ec-903d-ca23dc3c1772" (UID: "6ef82b34-46f8-49ec-903d-ca23dc3c1772"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:30:59 crc kubenswrapper[4558]: I0120 17:30:59.054578 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ef82b34-46f8-49ec-903d-ca23dc3c1772-kube-api-access-ncz9t" (OuterVolumeSpecName: "kube-api-access-ncz9t") pod "6ef82b34-46f8-49ec-903d-ca23dc3c1772" (UID: "6ef82b34-46f8-49ec-903d-ca23dc3c1772"). InnerVolumeSpecName "kube-api-access-ncz9t". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:30:59 crc kubenswrapper[4558]: I0120 17:30:59.073109 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ef82b34-46f8-49ec-903d-ca23dc3c1772-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "6ef82b34-46f8-49ec-903d-ca23dc3c1772" (UID: "6ef82b34-46f8-49ec-903d-ca23dc3c1772"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:30:59 crc kubenswrapper[4558]: I0120 17:30:59.113325 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ef82b34-46f8-49ec-903d-ca23dc3c1772-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6ef82b34-46f8-49ec-903d-ca23dc3c1772" (UID: "6ef82b34-46f8-49ec-903d-ca23dc3c1772"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:30:59 crc kubenswrapper[4558]: I0120 17:30:59.124018 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ef82b34-46f8-49ec-903d-ca23dc3c1772-config-data" (OuterVolumeSpecName: "config-data") pod "6ef82b34-46f8-49ec-903d-ca23dc3c1772" (UID: "6ef82b34-46f8-49ec-903d-ca23dc3c1772"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:30:59 crc kubenswrapper[4558]: I0120 17:30:59.144945 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6ef82b34-46f8-49ec-903d-ca23dc3c1772-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:30:59 crc kubenswrapper[4558]: I0120 17:30:59.144974 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6ef82b34-46f8-49ec-903d-ca23dc3c1772-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:30:59 crc kubenswrapper[4558]: I0120 17:30:59.144984 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6ef82b34-46f8-49ec-903d-ca23dc3c1772-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:30:59 crc kubenswrapper[4558]: I0120 17:30:59.145000 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ncz9t\" (UniqueName: \"kubernetes.io/projected/6ef82b34-46f8-49ec-903d-ca23dc3c1772-kube-api-access-ncz9t\") on node \"crc\" DevicePath \"\"" Jan 20 17:30:59 crc kubenswrapper[4558]: I0120 17:30:59.145010 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6ef82b34-46f8-49ec-903d-ca23dc3c1772-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:30:59 crc kubenswrapper[4558]: I0120 17:30:59.145020 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6ef82b34-46f8-49ec-903d-ca23dc3c1772-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:30:59 crc kubenswrapper[4558]: I0120 17:30:59.145030 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ef82b34-46f8-49ec-903d-ca23dc3c1772-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:30:59 crc kubenswrapper[4558]: I0120 17:30:59.766837 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"6ef82b34-46f8-49ec-903d-ca23dc3c1772","Type":"ContainerDied","Data":"7cc350e8d3762f24a182153c3042cd065a4a35978f3917cac9b128f8c8865844"} Jan 20 17:30:59 crc kubenswrapper[4558]: I0120 17:30:59.767288 4558 scope.go:117] "RemoveContainer" containerID="27d73cc55a230c02cd4a68812a64ff2aec734681b12d1528600971812141a10f" Jan 20 17:30:59 crc kubenswrapper[4558]: I0120 17:30:59.767464 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:30:59 crc kubenswrapper[4558]: I0120 17:30:59.810012 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:30:59 crc kubenswrapper[4558]: I0120 17:30:59.815846 4558 scope.go:117] "RemoveContainer" containerID="a588daffe5917b9bbbff4a26aff1bb4ec87d7cda8c14af7d280bf9688ee2f19f" Jan 20 17:30:59 crc kubenswrapper[4558]: I0120 17:30:59.818687 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:30:59 crc kubenswrapper[4558]: I0120 17:30:59.829808 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:30:59 crc kubenswrapper[4558]: E0120 17:30:59.830288 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ef82b34-46f8-49ec-903d-ca23dc3c1772" containerName="proxy-httpd" Jan 20 17:30:59 crc kubenswrapper[4558]: I0120 17:30:59.830309 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ef82b34-46f8-49ec-903d-ca23dc3c1772" containerName="proxy-httpd" Jan 20 17:30:59 crc kubenswrapper[4558]: E0120 17:30:59.830341 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ef82b34-46f8-49ec-903d-ca23dc3c1772" containerName="sg-core" Jan 20 17:30:59 crc kubenswrapper[4558]: I0120 17:30:59.830350 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ef82b34-46f8-49ec-903d-ca23dc3c1772" containerName="sg-core" Jan 20 17:30:59 crc kubenswrapper[4558]: E0120 17:30:59.830364 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ef82b34-46f8-49ec-903d-ca23dc3c1772" containerName="ceilometer-notification-agent" Jan 20 17:30:59 crc kubenswrapper[4558]: I0120 17:30:59.830372 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ef82b34-46f8-49ec-903d-ca23dc3c1772" containerName="ceilometer-notification-agent" Jan 20 17:30:59 crc kubenswrapper[4558]: E0120 17:30:59.830396 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ef82b34-46f8-49ec-903d-ca23dc3c1772" containerName="ceilometer-central-agent" Jan 20 17:30:59 crc kubenswrapper[4558]: I0120 17:30:59.830403 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ef82b34-46f8-49ec-903d-ca23dc3c1772" containerName="ceilometer-central-agent" Jan 20 17:30:59 crc kubenswrapper[4558]: I0120 17:30:59.830583 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="6ef82b34-46f8-49ec-903d-ca23dc3c1772" containerName="proxy-httpd" Jan 20 17:30:59 crc kubenswrapper[4558]: I0120 17:30:59.830604 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="6ef82b34-46f8-49ec-903d-ca23dc3c1772" containerName="ceilometer-notification-agent" Jan 20 17:30:59 crc kubenswrapper[4558]: I0120 17:30:59.830611 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="6ef82b34-46f8-49ec-903d-ca23dc3c1772" containerName="ceilometer-central-agent" Jan 20 17:30:59 crc kubenswrapper[4558]: I0120 17:30:59.830632 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="6ef82b34-46f8-49ec-903d-ca23dc3c1772" containerName="sg-core" Jan 20 17:30:59 crc kubenswrapper[4558]: I0120 17:30:59.832109 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:30:59 crc kubenswrapper[4558]: I0120 17:30:59.833605 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:30:59 crc kubenswrapper[4558]: I0120 17:30:59.834432 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:30:59 crc kubenswrapper[4558]: I0120 17:30:59.836718 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:30:59 crc kubenswrapper[4558]: I0120 17:30:59.847304 4558 scope.go:117] "RemoveContainer" containerID="71160c848af9886030158c2e0b4bb5723f31572ae93243d81b2efd3fdbeaaca1" Jan 20 17:30:59 crc kubenswrapper[4558]: I0120 17:30:59.871734 4558 scope.go:117] "RemoveContainer" containerID="56a82d3f9d2cfc2b2c9d587a9ce1e44099c72354572719bfe519c5baae4b77a1" Jan 20 17:30:59 crc kubenswrapper[4558]: I0120 17:30:59.960498 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00-log-httpd\") pod \"ceilometer-0\" (UID: \"5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:30:59 crc kubenswrapper[4558]: I0120 17:30:59.960584 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vr56v\" (UniqueName: \"kubernetes.io/projected/5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00-kube-api-access-vr56v\") pod \"ceilometer-0\" (UID: \"5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:30:59 crc kubenswrapper[4558]: I0120 17:30:59.960612 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:30:59 crc kubenswrapper[4558]: I0120 17:30:59.960636 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00-run-httpd\") pod \"ceilometer-0\" (UID: \"5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:30:59 crc kubenswrapper[4558]: I0120 17:30:59.960831 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00-config-data\") pod \"ceilometer-0\" (UID: \"5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:30:59 crc kubenswrapper[4558]: I0120 17:30:59.960912 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:30:59 crc kubenswrapper[4558]: I0120 17:30:59.960994 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00-scripts\") pod \"ceilometer-0\" (UID: 
\"5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:31:00 crc kubenswrapper[4558]: I0120 17:31:00.062608 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vr56v\" (UniqueName: \"kubernetes.io/projected/5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00-kube-api-access-vr56v\") pod \"ceilometer-0\" (UID: \"5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:31:00 crc kubenswrapper[4558]: I0120 17:31:00.062659 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:31:00 crc kubenswrapper[4558]: I0120 17:31:00.062689 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00-run-httpd\") pod \"ceilometer-0\" (UID: \"5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:31:00 crc kubenswrapper[4558]: I0120 17:31:00.062772 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00-config-data\") pod \"ceilometer-0\" (UID: \"5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:31:00 crc kubenswrapper[4558]: I0120 17:31:00.062818 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:31:00 crc kubenswrapper[4558]: I0120 17:31:00.062859 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00-scripts\") pod \"ceilometer-0\" (UID: \"5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:31:00 crc kubenswrapper[4558]: I0120 17:31:00.062918 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00-log-httpd\") pod \"ceilometer-0\" (UID: \"5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:31:00 crc kubenswrapper[4558]: I0120 17:31:00.063258 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00-run-httpd\") pod \"ceilometer-0\" (UID: \"5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:31:00 crc kubenswrapper[4558]: I0120 17:31:00.063346 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00-log-httpd\") pod \"ceilometer-0\" (UID: \"5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:31:00 crc kubenswrapper[4558]: I0120 17:31:00.068539 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:31:00 crc kubenswrapper[4558]: I0120 17:31:00.069419 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00-config-data\") pod \"ceilometer-0\" (UID: \"5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:31:00 crc kubenswrapper[4558]: I0120 17:31:00.075086 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:31:00 crc kubenswrapper[4558]: I0120 17:31:00.077919 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00-scripts\") pod \"ceilometer-0\" (UID: \"5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:31:00 crc kubenswrapper[4558]: I0120 17:31:00.078839 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vr56v\" (UniqueName: \"kubernetes.io/projected/5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00-kube-api-access-vr56v\") pod \"ceilometer-0\" (UID: \"5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:31:00 crc kubenswrapper[4558]: I0120 17:31:00.152412 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:31:00 crc kubenswrapper[4558]: I0120 17:31:00.589989 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ef82b34-46f8-49ec-903d-ca23dc3c1772" path="/var/lib/kubelet/pods/6ef82b34-46f8-49ec-903d-ca23dc3c1772/volumes" Jan 20 17:31:00 crc kubenswrapper[4558]: I0120 17:31:00.590883 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:31:00 crc kubenswrapper[4558]: I0120 17:31:00.781749 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00","Type":"ContainerStarted","Data":"50fff1102ca3ba28aa408c3c10c097fc1267f100c66542cce3117d2bab902210"} Jan 20 17:31:01 crc kubenswrapper[4558]: I0120 17:31:01.117924 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:31:01 crc kubenswrapper[4558]: I0120 17:31:01.798371 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00","Type":"ContainerStarted","Data":"b132b77e1877bae6d1988fa8eddb280d9e29969477d275d815cbad46d435252b"} Jan 20 17:31:02 crc kubenswrapper[4558]: I0120 17:31:02.810284 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00","Type":"ContainerStarted","Data":"36cf841bd011a58c66dcc157de2ad486b8170e4f32624bee453064b82c3fb39f"} Jan 20 17:31:02 crc kubenswrapper[4558]: I0120 17:31:02.943386 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:31:02 crc kubenswrapper[4558]: I0120 17:31:02.943598 
4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="2f28be37-ef13-48a6-9b8f-84544328e73b" containerName="glance-log" containerID="cri-o://447c2dafa20acb9a88ee521cc25e4c2c064494b9d4033020a8d42dfd079675ac" gracePeriod=30 Jan 20 17:31:02 crc kubenswrapper[4558]: I0120 17:31:02.943710 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="2f28be37-ef13-48a6-9b8f-84544328e73b" containerName="glance-httpd" containerID="cri-o://3abd92f8f694eb696e46d0fae2d93d85bd2a28f5cba4d7f01af8a85470a13be8" gracePeriod=30 Jan 20 17:31:03 crc kubenswrapper[4558]: I0120 17:31:03.610880 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:31:03 crc kubenswrapper[4558]: I0120 17:31:03.611449 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522" containerName="glance-log" containerID="cri-o://68350012dda08fc4af071c135d146ae203cfd2cb7af5e9c8dabdd6c9d0241c95" gracePeriod=30 Jan 20 17:31:03 crc kubenswrapper[4558]: I0120 17:31:03.611507 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522" containerName="glance-httpd" containerID="cri-o://2ce860fea0b9ccd065e20871bc948a224bcf38e35dee37e2a55a51a09f5622b4" gracePeriod=30 Jan 20 17:31:03 crc kubenswrapper[4558]: I0120 17:31:03.824208 4558 generic.go:334] "Generic (PLEG): container finished" podID="8d3adcfe-4948-4e18-910b-bb3380e977a0" containerID="5bd5fedc19895ba71e1b6d3077a41a79fa078729e40ca74ac886956eafb22003" exitCode=0 Jan 20 17:31:03 crc kubenswrapper[4558]: I0120 17:31:03.824295 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-qwwkb" event={"ID":"8d3adcfe-4948-4e18-910b-bb3380e977a0","Type":"ContainerDied","Data":"5bd5fedc19895ba71e1b6d3077a41a79fa078729e40ca74ac886956eafb22003"} Jan 20 17:31:03 crc kubenswrapper[4558]: I0120 17:31:03.827012 4558 generic.go:334] "Generic (PLEG): container finished" podID="1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522" containerID="68350012dda08fc4af071c135d146ae203cfd2cb7af5e9c8dabdd6c9d0241c95" exitCode=143 Jan 20 17:31:03 crc kubenswrapper[4558]: I0120 17:31:03.827077 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522","Type":"ContainerDied","Data":"68350012dda08fc4af071c135d146ae203cfd2cb7af5e9c8dabdd6c9d0241c95"} Jan 20 17:31:03 crc kubenswrapper[4558]: I0120 17:31:03.833424 4558 generic.go:334] "Generic (PLEG): container finished" podID="2f28be37-ef13-48a6-9b8f-84544328e73b" containerID="447c2dafa20acb9a88ee521cc25e4c2c064494b9d4033020a8d42dfd079675ac" exitCode=143 Jan 20 17:31:03 crc kubenswrapper[4558]: I0120 17:31:03.833534 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"2f28be37-ef13-48a6-9b8f-84544328e73b","Type":"ContainerDied","Data":"447c2dafa20acb9a88ee521cc25e4c2c064494b9d4033020a8d42dfd079675ac"} Jan 20 17:31:03 crc kubenswrapper[4558]: I0120 17:31:03.837316 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" 
event={"ID":"5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00","Type":"ContainerStarted","Data":"9fd43867f209bf26f8256753383c15a6625393d84ed247c3d40fc13e896a0e58"} Jan 20 17:31:04 crc kubenswrapper[4558]: I0120 17:31:04.565873 4558 scope.go:117] "RemoveContainer" containerID="f275e9f5de330b3c79316d5871106c6bcf90b698e0744c5beb28da45c336b36d" Jan 20 17:31:04 crc kubenswrapper[4558]: E0120 17:31:04.566472 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:31:05 crc kubenswrapper[4558]: I0120 17:31:05.224926 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-qwwkb" Jan 20 17:31:05 crc kubenswrapper[4558]: I0120 17:31:05.275133 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d3adcfe-4948-4e18-910b-bb3380e977a0-combined-ca-bundle\") pod \"8d3adcfe-4948-4e18-910b-bb3380e977a0\" (UID: \"8d3adcfe-4948-4e18-910b-bb3380e977a0\") " Jan 20 17:31:05 crc kubenswrapper[4558]: I0120 17:31:05.275205 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8d3adcfe-4948-4e18-910b-bb3380e977a0-scripts\") pod \"8d3adcfe-4948-4e18-910b-bb3380e977a0\" (UID: \"8d3adcfe-4948-4e18-910b-bb3380e977a0\") " Jan 20 17:31:05 crc kubenswrapper[4558]: I0120 17:31:05.275399 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lhb8r\" (UniqueName: \"kubernetes.io/projected/8d3adcfe-4948-4e18-910b-bb3380e977a0-kube-api-access-lhb8r\") pod \"8d3adcfe-4948-4e18-910b-bb3380e977a0\" (UID: \"8d3adcfe-4948-4e18-910b-bb3380e977a0\") " Jan 20 17:31:05 crc kubenswrapper[4558]: I0120 17:31:05.275440 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d3adcfe-4948-4e18-910b-bb3380e977a0-config-data\") pod \"8d3adcfe-4948-4e18-910b-bb3380e977a0\" (UID: \"8d3adcfe-4948-4e18-910b-bb3380e977a0\") " Jan 20 17:31:05 crc kubenswrapper[4558]: I0120 17:31:05.280573 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8d3adcfe-4948-4e18-910b-bb3380e977a0-kube-api-access-lhb8r" (OuterVolumeSpecName: "kube-api-access-lhb8r") pod "8d3adcfe-4948-4e18-910b-bb3380e977a0" (UID: "8d3adcfe-4948-4e18-910b-bb3380e977a0"). InnerVolumeSpecName "kube-api-access-lhb8r". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:31:05 crc kubenswrapper[4558]: I0120 17:31:05.282396 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d3adcfe-4948-4e18-910b-bb3380e977a0-scripts" (OuterVolumeSpecName: "scripts") pod "8d3adcfe-4948-4e18-910b-bb3380e977a0" (UID: "8d3adcfe-4948-4e18-910b-bb3380e977a0"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:31:05 crc kubenswrapper[4558]: I0120 17:31:05.304704 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d3adcfe-4948-4e18-910b-bb3380e977a0-config-data" (OuterVolumeSpecName: "config-data") pod "8d3adcfe-4948-4e18-910b-bb3380e977a0" (UID: "8d3adcfe-4948-4e18-910b-bb3380e977a0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:31:05 crc kubenswrapper[4558]: I0120 17:31:05.317190 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d3adcfe-4948-4e18-910b-bb3380e977a0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8d3adcfe-4948-4e18-910b-bb3380e977a0" (UID: "8d3adcfe-4948-4e18-910b-bb3380e977a0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:31:05 crc kubenswrapper[4558]: I0120 17:31:05.378792 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lhb8r\" (UniqueName: \"kubernetes.io/projected/8d3adcfe-4948-4e18-910b-bb3380e977a0-kube-api-access-lhb8r\") on node \"crc\" DevicePath \"\"" Jan 20 17:31:05 crc kubenswrapper[4558]: I0120 17:31:05.379147 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d3adcfe-4948-4e18-910b-bb3380e977a0-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:31:05 crc kubenswrapper[4558]: I0120 17:31:05.379224 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d3adcfe-4948-4e18-910b-bb3380e977a0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:31:05 crc kubenswrapper[4558]: I0120 17:31:05.379273 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8d3adcfe-4948-4e18-910b-bb3380e977a0-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:31:05 crc kubenswrapper[4558]: I0120 17:31:05.857424 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00","Type":"ContainerStarted","Data":"934d208a4a3607faba4102e258a17ff86ab231b6df91b3c206ed6eaf6f9148ce"} Jan 20 17:31:05 crc kubenswrapper[4558]: I0120 17:31:05.857646 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00" containerName="ceilometer-central-agent" containerID="cri-o://b132b77e1877bae6d1988fa8eddb280d9e29969477d275d815cbad46d435252b" gracePeriod=30 Jan 20 17:31:05 crc kubenswrapper[4558]: I0120 17:31:05.857722 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00" containerName="sg-core" containerID="cri-o://9fd43867f209bf26f8256753383c15a6625393d84ed247c3d40fc13e896a0e58" gracePeriod=30 Jan 20 17:31:05 crc kubenswrapper[4558]: I0120 17:31:05.857660 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00" containerName="proxy-httpd" containerID="cri-o://934d208a4a3607faba4102e258a17ff86ab231b6df91b3c206ed6eaf6f9148ce" gracePeriod=30 Jan 20 17:31:05 crc kubenswrapper[4558]: I0120 17:31:05.857694 4558 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack-kuttl-tests/ceilometer-0" podUID="5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00" containerName="ceilometer-notification-agent" containerID="cri-o://36cf841bd011a58c66dcc157de2ad486b8170e4f32624bee453064b82c3fb39f" gracePeriod=30 Jan 20 17:31:05 crc kubenswrapper[4558]: I0120 17:31:05.858108 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:31:05 crc kubenswrapper[4558]: I0120 17:31:05.861211 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-qwwkb" event={"ID":"8d3adcfe-4948-4e18-910b-bb3380e977a0","Type":"ContainerDied","Data":"86b127d3fcd23311598f95c16f4ebdd1257576cbcb6c88cebb1fb1a602c10728"} Jan 20 17:31:05 crc kubenswrapper[4558]: I0120 17:31:05.861253 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-qwwkb" Jan 20 17:31:05 crc kubenswrapper[4558]: I0120 17:31:05.861274 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="86b127d3fcd23311598f95c16f4ebdd1257576cbcb6c88cebb1fb1a602c10728" Jan 20 17:31:05 crc kubenswrapper[4558]: I0120 17:31:05.890223 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=2.358644408 podStartE2EDuration="6.890207352s" podCreationTimestamp="2026-01-20 17:30:59 +0000 UTC" firstStartedPulling="2026-01-20 17:31:00.579195666 +0000 UTC m=+2954.339533633" lastFinishedPulling="2026-01-20 17:31:05.11075862 +0000 UTC m=+2958.871096577" observedRunningTime="2026-01-20 17:31:05.885322836 +0000 UTC m=+2959.645660803" watchObservedRunningTime="2026-01-20 17:31:05.890207352 +0000 UTC m=+2959.650545318" Jan 20 17:31:05 crc kubenswrapper[4558]: I0120 17:31:05.931588 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:31:05 crc kubenswrapper[4558]: E0120 17:31:05.932053 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d3adcfe-4948-4e18-910b-bb3380e977a0" containerName="nova-cell0-conductor-db-sync" Jan 20 17:31:05 crc kubenswrapper[4558]: I0120 17:31:05.932072 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d3adcfe-4948-4e18-910b-bb3380e977a0" containerName="nova-cell0-conductor-db-sync" Jan 20 17:31:05 crc kubenswrapper[4558]: I0120 17:31:05.932286 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8d3adcfe-4948-4e18-910b-bb3380e977a0" containerName="nova-cell0-conductor-db-sync" Jan 20 17:31:05 crc kubenswrapper[4558]: I0120 17:31:05.933006 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:31:05 crc kubenswrapper[4558]: I0120 17:31:05.938658 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-conductor-config-data" Jan 20 17:31:05 crc kubenswrapper[4558]: I0120 17:31:05.938701 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-nova-dockercfg-24bfk" Jan 20 17:31:05 crc kubenswrapper[4558]: I0120 17:31:05.946878 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:31:05 crc kubenswrapper[4558]: I0120 17:31:05.988564 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6dbc6996-80bf-4f86-9acf-f74dd04e51b1-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"6dbc6996-80bf-4f86-9acf-f74dd04e51b1\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:31:05 crc kubenswrapper[4558]: I0120 17:31:05.988633 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gnxn7\" (UniqueName: \"kubernetes.io/projected/6dbc6996-80bf-4f86-9acf-f74dd04e51b1-kube-api-access-gnxn7\") pod \"nova-cell0-conductor-0\" (UID: \"6dbc6996-80bf-4f86-9acf-f74dd04e51b1\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:31:05 crc kubenswrapper[4558]: I0120 17:31:05.988774 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6dbc6996-80bf-4f86-9acf-f74dd04e51b1-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"6dbc6996-80bf-4f86-9acf-f74dd04e51b1\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:31:06 crc kubenswrapper[4558]: I0120 17:31:06.091709 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6dbc6996-80bf-4f86-9acf-f74dd04e51b1-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"6dbc6996-80bf-4f86-9acf-f74dd04e51b1\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:31:06 crc kubenswrapper[4558]: I0120 17:31:06.092260 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gnxn7\" (UniqueName: \"kubernetes.io/projected/6dbc6996-80bf-4f86-9acf-f74dd04e51b1-kube-api-access-gnxn7\") pod \"nova-cell0-conductor-0\" (UID: \"6dbc6996-80bf-4f86-9acf-f74dd04e51b1\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:31:06 crc kubenswrapper[4558]: I0120 17:31:06.092354 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6dbc6996-80bf-4f86-9acf-f74dd04e51b1-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"6dbc6996-80bf-4f86-9acf-f74dd04e51b1\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:31:06 crc kubenswrapper[4558]: I0120 17:31:06.097620 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6dbc6996-80bf-4f86-9acf-f74dd04e51b1-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"6dbc6996-80bf-4f86-9acf-f74dd04e51b1\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:31:06 crc kubenswrapper[4558]: I0120 17:31:06.097887 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/6dbc6996-80bf-4f86-9acf-f74dd04e51b1-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"6dbc6996-80bf-4f86-9acf-f74dd04e51b1\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:31:06 crc kubenswrapper[4558]: I0120 17:31:06.114568 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gnxn7\" (UniqueName: \"kubernetes.io/projected/6dbc6996-80bf-4f86-9acf-f74dd04e51b1-kube-api-access-gnxn7\") pod \"nova-cell0-conductor-0\" (UID: \"6dbc6996-80bf-4f86-9acf-f74dd04e51b1\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:31:06 crc kubenswrapper[4558]: I0120 17:31:06.248377 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:31:06 crc kubenswrapper[4558]: I0120 17:31:06.460405 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:31:06 crc kubenswrapper[4558]: I0120 17:31:06.501156 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage16-crc\") pod \"2f28be37-ef13-48a6-9b8f-84544328e73b\" (UID: \"2f28be37-ef13-48a6-9b8f-84544328e73b\") " Jan 20 17:31:06 crc kubenswrapper[4558]: I0120 17:31:06.501293 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f28be37-ef13-48a6-9b8f-84544328e73b-combined-ca-bundle\") pod \"2f28be37-ef13-48a6-9b8f-84544328e73b\" (UID: \"2f28be37-ef13-48a6-9b8f-84544328e73b\") " Jan 20 17:31:06 crc kubenswrapper[4558]: I0120 17:31:06.501347 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/2f28be37-ef13-48a6-9b8f-84544328e73b-httpd-run\") pod \"2f28be37-ef13-48a6-9b8f-84544328e73b\" (UID: \"2f28be37-ef13-48a6-9b8f-84544328e73b\") " Jan 20 17:31:06 crc kubenswrapper[4558]: I0120 17:31:06.501413 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2f28be37-ef13-48a6-9b8f-84544328e73b-scripts\") pod \"2f28be37-ef13-48a6-9b8f-84544328e73b\" (UID: \"2f28be37-ef13-48a6-9b8f-84544328e73b\") " Jan 20 17:31:06 crc kubenswrapper[4558]: I0120 17:31:06.501452 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qv6tt\" (UniqueName: \"kubernetes.io/projected/2f28be37-ef13-48a6-9b8f-84544328e73b-kube-api-access-qv6tt\") pod \"2f28be37-ef13-48a6-9b8f-84544328e73b\" (UID: \"2f28be37-ef13-48a6-9b8f-84544328e73b\") " Jan 20 17:31:06 crc kubenswrapper[4558]: I0120 17:31:06.501541 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2f28be37-ef13-48a6-9b8f-84544328e73b-logs\") pod \"2f28be37-ef13-48a6-9b8f-84544328e73b\" (UID: \"2f28be37-ef13-48a6-9b8f-84544328e73b\") " Jan 20 17:31:06 crc kubenswrapper[4558]: I0120 17:31:06.501562 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2f28be37-ef13-48a6-9b8f-84544328e73b-config-data\") pod \"2f28be37-ef13-48a6-9b8f-84544328e73b\" (UID: \"2f28be37-ef13-48a6-9b8f-84544328e73b\") " Jan 20 17:31:06 crc kubenswrapper[4558]: I0120 17:31:06.503081 4558 operation_generator.go:803] UnmountVolume.TearDown 
succeeded for volume "kubernetes.io/empty-dir/2f28be37-ef13-48a6-9b8f-84544328e73b-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "2f28be37-ef13-48a6-9b8f-84544328e73b" (UID: "2f28be37-ef13-48a6-9b8f-84544328e73b"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:31:06 crc kubenswrapper[4558]: I0120 17:31:06.505497 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2f28be37-ef13-48a6-9b8f-84544328e73b-logs" (OuterVolumeSpecName: "logs") pod "2f28be37-ef13-48a6-9b8f-84544328e73b" (UID: "2f28be37-ef13-48a6-9b8f-84544328e73b"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:31:06 crc kubenswrapper[4558]: I0120 17:31:06.509307 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage16-crc" (OuterVolumeSpecName: "glance") pod "2f28be37-ef13-48a6-9b8f-84544328e73b" (UID: "2f28be37-ef13-48a6-9b8f-84544328e73b"). InnerVolumeSpecName "local-storage16-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:31:06 crc kubenswrapper[4558]: I0120 17:31:06.509524 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2f28be37-ef13-48a6-9b8f-84544328e73b-scripts" (OuterVolumeSpecName: "scripts") pod "2f28be37-ef13-48a6-9b8f-84544328e73b" (UID: "2f28be37-ef13-48a6-9b8f-84544328e73b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:31:06 crc kubenswrapper[4558]: I0120 17:31:06.512432 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2f28be37-ef13-48a6-9b8f-84544328e73b-kube-api-access-qv6tt" (OuterVolumeSpecName: "kube-api-access-qv6tt") pod "2f28be37-ef13-48a6-9b8f-84544328e73b" (UID: "2f28be37-ef13-48a6-9b8f-84544328e73b"). InnerVolumeSpecName "kube-api-access-qv6tt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:31:06 crc kubenswrapper[4558]: I0120 17:31:06.536212 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2f28be37-ef13-48a6-9b8f-84544328e73b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2f28be37-ef13-48a6-9b8f-84544328e73b" (UID: "2f28be37-ef13-48a6-9b8f-84544328e73b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:31:06 crc kubenswrapper[4558]: I0120 17:31:06.549714 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2f28be37-ef13-48a6-9b8f-84544328e73b-config-data" (OuterVolumeSpecName: "config-data") pod "2f28be37-ef13-48a6-9b8f-84544328e73b" (UID: "2f28be37-ef13-48a6-9b8f-84544328e73b"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:31:06 crc kubenswrapper[4558]: I0120 17:31:06.603467 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f28be37-ef13-48a6-9b8f-84544328e73b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:31:06 crc kubenswrapper[4558]: I0120 17:31:06.603496 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/2f28be37-ef13-48a6-9b8f-84544328e73b-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:31:06 crc kubenswrapper[4558]: I0120 17:31:06.603508 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2f28be37-ef13-48a6-9b8f-84544328e73b-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:31:06 crc kubenswrapper[4558]: I0120 17:31:06.603519 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qv6tt\" (UniqueName: \"kubernetes.io/projected/2f28be37-ef13-48a6-9b8f-84544328e73b-kube-api-access-qv6tt\") on node \"crc\" DevicePath \"\"" Jan 20 17:31:06 crc kubenswrapper[4558]: I0120 17:31:06.603530 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2f28be37-ef13-48a6-9b8f-84544328e73b-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:31:06 crc kubenswrapper[4558]: I0120 17:31:06.603538 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2f28be37-ef13-48a6-9b8f-84544328e73b-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:31:06 crc kubenswrapper[4558]: I0120 17:31:06.603582 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage16-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage16-crc\") on node \"crc\" " Jan 20 17:31:06 crc kubenswrapper[4558]: I0120 17:31:06.619214 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage16-crc" (UniqueName: "kubernetes.io/local-volume/local-storage16-crc") on node "crc" Jan 20 17:31:06 crc kubenswrapper[4558]: I0120 17:31:06.669514 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:31:06 crc kubenswrapper[4558]: I0120 17:31:06.707083 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage16-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage16-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:31:06 crc kubenswrapper[4558]: I0120 17:31:06.887447 4558 generic.go:334] "Generic (PLEG): container finished" podID="2f28be37-ef13-48a6-9b8f-84544328e73b" containerID="3abd92f8f694eb696e46d0fae2d93d85bd2a28f5cba4d7f01af8a85470a13be8" exitCode=0 Jan 20 17:31:06 crc kubenswrapper[4558]: I0120 17:31:06.888312 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"2f28be37-ef13-48a6-9b8f-84544328e73b","Type":"ContainerDied","Data":"3abd92f8f694eb696e46d0fae2d93d85bd2a28f5cba4d7f01af8a85470a13be8"} Jan 20 17:31:06 crc kubenswrapper[4558]: I0120 17:31:06.888378 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"2f28be37-ef13-48a6-9b8f-84544328e73b","Type":"ContainerDied","Data":"dadae09c02e20d14661a9b505da725dc17d4da7261642d58bdf65a0c5cfbe1d8"} Jan 20 17:31:06 crc kubenswrapper[4558]: I0120 17:31:06.889012 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:31:06 crc kubenswrapper[4558]: I0120 17:31:06.889325 4558 scope.go:117] "RemoveContainer" containerID="3abd92f8f694eb696e46d0fae2d93d85bd2a28f5cba4d7f01af8a85470a13be8" Jan 20 17:31:06 crc kubenswrapper[4558]: I0120 17:31:06.906062 4558 generic.go:334] "Generic (PLEG): container finished" podID="5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00" containerID="934d208a4a3607faba4102e258a17ff86ab231b6df91b3c206ed6eaf6f9148ce" exitCode=0 Jan 20 17:31:06 crc kubenswrapper[4558]: I0120 17:31:06.906091 4558 generic.go:334] "Generic (PLEG): container finished" podID="5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00" containerID="9fd43867f209bf26f8256753383c15a6625393d84ed247c3d40fc13e896a0e58" exitCode=2 Jan 20 17:31:06 crc kubenswrapper[4558]: I0120 17:31:06.906101 4558 generic.go:334] "Generic (PLEG): container finished" podID="5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00" containerID="36cf841bd011a58c66dcc157de2ad486b8170e4f32624bee453064b82c3fb39f" exitCode=0 Jan 20 17:31:06 crc kubenswrapper[4558]: I0120 17:31:06.906158 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00","Type":"ContainerDied","Data":"934d208a4a3607faba4102e258a17ff86ab231b6df91b3c206ed6eaf6f9148ce"} Jan 20 17:31:06 crc kubenswrapper[4558]: I0120 17:31:06.906200 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00","Type":"ContainerDied","Data":"9fd43867f209bf26f8256753383c15a6625393d84ed247c3d40fc13e896a0e58"} Jan 20 17:31:06 crc kubenswrapper[4558]: I0120 17:31:06.906212 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00","Type":"ContainerDied","Data":"36cf841bd011a58c66dcc157de2ad486b8170e4f32624bee453064b82c3fb39f"} Jan 20 17:31:06 crc kubenswrapper[4558]: I0120 17:31:06.912102 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"6dbc6996-80bf-4f86-9acf-f74dd04e51b1","Type":"ContainerStarted","Data":"970a93b4a91f5ca4973899ad99e90f77e072ebe56a15538b980197b22ac94391"} Jan 20 17:31:06 crc kubenswrapper[4558]: I0120 17:31:06.925373 4558 generic.go:334] "Generic (PLEG): container finished" podID="1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522" containerID="2ce860fea0b9ccd065e20871bc948a224bcf38e35dee37e2a55a51a09f5622b4" exitCode=0 Jan 20 17:31:06 crc kubenswrapper[4558]: I0120 17:31:06.925419 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522","Type":"ContainerDied","Data":"2ce860fea0b9ccd065e20871bc948a224bcf38e35dee37e2a55a51a09f5622b4"} Jan 20 17:31:06 crc kubenswrapper[4558]: I0120 17:31:06.928652 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:31:06 crc kubenswrapper[4558]: I0120 17:31:06.928951 4558 scope.go:117] "RemoveContainer" containerID="447c2dafa20acb9a88ee521cc25e4c2c064494b9d4033020a8d42dfd079675ac" Jan 20 17:31:06 crc kubenswrapper[4558]: I0120 17:31:06.957178 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:31:06 crc kubenswrapper[4558]: I0120 17:31:06.972950 4558 scope.go:117] "RemoveContainer" 
containerID="3abd92f8f694eb696e46d0fae2d93d85bd2a28f5cba4d7f01af8a85470a13be8" Jan 20 17:31:06 crc kubenswrapper[4558]: I0120 17:31:06.973299 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:31:06 crc kubenswrapper[4558]: E0120 17:31:06.973773 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f28be37-ef13-48a6-9b8f-84544328e73b" containerName="glance-log" Jan 20 17:31:06 crc kubenswrapper[4558]: I0120 17:31:06.973861 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f28be37-ef13-48a6-9b8f-84544328e73b" containerName="glance-log" Jan 20 17:31:06 crc kubenswrapper[4558]: E0120 17:31:06.973919 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f28be37-ef13-48a6-9b8f-84544328e73b" containerName="glance-httpd" Jan 20 17:31:06 crc kubenswrapper[4558]: I0120 17:31:06.973976 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f28be37-ef13-48a6-9b8f-84544328e73b" containerName="glance-httpd" Jan 20 17:31:06 crc kubenswrapper[4558]: I0120 17:31:06.974256 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f28be37-ef13-48a6-9b8f-84544328e73b" containerName="glance-httpd" Jan 20 17:31:06 crc kubenswrapper[4558]: I0120 17:31:06.974330 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f28be37-ef13-48a6-9b8f-84544328e73b" containerName="glance-log" Jan 20 17:31:06 crc kubenswrapper[4558]: I0120 17:31:06.975683 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:31:06 crc kubenswrapper[4558]: E0120 17:31:06.976260 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3abd92f8f694eb696e46d0fae2d93d85bd2a28f5cba4d7f01af8a85470a13be8\": container with ID starting with 3abd92f8f694eb696e46d0fae2d93d85bd2a28f5cba4d7f01af8a85470a13be8 not found: ID does not exist" containerID="3abd92f8f694eb696e46d0fae2d93d85bd2a28f5cba4d7f01af8a85470a13be8" Jan 20 17:31:06 crc kubenswrapper[4558]: I0120 17:31:06.976291 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3abd92f8f694eb696e46d0fae2d93d85bd2a28f5cba4d7f01af8a85470a13be8"} err="failed to get container status \"3abd92f8f694eb696e46d0fae2d93d85bd2a28f5cba4d7f01af8a85470a13be8\": rpc error: code = NotFound desc = could not find container \"3abd92f8f694eb696e46d0fae2d93d85bd2a28f5cba4d7f01af8a85470a13be8\": container with ID starting with 3abd92f8f694eb696e46d0fae2d93d85bd2a28f5cba4d7f01af8a85470a13be8 not found: ID does not exist" Jan 20 17:31:06 crc kubenswrapper[4558]: I0120 17:31:06.976313 4558 scope.go:117] "RemoveContainer" containerID="447c2dafa20acb9a88ee521cc25e4c2c064494b9d4033020a8d42dfd079675ac" Jan 20 17:31:06 crc kubenswrapper[4558]: E0120 17:31:06.976659 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"447c2dafa20acb9a88ee521cc25e4c2c064494b9d4033020a8d42dfd079675ac\": container with ID starting with 447c2dafa20acb9a88ee521cc25e4c2c064494b9d4033020a8d42dfd079675ac not found: ID does not exist" containerID="447c2dafa20acb9a88ee521cc25e4c2c064494b9d4033020a8d42dfd079675ac" Jan 20 17:31:06 crc kubenswrapper[4558]: I0120 17:31:06.976752 4558 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"447c2dafa20acb9a88ee521cc25e4c2c064494b9d4033020a8d42dfd079675ac"} err="failed to get container status \"447c2dafa20acb9a88ee521cc25e4c2c064494b9d4033020a8d42dfd079675ac\": rpc error: code = NotFound desc = could not find container \"447c2dafa20acb9a88ee521cc25e4c2c064494b9d4033020a8d42dfd079675ac\": container with ID starting with 447c2dafa20acb9a88ee521cc25e4c2c064494b9d4033020a8d42dfd079675ac not found: ID does not exist" Jan 20 17:31:06 crc kubenswrapper[4558]: I0120 17:31:06.983759 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-external-config-data" Jan 20 17:31:06 crc kubenswrapper[4558]: I0120 17:31:06.986021 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:31:07 crc kubenswrapper[4558]: I0120 17:31:07.018330 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/59e7ef92-3d74-4334-ac19-671eb3199d0b-scripts\") pod \"glance-default-external-api-0\" (UID: \"59e7ef92-3d74-4334-ac19-671eb3199d0b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:31:07 crc kubenswrapper[4558]: I0120 17:31:07.018390 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8p4k8\" (UniqueName: \"kubernetes.io/projected/59e7ef92-3d74-4334-ac19-671eb3199d0b-kube-api-access-8p4k8\") pod \"glance-default-external-api-0\" (UID: \"59e7ef92-3d74-4334-ac19-671eb3199d0b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:31:07 crc kubenswrapper[4558]: I0120 17:31:07.018446 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/59e7ef92-3d74-4334-ac19-671eb3199d0b-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"59e7ef92-3d74-4334-ac19-671eb3199d0b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:31:07 crc kubenswrapper[4558]: I0120 17:31:07.018485 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/59e7ef92-3d74-4334-ac19-671eb3199d0b-config-data\") pod \"glance-default-external-api-0\" (UID: \"59e7ef92-3d74-4334-ac19-671eb3199d0b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:31:07 crc kubenswrapper[4558]: I0120 17:31:07.018510 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59e7ef92-3d74-4334-ac19-671eb3199d0b-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"59e7ef92-3d74-4334-ac19-671eb3199d0b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:31:07 crc kubenswrapper[4558]: I0120 17:31:07.018631 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/59e7ef92-3d74-4334-ac19-671eb3199d0b-logs\") pod \"glance-default-external-api-0\" (UID: \"59e7ef92-3d74-4334-ac19-671eb3199d0b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:31:07 crc kubenswrapper[4558]: I0120 17:31:07.018668 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage16-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage16-crc\") pod \"glance-default-external-api-0\" (UID: \"59e7ef92-3d74-4334-ac19-671eb3199d0b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:31:07 crc kubenswrapper[4558]: I0120 17:31:07.119650 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/59e7ef92-3d74-4334-ac19-671eb3199d0b-scripts\") pod \"glance-default-external-api-0\" (UID: \"59e7ef92-3d74-4334-ac19-671eb3199d0b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:31:07 crc kubenswrapper[4558]: I0120 17:31:07.119706 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8p4k8\" (UniqueName: \"kubernetes.io/projected/59e7ef92-3d74-4334-ac19-671eb3199d0b-kube-api-access-8p4k8\") pod \"glance-default-external-api-0\" (UID: \"59e7ef92-3d74-4334-ac19-671eb3199d0b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:31:07 crc kubenswrapper[4558]: I0120 17:31:07.119756 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/59e7ef92-3d74-4334-ac19-671eb3199d0b-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"59e7ef92-3d74-4334-ac19-671eb3199d0b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:31:07 crc kubenswrapper[4558]: I0120 17:31:07.119804 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/59e7ef92-3d74-4334-ac19-671eb3199d0b-config-data\") pod \"glance-default-external-api-0\" (UID: \"59e7ef92-3d74-4334-ac19-671eb3199d0b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:31:07 crc kubenswrapper[4558]: I0120 17:31:07.119843 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59e7ef92-3d74-4334-ac19-671eb3199d0b-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"59e7ef92-3d74-4334-ac19-671eb3199d0b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:31:07 crc kubenswrapper[4558]: I0120 17:31:07.119873 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/59e7ef92-3d74-4334-ac19-671eb3199d0b-logs\") pod \"glance-default-external-api-0\" (UID: \"59e7ef92-3d74-4334-ac19-671eb3199d0b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:31:07 crc kubenswrapper[4558]: I0120 17:31:07.119908 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage16-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage16-crc\") pod \"glance-default-external-api-0\" (UID: \"59e7ef92-3d74-4334-ac19-671eb3199d0b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:31:07 crc kubenswrapper[4558]: I0120 17:31:07.120456 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/59e7ef92-3d74-4334-ac19-671eb3199d0b-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"59e7ef92-3d74-4334-ac19-671eb3199d0b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:31:07 crc kubenswrapper[4558]: I0120 17:31:07.120727 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage16-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage16-crc\") pod \"glance-default-external-api-0\" (UID: \"59e7ef92-3d74-4334-ac19-671eb3199d0b\") device mount path \"/mnt/openstack/pv16\"" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:31:07 crc kubenswrapper[4558]: I0120 17:31:07.120749 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/59e7ef92-3d74-4334-ac19-671eb3199d0b-logs\") pod \"glance-default-external-api-0\" (UID: \"59e7ef92-3d74-4334-ac19-671eb3199d0b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:31:07 crc kubenswrapper[4558]: I0120 17:31:07.128736 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59e7ef92-3d74-4334-ac19-671eb3199d0b-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"59e7ef92-3d74-4334-ac19-671eb3199d0b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:31:07 crc kubenswrapper[4558]: I0120 17:31:07.129252 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/59e7ef92-3d74-4334-ac19-671eb3199d0b-config-data\") pod \"glance-default-external-api-0\" (UID: \"59e7ef92-3d74-4334-ac19-671eb3199d0b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:31:07 crc kubenswrapper[4558]: I0120 17:31:07.129284 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/59e7ef92-3d74-4334-ac19-671eb3199d0b-scripts\") pod \"glance-default-external-api-0\" (UID: \"59e7ef92-3d74-4334-ac19-671eb3199d0b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:31:07 crc kubenswrapper[4558]: I0120 17:31:07.129617 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:31:07 crc kubenswrapper[4558]: I0120 17:31:07.137460 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8p4k8\" (UniqueName: \"kubernetes.io/projected/59e7ef92-3d74-4334-ac19-671eb3199d0b-kube-api-access-8p4k8\") pod \"glance-default-external-api-0\" (UID: \"59e7ef92-3d74-4334-ac19-671eb3199d0b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:31:07 crc kubenswrapper[4558]: I0120 17:31:07.144191 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage16-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage16-crc\") pod \"glance-default-external-api-0\" (UID: \"59e7ef92-3d74-4334-ac19-671eb3199d0b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:31:07 crc kubenswrapper[4558]: I0120 17:31:07.221003 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-47znw\" (UniqueName: \"kubernetes.io/projected/1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522-kube-api-access-47znw\") pod \"1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522\" (UID: \"1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522\") " Jan 20 17:31:07 crc kubenswrapper[4558]: I0120 17:31:07.221420 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522-logs\") pod \"1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522\" (UID: \"1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522\") " Jan 20 17:31:07 crc kubenswrapper[4558]: I0120 17:31:07.221488 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522-combined-ca-bundle\") pod \"1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522\" (UID: \"1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522\") " Jan 20 17:31:07 crc kubenswrapper[4558]: I0120 17:31:07.221534 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522\" (UID: \"1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522\") " Jan 20 17:31:07 crc kubenswrapper[4558]: I0120 17:31:07.221562 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522-httpd-run\") pod \"1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522\" (UID: \"1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522\") " Jan 20 17:31:07 crc kubenswrapper[4558]: I0120 17:31:07.221651 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522-config-data\") pod \"1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522\" (UID: \"1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522\") " Jan 20 17:31:07 crc kubenswrapper[4558]: I0120 17:31:07.221742 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522-logs" (OuterVolumeSpecName: "logs") pod "1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522" (UID: "1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:31:07 crc kubenswrapper[4558]: I0120 17:31:07.221764 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522-scripts\") pod \"1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522\" (UID: \"1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522\") " Jan 20 17:31:07 crc kubenswrapper[4558]: I0120 17:31:07.222111 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522" (UID: "1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:31:07 crc kubenswrapper[4558]: I0120 17:31:07.222804 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:31:07 crc kubenswrapper[4558]: I0120 17:31:07.222826 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:31:07 crc kubenswrapper[4558]: I0120 17:31:07.227774 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage15-crc" (OuterVolumeSpecName: "glance") pod "1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522" (UID: "1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522"). InnerVolumeSpecName "local-storage15-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:31:07 crc kubenswrapper[4558]: I0120 17:31:07.229260 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522-scripts" (OuterVolumeSpecName: "scripts") pod "1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522" (UID: "1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:31:07 crc kubenswrapper[4558]: I0120 17:31:07.235378 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522-kube-api-access-47znw" (OuterVolumeSpecName: "kube-api-access-47znw") pod "1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522" (UID: "1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522"). InnerVolumeSpecName "kube-api-access-47znw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:31:07 crc kubenswrapper[4558]: I0120 17:31:07.247740 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522" (UID: "1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:31:07 crc kubenswrapper[4558]: I0120 17:31:07.272312 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522-config-data" (OuterVolumeSpecName: "config-data") pod "1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522" (UID: "1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:31:07 crc kubenswrapper[4558]: I0120 17:31:07.299971 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:31:07 crc kubenswrapper[4558]: I0120 17:31:07.325830 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:31:07 crc kubenswrapper[4558]: I0120 17:31:07.325860 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-47znw\" (UniqueName: \"kubernetes.io/projected/1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522-kube-api-access-47znw\") on node \"crc\" DevicePath \"\"" Jan 20 17:31:07 crc kubenswrapper[4558]: I0120 17:31:07.325878 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:31:07 crc kubenswrapper[4558]: I0120 17:31:07.325918 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") on node \"crc\" " Jan 20 17:31:07 crc kubenswrapper[4558]: I0120 17:31:07.325929 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:31:07 crc kubenswrapper[4558]: I0120 17:31:07.345361 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage15-crc" (UniqueName: "kubernetes.io/local-volume/local-storage15-crc") on node "crc" Jan 20 17:31:07 crc kubenswrapper[4558]: I0120 17:31:07.427800 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:31:07 crc kubenswrapper[4558]: W0120 17:31:07.741193 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod59e7ef92_3d74_4334_ac19_671eb3199d0b.slice/crio-2789214ccdafc0424e87310c5b887521dedc090f785e3b948d6478a4744b64fe WatchSource:0}: Error finding container 2789214ccdafc0424e87310c5b887521dedc090f785e3b948d6478a4744b64fe: Status 404 returned error can't find the container with id 2789214ccdafc0424e87310c5b887521dedc090f785e3b948d6478a4744b64fe Jan 20 17:31:07 crc kubenswrapper[4558]: I0120 17:31:07.742036 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:31:07 crc kubenswrapper[4558]: I0120 17:31:07.948090 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"6dbc6996-80bf-4f86-9acf-f74dd04e51b1","Type":"ContainerStarted","Data":"a45e6f8d25e2903ff94497e102fe9cb0b1841d235bfac8ac8e62657bad6c6b17"} Jan 20 17:31:07 crc kubenswrapper[4558]: I0120 17:31:07.948239 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:31:07 crc kubenswrapper[4558]: I0120 17:31:07.951791 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" 
event={"ID":"59e7ef92-3d74-4334-ac19-671eb3199d0b","Type":"ContainerStarted","Data":"2789214ccdafc0424e87310c5b887521dedc090f785e3b948d6478a4744b64fe"} Jan 20 17:31:07 crc kubenswrapper[4558]: I0120 17:31:07.956189 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522","Type":"ContainerDied","Data":"ab755648fa19e7361557f82ad97ffdfe31f54dd1bc8bbb6e6e60547c52f87841"} Jan 20 17:31:07 crc kubenswrapper[4558]: I0120 17:31:07.956237 4558 scope.go:117] "RemoveContainer" containerID="2ce860fea0b9ccd065e20871bc948a224bcf38e35dee37e2a55a51a09f5622b4" Jan 20 17:31:07 crc kubenswrapper[4558]: I0120 17:31:07.956403 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:31:07 crc kubenswrapper[4558]: I0120 17:31:07.968517 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podStartSLOduration=2.96849433 podStartE2EDuration="2.96849433s" podCreationTimestamp="2026-01-20 17:31:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:31:07.96215537 +0000 UTC m=+2961.722493328" watchObservedRunningTime="2026-01-20 17:31:07.96849433 +0000 UTC m=+2961.728832287" Jan 20 17:31:07 crc kubenswrapper[4558]: I0120 17:31:07.995970 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:31:08 crc kubenswrapper[4558]: I0120 17:31:08.015338 4558 scope.go:117] "RemoveContainer" containerID="68350012dda08fc4af071c135d146ae203cfd2cb7af5e9c8dabdd6c9d0241c95" Jan 20 17:31:08 crc kubenswrapper[4558]: I0120 17:31:08.020974 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:31:08 crc kubenswrapper[4558]: I0120 17:31:08.033059 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:31:08 crc kubenswrapper[4558]: E0120 17:31:08.033539 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522" containerName="glance-log" Jan 20 17:31:08 crc kubenswrapper[4558]: I0120 17:31:08.033554 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522" containerName="glance-log" Jan 20 17:31:08 crc kubenswrapper[4558]: E0120 17:31:08.033572 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522" containerName="glance-httpd" Jan 20 17:31:08 crc kubenswrapper[4558]: I0120 17:31:08.033579 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522" containerName="glance-httpd" Jan 20 17:31:08 crc kubenswrapper[4558]: I0120 17:31:08.033748 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522" containerName="glance-log" Jan 20 17:31:08 crc kubenswrapper[4558]: I0120 17:31:08.033767 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522" containerName="glance-httpd" Jan 20 17:31:08 crc kubenswrapper[4558]: I0120 17:31:08.034747 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:31:08 crc kubenswrapper[4558]: I0120 17:31:08.036392 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-internal-config-data" Jan 20 17:31:08 crc kubenswrapper[4558]: I0120 17:31:08.045032 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:31:08 crc kubenswrapper[4558]: I0120 17:31:08.145111 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aaaaf2d3-8f74-4ce7-8407-2f0b0bece070-logs\") pod \"glance-default-internal-api-0\" (UID: \"aaaaf2d3-8f74-4ce7-8407-2f0b0bece070\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:31:08 crc kubenswrapper[4558]: I0120 17:31:08.145240 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aaaaf2d3-8f74-4ce7-8407-2f0b0bece070-config-data\") pod \"glance-default-internal-api-0\" (UID: \"aaaaf2d3-8f74-4ce7-8407-2f0b0bece070\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:31:08 crc kubenswrapper[4558]: I0120 17:31:08.145388 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rq4md\" (UniqueName: \"kubernetes.io/projected/aaaaf2d3-8f74-4ce7-8407-2f0b0bece070-kube-api-access-rq4md\") pod \"glance-default-internal-api-0\" (UID: \"aaaaf2d3-8f74-4ce7-8407-2f0b0bece070\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:31:08 crc kubenswrapper[4558]: I0120 17:31:08.145457 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aaaaf2d3-8f74-4ce7-8407-2f0b0bece070-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"aaaaf2d3-8f74-4ce7-8407-2f0b0bece070\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:31:08 crc kubenswrapper[4558]: I0120 17:31:08.145640 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"glance-default-internal-api-0\" (UID: \"aaaaf2d3-8f74-4ce7-8407-2f0b0bece070\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:31:08 crc kubenswrapper[4558]: I0120 17:31:08.145695 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aaaaf2d3-8f74-4ce7-8407-2f0b0bece070-scripts\") pod \"glance-default-internal-api-0\" (UID: \"aaaaf2d3-8f74-4ce7-8407-2f0b0bece070\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:31:08 crc kubenswrapper[4558]: I0120 17:31:08.145838 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/aaaaf2d3-8f74-4ce7-8407-2f0b0bece070-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"aaaaf2d3-8f74-4ce7-8407-2f0b0bece070\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:31:08 crc kubenswrapper[4558]: I0120 17:31:08.247652 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage15-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage15-crc\") pod \"glance-default-internal-api-0\" (UID: \"aaaaf2d3-8f74-4ce7-8407-2f0b0bece070\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:31:08 crc kubenswrapper[4558]: I0120 17:31:08.247713 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aaaaf2d3-8f74-4ce7-8407-2f0b0bece070-scripts\") pod \"glance-default-internal-api-0\" (UID: \"aaaaf2d3-8f74-4ce7-8407-2f0b0bece070\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:31:08 crc kubenswrapper[4558]: I0120 17:31:08.247792 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/aaaaf2d3-8f74-4ce7-8407-2f0b0bece070-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"aaaaf2d3-8f74-4ce7-8407-2f0b0bece070\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:31:08 crc kubenswrapper[4558]: I0120 17:31:08.247874 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aaaaf2d3-8f74-4ce7-8407-2f0b0bece070-logs\") pod \"glance-default-internal-api-0\" (UID: \"aaaaf2d3-8f74-4ce7-8407-2f0b0bece070\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:31:08 crc kubenswrapper[4558]: I0120 17:31:08.247905 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aaaaf2d3-8f74-4ce7-8407-2f0b0bece070-config-data\") pod \"glance-default-internal-api-0\" (UID: \"aaaaf2d3-8f74-4ce7-8407-2f0b0bece070\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:31:08 crc kubenswrapper[4558]: I0120 17:31:08.247982 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rq4md\" (UniqueName: \"kubernetes.io/projected/aaaaf2d3-8f74-4ce7-8407-2f0b0bece070-kube-api-access-rq4md\") pod \"glance-default-internal-api-0\" (UID: \"aaaaf2d3-8f74-4ce7-8407-2f0b0bece070\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:31:08 crc kubenswrapper[4558]: I0120 17:31:08.248035 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aaaaf2d3-8f74-4ce7-8407-2f0b0bece070-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"aaaaf2d3-8f74-4ce7-8407-2f0b0bece070\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:31:08 crc kubenswrapper[4558]: I0120 17:31:08.248093 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"glance-default-internal-api-0\" (UID: \"aaaaf2d3-8f74-4ce7-8407-2f0b0bece070\") device mount path \"/mnt/openstack/pv15\"" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:31:08 crc kubenswrapper[4558]: I0120 17:31:08.248923 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aaaaf2d3-8f74-4ce7-8407-2f0b0bece070-logs\") pod \"glance-default-internal-api-0\" (UID: \"aaaaf2d3-8f74-4ce7-8407-2f0b0bece070\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:31:08 crc kubenswrapper[4558]: I0120 17:31:08.249068 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: 
\"kubernetes.io/empty-dir/aaaaf2d3-8f74-4ce7-8407-2f0b0bece070-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"aaaaf2d3-8f74-4ce7-8407-2f0b0bece070\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:31:08 crc kubenswrapper[4558]: I0120 17:31:08.252001 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aaaaf2d3-8f74-4ce7-8407-2f0b0bece070-scripts\") pod \"glance-default-internal-api-0\" (UID: \"aaaaf2d3-8f74-4ce7-8407-2f0b0bece070\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:31:08 crc kubenswrapper[4558]: I0120 17:31:08.252579 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aaaaf2d3-8f74-4ce7-8407-2f0b0bece070-config-data\") pod \"glance-default-internal-api-0\" (UID: \"aaaaf2d3-8f74-4ce7-8407-2f0b0bece070\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:31:08 crc kubenswrapper[4558]: I0120 17:31:08.253374 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aaaaf2d3-8f74-4ce7-8407-2f0b0bece070-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"aaaaf2d3-8f74-4ce7-8407-2f0b0bece070\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:31:08 crc kubenswrapper[4558]: I0120 17:31:08.265811 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rq4md\" (UniqueName: \"kubernetes.io/projected/aaaaf2d3-8f74-4ce7-8407-2f0b0bece070-kube-api-access-rq4md\") pod \"glance-default-internal-api-0\" (UID: \"aaaaf2d3-8f74-4ce7-8407-2f0b0bece070\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:31:08 crc kubenswrapper[4558]: I0120 17:31:08.275264 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"glance-default-internal-api-0\" (UID: \"aaaaf2d3-8f74-4ce7-8407-2f0b0bece070\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:31:08 crc kubenswrapper[4558]: I0120 17:31:08.361461 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:31:08 crc kubenswrapper[4558]: I0120 17:31:08.586240 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522" path="/var/lib/kubelet/pods/1f87c5fb-8c96-4e5a-bdbb-ca072b7cf522/volumes" Jan 20 17:31:08 crc kubenswrapper[4558]: I0120 17:31:08.587561 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2f28be37-ef13-48a6-9b8f-84544328e73b" path="/var/lib/kubelet/pods/2f28be37-ef13-48a6-9b8f-84544328e73b/volumes" Jan 20 17:31:08 crc kubenswrapper[4558]: I0120 17:31:08.830973 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:31:08 crc kubenswrapper[4558]: I0120 17:31:08.986987 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"aaaaf2d3-8f74-4ce7-8407-2f0b0bece070","Type":"ContainerStarted","Data":"fd97bcd155d8655d88e65b818cf9169181c3b48d6002d6b382f7268fcedceba0"} Jan 20 17:31:08 crc kubenswrapper[4558]: I0120 17:31:08.990427 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"59e7ef92-3d74-4334-ac19-671eb3199d0b","Type":"ContainerStarted","Data":"40ef3c5a952862f3e1bf0e9c9f5e2c665b77d0d4387f66b4456d7a0b64be3cbf"} Jan 20 17:31:08 crc kubenswrapper[4558]: I0120 17:31:08.990477 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"59e7ef92-3d74-4334-ac19-671eb3199d0b","Type":"ContainerStarted","Data":"85d12510c1da886d4ee25d1c28a63fdd18e54d2a19e1791fb8e0c9a0fa49fb05"} Jan 20 17:31:09 crc kubenswrapper[4558]: I0120 17:31:09.019554 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-external-api-0" podStartSLOduration=3.019530661 podStartE2EDuration="3.019530661s" podCreationTimestamp="2026-01-20 17:31:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:31:09.005052868 +0000 UTC m=+2962.765390835" watchObservedRunningTime="2026-01-20 17:31:09.019530661 +0000 UTC m=+2962.779868628" Jan 20 17:31:10 crc kubenswrapper[4558]: I0120 17:31:10.008069 4558 generic.go:334] "Generic (PLEG): container finished" podID="5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00" containerID="b132b77e1877bae6d1988fa8eddb280d9e29969477d275d815cbad46d435252b" exitCode=0 Jan 20 17:31:10 crc kubenswrapper[4558]: I0120 17:31:10.008153 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00","Type":"ContainerDied","Data":"b132b77e1877bae6d1988fa8eddb280d9e29969477d275d815cbad46d435252b"} Jan 20 17:31:10 crc kubenswrapper[4558]: I0120 17:31:10.011921 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"aaaaf2d3-8f74-4ce7-8407-2f0b0bece070","Type":"ContainerStarted","Data":"b5f54a9d58e26e3dd566d5f5f71a6cabd02248b3e9fd086fccf6af2e071db403"} Jan 20 17:31:10 crc kubenswrapper[4558]: I0120 17:31:10.011974 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"aaaaf2d3-8f74-4ce7-8407-2f0b0bece070","Type":"ContainerStarted","Data":"bc8a516268d1e7dd8e3c075c9cfa7d70176325f633e340325c2c6e2aa0d06228"} Jan 20 
17:31:10 crc kubenswrapper[4558]: I0120 17:31:10.027781 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:31:10 crc kubenswrapper[4558]: I0120 17:31:10.043943 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-internal-api-0" podStartSLOduration=3.043664508 podStartE2EDuration="3.043664508s" podCreationTimestamp="2026-01-20 17:31:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:31:10.035231672 +0000 UTC m=+2963.795569638" watchObservedRunningTime="2026-01-20 17:31:10.043664508 +0000 UTC m=+2963.804002475" Jan 20 17:31:10 crc kubenswrapper[4558]: I0120 17:31:10.190830 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vr56v\" (UniqueName: \"kubernetes.io/projected/5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00-kube-api-access-vr56v\") pod \"5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00\" (UID: \"5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00\") " Jan 20 17:31:10 crc kubenswrapper[4558]: I0120 17:31:10.190878 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00-scripts\") pod \"5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00\" (UID: \"5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00\") " Jan 20 17:31:10 crc kubenswrapper[4558]: I0120 17:31:10.190911 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00-run-httpd\") pod \"5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00\" (UID: \"5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00\") " Jan 20 17:31:10 crc kubenswrapper[4558]: I0120 17:31:10.190949 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00-log-httpd\") pod \"5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00\" (UID: \"5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00\") " Jan 20 17:31:10 crc kubenswrapper[4558]: I0120 17:31:10.190978 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00-config-data\") pod \"5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00\" (UID: \"5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00\") " Jan 20 17:31:10 crc kubenswrapper[4558]: I0120 17:31:10.191004 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00-sg-core-conf-yaml\") pod \"5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00\" (UID: \"5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00\") " Jan 20 17:31:10 crc kubenswrapper[4558]: I0120 17:31:10.191030 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00-combined-ca-bundle\") pod \"5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00\" (UID: \"5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00\") " Jan 20 17:31:10 crc kubenswrapper[4558]: I0120 17:31:10.191374 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00" (UID: "5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00"). 
InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:31:10 crc kubenswrapper[4558]: I0120 17:31:10.191642 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00" (UID: "5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:31:10 crc kubenswrapper[4558]: I0120 17:31:10.193551 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:31:10 crc kubenswrapper[4558]: I0120 17:31:10.193829 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:31:10 crc kubenswrapper[4558]: I0120 17:31:10.196847 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00-scripts" (OuterVolumeSpecName: "scripts") pod "5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00" (UID: "5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:31:10 crc kubenswrapper[4558]: I0120 17:31:10.197383 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00-kube-api-access-vr56v" (OuterVolumeSpecName: "kube-api-access-vr56v") pod "5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00" (UID: "5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00"). InnerVolumeSpecName "kube-api-access-vr56v". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:31:10 crc kubenswrapper[4558]: I0120 17:31:10.217203 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00" (UID: "5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:31:10 crc kubenswrapper[4558]: I0120 17:31:10.250318 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00" (UID: "5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:31:10 crc kubenswrapper[4558]: I0120 17:31:10.268840 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00-config-data" (OuterVolumeSpecName: "config-data") pod "5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00" (UID: "5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:31:10 crc kubenswrapper[4558]: I0120 17:31:10.294966 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:31:10 crc kubenswrapper[4558]: I0120 17:31:10.294991 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vr56v\" (UniqueName: \"kubernetes.io/projected/5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00-kube-api-access-vr56v\") on node \"crc\" DevicePath \"\"" Jan 20 17:31:10 crc kubenswrapper[4558]: I0120 17:31:10.295003 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:31:10 crc kubenswrapper[4558]: I0120 17:31:10.295012 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:31:10 crc kubenswrapper[4558]: I0120 17:31:10.295022 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:31:11 crc kubenswrapper[4558]: I0120 17:31:11.027902 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00","Type":"ContainerDied","Data":"50fff1102ca3ba28aa408c3c10c097fc1267f100c66542cce3117d2bab902210"} Jan 20 17:31:11 crc kubenswrapper[4558]: I0120 17:31:11.027968 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:31:11 crc kubenswrapper[4558]: I0120 17:31:11.028646 4558 scope.go:117] "RemoveContainer" containerID="934d208a4a3607faba4102e258a17ff86ab231b6df91b3c206ed6eaf6f9148ce" Jan 20 17:31:11 crc kubenswrapper[4558]: I0120 17:31:11.051081 4558 scope.go:117] "RemoveContainer" containerID="9fd43867f209bf26f8256753383c15a6625393d84ed247c3d40fc13e896a0e58" Jan 20 17:31:11 crc kubenswrapper[4558]: I0120 17:31:11.055187 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:31:11 crc kubenswrapper[4558]: I0120 17:31:11.065055 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:31:11 crc kubenswrapper[4558]: I0120 17:31:11.081566 4558 scope.go:117] "RemoveContainer" containerID="36cf841bd011a58c66dcc157de2ad486b8170e4f32624bee453064b82c3fb39f" Jan 20 17:31:11 crc kubenswrapper[4558]: I0120 17:31:11.086962 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:31:11 crc kubenswrapper[4558]: E0120 17:31:11.087516 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00" containerName="ceilometer-central-agent" Jan 20 17:31:11 crc kubenswrapper[4558]: I0120 17:31:11.087590 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00" containerName="ceilometer-central-agent" Jan 20 17:31:11 crc kubenswrapper[4558]: E0120 17:31:11.087672 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00" containerName="ceilometer-notification-agent" Jan 20 17:31:11 crc kubenswrapper[4558]: I0120 17:31:11.087719 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00" containerName="ceilometer-notification-agent" Jan 20 17:31:11 crc kubenswrapper[4558]: E0120 17:31:11.087806 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00" containerName="proxy-httpd" Jan 20 17:31:11 crc kubenswrapper[4558]: I0120 17:31:11.087857 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00" containerName="proxy-httpd" Jan 20 17:31:11 crc kubenswrapper[4558]: E0120 17:31:11.087904 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00" containerName="sg-core" Jan 20 17:31:11 crc kubenswrapper[4558]: I0120 17:31:11.087946 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00" containerName="sg-core" Jan 20 17:31:11 crc kubenswrapper[4558]: I0120 17:31:11.088227 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00" containerName="proxy-httpd" Jan 20 17:31:11 crc kubenswrapper[4558]: I0120 17:31:11.088293 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00" containerName="ceilometer-notification-agent" Jan 20 17:31:11 crc kubenswrapper[4558]: I0120 17:31:11.088352 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00" containerName="ceilometer-central-agent" Jan 20 17:31:11 crc kubenswrapper[4558]: I0120 17:31:11.088408 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00" containerName="sg-core" Jan 20 17:31:11 crc kubenswrapper[4558]: 
I0120 17:31:11.090026 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:31:11 crc kubenswrapper[4558]: I0120 17:31:11.093775 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:31:11 crc kubenswrapper[4558]: I0120 17:31:11.094027 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:31:11 crc kubenswrapper[4558]: I0120 17:31:11.095118 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:31:11 crc kubenswrapper[4558]: I0120 17:31:11.109238 4558 scope.go:117] "RemoveContainer" containerID="b132b77e1877bae6d1988fa8eddb280d9e29969477d275d815cbad46d435252b" Jan 20 17:31:11 crc kubenswrapper[4558]: I0120 17:31:11.109821 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bmjnp\" (UniqueName: \"kubernetes.io/projected/ae0038a3-7264-4263-961d-ff4bd973270e-kube-api-access-bmjnp\") pod \"ceilometer-0\" (UID: \"ae0038a3-7264-4263-961d-ff4bd973270e\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:31:11 crc kubenswrapper[4558]: I0120 17:31:11.109856 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ae0038a3-7264-4263-961d-ff4bd973270e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ae0038a3-7264-4263-961d-ff4bd973270e\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:31:11 crc kubenswrapper[4558]: I0120 17:31:11.109925 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ae0038a3-7264-4263-961d-ff4bd973270e-run-httpd\") pod \"ceilometer-0\" (UID: \"ae0038a3-7264-4263-961d-ff4bd973270e\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:31:11 crc kubenswrapper[4558]: I0120 17:31:11.109963 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae0038a3-7264-4263-961d-ff4bd973270e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ae0038a3-7264-4263-961d-ff4bd973270e\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:31:11 crc kubenswrapper[4558]: I0120 17:31:11.109980 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ae0038a3-7264-4263-961d-ff4bd973270e-scripts\") pod \"ceilometer-0\" (UID: \"ae0038a3-7264-4263-961d-ff4bd973270e\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:31:11 crc kubenswrapper[4558]: I0120 17:31:11.110004 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ae0038a3-7264-4263-961d-ff4bd973270e-log-httpd\") pod \"ceilometer-0\" (UID: \"ae0038a3-7264-4263-961d-ff4bd973270e\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:31:11 crc kubenswrapper[4558]: I0120 17:31:11.110020 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae0038a3-7264-4263-961d-ff4bd973270e-config-data\") pod \"ceilometer-0\" (UID: \"ae0038a3-7264-4263-961d-ff4bd973270e\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:31:11 crc 
kubenswrapper[4558]: I0120 17:31:11.211599 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bmjnp\" (UniqueName: \"kubernetes.io/projected/ae0038a3-7264-4263-961d-ff4bd973270e-kube-api-access-bmjnp\") pod \"ceilometer-0\" (UID: \"ae0038a3-7264-4263-961d-ff4bd973270e\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:31:11 crc kubenswrapper[4558]: I0120 17:31:11.211649 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ae0038a3-7264-4263-961d-ff4bd973270e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ae0038a3-7264-4263-961d-ff4bd973270e\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:31:11 crc kubenswrapper[4558]: I0120 17:31:11.211771 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ae0038a3-7264-4263-961d-ff4bd973270e-run-httpd\") pod \"ceilometer-0\" (UID: \"ae0038a3-7264-4263-961d-ff4bd973270e\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:31:11 crc kubenswrapper[4558]: I0120 17:31:11.211818 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae0038a3-7264-4263-961d-ff4bd973270e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ae0038a3-7264-4263-961d-ff4bd973270e\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:31:11 crc kubenswrapper[4558]: I0120 17:31:11.211838 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ae0038a3-7264-4263-961d-ff4bd973270e-scripts\") pod \"ceilometer-0\" (UID: \"ae0038a3-7264-4263-961d-ff4bd973270e\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:31:11 crc kubenswrapper[4558]: I0120 17:31:11.211887 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ae0038a3-7264-4263-961d-ff4bd973270e-log-httpd\") pod \"ceilometer-0\" (UID: \"ae0038a3-7264-4263-961d-ff4bd973270e\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:31:11 crc kubenswrapper[4558]: I0120 17:31:11.211903 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae0038a3-7264-4263-961d-ff4bd973270e-config-data\") pod \"ceilometer-0\" (UID: \"ae0038a3-7264-4263-961d-ff4bd973270e\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:31:11 crc kubenswrapper[4558]: I0120 17:31:11.212608 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ae0038a3-7264-4263-961d-ff4bd973270e-run-httpd\") pod \"ceilometer-0\" (UID: \"ae0038a3-7264-4263-961d-ff4bd973270e\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:31:11 crc kubenswrapper[4558]: I0120 17:31:11.212674 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ae0038a3-7264-4263-961d-ff4bd973270e-log-httpd\") pod \"ceilometer-0\" (UID: \"ae0038a3-7264-4263-961d-ff4bd973270e\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:31:11 crc kubenswrapper[4558]: I0120 17:31:11.217090 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ae0038a3-7264-4263-961d-ff4bd973270e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: 
\"ae0038a3-7264-4263-961d-ff4bd973270e\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:31:11 crc kubenswrapper[4558]: I0120 17:31:11.217810 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae0038a3-7264-4263-961d-ff4bd973270e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ae0038a3-7264-4263-961d-ff4bd973270e\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:31:11 crc kubenswrapper[4558]: I0120 17:31:11.217902 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae0038a3-7264-4263-961d-ff4bd973270e-config-data\") pod \"ceilometer-0\" (UID: \"ae0038a3-7264-4263-961d-ff4bd973270e\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:31:11 crc kubenswrapper[4558]: I0120 17:31:11.218870 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ae0038a3-7264-4263-961d-ff4bd973270e-scripts\") pod \"ceilometer-0\" (UID: \"ae0038a3-7264-4263-961d-ff4bd973270e\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:31:11 crc kubenswrapper[4558]: I0120 17:31:11.226605 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bmjnp\" (UniqueName: \"kubernetes.io/projected/ae0038a3-7264-4263-961d-ff4bd973270e-kube-api-access-bmjnp\") pod \"ceilometer-0\" (UID: \"ae0038a3-7264-4263-961d-ff4bd973270e\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:31:11 crc kubenswrapper[4558]: I0120 17:31:11.275677 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:31:11 crc kubenswrapper[4558]: I0120 17:31:11.411347 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:31:11 crc kubenswrapper[4558]: I0120 17:31:11.726518 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell0-cell-mapping-b6m6v"] Jan 20 17:31:11 crc kubenswrapper[4558]: I0120 17:31:11.728050 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-b6m6v" Jan 20 17:31:11 crc kubenswrapper[4558]: I0120 17:31:11.729905 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-manage-config-data" Jan 20 17:31:11 crc kubenswrapper[4558]: I0120 17:31:11.730658 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-manage-scripts" Jan 20 17:31:11 crc kubenswrapper[4558]: I0120 17:31:11.749690 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-cell-mapping-b6m6v"] Jan 20 17:31:11 crc kubenswrapper[4558]: I0120 17:31:11.879231 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:31:11 crc kubenswrapper[4558]: I0120 17:31:11.880483 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:31:11 crc kubenswrapper[4558]: I0120 17:31:11.882458 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-scheduler-config-data" Jan 20 17:31:11 crc kubenswrapper[4558]: I0120 17:31:11.891732 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:31:11 crc kubenswrapper[4558]: I0120 17:31:11.912616 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:31:11 crc kubenswrapper[4558]: I0120 17:31:11.928019 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ca7c35ef-9af8-452c-800d-89bfa63317af-scripts\") pod \"nova-cell0-cell-mapping-b6m6v\" (UID: \"ca7c35ef-9af8-452c-800d-89bfa63317af\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-b6m6v" Jan 20 17:31:11 crc kubenswrapper[4558]: I0120 17:31:11.928076 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dfdfb43e-4249-4f0e-b397-09ffd2245fc6-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"dfdfb43e-4249-4f0e-b397-09ffd2245fc6\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:31:11 crc kubenswrapper[4558]: I0120 17:31:11.928100 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca7c35ef-9af8-452c-800d-89bfa63317af-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-b6m6v\" (UID: \"ca7c35ef-9af8-452c-800d-89bfa63317af\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-b6m6v" Jan 20 17:31:11 crc kubenswrapper[4558]: I0120 17:31:11.928224 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca7c35ef-9af8-452c-800d-89bfa63317af-config-data\") pod \"nova-cell0-cell-mapping-b6m6v\" (UID: \"ca7c35ef-9af8-452c-800d-89bfa63317af\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-b6m6v" Jan 20 17:31:11 crc kubenswrapper[4558]: I0120 17:31:11.928271 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dfdfb43e-4249-4f0e-b397-09ffd2245fc6-config-data\") pod \"nova-scheduler-0\" (UID: \"dfdfb43e-4249-4f0e-b397-09ffd2245fc6\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:31:11 crc kubenswrapper[4558]: I0120 17:31:11.928383 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-66474\" (UniqueName: \"kubernetes.io/projected/dfdfb43e-4249-4f0e-b397-09ffd2245fc6-kube-api-access-66474\") pod \"nova-scheduler-0\" (UID: \"dfdfb43e-4249-4f0e-b397-09ffd2245fc6\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:31:11 crc kubenswrapper[4558]: I0120 17:31:11.928440 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bzs8v\" (UniqueName: \"kubernetes.io/projected/ca7c35ef-9af8-452c-800d-89bfa63317af-kube-api-access-bzs8v\") pod \"nova-cell0-cell-mapping-b6m6v\" (UID: \"ca7c35ef-9af8-452c-800d-89bfa63317af\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-b6m6v" Jan 20 17:31:11 crc kubenswrapper[4558]: I0120 17:31:11.950553 4558 kubelet.go:2421] "SyncLoop 
ADD" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:31:11 crc kubenswrapper[4558]: I0120 17:31:11.952387 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:31:11 crc kubenswrapper[4558]: I0120 17:31:11.955570 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-metadata-config-data" Jan 20 17:31:11 crc kubenswrapper[4558]: I0120 17:31:11.989236 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:31:12 crc kubenswrapper[4558]: I0120 17:31:12.008589 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:31:12 crc kubenswrapper[4558]: I0120 17:31:12.010057 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:31:12 crc kubenswrapper[4558]: I0120 17:31:12.015490 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-novncproxy-config-data" Jan 20 17:31:12 crc kubenswrapper[4558]: I0120 17:31:12.029567 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ca7c35ef-9af8-452c-800d-89bfa63317af-scripts\") pod \"nova-cell0-cell-mapping-b6m6v\" (UID: \"ca7c35ef-9af8-452c-800d-89bfa63317af\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-b6m6v" Jan 20 17:31:12 crc kubenswrapper[4558]: I0120 17:31:12.029618 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dfdfb43e-4249-4f0e-b397-09ffd2245fc6-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"dfdfb43e-4249-4f0e-b397-09ffd2245fc6\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:31:12 crc kubenswrapper[4558]: I0120 17:31:12.029640 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca7c35ef-9af8-452c-800d-89bfa63317af-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-b6m6v\" (UID: \"ca7c35ef-9af8-452c-800d-89bfa63317af\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-b6m6v" Jan 20 17:31:12 crc kubenswrapper[4558]: I0120 17:31:12.029743 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca7c35ef-9af8-452c-800d-89bfa63317af-config-data\") pod \"nova-cell0-cell-mapping-b6m6v\" (UID: \"ca7c35ef-9af8-452c-800d-89bfa63317af\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-b6m6v" Jan 20 17:31:12 crc kubenswrapper[4558]: I0120 17:31:12.029768 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dfdfb43e-4249-4f0e-b397-09ffd2245fc6-config-data\") pod \"nova-scheduler-0\" (UID: \"dfdfb43e-4249-4f0e-b397-09ffd2245fc6\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:31:12 crc kubenswrapper[4558]: I0120 17:31:12.029890 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-66474\" (UniqueName: \"kubernetes.io/projected/dfdfb43e-4249-4f0e-b397-09ffd2245fc6-kube-api-access-66474\") pod \"nova-scheduler-0\" (UID: \"dfdfb43e-4249-4f0e-b397-09ffd2245fc6\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:31:12 crc kubenswrapper[4558]: I0120 17:31:12.029929 4558 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bzs8v\" (UniqueName: \"kubernetes.io/projected/ca7c35ef-9af8-452c-800d-89bfa63317af-kube-api-access-bzs8v\") pod \"nova-cell0-cell-mapping-b6m6v\" (UID: \"ca7c35ef-9af8-452c-800d-89bfa63317af\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-b6m6v" Jan 20 17:31:12 crc kubenswrapper[4558]: I0120 17:31:12.039612 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca7c35ef-9af8-452c-800d-89bfa63317af-config-data\") pod \"nova-cell0-cell-mapping-b6m6v\" (UID: \"ca7c35ef-9af8-452c-800d-89bfa63317af\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-b6m6v" Jan 20 17:31:12 crc kubenswrapper[4558]: I0120 17:31:12.040530 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dfdfb43e-4249-4f0e-b397-09ffd2245fc6-config-data\") pod \"nova-scheduler-0\" (UID: \"dfdfb43e-4249-4f0e-b397-09ffd2245fc6\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:31:12 crc kubenswrapper[4558]: I0120 17:31:12.045782 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dfdfb43e-4249-4f0e-b397-09ffd2245fc6-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"dfdfb43e-4249-4f0e-b397-09ffd2245fc6\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:31:12 crc kubenswrapper[4558]: I0120 17:31:12.054627 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca7c35ef-9af8-452c-800d-89bfa63317af-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-b6m6v\" (UID: \"ca7c35ef-9af8-452c-800d-89bfa63317af\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-b6m6v" Jan 20 17:31:12 crc kubenswrapper[4558]: I0120 17:31:12.058682 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ca7c35ef-9af8-452c-800d-89bfa63317af-scripts\") pod \"nova-cell0-cell-mapping-b6m6v\" (UID: \"ca7c35ef-9af8-452c-800d-89bfa63317af\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-b6m6v" Jan 20 17:31:12 crc kubenswrapper[4558]: I0120 17:31:12.060935 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:31:12 crc kubenswrapper[4558]: I0120 17:31:12.062054 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bzs8v\" (UniqueName: \"kubernetes.io/projected/ca7c35ef-9af8-452c-800d-89bfa63317af-kube-api-access-bzs8v\") pod \"nova-cell0-cell-mapping-b6m6v\" (UID: \"ca7c35ef-9af8-452c-800d-89bfa63317af\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-b6m6v" Jan 20 17:31:12 crc kubenswrapper[4558]: I0120 17:31:12.067285 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-66474\" (UniqueName: \"kubernetes.io/projected/dfdfb43e-4249-4f0e-b397-09ffd2245fc6-kube-api-access-66474\") pod \"nova-scheduler-0\" (UID: \"dfdfb43e-4249-4f0e-b397-09ffd2245fc6\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:31:12 crc kubenswrapper[4558]: I0120 17:31:12.070483 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"ae0038a3-7264-4263-961d-ff4bd973270e","Type":"ContainerStarted","Data":"77f35fccc8aba402723b0717629f9aebee0a0ecbf33c1f5bb0a869687dac41ac"} Jan 20 17:31:12 crc 
kubenswrapper[4558]: I0120 17:31:12.090884 4558 scope.go:117] "RemoveContainer" containerID="5df938cc30440b9b047183119d309af1693ed6d0bba0b1b9b61720b412b8c825" Jan 20 17:31:12 crc kubenswrapper[4558]: I0120 17:31:12.118121 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:31:12 crc kubenswrapper[4558]: I0120 17:31:12.120106 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:31:12 crc kubenswrapper[4558]: I0120 17:31:12.122362 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-api-config-data" Jan 20 17:31:12 crc kubenswrapper[4558]: I0120 17:31:12.128891 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:31:12 crc kubenswrapper[4558]: I0120 17:31:12.131857 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7d0577b7-3358-4cfa-b158-defd76ba6c5e-logs\") pod \"nova-metadata-0\" (UID: \"7d0577b7-3358-4cfa-b158-defd76ba6c5e\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:31:12 crc kubenswrapper[4558]: I0120 17:31:12.131928 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d0577b7-3358-4cfa-b158-defd76ba6c5e-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"7d0577b7-3358-4cfa-b158-defd76ba6c5e\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:31:12 crc kubenswrapper[4558]: I0120 17:31:12.131973 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xjhbz\" (UniqueName: \"kubernetes.io/projected/7d6b9532-22b5-41c9-8b15-83a9c515c52d-kube-api-access-xjhbz\") pod \"nova-cell1-novncproxy-0\" (UID: \"7d6b9532-22b5-41c9-8b15-83a9c515c52d\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:31:12 crc kubenswrapper[4558]: I0120 17:31:12.132080 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d0577b7-3358-4cfa-b158-defd76ba6c5e-config-data\") pod \"nova-metadata-0\" (UID: \"7d0577b7-3358-4cfa-b158-defd76ba6c5e\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:31:12 crc kubenswrapper[4558]: I0120 17:31:12.132120 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d6b9532-22b5-41c9-8b15-83a9c515c52d-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"7d6b9532-22b5-41c9-8b15-83a9c515c52d\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:31:12 crc kubenswrapper[4558]: I0120 17:31:12.132136 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d6b9532-22b5-41c9-8b15-83a9c515c52d-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"7d6b9532-22b5-41c9-8b15-83a9c515c52d\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:31:12 crc kubenswrapper[4558]: I0120 17:31:12.132202 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zpbgm\" (UniqueName: \"kubernetes.io/projected/7d0577b7-3358-4cfa-b158-defd76ba6c5e-kube-api-access-zpbgm\") pod \"nova-metadata-0\" (UID: 
\"7d0577b7-3358-4cfa-b158-defd76ba6c5e\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:31:12 crc kubenswrapper[4558]: I0120 17:31:12.138449 4558 scope.go:117] "RemoveContainer" containerID="d140d7cfdbff94ef6b5907046676f6d45c60f5cd711589f64e1ce555bd15a8a6" Jan 20 17:31:12 crc kubenswrapper[4558]: I0120 17:31:12.171326 4558 scope.go:117] "RemoveContainer" containerID="6a0521946798f9e4098ead7f6a1d8d950d13146e8392b370d0b39a8082a8515a" Jan 20 17:31:12 crc kubenswrapper[4558]: I0120 17:31:12.196100 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:31:12 crc kubenswrapper[4558]: I0120 17:31:12.201453 4558 scope.go:117] "RemoveContainer" containerID="1bbe4af0bdcc4ee54aef97ab3e8325a227f63771456a354372adfab0b7220202" Jan 20 17:31:12 crc kubenswrapper[4558]: I0120 17:31:12.233563 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ck2qm\" (UniqueName: \"kubernetes.io/projected/f6508afb-b99c-44cf-9a08-8e239cd9b718-kube-api-access-ck2qm\") pod \"nova-api-0\" (UID: \"f6508afb-b99c-44cf-9a08-8e239cd9b718\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:31:12 crc kubenswrapper[4558]: I0120 17:31:12.233639 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d0577b7-3358-4cfa-b158-defd76ba6c5e-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"7d0577b7-3358-4cfa-b158-defd76ba6c5e\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:31:12 crc kubenswrapper[4558]: I0120 17:31:12.233695 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xjhbz\" (UniqueName: \"kubernetes.io/projected/7d6b9532-22b5-41c9-8b15-83a9c515c52d-kube-api-access-xjhbz\") pod \"nova-cell1-novncproxy-0\" (UID: \"7d6b9532-22b5-41c9-8b15-83a9c515c52d\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:31:12 crc kubenswrapper[4558]: I0120 17:31:12.233743 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f6508afb-b99c-44cf-9a08-8e239cd9b718-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"f6508afb-b99c-44cf-9a08-8e239cd9b718\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:31:12 crc kubenswrapper[4558]: I0120 17:31:12.233824 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f6508afb-b99c-44cf-9a08-8e239cd9b718-logs\") pod \"nova-api-0\" (UID: \"f6508afb-b99c-44cf-9a08-8e239cd9b718\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:31:12 crc kubenswrapper[4558]: I0120 17:31:12.233866 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d0577b7-3358-4cfa-b158-defd76ba6c5e-config-data\") pod \"nova-metadata-0\" (UID: \"7d0577b7-3358-4cfa-b158-defd76ba6c5e\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:31:12 crc kubenswrapper[4558]: I0120 17:31:12.233907 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d6b9532-22b5-41c9-8b15-83a9c515c52d-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"7d6b9532-22b5-41c9-8b15-83a9c515c52d\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:31:12 crc kubenswrapper[4558]: I0120 
17:31:12.233929 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d6b9532-22b5-41c9-8b15-83a9c515c52d-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"7d6b9532-22b5-41c9-8b15-83a9c515c52d\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:31:12 crc kubenswrapper[4558]: I0120 17:31:12.233991 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zpbgm\" (UniqueName: \"kubernetes.io/projected/7d0577b7-3358-4cfa-b158-defd76ba6c5e-kube-api-access-zpbgm\") pod \"nova-metadata-0\" (UID: \"7d0577b7-3358-4cfa-b158-defd76ba6c5e\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:31:12 crc kubenswrapper[4558]: I0120 17:31:12.234046 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f6508afb-b99c-44cf-9a08-8e239cd9b718-config-data\") pod \"nova-api-0\" (UID: \"f6508afb-b99c-44cf-9a08-8e239cd9b718\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:31:12 crc kubenswrapper[4558]: I0120 17:31:12.234147 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7d0577b7-3358-4cfa-b158-defd76ba6c5e-logs\") pod \"nova-metadata-0\" (UID: \"7d0577b7-3358-4cfa-b158-defd76ba6c5e\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:31:12 crc kubenswrapper[4558]: I0120 17:31:12.234619 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7d0577b7-3358-4cfa-b158-defd76ba6c5e-logs\") pod \"nova-metadata-0\" (UID: \"7d0577b7-3358-4cfa-b158-defd76ba6c5e\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:31:12 crc kubenswrapper[4558]: I0120 17:31:12.241376 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d0577b7-3358-4cfa-b158-defd76ba6c5e-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"7d0577b7-3358-4cfa-b158-defd76ba6c5e\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:31:12 crc kubenswrapper[4558]: I0120 17:31:12.242592 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d6b9532-22b5-41c9-8b15-83a9c515c52d-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"7d6b9532-22b5-41c9-8b15-83a9c515c52d\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:31:12 crc kubenswrapper[4558]: I0120 17:31:12.244092 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d0577b7-3358-4cfa-b158-defd76ba6c5e-config-data\") pod \"nova-metadata-0\" (UID: \"7d0577b7-3358-4cfa-b158-defd76ba6c5e\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:31:12 crc kubenswrapper[4558]: I0120 17:31:12.245577 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d6b9532-22b5-41c9-8b15-83a9c515c52d-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"7d6b9532-22b5-41c9-8b15-83a9c515c52d\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:31:12 crc kubenswrapper[4558]: I0120 17:31:12.252470 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zpbgm\" (UniqueName: 
\"kubernetes.io/projected/7d0577b7-3358-4cfa-b158-defd76ba6c5e-kube-api-access-zpbgm\") pod \"nova-metadata-0\" (UID: \"7d0577b7-3358-4cfa-b158-defd76ba6c5e\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:31:12 crc kubenswrapper[4558]: I0120 17:31:12.260644 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xjhbz\" (UniqueName: \"kubernetes.io/projected/7d6b9532-22b5-41c9-8b15-83a9c515c52d-kube-api-access-xjhbz\") pod \"nova-cell1-novncproxy-0\" (UID: \"7d6b9532-22b5-41c9-8b15-83a9c515c52d\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:31:12 crc kubenswrapper[4558]: I0120 17:31:12.279801 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:31:12 crc kubenswrapper[4558]: I0120 17:31:12.282394 4558 scope.go:117] "RemoveContainer" containerID="86a4a8a8eeeac8eb783b7fe9897a2f20ca9d3bc70a3d18047e9800db618ef2ad" Jan 20 17:31:12 crc kubenswrapper[4558]: I0120 17:31:12.328702 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:31:12 crc kubenswrapper[4558]: I0120 17:31:12.332363 4558 scope.go:117] "RemoveContainer" containerID="ad4f89b78d4804b38b89b1fbc52cbdd49b304501413e6e54a707bd56ba2d1e8a" Jan 20 17:31:12 crc kubenswrapper[4558]: I0120 17:31:12.335803 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ck2qm\" (UniqueName: \"kubernetes.io/projected/f6508afb-b99c-44cf-9a08-8e239cd9b718-kube-api-access-ck2qm\") pod \"nova-api-0\" (UID: \"f6508afb-b99c-44cf-9a08-8e239cd9b718\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:31:12 crc kubenswrapper[4558]: I0120 17:31:12.335877 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f6508afb-b99c-44cf-9a08-8e239cd9b718-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"f6508afb-b99c-44cf-9a08-8e239cd9b718\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:31:12 crc kubenswrapper[4558]: I0120 17:31:12.335920 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f6508afb-b99c-44cf-9a08-8e239cd9b718-logs\") pod \"nova-api-0\" (UID: \"f6508afb-b99c-44cf-9a08-8e239cd9b718\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:31:12 crc kubenswrapper[4558]: I0120 17:31:12.335991 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f6508afb-b99c-44cf-9a08-8e239cd9b718-config-data\") pod \"nova-api-0\" (UID: \"f6508afb-b99c-44cf-9a08-8e239cd9b718\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:31:12 crc kubenswrapper[4558]: I0120 17:31:12.337013 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f6508afb-b99c-44cf-9a08-8e239cd9b718-logs\") pod \"nova-api-0\" (UID: \"f6508afb-b99c-44cf-9a08-8e239cd9b718\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:31:12 crc kubenswrapper[4558]: I0120 17:31:12.344075 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f6508afb-b99c-44cf-9a08-8e239cd9b718-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"f6508afb-b99c-44cf-9a08-8e239cd9b718\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:31:12 crc kubenswrapper[4558]: I0120 17:31:12.347681 4558 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f6508afb-b99c-44cf-9a08-8e239cd9b718-config-data\") pod \"nova-api-0\" (UID: \"f6508afb-b99c-44cf-9a08-8e239cd9b718\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:31:12 crc kubenswrapper[4558]: I0120 17:31:12.348903 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-b6m6v" Jan 20 17:31:12 crc kubenswrapper[4558]: I0120 17:31:12.352924 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ck2qm\" (UniqueName: \"kubernetes.io/projected/f6508afb-b99c-44cf-9a08-8e239cd9b718-kube-api-access-ck2qm\") pod \"nova-api-0\" (UID: \"f6508afb-b99c-44cf-9a08-8e239cd9b718\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:31:12 crc kubenswrapper[4558]: I0120 17:31:12.449985 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:31:12 crc kubenswrapper[4558]: I0120 17:31:12.499658 4558 scope.go:117] "RemoveContainer" containerID="e208edb098530123771487358f9d48b2af80ac41b80b4e499a06bbe58e7eec93" Jan 20 17:31:12 crc kubenswrapper[4558]: I0120 17:31:12.604493 4558 scope.go:117] "RemoveContainer" containerID="95487ba24ea1b5e70725b0d792dbc8653b76f5c38eac7b039714b3d9fd0a7854" Jan 20 17:31:12 crc kubenswrapper[4558]: I0120 17:31:12.606008 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00" path="/var/lib/kubelet/pods/5d6a1a3c-9291-4b95-89ed-69c7dfcb8e00/volumes" Jan 20 17:31:12 crc kubenswrapper[4558]: I0120 17:31:12.670609 4558 scope.go:117] "RemoveContainer" containerID="e19a292a92ee00b776620f6d6da97636958e79b7d260ef36ae81df04a9975fff" Jan 20 17:31:12 crc kubenswrapper[4558]: I0120 17:31:12.706032 4558 scope.go:117] "RemoveContainer" containerID="9b5faec98795b92df82474a1c5193b401d1ccb9d90b9025c1ac178dc22c1725f" Jan 20 17:31:12 crc kubenswrapper[4558]: I0120 17:31:12.760708 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:31:12 crc kubenswrapper[4558]: I0120 17:31:12.785496 4558 scope.go:117] "RemoveContainer" containerID="f3cfae4fc913da224a21d06bfc724e3180240bdec6193cf871256739ecf1d2d6" Jan 20 17:31:12 crc kubenswrapper[4558]: I0120 17:31:12.951453 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:31:13 crc kubenswrapper[4558]: I0120 17:31:13.014000 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-db-sync-s8ph9"] Jan 20 17:31:13 crc kubenswrapper[4558]: I0120 17:31:13.037002 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-s8ph9" Jan 20 17:31:13 crc kubenswrapper[4558]: I0120 17:31:13.043799 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-conductor-config-data" Jan 20 17:31:13 crc kubenswrapper[4558]: I0120 17:31:13.045318 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-conductor-scripts" Jan 20 17:31:13 crc kubenswrapper[4558]: I0120 17:31:13.070472 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-db-sync-s8ph9"] Jan 20 17:31:13 crc kubenswrapper[4558]: I0120 17:31:13.097895 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-cell-mapping-b6m6v"] Jan 20 17:31:13 crc kubenswrapper[4558]: I0120 17:31:13.113139 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-b6m6v" event={"ID":"ca7c35ef-9af8-452c-800d-89bfa63317af","Type":"ContainerStarted","Data":"945aa15fd9cb055547c8696f32e183367020a4dd1b524ff406d36b13f835a4c3"} Jan 20 17:31:13 crc kubenswrapper[4558]: I0120 17:31:13.115801 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:31:13 crc kubenswrapper[4558]: I0120 17:31:13.127228 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"ae0038a3-7264-4263-961d-ff4bd973270e","Type":"ContainerStarted","Data":"5cfd9ea90fd3f8cb0609173530a9c3bac6afc3a002b0b0d58a159c437eb42d76"} Jan 20 17:31:13 crc kubenswrapper[4558]: I0120 17:31:13.132266 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"dfdfb43e-4249-4f0e-b397-09ffd2245fc6","Type":"ContainerStarted","Data":"83a5f1b208f4daf04893c3fef923efc5336fbc5b5ccd3afe5700c022800c7e84"} Jan 20 17:31:13 crc kubenswrapper[4558]: I0120 17:31:13.135594 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"7d0577b7-3358-4cfa-b158-defd76ba6c5e","Type":"ContainerStarted","Data":"5bc52db2603c8d870aea1111d772b5c72f0854d3c55f540593caa7499e5eea0e"} Jan 20 17:31:13 crc kubenswrapper[4558]: I0120 17:31:13.151818 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-scheduler-0" podStartSLOduration=2.151800489 podStartE2EDuration="2.151800489s" podCreationTimestamp="2026-01-20 17:31:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:31:13.142931413 +0000 UTC m=+2966.903269369" watchObservedRunningTime="2026-01-20 17:31:13.151800489 +0000 UTC m=+2966.912138455" Jan 20 17:31:13 crc kubenswrapper[4558]: I0120 17:31:13.165219 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38224f24-100b-4765-b1f3-886f8941593d-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-s8ph9\" (UID: \"38224f24-100b-4765-b1f3-886f8941593d\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-s8ph9" Jan 20 17:31:13 crc kubenswrapper[4558]: I0120 17:31:13.165275 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nt98f\" (UniqueName: 
\"kubernetes.io/projected/38224f24-100b-4765-b1f3-886f8941593d-kube-api-access-nt98f\") pod \"nova-cell1-conductor-db-sync-s8ph9\" (UID: \"38224f24-100b-4765-b1f3-886f8941593d\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-s8ph9" Jan 20 17:31:13 crc kubenswrapper[4558]: I0120 17:31:13.165314 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/38224f24-100b-4765-b1f3-886f8941593d-scripts\") pod \"nova-cell1-conductor-db-sync-s8ph9\" (UID: \"38224f24-100b-4765-b1f3-886f8941593d\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-s8ph9" Jan 20 17:31:13 crc kubenswrapper[4558]: I0120 17:31:13.165389 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/38224f24-100b-4765-b1f3-886f8941593d-config-data\") pod \"nova-cell1-conductor-db-sync-s8ph9\" (UID: \"38224f24-100b-4765-b1f3-886f8941593d\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-s8ph9" Jan 20 17:31:13 crc kubenswrapper[4558]: I0120 17:31:13.195946 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:31:13 crc kubenswrapper[4558]: I0120 17:31:13.268616 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/38224f24-100b-4765-b1f3-886f8941593d-config-data\") pod \"nova-cell1-conductor-db-sync-s8ph9\" (UID: \"38224f24-100b-4765-b1f3-886f8941593d\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-s8ph9" Jan 20 17:31:13 crc kubenswrapper[4558]: I0120 17:31:13.269423 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38224f24-100b-4765-b1f3-886f8941593d-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-s8ph9\" (UID: \"38224f24-100b-4765-b1f3-886f8941593d\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-s8ph9" Jan 20 17:31:13 crc kubenswrapper[4558]: I0120 17:31:13.269517 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nt98f\" (UniqueName: \"kubernetes.io/projected/38224f24-100b-4765-b1f3-886f8941593d-kube-api-access-nt98f\") pod \"nova-cell1-conductor-db-sync-s8ph9\" (UID: \"38224f24-100b-4765-b1f3-886f8941593d\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-s8ph9" Jan 20 17:31:13 crc kubenswrapper[4558]: I0120 17:31:13.269577 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/38224f24-100b-4765-b1f3-886f8941593d-scripts\") pod \"nova-cell1-conductor-db-sync-s8ph9\" (UID: \"38224f24-100b-4765-b1f3-886f8941593d\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-s8ph9" Jan 20 17:31:13 crc kubenswrapper[4558]: I0120 17:31:13.273546 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/38224f24-100b-4765-b1f3-886f8941593d-config-data\") pod \"nova-cell1-conductor-db-sync-s8ph9\" (UID: \"38224f24-100b-4765-b1f3-886f8941593d\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-s8ph9" Jan 20 17:31:13 crc kubenswrapper[4558]: I0120 17:31:13.274857 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/38224f24-100b-4765-b1f3-886f8941593d-scripts\") pod 
\"nova-cell1-conductor-db-sync-s8ph9\" (UID: \"38224f24-100b-4765-b1f3-886f8941593d\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-s8ph9" Jan 20 17:31:13 crc kubenswrapper[4558]: I0120 17:31:13.275836 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38224f24-100b-4765-b1f3-886f8941593d-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-s8ph9\" (UID: \"38224f24-100b-4765-b1f3-886f8941593d\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-s8ph9" Jan 20 17:31:13 crc kubenswrapper[4558]: I0120 17:31:13.287541 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nt98f\" (UniqueName: \"kubernetes.io/projected/38224f24-100b-4765-b1f3-886f8941593d-kube-api-access-nt98f\") pod \"nova-cell1-conductor-db-sync-s8ph9\" (UID: \"38224f24-100b-4765-b1f3-886f8941593d\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-s8ph9" Jan 20 17:31:13 crc kubenswrapper[4558]: I0120 17:31:13.367864 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-s8ph9" Jan 20 17:31:13 crc kubenswrapper[4558]: I0120 17:31:13.979994 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-db-sync-s8ph9"] Jan 20 17:31:13 crc kubenswrapper[4558]: W0120 17:31:13.985779 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod38224f24_100b_4765_b1f3_886f8941593d.slice/crio-363fb83ea3721242e10fb9294e57f5e863b79b60f5563593c043ffd1a45cc7e5 WatchSource:0}: Error finding container 363fb83ea3721242e10fb9294e57f5e863b79b60f5563593c043ffd1a45cc7e5: Status 404 returned error can't find the container with id 363fb83ea3721242e10fb9294e57f5e863b79b60f5563593c043ffd1a45cc7e5 Jan 20 17:31:14 crc kubenswrapper[4558]: I0120 17:31:14.161184 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"f6508afb-b99c-44cf-9a08-8e239cd9b718","Type":"ContainerStarted","Data":"0da526c952b16dfa06cfc0bd66e840efc4b84941c74883bac142160038ff2363"} Jan 20 17:31:14 crc kubenswrapper[4558]: I0120 17:31:14.161257 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"f6508afb-b99c-44cf-9a08-8e239cd9b718","Type":"ContainerStarted","Data":"fe6b368717df9222d0d373ae663a7090cb34a6e2b7a469795c2fd7476bcba6cf"} Jan 20 17:31:14 crc kubenswrapper[4558]: I0120 17:31:14.161270 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"f6508afb-b99c-44cf-9a08-8e239cd9b718","Type":"ContainerStarted","Data":"c6d1779b6e4f4310d66c25b1005802ab7c04564ba9400bf0a2f5d9c005a05fdd"} Jan 20 17:31:14 crc kubenswrapper[4558]: I0120 17:31:14.165915 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"dfdfb43e-4249-4f0e-b397-09ffd2245fc6","Type":"ContainerStarted","Data":"71359f9180f19fb1eac89d6c9957099d56eebcab537b46ed0226e9a138090ab5"} Jan 20 17:31:14 crc kubenswrapper[4558]: I0120 17:31:14.176463 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"7d0577b7-3358-4cfa-b158-defd76ba6c5e","Type":"ContainerStarted","Data":"49889382778d1334515823c89266efdc5d5ea02682dfe1e77471b2db885683e2"} Jan 20 17:31:14 crc kubenswrapper[4558]: I0120 17:31:14.176510 4558 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"7d0577b7-3358-4cfa-b158-defd76ba6c5e","Type":"ContainerStarted","Data":"c3eed0f10c9cf095c38a4b97d9692bcb73ebdbcab85fb435f0b6abce4c648582"} Jan 20 17:31:14 crc kubenswrapper[4558]: I0120 17:31:14.188865 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"ae0038a3-7264-4263-961d-ff4bd973270e","Type":"ContainerStarted","Data":"eff67ac0c52d545cb6e84b48951c7811d3a813f3451132a6b93b2e8896f528d2"} Jan 20 17:31:14 crc kubenswrapper[4558]: I0120 17:31:14.192094 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-s8ph9" event={"ID":"38224f24-100b-4765-b1f3-886f8941593d","Type":"ContainerStarted","Data":"363fb83ea3721242e10fb9294e57f5e863b79b60f5563593c043ffd1a45cc7e5"} Jan 20 17:31:14 crc kubenswrapper[4558]: I0120 17:31:14.224268 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-api-0" podStartSLOduration=2.224251085 podStartE2EDuration="2.224251085s" podCreationTimestamp="2026-01-20 17:31:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:31:14.191006225 +0000 UTC m=+2967.951344182" watchObservedRunningTime="2026-01-20 17:31:14.224251085 +0000 UTC m=+2967.984589053" Jan 20 17:31:14 crc kubenswrapper[4558]: I0120 17:31:14.226099 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"7d6b9532-22b5-41c9-8b15-83a9c515c52d","Type":"ContainerStarted","Data":"68de8de2f4680620ba6e5b236fe7d12d5fb76c8413d11297d94efa48e9761c9e"} Jan 20 17:31:14 crc kubenswrapper[4558]: I0120 17:31:14.226248 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"7d6b9532-22b5-41c9-8b15-83a9c515c52d","Type":"ContainerStarted","Data":"a97bdbc9a4de273f8137ff9c69110f06a81be653a7f71ff8f59cd55b614c6532"} Jan 20 17:31:14 crc kubenswrapper[4558]: I0120 17:31:14.241231 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-metadata-0" podStartSLOduration=3.241220523 podStartE2EDuration="3.241220523s" podCreationTimestamp="2026-01-20 17:31:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:31:14.222623485 +0000 UTC m=+2967.982961452" watchObservedRunningTime="2026-01-20 17:31:14.241220523 +0000 UTC m=+2968.001558491" Jan 20 17:31:14 crc kubenswrapper[4558]: I0120 17:31:14.269815 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" podStartSLOduration=3.269799611 podStartE2EDuration="3.269799611s" podCreationTimestamp="2026-01-20 17:31:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:31:14.25539179 +0000 UTC m=+2968.015729757" watchObservedRunningTime="2026-01-20 17:31:14.269799611 +0000 UTC m=+2968.030137578" Jan 20 17:31:14 crc kubenswrapper[4558]: I0120 17:31:14.276310 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-b6m6v" event={"ID":"ca7c35ef-9af8-452c-800d-89bfa63317af","Type":"ContainerStarted","Data":"947c28f9113ea23ac9a8fe215ee6bfabb2e7509fa98fad8873bc83baf344bdb1"} 
Jan 20 17:31:14 crc kubenswrapper[4558]: I0120 17:31:14.304827 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-b6m6v" podStartSLOduration=3.304813378 podStartE2EDuration="3.304813378s" podCreationTimestamp="2026-01-20 17:31:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:31:14.297199122 +0000 UTC m=+2968.057537078" watchObservedRunningTime="2026-01-20 17:31:14.304813378 +0000 UTC m=+2968.065151345" Jan 20 17:31:15 crc kubenswrapper[4558]: I0120 17:31:15.289322 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-s8ph9" event={"ID":"38224f24-100b-4765-b1f3-886f8941593d","Type":"ContainerStarted","Data":"e5f8385fa34d72587ed1458c9e852fe135326907c6264adfa2ce9be72b329691"} Jan 20 17:31:15 crc kubenswrapper[4558]: I0120 17:31:15.299868 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"ae0038a3-7264-4263-961d-ff4bd973270e","Type":"ContainerStarted","Data":"62879093a8dcc7d9551e46afe019ff1b446d9ad90e73eec908991c7db72c06c5"} Jan 20 17:31:16 crc kubenswrapper[4558]: I0120 17:31:16.598449 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-s8ph9" podStartSLOduration=4.598427931 podStartE2EDuration="4.598427931s" podCreationTimestamp="2026-01-20 17:31:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:31:15.309794602 +0000 UTC m=+2969.070132588" watchObservedRunningTime="2026-01-20 17:31:16.598427931 +0000 UTC m=+2970.358765899" Jan 20 17:31:17 crc kubenswrapper[4558]: I0120 17:31:17.197236 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:31:17 crc kubenswrapper[4558]: I0120 17:31:17.280240 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:31:17 crc kubenswrapper[4558]: I0120 17:31:17.280302 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:31:17 crc kubenswrapper[4558]: I0120 17:31:17.302353 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:31:17 crc kubenswrapper[4558]: I0120 17:31:17.302386 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:31:17 crc kubenswrapper[4558]: I0120 17:31:17.328936 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:31:17 crc kubenswrapper[4558]: I0120 17:31:17.329814 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:31:17 crc kubenswrapper[4558]: I0120 17:31:17.336121 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"ae0038a3-7264-4263-961d-ff4bd973270e","Type":"ContainerStarted","Data":"e264ab3139725adb126c87a12bb8544eec8bb6f3977044a49a218713e8607e4a"} Jan 20 17:31:17 crc kubenswrapper[4558]: I0120 17:31:17.336181 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:31:17 crc kubenswrapper[4558]: I0120 17:31:17.336196 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:31:17 crc kubenswrapper[4558]: I0120 17:31:17.336431 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:31:17 crc kubenswrapper[4558]: I0120 17:31:17.385984 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=2.050718332 podStartE2EDuration="6.385963247s" podCreationTimestamp="2026-01-20 17:31:11 +0000 UTC" firstStartedPulling="2026-01-20 17:31:11.904409818 +0000 UTC m=+2965.664747786" lastFinishedPulling="2026-01-20 17:31:16.239654733 +0000 UTC m=+2969.999992701" observedRunningTime="2026-01-20 17:31:17.383714008 +0000 UTC m=+2971.144051965" watchObservedRunningTime="2026-01-20 17:31:17.385963247 +0000 UTC m=+2971.146301214" Jan 20 17:31:18 crc kubenswrapper[4558]: I0120 17:31:18.359086 4558 generic.go:334] "Generic (PLEG): container finished" podID="38224f24-100b-4765-b1f3-886f8941593d" containerID="e5f8385fa34d72587ed1458c9e852fe135326907c6264adfa2ce9be72b329691" exitCode=0 Jan 20 17:31:18 crc kubenswrapper[4558]: I0120 17:31:18.359192 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-s8ph9" event={"ID":"38224f24-100b-4765-b1f3-886f8941593d","Type":"ContainerDied","Data":"e5f8385fa34d72587ed1458c9e852fe135326907c6264adfa2ce9be72b329691"} Jan 20 17:31:18 crc kubenswrapper[4558]: I0120 17:31:18.359733 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:31:18 crc kubenswrapper[4558]: I0120 17:31:18.362398 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:31:18 crc kubenswrapper[4558]: I0120 17:31:18.362434 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:31:18 crc kubenswrapper[4558]: I0120 17:31:18.404621 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:31:18 crc kubenswrapper[4558]: I0120 17:31:18.405257 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:31:18 crc kubenswrapper[4558]: I0120 17:31:18.567882 4558 scope.go:117] "RemoveContainer" containerID="f275e9f5de330b3c79316d5871106c6bcf90b698e0744c5beb28da45c336b36d" Jan 20 17:31:18 crc kubenswrapper[4558]: E0120 17:31:18.568250 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:31:18 crc kubenswrapper[4558]: I0120 17:31:18.966103 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:31:19 crc kubenswrapper[4558]: I0120 17:31:19.372207 4558 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:31:19 crc kubenswrapper[4558]: I0120 17:31:19.373046 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:31:19 crc kubenswrapper[4558]: I0120 17:31:19.671960 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-s8ph9" Jan 20 17:31:19 crc kubenswrapper[4558]: I0120 17:31:19.815207 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nt98f\" (UniqueName: \"kubernetes.io/projected/38224f24-100b-4765-b1f3-886f8941593d-kube-api-access-nt98f\") pod \"38224f24-100b-4765-b1f3-886f8941593d\" (UID: \"38224f24-100b-4765-b1f3-886f8941593d\") " Jan 20 17:31:19 crc kubenswrapper[4558]: I0120 17:31:19.815340 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/38224f24-100b-4765-b1f3-886f8941593d-scripts\") pod \"38224f24-100b-4765-b1f3-886f8941593d\" (UID: \"38224f24-100b-4765-b1f3-886f8941593d\") " Jan 20 17:31:19 crc kubenswrapper[4558]: I0120 17:31:19.815575 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38224f24-100b-4765-b1f3-886f8941593d-combined-ca-bundle\") pod \"38224f24-100b-4765-b1f3-886f8941593d\" (UID: \"38224f24-100b-4765-b1f3-886f8941593d\") " Jan 20 17:31:19 crc kubenswrapper[4558]: I0120 17:31:19.815757 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/38224f24-100b-4765-b1f3-886f8941593d-config-data\") pod \"38224f24-100b-4765-b1f3-886f8941593d\" (UID: \"38224f24-100b-4765-b1f3-886f8941593d\") " Jan 20 17:31:19 crc kubenswrapper[4558]: I0120 17:31:19.822267 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/38224f24-100b-4765-b1f3-886f8941593d-scripts" (OuterVolumeSpecName: "scripts") pod "38224f24-100b-4765-b1f3-886f8941593d" (UID: "38224f24-100b-4765-b1f3-886f8941593d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:31:19 crc kubenswrapper[4558]: I0120 17:31:19.822459 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/38224f24-100b-4765-b1f3-886f8941593d-kube-api-access-nt98f" (OuterVolumeSpecName: "kube-api-access-nt98f") pod "38224f24-100b-4765-b1f3-886f8941593d" (UID: "38224f24-100b-4765-b1f3-886f8941593d"). InnerVolumeSpecName "kube-api-access-nt98f". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:31:19 crc kubenswrapper[4558]: I0120 17:31:19.840409 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/38224f24-100b-4765-b1f3-886f8941593d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "38224f24-100b-4765-b1f3-886f8941593d" (UID: "38224f24-100b-4765-b1f3-886f8941593d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:31:19 crc kubenswrapper[4558]: I0120 17:31:19.841246 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/38224f24-100b-4765-b1f3-886f8941593d-config-data" (OuterVolumeSpecName: "config-data") pod "38224f24-100b-4765-b1f3-886f8941593d" (UID: "38224f24-100b-4765-b1f3-886f8941593d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:31:19 crc kubenswrapper[4558]: I0120 17:31:19.917926 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/38224f24-100b-4765-b1f3-886f8941593d-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:31:19 crc kubenswrapper[4558]: I0120 17:31:19.917964 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38224f24-100b-4765-b1f3-886f8941593d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:31:19 crc kubenswrapper[4558]: I0120 17:31:19.917980 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/38224f24-100b-4765-b1f3-886f8941593d-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:31:19 crc kubenswrapper[4558]: I0120 17:31:19.917992 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nt98f\" (UniqueName: \"kubernetes.io/projected/38224f24-100b-4765-b1f3-886f8941593d-kube-api-access-nt98f\") on node \"crc\" DevicePath \"\"" Jan 20 17:31:20 crc kubenswrapper[4558]: I0120 17:31:20.052382 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:31:20 crc kubenswrapper[4558]: I0120 17:31:20.383261 4558 generic.go:334] "Generic (PLEG): container finished" podID="ca7c35ef-9af8-452c-800d-89bfa63317af" containerID="947c28f9113ea23ac9a8fe215ee6bfabb2e7509fa98fad8873bc83baf344bdb1" exitCode=0 Jan 20 17:31:20 crc kubenswrapper[4558]: I0120 17:31:20.383358 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-b6m6v" event={"ID":"ca7c35ef-9af8-452c-800d-89bfa63317af","Type":"ContainerDied","Data":"947c28f9113ea23ac9a8fe215ee6bfabb2e7509fa98fad8873bc83baf344bdb1"} Jan 20 17:31:20 crc kubenswrapper[4558]: I0120 17:31:20.388670 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-s8ph9" Jan 20 17:31:20 crc kubenswrapper[4558]: I0120 17:31:20.389026 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-s8ph9" event={"ID":"38224f24-100b-4765-b1f3-886f8941593d","Type":"ContainerDied","Data":"363fb83ea3721242e10fb9294e57f5e863b79b60f5563593c043ffd1a45cc7e5"} Jan 20 17:31:20 crc kubenswrapper[4558]: I0120 17:31:20.389173 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="363fb83ea3721242e10fb9294e57f5e863b79b60f5563593c043ffd1a45cc7e5" Jan 20 17:31:20 crc kubenswrapper[4558]: I0120 17:31:20.454341 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:31:20 crc kubenswrapper[4558]: E0120 17:31:20.454908 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38224f24-100b-4765-b1f3-886f8941593d" containerName="nova-cell1-conductor-db-sync" Jan 20 17:31:20 crc kubenswrapper[4558]: I0120 17:31:20.454930 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="38224f24-100b-4765-b1f3-886f8941593d" containerName="nova-cell1-conductor-db-sync" Jan 20 17:31:20 crc kubenswrapper[4558]: I0120 17:31:20.455195 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="38224f24-100b-4765-b1f3-886f8941593d" containerName="nova-cell1-conductor-db-sync" Jan 20 17:31:20 crc kubenswrapper[4558]: I0120 17:31:20.455993 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:31:20 crc kubenswrapper[4558]: I0120 17:31:20.458411 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-conductor-config-data" Jan 20 17:31:20 crc kubenswrapper[4558]: I0120 17:31:20.463840 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:31:20 crc kubenswrapper[4558]: I0120 17:31:20.630843 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fkg7v\" (UniqueName: \"kubernetes.io/projected/a7f2bbf6-acbc-487a-a2ea-08e49a055eb5-kube-api-access-fkg7v\") pod \"nova-cell1-conductor-0\" (UID: \"a7f2bbf6-acbc-487a-a2ea-08e49a055eb5\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:31:20 crc kubenswrapper[4558]: I0120 17:31:20.631014 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7f2bbf6-acbc-487a-a2ea-08e49a055eb5-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"a7f2bbf6-acbc-487a-a2ea-08e49a055eb5\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:31:20 crc kubenswrapper[4558]: I0120 17:31:20.631378 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7f2bbf6-acbc-487a-a2ea-08e49a055eb5-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"a7f2bbf6-acbc-487a-a2ea-08e49a055eb5\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:31:20 crc kubenswrapper[4558]: I0120 17:31:20.732724 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7f2bbf6-acbc-487a-a2ea-08e49a055eb5-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"a7f2bbf6-acbc-487a-a2ea-08e49a055eb5\") " 
pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:31:20 crc kubenswrapper[4558]: I0120 17:31:20.732904 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fkg7v\" (UniqueName: \"kubernetes.io/projected/a7f2bbf6-acbc-487a-a2ea-08e49a055eb5-kube-api-access-fkg7v\") pod \"nova-cell1-conductor-0\" (UID: \"a7f2bbf6-acbc-487a-a2ea-08e49a055eb5\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:31:20 crc kubenswrapper[4558]: I0120 17:31:20.733018 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7f2bbf6-acbc-487a-a2ea-08e49a055eb5-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"a7f2bbf6-acbc-487a-a2ea-08e49a055eb5\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:31:20 crc kubenswrapper[4558]: I0120 17:31:20.738395 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7f2bbf6-acbc-487a-a2ea-08e49a055eb5-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"a7f2bbf6-acbc-487a-a2ea-08e49a055eb5\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:31:20 crc kubenswrapper[4558]: I0120 17:31:20.745664 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7f2bbf6-acbc-487a-a2ea-08e49a055eb5-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"a7f2bbf6-acbc-487a-a2ea-08e49a055eb5\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:31:20 crc kubenswrapper[4558]: I0120 17:31:20.754003 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fkg7v\" (UniqueName: \"kubernetes.io/projected/a7f2bbf6-acbc-487a-a2ea-08e49a055eb5-kube-api-access-fkg7v\") pod \"nova-cell1-conductor-0\" (UID: \"a7f2bbf6-acbc-487a-a2ea-08e49a055eb5\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:31:20 crc kubenswrapper[4558]: I0120 17:31:20.786471 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:31:21 crc kubenswrapper[4558]: I0120 17:31:21.207108 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:31:21 crc kubenswrapper[4558]: I0120 17:31:21.249266 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:31:21 crc kubenswrapper[4558]: I0120 17:31:21.253050 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:31:21 crc kubenswrapper[4558]: I0120 17:31:21.399028 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"a7f2bbf6-acbc-487a-a2ea-08e49a055eb5","Type":"ContainerStarted","Data":"3fc2c01e810b816844a3e05f96c2ab94fb7c1503af2abfcc136c12d107ebb7de"} Jan 20 17:31:21 crc kubenswrapper[4558]: I0120 17:31:21.399332 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"a7f2bbf6-acbc-487a-a2ea-08e49a055eb5","Type":"ContainerStarted","Data":"06c9e6b547f7af597996fe49f7b2a4563702259d899bacb7ba9b74bb01ae1d53"} Jan 20 17:31:21 crc kubenswrapper[4558]: I0120 17:31:21.440404 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podStartSLOduration=1.440384404 podStartE2EDuration="1.440384404s" podCreationTimestamp="2026-01-20 17:31:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:31:21.438699777 +0000 UTC m=+2975.199037744" watchObservedRunningTime="2026-01-20 17:31:21.440384404 +0000 UTC m=+2975.200722361" Jan 20 17:31:21 crc kubenswrapper[4558]: I0120 17:31:21.674086 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-b6m6v" Jan 20 17:31:21 crc kubenswrapper[4558]: I0120 17:31:21.762913 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bzs8v\" (UniqueName: \"kubernetes.io/projected/ca7c35ef-9af8-452c-800d-89bfa63317af-kube-api-access-bzs8v\") pod \"ca7c35ef-9af8-452c-800d-89bfa63317af\" (UID: \"ca7c35ef-9af8-452c-800d-89bfa63317af\") " Jan 20 17:31:21 crc kubenswrapper[4558]: I0120 17:31:21.762998 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca7c35ef-9af8-452c-800d-89bfa63317af-config-data\") pod \"ca7c35ef-9af8-452c-800d-89bfa63317af\" (UID: \"ca7c35ef-9af8-452c-800d-89bfa63317af\") " Jan 20 17:31:21 crc kubenswrapper[4558]: I0120 17:31:21.763194 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ca7c35ef-9af8-452c-800d-89bfa63317af-scripts\") pod \"ca7c35ef-9af8-452c-800d-89bfa63317af\" (UID: \"ca7c35ef-9af8-452c-800d-89bfa63317af\") " Jan 20 17:31:21 crc kubenswrapper[4558]: I0120 17:31:21.763235 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca7c35ef-9af8-452c-800d-89bfa63317af-combined-ca-bundle\") pod \"ca7c35ef-9af8-452c-800d-89bfa63317af\" (UID: \"ca7c35ef-9af8-452c-800d-89bfa63317af\") " Jan 20 17:31:21 crc kubenswrapper[4558]: I0120 17:31:21.769842 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca7c35ef-9af8-452c-800d-89bfa63317af-scripts" (OuterVolumeSpecName: "scripts") pod "ca7c35ef-9af8-452c-800d-89bfa63317af" (UID: "ca7c35ef-9af8-452c-800d-89bfa63317af"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:31:21 crc kubenswrapper[4558]: I0120 17:31:21.769948 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ca7c35ef-9af8-452c-800d-89bfa63317af-kube-api-access-bzs8v" (OuterVolumeSpecName: "kube-api-access-bzs8v") pod "ca7c35ef-9af8-452c-800d-89bfa63317af" (UID: "ca7c35ef-9af8-452c-800d-89bfa63317af"). InnerVolumeSpecName "kube-api-access-bzs8v". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:31:21 crc kubenswrapper[4558]: I0120 17:31:21.788839 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca7c35ef-9af8-452c-800d-89bfa63317af-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ca7c35ef-9af8-452c-800d-89bfa63317af" (UID: "ca7c35ef-9af8-452c-800d-89bfa63317af"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:31:21 crc kubenswrapper[4558]: I0120 17:31:21.793300 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca7c35ef-9af8-452c-800d-89bfa63317af-config-data" (OuterVolumeSpecName: "config-data") pod "ca7c35ef-9af8-452c-800d-89bfa63317af" (UID: "ca7c35ef-9af8-452c-800d-89bfa63317af"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:31:21 crc kubenswrapper[4558]: I0120 17:31:21.865388 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ca7c35ef-9af8-452c-800d-89bfa63317af-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:31:21 crc kubenswrapper[4558]: I0120 17:31:21.865431 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca7c35ef-9af8-452c-800d-89bfa63317af-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:31:21 crc kubenswrapper[4558]: I0120 17:31:21.865445 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bzs8v\" (UniqueName: \"kubernetes.io/projected/ca7c35ef-9af8-452c-800d-89bfa63317af-kube-api-access-bzs8v\") on node \"crc\" DevicePath \"\"" Jan 20 17:31:21 crc kubenswrapper[4558]: I0120 17:31:21.865459 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca7c35ef-9af8-452c-800d-89bfa63317af-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:31:22 crc kubenswrapper[4558]: I0120 17:31:22.197221 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:31:22 crc kubenswrapper[4558]: I0120 17:31:22.224838 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:31:22 crc kubenswrapper[4558]: I0120 17:31:22.280144 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:31:22 crc kubenswrapper[4558]: I0120 17:31:22.280218 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:31:22 crc kubenswrapper[4558]: I0120 17:31:22.332433 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:31:22 crc kubenswrapper[4558]: I0120 17:31:22.341584 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:31:22 crc kubenswrapper[4558]: I0120 17:31:22.412003 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-b6m6v" Jan 20 17:31:22 crc kubenswrapper[4558]: I0120 17:31:22.412796 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-b6m6v" event={"ID":"ca7c35ef-9af8-452c-800d-89bfa63317af","Type":"ContainerDied","Data":"945aa15fd9cb055547c8696f32e183367020a4dd1b524ff406d36b13f835a4c3"} Jan 20 17:31:22 crc kubenswrapper[4558]: I0120 17:31:22.412826 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="945aa15fd9cb055547c8696f32e183367020a4dd1b524ff406d36b13f835a4c3" Jan 20 17:31:22 crc kubenswrapper[4558]: I0120 17:31:22.412844 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:31:22 crc kubenswrapper[4558]: I0120 17:31:22.422871 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:31:22 crc kubenswrapper[4558]: I0120 17:31:22.450606 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:31:22 crc kubenswrapper[4558]: I0120 17:31:22.450664 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:31:22 crc kubenswrapper[4558]: I0120 17:31:22.461743 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:31:22 crc kubenswrapper[4558]: I0120 17:31:22.580051 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:31:22 crc kubenswrapper[4558]: I0120 17:31:22.625315 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:31:22 crc kubenswrapper[4558]: I0120 17:31:22.625701 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="7d0577b7-3358-4cfa-b158-defd76ba6c5e" containerName="nova-metadata-log" containerID="cri-o://c3eed0f10c9cf095c38a4b97d9692bcb73ebdbcab85fb435f0b6abce4c648582" gracePeriod=30 Jan 20 17:31:22 crc kubenswrapper[4558]: I0120 17:31:22.625773 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="7d0577b7-3358-4cfa-b158-defd76ba6c5e" containerName="nova-metadata-metadata" containerID="cri-o://49889382778d1334515823c89266efdc5d5ea02682dfe1e77471b2db885683e2" gracePeriod=30 Jan 20 17:31:22 crc kubenswrapper[4558]: I0120 17:31:22.631896 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-metadata-0" podUID="7d0577b7-3358-4cfa-b158-defd76ba6c5e" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.193:8775/\": EOF" Jan 20 17:31:22 crc kubenswrapper[4558]: I0120 17:31:22.633290 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-metadata-0" podUID="7d0577b7-3358-4cfa-b158-defd76ba6c5e" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.193:8775/\": EOF" Jan 20 17:31:22 crc kubenswrapper[4558]: I0120 17:31:22.873654 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:31:23 crc kubenswrapper[4558]: I0120 17:31:23.423630 4558 generic.go:334] "Generic (PLEG): container finished" podID="7d0577b7-3358-4cfa-b158-defd76ba6c5e" 
containerID="c3eed0f10c9cf095c38a4b97d9692bcb73ebdbcab85fb435f0b6abce4c648582" exitCode=143 Jan 20 17:31:23 crc kubenswrapper[4558]: I0120 17:31:23.423718 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"7d0577b7-3358-4cfa-b158-defd76ba6c5e","Type":"ContainerDied","Data":"c3eed0f10c9cf095c38a4b97d9692bcb73ebdbcab85fb435f0b6abce4c648582"} Jan 20 17:31:23 crc kubenswrapper[4558]: I0120 17:31:23.425061 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="f6508afb-b99c-44cf-9a08-8e239cd9b718" containerName="nova-api-log" containerID="cri-o://fe6b368717df9222d0d373ae663a7090cb34a6e2b7a469795c2fd7476bcba6cf" gracePeriod=30 Jan 20 17:31:23 crc kubenswrapper[4558]: I0120 17:31:23.425209 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="f6508afb-b99c-44cf-9a08-8e239cd9b718" containerName="nova-api-api" containerID="cri-o://0da526c952b16dfa06cfc0bd66e840efc4b84941c74883bac142160038ff2363" gracePeriod=30 Jan 20 17:31:23 crc kubenswrapper[4558]: I0120 17:31:23.429773 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" podUID="f6508afb-b99c-44cf-9a08-8e239cd9b718" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.1.195:8774/\": EOF" Jan 20 17:31:23 crc kubenswrapper[4558]: I0120 17:31:23.429812 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" podUID="f6508afb-b99c-44cf-9a08-8e239cd9b718" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.1.195:8774/\": EOF" Jan 20 17:31:24 crc kubenswrapper[4558]: I0120 17:31:24.446040 4558 generic.go:334] "Generic (PLEG): container finished" podID="f6508afb-b99c-44cf-9a08-8e239cd9b718" containerID="fe6b368717df9222d0d373ae663a7090cb34a6e2b7a469795c2fd7476bcba6cf" exitCode=143 Jan 20 17:31:24 crc kubenswrapper[4558]: I0120 17:31:24.447148 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="dfdfb43e-4249-4f0e-b397-09ffd2245fc6" containerName="nova-scheduler-scheduler" containerID="cri-o://71359f9180f19fb1eac89d6c9957099d56eebcab537b46ed0226e9a138090ab5" gracePeriod=30 Jan 20 17:31:24 crc kubenswrapper[4558]: I0120 17:31:24.447586 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"f6508afb-b99c-44cf-9a08-8e239cd9b718","Type":"ContainerDied","Data":"fe6b368717df9222d0d373ae663a7090cb34a6e2b7a469795c2fd7476bcba6cf"} Jan 20 17:31:27 crc kubenswrapper[4558]: E0120 17:31:27.199157 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="71359f9180f19fb1eac89d6c9957099d56eebcab537b46ed0226e9a138090ab5" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:31:27 crc kubenswrapper[4558]: E0120 17:31:27.200835 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="71359f9180f19fb1eac89d6c9957099d56eebcab537b46ed0226e9a138090ab5" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:31:27 crc kubenswrapper[4558]: E0120 17:31:27.202658 4558 log.go:32] "ExecSync cmd from runtime service 
failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="71359f9180f19fb1eac89d6c9957099d56eebcab537b46ed0226e9a138090ab5" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:31:27 crc kubenswrapper[4558]: E0120 17:31:27.202687 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="dfdfb43e-4249-4f0e-b397-09ffd2245fc6" containerName="nova-scheduler-scheduler" Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.138724 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.294568 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f6508afb-b99c-44cf-9a08-8e239cd9b718-combined-ca-bundle\") pod \"f6508afb-b99c-44cf-9a08-8e239cd9b718\" (UID: \"f6508afb-b99c-44cf-9a08-8e239cd9b718\") " Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.294967 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ck2qm\" (UniqueName: \"kubernetes.io/projected/f6508afb-b99c-44cf-9a08-8e239cd9b718-kube-api-access-ck2qm\") pod \"f6508afb-b99c-44cf-9a08-8e239cd9b718\" (UID: \"f6508afb-b99c-44cf-9a08-8e239cd9b718\") " Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.295014 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f6508afb-b99c-44cf-9a08-8e239cd9b718-config-data\") pod \"f6508afb-b99c-44cf-9a08-8e239cd9b718\" (UID: \"f6508afb-b99c-44cf-9a08-8e239cd9b718\") " Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.295270 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f6508afb-b99c-44cf-9a08-8e239cd9b718-logs\") pod \"f6508afb-b99c-44cf-9a08-8e239cd9b718\" (UID: \"f6508afb-b99c-44cf-9a08-8e239cd9b718\") " Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.296460 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f6508afb-b99c-44cf-9a08-8e239cd9b718-logs" (OuterVolumeSpecName: "logs") pod "f6508afb-b99c-44cf-9a08-8e239cd9b718" (UID: "f6508afb-b99c-44cf-9a08-8e239cd9b718"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.301916 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f6508afb-b99c-44cf-9a08-8e239cd9b718-kube-api-access-ck2qm" (OuterVolumeSpecName: "kube-api-access-ck2qm") pod "f6508afb-b99c-44cf-9a08-8e239cd9b718" (UID: "f6508afb-b99c-44cf-9a08-8e239cd9b718"). InnerVolumeSpecName "kube-api-access-ck2qm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.321723 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f6508afb-b99c-44cf-9a08-8e239cd9b718-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f6508afb-b99c-44cf-9a08-8e239cd9b718" (UID: "f6508afb-b99c-44cf-9a08-8e239cd9b718"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.323983 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f6508afb-b99c-44cf-9a08-8e239cd9b718-config-data" (OuterVolumeSpecName: "config-data") pod "f6508afb-b99c-44cf-9a08-8e239cd9b718" (UID: "f6508afb-b99c-44cf-9a08-8e239cd9b718"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.389303 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.398556 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ck2qm\" (UniqueName: \"kubernetes.io/projected/f6508afb-b99c-44cf-9a08-8e239cd9b718-kube-api-access-ck2qm\") on node \"crc\" DevicePath \"\"" Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.398594 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f6508afb-b99c-44cf-9a08-8e239cd9b718-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.398609 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f6508afb-b99c-44cf-9a08-8e239cd9b718-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.398623 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f6508afb-b99c-44cf-9a08-8e239cd9b718-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.491858 4558 generic.go:334] "Generic (PLEG): container finished" podID="7d0577b7-3358-4cfa-b158-defd76ba6c5e" containerID="49889382778d1334515823c89266efdc5d5ea02682dfe1e77471b2db885683e2" exitCode=0 Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.491923 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.491928 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"7d0577b7-3358-4cfa-b158-defd76ba6c5e","Type":"ContainerDied","Data":"49889382778d1334515823c89266efdc5d5ea02682dfe1e77471b2db885683e2"} Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.492041 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"7d0577b7-3358-4cfa-b158-defd76ba6c5e","Type":"ContainerDied","Data":"5bc52db2603c8d870aea1111d772b5c72f0854d3c55f540593caa7499e5eea0e"} Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.492062 4558 scope.go:117] "RemoveContainer" containerID="49889382778d1334515823c89266efdc5d5ea02682dfe1e77471b2db885683e2" Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.494062 4558 generic.go:334] "Generic (PLEG): container finished" podID="f6508afb-b99c-44cf-9a08-8e239cd9b718" containerID="0da526c952b16dfa06cfc0bd66e840efc4b84941c74883bac142160038ff2363" exitCode=0 Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.494110 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.494110 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"f6508afb-b99c-44cf-9a08-8e239cd9b718","Type":"ContainerDied","Data":"0da526c952b16dfa06cfc0bd66e840efc4b84941c74883bac142160038ff2363"} Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.494142 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"f6508afb-b99c-44cf-9a08-8e239cd9b718","Type":"ContainerDied","Data":"c6d1779b6e4f4310d66c25b1005802ab7c04564ba9400bf0a2f5d9c005a05fdd"} Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.499769 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d0577b7-3358-4cfa-b158-defd76ba6c5e-config-data\") pod \"7d0577b7-3358-4cfa-b158-defd76ba6c5e\" (UID: \"7d0577b7-3358-4cfa-b158-defd76ba6c5e\") " Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.499857 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d0577b7-3358-4cfa-b158-defd76ba6c5e-combined-ca-bundle\") pod \"7d0577b7-3358-4cfa-b158-defd76ba6c5e\" (UID: \"7d0577b7-3358-4cfa-b158-defd76ba6c5e\") " Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.499942 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zpbgm\" (UniqueName: \"kubernetes.io/projected/7d0577b7-3358-4cfa-b158-defd76ba6c5e-kube-api-access-zpbgm\") pod \"7d0577b7-3358-4cfa-b158-defd76ba6c5e\" (UID: \"7d0577b7-3358-4cfa-b158-defd76ba6c5e\") " Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.500094 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7d0577b7-3358-4cfa-b158-defd76ba6c5e-logs\") pod \"7d0577b7-3358-4cfa-b158-defd76ba6c5e\" (UID: \"7d0577b7-3358-4cfa-b158-defd76ba6c5e\") " Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.500463 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7d0577b7-3358-4cfa-b158-defd76ba6c5e-logs" (OuterVolumeSpecName: "logs") pod "7d0577b7-3358-4cfa-b158-defd76ba6c5e" (UID: "7d0577b7-3358-4cfa-b158-defd76ba6c5e"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.500690 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7d0577b7-3358-4cfa-b158-defd76ba6c5e-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.502475 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7d0577b7-3358-4cfa-b158-defd76ba6c5e-kube-api-access-zpbgm" (OuterVolumeSpecName: "kube-api-access-zpbgm") pod "7d0577b7-3358-4cfa-b158-defd76ba6c5e" (UID: "7d0577b7-3358-4cfa-b158-defd76ba6c5e"). InnerVolumeSpecName "kube-api-access-zpbgm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.512041 4558 scope.go:117] "RemoveContainer" containerID="c3eed0f10c9cf095c38a4b97d9692bcb73ebdbcab85fb435f0b6abce4c648582" Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.519407 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d0577b7-3358-4cfa-b158-defd76ba6c5e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7d0577b7-3358-4cfa-b158-defd76ba6c5e" (UID: "7d0577b7-3358-4cfa-b158-defd76ba6c5e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.520399 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d0577b7-3358-4cfa-b158-defd76ba6c5e-config-data" (OuterVolumeSpecName: "config-data") pod "7d0577b7-3358-4cfa-b158-defd76ba6c5e" (UID: "7d0577b7-3358-4cfa-b158-defd76ba6c5e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.552152 4558 scope.go:117] "RemoveContainer" containerID="49889382778d1334515823c89266efdc5d5ea02682dfe1e77471b2db885683e2" Jan 20 17:31:28 crc kubenswrapper[4558]: E0120 17:31:28.552529 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"49889382778d1334515823c89266efdc5d5ea02682dfe1e77471b2db885683e2\": container with ID starting with 49889382778d1334515823c89266efdc5d5ea02682dfe1e77471b2db885683e2 not found: ID does not exist" containerID="49889382778d1334515823c89266efdc5d5ea02682dfe1e77471b2db885683e2" Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.552585 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"49889382778d1334515823c89266efdc5d5ea02682dfe1e77471b2db885683e2"} err="failed to get container status \"49889382778d1334515823c89266efdc5d5ea02682dfe1e77471b2db885683e2\": rpc error: code = NotFound desc = could not find container \"49889382778d1334515823c89266efdc5d5ea02682dfe1e77471b2db885683e2\": container with ID starting with 49889382778d1334515823c89266efdc5d5ea02682dfe1e77471b2db885683e2 not found: ID does not exist" Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.552634 4558 scope.go:117] "RemoveContainer" containerID="c3eed0f10c9cf095c38a4b97d9692bcb73ebdbcab85fb435f0b6abce4c648582" Jan 20 17:31:28 crc kubenswrapper[4558]: E0120 17:31:28.553063 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c3eed0f10c9cf095c38a4b97d9692bcb73ebdbcab85fb435f0b6abce4c648582\": container with ID starting with c3eed0f10c9cf095c38a4b97d9692bcb73ebdbcab85fb435f0b6abce4c648582 not found: ID does not exist" containerID="c3eed0f10c9cf095c38a4b97d9692bcb73ebdbcab85fb435f0b6abce4c648582" Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.553111 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c3eed0f10c9cf095c38a4b97d9692bcb73ebdbcab85fb435f0b6abce4c648582"} err="failed to get container status \"c3eed0f10c9cf095c38a4b97d9692bcb73ebdbcab85fb435f0b6abce4c648582\": rpc error: code = NotFound desc = could not find container \"c3eed0f10c9cf095c38a4b97d9692bcb73ebdbcab85fb435f0b6abce4c648582\": container with ID starting with c3eed0f10c9cf095c38a4b97d9692bcb73ebdbcab85fb435f0b6abce4c648582 
not found: ID does not exist" Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.553143 4558 scope.go:117] "RemoveContainer" containerID="0da526c952b16dfa06cfc0bd66e840efc4b84941c74883bac142160038ff2363" Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.559813 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.603571 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d0577b7-3358-4cfa-b158-defd76ba6c5e-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.603622 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d0577b7-3358-4cfa-b158-defd76ba6c5e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.603640 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zpbgm\" (UniqueName: \"kubernetes.io/projected/7d0577b7-3358-4cfa-b158-defd76ba6c5e-kube-api-access-zpbgm\") on node \"crc\" DevicePath \"\"" Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.604915 4558 scope.go:117] "RemoveContainer" containerID="fe6b368717df9222d0d373ae663a7090cb34a6e2b7a469795c2fd7476bcba6cf" Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.611707 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.617816 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:31:28 crc kubenswrapper[4558]: E0120 17:31:28.618342 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6508afb-b99c-44cf-9a08-8e239cd9b718" containerName="nova-api-api" Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.618365 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6508afb-b99c-44cf-9a08-8e239cd9b718" containerName="nova-api-api" Jan 20 17:31:28 crc kubenswrapper[4558]: E0120 17:31:28.618385 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d0577b7-3358-4cfa-b158-defd76ba6c5e" containerName="nova-metadata-metadata" Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.618393 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d0577b7-3358-4cfa-b158-defd76ba6c5e" containerName="nova-metadata-metadata" Jan 20 17:31:28 crc kubenswrapper[4558]: E0120 17:31:28.618427 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca7c35ef-9af8-452c-800d-89bfa63317af" containerName="nova-manage" Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.618435 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca7c35ef-9af8-452c-800d-89bfa63317af" containerName="nova-manage" Jan 20 17:31:28 crc kubenswrapper[4558]: E0120 17:31:28.618465 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d0577b7-3358-4cfa-b158-defd76ba6c5e" containerName="nova-metadata-log" Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.618473 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d0577b7-3358-4cfa-b158-defd76ba6c5e" containerName="nova-metadata-log" Jan 20 17:31:28 crc kubenswrapper[4558]: E0120 17:31:28.618481 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6508afb-b99c-44cf-9a08-8e239cd9b718" containerName="nova-api-log" Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.618488 4558 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="f6508afb-b99c-44cf-9a08-8e239cd9b718" containerName="nova-api-log" Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.618716 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="7d0577b7-3358-4cfa-b158-defd76ba6c5e" containerName="nova-metadata-log" Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.618738 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ca7c35ef-9af8-452c-800d-89bfa63317af" containerName="nova-manage" Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.618751 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f6508afb-b99c-44cf-9a08-8e239cd9b718" containerName="nova-api-log" Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.618763 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f6508afb-b99c-44cf-9a08-8e239cd9b718" containerName="nova-api-api" Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.618796 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="7d0577b7-3358-4cfa-b158-defd76ba6c5e" containerName="nova-metadata-metadata" Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.620054 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.622218 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-api-config-data" Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.625493 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.628389 4558 scope.go:117] "RemoveContainer" containerID="0da526c952b16dfa06cfc0bd66e840efc4b84941c74883bac142160038ff2363" Jan 20 17:31:28 crc kubenswrapper[4558]: E0120 17:31:28.630192 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0da526c952b16dfa06cfc0bd66e840efc4b84941c74883bac142160038ff2363\": container with ID starting with 0da526c952b16dfa06cfc0bd66e840efc4b84941c74883bac142160038ff2363 not found: ID does not exist" containerID="0da526c952b16dfa06cfc0bd66e840efc4b84941c74883bac142160038ff2363" Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.630280 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0da526c952b16dfa06cfc0bd66e840efc4b84941c74883bac142160038ff2363"} err="failed to get container status \"0da526c952b16dfa06cfc0bd66e840efc4b84941c74883bac142160038ff2363\": rpc error: code = NotFound desc = could not find container \"0da526c952b16dfa06cfc0bd66e840efc4b84941c74883bac142160038ff2363\": container with ID starting with 0da526c952b16dfa06cfc0bd66e840efc4b84941c74883bac142160038ff2363 not found: ID does not exist" Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.630362 4558 scope.go:117] "RemoveContainer" containerID="fe6b368717df9222d0d373ae663a7090cb34a6e2b7a469795c2fd7476bcba6cf" Jan 20 17:31:28 crc kubenswrapper[4558]: E0120 17:31:28.633752 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fe6b368717df9222d0d373ae663a7090cb34a6e2b7a469795c2fd7476bcba6cf\": container with ID starting with fe6b368717df9222d0d373ae663a7090cb34a6e2b7a469795c2fd7476bcba6cf not found: ID does not exist" containerID="fe6b368717df9222d0d373ae663a7090cb34a6e2b7a469795c2fd7476bcba6cf" Jan 20 17:31:28 crc kubenswrapper[4558]: 
I0120 17:31:28.633842 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fe6b368717df9222d0d373ae663a7090cb34a6e2b7a469795c2fd7476bcba6cf"} err="failed to get container status \"fe6b368717df9222d0d373ae663a7090cb34a6e2b7a469795c2fd7476bcba6cf\": rpc error: code = NotFound desc = could not find container \"fe6b368717df9222d0d373ae663a7090cb34a6e2b7a469795c2fd7476bcba6cf\": container with ID starting with fe6b368717df9222d0d373ae663a7090cb34a6e2b7a469795c2fd7476bcba6cf not found: ID does not exist" Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.705234 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1618e8e-b2ef-429f-92e0-7282453257ec-config-data\") pod \"nova-api-0\" (UID: \"a1618e8e-b2ef-429f-92e0-7282453257ec\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.705292 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-crn87\" (UniqueName: \"kubernetes.io/projected/a1618e8e-b2ef-429f-92e0-7282453257ec-kube-api-access-crn87\") pod \"nova-api-0\" (UID: \"a1618e8e-b2ef-429f-92e0-7282453257ec\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.705529 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a1618e8e-b2ef-429f-92e0-7282453257ec-logs\") pod \"nova-api-0\" (UID: \"a1618e8e-b2ef-429f-92e0-7282453257ec\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.705661 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1618e8e-b2ef-429f-92e0-7282453257ec-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"a1618e8e-b2ef-429f-92e0-7282453257ec\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.808454 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1618e8e-b2ef-429f-92e0-7282453257ec-config-data\") pod \"nova-api-0\" (UID: \"a1618e8e-b2ef-429f-92e0-7282453257ec\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.808533 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-crn87\" (UniqueName: \"kubernetes.io/projected/a1618e8e-b2ef-429f-92e0-7282453257ec-kube-api-access-crn87\") pod \"nova-api-0\" (UID: \"a1618e8e-b2ef-429f-92e0-7282453257ec\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.808640 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a1618e8e-b2ef-429f-92e0-7282453257ec-logs\") pod \"nova-api-0\" (UID: \"a1618e8e-b2ef-429f-92e0-7282453257ec\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.808703 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1618e8e-b2ef-429f-92e0-7282453257ec-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"a1618e8e-b2ef-429f-92e0-7282453257ec\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:31:28 crc 
kubenswrapper[4558]: I0120 17:31:28.809299 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a1618e8e-b2ef-429f-92e0-7282453257ec-logs\") pod \"nova-api-0\" (UID: \"a1618e8e-b2ef-429f-92e0-7282453257ec\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.813749 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1618e8e-b2ef-429f-92e0-7282453257ec-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"a1618e8e-b2ef-429f-92e0-7282453257ec\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.813886 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1618e8e-b2ef-429f-92e0-7282453257ec-config-data\") pod \"nova-api-0\" (UID: \"a1618e8e-b2ef-429f-92e0-7282453257ec\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.818353 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.828567 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.828583 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-crn87\" (UniqueName: \"kubernetes.io/projected/a1618e8e-b2ef-429f-92e0-7282453257ec-kube-api-access-crn87\") pod \"nova-api-0\" (UID: \"a1618e8e-b2ef-429f-92e0-7282453257ec\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.842248 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.844301 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.846309 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-metadata-config-data" Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.852219 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.910686 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cf070a99-9875-47c6-bff7-c499c916cfe3-logs\") pod \"nova-metadata-0\" (UID: \"cf070a99-9875-47c6-bff7-c499c916cfe3\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.910726 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf070a99-9875-47c6-bff7-c499c916cfe3-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"cf070a99-9875-47c6-bff7-c499c916cfe3\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.910778 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf070a99-9875-47c6-bff7-c499c916cfe3-config-data\") pod \"nova-metadata-0\" (UID: \"cf070a99-9875-47c6-bff7-c499c916cfe3\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.910818 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fkccd\" (UniqueName: \"kubernetes.io/projected/cf070a99-9875-47c6-bff7-c499c916cfe3-kube-api-access-fkccd\") pod \"nova-metadata-0\" (UID: \"cf070a99-9875-47c6-bff7-c499c916cfe3\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:31:28 crc kubenswrapper[4558]: I0120 17:31:28.939960 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:31:29 crc kubenswrapper[4558]: I0120 17:31:29.012455 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cf070a99-9875-47c6-bff7-c499c916cfe3-logs\") pod \"nova-metadata-0\" (UID: \"cf070a99-9875-47c6-bff7-c499c916cfe3\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:31:29 crc kubenswrapper[4558]: I0120 17:31:29.012499 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf070a99-9875-47c6-bff7-c499c916cfe3-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"cf070a99-9875-47c6-bff7-c499c916cfe3\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:31:29 crc kubenswrapper[4558]: I0120 17:31:29.012544 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf070a99-9875-47c6-bff7-c499c916cfe3-config-data\") pod \"nova-metadata-0\" (UID: \"cf070a99-9875-47c6-bff7-c499c916cfe3\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:31:29 crc kubenswrapper[4558]: I0120 17:31:29.012589 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fkccd\" (UniqueName: \"kubernetes.io/projected/cf070a99-9875-47c6-bff7-c499c916cfe3-kube-api-access-fkccd\") pod \"nova-metadata-0\" (UID: \"cf070a99-9875-47c6-bff7-c499c916cfe3\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:31:29 crc kubenswrapper[4558]: I0120 17:31:29.012896 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cf070a99-9875-47c6-bff7-c499c916cfe3-logs\") pod \"nova-metadata-0\" (UID: \"cf070a99-9875-47c6-bff7-c499c916cfe3\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:31:29 crc kubenswrapper[4558]: I0120 17:31:29.017067 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf070a99-9875-47c6-bff7-c499c916cfe3-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"cf070a99-9875-47c6-bff7-c499c916cfe3\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:31:29 crc kubenswrapper[4558]: I0120 17:31:29.017804 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf070a99-9875-47c6-bff7-c499c916cfe3-config-data\") pod \"nova-metadata-0\" (UID: \"cf070a99-9875-47c6-bff7-c499c916cfe3\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:31:29 crc kubenswrapper[4558]: I0120 17:31:29.028371 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fkccd\" (UniqueName: \"kubernetes.io/projected/cf070a99-9875-47c6-bff7-c499c916cfe3-kube-api-access-fkccd\") pod \"nova-metadata-0\" (UID: \"cf070a99-9875-47c6-bff7-c499c916cfe3\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:31:29 crc kubenswrapper[4558]: I0120 17:31:29.162818 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:31:29 crc kubenswrapper[4558]: I0120 17:31:29.347878 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:31:29 crc kubenswrapper[4558]: W0120 17:31:29.351549 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda1618e8e_b2ef_429f_92e0_7282453257ec.slice/crio-0c1c8c0abb47a0fe387e58af6b176fe1fb45567ec637ae0ae33aa785b3f1ba0e WatchSource:0}: Error finding container 0c1c8c0abb47a0fe387e58af6b176fe1fb45567ec637ae0ae33aa785b3f1ba0e: Status 404 returned error can't find the container with id 0c1c8c0abb47a0fe387e58af6b176fe1fb45567ec637ae0ae33aa785b3f1ba0e Jan 20 17:31:29 crc kubenswrapper[4558]: I0120 17:31:29.509816 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"a1618e8e-b2ef-429f-92e0-7282453257ec","Type":"ContainerStarted","Data":"0c1c8c0abb47a0fe387e58af6b176fe1fb45567ec637ae0ae33aa785b3f1ba0e"} Jan 20 17:31:29 crc kubenswrapper[4558]: I0120 17:31:29.513087 4558 generic.go:334] "Generic (PLEG): container finished" podID="dfdfb43e-4249-4f0e-b397-09ffd2245fc6" containerID="71359f9180f19fb1eac89d6c9957099d56eebcab537b46ed0226e9a138090ab5" exitCode=0 Jan 20 17:31:29 crc kubenswrapper[4558]: I0120 17:31:29.513182 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"dfdfb43e-4249-4f0e-b397-09ffd2245fc6","Type":"ContainerDied","Data":"71359f9180f19fb1eac89d6c9957099d56eebcab537b46ed0226e9a138090ab5"} Jan 20 17:31:29 crc kubenswrapper[4558]: I0120 17:31:29.576485 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:31:29 crc kubenswrapper[4558]: W0120 17:31:29.590350 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcf070a99_9875_47c6_bff7_c499c916cfe3.slice/crio-2319f0963fc22c618109f49465042c5102c9fc4d8ddf2c32bce30da7b0acbd1e WatchSource:0}: Error finding container 2319f0963fc22c618109f49465042c5102c9fc4d8ddf2c32bce30da7b0acbd1e: Status 404 returned error can't find the container with id 2319f0963fc22c618109f49465042c5102c9fc4d8ddf2c32bce30da7b0acbd1e Jan 20 17:31:29 crc kubenswrapper[4558]: E0120 17:31:29.617466 4558 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddfdfb43e_4249_4f0e_b397_09ffd2245fc6.slice/crio-71359f9180f19fb1eac89d6c9957099d56eebcab537b46ed0226e9a138090ab5.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddfdfb43e_4249_4f0e_b397_09ffd2245fc6.slice/crio-conmon-71359f9180f19fb1eac89d6c9957099d56eebcab537b46ed0226e9a138090ab5.scope\": RecentStats: unable to find data in memory cache]" Jan 20 17:31:29 crc kubenswrapper[4558]: I0120 17:31:29.739680 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:31:29 crc kubenswrapper[4558]: I0120 17:31:29.831413 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dfdfb43e-4249-4f0e-b397-09ffd2245fc6-combined-ca-bundle\") pod \"dfdfb43e-4249-4f0e-b397-09ffd2245fc6\" (UID: \"dfdfb43e-4249-4f0e-b397-09ffd2245fc6\") " Jan 20 17:31:29 crc kubenswrapper[4558]: I0120 17:31:29.831738 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-66474\" (UniqueName: \"kubernetes.io/projected/dfdfb43e-4249-4f0e-b397-09ffd2245fc6-kube-api-access-66474\") pod \"dfdfb43e-4249-4f0e-b397-09ffd2245fc6\" (UID: \"dfdfb43e-4249-4f0e-b397-09ffd2245fc6\") " Jan 20 17:31:29 crc kubenswrapper[4558]: I0120 17:31:29.831792 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dfdfb43e-4249-4f0e-b397-09ffd2245fc6-config-data\") pod \"dfdfb43e-4249-4f0e-b397-09ffd2245fc6\" (UID: \"dfdfb43e-4249-4f0e-b397-09ffd2245fc6\") " Jan 20 17:31:29 crc kubenswrapper[4558]: I0120 17:31:29.837420 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dfdfb43e-4249-4f0e-b397-09ffd2245fc6-kube-api-access-66474" (OuterVolumeSpecName: "kube-api-access-66474") pod "dfdfb43e-4249-4f0e-b397-09ffd2245fc6" (UID: "dfdfb43e-4249-4f0e-b397-09ffd2245fc6"). InnerVolumeSpecName "kube-api-access-66474". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:31:29 crc kubenswrapper[4558]: I0120 17:31:29.855468 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dfdfb43e-4249-4f0e-b397-09ffd2245fc6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dfdfb43e-4249-4f0e-b397-09ffd2245fc6" (UID: "dfdfb43e-4249-4f0e-b397-09ffd2245fc6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:31:29 crc kubenswrapper[4558]: I0120 17:31:29.856506 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dfdfb43e-4249-4f0e-b397-09ffd2245fc6-config-data" (OuterVolumeSpecName: "config-data") pod "dfdfb43e-4249-4f0e-b397-09ffd2245fc6" (UID: "dfdfb43e-4249-4f0e-b397-09ffd2245fc6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:31:29 crc kubenswrapper[4558]: I0120 17:31:29.934826 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-66474\" (UniqueName: \"kubernetes.io/projected/dfdfb43e-4249-4f0e-b397-09ffd2245fc6-kube-api-access-66474\") on node \"crc\" DevicePath \"\"" Jan 20 17:31:29 crc kubenswrapper[4558]: I0120 17:31:29.934866 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dfdfb43e-4249-4f0e-b397-09ffd2245fc6-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:31:29 crc kubenswrapper[4558]: I0120 17:31:29.934879 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dfdfb43e-4249-4f0e-b397-09ffd2245fc6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:31:30 crc kubenswrapper[4558]: I0120 17:31:30.527719 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:31:30 crc kubenswrapper[4558]: I0120 17:31:30.527717 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"dfdfb43e-4249-4f0e-b397-09ffd2245fc6","Type":"ContainerDied","Data":"83a5f1b208f4daf04893c3fef923efc5336fbc5b5ccd3afe5700c022800c7e84"} Jan 20 17:31:30 crc kubenswrapper[4558]: I0120 17:31:30.528308 4558 scope.go:117] "RemoveContainer" containerID="71359f9180f19fb1eac89d6c9957099d56eebcab537b46ed0226e9a138090ab5" Jan 20 17:31:30 crc kubenswrapper[4558]: I0120 17:31:30.532830 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"a1618e8e-b2ef-429f-92e0-7282453257ec","Type":"ContainerStarted","Data":"e48511e765af1b24d0a54f8b138e65670b4d57bc07fbbd9c31b295ff6dbd1001"} Jan 20 17:31:30 crc kubenswrapper[4558]: I0120 17:31:30.532897 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"a1618e8e-b2ef-429f-92e0-7282453257ec","Type":"ContainerStarted","Data":"7d650a87f24b2456ad240cf6813e8bb50b8444396ac8b7dea371452fcd4f602a"} Jan 20 17:31:30 crc kubenswrapper[4558]: I0120 17:31:30.536250 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"cf070a99-9875-47c6-bff7-c499c916cfe3","Type":"ContainerStarted","Data":"b59f37de538031e4e2bce427860c02966bc93e77f0110ac0d1370edb7764b623"} Jan 20 17:31:30 crc kubenswrapper[4558]: I0120 17:31:30.536299 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"cf070a99-9875-47c6-bff7-c499c916cfe3","Type":"ContainerStarted","Data":"7a99c98ecc66a7b5c4bef4fed4454988c74b4eed3dc280a10c7b5861324c0362"} Jan 20 17:31:30 crc kubenswrapper[4558]: I0120 17:31:30.536312 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"cf070a99-9875-47c6-bff7-c499c916cfe3","Type":"ContainerStarted","Data":"2319f0963fc22c618109f49465042c5102c9fc4d8ddf2c32bce30da7b0acbd1e"} Jan 20 17:31:30 crc kubenswrapper[4558]: I0120 17:31:30.551104 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-api-0" podStartSLOduration=2.55107459 podStartE2EDuration="2.55107459s" podCreationTimestamp="2026-01-20 17:31:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:31:30.550626688 +0000 UTC m=+2984.310964655" watchObservedRunningTime="2026-01-20 17:31:30.55107459 +0000 UTC m=+2984.311412557" Jan 20 17:31:30 crc kubenswrapper[4558]: I0120 17:31:30.605820 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7d0577b7-3358-4cfa-b158-defd76ba6c5e" path="/var/lib/kubelet/pods/7d0577b7-3358-4cfa-b158-defd76ba6c5e/volumes" Jan 20 17:31:30 crc kubenswrapper[4558]: I0120 17:31:30.606611 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f6508afb-b99c-44cf-9a08-8e239cd9b718" path="/var/lib/kubelet/pods/f6508afb-b99c-44cf-9a08-8e239cd9b718/volumes" Jan 20 17:31:30 crc kubenswrapper[4558]: I0120 17:31:30.607535 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:31:30 crc kubenswrapper[4558]: I0120 17:31:30.608128 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:31:30 crc kubenswrapper[4558]: 
I0120 17:31:30.615132 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-metadata-0" podStartSLOduration=2.615106842 podStartE2EDuration="2.615106842s" podCreationTimestamp="2026-01-20 17:31:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:31:30.572444262 +0000 UTC m=+2984.332782229" watchObservedRunningTime="2026-01-20 17:31:30.615106842 +0000 UTC m=+2984.375444808" Jan 20 17:31:30 crc kubenswrapper[4558]: I0120 17:31:30.627350 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:31:30 crc kubenswrapper[4558]: E0120 17:31:30.627745 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dfdfb43e-4249-4f0e-b397-09ffd2245fc6" containerName="nova-scheduler-scheduler" Jan 20 17:31:30 crc kubenswrapper[4558]: I0120 17:31:30.627766 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="dfdfb43e-4249-4f0e-b397-09ffd2245fc6" containerName="nova-scheduler-scheduler" Jan 20 17:31:30 crc kubenswrapper[4558]: I0120 17:31:30.627982 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="dfdfb43e-4249-4f0e-b397-09ffd2245fc6" containerName="nova-scheduler-scheduler" Jan 20 17:31:30 crc kubenswrapper[4558]: I0120 17:31:30.628718 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:31:30 crc kubenswrapper[4558]: I0120 17:31:30.630795 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-scheduler-config-data" Jan 20 17:31:30 crc kubenswrapper[4558]: I0120 17:31:30.635581 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:31:30 crc kubenswrapper[4558]: I0120 17:31:30.759222 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68c699b5-54d2-439d-b110-879ddfbd318f-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"68c699b5-54d2-439d-b110-879ddfbd318f\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:31:30 crc kubenswrapper[4558]: I0120 17:31:30.759414 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/68c699b5-54d2-439d-b110-879ddfbd318f-config-data\") pod \"nova-scheduler-0\" (UID: \"68c699b5-54d2-439d-b110-879ddfbd318f\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:31:30 crc kubenswrapper[4558]: I0120 17:31:30.759451 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wz45f\" (UniqueName: \"kubernetes.io/projected/68c699b5-54d2-439d-b110-879ddfbd318f-kube-api-access-wz45f\") pod \"nova-scheduler-0\" (UID: \"68c699b5-54d2-439d-b110-879ddfbd318f\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:31:30 crc kubenswrapper[4558]: I0120 17:31:30.811808 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:31:30 crc kubenswrapper[4558]: I0120 17:31:30.860709 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/68c699b5-54d2-439d-b110-879ddfbd318f-config-data\") pod \"nova-scheduler-0\" (UID: \"68c699b5-54d2-439d-b110-879ddfbd318f\") 
" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:31:30 crc kubenswrapper[4558]: I0120 17:31:30.860753 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wz45f\" (UniqueName: \"kubernetes.io/projected/68c699b5-54d2-439d-b110-879ddfbd318f-kube-api-access-wz45f\") pod \"nova-scheduler-0\" (UID: \"68c699b5-54d2-439d-b110-879ddfbd318f\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:31:30 crc kubenswrapper[4558]: I0120 17:31:30.860870 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68c699b5-54d2-439d-b110-879ddfbd318f-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"68c699b5-54d2-439d-b110-879ddfbd318f\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:31:30 crc kubenswrapper[4558]: I0120 17:31:30.866244 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/68c699b5-54d2-439d-b110-879ddfbd318f-config-data\") pod \"nova-scheduler-0\" (UID: \"68c699b5-54d2-439d-b110-879ddfbd318f\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:31:30 crc kubenswrapper[4558]: I0120 17:31:30.866776 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68c699b5-54d2-439d-b110-879ddfbd318f-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"68c699b5-54d2-439d-b110-879ddfbd318f\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:31:30 crc kubenswrapper[4558]: I0120 17:31:30.874967 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wz45f\" (UniqueName: \"kubernetes.io/projected/68c699b5-54d2-439d-b110-879ddfbd318f-kube-api-access-wz45f\") pod \"nova-scheduler-0\" (UID: \"68c699b5-54d2-439d-b110-879ddfbd318f\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:31:30 crc kubenswrapper[4558]: I0120 17:31:30.946467 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:31:31 crc kubenswrapper[4558]: I0120 17:31:31.229311 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-cell-mapping-269vx"] Jan 20 17:31:31 crc kubenswrapper[4558]: I0120 17:31:31.230817 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-269vx" Jan 20 17:31:31 crc kubenswrapper[4558]: I0120 17:31:31.233574 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-manage-config-data" Jan 20 17:31:31 crc kubenswrapper[4558]: I0120 17:31:31.233610 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-manage-scripts" Jan 20 17:31:31 crc kubenswrapper[4558]: I0120 17:31:31.236913 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-cell-mapping-269vx"] Jan 20 17:31:31 crc kubenswrapper[4558]: I0120 17:31:31.363858 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:31:31 crc kubenswrapper[4558]: I0120 17:31:31.376860 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b422242-a880-40dd-b972-5c5c05cb12c5-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-269vx\" (UID: \"6b422242-a880-40dd-b972-5c5c05cb12c5\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-269vx" Jan 20 17:31:31 crc kubenswrapper[4558]: I0120 17:31:31.376905 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b422242-a880-40dd-b972-5c5c05cb12c5-config-data\") pod \"nova-cell1-cell-mapping-269vx\" (UID: \"6b422242-a880-40dd-b972-5c5c05cb12c5\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-269vx" Jan 20 17:31:31 crc kubenswrapper[4558]: I0120 17:31:31.376945 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cfm99\" (UniqueName: \"kubernetes.io/projected/6b422242-a880-40dd-b972-5c5c05cb12c5-kube-api-access-cfm99\") pod \"nova-cell1-cell-mapping-269vx\" (UID: \"6b422242-a880-40dd-b972-5c5c05cb12c5\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-269vx" Jan 20 17:31:31 crc kubenswrapper[4558]: I0120 17:31:31.376998 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b422242-a880-40dd-b972-5c5c05cb12c5-scripts\") pod \"nova-cell1-cell-mapping-269vx\" (UID: \"6b422242-a880-40dd-b972-5c5c05cb12c5\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-269vx" Jan 20 17:31:31 crc kubenswrapper[4558]: I0120 17:31:31.482261 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b422242-a880-40dd-b972-5c5c05cb12c5-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-269vx\" (UID: \"6b422242-a880-40dd-b972-5c5c05cb12c5\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-269vx" Jan 20 17:31:31 crc kubenswrapper[4558]: I0120 17:31:31.482321 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b422242-a880-40dd-b972-5c5c05cb12c5-config-data\") pod \"nova-cell1-cell-mapping-269vx\" (UID: \"6b422242-a880-40dd-b972-5c5c05cb12c5\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-269vx" Jan 20 17:31:31 crc kubenswrapper[4558]: I0120 17:31:31.482359 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cfm99\" (UniqueName: \"kubernetes.io/projected/6b422242-a880-40dd-b972-5c5c05cb12c5-kube-api-access-cfm99\") pod 
\"nova-cell1-cell-mapping-269vx\" (UID: \"6b422242-a880-40dd-b972-5c5c05cb12c5\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-269vx" Jan 20 17:31:31 crc kubenswrapper[4558]: I0120 17:31:31.482428 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b422242-a880-40dd-b972-5c5c05cb12c5-scripts\") pod \"nova-cell1-cell-mapping-269vx\" (UID: \"6b422242-a880-40dd-b972-5c5c05cb12c5\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-269vx" Jan 20 17:31:31 crc kubenswrapper[4558]: I0120 17:31:31.487626 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b422242-a880-40dd-b972-5c5c05cb12c5-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-269vx\" (UID: \"6b422242-a880-40dd-b972-5c5c05cb12c5\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-269vx" Jan 20 17:31:31 crc kubenswrapper[4558]: I0120 17:31:31.487687 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b422242-a880-40dd-b972-5c5c05cb12c5-config-data\") pod \"nova-cell1-cell-mapping-269vx\" (UID: \"6b422242-a880-40dd-b972-5c5c05cb12c5\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-269vx" Jan 20 17:31:31 crc kubenswrapper[4558]: I0120 17:31:31.488068 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b422242-a880-40dd-b972-5c5c05cb12c5-scripts\") pod \"nova-cell1-cell-mapping-269vx\" (UID: \"6b422242-a880-40dd-b972-5c5c05cb12c5\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-269vx" Jan 20 17:31:31 crc kubenswrapper[4558]: I0120 17:31:31.496620 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cfm99\" (UniqueName: \"kubernetes.io/projected/6b422242-a880-40dd-b972-5c5c05cb12c5-kube-api-access-cfm99\") pod \"nova-cell1-cell-mapping-269vx\" (UID: \"6b422242-a880-40dd-b972-5c5c05cb12c5\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-269vx" Jan 20 17:31:31 crc kubenswrapper[4558]: I0120 17:31:31.549658 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"68c699b5-54d2-439d-b110-879ddfbd318f","Type":"ContainerStarted","Data":"bc4907b838488fbc10fd9186808e170474f88d3e2b4a74884a7a6935d84328d9"} Jan 20 17:31:31 crc kubenswrapper[4558]: I0120 17:31:31.549728 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"68c699b5-54d2-439d-b110-879ddfbd318f","Type":"ContainerStarted","Data":"1771d71e7c70909fcae1b797860709e4f7a2895fe19a216b5d1f7b13411ea686"} Jan 20 17:31:31 crc kubenswrapper[4558]: I0120 17:31:31.549974 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-269vx" Jan 20 17:31:31 crc kubenswrapper[4558]: I0120 17:31:31.573317 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-scheduler-0" podStartSLOduration=1.573301772 podStartE2EDuration="1.573301772s" podCreationTimestamp="2026-01-20 17:31:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:31:31.565011814 +0000 UTC m=+2985.325349781" watchObservedRunningTime="2026-01-20 17:31:31.573301772 +0000 UTC m=+2985.333639739" Jan 20 17:31:31 crc kubenswrapper[4558]: I0120 17:31:31.958867 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-cell-mapping-269vx"] Jan 20 17:31:31 crc kubenswrapper[4558]: W0120 17:31:31.961713 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6b422242_a880_40dd_b972_5c5c05cb12c5.slice/crio-05bf2ecca11c92f55889c0a1673f459cca6231b49faae5e2b3afb300ad4fe5f5 WatchSource:0}: Error finding container 05bf2ecca11c92f55889c0a1673f459cca6231b49faae5e2b3afb300ad4fe5f5: Status 404 returned error can't find the container with id 05bf2ecca11c92f55889c0a1673f459cca6231b49faae5e2b3afb300ad4fe5f5 Jan 20 17:31:32 crc kubenswrapper[4558]: I0120 17:31:32.563733 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-269vx" event={"ID":"6b422242-a880-40dd-b972-5c5c05cb12c5","Type":"ContainerStarted","Data":"aafa82e0ba7ad650ecae2bee5a0e71bb01982354e592b14191e645d9fc1d07bc"} Jan 20 17:31:32 crc kubenswrapper[4558]: I0120 17:31:32.564049 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-269vx" event={"ID":"6b422242-a880-40dd-b972-5c5c05cb12c5","Type":"ContainerStarted","Data":"05bf2ecca11c92f55889c0a1673f459cca6231b49faae5e2b3afb300ad4fe5f5"} Jan 20 17:31:32 crc kubenswrapper[4558]: I0120 17:31:32.575647 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dfdfb43e-4249-4f0e-b397-09ffd2245fc6" path="/var/lib/kubelet/pods/dfdfb43e-4249-4f0e-b397-09ffd2245fc6/volumes" Jan 20 17:31:32 crc kubenswrapper[4558]: I0120 17:31:32.587535 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-269vx" podStartSLOduration=1.587513812 podStartE2EDuration="1.587513812s" podCreationTimestamp="2026-01-20 17:31:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:31:32.581502227 +0000 UTC m=+2986.341840195" watchObservedRunningTime="2026-01-20 17:31:32.587513812 +0000 UTC m=+2986.347851779" Jan 20 17:31:33 crc kubenswrapper[4558]: I0120 17:31:33.566215 4558 scope.go:117] "RemoveContainer" containerID="f275e9f5de330b3c79316d5871106c6bcf90b698e0744c5beb28da45c336b36d" Jan 20 17:31:33 crc kubenswrapper[4558]: E0120 17:31:33.566446 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:31:34 crc 
kubenswrapper[4558]: I0120 17:31:34.162918 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:31:34 crc kubenswrapper[4558]: I0120 17:31:34.163286 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:31:35 crc kubenswrapper[4558]: I0120 17:31:35.947591 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:31:36 crc kubenswrapper[4558]: I0120 17:31:36.614606 4558 generic.go:334] "Generic (PLEG): container finished" podID="6b422242-a880-40dd-b972-5c5c05cb12c5" containerID="aafa82e0ba7ad650ecae2bee5a0e71bb01982354e592b14191e645d9fc1d07bc" exitCode=0 Jan 20 17:31:36 crc kubenswrapper[4558]: I0120 17:31:36.614655 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-269vx" event={"ID":"6b422242-a880-40dd-b972-5c5c05cb12c5","Type":"ContainerDied","Data":"aafa82e0ba7ad650ecae2bee5a0e71bb01982354e592b14191e645d9fc1d07bc"} Jan 20 17:31:37 crc kubenswrapper[4558]: I0120 17:31:37.938507 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-269vx" Jan 20 17:31:38 crc kubenswrapper[4558]: I0120 17:31:38.013285 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b422242-a880-40dd-b972-5c5c05cb12c5-combined-ca-bundle\") pod \"6b422242-a880-40dd-b972-5c5c05cb12c5\" (UID: \"6b422242-a880-40dd-b972-5c5c05cb12c5\") " Jan 20 17:31:38 crc kubenswrapper[4558]: I0120 17:31:38.013335 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b422242-a880-40dd-b972-5c5c05cb12c5-config-data\") pod \"6b422242-a880-40dd-b972-5c5c05cb12c5\" (UID: \"6b422242-a880-40dd-b972-5c5c05cb12c5\") " Jan 20 17:31:38 crc kubenswrapper[4558]: I0120 17:31:38.013358 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b422242-a880-40dd-b972-5c5c05cb12c5-scripts\") pod \"6b422242-a880-40dd-b972-5c5c05cb12c5\" (UID: \"6b422242-a880-40dd-b972-5c5c05cb12c5\") " Jan 20 17:31:38 crc kubenswrapper[4558]: I0120 17:31:38.013576 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfm99\" (UniqueName: \"kubernetes.io/projected/6b422242-a880-40dd-b972-5c5c05cb12c5-kube-api-access-cfm99\") pod \"6b422242-a880-40dd-b972-5c5c05cb12c5\" (UID: \"6b422242-a880-40dd-b972-5c5c05cb12c5\") " Jan 20 17:31:38 crc kubenswrapper[4558]: I0120 17:31:38.021349 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b422242-a880-40dd-b972-5c5c05cb12c5-scripts" (OuterVolumeSpecName: "scripts") pod "6b422242-a880-40dd-b972-5c5c05cb12c5" (UID: "6b422242-a880-40dd-b972-5c5c05cb12c5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:31:38 crc kubenswrapper[4558]: I0120 17:31:38.021413 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6b422242-a880-40dd-b972-5c5c05cb12c5-kube-api-access-cfm99" (OuterVolumeSpecName: "kube-api-access-cfm99") pod "6b422242-a880-40dd-b972-5c5c05cb12c5" (UID: "6b422242-a880-40dd-b972-5c5c05cb12c5"). InnerVolumeSpecName "kube-api-access-cfm99". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:31:38 crc kubenswrapper[4558]: I0120 17:31:38.047813 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b422242-a880-40dd-b972-5c5c05cb12c5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6b422242-a880-40dd-b972-5c5c05cb12c5" (UID: "6b422242-a880-40dd-b972-5c5c05cb12c5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:31:38 crc kubenswrapper[4558]: I0120 17:31:38.050799 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b422242-a880-40dd-b972-5c5c05cb12c5-config-data" (OuterVolumeSpecName: "config-data") pod "6b422242-a880-40dd-b972-5c5c05cb12c5" (UID: "6b422242-a880-40dd-b972-5c5c05cb12c5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:31:38 crc kubenswrapper[4558]: I0120 17:31:38.115020 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b422242-a880-40dd-b972-5c5c05cb12c5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:31:38 crc kubenswrapper[4558]: I0120 17:31:38.115051 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b422242-a880-40dd-b972-5c5c05cb12c5-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:31:38 crc kubenswrapper[4558]: I0120 17:31:38.115061 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b422242-a880-40dd-b972-5c5c05cb12c5-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:31:38 crc kubenswrapper[4558]: I0120 17:31:38.115073 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfm99\" (UniqueName: \"kubernetes.io/projected/6b422242-a880-40dd-b972-5c5c05cb12c5-kube-api-access-cfm99\") on node \"crc\" DevicePath \"\"" Jan 20 17:31:38 crc kubenswrapper[4558]: I0120 17:31:38.638949 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-269vx" event={"ID":"6b422242-a880-40dd-b972-5c5c05cb12c5","Type":"ContainerDied","Data":"05bf2ecca11c92f55889c0a1673f459cca6231b49faae5e2b3afb300ad4fe5f5"} Jan 20 17:31:38 crc kubenswrapper[4558]: I0120 17:31:38.639002 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="05bf2ecca11c92f55889c0a1673f459cca6231b49faae5e2b3afb300ad4fe5f5" Jan 20 17:31:38 crc kubenswrapper[4558]: I0120 17:31:38.639092 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-269vx" Jan 20 17:31:38 crc kubenswrapper[4558]: I0120 17:31:38.835229 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:31:38 crc kubenswrapper[4558]: I0120 17:31:38.835479 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="a1618e8e-b2ef-429f-92e0-7282453257ec" containerName="nova-api-log" containerID="cri-o://e48511e765af1b24d0a54f8b138e65670b4d57bc07fbbd9c31b295ff6dbd1001" gracePeriod=30 Jan 20 17:31:38 crc kubenswrapper[4558]: I0120 17:31:38.835940 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="a1618e8e-b2ef-429f-92e0-7282453257ec" containerName="nova-api-api" containerID="cri-o://7d650a87f24b2456ad240cf6813e8bb50b8444396ac8b7dea371452fcd4f602a" gracePeriod=30 Jan 20 17:31:38 crc kubenswrapper[4558]: I0120 17:31:38.853847 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:31:38 crc kubenswrapper[4558]: I0120 17:31:38.854054 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="68c699b5-54d2-439d-b110-879ddfbd318f" containerName="nova-scheduler-scheduler" containerID="cri-o://bc4907b838488fbc10fd9186808e170474f88d3e2b4a74884a7a6935d84328d9" gracePeriod=30 Jan 20 17:31:38 crc kubenswrapper[4558]: I0120 17:31:38.864226 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:31:38 crc kubenswrapper[4558]: I0120 17:31:38.864399 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="cf070a99-9875-47c6-bff7-c499c916cfe3" containerName="nova-metadata-log" containerID="cri-o://7a99c98ecc66a7b5c4bef4fed4454988c74b4eed3dc280a10c7b5861324c0362" gracePeriod=30 Jan 20 17:31:38 crc kubenswrapper[4558]: I0120 17:31:38.864499 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="cf070a99-9875-47c6-bff7-c499c916cfe3" containerName="nova-metadata-metadata" containerID="cri-o://b59f37de538031e4e2bce427860c02966bc93e77f0110ac0d1370edb7764b623" gracePeriod=30 Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.314644 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.324143 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.442856 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-crn87\" (UniqueName: \"kubernetes.io/projected/a1618e8e-b2ef-429f-92e0-7282453257ec-kube-api-access-crn87\") pod \"a1618e8e-b2ef-429f-92e0-7282453257ec\" (UID: \"a1618e8e-b2ef-429f-92e0-7282453257ec\") " Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.442954 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a1618e8e-b2ef-429f-92e0-7282453257ec-logs\") pod \"a1618e8e-b2ef-429f-92e0-7282453257ec\" (UID: \"a1618e8e-b2ef-429f-92e0-7282453257ec\") " Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.443015 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fkccd\" (UniqueName: \"kubernetes.io/projected/cf070a99-9875-47c6-bff7-c499c916cfe3-kube-api-access-fkccd\") pod \"cf070a99-9875-47c6-bff7-c499c916cfe3\" (UID: \"cf070a99-9875-47c6-bff7-c499c916cfe3\") " Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.443127 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1618e8e-b2ef-429f-92e0-7282453257ec-combined-ca-bundle\") pod \"a1618e8e-b2ef-429f-92e0-7282453257ec\" (UID: \"a1618e8e-b2ef-429f-92e0-7282453257ec\") " Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.443208 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf070a99-9875-47c6-bff7-c499c916cfe3-combined-ca-bundle\") pod \"cf070a99-9875-47c6-bff7-c499c916cfe3\" (UID: \"cf070a99-9875-47c6-bff7-c499c916cfe3\") " Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.443262 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cf070a99-9875-47c6-bff7-c499c916cfe3-logs\") pod \"cf070a99-9875-47c6-bff7-c499c916cfe3\" (UID: \"cf070a99-9875-47c6-bff7-c499c916cfe3\") " Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.443284 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1618e8e-b2ef-429f-92e0-7282453257ec-config-data\") pod \"a1618e8e-b2ef-429f-92e0-7282453257ec\" (UID: \"a1618e8e-b2ef-429f-92e0-7282453257ec\") " Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.443458 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf070a99-9875-47c6-bff7-c499c916cfe3-config-data\") pod \"cf070a99-9875-47c6-bff7-c499c916cfe3\" (UID: \"cf070a99-9875-47c6-bff7-c499c916cfe3\") " Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.445431 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a1618e8e-b2ef-429f-92e0-7282453257ec-logs" (OuterVolumeSpecName: "logs") pod "a1618e8e-b2ef-429f-92e0-7282453257ec" (UID: "a1618e8e-b2ef-429f-92e0-7282453257ec"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.445692 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cf070a99-9875-47c6-bff7-c499c916cfe3-logs" (OuterVolumeSpecName: "logs") pod "cf070a99-9875-47c6-bff7-c499c916cfe3" (UID: "cf070a99-9875-47c6-bff7-c499c916cfe3"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.450888 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cf070a99-9875-47c6-bff7-c499c916cfe3-kube-api-access-fkccd" (OuterVolumeSpecName: "kube-api-access-fkccd") pod "cf070a99-9875-47c6-bff7-c499c916cfe3" (UID: "cf070a99-9875-47c6-bff7-c499c916cfe3"). InnerVolumeSpecName "kube-api-access-fkccd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.451546 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a1618e8e-b2ef-429f-92e0-7282453257ec-kube-api-access-crn87" (OuterVolumeSpecName: "kube-api-access-crn87") pod "a1618e8e-b2ef-429f-92e0-7282453257ec" (UID: "a1618e8e-b2ef-429f-92e0-7282453257ec"). InnerVolumeSpecName "kube-api-access-crn87". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.472373 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a1618e8e-b2ef-429f-92e0-7282453257ec-config-data" (OuterVolumeSpecName: "config-data") pod "a1618e8e-b2ef-429f-92e0-7282453257ec" (UID: "a1618e8e-b2ef-429f-92e0-7282453257ec"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.475320 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a1618e8e-b2ef-429f-92e0-7282453257ec-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a1618e8e-b2ef-429f-92e0-7282453257ec" (UID: "a1618e8e-b2ef-429f-92e0-7282453257ec"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.475572 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf070a99-9875-47c6-bff7-c499c916cfe3-config-data" (OuterVolumeSpecName: "config-data") pod "cf070a99-9875-47c6-bff7-c499c916cfe3" (UID: "cf070a99-9875-47c6-bff7-c499c916cfe3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.475647 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf070a99-9875-47c6-bff7-c499c916cfe3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cf070a99-9875-47c6-bff7-c499c916cfe3" (UID: "cf070a99-9875-47c6-bff7-c499c916cfe3"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.546613 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a1618e8e-b2ef-429f-92e0-7282453257ec-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.546768 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fkccd\" (UniqueName: \"kubernetes.io/projected/cf070a99-9875-47c6-bff7-c499c916cfe3-kube-api-access-fkccd\") on node \"crc\" DevicePath \"\"" Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.546802 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a1618e8e-b2ef-429f-92e0-7282453257ec-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.546816 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf070a99-9875-47c6-bff7-c499c916cfe3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.546825 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cf070a99-9875-47c6-bff7-c499c916cfe3-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.546834 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a1618e8e-b2ef-429f-92e0-7282453257ec-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.546843 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf070a99-9875-47c6-bff7-c499c916cfe3-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.546853 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-crn87\" (UniqueName: \"kubernetes.io/projected/a1618e8e-b2ef-429f-92e0-7282453257ec-kube-api-access-crn87\") on node \"crc\" DevicePath \"\"" Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.654753 4558 generic.go:334] "Generic (PLEG): container finished" podID="a1618e8e-b2ef-429f-92e0-7282453257ec" containerID="7d650a87f24b2456ad240cf6813e8bb50b8444396ac8b7dea371452fcd4f602a" exitCode=0 Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.654795 4558 generic.go:334] "Generic (PLEG): container finished" podID="a1618e8e-b2ef-429f-92e0-7282453257ec" containerID="e48511e765af1b24d0a54f8b138e65670b4d57bc07fbbd9c31b295ff6dbd1001" exitCode=143 Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.654841 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"a1618e8e-b2ef-429f-92e0-7282453257ec","Type":"ContainerDied","Data":"7d650a87f24b2456ad240cf6813e8bb50b8444396ac8b7dea371452fcd4f602a"} Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.654893 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"a1618e8e-b2ef-429f-92e0-7282453257ec","Type":"ContainerDied","Data":"e48511e765af1b24d0a54f8b138e65670b4d57bc07fbbd9c31b295ff6dbd1001"} Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.654852 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.654906 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"a1618e8e-b2ef-429f-92e0-7282453257ec","Type":"ContainerDied","Data":"0c1c8c0abb47a0fe387e58af6b176fe1fb45567ec637ae0ae33aa785b3f1ba0e"} Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.654926 4558 scope.go:117] "RemoveContainer" containerID="7d650a87f24b2456ad240cf6813e8bb50b8444396ac8b7dea371452fcd4f602a" Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.656939 4558 generic.go:334] "Generic (PLEG): container finished" podID="cf070a99-9875-47c6-bff7-c499c916cfe3" containerID="b59f37de538031e4e2bce427860c02966bc93e77f0110ac0d1370edb7764b623" exitCode=0 Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.656958 4558 generic.go:334] "Generic (PLEG): container finished" podID="cf070a99-9875-47c6-bff7-c499c916cfe3" containerID="7a99c98ecc66a7b5c4bef4fed4454988c74b4eed3dc280a10c7b5861324c0362" exitCode=143 Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.656978 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"cf070a99-9875-47c6-bff7-c499c916cfe3","Type":"ContainerDied","Data":"b59f37de538031e4e2bce427860c02966bc93e77f0110ac0d1370edb7764b623"} Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.657003 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"cf070a99-9875-47c6-bff7-c499c916cfe3","Type":"ContainerDied","Data":"7a99c98ecc66a7b5c4bef4fed4454988c74b4eed3dc280a10c7b5861324c0362"} Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.657016 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"cf070a99-9875-47c6-bff7-c499c916cfe3","Type":"ContainerDied","Data":"2319f0963fc22c618109f49465042c5102c9fc4d8ddf2c32bce30da7b0acbd1e"} Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.657066 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.696650 4558 scope.go:117] "RemoveContainer" containerID="e48511e765af1b24d0a54f8b138e65670b4d57bc07fbbd9c31b295ff6dbd1001" Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.701911 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.726037 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.742478 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.748533 4558 scope.go:117] "RemoveContainer" containerID="7d650a87f24b2456ad240cf6813e8bb50b8444396ac8b7dea371452fcd4f602a" Jan 20 17:31:39 crc kubenswrapper[4558]: E0120 17:31:39.749008 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7d650a87f24b2456ad240cf6813e8bb50b8444396ac8b7dea371452fcd4f602a\": container with ID starting with 7d650a87f24b2456ad240cf6813e8bb50b8444396ac8b7dea371452fcd4f602a not found: ID does not exist" containerID="7d650a87f24b2456ad240cf6813e8bb50b8444396ac8b7dea371452fcd4f602a" Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.749057 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7d650a87f24b2456ad240cf6813e8bb50b8444396ac8b7dea371452fcd4f602a"} err="failed to get container status \"7d650a87f24b2456ad240cf6813e8bb50b8444396ac8b7dea371452fcd4f602a\": rpc error: code = NotFound desc = could not find container \"7d650a87f24b2456ad240cf6813e8bb50b8444396ac8b7dea371452fcd4f602a\": container with ID starting with 7d650a87f24b2456ad240cf6813e8bb50b8444396ac8b7dea371452fcd4f602a not found: ID does not exist" Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.749091 4558 scope.go:117] "RemoveContainer" containerID="e48511e765af1b24d0a54f8b138e65670b4d57bc07fbbd9c31b295ff6dbd1001" Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.749216 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:31:39 crc kubenswrapper[4558]: E0120 17:31:39.749687 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e48511e765af1b24d0a54f8b138e65670b4d57bc07fbbd9c31b295ff6dbd1001\": container with ID starting with e48511e765af1b24d0a54f8b138e65670b4d57bc07fbbd9c31b295ff6dbd1001 not found: ID does not exist" containerID="e48511e765af1b24d0a54f8b138e65670b4d57bc07fbbd9c31b295ff6dbd1001" Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.749740 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e48511e765af1b24d0a54f8b138e65670b4d57bc07fbbd9c31b295ff6dbd1001"} err="failed to get container status \"e48511e765af1b24d0a54f8b138e65670b4d57bc07fbbd9c31b295ff6dbd1001\": rpc error: code = NotFound desc = could not find container \"e48511e765af1b24d0a54f8b138e65670b4d57bc07fbbd9c31b295ff6dbd1001\": container with ID starting with e48511e765af1b24d0a54f8b138e65670b4d57bc07fbbd9c31b295ff6dbd1001 not found: ID does not exist" Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.749771 4558 scope.go:117] "RemoveContainer" containerID="7d650a87f24b2456ad240cf6813e8bb50b8444396ac8b7dea371452fcd4f602a" Jan 20 17:31:39 crc 
kubenswrapper[4558]: E0120 17:31:39.749700 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1618e8e-b2ef-429f-92e0-7282453257ec" containerName="nova-api-log" Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.749893 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1618e8e-b2ef-429f-92e0-7282453257ec" containerName="nova-api-log" Jan 20 17:31:39 crc kubenswrapper[4558]: E0120 17:31:39.749951 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf070a99-9875-47c6-bff7-c499c916cfe3" containerName="nova-metadata-log" Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.749960 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf070a99-9875-47c6-bff7-c499c916cfe3" containerName="nova-metadata-log" Jan 20 17:31:39 crc kubenswrapper[4558]: E0120 17:31:39.749984 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf070a99-9875-47c6-bff7-c499c916cfe3" containerName="nova-metadata-metadata" Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.749992 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf070a99-9875-47c6-bff7-c499c916cfe3" containerName="nova-metadata-metadata" Jan 20 17:31:39 crc kubenswrapper[4558]: E0120 17:31:39.750011 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1618e8e-b2ef-429f-92e0-7282453257ec" containerName="nova-api-api" Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.750019 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1618e8e-b2ef-429f-92e0-7282453257ec" containerName="nova-api-api" Jan 20 17:31:39 crc kubenswrapper[4558]: E0120 17:31:39.750037 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b422242-a880-40dd-b972-5c5c05cb12c5" containerName="nova-manage" Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.750045 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b422242-a880-40dd-b972-5c5c05cb12c5" containerName="nova-manage" Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.750046 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7d650a87f24b2456ad240cf6813e8bb50b8444396ac8b7dea371452fcd4f602a"} err="failed to get container status \"7d650a87f24b2456ad240cf6813e8bb50b8444396ac8b7dea371452fcd4f602a\": rpc error: code = NotFound desc = could not find container \"7d650a87f24b2456ad240cf6813e8bb50b8444396ac8b7dea371452fcd4f602a\": container with ID starting with 7d650a87f24b2456ad240cf6813e8bb50b8444396ac8b7dea371452fcd4f602a not found: ID does not exist" Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.750066 4558 scope.go:117] "RemoveContainer" containerID="e48511e765af1b24d0a54f8b138e65670b4d57bc07fbbd9c31b295ff6dbd1001" Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.750249 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e48511e765af1b24d0a54f8b138e65670b4d57bc07fbbd9c31b295ff6dbd1001"} err="failed to get container status \"e48511e765af1b24d0a54f8b138e65670b4d57bc07fbbd9c31b295ff6dbd1001\": rpc error: code = NotFound desc = could not find container \"e48511e765af1b24d0a54f8b138e65670b4d57bc07fbbd9c31b295ff6dbd1001\": container with ID starting with e48511e765af1b24d0a54f8b138e65670b4d57bc07fbbd9c31b295ff6dbd1001 not found: ID does not exist" Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.750268 4558 scope.go:117] "RemoveContainer" containerID="b59f37de538031e4e2bce427860c02966bc93e77f0110ac0d1370edb7764b623" Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 
17:31:39.750430 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a1618e8e-b2ef-429f-92e0-7282453257ec" containerName="nova-api-log" Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.750455 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b422242-a880-40dd-b972-5c5c05cb12c5" containerName="nova-manage" Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.750464 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a1618e8e-b2ef-429f-92e0-7282453257ec" containerName="nova-api-api" Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.750473 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf070a99-9875-47c6-bff7-c499c916cfe3" containerName="nova-metadata-metadata" Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.750500 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf070a99-9875-47c6-bff7-c499c916cfe3" containerName="nova-metadata-log" Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.751692 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.752983 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-api-config-data" Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.768803 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.774026 4558 scope.go:117] "RemoveContainer" containerID="7a99c98ecc66a7b5c4bef4fed4454988c74b4eed3dc280a10c7b5861324c0362" Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.776319 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.783401 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.785048 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.786596 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-metadata-config-data" Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.789522 4558 scope.go:117] "RemoveContainer" containerID="b59f37de538031e4e2bce427860c02966bc93e77f0110ac0d1370edb7764b623" Jan 20 17:31:39 crc kubenswrapper[4558]: E0120 17:31:39.789898 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b59f37de538031e4e2bce427860c02966bc93e77f0110ac0d1370edb7764b623\": container with ID starting with b59f37de538031e4e2bce427860c02966bc93e77f0110ac0d1370edb7764b623 not found: ID does not exist" containerID="b59f37de538031e4e2bce427860c02966bc93e77f0110ac0d1370edb7764b623" Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.789923 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b59f37de538031e4e2bce427860c02966bc93e77f0110ac0d1370edb7764b623"} err="failed to get container status \"b59f37de538031e4e2bce427860c02966bc93e77f0110ac0d1370edb7764b623\": rpc error: code = NotFound desc = could not find container \"b59f37de538031e4e2bce427860c02966bc93e77f0110ac0d1370edb7764b623\": container with ID starting with b59f37de538031e4e2bce427860c02966bc93e77f0110ac0d1370edb7764b623 not found: ID does not exist" Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.789942 4558 scope.go:117] "RemoveContainer" containerID="7a99c98ecc66a7b5c4bef4fed4454988c74b4eed3dc280a10c7b5861324c0362" Jan 20 17:31:39 crc kubenswrapper[4558]: E0120 17:31:39.790158 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7a99c98ecc66a7b5c4bef4fed4454988c74b4eed3dc280a10c7b5861324c0362\": container with ID starting with 7a99c98ecc66a7b5c4bef4fed4454988c74b4eed3dc280a10c7b5861324c0362 not found: ID does not exist" containerID="7a99c98ecc66a7b5c4bef4fed4454988c74b4eed3dc280a10c7b5861324c0362" Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.790206 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7a99c98ecc66a7b5c4bef4fed4454988c74b4eed3dc280a10c7b5861324c0362"} err="failed to get container status \"7a99c98ecc66a7b5c4bef4fed4454988c74b4eed3dc280a10c7b5861324c0362\": rpc error: code = NotFound desc = could not find container \"7a99c98ecc66a7b5c4bef4fed4454988c74b4eed3dc280a10c7b5861324c0362\": container with ID starting with 7a99c98ecc66a7b5c4bef4fed4454988c74b4eed3dc280a10c7b5861324c0362 not found: ID does not exist" Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.790222 4558 scope.go:117] "RemoveContainer" containerID="b59f37de538031e4e2bce427860c02966bc93e77f0110ac0d1370edb7764b623" Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.790558 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b59f37de538031e4e2bce427860c02966bc93e77f0110ac0d1370edb7764b623"} err="failed to get container status \"b59f37de538031e4e2bce427860c02966bc93e77f0110ac0d1370edb7764b623\": rpc error: code = NotFound desc = could not find container \"b59f37de538031e4e2bce427860c02966bc93e77f0110ac0d1370edb7764b623\": container with ID starting with b59f37de538031e4e2bce427860c02966bc93e77f0110ac0d1370edb7764b623 not found: ID does not exist" Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 
17:31:39.790603 4558 scope.go:117] "RemoveContainer" containerID="7a99c98ecc66a7b5c4bef4fed4454988c74b4eed3dc280a10c7b5861324c0362" Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.790851 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7a99c98ecc66a7b5c4bef4fed4454988c74b4eed3dc280a10c7b5861324c0362"} err="failed to get container status \"7a99c98ecc66a7b5c4bef4fed4454988c74b4eed3dc280a10c7b5861324c0362\": rpc error: code = NotFound desc = could not find container \"7a99c98ecc66a7b5c4bef4fed4454988c74b4eed3dc280a10c7b5861324c0362\": container with ID starting with 7a99c98ecc66a7b5c4bef4fed4454988c74b4eed3dc280a10c7b5861324c0362 not found: ID does not exist" Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.794097 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:31:39 crc kubenswrapper[4558]: E0120 17:31:39.809951 4558 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda1618e8e_b2ef_429f_92e0_7282453257ec.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcf070a99_9875_47c6_bff7_c499c916cfe3.slice\": RecentStats: unable to find data in memory cache]" Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.851499 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/623ea472-e0f5-48c1-a621-89fe343686ff-logs\") pod \"nova-api-0\" (UID: \"623ea472-e0f5-48c1-a621-89fe343686ff\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.852006 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/623ea472-e0f5-48c1-a621-89fe343686ff-config-data\") pod \"nova-api-0\" (UID: \"623ea472-e0f5-48c1-a621-89fe343686ff\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.852091 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5p6l9\" (UniqueName: \"kubernetes.io/projected/623ea472-e0f5-48c1-a621-89fe343686ff-kube-api-access-5p6l9\") pod \"nova-api-0\" (UID: \"623ea472-e0f5-48c1-a621-89fe343686ff\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.852365 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/623ea472-e0f5-48c1-a621-89fe343686ff-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"623ea472-e0f5-48c1-a621-89fe343686ff\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.953783 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/623ea472-e0f5-48c1-a621-89fe343686ff-config-data\") pod \"nova-api-0\" (UID: \"623ea472-e0f5-48c1-a621-89fe343686ff\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.953824 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eb4881e5-cce6-4423-a9f4-d93f96a2ccb1-logs\") pod \"nova-metadata-0\" 
(UID: \"eb4881e5-cce6-4423-a9f4-d93f96a2ccb1\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.953865 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5p6l9\" (UniqueName: \"kubernetes.io/projected/623ea472-e0f5-48c1-a621-89fe343686ff-kube-api-access-5p6l9\") pod \"nova-api-0\" (UID: \"623ea472-e0f5-48c1-a621-89fe343686ff\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.953922 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/623ea472-e0f5-48c1-a621-89fe343686ff-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"623ea472-e0f5-48c1-a621-89fe343686ff\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.953941 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tp227\" (UniqueName: \"kubernetes.io/projected/eb4881e5-cce6-4423-a9f4-d93f96a2ccb1-kube-api-access-tp227\") pod \"nova-metadata-0\" (UID: \"eb4881e5-cce6-4423-a9f4-d93f96a2ccb1\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.953973 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/623ea472-e0f5-48c1-a621-89fe343686ff-logs\") pod \"nova-api-0\" (UID: \"623ea472-e0f5-48c1-a621-89fe343686ff\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.954009 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb4881e5-cce6-4423-a9f4-d93f96a2ccb1-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"eb4881e5-cce6-4423-a9f4-d93f96a2ccb1\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.954088 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb4881e5-cce6-4423-a9f4-d93f96a2ccb1-config-data\") pod \"nova-metadata-0\" (UID: \"eb4881e5-cce6-4423-a9f4-d93f96a2ccb1\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.954394 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/623ea472-e0f5-48c1-a621-89fe343686ff-logs\") pod \"nova-api-0\" (UID: \"623ea472-e0f5-48c1-a621-89fe343686ff\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.957539 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/623ea472-e0f5-48c1-a621-89fe343686ff-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"623ea472-e0f5-48c1-a621-89fe343686ff\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.958081 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/623ea472-e0f5-48c1-a621-89fe343686ff-config-data\") pod \"nova-api-0\" (UID: \"623ea472-e0f5-48c1-a621-89fe343686ff\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:31:39 crc kubenswrapper[4558]: I0120 17:31:39.967745 4558 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"kube-api-access-5p6l9\" (UniqueName: \"kubernetes.io/projected/623ea472-e0f5-48c1-a621-89fe343686ff-kube-api-access-5p6l9\") pod \"nova-api-0\" (UID: \"623ea472-e0f5-48c1-a621-89fe343686ff\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:31:40 crc kubenswrapper[4558]: I0120 17:31:40.055317 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb4881e5-cce6-4423-a9f4-d93f96a2ccb1-config-data\") pod \"nova-metadata-0\" (UID: \"eb4881e5-cce6-4423-a9f4-d93f96a2ccb1\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:31:40 crc kubenswrapper[4558]: I0120 17:31:40.055383 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eb4881e5-cce6-4423-a9f4-d93f96a2ccb1-logs\") pod \"nova-metadata-0\" (UID: \"eb4881e5-cce6-4423-a9f4-d93f96a2ccb1\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:31:40 crc kubenswrapper[4558]: I0120 17:31:40.055460 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tp227\" (UniqueName: \"kubernetes.io/projected/eb4881e5-cce6-4423-a9f4-d93f96a2ccb1-kube-api-access-tp227\") pod \"nova-metadata-0\" (UID: \"eb4881e5-cce6-4423-a9f4-d93f96a2ccb1\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:31:40 crc kubenswrapper[4558]: I0120 17:31:40.055533 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb4881e5-cce6-4423-a9f4-d93f96a2ccb1-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"eb4881e5-cce6-4423-a9f4-d93f96a2ccb1\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:31:40 crc kubenswrapper[4558]: I0120 17:31:40.055849 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eb4881e5-cce6-4423-a9f4-d93f96a2ccb1-logs\") pod \"nova-metadata-0\" (UID: \"eb4881e5-cce6-4423-a9f4-d93f96a2ccb1\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:31:40 crc kubenswrapper[4558]: I0120 17:31:40.058871 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb4881e5-cce6-4423-a9f4-d93f96a2ccb1-config-data\") pod \"nova-metadata-0\" (UID: \"eb4881e5-cce6-4423-a9f4-d93f96a2ccb1\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:31:40 crc kubenswrapper[4558]: I0120 17:31:40.059100 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb4881e5-cce6-4423-a9f4-d93f96a2ccb1-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"eb4881e5-cce6-4423-a9f4-d93f96a2ccb1\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:31:40 crc kubenswrapper[4558]: I0120 17:31:40.068186 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:31:40 crc kubenswrapper[4558]: I0120 17:31:40.069870 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tp227\" (UniqueName: \"kubernetes.io/projected/eb4881e5-cce6-4423-a9f4-d93f96a2ccb1-kube-api-access-tp227\") pod \"nova-metadata-0\" (UID: \"eb4881e5-cce6-4423-a9f4-d93f96a2ccb1\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:31:40 crc kubenswrapper[4558]: I0120 17:31:40.103891 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:31:40 crc kubenswrapper[4558]: I0120 17:31:40.492211 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:31:40 crc kubenswrapper[4558]: W0120 17:31:40.555506 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podeb4881e5_cce6_4423_a9f4_d93f96a2ccb1.slice/crio-9298cac7d4049f7510589ee498be2017851e9b87876aa18bbe6002445438312d WatchSource:0}: Error finding container 9298cac7d4049f7510589ee498be2017851e9b87876aa18bbe6002445438312d: Status 404 returned error can't find the container with id 9298cac7d4049f7510589ee498be2017851e9b87876aa18bbe6002445438312d Jan 20 17:31:40 crc kubenswrapper[4558]: I0120 17:31:40.556592 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:31:40 crc kubenswrapper[4558]: I0120 17:31:40.586365 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a1618e8e-b2ef-429f-92e0-7282453257ec" path="/var/lib/kubelet/pods/a1618e8e-b2ef-429f-92e0-7282453257ec/volumes" Jan 20 17:31:40 crc kubenswrapper[4558]: I0120 17:31:40.587376 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cf070a99-9875-47c6-bff7-c499c916cfe3" path="/var/lib/kubelet/pods/cf070a99-9875-47c6-bff7-c499c916cfe3/volumes" Jan 20 17:31:40 crc kubenswrapper[4558]: I0120 17:31:40.674856 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"623ea472-e0f5-48c1-a621-89fe343686ff","Type":"ContainerStarted","Data":"90a23fb16c326ed369750450259a15b6eb7aa5fc9dc6005c29c9b1e37c002a27"} Jan 20 17:31:40 crc kubenswrapper[4558]: I0120 17:31:40.675288 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"623ea472-e0f5-48c1-a621-89fe343686ff","Type":"ContainerStarted","Data":"0c2567480a3eac2f76add9cadd1120f37e159ed68ab470d3e65bd16322a30eae"} Jan 20 17:31:40 crc kubenswrapper[4558]: I0120 17:31:40.677083 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"eb4881e5-cce6-4423-a9f4-d93f96a2ccb1","Type":"ContainerStarted","Data":"9298cac7d4049f7510589ee498be2017851e9b87876aa18bbe6002445438312d"} Jan 20 17:31:41 crc kubenswrapper[4558]: I0120 17:31:41.417010 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:31:41 crc kubenswrapper[4558]: I0120 17:31:41.690723 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"623ea472-e0f5-48c1-a621-89fe343686ff","Type":"ContainerStarted","Data":"8383f65df719eb3597f000c97c3244e2dda5aeac1cb0ef54093fbcf77af4f6b2"} Jan 20 17:31:41 crc kubenswrapper[4558]: I0120 17:31:41.694459 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"eb4881e5-cce6-4423-a9f4-d93f96a2ccb1","Type":"ContainerStarted","Data":"c01c9738249c284d39a49949a1689663cc221dbc3ee6a65254441d2704e98b09"} Jan 20 17:31:41 crc kubenswrapper[4558]: I0120 17:31:41.694621 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"eb4881e5-cce6-4423-a9f4-d93f96a2ccb1","Type":"ContainerStarted","Data":"11026a4823ee019937da1b5d44ee5848da1509d7fbc7958ed8712c4c0b58b33b"} Jan 20 17:31:41 crc kubenswrapper[4558]: I0120 17:31:41.711748 4558 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-api-0" podStartSLOduration=2.711732556 podStartE2EDuration="2.711732556s" podCreationTimestamp="2026-01-20 17:31:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:31:41.708491171 +0000 UTC m=+2995.468829127" watchObservedRunningTime="2026-01-20 17:31:41.711732556 +0000 UTC m=+2995.472070523" Jan 20 17:31:41 crc kubenswrapper[4558]: I0120 17:31:41.730752 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-metadata-0" podStartSLOduration=2.730729797 podStartE2EDuration="2.730729797s" podCreationTimestamp="2026-01-20 17:31:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:31:41.723333159 +0000 UTC m=+2995.483671116" watchObservedRunningTime="2026-01-20 17:31:41.730729797 +0000 UTC m=+2995.491067764" Jan 20 17:31:42 crc kubenswrapper[4558]: I0120 17:31:42.707224 4558 generic.go:334] "Generic (PLEG): container finished" podID="68c699b5-54d2-439d-b110-879ddfbd318f" containerID="bc4907b838488fbc10fd9186808e170474f88d3e2b4a74884a7a6935d84328d9" exitCode=0 Jan 20 17:31:42 crc kubenswrapper[4558]: I0120 17:31:42.708812 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"68c699b5-54d2-439d-b110-879ddfbd318f","Type":"ContainerDied","Data":"bc4907b838488fbc10fd9186808e170474f88d3e2b4a74884a7a6935d84328d9"} Jan 20 17:31:42 crc kubenswrapper[4558]: I0120 17:31:42.708852 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"68c699b5-54d2-439d-b110-879ddfbd318f","Type":"ContainerDied","Data":"1771d71e7c70909fcae1b797860709e4f7a2895fe19a216b5d1f7b13411ea686"} Jan 20 17:31:42 crc kubenswrapper[4558]: I0120 17:31:42.708867 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1771d71e7c70909fcae1b797860709e4f7a2895fe19a216b5d1f7b13411ea686" Jan 20 17:31:42 crc kubenswrapper[4558]: I0120 17:31:42.727110 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:31:42 crc kubenswrapper[4558]: I0120 17:31:42.836672 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68c699b5-54d2-439d-b110-879ddfbd318f-combined-ca-bundle\") pod \"68c699b5-54d2-439d-b110-879ddfbd318f\" (UID: \"68c699b5-54d2-439d-b110-879ddfbd318f\") " Jan 20 17:31:42 crc kubenswrapper[4558]: I0120 17:31:42.836994 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/68c699b5-54d2-439d-b110-879ddfbd318f-config-data\") pod \"68c699b5-54d2-439d-b110-879ddfbd318f\" (UID: \"68c699b5-54d2-439d-b110-879ddfbd318f\") " Jan 20 17:31:42 crc kubenswrapper[4558]: I0120 17:31:42.837053 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wz45f\" (UniqueName: \"kubernetes.io/projected/68c699b5-54d2-439d-b110-879ddfbd318f-kube-api-access-wz45f\") pod \"68c699b5-54d2-439d-b110-879ddfbd318f\" (UID: \"68c699b5-54d2-439d-b110-879ddfbd318f\") " Jan 20 17:31:42 crc kubenswrapper[4558]: I0120 17:31:42.845572 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/68c699b5-54d2-439d-b110-879ddfbd318f-kube-api-access-wz45f" (OuterVolumeSpecName: "kube-api-access-wz45f") pod "68c699b5-54d2-439d-b110-879ddfbd318f" (UID: "68c699b5-54d2-439d-b110-879ddfbd318f"). InnerVolumeSpecName "kube-api-access-wz45f". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:31:42 crc kubenswrapper[4558]: I0120 17:31:42.861378 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/68c699b5-54d2-439d-b110-879ddfbd318f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "68c699b5-54d2-439d-b110-879ddfbd318f" (UID: "68c699b5-54d2-439d-b110-879ddfbd318f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:31:42 crc kubenswrapper[4558]: I0120 17:31:42.861685 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/68c699b5-54d2-439d-b110-879ddfbd318f-config-data" (OuterVolumeSpecName: "config-data") pod "68c699b5-54d2-439d-b110-879ddfbd318f" (UID: "68c699b5-54d2-439d-b110-879ddfbd318f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:31:42 crc kubenswrapper[4558]: I0120 17:31:42.939567 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wz45f\" (UniqueName: \"kubernetes.io/projected/68c699b5-54d2-439d-b110-879ddfbd318f-kube-api-access-wz45f\") on node \"crc\" DevicePath \"\"" Jan 20 17:31:42 crc kubenswrapper[4558]: I0120 17:31:42.939606 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68c699b5-54d2-439d-b110-879ddfbd318f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:31:42 crc kubenswrapper[4558]: I0120 17:31:42.939616 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/68c699b5-54d2-439d-b110-879ddfbd318f-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:31:43 crc kubenswrapper[4558]: I0120 17:31:43.714610 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:31:43 crc kubenswrapper[4558]: I0120 17:31:43.743652 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:31:43 crc kubenswrapper[4558]: I0120 17:31:43.749914 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:31:43 crc kubenswrapper[4558]: I0120 17:31:43.769459 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:31:43 crc kubenswrapper[4558]: E0120 17:31:43.769946 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68c699b5-54d2-439d-b110-879ddfbd318f" containerName="nova-scheduler-scheduler" Jan 20 17:31:43 crc kubenswrapper[4558]: I0120 17:31:43.769967 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="68c699b5-54d2-439d-b110-879ddfbd318f" containerName="nova-scheduler-scheduler" Jan 20 17:31:43 crc kubenswrapper[4558]: I0120 17:31:43.770239 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="68c699b5-54d2-439d-b110-879ddfbd318f" containerName="nova-scheduler-scheduler" Jan 20 17:31:43 crc kubenswrapper[4558]: I0120 17:31:43.770962 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:31:43 crc kubenswrapper[4558]: I0120 17:31:43.785638 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-scheduler-config-data" Jan 20 17:31:43 crc kubenswrapper[4558]: I0120 17:31:43.790093 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:31:43 crc kubenswrapper[4558]: I0120 17:31:43.865488 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7aa6c24a-66a5-4b11-96a1-4d14001d1f5c-config-data\") pod \"nova-scheduler-0\" (UID: \"7aa6c24a-66a5-4b11-96a1-4d14001d1f5c\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:31:43 crc kubenswrapper[4558]: I0120 17:31:43.865761 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c5lng\" (UniqueName: \"kubernetes.io/projected/7aa6c24a-66a5-4b11-96a1-4d14001d1f5c-kube-api-access-c5lng\") pod \"nova-scheduler-0\" (UID: \"7aa6c24a-66a5-4b11-96a1-4d14001d1f5c\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:31:43 crc kubenswrapper[4558]: I0120 17:31:43.865868 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7aa6c24a-66a5-4b11-96a1-4d14001d1f5c-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"7aa6c24a-66a5-4b11-96a1-4d14001d1f5c\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:31:43 crc kubenswrapper[4558]: I0120 17:31:43.967809 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7aa6c24a-66a5-4b11-96a1-4d14001d1f5c-config-data\") pod \"nova-scheduler-0\" (UID: \"7aa6c24a-66a5-4b11-96a1-4d14001d1f5c\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:31:43 crc kubenswrapper[4558]: I0120 17:31:43.967893 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c5lng\" (UniqueName: \"kubernetes.io/projected/7aa6c24a-66a5-4b11-96a1-4d14001d1f5c-kube-api-access-c5lng\") 
pod \"nova-scheduler-0\" (UID: \"7aa6c24a-66a5-4b11-96a1-4d14001d1f5c\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:31:43 crc kubenswrapper[4558]: I0120 17:31:43.967925 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7aa6c24a-66a5-4b11-96a1-4d14001d1f5c-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"7aa6c24a-66a5-4b11-96a1-4d14001d1f5c\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:31:43 crc kubenswrapper[4558]: I0120 17:31:43.973579 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7aa6c24a-66a5-4b11-96a1-4d14001d1f5c-config-data\") pod \"nova-scheduler-0\" (UID: \"7aa6c24a-66a5-4b11-96a1-4d14001d1f5c\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:31:43 crc kubenswrapper[4558]: I0120 17:31:43.973631 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7aa6c24a-66a5-4b11-96a1-4d14001d1f5c-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"7aa6c24a-66a5-4b11-96a1-4d14001d1f5c\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:31:43 crc kubenswrapper[4558]: I0120 17:31:43.982801 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c5lng\" (UniqueName: \"kubernetes.io/projected/7aa6c24a-66a5-4b11-96a1-4d14001d1f5c-kube-api-access-c5lng\") pod \"nova-scheduler-0\" (UID: \"7aa6c24a-66a5-4b11-96a1-4d14001d1f5c\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:31:44 crc kubenswrapper[4558]: I0120 17:31:44.106143 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:31:44 crc kubenswrapper[4558]: I0120 17:31:44.533245 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:31:44 crc kubenswrapper[4558]: I0120 17:31:44.582396 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="68c699b5-54d2-439d-b110-879ddfbd318f" path="/var/lib/kubelet/pods/68c699b5-54d2-439d-b110-879ddfbd318f/volumes" Jan 20 17:31:44 crc kubenswrapper[4558]: I0120 17:31:44.728959 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"7aa6c24a-66a5-4b11-96a1-4d14001d1f5c","Type":"ContainerStarted","Data":"075fdf51fe78df21a6645f555dba48d1e1068aaccdaf9394e743e0397b0f8c25"} Jan 20 17:31:44 crc kubenswrapper[4558]: I0120 17:31:44.729012 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"7aa6c24a-66a5-4b11-96a1-4d14001d1f5c","Type":"ContainerStarted","Data":"58da02281d2c517c5fb8d6fde458e29e6833a321349b658a1345d5ceef2a99c0"} Jan 20 17:31:44 crc kubenswrapper[4558]: I0120 17:31:44.747001 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-scheduler-0" podStartSLOduration=1.746977529 podStartE2EDuration="1.746977529s" podCreationTimestamp="2026-01-20 17:31:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:31:44.741684897 +0000 UTC m=+2998.502022865" watchObservedRunningTime="2026-01-20 17:31:44.746977529 +0000 UTC m=+2998.507315497" Jan 20 17:31:45 crc kubenswrapper[4558]: I0120 17:31:45.104487 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:31:45 crc kubenswrapper[4558]: I0120 17:31:45.104759 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:31:46 crc kubenswrapper[4558]: I0120 17:31:46.574307 4558 scope.go:117] "RemoveContainer" containerID="f275e9f5de330b3c79316d5871106c6bcf90b698e0744c5beb28da45c336b36d" Jan 20 17:31:46 crc kubenswrapper[4558]: E0120 17:31:46.574994 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:31:49 crc kubenswrapper[4558]: I0120 17:31:49.107069 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:31:50 crc kubenswrapper[4558]: I0120 17:31:50.068742 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:31:50 crc kubenswrapper[4558]: I0120 17:31:50.068960 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:31:50 crc kubenswrapper[4558]: I0120 17:31:50.104992 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:31:50 crc kubenswrapper[4558]: I0120 17:31:50.105306 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:31:51 crc kubenswrapper[4558]: I0120 17:31:51.232310 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" podUID="623ea472-e0f5-48c1-a621-89fe343686ff" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.1.202:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 20 17:31:51 crc kubenswrapper[4558]: I0120 17:31:51.232346 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-metadata-0" podUID="eb4881e5-cce6-4423-a9f4-d93f96a2ccb1" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.203:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 20 17:31:51 crc kubenswrapper[4558]: I0120 17:31:51.232368 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" podUID="623ea472-e0f5-48c1-a621-89fe343686ff" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.1.202:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 20 17:31:51 crc kubenswrapper[4558]: I0120 17:31:51.232399 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-metadata-0" podUID="eb4881e5-cce6-4423-a9f4-d93f96a2ccb1" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.203:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 20 17:31:54 crc kubenswrapper[4558]: I0120 17:31:54.106513 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:31:54 crc kubenswrapper[4558]: I0120 17:31:54.136788 
4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:31:54 crc kubenswrapper[4558]: I0120 17:31:54.866629 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:31:59 crc kubenswrapper[4558]: I0120 17:31:59.566557 4558 scope.go:117] "RemoveContainer" containerID="f275e9f5de330b3c79316d5871106c6bcf90b698e0744c5beb28da45c336b36d" Jan 20 17:31:59 crc kubenswrapper[4558]: E0120 17:31:59.567142 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:32:00 crc kubenswrapper[4558]: I0120 17:32:00.074073 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:32:00 crc kubenswrapper[4558]: I0120 17:32:00.075043 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:32:00 crc kubenswrapper[4558]: I0120 17:32:00.075115 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:32:00 crc kubenswrapper[4558]: I0120 17:32:00.079489 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:32:00 crc kubenswrapper[4558]: I0120 17:32:00.106192 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:32:00 crc kubenswrapper[4558]: I0120 17:32:00.106537 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:32:00 crc kubenswrapper[4558]: I0120 17:32:00.108932 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:32:00 crc kubenswrapper[4558]: I0120 17:32:00.109369 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:32:00 crc kubenswrapper[4558]: I0120 17:32:00.913151 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:32:00 crc kubenswrapper[4558]: I0120 17:32:00.917847 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:32:02 crc kubenswrapper[4558]: I0120 17:32:02.558471 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:32:02 crc kubenswrapper[4558]: I0120 17:32:02.559156 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="ae0038a3-7264-4263-961d-ff4bd973270e" containerName="ceilometer-central-agent" containerID="cri-o://5cfd9ea90fd3f8cb0609173530a9c3bac6afc3a002b0b0d58a159c437eb42d76" gracePeriod=30 Jan 20 17:32:02 crc kubenswrapper[4558]: I0120 17:32:02.559207 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="ae0038a3-7264-4263-961d-ff4bd973270e" containerName="proxy-httpd" 
containerID="cri-o://e264ab3139725adb126c87a12bb8544eec8bb6f3977044a49a218713e8607e4a" gracePeriod=30 Jan 20 17:32:02 crc kubenswrapper[4558]: I0120 17:32:02.559363 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="ae0038a3-7264-4263-961d-ff4bd973270e" containerName="sg-core" containerID="cri-o://62879093a8dcc7d9551e46afe019ff1b446d9ad90e73eec908991c7db72c06c5" gracePeriod=30 Jan 20 17:32:02 crc kubenswrapper[4558]: I0120 17:32:02.559408 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="ae0038a3-7264-4263-961d-ff4bd973270e" containerName="ceilometer-notification-agent" containerID="cri-o://eff67ac0c52d545cb6e84b48951c7811d3a813f3451132a6b93b2e8896f528d2" gracePeriod=30 Jan 20 17:32:02 crc kubenswrapper[4558]: I0120 17:32:02.933033 4558 generic.go:334] "Generic (PLEG): container finished" podID="ae0038a3-7264-4263-961d-ff4bd973270e" containerID="e264ab3139725adb126c87a12bb8544eec8bb6f3977044a49a218713e8607e4a" exitCode=0 Jan 20 17:32:02 crc kubenswrapper[4558]: I0120 17:32:02.933339 4558 generic.go:334] "Generic (PLEG): container finished" podID="ae0038a3-7264-4263-961d-ff4bd973270e" containerID="62879093a8dcc7d9551e46afe019ff1b446d9ad90e73eec908991c7db72c06c5" exitCode=2 Jan 20 17:32:02 crc kubenswrapper[4558]: I0120 17:32:02.933350 4558 generic.go:334] "Generic (PLEG): container finished" podID="ae0038a3-7264-4263-961d-ff4bd973270e" containerID="5cfd9ea90fd3f8cb0609173530a9c3bac6afc3a002b0b0d58a159c437eb42d76" exitCode=0 Jan 20 17:32:02 crc kubenswrapper[4558]: I0120 17:32:02.933080 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"ae0038a3-7264-4263-961d-ff4bd973270e","Type":"ContainerDied","Data":"e264ab3139725adb126c87a12bb8544eec8bb6f3977044a49a218713e8607e4a"} Jan 20 17:32:02 crc kubenswrapper[4558]: I0120 17:32:02.933446 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"ae0038a3-7264-4263-961d-ff4bd973270e","Type":"ContainerDied","Data":"62879093a8dcc7d9551e46afe019ff1b446d9ad90e73eec908991c7db72c06c5"} Jan 20 17:32:02 crc kubenswrapper[4558]: I0120 17:32:02.933460 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"ae0038a3-7264-4263-961d-ff4bd973270e","Type":"ContainerDied","Data":"5cfd9ea90fd3f8cb0609173530a9c3bac6afc3a002b0b0d58a159c437eb42d76"} Jan 20 17:32:04 crc kubenswrapper[4558]: I0120 17:32:04.961989 4558 generic.go:334] "Generic (PLEG): container finished" podID="ae0038a3-7264-4263-961d-ff4bd973270e" containerID="eff67ac0c52d545cb6e84b48951c7811d3a813f3451132a6b93b2e8896f528d2" exitCode=0 Jan 20 17:32:04 crc kubenswrapper[4558]: I0120 17:32:04.962033 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"ae0038a3-7264-4263-961d-ff4bd973270e","Type":"ContainerDied","Data":"eff67ac0c52d545cb6e84b48951c7811d3a813f3451132a6b93b2e8896f528d2"} Jan 20 17:32:05 crc kubenswrapper[4558]: I0120 17:32:05.174563 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:32:05 crc kubenswrapper[4558]: I0120 17:32:05.258464 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bmjnp\" (UniqueName: \"kubernetes.io/projected/ae0038a3-7264-4263-961d-ff4bd973270e-kube-api-access-bmjnp\") pod \"ae0038a3-7264-4263-961d-ff4bd973270e\" (UID: \"ae0038a3-7264-4263-961d-ff4bd973270e\") " Jan 20 17:32:05 crc kubenswrapper[4558]: I0120 17:32:05.258504 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ae0038a3-7264-4263-961d-ff4bd973270e-sg-core-conf-yaml\") pod \"ae0038a3-7264-4263-961d-ff4bd973270e\" (UID: \"ae0038a3-7264-4263-961d-ff4bd973270e\") " Jan 20 17:32:05 crc kubenswrapper[4558]: I0120 17:32:05.258553 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae0038a3-7264-4263-961d-ff4bd973270e-config-data\") pod \"ae0038a3-7264-4263-961d-ff4bd973270e\" (UID: \"ae0038a3-7264-4263-961d-ff4bd973270e\") " Jan 20 17:32:05 crc kubenswrapper[4558]: I0120 17:32:05.258605 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ae0038a3-7264-4263-961d-ff4bd973270e-scripts\") pod \"ae0038a3-7264-4263-961d-ff4bd973270e\" (UID: \"ae0038a3-7264-4263-961d-ff4bd973270e\") " Jan 20 17:32:05 crc kubenswrapper[4558]: I0120 17:32:05.258659 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ae0038a3-7264-4263-961d-ff4bd973270e-log-httpd\") pod \"ae0038a3-7264-4263-961d-ff4bd973270e\" (UID: \"ae0038a3-7264-4263-961d-ff4bd973270e\") " Jan 20 17:32:05 crc kubenswrapper[4558]: I0120 17:32:05.258700 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ae0038a3-7264-4263-961d-ff4bd973270e-run-httpd\") pod \"ae0038a3-7264-4263-961d-ff4bd973270e\" (UID: \"ae0038a3-7264-4263-961d-ff4bd973270e\") " Jan 20 17:32:05 crc kubenswrapper[4558]: I0120 17:32:05.258725 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae0038a3-7264-4263-961d-ff4bd973270e-combined-ca-bundle\") pod \"ae0038a3-7264-4263-961d-ff4bd973270e\" (UID: \"ae0038a3-7264-4263-961d-ff4bd973270e\") " Jan 20 17:32:05 crc kubenswrapper[4558]: I0120 17:32:05.259476 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ae0038a3-7264-4263-961d-ff4bd973270e-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "ae0038a3-7264-4263-961d-ff4bd973270e" (UID: "ae0038a3-7264-4263-961d-ff4bd973270e"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:32:05 crc kubenswrapper[4558]: I0120 17:32:05.259502 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ae0038a3-7264-4263-961d-ff4bd973270e-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "ae0038a3-7264-4263-961d-ff4bd973270e" (UID: "ae0038a3-7264-4263-961d-ff4bd973270e"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:32:05 crc kubenswrapper[4558]: I0120 17:32:05.265901 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ae0038a3-7264-4263-961d-ff4bd973270e-kube-api-access-bmjnp" (OuterVolumeSpecName: "kube-api-access-bmjnp") pod "ae0038a3-7264-4263-961d-ff4bd973270e" (UID: "ae0038a3-7264-4263-961d-ff4bd973270e"). InnerVolumeSpecName "kube-api-access-bmjnp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:32:05 crc kubenswrapper[4558]: I0120 17:32:05.265919 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae0038a3-7264-4263-961d-ff4bd973270e-scripts" (OuterVolumeSpecName: "scripts") pod "ae0038a3-7264-4263-961d-ff4bd973270e" (UID: "ae0038a3-7264-4263-961d-ff4bd973270e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:32:05 crc kubenswrapper[4558]: I0120 17:32:05.280379 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae0038a3-7264-4263-961d-ff4bd973270e-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "ae0038a3-7264-4263-961d-ff4bd973270e" (UID: "ae0038a3-7264-4263-961d-ff4bd973270e"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:32:05 crc kubenswrapper[4558]: I0120 17:32:05.315067 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae0038a3-7264-4263-961d-ff4bd973270e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ae0038a3-7264-4263-961d-ff4bd973270e" (UID: "ae0038a3-7264-4263-961d-ff4bd973270e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:32:05 crc kubenswrapper[4558]: I0120 17:32:05.331101 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae0038a3-7264-4263-961d-ff4bd973270e-config-data" (OuterVolumeSpecName: "config-data") pod "ae0038a3-7264-4263-961d-ff4bd973270e" (UID: "ae0038a3-7264-4263-961d-ff4bd973270e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:32:05 crc kubenswrapper[4558]: I0120 17:32:05.360848 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bmjnp\" (UniqueName: \"kubernetes.io/projected/ae0038a3-7264-4263-961d-ff4bd973270e-kube-api-access-bmjnp\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:05 crc kubenswrapper[4558]: I0120 17:32:05.360877 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ae0038a3-7264-4263-961d-ff4bd973270e-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:05 crc kubenswrapper[4558]: I0120 17:32:05.360888 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae0038a3-7264-4263-961d-ff4bd973270e-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:05 crc kubenswrapper[4558]: I0120 17:32:05.360898 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ae0038a3-7264-4263-961d-ff4bd973270e-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:05 crc kubenswrapper[4558]: I0120 17:32:05.360907 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ae0038a3-7264-4263-961d-ff4bd973270e-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:05 crc kubenswrapper[4558]: I0120 17:32:05.360915 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ae0038a3-7264-4263-961d-ff4bd973270e-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:05 crc kubenswrapper[4558]: I0120 17:32:05.360926 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae0038a3-7264-4263-961d-ff4bd973270e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:05 crc kubenswrapper[4558]: I0120 17:32:05.976155 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"ae0038a3-7264-4263-961d-ff4bd973270e","Type":"ContainerDied","Data":"77f35fccc8aba402723b0717629f9aebee0a0ecbf33c1f5bb0a869687dac41ac"} Jan 20 17:32:05 crc kubenswrapper[4558]: I0120 17:32:05.976249 4558 scope.go:117] "RemoveContainer" containerID="e264ab3139725adb126c87a12bb8544eec8bb6f3977044a49a218713e8607e4a" Jan 20 17:32:05 crc kubenswrapper[4558]: I0120 17:32:05.977122 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:32:06 crc kubenswrapper[4558]: I0120 17:32:06.008668 4558 scope.go:117] "RemoveContainer" containerID="62879093a8dcc7d9551e46afe019ff1b446d9ad90e73eec908991c7db72c06c5" Jan 20 17:32:06 crc kubenswrapper[4558]: I0120 17:32:06.013862 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:32:06 crc kubenswrapper[4558]: I0120 17:32:06.024948 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:32:06 crc kubenswrapper[4558]: I0120 17:32:06.047038 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:32:06 crc kubenswrapper[4558]: E0120 17:32:06.047598 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae0038a3-7264-4263-961d-ff4bd973270e" containerName="ceilometer-central-agent" Jan 20 17:32:06 crc kubenswrapper[4558]: I0120 17:32:06.047622 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae0038a3-7264-4263-961d-ff4bd973270e" containerName="ceilometer-central-agent" Jan 20 17:32:06 crc kubenswrapper[4558]: E0120 17:32:06.047665 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae0038a3-7264-4263-961d-ff4bd973270e" containerName="proxy-httpd" Jan 20 17:32:06 crc kubenswrapper[4558]: I0120 17:32:06.047671 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae0038a3-7264-4263-961d-ff4bd973270e" containerName="proxy-httpd" Jan 20 17:32:06 crc kubenswrapper[4558]: E0120 17:32:06.047684 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae0038a3-7264-4263-961d-ff4bd973270e" containerName="sg-core" Jan 20 17:32:06 crc kubenswrapper[4558]: I0120 17:32:06.047690 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae0038a3-7264-4263-961d-ff4bd973270e" containerName="sg-core" Jan 20 17:32:06 crc kubenswrapper[4558]: E0120 17:32:06.047697 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae0038a3-7264-4263-961d-ff4bd973270e" containerName="ceilometer-notification-agent" Jan 20 17:32:06 crc kubenswrapper[4558]: I0120 17:32:06.047702 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae0038a3-7264-4263-961d-ff4bd973270e" containerName="ceilometer-notification-agent" Jan 20 17:32:06 crc kubenswrapper[4558]: I0120 17:32:06.047887 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ae0038a3-7264-4263-961d-ff4bd973270e" containerName="ceilometer-notification-agent" Jan 20 17:32:06 crc kubenswrapper[4558]: I0120 17:32:06.047896 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ae0038a3-7264-4263-961d-ff4bd973270e" containerName="proxy-httpd" Jan 20 17:32:06 crc kubenswrapper[4558]: I0120 17:32:06.047913 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ae0038a3-7264-4263-961d-ff4bd973270e" containerName="sg-core" Jan 20 17:32:06 crc kubenswrapper[4558]: I0120 17:32:06.047925 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ae0038a3-7264-4263-961d-ff4bd973270e" containerName="ceilometer-central-agent" Jan 20 17:32:06 crc kubenswrapper[4558]: I0120 17:32:06.048839 4558 scope.go:117] "RemoveContainer" containerID="eff67ac0c52d545cb6e84b48951c7811d3a813f3451132a6b93b2e8896f528d2" Jan 20 17:32:06 crc kubenswrapper[4558]: I0120 17:32:06.049578 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:32:06 crc kubenswrapper[4558]: I0120 17:32:06.053627 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:32:06 crc kubenswrapper[4558]: I0120 17:32:06.054200 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:32:06 crc kubenswrapper[4558]: I0120 17:32:06.067351 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:32:06 crc kubenswrapper[4558]: I0120 17:32:06.089974 4558 scope.go:117] "RemoveContainer" containerID="5cfd9ea90fd3f8cb0609173530a9c3bac6afc3a002b0b0d58a159c437eb42d76" Jan 20 17:32:06 crc kubenswrapper[4558]: I0120 17:32:06.177769 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0fa9abe5-c250-44ce-97d5-17528dd527cb-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0fa9abe5-c250-44ce-97d5-17528dd527cb\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:32:06 crc kubenswrapper[4558]: I0120 17:32:06.178557 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0fa9abe5-c250-44ce-97d5-17528dd527cb-log-httpd\") pod \"ceilometer-0\" (UID: \"0fa9abe5-c250-44ce-97d5-17528dd527cb\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:32:06 crc kubenswrapper[4558]: I0120 17:32:06.178594 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0fa9abe5-c250-44ce-97d5-17528dd527cb-scripts\") pod \"ceilometer-0\" (UID: \"0fa9abe5-c250-44ce-97d5-17528dd527cb\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:32:06 crc kubenswrapper[4558]: I0120 17:32:06.178666 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0fa9abe5-c250-44ce-97d5-17528dd527cb-run-httpd\") pod \"ceilometer-0\" (UID: \"0fa9abe5-c250-44ce-97d5-17528dd527cb\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:32:06 crc kubenswrapper[4558]: I0120 17:32:06.178736 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0fa9abe5-c250-44ce-97d5-17528dd527cb-config-data\") pod \"ceilometer-0\" (UID: \"0fa9abe5-c250-44ce-97d5-17528dd527cb\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:32:06 crc kubenswrapper[4558]: I0120 17:32:06.178778 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-djfsj\" (UniqueName: \"kubernetes.io/projected/0fa9abe5-c250-44ce-97d5-17528dd527cb-kube-api-access-djfsj\") pod \"ceilometer-0\" (UID: \"0fa9abe5-c250-44ce-97d5-17528dd527cb\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:32:06 crc kubenswrapper[4558]: I0120 17:32:06.178822 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0fa9abe5-c250-44ce-97d5-17528dd527cb-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0fa9abe5-c250-44ce-97d5-17528dd527cb\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:32:06 crc kubenswrapper[4558]: I0120 17:32:06.281143 4558 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0fa9abe5-c250-44ce-97d5-17528dd527cb-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0fa9abe5-c250-44ce-97d5-17528dd527cb\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:32:06 crc kubenswrapper[4558]: I0120 17:32:06.281262 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0fa9abe5-c250-44ce-97d5-17528dd527cb-log-httpd\") pod \"ceilometer-0\" (UID: \"0fa9abe5-c250-44ce-97d5-17528dd527cb\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:32:06 crc kubenswrapper[4558]: I0120 17:32:06.281299 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0fa9abe5-c250-44ce-97d5-17528dd527cb-scripts\") pod \"ceilometer-0\" (UID: \"0fa9abe5-c250-44ce-97d5-17528dd527cb\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:32:06 crc kubenswrapper[4558]: I0120 17:32:06.281462 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0fa9abe5-c250-44ce-97d5-17528dd527cb-run-httpd\") pod \"ceilometer-0\" (UID: \"0fa9abe5-c250-44ce-97d5-17528dd527cb\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:32:06 crc kubenswrapper[4558]: I0120 17:32:06.281500 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0fa9abe5-c250-44ce-97d5-17528dd527cb-config-data\") pod \"ceilometer-0\" (UID: \"0fa9abe5-c250-44ce-97d5-17528dd527cb\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:32:06 crc kubenswrapper[4558]: I0120 17:32:06.281596 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-djfsj\" (UniqueName: \"kubernetes.io/projected/0fa9abe5-c250-44ce-97d5-17528dd527cb-kube-api-access-djfsj\") pod \"ceilometer-0\" (UID: \"0fa9abe5-c250-44ce-97d5-17528dd527cb\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:32:06 crc kubenswrapper[4558]: I0120 17:32:06.281669 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0fa9abe5-c250-44ce-97d5-17528dd527cb-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0fa9abe5-c250-44ce-97d5-17528dd527cb\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:32:06 crc kubenswrapper[4558]: I0120 17:32:06.282466 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0fa9abe5-c250-44ce-97d5-17528dd527cb-log-httpd\") pod \"ceilometer-0\" (UID: \"0fa9abe5-c250-44ce-97d5-17528dd527cb\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:32:06 crc kubenswrapper[4558]: I0120 17:32:06.282489 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0fa9abe5-c250-44ce-97d5-17528dd527cb-run-httpd\") pod \"ceilometer-0\" (UID: \"0fa9abe5-c250-44ce-97d5-17528dd527cb\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:32:06 crc kubenswrapper[4558]: I0120 17:32:06.288298 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0fa9abe5-c250-44ce-97d5-17528dd527cb-config-data\") pod \"ceilometer-0\" (UID: \"0fa9abe5-c250-44ce-97d5-17528dd527cb\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:32:06 crc 
kubenswrapper[4558]: I0120 17:32:06.288607 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0fa9abe5-c250-44ce-97d5-17528dd527cb-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0fa9abe5-c250-44ce-97d5-17528dd527cb\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:32:06 crc kubenswrapper[4558]: I0120 17:32:06.289012 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0fa9abe5-c250-44ce-97d5-17528dd527cb-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0fa9abe5-c250-44ce-97d5-17528dd527cb\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:32:06 crc kubenswrapper[4558]: I0120 17:32:06.289255 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0fa9abe5-c250-44ce-97d5-17528dd527cb-scripts\") pod \"ceilometer-0\" (UID: \"0fa9abe5-c250-44ce-97d5-17528dd527cb\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:32:06 crc kubenswrapper[4558]: I0120 17:32:06.297308 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-djfsj\" (UniqueName: \"kubernetes.io/projected/0fa9abe5-c250-44ce-97d5-17528dd527cb-kube-api-access-djfsj\") pod \"ceilometer-0\" (UID: \"0fa9abe5-c250-44ce-97d5-17528dd527cb\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:32:06 crc kubenswrapper[4558]: I0120 17:32:06.373184 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:32:06 crc kubenswrapper[4558]: I0120 17:32:06.580574 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ae0038a3-7264-4263-961d-ff4bd973270e" path="/var/lib/kubelet/pods/ae0038a3-7264-4263-961d-ff4bd973270e/volumes" Jan 20 17:32:06 crc kubenswrapper[4558]: W0120 17:32:06.779530 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0fa9abe5_c250_44ce_97d5_17528dd527cb.slice/crio-0d1fa68b946d2d23bd99c093ea577f81d6d1e250ab188ed02e8270a1ed6e6c89 WatchSource:0}: Error finding container 0d1fa68b946d2d23bd99c093ea577f81d6d1e250ab188ed02e8270a1ed6e6c89: Status 404 returned error can't find the container with id 0d1fa68b946d2d23bd99c093ea577f81d6d1e250ab188ed02e8270a1ed6e6c89 Jan 20 17:32:06 crc kubenswrapper[4558]: I0120 17:32:06.785705 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:32:06 crc kubenswrapper[4558]: I0120 17:32:06.988865 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"0fa9abe5-c250-44ce-97d5-17528dd527cb","Type":"ContainerStarted","Data":"0d1fa68b946d2d23bd99c093ea577f81d6d1e250ab188ed02e8270a1ed6e6c89"} Jan 20 17:32:08 crc kubenswrapper[4558]: I0120 17:32:08.001198 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"0fa9abe5-c250-44ce-97d5-17528dd527cb","Type":"ContainerStarted","Data":"94acb89771ba53cc1e3c2ff43ebec406a07834ae245a8a2d429afb1cf797e2ed"} Jan 20 17:32:09 crc kubenswrapper[4558]: I0120 17:32:09.017702 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"0fa9abe5-c250-44ce-97d5-17528dd527cb","Type":"ContainerStarted","Data":"255c1473971684fbf37d5dbc876ce06c7e2d2050c60ff64fb10bd1de85cc2f28"} Jan 20 17:32:10 crc kubenswrapper[4558]: I0120 
17:32:10.043760 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"0fa9abe5-c250-44ce-97d5-17528dd527cb","Type":"ContainerStarted","Data":"ba412cf075d10c839cba475b03b73954666ef9d98b9d93b40ae3e1e0eb1beb15"} Jan 20 17:32:11 crc kubenswrapper[4558]: I0120 17:32:11.060554 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"0fa9abe5-c250-44ce-97d5-17528dd527cb","Type":"ContainerStarted","Data":"6b63c5d8df23726b95ea12ea0a38363f05fc0ca2ac135e3fa2be2b0c33f58276"} Jan 20 17:32:11 crc kubenswrapper[4558]: I0120 17:32:11.061055 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:32:11 crc kubenswrapper[4558]: I0120 17:32:11.082392 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=1.4550957709999999 podStartE2EDuration="5.082370788s" podCreationTimestamp="2026-01-20 17:32:06 +0000 UTC" firstStartedPulling="2026-01-20 17:32:06.78262353 +0000 UTC m=+3020.542961497" lastFinishedPulling="2026-01-20 17:32:10.409898557 +0000 UTC m=+3024.170236514" observedRunningTime="2026-01-20 17:32:11.081668477 +0000 UTC m=+3024.842006444" watchObservedRunningTime="2026-01-20 17:32:11.082370788 +0000 UTC m=+3024.842708755" Jan 20 17:32:11 crc kubenswrapper[4558]: I0120 17:32:11.565976 4558 scope.go:117] "RemoveContainer" containerID="f275e9f5de330b3c79316d5871106c6bcf90b698e0744c5beb28da45c336b36d" Jan 20 17:32:11 crc kubenswrapper[4558]: E0120 17:32:11.566370 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:32:13 crc kubenswrapper[4558]: I0120 17:32:13.457533 4558 scope.go:117] "RemoveContainer" containerID="83e70e7f692408ccff2187fab95da6e437b09bcb259d31a7fe0858d27643cdca" Jan 20 17:32:25 crc kubenswrapper[4558]: I0120 17:32:25.566214 4558 scope.go:117] "RemoveContainer" containerID="f275e9f5de330b3c79316d5871106c6bcf90b698e0744c5beb28da45c336b36d" Jan 20 17:32:25 crc kubenswrapper[4558]: E0120 17:32:25.567121 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:32:36 crc kubenswrapper[4558]: I0120 17:32:36.379611 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:32:36 crc kubenswrapper[4558]: I0120 17:32:36.571194 4558 scope.go:117] "RemoveContainer" containerID="f275e9f5de330b3c79316d5871106c6bcf90b698e0744c5beb28da45c336b36d" Jan 20 17:32:36 crc kubenswrapper[4558]: E0120 17:32:36.571669 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.039791 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-0"] Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.121519 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-cell-mapping-269vx"] Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.129575 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-cell-mapping-269vx"] Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.135357 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.135655 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" podUID="7d6b9532-22b5-41c9-8b15-83a9c515c52d" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://68de8de2f4680620ba6e5b236fe7d12d5fb76c8413d11297d94efa48e9761c9e" gracePeriod=30 Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.140858 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.141082 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podUID="a7f2bbf6-acbc-487a-a2ea-08e49a055eb5" containerName="nova-cell1-conductor-conductor" containerID="cri-o://3fc2c01e810b816844a3e05f96c2ab94fb7c1503af2abfcc136c12d107ebb7de" gracePeriod=30 Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.276594 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-cell-mapping-nbrcj"] Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.278268 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-nbrcj" Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.280302 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-manage-scripts" Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.280449 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-manage-config-data" Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.298584 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-cell-mapping-nbrcj"] Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.334660 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wbn5n\" (UniqueName: \"kubernetes.io/projected/9ab93517-8f51-467d-b830-16030668be2b-kube-api-access-wbn5n\") pod \"nova-cell1-cell-mapping-nbrcj\" (UID: \"9ab93517-8f51-467d-b830-16030668be2b\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-nbrcj" Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.334896 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ab93517-8f51-467d-b830-16030668be2b-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-nbrcj\" (UID: \"9ab93517-8f51-467d-b830-16030668be2b\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-nbrcj" Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.334961 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9ab93517-8f51-467d-b830-16030668be2b-scripts\") pod \"nova-cell1-cell-mapping-nbrcj\" (UID: \"9ab93517-8f51-467d-b830-16030668be2b\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-nbrcj" Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.335012 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9ab93517-8f51-467d-b830-16030668be2b-config-data\") pod \"nova-cell1-cell-mapping-nbrcj\" (UID: \"9ab93517-8f51-467d-b830-16030668be2b\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-nbrcj" Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.436230 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ab93517-8f51-467d-b830-16030668be2b-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-nbrcj\" (UID: \"9ab93517-8f51-467d-b830-16030668be2b\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-nbrcj" Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.436278 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9ab93517-8f51-467d-b830-16030668be2b-scripts\") pod \"nova-cell1-cell-mapping-nbrcj\" (UID: \"9ab93517-8f51-467d-b830-16030668be2b\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-nbrcj" Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.436307 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9ab93517-8f51-467d-b830-16030668be2b-config-data\") pod \"nova-cell1-cell-mapping-nbrcj\" (UID: \"9ab93517-8f51-467d-b830-16030668be2b\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-nbrcj" Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 
17:32:46.436371 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wbn5n\" (UniqueName: \"kubernetes.io/projected/9ab93517-8f51-467d-b830-16030668be2b-kube-api-access-wbn5n\") pod \"nova-cell1-cell-mapping-nbrcj\" (UID: \"9ab93517-8f51-467d-b830-16030668be2b\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-nbrcj" Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.446869 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ab93517-8f51-467d-b830-16030668be2b-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-nbrcj\" (UID: \"9ab93517-8f51-467d-b830-16030668be2b\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-nbrcj" Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.448781 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9ab93517-8f51-467d-b830-16030668be2b-config-data\") pod \"nova-cell1-cell-mapping-nbrcj\" (UID: \"9ab93517-8f51-467d-b830-16030668be2b\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-nbrcj" Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.453794 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.459724 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wbn5n\" (UniqueName: \"kubernetes.io/projected/9ab93517-8f51-467d-b830-16030668be2b-kube-api-access-wbn5n\") pod \"nova-cell1-cell-mapping-nbrcj\" (UID: \"9ab93517-8f51-467d-b830-16030668be2b\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-nbrcj" Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.459734 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9ab93517-8f51-467d-b830-16030668be2b-scripts\") pod \"nova-cell1-cell-mapping-nbrcj\" (UID: \"9ab93517-8f51-467d-b830-16030668be2b\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-nbrcj" Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.530324 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.530537 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-scheduler-0" podUID="568979fa-537c-45c5-952c-f87a40b194ef" containerName="cinder-scheduler" containerID="cri-o://1395db81401be1f584e96686dac08b81079dc9377f0f67ac885fa17b47a87685" gracePeriod=30 Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.530915 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-scheduler-0" podUID="568979fa-537c-45c5-952c-f87a40b194ef" containerName="probe" containerID="cri-o://b2cc3eb7df47c10e18cf97ab2f8a7780ca8a8e35c8a8c749aa1b4a85dd0a58ad" gracePeriod=30 Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.610551 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-nbrcj" Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.627383 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6b422242-a880-40dd-b972-5c5c05cb12c5" path="/var/lib/kubelet/pods/6b422242-a880-40dd-b972-5c5c05cb12c5/volumes" Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.654925 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-5459fb5f8-h5bbp"] Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.677718 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-5459fb5f8-h5bbp" Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.679579 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-56458f747-wvqtq"] Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.683867 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-56458f747-wvqtq" Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.689514 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-5459fb5f8-h5bbp"] Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.694557 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-worker-777f6887b5-wwl56"] Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.695918 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-777f6887b5-wwl56" Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.747724 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-56458f747-wvqtq"] Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.798358 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-worker-777f6887b5-wwl56"] Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.834921 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.835139 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-api-0" podUID="63df8460-ab49-4787-92a7-54a14c0179ca" containerName="cinder-api-log" containerID="cri-o://5f3ae803d524ce3cd6aaa39d2f91f748bd07e8465458e550a0da0000266138f6" gracePeriod=30 Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.835292 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-api-0" podUID="63df8460-ab49-4787-92a7-54a14c0179ca" containerName="cinder-api" containerID="cri-o://226374bee93280652306e72191bcd834be88cb52e519757b561baa0a32c4b976" gracePeriod=30 Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.854938 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a-combined-ca-bundle\") pod \"barbican-keystone-listener-5459fb5f8-h5bbp\" (UID: \"d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a\") " pod="openstack-kuttl-tests/barbican-keystone-listener-5459fb5f8-h5bbp" Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.854996 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g62sk\" (UniqueName: 
\"kubernetes.io/projected/aa7956fe-8f61-4e0d-86c1-716e300c4059-kube-api-access-g62sk\") pod \"keystone-56458f747-wvqtq\" (UID: \"aa7956fe-8f61-4e0d-86c1-716e300c4059\") " pod="openstack-kuttl-tests/keystone-56458f747-wvqtq" Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.855025 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-spt7n\" (UniqueName: \"kubernetes.io/projected/d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a-kube-api-access-spt7n\") pod \"barbican-keystone-listener-5459fb5f8-h5bbp\" (UID: \"d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a\") " pod="openstack-kuttl-tests/barbican-keystone-listener-5459fb5f8-h5bbp" Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.855048 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e0294130-264e-4fa0-b336-72c350ea61cc-config-data-custom\") pod \"barbican-worker-777f6887b5-wwl56\" (UID: \"e0294130-264e-4fa0-b336-72c350ea61cc\") " pod="openstack-kuttl-tests/barbican-worker-777f6887b5-wwl56" Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.855083 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/aa7956fe-8f61-4e0d-86c1-716e300c4059-fernet-keys\") pod \"keystone-56458f747-wvqtq\" (UID: \"aa7956fe-8f61-4e0d-86c1-716e300c4059\") " pod="openstack-kuttl-tests/keystone-56458f747-wvqtq" Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.855103 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a-config-data\") pod \"barbican-keystone-listener-5459fb5f8-h5bbp\" (UID: \"d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a\") " pod="openstack-kuttl-tests/barbican-keystone-listener-5459fb5f8-h5bbp" Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.855128 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0294130-264e-4fa0-b336-72c350ea61cc-config-data\") pod \"barbican-worker-777f6887b5-wwl56\" (UID: \"e0294130-264e-4fa0-b336-72c350ea61cc\") " pod="openstack-kuttl-tests/barbican-worker-777f6887b5-wwl56" Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.855179 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa7956fe-8f61-4e0d-86c1-716e300c4059-combined-ca-bundle\") pod \"keystone-56458f747-wvqtq\" (UID: \"aa7956fe-8f61-4e0d-86c1-716e300c4059\") " pod="openstack-kuttl-tests/keystone-56458f747-wvqtq" Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.855207 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/aa7956fe-8f61-4e0d-86c1-716e300c4059-credential-keys\") pod \"keystone-56458f747-wvqtq\" (UID: \"aa7956fe-8f61-4e0d-86c1-716e300c4059\") " pod="openstack-kuttl-tests/keystone-56458f747-wvqtq" Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.855321 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0294130-264e-4fa0-b336-72c350ea61cc-combined-ca-bundle\") pod \"barbican-worker-777f6887b5-wwl56\" (UID: 
\"e0294130-264e-4fa0-b336-72c350ea61cc\") " pod="openstack-kuttl-tests/barbican-worker-777f6887b5-wwl56" Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.855501 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa7956fe-8f61-4e0d-86c1-716e300c4059-scripts\") pod \"keystone-56458f747-wvqtq\" (UID: \"aa7956fe-8f61-4e0d-86c1-716e300c4059\") " pod="openstack-kuttl-tests/keystone-56458f747-wvqtq" Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.855582 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a-logs\") pod \"barbican-keystone-listener-5459fb5f8-h5bbp\" (UID: \"d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a\") " pod="openstack-kuttl-tests/barbican-keystone-listener-5459fb5f8-h5bbp" Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.855624 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a-config-data-custom\") pod \"barbican-keystone-listener-5459fb5f8-h5bbp\" (UID: \"d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a\") " pod="openstack-kuttl-tests/barbican-keystone-listener-5459fb5f8-h5bbp" Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.855742 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2lpkg\" (UniqueName: \"kubernetes.io/projected/e0294130-264e-4fa0-b336-72c350ea61cc-kube-api-access-2lpkg\") pod \"barbican-worker-777f6887b5-wwl56\" (UID: \"e0294130-264e-4fa0-b336-72c350ea61cc\") " pod="openstack-kuttl-tests/barbican-worker-777f6887b5-wwl56" Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.855778 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e0294130-264e-4fa0-b336-72c350ea61cc-logs\") pod \"barbican-worker-777f6887b5-wwl56\" (UID: \"e0294130-264e-4fa0-b336-72c350ea61cc\") " pod="openstack-kuttl-tests/barbican-worker-777f6887b5-wwl56" Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.855830 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa7956fe-8f61-4e0d-86c1-716e300c4059-config-data\") pod \"keystone-56458f747-wvqtq\" (UID: \"aa7956fe-8f61-4e0d-86c1-716e300c4059\") " pod="openstack-kuttl-tests/keystone-56458f747-wvqtq" Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.959053 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.959362 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="eb4881e5-cce6-4423-a9f4-d93f96a2ccb1" containerName="nova-metadata-log" containerID="cri-o://11026a4823ee019937da1b5d44ee5848da1509d7fbc7958ed8712c4c0b58b33b" gracePeriod=30 Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.959472 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0294130-264e-4fa0-b336-72c350ea61cc-config-data\") pod \"barbican-worker-777f6887b5-wwl56\" (UID: \"e0294130-264e-4fa0-b336-72c350ea61cc\") " 
pod="openstack-kuttl-tests/barbican-worker-777f6887b5-wwl56" Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.959573 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa7956fe-8f61-4e0d-86c1-716e300c4059-combined-ca-bundle\") pod \"keystone-56458f747-wvqtq\" (UID: \"aa7956fe-8f61-4e0d-86c1-716e300c4059\") " pod="openstack-kuttl-tests/keystone-56458f747-wvqtq" Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.959620 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/aa7956fe-8f61-4e0d-86c1-716e300c4059-credential-keys\") pod \"keystone-56458f747-wvqtq\" (UID: \"aa7956fe-8f61-4e0d-86c1-716e300c4059\") " pod="openstack-kuttl-tests/keystone-56458f747-wvqtq" Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.959650 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0294130-264e-4fa0-b336-72c350ea61cc-combined-ca-bundle\") pod \"barbican-worker-777f6887b5-wwl56\" (UID: \"e0294130-264e-4fa0-b336-72c350ea61cc\") " pod="openstack-kuttl-tests/barbican-worker-777f6887b5-wwl56" Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.959695 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa7956fe-8f61-4e0d-86c1-716e300c4059-scripts\") pod \"keystone-56458f747-wvqtq\" (UID: \"aa7956fe-8f61-4e0d-86c1-716e300c4059\") " pod="openstack-kuttl-tests/keystone-56458f747-wvqtq" Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.959737 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a-logs\") pod \"barbican-keystone-listener-5459fb5f8-h5bbp\" (UID: \"d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a\") " pod="openstack-kuttl-tests/barbican-keystone-listener-5459fb5f8-h5bbp" Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.959765 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a-config-data-custom\") pod \"barbican-keystone-listener-5459fb5f8-h5bbp\" (UID: \"d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a\") " pod="openstack-kuttl-tests/barbican-keystone-listener-5459fb5f8-h5bbp" Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.959845 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2lpkg\" (UniqueName: \"kubernetes.io/projected/e0294130-264e-4fa0-b336-72c350ea61cc-kube-api-access-2lpkg\") pod \"barbican-worker-777f6887b5-wwl56\" (UID: \"e0294130-264e-4fa0-b336-72c350ea61cc\") " pod="openstack-kuttl-tests/barbican-worker-777f6887b5-wwl56" Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.959879 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e0294130-264e-4fa0-b336-72c350ea61cc-logs\") pod \"barbican-worker-777f6887b5-wwl56\" (UID: \"e0294130-264e-4fa0-b336-72c350ea61cc\") " pod="openstack-kuttl-tests/barbican-worker-777f6887b5-wwl56" Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.959879 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="eb4881e5-cce6-4423-a9f4-d93f96a2ccb1" 
containerName="nova-metadata-metadata" containerID="cri-o://c01c9738249c284d39a49949a1689663cc221dbc3ee6a65254441d2704e98b09" gracePeriod=30 Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.959923 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa7956fe-8f61-4e0d-86c1-716e300c4059-config-data\") pod \"keystone-56458f747-wvqtq\" (UID: \"aa7956fe-8f61-4e0d-86c1-716e300c4059\") " pod="openstack-kuttl-tests/keystone-56458f747-wvqtq" Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.959979 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a-combined-ca-bundle\") pod \"barbican-keystone-listener-5459fb5f8-h5bbp\" (UID: \"d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a\") " pod="openstack-kuttl-tests/barbican-keystone-listener-5459fb5f8-h5bbp" Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.960035 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g62sk\" (UniqueName: \"kubernetes.io/projected/aa7956fe-8f61-4e0d-86c1-716e300c4059-kube-api-access-g62sk\") pod \"keystone-56458f747-wvqtq\" (UID: \"aa7956fe-8f61-4e0d-86c1-716e300c4059\") " pod="openstack-kuttl-tests/keystone-56458f747-wvqtq" Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.960071 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-spt7n\" (UniqueName: \"kubernetes.io/projected/d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a-kube-api-access-spt7n\") pod \"barbican-keystone-listener-5459fb5f8-h5bbp\" (UID: \"d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a\") " pod="openstack-kuttl-tests/barbican-keystone-listener-5459fb5f8-h5bbp" Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.960097 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e0294130-264e-4fa0-b336-72c350ea61cc-config-data-custom\") pod \"barbican-worker-777f6887b5-wwl56\" (UID: \"e0294130-264e-4fa0-b336-72c350ea61cc\") " pod="openstack-kuttl-tests/barbican-worker-777f6887b5-wwl56" Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.960138 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/aa7956fe-8f61-4e0d-86c1-716e300c4059-fernet-keys\") pod \"keystone-56458f747-wvqtq\" (UID: \"aa7956fe-8f61-4e0d-86c1-716e300c4059\") " pod="openstack-kuttl-tests/keystone-56458f747-wvqtq" Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.960160 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a-config-data\") pod \"barbican-keystone-listener-5459fb5f8-h5bbp\" (UID: \"d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a\") " pod="openstack-kuttl-tests/barbican-keystone-listener-5459fb5f8-h5bbp" Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.972465 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a-logs\") pod \"barbican-keystone-listener-5459fb5f8-h5bbp\" (UID: \"d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a\") " pod="openstack-kuttl-tests/barbican-keystone-listener-5459fb5f8-h5bbp" Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.973105 4558 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e0294130-264e-4fa0-b336-72c350ea61cc-logs\") pod \"barbican-worker-777f6887b5-wwl56\" (UID: \"e0294130-264e-4fa0-b336-72c350ea61cc\") " pod="openstack-kuttl-tests/barbican-worker-777f6887b5-wwl56" Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.987690 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a-combined-ca-bundle\") pod \"barbican-keystone-listener-5459fb5f8-h5bbp\" (UID: \"d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a\") " pod="openstack-kuttl-tests/barbican-keystone-listener-5459fb5f8-h5bbp" Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.987697 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-spt7n\" (UniqueName: \"kubernetes.io/projected/d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a-kube-api-access-spt7n\") pod \"barbican-keystone-listener-5459fb5f8-h5bbp\" (UID: \"d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a\") " pod="openstack-kuttl-tests/barbican-keystone-listener-5459fb5f8-h5bbp" Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.990831 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e0294130-264e-4fa0-b336-72c350ea61cc-config-data-custom\") pod \"barbican-worker-777f6887b5-wwl56\" (UID: \"e0294130-264e-4fa0-b336-72c350ea61cc\") " pod="openstack-kuttl-tests/barbican-worker-777f6887b5-wwl56" Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.992337 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/aa7956fe-8f61-4e0d-86c1-716e300c4059-credential-keys\") pod \"keystone-56458f747-wvqtq\" (UID: \"aa7956fe-8f61-4e0d-86c1-716e300c4059\") " pod="openstack-kuttl-tests/keystone-56458f747-wvqtq" Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.992611 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a-config-data\") pod \"barbican-keystone-listener-5459fb5f8-h5bbp\" (UID: \"d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a\") " pod="openstack-kuttl-tests/barbican-keystone-listener-5459fb5f8-h5bbp" Jan 20 17:32:46 crc kubenswrapper[4558]: I0120 17:32:46.999833 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa7956fe-8f61-4e0d-86c1-716e300c4059-combined-ca-bundle\") pod \"keystone-56458f747-wvqtq\" (UID: \"aa7956fe-8f61-4e0d-86c1-716e300c4059\") " pod="openstack-kuttl-tests/keystone-56458f747-wvqtq" Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.000345 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0294130-264e-4fa0-b336-72c350ea61cc-combined-ca-bundle\") pod \"barbican-worker-777f6887b5-wwl56\" (UID: \"e0294130-264e-4fa0-b336-72c350ea61cc\") " pod="openstack-kuttl-tests/barbican-worker-777f6887b5-wwl56" Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.003209 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-api-6d8c858488-xrdwn"] Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.004850 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-api-6d8c858488-xrdwn" Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.006644 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a-config-data-custom\") pod \"barbican-keystone-listener-5459fb5f8-h5bbp\" (UID: \"d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a\") " pod="openstack-kuttl-tests/barbican-keystone-listener-5459fb5f8-h5bbp" Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.006854 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa7956fe-8f61-4e0d-86c1-716e300c4059-scripts\") pod \"keystone-56458f747-wvqtq\" (UID: \"aa7956fe-8f61-4e0d-86c1-716e300c4059\") " pod="openstack-kuttl-tests/keystone-56458f747-wvqtq" Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.009491 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/aa7956fe-8f61-4e0d-86c1-716e300c4059-fernet-keys\") pod \"keystone-56458f747-wvqtq\" (UID: \"aa7956fe-8f61-4e0d-86c1-716e300c4059\") " pod="openstack-kuttl-tests/keystone-56458f747-wvqtq" Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.009851 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0294130-264e-4fa0-b336-72c350ea61cc-config-data\") pod \"barbican-worker-777f6887b5-wwl56\" (UID: \"e0294130-264e-4fa0-b336-72c350ea61cc\") " pod="openstack-kuttl-tests/barbican-worker-777f6887b5-wwl56" Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.011717 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g62sk\" (UniqueName: \"kubernetes.io/projected/aa7956fe-8f61-4e0d-86c1-716e300c4059-kube-api-access-g62sk\") pod \"keystone-56458f747-wvqtq\" (UID: \"aa7956fe-8f61-4e0d-86c1-716e300c4059\") " pod="openstack-kuttl-tests/keystone-56458f747-wvqtq" Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.012287 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2lpkg\" (UniqueName: \"kubernetes.io/projected/e0294130-264e-4fa0-b336-72c350ea61cc-kube-api-access-2lpkg\") pod \"barbican-worker-777f6887b5-wwl56\" (UID: \"e0294130-264e-4fa0-b336-72c350ea61cc\") " pod="openstack-kuttl-tests/barbican-worker-777f6887b5-wwl56" Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.015011 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa7956fe-8f61-4e0d-86c1-716e300c4059-config-data\") pod \"keystone-56458f747-wvqtq\" (UID: \"aa7956fe-8f61-4e0d-86c1-716e300c4059\") " pod="openstack-kuttl-tests/keystone-56458f747-wvqtq" Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.033103 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-5459fb5f8-h5bbp" Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.041790 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.042609 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="7aa6c24a-66a5-4b11-96a1-4d14001d1f5c" containerName="nova-scheduler-scheduler" containerID="cri-o://075fdf51fe78df21a6645f555dba48d1e1068aaccdaf9394e743e0397b0f8c25" gracePeriod=30 Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.056583 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-api-6d8c858488-xrdwn"] Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.069352 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-56458f747-wvqtq" Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.073280 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.073554 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="623ea472-e0f5-48c1-a621-89fe343686ff" containerName="nova-api-log" containerID="cri-o://90a23fb16c326ed369750450259a15b6eb7aa5fc9dc6005c29c9b1e37c002a27" gracePeriod=30 Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.074073 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="623ea472-e0f5-48c1-a621-89fe343686ff" containerName="nova-api-api" containerID="cri-o://8383f65df719eb3597f000c97c3244e2dda5aeac1cb0ef54093fbcf77af4f6b2" gracePeriod=30 Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.079210 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-777f6887b5-wwl56" Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.096333 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-75b5f89ffd-dsnd8"] Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.098203 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-75b5f89ffd-dsnd8" Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.105754 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-75b5f89ffd-dsnd8"] Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.165343 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0062240-1ed7-488f-ac15-d3c5e0812ccd-combined-ca-bundle\") pod \"barbican-api-6d8c858488-xrdwn\" (UID: \"d0062240-1ed7-488f-ac15-d3c5e0812ccd\") " pod="openstack-kuttl-tests/barbican-api-6d8c858488-xrdwn" Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.165444 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0062240-1ed7-488f-ac15-d3c5e0812ccd-config-data\") pod \"barbican-api-6d8c858488-xrdwn\" (UID: \"d0062240-1ed7-488f-ac15-d3c5e0812ccd\") " pod="openstack-kuttl-tests/barbican-api-6d8c858488-xrdwn" Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.165526 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d0062240-1ed7-488f-ac15-d3c5e0812ccd-logs\") pod \"barbican-api-6d8c858488-xrdwn\" (UID: \"d0062240-1ed7-488f-ac15-d3c5e0812ccd\") " pod="openstack-kuttl-tests/barbican-api-6d8c858488-xrdwn" Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.165586 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k6dxk\" (UniqueName: \"kubernetes.io/projected/d0062240-1ed7-488f-ac15-d3c5e0812ccd-kube-api-access-k6dxk\") pod \"barbican-api-6d8c858488-xrdwn\" (UID: \"d0062240-1ed7-488f-ac15-d3c5e0812ccd\") " pod="openstack-kuttl-tests/barbican-api-6d8c858488-xrdwn" Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.165653 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d0062240-1ed7-488f-ac15-d3c5e0812ccd-config-data-custom\") pod \"barbican-api-6d8c858488-xrdwn\" (UID: \"d0062240-1ed7-488f-ac15-d3c5e0812ccd\") " pod="openstack-kuttl-tests/barbican-api-6d8c858488-xrdwn" Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.271484 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0062240-1ed7-488f-ac15-d3c5e0812ccd-config-data\") pod \"barbican-api-6d8c858488-xrdwn\" (UID: \"d0062240-1ed7-488f-ac15-d3c5e0812ccd\") " pod="openstack-kuttl-tests/barbican-api-6d8c858488-xrdwn" Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.271539 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/e8c90851-f646-4a0c-8c0c-7ecac86fa5a7-config\") pod \"neutron-75b5f89ffd-dsnd8\" (UID: \"e8c90851-f646-4a0c-8c0c-7ecac86fa5a7\") " pod="openstack-kuttl-tests/neutron-75b5f89ffd-dsnd8" Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.271654 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d0062240-1ed7-488f-ac15-d3c5e0812ccd-logs\") pod \"barbican-api-6d8c858488-xrdwn\" (UID: \"d0062240-1ed7-488f-ac15-d3c5e0812ccd\") " pod="openstack-kuttl-tests/barbican-api-6d8c858488-xrdwn" Jan 20 17:32:47 crc 
kubenswrapper[4558]: I0120 17:32:47.271695 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r5p5r\" (UniqueName: \"kubernetes.io/projected/e8c90851-f646-4a0c-8c0c-7ecac86fa5a7-kube-api-access-r5p5r\") pod \"neutron-75b5f89ffd-dsnd8\" (UID: \"e8c90851-f646-4a0c-8c0c-7ecac86fa5a7\") " pod="openstack-kuttl-tests/neutron-75b5f89ffd-dsnd8" Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.271745 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k6dxk\" (UniqueName: \"kubernetes.io/projected/d0062240-1ed7-488f-ac15-d3c5e0812ccd-kube-api-access-k6dxk\") pod \"barbican-api-6d8c858488-xrdwn\" (UID: \"d0062240-1ed7-488f-ac15-d3c5e0812ccd\") " pod="openstack-kuttl-tests/barbican-api-6d8c858488-xrdwn" Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.271836 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d0062240-1ed7-488f-ac15-d3c5e0812ccd-config-data-custom\") pod \"barbican-api-6d8c858488-xrdwn\" (UID: \"d0062240-1ed7-488f-ac15-d3c5e0812ccd\") " pod="openstack-kuttl-tests/barbican-api-6d8c858488-xrdwn" Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.271877 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8c90851-f646-4a0c-8c0c-7ecac86fa5a7-combined-ca-bundle\") pod \"neutron-75b5f89ffd-dsnd8\" (UID: \"e8c90851-f646-4a0c-8c0c-7ecac86fa5a7\") " pod="openstack-kuttl-tests/neutron-75b5f89ffd-dsnd8" Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.271997 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0062240-1ed7-488f-ac15-d3c5e0812ccd-combined-ca-bundle\") pod \"barbican-api-6d8c858488-xrdwn\" (UID: \"d0062240-1ed7-488f-ac15-d3c5e0812ccd\") " pod="openstack-kuttl-tests/barbican-api-6d8c858488-xrdwn" Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.272054 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/e8c90851-f646-4a0c-8c0c-7ecac86fa5a7-httpd-config\") pod \"neutron-75b5f89ffd-dsnd8\" (UID: \"e8c90851-f646-4a0c-8c0c-7ecac86fa5a7\") " pod="openstack-kuttl-tests/neutron-75b5f89ffd-dsnd8" Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.287692 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0062240-1ed7-488f-ac15-d3c5e0812ccd-config-data\") pod \"barbican-api-6d8c858488-xrdwn\" (UID: \"d0062240-1ed7-488f-ac15-d3c5e0812ccd\") " pod="openstack-kuttl-tests/barbican-api-6d8c858488-xrdwn" Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.288134 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d0062240-1ed7-488f-ac15-d3c5e0812ccd-logs\") pod \"barbican-api-6d8c858488-xrdwn\" (UID: \"d0062240-1ed7-488f-ac15-d3c5e0812ccd\") " pod="openstack-kuttl-tests/barbican-api-6d8c858488-xrdwn" Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.297535 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d0062240-1ed7-488f-ac15-d3c5e0812ccd-config-data-custom\") pod \"barbican-api-6d8c858488-xrdwn\" (UID: 
\"d0062240-1ed7-488f-ac15-d3c5e0812ccd\") " pod="openstack-kuttl-tests/barbican-api-6d8c858488-xrdwn" Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.299607 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0062240-1ed7-488f-ac15-d3c5e0812ccd-combined-ca-bundle\") pod \"barbican-api-6d8c858488-xrdwn\" (UID: \"d0062240-1ed7-488f-ac15-d3c5e0812ccd\") " pod="openstack-kuttl-tests/barbican-api-6d8c858488-xrdwn" Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.323636 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k6dxk\" (UniqueName: \"kubernetes.io/projected/d0062240-1ed7-488f-ac15-d3c5e0812ccd-kube-api-access-k6dxk\") pod \"barbican-api-6d8c858488-xrdwn\" (UID: \"d0062240-1ed7-488f-ac15-d3c5e0812ccd\") " pod="openstack-kuttl-tests/barbican-api-6d8c858488-xrdwn" Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.354697 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-api-6d8c858488-xrdwn" Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.372156 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.372361 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/memcached-0" podUID="39e9c81e-2923-472a-b892-5404d32842bb" containerName="memcached" containerID="cri-o://2125c4c8f812531a8911dde13a1a8e1cc94d924088d9043e03d8a2554aea8fba" gracePeriod=30 Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.375068 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/e8c90851-f646-4a0c-8c0c-7ecac86fa5a7-httpd-config\") pod \"neutron-75b5f89ffd-dsnd8\" (UID: \"e8c90851-f646-4a0c-8c0c-7ecac86fa5a7\") " pod="openstack-kuttl-tests/neutron-75b5f89ffd-dsnd8" Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.375116 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/e8c90851-f646-4a0c-8c0c-7ecac86fa5a7-config\") pod \"neutron-75b5f89ffd-dsnd8\" (UID: \"e8c90851-f646-4a0c-8c0c-7ecac86fa5a7\") " pod="openstack-kuttl-tests/neutron-75b5f89ffd-dsnd8" Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.375191 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r5p5r\" (UniqueName: \"kubernetes.io/projected/e8c90851-f646-4a0c-8c0c-7ecac86fa5a7-kube-api-access-r5p5r\") pod \"neutron-75b5f89ffd-dsnd8\" (UID: \"e8c90851-f646-4a0c-8c0c-7ecac86fa5a7\") " pod="openstack-kuttl-tests/neutron-75b5f89ffd-dsnd8" Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.375246 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8c90851-f646-4a0c-8c0c-7ecac86fa5a7-combined-ca-bundle\") pod \"neutron-75b5f89ffd-dsnd8\" (UID: \"e8c90851-f646-4a0c-8c0c-7ecac86fa5a7\") " pod="openstack-kuttl-tests/neutron-75b5f89ffd-dsnd8" Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.384852 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/e8c90851-f646-4a0c-8c0c-7ecac86fa5a7-config\") pod \"neutron-75b5f89ffd-dsnd8\" (UID: \"e8c90851-f646-4a0c-8c0c-7ecac86fa5a7\") " pod="openstack-kuttl-tests/neutron-75b5f89ffd-dsnd8" Jan 
20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.388675 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8c90851-f646-4a0c-8c0c-7ecac86fa5a7-combined-ca-bundle\") pod \"neutron-75b5f89ffd-dsnd8\" (UID: \"e8c90851-f646-4a0c-8c0c-7ecac86fa5a7\") " pod="openstack-kuttl-tests/neutron-75b5f89ffd-dsnd8" Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.390812 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/e8c90851-f646-4a0c-8c0c-7ecac86fa5a7-httpd-config\") pod \"neutron-75b5f89ffd-dsnd8\" (UID: \"e8c90851-f646-4a0c-8c0c-7ecac86fa5a7\") " pod="openstack-kuttl-tests/neutron-75b5f89ffd-dsnd8" Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.410822 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r5p5r\" (UniqueName: \"kubernetes.io/projected/e8c90851-f646-4a0c-8c0c-7ecac86fa5a7-kube-api-access-r5p5r\") pod \"neutron-75b5f89ffd-dsnd8\" (UID: \"e8c90851-f646-4a0c-8c0c-7ecac86fa5a7\") " pod="openstack-kuttl-tests/neutron-75b5f89ffd-dsnd8" Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.431880 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-cell-mapping-nbrcj"] Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.433743 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-75b5f89ffd-dsnd8" Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.442362 4558 generic.go:334] "Generic (PLEG): container finished" podID="7d6b9532-22b5-41c9-8b15-83a9c515c52d" containerID="68de8de2f4680620ba6e5b236fe7d12d5fb76c8413d11297d94efa48e9761c9e" exitCode=0 Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.442509 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"7d6b9532-22b5-41c9-8b15-83a9c515c52d","Type":"ContainerDied","Data":"68de8de2f4680620ba6e5b236fe7d12d5fb76c8413d11297d94efa48e9761c9e"} Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.483444 4558 generic.go:334] "Generic (PLEG): container finished" podID="63df8460-ab49-4787-92a7-54a14c0179ca" containerID="5f3ae803d524ce3cd6aaa39d2f91f748bd07e8465458e550a0da0000266138f6" exitCode=143 Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.483845 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"63df8460-ab49-4787-92a7-54a14c0179ca","Type":"ContainerDied","Data":"5f3ae803d524ce3cd6aaa39d2f91f748bd07e8465458e550a0da0000266138f6"} Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.499325 4558 generic.go:334] "Generic (PLEG): container finished" podID="623ea472-e0f5-48c1-a621-89fe343686ff" containerID="90a23fb16c326ed369750450259a15b6eb7aa5fc9dc6005c29c9b1e37c002a27" exitCode=143 Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.499386 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"623ea472-e0f5-48c1-a621-89fe343686ff","Type":"ContainerDied","Data":"90a23fb16c326ed369750450259a15b6eb7aa5fc9dc6005c29c9b1e37c002a27"} Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.502632 4558 generic.go:334] "Generic (PLEG): container finished" podID="eb4881e5-cce6-4423-a9f4-d93f96a2ccb1" containerID="11026a4823ee019937da1b5d44ee5848da1509d7fbc7958ed8712c4c0b58b33b" exitCode=143 Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 
17:32:47.502659 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"eb4881e5-cce6-4423-a9f4-d93f96a2ccb1","Type":"ContainerDied","Data":"11026a4823ee019937da1b5d44ee5848da1509d7fbc7958ed8712c4c0b58b33b"} Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.638511 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.638722 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="59e7ef92-3d74-4334-ac19-671eb3199d0b" containerName="glance-log" containerID="cri-o://85d12510c1da886d4ee25d1c28a63fdd18e54d2a19e1791fb8e0c9a0fa49fb05" gracePeriod=30 Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.639073 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="59e7ef92-3d74-4334-ac19-671eb3199d0b" containerName="glance-httpd" containerID="cri-o://40ef3c5a952862f3e1bf0e9c9f5e2c665b77d0d4387f66b4456d7a0b64be3cbf" gracePeriod=30 Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.719889 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.740183 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-5459fb5f8-h5bbp"] Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.814733 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-storage-0"] Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.815137 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="8c74a8f5-be12-4ac4-b45c-d459801927ea" containerName="account-server" containerID="cri-o://33d41f8923ec28aa7b94229f05d825972526de6642700e3a9555dbc7f7a6e7ce" gracePeriod=30 Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.815486 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="8c74a8f5-be12-4ac4-b45c-d459801927ea" containerName="swift-recon-cron" containerID="cri-o://440ae9e1e2d1bfffd629b66e5f2b1ad26abca913a264c8ed449f65a224346b30" gracePeriod=30 Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.815527 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="8c74a8f5-be12-4ac4-b45c-d459801927ea" containerName="rsync" containerID="cri-o://e84993808b2bc5f85b1ae50493298e6a10eb7c3fcbd64da879a0e7a937ba17e1" gracePeriod=30 Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.815560 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="8c74a8f5-be12-4ac4-b45c-d459801927ea" containerName="object-expirer" containerID="cri-o://6bd5c4f64f7527d8728149bf64ddbf3f8288b598bd1840a6821fe48789fce821" gracePeriod=30 Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.815589 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="8c74a8f5-be12-4ac4-b45c-d459801927ea" containerName="object-updater" containerID="cri-o://642fe8d6d1e22629c1ee44fe92b12d3105252f0e570aa04c45e104ebe419f7ad" gracePeriod=30 Jan 20 17:32:47 crc 
kubenswrapper[4558]: I0120 17:32:47.815622 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="8c74a8f5-be12-4ac4-b45c-d459801927ea" containerName="object-auditor" containerID="cri-o://49db71c25f4101de9d35070da8e88df17b13463d85f4085b268b15d6f2fd0010" gracePeriod=30 Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.815659 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="8c74a8f5-be12-4ac4-b45c-d459801927ea" containerName="object-replicator" containerID="cri-o://b1a440452721f6300aa80e2b85895305c5e163dc53fb6fedc65d4a4ad30c530d" gracePeriod=30 Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.815701 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="8c74a8f5-be12-4ac4-b45c-d459801927ea" containerName="object-server" containerID="cri-o://5b64dc2431311cf052b4ff6df6c6144511a386c51c5291266eaa5ffde8e35a57" gracePeriod=30 Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.815727 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="8c74a8f5-be12-4ac4-b45c-d459801927ea" containerName="container-updater" containerID="cri-o://6750a373d500a14c8bb8f71b32edf18286f29379d3c4862db6be084c829808ed" gracePeriod=30 Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.815752 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="8c74a8f5-be12-4ac4-b45c-d459801927ea" containerName="container-auditor" containerID="cri-o://7312f6e5f2e6499af9f3601374373df6aa29dfce310cc2421bf1cf529c300a02" gracePeriod=30 Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.815778 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="8c74a8f5-be12-4ac4-b45c-d459801927ea" containerName="container-replicator" containerID="cri-o://b854ccf3f0261f0809cff51e86b924877af436c76a3e8bb217d1f85c3dda4221" gracePeriod=30 Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.815821 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="8c74a8f5-be12-4ac4-b45c-d459801927ea" containerName="container-server" containerID="cri-o://e2141ae9ecf7ffabbc3fdb13e8e88e4a3ea99534f37f0efd0752308e25d02c42" gracePeriod=30 Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.815849 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="8c74a8f5-be12-4ac4-b45c-d459801927ea" containerName="account-reaper" containerID="cri-o://b70b64e3b3bd252db5a3a5cd9a2f046dc46575ddcb991f7fbfb5eae9caa6a55b" gracePeriod=30 Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.815894 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="8c74a8f5-be12-4ac4-b45c-d459801927ea" containerName="account-auditor" containerID="cri-o://c51ba6d01533031c51006f14e866bc5bdedd26de8afcbe43c5bcf1e7025e0dd3" gracePeriod=30 Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.815923 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="8c74a8f5-be12-4ac4-b45c-d459801927ea" containerName="account-replicator" 
containerID="cri-o://abb482af37b6f6a3f13af0986ae717f0f32b8467e1976fdbb59f0c73b8240e46" gracePeriod=30 Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.902283 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d6b9532-22b5-41c9-8b15-83a9c515c52d-combined-ca-bundle\") pod \"7d6b9532-22b5-41c9-8b15-83a9c515c52d\" (UID: \"7d6b9532-22b5-41c9-8b15-83a9c515c52d\") " Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.902349 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d6b9532-22b5-41c9-8b15-83a9c515c52d-config-data\") pod \"7d6b9532-22b5-41c9-8b15-83a9c515c52d\" (UID: \"7d6b9532-22b5-41c9-8b15-83a9c515c52d\") " Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.902398 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xjhbz\" (UniqueName: \"kubernetes.io/projected/7d6b9532-22b5-41c9-8b15-83a9c515c52d-kube-api-access-xjhbz\") pod \"7d6b9532-22b5-41c9-8b15-83a9c515c52d\" (UID: \"7d6b9532-22b5-41c9-8b15-83a9c515c52d\") " Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.911438 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7d6b9532-22b5-41c9-8b15-83a9c515c52d-kube-api-access-xjhbz" (OuterVolumeSpecName: "kube-api-access-xjhbz") pod "7d6b9532-22b5-41c9-8b15-83a9c515c52d" (UID: "7d6b9532-22b5-41c9-8b15-83a9c515c52d"). InnerVolumeSpecName "kube-api-access-xjhbz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.963290 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d6b9532-22b5-41c9-8b15-83a9c515c52d-config-data" (OuterVolumeSpecName: "config-data") pod "7d6b9532-22b5-41c9-8b15-83a9c515c52d" (UID: "7d6b9532-22b5-41c9-8b15-83a9c515c52d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:32:47 crc kubenswrapper[4558]: I0120 17:32:47.996850 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d6b9532-22b5-41c9-8b15-83a9c515c52d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7d6b9532-22b5-41c9-8b15-83a9c515c52d" (UID: "7d6b9532-22b5-41c9-8b15-83a9c515c52d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.006376 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d6b9532-22b5-41c9-8b15-83a9c515c52d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.006399 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d6b9532-22b5-41c9-8b15-83a9c515c52d-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.006411 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xjhbz\" (UniqueName: \"kubernetes.io/projected/7d6b9532-22b5-41c9-8b15-83a9c515c52d-kube-api-access-xjhbz\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.009463 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-56458f747-wvqtq"] Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.027871 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-75b5f89ffd-dsnd8"] Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.094334 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-56458f747-wvqtq"] Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.185320 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.185648 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="aaaaf2d3-8f74-4ce7-8407-2f0b0bece070" containerName="glance-log" containerID="cri-o://bc8a516268d1e7dd8e3c075c9cfa7d70176325f633e340325c2c6e2aa0d06228" gracePeriod=30 Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.186245 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="aaaaf2d3-8f74-4ce7-8407-2f0b0bece070" containerName="glance-httpd" containerID="cri-o://b5f54a9d58e26e3dd566d5f5f71a6cabd02248b3e9fd086fccf6af2e071db403" gracePeriod=30 Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.237274 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-7fb744b5b7-vj8fh"] Jan 20 17:32:48 crc kubenswrapper[4558]: E0120 17:32:48.237764 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d6b9532-22b5-41c9-8b15-83a9c515c52d" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.237779 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d6b9532-22b5-41c9-8b15-83a9c515c52d" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.238014 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="7d6b9532-22b5-41c9-8b15-83a9c515c52d" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.238812 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-7fb744b5b7-vj8fh" Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.296866 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-7fb744b5b7-vj8fh"] Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.328294 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5wvk2\" (UniqueName: \"kubernetes.io/projected/8516d676-2fa0-47c4-b81a-254ec50efd52-kube-api-access-5wvk2\") pod \"keystone-7fb744b5b7-vj8fh\" (UID: \"8516d676-2fa0-47c4-b81a-254ec50efd52\") " pod="openstack-kuttl-tests/keystone-7fb744b5b7-vj8fh" Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.328367 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8516d676-2fa0-47c4-b81a-254ec50efd52-fernet-keys\") pod \"keystone-7fb744b5b7-vj8fh\" (UID: \"8516d676-2fa0-47c4-b81a-254ec50efd52\") " pod="openstack-kuttl-tests/keystone-7fb744b5b7-vj8fh" Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.328401 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8516d676-2fa0-47c4-b81a-254ec50efd52-config-data\") pod \"keystone-7fb744b5b7-vj8fh\" (UID: \"8516d676-2fa0-47c4-b81a-254ec50efd52\") " pod="openstack-kuttl-tests/keystone-7fb744b5b7-vj8fh" Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.328424 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8516d676-2fa0-47c4-b81a-254ec50efd52-credential-keys\") pod \"keystone-7fb744b5b7-vj8fh\" (UID: \"8516d676-2fa0-47c4-b81a-254ec50efd52\") " pod="openstack-kuttl-tests/keystone-7fb744b5b7-vj8fh" Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.328481 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8516d676-2fa0-47c4-b81a-254ec50efd52-combined-ca-bundle\") pod \"keystone-7fb744b5b7-vj8fh\" (UID: \"8516d676-2fa0-47c4-b81a-254ec50efd52\") " pod="openstack-kuttl-tests/keystone-7fb744b5b7-vj8fh" Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.328516 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8516d676-2fa0-47c4-b81a-254ec50efd52-scripts\") pod \"keystone-7fb744b5b7-vj8fh\" (UID: \"8516d676-2fa0-47c4-b81a-254ec50efd52\") " pod="openstack-kuttl-tests/keystone-7fb744b5b7-vj8fh" Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.358319 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-worker-777f6887b5-wwl56"] Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.385378 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-5c6488b48b-4sj7k"] Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.390964 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-5c6488b48b-4sj7k" Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.398250 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-5c6488b48b-4sj7k"] Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.423218 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-api-6d8c858488-xrdwn"] Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.430231 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8516d676-2fa0-47c4-b81a-254ec50efd52-combined-ca-bundle\") pod \"keystone-7fb744b5b7-vj8fh\" (UID: \"8516d676-2fa0-47c4-b81a-254ec50efd52\") " pod="openstack-kuttl-tests/keystone-7fb744b5b7-vj8fh" Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.430319 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8516d676-2fa0-47c4-b81a-254ec50efd52-scripts\") pod \"keystone-7fb744b5b7-vj8fh\" (UID: \"8516d676-2fa0-47c4-b81a-254ec50efd52\") " pod="openstack-kuttl-tests/keystone-7fb744b5b7-vj8fh" Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.430365 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5wvk2\" (UniqueName: \"kubernetes.io/projected/8516d676-2fa0-47c4-b81a-254ec50efd52-kube-api-access-5wvk2\") pod \"keystone-7fb744b5b7-vj8fh\" (UID: \"8516d676-2fa0-47c4-b81a-254ec50efd52\") " pod="openstack-kuttl-tests/keystone-7fb744b5b7-vj8fh" Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.430454 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8516d676-2fa0-47c4-b81a-254ec50efd52-fernet-keys\") pod \"keystone-7fb744b5b7-vj8fh\" (UID: \"8516d676-2fa0-47c4-b81a-254ec50efd52\") " pod="openstack-kuttl-tests/keystone-7fb744b5b7-vj8fh" Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.430508 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8516d676-2fa0-47c4-b81a-254ec50efd52-config-data\") pod \"keystone-7fb744b5b7-vj8fh\" (UID: \"8516d676-2fa0-47c4-b81a-254ec50efd52\") " pod="openstack-kuttl-tests/keystone-7fb744b5b7-vj8fh" Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.430547 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8516d676-2fa0-47c4-b81a-254ec50efd52-credential-keys\") pod \"keystone-7fb744b5b7-vj8fh\" (UID: \"8516d676-2fa0-47c4-b81a-254ec50efd52\") " pod="openstack-kuttl-tests/keystone-7fb744b5b7-vj8fh" Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.444679 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8516d676-2fa0-47c4-b81a-254ec50efd52-config-data\") pod \"keystone-7fb744b5b7-vj8fh\" (UID: \"8516d676-2fa0-47c4-b81a-254ec50efd52\") " pod="openstack-kuttl-tests/keystone-7fb744b5b7-vj8fh" Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.450140 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8516d676-2fa0-47c4-b81a-254ec50efd52-scripts\") pod \"keystone-7fb744b5b7-vj8fh\" (UID: \"8516d676-2fa0-47c4-b81a-254ec50efd52\") " pod="openstack-kuttl-tests/keystone-7fb744b5b7-vj8fh" Jan 20 17:32:48 crc 
kubenswrapper[4558]: I0120 17:32:48.457743 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8516d676-2fa0-47c4-b81a-254ec50efd52-combined-ca-bundle\") pod \"keystone-7fb744b5b7-vj8fh\" (UID: \"8516d676-2fa0-47c4-b81a-254ec50efd52\") " pod="openstack-kuttl-tests/keystone-7fb744b5b7-vj8fh" Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.462921 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8516d676-2fa0-47c4-b81a-254ec50efd52-fernet-keys\") pod \"keystone-7fb744b5b7-vj8fh\" (UID: \"8516d676-2fa0-47c4-b81a-254ec50efd52\") " pod="openstack-kuttl-tests/keystone-7fb744b5b7-vj8fh" Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.464580 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8516d676-2fa0-47c4-b81a-254ec50efd52-credential-keys\") pod \"keystone-7fb744b5b7-vj8fh\" (UID: \"8516d676-2fa0-47c4-b81a-254ec50efd52\") " pod="openstack-kuttl-tests/keystone-7fb744b5b7-vj8fh" Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.481595 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5wvk2\" (UniqueName: \"kubernetes.io/projected/8516d676-2fa0-47c4-b81a-254ec50efd52-kube-api-access-5wvk2\") pod \"keystone-7fb744b5b7-vj8fh\" (UID: \"8516d676-2fa0-47c4-b81a-254ec50efd52\") " pod="openstack-kuttl-tests/keystone-7fb744b5b7-vj8fh" Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.501814 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-75b5f89ffd-dsnd8"] Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.534672 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/48ebccca-cc18-415f-b6f8-f301f99e5d3a-config\") pod \"neutron-5c6488b48b-4sj7k\" (UID: \"48ebccca-cc18-415f-b6f8-f301f99e5d3a\") " pod="openstack-kuttl-tests/neutron-5c6488b48b-4sj7k" Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.534794 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48ebccca-cc18-415f-b6f8-f301f99e5d3a-combined-ca-bundle\") pod \"neutron-5c6488b48b-4sj7k\" (UID: \"48ebccca-cc18-415f-b6f8-f301f99e5d3a\") " pod="openstack-kuttl-tests/neutron-5c6488b48b-4sj7k" Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.534848 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5p4h4\" (UniqueName: \"kubernetes.io/projected/48ebccca-cc18-415f-b6f8-f301f99e5d3a-kube-api-access-5p4h4\") pod \"neutron-5c6488b48b-4sj7k\" (UID: \"48ebccca-cc18-415f-b6f8-f301f99e5d3a\") " pod="openstack-kuttl-tests/neutron-5c6488b48b-4sj7k" Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.534915 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/48ebccca-cc18-415f-b6f8-f301f99e5d3a-httpd-config\") pod \"neutron-5c6488b48b-4sj7k\" (UID: \"48ebccca-cc18-415f-b6f8-f301f99e5d3a\") " pod="openstack-kuttl-tests/neutron-5c6488b48b-4sj7k" Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.539542 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-nbrcj" 
event={"ID":"9ab93517-8f51-467d-b830-16030668be2b","Type":"ContainerStarted","Data":"d7b53b31033927ce6e8736e2b00e96c96f93bd828ee7bf1b9cf354815ad9c843"} Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.539613 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-nbrcj" event={"ID":"9ab93517-8f51-467d-b830-16030668be2b","Type":"ContainerStarted","Data":"4a13176f595f2b9edae8e641361b42151f47e949617e58060b99a9f8a1b196dc"} Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.542225 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-6d8c858488-xrdwn" event={"ID":"d0062240-1ed7-488f-ac15-d3c5e0812ccd","Type":"ContainerStarted","Data":"d072a6a8cc0a04bf971ca24e0c66fbd5ad8b6cba92a127c02e13563a25c1c21f"} Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.543846 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-56458f747-wvqtq" event={"ID":"aa7956fe-8f61-4e0d-86c1-716e300c4059","Type":"ContainerStarted","Data":"b8533bb52b8311cc6e8d8471c0e63b718e9477ef334c429e7569596648ef2ba8"} Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.556992 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-5459fb5f8-h5bbp" event={"ID":"d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a","Type":"ContainerStarted","Data":"01ee3a8fed591849102b4d4c6d98d89fd578aa4be7ef209e7467b90ceed6447d"} Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.584712 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-777f6887b5-wwl56" event={"ID":"e0294130-264e-4fa0-b336-72c350ea61cc","Type":"ContainerStarted","Data":"c9d625c04190b37538111c82cbf9d2725c6c8892cfd1a4af3f02910be0e108ed"} Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.586920 4558 generic.go:334] "Generic (PLEG): container finished" podID="59e7ef92-3d74-4334-ac19-671eb3199d0b" containerID="85d12510c1da886d4ee25d1c28a63fdd18e54d2a19e1791fb8e0c9a0fa49fb05" exitCode=143 Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.586974 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"59e7ef92-3d74-4334-ac19-671eb3199d0b","Type":"ContainerDied","Data":"85d12510c1da886d4ee25d1c28a63fdd18e54d2a19e1791fb8e0c9a0fa49fb05"} Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.600072 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-nbrcj" podStartSLOduration=2.6000537379999997 podStartE2EDuration="2.600053738s" podCreationTimestamp="2026-01-20 17:32:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:32:48.570081981 +0000 UTC m=+3062.330419949" watchObservedRunningTime="2026-01-20 17:32:48.600053738 +0000 UTC m=+3062.360391705" Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.605672 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-7fb744b5b7-vj8fh" Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.638270 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/48ebccca-cc18-415f-b6f8-f301f99e5d3a-httpd-config\") pod \"neutron-5c6488b48b-4sj7k\" (UID: \"48ebccca-cc18-415f-b6f8-f301f99e5d3a\") " pod="openstack-kuttl-tests/neutron-5c6488b48b-4sj7k" Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.638349 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/48ebccca-cc18-415f-b6f8-f301f99e5d3a-config\") pod \"neutron-5c6488b48b-4sj7k\" (UID: \"48ebccca-cc18-415f-b6f8-f301f99e5d3a\") " pod="openstack-kuttl-tests/neutron-5c6488b48b-4sj7k" Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.638423 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48ebccca-cc18-415f-b6f8-f301f99e5d3a-combined-ca-bundle\") pod \"neutron-5c6488b48b-4sj7k\" (UID: \"48ebccca-cc18-415f-b6f8-f301f99e5d3a\") " pod="openstack-kuttl-tests/neutron-5c6488b48b-4sj7k" Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.638456 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5p4h4\" (UniqueName: \"kubernetes.io/projected/48ebccca-cc18-415f-b6f8-f301f99e5d3a-kube-api-access-5p4h4\") pod \"neutron-5c6488b48b-4sj7k\" (UID: \"48ebccca-cc18-415f-b6f8-f301f99e5d3a\") " pod="openstack-kuttl-tests/neutron-5c6488b48b-4sj7k" Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.653746 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/48ebccca-cc18-415f-b6f8-f301f99e5d3a-config\") pod \"neutron-5c6488b48b-4sj7k\" (UID: \"48ebccca-cc18-415f-b6f8-f301f99e5d3a\") " pod="openstack-kuttl-tests/neutron-5c6488b48b-4sj7k" Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.658767 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/48ebccca-cc18-415f-b6f8-f301f99e5d3a-httpd-config\") pod \"neutron-5c6488b48b-4sj7k\" (UID: \"48ebccca-cc18-415f-b6f8-f301f99e5d3a\") " pod="openstack-kuttl-tests/neutron-5c6488b48b-4sj7k" Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.670060 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48ebccca-cc18-415f-b6f8-f301f99e5d3a-combined-ca-bundle\") pod \"neutron-5c6488b48b-4sj7k\" (UID: \"48ebccca-cc18-415f-b6f8-f301f99e5d3a\") " pod="openstack-kuttl-tests/neutron-5c6488b48b-4sj7k" Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.682811 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5p4h4\" (UniqueName: \"kubernetes.io/projected/48ebccca-cc18-415f-b6f8-f301f99e5d3a-kube-api-access-5p4h4\") pod \"neutron-5c6488b48b-4sj7k\" (UID: \"48ebccca-cc18-415f-b6f8-f301f99e5d3a\") " pod="openstack-kuttl-tests/neutron-5c6488b48b-4sj7k" Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.691295 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"7d6b9532-22b5-41c9-8b15-83a9c515c52d","Type":"ContainerDied","Data":"a97bdbc9a4de273f8137ff9c69110f06a81be653a7f71ff8f59cd55b614c6532"} Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 
17:32:48.691348 4558 scope.go:117] "RemoveContainer" containerID="68de8de2f4680620ba6e5b236fe7d12d5fb76c8413d11297d94efa48e9761c9e" Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.691485 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.732054 4558 generic.go:334] "Generic (PLEG): container finished" podID="8c74a8f5-be12-4ac4-b45c-d459801927ea" containerID="e84993808b2bc5f85b1ae50493298e6a10eb7c3fcbd64da879a0e7a937ba17e1" exitCode=0 Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.732082 4558 generic.go:334] "Generic (PLEG): container finished" podID="8c74a8f5-be12-4ac4-b45c-d459801927ea" containerID="6bd5c4f64f7527d8728149bf64ddbf3f8288b598bd1840a6821fe48789fce821" exitCode=0 Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.732091 4558 generic.go:334] "Generic (PLEG): container finished" podID="8c74a8f5-be12-4ac4-b45c-d459801927ea" containerID="642fe8d6d1e22629c1ee44fe92b12d3105252f0e570aa04c45e104ebe419f7ad" exitCode=0 Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.732099 4558 generic.go:334] "Generic (PLEG): container finished" podID="8c74a8f5-be12-4ac4-b45c-d459801927ea" containerID="b1a440452721f6300aa80e2b85895305c5e163dc53fb6fedc65d4a4ad30c530d" exitCode=0 Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.732106 4558 generic.go:334] "Generic (PLEG): container finished" podID="8c74a8f5-be12-4ac4-b45c-d459801927ea" containerID="6750a373d500a14c8bb8f71b32edf18286f29379d3c4862db6be084c829808ed" exitCode=0 Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.732112 4558 generic.go:334] "Generic (PLEG): container finished" podID="8c74a8f5-be12-4ac4-b45c-d459801927ea" containerID="b70b64e3b3bd252db5a3a5cd9a2f046dc46575ddcb991f7fbfb5eae9caa6a55b" exitCode=0 Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.732128 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"8c74a8f5-be12-4ac4-b45c-d459801927ea","Type":"ContainerDied","Data":"e84993808b2bc5f85b1ae50493298e6a10eb7c3fcbd64da879a0e7a937ba17e1"} Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.732145 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"8c74a8f5-be12-4ac4-b45c-d459801927ea","Type":"ContainerDied","Data":"6bd5c4f64f7527d8728149bf64ddbf3f8288b598bd1840a6821fe48789fce821"} Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.732203 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"8c74a8f5-be12-4ac4-b45c-d459801927ea","Type":"ContainerDied","Data":"642fe8d6d1e22629c1ee44fe92b12d3105252f0e570aa04c45e104ebe419f7ad"} Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.732212 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"8c74a8f5-be12-4ac4-b45c-d459801927ea","Type":"ContainerDied","Data":"b1a440452721f6300aa80e2b85895305c5e163dc53fb6fedc65d4a4ad30c530d"} Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.732222 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"8c74a8f5-be12-4ac4-b45c-d459801927ea","Type":"ContainerDied","Data":"6750a373d500a14c8bb8f71b32edf18286f29379d3c4862db6be084c829808ed"} Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.732233 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"8c74a8f5-be12-4ac4-b45c-d459801927ea","Type":"ContainerDied","Data":"b70b64e3b3bd252db5a3a5cd9a2f046dc46575ddcb991f7fbfb5eae9caa6a55b"} Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.745774 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-5c6488b48b-4sj7k" Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.800985 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.821764 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.829666 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.830786 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.834735 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-novncproxy-config-data" Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.894492 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.894683 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" podUID="49dc00d4-0563-4324-a98b-e42b24b4223b" containerName="rabbitmq" containerID="cri-o://4008537411d2d6de7def09cb45d06dbf9acb1876b14e155e5f8a007aaeffe4df" gracePeriod=604798 Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.944847 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/760135a9-d7f7-456e-b7d6-9f2e7730e382-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"760135a9-d7f7-456e-b7d6-9f2e7730e382\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.944916 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qq6ds\" (UniqueName: \"kubernetes.io/projected/760135a9-d7f7-456e-b7d6-9f2e7730e382-kube-api-access-qq6ds\") pod \"nova-cell1-novncproxy-0\" (UID: \"760135a9-d7f7-456e-b7d6-9f2e7730e382\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:32:48 crc kubenswrapper[4558]: I0120 17:32:48.945002 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/760135a9-d7f7-456e-b7d6-9f2e7730e382-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"760135a9-d7f7-456e-b7d6-9f2e7730e382\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:32:49 crc kubenswrapper[4558]: I0120 17:32:49.048488 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/760135a9-d7f7-456e-b7d6-9f2e7730e382-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"760135a9-d7f7-456e-b7d6-9f2e7730e382\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:32:49 crc kubenswrapper[4558]: I0120 17:32:49.048780 4558 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-qq6ds\" (UniqueName: \"kubernetes.io/projected/760135a9-d7f7-456e-b7d6-9f2e7730e382-kube-api-access-qq6ds\") pod \"nova-cell1-novncproxy-0\" (UID: \"760135a9-d7f7-456e-b7d6-9f2e7730e382\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:32:49 crc kubenswrapper[4558]: I0120 17:32:49.048844 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/760135a9-d7f7-456e-b7d6-9f2e7730e382-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"760135a9-d7f7-456e-b7d6-9f2e7730e382\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:32:49 crc kubenswrapper[4558]: I0120 17:32:49.051980 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/760135a9-d7f7-456e-b7d6-9f2e7730e382-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"760135a9-d7f7-456e-b7d6-9f2e7730e382\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:32:49 crc kubenswrapper[4558]: I0120 17:32:49.057550 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/760135a9-d7f7-456e-b7d6-9f2e7730e382-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"760135a9-d7f7-456e-b7d6-9f2e7730e382\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:32:49 crc kubenswrapper[4558]: I0120 17:32:49.063111 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qq6ds\" (UniqueName: \"kubernetes.io/projected/760135a9-d7f7-456e-b7d6-9f2e7730e382-kube-api-access-qq6ds\") pod \"nova-cell1-novncproxy-0\" (UID: \"760135a9-d7f7-456e-b7d6-9f2e7730e382\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:32:49 crc kubenswrapper[4558]: E0120 17:32:49.112283 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="075fdf51fe78df21a6645f555dba48d1e1068aaccdaf9394e743e0397b0f8c25" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:32:49 crc kubenswrapper[4558]: E0120 17:32:49.125897 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="075fdf51fe78df21a6645f555dba48d1e1068aaccdaf9394e743e0397b0f8c25" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:32:49 crc kubenswrapper[4558]: E0120 17:32:49.135290 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="075fdf51fe78df21a6645f555dba48d1e1068aaccdaf9394e743e0397b0f8c25" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:32:49 crc kubenswrapper[4558]: E0120 17:32:49.135333 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="7aa6c24a-66a5-4b11-96a1-4d14001d1f5c" containerName="nova-scheduler-scheduler" Jan 20 17:32:49 crc kubenswrapper[4558]: I0120 17:32:49.146089 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:32:49 crc kubenswrapper[4558]: I0120 17:32:49.164468 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:32:49 crc kubenswrapper[4558]: I0120 17:32:49.263822 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/39e9c81e-2923-472a-b892-5404d32842bb-kolla-config\") pod \"39e9c81e-2923-472a-b892-5404d32842bb\" (UID: \"39e9c81e-2923-472a-b892-5404d32842bb\") " Jan 20 17:32:49 crc kubenswrapper[4558]: I0120 17:32:49.263904 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/39e9c81e-2923-472a-b892-5404d32842bb-config-data\") pod \"39e9c81e-2923-472a-b892-5404d32842bb\" (UID: \"39e9c81e-2923-472a-b892-5404d32842bb\") " Jan 20 17:32:49 crc kubenswrapper[4558]: I0120 17:32:49.264027 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-846cj\" (UniqueName: \"kubernetes.io/projected/39e9c81e-2923-472a-b892-5404d32842bb-kube-api-access-846cj\") pod \"39e9c81e-2923-472a-b892-5404d32842bb\" (UID: \"39e9c81e-2923-472a-b892-5404d32842bb\") " Jan 20 17:32:49 crc kubenswrapper[4558]: I0120 17:32:49.264784 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/39e9c81e-2923-472a-b892-5404d32842bb-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "39e9c81e-2923-472a-b892-5404d32842bb" (UID: "39e9c81e-2923-472a-b892-5404d32842bb"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:32:49 crc kubenswrapper[4558]: I0120 17:32:49.265104 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/39e9c81e-2923-472a-b892-5404d32842bb-config-data" (OuterVolumeSpecName: "config-data") pod "39e9c81e-2923-472a-b892-5404d32842bb" (UID: "39e9c81e-2923-472a-b892-5404d32842bb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:32:49 crc kubenswrapper[4558]: I0120 17:32:49.266016 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/rabbitmq-server-0" podUID="35b2371f-57f2-4728-b32f-1ba587b4532e" containerName="rabbitmq" containerID="cri-o://7e836c85569914a31734b34020c760d2ba17ca6f5821aecaa0763835a0e87863" gracePeriod=604798 Jan 20 17:32:49 crc kubenswrapper[4558]: I0120 17:32:49.271104 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/39e9c81e-2923-472a-b892-5404d32842bb-kube-api-access-846cj" (OuterVolumeSpecName: "kube-api-access-846cj") pod "39e9c81e-2923-472a-b892-5404d32842bb" (UID: "39e9c81e-2923-472a-b892-5404d32842bb"). InnerVolumeSpecName "kube-api-access-846cj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:32:49 crc kubenswrapper[4558]: I0120 17:32:49.366693 4558 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/39e9c81e-2923-472a-b892-5404d32842bb-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:49 crc kubenswrapper[4558]: I0120 17:32:49.366734 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/39e9c81e-2923-472a-b892-5404d32842bb-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:49 crc kubenswrapper[4558]: I0120 17:32:49.366746 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-846cj\" (UniqueName: \"kubernetes.io/projected/39e9c81e-2923-472a-b892-5404d32842bb-kube-api-access-846cj\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:49 crc kubenswrapper[4558]: I0120 17:32:49.557386 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-5c6488b48b-4sj7k"] Jan 20 17:32:49 crc kubenswrapper[4558]: I0120 17:32:49.566964 4558 scope.go:117] "RemoveContainer" containerID="f275e9f5de330b3c79316d5871106c6bcf90b698e0744c5beb28da45c336b36d" Jan 20 17:32:49 crc kubenswrapper[4558]: E0120 17:32:49.567185 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:32:49 crc kubenswrapper[4558]: I0120 17:32:49.668990 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-7fb744b5b7-vj8fh"] Jan 20 17:32:49 crc kubenswrapper[4558]: I0120 17:32:49.768069 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:32:49 crc kubenswrapper[4558]: I0120 17:32:49.826439 4558 generic.go:334] "Generic (PLEG): container finished" podID="568979fa-537c-45c5-952c-f87a40b194ef" containerID="b2cc3eb7df47c10e18cf97ab2f8a7780ca8a8e35c8a8c749aa1b4a85dd0a58ad" exitCode=0 Jan 20 17:32:49 crc kubenswrapper[4558]: I0120 17:32:49.826645 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"568979fa-537c-45c5-952c-f87a40b194ef","Type":"ContainerDied","Data":"b2cc3eb7df47c10e18cf97ab2f8a7780ca8a8e35c8a8c749aa1b4a85dd0a58ad"} Jan 20 17:32:49 crc kubenswrapper[4558]: I0120 17:32:49.879387 4558 generic.go:334] "Generic (PLEG): container finished" podID="8c74a8f5-be12-4ac4-b45c-d459801927ea" containerID="49db71c25f4101de9d35070da8e88df17b13463d85f4085b268b15d6f2fd0010" exitCode=0 Jan 20 17:32:49 crc kubenswrapper[4558]: I0120 17:32:49.879410 4558 generic.go:334] "Generic (PLEG): container finished" podID="8c74a8f5-be12-4ac4-b45c-d459801927ea" containerID="5b64dc2431311cf052b4ff6df6c6144511a386c51c5291266eaa5ffde8e35a57" exitCode=0 Jan 20 17:32:49 crc kubenswrapper[4558]: I0120 17:32:49.879438 4558 generic.go:334] "Generic (PLEG): container finished" podID="8c74a8f5-be12-4ac4-b45c-d459801927ea" containerID="7312f6e5f2e6499af9f3601374373df6aa29dfce310cc2421bf1cf529c300a02" exitCode=0 Jan 20 17:32:49 crc kubenswrapper[4558]: I0120 17:32:49.879445 4558 generic.go:334] "Generic (PLEG): container finished" 
podID="8c74a8f5-be12-4ac4-b45c-d459801927ea" containerID="b854ccf3f0261f0809cff51e86b924877af436c76a3e8bb217d1f85c3dda4221" exitCode=0 Jan 20 17:32:49 crc kubenswrapper[4558]: I0120 17:32:49.879452 4558 generic.go:334] "Generic (PLEG): container finished" podID="8c74a8f5-be12-4ac4-b45c-d459801927ea" containerID="e2141ae9ecf7ffabbc3fdb13e8e88e4a3ea99534f37f0efd0752308e25d02c42" exitCode=0 Jan 20 17:32:49 crc kubenswrapper[4558]: I0120 17:32:49.879460 4558 generic.go:334] "Generic (PLEG): container finished" podID="8c74a8f5-be12-4ac4-b45c-d459801927ea" containerID="c51ba6d01533031c51006f14e866bc5bdedd26de8afcbe43c5bcf1e7025e0dd3" exitCode=0 Jan 20 17:32:49 crc kubenswrapper[4558]: I0120 17:32:49.879465 4558 generic.go:334] "Generic (PLEG): container finished" podID="8c74a8f5-be12-4ac4-b45c-d459801927ea" containerID="abb482af37b6f6a3f13af0986ae717f0f32b8467e1976fdbb59f0c73b8240e46" exitCode=0 Jan 20 17:32:49 crc kubenswrapper[4558]: I0120 17:32:49.879470 4558 generic.go:334] "Generic (PLEG): container finished" podID="8c74a8f5-be12-4ac4-b45c-d459801927ea" containerID="33d41f8923ec28aa7b94229f05d825972526de6642700e3a9555dbc7f7a6e7ce" exitCode=0 Jan 20 17:32:49 crc kubenswrapper[4558]: I0120 17:32:49.879550 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"8c74a8f5-be12-4ac4-b45c-d459801927ea","Type":"ContainerDied","Data":"49db71c25f4101de9d35070da8e88df17b13463d85f4085b268b15d6f2fd0010"} Jan 20 17:32:49 crc kubenswrapper[4558]: I0120 17:32:49.879596 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"8c74a8f5-be12-4ac4-b45c-d459801927ea","Type":"ContainerDied","Data":"5b64dc2431311cf052b4ff6df6c6144511a386c51c5291266eaa5ffde8e35a57"} Jan 20 17:32:49 crc kubenswrapper[4558]: I0120 17:32:49.879607 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"8c74a8f5-be12-4ac4-b45c-d459801927ea","Type":"ContainerDied","Data":"7312f6e5f2e6499af9f3601374373df6aa29dfce310cc2421bf1cf529c300a02"} Jan 20 17:32:49 crc kubenswrapper[4558]: I0120 17:32:49.879616 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"8c74a8f5-be12-4ac4-b45c-d459801927ea","Type":"ContainerDied","Data":"b854ccf3f0261f0809cff51e86b924877af436c76a3e8bb217d1f85c3dda4221"} Jan 20 17:32:49 crc kubenswrapper[4558]: I0120 17:32:49.879624 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"8c74a8f5-be12-4ac4-b45c-d459801927ea","Type":"ContainerDied","Data":"e2141ae9ecf7ffabbc3fdb13e8e88e4a3ea99534f37f0efd0752308e25d02c42"} Jan 20 17:32:49 crc kubenswrapper[4558]: I0120 17:32:49.879633 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"8c74a8f5-be12-4ac4-b45c-d459801927ea","Type":"ContainerDied","Data":"c51ba6d01533031c51006f14e866bc5bdedd26de8afcbe43c5bcf1e7025e0dd3"} Jan 20 17:32:49 crc kubenswrapper[4558]: I0120 17:32:49.879640 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"8c74a8f5-be12-4ac4-b45c-d459801927ea","Type":"ContainerDied","Data":"abb482af37b6f6a3f13af0986ae717f0f32b8467e1976fdbb59f0c73b8240e46"} Jan 20 17:32:49 crc kubenswrapper[4558]: I0120 17:32:49.879647 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" 
event={"ID":"8c74a8f5-be12-4ac4-b45c-d459801927ea","Type":"ContainerDied","Data":"33d41f8923ec28aa7b94229f05d825972526de6642700e3a9555dbc7f7a6e7ce"} Jan 20 17:32:49 crc kubenswrapper[4558]: I0120 17:32:49.894291 4558 generic.go:334] "Generic (PLEG): container finished" podID="39e9c81e-2923-472a-b892-5404d32842bb" containerID="2125c4c8f812531a8911dde13a1a8e1cc94d924088d9043e03d8a2554aea8fba" exitCode=0 Jan 20 17:32:49 crc kubenswrapper[4558]: I0120 17:32:49.894334 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"39e9c81e-2923-472a-b892-5404d32842bb","Type":"ContainerDied","Data":"2125c4c8f812531a8911dde13a1a8e1cc94d924088d9043e03d8a2554aea8fba"} Jan 20 17:32:49 crc kubenswrapper[4558]: I0120 17:32:49.894350 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"39e9c81e-2923-472a-b892-5404d32842bb","Type":"ContainerDied","Data":"e926dceaefe7d08030725450daa3bbe85b1bfb6ac0fcfaf97459f9b11814b1e3"} Jan 20 17:32:49 crc kubenswrapper[4558]: I0120 17:32:49.894367 4558 scope.go:117] "RemoveContainer" containerID="2125c4c8f812531a8911dde13a1a8e1cc94d924088d9043e03d8a2554aea8fba" Jan 20 17:32:49 crc kubenswrapper[4558]: I0120 17:32:49.894458 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:32:49 crc kubenswrapper[4558]: I0120 17:32:49.936573 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-7fb744b5b7-vj8fh" event={"ID":"8516d676-2fa0-47c4-b81a-254ec50efd52","Type":"ContainerStarted","Data":"30df890f266e77b1f0f4c83ebc87ae7d845870e96e6f6159151d6c7ad9b88cdb"} Jan 20 17:32:49 crc kubenswrapper[4558]: I0120 17:32:49.955570 4558 generic.go:334] "Generic (PLEG): container finished" podID="aaaaf2d3-8f74-4ce7-8407-2f0b0bece070" containerID="bc8a516268d1e7dd8e3c075c9cfa7d70176325f633e340325c2c6e2aa0d06228" exitCode=143 Jan 20 17:32:49 crc kubenswrapper[4558]: I0120 17:32:49.955752 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"aaaaf2d3-8f74-4ce7-8407-2f0b0bece070","Type":"ContainerDied","Data":"bc8a516268d1e7dd8e3c075c9cfa7d70176325f633e340325c2c6e2aa0d06228"} Jan 20 17:32:49 crc kubenswrapper[4558]: I0120 17:32:49.963560 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:32:49 crc kubenswrapper[4558]: I0120 17:32:49.967637 4558 scope.go:117] "RemoveContainer" containerID="2125c4c8f812531a8911dde13a1a8e1cc94d924088d9043e03d8a2554aea8fba" Jan 20 17:32:50 crc kubenswrapper[4558]: E0120 17:32:50.002395 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2125c4c8f812531a8911dde13a1a8e1cc94d924088d9043e03d8a2554aea8fba\": container with ID starting with 2125c4c8f812531a8911dde13a1a8e1cc94d924088d9043e03d8a2554aea8fba not found: ID does not exist" containerID="2125c4c8f812531a8911dde13a1a8e1cc94d924088d9043e03d8a2554aea8fba" Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.002443 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2125c4c8f812531a8911dde13a1a8e1cc94d924088d9043e03d8a2554aea8fba"} err="failed to get container status \"2125c4c8f812531a8911dde13a1a8e1cc94d924088d9043e03d8a2554aea8fba\": rpc error: code = NotFound desc = could not find container 
\"2125c4c8f812531a8911dde13a1a8e1cc94d924088d9043e03d8a2554aea8fba\": container with ID starting with 2125c4c8f812531a8911dde13a1a8e1cc94d924088d9043e03d8a2554aea8fba not found: ID does not exist" Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.009726 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.031106 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:32:50 crc kubenswrapper[4558]: E0120 17:32:50.031543 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39e9c81e-2923-472a-b892-5404d32842bb" containerName="memcached" Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.031557 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="39e9c81e-2923-472a-b892-5404d32842bb" containerName="memcached" Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.031757 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="39e9c81e-2923-472a-b892-5404d32842bb" containerName="memcached" Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.032720 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.034087 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/barbican-api-6d8c858488-xrdwn" Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.034111 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/barbican-api-6d8c858488-xrdwn" Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.034121 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-6d8c858488-xrdwn" event={"ID":"d0062240-1ed7-488f-ac15-d3c5e0812ccd","Type":"ContainerStarted","Data":"f8f2ef208173ae07911e14421c874575ec9ac47e3161c054e5cda79b8cc76030"} Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.034135 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-6d8c858488-xrdwn" event={"ID":"d0062240-1ed7-488f-ac15-d3c5e0812ccd","Type":"ContainerStarted","Data":"c0fe7edacfd45633530e170a5c39c2fd85b8ee4e9a397ea2a78b53cb55d8f966"} Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.034200 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.035968 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"memcached-config-data" Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.036247 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"memcached-memcached-dockercfg-8bzhf" Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.037746 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-memcached-svc" Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.038364 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-5c6488b48b-4sj7k" event={"ID":"48ebccca-cc18-415f-b6f8-f301f99e5d3a","Type":"ContainerStarted","Data":"6e70719a68f130030bdf24d3504c54db722377f4ef2e6a6503821d48a7173a3d"} Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.039710 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-56458f747-wvqtq" event={"ID":"aa7956fe-8f61-4e0d-86c1-716e300c4059","Type":"ContainerStarted","Data":"45e428dbaa6ce016259fe3e88fa43969eb40f16dbbb8bdf480289cfc28c7be0e"} Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.039824 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/keystone-56458f747-wvqtq" podUID="aa7956fe-8f61-4e0d-86c1-716e300c4059" containerName="keystone-api" containerID="cri-o://45e428dbaa6ce016259fe3e88fa43969eb40f16dbbb8bdf480289cfc28c7be0e" gracePeriod=30 Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.040039 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/keystone-56458f747-wvqtq" Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.043500 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.055822 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-5459fb5f8-h5bbp" event={"ID":"d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a","Type":"ContainerStarted","Data":"616dfc84f48150feccd210dc50efb30772487af019214a6546143c12c3014223"} Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.055854 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-5459fb5f8-h5bbp" event={"ID":"d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a","Type":"ContainerStarted","Data":"f06e9dc08abc874c209a5383b2994f653202e9f66eb7464ef36f77ca919a05a8"} Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.092248 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/keystone-56458f747-wvqtq" podStartSLOduration=4.092236806 podStartE2EDuration="4.092236806s" podCreationTimestamp="2026-01-20 17:32:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:32:50.082483487 +0000 UTC m=+3063.842821454" watchObservedRunningTime="2026-01-20 17:32:50.092236806 +0000 UTC m=+3063.852574773" Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.103185 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-75b5f89ffd-dsnd8" event={"ID":"e8c90851-f646-4a0c-8c0c-7ecac86fa5a7","Type":"ContainerStarted","Data":"13d6958e3f022365a5a2101645a748bcf6a2a94aa5188200a2ebb9ecfc435482"} 
Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.103215 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-75b5f89ffd-dsnd8" event={"ID":"e8c90851-f646-4a0c-8c0c-7ecac86fa5a7","Type":"ContainerStarted","Data":"ad169539cfb088bb07910338d7ba610c14a018ab31874536a6e7fafebd60e428"} Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.103224 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-75b5f89ffd-dsnd8" event={"ID":"e8c90851-f646-4a0c-8c0c-7ecac86fa5a7","Type":"ContainerStarted","Data":"0d9278d84f2e42b2855482ba5a3543e181c3048878d068d282fee3f7d6cdefd4"} Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.103322 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-75b5f89ffd-dsnd8" podUID="e8c90851-f646-4a0c-8c0c-7ecac86fa5a7" containerName="neutron-api" containerID="cri-o://ad169539cfb088bb07910338d7ba610c14a018ab31874536a6e7fafebd60e428" gracePeriod=30 Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.103500 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/neutron-75b5f89ffd-dsnd8" Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.103542 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-75b5f89ffd-dsnd8" podUID="e8c90851-f646-4a0c-8c0c-7ecac86fa5a7" containerName="neutron-httpd" containerID="cri-o://13d6958e3f022365a5a2101645a748bcf6a2a94aa5188200a2ebb9ecfc435482" gracePeriod=30 Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.104797 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-api-6d8c858488-xrdwn" podStartSLOduration=4.104789428 podStartE2EDuration="4.104789428s" podCreationTimestamp="2026-01-20 17:32:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:32:50.102975528 +0000 UTC m=+3063.863313496" watchObservedRunningTime="2026-01-20 17:32:50.104789428 +0000 UTC m=+3063.865127396" Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.124080 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-777f6887b5-wwl56" event={"ID":"e0294130-264e-4fa0-b336-72c350ea61cc","Type":"ContainerStarted","Data":"c084c5e8cdb480d00ce491d7dbdb0d773ed2ee7cdfeacd273ca45bb3c51772de"} Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.124102 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-777f6887b5-wwl56" event={"ID":"e0294130-264e-4fa0-b336-72c350ea61cc","Type":"ContainerStarted","Data":"62f48a277d2d3f69fc053a84173286db65e766dd294fabf68464419b15e8611b"} Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.152321 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/neutron-75b5f89ffd-dsnd8" podStartSLOduration=4.152302167 podStartE2EDuration="4.152302167s" podCreationTimestamp="2026-01-20 17:32:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:32:50.141793178 +0000 UTC m=+3063.902131145" watchObservedRunningTime="2026-01-20 17:32:50.152302167 +0000 UTC m=+3063.912640134" Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.156691 4558 generic.go:334] "Generic (PLEG): container finished" 
podID="a7f2bbf6-acbc-487a-a2ea-08e49a055eb5" containerID="3fc2c01e810b816844a3e05f96c2ab94fb7c1503af2abfcc136c12d107ebb7de" exitCode=0 Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.156766 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.156822 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"a7f2bbf6-acbc-487a-a2ea-08e49a055eb5","Type":"ContainerDied","Data":"3fc2c01e810b816844a3e05f96c2ab94fb7c1503af2abfcc136c12d107ebb7de"} Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.156861 4558 scope.go:117] "RemoveContainer" containerID="3fc2c01e810b816844a3e05f96c2ab94fb7c1503af2abfcc136c12d107ebb7de" Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.170375 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-keystone-listener-5459fb5f8-h5bbp" podStartSLOduration=4.170362397 podStartE2EDuration="4.170362397s" podCreationTimestamp="2026-01-20 17:32:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:32:50.163483632 +0000 UTC m=+3063.923821600" watchObservedRunningTime="2026-01-20 17:32:50.170362397 +0000 UTC m=+3063.930700364" Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.211706 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fkg7v\" (UniqueName: \"kubernetes.io/projected/a7f2bbf6-acbc-487a-a2ea-08e49a055eb5-kube-api-access-fkg7v\") pod \"a7f2bbf6-acbc-487a-a2ea-08e49a055eb5\" (UID: \"a7f2bbf6-acbc-487a-a2ea-08e49a055eb5\") " Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.211784 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7f2bbf6-acbc-487a-a2ea-08e49a055eb5-config-data\") pod \"a7f2bbf6-acbc-487a-a2ea-08e49a055eb5\" (UID: \"a7f2bbf6-acbc-487a-a2ea-08e49a055eb5\") " Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.211950 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7f2bbf6-acbc-487a-a2ea-08e49a055eb5-combined-ca-bundle\") pod \"a7f2bbf6-acbc-487a-a2ea-08e49a055eb5\" (UID: \"a7f2bbf6-acbc-487a-a2ea-08e49a055eb5\") " Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.212375 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/568f7066-e99f-45b2-aa41-28c808fb27b8-combined-ca-bundle\") pod \"memcached-0\" (UID: \"568f7066-e99f-45b2-aa41-28c808fb27b8\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.212419 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/568f7066-e99f-45b2-aa41-28c808fb27b8-kolla-config\") pod \"memcached-0\" (UID: \"568f7066-e99f-45b2-aa41-28c808fb27b8\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.212459 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/568f7066-e99f-45b2-aa41-28c808fb27b8-config-data\") pod 
\"memcached-0\" (UID: \"568f7066-e99f-45b2-aa41-28c808fb27b8\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.212491 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-67q4s\" (UniqueName: \"kubernetes.io/projected/568f7066-e99f-45b2-aa41-28c808fb27b8-kube-api-access-67q4s\") pod \"memcached-0\" (UID: \"568f7066-e99f-45b2-aa41-28c808fb27b8\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.212616 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/568f7066-e99f-45b2-aa41-28c808fb27b8-memcached-tls-certs\") pod \"memcached-0\" (UID: \"568f7066-e99f-45b2-aa41-28c808fb27b8\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.219129 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-65b4ff8654-bj52d"] Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.219436 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-keystone-listener-65b4ff8654-bj52d" podUID="113d7999-28c3-4738-aa99-bd09c1880299" containerName="barbican-keystone-listener-log" containerID="cri-o://d981d2b9fdcc50a6990df32e78be80ae43eebfa1b705595c1292a8f9a9e81ae5" gracePeriod=30 Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.219605 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-keystone-listener-65b4ff8654-bj52d" podUID="113d7999-28c3-4738-aa99-bd09c1880299" containerName="barbican-keystone-listener" containerID="cri-o://9bbacbd57db249ab8218348788165000873acf1f94027640d5d33d96e3684631" gracePeriod=30 Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.225285 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-worker-777f6887b5-wwl56" podStartSLOduration=4.225268037 podStartE2EDuration="4.225268037s" podCreationTimestamp="2026-01-20 17:32:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:32:50.194617414 +0000 UTC m=+3063.954955381" watchObservedRunningTime="2026-01-20 17:32:50.225268037 +0000 UTC m=+3063.985606004" Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.251763 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a7f2bbf6-acbc-487a-a2ea-08e49a055eb5-kube-api-access-fkg7v" (OuterVolumeSpecName: "kube-api-access-fkg7v") pod "a7f2bbf6-acbc-487a-a2ea-08e49a055eb5" (UID: "a7f2bbf6-acbc-487a-a2ea-08e49a055eb5"). InnerVolumeSpecName "kube-api-access-fkg7v". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.255185 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-worker-5bb9c47bc7-ct6jf"] Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.261346 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-worker-5bb9c47bc7-ct6jf" podUID="fbba86e7-87c6-4350-8bbd-565ba9abfe68" containerName="barbican-worker-log" containerID="cri-o://c03486a35df252536c36f85a6561b27a72947105aeca886fbecad26ee9903d17" gracePeriod=30 Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.263799 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-worker-5bb9c47bc7-ct6jf" podUID="fbba86e7-87c6-4350-8bbd-565ba9abfe68" containerName="barbican-worker" containerID="cri-o://8315184077e8a32f5cae764089867c8adc47f7cd18db8e5c92a42400d2b15103" gracePeriod=30 Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.300320 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7f2bbf6-acbc-487a-a2ea-08e49a055eb5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a7f2bbf6-acbc-487a-a2ea-08e49a055eb5" (UID: "a7f2bbf6-acbc-487a-a2ea-08e49a055eb5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.320036 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/568f7066-e99f-45b2-aa41-28c808fb27b8-memcached-tls-certs\") pod \"memcached-0\" (UID: \"568f7066-e99f-45b2-aa41-28c808fb27b8\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.320260 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/568f7066-e99f-45b2-aa41-28c808fb27b8-combined-ca-bundle\") pod \"memcached-0\" (UID: \"568f7066-e99f-45b2-aa41-28c808fb27b8\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.320317 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/568f7066-e99f-45b2-aa41-28c808fb27b8-kolla-config\") pod \"memcached-0\" (UID: \"568f7066-e99f-45b2-aa41-28c808fb27b8\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.320362 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/568f7066-e99f-45b2-aa41-28c808fb27b8-config-data\") pod \"memcached-0\" (UID: \"568f7066-e99f-45b2-aa41-28c808fb27b8\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.320393 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-67q4s\" (UniqueName: \"kubernetes.io/projected/568f7066-e99f-45b2-aa41-28c808fb27b8-kube-api-access-67q4s\") pod \"memcached-0\" (UID: \"568f7066-e99f-45b2-aa41-28c808fb27b8\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.320521 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a7f2bbf6-acbc-487a-a2ea-08e49a055eb5-combined-ca-bundle\") on node \"crc\" 
DevicePath \"\"" Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.320533 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fkg7v\" (UniqueName: \"kubernetes.io/projected/a7f2bbf6-acbc-487a-a2ea-08e49a055eb5-kube-api-access-fkg7v\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.325675 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/568f7066-e99f-45b2-aa41-28c808fb27b8-memcached-tls-certs\") pod \"memcached-0\" (UID: \"568f7066-e99f-45b2-aa41-28c808fb27b8\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.328448 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/568f7066-e99f-45b2-aa41-28c808fb27b8-kolla-config\") pod \"memcached-0\" (UID: \"568f7066-e99f-45b2-aa41-28c808fb27b8\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.329080 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/568f7066-e99f-45b2-aa41-28c808fb27b8-combined-ca-bundle\") pod \"memcached-0\" (UID: \"568f7066-e99f-45b2-aa41-28c808fb27b8\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.331901 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/568f7066-e99f-45b2-aa41-28c808fb27b8-config-data\") pod \"memcached-0\" (UID: \"568f7066-e99f-45b2-aa41-28c808fb27b8\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.342605 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-67q4s\" (UniqueName: \"kubernetes.io/projected/568f7066-e99f-45b2-aa41-28c808fb27b8-kube-api-access-67q4s\") pod \"memcached-0\" (UID: \"568f7066-e99f-45b2-aa41-28c808fb27b8\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.354005 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7f2bbf6-acbc-487a-a2ea-08e49a055eb5-config-data" (OuterVolumeSpecName: "config-data") pod "a7f2bbf6-acbc-487a-a2ea-08e49a055eb5" (UID: "a7f2bbf6-acbc-487a-a2ea-08e49a055eb5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.358495 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.397337 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.422923 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/nova-metadata-0" podUID="eb4881e5-cce6-4423-a9f4-d93f96a2ccb1" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.203:8775/\": read tcp 10.217.0.2:43126->10.217.1.203:8775: read: connection reset by peer" Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.422943 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/nova-metadata-0" podUID="eb4881e5-cce6-4423-a9f4-d93f96a2ccb1" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.203:8775/\": read tcp 10.217.0.2:43140->10.217.1.203:8775: read: connection reset by peer" Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.423013 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a7f2bbf6-acbc-487a-a2ea-08e49a055eb5-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.524908 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c5lng\" (UniqueName: \"kubernetes.io/projected/7aa6c24a-66a5-4b11-96a1-4d14001d1f5c-kube-api-access-c5lng\") pod \"7aa6c24a-66a5-4b11-96a1-4d14001d1f5c\" (UID: \"7aa6c24a-66a5-4b11-96a1-4d14001d1f5c\") " Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.525330 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7aa6c24a-66a5-4b11-96a1-4d14001d1f5c-config-data\") pod \"7aa6c24a-66a5-4b11-96a1-4d14001d1f5c\" (UID: \"7aa6c24a-66a5-4b11-96a1-4d14001d1f5c\") " Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.525357 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7aa6c24a-66a5-4b11-96a1-4d14001d1f5c-combined-ca-bundle\") pod \"7aa6c24a-66a5-4b11-96a1-4d14001d1f5c\" (UID: \"7aa6c24a-66a5-4b11-96a1-4d14001d1f5c\") " Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.533552 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7aa6c24a-66a5-4b11-96a1-4d14001d1f5c-kube-api-access-c5lng" (OuterVolumeSpecName: "kube-api-access-c5lng") pod "7aa6c24a-66a5-4b11-96a1-4d14001d1f5c" (UID: "7aa6c24a-66a5-4b11-96a1-4d14001d1f5c"). InnerVolumeSpecName "kube-api-access-c5lng". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.617713 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="39e9c81e-2923-472a-b892-5404d32842bb" path="/var/lib/kubelet/pods/39e9c81e-2923-472a-b892-5404d32842bb/volumes" Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.618221 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7d6b9532-22b5-41c9-8b15-83a9c515c52d" path="/var/lib/kubelet/pods/7d6b9532-22b5-41c9-8b15-83a9c515c52d/volumes" Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.631158 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c5lng\" (UniqueName: \"kubernetes.io/projected/7aa6c24a-66a5-4b11-96a1-4d14001d1f5c-kube-api-access-c5lng\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.642571 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7aa6c24a-66a5-4b11-96a1-4d14001d1f5c-config-data" (OuterVolumeSpecName: "config-data") pod "7aa6c24a-66a5-4b11-96a1-4d14001d1f5c" (UID: "7aa6c24a-66a5-4b11-96a1-4d14001d1f5c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.647206 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7aa6c24a-66a5-4b11-96a1-4d14001d1f5c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7aa6c24a-66a5-4b11-96a1-4d14001d1f5c" (UID: "7aa6c24a-66a5-4b11-96a1-4d14001d1f5c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.737131 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7aa6c24a-66a5-4b11-96a1-4d14001d1f5c-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.737182 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7aa6c24a-66a5-4b11-96a1-4d14001d1f5c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.765840 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.805233 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.815637 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:32:50 crc kubenswrapper[4558]: E0120 17:32:50.816069 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7aa6c24a-66a5-4b11-96a1-4d14001d1f5c" containerName="nova-scheduler-scheduler" Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.816089 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7aa6c24a-66a5-4b11-96a1-4d14001d1f5c" containerName="nova-scheduler-scheduler" Jan 20 17:32:50 crc kubenswrapper[4558]: E0120 17:32:50.816108 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7f2bbf6-acbc-487a-a2ea-08e49a055eb5" containerName="nova-cell1-conductor-conductor" Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.816115 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7f2bbf6-acbc-487a-a2ea-08e49a055eb5" 
containerName="nova-cell1-conductor-conductor" Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.816322 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="7aa6c24a-66a5-4b11-96a1-4d14001d1f5c" containerName="nova-scheduler-scheduler" Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.816361 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a7f2bbf6-acbc-487a-a2ea-08e49a055eb5" containerName="nova-cell1-conductor-conductor" Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.816989 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.819639 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-conductor-config-data" Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.825497 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.839083 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c686e4c2-68d0-4999-9078-90c70927d9ae-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"c686e4c2-68d0-4999-9078-90c70927d9ae\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.839139 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-btqr4\" (UniqueName: \"kubernetes.io/projected/c686e4c2-68d0-4999-9078-90c70927d9ae-kube-api-access-btqr4\") pod \"nova-cell1-conductor-0\" (UID: \"c686e4c2-68d0-4999-9078-90c70927d9ae\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.839303 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c686e4c2-68d0-4999-9078-90c70927d9ae-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"c686e4c2-68d0-4999-9078-90c70927d9ae\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.940724 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c686e4c2-68d0-4999-9078-90c70927d9ae-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"c686e4c2-68d0-4999-9078-90c70927d9ae\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.940833 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c686e4c2-68d0-4999-9078-90c70927d9ae-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"c686e4c2-68d0-4999-9078-90c70927d9ae\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.940877 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-btqr4\" (UniqueName: \"kubernetes.io/projected/c686e4c2-68d0-4999-9078-90c70927d9ae-kube-api-access-btqr4\") pod \"nova-cell1-conductor-0\" (UID: \"c686e4c2-68d0-4999-9078-90c70927d9ae\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.944980 4558 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c686e4c2-68d0-4999-9078-90c70927d9ae-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"c686e4c2-68d0-4999-9078-90c70927d9ae\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.947302 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c686e4c2-68d0-4999-9078-90c70927d9ae-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"c686e4c2-68d0-4999-9078-90c70927d9ae\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:32:50 crc kubenswrapper[4558]: I0120 17:32:50.961492 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-btqr4\" (UniqueName: \"kubernetes.io/projected/c686e4c2-68d0-4999-9078-90c70927d9ae-kube-api-access-btqr4\") pod \"nova-cell1-conductor-0\" (UID: \"c686e4c2-68d0-4999-9078-90c70927d9ae\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.094453 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.123178 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-56458f747-wvqtq" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.156926 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.190306 4558 generic.go:334] "Generic (PLEG): container finished" podID="7aa6c24a-66a5-4b11-96a1-4d14001d1f5c" containerID="075fdf51fe78df21a6645f555dba48d1e1068aaccdaf9394e743e0397b0f8c25" exitCode=0 Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.190648 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"7aa6c24a-66a5-4b11-96a1-4d14001d1f5c","Type":"ContainerDied","Data":"075fdf51fe78df21a6645f555dba48d1e1068aaccdaf9394e743e0397b0f8c25"} Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.190680 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"7aa6c24a-66a5-4b11-96a1-4d14001d1f5c","Type":"ContainerDied","Data":"58da02281d2c517c5fb8d6fde458e29e6833a321349b658a1345d5ceef2a99c0"} Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.190698 4558 scope.go:117] "RemoveContainer" containerID="075fdf51fe78df21a6645f555dba48d1e1068aaccdaf9394e743e0397b0f8c25" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.190812 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.228100 4558 generic.go:334] "Generic (PLEG): container finished" podID="e8c90851-f646-4a0c-8c0c-7ecac86fa5a7" containerID="13d6958e3f022365a5a2101645a748bcf6a2a94aa5188200a2ebb9ecfc435482" exitCode=0 Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.228154 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-75b5f89ffd-dsnd8" event={"ID":"e8c90851-f646-4a0c-8c0c-7ecac86fa5a7","Type":"ContainerDied","Data":"13d6958e3f022365a5a2101645a748bcf6a2a94aa5188200a2ebb9ecfc435482"} Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.254130 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/623ea472-e0f5-48c1-a621-89fe343686ff-logs\") pod \"623ea472-e0f5-48c1-a621-89fe343686ff\" (UID: \"623ea472-e0f5-48c1-a621-89fe343686ff\") " Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.254190 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa7956fe-8f61-4e0d-86c1-716e300c4059-combined-ca-bundle\") pod \"aa7956fe-8f61-4e0d-86c1-716e300c4059\" (UID: \"aa7956fe-8f61-4e0d-86c1-716e300c4059\") " Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.254280 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa7956fe-8f61-4e0d-86c1-716e300c4059-scripts\") pod \"aa7956fe-8f61-4e0d-86c1-716e300c4059\" (UID: \"aa7956fe-8f61-4e0d-86c1-716e300c4059\") " Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.254299 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa7956fe-8f61-4e0d-86c1-716e300c4059-config-data\") pod \"aa7956fe-8f61-4e0d-86c1-716e300c4059\" (UID: \"aa7956fe-8f61-4e0d-86c1-716e300c4059\") " Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.254319 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/aa7956fe-8f61-4e0d-86c1-716e300c4059-fernet-keys\") pod \"aa7956fe-8f61-4e0d-86c1-716e300c4059\" (UID: \"aa7956fe-8f61-4e0d-86c1-716e300c4059\") " Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.254347 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/aa7956fe-8f61-4e0d-86c1-716e300c4059-credential-keys\") pod \"aa7956fe-8f61-4e0d-86c1-716e300c4059\" (UID: \"aa7956fe-8f61-4e0d-86c1-716e300c4059\") " Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.254370 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5p6l9\" (UniqueName: \"kubernetes.io/projected/623ea472-e0f5-48c1-a621-89fe343686ff-kube-api-access-5p6l9\") pod \"623ea472-e0f5-48c1-a621-89fe343686ff\" (UID: \"623ea472-e0f5-48c1-a621-89fe343686ff\") " Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.254389 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g62sk\" (UniqueName: \"kubernetes.io/projected/aa7956fe-8f61-4e0d-86c1-716e300c4059-kube-api-access-g62sk\") pod \"aa7956fe-8f61-4e0d-86c1-716e300c4059\" (UID: \"aa7956fe-8f61-4e0d-86c1-716e300c4059\") " Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.254467 4558 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/623ea472-e0f5-48c1-a621-89fe343686ff-combined-ca-bundle\") pod \"623ea472-e0f5-48c1-a621-89fe343686ff\" (UID: \"623ea472-e0f5-48c1-a621-89fe343686ff\") " Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.254492 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/623ea472-e0f5-48c1-a621-89fe343686ff-config-data\") pod \"623ea472-e0f5-48c1-a621-89fe343686ff\" (UID: \"623ea472-e0f5-48c1-a621-89fe343686ff\") " Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.255607 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/623ea472-e0f5-48c1-a621-89fe343686ff-logs" (OuterVolumeSpecName: "logs") pod "623ea472-e0f5-48c1-a621-89fe343686ff" (UID: "623ea472-e0f5-48c1-a621-89fe343686ff"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.287613 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa7956fe-8f61-4e0d-86c1-716e300c4059-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "aa7956fe-8f61-4e0d-86c1-716e300c4059" (UID: "aa7956fe-8f61-4e0d-86c1-716e300c4059"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.291099 4558 generic.go:334] "Generic (PLEG): container finished" podID="eb4881e5-cce6-4423-a9f4-d93f96a2ccb1" containerID="c01c9738249c284d39a49949a1689663cc221dbc3ee6a65254441d2704e98b09" exitCode=0 Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.291179 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"eb4881e5-cce6-4423-a9f4-d93f96a2ccb1","Type":"ContainerDied","Data":"c01c9738249c284d39a49949a1689663cc221dbc3ee6a65254441d2704e98b09"} Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.292345 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/623ea472-e0f5-48c1-a621-89fe343686ff-kube-api-access-5p6l9" (OuterVolumeSpecName: "kube-api-access-5p6l9") pod "623ea472-e0f5-48c1-a621-89fe343686ff" (UID: "623ea472-e0f5-48c1-a621-89fe343686ff"). InnerVolumeSpecName "kube-api-access-5p6l9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.292356 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa7956fe-8f61-4e0d-86c1-716e300c4059-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "aa7956fe-8f61-4e0d-86c1-716e300c4059" (UID: "aa7956fe-8f61-4e0d-86c1-716e300c4059"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.292950 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.298443 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa7956fe-8f61-4e0d-86c1-716e300c4059-scripts" (OuterVolumeSpecName: "scripts") pod "aa7956fe-8f61-4e0d-86c1-716e300c4059" (UID: "aa7956fe-8f61-4e0d-86c1-716e300c4059"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.305267 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.308832 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"760135a9-d7f7-456e-b7d6-9f2e7730e382","Type":"ContainerStarted","Data":"c6654ae1097dc1779180cf64c658db2d6969ccaab32c2416576f239f35513d6a"} Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.308878 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"760135a9-d7f7-456e-b7d6-9f2e7730e382","Type":"ContainerStarted","Data":"2ddccc288a019aaabae17c74e035d0bf4fbbfc674f41378c25ee7d422b3e847c"} Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.312671 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aa7956fe-8f61-4e0d-86c1-716e300c4059-kube-api-access-g62sk" (OuterVolumeSpecName: "kube-api-access-g62sk") pod "aa7956fe-8f61-4e0d-86c1-716e300c4059" (UID: "aa7956fe-8f61-4e0d-86c1-716e300c4059"). InnerVolumeSpecName "kube-api-access-g62sk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.312699 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.312824 4558 scope.go:117] "RemoveContainer" containerID="075fdf51fe78df21a6645f555dba48d1e1068aaccdaf9394e743e0397b0f8c25" Jan 20 17:32:51 crc kubenswrapper[4558]: E0120 17:32:51.313056 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="623ea472-e0f5-48c1-a621-89fe343686ff" containerName="nova-api-log" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.313068 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="623ea472-e0f5-48c1-a621-89fe343686ff" containerName="nova-api-log" Jan 20 17:32:51 crc kubenswrapper[4558]: E0120 17:32:51.313091 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa7956fe-8f61-4e0d-86c1-716e300c4059" containerName="keystone-api" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.313099 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa7956fe-8f61-4e0d-86c1-716e300c4059" containerName="keystone-api" Jan 20 17:32:51 crc kubenswrapper[4558]: E0120 17:32:51.313124 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="623ea472-e0f5-48c1-a621-89fe343686ff" containerName="nova-api-api" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.313130 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="623ea472-e0f5-48c1-a621-89fe343686ff" containerName="nova-api-api" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.313292 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa7956fe-8f61-4e0d-86c1-716e300c4059" containerName="keystone-api" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.313314 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="623ea472-e0f5-48c1-a621-89fe343686ff" containerName="nova-api-api" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.313324 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="623ea472-e0f5-48c1-a621-89fe343686ff" containerName="nova-api-log" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.313875 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.317099 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-scheduler-config-data" Jan 20 17:32:51 crc kubenswrapper[4558]: E0120 17:32:51.322389 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"075fdf51fe78df21a6645f555dba48d1e1068aaccdaf9394e743e0397b0f8c25\": container with ID starting with 075fdf51fe78df21a6645f555dba48d1e1068aaccdaf9394e743e0397b0f8c25 not found: ID does not exist" containerID="075fdf51fe78df21a6645f555dba48d1e1068aaccdaf9394e743e0397b0f8c25" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.322505 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"075fdf51fe78df21a6645f555dba48d1e1068aaccdaf9394e743e0397b0f8c25"} err="failed to get container status \"075fdf51fe78df21a6645f555dba48d1e1068aaccdaf9394e743e0397b0f8c25\": rpc error: code = NotFound desc = could not find container \"075fdf51fe78df21a6645f555dba48d1e1068aaccdaf9394e743e0397b0f8c25\": container with ID starting with 075fdf51fe78df21a6645f555dba48d1e1068aaccdaf9394e743e0397b0f8c25 not found: ID does not exist" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.324998 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.334128 4558 generic.go:334] "Generic (PLEG): container finished" podID="113d7999-28c3-4738-aa99-bd09c1880299" containerID="d981d2b9fdcc50a6990df32e78be80ae43eebfa1b705595c1292a8f9a9e81ae5" exitCode=143 Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.335542 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-65b4ff8654-bj52d" event={"ID":"113d7999-28c3-4738-aa99-bd09c1880299","Type":"ContainerDied","Data":"d981d2b9fdcc50a6990df32e78be80ae43eebfa1b705595c1292a8f9a9e81ae5"} Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.348490 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" podStartSLOduration=3.348469471 podStartE2EDuration="3.348469471s" podCreationTimestamp="2026-01-20 17:32:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:32:51.330444779 +0000 UTC m=+3065.090782746" watchObservedRunningTime="2026-01-20 17:32:51.348469471 +0000 UTC m=+3065.108807438" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.370719 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/623ea472-e0f5-48c1-a621-89fe343686ff-config-data" (OuterVolumeSpecName: "config-data") pod "623ea472-e0f5-48c1-a621-89fe343686ff" (UID: "623ea472-e0f5-48c1-a621-89fe343686ff"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.377417 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a336b03d-9969-41de-9f19-f3479f76a33d-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"a336b03d-9969-41de-9f19-f3479f76a33d\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.377497 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lzmw4\" (UniqueName: \"kubernetes.io/projected/a336b03d-9969-41de-9f19-f3479f76a33d-kube-api-access-lzmw4\") pod \"nova-scheduler-0\" (UID: \"a336b03d-9969-41de-9f19-f3479f76a33d\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.377745 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a336b03d-9969-41de-9f19-f3479f76a33d-config-data\") pod \"nova-scheduler-0\" (UID: \"a336b03d-9969-41de-9f19-f3479f76a33d\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.377977 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa7956fe-8f61-4e0d-86c1-716e300c4059-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.377993 4558 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/aa7956fe-8f61-4e0d-86c1-716e300c4059-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.378005 4558 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/aa7956fe-8f61-4e0d-86c1-716e300c4059-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.378035 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5p6l9\" (UniqueName: \"kubernetes.io/projected/623ea472-e0f5-48c1-a621-89fe343686ff-kube-api-access-5p6l9\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.378048 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g62sk\" (UniqueName: \"kubernetes.io/projected/aa7956fe-8f61-4e0d-86c1-716e300c4059-kube-api-access-g62sk\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.378058 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/623ea472-e0f5-48c1-a621-89fe343686ff-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.378067 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/623ea472-e0f5-48c1-a621-89fe343686ff-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.386258 4558 generic.go:334] "Generic (PLEG): container finished" podID="fbba86e7-87c6-4350-8bbd-565ba9abfe68" containerID="c03486a35df252536c36f85a6561b27a72947105aeca886fbecad26ee9903d17" exitCode=143 Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.386405 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-5bb9c47bc7-ct6jf" 
event={"ID":"fbba86e7-87c6-4350-8bbd-565ba9abfe68","Type":"ContainerDied","Data":"c03486a35df252536c36f85a6561b27a72947105aeca886fbecad26ee9903d17"} Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.392612 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa7956fe-8f61-4e0d-86c1-716e300c4059-config-data" (OuterVolumeSpecName: "config-data") pod "aa7956fe-8f61-4e0d-86c1-716e300c4059" (UID: "aa7956fe-8f61-4e0d-86c1-716e300c4059"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.396657 4558 generic.go:334] "Generic (PLEG): container finished" podID="aa7956fe-8f61-4e0d-86c1-716e300c4059" containerID="45e428dbaa6ce016259fe3e88fa43969eb40f16dbbb8bdf480289cfc28c7be0e" exitCode=0 Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.396752 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-56458f747-wvqtq" event={"ID":"aa7956fe-8f61-4e0d-86c1-716e300c4059","Type":"ContainerDied","Data":"45e428dbaa6ce016259fe3e88fa43969eb40f16dbbb8bdf480289cfc28c7be0e"} Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.396781 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-56458f747-wvqtq" event={"ID":"aa7956fe-8f61-4e0d-86c1-716e300c4059","Type":"ContainerDied","Data":"b8533bb52b8311cc6e8d8471c0e63b718e9477ef334c429e7569596648ef2ba8"} Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.396810 4558 scope.go:117] "RemoveContainer" containerID="45e428dbaa6ce016259fe3e88fa43969eb40f16dbbb8bdf480289cfc28c7be0e" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.397031 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-56458f747-wvqtq" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.398662 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.408795 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-7fb744b5b7-vj8fh" event={"ID":"8516d676-2fa0-47c4-b81a-254ec50efd52","Type":"ContainerStarted","Data":"5f4eff277fd4cf9e126dcb473d4bc2082809a8fcea976047692503f0bb42d441"} Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.411400 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/keystone-7fb744b5b7-vj8fh" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.435476 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/623ea472-e0f5-48c1-a621-89fe343686ff-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "623ea472-e0f5-48c1-a621-89fe343686ff" (UID: "623ea472-e0f5-48c1-a621-89fe343686ff"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.440516 4558 generic.go:334] "Generic (PLEG): container finished" podID="63df8460-ab49-4787-92a7-54a14c0179ca" containerID="226374bee93280652306e72191bcd834be88cb52e519757b561baa0a32c4b976" exitCode=0 Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.440547 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"63df8460-ab49-4787-92a7-54a14c0179ca","Type":"ContainerDied","Data":"226374bee93280652306e72191bcd834be88cb52e519757b561baa0a32c4b976"} Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.451661 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.451909 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa7956fe-8f61-4e0d-86c1-716e300c4059-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "aa7956fe-8f61-4e0d-86c1-716e300c4059" (UID: "aa7956fe-8f61-4e0d-86c1-716e300c4059"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.472308 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-5c6488b48b-4sj7k" event={"ID":"48ebccca-cc18-415f-b6f8-f301f99e5d3a","Type":"ContainerStarted","Data":"38c6eff5636dfec3724781cb0cc82717214fdcbd1a3eb92d714d5934aa1b6128"} Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.472381 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-5c6488b48b-4sj7k" event={"ID":"48ebccca-cc18-415f-b6f8-f301f99e5d3a","Type":"ContainerStarted","Data":"399e63aaadfe0fa5502fd43a62acd0f3a44179ff829093b2b62e55e74b7ceac6"} Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.474379 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/neutron-5c6488b48b-4sj7k" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.483526 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a336b03d-9969-41de-9f19-f3479f76a33d-config-data\") pod \"nova-scheduler-0\" (UID: \"a336b03d-9969-41de-9f19-f3479f76a33d\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.484197 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a336b03d-9969-41de-9f19-f3479f76a33d-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"a336b03d-9969-41de-9f19-f3479f76a33d\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.491859 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lzmw4\" (UniqueName: \"kubernetes.io/projected/a336b03d-9969-41de-9f19-f3479f76a33d-kube-api-access-lzmw4\") pod \"nova-scheduler-0\" (UID: \"a336b03d-9969-41de-9f19-f3479f76a33d\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.491973 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa7956fe-8f61-4e0d-86c1-716e300c4059-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.491990 4558 
reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/623ea472-e0f5-48c1-a621-89fe343686ff-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.492003 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa7956fe-8f61-4e0d-86c1-716e300c4059-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.492264 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.496354 4558 scope.go:117] "RemoveContainer" containerID="45e428dbaa6ce016259fe3e88fa43969eb40f16dbbb8bdf480289cfc28c7be0e" Jan 20 17:32:51 crc kubenswrapper[4558]: E0120 17:32:51.497464 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"45e428dbaa6ce016259fe3e88fa43969eb40f16dbbb8bdf480289cfc28c7be0e\": container with ID starting with 45e428dbaa6ce016259fe3e88fa43969eb40f16dbbb8bdf480289cfc28c7be0e not found: ID does not exist" containerID="45e428dbaa6ce016259fe3e88fa43969eb40f16dbbb8bdf480289cfc28c7be0e" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.497498 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"45e428dbaa6ce016259fe3e88fa43969eb40f16dbbb8bdf480289cfc28c7be0e"} err="failed to get container status \"45e428dbaa6ce016259fe3e88fa43969eb40f16dbbb8bdf480289cfc28c7be0e\": rpc error: code = NotFound desc = could not find container \"45e428dbaa6ce016259fe3e88fa43969eb40f16dbbb8bdf480289cfc28c7be0e\": container with ID starting with 45e428dbaa6ce016259fe3e88fa43969eb40f16dbbb8bdf480289cfc28c7be0e not found: ID does not exist" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.505341 4558 generic.go:334] "Generic (PLEG): container finished" podID="623ea472-e0f5-48c1-a621-89fe343686ff" containerID="8383f65df719eb3597f000c97c3244e2dda5aeac1cb0ef54093fbcf77af4f6b2" exitCode=0 Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.505401 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"623ea472-e0f5-48c1-a621-89fe343686ff","Type":"ContainerDied","Data":"8383f65df719eb3597f000c97c3244e2dda5aeac1cb0ef54093fbcf77af4f6b2"} Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.505429 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"623ea472-e0f5-48c1-a621-89fe343686ff","Type":"ContainerDied","Data":"0c2567480a3eac2f76add9cadd1120f37e159ed68ab470d3e65bd16322a30eae"} Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.505448 4558 scope.go:117] "RemoveContainer" containerID="8383f65df719eb3597f000c97c3244e2dda5aeac1cb0ef54093fbcf77af4f6b2" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.505551 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.528956 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a336b03d-9969-41de-9f19-f3479f76a33d-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"a336b03d-9969-41de-9f19-f3479f76a33d\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.537811 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lzmw4\" (UniqueName: \"kubernetes.io/projected/a336b03d-9969-41de-9f19-f3479f76a33d-kube-api-access-lzmw4\") pod \"nova-scheduler-0\" (UID: \"a336b03d-9969-41de-9f19-f3479f76a33d\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.572142 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a336b03d-9969-41de-9f19-f3479f76a33d-config-data\") pod \"nova-scheduler-0\" (UID: \"a336b03d-9969-41de-9f19-f3479f76a33d\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.592292 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/keystone-7fb744b5b7-vj8fh" podStartSLOduration=3.592272939 podStartE2EDuration="3.592272939s" podCreationTimestamp="2026-01-20 17:32:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:32:51.467342932 +0000 UTC m=+3065.227680899" watchObservedRunningTime="2026-01-20 17:32:51.592272939 +0000 UTC m=+3065.352610906" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.594838 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/63df8460-ab49-4787-92a7-54a14c0179ca-combined-ca-bundle\") pod \"63df8460-ab49-4787-92a7-54a14c0179ca\" (UID: \"63df8460-ab49-4787-92a7-54a14c0179ca\") " Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.594908 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/63df8460-ab49-4787-92a7-54a14c0179ca-config-data\") pod \"63df8460-ab49-4787-92a7-54a14c0179ca\" (UID: \"63df8460-ab49-4787-92a7-54a14c0179ca\") " Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.595018 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/63df8460-ab49-4787-92a7-54a14c0179ca-scripts\") pod \"63df8460-ab49-4787-92a7-54a14c0179ca\" (UID: \"63df8460-ab49-4787-92a7-54a14c0179ca\") " Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.595048 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb4881e5-cce6-4423-a9f4-d93f96a2ccb1-combined-ca-bundle\") pod \"eb4881e5-cce6-4423-a9f4-d93f96a2ccb1\" (UID: \"eb4881e5-cce6-4423-a9f4-d93f96a2ccb1\") " Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.595072 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/63df8460-ab49-4787-92a7-54a14c0179ca-config-data-custom\") pod \"63df8460-ab49-4787-92a7-54a14c0179ca\" (UID: \"63df8460-ab49-4787-92a7-54a14c0179ca\") " Jan 20 17:32:51 crc 
kubenswrapper[4558]: I0120 17:32:51.595117 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l88ft\" (UniqueName: \"kubernetes.io/projected/63df8460-ab49-4787-92a7-54a14c0179ca-kube-api-access-l88ft\") pod \"63df8460-ab49-4787-92a7-54a14c0179ca\" (UID: \"63df8460-ab49-4787-92a7-54a14c0179ca\") " Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.595136 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/63df8460-ab49-4787-92a7-54a14c0179ca-etc-machine-id\") pod \"63df8460-ab49-4787-92a7-54a14c0179ca\" (UID: \"63df8460-ab49-4787-92a7-54a14c0179ca\") " Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.595180 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/63df8460-ab49-4787-92a7-54a14c0179ca-logs\") pod \"63df8460-ab49-4787-92a7-54a14c0179ca\" (UID: \"63df8460-ab49-4787-92a7-54a14c0179ca\") " Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.595195 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eb4881e5-cce6-4423-a9f4-d93f96a2ccb1-logs\") pod \"eb4881e5-cce6-4423-a9f4-d93f96a2ccb1\" (UID: \"eb4881e5-cce6-4423-a9f4-d93f96a2ccb1\") " Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.595244 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tp227\" (UniqueName: \"kubernetes.io/projected/eb4881e5-cce6-4423-a9f4-d93f96a2ccb1-kube-api-access-tp227\") pod \"eb4881e5-cce6-4423-a9f4-d93f96a2ccb1\" (UID: \"eb4881e5-cce6-4423-a9f4-d93f96a2ccb1\") " Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.595399 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb4881e5-cce6-4423-a9f4-d93f96a2ccb1-config-data\") pod \"eb4881e5-cce6-4423-a9f4-d93f96a2ccb1\" (UID: \"eb4881e5-cce6-4423-a9f4-d93f96a2ccb1\") " Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.597254 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/63df8460-ab49-4787-92a7-54a14c0179ca-logs" (OuterVolumeSpecName: "logs") pod "63df8460-ab49-4787-92a7-54a14c0179ca" (UID: "63df8460-ab49-4787-92a7-54a14c0179ca"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.597521 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/63df8460-ab49-4787-92a7-54a14c0179ca-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "63df8460-ab49-4787-92a7-54a14c0179ca" (UID: "63df8460-ab49-4787-92a7-54a14c0179ca"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.599528 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eb4881e5-cce6-4423-a9f4-d93f96a2ccb1-logs" (OuterVolumeSpecName: "logs") pod "eb4881e5-cce6-4423-a9f4-d93f96a2ccb1" (UID: "eb4881e5-cce6-4423-a9f4-d93f96a2ccb1"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.606395 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/neutron-5c6488b48b-4sj7k" podStartSLOduration=3.606381988 podStartE2EDuration="3.606381988s" podCreationTimestamp="2026-01-20 17:32:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:32:51.548501826 +0000 UTC m=+3065.308839793" watchObservedRunningTime="2026-01-20 17:32:51.606381988 +0000 UTC m=+3065.366719956" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.616355 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/63df8460-ab49-4787-92a7-54a14c0179ca-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "63df8460-ab49-4787-92a7-54a14c0179ca" (UID: "63df8460-ab49-4787-92a7-54a14c0179ca"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.616426 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/63df8460-ab49-4787-92a7-54a14c0179ca-scripts" (OuterVolumeSpecName: "scripts") pod "63df8460-ab49-4787-92a7-54a14c0179ca" (UID: "63df8460-ab49-4787-92a7-54a14c0179ca"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.616555 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/63df8460-ab49-4787-92a7-54a14c0179ca-kube-api-access-l88ft" (OuterVolumeSpecName: "kube-api-access-l88ft") pod "63df8460-ab49-4787-92a7-54a14c0179ca" (UID: "63df8460-ab49-4787-92a7-54a14c0179ca"). InnerVolumeSpecName "kube-api-access-l88ft". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.620327 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eb4881e5-cce6-4423-a9f4-d93f96a2ccb1-kube-api-access-tp227" (OuterVolumeSpecName: "kube-api-access-tp227") pod "eb4881e5-cce6-4423-a9f4-d93f96a2ccb1" (UID: "eb4881e5-cce6-4423-a9f4-d93f96a2ccb1"). InnerVolumeSpecName "kube-api-access-tp227". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:32:51 crc kubenswrapper[4558]: E0120 17:32:51.621350 4558 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7aa6c24a_66a5_4b11_96a1_4d14001d1f5c.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7aa6c24a_66a5_4b11_96a1_4d14001d1f5c.slice/crio-58da02281d2c517c5fb8d6fde458e29e6833a321349b658a1345d5ceef2a99c0\": RecentStats: unable to find data in memory cache]" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.634432 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eb4881e5-cce6-4423-a9f4-d93f96a2ccb1-config-data" (OuterVolumeSpecName: "config-data") pod "eb4881e5-cce6-4423-a9f4-d93f96a2ccb1" (UID: "eb4881e5-cce6-4423-a9f4-d93f96a2ccb1"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.652498 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eb4881e5-cce6-4423-a9f4-d93f96a2ccb1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "eb4881e5-cce6-4423-a9f4-d93f96a2ccb1" (UID: "eb4881e5-cce6-4423-a9f4-d93f96a2ccb1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.658307 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/63df8460-ab49-4787-92a7-54a14c0179ca-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "63df8460-ab49-4787-92a7-54a14c0179ca" (UID: "63df8460-ab49-4787-92a7-54a14c0179ca"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.671329 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.680189 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/63df8460-ab49-4787-92a7-54a14c0179ca-config-data" (OuterVolumeSpecName: "config-data") pod "63df8460-ab49-4787-92a7-54a14c0179ca" (UID: "63df8460-ab49-4787-92a7-54a14c0179ca"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.699065 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb4881e5-cce6-4423-a9f4-d93f96a2ccb1-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.699093 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/63df8460-ab49-4787-92a7-54a14c0179ca-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.699104 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/63df8460-ab49-4787-92a7-54a14c0179ca-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.699113 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/63df8460-ab49-4787-92a7-54a14c0179ca-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.699123 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb4881e5-cce6-4423-a9f4-d93f96a2ccb1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.699132 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/63df8460-ab49-4787-92a7-54a14c0179ca-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.699141 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l88ft\" (UniqueName: \"kubernetes.io/projected/63df8460-ab49-4787-92a7-54a14c0179ca-kube-api-access-l88ft\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.699150 4558 reconciler_common.go:293] "Volume detached for volume 
\"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/63df8460-ab49-4787-92a7-54a14c0179ca-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.699174 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/63df8460-ab49-4787-92a7-54a14c0179ca-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.699182 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eb4881e5-cce6-4423-a9f4-d93f96a2ccb1-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.699192 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tp227\" (UniqueName: \"kubernetes.io/projected/eb4881e5-cce6-4423-a9f4-d93f96a2ccb1-kube-api-access-tp227\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.768955 4558 scope.go:117] "RemoveContainer" containerID="90a23fb16c326ed369750450259a15b6eb7aa5fc9dc6005c29c9b1e37c002a27" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.787105 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.796891 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.804392 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:32:51 crc kubenswrapper[4558]: E0120 17:32:51.805094 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="63df8460-ab49-4787-92a7-54a14c0179ca" containerName="cinder-api-log" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.805110 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="63df8460-ab49-4787-92a7-54a14c0179ca" containerName="cinder-api-log" Jan 20 17:32:51 crc kubenswrapper[4558]: E0120 17:32:51.805144 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="63df8460-ab49-4787-92a7-54a14c0179ca" containerName="cinder-api" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.805150 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="63df8460-ab49-4787-92a7-54a14c0179ca" containerName="cinder-api" Jan 20 17:32:51 crc kubenswrapper[4558]: E0120 17:32:51.805189 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb4881e5-cce6-4423-a9f4-d93f96a2ccb1" containerName="nova-metadata-metadata" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.805196 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb4881e5-cce6-4423-a9f4-d93f96a2ccb1" containerName="nova-metadata-metadata" Jan 20 17:32:51 crc kubenswrapper[4558]: E0120 17:32:51.805225 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb4881e5-cce6-4423-a9f4-d93f96a2ccb1" containerName="nova-metadata-log" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.805232 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb4881e5-cce6-4423-a9f4-d93f96a2ccb1" containerName="nova-metadata-log" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.805403 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="63df8460-ab49-4787-92a7-54a14c0179ca" containerName="cinder-api" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.805417 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="eb4881e5-cce6-4423-a9f4-d93f96a2ccb1" 
containerName="nova-metadata-metadata" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.805429 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="eb4881e5-cce6-4423-a9f4-d93f96a2ccb1" containerName="nova-metadata-log" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.805443 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="63df8460-ab49-4787-92a7-54a14c0179ca" containerName="cinder-api-log" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.806600 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.809311 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-api-config-data" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.821618 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-56458f747-wvqtq"] Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.825825 4558 scope.go:117] "RemoveContainer" containerID="8383f65df719eb3597f000c97c3244e2dda5aeac1cb0ef54093fbcf77af4f6b2" Jan 20 17:32:51 crc kubenswrapper[4558]: E0120 17:32:51.826155 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8383f65df719eb3597f000c97c3244e2dda5aeac1cb0ef54093fbcf77af4f6b2\": container with ID starting with 8383f65df719eb3597f000c97c3244e2dda5aeac1cb0ef54093fbcf77af4f6b2 not found: ID does not exist" containerID="8383f65df719eb3597f000c97c3244e2dda5aeac1cb0ef54093fbcf77af4f6b2" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.826203 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8383f65df719eb3597f000c97c3244e2dda5aeac1cb0ef54093fbcf77af4f6b2"} err="failed to get container status \"8383f65df719eb3597f000c97c3244e2dda5aeac1cb0ef54093fbcf77af4f6b2\": rpc error: code = NotFound desc = could not find container \"8383f65df719eb3597f000c97c3244e2dda5aeac1cb0ef54093fbcf77af4f6b2\": container with ID starting with 8383f65df719eb3597f000c97c3244e2dda5aeac1cb0ef54093fbcf77af4f6b2 not found: ID does not exist" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.826224 4558 scope.go:117] "RemoveContainer" containerID="90a23fb16c326ed369750450259a15b6eb7aa5fc9dc6005c29c9b1e37c002a27" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.827719 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-56458f747-wvqtq"] Jan 20 17:32:51 crc kubenswrapper[4558]: E0120 17:32:51.830876 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"90a23fb16c326ed369750450259a15b6eb7aa5fc9dc6005c29c9b1e37c002a27\": container with ID starting with 90a23fb16c326ed369750450259a15b6eb7aa5fc9dc6005c29c9b1e37c002a27 not found: ID does not exist" containerID="90a23fb16c326ed369750450259a15b6eb7aa5fc9dc6005c29c9b1e37c002a27" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.830899 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"90a23fb16c326ed369750450259a15b6eb7aa5fc9dc6005c29c9b1e37c002a27"} err="failed to get container status \"90a23fb16c326ed369750450259a15b6eb7aa5fc9dc6005c29c9b1e37c002a27\": rpc error: code = NotFound desc = could not find container \"90a23fb16c326ed369750450259a15b6eb7aa5fc9dc6005c29c9b1e37c002a27\": container with ID starting with 
90a23fb16c326ed369750450259a15b6eb7aa5fc9dc6005c29c9b1e37c002a27 not found: ID does not exist" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.833090 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.839500 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.903566 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6a66de52-5f9c-46c6-aa54-8a0ba3288230-config-data\") pod \"nova-api-0\" (UID: \"6a66de52-5f9c-46c6-aa54-8a0ba3288230\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.903642 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6a66de52-5f9c-46c6-aa54-8a0ba3288230-logs\") pod \"nova-api-0\" (UID: \"6a66de52-5f9c-46c6-aa54-8a0ba3288230\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.903686 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pzlfz\" (UniqueName: \"kubernetes.io/projected/6a66de52-5f9c-46c6-aa54-8a0ba3288230-kube-api-access-pzlfz\") pod \"nova-api-0\" (UID: \"6a66de52-5f9c-46c6-aa54-8a0ba3288230\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:32:51 crc kubenswrapper[4558]: I0120 17:32:51.903740 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6a66de52-5f9c-46c6-aa54-8a0ba3288230-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6a66de52-5f9c-46c6-aa54-8a0ba3288230\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.005778 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pzlfz\" (UniqueName: \"kubernetes.io/projected/6a66de52-5f9c-46c6-aa54-8a0ba3288230-kube-api-access-pzlfz\") pod \"nova-api-0\" (UID: \"6a66de52-5f9c-46c6-aa54-8a0ba3288230\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.005864 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6a66de52-5f9c-46c6-aa54-8a0ba3288230-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6a66de52-5f9c-46c6-aa54-8a0ba3288230\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.006033 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6a66de52-5f9c-46c6-aa54-8a0ba3288230-config-data\") pod \"nova-api-0\" (UID: \"6a66de52-5f9c-46c6-aa54-8a0ba3288230\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.006094 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6a66de52-5f9c-46c6-aa54-8a0ba3288230-logs\") pod \"nova-api-0\" (UID: \"6a66de52-5f9c-46c6-aa54-8a0ba3288230\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.006746 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" 
(UniqueName: \"kubernetes.io/empty-dir/6a66de52-5f9c-46c6-aa54-8a0ba3288230-logs\") pod \"nova-api-0\" (UID: \"6a66de52-5f9c-46c6-aa54-8a0ba3288230\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.012686 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6a66de52-5f9c-46c6-aa54-8a0ba3288230-config-data\") pod \"nova-api-0\" (UID: \"6a66de52-5f9c-46c6-aa54-8a0ba3288230\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.014111 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6a66de52-5f9c-46c6-aa54-8a0ba3288230-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6a66de52-5f9c-46c6-aa54-8a0ba3288230\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.025767 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pzlfz\" (UniqueName: \"kubernetes.io/projected/6a66de52-5f9c-46c6-aa54-8a0ba3288230-kube-api-access-pzlfz\") pod \"nova-api-0\" (UID: \"6a66de52-5f9c-46c6-aa54-8a0ba3288230\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.182311 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.330775 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" podUID="7d6b9532-22b5-41c9-8b15-83a9c515c52d" containerName="nova-cell1-novncproxy-novncproxy" probeResult="failure" output="Get \"http://10.217.1.194:6080/vnc_lite.html\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.337557 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.368075 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:32:52 crc kubenswrapper[4558]: W0120 17:32:52.395371 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda336b03d_9969_41de_9f19_f3479f76a33d.slice/crio-d3cfa6c9ac5163efbd1033e4185e44f56fb8dcb7c6a8af9fb92422829151f9ea WatchSource:0}: Error finding container d3cfa6c9ac5163efbd1033e4185e44f56fb8dcb7c6a8af9fb92422829151f9ea: Status 404 returned error can't find the container with id d3cfa6c9ac5163efbd1033e4185e44f56fb8dcb7c6a8af9fb92422829151f9ea Jan 20 17:32:52 crc kubenswrapper[4558]: E0120 17:32:52.445264 4558 upgradeaware.go:441] Error proxying data from backend to client: writeto tcp 192.168.25.8:56732->192.168.25.8:43883: read tcp 192.168.25.8:56732->192.168.25.8:43883: read: connection reset by peer Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.524828 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aaaaf2d3-8f74-4ce7-8407-2f0b0bece070-combined-ca-bundle\") pod \"aaaaf2d3-8f74-4ce7-8407-2f0b0bece070\" (UID: \"aaaaf2d3-8f74-4ce7-8407-2f0b0bece070\") " Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.524902 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aaaaf2d3-8f74-4ce7-8407-2f0b0bece070-scripts\") pod \"aaaaf2d3-8f74-4ce7-8407-2f0b0bece070\" (UID: \"aaaaf2d3-8f74-4ce7-8407-2f0b0bece070\") " Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.524932 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rq4md\" (UniqueName: \"kubernetes.io/projected/aaaaf2d3-8f74-4ce7-8407-2f0b0bece070-kube-api-access-rq4md\") pod \"aaaaf2d3-8f74-4ce7-8407-2f0b0bece070\" (UID: \"aaaaf2d3-8f74-4ce7-8407-2f0b0bece070\") " Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.524963 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"aaaaf2d3-8f74-4ce7-8407-2f0b0bece070\" (UID: \"aaaaf2d3-8f74-4ce7-8407-2f0b0bece070\") " Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.525011 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aaaaf2d3-8f74-4ce7-8407-2f0b0bece070-logs\") pod \"aaaaf2d3-8f74-4ce7-8407-2f0b0bece070\" (UID: \"aaaaf2d3-8f74-4ce7-8407-2f0b0bece070\") " Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.525121 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aaaaf2d3-8f74-4ce7-8407-2f0b0bece070-config-data\") pod \"aaaaf2d3-8f74-4ce7-8407-2f0b0bece070\" (UID: \"aaaaf2d3-8f74-4ce7-8407-2f0b0bece070\") " Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.525178 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/aaaaf2d3-8f74-4ce7-8407-2f0b0bece070-httpd-run\") pod \"aaaaf2d3-8f74-4ce7-8407-2f0b0bece070\" (UID: \"aaaaf2d3-8f74-4ce7-8407-2f0b0bece070\") " Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.525934 4558 operation_generator.go:803] 
UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aaaaf2d3-8f74-4ce7-8407-2f0b0bece070-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "aaaaf2d3-8f74-4ce7-8407-2f0b0bece070" (UID: "aaaaf2d3-8f74-4ce7-8407-2f0b0bece070"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.531424 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aaaaf2d3-8f74-4ce7-8407-2f0b0bece070-logs" (OuterVolumeSpecName: "logs") pod "aaaaf2d3-8f74-4ce7-8407-2f0b0bece070" (UID: "aaaaf2d3-8f74-4ce7-8407-2f0b0bece070"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.541974 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage15-crc" (OuterVolumeSpecName: "glance") pod "aaaaf2d3-8f74-4ce7-8407-2f0b0bece070" (UID: "aaaaf2d3-8f74-4ce7-8407-2f0b0bece070"). InnerVolumeSpecName "local-storage15-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.554565 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aaaaf2d3-8f74-4ce7-8407-2f0b0bece070-scripts" (OuterVolumeSpecName: "scripts") pod "aaaaf2d3-8f74-4ce7-8407-2f0b0bece070" (UID: "aaaaf2d3-8f74-4ce7-8407-2f0b0bece070"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.557679 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"63df8460-ab49-4787-92a7-54a14c0179ca","Type":"ContainerDied","Data":"c7aed5c14d46c61c407b439488d836d230e8284647781ff979fb14718686f4f9"} Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.557733 4558 scope.go:117] "RemoveContainer" containerID="226374bee93280652306e72191bcd834be88cb52e519757b561baa0a32c4b976" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.557860 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.566334 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aaaaf2d3-8f74-4ce7-8407-2f0b0bece070-kube-api-access-rq4md" (OuterVolumeSpecName: "kube-api-access-rq4md") pod "aaaaf2d3-8f74-4ce7-8407-2f0b0bece070" (UID: "aaaaf2d3-8f74-4ce7-8407-2f0b0bece070"). InnerVolumeSpecName "kube-api-access-rq4md". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.581931 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="623ea472-e0f5-48c1-a621-89fe343686ff" path="/var/lib/kubelet/pods/623ea472-e0f5-48c1-a621-89fe343686ff/volumes" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.583510 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7aa6c24a-66a5-4b11-96a1-4d14001d1f5c" path="/var/lib/kubelet/pods/7aa6c24a-66a5-4b11-96a1-4d14001d1f5c/volumes" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.584120 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a7f2bbf6-acbc-487a-a2ea-08e49a055eb5" path="/var/lib/kubelet/pods/a7f2bbf6-acbc-487a-a2ea-08e49a055eb5/volumes" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.587622 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.589491 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aa7956fe-8f61-4e0d-86c1-716e300c4059" path="/var/lib/kubelet/pods/aa7956fe-8f61-4e0d-86c1-716e300c4059/volumes" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.604312 4558 generic.go:334] "Generic (PLEG): container finished" podID="59e7ef92-3d74-4334-ac19-671eb3199d0b" containerID="40ef3c5a952862f3e1bf0e9c9f5e2c665b77d0d4387f66b4456d7a0b64be3cbf" exitCode=0 Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.628069 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aaaaf2d3-8f74-4ce7-8407-2f0b0bece070-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.628235 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rq4md\" (UniqueName: \"kubernetes.io/projected/aaaaf2d3-8f74-4ce7-8407-2f0b0bece070-kube-api-access-rq4md\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.628326 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") on node \"crc\" " Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.628398 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aaaaf2d3-8f74-4ce7-8407-2f0b0bece070-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.628553 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/aaaaf2d3-8f74-4ce7-8407-2f0b0bece070-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.636442 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/rabbitmq-server-0" podUID="35b2371f-57f2-4728-b32f-1ba587b4532e" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.1.125:5672: connect: connection refused" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.639718 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aaaaf2d3-8f74-4ce7-8407-2f0b0bece070-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "aaaaf2d3-8f74-4ce7-8407-2f0b0bece070" (UID: "aaaaf2d3-8f74-4ce7-8407-2f0b0bece070"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.642670 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aaaaf2d3-8f74-4ce7-8407-2f0b0bece070-config-data" (OuterVolumeSpecName: "config-data") pod "aaaaf2d3-8f74-4ce7-8407-2f0b0bece070" (UID: "aaaaf2d3-8f74-4ce7-8407-2f0b0bece070"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.649066 4558 generic.go:334] "Generic (PLEG): container finished" podID="aaaaf2d3-8f74-4ce7-8407-2f0b0bece070" containerID="b5f54a9d58e26e3dd566d5f5f71a6cabd02248b3e9fd086fccf6af2e071db403" exitCode=0 Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.649224 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.652734 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podStartSLOduration=2.652719194 podStartE2EDuration="2.652719194s" podCreationTimestamp="2026-01-20 17:32:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:32:52.642988647 +0000 UTC m=+3066.403326615" watchObservedRunningTime="2026-01-20 17:32:52.652719194 +0000 UTC m=+3066.413057161" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.663541 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage15-crc" (UniqueName: "kubernetes.io/local-volume/local-storage15-crc") on node "crc" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.686888 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/memcached-0" podStartSLOduration=3.686868617 podStartE2EDuration="3.686868617s" podCreationTimestamp="2026-01-20 17:32:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:32:52.674483128 +0000 UTC m=+3066.434821094" watchObservedRunningTime="2026-01-20 17:32:52.686868617 +0000 UTC m=+3066.447206583" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.696093 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.696132 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.696145 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.696157 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"a336b03d-9969-41de-9f19-f3479f76a33d","Type":"ContainerStarted","Data":"d3cfa6c9ac5163efbd1033e4185e44f56fb8dcb7c6a8af9fb92422829151f9ea"} Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.696202 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"eb4881e5-cce6-4423-a9f4-d93f96a2ccb1","Type":"ContainerDied","Data":"9298cac7d4049f7510589ee498be2017851e9b87876aa18bbe6002445438312d"} Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.696223 4558 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"59e7ef92-3d74-4334-ac19-671eb3199d0b","Type":"ContainerDied","Data":"40ef3c5a952862f3e1bf0e9c9f5e2c665b77d0d4387f66b4456d7a0b64be3cbf"} Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.696237 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"c686e4c2-68d0-4999-9078-90c70927d9ae","Type":"ContainerStarted","Data":"46edde77e0df814c67d5b21fe72058f3eb4efd6acd4a7f68a737219cd8448b40"} Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.696245 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"c686e4c2-68d0-4999-9078-90c70927d9ae","Type":"ContainerStarted","Data":"e29581ea8179ca9452a837725910db234522125b44cce80de4ab2a0f44cb5ea0"} Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.696254 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"aaaaf2d3-8f74-4ce7-8407-2f0b0bece070","Type":"ContainerDied","Data":"b5f54a9d58e26e3dd566d5f5f71a6cabd02248b3e9fd086fccf6af2e071db403"} Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.696266 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"aaaaf2d3-8f74-4ce7-8407-2f0b0bece070","Type":"ContainerDied","Data":"fd97bcd155d8655d88e65b818cf9169181c3b48d6002d6b382f7268fcedceba0"} Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.696274 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"568f7066-e99f-45b2-aa41-28c808fb27b8","Type":"ContainerStarted","Data":"0b26ed4dfd437ec4fd0415ae7cb52ebc72347c26385495a0c7ac90e3a3686790"} Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.696283 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"568f7066-e99f-45b2-aa41-28c808fb27b8","Type":"ContainerStarted","Data":"8adabdd93232b90e30c1a677f57f38211dad7fec2224573f6fbfa669870559a9"} Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.707337 4558 scope.go:117] "RemoveContainer" containerID="5f3ae803d524ce3cd6aaa39d2f91f748bd07e8465458e550a0da0000266138f6" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.719648 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.732217 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.732249 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aaaaf2d3-8f74-4ce7-8407-2f0b0bece070-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.732268 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aaaaf2d3-8f74-4ce7-8407-2f0b0bece070-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.739416 4558 scope.go:117] "RemoveContainer" containerID="c01c9738249c284d39a49949a1689663cc221dbc3ee6a65254441d2704e98b09" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.825098 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.836835 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59e7ef92-3d74-4334-ac19-671eb3199d0b-combined-ca-bundle\") pod \"59e7ef92-3d74-4334-ac19-671eb3199d0b\" (UID: \"59e7ef92-3d74-4334-ac19-671eb3199d0b\") " Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.836950 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8p4k8\" (UniqueName: \"kubernetes.io/projected/59e7ef92-3d74-4334-ac19-671eb3199d0b-kube-api-access-8p4k8\") pod \"59e7ef92-3d74-4334-ac19-671eb3199d0b\" (UID: \"59e7ef92-3d74-4334-ac19-671eb3199d0b\") " Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.837011 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/59e7ef92-3d74-4334-ac19-671eb3199d0b-httpd-run\") pod \"59e7ef92-3d74-4334-ac19-671eb3199d0b\" (UID: \"59e7ef92-3d74-4334-ac19-671eb3199d0b\") " Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.837028 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/59e7ef92-3d74-4334-ac19-671eb3199d0b-logs\") pod \"59e7ef92-3d74-4334-ac19-671eb3199d0b\" (UID: \"59e7ef92-3d74-4334-ac19-671eb3199d0b\") " Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.837262 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage16-crc\") pod \"59e7ef92-3d74-4334-ac19-671eb3199d0b\" (UID: \"59e7ef92-3d74-4334-ac19-671eb3199d0b\") " Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.838309 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/59e7ef92-3d74-4334-ac19-671eb3199d0b-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "59e7ef92-3d74-4334-ac19-671eb3199d0b" (UID: "59e7ef92-3d74-4334-ac19-671eb3199d0b"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.839100 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/59e7ef92-3d74-4334-ac19-671eb3199d0b-logs" (OuterVolumeSpecName: "logs") pod "59e7ef92-3d74-4334-ac19-671eb3199d0b" (UID: "59e7ef92-3d74-4334-ac19-671eb3199d0b"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.849274 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/59e7ef92-3d74-4334-ac19-671eb3199d0b-kube-api-access-8p4k8" (OuterVolumeSpecName: "kube-api-access-8p4k8") pod "59e7ef92-3d74-4334-ac19-671eb3199d0b" (UID: "59e7ef92-3d74-4334-ac19-671eb3199d0b"). InnerVolumeSpecName "kube-api-access-8p4k8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.850411 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/59e7ef92-3d74-4334-ac19-671eb3199d0b-config-data\") pod \"59e7ef92-3d74-4334-ac19-671eb3199d0b\" (UID: \"59e7ef92-3d74-4334-ac19-671eb3199d0b\") " Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.850454 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/59e7ef92-3d74-4334-ac19-671eb3199d0b-scripts\") pod \"59e7ef92-3d74-4334-ac19-671eb3199d0b\" (UID: \"59e7ef92-3d74-4334-ac19-671eb3199d0b\") " Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.851399 4558 scope.go:117] "RemoveContainer" containerID="11026a4823ee019937da1b5d44ee5848da1509d7fbc7958ed8712c4c0b58b33b" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.851919 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8p4k8\" (UniqueName: \"kubernetes.io/projected/59e7ef92-3d74-4334-ac19-671eb3199d0b-kube-api-access-8p4k8\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.851939 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/59e7ef92-3d74-4334-ac19-671eb3199d0b-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.851950 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/59e7ef92-3d74-4334-ac19-671eb3199d0b-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.852093 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage16-crc" (OuterVolumeSpecName: "glance") pod "59e7ef92-3d74-4334-ac19-671eb3199d0b" (UID: "59e7ef92-3d74-4334-ac19-671eb3199d0b"). InnerVolumeSpecName "local-storage16-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.891471 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/59e7ef92-3d74-4334-ac19-671eb3199d0b-scripts" (OuterVolumeSpecName: "scripts") pod "59e7ef92-3d74-4334-ac19-671eb3199d0b" (UID: "59e7ef92-3d74-4334-ac19-671eb3199d0b"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.892587 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.902448 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" podUID="49dc00d4-0563-4324-a98b-e42b24b4223b" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.1.126:5672: connect: connection refused" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.907707 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.922133 4558 scope.go:117] "RemoveContainer" containerID="b5f54a9d58e26e3dd566d5f5f71a6cabd02248b3e9fd086fccf6af2e071db403" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.922283 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:32:52 crc kubenswrapper[4558]: E0120 17:32:52.922707 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="59e7ef92-3d74-4334-ac19-671eb3199d0b" containerName="glance-httpd" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.922719 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="59e7ef92-3d74-4334-ac19-671eb3199d0b" containerName="glance-httpd" Jan 20 17:32:52 crc kubenswrapper[4558]: E0120 17:32:52.922728 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aaaaf2d3-8f74-4ce7-8407-2f0b0bece070" containerName="glance-httpd" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.922735 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="aaaaf2d3-8f74-4ce7-8407-2f0b0bece070" containerName="glance-httpd" Jan 20 17:32:52 crc kubenswrapper[4558]: E0120 17:32:52.922744 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="59e7ef92-3d74-4334-ac19-671eb3199d0b" containerName="glance-log" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.922750 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="59e7ef92-3d74-4334-ac19-671eb3199d0b" containerName="glance-log" Jan 20 17:32:52 crc kubenswrapper[4558]: E0120 17:32:52.922768 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aaaaf2d3-8f74-4ce7-8407-2f0b0bece070" containerName="glance-log" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.922773 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="aaaaf2d3-8f74-4ce7-8407-2f0b0bece070" containerName="glance-log" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.922956 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="59e7ef92-3d74-4334-ac19-671eb3199d0b" containerName="glance-httpd" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.922971 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="59e7ef92-3d74-4334-ac19-671eb3199d0b" containerName="glance-log" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.922978 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="aaaaf2d3-8f74-4ce7-8407-2f0b0bece070" containerName="glance-httpd" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.922987 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="aaaaf2d3-8f74-4ce7-8407-2f0b0bece070" containerName="glance-log" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.923984 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.930035 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-metadata-config-data" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.938948 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.948315 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/59e7ef92-3d74-4334-ac19-671eb3199d0b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "59e7ef92-3d74-4334-ac19-671eb3199d0b" (UID: "59e7ef92-3d74-4334-ac19-671eb3199d0b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.953860 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/59e7ef92-3d74-4334-ac19-671eb3199d0b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.953895 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.953905 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage16-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage16-crc\") on node \"crc\" " Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.953965 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/59e7ef92-3d74-4334-ac19-671eb3199d0b-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.961310 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.969218 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.971350 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/59e7ef92-3d74-4334-ac19-671eb3199d0b-config-data" (OuterVolumeSpecName: "config-data") pod "59e7ef92-3d74-4334-ac19-671eb3199d0b" (UID: "59e7ef92-3d74-4334-ac19-671eb3199d0b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.971534 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage16-crc" (UniqueName: "kubernetes.io/local-volume/local-storage16-crc") on node "crc" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.972894 4558 scope.go:117] "RemoveContainer" containerID="bc8a516268d1e7dd8e3c075c9cfa7d70176325f633e340325c2c6e2aa0d06228" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.974934 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.976618 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.978827 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-api-config-data" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.980230 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/placement-5f4bd4844-95dpq"] Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.982410 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-5f4bd4844-95dpq" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.983889 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-placement-public-svc" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.985781 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-placement-internal-svc" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.986310 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.992402 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.994707 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-internal-config-data" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.996542 4558 scope.go:117] "RemoveContainer" containerID="b5f54a9d58e26e3dd566d5f5f71a6cabd02248b3e9fd086fccf6af2e071db403" Jan 20 17:32:52 crc kubenswrapper[4558]: E0120 17:32:52.996963 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b5f54a9d58e26e3dd566d5f5f71a6cabd02248b3e9fd086fccf6af2e071db403\": container with ID starting with b5f54a9d58e26e3dd566d5f5f71a6cabd02248b3e9fd086fccf6af2e071db403 not found: ID does not exist" containerID="b5f54a9d58e26e3dd566d5f5f71a6cabd02248b3e9fd086fccf6af2e071db403" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.997044 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b5f54a9d58e26e3dd566d5f5f71a6cabd02248b3e9fd086fccf6af2e071db403"} err="failed to get container status \"b5f54a9d58e26e3dd566d5f5f71a6cabd02248b3e9fd086fccf6af2e071db403\": rpc error: code = NotFound desc = could not find container \"b5f54a9d58e26e3dd566d5f5f71a6cabd02248b3e9fd086fccf6af2e071db403\": container with ID starting with b5f54a9d58e26e3dd566d5f5f71a6cabd02248b3e9fd086fccf6af2e071db403 not found: ID does not exist" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.997114 4558 scope.go:117] "RemoveContainer" containerID="bc8a516268d1e7dd8e3c075c9cfa7d70176325f633e340325c2c6e2aa0d06228" Jan 20 17:32:52 crc kubenswrapper[4558]: E0120 17:32:52.998463 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bc8a516268d1e7dd8e3c075c9cfa7d70176325f633e340325c2c6e2aa0d06228\": container with ID starting with bc8a516268d1e7dd8e3c075c9cfa7d70176325f633e340325c2c6e2aa0d06228 not found: ID does not exist" containerID="bc8a516268d1e7dd8e3c075c9cfa7d70176325f633e340325c2c6e2aa0d06228" Jan 20 17:32:52 crc kubenswrapper[4558]: I0120 17:32:52.998514 4558 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"bc8a516268d1e7dd8e3c075c9cfa7d70176325f633e340325c2c6e2aa0d06228"} err="failed to get container status \"bc8a516268d1e7dd8e3c075c9cfa7d70176325f633e340325c2c6e2aa0d06228\": rpc error: code = NotFound desc = could not find container \"bc8a516268d1e7dd8e3c075c9cfa7d70176325f633e340325c2c6e2aa0d06228\": container with ID starting with bc8a516268d1e7dd8e3c075c9cfa7d70176325f633e340325c2c6e2aa0d06228 not found: ID does not exist" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.003130 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-5f4bd4844-95dpq"] Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.017997 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.025491 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.055856 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1038c23d-3108-472d-b43c-4181fd8ac365-config-data\") pod \"placement-5f4bd4844-95dpq\" (UID: \"1038c23d-3108-472d-b43c-4181fd8ac365\") " pod="openstack-kuttl-tests/placement-5f4bd4844-95dpq" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.055914 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8fb66f31-6ee9-4ad8-8bd2-14e489b98591-logs\") pod \"nova-metadata-0\" (UID: \"8fb66f31-6ee9-4ad8-8bd2-14e489b98591\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.055941 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bvdnd\" (UniqueName: \"kubernetes.io/projected/8fb66f31-6ee9-4ad8-8bd2-14e489b98591-kube-api-access-bvdnd\") pod \"nova-metadata-0\" (UID: \"8fb66f31-6ee9-4ad8-8bd2-14e489b98591\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.056081 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/846821b5-5647-4ffa-a1bb-8695fe90cbcc-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"846821b5-5647-4ffa-a1bb-8695fe90cbcc\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.056110 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1038c23d-3108-472d-b43c-4181fd8ac365-public-tls-certs\") pod \"placement-5f4bd4844-95dpq\" (UID: \"1038c23d-3108-472d-b43c-4181fd8ac365\") " pod="openstack-kuttl-tests/placement-5f4bd4844-95dpq" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.056143 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/846821b5-5647-4ffa-a1bb-8695fe90cbcc-logs\") pod \"cinder-api-0\" (UID: \"846821b5-5647-4ffa-a1bb-8695fe90cbcc\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.056180 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/846821b5-5647-4ffa-a1bb-8695fe90cbcc-config-data-custom\") pod \"cinder-api-0\" (UID: \"846821b5-5647-4ffa-a1bb-8695fe90cbcc\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.056423 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1038c23d-3108-472d-b43c-4181fd8ac365-combined-ca-bundle\") pod \"placement-5f4bd4844-95dpq\" (UID: \"1038c23d-3108-472d-b43c-4181fd8ac365\") " pod="openstack-kuttl-tests/placement-5f4bd4844-95dpq" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.056496 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/846821b5-5647-4ffa-a1bb-8695fe90cbcc-config-data\") pod \"cinder-api-0\" (UID: \"846821b5-5647-4ffa-a1bb-8695fe90cbcc\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.056577 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/846821b5-5647-4ffa-a1bb-8695fe90cbcc-scripts\") pod \"cinder-api-0\" (UID: \"846821b5-5647-4ffa-a1bb-8695fe90cbcc\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.056631 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1038c23d-3108-472d-b43c-4181fd8ac365-scripts\") pod \"placement-5f4bd4844-95dpq\" (UID: \"1038c23d-3108-472d-b43c-4181fd8ac365\") " pod="openstack-kuttl-tests/placement-5f4bd4844-95dpq" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.056682 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1038c23d-3108-472d-b43c-4181fd8ac365-logs\") pod \"placement-5f4bd4844-95dpq\" (UID: \"1038c23d-3108-472d-b43c-4181fd8ac365\") " pod="openstack-kuttl-tests/placement-5f4bd4844-95dpq" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.056717 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d5qdl\" (UniqueName: \"kubernetes.io/projected/846821b5-5647-4ffa-a1bb-8695fe90cbcc-kube-api-access-d5qdl\") pod \"cinder-api-0\" (UID: \"846821b5-5647-4ffa-a1bb-8695fe90cbcc\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.056760 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1038c23d-3108-472d-b43c-4181fd8ac365-internal-tls-certs\") pod \"placement-5f4bd4844-95dpq\" (UID: \"1038c23d-3108-472d-b43c-4181fd8ac365\") " pod="openstack-kuttl-tests/placement-5f4bd4844-95dpq" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.056838 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/846821b5-5647-4ffa-a1bb-8695fe90cbcc-etc-machine-id\") pod \"cinder-api-0\" (UID: \"846821b5-5647-4ffa-a1bb-8695fe90cbcc\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.056971 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6n69g\" 
(UniqueName: \"kubernetes.io/projected/1038c23d-3108-472d-b43c-4181fd8ac365-kube-api-access-6n69g\") pod \"placement-5f4bd4844-95dpq\" (UID: \"1038c23d-3108-472d-b43c-4181fd8ac365\") " pod="openstack-kuttl-tests/placement-5f4bd4844-95dpq" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.057028 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8fb66f31-6ee9-4ad8-8bd2-14e489b98591-config-data\") pod \"nova-metadata-0\" (UID: \"8fb66f31-6ee9-4ad8-8bd2-14e489b98591\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.057090 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8fb66f31-6ee9-4ad8-8bd2-14e489b98591-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"8fb66f31-6ee9-4ad8-8bd2-14e489b98591\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.057231 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage16-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage16-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.057252 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/59e7ef92-3d74-4334-ac19-671eb3199d0b-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.106268 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.106920 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="0fa9abe5-c250-44ce-97d5-17528dd527cb" containerName="ceilometer-central-agent" containerID="cri-o://94acb89771ba53cc1e3c2ff43ebec406a07834ae245a8a2d429afb1cf797e2ed" gracePeriod=30 Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.107268 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="0fa9abe5-c250-44ce-97d5-17528dd527cb" containerName="ceilometer-notification-agent" containerID="cri-o://255c1473971684fbf37d5dbc876ce06c7e2d2050c60ff64fb10bd1de85cc2f28" gracePeriod=30 Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.107294 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="0fa9abe5-c250-44ce-97d5-17528dd527cb" containerName="proxy-httpd" containerID="cri-o://6b63c5d8df23726b95ea12ea0a38363f05fc0ca2ac135e3fa2be2b0c33f58276" gracePeriod=30 Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.107284 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="0fa9abe5-c250-44ce-97d5-17528dd527cb" containerName="sg-core" containerID="cri-o://ba412cf075d10c839cba475b03b73954666ef9d98b9d93b40ae3e1e0eb1beb15" gracePeriod=30 Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.158776 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/846821b5-5647-4ffa-a1bb-8695fe90cbcc-logs\") pod \"cinder-api-0\" (UID: \"846821b5-5647-4ffa-a1bb-8695fe90cbcc\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.158914 4558 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/846821b5-5647-4ffa-a1bb-8695fe90cbcc-config-data-custom\") pod \"cinder-api-0\" (UID: \"846821b5-5647-4ffa-a1bb-8695fe90cbcc\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.159053 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/32e34e3d-8cbb-42df-9b7a-4b99b025cdf0-logs\") pod \"glance-default-internal-api-0\" (UID: \"32e34e3d-8cbb-42df-9b7a-4b99b025cdf0\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.159137 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1038c23d-3108-472d-b43c-4181fd8ac365-combined-ca-bundle\") pod \"placement-5f4bd4844-95dpq\" (UID: \"1038c23d-3108-472d-b43c-4181fd8ac365\") " pod="openstack-kuttl-tests/placement-5f4bd4844-95dpq" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.159235 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/846821b5-5647-4ffa-a1bb-8695fe90cbcc-config-data\") pod \"cinder-api-0\" (UID: \"846821b5-5647-4ffa-a1bb-8695fe90cbcc\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.159327 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/846821b5-5647-4ffa-a1bb-8695fe90cbcc-scripts\") pod \"cinder-api-0\" (UID: \"846821b5-5647-4ffa-a1bb-8695fe90cbcc\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.159407 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32e34e3d-8cbb-42df-9b7a-4b99b025cdf0-config-data\") pod \"glance-default-internal-api-0\" (UID: \"32e34e3d-8cbb-42df-9b7a-4b99b025cdf0\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.159483 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1038c23d-3108-472d-b43c-4181fd8ac365-scripts\") pod \"placement-5f4bd4844-95dpq\" (UID: \"1038c23d-3108-472d-b43c-4181fd8ac365\") " pod="openstack-kuttl-tests/placement-5f4bd4844-95dpq" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.159559 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1038c23d-3108-472d-b43c-4181fd8ac365-logs\") pod \"placement-5f4bd4844-95dpq\" (UID: \"1038c23d-3108-472d-b43c-4181fd8ac365\") " pod="openstack-kuttl-tests/placement-5f4bd4844-95dpq" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.159625 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d5qdl\" (UniqueName: \"kubernetes.io/projected/846821b5-5647-4ffa-a1bb-8695fe90cbcc-kube-api-access-d5qdl\") pod \"cinder-api-0\" (UID: \"846821b5-5647-4ffa-a1bb-8695fe90cbcc\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.159704 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/1038c23d-3108-472d-b43c-4181fd8ac365-internal-tls-certs\") pod \"placement-5f4bd4844-95dpq\" (UID: \"1038c23d-3108-472d-b43c-4181fd8ac365\") " pod="openstack-kuttl-tests/placement-5f4bd4844-95dpq" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.159817 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"glance-default-internal-api-0\" (UID: \"32e34e3d-8cbb-42df-9b7a-4b99b025cdf0\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.159901 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/846821b5-5647-4ffa-a1bb-8695fe90cbcc-etc-machine-id\") pod \"cinder-api-0\" (UID: \"846821b5-5647-4ffa-a1bb-8695fe90cbcc\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.159986 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32e34e3d-8cbb-42df-9b7a-4b99b025cdf0-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"32e34e3d-8cbb-42df-9b7a-4b99b025cdf0\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.160065 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6n69g\" (UniqueName: \"kubernetes.io/projected/1038c23d-3108-472d-b43c-4181fd8ac365-kube-api-access-6n69g\") pod \"placement-5f4bd4844-95dpq\" (UID: \"1038c23d-3108-472d-b43c-4181fd8ac365\") " pod="openstack-kuttl-tests/placement-5f4bd4844-95dpq" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.160130 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8fb66f31-6ee9-4ad8-8bd2-14e489b98591-config-data\") pod \"nova-metadata-0\" (UID: \"8fb66f31-6ee9-4ad8-8bd2-14e489b98591\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.160653 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/32e34e3d-8cbb-42df-9b7a-4b99b025cdf0-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"32e34e3d-8cbb-42df-9b7a-4b99b025cdf0\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.160726 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8fb66f31-6ee9-4ad8-8bd2-14e489b98591-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"8fb66f31-6ee9-4ad8-8bd2-14e489b98591\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.160815 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/32e34e3d-8cbb-42df-9b7a-4b99b025cdf0-scripts\") pod \"glance-default-internal-api-0\" (UID: \"32e34e3d-8cbb-42df-9b7a-4b99b025cdf0\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.160917 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" 
(UniqueName: \"kubernetes.io/secret/1038c23d-3108-472d-b43c-4181fd8ac365-config-data\") pod \"placement-5f4bd4844-95dpq\" (UID: \"1038c23d-3108-472d-b43c-4181fd8ac365\") " pod="openstack-kuttl-tests/placement-5f4bd4844-95dpq" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.160993 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tfhgz\" (UniqueName: \"kubernetes.io/projected/32e34e3d-8cbb-42df-9b7a-4b99b025cdf0-kube-api-access-tfhgz\") pod \"glance-default-internal-api-0\" (UID: \"32e34e3d-8cbb-42df-9b7a-4b99b025cdf0\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.161085 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8fb66f31-6ee9-4ad8-8bd2-14e489b98591-logs\") pod \"nova-metadata-0\" (UID: \"8fb66f31-6ee9-4ad8-8bd2-14e489b98591\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.161153 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bvdnd\" (UniqueName: \"kubernetes.io/projected/8fb66f31-6ee9-4ad8-8bd2-14e489b98591-kube-api-access-bvdnd\") pod \"nova-metadata-0\" (UID: \"8fb66f31-6ee9-4ad8-8bd2-14e489b98591\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.161266 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/846821b5-5647-4ffa-a1bb-8695fe90cbcc-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"846821b5-5647-4ffa-a1bb-8695fe90cbcc\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.161338 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1038c23d-3108-472d-b43c-4181fd8ac365-public-tls-certs\") pod \"placement-5f4bd4844-95dpq\" (UID: \"1038c23d-3108-472d-b43c-4181fd8ac365\") " pod="openstack-kuttl-tests/placement-5f4bd4844-95dpq" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.162222 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/846821b5-5647-4ffa-a1bb-8695fe90cbcc-logs\") pod \"cinder-api-0\" (UID: \"846821b5-5647-4ffa-a1bb-8695fe90cbcc\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.163119 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1038c23d-3108-472d-b43c-4181fd8ac365-logs\") pod \"placement-5f4bd4844-95dpq\" (UID: \"1038c23d-3108-472d-b43c-4181fd8ac365\") " pod="openstack-kuttl-tests/placement-5f4bd4844-95dpq" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.163188 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/846821b5-5647-4ffa-a1bb-8695fe90cbcc-etc-machine-id\") pod \"cinder-api-0\" (UID: \"846821b5-5647-4ffa-a1bb-8695fe90cbcc\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.165749 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8fb66f31-6ee9-4ad8-8bd2-14e489b98591-config-data\") pod \"nova-metadata-0\" (UID: 
\"8fb66f31-6ee9-4ad8-8bd2-14e489b98591\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.166082 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/846821b5-5647-4ffa-a1bb-8695fe90cbcc-config-data-custom\") pod \"cinder-api-0\" (UID: \"846821b5-5647-4ffa-a1bb-8695fe90cbcc\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.166563 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1038c23d-3108-472d-b43c-4181fd8ac365-scripts\") pod \"placement-5f4bd4844-95dpq\" (UID: \"1038c23d-3108-472d-b43c-4181fd8ac365\") " pod="openstack-kuttl-tests/placement-5f4bd4844-95dpq" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.166710 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8fb66f31-6ee9-4ad8-8bd2-14e489b98591-logs\") pod \"nova-metadata-0\" (UID: \"8fb66f31-6ee9-4ad8-8bd2-14e489b98591\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.167928 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/846821b5-5647-4ffa-a1bb-8695fe90cbcc-scripts\") pod \"cinder-api-0\" (UID: \"846821b5-5647-4ffa-a1bb-8695fe90cbcc\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.171677 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1038c23d-3108-472d-b43c-4181fd8ac365-public-tls-certs\") pod \"placement-5f4bd4844-95dpq\" (UID: \"1038c23d-3108-472d-b43c-4181fd8ac365\") " pod="openstack-kuttl-tests/placement-5f4bd4844-95dpq" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.175671 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1038c23d-3108-472d-b43c-4181fd8ac365-combined-ca-bundle\") pod \"placement-5f4bd4844-95dpq\" (UID: \"1038c23d-3108-472d-b43c-4181fd8ac365\") " pod="openstack-kuttl-tests/placement-5f4bd4844-95dpq" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.176751 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8fb66f31-6ee9-4ad8-8bd2-14e489b98591-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"8fb66f31-6ee9-4ad8-8bd2-14e489b98591\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.177663 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/846821b5-5647-4ffa-a1bb-8695fe90cbcc-config-data\") pod \"cinder-api-0\" (UID: \"846821b5-5647-4ffa-a1bb-8695fe90cbcc\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.177699 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/846821b5-5647-4ffa-a1bb-8695fe90cbcc-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"846821b5-5647-4ffa-a1bb-8695fe90cbcc\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.179760 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d5qdl\" 
(UniqueName: \"kubernetes.io/projected/846821b5-5647-4ffa-a1bb-8695fe90cbcc-kube-api-access-d5qdl\") pod \"cinder-api-0\" (UID: \"846821b5-5647-4ffa-a1bb-8695fe90cbcc\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.179786 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6n69g\" (UniqueName: \"kubernetes.io/projected/1038c23d-3108-472d-b43c-4181fd8ac365-kube-api-access-6n69g\") pod \"placement-5f4bd4844-95dpq\" (UID: \"1038c23d-3108-472d-b43c-4181fd8ac365\") " pod="openstack-kuttl-tests/placement-5f4bd4844-95dpq" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.179898 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1038c23d-3108-472d-b43c-4181fd8ac365-config-data\") pod \"placement-5f4bd4844-95dpq\" (UID: \"1038c23d-3108-472d-b43c-4181fd8ac365\") " pod="openstack-kuttl-tests/placement-5f4bd4844-95dpq" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.180186 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1038c23d-3108-472d-b43c-4181fd8ac365-internal-tls-certs\") pod \"placement-5f4bd4844-95dpq\" (UID: \"1038c23d-3108-472d-b43c-4181fd8ac365\") " pod="openstack-kuttl-tests/placement-5f4bd4844-95dpq" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.181520 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bvdnd\" (UniqueName: \"kubernetes.io/projected/8fb66f31-6ee9-4ad8-8bd2-14e489b98591-kube-api-access-bvdnd\") pod \"nova-metadata-0\" (UID: \"8fb66f31-6ee9-4ad8-8bd2-14e489b98591\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.253465 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.263133 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/32e34e3d-8cbb-42df-9b7a-4b99b025cdf0-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"32e34e3d-8cbb-42df-9b7a-4b99b025cdf0\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.263188 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/32e34e3d-8cbb-42df-9b7a-4b99b025cdf0-scripts\") pod \"glance-default-internal-api-0\" (UID: \"32e34e3d-8cbb-42df-9b7a-4b99b025cdf0\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.263238 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tfhgz\" (UniqueName: \"kubernetes.io/projected/32e34e3d-8cbb-42df-9b7a-4b99b025cdf0-kube-api-access-tfhgz\") pod \"glance-default-internal-api-0\" (UID: \"32e34e3d-8cbb-42df-9b7a-4b99b025cdf0\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.263333 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/32e34e3d-8cbb-42df-9b7a-4b99b025cdf0-logs\") pod \"glance-default-internal-api-0\" (UID: \"32e34e3d-8cbb-42df-9b7a-4b99b025cdf0\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.263395 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32e34e3d-8cbb-42df-9b7a-4b99b025cdf0-config-data\") pod \"glance-default-internal-api-0\" (UID: \"32e34e3d-8cbb-42df-9b7a-4b99b025cdf0\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.263435 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"glance-default-internal-api-0\" (UID: \"32e34e3d-8cbb-42df-9b7a-4b99b025cdf0\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.263482 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32e34e3d-8cbb-42df-9b7a-4b99b025cdf0-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"32e34e3d-8cbb-42df-9b7a-4b99b025cdf0\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.264447 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/32e34e3d-8cbb-42df-9b7a-4b99b025cdf0-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"32e34e3d-8cbb-42df-9b7a-4b99b025cdf0\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.264451 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"glance-default-internal-api-0\" (UID: \"32e34e3d-8cbb-42df-9b7a-4b99b025cdf0\") device mount path 
\"/mnt/openstack/pv15\"" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.264560 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/32e34e3d-8cbb-42df-9b7a-4b99b025cdf0-logs\") pod \"glance-default-internal-api-0\" (UID: \"32e34e3d-8cbb-42df-9b7a-4b99b025cdf0\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.267494 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32e34e3d-8cbb-42df-9b7a-4b99b025cdf0-config-data\") pod \"glance-default-internal-api-0\" (UID: \"32e34e3d-8cbb-42df-9b7a-4b99b025cdf0\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.268632 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/32e34e3d-8cbb-42df-9b7a-4b99b025cdf0-scripts\") pod \"glance-default-internal-api-0\" (UID: \"32e34e3d-8cbb-42df-9b7a-4b99b025cdf0\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.271193 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32e34e3d-8cbb-42df-9b7a-4b99b025cdf0-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"32e34e3d-8cbb-42df-9b7a-4b99b025cdf0\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.279711 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tfhgz\" (UniqueName: \"kubernetes.io/projected/32e34e3d-8cbb-42df-9b7a-4b99b025cdf0-kube-api-access-tfhgz\") pod \"glance-default-internal-api-0\" (UID: \"32e34e3d-8cbb-42df-9b7a-4b99b025cdf0\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.290572 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"glance-default-internal-api-0\" (UID: \"32e34e3d-8cbb-42df-9b7a-4b99b025cdf0\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.301702 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.315201 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-5f4bd4844-95dpq" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.322693 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.693707 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"59e7ef92-3d74-4334-ac19-671eb3199d0b","Type":"ContainerDied","Data":"2789214ccdafc0424e87310c5b887521dedc090f785e3b948d6478a4744b64fe"} Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.694022 4558 scope.go:117] "RemoveContainer" containerID="40ef3c5a952862f3e1bf0e9c9f5e2c665b77d0d4387f66b4456d7a0b64be3cbf" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.694145 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.710966 4558 generic.go:334] "Generic (PLEG): container finished" podID="0fa9abe5-c250-44ce-97d5-17528dd527cb" containerID="6b63c5d8df23726b95ea12ea0a38363f05fc0ca2ac135e3fa2be2b0c33f58276" exitCode=0 Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.710990 4558 generic.go:334] "Generic (PLEG): container finished" podID="0fa9abe5-c250-44ce-97d5-17528dd527cb" containerID="ba412cf075d10c839cba475b03b73954666ef9d98b9d93b40ae3e1e0eb1beb15" exitCode=2 Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.710999 4558 generic.go:334] "Generic (PLEG): container finished" podID="0fa9abe5-c250-44ce-97d5-17528dd527cb" containerID="94acb89771ba53cc1e3c2ff43ebec406a07834ae245a8a2d429afb1cf797e2ed" exitCode=0 Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.711047 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"0fa9abe5-c250-44ce-97d5-17528dd527cb","Type":"ContainerDied","Data":"6b63c5d8df23726b95ea12ea0a38363f05fc0ca2ac135e3fa2be2b0c33f58276"} Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.711064 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"0fa9abe5-c250-44ce-97d5-17528dd527cb","Type":"ContainerDied","Data":"ba412cf075d10c839cba475b03b73954666ef9d98b9d93b40ae3e1e0eb1beb15"} Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.711074 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"0fa9abe5-c250-44ce-97d5-17528dd527cb","Type":"ContainerDied","Data":"94acb89771ba53cc1e3c2ff43ebec406a07834ae245a8a2d429afb1cf797e2ed"} Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.714318 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"6a66de52-5f9c-46c6-aa54-8a0ba3288230","Type":"ContainerStarted","Data":"915a019c132b2d4f4aeb77f2aacee6bc94ce4e5d6b372707956a6621db84cd38"} Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.714361 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"6a66de52-5f9c-46c6-aa54-8a0ba3288230","Type":"ContainerStarted","Data":"1099f7ebf3750cf92ad4dcca603aaebf0e6a74e93107a073aead4aee1f85dd17"} Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.714374 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"6a66de52-5f9c-46c6-aa54-8a0ba3288230","Type":"ContainerStarted","Data":"4b143bb9dea569bfb66e69be82985991ccec7aef83b6118dc9d97737a6c4598f"} Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.718748 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"a336b03d-9969-41de-9f19-f3479f76a33d","Type":"ContainerStarted","Data":"90ed3b76b87cd561c155928fde1a7320fbfce5af416fc09381ed42b6ead92480"} Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.730396 4558 scope.go:117] "RemoveContainer" containerID="85d12510c1da886d4ee25d1c28a63fdd18e54d2a19e1791fb8e0c9a0fa49fb05" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.749261 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.762290 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-api-0" podStartSLOduration=2.762274851 podStartE2EDuration="2.762274851s" podCreationTimestamp="2026-01-20 17:32:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:32:53.731380199 +0000 UTC m=+3067.491718186" watchObservedRunningTime="2026-01-20 17:32:53.762274851 +0000 UTC m=+3067.522612819" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.786220 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.824143 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.833765 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-scheduler-0" podStartSLOduration=2.82891207 podStartE2EDuration="2.82891207s" podCreationTimestamp="2026-01-20 17:32:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:32:53.771947929 +0000 UTC m=+3067.532285896" watchObservedRunningTime="2026-01-20 17:32:53.82891207 +0000 UTC m=+3067.589250037" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.852433 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.857140 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.859148 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-external-config-data" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.859351 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.952764 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-5f4bd4844-95dpq"] Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.965307 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.973998 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.984333 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sz6zp\" (UniqueName: \"kubernetes.io/projected/acfda63d-d93f-4b65-b9f4-eaf856bc5a92-kube-api-access-sz6zp\") pod \"glance-default-external-api-0\" (UID: \"acfda63d-d93f-4b65-b9f4-eaf856bc5a92\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.984575 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acfda63d-d93f-4b65-b9f4-eaf856bc5a92-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"acfda63d-d93f-4b65-b9f4-eaf856bc5a92\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.984604 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage16-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage16-crc\") pod \"glance-default-external-api-0\" (UID: \"acfda63d-d93f-4b65-b9f4-eaf856bc5a92\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.984628 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/acfda63d-d93f-4b65-b9f4-eaf856bc5a92-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"acfda63d-d93f-4b65-b9f4-eaf856bc5a92\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.984676 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/acfda63d-d93f-4b65-b9f4-eaf856bc5a92-logs\") pod \"glance-default-external-api-0\" (UID: \"acfda63d-d93f-4b65-b9f4-eaf856bc5a92\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.984707 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/acfda63d-d93f-4b65-b9f4-eaf856bc5a92-config-data\") pod \"glance-default-external-api-0\" (UID: \"acfda63d-d93f-4b65-b9f4-eaf856bc5a92\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:32:53 crc kubenswrapper[4558]: I0120 17:32:53.984720 4558 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/acfda63d-d93f-4b65-b9f4-eaf856bc5a92-scripts\") pod \"glance-default-external-api-0\" (UID: \"acfda63d-d93f-4b65-b9f4-eaf856bc5a92\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:32:54 crc kubenswrapper[4558]: I0120 17:32:54.087731 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/acfda63d-d93f-4b65-b9f4-eaf856bc5a92-logs\") pod \"glance-default-external-api-0\" (UID: \"acfda63d-d93f-4b65-b9f4-eaf856bc5a92\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:32:54 crc kubenswrapper[4558]: I0120 17:32:54.088468 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/acfda63d-d93f-4b65-b9f4-eaf856bc5a92-config-data\") pod \"glance-default-external-api-0\" (UID: \"acfda63d-d93f-4b65-b9f4-eaf856bc5a92\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:32:54 crc kubenswrapper[4558]: I0120 17:32:54.088996 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/acfda63d-d93f-4b65-b9f4-eaf856bc5a92-scripts\") pod \"glance-default-external-api-0\" (UID: \"acfda63d-d93f-4b65-b9f4-eaf856bc5a92\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:32:54 crc kubenswrapper[4558]: I0120 17:32:54.088313 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/acfda63d-d93f-4b65-b9f4-eaf856bc5a92-logs\") pod \"glance-default-external-api-0\" (UID: \"acfda63d-d93f-4b65-b9f4-eaf856bc5a92\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:32:54 crc kubenswrapper[4558]: I0120 17:32:54.089835 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sz6zp\" (UniqueName: \"kubernetes.io/projected/acfda63d-d93f-4b65-b9f4-eaf856bc5a92-kube-api-access-sz6zp\") pod \"glance-default-external-api-0\" (UID: \"acfda63d-d93f-4b65-b9f4-eaf856bc5a92\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:32:54 crc kubenswrapper[4558]: I0120 17:32:54.089915 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acfda63d-d93f-4b65-b9f4-eaf856bc5a92-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"acfda63d-d93f-4b65-b9f4-eaf856bc5a92\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:32:54 crc kubenswrapper[4558]: I0120 17:32:54.089998 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage16-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage16-crc\") pod \"glance-default-external-api-0\" (UID: \"acfda63d-d93f-4b65-b9f4-eaf856bc5a92\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:32:54 crc kubenswrapper[4558]: I0120 17:32:54.090050 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/acfda63d-d93f-4b65-b9f4-eaf856bc5a92-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"acfda63d-d93f-4b65-b9f4-eaf856bc5a92\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:32:54 crc kubenswrapper[4558]: I0120 17:32:54.090705 4558 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/acfda63d-d93f-4b65-b9f4-eaf856bc5a92-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"acfda63d-d93f-4b65-b9f4-eaf856bc5a92\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:32:54 crc kubenswrapper[4558]: I0120 17:32:54.093527 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage16-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage16-crc\") pod \"glance-default-external-api-0\" (UID: \"acfda63d-d93f-4b65-b9f4-eaf856bc5a92\") device mount path \"/mnt/openstack/pv16\"" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:32:54 crc kubenswrapper[4558]: I0120 17:32:54.103179 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acfda63d-d93f-4b65-b9f4-eaf856bc5a92-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"acfda63d-d93f-4b65-b9f4-eaf856bc5a92\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:32:54 crc kubenswrapper[4558]: I0120 17:32:54.103201 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/acfda63d-d93f-4b65-b9f4-eaf856bc5a92-scripts\") pod \"glance-default-external-api-0\" (UID: \"acfda63d-d93f-4b65-b9f4-eaf856bc5a92\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:32:54 crc kubenswrapper[4558]: I0120 17:32:54.104596 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/acfda63d-d93f-4b65-b9f4-eaf856bc5a92-config-data\") pod \"glance-default-external-api-0\" (UID: \"acfda63d-d93f-4b65-b9f4-eaf856bc5a92\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:32:54 crc kubenswrapper[4558]: I0120 17:32:54.110992 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sz6zp\" (UniqueName: \"kubernetes.io/projected/acfda63d-d93f-4b65-b9f4-eaf856bc5a92-kube-api-access-sz6zp\") pod \"glance-default-external-api-0\" (UID: \"acfda63d-d93f-4b65-b9f4-eaf856bc5a92\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:32:54 crc kubenswrapper[4558]: I0120 17:32:54.130840 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage16-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage16-crc\") pod \"glance-default-external-api-0\" (UID: \"acfda63d-d93f-4b65-b9f4-eaf856bc5a92\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:32:54 crc kubenswrapper[4558]: I0120 17:32:54.166399 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:32:54 crc kubenswrapper[4558]: I0120 17:32:54.199561 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:32:54 crc kubenswrapper[4558]: I0120 17:32:54.584146 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="59e7ef92-3d74-4334-ac19-671eb3199d0b" path="/var/lib/kubelet/pods/59e7ef92-3d74-4334-ac19-671eb3199d0b/volumes" Jan 20 17:32:54 crc kubenswrapper[4558]: I0120 17:32:54.584918 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="63df8460-ab49-4787-92a7-54a14c0179ca" path="/var/lib/kubelet/pods/63df8460-ab49-4787-92a7-54a14c0179ca/volumes" Jan 20 17:32:54 crc kubenswrapper[4558]: I0120 17:32:54.585640 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aaaaf2d3-8f74-4ce7-8407-2f0b0bece070" path="/var/lib/kubelet/pods/aaaaf2d3-8f74-4ce7-8407-2f0b0bece070/volumes" Jan 20 17:32:54 crc kubenswrapper[4558]: I0120 17:32:54.586772 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eb4881e5-cce6-4423-a9f4-d93f96a2ccb1" path="/var/lib/kubelet/pods/eb4881e5-cce6-4423-a9f4-d93f96a2ccb1/volumes" Jan 20 17:32:54 crc kubenswrapper[4558]: I0120 17:32:54.702742 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:32:54 crc kubenswrapper[4558]: W0120 17:32:54.737263 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podacfda63d_d93f_4b65_b9f4_eaf856bc5a92.slice/crio-47ae37c50e187e3af5b1f199ecc51656d1271150d317551e0365ad178b647554 WatchSource:0}: Error finding container 47ae37c50e187e3af5b1f199ecc51656d1271150d317551e0365ad178b647554: Status 404 returned error can't find the container with id 47ae37c50e187e3af5b1f199ecc51656d1271150d317551e0365ad178b647554 Jan 20 17:32:54 crc kubenswrapper[4558]: I0120 17:32:54.756188 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-5f4bd4844-95dpq" event={"ID":"1038c23d-3108-472d-b43c-4181fd8ac365","Type":"ContainerStarted","Data":"e2a18338fd9ab69e3f54fc92343898be5bb0e50f61066e2c11e95d851b940179"} Jan 20 17:32:54 crc kubenswrapper[4558]: I0120 17:32:54.756222 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-5f4bd4844-95dpq" event={"ID":"1038c23d-3108-472d-b43c-4181fd8ac365","Type":"ContainerStarted","Data":"8a1e30fe4204a02290d90187641c27f6cbb6b966db67d83a76c5a54e949c72ae"} Jan 20 17:32:54 crc kubenswrapper[4558]: I0120 17:32:54.756233 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-5f4bd4844-95dpq" event={"ID":"1038c23d-3108-472d-b43c-4181fd8ac365","Type":"ContainerStarted","Data":"5031de08614f2d9218eedd73dad8864c17d8257834d19d6dbe3cc1fe9b0edfb4"} Jan 20 17:32:54 crc kubenswrapper[4558]: I0120 17:32:54.756933 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/placement-5f4bd4844-95dpq" Jan 20 17:32:54 crc kubenswrapper[4558]: I0120 17:32:54.756962 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/placement-5f4bd4844-95dpq" Jan 20 17:32:54 crc kubenswrapper[4558]: I0120 17:32:54.766312 4558 generic.go:334] "Generic (PLEG): container finished" podID="9ab93517-8f51-467d-b830-16030668be2b" containerID="d7b53b31033927ce6e8736e2b00e96c96f93bd828ee7bf1b9cf354815ad9c843" exitCode=0 Jan 20 17:32:54 crc kubenswrapper[4558]: I0120 17:32:54.766374 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-kuttl-tests/nova-cell1-cell-mapping-nbrcj" event={"ID":"9ab93517-8f51-467d-b830-16030668be2b","Type":"ContainerDied","Data":"d7b53b31033927ce6e8736e2b00e96c96f93bd828ee7bf1b9cf354815ad9c843"} Jan 20 17:32:54 crc kubenswrapper[4558]: I0120 17:32:54.789918 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/placement-5f4bd4844-95dpq" podStartSLOduration=2.789906414 podStartE2EDuration="2.789906414s" podCreationTimestamp="2026-01-20 17:32:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:32:54.784082604 +0000 UTC m=+3068.544420571" watchObservedRunningTime="2026-01-20 17:32:54.789906414 +0000 UTC m=+3068.550244381" Jan 20 17:32:54 crc kubenswrapper[4558]: I0120 17:32:54.817708 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"8fb66f31-6ee9-4ad8-8bd2-14e489b98591","Type":"ContainerStarted","Data":"642f551615ce48a73abe795ca46d2227e4e6a93313045edf6c62bffaecdc9441"} Jan 20 17:32:54 crc kubenswrapper[4558]: I0120 17:32:54.817745 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"8fb66f31-6ee9-4ad8-8bd2-14e489b98591","Type":"ContainerStarted","Data":"a9c596fe297f41ede3bfa332670ccef94114b4b90d59c0d5cf0160934a47ac18"} Jan 20 17:32:54 crc kubenswrapper[4558]: I0120 17:32:54.817758 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"8fb66f31-6ee9-4ad8-8bd2-14e489b98591","Type":"ContainerStarted","Data":"a65a5a35a730b473f8fda1850c8d424f2df977b3cb244f46616c780a7ae4c4f0"} Jan 20 17:32:54 crc kubenswrapper[4558]: I0120 17:32:54.848053 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-metadata-0" podStartSLOduration=2.848028482 podStartE2EDuration="2.848028482s" podCreationTimestamp="2026-01-20 17:32:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:32:54.841505527 +0000 UTC m=+3068.601843494" watchObservedRunningTime="2026-01-20 17:32:54.848028482 +0000 UTC m=+3068.608366449" Jan 20 17:32:54 crc kubenswrapper[4558]: I0120 17:32:54.851295 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"846821b5-5647-4ffa-a1bb-8695fe90cbcc","Type":"ContainerStarted","Data":"72da703d73712000127a5c91a66e96458bf2133274e58fb7bcbe402b18ccca9b"} Jan 20 17:32:54 crc kubenswrapper[4558]: I0120 17:32:54.851350 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"846821b5-5647-4ffa-a1bb-8695fe90cbcc","Type":"ContainerStarted","Data":"ddd93aadb72fd028114ea17b3d905d2242d090ec9a95a9d0b4c0f0502d9ec533"} Jan 20 17:32:54 crc kubenswrapper[4558]: I0120 17:32:54.860542 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"32e34e3d-8cbb-42df-9b7a-4b99b025cdf0","Type":"ContainerStarted","Data":"41deae5788f625f14add58324272ddd5cc382883dd1b3b32b04ceaa69dd8ec1e"} Jan 20 17:32:54 crc kubenswrapper[4558]: I0120 17:32:54.860578 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" 
event={"ID":"32e34e3d-8cbb-42df-9b7a-4b99b025cdf0","Type":"ContainerStarted","Data":"baa3484dda0c02685e8f957a1c16b8286d35d860abba01160d98c4a69ee4f370"} Jan 20 17:32:55 crc kubenswrapper[4558]: I0120 17:32:55.492353 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:32:55 crc kubenswrapper[4558]: I0120 17:32:55.626440 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/49dc00d4-0563-4324-a98b-e42b24b4223b-rabbitmq-plugins\") pod \"49dc00d4-0563-4324-a98b-e42b24b4223b\" (UID: \"49dc00d4-0563-4324-a98b-e42b24b4223b\") " Jan 20 17:32:55 crc kubenswrapper[4558]: I0120 17:32:55.626551 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/49dc00d4-0563-4324-a98b-e42b24b4223b-rabbitmq-erlang-cookie\") pod \"49dc00d4-0563-4324-a98b-e42b24b4223b\" (UID: \"49dc00d4-0563-4324-a98b-e42b24b4223b\") " Jan 20 17:32:55 crc kubenswrapper[4558]: I0120 17:32:55.626711 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxbsn\" (UniqueName: \"kubernetes.io/projected/49dc00d4-0563-4324-a98b-e42b24b4223b-kube-api-access-wxbsn\") pod \"49dc00d4-0563-4324-a98b-e42b24b4223b\" (UID: \"49dc00d4-0563-4324-a98b-e42b24b4223b\") " Jan 20 17:32:55 crc kubenswrapper[4558]: I0120 17:32:55.626747 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/49dc00d4-0563-4324-a98b-e42b24b4223b-server-conf\") pod \"49dc00d4-0563-4324-a98b-e42b24b4223b\" (UID: \"49dc00d4-0563-4324-a98b-e42b24b4223b\") " Jan 20 17:32:55 crc kubenswrapper[4558]: I0120 17:32:55.626853 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/49dc00d4-0563-4324-a98b-e42b24b4223b-erlang-cookie-secret\") pod \"49dc00d4-0563-4324-a98b-e42b24b4223b\" (UID: \"49dc00d4-0563-4324-a98b-e42b24b4223b\") " Jan 20 17:32:55 crc kubenswrapper[4558]: I0120 17:32:55.626878 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/49dc00d4-0563-4324-a98b-e42b24b4223b-pod-info\") pod \"49dc00d4-0563-4324-a98b-e42b24b4223b\" (UID: \"49dc00d4-0563-4324-a98b-e42b24b4223b\") " Jan 20 17:32:55 crc kubenswrapper[4558]: I0120 17:32:55.626900 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/49dc00d4-0563-4324-a98b-e42b24b4223b-plugins-conf\") pod \"49dc00d4-0563-4324-a98b-e42b24b4223b\" (UID: \"49dc00d4-0563-4324-a98b-e42b24b4223b\") " Jan 20 17:32:55 crc kubenswrapper[4558]: I0120 17:32:55.626947 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"49dc00d4-0563-4324-a98b-e42b24b4223b\" (UID: \"49dc00d4-0563-4324-a98b-e42b24b4223b\") " Jan 20 17:32:55 crc kubenswrapper[4558]: I0120 17:32:55.626991 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/49dc00d4-0563-4324-a98b-e42b24b4223b-rabbitmq-confd\") pod \"49dc00d4-0563-4324-a98b-e42b24b4223b\" (UID: \"49dc00d4-0563-4324-a98b-e42b24b4223b\") " Jan 20 17:32:55 crc 
kubenswrapper[4558]: I0120 17:32:55.628183 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/49dc00d4-0563-4324-a98b-e42b24b4223b-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "49dc00d4-0563-4324-a98b-e42b24b4223b" (UID: "49dc00d4-0563-4324-a98b-e42b24b4223b"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:32:55 crc kubenswrapper[4558]: I0120 17:32:55.628631 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49dc00d4-0563-4324-a98b-e42b24b4223b-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "49dc00d4-0563-4324-a98b-e42b24b4223b" (UID: "49dc00d4-0563-4324-a98b-e42b24b4223b"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:32:55 crc kubenswrapper[4558]: I0120 17:32:55.630158 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/49dc00d4-0563-4324-a98b-e42b24b4223b-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "49dc00d4-0563-4324-a98b-e42b24b4223b" (UID: "49dc00d4-0563-4324-a98b-e42b24b4223b"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:32:55 crc kubenswrapper[4558]: I0120 17:32:55.632291 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49dc00d4-0563-4324-a98b-e42b24b4223b-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "49dc00d4-0563-4324-a98b-e42b24b4223b" (UID: "49dc00d4-0563-4324-a98b-e42b24b4223b"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:32:55 crc kubenswrapper[4558]: I0120 17:32:55.632982 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/49dc00d4-0563-4324-a98b-e42b24b4223b-pod-info" (OuterVolumeSpecName: "pod-info") pod "49dc00d4-0563-4324-a98b-e42b24b4223b" (UID: "49dc00d4-0563-4324-a98b-e42b24b4223b"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Jan 20 17:32:55 crc kubenswrapper[4558]: I0120 17:32:55.634044 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "persistence") pod "49dc00d4-0563-4324-a98b-e42b24b4223b" (UID: "49dc00d4-0563-4324-a98b-e42b24b4223b"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:32:55 crc kubenswrapper[4558]: I0120 17:32:55.638963 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49dc00d4-0563-4324-a98b-e42b24b4223b-kube-api-access-wxbsn" (OuterVolumeSpecName: "kube-api-access-wxbsn") pod "49dc00d4-0563-4324-a98b-e42b24b4223b" (UID: "49dc00d4-0563-4324-a98b-e42b24b4223b"). InnerVolumeSpecName "kube-api-access-wxbsn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:32:55 crc kubenswrapper[4558]: I0120 17:32:55.656901 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49dc00d4-0563-4324-a98b-e42b24b4223b-server-conf" (OuterVolumeSpecName: "server-conf") pod "49dc00d4-0563-4324-a98b-e42b24b4223b" (UID: "49dc00d4-0563-4324-a98b-e42b24b4223b"). InnerVolumeSpecName "server-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:32:55 crc kubenswrapper[4558]: I0120 17:32:55.721561 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49dc00d4-0563-4324-a98b-e42b24b4223b-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "49dc00d4-0563-4324-a98b-e42b24b4223b" (UID: "49dc00d4-0563-4324-a98b-e42b24b4223b"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:32:55 crc kubenswrapper[4558]: I0120 17:32:55.729864 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/49dc00d4-0563-4324-a98b-e42b24b4223b-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:55 crc kubenswrapper[4558]: I0120 17:32:55.729889 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/49dc00d4-0563-4324-a98b-e42b24b4223b-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:55 crc kubenswrapper[4558]: I0120 17:32:55.729900 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/49dc00d4-0563-4324-a98b-e42b24b4223b-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:55 crc kubenswrapper[4558]: I0120 17:32:55.729913 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxbsn\" (UniqueName: \"kubernetes.io/projected/49dc00d4-0563-4324-a98b-e42b24b4223b-kube-api-access-wxbsn\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:55 crc kubenswrapper[4558]: I0120 17:32:55.729924 4558 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/49dc00d4-0563-4324-a98b-e42b24b4223b-server-conf\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:55 crc kubenswrapper[4558]: I0120 17:32:55.729932 4558 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/49dc00d4-0563-4324-a98b-e42b24b4223b-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:55 crc kubenswrapper[4558]: I0120 17:32:55.729940 4558 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/49dc00d4-0563-4324-a98b-e42b24b4223b-pod-info\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:55 crc kubenswrapper[4558]: I0120 17:32:55.729949 4558 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/49dc00d4-0563-4324-a98b-e42b24b4223b-plugins-conf\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:55 crc kubenswrapper[4558]: I0120 17:32:55.729975 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Jan 20 17:32:55 crc kubenswrapper[4558]: I0120 17:32:55.820723 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc" Jan 20 17:32:55 crc kubenswrapper[4558]: I0120 17:32:55.835470 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:55 crc kubenswrapper[4558]: I0120 17:32:55.903640 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"acfda63d-d93f-4b65-b9f4-eaf856bc5a92","Type":"ContainerStarted","Data":"6506b5c2fe94ddb4f34e480f304b433a04393dac97ebdf236b623e4dcfde7b2f"} Jan 20 17:32:55 crc kubenswrapper[4558]: I0120 17:32:55.903961 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"acfda63d-d93f-4b65-b9f4-eaf856bc5a92","Type":"ContainerStarted","Data":"47ae37c50e187e3af5b1f199ecc51656d1271150d317551e0365ad178b647554"} Jan 20 17:32:55 crc kubenswrapper[4558]: I0120 17:32:55.910757 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-5bb9c47bc7-ct6jf" event={"ID":"fbba86e7-87c6-4350-8bbd-565ba9abfe68","Type":"ContainerDied","Data":"8315184077e8a32f5cae764089867c8adc47f7cd18db8e5c92a42400d2b15103"} Jan 20 17:32:55 crc kubenswrapper[4558]: I0120 17:32:55.910611 4558 generic.go:334] "Generic (PLEG): container finished" podID="fbba86e7-87c6-4350-8bbd-565ba9abfe68" containerID="8315184077e8a32f5cae764089867c8adc47f7cd18db8e5c92a42400d2b15103" exitCode=0 Jan 20 17:32:55 crc kubenswrapper[4558]: I0120 17:32:55.921303 4558 generic.go:334] "Generic (PLEG): container finished" podID="35b2371f-57f2-4728-b32f-1ba587b4532e" containerID="7e836c85569914a31734b34020c760d2ba17ca6f5821aecaa0763835a0e87863" exitCode=0 Jan 20 17:32:55 crc kubenswrapper[4558]: I0120 17:32:55.921352 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-0" event={"ID":"35b2371f-57f2-4728-b32f-1ba587b4532e","Type":"ContainerDied","Data":"7e836c85569914a31734b34020c760d2ba17ca6f5821aecaa0763835a0e87863"} Jan 20 17:32:55 crc kubenswrapper[4558]: I0120 17:32:55.930583 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:32:55 crc kubenswrapper[4558]: I0120 17:32:55.931103 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"846821b5-5647-4ffa-a1bb-8695fe90cbcc","Type":"ContainerStarted","Data":"a31259f069ab4363f5184768435b810c1a24f8a773760135e07b1662c647ae59"} Jan 20 17:32:55 crc kubenswrapper[4558]: I0120 17:32:55.931372 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:32:55 crc kubenswrapper[4558]: I0120 17:32:55.938944 4558 generic.go:334] "Generic (PLEG): container finished" podID="49dc00d4-0563-4324-a98b-e42b24b4223b" containerID="4008537411d2d6de7def09cb45d06dbf9acb1876b14e155e5f8a007aaeffe4df" exitCode=0 Jan 20 17:32:55 crc kubenswrapper[4558]: I0120 17:32:55.939006 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" event={"ID":"49dc00d4-0563-4324-a98b-e42b24b4223b","Type":"ContainerDied","Data":"4008537411d2d6de7def09cb45d06dbf9acb1876b14e155e5f8a007aaeffe4df"} Jan 20 17:32:55 crc kubenswrapper[4558]: I0120 17:32:55.939027 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" event={"ID":"49dc00d4-0563-4324-a98b-e42b24b4223b","Type":"ContainerDied","Data":"33b204fb796375ab9ea199c3fa8c4edd2c70b3a454e49f2bebb05a610516fa5b"} Jan 20 17:32:55 crc kubenswrapper[4558]: I0120 17:32:55.939048 4558 scope.go:117] "RemoveContainer" containerID="4008537411d2d6de7def09cb45d06dbf9acb1876b14e155e5f8a007aaeffe4df" Jan 20 17:32:55 crc kubenswrapper[4558]: I0120 17:32:55.939203 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:32:55 crc kubenswrapper[4558]: I0120 17:32:55.967701 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"32e34e3d-8cbb-42df-9b7a-4b99b025cdf0","Type":"ContainerStarted","Data":"32cf2a0dd770e34c9d2400413f255671ccbd9a67bbd106457f3e7faaa2b2df50"} Jan 20 17:32:55 crc kubenswrapper[4558]: I0120 17:32:55.982195 4558 generic.go:334] "Generic (PLEG): container finished" podID="113d7999-28c3-4738-aa99-bd09c1880299" containerID="9bbacbd57db249ab8218348788165000873acf1f94027640d5d33d96e3684631" exitCode=0 Jan 20 17:32:55 crc kubenswrapper[4558]: I0120 17:32:55.982270 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-65b4ff8654-bj52d" event={"ID":"113d7999-28c3-4738-aa99-bd09c1880299","Type":"ContainerDied","Data":"9bbacbd57db249ab8218348788165000873acf1f94027640d5d33d96e3684631"} Jan 20 17:32:55 crc kubenswrapper[4558]: I0120 17:32:55.992494 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/cinder-api-0" podStartSLOduration=3.9924548570000002 podStartE2EDuration="3.992454857s" podCreationTimestamp="2026-01-20 17:32:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:32:55.985116127 +0000 UTC m=+3069.745454094" watchObservedRunningTime="2026-01-20 17:32:55.992454857 +0000 UTC m=+3069.752792814" Jan 20 17:32:55 crc kubenswrapper[4558]: I0120 17:32:55.996156 4558 generic.go:334] "Generic (PLEG): container finished" podID="0fa9abe5-c250-44ce-97d5-17528dd527cb" 
containerID="255c1473971684fbf37d5dbc876ce06c7e2d2050c60ff64fb10bd1de85cc2f28" exitCode=0 Jan 20 17:32:55 crc kubenswrapper[4558]: I0120 17:32:55.996446 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"0fa9abe5-c250-44ce-97d5-17528dd527cb","Type":"ContainerDied","Data":"255c1473971684fbf37d5dbc876ce06c7e2d2050c60ff64fb10bd1de85cc2f28"} Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.016671 4558 scope.go:117] "RemoveContainer" containerID="a216ad33befa8b30bde5bc44c084e7e37be4198c817fabd4c86a26d9201627a7" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.027886 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-0"] Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.035790 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-0"] Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.040009 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/35b2371f-57f2-4728-b32f-1ba587b4532e-rabbitmq-confd\") pod \"35b2371f-57f2-4728-b32f-1ba587b4532e\" (UID: \"35b2371f-57f2-4728-b32f-1ba587b4532e\") " Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.040074 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/35b2371f-57f2-4728-b32f-1ba587b4532e-pod-info\") pod \"35b2371f-57f2-4728-b32f-1ba587b4532e\" (UID: \"35b2371f-57f2-4728-b32f-1ba587b4532e\") " Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.040131 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/35b2371f-57f2-4728-b32f-1ba587b4532e-erlang-cookie-secret\") pod \"35b2371f-57f2-4728-b32f-1ba587b4532e\" (UID: \"35b2371f-57f2-4728-b32f-1ba587b4532e\") " Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.040189 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/35b2371f-57f2-4728-b32f-1ba587b4532e-rabbitmq-plugins\") pod \"35b2371f-57f2-4728-b32f-1ba587b4532e\" (UID: \"35b2371f-57f2-4728-b32f-1ba587b4532e\") " Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.040215 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/35b2371f-57f2-4728-b32f-1ba587b4532e-server-conf\") pod \"35b2371f-57f2-4728-b32f-1ba587b4532e\" (UID: \"35b2371f-57f2-4728-b32f-1ba587b4532e\") " Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.040241 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-slnl6\" (UniqueName: \"kubernetes.io/projected/35b2371f-57f2-4728-b32f-1ba587b4532e-kube-api-access-slnl6\") pod \"35b2371f-57f2-4728-b32f-1ba587b4532e\" (UID: \"35b2371f-57f2-4728-b32f-1ba587b4532e\") " Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.040369 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/35b2371f-57f2-4728-b32f-1ba587b4532e-plugins-conf\") pod \"35b2371f-57f2-4728-b32f-1ba587b4532e\" (UID: \"35b2371f-57f2-4728-b32f-1ba587b4532e\") " Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.040401 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"35b2371f-57f2-4728-b32f-1ba587b4532e\" (UID: \"35b2371f-57f2-4728-b32f-1ba587b4532e\") " Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.040420 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/35b2371f-57f2-4728-b32f-1ba587b4532e-rabbitmq-erlang-cookie\") pod \"35b2371f-57f2-4728-b32f-1ba587b4532e\" (UID: \"35b2371f-57f2-4728-b32f-1ba587b4532e\") " Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.044680 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/35b2371f-57f2-4728-b32f-1ba587b4532e-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "35b2371f-57f2-4728-b32f-1ba587b4532e" (UID: "35b2371f-57f2-4728-b32f-1ba587b4532e"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.052667 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/35b2371f-57f2-4728-b32f-1ba587b4532e-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "35b2371f-57f2-4728-b32f-1ba587b4532e" (UID: "35b2371f-57f2-4728-b32f-1ba587b4532e"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.053924 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/35b2371f-57f2-4728-b32f-1ba587b4532e-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "35b2371f-57f2-4728-b32f-1ba587b4532e" (UID: "35b2371f-57f2-4728-b32f-1ba587b4532e"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.055684 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/35b2371f-57f2-4728-b32f-1ba587b4532e-pod-info" (OuterVolumeSpecName: "pod-info") pod "35b2371f-57f2-4728-b32f-1ba587b4532e" (UID: "35b2371f-57f2-4728-b32f-1ba587b4532e"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.061267 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage12-crc" (OuterVolumeSpecName: "persistence") pod "35b2371f-57f2-4728-b32f-1ba587b4532e" (UID: "35b2371f-57f2-4728-b32f-1ba587b4532e"). InnerVolumeSpecName "local-storage12-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.061576 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/35b2371f-57f2-4728-b32f-1ba587b4532e-kube-api-access-slnl6" (OuterVolumeSpecName: "kube-api-access-slnl6") pod "35b2371f-57f2-4728-b32f-1ba587b4532e" (UID: "35b2371f-57f2-4728-b32f-1ba587b4532e"). InnerVolumeSpecName "kube-api-access-slnl6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.062326 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/35b2371f-57f2-4728-b32f-1ba587b4532e-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "35b2371f-57f2-4728-b32f-1ba587b4532e" (UID: "35b2371f-57f2-4728-b32f-1ba587b4532e"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.064218 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-0"] Jan 20 17:32:56 crc kubenswrapper[4558]: E0120 17:32:56.064708 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="49dc00d4-0563-4324-a98b-e42b24b4223b" containerName="rabbitmq" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.064727 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="49dc00d4-0563-4324-a98b-e42b24b4223b" containerName="rabbitmq" Jan 20 17:32:56 crc kubenswrapper[4558]: E0120 17:32:56.064763 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="49dc00d4-0563-4324-a98b-e42b24b4223b" containerName="setup-container" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.064770 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="49dc00d4-0563-4324-a98b-e42b24b4223b" containerName="setup-container" Jan 20 17:32:56 crc kubenswrapper[4558]: E0120 17:32:56.064794 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35b2371f-57f2-4728-b32f-1ba587b4532e" containerName="rabbitmq" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.064800 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="35b2371f-57f2-4728-b32f-1ba587b4532e" containerName="rabbitmq" Jan 20 17:32:56 crc kubenswrapper[4558]: E0120 17:32:56.064818 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35b2371f-57f2-4728-b32f-1ba587b4532e" containerName="setup-container" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.064824 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="35b2371f-57f2-4728-b32f-1ba587b4532e" containerName="setup-container" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.065026 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="35b2371f-57f2-4728-b32f-1ba587b4532e" containerName="rabbitmq" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.065043 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="49dc00d4-0563-4324-a98b-e42b24b4223b" containerName="rabbitmq" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.066136 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.069970 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-internal-api-0" podStartSLOduration=4.069954531 podStartE2EDuration="4.069954531s" podCreationTimestamp="2026-01-20 17:32:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:32:56.041079126 +0000 UTC m=+3069.801417093" watchObservedRunningTime="2026-01-20 17:32:56.069954531 +0000 UTC m=+3069.830292498" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.077624 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-rabbitmq-cell1-svc" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.077878 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"rabbitmq-cell1-plugins-conf" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.078058 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"rabbitmq-cell1-server-dockercfg-t8m5j" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.078190 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"rabbitmq-cell1-config-data" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.078306 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"rabbitmq-cell1-erlang-cookie" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.078439 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"rabbitmq-cell1-server-conf" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.078789 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"rabbitmq-cell1-default-user" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.087700 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/35b2371f-57f2-4728-b32f-1ba587b4532e-server-conf" (OuterVolumeSpecName: "server-conf") pod "35b2371f-57f2-4728-b32f-1ba587b4532e" (UID: "35b2371f-57f2-4728-b32f-1ba587b4532e"). InnerVolumeSpecName "server-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.120390 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-0"] Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.144843 4558 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/35b2371f-57f2-4728-b32f-1ba587b4532e-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.144874 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/35b2371f-57f2-4728-b32f-1ba587b4532e-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.144884 4558 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/35b2371f-57f2-4728-b32f-1ba587b4532e-server-conf\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.144895 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-slnl6\" (UniqueName: \"kubernetes.io/projected/35b2371f-57f2-4728-b32f-1ba587b4532e-kube-api-access-slnl6\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.144904 4558 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/35b2371f-57f2-4728-b32f-1ba587b4532e-plugins-conf\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.144914 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/35b2371f-57f2-4728-b32f-1ba587b4532e-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.144938 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" " Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.144946 4558 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/35b2371f-57f2-4728-b32f-1ba587b4532e-pod-info\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.148142 4558 scope.go:117] "RemoveContainer" containerID="4008537411d2d6de7def09cb45d06dbf9acb1876b14e155e5f8a007aaeffe4df" Jan 20 17:32:56 crc kubenswrapper[4558]: E0120 17:32:56.150082 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4008537411d2d6de7def09cb45d06dbf9acb1876b14e155e5f8a007aaeffe4df\": container with ID starting with 4008537411d2d6de7def09cb45d06dbf9acb1876b14e155e5f8a007aaeffe4df not found: ID does not exist" containerID="4008537411d2d6de7def09cb45d06dbf9acb1876b14e155e5f8a007aaeffe4df" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.150112 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4008537411d2d6de7def09cb45d06dbf9acb1876b14e155e5f8a007aaeffe4df"} err="failed to get container status \"4008537411d2d6de7def09cb45d06dbf9acb1876b14e155e5f8a007aaeffe4df\": rpc error: code = NotFound desc = could not find container \"4008537411d2d6de7def09cb45d06dbf9acb1876b14e155e5f8a007aaeffe4df\": container with ID starting with 
4008537411d2d6de7def09cb45d06dbf9acb1876b14e155e5f8a007aaeffe4df not found: ID does not exist" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.150133 4558 scope.go:117] "RemoveContainer" containerID="a216ad33befa8b30bde5bc44c084e7e37be4198c817fabd4c86a26d9201627a7" Jan 20 17:32:56 crc kubenswrapper[4558]: E0120 17:32:56.150639 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a216ad33befa8b30bde5bc44c084e7e37be4198c817fabd4c86a26d9201627a7\": container with ID starting with a216ad33befa8b30bde5bc44c084e7e37be4198c817fabd4c86a26d9201627a7 not found: ID does not exist" containerID="a216ad33befa8b30bde5bc44c084e7e37be4198c817fabd4c86a26d9201627a7" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.150656 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a216ad33befa8b30bde5bc44c084e7e37be4198c817fabd4c86a26d9201627a7"} err="failed to get container status \"a216ad33befa8b30bde5bc44c084e7e37be4198c817fabd4c86a26d9201627a7\": rpc error: code = NotFound desc = could not find container \"a216ad33befa8b30bde5bc44c084e7e37be4198c817fabd4c86a26d9201627a7\": container with ID starting with a216ad33befa8b30bde5bc44c084e7e37be4198c817fabd4c86a26d9201627a7 not found: ID does not exist" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.168674 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage12-crc" (UniqueName: "kubernetes.io/local-volume/local-storage12-crc") on node "crc" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.187580 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/35b2371f-57f2-4728-b32f-1ba587b4532e-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "35b2371f-57f2-4728-b32f-1ba587b4532e" (UID: "35b2371f-57f2-4728-b32f-1ba587b4532e"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.246691 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0c6a6265-cf90-4039-9200-ba478d612baa-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"0c6a6265-cf90-4039-9200-ba478d612baa\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.246756 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0c6a6265-cf90-4039-9200-ba478d612baa-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"0c6a6265-cf90-4039-9200-ba478d612baa\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.246845 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0c6a6265-cf90-4039-9200-ba478d612baa-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"0c6a6265-cf90-4039-9200-ba478d612baa\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.246900 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"0c6a6265-cf90-4039-9200-ba478d612baa\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.246931 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0c6a6265-cf90-4039-9200-ba478d612baa-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"0c6a6265-cf90-4039-9200-ba478d612baa\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.246948 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0c6a6265-cf90-4039-9200-ba478d612baa-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"0c6a6265-cf90-4039-9200-ba478d612baa\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.246993 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0c6a6265-cf90-4039-9200-ba478d612baa-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"0c6a6265-cf90-4039-9200-ba478d612baa\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.247017 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/0c6a6265-cf90-4039-9200-ba478d612baa-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"0c6a6265-cf90-4039-9200-ba478d612baa\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.247053 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r5k8q\" (UniqueName: 
\"kubernetes.io/projected/0c6a6265-cf90-4039-9200-ba478d612baa-kube-api-access-r5k8q\") pod \"rabbitmq-cell1-server-0\" (UID: \"0c6a6265-cf90-4039-9200-ba478d612baa\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.247191 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0c6a6265-cf90-4039-9200-ba478d612baa-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"0c6a6265-cf90-4039-9200-ba478d612baa\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.247256 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/0c6a6265-cf90-4039-9200-ba478d612baa-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"0c6a6265-cf90-4039-9200-ba478d612baa\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.247311 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.247329 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/35b2371f-57f2-4728-b32f-1ba587b4532e-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.348564 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/0c6a6265-cf90-4039-9200-ba478d612baa-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"0c6a6265-cf90-4039-9200-ba478d612baa\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.348768 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0c6a6265-cf90-4039-9200-ba478d612baa-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"0c6a6265-cf90-4039-9200-ba478d612baa\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.348870 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0c6a6265-cf90-4039-9200-ba478d612baa-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"0c6a6265-cf90-4039-9200-ba478d612baa\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.348975 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0c6a6265-cf90-4039-9200-ba478d612baa-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"0c6a6265-cf90-4039-9200-ba478d612baa\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.349065 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"0c6a6265-cf90-4039-9200-ba478d612baa\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.349141 4558 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0c6a6265-cf90-4039-9200-ba478d612baa-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"0c6a6265-cf90-4039-9200-ba478d612baa\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.349233 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0c6a6265-cf90-4039-9200-ba478d612baa-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"0c6a6265-cf90-4039-9200-ba478d612baa\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.349317 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0c6a6265-cf90-4039-9200-ba478d612baa-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"0c6a6265-cf90-4039-9200-ba478d612baa\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.349383 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/0c6a6265-cf90-4039-9200-ba478d612baa-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"0c6a6265-cf90-4039-9200-ba478d612baa\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.349452 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r5k8q\" (UniqueName: \"kubernetes.io/projected/0c6a6265-cf90-4039-9200-ba478d612baa-kube-api-access-r5k8q\") pod \"rabbitmq-cell1-server-0\" (UID: \"0c6a6265-cf90-4039-9200-ba478d612baa\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.349968 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0c6a6265-cf90-4039-9200-ba478d612baa-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"0c6a6265-cf90-4039-9200-ba478d612baa\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.349479 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"0c6a6265-cf90-4039-9200-ba478d612baa\") device mount path \"/mnt/openstack/pv01\"" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.350744 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0c6a6265-cf90-4039-9200-ba478d612baa-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"0c6a6265-cf90-4039-9200-ba478d612baa\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.350978 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0c6a6265-cf90-4039-9200-ba478d612baa-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"0c6a6265-cf90-4039-9200-ba478d612baa\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.351380 4558 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0c6a6265-cf90-4039-9200-ba478d612baa-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"0c6a6265-cf90-4039-9200-ba478d612baa\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.352412 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0c6a6265-cf90-4039-9200-ba478d612baa-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"0c6a6265-cf90-4039-9200-ba478d612baa\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.352762 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0c6a6265-cf90-4039-9200-ba478d612baa-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"0c6a6265-cf90-4039-9200-ba478d612baa\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.353084 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/0c6a6265-cf90-4039-9200-ba478d612baa-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"0c6a6265-cf90-4039-9200-ba478d612baa\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.353814 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/0c6a6265-cf90-4039-9200-ba478d612baa-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"0c6a6265-cf90-4039-9200-ba478d612baa\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.358004 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0c6a6265-cf90-4039-9200-ba478d612baa-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"0c6a6265-cf90-4039-9200-ba478d612baa\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.360549 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.362612 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-5bb9c47bc7-ct6jf" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.365686 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r5k8q\" (UniqueName: \"kubernetes.io/projected/0c6a6265-cf90-4039-9200-ba478d612baa-kube-api-access-r5k8q\") pod \"rabbitmq-cell1-server-0\" (UID: \"0c6a6265-cf90-4039-9200-ba478d612baa\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.370327 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-65b4ff8654-bj52d" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.370604 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0c6a6265-cf90-4039-9200-ba478d612baa-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"0c6a6265-cf90-4039-9200-ba478d612baa\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.409980 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"0c6a6265-cf90-4039-9200-ba478d612baa\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.493136 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-nbrcj" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.559865 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0fa9abe5-c250-44ce-97d5-17528dd527cb-sg-core-conf-yaml\") pod \"0fa9abe5-c250-44ce-97d5-17528dd527cb\" (UID: \"0fa9abe5-c250-44ce-97d5-17528dd527cb\") " Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.559916 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0fa9abe5-c250-44ce-97d5-17528dd527cb-combined-ca-bundle\") pod \"0fa9abe5-c250-44ce-97d5-17528dd527cb\" (UID: \"0fa9abe5-c250-44ce-97d5-17528dd527cb\") " Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.559969 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-djfsj\" (UniqueName: \"kubernetes.io/projected/0fa9abe5-c250-44ce-97d5-17528dd527cb-kube-api-access-djfsj\") pod \"0fa9abe5-c250-44ce-97d5-17528dd527cb\" (UID: \"0fa9abe5-c250-44ce-97d5-17528dd527cb\") " Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.559989 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fbba86e7-87c6-4350-8bbd-565ba9abfe68-config-data-custom\") pod \"fbba86e7-87c6-4350-8bbd-565ba9abfe68\" (UID: \"fbba86e7-87c6-4350-8bbd-565ba9abfe68\") " Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.560009 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/113d7999-28c3-4738-aa99-bd09c1880299-config-data\") pod \"113d7999-28c3-4738-aa99-bd09c1880299\" (UID: \"113d7999-28c3-4738-aa99-bd09c1880299\") " Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.560041 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbba86e7-87c6-4350-8bbd-565ba9abfe68-combined-ca-bundle\") pod \"fbba86e7-87c6-4350-8bbd-565ba9abfe68\" (UID: \"fbba86e7-87c6-4350-8bbd-565ba9abfe68\") " Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.560082 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/113d7999-28c3-4738-aa99-bd09c1880299-config-data-custom\") pod \"113d7999-28c3-4738-aa99-bd09c1880299\" (UID: \"113d7999-28c3-4738-aa99-bd09c1880299\") 
" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.560112 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0fa9abe5-c250-44ce-97d5-17528dd527cb-config-data\") pod \"0fa9abe5-c250-44ce-97d5-17528dd527cb\" (UID: \"0fa9abe5-c250-44ce-97d5-17528dd527cb\") " Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.560148 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/113d7999-28c3-4738-aa99-bd09c1880299-logs\") pod \"113d7999-28c3-4738-aa99-bd09c1880299\" (UID: \"113d7999-28c3-4738-aa99-bd09c1880299\") " Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.560199 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/113d7999-28c3-4738-aa99-bd09c1880299-combined-ca-bundle\") pod \"113d7999-28c3-4738-aa99-bd09c1880299\" (UID: \"113d7999-28c3-4738-aa99-bd09c1880299\") " Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.560217 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fbba86e7-87c6-4350-8bbd-565ba9abfe68-config-data\") pod \"fbba86e7-87c6-4350-8bbd-565ba9abfe68\" (UID: \"fbba86e7-87c6-4350-8bbd-565ba9abfe68\") " Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.560316 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0fa9abe5-c250-44ce-97d5-17528dd527cb-scripts\") pod \"0fa9abe5-c250-44ce-97d5-17528dd527cb\" (UID: \"0fa9abe5-c250-44ce-97d5-17528dd527cb\") " Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.560418 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gmsqz\" (UniqueName: \"kubernetes.io/projected/fbba86e7-87c6-4350-8bbd-565ba9abfe68-kube-api-access-gmsqz\") pod \"fbba86e7-87c6-4350-8bbd-565ba9abfe68\" (UID: \"fbba86e7-87c6-4350-8bbd-565ba9abfe68\") " Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.560479 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0fa9abe5-c250-44ce-97d5-17528dd527cb-run-httpd\") pod \"0fa9abe5-c250-44ce-97d5-17528dd527cb\" (UID: \"0fa9abe5-c250-44ce-97d5-17528dd527cb\") " Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.560518 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fbba86e7-87c6-4350-8bbd-565ba9abfe68-logs\") pod \"fbba86e7-87c6-4350-8bbd-565ba9abfe68\" (UID: \"fbba86e7-87c6-4350-8bbd-565ba9abfe68\") " Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.560541 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z6k25\" (UniqueName: \"kubernetes.io/projected/113d7999-28c3-4738-aa99-bd09c1880299-kube-api-access-z6k25\") pod \"113d7999-28c3-4738-aa99-bd09c1880299\" (UID: \"113d7999-28c3-4738-aa99-bd09c1880299\") " Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.560568 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0fa9abe5-c250-44ce-97d5-17528dd527cb-log-httpd\") pod \"0fa9abe5-c250-44ce-97d5-17528dd527cb\" (UID: \"0fa9abe5-c250-44ce-97d5-17528dd527cb\") " Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.561452 4558 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0fa9abe5-c250-44ce-97d5-17528dd527cb-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "0fa9abe5-c250-44ce-97d5-17528dd527cb" (UID: "0fa9abe5-c250-44ce-97d5-17528dd527cb"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.563538 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/113d7999-28c3-4738-aa99-bd09c1880299-logs" (OuterVolumeSpecName: "logs") pod "113d7999-28c3-4738-aa99-bd09c1880299" (UID: "113d7999-28c3-4738-aa99-bd09c1880299"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.568973 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fbba86e7-87c6-4350-8bbd-565ba9abfe68-logs" (OuterVolumeSpecName: "logs") pod "fbba86e7-87c6-4350-8bbd-565ba9abfe68" (UID: "fbba86e7-87c6-4350-8bbd-565ba9abfe68"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.571595 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0fa9abe5-c250-44ce-97d5-17528dd527cb-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "0fa9abe5-c250-44ce-97d5-17528dd527cb" (UID: "0fa9abe5-c250-44ce-97d5-17528dd527cb"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.578314 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0fa9abe5-c250-44ce-97d5-17528dd527cb-kube-api-access-djfsj" (OuterVolumeSpecName: "kube-api-access-djfsj") pod "0fa9abe5-c250-44ce-97d5-17528dd527cb" (UID: "0fa9abe5-c250-44ce-97d5-17528dd527cb"). InnerVolumeSpecName "kube-api-access-djfsj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.582585 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49dc00d4-0563-4324-a98b-e42b24b4223b" path="/var/lib/kubelet/pods/49dc00d4-0563-4324-a98b-e42b24b4223b/volumes" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.591908 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fbba86e7-87c6-4350-8bbd-565ba9abfe68-kube-api-access-gmsqz" (OuterVolumeSpecName: "kube-api-access-gmsqz") pod "fbba86e7-87c6-4350-8bbd-565ba9abfe68" (UID: "fbba86e7-87c6-4350-8bbd-565ba9abfe68"). InnerVolumeSpecName "kube-api-access-gmsqz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.595697 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/113d7999-28c3-4738-aa99-bd09c1880299-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "113d7999-28c3-4738-aa99-bd09c1880299" (UID: "113d7999-28c3-4738-aa99-bd09c1880299"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.596216 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fbba86e7-87c6-4350-8bbd-565ba9abfe68-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "fbba86e7-87c6-4350-8bbd-565ba9abfe68" (UID: "fbba86e7-87c6-4350-8bbd-565ba9abfe68"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.617146 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/113d7999-28c3-4738-aa99-bd09c1880299-kube-api-access-z6k25" (OuterVolumeSpecName: "kube-api-access-z6k25") pod "113d7999-28c3-4738-aa99-bd09c1880299" (UID: "113d7999-28c3-4738-aa99-bd09c1880299"). InnerVolumeSpecName "kube-api-access-z6k25". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.617547 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0fa9abe5-c250-44ce-97d5-17528dd527cb-scripts" (OuterVolumeSpecName: "scripts") pod "0fa9abe5-c250-44ce-97d5-17528dd527cb" (UID: "0fa9abe5-c250-44ce-97d5-17528dd527cb"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.639866 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fbba86e7-87c6-4350-8bbd-565ba9abfe68-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fbba86e7-87c6-4350-8bbd-565ba9abfe68" (UID: "fbba86e7-87c6-4350-8bbd-565ba9abfe68"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.642685 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0fa9abe5-c250-44ce-97d5-17528dd527cb-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "0fa9abe5-c250-44ce-97d5-17528dd527cb" (UID: "0fa9abe5-c250-44ce-97d5-17528dd527cb"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.649250 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/113d7999-28c3-4738-aa99-bd09c1880299-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "113d7999-28c3-4738-aa99-bd09c1880299" (UID: "113d7999-28c3-4738-aa99-bd09c1880299"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.655472 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/113d7999-28c3-4738-aa99-bd09c1880299-config-data" (OuterVolumeSpecName: "config-data") pod "113d7999-28c3-4738-aa99-bd09c1880299" (UID: "113d7999-28c3-4738-aa99-bd09c1880299"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.662744 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wbn5n\" (UniqueName: \"kubernetes.io/projected/9ab93517-8f51-467d-b830-16030668be2b-kube-api-access-wbn5n\") pod \"9ab93517-8f51-467d-b830-16030668be2b\" (UID: \"9ab93517-8f51-467d-b830-16030668be2b\") " Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.662811 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9ab93517-8f51-467d-b830-16030668be2b-scripts\") pod \"9ab93517-8f51-467d-b830-16030668be2b\" (UID: \"9ab93517-8f51-467d-b830-16030668be2b\") " Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.662837 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9ab93517-8f51-467d-b830-16030668be2b-config-data\") pod \"9ab93517-8f51-467d-b830-16030668be2b\" (UID: \"9ab93517-8f51-467d-b830-16030668be2b\") " Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.662947 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ab93517-8f51-467d-b830-16030668be2b-combined-ca-bundle\") pod \"9ab93517-8f51-467d-b830-16030668be2b\" (UID: \"9ab93517-8f51-467d-b830-16030668be2b\") " Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.663701 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-djfsj\" (UniqueName: \"kubernetes.io/projected/0fa9abe5-c250-44ce-97d5-17528dd527cb-kube-api-access-djfsj\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.663724 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fbba86e7-87c6-4350-8bbd-565ba9abfe68-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.663735 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/113d7999-28c3-4738-aa99-bd09c1880299-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.663745 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fbba86e7-87c6-4350-8bbd-565ba9abfe68-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.663755 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/113d7999-28c3-4738-aa99-bd09c1880299-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.663764 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/113d7999-28c3-4738-aa99-bd09c1880299-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.663773 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/113d7999-28c3-4738-aa99-bd09c1880299-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.663781 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/0fa9abe5-c250-44ce-97d5-17528dd527cb-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.663791 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gmsqz\" (UniqueName: \"kubernetes.io/projected/fbba86e7-87c6-4350-8bbd-565ba9abfe68-kube-api-access-gmsqz\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.663800 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0fa9abe5-c250-44ce-97d5-17528dd527cb-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.663816 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fbba86e7-87c6-4350-8bbd-565ba9abfe68-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.663824 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z6k25\" (UniqueName: \"kubernetes.io/projected/113d7999-28c3-4738-aa99-bd09c1880299-kube-api-access-z6k25\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.663833 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0fa9abe5-c250-44ce-97d5-17528dd527cb-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.663843 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0fa9abe5-c250-44ce-97d5-17528dd527cb-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.665696 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9ab93517-8f51-467d-b830-16030668be2b-kube-api-access-wbn5n" (OuterVolumeSpecName: "kube-api-access-wbn5n") pod "9ab93517-8f51-467d-b830-16030668be2b" (UID: "9ab93517-8f51-467d-b830-16030668be2b"). InnerVolumeSpecName "kube-api-access-wbn5n". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.669305 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9ab93517-8f51-467d-b830-16030668be2b-scripts" (OuterVolumeSpecName: "scripts") pod "9ab93517-8f51-467d-b830-16030668be2b" (UID: "9ab93517-8f51-467d-b830-16030668be2b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.675826 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fbba86e7-87c6-4350-8bbd-565ba9abfe68-config-data" (OuterVolumeSpecName: "config-data") pod "fbba86e7-87c6-4350-8bbd-565ba9abfe68" (UID: "fbba86e7-87c6-4350-8bbd-565ba9abfe68"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.682532 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9ab93517-8f51-467d-b830-16030668be2b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9ab93517-8f51-467d-b830-16030668be2b" (UID: "9ab93517-8f51-467d-b830-16030668be2b"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.684301 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9ab93517-8f51-467d-b830-16030668be2b-config-data" (OuterVolumeSpecName: "config-data") pod "9ab93517-8f51-467d-b830-16030668be2b" (UID: "9ab93517-8f51-467d-b830-16030668be2b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.689527 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0fa9abe5-c250-44ce-97d5-17528dd527cb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0fa9abe5-c250-44ce-97d5-17528dd527cb" (UID: "0fa9abe5-c250-44ce-97d5-17528dd527cb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.693895 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.738558 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0fa9abe5-c250-44ce-97d5-17528dd527cb-config-data" (OuterVolumeSpecName: "config-data") pod "0fa9abe5-c250-44ce-97d5-17528dd527cb" (UID: "0fa9abe5-c250-44ce-97d5-17528dd527cb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.743156 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.766111 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wbn5n\" (UniqueName: \"kubernetes.io/projected/9ab93517-8f51-467d-b830-16030668be2b-kube-api-access-wbn5n\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.766180 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9ab93517-8f51-467d-b830-16030668be2b-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.766197 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9ab93517-8f51-467d-b830-16030668be2b-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.766210 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0fa9abe5-c250-44ce-97d5-17528dd527cb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.766221 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9ab93517-8f51-467d-b830-16030668be2b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.766233 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0fa9abe5-c250-44ce-97d5-17528dd527cb-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:32:56 crc kubenswrapper[4558]: I0120 17:32:56.766243 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fbba86e7-87c6-4350-8bbd-565ba9abfe68-config-data\") on node 
\"crc\" DevicePath \"\"" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.014034 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-65b4ff8654-bj52d" event={"ID":"113d7999-28c3-4738-aa99-bd09c1880299","Type":"ContainerDied","Data":"ceabbb81be2bef2501c32958390209c116353c7c402919013de9b183b48d54f8"} Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.014365 4558 scope.go:117] "RemoveContainer" containerID="9bbacbd57db249ab8218348788165000873acf1f94027640d5d33d96e3684631" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.014513 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-65b4ff8654-bj52d" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.019372 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.019369 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"0fa9abe5-c250-44ce-97d5-17528dd527cb","Type":"ContainerDied","Data":"0d1fa68b946d2d23bd99c093ea577f81d6d1e250ab188ed02e8270a1ed6e6c89"} Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.021402 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-nbrcj" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.021636 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-nbrcj" event={"ID":"9ab93517-8f51-467d-b830-16030668be2b","Type":"ContainerDied","Data":"4a13176f595f2b9edae8e641361b42151f47e949617e58060b99a9f8a1b196dc"} Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.021666 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4a13176f595f2b9edae8e641361b42151f47e949617e58060b99a9f8a1b196dc" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.023557 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"acfda63d-d93f-4b65-b9f4-eaf856bc5a92","Type":"ContainerStarted","Data":"d0d2f8f1f296f903b655cb3d5bbafcb0fde6cac093a0f3918cb5ca364a74a320"} Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.025544 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-5bb9c47bc7-ct6jf" event={"ID":"fbba86e7-87c6-4350-8bbd-565ba9abfe68","Type":"ContainerDied","Data":"b4e3b274173b767600258b538c6ec79b6fad8e999a9177f1eb117b98219db0aa"} Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.025619 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-5bb9c47bc7-ct6jf" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.028403 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-0" event={"ID":"35b2371f-57f2-4728-b32f-1ba587b4532e","Type":"ContainerDied","Data":"6ca2725584caac477b29e00556aa6daeb9f8347980274d9c02390fd9c1f3473f"} Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.028463 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.036836 4558 scope.go:117] "RemoveContainer" containerID="d981d2b9fdcc50a6990df32e78be80ae43eebfa1b705595c1292a8f9a9e81ae5" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.058580 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-external-api-0" podStartSLOduration=4.058554361 podStartE2EDuration="4.058554361s" podCreationTimestamp="2026-01-20 17:32:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:32:57.046076039 +0000 UTC m=+3070.806414006" watchObservedRunningTime="2026-01-20 17:32:57.058554361 +0000 UTC m=+3070.818892329" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.071139 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.075996 4558 scope.go:117] "RemoveContainer" containerID="6b63c5d8df23726b95ea12ea0a38363f05fc0ca2ac135e3fa2be2b0c33f58276" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.082344 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.111870 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:32:57 crc kubenswrapper[4558]: E0120 17:32:57.118301 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0fa9abe5-c250-44ce-97d5-17528dd527cb" containerName="sg-core" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.118328 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0fa9abe5-c250-44ce-97d5-17528dd527cb" containerName="sg-core" Jan 20 17:32:57 crc kubenswrapper[4558]: E0120 17:32:57.118374 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0fa9abe5-c250-44ce-97d5-17528dd527cb" containerName="ceilometer-central-agent" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.118381 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0fa9abe5-c250-44ce-97d5-17528dd527cb" containerName="ceilometer-central-agent" Jan 20 17:32:57 crc kubenswrapper[4558]: E0120 17:32:57.118401 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0fa9abe5-c250-44ce-97d5-17528dd527cb" containerName="ceilometer-notification-agent" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.118409 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0fa9abe5-c250-44ce-97d5-17528dd527cb" containerName="ceilometer-notification-agent" Jan 20 17:32:57 crc kubenswrapper[4558]: E0120 17:32:57.118484 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="113d7999-28c3-4738-aa99-bd09c1880299" containerName="barbican-keystone-listener-log" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.118491 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="113d7999-28c3-4738-aa99-bd09c1880299" containerName="barbican-keystone-listener-log" Jan 20 17:32:57 crc kubenswrapper[4558]: E0120 17:32:57.118505 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0fa9abe5-c250-44ce-97d5-17528dd527cb" containerName="proxy-httpd" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.118512 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0fa9abe5-c250-44ce-97d5-17528dd527cb" containerName="proxy-httpd" Jan 20 
17:32:57 crc kubenswrapper[4558]: E0120 17:32:57.118522 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fbba86e7-87c6-4350-8bbd-565ba9abfe68" containerName="barbican-worker" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.118528 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="fbba86e7-87c6-4350-8bbd-565ba9abfe68" containerName="barbican-worker" Jan 20 17:32:57 crc kubenswrapper[4558]: E0120 17:32:57.118539 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="113d7999-28c3-4738-aa99-bd09c1880299" containerName="barbican-keystone-listener" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.118545 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="113d7999-28c3-4738-aa99-bd09c1880299" containerName="barbican-keystone-listener" Jan 20 17:32:57 crc kubenswrapper[4558]: E0120 17:32:57.118721 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ab93517-8f51-467d-b830-16030668be2b" containerName="nova-manage" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.118731 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ab93517-8f51-467d-b830-16030668be2b" containerName="nova-manage" Jan 20 17:32:57 crc kubenswrapper[4558]: E0120 17:32:57.118750 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fbba86e7-87c6-4350-8bbd-565ba9abfe68" containerName="barbican-worker-log" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.118758 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="fbba86e7-87c6-4350-8bbd-565ba9abfe68" containerName="barbican-worker-log" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.118969 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="9ab93517-8f51-467d-b830-16030668be2b" containerName="nova-manage" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.118985 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="0fa9abe5-c250-44ce-97d5-17528dd527cb" containerName="proxy-httpd" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.118991 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="fbba86e7-87c6-4350-8bbd-565ba9abfe68" containerName="barbican-worker-log" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.119001 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="113d7999-28c3-4738-aa99-bd09c1880299" containerName="barbican-keystone-listener" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.119009 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="0fa9abe5-c250-44ce-97d5-17528dd527cb" containerName="sg-core" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.119017 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="0fa9abe5-c250-44ce-97d5-17528dd527cb" containerName="ceilometer-notification-agent" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.119029 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="fbba86e7-87c6-4350-8bbd-565ba9abfe68" containerName="barbican-worker" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.119040 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="0fa9abe5-c250-44ce-97d5-17528dd527cb" containerName="ceilometer-central-agent" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.119047 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="113d7999-28c3-4738-aa99-bd09c1880299" containerName="barbican-keystone-listener-log" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.131034 4558 kubelet.go:2437] 
"SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-65b4ff8654-bj52d"] Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.131144 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.137678 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"rabbitmq-erlang-cookie" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.137872 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"rabbitmq-server-dockercfg-pqnkt" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.137976 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"rabbitmq-default-user" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.137996 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"rabbitmq-server-conf" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.138096 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-rabbitmq-svc" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.138000 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"rabbitmq-plugins-conf" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.138214 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"rabbitmq-config-data" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.149426 4558 scope.go:117] "RemoveContainer" containerID="ba412cf075d10c839cba475b03b73954666ef9d98b9d93b40ae3e1e0eb1beb15" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.156249 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-65b4ff8654-bj52d"] Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.174346 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qkf2f\" (UniqueName: \"kubernetes.io/projected/0696a635-5dc9-46e9-8502-47fab9ff761c-kube-api-access-qkf2f\") pod \"rabbitmq-server-0\" (UID: \"0696a635-5dc9-46e9-8502-47fab9ff761c\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.174397 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/0696a635-5dc9-46e9-8502-47fab9ff761c-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"0696a635-5dc9-46e9-8502-47fab9ff761c\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.174455 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/0696a635-5dc9-46e9-8502-47fab9ff761c-server-conf\") pod \"rabbitmq-server-0\" (UID: \"0696a635-5dc9-46e9-8502-47fab9ff761c\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.174481 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0696a635-5dc9-46e9-8502-47fab9ff761c-pod-info\") pod \"rabbitmq-server-0\" (UID: \"0696a635-5dc9-46e9-8502-47fab9ff761c\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 
17:32:57.174519 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0696a635-5dc9-46e9-8502-47fab9ff761c-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"0696a635-5dc9-46e9-8502-47fab9ff761c\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.174561 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0696a635-5dc9-46e9-8502-47fab9ff761c-config-data\") pod \"rabbitmq-server-0\" (UID: \"0696a635-5dc9-46e9-8502-47fab9ff761c\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.174608 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-server-0\" (UID: \"0696a635-5dc9-46e9-8502-47fab9ff761c\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.174639 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0696a635-5dc9-46e9-8502-47fab9ff761c-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"0696a635-5dc9-46e9-8502-47fab9ff761c\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.174662 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0696a635-5dc9-46e9-8502-47fab9ff761c-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"0696a635-5dc9-46e9-8502-47fab9ff761c\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.174723 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0696a635-5dc9-46e9-8502-47fab9ff761c-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"0696a635-5dc9-46e9-8502-47fab9ff761c\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.174838 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0696a635-5dc9-46e9-8502-47fab9ff761c-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"0696a635-5dc9-46e9-8502-47fab9ff761c\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.188331 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.199654 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.206675 4558 scope.go:117] "RemoveContainer" containerID="255c1473971684fbf37d5dbc876ce06c7e2d2050c60ff64fb10bd1de85cc2f28" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.225150 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.232221 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack-kuttl-tests/barbican-worker-5bb9c47bc7-ct6jf"] Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.241097 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.243435 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.245214 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-worker-5bb9c47bc7-ct6jf"] Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.247157 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.247438 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.250303 4558 scope.go:117] "RemoveContainer" containerID="94acb89771ba53cc1e3c2ff43ebec406a07834ae245a8a2d429afb1cf797e2ed" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.253633 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.265231 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-0"] Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.276910 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0696a635-5dc9-46e9-8502-47fab9ff761c-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"0696a635-5dc9-46e9-8502-47fab9ff761c\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.276954 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0696a635-5dc9-46e9-8502-47fab9ff761c-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"0696a635-5dc9-46e9-8502-47fab9ff761c\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.277024 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b115808a-3ae3-40c5-b330-f58aa6af7503-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b115808a-3ae3-40c5-b330-f58aa6af7503\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.277083 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0696a635-5dc9-46e9-8502-47fab9ff761c-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"0696a635-5dc9-46e9-8502-47fab9ff761c\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.277209 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0696a635-5dc9-46e9-8502-47fab9ff761c-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"0696a635-5dc9-46e9-8502-47fab9ff761c\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.277269 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" 
(UniqueName: \"kubernetes.io/secret/b115808a-3ae3-40c5-b330-f58aa6af7503-config-data\") pod \"ceilometer-0\" (UID: \"b115808a-3ae3-40c5-b330-f58aa6af7503\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.277342 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qkf2f\" (UniqueName: \"kubernetes.io/projected/0696a635-5dc9-46e9-8502-47fab9ff761c-kube-api-access-qkf2f\") pod \"rabbitmq-server-0\" (UID: \"0696a635-5dc9-46e9-8502-47fab9ff761c\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.277378 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/0696a635-5dc9-46e9-8502-47fab9ff761c-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"0696a635-5dc9-46e9-8502-47fab9ff761c\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.277441 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/0696a635-5dc9-46e9-8502-47fab9ff761c-server-conf\") pod \"rabbitmq-server-0\" (UID: \"0696a635-5dc9-46e9-8502-47fab9ff761c\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.277465 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b115808a-3ae3-40c5-b330-f58aa6af7503-run-httpd\") pod \"ceilometer-0\" (UID: \"b115808a-3ae3-40c5-b330-f58aa6af7503\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.277510 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0696a635-5dc9-46e9-8502-47fab9ff761c-pod-info\") pod \"rabbitmq-server-0\" (UID: \"0696a635-5dc9-46e9-8502-47fab9ff761c\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.277552 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0696a635-5dc9-46e9-8502-47fab9ff761c-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"0696a635-5dc9-46e9-8502-47fab9ff761c\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.277599 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b115808a-3ae3-40c5-b330-f58aa6af7503-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b115808a-3ae3-40c5-b330-f58aa6af7503\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.277617 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t4prt\" (UniqueName: \"kubernetes.io/projected/b115808a-3ae3-40c5-b330-f58aa6af7503-kube-api-access-t4prt\") pod \"ceilometer-0\" (UID: \"b115808a-3ae3-40c5-b330-f58aa6af7503\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.277654 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0696a635-5dc9-46e9-8502-47fab9ff761c-config-data\") pod \"rabbitmq-server-0\" (UID: 
\"0696a635-5dc9-46e9-8502-47fab9ff761c\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.277688 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b115808a-3ae3-40c5-b330-f58aa6af7503-scripts\") pod \"ceilometer-0\" (UID: \"b115808a-3ae3-40c5-b330-f58aa6af7503\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.277730 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b115808a-3ae3-40c5-b330-f58aa6af7503-log-httpd\") pod \"ceilometer-0\" (UID: \"b115808a-3ae3-40c5-b330-f58aa6af7503\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.277752 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-server-0\" (UID: \"0696a635-5dc9-46e9-8502-47fab9ff761c\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.278145 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-server-0\" (UID: \"0696a635-5dc9-46e9-8502-47fab9ff761c\") device mount path \"/mnt/openstack/pv12\"" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.278925 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0696a635-5dc9-46e9-8502-47fab9ff761c-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"0696a635-5dc9-46e9-8502-47fab9ff761c\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.280029 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0696a635-5dc9-46e9-8502-47fab9ff761c-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"0696a635-5dc9-46e9-8502-47fab9ff761c\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.280043 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/0696a635-5dc9-46e9-8502-47fab9ff761c-server-conf\") pod \"rabbitmq-server-0\" (UID: \"0696a635-5dc9-46e9-8502-47fab9ff761c\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.280212 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0696a635-5dc9-46e9-8502-47fab9ff761c-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"0696a635-5dc9-46e9-8502-47fab9ff761c\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.280317 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0696a635-5dc9-46e9-8502-47fab9ff761c-config-data\") pod \"rabbitmq-server-0\" (UID: \"0696a635-5dc9-46e9-8502-47fab9ff761c\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.284313 4558 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0696a635-5dc9-46e9-8502-47fab9ff761c-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"0696a635-5dc9-46e9-8502-47fab9ff761c\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.285121 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/0696a635-5dc9-46e9-8502-47fab9ff761c-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"0696a635-5dc9-46e9-8502-47fab9ff761c\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.286704 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0696a635-5dc9-46e9-8502-47fab9ff761c-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"0696a635-5dc9-46e9-8502-47fab9ff761c\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.287615 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0696a635-5dc9-46e9-8502-47fab9ff761c-pod-info\") pod \"rabbitmq-server-0\" (UID: \"0696a635-5dc9-46e9-8502-47fab9ff761c\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.299021 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qkf2f\" (UniqueName: \"kubernetes.io/projected/0696a635-5dc9-46e9-8502-47fab9ff761c-kube-api-access-qkf2f\") pod \"rabbitmq-server-0\" (UID: \"0696a635-5dc9-46e9-8502-47fab9ff761c\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.319389 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-server-0\" (UID: \"0696a635-5dc9-46e9-8502-47fab9ff761c\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.322492 4558 scope.go:117] "RemoveContainer" containerID="8315184077e8a32f5cae764089867c8adc47f7cd18db8e5c92a42400d2b15103" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.352159 4558 scope.go:117] "RemoveContainer" containerID="c03486a35df252536c36f85a6561b27a72947105aeca886fbecad26ee9903d17" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.373605 4558 scope.go:117] "RemoveContainer" containerID="7e836c85569914a31734b34020c760d2ba17ca6f5821aecaa0763835a0e87863" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.380307 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b115808a-3ae3-40c5-b330-f58aa6af7503-config-data\") pod \"ceilometer-0\" (UID: \"b115808a-3ae3-40c5-b330-f58aa6af7503\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.380446 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b115808a-3ae3-40c5-b330-f58aa6af7503-run-httpd\") pod \"ceilometer-0\" (UID: \"b115808a-3ae3-40c5-b330-f58aa6af7503\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.380529 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/b115808a-3ae3-40c5-b330-f58aa6af7503-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b115808a-3ae3-40c5-b330-f58aa6af7503\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.380555 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t4prt\" (UniqueName: \"kubernetes.io/projected/b115808a-3ae3-40c5-b330-f58aa6af7503-kube-api-access-t4prt\") pod \"ceilometer-0\" (UID: \"b115808a-3ae3-40c5-b330-f58aa6af7503\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.380598 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b115808a-3ae3-40c5-b330-f58aa6af7503-scripts\") pod \"ceilometer-0\" (UID: \"b115808a-3ae3-40c5-b330-f58aa6af7503\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.380633 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b115808a-3ae3-40c5-b330-f58aa6af7503-log-httpd\") pod \"ceilometer-0\" (UID: \"b115808a-3ae3-40c5-b330-f58aa6af7503\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.380701 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b115808a-3ae3-40c5-b330-f58aa6af7503-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b115808a-3ae3-40c5-b330-f58aa6af7503\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.380991 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b115808a-3ae3-40c5-b330-f58aa6af7503-run-httpd\") pod \"ceilometer-0\" (UID: \"b115808a-3ae3-40c5-b330-f58aa6af7503\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.383890 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b115808a-3ae3-40c5-b330-f58aa6af7503-log-httpd\") pod \"ceilometer-0\" (UID: \"b115808a-3ae3-40c5-b330-f58aa6af7503\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.384414 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b115808a-3ae3-40c5-b330-f58aa6af7503-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b115808a-3ae3-40c5-b330-f58aa6af7503\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.385467 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b115808a-3ae3-40c5-b330-f58aa6af7503-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b115808a-3ae3-40c5-b330-f58aa6af7503\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.385892 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b115808a-3ae3-40c5-b330-f58aa6af7503-config-data\") pod \"ceilometer-0\" (UID: \"b115808a-3ae3-40c5-b330-f58aa6af7503\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.387784 4558 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b115808a-3ae3-40c5-b330-f58aa6af7503-scripts\") pod \"ceilometer-0\" (UID: \"b115808a-3ae3-40c5-b330-f58aa6af7503\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.393672 4558 scope.go:117] "RemoveContainer" containerID="470d1df81fd770d7b974c08649b44b1ac9d508cc4fcfb3aae910123fc7a03c96" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.396761 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t4prt\" (UniqueName: \"kubernetes.io/projected/b115808a-3ae3-40c5-b330-f58aa6af7503-kube-api-access-t4prt\") pod \"ceilometer-0\" (UID: \"b115808a-3ae3-40c5-b330-f58aa6af7503\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.476811 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:32:57 crc kubenswrapper[4558]: I0120 17:32:57.565638 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:32:58 crc kubenswrapper[4558]: I0120 17:32:58.030291 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:32:58 crc kubenswrapper[4558]: I0120 17:32:58.084033 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" event={"ID":"0c6a6265-cf90-4039-9200-ba478d612baa","Type":"ContainerStarted","Data":"364bffe31431af105ef6fe2c18d88e8f24a66280d58bb91d9fab9166a07450c1"} Jan 20 17:32:58 crc kubenswrapper[4558]: I0120 17:32:58.130140 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:32:58 crc kubenswrapper[4558]: I0120 17:32:58.255950 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:32:58 crc kubenswrapper[4558]: I0120 17:32:58.257258 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:32:58 crc kubenswrapper[4558]: I0120 17:32:58.575226 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0fa9abe5-c250-44ce-97d5-17528dd527cb" path="/var/lib/kubelet/pods/0fa9abe5-c250-44ce-97d5-17528dd527cb/volumes" Jan 20 17:32:58 crc kubenswrapper[4558]: I0120 17:32:58.576309 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="113d7999-28c3-4738-aa99-bd09c1880299" path="/var/lib/kubelet/pods/113d7999-28c3-4738-aa99-bd09c1880299/volumes" Jan 20 17:32:58 crc kubenswrapper[4558]: I0120 17:32:58.577498 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="35b2371f-57f2-4728-b32f-1ba587b4532e" path="/var/lib/kubelet/pods/35b2371f-57f2-4728-b32f-1ba587b4532e/volumes" Jan 20 17:32:58 crc kubenswrapper[4558]: I0120 17:32:58.578107 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fbba86e7-87c6-4350-8bbd-565ba9abfe68" path="/var/lib/kubelet/pods/fbba86e7-87c6-4350-8bbd-565ba9abfe68/volumes" Jan 20 17:32:58 crc kubenswrapper[4558]: I0120 17:32:58.626069 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/barbican-api-6d8c858488-xrdwn" Jan 20 17:32:58 crc kubenswrapper[4558]: I0120 17:32:58.650329 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack-kuttl-tests/barbican-api-6d8c858488-xrdwn" Jan 20 17:32:58 crc kubenswrapper[4558]: I0120 17:32:58.712603 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-api-74c98446fb-d49s5"] Jan 20 17:32:58 crc kubenswrapper[4558]: I0120 17:32:58.712856 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-api-74c98446fb-d49s5" podUID="e83a55be-4002-403a-911f-8f33c234b7e4" containerName="barbican-api-log" containerID="cri-o://33cd821346a573aa7c60d888955a767917f3516b07634b6ccea35a0b1b0543af" gracePeriod=30 Jan 20 17:32:58 crc kubenswrapper[4558]: I0120 17:32:58.713004 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-api-74c98446fb-d49s5" podUID="e83a55be-4002-403a-911f-8f33c234b7e4" containerName="barbican-api" containerID="cri-o://9d39db634c3fa17e206e8d37ce291551fd4325bd609316a24c87cd44c0b5f95d" gracePeriod=30 Jan 20 17:32:59 crc kubenswrapper[4558]: I0120 17:32:59.096935 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"b115808a-3ae3-40c5-b330-f58aa6af7503","Type":"ContainerStarted","Data":"902c7c53a3328dda0625dd77131ac2bb26f4fce8e93e1be078eaef8a855526bf"} Jan 20 17:32:59 crc kubenswrapper[4558]: I0120 17:32:59.097292 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"b115808a-3ae3-40c5-b330-f58aa6af7503","Type":"ContainerStarted","Data":"be7d51cbfa258bd2e7eea0fc06c1a734c129b658ae00c98bb8b2246305f07912"} Jan 20 17:32:59 crc kubenswrapper[4558]: I0120 17:32:59.100048 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-0" event={"ID":"0696a635-5dc9-46e9-8502-47fab9ff761c","Type":"ContainerStarted","Data":"792a3568e787cfc41cee7d472f88c204c7d8314c5091fb0349e59f2ff91ddae6"} Jan 20 17:32:59 crc kubenswrapper[4558]: I0120 17:32:59.104043 4558 generic.go:334] "Generic (PLEG): container finished" podID="e83a55be-4002-403a-911f-8f33c234b7e4" containerID="33cd821346a573aa7c60d888955a767917f3516b07634b6ccea35a0b1b0543af" exitCode=143 Jan 20 17:32:59 crc kubenswrapper[4558]: I0120 17:32:59.104183 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-74c98446fb-d49s5" event={"ID":"e83a55be-4002-403a-911f-8f33c234b7e4","Type":"ContainerDied","Data":"33cd821346a573aa7c60d888955a767917f3516b07634b6ccea35a0b1b0543af"} Jan 20 17:32:59 crc kubenswrapper[4558]: I0120 17:32:59.106240 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" event={"ID":"0c6a6265-cf90-4039-9200-ba478d612baa","Type":"ContainerStarted","Data":"44c62c978803560adab011f48335024e3733608109c5e30c32a43f89dc5997fd"} Jan 20 17:32:59 crc kubenswrapper[4558]: I0120 17:32:59.165835 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:32:59 crc kubenswrapper[4558]: I0120 17:32:59.180305 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:33:00 crc kubenswrapper[4558]: I0120 17:33:00.131815 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"b115808a-3ae3-40c5-b330-f58aa6af7503","Type":"ContainerStarted","Data":"0704db880c3d402aa539743da9a5f815da9504c0ee52f7941ada9ffd86b66fe0"} Jan 20 17:33:00 crc 
kubenswrapper[4558]: I0120 17:33:00.142960 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-0" event={"ID":"0696a635-5dc9-46e9-8502-47fab9ff761c","Type":"ContainerStarted","Data":"89fad12073aaf3b621cce6e9c2e512578ea6f57b9f2d06d4a3ac809c1fe90a00"} Jan 20 17:33:00 crc kubenswrapper[4558]: I0120 17:33:00.152314 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:33:00 crc kubenswrapper[4558]: I0120 17:33:00.361009 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:33:00 crc kubenswrapper[4558]: I0120 17:33:00.475234 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:33:00 crc kubenswrapper[4558]: I0120 17:33:00.475486 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="a336b03d-9969-41de-9f19-f3479f76a33d" containerName="nova-scheduler-scheduler" containerID="cri-o://90ed3b76b87cd561c155928fde1a7320fbfce5af416fc09381ed42b6ead92480" gracePeriod=30 Jan 20 17:33:00 crc kubenswrapper[4558]: I0120 17:33:00.488993 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:33:00 crc kubenswrapper[4558]: I0120 17:33:00.489299 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podUID="c686e4c2-68d0-4999-9078-90c70927d9ae" containerName="nova-cell1-conductor-conductor" containerID="cri-o://46edde77e0df814c67d5b21fe72058f3eb4efd6acd4a7f68a737219cd8448b40" gracePeriod=30 Jan 20 17:33:00 crc kubenswrapper[4558]: I0120 17:33:00.544760 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:33:00 crc kubenswrapper[4558]: I0120 17:33:00.673646 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:33:00 crc kubenswrapper[4558]: I0120 17:33:00.674208 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="32e34e3d-8cbb-42df-9b7a-4b99b025cdf0" containerName="glance-log" containerID="cri-o://41deae5788f625f14add58324272ddd5cc382883dd1b3b32b04ceaa69dd8ec1e" gracePeriod=30 Jan 20 17:33:00 crc kubenswrapper[4558]: I0120 17:33:00.674384 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="32e34e3d-8cbb-42df-9b7a-4b99b025cdf0" containerName="glance-httpd" containerID="cri-o://32cf2a0dd770e34c9d2400413f255671ccbd9a67bbd106457f3e7faaa2b2df50" gracePeriod=30 Jan 20 17:33:00 crc kubenswrapper[4558]: I0120 17:33:00.708049 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:33:00 crc kubenswrapper[4558]: I0120 17:33:00.708311 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-api-0" podUID="846821b5-5647-4ffa-a1bb-8695fe90cbcc" containerName="cinder-api-log" containerID="cri-o://72da703d73712000127a5c91a66e96458bf2133274e58fb7bcbe402b18ccca9b" gracePeriod=30 Jan 20 17:33:00 crc kubenswrapper[4558]: I0120 17:33:00.710103 4558 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack-kuttl-tests/cinder-api-0" podUID="846821b5-5647-4ffa-a1bb-8695fe90cbcc" containerName="cinder-api" containerID="cri-o://a31259f069ab4363f5184768435b810c1a24f8a773760135e07b1662c647ae59" gracePeriod=30 Jan 20 17:33:00 crc kubenswrapper[4558]: I0120 17:33:00.722135 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:33:00 crc kubenswrapper[4558]: I0120 17:33:00.722338 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="6dbc6996-80bf-4f86-9acf-f74dd04e51b1" containerName="nova-cell0-conductor-conductor" containerID="cri-o://a45e6f8d25e2903ff94497e102fe9cb0b1841d235bfac8ac8e62657bad6c6b17" gracePeriod=30 Jan 20 17:33:00 crc kubenswrapper[4558]: I0120 17:33:00.744044 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-zwldh"] Jan 20 17:33:00 crc kubenswrapper[4558]: I0120 17:33:00.748870 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/cinder-api-0" podUID="846821b5-5647-4ffa-a1bb-8695fe90cbcc" containerName="cinder-api" probeResult="failure" output="Get \"http://10.217.1.220:8776/healthcheck\": EOF" Jan 20 17:33:00 crc kubenswrapper[4558]: I0120 17:33:00.760534 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-zwldh"] Jan 20 17:33:00 crc kubenswrapper[4558]: I0120 17:33:00.768509 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:33:00 crc kubenswrapper[4558]: I0120 17:33:00.768768 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="acfda63d-d93f-4b65-b9f4-eaf856bc5a92" containerName="glance-log" containerID="cri-o://6506b5c2fe94ddb4f34e480f304b433a04393dac97ebdf236b623e4dcfde7b2f" gracePeriod=30 Jan 20 17:33:00 crc kubenswrapper[4558]: I0120 17:33:00.769221 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="acfda63d-d93f-4b65-b9f4-eaf856bc5a92" containerName="glance-httpd" containerID="cri-o://d0d2f8f1f296f903b655cb3d5bbafcb0fde6cac093a0f3918cb5ca364a74a320" gracePeriod=30 Jan 20 17:33:00 crc kubenswrapper[4558]: I0120 17:33:00.842670 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-755c97c57d-cswnd"] Jan 20 17:33:00 crc kubenswrapper[4558]: I0120 17:33:00.860032 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-755c97c57d-cswnd" Jan 20 17:33:00 crc kubenswrapper[4558]: I0120 17:33:00.865773 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-755c97c57d-cswnd"] Jan 20 17:33:00 crc kubenswrapper[4558]: I0120 17:33:00.923734 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-worker-5b76bfdfd9-6pwrz"] Jan 20 17:33:00 crc kubenswrapper[4558]: I0120 17:33:00.925476 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-5b76bfdfd9-6pwrz" Jan 20 17:33:00 crc kubenswrapper[4558]: I0120 17:33:00.962897 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-worker-5b76bfdfd9-6pwrz"] Jan 20 17:33:00 crc kubenswrapper[4558]: I0120 17:33:00.989320 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hnhc4\" (UniqueName: \"kubernetes.io/projected/09df5e63-9283-4cae-80fe-cfa88f5a9b72-kube-api-access-hnhc4\") pod \"barbican-keystone-listener-755c97c57d-cswnd\" (UID: \"09df5e63-9283-4cae-80fe-cfa88f5a9b72\") " pod="openstack-kuttl-tests/barbican-keystone-listener-755c97c57d-cswnd" Jan 20 17:33:00 crc kubenswrapper[4558]: I0120 17:33:00.989424 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/09df5e63-9283-4cae-80fe-cfa88f5a9b72-config-data-custom\") pod \"barbican-keystone-listener-755c97c57d-cswnd\" (UID: \"09df5e63-9283-4cae-80fe-cfa88f5a9b72\") " pod="openstack-kuttl-tests/barbican-keystone-listener-755c97c57d-cswnd" Jan 20 17:33:00 crc kubenswrapper[4558]: I0120 17:33:00.989576 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dzm29\" (UniqueName: \"kubernetes.io/projected/377a0d4c-edf2-4912-a04f-0f814ea9efdb-kube-api-access-dzm29\") pod \"barbican-worker-5b76bfdfd9-6pwrz\" (UID: \"377a0d4c-edf2-4912-a04f-0f814ea9efdb\") " pod="openstack-kuttl-tests/barbican-worker-5b76bfdfd9-6pwrz" Jan 20 17:33:00 crc kubenswrapper[4558]: I0120 17:33:00.989651 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/377a0d4c-edf2-4912-a04f-0f814ea9efdb-combined-ca-bundle\") pod \"barbican-worker-5b76bfdfd9-6pwrz\" (UID: \"377a0d4c-edf2-4912-a04f-0f814ea9efdb\") " pod="openstack-kuttl-tests/barbican-worker-5b76bfdfd9-6pwrz" Jan 20 17:33:00 crc kubenswrapper[4558]: I0120 17:33:00.989689 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/377a0d4c-edf2-4912-a04f-0f814ea9efdb-logs\") pod \"barbican-worker-5b76bfdfd9-6pwrz\" (UID: \"377a0d4c-edf2-4912-a04f-0f814ea9efdb\") " pod="openstack-kuttl-tests/barbican-worker-5b76bfdfd9-6pwrz" Jan 20 17:33:00 crc kubenswrapper[4558]: I0120 17:33:00.989743 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/09df5e63-9283-4cae-80fe-cfa88f5a9b72-logs\") pod \"barbican-keystone-listener-755c97c57d-cswnd\" (UID: \"09df5e63-9283-4cae-80fe-cfa88f5a9b72\") " pod="openstack-kuttl-tests/barbican-keystone-listener-755c97c57d-cswnd" Jan 20 17:33:00 crc kubenswrapper[4558]: I0120 17:33:00.989780 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/377a0d4c-edf2-4912-a04f-0f814ea9efdb-config-data\") pod \"barbican-worker-5b76bfdfd9-6pwrz\" (UID: \"377a0d4c-edf2-4912-a04f-0f814ea9efdb\") " pod="openstack-kuttl-tests/barbican-worker-5b76bfdfd9-6pwrz" Jan 20 17:33:00 crc kubenswrapper[4558]: I0120 17:33:00.989818 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/377a0d4c-edf2-4912-a04f-0f814ea9efdb-config-data-custom\") pod \"barbican-worker-5b76bfdfd9-6pwrz\" (UID: \"377a0d4c-edf2-4912-a04f-0f814ea9efdb\") " pod="openstack-kuttl-tests/barbican-worker-5b76bfdfd9-6pwrz" Jan 20 17:33:00 crc kubenswrapper[4558]: I0120 17:33:00.989842 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/09df5e63-9283-4cae-80fe-cfa88f5a9b72-config-data\") pod \"barbican-keystone-listener-755c97c57d-cswnd\" (UID: \"09df5e63-9283-4cae-80fe-cfa88f5a9b72\") " pod="openstack-kuttl-tests/barbican-keystone-listener-755c97c57d-cswnd" Jan 20 17:33:00 crc kubenswrapper[4558]: I0120 17:33:00.989877 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09df5e63-9283-4cae-80fe-cfa88f5a9b72-combined-ca-bundle\") pod \"barbican-keystone-listener-755c97c57d-cswnd\" (UID: \"09df5e63-9283-4cae-80fe-cfa88f5a9b72\") " pod="openstack-kuttl-tests/barbican-keystone-listener-755c97c57d-cswnd" Jan 20 17:33:00 crc kubenswrapper[4558]: I0120 17:33:00.995618 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-s78ts"] Jan 20 17:33:00 crc kubenswrapper[4558]: I0120 17:33:00.998040 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-s78ts" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.006152 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"osp-secret" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.018774 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-5c6488b48b-4sj7k"] Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.019076 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-5c6488b48b-4sj7k" podUID="48ebccca-cc18-415f-b6f8-f301f99e5d3a" containerName="neutron-api" containerID="cri-o://399e63aaadfe0fa5502fd43a62acd0f3a44179ff829093b2b62e55e74b7ceac6" gracePeriod=30 Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.019300 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-5c6488b48b-4sj7k" podUID="48ebccca-cc18-415f-b6f8-f301f99e5d3a" containerName="neutron-httpd" containerID="cri-o://38c6eff5636dfec3724781cb0cc82717214fdcbd1a3eb92d714d5934aa1b6128" gracePeriod=30 Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.048153 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-s78ts"] Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.055004 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-5f4bd4844-95dpq"] Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.055221 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/placement-5f4bd4844-95dpq" podUID="1038c23d-3108-472d-b43c-4181fd8ac365" containerName="placement-log" containerID="cri-o://8a1e30fe4204a02290d90187641c27f6cbb6b966db67d83a76c5a54e949c72ae" gracePeriod=30 Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.055766 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/placement-5f4bd4844-95dpq" podUID="1038c23d-3108-472d-b43c-4181fd8ac365" containerName="placement-api" 
containerID="cri-o://e2a18338fd9ab69e3f54fc92343898be5bb0e50f61066e2c11e95d851b940179" gracePeriod=30 Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.063334 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/placement-95f6c56c8-5t8cm"] Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.065241 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-95f6c56c8-5t8cm" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.080473 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/placement-5f4bd4844-95dpq" podUID="1038c23d-3108-472d-b43c-4181fd8ac365" containerName="placement-api" probeResult="failure" output="Get \"https://10.217.1.221:8778/\": EOF" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.092977 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/09df5e63-9283-4cae-80fe-cfa88f5a9b72-config-data\") pod \"barbican-keystone-listener-755c97c57d-cswnd\" (UID: \"09df5e63-9283-4cae-80fe-cfa88f5a9b72\") " pod="openstack-kuttl-tests/barbican-keystone-listener-755c97c57d-cswnd" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.093024 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09df5e63-9283-4cae-80fe-cfa88f5a9b72-combined-ca-bundle\") pod \"barbican-keystone-listener-755c97c57d-cswnd\" (UID: \"09df5e63-9283-4cae-80fe-cfa88f5a9b72\") " pod="openstack-kuttl-tests/barbican-keystone-listener-755c97c57d-cswnd" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.093051 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c-config-data\") pod \"keystone-bootstrap-s78ts\" (UID: \"a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c\") " pod="openstack-kuttl-tests/keystone-bootstrap-s78ts" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.093072 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zk644\" (UniqueName: \"kubernetes.io/projected/a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c-kube-api-access-zk644\") pod \"keystone-bootstrap-s78ts\" (UID: \"a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c\") " pod="openstack-kuttl-tests/keystone-bootstrap-s78ts" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.093093 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hnhc4\" (UniqueName: \"kubernetes.io/projected/09df5e63-9283-4cae-80fe-cfa88f5a9b72-kube-api-access-hnhc4\") pod \"barbican-keystone-listener-755c97c57d-cswnd\" (UID: \"09df5e63-9283-4cae-80fe-cfa88f5a9b72\") " pod="openstack-kuttl-tests/barbican-keystone-listener-755c97c57d-cswnd" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.093126 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/09df5e63-9283-4cae-80fe-cfa88f5a9b72-config-data-custom\") pod \"barbican-keystone-listener-755c97c57d-cswnd\" (UID: \"09df5e63-9283-4cae-80fe-cfa88f5a9b72\") " pod="openstack-kuttl-tests/barbican-keystone-listener-755c97c57d-cswnd" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.093205 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c-scripts\") pod \"keystone-bootstrap-s78ts\" (UID: \"a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c\") " pod="openstack-kuttl-tests/keystone-bootstrap-s78ts" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.093225 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c-fernet-keys\") pod \"keystone-bootstrap-s78ts\" (UID: \"a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c\") " pod="openstack-kuttl-tests/keystone-bootstrap-s78ts" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.093242 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c-combined-ca-bundle\") pod \"keystone-bootstrap-s78ts\" (UID: \"a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c\") " pod="openstack-kuttl-tests/keystone-bootstrap-s78ts" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.093274 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c-credential-keys\") pod \"keystone-bootstrap-s78ts\" (UID: \"a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c\") " pod="openstack-kuttl-tests/keystone-bootstrap-s78ts" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.093299 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dzm29\" (UniqueName: \"kubernetes.io/projected/377a0d4c-edf2-4912-a04f-0f814ea9efdb-kube-api-access-dzm29\") pod \"barbican-worker-5b76bfdfd9-6pwrz\" (UID: \"377a0d4c-edf2-4912-a04f-0f814ea9efdb\") " pod="openstack-kuttl-tests/barbican-worker-5b76bfdfd9-6pwrz" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.093335 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/377a0d4c-edf2-4912-a04f-0f814ea9efdb-combined-ca-bundle\") pod \"barbican-worker-5b76bfdfd9-6pwrz\" (UID: \"377a0d4c-edf2-4912-a04f-0f814ea9efdb\") " pod="openstack-kuttl-tests/barbican-worker-5b76bfdfd9-6pwrz" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.093355 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/377a0d4c-edf2-4912-a04f-0f814ea9efdb-logs\") pod \"barbican-worker-5b76bfdfd9-6pwrz\" (UID: \"377a0d4c-edf2-4912-a04f-0f814ea9efdb\") " pod="openstack-kuttl-tests/barbican-worker-5b76bfdfd9-6pwrz" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.093380 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/09df5e63-9283-4cae-80fe-cfa88f5a9b72-logs\") pod \"barbican-keystone-listener-755c97c57d-cswnd\" (UID: \"09df5e63-9283-4cae-80fe-cfa88f5a9b72\") " pod="openstack-kuttl-tests/barbican-keystone-listener-755c97c57d-cswnd" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.093398 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/377a0d4c-edf2-4912-a04f-0f814ea9efdb-config-data\") pod \"barbican-worker-5b76bfdfd9-6pwrz\" (UID: \"377a0d4c-edf2-4912-a04f-0f814ea9efdb\") " pod="openstack-kuttl-tests/barbican-worker-5b76bfdfd9-6pwrz" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.093422 4558 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/377a0d4c-edf2-4912-a04f-0f814ea9efdb-config-data-custom\") pod \"barbican-worker-5b76bfdfd9-6pwrz\" (UID: \"377a0d4c-edf2-4912-a04f-0f814ea9efdb\") " pod="openstack-kuttl-tests/barbican-worker-5b76bfdfd9-6pwrz" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.097443 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-95f6c56c8-5t8cm"] Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.098612 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/09df5e63-9283-4cae-80fe-cfa88f5a9b72-logs\") pod \"barbican-keystone-listener-755c97c57d-cswnd\" (UID: \"09df5e63-9283-4cae-80fe-cfa88f5a9b72\") " pod="openstack-kuttl-tests/barbican-keystone-listener-755c97c57d-cswnd" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.099188 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/377a0d4c-edf2-4912-a04f-0f814ea9efdb-logs\") pod \"barbican-worker-5b76bfdfd9-6pwrz\" (UID: \"377a0d4c-edf2-4912-a04f-0f814ea9efdb\") " pod="openstack-kuttl-tests/barbican-worker-5b76bfdfd9-6pwrz" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.118929 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/377a0d4c-edf2-4912-a04f-0f814ea9efdb-config-data\") pod \"barbican-worker-5b76bfdfd9-6pwrz\" (UID: \"377a0d4c-edf2-4912-a04f-0f814ea9efdb\") " pod="openstack-kuttl-tests/barbican-worker-5b76bfdfd9-6pwrz" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.119042 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/09df5e63-9283-4cae-80fe-cfa88f5a9b72-config-data-custom\") pod \"barbican-keystone-listener-755c97c57d-cswnd\" (UID: \"09df5e63-9283-4cae-80fe-cfa88f5a9b72\") " pod="openstack-kuttl-tests/barbican-keystone-listener-755c97c57d-cswnd" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.119957 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/377a0d4c-edf2-4912-a04f-0f814ea9efdb-config-data-custom\") pod \"barbican-worker-5b76bfdfd9-6pwrz\" (UID: \"377a0d4c-edf2-4912-a04f-0f814ea9efdb\") " pod="openstack-kuttl-tests/barbican-worker-5b76bfdfd9-6pwrz" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.123054 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/09df5e63-9283-4cae-80fe-cfa88f5a9b72-config-data\") pod \"barbican-keystone-listener-755c97c57d-cswnd\" (UID: \"09df5e63-9283-4cae-80fe-cfa88f5a9b72\") " pod="openstack-kuttl-tests/barbican-keystone-listener-755c97c57d-cswnd" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.128854 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dzm29\" (UniqueName: \"kubernetes.io/projected/377a0d4c-edf2-4912-a04f-0f814ea9efdb-kube-api-access-dzm29\") pod \"barbican-worker-5b76bfdfd9-6pwrz\" (UID: \"377a0d4c-edf2-4912-a04f-0f814ea9efdb\") " pod="openstack-kuttl-tests/barbican-worker-5b76bfdfd9-6pwrz" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.130134 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/377a0d4c-edf2-4912-a04f-0f814ea9efdb-combined-ca-bundle\") pod \"barbican-worker-5b76bfdfd9-6pwrz\" (UID: \"377a0d4c-edf2-4912-a04f-0f814ea9efdb\") " pod="openstack-kuttl-tests/barbican-worker-5b76bfdfd9-6pwrz" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.130766 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hnhc4\" (UniqueName: \"kubernetes.io/projected/09df5e63-9283-4cae-80fe-cfa88f5a9b72-kube-api-access-hnhc4\") pod \"barbican-keystone-listener-755c97c57d-cswnd\" (UID: \"09df5e63-9283-4cae-80fe-cfa88f5a9b72\") " pod="openstack-kuttl-tests/barbican-keystone-listener-755c97c57d-cswnd" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.134734 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09df5e63-9283-4cae-80fe-cfa88f5a9b72-combined-ca-bundle\") pod \"barbican-keystone-listener-755c97c57d-cswnd\" (UID: \"09df5e63-9283-4cae-80fe-cfa88f5a9b72\") " pod="openstack-kuttl-tests/barbican-keystone-listener-755c97c57d-cswnd" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.140215 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-api-b7d7d464-2s2ds"] Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.141871 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-api-b7d7d464-2s2ds" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.153579 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-7c4c8fc458-mv7sx"] Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.155391 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-7c4c8fc458-mv7sx" Jan 20 17:33:01 crc kubenswrapper[4558]: E0120 17:33:01.181517 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="46edde77e0df814c67d5b21fe72058f3eb4efd6acd4a7f68a737219cd8448b40" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.182999 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-api-b7d7d464-2s2ds"] Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.195194 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-7c4c8fc458-mv7sx"] Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.195676 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c-config-data\") pod \"keystone-bootstrap-s78ts\" (UID: \"a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c\") " pod="openstack-kuttl-tests/keystone-bootstrap-s78ts" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.195720 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zk644\" (UniqueName: \"kubernetes.io/projected/a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c-kube-api-access-zk644\") pod \"keystone-bootstrap-s78ts\" (UID: \"a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c\") " pod="openstack-kuttl-tests/keystone-bootstrap-s78ts" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.195759 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-26tpl\" 
(UniqueName: \"kubernetes.io/projected/e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a-kube-api-access-26tpl\") pod \"placement-95f6c56c8-5t8cm\" (UID: \"e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a\") " pod="openstack-kuttl-tests/placement-95f6c56c8-5t8cm" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.195815 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a-combined-ca-bundle\") pod \"placement-95f6c56c8-5t8cm\" (UID: \"e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a\") " pod="openstack-kuttl-tests/placement-95f6c56c8-5t8cm" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.195848 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a-logs\") pod \"placement-95f6c56c8-5t8cm\" (UID: \"e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a\") " pod="openstack-kuttl-tests/placement-95f6c56c8-5t8cm" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.195890 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9691b53-0a90-45ee-bd69-c75b1195eeb8-combined-ca-bundle\") pod \"barbican-api-b7d7d464-2s2ds\" (UID: \"f9691b53-0a90-45ee-bd69-c75b1195eeb8\") " pod="openstack-kuttl-tests/barbican-api-b7d7d464-2s2ds" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.195910 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c-scripts\") pod \"keystone-bootstrap-s78ts\" (UID: \"a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c\") " pod="openstack-kuttl-tests/keystone-bootstrap-s78ts" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.195939 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c-fernet-keys\") pod \"keystone-bootstrap-s78ts\" (UID: \"a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c\") " pod="openstack-kuttl-tests/keystone-bootstrap-s78ts" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.195956 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f9691b53-0a90-45ee-bd69-c75b1195eeb8-logs\") pod \"barbican-api-b7d7d464-2s2ds\" (UID: \"f9691b53-0a90-45ee-bd69-c75b1195eeb8\") " pod="openstack-kuttl-tests/barbican-api-b7d7d464-2s2ds" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.195972 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c-combined-ca-bundle\") pod \"keystone-bootstrap-s78ts\" (UID: \"a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c\") " pod="openstack-kuttl-tests/keystone-bootstrap-s78ts" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.196000 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c-credential-keys\") pod \"keystone-bootstrap-s78ts\" (UID: \"a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c\") " pod="openstack-kuttl-tests/keystone-bootstrap-s78ts" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.196018 4558 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a-scripts\") pod \"placement-95f6c56c8-5t8cm\" (UID: \"e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a\") " pod="openstack-kuttl-tests/placement-95f6c56c8-5t8cm" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.196034 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a-internal-tls-certs\") pod \"placement-95f6c56c8-5t8cm\" (UID: \"e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a\") " pod="openstack-kuttl-tests/placement-95f6c56c8-5t8cm" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.196061 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a-config-data\") pod \"placement-95f6c56c8-5t8cm\" (UID: \"e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a\") " pod="openstack-kuttl-tests/placement-95f6c56c8-5t8cm" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.196098 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a-public-tls-certs\") pod \"placement-95f6c56c8-5t8cm\" (UID: \"e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a\") " pod="openstack-kuttl-tests/placement-95f6c56c8-5t8cm" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.196118 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9691b53-0a90-45ee-bd69-c75b1195eeb8-config-data\") pod \"barbican-api-b7d7d464-2s2ds\" (UID: \"f9691b53-0a90-45ee-bd69-c75b1195eeb8\") " pod="openstack-kuttl-tests/barbican-api-b7d7d464-2s2ds" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.196141 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4kdg5\" (UniqueName: \"kubernetes.io/projected/f9691b53-0a90-45ee-bd69-c75b1195eeb8-kube-api-access-4kdg5\") pod \"barbican-api-b7d7d464-2s2ds\" (UID: \"f9691b53-0a90-45ee-bd69-c75b1195eeb8\") " pod="openstack-kuttl-tests/barbican-api-b7d7d464-2s2ds" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.196224 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f9691b53-0a90-45ee-bd69-c75b1195eeb8-config-data-custom\") pod \"barbican-api-b7d7d464-2s2ds\" (UID: \"f9691b53-0a90-45ee-bd69-c75b1195eeb8\") " pod="openstack-kuttl-tests/barbican-api-b7d7d464-2s2ds" Jan 20 17:33:01 crc kubenswrapper[4558]: E0120 17:33:01.197083 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="46edde77e0df814c67d5b21fe72058f3eb4efd6acd4a7f68a737219cd8448b40" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.200078 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c-fernet-keys\") pod \"keystone-bootstrap-s78ts\" (UID: \"a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c\") " 
pod="openstack-kuttl-tests/keystone-bootstrap-s78ts" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.201698 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c-combined-ca-bundle\") pod \"keystone-bootstrap-s78ts\" (UID: \"a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c\") " pod="openstack-kuttl-tests/keystone-bootstrap-s78ts" Jan 20 17:33:01 crc kubenswrapper[4558]: E0120 17:33:01.201863 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="46edde77e0df814c67d5b21fe72058f3eb4efd6acd4a7f68a737219cd8448b40" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:33:01 crc kubenswrapper[4558]: E0120 17:33:01.201969 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podUID="c686e4c2-68d0-4999-9078-90c70927d9ae" containerName="nova-cell1-conductor-conductor" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.202327 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c-credential-keys\") pod \"keystone-bootstrap-s78ts\" (UID: \"a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c\") " pod="openstack-kuttl-tests/keystone-bootstrap-s78ts" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.204380 4558 generic.go:334] "Generic (PLEG): container finished" podID="846821b5-5647-4ffa-a1bb-8695fe90cbcc" containerID="72da703d73712000127a5c91a66e96458bf2133274e58fb7bcbe402b18ccca9b" exitCode=143 Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.204440 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"846821b5-5647-4ffa-a1bb-8695fe90cbcc","Type":"ContainerDied","Data":"72da703d73712000127a5c91a66e96458bf2133274e58fb7bcbe402b18ccca9b"} Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.207178 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c-config-data\") pod \"keystone-bootstrap-s78ts\" (UID: \"a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c\") " pod="openstack-kuttl-tests/keystone-bootstrap-s78ts" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.209411 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c-scripts\") pod \"keystone-bootstrap-s78ts\" (UID: \"a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c\") " pod="openstack-kuttl-tests/keystone-bootstrap-s78ts" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.212540 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zk644\" (UniqueName: \"kubernetes.io/projected/a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c-kube-api-access-zk644\") pod \"keystone-bootstrap-s78ts\" (UID: \"a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c\") " pod="openstack-kuttl-tests/keystone-bootstrap-s78ts" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.216986 4558 generic.go:334] "Generic (PLEG): container finished" podID="32e34e3d-8cbb-42df-9b7a-4b99b025cdf0" 
containerID="41deae5788f625f14add58324272ddd5cc382883dd1b3b32b04ceaa69dd8ec1e" exitCode=143 Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.217042 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"32e34e3d-8cbb-42df-9b7a-4b99b025cdf0","Type":"ContainerDied","Data":"41deae5788f625f14add58324272ddd5cc382883dd1b3b32b04ceaa69dd8ec1e"} Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.219468 4558 generic.go:334] "Generic (PLEG): container finished" podID="acfda63d-d93f-4b65-b9f4-eaf856bc5a92" containerID="6506b5c2fe94ddb4f34e480f304b433a04393dac97ebdf236b623e4dcfde7b2f" exitCode=143 Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.219510 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"acfda63d-d93f-4b65-b9f4-eaf856bc5a92","Type":"ContainerDied","Data":"6506b5c2fe94ddb4f34e480f304b433a04393dac97ebdf236b623e4dcfde7b2f"} Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.227441 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"b115808a-3ae3-40c5-b330-f58aa6af7503","Type":"ContainerStarted","Data":"9521f46d4a99703b801e57ee0c4402fc90e695fd2198c5972592e15eac8d40d6"} Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.236565 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-755c97c57d-cswnd" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.244687 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-5b76bfdfd9-6pwrz" Jan 20 17:33:01 crc kubenswrapper[4558]: E0120 17:33:01.250672 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a45e6f8d25e2903ff94497e102fe9cb0b1841d235bfac8ac8e62657bad6c6b17" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:33:01 crc kubenswrapper[4558]: E0120 17:33:01.251855 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a45e6f8d25e2903ff94497e102fe9cb0b1841d235bfac8ac8e62657bad6c6b17" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:33:01 crc kubenswrapper[4558]: E0120 17:33:01.253613 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a45e6f8d25e2903ff94497e102fe9cb0b1841d235bfac8ac8e62657bad6c6b17" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:33:01 crc kubenswrapper[4558]: E0120 17:33:01.253671 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="6dbc6996-80bf-4f86-9acf-f74dd04e51b1" containerName="nova-cell0-conductor-conductor" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.297809 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/f9691b53-0a90-45ee-bd69-c75b1195eeb8-combined-ca-bundle\") pod \"barbican-api-b7d7d464-2s2ds\" (UID: \"f9691b53-0a90-45ee-bd69-c75b1195eeb8\") " pod="openstack-kuttl-tests/barbican-api-b7d7d464-2s2ds" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.297895 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f9691b53-0a90-45ee-bd69-c75b1195eeb8-logs\") pod \"barbican-api-b7d7d464-2s2ds\" (UID: \"f9691b53-0a90-45ee-bd69-c75b1195eeb8\") " pod="openstack-kuttl-tests/barbican-api-b7d7d464-2s2ds" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.297930 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a-internal-tls-certs\") pod \"placement-95f6c56c8-5t8cm\" (UID: \"e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a\") " pod="openstack-kuttl-tests/placement-95f6c56c8-5t8cm" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.297970 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a-scripts\") pod \"placement-95f6c56c8-5t8cm\" (UID: \"e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a\") " pod="openstack-kuttl-tests/placement-95f6c56c8-5t8cm" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.297996 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a-config-data\") pod \"placement-95f6c56c8-5t8cm\" (UID: \"e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a\") " pod="openstack-kuttl-tests/placement-95f6c56c8-5t8cm" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.298673 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f9691b53-0a90-45ee-bd69-c75b1195eeb8-logs\") pod \"barbican-api-b7d7d464-2s2ds\" (UID: \"f9691b53-0a90-45ee-bd69-c75b1195eeb8\") " pod="openstack-kuttl-tests/barbican-api-b7d7d464-2s2ds" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.299186 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a9cf38d-4b8b-453b-bc39-b97deab68b72-combined-ca-bundle\") pod \"neutron-7c4c8fc458-mv7sx\" (UID: \"3a9cf38d-4b8b-453b-bc39-b97deab68b72\") " pod="openstack-kuttl-tests/neutron-7c4c8fc458-mv7sx" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.299244 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9691b53-0a90-45ee-bd69-c75b1195eeb8-config-data\") pod \"barbican-api-b7d7d464-2s2ds\" (UID: \"f9691b53-0a90-45ee-bd69-c75b1195eeb8\") " pod="openstack-kuttl-tests/barbican-api-b7d7d464-2s2ds" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.299262 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a-public-tls-certs\") pod \"placement-95f6c56c8-5t8cm\" (UID: \"e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a\") " pod="openstack-kuttl-tests/placement-95f6c56c8-5t8cm" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.299281 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/secret/3a9cf38d-4b8b-453b-bc39-b97deab68b72-config\") pod \"neutron-7c4c8fc458-mv7sx\" (UID: \"3a9cf38d-4b8b-453b-bc39-b97deab68b72\") " pod="openstack-kuttl-tests/neutron-7c4c8fc458-mv7sx" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.299300 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4kdg5\" (UniqueName: \"kubernetes.io/projected/f9691b53-0a90-45ee-bd69-c75b1195eeb8-kube-api-access-4kdg5\") pod \"barbican-api-b7d7d464-2s2ds\" (UID: \"f9691b53-0a90-45ee-bd69-c75b1195eeb8\") " pod="openstack-kuttl-tests/barbican-api-b7d7d464-2s2ds" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.299345 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f9691b53-0a90-45ee-bd69-c75b1195eeb8-config-data-custom\") pod \"barbican-api-b7d7d464-2s2ds\" (UID: \"f9691b53-0a90-45ee-bd69-c75b1195eeb8\") " pod="openstack-kuttl-tests/barbican-api-b7d7d464-2s2ds" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.299362 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/3a9cf38d-4b8b-453b-bc39-b97deab68b72-httpd-config\") pod \"neutron-7c4c8fc458-mv7sx\" (UID: \"3a9cf38d-4b8b-453b-bc39-b97deab68b72\") " pod="openstack-kuttl-tests/neutron-7c4c8fc458-mv7sx" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.299400 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mtl24\" (UniqueName: \"kubernetes.io/projected/3a9cf38d-4b8b-453b-bc39-b97deab68b72-kube-api-access-mtl24\") pod \"neutron-7c4c8fc458-mv7sx\" (UID: \"3a9cf38d-4b8b-453b-bc39-b97deab68b72\") " pod="openstack-kuttl-tests/neutron-7c4c8fc458-mv7sx" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.299504 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-26tpl\" (UniqueName: \"kubernetes.io/projected/e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a-kube-api-access-26tpl\") pod \"placement-95f6c56c8-5t8cm\" (UID: \"e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a\") " pod="openstack-kuttl-tests/placement-95f6c56c8-5t8cm" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.299551 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a-combined-ca-bundle\") pod \"placement-95f6c56c8-5t8cm\" (UID: \"e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a\") " pod="openstack-kuttl-tests/placement-95f6c56c8-5t8cm" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.299592 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a-logs\") pod \"placement-95f6c56c8-5t8cm\" (UID: \"e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a\") " pod="openstack-kuttl-tests/placement-95f6c56c8-5t8cm" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.299929 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a-logs\") pod \"placement-95f6c56c8-5t8cm\" (UID: \"e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a\") " pod="openstack-kuttl-tests/placement-95f6c56c8-5t8cm" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.301653 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a-config-data\") pod \"placement-95f6c56c8-5t8cm\" (UID: \"e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a\") " pod="openstack-kuttl-tests/placement-95f6c56c8-5t8cm" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.304019 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a-internal-tls-certs\") pod \"placement-95f6c56c8-5t8cm\" (UID: \"e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a\") " pod="openstack-kuttl-tests/placement-95f6c56c8-5t8cm" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.305279 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9691b53-0a90-45ee-bd69-c75b1195eeb8-config-data\") pod \"barbican-api-b7d7d464-2s2ds\" (UID: \"f9691b53-0a90-45ee-bd69-c75b1195eeb8\") " pod="openstack-kuttl-tests/barbican-api-b7d7d464-2s2ds" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.305905 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9691b53-0a90-45ee-bd69-c75b1195eeb8-combined-ca-bundle\") pod \"barbican-api-b7d7d464-2s2ds\" (UID: \"f9691b53-0a90-45ee-bd69-c75b1195eeb8\") " pod="openstack-kuttl-tests/barbican-api-b7d7d464-2s2ds" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.306230 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a-scripts\") pod \"placement-95f6c56c8-5t8cm\" (UID: \"e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a\") " pod="openstack-kuttl-tests/placement-95f6c56c8-5t8cm" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.306388 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a-public-tls-certs\") pod \"placement-95f6c56c8-5t8cm\" (UID: \"e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a\") " pod="openstack-kuttl-tests/placement-95f6c56c8-5t8cm" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.307636 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f9691b53-0a90-45ee-bd69-c75b1195eeb8-config-data-custom\") pod \"barbican-api-b7d7d464-2s2ds\" (UID: \"f9691b53-0a90-45ee-bd69-c75b1195eeb8\") " pod="openstack-kuttl-tests/barbican-api-b7d7d464-2s2ds" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.310713 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a-combined-ca-bundle\") pod \"placement-95f6c56c8-5t8cm\" (UID: \"e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a\") " pod="openstack-kuttl-tests/placement-95f6c56c8-5t8cm" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.322432 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-26tpl\" (UniqueName: \"kubernetes.io/projected/e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a-kube-api-access-26tpl\") pod \"placement-95f6c56c8-5t8cm\" (UID: \"e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a\") " pod="openstack-kuttl-tests/placement-95f6c56c8-5t8cm" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.323405 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4kdg5\" (UniqueName: 
\"kubernetes.io/projected/f9691b53-0a90-45ee-bd69-c75b1195eeb8-kube-api-access-4kdg5\") pod \"barbican-api-b7d7d464-2s2ds\" (UID: \"f9691b53-0a90-45ee-bd69-c75b1195eeb8\") " pod="openstack-kuttl-tests/barbican-api-b7d7d464-2s2ds" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.348954 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-s78ts" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.390731 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/neutron-5c6488b48b-4sj7k" podUID="48ebccca-cc18-415f-b6f8-f301f99e5d3a" containerName="neutron-httpd" probeResult="failure" output="Get \"http://10.217.1.213:9696/\": read tcp 10.217.0.2:39846->10.217.1.213:9696: read: connection reset by peer" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.401908 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-95f6c56c8-5t8cm" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.402158 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a9cf38d-4b8b-453b-bc39-b97deab68b72-combined-ca-bundle\") pod \"neutron-7c4c8fc458-mv7sx\" (UID: \"3a9cf38d-4b8b-453b-bc39-b97deab68b72\") " pod="openstack-kuttl-tests/neutron-7c4c8fc458-mv7sx" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.402235 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/3a9cf38d-4b8b-453b-bc39-b97deab68b72-config\") pod \"neutron-7c4c8fc458-mv7sx\" (UID: \"3a9cf38d-4b8b-453b-bc39-b97deab68b72\") " pod="openstack-kuttl-tests/neutron-7c4c8fc458-mv7sx" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.402274 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/3a9cf38d-4b8b-453b-bc39-b97deab68b72-httpd-config\") pod \"neutron-7c4c8fc458-mv7sx\" (UID: \"3a9cf38d-4b8b-453b-bc39-b97deab68b72\") " pod="openstack-kuttl-tests/neutron-7c4c8fc458-mv7sx" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.402303 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mtl24\" (UniqueName: \"kubernetes.io/projected/3a9cf38d-4b8b-453b-bc39-b97deab68b72-kube-api-access-mtl24\") pod \"neutron-7c4c8fc458-mv7sx\" (UID: \"3a9cf38d-4b8b-453b-bc39-b97deab68b72\") " pod="openstack-kuttl-tests/neutron-7c4c8fc458-mv7sx" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.410882 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/3a9cf38d-4b8b-453b-bc39-b97deab68b72-httpd-config\") pod \"neutron-7c4c8fc458-mv7sx\" (UID: \"3a9cf38d-4b8b-453b-bc39-b97deab68b72\") " pod="openstack-kuttl-tests/neutron-7c4c8fc458-mv7sx" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.411034 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/3a9cf38d-4b8b-453b-bc39-b97deab68b72-config\") pod \"neutron-7c4c8fc458-mv7sx\" (UID: \"3a9cf38d-4b8b-453b-bc39-b97deab68b72\") " pod="openstack-kuttl-tests/neutron-7c4c8fc458-mv7sx" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.425744 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mtl24\" (UniqueName: 
\"kubernetes.io/projected/3a9cf38d-4b8b-453b-bc39-b97deab68b72-kube-api-access-mtl24\") pod \"neutron-7c4c8fc458-mv7sx\" (UID: \"3a9cf38d-4b8b-453b-bc39-b97deab68b72\") " pod="openstack-kuttl-tests/neutron-7c4c8fc458-mv7sx" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.432777 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a9cf38d-4b8b-453b-bc39-b97deab68b72-combined-ca-bundle\") pod \"neutron-7c4c8fc458-mv7sx\" (UID: \"3a9cf38d-4b8b-453b-bc39-b97deab68b72\") " pod="openstack-kuttl-tests/neutron-7c4c8fc458-mv7sx" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.476133 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-api-b7d7d464-2s2ds" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.495694 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-7c4c8fc458-mv7sx" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.566732 4558 scope.go:117] "RemoveContainer" containerID="f275e9f5de330b3c79316d5871106c6bcf90b698e0744c5beb28da45c336b36d" Jan 20 17:33:01 crc kubenswrapper[4558]: E0120 17:33:01.567006 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.631133 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.631357 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovn-northd-0" podUID="6ecac33c-30fa-4996-b271-318148e62416" containerName="ovn-northd" containerID="cri-o://a333ee4a53687d89af384ce1c1e7dd6ab814cf37eae12fca495be4bf694c8f99" gracePeriod=30 Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.631717 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovn-northd-0" podUID="6ecac33c-30fa-4996-b271-318148e62416" containerName="openstack-network-exporter" containerID="cri-o://ae89028fc849f2ea980139f852b630c0eb1659da3362f8ae021fcba4d043bb4d" gracePeriod=30 Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.688187 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.928443 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.938194 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.956259 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/barbican-api-74c98446fb-d49s5" podUID="e83a55be-4002-403a-911f-8f33c234b7e4" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.1.171:9311/healthcheck\": read tcp 10.217.0.2:52456->10.217.1.171:9311: read: connection reset by peer" Jan 20 17:33:01 crc kubenswrapper[4558]: I0120 17:33:01.957288 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/barbican-api-74c98446fb-d49s5" podUID="e83a55be-4002-403a-911f-8f33c234b7e4" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.1.171:9311/healthcheck\": read tcp 10.217.0.2:52458->10.217.1.171:9311: read: connection reset by peer" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.030209 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32e34e3d-8cbb-42df-9b7a-4b99b025cdf0-combined-ca-bundle\") pod \"32e34e3d-8cbb-42df-9b7a-4b99b025cdf0\" (UID: \"32e34e3d-8cbb-42df-9b7a-4b99b025cdf0\") " Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.030273 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/acfda63d-d93f-4b65-b9f4-eaf856bc5a92-logs\") pod \"acfda63d-d93f-4b65-b9f4-eaf856bc5a92\" (UID: \"acfda63d-d93f-4b65-b9f4-eaf856bc5a92\") " Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.030302 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/acfda63d-d93f-4b65-b9f4-eaf856bc5a92-scripts\") pod \"acfda63d-d93f-4b65-b9f4-eaf856bc5a92\" (UID: \"acfda63d-d93f-4b65-b9f4-eaf856bc5a92\") " Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.030359 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sz6zp\" (UniqueName: \"kubernetes.io/projected/acfda63d-d93f-4b65-b9f4-eaf856bc5a92-kube-api-access-sz6zp\") pod \"acfda63d-d93f-4b65-b9f4-eaf856bc5a92\" (UID: \"acfda63d-d93f-4b65-b9f4-eaf856bc5a92\") " Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.030389 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acfda63d-d93f-4b65-b9f4-eaf856bc5a92-combined-ca-bundle\") pod \"acfda63d-d93f-4b65-b9f4-eaf856bc5a92\" (UID: \"acfda63d-d93f-4b65-b9f4-eaf856bc5a92\") " Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.030474 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage16-crc\") pod \"acfda63d-d93f-4b65-b9f4-eaf856bc5a92\" (UID: \"acfda63d-d93f-4b65-b9f4-eaf856bc5a92\") " Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.030526 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32e34e3d-8cbb-42df-9b7a-4b99b025cdf0-config-data\") pod \"32e34e3d-8cbb-42df-9b7a-4b99b025cdf0\" (UID: \"32e34e3d-8cbb-42df-9b7a-4b99b025cdf0\") " Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.030547 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: 
\"kubernetes.io/empty-dir/32e34e3d-8cbb-42df-9b7a-4b99b025cdf0-httpd-run\") pod \"32e34e3d-8cbb-42df-9b7a-4b99b025cdf0\" (UID: \"32e34e3d-8cbb-42df-9b7a-4b99b025cdf0\") " Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.030582 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/acfda63d-d93f-4b65-b9f4-eaf856bc5a92-config-data\") pod \"acfda63d-d93f-4b65-b9f4-eaf856bc5a92\" (UID: \"acfda63d-d93f-4b65-b9f4-eaf856bc5a92\") " Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.030624 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"32e34e3d-8cbb-42df-9b7a-4b99b025cdf0\" (UID: \"32e34e3d-8cbb-42df-9b7a-4b99b025cdf0\") " Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.030667 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tfhgz\" (UniqueName: \"kubernetes.io/projected/32e34e3d-8cbb-42df-9b7a-4b99b025cdf0-kube-api-access-tfhgz\") pod \"32e34e3d-8cbb-42df-9b7a-4b99b025cdf0\" (UID: \"32e34e3d-8cbb-42df-9b7a-4b99b025cdf0\") " Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.030693 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/32e34e3d-8cbb-42df-9b7a-4b99b025cdf0-logs\") pod \"32e34e3d-8cbb-42df-9b7a-4b99b025cdf0\" (UID: \"32e34e3d-8cbb-42df-9b7a-4b99b025cdf0\") " Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.030756 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/acfda63d-d93f-4b65-b9f4-eaf856bc5a92-httpd-run\") pod \"acfda63d-d93f-4b65-b9f4-eaf856bc5a92\" (UID: \"acfda63d-d93f-4b65-b9f4-eaf856bc5a92\") " Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.030777 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/32e34e3d-8cbb-42df-9b7a-4b99b025cdf0-scripts\") pod \"32e34e3d-8cbb-42df-9b7a-4b99b025cdf0\" (UID: \"32e34e3d-8cbb-42df-9b7a-4b99b025cdf0\") " Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.033885 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/32e34e3d-8cbb-42df-9b7a-4b99b025cdf0-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "32e34e3d-8cbb-42df-9b7a-4b99b025cdf0" (UID: "32e34e3d-8cbb-42df-9b7a-4b99b025cdf0"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.035492 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/acfda63d-d93f-4b65-b9f4-eaf856bc5a92-logs" (OuterVolumeSpecName: "logs") pod "acfda63d-d93f-4b65-b9f4-eaf856bc5a92" (UID: "acfda63d-d93f-4b65-b9f4-eaf856bc5a92"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:33:02 crc kubenswrapper[4558]: E0120 17:33:02.036891 4558 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode83a55be_4002_403a_911f_8f33c234b7e4.slice/crio-9d39db634c3fa17e206e8d37ce291551fd4325bd609316a24c87cd44c0b5f95d.scope\": RecentStats: unable to find data in memory cache]" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.039556 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/32e34e3d-8cbb-42df-9b7a-4b99b025cdf0-logs" (OuterVolumeSpecName: "logs") pod "32e34e3d-8cbb-42df-9b7a-4b99b025cdf0" (UID: "32e34e3d-8cbb-42df-9b7a-4b99b025cdf0"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.041483 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/acfda63d-d93f-4b65-b9f4-eaf856bc5a92-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "acfda63d-d93f-4b65-b9f4-eaf856bc5a92" (UID: "acfda63d-d93f-4b65-b9f4-eaf856bc5a92"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.044319 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/acfda63d-d93f-4b65-b9f4-eaf856bc5a92-scripts" (OuterVolumeSpecName: "scripts") pod "acfda63d-d93f-4b65-b9f4-eaf856bc5a92" (UID: "acfda63d-d93f-4b65-b9f4-eaf856bc5a92"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.046272 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/32e34e3d-8cbb-42df-9b7a-4b99b025cdf0-kube-api-access-tfhgz" (OuterVolumeSpecName: "kube-api-access-tfhgz") pod "32e34e3d-8cbb-42df-9b7a-4b99b025cdf0" (UID: "32e34e3d-8cbb-42df-9b7a-4b99b025cdf0"). InnerVolumeSpecName "kube-api-access-tfhgz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.046382 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/acfda63d-d93f-4b65-b9f4-eaf856bc5a92-kube-api-access-sz6zp" (OuterVolumeSpecName: "kube-api-access-sz6zp") pod "acfda63d-d93f-4b65-b9f4-eaf856bc5a92" (UID: "acfda63d-d93f-4b65-b9f4-eaf856bc5a92"). InnerVolumeSpecName "kube-api-access-sz6zp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.062999 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage15-crc" (OuterVolumeSpecName: "glance") pod "32e34e3d-8cbb-42df-9b7a-4b99b025cdf0" (UID: "32e34e3d-8cbb-42df-9b7a-4b99b025cdf0"). InnerVolumeSpecName "local-storage15-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.063515 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage16-crc" (OuterVolumeSpecName: "glance") pod "acfda63d-d93f-4b65-b9f4-eaf856bc5a92" (UID: "acfda63d-d93f-4b65-b9f4-eaf856bc5a92"). InnerVolumeSpecName "local-storage16-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.066238 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32e34e3d-8cbb-42df-9b7a-4b99b025cdf0-scripts" (OuterVolumeSpecName: "scripts") pod "32e34e3d-8cbb-42df-9b7a-4b99b025cdf0" (UID: "32e34e3d-8cbb-42df-9b7a-4b99b025cdf0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.094868 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/acfda63d-d93f-4b65-b9f4-eaf856bc5a92-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "acfda63d-d93f-4b65-b9f4-eaf856bc5a92" (UID: "acfda63d-d93f-4b65-b9f4-eaf856bc5a92"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.097252 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32e34e3d-8cbb-42df-9b7a-4b99b025cdf0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "32e34e3d-8cbb-42df-9b7a-4b99b025cdf0" (UID: "32e34e3d-8cbb-42df-9b7a-4b99b025cdf0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.113337 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32e34e3d-8cbb-42df-9b7a-4b99b025cdf0-config-data" (OuterVolumeSpecName: "config-data") pod "32e34e3d-8cbb-42df-9b7a-4b99b025cdf0" (UID: "32e34e3d-8cbb-42df-9b7a-4b99b025cdf0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.133310 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32e34e3d-8cbb-42df-9b7a-4b99b025cdf0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.133330 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/acfda63d-d93f-4b65-b9f4-eaf856bc5a92-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.133342 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/acfda63d-d93f-4b65-b9f4-eaf856bc5a92-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.133353 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sz6zp\" (UniqueName: \"kubernetes.io/projected/acfda63d-d93f-4b65-b9f4-eaf856bc5a92-kube-api-access-sz6zp\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.133363 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acfda63d-d93f-4b65-b9f4-eaf856bc5a92-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.133383 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage16-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage16-crc\") on node \"crc\" " Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.133392 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/32e34e3d-8cbb-42df-9b7a-4b99b025cdf0-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.133400 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/32e34e3d-8cbb-42df-9b7a-4b99b025cdf0-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.133412 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") on node \"crc\" " Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.133422 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tfhgz\" (UniqueName: \"kubernetes.io/projected/32e34e3d-8cbb-42df-9b7a-4b99b025cdf0-kube-api-access-tfhgz\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.133430 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/32e34e3d-8cbb-42df-9b7a-4b99b025cdf0-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.133438 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/acfda63d-d93f-4b65-b9f4-eaf856bc5a92-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.133446 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/32e34e3d-8cbb-42df-9b7a-4b99b025cdf0-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.154053 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage16-crc" (UniqueName: "kubernetes.io/local-volume/local-storage16-crc") on node "crc" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.162869 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage15-crc" (UniqueName: "kubernetes.io/local-volume/local-storage15-crc") on node "crc" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.186970 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.187002 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.201626 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/acfda63d-d93f-4b65-b9f4-eaf856bc5a92-config-data" (OuterVolumeSpecName: "config-data") pod "acfda63d-d93f-4b65-b9f4-eaf856bc5a92" (UID: "acfda63d-d93f-4b65-b9f4-eaf856bc5a92"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.217818 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-s78ts"] Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.243049 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage16-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage16-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.243069 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/acfda63d-d93f-4b65-b9f4-eaf856bc5a92-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.243081 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.255879 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-755c97c57d-cswnd"] Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.269769 4558 generic.go:334] "Generic (PLEG): container finished" podID="6ecac33c-30fa-4996-b271-318148e62416" containerID="ae89028fc849f2ea980139f852b630c0eb1659da3362f8ae021fcba4d043bb4d" exitCode=2 Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.270541 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"6ecac33c-30fa-4996-b271-318148e62416","Type":"ContainerDied","Data":"ae89028fc849f2ea980139f852b630c0eb1659da3362f8ae021fcba4d043bb4d"} Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.277482 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-worker-5b76bfdfd9-6pwrz"] Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.288919 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-api-b7d7d464-2s2ds"] Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.295557 4558 generic.go:334] "Generic (PLEG): container finished" podID="48ebccca-cc18-415f-b6f8-f301f99e5d3a" containerID="38c6eff5636dfec3724781cb0cc82717214fdcbd1a3eb92d714d5934aa1b6128" exitCode=0 Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.295620 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-5c6488b48b-4sj7k" event={"ID":"48ebccca-cc18-415f-b6f8-f301f99e5d3a","Type":"ContainerDied","Data":"38c6eff5636dfec3724781cb0cc82717214fdcbd1a3eb92d714d5934aa1b6128"} Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.296779 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-95f6c56c8-5t8cm"] Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.299830 4558 generic.go:334] "Generic (PLEG): container finished" podID="6dbc6996-80bf-4f86-9acf-f74dd04e51b1" containerID="a45e6f8d25e2903ff94497e102fe9cb0b1841d235bfac8ac8e62657bad6c6b17" exitCode=0 Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.299959 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"6dbc6996-80bf-4f86-9acf-f74dd04e51b1","Type":"ContainerDied","Data":"a45e6f8d25e2903ff94497e102fe9cb0b1841d235bfac8ac8e62657bad6c6b17"} Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.322510 4558 generic.go:334] "Generic (PLEG): container finished" 
podID="32e34e3d-8cbb-42df-9b7a-4b99b025cdf0" containerID="32cf2a0dd770e34c9d2400413f255671ccbd9a67bbd106457f3e7faaa2b2df50" exitCode=0 Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.322582 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"32e34e3d-8cbb-42df-9b7a-4b99b025cdf0","Type":"ContainerDied","Data":"32cf2a0dd770e34c9d2400413f255671ccbd9a67bbd106457f3e7faaa2b2df50"} Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.322630 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"32e34e3d-8cbb-42df-9b7a-4b99b025cdf0","Type":"ContainerDied","Data":"baa3484dda0c02685e8f957a1c16b8286d35d860abba01160d98c4a69ee4f370"} Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.322654 4558 scope.go:117] "RemoveContainer" containerID="32cf2a0dd770e34c9d2400413f255671ccbd9a67bbd106457f3e7faaa2b2df50" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.322828 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:33:02 crc kubenswrapper[4558]: E0120 17:33:02.351056 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a333ee4a53687d89af384ce1c1e7dd6ab814cf37eae12fca495be4bf694c8f99" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.363664 4558 generic.go:334] "Generic (PLEG): container finished" podID="e83a55be-4002-403a-911f-8f33c234b7e4" containerID="9d39db634c3fa17e206e8d37ce291551fd4325bd609316a24c87cd44c0b5f95d" exitCode=0 Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.363922 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-74c98446fb-d49s5" event={"ID":"e83a55be-4002-403a-911f-8f33c234b7e4","Type":"ContainerDied","Data":"9d39db634c3fa17e206e8d37ce291551fd4325bd609316a24c87cd44c0b5f95d"} Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.367997 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-755c97c57d-cswnd" event={"ID":"09df5e63-9283-4cae-80fe-cfa88f5a9b72","Type":"ContainerStarted","Data":"89b0d3ea7ad3a30fd9ad478e238eb045ab4c94d8dcbfebf59a338b01371c9018"} Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.378077 4558 generic.go:334] "Generic (PLEG): container finished" podID="1038c23d-3108-472d-b43c-4181fd8ac365" containerID="8a1e30fe4204a02290d90187641c27f6cbb6b966db67d83a76c5a54e949c72ae" exitCode=143 Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.378208 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-5f4bd4844-95dpq" event={"ID":"1038c23d-3108-472d-b43c-4181fd8ac365","Type":"ContainerDied","Data":"8a1e30fe4204a02290d90187641c27f6cbb6b966db67d83a76c5a54e949c72ae"} Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.383884 4558 generic.go:334] "Generic (PLEG): container finished" podID="acfda63d-d93f-4b65-b9f4-eaf856bc5a92" containerID="d0d2f8f1f296f903b655cb3d5bbafcb0fde6cac093a0f3918cb5ca364a74a320" exitCode=0 Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.383915 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" 
event={"ID":"acfda63d-d93f-4b65-b9f4-eaf856bc5a92","Type":"ContainerDied","Data":"d0d2f8f1f296f903b655cb3d5bbafcb0fde6cac093a0f3918cb5ca364a74a320"} Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.383940 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"acfda63d-d93f-4b65-b9f4-eaf856bc5a92","Type":"ContainerDied","Data":"47ae37c50e187e3af5b1f199ecc51656d1271150d317551e0365ad178b647554"} Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.383998 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:33:02 crc kubenswrapper[4558]: E0120 17:33:02.388445 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a333ee4a53687d89af384ce1c1e7dd6ab814cf37eae12fca495be4bf694c8f99" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.390753 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:33:02 crc kubenswrapper[4558]: E0120 17:33:02.408202 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a333ee4a53687d89af384ce1c1e7dd6ab814cf37eae12fca495be4bf694c8f99" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Jan 20 17:33:02 crc kubenswrapper[4558]: E0120 17:33:02.408369 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/ovn-northd-0" podUID="6ecac33c-30fa-4996-b271-318148e62416" containerName="ovn-northd" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.409173 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.419234 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:33:02 crc kubenswrapper[4558]: E0120 17:33:02.419757 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="acfda63d-d93f-4b65-b9f4-eaf856bc5a92" containerName="glance-httpd" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.419772 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="acfda63d-d93f-4b65-b9f4-eaf856bc5a92" containerName="glance-httpd" Jan 20 17:33:02 crc kubenswrapper[4558]: E0120 17:33:02.419801 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="acfda63d-d93f-4b65-b9f4-eaf856bc5a92" containerName="glance-log" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.419818 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="acfda63d-d93f-4b65-b9f4-eaf856bc5a92" containerName="glance-log" Jan 20 17:33:02 crc kubenswrapper[4558]: E0120 17:33:02.419832 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32e34e3d-8cbb-42df-9b7a-4b99b025cdf0" containerName="glance-httpd" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.419839 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="32e34e3d-8cbb-42df-9b7a-4b99b025cdf0" containerName="glance-httpd" Jan 20 
17:33:02 crc kubenswrapper[4558]: E0120 17:33:02.419872 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32e34e3d-8cbb-42df-9b7a-4b99b025cdf0" containerName="glance-log" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.419881 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="32e34e3d-8cbb-42df-9b7a-4b99b025cdf0" containerName="glance-log" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.420085 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="32e34e3d-8cbb-42df-9b7a-4b99b025cdf0" containerName="glance-log" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.420098 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="acfda63d-d93f-4b65-b9f4-eaf856bc5a92" containerName="glance-log" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.420106 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="acfda63d-d93f-4b65-b9f4-eaf856bc5a92" containerName="glance-httpd" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.420122 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="32e34e3d-8cbb-42df-9b7a-4b99b025cdf0" containerName="glance-httpd" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.421303 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.426142 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.430950 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-scripts" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.434678 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-internal-config-data" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.435212 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-internal-svc" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.435512 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-glance-dockercfg-bsssw" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.450443 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2jzkg\" (UniqueName: \"kubernetes.io/projected/21c81b0a-c70b-4d57-bea0-96e4840af7dd-kube-api-access-2jzkg\") pod \"glance-default-internal-api-0\" (UID: \"21c81b0a-c70b-4d57-bea0-96e4840af7dd\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.450487 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21c81b0a-c70b-4d57-bea0-96e4840af7dd-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"21c81b0a-c70b-4d57-bea0-96e4840af7dd\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.450598 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/21c81b0a-c70b-4d57-bea0-96e4840af7dd-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"21c81b0a-c70b-4d57-bea0-96e4840af7dd\") " 
pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.450628 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"glance-default-internal-api-0\" (UID: \"21c81b0a-c70b-4d57-bea0-96e4840af7dd\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.450727 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/21c81b0a-c70b-4d57-bea0-96e4840af7dd-scripts\") pod \"glance-default-internal-api-0\" (UID: \"21c81b0a-c70b-4d57-bea0-96e4840af7dd\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.450782 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/21c81b0a-c70b-4d57-bea0-96e4840af7dd-logs\") pod \"glance-default-internal-api-0\" (UID: \"21c81b0a-c70b-4d57-bea0-96e4840af7dd\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.450852 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/21c81b0a-c70b-4d57-bea0-96e4840af7dd-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"21c81b0a-c70b-4d57-bea0-96e4840af7dd\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.450887 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21c81b0a-c70b-4d57-bea0-96e4840af7dd-config-data\") pod \"glance-default-internal-api-0\" (UID: \"21c81b0a-c70b-4d57-bea0-96e4840af7dd\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.452318 4558 scope.go:117] "RemoveContainer" containerID="41deae5788f625f14add58324272ddd5cc382883dd1b3b32b04ceaa69dd8ec1e" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.508213 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.509400 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.524641 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/placement-5f4bd4844-95dpq" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.526968 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.528831 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.532545 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.540411 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-public-svc" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.542400 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-external-config-data" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.556954 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/21c81b0a-c70b-4d57-bea0-96e4840af7dd-logs\") pod \"glance-default-internal-api-0\" (UID: \"21c81b0a-c70b-4d57-bea0-96e4840af7dd\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.557011 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/21c81b0a-c70b-4d57-bea0-96e4840af7dd-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"21c81b0a-c70b-4d57-bea0-96e4840af7dd\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.557043 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21c81b0a-c70b-4d57-bea0-96e4840af7dd-config-data\") pod \"glance-default-internal-api-0\" (UID: \"21c81b0a-c70b-4d57-bea0-96e4840af7dd\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.557096 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2jzkg\" (UniqueName: \"kubernetes.io/projected/21c81b0a-c70b-4d57-bea0-96e4840af7dd-kube-api-access-2jzkg\") pod \"glance-default-internal-api-0\" (UID: \"21c81b0a-c70b-4d57-bea0-96e4840af7dd\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.557119 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21c81b0a-c70b-4d57-bea0-96e4840af7dd-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"21c81b0a-c70b-4d57-bea0-96e4840af7dd\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.557186 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/21c81b0a-c70b-4d57-bea0-96e4840af7dd-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"21c81b0a-c70b-4d57-bea0-96e4840af7dd\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.557212 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"glance-default-internal-api-0\" (UID: \"21c81b0a-c70b-4d57-bea0-96e4840af7dd\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.557262 4558 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/21c81b0a-c70b-4d57-bea0-96e4840af7dd-scripts\") pod \"glance-default-internal-api-0\" (UID: \"21c81b0a-c70b-4d57-bea0-96e4840af7dd\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.557952 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/21c81b0a-c70b-4d57-bea0-96e4840af7dd-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"21c81b0a-c70b-4d57-bea0-96e4840af7dd\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.558183 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"glance-default-internal-api-0\" (UID: \"21c81b0a-c70b-4d57-bea0-96e4840af7dd\") device mount path \"/mnt/openstack/pv15\"" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.564229 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/21c81b0a-c70b-4d57-bea0-96e4840af7dd-logs\") pod \"glance-default-internal-api-0\" (UID: \"21c81b0a-c70b-4d57-bea0-96e4840af7dd\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.568977 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21c81b0a-c70b-4d57-bea0-96e4840af7dd-config-data\") pod \"glance-default-internal-api-0\" (UID: \"21c81b0a-c70b-4d57-bea0-96e4840af7dd\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.643778 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/21c81b0a-c70b-4d57-bea0-96e4840af7dd-scripts\") pod \"glance-default-internal-api-0\" (UID: \"21c81b0a-c70b-4d57-bea0-96e4840af7dd\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.644839 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21c81b0a-c70b-4d57-bea0-96e4840af7dd-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"21c81b0a-c70b-4d57-bea0-96e4840af7dd\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.645249 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/21c81b0a-c70b-4d57-bea0-96e4840af7dd-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"21c81b0a-c70b-4d57-bea0-96e4840af7dd\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.647146 4558 scope.go:117] "RemoveContainer" containerID="32cf2a0dd770e34c9d2400413f255671ccbd9a67bbd106457f3e7faaa2b2df50" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.653943 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2jzkg\" (UniqueName: \"kubernetes.io/projected/21c81b0a-c70b-4d57-bea0-96e4840af7dd-kube-api-access-2jzkg\") pod \"glance-default-internal-api-0\" (UID: \"21c81b0a-c70b-4d57-bea0-96e4840af7dd\") " 
pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.667958 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d78c49cb-72eb-4def-9c40-b16128c74418-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"d78c49cb-72eb-4def-9c40-b16128c74418\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.668070 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fdln6\" (UniqueName: \"kubernetes.io/projected/d78c49cb-72eb-4def-9c40-b16128c74418-kube-api-access-fdln6\") pod \"glance-default-external-api-0\" (UID: \"d78c49cb-72eb-4def-9c40-b16128c74418\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.668139 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d78c49cb-72eb-4def-9c40-b16128c74418-scripts\") pod \"glance-default-external-api-0\" (UID: \"d78c49cb-72eb-4def-9c40-b16128c74418\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.668356 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage16-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage16-crc\") pod \"glance-default-external-api-0\" (UID: \"d78c49cb-72eb-4def-9c40-b16128c74418\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.668434 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d78c49cb-72eb-4def-9c40-b16128c74418-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"d78c49cb-72eb-4def-9c40-b16128c74418\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.668474 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d78c49cb-72eb-4def-9c40-b16128c74418-config-data\") pod \"glance-default-external-api-0\" (UID: \"d78c49cb-72eb-4def-9c40-b16128c74418\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.668594 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d78c49cb-72eb-4def-9c40-b16128c74418-logs\") pod \"glance-default-external-api-0\" (UID: \"d78c49cb-72eb-4def-9c40-b16128c74418\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.668659 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d78c49cb-72eb-4def-9c40-b16128c74418-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"d78c49cb-72eb-4def-9c40-b16128c74418\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:33:02 crc kubenswrapper[4558]: E0120 17:33:02.681742 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find 
container \"32cf2a0dd770e34c9d2400413f255671ccbd9a67bbd106457f3e7faaa2b2df50\": container with ID starting with 32cf2a0dd770e34c9d2400413f255671ccbd9a67bbd106457f3e7faaa2b2df50 not found: ID does not exist" containerID="32cf2a0dd770e34c9d2400413f255671ccbd9a67bbd106457f3e7faaa2b2df50" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.681783 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"32cf2a0dd770e34c9d2400413f255671ccbd9a67bbd106457f3e7faaa2b2df50"} err="failed to get container status \"32cf2a0dd770e34c9d2400413f255671ccbd9a67bbd106457f3e7faaa2b2df50\": rpc error: code = NotFound desc = could not find container \"32cf2a0dd770e34c9d2400413f255671ccbd9a67bbd106457f3e7faaa2b2df50\": container with ID starting with 32cf2a0dd770e34c9d2400413f255671ccbd9a67bbd106457f3e7faaa2b2df50 not found: ID does not exist" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.681819 4558 scope.go:117] "RemoveContainer" containerID="41deae5788f625f14add58324272ddd5cc382883dd1b3b32b04ceaa69dd8ec1e" Jan 20 17:33:02 crc kubenswrapper[4558]: E0120 17:33:02.686059 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"41deae5788f625f14add58324272ddd5cc382883dd1b3b32b04ceaa69dd8ec1e\": container with ID starting with 41deae5788f625f14add58324272ddd5cc382883dd1b3b32b04ceaa69dd8ec1e not found: ID does not exist" containerID="41deae5788f625f14add58324272ddd5cc382883dd1b3b32b04ceaa69dd8ec1e" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.690577 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"41deae5788f625f14add58324272ddd5cc382883dd1b3b32b04ceaa69dd8ec1e"} err="failed to get container status \"41deae5788f625f14add58324272ddd5cc382883dd1b3b32b04ceaa69dd8ec1e\": rpc error: code = NotFound desc = could not find container \"41deae5788f625f14add58324272ddd5cc382883dd1b3b32b04ceaa69dd8ec1e\": container with ID starting with 41deae5788f625f14add58324272ddd5cc382883dd1b3b32b04ceaa69dd8ec1e not found: ID does not exist" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.690616 4558 scope.go:117] "RemoveContainer" containerID="d0d2f8f1f296f903b655cb3d5bbafcb0fde6cac093a0f3918cb5ca364a74a320" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.691811 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="32e34e3d-8cbb-42df-9b7a-4b99b025cdf0" path="/var/lib/kubelet/pods/32e34e3d-8cbb-42df-9b7a-4b99b025cdf0/volumes" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.692683 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="acfda63d-d93f-4b65-b9f4-eaf856bc5a92" path="/var/lib/kubelet/pods/acfda63d-d93f-4b65-b9f4-eaf856bc5a92/volumes" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.693894 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fce7f5af-9139-4f50-a1c9-b74477ed188f" path="/var/lib/kubelet/pods/fce7f5af-9139-4f50-a1c9-b74477ed188f/volumes" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.765408 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-7c4c8fc458-mv7sx"] Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.770180 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d78c49cb-72eb-4def-9c40-b16128c74418-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: 
\"d78c49cb-72eb-4def-9c40-b16128c74418\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.770275 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d78c49cb-72eb-4def-9c40-b16128c74418-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"d78c49cb-72eb-4def-9c40-b16128c74418\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.770439 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fdln6\" (UniqueName: \"kubernetes.io/projected/d78c49cb-72eb-4def-9c40-b16128c74418-kube-api-access-fdln6\") pod \"glance-default-external-api-0\" (UID: \"d78c49cb-72eb-4def-9c40-b16128c74418\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.771240 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d78c49cb-72eb-4def-9c40-b16128c74418-scripts\") pod \"glance-default-external-api-0\" (UID: \"d78c49cb-72eb-4def-9c40-b16128c74418\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.772774 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage16-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage16-crc\") pod \"glance-default-external-api-0\" (UID: \"d78c49cb-72eb-4def-9c40-b16128c74418\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.773105 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d78c49cb-72eb-4def-9c40-b16128c74418-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"d78c49cb-72eb-4def-9c40-b16128c74418\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.773132 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d78c49cb-72eb-4def-9c40-b16128c74418-config-data\") pod \"glance-default-external-api-0\" (UID: \"d78c49cb-72eb-4def-9c40-b16128c74418\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.773197 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d78c49cb-72eb-4def-9c40-b16128c74418-logs\") pod \"glance-default-external-api-0\" (UID: \"d78c49cb-72eb-4def-9c40-b16128c74418\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.773033 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage16-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage16-crc\") pod \"glance-default-external-api-0\" (UID: \"d78c49cb-72eb-4def-9c40-b16128c74418\") device mount path \"/mnt/openstack/pv16\"" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.775383 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d78c49cb-72eb-4def-9c40-b16128c74418-scripts\") pod \"glance-default-external-api-0\" (UID: 
\"d78c49cb-72eb-4def-9c40-b16128c74418\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.777792 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d78c49cb-72eb-4def-9c40-b16128c74418-logs\") pod \"glance-default-external-api-0\" (UID: \"d78c49cb-72eb-4def-9c40-b16128c74418\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.777934 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d78c49cb-72eb-4def-9c40-b16128c74418-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"d78c49cb-72eb-4def-9c40-b16128c74418\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.778262 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d78c49cb-72eb-4def-9c40-b16128c74418-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"d78c49cb-72eb-4def-9c40-b16128c74418\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.786019 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d78c49cb-72eb-4def-9c40-b16128c74418-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"d78c49cb-72eb-4def-9c40-b16128c74418\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.787090 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d78c49cb-72eb-4def-9c40-b16128c74418-config-data\") pod \"glance-default-external-api-0\" (UID: \"d78c49cb-72eb-4def-9c40-b16128c74418\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.794052 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fdln6\" (UniqueName: \"kubernetes.io/projected/d78c49cb-72eb-4def-9c40-b16128c74418-kube-api-access-fdln6\") pod \"glance-default-external-api-0\" (UID: \"d78c49cb-72eb-4def-9c40-b16128c74418\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.812842 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"glance-default-internal-api-0\" (UID: \"21c81b0a-c70b-4d57-bea0-96e4840af7dd\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.821617 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage16-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage16-crc\") pod \"glance-default-external-api-0\" (UID: \"d78c49cb-72eb-4def-9c40-b16128c74418\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.845642 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:33:02 crc kubenswrapper[4558]: I0120 17:33:02.987043 4558 scope.go:117] "RemoveContainer" containerID="6506b5c2fe94ddb4f34e480f304b433a04393dac97ebdf236b623e4dcfde7b2f" Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.098529 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.105844 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.106548 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovsdbserver-nb-0" podUID="581cf384-3caa-4074-a6aa-526e8a65e9b2" containerName="openstack-network-exporter" containerID="cri-o://ce4c4a8dc51cc8a0747c9a283ff63624944b94308dde57af4f43cc0ee489bfaa" gracePeriod=300 Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.118024 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-api-74c98446fb-d49s5" Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.171088 4558 scope.go:117] "RemoveContainer" containerID="d0d2f8f1f296f903b655cb3d5bbafcb0fde6cac093a0f3918cb5ca364a74a320" Jan 20 17:33:03 crc kubenswrapper[4558]: E0120 17:33:03.172707 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d0d2f8f1f296f903b655cb3d5bbafcb0fde6cac093a0f3918cb5ca364a74a320\": container with ID starting with d0d2f8f1f296f903b655cb3d5bbafcb0fde6cac093a0f3918cb5ca364a74a320 not found: ID does not exist" containerID="d0d2f8f1f296f903b655cb3d5bbafcb0fde6cac093a0f3918cb5ca364a74a320" Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.172737 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d0d2f8f1f296f903b655cb3d5bbafcb0fde6cac093a0f3918cb5ca364a74a320"} err="failed to get container status \"d0d2f8f1f296f903b655cb3d5bbafcb0fde6cac093a0f3918cb5ca364a74a320\": rpc error: code = NotFound desc = could not find container \"d0d2f8f1f296f903b655cb3d5bbafcb0fde6cac093a0f3918cb5ca364a74a320\": container with ID starting with d0d2f8f1f296f903b655cb3d5bbafcb0fde6cac093a0f3918cb5ca364a74a320 not found: ID does not exist" Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.172757 4558 scope.go:117] "RemoveContainer" containerID="6506b5c2fe94ddb4f34e480f304b433a04393dac97ebdf236b623e4dcfde7b2f" Jan 20 17:33:03 crc kubenswrapper[4558]: E0120 17:33:03.174559 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6506b5c2fe94ddb4f34e480f304b433a04393dac97ebdf236b623e4dcfde7b2f\": container with ID starting with 6506b5c2fe94ddb4f34e480f304b433a04393dac97ebdf236b623e4dcfde7b2f not found: ID does not exist" containerID="6506b5c2fe94ddb4f34e480f304b433a04393dac97ebdf236b623e4dcfde7b2f" Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.174578 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6506b5c2fe94ddb4f34e480f304b433a04393dac97ebdf236b623e4dcfde7b2f"} err="failed to get container status \"6506b5c2fe94ddb4f34e480f304b433a04393dac97ebdf236b623e4dcfde7b2f\": rpc error: code = NotFound desc = could not find container \"6506b5c2fe94ddb4f34e480f304b433a04393dac97ebdf236b623e4dcfde7b2f\": 
container with ID starting with 6506b5c2fe94ddb4f34e480f304b433a04393dac97ebdf236b623e4dcfde7b2f not found: ID does not exist" Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.178520 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.181692 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-5c6488b48b-4sj7k" Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.198252 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e83a55be-4002-403a-911f-8f33c234b7e4-config-data\") pod \"e83a55be-4002-403a-911f-8f33c234b7e4\" (UID: \"e83a55be-4002-403a-911f-8f33c234b7e4\") " Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.198374 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l7zlc\" (UniqueName: \"kubernetes.io/projected/e83a55be-4002-403a-911f-8f33c234b7e4-kube-api-access-l7zlc\") pod \"e83a55be-4002-403a-911f-8f33c234b7e4\" (UID: \"e83a55be-4002-403a-911f-8f33c234b7e4\") " Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.198460 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e83a55be-4002-403a-911f-8f33c234b7e4-combined-ca-bundle\") pod \"e83a55be-4002-403a-911f-8f33c234b7e4\" (UID: \"e83a55be-4002-403a-911f-8f33c234b7e4\") " Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.198517 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e83a55be-4002-403a-911f-8f33c234b7e4-logs\") pod \"e83a55be-4002-403a-911f-8f33c234b7e4\" (UID: \"e83a55be-4002-403a-911f-8f33c234b7e4\") " Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.198610 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e83a55be-4002-403a-911f-8f33c234b7e4-config-data-custom\") pod \"e83a55be-4002-403a-911f-8f33c234b7e4\" (UID: \"e83a55be-4002-403a-911f-8f33c234b7e4\") " Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.201497 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e83a55be-4002-403a-911f-8f33c234b7e4-logs" (OuterVolumeSpecName: "logs") pod "e83a55be-4002-403a-911f-8f33c234b7e4" (UID: "e83a55be-4002-403a-911f-8f33c234b7e4"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.210852 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e83a55be-4002-403a-911f-8f33c234b7e4-kube-api-access-l7zlc" (OuterVolumeSpecName: "kube-api-access-l7zlc") pod "e83a55be-4002-403a-911f-8f33c234b7e4" (UID: "e83a55be-4002-403a-911f-8f33c234b7e4"). InnerVolumeSpecName "kube-api-access-l7zlc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.253264 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e83a55be-4002-403a-911f-8f33c234b7e4-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "e83a55be-4002-403a-911f-8f33c234b7e4" (UID: "e83a55be-4002-403a-911f-8f33c234b7e4"). 
InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.256440 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.256477 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.265411 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovsdbserver-nb-0" podUID="581cf384-3caa-4074-a6aa-526e8a65e9b2" containerName="ovsdbserver-nb" containerID="cri-o://f2a87a3274b85da1b912ddda9d9c24218552ae2474139c4df992c4937f8ad8bb" gracePeriod=300 Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.271308 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" podUID="6a66de52-5f9c-46c6-aa54-8a0ba3288230" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.1.218:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.271386 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" podUID="6a66de52-5f9c-46c6-aa54-8a0ba3288230" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.1.218:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.300652 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/48ebccca-cc18-415f-b6f8-f301f99e5d3a-httpd-config\") pod \"48ebccca-cc18-415f-b6f8-f301f99e5d3a\" (UID: \"48ebccca-cc18-415f-b6f8-f301f99e5d3a\") " Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.300697 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48ebccca-cc18-415f-b6f8-f301f99e5d3a-combined-ca-bundle\") pod \"48ebccca-cc18-415f-b6f8-f301f99e5d3a\" (UID: \"48ebccca-cc18-415f-b6f8-f301f99e5d3a\") " Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.300752 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gnxn7\" (UniqueName: \"kubernetes.io/projected/6dbc6996-80bf-4f86-9acf-f74dd04e51b1-kube-api-access-gnxn7\") pod \"6dbc6996-80bf-4f86-9acf-f74dd04e51b1\" (UID: \"6dbc6996-80bf-4f86-9acf-f74dd04e51b1\") " Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.301265 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6dbc6996-80bf-4f86-9acf-f74dd04e51b1-config-data\") pod \"6dbc6996-80bf-4f86-9acf-f74dd04e51b1\" (UID: \"6dbc6996-80bf-4f86-9acf-f74dd04e51b1\") " Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.301328 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/48ebccca-cc18-415f-b6f8-f301f99e5d3a-config\") pod \"48ebccca-cc18-415f-b6f8-f301f99e5d3a\" (UID: \"48ebccca-cc18-415f-b6f8-f301f99e5d3a\") " Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.301452 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/6dbc6996-80bf-4f86-9acf-f74dd04e51b1-combined-ca-bundle\") pod \"6dbc6996-80bf-4f86-9acf-f74dd04e51b1\" (UID: \"6dbc6996-80bf-4f86-9acf-f74dd04e51b1\") " Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.301499 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5p4h4\" (UniqueName: \"kubernetes.io/projected/48ebccca-cc18-415f-b6f8-f301f99e5d3a-kube-api-access-5p4h4\") pod \"48ebccca-cc18-415f-b6f8-f301f99e5d3a\" (UID: \"48ebccca-cc18-415f-b6f8-f301f99e5d3a\") " Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.302093 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e83a55be-4002-403a-911f-8f33c234b7e4-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.302106 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l7zlc\" (UniqueName: \"kubernetes.io/projected/e83a55be-4002-403a-911f-8f33c234b7e4-kube-api-access-l7zlc\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.302116 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e83a55be-4002-403a-911f-8f33c234b7e4-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.335722 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6dbc6996-80bf-4f86-9acf-f74dd04e51b1-kube-api-access-gnxn7" (OuterVolumeSpecName: "kube-api-access-gnxn7") pod "6dbc6996-80bf-4f86-9acf-f74dd04e51b1" (UID: "6dbc6996-80bf-4f86-9acf-f74dd04e51b1"). InnerVolumeSpecName "kube-api-access-gnxn7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.339769 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/48ebccca-cc18-415f-b6f8-f301f99e5d3a-kube-api-access-5p4h4" (OuterVolumeSpecName: "kube-api-access-5p4h4") pod "48ebccca-cc18-415f-b6f8-f301f99e5d3a" (UID: "48ebccca-cc18-415f-b6f8-f301f99e5d3a"). InnerVolumeSpecName "kube-api-access-5p4h4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.353741 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/48ebccca-cc18-415f-b6f8-f301f99e5d3a-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "48ebccca-cc18-415f-b6f8-f301f99e5d3a" (UID: "48ebccca-cc18-415f-b6f8-f301f99e5d3a"). InnerVolumeSpecName "httpd-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.380821 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.407572 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5p4h4\" (UniqueName: \"kubernetes.io/projected/48ebccca-cc18-415f-b6f8-f301f99e5d3a-kube-api-access-5p4h4\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.407596 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/48ebccca-cc18-415f-b6f8-f301f99e5d3a-httpd-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.407608 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gnxn7\" (UniqueName: \"kubernetes.io/projected/6dbc6996-80bf-4f86-9acf-f74dd04e51b1-kube-api-access-gnxn7\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.408175 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-nb-0_581cf384-3caa-4074-a6aa-526e8a65e9b2/ovsdbserver-nb/0.log" Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.408220 4558 generic.go:334] "Generic (PLEG): container finished" podID="581cf384-3caa-4074-a6aa-526e8a65e9b2" containerID="ce4c4a8dc51cc8a0747c9a283ff63624944b94308dde57af4f43cc0ee489bfaa" exitCode=2 Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.408251 4558 generic.go:334] "Generic (PLEG): container finished" podID="581cf384-3caa-4074-a6aa-526e8a65e9b2" containerID="f2a87a3274b85da1b912ddda9d9c24218552ae2474139c4df992c4937f8ad8bb" exitCode=143 Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.408320 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"581cf384-3caa-4074-a6aa-526e8a65e9b2","Type":"ContainerDied","Data":"ce4c4a8dc51cc8a0747c9a283ff63624944b94308dde57af4f43cc0ee489bfaa"} Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.408357 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"581cf384-3caa-4074-a6aa-526e8a65e9b2","Type":"ContainerDied","Data":"f2a87a3274b85da1b912ddda9d9c24218552ae2474139c4df992c4937f8ad8bb"} Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.413721 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-7c4c8fc458-mv7sx" event={"ID":"3a9cf38d-4b8b-453b-bc39-b97deab68b72","Type":"ContainerStarted","Data":"1e5b7fdafd6b5dfe7da064308a77b7e92ccf6f49d26523f64fcc883be1040d57"} Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.416782 4558 generic.go:334] "Generic (PLEG): container finished" podID="48ebccca-cc18-415f-b6f8-f301f99e5d3a" containerID="399e63aaadfe0fa5502fd43a62acd0f3a44179ff829093b2b62e55e74b7ceac6" exitCode=0 Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.416928 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-5c6488b48b-4sj7k" event={"ID":"48ebccca-cc18-415f-b6f8-f301f99e5d3a","Type":"ContainerDied","Data":"399e63aaadfe0fa5502fd43a62acd0f3a44179ff829093b2b62e55e74b7ceac6"} Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.417017 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-5c6488b48b-4sj7k" 
event={"ID":"48ebccca-cc18-415f-b6f8-f301f99e5d3a","Type":"ContainerDied","Data":"6e70719a68f130030bdf24d3504c54db722377f4ef2e6a6503821d48a7173a3d"} Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.417122 4558 scope.go:117] "RemoveContainer" containerID="38c6eff5636dfec3724781cb0cc82717214fdcbd1a3eb92d714d5934aa1b6128" Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.417361 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-5c6488b48b-4sj7k" Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.430584 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"6dbc6996-80bf-4f86-9acf-f74dd04e51b1","Type":"ContainerDied","Data":"970a93b4a91f5ca4973899ad99e90f77e072ebe56a15538b980197b22ac94391"} Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.430673 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.440288 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-95f6c56c8-5t8cm" event={"ID":"e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a","Type":"ContainerStarted","Data":"da662d5236ba854a24d04ef2fd7223581f34c137e93a7f81b34b1e201dbeda89"} Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.472068 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-74c98446fb-d49s5" event={"ID":"e83a55be-4002-403a-911f-8f33c234b7e4","Type":"ContainerDied","Data":"c59e4b15090f012b517b5ebb751bc8a094a2cfae61addd70eed726c4b9071598"} Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.472155 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-api-74c98446fb-d49s5" Jan 20 17:33:03 crc kubenswrapper[4558]: W0120 17:33:03.479846 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd78c49cb_72eb_4def_9c40_b16128c74418.slice/crio-ec4550a0ff6fadc406c9437c9c4710fec8bca31865e14b1400128f0f71c62981 WatchSource:0}: Error finding container ec4550a0ff6fadc406c9437c9c4710fec8bca31865e14b1400128f0f71c62981: Status 404 returned error can't find the container with id ec4550a0ff6fadc406c9437c9c4710fec8bca31865e14b1400128f0f71c62981 Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.505708 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-755c97c57d-cswnd" event={"ID":"09df5e63-9283-4cae-80fe-cfa88f5a9b72","Type":"ContainerStarted","Data":"7bd2233090ba7d102c3a1cba643b0d5335cea55cf7e97e49488020c2bb64eaa3"} Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.509944 4558 scope.go:117] "RemoveContainer" containerID="399e63aaadfe0fa5502fd43a62acd0f3a44179ff829093b2b62e55e74b7ceac6" Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.521572 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-b7d7d464-2s2ds" event={"ID":"f9691b53-0a90-45ee-bd69-c75b1195eeb8","Type":"ContainerStarted","Data":"93673d7da26917595853550ced8a076cdb7f6814e0bc857960d3ee07fd0ffe91"} Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.525872 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-s78ts" event={"ID":"a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c","Type":"ContainerStarted","Data":"caf3b86c536afa6f89b87145a2e56bad21ebf5126d817742dfc2556c1dbf4d10"} Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.573608 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e83a55be-4002-403a-911f-8f33c234b7e4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e83a55be-4002-403a-911f-8f33c234b7e4" (UID: "e83a55be-4002-403a-911f-8f33c234b7e4"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.582205 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"b115808a-3ae3-40c5-b330-f58aa6af7503","Type":"ContainerStarted","Data":"8f9b1bb1f9284398997331917a18ecd29e5f1f938a3f0cdf95397ef7ef755186"} Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.582608 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="b115808a-3ae3-40c5-b330-f58aa6af7503" containerName="ceilometer-central-agent" containerID="cri-o://902c7c53a3328dda0625dd77131ac2bb26f4fce8e93e1be078eaef8a855526bf" gracePeriod=30 Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.582893 4558 scope.go:117] "RemoveContainer" containerID="38c6eff5636dfec3724781cb0cc82717214fdcbd1a3eb92d714d5934aa1b6128" Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.582934 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.583006 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="b115808a-3ae3-40c5-b330-f58aa6af7503" containerName="sg-core" containerID="cri-o://9521f46d4a99703b801e57ee0c4402fc90e695fd2198c5972592e15eac8d40d6" gracePeriod=30 Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.583098 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="b115808a-3ae3-40c5-b330-f58aa6af7503" containerName="ceilometer-notification-agent" containerID="cri-o://0704db880c3d402aa539743da9a5f815da9504c0ee52f7941ada9ffd86b66fe0" gracePeriod=30 Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.583149 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="b115808a-3ae3-40c5-b330-f58aa6af7503" containerName="proxy-httpd" containerID="cri-o://8f9b1bb1f9284398997331917a18ecd29e5f1f938a3f0cdf95397ef7ef755186" gracePeriod=30 Jan 20 17:33:03 crc kubenswrapper[4558]: E0120 17:33:03.589757 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"38c6eff5636dfec3724781cb0cc82717214fdcbd1a3eb92d714d5934aa1b6128\": container with ID starting with 38c6eff5636dfec3724781cb0cc82717214fdcbd1a3eb92d714d5934aa1b6128 not found: ID does not exist" containerID="38c6eff5636dfec3724781cb0cc82717214fdcbd1a3eb92d714d5934aa1b6128" Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.590058 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"38c6eff5636dfec3724781cb0cc82717214fdcbd1a3eb92d714d5934aa1b6128"} err="failed to get container status \"38c6eff5636dfec3724781cb0cc82717214fdcbd1a3eb92d714d5934aa1b6128\": rpc error: code = NotFound desc = could not find container \"38c6eff5636dfec3724781cb0cc82717214fdcbd1a3eb92d714d5934aa1b6128\": container with ID starting with 38c6eff5636dfec3724781cb0cc82717214fdcbd1a3eb92d714d5934aa1b6128 not found: ID does not exist" Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.590092 4558 scope.go:117] "RemoveContainer" containerID="399e63aaadfe0fa5502fd43a62acd0f3a44179ff829093b2b62e55e74b7ceac6" Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.590342 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack-kuttl-tests/neutron-7c4c8fc458-mv7sx"] Jan 20 17:33:03 crc kubenswrapper[4558]: E0120 17:33:03.595388 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"399e63aaadfe0fa5502fd43a62acd0f3a44179ff829093b2b62e55e74b7ceac6\": container with ID starting with 399e63aaadfe0fa5502fd43a62acd0f3a44179ff829093b2b62e55e74b7ceac6 not found: ID does not exist" containerID="399e63aaadfe0fa5502fd43a62acd0f3a44179ff829093b2b62e55e74b7ceac6" Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.597583 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-5b76bfdfd9-6pwrz" event={"ID":"377a0d4c-edf2-4912-a04f-0f814ea9efdb","Type":"ContainerStarted","Data":"414f0875a609073a32394286fcda0d02e11c9c0d582bf0952c06f509510acf10"} Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.600663 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"399e63aaadfe0fa5502fd43a62acd0f3a44179ff829093b2b62e55e74b7ceac6"} err="failed to get container status \"399e63aaadfe0fa5502fd43a62acd0f3a44179ff829093b2b62e55e74b7ceac6\": rpc error: code = NotFound desc = could not find container \"399e63aaadfe0fa5502fd43a62acd0f3a44179ff829093b2b62e55e74b7ceac6\": container with ID starting with 399e63aaadfe0fa5502fd43a62acd0f3a44179ff829093b2b62e55e74b7ceac6 not found: ID does not exist" Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.600708 4558 scope.go:117] "RemoveContainer" containerID="a45e6f8d25e2903ff94497e102fe9cb0b1841d235bfac8ac8e62657bad6c6b17" Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.605473 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-9c84d7f96-k2vb8"] Jan 20 17:33:03 crc kubenswrapper[4558]: E0120 17:33:03.605883 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6dbc6996-80bf-4f86-9acf-f74dd04e51b1" containerName="nova-cell0-conductor-conductor" Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.605902 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6dbc6996-80bf-4f86-9acf-f74dd04e51b1" containerName="nova-cell0-conductor-conductor" Jan 20 17:33:03 crc kubenswrapper[4558]: E0120 17:33:03.605920 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e83a55be-4002-403a-911f-8f33c234b7e4" containerName="barbican-api-log" Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.605926 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e83a55be-4002-403a-911f-8f33c234b7e4" containerName="barbican-api-log" Jan 20 17:33:03 crc kubenswrapper[4558]: E0120 17:33:03.605944 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48ebccca-cc18-415f-b6f8-f301f99e5d3a" containerName="neutron-api" Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.605952 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="48ebccca-cc18-415f-b6f8-f301f99e5d3a" containerName="neutron-api" Jan 20 17:33:03 crc kubenswrapper[4558]: E0120 17:33:03.605967 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48ebccca-cc18-415f-b6f8-f301f99e5d3a" containerName="neutron-httpd" Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.605973 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="48ebccca-cc18-415f-b6f8-f301f99e5d3a" containerName="neutron-httpd" Jan 20 17:33:03 crc kubenswrapper[4558]: E0120 17:33:03.605983 4558 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="e83a55be-4002-403a-911f-8f33c234b7e4" containerName="barbican-api" Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.605991 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e83a55be-4002-403a-911f-8f33c234b7e4" containerName="barbican-api" Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.606183 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e83a55be-4002-403a-911f-8f33c234b7e4" containerName="barbican-api-log" Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.606205 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="6dbc6996-80bf-4f86-9acf-f74dd04e51b1" containerName="nova-cell0-conductor-conductor" Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.606216 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e83a55be-4002-403a-911f-8f33c234b7e4" containerName="barbican-api" Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.606227 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="48ebccca-cc18-415f-b6f8-f301f99e5d3a" containerName="neutron-httpd" Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.606237 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="48ebccca-cc18-415f-b6f8-f301f99e5d3a" containerName="neutron-api" Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.607186 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-9c84d7f96-k2vb8" Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.621359 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-9c84d7f96-k2vb8"] Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.625514 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e83a55be-4002-403a-911f-8f33c234b7e4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.629758 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=2.524871309 podStartE2EDuration="6.62973839s" podCreationTimestamp="2026-01-20 17:32:57 +0000 UTC" firstStartedPulling="2026-01-20 17:32:58.042490152 +0000 UTC m=+3071.802828120" lastFinishedPulling="2026-01-20 17:33:02.147357234 +0000 UTC m=+3075.907695201" observedRunningTime="2026-01-20 17:33:03.613451516 +0000 UTC m=+3077.373789503" watchObservedRunningTime="2026-01-20 17:33:03.62973839 +0000 UTC m=+3077.390076357" Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.728088 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/25ac8e40-35ea-4d5b-8d8a-ee123c2e2fff-config\") pod \"neutron-9c84d7f96-k2vb8\" (UID: \"25ac8e40-35ea-4d5b-8d8a-ee123c2e2fff\") " pod="openstack-kuttl-tests/neutron-9c84d7f96-k2vb8" Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.728246 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25ac8e40-35ea-4d5b-8d8a-ee123c2e2fff-combined-ca-bundle\") pod \"neutron-9c84d7f96-k2vb8\" (UID: \"25ac8e40-35ea-4d5b-8d8a-ee123c2e2fff\") " pod="openstack-kuttl-tests/neutron-9c84d7f96-k2vb8" Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.728278 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pfd22\" (UniqueName: 
\"kubernetes.io/projected/25ac8e40-35ea-4d5b-8d8a-ee123c2e2fff-kube-api-access-pfd22\") pod \"neutron-9c84d7f96-k2vb8\" (UID: \"25ac8e40-35ea-4d5b-8d8a-ee123c2e2fff\") " pod="openstack-kuttl-tests/neutron-9c84d7f96-k2vb8" Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.728341 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/25ac8e40-35ea-4d5b-8d8a-ee123c2e2fff-httpd-config\") pod \"neutron-9c84d7f96-k2vb8\" (UID: \"25ac8e40-35ea-4d5b-8d8a-ee123c2e2fff\") " pod="openstack-kuttl-tests/neutron-9c84d7f96-k2vb8" Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.772829 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.862739 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25ac8e40-35ea-4d5b-8d8a-ee123c2e2fff-combined-ca-bundle\") pod \"neutron-9c84d7f96-k2vb8\" (UID: \"25ac8e40-35ea-4d5b-8d8a-ee123c2e2fff\") " pod="openstack-kuttl-tests/neutron-9c84d7f96-k2vb8" Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.862829 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pfd22\" (UniqueName: \"kubernetes.io/projected/25ac8e40-35ea-4d5b-8d8a-ee123c2e2fff-kube-api-access-pfd22\") pod \"neutron-9c84d7f96-k2vb8\" (UID: \"25ac8e40-35ea-4d5b-8d8a-ee123c2e2fff\") " pod="openstack-kuttl-tests/neutron-9c84d7f96-k2vb8" Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.862928 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/25ac8e40-35ea-4d5b-8d8a-ee123c2e2fff-httpd-config\") pod \"neutron-9c84d7f96-k2vb8\" (UID: \"25ac8e40-35ea-4d5b-8d8a-ee123c2e2fff\") " pod="openstack-kuttl-tests/neutron-9c84d7f96-k2vb8" Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.863063 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/25ac8e40-35ea-4d5b-8d8a-ee123c2e2fff-config\") pod \"neutron-9c84d7f96-k2vb8\" (UID: \"25ac8e40-35ea-4d5b-8d8a-ee123c2e2fff\") " pod="openstack-kuttl-tests/neutron-9c84d7f96-k2vb8" Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.878851 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/25ac8e40-35ea-4d5b-8d8a-ee123c2e2fff-httpd-config\") pod \"neutron-9c84d7f96-k2vb8\" (UID: \"25ac8e40-35ea-4d5b-8d8a-ee123c2e2fff\") " pod="openstack-kuttl-tests/neutron-9c84d7f96-k2vb8" Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.879017 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/25ac8e40-35ea-4d5b-8d8a-ee123c2e2fff-config\") pod \"neutron-9c84d7f96-k2vb8\" (UID: \"25ac8e40-35ea-4d5b-8d8a-ee123c2e2fff\") " pod="openstack-kuttl-tests/neutron-9c84d7f96-k2vb8" Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.879019 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25ac8e40-35ea-4d5b-8d8a-ee123c2e2fff-combined-ca-bundle\") pod \"neutron-9c84d7f96-k2vb8\" (UID: \"25ac8e40-35ea-4d5b-8d8a-ee123c2e2fff\") " pod="openstack-kuttl-tests/neutron-9c84d7f96-k2vb8" Jan 20 17:33:03 crc kubenswrapper[4558]: I0120 17:33:03.886360 4558 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pfd22\" (UniqueName: \"kubernetes.io/projected/25ac8e40-35ea-4d5b-8d8a-ee123c2e2fff-kube-api-access-pfd22\") pod \"neutron-9c84d7f96-k2vb8\" (UID: \"25ac8e40-35ea-4d5b-8d8a-ee123c2e2fff\") " pod="openstack-kuttl-tests/neutron-9c84d7f96-k2vb8" Jan 20 17:33:04 crc kubenswrapper[4558]: I0120 17:33:04.344338 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-metadata-0" podUID="8fb66f31-6ee9-4ad8-8bd2-14e489b98591" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.219:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 20 17:33:04 crc kubenswrapper[4558]: I0120 17:33:04.344497 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-metadata-0" podUID="8fb66f31-6ee9-4ad8-8bd2-14e489b98591" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.219:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 20 17:33:04 crc kubenswrapper[4558]: I0120 17:33:04.374947 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/48ebccca-cc18-415f-b6f8-f301f99e5d3a-config" (OuterVolumeSpecName: "config") pod "48ebccca-cc18-415f-b6f8-f301f99e5d3a" (UID: "48ebccca-cc18-415f-b6f8-f301f99e5d3a"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:04 crc kubenswrapper[4558]: I0120 17:33:04.379279 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e83a55be-4002-403a-911f-8f33c234b7e4-config-data" (OuterVolumeSpecName: "config-data") pod "e83a55be-4002-403a-911f-8f33c234b7e4" (UID: "e83a55be-4002-403a-911f-8f33c234b7e4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:04 crc kubenswrapper[4558]: I0120 17:33:04.390400 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6dbc6996-80bf-4f86-9acf-f74dd04e51b1-config-data" (OuterVolumeSpecName: "config-data") pod "6dbc6996-80bf-4f86-9acf-f74dd04e51b1" (UID: "6dbc6996-80bf-4f86-9acf-f74dd04e51b1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:04 crc kubenswrapper[4558]: I0120 17:33:04.392250 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/48ebccca-cc18-415f-b6f8-f301f99e5d3a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "48ebccca-cc18-415f-b6f8-f301f99e5d3a" (UID: "48ebccca-cc18-415f-b6f8-f301f99e5d3a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:04 crc kubenswrapper[4558]: I0120 17:33:04.410298 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6dbc6996-80bf-4f86-9acf-f74dd04e51b1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6dbc6996-80bf-4f86-9acf-f74dd04e51b1" (UID: "6dbc6996-80bf-4f86-9acf-f74dd04e51b1"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:04 crc kubenswrapper[4558]: I0120 17:33:04.477775 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6dbc6996-80bf-4f86-9acf-f74dd04e51b1-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:04 crc kubenswrapper[4558]: I0120 17:33:04.478247 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/48ebccca-cc18-415f-b6f8-f301f99e5d3a-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:04 crc kubenswrapper[4558]: I0120 17:33:04.478262 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6dbc6996-80bf-4f86-9acf-f74dd04e51b1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:04 crc kubenswrapper[4558]: I0120 17:33:04.478274 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48ebccca-cc18-415f-b6f8-f301f99e5d3a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:04 crc kubenswrapper[4558]: I0120 17:33:04.478294 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e83a55be-4002-403a-911f-8f33c234b7e4-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:04 crc kubenswrapper[4558]: I0120 17:33:04.656071 4558 generic.go:334] "Generic (PLEG): container finished" podID="b115808a-3ae3-40c5-b330-f58aa6af7503" containerID="8f9b1bb1f9284398997331917a18ecd29e5f1f938a3f0cdf95397ef7ef755186" exitCode=0 Jan 20 17:33:04 crc kubenswrapper[4558]: I0120 17:33:04.656120 4558 generic.go:334] "Generic (PLEG): container finished" podID="b115808a-3ae3-40c5-b330-f58aa6af7503" containerID="9521f46d4a99703b801e57ee0c4402fc90e695fd2198c5972592e15eac8d40d6" exitCode=2 Jan 20 17:33:04 crc kubenswrapper[4558]: I0120 17:33:04.656129 4558 generic.go:334] "Generic (PLEG): container finished" podID="b115808a-3ae3-40c5-b330-f58aa6af7503" containerID="0704db880c3d402aa539743da9a5f815da9504c0ee52f7941ada9ffd86b66fe0" exitCode=0 Jan 20 17:33:04 crc kubenswrapper[4558]: I0120 17:33:04.656239 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"b115808a-3ae3-40c5-b330-f58aa6af7503","Type":"ContainerDied","Data":"8f9b1bb1f9284398997331917a18ecd29e5f1f938a3f0cdf95397ef7ef755186"} Jan 20 17:33:04 crc kubenswrapper[4558]: I0120 17:33:04.656279 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"b115808a-3ae3-40c5-b330-f58aa6af7503","Type":"ContainerDied","Data":"9521f46d4a99703b801e57ee0c4402fc90e695fd2198c5972592e15eac8d40d6"} Jan 20 17:33:04 crc kubenswrapper[4558]: I0120 17:33:04.656291 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"b115808a-3ae3-40c5-b330-f58aa6af7503","Type":"ContainerDied","Data":"0704db880c3d402aa539743da9a5f815da9504c0ee52f7941ada9ffd86b66fe0"} Jan 20 17:33:04 crc kubenswrapper[4558]: I0120 17:33:04.662331 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"d78c49cb-72eb-4def-9c40-b16128c74418","Type":"ContainerStarted","Data":"ec4550a0ff6fadc406c9437c9c4710fec8bca31865e14b1400128f0f71c62981"} Jan 20 17:33:04 crc kubenswrapper[4558]: I0120 17:33:04.665984 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"21c81b0a-c70b-4d57-bea0-96e4840af7dd","Type":"ContainerStarted","Data":"8da018bd9bfe4c0ed93d768061a960a8f05d5986aebfa42f877a7b9fc4eaca37"} Jan 20 17:33:04 crc kubenswrapper[4558]: I0120 17:33:04.673051 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-755c97c57d-cswnd" event={"ID":"09df5e63-9283-4cae-80fe-cfa88f5a9b72","Type":"ContainerStarted","Data":"de6accc2640fd6714fc093d22da413b576997cc0e51ebcd994892e58839c6118"} Jan 20 17:33:04 crc kubenswrapper[4558]: I0120 17:33:04.696254 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-b7d7d464-2s2ds" event={"ID":"f9691b53-0a90-45ee-bd69-c75b1195eeb8","Type":"ContainerStarted","Data":"ea4aa09b4d785b96f464c4b23e3df19dec0b597d02c3e90b688f8fa7421d46c0"} Jan 20 17:33:04 crc kubenswrapper[4558]: I0120 17:33:04.700005 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-keystone-listener-755c97c57d-cswnd" podStartSLOduration=4.699992527 podStartE2EDuration="4.699992527s" podCreationTimestamp="2026-01-20 17:33:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:33:04.697135665 +0000 UTC m=+3078.457473623" watchObservedRunningTime="2026-01-20 17:33:04.699992527 +0000 UTC m=+3078.460330494" Jan 20 17:33:04 crc kubenswrapper[4558]: I0120 17:33:04.702582 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-s78ts" event={"ID":"a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c","Type":"ContainerStarted","Data":"fcf35382d515efc646c6a18f5a871bb7d56050c4d19ea44e31380d2ddf0a363c"} Jan 20 17:33:04 crc kubenswrapper[4558]: I0120 17:33:04.716874 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-nb-0_581cf384-3caa-4074-a6aa-526e8a65e9b2/ovsdbserver-nb/0.log" Jan 20 17:33:04 crc kubenswrapper[4558]: I0120 17:33:04.716944 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"581cf384-3caa-4074-a6aa-526e8a65e9b2","Type":"ContainerDied","Data":"090f42e80e9e937d110de0492f6d7685db4909c82b2ccfaeb8fc3770dbee0d55"} Jan 20 17:33:04 crc kubenswrapper[4558]: I0120 17:33:04.716963 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="090f42e80e9e937d110de0492f6d7685db4909c82b2ccfaeb8fc3770dbee0d55" Jan 20 17:33:04 crc kubenswrapper[4558]: I0120 17:33:04.756477 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-5b76bfdfd9-6pwrz" event={"ID":"377a0d4c-edf2-4912-a04f-0f814ea9efdb","Type":"ContainerStarted","Data":"c2b0747f8229855e5230624f8d196c8c02e4e23423e1c651c0079dce53d51d01"} Jan 20 17:33:04 crc kubenswrapper[4558]: I0120 17:33:04.760951 4558 scope.go:117] "RemoveContainer" containerID="9d39db634c3fa17e206e8d37ce291551fd4325bd609316a24c87cd44c0b5f95d" Jan 20 17:33:04 crc kubenswrapper[4558]: I0120 17:33:04.767659 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-95f6c56c8-5t8cm" event={"ID":"e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a","Type":"ContainerStarted","Data":"a23e411ace410c8611d6138e0db0566765ec515acb969fbae9e8e3de65c92841"} Jan 20 17:33:04 crc kubenswrapper[4558]: I0120 17:33:04.771488 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-kuttl-tests/neutron-7c4c8fc458-mv7sx" event={"ID":"3a9cf38d-4b8b-453b-bc39-b97deab68b72","Type":"ContainerStarted","Data":"76e12ebe02e4aa0edbf97593983bce1f44046a97fbfe38a1b74fd48dd726a1c5"} Jan 20 17:33:04 crc kubenswrapper[4558]: I0120 17:33:04.778215 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-5459fb5f8-h5bbp"] Jan 20 17:33:04 crc kubenswrapper[4558]: I0120 17:33:04.778615 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-keystone-listener-5459fb5f8-h5bbp" podUID="d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a" containerName="barbican-keystone-listener-log" containerID="cri-o://f06e9dc08abc874c209a5383b2994f653202e9f66eb7464ef36f77ca919a05a8" gracePeriod=30 Jan 20 17:33:04 crc kubenswrapper[4558]: I0120 17:33:04.778936 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-keystone-listener-5459fb5f8-h5bbp" podUID="d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a" containerName="barbican-keystone-listener" containerID="cri-o://616dfc84f48150feccd210dc50efb30772487af019214a6546143c12c3014223" gracePeriod=30 Jan 20 17:33:04 crc kubenswrapper[4558]: I0120 17:33:04.793204 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/keystone-bootstrap-s78ts" podStartSLOduration=4.793187924 podStartE2EDuration="4.793187924s" podCreationTimestamp="2026-01-20 17:33:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:33:04.741702686 +0000 UTC m=+3078.502040653" watchObservedRunningTime="2026-01-20 17:33:04.793187924 +0000 UTC m=+3078.553525891" Jan 20 17:33:04 crc kubenswrapper[4558]: I0120 17:33:04.852529 4558 scope.go:117] "RemoveContainer" containerID="33cd821346a573aa7c60d888955a767917f3516b07634b6ccea35a0b1b0543af" Jan 20 17:33:04 crc kubenswrapper[4558]: I0120 17:33:04.856459 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-nb-0_581cf384-3caa-4074-a6aa-526e8a65e9b2/ovsdbserver-nb/0.log" Jan 20 17:33:04 crc kubenswrapper[4558]: I0120 17:33:04.856619 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:33:04 crc kubenswrapper[4558]: I0120 17:33:04.902980 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-9c84d7f96-k2vb8" Jan 20 17:33:04 crc kubenswrapper[4558]: I0120 17:33:04.940230 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-5c6488b48b-4sj7k"] Jan 20 17:33:04 crc kubenswrapper[4558]: I0120 17:33:04.948088 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-5c6488b48b-4sj7k"] Jan 20 17:33:04 crc kubenswrapper[4558]: I0120 17:33:04.955549 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:33:04 crc kubenswrapper[4558]: I0120 17:33:04.963605 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:33:04 crc kubenswrapper[4558]: I0120 17:33:04.980638 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-api-74c98446fb-d49s5"] Jan 20 17:33:04 crc kubenswrapper[4558]: I0120 17:33:04.987395 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-api-74c98446fb-d49s5"] Jan 20 17:33:04 crc kubenswrapper[4558]: I0120 17:33:04.999833 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-nb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"581cf384-3caa-4074-a6aa-526e8a65e9b2\" (UID: \"581cf384-3caa-4074-a6aa-526e8a65e9b2\") " Jan 20 17:33:04 crc kubenswrapper[4558]: I0120 17:33:04.999924 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bwt2l\" (UniqueName: \"kubernetes.io/projected/581cf384-3caa-4074-a6aa-526e8a65e9b2-kube-api-access-bwt2l\") pod \"581cf384-3caa-4074-a6aa-526e8a65e9b2\" (UID: \"581cf384-3caa-4074-a6aa-526e8a65e9b2\") " Jan 20 17:33:05 crc kubenswrapper[4558]: I0120 17:33:04.999965 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/581cf384-3caa-4074-a6aa-526e8a65e9b2-scripts\") pod \"581cf384-3caa-4074-a6aa-526e8a65e9b2\" (UID: \"581cf384-3caa-4074-a6aa-526e8a65e9b2\") " Jan 20 17:33:05 crc kubenswrapper[4558]: I0120 17:33:05.000069 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/581cf384-3caa-4074-a6aa-526e8a65e9b2-combined-ca-bundle\") pod \"581cf384-3caa-4074-a6aa-526e8a65e9b2\" (UID: \"581cf384-3caa-4074-a6aa-526e8a65e9b2\") " Jan 20 17:33:05 crc kubenswrapper[4558]: I0120 17:33:05.000116 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/581cf384-3caa-4074-a6aa-526e8a65e9b2-config\") pod \"581cf384-3caa-4074-a6aa-526e8a65e9b2\" (UID: \"581cf384-3caa-4074-a6aa-526e8a65e9b2\") " Jan 20 17:33:05 crc kubenswrapper[4558]: I0120 17:33:05.000262 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/581cf384-3caa-4074-a6aa-526e8a65e9b2-ovsdb-rundir\") pod \"581cf384-3caa-4074-a6aa-526e8a65e9b2\" (UID: \"581cf384-3caa-4074-a6aa-526e8a65e9b2\") " Jan 20 17:33:05 crc kubenswrapper[4558]: I0120 17:33:05.003590 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/581cf384-3caa-4074-a6aa-526e8a65e9b2-config" (OuterVolumeSpecName: "config") pod "581cf384-3caa-4074-a6aa-526e8a65e9b2" (UID: "581cf384-3caa-4074-a6aa-526e8a65e9b2"). 
InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:33:05 crc kubenswrapper[4558]: I0120 17:33:05.004244 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:33:05 crc kubenswrapper[4558]: E0120 17:33:05.004731 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="581cf384-3caa-4074-a6aa-526e8a65e9b2" containerName="openstack-network-exporter" Jan 20 17:33:05 crc kubenswrapper[4558]: I0120 17:33:05.004750 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="581cf384-3caa-4074-a6aa-526e8a65e9b2" containerName="openstack-network-exporter" Jan 20 17:33:05 crc kubenswrapper[4558]: E0120 17:33:05.004796 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="581cf384-3caa-4074-a6aa-526e8a65e9b2" containerName="ovsdbserver-nb" Jan 20 17:33:05 crc kubenswrapper[4558]: I0120 17:33:05.004809 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="581cf384-3caa-4074-a6aa-526e8a65e9b2" containerName="ovsdbserver-nb" Jan 20 17:33:05 crc kubenswrapper[4558]: I0120 17:33:05.004990 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="581cf384-3caa-4074-a6aa-526e8a65e9b2" containerName="ovsdbserver-nb" Jan 20 17:33:05 crc kubenswrapper[4558]: I0120 17:33:05.005020 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="581cf384-3caa-4074-a6aa-526e8a65e9b2" containerName="openstack-network-exporter" Jan 20 17:33:05 crc kubenswrapper[4558]: I0120 17:33:05.004733 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/581cf384-3caa-4074-a6aa-526e8a65e9b2-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "581cf384-3caa-4074-a6aa-526e8a65e9b2" (UID: "581cf384-3caa-4074-a6aa-526e8a65e9b2"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:33:05 crc kubenswrapper[4558]: I0120 17:33:05.005770 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:33:05 crc kubenswrapper[4558]: I0120 17:33:05.005831 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/581cf384-3caa-4074-a6aa-526e8a65e9b2-scripts" (OuterVolumeSpecName: "scripts") pod "581cf384-3caa-4074-a6aa-526e8a65e9b2" (UID: "581cf384-3caa-4074-a6aa-526e8a65e9b2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:33:05 crc kubenswrapper[4558]: I0120 17:33:05.007989 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:33:05 crc kubenswrapper[4558]: I0120 17:33:05.008450 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-conductor-config-data" Jan 20 17:33:05 crc kubenswrapper[4558]: I0120 17:33:05.019457 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage13-crc" (OuterVolumeSpecName: "ovndbcluster-nb-etc-ovn") pod "581cf384-3caa-4074-a6aa-526e8a65e9b2" (UID: "581cf384-3caa-4074-a6aa-526e8a65e9b2"). InnerVolumeSpecName "local-storage13-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:33:05 crc kubenswrapper[4558]: I0120 17:33:05.030029 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/581cf384-3caa-4074-a6aa-526e8a65e9b2-kube-api-access-bwt2l" (OuterVolumeSpecName: "kube-api-access-bwt2l") pod "581cf384-3caa-4074-a6aa-526e8a65e9b2" (UID: "581cf384-3caa-4074-a6aa-526e8a65e9b2"). InnerVolumeSpecName "kube-api-access-bwt2l". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:33:05 crc kubenswrapper[4558]: I0120 17:33:05.103304 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/37fd3160-ee6a-41a1-9c4b-260e72133f59-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"37fd3160-ee6a-41a1-9c4b-260e72133f59\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:33:05 crc kubenswrapper[4558]: I0120 17:33:05.103917 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fc6ph\" (UniqueName: \"kubernetes.io/projected/37fd3160-ee6a-41a1-9c4b-260e72133f59-kube-api-access-fc6ph\") pod \"nova-cell0-conductor-0\" (UID: \"37fd3160-ee6a-41a1-9c4b-260e72133f59\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:33:05 crc kubenswrapper[4558]: I0120 17:33:05.103958 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37fd3160-ee6a-41a1-9c4b-260e72133f59-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"37fd3160-ee6a-41a1-9c4b-260e72133f59\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:33:05 crc kubenswrapper[4558]: I0120 17:33:05.104100 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/581cf384-3caa-4074-a6aa-526e8a65e9b2-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:05 crc kubenswrapper[4558]: I0120 17:33:05.104118 4558 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/581cf384-3caa-4074-a6aa-526e8a65e9b2-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:05 crc kubenswrapper[4558]: I0120 17:33:05.104153 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") on node \"crc\" " Jan 20 17:33:05 crc kubenswrapper[4558]: I0120 17:33:05.104178 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bwt2l\" (UniqueName: \"kubernetes.io/projected/581cf384-3caa-4074-a6aa-526e8a65e9b2-kube-api-access-bwt2l\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:05 crc kubenswrapper[4558]: I0120 17:33:05.104188 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/581cf384-3caa-4074-a6aa-526e8a65e9b2-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:05 crc kubenswrapper[4558]: I0120 17:33:05.208632 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fc6ph\" (UniqueName: \"kubernetes.io/projected/37fd3160-ee6a-41a1-9c4b-260e72133f59-kube-api-access-fc6ph\") pod \"nova-cell0-conductor-0\" (UID: \"37fd3160-ee6a-41a1-9c4b-260e72133f59\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:33:05 crc kubenswrapper[4558]: I0120 17:33:05.208676 4558 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37fd3160-ee6a-41a1-9c4b-260e72133f59-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"37fd3160-ee6a-41a1-9c4b-260e72133f59\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:33:05 crc kubenswrapper[4558]: I0120 17:33:05.208801 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/37fd3160-ee6a-41a1-9c4b-260e72133f59-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"37fd3160-ee6a-41a1-9c4b-260e72133f59\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:33:05 crc kubenswrapper[4558]: I0120 17:33:05.222785 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fc6ph\" (UniqueName: \"kubernetes.io/projected/37fd3160-ee6a-41a1-9c4b-260e72133f59-kube-api-access-fc6ph\") pod \"nova-cell0-conductor-0\" (UID: \"37fd3160-ee6a-41a1-9c4b-260e72133f59\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:33:05 crc kubenswrapper[4558]: I0120 17:33:05.246883 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37fd3160-ee6a-41a1-9c4b-260e72133f59-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"37fd3160-ee6a-41a1-9c4b-260e72133f59\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:33:05 crc kubenswrapper[4558]: I0120 17:33:05.248768 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/37fd3160-ee6a-41a1-9c4b-260e72133f59-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"37fd3160-ee6a-41a1-9c4b-260e72133f59\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:33:05 crc kubenswrapper[4558]: I0120 17:33:05.298930 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-9c84d7f96-k2vb8"] Jan 20 17:33:05 crc kubenswrapper[4558]: I0120 17:33:05.333458 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:33:05 crc kubenswrapper[4558]: I0120 17:33:05.337378 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage13-crc" (UniqueName: "kubernetes.io/local-volume/local-storage13-crc") on node "crc" Jan 20 17:33:05 crc kubenswrapper[4558]: I0120 17:33:05.417666 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:05 crc kubenswrapper[4558]: I0120 17:33:05.439543 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/581cf384-3caa-4074-a6aa-526e8a65e9b2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "581cf384-3caa-4074-a6aa-526e8a65e9b2" (UID: "581cf384-3caa-4074-a6aa-526e8a65e9b2"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:05 crc kubenswrapper[4558]: I0120 17:33:05.524177 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/581cf384-3caa-4074-a6aa-526e8a65e9b2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:05 crc kubenswrapper[4558]: I0120 17:33:05.851783 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-9c84d7f96-k2vb8" event={"ID":"25ac8e40-35ea-4d5b-8d8a-ee123c2e2fff","Type":"ContainerStarted","Data":"dab990962bd423061467701f509eb86caef0f4aae069688ebb9c4fa1d2c455d5"} Jan 20 17:33:05 crc kubenswrapper[4558]: I0120 17:33:05.852047 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-9c84d7f96-k2vb8" event={"ID":"25ac8e40-35ea-4d5b-8d8a-ee123c2e2fff","Type":"ContainerStarted","Data":"03663a328f32678b585838e982e6923562fdcb85fff77f09aaaf3c9fd37d3f88"} Jan 20 17:33:05 crc kubenswrapper[4558]: I0120 17:33:05.853324 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"d78c49cb-72eb-4def-9c40-b16128c74418","Type":"ContainerStarted","Data":"c9fcf8291fd575a7777eae1e5a6b38407c0f8527d5473b4afc554f44d2c8098e"} Jan 20 17:33:05 crc kubenswrapper[4558]: I0120 17:33:05.859030 4558 generic.go:334] "Generic (PLEG): container finished" podID="d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a" containerID="f06e9dc08abc874c209a5383b2994f653202e9f66eb7464ef36f77ca919a05a8" exitCode=143 Jan 20 17:33:05 crc kubenswrapper[4558]: I0120 17:33:05.859074 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-5459fb5f8-h5bbp" event={"ID":"d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a","Type":"ContainerDied","Data":"f06e9dc08abc874c209a5383b2994f653202e9f66eb7464ef36f77ca919a05a8"} Jan 20 17:33:05 crc kubenswrapper[4558]: I0120 17:33:05.892034 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"21c81b0a-c70b-4d57-bea0-96e4840af7dd","Type":"ContainerStarted","Data":"21722b5c1856dd9260830023e156411f09aabc0fe431ae02c3192f233463c11f"} Jan 20 17:33:05 crc kubenswrapper[4558]: I0120 17:33:05.897070 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-5b76bfdfd9-6pwrz" event={"ID":"377a0d4c-edf2-4912-a04f-0f814ea9efdb","Type":"ContainerStarted","Data":"8508c73a6d33c4073f424676d96dd78cecddaccb095da8154e3ebbb8171a2314"} Jan 20 17:33:05 crc kubenswrapper[4558]: I0120 17:33:05.920893 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-worker-5b76bfdfd9-6pwrz" podStartSLOduration=5.920874432 podStartE2EDuration="5.920874432s" podCreationTimestamp="2026-01-20 17:33:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:33:05.91276791 +0000 UTC m=+3079.673105877" watchObservedRunningTime="2026-01-20 17:33:05.920874432 +0000 UTC m=+3079.681212400" Jan 20 17:33:05 crc kubenswrapper[4558]: I0120 17:33:05.923688 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-7c4c8fc458-mv7sx" event={"ID":"3a9cf38d-4b8b-453b-bc39-b97deab68b72","Type":"ContainerStarted","Data":"6ed4c26e0b379eaa1189608d6dbca318d8f1ceda6ae39f733e1b04e40c4cbb57"} Jan 20 17:33:05 crc kubenswrapper[4558]: I0120 17:33:05.923868 4558 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-7c4c8fc458-mv7sx" podUID="3a9cf38d-4b8b-453b-bc39-b97deab68b72" containerName="neutron-api" containerID="cri-o://76e12ebe02e4aa0edbf97593983bce1f44046a97fbfe38a1b74fd48dd726a1c5" gracePeriod=30 Jan 20 17:33:05 crc kubenswrapper[4558]: I0120 17:33:05.924133 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/neutron-7c4c8fc458-mv7sx" Jan 20 17:33:05 crc kubenswrapper[4558]: I0120 17:33:05.924457 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-7c4c8fc458-mv7sx" podUID="3a9cf38d-4b8b-453b-bc39-b97deab68b72" containerName="neutron-httpd" containerID="cri-o://6ed4c26e0b379eaa1189608d6dbca318d8f1ceda6ae39f733e1b04e40c4cbb57" gracePeriod=30 Jan 20 17:33:05 crc kubenswrapper[4558]: I0120 17:33:05.938717 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-95f6c56c8-5t8cm" event={"ID":"e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a","Type":"ContainerStarted","Data":"498b22181051bf7df1d792470910cf1984b67820328f2a3e03b54681be9c8fb8"} Jan 20 17:33:05 crc kubenswrapper[4558]: I0120 17:33:05.938768 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/placement-95f6c56c8-5t8cm" Jan 20 17:33:05 crc kubenswrapper[4558]: I0120 17:33:05.939758 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/placement-95f6c56c8-5t8cm" Jan 20 17:33:05 crc kubenswrapper[4558]: I0120 17:33:05.954112 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:33:05 crc kubenswrapper[4558]: I0120 17:33:05.957704 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-b7d7d464-2s2ds" event={"ID":"f9691b53-0a90-45ee-bd69-c75b1195eeb8","Type":"ContainerStarted","Data":"355dd454cf13c43abb76b22e552d53dadf06d22d2c8ec34e79c955905ece6c1f"} Jan 20 17:33:05 crc kubenswrapper[4558]: I0120 17:33:05.957894 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/barbican-api-b7d7d464-2s2ds" Jan 20 17:33:05 crc kubenswrapper[4558]: I0120 17:33:05.957984 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/barbican-api-b7d7d464-2s2ds" Jan 20 17:33:05 crc kubenswrapper[4558]: I0120 17:33:05.959282 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-worker-777f6887b5-wwl56"] Jan 20 17:33:05 crc kubenswrapper[4558]: I0120 17:33:05.959461 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-worker-777f6887b5-wwl56" podUID="e0294130-264e-4fa0-b336-72c350ea61cc" containerName="barbican-worker-log" containerID="cri-o://62f48a277d2d3f69fc053a84173286db65e766dd294fabf68464419b15e8611b" gracePeriod=30 Jan 20 17:33:05 crc kubenswrapper[4558]: I0120 17:33:05.959485 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-worker-777f6887b5-wwl56" podUID="e0294130-264e-4fa0-b336-72c350ea61cc" containerName="barbican-worker" containerID="cri-o://c084c5e8cdb480d00ce491d7dbdb0d773ed2ee7cdfeacd273ca45bb3c51772de" gracePeriod=30 Jan 20 17:33:06 crc kubenswrapper[4558]: I0120 17:33:05.991629 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack-kuttl-tests/neutron-7c4c8fc458-mv7sx" podStartSLOduration=4.991607653 podStartE2EDuration="4.991607653s" podCreationTimestamp="2026-01-20 17:33:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:33:05.952743116 +0000 UTC m=+3079.713081083" watchObservedRunningTime="2026-01-20 17:33:05.991607653 +0000 UTC m=+3079.751945621" Jan 20 17:33:06 crc kubenswrapper[4558]: I0120 17:33:06.006100 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/placement-95f6c56c8-5t8cm" podStartSLOduration=5.006067092 podStartE2EDuration="5.006067092s" podCreationTimestamp="2026-01-20 17:33:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:33:05.984656793 +0000 UTC m=+3079.744994780" watchObservedRunningTime="2026-01-20 17:33:06.006067092 +0000 UTC m=+3079.766405079" Jan 20 17:33:06 crc kubenswrapper[4558]: I0120 17:33:06.027548 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:33:06 crc kubenswrapper[4558]: I0120 17:33:06.036463 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-api-b7d7d464-2s2ds" podStartSLOduration=5.036440734 podStartE2EDuration="5.036440734s" podCreationTimestamp="2026-01-20 17:33:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:33:06.009784792 +0000 UTC m=+3079.770122759" watchObservedRunningTime="2026-01-20 17:33:06.036440734 +0000 UTC m=+3079.796778701" Jan 20 17:33:06 crc kubenswrapper[4558]: I0120 17:33:06.153855 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:33:06 crc kubenswrapper[4558]: E0120 17:33:06.168599 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="46edde77e0df814c67d5b21fe72058f3eb4efd6acd4a7f68a737219cd8448b40" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:33:06 crc kubenswrapper[4558]: I0120 17:33:06.174060 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:33:06 crc kubenswrapper[4558]: E0120 17:33:06.181273 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="46edde77e0df814c67d5b21fe72058f3eb4efd6acd4a7f68a737219cd8448b40" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:33:06 crc kubenswrapper[4558]: I0120 17:33:06.187350 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:33:06 crc kubenswrapper[4558]: I0120 17:33:06.191250 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:33:06 crc kubenswrapper[4558]: I0120 17:33:06.196709 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ovndbcluster-nb-ovndbs" Jan 20 17:33:06 crc kubenswrapper[4558]: I0120 17:33:06.196908 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ovncluster-ovndbcluster-nb-dockercfg-djpzb" Jan 20 17:33:06 crc kubenswrapper[4558]: I0120 17:33:06.197033 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovndbcluster-nb-scripts" Jan 20 17:33:06 crc kubenswrapper[4558]: E0120 17:33:06.197114 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="46edde77e0df814c67d5b21fe72058f3eb4efd6acd4a7f68a737219cd8448b40" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:33:06 crc kubenswrapper[4558]: I0120 17:33:06.197180 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ovn-metrics" Jan 20 17:33:06 crc kubenswrapper[4558]: E0120 17:33:06.197192 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podUID="c686e4c2-68d0-4999-9078-90c70927d9ae" containerName="nova-cell1-conductor-conductor" Jan 20 17:33:06 crc kubenswrapper[4558]: I0120 17:33:06.197193 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:33:06 crc kubenswrapper[4558]: I0120 17:33:06.197494 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovndbcluster-nb-config" Jan 20 17:33:06 crc kubenswrapper[4558]: I0120 17:33:06.349199 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dcbebc6e-c10d-4a59-9bdb-cdf2678250f8-config\") pod \"ovsdbserver-nb-0\" (UID: \"dcbebc6e-c10d-4a59-9bdb-cdf2678250f8\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:33:06 crc kubenswrapper[4558]: I0120 17:33:06.349352 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dcbebc6e-c10d-4a59-9bdb-cdf2678250f8-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"dcbebc6e-c10d-4a59-9bdb-cdf2678250f8\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:33:06 crc kubenswrapper[4558]: I0120 17:33:06.349398 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6mbd9\" (UniqueName: \"kubernetes.io/projected/dcbebc6e-c10d-4a59-9bdb-cdf2678250f8-kube-api-access-6mbd9\") pod \"ovsdbserver-nb-0\" (UID: \"dcbebc6e-c10d-4a59-9bdb-cdf2678250f8\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:33:06 crc kubenswrapper[4558]: I0120 17:33:06.349442 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/dcbebc6e-c10d-4a59-9bdb-cdf2678250f8-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"dcbebc6e-c10d-4a59-9bdb-cdf2678250f8\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" 
Jan 20 17:33:06 crc kubenswrapper[4558]: I0120 17:33:06.349460 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/dcbebc6e-c10d-4a59-9bdb-cdf2678250f8-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"dcbebc6e-c10d-4a59-9bdb-cdf2678250f8\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:33:06 crc kubenswrapper[4558]: I0120 17:33:06.349496 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/dcbebc6e-c10d-4a59-9bdb-cdf2678250f8-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"dcbebc6e-c10d-4a59-9bdb-cdf2678250f8\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:33:06 crc kubenswrapper[4558]: I0120 17:33:06.349601 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"ovsdbserver-nb-0\" (UID: \"dcbebc6e-c10d-4a59-9bdb-cdf2678250f8\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:33:06 crc kubenswrapper[4558]: I0120 17:33:06.349679 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/dcbebc6e-c10d-4a59-9bdb-cdf2678250f8-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"dcbebc6e-c10d-4a59-9bdb-cdf2678250f8\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:33:06 crc kubenswrapper[4558]: I0120 17:33:06.378202 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:33:06 crc kubenswrapper[4558]: I0120 17:33:06.378821 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovsdbserver-sb-0" podUID="31c9af7b-b151-4eaa-b090-efecddd08c46" containerName="openstack-network-exporter" containerID="cri-o://309c8fa54d3864dc5b564b635ee338b83b4c559bc90397337a276c3ed181323e" gracePeriod=300 Jan 20 17:33:06 crc kubenswrapper[4558]: I0120 17:33:06.451793 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dcbebc6e-c10d-4a59-9bdb-cdf2678250f8-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"dcbebc6e-c10d-4a59-9bdb-cdf2678250f8\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:33:06 crc kubenswrapper[4558]: I0120 17:33:06.451858 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6mbd9\" (UniqueName: \"kubernetes.io/projected/dcbebc6e-c10d-4a59-9bdb-cdf2678250f8-kube-api-access-6mbd9\") pod \"ovsdbserver-nb-0\" (UID: \"dcbebc6e-c10d-4a59-9bdb-cdf2678250f8\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:33:06 crc kubenswrapper[4558]: I0120 17:33:06.451902 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/dcbebc6e-c10d-4a59-9bdb-cdf2678250f8-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"dcbebc6e-c10d-4a59-9bdb-cdf2678250f8\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:33:06 crc kubenswrapper[4558]: I0120 17:33:06.451919 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/dcbebc6e-c10d-4a59-9bdb-cdf2678250f8-metrics-certs-tls-certs\") pod 
\"ovsdbserver-nb-0\" (UID: \"dcbebc6e-c10d-4a59-9bdb-cdf2678250f8\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:33:06 crc kubenswrapper[4558]: I0120 17:33:06.451949 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/dcbebc6e-c10d-4a59-9bdb-cdf2678250f8-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"dcbebc6e-c10d-4a59-9bdb-cdf2678250f8\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:33:06 crc kubenswrapper[4558]: I0120 17:33:06.452045 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"ovsdbserver-nb-0\" (UID: \"dcbebc6e-c10d-4a59-9bdb-cdf2678250f8\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:33:06 crc kubenswrapper[4558]: I0120 17:33:06.452110 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/dcbebc6e-c10d-4a59-9bdb-cdf2678250f8-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"dcbebc6e-c10d-4a59-9bdb-cdf2678250f8\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:33:06 crc kubenswrapper[4558]: I0120 17:33:06.452212 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dcbebc6e-c10d-4a59-9bdb-cdf2678250f8-config\") pod \"ovsdbserver-nb-0\" (UID: \"dcbebc6e-c10d-4a59-9bdb-cdf2678250f8\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:33:06 crc kubenswrapper[4558]: I0120 17:33:06.454072 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"ovsdbserver-nb-0\" (UID: \"dcbebc6e-c10d-4a59-9bdb-cdf2678250f8\") device mount path \"/mnt/openstack/pv13\"" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:33:06 crc kubenswrapper[4558]: I0120 17:33:06.455933 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/dcbebc6e-c10d-4a59-9bdb-cdf2678250f8-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"dcbebc6e-c10d-4a59-9bdb-cdf2678250f8\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:33:06 crc kubenswrapper[4558]: I0120 17:33:06.456577 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dcbebc6e-c10d-4a59-9bdb-cdf2678250f8-config\") pod \"ovsdbserver-nb-0\" (UID: \"dcbebc6e-c10d-4a59-9bdb-cdf2678250f8\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:33:06 crc kubenswrapper[4558]: I0120 17:33:06.456861 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/dcbebc6e-c10d-4a59-9bdb-cdf2678250f8-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"dcbebc6e-c10d-4a59-9bdb-cdf2678250f8\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:33:06 crc kubenswrapper[4558]: I0120 17:33:06.462783 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/dcbebc6e-c10d-4a59-9bdb-cdf2678250f8-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"dcbebc6e-c10d-4a59-9bdb-cdf2678250f8\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:33:06 crc kubenswrapper[4558]: I0120 17:33:06.464917 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/dcbebc6e-c10d-4a59-9bdb-cdf2678250f8-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"dcbebc6e-c10d-4a59-9bdb-cdf2678250f8\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:33:06 crc kubenswrapper[4558]: I0120 17:33:06.473632 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dcbebc6e-c10d-4a59-9bdb-cdf2678250f8-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"dcbebc6e-c10d-4a59-9bdb-cdf2678250f8\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:33:06 crc kubenswrapper[4558]: I0120 17:33:06.474234 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6mbd9\" (UniqueName: \"kubernetes.io/projected/dcbebc6e-c10d-4a59-9bdb-cdf2678250f8-kube-api-access-6mbd9\") pod \"ovsdbserver-nb-0\" (UID: \"dcbebc6e-c10d-4a59-9bdb-cdf2678250f8\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:33:06 crc kubenswrapper[4558]: I0120 17:33:06.516988 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovsdbserver-sb-0" podUID="31c9af7b-b151-4eaa-b090-efecddd08c46" containerName="ovsdbserver-sb" containerID="cri-o://cddbf5e0f0aa3f8140abd8d8060e08f69d0761eab29715426c1715bdc2833585" gracePeriod=300 Jan 20 17:33:06 crc kubenswrapper[4558]: I0120 17:33:06.539999 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"ovsdbserver-nb-0\" (UID: \"dcbebc6e-c10d-4a59-9bdb-cdf2678250f8\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:33:06 crc kubenswrapper[4558]: I0120 17:33:06.624485 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="48ebccca-cc18-415f-b6f8-f301f99e5d3a" path="/var/lib/kubelet/pods/48ebccca-cc18-415f-b6f8-f301f99e5d3a/volumes" Jan 20 17:33:06 crc kubenswrapper[4558]: I0120 17:33:06.625477 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="581cf384-3caa-4074-a6aa-526e8a65e9b2" path="/var/lib/kubelet/pods/581cf384-3caa-4074-a6aa-526e8a65e9b2/volumes" Jan 20 17:33:06 crc kubenswrapper[4558]: I0120 17:33:06.626045 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6dbc6996-80bf-4f86-9acf-f74dd04e51b1" path="/var/lib/kubelet/pods/6dbc6996-80bf-4f86-9acf-f74dd04e51b1/volumes" Jan 20 17:33:06 crc kubenswrapper[4558]: I0120 17:33:06.627780 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e83a55be-4002-403a-911f-8f33c234b7e4" path="/var/lib/kubelet/pods/e83a55be-4002-403a-911f-8f33c234b7e4/volumes" Jan 20 17:33:06 crc kubenswrapper[4558]: I0120 17:33:06.826640 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:33:06 crc kubenswrapper[4558]: I0120 17:33:06.858155 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-9c84d7f96-k2vb8"] Jan 20 17:33:06 crc kubenswrapper[4558]: I0120 17:33:06.908733 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-5bb5cf79fd-qwwlx"] Jan 20 17:33:06 crc kubenswrapper[4558]: I0120 17:33:06.910585 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-5bb5cf79fd-qwwlx" Jan 20 17:33:06 crc kubenswrapper[4558]: I0120 17:33:06.920481 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-5bb5cf79fd-qwwlx"] Jan 20 17:33:06 crc kubenswrapper[4558]: I0120 17:33:06.997423 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rhgsm\" (UniqueName: \"kubernetes.io/projected/d48e656f-2dc6-4ecc-ba26-40661e7443be-kube-api-access-rhgsm\") pod \"neutron-5bb5cf79fd-qwwlx\" (UID: \"d48e656f-2dc6-4ecc-ba26-40661e7443be\") " pod="openstack-kuttl-tests/neutron-5bb5cf79fd-qwwlx" Jan 20 17:33:06 crc kubenswrapper[4558]: I0120 17:33:06.997528 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/d48e656f-2dc6-4ecc-ba26-40661e7443be-httpd-config\") pod \"neutron-5bb5cf79fd-qwwlx\" (UID: \"d48e656f-2dc6-4ecc-ba26-40661e7443be\") " pod="openstack-kuttl-tests/neutron-5bb5cf79fd-qwwlx" Jan 20 17:33:06 crc kubenswrapper[4558]: I0120 17:33:06.997565 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d48e656f-2dc6-4ecc-ba26-40661e7443be-combined-ca-bundle\") pod \"neutron-5bb5cf79fd-qwwlx\" (UID: \"d48e656f-2dc6-4ecc-ba26-40661e7443be\") " pod="openstack-kuttl-tests/neutron-5bb5cf79fd-qwwlx" Jan 20 17:33:06 crc kubenswrapper[4558]: I0120 17:33:06.997598 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/d48e656f-2dc6-4ecc-ba26-40661e7443be-config\") pod \"neutron-5bb5cf79fd-qwwlx\" (UID: \"d48e656f-2dc6-4ecc-ba26-40661e7443be\") " pod="openstack-kuttl-tests/neutron-5bb5cf79fd-qwwlx" Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.020795 4558 generic.go:334] "Generic (PLEG): container finished" podID="1038c23d-3108-472d-b43c-4181fd8ac365" containerID="e2a18338fd9ab69e3f54fc92343898be5bb0e50f61066e2c11e95d851b940179" exitCode=0 Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.020866 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-5f4bd4844-95dpq" event={"ID":"1038c23d-3108-472d-b43c-4181fd8ac365","Type":"ContainerDied","Data":"e2a18338fd9ab69e3f54fc92343898be5bb0e50f61066e2c11e95d851b940179"} Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.026583 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"37fd3160-ee6a-41a1-9c4b-260e72133f59","Type":"ContainerStarted","Data":"ca483fd25cf153fcad14ca9739754a6effd1115ea5c7b991da3c23aa4caa603c"} Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.026661 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"37fd3160-ee6a-41a1-9c4b-260e72133f59","Type":"ContainerStarted","Data":"fae23ddf543587439d5d9e8fd6cb43aec782fbdee10fa24c830597e1c43ab0ed"} Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.027314 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.029384 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-9c84d7f96-k2vb8" 
event={"ID":"25ac8e40-35ea-4d5b-8d8a-ee123c2e2fff","Type":"ContainerStarted","Data":"22c9efbba433655e1e32d8ca30bd57190a70d36b9688d02f701b38f551db7ae8"} Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.029754 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/neutron-9c84d7f96-k2vb8" Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.039624 4558 generic.go:334] "Generic (PLEG): container finished" podID="3a9cf38d-4b8b-453b-bc39-b97deab68b72" containerID="6ed4c26e0b379eaa1189608d6dbca318d8f1ceda6ae39f733e1b04e40c4cbb57" exitCode=0 Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.039672 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-7c4c8fc458-mv7sx" event={"ID":"3a9cf38d-4b8b-453b-bc39-b97deab68b72","Type":"ContainerDied","Data":"6ed4c26e0b379eaa1189608d6dbca318d8f1ceda6ae39f733e1b04e40c4cbb57"} Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.041198 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"d78c49cb-72eb-4def-9c40-b16128c74418","Type":"ContainerStarted","Data":"8da85592700c5420bc1d1bbe307f08e0a71e1e162220d3e5dbe3bda34763061f"} Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.044555 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"21c81b0a-c70b-4d57-bea0-96e4840af7dd","Type":"ContainerStarted","Data":"5f7d9bee57d3863532f7d1c9fc246fa579caa23eddd68f1cd8f774b112683665"} Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.048344 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podStartSLOduration=3.048327522 podStartE2EDuration="3.048327522s" podCreationTimestamp="2026-01-20 17:33:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:33:07.045992872 +0000 UTC m=+3080.806330839" watchObservedRunningTime="2026-01-20 17:33:07.048327522 +0000 UTC m=+3080.808665489" Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.054174 4558 generic.go:334] "Generic (PLEG): container finished" podID="e0294130-264e-4fa0-b336-72c350ea61cc" containerID="62f48a277d2d3f69fc053a84173286db65e766dd294fabf68464419b15e8611b" exitCode=143 Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.054224 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-777f6887b5-wwl56" event={"ID":"e0294130-264e-4fa0-b336-72c350ea61cc","Type":"ContainerDied","Data":"62f48a277d2d3f69fc053a84173286db65e766dd294fabf68464419b15e8611b"} Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.075267 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-sb-0_31c9af7b-b151-4eaa-b090-efecddd08c46/ovsdbserver-sb/0.log" Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.075298 4558 generic.go:334] "Generic (PLEG): container finished" podID="31c9af7b-b151-4eaa-b090-efecddd08c46" containerID="309c8fa54d3864dc5b564b635ee338b83b4c559bc90397337a276c3ed181323e" exitCode=2 Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.075310 4558 generic.go:334] "Generic (PLEG): container finished" podID="31c9af7b-b151-4eaa-b090-efecddd08c46" containerID="cddbf5e0f0aa3f8140abd8d8060e08f69d0761eab29715426c1715bdc2833585" exitCode=143 Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.075836 4558 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"31c9af7b-b151-4eaa-b090-efecddd08c46","Type":"ContainerDied","Data":"309c8fa54d3864dc5b564b635ee338b83b4c559bc90397337a276c3ed181323e"} Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.075862 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"31c9af7b-b151-4eaa-b090-efecddd08c46","Type":"ContainerDied","Data":"cddbf5e0f0aa3f8140abd8d8060e08f69d0761eab29715426c1715bdc2833585"} Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.085312 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-sb-0_31c9af7b-b151-4eaa-b090-efecddd08c46/ovsdbserver-sb/0.log" Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.085362 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.100834 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/d48e656f-2dc6-4ecc-ba26-40661e7443be-config\") pod \"neutron-5bb5cf79fd-qwwlx\" (UID: \"d48e656f-2dc6-4ecc-ba26-40661e7443be\") " pod="openstack-kuttl-tests/neutron-5bb5cf79fd-qwwlx" Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.100908 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rhgsm\" (UniqueName: \"kubernetes.io/projected/d48e656f-2dc6-4ecc-ba26-40661e7443be-kube-api-access-rhgsm\") pod \"neutron-5bb5cf79fd-qwwlx\" (UID: \"d48e656f-2dc6-4ecc-ba26-40661e7443be\") " pod="openstack-kuttl-tests/neutron-5bb5cf79fd-qwwlx" Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.101062 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/d48e656f-2dc6-4ecc-ba26-40661e7443be-httpd-config\") pod \"neutron-5bb5cf79fd-qwwlx\" (UID: \"d48e656f-2dc6-4ecc-ba26-40661e7443be\") " pod="openstack-kuttl-tests/neutron-5bb5cf79fd-qwwlx" Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.101142 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d48e656f-2dc6-4ecc-ba26-40661e7443be-combined-ca-bundle\") pod \"neutron-5bb5cf79fd-qwwlx\" (UID: \"d48e656f-2dc6-4ecc-ba26-40661e7443be\") " pod="openstack-kuttl-tests/neutron-5bb5cf79fd-qwwlx" Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.108827 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/d48e656f-2dc6-4ecc-ba26-40661e7443be-config\") pod \"neutron-5bb5cf79fd-qwwlx\" (UID: \"d48e656f-2dc6-4ecc-ba26-40661e7443be\") " pod="openstack-kuttl-tests/neutron-5bb5cf79fd-qwwlx" Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.109551 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d48e656f-2dc6-4ecc-ba26-40661e7443be-combined-ca-bundle\") pod \"neutron-5bb5cf79fd-qwwlx\" (UID: \"d48e656f-2dc6-4ecc-ba26-40661e7443be\") " pod="openstack-kuttl-tests/neutron-5bb5cf79fd-qwwlx" Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.118749 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/d48e656f-2dc6-4ecc-ba26-40661e7443be-httpd-config\") pod \"neutron-5bb5cf79fd-qwwlx\" 
(UID: \"d48e656f-2dc6-4ecc-ba26-40661e7443be\") " pod="openstack-kuttl-tests/neutron-5bb5cf79fd-qwwlx" Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.123477 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-external-api-0" podStartSLOduration=5.123453022 podStartE2EDuration="5.123453022s" podCreationTimestamp="2026-01-20 17:33:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:33:07.069147369 +0000 UTC m=+3080.829485337" watchObservedRunningTime="2026-01-20 17:33:07.123453022 +0000 UTC m=+3080.883790989" Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.126829 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rhgsm\" (UniqueName: \"kubernetes.io/projected/d48e656f-2dc6-4ecc-ba26-40661e7443be-kube-api-access-rhgsm\") pod \"neutron-5bb5cf79fd-qwwlx\" (UID: \"d48e656f-2dc6-4ecc-ba26-40661e7443be\") " pod="openstack-kuttl-tests/neutron-5bb5cf79fd-qwwlx" Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.132497 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/neutron-9c84d7f96-k2vb8" podStartSLOduration=4.132480326 podStartE2EDuration="4.132480326s" podCreationTimestamp="2026-01-20 17:33:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:33:07.09033486 +0000 UTC m=+3080.850672846" watchObservedRunningTime="2026-01-20 17:33:07.132480326 +0000 UTC m=+3080.892818293" Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.142364 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-internal-api-0" podStartSLOduration=5.142346618 podStartE2EDuration="5.142346618s" podCreationTimestamp="2026-01-20 17:33:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:33:07.111025754 +0000 UTC m=+3080.871363731" watchObservedRunningTime="2026-01-20 17:33:07.142346618 +0000 UTC m=+3080.902684585" Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.187335 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-5f4bd4844-95dpq" Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.201705 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31c9af7b-b151-4eaa-b090-efecddd08c46-combined-ca-bundle\") pod \"31c9af7b-b151-4eaa-b090-efecddd08c46\" (UID: \"31c9af7b-b151-4eaa-b090-efecddd08c46\") " Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.201816 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6n69g\" (UniqueName: \"kubernetes.io/projected/1038c23d-3108-472d-b43c-4181fd8ac365-kube-api-access-6n69g\") pod \"1038c23d-3108-472d-b43c-4181fd8ac365\" (UID: \"1038c23d-3108-472d-b43c-4181fd8ac365\") " Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.201859 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/31c9af7b-b151-4eaa-b090-efecddd08c46-ovsdb-rundir\") pod \"31c9af7b-b151-4eaa-b090-efecddd08c46\" (UID: \"31c9af7b-b151-4eaa-b090-efecddd08c46\") " Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.201884 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1038c23d-3108-472d-b43c-4181fd8ac365-internal-tls-certs\") pod \"1038c23d-3108-472d-b43c-4181fd8ac365\" (UID: \"1038c23d-3108-472d-b43c-4181fd8ac365\") " Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.201905 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1038c23d-3108-472d-b43c-4181fd8ac365-config-data\") pod \"1038c23d-3108-472d-b43c-4181fd8ac365\" (UID: \"1038c23d-3108-472d-b43c-4181fd8ac365\") " Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.201979 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-sb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"31c9af7b-b151-4eaa-b090-efecddd08c46\" (UID: \"31c9af7b-b151-4eaa-b090-efecddd08c46\") " Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.202006 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/31c9af7b-b151-4eaa-b090-efecddd08c46-config\") pod \"31c9af7b-b151-4eaa-b090-efecddd08c46\" (UID: \"31c9af7b-b151-4eaa-b090-efecddd08c46\") " Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.202040 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gt5pj\" (UniqueName: \"kubernetes.io/projected/31c9af7b-b151-4eaa-b090-efecddd08c46-kube-api-access-gt5pj\") pod \"31c9af7b-b151-4eaa-b090-efecddd08c46\" (UID: \"31c9af7b-b151-4eaa-b090-efecddd08c46\") " Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.202067 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/31c9af7b-b151-4eaa-b090-efecddd08c46-scripts\") pod \"31c9af7b-b151-4eaa-b090-efecddd08c46\" (UID: \"31c9af7b-b151-4eaa-b090-efecddd08c46\") " Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.202097 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1038c23d-3108-472d-b43c-4181fd8ac365-logs\") pod \"1038c23d-3108-472d-b43c-4181fd8ac365\" (UID: 
\"1038c23d-3108-472d-b43c-4181fd8ac365\") " Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.202187 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1038c23d-3108-472d-b43c-4181fd8ac365-scripts\") pod \"1038c23d-3108-472d-b43c-4181fd8ac365\" (UID: \"1038c23d-3108-472d-b43c-4181fd8ac365\") " Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.202233 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1038c23d-3108-472d-b43c-4181fd8ac365-combined-ca-bundle\") pod \"1038c23d-3108-472d-b43c-4181fd8ac365\" (UID: \"1038c23d-3108-472d-b43c-4181fd8ac365\") " Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.202261 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1038c23d-3108-472d-b43c-4181fd8ac365-public-tls-certs\") pod \"1038c23d-3108-472d-b43c-4181fd8ac365\" (UID: \"1038c23d-3108-472d-b43c-4181fd8ac365\") " Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.207968 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31c9af7b-b151-4eaa-b090-efecddd08c46-scripts" (OuterVolumeSpecName: "scripts") pod "31c9af7b-b151-4eaa-b090-efecddd08c46" (UID: "31c9af7b-b151-4eaa-b090-efecddd08c46"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.208636 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/31c9af7b-b151-4eaa-b090-efecddd08c46-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "31c9af7b-b151-4eaa-b090-efecddd08c46" (UID: "31c9af7b-b151-4eaa-b090-efecddd08c46"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.211606 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31c9af7b-b151-4eaa-b090-efecddd08c46-config" (OuterVolumeSpecName: "config") pod "31c9af7b-b151-4eaa-b090-efecddd08c46" (UID: "31c9af7b-b151-4eaa-b090-efecddd08c46"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.211750 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31c9af7b-b151-4eaa-b090-efecddd08c46-kube-api-access-gt5pj" (OuterVolumeSpecName: "kube-api-access-gt5pj") pod "31c9af7b-b151-4eaa-b090-efecddd08c46" (UID: "31c9af7b-b151-4eaa-b090-efecddd08c46"). InnerVolumeSpecName "kube-api-access-gt5pj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.212448 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1038c23d-3108-472d-b43c-4181fd8ac365-logs" (OuterVolumeSpecName: "logs") pod "1038c23d-3108-472d-b43c-4181fd8ac365" (UID: "1038c23d-3108-472d-b43c-4181fd8ac365"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.212730 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "ovndbcluster-sb-etc-ovn") pod "31c9af7b-b151-4eaa-b090-efecddd08c46" (UID: "31c9af7b-b151-4eaa-b090-efecddd08c46"). InnerVolumeSpecName "local-storage04-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.216114 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1038c23d-3108-472d-b43c-4181fd8ac365-kube-api-access-6n69g" (OuterVolumeSpecName: "kube-api-access-6n69g") pod "1038c23d-3108-472d-b43c-4181fd8ac365" (UID: "1038c23d-3108-472d-b43c-4181fd8ac365"). InnerVolumeSpecName "kube-api-access-6n69g". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.217489 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1038c23d-3108-472d-b43c-4181fd8ac365-scripts" (OuterVolumeSpecName: "scripts") pod "1038c23d-3108-472d-b43c-4181fd8ac365" (UID: "1038c23d-3108-472d-b43c-4181fd8ac365"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.256250 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31c9af7b-b151-4eaa-b090-efecddd08c46-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "31c9af7b-b151-4eaa-b090-efecddd08c46" (UID: "31c9af7b-b151-4eaa-b090-efecddd08c46"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.259687 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-5bb5cf79fd-qwwlx" Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.263490 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/cinder-api-0" podUID="846821b5-5647-4ffa-a1bb-8695fe90cbcc" containerName="cinder-api" probeResult="failure" output="Get \"http://10.217.1.220:8776/healthcheck\": read tcp 10.217.0.2:59784->10.217.1.220:8776: read: connection reset by peer" Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.306306 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1038c23d-3108-472d-b43c-4181fd8ac365-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.306340 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31c9af7b-b151-4eaa-b090-efecddd08c46-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.306354 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6n69g\" (UniqueName: \"kubernetes.io/projected/1038c23d-3108-472d-b43c-4181fd8ac365-kube-api-access-6n69g\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.306364 4558 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/31c9af7b-b151-4eaa-b090-efecddd08c46-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.306395 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" " Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.306404 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/31c9af7b-b151-4eaa-b090-efecddd08c46-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.306414 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gt5pj\" (UniqueName: \"kubernetes.io/projected/31c9af7b-b151-4eaa-b090-efecddd08c46-kube-api-access-gt5pj\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.306424 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/31c9af7b-b151-4eaa-b090-efecddd08c46-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.306433 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1038c23d-3108-472d-b43c-4181fd8ac365-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:07 crc kubenswrapper[4558]: E0120 17:33:07.324993 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a333ee4a53687d89af384ce1c1e7dd6ab814cf37eae12fca495be4bf694c8f99" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Jan 20 17:33:07 crc kubenswrapper[4558]: E0120 17:33:07.327043 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" 
containerID="a333ee4a53687d89af384ce1c1e7dd6ab814cf37eae12fca495be4bf694c8f99" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Jan 20 17:33:07 crc kubenswrapper[4558]: E0120 17:33:07.332761 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a333ee4a53687d89af384ce1c1e7dd6ab814cf37eae12fca495be4bf694c8f99" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Jan 20 17:33:07 crc kubenswrapper[4558]: E0120 17:33:07.332835 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/ovn-northd-0" podUID="6ecac33c-30fa-4996-b271-318148e62416" containerName="ovn-northd" Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.352761 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1038c23d-3108-472d-b43c-4181fd8ac365-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1038c23d-3108-472d-b43c-4181fd8ac365" (UID: "1038c23d-3108-472d-b43c-4181fd8ac365"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.354714 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc" Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.374660 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1038c23d-3108-472d-b43c-4181fd8ac365-config-data" (OuterVolumeSpecName: "config-data") pod "1038c23d-3108-472d-b43c-4181fd8ac365" (UID: "1038c23d-3108-472d-b43c-4181fd8ac365"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.394591 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1038c23d-3108-472d-b43c-4181fd8ac365-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "1038c23d-3108-472d-b43c-4181fd8ac365" (UID: "1038c23d-3108-472d-b43c-4181fd8ac365"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.408191 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1038c23d-3108-472d-b43c-4181fd8ac365-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.408227 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1038c23d-3108-472d-b43c-4181fd8ac365-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.408237 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1038c23d-3108-472d-b43c-4181fd8ac365-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.408246 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.414827 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1038c23d-3108-472d-b43c-4181fd8ac365-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "1038c23d-3108-472d-b43c-4181fd8ac365" (UID: "1038c23d-3108-472d-b43c-4181fd8ac365"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.423055 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.511405 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1038c23d-3108-472d-b43c-4181fd8ac365-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.890683 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.927796 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/846821b5-5647-4ffa-a1bb-8695fe90cbcc-config-data\") pod \"846821b5-5647-4ffa-a1bb-8695fe90cbcc\" (UID: \"846821b5-5647-4ffa-a1bb-8695fe90cbcc\") " Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.927852 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/846821b5-5647-4ffa-a1bb-8695fe90cbcc-scripts\") pod \"846821b5-5647-4ffa-a1bb-8695fe90cbcc\" (UID: \"846821b5-5647-4ffa-a1bb-8695fe90cbcc\") " Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.927918 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d5qdl\" (UniqueName: \"kubernetes.io/projected/846821b5-5647-4ffa-a1bb-8695fe90cbcc-kube-api-access-d5qdl\") pod \"846821b5-5647-4ffa-a1bb-8695fe90cbcc\" (UID: \"846821b5-5647-4ffa-a1bb-8695fe90cbcc\") " Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.928019 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/846821b5-5647-4ffa-a1bb-8695fe90cbcc-logs\") pod \"846821b5-5647-4ffa-a1bb-8695fe90cbcc\" (UID: \"846821b5-5647-4ffa-a1bb-8695fe90cbcc\") " Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.928051 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/846821b5-5647-4ffa-a1bb-8695fe90cbcc-combined-ca-bundle\") pod \"846821b5-5647-4ffa-a1bb-8695fe90cbcc\" (UID: \"846821b5-5647-4ffa-a1bb-8695fe90cbcc\") " Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.928076 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/846821b5-5647-4ffa-a1bb-8695fe90cbcc-etc-machine-id\") pod \"846821b5-5647-4ffa-a1bb-8695fe90cbcc\" (UID: \"846821b5-5647-4ffa-a1bb-8695fe90cbcc\") " Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.928152 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/846821b5-5647-4ffa-a1bb-8695fe90cbcc-config-data-custom\") pod \"846821b5-5647-4ffa-a1bb-8695fe90cbcc\" (UID: \"846821b5-5647-4ffa-a1bb-8695fe90cbcc\") " Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.928720 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/846821b5-5647-4ffa-a1bb-8695fe90cbcc-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "846821b5-5647-4ffa-a1bb-8695fe90cbcc" (UID: "846821b5-5647-4ffa-a1bb-8695fe90cbcc"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.929086 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/846821b5-5647-4ffa-a1bb-8695fe90cbcc-logs" (OuterVolumeSpecName: "logs") pod "846821b5-5647-4ffa-a1bb-8695fe90cbcc" (UID: "846821b5-5647-4ffa-a1bb-8695fe90cbcc"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.932934 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/846821b5-5647-4ffa-a1bb-8695fe90cbcc-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "846821b5-5647-4ffa-a1bb-8695fe90cbcc" (UID: "846821b5-5647-4ffa-a1bb-8695fe90cbcc"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.932999 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/846821b5-5647-4ffa-a1bb-8695fe90cbcc-kube-api-access-d5qdl" (OuterVolumeSpecName: "kube-api-access-d5qdl") pod "846821b5-5647-4ffa-a1bb-8695fe90cbcc" (UID: "846821b5-5647-4ffa-a1bb-8695fe90cbcc"). InnerVolumeSpecName "kube-api-access-d5qdl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.933449 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/846821b5-5647-4ffa-a1bb-8695fe90cbcc-scripts" (OuterVolumeSpecName: "scripts") pod "846821b5-5647-4ffa-a1bb-8695fe90cbcc" (UID: "846821b5-5647-4ffa-a1bb-8695fe90cbcc"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.972771 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/846821b5-5647-4ffa-a1bb-8695fe90cbcc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "846821b5-5647-4ffa-a1bb-8695fe90cbcc" (UID: "846821b5-5647-4ffa-a1bb-8695fe90cbcc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:07 crc kubenswrapper[4558]: I0120 17:33:07.992732 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-5bb5cf79fd-qwwlx"] Jan 20 17:33:08 crc kubenswrapper[4558]: W0120 17:33:08.007240 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd48e656f_2dc6_4ecc_ba26_40661e7443be.slice/crio-2702f28aa9af14f23e966e6e9fdc88a4f06b778da8c98c4238495695e87e74dd WatchSource:0}: Error finding container 2702f28aa9af14f23e966e6e9fdc88a4f06b778da8c98c4238495695e87e74dd: Status 404 returned error can't find the container with id 2702f28aa9af14f23e966e6e9fdc88a4f06b778da8c98c4238495695e87e74dd Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.014295 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/846821b5-5647-4ffa-a1bb-8695fe90cbcc-config-data" (OuterVolumeSpecName: "config-data") pod "846821b5-5647-4ffa-a1bb-8695fe90cbcc" (UID: "846821b5-5647-4ffa-a1bb-8695fe90cbcc"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.040748 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/846821b5-5647-4ffa-a1bb-8695fe90cbcc-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.040796 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/846821b5-5647-4ffa-a1bb-8695fe90cbcc-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.040818 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d5qdl\" (UniqueName: \"kubernetes.io/projected/846821b5-5647-4ffa-a1bb-8695fe90cbcc-kube-api-access-d5qdl\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.040835 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/846821b5-5647-4ffa-a1bb-8695fe90cbcc-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.040846 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/846821b5-5647-4ffa-a1bb-8695fe90cbcc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.040861 4558 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/846821b5-5647-4ffa-a1bb-8695fe90cbcc-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.040872 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/846821b5-5647-4ffa-a1bb-8695fe90cbcc-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.090555 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovn-northd-0_6ecac33c-30fa-4996-b271-318148e62416/ovn-northd/0.log" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.090977 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.100671 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovn-northd-0_6ecac33c-30fa-4996-b271-318148e62416/ovn-northd/0.log" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.100734 4558 generic.go:334] "Generic (PLEG): container finished" podID="6ecac33c-30fa-4996-b271-318148e62416" containerID="a333ee4a53687d89af384ce1c1e7dd6ab814cf37eae12fca495be4bf694c8f99" exitCode=139 Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.100829 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"6ecac33c-30fa-4996-b271-318148e62416","Type":"ContainerDied","Data":"a333ee4a53687d89af384ce1c1e7dd6ab814cf37eae12fca495be4bf694c8f99"} Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.100874 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"6ecac33c-30fa-4996-b271-318148e62416","Type":"ContainerDied","Data":"b71ca26e6ba3729271cc4ed50b7c54870c0894291866c0187adee148967bb4e1"} Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.100897 4558 scope.go:117] "RemoveContainer" containerID="ae89028fc849f2ea980139f852b630c0eb1659da3362f8ae021fcba4d043bb4d" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.103229 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-5bb5cf79fd-qwwlx" event={"ID":"d48e656f-2dc6-4ecc-ba26-40661e7443be","Type":"ContainerStarted","Data":"2702f28aa9af14f23e966e6e9fdc88a4f06b778da8c98c4238495695e87e74dd"} Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.110214 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-sb-0_31c9af7b-b151-4eaa-b090-efecddd08c46/ovsdbserver-sb/0.log" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.110309 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"31c9af7b-b151-4eaa-b090-efecddd08c46","Type":"ContainerDied","Data":"7b131344a595bbce78d90b53b0bc828a5fbd683845a9c9535a0ee9a6a24128e7"} Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.110422 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.121919 4558 generic.go:334] "Generic (PLEG): container finished" podID="846821b5-5647-4ffa-a1bb-8695fe90cbcc" containerID="a31259f069ab4363f5184768435b810c1a24f8a773760135e07b1662c647ae59" exitCode=0 Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.122001 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"846821b5-5647-4ffa-a1bb-8695fe90cbcc","Type":"ContainerDied","Data":"a31259f069ab4363f5184768435b810c1a24f8a773760135e07b1662c647ae59"} Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.122036 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"846821b5-5647-4ffa-a1bb-8695fe90cbcc","Type":"ContainerDied","Data":"ddd93aadb72fd028114ea17b3d905d2242d090ec9a95a9d0b4c0f0502d9ec533"} Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.122174 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.130354 4558 generic.go:334] "Generic (PLEG): container finished" podID="a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c" containerID="fcf35382d515efc646c6a18f5a871bb7d56050c4d19ea44e31380d2ddf0a363c" exitCode=0 Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.130430 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-s78ts" event={"ID":"a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c","Type":"ContainerDied","Data":"fcf35382d515efc646c6a18f5a871bb7d56050c4d19ea44e31380d2ddf0a363c"} Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.142438 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ecac33c-30fa-4996-b271-318148e62416-combined-ca-bundle\") pod \"6ecac33c-30fa-4996-b271-318148e62416\" (UID: \"6ecac33c-30fa-4996-b271-318148e62416\") " Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.142565 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/6ecac33c-30fa-4996-b271-318148e62416-ovn-rundir\") pod \"6ecac33c-30fa-4996-b271-318148e62416\" (UID: \"6ecac33c-30fa-4996-b271-318148e62416\") " Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.142778 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6ecac33c-30fa-4996-b271-318148e62416-scripts\") pod \"6ecac33c-30fa-4996-b271-318148e62416\" (UID: \"6ecac33c-30fa-4996-b271-318148e62416\") " Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.142846 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tphf6\" (UniqueName: \"kubernetes.io/projected/6ecac33c-30fa-4996-b271-318148e62416-kube-api-access-tphf6\") pod \"6ecac33c-30fa-4996-b271-318148e62416\" (UID: \"6ecac33c-30fa-4996-b271-318148e62416\") " Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.142865 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6ecac33c-30fa-4996-b271-318148e62416-config\") pod \"6ecac33c-30fa-4996-b271-318148e62416\" (UID: \"6ecac33c-30fa-4996-b271-318148e62416\") " Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.145891 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"dcbebc6e-c10d-4a59-9bdb-cdf2678250f8","Type":"ContainerStarted","Data":"bd371b5b829a5f2506757dc4a8010d15cd571ccffeeaa8564edc735f093b7de0"} Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.145993 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"dcbebc6e-c10d-4a59-9bdb-cdf2678250f8","Type":"ContainerStarted","Data":"2766083888bd587166d18906030cf31145864ecd777e795cf63b0f41b9063b9e"} Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.146055 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"dcbebc6e-c10d-4a59-9bdb-cdf2678250f8","Type":"ContainerStarted","Data":"0978343ee70408eca9387c793d63efe6988caa6500955173dd4133b2c12d94f9"} Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.146094 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6ecac33c-30fa-4996-b271-318148e62416-ovn-rundir" 
(OuterVolumeSpecName: "ovn-rundir") pod "6ecac33c-30fa-4996-b271-318148e62416" (UID: "6ecac33c-30fa-4996-b271-318148e62416"). InnerVolumeSpecName "ovn-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.146695 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ecac33c-30fa-4996-b271-318148e62416-scripts" (OuterVolumeSpecName: "scripts") pod "6ecac33c-30fa-4996-b271-318148e62416" (UID: "6ecac33c-30fa-4996-b271-318148e62416"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.147439 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ecac33c-30fa-4996-b271-318148e62416-config" (OuterVolumeSpecName: "config") pod "6ecac33c-30fa-4996-b271-318148e62416" (UID: "6ecac33c-30fa-4996-b271-318148e62416"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.150772 4558 reconciler_common.go:293] "Volume detached for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/6ecac33c-30fa-4996-b271-318148e62416-ovn-rundir\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.150817 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6ecac33c-30fa-4996-b271-318148e62416-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.150833 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6ecac33c-30fa-4996-b271-318148e62416-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.154385 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-9c84d7f96-k2vb8" podUID="25ac8e40-35ea-4d5b-8d8a-ee123c2e2fff" containerName="neutron-api" containerID="cri-o://dab990962bd423061467701f509eb86caef0f4aae069688ebb9c4fa1d2c455d5" gracePeriod=30 Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.154756 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-9c84d7f96-k2vb8" podUID="25ac8e40-35ea-4d5b-8d8a-ee123c2e2fff" containerName="neutron-httpd" containerID="cri-o://22c9efbba433655e1e32d8ca30bd57190a70d36b9688d02f701b38f551db7ae8" gracePeriod=30 Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.154923 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-5f4bd4844-95dpq" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.155076 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-5f4bd4844-95dpq" event={"ID":"1038c23d-3108-472d-b43c-4181fd8ac365","Type":"ContainerDied","Data":"5031de08614f2d9218eedd73dad8864c17d8257834d19d6dbe3cc1fe9b0edfb4"} Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.166323 4558 scope.go:117] "RemoveContainer" containerID="a333ee4a53687d89af384ce1c1e7dd6ab814cf37eae12fca495be4bf694c8f99" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.166435 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ecac33c-30fa-4996-b271-318148e62416-kube-api-access-tphf6" (OuterVolumeSpecName: "kube-api-access-tphf6") pod "6ecac33c-30fa-4996-b271-318148e62416" (UID: "6ecac33c-30fa-4996-b271-318148e62416"). InnerVolumeSpecName "kube-api-access-tphf6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.192193 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.201153 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.208913 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ecac33c-30fa-4996-b271-318148e62416-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6ecac33c-30fa-4996-b271-318148e62416" (UID: "6ecac33c-30fa-4996-b271-318148e62416"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.258466 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:33:08 crc kubenswrapper[4558]: E0120 17:33:08.259058 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1038c23d-3108-472d-b43c-4181fd8ac365" containerName="placement-log" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.264234 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1038c23d-3108-472d-b43c-4181fd8ac365" containerName="placement-log" Jan 20 17:33:08 crc kubenswrapper[4558]: E0120 17:33:08.264313 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="846821b5-5647-4ffa-a1bb-8695fe90cbcc" containerName="cinder-api" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.264331 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="846821b5-5647-4ffa-a1bb-8695fe90cbcc" containerName="cinder-api" Jan 20 17:33:08 crc kubenswrapper[4558]: E0120 17:33:08.264376 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ecac33c-30fa-4996-b271-318148e62416" containerName="openstack-network-exporter" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.264384 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ecac33c-30fa-4996-b271-318148e62416" containerName="openstack-network-exporter" Jan 20 17:33:08 crc kubenswrapper[4558]: E0120 17:33:08.264405 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1038c23d-3108-472d-b43c-4181fd8ac365" containerName="placement-api" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.264413 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1038c23d-3108-472d-b43c-4181fd8ac365" 
containerName="placement-api" Jan 20 17:33:08 crc kubenswrapper[4558]: E0120 17:33:08.264436 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31c9af7b-b151-4eaa-b090-efecddd08c46" containerName="openstack-network-exporter" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.264442 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="31c9af7b-b151-4eaa-b090-efecddd08c46" containerName="openstack-network-exporter" Jan 20 17:33:08 crc kubenswrapper[4558]: E0120 17:33:08.264453 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31c9af7b-b151-4eaa-b090-efecddd08c46" containerName="ovsdbserver-sb" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.264459 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="31c9af7b-b151-4eaa-b090-efecddd08c46" containerName="ovsdbserver-sb" Jan 20 17:33:08 crc kubenswrapper[4558]: E0120 17:33:08.264484 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="846821b5-5647-4ffa-a1bb-8695fe90cbcc" containerName="cinder-api-log" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.264492 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="846821b5-5647-4ffa-a1bb-8695fe90cbcc" containerName="cinder-api-log" Jan 20 17:33:08 crc kubenswrapper[4558]: E0120 17:33:08.264509 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ecac33c-30fa-4996-b271-318148e62416" containerName="ovn-northd" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.264521 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ecac33c-30fa-4996-b271-318148e62416" containerName="ovn-northd" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.264954 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="31c9af7b-b151-4eaa-b090-efecddd08c46" containerName="ovsdbserver-sb" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.264979 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="1038c23d-3108-472d-b43c-4181fd8ac365" containerName="placement-api" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.264992 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="846821b5-5647-4ffa-a1bb-8695fe90cbcc" containerName="cinder-api" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.265005 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="6ecac33c-30fa-4996-b271-318148e62416" containerName="ovn-northd" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.265015 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="6ecac33c-30fa-4996-b271-318148e62416" containerName="openstack-network-exporter" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.265024 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="31c9af7b-b151-4eaa-b090-efecddd08c46" containerName="openstack-network-exporter" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.265032 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="846821b5-5647-4ffa-a1bb-8695fe90cbcc" containerName="cinder-api-log" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.265051 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="1038c23d-3108-472d-b43c-4181fd8ac365" containerName="placement-log" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.267233 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ecac33c-30fa-4996-b271-318148e62416-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:08 crc 
kubenswrapper[4558]: I0120 17:33:08.267299 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tphf6\" (UniqueName: \"kubernetes.io/projected/6ecac33c-30fa-4996-b271-318148e62416-kube-api-access-tphf6\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.272343 4558 scope.go:117] "RemoveContainer" containerID="ae89028fc849f2ea980139f852b630c0eb1659da3362f8ae021fcba4d043bb4d" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.277028 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.281035 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.285746 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ovncluster-ovndbcluster-sb-dockercfg-g95pp" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.285818 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ovndbcluster-sb-ovndbs" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.285937 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovndbcluster-sb-config" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.286553 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovndbcluster-sb-scripts" Jan 20 17:33:08 crc kubenswrapper[4558]: E0120 17:33:08.293330 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ae89028fc849f2ea980139f852b630c0eb1659da3362f8ae021fcba4d043bb4d\": container with ID starting with ae89028fc849f2ea980139f852b630c0eb1659da3362f8ae021fcba4d043bb4d not found: ID does not exist" containerID="ae89028fc849f2ea980139f852b630c0eb1659da3362f8ae021fcba4d043bb4d" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.293375 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ae89028fc849f2ea980139f852b630c0eb1659da3362f8ae021fcba4d043bb4d"} err="failed to get container status \"ae89028fc849f2ea980139f852b630c0eb1659da3362f8ae021fcba4d043bb4d\": rpc error: code = NotFound desc = could not find container \"ae89028fc849f2ea980139f852b630c0eb1659da3362f8ae021fcba4d043bb4d\": container with ID starting with ae89028fc849f2ea980139f852b630c0eb1659da3362f8ae021fcba4d043bb4d not found: ID does not exist" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.293405 4558 scope.go:117] "RemoveContainer" containerID="a333ee4a53687d89af384ce1c1e7dd6ab814cf37eae12fca495be4bf694c8f99" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.293830 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:33:08 crc kubenswrapper[4558]: E0120 17:33:08.300208 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a333ee4a53687d89af384ce1c1e7dd6ab814cf37eae12fca495be4bf694c8f99\": container with ID starting with a333ee4a53687d89af384ce1c1e7dd6ab814cf37eae12fca495be4bf694c8f99 not found: ID does not exist" containerID="a333ee4a53687d89af384ce1c1e7dd6ab814cf37eae12fca495be4bf694c8f99" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.300267 4558 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"a333ee4a53687d89af384ce1c1e7dd6ab814cf37eae12fca495be4bf694c8f99"} err="failed to get container status \"a333ee4a53687d89af384ce1c1e7dd6ab814cf37eae12fca495be4bf694c8f99\": rpc error: code = NotFound desc = could not find container \"a333ee4a53687d89af384ce1c1e7dd6ab814cf37eae12fca495be4bf694c8f99\": container with ID starting with a333ee4a53687d89af384ce1c1e7dd6ab814cf37eae12fca495be4bf694c8f99 not found: ID does not exist" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.300308 4558 scope.go:117] "RemoveContainer" containerID="309c8fa54d3864dc5b564b635ee338b83b4c559bc90397337a276c3ed181323e" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.310682 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.317751 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.319716 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.322204 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ovsdbserver-nb-0" podStartSLOduration=2.322190358 podStartE2EDuration="2.322190358s" podCreationTimestamp="2026-01-20 17:33:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:33:08.243845815 +0000 UTC m=+3082.004183782" watchObservedRunningTime="2026-01-20 17:33:08.322190358 +0000 UTC m=+3082.082528325" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.323421 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-cinder-public-svc" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.323466 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-cinder-internal-svc" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.323527 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-api-config-data" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.332241 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.337956 4558 scope.go:117] "RemoveContainer" containerID="cddbf5e0f0aa3f8140abd8d8060e08f69d0761eab29715426c1715bdc2833585" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.353276 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-5f4bd4844-95dpq"] Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.362332 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/placement-5f4bd4844-95dpq"] Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.380363 4558 scope.go:117] "RemoveContainer" containerID="a31259f069ab4363f5184768435b810c1a24f8a773760135e07b1662c647ae59" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.410441 4558 scope.go:117] "RemoveContainer" containerID="72da703d73712000127a5c91a66e96458bf2133274e58fb7bcbe402b18ccca9b" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.432280 4558 scope.go:117] "RemoveContainer" containerID="a31259f069ab4363f5184768435b810c1a24f8a773760135e07b1662c647ae59" Jan 20 17:33:08 crc kubenswrapper[4558]: E0120 17:33:08.432559 4558 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a31259f069ab4363f5184768435b810c1a24f8a773760135e07b1662c647ae59\": container with ID starting with a31259f069ab4363f5184768435b810c1a24f8a773760135e07b1662c647ae59 not found: ID does not exist" containerID="a31259f069ab4363f5184768435b810c1a24f8a773760135e07b1662c647ae59" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.432600 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a31259f069ab4363f5184768435b810c1a24f8a773760135e07b1662c647ae59"} err="failed to get container status \"a31259f069ab4363f5184768435b810c1a24f8a773760135e07b1662c647ae59\": rpc error: code = NotFound desc = could not find container \"a31259f069ab4363f5184768435b810c1a24f8a773760135e07b1662c647ae59\": container with ID starting with a31259f069ab4363f5184768435b810c1a24f8a773760135e07b1662c647ae59 not found: ID does not exist" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.432630 4558 scope.go:117] "RemoveContainer" containerID="72da703d73712000127a5c91a66e96458bf2133274e58fb7bcbe402b18ccca9b" Jan 20 17:33:08 crc kubenswrapper[4558]: E0120 17:33:08.432943 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"72da703d73712000127a5c91a66e96458bf2133274e58fb7bcbe402b18ccca9b\": container with ID starting with 72da703d73712000127a5c91a66e96458bf2133274e58fb7bcbe402b18ccca9b not found: ID does not exist" containerID="72da703d73712000127a5c91a66e96458bf2133274e58fb7bcbe402b18ccca9b" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.432986 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"72da703d73712000127a5c91a66e96458bf2133274e58fb7bcbe402b18ccca9b"} err="failed to get container status \"72da703d73712000127a5c91a66e96458bf2133274e58fb7bcbe402b18ccca9b\": rpc error: code = NotFound desc = could not find container \"72da703d73712000127a5c91a66e96458bf2133274e58fb7bcbe402b18ccca9b\": container with ID starting with 72da703d73712000127a5c91a66e96458bf2133274e58fb7bcbe402b18ccca9b not found: ID does not exist" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.433010 4558 scope.go:117] "RemoveContainer" containerID="e2a18338fd9ab69e3f54fc92343898be5bb0e50f61066e2c11e95d851b940179" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.451812 4558 scope.go:117] "RemoveContainer" containerID="8a1e30fe4204a02290d90187641c27f6cbb6b966db67d83a76c5a54e949c72ae" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.472182 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/637234b1-5373-46ac-99ea-be390202b982-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"637234b1-5373-46ac-99ea-be390202b982\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.472227 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/75547bf4-a6ea-45b7-a6a8-ba2955995008-config-data\") pod \"cinder-api-0\" (UID: \"75547bf4-a6ea-45b7-a6a8-ba2955995008\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.472253 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage04-crc\") pod \"ovsdbserver-sb-0\" (UID: \"637234b1-5373-46ac-99ea-be390202b982\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.472278 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/637234b1-5373-46ac-99ea-be390202b982-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"637234b1-5373-46ac-99ea-be390202b982\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.472327 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/637234b1-5373-46ac-99ea-be390202b982-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"637234b1-5373-46ac-99ea-be390202b982\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.472367 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/637234b1-5373-46ac-99ea-be390202b982-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"637234b1-5373-46ac-99ea-be390202b982\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.472388 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-47nwq\" (UniqueName: \"kubernetes.io/projected/75547bf4-a6ea-45b7-a6a8-ba2955995008-kube-api-access-47nwq\") pod \"cinder-api-0\" (UID: \"75547bf4-a6ea-45b7-a6a8-ba2955995008\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.472425 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/75547bf4-a6ea-45b7-a6a8-ba2955995008-scripts\") pod \"cinder-api-0\" (UID: \"75547bf4-a6ea-45b7-a6a8-ba2955995008\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.472450 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/637234b1-5373-46ac-99ea-be390202b982-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"637234b1-5373-46ac-99ea-be390202b982\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.472499 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/75547bf4-a6ea-45b7-a6a8-ba2955995008-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"75547bf4-a6ea-45b7-a6a8-ba2955995008\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.472518 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/75547bf4-a6ea-45b7-a6a8-ba2955995008-logs\") pod \"cinder-api-0\" (UID: \"75547bf4-a6ea-45b7-a6a8-ba2955995008\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.472548 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: 
\"kubernetes.io/host-path/75547bf4-a6ea-45b7-a6a8-ba2955995008-etc-machine-id\") pod \"cinder-api-0\" (UID: \"75547bf4-a6ea-45b7-a6a8-ba2955995008\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.472610 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/637234b1-5373-46ac-99ea-be390202b982-config\") pod \"ovsdbserver-sb-0\" (UID: \"637234b1-5373-46ac-99ea-be390202b982\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.472673 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m9vtc\" (UniqueName: \"kubernetes.io/projected/637234b1-5373-46ac-99ea-be390202b982-kube-api-access-m9vtc\") pod \"ovsdbserver-sb-0\" (UID: \"637234b1-5373-46ac-99ea-be390202b982\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.472731 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/75547bf4-a6ea-45b7-a6a8-ba2955995008-config-data-custom\") pod \"cinder-api-0\" (UID: \"75547bf4-a6ea-45b7-a6a8-ba2955995008\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.472751 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75547bf4-a6ea-45b7-a6a8-ba2955995008-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"75547bf4-a6ea-45b7-a6a8-ba2955995008\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.472777 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/75547bf4-a6ea-45b7-a6a8-ba2955995008-public-tls-certs\") pod \"cinder-api-0\" (UID: \"75547bf4-a6ea-45b7-a6a8-ba2955995008\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.576781 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/75547bf4-a6ea-45b7-a6a8-ba2955995008-config-data\") pod \"cinder-api-0\" (UID: \"75547bf4-a6ea-45b7-a6a8-ba2955995008\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.577154 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"ovsdbserver-sb-0\" (UID: \"637234b1-5373-46ac-99ea-be390202b982\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.577197 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/637234b1-5373-46ac-99ea-be390202b982-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"637234b1-5373-46ac-99ea-be390202b982\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.577506 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"ovsdbserver-sb-0\" (UID: 
\"637234b1-5373-46ac-99ea-be390202b982\") device mount path \"/mnt/openstack/pv04\"" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.580704 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/637234b1-5373-46ac-99ea-be390202b982-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"637234b1-5373-46ac-99ea-be390202b982\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.580780 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/637234b1-5373-46ac-99ea-be390202b982-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"637234b1-5373-46ac-99ea-be390202b982\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.580802 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-47nwq\" (UniqueName: \"kubernetes.io/projected/75547bf4-a6ea-45b7-a6a8-ba2955995008-kube-api-access-47nwq\") pod \"cinder-api-0\" (UID: \"75547bf4-a6ea-45b7-a6a8-ba2955995008\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.580846 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/75547bf4-a6ea-45b7-a6a8-ba2955995008-scripts\") pod \"cinder-api-0\" (UID: \"75547bf4-a6ea-45b7-a6a8-ba2955995008\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.580869 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/637234b1-5373-46ac-99ea-be390202b982-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"637234b1-5373-46ac-99ea-be390202b982\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.580907 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/75547bf4-a6ea-45b7-a6a8-ba2955995008-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"75547bf4-a6ea-45b7-a6a8-ba2955995008\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.580924 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/75547bf4-a6ea-45b7-a6a8-ba2955995008-logs\") pod \"cinder-api-0\" (UID: \"75547bf4-a6ea-45b7-a6a8-ba2955995008\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.580949 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/75547bf4-a6ea-45b7-a6a8-ba2955995008-etc-machine-id\") pod \"cinder-api-0\" (UID: \"75547bf4-a6ea-45b7-a6a8-ba2955995008\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.580998 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/637234b1-5373-46ac-99ea-be390202b982-config\") pod \"ovsdbserver-sb-0\" (UID: \"637234b1-5373-46ac-99ea-be390202b982\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.581049 4558 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m9vtc\" (UniqueName: \"kubernetes.io/projected/637234b1-5373-46ac-99ea-be390202b982-kube-api-access-m9vtc\") pod \"ovsdbserver-sb-0\" (UID: \"637234b1-5373-46ac-99ea-be390202b982\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.581091 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/75547bf4-a6ea-45b7-a6a8-ba2955995008-config-data-custom\") pod \"cinder-api-0\" (UID: \"75547bf4-a6ea-45b7-a6a8-ba2955995008\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.581112 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75547bf4-a6ea-45b7-a6a8-ba2955995008-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"75547bf4-a6ea-45b7-a6a8-ba2955995008\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.581136 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/75547bf4-a6ea-45b7-a6a8-ba2955995008-public-tls-certs\") pod \"cinder-api-0\" (UID: \"75547bf4-a6ea-45b7-a6a8-ba2955995008\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.581186 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/637234b1-5373-46ac-99ea-be390202b982-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"637234b1-5373-46ac-99ea-be390202b982\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.581969 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/75547bf4-a6ea-45b7-a6a8-ba2955995008-etc-machine-id\") pod \"cinder-api-0\" (UID: \"75547bf4-a6ea-45b7-a6a8-ba2955995008\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.581995 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/75547bf4-a6ea-45b7-a6a8-ba2955995008-logs\") pod \"cinder-api-0\" (UID: \"75547bf4-a6ea-45b7-a6a8-ba2955995008\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.582366 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/637234b1-5373-46ac-99ea-be390202b982-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"637234b1-5373-46ac-99ea-be390202b982\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.582645 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/637234b1-5373-46ac-99ea-be390202b982-config\") pod \"ovsdbserver-sb-0\" (UID: \"637234b1-5373-46ac-99ea-be390202b982\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.584645 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/637234b1-5373-46ac-99ea-be390202b982-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: 
\"637234b1-5373-46ac-99ea-be390202b982\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.592976 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/75547bf4-a6ea-45b7-a6a8-ba2955995008-config-data\") pod \"cinder-api-0\" (UID: \"75547bf4-a6ea-45b7-a6a8-ba2955995008\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.594934 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1038c23d-3108-472d-b43c-4181fd8ac365" path="/var/lib/kubelet/pods/1038c23d-3108-472d-b43c-4181fd8ac365/volumes" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.596320 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/637234b1-5373-46ac-99ea-be390202b982-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"637234b1-5373-46ac-99ea-be390202b982\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.597457 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/637234b1-5373-46ac-99ea-be390202b982-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"637234b1-5373-46ac-99ea-be390202b982\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.598306 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/637234b1-5373-46ac-99ea-be390202b982-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"637234b1-5373-46ac-99ea-be390202b982\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.598381 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31c9af7b-b151-4eaa-b090-efecddd08c46" path="/var/lib/kubelet/pods/31c9af7b-b151-4eaa-b090-efecddd08c46/volumes" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.599133 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="846821b5-5647-4ffa-a1bb-8695fe90cbcc" path="/var/lib/kubelet/pods/846821b5-5647-4ffa-a1bb-8695fe90cbcc/volumes" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.599639 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/75547bf4-a6ea-45b7-a6a8-ba2955995008-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"75547bf4-a6ea-45b7-a6a8-ba2955995008\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.600044 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-47nwq\" (UniqueName: \"kubernetes.io/projected/75547bf4-a6ea-45b7-a6a8-ba2955995008-kube-api-access-47nwq\") pod \"cinder-api-0\" (UID: \"75547bf4-a6ea-45b7-a6a8-ba2955995008\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.601502 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/75547bf4-a6ea-45b7-a6a8-ba2955995008-config-data-custom\") pod \"cinder-api-0\" (UID: \"75547bf4-a6ea-45b7-a6a8-ba2955995008\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.603336 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"scripts\" (UniqueName: \"kubernetes.io/secret/75547bf4-a6ea-45b7-a6a8-ba2955995008-scripts\") pod \"cinder-api-0\" (UID: \"75547bf4-a6ea-45b7-a6a8-ba2955995008\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.616048 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75547bf4-a6ea-45b7-a6a8-ba2955995008-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"75547bf4-a6ea-45b7-a6a8-ba2955995008\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.619312 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m9vtc\" (UniqueName: \"kubernetes.io/projected/637234b1-5373-46ac-99ea-be390202b982-kube-api-access-m9vtc\") pod \"ovsdbserver-sb-0\" (UID: \"637234b1-5373-46ac-99ea-be390202b982\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.620359 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/75547bf4-a6ea-45b7-a6a8-ba2955995008-public-tls-certs\") pod \"cinder-api-0\" (UID: \"75547bf4-a6ea-45b7-a6a8-ba2955995008\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.621565 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"ovsdbserver-sb-0\" (UID: \"637234b1-5373-46ac-99ea-be390202b982\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.640306 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:33:08 crc kubenswrapper[4558]: I0120 17:33:08.911943 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.091174 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:33:09 crc kubenswrapper[4558]: W0120 17:33:09.105452 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod75547bf4_a6ea_45b7_a6a8_ba2955995008.slice/crio-d22af631b8e109e08433f436c2272df13fde7b1f43934acf0b74e126442edc45 WatchSource:0}: Error finding container d22af631b8e109e08433f436c2272df13fde7b1f43934acf0b74e126442edc45: Status 404 returned error can't find the container with id d22af631b8e109e08433f436c2272df13fde7b1f43934acf0b74e126442edc45 Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.166839 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"75547bf4-a6ea-45b7-a6a8-ba2955995008","Type":"ContainerStarted","Data":"d22af631b8e109e08433f436c2272df13fde7b1f43934acf0b74e126442edc45"} Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.171423 4558 generic.go:334] "Generic (PLEG): container finished" podID="25ac8e40-35ea-4d5b-8d8a-ee123c2e2fff" containerID="22c9efbba433655e1e32d8ca30bd57190a70d36b9688d02f701b38f551db7ae8" exitCode=0 Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.171484 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-9c84d7f96-k2vb8" event={"ID":"25ac8e40-35ea-4d5b-8d8a-ee123c2e2fff","Type":"ContainerDied","Data":"22c9efbba433655e1e32d8ca30bd57190a70d36b9688d02f701b38f551db7ae8"} Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.172992 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.180533 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-5bb5cf79fd-qwwlx" event={"ID":"d48e656f-2dc6-4ecc-ba26-40661e7443be","Type":"ContainerStarted","Data":"9c81b3c994e4a29a9c09cfb4a71e0a12b59291ec0580185cdc50cbf5be163007"} Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.180570 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-5bb5cf79fd-qwwlx" event={"ID":"d48e656f-2dc6-4ecc-ba26-40661e7443be","Type":"ContainerStarted","Data":"4ede584424fe3e5e06d54d22231ee5cfcf1fa1823f294e5bb975df95cd01501c"} Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.180732 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/neutron-5bb5cf79fd-qwwlx" Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.199148 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.204407 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.275267 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/neutron-5bb5cf79fd-qwwlx" podStartSLOduration=3.275220778 podStartE2EDuration="3.275220778s" podCreationTimestamp="2026-01-20 17:33:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:33:09.232740662 +0000 UTC m=+3082.993078628" watchObservedRunningTime="2026-01-20 17:33:09.275220778 +0000 UTC 
m=+3083.035558744" Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.281245 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.295337 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.302223 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovnnorthd-config" Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.309893 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ovnnorthd-ovnnorthd-dockercfg-dj5lx" Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.310246 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovnnorthd-scripts" Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.319448 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ovnnorthd-ovndbs" Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.356480 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.373206 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-5bb5cf79fd-qwwlx"] Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.380210 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-5b4c8486cd-bh7qx"] Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.382139 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-5b4c8486cd-bh7qx" Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.387158 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-neutron-public-svc" Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.387602 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-neutron-internal-svc" Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.387668 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-neutron-ovndbs" Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.405825 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-5b4c8486cd-bh7qx"] Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.406997 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76c1fbce-d8ff-4469-85cb-11124a82402a-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"76c1fbce-d8ff-4469-85cb-11124a82402a\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.407124 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/76c1fbce-d8ff-4469-85cb-11124a82402a-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"76c1fbce-d8ff-4469-85cb-11124a82402a\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.407241 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/76c1fbce-d8ff-4469-85cb-11124a82402a-scripts\") pod \"ovn-northd-0\" 
(UID: \"76c1fbce-d8ff-4469-85cb-11124a82402a\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.407328 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zsdjq\" (UniqueName: \"kubernetes.io/projected/76c1fbce-d8ff-4469-85cb-11124a82402a-kube-api-access-zsdjq\") pod \"ovn-northd-0\" (UID: \"76c1fbce-d8ff-4469-85cb-11124a82402a\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.407474 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76c1fbce-d8ff-4469-85cb-11124a82402a-config\") pod \"ovn-northd-0\" (UID: \"76c1fbce-d8ff-4469-85cb-11124a82402a\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.407520 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/76c1fbce-d8ff-4469-85cb-11124a82402a-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"76c1fbce-d8ff-4469-85cb-11124a82402a\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.407582 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/76c1fbce-d8ff-4469-85cb-11124a82402a-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"76c1fbce-d8ff-4469-85cb-11124a82402a\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.417678 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.510023 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/89049a89-bdd9-4d74-bdc0-64002d4f4842-httpd-config\") pod \"neutron-5b4c8486cd-bh7qx\" (UID: \"89049a89-bdd9-4d74-bdc0-64002d4f4842\") " pod="openstack-kuttl-tests/neutron-5b4c8486cd-bh7qx" Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.510105 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/89049a89-bdd9-4d74-bdc0-64002d4f4842-public-tls-certs\") pod \"neutron-5b4c8486cd-bh7qx\" (UID: \"89049a89-bdd9-4d74-bdc0-64002d4f4842\") " pod="openstack-kuttl-tests/neutron-5b4c8486cd-bh7qx" Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.510192 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76c1fbce-d8ff-4469-85cb-11124a82402a-config\") pod \"ovn-northd-0\" (UID: \"76c1fbce-d8ff-4469-85cb-11124a82402a\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.510278 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/76c1fbce-d8ff-4469-85cb-11124a82402a-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"76c1fbce-d8ff-4469-85cb-11124a82402a\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.510297 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: 
\"kubernetes.io/empty-dir/76c1fbce-d8ff-4469-85cb-11124a82402a-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"76c1fbce-d8ff-4469-85cb-11124a82402a\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.510316 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/89049a89-bdd9-4d74-bdc0-64002d4f4842-internal-tls-certs\") pod \"neutron-5b4c8486cd-bh7qx\" (UID: \"89049a89-bdd9-4d74-bdc0-64002d4f4842\") " pod="openstack-kuttl-tests/neutron-5b4c8486cd-bh7qx" Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.510335 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76c1fbce-d8ff-4469-85cb-11124a82402a-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"76c1fbce-d8ff-4469-85cb-11124a82402a\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.510396 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/76c1fbce-d8ff-4469-85cb-11124a82402a-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"76c1fbce-d8ff-4469-85cb-11124a82402a\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.510457 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d8pfp\" (UniqueName: \"kubernetes.io/projected/89049a89-bdd9-4d74-bdc0-64002d4f4842-kube-api-access-d8pfp\") pod \"neutron-5b4c8486cd-bh7qx\" (UID: \"89049a89-bdd9-4d74-bdc0-64002d4f4842\") " pod="openstack-kuttl-tests/neutron-5b4c8486cd-bh7qx" Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.510489 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/76c1fbce-d8ff-4469-85cb-11124a82402a-scripts\") pod \"ovn-northd-0\" (UID: \"76c1fbce-d8ff-4469-85cb-11124a82402a\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.510511 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/89049a89-bdd9-4d74-bdc0-64002d4f4842-config\") pod \"neutron-5b4c8486cd-bh7qx\" (UID: \"89049a89-bdd9-4d74-bdc0-64002d4f4842\") " pod="openstack-kuttl-tests/neutron-5b4c8486cd-bh7qx" Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.510527 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89049a89-bdd9-4d74-bdc0-64002d4f4842-combined-ca-bundle\") pod \"neutron-5b4c8486cd-bh7qx\" (UID: \"89049a89-bdd9-4d74-bdc0-64002d4f4842\") " pod="openstack-kuttl-tests/neutron-5b4c8486cd-bh7qx" Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.510555 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/89049a89-bdd9-4d74-bdc0-64002d4f4842-ovndb-tls-certs\") pod \"neutron-5b4c8486cd-bh7qx\" (UID: \"89049a89-bdd9-4d74-bdc0-64002d4f4842\") " pod="openstack-kuttl-tests/neutron-5b4c8486cd-bh7qx" Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.510617 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zsdjq\" (UniqueName: 
\"kubernetes.io/projected/76c1fbce-d8ff-4469-85cb-11124a82402a-kube-api-access-zsdjq\") pod \"ovn-northd-0\" (UID: \"76c1fbce-d8ff-4469-85cb-11124a82402a\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.511444 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/76c1fbce-d8ff-4469-85cb-11124a82402a-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"76c1fbce-d8ff-4469-85cb-11124a82402a\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.512186 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76c1fbce-d8ff-4469-85cb-11124a82402a-config\") pod \"ovn-northd-0\" (UID: \"76c1fbce-d8ff-4469-85cb-11124a82402a\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.512223 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/76c1fbce-d8ff-4469-85cb-11124a82402a-scripts\") pod \"ovn-northd-0\" (UID: \"76c1fbce-d8ff-4469-85cb-11124a82402a\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.517896 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76c1fbce-d8ff-4469-85cb-11124a82402a-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"76c1fbce-d8ff-4469-85cb-11124a82402a\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.518007 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/76c1fbce-d8ff-4469-85cb-11124a82402a-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"76c1fbce-d8ff-4469-85cb-11124a82402a\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.518375 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/76c1fbce-d8ff-4469-85cb-11124a82402a-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"76c1fbce-d8ff-4469-85cb-11124a82402a\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.533993 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zsdjq\" (UniqueName: \"kubernetes.io/projected/76c1fbce-d8ff-4469-85cb-11124a82402a-kube-api-access-zsdjq\") pod \"ovn-northd-0\" (UID: \"76c1fbce-d8ff-4469-85cb-11124a82402a\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.600094 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-s78ts" Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.612473 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/89049a89-bdd9-4d74-bdc0-64002d4f4842-config\") pod \"neutron-5b4c8486cd-bh7qx\" (UID: \"89049a89-bdd9-4d74-bdc0-64002d4f4842\") " pod="openstack-kuttl-tests/neutron-5b4c8486cd-bh7qx" Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.612528 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89049a89-bdd9-4d74-bdc0-64002d4f4842-combined-ca-bundle\") pod \"neutron-5b4c8486cd-bh7qx\" (UID: \"89049a89-bdd9-4d74-bdc0-64002d4f4842\") " pod="openstack-kuttl-tests/neutron-5b4c8486cd-bh7qx" Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.612597 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/89049a89-bdd9-4d74-bdc0-64002d4f4842-ovndb-tls-certs\") pod \"neutron-5b4c8486cd-bh7qx\" (UID: \"89049a89-bdd9-4d74-bdc0-64002d4f4842\") " pod="openstack-kuttl-tests/neutron-5b4c8486cd-bh7qx" Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.612694 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/89049a89-bdd9-4d74-bdc0-64002d4f4842-httpd-config\") pod \"neutron-5b4c8486cd-bh7qx\" (UID: \"89049a89-bdd9-4d74-bdc0-64002d4f4842\") " pod="openstack-kuttl-tests/neutron-5b4c8486cd-bh7qx" Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.612751 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/89049a89-bdd9-4d74-bdc0-64002d4f4842-public-tls-certs\") pod \"neutron-5b4c8486cd-bh7qx\" (UID: \"89049a89-bdd9-4d74-bdc0-64002d4f4842\") " pod="openstack-kuttl-tests/neutron-5b4c8486cd-bh7qx" Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.612837 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/89049a89-bdd9-4d74-bdc0-64002d4f4842-internal-tls-certs\") pod \"neutron-5b4c8486cd-bh7qx\" (UID: \"89049a89-bdd9-4d74-bdc0-64002d4f4842\") " pod="openstack-kuttl-tests/neutron-5b4c8486cd-bh7qx" Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.613282 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d8pfp\" (UniqueName: \"kubernetes.io/projected/89049a89-bdd9-4d74-bdc0-64002d4f4842-kube-api-access-d8pfp\") pod \"neutron-5b4c8486cd-bh7qx\" (UID: \"89049a89-bdd9-4d74-bdc0-64002d4f4842\") " pod="openstack-kuttl-tests/neutron-5b4c8486cd-bh7qx" Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.618112 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/89049a89-bdd9-4d74-bdc0-64002d4f4842-ovndb-tls-certs\") pod \"neutron-5b4c8486cd-bh7qx\" (UID: \"89049a89-bdd9-4d74-bdc0-64002d4f4842\") " pod="openstack-kuttl-tests/neutron-5b4c8486cd-bh7qx" Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.621948 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/89049a89-bdd9-4d74-bdc0-64002d4f4842-internal-tls-certs\") pod \"neutron-5b4c8486cd-bh7qx\" (UID: \"89049a89-bdd9-4d74-bdc0-64002d4f4842\") " 
pod="openstack-kuttl-tests/neutron-5b4c8486cd-bh7qx" Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.622025 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89049a89-bdd9-4d74-bdc0-64002d4f4842-combined-ca-bundle\") pod \"neutron-5b4c8486cd-bh7qx\" (UID: \"89049a89-bdd9-4d74-bdc0-64002d4f4842\") " pod="openstack-kuttl-tests/neutron-5b4c8486cd-bh7qx" Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.622444 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/89049a89-bdd9-4d74-bdc0-64002d4f4842-config\") pod \"neutron-5b4c8486cd-bh7qx\" (UID: \"89049a89-bdd9-4d74-bdc0-64002d4f4842\") " pod="openstack-kuttl-tests/neutron-5b4c8486cd-bh7qx" Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.623190 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/89049a89-bdd9-4d74-bdc0-64002d4f4842-public-tls-certs\") pod \"neutron-5b4c8486cd-bh7qx\" (UID: \"89049a89-bdd9-4d74-bdc0-64002d4f4842\") " pod="openstack-kuttl-tests/neutron-5b4c8486cd-bh7qx" Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.632334 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d8pfp\" (UniqueName: \"kubernetes.io/projected/89049a89-bdd9-4d74-bdc0-64002d4f4842-kube-api-access-d8pfp\") pod \"neutron-5b4c8486cd-bh7qx\" (UID: \"89049a89-bdd9-4d74-bdc0-64002d4f4842\") " pod="openstack-kuttl-tests/neutron-5b4c8486cd-bh7qx" Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.644277 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/89049a89-bdd9-4d74-bdc0-64002d4f4842-httpd-config\") pod \"neutron-5b4c8486cd-bh7qx\" (UID: \"89049a89-bdd9-4d74-bdc0-64002d4f4842\") " pod="openstack-kuttl-tests/neutron-5b4c8486cd-bh7qx" Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.653987 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.714275 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-5b4c8486cd-bh7qx" Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.714473 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c-fernet-keys\") pod \"a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c\" (UID: \"a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c\") " Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.714510 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c-scripts\") pod \"a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c\" (UID: \"a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c\") " Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.714681 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c-config-data\") pod \"a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c\" (UID: \"a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c\") " Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.714730 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c-combined-ca-bundle\") pod \"a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c\" (UID: \"a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c\") " Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.714763 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c-credential-keys\") pod \"a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c\" (UID: \"a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c\") " Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.714798 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zk644\" (UniqueName: \"kubernetes.io/projected/a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c-kube-api-access-zk644\") pod \"a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c\" (UID: \"a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c\") " Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.720718 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c" (UID: "a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.721020 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c-kube-api-access-zk644" (OuterVolumeSpecName: "kube-api-access-zk644") pod "a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c" (UID: "a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c"). InnerVolumeSpecName "kube-api-access-zk644". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.733601 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c" (UID: "a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c"). InnerVolumeSpecName "credential-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.741678 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c-scripts" (OuterVolumeSpecName: "scripts") pod "a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c" (UID: "a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.765250 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c" (UID: "a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.765950 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c-config-data" (OuterVolumeSpecName: "config-data") pod "a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c" (UID: "a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.818637 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.818670 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.818684 4558 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.818694 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zk644\" (UniqueName: \"kubernetes.io/projected/a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c-kube-api-access-zk644\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.818704 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.818711 4558 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:09 crc kubenswrapper[4558]: I0120 17:33:09.828009 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:33:10 crc kubenswrapper[4558]: I0120 17:33:10.161150 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-5b4c8486cd-bh7qx"] Jan 20 17:33:10 crc kubenswrapper[4558]: I0120 17:33:10.177261 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:33:10 crc kubenswrapper[4558]: W0120 17:33:10.180579 4558 manager.go:1169] Failed to process 
watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod89049a89_bdd9_4d74_bdc0_64002d4f4842.slice/crio-6ac1d2a9274972d0bb14c388776752fcb28b35c9e1abb126096096f4857f41f1 WatchSource:0}: Error finding container 6ac1d2a9274972d0bb14c388776752fcb28b35c9e1abb126096096f4857f41f1: Status 404 returned error can't find the container with id 6ac1d2a9274972d0bb14c388776752fcb28b35c9e1abb126096096f4857f41f1 Jan 20 17:33:10 crc kubenswrapper[4558]: W0120 17:33:10.187258 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod76c1fbce_d8ff_4469_85cb_11124a82402a.slice/crio-713cb74e1cabebfa7d1cc7a7da54294b672f46e7d0652d75be881bebd0472ef1 WatchSource:0}: Error finding container 713cb74e1cabebfa7d1cc7a7da54294b672f46e7d0652d75be881bebd0472ef1: Status 404 returned error can't find the container with id 713cb74e1cabebfa7d1cc7a7da54294b672f46e7d0652d75be881bebd0472ef1 Jan 20 17:33:10 crc kubenswrapper[4558]: I0120 17:33:10.214378 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"76c1fbce-d8ff-4469-85cb-11124a82402a","Type":"ContainerStarted","Data":"713cb74e1cabebfa7d1cc7a7da54294b672f46e7d0652d75be881bebd0472ef1"} Jan 20 17:33:10 crc kubenswrapper[4558]: I0120 17:33:10.216685 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"637234b1-5373-46ac-99ea-be390202b982","Type":"ContainerStarted","Data":"17df1b24593c3011ada27218df6bb80be57a9d60832634eea50b0c271601a4f4"} Jan 20 17:33:10 crc kubenswrapper[4558]: I0120 17:33:10.216734 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"637234b1-5373-46ac-99ea-be390202b982","Type":"ContainerStarted","Data":"93e11ff2740df41a59ff853fde3d9b9c61a53b8cc77a0513ff1cbea5527314b8"} Jan 20 17:33:10 crc kubenswrapper[4558]: I0120 17:33:10.216747 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"637234b1-5373-46ac-99ea-be390202b982","Type":"ContainerStarted","Data":"c11d825b13ee73691aceb00cb004cdfe49668dad4253bed0eb39b62e8eea7707"} Jan 20 17:33:10 crc kubenswrapper[4558]: I0120 17:33:10.219461 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-5b4c8486cd-bh7qx" event={"ID":"89049a89-bdd9-4d74-bdc0-64002d4f4842","Type":"ContainerStarted","Data":"6ac1d2a9274972d0bb14c388776752fcb28b35c9e1abb126096096f4857f41f1"} Jan 20 17:33:10 crc kubenswrapper[4558]: I0120 17:33:10.225827 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-s78ts" event={"ID":"a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c","Type":"ContainerDied","Data":"caf3b86c536afa6f89b87145a2e56bad21ebf5126d817742dfc2556c1dbf4d10"} Jan 20 17:33:10 crc kubenswrapper[4558]: I0120 17:33:10.225866 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="caf3b86c536afa6f89b87145a2e56bad21ebf5126d817742dfc2556c1dbf4d10" Jan 20 17:33:10 crc kubenswrapper[4558]: I0120 17:33:10.226036 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-s78ts" Jan 20 17:33:10 crc kubenswrapper[4558]: I0120 17:33:10.228154 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"75547bf4-a6ea-45b7-a6a8-ba2955995008","Type":"ContainerStarted","Data":"37e2905fb8733b28399a61b7869ba9f115a3ee071f8f2f9d43f5d0e1ebc3969d"} Jan 20 17:33:10 crc kubenswrapper[4558]: I0120 17:33:10.239879 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ovsdbserver-sb-0" podStartSLOduration=2.239861427 podStartE2EDuration="2.239861427s" podCreationTimestamp="2026-01-20 17:33:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:33:10.23447535 +0000 UTC m=+3083.994813317" watchObservedRunningTime="2026-01-20 17:33:10.239861427 +0000 UTC m=+3084.000199395" Jan 20 17:33:10 crc kubenswrapper[4558]: I0120 17:33:10.331310 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-7fb744b5b7-vj8fh"] Jan 20 17:33:10 crc kubenswrapper[4558]: I0120 17:33:10.331659 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/keystone-7fb744b5b7-vj8fh" podUID="8516d676-2fa0-47c4-b81a-254ec50efd52" containerName="keystone-api" containerID="cri-o://5f4eff277fd4cf9e126dcb473d4bc2082809a8fcea976047692503f0bb42d441" gracePeriod=30 Jan 20 17:33:10 crc kubenswrapper[4558]: I0120 17:33:10.341531 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/keystone-7fb744b5b7-vj8fh" podUID="8516d676-2fa0-47c4-b81a-254ec50efd52" containerName="keystone-api" probeResult="failure" output="Get \"http://10.217.1.212:5000/v3\": EOF" Jan 20 17:33:10 crc kubenswrapper[4558]: I0120 17:33:10.362198 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-c7b8d4458-7ddjq"] Jan 20 17:33:10 crc kubenswrapper[4558]: E0120 17:33:10.362726 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c" containerName="keystone-bootstrap" Jan 20 17:33:10 crc kubenswrapper[4558]: I0120 17:33:10.362738 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c" containerName="keystone-bootstrap" Jan 20 17:33:10 crc kubenswrapper[4558]: I0120 17:33:10.362930 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c" containerName="keystone-bootstrap" Jan 20 17:33:10 crc kubenswrapper[4558]: I0120 17:33:10.363671 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-c7b8d4458-7ddjq" Jan 20 17:33:10 crc kubenswrapper[4558]: I0120 17:33:10.367467 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-keystone-public-svc" Jan 20 17:33:10 crc kubenswrapper[4558]: I0120 17:33:10.367667 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-keystone-internal-svc" Jan 20 17:33:10 crc kubenswrapper[4558]: I0120 17:33:10.409237 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-c7b8d4458-7ddjq"] Jan 20 17:33:10 crc kubenswrapper[4558]: I0120 17:33:10.443253 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff846ca3-9970-4311-915b-e2ad95aa07b8-config-data\") pod \"keystone-c7b8d4458-7ddjq\" (UID: \"ff846ca3-9970-4311-915b-e2ad95aa07b8\") " pod="openstack-kuttl-tests/keystone-c7b8d4458-7ddjq" Jan 20 17:33:10 crc kubenswrapper[4558]: I0120 17:33:10.443367 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ff846ca3-9970-4311-915b-e2ad95aa07b8-internal-tls-certs\") pod \"keystone-c7b8d4458-7ddjq\" (UID: \"ff846ca3-9970-4311-915b-e2ad95aa07b8\") " pod="openstack-kuttl-tests/keystone-c7b8d4458-7ddjq" Jan 20 17:33:10 crc kubenswrapper[4558]: I0120 17:33:10.443419 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c78m6\" (UniqueName: \"kubernetes.io/projected/ff846ca3-9970-4311-915b-e2ad95aa07b8-kube-api-access-c78m6\") pod \"keystone-c7b8d4458-7ddjq\" (UID: \"ff846ca3-9970-4311-915b-e2ad95aa07b8\") " pod="openstack-kuttl-tests/keystone-c7b8d4458-7ddjq" Jan 20 17:33:10 crc kubenswrapper[4558]: I0120 17:33:10.443495 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ff846ca3-9970-4311-915b-e2ad95aa07b8-credential-keys\") pod \"keystone-c7b8d4458-7ddjq\" (UID: \"ff846ca3-9970-4311-915b-e2ad95aa07b8\") " pod="openstack-kuttl-tests/keystone-c7b8d4458-7ddjq" Jan 20 17:33:10 crc kubenswrapper[4558]: I0120 17:33:10.443514 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ff846ca3-9970-4311-915b-e2ad95aa07b8-scripts\") pod \"keystone-c7b8d4458-7ddjq\" (UID: \"ff846ca3-9970-4311-915b-e2ad95aa07b8\") " pod="openstack-kuttl-tests/keystone-c7b8d4458-7ddjq" Jan 20 17:33:10 crc kubenswrapper[4558]: I0120 17:33:10.443540 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ff846ca3-9970-4311-915b-e2ad95aa07b8-public-tls-certs\") pod \"keystone-c7b8d4458-7ddjq\" (UID: \"ff846ca3-9970-4311-915b-e2ad95aa07b8\") " pod="openstack-kuttl-tests/keystone-c7b8d4458-7ddjq" Jan 20 17:33:10 crc kubenswrapper[4558]: I0120 17:33:10.443559 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ff846ca3-9970-4311-915b-e2ad95aa07b8-fernet-keys\") pod \"keystone-c7b8d4458-7ddjq\" (UID: \"ff846ca3-9970-4311-915b-e2ad95aa07b8\") " pod="openstack-kuttl-tests/keystone-c7b8d4458-7ddjq" Jan 20 17:33:10 crc kubenswrapper[4558]: I0120 17:33:10.443604 4558 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff846ca3-9970-4311-915b-e2ad95aa07b8-combined-ca-bundle\") pod \"keystone-c7b8d4458-7ddjq\" (UID: \"ff846ca3-9970-4311-915b-e2ad95aa07b8\") " pod="openstack-kuttl-tests/keystone-c7b8d4458-7ddjq" Jan 20 17:33:10 crc kubenswrapper[4558]: I0120 17:33:10.547002 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff846ca3-9970-4311-915b-e2ad95aa07b8-combined-ca-bundle\") pod \"keystone-c7b8d4458-7ddjq\" (UID: \"ff846ca3-9970-4311-915b-e2ad95aa07b8\") " pod="openstack-kuttl-tests/keystone-c7b8d4458-7ddjq" Jan 20 17:33:10 crc kubenswrapper[4558]: I0120 17:33:10.547088 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff846ca3-9970-4311-915b-e2ad95aa07b8-config-data\") pod \"keystone-c7b8d4458-7ddjq\" (UID: \"ff846ca3-9970-4311-915b-e2ad95aa07b8\") " pod="openstack-kuttl-tests/keystone-c7b8d4458-7ddjq" Jan 20 17:33:10 crc kubenswrapper[4558]: I0120 17:33:10.547150 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ff846ca3-9970-4311-915b-e2ad95aa07b8-internal-tls-certs\") pod \"keystone-c7b8d4458-7ddjq\" (UID: \"ff846ca3-9970-4311-915b-e2ad95aa07b8\") " pod="openstack-kuttl-tests/keystone-c7b8d4458-7ddjq" Jan 20 17:33:10 crc kubenswrapper[4558]: I0120 17:33:10.547233 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c78m6\" (UniqueName: \"kubernetes.io/projected/ff846ca3-9970-4311-915b-e2ad95aa07b8-kube-api-access-c78m6\") pod \"keystone-c7b8d4458-7ddjq\" (UID: \"ff846ca3-9970-4311-915b-e2ad95aa07b8\") " pod="openstack-kuttl-tests/keystone-c7b8d4458-7ddjq" Jan 20 17:33:10 crc kubenswrapper[4558]: I0120 17:33:10.547378 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ff846ca3-9970-4311-915b-e2ad95aa07b8-scripts\") pod \"keystone-c7b8d4458-7ddjq\" (UID: \"ff846ca3-9970-4311-915b-e2ad95aa07b8\") " pod="openstack-kuttl-tests/keystone-c7b8d4458-7ddjq" Jan 20 17:33:10 crc kubenswrapper[4558]: I0120 17:33:10.547399 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ff846ca3-9970-4311-915b-e2ad95aa07b8-credential-keys\") pod \"keystone-c7b8d4458-7ddjq\" (UID: \"ff846ca3-9970-4311-915b-e2ad95aa07b8\") " pod="openstack-kuttl-tests/keystone-c7b8d4458-7ddjq" Jan 20 17:33:10 crc kubenswrapper[4558]: I0120 17:33:10.547443 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ff846ca3-9970-4311-915b-e2ad95aa07b8-public-tls-certs\") pod \"keystone-c7b8d4458-7ddjq\" (UID: \"ff846ca3-9970-4311-915b-e2ad95aa07b8\") " pod="openstack-kuttl-tests/keystone-c7b8d4458-7ddjq" Jan 20 17:33:10 crc kubenswrapper[4558]: I0120 17:33:10.547468 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ff846ca3-9970-4311-915b-e2ad95aa07b8-fernet-keys\") pod \"keystone-c7b8d4458-7ddjq\" (UID: \"ff846ca3-9970-4311-915b-e2ad95aa07b8\") " pod="openstack-kuttl-tests/keystone-c7b8d4458-7ddjq" Jan 20 17:33:10 crc kubenswrapper[4558]: I0120 17:33:10.551372 4558 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff846ca3-9970-4311-915b-e2ad95aa07b8-config-data\") pod \"keystone-c7b8d4458-7ddjq\" (UID: \"ff846ca3-9970-4311-915b-e2ad95aa07b8\") " pod="openstack-kuttl-tests/keystone-c7b8d4458-7ddjq" Jan 20 17:33:10 crc kubenswrapper[4558]: I0120 17:33:10.553994 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ff846ca3-9970-4311-915b-e2ad95aa07b8-public-tls-certs\") pod \"keystone-c7b8d4458-7ddjq\" (UID: \"ff846ca3-9970-4311-915b-e2ad95aa07b8\") " pod="openstack-kuttl-tests/keystone-c7b8d4458-7ddjq" Jan 20 17:33:10 crc kubenswrapper[4558]: I0120 17:33:10.554051 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ff846ca3-9970-4311-915b-e2ad95aa07b8-internal-tls-certs\") pod \"keystone-c7b8d4458-7ddjq\" (UID: \"ff846ca3-9970-4311-915b-e2ad95aa07b8\") " pod="openstack-kuttl-tests/keystone-c7b8d4458-7ddjq" Jan 20 17:33:10 crc kubenswrapper[4558]: I0120 17:33:10.554363 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ff846ca3-9970-4311-915b-e2ad95aa07b8-credential-keys\") pod \"keystone-c7b8d4458-7ddjq\" (UID: \"ff846ca3-9970-4311-915b-e2ad95aa07b8\") " pod="openstack-kuttl-tests/keystone-c7b8d4458-7ddjq" Jan 20 17:33:10 crc kubenswrapper[4558]: I0120 17:33:10.555212 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ff846ca3-9970-4311-915b-e2ad95aa07b8-scripts\") pod \"keystone-c7b8d4458-7ddjq\" (UID: \"ff846ca3-9970-4311-915b-e2ad95aa07b8\") " pod="openstack-kuttl-tests/keystone-c7b8d4458-7ddjq" Jan 20 17:33:10 crc kubenswrapper[4558]: I0120 17:33:10.558851 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff846ca3-9970-4311-915b-e2ad95aa07b8-combined-ca-bundle\") pod \"keystone-c7b8d4458-7ddjq\" (UID: \"ff846ca3-9970-4311-915b-e2ad95aa07b8\") " pod="openstack-kuttl-tests/keystone-c7b8d4458-7ddjq" Jan 20 17:33:10 crc kubenswrapper[4558]: I0120 17:33:10.561300 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ff846ca3-9970-4311-915b-e2ad95aa07b8-fernet-keys\") pod \"keystone-c7b8d4458-7ddjq\" (UID: \"ff846ca3-9970-4311-915b-e2ad95aa07b8\") " pod="openstack-kuttl-tests/keystone-c7b8d4458-7ddjq" Jan 20 17:33:10 crc kubenswrapper[4558]: I0120 17:33:10.564449 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c78m6\" (UniqueName: \"kubernetes.io/projected/ff846ca3-9970-4311-915b-e2ad95aa07b8-kube-api-access-c78m6\") pod \"keystone-c7b8d4458-7ddjq\" (UID: \"ff846ca3-9970-4311-915b-e2ad95aa07b8\") " pod="openstack-kuttl-tests/keystone-c7b8d4458-7ddjq" Jan 20 17:33:10 crc kubenswrapper[4558]: I0120 17:33:10.583923 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ecac33c-30fa-4996-b271-318148e62416" path="/var/lib/kubelet/pods/6ecac33c-30fa-4996-b271-318148e62416/volumes" Jan 20 17:33:10 crc kubenswrapper[4558]: I0120 17:33:10.694012 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-c7b8d4458-7ddjq" Jan 20 17:33:11 crc kubenswrapper[4558]: E0120 17:33:11.160687 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="46edde77e0df814c67d5b21fe72058f3eb4efd6acd4a7f68a737219cd8448b40" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:33:11 crc kubenswrapper[4558]: E0120 17:33:11.163332 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="46edde77e0df814c67d5b21fe72058f3eb4efd6acd4a7f68a737219cd8448b40" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:33:11 crc kubenswrapper[4558]: E0120 17:33:11.165018 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="46edde77e0df814c67d5b21fe72058f3eb4efd6acd4a7f68a737219cd8448b40" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:33:11 crc kubenswrapper[4558]: E0120 17:33:11.165069 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podUID="c686e4c2-68d0-4999-9078-90c70927d9ae" containerName="nova-cell1-conductor-conductor" Jan 20 17:33:11 crc kubenswrapper[4558]: W0120 17:33:11.237521 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podff846ca3_9970_4311_915b_e2ad95aa07b8.slice/crio-9761e5f0f23f3fda1bf98680cc96c5f153da35b0b62e442b1989dff912c77a8d WatchSource:0}: Error finding container 9761e5f0f23f3fda1bf98680cc96c5f153da35b0b62e442b1989dff912c77a8d: Status 404 returned error can't find the container with id 9761e5f0f23f3fda1bf98680cc96c5f153da35b0b62e442b1989dff912c77a8d Jan 20 17:33:11 crc kubenswrapper[4558]: I0120 17:33:11.243231 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-c7b8d4458-7ddjq"] Jan 20 17:33:11 crc kubenswrapper[4558]: I0120 17:33:11.244058 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"76c1fbce-d8ff-4469-85cb-11124a82402a","Type":"ContainerStarted","Data":"610a512ff345e04683281515fbc1b1706ebf44507e8dbe3063d14bccc948cae5"} Jan 20 17:33:11 crc kubenswrapper[4558]: I0120 17:33:11.244094 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"76c1fbce-d8ff-4469-85cb-11124a82402a","Type":"ContainerStarted","Data":"79b61ba8ff702ce7bd226d54ae1f42ddd1006b9082d80760f2a349aad7555bc4"} Jan 20 17:33:11 crc kubenswrapper[4558]: I0120 17:33:11.244903 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:33:11 crc kubenswrapper[4558]: I0120 17:33:11.258507 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-5b4c8486cd-bh7qx" event={"ID":"89049a89-bdd9-4d74-bdc0-64002d4f4842","Type":"ContainerStarted","Data":"ae7a97185cd9fbb38ff879e0bdbf67d1b5340be215aff23c59be1f3116c969cd"} Jan 20 17:33:11 crc kubenswrapper[4558]: I0120 17:33:11.258763 
4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-5b4c8486cd-bh7qx" event={"ID":"89049a89-bdd9-4d74-bdc0-64002d4f4842","Type":"ContainerStarted","Data":"ea1f141ab42d1923c6ad40bf5bb7a415db52b6566887f2c7238c3eb688cf92ea"} Jan 20 17:33:11 crc kubenswrapper[4558]: I0120 17:33:11.261259 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/neutron-5b4c8486cd-bh7qx" Jan 20 17:33:11 crc kubenswrapper[4558]: I0120 17:33:11.261312 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"75547bf4-a6ea-45b7-a6a8-ba2955995008","Type":"ContainerStarted","Data":"828468f6f165adb235da38999ede843b78146540c25db7da9e9afb21516374a2"} Jan 20 17:33:11 crc kubenswrapper[4558]: I0120 17:33:11.263346 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-5bb5cf79fd-qwwlx" podUID="d48e656f-2dc6-4ecc-ba26-40661e7443be" containerName="neutron-api" containerID="cri-o://4ede584424fe3e5e06d54d22231ee5cfcf1fa1823f294e5bb975df95cd01501c" gracePeriod=30 Jan 20 17:33:11 crc kubenswrapper[4558]: I0120 17:33:11.263563 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-5bb5cf79fd-qwwlx" podUID="d48e656f-2dc6-4ecc-ba26-40661e7443be" containerName="neutron-httpd" containerID="cri-o://9c81b3c994e4a29a9c09cfb4a71e0a12b59291ec0580185cdc50cbf5be163007" gracePeriod=30 Jan 20 17:33:11 crc kubenswrapper[4558]: I0120 17:33:11.292574 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ovn-northd-0" podStartSLOduration=2.292552941 podStartE2EDuration="2.292552941s" podCreationTimestamp="2026-01-20 17:33:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:33:11.27238037 +0000 UTC m=+3085.032718338" watchObservedRunningTime="2026-01-20 17:33:11.292552941 +0000 UTC m=+3085.052890908" Jan 20 17:33:11 crc kubenswrapper[4558]: I0120 17:33:11.315784 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/neutron-5b4c8486cd-bh7qx" podStartSLOduration=2.315762984 podStartE2EDuration="2.315762984s" podCreationTimestamp="2026-01-20 17:33:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:33:11.29718437 +0000 UTC m=+3085.057522337" watchObservedRunningTime="2026-01-20 17:33:11.315762984 +0000 UTC m=+3085.076100951" Jan 20 17:33:11 crc kubenswrapper[4558]: I0120 17:33:11.381258 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/cinder-api-0" podStartSLOduration=3.381235533 podStartE2EDuration="3.381235533s" podCreationTimestamp="2026-01-20 17:33:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:33:11.34091667 +0000 UTC m=+3085.101254638" watchObservedRunningTime="2026-01-20 17:33:11.381235533 +0000 UTC m=+3085.141573500" Jan 20 17:33:11 crc kubenswrapper[4558]: I0120 17:33:11.827980 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:33:11 crc kubenswrapper[4558]: I0120 17:33:11.912193 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:33:12 crc kubenswrapper[4558]: I0120 17:33:12.136525 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_neutron-7c4c8fc458-mv7sx_3a9cf38d-4b8b-453b-bc39-b97deab68b72/neutron-api/0.log" Jan 20 17:33:12 crc kubenswrapper[4558]: I0120 17:33:12.136624 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-7c4c8fc458-mv7sx" Jan 20 17:33:12 crc kubenswrapper[4558]: I0120 17:33:12.195033 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:33:12 crc kubenswrapper[4558]: I0120 17:33:12.195355 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:33:12 crc kubenswrapper[4558]: I0120 17:33:12.195511 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:33:12 crc kubenswrapper[4558]: I0120 17:33:12.210383 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/3a9cf38d-4b8b-453b-bc39-b97deab68b72-config\") pod \"3a9cf38d-4b8b-453b-bc39-b97deab68b72\" (UID: \"3a9cf38d-4b8b-453b-bc39-b97deab68b72\") " Jan 20 17:33:12 crc kubenswrapper[4558]: I0120 17:33:12.210523 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mtl24\" (UniqueName: \"kubernetes.io/projected/3a9cf38d-4b8b-453b-bc39-b97deab68b72-kube-api-access-mtl24\") pod \"3a9cf38d-4b8b-453b-bc39-b97deab68b72\" (UID: \"3a9cf38d-4b8b-453b-bc39-b97deab68b72\") " Jan 20 17:33:12 crc kubenswrapper[4558]: I0120 17:33:12.210551 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/3a9cf38d-4b8b-453b-bc39-b97deab68b72-httpd-config\") pod \"3a9cf38d-4b8b-453b-bc39-b97deab68b72\" (UID: \"3a9cf38d-4b8b-453b-bc39-b97deab68b72\") " Jan 20 17:33:12 crc kubenswrapper[4558]: I0120 17:33:12.210577 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a9cf38d-4b8b-453b-bc39-b97deab68b72-combined-ca-bundle\") pod \"3a9cf38d-4b8b-453b-bc39-b97deab68b72\" (UID: \"3a9cf38d-4b8b-453b-bc39-b97deab68b72\") " Jan 20 17:33:12 crc kubenswrapper[4558]: I0120 17:33:12.220295 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3a9cf38d-4b8b-453b-bc39-b97deab68b72-kube-api-access-mtl24" (OuterVolumeSpecName: "kube-api-access-mtl24") pod "3a9cf38d-4b8b-453b-bc39-b97deab68b72" (UID: "3a9cf38d-4b8b-453b-bc39-b97deab68b72"). InnerVolumeSpecName "kube-api-access-mtl24". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:33:12 crc kubenswrapper[4558]: I0120 17:33:12.223386 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a9cf38d-4b8b-453b-bc39-b97deab68b72-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "3a9cf38d-4b8b-453b-bc39-b97deab68b72" (UID: "3a9cf38d-4b8b-453b-bc39-b97deab68b72"). InnerVolumeSpecName "httpd-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:12 crc kubenswrapper[4558]: I0120 17:33:12.233442 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:33:12 crc kubenswrapper[4558]: I0120 17:33:12.272963 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-c7b8d4458-7ddjq" event={"ID":"ff846ca3-9970-4311-915b-e2ad95aa07b8","Type":"ContainerStarted","Data":"82d446802d7e8d621542fbedc2f7511c1da11e01b6b157d8ef90209c4965a1a1"} Jan 20 17:33:12 crc kubenswrapper[4558]: I0120 17:33:12.273007 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-c7b8d4458-7ddjq" event={"ID":"ff846ca3-9970-4311-915b-e2ad95aa07b8","Type":"ContainerStarted","Data":"9761e5f0f23f3fda1bf98680cc96c5f153da35b0b62e442b1989dff912c77a8d"} Jan 20 17:33:12 crc kubenswrapper[4558]: I0120 17:33:12.273594 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a9cf38d-4b8b-453b-bc39-b97deab68b72-config" (OuterVolumeSpecName: "config") pod "3a9cf38d-4b8b-453b-bc39-b97deab68b72" (UID: "3a9cf38d-4b8b-453b-bc39-b97deab68b72"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:12 crc kubenswrapper[4558]: I0120 17:33:12.273663 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/keystone-c7b8d4458-7ddjq" Jan 20 17:33:12 crc kubenswrapper[4558]: I0120 17:33:12.275590 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_neutron-7c4c8fc458-mv7sx_3a9cf38d-4b8b-453b-bc39-b97deab68b72/neutron-api/0.log" Jan 20 17:33:12 crc kubenswrapper[4558]: I0120 17:33:12.275640 4558 generic.go:334] "Generic (PLEG): container finished" podID="3a9cf38d-4b8b-453b-bc39-b97deab68b72" containerID="76e12ebe02e4aa0edbf97593983bce1f44046a97fbfe38a1b74fd48dd726a1c5" exitCode=1 Jan 20 17:33:12 crc kubenswrapper[4558]: I0120 17:33:12.276377 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-7c4c8fc458-mv7sx" event={"ID":"3a9cf38d-4b8b-453b-bc39-b97deab68b72","Type":"ContainerDied","Data":"76e12ebe02e4aa0edbf97593983bce1f44046a97fbfe38a1b74fd48dd726a1c5"} Jan 20 17:33:12 crc kubenswrapper[4558]: I0120 17:33:12.276419 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-7c4c8fc458-mv7sx" event={"ID":"3a9cf38d-4b8b-453b-bc39-b97deab68b72","Type":"ContainerDied","Data":"1e5b7fdafd6b5dfe7da064308a77b7e92ccf6f49d26523f64fcc883be1040d57"} Jan 20 17:33:12 crc kubenswrapper[4558]: I0120 17:33:12.276439 4558 scope.go:117] "RemoveContainer" containerID="6ed4c26e0b379eaa1189608d6dbca318d8f1ceda6ae39f733e1b04e40c4cbb57" Jan 20 17:33:12 crc kubenswrapper[4558]: I0120 17:33:12.276514 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-7c4c8fc458-mv7sx" Jan 20 17:33:12 crc kubenswrapper[4558]: I0120 17:33:12.276769 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:33:12 crc kubenswrapper[4558]: I0120 17:33:12.276834 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:33:12 crc kubenswrapper[4558]: I0120 17:33:12.281864 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:33:12 crc kubenswrapper[4558]: I0120 17:33:12.292929 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/keystone-c7b8d4458-7ddjq" podStartSLOduration=2.292920148 podStartE2EDuration="2.292920148s" podCreationTimestamp="2026-01-20 17:33:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:33:12.2867993 +0000 UTC m=+3086.047137266" watchObservedRunningTime="2026-01-20 17:33:12.292920148 +0000 UTC m=+3086.053258106" Jan 20 17:33:12 crc kubenswrapper[4558]: I0120 17:33:12.302425 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a9cf38d-4b8b-453b-bc39-b97deab68b72-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3a9cf38d-4b8b-453b-bc39-b97deab68b72" (UID: "3a9cf38d-4b8b-453b-bc39-b97deab68b72"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:12 crc kubenswrapper[4558]: I0120 17:33:12.320630 4558 scope.go:117] "RemoveContainer" containerID="76e12ebe02e4aa0edbf97593983bce1f44046a97fbfe38a1b74fd48dd726a1c5" Jan 20 17:33:12 crc kubenswrapper[4558]: I0120 17:33:12.324648 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mtl24\" (UniqueName: \"kubernetes.io/projected/3a9cf38d-4b8b-453b-bc39-b97deab68b72-kube-api-access-mtl24\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:12 crc kubenswrapper[4558]: I0120 17:33:12.324680 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/3a9cf38d-4b8b-453b-bc39-b97deab68b72-httpd-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:12 crc kubenswrapper[4558]: I0120 17:33:12.324691 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a9cf38d-4b8b-453b-bc39-b97deab68b72-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:12 crc kubenswrapper[4558]: I0120 17:33:12.324702 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/3a9cf38d-4b8b-453b-bc39-b97deab68b72-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:12 crc kubenswrapper[4558]: I0120 17:33:12.363923 4558 scope.go:117] "RemoveContainer" containerID="6ed4c26e0b379eaa1189608d6dbca318d8f1ceda6ae39f733e1b04e40c4cbb57" Jan 20 17:33:12 crc kubenswrapper[4558]: E0120 17:33:12.364468 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6ed4c26e0b379eaa1189608d6dbca318d8f1ceda6ae39f733e1b04e40c4cbb57\": container with ID starting with 6ed4c26e0b379eaa1189608d6dbca318d8f1ceda6ae39f733e1b04e40c4cbb57 not found: ID does not exist" containerID="6ed4c26e0b379eaa1189608d6dbca318d8f1ceda6ae39f733e1b04e40c4cbb57" Jan 20 17:33:12 crc kubenswrapper[4558]: 
I0120 17:33:12.364499 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6ed4c26e0b379eaa1189608d6dbca318d8f1ceda6ae39f733e1b04e40c4cbb57"} err="failed to get container status \"6ed4c26e0b379eaa1189608d6dbca318d8f1ceda6ae39f733e1b04e40c4cbb57\": rpc error: code = NotFound desc = could not find container \"6ed4c26e0b379eaa1189608d6dbca318d8f1ceda6ae39f733e1b04e40c4cbb57\": container with ID starting with 6ed4c26e0b379eaa1189608d6dbca318d8f1ceda6ae39f733e1b04e40c4cbb57 not found: ID does not exist" Jan 20 17:33:12 crc kubenswrapper[4558]: I0120 17:33:12.364520 4558 scope.go:117] "RemoveContainer" containerID="76e12ebe02e4aa0edbf97593983bce1f44046a97fbfe38a1b74fd48dd726a1c5" Jan 20 17:33:12 crc kubenswrapper[4558]: E0120 17:33:12.365364 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"76e12ebe02e4aa0edbf97593983bce1f44046a97fbfe38a1b74fd48dd726a1c5\": container with ID starting with 76e12ebe02e4aa0edbf97593983bce1f44046a97fbfe38a1b74fd48dd726a1c5 not found: ID does not exist" containerID="76e12ebe02e4aa0edbf97593983bce1f44046a97fbfe38a1b74fd48dd726a1c5" Jan 20 17:33:12 crc kubenswrapper[4558]: I0120 17:33:12.365392 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"76e12ebe02e4aa0edbf97593983bce1f44046a97fbfe38a1b74fd48dd726a1c5"} err="failed to get container status \"76e12ebe02e4aa0edbf97593983bce1f44046a97fbfe38a1b74fd48dd726a1c5\": rpc error: code = NotFound desc = could not find container \"76e12ebe02e4aa0edbf97593983bce1f44046a97fbfe38a1b74fd48dd726a1c5\": container with ID starting with 76e12ebe02e4aa0edbf97593983bce1f44046a97fbfe38a1b74fd48dd726a1c5 not found: ID does not exist" Jan 20 17:33:12 crc kubenswrapper[4558]: I0120 17:33:12.618727 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-7c4c8fc458-mv7sx"] Jan 20 17:33:12 crc kubenswrapper[4558]: I0120 17:33:12.626920 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-7c4c8fc458-mv7sx"] Jan 20 17:33:12 crc kubenswrapper[4558]: I0120 17:33:12.846336 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:33:12 crc kubenswrapper[4558]: I0120 17:33:12.846376 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:33:12 crc kubenswrapper[4558]: I0120 17:33:12.854155 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_neutron-9c84d7f96-k2vb8_25ac8e40-35ea-4d5b-8d8a-ee123c2e2fff/neutron-api/0.log" Jan 20 17:33:12 crc kubenswrapper[4558]: I0120 17:33:12.854221 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-9c84d7f96-k2vb8" Jan 20 17:33:12 crc kubenswrapper[4558]: I0120 17:33:12.864736 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:33:12 crc kubenswrapper[4558]: I0120 17:33:12.903221 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:33:12 crc kubenswrapper[4558]: I0120 17:33:12.905186 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:33:12 crc kubenswrapper[4558]: I0120 17:33:12.908474 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:33:12 crc kubenswrapper[4558]: I0120 17:33:12.940617 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pfd22\" (UniqueName: \"kubernetes.io/projected/25ac8e40-35ea-4d5b-8d8a-ee123c2e2fff-kube-api-access-pfd22\") pod \"25ac8e40-35ea-4d5b-8d8a-ee123c2e2fff\" (UID: \"25ac8e40-35ea-4d5b-8d8a-ee123c2e2fff\") " Jan 20 17:33:12 crc kubenswrapper[4558]: I0120 17:33:12.940702 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25ac8e40-35ea-4d5b-8d8a-ee123c2e2fff-combined-ca-bundle\") pod \"25ac8e40-35ea-4d5b-8d8a-ee123c2e2fff\" (UID: \"25ac8e40-35ea-4d5b-8d8a-ee123c2e2fff\") " Jan 20 17:33:12 crc kubenswrapper[4558]: I0120 17:33:12.940887 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/25ac8e40-35ea-4d5b-8d8a-ee123c2e2fff-config\") pod \"25ac8e40-35ea-4d5b-8d8a-ee123c2e2fff\" (UID: \"25ac8e40-35ea-4d5b-8d8a-ee123c2e2fff\") " Jan 20 17:33:12 crc kubenswrapper[4558]: I0120 17:33:12.941058 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/25ac8e40-35ea-4d5b-8d8a-ee123c2e2fff-httpd-config\") pod \"25ac8e40-35ea-4d5b-8d8a-ee123c2e2fff\" (UID: \"25ac8e40-35ea-4d5b-8d8a-ee123c2e2fff\") " Jan 20 17:33:12 crc kubenswrapper[4558]: I0120 17:33:12.950206 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25ac8e40-35ea-4d5b-8d8a-ee123c2e2fff-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "25ac8e40-35ea-4d5b-8d8a-ee123c2e2fff" (UID: "25ac8e40-35ea-4d5b-8d8a-ee123c2e2fff"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:13 crc kubenswrapper[4558]: I0120 17:33:13.001315 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25ac8e40-35ea-4d5b-8d8a-ee123c2e2fff-kube-api-access-pfd22" (OuterVolumeSpecName: "kube-api-access-pfd22") pod "25ac8e40-35ea-4d5b-8d8a-ee123c2e2fff" (UID: "25ac8e40-35ea-4d5b-8d8a-ee123c2e2fff"). InnerVolumeSpecName "kube-api-access-pfd22". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:33:13 crc kubenswrapper[4558]: I0120 17:33:13.027986 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25ac8e40-35ea-4d5b-8d8a-ee123c2e2fff-config" (OuterVolumeSpecName: "config") pod "25ac8e40-35ea-4d5b-8d8a-ee123c2e2fff" (UID: "25ac8e40-35ea-4d5b-8d8a-ee123c2e2fff"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:13 crc kubenswrapper[4558]: I0120 17:33:13.044823 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25ac8e40-35ea-4d5b-8d8a-ee123c2e2fff-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "25ac8e40-35ea-4d5b-8d8a-ee123c2e2fff" (UID: "25ac8e40-35ea-4d5b-8d8a-ee123c2e2fff"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:13 crc kubenswrapper[4558]: I0120 17:33:13.047005 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/25ac8e40-35ea-4d5b-8d8a-ee123c2e2fff-httpd-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:13 crc kubenswrapper[4558]: I0120 17:33:13.047069 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pfd22\" (UniqueName: \"kubernetes.io/projected/25ac8e40-35ea-4d5b-8d8a-ee123c2e2fff-kube-api-access-pfd22\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:13 crc kubenswrapper[4558]: I0120 17:33:13.047093 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25ac8e40-35ea-4d5b-8d8a-ee123c2e2fff-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:13 crc kubenswrapper[4558]: I0120 17:33:13.047107 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/25ac8e40-35ea-4d5b-8d8a-ee123c2e2fff-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:13 crc kubenswrapper[4558]: I0120 17:33:13.100157 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:33:13 crc kubenswrapper[4558]: I0120 17:33:13.100434 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:33:13 crc kubenswrapper[4558]: I0120 17:33:13.128872 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:33:13 crc kubenswrapper[4558]: I0120 17:33:13.133107 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:33:13 crc kubenswrapper[4558]: I0120 17:33:13.203044 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/barbican-api-b7d7d464-2s2ds" Jan 20 17:33:13 crc kubenswrapper[4558]: I0120 17:33:13.258309 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:33:13 crc kubenswrapper[4558]: I0120 17:33:13.259847 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:33:13 crc kubenswrapper[4558]: I0120 17:33:13.260027 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:33:13 crc kubenswrapper[4558]: I0120 17:33:13.303453 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_neutron-9c84d7f96-k2vb8_25ac8e40-35ea-4d5b-8d8a-ee123c2e2fff/neutron-api/0.log" Jan 20 17:33:13 crc kubenswrapper[4558]: I0120 17:33:13.303505 4558 generic.go:334] "Generic (PLEG): container finished" podID="25ac8e40-35ea-4d5b-8d8a-ee123c2e2fff" 
containerID="dab990962bd423061467701f509eb86caef0f4aae069688ebb9c4fa1d2c455d5" exitCode=1 Jan 20 17:33:13 crc kubenswrapper[4558]: I0120 17:33:13.303564 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-9c84d7f96-k2vb8" event={"ID":"25ac8e40-35ea-4d5b-8d8a-ee123c2e2fff","Type":"ContainerDied","Data":"dab990962bd423061467701f509eb86caef0f4aae069688ebb9c4fa1d2c455d5"} Jan 20 17:33:13 crc kubenswrapper[4558]: I0120 17:33:13.303590 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-9c84d7f96-k2vb8" event={"ID":"25ac8e40-35ea-4d5b-8d8a-ee123c2e2fff","Type":"ContainerDied","Data":"03663a328f32678b585838e982e6923562fdcb85fff77f09aaaf3c9fd37d3f88"} Jan 20 17:33:13 crc kubenswrapper[4558]: I0120 17:33:13.303610 4558 scope.go:117] "RemoveContainer" containerID="22c9efbba433655e1e32d8ca30bd57190a70d36b9688d02f701b38f551db7ae8" Jan 20 17:33:13 crc kubenswrapper[4558]: I0120 17:33:13.303769 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-9c84d7f96-k2vb8" Jan 20 17:33:13 crc kubenswrapper[4558]: I0120 17:33:13.317009 4558 generic.go:334] "Generic (PLEG): container finished" podID="d48e656f-2dc6-4ecc-ba26-40661e7443be" containerID="9c81b3c994e4a29a9c09cfb4a71e0a12b59291ec0580185cdc50cbf5be163007" exitCode=0 Jan 20 17:33:13 crc kubenswrapper[4558]: I0120 17:33:13.317216 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-5bb5cf79fd-qwwlx" event={"ID":"d48e656f-2dc6-4ecc-ba26-40661e7443be","Type":"ContainerDied","Data":"9c81b3c994e4a29a9c09cfb4a71e0a12b59291ec0580185cdc50cbf5be163007"} Jan 20 17:33:13 crc kubenswrapper[4558]: I0120 17:33:13.322038 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:33:13 crc kubenswrapper[4558]: I0120 17:33:13.322079 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:33:13 crc kubenswrapper[4558]: I0120 17:33:13.322093 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:33:13 crc kubenswrapper[4558]: I0120 17:33:13.322101 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:33:13 crc kubenswrapper[4558]: I0120 17:33:13.323439 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:33:13 crc kubenswrapper[4558]: I0120 17:33:13.336681 4558 scope.go:117] "RemoveContainer" containerID="dab990962bd423061467701f509eb86caef0f4aae069688ebb9c4fa1d2c455d5" Jan 20 17:33:13 crc kubenswrapper[4558]: I0120 17:33:13.340755 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-9c84d7f96-k2vb8"] Jan 20 17:33:13 crc kubenswrapper[4558]: I0120 17:33:13.353249 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-9c84d7f96-k2vb8"] Jan 20 17:33:13 crc kubenswrapper[4558]: I0120 17:33:13.360316 4558 scope.go:117] "RemoveContainer" containerID="22c9efbba433655e1e32d8ca30bd57190a70d36b9688d02f701b38f551db7ae8" Jan 20 17:33:13 crc kubenswrapper[4558]: E0120 17:33:13.360860 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"22c9efbba433655e1e32d8ca30bd57190a70d36b9688d02f701b38f551db7ae8\": container with ID starting with 22c9efbba433655e1e32d8ca30bd57190a70d36b9688d02f701b38f551db7ae8 not found: ID does not exist" containerID="22c9efbba433655e1e32d8ca30bd57190a70d36b9688d02f701b38f551db7ae8" Jan 20 17:33:13 crc kubenswrapper[4558]: I0120 17:33:13.360947 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"22c9efbba433655e1e32d8ca30bd57190a70d36b9688d02f701b38f551db7ae8"} err="failed to get container status \"22c9efbba433655e1e32d8ca30bd57190a70d36b9688d02f701b38f551db7ae8\": rpc error: code = NotFound desc = could not find container \"22c9efbba433655e1e32d8ca30bd57190a70d36b9688d02f701b38f551db7ae8\": container with ID starting with 22c9efbba433655e1e32d8ca30bd57190a70d36b9688d02f701b38f551db7ae8 not found: ID does not exist" Jan 20 17:33:13 crc kubenswrapper[4558]: I0120 17:33:13.361020 4558 scope.go:117] "RemoveContainer" containerID="dab990962bd423061467701f509eb86caef0f4aae069688ebb9c4fa1d2c455d5" Jan 20 17:33:13 crc kubenswrapper[4558]: E0120 17:33:13.361419 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dab990962bd423061467701f509eb86caef0f4aae069688ebb9c4fa1d2c455d5\": container with ID starting with dab990962bd423061467701f509eb86caef0f4aae069688ebb9c4fa1d2c455d5 not found: ID does not exist" containerID="dab990962bd423061467701f509eb86caef0f4aae069688ebb9c4fa1d2c455d5" Jan 20 17:33:13 crc kubenswrapper[4558]: I0120 17:33:13.361520 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dab990962bd423061467701f509eb86caef0f4aae069688ebb9c4fa1d2c455d5"} err="failed to get container status \"dab990962bd423061467701f509eb86caef0f4aae069688ebb9c4fa1d2c455d5\": rpc error: code = NotFound desc = could not find container \"dab990962bd423061467701f509eb86caef0f4aae069688ebb9c4fa1d2c455d5\": container with ID starting with dab990962bd423061467701f509eb86caef0f4aae069688ebb9c4fa1d2c455d5 not found: ID does not exist" Jan 20 17:33:13 crc kubenswrapper[4558]: I0120 17:33:13.371019 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/barbican-api-b7d7d464-2s2ds" Jan 20 17:33:13 crc kubenswrapper[4558]: I0120 17:33:13.444135 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-api-6d8c858488-xrdwn"] Jan 20 17:33:13 crc kubenswrapper[4558]: I0120 17:33:13.444461 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-api-6d8c858488-xrdwn" podUID="d0062240-1ed7-488f-ac15-d3c5e0812ccd" containerName="barbican-api-log" containerID="cri-o://c0fe7edacfd45633530e170a5c39c2fd85b8ee4e9a397ea2a78b53cb55d8f966" gracePeriod=30 Jan 20 17:33:13 crc kubenswrapper[4558]: I0120 17:33:13.444552 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-api-6d8c858488-xrdwn" podUID="d0062240-1ed7-488f-ac15-d3c5e0812ccd" containerName="barbican-api" containerID="cri-o://f8f2ef208173ae07911e14421c874575ec9ac47e3161c054e5cda79b8cc76030" gracePeriod=30 Jan 20 17:33:13 crc kubenswrapper[4558]: I0120 17:33:13.617646 4558 scope.go:117] "RemoveContainer" containerID="08372e554acc972018371c794d0195d55e9ca9f63ea522358caad97d33501495" Jan 20 17:33:13 crc kubenswrapper[4558]: I0120 17:33:13.669099 4558 scope.go:117] "RemoveContainer" 
containerID="fa220b08cefc27ae29c6a4562f29cca0ef746f81fe72e0cd26f3b1e6b453f18e" Jan 20 17:33:13 crc kubenswrapper[4558]: I0120 17:33:13.697381 4558 scope.go:117] "RemoveContainer" containerID="dcea6d9bce8f8b365b1e3907b2409a3e2ed23260cd8dc669504ca51f06ab237e" Jan 20 17:33:13 crc kubenswrapper[4558]: I0120 17:33:13.760965 4558 scope.go:117] "RemoveContainer" containerID="d20cdc2ad9b52842bb98fbc066110f969938fc19f05488867c07cc1ed5ad8486" Jan 20 17:33:13 crc kubenswrapper[4558]: I0120 17:33:13.795854 4558 scope.go:117] "RemoveContainer" containerID="12bf0dfcd5e93ae240822317488ad957dc71e08560168ddf9dbb00c2223c68b3" Jan 20 17:33:13 crc kubenswrapper[4558]: I0120 17:33:13.864666 4558 scope.go:117] "RemoveContainer" containerID="a9c0c1143b3b33cc02b475f942743f25949d7a8e463523453117829d18bc66f6" Jan 20 17:33:13 crc kubenswrapper[4558]: I0120 17:33:13.912659 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:33:13 crc kubenswrapper[4558]: I0120 17:33:13.932076 4558 scope.go:117] "RemoveContainer" containerID="5c49ce71384981a8c60295f3e437aa3231d0f9a9b4690345fd2a2732983bcc39" Jan 20 17:33:13 crc kubenswrapper[4558]: I0120 17:33:13.963219 4558 scope.go:117] "RemoveContainer" containerID="ba1f5590ea277f251775376a8988a4f0e9b00277af3bcaf111020b2f5cccdc43" Jan 20 17:33:14 crc kubenswrapper[4558]: I0120 17:33:14.016099 4558 scope.go:117] "RemoveContainer" containerID="e2e1e1cb6e8a72f68287eff8a5396b012d963913a047795276d785bba4b6a44c" Jan 20 17:33:14 crc kubenswrapper[4558]: I0120 17:33:14.037180 4558 scope.go:117] "RemoveContainer" containerID="7e009ea8664346031f4001d328d14e88633993579983342f57172300cb2e9d67" Jan 20 17:33:14 crc kubenswrapper[4558]: I0120 17:33:14.068424 4558 scope.go:117] "RemoveContainer" containerID="ea8201fc9fae9612c7677cb7b1dbff6d7938a87582e7ecc4e74f7875b23122ec" Jan 20 17:33:14 crc kubenswrapper[4558]: I0120 17:33:14.092616 4558 scope.go:117] "RemoveContainer" containerID="8657c2c036a43ada6b1b0d8548a2af3d55c5d81b571839269cbc4e19ec5c4f82" Jan 20 17:33:14 crc kubenswrapper[4558]: I0120 17:33:14.134454 4558 scope.go:117] "RemoveContainer" containerID="364d7369bafd0402a20a7f9c1276504839740cce0e4e93df229f18382a27c891" Jan 20 17:33:14 crc kubenswrapper[4558]: I0120 17:33:14.157956 4558 scope.go:117] "RemoveContainer" containerID="46211dbcac65c4f79d8afc41ddb2ceb06935611e997fd6f57f19214db5ea0343" Jan 20 17:33:14 crc kubenswrapper[4558]: I0120 17:33:14.357026 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_neutron-5bb5cf79fd-qwwlx_d48e656f-2dc6-4ecc-ba26-40661e7443be/neutron-api/0.log" Jan 20 17:33:14 crc kubenswrapper[4558]: I0120 17:33:14.357358 4558 generic.go:334] "Generic (PLEG): container finished" podID="d48e656f-2dc6-4ecc-ba26-40661e7443be" containerID="4ede584424fe3e5e06d54d22231ee5cfcf1fa1823f294e5bb975df95cd01501c" exitCode=1 Jan 20 17:33:14 crc kubenswrapper[4558]: I0120 17:33:14.357453 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-5bb5cf79fd-qwwlx" event={"ID":"d48e656f-2dc6-4ecc-ba26-40661e7443be","Type":"ContainerDied","Data":"4ede584424fe3e5e06d54d22231ee5cfcf1fa1823f294e5bb975df95cd01501c"} Jan 20 17:33:14 crc kubenswrapper[4558]: I0120 17:33:14.384399 4558 generic.go:334] "Generic (PLEG): container finished" podID="d0062240-1ed7-488f-ac15-d3c5e0812ccd" containerID="c0fe7edacfd45633530e170a5c39c2fd85b8ee4e9a397ea2a78b53cb55d8f966" exitCode=143 Jan 20 17:33:14 crc kubenswrapper[4558]: I0120 17:33:14.385117 4558 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-6d8c858488-xrdwn" event={"ID":"d0062240-1ed7-488f-ac15-d3c5e0812ccd","Type":"ContainerDied","Data":"c0fe7edacfd45633530e170a5c39c2fd85b8ee4e9a397ea2a78b53cb55d8f966"} Jan 20 17:33:14 crc kubenswrapper[4558]: I0120 17:33:14.568153 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_neutron-5bb5cf79fd-qwwlx_d48e656f-2dc6-4ecc-ba26-40661e7443be/neutron-api/0.log" Jan 20 17:33:14 crc kubenswrapper[4558]: I0120 17:33:14.568255 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-5bb5cf79fd-qwwlx" Jan 20 17:33:14 crc kubenswrapper[4558]: I0120 17:33:14.584738 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25ac8e40-35ea-4d5b-8d8a-ee123c2e2fff" path="/var/lib/kubelet/pods/25ac8e40-35ea-4d5b-8d8a-ee123c2e2fff/volumes" Jan 20 17:33:14 crc kubenswrapper[4558]: I0120 17:33:14.585440 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3a9cf38d-4b8b-453b-bc39-b97deab68b72" path="/var/lib/kubelet/pods/3a9cf38d-4b8b-453b-bc39-b97deab68b72/volumes" Jan 20 17:33:14 crc kubenswrapper[4558]: I0120 17:33:14.703023 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rhgsm\" (UniqueName: \"kubernetes.io/projected/d48e656f-2dc6-4ecc-ba26-40661e7443be-kube-api-access-rhgsm\") pod \"d48e656f-2dc6-4ecc-ba26-40661e7443be\" (UID: \"d48e656f-2dc6-4ecc-ba26-40661e7443be\") " Jan 20 17:33:14 crc kubenswrapper[4558]: I0120 17:33:14.703323 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d48e656f-2dc6-4ecc-ba26-40661e7443be-combined-ca-bundle\") pod \"d48e656f-2dc6-4ecc-ba26-40661e7443be\" (UID: \"d48e656f-2dc6-4ecc-ba26-40661e7443be\") " Jan 20 17:33:14 crc kubenswrapper[4558]: I0120 17:33:14.703380 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/d48e656f-2dc6-4ecc-ba26-40661e7443be-httpd-config\") pod \"d48e656f-2dc6-4ecc-ba26-40661e7443be\" (UID: \"d48e656f-2dc6-4ecc-ba26-40661e7443be\") " Jan 20 17:33:14 crc kubenswrapper[4558]: I0120 17:33:14.703483 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/d48e656f-2dc6-4ecc-ba26-40661e7443be-config\") pod \"d48e656f-2dc6-4ecc-ba26-40661e7443be\" (UID: \"d48e656f-2dc6-4ecc-ba26-40661e7443be\") " Jan 20 17:33:14 crc kubenswrapper[4558]: I0120 17:33:14.731473 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d48e656f-2dc6-4ecc-ba26-40661e7443be-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "d48e656f-2dc6-4ecc-ba26-40661e7443be" (UID: "d48e656f-2dc6-4ecc-ba26-40661e7443be"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:14 crc kubenswrapper[4558]: I0120 17:33:14.733630 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d48e656f-2dc6-4ecc-ba26-40661e7443be-kube-api-access-rhgsm" (OuterVolumeSpecName: "kube-api-access-rhgsm") pod "d48e656f-2dc6-4ecc-ba26-40661e7443be" (UID: "d48e656f-2dc6-4ecc-ba26-40661e7443be"). InnerVolumeSpecName "kube-api-access-rhgsm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:33:14 crc kubenswrapper[4558]: I0120 17:33:14.758494 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d48e656f-2dc6-4ecc-ba26-40661e7443be-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d48e656f-2dc6-4ecc-ba26-40661e7443be" (UID: "d48e656f-2dc6-4ecc-ba26-40661e7443be"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:14 crc kubenswrapper[4558]: I0120 17:33:14.778298 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d48e656f-2dc6-4ecc-ba26-40661e7443be-config" (OuterVolumeSpecName: "config") pod "d48e656f-2dc6-4ecc-ba26-40661e7443be" (UID: "d48e656f-2dc6-4ecc-ba26-40661e7443be"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:14 crc kubenswrapper[4558]: I0120 17:33:14.809683 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d48e656f-2dc6-4ecc-ba26-40661e7443be-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:14 crc kubenswrapper[4558]: I0120 17:33:14.809708 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/d48e656f-2dc6-4ecc-ba26-40661e7443be-httpd-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:14 crc kubenswrapper[4558]: I0120 17:33:14.809717 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/d48e656f-2dc6-4ecc-ba26-40661e7443be-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:14 crc kubenswrapper[4558]: I0120 17:33:14.809730 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rhgsm\" (UniqueName: \"kubernetes.io/projected/d48e656f-2dc6-4ecc-ba26-40661e7443be-kube-api-access-rhgsm\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:14 crc kubenswrapper[4558]: I0120 17:33:14.948278 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:33:14 crc kubenswrapper[4558]: I0120 17:33:14.988926 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:33:15 crc kubenswrapper[4558]: I0120 17:33:15.240132 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:33:15 crc kubenswrapper[4558]: I0120 17:33:15.244424 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:33:15 crc kubenswrapper[4558]: I0120 17:33:15.357485 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:33:15 crc kubenswrapper[4558]: I0120 17:33:15.394071 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-5bb5cf79fd-qwwlx" Jan 20 17:33:15 crc kubenswrapper[4558]: I0120 17:33:15.399686 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-5bb5cf79fd-qwwlx" event={"ID":"d48e656f-2dc6-4ecc-ba26-40661e7443be","Type":"ContainerDied","Data":"2702f28aa9af14f23e966e6e9fdc88a4f06b778da8c98c4238495695e87e74dd"} Jan 20 17:33:15 crc kubenswrapper[4558]: I0120 17:33:15.399779 4558 scope.go:117] "RemoveContainer" containerID="9c81b3c994e4a29a9c09cfb4a71e0a12b59291ec0580185cdc50cbf5be163007" Jan 20 17:33:15 crc kubenswrapper[4558]: I0120 17:33:15.422968 4558 scope.go:117] "RemoveContainer" containerID="4ede584424fe3e5e06d54d22231ee5cfcf1fa1823f294e5bb975df95cd01501c" Jan 20 17:33:15 crc kubenswrapper[4558]: I0120 17:33:15.423064 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:33:15 crc kubenswrapper[4558]: I0120 17:33:15.423461 4558 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 20 17:33:15 crc kubenswrapper[4558]: I0120 17:33:15.427564 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:33:15 crc kubenswrapper[4558]: I0120 17:33:15.441980 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-5bb5cf79fd-qwwlx"] Jan 20 17:33:15 crc kubenswrapper[4558]: I0120 17:33:15.466831 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-5bb5cf79fd-qwwlx"] Jan 20 17:33:15 crc kubenswrapper[4558]: I0120 17:33:15.566534 4558 scope.go:117] "RemoveContainer" containerID="f275e9f5de330b3c79316d5871106c6bcf90b698e0744c5beb28da45c336b36d" Jan 20 17:33:15 crc kubenswrapper[4558]: E0120 17:33:15.566755 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:33:16 crc kubenswrapper[4558]: E0120 17:33:16.160416 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="46edde77e0df814c67d5b21fe72058f3eb4efd6acd4a7f68a737219cd8448b40" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:33:16 crc kubenswrapper[4558]: E0120 17:33:16.168678 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="46edde77e0df814c67d5b21fe72058f3eb4efd6acd4a7f68a737219cd8448b40" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:33:16 crc kubenswrapper[4558]: E0120 17:33:16.172250 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="46edde77e0df814c67d5b21fe72058f3eb4efd6acd4a7f68a737219cd8448b40" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:33:16 crc kubenswrapper[4558]: E0120 17:33:16.172328 4558 
prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podUID="c686e4c2-68d0-4999-9078-90c70927d9ae" containerName="nova-cell1-conductor-conductor" Jan 20 17:33:16 crc kubenswrapper[4558]: I0120 17:33:16.191840 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-api-55c969d68-6j7rc"] Jan 20 17:33:16 crc kubenswrapper[4558]: E0120 17:33:16.192256 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d48e656f-2dc6-4ecc-ba26-40661e7443be" containerName="neutron-httpd" Jan 20 17:33:16 crc kubenswrapper[4558]: I0120 17:33:16.192274 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d48e656f-2dc6-4ecc-ba26-40661e7443be" containerName="neutron-httpd" Jan 20 17:33:16 crc kubenswrapper[4558]: E0120 17:33:16.192293 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25ac8e40-35ea-4d5b-8d8a-ee123c2e2fff" containerName="neutron-api" Jan 20 17:33:16 crc kubenswrapper[4558]: I0120 17:33:16.192300 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="25ac8e40-35ea-4d5b-8d8a-ee123c2e2fff" containerName="neutron-api" Jan 20 17:33:16 crc kubenswrapper[4558]: E0120 17:33:16.192309 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d48e656f-2dc6-4ecc-ba26-40661e7443be" containerName="neutron-api" Jan 20 17:33:16 crc kubenswrapper[4558]: I0120 17:33:16.192315 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d48e656f-2dc6-4ecc-ba26-40661e7443be" containerName="neutron-api" Jan 20 17:33:16 crc kubenswrapper[4558]: E0120 17:33:16.192326 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a9cf38d-4b8b-453b-bc39-b97deab68b72" containerName="neutron-httpd" Jan 20 17:33:16 crc kubenswrapper[4558]: I0120 17:33:16.192331 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a9cf38d-4b8b-453b-bc39-b97deab68b72" containerName="neutron-httpd" Jan 20 17:33:16 crc kubenswrapper[4558]: E0120 17:33:16.192344 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a9cf38d-4b8b-453b-bc39-b97deab68b72" containerName="neutron-api" Jan 20 17:33:16 crc kubenswrapper[4558]: I0120 17:33:16.192349 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a9cf38d-4b8b-453b-bc39-b97deab68b72" containerName="neutron-api" Jan 20 17:33:16 crc kubenswrapper[4558]: E0120 17:33:16.192377 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25ac8e40-35ea-4d5b-8d8a-ee123c2e2fff" containerName="neutron-httpd" Jan 20 17:33:16 crc kubenswrapper[4558]: I0120 17:33:16.192382 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="25ac8e40-35ea-4d5b-8d8a-ee123c2e2fff" containerName="neutron-httpd" Jan 20 17:33:16 crc kubenswrapper[4558]: I0120 17:33:16.192540 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d48e656f-2dc6-4ecc-ba26-40661e7443be" containerName="neutron-api" Jan 20 17:33:16 crc kubenswrapper[4558]: I0120 17:33:16.192555 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="3a9cf38d-4b8b-453b-bc39-b97deab68b72" containerName="neutron-httpd" Jan 20 17:33:16 crc kubenswrapper[4558]: I0120 17:33:16.192564 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="3a9cf38d-4b8b-453b-bc39-b97deab68b72" containerName="neutron-api" Jan 20 17:33:16 crc kubenswrapper[4558]: I0120 17:33:16.192570 4558 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="d48e656f-2dc6-4ecc-ba26-40661e7443be" containerName="neutron-httpd" Jan 20 17:33:16 crc kubenswrapper[4558]: I0120 17:33:16.192578 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="25ac8e40-35ea-4d5b-8d8a-ee123c2e2fff" containerName="neutron-api" Jan 20 17:33:16 crc kubenswrapper[4558]: I0120 17:33:16.192591 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="25ac8e40-35ea-4d5b-8d8a-ee123c2e2fff" containerName="neutron-httpd" Jan 20 17:33:16 crc kubenswrapper[4558]: I0120 17:33:16.194543 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-api-55c969d68-6j7rc" Jan 20 17:33:16 crc kubenswrapper[4558]: I0120 17:33:16.196980 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-barbican-public-svc" Jan 20 17:33:16 crc kubenswrapper[4558]: I0120 17:33:16.196990 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-barbican-internal-svc" Jan 20 17:33:16 crc kubenswrapper[4558]: I0120 17:33:16.198501 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-7fb744b5b7-vj8fh" Jan 20 17:33:16 crc kubenswrapper[4558]: I0120 17:33:16.219978 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-api-55c969d68-6j7rc"] Jan 20 17:33:16 crc kubenswrapper[4558]: I0120 17:33:16.255475 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8516d676-2fa0-47c4-b81a-254ec50efd52-combined-ca-bundle\") pod \"8516d676-2fa0-47c4-b81a-254ec50efd52\" (UID: \"8516d676-2fa0-47c4-b81a-254ec50efd52\") " Jan 20 17:33:16 crc kubenswrapper[4558]: I0120 17:33:16.255532 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5wvk2\" (UniqueName: \"kubernetes.io/projected/8516d676-2fa0-47c4-b81a-254ec50efd52-kube-api-access-5wvk2\") pod \"8516d676-2fa0-47c4-b81a-254ec50efd52\" (UID: \"8516d676-2fa0-47c4-b81a-254ec50efd52\") " Jan 20 17:33:16 crc kubenswrapper[4558]: I0120 17:33:16.255657 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8516d676-2fa0-47c4-b81a-254ec50efd52-credential-keys\") pod \"8516d676-2fa0-47c4-b81a-254ec50efd52\" (UID: \"8516d676-2fa0-47c4-b81a-254ec50efd52\") " Jan 20 17:33:16 crc kubenswrapper[4558]: I0120 17:33:16.255880 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8516d676-2fa0-47c4-b81a-254ec50efd52-fernet-keys\") pod \"8516d676-2fa0-47c4-b81a-254ec50efd52\" (UID: \"8516d676-2fa0-47c4-b81a-254ec50efd52\") " Jan 20 17:33:16 crc kubenswrapper[4558]: I0120 17:33:16.255913 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8516d676-2fa0-47c4-b81a-254ec50efd52-scripts\") pod \"8516d676-2fa0-47c4-b81a-254ec50efd52\" (UID: \"8516d676-2fa0-47c4-b81a-254ec50efd52\") " Jan 20 17:33:16 crc kubenswrapper[4558]: I0120 17:33:16.255994 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8516d676-2fa0-47c4-b81a-254ec50efd52-config-data\") pod \"8516d676-2fa0-47c4-b81a-254ec50efd52\" (UID: \"8516d676-2fa0-47c4-b81a-254ec50efd52\") " 
Jan 20 17:33:16 crc kubenswrapper[4558]: I0120 17:33:16.256385 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f-config-data-custom\") pod \"barbican-api-55c969d68-6j7rc\" (UID: \"44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f\") " pod="openstack-kuttl-tests/barbican-api-55c969d68-6j7rc" Jan 20 17:33:16 crc kubenswrapper[4558]: I0120 17:33:16.256462 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f-combined-ca-bundle\") pod \"barbican-api-55c969d68-6j7rc\" (UID: \"44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f\") " pod="openstack-kuttl-tests/barbican-api-55c969d68-6j7rc" Jan 20 17:33:16 crc kubenswrapper[4558]: I0120 17:33:16.256494 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f-internal-tls-certs\") pod \"barbican-api-55c969d68-6j7rc\" (UID: \"44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f\") " pod="openstack-kuttl-tests/barbican-api-55c969d68-6j7rc" Jan 20 17:33:16 crc kubenswrapper[4558]: I0120 17:33:16.256545 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vs7w5\" (UniqueName: \"kubernetes.io/projected/44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f-kube-api-access-vs7w5\") pod \"barbican-api-55c969d68-6j7rc\" (UID: \"44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f\") " pod="openstack-kuttl-tests/barbican-api-55c969d68-6j7rc" Jan 20 17:33:16 crc kubenswrapper[4558]: I0120 17:33:16.256639 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f-public-tls-certs\") pod \"barbican-api-55c969d68-6j7rc\" (UID: \"44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f\") " pod="openstack-kuttl-tests/barbican-api-55c969d68-6j7rc" Jan 20 17:33:16 crc kubenswrapper[4558]: I0120 17:33:16.256670 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f-config-data\") pod \"barbican-api-55c969d68-6j7rc\" (UID: \"44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f\") " pod="openstack-kuttl-tests/barbican-api-55c969d68-6j7rc" Jan 20 17:33:16 crc kubenswrapper[4558]: I0120 17:33:16.256686 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f-logs\") pod \"barbican-api-55c969d68-6j7rc\" (UID: \"44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f\") " pod="openstack-kuttl-tests/barbican-api-55c969d68-6j7rc" Jan 20 17:33:16 crc kubenswrapper[4558]: I0120 17:33:16.276565 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8516d676-2fa0-47c4-b81a-254ec50efd52-scripts" (OuterVolumeSpecName: "scripts") pod "8516d676-2fa0-47c4-b81a-254ec50efd52" (UID: "8516d676-2fa0-47c4-b81a-254ec50efd52"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:16 crc kubenswrapper[4558]: I0120 17:33:16.276602 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8516d676-2fa0-47c4-b81a-254ec50efd52-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "8516d676-2fa0-47c4-b81a-254ec50efd52" (UID: "8516d676-2fa0-47c4-b81a-254ec50efd52"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:16 crc kubenswrapper[4558]: I0120 17:33:16.276656 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8516d676-2fa0-47c4-b81a-254ec50efd52-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "8516d676-2fa0-47c4-b81a-254ec50efd52" (UID: "8516d676-2fa0-47c4-b81a-254ec50efd52"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:16 crc kubenswrapper[4558]: I0120 17:33:16.277236 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8516d676-2fa0-47c4-b81a-254ec50efd52-kube-api-access-5wvk2" (OuterVolumeSpecName: "kube-api-access-5wvk2") pod "8516d676-2fa0-47c4-b81a-254ec50efd52" (UID: "8516d676-2fa0-47c4-b81a-254ec50efd52"). InnerVolumeSpecName "kube-api-access-5wvk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:33:16 crc kubenswrapper[4558]: I0120 17:33:16.292642 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8516d676-2fa0-47c4-b81a-254ec50efd52-config-data" (OuterVolumeSpecName: "config-data") pod "8516d676-2fa0-47c4-b81a-254ec50efd52" (UID: "8516d676-2fa0-47c4-b81a-254ec50efd52"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:16 crc kubenswrapper[4558]: I0120 17:33:16.295258 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8516d676-2fa0-47c4-b81a-254ec50efd52-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8516d676-2fa0-47c4-b81a-254ec50efd52" (UID: "8516d676-2fa0-47c4-b81a-254ec50efd52"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:16 crc kubenswrapper[4558]: I0120 17:33:16.359817 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f-internal-tls-certs\") pod \"barbican-api-55c969d68-6j7rc\" (UID: \"44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f\") " pod="openstack-kuttl-tests/barbican-api-55c969d68-6j7rc" Jan 20 17:33:16 crc kubenswrapper[4558]: I0120 17:33:16.359984 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vs7w5\" (UniqueName: \"kubernetes.io/projected/44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f-kube-api-access-vs7w5\") pod \"barbican-api-55c969d68-6j7rc\" (UID: \"44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f\") " pod="openstack-kuttl-tests/barbican-api-55c969d68-6j7rc" Jan 20 17:33:16 crc kubenswrapper[4558]: I0120 17:33:16.360177 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f-public-tls-certs\") pod \"barbican-api-55c969d68-6j7rc\" (UID: \"44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f\") " pod="openstack-kuttl-tests/barbican-api-55c969d68-6j7rc" Jan 20 17:33:16 crc kubenswrapper[4558]: I0120 17:33:16.360226 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f-config-data\") pod \"barbican-api-55c969d68-6j7rc\" (UID: \"44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f\") " pod="openstack-kuttl-tests/barbican-api-55c969d68-6j7rc" Jan 20 17:33:16 crc kubenswrapper[4558]: I0120 17:33:16.360244 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f-logs\") pod \"barbican-api-55c969d68-6j7rc\" (UID: \"44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f\") " pod="openstack-kuttl-tests/barbican-api-55c969d68-6j7rc" Jan 20 17:33:16 crc kubenswrapper[4558]: I0120 17:33:16.360442 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f-config-data-custom\") pod \"barbican-api-55c969d68-6j7rc\" (UID: \"44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f\") " pod="openstack-kuttl-tests/barbican-api-55c969d68-6j7rc" Jan 20 17:33:16 crc kubenswrapper[4558]: I0120 17:33:16.360594 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f-combined-ca-bundle\") pod \"barbican-api-55c969d68-6j7rc\" (UID: \"44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f\") " pod="openstack-kuttl-tests/barbican-api-55c969d68-6j7rc" Jan 20 17:33:16 crc kubenswrapper[4558]: I0120 17:33:16.360666 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8516d676-2fa0-47c4-b81a-254ec50efd52-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:16 crc kubenswrapper[4558]: I0120 17:33:16.360684 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5wvk2\" (UniqueName: \"kubernetes.io/projected/8516d676-2fa0-47c4-b81a-254ec50efd52-kube-api-access-5wvk2\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:16 crc kubenswrapper[4558]: I0120 17:33:16.360695 4558 reconciler_common.go:293] "Volume detached for volume 
\"credential-keys\" (UniqueName: \"kubernetes.io/secret/8516d676-2fa0-47c4-b81a-254ec50efd52-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:16 crc kubenswrapper[4558]: I0120 17:33:16.360706 4558 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8516d676-2fa0-47c4-b81a-254ec50efd52-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:16 crc kubenswrapper[4558]: I0120 17:33:16.360714 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8516d676-2fa0-47c4-b81a-254ec50efd52-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:16 crc kubenswrapper[4558]: I0120 17:33:16.360722 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8516d676-2fa0-47c4-b81a-254ec50efd52-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:16 crc kubenswrapper[4558]: I0120 17:33:16.362031 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f-logs\") pod \"barbican-api-55c969d68-6j7rc\" (UID: \"44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f\") " pod="openstack-kuttl-tests/barbican-api-55c969d68-6j7rc" Jan 20 17:33:16 crc kubenswrapper[4558]: I0120 17:33:16.363102 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f-internal-tls-certs\") pod \"barbican-api-55c969d68-6j7rc\" (UID: \"44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f\") " pod="openstack-kuttl-tests/barbican-api-55c969d68-6j7rc" Jan 20 17:33:16 crc kubenswrapper[4558]: I0120 17:33:16.363437 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f-combined-ca-bundle\") pod \"barbican-api-55c969d68-6j7rc\" (UID: \"44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f\") " pod="openstack-kuttl-tests/barbican-api-55c969d68-6j7rc" Jan 20 17:33:16 crc kubenswrapper[4558]: I0120 17:33:16.364912 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f-config-data-custom\") pod \"barbican-api-55c969d68-6j7rc\" (UID: \"44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f\") " pod="openstack-kuttl-tests/barbican-api-55c969d68-6j7rc" Jan 20 17:33:16 crc kubenswrapper[4558]: I0120 17:33:16.365293 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f-public-tls-certs\") pod \"barbican-api-55c969d68-6j7rc\" (UID: \"44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f\") " pod="openstack-kuttl-tests/barbican-api-55c969d68-6j7rc" Jan 20 17:33:16 crc kubenswrapper[4558]: I0120 17:33:16.365443 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f-config-data\") pod \"barbican-api-55c969d68-6j7rc\" (UID: \"44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f\") " pod="openstack-kuttl-tests/barbican-api-55c969d68-6j7rc" Jan 20 17:33:16 crc kubenswrapper[4558]: I0120 17:33:16.375471 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vs7w5\" (UniqueName: \"kubernetes.io/projected/44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f-kube-api-access-vs7w5\") pod 
\"barbican-api-55c969d68-6j7rc\" (UID: \"44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f\") " pod="openstack-kuttl-tests/barbican-api-55c969d68-6j7rc" Jan 20 17:33:16 crc kubenswrapper[4558]: I0120 17:33:16.407551 4558 generic.go:334] "Generic (PLEG): container finished" podID="8516d676-2fa0-47c4-b81a-254ec50efd52" containerID="5f4eff277fd4cf9e126dcb473d4bc2082809a8fcea976047692503f0bb42d441" exitCode=0 Jan 20 17:33:16 crc kubenswrapper[4558]: I0120 17:33:16.407627 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-7fb744b5b7-vj8fh" event={"ID":"8516d676-2fa0-47c4-b81a-254ec50efd52","Type":"ContainerDied","Data":"5f4eff277fd4cf9e126dcb473d4bc2082809a8fcea976047692503f0bb42d441"} Jan 20 17:33:16 crc kubenswrapper[4558]: I0120 17:33:16.407664 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-7fb744b5b7-vj8fh" event={"ID":"8516d676-2fa0-47c4-b81a-254ec50efd52","Type":"ContainerDied","Data":"30df890f266e77b1f0f4c83ebc87ae7d845870e96e6f6159151d6c7ad9b88cdb"} Jan 20 17:33:16 crc kubenswrapper[4558]: I0120 17:33:16.407687 4558 scope.go:117] "RemoveContainer" containerID="5f4eff277fd4cf9e126dcb473d4bc2082809a8fcea976047692503f0bb42d441" Jan 20 17:33:16 crc kubenswrapper[4558]: I0120 17:33:16.407824 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-7fb744b5b7-vj8fh" Jan 20 17:33:16 crc kubenswrapper[4558]: I0120 17:33:16.441410 4558 scope.go:117] "RemoveContainer" containerID="5f4eff277fd4cf9e126dcb473d4bc2082809a8fcea976047692503f0bb42d441" Jan 20 17:33:16 crc kubenswrapper[4558]: E0120 17:33:16.441817 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5f4eff277fd4cf9e126dcb473d4bc2082809a8fcea976047692503f0bb42d441\": container with ID starting with 5f4eff277fd4cf9e126dcb473d4bc2082809a8fcea976047692503f0bb42d441 not found: ID does not exist" containerID="5f4eff277fd4cf9e126dcb473d4bc2082809a8fcea976047692503f0bb42d441" Jan 20 17:33:16 crc kubenswrapper[4558]: I0120 17:33:16.441850 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5f4eff277fd4cf9e126dcb473d4bc2082809a8fcea976047692503f0bb42d441"} err="failed to get container status \"5f4eff277fd4cf9e126dcb473d4bc2082809a8fcea976047692503f0bb42d441\": rpc error: code = NotFound desc = could not find container \"5f4eff277fd4cf9e126dcb473d4bc2082809a8fcea976047692503f0bb42d441\": container with ID starting with 5f4eff277fd4cf9e126dcb473d4bc2082809a8fcea976047692503f0bb42d441 not found: ID does not exist" Jan 20 17:33:16 crc kubenswrapper[4558]: I0120 17:33:16.449229 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-7fb744b5b7-vj8fh"] Jan 20 17:33:16 crc kubenswrapper[4558]: I0120 17:33:16.454287 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-7fb744b5b7-vj8fh"] Jan 20 17:33:16 crc kubenswrapper[4558]: I0120 17:33:16.508337 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-api-55c969d68-6j7rc" Jan 20 17:33:16 crc kubenswrapper[4558]: I0120 17:33:16.581190 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8516d676-2fa0-47c4-b81a-254ec50efd52" path="/var/lib/kubelet/pods/8516d676-2fa0-47c4-b81a-254ec50efd52/volumes" Jan 20 17:33:16 crc kubenswrapper[4558]: I0120 17:33:16.581901 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d48e656f-2dc6-4ecc-ba26-40661e7443be" path="/var/lib/kubelet/pods/d48e656f-2dc6-4ecc-ba26-40661e7443be/volumes" Jan 20 17:33:16 crc kubenswrapper[4558]: I0120 17:33:16.958733 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-api-55c969d68-6j7rc"] Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.083050 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.206140 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-api-6d8c858488-xrdwn" Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.286228 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/568979fa-537c-45c5-952c-f87a40b194ef-config-data-custom\") pod \"568979fa-537c-45c5-952c-f87a40b194ef\" (UID: \"568979fa-537c-45c5-952c-f87a40b194ef\") " Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.286509 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xsjnm\" (UniqueName: \"kubernetes.io/projected/568979fa-537c-45c5-952c-f87a40b194ef-kube-api-access-xsjnm\") pod \"568979fa-537c-45c5-952c-f87a40b194ef\" (UID: \"568979fa-537c-45c5-952c-f87a40b194ef\") " Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.286611 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/568979fa-537c-45c5-952c-f87a40b194ef-scripts\") pod \"568979fa-537c-45c5-952c-f87a40b194ef\" (UID: \"568979fa-537c-45c5-952c-f87a40b194ef\") " Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.286689 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/568979fa-537c-45c5-952c-f87a40b194ef-combined-ca-bundle\") pod \"568979fa-537c-45c5-952c-f87a40b194ef\" (UID: \"568979fa-537c-45c5-952c-f87a40b194ef\") " Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.286732 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/568979fa-537c-45c5-952c-f87a40b194ef-config-data\") pod \"568979fa-537c-45c5-952c-f87a40b194ef\" (UID: \"568979fa-537c-45c5-952c-f87a40b194ef\") " Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.287764 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/568979fa-537c-45c5-952c-f87a40b194ef-etc-machine-id\") pod \"568979fa-537c-45c5-952c-f87a40b194ef\" (UID: \"568979fa-537c-45c5-952c-f87a40b194ef\") " Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.287924 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/568979fa-537c-45c5-952c-f87a40b194ef-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod 
"568979fa-537c-45c5-952c-f87a40b194ef" (UID: "568979fa-537c-45c5-952c-f87a40b194ef"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.289587 4558 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/568979fa-537c-45c5-952c-f87a40b194ef-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.292412 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/568979fa-537c-45c5-952c-f87a40b194ef-kube-api-access-xsjnm" (OuterVolumeSpecName: "kube-api-access-xsjnm") pod "568979fa-537c-45c5-952c-f87a40b194ef" (UID: "568979fa-537c-45c5-952c-f87a40b194ef"). InnerVolumeSpecName "kube-api-access-xsjnm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.295798 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/568979fa-537c-45c5-952c-f87a40b194ef-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "568979fa-537c-45c5-952c-f87a40b194ef" (UID: "568979fa-537c-45c5-952c-f87a40b194ef"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.297152 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/568979fa-537c-45c5-952c-f87a40b194ef-scripts" (OuterVolumeSpecName: "scripts") pod "568979fa-537c-45c5-952c-f87a40b194ef" (UID: "568979fa-537c-45c5-952c-f87a40b194ef"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.342371 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/568979fa-537c-45c5-952c-f87a40b194ef-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "568979fa-537c-45c5-952c-f87a40b194ef" (UID: "568979fa-537c-45c5-952c-f87a40b194ef"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.382706 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/568979fa-537c-45c5-952c-f87a40b194ef-config-data" (OuterVolumeSpecName: "config-data") pod "568979fa-537c-45c5-952c-f87a40b194ef" (UID: "568979fa-537c-45c5-952c-f87a40b194ef"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.391020 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0062240-1ed7-488f-ac15-d3c5e0812ccd-combined-ca-bundle\") pod \"d0062240-1ed7-488f-ac15-d3c5e0812ccd\" (UID: \"d0062240-1ed7-488f-ac15-d3c5e0812ccd\") " Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.391197 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d0062240-1ed7-488f-ac15-d3c5e0812ccd-logs\") pod \"d0062240-1ed7-488f-ac15-d3c5e0812ccd\" (UID: \"d0062240-1ed7-488f-ac15-d3c5e0812ccd\") " Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.391305 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0062240-1ed7-488f-ac15-d3c5e0812ccd-config-data\") pod \"d0062240-1ed7-488f-ac15-d3c5e0812ccd\" (UID: \"d0062240-1ed7-488f-ac15-d3c5e0812ccd\") " Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.391360 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k6dxk\" (UniqueName: \"kubernetes.io/projected/d0062240-1ed7-488f-ac15-d3c5e0812ccd-kube-api-access-k6dxk\") pod \"d0062240-1ed7-488f-ac15-d3c5e0812ccd\" (UID: \"d0062240-1ed7-488f-ac15-d3c5e0812ccd\") " Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.391392 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d0062240-1ed7-488f-ac15-d3c5e0812ccd-config-data-custom\") pod \"d0062240-1ed7-488f-ac15-d3c5e0812ccd\" (UID: \"d0062240-1ed7-488f-ac15-d3c5e0812ccd\") " Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.391661 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d0062240-1ed7-488f-ac15-d3c5e0812ccd-logs" (OuterVolumeSpecName: "logs") pod "d0062240-1ed7-488f-ac15-d3c5e0812ccd" (UID: "d0062240-1ed7-488f-ac15-d3c5e0812ccd"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.392386 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/568979fa-537c-45c5-952c-f87a40b194ef-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.392408 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/568979fa-537c-45c5-952c-f87a40b194ef-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.392419 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d0062240-1ed7-488f-ac15-d3c5e0812ccd-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.392428 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/568979fa-537c-45c5-952c-f87a40b194ef-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.392439 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xsjnm\" (UniqueName: \"kubernetes.io/projected/568979fa-537c-45c5-952c-f87a40b194ef-kube-api-access-xsjnm\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.392451 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/568979fa-537c-45c5-952c-f87a40b194ef-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.394643 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d0062240-1ed7-488f-ac15-d3c5e0812ccd-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "d0062240-1ed7-488f-ac15-d3c5e0812ccd" (UID: "d0062240-1ed7-488f-ac15-d3c5e0812ccd"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.394647 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d0062240-1ed7-488f-ac15-d3c5e0812ccd-kube-api-access-k6dxk" (OuterVolumeSpecName: "kube-api-access-k6dxk") pod "d0062240-1ed7-488f-ac15-d3c5e0812ccd" (UID: "d0062240-1ed7-488f-ac15-d3c5e0812ccd"). InnerVolumeSpecName "kube-api-access-k6dxk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.422801 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-55c969d68-6j7rc" event={"ID":"44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f","Type":"ContainerStarted","Data":"b689cf6d60478e65385b8ac3d3deae0dc109647d01351e0e9d0bb25a6658a4ae"} Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.422874 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-55c969d68-6j7rc" event={"ID":"44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f","Type":"ContainerStarted","Data":"f68d263a2081165f043b77271abdfd4da85864255da0492403c4be2647086b5e"} Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.422888 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-55c969d68-6j7rc" event={"ID":"44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f","Type":"ContainerStarted","Data":"492776aaadfa9ad1a21765295c99ae29e8897befd944da8c131532de9a1a927f"} Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.423083 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/barbican-api-55c969d68-6j7rc" Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.423300 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/barbican-api-55c969d68-6j7rc" Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.424111 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d0062240-1ed7-488f-ac15-d3c5e0812ccd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d0062240-1ed7-488f-ac15-d3c5e0812ccd" (UID: "d0062240-1ed7-488f-ac15-d3c5e0812ccd"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.426546 4558 generic.go:334] "Generic (PLEG): container finished" podID="d0062240-1ed7-488f-ac15-d3c5e0812ccd" containerID="f8f2ef208173ae07911e14421c874575ec9ac47e3161c054e5cda79b8cc76030" exitCode=0 Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.426606 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-6d8c858488-xrdwn" event={"ID":"d0062240-1ed7-488f-ac15-d3c5e0812ccd","Type":"ContainerDied","Data":"f8f2ef208173ae07911e14421c874575ec9ac47e3161c054e5cda79b8cc76030"} Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.426625 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-6d8c858488-xrdwn" event={"ID":"d0062240-1ed7-488f-ac15-d3c5e0812ccd","Type":"ContainerDied","Data":"d072a6a8cc0a04bf971ca24e0c66fbd5ad8b6cba92a127c02e13563a25c1c21f"} Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.426642 4558 scope.go:117] "RemoveContainer" containerID="f8f2ef208173ae07911e14421c874575ec9ac47e3161c054e5cda79b8cc76030" Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.426772 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-api-6d8c858488-xrdwn" Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.430972 4558 generic.go:334] "Generic (PLEG): container finished" podID="568979fa-537c-45c5-952c-f87a40b194ef" containerID="1395db81401be1f584e96686dac08b81079dc9377f0f67ac885fa17b47a87685" exitCode=137 Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.431070 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"568979fa-537c-45c5-952c-f87a40b194ef","Type":"ContainerDied","Data":"1395db81401be1f584e96686dac08b81079dc9377f0f67ac885fa17b47a87685"} Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.431122 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"568979fa-537c-45c5-952c-f87a40b194ef","Type":"ContainerDied","Data":"881644d23f93cffca43d14cbda0bbbea6ddba271b723e6f35ce42103ef988265"} Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.431255 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.435568 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/neutron-75b5f89ffd-dsnd8" podUID="e8c90851-f646-4a0c-8c0c-7ecac86fa5a7" containerName="neutron-httpd" probeResult="failure" output="Get \"http://10.217.1.211:9696/\": dial tcp 10.217.1.211:9696: connect: connection refused" Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.445267 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d0062240-1ed7-488f-ac15-d3c5e0812ccd-config-data" (OuterVolumeSpecName: "config-data") pod "d0062240-1ed7-488f-ac15-d3c5e0812ccd" (UID: "d0062240-1ed7-488f-ac15-d3c5e0812ccd"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.448980 4558 scope.go:117] "RemoveContainer" containerID="c0fe7edacfd45633530e170a5c39c2fd85b8ee4e9a397ea2a78b53cb55d8f966" Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.470194 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-api-55c969d68-6j7rc" podStartSLOduration=1.4701851590000001 podStartE2EDuration="1.470185159s" podCreationTimestamp="2026-01-20 17:33:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:33:17.445387331 +0000 UTC m=+3091.205725308" watchObservedRunningTime="2026-01-20 17:33:17.470185159 +0000 UTC m=+3091.230523126" Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.472004 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.477154 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.494398 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0062240-1ed7-488f-ac15-d3c5e0812ccd-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.494422 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k6dxk\" (UniqueName: \"kubernetes.io/projected/d0062240-1ed7-488f-ac15-d3c5e0812ccd-kube-api-access-k6dxk\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.494432 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d0062240-1ed7-488f-ac15-d3c5e0812ccd-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.494441 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0062240-1ed7-488f-ac15-d3c5e0812ccd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.498987 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:33:17 crc kubenswrapper[4558]: E0120 17:33:17.499492 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8516d676-2fa0-47c4-b81a-254ec50efd52" containerName="keystone-api" Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.499506 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8516d676-2fa0-47c4-b81a-254ec50efd52" containerName="keystone-api" Jan 20 17:33:17 crc kubenswrapper[4558]: E0120 17:33:17.499535 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="568979fa-537c-45c5-952c-f87a40b194ef" containerName="probe" Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.499542 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="568979fa-537c-45c5-952c-f87a40b194ef" containerName="probe" Jan 20 17:33:17 crc kubenswrapper[4558]: E0120 17:33:17.499551 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d0062240-1ed7-488f-ac15-d3c5e0812ccd" containerName="barbican-api" Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.499557 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0062240-1ed7-488f-ac15-d3c5e0812ccd" 
containerName="barbican-api" Jan 20 17:33:17 crc kubenswrapper[4558]: E0120 17:33:17.499568 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d0062240-1ed7-488f-ac15-d3c5e0812ccd" containerName="barbican-api-log" Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.499573 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0062240-1ed7-488f-ac15-d3c5e0812ccd" containerName="barbican-api-log" Jan 20 17:33:17 crc kubenswrapper[4558]: E0120 17:33:17.499582 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="568979fa-537c-45c5-952c-f87a40b194ef" containerName="cinder-scheduler" Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.499588 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="568979fa-537c-45c5-952c-f87a40b194ef" containerName="cinder-scheduler" Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.499777 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="568979fa-537c-45c5-952c-f87a40b194ef" containerName="probe" Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.499791 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="568979fa-537c-45c5-952c-f87a40b194ef" containerName="cinder-scheduler" Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.499814 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8516d676-2fa0-47c4-b81a-254ec50efd52" containerName="keystone-api" Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.499832 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d0062240-1ed7-488f-ac15-d3c5e0812ccd" containerName="barbican-api-log" Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.499843 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d0062240-1ed7-488f-ac15-d3c5e0812ccd" containerName="barbican-api" Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.500885 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.503016 4558 scope.go:117] "RemoveContainer" containerID="f8f2ef208173ae07911e14421c874575ec9ac47e3161c054e5cda79b8cc76030" Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.503139 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-scheduler-config-data" Jan 20 17:33:17 crc kubenswrapper[4558]: E0120 17:33:17.503956 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f8f2ef208173ae07911e14421c874575ec9ac47e3161c054e5cda79b8cc76030\": container with ID starting with f8f2ef208173ae07911e14421c874575ec9ac47e3161c054e5cda79b8cc76030 not found: ID does not exist" containerID="f8f2ef208173ae07911e14421c874575ec9ac47e3161c054e5cda79b8cc76030" Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.504060 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f8f2ef208173ae07911e14421c874575ec9ac47e3161c054e5cda79b8cc76030"} err="failed to get container status \"f8f2ef208173ae07911e14421c874575ec9ac47e3161c054e5cda79b8cc76030\": rpc error: code = NotFound desc = could not find container \"f8f2ef208173ae07911e14421c874575ec9ac47e3161c054e5cda79b8cc76030\": container with ID starting with f8f2ef208173ae07911e14421c874575ec9ac47e3161c054e5cda79b8cc76030 not found: ID does not exist" Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.507553 4558 scope.go:117] "RemoveContainer" containerID="c0fe7edacfd45633530e170a5c39c2fd85b8ee4e9a397ea2a78b53cb55d8f966" Jan 20 17:33:17 crc kubenswrapper[4558]: E0120 17:33:17.508603 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c0fe7edacfd45633530e170a5c39c2fd85b8ee4e9a397ea2a78b53cb55d8f966\": container with ID starting with c0fe7edacfd45633530e170a5c39c2fd85b8ee4e9a397ea2a78b53cb55d8f966 not found: ID does not exist" containerID="c0fe7edacfd45633530e170a5c39c2fd85b8ee4e9a397ea2a78b53cb55d8f966" Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.508650 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c0fe7edacfd45633530e170a5c39c2fd85b8ee4e9a397ea2a78b53cb55d8f966"} err="failed to get container status \"c0fe7edacfd45633530e170a5c39c2fd85b8ee4e9a397ea2a78b53cb55d8f966\": rpc error: code = NotFound desc = could not find container \"c0fe7edacfd45633530e170a5c39c2fd85b8ee4e9a397ea2a78b53cb55d8f966\": container with ID starting with c0fe7edacfd45633530e170a5c39c2fd85b8ee4e9a397ea2a78b53cb55d8f966 not found: ID does not exist" Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.508682 4558 scope.go:117] "RemoveContainer" containerID="b2cc3eb7df47c10e18cf97ab2f8a7780ca8a8e35c8a8c749aa1b4a85dd0a58ad" Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.509977 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.532125 4558 scope.go:117] "RemoveContainer" containerID="1395db81401be1f584e96686dac08b81079dc9377f0f67ac885fa17b47a87685" Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.549357 4558 scope.go:117] "RemoveContainer" containerID="b2cc3eb7df47c10e18cf97ab2f8a7780ca8a8e35c8a8c749aa1b4a85dd0a58ad" Jan 20 17:33:17 crc kubenswrapper[4558]: E0120 17:33:17.549892 4558 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"b2cc3eb7df47c10e18cf97ab2f8a7780ca8a8e35c8a8c749aa1b4a85dd0a58ad\": container with ID starting with b2cc3eb7df47c10e18cf97ab2f8a7780ca8a8e35c8a8c749aa1b4a85dd0a58ad not found: ID does not exist" containerID="b2cc3eb7df47c10e18cf97ab2f8a7780ca8a8e35c8a8c749aa1b4a85dd0a58ad" Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.549929 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b2cc3eb7df47c10e18cf97ab2f8a7780ca8a8e35c8a8c749aa1b4a85dd0a58ad"} err="failed to get container status \"b2cc3eb7df47c10e18cf97ab2f8a7780ca8a8e35c8a8c749aa1b4a85dd0a58ad\": rpc error: code = NotFound desc = could not find container \"b2cc3eb7df47c10e18cf97ab2f8a7780ca8a8e35c8a8c749aa1b4a85dd0a58ad\": container with ID starting with b2cc3eb7df47c10e18cf97ab2f8a7780ca8a8e35c8a8c749aa1b4a85dd0a58ad not found: ID does not exist" Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.549955 4558 scope.go:117] "RemoveContainer" containerID="1395db81401be1f584e96686dac08b81079dc9377f0f67ac885fa17b47a87685" Jan 20 17:33:17 crc kubenswrapper[4558]: E0120 17:33:17.550445 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1395db81401be1f584e96686dac08b81079dc9377f0f67ac885fa17b47a87685\": container with ID starting with 1395db81401be1f584e96686dac08b81079dc9377f0f67ac885fa17b47a87685 not found: ID does not exist" containerID="1395db81401be1f584e96686dac08b81079dc9377f0f67ac885fa17b47a87685" Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.550508 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1395db81401be1f584e96686dac08b81079dc9377f0f67ac885fa17b47a87685"} err="failed to get container status \"1395db81401be1f584e96686dac08b81079dc9377f0f67ac885fa17b47a87685\": rpc error: code = NotFound desc = could not find container \"1395db81401be1f584e96686dac08b81079dc9377f0f67ac885fa17b47a87685\": container with ID starting with 1395db81401be1f584e96686dac08b81079dc9377f0f67ac885fa17b47a87685 not found: ID does not exist" Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.598572 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4bd47eac-4090-4fc1-91c4-553ebd84964d-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"4bd47eac-4090-4fc1-91c4-553ebd84964d\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.598668 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4bd47eac-4090-4fc1-91c4-553ebd84964d-config-data\") pod \"cinder-scheduler-0\" (UID: \"4bd47eac-4090-4fc1-91c4-553ebd84964d\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.598872 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4bd47eac-4090-4fc1-91c4-553ebd84964d-scripts\") pod \"cinder-scheduler-0\" (UID: \"4bd47eac-4090-4fc1-91c4-553ebd84964d\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.599124 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/4bd47eac-4090-4fc1-91c4-553ebd84964d-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"4bd47eac-4090-4fc1-91c4-553ebd84964d\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.599273 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s5x4k\" (UniqueName: \"kubernetes.io/projected/4bd47eac-4090-4fc1-91c4-553ebd84964d-kube-api-access-s5x4k\") pod \"cinder-scheduler-0\" (UID: \"4bd47eac-4090-4fc1-91c4-553ebd84964d\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.599342 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4bd47eac-4090-4fc1-91c4-553ebd84964d-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"4bd47eac-4090-4fc1-91c4-553ebd84964d\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.703659 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4bd47eac-4090-4fc1-91c4-553ebd84964d-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"4bd47eac-4090-4fc1-91c4-553ebd84964d\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.703717 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4bd47eac-4090-4fc1-91c4-553ebd84964d-config-data\") pod \"cinder-scheduler-0\" (UID: \"4bd47eac-4090-4fc1-91c4-553ebd84964d\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.703752 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4bd47eac-4090-4fc1-91c4-553ebd84964d-scripts\") pod \"cinder-scheduler-0\" (UID: \"4bd47eac-4090-4fc1-91c4-553ebd84964d\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.703815 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4bd47eac-4090-4fc1-91c4-553ebd84964d-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"4bd47eac-4090-4fc1-91c4-553ebd84964d\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.703859 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s5x4k\" (UniqueName: \"kubernetes.io/projected/4bd47eac-4090-4fc1-91c4-553ebd84964d-kube-api-access-s5x4k\") pod \"cinder-scheduler-0\" (UID: \"4bd47eac-4090-4fc1-91c4-553ebd84964d\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.703803 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4bd47eac-4090-4fc1-91c4-553ebd84964d-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"4bd47eac-4090-4fc1-91c4-553ebd84964d\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.703889 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/4bd47eac-4090-4fc1-91c4-553ebd84964d-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"4bd47eac-4090-4fc1-91c4-553ebd84964d\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.707140 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4bd47eac-4090-4fc1-91c4-553ebd84964d-scripts\") pod \"cinder-scheduler-0\" (UID: \"4bd47eac-4090-4fc1-91c4-553ebd84964d\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.708070 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4bd47eac-4090-4fc1-91c4-553ebd84964d-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"4bd47eac-4090-4fc1-91c4-553ebd84964d\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.708331 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4bd47eac-4090-4fc1-91c4-553ebd84964d-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"4bd47eac-4090-4fc1-91c4-553ebd84964d\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.710887 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4bd47eac-4090-4fc1-91c4-553ebd84964d-config-data\") pod \"cinder-scheduler-0\" (UID: \"4bd47eac-4090-4fc1-91c4-553ebd84964d\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.719593 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s5x4k\" (UniqueName: \"kubernetes.io/projected/4bd47eac-4090-4fc1-91c4-553ebd84964d-kube-api-access-s5x4k\") pod \"cinder-scheduler-0\" (UID: \"4bd47eac-4090-4fc1-91c4-553ebd84964d\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.753759 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-api-6d8c858488-xrdwn"] Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.760434 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-api-6d8c858488-xrdwn"] Jan 20 17:33:17 crc kubenswrapper[4558]: I0120 17:33:17.818605 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.242460 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:33:18 crc kubenswrapper[4558]: W0120 17:33:18.245691 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4bd47eac_4090_4fc1_91c4_553ebd84964d.slice/crio-41b3f54e2b9bfb4b0a9c2c2cb167cb97f4161a9d82938df1b6dbaf7803136def WatchSource:0}: Error finding container 41b3f54e2b9bfb4b0a9c2c2cb167cb97f4161a9d82938df1b6dbaf7803136def: Status 404 returned error can't find the container with id 41b3f54e2b9bfb4b0a9c2c2cb167cb97f4161a9d82938df1b6dbaf7803136def Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.309840 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.417046 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x64sj\" (UniqueName: \"kubernetes.io/projected/8c74a8f5-be12-4ac4-b45c-d459801927ea-kube-api-access-x64sj\") pod \"8c74a8f5-be12-4ac4-b45c-d459801927ea\" (UID: \"8c74a8f5-be12-4ac4-b45c-d459801927ea\") " Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.417321 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swift\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"8c74a8f5-be12-4ac4-b45c-d459801927ea\" (UID: \"8c74a8f5-be12-4ac4-b45c-d459801927ea\") " Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.417409 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/8c74a8f5-be12-4ac4-b45c-d459801927ea-cache\") pod \"8c74a8f5-be12-4ac4-b45c-d459801927ea\" (UID: \"8c74a8f5-be12-4ac4-b45c-d459801927ea\") " Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.417464 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/8c74a8f5-be12-4ac4-b45c-d459801927ea-etc-swift\") pod \"8c74a8f5-be12-4ac4-b45c-d459801927ea\" (UID: \"8c74a8f5-be12-4ac4-b45c-d459801927ea\") " Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.417561 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/8c74a8f5-be12-4ac4-b45c-d459801927ea-lock\") pod \"8c74a8f5-be12-4ac4-b45c-d459801927ea\" (UID: \"8c74a8f5-be12-4ac4-b45c-d459801927ea\") " Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.418284 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8c74a8f5-be12-4ac4-b45c-d459801927ea-cache" (OuterVolumeSpecName: "cache") pod "8c74a8f5-be12-4ac4-b45c-d459801927ea" (UID: "8c74a8f5-be12-4ac4-b45c-d459801927ea"). InnerVolumeSpecName "cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.418406 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8c74a8f5-be12-4ac4-b45c-d459801927ea-lock" (OuterVolumeSpecName: "lock") pod "8c74a8f5-be12-4ac4-b45c-d459801927ea" (UID: "8c74a8f5-be12-4ac4-b45c-d459801927ea"). InnerVolumeSpecName "lock". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.418515 4558 reconciler_common.go:293] "Volume detached for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/8c74a8f5-be12-4ac4-b45c-d459801927ea-cache\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.418531 4558 reconciler_common.go:293] "Volume detached for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/8c74a8f5-be12-4ac4-b45c-d459801927ea-lock\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.420996 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage14-crc" (OuterVolumeSpecName: "swift") pod "8c74a8f5-be12-4ac4-b45c-d459801927ea" (UID: "8c74a8f5-be12-4ac4-b45c-d459801927ea"). InnerVolumeSpecName "local-storage14-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.422061 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8c74a8f5-be12-4ac4-b45c-d459801927ea-kube-api-access-x64sj" (OuterVolumeSpecName: "kube-api-access-x64sj") pod "8c74a8f5-be12-4ac4-b45c-d459801927ea" (UID: "8c74a8f5-be12-4ac4-b45c-d459801927ea"). InnerVolumeSpecName "kube-api-access-x64sj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.425958 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8c74a8f5-be12-4ac4-b45c-d459801927ea-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "8c74a8f5-be12-4ac4-b45c-d459801927ea" (UID: "8c74a8f5-be12-4ac4-b45c-d459801927ea"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.463629 4558 generic.go:334] "Generic (PLEG): container finished" podID="8c74a8f5-be12-4ac4-b45c-d459801927ea" containerID="440ae9e1e2d1bfffd629b66e5f2b1ad26abca913a264c8ed449f65a224346b30" exitCode=137 Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.463674 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"8c74a8f5-be12-4ac4-b45c-d459801927ea","Type":"ContainerDied","Data":"440ae9e1e2d1bfffd629b66e5f2b1ad26abca913a264c8ed449f65a224346b30"} Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.463723 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"8c74a8f5-be12-4ac4-b45c-d459801927ea","Type":"ContainerDied","Data":"fcaba0c4f99da6b8b205133f8184debe1ce8d6bc61b3c4f4ee8f189ec592ace1"} Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.463892 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.464093 4558 scope.go:117] "RemoveContainer" containerID="440ae9e1e2d1bfffd629b66e5f2b1ad26abca913a264c8ed449f65a224346b30" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.471566 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"4bd47eac-4090-4fc1-91c4-553ebd84964d","Type":"ContainerStarted","Data":"41b3f54e2b9bfb4b0a9c2c2cb167cb97f4161a9d82938df1b6dbaf7803136def"} Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.505214 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-storage-0"] Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.511710 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/swift-storage-0"] Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.515557 4558 scope.go:117] "RemoveContainer" containerID="e84993808b2bc5f85b1ae50493298e6a10eb7c3fcbd64da879a0e7a937ba17e1" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.523074 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x64sj\" (UniqueName: \"kubernetes.io/projected/8c74a8f5-be12-4ac4-b45c-d459801927ea-kube-api-access-x64sj\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.523107 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") on node \"crc\" " Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.523117 4558 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/8c74a8f5-be12-4ac4-b45c-d459801927ea-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.531543 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/swift-storage-0"] Jan 20 17:33:18 crc kubenswrapper[4558]: E0120 17:33:18.531853 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c74a8f5-be12-4ac4-b45c-d459801927ea" containerName="container-updater" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.531866 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c74a8f5-be12-4ac4-b45c-d459801927ea" containerName="container-updater" Jan 20 17:33:18 crc kubenswrapper[4558]: E0120 17:33:18.531877 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c74a8f5-be12-4ac4-b45c-d459801927ea" containerName="object-replicator" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.531883 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c74a8f5-be12-4ac4-b45c-d459801927ea" containerName="object-replicator" Jan 20 17:33:18 crc kubenswrapper[4558]: E0120 17:33:18.531895 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c74a8f5-be12-4ac4-b45c-d459801927ea" containerName="object-server" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.531900 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c74a8f5-be12-4ac4-b45c-d459801927ea" containerName="object-server" Jan 20 17:33:18 crc kubenswrapper[4558]: E0120 17:33:18.531913 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c74a8f5-be12-4ac4-b45c-d459801927ea" containerName="container-auditor" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.531919 4558 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="8c74a8f5-be12-4ac4-b45c-d459801927ea" containerName="container-auditor" Jan 20 17:33:18 crc kubenswrapper[4558]: E0120 17:33:18.531930 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c74a8f5-be12-4ac4-b45c-d459801927ea" containerName="object-expirer" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.531936 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c74a8f5-be12-4ac4-b45c-d459801927ea" containerName="object-expirer" Jan 20 17:33:18 crc kubenswrapper[4558]: E0120 17:33:18.531948 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c74a8f5-be12-4ac4-b45c-d459801927ea" containerName="account-reaper" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.531953 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c74a8f5-be12-4ac4-b45c-d459801927ea" containerName="account-reaper" Jan 20 17:33:18 crc kubenswrapper[4558]: E0120 17:33:18.531964 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c74a8f5-be12-4ac4-b45c-d459801927ea" containerName="account-auditor" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.531969 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c74a8f5-be12-4ac4-b45c-d459801927ea" containerName="account-auditor" Jan 20 17:33:18 crc kubenswrapper[4558]: E0120 17:33:18.531980 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c74a8f5-be12-4ac4-b45c-d459801927ea" containerName="swift-recon-cron" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.531985 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c74a8f5-be12-4ac4-b45c-d459801927ea" containerName="swift-recon-cron" Jan 20 17:33:18 crc kubenswrapper[4558]: E0120 17:33:18.531993 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c74a8f5-be12-4ac4-b45c-d459801927ea" containerName="container-server" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.531998 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c74a8f5-be12-4ac4-b45c-d459801927ea" containerName="container-server" Jan 20 17:33:18 crc kubenswrapper[4558]: E0120 17:33:18.532009 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c74a8f5-be12-4ac4-b45c-d459801927ea" containerName="account-server" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.532015 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c74a8f5-be12-4ac4-b45c-d459801927ea" containerName="account-server" Jan 20 17:33:18 crc kubenswrapper[4558]: E0120 17:33:18.532020 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c74a8f5-be12-4ac4-b45c-d459801927ea" containerName="container-replicator" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.532026 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c74a8f5-be12-4ac4-b45c-d459801927ea" containerName="container-replicator" Jan 20 17:33:18 crc kubenswrapper[4558]: E0120 17:33:18.532034 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c74a8f5-be12-4ac4-b45c-d459801927ea" containerName="account-replicator" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.532039 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c74a8f5-be12-4ac4-b45c-d459801927ea" containerName="account-replicator" Jan 20 17:33:18 crc kubenswrapper[4558]: E0120 17:33:18.532046 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c74a8f5-be12-4ac4-b45c-d459801927ea" containerName="rsync" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.532052 4558 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="8c74a8f5-be12-4ac4-b45c-d459801927ea" containerName="rsync" Jan 20 17:33:18 crc kubenswrapper[4558]: E0120 17:33:18.532072 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c74a8f5-be12-4ac4-b45c-d459801927ea" containerName="object-updater" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.532077 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c74a8f5-be12-4ac4-b45c-d459801927ea" containerName="object-updater" Jan 20 17:33:18 crc kubenswrapper[4558]: E0120 17:33:18.532085 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c74a8f5-be12-4ac4-b45c-d459801927ea" containerName="object-auditor" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.532090 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c74a8f5-be12-4ac4-b45c-d459801927ea" containerName="object-auditor" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.532260 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c74a8f5-be12-4ac4-b45c-d459801927ea" containerName="container-server" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.532273 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c74a8f5-be12-4ac4-b45c-d459801927ea" containerName="account-auditor" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.532283 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c74a8f5-be12-4ac4-b45c-d459801927ea" containerName="container-updater" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.532292 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c74a8f5-be12-4ac4-b45c-d459801927ea" containerName="object-auditor" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.532298 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c74a8f5-be12-4ac4-b45c-d459801927ea" containerName="account-server" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.532307 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c74a8f5-be12-4ac4-b45c-d459801927ea" containerName="swift-recon-cron" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.532314 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c74a8f5-be12-4ac4-b45c-d459801927ea" containerName="object-updater" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.532321 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c74a8f5-be12-4ac4-b45c-d459801927ea" containerName="account-reaper" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.532330 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c74a8f5-be12-4ac4-b45c-d459801927ea" containerName="object-server" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.532340 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c74a8f5-be12-4ac4-b45c-d459801927ea" containerName="container-replicator" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.532349 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c74a8f5-be12-4ac4-b45c-d459801927ea" containerName="object-replicator" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.532358 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c74a8f5-be12-4ac4-b45c-d459801927ea" containerName="container-auditor" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.532364 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c74a8f5-be12-4ac4-b45c-d459801927ea" containerName="object-expirer" Jan 20 17:33:18 crc 
kubenswrapper[4558]: I0120 17:33:18.532373 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c74a8f5-be12-4ac4-b45c-d459801927ea" containerName="account-replicator" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.532381 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c74a8f5-be12-4ac4-b45c-d459801927ea" containerName="rsync" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.541141 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.553426 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"swift-storage-config-data" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.553437 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage14-crc" (UniqueName: "kubernetes.io/local-volume/local-storage14-crc") on node "crc" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.591940 4558 scope.go:117] "RemoveContainer" containerID="6bd5c4f64f7527d8728149bf64ddbf3f8288b598bd1840a6821fe48789fce821" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.626837 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.628535 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="568979fa-537c-45c5-952c-f87a40b194ef" path="/var/lib/kubelet/pods/568979fa-537c-45c5-952c-f87a40b194ef/volumes" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.629824 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8c74a8f5-be12-4ac4-b45c-d459801927ea" path="/var/lib/kubelet/pods/8c74a8f5-be12-4ac4-b45c-d459801927ea/volumes" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.631895 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d0062240-1ed7-488f-ac15-d3c5e0812ccd" path="/var/lib/kubelet/pods/d0062240-1ed7-488f-ac15-d3c5e0812ccd/volumes" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.632614 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-storage-0"] Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.632741 4558 scope.go:117] "RemoveContainer" containerID="642fe8d6d1e22629c1ee44fe92b12d3105252f0e570aa04c45e104ebe419f7ad" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.659208 4558 scope.go:117] "RemoveContainer" containerID="49db71c25f4101de9d35070da8e88df17b13463d85f4085b268b15d6f2fd0010" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.696517 4558 scope.go:117] "RemoveContainer" containerID="b1a440452721f6300aa80e2b85895305c5e163dc53fb6fedc65d4a4ad30c530d" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.721663 4558 scope.go:117] "RemoveContainer" containerID="5b64dc2431311cf052b4ff6df6c6144511a386c51c5291266eaa5ffde8e35a57" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.729072 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/85471d11-01bf-4a15-b6bc-fabbdaa5750b-cache\") pod \"swift-storage-0\" (UID: \"85471d11-01bf-4a15-b6bc-fabbdaa5750b\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.729117 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"swift-storage-0\" (UID: \"85471d11-01bf-4a15-b6bc-fabbdaa5750b\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.729309 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gzsnj\" (UniqueName: \"kubernetes.io/projected/85471d11-01bf-4a15-b6bc-fabbdaa5750b-kube-api-access-gzsnj\") pod \"swift-storage-0\" (UID: \"85471d11-01bf-4a15-b6bc-fabbdaa5750b\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.729412 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/85471d11-01bf-4a15-b6bc-fabbdaa5750b-lock\") pod \"swift-storage-0\" (UID: \"85471d11-01bf-4a15-b6bc-fabbdaa5750b\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.729558 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/85471d11-01bf-4a15-b6bc-fabbdaa5750b-etc-swift\") pod \"swift-storage-0\" (UID: \"85471d11-01bf-4a15-b6bc-fabbdaa5750b\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.745634 4558 scope.go:117] "RemoveContainer" containerID="6750a373d500a14c8bb8f71b32edf18286f29379d3c4862db6be084c829808ed" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.751551 4558 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","pod7d6b9532-22b5-41c9-8b15-83a9c515c52d"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort pod7d6b9532-22b5-41c9-8b15-83a9c515c52d] : Timed out while waiting for systemd to remove kubepods-besteffort-pod7d6b9532_22b5_41c9_8b15_83a9c515c52d.slice" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.767307 4558 scope.go:117] "RemoveContainer" containerID="7312f6e5f2e6499af9f3601374373df6aa29dfce310cc2421bf1cf529c300a02" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.786840 4558 scope.go:117] "RemoveContainer" containerID="b854ccf3f0261f0809cff51e86b924877af436c76a3e8bb217d1f85c3dda4221" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.808858 4558 scope.go:117] "RemoveContainer" containerID="e2141ae9ecf7ffabbc3fdb13e8e88e4a3ea99534f37f0efd0752308e25d02c42" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.828832 4558 scope.go:117] "RemoveContainer" containerID="b70b64e3b3bd252db5a3a5cd9a2f046dc46575ddcb991f7fbfb5eae9caa6a55b" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.835692 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/85471d11-01bf-4a15-b6bc-fabbdaa5750b-lock\") pod \"swift-storage-0\" (UID: \"85471d11-01bf-4a15-b6bc-fabbdaa5750b\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.835803 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/85471d11-01bf-4a15-b6bc-fabbdaa5750b-etc-swift\") pod \"swift-storage-0\" (UID: \"85471d11-01bf-4a15-b6bc-fabbdaa5750b\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.836082 4558 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/85471d11-01bf-4a15-b6bc-fabbdaa5750b-cache\") pod \"swift-storage-0\" (UID: \"85471d11-01bf-4a15-b6bc-fabbdaa5750b\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.836118 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"swift-storage-0\" (UID: \"85471d11-01bf-4a15-b6bc-fabbdaa5750b\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.836157 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/85471d11-01bf-4a15-b6bc-fabbdaa5750b-lock\") pod \"swift-storage-0\" (UID: \"85471d11-01bf-4a15-b6bc-fabbdaa5750b\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.836331 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gzsnj\" (UniqueName: \"kubernetes.io/projected/85471d11-01bf-4a15-b6bc-fabbdaa5750b-kube-api-access-gzsnj\") pod \"swift-storage-0\" (UID: \"85471d11-01bf-4a15-b6bc-fabbdaa5750b\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.836412 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/85471d11-01bf-4a15-b6bc-fabbdaa5750b-cache\") pod \"swift-storage-0\" (UID: \"85471d11-01bf-4a15-b6bc-fabbdaa5750b\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.836580 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"swift-storage-0\" (UID: \"85471d11-01bf-4a15-b6bc-fabbdaa5750b\") device mount path \"/mnt/openstack/pv14\"" pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.841058 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/85471d11-01bf-4a15-b6bc-fabbdaa5750b-etc-swift\") pod \"swift-storage-0\" (UID: \"85471d11-01bf-4a15-b6bc-fabbdaa5750b\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.850661 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gzsnj\" (UniqueName: \"kubernetes.io/projected/85471d11-01bf-4a15-b6bc-fabbdaa5750b-kube-api-access-gzsnj\") pod \"swift-storage-0\" (UID: \"85471d11-01bf-4a15-b6bc-fabbdaa5750b\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.859320 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"swift-storage-0\" (UID: \"85471d11-01bf-4a15-b6bc-fabbdaa5750b\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.860664 4558 scope.go:117] "RemoveContainer" containerID="c51ba6d01533031c51006f14e866bc5bdedd26de8afcbe43c5bcf1e7025e0dd3" Jan 20 17:33:18 crc kubenswrapper[4558]: I0120 17:33:18.886518 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:33:19 crc kubenswrapper[4558]: I0120 17:33:19.018474 4558 scope.go:117] "RemoveContainer" containerID="abb482af37b6f6a3f13af0986ae717f0f32b8467e1976fdbb59f0c73b8240e46" Jan 20 17:33:19 crc kubenswrapper[4558]: I0120 17:33:19.043431 4558 scope.go:117] "RemoveContainer" containerID="33d41f8923ec28aa7b94229f05d825972526de6642700e3a9555dbc7f7a6e7ce" Jan 20 17:33:19 crc kubenswrapper[4558]: I0120 17:33:19.082514 4558 scope.go:117] "RemoveContainer" containerID="440ae9e1e2d1bfffd629b66e5f2b1ad26abca913a264c8ed449f65a224346b30" Jan 20 17:33:19 crc kubenswrapper[4558]: E0120 17:33:19.083070 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"440ae9e1e2d1bfffd629b66e5f2b1ad26abca913a264c8ed449f65a224346b30\": container with ID starting with 440ae9e1e2d1bfffd629b66e5f2b1ad26abca913a264c8ed449f65a224346b30 not found: ID does not exist" containerID="440ae9e1e2d1bfffd629b66e5f2b1ad26abca913a264c8ed449f65a224346b30" Jan 20 17:33:19 crc kubenswrapper[4558]: I0120 17:33:19.083095 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"440ae9e1e2d1bfffd629b66e5f2b1ad26abca913a264c8ed449f65a224346b30"} err="failed to get container status \"440ae9e1e2d1bfffd629b66e5f2b1ad26abca913a264c8ed449f65a224346b30\": rpc error: code = NotFound desc = could not find container \"440ae9e1e2d1bfffd629b66e5f2b1ad26abca913a264c8ed449f65a224346b30\": container with ID starting with 440ae9e1e2d1bfffd629b66e5f2b1ad26abca913a264c8ed449f65a224346b30 not found: ID does not exist" Jan 20 17:33:19 crc kubenswrapper[4558]: I0120 17:33:19.083116 4558 scope.go:117] "RemoveContainer" containerID="e84993808b2bc5f85b1ae50493298e6a10eb7c3fcbd64da879a0e7a937ba17e1" Jan 20 17:33:19 crc kubenswrapper[4558]: E0120 17:33:19.084206 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e84993808b2bc5f85b1ae50493298e6a10eb7c3fcbd64da879a0e7a937ba17e1\": container with ID starting with e84993808b2bc5f85b1ae50493298e6a10eb7c3fcbd64da879a0e7a937ba17e1 not found: ID does not exist" containerID="e84993808b2bc5f85b1ae50493298e6a10eb7c3fcbd64da879a0e7a937ba17e1" Jan 20 17:33:19 crc kubenswrapper[4558]: I0120 17:33:19.084230 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e84993808b2bc5f85b1ae50493298e6a10eb7c3fcbd64da879a0e7a937ba17e1"} err="failed to get container status \"e84993808b2bc5f85b1ae50493298e6a10eb7c3fcbd64da879a0e7a937ba17e1\": rpc error: code = NotFound desc = could not find container \"e84993808b2bc5f85b1ae50493298e6a10eb7c3fcbd64da879a0e7a937ba17e1\": container with ID starting with e84993808b2bc5f85b1ae50493298e6a10eb7c3fcbd64da879a0e7a937ba17e1 not found: ID does not exist" Jan 20 17:33:19 crc kubenswrapper[4558]: I0120 17:33:19.084243 4558 scope.go:117] "RemoveContainer" containerID="6bd5c4f64f7527d8728149bf64ddbf3f8288b598bd1840a6821fe48789fce821" Jan 20 17:33:19 crc kubenswrapper[4558]: E0120 17:33:19.084753 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6bd5c4f64f7527d8728149bf64ddbf3f8288b598bd1840a6821fe48789fce821\": container with ID starting with 6bd5c4f64f7527d8728149bf64ddbf3f8288b598bd1840a6821fe48789fce821 not found: ID does not exist" containerID="6bd5c4f64f7527d8728149bf64ddbf3f8288b598bd1840a6821fe48789fce821" Jan 20 
17:33:19 crc kubenswrapper[4558]: I0120 17:33:19.084788 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6bd5c4f64f7527d8728149bf64ddbf3f8288b598bd1840a6821fe48789fce821"} err="failed to get container status \"6bd5c4f64f7527d8728149bf64ddbf3f8288b598bd1840a6821fe48789fce821\": rpc error: code = NotFound desc = could not find container \"6bd5c4f64f7527d8728149bf64ddbf3f8288b598bd1840a6821fe48789fce821\": container with ID starting with 6bd5c4f64f7527d8728149bf64ddbf3f8288b598bd1840a6821fe48789fce821 not found: ID does not exist" Jan 20 17:33:19 crc kubenswrapper[4558]: I0120 17:33:19.084839 4558 scope.go:117] "RemoveContainer" containerID="642fe8d6d1e22629c1ee44fe92b12d3105252f0e570aa04c45e104ebe419f7ad" Jan 20 17:33:19 crc kubenswrapper[4558]: E0120 17:33:19.085319 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"642fe8d6d1e22629c1ee44fe92b12d3105252f0e570aa04c45e104ebe419f7ad\": container with ID starting with 642fe8d6d1e22629c1ee44fe92b12d3105252f0e570aa04c45e104ebe419f7ad not found: ID does not exist" containerID="642fe8d6d1e22629c1ee44fe92b12d3105252f0e570aa04c45e104ebe419f7ad" Jan 20 17:33:19 crc kubenswrapper[4558]: I0120 17:33:19.085357 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"642fe8d6d1e22629c1ee44fe92b12d3105252f0e570aa04c45e104ebe419f7ad"} err="failed to get container status \"642fe8d6d1e22629c1ee44fe92b12d3105252f0e570aa04c45e104ebe419f7ad\": rpc error: code = NotFound desc = could not find container \"642fe8d6d1e22629c1ee44fe92b12d3105252f0e570aa04c45e104ebe419f7ad\": container with ID starting with 642fe8d6d1e22629c1ee44fe92b12d3105252f0e570aa04c45e104ebe419f7ad not found: ID does not exist" Jan 20 17:33:19 crc kubenswrapper[4558]: I0120 17:33:19.085371 4558 scope.go:117] "RemoveContainer" containerID="49db71c25f4101de9d35070da8e88df17b13463d85f4085b268b15d6f2fd0010" Jan 20 17:33:19 crc kubenswrapper[4558]: E0120 17:33:19.085639 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"49db71c25f4101de9d35070da8e88df17b13463d85f4085b268b15d6f2fd0010\": container with ID starting with 49db71c25f4101de9d35070da8e88df17b13463d85f4085b268b15d6f2fd0010 not found: ID does not exist" containerID="49db71c25f4101de9d35070da8e88df17b13463d85f4085b268b15d6f2fd0010" Jan 20 17:33:19 crc kubenswrapper[4558]: I0120 17:33:19.085662 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"49db71c25f4101de9d35070da8e88df17b13463d85f4085b268b15d6f2fd0010"} err="failed to get container status \"49db71c25f4101de9d35070da8e88df17b13463d85f4085b268b15d6f2fd0010\": rpc error: code = NotFound desc = could not find container \"49db71c25f4101de9d35070da8e88df17b13463d85f4085b268b15d6f2fd0010\": container with ID starting with 49db71c25f4101de9d35070da8e88df17b13463d85f4085b268b15d6f2fd0010 not found: ID does not exist" Jan 20 17:33:19 crc kubenswrapper[4558]: I0120 17:33:19.085676 4558 scope.go:117] "RemoveContainer" containerID="b1a440452721f6300aa80e2b85895305c5e163dc53fb6fedc65d4a4ad30c530d" Jan 20 17:33:19 crc kubenswrapper[4558]: E0120 17:33:19.086037 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b1a440452721f6300aa80e2b85895305c5e163dc53fb6fedc65d4a4ad30c530d\": container with ID starting with 
b1a440452721f6300aa80e2b85895305c5e163dc53fb6fedc65d4a4ad30c530d not found: ID does not exist" containerID="b1a440452721f6300aa80e2b85895305c5e163dc53fb6fedc65d4a4ad30c530d" Jan 20 17:33:19 crc kubenswrapper[4558]: I0120 17:33:19.086060 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b1a440452721f6300aa80e2b85895305c5e163dc53fb6fedc65d4a4ad30c530d"} err="failed to get container status \"b1a440452721f6300aa80e2b85895305c5e163dc53fb6fedc65d4a4ad30c530d\": rpc error: code = NotFound desc = could not find container \"b1a440452721f6300aa80e2b85895305c5e163dc53fb6fedc65d4a4ad30c530d\": container with ID starting with b1a440452721f6300aa80e2b85895305c5e163dc53fb6fedc65d4a4ad30c530d not found: ID does not exist" Jan 20 17:33:19 crc kubenswrapper[4558]: I0120 17:33:19.086087 4558 scope.go:117] "RemoveContainer" containerID="5b64dc2431311cf052b4ff6df6c6144511a386c51c5291266eaa5ffde8e35a57" Jan 20 17:33:19 crc kubenswrapper[4558]: E0120 17:33:19.086328 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5b64dc2431311cf052b4ff6df6c6144511a386c51c5291266eaa5ffde8e35a57\": container with ID starting with 5b64dc2431311cf052b4ff6df6c6144511a386c51c5291266eaa5ffde8e35a57 not found: ID does not exist" containerID="5b64dc2431311cf052b4ff6df6c6144511a386c51c5291266eaa5ffde8e35a57" Jan 20 17:33:19 crc kubenswrapper[4558]: I0120 17:33:19.086348 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5b64dc2431311cf052b4ff6df6c6144511a386c51c5291266eaa5ffde8e35a57"} err="failed to get container status \"5b64dc2431311cf052b4ff6df6c6144511a386c51c5291266eaa5ffde8e35a57\": rpc error: code = NotFound desc = could not find container \"5b64dc2431311cf052b4ff6df6c6144511a386c51c5291266eaa5ffde8e35a57\": container with ID starting with 5b64dc2431311cf052b4ff6df6c6144511a386c51c5291266eaa5ffde8e35a57 not found: ID does not exist" Jan 20 17:33:19 crc kubenswrapper[4558]: I0120 17:33:19.086363 4558 scope.go:117] "RemoveContainer" containerID="6750a373d500a14c8bb8f71b32edf18286f29379d3c4862db6be084c829808ed" Jan 20 17:33:19 crc kubenswrapper[4558]: E0120 17:33:19.086592 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6750a373d500a14c8bb8f71b32edf18286f29379d3c4862db6be084c829808ed\": container with ID starting with 6750a373d500a14c8bb8f71b32edf18286f29379d3c4862db6be084c829808ed not found: ID does not exist" containerID="6750a373d500a14c8bb8f71b32edf18286f29379d3c4862db6be084c829808ed" Jan 20 17:33:19 crc kubenswrapper[4558]: I0120 17:33:19.086610 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6750a373d500a14c8bb8f71b32edf18286f29379d3c4862db6be084c829808ed"} err="failed to get container status \"6750a373d500a14c8bb8f71b32edf18286f29379d3c4862db6be084c829808ed\": rpc error: code = NotFound desc = could not find container \"6750a373d500a14c8bb8f71b32edf18286f29379d3c4862db6be084c829808ed\": container with ID starting with 6750a373d500a14c8bb8f71b32edf18286f29379d3c4862db6be084c829808ed not found: ID does not exist" Jan 20 17:33:19 crc kubenswrapper[4558]: I0120 17:33:19.086622 4558 scope.go:117] "RemoveContainer" containerID="7312f6e5f2e6499af9f3601374373df6aa29dfce310cc2421bf1cf529c300a02" Jan 20 17:33:19 crc kubenswrapper[4558]: E0120 17:33:19.086823 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc 
error: code = NotFound desc = could not find container \"7312f6e5f2e6499af9f3601374373df6aa29dfce310cc2421bf1cf529c300a02\": container with ID starting with 7312f6e5f2e6499af9f3601374373df6aa29dfce310cc2421bf1cf529c300a02 not found: ID does not exist" containerID="7312f6e5f2e6499af9f3601374373df6aa29dfce310cc2421bf1cf529c300a02" Jan 20 17:33:19 crc kubenswrapper[4558]: I0120 17:33:19.086842 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7312f6e5f2e6499af9f3601374373df6aa29dfce310cc2421bf1cf529c300a02"} err="failed to get container status \"7312f6e5f2e6499af9f3601374373df6aa29dfce310cc2421bf1cf529c300a02\": rpc error: code = NotFound desc = could not find container \"7312f6e5f2e6499af9f3601374373df6aa29dfce310cc2421bf1cf529c300a02\": container with ID starting with 7312f6e5f2e6499af9f3601374373df6aa29dfce310cc2421bf1cf529c300a02 not found: ID does not exist" Jan 20 17:33:19 crc kubenswrapper[4558]: I0120 17:33:19.086853 4558 scope.go:117] "RemoveContainer" containerID="b854ccf3f0261f0809cff51e86b924877af436c76a3e8bb217d1f85c3dda4221" Jan 20 17:33:19 crc kubenswrapper[4558]: E0120 17:33:19.087677 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b854ccf3f0261f0809cff51e86b924877af436c76a3e8bb217d1f85c3dda4221\": container with ID starting with b854ccf3f0261f0809cff51e86b924877af436c76a3e8bb217d1f85c3dda4221 not found: ID does not exist" containerID="b854ccf3f0261f0809cff51e86b924877af436c76a3e8bb217d1f85c3dda4221" Jan 20 17:33:19 crc kubenswrapper[4558]: I0120 17:33:19.087728 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b854ccf3f0261f0809cff51e86b924877af436c76a3e8bb217d1f85c3dda4221"} err="failed to get container status \"b854ccf3f0261f0809cff51e86b924877af436c76a3e8bb217d1f85c3dda4221\": rpc error: code = NotFound desc = could not find container \"b854ccf3f0261f0809cff51e86b924877af436c76a3e8bb217d1f85c3dda4221\": container with ID starting with b854ccf3f0261f0809cff51e86b924877af436c76a3e8bb217d1f85c3dda4221 not found: ID does not exist" Jan 20 17:33:19 crc kubenswrapper[4558]: I0120 17:33:19.087742 4558 scope.go:117] "RemoveContainer" containerID="e2141ae9ecf7ffabbc3fdb13e8e88e4a3ea99534f37f0efd0752308e25d02c42" Jan 20 17:33:19 crc kubenswrapper[4558]: E0120 17:33:19.088033 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e2141ae9ecf7ffabbc3fdb13e8e88e4a3ea99534f37f0efd0752308e25d02c42\": container with ID starting with e2141ae9ecf7ffabbc3fdb13e8e88e4a3ea99534f37f0efd0752308e25d02c42 not found: ID does not exist" containerID="e2141ae9ecf7ffabbc3fdb13e8e88e4a3ea99534f37f0efd0752308e25d02c42" Jan 20 17:33:19 crc kubenswrapper[4558]: I0120 17:33:19.088054 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e2141ae9ecf7ffabbc3fdb13e8e88e4a3ea99534f37f0efd0752308e25d02c42"} err="failed to get container status \"e2141ae9ecf7ffabbc3fdb13e8e88e4a3ea99534f37f0efd0752308e25d02c42\": rpc error: code = NotFound desc = could not find container \"e2141ae9ecf7ffabbc3fdb13e8e88e4a3ea99534f37f0efd0752308e25d02c42\": container with ID starting with e2141ae9ecf7ffabbc3fdb13e8e88e4a3ea99534f37f0efd0752308e25d02c42 not found: ID does not exist" Jan 20 17:33:19 crc kubenswrapper[4558]: I0120 17:33:19.088066 4558 scope.go:117] "RemoveContainer" 
containerID="b70b64e3b3bd252db5a3a5cd9a2f046dc46575ddcb991f7fbfb5eae9caa6a55b" Jan 20 17:33:19 crc kubenswrapper[4558]: E0120 17:33:19.088318 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b70b64e3b3bd252db5a3a5cd9a2f046dc46575ddcb991f7fbfb5eae9caa6a55b\": container with ID starting with b70b64e3b3bd252db5a3a5cd9a2f046dc46575ddcb991f7fbfb5eae9caa6a55b not found: ID does not exist" containerID="b70b64e3b3bd252db5a3a5cd9a2f046dc46575ddcb991f7fbfb5eae9caa6a55b" Jan 20 17:33:19 crc kubenswrapper[4558]: I0120 17:33:19.088340 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b70b64e3b3bd252db5a3a5cd9a2f046dc46575ddcb991f7fbfb5eae9caa6a55b"} err="failed to get container status \"b70b64e3b3bd252db5a3a5cd9a2f046dc46575ddcb991f7fbfb5eae9caa6a55b\": rpc error: code = NotFound desc = could not find container \"b70b64e3b3bd252db5a3a5cd9a2f046dc46575ddcb991f7fbfb5eae9caa6a55b\": container with ID starting with b70b64e3b3bd252db5a3a5cd9a2f046dc46575ddcb991f7fbfb5eae9caa6a55b not found: ID does not exist" Jan 20 17:33:19 crc kubenswrapper[4558]: I0120 17:33:19.088354 4558 scope.go:117] "RemoveContainer" containerID="c51ba6d01533031c51006f14e866bc5bdedd26de8afcbe43c5bcf1e7025e0dd3" Jan 20 17:33:19 crc kubenswrapper[4558]: E0120 17:33:19.088673 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c51ba6d01533031c51006f14e866bc5bdedd26de8afcbe43c5bcf1e7025e0dd3\": container with ID starting with c51ba6d01533031c51006f14e866bc5bdedd26de8afcbe43c5bcf1e7025e0dd3 not found: ID does not exist" containerID="c51ba6d01533031c51006f14e866bc5bdedd26de8afcbe43c5bcf1e7025e0dd3" Jan 20 17:33:19 crc kubenswrapper[4558]: I0120 17:33:19.088711 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c51ba6d01533031c51006f14e866bc5bdedd26de8afcbe43c5bcf1e7025e0dd3"} err="failed to get container status \"c51ba6d01533031c51006f14e866bc5bdedd26de8afcbe43c5bcf1e7025e0dd3\": rpc error: code = NotFound desc = could not find container \"c51ba6d01533031c51006f14e866bc5bdedd26de8afcbe43c5bcf1e7025e0dd3\": container with ID starting with c51ba6d01533031c51006f14e866bc5bdedd26de8afcbe43c5bcf1e7025e0dd3 not found: ID does not exist" Jan 20 17:33:19 crc kubenswrapper[4558]: I0120 17:33:19.088737 4558 scope.go:117] "RemoveContainer" containerID="abb482af37b6f6a3f13af0986ae717f0f32b8467e1976fdbb59f0c73b8240e46" Jan 20 17:33:19 crc kubenswrapper[4558]: E0120 17:33:19.089031 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"abb482af37b6f6a3f13af0986ae717f0f32b8467e1976fdbb59f0c73b8240e46\": container with ID starting with abb482af37b6f6a3f13af0986ae717f0f32b8467e1976fdbb59f0c73b8240e46 not found: ID does not exist" containerID="abb482af37b6f6a3f13af0986ae717f0f32b8467e1976fdbb59f0c73b8240e46" Jan 20 17:33:19 crc kubenswrapper[4558]: I0120 17:33:19.089068 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"abb482af37b6f6a3f13af0986ae717f0f32b8467e1976fdbb59f0c73b8240e46"} err="failed to get container status \"abb482af37b6f6a3f13af0986ae717f0f32b8467e1976fdbb59f0c73b8240e46\": rpc error: code = NotFound desc = could not find container \"abb482af37b6f6a3f13af0986ae717f0f32b8467e1976fdbb59f0c73b8240e46\": container with ID starting with 
abb482af37b6f6a3f13af0986ae717f0f32b8467e1976fdbb59f0c73b8240e46 not found: ID does not exist" Jan 20 17:33:19 crc kubenswrapper[4558]: I0120 17:33:19.089082 4558 scope.go:117] "RemoveContainer" containerID="33d41f8923ec28aa7b94229f05d825972526de6642700e3a9555dbc7f7a6e7ce" Jan 20 17:33:19 crc kubenswrapper[4558]: E0120 17:33:19.089395 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"33d41f8923ec28aa7b94229f05d825972526de6642700e3a9555dbc7f7a6e7ce\": container with ID starting with 33d41f8923ec28aa7b94229f05d825972526de6642700e3a9555dbc7f7a6e7ce not found: ID does not exist" containerID="33d41f8923ec28aa7b94229f05d825972526de6642700e3a9555dbc7f7a6e7ce" Jan 20 17:33:19 crc kubenswrapper[4558]: I0120 17:33:19.089418 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"33d41f8923ec28aa7b94229f05d825972526de6642700e3a9555dbc7f7a6e7ce"} err="failed to get container status \"33d41f8923ec28aa7b94229f05d825972526de6642700e3a9555dbc7f7a6e7ce\": rpc error: code = NotFound desc = could not find container \"33d41f8923ec28aa7b94229f05d825972526de6642700e3a9555dbc7f7a6e7ce\": container with ID starting with 33d41f8923ec28aa7b94229f05d825972526de6642700e3a9555dbc7f7a6e7ce not found: ID does not exist" Jan 20 17:33:19 crc kubenswrapper[4558]: I0120 17:33:19.338070 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-storage-0"] Jan 20 17:33:19 crc kubenswrapper[4558]: I0120 17:33:19.484283 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"4bd47eac-4090-4fc1-91c4-553ebd84964d","Type":"ContainerStarted","Data":"e1524456582814b161678d93ca35c12161b2ebbf4ff7b988cf13d97b61480ee9"} Jan 20 17:33:19 crc kubenswrapper[4558]: I0120 17:33:19.484329 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"4bd47eac-4090-4fc1-91c4-553ebd84964d","Type":"ContainerStarted","Data":"f5234b6873e40d97dab9fc0f95b0d08db6c6a9b663a804a6d9a45e0c39abccc9"} Jan 20 17:33:19 crc kubenswrapper[4558]: I0120 17:33:19.488579 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"85471d11-01bf-4a15-b6bc-fabbdaa5750b","Type":"ContainerStarted","Data":"9c9eafe66c53451254720116575d1d481ccd40e2a6e99e2b650e0cbf12c38725"} Jan 20 17:33:19 crc kubenswrapper[4558]: I0120 17:33:19.501961 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/cinder-scheduler-0" podStartSLOduration=2.501945883 podStartE2EDuration="2.501945883s" podCreationTimestamp="2026-01-20 17:33:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:33:19.497447634 +0000 UTC m=+3093.257785622" watchObservedRunningTime="2026-01-20 17:33:19.501945883 +0000 UTC m=+3093.262283850" Jan 20 17:33:20 crc kubenswrapper[4558]: I0120 17:33:20.278682 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:33:20 crc kubenswrapper[4558]: I0120 17:33:20.510873 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_neutron-75b5f89ffd-dsnd8_e8c90851-f646-4a0c-8c0c-7ecac86fa5a7/neutron-api/0.log" Jan 20 17:33:20 crc kubenswrapper[4558]: I0120 17:33:20.511124 4558 generic.go:334] "Generic (PLEG): container finished" 
podID="e8c90851-f646-4a0c-8c0c-7ecac86fa5a7" containerID="ad169539cfb088bb07910338d7ba610c14a018ab31874536a6e7fafebd60e428" exitCode=137 Jan 20 17:33:20 crc kubenswrapper[4558]: I0120 17:33:20.511216 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-75b5f89ffd-dsnd8" event={"ID":"e8c90851-f646-4a0c-8c0c-7ecac86fa5a7","Type":"ContainerDied","Data":"ad169539cfb088bb07910338d7ba610c14a018ab31874536a6e7fafebd60e428"} Jan 20 17:33:20 crc kubenswrapper[4558]: I0120 17:33:20.527239 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"85471d11-01bf-4a15-b6bc-fabbdaa5750b","Type":"ContainerStarted","Data":"59a8367d177182b4518509dac1c5fda5058c9891b3fbc80a0b902bff689f902b"} Jan 20 17:33:20 crc kubenswrapper[4558]: I0120 17:33:20.527283 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"85471d11-01bf-4a15-b6bc-fabbdaa5750b","Type":"ContainerStarted","Data":"47797d4b979364b402e28a83de59b30acd8a2031f77af23bdb31c8827f23a687"} Jan 20 17:33:20 crc kubenswrapper[4558]: I0120 17:33:20.527294 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"85471d11-01bf-4a15-b6bc-fabbdaa5750b","Type":"ContainerStarted","Data":"9f1797db51cad1bf6e1bdae441554b6ce94379733c94a7ad9722e8bdd8202d02"} Jan 20 17:33:20 crc kubenswrapper[4558]: I0120 17:33:20.527308 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"85471d11-01bf-4a15-b6bc-fabbdaa5750b","Type":"ContainerStarted","Data":"2b29e3812d37926ee8f85d9b22c2a577e69fa75b6f1695111d8868abfe8319dc"} Jan 20 17:33:20 crc kubenswrapper[4558]: I0120 17:33:20.527317 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"85471d11-01bf-4a15-b6bc-fabbdaa5750b","Type":"ContainerStarted","Data":"d0d49ab0892c850e328fb329607d75c06674dd1862b648f22f4a98fb49a98e23"} Jan 20 17:33:20 crc kubenswrapper[4558]: I0120 17:33:20.527327 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"85471d11-01bf-4a15-b6bc-fabbdaa5750b","Type":"ContainerStarted","Data":"2539753eba585cbf5573638f6961f2168ad0433d885e5c40c84893ee902e38b5"} Jan 20 17:33:20 crc kubenswrapper[4558]: I0120 17:33:20.527335 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"85471d11-01bf-4a15-b6bc-fabbdaa5750b","Type":"ContainerStarted","Data":"f84266a8171f0ff80bdcdf75ef52c201b51aa2133964c7208ddf98fa9d157472"} Jan 20 17:33:20 crc kubenswrapper[4558]: I0120 17:33:20.536270 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_neutron-75b5f89ffd-dsnd8_e8c90851-f646-4a0c-8c0c-7ecac86fa5a7/neutron-api/0.log" Jan 20 17:33:20 crc kubenswrapper[4558]: I0120 17:33:20.536345 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-75b5f89ffd-dsnd8" Jan 20 17:33:20 crc kubenswrapper[4558]: I0120 17:33:20.676568 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8c90851-f646-4a0c-8c0c-7ecac86fa5a7-combined-ca-bundle\") pod \"e8c90851-f646-4a0c-8c0c-7ecac86fa5a7\" (UID: \"e8c90851-f646-4a0c-8c0c-7ecac86fa5a7\") " Jan 20 17:33:20 crc kubenswrapper[4558]: I0120 17:33:20.676845 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/e8c90851-f646-4a0c-8c0c-7ecac86fa5a7-config\") pod \"e8c90851-f646-4a0c-8c0c-7ecac86fa5a7\" (UID: \"e8c90851-f646-4a0c-8c0c-7ecac86fa5a7\") " Jan 20 17:33:20 crc kubenswrapper[4558]: I0120 17:33:20.676933 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/e8c90851-f646-4a0c-8c0c-7ecac86fa5a7-httpd-config\") pod \"e8c90851-f646-4a0c-8c0c-7ecac86fa5a7\" (UID: \"e8c90851-f646-4a0c-8c0c-7ecac86fa5a7\") " Jan 20 17:33:20 crc kubenswrapper[4558]: I0120 17:33:20.679439 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r5p5r\" (UniqueName: \"kubernetes.io/projected/e8c90851-f646-4a0c-8c0c-7ecac86fa5a7-kube-api-access-r5p5r\") pod \"e8c90851-f646-4a0c-8c0c-7ecac86fa5a7\" (UID: \"e8c90851-f646-4a0c-8c0c-7ecac86fa5a7\") " Jan 20 17:33:20 crc kubenswrapper[4558]: I0120 17:33:20.684585 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e8c90851-f646-4a0c-8c0c-7ecac86fa5a7-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "e8c90851-f646-4a0c-8c0c-7ecac86fa5a7" (UID: "e8c90851-f646-4a0c-8c0c-7ecac86fa5a7"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:20 crc kubenswrapper[4558]: I0120 17:33:20.684751 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e8c90851-f646-4a0c-8c0c-7ecac86fa5a7-kube-api-access-r5p5r" (OuterVolumeSpecName: "kube-api-access-r5p5r") pod "e8c90851-f646-4a0c-8c0c-7ecac86fa5a7" (UID: "e8c90851-f646-4a0c-8c0c-7ecac86fa5a7"). InnerVolumeSpecName "kube-api-access-r5p5r". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:33:20 crc kubenswrapper[4558]: I0120 17:33:20.738044 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e8c90851-f646-4a0c-8c0c-7ecac86fa5a7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e8c90851-f646-4a0c-8c0c-7ecac86fa5a7" (UID: "e8c90851-f646-4a0c-8c0c-7ecac86fa5a7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:20 crc kubenswrapper[4558]: I0120 17:33:20.738643 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e8c90851-f646-4a0c-8c0c-7ecac86fa5a7-config" (OuterVolumeSpecName: "config") pod "e8c90851-f646-4a0c-8c0c-7ecac86fa5a7" (UID: "e8c90851-f646-4a0c-8c0c-7ecac86fa5a7"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:20 crc kubenswrapper[4558]: I0120 17:33:20.784010 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r5p5r\" (UniqueName: \"kubernetes.io/projected/e8c90851-f646-4a0c-8c0c-7ecac86fa5a7-kube-api-access-r5p5r\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:20 crc kubenswrapper[4558]: I0120 17:33:20.784034 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8c90851-f646-4a0c-8c0c-7ecac86fa5a7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:20 crc kubenswrapper[4558]: I0120 17:33:20.784046 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/e8c90851-f646-4a0c-8c0c-7ecac86fa5a7-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:20 crc kubenswrapper[4558]: I0120 17:33:20.784056 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/e8c90851-f646-4a0c-8c0c-7ecac86fa5a7-httpd-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:21 crc kubenswrapper[4558]: E0120 17:33:21.161436 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="46edde77e0df814c67d5b21fe72058f3eb4efd6acd4a7f68a737219cd8448b40" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:33:21 crc kubenswrapper[4558]: E0120 17:33:21.163737 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="46edde77e0df814c67d5b21fe72058f3eb4efd6acd4a7f68a737219cd8448b40" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:33:21 crc kubenswrapper[4558]: E0120 17:33:21.165308 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="46edde77e0df814c67d5b21fe72058f3eb4efd6acd4a7f68a737219cd8448b40" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:33:21 crc kubenswrapper[4558]: E0120 17:33:21.165403 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podUID="c686e4c2-68d0-4999-9078-90c70927d9ae" containerName="nova-cell1-conductor-conductor" Jan 20 17:33:21 crc kubenswrapper[4558]: I0120 17:33:21.537592 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_neutron-75b5f89ffd-dsnd8_e8c90851-f646-4a0c-8c0c-7ecac86fa5a7/neutron-api/0.log" Jan 20 17:33:21 crc kubenswrapper[4558]: I0120 17:33:21.537688 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-75b5f89ffd-dsnd8" event={"ID":"e8c90851-f646-4a0c-8c0c-7ecac86fa5a7","Type":"ContainerDied","Data":"0d9278d84f2e42b2855482ba5a3543e181c3048878d068d282fee3f7d6cdefd4"} Jan 20 17:33:21 crc kubenswrapper[4558]: I0120 17:33:21.537732 4558 scope.go:117] "RemoveContainer" containerID="13d6958e3f022365a5a2101645a748bcf6a2a94aa5188200a2ebb9ecfc435482" Jan 20 17:33:21 crc kubenswrapper[4558]: I0120 17:33:21.537733 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-75b5f89ffd-dsnd8" Jan 20 17:33:21 crc kubenswrapper[4558]: I0120 17:33:21.580876 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-75b5f89ffd-dsnd8"] Jan 20 17:33:21 crc kubenswrapper[4558]: I0120 17:33:21.581279 4558 scope.go:117] "RemoveContainer" containerID="ad169539cfb088bb07910338d7ba610c14a018ab31874536a6e7fafebd60e428" Jan 20 17:33:21 crc kubenswrapper[4558]: I0120 17:33:21.584675 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"85471d11-01bf-4a15-b6bc-fabbdaa5750b","Type":"ContainerStarted","Data":"9230e487ce369f41cbd9bc631dd1261a599ee2dacc4825a2345c5d5867719c36"} Jan 20 17:33:21 crc kubenswrapper[4558]: I0120 17:33:21.584720 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"85471d11-01bf-4a15-b6bc-fabbdaa5750b","Type":"ContainerStarted","Data":"bad7f6a8d29de5c70b1535d6075c42725412283bb958655edcb536d62f10c419"} Jan 20 17:33:21 crc kubenswrapper[4558]: I0120 17:33:21.584732 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"85471d11-01bf-4a15-b6bc-fabbdaa5750b","Type":"ContainerStarted","Data":"3a2576d6f85fa2071ef37c084cdd8c32f7c6ddb702173be6308da0b714ce8400"} Jan 20 17:33:21 crc kubenswrapper[4558]: I0120 17:33:21.584740 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"85471d11-01bf-4a15-b6bc-fabbdaa5750b","Type":"ContainerStarted","Data":"2773fa3804e1639afaf6e312e08a0b9f626d09103c779a099e0c1bbf0928a8b9"} Jan 20 17:33:21 crc kubenswrapper[4558]: I0120 17:33:21.584749 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"85471d11-01bf-4a15-b6bc-fabbdaa5750b","Type":"ContainerStarted","Data":"23ff4d2ea69aec4c032a694e47beda8f08067327b0540a599b22bd0a5e1d7fe1"} Jan 20 17:33:21 crc kubenswrapper[4558]: I0120 17:33:21.591091 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-75b5f89ffd-dsnd8"] Jan 20 17:33:22 crc kubenswrapper[4558]: I0120 17:33:22.577750 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e8c90851-f646-4a0c-8c0c-7ecac86fa5a7" path="/var/lib/kubelet/pods/e8c90851-f646-4a0c-8c0c-7ecac86fa5a7/volumes" Jan 20 17:33:22 crc kubenswrapper[4558]: I0120 17:33:22.600070 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"85471d11-01bf-4a15-b6bc-fabbdaa5750b","Type":"ContainerStarted","Data":"ab686f3cd584a56d6b1aa614ebae97ca9c4f9dcf44febc60af60b85d5951643a"} Jan 20 17:33:22 crc kubenswrapper[4558]: I0120 17:33:22.600125 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"85471d11-01bf-4a15-b6bc-fabbdaa5750b","Type":"ContainerStarted","Data":"ed8a91ba58ec453d2111a81153c4d592bd204afe8c8353e52171871827a439ce"} Jan 20 17:33:22 crc kubenswrapper[4558]: I0120 17:33:22.600140 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"85471d11-01bf-4a15-b6bc-fabbdaa5750b","Type":"ContainerStarted","Data":"e07ad59dc71e1f67f59603c4aac989fb94b2c710bade32305a1dc8101de80423"} Jan 20 17:33:22 crc kubenswrapper[4558]: I0120 17:33:22.636403 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/swift-storage-0" 
podStartSLOduration=4.636372719 podStartE2EDuration="4.636372719s" podCreationTimestamp="2026-01-20 17:33:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:33:22.6328261 +0000 UTC m=+3096.393164077" watchObservedRunningTime="2026-01-20 17:33:22.636372719 +0000 UTC m=+3096.396710686" Jan 20 17:33:22 crc kubenswrapper[4558]: I0120 17:33:22.698385 4558 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","podeb4881e5-cce6-4423-a9f4-d93f96a2ccb1"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort podeb4881e5-cce6-4423-a9f4-d93f96a2ccb1] : Timed out while waiting for systemd to remove kubepods-besteffort-podeb4881e5_cce6_4423_a9f4_d93f96a2ccb1.slice" Jan 20 17:33:22 crc kubenswrapper[4558]: I0120 17:33:22.741248 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-df78f4c65-4ns5m"] Jan 20 17:33:22 crc kubenswrapper[4558]: E0120 17:33:22.742018 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e8c90851-f646-4a0c-8c0c-7ecac86fa5a7" containerName="neutron-httpd" Jan 20 17:33:22 crc kubenswrapper[4558]: I0120 17:33:22.742041 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e8c90851-f646-4a0c-8c0c-7ecac86fa5a7" containerName="neutron-httpd" Jan 20 17:33:22 crc kubenswrapper[4558]: E0120 17:33:22.742081 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e8c90851-f646-4a0c-8c0c-7ecac86fa5a7" containerName="neutron-api" Jan 20 17:33:22 crc kubenswrapper[4558]: I0120 17:33:22.742089 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e8c90851-f646-4a0c-8c0c-7ecac86fa5a7" containerName="neutron-api" Jan 20 17:33:22 crc kubenswrapper[4558]: I0120 17:33:22.742396 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e8c90851-f646-4a0c-8c0c-7ecac86fa5a7" containerName="neutron-api" Jan 20 17:33:22 crc kubenswrapper[4558]: I0120 17:33:22.742416 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e8c90851-f646-4a0c-8c0c-7ecac86fa5a7" containerName="neutron-httpd" Jan 20 17:33:22 crc kubenswrapper[4558]: I0120 17:33:22.743816 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-df78f4c65-4ns5m" Jan 20 17:33:22 crc kubenswrapper[4558]: I0120 17:33:22.748698 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-df78f4c65-4ns5m"] Jan 20 17:33:22 crc kubenswrapper[4558]: I0120 17:33:22.819720 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:33:22 crc kubenswrapper[4558]: I0120 17:33:22.926849 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8763c4a4-3d9c-4e31-b33c-3bc9c67c55cf-dns-swift-storage-0\") pod \"dnsmasq-dnsmasq-df78f4c65-4ns5m\" (UID: \"8763c4a4-3d9c-4e31-b33c-3bc9c67c55cf\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-df78f4c65-4ns5m" Jan 20 17:33:22 crc kubenswrapper[4558]: I0120 17:33:22.926959 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8763c4a4-3d9c-4e31-b33c-3bc9c67c55cf-config\") pod \"dnsmasq-dnsmasq-df78f4c65-4ns5m\" (UID: \"8763c4a4-3d9c-4e31-b33c-3bc9c67c55cf\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-df78f4c65-4ns5m" Jan 20 17:33:22 crc kubenswrapper[4558]: I0120 17:33:22.927014 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/8763c4a4-3d9c-4e31-b33c-3bc9c67c55cf-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-df78f4c65-4ns5m\" (UID: \"8763c4a4-3d9c-4e31-b33c-3bc9c67c55cf\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-df78f4c65-4ns5m" Jan 20 17:33:22 crc kubenswrapper[4558]: I0120 17:33:22.927676 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dht5s\" (UniqueName: \"kubernetes.io/projected/8763c4a4-3d9c-4e31-b33c-3bc9c67c55cf-kube-api-access-dht5s\") pod \"dnsmasq-dnsmasq-df78f4c65-4ns5m\" (UID: \"8763c4a4-3d9c-4e31-b33c-3bc9c67c55cf\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-df78f4c65-4ns5m" Jan 20 17:33:23 crc kubenswrapper[4558]: I0120 17:33:23.030477 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8763c4a4-3d9c-4e31-b33c-3bc9c67c55cf-dns-swift-storage-0\") pod \"dnsmasq-dnsmasq-df78f4c65-4ns5m\" (UID: \"8763c4a4-3d9c-4e31-b33c-3bc9c67c55cf\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-df78f4c65-4ns5m" Jan 20 17:33:23 crc kubenswrapper[4558]: I0120 17:33:23.030657 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8763c4a4-3d9c-4e31-b33c-3bc9c67c55cf-config\") pod \"dnsmasq-dnsmasq-df78f4c65-4ns5m\" (UID: \"8763c4a4-3d9c-4e31-b33c-3bc9c67c55cf\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-df78f4c65-4ns5m" Jan 20 17:33:23 crc kubenswrapper[4558]: I0120 17:33:23.030736 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/8763c4a4-3d9c-4e31-b33c-3bc9c67c55cf-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-df78f4c65-4ns5m\" (UID: \"8763c4a4-3d9c-4e31-b33c-3bc9c67c55cf\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-df78f4c65-4ns5m" Jan 20 17:33:23 crc kubenswrapper[4558]: I0120 17:33:23.030802 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dht5s\" (UniqueName: 
\"kubernetes.io/projected/8763c4a4-3d9c-4e31-b33c-3bc9c67c55cf-kube-api-access-dht5s\") pod \"dnsmasq-dnsmasq-df78f4c65-4ns5m\" (UID: \"8763c4a4-3d9c-4e31-b33c-3bc9c67c55cf\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-df78f4c65-4ns5m" Jan 20 17:33:23 crc kubenswrapper[4558]: I0120 17:33:23.031548 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/8763c4a4-3d9c-4e31-b33c-3bc9c67c55cf-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-df78f4c65-4ns5m\" (UID: \"8763c4a4-3d9c-4e31-b33c-3bc9c67c55cf\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-df78f4c65-4ns5m" Jan 20 17:33:23 crc kubenswrapper[4558]: I0120 17:33:23.031569 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8763c4a4-3d9c-4e31-b33c-3bc9c67c55cf-config\") pod \"dnsmasq-dnsmasq-df78f4c65-4ns5m\" (UID: \"8763c4a4-3d9c-4e31-b33c-3bc9c67c55cf\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-df78f4c65-4ns5m" Jan 20 17:33:23 crc kubenswrapper[4558]: I0120 17:33:23.031629 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8763c4a4-3d9c-4e31-b33c-3bc9c67c55cf-dns-swift-storage-0\") pod \"dnsmasq-dnsmasq-df78f4c65-4ns5m\" (UID: \"8763c4a4-3d9c-4e31-b33c-3bc9c67c55cf\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-df78f4c65-4ns5m" Jan 20 17:33:23 crc kubenswrapper[4558]: I0120 17:33:23.056102 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dht5s\" (UniqueName: \"kubernetes.io/projected/8763c4a4-3d9c-4e31-b33c-3bc9c67c55cf-kube-api-access-dht5s\") pod \"dnsmasq-dnsmasq-df78f4c65-4ns5m\" (UID: \"8763c4a4-3d9c-4e31-b33c-3bc9c67c55cf\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-df78f4c65-4ns5m" Jan 20 17:33:23 crc kubenswrapper[4558]: I0120 17:33:23.084298 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-df78f4c65-4ns5m" Jan 20 17:33:23 crc kubenswrapper[4558]: I0120 17:33:23.496353 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-df78f4c65-4ns5m"] Jan 20 17:33:23 crc kubenswrapper[4558]: I0120 17:33:23.617889 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-df78f4c65-4ns5m" event={"ID":"8763c4a4-3d9c-4e31-b33c-3bc9c67c55cf","Type":"ContainerStarted","Data":"e218c3452d8595d6da904a9e460b3a71848450d846aa87617211affbfe339500"} Jan 20 17:33:24 crc kubenswrapper[4558]: I0120 17:33:24.631486 4558 generic.go:334] "Generic (PLEG): container finished" podID="8763c4a4-3d9c-4e31-b33c-3bc9c67c55cf" containerID="7e2a717fb31d664a7b093b633891b75455ed429e27dcd7947ed7830a4c04e595" exitCode=0 Jan 20 17:33:24 crc kubenswrapper[4558]: I0120 17:33:24.631552 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-df78f4c65-4ns5m" event={"ID":"8763c4a4-3d9c-4e31-b33c-3bc9c67c55cf","Type":"ContainerDied","Data":"7e2a717fb31d664a7b093b633891b75455ed429e27dcd7947ed7830a4c04e595"} Jan 20 17:33:24 crc kubenswrapper[4558]: I0120 17:33:24.712587 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:33:25 crc kubenswrapper[4558]: I0120 17:33:25.647850 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-df78f4c65-4ns5m" event={"ID":"8763c4a4-3d9c-4e31-b33c-3bc9c67c55cf","Type":"ContainerStarted","Data":"b3d7f402be9f8d06e4e0f9ddd335d93a05f7d8f3602e1aebbde602938c65db9b"} Jan 20 17:33:25 crc kubenswrapper[4558]: I0120 17:33:25.648022 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-df78f4c65-4ns5m" Jan 20 17:33:25 crc kubenswrapper[4558]: I0120 17:33:25.669444 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-df78f4c65-4ns5m" podStartSLOduration=3.669420981 podStartE2EDuration="3.669420981s" podCreationTimestamp="2026-01-20 17:33:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:33:25.661778241 +0000 UTC m=+3099.422116208" watchObservedRunningTime="2026-01-20 17:33:25.669420981 +0000 UTC m=+3099.429758949" Jan 20 17:33:26 crc kubenswrapper[4558]: E0120 17:33:26.160062 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="46edde77e0df814c67d5b21fe72058f3eb4efd6acd4a7f68a737219cd8448b40" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:33:26 crc kubenswrapper[4558]: E0120 17:33:26.161752 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="46edde77e0df814c67d5b21fe72058f3eb4efd6acd4a7f68a737219cd8448b40" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:33:26 crc kubenswrapper[4558]: E0120 17:33:26.163559 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" 
containerID="46edde77e0df814c67d5b21fe72058f3eb4efd6acd4a7f68a737219cd8448b40" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:33:26 crc kubenswrapper[4558]: E0120 17:33:26.163614 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podUID="c686e4c2-68d0-4999-9078-90c70927d9ae" containerName="nova-cell1-conductor-conductor" Jan 20 17:33:27 crc kubenswrapper[4558]: I0120 17:33:27.569928 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/ceilometer-0" podUID="b115808a-3ae3-40c5-b330-f58aa6af7503" containerName="proxy-httpd" probeResult="failure" output="Get \"http://10.217.1.226:3000/\": dial tcp 10.217.1.226:3000: connect: connection refused" Jan 20 17:33:27 crc kubenswrapper[4558]: I0120 17:33:27.713064 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/barbican-api-55c969d68-6j7rc" Jan 20 17:33:27 crc kubenswrapper[4558]: I0120 17:33:27.780191 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/barbican-api-55c969d68-6j7rc" Jan 20 17:33:27 crc kubenswrapper[4558]: I0120 17:33:27.864115 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-api-b7d7d464-2s2ds"] Jan 20 17:33:27 crc kubenswrapper[4558]: I0120 17:33:27.864412 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-api-b7d7d464-2s2ds" podUID="f9691b53-0a90-45ee-bd69-c75b1195eeb8" containerName="barbican-api-log" containerID="cri-o://ea4aa09b4d785b96f464c4b23e3df19dec0b597d02c3e90b688f8fa7421d46c0" gracePeriod=30 Jan 20 17:33:27 crc kubenswrapper[4558]: I0120 17:33:27.864963 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-api-b7d7d464-2s2ds" podUID="f9691b53-0a90-45ee-bd69-c75b1195eeb8" containerName="barbican-api" containerID="cri-o://355dd454cf13c43abb76b22e552d53dadf06d22d2c8ec34e79c955905ece6c1f" gracePeriod=30 Jan 20 17:33:28 crc kubenswrapper[4558]: I0120 17:33:28.042645 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:33:28 crc kubenswrapper[4558]: I0120 17:33:28.701681 4558 generic.go:334] "Generic (PLEG): container finished" podID="f9691b53-0a90-45ee-bd69-c75b1195eeb8" containerID="ea4aa09b4d785b96f464c4b23e3df19dec0b597d02c3e90b688f8fa7421d46c0" exitCode=143 Jan 20 17:33:28 crc kubenswrapper[4558]: I0120 17:33:28.701762 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-b7d7d464-2s2ds" event={"ID":"f9691b53-0a90-45ee-bd69-c75b1195eeb8","Type":"ContainerDied","Data":"ea4aa09b4d785b96f464c4b23e3df19dec0b597d02c3e90b688f8fa7421d46c0"} Jan 20 17:33:29 crc kubenswrapper[4558]: I0120 17:33:29.566228 4558 scope.go:117] "RemoveContainer" containerID="f275e9f5de330b3c79316d5871106c6bcf90b698e0744c5beb28da45c336b36d" Jan 20 17:33:29 crc kubenswrapper[4558]: E0120 17:33:29.567219 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:33:30 crc kubenswrapper[4558]: I0120 17:33:30.726201 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"a336b03d-9969-41de-9f19-f3479f76a33d","Type":"ContainerDied","Data":"90ed3b76b87cd561c155928fde1a7320fbfce5af416fc09381ed42b6ead92480"} Jan 20 17:33:30 crc kubenswrapper[4558]: I0120 17:33:30.726153 4558 generic.go:334] "Generic (PLEG): container finished" podID="a336b03d-9969-41de-9f19-f3479f76a33d" containerID="90ed3b76b87cd561c155928fde1a7320fbfce5af416fc09381ed42b6ead92480" exitCode=137 Jan 20 17:33:30 crc kubenswrapper[4558]: I0120 17:33:30.728436 4558 generic.go:334] "Generic (PLEG): container finished" podID="c686e4c2-68d0-4999-9078-90c70927d9ae" containerID="46edde77e0df814c67d5b21fe72058f3eb4efd6acd4a7f68a737219cd8448b40" exitCode=137 Jan 20 17:33:30 crc kubenswrapper[4558]: I0120 17:33:30.728478 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"c686e4c2-68d0-4999-9078-90c70927d9ae","Type":"ContainerDied","Data":"46edde77e0df814c67d5b21fe72058f3eb4efd6acd4a7f68a737219cd8448b40"} Jan 20 17:33:30 crc kubenswrapper[4558]: I0120 17:33:30.729656 4558 generic.go:334] "Generic (PLEG): container finished" podID="0c6a6265-cf90-4039-9200-ba478d612baa" containerID="44c62c978803560adab011f48335024e3733608109c5e30c32a43f89dc5997fd" exitCode=0 Jan 20 17:33:30 crc kubenswrapper[4558]: I0120 17:33:30.729675 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" event={"ID":"0c6a6265-cf90-4039-9200-ba478d612baa","Type":"ContainerDied","Data":"44c62c978803560adab011f48335024e3733608109c5e30c32a43f89dc5997fd"} Jan 20 17:33:30 crc kubenswrapper[4558]: I0120 17:33:30.920568 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:33:30 crc kubenswrapper[4558]: I0120 17:33:30.926082 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:33:30 crc kubenswrapper[4558]: I0120 17:33:30.992379 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzmw4\" (UniqueName: \"kubernetes.io/projected/a336b03d-9969-41de-9f19-f3479f76a33d-kube-api-access-lzmw4\") pod \"a336b03d-9969-41de-9f19-f3479f76a33d\" (UID: \"a336b03d-9969-41de-9f19-f3479f76a33d\") " Jan 20 17:33:30 crc kubenswrapper[4558]: I0120 17:33:30.992729 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-btqr4\" (UniqueName: \"kubernetes.io/projected/c686e4c2-68d0-4999-9078-90c70927d9ae-kube-api-access-btqr4\") pod \"c686e4c2-68d0-4999-9078-90c70927d9ae\" (UID: \"c686e4c2-68d0-4999-9078-90c70927d9ae\") " Jan 20 17:33:30 crc kubenswrapper[4558]: I0120 17:33:30.992938 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a336b03d-9969-41de-9f19-f3479f76a33d-combined-ca-bundle\") pod \"a336b03d-9969-41de-9f19-f3479f76a33d\" (UID: \"a336b03d-9969-41de-9f19-f3479f76a33d\") " Jan 20 17:33:30 crc kubenswrapper[4558]: I0120 17:33:30.993015 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a336b03d-9969-41de-9f19-f3479f76a33d-config-data\") pod \"a336b03d-9969-41de-9f19-f3479f76a33d\" (UID: \"a336b03d-9969-41de-9f19-f3479f76a33d\") " Jan 20 17:33:30 crc kubenswrapper[4558]: I0120 17:33:30.993056 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c686e4c2-68d0-4999-9078-90c70927d9ae-combined-ca-bundle\") pod \"c686e4c2-68d0-4999-9078-90c70927d9ae\" (UID: \"c686e4c2-68d0-4999-9078-90c70927d9ae\") " Jan 20 17:33:30 crc kubenswrapper[4558]: I0120 17:33:30.993100 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c686e4c2-68d0-4999-9078-90c70927d9ae-config-data\") pod \"c686e4c2-68d0-4999-9078-90c70927d9ae\" (UID: \"c686e4c2-68d0-4999-9078-90c70927d9ae\") " Jan 20 17:33:30 crc kubenswrapper[4558]: I0120 17:33:30.996653 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a336b03d-9969-41de-9f19-f3479f76a33d-kube-api-access-lzmw4" (OuterVolumeSpecName: "kube-api-access-lzmw4") pod "a336b03d-9969-41de-9f19-f3479f76a33d" (UID: "a336b03d-9969-41de-9f19-f3479f76a33d"). InnerVolumeSpecName "kube-api-access-lzmw4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:33:31 crc kubenswrapper[4558]: I0120 17:33:31.000250 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c686e4c2-68d0-4999-9078-90c70927d9ae-kube-api-access-btqr4" (OuterVolumeSpecName: "kube-api-access-btqr4") pod "c686e4c2-68d0-4999-9078-90c70927d9ae" (UID: "c686e4c2-68d0-4999-9078-90c70927d9ae"). InnerVolumeSpecName "kube-api-access-btqr4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:33:31 crc kubenswrapper[4558]: I0120 17:33:31.020971 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a336b03d-9969-41de-9f19-f3479f76a33d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a336b03d-9969-41de-9f19-f3479f76a33d" (UID: "a336b03d-9969-41de-9f19-f3479f76a33d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:31 crc kubenswrapper[4558]: I0120 17:33:31.024413 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a336b03d-9969-41de-9f19-f3479f76a33d-config-data" (OuterVolumeSpecName: "config-data") pod "a336b03d-9969-41de-9f19-f3479f76a33d" (UID: "a336b03d-9969-41de-9f19-f3479f76a33d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:31 crc kubenswrapper[4558]: I0120 17:33:31.030779 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c686e4c2-68d0-4999-9078-90c70927d9ae-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c686e4c2-68d0-4999-9078-90c70927d9ae" (UID: "c686e4c2-68d0-4999-9078-90c70927d9ae"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:31 crc kubenswrapper[4558]: I0120 17:33:31.039225 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c686e4c2-68d0-4999-9078-90c70927d9ae-config-data" (OuterVolumeSpecName: "config-data") pod "c686e4c2-68d0-4999-9078-90c70927d9ae" (UID: "c686e4c2-68d0-4999-9078-90c70927d9ae"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:31 crc kubenswrapper[4558]: I0120 17:33:31.096889 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-btqr4\" (UniqueName: \"kubernetes.io/projected/c686e4c2-68d0-4999-9078-90c70927d9ae-kube-api-access-btqr4\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:31 crc kubenswrapper[4558]: I0120 17:33:31.096922 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a336b03d-9969-41de-9f19-f3479f76a33d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:31 crc kubenswrapper[4558]: I0120 17:33:31.096937 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a336b03d-9969-41de-9f19-f3479f76a33d-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:31 crc kubenswrapper[4558]: I0120 17:33:31.096950 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c686e4c2-68d0-4999-9078-90c70927d9ae-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:31 crc kubenswrapper[4558]: I0120 17:33:31.096961 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c686e4c2-68d0-4999-9078-90c70927d9ae-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:31 crc kubenswrapper[4558]: I0120 17:33:31.096971 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzmw4\" (UniqueName: \"kubernetes.io/projected/a336b03d-9969-41de-9f19-f3479f76a33d-kube-api-access-lzmw4\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:31 crc kubenswrapper[4558]: I0120 17:33:31.334973 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-api-b7d7d464-2s2ds" Jan 20 17:33:31 crc kubenswrapper[4558]: I0120 17:33:31.401677 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9691b53-0a90-45ee-bd69-c75b1195eeb8-config-data\") pod \"f9691b53-0a90-45ee-bd69-c75b1195eeb8\" (UID: \"f9691b53-0a90-45ee-bd69-c75b1195eeb8\") " Jan 20 17:33:31 crc kubenswrapper[4558]: I0120 17:33:31.401773 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9691b53-0a90-45ee-bd69-c75b1195eeb8-combined-ca-bundle\") pod \"f9691b53-0a90-45ee-bd69-c75b1195eeb8\" (UID: \"f9691b53-0a90-45ee-bd69-c75b1195eeb8\") " Jan 20 17:33:31 crc kubenswrapper[4558]: I0120 17:33:31.401829 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f9691b53-0a90-45ee-bd69-c75b1195eeb8-config-data-custom\") pod \"f9691b53-0a90-45ee-bd69-c75b1195eeb8\" (UID: \"f9691b53-0a90-45ee-bd69-c75b1195eeb8\") " Jan 20 17:33:31 crc kubenswrapper[4558]: I0120 17:33:31.402042 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4kdg5\" (UniqueName: \"kubernetes.io/projected/f9691b53-0a90-45ee-bd69-c75b1195eeb8-kube-api-access-4kdg5\") pod \"f9691b53-0a90-45ee-bd69-c75b1195eeb8\" (UID: \"f9691b53-0a90-45ee-bd69-c75b1195eeb8\") " Jan 20 17:33:31 crc kubenswrapper[4558]: I0120 17:33:31.402071 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f9691b53-0a90-45ee-bd69-c75b1195eeb8-logs\") pod \"f9691b53-0a90-45ee-bd69-c75b1195eeb8\" (UID: \"f9691b53-0a90-45ee-bd69-c75b1195eeb8\") " Jan 20 17:33:31 crc kubenswrapper[4558]: I0120 17:33:31.402526 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f9691b53-0a90-45ee-bd69-c75b1195eeb8-logs" (OuterVolumeSpecName: "logs") pod "f9691b53-0a90-45ee-bd69-c75b1195eeb8" (UID: "f9691b53-0a90-45ee-bd69-c75b1195eeb8"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:33:31 crc kubenswrapper[4558]: I0120 17:33:31.402881 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f9691b53-0a90-45ee-bd69-c75b1195eeb8-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:31 crc kubenswrapper[4558]: I0120 17:33:31.407270 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f9691b53-0a90-45ee-bd69-c75b1195eeb8-kube-api-access-4kdg5" (OuterVolumeSpecName: "kube-api-access-4kdg5") pod "f9691b53-0a90-45ee-bd69-c75b1195eeb8" (UID: "f9691b53-0a90-45ee-bd69-c75b1195eeb8"). InnerVolumeSpecName "kube-api-access-4kdg5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:33:31 crc kubenswrapper[4558]: I0120 17:33:31.407341 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9691b53-0a90-45ee-bd69-c75b1195eeb8-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "f9691b53-0a90-45ee-bd69-c75b1195eeb8" (UID: "f9691b53-0a90-45ee-bd69-c75b1195eeb8"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:31 crc kubenswrapper[4558]: I0120 17:33:31.425853 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9691b53-0a90-45ee-bd69-c75b1195eeb8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f9691b53-0a90-45ee-bd69-c75b1195eeb8" (UID: "f9691b53-0a90-45ee-bd69-c75b1195eeb8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:31 crc kubenswrapper[4558]: I0120 17:33:31.442130 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9691b53-0a90-45ee-bd69-c75b1195eeb8-config-data" (OuterVolumeSpecName: "config-data") pod "f9691b53-0a90-45ee-bd69-c75b1195eeb8" (UID: "f9691b53-0a90-45ee-bd69-c75b1195eeb8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:31 crc kubenswrapper[4558]: I0120 17:33:31.505421 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9691b53-0a90-45ee-bd69-c75b1195eeb8-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:31 crc kubenswrapper[4558]: I0120 17:33:31.505678 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9691b53-0a90-45ee-bd69-c75b1195eeb8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:31 crc kubenswrapper[4558]: I0120 17:33:31.505749 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f9691b53-0a90-45ee-bd69-c75b1195eeb8-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:31 crc kubenswrapper[4558]: I0120 17:33:31.505826 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4kdg5\" (UniqueName: \"kubernetes.io/projected/f9691b53-0a90-45ee-bd69-c75b1195eeb8-kube-api-access-4kdg5\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:31 crc kubenswrapper[4558]: I0120 17:33:31.742010 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" event={"ID":"0c6a6265-cf90-4039-9200-ba478d612baa","Type":"ContainerStarted","Data":"b6873866e3f13dd35b6f2b58c613d3e9843e8b8a9fdc297c58001a3d665529a7"} Jan 20 17:33:31 crc kubenswrapper[4558]: I0120 17:33:31.742735 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:33:31 crc kubenswrapper[4558]: I0120 17:33:31.743884 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:33:31 crc kubenswrapper[4558]: I0120 17:33:31.743861 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"a336b03d-9969-41de-9f19-f3479f76a33d","Type":"ContainerDied","Data":"d3cfa6c9ac5163efbd1033e4185e44f56fb8dcb7c6a8af9fb92422829151f9ea"} Jan 20 17:33:31 crc kubenswrapper[4558]: I0120 17:33:31.743977 4558 scope.go:117] "RemoveContainer" containerID="90ed3b76b87cd561c155928fde1a7320fbfce5af416fc09381ed42b6ead92480" Jan 20 17:33:31 crc kubenswrapper[4558]: I0120 17:33:31.745789 4558 generic.go:334] "Generic (PLEG): container finished" podID="0696a635-5dc9-46e9-8502-47fab9ff761c" containerID="89fad12073aaf3b621cce6e9c2e512578ea6f57b9f2d06d4a3ac809c1fe90a00" exitCode=0 Jan 20 17:33:31 crc kubenswrapper[4558]: I0120 17:33:31.746273 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-0" event={"ID":"0696a635-5dc9-46e9-8502-47fab9ff761c","Type":"ContainerDied","Data":"89fad12073aaf3b621cce6e9c2e512578ea6f57b9f2d06d4a3ac809c1fe90a00"} Jan 20 17:33:31 crc kubenswrapper[4558]: I0120 17:33:31.749631 4558 generic.go:334] "Generic (PLEG): container finished" podID="f9691b53-0a90-45ee-bd69-c75b1195eeb8" containerID="355dd454cf13c43abb76b22e552d53dadf06d22d2c8ec34e79c955905ece6c1f" exitCode=0 Jan 20 17:33:31 crc kubenswrapper[4558]: I0120 17:33:31.749692 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-b7d7d464-2s2ds" event={"ID":"f9691b53-0a90-45ee-bd69-c75b1195eeb8","Type":"ContainerDied","Data":"355dd454cf13c43abb76b22e552d53dadf06d22d2c8ec34e79c955905ece6c1f"} Jan 20 17:33:31 crc kubenswrapper[4558]: I0120 17:33:31.749715 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-b7d7d464-2s2ds" event={"ID":"f9691b53-0a90-45ee-bd69-c75b1195eeb8","Type":"ContainerDied","Data":"93673d7da26917595853550ced8a076cdb7f6814e0bc857960d3ee07fd0ffe91"} Jan 20 17:33:31 crc kubenswrapper[4558]: I0120 17:33:31.749780 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-api-b7d7d464-2s2ds" Jan 20 17:33:31 crc kubenswrapper[4558]: I0120 17:33:31.751492 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"c686e4c2-68d0-4999-9078-90c70927d9ae","Type":"ContainerDied","Data":"e29581ea8179ca9452a837725910db234522125b44cce80de4ab2a0f44cb5ea0"} Jan 20 17:33:31 crc kubenswrapper[4558]: I0120 17:33:31.751659 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:33:31 crc kubenswrapper[4558]: I0120 17:33:31.773883 4558 scope.go:117] "RemoveContainer" containerID="355dd454cf13c43abb76b22e552d53dadf06d22d2c8ec34e79c955905ece6c1f" Jan 20 17:33:31 crc kubenswrapper[4558]: I0120 17:33:31.777390 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" podStartSLOduration=35.777378047 podStartE2EDuration="35.777378047s" podCreationTimestamp="2026-01-20 17:32:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:33:31.772788207 +0000 UTC m=+3105.533126174" watchObservedRunningTime="2026-01-20 17:33:31.777378047 +0000 UTC m=+3105.537716004" Jan 20 17:33:31 crc kubenswrapper[4558]: I0120 17:33:31.912713 4558 scope.go:117] "RemoveContainer" containerID="ea4aa09b4d785b96f464c4b23e3df19dec0b597d02c3e90b688f8fa7421d46c0" Jan 20 17:33:31 crc kubenswrapper[4558]: I0120 17:33:31.929375 4558 scope.go:117] "RemoveContainer" containerID="355dd454cf13c43abb76b22e552d53dadf06d22d2c8ec34e79c955905ece6c1f" Jan 20 17:33:31 crc kubenswrapper[4558]: E0120 17:33:31.929798 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"355dd454cf13c43abb76b22e552d53dadf06d22d2c8ec34e79c955905ece6c1f\": container with ID starting with 355dd454cf13c43abb76b22e552d53dadf06d22d2c8ec34e79c955905ece6c1f not found: ID does not exist" containerID="355dd454cf13c43abb76b22e552d53dadf06d22d2c8ec34e79c955905ece6c1f" Jan 20 17:33:31 crc kubenswrapper[4558]: I0120 17:33:31.929845 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"355dd454cf13c43abb76b22e552d53dadf06d22d2c8ec34e79c955905ece6c1f"} err="failed to get container status \"355dd454cf13c43abb76b22e552d53dadf06d22d2c8ec34e79c955905ece6c1f\": rpc error: code = NotFound desc = could not find container \"355dd454cf13c43abb76b22e552d53dadf06d22d2c8ec34e79c955905ece6c1f\": container with ID starting with 355dd454cf13c43abb76b22e552d53dadf06d22d2c8ec34e79c955905ece6c1f not found: ID does not exist" Jan 20 17:33:31 crc kubenswrapper[4558]: I0120 17:33:31.929885 4558 scope.go:117] "RemoveContainer" containerID="ea4aa09b4d785b96f464c4b23e3df19dec0b597d02c3e90b688f8fa7421d46c0" Jan 20 17:33:31 crc kubenswrapper[4558]: E0120 17:33:31.930248 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ea4aa09b4d785b96f464c4b23e3df19dec0b597d02c3e90b688f8fa7421d46c0\": container with ID starting with ea4aa09b4d785b96f464c4b23e3df19dec0b597d02c3e90b688f8fa7421d46c0 not found: ID does not exist" containerID="ea4aa09b4d785b96f464c4b23e3df19dec0b597d02c3e90b688f8fa7421d46c0" Jan 20 17:33:31 crc kubenswrapper[4558]: I0120 17:33:31.930292 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ea4aa09b4d785b96f464c4b23e3df19dec0b597d02c3e90b688f8fa7421d46c0"} err="failed to get container status \"ea4aa09b4d785b96f464c4b23e3df19dec0b597d02c3e90b688f8fa7421d46c0\": rpc error: code = NotFound desc = could not find container \"ea4aa09b4d785b96f464c4b23e3df19dec0b597d02c3e90b688f8fa7421d46c0\": container with ID starting with ea4aa09b4d785b96f464c4b23e3df19dec0b597d02c3e90b688f8fa7421d46c0 not found: ID does not exist" Jan 20 17:33:31 crc kubenswrapper[4558]: I0120 17:33:31.930315 4558 
scope.go:117] "RemoveContainer" containerID="46edde77e0df814c67d5b21fe72058f3eb4efd6acd4a7f68a737219cd8448b40" Jan 20 17:33:31 crc kubenswrapper[4558]: I0120 17:33:31.944711 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:33:31 crc kubenswrapper[4558]: I0120 17:33:31.957198 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:33:31 crc kubenswrapper[4558]: I0120 17:33:31.973056 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:33:31 crc kubenswrapper[4558]: I0120 17:33:31.979214 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:33:31 crc kubenswrapper[4558]: E0120 17:33:31.979638 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9691b53-0a90-45ee-bd69-c75b1195eeb8" containerName="barbican-api" Jan 20 17:33:31 crc kubenswrapper[4558]: I0120 17:33:31.979660 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9691b53-0a90-45ee-bd69-c75b1195eeb8" containerName="barbican-api" Jan 20 17:33:31 crc kubenswrapper[4558]: E0120 17:33:31.979706 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c686e4c2-68d0-4999-9078-90c70927d9ae" containerName="nova-cell1-conductor-conductor" Jan 20 17:33:31 crc kubenswrapper[4558]: I0120 17:33:31.979712 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c686e4c2-68d0-4999-9078-90c70927d9ae" containerName="nova-cell1-conductor-conductor" Jan 20 17:33:31 crc kubenswrapper[4558]: E0120 17:33:31.979723 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9691b53-0a90-45ee-bd69-c75b1195eeb8" containerName="barbican-api-log" Jan 20 17:33:31 crc kubenswrapper[4558]: I0120 17:33:31.979728 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9691b53-0a90-45ee-bd69-c75b1195eeb8" containerName="barbican-api-log" Jan 20 17:33:31 crc kubenswrapper[4558]: E0120 17:33:31.979744 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a336b03d-9969-41de-9f19-f3479f76a33d" containerName="nova-scheduler-scheduler" Jan 20 17:33:31 crc kubenswrapper[4558]: I0120 17:33:31.979749 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a336b03d-9969-41de-9f19-f3479f76a33d" containerName="nova-scheduler-scheduler" Jan 20 17:33:31 crc kubenswrapper[4558]: I0120 17:33:31.979970 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c686e4c2-68d0-4999-9078-90c70927d9ae" containerName="nova-cell1-conductor-conductor" Jan 20 17:33:31 crc kubenswrapper[4558]: I0120 17:33:31.979988 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9691b53-0a90-45ee-bd69-c75b1195eeb8" containerName="barbican-api" Jan 20 17:33:31 crc kubenswrapper[4558]: I0120 17:33:31.980005 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9691b53-0a90-45ee-bd69-c75b1195eeb8" containerName="barbican-api-log" Jan 20 17:33:31 crc kubenswrapper[4558]: I0120 17:33:31.980020 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a336b03d-9969-41de-9f19-f3479f76a33d" containerName="nova-scheduler-scheduler" Jan 20 17:33:31 crc kubenswrapper[4558]: I0120 17:33:31.980685 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:33:31 crc kubenswrapper[4558]: I0120 17:33:31.987033 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-conductor-config-data" Jan 20 17:33:31 crc kubenswrapper[4558]: I0120 17:33:31.989029 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:33:31 crc kubenswrapper[4558]: I0120 17:33:31.999230 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-api-b7d7d464-2s2ds"] Jan 20 17:33:32 crc kubenswrapper[4558]: I0120 17:33:32.010312 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:33:32 crc kubenswrapper[4558]: I0120 17:33:32.018314 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-api-b7d7d464-2s2ds"] Jan 20 17:33:32 crc kubenswrapper[4558]: I0120 17:33:32.029370 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:33:32 crc kubenswrapper[4558]: I0120 17:33:32.030867 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:33:32 crc kubenswrapper[4558]: I0120 17:33:32.041719 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-scheduler-config-data" Jan 20 17:33:32 crc kubenswrapper[4558]: I0120 17:33:32.047730 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:33:32 crc kubenswrapper[4558]: I0120 17:33:32.124029 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dfc93436-8ed7-4388-bd32-347d897ac50a-config-data\") pod \"nova-scheduler-0\" (UID: \"dfc93436-8ed7-4388-bd32-347d897ac50a\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:33:32 crc kubenswrapper[4558]: I0120 17:33:32.124105 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/097b903e-9e8a-464f-99e3-a69b68206465-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"097b903e-9e8a-464f-99e3-a69b68206465\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:33:32 crc kubenswrapper[4558]: I0120 17:33:32.124256 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c4kvd\" (UniqueName: \"kubernetes.io/projected/dfc93436-8ed7-4388-bd32-347d897ac50a-kube-api-access-c4kvd\") pod \"nova-scheduler-0\" (UID: \"dfc93436-8ed7-4388-bd32-347d897ac50a\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:33:32 crc kubenswrapper[4558]: I0120 17:33:32.124329 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/097b903e-9e8a-464f-99e3-a69b68206465-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"097b903e-9e8a-464f-99e3-a69b68206465\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:33:32 crc kubenswrapper[4558]: I0120 17:33:32.124378 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dfc93436-8ed7-4388-bd32-347d897ac50a-combined-ca-bundle\") pod 
\"nova-scheduler-0\" (UID: \"dfc93436-8ed7-4388-bd32-347d897ac50a\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:33:32 crc kubenswrapper[4558]: I0120 17:33:32.124482 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tzrg2\" (UniqueName: \"kubernetes.io/projected/097b903e-9e8a-464f-99e3-a69b68206465-kube-api-access-tzrg2\") pod \"nova-cell1-conductor-0\" (UID: \"097b903e-9e8a-464f-99e3-a69b68206465\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:33:32 crc kubenswrapper[4558]: I0120 17:33:32.226993 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tzrg2\" (UniqueName: \"kubernetes.io/projected/097b903e-9e8a-464f-99e3-a69b68206465-kube-api-access-tzrg2\") pod \"nova-cell1-conductor-0\" (UID: \"097b903e-9e8a-464f-99e3-a69b68206465\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:33:32 crc kubenswrapper[4558]: I0120 17:33:32.227132 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dfc93436-8ed7-4388-bd32-347d897ac50a-config-data\") pod \"nova-scheduler-0\" (UID: \"dfc93436-8ed7-4388-bd32-347d897ac50a\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:33:32 crc kubenswrapper[4558]: I0120 17:33:32.227208 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/097b903e-9e8a-464f-99e3-a69b68206465-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"097b903e-9e8a-464f-99e3-a69b68206465\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:33:32 crc kubenswrapper[4558]: I0120 17:33:32.227295 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c4kvd\" (UniqueName: \"kubernetes.io/projected/dfc93436-8ed7-4388-bd32-347d897ac50a-kube-api-access-c4kvd\") pod \"nova-scheduler-0\" (UID: \"dfc93436-8ed7-4388-bd32-347d897ac50a\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:33:32 crc kubenswrapper[4558]: I0120 17:33:32.227355 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/097b903e-9e8a-464f-99e3-a69b68206465-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"097b903e-9e8a-464f-99e3-a69b68206465\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:33:32 crc kubenswrapper[4558]: I0120 17:33:32.227393 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dfc93436-8ed7-4388-bd32-347d897ac50a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"dfc93436-8ed7-4388-bd32-347d897ac50a\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:33:32 crc kubenswrapper[4558]: I0120 17:33:32.233569 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/097b903e-9e8a-464f-99e3-a69b68206465-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"097b903e-9e8a-464f-99e3-a69b68206465\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:33:32 crc kubenswrapper[4558]: I0120 17:33:32.234356 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/097b903e-9e8a-464f-99e3-a69b68206465-config-data\") pod \"nova-cell1-conductor-0\" (UID: 
\"097b903e-9e8a-464f-99e3-a69b68206465\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:33:32 crc kubenswrapper[4558]: I0120 17:33:32.234976 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dfc93436-8ed7-4388-bd32-347d897ac50a-config-data\") pod \"nova-scheduler-0\" (UID: \"dfc93436-8ed7-4388-bd32-347d897ac50a\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:33:32 crc kubenswrapper[4558]: I0120 17:33:32.237849 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dfc93436-8ed7-4388-bd32-347d897ac50a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"dfc93436-8ed7-4388-bd32-347d897ac50a\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:33:32 crc kubenswrapper[4558]: I0120 17:33:32.247229 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c4kvd\" (UniqueName: \"kubernetes.io/projected/dfc93436-8ed7-4388-bd32-347d897ac50a-kube-api-access-c4kvd\") pod \"nova-scheduler-0\" (UID: \"dfc93436-8ed7-4388-bd32-347d897ac50a\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:33:32 crc kubenswrapper[4558]: I0120 17:33:32.254724 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tzrg2\" (UniqueName: \"kubernetes.io/projected/097b903e-9e8a-464f-99e3-a69b68206465-kube-api-access-tzrg2\") pod \"nova-cell1-conductor-0\" (UID: \"097b903e-9e8a-464f-99e3-a69b68206465\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:33:32 crc kubenswrapper[4558]: I0120 17:33:32.306212 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:33:32 crc kubenswrapper[4558]: I0120 17:33:32.306979 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/placement-95f6c56c8-5t8cm" Jan 20 17:33:32 crc kubenswrapper[4558]: I0120 17:33:32.321904 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/placement-95f6c56c8-5t8cm" Jan 20 17:33:32 crc kubenswrapper[4558]: I0120 17:33:32.362955 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:33:32 crc kubenswrapper[4558]: I0120 17:33:32.399157 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-76cff77fc4-bkwrr"] Jan 20 17:33:32 crc kubenswrapper[4558]: I0120 17:33:32.399950 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/placement-76cff77fc4-bkwrr" podUID="e7c82345-58f6-4542-9483-604f1ad2b5f4" containerName="placement-log" containerID="cri-o://3907acb149bd72bc7bc6e07f835a9cc0d239aacbc88c4c4e9b53259322e14cc9" gracePeriod=30 Jan 20 17:33:32 crc kubenswrapper[4558]: I0120 17:33:32.400120 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/placement-76cff77fc4-bkwrr" podUID="e7c82345-58f6-4542-9483-604f1ad2b5f4" containerName="placement-api" containerID="cri-o://61313214dcb3a55bfb055d2bef3c402cad0651369d70f00a0842a2ed0b39da06" gracePeriod=30 Jan 20 17:33:32 crc kubenswrapper[4558]: I0120 17:33:32.581272 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a336b03d-9969-41de-9f19-f3479f76a33d" path="/var/lib/kubelet/pods/a336b03d-9969-41de-9f19-f3479f76a33d/volumes" Jan 20 17:33:32 crc kubenswrapper[4558]: I0120 17:33:32.582150 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c686e4c2-68d0-4999-9078-90c70927d9ae" path="/var/lib/kubelet/pods/c686e4c2-68d0-4999-9078-90c70927d9ae/volumes" Jan 20 17:33:32 crc kubenswrapper[4558]: I0120 17:33:32.582711 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f9691b53-0a90-45ee-bd69-c75b1195eeb8" path="/var/lib/kubelet/pods/f9691b53-0a90-45ee-bd69-c75b1195eeb8/volumes" Jan 20 17:33:32 crc kubenswrapper[4558]: I0120 17:33:32.760786 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-0" event={"ID":"0696a635-5dc9-46e9-8502-47fab9ff761c","Type":"ContainerStarted","Data":"9f1296f437298973ce0b0219f1980b151cca677e25fb375f92cfbf98670c7983"} Jan 20 17:33:32 crc kubenswrapper[4558]: I0120 17:33:32.762047 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:33:32 crc kubenswrapper[4558]: I0120 17:33:32.767267 4558 generic.go:334] "Generic (PLEG): container finished" podID="e7c82345-58f6-4542-9483-604f1ad2b5f4" containerID="3907acb149bd72bc7bc6e07f835a9cc0d239aacbc88c4c4e9b53259322e14cc9" exitCode=143 Jan 20 17:33:32 crc kubenswrapper[4558]: I0120 17:33:32.767341 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-76cff77fc4-bkwrr" event={"ID":"e7c82345-58f6-4542-9483-604f1ad2b5f4","Type":"ContainerDied","Data":"3907acb149bd72bc7bc6e07f835a9cc0d239aacbc88c4c4e9b53259322e14cc9"} Jan 20 17:33:32 crc kubenswrapper[4558]: I0120 17:33:32.807702 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:33:32 crc kubenswrapper[4558]: I0120 17:33:32.849220 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/rabbitmq-server-0" podStartSLOduration=35.849198209 podStartE2EDuration="35.849198209s" podCreationTimestamp="2026-01-20 17:32:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:33:32.805426835 +0000 UTC m=+3106.565764803" watchObservedRunningTime="2026-01-20 17:33:32.849198209 +0000 UTC m=+3106.609536175" 
Jan 20 17:33:32 crc kubenswrapper[4558]: I0120 17:33:32.904414 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:33:33 crc kubenswrapper[4558]: I0120 17:33:33.086379 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-df78f4c65-4ns5m" Jan 20 17:33:33 crc kubenswrapper[4558]: I0120 17:33:33.151674 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-59d597f5bf-w7957"] Jan 20 17:33:33 crc kubenswrapper[4558]: I0120 17:33:33.151917 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-59d597f5bf-w7957" podUID="d41575b2-4a6b-4d95-b15d-4699f0193d4f" containerName="dnsmasq-dns" containerID="cri-o://a20edd87246e6f47b5bac9aeb82f24c7d1f1dec5608b67514645643cdf25e8de" gracePeriod=10 Jan 20 17:33:33 crc kubenswrapper[4558]: I0120 17:33:33.664509 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-59d597f5bf-w7957" Jan 20 17:33:33 crc kubenswrapper[4558]: I0120 17:33:33.770117 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d41575b2-4a6b-4d95-b15d-4699f0193d4f-dns-swift-storage-0\") pod \"d41575b2-4a6b-4d95-b15d-4699f0193d4f\" (UID: \"d41575b2-4a6b-4d95-b15d-4699f0193d4f\") " Jan 20 17:33:33 crc kubenswrapper[4558]: I0120 17:33:33.770340 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d41575b2-4a6b-4d95-b15d-4699f0193d4f-config\") pod \"d41575b2-4a6b-4d95-b15d-4699f0193d4f\" (UID: \"d41575b2-4a6b-4d95-b15d-4699f0193d4f\") " Jan 20 17:33:33 crc kubenswrapper[4558]: I0120 17:33:33.770386 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/d41575b2-4a6b-4d95-b15d-4699f0193d4f-dnsmasq-svc\") pod \"d41575b2-4a6b-4d95-b15d-4699f0193d4f\" (UID: \"d41575b2-4a6b-4d95-b15d-4699f0193d4f\") " Jan 20 17:33:33 crc kubenswrapper[4558]: I0120 17:33:33.770432 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tx84d\" (UniqueName: \"kubernetes.io/projected/d41575b2-4a6b-4d95-b15d-4699f0193d4f-kube-api-access-tx84d\") pod \"d41575b2-4a6b-4d95-b15d-4699f0193d4f\" (UID: \"d41575b2-4a6b-4d95-b15d-4699f0193d4f\") " Jan 20 17:33:33 crc kubenswrapper[4558]: I0120 17:33:33.801407 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d41575b2-4a6b-4d95-b15d-4699f0193d4f-kube-api-access-tx84d" (OuterVolumeSpecName: "kube-api-access-tx84d") pod "d41575b2-4a6b-4d95-b15d-4699f0193d4f" (UID: "d41575b2-4a6b-4d95-b15d-4699f0193d4f"). InnerVolumeSpecName "kube-api-access-tx84d". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:33:33 crc kubenswrapper[4558]: I0120 17:33:33.863447 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"097b903e-9e8a-464f-99e3-a69b68206465","Type":"ContainerStarted","Data":"9446c119ea59f7973c5f6771da039eff1a3de25e2aacb0e940e611c36a10a136"} Jan 20 17:33:33 crc kubenswrapper[4558]: I0120 17:33:33.863503 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"097b903e-9e8a-464f-99e3-a69b68206465","Type":"ContainerStarted","Data":"758bedcb04298972f20c5b8b77fb009bd0a4978f604a162ef844d7deeffb61b6"} Jan 20 17:33:33 crc kubenswrapper[4558]: I0120 17:33:33.864873 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:33:33 crc kubenswrapper[4558]: I0120 17:33:33.875379 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"dfc93436-8ed7-4388-bd32-347d897ac50a","Type":"ContainerStarted","Data":"fa9a54a48730a2a2a8909d086b14f29f0cc401aa57683c836b21a37c37c71a30"} Jan 20 17:33:33 crc kubenswrapper[4558]: I0120 17:33:33.875417 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"dfc93436-8ed7-4388-bd32-347d897ac50a","Type":"ContainerStarted","Data":"879a753f86794356931547df23026fc2d1f24346731a19f53a98e503e5bb22f3"} Jan 20 17:33:33 crc kubenswrapper[4558]: I0120 17:33:33.878081 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tx84d\" (UniqueName: \"kubernetes.io/projected/d41575b2-4a6b-4d95-b15d-4699f0193d4f-kube-api-access-tx84d\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:33 crc kubenswrapper[4558]: I0120 17:33:33.888874 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d41575b2-4a6b-4d95-b15d-4699f0193d4f-dnsmasq-svc" (OuterVolumeSpecName: "dnsmasq-svc") pod "d41575b2-4a6b-4d95-b15d-4699f0193d4f" (UID: "d41575b2-4a6b-4d95-b15d-4699f0193d4f"). InnerVolumeSpecName "dnsmasq-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:33:33 crc kubenswrapper[4558]: I0120 17:33:33.894623 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d41575b2-4a6b-4d95-b15d-4699f0193d4f-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "d41575b2-4a6b-4d95-b15d-4699f0193d4f" (UID: "d41575b2-4a6b-4d95-b15d-4699f0193d4f"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:33:33 crc kubenswrapper[4558]: I0120 17:33:33.900784 4558 generic.go:334] "Generic (PLEG): container finished" podID="d41575b2-4a6b-4d95-b15d-4699f0193d4f" containerID="a20edd87246e6f47b5bac9aeb82f24c7d1f1dec5608b67514645643cdf25e8de" exitCode=0 Jan 20 17:33:33 crc kubenswrapper[4558]: I0120 17:33:33.901497 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-59d597f5bf-w7957" Jan 20 17:33:33 crc kubenswrapper[4558]: I0120 17:33:33.901837 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-59d597f5bf-w7957" event={"ID":"d41575b2-4a6b-4d95-b15d-4699f0193d4f","Type":"ContainerDied","Data":"a20edd87246e6f47b5bac9aeb82f24c7d1f1dec5608b67514645643cdf25e8de"} Jan 20 17:33:33 crc kubenswrapper[4558]: I0120 17:33:33.901905 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-59d597f5bf-w7957" event={"ID":"d41575b2-4a6b-4d95-b15d-4699f0193d4f","Type":"ContainerDied","Data":"cbeefef59a12b3553955af3e7b1c8cdf9e902a35bb8b9b86902ec7460f4e1b10"} Jan 20 17:33:33 crc kubenswrapper[4558]: I0120 17:33:33.901938 4558 scope.go:117] "RemoveContainer" containerID="a20edd87246e6f47b5bac9aeb82f24c7d1f1dec5608b67514645643cdf25e8de" Jan 20 17:33:33 crc kubenswrapper[4558]: I0120 17:33:33.912780 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podStartSLOduration=2.912767557 podStartE2EDuration="2.912767557s" podCreationTimestamp="2026-01-20 17:33:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:33:33.910273848 +0000 UTC m=+3107.670611815" watchObservedRunningTime="2026-01-20 17:33:33.912767557 +0000 UTC m=+3107.673105514" Jan 20 17:33:33 crc kubenswrapper[4558]: I0120 17:33:33.934560 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d41575b2-4a6b-4d95-b15d-4699f0193d4f-config" (OuterVolumeSpecName: "config") pod "d41575b2-4a6b-4d95-b15d-4699f0193d4f" (UID: "d41575b2-4a6b-4d95-b15d-4699f0193d4f"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:33:33 crc kubenswrapper[4558]: I0120 17:33:33.968299 4558 scope.go:117] "RemoveContainer" containerID="8ca32d67a64ed7ab458dc4f478ee11665eebc4e2687ca7e2c89888f1694106fd" Jan 20 17:33:33 crc kubenswrapper[4558]: I0120 17:33:33.980876 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d41575b2-4a6b-4d95-b15d-4699f0193d4f-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:33 crc kubenswrapper[4558]: I0120 17:33:33.980902 4558 reconciler_common.go:293] "Volume detached for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/d41575b2-4a6b-4d95-b15d-4699f0193d4f-dnsmasq-svc\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:33 crc kubenswrapper[4558]: I0120 17:33:33.980912 4558 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d41575b2-4a6b-4d95-b15d-4699f0193d4f-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:34 crc kubenswrapper[4558]: I0120 17:33:34.013339 4558 scope.go:117] "RemoveContainer" containerID="a20edd87246e6f47b5bac9aeb82f24c7d1f1dec5608b67514645643cdf25e8de" Jan 20 17:33:34 crc kubenswrapper[4558]: E0120 17:33:34.015729 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a20edd87246e6f47b5bac9aeb82f24c7d1f1dec5608b67514645643cdf25e8de\": container with ID starting with a20edd87246e6f47b5bac9aeb82f24c7d1f1dec5608b67514645643cdf25e8de not found: ID does not exist" containerID="a20edd87246e6f47b5bac9aeb82f24c7d1f1dec5608b67514645643cdf25e8de" Jan 20 17:33:34 crc kubenswrapper[4558]: I0120 17:33:34.015770 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a20edd87246e6f47b5bac9aeb82f24c7d1f1dec5608b67514645643cdf25e8de"} err="failed to get container status \"a20edd87246e6f47b5bac9aeb82f24c7d1f1dec5608b67514645643cdf25e8de\": rpc error: code = NotFound desc = could not find container \"a20edd87246e6f47b5bac9aeb82f24c7d1f1dec5608b67514645643cdf25e8de\": container with ID starting with a20edd87246e6f47b5bac9aeb82f24c7d1f1dec5608b67514645643cdf25e8de not found: ID does not exist" Jan 20 17:33:34 crc kubenswrapper[4558]: I0120 17:33:34.015794 4558 scope.go:117] "RemoveContainer" containerID="8ca32d67a64ed7ab458dc4f478ee11665eebc4e2687ca7e2c89888f1694106fd" Jan 20 17:33:34 crc kubenswrapper[4558]: E0120 17:33:34.016766 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8ca32d67a64ed7ab458dc4f478ee11665eebc4e2687ca7e2c89888f1694106fd\": container with ID starting with 8ca32d67a64ed7ab458dc4f478ee11665eebc4e2687ca7e2c89888f1694106fd not found: ID does not exist" containerID="8ca32d67a64ed7ab458dc4f478ee11665eebc4e2687ca7e2c89888f1694106fd" Jan 20 17:33:34 crc kubenswrapper[4558]: I0120 17:33:34.016787 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8ca32d67a64ed7ab458dc4f478ee11665eebc4e2687ca7e2c89888f1694106fd"} err="failed to get container status \"8ca32d67a64ed7ab458dc4f478ee11665eebc4e2687ca7e2c89888f1694106fd\": rpc error: code = NotFound desc = could not find container \"8ca32d67a64ed7ab458dc4f478ee11665eebc4e2687ca7e2c89888f1694106fd\": container with ID starting with 8ca32d67a64ed7ab458dc4f478ee11665eebc4e2687ca7e2c89888f1694106fd not found: ID does not exist" Jan 20 17:33:34 crc kubenswrapper[4558]: I0120 
17:33:34.242947 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-scheduler-0" podStartSLOduration=3.242927703 podStartE2EDuration="3.242927703s" podCreationTimestamp="2026-01-20 17:33:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:33:33.962379202 +0000 UTC m=+3107.722717170" watchObservedRunningTime="2026-01-20 17:33:34.242927703 +0000 UTC m=+3108.003265669" Jan 20 17:33:34 crc kubenswrapper[4558]: I0120 17:33:34.265215 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-59d597f5bf-w7957"] Jan 20 17:33:34 crc kubenswrapper[4558]: I0120 17:33:34.273253 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-59d597f5bf-w7957"] Jan 20 17:33:34 crc kubenswrapper[4558]: I0120 17:33:34.522772 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:33:34 crc kubenswrapper[4558]: I0120 17:33:34.575382 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d41575b2-4a6b-4d95-b15d-4699f0193d4f" path="/var/lib/kubelet/pods/d41575b2-4a6b-4d95-b15d-4699f0193d4f/volumes" Jan 20 17:33:34 crc kubenswrapper[4558]: I0120 17:33:34.594908 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b115808a-3ae3-40c5-b330-f58aa6af7503-combined-ca-bundle\") pod \"b115808a-3ae3-40c5-b330-f58aa6af7503\" (UID: \"b115808a-3ae3-40c5-b330-f58aa6af7503\") " Jan 20 17:33:34 crc kubenswrapper[4558]: I0120 17:33:34.595051 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b115808a-3ae3-40c5-b330-f58aa6af7503-scripts\") pod \"b115808a-3ae3-40c5-b330-f58aa6af7503\" (UID: \"b115808a-3ae3-40c5-b330-f58aa6af7503\") " Jan 20 17:33:34 crc kubenswrapper[4558]: I0120 17:33:34.595119 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b115808a-3ae3-40c5-b330-f58aa6af7503-config-data\") pod \"b115808a-3ae3-40c5-b330-f58aa6af7503\" (UID: \"b115808a-3ae3-40c5-b330-f58aa6af7503\") " Jan 20 17:33:34 crc kubenswrapper[4558]: I0120 17:33:34.595207 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b115808a-3ae3-40c5-b330-f58aa6af7503-run-httpd\") pod \"b115808a-3ae3-40c5-b330-f58aa6af7503\" (UID: \"b115808a-3ae3-40c5-b330-f58aa6af7503\") " Jan 20 17:33:34 crc kubenswrapper[4558]: I0120 17:33:34.595251 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b115808a-3ae3-40c5-b330-f58aa6af7503-sg-core-conf-yaml\") pod \"b115808a-3ae3-40c5-b330-f58aa6af7503\" (UID: \"b115808a-3ae3-40c5-b330-f58aa6af7503\") " Jan 20 17:33:34 crc kubenswrapper[4558]: I0120 17:33:34.595289 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t4prt\" (UniqueName: \"kubernetes.io/projected/b115808a-3ae3-40c5-b330-f58aa6af7503-kube-api-access-t4prt\") pod \"b115808a-3ae3-40c5-b330-f58aa6af7503\" (UID: \"b115808a-3ae3-40c5-b330-f58aa6af7503\") " Jan 20 17:33:34 crc kubenswrapper[4558]: I0120 17:33:34.595342 4558 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b115808a-3ae3-40c5-b330-f58aa6af7503-log-httpd\") pod \"b115808a-3ae3-40c5-b330-f58aa6af7503\" (UID: \"b115808a-3ae3-40c5-b330-f58aa6af7503\") " Jan 20 17:33:34 crc kubenswrapper[4558]: I0120 17:33:34.597487 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b115808a-3ae3-40c5-b330-f58aa6af7503-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "b115808a-3ae3-40c5-b330-f58aa6af7503" (UID: "b115808a-3ae3-40c5-b330-f58aa6af7503"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:33:34 crc kubenswrapper[4558]: I0120 17:33:34.597972 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b115808a-3ae3-40c5-b330-f58aa6af7503-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "b115808a-3ae3-40c5-b330-f58aa6af7503" (UID: "b115808a-3ae3-40c5-b330-f58aa6af7503"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:33:34 crc kubenswrapper[4558]: I0120 17:33:34.605762 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b115808a-3ae3-40c5-b330-f58aa6af7503-kube-api-access-t4prt" (OuterVolumeSpecName: "kube-api-access-t4prt") pod "b115808a-3ae3-40c5-b330-f58aa6af7503" (UID: "b115808a-3ae3-40c5-b330-f58aa6af7503"). InnerVolumeSpecName "kube-api-access-t4prt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:33:34 crc kubenswrapper[4558]: I0120 17:33:34.627913 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b115808a-3ae3-40c5-b330-f58aa6af7503-scripts" (OuterVolumeSpecName: "scripts") pod "b115808a-3ae3-40c5-b330-f58aa6af7503" (UID: "b115808a-3ae3-40c5-b330-f58aa6af7503"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:34 crc kubenswrapper[4558]: I0120 17:33:34.640489 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b115808a-3ae3-40c5-b330-f58aa6af7503-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "b115808a-3ae3-40c5-b330-f58aa6af7503" (UID: "b115808a-3ae3-40c5-b330-f58aa6af7503"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:34 crc kubenswrapper[4558]: I0120 17:33:34.699774 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b115808a-3ae3-40c5-b330-f58aa6af7503-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:34 crc kubenswrapper[4558]: I0120 17:33:34.699803 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b115808a-3ae3-40c5-b330-f58aa6af7503-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:34 crc kubenswrapper[4558]: I0120 17:33:34.699818 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b115808a-3ae3-40c5-b330-f58aa6af7503-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:34 crc kubenswrapper[4558]: I0120 17:33:34.699827 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t4prt\" (UniqueName: \"kubernetes.io/projected/b115808a-3ae3-40c5-b330-f58aa6af7503-kube-api-access-t4prt\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:34 crc kubenswrapper[4558]: I0120 17:33:34.699837 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b115808a-3ae3-40c5-b330-f58aa6af7503-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:34 crc kubenswrapper[4558]: I0120 17:33:34.701407 4558 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","pod6dbc6996-80bf-4f86-9acf-f74dd04e51b1"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort pod6dbc6996-80bf-4f86-9acf-f74dd04e51b1] : Timed out while waiting for systemd to remove kubepods-besteffort-pod6dbc6996_80bf_4f86_9acf_f74dd04e51b1.slice" Jan 20 17:33:34 crc kubenswrapper[4558]: I0120 17:33:34.723235 4558 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","pod48ebccca-cc18-415f-b6f8-f301f99e5d3a"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort pod48ebccca-cc18-415f-b6f8-f301f99e5d3a] : Timed out while waiting for systemd to remove kubepods-besteffort-pod48ebccca_cc18_415f_b6f8_f301f99e5d3a.slice" Jan 20 17:33:34 crc kubenswrapper[4558]: I0120 17:33:34.729888 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b115808a-3ae3-40c5-b330-f58aa6af7503-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b115808a-3ae3-40c5-b330-f58aa6af7503" (UID: "b115808a-3ae3-40c5-b330-f58aa6af7503"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:34 crc kubenswrapper[4558]: I0120 17:33:34.742472 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b115808a-3ae3-40c5-b330-f58aa6af7503-config-data" (OuterVolumeSpecName: "config-data") pod "b115808a-3ae3-40c5-b330-f58aa6af7503" (UID: "b115808a-3ae3-40c5-b330-f58aa6af7503"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:34 crc kubenswrapper[4558]: I0120 17:33:34.801993 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b115808a-3ae3-40c5-b330-f58aa6af7503-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:34 crc kubenswrapper[4558]: I0120 17:33:34.802028 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b115808a-3ae3-40c5-b330-f58aa6af7503-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:34 crc kubenswrapper[4558]: I0120 17:33:34.936480 4558 generic.go:334] "Generic (PLEG): container finished" podID="b115808a-3ae3-40c5-b330-f58aa6af7503" containerID="902c7c53a3328dda0625dd77131ac2bb26f4fce8e93e1be078eaef8a855526bf" exitCode=137 Jan 20 17:33:34 crc kubenswrapper[4558]: I0120 17:33:34.936539 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"b115808a-3ae3-40c5-b330-f58aa6af7503","Type":"ContainerDied","Data":"902c7c53a3328dda0625dd77131ac2bb26f4fce8e93e1be078eaef8a855526bf"} Jan 20 17:33:34 crc kubenswrapper[4558]: I0120 17:33:34.936570 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"b115808a-3ae3-40c5-b330-f58aa6af7503","Type":"ContainerDied","Data":"be7d51cbfa258bd2e7eea0fc06c1a734c129b658ae00c98bb8b2246305f07912"} Jan 20 17:33:34 crc kubenswrapper[4558]: I0120 17:33:34.936590 4558 scope.go:117] "RemoveContainer" containerID="8f9b1bb1f9284398997331917a18ecd29e5f1f938a3f0cdf95397ef7ef755186" Jan 20 17:33:34 crc kubenswrapper[4558]: I0120 17:33:34.936698 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:33:34 crc kubenswrapper[4558]: I0120 17:33:34.959985 4558 generic.go:334] "Generic (PLEG): container finished" podID="d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a" containerID="616dfc84f48150feccd210dc50efb30772487af019214a6546143c12c3014223" exitCode=137 Jan 20 17:33:34 crc kubenswrapper[4558]: I0120 17:33:34.962739 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-5459fb5f8-h5bbp" event={"ID":"d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a","Type":"ContainerDied","Data":"616dfc84f48150feccd210dc50efb30772487af019214a6546143c12c3014223"} Jan 20 17:33:34 crc kubenswrapper[4558]: I0120 17:33:34.987275 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:34.991323 4558 scope.go:117] "RemoveContainer" containerID="9521f46d4a99703b801e57ee0c4402fc90e695fd2198c5972592e15eac8d40d6" Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.003214 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.035576 4558 scope.go:117] "RemoveContainer" containerID="0704db880c3d402aa539743da9a5f815da9504c0ee52f7941ada9ffd86b66fe0" Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.053587 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:33:35 crc kubenswrapper[4558]: E0120 17:33:35.057181 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b115808a-3ae3-40c5-b330-f58aa6af7503" containerName="ceilometer-notification-agent" Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.057427 4558 
state_mem.go:107] "Deleted CPUSet assignment" podUID="b115808a-3ae3-40c5-b330-f58aa6af7503" containerName="ceilometer-notification-agent" Jan 20 17:33:35 crc kubenswrapper[4558]: E0120 17:33:35.057581 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b115808a-3ae3-40c5-b330-f58aa6af7503" containerName="proxy-httpd" Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.057592 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b115808a-3ae3-40c5-b330-f58aa6af7503" containerName="proxy-httpd" Jan 20 17:33:35 crc kubenswrapper[4558]: E0120 17:33:35.057639 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b115808a-3ae3-40c5-b330-f58aa6af7503" containerName="sg-core" Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.057647 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b115808a-3ae3-40c5-b330-f58aa6af7503" containerName="sg-core" Jan 20 17:33:35 crc kubenswrapper[4558]: E0120 17:33:35.057823 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d41575b2-4a6b-4d95-b15d-4699f0193d4f" containerName="init" Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.057884 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d41575b2-4a6b-4d95-b15d-4699f0193d4f" containerName="init" Jan 20 17:33:35 crc kubenswrapper[4558]: E0120 17:33:35.057986 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d41575b2-4a6b-4d95-b15d-4699f0193d4f" containerName="dnsmasq-dns" Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.057997 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d41575b2-4a6b-4d95-b15d-4699f0193d4f" containerName="dnsmasq-dns" Jan 20 17:33:35 crc kubenswrapper[4558]: E0120 17:33:35.058273 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b115808a-3ae3-40c5-b330-f58aa6af7503" containerName="ceilometer-central-agent" Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.058316 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b115808a-3ae3-40c5-b330-f58aa6af7503" containerName="ceilometer-central-agent" Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.059187 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b115808a-3ae3-40c5-b330-f58aa6af7503" containerName="ceilometer-central-agent" Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.059214 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b115808a-3ae3-40c5-b330-f58aa6af7503" containerName="ceilometer-notification-agent" Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.059226 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b115808a-3ae3-40c5-b330-f58aa6af7503" containerName="sg-core" Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.059239 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d41575b2-4a6b-4d95-b15d-4699f0193d4f" containerName="dnsmasq-dns" Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.059570 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b115808a-3ae3-40c5-b330-f58aa6af7503" containerName="proxy-httpd" Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.065767 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.065884 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.069611 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.070304 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.106756 4558 scope.go:117] "RemoveContainer" containerID="902c7c53a3328dda0625dd77131ac2bb26f4fce8e93e1be078eaef8a855526bf" Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.130996 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-5459fb5f8-h5bbp" Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.136850 4558 scope.go:117] "RemoveContainer" containerID="8f9b1bb1f9284398997331917a18ecd29e5f1f938a3f0cdf95397ef7ef755186" Jan 20 17:33:35 crc kubenswrapper[4558]: E0120 17:33:35.137338 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8f9b1bb1f9284398997331917a18ecd29e5f1f938a3f0cdf95397ef7ef755186\": container with ID starting with 8f9b1bb1f9284398997331917a18ecd29e5f1f938a3f0cdf95397ef7ef755186 not found: ID does not exist" containerID="8f9b1bb1f9284398997331917a18ecd29e5f1f938a3f0cdf95397ef7ef755186" Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.137486 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8f9b1bb1f9284398997331917a18ecd29e5f1f938a3f0cdf95397ef7ef755186"} err="failed to get container status \"8f9b1bb1f9284398997331917a18ecd29e5f1f938a3f0cdf95397ef7ef755186\": rpc error: code = NotFound desc = could not find container \"8f9b1bb1f9284398997331917a18ecd29e5f1f938a3f0cdf95397ef7ef755186\": container with ID starting with 8f9b1bb1f9284398997331917a18ecd29e5f1f938a3f0cdf95397ef7ef755186 not found: ID does not exist" Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.137578 4558 scope.go:117] "RemoveContainer" containerID="9521f46d4a99703b801e57ee0c4402fc90e695fd2198c5972592e15eac8d40d6" Jan 20 17:33:35 crc kubenswrapper[4558]: E0120 17:33:35.137912 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9521f46d4a99703b801e57ee0c4402fc90e695fd2198c5972592e15eac8d40d6\": container with ID starting with 9521f46d4a99703b801e57ee0c4402fc90e695fd2198c5972592e15eac8d40d6 not found: ID does not exist" containerID="9521f46d4a99703b801e57ee0c4402fc90e695fd2198c5972592e15eac8d40d6" Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.138088 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9521f46d4a99703b801e57ee0c4402fc90e695fd2198c5972592e15eac8d40d6"} err="failed to get container status \"9521f46d4a99703b801e57ee0c4402fc90e695fd2198c5972592e15eac8d40d6\": rpc error: code = NotFound desc = could not find container \"9521f46d4a99703b801e57ee0c4402fc90e695fd2198c5972592e15eac8d40d6\": container with ID starting with 9521f46d4a99703b801e57ee0c4402fc90e695fd2198c5972592e15eac8d40d6 not found: ID does not exist" Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.138221 4558 scope.go:117] "RemoveContainer" containerID="0704db880c3d402aa539743da9a5f815da9504c0ee52f7941ada9ffd86b66fe0" Jan 20 17:33:35 crc kubenswrapper[4558]: E0120 17:33:35.138636 4558 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0704db880c3d402aa539743da9a5f815da9504c0ee52f7941ada9ffd86b66fe0\": container with ID starting with 0704db880c3d402aa539743da9a5f815da9504c0ee52f7941ada9ffd86b66fe0 not found: ID does not exist" containerID="0704db880c3d402aa539743da9a5f815da9504c0ee52f7941ada9ffd86b66fe0" Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.138798 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0704db880c3d402aa539743da9a5f815da9504c0ee52f7941ada9ffd86b66fe0"} err="failed to get container status \"0704db880c3d402aa539743da9a5f815da9504c0ee52f7941ada9ffd86b66fe0\": rpc error: code = NotFound desc = could not find container \"0704db880c3d402aa539743da9a5f815da9504c0ee52f7941ada9ffd86b66fe0\": container with ID starting with 0704db880c3d402aa539743da9a5f815da9504c0ee52f7941ada9ffd86b66fe0 not found: ID does not exist" Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.138931 4558 scope.go:117] "RemoveContainer" containerID="902c7c53a3328dda0625dd77131ac2bb26f4fce8e93e1be078eaef8a855526bf" Jan 20 17:33:35 crc kubenswrapper[4558]: E0120 17:33:35.139247 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"902c7c53a3328dda0625dd77131ac2bb26f4fce8e93e1be078eaef8a855526bf\": container with ID starting with 902c7c53a3328dda0625dd77131ac2bb26f4fce8e93e1be078eaef8a855526bf not found: ID does not exist" containerID="902c7c53a3328dda0625dd77131ac2bb26f4fce8e93e1be078eaef8a855526bf" Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.139278 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"902c7c53a3328dda0625dd77131ac2bb26f4fce8e93e1be078eaef8a855526bf"} err="failed to get container status \"902c7c53a3328dda0625dd77131ac2bb26f4fce8e93e1be078eaef8a855526bf\": rpc error: code = NotFound desc = could not find container \"902c7c53a3328dda0625dd77131ac2bb26f4fce8e93e1be078eaef8a855526bf\": container with ID starting with 902c7c53a3328dda0625dd77131ac2bb26f4fce8e93e1be078eaef8a855526bf not found: ID does not exist" Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.211158 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a-logs\") pod \"d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a\" (UID: \"d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a\") " Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.211307 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a-config-data-custom\") pod \"d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a\" (UID: \"d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a\") " Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.211337 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a-combined-ca-bundle\") pod \"d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a\" (UID: \"d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a\") " Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.211359 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a-config-data\") pod 
\"d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a\" (UID: \"d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a\") " Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.211471 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a-logs" (OuterVolumeSpecName: "logs") pod "d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a" (UID: "d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.211481 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-spt7n\" (UniqueName: \"kubernetes.io/projected/d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a-kube-api-access-spt7n\") pod \"d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a\" (UID: \"d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a\") " Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.211911 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/eca2edbe-72d9-45b0-9445-b734f97b7ece-log-httpd\") pod \"ceilometer-0\" (UID: \"eca2edbe-72d9-45b0-9445-b734f97b7ece\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.212022 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/eca2edbe-72d9-45b0-9445-b734f97b7ece-run-httpd\") pod \"ceilometer-0\" (UID: \"eca2edbe-72d9-45b0-9445-b734f97b7ece\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.212187 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eca2edbe-72d9-45b0-9445-b734f97b7ece-config-data\") pod \"ceilometer-0\" (UID: \"eca2edbe-72d9-45b0-9445-b734f97b7ece\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.212269 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eca2edbe-72d9-45b0-9445-b734f97b7ece-scripts\") pod \"ceilometer-0\" (UID: \"eca2edbe-72d9-45b0-9445-b734f97b7ece\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.212401 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wk88z\" (UniqueName: \"kubernetes.io/projected/eca2edbe-72d9-45b0-9445-b734f97b7ece-kube-api-access-wk88z\") pod \"ceilometer-0\" (UID: \"eca2edbe-72d9-45b0-9445-b734f97b7ece\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.212467 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/eca2edbe-72d9-45b0-9445-b734f97b7ece-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"eca2edbe-72d9-45b0-9445-b734f97b7ece\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.212488 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eca2edbe-72d9-45b0-9445-b734f97b7ece-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"eca2edbe-72d9-45b0-9445-b734f97b7ece\") " 
pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.212541 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.215570 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a-kube-api-access-spt7n" (OuterVolumeSpecName: "kube-api-access-spt7n") pod "d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a" (UID: "d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a"). InnerVolumeSpecName "kube-api-access-spt7n". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.215995 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a" (UID: "d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.241289 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a" (UID: "d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.252157 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a-config-data" (OuterVolumeSpecName: "config-data") pod "d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a" (UID: "d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.313960 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/eca2edbe-72d9-45b0-9445-b734f97b7ece-log-httpd\") pod \"ceilometer-0\" (UID: \"eca2edbe-72d9-45b0-9445-b734f97b7ece\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.314028 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/eca2edbe-72d9-45b0-9445-b734f97b7ece-run-httpd\") pod \"ceilometer-0\" (UID: \"eca2edbe-72d9-45b0-9445-b734f97b7ece\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.314102 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eca2edbe-72d9-45b0-9445-b734f97b7ece-config-data\") pod \"ceilometer-0\" (UID: \"eca2edbe-72d9-45b0-9445-b734f97b7ece\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.314150 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eca2edbe-72d9-45b0-9445-b734f97b7ece-scripts\") pod \"ceilometer-0\" (UID: \"eca2edbe-72d9-45b0-9445-b734f97b7ece\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.314246 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wk88z\" (UniqueName: \"kubernetes.io/projected/eca2edbe-72d9-45b0-9445-b734f97b7ece-kube-api-access-wk88z\") pod \"ceilometer-0\" (UID: \"eca2edbe-72d9-45b0-9445-b734f97b7ece\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.314292 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/eca2edbe-72d9-45b0-9445-b734f97b7ece-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"eca2edbe-72d9-45b0-9445-b734f97b7ece\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.314308 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eca2edbe-72d9-45b0-9445-b734f97b7ece-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"eca2edbe-72d9-45b0-9445-b734f97b7ece\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.314370 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.314382 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.314390 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.314400 4558 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-spt7n\" (UniqueName: \"kubernetes.io/projected/d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a-kube-api-access-spt7n\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.314769 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/eca2edbe-72d9-45b0-9445-b734f97b7ece-log-httpd\") pod \"ceilometer-0\" (UID: \"eca2edbe-72d9-45b0-9445-b734f97b7ece\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.315428 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/eca2edbe-72d9-45b0-9445-b734f97b7ece-run-httpd\") pod \"ceilometer-0\" (UID: \"eca2edbe-72d9-45b0-9445-b734f97b7ece\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.317835 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eca2edbe-72d9-45b0-9445-b734f97b7ece-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"eca2edbe-72d9-45b0-9445-b734f97b7ece\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.319478 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/eca2edbe-72d9-45b0-9445-b734f97b7ece-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"eca2edbe-72d9-45b0-9445-b734f97b7ece\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.319655 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eca2edbe-72d9-45b0-9445-b734f97b7ece-scripts\") pod \"ceilometer-0\" (UID: \"eca2edbe-72d9-45b0-9445-b734f97b7ece\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.320037 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eca2edbe-72d9-45b0-9445-b734f97b7ece-config-data\") pod \"ceilometer-0\" (UID: \"eca2edbe-72d9-45b0-9445-b734f97b7ece\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.332885 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wk88z\" (UniqueName: \"kubernetes.io/projected/eca2edbe-72d9-45b0-9445-b734f97b7ece-kube-api-access-wk88z\") pod \"ceilometer-0\" (UID: \"eca2edbe-72d9-45b0-9445-b734f97b7ece\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.392120 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.706613 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/placement-76cff77fc4-bkwrr" podUID="e7c82345-58f6-4542-9483-604f1ad2b5f4" containerName="placement-log" probeResult="failure" output="Get \"http://10.217.1.163:8778/\": dial tcp 10.217.1.163:8778: connect: connection refused" Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.707100 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/placement-76cff77fc4-bkwrr" podUID="e7c82345-58f6-4542-9483-604f1ad2b5f4" containerName="placement-api" probeResult="failure" output="Get \"http://10.217.1.163:8778/\": dial tcp 10.217.1.163:8778: connect: connection refused" Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.843474 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:33:35 crc kubenswrapper[4558]: W0120 17:33:35.853433 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podeca2edbe_72d9_45b0_9445_b734f97b7ece.slice/crio-34d466471f290283494de163bbbbb8d5d698c67d3e1b567efea8750207a0ad33 WatchSource:0}: Error finding container 34d466471f290283494de163bbbbb8d5d698c67d3e1b567efea8750207a0ad33: Status 404 returned error can't find the container with id 34d466471f290283494de163bbbbb8d5d698c67d3e1b567efea8750207a0ad33 Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.907945 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-76cff77fc4-bkwrr" Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.932552 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7c82345-58f6-4542-9483-604f1ad2b5f4-config-data\") pod \"e7c82345-58f6-4542-9483-604f1ad2b5f4\" (UID: \"e7c82345-58f6-4542-9483-604f1ad2b5f4\") " Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.932756 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wfb2k\" (UniqueName: \"kubernetes.io/projected/e7c82345-58f6-4542-9483-604f1ad2b5f4-kube-api-access-wfb2k\") pod \"e7c82345-58f6-4542-9483-604f1ad2b5f4\" (UID: \"e7c82345-58f6-4542-9483-604f1ad2b5f4\") " Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.934996 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7c82345-58f6-4542-9483-604f1ad2b5f4-combined-ca-bundle\") pod \"e7c82345-58f6-4542-9483-604f1ad2b5f4\" (UID: \"e7c82345-58f6-4542-9483-604f1ad2b5f4\") " Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.935458 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e7c82345-58f6-4542-9483-604f1ad2b5f4-logs\") pod \"e7c82345-58f6-4542-9483-604f1ad2b5f4\" (UID: \"e7c82345-58f6-4542-9483-604f1ad2b5f4\") " Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.935670 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e7c82345-58f6-4542-9483-604f1ad2b5f4-scripts\") pod \"e7c82345-58f6-4542-9483-604f1ad2b5f4\" (UID: \"e7c82345-58f6-4542-9483-604f1ad2b5f4\") " Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.936112 4558 operation_generator.go:803] 
UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e7c82345-58f6-4542-9483-604f1ad2b5f4-logs" (OuterVolumeSpecName: "logs") pod "e7c82345-58f6-4542-9483-604f1ad2b5f4" (UID: "e7c82345-58f6-4542-9483-604f1ad2b5f4"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.937391 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e7c82345-58f6-4542-9483-604f1ad2b5f4-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.938782 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7c82345-58f6-4542-9483-604f1ad2b5f4-kube-api-access-wfb2k" (OuterVolumeSpecName: "kube-api-access-wfb2k") pod "e7c82345-58f6-4542-9483-604f1ad2b5f4" (UID: "e7c82345-58f6-4542-9483-604f1ad2b5f4"). InnerVolumeSpecName "kube-api-access-wfb2k". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.963011 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7c82345-58f6-4542-9483-604f1ad2b5f4-scripts" (OuterVolumeSpecName: "scripts") pod "e7c82345-58f6-4542-9483-604f1ad2b5f4" (UID: "e7c82345-58f6-4542-9483-604f1ad2b5f4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.984749 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"eca2edbe-72d9-45b0-9445-b734f97b7ece","Type":"ContainerStarted","Data":"34d466471f290283494de163bbbbb8d5d698c67d3e1b567efea8750207a0ad33"} Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.986722 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7c82345-58f6-4542-9483-604f1ad2b5f4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e7c82345-58f6-4542-9483-604f1ad2b5f4" (UID: "e7c82345-58f6-4542-9483-604f1ad2b5f4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.987506 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-5459fb5f8-h5bbp" event={"ID":"d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a","Type":"ContainerDied","Data":"01ee3a8fed591849102b4d4c6d98d89fd578aa4be7ef209e7467b90ceed6447d"} Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.987582 4558 scope.go:117] "RemoveContainer" containerID="616dfc84f48150feccd210dc50efb30772487af019214a6546143c12c3014223" Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.987869 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-5459fb5f8-h5bbp" Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.992158 4558 generic.go:334] "Generic (PLEG): container finished" podID="e7c82345-58f6-4542-9483-604f1ad2b5f4" containerID="61313214dcb3a55bfb055d2bef3c402cad0651369d70f00a0842a2ed0b39da06" exitCode=0 Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.993009 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-76cff77fc4-bkwrr" Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.993397 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-76cff77fc4-bkwrr" event={"ID":"e7c82345-58f6-4542-9483-604f1ad2b5f4","Type":"ContainerDied","Data":"61313214dcb3a55bfb055d2bef3c402cad0651369d70f00a0842a2ed0b39da06"} Jan 20 17:33:35 crc kubenswrapper[4558]: I0120 17:33:35.993440 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-76cff77fc4-bkwrr" event={"ID":"e7c82345-58f6-4542-9483-604f1ad2b5f4","Type":"ContainerDied","Data":"22d159542849bbd0a9f6cc94b62f9e83aec785861d4e1104bbfeace836675a08"} Jan 20 17:33:36 crc kubenswrapper[4558]: I0120 17:33:36.015274 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7c82345-58f6-4542-9483-604f1ad2b5f4-config-data" (OuterVolumeSpecName: "config-data") pod "e7c82345-58f6-4542-9483-604f1ad2b5f4" (UID: "e7c82345-58f6-4542-9483-604f1ad2b5f4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:36 crc kubenswrapper[4558]: I0120 17:33:36.037326 4558 scope.go:117] "RemoveContainer" containerID="f06e9dc08abc874c209a5383b2994f653202e9f66eb7464ef36f77ca919a05a8" Jan 20 17:33:36 crc kubenswrapper[4558]: I0120 17:33:36.039756 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e7c82345-58f6-4542-9483-604f1ad2b5f4-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:36 crc kubenswrapper[4558]: I0120 17:33:36.039789 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e7c82345-58f6-4542-9483-604f1ad2b5f4-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:36 crc kubenswrapper[4558]: I0120 17:33:36.039800 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wfb2k\" (UniqueName: \"kubernetes.io/projected/e7c82345-58f6-4542-9483-604f1ad2b5f4-kube-api-access-wfb2k\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:36 crc kubenswrapper[4558]: I0120 17:33:36.039817 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7c82345-58f6-4542-9483-604f1ad2b5f4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:36 crc kubenswrapper[4558]: I0120 17:33:36.072735 4558 scope.go:117] "RemoveContainer" containerID="61313214dcb3a55bfb055d2bef3c402cad0651369d70f00a0842a2ed0b39da06" Jan 20 17:33:36 crc kubenswrapper[4558]: I0120 17:33:36.076004 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-5459fb5f8-h5bbp"] Jan 20 17:33:36 crc kubenswrapper[4558]: I0120 17:33:36.083512 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-5459fb5f8-h5bbp"] Jan 20 17:33:36 crc kubenswrapper[4558]: I0120 17:33:36.094530 4558 scope.go:117] "RemoveContainer" containerID="3907acb149bd72bc7bc6e07f835a9cc0d239aacbc88c4c4e9b53259322e14cc9" Jan 20 17:33:36 crc kubenswrapper[4558]: I0120 17:33:36.111889 4558 scope.go:117] "RemoveContainer" containerID="61313214dcb3a55bfb055d2bef3c402cad0651369d70f00a0842a2ed0b39da06" Jan 20 17:33:36 crc kubenswrapper[4558]: E0120 17:33:36.112504 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"61313214dcb3a55bfb055d2bef3c402cad0651369d70f00a0842a2ed0b39da06\": container with ID starting with 61313214dcb3a55bfb055d2bef3c402cad0651369d70f00a0842a2ed0b39da06 not found: ID does not exist" containerID="61313214dcb3a55bfb055d2bef3c402cad0651369d70f00a0842a2ed0b39da06" Jan 20 17:33:36 crc kubenswrapper[4558]: I0120 17:33:36.112568 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"61313214dcb3a55bfb055d2bef3c402cad0651369d70f00a0842a2ed0b39da06"} err="failed to get container status \"61313214dcb3a55bfb055d2bef3c402cad0651369d70f00a0842a2ed0b39da06\": rpc error: code = NotFound desc = could not find container \"61313214dcb3a55bfb055d2bef3c402cad0651369d70f00a0842a2ed0b39da06\": container with ID starting with 61313214dcb3a55bfb055d2bef3c402cad0651369d70f00a0842a2ed0b39da06 not found: ID does not exist" Jan 20 17:33:36 crc kubenswrapper[4558]: I0120 17:33:36.112598 4558 scope.go:117] "RemoveContainer" containerID="3907acb149bd72bc7bc6e07f835a9cc0d239aacbc88c4c4e9b53259322e14cc9" Jan 20 17:33:36 crc kubenswrapper[4558]: E0120 17:33:36.113020 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3907acb149bd72bc7bc6e07f835a9cc0d239aacbc88c4c4e9b53259322e14cc9\": container with ID starting with 3907acb149bd72bc7bc6e07f835a9cc0d239aacbc88c4c4e9b53259322e14cc9 not found: ID does not exist" containerID="3907acb149bd72bc7bc6e07f835a9cc0d239aacbc88c4c4e9b53259322e14cc9" Jan 20 17:33:36 crc kubenswrapper[4558]: I0120 17:33:36.113064 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3907acb149bd72bc7bc6e07f835a9cc0d239aacbc88c4c4e9b53259322e14cc9"} err="failed to get container status \"3907acb149bd72bc7bc6e07f835a9cc0d239aacbc88c4c4e9b53259322e14cc9\": rpc error: code = NotFound desc = could not find container \"3907acb149bd72bc7bc6e07f835a9cc0d239aacbc88c4c4e9b53259322e14cc9\": container with ID starting with 3907acb149bd72bc7bc6e07f835a9cc0d239aacbc88c4c4e9b53259322e14cc9 not found: ID does not exist" Jan 20 17:33:36 crc kubenswrapper[4558]: I0120 17:33:36.282455 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-777f6887b5-wwl56" Jan 20 17:33:36 crc kubenswrapper[4558]: I0120 17:33:36.344419 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e0294130-264e-4fa0-b336-72c350ea61cc-config-data-custom\") pod \"e0294130-264e-4fa0-b336-72c350ea61cc\" (UID: \"e0294130-264e-4fa0-b336-72c350ea61cc\") " Jan 20 17:33:36 crc kubenswrapper[4558]: I0120 17:33:36.344484 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e0294130-264e-4fa0-b336-72c350ea61cc-logs\") pod \"e0294130-264e-4fa0-b336-72c350ea61cc\" (UID: \"e0294130-264e-4fa0-b336-72c350ea61cc\") " Jan 20 17:33:36 crc kubenswrapper[4558]: I0120 17:33:36.344529 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0294130-264e-4fa0-b336-72c350ea61cc-combined-ca-bundle\") pod \"e0294130-264e-4fa0-b336-72c350ea61cc\" (UID: \"e0294130-264e-4fa0-b336-72c350ea61cc\") " Jan 20 17:33:36 crc kubenswrapper[4558]: I0120 17:33:36.344565 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0294130-264e-4fa0-b336-72c350ea61cc-config-data\") pod \"e0294130-264e-4fa0-b336-72c350ea61cc\" (UID: \"e0294130-264e-4fa0-b336-72c350ea61cc\") " Jan 20 17:33:36 crc kubenswrapper[4558]: I0120 17:33:36.344603 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2lpkg\" (UniqueName: \"kubernetes.io/projected/e0294130-264e-4fa0-b336-72c350ea61cc-kube-api-access-2lpkg\") pod \"e0294130-264e-4fa0-b336-72c350ea61cc\" (UID: \"e0294130-264e-4fa0-b336-72c350ea61cc\") " Jan 20 17:33:36 crc kubenswrapper[4558]: I0120 17:33:36.345822 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-76cff77fc4-bkwrr"] Jan 20 17:33:36 crc kubenswrapper[4558]: I0120 17:33:36.346225 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e0294130-264e-4fa0-b336-72c350ea61cc-logs" (OuterVolumeSpecName: "logs") pod "e0294130-264e-4fa0-b336-72c350ea61cc" (UID: "e0294130-264e-4fa0-b336-72c350ea61cc"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:33:36 crc kubenswrapper[4558]: I0120 17:33:36.350915 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e0294130-264e-4fa0-b336-72c350ea61cc-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "e0294130-264e-4fa0-b336-72c350ea61cc" (UID: "e0294130-264e-4fa0-b336-72c350ea61cc"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:36 crc kubenswrapper[4558]: I0120 17:33:36.350946 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e0294130-264e-4fa0-b336-72c350ea61cc-kube-api-access-2lpkg" (OuterVolumeSpecName: "kube-api-access-2lpkg") pod "e0294130-264e-4fa0-b336-72c350ea61cc" (UID: "e0294130-264e-4fa0-b336-72c350ea61cc"). InnerVolumeSpecName "kube-api-access-2lpkg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:33:36 crc kubenswrapper[4558]: I0120 17:33:36.353340 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/placement-76cff77fc4-bkwrr"] Jan 20 17:33:36 crc kubenswrapper[4558]: I0120 17:33:36.377071 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e0294130-264e-4fa0-b336-72c350ea61cc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e0294130-264e-4fa0-b336-72c350ea61cc" (UID: "e0294130-264e-4fa0-b336-72c350ea61cc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:36 crc kubenswrapper[4558]: I0120 17:33:36.384327 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e0294130-264e-4fa0-b336-72c350ea61cc-config-data" (OuterVolumeSpecName: "config-data") pod "e0294130-264e-4fa0-b336-72c350ea61cc" (UID: "e0294130-264e-4fa0-b336-72c350ea61cc"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:36 crc kubenswrapper[4558]: I0120 17:33:36.446352 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0294130-264e-4fa0-b336-72c350ea61cc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:36 crc kubenswrapper[4558]: I0120 17:33:36.446379 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0294130-264e-4fa0-b336-72c350ea61cc-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:36 crc kubenswrapper[4558]: I0120 17:33:36.446391 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2lpkg\" (UniqueName: \"kubernetes.io/projected/e0294130-264e-4fa0-b336-72c350ea61cc-kube-api-access-2lpkg\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:36 crc kubenswrapper[4558]: I0120 17:33:36.446401 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e0294130-264e-4fa0-b336-72c350ea61cc-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:36 crc kubenswrapper[4558]: I0120 17:33:36.446410 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e0294130-264e-4fa0-b336-72c350ea61cc-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:36 crc kubenswrapper[4558]: I0120 17:33:36.578570 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b115808a-3ae3-40c5-b330-f58aa6af7503" path="/var/lib/kubelet/pods/b115808a-3ae3-40c5-b330-f58aa6af7503/volumes" Jan 20 17:33:36 crc kubenswrapper[4558]: I0120 17:33:36.579363 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a" path="/var/lib/kubelet/pods/d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a/volumes" Jan 20 17:33:36 crc kubenswrapper[4558]: I0120 17:33:36.580482 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7c82345-58f6-4542-9483-604f1ad2b5f4" path="/var/lib/kubelet/pods/e7c82345-58f6-4542-9483-604f1ad2b5f4/volumes" Jan 20 17:33:37 crc kubenswrapper[4558]: I0120 17:33:37.006484 4558 generic.go:334] "Generic (PLEG): container finished" podID="e0294130-264e-4fa0-b336-72c350ea61cc" containerID="c084c5e8cdb480d00ce491d7dbdb0d773ed2ee7cdfeacd273ca45bb3c51772de" exitCode=137 Jan 20 17:33:37 crc kubenswrapper[4558]: I0120 17:33:37.006557 4558 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openstack-kuttl-tests/barbican-worker-777f6887b5-wwl56" event={"ID":"e0294130-264e-4fa0-b336-72c350ea61cc","Type":"ContainerDied","Data":"c084c5e8cdb480d00ce491d7dbdb0d773ed2ee7cdfeacd273ca45bb3c51772de"} Jan 20 17:33:37 crc kubenswrapper[4558]: I0120 17:33:37.006592 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-777f6887b5-wwl56" Jan 20 17:33:37 crc kubenswrapper[4558]: I0120 17:33:37.006631 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-777f6887b5-wwl56" event={"ID":"e0294130-264e-4fa0-b336-72c350ea61cc","Type":"ContainerDied","Data":"c9d625c04190b37538111c82cbf9d2725c6c8892cfd1a4af3f02910be0e108ed"} Jan 20 17:33:37 crc kubenswrapper[4558]: I0120 17:33:37.006661 4558 scope.go:117] "RemoveContainer" containerID="c084c5e8cdb480d00ce491d7dbdb0d773ed2ee7cdfeacd273ca45bb3c51772de" Jan 20 17:33:37 crc kubenswrapper[4558]: I0120 17:33:37.011519 4558 generic.go:334] "Generic (PLEG): container finished" podID="dfc93436-8ed7-4388-bd32-347d897ac50a" containerID="fa9a54a48730a2a2a8909d086b14f29f0cc401aa57683c836b21a37c37c71a30" exitCode=1 Jan 20 17:33:37 crc kubenswrapper[4558]: I0120 17:33:37.011747 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"dfc93436-8ed7-4388-bd32-347d897ac50a","Type":"ContainerDied","Data":"fa9a54a48730a2a2a8909d086b14f29f0cc401aa57683c836b21a37c37c71a30"} Jan 20 17:33:37 crc kubenswrapper[4558]: I0120 17:33:37.013107 4558 scope.go:117] "RemoveContainer" containerID="fa9a54a48730a2a2a8909d086b14f29f0cc401aa57683c836b21a37c37c71a30" Jan 20 17:33:37 crc kubenswrapper[4558]: I0120 17:33:37.021003 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"eca2edbe-72d9-45b0-9445-b734f97b7ece","Type":"ContainerStarted","Data":"91409ae0d9ca451c965331387d6a76c89a4420be7021b37195b0d5002fe4ec86"} Jan 20 17:33:37 crc kubenswrapper[4558]: I0120 17:33:37.029549 4558 generic.go:334] "Generic (PLEG): container finished" podID="097b903e-9e8a-464f-99e3-a69b68206465" containerID="9446c119ea59f7973c5f6771da039eff1a3de25e2aacb0e940e611c36a10a136" exitCode=1 Jan 20 17:33:37 crc kubenswrapper[4558]: I0120 17:33:37.029613 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"097b903e-9e8a-464f-99e3-a69b68206465","Type":"ContainerDied","Data":"9446c119ea59f7973c5f6771da039eff1a3de25e2aacb0e940e611c36a10a136"} Jan 20 17:33:37 crc kubenswrapper[4558]: I0120 17:33:37.030427 4558 scope.go:117] "RemoveContainer" containerID="9446c119ea59f7973c5f6771da039eff1a3de25e2aacb0e940e611c36a10a136" Jan 20 17:33:37 crc kubenswrapper[4558]: I0120 17:33:37.034743 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-worker-777f6887b5-wwl56"] Jan 20 17:33:37 crc kubenswrapper[4558]: I0120 17:33:37.041667 4558 scope.go:117] "RemoveContainer" containerID="62f48a277d2d3f69fc053a84173286db65e766dd294fabf68464419b15e8611b" Jan 20 17:33:37 crc kubenswrapper[4558]: I0120 17:33:37.043659 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-worker-777f6887b5-wwl56"] Jan 20 17:33:37 crc kubenswrapper[4558]: I0120 17:33:37.115292 4558 scope.go:117] "RemoveContainer" containerID="c084c5e8cdb480d00ce491d7dbdb0d773ed2ee7cdfeacd273ca45bb3c51772de" Jan 20 17:33:37 crc kubenswrapper[4558]: E0120 17:33:37.115823 4558 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c084c5e8cdb480d00ce491d7dbdb0d773ed2ee7cdfeacd273ca45bb3c51772de\": container with ID starting with c084c5e8cdb480d00ce491d7dbdb0d773ed2ee7cdfeacd273ca45bb3c51772de not found: ID does not exist" containerID="c084c5e8cdb480d00ce491d7dbdb0d773ed2ee7cdfeacd273ca45bb3c51772de" Jan 20 17:33:37 crc kubenswrapper[4558]: I0120 17:33:37.115863 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c084c5e8cdb480d00ce491d7dbdb0d773ed2ee7cdfeacd273ca45bb3c51772de"} err="failed to get container status \"c084c5e8cdb480d00ce491d7dbdb0d773ed2ee7cdfeacd273ca45bb3c51772de\": rpc error: code = NotFound desc = could not find container \"c084c5e8cdb480d00ce491d7dbdb0d773ed2ee7cdfeacd273ca45bb3c51772de\": container with ID starting with c084c5e8cdb480d00ce491d7dbdb0d773ed2ee7cdfeacd273ca45bb3c51772de not found: ID does not exist" Jan 20 17:33:37 crc kubenswrapper[4558]: I0120 17:33:37.115896 4558 scope.go:117] "RemoveContainer" containerID="62f48a277d2d3f69fc053a84173286db65e766dd294fabf68464419b15e8611b" Jan 20 17:33:37 crc kubenswrapper[4558]: E0120 17:33:37.116333 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"62f48a277d2d3f69fc053a84173286db65e766dd294fabf68464419b15e8611b\": container with ID starting with 62f48a277d2d3f69fc053a84173286db65e766dd294fabf68464419b15e8611b not found: ID does not exist" containerID="62f48a277d2d3f69fc053a84173286db65e766dd294fabf68464419b15e8611b" Jan 20 17:33:37 crc kubenswrapper[4558]: I0120 17:33:37.116366 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"62f48a277d2d3f69fc053a84173286db65e766dd294fabf68464419b15e8611b"} err="failed to get container status \"62f48a277d2d3f69fc053a84173286db65e766dd294fabf68464419b15e8611b\": rpc error: code = NotFound desc = could not find container \"62f48a277d2d3f69fc053a84173286db65e766dd294fabf68464419b15e8611b\": container with ID starting with 62f48a277d2d3f69fc053a84173286db65e766dd294fabf68464419b15e8611b not found: ID does not exist" Jan 20 17:33:37 crc kubenswrapper[4558]: I0120 17:33:37.363441 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:33:38 crc kubenswrapper[4558]: I0120 17:33:38.042414 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"eca2edbe-72d9-45b0-9445-b734f97b7ece","Type":"ContainerStarted","Data":"bb94cc5337340b1860cfc1f60ccc29ac0f8630ee7dc24e80e7ce2178442eadda"} Jan 20 17:33:38 crc kubenswrapper[4558]: I0120 17:33:38.046803 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"097b903e-9e8a-464f-99e3-a69b68206465","Type":"ContainerStarted","Data":"c8737ad851fee617d21ad48dbaf6b3a6790fc1895c0c097d362b21aaec9de85b"} Jan 20 17:33:38 crc kubenswrapper[4558]: I0120 17:33:38.047342 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:33:38 crc kubenswrapper[4558]: I0120 17:33:38.050224 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"dfc93436-8ed7-4388-bd32-347d897ac50a","Type":"ContainerStarted","Data":"9b52c657a24907f806e9541bf4f5890d8aeebe08d47a2a0bad6a5771f6287d48"} Jan 20 17:33:38 crc 
kubenswrapper[4558]: I0120 17:33:38.575049 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e0294130-264e-4fa0-b336-72c350ea61cc" path="/var/lib/kubelet/pods/e0294130-264e-4fa0-b336-72c350ea61cc/volumes" Jan 20 17:33:39 crc kubenswrapper[4558]: I0120 17:33:39.062237 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"eca2edbe-72d9-45b0-9445-b734f97b7ece","Type":"ContainerStarted","Data":"279cc96ec167325242d88523084a02ee78c5305d2befa02401033a29b1ae9959"} Jan 20 17:33:39 crc kubenswrapper[4558]: I0120 17:33:39.738936 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/neutron-5b4c8486cd-bh7qx" Jan 20 17:33:39 crc kubenswrapper[4558]: I0120 17:33:39.813410 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-5878f88784-4j5nq"] Jan 20 17:33:39 crc kubenswrapper[4558]: I0120 17:33:39.813909 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-5878f88784-4j5nq" podUID="a289616c-ea44-447c-a263-4744c01d5b5e" containerName="neutron-api" containerID="cri-o://2eba8336553f4bec8ad0739564c6671e683452b7d048a156e1a6e3cf92ae581f" gracePeriod=30 Jan 20 17:33:39 crc kubenswrapper[4558]: I0120 17:33:39.814042 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-5878f88784-4j5nq" podUID="a289616c-ea44-447c-a263-4744c01d5b5e" containerName="neutron-httpd" containerID="cri-o://f8d054c8c4c51555f3cda668c1ab5da9f1dcb5f7d87997a3ed3c22db03630067" gracePeriod=30 Jan 20 17:33:40 crc kubenswrapper[4558]: I0120 17:33:40.076972 4558 generic.go:334] "Generic (PLEG): container finished" podID="dfc93436-8ed7-4388-bd32-347d897ac50a" containerID="9b52c657a24907f806e9541bf4f5890d8aeebe08d47a2a0bad6a5771f6287d48" exitCode=1 Jan 20 17:33:40 crc kubenswrapper[4558]: I0120 17:33:40.077019 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"dfc93436-8ed7-4388-bd32-347d897ac50a","Type":"ContainerDied","Data":"9b52c657a24907f806e9541bf4f5890d8aeebe08d47a2a0bad6a5771f6287d48"} Jan 20 17:33:40 crc kubenswrapper[4558]: I0120 17:33:40.077082 4558 scope.go:117] "RemoveContainer" containerID="fa9a54a48730a2a2a8909d086b14f29f0cc401aa57683c836b21a37c37c71a30" Jan 20 17:33:40 crc kubenswrapper[4558]: I0120 17:33:40.077568 4558 scope.go:117] "RemoveContainer" containerID="9b52c657a24907f806e9541bf4f5890d8aeebe08d47a2a0bad6a5771f6287d48" Jan 20 17:33:40 crc kubenswrapper[4558]: E0120 17:33:40.077910 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-scheduler-scheduler\" with CrashLoopBackOff: \"back-off 10s restarting failed container=nova-scheduler-scheduler pod=nova-scheduler-0_openstack-kuttl-tests(dfc93436-8ed7-4388-bd32-347d897ac50a)\"" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="dfc93436-8ed7-4388-bd32-347d897ac50a" Jan 20 17:33:40 crc kubenswrapper[4558]: I0120 17:33:40.081430 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"eca2edbe-72d9-45b0-9445-b734f97b7ece","Type":"ContainerStarted","Data":"9a7331e32c5060ace879fa32c1a382d3ac8d9f2cefae695e7da910a50b371ff9"} Jan 20 17:33:40 crc kubenswrapper[4558]: I0120 17:33:40.081914 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:33:40 crc kubenswrapper[4558]: I0120 
17:33:40.084183 4558 generic.go:334] "Generic (PLEG): container finished" podID="a289616c-ea44-447c-a263-4744c01d5b5e" containerID="f8d054c8c4c51555f3cda668c1ab5da9f1dcb5f7d87997a3ed3c22db03630067" exitCode=0 Jan 20 17:33:40 crc kubenswrapper[4558]: I0120 17:33:40.084243 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-5878f88784-4j5nq" event={"ID":"a289616c-ea44-447c-a263-4744c01d5b5e","Type":"ContainerDied","Data":"f8d054c8c4c51555f3cda668c1ab5da9f1dcb5f7d87997a3ed3c22db03630067"} Jan 20 17:33:40 crc kubenswrapper[4558]: I0120 17:33:40.086565 4558 generic.go:334] "Generic (PLEG): container finished" podID="097b903e-9e8a-464f-99e3-a69b68206465" containerID="c8737ad851fee617d21ad48dbaf6b3a6790fc1895c0c097d362b21aaec9de85b" exitCode=1 Jan 20 17:33:40 crc kubenswrapper[4558]: I0120 17:33:40.086597 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"097b903e-9e8a-464f-99e3-a69b68206465","Type":"ContainerDied","Data":"c8737ad851fee617d21ad48dbaf6b3a6790fc1895c0c097d362b21aaec9de85b"} Jan 20 17:33:40 crc kubenswrapper[4558]: I0120 17:33:40.086944 4558 scope.go:117] "RemoveContainer" containerID="c8737ad851fee617d21ad48dbaf6b3a6790fc1895c0c097d362b21aaec9de85b" Jan 20 17:33:40 crc kubenswrapper[4558]: E0120 17:33:40.087152 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-cell1-conductor-conductor\" with CrashLoopBackOff: \"back-off 10s restarting failed container=nova-cell1-conductor-conductor pod=nova-cell1-conductor-0_openstack-kuttl-tests(097b903e-9e8a-464f-99e3-a69b68206465)\"" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podUID="097b903e-9e8a-464f-99e3-a69b68206465" Jan 20 17:33:40 crc kubenswrapper[4558]: I0120 17:33:40.126741 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=2.210825495 podStartE2EDuration="6.126713311s" podCreationTimestamp="2026-01-20 17:33:34 +0000 UTC" firstStartedPulling="2026-01-20 17:33:35.857497215 +0000 UTC m=+3109.617835182" lastFinishedPulling="2026-01-20 17:33:39.77338503 +0000 UTC m=+3113.533722998" observedRunningTime="2026-01-20 17:33:40.121824319 +0000 UTC m=+3113.882162285" watchObservedRunningTime="2026-01-20 17:33:40.126713311 +0000 UTC m=+3113.887051278" Jan 20 17:33:40 crc kubenswrapper[4558]: I0120 17:33:40.137067 4558 scope.go:117] "RemoveContainer" containerID="9446c119ea59f7973c5f6771da039eff1a3de25e2aacb0e940e611c36a10a136" Jan 20 17:33:40 crc kubenswrapper[4558]: I0120 17:33:40.616776 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/neutron-5878f88784-4j5nq" podUID="a289616c-ea44-447c-a263-4744c01d5b5e" containerName="neutron-httpd" probeResult="failure" output="Get \"http://10.217.1.174:9696/\": dial tcp 10.217.1.174:9696: connect: connection refused" Jan 20 17:33:41 crc kubenswrapper[4558]: I0120 17:33:41.307542 4558 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:33:41 crc kubenswrapper[4558]: I0120 17:33:41.308671 4558 scope.go:117] "RemoveContainer" containerID="c8737ad851fee617d21ad48dbaf6b3a6790fc1895c0c097d362b21aaec9de85b" Jan 20 17:33:41 crc kubenswrapper[4558]: E0120 17:33:41.309132 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-cell1-conductor-conductor\" with CrashLoopBackOff: \"back-off 10s restarting failed 
container=nova-cell1-conductor-conductor pod=nova-cell1-conductor-0_openstack-kuttl-tests(097b903e-9e8a-464f-99e3-a69b68206465)\"" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podUID="097b903e-9e8a-464f-99e3-a69b68206465" Jan 20 17:33:42 crc kubenswrapper[4558]: I0120 17:33:42.053264 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/keystone-c7b8d4458-7ddjq" Jan 20 17:33:42 crc kubenswrapper[4558]: I0120 17:33:42.120315 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-55b7797494-6bq45"] Jan 20 17:33:42 crc kubenswrapper[4558]: I0120 17:33:42.120510 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/keystone-55b7797494-6bq45" podUID="bb135ff5-5284-4558-b549-c59fee8cc0e9" containerName="keystone-api" containerID="cri-o://9842d6850ee563852429054c791f3062cb318e3f1aae2137f5ef5dfa0598575e" gracePeriod=30 Jan 20 17:33:42 crc kubenswrapper[4558]: I0120 17:33:42.363507 4558 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:33:42 crc kubenswrapper[4558]: I0120 17:33:42.363563 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:33:42 crc kubenswrapper[4558]: I0120 17:33:42.363576 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:33:42 crc kubenswrapper[4558]: I0120 17:33:42.363591 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:33:42 crc kubenswrapper[4558]: I0120 17:33:42.364575 4558 scope.go:117] "RemoveContainer" containerID="9b52c657a24907f806e9541bf4f5890d8aeebe08d47a2a0bad6a5771f6287d48" Jan 20 17:33:42 crc kubenswrapper[4558]: E0120 17:33:42.364846 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-scheduler-scheduler\" with CrashLoopBackOff: \"back-off 10s restarting failed container=nova-scheduler-scheduler pod=nova-scheduler-0_openstack-kuttl-tests(dfc93436-8ed7-4388-bd32-347d897ac50a)\"" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="dfc93436-8ed7-4388-bd32-347d897ac50a" Jan 20 17:33:44 crc kubenswrapper[4558]: I0120 17:33:44.569980 4558 scope.go:117] "RemoveContainer" containerID="f275e9f5de330b3c79316d5871106c6bcf90b698e0744c5beb28da45c336b36d" Jan 20 17:33:44 crc kubenswrapper[4558]: E0120 17:33:44.570974 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:33:45 crc kubenswrapper[4558]: I0120 17:33:45.284040 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/keystone-55b7797494-6bq45" podUID="bb135ff5-5284-4558-b549-c59fee8cc0e9" containerName="keystone-api" probeResult="failure" output="Get \"http://10.217.1.175:5000/v3\": read tcp 10.217.0.2:54160->10.217.1.175:5000: read: connection reset by peer" Jan 20 17:33:45 crc kubenswrapper[4558]: I0120 17:33:45.563572 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 
20 17:33:45 crc kubenswrapper[4558]: I0120 17:33:45.572918 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:33:45 crc kubenswrapper[4558]: I0120 17:33:45.573082 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="37fd3160-ee6a-41a1-9c4b-260e72133f59" containerName="nova-cell0-conductor-conductor" containerID="cri-o://ca483fd25cf153fcad14ca9739754a6effd1115ea5c7b991da3c23aa4caa603c" gracePeriod=30 Jan 20 17:33:45 crc kubenswrapper[4558]: I0120 17:33:45.580227 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:33:45 crc kubenswrapper[4558]: I0120 17:33:45.580541 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="6a66de52-5f9c-46c6-aa54-8a0ba3288230" containerName="nova-api-log" containerID="cri-o://1099f7ebf3750cf92ad4dcca603aaebf0e6a74e93107a073aead4aee1f85dd17" gracePeriod=30 Jan 20 17:33:45 crc kubenswrapper[4558]: I0120 17:33:45.580754 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="6a66de52-5f9c-46c6-aa54-8a0ba3288230" containerName="nova-api-api" containerID="cri-o://915a019c132b2d4f4aeb77f2aacee6bc94ce4e5d6b372707956a6621db84cd38" gracePeriod=30 Jan 20 17:33:45 crc kubenswrapper[4558]: I0120 17:33:45.585866 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:33:45 crc kubenswrapper[4558]: I0120 17:33:45.586077 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" podUID="760135a9-d7f7-456e-b7d6-9f2e7730e382" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://c6654ae1097dc1779180cf64c658db2d6969ccaab32c2416576f239f35513d6a" gracePeriod=30 Jan 20 17:33:45 crc kubenswrapper[4558]: I0120 17:33:45.593616 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:33:45 crc kubenswrapper[4558]: I0120 17:33:45.593772 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="8fb66f31-6ee9-4ad8-8bd2-14e489b98591" containerName="nova-metadata-log" containerID="cri-o://a9c596fe297f41ede3bfa332670ccef94114b4b90d59c0d5cf0160934a47ac18" gracePeriod=30 Jan 20 17:33:45 crc kubenswrapper[4558]: I0120 17:33:45.593879 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="8fb66f31-6ee9-4ad8-8bd2-14e489b98591" containerName="nova-metadata-metadata" containerID="cri-o://642f551615ce48a73abe795ca46d2227e4e6a93313045edf6c62bffaecdc9441" gracePeriod=30 Jan 20 17:33:45 crc kubenswrapper[4558]: I0120 17:33:45.708564 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-55b7797494-6bq45" Jan 20 17:33:45 crc kubenswrapper[4558]: I0120 17:33:45.743424 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:45.868334 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/bb135ff5-5284-4558-b549-c59fee8cc0e9-fernet-keys\") pod \"bb135ff5-5284-4558-b549-c59fee8cc0e9\" (UID: \"bb135ff5-5284-4558-b549-c59fee8cc0e9\") " Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:45.868717 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bb135ff5-5284-4558-b549-c59fee8cc0e9-config-data\") pod \"bb135ff5-5284-4558-b549-c59fee8cc0e9\" (UID: \"bb135ff5-5284-4558-b549-c59fee8cc0e9\") " Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:45.868760 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/bb135ff5-5284-4558-b549-c59fee8cc0e9-credential-keys\") pod \"bb135ff5-5284-4558-b549-c59fee8cc0e9\" (UID: \"bb135ff5-5284-4558-b549-c59fee8cc0e9\") " Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:45.868790 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2kxc2\" (UniqueName: \"kubernetes.io/projected/bb135ff5-5284-4558-b549-c59fee8cc0e9-kube-api-access-2kxc2\") pod \"bb135ff5-5284-4558-b549-c59fee8cc0e9\" (UID: \"bb135ff5-5284-4558-b549-c59fee8cc0e9\") " Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:45.868862 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bb135ff5-5284-4558-b549-c59fee8cc0e9-combined-ca-bundle\") pod \"bb135ff5-5284-4558-b549-c59fee8cc0e9\" (UID: \"bb135ff5-5284-4558-b549-c59fee8cc0e9\") " Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:45.868996 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bb135ff5-5284-4558-b549-c59fee8cc0e9-scripts\") pod \"bb135ff5-5284-4558-b549-c59fee8cc0e9\" (UID: \"bb135ff5-5284-4558-b549-c59fee8cc0e9\") " Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:45.873431 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bb135ff5-5284-4558-b549-c59fee8cc0e9-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "bb135ff5-5284-4558-b549-c59fee8cc0e9" (UID: "bb135ff5-5284-4558-b549-c59fee8cc0e9"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:45.873840 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bb135ff5-5284-4558-b549-c59fee8cc0e9-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "bb135ff5-5284-4558-b549-c59fee8cc0e9" (UID: "bb135ff5-5284-4558-b549-c59fee8cc0e9"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:45.880440 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bb135ff5-5284-4558-b549-c59fee8cc0e9-scripts" (OuterVolumeSpecName: "scripts") pod "bb135ff5-5284-4558-b549-c59fee8cc0e9" (UID: "bb135ff5-5284-4558-b549-c59fee8cc0e9"). 
InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:45.892941 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bb135ff5-5284-4558-b549-c59fee8cc0e9-kube-api-access-2kxc2" (OuterVolumeSpecName: "kube-api-access-2kxc2") pod "bb135ff5-5284-4558-b549-c59fee8cc0e9" (UID: "bb135ff5-5284-4558-b549-c59fee8cc0e9"). InnerVolumeSpecName "kube-api-access-2kxc2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:45.896437 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bb135ff5-5284-4558-b549-c59fee8cc0e9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bb135ff5-5284-4558-b549-c59fee8cc0e9" (UID: "bb135ff5-5284-4558-b549-c59fee8cc0e9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:45.913047 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bb135ff5-5284-4558-b549-c59fee8cc0e9-config-data" (OuterVolumeSpecName: "config-data") pod "bb135ff5-5284-4558-b549-c59fee8cc0e9" (UID: "bb135ff5-5284-4558-b549-c59fee8cc0e9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:45.924833 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:45.971149 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dfc93436-8ed7-4388-bd32-347d897ac50a-config-data\") pod \"dfc93436-8ed7-4388-bd32-347d897ac50a\" (UID: \"dfc93436-8ed7-4388-bd32-347d897ac50a\") " Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:45.971304 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c4kvd\" (UniqueName: \"kubernetes.io/projected/dfc93436-8ed7-4388-bd32-347d897ac50a-kube-api-access-c4kvd\") pod \"dfc93436-8ed7-4388-bd32-347d897ac50a\" (UID: \"dfc93436-8ed7-4388-bd32-347d897ac50a\") " Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:45.971359 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dfc93436-8ed7-4388-bd32-347d897ac50a-combined-ca-bundle\") pod \"dfc93436-8ed7-4388-bd32-347d897ac50a\" (UID: \"dfc93436-8ed7-4388-bd32-347d897ac50a\") " Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:45.971878 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bb135ff5-5284-4558-b549-c59fee8cc0e9-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:45.971891 4558 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/bb135ff5-5284-4558-b549-c59fee8cc0e9-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:45.971900 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bb135ff5-5284-4558-b549-c59fee8cc0e9-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:45.971908 4558 reconciler_common.go:293] "Volume 
detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/bb135ff5-5284-4558-b549-c59fee8cc0e9-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:45.971920 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2kxc2\" (UniqueName: \"kubernetes.io/projected/bb135ff5-5284-4558-b549-c59fee8cc0e9-kube-api-access-2kxc2\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:45.971930 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bb135ff5-5284-4558-b549-c59fee8cc0e9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:45.974985 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dfc93436-8ed7-4388-bd32-347d897ac50a-kube-api-access-c4kvd" (OuterVolumeSpecName: "kube-api-access-c4kvd") pod "dfc93436-8ed7-4388-bd32-347d897ac50a" (UID: "dfc93436-8ed7-4388-bd32-347d897ac50a"). InnerVolumeSpecName "kube-api-access-c4kvd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:45.992642 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dfc93436-8ed7-4388-bd32-347d897ac50a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dfc93436-8ed7-4388-bd32-347d897ac50a" (UID: "dfc93436-8ed7-4388-bd32-347d897ac50a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:45.992984 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dfc93436-8ed7-4388-bd32-347d897ac50a-config-data" (OuterVolumeSpecName: "config-data") pod "dfc93436-8ed7-4388-bd32-347d897ac50a" (UID: "dfc93436-8ed7-4388-bd32-347d897ac50a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.081081 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c4kvd\" (UniqueName: \"kubernetes.io/projected/dfc93436-8ed7-4388-bd32-347d897ac50a-kube-api-access-c4kvd\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.081124 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dfc93436-8ed7-4388-bd32-347d897ac50a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.081135 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dfc93436-8ed7-4388-bd32-347d897ac50a-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.188308 4558 generic.go:334] "Generic (PLEG): container finished" podID="760135a9-d7f7-456e-b7d6-9f2e7730e382" containerID="c6654ae1097dc1779180cf64c658db2d6969ccaab32c2416576f239f35513d6a" exitCode=0 Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.188360 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"760135a9-d7f7-456e-b7d6-9f2e7730e382","Type":"ContainerDied","Data":"c6654ae1097dc1779180cf64c658db2d6969ccaab32c2416576f239f35513d6a"} Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.190384 4558 generic.go:334] "Generic (PLEG): container finished" podID="bb135ff5-5284-4558-b549-c59fee8cc0e9" containerID="9842d6850ee563852429054c791f3062cb318e3f1aae2137f5ef5dfa0598575e" exitCode=0 Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.190425 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-55b7797494-6bq45" event={"ID":"bb135ff5-5284-4558-b549-c59fee8cc0e9","Type":"ContainerDied","Data":"9842d6850ee563852429054c791f3062cb318e3f1aae2137f5ef5dfa0598575e"} Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.190445 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-55b7797494-6bq45" event={"ID":"bb135ff5-5284-4558-b549-c59fee8cc0e9","Type":"ContainerDied","Data":"2fbc5b4b4ea247202660c1ce798e4d3959684966521bbd40599e4512a87b0f52"} Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.190461 4558 scope.go:117] "RemoveContainer" containerID="9842d6850ee563852429054c791f3062cb318e3f1aae2137f5ef5dfa0598575e" Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.190573 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-55b7797494-6bq45" Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.193867 4558 generic.go:334] "Generic (PLEG): container finished" podID="6a66de52-5f9c-46c6-aa54-8a0ba3288230" containerID="1099f7ebf3750cf92ad4dcca603aaebf0e6a74e93107a073aead4aee1f85dd17" exitCode=143 Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.193969 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"6a66de52-5f9c-46c6-aa54-8a0ba3288230","Type":"ContainerDied","Data":"1099f7ebf3750cf92ad4dcca603aaebf0e6a74e93107a073aead4aee1f85dd17"} Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.196487 4558 generic.go:334] "Generic (PLEG): container finished" podID="8fb66f31-6ee9-4ad8-8bd2-14e489b98591" containerID="a9c596fe297f41ede3bfa332670ccef94114b4b90d59c0d5cf0160934a47ac18" exitCode=143 Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.196569 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"8fb66f31-6ee9-4ad8-8bd2-14e489b98591","Type":"ContainerDied","Data":"a9c596fe297f41ede3bfa332670ccef94114b4b90d59c0d5cf0160934a47ac18"} Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.198480 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"dfc93436-8ed7-4388-bd32-347d897ac50a","Type":"ContainerDied","Data":"879a753f86794356931547df23026fc2d1f24346731a19f53a98e503e5bb22f3"} Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.198554 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.223995 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-55b7797494-6bq45"] Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.230196 4558 scope.go:117] "RemoveContainer" containerID="9842d6850ee563852429054c791f3062cb318e3f1aae2137f5ef5dfa0598575e" Jan 20 17:33:46 crc kubenswrapper[4558]: E0120 17:33:46.234379 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9842d6850ee563852429054c791f3062cb318e3f1aae2137f5ef5dfa0598575e\": container with ID starting with 9842d6850ee563852429054c791f3062cb318e3f1aae2137f5ef5dfa0598575e not found: ID does not exist" containerID="9842d6850ee563852429054c791f3062cb318e3f1aae2137f5ef5dfa0598575e" Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.234448 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9842d6850ee563852429054c791f3062cb318e3f1aae2137f5ef5dfa0598575e"} err="failed to get container status \"9842d6850ee563852429054c791f3062cb318e3f1aae2137f5ef5dfa0598575e\": rpc error: code = NotFound desc = could not find container \"9842d6850ee563852429054c791f3062cb318e3f1aae2137f5ef5dfa0598575e\": container with ID starting with 9842d6850ee563852429054c791f3062cb318e3f1aae2137f5ef5dfa0598575e not found: ID does not exist" Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.234482 4558 scope.go:117] "RemoveContainer" containerID="9b52c657a24907f806e9541bf4f5890d8aeebe08d47a2a0bad6a5771f6287d48" Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.244582 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-55b7797494-6bq45"] Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.259116 4558 
kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.276454 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.286195 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:33:46 crc kubenswrapper[4558]: E0120 17:33:46.286592 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0294130-264e-4fa0-b336-72c350ea61cc" containerName="barbican-worker" Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.286605 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0294130-264e-4fa0-b336-72c350ea61cc" containerName="barbican-worker" Jan 20 17:33:46 crc kubenswrapper[4558]: E0120 17:33:46.286624 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7c82345-58f6-4542-9483-604f1ad2b5f4" containerName="placement-api" Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.286631 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7c82345-58f6-4542-9483-604f1ad2b5f4" containerName="placement-api" Jan 20 17:33:46 crc kubenswrapper[4558]: E0120 17:33:46.286645 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0294130-264e-4fa0-b336-72c350ea61cc" containerName="barbican-worker-log" Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.286651 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0294130-264e-4fa0-b336-72c350ea61cc" containerName="barbican-worker-log" Jan 20 17:33:46 crc kubenswrapper[4558]: E0120 17:33:46.286664 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dfc93436-8ed7-4388-bd32-347d897ac50a" containerName="nova-scheduler-scheduler" Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.286671 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="dfc93436-8ed7-4388-bd32-347d897ac50a" containerName="nova-scheduler-scheduler" Jan 20 17:33:46 crc kubenswrapper[4558]: E0120 17:33:46.286683 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a" containerName="barbican-keystone-listener" Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.286688 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a" containerName="barbican-keystone-listener" Jan 20 17:33:46 crc kubenswrapper[4558]: E0120 17:33:46.286702 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dfc93436-8ed7-4388-bd32-347d897ac50a" containerName="nova-scheduler-scheduler" Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.286708 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="dfc93436-8ed7-4388-bd32-347d897ac50a" containerName="nova-scheduler-scheduler" Jan 20 17:33:46 crc kubenswrapper[4558]: E0120 17:33:46.286717 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7c82345-58f6-4542-9483-604f1ad2b5f4" containerName="placement-log" Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.286722 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7c82345-58f6-4542-9483-604f1ad2b5f4" containerName="placement-log" Jan 20 17:33:46 crc kubenswrapper[4558]: E0120 17:33:46.286740 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb135ff5-5284-4558-b549-c59fee8cc0e9" containerName="keystone-api" Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.286746 4558 state_mem.go:107] "Deleted 
CPUSet assignment" podUID="bb135ff5-5284-4558-b549-c59fee8cc0e9" containerName="keystone-api" Jan 20 17:33:46 crc kubenswrapper[4558]: E0120 17:33:46.286761 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a" containerName="barbican-keystone-listener-log" Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.286767 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a" containerName="barbican-keystone-listener-log" Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.286941 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0294130-264e-4fa0-b336-72c350ea61cc" containerName="barbican-worker" Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.286952 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="dfc93436-8ed7-4388-bd32-347d897ac50a" containerName="nova-scheduler-scheduler" Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.286960 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="dfc93436-8ed7-4388-bd32-347d897ac50a" containerName="nova-scheduler-scheduler" Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.286970 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e7c82345-58f6-4542-9483-604f1ad2b5f4" containerName="placement-log" Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.286983 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="bb135ff5-5284-4558-b549-c59fee8cc0e9" containerName="keystone-api" Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.286996 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0294130-264e-4fa0-b336-72c350ea61cc" containerName="barbican-worker-log" Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.287006 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a" containerName="barbican-keystone-listener-log" Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.287016 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d6b6da7f-6f9b-415b-ac10-b3bd8655bb1a" containerName="barbican-keystone-listener" Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.287026 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e7c82345-58f6-4542-9483-604f1ad2b5f4" containerName="placement-api" Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.287694 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.289603 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-scheduler-config-data" Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.291821 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.387097 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4c732a1-ca26-4d37-b769-f0c69283ef29-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"f4c732a1-ca26-4d37-b769-f0c69283ef29\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.387141 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f4c732a1-ca26-4d37-b769-f0c69283ef29-config-data\") pod \"nova-scheduler-0\" (UID: \"f4c732a1-ca26-4d37-b769-f0c69283ef29\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.387261 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cfc7x\" (UniqueName: \"kubernetes.io/projected/f4c732a1-ca26-4d37-b769-f0c69283ef29-kube-api-access-cfc7x\") pod \"nova-scheduler-0\" (UID: \"f4c732a1-ca26-4d37-b769-f0c69283ef29\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.488787 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cfc7x\" (UniqueName: \"kubernetes.io/projected/f4c732a1-ca26-4d37-b769-f0c69283ef29-kube-api-access-cfc7x\") pod \"nova-scheduler-0\" (UID: \"f4c732a1-ca26-4d37-b769-f0c69283ef29\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.488894 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4c732a1-ca26-4d37-b769-f0c69283ef29-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"f4c732a1-ca26-4d37-b769-f0c69283ef29\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.488917 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f4c732a1-ca26-4d37-b769-f0c69283ef29-config-data\") pod \"nova-scheduler-0\" (UID: \"f4c732a1-ca26-4d37-b769-f0c69283ef29\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.495780 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f4c732a1-ca26-4d37-b769-f0c69283ef29-config-data\") pod \"nova-scheduler-0\" (UID: \"f4c732a1-ca26-4d37-b769-f0c69283ef29\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.503980 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4c732a1-ca26-4d37-b769-f0c69283ef29-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"f4c732a1-ca26-4d37-b769-f0c69283ef29\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.505788 
4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cfc7x\" (UniqueName: \"kubernetes.io/projected/f4c732a1-ca26-4d37-b769-f0c69283ef29-kube-api-access-cfc7x\") pod \"nova-scheduler-0\" (UID: \"f4c732a1-ca26-4d37-b769-f0c69283ef29\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.572605 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.584269 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bb135ff5-5284-4558-b549-c59fee8cc0e9" path="/var/lib/kubelet/pods/bb135ff5-5284-4558-b549-c59fee8cc0e9/volumes" Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.585007 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dfc93436-8ed7-4388-bd32-347d897ac50a" path="/var/lib/kubelet/pods/dfc93436-8ed7-4388-bd32-347d897ac50a/volumes" Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.590201 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tzrg2\" (UniqueName: \"kubernetes.io/projected/097b903e-9e8a-464f-99e3-a69b68206465-kube-api-access-tzrg2\") pod \"097b903e-9e8a-464f-99e3-a69b68206465\" (UID: \"097b903e-9e8a-464f-99e3-a69b68206465\") " Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.590474 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/097b903e-9e8a-464f-99e3-a69b68206465-config-data\") pod \"097b903e-9e8a-464f-99e3-a69b68206465\" (UID: \"097b903e-9e8a-464f-99e3-a69b68206465\") " Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.590507 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/097b903e-9e8a-464f-99e3-a69b68206465-combined-ca-bundle\") pod \"097b903e-9e8a-464f-99e3-a69b68206465\" (UID: \"097b903e-9e8a-464f-99e3-a69b68206465\") " Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.598378 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/097b903e-9e8a-464f-99e3-a69b68206465-kube-api-access-tzrg2" (OuterVolumeSpecName: "kube-api-access-tzrg2") pod "097b903e-9e8a-464f-99e3-a69b68206465" (UID: "097b903e-9e8a-464f-99e3-a69b68206465"). InnerVolumeSpecName "kube-api-access-tzrg2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.611560 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.626855 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/097b903e-9e8a-464f-99e3-a69b68206465-config-data" (OuterVolumeSpecName: "config-data") pod "097b903e-9e8a-464f-99e3-a69b68206465" (UID: "097b903e-9e8a-464f-99e3-a69b68206465"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.629983 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/097b903e-9e8a-464f-99e3-a69b68206465-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "097b903e-9e8a-464f-99e3-a69b68206465" (UID: "097b903e-9e8a-464f-99e3-a69b68206465"). 
InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.660509 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.691446 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/760135a9-d7f7-456e-b7d6-9f2e7730e382-config-data\") pod \"760135a9-d7f7-456e-b7d6-9f2e7730e382\" (UID: \"760135a9-d7f7-456e-b7d6-9f2e7730e382\") " Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.691574 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/760135a9-d7f7-456e-b7d6-9f2e7730e382-combined-ca-bundle\") pod \"760135a9-d7f7-456e-b7d6-9f2e7730e382\" (UID: \"760135a9-d7f7-456e-b7d6-9f2e7730e382\") " Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.691686 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qq6ds\" (UniqueName: \"kubernetes.io/projected/760135a9-d7f7-456e-b7d6-9f2e7730e382-kube-api-access-qq6ds\") pod \"760135a9-d7f7-456e-b7d6-9f2e7730e382\" (UID: \"760135a9-d7f7-456e-b7d6-9f2e7730e382\") " Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.692466 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/097b903e-9e8a-464f-99e3-a69b68206465-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.692493 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/097b903e-9e8a-464f-99e3-a69b68206465-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.692504 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tzrg2\" (UniqueName: \"kubernetes.io/projected/097b903e-9e8a-464f-99e3-a69b68206465-kube-api-access-tzrg2\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.694635 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/760135a9-d7f7-456e-b7d6-9f2e7730e382-kube-api-access-qq6ds" (OuterVolumeSpecName: "kube-api-access-qq6ds") pod "760135a9-d7f7-456e-b7d6-9f2e7730e382" (UID: "760135a9-d7f7-456e-b7d6-9f2e7730e382"). InnerVolumeSpecName "kube-api-access-qq6ds". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.708794 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.718142 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/760135a9-d7f7-456e-b7d6-9f2e7730e382-config-data" (OuterVolumeSpecName: "config-data") pod "760135a9-d7f7-456e-b7d6-9f2e7730e382" (UID: "760135a9-d7f7-456e-b7d6-9f2e7730e382"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.741433 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/760135a9-d7f7-456e-b7d6-9f2e7730e382-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "760135a9-d7f7-456e-b7d6-9f2e7730e382" (UID: "760135a9-d7f7-456e-b7d6-9f2e7730e382"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.805900 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qq6ds\" (UniqueName: \"kubernetes.io/projected/760135a9-d7f7-456e-b7d6-9f2e7730e382-kube-api-access-qq6ds\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.805931 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/760135a9-d7f7-456e-b7d6-9f2e7730e382-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:46 crc kubenswrapper[4558]: I0120 17:33:46.805944 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/760135a9-d7f7-456e-b7d6-9f2e7730e382-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:47 crc kubenswrapper[4558]: I0120 17:33:47.111561 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:33:47 crc kubenswrapper[4558]: I0120 17:33:47.209639 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"097b903e-9e8a-464f-99e3-a69b68206465","Type":"ContainerDied","Data":"758bedcb04298972f20c5b8b77fb009bd0a4978f604a162ef844d7deeffb61b6"} Jan 20 17:33:47 crc kubenswrapper[4558]: I0120 17:33:47.209683 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:33:47 crc kubenswrapper[4558]: I0120 17:33:47.209712 4558 scope.go:117] "RemoveContainer" containerID="c8737ad851fee617d21ad48dbaf6b3a6790fc1895c0c097d362b21aaec9de85b" Jan 20 17:33:47 crc kubenswrapper[4558]: I0120 17:33:47.213703 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"760135a9-d7f7-456e-b7d6-9f2e7730e382","Type":"ContainerDied","Data":"2ddccc288a019aaabae17c74e035d0bf4fbbfc674f41378c25ee7d422b3e847c"} Jan 20 17:33:47 crc kubenswrapper[4558]: I0120 17:33:47.213846 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:33:47 crc kubenswrapper[4558]: I0120 17:33:47.217061 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"f4c732a1-ca26-4d37-b769-f0c69283ef29","Type":"ContainerStarted","Data":"211619e0c4ffb8889b074381e1e07ad757346bf8c1a8548a8d8053ae5245dff5"} Jan 20 17:33:47 crc kubenswrapper[4558]: I0120 17:33:47.250134 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:33:47 crc kubenswrapper[4558]: I0120 17:33:47.257862 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:33:47 crc kubenswrapper[4558]: I0120 17:33:47.265541 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:33:47 crc kubenswrapper[4558]: I0120 17:33:47.268970 4558 scope.go:117] "RemoveContainer" containerID="c6654ae1097dc1779180cf64c658db2d6969ccaab32c2416576f239f35513d6a" Jan 20 17:33:47 crc kubenswrapper[4558]: I0120 17:33:47.272868 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:33:47 crc kubenswrapper[4558]: I0120 17:33:47.279772 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:33:47 crc kubenswrapper[4558]: E0120 17:33:47.280211 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="097b903e-9e8a-464f-99e3-a69b68206465" containerName="nova-cell1-conductor-conductor" Jan 20 17:33:47 crc kubenswrapper[4558]: I0120 17:33:47.280230 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="097b903e-9e8a-464f-99e3-a69b68206465" containerName="nova-cell1-conductor-conductor" Jan 20 17:33:47 crc kubenswrapper[4558]: E0120 17:33:47.280256 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="760135a9-d7f7-456e-b7d6-9f2e7730e382" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:33:47 crc kubenswrapper[4558]: I0120 17:33:47.280263 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="760135a9-d7f7-456e-b7d6-9f2e7730e382" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:33:47 crc kubenswrapper[4558]: E0120 17:33:47.280280 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="097b903e-9e8a-464f-99e3-a69b68206465" containerName="nova-cell1-conductor-conductor" Jan 20 17:33:47 crc kubenswrapper[4558]: I0120 17:33:47.280286 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="097b903e-9e8a-464f-99e3-a69b68206465" containerName="nova-cell1-conductor-conductor" Jan 20 17:33:47 crc kubenswrapper[4558]: I0120 17:33:47.280473 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="097b903e-9e8a-464f-99e3-a69b68206465" containerName="nova-cell1-conductor-conductor" Jan 20 17:33:47 crc kubenswrapper[4558]: I0120 17:33:47.280491 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="097b903e-9e8a-464f-99e3-a69b68206465" containerName="nova-cell1-conductor-conductor" Jan 20 17:33:47 crc kubenswrapper[4558]: I0120 17:33:47.280510 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="760135a9-d7f7-456e-b7d6-9f2e7730e382" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:33:47 crc kubenswrapper[4558]: I0120 17:33:47.281108 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:33:47 crc kubenswrapper[4558]: I0120 17:33:47.282635 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-conductor-config-data" Jan 20 17:33:47 crc kubenswrapper[4558]: I0120 17:33:47.283244 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:33:47 crc kubenswrapper[4558]: I0120 17:33:47.284279 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:33:47 crc kubenswrapper[4558]: I0120 17:33:47.288988 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-novncproxy-config-data" Jan 20 17:33:47 crc kubenswrapper[4558]: I0120 17:33:47.306899 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:33:47 crc kubenswrapper[4558]: I0120 17:33:47.320345 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:33:47 crc kubenswrapper[4558]: I0120 17:33:47.428644 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04a134bc-533d-490a-8544-61f705a6d4f2-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"04a134bc-533d-490a-8544-61f705a6d4f2\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:33:47 crc kubenswrapper[4558]: I0120 17:33:47.428711 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fsc8j\" (UniqueName: \"kubernetes.io/projected/04a134bc-533d-490a-8544-61f705a6d4f2-kube-api-access-fsc8j\") pod \"nova-cell1-conductor-0\" (UID: \"04a134bc-533d-490a-8544-61f705a6d4f2\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:33:47 crc kubenswrapper[4558]: I0120 17:33:47.428828 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4f8tn\" (UniqueName: \"kubernetes.io/projected/cf7d4ada-26ee-451b-9f80-e2f65ba17eac-kube-api-access-4f8tn\") pod \"nova-cell1-novncproxy-0\" (UID: \"cf7d4ada-26ee-451b-9f80-e2f65ba17eac\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:33:47 crc kubenswrapper[4558]: I0120 17:33:47.428857 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04a134bc-533d-490a-8544-61f705a6d4f2-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"04a134bc-533d-490a-8544-61f705a6d4f2\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:33:47 crc kubenswrapper[4558]: I0120 17:33:47.428876 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf7d4ada-26ee-451b-9f80-e2f65ba17eac-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"cf7d4ada-26ee-451b-9f80-e2f65ba17eac\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:33:47 crc kubenswrapper[4558]: I0120 17:33:47.428966 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf7d4ada-26ee-451b-9f80-e2f65ba17eac-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: 
\"cf7d4ada-26ee-451b-9f80-e2f65ba17eac\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:33:47 crc kubenswrapper[4558]: I0120 17:33:47.481369 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:33:47 crc kubenswrapper[4558]: I0120 17:33:47.531341 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf7d4ada-26ee-451b-9f80-e2f65ba17eac-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"cf7d4ada-26ee-451b-9f80-e2f65ba17eac\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:33:47 crc kubenswrapper[4558]: I0120 17:33:47.531526 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf7d4ada-26ee-451b-9f80-e2f65ba17eac-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"cf7d4ada-26ee-451b-9f80-e2f65ba17eac\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:33:47 crc kubenswrapper[4558]: I0120 17:33:47.531598 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04a134bc-533d-490a-8544-61f705a6d4f2-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"04a134bc-533d-490a-8544-61f705a6d4f2\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:33:47 crc kubenswrapper[4558]: I0120 17:33:47.531651 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fsc8j\" (UniqueName: \"kubernetes.io/projected/04a134bc-533d-490a-8544-61f705a6d4f2-kube-api-access-fsc8j\") pod \"nova-cell1-conductor-0\" (UID: \"04a134bc-533d-490a-8544-61f705a6d4f2\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:33:47 crc kubenswrapper[4558]: I0120 17:33:47.531809 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4f8tn\" (UniqueName: \"kubernetes.io/projected/cf7d4ada-26ee-451b-9f80-e2f65ba17eac-kube-api-access-4f8tn\") pod \"nova-cell1-novncproxy-0\" (UID: \"cf7d4ada-26ee-451b-9f80-e2f65ba17eac\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:33:47 crc kubenswrapper[4558]: I0120 17:33:47.531857 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04a134bc-533d-490a-8544-61f705a6d4f2-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"04a134bc-533d-490a-8544-61f705a6d4f2\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:33:47 crc kubenswrapper[4558]: I0120 17:33:47.541582 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf7d4ada-26ee-451b-9f80-e2f65ba17eac-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"cf7d4ada-26ee-451b-9f80-e2f65ba17eac\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:33:47 crc kubenswrapper[4558]: I0120 17:33:47.541708 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04a134bc-533d-490a-8544-61f705a6d4f2-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"04a134bc-533d-490a-8544-61f705a6d4f2\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:33:47 crc kubenswrapper[4558]: I0120 17:33:47.542612 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04a134bc-533d-490a-8544-61f705a6d4f2-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"04a134bc-533d-490a-8544-61f705a6d4f2\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:33:47 crc kubenswrapper[4558]: I0120 17:33:47.562676 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf7d4ada-26ee-451b-9f80-e2f65ba17eac-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"cf7d4ada-26ee-451b-9f80-e2f65ba17eac\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:33:47 crc kubenswrapper[4558]: I0120 17:33:47.565670 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fsc8j\" (UniqueName: \"kubernetes.io/projected/04a134bc-533d-490a-8544-61f705a6d4f2-kube-api-access-fsc8j\") pod \"nova-cell1-conductor-0\" (UID: \"04a134bc-533d-490a-8544-61f705a6d4f2\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:33:47 crc kubenswrapper[4558]: I0120 17:33:47.579684 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4f8tn\" (UniqueName: \"kubernetes.io/projected/cf7d4ada-26ee-451b-9f80-e2f65ba17eac-kube-api-access-4f8tn\") pod \"nova-cell1-novncproxy-0\" (UID: \"cf7d4ada-26ee-451b-9f80-e2f65ba17eac\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:33:47 crc kubenswrapper[4558]: I0120 17:33:47.619508 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:33:47 crc kubenswrapper[4558]: I0120 17:33:47.638137 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:33:47 crc kubenswrapper[4558]: I0120 17:33:47.924609 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:33:48 crc kubenswrapper[4558]: I0120 17:33:48.093335 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:33:48 crc kubenswrapper[4558]: I0120 17:33:48.208905 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:33:48 crc kubenswrapper[4558]: W0120 17:33:48.209935 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcf7d4ada_26ee_451b_9f80_e2f65ba17eac.slice/crio-86ab52ccb17858817e9868895e83804721f6854bd8266f62d50b094b56165e79 WatchSource:0}: Error finding container 86ab52ccb17858817e9868895e83804721f6854bd8266f62d50b094b56165e79: Status 404 returned error can't find the container with id 86ab52ccb17858817e9868895e83804721f6854bd8266f62d50b094b56165e79 Jan 20 17:33:48 crc kubenswrapper[4558]: I0120 17:33:48.237768 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"f4c732a1-ca26-4d37-b769-f0c69283ef29","Type":"ContainerStarted","Data":"56bc59ccee15107e85170fe2b5008d13a71e507e5eb9558d7798d0abd57ef3bf"} Jan 20 17:33:48 crc kubenswrapper[4558]: I0120 17:33:48.239237 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"cf7d4ada-26ee-451b-9f80-e2f65ba17eac","Type":"ContainerStarted","Data":"86ab52ccb17858817e9868895e83804721f6854bd8266f62d50b094b56165e79"} Jan 20 17:33:48 crc kubenswrapper[4558]: I0120 17:33:48.253829 4558 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-scheduler-0" podStartSLOduration=2.253805322 podStartE2EDuration="2.253805322s" podCreationTimestamp="2026-01-20 17:33:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:33:48.249152162 +0000 UTC m=+3122.009490130" watchObservedRunningTime="2026-01-20 17:33:48.253805322 +0000 UTC m=+3122.014143289" Jan 20 17:33:48 crc kubenswrapper[4558]: I0120 17:33:48.312758 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:33:48 crc kubenswrapper[4558]: I0120 17:33:48.526898 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:33:48 crc kubenswrapper[4558]: I0120 17:33:48.527390 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="eca2edbe-72d9-45b0-9445-b734f97b7ece" containerName="ceilometer-central-agent" containerID="cri-o://91409ae0d9ca451c965331387d6a76c89a4420be7021b37195b0d5002fe4ec86" gracePeriod=30 Jan 20 17:33:48 crc kubenswrapper[4558]: I0120 17:33:48.527508 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="eca2edbe-72d9-45b0-9445-b734f97b7ece" containerName="ceilometer-notification-agent" containerID="cri-o://bb94cc5337340b1860cfc1f60ccc29ac0f8630ee7dc24e80e7ce2178442eadda" gracePeriod=30 Jan 20 17:33:48 crc kubenswrapper[4558]: I0120 17:33:48.527495 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="eca2edbe-72d9-45b0-9445-b734f97b7ece" containerName="proxy-httpd" containerID="cri-o://9a7331e32c5060ace879fa32c1a382d3ac8d9f2cefae695e7da910a50b371ff9" gracePeriod=30 Jan 20 17:33:48 crc kubenswrapper[4558]: I0120 17:33:48.527546 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="eca2edbe-72d9-45b0-9445-b734f97b7ece" containerName="sg-core" containerID="cri-o://279cc96ec167325242d88523084a02ee78c5305d2befa02401033a29b1ae9959" gracePeriod=30 Jan 20 17:33:48 crc kubenswrapper[4558]: I0120 17:33:48.578452 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="097b903e-9e8a-464f-99e3-a69b68206465" path="/var/lib/kubelet/pods/097b903e-9e8a-464f-99e3-a69b68206465/volumes" Jan 20 17:33:48 crc kubenswrapper[4558]: I0120 17:33:48.579132 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="760135a9-d7f7-456e-b7d6-9f2e7730e382" path="/var/lib/kubelet/pods/760135a9-d7f7-456e-b7d6-9f2e7730e382/volumes" Jan 20 17:33:48 crc kubenswrapper[4558]: I0120 17:33:48.710995 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/swift-proxy-b45ff9748-dd8kw"] Jan 20 17:33:48 crc kubenswrapper[4558]: I0120 17:33:48.712504 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/swift-proxy-b45ff9748-dd8kw" Jan 20 17:33:48 crc kubenswrapper[4558]: I0120 17:33:48.721481 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-swift-internal-svc" Jan 20 17:33:48 crc kubenswrapper[4558]: I0120 17:33:48.721631 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-swift-public-svc" Jan 20 17:33:48 crc kubenswrapper[4558]: I0120 17:33:48.721644 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-proxy-b45ff9748-dd8kw"] Jan 20 17:33:48 crc kubenswrapper[4558]: I0120 17:33:48.755380 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/nova-metadata-0" podUID="8fb66f31-6ee9-4ad8-8bd2-14e489b98591" containerName="nova-metadata-log" probeResult="failure" output="Get \"http://10.217.1.219:8775/\": read tcp 10.217.0.2:58942->10.217.1.219:8775: read: connection reset by peer" Jan 20 17:33:48 crc kubenswrapper[4558]: I0120 17:33:48.755735 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/nova-metadata-0" podUID="8fb66f31-6ee9-4ad8-8bd2-14e489b98591" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"http://10.217.1.219:8775/\": read tcp 10.217.0.2:58958->10.217.1.219:8775: read: connection reset by peer" Jan 20 17:33:48 crc kubenswrapper[4558]: I0120 17:33:48.773548 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0abcf7eb-7e84-4a8e-81e8-a397db63848b-combined-ca-bundle\") pod \"swift-proxy-b45ff9748-dd8kw\" (UID: \"0abcf7eb-7e84-4a8e-81e8-a397db63848b\") " pod="openstack-kuttl-tests/swift-proxy-b45ff9748-dd8kw" Jan 20 17:33:48 crc kubenswrapper[4558]: I0120 17:33:48.773630 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0abcf7eb-7e84-4a8e-81e8-a397db63848b-internal-tls-certs\") pod \"swift-proxy-b45ff9748-dd8kw\" (UID: \"0abcf7eb-7e84-4a8e-81e8-a397db63848b\") " pod="openstack-kuttl-tests/swift-proxy-b45ff9748-dd8kw" Jan 20 17:33:48 crc kubenswrapper[4558]: I0120 17:33:48.773667 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0abcf7eb-7e84-4a8e-81e8-a397db63848b-public-tls-certs\") pod \"swift-proxy-b45ff9748-dd8kw\" (UID: \"0abcf7eb-7e84-4a8e-81e8-a397db63848b\") " pod="openstack-kuttl-tests/swift-proxy-b45ff9748-dd8kw" Jan 20 17:33:48 crc kubenswrapper[4558]: I0120 17:33:48.773731 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0abcf7eb-7e84-4a8e-81e8-a397db63848b-log-httpd\") pod \"swift-proxy-b45ff9748-dd8kw\" (UID: \"0abcf7eb-7e84-4a8e-81e8-a397db63848b\") " pod="openstack-kuttl-tests/swift-proxy-b45ff9748-dd8kw" Jan 20 17:33:48 crc kubenswrapper[4558]: I0120 17:33:48.773791 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/0abcf7eb-7e84-4a8e-81e8-a397db63848b-etc-swift\") pod \"swift-proxy-b45ff9748-dd8kw\" (UID: \"0abcf7eb-7e84-4a8e-81e8-a397db63848b\") " pod="openstack-kuttl-tests/swift-proxy-b45ff9748-dd8kw" Jan 20 17:33:48 crc kubenswrapper[4558]: I0120 17:33:48.773975 4558 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gnw9s\" (UniqueName: \"kubernetes.io/projected/0abcf7eb-7e84-4a8e-81e8-a397db63848b-kube-api-access-gnw9s\") pod \"swift-proxy-b45ff9748-dd8kw\" (UID: \"0abcf7eb-7e84-4a8e-81e8-a397db63848b\") " pod="openstack-kuttl-tests/swift-proxy-b45ff9748-dd8kw" Jan 20 17:33:48 crc kubenswrapper[4558]: I0120 17:33:48.774027 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0abcf7eb-7e84-4a8e-81e8-a397db63848b-run-httpd\") pod \"swift-proxy-b45ff9748-dd8kw\" (UID: \"0abcf7eb-7e84-4a8e-81e8-a397db63848b\") " pod="openstack-kuttl-tests/swift-proxy-b45ff9748-dd8kw" Jan 20 17:33:48 crc kubenswrapper[4558]: I0120 17:33:48.774052 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0abcf7eb-7e84-4a8e-81e8-a397db63848b-config-data\") pod \"swift-proxy-b45ff9748-dd8kw\" (UID: \"0abcf7eb-7e84-4a8e-81e8-a397db63848b\") " pod="openstack-kuttl-tests/swift-proxy-b45ff9748-dd8kw" Jan 20 17:33:48 crc kubenswrapper[4558]: I0120 17:33:48.873537 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:33:48 crc kubenswrapper[4558]: I0120 17:33:48.882676 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0abcf7eb-7e84-4a8e-81e8-a397db63848b-internal-tls-certs\") pod \"swift-proxy-b45ff9748-dd8kw\" (UID: \"0abcf7eb-7e84-4a8e-81e8-a397db63848b\") " pod="openstack-kuttl-tests/swift-proxy-b45ff9748-dd8kw" Jan 20 17:33:48 crc kubenswrapper[4558]: I0120 17:33:48.882745 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0abcf7eb-7e84-4a8e-81e8-a397db63848b-public-tls-certs\") pod \"swift-proxy-b45ff9748-dd8kw\" (UID: \"0abcf7eb-7e84-4a8e-81e8-a397db63848b\") " pod="openstack-kuttl-tests/swift-proxy-b45ff9748-dd8kw" Jan 20 17:33:48 crc kubenswrapper[4558]: I0120 17:33:48.882794 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0abcf7eb-7e84-4a8e-81e8-a397db63848b-log-httpd\") pod \"swift-proxy-b45ff9748-dd8kw\" (UID: \"0abcf7eb-7e84-4a8e-81e8-a397db63848b\") " pod="openstack-kuttl-tests/swift-proxy-b45ff9748-dd8kw" Jan 20 17:33:48 crc kubenswrapper[4558]: I0120 17:33:48.882855 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/0abcf7eb-7e84-4a8e-81e8-a397db63848b-etc-swift\") pod \"swift-proxy-b45ff9748-dd8kw\" (UID: \"0abcf7eb-7e84-4a8e-81e8-a397db63848b\") " pod="openstack-kuttl-tests/swift-proxy-b45ff9748-dd8kw" Jan 20 17:33:48 crc kubenswrapper[4558]: I0120 17:33:48.882996 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gnw9s\" (UniqueName: \"kubernetes.io/projected/0abcf7eb-7e84-4a8e-81e8-a397db63848b-kube-api-access-gnw9s\") pod \"swift-proxy-b45ff9748-dd8kw\" (UID: \"0abcf7eb-7e84-4a8e-81e8-a397db63848b\") " pod="openstack-kuttl-tests/swift-proxy-b45ff9748-dd8kw" Jan 20 17:33:48 crc kubenswrapper[4558]: I0120 17:33:48.883043 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/0abcf7eb-7e84-4a8e-81e8-a397db63848b-run-httpd\") pod \"swift-proxy-b45ff9748-dd8kw\" (UID: \"0abcf7eb-7e84-4a8e-81e8-a397db63848b\") " pod="openstack-kuttl-tests/swift-proxy-b45ff9748-dd8kw" Jan 20 17:33:48 crc kubenswrapper[4558]: I0120 17:33:48.883070 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0abcf7eb-7e84-4a8e-81e8-a397db63848b-config-data\") pod \"swift-proxy-b45ff9748-dd8kw\" (UID: \"0abcf7eb-7e84-4a8e-81e8-a397db63848b\") " pod="openstack-kuttl-tests/swift-proxy-b45ff9748-dd8kw" Jan 20 17:33:48 crc kubenswrapper[4558]: I0120 17:33:48.883142 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0abcf7eb-7e84-4a8e-81e8-a397db63848b-combined-ca-bundle\") pod \"swift-proxy-b45ff9748-dd8kw\" (UID: \"0abcf7eb-7e84-4a8e-81e8-a397db63848b\") " pod="openstack-kuttl-tests/swift-proxy-b45ff9748-dd8kw" Jan 20 17:33:48 crc kubenswrapper[4558]: I0120 17:33:48.884995 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0abcf7eb-7e84-4a8e-81e8-a397db63848b-run-httpd\") pod \"swift-proxy-b45ff9748-dd8kw\" (UID: \"0abcf7eb-7e84-4a8e-81e8-a397db63848b\") " pod="openstack-kuttl-tests/swift-proxy-b45ff9748-dd8kw" Jan 20 17:33:48 crc kubenswrapper[4558]: I0120 17:33:48.887877 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0abcf7eb-7e84-4a8e-81e8-a397db63848b-public-tls-certs\") pod \"swift-proxy-b45ff9748-dd8kw\" (UID: \"0abcf7eb-7e84-4a8e-81e8-a397db63848b\") " pod="openstack-kuttl-tests/swift-proxy-b45ff9748-dd8kw" Jan 20 17:33:48 crc kubenswrapper[4558]: I0120 17:33:48.888137 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0abcf7eb-7e84-4a8e-81e8-a397db63848b-log-httpd\") pod \"swift-proxy-b45ff9748-dd8kw\" (UID: \"0abcf7eb-7e84-4a8e-81e8-a397db63848b\") " pod="openstack-kuttl-tests/swift-proxy-b45ff9748-dd8kw" Jan 20 17:33:48 crc kubenswrapper[4558]: I0120 17:33:48.888248 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0abcf7eb-7e84-4a8e-81e8-a397db63848b-internal-tls-certs\") pod \"swift-proxy-b45ff9748-dd8kw\" (UID: \"0abcf7eb-7e84-4a8e-81e8-a397db63848b\") " pod="openstack-kuttl-tests/swift-proxy-b45ff9748-dd8kw" Jan 20 17:33:48 crc kubenswrapper[4558]: I0120 17:33:48.919651 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0abcf7eb-7e84-4a8e-81e8-a397db63848b-config-data\") pod \"swift-proxy-b45ff9748-dd8kw\" (UID: \"0abcf7eb-7e84-4a8e-81e8-a397db63848b\") " pod="openstack-kuttl-tests/swift-proxy-b45ff9748-dd8kw" Jan 20 17:33:48 crc kubenswrapper[4558]: I0120 17:33:48.927631 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/0abcf7eb-7e84-4a8e-81e8-a397db63848b-etc-swift\") pod \"swift-proxy-b45ff9748-dd8kw\" (UID: \"0abcf7eb-7e84-4a8e-81e8-a397db63848b\") " pod="openstack-kuttl-tests/swift-proxy-b45ff9748-dd8kw" Jan 20 17:33:48 crc kubenswrapper[4558]: I0120 17:33:48.933020 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gnw9s\" (UniqueName: 
\"kubernetes.io/projected/0abcf7eb-7e84-4a8e-81e8-a397db63848b-kube-api-access-gnw9s\") pod \"swift-proxy-b45ff9748-dd8kw\" (UID: \"0abcf7eb-7e84-4a8e-81e8-a397db63848b\") " pod="openstack-kuttl-tests/swift-proxy-b45ff9748-dd8kw" Jan 20 17:33:48 crc kubenswrapper[4558]: I0120 17:33:48.933112 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0abcf7eb-7e84-4a8e-81e8-a397db63848b-combined-ca-bundle\") pod \"swift-proxy-b45ff9748-dd8kw\" (UID: \"0abcf7eb-7e84-4a8e-81e8-a397db63848b\") " pod="openstack-kuttl-tests/swift-proxy-b45ff9748-dd8kw" Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.043902 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-proxy-b45ff9748-dd8kw" Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.187603 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.252985 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"04a134bc-533d-490a-8544-61f705a6d4f2","Type":"ContainerStarted","Data":"2623aa795d9d97ceb168764d999b00e635d55fa28ca538052a877af229f57732"} Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.253038 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"04a134bc-533d-490a-8544-61f705a6d4f2","Type":"ContainerStarted","Data":"4962bbdddc3126dcebfffd77d4fe1852739f34d6c6ffc23ea3b54bab816f6a72"} Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.253973 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.282778 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"cf7d4ada-26ee-451b-9f80-e2f65ba17eac","Type":"ContainerStarted","Data":"61340ea4c6d4601e8dba8fa5338ab203b0a918c71c61c9f5e5eb7af8611f7eb7"} Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.282912 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" podUID="cf7d4ada-26ee-451b-9f80-e2f65ba17eac" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://61340ea4c6d4601e8dba8fa5338ab203b0a918c71c61c9f5e5eb7af8611f7eb7" gracePeriod=30 Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.303038 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bvdnd\" (UniqueName: \"kubernetes.io/projected/8fb66f31-6ee9-4ad8-8bd2-14e489b98591-kube-api-access-bvdnd\") pod \"8fb66f31-6ee9-4ad8-8bd2-14e489b98591\" (UID: \"8fb66f31-6ee9-4ad8-8bd2-14e489b98591\") " Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.303215 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8fb66f31-6ee9-4ad8-8bd2-14e489b98591-combined-ca-bundle\") pod \"8fb66f31-6ee9-4ad8-8bd2-14e489b98591\" (UID: \"8fb66f31-6ee9-4ad8-8bd2-14e489b98591\") " Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.303356 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8fb66f31-6ee9-4ad8-8bd2-14e489b98591-logs\") pod \"8fb66f31-6ee9-4ad8-8bd2-14e489b98591\" 
(UID: \"8fb66f31-6ee9-4ad8-8bd2-14e489b98591\") " Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.303376 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8fb66f31-6ee9-4ad8-8bd2-14e489b98591-config-data\") pod \"8fb66f31-6ee9-4ad8-8bd2-14e489b98591\" (UID: \"8fb66f31-6ee9-4ad8-8bd2-14e489b98591\") " Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.305822 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8fb66f31-6ee9-4ad8-8bd2-14e489b98591-logs" (OuterVolumeSpecName: "logs") pod "8fb66f31-6ee9-4ad8-8bd2-14e489b98591" (UID: "8fb66f31-6ee9-4ad8-8bd2-14e489b98591"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.306927 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podStartSLOduration=2.306914153 podStartE2EDuration="2.306914153s" podCreationTimestamp="2026-01-20 17:33:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:33:49.278907691 +0000 UTC m=+3123.039245658" watchObservedRunningTime="2026-01-20 17:33:49.306914153 +0000 UTC m=+3123.067252110" Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.333971 4558 generic.go:334] "Generic (PLEG): container finished" podID="eca2edbe-72d9-45b0-9445-b734f97b7ece" containerID="9a7331e32c5060ace879fa32c1a382d3ac8d9f2cefae695e7da910a50b371ff9" exitCode=0 Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.334001 4558 generic.go:334] "Generic (PLEG): container finished" podID="eca2edbe-72d9-45b0-9445-b734f97b7ece" containerID="279cc96ec167325242d88523084a02ee78c5305d2befa02401033a29b1ae9959" exitCode=2 Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.334017 4558 generic.go:334] "Generic (PLEG): container finished" podID="eca2edbe-72d9-45b0-9445-b734f97b7ece" containerID="91409ae0d9ca451c965331387d6a76c89a4420be7021b37195b0d5002fe4ec86" exitCode=0 Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.334059 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"eca2edbe-72d9-45b0-9445-b734f97b7ece","Type":"ContainerDied","Data":"9a7331e32c5060ace879fa32c1a382d3ac8d9f2cefae695e7da910a50b371ff9"} Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.334091 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"eca2edbe-72d9-45b0-9445-b734f97b7ece","Type":"ContainerDied","Data":"279cc96ec167325242d88523084a02ee78c5305d2befa02401033a29b1ae9959"} Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.334101 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"eca2edbe-72d9-45b0-9445-b734f97b7ece","Type":"ContainerDied","Data":"91409ae0d9ca451c965331387d6a76c89a4420be7021b37195b0d5002fe4ec86"} Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.336743 4558 generic.go:334] "Generic (PLEG): container finished" podID="8fb66f31-6ee9-4ad8-8bd2-14e489b98591" containerID="642f551615ce48a73abe795ca46d2227e4e6a93313045edf6c62bffaecdc9441" exitCode=0 Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.336785 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" 
event={"ID":"8fb66f31-6ee9-4ad8-8bd2-14e489b98591","Type":"ContainerDied","Data":"642f551615ce48a73abe795ca46d2227e4e6a93313045edf6c62bffaecdc9441"} Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.336803 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"8fb66f31-6ee9-4ad8-8bd2-14e489b98591","Type":"ContainerDied","Data":"a65a5a35a730b473f8fda1850c8d424f2df977b3cb244f46616c780a7ae4c4f0"} Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.336834 4558 scope.go:117] "RemoveContainer" containerID="642f551615ce48a73abe795ca46d2227e4e6a93313045edf6c62bffaecdc9441" Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.336961 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.337443 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" podStartSLOduration=2.337419475 podStartE2EDuration="2.337419475s" podCreationTimestamp="2026-01-20 17:33:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:33:49.323639152 +0000 UTC m=+3123.083977119" watchObservedRunningTime="2026-01-20 17:33:49.337419475 +0000 UTC m=+3123.097757442" Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.343147 4558 generic.go:334] "Generic (PLEG): container finished" podID="6a66de52-5f9c-46c6-aa54-8a0ba3288230" containerID="915a019c132b2d4f4aeb77f2aacee6bc94ce4e5d6b372707956a6621db84cd38" exitCode=0 Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.343276 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"6a66de52-5f9c-46c6-aa54-8a0ba3288230","Type":"ContainerDied","Data":"915a019c132b2d4f4aeb77f2aacee6bc94ce4e5d6b372707956a6621db84cd38"} Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.343327 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="f4c732a1-ca26-4d37-b769-f0c69283ef29" containerName="nova-scheduler-scheduler" containerID="cri-o://56bc59ccee15107e85170fe2b5008d13a71e507e5eb9558d7798d0abd57ef3bf" gracePeriod=30 Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.347122 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8fb66f31-6ee9-4ad8-8bd2-14e489b98591-kube-api-access-bvdnd" (OuterVolumeSpecName: "kube-api-access-bvdnd") pod "8fb66f31-6ee9-4ad8-8bd2-14e489b98591" (UID: "8fb66f31-6ee9-4ad8-8bd2-14e489b98591"). InnerVolumeSpecName "kube-api-access-bvdnd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.381943 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8fb66f31-6ee9-4ad8-8bd2-14e489b98591-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8fb66f31-6ee9-4ad8-8bd2-14e489b98591" (UID: "8fb66f31-6ee9-4ad8-8bd2-14e489b98591"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.384807 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8fb66f31-6ee9-4ad8-8bd2-14e489b98591-config-data" (OuterVolumeSpecName: "config-data") pod "8fb66f31-6ee9-4ad8-8bd2-14e489b98591" (UID: "8fb66f31-6ee9-4ad8-8bd2-14e489b98591"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.408055 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bvdnd\" (UniqueName: \"kubernetes.io/projected/8fb66f31-6ee9-4ad8-8bd2-14e489b98591-kube-api-access-bvdnd\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.408097 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8fb66f31-6ee9-4ad8-8bd2-14e489b98591-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.408110 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8fb66f31-6ee9-4ad8-8bd2-14e489b98591-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.408121 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8fb66f31-6ee9-4ad8-8bd2-14e489b98591-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.480252 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-proxy-b45ff9748-dd8kw"] Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.610041 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.614933 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pzlfz\" (UniqueName: \"kubernetes.io/projected/6a66de52-5f9c-46c6-aa54-8a0ba3288230-kube-api-access-pzlfz\") pod \"6a66de52-5f9c-46c6-aa54-8a0ba3288230\" (UID: \"6a66de52-5f9c-46c6-aa54-8a0ba3288230\") " Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.615008 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6a66de52-5f9c-46c6-aa54-8a0ba3288230-combined-ca-bundle\") pod \"6a66de52-5f9c-46c6-aa54-8a0ba3288230\" (UID: \"6a66de52-5f9c-46c6-aa54-8a0ba3288230\") " Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.615102 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6a66de52-5f9c-46c6-aa54-8a0ba3288230-config-data\") pod \"6a66de52-5f9c-46c6-aa54-8a0ba3288230\" (UID: \"6a66de52-5f9c-46c6-aa54-8a0ba3288230\") " Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.615188 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6a66de52-5f9c-46c6-aa54-8a0ba3288230-logs\") pod \"6a66de52-5f9c-46c6-aa54-8a0ba3288230\" (UID: \"6a66de52-5f9c-46c6-aa54-8a0ba3288230\") " Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.621116 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6a66de52-5f9c-46c6-aa54-8a0ba3288230-logs" (OuterVolumeSpecName: "logs") pod "6a66de52-5f9c-46c6-aa54-8a0ba3288230" (UID: "6a66de52-5f9c-46c6-aa54-8a0ba3288230"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.626283 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6a66de52-5f9c-46c6-aa54-8a0ba3288230-kube-api-access-pzlfz" (OuterVolumeSpecName: "kube-api-access-pzlfz") pod "6a66de52-5f9c-46c6-aa54-8a0ba3288230" (UID: "6a66de52-5f9c-46c6-aa54-8a0ba3288230"). InnerVolumeSpecName "kube-api-access-pzlfz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.653353 4558 scope.go:117] "RemoveContainer" containerID="a9c596fe297f41ede3bfa332670ccef94114b4b90d59c0d5cf0160934a47ac18" Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.653384 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6a66de52-5f9c-46c6-aa54-8a0ba3288230-config-data" (OuterVolumeSpecName: "config-data") pod "6a66de52-5f9c-46c6-aa54-8a0ba3288230" (UID: "6a66de52-5f9c-46c6-aa54-8a0ba3288230"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.685577 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6a66de52-5f9c-46c6-aa54-8a0ba3288230-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6a66de52-5f9c-46c6-aa54-8a0ba3288230" (UID: "6a66de52-5f9c-46c6-aa54-8a0ba3288230"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.700338 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.721262 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6a66de52-5f9c-46c6-aa54-8a0ba3288230-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.721313 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6a66de52-5f9c-46c6-aa54-8a0ba3288230-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.721326 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6a66de52-5f9c-46c6-aa54-8a0ba3288230-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.721336 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pzlfz\" (UniqueName: \"kubernetes.io/projected/6a66de52-5f9c-46c6-aa54-8a0ba3288230-kube-api-access-pzlfz\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.732001 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.747350 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:33:49 crc kubenswrapper[4558]: E0120 17:33:49.747878 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8fb66f31-6ee9-4ad8-8bd2-14e489b98591" containerName="nova-metadata-metadata" Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.747893 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8fb66f31-6ee9-4ad8-8bd2-14e489b98591" containerName="nova-metadata-metadata" Jan 20 17:33:49 crc kubenswrapper[4558]: E0120 17:33:49.747958 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a66de52-5f9c-46c6-aa54-8a0ba3288230" containerName="nova-api-api" Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.747966 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a66de52-5f9c-46c6-aa54-8a0ba3288230" containerName="nova-api-api" Jan 20 17:33:49 crc kubenswrapper[4558]: E0120 17:33:49.747973 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8fb66f31-6ee9-4ad8-8bd2-14e489b98591" containerName="nova-metadata-log" Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.747982 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8fb66f31-6ee9-4ad8-8bd2-14e489b98591" containerName="nova-metadata-log" Jan 20 17:33:49 crc kubenswrapper[4558]: E0120 17:33:49.748002 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a66de52-5f9c-46c6-aa54-8a0ba3288230" containerName="nova-api-log" Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.748008 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a66de52-5f9c-46c6-aa54-8a0ba3288230" containerName="nova-api-log" Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.748208 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8fb66f31-6ee9-4ad8-8bd2-14e489b98591" containerName="nova-metadata-log" Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.748220 4558 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="8fb66f31-6ee9-4ad8-8bd2-14e489b98591" containerName="nova-metadata-metadata" Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.748232 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="6a66de52-5f9c-46c6-aa54-8a0ba3288230" containerName="nova-api-api" Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.748245 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="6a66de52-5f9c-46c6-aa54-8a0ba3288230" containerName="nova-api-log" Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.749333 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.754763 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-metadata-config-data" Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.760322 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.771238 4558 scope.go:117] "RemoveContainer" containerID="642f551615ce48a73abe795ca46d2227e4e6a93313045edf6c62bffaecdc9441" Jan 20 17:33:49 crc kubenswrapper[4558]: E0120 17:33:49.774306 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"642f551615ce48a73abe795ca46d2227e4e6a93313045edf6c62bffaecdc9441\": container with ID starting with 642f551615ce48a73abe795ca46d2227e4e6a93313045edf6c62bffaecdc9441 not found: ID does not exist" containerID="642f551615ce48a73abe795ca46d2227e4e6a93313045edf6c62bffaecdc9441" Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.775800 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"642f551615ce48a73abe795ca46d2227e4e6a93313045edf6c62bffaecdc9441"} err="failed to get container status \"642f551615ce48a73abe795ca46d2227e4e6a93313045edf6c62bffaecdc9441\": rpc error: code = NotFound desc = could not find container \"642f551615ce48a73abe795ca46d2227e4e6a93313045edf6c62bffaecdc9441\": container with ID starting with 642f551615ce48a73abe795ca46d2227e4e6a93313045edf6c62bffaecdc9441 not found: ID does not exist" Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.775933 4558 scope.go:117] "RemoveContainer" containerID="a9c596fe297f41ede3bfa332670ccef94114b4b90d59c0d5cf0160934a47ac18" Jan 20 17:33:49 crc kubenswrapper[4558]: E0120 17:33:49.779689 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a9c596fe297f41ede3bfa332670ccef94114b4b90d59c0d5cf0160934a47ac18\": container with ID starting with a9c596fe297f41ede3bfa332670ccef94114b4b90d59c0d5cf0160934a47ac18 not found: ID does not exist" containerID="a9c596fe297f41ede3bfa332670ccef94114b4b90d59c0d5cf0160934a47ac18" Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.779737 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a9c596fe297f41ede3bfa332670ccef94114b4b90d59c0d5cf0160934a47ac18"} err="failed to get container status \"a9c596fe297f41ede3bfa332670ccef94114b4b90d59c0d5cf0160934a47ac18\": rpc error: code = NotFound desc = could not find container \"a9c596fe297f41ede3bfa332670ccef94114b4b90d59c0d5cf0160934a47ac18\": container with ID starting with a9c596fe297f41ede3bfa332670ccef94114b4b90d59c0d5cf0160934a47ac18 not found: ID does not exist" Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.823400 4558 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70209150-5da2-411c-ac05-641a336b6c12-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"70209150-5da2-411c-ac05-641a336b6c12\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.823466 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/70209150-5da2-411c-ac05-641a336b6c12-logs\") pod \"nova-metadata-0\" (UID: \"70209150-5da2-411c-ac05-641a336b6c12\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.823589 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70209150-5da2-411c-ac05-641a336b6c12-config-data\") pod \"nova-metadata-0\" (UID: \"70209150-5da2-411c-ac05-641a336b6c12\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.823684 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6hj55\" (UniqueName: \"kubernetes.io/projected/70209150-5da2-411c-ac05-641a336b6c12-kube-api-access-6hj55\") pod \"nova-metadata-0\" (UID: \"70209150-5da2-411c-ac05-641a336b6c12\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.892571 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.927720 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wk88z\" (UniqueName: \"kubernetes.io/projected/eca2edbe-72d9-45b0-9445-b734f97b7ece-kube-api-access-wk88z\") pod \"eca2edbe-72d9-45b0-9445-b734f97b7ece\" (UID: \"eca2edbe-72d9-45b0-9445-b734f97b7ece\") " Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.927865 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/eca2edbe-72d9-45b0-9445-b734f97b7ece-sg-core-conf-yaml\") pod \"eca2edbe-72d9-45b0-9445-b734f97b7ece\" (UID: \"eca2edbe-72d9-45b0-9445-b734f97b7ece\") " Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.927968 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/eca2edbe-72d9-45b0-9445-b734f97b7ece-run-httpd\") pod \"eca2edbe-72d9-45b0-9445-b734f97b7ece\" (UID: \"eca2edbe-72d9-45b0-9445-b734f97b7ece\") " Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.928055 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eca2edbe-72d9-45b0-9445-b734f97b7ece-scripts\") pod \"eca2edbe-72d9-45b0-9445-b734f97b7ece\" (UID: \"eca2edbe-72d9-45b0-9445-b734f97b7ece\") " Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.928155 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eca2edbe-72d9-45b0-9445-b734f97b7ece-config-data\") pod \"eca2edbe-72d9-45b0-9445-b734f97b7ece\" (UID: \"eca2edbe-72d9-45b0-9445-b734f97b7ece\") " Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.928199 4558 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eca2edbe-72d9-45b0-9445-b734f97b7ece-combined-ca-bundle\") pod \"eca2edbe-72d9-45b0-9445-b734f97b7ece\" (UID: \"eca2edbe-72d9-45b0-9445-b734f97b7ece\") " Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.928247 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/eca2edbe-72d9-45b0-9445-b734f97b7ece-log-httpd\") pod \"eca2edbe-72d9-45b0-9445-b734f97b7ece\" (UID: \"eca2edbe-72d9-45b0-9445-b734f97b7ece\") " Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.928583 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eca2edbe-72d9-45b0-9445-b734f97b7ece-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "eca2edbe-72d9-45b0-9445-b734f97b7ece" (UID: "eca2edbe-72d9-45b0-9445-b734f97b7ece"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.928755 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70209150-5da2-411c-ac05-641a336b6c12-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"70209150-5da2-411c-ac05-641a336b6c12\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.928999 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eca2edbe-72d9-45b0-9445-b734f97b7ece-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "eca2edbe-72d9-45b0-9445-b734f97b7ece" (UID: "eca2edbe-72d9-45b0-9445-b734f97b7ece"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.929557 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/70209150-5da2-411c-ac05-641a336b6c12-logs\") pod \"nova-metadata-0\" (UID: \"70209150-5da2-411c-ac05-641a336b6c12\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.929742 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70209150-5da2-411c-ac05-641a336b6c12-config-data\") pod \"nova-metadata-0\" (UID: \"70209150-5da2-411c-ac05-641a336b6c12\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.929820 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6hj55\" (UniqueName: \"kubernetes.io/projected/70209150-5da2-411c-ac05-641a336b6c12-kube-api-access-6hj55\") pod \"nova-metadata-0\" (UID: \"70209150-5da2-411c-ac05-641a336b6c12\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.930103 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/eca2edbe-72d9-45b0-9445-b734f97b7ece-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.930116 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/eca2edbe-72d9-45b0-9445-b734f97b7ece-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.930762 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/70209150-5da2-411c-ac05-641a336b6c12-logs\") pod \"nova-metadata-0\" (UID: \"70209150-5da2-411c-ac05-641a336b6c12\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.933565 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70209150-5da2-411c-ac05-641a336b6c12-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"70209150-5da2-411c-ac05-641a336b6c12\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.937341 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eca2edbe-72d9-45b0-9445-b734f97b7ece-scripts" (OuterVolumeSpecName: "scripts") pod "eca2edbe-72d9-45b0-9445-b734f97b7ece" (UID: "eca2edbe-72d9-45b0-9445-b734f97b7ece"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.937383 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eca2edbe-72d9-45b0-9445-b734f97b7ece-kube-api-access-wk88z" (OuterVolumeSpecName: "kube-api-access-wk88z") pod "eca2edbe-72d9-45b0-9445-b734f97b7ece" (UID: "eca2edbe-72d9-45b0-9445-b734f97b7ece"). InnerVolumeSpecName "kube-api-access-wk88z". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.941655 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70209150-5da2-411c-ac05-641a336b6c12-config-data\") pod \"nova-metadata-0\" (UID: \"70209150-5da2-411c-ac05-641a336b6c12\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:33:49 crc kubenswrapper[4558]: I0120 17:33:49.952257 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6hj55\" (UniqueName: \"kubernetes.io/projected/70209150-5da2-411c-ac05-641a336b6c12-kube-api-access-6hj55\") pod \"nova-metadata-0\" (UID: \"70209150-5da2-411c-ac05-641a336b6c12\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.023240 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eca2edbe-72d9-45b0-9445-b734f97b7ece-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "eca2edbe-72d9-45b0-9445-b734f97b7ece" (UID: "eca2edbe-72d9-45b0-9445-b734f97b7ece"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.031821 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eca2edbe-72d9-45b0-9445-b734f97b7ece-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.031857 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wk88z\" (UniqueName: \"kubernetes.io/projected/eca2edbe-72d9-45b0-9445-b734f97b7ece-kube-api-access-wk88z\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.031870 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/eca2edbe-72d9-45b0-9445-b734f97b7ece-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.098328 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.124289 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eca2edbe-72d9-45b0-9445-b734f97b7ece-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "eca2edbe-72d9-45b0-9445-b734f97b7ece" (UID: "eca2edbe-72d9-45b0-9445-b734f97b7ece"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.138763 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eca2edbe-72d9-45b0-9445-b734f97b7ece-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.169049 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eca2edbe-72d9-45b0-9445-b734f97b7ece-config-data" (OuterVolumeSpecName: "config-data") pod "eca2edbe-72d9-45b0-9445-b734f97b7ece" (UID: "eca2edbe-72d9-45b0-9445-b734f97b7ece"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.183451 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.240882 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f4c732a1-ca26-4d37-b769-f0c69283ef29-config-data\") pod \"f4c732a1-ca26-4d37-b769-f0c69283ef29\" (UID: \"f4c732a1-ca26-4d37-b769-f0c69283ef29\") " Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.241098 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfc7x\" (UniqueName: \"kubernetes.io/projected/f4c732a1-ca26-4d37-b769-f0c69283ef29-kube-api-access-cfc7x\") pod \"f4c732a1-ca26-4d37-b769-f0c69283ef29\" (UID: \"f4c732a1-ca26-4d37-b769-f0c69283ef29\") " Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.241304 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4c732a1-ca26-4d37-b769-f0c69283ef29-combined-ca-bundle\") pod \"f4c732a1-ca26-4d37-b769-f0c69283ef29\" (UID: \"f4c732a1-ca26-4d37-b769-f0c69283ef29\") " Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.241910 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eca2edbe-72d9-45b0-9445-b734f97b7ece-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.251379 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f4c732a1-ca26-4d37-b769-f0c69283ef29-kube-api-access-cfc7x" (OuterVolumeSpecName: "kube-api-access-cfc7x") pod "f4c732a1-ca26-4d37-b769-f0c69283ef29" (UID: "f4c732a1-ca26-4d37-b769-f0c69283ef29"). InnerVolumeSpecName "kube-api-access-cfc7x". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.292243 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4c732a1-ca26-4d37-b769-f0c69283ef29-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f4c732a1-ca26-4d37-b769-f0c69283ef29" (UID: "f4c732a1-ca26-4d37-b769-f0c69283ef29"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.299528 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.302142 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4c732a1-ca26-4d37-b769-f0c69283ef29-config-data" (OuterVolumeSpecName: "config-data") pod "f4c732a1-ca26-4d37-b769-f0c69283ef29" (UID: "f4c732a1-ca26-4d37-b769-f0c69283ef29"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:50 crc kubenswrapper[4558]: E0120 17:33:50.353327 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ca483fd25cf153fcad14ca9739754a6effd1115ea5c7b991da3c23aa4caa603c" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.359508 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f4c732a1-ca26-4d37-b769-f0c69283ef29-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.359536 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfc7x\" (UniqueName: \"kubernetes.io/projected/f4c732a1-ca26-4d37-b769-f0c69283ef29-kube-api-access-cfc7x\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.359547 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4c732a1-ca26-4d37-b769-f0c69283ef29-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:50 crc kubenswrapper[4558]: E0120 17:33:50.366683 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ca483fd25cf153fcad14ca9739754a6effd1115ea5c7b991da3c23aa4caa603c" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:33:50 crc kubenswrapper[4558]: E0120 17:33:50.375944 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ca483fd25cf153fcad14ca9739754a6effd1115ea5c7b991da3c23aa4caa603c" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:33:50 crc kubenswrapper[4558]: E0120 17:33:50.375996 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="37fd3160-ee6a-41a1-9c4b-260e72133f59" containerName="nova-cell0-conductor-conductor" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.376710 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"6a66de52-5f9c-46c6-aa54-8a0ba3288230","Type":"ContainerDied","Data":"4b143bb9dea569bfb66e69be82985991ccec7aef83b6118dc9d97737a6c4598f"} Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.376755 4558 scope.go:117] "RemoveContainer" containerID="915a019c132b2d4f4aeb77f2aacee6bc94ce4e5d6b372707956a6621db84cd38" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.376877 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.382928 4558 generic.go:334] "Generic (PLEG): container finished" podID="f4c732a1-ca26-4d37-b769-f0c69283ef29" containerID="56bc59ccee15107e85170fe2b5008d13a71e507e5eb9558d7798d0abd57ef3bf" exitCode=1 Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.383029 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.383430 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"f4c732a1-ca26-4d37-b769-f0c69283ef29","Type":"ContainerDied","Data":"56bc59ccee15107e85170fe2b5008d13a71e507e5eb9558d7798d0abd57ef3bf"} Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.383480 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"f4c732a1-ca26-4d37-b769-f0c69283ef29","Type":"ContainerDied","Data":"211619e0c4ffb8889b074381e1e07ad757346bf8c1a8548a8d8053ae5245dff5"} Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.395747 4558 generic.go:334] "Generic (PLEG): container finished" podID="eca2edbe-72d9-45b0-9445-b734f97b7ece" containerID="bb94cc5337340b1860cfc1f60ccc29ac0f8630ee7dc24e80e7ce2178442eadda" exitCode=0 Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.395846 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"eca2edbe-72d9-45b0-9445-b734f97b7ece","Type":"ContainerDied","Data":"bb94cc5337340b1860cfc1f60ccc29ac0f8630ee7dc24e80e7ce2178442eadda"} Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.395878 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"eca2edbe-72d9-45b0-9445-b734f97b7ece","Type":"ContainerDied","Data":"34d466471f290283494de163bbbbb8d5d698c67d3e1b567efea8750207a0ad33"} Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.395954 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.432905 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podUID="04a134bc-533d-490a-8544-61f705a6d4f2" containerName="nova-cell1-conductor-conductor" containerID="cri-o://2623aa795d9d97ceb168764d999b00e635d55fa28ca538052a877af229f57732" gracePeriod=30 Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.436883 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-b45ff9748-dd8kw" event={"ID":"0abcf7eb-7e84-4a8e-81e8-a397db63848b","Type":"ContainerStarted","Data":"68b9c7b951138f4587464c2bbf21370065674f896ef8d19de0af36cc2a194b4a"} Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.437124 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-b45ff9748-dd8kw" event={"ID":"0abcf7eb-7e84-4a8e-81e8-a397db63848b","Type":"ContainerStarted","Data":"036131de8a482756286de316af4df0fc95119e515057e70e07dc6e69e0faa490"} Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.437194 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/swift-proxy-b45ff9748-dd8kw" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.437209 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-b45ff9748-dd8kw" event={"ID":"0abcf7eb-7e84-4a8e-81e8-a397db63848b","Type":"ContainerStarted","Data":"39730bfaeec948fd4e69f6e00d93bc1cefea775bae6a5aff3b1683c00212afa8"} Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.437221 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/swift-proxy-b45ff9748-dd8kw" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.468707 
4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:33:50 crc kubenswrapper[4558]: W0120 17:33:50.469481 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod70209150_5da2_411c_ac05_641a336b6c12.slice/crio-1a961e4edc787aaa20f30df9c81a5db77bf6e2f57609110642779124e01e42f1 WatchSource:0}: Error finding container 1a961e4edc787aaa20f30df9c81a5db77bf6e2f57609110642779124e01e42f1: Status 404 returned error can't find the container with id 1a961e4edc787aaa20f30df9c81a5db77bf6e2f57609110642779124e01e42f1 Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.479919 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/swift-proxy-b45ff9748-dd8kw" podStartSLOduration=2.479897933 podStartE2EDuration="2.479897933s" podCreationTimestamp="2026-01-20 17:33:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:33:50.449523949 +0000 UTC m=+3124.209861917" watchObservedRunningTime="2026-01-20 17:33:50.479897933 +0000 UTC m=+3124.240235901" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.525373 4558 scope.go:117] "RemoveContainer" containerID="1099f7ebf3750cf92ad4dcca603aaebf0e6a74e93107a073aead4aee1f85dd17" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.566067 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.600401 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8fb66f31-6ee9-4ad8-8bd2-14e489b98591" path="/var/lib/kubelet/pods/8fb66f31-6ee9-4ad8-8bd2-14e489b98591/volumes" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.601072 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.609112 4558 scope.go:117] "RemoveContainer" containerID="56bc59ccee15107e85170fe2b5008d13a71e507e5eb9558d7798d0abd57ef3bf" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.610500 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:33:50 crc kubenswrapper[4558]: E0120 17:33:50.611040 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eca2edbe-72d9-45b0-9445-b734f97b7ece" containerName="sg-core" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.611060 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="eca2edbe-72d9-45b0-9445-b734f97b7ece" containerName="sg-core" Jan 20 17:33:50 crc kubenswrapper[4558]: E0120 17:33:50.611089 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eca2edbe-72d9-45b0-9445-b734f97b7ece" containerName="proxy-httpd" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.611095 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="eca2edbe-72d9-45b0-9445-b734f97b7ece" containerName="proxy-httpd" Jan 20 17:33:50 crc kubenswrapper[4558]: E0120 17:33:50.611114 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eca2edbe-72d9-45b0-9445-b734f97b7ece" containerName="ceilometer-notification-agent" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.611120 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="eca2edbe-72d9-45b0-9445-b734f97b7ece" containerName="ceilometer-notification-agent" Jan 20 17:33:50 crc kubenswrapper[4558]: 
E0120 17:33:50.611137 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eca2edbe-72d9-45b0-9445-b734f97b7ece" containerName="ceilometer-central-agent" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.611143 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="eca2edbe-72d9-45b0-9445-b734f97b7ece" containerName="ceilometer-central-agent" Jan 20 17:33:50 crc kubenswrapper[4558]: E0120 17:33:50.611155 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4c732a1-ca26-4d37-b769-f0c69283ef29" containerName="nova-scheduler-scheduler" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.611246 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4c732a1-ca26-4d37-b769-f0c69283ef29" containerName="nova-scheduler-scheduler" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.611436 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="eca2edbe-72d9-45b0-9445-b734f97b7ece" containerName="ceilometer-central-agent" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.611449 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="eca2edbe-72d9-45b0-9445-b734f97b7ece" containerName="ceilometer-notification-agent" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.611466 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="eca2edbe-72d9-45b0-9445-b734f97b7ece" containerName="sg-core" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.611480 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4c732a1-ca26-4d37-b769-f0c69283ef29" containerName="nova-scheduler-scheduler" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.611491 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="eca2edbe-72d9-45b0-9445-b734f97b7ece" containerName="proxy-httpd" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.612428 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.616523 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-scheduler-config-data" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.627246 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.634313 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.650690 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.667380 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.669880 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c0b9601-3d98-40d3-b02e-7804d7f2b216-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"6c0b9601-3d98-40d3-b02e-7804d7f2b216\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.670100 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c0b9601-3d98-40d3-b02e-7804d7f2b216-config-data\") pod \"nova-scheduler-0\" (UID: \"6c0b9601-3d98-40d3-b02e-7804d7f2b216\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.670197 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bjsl7\" (UniqueName: \"kubernetes.io/projected/6c0b9601-3d98-40d3-b02e-7804d7f2b216-kube-api-access-bjsl7\") pod \"nova-scheduler-0\" (UID: \"6c0b9601-3d98-40d3-b02e-7804d7f2b216\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.670316 4558 scope.go:117] "RemoveContainer" containerID="56bc59ccee15107e85170fe2b5008d13a71e507e5eb9558d7798d0abd57ef3bf" Jan 20 17:33:50 crc kubenswrapper[4558]: E0120 17:33:50.672195 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"56bc59ccee15107e85170fe2b5008d13a71e507e5eb9558d7798d0abd57ef3bf\": container with ID starting with 56bc59ccee15107e85170fe2b5008d13a71e507e5eb9558d7798d0abd57ef3bf not found: ID does not exist" containerID="56bc59ccee15107e85170fe2b5008d13a71e507e5eb9558d7798d0abd57ef3bf" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.672240 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"56bc59ccee15107e85170fe2b5008d13a71e507e5eb9558d7798d0abd57ef3bf"} err="failed to get container status \"56bc59ccee15107e85170fe2b5008d13a71e507e5eb9558d7798d0abd57ef3bf\": rpc error: code = NotFound desc = could not find container \"56bc59ccee15107e85170fe2b5008d13a71e507e5eb9558d7798d0abd57ef3bf\": container with ID starting with 56bc59ccee15107e85170fe2b5008d13a71e507e5eb9558d7798d0abd57ef3bf not found: ID does not exist" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.672271 4558 scope.go:117] "RemoveContainer" containerID="9a7331e32c5060ace879fa32c1a382d3ac8d9f2cefae695e7da910a50b371ff9" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 
17:33:50.702444 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.705672 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.708357 4558 scope.go:117] "RemoveContainer" containerID="279cc96ec167325242d88523084a02ee78c5305d2befa02401033a29b1ae9959" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.713186 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-api-config-data" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.743158 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.752739 4558 scope.go:117] "RemoveContainer" containerID="bb94cc5337340b1860cfc1f60ccc29ac0f8630ee7dc24e80e7ce2178442eadda" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.766887 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.772437 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56a29e7b-9a25-497a-9bee-52f6cd546e3c-config-data\") pod \"nova-api-0\" (UID: \"56a29e7b-9a25-497a-9bee-52f6cd546e3c\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.772564 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c0b9601-3d98-40d3-b02e-7804d7f2b216-config-data\") pod \"nova-scheduler-0\" (UID: \"6c0b9601-3d98-40d3-b02e-7804d7f2b216\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.772635 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/56a29e7b-9a25-497a-9bee-52f6cd546e3c-logs\") pod \"nova-api-0\" (UID: \"56a29e7b-9a25-497a-9bee-52f6cd546e3c\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.772741 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bjsl7\" (UniqueName: \"kubernetes.io/projected/6c0b9601-3d98-40d3-b02e-7804d7f2b216-kube-api-access-bjsl7\") pod \"nova-scheduler-0\" (UID: \"6c0b9601-3d98-40d3-b02e-7804d7f2b216\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.772835 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56a29e7b-9a25-497a-9bee-52f6cd546e3c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"56a29e7b-9a25-497a-9bee-52f6cd546e3c\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.772945 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c0b9601-3d98-40d3-b02e-7804d7f2b216-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"6c0b9601-3d98-40d3-b02e-7804d7f2b216\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.773054 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"kube-api-access-222zc\" (UniqueName: \"kubernetes.io/projected/56a29e7b-9a25-497a-9bee-52f6cd546e3c-kube-api-access-222zc\") pod \"nova-api-0\" (UID: \"56a29e7b-9a25-497a-9bee-52f6cd546e3c\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.790675 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.791181 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c0b9601-3d98-40d3-b02e-7804d7f2b216-config-data\") pod \"nova-scheduler-0\" (UID: \"6c0b9601-3d98-40d3-b02e-7804d7f2b216\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.790691 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c0b9601-3d98-40d3-b02e-7804d7f2b216-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"6c0b9601-3d98-40d3-b02e-7804d7f2b216\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.791554 4558 scope.go:117] "RemoveContainer" containerID="91409ae0d9ca451c965331387d6a76c89a4420be7021b37195b0d5002fe4ec86" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.793499 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.800177 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.800453 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.807737 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bjsl7\" (UniqueName: \"kubernetes.io/projected/6c0b9601-3d98-40d3-b02e-7804d7f2b216-kube-api-access-bjsl7\") pod \"nova-scheduler-0\" (UID: \"6c0b9601-3d98-40d3-b02e-7804d7f2b216\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.809541 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.842575 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-cell-mapping-b6m6v"] Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.852175 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell0-cell-mapping-b6m6v"] Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.856611 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:33:50 crc kubenswrapper[4558]: E0120 17:33:50.860246 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[combined-ca-bundle config-data kube-api-access-222zc logs], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack-kuttl-tests/nova-api-0" podUID="56a29e7b-9a25-497a-9bee-52f6cd546e3c" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.868322 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.869297 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.872381 4558 scope.go:117] "RemoveContainer" containerID="9a7331e32c5060ace879fa32c1a382d3ac8d9f2cefae695e7da910a50b371ff9" Jan 20 17:33:50 crc kubenswrapper[4558]: E0120 17:33:50.872794 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9a7331e32c5060ace879fa32c1a382d3ac8d9f2cefae695e7da910a50b371ff9\": container with ID starting with 9a7331e32c5060ace879fa32c1a382d3ac8d9f2cefae695e7da910a50b371ff9 not found: ID does not exist" containerID="9a7331e32c5060ace879fa32c1a382d3ac8d9f2cefae695e7da910a50b371ff9" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.872886 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9a7331e32c5060ace879fa32c1a382d3ac8d9f2cefae695e7da910a50b371ff9"} err="failed to get container status \"9a7331e32c5060ace879fa32c1a382d3ac8d9f2cefae695e7da910a50b371ff9\": rpc error: code = NotFound desc = could not find container \"9a7331e32c5060ace879fa32c1a382d3ac8d9f2cefae695e7da910a50b371ff9\": container with ID starting with 9a7331e32c5060ace879fa32c1a382d3ac8d9f2cefae695e7da910a50b371ff9 not found: ID does not exist" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.872958 4558 scope.go:117] "RemoveContainer" containerID="279cc96ec167325242d88523084a02ee78c5305d2befa02401033a29b1ae9959" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.874059 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qrxtk\" (UniqueName: \"kubernetes.io/projected/44287ea3-63fc-4ab2-8140-5bf616c3a7ff-kube-api-access-qrxtk\") pod \"ceilometer-0\" (UID: \"44287ea3-63fc-4ab2-8140-5bf616c3a7ff\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.874109 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56a29e7b-9a25-497a-9bee-52f6cd546e3c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"56a29e7b-9a25-497a-9bee-52f6cd546e3c\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.874214 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/44287ea3-63fc-4ab2-8140-5bf616c3a7ff-config-data\") pod \"ceilometer-0\" (UID: \"44287ea3-63fc-4ab2-8140-5bf616c3a7ff\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.874242 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-222zc\" (UniqueName: \"kubernetes.io/projected/56a29e7b-9a25-497a-9bee-52f6cd546e3c-kube-api-access-222zc\") pod \"nova-api-0\" (UID: \"56a29e7b-9a25-497a-9bee-52f6cd546e3c\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:33:50 crc kubenswrapper[4558]: E0120 17:33:50.874280 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"279cc96ec167325242d88523084a02ee78c5305d2befa02401033a29b1ae9959\": container with ID starting with 279cc96ec167325242d88523084a02ee78c5305d2befa02401033a29b1ae9959 not found: ID does not exist" containerID="279cc96ec167325242d88523084a02ee78c5305d2befa02401033a29b1ae9959" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.874296 4558 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/44287ea3-63fc-4ab2-8140-5bf616c3a7ff-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"44287ea3-63fc-4ab2-8140-5bf616c3a7ff\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.874312 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"279cc96ec167325242d88523084a02ee78c5305d2befa02401033a29b1ae9959"} err="failed to get container status \"279cc96ec167325242d88523084a02ee78c5305d2befa02401033a29b1ae9959\": rpc error: code = NotFound desc = could not find container \"279cc96ec167325242d88523084a02ee78c5305d2befa02401033a29b1ae9959\": container with ID starting with 279cc96ec167325242d88523084a02ee78c5305d2befa02401033a29b1ae9959 not found: ID does not exist" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.874341 4558 scope.go:117] "RemoveContainer" containerID="bb94cc5337340b1860cfc1f60ccc29ac0f8630ee7dc24e80e7ce2178442eadda" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.874325 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44287ea3-63fc-4ab2-8140-5bf616c3a7ff-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"44287ea3-63fc-4ab2-8140-5bf616c3a7ff\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.874584 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56a29e7b-9a25-497a-9bee-52f6cd546e3c-config-data\") pod \"nova-api-0\" (UID: \"56a29e7b-9a25-497a-9bee-52f6cd546e3c\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.874669 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/56a29e7b-9a25-497a-9bee-52f6cd546e3c-logs\") pod \"nova-api-0\" (UID: \"56a29e7b-9a25-497a-9bee-52f6cd546e3c\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.874839 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/44287ea3-63fc-4ab2-8140-5bf616c3a7ff-scripts\") pod \"ceilometer-0\" (UID: \"44287ea3-63fc-4ab2-8140-5bf616c3a7ff\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.874902 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/44287ea3-63fc-4ab2-8140-5bf616c3a7ff-run-httpd\") pod \"ceilometer-0\" (UID: \"44287ea3-63fc-4ab2-8140-5bf616c3a7ff\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.874978 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/44287ea3-63fc-4ab2-8140-5bf616c3a7ff-log-httpd\") pod \"ceilometer-0\" (UID: \"44287ea3-63fc-4ab2-8140-5bf616c3a7ff\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.875384 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/56a29e7b-9a25-497a-9bee-52f6cd546e3c-logs\") pod 
\"nova-api-0\" (UID: \"56a29e7b-9a25-497a-9bee-52f6cd546e3c\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:33:50 crc kubenswrapper[4558]: E0120 17:33:50.875537 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bb94cc5337340b1860cfc1f60ccc29ac0f8630ee7dc24e80e7ce2178442eadda\": container with ID starting with bb94cc5337340b1860cfc1f60ccc29ac0f8630ee7dc24e80e7ce2178442eadda not found: ID does not exist" containerID="bb94cc5337340b1860cfc1f60ccc29ac0f8630ee7dc24e80e7ce2178442eadda" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.875603 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bb94cc5337340b1860cfc1f60ccc29ac0f8630ee7dc24e80e7ce2178442eadda"} err="failed to get container status \"bb94cc5337340b1860cfc1f60ccc29ac0f8630ee7dc24e80e7ce2178442eadda\": rpc error: code = NotFound desc = could not find container \"bb94cc5337340b1860cfc1f60ccc29ac0f8630ee7dc24e80e7ce2178442eadda\": container with ID starting with bb94cc5337340b1860cfc1f60ccc29ac0f8630ee7dc24e80e7ce2178442eadda not found: ID does not exist" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.875656 4558 scope.go:117] "RemoveContainer" containerID="91409ae0d9ca451c965331387d6a76c89a4420be7021b37195b0d5002fe4ec86" Jan 20 17:33:50 crc kubenswrapper[4558]: E0120 17:33:50.875927 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"91409ae0d9ca451c965331387d6a76c89a4420be7021b37195b0d5002fe4ec86\": container with ID starting with 91409ae0d9ca451c965331387d6a76c89a4420be7021b37195b0d5002fe4ec86 not found: ID does not exist" containerID="91409ae0d9ca451c965331387d6a76c89a4420be7021b37195b0d5002fe4ec86" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.876001 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"91409ae0d9ca451c965331387d6a76c89a4420be7021b37195b0d5002fe4ec86"} err="failed to get container status \"91409ae0d9ca451c965331387d6a76c89a4420be7021b37195b0d5002fe4ec86\": rpc error: code = NotFound desc = could not find container \"91409ae0d9ca451c965331387d6a76c89a4420be7021b37195b0d5002fe4ec86\": container with ID starting with 91409ae0d9ca451c965331387d6a76c89a4420be7021b37195b0d5002fe4ec86 not found: ID does not exist" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.878857 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56a29e7b-9a25-497a-9bee-52f6cd546e3c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"56a29e7b-9a25-497a-9bee-52f6cd546e3c\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.879521 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56a29e7b-9a25-497a-9bee-52f6cd546e3c-config-data\") pod \"nova-api-0\" (UID: \"56a29e7b-9a25-497a-9bee-52f6cd546e3c\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.889901 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-222zc\" (UniqueName: \"kubernetes.io/projected/56a29e7b-9a25-497a-9bee-52f6cd546e3c-kube-api-access-222zc\") pod \"nova-api-0\" (UID: \"56a29e7b-9a25-497a-9bee-52f6cd546e3c\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.976597 4558 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qrxtk\" (UniqueName: \"kubernetes.io/projected/44287ea3-63fc-4ab2-8140-5bf616c3a7ff-kube-api-access-qrxtk\") pod \"ceilometer-0\" (UID: \"44287ea3-63fc-4ab2-8140-5bf616c3a7ff\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.977131 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/44287ea3-63fc-4ab2-8140-5bf616c3a7ff-config-data\") pod \"ceilometer-0\" (UID: \"44287ea3-63fc-4ab2-8140-5bf616c3a7ff\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.977303 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/44287ea3-63fc-4ab2-8140-5bf616c3a7ff-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"44287ea3-63fc-4ab2-8140-5bf616c3a7ff\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.977357 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44287ea3-63fc-4ab2-8140-5bf616c3a7ff-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"44287ea3-63fc-4ab2-8140-5bf616c3a7ff\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.977447 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/44287ea3-63fc-4ab2-8140-5bf616c3a7ff-scripts\") pod \"ceilometer-0\" (UID: \"44287ea3-63fc-4ab2-8140-5bf616c3a7ff\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.977475 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/44287ea3-63fc-4ab2-8140-5bf616c3a7ff-run-httpd\") pod \"ceilometer-0\" (UID: \"44287ea3-63fc-4ab2-8140-5bf616c3a7ff\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.977522 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/44287ea3-63fc-4ab2-8140-5bf616c3a7ff-log-httpd\") pod \"ceilometer-0\" (UID: \"44287ea3-63fc-4ab2-8140-5bf616c3a7ff\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.978150 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/44287ea3-63fc-4ab2-8140-5bf616c3a7ff-run-httpd\") pod \"ceilometer-0\" (UID: \"44287ea3-63fc-4ab2-8140-5bf616c3a7ff\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.978619 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/44287ea3-63fc-4ab2-8140-5bf616c3a7ff-log-httpd\") pod \"ceilometer-0\" (UID: \"44287ea3-63fc-4ab2-8140-5bf616c3a7ff\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.994064 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44287ea3-63fc-4ab2-8140-5bf616c3a7ff-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"44287ea3-63fc-4ab2-8140-5bf616c3a7ff\") " 
pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.999383 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/44287ea3-63fc-4ab2-8140-5bf616c3a7ff-scripts\") pod \"ceilometer-0\" (UID: \"44287ea3-63fc-4ab2-8140-5bf616c3a7ff\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.999741 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/44287ea3-63fc-4ab2-8140-5bf616c3a7ff-config-data\") pod \"ceilometer-0\" (UID: \"44287ea3-63fc-4ab2-8140-5bf616c3a7ff\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:33:50 crc kubenswrapper[4558]: I0120 17:33:50.999843 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qrxtk\" (UniqueName: \"kubernetes.io/projected/44287ea3-63fc-4ab2-8140-5bf616c3a7ff-kube-api-access-qrxtk\") pod \"ceilometer-0\" (UID: \"44287ea3-63fc-4ab2-8140-5bf616c3a7ff\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:33:51 crc kubenswrapper[4558]: I0120 17:33:51.000439 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/44287ea3-63fc-4ab2-8140-5bf616c3a7ff-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"44287ea3-63fc-4ab2-8140-5bf616c3a7ff\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:33:51 crc kubenswrapper[4558]: I0120 17:33:51.172086 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:33:51 crc kubenswrapper[4558]: I0120 17:33:51.416334 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:33:51 crc kubenswrapper[4558]: I0120 17:33:51.469022 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"70209150-5da2-411c-ac05-641a336b6c12","Type":"ContainerStarted","Data":"3591e8b056c12783c58a71fe1d084db8eb2a4555e329046872fd52062408894a"} Jan 20 17:33:51 crc kubenswrapper[4558]: I0120 17:33:51.469080 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"70209150-5da2-411c-ac05-641a336b6c12","Type":"ContainerStarted","Data":"ea4c18fc95b5ea8fbe3c18909d888ddca0ea90bf7d976f9bd51dcc8a70a069b0"} Jan 20 17:33:51 crc kubenswrapper[4558]: I0120 17:33:51.469099 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"70209150-5da2-411c-ac05-641a336b6c12","Type":"ContainerStarted","Data":"1a961e4edc787aaa20f30df9c81a5db77bf6e2f57609110642779124e01e42f1"} Jan 20 17:33:51 crc kubenswrapper[4558]: I0120 17:33:51.469251 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="70209150-5da2-411c-ac05-641a336b6c12" containerName="nova-metadata-log" containerID="cri-o://ea4c18fc95b5ea8fbe3c18909d888ddca0ea90bf7d976f9bd51dcc8a70a069b0" gracePeriod=30 Jan 20 17:33:51 crc kubenswrapper[4558]: I0120 17:33:51.471243 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="70209150-5da2-411c-ac05-641a336b6c12" containerName="nova-metadata-metadata" containerID="cri-o://3591e8b056c12783c58a71fe1d084db8eb2a4555e329046872fd52062408894a" gracePeriod=30 Jan 20 17:33:51 crc kubenswrapper[4558]: I0120 
17:33:51.493993 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-metadata-0" podStartSLOduration=2.49397487 podStartE2EDuration="2.49397487s" podCreationTimestamp="2026-01-20 17:33:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:33:51.489086748 +0000 UTC m=+3125.249424716" watchObservedRunningTime="2026-01-20 17:33:51.49397487 +0000 UTC m=+3125.254312837" Jan 20 17:33:51 crc kubenswrapper[4558]: I0120 17:33:51.525385 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"6c0b9601-3d98-40d3-b02e-7804d7f2b216","Type":"ContainerStarted","Data":"e2264acdb013d147464a57bb1cabbb71553fe9c8c47bf6a363b034f1a18909f8"} Jan 20 17:33:51 crc kubenswrapper[4558]: I0120 17:33:51.526389 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:33:51 crc kubenswrapper[4558]: I0120 17:33:51.562734 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:33:51 crc kubenswrapper[4558]: I0120 17:33:51.577685 4558 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 20 17:33:51 crc kubenswrapper[4558]: I0120 17:33:51.619493 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:33:51 crc kubenswrapper[4558]: I0120 17:33:51.708108 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56a29e7b-9a25-497a-9bee-52f6cd546e3c-config-data\") pod \"56a29e7b-9a25-497a-9bee-52f6cd546e3c\" (UID: \"56a29e7b-9a25-497a-9bee-52f6cd546e3c\") " Jan 20 17:33:51 crc kubenswrapper[4558]: I0120 17:33:51.708238 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-222zc\" (UniqueName: \"kubernetes.io/projected/56a29e7b-9a25-497a-9bee-52f6cd546e3c-kube-api-access-222zc\") pod \"56a29e7b-9a25-497a-9bee-52f6cd546e3c\" (UID: \"56a29e7b-9a25-497a-9bee-52f6cd546e3c\") " Jan 20 17:33:51 crc kubenswrapper[4558]: I0120 17:33:51.708271 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56a29e7b-9a25-497a-9bee-52f6cd546e3c-combined-ca-bundle\") pod \"56a29e7b-9a25-497a-9bee-52f6cd546e3c\" (UID: \"56a29e7b-9a25-497a-9bee-52f6cd546e3c\") " Jan 20 17:33:51 crc kubenswrapper[4558]: I0120 17:33:51.708353 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/56a29e7b-9a25-497a-9bee-52f6cd546e3c-logs\") pod \"56a29e7b-9a25-497a-9bee-52f6cd546e3c\" (UID: \"56a29e7b-9a25-497a-9bee-52f6cd546e3c\") " Jan 20 17:33:51 crc kubenswrapper[4558]: I0120 17:33:51.709352 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/56a29e7b-9a25-497a-9bee-52f6cd546e3c-logs" (OuterVolumeSpecName: "logs") pod "56a29e7b-9a25-497a-9bee-52f6cd546e3c" (UID: "56a29e7b-9a25-497a-9bee-52f6cd546e3c"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:33:51 crc kubenswrapper[4558]: I0120 17:33:51.717120 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56a29e7b-9a25-497a-9bee-52f6cd546e3c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "56a29e7b-9a25-497a-9bee-52f6cd546e3c" (UID: "56a29e7b-9a25-497a-9bee-52f6cd546e3c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:51 crc kubenswrapper[4558]: I0120 17:33:51.717320 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56a29e7b-9a25-497a-9bee-52f6cd546e3c-config-data" (OuterVolumeSpecName: "config-data") pod "56a29e7b-9a25-497a-9bee-52f6cd546e3c" (UID: "56a29e7b-9a25-497a-9bee-52f6cd546e3c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:51 crc kubenswrapper[4558]: I0120 17:33:51.717393 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/56a29e7b-9a25-497a-9bee-52f6cd546e3c-kube-api-access-222zc" (OuterVolumeSpecName: "kube-api-access-222zc") pod "56a29e7b-9a25-497a-9bee-52f6cd546e3c" (UID: "56a29e7b-9a25-497a-9bee-52f6cd546e3c"). InnerVolumeSpecName "kube-api-access-222zc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:33:51 crc kubenswrapper[4558]: I0120 17:33:51.811450 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56a29e7b-9a25-497a-9bee-52f6cd546e3c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:51 crc kubenswrapper[4558]: I0120 17:33:51.812362 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/56a29e7b-9a25-497a-9bee-52f6cd546e3c-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:51 crc kubenswrapper[4558]: I0120 17:33:51.812441 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56a29e7b-9a25-497a-9bee-52f6cd546e3c-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:51 crc kubenswrapper[4558]: I0120 17:33:51.812496 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-222zc\" (UniqueName: \"kubernetes.io/projected/56a29e7b-9a25-497a-9bee-52f6cd546e3c-kube-api-access-222zc\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:52 crc kubenswrapper[4558]: I0120 17:33:52.062180 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell0-cell-mapping-ml6gb"] Jan 20 17:33:52 crc kubenswrapper[4558]: I0120 17:33:52.063535 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-ml6gb" Jan 20 17:33:52 crc kubenswrapper[4558]: I0120 17:33:52.065468 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-manage-config-data" Jan 20 17:33:52 crc kubenswrapper[4558]: I0120 17:33:52.067431 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-manage-scripts" Jan 20 17:33:52 crc kubenswrapper[4558]: I0120 17:33:52.087530 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-cell-mapping-ml6gb"] Jan 20 17:33:52 crc kubenswrapper[4558]: I0120 17:33:52.119779 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gb2b7\" (UniqueName: \"kubernetes.io/projected/d656fa11-4336-4ddf-a168-af6f682bfaab-kube-api-access-gb2b7\") pod \"nova-cell0-cell-mapping-ml6gb\" (UID: \"d656fa11-4336-4ddf-a168-af6f682bfaab\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-ml6gb" Jan 20 17:33:52 crc kubenswrapper[4558]: I0120 17:33:52.119846 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d656fa11-4336-4ddf-a168-af6f682bfaab-config-data\") pod \"nova-cell0-cell-mapping-ml6gb\" (UID: \"d656fa11-4336-4ddf-a168-af6f682bfaab\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-ml6gb" Jan 20 17:33:52 crc kubenswrapper[4558]: I0120 17:33:52.119922 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d656fa11-4336-4ddf-a168-af6f682bfaab-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-ml6gb\" (UID: \"d656fa11-4336-4ddf-a168-af6f682bfaab\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-ml6gb" Jan 20 17:33:52 crc kubenswrapper[4558]: I0120 17:33:52.119990 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d656fa11-4336-4ddf-a168-af6f682bfaab-scripts\") pod \"nova-cell0-cell-mapping-ml6gb\" (UID: \"d656fa11-4336-4ddf-a168-af6f682bfaab\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-ml6gb" Jan 20 17:33:52 crc kubenswrapper[4558]: I0120 17:33:52.205537 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:33:52 crc kubenswrapper[4558]: I0120 17:33:52.221806 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d656fa11-4336-4ddf-a168-af6f682bfaab-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-ml6gb\" (UID: \"d656fa11-4336-4ddf-a168-af6f682bfaab\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-ml6gb" Jan 20 17:33:52 crc kubenswrapper[4558]: I0120 17:33:52.221910 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d656fa11-4336-4ddf-a168-af6f682bfaab-scripts\") pod \"nova-cell0-cell-mapping-ml6gb\" (UID: \"d656fa11-4336-4ddf-a168-af6f682bfaab\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-ml6gb" Jan 20 17:33:52 crc kubenswrapper[4558]: I0120 17:33:52.221986 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gb2b7\" (UniqueName: \"kubernetes.io/projected/d656fa11-4336-4ddf-a168-af6f682bfaab-kube-api-access-gb2b7\") pod 
\"nova-cell0-cell-mapping-ml6gb\" (UID: \"d656fa11-4336-4ddf-a168-af6f682bfaab\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-ml6gb" Jan 20 17:33:52 crc kubenswrapper[4558]: I0120 17:33:52.222019 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d656fa11-4336-4ddf-a168-af6f682bfaab-config-data\") pod \"nova-cell0-cell-mapping-ml6gb\" (UID: \"d656fa11-4336-4ddf-a168-af6f682bfaab\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-ml6gb" Jan 20 17:33:52 crc kubenswrapper[4558]: I0120 17:33:52.226447 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d656fa11-4336-4ddf-a168-af6f682bfaab-scripts\") pod \"nova-cell0-cell-mapping-ml6gb\" (UID: \"d656fa11-4336-4ddf-a168-af6f682bfaab\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-ml6gb" Jan 20 17:33:52 crc kubenswrapper[4558]: I0120 17:33:52.226480 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d656fa11-4336-4ddf-a168-af6f682bfaab-config-data\") pod \"nova-cell0-cell-mapping-ml6gb\" (UID: \"d656fa11-4336-4ddf-a168-af6f682bfaab\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-ml6gb" Jan 20 17:33:52 crc kubenswrapper[4558]: I0120 17:33:52.226879 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d656fa11-4336-4ddf-a168-af6f682bfaab-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-ml6gb\" (UID: \"d656fa11-4336-4ddf-a168-af6f682bfaab\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-ml6gb" Jan 20 17:33:52 crc kubenswrapper[4558]: I0120 17:33:52.235256 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gb2b7\" (UniqueName: \"kubernetes.io/projected/d656fa11-4336-4ddf-a168-af6f682bfaab-kube-api-access-gb2b7\") pod \"nova-cell0-cell-mapping-ml6gb\" (UID: \"d656fa11-4336-4ddf-a168-af6f682bfaab\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-ml6gb" Jan 20 17:33:52 crc kubenswrapper[4558]: I0120 17:33:52.379064 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-ml6gb" Jan 20 17:33:52 crc kubenswrapper[4558]: I0120 17:33:52.538610 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"6c0b9601-3d98-40d3-b02e-7804d7f2b216","Type":"ContainerStarted","Data":"c8d5bddc3ef41b660c5503f880e165c59af93fc4ac03e6716b78a0cee338c9f0"} Jan 20 17:33:52 crc kubenswrapper[4558]: I0120 17:33:52.539141 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="6c0b9601-3d98-40d3-b02e-7804d7f2b216" containerName="nova-scheduler-scheduler" containerID="cri-o://c8d5bddc3ef41b660c5503f880e165c59af93fc4ac03e6716b78a0cee338c9f0" gracePeriod=30 Jan 20 17:33:52 crc kubenswrapper[4558]: I0120 17:33:52.546710 4558 generic.go:334] "Generic (PLEG): container finished" podID="70209150-5da2-411c-ac05-641a336b6c12" containerID="ea4c18fc95b5ea8fbe3c18909d888ddca0ea90bf7d976f9bd51dcc8a70a069b0" exitCode=143 Jan 20 17:33:52 crc kubenswrapper[4558]: I0120 17:33:52.546774 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"70209150-5da2-411c-ac05-641a336b6c12","Type":"ContainerDied","Data":"ea4c18fc95b5ea8fbe3c18909d888ddca0ea90bf7d976f9bd51dcc8a70a069b0"} Jan 20 17:33:52 crc kubenswrapper[4558]: I0120 17:33:52.550793 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:33:52 crc kubenswrapper[4558]: I0120 17:33:52.550908 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"44287ea3-63fc-4ab2-8140-5bf616c3a7ff","Type":"ContainerStarted","Data":"faebabce8a842ab2e7002067a6ce76f73862c2812a1910727df13575c8eb1b5b"} Jan 20 17:33:52 crc kubenswrapper[4558]: I0120 17:33:52.550930 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"44287ea3-63fc-4ab2-8140-5bf616c3a7ff","Type":"ContainerStarted","Data":"c6d16525a5743ded2bf43287537c44ad70ca52a310f4a5166535b316b655facd"} Jan 20 17:33:52 crc kubenswrapper[4558]: I0120 17:33:52.561796 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-scheduler-0" podStartSLOduration=2.561782401 podStartE2EDuration="2.561782401s" podCreationTimestamp="2026-01-20 17:33:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:33:52.560419547 +0000 UTC m=+3126.320757514" watchObservedRunningTime="2026-01-20 17:33:52.561782401 +0000 UTC m=+3126.322120367" Jan 20 17:33:52 crc kubenswrapper[4558]: I0120 17:33:52.585178 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6a66de52-5f9c-46c6-aa54-8a0ba3288230" path="/var/lib/kubelet/pods/6a66de52-5f9c-46c6-aa54-8a0ba3288230/volumes" Jan 20 17:33:52 crc kubenswrapper[4558]: I0120 17:33:52.585807 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ca7c35ef-9af8-452c-800d-89bfa63317af" path="/var/lib/kubelet/pods/ca7c35ef-9af8-452c-800d-89bfa63317af/volumes" Jan 20 17:33:52 crc kubenswrapper[4558]: I0120 17:33:52.596040 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eca2edbe-72d9-45b0-9445-b734f97b7ece" path="/var/lib/kubelet/pods/eca2edbe-72d9-45b0-9445-b734f97b7ece/volumes" Jan 20 17:33:52 crc kubenswrapper[4558]: I0120 17:33:52.596805 4558 kubelet_volumes.go:163] 
"Cleaned up orphaned pod volumes dir" podUID="f4c732a1-ca26-4d37-b769-f0c69283ef29" path="/var/lib/kubelet/pods/f4c732a1-ca26-4d37-b769-f0c69283ef29/volumes" Jan 20 17:33:52 crc kubenswrapper[4558]: I0120 17:33:52.639104 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:33:52 crc kubenswrapper[4558]: I0120 17:33:52.644204 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:33:52 crc kubenswrapper[4558]: I0120 17:33:52.671514 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:33:52 crc kubenswrapper[4558]: I0120 17:33:52.690144 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:33:52 crc kubenswrapper[4558]: I0120 17:33:52.700232 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:33:52 crc kubenswrapper[4558]: I0120 17:33:52.709647 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-api-config-data" Jan 20 17:33:52 crc kubenswrapper[4558]: I0120 17:33:52.709971 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-public-svc" Jan 20 17:33:52 crc kubenswrapper[4558]: I0120 17:33:52.710575 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-internal-svc" Jan 20 17:33:52 crc kubenswrapper[4558]: I0120 17:33:52.736203 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ff5bfe3c-249e-4a0a-9600-9b6cae433075-public-tls-certs\") pod \"nova-api-0\" (UID: \"ff5bfe3c-249e-4a0a-9600-9b6cae433075\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:33:52 crc kubenswrapper[4558]: I0120 17:33:52.736269 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff5bfe3c-249e-4a0a-9600-9b6cae433075-config-data\") pod \"nova-api-0\" (UID: \"ff5bfe3c-249e-4a0a-9600-9b6cae433075\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:33:52 crc kubenswrapper[4558]: I0120 17:33:52.736337 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff5bfe3c-249e-4a0a-9600-9b6cae433075-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"ff5bfe3c-249e-4a0a-9600-9b6cae433075\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:33:52 crc kubenswrapper[4558]: I0120 17:33:52.736387 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ff5bfe3c-249e-4a0a-9600-9b6cae433075-logs\") pod \"nova-api-0\" (UID: \"ff5bfe3c-249e-4a0a-9600-9b6cae433075\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:33:52 crc kubenswrapper[4558]: I0120 17:33:52.736410 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ff5bfe3c-249e-4a0a-9600-9b6cae433075-internal-tls-certs\") pod \"nova-api-0\" (UID: \"ff5bfe3c-249e-4a0a-9600-9b6cae433075\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:33:52 crc kubenswrapper[4558]: I0120 17:33:52.736446 4558 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gqp84\" (UniqueName: \"kubernetes.io/projected/ff5bfe3c-249e-4a0a-9600-9b6cae433075-kube-api-access-gqp84\") pod \"nova-api-0\" (UID: \"ff5bfe3c-249e-4a0a-9600-9b6cae433075\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:33:52 crc kubenswrapper[4558]: I0120 17:33:52.741527 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:33:52 crc kubenswrapper[4558]: I0120 17:33:52.839388 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff5bfe3c-249e-4a0a-9600-9b6cae433075-config-data\") pod \"nova-api-0\" (UID: \"ff5bfe3c-249e-4a0a-9600-9b6cae433075\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:33:52 crc kubenswrapper[4558]: I0120 17:33:52.839514 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff5bfe3c-249e-4a0a-9600-9b6cae433075-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"ff5bfe3c-249e-4a0a-9600-9b6cae433075\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:33:52 crc kubenswrapper[4558]: I0120 17:33:52.839614 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ff5bfe3c-249e-4a0a-9600-9b6cae433075-logs\") pod \"nova-api-0\" (UID: \"ff5bfe3c-249e-4a0a-9600-9b6cae433075\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:33:52 crc kubenswrapper[4558]: I0120 17:33:52.839663 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ff5bfe3c-249e-4a0a-9600-9b6cae433075-internal-tls-certs\") pod \"nova-api-0\" (UID: \"ff5bfe3c-249e-4a0a-9600-9b6cae433075\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:33:52 crc kubenswrapper[4558]: I0120 17:33:52.839725 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gqp84\" (UniqueName: \"kubernetes.io/projected/ff5bfe3c-249e-4a0a-9600-9b6cae433075-kube-api-access-gqp84\") pod \"nova-api-0\" (UID: \"ff5bfe3c-249e-4a0a-9600-9b6cae433075\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:33:52 crc kubenswrapper[4558]: I0120 17:33:52.839810 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ff5bfe3c-249e-4a0a-9600-9b6cae433075-public-tls-certs\") pod \"nova-api-0\" (UID: \"ff5bfe3c-249e-4a0a-9600-9b6cae433075\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:33:52 crc kubenswrapper[4558]: I0120 17:33:52.840461 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ff5bfe3c-249e-4a0a-9600-9b6cae433075-logs\") pod \"nova-api-0\" (UID: \"ff5bfe3c-249e-4a0a-9600-9b6cae433075\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:33:52 crc kubenswrapper[4558]: I0120 17:33:52.846633 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff5bfe3c-249e-4a0a-9600-9b6cae433075-config-data\") pod \"nova-api-0\" (UID: \"ff5bfe3c-249e-4a0a-9600-9b6cae433075\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:33:52 crc kubenswrapper[4558]: I0120 17:33:52.847662 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/ff5bfe3c-249e-4a0a-9600-9b6cae433075-internal-tls-certs\") pod \"nova-api-0\" (UID: \"ff5bfe3c-249e-4a0a-9600-9b6cae433075\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:33:52 crc kubenswrapper[4558]: I0120 17:33:52.848482 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff5bfe3c-249e-4a0a-9600-9b6cae433075-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"ff5bfe3c-249e-4a0a-9600-9b6cae433075\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:33:52 crc kubenswrapper[4558]: I0120 17:33:52.848865 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ff5bfe3c-249e-4a0a-9600-9b6cae433075-public-tls-certs\") pod \"nova-api-0\" (UID: \"ff5bfe3c-249e-4a0a-9600-9b6cae433075\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:33:52 crc kubenswrapper[4558]: I0120 17:33:52.867400 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gqp84\" (UniqueName: \"kubernetes.io/projected/ff5bfe3c-249e-4a0a-9600-9b6cae433075-kube-api-access-gqp84\") pod \"nova-api-0\" (UID: \"ff5bfe3c-249e-4a0a-9600-9b6cae433075\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:33:52 crc kubenswrapper[4558]: I0120 17:33:52.955510 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-cell-mapping-ml6gb"] Jan 20 17:33:52 crc kubenswrapper[4558]: W0120 17:33:52.961191 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd656fa11_4336_4ddf_a168_af6f682bfaab.slice/crio-ac334f99e319222acf21ff42f7f65655a438c507ccd82f37e0c5cfabfca5c7a3 WatchSource:0}: Error finding container ac334f99e319222acf21ff42f7f65655a438c507ccd82f37e0c5cfabfca5c7a3: Status 404 returned error can't find the container with id ac334f99e319222acf21ff42f7f65655a438c507ccd82f37e0c5cfabfca5c7a3 Jan 20 17:33:53 crc kubenswrapper[4558]: I0120 17:33:53.026104 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:33:53 crc kubenswrapper[4558]: I0120 17:33:53.523990 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:33:53 crc kubenswrapper[4558]: I0120 17:33:53.566738 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-ml6gb" event={"ID":"d656fa11-4336-4ddf-a168-af6f682bfaab","Type":"ContainerStarted","Data":"50d26ce4c22ef55a6ba6f240018a6850524c10427a08e374734fa5e7aa9279db"} Jan 20 17:33:53 crc kubenswrapper[4558]: I0120 17:33:53.566788 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-ml6gb" event={"ID":"d656fa11-4336-4ddf-a168-af6f682bfaab","Type":"ContainerStarted","Data":"ac334f99e319222acf21ff42f7f65655a438c507ccd82f37e0c5cfabfca5c7a3"} Jan 20 17:33:53 crc kubenswrapper[4558]: I0120 17:33:53.574576 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"44287ea3-63fc-4ab2-8140-5bf616c3a7ff","Type":"ContainerStarted","Data":"f59292330b9f8b7139ba7ca90d368c82d20ac2bd0baeea4553498e6095afac6b"} Jan 20 17:33:53 crc kubenswrapper[4558]: I0120 17:33:53.583496 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"ff5bfe3c-249e-4a0a-9600-9b6cae433075","Type":"ContainerStarted","Data":"bd875fb853f089ad2863714996457b47ca4639af4e5573ab5e83cbd45f418450"} Jan 20 17:33:53 crc kubenswrapper[4558]: I0120 17:33:53.596335 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-ml6gb" podStartSLOduration=1.5963157620000001 podStartE2EDuration="1.596315762s" podCreationTimestamp="2026-01-20 17:33:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:33:53.580221979 +0000 UTC m=+3127.340559946" watchObservedRunningTime="2026-01-20 17:33:53.596315762 +0000 UTC m=+3127.356653718" Jan 20 17:33:53 crc kubenswrapper[4558]: I0120 17:33:53.683110 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:33:54 crc kubenswrapper[4558]: I0120 17:33:54.055744 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/swift-proxy-b45ff9748-dd8kw" Jan 20 17:33:54 crc kubenswrapper[4558]: I0120 17:33:54.483884 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:33:54 crc kubenswrapper[4558]: I0120 17:33:54.580923 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c0b9601-3d98-40d3-b02e-7804d7f2b216-config-data\") pod \"6c0b9601-3d98-40d3-b02e-7804d7f2b216\" (UID: \"6c0b9601-3d98-40d3-b02e-7804d7f2b216\") " Jan 20 17:33:54 crc kubenswrapper[4558]: I0120 17:33:54.581377 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bjsl7\" (UniqueName: \"kubernetes.io/projected/6c0b9601-3d98-40d3-b02e-7804d7f2b216-kube-api-access-bjsl7\") pod \"6c0b9601-3d98-40d3-b02e-7804d7f2b216\" (UID: \"6c0b9601-3d98-40d3-b02e-7804d7f2b216\") " Jan 20 17:33:54 crc kubenswrapper[4558]: I0120 17:33:54.581579 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c0b9601-3d98-40d3-b02e-7804d7f2b216-combined-ca-bundle\") pod \"6c0b9601-3d98-40d3-b02e-7804d7f2b216\" (UID: \"6c0b9601-3d98-40d3-b02e-7804d7f2b216\") " Jan 20 17:33:54 crc kubenswrapper[4558]: I0120 17:33:54.588141 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6c0b9601-3d98-40d3-b02e-7804d7f2b216-kube-api-access-bjsl7" (OuterVolumeSpecName: "kube-api-access-bjsl7") pod "6c0b9601-3d98-40d3-b02e-7804d7f2b216" (UID: "6c0b9601-3d98-40d3-b02e-7804d7f2b216"). InnerVolumeSpecName "kube-api-access-bjsl7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:33:54 crc kubenswrapper[4558]: I0120 17:33:54.603701 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="56a29e7b-9a25-497a-9bee-52f6cd546e3c" path="/var/lib/kubelet/pods/56a29e7b-9a25-497a-9bee-52f6cd546e3c/volumes" Jan 20 17:33:54 crc kubenswrapper[4558]: I0120 17:33:54.616849 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c0b9601-3d98-40d3-b02e-7804d7f2b216-config-data" (OuterVolumeSpecName: "config-data") pod "6c0b9601-3d98-40d3-b02e-7804d7f2b216" (UID: "6c0b9601-3d98-40d3-b02e-7804d7f2b216"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:54 crc kubenswrapper[4558]: I0120 17:33:54.617609 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c0b9601-3d98-40d3-b02e-7804d7f2b216-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6c0b9601-3d98-40d3-b02e-7804d7f2b216" (UID: "6c0b9601-3d98-40d3-b02e-7804d7f2b216"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:54 crc kubenswrapper[4558]: I0120 17:33:54.635569 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="ff5bfe3c-249e-4a0a-9600-9b6cae433075" containerName="nova-api-log" containerID="cri-o://51b6ee77ce9202ee0b83dad72a75cea201c87eed8b16ec9da84319857f9268e8" gracePeriod=30 Jan 20 17:33:54 crc kubenswrapper[4558]: I0120 17:33:54.636078 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="ff5bfe3c-249e-4a0a-9600-9b6cae433075" containerName="nova-api-api" containerID="cri-o://08f98eafe13109b8c088124509e5d0e568ef173e15db9455289ca6166e622060" gracePeriod=30 Jan 20 17:33:54 crc kubenswrapper[4558]: I0120 17:33:54.646683 4558 generic.go:334] "Generic (PLEG): container finished" podID="6c0b9601-3d98-40d3-b02e-7804d7f2b216" containerID="c8d5bddc3ef41b660c5503f880e165c59af93fc4ac03e6716b78a0cee338c9f0" exitCode=1 Jan 20 17:33:54 crc kubenswrapper[4558]: I0120 17:33:54.647576 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:33:54 crc kubenswrapper[4558]: I0120 17:33:54.663655 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-api-0" podStartSLOduration=2.663636898 podStartE2EDuration="2.663636898s" podCreationTimestamp="2026-01-20 17:33:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:33:54.653869471 +0000 UTC m=+3128.414207438" watchObservedRunningTime="2026-01-20 17:33:54.663636898 +0000 UTC m=+3128.423974865" Jan 20 17:33:54 crc kubenswrapper[4558]: I0120 17:33:54.679823 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"44287ea3-63fc-4ab2-8140-5bf616c3a7ff","Type":"ContainerStarted","Data":"e00823438a3d8284744c0067c4c4a1fe27184f085f878c30569dbc4f58bc8bde"} Jan 20 17:33:54 crc kubenswrapper[4558]: I0120 17:33:54.679859 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"ff5bfe3c-249e-4a0a-9600-9b6cae433075","Type":"ContainerStarted","Data":"08f98eafe13109b8c088124509e5d0e568ef173e15db9455289ca6166e622060"} Jan 20 17:33:54 crc kubenswrapper[4558]: I0120 17:33:54.679872 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"ff5bfe3c-249e-4a0a-9600-9b6cae433075","Type":"ContainerStarted","Data":"51b6ee77ce9202ee0b83dad72a75cea201c87eed8b16ec9da84319857f9268e8"} Jan 20 17:33:54 crc kubenswrapper[4558]: I0120 17:33:54.679886 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"6c0b9601-3d98-40d3-b02e-7804d7f2b216","Type":"ContainerDied","Data":"c8d5bddc3ef41b660c5503f880e165c59af93fc4ac03e6716b78a0cee338c9f0"} Jan 20 17:33:54 crc kubenswrapper[4558]: I0120 17:33:54.679909 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"6c0b9601-3d98-40d3-b02e-7804d7f2b216","Type":"ContainerDied","Data":"e2264acdb013d147464a57bb1cabbb71553fe9c8c47bf6a363b034f1a18909f8"} Jan 20 17:33:54 crc kubenswrapper[4558]: I0120 17:33:54.679927 4558 scope.go:117] "RemoveContainer" containerID="c8d5bddc3ef41b660c5503f880e165c59af93fc4ac03e6716b78a0cee338c9f0" Jan 20 17:33:54 crc kubenswrapper[4558]: I0120 
17:33:54.684495 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c0b9601-3d98-40d3-b02e-7804d7f2b216-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:54 crc kubenswrapper[4558]: I0120 17:33:54.684521 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bjsl7\" (UniqueName: \"kubernetes.io/projected/6c0b9601-3d98-40d3-b02e-7804d7f2b216-kube-api-access-bjsl7\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:54 crc kubenswrapper[4558]: I0120 17:33:54.684534 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c0b9601-3d98-40d3-b02e-7804d7f2b216-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:54 crc kubenswrapper[4558]: I0120 17:33:54.704679 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:33:54 crc kubenswrapper[4558]: I0120 17:33:54.716728 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:33:54 crc kubenswrapper[4558]: I0120 17:33:54.719273 4558 scope.go:117] "RemoveContainer" containerID="c8d5bddc3ef41b660c5503f880e165c59af93fc4ac03e6716b78a0cee338c9f0" Jan 20 17:33:54 crc kubenswrapper[4558]: E0120 17:33:54.723236 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c8d5bddc3ef41b660c5503f880e165c59af93fc4ac03e6716b78a0cee338c9f0\": container with ID starting with c8d5bddc3ef41b660c5503f880e165c59af93fc4ac03e6716b78a0cee338c9f0 not found: ID does not exist" containerID="c8d5bddc3ef41b660c5503f880e165c59af93fc4ac03e6716b78a0cee338c9f0" Jan 20 17:33:54 crc kubenswrapper[4558]: I0120 17:33:54.723263 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c8d5bddc3ef41b660c5503f880e165c59af93fc4ac03e6716b78a0cee338c9f0"} err="failed to get container status \"c8d5bddc3ef41b660c5503f880e165c59af93fc4ac03e6716b78a0cee338c9f0\": rpc error: code = NotFound desc = could not find container \"c8d5bddc3ef41b660c5503f880e165c59af93fc4ac03e6716b78a0cee338c9f0\": container with ID starting with c8d5bddc3ef41b660c5503f880e165c59af93fc4ac03e6716b78a0cee338c9f0 not found: ID does not exist" Jan 20 17:33:54 crc kubenswrapper[4558]: I0120 17:33:54.723606 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:33:54 crc kubenswrapper[4558]: E0120 17:33:54.724016 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c0b9601-3d98-40d3-b02e-7804d7f2b216" containerName="nova-scheduler-scheduler" Jan 20 17:33:54 crc kubenswrapper[4558]: I0120 17:33:54.724033 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c0b9601-3d98-40d3-b02e-7804d7f2b216" containerName="nova-scheduler-scheduler" Jan 20 17:33:54 crc kubenswrapper[4558]: I0120 17:33:54.724206 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c0b9601-3d98-40d3-b02e-7804d7f2b216" containerName="nova-scheduler-scheduler" Jan 20 17:33:54 crc kubenswrapper[4558]: I0120 17:33:54.726604 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:33:54 crc kubenswrapper[4558]: I0120 17:33:54.729655 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-scheduler-config-data" Jan 20 17:33:54 crc kubenswrapper[4558]: I0120 17:33:54.732659 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:33:54 crc kubenswrapper[4558]: I0120 17:33:54.786835 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wxg64\" (UniqueName: \"kubernetes.io/projected/3481546b-c57f-43c9-92f9-ffcbc3af0d8f-kube-api-access-wxg64\") pod \"nova-scheduler-0\" (UID: \"3481546b-c57f-43c9-92f9-ffcbc3af0d8f\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:33:54 crc kubenswrapper[4558]: I0120 17:33:54.786891 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3481546b-c57f-43c9-92f9-ffcbc3af0d8f-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"3481546b-c57f-43c9-92f9-ffcbc3af0d8f\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:33:54 crc kubenswrapper[4558]: I0120 17:33:54.787002 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3481546b-c57f-43c9-92f9-ffcbc3af0d8f-config-data\") pod \"nova-scheduler-0\" (UID: \"3481546b-c57f-43c9-92f9-ffcbc3af0d8f\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:33:54 crc kubenswrapper[4558]: I0120 17:33:54.904832 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wxg64\" (UniqueName: \"kubernetes.io/projected/3481546b-c57f-43c9-92f9-ffcbc3af0d8f-kube-api-access-wxg64\") pod \"nova-scheduler-0\" (UID: \"3481546b-c57f-43c9-92f9-ffcbc3af0d8f\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:33:54 crc kubenswrapper[4558]: I0120 17:33:54.904919 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3481546b-c57f-43c9-92f9-ffcbc3af0d8f-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"3481546b-c57f-43c9-92f9-ffcbc3af0d8f\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:33:54 crc kubenswrapper[4558]: I0120 17:33:54.905090 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3481546b-c57f-43c9-92f9-ffcbc3af0d8f-config-data\") pod \"nova-scheduler-0\" (UID: \"3481546b-c57f-43c9-92f9-ffcbc3af0d8f\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:33:54 crc kubenswrapper[4558]: I0120 17:33:54.926867 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3481546b-c57f-43c9-92f9-ffcbc3af0d8f-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"3481546b-c57f-43c9-92f9-ffcbc3af0d8f\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:33:54 crc kubenswrapper[4558]: I0120 17:33:54.928125 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3481546b-c57f-43c9-92f9-ffcbc3af0d8f-config-data\") pod \"nova-scheduler-0\" (UID: \"3481546b-c57f-43c9-92f9-ffcbc3af0d8f\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:33:54 crc kubenswrapper[4558]: I0120 17:33:54.949615 
4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wxg64\" (UniqueName: \"kubernetes.io/projected/3481546b-c57f-43c9-92f9-ffcbc3af0d8f-kube-api-access-wxg64\") pod \"nova-scheduler-0\" (UID: \"3481546b-c57f-43c9-92f9-ffcbc3af0d8f\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:33:55 crc kubenswrapper[4558]: I0120 17:33:55.043246 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:33:55 crc kubenswrapper[4558]: I0120 17:33:55.100479 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:33:55 crc kubenswrapper[4558]: I0120 17:33:55.100544 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:33:55 crc kubenswrapper[4558]: I0120 17:33:55.220480 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:33:55 crc kubenswrapper[4558]: I0120 17:33:55.320122 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gqp84\" (UniqueName: \"kubernetes.io/projected/ff5bfe3c-249e-4a0a-9600-9b6cae433075-kube-api-access-gqp84\") pod \"ff5bfe3c-249e-4a0a-9600-9b6cae433075\" (UID: \"ff5bfe3c-249e-4a0a-9600-9b6cae433075\") " Jan 20 17:33:55 crc kubenswrapper[4558]: I0120 17:33:55.320357 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ff5bfe3c-249e-4a0a-9600-9b6cae433075-logs\") pod \"ff5bfe3c-249e-4a0a-9600-9b6cae433075\" (UID: \"ff5bfe3c-249e-4a0a-9600-9b6cae433075\") " Jan 20 17:33:55 crc kubenswrapper[4558]: I0120 17:33:55.320393 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff5bfe3c-249e-4a0a-9600-9b6cae433075-combined-ca-bundle\") pod \"ff5bfe3c-249e-4a0a-9600-9b6cae433075\" (UID: \"ff5bfe3c-249e-4a0a-9600-9b6cae433075\") " Jan 20 17:33:55 crc kubenswrapper[4558]: I0120 17:33:55.320494 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff5bfe3c-249e-4a0a-9600-9b6cae433075-config-data\") pod \"ff5bfe3c-249e-4a0a-9600-9b6cae433075\" (UID: \"ff5bfe3c-249e-4a0a-9600-9b6cae433075\") " Jan 20 17:33:55 crc kubenswrapper[4558]: I0120 17:33:55.320605 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ff5bfe3c-249e-4a0a-9600-9b6cae433075-public-tls-certs\") pod \"ff5bfe3c-249e-4a0a-9600-9b6cae433075\" (UID: \"ff5bfe3c-249e-4a0a-9600-9b6cae433075\") " Jan 20 17:33:55 crc kubenswrapper[4558]: I0120 17:33:55.320665 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ff5bfe3c-249e-4a0a-9600-9b6cae433075-internal-tls-certs\") pod \"ff5bfe3c-249e-4a0a-9600-9b6cae433075\" (UID: \"ff5bfe3c-249e-4a0a-9600-9b6cae433075\") " Jan 20 17:33:55 crc kubenswrapper[4558]: I0120 17:33:55.320838 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ff5bfe3c-249e-4a0a-9600-9b6cae433075-logs" (OuterVolumeSpecName: "logs") pod "ff5bfe3c-249e-4a0a-9600-9b6cae433075" (UID: "ff5bfe3c-249e-4a0a-9600-9b6cae433075"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:33:55 crc kubenswrapper[4558]: I0120 17:33:55.321384 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ff5bfe3c-249e-4a0a-9600-9b6cae433075-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:55 crc kubenswrapper[4558]: I0120 17:33:55.331269 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ff5bfe3c-249e-4a0a-9600-9b6cae433075-kube-api-access-gqp84" (OuterVolumeSpecName: "kube-api-access-gqp84") pod "ff5bfe3c-249e-4a0a-9600-9b6cae433075" (UID: "ff5bfe3c-249e-4a0a-9600-9b6cae433075"). InnerVolumeSpecName "kube-api-access-gqp84". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:33:55 crc kubenswrapper[4558]: E0120 17:33:55.346503 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ca483fd25cf153fcad14ca9739754a6effd1115ea5c7b991da3c23aa4caa603c" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:33:55 crc kubenswrapper[4558]: E0120 17:33:55.369700 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ca483fd25cf153fcad14ca9739754a6effd1115ea5c7b991da3c23aa4caa603c" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:33:55 crc kubenswrapper[4558]: I0120 17:33:55.373288 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff5bfe3c-249e-4a0a-9600-9b6cae433075-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "ff5bfe3c-249e-4a0a-9600-9b6cae433075" (UID: "ff5bfe3c-249e-4a0a-9600-9b6cae433075"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:55 crc kubenswrapper[4558]: I0120 17:33:55.373327 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff5bfe3c-249e-4a0a-9600-9b6cae433075-config-data" (OuterVolumeSpecName: "config-data") pod "ff5bfe3c-249e-4a0a-9600-9b6cae433075" (UID: "ff5bfe3c-249e-4a0a-9600-9b6cae433075"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:55 crc kubenswrapper[4558]: I0120 17:33:55.373569 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff5bfe3c-249e-4a0a-9600-9b6cae433075-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "ff5bfe3c-249e-4a0a-9600-9b6cae433075" (UID: "ff5bfe3c-249e-4a0a-9600-9b6cae433075"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:55 crc kubenswrapper[4558]: I0120 17:33:55.375602 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff5bfe3c-249e-4a0a-9600-9b6cae433075-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ff5bfe3c-249e-4a0a-9600-9b6cae433075" (UID: "ff5bfe3c-249e-4a0a-9600-9b6cae433075"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:55 crc kubenswrapper[4558]: E0120 17:33:55.382267 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ca483fd25cf153fcad14ca9739754a6effd1115ea5c7b991da3c23aa4caa603c" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:33:55 crc kubenswrapper[4558]: E0120 17:33:55.382325 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="37fd3160-ee6a-41a1-9c4b-260e72133f59" containerName="nova-cell0-conductor-conductor" Jan 20 17:33:55 crc kubenswrapper[4558]: I0120 17:33:55.422718 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff5bfe3c-249e-4a0a-9600-9b6cae433075-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:55 crc kubenswrapper[4558]: I0120 17:33:55.422748 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff5bfe3c-249e-4a0a-9600-9b6cae433075-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:55 crc kubenswrapper[4558]: I0120 17:33:55.422757 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ff5bfe3c-249e-4a0a-9600-9b6cae433075-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:55 crc kubenswrapper[4558]: I0120 17:33:55.422766 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ff5bfe3c-249e-4a0a-9600-9b6cae433075-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:55 crc kubenswrapper[4558]: I0120 17:33:55.422778 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gqp84\" (UniqueName: \"kubernetes.io/projected/ff5bfe3c-249e-4a0a-9600-9b6cae433075-kube-api-access-gqp84\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:55 crc kubenswrapper[4558]: I0120 17:33:55.667061 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:33:55 crc kubenswrapper[4558]: I0120 17:33:55.677358 4558 generic.go:334] "Generic (PLEG): container finished" podID="ff5bfe3c-249e-4a0a-9600-9b6cae433075" containerID="08f98eafe13109b8c088124509e5d0e568ef173e15db9455289ca6166e622060" exitCode=0 Jan 20 17:33:55 crc kubenswrapper[4558]: I0120 17:33:55.677402 4558 generic.go:334] "Generic (PLEG): container finished" podID="ff5bfe3c-249e-4a0a-9600-9b6cae433075" containerID="51b6ee77ce9202ee0b83dad72a75cea201c87eed8b16ec9da84319857f9268e8" exitCode=143 Jan 20 17:33:55 crc kubenswrapper[4558]: I0120 17:33:55.677516 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"ff5bfe3c-249e-4a0a-9600-9b6cae433075","Type":"ContainerDied","Data":"08f98eafe13109b8c088124509e5d0e568ef173e15db9455289ca6166e622060"} Jan 20 17:33:55 crc kubenswrapper[4558]: I0120 17:33:55.677543 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:33:55 crc kubenswrapper[4558]: I0120 17:33:55.677559 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"ff5bfe3c-249e-4a0a-9600-9b6cae433075","Type":"ContainerDied","Data":"51b6ee77ce9202ee0b83dad72a75cea201c87eed8b16ec9da84319857f9268e8"} Jan 20 17:33:55 crc kubenswrapper[4558]: I0120 17:33:55.677573 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"ff5bfe3c-249e-4a0a-9600-9b6cae433075","Type":"ContainerDied","Data":"bd875fb853f089ad2863714996457b47ca4639af4e5573ab5e83cbd45f418450"} Jan 20 17:33:55 crc kubenswrapper[4558]: I0120 17:33:55.677605 4558 scope.go:117] "RemoveContainer" containerID="08f98eafe13109b8c088124509e5d0e568ef173e15db9455289ca6166e622060" Jan 20 17:33:55 crc kubenswrapper[4558]: I0120 17:33:55.749992 4558 scope.go:117] "RemoveContainer" containerID="51b6ee77ce9202ee0b83dad72a75cea201c87eed8b16ec9da84319857f9268e8" Jan 20 17:33:55 crc kubenswrapper[4558]: I0120 17:33:55.786259 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:33:55 crc kubenswrapper[4558]: I0120 17:33:55.789491 4558 scope.go:117] "RemoveContainer" containerID="08f98eafe13109b8c088124509e5d0e568ef173e15db9455289ca6166e622060" Jan 20 17:33:55 crc kubenswrapper[4558]: E0120 17:33:55.790121 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"08f98eafe13109b8c088124509e5d0e568ef173e15db9455289ca6166e622060\": container with ID starting with 08f98eafe13109b8c088124509e5d0e568ef173e15db9455289ca6166e622060 not found: ID does not exist" containerID="08f98eafe13109b8c088124509e5d0e568ef173e15db9455289ca6166e622060" Jan 20 17:33:55 crc kubenswrapper[4558]: I0120 17:33:55.790270 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"08f98eafe13109b8c088124509e5d0e568ef173e15db9455289ca6166e622060"} err="failed to get container status \"08f98eafe13109b8c088124509e5d0e568ef173e15db9455289ca6166e622060\": rpc error: code = NotFound desc = could not find container \"08f98eafe13109b8c088124509e5d0e568ef173e15db9455289ca6166e622060\": container with ID starting with 08f98eafe13109b8c088124509e5d0e568ef173e15db9455289ca6166e622060 not found: ID does not exist" Jan 20 17:33:55 crc kubenswrapper[4558]: I0120 17:33:55.790353 4558 scope.go:117] "RemoveContainer" containerID="51b6ee77ce9202ee0b83dad72a75cea201c87eed8b16ec9da84319857f9268e8" Jan 20 17:33:55 crc kubenswrapper[4558]: E0120 17:33:55.792566 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"51b6ee77ce9202ee0b83dad72a75cea201c87eed8b16ec9da84319857f9268e8\": container with ID starting with 51b6ee77ce9202ee0b83dad72a75cea201c87eed8b16ec9da84319857f9268e8 not found: ID does not exist" containerID="51b6ee77ce9202ee0b83dad72a75cea201c87eed8b16ec9da84319857f9268e8" Jan 20 17:33:55 crc kubenswrapper[4558]: I0120 17:33:55.792616 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"51b6ee77ce9202ee0b83dad72a75cea201c87eed8b16ec9da84319857f9268e8"} err="failed to get container status \"51b6ee77ce9202ee0b83dad72a75cea201c87eed8b16ec9da84319857f9268e8\": rpc error: code = NotFound desc = could not find container \"51b6ee77ce9202ee0b83dad72a75cea201c87eed8b16ec9da84319857f9268e8\": container with 
ID starting with 51b6ee77ce9202ee0b83dad72a75cea201c87eed8b16ec9da84319857f9268e8 not found: ID does not exist" Jan 20 17:33:55 crc kubenswrapper[4558]: I0120 17:33:55.792645 4558 scope.go:117] "RemoveContainer" containerID="08f98eafe13109b8c088124509e5d0e568ef173e15db9455289ca6166e622060" Jan 20 17:33:55 crc kubenswrapper[4558]: I0120 17:33:55.795275 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"08f98eafe13109b8c088124509e5d0e568ef173e15db9455289ca6166e622060"} err="failed to get container status \"08f98eafe13109b8c088124509e5d0e568ef173e15db9455289ca6166e622060\": rpc error: code = NotFound desc = could not find container \"08f98eafe13109b8c088124509e5d0e568ef173e15db9455289ca6166e622060\": container with ID starting with 08f98eafe13109b8c088124509e5d0e568ef173e15db9455289ca6166e622060 not found: ID does not exist" Jan 20 17:33:55 crc kubenswrapper[4558]: I0120 17:33:55.795324 4558 scope.go:117] "RemoveContainer" containerID="51b6ee77ce9202ee0b83dad72a75cea201c87eed8b16ec9da84319857f9268e8" Jan 20 17:33:55 crc kubenswrapper[4558]: I0120 17:33:55.798676 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"51b6ee77ce9202ee0b83dad72a75cea201c87eed8b16ec9da84319857f9268e8"} err="failed to get container status \"51b6ee77ce9202ee0b83dad72a75cea201c87eed8b16ec9da84319857f9268e8\": rpc error: code = NotFound desc = could not find container \"51b6ee77ce9202ee0b83dad72a75cea201c87eed8b16ec9da84319857f9268e8\": container with ID starting with 51b6ee77ce9202ee0b83dad72a75cea201c87eed8b16ec9da84319857f9268e8 not found: ID does not exist" Jan 20 17:33:55 crc kubenswrapper[4558]: I0120 17:33:55.801223 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:33:55 crc kubenswrapper[4558]: I0120 17:33:55.820598 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:33:55 crc kubenswrapper[4558]: E0120 17:33:55.821134 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff5bfe3c-249e-4a0a-9600-9b6cae433075" containerName="nova-api-api" Jan 20 17:33:55 crc kubenswrapper[4558]: I0120 17:33:55.821153 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff5bfe3c-249e-4a0a-9600-9b6cae433075" containerName="nova-api-api" Jan 20 17:33:55 crc kubenswrapper[4558]: E0120 17:33:55.821223 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff5bfe3c-249e-4a0a-9600-9b6cae433075" containerName="nova-api-log" Jan 20 17:33:55 crc kubenswrapper[4558]: I0120 17:33:55.821233 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff5bfe3c-249e-4a0a-9600-9b6cae433075" containerName="nova-api-log" Jan 20 17:33:55 crc kubenswrapper[4558]: I0120 17:33:55.821462 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ff5bfe3c-249e-4a0a-9600-9b6cae433075" containerName="nova-api-log" Jan 20 17:33:55 crc kubenswrapper[4558]: I0120 17:33:55.821481 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ff5bfe3c-249e-4a0a-9600-9b6cae433075" containerName="nova-api-api" Jan 20 17:33:55 crc kubenswrapper[4558]: I0120 17:33:55.822563 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:33:55 crc kubenswrapper[4558]: I0120 17:33:55.825581 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-api-config-data" Jan 20 17:33:55 crc kubenswrapper[4558]: I0120 17:33:55.825601 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-public-svc" Jan 20 17:33:55 crc kubenswrapper[4558]: I0120 17:33:55.825822 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-internal-svc" Jan 20 17:33:55 crc kubenswrapper[4558]: I0120 17:33:55.832970 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/56208926-59b4-4fa1-9e8b-97846ed85d61-internal-tls-certs\") pod \"nova-api-0\" (UID: \"56208926-59b4-4fa1-9e8b-97846ed85d61\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:33:55 crc kubenswrapper[4558]: I0120 17:33:55.833022 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/56208926-59b4-4fa1-9e8b-97846ed85d61-logs\") pod \"nova-api-0\" (UID: \"56208926-59b4-4fa1-9e8b-97846ed85d61\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:33:55 crc kubenswrapper[4558]: I0120 17:33:55.833053 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56208926-59b4-4fa1-9e8b-97846ed85d61-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"56208926-59b4-4fa1-9e8b-97846ed85d61\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:33:55 crc kubenswrapper[4558]: I0120 17:33:55.833088 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/56208926-59b4-4fa1-9e8b-97846ed85d61-public-tls-certs\") pod \"nova-api-0\" (UID: \"56208926-59b4-4fa1-9e8b-97846ed85d61\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:33:55 crc kubenswrapper[4558]: I0120 17:33:55.833140 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nq2qh\" (UniqueName: \"kubernetes.io/projected/56208926-59b4-4fa1-9e8b-97846ed85d61-kube-api-access-nq2qh\") pod \"nova-api-0\" (UID: \"56208926-59b4-4fa1-9e8b-97846ed85d61\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:33:55 crc kubenswrapper[4558]: I0120 17:33:55.833187 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56208926-59b4-4fa1-9e8b-97846ed85d61-config-data\") pod \"nova-api-0\" (UID: \"56208926-59b4-4fa1-9e8b-97846ed85d61\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:33:55 crc kubenswrapper[4558]: I0120 17:33:55.838638 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:33:55 crc kubenswrapper[4558]: I0120 17:33:55.934462 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/56208926-59b4-4fa1-9e8b-97846ed85d61-logs\") pod \"nova-api-0\" (UID: \"56208926-59b4-4fa1-9e8b-97846ed85d61\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:33:55 crc kubenswrapper[4558]: I0120 17:33:55.934518 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56208926-59b4-4fa1-9e8b-97846ed85d61-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"56208926-59b4-4fa1-9e8b-97846ed85d61\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:33:55 crc kubenswrapper[4558]: I0120 17:33:55.934566 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/56208926-59b4-4fa1-9e8b-97846ed85d61-public-tls-certs\") pod \"nova-api-0\" (UID: \"56208926-59b4-4fa1-9e8b-97846ed85d61\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:33:55 crc kubenswrapper[4558]: I0120 17:33:55.934621 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nq2qh\" (UniqueName: \"kubernetes.io/projected/56208926-59b4-4fa1-9e8b-97846ed85d61-kube-api-access-nq2qh\") pod \"nova-api-0\" (UID: \"56208926-59b4-4fa1-9e8b-97846ed85d61\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:33:55 crc kubenswrapper[4558]: I0120 17:33:55.934670 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56208926-59b4-4fa1-9e8b-97846ed85d61-config-data\") pod \"nova-api-0\" (UID: \"56208926-59b4-4fa1-9e8b-97846ed85d61\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:33:55 crc kubenswrapper[4558]: I0120 17:33:55.934741 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/56208926-59b4-4fa1-9e8b-97846ed85d61-internal-tls-certs\") pod \"nova-api-0\" (UID: \"56208926-59b4-4fa1-9e8b-97846ed85d61\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:33:55 crc kubenswrapper[4558]: I0120 17:33:55.934903 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/56208926-59b4-4fa1-9e8b-97846ed85d61-logs\") pod \"nova-api-0\" (UID: \"56208926-59b4-4fa1-9e8b-97846ed85d61\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:33:55 crc kubenswrapper[4558]: I0120 17:33:55.939678 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/56208926-59b4-4fa1-9e8b-97846ed85d61-public-tls-certs\") pod \"nova-api-0\" (UID: \"56208926-59b4-4fa1-9e8b-97846ed85d61\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:33:55 crc kubenswrapper[4558]: I0120 17:33:55.940623 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/56208926-59b4-4fa1-9e8b-97846ed85d61-internal-tls-certs\") pod \"nova-api-0\" (UID: \"56208926-59b4-4fa1-9e8b-97846ed85d61\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:33:55 crc kubenswrapper[4558]: I0120 17:33:55.940741 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56208926-59b4-4fa1-9e8b-97846ed85d61-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"56208926-59b4-4fa1-9e8b-97846ed85d61\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:33:55 crc kubenswrapper[4558]: I0120 17:33:55.941714 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56208926-59b4-4fa1-9e8b-97846ed85d61-config-data\") pod \"nova-api-0\" (UID: \"56208926-59b4-4fa1-9e8b-97846ed85d61\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:33:55 crc kubenswrapper[4558]: I0120 17:33:55.958738 4558 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nq2qh\" (UniqueName: \"kubernetes.io/projected/56208926-59b4-4fa1-9e8b-97846ed85d61-kube-api-access-nq2qh\") pod \"nova-api-0\" (UID: \"56208926-59b4-4fa1-9e8b-97846ed85d61\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:33:56 crc kubenswrapper[4558]: I0120 17:33:56.143685 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:33:56 crc kubenswrapper[4558]: I0120 17:33:56.199544 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-5878f88784-4j5nq" Jan 20 17:33:56 crc kubenswrapper[4558]: I0120 17:33:56.243598 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c2jvr\" (UniqueName: \"kubernetes.io/projected/a289616c-ea44-447c-a263-4744c01d5b5e-kube-api-access-c2jvr\") pod \"a289616c-ea44-447c-a263-4744c01d5b5e\" (UID: \"a289616c-ea44-447c-a263-4744c01d5b5e\") " Jan 20 17:33:56 crc kubenswrapper[4558]: I0120 17:33:56.243725 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/a289616c-ea44-447c-a263-4744c01d5b5e-config\") pod \"a289616c-ea44-447c-a263-4744c01d5b5e\" (UID: \"a289616c-ea44-447c-a263-4744c01d5b5e\") " Jan 20 17:33:56 crc kubenswrapper[4558]: I0120 17:33:56.243887 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/a289616c-ea44-447c-a263-4744c01d5b5e-httpd-config\") pod \"a289616c-ea44-447c-a263-4744c01d5b5e\" (UID: \"a289616c-ea44-447c-a263-4744c01d5b5e\") " Jan 20 17:33:56 crc kubenswrapper[4558]: I0120 17:33:56.244252 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a289616c-ea44-447c-a263-4744c01d5b5e-combined-ca-bundle\") pod \"a289616c-ea44-447c-a263-4744c01d5b5e\" (UID: \"a289616c-ea44-447c-a263-4744c01d5b5e\") " Jan 20 17:33:56 crc kubenswrapper[4558]: I0120 17:33:56.249983 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a289616c-ea44-447c-a263-4744c01d5b5e-kube-api-access-c2jvr" (OuterVolumeSpecName: "kube-api-access-c2jvr") pod "a289616c-ea44-447c-a263-4744c01d5b5e" (UID: "a289616c-ea44-447c-a263-4744c01d5b5e"). InnerVolumeSpecName "kube-api-access-c2jvr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:33:56 crc kubenswrapper[4558]: I0120 17:33:56.253639 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a289616c-ea44-447c-a263-4744c01d5b5e-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "a289616c-ea44-447c-a263-4744c01d5b5e" (UID: "a289616c-ea44-447c-a263-4744c01d5b5e"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:56 crc kubenswrapper[4558]: I0120 17:33:56.294513 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a289616c-ea44-447c-a263-4744c01d5b5e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a289616c-ea44-447c-a263-4744c01d5b5e" (UID: "a289616c-ea44-447c-a263-4744c01d5b5e"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:56 crc kubenswrapper[4558]: I0120 17:33:56.296000 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a289616c-ea44-447c-a263-4744c01d5b5e-config" (OuterVolumeSpecName: "config") pod "a289616c-ea44-447c-a263-4744c01d5b5e" (UID: "a289616c-ea44-447c-a263-4744c01d5b5e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:56 crc kubenswrapper[4558]: I0120 17:33:56.347221 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/a289616c-ea44-447c-a263-4744c01d5b5e-httpd-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:56 crc kubenswrapper[4558]: I0120 17:33:56.347719 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a289616c-ea44-447c-a263-4744c01d5b5e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:56 crc kubenswrapper[4558]: I0120 17:33:56.347789 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c2jvr\" (UniqueName: \"kubernetes.io/projected/a289616c-ea44-447c-a263-4744c01d5b5e-kube-api-access-c2jvr\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:56 crc kubenswrapper[4558]: I0120 17:33:56.347854 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/a289616c-ea44-447c-a263-4744c01d5b5e-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:56 crc kubenswrapper[4558]: I0120 17:33:56.580635 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6c0b9601-3d98-40d3-b02e-7804d7f2b216" path="/var/lib/kubelet/pods/6c0b9601-3d98-40d3-b02e-7804d7f2b216/volumes" Jan 20 17:33:56 crc kubenswrapper[4558]: I0120 17:33:56.581600 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ff5bfe3c-249e-4a0a-9600-9b6cae433075" path="/var/lib/kubelet/pods/ff5bfe3c-249e-4a0a-9600-9b6cae433075/volumes" Jan 20 17:33:56 crc kubenswrapper[4558]: I0120 17:33:56.600016 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:33:56 crc kubenswrapper[4558]: I0120 17:33:56.691636 4558 generic.go:334] "Generic (PLEG): container finished" podID="a289616c-ea44-447c-a263-4744c01d5b5e" containerID="2eba8336553f4bec8ad0739564c6671e683452b7d048a156e1a6e3cf92ae581f" exitCode=0 Jan 20 17:33:56 crc kubenswrapper[4558]: I0120 17:33:56.691719 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-5878f88784-4j5nq" event={"ID":"a289616c-ea44-447c-a263-4744c01d5b5e","Type":"ContainerDied","Data":"2eba8336553f4bec8ad0739564c6671e683452b7d048a156e1a6e3cf92ae581f"} Jan 20 17:33:56 crc kubenswrapper[4558]: I0120 17:33:56.691757 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-5878f88784-4j5nq" event={"ID":"a289616c-ea44-447c-a263-4744c01d5b5e","Type":"ContainerDied","Data":"477eb853f8cade9de5067dc4ac34643d4e443f00f7b1f52ebd367a76fb0b8ca2"} Jan 20 17:33:56 crc kubenswrapper[4558]: I0120 17:33:56.691778 4558 scope.go:117] "RemoveContainer" containerID="f8d054c8c4c51555f3cda668c1ab5da9f1dcb5f7d87997a3ed3c22db03630067" Jan 20 17:33:56 crc kubenswrapper[4558]: I0120 17:33:56.691921 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-5878f88784-4j5nq" Jan 20 17:33:56 crc kubenswrapper[4558]: I0120 17:33:56.697185 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"44287ea3-63fc-4ab2-8140-5bf616c3a7ff","Type":"ContainerStarted","Data":"03a9426897f16d041a1386a77332d0afd6e83735904065a47bb859471d68f190"} Jan 20 17:33:56 crc kubenswrapper[4558]: I0120 17:33:56.697312 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="44287ea3-63fc-4ab2-8140-5bf616c3a7ff" containerName="ceilometer-central-agent" containerID="cri-o://faebabce8a842ab2e7002067a6ce76f73862c2812a1910727df13575c8eb1b5b" gracePeriod=30 Jan 20 17:33:56 crc kubenswrapper[4558]: I0120 17:33:56.697430 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="44287ea3-63fc-4ab2-8140-5bf616c3a7ff" containerName="proxy-httpd" containerID="cri-o://03a9426897f16d041a1386a77332d0afd6e83735904065a47bb859471d68f190" gracePeriod=30 Jan 20 17:33:56 crc kubenswrapper[4558]: I0120 17:33:56.697435 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:33:56 crc kubenswrapper[4558]: I0120 17:33:56.697493 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="44287ea3-63fc-4ab2-8140-5bf616c3a7ff" containerName="sg-core" containerID="cri-o://e00823438a3d8284744c0067c4c4a1fe27184f085f878c30569dbc4f58bc8bde" gracePeriod=30 Jan 20 17:33:56 crc kubenswrapper[4558]: I0120 17:33:56.697533 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="44287ea3-63fc-4ab2-8140-5bf616c3a7ff" containerName="ceilometer-notification-agent" containerID="cri-o://f59292330b9f8b7139ba7ca90d368c82d20ac2bd0baeea4553498e6095afac6b" gracePeriod=30 Jan 20 17:33:56 crc kubenswrapper[4558]: I0120 17:33:56.709762 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"3481546b-c57f-43c9-92f9-ffcbc3af0d8f","Type":"ContainerStarted","Data":"7387066aabcf8bc09e2a1f9c8d883fa307f7ddccde17efaf31e07b22b87457ba"} Jan 20 17:33:56 crc kubenswrapper[4558]: I0120 17:33:56.709810 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"3481546b-c57f-43c9-92f9-ffcbc3af0d8f","Type":"ContainerStarted","Data":"a0d826feadee368ccf3856e699cf2cd99ea908f0d2dac8a5188ad31f925dc33d"} Jan 20 17:33:56 crc kubenswrapper[4558]: I0120 17:33:56.718316 4558 scope.go:117] "RemoveContainer" containerID="2eba8336553f4bec8ad0739564c6671e683452b7d048a156e1a6e3cf92ae581f" Jan 20 17:33:56 crc kubenswrapper[4558]: I0120 17:33:56.719574 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"56208926-59b4-4fa1-9e8b-97846ed85d61","Type":"ContainerStarted","Data":"a0032abe6081a17455b83a1d0df740d9aa8e90749e7b4116b6bfe867ba6cdf50"} Jan 20 17:33:56 crc kubenswrapper[4558]: I0120 17:33:56.746952 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=2.733843619 podStartE2EDuration="6.746930653s" podCreationTimestamp="2026-01-20 17:33:50 +0000 UTC" firstStartedPulling="2026-01-20 17:33:51.577446097 +0000 UTC m=+3125.337784064" lastFinishedPulling="2026-01-20 17:33:55.590533131 
+0000 UTC m=+3129.350871098" observedRunningTime="2026-01-20 17:33:56.726204491 +0000 UTC m=+3130.486542458" watchObservedRunningTime="2026-01-20 17:33:56.746930653 +0000 UTC m=+3130.507268610" Jan 20 17:33:56 crc kubenswrapper[4558]: I0120 17:33:56.749402 4558 scope.go:117] "RemoveContainer" containerID="f8d054c8c4c51555f3cda668c1ab5da9f1dcb5f7d87997a3ed3c22db03630067" Jan 20 17:33:56 crc kubenswrapper[4558]: E0120 17:33:56.749784 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f8d054c8c4c51555f3cda668c1ab5da9f1dcb5f7d87997a3ed3c22db03630067\": container with ID starting with f8d054c8c4c51555f3cda668c1ab5da9f1dcb5f7d87997a3ed3c22db03630067 not found: ID does not exist" containerID="f8d054c8c4c51555f3cda668c1ab5da9f1dcb5f7d87997a3ed3c22db03630067" Jan 20 17:33:56 crc kubenswrapper[4558]: I0120 17:33:56.749826 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f8d054c8c4c51555f3cda668c1ab5da9f1dcb5f7d87997a3ed3c22db03630067"} err="failed to get container status \"f8d054c8c4c51555f3cda668c1ab5da9f1dcb5f7d87997a3ed3c22db03630067\": rpc error: code = NotFound desc = could not find container \"f8d054c8c4c51555f3cda668c1ab5da9f1dcb5f7d87997a3ed3c22db03630067\": container with ID starting with f8d054c8c4c51555f3cda668c1ab5da9f1dcb5f7d87997a3ed3c22db03630067 not found: ID does not exist" Jan 20 17:33:56 crc kubenswrapper[4558]: I0120 17:33:56.749848 4558 scope.go:117] "RemoveContainer" containerID="2eba8336553f4bec8ad0739564c6671e683452b7d048a156e1a6e3cf92ae581f" Jan 20 17:33:56 crc kubenswrapper[4558]: E0120 17:33:56.753327 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2eba8336553f4bec8ad0739564c6671e683452b7d048a156e1a6e3cf92ae581f\": container with ID starting with 2eba8336553f4bec8ad0739564c6671e683452b7d048a156e1a6e3cf92ae581f not found: ID does not exist" containerID="2eba8336553f4bec8ad0739564c6671e683452b7d048a156e1a6e3cf92ae581f" Jan 20 17:33:56 crc kubenswrapper[4558]: I0120 17:33:56.753380 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2eba8336553f4bec8ad0739564c6671e683452b7d048a156e1a6e3cf92ae581f"} err="failed to get container status \"2eba8336553f4bec8ad0739564c6671e683452b7d048a156e1a6e3cf92ae581f\": rpc error: code = NotFound desc = could not find container \"2eba8336553f4bec8ad0739564c6671e683452b7d048a156e1a6e3cf92ae581f\": container with ID starting with 2eba8336553f4bec8ad0739564c6671e683452b7d048a156e1a6e3cf92ae581f not found: ID does not exist" Jan 20 17:33:56 crc kubenswrapper[4558]: I0120 17:33:56.761403 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-scheduler-0" podStartSLOduration=2.7613891710000003 podStartE2EDuration="2.761389171s" podCreationTimestamp="2026-01-20 17:33:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:33:56.753582351 +0000 UTC m=+3130.513920318" watchObservedRunningTime="2026-01-20 17:33:56.761389171 +0000 UTC m=+3130.521727138" Jan 20 17:33:56 crc kubenswrapper[4558]: I0120 17:33:56.786553 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-5878f88784-4j5nq"] Jan 20 17:33:56 crc kubenswrapper[4558]: I0120 17:33:56.796085 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack-kuttl-tests/neutron-5878f88784-4j5nq"] Jan 20 17:33:57 crc kubenswrapper[4558]: E0120 17:33:57.622381 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="2623aa795d9d97ceb168764d999b00e635d55fa28ca538052a877af229f57732" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:33:57 crc kubenswrapper[4558]: E0120 17:33:57.625172 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="2623aa795d9d97ceb168764d999b00e635d55fa28ca538052a877af229f57732" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:33:57 crc kubenswrapper[4558]: E0120 17:33:57.626657 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="2623aa795d9d97ceb168764d999b00e635d55fa28ca538052a877af229f57732" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:33:57 crc kubenswrapper[4558]: E0120 17:33:57.626746 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podUID="04a134bc-533d-490a-8544-61f705a6d4f2" containerName="nova-cell1-conductor-conductor" Jan 20 17:33:57 crc kubenswrapper[4558]: I0120 17:33:57.729177 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"56208926-59b4-4fa1-9e8b-97846ed85d61","Type":"ContainerStarted","Data":"29810e61422f066b6fe1d3ae95b85fe80170ad2ddf3b6183dea1aff947fae5b0"} Jan 20 17:33:57 crc kubenswrapper[4558]: I0120 17:33:57.729231 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"56208926-59b4-4fa1-9e8b-97846ed85d61","Type":"ContainerStarted","Data":"97b9e76652e3421488afeadedd1e7cc12b71010b839f97d3a9e51d0443a85116"} Jan 20 17:33:57 crc kubenswrapper[4558]: I0120 17:33:57.733419 4558 generic.go:334] "Generic (PLEG): container finished" podID="44287ea3-63fc-4ab2-8140-5bf616c3a7ff" containerID="03a9426897f16d041a1386a77332d0afd6e83735904065a47bb859471d68f190" exitCode=0 Jan 20 17:33:57 crc kubenswrapper[4558]: I0120 17:33:57.733508 4558 generic.go:334] "Generic (PLEG): container finished" podID="44287ea3-63fc-4ab2-8140-5bf616c3a7ff" containerID="e00823438a3d8284744c0067c4c4a1fe27184f085f878c30569dbc4f58bc8bde" exitCode=2 Jan 20 17:33:57 crc kubenswrapper[4558]: I0120 17:33:57.733556 4558 generic.go:334] "Generic (PLEG): container finished" podID="44287ea3-63fc-4ab2-8140-5bf616c3a7ff" containerID="f59292330b9f8b7139ba7ca90d368c82d20ac2bd0baeea4553498e6095afac6b" exitCode=0 Jan 20 17:33:57 crc kubenswrapper[4558]: I0120 17:33:57.733785 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"44287ea3-63fc-4ab2-8140-5bf616c3a7ff","Type":"ContainerDied","Data":"03a9426897f16d041a1386a77332d0afd6e83735904065a47bb859471d68f190"} Jan 20 17:33:57 crc kubenswrapper[4558]: I0120 17:33:57.733838 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" 
event={"ID":"44287ea3-63fc-4ab2-8140-5bf616c3a7ff","Type":"ContainerDied","Data":"e00823438a3d8284744c0067c4c4a1fe27184f085f878c30569dbc4f58bc8bde"} Jan 20 17:33:57 crc kubenswrapper[4558]: I0120 17:33:57.733850 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"44287ea3-63fc-4ab2-8140-5bf616c3a7ff","Type":"ContainerDied","Data":"f59292330b9f8b7139ba7ca90d368c82d20ac2bd0baeea4553498e6095afac6b"} Jan 20 17:33:57 crc kubenswrapper[4558]: I0120 17:33:57.747089 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-api-0" podStartSLOduration=2.7470792189999997 podStartE2EDuration="2.747079219s" podCreationTimestamp="2026-01-20 17:33:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:33:57.743880765 +0000 UTC m=+3131.504218732" watchObservedRunningTime="2026-01-20 17:33:57.747079219 +0000 UTC m=+3131.507417186" Jan 20 17:33:58 crc kubenswrapper[4558]: I0120 17:33:58.566578 4558 scope.go:117] "RemoveContainer" containerID="f275e9f5de330b3c79316d5871106c6bcf90b698e0744c5beb28da45c336b36d" Jan 20 17:33:58 crc kubenswrapper[4558]: E0120 17:33:58.567356 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:33:58 crc kubenswrapper[4558]: I0120 17:33:58.578785 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a289616c-ea44-447c-a263-4744c01d5b5e" path="/var/lib/kubelet/pods/a289616c-ea44-447c-a263-4744c01d5b5e/volumes" Jan 20 17:33:58 crc kubenswrapper[4558]: I0120 17:33:58.744039 4558 generic.go:334] "Generic (PLEG): container finished" podID="d656fa11-4336-4ddf-a168-af6f682bfaab" containerID="50d26ce4c22ef55a6ba6f240018a6850524c10427a08e374734fa5e7aa9279db" exitCode=0 Jan 20 17:33:58 crc kubenswrapper[4558]: I0120 17:33:58.744131 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-ml6gb" event={"ID":"d656fa11-4336-4ddf-a168-af6f682bfaab","Type":"ContainerDied","Data":"50d26ce4c22ef55a6ba6f240018a6850524c10427a08e374734fa5e7aa9279db"} Jan 20 17:33:59 crc kubenswrapper[4558]: I0120 17:33:59.050549 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/swift-proxy-b45ff9748-dd8kw" Jan 20 17:33:59 crc kubenswrapper[4558]: I0120 17:33:59.108419 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-proxy-6b8d9c59b6-z8fqn"] Jan 20 17:33:59 crc kubenswrapper[4558]: I0120 17:33:59.108872 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-proxy-6b8d9c59b6-z8fqn" podUID="fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d" containerName="proxy-httpd" containerID="cri-o://157e978c4ecc8e76d59459dde878219563637e712816d504a6cc4d4c37722f06" gracePeriod=30 Jan 20 17:33:59 crc kubenswrapper[4558]: I0120 17:33:59.109252 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-proxy-6b8d9c59b6-z8fqn" podUID="fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d" containerName="proxy-server" 
containerID="cri-o://1990d0bdef6da51da465520b09ecd48fce8e28b11c4163d2bf1d75f432ef1a20" gracePeriod=30 Jan 20 17:33:59 crc kubenswrapper[4558]: I0120 17:33:59.599621 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:33:59 crc kubenswrapper[4558]: I0120 17:33:59.612687 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fc6ph\" (UniqueName: \"kubernetes.io/projected/37fd3160-ee6a-41a1-9c4b-260e72133f59-kube-api-access-fc6ph\") pod \"37fd3160-ee6a-41a1-9c4b-260e72133f59\" (UID: \"37fd3160-ee6a-41a1-9c4b-260e72133f59\") " Jan 20 17:33:59 crc kubenswrapper[4558]: I0120 17:33:59.612800 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/37fd3160-ee6a-41a1-9c4b-260e72133f59-config-data\") pod \"37fd3160-ee6a-41a1-9c4b-260e72133f59\" (UID: \"37fd3160-ee6a-41a1-9c4b-260e72133f59\") " Jan 20 17:33:59 crc kubenswrapper[4558]: I0120 17:33:59.612968 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37fd3160-ee6a-41a1-9c4b-260e72133f59-combined-ca-bundle\") pod \"37fd3160-ee6a-41a1-9c4b-260e72133f59\" (UID: \"37fd3160-ee6a-41a1-9c4b-260e72133f59\") " Jan 20 17:33:59 crc kubenswrapper[4558]: I0120 17:33:59.633274 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/37fd3160-ee6a-41a1-9c4b-260e72133f59-kube-api-access-fc6ph" (OuterVolumeSpecName: "kube-api-access-fc6ph") pod "37fd3160-ee6a-41a1-9c4b-260e72133f59" (UID: "37fd3160-ee6a-41a1-9c4b-260e72133f59"). InnerVolumeSpecName "kube-api-access-fc6ph". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:33:59 crc kubenswrapper[4558]: I0120 17:33:59.669287 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/37fd3160-ee6a-41a1-9c4b-260e72133f59-config-data" (OuterVolumeSpecName: "config-data") pod "37fd3160-ee6a-41a1-9c4b-260e72133f59" (UID: "37fd3160-ee6a-41a1-9c4b-260e72133f59"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:59 crc kubenswrapper[4558]: I0120 17:33:59.674625 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/37fd3160-ee6a-41a1-9c4b-260e72133f59-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "37fd3160-ee6a-41a1-9c4b-260e72133f59" (UID: "37fd3160-ee6a-41a1-9c4b-260e72133f59"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:59 crc kubenswrapper[4558]: I0120 17:33:59.715981 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/37fd3160-ee6a-41a1-9c4b-260e72133f59-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:59 crc kubenswrapper[4558]: I0120 17:33:59.716012 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37fd3160-ee6a-41a1-9c4b-260e72133f59-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:59 crc kubenswrapper[4558]: I0120 17:33:59.716025 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fc6ph\" (UniqueName: \"kubernetes.io/projected/37fd3160-ee6a-41a1-9c4b-260e72133f59-kube-api-access-fc6ph\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:59 crc kubenswrapper[4558]: I0120 17:33:59.753725 4558 generic.go:334] "Generic (PLEG): container finished" podID="37fd3160-ee6a-41a1-9c4b-260e72133f59" containerID="ca483fd25cf153fcad14ca9739754a6effd1115ea5c7b991da3c23aa4caa603c" exitCode=0 Jan 20 17:33:59 crc kubenswrapper[4558]: I0120 17:33:59.753812 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"37fd3160-ee6a-41a1-9c4b-260e72133f59","Type":"ContainerDied","Data":"ca483fd25cf153fcad14ca9739754a6effd1115ea5c7b991da3c23aa4caa603c"} Jan 20 17:33:59 crc kubenswrapper[4558]: I0120 17:33:59.753858 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"37fd3160-ee6a-41a1-9c4b-260e72133f59","Type":"ContainerDied","Data":"fae23ddf543587439d5d9e8fd6cb43aec782fbdee10fa24c830597e1c43ab0ed"} Jan 20 17:33:59 crc kubenswrapper[4558]: I0120 17:33:59.753913 4558 scope.go:117] "RemoveContainer" containerID="ca483fd25cf153fcad14ca9739754a6effd1115ea5c7b991da3c23aa4caa603c" Jan 20 17:33:59 crc kubenswrapper[4558]: I0120 17:33:59.754145 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:33:59 crc kubenswrapper[4558]: I0120 17:33:59.768482 4558 generic.go:334] "Generic (PLEG): container finished" podID="fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d" containerID="1990d0bdef6da51da465520b09ecd48fce8e28b11c4163d2bf1d75f432ef1a20" exitCode=0 Jan 20 17:33:59 crc kubenswrapper[4558]: I0120 17:33:59.768513 4558 generic.go:334] "Generic (PLEG): container finished" podID="fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d" containerID="157e978c4ecc8e76d59459dde878219563637e712816d504a6cc4d4c37722f06" exitCode=0 Jan 20 17:33:59 crc kubenswrapper[4558]: I0120 17:33:59.768532 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-6b8d9c59b6-z8fqn" event={"ID":"fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d","Type":"ContainerDied","Data":"1990d0bdef6da51da465520b09ecd48fce8e28b11c4163d2bf1d75f432ef1a20"} Jan 20 17:33:59 crc kubenswrapper[4558]: I0120 17:33:59.768591 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-6b8d9c59b6-z8fqn" event={"ID":"fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d","Type":"ContainerDied","Data":"157e978c4ecc8e76d59459dde878219563637e712816d504a6cc4d4c37722f06"} Jan 20 17:33:59 crc kubenswrapper[4558]: I0120 17:33:59.803449 4558 scope.go:117] "RemoveContainer" containerID="ca483fd25cf153fcad14ca9739754a6effd1115ea5c7b991da3c23aa4caa603c" Jan 20 17:33:59 crc kubenswrapper[4558]: E0120 17:33:59.808100 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ca483fd25cf153fcad14ca9739754a6effd1115ea5c7b991da3c23aa4caa603c\": container with ID starting with ca483fd25cf153fcad14ca9739754a6effd1115ea5c7b991da3c23aa4caa603c not found: ID does not exist" containerID="ca483fd25cf153fcad14ca9739754a6effd1115ea5c7b991da3c23aa4caa603c" Jan 20 17:33:59 crc kubenswrapper[4558]: I0120 17:33:59.808143 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ca483fd25cf153fcad14ca9739754a6effd1115ea5c7b991da3c23aa4caa603c"} err="failed to get container status \"ca483fd25cf153fcad14ca9739754a6effd1115ea5c7b991da3c23aa4caa603c\": rpc error: code = NotFound desc = could not find container \"ca483fd25cf153fcad14ca9739754a6effd1115ea5c7b991da3c23aa4caa603c\": container with ID starting with ca483fd25cf153fcad14ca9739754a6effd1115ea5c7b991da3c23aa4caa603c not found: ID does not exist" Jan 20 17:33:59 crc kubenswrapper[4558]: I0120 17:33:59.812911 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:33:59 crc kubenswrapper[4558]: I0120 17:33:59.825474 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:33:59 crc kubenswrapper[4558]: I0120 17:33:59.834610 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:33:59 crc kubenswrapper[4558]: E0120 17:33:59.835122 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a289616c-ea44-447c-a263-4744c01d5b5e" containerName="neutron-httpd" Jan 20 17:33:59 crc kubenswrapper[4558]: I0120 17:33:59.835142 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a289616c-ea44-447c-a263-4744c01d5b5e" containerName="neutron-httpd" Jan 20 17:33:59 crc kubenswrapper[4558]: E0120 17:33:59.835180 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37fd3160-ee6a-41a1-9c4b-260e72133f59" 
containerName="nova-cell0-conductor-conductor" Jan 20 17:33:59 crc kubenswrapper[4558]: I0120 17:33:59.835188 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="37fd3160-ee6a-41a1-9c4b-260e72133f59" containerName="nova-cell0-conductor-conductor" Jan 20 17:33:59 crc kubenswrapper[4558]: E0120 17:33:59.835196 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a289616c-ea44-447c-a263-4744c01d5b5e" containerName="neutron-api" Jan 20 17:33:59 crc kubenswrapper[4558]: I0120 17:33:59.835202 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a289616c-ea44-447c-a263-4744c01d5b5e" containerName="neutron-api" Jan 20 17:33:59 crc kubenswrapper[4558]: I0120 17:33:59.835407 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a289616c-ea44-447c-a263-4744c01d5b5e" containerName="neutron-api" Jan 20 17:33:59 crc kubenswrapper[4558]: I0120 17:33:59.835423 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a289616c-ea44-447c-a263-4744c01d5b5e" containerName="neutron-httpd" Jan 20 17:33:59 crc kubenswrapper[4558]: I0120 17:33:59.835443 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="37fd3160-ee6a-41a1-9c4b-260e72133f59" containerName="nova-cell0-conductor-conductor" Jan 20 17:33:59 crc kubenswrapper[4558]: I0120 17:33:59.836182 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:33:59 crc kubenswrapper[4558]: I0120 17:33:59.842608 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-conductor-config-data" Jan 20 17:33:59 crc kubenswrapper[4558]: I0120 17:33:59.847093 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-proxy-6b8d9c59b6-z8fqn" Jan 20 17:33:59 crc kubenswrapper[4558]: I0120 17:33:59.847990 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:33:59 crc kubenswrapper[4558]: I0120 17:33:59.920282 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d-combined-ca-bundle\") pod \"fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d\" (UID: \"fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d\") " Jan 20 17:33:59 crc kubenswrapper[4558]: I0120 17:33:59.920350 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d-config-data\") pod \"fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d\" (UID: \"fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d\") " Jan 20 17:33:59 crc kubenswrapper[4558]: I0120 17:33:59.920385 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qlcrt\" (UniqueName: \"kubernetes.io/projected/fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d-kube-api-access-qlcrt\") pod \"fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d\" (UID: \"fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d\") " Jan 20 17:33:59 crc kubenswrapper[4558]: I0120 17:33:59.920451 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d-log-httpd\") pod \"fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d\" (UID: \"fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d\") " Jan 20 17:33:59 crc kubenswrapper[4558]: I0120 17:33:59.920528 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d-run-httpd\") pod \"fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d\" (UID: \"fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d\") " Jan 20 17:33:59 crc kubenswrapper[4558]: I0120 17:33:59.920565 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d-etc-swift\") pod \"fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d\" (UID: \"fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d\") " Jan 20 17:33:59 crc kubenswrapper[4558]: I0120 17:33:59.920964 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d" (UID: "fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:33:59 crc kubenswrapper[4558]: I0120 17:33:59.921148 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d" (UID: "fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:33:59 crc kubenswrapper[4558]: I0120 17:33:59.921455 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4d77869-9132-4170-b50d-88389a33c597-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"f4d77869-9132-4170-b50d-88389a33c597\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:33:59 crc kubenswrapper[4558]: I0120 17:33:59.921490 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f4d77869-9132-4170-b50d-88389a33c597-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"f4d77869-9132-4170-b50d-88389a33c597\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:33:59 crc kubenswrapper[4558]: I0120 17:33:59.921579 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xxdh6\" (UniqueName: \"kubernetes.io/projected/f4d77869-9132-4170-b50d-88389a33c597-kube-api-access-xxdh6\") pod \"nova-cell0-conductor-0\" (UID: \"f4d77869-9132-4170-b50d-88389a33c597\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:33:59 crc kubenswrapper[4558]: I0120 17:33:59.922094 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:59 crc kubenswrapper[4558]: I0120 17:33:59.922126 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:33:59 crc kubenswrapper[4558]: I0120 17:33:59.924288 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d-kube-api-access-qlcrt" (OuterVolumeSpecName: "kube-api-access-qlcrt") pod "fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d" (UID: "fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d"). 
InnerVolumeSpecName "kube-api-access-qlcrt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:33:59 crc kubenswrapper[4558]: I0120 17:33:59.930037 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d" (UID: "fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:33:59 crc kubenswrapper[4558]: I0120 17:33:59.970712 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d" (UID: "fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:33:59 crc kubenswrapper[4558]: I0120 17:33:59.972264 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d-config-data" (OuterVolumeSpecName: "config-data") pod "fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d" (UID: "fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:34:00 crc kubenswrapper[4558]: I0120 17:34:00.024157 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4d77869-9132-4170-b50d-88389a33c597-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"f4d77869-9132-4170-b50d-88389a33c597\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:34:00 crc kubenswrapper[4558]: I0120 17:34:00.024220 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f4d77869-9132-4170-b50d-88389a33c597-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"f4d77869-9132-4170-b50d-88389a33c597\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:34:00 crc kubenswrapper[4558]: I0120 17:34:00.024290 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xxdh6\" (UniqueName: \"kubernetes.io/projected/f4d77869-9132-4170-b50d-88389a33c597-kube-api-access-xxdh6\") pod \"nova-cell0-conductor-0\" (UID: \"f4d77869-9132-4170-b50d-88389a33c597\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:34:00 crc kubenswrapper[4558]: I0120 17:34:00.024358 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:34:00 crc kubenswrapper[4558]: I0120 17:34:00.024375 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:34:00 crc kubenswrapper[4558]: I0120 17:34:00.024385 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qlcrt\" (UniqueName: \"kubernetes.io/projected/fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d-kube-api-access-qlcrt\") on node \"crc\" DevicePath \"\"" Jan 20 17:34:00 crc kubenswrapper[4558]: I0120 17:34:00.024395 4558 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: 
\"kubernetes.io/projected/fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 20 17:34:00 crc kubenswrapper[4558]: I0120 17:34:00.036634 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4d77869-9132-4170-b50d-88389a33c597-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"f4d77869-9132-4170-b50d-88389a33c597\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:34:00 crc kubenswrapper[4558]: I0120 17:34:00.036673 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f4d77869-9132-4170-b50d-88389a33c597-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"f4d77869-9132-4170-b50d-88389a33c597\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:34:00 crc kubenswrapper[4558]: I0120 17:34:00.047640 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xxdh6\" (UniqueName: \"kubernetes.io/projected/f4d77869-9132-4170-b50d-88389a33c597-kube-api-access-xxdh6\") pod \"nova-cell0-conductor-0\" (UID: \"f4d77869-9132-4170-b50d-88389a33c597\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:34:00 crc kubenswrapper[4558]: I0120 17:34:00.047761 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:34:00 crc kubenswrapper[4558]: I0120 17:34:00.066321 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-ml6gb" Jan 20 17:34:00 crc kubenswrapper[4558]: I0120 17:34:00.126122 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d656fa11-4336-4ddf-a168-af6f682bfaab-combined-ca-bundle\") pod \"d656fa11-4336-4ddf-a168-af6f682bfaab\" (UID: \"d656fa11-4336-4ddf-a168-af6f682bfaab\") " Jan 20 17:34:00 crc kubenswrapper[4558]: I0120 17:34:00.126270 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gb2b7\" (UniqueName: \"kubernetes.io/projected/d656fa11-4336-4ddf-a168-af6f682bfaab-kube-api-access-gb2b7\") pod \"d656fa11-4336-4ddf-a168-af6f682bfaab\" (UID: \"d656fa11-4336-4ddf-a168-af6f682bfaab\") " Jan 20 17:34:00 crc kubenswrapper[4558]: I0120 17:34:00.126362 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d656fa11-4336-4ddf-a168-af6f682bfaab-config-data\") pod \"d656fa11-4336-4ddf-a168-af6f682bfaab\" (UID: \"d656fa11-4336-4ddf-a168-af6f682bfaab\") " Jan 20 17:34:00 crc kubenswrapper[4558]: I0120 17:34:00.126441 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d656fa11-4336-4ddf-a168-af6f682bfaab-scripts\") pod \"d656fa11-4336-4ddf-a168-af6f682bfaab\" (UID: \"d656fa11-4336-4ddf-a168-af6f682bfaab\") " Jan 20 17:34:00 crc kubenswrapper[4558]: I0120 17:34:00.141856 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d656fa11-4336-4ddf-a168-af6f682bfaab-scripts" (OuterVolumeSpecName: "scripts") pod "d656fa11-4336-4ddf-a168-af6f682bfaab" (UID: "d656fa11-4336-4ddf-a168-af6f682bfaab"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:34:00 crc kubenswrapper[4558]: I0120 17:34:00.145310 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d656fa11-4336-4ddf-a168-af6f682bfaab-kube-api-access-gb2b7" (OuterVolumeSpecName: "kube-api-access-gb2b7") pod "d656fa11-4336-4ddf-a168-af6f682bfaab" (UID: "d656fa11-4336-4ddf-a168-af6f682bfaab"). InnerVolumeSpecName "kube-api-access-gb2b7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:34:00 crc kubenswrapper[4558]: I0120 17:34:00.156048 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:34:00 crc kubenswrapper[4558]: I0120 17:34:00.193421 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d656fa11-4336-4ddf-a168-af6f682bfaab-config-data" (OuterVolumeSpecName: "config-data") pod "d656fa11-4336-4ddf-a168-af6f682bfaab" (UID: "d656fa11-4336-4ddf-a168-af6f682bfaab"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:34:00 crc kubenswrapper[4558]: I0120 17:34:00.196395 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d656fa11-4336-4ddf-a168-af6f682bfaab-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d656fa11-4336-4ddf-a168-af6f682bfaab" (UID: "d656fa11-4336-4ddf-a168-af6f682bfaab"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:34:00 crc kubenswrapper[4558]: I0120 17:34:00.235882 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d656fa11-4336-4ddf-a168-af6f682bfaab-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:34:00 crc kubenswrapper[4558]: I0120 17:34:00.235917 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d656fa11-4336-4ddf-a168-af6f682bfaab-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:34:00 crc kubenswrapper[4558]: I0120 17:34:00.235934 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gb2b7\" (UniqueName: \"kubernetes.io/projected/d656fa11-4336-4ddf-a168-af6f682bfaab-kube-api-access-gb2b7\") on node \"crc\" DevicePath \"\"" Jan 20 17:34:00 crc kubenswrapper[4558]: I0120 17:34:00.235945 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d656fa11-4336-4ddf-a168-af6f682bfaab-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:34:00 crc kubenswrapper[4558]: I0120 17:34:00.574807 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="37fd3160-ee6a-41a1-9c4b-260e72133f59" path="/var/lib/kubelet/pods/37fd3160-ee6a-41a1-9c4b-260e72133f59/volumes" Jan 20 17:34:00 crc kubenswrapper[4558]: I0120 17:34:00.686487 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:34:00 crc kubenswrapper[4558]: I0120 17:34:00.780608 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-ml6gb" event={"ID":"d656fa11-4336-4ddf-a168-af6f682bfaab","Type":"ContainerDied","Data":"ac334f99e319222acf21ff42f7f65655a438c507ccd82f37e0c5cfabfca5c7a3"} Jan 20 17:34:00 crc kubenswrapper[4558]: I0120 17:34:00.780637 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-ml6gb" Jan 20 17:34:00 crc kubenswrapper[4558]: I0120 17:34:00.780659 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ac334f99e319222acf21ff42f7f65655a438c507ccd82f37e0c5cfabfca5c7a3" Jan 20 17:34:00 crc kubenswrapper[4558]: I0120 17:34:00.783911 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"f4d77869-9132-4170-b50d-88389a33c597","Type":"ContainerStarted","Data":"02995edadd67159839c771c36a6d802eeb43852d8c189cae748c4f00fd0b7c25"} Jan 20 17:34:00 crc kubenswrapper[4558]: I0120 17:34:00.786221 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-6b8d9c59b6-z8fqn" event={"ID":"fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d","Type":"ContainerDied","Data":"4088b38ae9786f8c6b4ef5c81e601b056820aaab42a1dacb13b196d0e2754e1c"} Jan 20 17:34:00 crc kubenswrapper[4558]: I0120 17:34:00.786367 4558 scope.go:117] "RemoveContainer" containerID="1990d0bdef6da51da465520b09ecd48fce8e28b11c4163d2bf1d75f432ef1a20" Jan 20 17:34:00 crc kubenswrapper[4558]: I0120 17:34:00.786593 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-proxy-6b8d9c59b6-z8fqn" Jan 20 17:34:00 crc kubenswrapper[4558]: I0120 17:34:00.831136 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-proxy-6b8d9c59b6-z8fqn"] Jan 20 17:34:00 crc kubenswrapper[4558]: I0120 17:34:00.837428 4558 scope.go:117] "RemoveContainer" containerID="157e978c4ecc8e76d59459dde878219563637e712816d504a6cc4d4c37722f06" Jan 20 17:34:00 crc kubenswrapper[4558]: I0120 17:34:00.841058 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/swift-proxy-6b8d9c59b6-z8fqn"] Jan 20 17:34:01 crc kubenswrapper[4558]: I0120 17:34:01.797336 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"f4d77869-9132-4170-b50d-88389a33c597","Type":"ContainerStarted","Data":"96c4e3a692bc0dac49e1c6d07e764ff13bc464c6bdc3c29e2e82a3d65c290531"} Jan 20 17:34:01 crc kubenswrapper[4558]: I0120 17:34:01.797743 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:34:01 crc kubenswrapper[4558]: I0120 17:34:01.813583 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podStartSLOduration=2.813571454 podStartE2EDuration="2.813571454s" podCreationTimestamp="2026-01-20 17:33:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:34:01.813397758 +0000 UTC m=+3135.573735724" watchObservedRunningTime="2026-01-20 17:34:01.813571454 +0000 UTC m=+3135.573909422" Jan 20 17:34:02 crc kubenswrapper[4558]: I0120 17:34:02.597430 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d" path="/var/lib/kubelet/pods/fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d/volumes" Jan 20 17:34:02 crc kubenswrapper[4558]: E0120 17:34:02.624408 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="2623aa795d9d97ceb168764d999b00e635d55fa28ca538052a877af229f57732" 
cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:34:02 crc kubenswrapper[4558]: E0120 17:34:02.626135 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="2623aa795d9d97ceb168764d999b00e635d55fa28ca538052a877af229f57732" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:34:02 crc kubenswrapper[4558]: E0120 17:34:02.627648 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="2623aa795d9d97ceb168764d999b00e635d55fa28ca538052a877af229f57732" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:34:02 crc kubenswrapper[4558]: E0120 17:34:02.627715 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podUID="04a134bc-533d-490a-8544-61f705a6d4f2" containerName="nova-cell1-conductor-conductor" Jan 20 17:34:02 crc kubenswrapper[4558]: I0120 17:34:02.717018 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:34:02 crc kubenswrapper[4558]: I0120 17:34:02.811399 4558 generic.go:334] "Generic (PLEG): container finished" podID="44287ea3-63fc-4ab2-8140-5bf616c3a7ff" containerID="faebabce8a842ab2e7002067a6ce76f73862c2812a1910727df13575c8eb1b5b" exitCode=0 Jan 20 17:34:02 crc kubenswrapper[4558]: I0120 17:34:02.811446 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"44287ea3-63fc-4ab2-8140-5bf616c3a7ff","Type":"ContainerDied","Data":"faebabce8a842ab2e7002067a6ce76f73862c2812a1910727df13575c8eb1b5b"} Jan 20 17:34:02 crc kubenswrapper[4558]: I0120 17:34:02.811487 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:34:02 crc kubenswrapper[4558]: I0120 17:34:02.811507 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"44287ea3-63fc-4ab2-8140-5bf616c3a7ff","Type":"ContainerDied","Data":"c6d16525a5743ded2bf43287537c44ad70ca52a310f4a5166535b316b655facd"} Jan 20 17:34:02 crc kubenswrapper[4558]: I0120 17:34:02.811532 4558 scope.go:117] "RemoveContainer" containerID="03a9426897f16d041a1386a77332d0afd6e83735904065a47bb859471d68f190" Jan 20 17:34:02 crc kubenswrapper[4558]: I0120 17:34:02.829538 4558 scope.go:117] "RemoveContainer" containerID="e00823438a3d8284744c0067c4c4a1fe27184f085f878c30569dbc4f58bc8bde" Jan 20 17:34:02 crc kubenswrapper[4558]: I0120 17:34:02.845107 4558 scope.go:117] "RemoveContainer" containerID="f59292330b9f8b7139ba7ca90d368c82d20ac2bd0baeea4553498e6095afac6b" Jan 20 17:34:02 crc kubenswrapper[4558]: I0120 17:34:02.869199 4558 scope.go:117] "RemoveContainer" containerID="faebabce8a842ab2e7002067a6ce76f73862c2812a1910727df13575c8eb1b5b" Jan 20 17:34:02 crc kubenswrapper[4558]: I0120 17:34:02.889765 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qrxtk\" (UniqueName: \"kubernetes.io/projected/44287ea3-63fc-4ab2-8140-5bf616c3a7ff-kube-api-access-qrxtk\") pod \"44287ea3-63fc-4ab2-8140-5bf616c3a7ff\" (UID: \"44287ea3-63fc-4ab2-8140-5bf616c3a7ff\") " Jan 20 17:34:02 crc kubenswrapper[4558]: I0120 17:34:02.889851 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44287ea3-63fc-4ab2-8140-5bf616c3a7ff-combined-ca-bundle\") pod \"44287ea3-63fc-4ab2-8140-5bf616c3a7ff\" (UID: \"44287ea3-63fc-4ab2-8140-5bf616c3a7ff\") " Jan 20 17:34:02 crc kubenswrapper[4558]: I0120 17:34:02.889984 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/44287ea3-63fc-4ab2-8140-5bf616c3a7ff-sg-core-conf-yaml\") pod \"44287ea3-63fc-4ab2-8140-5bf616c3a7ff\" (UID: \"44287ea3-63fc-4ab2-8140-5bf616c3a7ff\") " Jan 20 17:34:02 crc kubenswrapper[4558]: I0120 17:34:02.890090 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/44287ea3-63fc-4ab2-8140-5bf616c3a7ff-run-httpd\") pod \"44287ea3-63fc-4ab2-8140-5bf616c3a7ff\" (UID: \"44287ea3-63fc-4ab2-8140-5bf616c3a7ff\") " Jan 20 17:34:02 crc kubenswrapper[4558]: I0120 17:34:02.890151 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/44287ea3-63fc-4ab2-8140-5bf616c3a7ff-config-data\") pod \"44287ea3-63fc-4ab2-8140-5bf616c3a7ff\" (UID: \"44287ea3-63fc-4ab2-8140-5bf616c3a7ff\") " Jan 20 17:34:02 crc kubenswrapper[4558]: I0120 17:34:02.890212 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/44287ea3-63fc-4ab2-8140-5bf616c3a7ff-log-httpd\") pod \"44287ea3-63fc-4ab2-8140-5bf616c3a7ff\" (UID: \"44287ea3-63fc-4ab2-8140-5bf616c3a7ff\") " Jan 20 17:34:02 crc kubenswrapper[4558]: I0120 17:34:02.890346 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/44287ea3-63fc-4ab2-8140-5bf616c3a7ff-scripts\") pod \"44287ea3-63fc-4ab2-8140-5bf616c3a7ff\" (UID: \"44287ea3-63fc-4ab2-8140-5bf616c3a7ff\") " Jan 20 
17:34:02 crc kubenswrapper[4558]: I0120 17:34:02.890975 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/44287ea3-63fc-4ab2-8140-5bf616c3a7ff-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "44287ea3-63fc-4ab2-8140-5bf616c3a7ff" (UID: "44287ea3-63fc-4ab2-8140-5bf616c3a7ff"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:34:02 crc kubenswrapper[4558]: I0120 17:34:02.891016 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/44287ea3-63fc-4ab2-8140-5bf616c3a7ff-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "44287ea3-63fc-4ab2-8140-5bf616c3a7ff" (UID: "44287ea3-63fc-4ab2-8140-5bf616c3a7ff"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:34:02 crc kubenswrapper[4558]: I0120 17:34:02.891446 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/44287ea3-63fc-4ab2-8140-5bf616c3a7ff-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:34:02 crc kubenswrapper[4558]: I0120 17:34:02.891495 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/44287ea3-63fc-4ab2-8140-5bf616c3a7ff-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:34:02 crc kubenswrapper[4558]: I0120 17:34:02.894422 4558 scope.go:117] "RemoveContainer" containerID="03a9426897f16d041a1386a77332d0afd6e83735904065a47bb859471d68f190" Jan 20 17:34:02 crc kubenswrapper[4558]: E0120 17:34:02.894909 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"03a9426897f16d041a1386a77332d0afd6e83735904065a47bb859471d68f190\": container with ID starting with 03a9426897f16d041a1386a77332d0afd6e83735904065a47bb859471d68f190 not found: ID does not exist" containerID="03a9426897f16d041a1386a77332d0afd6e83735904065a47bb859471d68f190" Jan 20 17:34:02 crc kubenswrapper[4558]: I0120 17:34:02.894955 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"03a9426897f16d041a1386a77332d0afd6e83735904065a47bb859471d68f190"} err="failed to get container status \"03a9426897f16d041a1386a77332d0afd6e83735904065a47bb859471d68f190\": rpc error: code = NotFound desc = could not find container \"03a9426897f16d041a1386a77332d0afd6e83735904065a47bb859471d68f190\": container with ID starting with 03a9426897f16d041a1386a77332d0afd6e83735904065a47bb859471d68f190 not found: ID does not exist" Jan 20 17:34:02 crc kubenswrapper[4558]: I0120 17:34:02.894988 4558 scope.go:117] "RemoveContainer" containerID="e00823438a3d8284744c0067c4c4a1fe27184f085f878c30569dbc4f58bc8bde" Jan 20 17:34:02 crc kubenswrapper[4558]: E0120 17:34:02.897039 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e00823438a3d8284744c0067c4c4a1fe27184f085f878c30569dbc4f58bc8bde\": container with ID starting with e00823438a3d8284744c0067c4c4a1fe27184f085f878c30569dbc4f58bc8bde not found: ID does not exist" containerID="e00823438a3d8284744c0067c4c4a1fe27184f085f878c30569dbc4f58bc8bde" Jan 20 17:34:02 crc kubenswrapper[4558]: I0120 17:34:02.897077 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e00823438a3d8284744c0067c4c4a1fe27184f085f878c30569dbc4f58bc8bde"} err="failed to get container status 
\"e00823438a3d8284744c0067c4c4a1fe27184f085f878c30569dbc4f58bc8bde\": rpc error: code = NotFound desc = could not find container \"e00823438a3d8284744c0067c4c4a1fe27184f085f878c30569dbc4f58bc8bde\": container with ID starting with e00823438a3d8284744c0067c4c4a1fe27184f085f878c30569dbc4f58bc8bde not found: ID does not exist" Jan 20 17:34:02 crc kubenswrapper[4558]: I0120 17:34:02.897108 4558 scope.go:117] "RemoveContainer" containerID="f59292330b9f8b7139ba7ca90d368c82d20ac2bd0baeea4553498e6095afac6b" Jan 20 17:34:02 crc kubenswrapper[4558]: E0120 17:34:02.897435 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f59292330b9f8b7139ba7ca90d368c82d20ac2bd0baeea4553498e6095afac6b\": container with ID starting with f59292330b9f8b7139ba7ca90d368c82d20ac2bd0baeea4553498e6095afac6b not found: ID does not exist" containerID="f59292330b9f8b7139ba7ca90d368c82d20ac2bd0baeea4553498e6095afac6b" Jan 20 17:34:02 crc kubenswrapper[4558]: I0120 17:34:02.897463 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f59292330b9f8b7139ba7ca90d368c82d20ac2bd0baeea4553498e6095afac6b"} err="failed to get container status \"f59292330b9f8b7139ba7ca90d368c82d20ac2bd0baeea4553498e6095afac6b\": rpc error: code = NotFound desc = could not find container \"f59292330b9f8b7139ba7ca90d368c82d20ac2bd0baeea4553498e6095afac6b\": container with ID starting with f59292330b9f8b7139ba7ca90d368c82d20ac2bd0baeea4553498e6095afac6b not found: ID does not exist" Jan 20 17:34:02 crc kubenswrapper[4558]: I0120 17:34:02.897479 4558 scope.go:117] "RemoveContainer" containerID="faebabce8a842ab2e7002067a6ce76f73862c2812a1910727df13575c8eb1b5b" Jan 20 17:34:02 crc kubenswrapper[4558]: I0120 17:34:02.897368 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44287ea3-63fc-4ab2-8140-5bf616c3a7ff-kube-api-access-qrxtk" (OuterVolumeSpecName: "kube-api-access-qrxtk") pod "44287ea3-63fc-4ab2-8140-5bf616c3a7ff" (UID: "44287ea3-63fc-4ab2-8140-5bf616c3a7ff"). InnerVolumeSpecName "kube-api-access-qrxtk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:34:02 crc kubenswrapper[4558]: E0120 17:34:02.898058 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"faebabce8a842ab2e7002067a6ce76f73862c2812a1910727df13575c8eb1b5b\": container with ID starting with faebabce8a842ab2e7002067a6ce76f73862c2812a1910727df13575c8eb1b5b not found: ID does not exist" containerID="faebabce8a842ab2e7002067a6ce76f73862c2812a1910727df13575c8eb1b5b" Jan 20 17:34:02 crc kubenswrapper[4558]: I0120 17:34:02.898091 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"faebabce8a842ab2e7002067a6ce76f73862c2812a1910727df13575c8eb1b5b"} err="failed to get container status \"faebabce8a842ab2e7002067a6ce76f73862c2812a1910727df13575c8eb1b5b\": rpc error: code = NotFound desc = could not find container \"faebabce8a842ab2e7002067a6ce76f73862c2812a1910727df13575c8eb1b5b\": container with ID starting with faebabce8a842ab2e7002067a6ce76f73862c2812a1910727df13575c8eb1b5b not found: ID does not exist" Jan 20 17:34:02 crc kubenswrapper[4558]: I0120 17:34:02.898286 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/44287ea3-63fc-4ab2-8140-5bf616c3a7ff-scripts" (OuterVolumeSpecName: "scripts") pod "44287ea3-63fc-4ab2-8140-5bf616c3a7ff" (UID: "44287ea3-63fc-4ab2-8140-5bf616c3a7ff"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:34:02 crc kubenswrapper[4558]: I0120 17:34:02.923283 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/44287ea3-63fc-4ab2-8140-5bf616c3a7ff-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "44287ea3-63fc-4ab2-8140-5bf616c3a7ff" (UID: "44287ea3-63fc-4ab2-8140-5bf616c3a7ff"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:34:02 crc kubenswrapper[4558]: I0120 17:34:02.958845 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/44287ea3-63fc-4ab2-8140-5bf616c3a7ff-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "44287ea3-63fc-4ab2-8140-5bf616c3a7ff" (UID: "44287ea3-63fc-4ab2-8140-5bf616c3a7ff"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:34:02 crc kubenswrapper[4558]: I0120 17:34:02.983592 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/44287ea3-63fc-4ab2-8140-5bf616c3a7ff-config-data" (OuterVolumeSpecName: "config-data") pod "44287ea3-63fc-4ab2-8140-5bf616c3a7ff" (UID: "44287ea3-63fc-4ab2-8140-5bf616c3a7ff"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:34:02 crc kubenswrapper[4558]: I0120 17:34:02.995416 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/44287ea3-63fc-4ab2-8140-5bf616c3a7ff-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:34:02 crc kubenswrapper[4558]: I0120 17:34:02.996246 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/44287ea3-63fc-4ab2-8140-5bf616c3a7ff-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:34:02 crc kubenswrapper[4558]: I0120 17:34:02.996274 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/44287ea3-63fc-4ab2-8140-5bf616c3a7ff-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:34:02 crc kubenswrapper[4558]: I0120 17:34:02.996292 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qrxtk\" (UniqueName: \"kubernetes.io/projected/44287ea3-63fc-4ab2-8140-5bf616c3a7ff-kube-api-access-qrxtk\") on node \"crc\" DevicePath \"\"" Jan 20 17:34:02 crc kubenswrapper[4558]: I0120 17:34:02.996307 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44287ea3-63fc-4ab2-8140-5bf616c3a7ff-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:34:03 crc kubenswrapper[4558]: I0120 17:34:03.144397 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:34:03 crc kubenswrapper[4558]: I0120 17:34:03.152893 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:34:03 crc kubenswrapper[4558]: I0120 17:34:03.187233 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:34:03 crc kubenswrapper[4558]: E0120 17:34:03.190144 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="44287ea3-63fc-4ab2-8140-5bf616c3a7ff" containerName="ceilometer-central-agent" Jan 20 17:34:03 crc kubenswrapper[4558]: I0120 17:34:03.190191 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="44287ea3-63fc-4ab2-8140-5bf616c3a7ff" containerName="ceilometer-central-agent" Jan 20 17:34:03 crc kubenswrapper[4558]: E0120 17:34:03.190220 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="44287ea3-63fc-4ab2-8140-5bf616c3a7ff" containerName="proxy-httpd" Jan 20 17:34:03 crc kubenswrapper[4558]: I0120 17:34:03.190227 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="44287ea3-63fc-4ab2-8140-5bf616c3a7ff" containerName="proxy-httpd" Jan 20 17:34:03 crc kubenswrapper[4558]: E0120 17:34:03.190267 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d" containerName="proxy-server" Jan 20 17:34:03 crc kubenswrapper[4558]: I0120 17:34:03.190273 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d" containerName="proxy-server" Jan 20 17:34:03 crc kubenswrapper[4558]: E0120 17:34:03.190291 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="44287ea3-63fc-4ab2-8140-5bf616c3a7ff" containerName="ceilometer-notification-agent" Jan 20 17:34:03 crc kubenswrapper[4558]: I0120 17:34:03.190298 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="44287ea3-63fc-4ab2-8140-5bf616c3a7ff" containerName="ceilometer-notification-agent" Jan 20 17:34:03 crc kubenswrapper[4558]: E0120 17:34:03.190317 4558 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d656fa11-4336-4ddf-a168-af6f682bfaab" containerName="nova-manage" Jan 20 17:34:03 crc kubenswrapper[4558]: I0120 17:34:03.190324 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d656fa11-4336-4ddf-a168-af6f682bfaab" containerName="nova-manage" Jan 20 17:34:03 crc kubenswrapper[4558]: E0120 17:34:03.190335 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d" containerName="proxy-httpd" Jan 20 17:34:03 crc kubenswrapper[4558]: I0120 17:34:03.190343 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d" containerName="proxy-httpd" Jan 20 17:34:03 crc kubenswrapper[4558]: E0120 17:34:03.190361 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="44287ea3-63fc-4ab2-8140-5bf616c3a7ff" containerName="sg-core" Jan 20 17:34:03 crc kubenswrapper[4558]: I0120 17:34:03.190367 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="44287ea3-63fc-4ab2-8140-5bf616c3a7ff" containerName="sg-core" Jan 20 17:34:03 crc kubenswrapper[4558]: I0120 17:34:03.190717 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d656fa11-4336-4ddf-a168-af6f682bfaab" containerName="nova-manage" Jan 20 17:34:03 crc kubenswrapper[4558]: I0120 17:34:03.190743 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="44287ea3-63fc-4ab2-8140-5bf616c3a7ff" containerName="proxy-httpd" Jan 20 17:34:03 crc kubenswrapper[4558]: I0120 17:34:03.190767 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d" containerName="proxy-server" Jan 20 17:34:03 crc kubenswrapper[4558]: I0120 17:34:03.190776 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="44287ea3-63fc-4ab2-8140-5bf616c3a7ff" containerName="sg-core" Jan 20 17:34:03 crc kubenswrapper[4558]: I0120 17:34:03.190793 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="44287ea3-63fc-4ab2-8140-5bf616c3a7ff" containerName="ceilometer-central-agent" Jan 20 17:34:03 crc kubenswrapper[4558]: I0120 17:34:03.190802 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="44287ea3-63fc-4ab2-8140-5bf616c3a7ff" containerName="ceilometer-notification-agent" Jan 20 17:34:03 crc kubenswrapper[4558]: I0120 17:34:03.190820 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="fcd9c0dd-1942-49ff-b3de-7a49b4e2ad5d" containerName="proxy-httpd" Jan 20 17:34:03 crc kubenswrapper[4558]: I0120 17:34:03.195046 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:34:03 crc kubenswrapper[4558]: I0120 17:34:03.195184 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:34:03 crc kubenswrapper[4558]: I0120 17:34:03.200557 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:34:03 crc kubenswrapper[4558]: I0120 17:34:03.200581 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:34:03 crc kubenswrapper[4558]: I0120 17:34:03.200915 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/064c3f1d-6117-4cd9-bf5f-400b06b11d59-log-httpd\") pod \"ceilometer-0\" (UID: \"064c3f1d-6117-4cd9-bf5f-400b06b11d59\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:34:03 crc kubenswrapper[4558]: I0120 17:34:03.200992 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jk84b\" (UniqueName: \"kubernetes.io/projected/064c3f1d-6117-4cd9-bf5f-400b06b11d59-kube-api-access-jk84b\") pod \"ceilometer-0\" (UID: \"064c3f1d-6117-4cd9-bf5f-400b06b11d59\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:34:03 crc kubenswrapper[4558]: I0120 17:34:03.201060 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/064c3f1d-6117-4cd9-bf5f-400b06b11d59-scripts\") pod \"ceilometer-0\" (UID: \"064c3f1d-6117-4cd9-bf5f-400b06b11d59\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:34:03 crc kubenswrapper[4558]: I0120 17:34:03.201097 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/064c3f1d-6117-4cd9-bf5f-400b06b11d59-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"064c3f1d-6117-4cd9-bf5f-400b06b11d59\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:34:03 crc kubenswrapper[4558]: I0120 17:34:03.201225 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/064c3f1d-6117-4cd9-bf5f-400b06b11d59-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"064c3f1d-6117-4cd9-bf5f-400b06b11d59\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:34:03 crc kubenswrapper[4558]: I0120 17:34:03.201285 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/064c3f1d-6117-4cd9-bf5f-400b06b11d59-run-httpd\") pod \"ceilometer-0\" (UID: \"064c3f1d-6117-4cd9-bf5f-400b06b11d59\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:34:03 crc kubenswrapper[4558]: I0120 17:34:03.201517 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/064c3f1d-6117-4cd9-bf5f-400b06b11d59-config-data\") pod \"ceilometer-0\" (UID: \"064c3f1d-6117-4cd9-bf5f-400b06b11d59\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:34:03 crc kubenswrapper[4558]: I0120 17:34:03.303585 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/064c3f1d-6117-4cd9-bf5f-400b06b11d59-config-data\") pod \"ceilometer-0\" (UID: \"064c3f1d-6117-4cd9-bf5f-400b06b11d59\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:34:03 crc kubenswrapper[4558]: I0120 17:34:03.303671 4558 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/064c3f1d-6117-4cd9-bf5f-400b06b11d59-log-httpd\") pod \"ceilometer-0\" (UID: \"064c3f1d-6117-4cd9-bf5f-400b06b11d59\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:34:03 crc kubenswrapper[4558]: I0120 17:34:03.303729 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jk84b\" (UniqueName: \"kubernetes.io/projected/064c3f1d-6117-4cd9-bf5f-400b06b11d59-kube-api-access-jk84b\") pod \"ceilometer-0\" (UID: \"064c3f1d-6117-4cd9-bf5f-400b06b11d59\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:34:03 crc kubenswrapper[4558]: I0120 17:34:03.303789 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/064c3f1d-6117-4cd9-bf5f-400b06b11d59-scripts\") pod \"ceilometer-0\" (UID: \"064c3f1d-6117-4cd9-bf5f-400b06b11d59\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:34:03 crc kubenswrapper[4558]: I0120 17:34:03.303831 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/064c3f1d-6117-4cd9-bf5f-400b06b11d59-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"064c3f1d-6117-4cd9-bf5f-400b06b11d59\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:34:03 crc kubenswrapper[4558]: I0120 17:34:03.303950 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/064c3f1d-6117-4cd9-bf5f-400b06b11d59-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"064c3f1d-6117-4cd9-bf5f-400b06b11d59\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:34:03 crc kubenswrapper[4558]: I0120 17:34:03.303999 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/064c3f1d-6117-4cd9-bf5f-400b06b11d59-run-httpd\") pod \"ceilometer-0\" (UID: \"064c3f1d-6117-4cd9-bf5f-400b06b11d59\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:34:03 crc kubenswrapper[4558]: I0120 17:34:03.304584 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/064c3f1d-6117-4cd9-bf5f-400b06b11d59-run-httpd\") pod \"ceilometer-0\" (UID: \"064c3f1d-6117-4cd9-bf5f-400b06b11d59\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:34:03 crc kubenswrapper[4558]: I0120 17:34:03.304856 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/064c3f1d-6117-4cd9-bf5f-400b06b11d59-log-httpd\") pod \"ceilometer-0\" (UID: \"064c3f1d-6117-4cd9-bf5f-400b06b11d59\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:34:03 crc kubenswrapper[4558]: I0120 17:34:03.308630 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/064c3f1d-6117-4cd9-bf5f-400b06b11d59-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"064c3f1d-6117-4cd9-bf5f-400b06b11d59\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:34:03 crc kubenswrapper[4558]: I0120 17:34:03.308732 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/064c3f1d-6117-4cd9-bf5f-400b06b11d59-scripts\") pod \"ceilometer-0\" (UID: \"064c3f1d-6117-4cd9-bf5f-400b06b11d59\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 
17:34:03 crc kubenswrapper[4558]: I0120 17:34:03.309511 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/064c3f1d-6117-4cd9-bf5f-400b06b11d59-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"064c3f1d-6117-4cd9-bf5f-400b06b11d59\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:34:03 crc kubenswrapper[4558]: I0120 17:34:03.309550 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/064c3f1d-6117-4cd9-bf5f-400b06b11d59-config-data\") pod \"ceilometer-0\" (UID: \"064c3f1d-6117-4cd9-bf5f-400b06b11d59\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:34:03 crc kubenswrapper[4558]: I0120 17:34:03.320085 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jk84b\" (UniqueName: \"kubernetes.io/projected/064c3f1d-6117-4cd9-bf5f-400b06b11d59-kube-api-access-jk84b\") pod \"ceilometer-0\" (UID: \"064c3f1d-6117-4cd9-bf5f-400b06b11d59\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:34:03 crc kubenswrapper[4558]: I0120 17:34:03.517810 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:34:03 crc kubenswrapper[4558]: I0120 17:34:03.947848 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:34:03 crc kubenswrapper[4558]: W0120 17:34:03.952686 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod064c3f1d_6117_4cd9_bf5f_400b06b11d59.slice/crio-efd21b49b63ae4659fc950bab4c8aa6216890d0805a08681be0f14455536c3ae WatchSource:0}: Error finding container efd21b49b63ae4659fc950bab4c8aa6216890d0805a08681be0f14455536c3ae: Status 404 returned error can't find the container with id efd21b49b63ae4659fc950bab4c8aa6216890d0805a08681be0f14455536c3ae Jan 20 17:34:04 crc kubenswrapper[4558]: I0120 17:34:04.576309 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44287ea3-63fc-4ab2-8140-5bf616c3a7ff" path="/var/lib/kubelet/pods/44287ea3-63fc-4ab2-8140-5bf616c3a7ff/volumes" Jan 20 17:34:04 crc kubenswrapper[4558]: I0120 17:34:04.844560 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"064c3f1d-6117-4cd9-bf5f-400b06b11d59","Type":"ContainerStarted","Data":"b379979fb76580540ae38daa220da7cd19bceeb6504b4c7e195ca0c79794a990"} Jan 20 17:34:04 crc kubenswrapper[4558]: I0120 17:34:04.844775 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"064c3f1d-6117-4cd9-bf5f-400b06b11d59","Type":"ContainerStarted","Data":"efd21b49b63ae4659fc950bab4c8aa6216890d0805a08681be0f14455536c3ae"} Jan 20 17:34:05 crc kubenswrapper[4558]: I0120 17:34:05.044299 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:34:05 crc kubenswrapper[4558]: I0120 17:34:05.072042 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:34:05 crc kubenswrapper[4558]: I0120 17:34:05.187066 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:34:05 crc kubenswrapper[4558]: I0120 17:34:05.706957 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:34:05 crc kubenswrapper[4558]: I0120 17:34:05.714786 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:34:05 crc kubenswrapper[4558]: I0120 17:34:05.715055 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="56208926-59b4-4fa1-9e8b-97846ed85d61" containerName="nova-api-log" containerID="cri-o://97b9e76652e3421488afeadedd1e7cc12b71010b839f97d3a9e51d0443a85116" gracePeriod=30 Jan 20 17:34:05 crc kubenswrapper[4558]: I0120 17:34:05.715129 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="56208926-59b4-4fa1-9e8b-97846ed85d61" containerName="nova-api-api" containerID="cri-o://29810e61422f066b6fe1d3ae95b85fe80170ad2ddf3b6183dea1aff947fae5b0" gracePeriod=30 Jan 20 17:34:05 crc kubenswrapper[4558]: I0120 17:34:05.856838 4558 generic.go:334] "Generic (PLEG): container finished" podID="56208926-59b4-4fa1-9e8b-97846ed85d61" containerID="97b9e76652e3421488afeadedd1e7cc12b71010b839f97d3a9e51d0443a85116" exitCode=143 Jan 20 17:34:05 crc kubenswrapper[4558]: I0120 17:34:05.856863 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"56208926-59b4-4fa1-9e8b-97846ed85d61","Type":"ContainerDied","Data":"97b9e76652e3421488afeadedd1e7cc12b71010b839f97d3a9e51d0443a85116"} Jan 20 17:34:05 crc kubenswrapper[4558]: I0120 17:34:05.859028 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"064c3f1d-6117-4cd9-bf5f-400b06b11d59","Type":"ContainerStarted","Data":"b76c14d8886d5774bd6f812f24811198b815faf3df6e02edfe827602a5f05a10"} Jan 20 17:34:05 crc kubenswrapper[4558]: I0120 17:34:05.885123 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:34:06 crc kubenswrapper[4558]: I0120 17:34:06.217248 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:34:06 crc kubenswrapper[4558]: I0120 17:34:06.283665 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nq2qh\" (UniqueName: \"kubernetes.io/projected/56208926-59b4-4fa1-9e8b-97846ed85d61-kube-api-access-nq2qh\") pod \"56208926-59b4-4fa1-9e8b-97846ed85d61\" (UID: \"56208926-59b4-4fa1-9e8b-97846ed85d61\") " Jan 20 17:34:06 crc kubenswrapper[4558]: I0120 17:34:06.283908 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56208926-59b4-4fa1-9e8b-97846ed85d61-config-data\") pod \"56208926-59b4-4fa1-9e8b-97846ed85d61\" (UID: \"56208926-59b4-4fa1-9e8b-97846ed85d61\") " Jan 20 17:34:06 crc kubenswrapper[4558]: I0120 17:34:06.284075 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/56208926-59b4-4fa1-9e8b-97846ed85d61-logs\") pod \"56208926-59b4-4fa1-9e8b-97846ed85d61\" (UID: \"56208926-59b4-4fa1-9e8b-97846ed85d61\") " Jan 20 17:34:06 crc kubenswrapper[4558]: I0120 17:34:06.284208 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/56208926-59b4-4fa1-9e8b-97846ed85d61-internal-tls-certs\") pod \"56208926-59b4-4fa1-9e8b-97846ed85d61\" (UID: \"56208926-59b4-4fa1-9e8b-97846ed85d61\") " Jan 20 17:34:06 crc kubenswrapper[4558]: I0120 17:34:06.284331 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56208926-59b4-4fa1-9e8b-97846ed85d61-combined-ca-bundle\") pod \"56208926-59b4-4fa1-9e8b-97846ed85d61\" (UID: \"56208926-59b4-4fa1-9e8b-97846ed85d61\") " Jan 20 17:34:06 crc kubenswrapper[4558]: I0120 17:34:06.284418 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/56208926-59b4-4fa1-9e8b-97846ed85d61-public-tls-certs\") pod \"56208926-59b4-4fa1-9e8b-97846ed85d61\" (UID: \"56208926-59b4-4fa1-9e8b-97846ed85d61\") " Jan 20 17:34:06 crc kubenswrapper[4558]: I0120 17:34:06.285320 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/56208926-59b4-4fa1-9e8b-97846ed85d61-logs" (OuterVolumeSpecName: "logs") pod "56208926-59b4-4fa1-9e8b-97846ed85d61" (UID: "56208926-59b4-4fa1-9e8b-97846ed85d61"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:34:06 crc kubenswrapper[4558]: I0120 17:34:06.289757 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/56208926-59b4-4fa1-9e8b-97846ed85d61-kube-api-access-nq2qh" (OuterVolumeSpecName: "kube-api-access-nq2qh") pod "56208926-59b4-4fa1-9e8b-97846ed85d61" (UID: "56208926-59b4-4fa1-9e8b-97846ed85d61"). InnerVolumeSpecName "kube-api-access-nq2qh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:34:06 crc kubenswrapper[4558]: I0120 17:34:06.317586 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56208926-59b4-4fa1-9e8b-97846ed85d61-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "56208926-59b4-4fa1-9e8b-97846ed85d61" (UID: "56208926-59b4-4fa1-9e8b-97846ed85d61"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:34:06 crc kubenswrapper[4558]: I0120 17:34:06.318222 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56208926-59b4-4fa1-9e8b-97846ed85d61-config-data" (OuterVolumeSpecName: "config-data") pod "56208926-59b4-4fa1-9e8b-97846ed85d61" (UID: "56208926-59b4-4fa1-9e8b-97846ed85d61"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:34:06 crc kubenswrapper[4558]: I0120 17:34:06.351882 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56208926-59b4-4fa1-9e8b-97846ed85d61-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "56208926-59b4-4fa1-9e8b-97846ed85d61" (UID: "56208926-59b4-4fa1-9e8b-97846ed85d61"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:34:06 crc kubenswrapper[4558]: I0120 17:34:06.354238 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56208926-59b4-4fa1-9e8b-97846ed85d61-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "56208926-59b4-4fa1-9e8b-97846ed85d61" (UID: "56208926-59b4-4fa1-9e8b-97846ed85d61"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:34:06 crc kubenswrapper[4558]: I0120 17:34:06.387541 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nq2qh\" (UniqueName: \"kubernetes.io/projected/56208926-59b4-4fa1-9e8b-97846ed85d61-kube-api-access-nq2qh\") on node \"crc\" DevicePath \"\"" Jan 20 17:34:06 crc kubenswrapper[4558]: I0120 17:34:06.387574 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56208926-59b4-4fa1-9e8b-97846ed85d61-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:34:06 crc kubenswrapper[4558]: I0120 17:34:06.387588 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/56208926-59b4-4fa1-9e8b-97846ed85d61-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:34:06 crc kubenswrapper[4558]: I0120 17:34:06.387599 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/56208926-59b4-4fa1-9e8b-97846ed85d61-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:34:06 crc kubenswrapper[4558]: I0120 17:34:06.387611 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56208926-59b4-4fa1-9e8b-97846ed85d61-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:34:06 crc kubenswrapper[4558]: I0120 17:34:06.387620 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/56208926-59b4-4fa1-9e8b-97846ed85d61-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:34:06 crc kubenswrapper[4558]: I0120 17:34:06.874400 4558 generic.go:334] "Generic (PLEG): container finished" podID="56208926-59b4-4fa1-9e8b-97846ed85d61" containerID="29810e61422f066b6fe1d3ae95b85fe80170ad2ddf3b6183dea1aff947fae5b0" exitCode=0 Jan 20 17:34:06 crc kubenswrapper[4558]: I0120 17:34:06.874497 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:34:06 crc kubenswrapper[4558]: I0120 17:34:06.874498 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"56208926-59b4-4fa1-9e8b-97846ed85d61","Type":"ContainerDied","Data":"29810e61422f066b6fe1d3ae95b85fe80170ad2ddf3b6183dea1aff947fae5b0"} Jan 20 17:34:06 crc kubenswrapper[4558]: I0120 17:34:06.874591 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"56208926-59b4-4fa1-9e8b-97846ed85d61","Type":"ContainerDied","Data":"a0032abe6081a17455b83a1d0df740d9aa8e90749e7b4116b6bfe867ba6cdf50"} Jan 20 17:34:06 crc kubenswrapper[4558]: I0120 17:34:06.874623 4558 scope.go:117] "RemoveContainer" containerID="29810e61422f066b6fe1d3ae95b85fe80170ad2ddf3b6183dea1aff947fae5b0" Jan 20 17:34:06 crc kubenswrapper[4558]: I0120 17:34:06.878854 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"064c3f1d-6117-4cd9-bf5f-400b06b11d59","Type":"ContainerStarted","Data":"f48aeb30b2cfaa08bb3f13eee1aaf3c0f1c1855c7c00bdf4b9fa474db4b08957"} Jan 20 17:34:06 crc kubenswrapper[4558]: I0120 17:34:06.879006 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="3481546b-c57f-43c9-92f9-ffcbc3af0d8f" containerName="nova-scheduler-scheduler" containerID="cri-o://7387066aabcf8bc09e2a1f9c8d883fa307f7ddccde17efaf31e07b22b87457ba" gracePeriod=30 Jan 20 17:34:06 crc kubenswrapper[4558]: I0120 17:34:06.907143 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:34:06 crc kubenswrapper[4558]: I0120 17:34:06.914464 4558 scope.go:117] "RemoveContainer" containerID="97b9e76652e3421488afeadedd1e7cc12b71010b839f97d3a9e51d0443a85116" Jan 20 17:34:06 crc kubenswrapper[4558]: I0120 17:34:06.918498 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:34:06 crc kubenswrapper[4558]: I0120 17:34:06.928354 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:34:06 crc kubenswrapper[4558]: E0120 17:34:06.928924 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56208926-59b4-4fa1-9e8b-97846ed85d61" containerName="nova-api-log" Jan 20 17:34:06 crc kubenswrapper[4558]: I0120 17:34:06.928942 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="56208926-59b4-4fa1-9e8b-97846ed85d61" containerName="nova-api-log" Jan 20 17:34:06 crc kubenswrapper[4558]: E0120 17:34:06.928956 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56208926-59b4-4fa1-9e8b-97846ed85d61" containerName="nova-api-api" Jan 20 17:34:06 crc kubenswrapper[4558]: I0120 17:34:06.928963 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="56208926-59b4-4fa1-9e8b-97846ed85d61" containerName="nova-api-api" Jan 20 17:34:06 crc kubenswrapper[4558]: I0120 17:34:06.929251 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="56208926-59b4-4fa1-9e8b-97846ed85d61" containerName="nova-api-api" Jan 20 17:34:06 crc kubenswrapper[4558]: I0120 17:34:06.929266 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="56208926-59b4-4fa1-9e8b-97846ed85d61" containerName="nova-api-log" Jan 20 17:34:06 crc kubenswrapper[4558]: I0120 17:34:06.930419 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:34:06 crc kubenswrapper[4558]: I0120 17:34:06.937360 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-api-config-data" Jan 20 17:34:06 crc kubenswrapper[4558]: I0120 17:34:06.937470 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-public-svc" Jan 20 17:34:06 crc kubenswrapper[4558]: I0120 17:34:06.937710 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-internal-svc" Jan 20 17:34:06 crc kubenswrapper[4558]: I0120 17:34:06.948340 4558 scope.go:117] "RemoveContainer" containerID="29810e61422f066b6fe1d3ae95b85fe80170ad2ddf3b6183dea1aff947fae5b0" Jan 20 17:34:06 crc kubenswrapper[4558]: E0120 17:34:06.948904 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"29810e61422f066b6fe1d3ae95b85fe80170ad2ddf3b6183dea1aff947fae5b0\": container with ID starting with 29810e61422f066b6fe1d3ae95b85fe80170ad2ddf3b6183dea1aff947fae5b0 not found: ID does not exist" containerID="29810e61422f066b6fe1d3ae95b85fe80170ad2ddf3b6183dea1aff947fae5b0" Jan 20 17:34:06 crc kubenswrapper[4558]: I0120 17:34:06.948938 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"29810e61422f066b6fe1d3ae95b85fe80170ad2ddf3b6183dea1aff947fae5b0"} err="failed to get container status \"29810e61422f066b6fe1d3ae95b85fe80170ad2ddf3b6183dea1aff947fae5b0\": rpc error: code = NotFound desc = could not find container \"29810e61422f066b6fe1d3ae95b85fe80170ad2ddf3b6183dea1aff947fae5b0\": container with ID starting with 29810e61422f066b6fe1d3ae95b85fe80170ad2ddf3b6183dea1aff947fae5b0 not found: ID does not exist" Jan 20 17:34:06 crc kubenswrapper[4558]: I0120 17:34:06.948963 4558 scope.go:117] "RemoveContainer" containerID="97b9e76652e3421488afeadedd1e7cc12b71010b839f97d3a9e51d0443a85116" Jan 20 17:34:06 crc kubenswrapper[4558]: E0120 17:34:06.949672 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"97b9e76652e3421488afeadedd1e7cc12b71010b839f97d3a9e51d0443a85116\": container with ID starting with 97b9e76652e3421488afeadedd1e7cc12b71010b839f97d3a9e51d0443a85116 not found: ID does not exist" containerID="97b9e76652e3421488afeadedd1e7cc12b71010b839f97d3a9e51d0443a85116" Jan 20 17:34:06 crc kubenswrapper[4558]: I0120 17:34:06.949715 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"97b9e76652e3421488afeadedd1e7cc12b71010b839f97d3a9e51d0443a85116"} err="failed to get container status \"97b9e76652e3421488afeadedd1e7cc12b71010b839f97d3a9e51d0443a85116\": rpc error: code = NotFound desc = could not find container \"97b9e76652e3421488afeadedd1e7cc12b71010b839f97d3a9e51d0443a85116\": container with ID starting with 97b9e76652e3421488afeadedd1e7cc12b71010b839f97d3a9e51d0443a85116 not found: ID does not exist" Jan 20 17:34:06 crc kubenswrapper[4558]: I0120 17:34:06.950085 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:34:07 crc kubenswrapper[4558]: I0120 17:34:07.002204 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/403ecf58-a186-4cda-b9c0-3504f3edeb02-combined-ca-bundle\") pod \"nova-api-0\" (UID: 
\"403ecf58-a186-4cda-b9c0-3504f3edeb02\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:34:07 crc kubenswrapper[4558]: I0120 17:34:07.002277 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/403ecf58-a186-4cda-b9c0-3504f3edeb02-internal-tls-certs\") pod \"nova-api-0\" (UID: \"403ecf58-a186-4cda-b9c0-3504f3edeb02\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:34:07 crc kubenswrapper[4558]: I0120 17:34:07.002565 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/403ecf58-a186-4cda-b9c0-3504f3edeb02-config-data\") pod \"nova-api-0\" (UID: \"403ecf58-a186-4cda-b9c0-3504f3edeb02\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:34:07 crc kubenswrapper[4558]: I0120 17:34:07.002696 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/403ecf58-a186-4cda-b9c0-3504f3edeb02-logs\") pod \"nova-api-0\" (UID: \"403ecf58-a186-4cda-b9c0-3504f3edeb02\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:34:07 crc kubenswrapper[4558]: I0120 17:34:07.002861 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/403ecf58-a186-4cda-b9c0-3504f3edeb02-public-tls-certs\") pod \"nova-api-0\" (UID: \"403ecf58-a186-4cda-b9c0-3504f3edeb02\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:34:07 crc kubenswrapper[4558]: I0120 17:34:07.003041 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r2hzs\" (UniqueName: \"kubernetes.io/projected/403ecf58-a186-4cda-b9c0-3504f3edeb02-kube-api-access-r2hzs\") pod \"nova-api-0\" (UID: \"403ecf58-a186-4cda-b9c0-3504f3edeb02\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:34:07 crc kubenswrapper[4558]: I0120 17:34:07.104385 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r2hzs\" (UniqueName: \"kubernetes.io/projected/403ecf58-a186-4cda-b9c0-3504f3edeb02-kube-api-access-r2hzs\") pod \"nova-api-0\" (UID: \"403ecf58-a186-4cda-b9c0-3504f3edeb02\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:34:07 crc kubenswrapper[4558]: I0120 17:34:07.104469 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/403ecf58-a186-4cda-b9c0-3504f3edeb02-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"403ecf58-a186-4cda-b9c0-3504f3edeb02\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:34:07 crc kubenswrapper[4558]: I0120 17:34:07.104497 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/403ecf58-a186-4cda-b9c0-3504f3edeb02-internal-tls-certs\") pod \"nova-api-0\" (UID: \"403ecf58-a186-4cda-b9c0-3504f3edeb02\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:34:07 crc kubenswrapper[4558]: I0120 17:34:07.104537 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/403ecf58-a186-4cda-b9c0-3504f3edeb02-config-data\") pod \"nova-api-0\" (UID: \"403ecf58-a186-4cda-b9c0-3504f3edeb02\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:34:07 crc kubenswrapper[4558]: I0120 17:34:07.104557 4558 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/403ecf58-a186-4cda-b9c0-3504f3edeb02-logs\") pod \"nova-api-0\" (UID: \"403ecf58-a186-4cda-b9c0-3504f3edeb02\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:34:07 crc kubenswrapper[4558]: I0120 17:34:07.104603 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/403ecf58-a186-4cda-b9c0-3504f3edeb02-public-tls-certs\") pod \"nova-api-0\" (UID: \"403ecf58-a186-4cda-b9c0-3504f3edeb02\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:34:07 crc kubenswrapper[4558]: I0120 17:34:07.105901 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/403ecf58-a186-4cda-b9c0-3504f3edeb02-logs\") pod \"nova-api-0\" (UID: \"403ecf58-a186-4cda-b9c0-3504f3edeb02\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:34:07 crc kubenswrapper[4558]: I0120 17:34:07.110622 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/403ecf58-a186-4cda-b9c0-3504f3edeb02-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"403ecf58-a186-4cda-b9c0-3504f3edeb02\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:34:07 crc kubenswrapper[4558]: I0120 17:34:07.111722 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/403ecf58-a186-4cda-b9c0-3504f3edeb02-internal-tls-certs\") pod \"nova-api-0\" (UID: \"403ecf58-a186-4cda-b9c0-3504f3edeb02\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:34:07 crc kubenswrapper[4558]: I0120 17:34:07.111853 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/403ecf58-a186-4cda-b9c0-3504f3edeb02-config-data\") pod \"nova-api-0\" (UID: \"403ecf58-a186-4cda-b9c0-3504f3edeb02\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:34:07 crc kubenswrapper[4558]: I0120 17:34:07.117208 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/403ecf58-a186-4cda-b9c0-3504f3edeb02-public-tls-certs\") pod \"nova-api-0\" (UID: \"403ecf58-a186-4cda-b9c0-3504f3edeb02\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:34:07 crc kubenswrapper[4558]: I0120 17:34:07.120053 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r2hzs\" (UniqueName: \"kubernetes.io/projected/403ecf58-a186-4cda-b9c0-3504f3edeb02-kube-api-access-r2hzs\") pod \"nova-api-0\" (UID: \"403ecf58-a186-4cda-b9c0-3504f3edeb02\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:34:07 crc kubenswrapper[4558]: I0120 17:34:07.258428 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:34:07 crc kubenswrapper[4558]: E0120 17:34:07.622406 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="2623aa795d9d97ceb168764d999b00e635d55fa28ca538052a877af229f57732" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:34:07 crc kubenswrapper[4558]: E0120 17:34:07.626450 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="2623aa795d9d97ceb168764d999b00e635d55fa28ca538052a877af229f57732" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:34:07 crc kubenswrapper[4558]: E0120 17:34:07.630675 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="2623aa795d9d97ceb168764d999b00e635d55fa28ca538052a877af229f57732" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:34:07 crc kubenswrapper[4558]: E0120 17:34:07.630742 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podUID="04a134bc-533d-490a-8544-61f705a6d4f2" containerName="nova-cell1-conductor-conductor" Jan 20 17:34:07 crc kubenswrapper[4558]: I0120 17:34:07.704019 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:34:07 crc kubenswrapper[4558]: I0120 17:34:07.896617 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"403ecf58-a186-4cda-b9c0-3504f3edeb02","Type":"ContainerStarted","Data":"1cf660a3fd74859ff963a723ee3b7beb2a4d2c7466eead735cedc8f92917bf47"} Jan 20 17:34:07 crc kubenswrapper[4558]: I0120 17:34:07.896675 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"403ecf58-a186-4cda-b9c0-3504f3edeb02","Type":"ContainerStarted","Data":"1d86216a784830a97f30241fe62252e3ad3341a0743a0d2d7b8cb38df1498998"} Jan 20 17:34:07 crc kubenswrapper[4558]: I0120 17:34:07.901040 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"064c3f1d-6117-4cd9-bf5f-400b06b11d59","Type":"ContainerStarted","Data":"dc58496c4ba70fea566614a3cce2786b241478b1915fd38952580ac26231d50f"} Jan 20 17:34:07 crc kubenswrapper[4558]: I0120 17:34:07.901650 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:34:07 crc kubenswrapper[4558]: I0120 17:34:07.931541 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=1.3614344250000001 podStartE2EDuration="4.931521111s" podCreationTimestamp="2026-01-20 17:34:03 +0000 UTC" firstStartedPulling="2026-01-20 17:34:03.955192427 +0000 UTC m=+3137.715530395" lastFinishedPulling="2026-01-20 17:34:07.525279114 +0000 UTC m=+3141.285617081" observedRunningTime="2026-01-20 17:34:07.917156081 +0000 UTC m=+3141.677494047" watchObservedRunningTime="2026-01-20 17:34:07.931521111 +0000 UTC m=+3141.691859078" Jan 20 17:34:08 crc 
kubenswrapper[4558]: I0120 17:34:08.575697 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="56208926-59b4-4fa1-9e8b-97846ed85d61" path="/var/lib/kubelet/pods/56208926-59b4-4fa1-9e8b-97846ed85d61/volumes" Jan 20 17:34:08 crc kubenswrapper[4558]: I0120 17:34:08.921087 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"403ecf58-a186-4cda-b9c0-3504f3edeb02","Type":"ContainerStarted","Data":"808933550d924de3c7edc379c7a62583aecaf780d81bdd690f9ea9e7511961c0"} Jan 20 17:34:08 crc kubenswrapper[4558]: I0120 17:34:08.942647 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-api-0" podStartSLOduration=2.94262617 podStartE2EDuration="2.94262617s" podCreationTimestamp="2026-01-20 17:34:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:34:08.941674169 +0000 UTC m=+3142.702012136" watchObservedRunningTime="2026-01-20 17:34:08.94262617 +0000 UTC m=+3142.702964137" Jan 20 17:34:10 crc kubenswrapper[4558]: E0120 17:34:10.046482 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7387066aabcf8bc09e2a1f9c8d883fa307f7ddccde17efaf31e07b22b87457ba" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:34:10 crc kubenswrapper[4558]: E0120 17:34:10.047700 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7387066aabcf8bc09e2a1f9c8d883fa307f7ddccde17efaf31e07b22b87457ba" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:34:10 crc kubenswrapper[4558]: E0120 17:34:10.049732 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="7387066aabcf8bc09e2a1f9c8d883fa307f7ddccde17efaf31e07b22b87457ba" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:34:10 crc kubenswrapper[4558]: E0120 17:34:10.049800 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="3481546b-c57f-43c9-92f9-ffcbc3af0d8f" containerName="nova-scheduler-scheduler" Jan 20 17:34:11 crc kubenswrapper[4558]: I0120 17:34:11.961860 4558 generic.go:334] "Generic (PLEG): container finished" podID="3481546b-c57f-43c9-92f9-ffcbc3af0d8f" containerID="7387066aabcf8bc09e2a1f9c8d883fa307f7ddccde17efaf31e07b22b87457ba" exitCode=0 Jan 20 17:34:11 crc kubenswrapper[4558]: I0120 17:34:11.961999 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"3481546b-c57f-43c9-92f9-ffcbc3af0d8f","Type":"ContainerDied","Data":"7387066aabcf8bc09e2a1f9c8d883fa307f7ddccde17efaf31e07b22b87457ba"} Jan 20 17:34:12 crc kubenswrapper[4558]: I0120 17:34:12.198388 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:34:12 crc kubenswrapper[4558]: I0120 17:34:12.221528 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxg64\" (UniqueName: \"kubernetes.io/projected/3481546b-c57f-43c9-92f9-ffcbc3af0d8f-kube-api-access-wxg64\") pod \"3481546b-c57f-43c9-92f9-ffcbc3af0d8f\" (UID: \"3481546b-c57f-43c9-92f9-ffcbc3af0d8f\") " Jan 20 17:34:12 crc kubenswrapper[4558]: I0120 17:34:12.221621 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3481546b-c57f-43c9-92f9-ffcbc3af0d8f-combined-ca-bundle\") pod \"3481546b-c57f-43c9-92f9-ffcbc3af0d8f\" (UID: \"3481546b-c57f-43c9-92f9-ffcbc3af0d8f\") " Jan 20 17:34:12 crc kubenswrapper[4558]: I0120 17:34:12.221872 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3481546b-c57f-43c9-92f9-ffcbc3af0d8f-config-data\") pod \"3481546b-c57f-43c9-92f9-ffcbc3af0d8f\" (UID: \"3481546b-c57f-43c9-92f9-ffcbc3af0d8f\") " Jan 20 17:34:12 crc kubenswrapper[4558]: I0120 17:34:12.226935 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3481546b-c57f-43c9-92f9-ffcbc3af0d8f-kube-api-access-wxg64" (OuterVolumeSpecName: "kube-api-access-wxg64") pod "3481546b-c57f-43c9-92f9-ffcbc3af0d8f" (UID: "3481546b-c57f-43c9-92f9-ffcbc3af0d8f"). InnerVolumeSpecName "kube-api-access-wxg64". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:34:12 crc kubenswrapper[4558]: I0120 17:34:12.259587 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3481546b-c57f-43c9-92f9-ffcbc3af0d8f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3481546b-c57f-43c9-92f9-ffcbc3af0d8f" (UID: "3481546b-c57f-43c9-92f9-ffcbc3af0d8f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:34:12 crc kubenswrapper[4558]: I0120 17:34:12.259711 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3481546b-c57f-43c9-92f9-ffcbc3af0d8f-config-data" (OuterVolumeSpecName: "config-data") pod "3481546b-c57f-43c9-92f9-ffcbc3af0d8f" (UID: "3481546b-c57f-43c9-92f9-ffcbc3af0d8f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:34:12 crc kubenswrapper[4558]: I0120 17:34:12.328355 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3481546b-c57f-43c9-92f9-ffcbc3af0d8f-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:34:12 crc kubenswrapper[4558]: I0120 17:34:12.328393 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxg64\" (UniqueName: \"kubernetes.io/projected/3481546b-c57f-43c9-92f9-ffcbc3af0d8f-kube-api-access-wxg64\") on node \"crc\" DevicePath \"\"" Jan 20 17:34:12 crc kubenswrapper[4558]: I0120 17:34:12.328405 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3481546b-c57f-43c9-92f9-ffcbc3af0d8f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:34:12 crc kubenswrapper[4558]: E0120 17:34:12.622360 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="2623aa795d9d97ceb168764d999b00e635d55fa28ca538052a877af229f57732" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:34:12 crc kubenswrapper[4558]: E0120 17:34:12.624090 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="2623aa795d9d97ceb168764d999b00e635d55fa28ca538052a877af229f57732" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:34:12 crc kubenswrapper[4558]: E0120 17:34:12.625919 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="2623aa795d9d97ceb168764d999b00e635d55fa28ca538052a877af229f57732" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:34:12 crc kubenswrapper[4558]: E0120 17:34:12.625998 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podUID="04a134bc-533d-490a-8544-61f705a6d4f2" containerName="nova-cell1-conductor-conductor" Jan 20 17:34:12 crc kubenswrapper[4558]: I0120 17:34:12.973026 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"3481546b-c57f-43c9-92f9-ffcbc3af0d8f","Type":"ContainerDied","Data":"a0d826feadee368ccf3856e699cf2cd99ea908f0d2dac8a5188ad31f925dc33d"} Jan 20 17:34:12 crc kubenswrapper[4558]: I0120 17:34:12.973088 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:34:12 crc kubenswrapper[4558]: I0120 17:34:12.973101 4558 scope.go:117] "RemoveContainer" containerID="7387066aabcf8bc09e2a1f9c8d883fa307f7ddccde17efaf31e07b22b87457ba" Jan 20 17:34:13 crc kubenswrapper[4558]: I0120 17:34:13.004566 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:34:13 crc kubenswrapper[4558]: I0120 17:34:13.009642 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:34:13 crc kubenswrapper[4558]: I0120 17:34:13.031848 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:34:13 crc kubenswrapper[4558]: E0120 17:34:13.032450 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3481546b-c57f-43c9-92f9-ffcbc3af0d8f" containerName="nova-scheduler-scheduler" Jan 20 17:34:13 crc kubenswrapper[4558]: I0120 17:34:13.032476 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="3481546b-c57f-43c9-92f9-ffcbc3af0d8f" containerName="nova-scheduler-scheduler" Jan 20 17:34:13 crc kubenswrapper[4558]: I0120 17:34:13.032781 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="3481546b-c57f-43c9-92f9-ffcbc3af0d8f" containerName="nova-scheduler-scheduler" Jan 20 17:34:13 crc kubenswrapper[4558]: I0120 17:34:13.033593 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:34:13 crc kubenswrapper[4558]: I0120 17:34:13.036070 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-scheduler-config-data" Jan 20 17:34:13 crc kubenswrapper[4558]: I0120 17:34:13.041864 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:34:13 crc kubenswrapper[4558]: I0120 17:34:13.151932 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ktzpc\" (UniqueName: \"kubernetes.io/projected/6a79b74c-56ea-47f2-aec7-8b63d0a54f3c-kube-api-access-ktzpc\") pod \"nova-scheduler-0\" (UID: \"6a79b74c-56ea-47f2-aec7-8b63d0a54f3c\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:34:13 crc kubenswrapper[4558]: I0120 17:34:13.152003 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6a79b74c-56ea-47f2-aec7-8b63d0a54f3c-config-data\") pod \"nova-scheduler-0\" (UID: \"6a79b74c-56ea-47f2-aec7-8b63d0a54f3c\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:34:13 crc kubenswrapper[4558]: I0120 17:34:13.152125 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6a79b74c-56ea-47f2-aec7-8b63d0a54f3c-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"6a79b74c-56ea-47f2-aec7-8b63d0a54f3c\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:34:13 crc kubenswrapper[4558]: I0120 17:34:13.255300 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6a79b74c-56ea-47f2-aec7-8b63d0a54f3c-config-data\") pod \"nova-scheduler-0\" (UID: \"6a79b74c-56ea-47f2-aec7-8b63d0a54f3c\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:34:13 crc kubenswrapper[4558]: I0120 17:34:13.255377 4558 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6a79b74c-56ea-47f2-aec7-8b63d0a54f3c-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"6a79b74c-56ea-47f2-aec7-8b63d0a54f3c\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:34:13 crc kubenswrapper[4558]: I0120 17:34:13.255696 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ktzpc\" (UniqueName: \"kubernetes.io/projected/6a79b74c-56ea-47f2-aec7-8b63d0a54f3c-kube-api-access-ktzpc\") pod \"nova-scheduler-0\" (UID: \"6a79b74c-56ea-47f2-aec7-8b63d0a54f3c\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:34:13 crc kubenswrapper[4558]: I0120 17:34:13.259780 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6a79b74c-56ea-47f2-aec7-8b63d0a54f3c-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"6a79b74c-56ea-47f2-aec7-8b63d0a54f3c\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:34:13 crc kubenswrapper[4558]: I0120 17:34:13.260369 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6a79b74c-56ea-47f2-aec7-8b63d0a54f3c-config-data\") pod \"nova-scheduler-0\" (UID: \"6a79b74c-56ea-47f2-aec7-8b63d0a54f3c\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:34:13 crc kubenswrapper[4558]: I0120 17:34:13.271560 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ktzpc\" (UniqueName: \"kubernetes.io/projected/6a79b74c-56ea-47f2-aec7-8b63d0a54f3c-kube-api-access-ktzpc\") pod \"nova-scheduler-0\" (UID: \"6a79b74c-56ea-47f2-aec7-8b63d0a54f3c\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:34:13 crc kubenswrapper[4558]: I0120 17:34:13.352278 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:34:13 crc kubenswrapper[4558]: I0120 17:34:13.566363 4558 scope.go:117] "RemoveContainer" containerID="f275e9f5de330b3c79316d5871106c6bcf90b698e0744c5beb28da45c336b36d" Jan 20 17:34:13 crc kubenswrapper[4558]: E0120 17:34:13.566701 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:34:13 crc kubenswrapper[4558]: I0120 17:34:13.739669 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:34:13 crc kubenswrapper[4558]: W0120 17:34:13.741909 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6a79b74c_56ea_47f2_aec7_8b63d0a54f3c.slice/crio-d81683bc405cd6eb7d7aad1e014d9b09d9fac18cd52755985f94c217aa9a511d WatchSource:0}: Error finding container d81683bc405cd6eb7d7aad1e014d9b09d9fac18cd52755985f94c217aa9a511d: Status 404 returned error can't find the container with id d81683bc405cd6eb7d7aad1e014d9b09d9fac18cd52755985f94c217aa9a511d Jan 20 17:34:13 crc kubenswrapper[4558]: I0120 17:34:13.985688 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"6a79b74c-56ea-47f2-aec7-8b63d0a54f3c","Type":"ContainerStarted","Data":"6d8e674d2d833aea6fdf17f48244d761ba7c0a3485ad6276685492e29412ecf5"} Jan 20 17:34:13 crc kubenswrapper[4558]: I0120 17:34:13.985944 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"6a79b74c-56ea-47f2-aec7-8b63d0a54f3c","Type":"ContainerStarted","Data":"d81683bc405cd6eb7d7aad1e014d9b09d9fac18cd52755985f94c217aa9a511d"} Jan 20 17:34:14 crc kubenswrapper[4558]: I0120 17:34:14.013812 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-scheduler-0" podStartSLOduration=2.013792431 podStartE2EDuration="2.013792431s" podCreationTimestamp="2026-01-20 17:34:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:34:13.999221262 +0000 UTC m=+3147.759559229" watchObservedRunningTime="2026-01-20 17:34:14.013792431 +0000 UTC m=+3147.774130398" Jan 20 17:34:14 crc kubenswrapper[4558]: I0120 17:34:14.576103 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3481546b-c57f-43c9-92f9-ffcbc3af0d8f" path="/var/lib/kubelet/pods/3481546b-c57f-43c9-92f9-ffcbc3af0d8f/volumes" Jan 20 17:34:14 crc kubenswrapper[4558]: I0120 17:34:14.987964 4558 scope.go:117] "RemoveContainer" containerID="08b72bd01a759efc57d52868101c30d8254f92693dba81d382bc14dd5867a057" Jan 20 17:34:15 crc kubenswrapper[4558]: I0120 17:34:15.034375 4558 scope.go:117] "RemoveContainer" containerID="2c9fa6b9561c4a984ecb954991ac4f52ef42ab0fbc3c9bb866972b9d6d85f164" Jan 20 17:34:15 crc kubenswrapper[4558]: I0120 17:34:15.053090 4558 scope.go:117] "RemoveContainer" containerID="078d2b2ec0a8bee0701e4f1972ae83126a9c9ca7a4fe3a16d9d7a16978e58d58" Jan 20 17:34:15 crc kubenswrapper[4558]: I0120 17:34:15.091125 4558 scope.go:117] "RemoveContainer" 
containerID="41bd033877c5a7cf1ffe2c31818a011adc08dd537bfdb844181969248bd15676" Jan 20 17:34:15 crc kubenswrapper[4558]: I0120 17:34:15.136373 4558 scope.go:117] "RemoveContainer" containerID="50e04d94313c300a1bbfb5aa67197781874c448647ec0443d12262cb9d1314e4" Jan 20 17:34:17 crc kubenswrapper[4558]: I0120 17:34:17.259318 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:34:17 crc kubenswrapper[4558]: I0120 17:34:17.259676 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:34:17 crc kubenswrapper[4558]: E0120 17:34:17.622502 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="2623aa795d9d97ceb168764d999b00e635d55fa28ca538052a877af229f57732" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:34:17 crc kubenswrapper[4558]: E0120 17:34:17.624657 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="2623aa795d9d97ceb168764d999b00e635d55fa28ca538052a877af229f57732" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:34:17 crc kubenswrapper[4558]: E0120 17:34:17.626948 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="2623aa795d9d97ceb168764d999b00e635d55fa28ca538052a877af229f57732" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:34:17 crc kubenswrapper[4558]: E0120 17:34:17.627064 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podUID="04a134bc-533d-490a-8544-61f705a6d4f2" containerName="nova-cell1-conductor-conductor" Jan 20 17:34:18 crc kubenswrapper[4558]: I0120 17:34:18.272331 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" podUID="403ecf58-a186-4cda-b9c0-3504f3edeb02" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.95:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:34:18 crc kubenswrapper[4558]: I0120 17:34:18.272331 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" podUID="403ecf58-a186-4cda-b9c0-3504f3edeb02" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.95:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:34:18 crc kubenswrapper[4558]: I0120 17:34:18.353156 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:34:19 crc kubenswrapper[4558]: I0120 17:34:19.643868 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:34:19 crc kubenswrapper[4558]: I0120 17:34:19.713030 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4f8tn\" (UniqueName: \"kubernetes.io/projected/cf7d4ada-26ee-451b-9f80-e2f65ba17eac-kube-api-access-4f8tn\") pod \"cf7d4ada-26ee-451b-9f80-e2f65ba17eac\" (UID: \"cf7d4ada-26ee-451b-9f80-e2f65ba17eac\") " Jan 20 17:34:19 crc kubenswrapper[4558]: I0120 17:34:19.713300 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf7d4ada-26ee-451b-9f80-e2f65ba17eac-combined-ca-bundle\") pod \"cf7d4ada-26ee-451b-9f80-e2f65ba17eac\" (UID: \"cf7d4ada-26ee-451b-9f80-e2f65ba17eac\") " Jan 20 17:34:19 crc kubenswrapper[4558]: I0120 17:34:19.713382 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf7d4ada-26ee-451b-9f80-e2f65ba17eac-config-data\") pod \"cf7d4ada-26ee-451b-9f80-e2f65ba17eac\" (UID: \"cf7d4ada-26ee-451b-9f80-e2f65ba17eac\") " Jan 20 17:34:19 crc kubenswrapper[4558]: I0120 17:34:19.718570 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cf7d4ada-26ee-451b-9f80-e2f65ba17eac-kube-api-access-4f8tn" (OuterVolumeSpecName: "kube-api-access-4f8tn") pod "cf7d4ada-26ee-451b-9f80-e2f65ba17eac" (UID: "cf7d4ada-26ee-451b-9f80-e2f65ba17eac"). InnerVolumeSpecName "kube-api-access-4f8tn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:34:19 crc kubenswrapper[4558]: I0120 17:34:19.735861 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf7d4ada-26ee-451b-9f80-e2f65ba17eac-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cf7d4ada-26ee-451b-9f80-e2f65ba17eac" (UID: "cf7d4ada-26ee-451b-9f80-e2f65ba17eac"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:34:19 crc kubenswrapper[4558]: I0120 17:34:19.736235 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf7d4ada-26ee-451b-9f80-e2f65ba17eac-config-data" (OuterVolumeSpecName: "config-data") pod "cf7d4ada-26ee-451b-9f80-e2f65ba17eac" (UID: "cf7d4ada-26ee-451b-9f80-e2f65ba17eac"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:34:19 crc kubenswrapper[4558]: I0120 17:34:19.816345 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4f8tn\" (UniqueName: \"kubernetes.io/projected/cf7d4ada-26ee-451b-9f80-e2f65ba17eac-kube-api-access-4f8tn\") on node \"crc\" DevicePath \"\"" Jan 20 17:34:19 crc kubenswrapper[4558]: I0120 17:34:19.816378 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf7d4ada-26ee-451b-9f80-e2f65ba17eac-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:34:19 crc kubenswrapper[4558]: I0120 17:34:19.816388 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf7d4ada-26ee-451b-9f80-e2f65ba17eac-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:34:20 crc kubenswrapper[4558]: I0120 17:34:20.061074 4558 generic.go:334] "Generic (PLEG): container finished" podID="cf7d4ada-26ee-451b-9f80-e2f65ba17eac" containerID="61340ea4c6d4601e8dba8fa5338ab203b0a918c71c61c9f5e5eb7af8611f7eb7" exitCode=137 Jan 20 17:34:20 crc kubenswrapper[4558]: I0120 17:34:20.061131 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"cf7d4ada-26ee-451b-9f80-e2f65ba17eac","Type":"ContainerDied","Data":"61340ea4c6d4601e8dba8fa5338ab203b0a918c71c61c9f5e5eb7af8611f7eb7"} Jan 20 17:34:20 crc kubenswrapper[4558]: I0120 17:34:20.061150 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:34:20 crc kubenswrapper[4558]: I0120 17:34:20.061192 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"cf7d4ada-26ee-451b-9f80-e2f65ba17eac","Type":"ContainerDied","Data":"86ab52ccb17858817e9868895e83804721f6854bd8266f62d50b094b56165e79"} Jan 20 17:34:20 crc kubenswrapper[4558]: I0120 17:34:20.061214 4558 scope.go:117] "RemoveContainer" containerID="61340ea4c6d4601e8dba8fa5338ab203b0a918c71c61c9f5e5eb7af8611f7eb7" Jan 20 17:34:20 crc kubenswrapper[4558]: I0120 17:34:20.091050 4558 scope.go:117] "RemoveContainer" containerID="61340ea4c6d4601e8dba8fa5338ab203b0a918c71c61c9f5e5eb7af8611f7eb7" Jan 20 17:34:20 crc kubenswrapper[4558]: E0120 17:34:20.091620 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"61340ea4c6d4601e8dba8fa5338ab203b0a918c71c61c9f5e5eb7af8611f7eb7\": container with ID starting with 61340ea4c6d4601e8dba8fa5338ab203b0a918c71c61c9f5e5eb7af8611f7eb7 not found: ID does not exist" containerID="61340ea4c6d4601e8dba8fa5338ab203b0a918c71c61c9f5e5eb7af8611f7eb7" Jan 20 17:34:20 crc kubenswrapper[4558]: I0120 17:34:20.091657 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"61340ea4c6d4601e8dba8fa5338ab203b0a918c71c61c9f5e5eb7af8611f7eb7"} err="failed to get container status \"61340ea4c6d4601e8dba8fa5338ab203b0a918c71c61c9f5e5eb7af8611f7eb7\": rpc error: code = NotFound desc = could not find container \"61340ea4c6d4601e8dba8fa5338ab203b0a918c71c61c9f5e5eb7af8611f7eb7\": container with ID starting with 61340ea4c6d4601e8dba8fa5338ab203b0a918c71c61c9f5e5eb7af8611f7eb7 not found: ID does not exist" Jan 20 17:34:20 crc kubenswrapper[4558]: I0120 17:34:20.094650 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 
17:34:20 crc kubenswrapper[4558]: I0120 17:34:20.107322 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:34:20 crc kubenswrapper[4558]: I0120 17:34:20.169347 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:34:20 crc kubenswrapper[4558]: E0120 17:34:20.169947 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf7d4ada-26ee-451b-9f80-e2f65ba17eac" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:34:20 crc kubenswrapper[4558]: I0120 17:34:20.169965 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf7d4ada-26ee-451b-9f80-e2f65ba17eac" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:34:20 crc kubenswrapper[4558]: I0120 17:34:20.170212 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf7d4ada-26ee-451b-9f80-e2f65ba17eac" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:34:20 crc kubenswrapper[4558]: I0120 17:34:20.171245 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:34:20 crc kubenswrapper[4558]: I0120 17:34:20.175688 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-novncproxy-cell1-vencrypt" Jan 20 17:34:20 crc kubenswrapper[4558]: I0120 17:34:20.176199 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-novncproxy-cell1-public-svc" Jan 20 17:34:20 crc kubenswrapper[4558]: I0120 17:34:20.176485 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-novncproxy-config-data" Jan 20 17:34:20 crc kubenswrapper[4558]: I0120 17:34:20.184602 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:34:20 crc kubenswrapper[4558]: I0120 17:34:20.229974 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wt5f2\" (UniqueName: \"kubernetes.io/projected/639521c5-8df8-4c45-a66a-0be7119bb8d2-kube-api-access-wt5f2\") pod \"nova-cell1-novncproxy-0\" (UID: \"639521c5-8df8-4c45-a66a-0be7119bb8d2\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:34:20 crc kubenswrapper[4558]: I0120 17:34:20.230022 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/639521c5-8df8-4c45-a66a-0be7119bb8d2-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"639521c5-8df8-4c45-a66a-0be7119bb8d2\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:34:20 crc kubenswrapper[4558]: I0120 17:34:20.230042 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/639521c5-8df8-4c45-a66a-0be7119bb8d2-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"639521c5-8df8-4c45-a66a-0be7119bb8d2\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:34:20 crc kubenswrapper[4558]: I0120 17:34:20.230288 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/639521c5-8df8-4c45-a66a-0be7119bb8d2-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"639521c5-8df8-4c45-a66a-0be7119bb8d2\") " 
pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:34:20 crc kubenswrapper[4558]: I0120 17:34:20.230321 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/639521c5-8df8-4c45-a66a-0be7119bb8d2-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"639521c5-8df8-4c45-a66a-0be7119bb8d2\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:34:20 crc kubenswrapper[4558]: I0120 17:34:20.332457 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wt5f2\" (UniqueName: \"kubernetes.io/projected/639521c5-8df8-4c45-a66a-0be7119bb8d2-kube-api-access-wt5f2\") pod \"nova-cell1-novncproxy-0\" (UID: \"639521c5-8df8-4c45-a66a-0be7119bb8d2\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:34:20 crc kubenswrapper[4558]: I0120 17:34:20.332512 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/639521c5-8df8-4c45-a66a-0be7119bb8d2-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"639521c5-8df8-4c45-a66a-0be7119bb8d2\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:34:20 crc kubenswrapper[4558]: I0120 17:34:20.332540 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/639521c5-8df8-4c45-a66a-0be7119bb8d2-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"639521c5-8df8-4c45-a66a-0be7119bb8d2\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:34:20 crc kubenswrapper[4558]: I0120 17:34:20.332643 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/639521c5-8df8-4c45-a66a-0be7119bb8d2-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"639521c5-8df8-4c45-a66a-0be7119bb8d2\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:34:20 crc kubenswrapper[4558]: I0120 17:34:20.332677 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/639521c5-8df8-4c45-a66a-0be7119bb8d2-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"639521c5-8df8-4c45-a66a-0be7119bb8d2\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:34:20 crc kubenswrapper[4558]: I0120 17:34:20.338606 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/639521c5-8df8-4c45-a66a-0be7119bb8d2-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"639521c5-8df8-4c45-a66a-0be7119bb8d2\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:34:20 crc kubenswrapper[4558]: I0120 17:34:20.338604 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/639521c5-8df8-4c45-a66a-0be7119bb8d2-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"639521c5-8df8-4c45-a66a-0be7119bb8d2\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:34:20 crc kubenswrapper[4558]: I0120 17:34:20.338709 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/639521c5-8df8-4c45-a66a-0be7119bb8d2-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: 
\"639521c5-8df8-4c45-a66a-0be7119bb8d2\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:34:20 crc kubenswrapper[4558]: I0120 17:34:20.341496 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/639521c5-8df8-4c45-a66a-0be7119bb8d2-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"639521c5-8df8-4c45-a66a-0be7119bb8d2\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:34:20 crc kubenswrapper[4558]: I0120 17:34:20.349829 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wt5f2\" (UniqueName: \"kubernetes.io/projected/639521c5-8df8-4c45-a66a-0be7119bb8d2-kube-api-access-wt5f2\") pod \"nova-cell1-novncproxy-0\" (UID: \"639521c5-8df8-4c45-a66a-0be7119bb8d2\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:34:20 crc kubenswrapper[4558]: I0120 17:34:20.493131 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:34:20 crc kubenswrapper[4558]: I0120 17:34:20.586535 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cf7d4ada-26ee-451b-9f80-e2f65ba17eac" path="/var/lib/kubelet/pods/cf7d4ada-26ee-451b-9f80-e2f65ba17eac/volumes" Jan 20 17:34:20 crc kubenswrapper[4558]: I0120 17:34:20.831523 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:34:20 crc kubenswrapper[4558]: I0120 17:34:20.955655 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fsc8j\" (UniqueName: \"kubernetes.io/projected/04a134bc-533d-490a-8544-61f705a6d4f2-kube-api-access-fsc8j\") pod \"04a134bc-533d-490a-8544-61f705a6d4f2\" (UID: \"04a134bc-533d-490a-8544-61f705a6d4f2\") " Jan 20 17:34:20 crc kubenswrapper[4558]: I0120 17:34:20.956020 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04a134bc-533d-490a-8544-61f705a6d4f2-combined-ca-bundle\") pod \"04a134bc-533d-490a-8544-61f705a6d4f2\" (UID: \"04a134bc-533d-490a-8544-61f705a6d4f2\") " Jan 20 17:34:20 crc kubenswrapper[4558]: I0120 17:34:20.956194 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04a134bc-533d-490a-8544-61f705a6d4f2-config-data\") pod \"04a134bc-533d-490a-8544-61f705a6d4f2\" (UID: \"04a134bc-533d-490a-8544-61f705a6d4f2\") " Jan 20 17:34:20 crc kubenswrapper[4558]: I0120 17:34:20.961482 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/04a134bc-533d-490a-8544-61f705a6d4f2-kube-api-access-fsc8j" (OuterVolumeSpecName: "kube-api-access-fsc8j") pod "04a134bc-533d-490a-8544-61f705a6d4f2" (UID: "04a134bc-533d-490a-8544-61f705a6d4f2"). InnerVolumeSpecName "kube-api-access-fsc8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:34:20 crc kubenswrapper[4558]: I0120 17:34:20.989206 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04a134bc-533d-490a-8544-61f705a6d4f2-config-data" (OuterVolumeSpecName: "config-data") pod "04a134bc-533d-490a-8544-61f705a6d4f2" (UID: "04a134bc-533d-490a-8544-61f705a6d4f2"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:34:20 crc kubenswrapper[4558]: I0120 17:34:20.989814 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:34:20 crc kubenswrapper[4558]: I0120 17:34:20.992488 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04a134bc-533d-490a-8544-61f705a6d4f2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "04a134bc-533d-490a-8544-61f705a6d4f2" (UID: "04a134bc-533d-490a-8544-61f705a6d4f2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:34:21 crc kubenswrapper[4558]: I0120 17:34:21.061255 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fsc8j\" (UniqueName: \"kubernetes.io/projected/04a134bc-533d-490a-8544-61f705a6d4f2-kube-api-access-fsc8j\") on node \"crc\" DevicePath \"\"" Jan 20 17:34:21 crc kubenswrapper[4558]: I0120 17:34:21.061354 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04a134bc-533d-490a-8544-61f705a6d4f2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:34:21 crc kubenswrapper[4558]: I0120 17:34:21.061426 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04a134bc-533d-490a-8544-61f705a6d4f2-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:34:21 crc kubenswrapper[4558]: I0120 17:34:21.070528 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"639521c5-8df8-4c45-a66a-0be7119bb8d2","Type":"ContainerStarted","Data":"20e8f42a160445317c6f7a51b7e9e69eb9dc463c3fba20116a2cd1d22263ebe6"} Jan 20 17:34:21 crc kubenswrapper[4558]: I0120 17:34:21.072435 4558 generic.go:334] "Generic (PLEG): container finished" podID="04a134bc-533d-490a-8544-61f705a6d4f2" containerID="2623aa795d9d97ceb168764d999b00e635d55fa28ca538052a877af229f57732" exitCode=137 Jan 20 17:34:21 crc kubenswrapper[4558]: I0120 17:34:21.072494 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"04a134bc-533d-490a-8544-61f705a6d4f2","Type":"ContainerDied","Data":"2623aa795d9d97ceb168764d999b00e635d55fa28ca538052a877af229f57732"} Jan 20 17:34:21 crc kubenswrapper[4558]: I0120 17:34:21.072558 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"04a134bc-533d-490a-8544-61f705a6d4f2","Type":"ContainerDied","Data":"4962bbdddc3126dcebfffd77d4fe1852739f34d6c6ffc23ea3b54bab816f6a72"} Jan 20 17:34:21 crc kubenswrapper[4558]: I0120 17:34:21.072581 4558 scope.go:117] "RemoveContainer" containerID="2623aa795d9d97ceb168764d999b00e635d55fa28ca538052a877af229f57732" Jan 20 17:34:21 crc kubenswrapper[4558]: I0120 17:34:21.072512 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:34:21 crc kubenswrapper[4558]: I0120 17:34:21.095985 4558 scope.go:117] "RemoveContainer" containerID="2623aa795d9d97ceb168764d999b00e635d55fa28ca538052a877af229f57732" Jan 20 17:34:21 crc kubenswrapper[4558]: E0120 17:34:21.096612 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2623aa795d9d97ceb168764d999b00e635d55fa28ca538052a877af229f57732\": container with ID starting with 2623aa795d9d97ceb168764d999b00e635d55fa28ca538052a877af229f57732 not found: ID does not exist" containerID="2623aa795d9d97ceb168764d999b00e635d55fa28ca538052a877af229f57732" Jan 20 17:34:21 crc kubenswrapper[4558]: I0120 17:34:21.096648 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2623aa795d9d97ceb168764d999b00e635d55fa28ca538052a877af229f57732"} err="failed to get container status \"2623aa795d9d97ceb168764d999b00e635d55fa28ca538052a877af229f57732\": rpc error: code = NotFound desc = could not find container \"2623aa795d9d97ceb168764d999b00e635d55fa28ca538052a877af229f57732\": container with ID starting with 2623aa795d9d97ceb168764d999b00e635d55fa28ca538052a877af229f57732 not found: ID does not exist" Jan 20 17:34:21 crc kubenswrapper[4558]: I0120 17:34:21.127281 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:34:21 crc kubenswrapper[4558]: I0120 17:34:21.137672 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:34:21 crc kubenswrapper[4558]: I0120 17:34:21.144417 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:34:21 crc kubenswrapper[4558]: E0120 17:34:21.144856 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04a134bc-533d-490a-8544-61f705a6d4f2" containerName="nova-cell1-conductor-conductor" Jan 20 17:34:21 crc kubenswrapper[4558]: I0120 17:34:21.144878 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="04a134bc-533d-490a-8544-61f705a6d4f2" containerName="nova-cell1-conductor-conductor" Jan 20 17:34:21 crc kubenswrapper[4558]: I0120 17:34:21.145116 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="04a134bc-533d-490a-8544-61f705a6d4f2" containerName="nova-cell1-conductor-conductor" Jan 20 17:34:21 crc kubenswrapper[4558]: I0120 17:34:21.145832 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:34:21 crc kubenswrapper[4558]: I0120 17:34:21.147928 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-conductor-config-data" Jan 20 17:34:21 crc kubenswrapper[4558]: I0120 17:34:21.150389 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:34:21 crc kubenswrapper[4558]: I0120 17:34:21.269621 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5cn75\" (UniqueName: \"kubernetes.io/projected/9b9c1cf7-0cbb-4bb3-a764-8a8fa446c8a8-kube-api-access-5cn75\") pod \"nova-cell1-conductor-0\" (UID: \"9b9c1cf7-0cbb-4bb3-a764-8a8fa446c8a8\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:34:21 crc kubenswrapper[4558]: I0120 17:34:21.269726 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b9c1cf7-0cbb-4bb3-a764-8a8fa446c8a8-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"9b9c1cf7-0cbb-4bb3-a764-8a8fa446c8a8\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:34:21 crc kubenswrapper[4558]: I0120 17:34:21.269905 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b9c1cf7-0cbb-4bb3-a764-8a8fa446c8a8-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"9b9c1cf7-0cbb-4bb3-a764-8a8fa446c8a8\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:34:21 crc kubenswrapper[4558]: I0120 17:34:21.372142 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b9c1cf7-0cbb-4bb3-a764-8a8fa446c8a8-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"9b9c1cf7-0cbb-4bb3-a764-8a8fa446c8a8\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:34:21 crc kubenswrapper[4558]: I0120 17:34:21.372390 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5cn75\" (UniqueName: \"kubernetes.io/projected/9b9c1cf7-0cbb-4bb3-a764-8a8fa446c8a8-kube-api-access-5cn75\") pod \"nova-cell1-conductor-0\" (UID: \"9b9c1cf7-0cbb-4bb3-a764-8a8fa446c8a8\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:34:21 crc kubenswrapper[4558]: I0120 17:34:21.372423 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b9c1cf7-0cbb-4bb3-a764-8a8fa446c8a8-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"9b9c1cf7-0cbb-4bb3-a764-8a8fa446c8a8\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:34:21 crc kubenswrapper[4558]: I0120 17:34:21.379894 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b9c1cf7-0cbb-4bb3-a764-8a8fa446c8a8-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"9b9c1cf7-0cbb-4bb3-a764-8a8fa446c8a8\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:34:21 crc kubenswrapper[4558]: I0120 17:34:21.379959 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b9c1cf7-0cbb-4bb3-a764-8a8fa446c8a8-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"9b9c1cf7-0cbb-4bb3-a764-8a8fa446c8a8\") " 
pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:34:21 crc kubenswrapper[4558]: I0120 17:34:21.387739 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5cn75\" (UniqueName: \"kubernetes.io/projected/9b9c1cf7-0cbb-4bb3-a764-8a8fa446c8a8-kube-api-access-5cn75\") pod \"nova-cell1-conductor-0\" (UID: \"9b9c1cf7-0cbb-4bb3-a764-8a8fa446c8a8\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:34:21 crc kubenswrapper[4558]: I0120 17:34:21.460749 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:34:21 crc kubenswrapper[4558]: I0120 17:34:21.808864 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:34:21 crc kubenswrapper[4558]: I0120 17:34:21.871732 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:34:21 crc kubenswrapper[4558]: W0120 17:34:21.874684 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9b9c1cf7_0cbb_4bb3_a764_8a8fa446c8a8.slice/crio-76a8468b3bbc48f7fb9efe000d0f26373aebe12ee16ba381522258f93da08ca3 WatchSource:0}: Error finding container 76a8468b3bbc48f7fb9efe000d0f26373aebe12ee16ba381522258f93da08ca3: Status 404 returned error can't find the container with id 76a8468b3bbc48f7fb9efe000d0f26373aebe12ee16ba381522258f93da08ca3 Jan 20 17:34:21 crc kubenswrapper[4558]: I0120 17:34:21.883872 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/70209150-5da2-411c-ac05-641a336b6c12-logs\") pod \"70209150-5da2-411c-ac05-641a336b6c12\" (UID: \"70209150-5da2-411c-ac05-641a336b6c12\") " Jan 20 17:34:21 crc kubenswrapper[4558]: I0120 17:34:21.884013 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6hj55\" (UniqueName: \"kubernetes.io/projected/70209150-5da2-411c-ac05-641a336b6c12-kube-api-access-6hj55\") pod \"70209150-5da2-411c-ac05-641a336b6c12\" (UID: \"70209150-5da2-411c-ac05-641a336b6c12\") " Jan 20 17:34:21 crc kubenswrapper[4558]: I0120 17:34:21.884220 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70209150-5da2-411c-ac05-641a336b6c12-combined-ca-bundle\") pod \"70209150-5da2-411c-ac05-641a336b6c12\" (UID: \"70209150-5da2-411c-ac05-641a336b6c12\") " Jan 20 17:34:21 crc kubenswrapper[4558]: I0120 17:34:21.884285 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/70209150-5da2-411c-ac05-641a336b6c12-logs" (OuterVolumeSpecName: "logs") pod "70209150-5da2-411c-ac05-641a336b6c12" (UID: "70209150-5da2-411c-ac05-641a336b6c12"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:34:21 crc kubenswrapper[4558]: I0120 17:34:21.884667 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70209150-5da2-411c-ac05-641a336b6c12-config-data\") pod \"70209150-5da2-411c-ac05-641a336b6c12\" (UID: \"70209150-5da2-411c-ac05-641a336b6c12\") " Jan 20 17:34:21 crc kubenswrapper[4558]: I0120 17:34:21.885213 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/70209150-5da2-411c-ac05-641a336b6c12-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:34:21 crc kubenswrapper[4558]: I0120 17:34:21.887123 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/70209150-5da2-411c-ac05-641a336b6c12-kube-api-access-6hj55" (OuterVolumeSpecName: "kube-api-access-6hj55") pod "70209150-5da2-411c-ac05-641a336b6c12" (UID: "70209150-5da2-411c-ac05-641a336b6c12"). InnerVolumeSpecName "kube-api-access-6hj55". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:34:21 crc kubenswrapper[4558]: I0120 17:34:21.905139 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70209150-5da2-411c-ac05-641a336b6c12-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "70209150-5da2-411c-ac05-641a336b6c12" (UID: "70209150-5da2-411c-ac05-641a336b6c12"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:34:21 crc kubenswrapper[4558]: I0120 17:34:21.906368 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70209150-5da2-411c-ac05-641a336b6c12-config-data" (OuterVolumeSpecName: "config-data") pod "70209150-5da2-411c-ac05-641a336b6c12" (UID: "70209150-5da2-411c-ac05-641a336b6c12"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:34:21 crc kubenswrapper[4558]: I0120 17:34:21.987101 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70209150-5da2-411c-ac05-641a336b6c12-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:34:21 crc kubenswrapper[4558]: I0120 17:34:21.987130 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70209150-5da2-411c-ac05-641a336b6c12-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:34:21 crc kubenswrapper[4558]: I0120 17:34:21.987146 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6hj55\" (UniqueName: \"kubernetes.io/projected/70209150-5da2-411c-ac05-641a336b6c12-kube-api-access-6hj55\") on node \"crc\" DevicePath \"\"" Jan 20 17:34:22 crc kubenswrapper[4558]: I0120 17:34:22.108018 4558 generic.go:334] "Generic (PLEG): container finished" podID="70209150-5da2-411c-ac05-641a336b6c12" containerID="3591e8b056c12783c58a71fe1d084db8eb2a4555e329046872fd52062408894a" exitCode=137 Jan 20 17:34:22 crc kubenswrapper[4558]: I0120 17:34:22.108098 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"70209150-5da2-411c-ac05-641a336b6c12","Type":"ContainerDied","Data":"3591e8b056c12783c58a71fe1d084db8eb2a4555e329046872fd52062408894a"} Jan 20 17:34:22 crc kubenswrapper[4558]: I0120 17:34:22.108112 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:34:22 crc kubenswrapper[4558]: I0120 17:34:22.108141 4558 scope.go:117] "RemoveContainer" containerID="3591e8b056c12783c58a71fe1d084db8eb2a4555e329046872fd52062408894a" Jan 20 17:34:22 crc kubenswrapper[4558]: I0120 17:34:22.108129 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"70209150-5da2-411c-ac05-641a336b6c12","Type":"ContainerDied","Data":"1a961e4edc787aaa20f30df9c81a5db77bf6e2f57609110642779124e01e42f1"} Jan 20 17:34:22 crc kubenswrapper[4558]: I0120 17:34:22.109806 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"639521c5-8df8-4c45-a66a-0be7119bb8d2","Type":"ContainerStarted","Data":"36a455e01433abd0de3acdfe5a36add132aa44e9a3eb34155cfc2b6b22bd9d3c"} Jan 20 17:34:22 crc kubenswrapper[4558]: I0120 17:34:22.111994 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"9b9c1cf7-0cbb-4bb3-a764-8a8fa446c8a8","Type":"ContainerStarted","Data":"f80866aad1b997b9edf8ab7c0962cd1436981eced3f24d965081cee0a0b92d9a"} Jan 20 17:34:22 crc kubenswrapper[4558]: I0120 17:34:22.112612 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"9b9c1cf7-0cbb-4bb3-a764-8a8fa446c8a8","Type":"ContainerStarted","Data":"76a8468b3bbc48f7fb9efe000d0f26373aebe12ee16ba381522258f93da08ca3"} Jan 20 17:34:22 crc kubenswrapper[4558]: I0120 17:34:22.113601 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:34:22 crc kubenswrapper[4558]: I0120 17:34:22.128244 4558 scope.go:117] "RemoveContainer" containerID="ea4c18fc95b5ea8fbe3c18909d888ddca0ea90bf7d976f9bd51dcc8a70a069b0" Jan 20 17:34:22 crc kubenswrapper[4558]: I0120 17:34:22.148645 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" podStartSLOduration=2.14862158 podStartE2EDuration="2.14862158s" podCreationTimestamp="2026-01-20 17:34:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:34:22.1277958 +0000 UTC m=+3155.888133767" watchObservedRunningTime="2026-01-20 17:34:22.14862158 +0000 UTC m=+3155.908959547" Jan 20 17:34:22 crc kubenswrapper[4558]: I0120 17:34:22.163796 4558 scope.go:117] "RemoveContainer" containerID="3591e8b056c12783c58a71fe1d084db8eb2a4555e329046872fd52062408894a" Jan 20 17:34:22 crc kubenswrapper[4558]: E0120 17:34:22.164261 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3591e8b056c12783c58a71fe1d084db8eb2a4555e329046872fd52062408894a\": container with ID starting with 3591e8b056c12783c58a71fe1d084db8eb2a4555e329046872fd52062408894a not found: ID does not exist" containerID="3591e8b056c12783c58a71fe1d084db8eb2a4555e329046872fd52062408894a" Jan 20 17:34:22 crc kubenswrapper[4558]: I0120 17:34:22.164291 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3591e8b056c12783c58a71fe1d084db8eb2a4555e329046872fd52062408894a"} err="failed to get container status \"3591e8b056c12783c58a71fe1d084db8eb2a4555e329046872fd52062408894a\": rpc error: code = NotFound desc = could not find container 
\"3591e8b056c12783c58a71fe1d084db8eb2a4555e329046872fd52062408894a\": container with ID starting with 3591e8b056c12783c58a71fe1d084db8eb2a4555e329046872fd52062408894a not found: ID does not exist" Jan 20 17:34:22 crc kubenswrapper[4558]: I0120 17:34:22.164314 4558 scope.go:117] "RemoveContainer" containerID="ea4c18fc95b5ea8fbe3c18909d888ddca0ea90bf7d976f9bd51dcc8a70a069b0" Jan 20 17:34:22 crc kubenswrapper[4558]: E0120 17:34:22.166860 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ea4c18fc95b5ea8fbe3c18909d888ddca0ea90bf7d976f9bd51dcc8a70a069b0\": container with ID starting with ea4c18fc95b5ea8fbe3c18909d888ddca0ea90bf7d976f9bd51dcc8a70a069b0 not found: ID does not exist" containerID="ea4c18fc95b5ea8fbe3c18909d888ddca0ea90bf7d976f9bd51dcc8a70a069b0" Jan 20 17:34:22 crc kubenswrapper[4558]: I0120 17:34:22.166885 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ea4c18fc95b5ea8fbe3c18909d888ddca0ea90bf7d976f9bd51dcc8a70a069b0"} err="failed to get container status \"ea4c18fc95b5ea8fbe3c18909d888ddca0ea90bf7d976f9bd51dcc8a70a069b0\": rpc error: code = NotFound desc = could not find container \"ea4c18fc95b5ea8fbe3c18909d888ddca0ea90bf7d976f9bd51dcc8a70a069b0\": container with ID starting with ea4c18fc95b5ea8fbe3c18909d888ddca0ea90bf7d976f9bd51dcc8a70a069b0 not found: ID does not exist" Jan 20 17:34:22 crc kubenswrapper[4558]: I0120 17:34:22.174343 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:34:22 crc kubenswrapper[4558]: I0120 17:34:22.182095 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:34:22 crc kubenswrapper[4558]: I0120 17:34:22.188621 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:34:22 crc kubenswrapper[4558]: E0120 17:34:22.189192 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70209150-5da2-411c-ac05-641a336b6c12" containerName="nova-metadata-log" Jan 20 17:34:22 crc kubenswrapper[4558]: I0120 17:34:22.189216 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="70209150-5da2-411c-ac05-641a336b6c12" containerName="nova-metadata-log" Jan 20 17:34:22 crc kubenswrapper[4558]: E0120 17:34:22.189241 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70209150-5da2-411c-ac05-641a336b6c12" containerName="nova-metadata-metadata" Jan 20 17:34:22 crc kubenswrapper[4558]: I0120 17:34:22.189248 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="70209150-5da2-411c-ac05-641a336b6c12" containerName="nova-metadata-metadata" Jan 20 17:34:22 crc kubenswrapper[4558]: I0120 17:34:22.189439 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="70209150-5da2-411c-ac05-641a336b6c12" containerName="nova-metadata-log" Jan 20 17:34:22 crc kubenswrapper[4558]: I0120 17:34:22.189458 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="70209150-5da2-411c-ac05-641a336b6c12" containerName="nova-metadata-metadata" Jan 20 17:34:22 crc kubenswrapper[4558]: I0120 17:34:22.190600 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:34:22 crc kubenswrapper[4558]: I0120 17:34:22.191661 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podStartSLOduration=1.191647486 podStartE2EDuration="1.191647486s" podCreationTimestamp="2026-01-20 17:34:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:34:22.158808215 +0000 UTC m=+3155.919146182" watchObservedRunningTime="2026-01-20 17:34:22.191647486 +0000 UTC m=+3155.951985453" Jan 20 17:34:22 crc kubenswrapper[4558]: I0120 17:34:22.192268 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-metadata-internal-svc" Jan 20 17:34:22 crc kubenswrapper[4558]: I0120 17:34:22.192529 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-metadata-config-data" Jan 20 17:34:22 crc kubenswrapper[4558]: I0120 17:34:22.203956 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:34:22 crc kubenswrapper[4558]: I0120 17:34:22.294250 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/99b8519e-6e6f-4b03-bc0e-9b159ed53f94-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"99b8519e-6e6f-4b03-bc0e-9b159ed53f94\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:34:22 crc kubenswrapper[4558]: I0120 17:34:22.294333 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4kbpw\" (UniqueName: \"kubernetes.io/projected/99b8519e-6e6f-4b03-bc0e-9b159ed53f94-kube-api-access-4kbpw\") pod \"nova-metadata-0\" (UID: \"99b8519e-6e6f-4b03-bc0e-9b159ed53f94\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:34:22 crc kubenswrapper[4558]: I0120 17:34:22.294374 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/99b8519e-6e6f-4b03-bc0e-9b159ed53f94-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"99b8519e-6e6f-4b03-bc0e-9b159ed53f94\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:34:22 crc kubenswrapper[4558]: I0120 17:34:22.294454 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/99b8519e-6e6f-4b03-bc0e-9b159ed53f94-config-data\") pod \"nova-metadata-0\" (UID: \"99b8519e-6e6f-4b03-bc0e-9b159ed53f94\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:34:22 crc kubenswrapper[4558]: I0120 17:34:22.294494 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/99b8519e-6e6f-4b03-bc0e-9b159ed53f94-logs\") pod \"nova-metadata-0\" (UID: \"99b8519e-6e6f-4b03-bc0e-9b159ed53f94\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:34:22 crc kubenswrapper[4558]: I0120 17:34:22.396498 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/99b8519e-6e6f-4b03-bc0e-9b159ed53f94-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"99b8519e-6e6f-4b03-bc0e-9b159ed53f94\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:34:22 crc 
kubenswrapper[4558]: I0120 17:34:22.396789 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4kbpw\" (UniqueName: \"kubernetes.io/projected/99b8519e-6e6f-4b03-bc0e-9b159ed53f94-kube-api-access-4kbpw\") pod \"nova-metadata-0\" (UID: \"99b8519e-6e6f-4b03-bc0e-9b159ed53f94\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:34:22 crc kubenswrapper[4558]: I0120 17:34:22.396828 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/99b8519e-6e6f-4b03-bc0e-9b159ed53f94-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"99b8519e-6e6f-4b03-bc0e-9b159ed53f94\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:34:22 crc kubenswrapper[4558]: I0120 17:34:22.396890 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/99b8519e-6e6f-4b03-bc0e-9b159ed53f94-config-data\") pod \"nova-metadata-0\" (UID: \"99b8519e-6e6f-4b03-bc0e-9b159ed53f94\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:34:22 crc kubenswrapper[4558]: I0120 17:34:22.396931 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/99b8519e-6e6f-4b03-bc0e-9b159ed53f94-logs\") pod \"nova-metadata-0\" (UID: \"99b8519e-6e6f-4b03-bc0e-9b159ed53f94\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:34:22 crc kubenswrapper[4558]: I0120 17:34:22.397351 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/99b8519e-6e6f-4b03-bc0e-9b159ed53f94-logs\") pod \"nova-metadata-0\" (UID: \"99b8519e-6e6f-4b03-bc0e-9b159ed53f94\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:34:22 crc kubenswrapper[4558]: I0120 17:34:22.401640 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/99b8519e-6e6f-4b03-bc0e-9b159ed53f94-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"99b8519e-6e6f-4b03-bc0e-9b159ed53f94\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:34:22 crc kubenswrapper[4558]: I0120 17:34:22.401646 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/99b8519e-6e6f-4b03-bc0e-9b159ed53f94-config-data\") pod \"nova-metadata-0\" (UID: \"99b8519e-6e6f-4b03-bc0e-9b159ed53f94\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:34:22 crc kubenswrapper[4558]: I0120 17:34:22.402082 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/99b8519e-6e6f-4b03-bc0e-9b159ed53f94-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"99b8519e-6e6f-4b03-bc0e-9b159ed53f94\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:34:22 crc kubenswrapper[4558]: I0120 17:34:22.410412 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4kbpw\" (UniqueName: \"kubernetes.io/projected/99b8519e-6e6f-4b03-bc0e-9b159ed53f94-kube-api-access-4kbpw\") pod \"nova-metadata-0\" (UID: \"99b8519e-6e6f-4b03-bc0e-9b159ed53f94\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:34:22 crc kubenswrapper[4558]: I0120 17:34:22.506005 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:34:22 crc kubenswrapper[4558]: I0120 17:34:22.617517 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="04a134bc-533d-490a-8544-61f705a6d4f2" path="/var/lib/kubelet/pods/04a134bc-533d-490a-8544-61f705a6d4f2/volumes" Jan 20 17:34:22 crc kubenswrapper[4558]: I0120 17:34:22.618355 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="70209150-5da2-411c-ac05-641a336b6c12" path="/var/lib/kubelet/pods/70209150-5da2-411c-ac05-641a336b6c12/volumes" Jan 20 17:34:22 crc kubenswrapper[4558]: W0120 17:34:22.910984 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod99b8519e_6e6f_4b03_bc0e_9b159ed53f94.slice/crio-094e78246e2cf02fc340ead546fe5604db08f42b14dc3ebe3fc80018c629673b WatchSource:0}: Error finding container 094e78246e2cf02fc340ead546fe5604db08f42b14dc3ebe3fc80018c629673b: Status 404 returned error can't find the container with id 094e78246e2cf02fc340ead546fe5604db08f42b14dc3ebe3fc80018c629673b Jan 20 17:34:22 crc kubenswrapper[4558]: I0120 17:34:22.914927 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:34:23 crc kubenswrapper[4558]: I0120 17:34:23.069318 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-grwr2"] Jan 20 17:34:23 crc kubenswrapper[4558]: I0120 17:34:23.071414 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-grwr2" Jan 20 17:34:23 crc kubenswrapper[4558]: I0120 17:34:23.100234 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-grwr2"] Jan 20 17:34:23 crc kubenswrapper[4558]: I0120 17:34:23.139478 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"99b8519e-6e6f-4b03-bc0e-9b159ed53f94","Type":"ContainerStarted","Data":"02e098431930837fb92268532afd86ec94107352fd27d8c1491004cdd3da1bdd"} Jan 20 17:34:23 crc kubenswrapper[4558]: I0120 17:34:23.139527 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"99b8519e-6e6f-4b03-bc0e-9b159ed53f94","Type":"ContainerStarted","Data":"094e78246e2cf02fc340ead546fe5604db08f42b14dc3ebe3fc80018c629673b"} Jan 20 17:34:23 crc kubenswrapper[4558]: I0120 17:34:23.233962 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qrffx\" (UniqueName: \"kubernetes.io/projected/97258279-69d3-4e8e-935a-b34e6a73be4e-kube-api-access-qrffx\") pod \"community-operators-grwr2\" (UID: \"97258279-69d3-4e8e-935a-b34e6a73be4e\") " pod="openshift-marketplace/community-operators-grwr2" Jan 20 17:34:23 crc kubenswrapper[4558]: I0120 17:34:23.234274 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/97258279-69d3-4e8e-935a-b34e6a73be4e-utilities\") pod \"community-operators-grwr2\" (UID: \"97258279-69d3-4e8e-935a-b34e6a73be4e\") " pod="openshift-marketplace/community-operators-grwr2" Jan 20 17:34:23 crc kubenswrapper[4558]: I0120 17:34:23.234304 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/97258279-69d3-4e8e-935a-b34e6a73be4e-catalog-content\") pod 
\"community-operators-grwr2\" (UID: \"97258279-69d3-4e8e-935a-b34e6a73be4e\") " pod="openshift-marketplace/community-operators-grwr2" Jan 20 17:34:23 crc kubenswrapper[4558]: I0120 17:34:23.336995 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qrffx\" (UniqueName: \"kubernetes.io/projected/97258279-69d3-4e8e-935a-b34e6a73be4e-kube-api-access-qrffx\") pod \"community-operators-grwr2\" (UID: \"97258279-69d3-4e8e-935a-b34e6a73be4e\") " pod="openshift-marketplace/community-operators-grwr2" Jan 20 17:34:23 crc kubenswrapper[4558]: I0120 17:34:23.337315 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/97258279-69d3-4e8e-935a-b34e6a73be4e-utilities\") pod \"community-operators-grwr2\" (UID: \"97258279-69d3-4e8e-935a-b34e6a73be4e\") " pod="openshift-marketplace/community-operators-grwr2" Jan 20 17:34:23 crc kubenswrapper[4558]: I0120 17:34:23.337350 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/97258279-69d3-4e8e-935a-b34e6a73be4e-catalog-content\") pod \"community-operators-grwr2\" (UID: \"97258279-69d3-4e8e-935a-b34e6a73be4e\") " pod="openshift-marketplace/community-operators-grwr2" Jan 20 17:34:23 crc kubenswrapper[4558]: I0120 17:34:23.338194 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/97258279-69d3-4e8e-935a-b34e6a73be4e-utilities\") pod \"community-operators-grwr2\" (UID: \"97258279-69d3-4e8e-935a-b34e6a73be4e\") " pod="openshift-marketplace/community-operators-grwr2" Jan 20 17:34:23 crc kubenswrapper[4558]: I0120 17:34:23.338513 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/97258279-69d3-4e8e-935a-b34e6a73be4e-catalog-content\") pod \"community-operators-grwr2\" (UID: \"97258279-69d3-4e8e-935a-b34e6a73be4e\") " pod="openshift-marketplace/community-operators-grwr2" Jan 20 17:34:23 crc kubenswrapper[4558]: I0120 17:34:23.352584 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:34:23 crc kubenswrapper[4558]: I0120 17:34:23.356890 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qrffx\" (UniqueName: \"kubernetes.io/projected/97258279-69d3-4e8e-935a-b34e6a73be4e-kube-api-access-qrffx\") pod \"community-operators-grwr2\" (UID: \"97258279-69d3-4e8e-935a-b34e6a73be4e\") " pod="openshift-marketplace/community-operators-grwr2" Jan 20 17:34:23 crc kubenswrapper[4558]: I0120 17:34:23.386376 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-grwr2" Jan 20 17:34:23 crc kubenswrapper[4558]: I0120 17:34:23.387437 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:34:23 crc kubenswrapper[4558]: I0120 17:34:23.899944 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-grwr2"] Jan 20 17:34:24 crc kubenswrapper[4558]: I0120 17:34:24.158915 4558 generic.go:334] "Generic (PLEG): container finished" podID="97258279-69d3-4e8e-935a-b34e6a73be4e" containerID="48fa808dd2c20d63c69f8008c57b98745686660b1cbc6015b9223632d525754a" exitCode=0 Jan 20 17:34:24 crc kubenswrapper[4558]: I0120 17:34:24.159996 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-grwr2" event={"ID":"97258279-69d3-4e8e-935a-b34e6a73be4e","Type":"ContainerDied","Data":"48fa808dd2c20d63c69f8008c57b98745686660b1cbc6015b9223632d525754a"} Jan 20 17:34:24 crc kubenswrapper[4558]: I0120 17:34:24.160126 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-grwr2" event={"ID":"97258279-69d3-4e8e-935a-b34e6a73be4e","Type":"ContainerStarted","Data":"dd6a8f534d74beb4ab6b8766bd29c2c7d4d9decfad85a5b150aa257a9ef50230"} Jan 20 17:34:24 crc kubenswrapper[4558]: I0120 17:34:24.165773 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"99b8519e-6e6f-4b03-bc0e-9b159ed53f94","Type":"ContainerStarted","Data":"bd6efedb4729482e0b53a3f4f56d4d7e625727ca7567b4ba0e01c49e0cbbf75f"} Jan 20 17:34:24 crc kubenswrapper[4558]: I0120 17:34:24.205229 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:34:24 crc kubenswrapper[4558]: I0120 17:34:24.211728 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-metadata-0" podStartSLOduration=2.211699523 podStartE2EDuration="2.211699523s" podCreationTimestamp="2026-01-20 17:34:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:34:24.195571206 +0000 UTC m=+3157.955909173" watchObservedRunningTime="2026-01-20 17:34:24.211699523 +0000 UTC m=+3157.972037490" Jan 20 17:34:25 crc kubenswrapper[4558]: I0120 17:34:25.176931 4558 generic.go:334] "Generic (PLEG): container finished" podID="97258279-69d3-4e8e-935a-b34e6a73be4e" containerID="82c6171f2834671748234907c74bac442e2b2b8b07fcbfa9ec41535fbfa23462" exitCode=0 Jan 20 17:34:25 crc kubenswrapper[4558]: I0120 17:34:25.176986 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-grwr2" event={"ID":"97258279-69d3-4e8e-935a-b34e6a73be4e","Type":"ContainerDied","Data":"82c6171f2834671748234907c74bac442e2b2b8b07fcbfa9ec41535fbfa23462"} Jan 20 17:34:25 crc kubenswrapper[4558]: I0120 17:34:25.493681 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:34:26 crc kubenswrapper[4558]: I0120 17:34:26.194864 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-grwr2" event={"ID":"97258279-69d3-4e8e-935a-b34e6a73be4e","Type":"ContainerStarted","Data":"9545d00982f094a98930fb9fe418c2cefaa2598df66b5c7b6805ad188bf042ce"} Jan 20 17:34:26 crc kubenswrapper[4558]: I0120 17:34:26.219846 4558 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-grwr2" podStartSLOduration=1.7102480020000002 podStartE2EDuration="3.219819144s" podCreationTimestamp="2026-01-20 17:34:23 +0000 UTC" firstStartedPulling="2026-01-20 17:34:24.16110658 +0000 UTC m=+3157.921444546" lastFinishedPulling="2026-01-20 17:34:25.670677731 +0000 UTC m=+3159.431015688" observedRunningTime="2026-01-20 17:34:26.217844931 +0000 UTC m=+3159.978182898" watchObservedRunningTime="2026-01-20 17:34:26.219819144 +0000 UTC m=+3159.980157111" Jan 20 17:34:26 crc kubenswrapper[4558]: I0120 17:34:26.491871 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:34:27 crc kubenswrapper[4558]: I0120 17:34:27.266915 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:34:27 crc kubenswrapper[4558]: I0120 17:34:27.267812 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:34:27 crc kubenswrapper[4558]: I0120 17:34:27.268393 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:34:27 crc kubenswrapper[4558]: I0120 17:34:27.273252 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:34:27 crc kubenswrapper[4558]: I0120 17:34:27.506337 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:34:27 crc kubenswrapper[4558]: I0120 17:34:27.506682 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:34:27 crc kubenswrapper[4558]: I0120 17:34:27.566305 4558 scope.go:117] "RemoveContainer" containerID="f275e9f5de330b3c79316d5871106c6bcf90b698e0744c5beb28da45c336b36d" Jan 20 17:34:28 crc kubenswrapper[4558]: I0120 17:34:28.219728 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerStarted","Data":"23984d013a068cbbd112be5ba4e29373c221b0f5de1715ba81ff9be27247b9c2"} Jan 20 17:34:28 crc kubenswrapper[4558]: I0120 17:34:28.221268 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:34:28 crc kubenswrapper[4558]: I0120 17:34:28.231805 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:34:29 crc kubenswrapper[4558]: I0120 17:34:29.923600 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:34:29 crc kubenswrapper[4558]: I0120 17:34:29.925685 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="064c3f1d-6117-4cd9-bf5f-400b06b11d59" containerName="sg-core" containerID="cri-o://f48aeb30b2cfaa08bb3f13eee1aaf3c0f1c1855c7c00bdf4b9fa474db4b08957" gracePeriod=30 Jan 20 17:34:29 crc kubenswrapper[4558]: I0120 17:34:29.925720 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="064c3f1d-6117-4cd9-bf5f-400b06b11d59" containerName="ceilometer-notification-agent" containerID="cri-o://b76c14d8886d5774bd6f812f24811198b815faf3df6e02edfe827602a5f05a10" 
gracePeriod=30 Jan 20 17:34:29 crc kubenswrapper[4558]: I0120 17:34:29.925749 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="064c3f1d-6117-4cd9-bf5f-400b06b11d59" containerName="proxy-httpd" containerID="cri-o://dc58496c4ba70fea566614a3cce2786b241478b1915fd38952580ac26231d50f" gracePeriod=30 Jan 20 17:34:29 crc kubenswrapper[4558]: I0120 17:34:29.926205 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="064c3f1d-6117-4cd9-bf5f-400b06b11d59" containerName="ceilometer-central-agent" containerID="cri-o://b379979fb76580540ae38daa220da7cd19bceeb6504b4c7e195ca0c79794a990" gracePeriod=30 Jan 20 17:34:29 crc kubenswrapper[4558]: I0120 17:34:29.934992 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/ceilometer-0" podUID="064c3f1d-6117-4cd9-bf5f-400b06b11d59" containerName="proxy-httpd" probeResult="failure" output="Get \"http://10.217.0.94:3000/\": EOF" Jan 20 17:34:30 crc kubenswrapper[4558]: I0120 17:34:30.259114 4558 generic.go:334] "Generic (PLEG): container finished" podID="064c3f1d-6117-4cd9-bf5f-400b06b11d59" containerID="dc58496c4ba70fea566614a3cce2786b241478b1915fd38952580ac26231d50f" exitCode=0 Jan 20 17:34:30 crc kubenswrapper[4558]: I0120 17:34:30.259390 4558 generic.go:334] "Generic (PLEG): container finished" podID="064c3f1d-6117-4cd9-bf5f-400b06b11d59" containerID="f48aeb30b2cfaa08bb3f13eee1aaf3c0f1c1855c7c00bdf4b9fa474db4b08957" exitCode=2 Jan 20 17:34:30 crc kubenswrapper[4558]: I0120 17:34:30.259399 4558 generic.go:334] "Generic (PLEG): container finished" podID="064c3f1d-6117-4cd9-bf5f-400b06b11d59" containerID="b379979fb76580540ae38daa220da7cd19bceeb6504b4c7e195ca0c79794a990" exitCode=0 Jan 20 17:34:30 crc kubenswrapper[4558]: I0120 17:34:30.259200 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"064c3f1d-6117-4cd9-bf5f-400b06b11d59","Type":"ContainerDied","Data":"dc58496c4ba70fea566614a3cce2786b241478b1915fd38952580ac26231d50f"} Jan 20 17:34:30 crc kubenswrapper[4558]: I0120 17:34:30.259456 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"064c3f1d-6117-4cd9-bf5f-400b06b11d59","Type":"ContainerDied","Data":"f48aeb30b2cfaa08bb3f13eee1aaf3c0f1c1855c7c00bdf4b9fa474db4b08957"} Jan 20 17:34:30 crc kubenswrapper[4558]: I0120 17:34:30.259471 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"064c3f1d-6117-4cd9-bf5f-400b06b11d59","Type":"ContainerDied","Data":"b379979fb76580540ae38daa220da7cd19bceeb6504b4c7e195ca0c79794a990"} Jan 20 17:34:30 crc kubenswrapper[4558]: I0120 17:34:30.493363 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:34:30 crc kubenswrapper[4558]: I0120 17:34:30.516160 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:34:31 crc kubenswrapper[4558]: I0120 17:34:31.288837 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:34:31 crc kubenswrapper[4558]: I0120 17:34:31.401661 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-cell-mapping-nbrcj"] Jan 20 17:34:31 crc kubenswrapper[4558]: I0120 17:34:31.408561 4558 
kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-cell-mapping-nbrcj"] Jan 20 17:34:31 crc kubenswrapper[4558]: I0120 17:34:31.513379 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-cell-mapping-jqwc9"] Jan 20 17:34:31 crc kubenswrapper[4558]: I0120 17:34:31.515188 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-jqwc9" Jan 20 17:34:31 crc kubenswrapper[4558]: I0120 17:34:31.517541 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-manage-config-data" Jan 20 17:34:31 crc kubenswrapper[4558]: I0120 17:34:31.518093 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-manage-scripts" Jan 20 17:34:31 crc kubenswrapper[4558]: I0120 17:34:31.525449 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e0e37c27-e17c-46c1-9e53-ce2a92c13f02-scripts\") pod \"nova-cell1-cell-mapping-jqwc9\" (UID: \"e0e37c27-e17c-46c1-9e53-ce2a92c13f02\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-jqwc9" Jan 20 17:34:31 crc kubenswrapper[4558]: I0120 17:34:31.525503 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0e37c27-e17c-46c1-9e53-ce2a92c13f02-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-jqwc9\" (UID: \"e0e37c27-e17c-46c1-9e53-ce2a92c13f02\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-jqwc9" Jan 20 17:34:31 crc kubenswrapper[4558]: I0120 17:34:31.525623 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0e37c27-e17c-46c1-9e53-ce2a92c13f02-config-data\") pod \"nova-cell1-cell-mapping-jqwc9\" (UID: \"e0e37c27-e17c-46c1-9e53-ce2a92c13f02\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-jqwc9" Jan 20 17:34:31 crc kubenswrapper[4558]: I0120 17:34:31.525645 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r4kq5\" (UniqueName: \"kubernetes.io/projected/e0e37c27-e17c-46c1-9e53-ce2a92c13f02-kube-api-access-r4kq5\") pod \"nova-cell1-cell-mapping-jqwc9\" (UID: \"e0e37c27-e17c-46c1-9e53-ce2a92c13f02\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-jqwc9" Jan 20 17:34:31 crc kubenswrapper[4558]: I0120 17:34:31.527794 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-cell-mapping-jqwc9"] Jan 20 17:34:31 crc kubenswrapper[4558]: I0120 17:34:31.627950 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0e37c27-e17c-46c1-9e53-ce2a92c13f02-config-data\") pod \"nova-cell1-cell-mapping-jqwc9\" (UID: \"e0e37c27-e17c-46c1-9e53-ce2a92c13f02\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-jqwc9" Jan 20 17:34:31 crc kubenswrapper[4558]: I0120 17:34:31.628019 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r4kq5\" (UniqueName: \"kubernetes.io/projected/e0e37c27-e17c-46c1-9e53-ce2a92c13f02-kube-api-access-r4kq5\") pod \"nova-cell1-cell-mapping-jqwc9\" (UID: \"e0e37c27-e17c-46c1-9e53-ce2a92c13f02\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-jqwc9" Jan 20 17:34:31 crc 
kubenswrapper[4558]: I0120 17:34:31.628202 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e0e37c27-e17c-46c1-9e53-ce2a92c13f02-scripts\") pod \"nova-cell1-cell-mapping-jqwc9\" (UID: \"e0e37c27-e17c-46c1-9e53-ce2a92c13f02\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-jqwc9" Jan 20 17:34:31 crc kubenswrapper[4558]: I0120 17:34:31.628275 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0e37c27-e17c-46c1-9e53-ce2a92c13f02-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-jqwc9\" (UID: \"e0e37c27-e17c-46c1-9e53-ce2a92c13f02\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-jqwc9" Jan 20 17:34:31 crc kubenswrapper[4558]: I0120 17:34:31.637374 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0e37c27-e17c-46c1-9e53-ce2a92c13f02-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-jqwc9\" (UID: \"e0e37c27-e17c-46c1-9e53-ce2a92c13f02\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-jqwc9" Jan 20 17:34:31 crc kubenswrapper[4558]: I0120 17:34:31.639030 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e0e37c27-e17c-46c1-9e53-ce2a92c13f02-scripts\") pod \"nova-cell1-cell-mapping-jqwc9\" (UID: \"e0e37c27-e17c-46c1-9e53-ce2a92c13f02\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-jqwc9" Jan 20 17:34:31 crc kubenswrapper[4558]: I0120 17:34:31.639070 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0e37c27-e17c-46c1-9e53-ce2a92c13f02-config-data\") pod \"nova-cell1-cell-mapping-jqwc9\" (UID: \"e0e37c27-e17c-46c1-9e53-ce2a92c13f02\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-jqwc9" Jan 20 17:34:31 crc kubenswrapper[4558]: I0120 17:34:31.645060 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r4kq5\" (UniqueName: \"kubernetes.io/projected/e0e37c27-e17c-46c1-9e53-ce2a92c13f02-kube-api-access-r4kq5\") pod \"nova-cell1-cell-mapping-jqwc9\" (UID: \"e0e37c27-e17c-46c1-9e53-ce2a92c13f02\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-jqwc9" Jan 20 17:34:31 crc kubenswrapper[4558]: I0120 17:34:31.838935 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-jqwc9" Jan 20 17:34:32 crc kubenswrapper[4558]: I0120 17:34:32.270225 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-cell-mapping-jqwc9"] Jan 20 17:34:32 crc kubenswrapper[4558]: I0120 17:34:32.281267 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-jqwc9" event={"ID":"e0e37c27-e17c-46c1-9e53-ce2a92c13f02","Type":"ContainerStarted","Data":"6e85c520d9bd9b0e59652c5d00f02c104d7ac1fbfa360eb133b22ffb6aefb714"} Jan 20 17:34:32 crc kubenswrapper[4558]: I0120 17:34:32.506999 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:34:32 crc kubenswrapper[4558]: I0120 17:34:32.507110 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:34:32 crc kubenswrapper[4558]: I0120 17:34:32.574435 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9ab93517-8f51-467d-b830-16030668be2b" path="/var/lib/kubelet/pods/9ab93517-8f51-467d-b830-16030668be2b/volumes" Jan 20 17:34:33 crc kubenswrapper[4558]: I0120 17:34:33.297781 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-jqwc9" event={"ID":"e0e37c27-e17c-46c1-9e53-ce2a92c13f02","Type":"ContainerStarted","Data":"60ee7c69684601f241168e9490c910c322ce0ad1bc0d2f8e418a384d04073baa"} Jan 20 17:34:33 crc kubenswrapper[4558]: I0120 17:34:33.315316 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-jqwc9" podStartSLOduration=2.3153016810000002 podStartE2EDuration="2.315301681s" podCreationTimestamp="2026-01-20 17:34:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:34:33.312490454 +0000 UTC m=+3167.072828422" watchObservedRunningTime="2026-01-20 17:34:33.315301681 +0000 UTC m=+3167.075639648" Jan 20 17:34:33 crc kubenswrapper[4558]: I0120 17:34:33.387311 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-grwr2" Jan 20 17:34:33 crc kubenswrapper[4558]: I0120 17:34:33.387357 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-grwr2" Jan 20 17:34:33 crc kubenswrapper[4558]: I0120 17:34:33.432811 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-grwr2" Jan 20 17:34:33 crc kubenswrapper[4558]: I0120 17:34:33.513369 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-metadata-0" podUID="99b8519e-6e6f-4b03-bc0e-9b159ed53f94" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.99:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:34:33 crc kubenswrapper[4558]: I0120 17:34:33.519362 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-metadata-0" podUID="99b8519e-6e6f-4b03-bc0e-9b159ed53f94" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.99:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:34:33 crc kubenswrapper[4558]: I0120 17:34:33.519765 4558 prober.go:107] "Probe 
failed" probeType="Readiness" pod="openstack-kuttl-tests/ceilometer-0" podUID="064c3f1d-6117-4cd9-bf5f-400b06b11d59" containerName="proxy-httpd" probeResult="failure" output="Get \"http://10.217.0.94:3000/\": dial tcp 10.217.0.94:3000: connect: connection refused" Jan 20 17:34:34 crc kubenswrapper[4558]: I0120 17:34:34.347969 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-grwr2" Jan 20 17:34:34 crc kubenswrapper[4558]: I0120 17:34:34.397915 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-grwr2"] Jan 20 17:34:35 crc kubenswrapper[4558]: I0120 17:34:35.325810 4558 generic.go:334] "Generic (PLEG): container finished" podID="064c3f1d-6117-4cd9-bf5f-400b06b11d59" containerID="b76c14d8886d5774bd6f812f24811198b815faf3df6e02edfe827602a5f05a10" exitCode=0 Jan 20 17:34:35 crc kubenswrapper[4558]: I0120 17:34:35.325894 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"064c3f1d-6117-4cd9-bf5f-400b06b11d59","Type":"ContainerDied","Data":"b76c14d8886d5774bd6f812f24811198b815faf3df6e02edfe827602a5f05a10"} Jan 20 17:34:35 crc kubenswrapper[4558]: I0120 17:34:35.452864 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:34:35 crc kubenswrapper[4558]: I0120 17:34:35.632062 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/064c3f1d-6117-4cd9-bf5f-400b06b11d59-log-httpd\") pod \"064c3f1d-6117-4cd9-bf5f-400b06b11d59\" (UID: \"064c3f1d-6117-4cd9-bf5f-400b06b11d59\") " Jan 20 17:34:35 crc kubenswrapper[4558]: I0120 17:34:35.632197 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/064c3f1d-6117-4cd9-bf5f-400b06b11d59-scripts\") pod \"064c3f1d-6117-4cd9-bf5f-400b06b11d59\" (UID: \"064c3f1d-6117-4cd9-bf5f-400b06b11d59\") " Jan 20 17:34:35 crc kubenswrapper[4558]: I0120 17:34:35.632245 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/064c3f1d-6117-4cd9-bf5f-400b06b11d59-run-httpd\") pod \"064c3f1d-6117-4cd9-bf5f-400b06b11d59\" (UID: \"064c3f1d-6117-4cd9-bf5f-400b06b11d59\") " Jan 20 17:34:35 crc kubenswrapper[4558]: I0120 17:34:35.632958 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/064c3f1d-6117-4cd9-bf5f-400b06b11d59-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "064c3f1d-6117-4cd9-bf5f-400b06b11d59" (UID: "064c3f1d-6117-4cd9-bf5f-400b06b11d59"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:34:35 crc kubenswrapper[4558]: I0120 17:34:35.633009 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/064c3f1d-6117-4cd9-bf5f-400b06b11d59-sg-core-conf-yaml\") pod \"064c3f1d-6117-4cd9-bf5f-400b06b11d59\" (UID: \"064c3f1d-6117-4cd9-bf5f-400b06b11d59\") " Jan 20 17:34:35 crc kubenswrapper[4558]: I0120 17:34:35.633159 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jk84b\" (UniqueName: \"kubernetes.io/projected/064c3f1d-6117-4cd9-bf5f-400b06b11d59-kube-api-access-jk84b\") pod \"064c3f1d-6117-4cd9-bf5f-400b06b11d59\" (UID: \"064c3f1d-6117-4cd9-bf5f-400b06b11d59\") " Jan 20 17:34:35 crc kubenswrapper[4558]: I0120 17:34:35.633208 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/064c3f1d-6117-4cd9-bf5f-400b06b11d59-config-data\") pod \"064c3f1d-6117-4cd9-bf5f-400b06b11d59\" (UID: \"064c3f1d-6117-4cd9-bf5f-400b06b11d59\") " Jan 20 17:34:35 crc kubenswrapper[4558]: I0120 17:34:35.633255 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/064c3f1d-6117-4cd9-bf5f-400b06b11d59-combined-ca-bundle\") pod \"064c3f1d-6117-4cd9-bf5f-400b06b11d59\" (UID: \"064c3f1d-6117-4cd9-bf5f-400b06b11d59\") " Jan 20 17:34:35 crc kubenswrapper[4558]: I0120 17:34:35.632972 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/064c3f1d-6117-4cd9-bf5f-400b06b11d59-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "064c3f1d-6117-4cd9-bf5f-400b06b11d59" (UID: "064c3f1d-6117-4cd9-bf5f-400b06b11d59"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:34:35 crc kubenswrapper[4558]: I0120 17:34:35.634070 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/064c3f1d-6117-4cd9-bf5f-400b06b11d59-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:34:35 crc kubenswrapper[4558]: I0120 17:34:35.634114 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/064c3f1d-6117-4cd9-bf5f-400b06b11d59-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:34:35 crc kubenswrapper[4558]: I0120 17:34:35.638615 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/064c3f1d-6117-4cd9-bf5f-400b06b11d59-scripts" (OuterVolumeSpecName: "scripts") pod "064c3f1d-6117-4cd9-bf5f-400b06b11d59" (UID: "064c3f1d-6117-4cd9-bf5f-400b06b11d59"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:34:35 crc kubenswrapper[4558]: I0120 17:34:35.639272 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/064c3f1d-6117-4cd9-bf5f-400b06b11d59-kube-api-access-jk84b" (OuterVolumeSpecName: "kube-api-access-jk84b") pod "064c3f1d-6117-4cd9-bf5f-400b06b11d59" (UID: "064c3f1d-6117-4cd9-bf5f-400b06b11d59"). InnerVolumeSpecName "kube-api-access-jk84b". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:34:35 crc kubenswrapper[4558]: I0120 17:34:35.667416 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/064c3f1d-6117-4cd9-bf5f-400b06b11d59-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "064c3f1d-6117-4cd9-bf5f-400b06b11d59" (UID: "064c3f1d-6117-4cd9-bf5f-400b06b11d59"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:34:35 crc kubenswrapper[4558]: I0120 17:34:35.694199 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/064c3f1d-6117-4cd9-bf5f-400b06b11d59-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "064c3f1d-6117-4cd9-bf5f-400b06b11d59" (UID: "064c3f1d-6117-4cd9-bf5f-400b06b11d59"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:34:35 crc kubenswrapper[4558]: I0120 17:34:35.704681 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/064c3f1d-6117-4cd9-bf5f-400b06b11d59-config-data" (OuterVolumeSpecName: "config-data") pod "064c3f1d-6117-4cd9-bf5f-400b06b11d59" (UID: "064c3f1d-6117-4cd9-bf5f-400b06b11d59"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:34:35 crc kubenswrapper[4558]: I0120 17:34:35.737773 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jk84b\" (UniqueName: \"kubernetes.io/projected/064c3f1d-6117-4cd9-bf5f-400b06b11d59-kube-api-access-jk84b\") on node \"crc\" DevicePath \"\"" Jan 20 17:34:35 crc kubenswrapper[4558]: I0120 17:34:35.737831 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/064c3f1d-6117-4cd9-bf5f-400b06b11d59-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:34:35 crc kubenswrapper[4558]: I0120 17:34:35.737849 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/064c3f1d-6117-4cd9-bf5f-400b06b11d59-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:34:35 crc kubenswrapper[4558]: I0120 17:34:35.737862 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/064c3f1d-6117-4cd9-bf5f-400b06b11d59-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:34:35 crc kubenswrapper[4558]: I0120 17:34:35.737875 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/064c3f1d-6117-4cd9-bf5f-400b06b11d59-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:34:36 crc kubenswrapper[4558]: I0120 17:34:36.337544 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"064c3f1d-6117-4cd9-bf5f-400b06b11d59","Type":"ContainerDied","Data":"efd21b49b63ae4659fc950bab4c8aa6216890d0805a08681be0f14455536c3ae"} Jan 20 17:34:36 crc kubenswrapper[4558]: I0120 17:34:36.337753 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-grwr2" podUID="97258279-69d3-4e8e-935a-b34e6a73be4e" containerName="registry-server" containerID="cri-o://9545d00982f094a98930fb9fe418c2cefaa2598df66b5c7b6805ad188bf042ce" gracePeriod=2 Jan 20 17:34:36 crc kubenswrapper[4558]: I0120 17:34:36.337840 4558 scope.go:117] "RemoveContainer" 
containerID="dc58496c4ba70fea566614a3cce2786b241478b1915fd38952580ac26231d50f" Jan 20 17:34:36 crc kubenswrapper[4558]: I0120 17:34:36.337582 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:34:36 crc kubenswrapper[4558]: I0120 17:34:36.372928 4558 scope.go:117] "RemoveContainer" containerID="f48aeb30b2cfaa08bb3f13eee1aaf3c0f1c1855c7c00bdf4b9fa474db4b08957" Jan 20 17:34:36 crc kubenswrapper[4558]: I0120 17:34:36.377915 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:34:36 crc kubenswrapper[4558]: I0120 17:34:36.384857 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:34:36 crc kubenswrapper[4558]: I0120 17:34:36.402660 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:34:36 crc kubenswrapper[4558]: E0120 17:34:36.403380 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="064c3f1d-6117-4cd9-bf5f-400b06b11d59" containerName="sg-core" Jan 20 17:34:36 crc kubenswrapper[4558]: I0120 17:34:36.403401 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="064c3f1d-6117-4cd9-bf5f-400b06b11d59" containerName="sg-core" Jan 20 17:34:36 crc kubenswrapper[4558]: E0120 17:34:36.403415 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="064c3f1d-6117-4cd9-bf5f-400b06b11d59" containerName="ceilometer-notification-agent" Jan 20 17:34:36 crc kubenswrapper[4558]: I0120 17:34:36.403422 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="064c3f1d-6117-4cd9-bf5f-400b06b11d59" containerName="ceilometer-notification-agent" Jan 20 17:34:36 crc kubenswrapper[4558]: E0120 17:34:36.403437 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="064c3f1d-6117-4cd9-bf5f-400b06b11d59" containerName="ceilometer-central-agent" Jan 20 17:34:36 crc kubenswrapper[4558]: I0120 17:34:36.403442 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="064c3f1d-6117-4cd9-bf5f-400b06b11d59" containerName="ceilometer-central-agent" Jan 20 17:34:36 crc kubenswrapper[4558]: E0120 17:34:36.403468 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="064c3f1d-6117-4cd9-bf5f-400b06b11d59" containerName="proxy-httpd" Jan 20 17:34:36 crc kubenswrapper[4558]: I0120 17:34:36.403473 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="064c3f1d-6117-4cd9-bf5f-400b06b11d59" containerName="proxy-httpd" Jan 20 17:34:36 crc kubenswrapper[4558]: I0120 17:34:36.403647 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="064c3f1d-6117-4cd9-bf5f-400b06b11d59" containerName="proxy-httpd" Jan 20 17:34:36 crc kubenswrapper[4558]: I0120 17:34:36.403659 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="064c3f1d-6117-4cd9-bf5f-400b06b11d59" containerName="sg-core" Jan 20 17:34:36 crc kubenswrapper[4558]: I0120 17:34:36.403669 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="064c3f1d-6117-4cd9-bf5f-400b06b11d59" containerName="ceilometer-central-agent" Jan 20 17:34:36 crc kubenswrapper[4558]: I0120 17:34:36.403676 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="064c3f1d-6117-4cd9-bf5f-400b06b11d59" containerName="ceilometer-notification-agent" Jan 20 17:34:36 crc kubenswrapper[4558]: I0120 17:34:36.405510 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:34:36 crc kubenswrapper[4558]: I0120 17:34:36.407019 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:34:36 crc kubenswrapper[4558]: I0120 17:34:36.407913 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:34:36 crc kubenswrapper[4558]: I0120 17:34:36.421665 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:34:36 crc kubenswrapper[4558]: I0120 17:34:36.440007 4558 scope.go:117] "RemoveContainer" containerID="b76c14d8886d5774bd6f812f24811198b815faf3df6e02edfe827602a5f05a10" Jan 20 17:34:36 crc kubenswrapper[4558]: I0120 17:34:36.454757 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d393d0ac-0a07-4a10-ab9a-ee4187b06a1a-run-httpd\") pod \"ceilometer-0\" (UID: \"d393d0ac-0a07-4a10-ab9a-ee4187b06a1a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:34:36 crc kubenswrapper[4558]: I0120 17:34:36.454798 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d393d0ac-0a07-4a10-ab9a-ee4187b06a1a-scripts\") pod \"ceilometer-0\" (UID: \"d393d0ac-0a07-4a10-ab9a-ee4187b06a1a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:34:36 crc kubenswrapper[4558]: I0120 17:34:36.454899 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d393d0ac-0a07-4a10-ab9a-ee4187b06a1a-config-data\") pod \"ceilometer-0\" (UID: \"d393d0ac-0a07-4a10-ab9a-ee4187b06a1a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:34:36 crc kubenswrapper[4558]: I0120 17:34:36.454936 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d393d0ac-0a07-4a10-ab9a-ee4187b06a1a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d393d0ac-0a07-4a10-ab9a-ee4187b06a1a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:34:36 crc kubenswrapper[4558]: I0120 17:34:36.454967 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d393d0ac-0a07-4a10-ab9a-ee4187b06a1a-log-httpd\") pod \"ceilometer-0\" (UID: \"d393d0ac-0a07-4a10-ab9a-ee4187b06a1a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:34:36 crc kubenswrapper[4558]: I0120 17:34:36.455068 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qhr8s\" (UniqueName: \"kubernetes.io/projected/d393d0ac-0a07-4a10-ab9a-ee4187b06a1a-kube-api-access-qhr8s\") pod \"ceilometer-0\" (UID: \"d393d0ac-0a07-4a10-ab9a-ee4187b06a1a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:34:36 crc kubenswrapper[4558]: I0120 17:34:36.455096 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d393d0ac-0a07-4a10-ab9a-ee4187b06a1a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d393d0ac-0a07-4a10-ab9a-ee4187b06a1a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:34:36 crc kubenswrapper[4558]: I0120 17:34:36.492378 4558 scope.go:117] "RemoveContainer" 
containerID="b379979fb76580540ae38daa220da7cd19bceeb6504b4c7e195ca0c79794a990" Jan 20 17:34:36 crc kubenswrapper[4558]: I0120 17:34:36.557098 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qhr8s\" (UniqueName: \"kubernetes.io/projected/d393d0ac-0a07-4a10-ab9a-ee4187b06a1a-kube-api-access-qhr8s\") pod \"ceilometer-0\" (UID: \"d393d0ac-0a07-4a10-ab9a-ee4187b06a1a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:34:36 crc kubenswrapper[4558]: I0120 17:34:36.557173 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d393d0ac-0a07-4a10-ab9a-ee4187b06a1a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d393d0ac-0a07-4a10-ab9a-ee4187b06a1a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:34:36 crc kubenswrapper[4558]: I0120 17:34:36.557299 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d393d0ac-0a07-4a10-ab9a-ee4187b06a1a-run-httpd\") pod \"ceilometer-0\" (UID: \"d393d0ac-0a07-4a10-ab9a-ee4187b06a1a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:34:36 crc kubenswrapper[4558]: I0120 17:34:36.557375 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d393d0ac-0a07-4a10-ab9a-ee4187b06a1a-scripts\") pod \"ceilometer-0\" (UID: \"d393d0ac-0a07-4a10-ab9a-ee4187b06a1a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:34:36 crc kubenswrapper[4558]: I0120 17:34:36.557462 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d393d0ac-0a07-4a10-ab9a-ee4187b06a1a-config-data\") pod \"ceilometer-0\" (UID: \"d393d0ac-0a07-4a10-ab9a-ee4187b06a1a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:34:36 crc kubenswrapper[4558]: I0120 17:34:36.557516 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d393d0ac-0a07-4a10-ab9a-ee4187b06a1a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d393d0ac-0a07-4a10-ab9a-ee4187b06a1a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:34:36 crc kubenswrapper[4558]: I0120 17:34:36.557551 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d393d0ac-0a07-4a10-ab9a-ee4187b06a1a-log-httpd\") pod \"ceilometer-0\" (UID: \"d393d0ac-0a07-4a10-ab9a-ee4187b06a1a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:34:36 crc kubenswrapper[4558]: I0120 17:34:36.557946 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d393d0ac-0a07-4a10-ab9a-ee4187b06a1a-run-httpd\") pod \"ceilometer-0\" (UID: \"d393d0ac-0a07-4a10-ab9a-ee4187b06a1a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:34:36 crc kubenswrapper[4558]: I0120 17:34:36.558217 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d393d0ac-0a07-4a10-ab9a-ee4187b06a1a-log-httpd\") pod \"ceilometer-0\" (UID: \"d393d0ac-0a07-4a10-ab9a-ee4187b06a1a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:34:36 crc kubenswrapper[4558]: I0120 17:34:36.564588 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/d393d0ac-0a07-4a10-ab9a-ee4187b06a1a-config-data\") pod \"ceilometer-0\" (UID: \"d393d0ac-0a07-4a10-ab9a-ee4187b06a1a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:34:36 crc kubenswrapper[4558]: I0120 17:34:36.588446 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d393d0ac-0a07-4a10-ab9a-ee4187b06a1a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d393d0ac-0a07-4a10-ab9a-ee4187b06a1a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:34:36 crc kubenswrapper[4558]: I0120 17:34:36.589739 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d393d0ac-0a07-4a10-ab9a-ee4187b06a1a-scripts\") pod \"ceilometer-0\" (UID: \"d393d0ac-0a07-4a10-ab9a-ee4187b06a1a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:34:36 crc kubenswrapper[4558]: I0120 17:34:36.595432 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d393d0ac-0a07-4a10-ab9a-ee4187b06a1a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d393d0ac-0a07-4a10-ab9a-ee4187b06a1a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:34:36 crc kubenswrapper[4558]: I0120 17:34:36.600829 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qhr8s\" (UniqueName: \"kubernetes.io/projected/d393d0ac-0a07-4a10-ab9a-ee4187b06a1a-kube-api-access-qhr8s\") pod \"ceilometer-0\" (UID: \"d393d0ac-0a07-4a10-ab9a-ee4187b06a1a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:34:36 crc kubenswrapper[4558]: I0120 17:34:36.615292 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="064c3f1d-6117-4cd9-bf5f-400b06b11d59" path="/var/lib/kubelet/pods/064c3f1d-6117-4cd9-bf5f-400b06b11d59/volumes" Jan 20 17:34:36 crc kubenswrapper[4558]: I0120 17:34:36.742923 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:34:36 crc kubenswrapper[4558]: I0120 17:34:36.819107 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-grwr2" Jan 20 17:34:36 crc kubenswrapper[4558]: I0120 17:34:36.874487 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/97258279-69d3-4e8e-935a-b34e6a73be4e-catalog-content\") pod \"97258279-69d3-4e8e-935a-b34e6a73be4e\" (UID: \"97258279-69d3-4e8e-935a-b34e6a73be4e\") " Jan 20 17:34:36 crc kubenswrapper[4558]: I0120 17:34:36.874797 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qrffx\" (UniqueName: \"kubernetes.io/projected/97258279-69d3-4e8e-935a-b34e6a73be4e-kube-api-access-qrffx\") pod \"97258279-69d3-4e8e-935a-b34e6a73be4e\" (UID: \"97258279-69d3-4e8e-935a-b34e6a73be4e\") " Jan 20 17:34:36 crc kubenswrapper[4558]: I0120 17:34:36.874929 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/97258279-69d3-4e8e-935a-b34e6a73be4e-utilities\") pod \"97258279-69d3-4e8e-935a-b34e6a73be4e\" (UID: \"97258279-69d3-4e8e-935a-b34e6a73be4e\") " Jan 20 17:34:36 crc kubenswrapper[4558]: I0120 17:34:36.877869 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/97258279-69d3-4e8e-935a-b34e6a73be4e-utilities" (OuterVolumeSpecName: "utilities") pod "97258279-69d3-4e8e-935a-b34e6a73be4e" (UID: "97258279-69d3-4e8e-935a-b34e6a73be4e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:34:36 crc kubenswrapper[4558]: I0120 17:34:36.882989 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/97258279-69d3-4e8e-935a-b34e6a73be4e-kube-api-access-qrffx" (OuterVolumeSpecName: "kube-api-access-qrffx") pod "97258279-69d3-4e8e-935a-b34e6a73be4e" (UID: "97258279-69d3-4e8e-935a-b34e6a73be4e"). InnerVolumeSpecName "kube-api-access-qrffx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:34:36 crc kubenswrapper[4558]: I0120 17:34:36.916424 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/97258279-69d3-4e8e-935a-b34e6a73be4e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "97258279-69d3-4e8e-935a-b34e6a73be4e" (UID: "97258279-69d3-4e8e-935a-b34e6a73be4e"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:34:36 crc kubenswrapper[4558]: I0120 17:34:36.978433 4558 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/97258279-69d3-4e8e-935a-b34e6a73be4e-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 20 17:34:36 crc kubenswrapper[4558]: I0120 17:34:36.978475 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qrffx\" (UniqueName: \"kubernetes.io/projected/97258279-69d3-4e8e-935a-b34e6a73be4e-kube-api-access-qrffx\") on node \"crc\" DevicePath \"\"" Jan 20 17:34:36 crc kubenswrapper[4558]: I0120 17:34:36.978495 4558 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/97258279-69d3-4e8e-935a-b34e6a73be4e-utilities\") on node \"crc\" DevicePath \"\"" Jan 20 17:34:37 crc kubenswrapper[4558]: I0120 17:34:37.260844 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:34:37 crc kubenswrapper[4558]: I0120 17:34:37.349381 4558 generic.go:334] "Generic (PLEG): container finished" podID="97258279-69d3-4e8e-935a-b34e6a73be4e" containerID="9545d00982f094a98930fb9fe418c2cefaa2598df66b5c7b6805ad188bf042ce" exitCode=0 Jan 20 17:34:37 crc kubenswrapper[4558]: I0120 17:34:37.349430 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-grwr2" event={"ID":"97258279-69d3-4e8e-935a-b34e6a73be4e","Type":"ContainerDied","Data":"9545d00982f094a98930fb9fe418c2cefaa2598df66b5c7b6805ad188bf042ce"} Jan 20 17:34:37 crc kubenswrapper[4558]: I0120 17:34:37.349457 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-grwr2" Jan 20 17:34:37 crc kubenswrapper[4558]: I0120 17:34:37.349523 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-grwr2" event={"ID":"97258279-69d3-4e8e-935a-b34e6a73be4e","Type":"ContainerDied","Data":"dd6a8f534d74beb4ab6b8766bd29c2c7d4d9decfad85a5b150aa257a9ef50230"} Jan 20 17:34:37 crc kubenswrapper[4558]: I0120 17:34:37.349565 4558 scope.go:117] "RemoveContainer" containerID="9545d00982f094a98930fb9fe418c2cefaa2598df66b5c7b6805ad188bf042ce" Jan 20 17:34:37 crc kubenswrapper[4558]: I0120 17:34:37.351263 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"d393d0ac-0a07-4a10-ab9a-ee4187b06a1a","Type":"ContainerStarted","Data":"3795ac64306831ca35f8eec37e400102938a138c8b3af7101862f037d96dab39"} Jan 20 17:34:37 crc kubenswrapper[4558]: I0120 17:34:37.352771 4558 generic.go:334] "Generic (PLEG): container finished" podID="e0e37c27-e17c-46c1-9e53-ce2a92c13f02" containerID="60ee7c69684601f241168e9490c910c322ce0ad1bc0d2f8e418a384d04073baa" exitCode=0 Jan 20 17:34:37 crc kubenswrapper[4558]: I0120 17:34:37.352848 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-jqwc9" event={"ID":"e0e37c27-e17c-46c1-9e53-ce2a92c13f02","Type":"ContainerDied","Data":"60ee7c69684601f241168e9490c910c322ce0ad1bc0d2f8e418a384d04073baa"} Jan 20 17:34:37 crc kubenswrapper[4558]: I0120 17:34:37.369076 4558 scope.go:117] "RemoveContainer" containerID="82c6171f2834671748234907c74bac442e2b2b8b07fcbfa9ec41535fbfa23462" Jan 20 17:34:37 crc kubenswrapper[4558]: I0120 17:34:37.396728 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-grwr2"] Jan 20 
17:34:37 crc kubenswrapper[4558]: I0120 17:34:37.418155 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-grwr2"] Jan 20 17:34:37 crc kubenswrapper[4558]: I0120 17:34:37.422907 4558 scope.go:117] "RemoveContainer" containerID="48fa808dd2c20d63c69f8008c57b98745686660b1cbc6015b9223632d525754a" Jan 20 17:34:37 crc kubenswrapper[4558]: I0120 17:34:37.442944 4558 scope.go:117] "RemoveContainer" containerID="9545d00982f094a98930fb9fe418c2cefaa2598df66b5c7b6805ad188bf042ce" Jan 20 17:34:37 crc kubenswrapper[4558]: E0120 17:34:37.443426 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9545d00982f094a98930fb9fe418c2cefaa2598df66b5c7b6805ad188bf042ce\": container with ID starting with 9545d00982f094a98930fb9fe418c2cefaa2598df66b5c7b6805ad188bf042ce not found: ID does not exist" containerID="9545d00982f094a98930fb9fe418c2cefaa2598df66b5c7b6805ad188bf042ce" Jan 20 17:34:37 crc kubenswrapper[4558]: I0120 17:34:37.443460 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9545d00982f094a98930fb9fe418c2cefaa2598df66b5c7b6805ad188bf042ce"} err="failed to get container status \"9545d00982f094a98930fb9fe418c2cefaa2598df66b5c7b6805ad188bf042ce\": rpc error: code = NotFound desc = could not find container \"9545d00982f094a98930fb9fe418c2cefaa2598df66b5c7b6805ad188bf042ce\": container with ID starting with 9545d00982f094a98930fb9fe418c2cefaa2598df66b5c7b6805ad188bf042ce not found: ID does not exist" Jan 20 17:34:37 crc kubenswrapper[4558]: I0120 17:34:37.443480 4558 scope.go:117] "RemoveContainer" containerID="82c6171f2834671748234907c74bac442e2b2b8b07fcbfa9ec41535fbfa23462" Jan 20 17:34:37 crc kubenswrapper[4558]: E0120 17:34:37.443738 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"82c6171f2834671748234907c74bac442e2b2b8b07fcbfa9ec41535fbfa23462\": container with ID starting with 82c6171f2834671748234907c74bac442e2b2b8b07fcbfa9ec41535fbfa23462 not found: ID does not exist" containerID="82c6171f2834671748234907c74bac442e2b2b8b07fcbfa9ec41535fbfa23462" Jan 20 17:34:37 crc kubenswrapper[4558]: I0120 17:34:37.443758 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"82c6171f2834671748234907c74bac442e2b2b8b07fcbfa9ec41535fbfa23462"} err="failed to get container status \"82c6171f2834671748234907c74bac442e2b2b8b07fcbfa9ec41535fbfa23462\": rpc error: code = NotFound desc = could not find container \"82c6171f2834671748234907c74bac442e2b2b8b07fcbfa9ec41535fbfa23462\": container with ID starting with 82c6171f2834671748234907c74bac442e2b2b8b07fcbfa9ec41535fbfa23462 not found: ID does not exist" Jan 20 17:34:37 crc kubenswrapper[4558]: I0120 17:34:37.443770 4558 scope.go:117] "RemoveContainer" containerID="48fa808dd2c20d63c69f8008c57b98745686660b1cbc6015b9223632d525754a" Jan 20 17:34:37 crc kubenswrapper[4558]: E0120 17:34:37.443971 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"48fa808dd2c20d63c69f8008c57b98745686660b1cbc6015b9223632d525754a\": container with ID starting with 48fa808dd2c20d63c69f8008c57b98745686660b1cbc6015b9223632d525754a not found: ID does not exist" containerID="48fa808dd2c20d63c69f8008c57b98745686660b1cbc6015b9223632d525754a" Jan 20 17:34:37 crc kubenswrapper[4558]: I0120 17:34:37.443989 4558 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"48fa808dd2c20d63c69f8008c57b98745686660b1cbc6015b9223632d525754a"} err="failed to get container status \"48fa808dd2c20d63c69f8008c57b98745686660b1cbc6015b9223632d525754a\": rpc error: code = NotFound desc = could not find container \"48fa808dd2c20d63c69f8008c57b98745686660b1cbc6015b9223632d525754a\": container with ID starting with 48fa808dd2c20d63c69f8008c57b98745686660b1cbc6015b9223632d525754a not found: ID does not exist" Jan 20 17:34:38 crc kubenswrapper[4558]: I0120 17:34:38.374437 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"d393d0ac-0a07-4a10-ab9a-ee4187b06a1a","Type":"ContainerStarted","Data":"c28767df71c970ea5e8653da53296b3b86a33c31261c457782efaa5c0c34b9aa"} Jan 20 17:34:38 crc kubenswrapper[4558]: I0120 17:34:38.580848 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="97258279-69d3-4e8e-935a-b34e6a73be4e" path="/var/lib/kubelet/pods/97258279-69d3-4e8e-935a-b34e6a73be4e/volumes" Jan 20 17:34:38 crc kubenswrapper[4558]: I0120 17:34:38.708206 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-jqwc9" Jan 20 17:34:38 crc kubenswrapper[4558]: I0120 17:34:38.823846 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0e37c27-e17c-46c1-9e53-ce2a92c13f02-config-data\") pod \"e0e37c27-e17c-46c1-9e53-ce2a92c13f02\" (UID: \"e0e37c27-e17c-46c1-9e53-ce2a92c13f02\") " Jan 20 17:34:38 crc kubenswrapper[4558]: I0120 17:34:38.824195 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r4kq5\" (UniqueName: \"kubernetes.io/projected/e0e37c27-e17c-46c1-9e53-ce2a92c13f02-kube-api-access-r4kq5\") pod \"e0e37c27-e17c-46c1-9e53-ce2a92c13f02\" (UID: \"e0e37c27-e17c-46c1-9e53-ce2a92c13f02\") " Jan 20 17:34:38 crc kubenswrapper[4558]: I0120 17:34:38.824377 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e0e37c27-e17c-46c1-9e53-ce2a92c13f02-scripts\") pod \"e0e37c27-e17c-46c1-9e53-ce2a92c13f02\" (UID: \"e0e37c27-e17c-46c1-9e53-ce2a92c13f02\") " Jan 20 17:34:38 crc kubenswrapper[4558]: I0120 17:34:38.824428 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0e37c27-e17c-46c1-9e53-ce2a92c13f02-combined-ca-bundle\") pod \"e0e37c27-e17c-46c1-9e53-ce2a92c13f02\" (UID: \"e0e37c27-e17c-46c1-9e53-ce2a92c13f02\") " Jan 20 17:34:38 crc kubenswrapper[4558]: I0120 17:34:38.827790 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e0e37c27-e17c-46c1-9e53-ce2a92c13f02-scripts" (OuterVolumeSpecName: "scripts") pod "e0e37c27-e17c-46c1-9e53-ce2a92c13f02" (UID: "e0e37c27-e17c-46c1-9e53-ce2a92c13f02"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:34:38 crc kubenswrapper[4558]: I0120 17:34:38.828109 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e0e37c27-e17c-46c1-9e53-ce2a92c13f02-kube-api-access-r4kq5" (OuterVolumeSpecName: "kube-api-access-r4kq5") pod "e0e37c27-e17c-46c1-9e53-ce2a92c13f02" (UID: "e0e37c27-e17c-46c1-9e53-ce2a92c13f02"). InnerVolumeSpecName "kube-api-access-r4kq5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:34:38 crc kubenswrapper[4558]: I0120 17:34:38.858134 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e0e37c27-e17c-46c1-9e53-ce2a92c13f02-config-data" (OuterVolumeSpecName: "config-data") pod "e0e37c27-e17c-46c1-9e53-ce2a92c13f02" (UID: "e0e37c27-e17c-46c1-9e53-ce2a92c13f02"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:34:38 crc kubenswrapper[4558]: I0120 17:34:38.861074 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e0e37c27-e17c-46c1-9e53-ce2a92c13f02-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e0e37c27-e17c-46c1-9e53-ce2a92c13f02" (UID: "e0e37c27-e17c-46c1-9e53-ce2a92c13f02"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:34:38 crc kubenswrapper[4558]: I0120 17:34:38.929851 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r4kq5\" (UniqueName: \"kubernetes.io/projected/e0e37c27-e17c-46c1-9e53-ce2a92c13f02-kube-api-access-r4kq5\") on node \"crc\" DevicePath \"\"" Jan 20 17:34:38 crc kubenswrapper[4558]: I0120 17:34:38.929904 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e0e37c27-e17c-46c1-9e53-ce2a92c13f02-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:34:38 crc kubenswrapper[4558]: I0120 17:34:38.929922 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0e37c27-e17c-46c1-9e53-ce2a92c13f02-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:34:38 crc kubenswrapper[4558]: I0120 17:34:38.929934 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0e37c27-e17c-46c1-9e53-ce2a92c13f02-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:34:39 crc kubenswrapper[4558]: I0120 17:34:39.387929 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"d393d0ac-0a07-4a10-ab9a-ee4187b06a1a","Type":"ContainerStarted","Data":"885f382b361929255e7062afd4f25e3aac93e29a4f55784a8ee8a90d6b2f53d8"} Jan 20 17:34:39 crc kubenswrapper[4558]: I0120 17:34:39.389616 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-jqwc9" event={"ID":"e0e37c27-e17c-46c1-9e53-ce2a92c13f02","Type":"ContainerDied","Data":"6e85c520d9bd9b0e59652c5d00f02c104d7ac1fbfa360eb133b22ffb6aefb714"} Jan 20 17:34:39 crc kubenswrapper[4558]: I0120 17:34:39.389649 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6e85c520d9bd9b0e59652c5d00f02c104d7ac1fbfa360eb133b22ffb6aefb714" Jan 20 17:34:39 crc kubenswrapper[4558]: I0120 17:34:39.389745 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-jqwc9" Jan 20 17:34:39 crc kubenswrapper[4558]: I0120 17:34:39.567756 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:34:39 crc kubenswrapper[4558]: I0120 17:34:39.568014 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="403ecf58-a186-4cda-b9c0-3504f3edeb02" containerName="nova-api-log" containerID="cri-o://1cf660a3fd74859ff963a723ee3b7beb2a4d2c7466eead735cedc8f92917bf47" gracePeriod=30 Jan 20 17:34:39 crc kubenswrapper[4558]: I0120 17:34:39.568262 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="403ecf58-a186-4cda-b9c0-3504f3edeb02" containerName="nova-api-api" containerID="cri-o://808933550d924de3c7edc379c7a62583aecaf780d81bdd690f9ea9e7511961c0" gracePeriod=30 Jan 20 17:34:39 crc kubenswrapper[4558]: I0120 17:34:39.636459 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:34:39 crc kubenswrapper[4558]: I0120 17:34:39.636713 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="6a79b74c-56ea-47f2-aec7-8b63d0a54f3c" containerName="nova-scheduler-scheduler" containerID="cri-o://6d8e674d2d833aea6fdf17f48244d761ba7c0a3485ad6276685492e29412ecf5" gracePeriod=30 Jan 20 17:34:39 crc kubenswrapper[4558]: I0120 17:34:39.659212 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:34:39 crc kubenswrapper[4558]: I0120 17:34:39.659498 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="99b8519e-6e6f-4b03-bc0e-9b159ed53f94" containerName="nova-metadata-metadata" containerID="cri-o://bd6efedb4729482e0b53a3f4f56d4d7e625727ca7567b4ba0e01c49e0cbbf75f" gracePeriod=30 Jan 20 17:34:39 crc kubenswrapper[4558]: I0120 17:34:39.659574 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="99b8519e-6e6f-4b03-bc0e-9b159ed53f94" containerName="nova-metadata-log" containerID="cri-o://02e098431930837fb92268532afd86ec94107352fd27d8c1491004cdd3da1bdd" gracePeriod=30 Jan 20 17:34:40 crc kubenswrapper[4558]: I0120 17:34:40.409136 4558 generic.go:334] "Generic (PLEG): container finished" podID="99b8519e-6e6f-4b03-bc0e-9b159ed53f94" containerID="02e098431930837fb92268532afd86ec94107352fd27d8c1491004cdd3da1bdd" exitCode=143 Jan 20 17:34:40 crc kubenswrapper[4558]: I0120 17:34:40.409232 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"99b8519e-6e6f-4b03-bc0e-9b159ed53f94","Type":"ContainerDied","Data":"02e098431930837fb92268532afd86ec94107352fd27d8c1491004cdd3da1bdd"} Jan 20 17:34:40 crc kubenswrapper[4558]: I0120 17:34:40.412857 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"d393d0ac-0a07-4a10-ab9a-ee4187b06a1a","Type":"ContainerStarted","Data":"a668618c15e38bf505cbb7753467741c1052b1b1764b31556e8205a83f0c0c3b"} Jan 20 17:34:40 crc kubenswrapper[4558]: I0120 17:34:40.416277 4558 generic.go:334] "Generic (PLEG): container finished" podID="403ecf58-a186-4cda-b9c0-3504f3edeb02" containerID="1cf660a3fd74859ff963a723ee3b7beb2a4d2c7466eead735cedc8f92917bf47" exitCode=143 Jan 20 17:34:40 crc 
kubenswrapper[4558]: I0120 17:34:40.416384 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"403ecf58-a186-4cda-b9c0-3504f3edeb02","Type":"ContainerDied","Data":"1cf660a3fd74859ff963a723ee3b7beb2a4d2c7466eead735cedc8f92917bf47"} Jan 20 17:34:41 crc kubenswrapper[4558]: I0120 17:34:41.430597 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"d393d0ac-0a07-4a10-ab9a-ee4187b06a1a","Type":"ContainerStarted","Data":"37f082e53f69b60d82922862c4aabdec9b3cd30d3ff34d5ac037935a9c1bd638"} Jan 20 17:34:41 crc kubenswrapper[4558]: I0120 17:34:41.431392 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:34:41 crc kubenswrapper[4558]: I0120 17:34:41.451715 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=1.915037173 podStartE2EDuration="5.451690883s" podCreationTimestamp="2026-01-20 17:34:36 +0000 UTC" firstStartedPulling="2026-01-20 17:34:37.26662192 +0000 UTC m=+3171.026959886" lastFinishedPulling="2026-01-20 17:34:40.803275629 +0000 UTC m=+3174.563613596" observedRunningTime="2026-01-20 17:34:41.447983102 +0000 UTC m=+3175.208321069" watchObservedRunningTime="2026-01-20 17:34:41.451690883 +0000 UTC m=+3175.212028841" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.155049 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.221559 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/403ecf58-a186-4cda-b9c0-3504f3edeb02-public-tls-certs\") pod \"403ecf58-a186-4cda-b9c0-3504f3edeb02\" (UID: \"403ecf58-a186-4cda-b9c0-3504f3edeb02\") " Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.221674 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/403ecf58-a186-4cda-b9c0-3504f3edeb02-config-data\") pod \"403ecf58-a186-4cda-b9c0-3504f3edeb02\" (UID: \"403ecf58-a186-4cda-b9c0-3504f3edeb02\") " Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.221770 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/403ecf58-a186-4cda-b9c0-3504f3edeb02-internal-tls-certs\") pod \"403ecf58-a186-4cda-b9c0-3504f3edeb02\" (UID: \"403ecf58-a186-4cda-b9c0-3504f3edeb02\") " Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.221869 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r2hzs\" (UniqueName: \"kubernetes.io/projected/403ecf58-a186-4cda-b9c0-3504f3edeb02-kube-api-access-r2hzs\") pod \"403ecf58-a186-4cda-b9c0-3504f3edeb02\" (UID: \"403ecf58-a186-4cda-b9c0-3504f3edeb02\") " Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.221947 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/403ecf58-a186-4cda-b9c0-3504f3edeb02-logs\") pod \"403ecf58-a186-4cda-b9c0-3504f3edeb02\" (UID: \"403ecf58-a186-4cda-b9c0-3504f3edeb02\") " Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.222087 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/403ecf58-a186-4cda-b9c0-3504f3edeb02-combined-ca-bundle\") pod \"403ecf58-a186-4cda-b9c0-3504f3edeb02\" (UID: \"403ecf58-a186-4cda-b9c0-3504f3edeb02\") " Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.229182 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/403ecf58-a186-4cda-b9c0-3504f3edeb02-logs" (OuterVolumeSpecName: "logs") pod "403ecf58-a186-4cda-b9c0-3504f3edeb02" (UID: "403ecf58-a186-4cda-b9c0-3504f3edeb02"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.231221 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/403ecf58-a186-4cda-b9c0-3504f3edeb02-kube-api-access-r2hzs" (OuterVolumeSpecName: "kube-api-access-r2hzs") pod "403ecf58-a186-4cda-b9c0-3504f3edeb02" (UID: "403ecf58-a186-4cda-b9c0-3504f3edeb02"). InnerVolumeSpecName "kube-api-access-r2hzs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.260036 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/403ecf58-a186-4cda-b9c0-3504f3edeb02-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "403ecf58-a186-4cda-b9c0-3504f3edeb02" (UID: "403ecf58-a186-4cda-b9c0-3504f3edeb02"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.262542 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.262945 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/403ecf58-a186-4cda-b9c0-3504f3edeb02-config-data" (OuterVolumeSpecName: "config-data") pod "403ecf58-a186-4cda-b9c0-3504f3edeb02" (UID: "403ecf58-a186-4cda-b9c0-3504f3edeb02"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.275231 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/403ecf58-a186-4cda-b9c0-3504f3edeb02-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "403ecf58-a186-4cda-b9c0-3504f3edeb02" (UID: "403ecf58-a186-4cda-b9c0-3504f3edeb02"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.308553 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/403ecf58-a186-4cda-b9c0-3504f3edeb02-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "403ecf58-a186-4cda-b9c0-3504f3edeb02" (UID: "403ecf58-a186-4cda-b9c0-3504f3edeb02"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.323560 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/99b8519e-6e6f-4b03-bc0e-9b159ed53f94-combined-ca-bundle\") pod \"99b8519e-6e6f-4b03-bc0e-9b159ed53f94\" (UID: \"99b8519e-6e6f-4b03-bc0e-9b159ed53f94\") " Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.323622 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4kbpw\" (UniqueName: \"kubernetes.io/projected/99b8519e-6e6f-4b03-bc0e-9b159ed53f94-kube-api-access-4kbpw\") pod \"99b8519e-6e6f-4b03-bc0e-9b159ed53f94\" (UID: \"99b8519e-6e6f-4b03-bc0e-9b159ed53f94\") " Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.323653 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/99b8519e-6e6f-4b03-bc0e-9b159ed53f94-logs\") pod \"99b8519e-6e6f-4b03-bc0e-9b159ed53f94\" (UID: \"99b8519e-6e6f-4b03-bc0e-9b159ed53f94\") " Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.323858 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/99b8519e-6e6f-4b03-bc0e-9b159ed53f94-config-data\") pod \"99b8519e-6e6f-4b03-bc0e-9b159ed53f94\" (UID: \"99b8519e-6e6f-4b03-bc0e-9b159ed53f94\") " Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.323893 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/99b8519e-6e6f-4b03-bc0e-9b159ed53f94-nova-metadata-tls-certs\") pod \"99b8519e-6e6f-4b03-bc0e-9b159ed53f94\" (UID: \"99b8519e-6e6f-4b03-bc0e-9b159ed53f94\") " Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.324376 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/403ecf58-a186-4cda-b9c0-3504f3edeb02-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.324398 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r2hzs\" (UniqueName: \"kubernetes.io/projected/403ecf58-a186-4cda-b9c0-3504f3edeb02-kube-api-access-r2hzs\") on node \"crc\" DevicePath \"\"" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.324411 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/403ecf58-a186-4cda-b9c0-3504f3edeb02-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.324425 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/403ecf58-a186-4cda-b9c0-3504f3edeb02-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.324434 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/403ecf58-a186-4cda-b9c0-3504f3edeb02-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.324443 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/403ecf58-a186-4cda-b9c0-3504f3edeb02-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.324691 4558 operation_generator.go:803] UnmountVolume.TearDown 
succeeded for volume "kubernetes.io/empty-dir/99b8519e-6e6f-4b03-bc0e-9b159ed53f94-logs" (OuterVolumeSpecName: "logs") pod "99b8519e-6e6f-4b03-bc0e-9b159ed53f94" (UID: "99b8519e-6e6f-4b03-bc0e-9b159ed53f94"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.327269 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/99b8519e-6e6f-4b03-bc0e-9b159ed53f94-kube-api-access-4kbpw" (OuterVolumeSpecName: "kube-api-access-4kbpw") pod "99b8519e-6e6f-4b03-bc0e-9b159ed53f94" (UID: "99b8519e-6e6f-4b03-bc0e-9b159ed53f94"). InnerVolumeSpecName "kube-api-access-4kbpw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.347396 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/99b8519e-6e6f-4b03-bc0e-9b159ed53f94-config-data" (OuterVolumeSpecName: "config-data") pod "99b8519e-6e6f-4b03-bc0e-9b159ed53f94" (UID: "99b8519e-6e6f-4b03-bc0e-9b159ed53f94"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:34:43 crc kubenswrapper[4558]: E0120 17:34:43.357102 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="6d8e674d2d833aea6fdf17f48244d761ba7c0a3485ad6276685492e29412ecf5" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:34:43 crc kubenswrapper[4558]: E0120 17:34:43.359615 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="6d8e674d2d833aea6fdf17f48244d761ba7c0a3485ad6276685492e29412ecf5" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:34:43 crc kubenswrapper[4558]: E0120 17:34:43.361061 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="6d8e674d2d833aea6fdf17f48244d761ba7c0a3485ad6276685492e29412ecf5" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:34:43 crc kubenswrapper[4558]: E0120 17:34:43.361105 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="6a79b74c-56ea-47f2-aec7-8b63d0a54f3c" containerName="nova-scheduler-scheduler" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.361377 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/99b8519e-6e6f-4b03-bc0e-9b159ed53f94-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "99b8519e-6e6f-4b03-bc0e-9b159ed53f94" (UID: "99b8519e-6e6f-4b03-bc0e-9b159ed53f94"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.366992 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/99b8519e-6e6f-4b03-bc0e-9b159ed53f94-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "99b8519e-6e6f-4b03-bc0e-9b159ed53f94" (UID: "99b8519e-6e6f-4b03-bc0e-9b159ed53f94"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.426207 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/99b8519e-6e6f-4b03-bc0e-9b159ed53f94-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.426243 4558 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/99b8519e-6e6f-4b03-bc0e-9b159ed53f94-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.426257 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/99b8519e-6e6f-4b03-bc0e-9b159ed53f94-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.426270 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4kbpw\" (UniqueName: \"kubernetes.io/projected/99b8519e-6e6f-4b03-bc0e-9b159ed53f94-kube-api-access-4kbpw\") on node \"crc\" DevicePath \"\"" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.426281 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/99b8519e-6e6f-4b03-bc0e-9b159ed53f94-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.453569 4558 generic.go:334] "Generic (PLEG): container finished" podID="99b8519e-6e6f-4b03-bc0e-9b159ed53f94" containerID="bd6efedb4729482e0b53a3f4f56d4d7e625727ca7567b4ba0e01c49e0cbbf75f" exitCode=0 Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.453647 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.453733 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"99b8519e-6e6f-4b03-bc0e-9b159ed53f94","Type":"ContainerDied","Data":"bd6efedb4729482e0b53a3f4f56d4d7e625727ca7567b4ba0e01c49e0cbbf75f"} Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.453788 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"99b8519e-6e6f-4b03-bc0e-9b159ed53f94","Type":"ContainerDied","Data":"094e78246e2cf02fc340ead546fe5604db08f42b14dc3ebe3fc80018c629673b"} Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.453822 4558 scope.go:117] "RemoveContainer" containerID="bd6efedb4729482e0b53a3f4f56d4d7e625727ca7567b4ba0e01c49e0cbbf75f" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.459770 4558 generic.go:334] "Generic (PLEG): container finished" podID="403ecf58-a186-4cda-b9c0-3504f3edeb02" containerID="808933550d924de3c7edc379c7a62583aecaf780d81bdd690f9ea9e7511961c0" exitCode=0 Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.459842 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"403ecf58-a186-4cda-b9c0-3504f3edeb02","Type":"ContainerDied","Data":"808933550d924de3c7edc379c7a62583aecaf780d81bdd690f9ea9e7511961c0"} Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.459892 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"403ecf58-a186-4cda-b9c0-3504f3edeb02","Type":"ContainerDied","Data":"1d86216a784830a97f30241fe62252e3ad3341a0743a0d2d7b8cb38df1498998"} Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.459996 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.490246 4558 scope.go:117] "RemoveContainer" containerID="02e098431930837fb92268532afd86ec94107352fd27d8c1491004cdd3da1bdd" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.501318 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.519061 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.529107 4558 scope.go:117] "RemoveContainer" containerID="bd6efedb4729482e0b53a3f4f56d4d7e625727ca7567b4ba0e01c49e0cbbf75f" Jan 20 17:34:43 crc kubenswrapper[4558]: E0120 17:34:43.529530 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bd6efedb4729482e0b53a3f4f56d4d7e625727ca7567b4ba0e01c49e0cbbf75f\": container with ID starting with bd6efedb4729482e0b53a3f4f56d4d7e625727ca7567b4ba0e01c49e0cbbf75f not found: ID does not exist" containerID="bd6efedb4729482e0b53a3f4f56d4d7e625727ca7567b4ba0e01c49e0cbbf75f" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.529559 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bd6efedb4729482e0b53a3f4f56d4d7e625727ca7567b4ba0e01c49e0cbbf75f"} err="failed to get container status \"bd6efedb4729482e0b53a3f4f56d4d7e625727ca7567b4ba0e01c49e0cbbf75f\": rpc error: code = NotFound desc = could not find container \"bd6efedb4729482e0b53a3f4f56d4d7e625727ca7567b4ba0e01c49e0cbbf75f\": container with ID starting with bd6efedb4729482e0b53a3f4f56d4d7e625727ca7567b4ba0e01c49e0cbbf75f not found: ID does not exist" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.529580 4558 scope.go:117] "RemoveContainer" containerID="02e098431930837fb92268532afd86ec94107352fd27d8c1491004cdd3da1bdd" Jan 20 17:34:43 crc kubenswrapper[4558]: E0120 17:34:43.530509 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"02e098431930837fb92268532afd86ec94107352fd27d8c1491004cdd3da1bdd\": container with ID starting with 02e098431930837fb92268532afd86ec94107352fd27d8c1491004cdd3da1bdd not found: ID does not exist" containerID="02e098431930837fb92268532afd86ec94107352fd27d8c1491004cdd3da1bdd" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.530549 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"02e098431930837fb92268532afd86ec94107352fd27d8c1491004cdd3da1bdd"} err="failed to get container status \"02e098431930837fb92268532afd86ec94107352fd27d8c1491004cdd3da1bdd\": rpc error: code = NotFound desc = could not find container \"02e098431930837fb92268532afd86ec94107352fd27d8c1491004cdd3da1bdd\": container with ID starting with 02e098431930837fb92268532afd86ec94107352fd27d8c1491004cdd3da1bdd not found: ID does not exist" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.530578 4558 scope.go:117] "RemoveContainer" containerID="808933550d924de3c7edc379c7a62583aecaf780d81bdd690f9ea9e7511961c0" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.536562 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.541329 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:34:43 
crc kubenswrapper[4558]: I0120 17:34:43.546838 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:34:43 crc kubenswrapper[4558]: E0120 17:34:43.547462 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="403ecf58-a186-4cda-b9c0-3504f3edeb02" containerName="nova-api-api" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.547482 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="403ecf58-a186-4cda-b9c0-3504f3edeb02" containerName="nova-api-api" Jan 20 17:34:43 crc kubenswrapper[4558]: E0120 17:34:43.547669 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="403ecf58-a186-4cda-b9c0-3504f3edeb02" containerName="nova-api-log" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.547684 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="403ecf58-a186-4cda-b9c0-3504f3edeb02" containerName="nova-api-log" Jan 20 17:34:43 crc kubenswrapper[4558]: E0120 17:34:43.547704 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0e37c27-e17c-46c1-9e53-ce2a92c13f02" containerName="nova-manage" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.547713 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0e37c27-e17c-46c1-9e53-ce2a92c13f02" containerName="nova-manage" Jan 20 17:34:43 crc kubenswrapper[4558]: E0120 17:34:43.547754 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99b8519e-6e6f-4b03-bc0e-9b159ed53f94" containerName="nova-metadata-log" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.547762 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="99b8519e-6e6f-4b03-bc0e-9b159ed53f94" containerName="nova-metadata-log" Jan 20 17:34:43 crc kubenswrapper[4558]: E0120 17:34:43.547776 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99b8519e-6e6f-4b03-bc0e-9b159ed53f94" containerName="nova-metadata-metadata" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.547782 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="99b8519e-6e6f-4b03-bc0e-9b159ed53f94" containerName="nova-metadata-metadata" Jan 20 17:34:43 crc kubenswrapper[4558]: E0120 17:34:43.547790 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97258279-69d3-4e8e-935a-b34e6a73be4e" containerName="registry-server" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.547796 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="97258279-69d3-4e8e-935a-b34e6a73be4e" containerName="registry-server" Jan 20 17:34:43 crc kubenswrapper[4558]: E0120 17:34:43.547806 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97258279-69d3-4e8e-935a-b34e6a73be4e" containerName="extract-content" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.547841 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="97258279-69d3-4e8e-935a-b34e6a73be4e" containerName="extract-content" Jan 20 17:34:43 crc kubenswrapper[4558]: E0120 17:34:43.547859 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97258279-69d3-4e8e-935a-b34e6a73be4e" containerName="extract-utilities" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.547865 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="97258279-69d3-4e8e-935a-b34e6a73be4e" containerName="extract-utilities" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.548109 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="99b8519e-6e6f-4b03-bc0e-9b159ed53f94" containerName="nova-metadata-log" Jan 20 17:34:43 crc 
kubenswrapper[4558]: I0120 17:34:43.548130 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0e37c27-e17c-46c1-9e53-ce2a92c13f02" containerName="nova-manage" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.548155 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="97258279-69d3-4e8e-935a-b34e6a73be4e" containerName="registry-server" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.548195 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="99b8519e-6e6f-4b03-bc0e-9b159ed53f94" containerName="nova-metadata-metadata" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.548206 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="403ecf58-a186-4cda-b9c0-3504f3edeb02" containerName="nova-api-api" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.548250 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="403ecf58-a186-4cda-b9c0-3504f3edeb02" containerName="nova-api-log" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.550305 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.552131 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-metadata-internal-svc" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.552357 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.552450 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-metadata-config-data" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.554078 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.557575 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-api-config-data" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.558097 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.558257 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-internal-svc" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.558273 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-public-svc" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.563273 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.584974 4558 scope.go:117] "RemoveContainer" containerID="1cf660a3fd74859ff963a723ee3b7beb2a4d2c7466eead735cedc8f92917bf47" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.611747 4558 scope.go:117] "RemoveContainer" containerID="808933550d924de3c7edc379c7a62583aecaf780d81bdd690f9ea9e7511961c0" Jan 20 17:34:43 crc kubenswrapper[4558]: E0120 17:34:43.612213 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"808933550d924de3c7edc379c7a62583aecaf780d81bdd690f9ea9e7511961c0\": container with ID starting with 808933550d924de3c7edc379c7a62583aecaf780d81bdd690f9ea9e7511961c0 not found: ID does not exist" containerID="808933550d924de3c7edc379c7a62583aecaf780d81bdd690f9ea9e7511961c0" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.612255 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"808933550d924de3c7edc379c7a62583aecaf780d81bdd690f9ea9e7511961c0"} err="failed to get container status \"808933550d924de3c7edc379c7a62583aecaf780d81bdd690f9ea9e7511961c0\": rpc error: code = NotFound desc = could not find container \"808933550d924de3c7edc379c7a62583aecaf780d81bdd690f9ea9e7511961c0\": container with ID starting with 808933550d924de3c7edc379c7a62583aecaf780d81bdd690f9ea9e7511961c0 not found: ID does not exist" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.612278 4558 scope.go:117] "RemoveContainer" containerID="1cf660a3fd74859ff963a723ee3b7beb2a4d2c7466eead735cedc8f92917bf47" Jan 20 17:34:43 crc kubenswrapper[4558]: E0120 17:34:43.612745 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1cf660a3fd74859ff963a723ee3b7beb2a4d2c7466eead735cedc8f92917bf47\": container with ID starting with 1cf660a3fd74859ff963a723ee3b7beb2a4d2c7466eead735cedc8f92917bf47 not found: ID does not exist" containerID="1cf660a3fd74859ff963a723ee3b7beb2a4d2c7466eead735cedc8f92917bf47" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.612771 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1cf660a3fd74859ff963a723ee3b7beb2a4d2c7466eead735cedc8f92917bf47"} err="failed to get container status \"1cf660a3fd74859ff963a723ee3b7beb2a4d2c7466eead735cedc8f92917bf47\": rpc error: code = NotFound desc = could not find container \"1cf660a3fd74859ff963a723ee3b7beb2a4d2c7466eead735cedc8f92917bf47\": container with ID starting with 1cf660a3fd74859ff963a723ee3b7beb2a4d2c7466eead735cedc8f92917bf47 not found: ID 
does not exist" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.731388 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tff9b\" (UniqueName: \"kubernetes.io/projected/872e7858-d089-4bb5-89aa-060646e32754-kube-api-access-tff9b\") pod \"nova-api-0\" (UID: \"872e7858-d089-4bb5-89aa-060646e32754\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.731619 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-flg88\" (UniqueName: \"kubernetes.io/projected/a0122392-1c61-4133-b05f-cc6a622abaf9-kube-api-access-flg88\") pod \"nova-metadata-0\" (UID: \"a0122392-1c61-4133-b05f-cc6a622abaf9\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.731669 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/872e7858-d089-4bb5-89aa-060646e32754-public-tls-certs\") pod \"nova-api-0\" (UID: \"872e7858-d089-4bb5-89aa-060646e32754\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.731707 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/a0122392-1c61-4133-b05f-cc6a622abaf9-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"a0122392-1c61-4133-b05f-cc6a622abaf9\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.731796 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a0122392-1c61-4133-b05f-cc6a622abaf9-logs\") pod \"nova-metadata-0\" (UID: \"a0122392-1c61-4133-b05f-cc6a622abaf9\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.731843 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0122392-1c61-4133-b05f-cc6a622abaf9-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"a0122392-1c61-4133-b05f-cc6a622abaf9\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.731884 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a0122392-1c61-4133-b05f-cc6a622abaf9-config-data\") pod \"nova-metadata-0\" (UID: \"a0122392-1c61-4133-b05f-cc6a622abaf9\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.731994 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/872e7858-d089-4bb5-89aa-060646e32754-config-data\") pod \"nova-api-0\" (UID: \"872e7858-d089-4bb5-89aa-060646e32754\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.732063 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/872e7858-d089-4bb5-89aa-060646e32754-logs\") pod \"nova-api-0\" (UID: \"872e7858-d089-4bb5-89aa-060646e32754\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:34:43 crc kubenswrapper[4558]: 
I0120 17:34:43.732119 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/872e7858-d089-4bb5-89aa-060646e32754-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"872e7858-d089-4bb5-89aa-060646e32754\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.732335 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/872e7858-d089-4bb5-89aa-060646e32754-internal-tls-certs\") pod \"nova-api-0\" (UID: \"872e7858-d089-4bb5-89aa-060646e32754\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.835087 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tff9b\" (UniqueName: \"kubernetes.io/projected/872e7858-d089-4bb5-89aa-060646e32754-kube-api-access-tff9b\") pod \"nova-api-0\" (UID: \"872e7858-d089-4bb5-89aa-060646e32754\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.835555 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-flg88\" (UniqueName: \"kubernetes.io/projected/a0122392-1c61-4133-b05f-cc6a622abaf9-kube-api-access-flg88\") pod \"nova-metadata-0\" (UID: \"a0122392-1c61-4133-b05f-cc6a622abaf9\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.835594 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/872e7858-d089-4bb5-89aa-060646e32754-public-tls-certs\") pod \"nova-api-0\" (UID: \"872e7858-d089-4bb5-89aa-060646e32754\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.835623 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/a0122392-1c61-4133-b05f-cc6a622abaf9-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"a0122392-1c61-4133-b05f-cc6a622abaf9\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.835666 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a0122392-1c61-4133-b05f-cc6a622abaf9-logs\") pod \"nova-metadata-0\" (UID: \"a0122392-1c61-4133-b05f-cc6a622abaf9\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.835692 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0122392-1c61-4133-b05f-cc6a622abaf9-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"a0122392-1c61-4133-b05f-cc6a622abaf9\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.835726 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a0122392-1c61-4133-b05f-cc6a622abaf9-config-data\") pod \"nova-metadata-0\" (UID: \"a0122392-1c61-4133-b05f-cc6a622abaf9\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.835774 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/872e7858-d089-4bb5-89aa-060646e32754-config-data\") pod \"nova-api-0\" (UID: \"872e7858-d089-4bb5-89aa-060646e32754\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.835819 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/872e7858-d089-4bb5-89aa-060646e32754-logs\") pod \"nova-api-0\" (UID: \"872e7858-d089-4bb5-89aa-060646e32754\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.835850 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/872e7858-d089-4bb5-89aa-060646e32754-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"872e7858-d089-4bb5-89aa-060646e32754\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.835896 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/872e7858-d089-4bb5-89aa-060646e32754-internal-tls-certs\") pod \"nova-api-0\" (UID: \"872e7858-d089-4bb5-89aa-060646e32754\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.837019 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/872e7858-d089-4bb5-89aa-060646e32754-logs\") pod \"nova-api-0\" (UID: \"872e7858-d089-4bb5-89aa-060646e32754\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.838343 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a0122392-1c61-4133-b05f-cc6a622abaf9-logs\") pod \"nova-metadata-0\" (UID: \"a0122392-1c61-4133-b05f-cc6a622abaf9\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.843093 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/872e7858-d089-4bb5-89aa-060646e32754-internal-tls-certs\") pod \"nova-api-0\" (UID: \"872e7858-d089-4bb5-89aa-060646e32754\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.843295 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/872e7858-d089-4bb5-89aa-060646e32754-public-tls-certs\") pod \"nova-api-0\" (UID: \"872e7858-d089-4bb5-89aa-060646e32754\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.843554 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0122392-1c61-4133-b05f-cc6a622abaf9-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"a0122392-1c61-4133-b05f-cc6a622abaf9\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.845762 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/872e7858-d089-4bb5-89aa-060646e32754-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"872e7858-d089-4bb5-89aa-060646e32754\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.847681 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/a0122392-1c61-4133-b05f-cc6a622abaf9-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"a0122392-1c61-4133-b05f-cc6a622abaf9\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.848248 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/872e7858-d089-4bb5-89aa-060646e32754-config-data\") pod \"nova-api-0\" (UID: \"872e7858-d089-4bb5-89aa-060646e32754\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.852912 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-flg88\" (UniqueName: \"kubernetes.io/projected/a0122392-1c61-4133-b05f-cc6a622abaf9-kube-api-access-flg88\") pod \"nova-metadata-0\" (UID: \"a0122392-1c61-4133-b05f-cc6a622abaf9\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.853528 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tff9b\" (UniqueName: \"kubernetes.io/projected/872e7858-d089-4bb5-89aa-060646e32754-kube-api-access-tff9b\") pod \"nova-api-0\" (UID: \"872e7858-d089-4bb5-89aa-060646e32754\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.854975 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a0122392-1c61-4133-b05f-cc6a622abaf9-config-data\") pod \"nova-metadata-0\" (UID: \"a0122392-1c61-4133-b05f-cc6a622abaf9\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.874072 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:34:43 crc kubenswrapper[4558]: I0120 17:34:43.890977 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:34:44 crc kubenswrapper[4558]: I0120 17:34:44.323595 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:34:44 crc kubenswrapper[4558]: I0120 17:34:44.375182 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:34:44 crc kubenswrapper[4558]: W0120 17:34:44.379438 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod872e7858_d089_4bb5_89aa_060646e32754.slice/crio-d58e3b71d614150c0a7b3cd7ba45e6d20229691cd7dd6c2cc8f72d53384bec24 WatchSource:0}: Error finding container d58e3b71d614150c0a7b3cd7ba45e6d20229691cd7dd6c2cc8f72d53384bec24: Status 404 returned error can't find the container with id d58e3b71d614150c0a7b3cd7ba45e6d20229691cd7dd6c2cc8f72d53384bec24 Jan 20 17:34:44 crc kubenswrapper[4558]: I0120 17:34:44.472060 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"a0122392-1c61-4133-b05f-cc6a622abaf9","Type":"ContainerStarted","Data":"3cc11aee3513e5dcd611c51793684a73d892722ce3a04656b00dd8dbb31df8c7"} Jan 20 17:34:44 crc kubenswrapper[4558]: I0120 17:34:44.474712 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"872e7858-d089-4bb5-89aa-060646e32754","Type":"ContainerStarted","Data":"d58e3b71d614150c0a7b3cd7ba45e6d20229691cd7dd6c2cc8f72d53384bec24"} Jan 20 17:34:44 crc kubenswrapper[4558]: I0120 17:34:44.579864 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="403ecf58-a186-4cda-b9c0-3504f3edeb02" path="/var/lib/kubelet/pods/403ecf58-a186-4cda-b9c0-3504f3edeb02/volumes" Jan 20 17:34:44 crc kubenswrapper[4558]: I0120 17:34:44.581116 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="99b8519e-6e6f-4b03-bc0e-9b159ed53f94" path="/var/lib/kubelet/pods/99b8519e-6e6f-4b03-bc0e-9b159ed53f94/volumes" Jan 20 17:34:44 crc kubenswrapper[4558]: I0120 17:34:44.871003 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:34:45 crc kubenswrapper[4558]: I0120 17:34:45.063436 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ktzpc\" (UniqueName: \"kubernetes.io/projected/6a79b74c-56ea-47f2-aec7-8b63d0a54f3c-kube-api-access-ktzpc\") pod \"6a79b74c-56ea-47f2-aec7-8b63d0a54f3c\" (UID: \"6a79b74c-56ea-47f2-aec7-8b63d0a54f3c\") " Jan 20 17:34:45 crc kubenswrapper[4558]: I0120 17:34:45.064067 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6a79b74c-56ea-47f2-aec7-8b63d0a54f3c-combined-ca-bundle\") pod \"6a79b74c-56ea-47f2-aec7-8b63d0a54f3c\" (UID: \"6a79b74c-56ea-47f2-aec7-8b63d0a54f3c\") " Jan 20 17:34:45 crc kubenswrapper[4558]: I0120 17:34:45.064207 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6a79b74c-56ea-47f2-aec7-8b63d0a54f3c-config-data\") pod \"6a79b74c-56ea-47f2-aec7-8b63d0a54f3c\" (UID: \"6a79b74c-56ea-47f2-aec7-8b63d0a54f3c\") " Jan 20 17:34:45 crc kubenswrapper[4558]: I0120 17:34:45.070515 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6a79b74c-56ea-47f2-aec7-8b63d0a54f3c-kube-api-access-ktzpc" (OuterVolumeSpecName: "kube-api-access-ktzpc") pod "6a79b74c-56ea-47f2-aec7-8b63d0a54f3c" (UID: "6a79b74c-56ea-47f2-aec7-8b63d0a54f3c"). InnerVolumeSpecName "kube-api-access-ktzpc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:34:45 crc kubenswrapper[4558]: I0120 17:34:45.093266 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6a79b74c-56ea-47f2-aec7-8b63d0a54f3c-config-data" (OuterVolumeSpecName: "config-data") pod "6a79b74c-56ea-47f2-aec7-8b63d0a54f3c" (UID: "6a79b74c-56ea-47f2-aec7-8b63d0a54f3c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:34:45 crc kubenswrapper[4558]: I0120 17:34:45.095949 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6a79b74c-56ea-47f2-aec7-8b63d0a54f3c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6a79b74c-56ea-47f2-aec7-8b63d0a54f3c" (UID: "6a79b74c-56ea-47f2-aec7-8b63d0a54f3c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:34:45 crc kubenswrapper[4558]: I0120 17:34:45.167493 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6a79b74c-56ea-47f2-aec7-8b63d0a54f3c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:34:45 crc kubenswrapper[4558]: I0120 17:34:45.167525 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6a79b74c-56ea-47f2-aec7-8b63d0a54f3c-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:34:45 crc kubenswrapper[4558]: I0120 17:34:45.167540 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ktzpc\" (UniqueName: \"kubernetes.io/projected/6a79b74c-56ea-47f2-aec7-8b63d0a54f3c-kube-api-access-ktzpc\") on node \"crc\" DevicePath \"\"" Jan 20 17:34:45 crc kubenswrapper[4558]: I0120 17:34:45.490343 4558 generic.go:334] "Generic (PLEG): container finished" podID="6a79b74c-56ea-47f2-aec7-8b63d0a54f3c" containerID="6d8e674d2d833aea6fdf17f48244d761ba7c0a3485ad6276685492e29412ecf5" exitCode=0 Jan 20 17:34:45 crc kubenswrapper[4558]: I0120 17:34:45.490425 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:34:45 crc kubenswrapper[4558]: I0120 17:34:45.490464 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"6a79b74c-56ea-47f2-aec7-8b63d0a54f3c","Type":"ContainerDied","Data":"6d8e674d2d833aea6fdf17f48244d761ba7c0a3485ad6276685492e29412ecf5"} Jan 20 17:34:45 crc kubenswrapper[4558]: I0120 17:34:45.490546 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"6a79b74c-56ea-47f2-aec7-8b63d0a54f3c","Type":"ContainerDied","Data":"d81683bc405cd6eb7d7aad1e014d9b09d9fac18cd52755985f94c217aa9a511d"} Jan 20 17:34:45 crc kubenswrapper[4558]: I0120 17:34:45.490580 4558 scope.go:117] "RemoveContainer" containerID="6d8e674d2d833aea6fdf17f48244d761ba7c0a3485ad6276685492e29412ecf5" Jan 20 17:34:45 crc kubenswrapper[4558]: I0120 17:34:45.493954 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"a0122392-1c61-4133-b05f-cc6a622abaf9","Type":"ContainerStarted","Data":"c5868b91333e3f1c6356506d56295db65bbb6580ddcdb5dfaafeb5e8213164a5"} Jan 20 17:34:45 crc kubenswrapper[4558]: I0120 17:34:45.494663 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"a0122392-1c61-4133-b05f-cc6a622abaf9","Type":"ContainerStarted","Data":"d73d4daa0fc88cb61145408b21e4b824ef93c1b31996aa4f8630de0f2161615b"} Jan 20 17:34:45 crc kubenswrapper[4558]: I0120 17:34:45.511704 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"872e7858-d089-4bb5-89aa-060646e32754","Type":"ContainerStarted","Data":"52ee2a447d2cae531422324e0f180c5e03fd7fe612731ae435103a62b66fa686"} Jan 20 17:34:45 crc kubenswrapper[4558]: I0120 17:34:45.511857 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"872e7858-d089-4bb5-89aa-060646e32754","Type":"ContainerStarted","Data":"9dea7e4de1f3490846ec2c17c47d30837c43228f400cb136eee18c9970b72bb6"} Jan 20 17:34:45 crc kubenswrapper[4558]: I0120 17:34:45.542302 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-metadata-0" 
podStartSLOduration=2.54227501 podStartE2EDuration="2.54227501s" podCreationTimestamp="2026-01-20 17:34:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:34:45.520714209 +0000 UTC m=+3179.281052177" watchObservedRunningTime="2026-01-20 17:34:45.54227501 +0000 UTC m=+3179.302612978" Jan 20 17:34:45 crc kubenswrapper[4558]: I0120 17:34:45.542357 4558 scope.go:117] "RemoveContainer" containerID="6d8e674d2d833aea6fdf17f48244d761ba7c0a3485ad6276685492e29412ecf5" Jan 20 17:34:45 crc kubenswrapper[4558]: E0120 17:34:45.542871 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6d8e674d2d833aea6fdf17f48244d761ba7c0a3485ad6276685492e29412ecf5\": container with ID starting with 6d8e674d2d833aea6fdf17f48244d761ba7c0a3485ad6276685492e29412ecf5 not found: ID does not exist" containerID="6d8e674d2d833aea6fdf17f48244d761ba7c0a3485ad6276685492e29412ecf5" Jan 20 17:34:45 crc kubenswrapper[4558]: I0120 17:34:45.542927 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d8e674d2d833aea6fdf17f48244d761ba7c0a3485ad6276685492e29412ecf5"} err="failed to get container status \"6d8e674d2d833aea6fdf17f48244d761ba7c0a3485ad6276685492e29412ecf5\": rpc error: code = NotFound desc = could not find container \"6d8e674d2d833aea6fdf17f48244d761ba7c0a3485ad6276685492e29412ecf5\": container with ID starting with 6d8e674d2d833aea6fdf17f48244d761ba7c0a3485ad6276685492e29412ecf5 not found: ID does not exist" Jan 20 17:34:45 crc kubenswrapper[4558]: I0120 17:34:45.563918 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:34:45 crc kubenswrapper[4558]: I0120 17:34:45.574148 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:34:45 crc kubenswrapper[4558]: I0120 17:34:45.579890 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:34:45 crc kubenswrapper[4558]: E0120 17:34:45.580589 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a79b74c-56ea-47f2-aec7-8b63d0a54f3c" containerName="nova-scheduler-scheduler" Jan 20 17:34:45 crc kubenswrapper[4558]: I0120 17:34:45.580614 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a79b74c-56ea-47f2-aec7-8b63d0a54f3c" containerName="nova-scheduler-scheduler" Jan 20 17:34:45 crc kubenswrapper[4558]: I0120 17:34:45.580870 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="6a79b74c-56ea-47f2-aec7-8b63d0a54f3c" containerName="nova-scheduler-scheduler" Jan 20 17:34:45 crc kubenswrapper[4558]: I0120 17:34:45.581669 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:34:45 crc kubenswrapper[4558]: I0120 17:34:45.583888 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-scheduler-config-data" Jan 20 17:34:45 crc kubenswrapper[4558]: I0120 17:34:45.585122 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:34:45 crc kubenswrapper[4558]: I0120 17:34:45.589156 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-api-0" podStartSLOduration=2.589138312 podStartE2EDuration="2.589138312s" podCreationTimestamp="2026-01-20 17:34:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:34:45.55611685 +0000 UTC m=+3179.316454816" watchObservedRunningTime="2026-01-20 17:34:45.589138312 +0000 UTC m=+3179.349476279" Jan 20 17:34:45 crc kubenswrapper[4558]: I0120 17:34:45.678617 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/833337d4-94d4-4bf6-9d0b-5791b9cc115e-config-data\") pod \"nova-scheduler-0\" (UID: \"833337d4-94d4-4bf6-9d0b-5791b9cc115e\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:34:45 crc kubenswrapper[4558]: I0120 17:34:45.678973 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/833337d4-94d4-4bf6-9d0b-5791b9cc115e-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"833337d4-94d4-4bf6-9d0b-5791b9cc115e\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:34:45 crc kubenswrapper[4558]: I0120 17:34:45.679145 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-58rts\" (UniqueName: \"kubernetes.io/projected/833337d4-94d4-4bf6-9d0b-5791b9cc115e-kube-api-access-58rts\") pod \"nova-scheduler-0\" (UID: \"833337d4-94d4-4bf6-9d0b-5791b9cc115e\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:34:45 crc kubenswrapper[4558]: I0120 17:34:45.780508 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/833337d4-94d4-4bf6-9d0b-5791b9cc115e-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"833337d4-94d4-4bf6-9d0b-5791b9cc115e\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:34:45 crc kubenswrapper[4558]: I0120 17:34:45.780570 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-58rts\" (UniqueName: \"kubernetes.io/projected/833337d4-94d4-4bf6-9d0b-5791b9cc115e-kube-api-access-58rts\") pod \"nova-scheduler-0\" (UID: \"833337d4-94d4-4bf6-9d0b-5791b9cc115e\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:34:45 crc kubenswrapper[4558]: I0120 17:34:45.780652 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/833337d4-94d4-4bf6-9d0b-5791b9cc115e-config-data\") pod \"nova-scheduler-0\" (UID: \"833337d4-94d4-4bf6-9d0b-5791b9cc115e\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:34:45 crc kubenswrapper[4558]: I0120 17:34:45.785548 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/833337d4-94d4-4bf6-9d0b-5791b9cc115e-config-data\") pod \"nova-scheduler-0\" (UID: \"833337d4-94d4-4bf6-9d0b-5791b9cc115e\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:34:45 crc kubenswrapper[4558]: I0120 17:34:45.786934 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/833337d4-94d4-4bf6-9d0b-5791b9cc115e-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"833337d4-94d4-4bf6-9d0b-5791b9cc115e\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:34:45 crc kubenswrapper[4558]: I0120 17:34:45.797717 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-58rts\" (UniqueName: \"kubernetes.io/projected/833337d4-94d4-4bf6-9d0b-5791b9cc115e-kube-api-access-58rts\") pod \"nova-scheduler-0\" (UID: \"833337d4-94d4-4bf6-9d0b-5791b9cc115e\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:34:45 crc kubenswrapper[4558]: I0120 17:34:45.900247 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:34:46 crc kubenswrapper[4558]: I0120 17:34:46.312643 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:34:46 crc kubenswrapper[4558]: W0120 17:34:46.320548 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod833337d4_94d4_4bf6_9d0b_5791b9cc115e.slice/crio-f422b0d2120eb762e76d7887687df612b99aa8a3f0d1ecb29f853d086c2939d7 WatchSource:0}: Error finding container f422b0d2120eb762e76d7887687df612b99aa8a3f0d1ecb29f853d086c2939d7: Status 404 returned error can't find the container with id f422b0d2120eb762e76d7887687df612b99aa8a3f0d1ecb29f853d086c2939d7 Jan 20 17:34:46 crc kubenswrapper[4558]: I0120 17:34:46.524238 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"833337d4-94d4-4bf6-9d0b-5791b9cc115e","Type":"ContainerStarted","Data":"b138c24e8875ddd5251012c3048e6574ab595b0a17f136458768b1a33c6227cb"} Jan 20 17:34:46 crc kubenswrapper[4558]: I0120 17:34:46.524469 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"833337d4-94d4-4bf6-9d0b-5791b9cc115e","Type":"ContainerStarted","Data":"f422b0d2120eb762e76d7887687df612b99aa8a3f0d1ecb29f853d086c2939d7"} Jan 20 17:34:46 crc kubenswrapper[4558]: I0120 17:34:46.543229 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-scheduler-0" podStartSLOduration=1.5432104070000001 podStartE2EDuration="1.543210407s" podCreationTimestamp="2026-01-20 17:34:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:34:46.536477987 +0000 UTC m=+3180.296815954" watchObservedRunningTime="2026-01-20 17:34:46.543210407 +0000 UTC m=+3180.303548374" Jan 20 17:34:46 crc kubenswrapper[4558]: I0120 17:34:46.577225 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6a79b74c-56ea-47f2-aec7-8b63d0a54f3c" path="/var/lib/kubelet/pods/6a79b74c-56ea-47f2-aec7-8b63d0a54f3c/volumes" Jan 20 17:34:48 crc kubenswrapper[4558]: I0120 17:34:48.875191 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:34:48 crc kubenswrapper[4558]: I0120 17:34:48.875836 4558 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:34:50 crc kubenswrapper[4558]: I0120 17:34:50.900819 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:34:50 crc kubenswrapper[4558]: I0120 17:34:50.935394 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-qgrgz"] Jan 20 17:34:50 crc kubenswrapper[4558]: I0120 17:34:50.937651 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qgrgz" Jan 20 17:34:50 crc kubenswrapper[4558]: I0120 17:34:50.947551 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qgrgz"] Jan 20 17:34:50 crc kubenswrapper[4558]: I0120 17:34:50.989730 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c5847de-e066-4253-90e7-92d49e25c89d-catalog-content\") pod \"redhat-marketplace-qgrgz\" (UID: \"6c5847de-e066-4253-90e7-92d49e25c89d\") " pod="openshift-marketplace/redhat-marketplace-qgrgz" Jan 20 17:34:50 crc kubenswrapper[4558]: I0120 17:34:50.989778 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c5847de-e066-4253-90e7-92d49e25c89d-utilities\") pod \"redhat-marketplace-qgrgz\" (UID: \"6c5847de-e066-4253-90e7-92d49e25c89d\") " pod="openshift-marketplace/redhat-marketplace-qgrgz" Jan 20 17:34:50 crc kubenswrapper[4558]: I0120 17:34:50.990025 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2zwrv\" (UniqueName: \"kubernetes.io/projected/6c5847de-e066-4253-90e7-92d49e25c89d-kube-api-access-2zwrv\") pod \"redhat-marketplace-qgrgz\" (UID: \"6c5847de-e066-4253-90e7-92d49e25c89d\") " pod="openshift-marketplace/redhat-marketplace-qgrgz" Jan 20 17:34:51 crc kubenswrapper[4558]: I0120 17:34:51.092439 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2zwrv\" (UniqueName: \"kubernetes.io/projected/6c5847de-e066-4253-90e7-92d49e25c89d-kube-api-access-2zwrv\") pod \"redhat-marketplace-qgrgz\" (UID: \"6c5847de-e066-4253-90e7-92d49e25c89d\") " pod="openshift-marketplace/redhat-marketplace-qgrgz" Jan 20 17:34:51 crc kubenswrapper[4558]: I0120 17:34:51.092538 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c5847de-e066-4253-90e7-92d49e25c89d-catalog-content\") pod \"redhat-marketplace-qgrgz\" (UID: \"6c5847de-e066-4253-90e7-92d49e25c89d\") " pod="openshift-marketplace/redhat-marketplace-qgrgz" Jan 20 17:34:51 crc kubenswrapper[4558]: I0120 17:34:51.092567 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c5847de-e066-4253-90e7-92d49e25c89d-utilities\") pod \"redhat-marketplace-qgrgz\" (UID: \"6c5847de-e066-4253-90e7-92d49e25c89d\") " pod="openshift-marketplace/redhat-marketplace-qgrgz" Jan 20 17:34:51 crc kubenswrapper[4558]: I0120 17:34:51.093115 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c5847de-e066-4253-90e7-92d49e25c89d-catalog-content\") pod \"redhat-marketplace-qgrgz\" (UID: 
\"6c5847de-e066-4253-90e7-92d49e25c89d\") " pod="openshift-marketplace/redhat-marketplace-qgrgz" Jan 20 17:34:51 crc kubenswrapper[4558]: I0120 17:34:51.093133 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c5847de-e066-4253-90e7-92d49e25c89d-utilities\") pod \"redhat-marketplace-qgrgz\" (UID: \"6c5847de-e066-4253-90e7-92d49e25c89d\") " pod="openshift-marketplace/redhat-marketplace-qgrgz" Jan 20 17:34:51 crc kubenswrapper[4558]: I0120 17:34:51.112979 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2zwrv\" (UniqueName: \"kubernetes.io/projected/6c5847de-e066-4253-90e7-92d49e25c89d-kube-api-access-2zwrv\") pod \"redhat-marketplace-qgrgz\" (UID: \"6c5847de-e066-4253-90e7-92d49e25c89d\") " pod="openshift-marketplace/redhat-marketplace-qgrgz" Jan 20 17:34:51 crc kubenswrapper[4558]: I0120 17:34:51.255219 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qgrgz" Jan 20 17:34:51 crc kubenswrapper[4558]: I0120 17:34:51.682284 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qgrgz"] Jan 20 17:34:51 crc kubenswrapper[4558]: W0120 17:34:51.689912 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6c5847de_e066_4253_90e7_92d49e25c89d.slice/crio-846082c5fbdd406a41fd06ed774b39a67b89fb41d6c650fc6b2d96bb17da964a WatchSource:0}: Error finding container 846082c5fbdd406a41fd06ed774b39a67b89fb41d6c650fc6b2d96bb17da964a: Status 404 returned error can't find the container with id 846082c5fbdd406a41fd06ed774b39a67b89fb41d6c650fc6b2d96bb17da964a Jan 20 17:34:52 crc kubenswrapper[4558]: I0120 17:34:52.584242 4558 generic.go:334] "Generic (PLEG): container finished" podID="6c5847de-e066-4253-90e7-92d49e25c89d" containerID="aa606070426560a776edfb39f7c84c805e33ec7b742a3c3f51e8bcef25e4e8b2" exitCode=0 Jan 20 17:34:52 crc kubenswrapper[4558]: I0120 17:34:52.584482 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qgrgz" event={"ID":"6c5847de-e066-4253-90e7-92d49e25c89d","Type":"ContainerDied","Data":"aa606070426560a776edfb39f7c84c805e33ec7b742a3c3f51e8bcef25e4e8b2"} Jan 20 17:34:52 crc kubenswrapper[4558]: I0120 17:34:52.584505 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qgrgz" event={"ID":"6c5847de-e066-4253-90e7-92d49e25c89d","Type":"ContainerStarted","Data":"846082c5fbdd406a41fd06ed774b39a67b89fb41d6c650fc6b2d96bb17da964a"} Jan 20 17:34:53 crc kubenswrapper[4558]: I0120 17:34:53.340036 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-56fcp"] Jan 20 17:34:53 crc kubenswrapper[4558]: I0120 17:34:53.342347 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-56fcp" Jan 20 17:34:53 crc kubenswrapper[4558]: I0120 17:34:53.356528 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-56fcp"] Jan 20 17:34:53 crc kubenswrapper[4558]: I0120 17:34:53.539327 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7a80871b-0d64-437f-993f-1aaade8dc094-catalog-content\") pod \"certified-operators-56fcp\" (UID: \"7a80871b-0d64-437f-993f-1aaade8dc094\") " pod="openshift-marketplace/certified-operators-56fcp" Jan 20 17:34:53 crc kubenswrapper[4558]: I0120 17:34:53.539388 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7a80871b-0d64-437f-993f-1aaade8dc094-utilities\") pod \"certified-operators-56fcp\" (UID: \"7a80871b-0d64-437f-993f-1aaade8dc094\") " pod="openshift-marketplace/certified-operators-56fcp" Jan 20 17:34:53 crc kubenswrapper[4558]: I0120 17:34:53.539728 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tkrbn\" (UniqueName: \"kubernetes.io/projected/7a80871b-0d64-437f-993f-1aaade8dc094-kube-api-access-tkrbn\") pod \"certified-operators-56fcp\" (UID: \"7a80871b-0d64-437f-993f-1aaade8dc094\") " pod="openshift-marketplace/certified-operators-56fcp" Jan 20 17:34:53 crc kubenswrapper[4558]: I0120 17:34:53.595424 4558 generic.go:334] "Generic (PLEG): container finished" podID="6c5847de-e066-4253-90e7-92d49e25c89d" containerID="db80031b1c09d9c8183f7fcad63ea7eb684b3691f0b802afecc65c10680ec89a" exitCode=0 Jan 20 17:34:53 crc kubenswrapper[4558]: I0120 17:34:53.595475 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qgrgz" event={"ID":"6c5847de-e066-4253-90e7-92d49e25c89d","Type":"ContainerDied","Data":"db80031b1c09d9c8183f7fcad63ea7eb684b3691f0b802afecc65c10680ec89a"} Jan 20 17:34:53 crc kubenswrapper[4558]: I0120 17:34:53.641824 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7a80871b-0d64-437f-993f-1aaade8dc094-catalog-content\") pod \"certified-operators-56fcp\" (UID: \"7a80871b-0d64-437f-993f-1aaade8dc094\") " pod="openshift-marketplace/certified-operators-56fcp" Jan 20 17:34:53 crc kubenswrapper[4558]: I0120 17:34:53.641899 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7a80871b-0d64-437f-993f-1aaade8dc094-utilities\") pod \"certified-operators-56fcp\" (UID: \"7a80871b-0d64-437f-993f-1aaade8dc094\") " pod="openshift-marketplace/certified-operators-56fcp" Jan 20 17:34:53 crc kubenswrapper[4558]: I0120 17:34:53.641961 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tkrbn\" (UniqueName: \"kubernetes.io/projected/7a80871b-0d64-437f-993f-1aaade8dc094-kube-api-access-tkrbn\") pod \"certified-operators-56fcp\" (UID: \"7a80871b-0d64-437f-993f-1aaade8dc094\") " pod="openshift-marketplace/certified-operators-56fcp" Jan 20 17:34:53 crc kubenswrapper[4558]: I0120 17:34:53.642400 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7a80871b-0d64-437f-993f-1aaade8dc094-catalog-content\") pod \"certified-operators-56fcp\" 
(UID: \"7a80871b-0d64-437f-993f-1aaade8dc094\") " pod="openshift-marketplace/certified-operators-56fcp" Jan 20 17:34:53 crc kubenswrapper[4558]: I0120 17:34:53.642494 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7a80871b-0d64-437f-993f-1aaade8dc094-utilities\") pod \"certified-operators-56fcp\" (UID: \"7a80871b-0d64-437f-993f-1aaade8dc094\") " pod="openshift-marketplace/certified-operators-56fcp" Jan 20 17:34:53 crc kubenswrapper[4558]: I0120 17:34:53.667043 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tkrbn\" (UniqueName: \"kubernetes.io/projected/7a80871b-0d64-437f-993f-1aaade8dc094-kube-api-access-tkrbn\") pod \"certified-operators-56fcp\" (UID: \"7a80871b-0d64-437f-993f-1aaade8dc094\") " pod="openshift-marketplace/certified-operators-56fcp" Jan 20 17:34:53 crc kubenswrapper[4558]: I0120 17:34:53.874920 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:34:53 crc kubenswrapper[4558]: I0120 17:34:53.876514 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:34:53 crc kubenswrapper[4558]: I0120 17:34:53.891904 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:34:53 crc kubenswrapper[4558]: I0120 17:34:53.892474 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:34:53 crc kubenswrapper[4558]: I0120 17:34:53.956839 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-56fcp" Jan 20 17:34:54 crc kubenswrapper[4558]: W0120 17:34:54.389882 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7a80871b_0d64_437f_993f_1aaade8dc094.slice/crio-ca8e8f0e0d9836c8d6ac41df5de8ff54eb4d76c62bf213b51d913e0e2c48c83a WatchSource:0}: Error finding container ca8e8f0e0d9836c8d6ac41df5de8ff54eb4d76c62bf213b51d913e0e2c48c83a: Status 404 returned error can't find the container with id ca8e8f0e0d9836c8d6ac41df5de8ff54eb4d76c62bf213b51d913e0e2c48c83a Jan 20 17:34:54 crc kubenswrapper[4558]: I0120 17:34:54.393625 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-56fcp"] Jan 20 17:34:54 crc kubenswrapper[4558]: I0120 17:34:54.608288 4558 generic.go:334] "Generic (PLEG): container finished" podID="7a80871b-0d64-437f-993f-1aaade8dc094" containerID="d9df8a742e4f3bc4716247814f9ae7a5c23cd9e0f483c6760b841353a6cff095" exitCode=0 Jan 20 17:34:54 crc kubenswrapper[4558]: I0120 17:34:54.608417 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-56fcp" event={"ID":"7a80871b-0d64-437f-993f-1aaade8dc094","Type":"ContainerDied","Data":"d9df8a742e4f3bc4716247814f9ae7a5c23cd9e0f483c6760b841353a6cff095"} Jan 20 17:34:54 crc kubenswrapper[4558]: I0120 17:34:54.608472 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-56fcp" event={"ID":"7a80871b-0d64-437f-993f-1aaade8dc094","Type":"ContainerStarted","Data":"ca8e8f0e0d9836c8d6ac41df5de8ff54eb4d76c62bf213b51d913e0e2c48c83a"} Jan 20 17:34:54 crc kubenswrapper[4558]: I0120 17:34:54.611538 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-marketplace-qgrgz" event={"ID":"6c5847de-e066-4253-90e7-92d49e25c89d","Type":"ContainerStarted","Data":"633c64e7f83d87281707971b314bce72f028946ea7b9b5b9ac06a4a8d71d8a7e"} Jan 20 17:34:54 crc kubenswrapper[4558]: I0120 17:34:54.647329 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-qgrgz" podStartSLOduration=3.071020105 podStartE2EDuration="4.647276059s" podCreationTimestamp="2026-01-20 17:34:50 +0000 UTC" firstStartedPulling="2026-01-20 17:34:52.585677658 +0000 UTC m=+3186.346015625" lastFinishedPulling="2026-01-20 17:34:54.161933612 +0000 UTC m=+3187.922271579" observedRunningTime="2026-01-20 17:34:54.644322425 +0000 UTC m=+3188.404660393" watchObservedRunningTime="2026-01-20 17:34:54.647276059 +0000 UTC m=+3188.407614026" Jan 20 17:34:54 crc kubenswrapper[4558]: I0120 17:34:54.884329 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-metadata-0" podUID="a0122392-1c61-4133-b05f-cc6a622abaf9" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.103:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:34:54 crc kubenswrapper[4558]: I0120 17:34:54.884350 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-metadata-0" podUID="a0122392-1c61-4133-b05f-cc6a622abaf9" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.103:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:34:54 crc kubenswrapper[4558]: I0120 17:34:54.902739 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" podUID="872e7858-d089-4bb5-89aa-060646e32754" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.104:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:34:54 crc kubenswrapper[4558]: I0120 17:34:54.902760 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" podUID="872e7858-d089-4bb5-89aa-060646e32754" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.104:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:34:55 crc kubenswrapper[4558]: I0120 17:34:55.901085 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:34:55 crc kubenswrapper[4558]: I0120 17:34:55.926450 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:34:56 crc kubenswrapper[4558]: I0120 17:34:56.646933 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-56fcp" event={"ID":"7a80871b-0d64-437f-993f-1aaade8dc094","Type":"ContainerStarted","Data":"86c736cefce98fdd5a0ffecf55240dd1cc174990f90c54685358e07e4204e2d3"} Jan 20 17:34:56 crc kubenswrapper[4558]: I0120 17:34:56.675624 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:34:57 crc kubenswrapper[4558]: I0120 17:34:57.660786 4558 generic.go:334] "Generic (PLEG): container finished" podID="7a80871b-0d64-437f-993f-1aaade8dc094" containerID="86c736cefce98fdd5a0ffecf55240dd1cc174990f90c54685358e07e4204e2d3" exitCode=0 Jan 20 17:34:57 crc kubenswrapper[4558]: I0120 17:34:57.660861 
4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-56fcp" event={"ID":"7a80871b-0d64-437f-993f-1aaade8dc094","Type":"ContainerDied","Data":"86c736cefce98fdd5a0ffecf55240dd1cc174990f90c54685358e07e4204e2d3"} Jan 20 17:34:58 crc kubenswrapper[4558]: I0120 17:34:58.685663 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-56fcp" event={"ID":"7a80871b-0d64-437f-993f-1aaade8dc094","Type":"ContainerStarted","Data":"ddd5ee0dce865a97242002cdc9e05ef4f5ae1a7290b54324e80ed0753e3be3db"} Jan 20 17:34:58 crc kubenswrapper[4558]: I0120 17:34:58.707504 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-56fcp" podStartSLOduration=2.142372096 podStartE2EDuration="5.707484328s" podCreationTimestamp="2026-01-20 17:34:53 +0000 UTC" firstStartedPulling="2026-01-20 17:34:54.614323085 +0000 UTC m=+3188.374661052" lastFinishedPulling="2026-01-20 17:34:58.179435317 +0000 UTC m=+3191.939773284" observedRunningTime="2026-01-20 17:34:58.704717095 +0000 UTC m=+3192.465055062" watchObservedRunningTime="2026-01-20 17:34:58.707484328 +0000 UTC m=+3192.467822294" Jan 20 17:35:01 crc kubenswrapper[4558]: I0120 17:35:01.256363 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-qgrgz" Jan 20 17:35:01 crc kubenswrapper[4558]: I0120 17:35:01.256441 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-qgrgz" Jan 20 17:35:01 crc kubenswrapper[4558]: I0120 17:35:01.298224 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-qgrgz" Jan 20 17:35:01 crc kubenswrapper[4558]: I0120 17:35:01.754400 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-qgrgz" Jan 20 17:35:02 crc kubenswrapper[4558]: I0120 17:35:02.727735 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-qgrgz"] Jan 20 17:35:03 crc kubenswrapper[4558]: I0120 17:35:03.730403 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-qgrgz" podUID="6c5847de-e066-4253-90e7-92d49e25c89d" containerName="registry-server" containerID="cri-o://633c64e7f83d87281707971b314bce72f028946ea7b9b5b9ac06a4a8d71d8a7e" gracePeriod=2 Jan 20 17:35:03 crc kubenswrapper[4558]: I0120 17:35:03.881661 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:35:03 crc kubenswrapper[4558]: I0120 17:35:03.883586 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:35:03 crc kubenswrapper[4558]: I0120 17:35:03.888062 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:35:03 crc kubenswrapper[4558]: I0120 17:35:03.898219 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:35:03 crc kubenswrapper[4558]: I0120 17:35:03.898565 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:35:03 crc kubenswrapper[4558]: I0120 17:35:03.902703 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:35:03 crc kubenswrapper[4558]: I0120 17:35:03.910334 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:35:03 crc kubenswrapper[4558]: I0120 17:35:03.957850 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-56fcp" Jan 20 17:35:03 crc kubenswrapper[4558]: I0120 17:35:03.957895 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-56fcp" Jan 20 17:35:04 crc kubenswrapper[4558]: I0120 17:35:04.004063 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-56fcp" Jan 20 17:35:04 crc kubenswrapper[4558]: I0120 17:35:04.137064 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qgrgz" Jan 20 17:35:04 crc kubenswrapper[4558]: I0120 17:35:04.257518 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c5847de-e066-4253-90e7-92d49e25c89d-catalog-content\") pod \"6c5847de-e066-4253-90e7-92d49e25c89d\" (UID: \"6c5847de-e066-4253-90e7-92d49e25c89d\") " Jan 20 17:35:04 crc kubenswrapper[4558]: I0120 17:35:04.257765 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2zwrv\" (UniqueName: \"kubernetes.io/projected/6c5847de-e066-4253-90e7-92d49e25c89d-kube-api-access-2zwrv\") pod \"6c5847de-e066-4253-90e7-92d49e25c89d\" (UID: \"6c5847de-e066-4253-90e7-92d49e25c89d\") " Jan 20 17:35:04 crc kubenswrapper[4558]: I0120 17:35:04.257902 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c5847de-e066-4253-90e7-92d49e25c89d-utilities\") pod \"6c5847de-e066-4253-90e7-92d49e25c89d\" (UID: \"6c5847de-e066-4253-90e7-92d49e25c89d\") " Jan 20 17:35:04 crc kubenswrapper[4558]: I0120 17:35:04.258868 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6c5847de-e066-4253-90e7-92d49e25c89d-utilities" (OuterVolumeSpecName: "utilities") pod "6c5847de-e066-4253-90e7-92d49e25c89d" (UID: "6c5847de-e066-4253-90e7-92d49e25c89d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:35:04 crc kubenswrapper[4558]: I0120 17:35:04.264039 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6c5847de-e066-4253-90e7-92d49e25c89d-kube-api-access-2zwrv" (OuterVolumeSpecName: "kube-api-access-2zwrv") pod "6c5847de-e066-4253-90e7-92d49e25c89d" (UID: "6c5847de-e066-4253-90e7-92d49e25c89d"). InnerVolumeSpecName "kube-api-access-2zwrv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:35:04 crc kubenswrapper[4558]: I0120 17:35:04.274527 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6c5847de-e066-4253-90e7-92d49e25c89d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6c5847de-e066-4253-90e7-92d49e25c89d" (UID: "6c5847de-e066-4253-90e7-92d49e25c89d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:35:04 crc kubenswrapper[4558]: I0120 17:35:04.360273 4558 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6c5847de-e066-4253-90e7-92d49e25c89d-utilities\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:04 crc kubenswrapper[4558]: I0120 17:35:04.360304 4558 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6c5847de-e066-4253-90e7-92d49e25c89d-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:04 crc kubenswrapper[4558]: I0120 17:35:04.360320 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2zwrv\" (UniqueName: \"kubernetes.io/projected/6c5847de-e066-4253-90e7-92d49e25c89d-kube-api-access-2zwrv\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:04 crc kubenswrapper[4558]: I0120 17:35:04.744297 4558 generic.go:334] "Generic (PLEG): container finished" podID="6c5847de-e066-4253-90e7-92d49e25c89d" containerID="633c64e7f83d87281707971b314bce72f028946ea7b9b5b9ac06a4a8d71d8a7e" exitCode=0 Jan 20 17:35:04 crc kubenswrapper[4558]: I0120 17:35:04.744441 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qgrgz" Jan 20 17:35:04 crc kubenswrapper[4558]: I0120 17:35:04.744457 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qgrgz" event={"ID":"6c5847de-e066-4253-90e7-92d49e25c89d","Type":"ContainerDied","Data":"633c64e7f83d87281707971b314bce72f028946ea7b9b5b9ac06a4a8d71d8a7e"} Jan 20 17:35:04 crc kubenswrapper[4558]: I0120 17:35:04.744517 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qgrgz" event={"ID":"6c5847de-e066-4253-90e7-92d49e25c89d","Type":"ContainerDied","Data":"846082c5fbdd406a41fd06ed774b39a67b89fb41d6c650fc6b2d96bb17da964a"} Jan 20 17:35:04 crc kubenswrapper[4558]: I0120 17:35:04.744541 4558 scope.go:117] "RemoveContainer" containerID="633c64e7f83d87281707971b314bce72f028946ea7b9b5b9ac06a4a8d71d8a7e" Jan 20 17:35:04 crc kubenswrapper[4558]: I0120 17:35:04.745404 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:35:04 crc kubenswrapper[4558]: I0120 17:35:04.750842 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:35:04 crc kubenswrapper[4558]: I0120 17:35:04.752869 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:35:04 crc kubenswrapper[4558]: I0120 17:35:04.767810 4558 scope.go:117] "RemoveContainer" containerID="db80031b1c09d9c8183f7fcad63ea7eb684b3691f0b802afecc65c10680ec89a" Jan 20 17:35:04 crc kubenswrapper[4558]: I0120 17:35:04.768992 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-qgrgz"] Jan 20 17:35:04 crc kubenswrapper[4558]: I0120 17:35:04.776727 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-qgrgz"] Jan 20 17:35:04 crc kubenswrapper[4558]: I0120 17:35:04.797257 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-56fcp" Jan 20 17:35:04 crc kubenswrapper[4558]: I0120 17:35:04.807824 4558 scope.go:117] "RemoveContainer" 
containerID="aa606070426560a776edfb39f7c84c805e33ec7b742a3c3f51e8bcef25e4e8b2" Jan 20 17:35:04 crc kubenswrapper[4558]: I0120 17:35:04.850069 4558 scope.go:117] "RemoveContainer" containerID="633c64e7f83d87281707971b314bce72f028946ea7b9b5b9ac06a4a8d71d8a7e" Jan 20 17:35:04 crc kubenswrapper[4558]: E0120 17:35:04.850569 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"633c64e7f83d87281707971b314bce72f028946ea7b9b5b9ac06a4a8d71d8a7e\": container with ID starting with 633c64e7f83d87281707971b314bce72f028946ea7b9b5b9ac06a4a8d71d8a7e not found: ID does not exist" containerID="633c64e7f83d87281707971b314bce72f028946ea7b9b5b9ac06a4a8d71d8a7e" Jan 20 17:35:04 crc kubenswrapper[4558]: I0120 17:35:04.850604 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"633c64e7f83d87281707971b314bce72f028946ea7b9b5b9ac06a4a8d71d8a7e"} err="failed to get container status \"633c64e7f83d87281707971b314bce72f028946ea7b9b5b9ac06a4a8d71d8a7e\": rpc error: code = NotFound desc = could not find container \"633c64e7f83d87281707971b314bce72f028946ea7b9b5b9ac06a4a8d71d8a7e\": container with ID starting with 633c64e7f83d87281707971b314bce72f028946ea7b9b5b9ac06a4a8d71d8a7e not found: ID does not exist" Jan 20 17:35:04 crc kubenswrapper[4558]: I0120 17:35:04.850629 4558 scope.go:117] "RemoveContainer" containerID="db80031b1c09d9c8183f7fcad63ea7eb684b3691f0b802afecc65c10680ec89a" Jan 20 17:35:04 crc kubenswrapper[4558]: E0120 17:35:04.851007 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"db80031b1c09d9c8183f7fcad63ea7eb684b3691f0b802afecc65c10680ec89a\": container with ID starting with db80031b1c09d9c8183f7fcad63ea7eb684b3691f0b802afecc65c10680ec89a not found: ID does not exist" containerID="db80031b1c09d9c8183f7fcad63ea7eb684b3691f0b802afecc65c10680ec89a" Jan 20 17:35:04 crc kubenswrapper[4558]: I0120 17:35:04.851028 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"db80031b1c09d9c8183f7fcad63ea7eb684b3691f0b802afecc65c10680ec89a"} err="failed to get container status \"db80031b1c09d9c8183f7fcad63ea7eb684b3691f0b802afecc65c10680ec89a\": rpc error: code = NotFound desc = could not find container \"db80031b1c09d9c8183f7fcad63ea7eb684b3691f0b802afecc65c10680ec89a\": container with ID starting with db80031b1c09d9c8183f7fcad63ea7eb684b3691f0b802afecc65c10680ec89a not found: ID does not exist" Jan 20 17:35:04 crc kubenswrapper[4558]: I0120 17:35:04.851043 4558 scope.go:117] "RemoveContainer" containerID="aa606070426560a776edfb39f7c84c805e33ec7b742a3c3f51e8bcef25e4e8b2" Jan 20 17:35:04 crc kubenswrapper[4558]: E0120 17:35:04.851420 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aa606070426560a776edfb39f7c84c805e33ec7b742a3c3f51e8bcef25e4e8b2\": container with ID starting with aa606070426560a776edfb39f7c84c805e33ec7b742a3c3f51e8bcef25e4e8b2 not found: ID does not exist" containerID="aa606070426560a776edfb39f7c84c805e33ec7b742a3c3f51e8bcef25e4e8b2" Jan 20 17:35:04 crc kubenswrapper[4558]: I0120 17:35:04.851443 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aa606070426560a776edfb39f7c84c805e33ec7b742a3c3f51e8bcef25e4e8b2"} err="failed to get container status \"aa606070426560a776edfb39f7c84c805e33ec7b742a3c3f51e8bcef25e4e8b2\": rpc error: code = 
NotFound desc = could not find container \"aa606070426560a776edfb39f7c84c805e33ec7b742a3c3f51e8bcef25e4e8b2\": container with ID starting with aa606070426560a776edfb39f7c84c805e33ec7b742a3c3f51e8bcef25e4e8b2 not found: ID does not exist" Jan 20 17:35:05 crc kubenswrapper[4558]: I0120 17:35:05.927002 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-56fcp"] Jan 20 17:35:06 crc kubenswrapper[4558]: I0120 17:35:06.579376 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6c5847de-e066-4253-90e7-92d49e25c89d" path="/var/lib/kubelet/pods/6c5847de-e066-4253-90e7-92d49e25c89d/volumes" Jan 20 17:35:06 crc kubenswrapper[4558]: I0120 17:35:06.751310 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:06 crc kubenswrapper[4558]: I0120 17:35:06.767791 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-56fcp" podUID="7a80871b-0d64-437f-993f-1aaade8dc094" containerName="registry-server" containerID="cri-o://ddd5ee0dce865a97242002cdc9e05ef4f5ae1a7290b54324e80ed0753e3be3db" gracePeriod=2 Jan 20 17:35:07 crc kubenswrapper[4558]: I0120 17:35:07.208063 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-56fcp" Jan 20 17:35:07 crc kubenswrapper[4558]: I0120 17:35:07.325098 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7a80871b-0d64-437f-993f-1aaade8dc094-utilities\") pod \"7a80871b-0d64-437f-993f-1aaade8dc094\" (UID: \"7a80871b-0d64-437f-993f-1aaade8dc094\") " Jan 20 17:35:07 crc kubenswrapper[4558]: I0120 17:35:07.325415 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7a80871b-0d64-437f-993f-1aaade8dc094-catalog-content\") pod \"7a80871b-0d64-437f-993f-1aaade8dc094\" (UID: \"7a80871b-0d64-437f-993f-1aaade8dc094\") " Jan 20 17:35:07 crc kubenswrapper[4558]: I0120 17:35:07.325720 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tkrbn\" (UniqueName: \"kubernetes.io/projected/7a80871b-0d64-437f-993f-1aaade8dc094-kube-api-access-tkrbn\") pod \"7a80871b-0d64-437f-993f-1aaade8dc094\" (UID: \"7a80871b-0d64-437f-993f-1aaade8dc094\") " Jan 20 17:35:07 crc kubenswrapper[4558]: I0120 17:35:07.325754 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7a80871b-0d64-437f-993f-1aaade8dc094-utilities" (OuterVolumeSpecName: "utilities") pod "7a80871b-0d64-437f-993f-1aaade8dc094" (UID: "7a80871b-0d64-437f-993f-1aaade8dc094"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:35:07 crc kubenswrapper[4558]: I0120 17:35:07.326276 4558 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7a80871b-0d64-437f-993f-1aaade8dc094-utilities\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:07 crc kubenswrapper[4558]: I0120 17:35:07.332719 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7a80871b-0d64-437f-993f-1aaade8dc094-kube-api-access-tkrbn" (OuterVolumeSpecName: "kube-api-access-tkrbn") pod "7a80871b-0d64-437f-993f-1aaade8dc094" (UID: "7a80871b-0d64-437f-993f-1aaade8dc094"). 
InnerVolumeSpecName "kube-api-access-tkrbn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:35:07 crc kubenswrapper[4558]: I0120 17:35:07.361538 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7a80871b-0d64-437f-993f-1aaade8dc094-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7a80871b-0d64-437f-993f-1aaade8dc094" (UID: "7a80871b-0d64-437f-993f-1aaade8dc094"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:35:07 crc kubenswrapper[4558]: I0120 17:35:07.428394 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tkrbn\" (UniqueName: \"kubernetes.io/projected/7a80871b-0d64-437f-993f-1aaade8dc094-kube-api-access-tkrbn\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:07 crc kubenswrapper[4558]: I0120 17:35:07.428428 4558 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7a80871b-0d64-437f-993f-1aaade8dc094-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:07 crc kubenswrapper[4558]: I0120 17:35:07.779688 4558 generic.go:334] "Generic (PLEG): container finished" podID="7a80871b-0d64-437f-993f-1aaade8dc094" containerID="ddd5ee0dce865a97242002cdc9e05ef4f5ae1a7290b54324e80ed0753e3be3db" exitCode=0 Jan 20 17:35:07 crc kubenswrapper[4558]: I0120 17:35:07.779736 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-56fcp" event={"ID":"7a80871b-0d64-437f-993f-1aaade8dc094","Type":"ContainerDied","Data":"ddd5ee0dce865a97242002cdc9e05ef4f5ae1a7290b54324e80ed0753e3be3db"} Jan 20 17:35:07 crc kubenswrapper[4558]: I0120 17:35:07.779767 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-56fcp" event={"ID":"7a80871b-0d64-437f-993f-1aaade8dc094","Type":"ContainerDied","Data":"ca8e8f0e0d9836c8d6ac41df5de8ff54eb4d76c62bf213b51d913e0e2c48c83a"} Jan 20 17:35:07 crc kubenswrapper[4558]: I0120 17:35:07.779785 4558 scope.go:117] "RemoveContainer" containerID="ddd5ee0dce865a97242002cdc9e05ef4f5ae1a7290b54324e80ed0753e3be3db" Jan 20 17:35:07 crc kubenswrapper[4558]: I0120 17:35:07.779742 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-56fcp" Jan 20 17:35:07 crc kubenswrapper[4558]: I0120 17:35:07.821467 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-56fcp"] Jan 20 17:35:07 crc kubenswrapper[4558]: I0120 17:35:07.824745 4558 scope.go:117] "RemoveContainer" containerID="86c736cefce98fdd5a0ffecf55240dd1cc174990f90c54685358e07e4204e2d3" Jan 20 17:35:07 crc kubenswrapper[4558]: I0120 17:35:07.829478 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-56fcp"] Jan 20 17:35:07 crc kubenswrapper[4558]: I0120 17:35:07.847854 4558 scope.go:117] "RemoveContainer" containerID="d9df8a742e4f3bc4716247814f9ae7a5c23cd9e0f483c6760b841353a6cff095" Jan 20 17:35:07 crc kubenswrapper[4558]: I0120 17:35:07.880335 4558 scope.go:117] "RemoveContainer" containerID="ddd5ee0dce865a97242002cdc9e05ef4f5ae1a7290b54324e80ed0753e3be3db" Jan 20 17:35:07 crc kubenswrapper[4558]: E0120 17:35:07.880837 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ddd5ee0dce865a97242002cdc9e05ef4f5ae1a7290b54324e80ed0753e3be3db\": container with ID starting with ddd5ee0dce865a97242002cdc9e05ef4f5ae1a7290b54324e80ed0753e3be3db not found: ID does not exist" containerID="ddd5ee0dce865a97242002cdc9e05ef4f5ae1a7290b54324e80ed0753e3be3db" Jan 20 17:35:07 crc kubenswrapper[4558]: I0120 17:35:07.880873 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ddd5ee0dce865a97242002cdc9e05ef4f5ae1a7290b54324e80ed0753e3be3db"} err="failed to get container status \"ddd5ee0dce865a97242002cdc9e05ef4f5ae1a7290b54324e80ed0753e3be3db\": rpc error: code = NotFound desc = could not find container \"ddd5ee0dce865a97242002cdc9e05ef4f5ae1a7290b54324e80ed0753e3be3db\": container with ID starting with ddd5ee0dce865a97242002cdc9e05ef4f5ae1a7290b54324e80ed0753e3be3db not found: ID does not exist" Jan 20 17:35:07 crc kubenswrapper[4558]: I0120 17:35:07.880897 4558 scope.go:117] "RemoveContainer" containerID="86c736cefce98fdd5a0ffecf55240dd1cc174990f90c54685358e07e4204e2d3" Jan 20 17:35:07 crc kubenswrapper[4558]: E0120 17:35:07.881635 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"86c736cefce98fdd5a0ffecf55240dd1cc174990f90c54685358e07e4204e2d3\": container with ID starting with 86c736cefce98fdd5a0ffecf55240dd1cc174990f90c54685358e07e4204e2d3 not found: ID does not exist" containerID="86c736cefce98fdd5a0ffecf55240dd1cc174990f90c54685358e07e4204e2d3" Jan 20 17:35:07 crc kubenswrapper[4558]: I0120 17:35:07.881664 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"86c736cefce98fdd5a0ffecf55240dd1cc174990f90c54685358e07e4204e2d3"} err="failed to get container status \"86c736cefce98fdd5a0ffecf55240dd1cc174990f90c54685358e07e4204e2d3\": rpc error: code = NotFound desc = could not find container \"86c736cefce98fdd5a0ffecf55240dd1cc174990f90c54685358e07e4204e2d3\": container with ID starting with 86c736cefce98fdd5a0ffecf55240dd1cc174990f90c54685358e07e4204e2d3 not found: ID does not exist" Jan 20 17:35:07 crc kubenswrapper[4558]: I0120 17:35:07.881679 4558 scope.go:117] "RemoveContainer" containerID="d9df8a742e4f3bc4716247814f9ae7a5c23cd9e0f483c6760b841353a6cff095" Jan 20 17:35:07 crc kubenswrapper[4558]: E0120 17:35:07.881947 4558 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"d9df8a742e4f3bc4716247814f9ae7a5c23cd9e0f483c6760b841353a6cff095\": container with ID starting with d9df8a742e4f3bc4716247814f9ae7a5c23cd9e0f483c6760b841353a6cff095 not found: ID does not exist" containerID="d9df8a742e4f3bc4716247814f9ae7a5c23cd9e0f483c6760b841353a6cff095" Jan 20 17:35:07 crc kubenswrapper[4558]: I0120 17:35:07.881967 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d9df8a742e4f3bc4716247814f9ae7a5c23cd9e0f483c6760b841353a6cff095"} err="failed to get container status \"d9df8a742e4f3bc4716247814f9ae7a5c23cd9e0f483c6760b841353a6cff095\": rpc error: code = NotFound desc = could not find container \"d9df8a742e4f3bc4716247814f9ae7a5c23cd9e0f483c6760b841353a6cff095\": container with ID starting with d9df8a742e4f3bc4716247814f9ae7a5c23cd9e0f483c6760b841353a6cff095 not found: ID does not exist" Jan 20 17:35:08 crc kubenswrapper[4558]: I0120 17:35:08.575547 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7a80871b-0d64-437f-993f-1aaade8dc094" path="/var/lib/kubelet/pods/7a80871b-0d64-437f-993f-1aaade8dc094/volumes" Jan 20 17:35:09 crc kubenswrapper[4558]: I0120 17:35:09.809714 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:35:09 crc kubenswrapper[4558]: I0120 17:35:09.809956 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/kube-state-metrics-0" podUID="7505236e-4372-4f2b-9b7c-172a1348f818" containerName="kube-state-metrics" containerID="cri-o://d7c483fdbe053d7af01b1d39038ffcbb4a12a0b71bb1b88b7d45655675034466" gracePeriod=30 Jan 20 17:35:10 crc kubenswrapper[4558]: I0120 17:35:10.221567 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:35:10 crc kubenswrapper[4558]: I0120 17:35:10.292817 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6glq8\" (UniqueName: \"kubernetes.io/projected/7505236e-4372-4f2b-9b7c-172a1348f818-kube-api-access-6glq8\") pod \"7505236e-4372-4f2b-9b7c-172a1348f818\" (UID: \"7505236e-4372-4f2b-9b7c-172a1348f818\") " Jan 20 17:35:10 crc kubenswrapper[4558]: I0120 17:35:10.299143 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7505236e-4372-4f2b-9b7c-172a1348f818-kube-api-access-6glq8" (OuterVolumeSpecName: "kube-api-access-6glq8") pod "7505236e-4372-4f2b-9b7c-172a1348f818" (UID: "7505236e-4372-4f2b-9b7c-172a1348f818"). InnerVolumeSpecName "kube-api-access-6glq8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:35:10 crc kubenswrapper[4558]: I0120 17:35:10.394966 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6glq8\" (UniqueName: \"kubernetes.io/projected/7505236e-4372-4f2b-9b7c-172a1348f818-kube-api-access-6glq8\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:10 crc kubenswrapper[4558]: I0120 17:35:10.813308 4558 generic.go:334] "Generic (PLEG): container finished" podID="7505236e-4372-4f2b-9b7c-172a1348f818" containerID="d7c483fdbe053d7af01b1d39038ffcbb4a12a0b71bb1b88b7d45655675034466" exitCode=2 Jan 20 17:35:10 crc kubenswrapper[4558]: I0120 17:35:10.813370 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"7505236e-4372-4f2b-9b7c-172a1348f818","Type":"ContainerDied","Data":"d7c483fdbe053d7af01b1d39038ffcbb4a12a0b71bb1b88b7d45655675034466"} Jan 20 17:35:10 crc kubenswrapper[4558]: I0120 17:35:10.813419 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"7505236e-4372-4f2b-9b7c-172a1348f818","Type":"ContainerDied","Data":"d17e9607c701acd713b927e4018f02455e070b56378384c2b40bb6ab6b925287"} Jan 20 17:35:10 crc kubenswrapper[4558]: I0120 17:35:10.813427 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:35:10 crc kubenswrapper[4558]: I0120 17:35:10.813441 4558 scope.go:117] "RemoveContainer" containerID="d7c483fdbe053d7af01b1d39038ffcbb4a12a0b71bb1b88b7d45655675034466" Jan 20 17:35:10 crc kubenswrapper[4558]: I0120 17:35:10.836621 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:35:10 crc kubenswrapper[4558]: I0120 17:35:10.849634 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:35:10 crc kubenswrapper[4558]: I0120 17:35:10.861786 4558 scope.go:117] "RemoveContainer" containerID="d7c483fdbe053d7af01b1d39038ffcbb4a12a0b71bb1b88b7d45655675034466" Jan 20 17:35:10 crc kubenswrapper[4558]: E0120 17:35:10.862870 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d7c483fdbe053d7af01b1d39038ffcbb4a12a0b71bb1b88b7d45655675034466\": container with ID starting with d7c483fdbe053d7af01b1d39038ffcbb4a12a0b71bb1b88b7d45655675034466 not found: ID does not exist" containerID="d7c483fdbe053d7af01b1d39038ffcbb4a12a0b71bb1b88b7d45655675034466" Jan 20 17:35:10 crc kubenswrapper[4558]: I0120 17:35:10.862924 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d7c483fdbe053d7af01b1d39038ffcbb4a12a0b71bb1b88b7d45655675034466"} err="failed to get container status \"d7c483fdbe053d7af01b1d39038ffcbb4a12a0b71bb1b88b7d45655675034466\": rpc error: code = NotFound desc = could not find container \"d7c483fdbe053d7af01b1d39038ffcbb4a12a0b71bb1b88b7d45655675034466\": container with ID starting with d7c483fdbe053d7af01b1d39038ffcbb4a12a0b71bb1b88b7d45655675034466 not found: ID does not exist" Jan 20 17:35:10 crc kubenswrapper[4558]: I0120 17:35:10.881209 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:35:10 crc kubenswrapper[4558]: E0120 17:35:10.881851 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c5847de-e066-4253-90e7-92d49e25c89d" containerName="registry-server" Jan 20 
17:35:10 crc kubenswrapper[4558]: I0120 17:35:10.881875 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c5847de-e066-4253-90e7-92d49e25c89d" containerName="registry-server" Jan 20 17:35:10 crc kubenswrapper[4558]: E0120 17:35:10.881897 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c5847de-e066-4253-90e7-92d49e25c89d" containerName="extract-utilities" Jan 20 17:35:10 crc kubenswrapper[4558]: I0120 17:35:10.881909 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c5847de-e066-4253-90e7-92d49e25c89d" containerName="extract-utilities" Jan 20 17:35:10 crc kubenswrapper[4558]: E0120 17:35:10.881921 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a80871b-0d64-437f-993f-1aaade8dc094" containerName="extract-utilities" Jan 20 17:35:10 crc kubenswrapper[4558]: I0120 17:35:10.881929 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a80871b-0d64-437f-993f-1aaade8dc094" containerName="extract-utilities" Jan 20 17:35:10 crc kubenswrapper[4558]: E0120 17:35:10.881942 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a80871b-0d64-437f-993f-1aaade8dc094" containerName="registry-server" Jan 20 17:35:10 crc kubenswrapper[4558]: I0120 17:35:10.881949 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a80871b-0d64-437f-993f-1aaade8dc094" containerName="registry-server" Jan 20 17:35:10 crc kubenswrapper[4558]: E0120 17:35:10.881976 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7505236e-4372-4f2b-9b7c-172a1348f818" containerName="kube-state-metrics" Jan 20 17:35:10 crc kubenswrapper[4558]: I0120 17:35:10.881984 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7505236e-4372-4f2b-9b7c-172a1348f818" containerName="kube-state-metrics" Jan 20 17:35:10 crc kubenswrapper[4558]: E0120 17:35:10.881995 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c5847de-e066-4253-90e7-92d49e25c89d" containerName="extract-content" Jan 20 17:35:10 crc kubenswrapper[4558]: I0120 17:35:10.882001 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c5847de-e066-4253-90e7-92d49e25c89d" containerName="extract-content" Jan 20 17:35:10 crc kubenswrapper[4558]: E0120 17:35:10.882036 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a80871b-0d64-437f-993f-1aaade8dc094" containerName="extract-content" Jan 20 17:35:10 crc kubenswrapper[4558]: I0120 17:35:10.882043 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a80871b-0d64-437f-993f-1aaade8dc094" containerName="extract-content" Jan 20 17:35:10 crc kubenswrapper[4558]: I0120 17:35:10.882346 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a80871b-0d64-437f-993f-1aaade8dc094" containerName="registry-server" Jan 20 17:35:10 crc kubenswrapper[4558]: I0120 17:35:10.882369 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="7505236e-4372-4f2b-9b7c-172a1348f818" containerName="kube-state-metrics" Jan 20 17:35:10 crc kubenswrapper[4558]: I0120 17:35:10.882397 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c5847de-e066-4253-90e7-92d49e25c89d" containerName="registry-server" Jan 20 17:35:10 crc kubenswrapper[4558]: I0120 17:35:10.883440 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:35:10 crc kubenswrapper[4558]: I0120 17:35:10.886498 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-kube-state-metrics-svc" Jan 20 17:35:10 crc kubenswrapper[4558]: I0120 17:35:10.892445 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"kube-state-metrics-tls-config" Jan 20 17:35:10 crc kubenswrapper[4558]: I0120 17:35:10.896000 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:35:11 crc kubenswrapper[4558]: I0120 17:35:11.018600 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hwl42\" (UniqueName: \"kubernetes.io/projected/f09a19a5-d5eb-4466-9cb5-309cd246004e-kube-api-access-hwl42\") pod \"kube-state-metrics-0\" (UID: \"f09a19a5-d5eb-4466-9cb5-309cd246004e\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:35:11 crc kubenswrapper[4558]: I0120 17:35:11.018668 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/f09a19a5-d5eb-4466-9cb5-309cd246004e-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"f09a19a5-d5eb-4466-9cb5-309cd246004e\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:35:11 crc kubenswrapper[4558]: I0120 17:35:11.018727 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f09a19a5-d5eb-4466-9cb5-309cd246004e-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"f09a19a5-d5eb-4466-9cb5-309cd246004e\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:35:11 crc kubenswrapper[4558]: I0120 17:35:11.019121 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/f09a19a5-d5eb-4466-9cb5-309cd246004e-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"f09a19a5-d5eb-4466-9cb5-309cd246004e\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:35:11 crc kubenswrapper[4558]: I0120 17:35:11.123749 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/f09a19a5-d5eb-4466-9cb5-309cd246004e-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"f09a19a5-d5eb-4466-9cb5-309cd246004e\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:35:11 crc kubenswrapper[4558]: I0120 17:35:11.124085 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hwl42\" (UniqueName: \"kubernetes.io/projected/f09a19a5-d5eb-4466-9cb5-309cd246004e-kube-api-access-hwl42\") pod \"kube-state-metrics-0\" (UID: \"f09a19a5-d5eb-4466-9cb5-309cd246004e\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:35:11 crc kubenswrapper[4558]: I0120 17:35:11.124191 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/f09a19a5-d5eb-4466-9cb5-309cd246004e-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"f09a19a5-d5eb-4466-9cb5-309cd246004e\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:35:11 crc 
kubenswrapper[4558]: I0120 17:35:11.124259 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f09a19a5-d5eb-4466-9cb5-309cd246004e-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"f09a19a5-d5eb-4466-9cb5-309cd246004e\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:35:11 crc kubenswrapper[4558]: I0120 17:35:11.129947 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f09a19a5-d5eb-4466-9cb5-309cd246004e-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"f09a19a5-d5eb-4466-9cb5-309cd246004e\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:35:11 crc kubenswrapper[4558]: I0120 17:35:11.134209 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/f09a19a5-d5eb-4466-9cb5-309cd246004e-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"f09a19a5-d5eb-4466-9cb5-309cd246004e\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:35:11 crc kubenswrapper[4558]: I0120 17:35:11.140019 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/f09a19a5-d5eb-4466-9cb5-309cd246004e-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"f09a19a5-d5eb-4466-9cb5-309cd246004e\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:35:11 crc kubenswrapper[4558]: I0120 17:35:11.142435 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hwl42\" (UniqueName: \"kubernetes.io/projected/f09a19a5-d5eb-4466-9cb5-309cd246004e-kube-api-access-hwl42\") pod \"kube-state-metrics-0\" (UID: \"f09a19a5-d5eb-4466-9cb5-309cd246004e\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:35:11 crc kubenswrapper[4558]: I0120 17:35:11.209663 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:35:11 crc kubenswrapper[4558]: I0120 17:35:11.413973 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:35:11 crc kubenswrapper[4558]: I0120 17:35:11.414500 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="d393d0ac-0a07-4a10-ab9a-ee4187b06a1a" containerName="ceilometer-central-agent" containerID="cri-o://c28767df71c970ea5e8653da53296b3b86a33c31261c457782efaa5c0c34b9aa" gracePeriod=30 Jan 20 17:35:11 crc kubenswrapper[4558]: I0120 17:35:11.414652 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="d393d0ac-0a07-4a10-ab9a-ee4187b06a1a" containerName="proxy-httpd" containerID="cri-o://37f082e53f69b60d82922862c4aabdec9b3cd30d3ff34d5ac037935a9c1bd638" gracePeriod=30 Jan 20 17:35:11 crc kubenswrapper[4558]: I0120 17:35:11.415003 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="d393d0ac-0a07-4a10-ab9a-ee4187b06a1a" containerName="sg-core" containerID="cri-o://a668618c15e38bf505cbb7753467741c1052b1b1764b31556e8205a83f0c0c3b" gracePeriod=30 Jan 20 17:35:11 crc kubenswrapper[4558]: I0120 17:35:11.414983 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="d393d0ac-0a07-4a10-ab9a-ee4187b06a1a" containerName="ceilometer-notification-agent" containerID="cri-o://885f382b361929255e7062afd4f25e3aac93e29a4f55784a8ee8a90d6b2f53d8" gracePeriod=30 Jan 20 17:35:11 crc kubenswrapper[4558]: I0120 17:35:11.629569 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:35:11 crc kubenswrapper[4558]: W0120 17:35:11.630446 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf09a19a5_d5eb_4466_9cb5_309cd246004e.slice/crio-4fafc9dc1171b3e13b4b66651e32b5c393c05ec4d403b508afbb604d16363071 WatchSource:0}: Error finding container 4fafc9dc1171b3e13b4b66651e32b5c393c05ec4d403b508afbb604d16363071: Status 404 returned error can't find the container with id 4fafc9dc1171b3e13b4b66651e32b5c393c05ec4d403b508afbb604d16363071 Jan 20 17:35:11 crc kubenswrapper[4558]: I0120 17:35:11.828661 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"f09a19a5-d5eb-4466-9cb5-309cd246004e","Type":"ContainerStarted","Data":"4fafc9dc1171b3e13b4b66651e32b5c393c05ec4d403b508afbb604d16363071"} Jan 20 17:35:11 crc kubenswrapper[4558]: I0120 17:35:11.831226 4558 generic.go:334] "Generic (PLEG): container finished" podID="d393d0ac-0a07-4a10-ab9a-ee4187b06a1a" containerID="37f082e53f69b60d82922862c4aabdec9b3cd30d3ff34d5ac037935a9c1bd638" exitCode=0 Jan 20 17:35:11 crc kubenswrapper[4558]: I0120 17:35:11.831260 4558 generic.go:334] "Generic (PLEG): container finished" podID="d393d0ac-0a07-4a10-ab9a-ee4187b06a1a" containerID="a668618c15e38bf505cbb7753467741c1052b1b1764b31556e8205a83f0c0c3b" exitCode=2 Jan 20 17:35:11 crc kubenswrapper[4558]: I0120 17:35:11.831268 4558 generic.go:334] "Generic (PLEG): container finished" podID="d393d0ac-0a07-4a10-ab9a-ee4187b06a1a" containerID="c28767df71c970ea5e8653da53296b3b86a33c31261c457782efaa5c0c34b9aa" exitCode=0 Jan 20 17:35:11 crc kubenswrapper[4558]: I0120 17:35:11.831294 4558 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"d393d0ac-0a07-4a10-ab9a-ee4187b06a1a","Type":"ContainerDied","Data":"37f082e53f69b60d82922862c4aabdec9b3cd30d3ff34d5ac037935a9c1bd638"} Jan 20 17:35:11 crc kubenswrapper[4558]: I0120 17:35:11.831370 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"d393d0ac-0a07-4a10-ab9a-ee4187b06a1a","Type":"ContainerDied","Data":"a668618c15e38bf505cbb7753467741c1052b1b1764b31556e8205a83f0c0c3b"} Jan 20 17:35:11 crc kubenswrapper[4558]: I0120 17:35:11.831384 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"d393d0ac-0a07-4a10-ab9a-ee4187b06a1a","Type":"ContainerDied","Data":"c28767df71c970ea5e8653da53296b3b86a33c31261c457782efaa5c0c34b9aa"} Jan 20 17:35:12 crc kubenswrapper[4558]: I0120 17:35:12.578747 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7505236e-4372-4f2b-9b7c-172a1348f818" path="/var/lib/kubelet/pods/7505236e-4372-4f2b-9b7c-172a1348f818/volumes" Jan 20 17:35:12 crc kubenswrapper[4558]: I0120 17:35:12.855500 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"f09a19a5-d5eb-4466-9cb5-309cd246004e","Type":"ContainerStarted","Data":"e618252041bc2c7c61af3ff12fe2cfdf9bcc601b2bc8675901ba9e059426eff6"} Jan 20 17:35:12 crc kubenswrapper[4558]: I0120 17:35:12.855625 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:35:12 crc kubenswrapper[4558]: I0120 17:35:12.885394 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/kube-state-metrics-0" podStartSLOduration=2.527450884 podStartE2EDuration="2.885374216s" podCreationTimestamp="2026-01-20 17:35:10 +0000 UTC" firstStartedPulling="2026-01-20 17:35:11.633083788 +0000 UTC m=+3205.393421755" lastFinishedPulling="2026-01-20 17:35:11.99100712 +0000 UTC m=+3205.751345087" observedRunningTime="2026-01-20 17:35:12.878996153 +0000 UTC m=+3206.639334120" watchObservedRunningTime="2026-01-20 17:35:12.885374216 +0000 UTC m=+3206.645712183" Jan 20 17:35:13 crc kubenswrapper[4558]: I0120 17:35:13.884501 4558 generic.go:334] "Generic (PLEG): container finished" podID="d393d0ac-0a07-4a10-ab9a-ee4187b06a1a" containerID="885f382b361929255e7062afd4f25e3aac93e29a4f55784a8ee8a90d6b2f53d8" exitCode=0 Jan 20 17:35:13 crc kubenswrapper[4558]: I0120 17:35:13.884598 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"d393d0ac-0a07-4a10-ab9a-ee4187b06a1a","Type":"ContainerDied","Data":"885f382b361929255e7062afd4f25e3aac93e29a4f55784a8ee8a90d6b2f53d8"} Jan 20 17:35:13 crc kubenswrapper[4558]: I0120 17:35:13.885383 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"d393d0ac-0a07-4a10-ab9a-ee4187b06a1a","Type":"ContainerDied","Data":"3795ac64306831ca35f8eec37e400102938a138c8b3af7101862f037d96dab39"} Jan 20 17:35:13 crc kubenswrapper[4558]: I0120 17:35:13.885404 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3795ac64306831ca35f8eec37e400102938a138c8b3af7101862f037d96dab39" Jan 20 17:35:13 crc kubenswrapper[4558]: I0120 17:35:13.901393 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:13 crc kubenswrapper[4558]: I0120 17:35:13.989411 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d393d0ac-0a07-4a10-ab9a-ee4187b06a1a-combined-ca-bundle\") pod \"d393d0ac-0a07-4a10-ab9a-ee4187b06a1a\" (UID: \"d393d0ac-0a07-4a10-ab9a-ee4187b06a1a\") " Jan 20 17:35:13 crc kubenswrapper[4558]: I0120 17:35:13.989553 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d393d0ac-0a07-4a10-ab9a-ee4187b06a1a-log-httpd\") pod \"d393d0ac-0a07-4a10-ab9a-ee4187b06a1a\" (UID: \"d393d0ac-0a07-4a10-ab9a-ee4187b06a1a\") " Jan 20 17:35:13 crc kubenswrapper[4558]: I0120 17:35:13.990089 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d393d0ac-0a07-4a10-ab9a-ee4187b06a1a-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "d393d0ac-0a07-4a10-ab9a-ee4187b06a1a" (UID: "d393d0ac-0a07-4a10-ab9a-ee4187b06a1a"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:35:13 crc kubenswrapper[4558]: I0120 17:35:13.990255 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qhr8s\" (UniqueName: \"kubernetes.io/projected/d393d0ac-0a07-4a10-ab9a-ee4187b06a1a-kube-api-access-qhr8s\") pod \"d393d0ac-0a07-4a10-ab9a-ee4187b06a1a\" (UID: \"d393d0ac-0a07-4a10-ab9a-ee4187b06a1a\") " Jan 20 17:35:13 crc kubenswrapper[4558]: I0120 17:35:13.990378 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d393d0ac-0a07-4a10-ab9a-ee4187b06a1a-config-data\") pod \"d393d0ac-0a07-4a10-ab9a-ee4187b06a1a\" (UID: \"d393d0ac-0a07-4a10-ab9a-ee4187b06a1a\") " Jan 20 17:35:13 crc kubenswrapper[4558]: I0120 17:35:13.990414 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d393d0ac-0a07-4a10-ab9a-ee4187b06a1a-sg-core-conf-yaml\") pod \"d393d0ac-0a07-4a10-ab9a-ee4187b06a1a\" (UID: \"d393d0ac-0a07-4a10-ab9a-ee4187b06a1a\") " Jan 20 17:35:13 crc kubenswrapper[4558]: I0120 17:35:13.990486 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d393d0ac-0a07-4a10-ab9a-ee4187b06a1a-scripts\") pod \"d393d0ac-0a07-4a10-ab9a-ee4187b06a1a\" (UID: \"d393d0ac-0a07-4a10-ab9a-ee4187b06a1a\") " Jan 20 17:35:13 crc kubenswrapper[4558]: I0120 17:35:13.990506 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d393d0ac-0a07-4a10-ab9a-ee4187b06a1a-run-httpd\") pod \"d393d0ac-0a07-4a10-ab9a-ee4187b06a1a\" (UID: \"d393d0ac-0a07-4a10-ab9a-ee4187b06a1a\") " Jan 20 17:35:13 crc kubenswrapper[4558]: I0120 17:35:13.991615 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d393d0ac-0a07-4a10-ab9a-ee4187b06a1a-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:13 crc kubenswrapper[4558]: I0120 17:35:13.992465 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d393d0ac-0a07-4a10-ab9a-ee4187b06a1a-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "d393d0ac-0a07-4a10-ab9a-ee4187b06a1a" (UID: "d393d0ac-0a07-4a10-ab9a-ee4187b06a1a"). 
InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:35:13 crc kubenswrapper[4558]: I0120 17:35:13.996404 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d393d0ac-0a07-4a10-ab9a-ee4187b06a1a-kube-api-access-qhr8s" (OuterVolumeSpecName: "kube-api-access-qhr8s") pod "d393d0ac-0a07-4a10-ab9a-ee4187b06a1a" (UID: "d393d0ac-0a07-4a10-ab9a-ee4187b06a1a"). InnerVolumeSpecName "kube-api-access-qhr8s". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:35:14 crc kubenswrapper[4558]: I0120 17:35:14.008873 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d393d0ac-0a07-4a10-ab9a-ee4187b06a1a-scripts" (OuterVolumeSpecName: "scripts") pod "d393d0ac-0a07-4a10-ab9a-ee4187b06a1a" (UID: "d393d0ac-0a07-4a10-ab9a-ee4187b06a1a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:14 crc kubenswrapper[4558]: I0120 17:35:14.020353 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d393d0ac-0a07-4a10-ab9a-ee4187b06a1a-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "d393d0ac-0a07-4a10-ab9a-ee4187b06a1a" (UID: "d393d0ac-0a07-4a10-ab9a-ee4187b06a1a"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:14 crc kubenswrapper[4558]: I0120 17:35:14.066579 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d393d0ac-0a07-4a10-ab9a-ee4187b06a1a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d393d0ac-0a07-4a10-ab9a-ee4187b06a1a" (UID: "d393d0ac-0a07-4a10-ab9a-ee4187b06a1a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:14 crc kubenswrapper[4558]: I0120 17:35:14.093030 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qhr8s\" (UniqueName: \"kubernetes.io/projected/d393d0ac-0a07-4a10-ab9a-ee4187b06a1a-kube-api-access-qhr8s\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:14 crc kubenswrapper[4558]: I0120 17:35:14.093056 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d393d0ac-0a07-4a10-ab9a-ee4187b06a1a-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:14 crc kubenswrapper[4558]: I0120 17:35:14.093067 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d393d0ac-0a07-4a10-ab9a-ee4187b06a1a-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:14 crc kubenswrapper[4558]: I0120 17:35:14.093076 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d393d0ac-0a07-4a10-ab9a-ee4187b06a1a-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:14 crc kubenswrapper[4558]: I0120 17:35:14.093086 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d393d0ac-0a07-4a10-ab9a-ee4187b06a1a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:14 crc kubenswrapper[4558]: I0120 17:35:14.094109 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d393d0ac-0a07-4a10-ab9a-ee4187b06a1a-config-data" (OuterVolumeSpecName: "config-data") pod "d393d0ac-0a07-4a10-ab9a-ee4187b06a1a" (UID: "d393d0ac-0a07-4a10-ab9a-ee4187b06a1a"). 
InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:14 crc kubenswrapper[4558]: I0120 17:35:14.195446 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d393d0ac-0a07-4a10-ab9a-ee4187b06a1a-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:14 crc kubenswrapper[4558]: I0120 17:35:14.896325 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:14 crc kubenswrapper[4558]: I0120 17:35:14.919252 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:35:14 crc kubenswrapper[4558]: I0120 17:35:14.930126 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:35:14 crc kubenswrapper[4558]: I0120 17:35:14.949852 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:35:14 crc kubenswrapper[4558]: E0120 17:35:14.950318 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d393d0ac-0a07-4a10-ab9a-ee4187b06a1a" containerName="ceilometer-central-agent" Jan 20 17:35:14 crc kubenswrapper[4558]: I0120 17:35:14.950341 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d393d0ac-0a07-4a10-ab9a-ee4187b06a1a" containerName="ceilometer-central-agent" Jan 20 17:35:14 crc kubenswrapper[4558]: E0120 17:35:14.950350 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d393d0ac-0a07-4a10-ab9a-ee4187b06a1a" containerName="sg-core" Jan 20 17:35:14 crc kubenswrapper[4558]: I0120 17:35:14.950358 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d393d0ac-0a07-4a10-ab9a-ee4187b06a1a" containerName="sg-core" Jan 20 17:35:14 crc kubenswrapper[4558]: E0120 17:35:14.950380 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d393d0ac-0a07-4a10-ab9a-ee4187b06a1a" containerName="proxy-httpd" Jan 20 17:35:14 crc kubenswrapper[4558]: I0120 17:35:14.950386 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d393d0ac-0a07-4a10-ab9a-ee4187b06a1a" containerName="proxy-httpd" Jan 20 17:35:14 crc kubenswrapper[4558]: E0120 17:35:14.950394 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d393d0ac-0a07-4a10-ab9a-ee4187b06a1a" containerName="ceilometer-notification-agent" Jan 20 17:35:14 crc kubenswrapper[4558]: I0120 17:35:14.950400 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d393d0ac-0a07-4a10-ab9a-ee4187b06a1a" containerName="ceilometer-notification-agent" Jan 20 17:35:14 crc kubenswrapper[4558]: I0120 17:35:14.951357 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d393d0ac-0a07-4a10-ab9a-ee4187b06a1a" containerName="proxy-httpd" Jan 20 17:35:14 crc kubenswrapper[4558]: I0120 17:35:14.951381 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d393d0ac-0a07-4a10-ab9a-ee4187b06a1a" containerName="sg-core" Jan 20 17:35:14 crc kubenswrapper[4558]: I0120 17:35:14.951392 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d393d0ac-0a07-4a10-ab9a-ee4187b06a1a" containerName="ceilometer-notification-agent" Jan 20 17:35:14 crc kubenswrapper[4558]: I0120 17:35:14.951407 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d393d0ac-0a07-4a10-ab9a-ee4187b06a1a" containerName="ceilometer-central-agent" Jan 20 17:35:14 crc kubenswrapper[4558]: I0120 17:35:14.953201 4558 util.go:30] "No sandbox for pod can be 
found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:14 crc kubenswrapper[4558]: I0120 17:35:14.958954 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:35:14 crc kubenswrapper[4558]: I0120 17:35:14.959183 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:35:14 crc kubenswrapper[4558]: I0120 17:35:14.959452 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ceilometer-internal-svc" Jan 20 17:35:14 crc kubenswrapper[4558]: I0120 17:35:14.974554 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:35:15 crc kubenswrapper[4558]: I0120 17:35:15.013080 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b9d9b3dd-ab88-4de7-9176-26d0f37316c5-run-httpd\") pod \"ceilometer-0\" (UID: \"b9d9b3dd-ab88-4de7-9176-26d0f37316c5\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:15 crc kubenswrapper[4558]: I0120 17:35:15.013122 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b9d9b3dd-ab88-4de7-9176-26d0f37316c5-scripts\") pod \"ceilometer-0\" (UID: \"b9d9b3dd-ab88-4de7-9176-26d0f37316c5\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:15 crc kubenswrapper[4558]: I0120 17:35:15.013151 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9d9b3dd-ab88-4de7-9176-26d0f37316c5-config-data\") pod \"ceilometer-0\" (UID: \"b9d9b3dd-ab88-4de7-9176-26d0f37316c5\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:15 crc kubenswrapper[4558]: I0120 17:35:15.013208 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b9d9b3dd-ab88-4de7-9176-26d0f37316c5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b9d9b3dd-ab88-4de7-9176-26d0f37316c5\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:15 crc kubenswrapper[4558]: I0120 17:35:15.013433 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b9d9b3dd-ab88-4de7-9176-26d0f37316c5-log-httpd\") pod \"ceilometer-0\" (UID: \"b9d9b3dd-ab88-4de7-9176-26d0f37316c5\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:15 crc kubenswrapper[4558]: I0120 17:35:15.013521 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mvt72\" (UniqueName: \"kubernetes.io/projected/b9d9b3dd-ab88-4de7-9176-26d0f37316c5-kube-api-access-mvt72\") pod \"ceilometer-0\" (UID: \"b9d9b3dd-ab88-4de7-9176-26d0f37316c5\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:15 crc kubenswrapper[4558]: I0120 17:35:15.013589 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/b9d9b3dd-ab88-4de7-9176-26d0f37316c5-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"b9d9b3dd-ab88-4de7-9176-26d0f37316c5\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:15 crc kubenswrapper[4558]: I0120 17:35:15.013695 4558 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9d9b3dd-ab88-4de7-9176-26d0f37316c5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b9d9b3dd-ab88-4de7-9176-26d0f37316c5\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:15 crc kubenswrapper[4558]: I0120 17:35:15.115814 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b9d9b3dd-ab88-4de7-9176-26d0f37316c5-run-httpd\") pod \"ceilometer-0\" (UID: \"b9d9b3dd-ab88-4de7-9176-26d0f37316c5\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:15 crc kubenswrapper[4558]: I0120 17:35:15.115865 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b9d9b3dd-ab88-4de7-9176-26d0f37316c5-scripts\") pod \"ceilometer-0\" (UID: \"b9d9b3dd-ab88-4de7-9176-26d0f37316c5\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:15 crc kubenswrapper[4558]: I0120 17:35:15.115918 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9d9b3dd-ab88-4de7-9176-26d0f37316c5-config-data\") pod \"ceilometer-0\" (UID: \"b9d9b3dd-ab88-4de7-9176-26d0f37316c5\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:15 crc kubenswrapper[4558]: I0120 17:35:15.115991 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b9d9b3dd-ab88-4de7-9176-26d0f37316c5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b9d9b3dd-ab88-4de7-9176-26d0f37316c5\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:15 crc kubenswrapper[4558]: I0120 17:35:15.116075 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b9d9b3dd-ab88-4de7-9176-26d0f37316c5-log-httpd\") pod \"ceilometer-0\" (UID: \"b9d9b3dd-ab88-4de7-9176-26d0f37316c5\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:15 crc kubenswrapper[4558]: I0120 17:35:15.116112 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mvt72\" (UniqueName: \"kubernetes.io/projected/b9d9b3dd-ab88-4de7-9176-26d0f37316c5-kube-api-access-mvt72\") pod \"ceilometer-0\" (UID: \"b9d9b3dd-ab88-4de7-9176-26d0f37316c5\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:15 crc kubenswrapper[4558]: I0120 17:35:15.116151 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/b9d9b3dd-ab88-4de7-9176-26d0f37316c5-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"b9d9b3dd-ab88-4de7-9176-26d0f37316c5\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:15 crc kubenswrapper[4558]: I0120 17:35:15.116205 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9d9b3dd-ab88-4de7-9176-26d0f37316c5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b9d9b3dd-ab88-4de7-9176-26d0f37316c5\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:15 crc kubenswrapper[4558]: I0120 17:35:15.116307 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b9d9b3dd-ab88-4de7-9176-26d0f37316c5-run-httpd\") pod \"ceilometer-0\" (UID: 
\"b9d9b3dd-ab88-4de7-9176-26d0f37316c5\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:15 crc kubenswrapper[4558]: I0120 17:35:15.117820 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b9d9b3dd-ab88-4de7-9176-26d0f37316c5-log-httpd\") pod \"ceilometer-0\" (UID: \"b9d9b3dd-ab88-4de7-9176-26d0f37316c5\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:15 crc kubenswrapper[4558]: I0120 17:35:15.122664 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/b9d9b3dd-ab88-4de7-9176-26d0f37316c5-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"b9d9b3dd-ab88-4de7-9176-26d0f37316c5\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:15 crc kubenswrapper[4558]: I0120 17:35:15.124009 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9d9b3dd-ab88-4de7-9176-26d0f37316c5-config-data\") pod \"ceilometer-0\" (UID: \"b9d9b3dd-ab88-4de7-9176-26d0f37316c5\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:15 crc kubenswrapper[4558]: I0120 17:35:15.124792 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b9d9b3dd-ab88-4de7-9176-26d0f37316c5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b9d9b3dd-ab88-4de7-9176-26d0f37316c5\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:15 crc kubenswrapper[4558]: I0120 17:35:15.125726 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9d9b3dd-ab88-4de7-9176-26d0f37316c5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b9d9b3dd-ab88-4de7-9176-26d0f37316c5\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:15 crc kubenswrapper[4558]: I0120 17:35:15.127046 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b9d9b3dd-ab88-4de7-9176-26d0f37316c5-scripts\") pod \"ceilometer-0\" (UID: \"b9d9b3dd-ab88-4de7-9176-26d0f37316c5\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:15 crc kubenswrapper[4558]: I0120 17:35:15.131731 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mvt72\" (UniqueName: \"kubernetes.io/projected/b9d9b3dd-ab88-4de7-9176-26d0f37316c5-kube-api-access-mvt72\") pod \"ceilometer-0\" (UID: \"b9d9b3dd-ab88-4de7-9176-26d0f37316c5\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:15 crc kubenswrapper[4558]: I0120 17:35:15.276823 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:15 crc kubenswrapper[4558]: I0120 17:35:15.715781 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:35:15 crc kubenswrapper[4558]: I0120 17:35:15.731085 4558 scope.go:117] "RemoveContainer" containerID="ce4c4a8dc51cc8a0747c9a283ff63624944b94308dde57af4f43cc0ee489bfaa" Jan 20 17:35:15 crc kubenswrapper[4558]: I0120 17:35:15.760398 4558 scope.go:117] "RemoveContainer" containerID="f2a87a3274b85da1b912ddda9d9c24218552ae2474139c4df992c4937f8ad8bb" Jan 20 17:35:15 crc kubenswrapper[4558]: I0120 17:35:15.773968 4558 scope.go:117] "RemoveContainer" containerID="9277589ed5b4239b74d19c171dcaaa83b3f9fc262243f6f1c2b59156f314f5d8" Jan 20 17:35:15 crc kubenswrapper[4558]: I0120 17:35:15.909817 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"b9d9b3dd-ab88-4de7-9176-26d0f37316c5","Type":"ContainerStarted","Data":"2f91d48b5bee8bf34bf7bcc372fcd223b53bea549f07ae364e3c4b83a55b5521"} Jan 20 17:35:16 crc kubenswrapper[4558]: I0120 17:35:16.592095 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d393d0ac-0a07-4a10-ab9a-ee4187b06a1a" path="/var/lib/kubelet/pods/d393d0ac-0a07-4a10-ab9a-ee4187b06a1a/volumes" Jan 20 17:35:16 crc kubenswrapper[4558]: I0120 17:35:16.933714 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"b9d9b3dd-ab88-4de7-9176-26d0f37316c5","Type":"ContainerStarted","Data":"4731e15eb33f9d7ef47c235ab8bf0bf5e078da54679e25a286db416ef1024765"} Jan 20 17:35:17 crc kubenswrapper[4558]: I0120 17:35:17.957584 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"b9d9b3dd-ab88-4de7-9176-26d0f37316c5","Type":"ContainerStarted","Data":"f82f341bf9593ff0a595b29c75c685e0ea8f432820b9a8fa0036ae5fd0ac0676"} Jan 20 17:35:18 crc kubenswrapper[4558]: I0120 17:35:18.966797 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"b9d9b3dd-ab88-4de7-9176-26d0f37316c5","Type":"ContainerStarted","Data":"4cd61fbb091f93563313226e5a4a9e8bacd968ce5f506d798f29e62045d70876"} Jan 20 17:35:19 crc kubenswrapper[4558]: I0120 17:35:19.976827 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"b9d9b3dd-ab88-4de7-9176-26d0f37316c5","Type":"ContainerStarted","Data":"cd093ad0a62335ae0f0d6459708d68848864df2a42f2f720194e2e8f2fdd8b3f"} Jan 20 17:35:19 crc kubenswrapper[4558]: I0120 17:35:19.977041 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:19 crc kubenswrapper[4558]: I0120 17:35:19.993415 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=2.238771754 podStartE2EDuration="5.993397844s" podCreationTimestamp="2026-01-20 17:35:14 +0000 UTC" firstStartedPulling="2026-01-20 17:35:15.723261762 +0000 UTC m=+3209.483599729" lastFinishedPulling="2026-01-20 17:35:19.477887852 +0000 UTC m=+3213.238225819" observedRunningTime="2026-01-20 17:35:19.991254185 +0000 UTC m=+3213.751592151" watchObservedRunningTime="2026-01-20 17:35:19.993397844 +0000 UTC m=+3213.753735812" Jan 20 17:35:21 crc kubenswrapper[4558]: I0120 17:35:21.223211 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 
20 17:35:23 crc kubenswrapper[4558]: I0120 17:35:23.218542 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:35:23 crc kubenswrapper[4558]: I0120 17:35:23.336512 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/openstack-galera-0" podUID="5e0a76a1-125c-47f2-a903-b5c8ce8e5277" containerName="galera" containerID="cri-o://aaa3870751af8f0c4b7878382a9021d1b0d86ba2fbbf69032fcd3bacd83d2d92" gracePeriod=30 Jan 20 17:35:23 crc kubenswrapper[4558]: I0120 17:35:23.452811 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:35:23 crc kubenswrapper[4558]: I0120 17:35:23.453047 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="b9d9b3dd-ab88-4de7-9176-26d0f37316c5" containerName="ceilometer-central-agent" containerID="cri-o://4731e15eb33f9d7ef47c235ab8bf0bf5e078da54679e25a286db416ef1024765" gracePeriod=30 Jan 20 17:35:23 crc kubenswrapper[4558]: I0120 17:35:23.453255 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="b9d9b3dd-ab88-4de7-9176-26d0f37316c5" containerName="proxy-httpd" containerID="cri-o://cd093ad0a62335ae0f0d6459708d68848864df2a42f2f720194e2e8f2fdd8b3f" gracePeriod=30 Jan 20 17:35:23 crc kubenswrapper[4558]: I0120 17:35:23.453330 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="b9d9b3dd-ab88-4de7-9176-26d0f37316c5" containerName="ceilometer-notification-agent" containerID="cri-o://f82f341bf9593ff0a595b29c75c685e0ea8f432820b9a8fa0036ae5fd0ac0676" gracePeriod=30 Jan 20 17:35:23 crc kubenswrapper[4558]: I0120 17:35:23.453432 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="b9d9b3dd-ab88-4de7-9176-26d0f37316c5" containerName="sg-core" containerID="cri-o://4cd61fbb091f93563313226e5a4a9e8bacd968ce5f506d798f29e62045d70876" gracePeriod=30 Jan 20 17:35:23 crc kubenswrapper[4558]: I0120 17:35:23.580494 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:35:23 crc kubenswrapper[4558]: I0120 17:35:23.692703 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/openstack-cell1-galera-0" podUID="57872fb7-1988-4da4-b8b9-eee9c5e8c827" containerName="galera" containerID="cri-o://d8e9ed0ae69bd19b53807e767c4bcd9fcd074682e879fd040455a50aac23dd93" gracePeriod=30 Jan 20 17:35:23 crc kubenswrapper[4558]: E0120 17:35:23.763430 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="aaa3870751af8f0c4b7878382a9021d1b0d86ba2fbbf69032fcd3bacd83d2d92" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Jan 20 17:35:23 crc kubenswrapper[4558]: E0120 17:35:23.764806 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="aaa3870751af8f0c4b7878382a9021d1b0d86ba2fbbf69032fcd3bacd83d2d92" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Jan 20 17:35:23 crc kubenswrapper[4558]: E0120 17:35:23.766179 4558 
log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="aaa3870751af8f0c4b7878382a9021d1b0d86ba2fbbf69032fcd3bacd83d2d92" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Jan 20 17:35:23 crc kubenswrapper[4558]: E0120 17:35:23.766231 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/openstack-galera-0" podUID="5e0a76a1-125c-47f2-a903-b5c8ce8e5277" containerName="galera" Jan 20 17:35:23 crc kubenswrapper[4558]: I0120 17:35:23.848778 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack-kuttl-tests/cinder-scheduler-0" podUID="4bd47eac-4090-4fc1-91c4-553ebd84964d" containerName="cinder-scheduler" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 20 17:35:23 crc kubenswrapper[4558]: I0120 17:35:23.984008 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:35:23 crc kubenswrapper[4558]: I0120 17:35:23.984291 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/memcached-0" podUID="568f7066-e99f-45b2-aa41-28c808fb27b8" containerName="memcached" containerID="cri-o://0b26ed4dfd437ec4fd0415ae7cb52ebc72347c26385495a0c7ac90e3a3686790" gracePeriod=30 Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.034759 4558 generic.go:334] "Generic (PLEG): container finished" podID="b9d9b3dd-ab88-4de7-9176-26d0f37316c5" containerID="cd093ad0a62335ae0f0d6459708d68848864df2a42f2f720194e2e8f2fdd8b3f" exitCode=0 Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.034793 4558 generic.go:334] "Generic (PLEG): container finished" podID="b9d9b3dd-ab88-4de7-9176-26d0f37316c5" containerID="4cd61fbb091f93563313226e5a4a9e8bacd968ce5f506d798f29e62045d70876" exitCode=2 Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.034801 4558 generic.go:334] "Generic (PLEG): container finished" podID="b9d9b3dd-ab88-4de7-9176-26d0f37316c5" containerID="f82f341bf9593ff0a595b29c75c685e0ea8f432820b9a8fa0036ae5fd0ac0676" exitCode=0 Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.034809 4558 generic.go:334] "Generic (PLEG): container finished" podID="b9d9b3dd-ab88-4de7-9176-26d0f37316c5" containerID="4731e15eb33f9d7ef47c235ab8bf0bf5e078da54679e25a286db416ef1024765" exitCode=0 Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.034831 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"b9d9b3dd-ab88-4de7-9176-26d0f37316c5","Type":"ContainerDied","Data":"cd093ad0a62335ae0f0d6459708d68848864df2a42f2f720194e2e8f2fdd8b3f"} Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.034870 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"b9d9b3dd-ab88-4de7-9176-26d0f37316c5","Type":"ContainerDied","Data":"4cd61fbb091f93563313226e5a4a9e8bacd968ce5f506d798f29e62045d70876"} Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.034880 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"b9d9b3dd-ab88-4de7-9176-26d0f37316c5","Type":"ContainerDied","Data":"f82f341bf9593ff0a595b29c75c685e0ea8f432820b9a8fa0036ae5fd0ac0676"} Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 
17:35:24.034889 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"b9d9b3dd-ab88-4de7-9176-26d0f37316c5","Type":"ContainerDied","Data":"4731e15eb33f9d7ef47c235ab8bf0bf5e078da54679e25a286db416ef1024765"} Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.136176 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.229150 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b9d9b3dd-ab88-4de7-9176-26d0f37316c5-run-httpd\") pod \"b9d9b3dd-ab88-4de7-9176-26d0f37316c5\" (UID: \"b9d9b3dd-ab88-4de7-9176-26d0f37316c5\") " Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.229259 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b9d9b3dd-ab88-4de7-9176-26d0f37316c5-log-httpd\") pod \"b9d9b3dd-ab88-4de7-9176-26d0f37316c5\" (UID: \"b9d9b3dd-ab88-4de7-9176-26d0f37316c5\") " Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.229368 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b9d9b3dd-ab88-4de7-9176-26d0f37316c5-scripts\") pod \"b9d9b3dd-ab88-4de7-9176-26d0f37316c5\" (UID: \"b9d9b3dd-ab88-4de7-9176-26d0f37316c5\") " Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.229416 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/b9d9b3dd-ab88-4de7-9176-26d0f37316c5-ceilometer-tls-certs\") pod \"b9d9b3dd-ab88-4de7-9176-26d0f37316c5\" (UID: \"b9d9b3dd-ab88-4de7-9176-26d0f37316c5\") " Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.229444 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9d9b3dd-ab88-4de7-9176-26d0f37316c5-config-data\") pod \"b9d9b3dd-ab88-4de7-9176-26d0f37316c5\" (UID: \"b9d9b3dd-ab88-4de7-9176-26d0f37316c5\") " Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.229461 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9d9b3dd-ab88-4de7-9176-26d0f37316c5-combined-ca-bundle\") pod \"b9d9b3dd-ab88-4de7-9176-26d0f37316c5\" (UID: \"b9d9b3dd-ab88-4de7-9176-26d0f37316c5\") " Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.229531 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mvt72\" (UniqueName: \"kubernetes.io/projected/b9d9b3dd-ab88-4de7-9176-26d0f37316c5-kube-api-access-mvt72\") pod \"b9d9b3dd-ab88-4de7-9176-26d0f37316c5\" (UID: \"b9d9b3dd-ab88-4de7-9176-26d0f37316c5\") " Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.229599 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b9d9b3dd-ab88-4de7-9176-26d0f37316c5-sg-core-conf-yaml\") pod \"b9d9b3dd-ab88-4de7-9176-26d0f37316c5\" (UID: \"b9d9b3dd-ab88-4de7-9176-26d0f37316c5\") " Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.236146 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b9d9b3dd-ab88-4de7-9176-26d0f37316c5-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "b9d9b3dd-ab88-4de7-9176-26d0f37316c5" 
(UID: "b9d9b3dd-ab88-4de7-9176-26d0f37316c5"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.236522 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b9d9b3dd-ab88-4de7-9176-26d0f37316c5-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "b9d9b3dd-ab88-4de7-9176-26d0f37316c5" (UID: "b9d9b3dd-ab88-4de7-9176-26d0f37316c5"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.239938 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9d9b3dd-ab88-4de7-9176-26d0f37316c5-scripts" (OuterVolumeSpecName: "scripts") pod "b9d9b3dd-ab88-4de7-9176-26d0f37316c5" (UID: "b9d9b3dd-ab88-4de7-9176-26d0f37316c5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.245767 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b9d9b3dd-ab88-4de7-9176-26d0f37316c5-kube-api-access-mvt72" (OuterVolumeSpecName: "kube-api-access-mvt72") pod "b9d9b3dd-ab88-4de7-9176-26d0f37316c5" (UID: "b9d9b3dd-ab88-4de7-9176-26d0f37316c5"). InnerVolumeSpecName "kube-api-access-mvt72". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.264325 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9d9b3dd-ab88-4de7-9176-26d0f37316c5-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "b9d9b3dd-ab88-4de7-9176-26d0f37316c5" (UID: "b9d9b3dd-ab88-4de7-9176-26d0f37316c5"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.318358 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9d9b3dd-ab88-4de7-9176-26d0f37316c5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b9d9b3dd-ab88-4de7-9176-26d0f37316c5" (UID: "b9d9b3dd-ab88-4de7-9176-26d0f37316c5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.322724 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9d9b3dd-ab88-4de7-9176-26d0f37316c5-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "b9d9b3dd-ab88-4de7-9176-26d0f37316c5" (UID: "b9d9b3dd-ab88-4de7-9176-26d0f37316c5"). InnerVolumeSpecName "ceilometer-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.332463 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b9d9b3dd-ab88-4de7-9176-26d0f37316c5-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.332497 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b9d9b3dd-ab88-4de7-9176-26d0f37316c5-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.332506 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b9d9b3dd-ab88-4de7-9176-26d0f37316c5-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.332520 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b9d9b3dd-ab88-4de7-9176-26d0f37316c5-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.332529 4558 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/b9d9b3dd-ab88-4de7-9176-26d0f37316c5-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.332542 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9d9b3dd-ab88-4de7-9176-26d0f37316c5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.332555 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mvt72\" (UniqueName: \"kubernetes.io/projected/b9d9b3dd-ab88-4de7-9176-26d0f37316c5-kube-api-access-mvt72\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.338396 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9d9b3dd-ab88-4de7-9176-26d0f37316c5-config-data" (OuterVolumeSpecName: "config-data") pod "b9d9b3dd-ab88-4de7-9176-26d0f37316c5" (UID: "b9d9b3dd-ab88-4de7-9176-26d0f37316c5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.339455 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.433600 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/5e0a76a1-125c-47f2-a903-b5c8ce8e5277-config-data-generated\") pod \"5e0a76a1-125c-47f2-a903-b5c8ce8e5277\" (UID: \"5e0a76a1-125c-47f2-a903-b5c8ce8e5277\") " Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.433658 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/5e0a76a1-125c-47f2-a903-b5c8ce8e5277-config-data-default\") pod \"5e0a76a1-125c-47f2-a903-b5c8ce8e5277\" (UID: \"5e0a76a1-125c-47f2-a903-b5c8ce8e5277\") " Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.433686 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e0a76a1-125c-47f2-a903-b5c8ce8e5277-combined-ca-bundle\") pod \"5e0a76a1-125c-47f2-a903-b5c8ce8e5277\" (UID: \"5e0a76a1-125c-47f2-a903-b5c8ce8e5277\") " Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.434105 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5e0a76a1-125c-47f2-a903-b5c8ce8e5277-operator-scripts\") pod \"5e0a76a1-125c-47f2-a903-b5c8ce8e5277\" (UID: \"5e0a76a1-125c-47f2-a903-b5c8ce8e5277\") " Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.434136 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"5e0a76a1-125c-47f2-a903-b5c8ce8e5277\" (UID: \"5e0a76a1-125c-47f2-a903-b5c8ce8e5277\") " Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.434158 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5jck4\" (UniqueName: \"kubernetes.io/projected/5e0a76a1-125c-47f2-a903-b5c8ce8e5277-kube-api-access-5jck4\") pod \"5e0a76a1-125c-47f2-a903-b5c8ce8e5277\" (UID: \"5e0a76a1-125c-47f2-a903-b5c8ce8e5277\") " Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.434242 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5e0a76a1-125c-47f2-a903-b5c8ce8e5277-kolla-config\") pod \"5e0a76a1-125c-47f2-a903-b5c8ce8e5277\" (UID: \"5e0a76a1-125c-47f2-a903-b5c8ce8e5277\") " Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.434278 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/5e0a76a1-125c-47f2-a903-b5c8ce8e5277-galera-tls-certs\") pod \"5e0a76a1-125c-47f2-a903-b5c8ce8e5277\" (UID: \"5e0a76a1-125c-47f2-a903-b5c8ce8e5277\") " Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.434043 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5e0a76a1-125c-47f2-a903-b5c8ce8e5277-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "5e0a76a1-125c-47f2-a903-b5c8ce8e5277" (UID: "5e0a76a1-125c-47f2-a903-b5c8ce8e5277"). InnerVolumeSpecName "config-data-generated". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.434628 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5e0a76a1-125c-47f2-a903-b5c8ce8e5277-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "5e0a76a1-125c-47f2-a903-b5c8ce8e5277" (UID: "5e0a76a1-125c-47f2-a903-b5c8ce8e5277"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.434679 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5e0a76a1-125c-47f2-a903-b5c8ce8e5277-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "5e0a76a1-125c-47f2-a903-b5c8ce8e5277" (UID: "5e0a76a1-125c-47f2-a903-b5c8ce8e5277"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.434702 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9d9b3dd-ab88-4de7-9176-26d0f37316c5-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.434716 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/5e0a76a1-125c-47f2-a903-b5c8ce8e5277-config-data-generated\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.435283 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5e0a76a1-125c-47f2-a903-b5c8ce8e5277-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "5e0a76a1-125c-47f2-a903-b5c8ce8e5277" (UID: "5e0a76a1-125c-47f2-a903-b5c8ce8e5277"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.437953 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5e0a76a1-125c-47f2-a903-b5c8ce8e5277-kube-api-access-5jck4" (OuterVolumeSpecName: "kube-api-access-5jck4") pod "5e0a76a1-125c-47f2-a903-b5c8ce8e5277" (UID: "5e0a76a1-125c-47f2-a903-b5c8ce8e5277"). InnerVolumeSpecName "kube-api-access-5jck4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.447697 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage02-crc" (OuterVolumeSpecName: "mysql-db") pod "5e0a76a1-125c-47f2-a903-b5c8ce8e5277" (UID: "5e0a76a1-125c-47f2-a903-b5c8ce8e5277"). InnerVolumeSpecName "local-storage02-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.461318 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5e0a76a1-125c-47f2-a903-b5c8ce8e5277-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5e0a76a1-125c-47f2-a903-b5c8ce8e5277" (UID: "5e0a76a1-125c-47f2-a903-b5c8ce8e5277"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.472518 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5e0a76a1-125c-47f2-a903-b5c8ce8e5277-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "5e0a76a1-125c-47f2-a903-b5c8ce8e5277" (UID: "5e0a76a1-125c-47f2-a903-b5c8ce8e5277"). InnerVolumeSpecName "galera-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.498148 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.540159 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57872fb7-1988-4da4-b8b9-eee9c5e8c827-combined-ca-bundle\") pod \"57872fb7-1988-4da4-b8b9-eee9c5e8c827\" (UID: \"57872fb7-1988-4da4-b8b9-eee9c5e8c827\") " Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.540306 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/57872fb7-1988-4da4-b8b9-eee9c5e8c827-galera-tls-certs\") pod \"57872fb7-1988-4da4-b8b9-eee9c5e8c827\" (UID: \"57872fb7-1988-4da4-b8b9-eee9c5e8c827\") " Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.540344 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wtxm4\" (UniqueName: \"kubernetes.io/projected/57872fb7-1988-4da4-b8b9-eee9c5e8c827-kube-api-access-wtxm4\") pod \"57872fb7-1988-4da4-b8b9-eee9c5e8c827\" (UID: \"57872fb7-1988-4da4-b8b9-eee9c5e8c827\") " Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.540369 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/57872fb7-1988-4da4-b8b9-eee9c5e8c827-kolla-config\") pod \"57872fb7-1988-4da4-b8b9-eee9c5e8c827\" (UID: \"57872fb7-1988-4da4-b8b9-eee9c5e8c827\") " Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.540395 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/57872fb7-1988-4da4-b8b9-eee9c5e8c827-operator-scripts\") pod \"57872fb7-1988-4da4-b8b9-eee9c5e8c827\" (UID: \"57872fb7-1988-4da4-b8b9-eee9c5e8c827\") " Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.540473 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/57872fb7-1988-4da4-b8b9-eee9c5e8c827-config-data-generated\") pod \"57872fb7-1988-4da4-b8b9-eee9c5e8c827\" (UID: \"57872fb7-1988-4da4-b8b9-eee9c5e8c827\") " Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.540599 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/57872fb7-1988-4da4-b8b9-eee9c5e8c827-config-data-default\") pod \"57872fb7-1988-4da4-b8b9-eee9c5e8c827\" (UID: \"57872fb7-1988-4da4-b8b9-eee9c5e8c827\") " Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.540683 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"57872fb7-1988-4da4-b8b9-eee9c5e8c827\" (UID: \"57872fb7-1988-4da4-b8b9-eee9c5e8c827\") " Jan 20 17:35:24 crc 
kubenswrapper[4558]: I0120 17:35:24.541441 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5e0a76a1-125c-47f2-a903-b5c8ce8e5277-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.541470 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" " Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.541482 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5jck4\" (UniqueName: \"kubernetes.io/projected/5e0a76a1-125c-47f2-a903-b5c8ce8e5277-kube-api-access-5jck4\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.541493 4558 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5e0a76a1-125c-47f2-a903-b5c8ce8e5277-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.541503 4558 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/5e0a76a1-125c-47f2-a903-b5c8ce8e5277-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.541512 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/5e0a76a1-125c-47f2-a903-b5c8ce8e5277-config-data-default\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.541520 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e0a76a1-125c-47f2-a903-b5c8ce8e5277-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.542465 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/57872fb7-1988-4da4-b8b9-eee9c5e8c827-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "57872fb7-1988-4da4-b8b9-eee9c5e8c827" (UID: "57872fb7-1988-4da4-b8b9-eee9c5e8c827"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.547758 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57872fb7-1988-4da4-b8b9-eee9c5e8c827-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "57872fb7-1988-4da4-b8b9-eee9c5e8c827" (UID: "57872fb7-1988-4da4-b8b9-eee9c5e8c827"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.547843 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/57872fb7-1988-4da4-b8b9-eee9c5e8c827-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "57872fb7-1988-4da4-b8b9-eee9c5e8c827" (UID: "57872fb7-1988-4da4-b8b9-eee9c5e8c827"). InnerVolumeSpecName "config-data-default". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.548375 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/57872fb7-1988-4da4-b8b9-eee9c5e8c827-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "57872fb7-1988-4da4-b8b9-eee9c5e8c827" (UID: "57872fb7-1988-4da4-b8b9-eee9c5e8c827"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.560723 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57872fb7-1988-4da4-b8b9-eee9c5e8c827-kube-api-access-wtxm4" (OuterVolumeSpecName: "kube-api-access-wtxm4") pod "57872fb7-1988-4da4-b8b9-eee9c5e8c827" (UID: "57872fb7-1988-4da4-b8b9-eee9c5e8c827"). InnerVolumeSpecName "kube-api-access-wtxm4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.564057 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage02-crc" (UniqueName: "kubernetes.io/local-volume/local-storage02-crc") on node "crc" Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.580025 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/57872fb7-1988-4da4-b8b9-eee9c5e8c827-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "57872fb7-1988-4da4-b8b9-eee9c5e8c827" (UID: "57872fb7-1988-4da4-b8b9-eee9c5e8c827"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.592184 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage06-crc" (OuterVolumeSpecName: "mysql-db") pod "57872fb7-1988-4da4-b8b9-eee9c5e8c827" (UID: "57872fb7-1988-4da4-b8b9-eee9c5e8c827"). InnerVolumeSpecName "local-storage06-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.596611 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/57872fb7-1988-4da4-b8b9-eee9c5e8c827-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "57872fb7-1988-4da4-b8b9-eee9c5e8c827" (UID: "57872fb7-1988-4da4-b8b9-eee9c5e8c827"). InnerVolumeSpecName "galera-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.643826 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/57872fb7-1988-4da4-b8b9-eee9c5e8c827-config-data-generated\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.643872 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/57872fb7-1988-4da4-b8b9-eee9c5e8c827-config-data-default\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.643903 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" " Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.643921 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57872fb7-1988-4da4-b8b9-eee9c5e8c827-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.643933 4558 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/57872fb7-1988-4da4-b8b9-eee9c5e8c827-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.643945 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wtxm4\" (UniqueName: \"kubernetes.io/projected/57872fb7-1988-4da4-b8b9-eee9c5e8c827-kube-api-access-wtxm4\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.643956 4558 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/57872fb7-1988-4da4-b8b9-eee9c5e8c827-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.643969 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.643982 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/57872fb7-1988-4da4-b8b9-eee9c5e8c827-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.673494 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage06-crc" (UniqueName: "kubernetes.io/local-volume/local-storage06-crc") on node "crc" Jan 20 17:35:24 crc kubenswrapper[4558]: I0120 17:35:24.744953 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.010890 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.044365 4558 generic.go:334] "Generic (PLEG): container finished" podID="5e0a76a1-125c-47f2-a903-b5c8ce8e5277" containerID="aaa3870751af8f0c4b7878382a9021d1b0d86ba2fbbf69032fcd3bacd83d2d92" exitCode=0 Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.044433 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"5e0a76a1-125c-47f2-a903-b5c8ce8e5277","Type":"ContainerDied","Data":"aaa3870751af8f0c4b7878382a9021d1b0d86ba2fbbf69032fcd3bacd83d2d92"} Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.044468 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"5e0a76a1-125c-47f2-a903-b5c8ce8e5277","Type":"ContainerDied","Data":"5cb005d56635787d08a647fb641db00f33241027dae1261587298498379f014b"} Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.044485 4558 scope.go:117] "RemoveContainer" containerID="aaa3870751af8f0c4b7878382a9021d1b0d86ba2fbbf69032fcd3bacd83d2d92" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.044643 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.047426 4558 generic.go:334] "Generic (PLEG): container finished" podID="568f7066-e99f-45b2-aa41-28c808fb27b8" containerID="0b26ed4dfd437ec4fd0415ae7cb52ebc72347c26385495a0c7ac90e3a3686790" exitCode=0 Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.047466 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"568f7066-e99f-45b2-aa41-28c808fb27b8","Type":"ContainerDied","Data":"0b26ed4dfd437ec4fd0415ae7cb52ebc72347c26385495a0c7ac90e3a3686790"} Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.047482 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"568f7066-e99f-45b2-aa41-28c808fb27b8","Type":"ContainerDied","Data":"8adabdd93232b90e30c1a677f57f38211dad7fec2224573f6fbfa669870559a9"} Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.047516 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.051893 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.051912 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"b9d9b3dd-ab88-4de7-9176-26d0f37316c5","Type":"ContainerDied","Data":"2f91d48b5bee8bf34bf7bcc372fcd223b53bea549f07ae364e3c4b83a55b5521"} Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.053737 4558 generic.go:334] "Generic (PLEG): container finished" podID="57872fb7-1988-4da4-b8b9-eee9c5e8c827" containerID="d8e9ed0ae69bd19b53807e767c4bcd9fcd074682e879fd040455a50aac23dd93" exitCode=0 Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.053791 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"57872fb7-1988-4da4-b8b9-eee9c5e8c827","Type":"ContainerDied","Data":"d8e9ed0ae69bd19b53807e767c4bcd9fcd074682e879fd040455a50aac23dd93"} Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.053832 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.053851 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"57872fb7-1988-4da4-b8b9-eee9c5e8c827","Type":"ContainerDied","Data":"18c8cf1d30438d409b1f52f4aebc69b25a65d800d6cdaa24d524a6e89415459d"} Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.082557 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.088959 4558 scope.go:117] "RemoveContainer" containerID="74a82b2cb87f85950d65db39375572be31ea2b3a88bbbb153f941cdc471461ac" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.090201 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.122724 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.150042 4558 scope.go:117] "RemoveContainer" containerID="aaa3870751af8f0c4b7878382a9021d1b0d86ba2fbbf69032fcd3bacd83d2d92" Jan 20 17:35:25 crc kubenswrapper[4558]: E0120 17:35:25.151057 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aaa3870751af8f0c4b7878382a9021d1b0d86ba2fbbf69032fcd3bacd83d2d92\": container with ID starting with aaa3870751af8f0c4b7878382a9021d1b0d86ba2fbbf69032fcd3bacd83d2d92 not found: ID does not exist" containerID="aaa3870751af8f0c4b7878382a9021d1b0d86ba2fbbf69032fcd3bacd83d2d92" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.151096 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aaa3870751af8f0c4b7878382a9021d1b0d86ba2fbbf69032fcd3bacd83d2d92"} err="failed to get container status \"aaa3870751af8f0c4b7878382a9021d1b0d86ba2fbbf69032fcd3bacd83d2d92\": rpc error: code = NotFound desc = could not find container \"aaa3870751af8f0c4b7878382a9021d1b0d86ba2fbbf69032fcd3bacd83d2d92\": container with ID starting with aaa3870751af8f0c4b7878382a9021d1b0d86ba2fbbf69032fcd3bacd83d2d92 not found: ID does not exist" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.151120 4558 scope.go:117] "RemoveContainer" containerID="74a82b2cb87f85950d65db39375572be31ea2b3a88bbbb153f941cdc471461ac" 
Jan 20 17:35:25 crc kubenswrapper[4558]: E0120 17:35:25.151536 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"74a82b2cb87f85950d65db39375572be31ea2b3a88bbbb153f941cdc471461ac\": container with ID starting with 74a82b2cb87f85950d65db39375572be31ea2b3a88bbbb153f941cdc471461ac not found: ID does not exist" containerID="74a82b2cb87f85950d65db39375572be31ea2b3a88bbbb153f941cdc471461ac" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.151556 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"74a82b2cb87f85950d65db39375572be31ea2b3a88bbbb153f941cdc471461ac"} err="failed to get container status \"74a82b2cb87f85950d65db39375572be31ea2b3a88bbbb153f941cdc471461ac\": rpc error: code = NotFound desc = could not find container \"74a82b2cb87f85950d65db39375572be31ea2b3a88bbbb153f941cdc471461ac\": container with ID starting with 74a82b2cb87f85950d65db39375572be31ea2b3a88bbbb153f941cdc471461ac not found: ID does not exist" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.151569 4558 scope.go:117] "RemoveContainer" containerID="0b26ed4dfd437ec4fd0415ae7cb52ebc72347c26385495a0c7ac90e3a3686790" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.156227 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/568f7066-e99f-45b2-aa41-28c808fb27b8-combined-ca-bundle\") pod \"568f7066-e99f-45b2-aa41-28c808fb27b8\" (UID: \"568f7066-e99f-45b2-aa41-28c808fb27b8\") " Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.156301 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/568f7066-e99f-45b2-aa41-28c808fb27b8-memcached-tls-certs\") pod \"568f7066-e99f-45b2-aa41-28c808fb27b8\" (UID: \"568f7066-e99f-45b2-aa41-28c808fb27b8\") " Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.156337 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/568f7066-e99f-45b2-aa41-28c808fb27b8-config-data\") pod \"568f7066-e99f-45b2-aa41-28c808fb27b8\" (UID: \"568f7066-e99f-45b2-aa41-28c808fb27b8\") " Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.156513 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-67q4s\" (UniqueName: \"kubernetes.io/projected/568f7066-e99f-45b2-aa41-28c808fb27b8-kube-api-access-67q4s\") pod \"568f7066-e99f-45b2-aa41-28c808fb27b8\" (UID: \"568f7066-e99f-45b2-aa41-28c808fb27b8\") " Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.156586 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/568f7066-e99f-45b2-aa41-28c808fb27b8-kolla-config\") pod \"568f7066-e99f-45b2-aa41-28c808fb27b8\" (UID: \"568f7066-e99f-45b2-aa41-28c808fb27b8\") " Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.160012 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/568f7066-e99f-45b2-aa41-28c808fb27b8-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "568f7066-e99f-45b2-aa41-28c808fb27b8" (UID: "568f7066-e99f-45b2-aa41-28c808fb27b8"). InnerVolumeSpecName "kolla-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.160626 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/568f7066-e99f-45b2-aa41-28c808fb27b8-config-data" (OuterVolumeSpecName: "config-data") pod "568f7066-e99f-45b2-aa41-28c808fb27b8" (UID: "568f7066-e99f-45b2-aa41-28c808fb27b8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.161844 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.177708 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/568f7066-e99f-45b2-aa41-28c808fb27b8-kube-api-access-67q4s" (OuterVolumeSpecName: "kube-api-access-67q4s") pod "568f7066-e99f-45b2-aa41-28c808fb27b8" (UID: "568f7066-e99f-45b2-aa41-28c808fb27b8"). InnerVolumeSpecName "kube-api-access-67q4s". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.181611 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:35:25 crc kubenswrapper[4558]: E0120 17:35:25.182395 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9d9b3dd-ab88-4de7-9176-26d0f37316c5" containerName="sg-core" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.182505 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9d9b3dd-ab88-4de7-9176-26d0f37316c5" containerName="sg-core" Jan 20 17:35:25 crc kubenswrapper[4558]: E0120 17:35:25.182598 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57872fb7-1988-4da4-b8b9-eee9c5e8c827" containerName="galera" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.182674 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="57872fb7-1988-4da4-b8b9-eee9c5e8c827" containerName="galera" Jan 20 17:35:25 crc kubenswrapper[4558]: E0120 17:35:25.182762 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9d9b3dd-ab88-4de7-9176-26d0f37316c5" containerName="proxy-httpd" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.182842 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9d9b3dd-ab88-4de7-9176-26d0f37316c5" containerName="proxy-httpd" Jan 20 17:35:25 crc kubenswrapper[4558]: E0120 17:35:25.182917 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e0a76a1-125c-47f2-a903-b5c8ce8e5277" containerName="galera" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.182972 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e0a76a1-125c-47f2-a903-b5c8ce8e5277" containerName="galera" Jan 20 17:35:25 crc kubenswrapper[4558]: E0120 17:35:25.183045 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e0a76a1-125c-47f2-a903-b5c8ce8e5277" containerName="mysql-bootstrap" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.183110 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e0a76a1-125c-47f2-a903-b5c8ce8e5277" containerName="mysql-bootstrap" Jan 20 17:35:25 crc kubenswrapper[4558]: E0120 17:35:25.183183 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="568f7066-e99f-45b2-aa41-28c808fb27b8" containerName="memcached" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.184115 4558 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="568f7066-e99f-45b2-aa41-28c808fb27b8" containerName="memcached" Jan 20 17:35:25 crc kubenswrapper[4558]: E0120 17:35:25.184227 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57872fb7-1988-4da4-b8b9-eee9c5e8c827" containerName="mysql-bootstrap" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.184293 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="57872fb7-1988-4da4-b8b9-eee9c5e8c827" containerName="mysql-bootstrap" Jan 20 17:35:25 crc kubenswrapper[4558]: E0120 17:35:25.184352 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9d9b3dd-ab88-4de7-9176-26d0f37316c5" containerName="ceilometer-central-agent" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.184396 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9d9b3dd-ab88-4de7-9176-26d0f37316c5" containerName="ceilometer-central-agent" Jan 20 17:35:25 crc kubenswrapper[4558]: E0120 17:35:25.184483 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9d9b3dd-ab88-4de7-9176-26d0f37316c5" containerName="ceilometer-notification-agent" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.184563 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9d9b3dd-ab88-4de7-9176-26d0f37316c5" containerName="ceilometer-notification-agent" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.184856 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9d9b3dd-ab88-4de7-9176-26d0f37316c5" containerName="sg-core" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.184957 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9d9b3dd-ab88-4de7-9176-26d0f37316c5" containerName="proxy-httpd" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.185037 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="5e0a76a1-125c-47f2-a903-b5c8ce8e5277" containerName="galera" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.185114 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="568f7066-e99f-45b2-aa41-28c808fb27b8" containerName="memcached" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.185199 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9d9b3dd-ab88-4de7-9176-26d0f37316c5" containerName="ceilometer-notification-agent" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.185283 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="57872fb7-1988-4da4-b8b9-eee9c5e8c827" containerName="galera" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.185362 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9d9b3dd-ab88-4de7-9176-26d0f37316c5" containerName="ceilometer-central-agent" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.189353 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.193118 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-config-data" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.193487 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"galera-openstack-dockercfg-7ct58" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.193639 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-galera-openstack-svc" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.193777 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-scripts" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.195410 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.201945 4558 scope.go:117] "RemoveContainer" containerID="0b26ed4dfd437ec4fd0415ae7cb52ebc72347c26385495a0c7ac90e3a3686790" Jan 20 17:35:25 crc kubenswrapper[4558]: E0120 17:35:25.203305 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0b26ed4dfd437ec4fd0415ae7cb52ebc72347c26385495a0c7ac90e3a3686790\": container with ID starting with 0b26ed4dfd437ec4fd0415ae7cb52ebc72347c26385495a0c7ac90e3a3686790 not found: ID does not exist" containerID="0b26ed4dfd437ec4fd0415ae7cb52ebc72347c26385495a0c7ac90e3a3686790" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.203355 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0b26ed4dfd437ec4fd0415ae7cb52ebc72347c26385495a0c7ac90e3a3686790"} err="failed to get container status \"0b26ed4dfd437ec4fd0415ae7cb52ebc72347c26385495a0c7ac90e3a3686790\": rpc error: code = NotFound desc = could not find container \"0b26ed4dfd437ec4fd0415ae7cb52ebc72347c26385495a0c7ac90e3a3686790\": container with ID starting with 0b26ed4dfd437ec4fd0415ae7cb52ebc72347c26385495a0c7ac90e3a3686790 not found: ID does not exist" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.203376 4558 scope.go:117] "RemoveContainer" containerID="cd093ad0a62335ae0f0d6459708d68848864df2a42f2f720194e2e8f2fdd8b3f" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.204361 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/568f7066-e99f-45b2-aa41-28c808fb27b8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "568f7066-e99f-45b2-aa41-28c808fb27b8" (UID: "568f7066-e99f-45b2-aa41-28c808fb27b8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.213077 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/568f7066-e99f-45b2-aa41-28c808fb27b8-memcached-tls-certs" (OuterVolumeSpecName: "memcached-tls-certs") pod "568f7066-e99f-45b2-aa41-28c808fb27b8" (UID: "568f7066-e99f-45b2-aa41-28c808fb27b8"). InnerVolumeSpecName "memcached-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.215762 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.224579 4558 scope.go:117] "RemoveContainer" containerID="4cd61fbb091f93563313226e5a4a9e8bacd968ce5f506d798f29e62045d70876" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.237152 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.240393 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.242598 4558 scope.go:117] "RemoveContainer" containerID="f82f341bf9593ff0a595b29c75c685e0ea8f432820b9a8fa0036ae5fd0ac0676" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.242778 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ceilometer-internal-svc" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.242850 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.242778 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.246461 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.260618 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/b1ce826a-c52c-42c6-8b67-6074b78c9fb7-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"b1ce826a-c52c-42c6-8b67-6074b78c9fb7\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.260736 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/b1ce826a-c52c-42c6-8b67-6074b78c9fb7-config-data-default\") pod \"openstack-galera-0\" (UID: \"b1ce826a-c52c-42c6-8b67-6074b78c9fb7\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.260848 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t2l69\" (UniqueName: \"kubernetes.io/projected/b1ce826a-c52c-42c6-8b67-6074b78c9fb7-kube-api-access-t2l69\") pod \"openstack-galera-0\" (UID: \"b1ce826a-c52c-42c6-8b67-6074b78c9fb7\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.260949 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/b1ce826a-c52c-42c6-8b67-6074b78c9fb7-kolla-config\") pod \"openstack-galera-0\" (UID: \"b1ce826a-c52c-42c6-8b67-6074b78c9fb7\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.261029 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b1ce826a-c52c-42c6-8b67-6074b78c9fb7-operator-scripts\") pod 
\"openstack-galera-0\" (UID: \"b1ce826a-c52c-42c6-8b67-6074b78c9fb7\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.261122 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1ce826a-c52c-42c6-8b67-6074b78c9fb7-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"b1ce826a-c52c-42c6-8b67-6074b78c9fb7\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.261647 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/b1ce826a-c52c-42c6-8b67-6074b78c9fb7-config-data-generated\") pod \"openstack-galera-0\" (UID: \"b1ce826a-c52c-42c6-8b67-6074b78c9fb7\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.261757 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-0\" (UID: \"b1ce826a-c52c-42c6-8b67-6074b78c9fb7\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.261870 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/568f7066-e99f-45b2-aa41-28c808fb27b8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.261927 4558 reconciler_common.go:293] "Volume detached for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/568f7066-e99f-45b2-aa41-28c808fb27b8-memcached-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.261990 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/568f7066-e99f-45b2-aa41-28c808fb27b8-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.262047 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-67q4s\" (UniqueName: \"kubernetes.io/projected/568f7066-e99f-45b2-aa41-28c808fb27b8-kube-api-access-67q4s\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.262097 4558 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/568f7066-e99f-45b2-aa41-28c808fb27b8-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.262297 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.269271 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.270776 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.273681 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-cell1-scripts" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.273828 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"galera-openstack-cell1-dockercfg-ftflb" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.274422 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-galera-openstack-cell1-svc" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.278785 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-cell1-config-data" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.285605 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.288443 4558 scope.go:117] "RemoveContainer" containerID="4731e15eb33f9d7ef47c235ab8bf0bf5e078da54679e25a286db416ef1024765" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.314362 4558 scope.go:117] "RemoveContainer" containerID="d8e9ed0ae69bd19b53807e767c4bcd9fcd074682e879fd040455a50aac23dd93" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.341376 4558 scope.go:117] "RemoveContainer" containerID="bf2b91504ae946bbd07d60c83c5fc451a51026e889c1d6b6cb64618f1bb1865a" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.363348 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/a205fa61-cd83-4d6e-b82b-aba6f9144c71-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"a205fa61-cd83-4d6e-b82b-aba6f9144c71\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.363406 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a205fa61-cd83-4d6e-b82b-aba6f9144c71-config-data\") pod \"ceilometer-0\" (UID: \"a205fa61-cd83-4d6e-b82b-aba6f9144c71\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.363432 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a205fa61-cd83-4d6e-b82b-aba6f9144c71-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a205fa61-cd83-4d6e-b82b-aba6f9144c71\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.363455 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a205fa61-cd83-4d6e-b82b-aba6f9144c71-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a205fa61-cd83-4d6e-b82b-aba6f9144c71\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.363629 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/db6ef9f1-8a92-4c74-b476-a24b66585268-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"db6ef9f1-8a92-4c74-b476-a24b66585268\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:35:25 crc kubenswrapper[4558]: 
I0120 17:35:25.363697 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/b1ce826a-c52c-42c6-8b67-6074b78c9fb7-kolla-config\") pod \"openstack-galera-0\" (UID: \"b1ce826a-c52c-42c6-8b67-6074b78c9fb7\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.363798 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/db6ef9f1-8a92-4c74-b476-a24b66585268-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"db6ef9f1-8a92-4c74-b476-a24b66585268\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.363823 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1ce826a-c52c-42c6-8b67-6074b78c9fb7-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"b1ce826a-c52c-42c6-8b67-6074b78c9fb7\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.363858 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b1ce826a-c52c-42c6-8b67-6074b78c9fb7-operator-scripts\") pod \"openstack-galera-0\" (UID: \"b1ce826a-c52c-42c6-8b67-6074b78c9fb7\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.363912 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/b1ce826a-c52c-42c6-8b67-6074b78c9fb7-config-data-generated\") pod \"openstack-galera-0\" (UID: \"b1ce826a-c52c-42c6-8b67-6074b78c9fb7\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.363951 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/db6ef9f1-8a92-4c74-b476-a24b66585268-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"db6ef9f1-8a92-4c74-b476-a24b66585268\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.364052 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a205fa61-cd83-4d6e-b82b-aba6f9144c71-run-httpd\") pod \"ceilometer-0\" (UID: \"a205fa61-cd83-4d6e-b82b-aba6f9144c71\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.364096 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"openstack-cell1-galera-0\" (UID: \"db6ef9f1-8a92-4c74-b476-a24b66585268\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.364149 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-0\" (UID: \"b1ce826a-c52c-42c6-8b67-6074b78c9fb7\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.364207 4558 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a205fa61-cd83-4d6e-b82b-aba6f9144c71-scripts\") pod \"ceilometer-0\" (UID: \"a205fa61-cd83-4d6e-b82b-aba6f9144c71\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.364277 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/db6ef9f1-8a92-4c74-b476-a24b66585268-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"db6ef9f1-8a92-4c74-b476-a24b66585268\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.364304 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/db6ef9f1-8a92-4c74-b476-a24b66585268-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"db6ef9f1-8a92-4c74-b476-a24b66585268\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.364340 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/b1ce826a-c52c-42c6-8b67-6074b78c9fb7-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"b1ce826a-c52c-42c6-8b67-6074b78c9fb7\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.364363 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sjmnl\" (UniqueName: \"kubernetes.io/projected/a205fa61-cd83-4d6e-b82b-aba6f9144c71-kube-api-access-sjmnl\") pod \"ceilometer-0\" (UID: \"a205fa61-cd83-4d6e-b82b-aba6f9144c71\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.364382 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/b1ce826a-c52c-42c6-8b67-6074b78c9fb7-config-data-default\") pod \"openstack-galera-0\" (UID: \"b1ce826a-c52c-42c6-8b67-6074b78c9fb7\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.364459 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dcl8m\" (UniqueName: \"kubernetes.io/projected/db6ef9f1-8a92-4c74-b476-a24b66585268-kube-api-access-dcl8m\") pod \"openstack-cell1-galera-0\" (UID: \"db6ef9f1-8a92-4c74-b476-a24b66585268\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.364471 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/b1ce826a-c52c-42c6-8b67-6074b78c9fb7-kolla-config\") pod \"openstack-galera-0\" (UID: \"b1ce826a-c52c-42c6-8b67-6074b78c9fb7\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.364495 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a205fa61-cd83-4d6e-b82b-aba6f9144c71-log-httpd\") pod \"ceilometer-0\" (UID: \"a205fa61-cd83-4d6e-b82b-aba6f9144c71\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.364543 4558 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t2l69\" (UniqueName: \"kubernetes.io/projected/b1ce826a-c52c-42c6-8b67-6074b78c9fb7-kube-api-access-t2l69\") pod \"openstack-galera-0\" (UID: \"b1ce826a-c52c-42c6-8b67-6074b78c9fb7\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.364570 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db6ef9f1-8a92-4c74-b476-a24b66585268-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"db6ef9f1-8a92-4c74-b476-a24b66585268\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.364590 4558 scope.go:117] "RemoveContainer" containerID="d8e9ed0ae69bd19b53807e767c4bcd9fcd074682e879fd040455a50aac23dd93" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.364918 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/b1ce826a-c52c-42c6-8b67-6074b78c9fb7-config-data-generated\") pod \"openstack-galera-0\" (UID: \"b1ce826a-c52c-42c6-8b67-6074b78c9fb7\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.365279 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-0\" (UID: \"b1ce826a-c52c-42c6-8b67-6074b78c9fb7\") device mount path \"/mnt/openstack/pv02\"" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.365872 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/b1ce826a-c52c-42c6-8b67-6074b78c9fb7-config-data-default\") pod \"openstack-galera-0\" (UID: \"b1ce826a-c52c-42c6-8b67-6074b78c9fb7\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.365278 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b1ce826a-c52c-42c6-8b67-6074b78c9fb7-operator-scripts\") pod \"openstack-galera-0\" (UID: \"b1ce826a-c52c-42c6-8b67-6074b78c9fb7\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:35:25 crc kubenswrapper[4558]: E0120 17:35:25.370919 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d8e9ed0ae69bd19b53807e767c4bcd9fcd074682e879fd040455a50aac23dd93\": container with ID starting with d8e9ed0ae69bd19b53807e767c4bcd9fcd074682e879fd040455a50aac23dd93 not found: ID does not exist" containerID="d8e9ed0ae69bd19b53807e767c4bcd9fcd074682e879fd040455a50aac23dd93" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.371030 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d8e9ed0ae69bd19b53807e767c4bcd9fcd074682e879fd040455a50aac23dd93"} err="failed to get container status \"d8e9ed0ae69bd19b53807e767c4bcd9fcd074682e879fd040455a50aac23dd93\": rpc error: code = NotFound desc = could not find container \"d8e9ed0ae69bd19b53807e767c4bcd9fcd074682e879fd040455a50aac23dd93\": container with ID starting with d8e9ed0ae69bd19b53807e767c4bcd9fcd074682e879fd040455a50aac23dd93 not found: ID does not exist" Jan 20 17:35:25 crc 
kubenswrapper[4558]: I0120 17:35:25.371119 4558 scope.go:117] "RemoveContainer" containerID="bf2b91504ae946bbd07d60c83c5fc451a51026e889c1d6b6cb64618f1bb1865a" Jan 20 17:35:25 crc kubenswrapper[4558]: E0120 17:35:25.372866 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bf2b91504ae946bbd07d60c83c5fc451a51026e889c1d6b6cb64618f1bb1865a\": container with ID starting with bf2b91504ae946bbd07d60c83c5fc451a51026e889c1d6b6cb64618f1bb1865a not found: ID does not exist" containerID="bf2b91504ae946bbd07d60c83c5fc451a51026e889c1d6b6cb64618f1bb1865a" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.372924 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bf2b91504ae946bbd07d60c83c5fc451a51026e889c1d6b6cb64618f1bb1865a"} err="failed to get container status \"bf2b91504ae946bbd07d60c83c5fc451a51026e889c1d6b6cb64618f1bb1865a\": rpc error: code = NotFound desc = could not find container \"bf2b91504ae946bbd07d60c83c5fc451a51026e889c1d6b6cb64618f1bb1865a\": container with ID starting with bf2b91504ae946bbd07d60c83c5fc451a51026e889c1d6b6cb64618f1bb1865a not found: ID does not exist" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.372933 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1ce826a-c52c-42c6-8b67-6074b78c9fb7-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"b1ce826a-c52c-42c6-8b67-6074b78c9fb7\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.373114 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/b1ce826a-c52c-42c6-8b67-6074b78c9fb7-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"b1ce826a-c52c-42c6-8b67-6074b78c9fb7\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.380454 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t2l69\" (UniqueName: \"kubernetes.io/projected/b1ce826a-c52c-42c6-8b67-6074b78c9fb7-kube-api-access-t2l69\") pod \"openstack-galera-0\" (UID: \"b1ce826a-c52c-42c6-8b67-6074b78c9fb7\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.382727 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.399505 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-0\" (UID: \"b1ce826a-c52c-42c6-8b67-6074b78c9fb7\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.405047 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.423694 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.425711 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.428106 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"memcached-config-data" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.428352 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"memcached-memcached-dockercfg-8bzhf" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.436814 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-memcached-svc" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.439147 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.468896 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/a205fa61-cd83-4d6e-b82b-aba6f9144c71-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"a205fa61-cd83-4d6e-b82b-aba6f9144c71\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.468977 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a205fa61-cd83-4d6e-b82b-aba6f9144c71-config-data\") pod \"ceilometer-0\" (UID: \"a205fa61-cd83-4d6e-b82b-aba6f9144c71\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.469004 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a205fa61-cd83-4d6e-b82b-aba6f9144c71-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a205fa61-cd83-4d6e-b82b-aba6f9144c71\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.469027 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a205fa61-cd83-4d6e-b82b-aba6f9144c71-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a205fa61-cd83-4d6e-b82b-aba6f9144c71\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.469065 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/db6ef9f1-8a92-4c74-b476-a24b66585268-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"db6ef9f1-8a92-4c74-b476-a24b66585268\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.469140 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5391541f-5057-4d20-ae1e-bbc88e6b33a4-config-data\") pod \"memcached-0\" (UID: \"5391541f-5057-4d20-ae1e-bbc88e6b33a4\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.469181 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/db6ef9f1-8a92-4c74-b476-a24b66585268-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"db6ef9f1-8a92-4c74-b476-a24b66585268\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.469242 4558 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/db6ef9f1-8a92-4c74-b476-a24b66585268-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"db6ef9f1-8a92-4c74-b476-a24b66585268\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.469291 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9x6z5\" (UniqueName: \"kubernetes.io/projected/5391541f-5057-4d20-ae1e-bbc88e6b33a4-kube-api-access-9x6z5\") pod \"memcached-0\" (UID: \"5391541f-5057-4d20-ae1e-bbc88e6b33a4\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.469341 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a205fa61-cd83-4d6e-b82b-aba6f9144c71-run-httpd\") pod \"ceilometer-0\" (UID: \"a205fa61-cd83-4d6e-b82b-aba6f9144c71\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.469366 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"openstack-cell1-galera-0\" (UID: \"db6ef9f1-8a92-4c74-b476-a24b66585268\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.469411 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5391541f-5057-4d20-ae1e-bbc88e6b33a4-combined-ca-bundle\") pod \"memcached-0\" (UID: \"5391541f-5057-4d20-ae1e-bbc88e6b33a4\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.469437 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a205fa61-cd83-4d6e-b82b-aba6f9144c71-scripts\") pod \"ceilometer-0\" (UID: \"a205fa61-cd83-4d6e-b82b-aba6f9144c71\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.469471 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5391541f-5057-4d20-ae1e-bbc88e6b33a4-kolla-config\") pod \"memcached-0\" (UID: \"5391541f-5057-4d20-ae1e-bbc88e6b33a4\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.469512 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/db6ef9f1-8a92-4c74-b476-a24b66585268-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"db6ef9f1-8a92-4c74-b476-a24b66585268\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.469533 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/db6ef9f1-8a92-4c74-b476-a24b66585268-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"db6ef9f1-8a92-4c74-b476-a24b66585268\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.469566 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/5391541f-5057-4d20-ae1e-bbc88e6b33a4-memcached-tls-certs\") pod \"memcached-0\" (UID: \"5391541f-5057-4d20-ae1e-bbc88e6b33a4\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.469588 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sjmnl\" (UniqueName: \"kubernetes.io/projected/a205fa61-cd83-4d6e-b82b-aba6f9144c71-kube-api-access-sjmnl\") pod \"ceilometer-0\" (UID: \"a205fa61-cd83-4d6e-b82b-aba6f9144c71\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.469657 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dcl8m\" (UniqueName: \"kubernetes.io/projected/db6ef9f1-8a92-4c74-b476-a24b66585268-kube-api-access-dcl8m\") pod \"openstack-cell1-galera-0\" (UID: \"db6ef9f1-8a92-4c74-b476-a24b66585268\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.469688 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a205fa61-cd83-4d6e-b82b-aba6f9144c71-log-httpd\") pod \"ceilometer-0\" (UID: \"a205fa61-cd83-4d6e-b82b-aba6f9144c71\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.469735 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db6ef9f1-8a92-4c74-b476-a24b66585268-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"db6ef9f1-8a92-4c74-b476-a24b66585268\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.470961 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a205fa61-cd83-4d6e-b82b-aba6f9144c71-run-httpd\") pod \"ceilometer-0\" (UID: \"a205fa61-cd83-4d6e-b82b-aba6f9144c71\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.471496 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a205fa61-cd83-4d6e-b82b-aba6f9144c71-log-httpd\") pod \"ceilometer-0\" (UID: \"a205fa61-cd83-4d6e-b82b-aba6f9144c71\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.472100 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/db6ef9f1-8a92-4c74-b476-a24b66585268-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"db6ef9f1-8a92-4c74-b476-a24b66585268\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.472150 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"openstack-cell1-galera-0\" (UID: \"db6ef9f1-8a92-4c74-b476-a24b66585268\") device mount path \"/mnt/openstack/pv06\"" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.478585 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/a205fa61-cd83-4d6e-b82b-aba6f9144c71-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: 
\"a205fa61-cd83-4d6e-b82b-aba6f9144c71\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.478667 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a205fa61-cd83-4d6e-b82b-aba6f9144c71-scripts\") pod \"ceilometer-0\" (UID: \"a205fa61-cd83-4d6e-b82b-aba6f9144c71\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.483039 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/db6ef9f1-8a92-4c74-b476-a24b66585268-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"db6ef9f1-8a92-4c74-b476-a24b66585268\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.484035 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/db6ef9f1-8a92-4c74-b476-a24b66585268-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"db6ef9f1-8a92-4c74-b476-a24b66585268\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.484570 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/db6ef9f1-8a92-4c74-b476-a24b66585268-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"db6ef9f1-8a92-4c74-b476-a24b66585268\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.485471 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a205fa61-cd83-4d6e-b82b-aba6f9144c71-config-data\") pod \"ceilometer-0\" (UID: \"a205fa61-cd83-4d6e-b82b-aba6f9144c71\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.499752 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a205fa61-cd83-4d6e-b82b-aba6f9144c71-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a205fa61-cd83-4d6e-b82b-aba6f9144c71\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.499827 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db6ef9f1-8a92-4c74-b476-a24b66585268-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"db6ef9f1-8a92-4c74-b476-a24b66585268\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.518570 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/db6ef9f1-8a92-4c74-b476-a24b66585268-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"db6ef9f1-8a92-4c74-b476-a24b66585268\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.519094 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a205fa61-cd83-4d6e-b82b-aba6f9144c71-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a205fa61-cd83-4d6e-b82b-aba6f9144c71\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.519099 4558 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-sjmnl\" (UniqueName: \"kubernetes.io/projected/a205fa61-cd83-4d6e-b82b-aba6f9144c71-kube-api-access-sjmnl\") pod \"ceilometer-0\" (UID: \"a205fa61-cd83-4d6e-b82b-aba6f9144c71\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.520066 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.522789 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dcl8m\" (UniqueName: \"kubernetes.io/projected/db6ef9f1-8a92-4c74-b476-a24b66585268-kube-api-access-dcl8m\") pod \"openstack-cell1-galera-0\" (UID: \"db6ef9f1-8a92-4c74-b476-a24b66585268\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.531541 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"openstack-cell1-galera-0\" (UID: \"db6ef9f1-8a92-4c74-b476-a24b66585268\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.557464 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.592609 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5391541f-5057-4d20-ae1e-bbc88e6b33a4-config-data\") pod \"memcached-0\" (UID: \"5391541f-5057-4d20-ae1e-bbc88e6b33a4\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.592741 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9x6z5\" (UniqueName: \"kubernetes.io/projected/5391541f-5057-4d20-ae1e-bbc88e6b33a4-kube-api-access-9x6z5\") pod \"memcached-0\" (UID: \"5391541f-5057-4d20-ae1e-bbc88e6b33a4\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.592860 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5391541f-5057-4d20-ae1e-bbc88e6b33a4-combined-ca-bundle\") pod \"memcached-0\" (UID: \"5391541f-5057-4d20-ae1e-bbc88e6b33a4\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.592914 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5391541f-5057-4d20-ae1e-bbc88e6b33a4-kolla-config\") pod \"memcached-0\" (UID: \"5391541f-5057-4d20-ae1e-bbc88e6b33a4\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.592985 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/5391541f-5057-4d20-ae1e-bbc88e6b33a4-memcached-tls-certs\") pod \"memcached-0\" (UID: \"5391541f-5057-4d20-ae1e-bbc88e6b33a4\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.595718 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5391541f-5057-4d20-ae1e-bbc88e6b33a4-config-data\") pod \"memcached-0\" (UID: 
\"5391541f-5057-4d20-ae1e-bbc88e6b33a4\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.598083 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5391541f-5057-4d20-ae1e-bbc88e6b33a4-kolla-config\") pod \"memcached-0\" (UID: \"5391541f-5057-4d20-ae1e-bbc88e6b33a4\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.598525 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/5391541f-5057-4d20-ae1e-bbc88e6b33a4-memcached-tls-certs\") pod \"memcached-0\" (UID: \"5391541f-5057-4d20-ae1e-bbc88e6b33a4\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.602891 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5391541f-5057-4d20-ae1e-bbc88e6b33a4-combined-ca-bundle\") pod \"memcached-0\" (UID: \"5391541f-5057-4d20-ae1e-bbc88e6b33a4\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.607354 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.642647 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9x6z5\" (UniqueName: \"kubernetes.io/projected/5391541f-5057-4d20-ae1e-bbc88e6b33a4-kube-api-access-9x6z5\") pod \"memcached-0\" (UID: \"5391541f-5057-4d20-ae1e-bbc88e6b33a4\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:35:25 crc kubenswrapper[4558]: I0120 17:35:25.747465 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:35:26 crc kubenswrapper[4558]: I0120 17:35:26.134583 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:35:26 crc kubenswrapper[4558]: W0120 17:35:26.139035 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb1ce826a_c52c_42c6_8b67_6074b78c9fb7.slice/crio-a1c834964cceaf00e0e994b0ec3af7afd2ea6ab12fe6ebb957debd7cce22686d WatchSource:0}: Error finding container a1c834964cceaf00e0e994b0ec3af7afd2ea6ab12fe6ebb957debd7cce22686d: Status 404 returned error can't find the container with id a1c834964cceaf00e0e994b0ec3af7afd2ea6ab12fe6ebb957debd7cce22686d Jan 20 17:35:26 crc kubenswrapper[4558]: W0120 17:35:26.235394 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda205fa61_cd83_4d6e_b82b_aba6f9144c71.slice/crio-5188af4bebe114136c461026009c42e7f53429599650512ccee2779a56443bfe WatchSource:0}: Error finding container 5188af4bebe114136c461026009c42e7f53429599650512ccee2779a56443bfe: Status 404 returned error can't find the container with id 5188af4bebe114136c461026009c42e7f53429599650512ccee2779a56443bfe Jan 20 17:35:26 crc kubenswrapper[4558]: I0120 17:35:26.236499 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:35:26 crc kubenswrapper[4558]: I0120 17:35:26.245670 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:35:26 crc kubenswrapper[4558]: I0120 17:35:26.289359 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:35:26 crc kubenswrapper[4558]: I0120 17:35:26.581531 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="568f7066-e99f-45b2-aa41-28c808fb27b8" path="/var/lib/kubelet/pods/568f7066-e99f-45b2-aa41-28c808fb27b8/volumes" Jan 20 17:35:26 crc kubenswrapper[4558]: I0120 17:35:26.582811 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57872fb7-1988-4da4-b8b9-eee9c5e8c827" path="/var/lib/kubelet/pods/57872fb7-1988-4da4-b8b9-eee9c5e8c827/volumes" Jan 20 17:35:26 crc kubenswrapper[4558]: I0120 17:35:26.583542 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5e0a76a1-125c-47f2-a903-b5c8ce8e5277" path="/var/lib/kubelet/pods/5e0a76a1-125c-47f2-a903-b5c8ce8e5277/volumes" Jan 20 17:35:26 crc kubenswrapper[4558]: I0120 17:35:26.584653 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b9d9b3dd-ab88-4de7-9176-26d0f37316c5" path="/var/lib/kubelet/pods/b9d9b3dd-ab88-4de7-9176-26d0f37316c5/volumes" Jan 20 17:35:26 crc kubenswrapper[4558]: I0120 17:35:26.835704 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack-kuttl-tests/cinder-scheduler-0" podUID="4bd47eac-4090-4fc1-91c4-553ebd84964d" containerName="cinder-scheduler" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 20 17:35:27 crc kubenswrapper[4558]: I0120 17:35:27.080591 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"b1ce826a-c52c-42c6-8b67-6074b78c9fb7","Type":"ContainerStarted","Data":"10f10d689a9d323a86b2d378fb03cb6850503e157f7b9f55a3eb04a8d068973c"} Jan 20 17:35:27 crc kubenswrapper[4558]: I0120 17:35:27.080724 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"b1ce826a-c52c-42c6-8b67-6074b78c9fb7","Type":"ContainerStarted","Data":"a1c834964cceaf00e0e994b0ec3af7afd2ea6ab12fe6ebb957debd7cce22686d"} Jan 20 17:35:27 crc kubenswrapper[4558]: I0120 17:35:27.082235 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"a205fa61-cd83-4d6e-b82b-aba6f9144c71","Type":"ContainerStarted","Data":"73a579cd1ed45c429083e3153dc8ac019914451ccabba3eac12a668e6e281974"} Jan 20 17:35:27 crc kubenswrapper[4558]: I0120 17:35:27.082324 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"a205fa61-cd83-4d6e-b82b-aba6f9144c71","Type":"ContainerStarted","Data":"5188af4bebe114136c461026009c42e7f53429599650512ccee2779a56443bfe"} Jan 20 17:35:27 crc kubenswrapper[4558]: I0120 17:35:27.084348 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"5391541f-5057-4d20-ae1e-bbc88e6b33a4","Type":"ContainerStarted","Data":"4f3f925041f116a55af0c01739c0b06976097986bc12562c41828bd96d52c5f4"} Jan 20 17:35:27 crc kubenswrapper[4558]: I0120 17:35:27.084445 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"5391541f-5057-4d20-ae1e-bbc88e6b33a4","Type":"ContainerStarted","Data":"7d13902c841d8bbee2f4f76a4ae23a2a6890b13d14ecfc5f4bf5f2aa27e2f967"} Jan 20 17:35:27 crc kubenswrapper[4558]: I0120 17:35:27.085372 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:35:27 crc kubenswrapper[4558]: I0120 17:35:27.086907 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"db6ef9f1-8a92-4c74-b476-a24b66585268","Type":"ContainerStarted","Data":"4bd125dcc99d626eaea56657ad3f9225c43b7d93f078669af7ed4447f91d3330"} Jan 20 17:35:27 crc kubenswrapper[4558]: I0120 17:35:27.087032 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"db6ef9f1-8a92-4c74-b476-a24b66585268","Type":"ContainerStarted","Data":"2993fdbda70ff371d15f21c84b8a5240e810ba181d38fa057ea8020b0703d009"} Jan 20 17:35:27 crc kubenswrapper[4558]: I0120 17:35:27.111005 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/memcached-0" podStartSLOduration=2.110987151 podStartE2EDuration="2.110987151s" podCreationTimestamp="2026-01-20 17:35:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:35:27.106931385 +0000 UTC m=+3220.867269352" watchObservedRunningTime="2026-01-20 17:35:27.110987151 +0000 UTC m=+3220.871325118" Jan 20 17:35:27 crc kubenswrapper[4558]: I0120 17:35:27.155527 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/placement-64c94c78bb-k28q4"] Jan 20 17:35:27 crc kubenswrapper[4558]: I0120 17:35:27.156949 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-64c94c78bb-k28q4" Jan 20 17:35:27 crc kubenswrapper[4558]: I0120 17:35:27.177080 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-64c94c78bb-k28q4"] Jan 20 17:35:27 crc kubenswrapper[4558]: I0120 17:35:27.236069 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f64d9468-aa9e-4acc-b037-3ba28425e746-internal-tls-certs\") pod \"placement-64c94c78bb-k28q4\" (UID: \"f64d9468-aa9e-4acc-b037-3ba28425e746\") " pod="openstack-kuttl-tests/placement-64c94c78bb-k28q4" Jan 20 17:35:27 crc kubenswrapper[4558]: I0120 17:35:27.236144 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f64d9468-aa9e-4acc-b037-3ba28425e746-combined-ca-bundle\") pod \"placement-64c94c78bb-k28q4\" (UID: \"f64d9468-aa9e-4acc-b037-3ba28425e746\") " pod="openstack-kuttl-tests/placement-64c94c78bb-k28q4" Jan 20 17:35:27 crc kubenswrapper[4558]: I0120 17:35:27.236221 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f64d9468-aa9e-4acc-b037-3ba28425e746-scripts\") pod \"placement-64c94c78bb-k28q4\" (UID: \"f64d9468-aa9e-4acc-b037-3ba28425e746\") " pod="openstack-kuttl-tests/placement-64c94c78bb-k28q4" Jan 20 17:35:27 crc kubenswrapper[4558]: I0120 17:35:27.236250 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xqb7p\" (UniqueName: \"kubernetes.io/projected/f64d9468-aa9e-4acc-b037-3ba28425e746-kube-api-access-xqb7p\") pod \"placement-64c94c78bb-k28q4\" (UID: \"f64d9468-aa9e-4acc-b037-3ba28425e746\") " pod="openstack-kuttl-tests/placement-64c94c78bb-k28q4" Jan 20 17:35:27 crc kubenswrapper[4558]: I0120 17:35:27.236280 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f64d9468-aa9e-4acc-b037-3ba28425e746-config-data\") pod \"placement-64c94c78bb-k28q4\" (UID: \"f64d9468-aa9e-4acc-b037-3ba28425e746\") " pod="openstack-kuttl-tests/placement-64c94c78bb-k28q4" Jan 20 17:35:27 crc kubenswrapper[4558]: I0120 17:35:27.236335 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f64d9468-aa9e-4acc-b037-3ba28425e746-public-tls-certs\") pod \"placement-64c94c78bb-k28q4\" (UID: \"f64d9468-aa9e-4acc-b037-3ba28425e746\") " pod="openstack-kuttl-tests/placement-64c94c78bb-k28q4" Jan 20 17:35:27 crc kubenswrapper[4558]: I0120 17:35:27.236471 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f64d9468-aa9e-4acc-b037-3ba28425e746-logs\") pod \"placement-64c94c78bb-k28q4\" (UID: \"f64d9468-aa9e-4acc-b037-3ba28425e746\") " pod="openstack-kuttl-tests/placement-64c94c78bb-k28q4" Jan 20 17:35:27 crc kubenswrapper[4558]: I0120 17:35:27.338626 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f64d9468-aa9e-4acc-b037-3ba28425e746-logs\") pod \"placement-64c94c78bb-k28q4\" (UID: \"f64d9468-aa9e-4acc-b037-3ba28425e746\") " pod="openstack-kuttl-tests/placement-64c94c78bb-k28q4" Jan 20 17:35:27 crc 
kubenswrapper[4558]: I0120 17:35:27.338739 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f64d9468-aa9e-4acc-b037-3ba28425e746-internal-tls-certs\") pod \"placement-64c94c78bb-k28q4\" (UID: \"f64d9468-aa9e-4acc-b037-3ba28425e746\") " pod="openstack-kuttl-tests/placement-64c94c78bb-k28q4" Jan 20 17:35:27 crc kubenswrapper[4558]: I0120 17:35:27.338798 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f64d9468-aa9e-4acc-b037-3ba28425e746-combined-ca-bundle\") pod \"placement-64c94c78bb-k28q4\" (UID: \"f64d9468-aa9e-4acc-b037-3ba28425e746\") " pod="openstack-kuttl-tests/placement-64c94c78bb-k28q4" Jan 20 17:35:27 crc kubenswrapper[4558]: I0120 17:35:27.338880 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f64d9468-aa9e-4acc-b037-3ba28425e746-scripts\") pod \"placement-64c94c78bb-k28q4\" (UID: \"f64d9468-aa9e-4acc-b037-3ba28425e746\") " pod="openstack-kuttl-tests/placement-64c94c78bb-k28q4" Jan 20 17:35:27 crc kubenswrapper[4558]: I0120 17:35:27.338906 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xqb7p\" (UniqueName: \"kubernetes.io/projected/f64d9468-aa9e-4acc-b037-3ba28425e746-kube-api-access-xqb7p\") pod \"placement-64c94c78bb-k28q4\" (UID: \"f64d9468-aa9e-4acc-b037-3ba28425e746\") " pod="openstack-kuttl-tests/placement-64c94c78bb-k28q4" Jan 20 17:35:27 crc kubenswrapper[4558]: I0120 17:35:27.338944 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f64d9468-aa9e-4acc-b037-3ba28425e746-config-data\") pod \"placement-64c94c78bb-k28q4\" (UID: \"f64d9468-aa9e-4acc-b037-3ba28425e746\") " pod="openstack-kuttl-tests/placement-64c94c78bb-k28q4" Jan 20 17:35:27 crc kubenswrapper[4558]: I0120 17:35:27.339004 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f64d9468-aa9e-4acc-b037-3ba28425e746-public-tls-certs\") pod \"placement-64c94c78bb-k28q4\" (UID: \"f64d9468-aa9e-4acc-b037-3ba28425e746\") " pod="openstack-kuttl-tests/placement-64c94c78bb-k28q4" Jan 20 17:35:27 crc kubenswrapper[4558]: I0120 17:35:27.339891 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f64d9468-aa9e-4acc-b037-3ba28425e746-logs\") pod \"placement-64c94c78bb-k28q4\" (UID: \"f64d9468-aa9e-4acc-b037-3ba28425e746\") " pod="openstack-kuttl-tests/placement-64c94c78bb-k28q4" Jan 20 17:35:27 crc kubenswrapper[4558]: I0120 17:35:27.343933 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f64d9468-aa9e-4acc-b037-3ba28425e746-combined-ca-bundle\") pod \"placement-64c94c78bb-k28q4\" (UID: \"f64d9468-aa9e-4acc-b037-3ba28425e746\") " pod="openstack-kuttl-tests/placement-64c94c78bb-k28q4" Jan 20 17:35:27 crc kubenswrapper[4558]: I0120 17:35:27.344255 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f64d9468-aa9e-4acc-b037-3ba28425e746-public-tls-certs\") pod \"placement-64c94c78bb-k28q4\" (UID: \"f64d9468-aa9e-4acc-b037-3ba28425e746\") " pod="openstack-kuttl-tests/placement-64c94c78bb-k28q4" Jan 20 17:35:27 crc kubenswrapper[4558]: 
I0120 17:35:27.344914 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f64d9468-aa9e-4acc-b037-3ba28425e746-config-data\") pod \"placement-64c94c78bb-k28q4\" (UID: \"f64d9468-aa9e-4acc-b037-3ba28425e746\") " pod="openstack-kuttl-tests/placement-64c94c78bb-k28q4" Jan 20 17:35:27 crc kubenswrapper[4558]: I0120 17:35:27.360539 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f64d9468-aa9e-4acc-b037-3ba28425e746-scripts\") pod \"placement-64c94c78bb-k28q4\" (UID: \"f64d9468-aa9e-4acc-b037-3ba28425e746\") " pod="openstack-kuttl-tests/placement-64c94c78bb-k28q4" Jan 20 17:35:27 crc kubenswrapper[4558]: I0120 17:35:27.360682 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f64d9468-aa9e-4acc-b037-3ba28425e746-internal-tls-certs\") pod \"placement-64c94c78bb-k28q4\" (UID: \"f64d9468-aa9e-4acc-b037-3ba28425e746\") " pod="openstack-kuttl-tests/placement-64c94c78bb-k28q4" Jan 20 17:35:27 crc kubenswrapper[4558]: I0120 17:35:27.363051 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xqb7p\" (UniqueName: \"kubernetes.io/projected/f64d9468-aa9e-4acc-b037-3ba28425e746-kube-api-access-xqb7p\") pod \"placement-64c94c78bb-k28q4\" (UID: \"f64d9468-aa9e-4acc-b037-3ba28425e746\") " pod="openstack-kuttl-tests/placement-64c94c78bb-k28q4" Jan 20 17:35:27 crc kubenswrapper[4558]: I0120 17:35:27.478428 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-64c94c78bb-k28q4" Jan 20 17:35:27 crc kubenswrapper[4558]: I0120 17:35:27.917465 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-64c94c78bb-k28q4"] Jan 20 17:35:27 crc kubenswrapper[4558]: W0120 17:35:27.927657 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf64d9468_aa9e_4acc_b037_3ba28425e746.slice/crio-95ce9292d9e4a721a5da998e13c03061ee1ae9cf7f73b3a31693dd6dd40ab05b WatchSource:0}: Error finding container 95ce9292d9e4a721a5da998e13c03061ee1ae9cf7f73b3a31693dd6dd40ab05b: Status 404 returned error can't find the container with id 95ce9292d9e4a721a5da998e13c03061ee1ae9cf7f73b3a31693dd6dd40ab05b Jan 20 17:35:28 crc kubenswrapper[4558]: I0120 17:35:28.095440 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-64c94c78bb-k28q4" event={"ID":"f64d9468-aa9e-4acc-b037-3ba28425e746","Type":"ContainerStarted","Data":"95ce9292d9e4a721a5da998e13c03061ee1ae9cf7f73b3a31693dd6dd40ab05b"} Jan 20 17:35:28 crc kubenswrapper[4558]: I0120 17:35:28.098859 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"a205fa61-cd83-4d6e-b82b-aba6f9144c71","Type":"ContainerStarted","Data":"d6ec41d7d0083a1b2dfe623be5f2d5647fc24682bdfeff8a9ef6b95d75507454"} Jan 20 17:35:28 crc kubenswrapper[4558]: I0120 17:35:28.449482 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-64c94c78bb-k28q4"] Jan 20 17:35:28 crc kubenswrapper[4558]: I0120 17:35:28.474479 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/placement-5577476b98-wgg9z"] Jan 20 17:35:28 crc kubenswrapper[4558]: I0120 17:35:28.476292 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-5577476b98-wgg9z" Jan 20 17:35:28 crc kubenswrapper[4558]: I0120 17:35:28.494885 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-5577476b98-wgg9z"] Jan 20 17:35:28 crc kubenswrapper[4558]: I0120 17:35:28.573929 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-77p2s\" (UniqueName: \"kubernetes.io/projected/06b8e2bb-a133-4a4b-92dd-12cf20ee4300-kube-api-access-77p2s\") pod \"placement-5577476b98-wgg9z\" (UID: \"06b8e2bb-a133-4a4b-92dd-12cf20ee4300\") " pod="openstack-kuttl-tests/placement-5577476b98-wgg9z" Jan 20 17:35:28 crc kubenswrapper[4558]: I0120 17:35:28.574113 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06b8e2bb-a133-4a4b-92dd-12cf20ee4300-config-data\") pod \"placement-5577476b98-wgg9z\" (UID: \"06b8e2bb-a133-4a4b-92dd-12cf20ee4300\") " pod="openstack-kuttl-tests/placement-5577476b98-wgg9z" Jan 20 17:35:28 crc kubenswrapper[4558]: I0120 17:35:28.574351 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/06b8e2bb-a133-4a4b-92dd-12cf20ee4300-internal-tls-certs\") pod \"placement-5577476b98-wgg9z\" (UID: \"06b8e2bb-a133-4a4b-92dd-12cf20ee4300\") " pod="openstack-kuttl-tests/placement-5577476b98-wgg9z" Jan 20 17:35:28 crc kubenswrapper[4558]: I0120 17:35:28.574468 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/06b8e2bb-a133-4a4b-92dd-12cf20ee4300-public-tls-certs\") pod \"placement-5577476b98-wgg9z\" (UID: \"06b8e2bb-a133-4a4b-92dd-12cf20ee4300\") " pod="openstack-kuttl-tests/placement-5577476b98-wgg9z" Jan 20 17:35:28 crc kubenswrapper[4558]: I0120 17:35:28.574550 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/06b8e2bb-a133-4a4b-92dd-12cf20ee4300-scripts\") pod \"placement-5577476b98-wgg9z\" (UID: \"06b8e2bb-a133-4a4b-92dd-12cf20ee4300\") " pod="openstack-kuttl-tests/placement-5577476b98-wgg9z" Jan 20 17:35:28 crc kubenswrapper[4558]: I0120 17:35:28.574639 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/06b8e2bb-a133-4a4b-92dd-12cf20ee4300-logs\") pod \"placement-5577476b98-wgg9z\" (UID: \"06b8e2bb-a133-4a4b-92dd-12cf20ee4300\") " pod="openstack-kuttl-tests/placement-5577476b98-wgg9z" Jan 20 17:35:28 crc kubenswrapper[4558]: I0120 17:35:28.574725 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06b8e2bb-a133-4a4b-92dd-12cf20ee4300-combined-ca-bundle\") pod \"placement-5577476b98-wgg9z\" (UID: \"06b8e2bb-a133-4a4b-92dd-12cf20ee4300\") " pod="openstack-kuttl-tests/placement-5577476b98-wgg9z" Jan 20 17:35:28 crc kubenswrapper[4558]: I0120 17:35:28.677198 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06b8e2bb-a133-4a4b-92dd-12cf20ee4300-config-data\") pod \"placement-5577476b98-wgg9z\" (UID: \"06b8e2bb-a133-4a4b-92dd-12cf20ee4300\") " pod="openstack-kuttl-tests/placement-5577476b98-wgg9z" Jan 20 17:35:28 crc 
kubenswrapper[4558]: I0120 17:35:28.677302 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/06b8e2bb-a133-4a4b-92dd-12cf20ee4300-internal-tls-certs\") pod \"placement-5577476b98-wgg9z\" (UID: \"06b8e2bb-a133-4a4b-92dd-12cf20ee4300\") " pod="openstack-kuttl-tests/placement-5577476b98-wgg9z" Jan 20 17:35:28 crc kubenswrapper[4558]: I0120 17:35:28.677425 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/06b8e2bb-a133-4a4b-92dd-12cf20ee4300-public-tls-certs\") pod \"placement-5577476b98-wgg9z\" (UID: \"06b8e2bb-a133-4a4b-92dd-12cf20ee4300\") " pod="openstack-kuttl-tests/placement-5577476b98-wgg9z" Jan 20 17:35:28 crc kubenswrapper[4558]: I0120 17:35:28.677454 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/06b8e2bb-a133-4a4b-92dd-12cf20ee4300-scripts\") pod \"placement-5577476b98-wgg9z\" (UID: \"06b8e2bb-a133-4a4b-92dd-12cf20ee4300\") " pod="openstack-kuttl-tests/placement-5577476b98-wgg9z" Jan 20 17:35:28 crc kubenswrapper[4558]: I0120 17:35:28.677559 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/06b8e2bb-a133-4a4b-92dd-12cf20ee4300-logs\") pod \"placement-5577476b98-wgg9z\" (UID: \"06b8e2bb-a133-4a4b-92dd-12cf20ee4300\") " pod="openstack-kuttl-tests/placement-5577476b98-wgg9z" Jan 20 17:35:28 crc kubenswrapper[4558]: I0120 17:35:28.677595 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06b8e2bb-a133-4a4b-92dd-12cf20ee4300-combined-ca-bundle\") pod \"placement-5577476b98-wgg9z\" (UID: \"06b8e2bb-a133-4a4b-92dd-12cf20ee4300\") " pod="openstack-kuttl-tests/placement-5577476b98-wgg9z" Jan 20 17:35:28 crc kubenswrapper[4558]: I0120 17:35:28.677846 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-77p2s\" (UniqueName: \"kubernetes.io/projected/06b8e2bb-a133-4a4b-92dd-12cf20ee4300-kube-api-access-77p2s\") pod \"placement-5577476b98-wgg9z\" (UID: \"06b8e2bb-a133-4a4b-92dd-12cf20ee4300\") " pod="openstack-kuttl-tests/placement-5577476b98-wgg9z" Jan 20 17:35:28 crc kubenswrapper[4558]: I0120 17:35:28.678395 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/06b8e2bb-a133-4a4b-92dd-12cf20ee4300-logs\") pod \"placement-5577476b98-wgg9z\" (UID: \"06b8e2bb-a133-4a4b-92dd-12cf20ee4300\") " pod="openstack-kuttl-tests/placement-5577476b98-wgg9z" Jan 20 17:35:28 crc kubenswrapper[4558]: I0120 17:35:28.682180 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/06b8e2bb-a133-4a4b-92dd-12cf20ee4300-internal-tls-certs\") pod \"placement-5577476b98-wgg9z\" (UID: \"06b8e2bb-a133-4a4b-92dd-12cf20ee4300\") " pod="openstack-kuttl-tests/placement-5577476b98-wgg9z" Jan 20 17:35:28 crc kubenswrapper[4558]: I0120 17:35:28.682996 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06b8e2bb-a133-4a4b-92dd-12cf20ee4300-combined-ca-bundle\") pod \"placement-5577476b98-wgg9z\" (UID: \"06b8e2bb-a133-4a4b-92dd-12cf20ee4300\") " pod="openstack-kuttl-tests/placement-5577476b98-wgg9z" Jan 20 17:35:28 crc kubenswrapper[4558]: I0120 
17:35:28.683104 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/06b8e2bb-a133-4a4b-92dd-12cf20ee4300-public-tls-certs\") pod \"placement-5577476b98-wgg9z\" (UID: \"06b8e2bb-a133-4a4b-92dd-12cf20ee4300\") " pod="openstack-kuttl-tests/placement-5577476b98-wgg9z" Jan 20 17:35:28 crc kubenswrapper[4558]: I0120 17:35:28.685349 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/06b8e2bb-a133-4a4b-92dd-12cf20ee4300-scripts\") pod \"placement-5577476b98-wgg9z\" (UID: \"06b8e2bb-a133-4a4b-92dd-12cf20ee4300\") " pod="openstack-kuttl-tests/placement-5577476b98-wgg9z" Jan 20 17:35:28 crc kubenswrapper[4558]: I0120 17:35:28.685919 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06b8e2bb-a133-4a4b-92dd-12cf20ee4300-config-data\") pod \"placement-5577476b98-wgg9z\" (UID: \"06b8e2bb-a133-4a4b-92dd-12cf20ee4300\") " pod="openstack-kuttl-tests/placement-5577476b98-wgg9z" Jan 20 17:35:28 crc kubenswrapper[4558]: I0120 17:35:28.694032 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-77p2s\" (UniqueName: \"kubernetes.io/projected/06b8e2bb-a133-4a4b-92dd-12cf20ee4300-kube-api-access-77p2s\") pod \"placement-5577476b98-wgg9z\" (UID: \"06b8e2bb-a133-4a4b-92dd-12cf20ee4300\") " pod="openstack-kuttl-tests/placement-5577476b98-wgg9z" Jan 20 17:35:28 crc kubenswrapper[4558]: I0120 17:35:28.792686 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-5577476b98-wgg9z" Jan 20 17:35:29 crc kubenswrapper[4558]: I0120 17:35:29.111033 4558 generic.go:334] "Generic (PLEG): container finished" podID="db6ef9f1-8a92-4c74-b476-a24b66585268" containerID="4bd125dcc99d626eaea56657ad3f9225c43b7d93f078669af7ed4447f91d3330" exitCode=0 Jan 20 17:35:29 crc kubenswrapper[4558]: I0120 17:35:29.111416 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"db6ef9f1-8a92-4c74-b476-a24b66585268","Type":"ContainerDied","Data":"4bd125dcc99d626eaea56657ad3f9225c43b7d93f078669af7ed4447f91d3330"} Jan 20 17:35:29 crc kubenswrapper[4558]: I0120 17:35:29.117222 4558 generic.go:334] "Generic (PLEG): container finished" podID="b1ce826a-c52c-42c6-8b67-6074b78c9fb7" containerID="10f10d689a9d323a86b2d378fb03cb6850503e157f7b9f55a3eb04a8d068973c" exitCode=0 Jan 20 17:35:29 crc kubenswrapper[4558]: I0120 17:35:29.117286 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"b1ce826a-c52c-42c6-8b67-6074b78c9fb7","Type":"ContainerDied","Data":"10f10d689a9d323a86b2d378fb03cb6850503e157f7b9f55a3eb04a8d068973c"} Jan 20 17:35:29 crc kubenswrapper[4558]: I0120 17:35:29.120885 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-64c94c78bb-k28q4" event={"ID":"f64d9468-aa9e-4acc-b037-3ba28425e746","Type":"ContainerStarted","Data":"eaef017e07f368be1fe47035fb3e78dded3804a550df9f35801ce0981ab0accd"} Jan 20 17:35:29 crc kubenswrapper[4558]: I0120 17:35:29.120987 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/placement-64c94c78bb-k28q4" Jan 20 17:35:29 crc kubenswrapper[4558]: I0120 17:35:29.121003 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-64c94c78bb-k28q4" 
event={"ID":"f64d9468-aa9e-4acc-b037-3ba28425e746","Type":"ContainerStarted","Data":"c0f31e188f634a0031c26d8014a69c7b255c6dbdd3d161351e4143d939fc8401"} Jan 20 17:35:29 crc kubenswrapper[4558]: I0120 17:35:29.124286 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"a205fa61-cd83-4d6e-b82b-aba6f9144c71","Type":"ContainerStarted","Data":"9787f119978c69e093f0f9dac43490e32de39c2c966b73ff0ee092a20cb9152f"} Jan 20 17:35:29 crc kubenswrapper[4558]: I0120 17:35:29.174556 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/placement-64c94c78bb-k28q4" podStartSLOduration=2.1745416300000002 podStartE2EDuration="2.17454163s" podCreationTimestamp="2026-01-20 17:35:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:35:29.167523573 +0000 UTC m=+3222.927861541" watchObservedRunningTime="2026-01-20 17:35:29.17454163 +0000 UTC m=+3222.934879587" Jan 20 17:35:29 crc kubenswrapper[4558]: I0120 17:35:29.211204 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-5577476b98-wgg9z"] Jan 20 17:35:29 crc kubenswrapper[4558]: W0120 17:35:29.222454 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod06b8e2bb_a133_4a4b_92dd_12cf20ee4300.slice/crio-3b1b219bd9663be2d1a49ac8851cb6f5be898ae04c21007da18b9e4e44ccdd94 WatchSource:0}: Error finding container 3b1b219bd9663be2d1a49ac8851cb6f5be898ae04c21007da18b9e4e44ccdd94: Status 404 returned error can't find the container with id 3b1b219bd9663be2d1a49ac8851cb6f5be898ae04c21007da18b9e4e44ccdd94 Jan 20 17:35:29 crc kubenswrapper[4558]: I0120 17:35:29.838338 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack-kuttl-tests/cinder-scheduler-0" podUID="4bd47eac-4090-4fc1-91c4-553ebd84964d" containerName="cinder-scheduler" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 20 17:35:29 crc kubenswrapper[4558]: I0120 17:35:29.838672 4558 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:35:29 crc kubenswrapper[4558]: I0120 17:35:29.839706 4558 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="cinder-scheduler" containerStatusID={"Type":"cri-o","ID":"f5234b6873e40d97dab9fc0f95b0d08db6c6a9b663a804a6d9a45e0c39abccc9"} pod="openstack-kuttl-tests/cinder-scheduler-0" containerMessage="Container cinder-scheduler failed liveness probe, will be restarted" Jan 20 17:35:29 crc kubenswrapper[4558]: I0120 17:35:29.839769 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-scheduler-0" podUID="4bd47eac-4090-4fc1-91c4-553ebd84964d" containerName="cinder-scheduler" containerID="cri-o://f5234b6873e40d97dab9fc0f95b0d08db6c6a9b663a804a6d9a45e0c39abccc9" gracePeriod=30 Jan 20 17:35:30 crc kubenswrapper[4558]: I0120 17:35:30.145228 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"a205fa61-cd83-4d6e-b82b-aba6f9144c71","Type":"ContainerStarted","Data":"7674db8d22b702209ebaaadbb855ca6def67d0a91881b8c25b343fb381ebccde"} Jan 20 17:35:30 crc kubenswrapper[4558]: I0120 17:35:30.145377 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:30 crc 
kubenswrapper[4558]: I0120 17:35:30.147482 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"db6ef9f1-8a92-4c74-b476-a24b66585268","Type":"ContainerStarted","Data":"6500342a069cbdc40f29bb0f7540a921249de5ac706a7008df5f940aa1c75eb9"} Jan 20 17:35:30 crc kubenswrapper[4558]: I0120 17:35:30.160494 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"b1ce826a-c52c-42c6-8b67-6074b78c9fb7","Type":"ContainerStarted","Data":"1115d7fa22eeb3cf4d6e899963fd5f9d10ada47b94c34b2d601965731bba164c"} Jan 20 17:35:30 crc kubenswrapper[4558]: I0120 17:35:30.166634 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=1.575633704 podStartE2EDuration="5.166620992s" podCreationTimestamp="2026-01-20 17:35:25 +0000 UTC" firstStartedPulling="2026-01-20 17:35:26.237594314 +0000 UTC m=+3219.997932281" lastFinishedPulling="2026-01-20 17:35:29.828581601 +0000 UTC m=+3223.588919569" observedRunningTime="2026-01-20 17:35:30.158677366 +0000 UTC m=+3223.919015332" watchObservedRunningTime="2026-01-20 17:35:30.166620992 +0000 UTC m=+3223.926958960" Jan 20 17:35:30 crc kubenswrapper[4558]: I0120 17:35:30.167944 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/placement-64c94c78bb-k28q4" podUID="f64d9468-aa9e-4acc-b037-3ba28425e746" containerName="placement-log" containerID="cri-o://c0f31e188f634a0031c26d8014a69c7b255c6dbdd3d161351e4143d939fc8401" gracePeriod=30 Jan 20 17:35:30 crc kubenswrapper[4558]: I0120 17:35:30.168199 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-5577476b98-wgg9z" event={"ID":"06b8e2bb-a133-4a4b-92dd-12cf20ee4300","Type":"ContainerStarted","Data":"1d3016d6f66a0a15d2f54946e32a1f1391b54b015baba44a9003218a3420e045"} Jan 20 17:35:30 crc kubenswrapper[4558]: I0120 17:35:30.168242 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-5577476b98-wgg9z" event={"ID":"06b8e2bb-a133-4a4b-92dd-12cf20ee4300","Type":"ContainerStarted","Data":"cb36bad8596228046899d22f8d26fc3bac1a3290c060da3fe4c6163f9ec94ebb"} Jan 20 17:35:30 crc kubenswrapper[4558]: I0120 17:35:30.168253 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-5577476b98-wgg9z" event={"ID":"06b8e2bb-a133-4a4b-92dd-12cf20ee4300","Type":"ContainerStarted","Data":"3b1b219bd9663be2d1a49ac8851cb6f5be898ae04c21007da18b9e4e44ccdd94"} Jan 20 17:35:30 crc kubenswrapper[4558]: I0120 17:35:30.168278 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/placement-64c94c78bb-k28q4" Jan 20 17:35:30 crc kubenswrapper[4558]: I0120 17:35:30.168312 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/placement-64c94c78bb-k28q4" podUID="f64d9468-aa9e-4acc-b037-3ba28425e746" containerName="placement-api" containerID="cri-o://eaef017e07f368be1fe47035fb3e78dded3804a550df9f35801ce0981ab0accd" gracePeriod=30 Jan 20 17:35:30 crc kubenswrapper[4558]: I0120 17:35:30.168480 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/placement-5577476b98-wgg9z" Jan 20 17:35:30 crc kubenswrapper[4558]: I0120 17:35:30.168510 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/placement-5577476b98-wgg9z" Jan 20 17:35:30 crc 
kubenswrapper[4558]: I0120 17:35:30.183635 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/openstack-cell1-galera-0" podStartSLOduration=5.183626319 podStartE2EDuration="5.183626319s" podCreationTimestamp="2026-01-20 17:35:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:35:30.17473492 +0000 UTC m=+3223.935072886" watchObservedRunningTime="2026-01-20 17:35:30.183626319 +0000 UTC m=+3223.943964286" Jan 20 17:35:30 crc kubenswrapper[4558]: I0120 17:35:30.203132 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/placement-5577476b98-wgg9z" podStartSLOduration=2.203114484 podStartE2EDuration="2.203114484s" podCreationTimestamp="2026-01-20 17:35:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:35:30.192518068 +0000 UTC m=+3223.952856036" watchObservedRunningTime="2026-01-20 17:35:30.203114484 +0000 UTC m=+3223.963452451" Jan 20 17:35:30 crc kubenswrapper[4558]: I0120 17:35:30.212111 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/openstack-galera-0" podStartSLOduration=5.212100702 podStartE2EDuration="5.212100702s" podCreationTimestamp="2026-01-20 17:35:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:35:30.207771952 +0000 UTC m=+3223.968109919" watchObservedRunningTime="2026-01-20 17:35:30.212100702 +0000 UTC m=+3223.972438669" Jan 20 17:35:30 crc kubenswrapper[4558]: I0120 17:35:30.683959 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-64c94c78bb-k28q4" Jan 20 17:35:30 crc kubenswrapper[4558]: I0120 17:35:30.740119 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f64d9468-aa9e-4acc-b037-3ba28425e746-combined-ca-bundle\") pod \"f64d9468-aa9e-4acc-b037-3ba28425e746\" (UID: \"f64d9468-aa9e-4acc-b037-3ba28425e746\") " Jan 20 17:35:30 crc kubenswrapper[4558]: I0120 17:35:30.740217 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f64d9468-aa9e-4acc-b037-3ba28425e746-internal-tls-certs\") pod \"f64d9468-aa9e-4acc-b037-3ba28425e746\" (UID: \"f64d9468-aa9e-4acc-b037-3ba28425e746\") " Jan 20 17:35:30 crc kubenswrapper[4558]: I0120 17:35:30.740282 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xqb7p\" (UniqueName: \"kubernetes.io/projected/f64d9468-aa9e-4acc-b037-3ba28425e746-kube-api-access-xqb7p\") pod \"f64d9468-aa9e-4acc-b037-3ba28425e746\" (UID: \"f64d9468-aa9e-4acc-b037-3ba28425e746\") " Jan 20 17:35:30 crc kubenswrapper[4558]: I0120 17:35:30.740324 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f64d9468-aa9e-4acc-b037-3ba28425e746-public-tls-certs\") pod \"f64d9468-aa9e-4acc-b037-3ba28425e746\" (UID: \"f64d9468-aa9e-4acc-b037-3ba28425e746\") " Jan 20 17:35:30 crc kubenswrapper[4558]: I0120 17:35:30.741050 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f64d9468-aa9e-4acc-b037-3ba28425e746-scripts\") pod \"f64d9468-aa9e-4acc-b037-3ba28425e746\" (UID: \"f64d9468-aa9e-4acc-b037-3ba28425e746\") " Jan 20 17:35:30 crc kubenswrapper[4558]: I0120 17:35:30.741087 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f64d9468-aa9e-4acc-b037-3ba28425e746-config-data\") pod \"f64d9468-aa9e-4acc-b037-3ba28425e746\" (UID: \"f64d9468-aa9e-4acc-b037-3ba28425e746\") " Jan 20 17:35:30 crc kubenswrapper[4558]: I0120 17:35:30.741110 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f64d9468-aa9e-4acc-b037-3ba28425e746-logs\") pod \"f64d9468-aa9e-4acc-b037-3ba28425e746\" (UID: \"f64d9468-aa9e-4acc-b037-3ba28425e746\") " Jan 20 17:35:30 crc kubenswrapper[4558]: I0120 17:35:30.743733 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f64d9468-aa9e-4acc-b037-3ba28425e746-logs" (OuterVolumeSpecName: "logs") pod "f64d9468-aa9e-4acc-b037-3ba28425e746" (UID: "f64d9468-aa9e-4acc-b037-3ba28425e746"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:35:30 crc kubenswrapper[4558]: I0120 17:35:30.746454 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f64d9468-aa9e-4acc-b037-3ba28425e746-scripts" (OuterVolumeSpecName: "scripts") pod "f64d9468-aa9e-4acc-b037-3ba28425e746" (UID: "f64d9468-aa9e-4acc-b037-3ba28425e746"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:30 crc kubenswrapper[4558]: I0120 17:35:30.749404 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f64d9468-aa9e-4acc-b037-3ba28425e746-kube-api-access-xqb7p" (OuterVolumeSpecName: "kube-api-access-xqb7p") pod "f64d9468-aa9e-4acc-b037-3ba28425e746" (UID: "f64d9468-aa9e-4acc-b037-3ba28425e746"). InnerVolumeSpecName "kube-api-access-xqb7p". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:35:30 crc kubenswrapper[4558]: I0120 17:35:30.797267 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f64d9468-aa9e-4acc-b037-3ba28425e746-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f64d9468-aa9e-4acc-b037-3ba28425e746" (UID: "f64d9468-aa9e-4acc-b037-3ba28425e746"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:30 crc kubenswrapper[4558]: I0120 17:35:30.803583 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f64d9468-aa9e-4acc-b037-3ba28425e746-config-data" (OuterVolumeSpecName: "config-data") pod "f64d9468-aa9e-4acc-b037-3ba28425e746" (UID: "f64d9468-aa9e-4acc-b037-3ba28425e746"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:30 crc kubenswrapper[4558]: I0120 17:35:30.831335 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f64d9468-aa9e-4acc-b037-3ba28425e746-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "f64d9468-aa9e-4acc-b037-3ba28425e746" (UID: "f64d9468-aa9e-4acc-b037-3ba28425e746"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:30 crc kubenswrapper[4558]: I0120 17:35:30.842149 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f64d9468-aa9e-4acc-b037-3ba28425e746-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "f64d9468-aa9e-4acc-b037-3ba28425e746" (UID: "f64d9468-aa9e-4acc-b037-3ba28425e746"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:30 crc kubenswrapper[4558]: I0120 17:35:30.843340 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f64d9468-aa9e-4acc-b037-3ba28425e746-internal-tls-certs\") pod \"f64d9468-aa9e-4acc-b037-3ba28425e746\" (UID: \"f64d9468-aa9e-4acc-b037-3ba28425e746\") " Jan 20 17:35:30 crc kubenswrapper[4558]: W0120 17:35:30.844019 4558 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/f64d9468-aa9e-4acc-b037-3ba28425e746/volumes/kubernetes.io~secret/internal-tls-certs Jan 20 17:35:30 crc kubenswrapper[4558]: I0120 17:35:30.844069 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f64d9468-aa9e-4acc-b037-3ba28425e746-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "f64d9468-aa9e-4acc-b037-3ba28425e746" (UID: "f64d9468-aa9e-4acc-b037-3ba28425e746"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:30 crc kubenswrapper[4558]: I0120 17:35:30.846024 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f64d9468-aa9e-4acc-b037-3ba28425e746-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:30 crc kubenswrapper[4558]: I0120 17:35:30.846059 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xqb7p\" (UniqueName: \"kubernetes.io/projected/f64d9468-aa9e-4acc-b037-3ba28425e746-kube-api-access-xqb7p\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:30 crc kubenswrapper[4558]: I0120 17:35:30.846077 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f64d9468-aa9e-4acc-b037-3ba28425e746-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:30 crc kubenswrapper[4558]: I0120 17:35:30.846092 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f64d9468-aa9e-4acc-b037-3ba28425e746-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:30 crc kubenswrapper[4558]: I0120 17:35:30.846103 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f64d9468-aa9e-4acc-b037-3ba28425e746-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:30 crc kubenswrapper[4558]: I0120 17:35:30.846113 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f64d9468-aa9e-4acc-b037-3ba28425e746-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:30 crc kubenswrapper[4558]: I0120 17:35:30.846124 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f64d9468-aa9e-4acc-b037-3ba28425e746-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:31 crc kubenswrapper[4558]: I0120 17:35:31.181536 4558 generic.go:334] "Generic (PLEG): container finished" podID="f64d9468-aa9e-4acc-b037-3ba28425e746" containerID="eaef017e07f368be1fe47035fb3e78dded3804a550df9f35801ce0981ab0accd" exitCode=0 Jan 20 17:35:31 crc kubenswrapper[4558]: I0120 17:35:31.181975 4558 generic.go:334] "Generic (PLEG): container finished" podID="f64d9468-aa9e-4acc-b037-3ba28425e746" containerID="c0f31e188f634a0031c26d8014a69c7b255c6dbdd3d161351e4143d939fc8401" exitCode=143 Jan 20 17:35:31 crc kubenswrapper[4558]: I0120 17:35:31.181678 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-64c94c78bb-k28q4" Jan 20 17:35:31 crc kubenswrapper[4558]: I0120 17:35:31.181616 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-64c94c78bb-k28q4" event={"ID":"f64d9468-aa9e-4acc-b037-3ba28425e746","Type":"ContainerDied","Data":"eaef017e07f368be1fe47035fb3e78dded3804a550df9f35801ce0981ab0accd"} Jan 20 17:35:31 crc kubenswrapper[4558]: I0120 17:35:31.185461 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-64c94c78bb-k28q4" event={"ID":"f64d9468-aa9e-4acc-b037-3ba28425e746","Type":"ContainerDied","Data":"c0f31e188f634a0031c26d8014a69c7b255c6dbdd3d161351e4143d939fc8401"} Jan 20 17:35:31 crc kubenswrapper[4558]: I0120 17:35:31.185490 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-64c94c78bb-k28q4" event={"ID":"f64d9468-aa9e-4acc-b037-3ba28425e746","Type":"ContainerDied","Data":"95ce9292d9e4a721a5da998e13c03061ee1ae9cf7f73b3a31693dd6dd40ab05b"} Jan 20 17:35:31 crc kubenswrapper[4558]: I0120 17:35:31.185522 4558 scope.go:117] "RemoveContainer" containerID="eaef017e07f368be1fe47035fb3e78dded3804a550df9f35801ce0981ab0accd" Jan 20 17:35:31 crc kubenswrapper[4558]: I0120 17:35:31.218792 4558 scope.go:117] "RemoveContainer" containerID="c0f31e188f634a0031c26d8014a69c7b255c6dbdd3d161351e4143d939fc8401" Jan 20 17:35:31 crc kubenswrapper[4558]: I0120 17:35:31.221994 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-64c94c78bb-k28q4"] Jan 20 17:35:31 crc kubenswrapper[4558]: I0120 17:35:31.229749 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/placement-64c94c78bb-k28q4"] Jan 20 17:35:31 crc kubenswrapper[4558]: I0120 17:35:31.239871 4558 scope.go:117] "RemoveContainer" containerID="eaef017e07f368be1fe47035fb3e78dded3804a550df9f35801ce0981ab0accd" Jan 20 17:35:31 crc kubenswrapper[4558]: E0120 17:35:31.240236 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eaef017e07f368be1fe47035fb3e78dded3804a550df9f35801ce0981ab0accd\": container with ID starting with eaef017e07f368be1fe47035fb3e78dded3804a550df9f35801ce0981ab0accd not found: ID does not exist" containerID="eaef017e07f368be1fe47035fb3e78dded3804a550df9f35801ce0981ab0accd" Jan 20 17:35:31 crc kubenswrapper[4558]: I0120 17:35:31.240301 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eaef017e07f368be1fe47035fb3e78dded3804a550df9f35801ce0981ab0accd"} err="failed to get container status \"eaef017e07f368be1fe47035fb3e78dded3804a550df9f35801ce0981ab0accd\": rpc error: code = NotFound desc = could not find container \"eaef017e07f368be1fe47035fb3e78dded3804a550df9f35801ce0981ab0accd\": container with ID starting with eaef017e07f368be1fe47035fb3e78dded3804a550df9f35801ce0981ab0accd not found: ID does not exist" Jan 20 17:35:31 crc kubenswrapper[4558]: I0120 17:35:31.240325 4558 scope.go:117] "RemoveContainer" containerID="c0f31e188f634a0031c26d8014a69c7b255c6dbdd3d161351e4143d939fc8401" Jan 20 17:35:31 crc kubenswrapper[4558]: E0120 17:35:31.240635 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c0f31e188f634a0031c26d8014a69c7b255c6dbdd3d161351e4143d939fc8401\": container with ID starting with c0f31e188f634a0031c26d8014a69c7b255c6dbdd3d161351e4143d939fc8401 not found: ID does not exist" 
containerID="c0f31e188f634a0031c26d8014a69c7b255c6dbdd3d161351e4143d939fc8401" Jan 20 17:35:31 crc kubenswrapper[4558]: I0120 17:35:31.240665 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c0f31e188f634a0031c26d8014a69c7b255c6dbdd3d161351e4143d939fc8401"} err="failed to get container status \"c0f31e188f634a0031c26d8014a69c7b255c6dbdd3d161351e4143d939fc8401\": rpc error: code = NotFound desc = could not find container \"c0f31e188f634a0031c26d8014a69c7b255c6dbdd3d161351e4143d939fc8401\": container with ID starting with c0f31e188f634a0031c26d8014a69c7b255c6dbdd3d161351e4143d939fc8401 not found: ID does not exist" Jan 20 17:35:31 crc kubenswrapper[4558]: I0120 17:35:31.240687 4558 scope.go:117] "RemoveContainer" containerID="eaef017e07f368be1fe47035fb3e78dded3804a550df9f35801ce0981ab0accd" Jan 20 17:35:31 crc kubenswrapper[4558]: I0120 17:35:31.240926 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eaef017e07f368be1fe47035fb3e78dded3804a550df9f35801ce0981ab0accd"} err="failed to get container status \"eaef017e07f368be1fe47035fb3e78dded3804a550df9f35801ce0981ab0accd\": rpc error: code = NotFound desc = could not find container \"eaef017e07f368be1fe47035fb3e78dded3804a550df9f35801ce0981ab0accd\": container with ID starting with eaef017e07f368be1fe47035fb3e78dded3804a550df9f35801ce0981ab0accd not found: ID does not exist" Jan 20 17:35:31 crc kubenswrapper[4558]: I0120 17:35:31.240962 4558 scope.go:117] "RemoveContainer" containerID="c0f31e188f634a0031c26d8014a69c7b255c6dbdd3d161351e4143d939fc8401" Jan 20 17:35:31 crc kubenswrapper[4558]: I0120 17:35:31.241215 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c0f31e188f634a0031c26d8014a69c7b255c6dbdd3d161351e4143d939fc8401"} err="failed to get container status \"c0f31e188f634a0031c26d8014a69c7b255c6dbdd3d161351e4143d939fc8401\": rpc error: code = NotFound desc = could not find container \"c0f31e188f634a0031c26d8014a69c7b255c6dbdd3d161351e4143d939fc8401\": container with ID starting with c0f31e188f634a0031c26d8014a69c7b255c6dbdd3d161351e4143d939fc8401 not found: ID does not exist" Jan 20 17:35:32 crc kubenswrapper[4558]: I0120 17:35:32.197068 4558 generic.go:334] "Generic (PLEG): container finished" podID="4bd47eac-4090-4fc1-91c4-553ebd84964d" containerID="f5234b6873e40d97dab9fc0f95b0d08db6c6a9b663a804a6d9a45e0c39abccc9" exitCode=0 Jan 20 17:35:32 crc kubenswrapper[4558]: I0120 17:35:32.197108 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"4bd47eac-4090-4fc1-91c4-553ebd84964d","Type":"ContainerDied","Data":"f5234b6873e40d97dab9fc0f95b0d08db6c6a9b663a804a6d9a45e0c39abccc9"} Jan 20 17:35:32 crc kubenswrapper[4558]: I0120 17:35:32.287521 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:35:32 crc kubenswrapper[4558]: I0120 17:35:32.287987 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-api-0" podUID="75547bf4-a6ea-45b7-a6a8-ba2955995008" containerName="cinder-api-log" containerID="cri-o://37e2905fb8733b28399a61b7869ba9f115a3ee071f8f2f9d43f5d0e1ebc3969d" gracePeriod=30 Jan 20 17:35:32 crc kubenswrapper[4558]: I0120 17:35:32.288051 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-api-0" podUID="75547bf4-a6ea-45b7-a6a8-ba2955995008" 
containerName="cinder-api" containerID="cri-o://828468f6f165adb235da38999ede843b78146540c25db7da9e9afb21516374a2" gracePeriod=30 Jan 20 17:35:32 crc kubenswrapper[4558]: I0120 17:35:32.608452 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f64d9468-aa9e-4acc-b037-3ba28425e746" path="/var/lib/kubelet/pods/f64d9468-aa9e-4acc-b037-3ba28425e746/volumes" Jan 20 17:35:33 crc kubenswrapper[4558]: I0120 17:35:33.220437 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"4bd47eac-4090-4fc1-91c4-553ebd84964d","Type":"ContainerStarted","Data":"8fb64177ca582dd6f77f7548f90b08a99b652fb60686912cfa1ae24e3d6b7093"} Jan 20 17:35:33 crc kubenswrapper[4558]: I0120 17:35:33.226110 4558 generic.go:334] "Generic (PLEG): container finished" podID="75547bf4-a6ea-45b7-a6a8-ba2955995008" containerID="37e2905fb8733b28399a61b7869ba9f115a3ee071f8f2f9d43f5d0e1ebc3969d" exitCode=143 Jan 20 17:35:33 crc kubenswrapper[4558]: I0120 17:35:33.226191 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"75547bf4-a6ea-45b7-a6a8-ba2955995008","Type":"ContainerDied","Data":"37e2905fb8733b28399a61b7869ba9f115a3ee071f8f2f9d43f5d0e1ebc3969d"} Jan 20 17:35:34 crc kubenswrapper[4558]: I0120 17:35:34.847063 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:35:34 crc kubenswrapper[4558]: I0120 17:35:34.847600 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovn-northd-0" podUID="76c1fbce-d8ff-4469-85cb-11124a82402a" containerName="ovn-northd" containerID="cri-o://79b61ba8ff702ce7bd226d54ae1f42ddd1006b9082d80760f2a349aad7555bc4" gracePeriod=30 Jan 20 17:35:34 crc kubenswrapper[4558]: I0120 17:35:34.847655 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovn-northd-0" podUID="76c1fbce-d8ff-4469-85cb-11124a82402a" containerName="openstack-network-exporter" containerID="cri-o://610a512ff345e04683281515fbc1b1706ebf44507e8dbe3063d14bccc948cae5" gracePeriod=30 Jan 20 17:35:34 crc kubenswrapper[4558]: I0120 17:35:34.856990 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:35:34 crc kubenswrapper[4558]: I0120 17:35:34.857401 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovsdbserver-nb-0" podUID="dcbebc6e-c10d-4a59-9bdb-cdf2678250f8" containerName="openstack-network-exporter" containerID="cri-o://bd371b5b829a5f2506757dc4a8010d15cd571ccffeeaa8564edc735f093b7de0" gracePeriod=300 Jan 20 17:35:34 crc kubenswrapper[4558]: I0120 17:35:34.907863 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovsdbserver-nb-0" podUID="dcbebc6e-c10d-4a59-9bdb-cdf2678250f8" containerName="ovsdbserver-nb" containerID="cri-o://2766083888bd587166d18906030cf31145864ecd777e795cf63b0f41b9063b9e" gracePeriod=300 Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.138803 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.139467 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovsdbserver-sb-0" podUID="637234b1-5373-46ac-99ea-be390202b982" containerName="openstack-network-exporter" 
containerID="cri-o://17df1b24593c3011ada27218df6bb80be57a9d60832634eea50b0c271601a4f4" gracePeriod=300 Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.198975 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovsdbserver-sb-0" podUID="637234b1-5373-46ac-99ea-be390202b982" containerName="ovsdbserver-sb" containerID="cri-o://93e11ff2740df41a59ff853fde3d9b9c61a53b8cc77a0513ff1cbea5527314b8" gracePeriod=300 Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.272002 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-nb-0_dcbebc6e-c10d-4a59-9bdb-cdf2678250f8/ovsdbserver-nb/0.log" Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.272380 4558 generic.go:334] "Generic (PLEG): container finished" podID="dcbebc6e-c10d-4a59-9bdb-cdf2678250f8" containerID="bd371b5b829a5f2506757dc4a8010d15cd571ccffeeaa8564edc735f093b7de0" exitCode=2 Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.272427 4558 generic.go:334] "Generic (PLEG): container finished" podID="dcbebc6e-c10d-4a59-9bdb-cdf2678250f8" containerID="2766083888bd587166d18906030cf31145864ecd777e795cf63b0f41b9063b9e" exitCode=143 Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.272538 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"dcbebc6e-c10d-4a59-9bdb-cdf2678250f8","Type":"ContainerDied","Data":"bd371b5b829a5f2506757dc4a8010d15cd571ccffeeaa8564edc735f093b7de0"} Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.272592 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"dcbebc6e-c10d-4a59-9bdb-cdf2678250f8","Type":"ContainerDied","Data":"2766083888bd587166d18906030cf31145864ecd777e795cf63b0f41b9063b9e"} Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.283618 4558 generic.go:334] "Generic (PLEG): container finished" podID="76c1fbce-d8ff-4469-85cb-11124a82402a" containerID="610a512ff345e04683281515fbc1b1706ebf44507e8dbe3063d14bccc948cae5" exitCode=2 Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.283698 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"76c1fbce-d8ff-4469-85cb-11124a82402a","Type":"ContainerDied","Data":"610a512ff345e04683281515fbc1b1706ebf44507e8dbe3063d14bccc948cae5"} Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.351613 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-nb-0_dcbebc6e-c10d-4a59-9bdb-cdf2678250f8/ovsdbserver-nb/0.log" Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.351712 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.443906 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/cinder-api-0" podUID="75547bf4-a6ea-45b7-a6a8-ba2955995008" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.1.240:8776/healthcheck\": read tcp 10.217.0.2:37352->10.217.1.240:8776: read: connection reset by peer" Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.453522 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/dcbebc6e-c10d-4a59-9bdb-cdf2678250f8-ovsdb-rundir\") pod \"dcbebc6e-c10d-4a59-9bdb-cdf2678250f8\" (UID: \"dcbebc6e-c10d-4a59-9bdb-cdf2678250f8\") " Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.453650 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/dcbebc6e-c10d-4a59-9bdb-cdf2678250f8-metrics-certs-tls-certs\") pod \"dcbebc6e-c10d-4a59-9bdb-cdf2678250f8\" (UID: \"dcbebc6e-c10d-4a59-9bdb-cdf2678250f8\") " Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.453691 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/dcbebc6e-c10d-4a59-9bdb-cdf2678250f8-scripts\") pod \"dcbebc6e-c10d-4a59-9bdb-cdf2678250f8\" (UID: \"dcbebc6e-c10d-4a59-9bdb-cdf2678250f8\") " Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.453712 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dcbebc6e-c10d-4a59-9bdb-cdf2678250f8-combined-ca-bundle\") pod \"dcbebc6e-c10d-4a59-9bdb-cdf2678250f8\" (UID: \"dcbebc6e-c10d-4a59-9bdb-cdf2678250f8\") " Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.453782 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6mbd9\" (UniqueName: \"kubernetes.io/projected/dcbebc6e-c10d-4a59-9bdb-cdf2678250f8-kube-api-access-6mbd9\") pod \"dcbebc6e-c10d-4a59-9bdb-cdf2678250f8\" (UID: \"dcbebc6e-c10d-4a59-9bdb-cdf2678250f8\") " Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.453829 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dcbebc6e-c10d-4a59-9bdb-cdf2678250f8-config\") pod \"dcbebc6e-c10d-4a59-9bdb-cdf2678250f8\" (UID: \"dcbebc6e-c10d-4a59-9bdb-cdf2678250f8\") " Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.453975 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-nb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"dcbebc6e-c10d-4a59-9bdb-cdf2678250f8\" (UID: \"dcbebc6e-c10d-4a59-9bdb-cdf2678250f8\") " Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.454006 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/dcbebc6e-c10d-4a59-9bdb-cdf2678250f8-ovsdbserver-nb-tls-certs\") pod \"dcbebc6e-c10d-4a59-9bdb-cdf2678250f8\" (UID: \"dcbebc6e-c10d-4a59-9bdb-cdf2678250f8\") " Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.454475 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dcbebc6e-c10d-4a59-9bdb-cdf2678250f8-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod 
"dcbebc6e-c10d-4a59-9bdb-cdf2678250f8" (UID: "dcbebc6e-c10d-4a59-9bdb-cdf2678250f8"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.454804 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dcbebc6e-c10d-4a59-9bdb-cdf2678250f8-scripts" (OuterVolumeSpecName: "scripts") pod "dcbebc6e-c10d-4a59-9bdb-cdf2678250f8" (UID: "dcbebc6e-c10d-4a59-9bdb-cdf2678250f8"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.455003 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dcbebc6e-c10d-4a59-9bdb-cdf2678250f8-config" (OuterVolumeSpecName: "config") pod "dcbebc6e-c10d-4a59-9bdb-cdf2678250f8" (UID: "dcbebc6e-c10d-4a59-9bdb-cdf2678250f8"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.455575 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dcbebc6e-c10d-4a59-9bdb-cdf2678250f8-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.455591 4558 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/dcbebc6e-c10d-4a59-9bdb-cdf2678250f8-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.455601 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/dcbebc6e-c10d-4a59-9bdb-cdf2678250f8-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.459964 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage13-crc" (OuterVolumeSpecName: "ovndbcluster-nb-etc-ovn") pod "dcbebc6e-c10d-4a59-9bdb-cdf2678250f8" (UID: "dcbebc6e-c10d-4a59-9bdb-cdf2678250f8"). InnerVolumeSpecName "local-storage13-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.459972 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dcbebc6e-c10d-4a59-9bdb-cdf2678250f8-kube-api-access-6mbd9" (OuterVolumeSpecName: "kube-api-access-6mbd9") pod "dcbebc6e-c10d-4a59-9bdb-cdf2678250f8" (UID: "dcbebc6e-c10d-4a59-9bdb-cdf2678250f8"). InnerVolumeSpecName "kube-api-access-6mbd9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.480655 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dcbebc6e-c10d-4a59-9bdb-cdf2678250f8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dcbebc6e-c10d-4a59-9bdb-cdf2678250f8" (UID: "dcbebc6e-c10d-4a59-9bdb-cdf2678250f8"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.520700 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.520755 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.522664 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dcbebc6e-c10d-4a59-9bdb-cdf2678250f8-ovsdbserver-nb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-nb-tls-certs") pod "dcbebc6e-c10d-4a59-9bdb-cdf2678250f8" (UID: "dcbebc6e-c10d-4a59-9bdb-cdf2678250f8"). InnerVolumeSpecName "ovsdbserver-nb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.524887 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dcbebc6e-c10d-4a59-9bdb-cdf2678250f8-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "dcbebc6e-c10d-4a59-9bdb-cdf2678250f8" (UID: "dcbebc6e-c10d-4a59-9bdb-cdf2678250f8"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.561640 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") on node \"crc\" " Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.561683 4558 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/dcbebc6e-c10d-4a59-9bdb-cdf2678250f8-ovsdbserver-nb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.561696 4558 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/dcbebc6e-c10d-4a59-9bdb-cdf2678250f8-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.561707 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dcbebc6e-c10d-4a59-9bdb-cdf2678250f8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.561718 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6mbd9\" (UniqueName: \"kubernetes.io/projected/dcbebc6e-c10d-4a59-9bdb-cdf2678250f8-kube-api-access-6mbd9\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.565361 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-sb-0_637234b1-5373-46ac-99ea-be390202b982/ovsdbserver-sb/0.log" Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.565580 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.581069 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage13-crc" (UniqueName: "kubernetes.io/local-volume/local-storage13-crc") on node "crc" Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.608569 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.608624 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.633602 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.662920 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m9vtc\" (UniqueName: \"kubernetes.io/projected/637234b1-5373-46ac-99ea-be390202b982-kube-api-access-m9vtc\") pod \"637234b1-5373-46ac-99ea-be390202b982\" (UID: \"637234b1-5373-46ac-99ea-be390202b982\") " Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.662968 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/637234b1-5373-46ac-99ea-be390202b982-scripts\") pod \"637234b1-5373-46ac-99ea-be390202b982\" (UID: \"637234b1-5373-46ac-99ea-be390202b982\") " Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.663021 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/637234b1-5373-46ac-99ea-be390202b982-ovsdbserver-sb-tls-certs\") pod \"637234b1-5373-46ac-99ea-be390202b982\" (UID: \"637234b1-5373-46ac-99ea-be390202b982\") " Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.663087 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-sb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"637234b1-5373-46ac-99ea-be390202b982\" (UID: \"637234b1-5373-46ac-99ea-be390202b982\") " Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.663112 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/637234b1-5373-46ac-99ea-be390202b982-metrics-certs-tls-certs\") pod \"637234b1-5373-46ac-99ea-be390202b982\" (UID: \"637234b1-5373-46ac-99ea-be390202b982\") " Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.663201 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/637234b1-5373-46ac-99ea-be390202b982-combined-ca-bundle\") pod \"637234b1-5373-46ac-99ea-be390202b982\" (UID: \"637234b1-5373-46ac-99ea-be390202b982\") " Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.663237 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/637234b1-5373-46ac-99ea-be390202b982-ovsdb-rundir\") pod \"637234b1-5373-46ac-99ea-be390202b982\" (UID: \"637234b1-5373-46ac-99ea-be390202b982\") " Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.663312 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/637234b1-5373-46ac-99ea-be390202b982-config\") pod \"637234b1-5373-46ac-99ea-be390202b982\" (UID: \"637234b1-5373-46ac-99ea-be390202b982\") " Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.663896 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.664856 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/637234b1-5373-46ac-99ea-be390202b982-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "637234b1-5373-46ac-99ea-be390202b982" (UID: "637234b1-5373-46ac-99ea-be390202b982"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.665349 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/637234b1-5373-46ac-99ea-be390202b982-config" (OuterVolumeSpecName: "config") pod "637234b1-5373-46ac-99ea-be390202b982" (UID: "637234b1-5373-46ac-99ea-be390202b982"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.665861 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/637234b1-5373-46ac-99ea-be390202b982-scripts" (OuterVolumeSpecName: "scripts") pod "637234b1-5373-46ac-99ea-be390202b982" (UID: "637234b1-5373-46ac-99ea-be390202b982"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.669022 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "ovndbcluster-sb-etc-ovn") pod "637234b1-5373-46ac-99ea-be390202b982" (UID: "637234b1-5373-46ac-99ea-be390202b982"). InnerVolumeSpecName "local-storage04-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.675361 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/637234b1-5373-46ac-99ea-be390202b982-kube-api-access-m9vtc" (OuterVolumeSpecName: "kube-api-access-m9vtc") pod "637234b1-5373-46ac-99ea-be390202b982" (UID: "637234b1-5373-46ac-99ea-be390202b982"). InnerVolumeSpecName "kube-api-access-m9vtc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.692535 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/637234b1-5373-46ac-99ea-be390202b982-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "637234b1-5373-46ac-99ea-be390202b982" (UID: "637234b1-5373-46ac-99ea-be390202b982"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.695683 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.743409 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/637234b1-5373-46ac-99ea-be390202b982-ovsdbserver-sb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-sb-tls-certs") pod "637234b1-5373-46ac-99ea-be390202b982" (UID: "637234b1-5373-46ac-99ea-be390202b982"). InnerVolumeSpecName "ovsdbserver-sb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.748399 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/637234b1-5373-46ac-99ea-be390202b982-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "637234b1-5373-46ac-99ea-be390202b982" (UID: "637234b1-5373-46ac-99ea-be390202b982"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.748834 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.766345 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/637234b1-5373-46ac-99ea-be390202b982-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.766373 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m9vtc\" (UniqueName: \"kubernetes.io/projected/637234b1-5373-46ac-99ea-be390202b982-kube-api-access-m9vtc\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.766416 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/637234b1-5373-46ac-99ea-be390202b982-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.766427 4558 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/637234b1-5373-46ac-99ea-be390202b982-ovsdbserver-sb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.766447 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" " Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.766479 4558 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/637234b1-5373-46ac-99ea-be390202b982-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.766488 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/637234b1-5373-46ac-99ea-be390202b982-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.766497 4558 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/637234b1-5373-46ac-99ea-be390202b982-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 
17:35:35.790474 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc" Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.868019 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.876587 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.877061 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="d78c49cb-72eb-4def-9c40-b16128c74418" containerName="glance-log" containerID="cri-o://c9fcf8291fd575a7777eae1e5a6b38407c0f8527d5473b4afc554f44d2c8098e" gracePeriod=30 Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.877304 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="d78c49cb-72eb-4def-9c40-b16128c74418" containerName="glance-httpd" containerID="cri-o://8da85592700c5420bc1d1bbe307f08e0a71e1e162220d3e5dbe3bda34763061f" gracePeriod=30 Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.887608 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.925132 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.925503 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="21c81b0a-c70b-4d57-bea0-96e4840af7dd" containerName="glance-log" containerID="cri-o://21722b5c1856dd9260830023e156411f09aabc0fe431ae02c3192f233463c11f" gracePeriod=30 Jan 20 17:35:35 crc kubenswrapper[4558]: I0120 17:35:35.926405 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="21c81b0a-c70b-4d57-bea0-96e4840af7dd" containerName="glance-httpd" containerID="cri-o://5f7d9bee57d3863532f7d1c9fc246fa579caa23eddd68f1cd8f774b112683665" gracePeriod=30 Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:35.999182 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-6874444f77-w92bj"] Jan 20 17:35:36 crc kubenswrapper[4558]: E0120 17:35:35.999830 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dcbebc6e-c10d-4a59-9bdb-cdf2678250f8" containerName="openstack-network-exporter" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:35.999852 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="dcbebc6e-c10d-4a59-9bdb-cdf2678250f8" containerName="openstack-network-exporter" Jan 20 17:35:36 crc kubenswrapper[4558]: E0120 17:35:35.999888 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="637234b1-5373-46ac-99ea-be390202b982" containerName="openstack-network-exporter" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:35.999894 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="637234b1-5373-46ac-99ea-be390202b982" containerName="openstack-network-exporter" Jan 20 17:35:36 crc kubenswrapper[4558]: E0120 17:35:35.999907 4558 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="f64d9468-aa9e-4acc-b037-3ba28425e746" containerName="placement-api" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:35.999913 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f64d9468-aa9e-4acc-b037-3ba28425e746" containerName="placement-api" Jan 20 17:35:36 crc kubenswrapper[4558]: E0120 17:35:35.999926 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="637234b1-5373-46ac-99ea-be390202b982" containerName="ovsdbserver-sb" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:35.999932 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="637234b1-5373-46ac-99ea-be390202b982" containerName="ovsdbserver-sb" Jan 20 17:35:36 crc kubenswrapper[4558]: E0120 17:35:35.999942 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="75547bf4-a6ea-45b7-a6a8-ba2955995008" containerName="cinder-api" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:35.999948 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="75547bf4-a6ea-45b7-a6a8-ba2955995008" containerName="cinder-api" Jan 20 17:35:36 crc kubenswrapper[4558]: E0120 17:35:35.999959 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="75547bf4-a6ea-45b7-a6a8-ba2955995008" containerName="cinder-api-log" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:35.999964 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="75547bf4-a6ea-45b7-a6a8-ba2955995008" containerName="cinder-api-log" Jan 20 17:35:36 crc kubenswrapper[4558]: E0120 17:35:35.999977 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dcbebc6e-c10d-4a59-9bdb-cdf2678250f8" containerName="ovsdbserver-nb" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:35.999984 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="dcbebc6e-c10d-4a59-9bdb-cdf2678250f8" containerName="ovsdbserver-nb" Jan 20 17:35:36 crc kubenswrapper[4558]: E0120 17:35:36.000000 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f64d9468-aa9e-4acc-b037-3ba28425e746" containerName="placement-log" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.000007 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f64d9468-aa9e-4acc-b037-3ba28425e746" containerName="placement-log" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.000196 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="75547bf4-a6ea-45b7-a6a8-ba2955995008" containerName="cinder-api" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.000210 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="dcbebc6e-c10d-4a59-9bdb-cdf2678250f8" containerName="openstack-network-exporter" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.000226 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="637234b1-5373-46ac-99ea-be390202b982" containerName="ovsdbserver-sb" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.000234 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f64d9468-aa9e-4acc-b037-3ba28425e746" containerName="placement-api" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.000241 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="637234b1-5373-46ac-99ea-be390202b982" containerName="openstack-network-exporter" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.000251 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="75547bf4-a6ea-45b7-a6a8-ba2955995008" containerName="cinder-api-log" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 
17:35:36.000274 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f64d9468-aa9e-4acc-b037-3ba28425e746" containerName="placement-log" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.000286 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="dcbebc6e-c10d-4a59-9bdb-cdf2678250f8" containerName="ovsdbserver-nb" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.001050 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-6874444f77-w92bj" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.024341 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-6874444f77-w92bj"] Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.078536 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/75547bf4-a6ea-45b7-a6a8-ba2955995008-config-data-custom\") pod \"75547bf4-a6ea-45b7-a6a8-ba2955995008\" (UID: \"75547bf4-a6ea-45b7-a6a8-ba2955995008\") " Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.078608 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/75547bf4-a6ea-45b7-a6a8-ba2955995008-scripts\") pod \"75547bf4-a6ea-45b7-a6a8-ba2955995008\" (UID: \"75547bf4-a6ea-45b7-a6a8-ba2955995008\") " Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.079149 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-47nwq\" (UniqueName: \"kubernetes.io/projected/75547bf4-a6ea-45b7-a6a8-ba2955995008-kube-api-access-47nwq\") pod \"75547bf4-a6ea-45b7-a6a8-ba2955995008\" (UID: \"75547bf4-a6ea-45b7-a6a8-ba2955995008\") " Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.079208 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/75547bf4-a6ea-45b7-a6a8-ba2955995008-public-tls-certs\") pod \"75547bf4-a6ea-45b7-a6a8-ba2955995008\" (UID: \"75547bf4-a6ea-45b7-a6a8-ba2955995008\") " Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.079394 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/75547bf4-a6ea-45b7-a6a8-ba2955995008-logs\") pod \"75547bf4-a6ea-45b7-a6a8-ba2955995008\" (UID: \"75547bf4-a6ea-45b7-a6a8-ba2955995008\") " Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.079450 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75547bf4-a6ea-45b7-a6a8-ba2955995008-combined-ca-bundle\") pod \"75547bf4-a6ea-45b7-a6a8-ba2955995008\" (UID: \"75547bf4-a6ea-45b7-a6a8-ba2955995008\") " Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.079486 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/75547bf4-a6ea-45b7-a6a8-ba2955995008-config-data\") pod \"75547bf4-a6ea-45b7-a6a8-ba2955995008\" (UID: \"75547bf4-a6ea-45b7-a6a8-ba2955995008\") " Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.079584 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/75547bf4-a6ea-45b7-a6a8-ba2955995008-etc-machine-id\") pod \"75547bf4-a6ea-45b7-a6a8-ba2955995008\" (UID: \"75547bf4-a6ea-45b7-a6a8-ba2955995008\") " Jan 20 17:35:36 crc 
kubenswrapper[4558]: I0120 17:35:36.079616 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/75547bf4-a6ea-45b7-a6a8-ba2955995008-internal-tls-certs\") pod \"75547bf4-a6ea-45b7-a6a8-ba2955995008\" (UID: \"75547bf4-a6ea-45b7-a6a8-ba2955995008\") " Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.080701 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/75547bf4-a6ea-45b7-a6a8-ba2955995008-logs" (OuterVolumeSpecName: "logs") pod "75547bf4-a6ea-45b7-a6a8-ba2955995008" (UID: "75547bf4-a6ea-45b7-a6a8-ba2955995008"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.082966 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/75547bf4-a6ea-45b7-a6a8-ba2955995008-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "75547bf4-a6ea-45b7-a6a8-ba2955995008" (UID: "75547bf4-a6ea-45b7-a6a8-ba2955995008"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.107686 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/75547bf4-a6ea-45b7-a6a8-ba2955995008-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "75547bf4-a6ea-45b7-a6a8-ba2955995008" (UID: "75547bf4-a6ea-45b7-a6a8-ba2955995008"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.108897 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/75547bf4-a6ea-45b7-a6a8-ba2955995008-scripts" (OuterVolumeSpecName: "scripts") pod "75547bf4-a6ea-45b7-a6a8-ba2955995008" (UID: "75547bf4-a6ea-45b7-a6a8-ba2955995008"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.112340 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/75547bf4-a6ea-45b7-a6a8-ba2955995008-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "75547bf4-a6ea-45b7-a6a8-ba2955995008" (UID: "75547bf4-a6ea-45b7-a6a8-ba2955995008"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.138041 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/75547bf4-a6ea-45b7-a6a8-ba2955995008-kube-api-access-47nwq" (OuterVolumeSpecName: "kube-api-access-47nwq") pod "75547bf4-a6ea-45b7-a6a8-ba2955995008" (UID: "75547bf4-a6ea-45b7-a6a8-ba2955995008"). InnerVolumeSpecName "kube-api-access-47nwq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.143701 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/75547bf4-a6ea-45b7-a6a8-ba2955995008-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "75547bf4-a6ea-45b7-a6a8-ba2955995008" (UID: "75547bf4-a6ea-45b7-a6a8-ba2955995008"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.145786 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/75547bf4-a6ea-45b7-a6a8-ba2955995008-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "75547bf4-a6ea-45b7-a6a8-ba2955995008" (UID: "75547bf4-a6ea-45b7-a6a8-ba2955995008"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.174375 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/75547bf4-a6ea-45b7-a6a8-ba2955995008-config-data" (OuterVolumeSpecName: "config-data") pod "75547bf4-a6ea-45b7-a6a8-ba2955995008" (UID: "75547bf4-a6ea-45b7-a6a8-ba2955995008"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.184868 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pfctj\" (UniqueName: \"kubernetes.io/projected/a4d26c7d-095d-4f3d-b6cc-c270961e9794-kube-api-access-pfctj\") pod \"keystone-6874444f77-w92bj\" (UID: \"a4d26c7d-095d-4f3d-b6cc-c270961e9794\") " pod="openstack-kuttl-tests/keystone-6874444f77-w92bj" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.185102 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a4d26c7d-095d-4f3d-b6cc-c270961e9794-internal-tls-certs\") pod \"keystone-6874444f77-w92bj\" (UID: \"a4d26c7d-095d-4f3d-b6cc-c270961e9794\") " pod="openstack-kuttl-tests/keystone-6874444f77-w92bj" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.185138 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/a4d26c7d-095d-4f3d-b6cc-c270961e9794-fernet-keys\") pod \"keystone-6874444f77-w92bj\" (UID: \"a4d26c7d-095d-4f3d-b6cc-c270961e9794\") " pod="openstack-kuttl-tests/keystone-6874444f77-w92bj" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.185359 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a4d26c7d-095d-4f3d-b6cc-c270961e9794-scripts\") pod \"keystone-6874444f77-w92bj\" (UID: \"a4d26c7d-095d-4f3d-b6cc-c270961e9794\") " pod="openstack-kuttl-tests/keystone-6874444f77-w92bj" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.185414 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a4d26c7d-095d-4f3d-b6cc-c270961e9794-public-tls-certs\") pod \"keystone-6874444f77-w92bj\" (UID: \"a4d26c7d-095d-4f3d-b6cc-c270961e9794\") " pod="openstack-kuttl-tests/keystone-6874444f77-w92bj" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.185447 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/a4d26c7d-095d-4f3d-b6cc-c270961e9794-credential-keys\") pod \"keystone-6874444f77-w92bj\" (UID: \"a4d26c7d-095d-4f3d-b6cc-c270961e9794\") " pod="openstack-kuttl-tests/keystone-6874444f77-w92bj" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.185495 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a4d26c7d-095d-4f3d-b6cc-c270961e9794-combined-ca-bundle\") pod \"keystone-6874444f77-w92bj\" (UID: \"a4d26c7d-095d-4f3d-b6cc-c270961e9794\") " pod="openstack-kuttl-tests/keystone-6874444f77-w92bj" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.185611 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a4d26c7d-095d-4f3d-b6cc-c270961e9794-config-data\") pod \"keystone-6874444f77-w92bj\" (UID: \"a4d26c7d-095d-4f3d-b6cc-c270961e9794\") " pod="openstack-kuttl-tests/keystone-6874444f77-w92bj" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.185734 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75547bf4-a6ea-45b7-a6a8-ba2955995008-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.185753 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/75547bf4-a6ea-45b7-a6a8-ba2955995008-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.185763 4558 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/75547bf4-a6ea-45b7-a6a8-ba2955995008-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.185771 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/75547bf4-a6ea-45b7-a6a8-ba2955995008-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.185782 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/75547bf4-a6ea-45b7-a6a8-ba2955995008-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.185792 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/75547bf4-a6ea-45b7-a6a8-ba2955995008-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.185803 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-47nwq\" (UniqueName: \"kubernetes.io/projected/75547bf4-a6ea-45b7-a6a8-ba2955995008-kube-api-access-47nwq\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.185813 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/75547bf4-a6ea-45b7-a6a8-ba2955995008-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.185821 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/75547bf4-a6ea-45b7-a6a8-ba2955995008-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.287887 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a4d26c7d-095d-4f3d-b6cc-c270961e9794-internal-tls-certs\") pod \"keystone-6874444f77-w92bj\" (UID: \"a4d26c7d-095d-4f3d-b6cc-c270961e9794\") " pod="openstack-kuttl-tests/keystone-6874444f77-w92bj" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.287930 4558 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/a4d26c7d-095d-4f3d-b6cc-c270961e9794-fernet-keys\") pod \"keystone-6874444f77-w92bj\" (UID: \"a4d26c7d-095d-4f3d-b6cc-c270961e9794\") " pod="openstack-kuttl-tests/keystone-6874444f77-w92bj" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.287979 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a4d26c7d-095d-4f3d-b6cc-c270961e9794-scripts\") pod \"keystone-6874444f77-w92bj\" (UID: \"a4d26c7d-095d-4f3d-b6cc-c270961e9794\") " pod="openstack-kuttl-tests/keystone-6874444f77-w92bj" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.288023 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a4d26c7d-095d-4f3d-b6cc-c270961e9794-public-tls-certs\") pod \"keystone-6874444f77-w92bj\" (UID: \"a4d26c7d-095d-4f3d-b6cc-c270961e9794\") " pod="openstack-kuttl-tests/keystone-6874444f77-w92bj" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.288054 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/a4d26c7d-095d-4f3d-b6cc-c270961e9794-credential-keys\") pod \"keystone-6874444f77-w92bj\" (UID: \"a4d26c7d-095d-4f3d-b6cc-c270961e9794\") " pod="openstack-kuttl-tests/keystone-6874444f77-w92bj" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.288098 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a4d26c7d-095d-4f3d-b6cc-c270961e9794-combined-ca-bundle\") pod \"keystone-6874444f77-w92bj\" (UID: \"a4d26c7d-095d-4f3d-b6cc-c270961e9794\") " pod="openstack-kuttl-tests/keystone-6874444f77-w92bj" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.288142 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a4d26c7d-095d-4f3d-b6cc-c270961e9794-config-data\") pod \"keystone-6874444f77-w92bj\" (UID: \"a4d26c7d-095d-4f3d-b6cc-c270961e9794\") " pod="openstack-kuttl-tests/keystone-6874444f77-w92bj" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.288214 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pfctj\" (UniqueName: \"kubernetes.io/projected/a4d26c7d-095d-4f3d-b6cc-c270961e9794-kube-api-access-pfctj\") pod \"keystone-6874444f77-w92bj\" (UID: \"a4d26c7d-095d-4f3d-b6cc-c270961e9794\") " pod="openstack-kuttl-tests/keystone-6874444f77-w92bj" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.293557 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/a4d26c7d-095d-4f3d-b6cc-c270961e9794-credential-keys\") pod \"keystone-6874444f77-w92bj\" (UID: \"a4d26c7d-095d-4f3d-b6cc-c270961e9794\") " pod="openstack-kuttl-tests/keystone-6874444f77-w92bj" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.296404 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a4d26c7d-095d-4f3d-b6cc-c270961e9794-combined-ca-bundle\") pod \"keystone-6874444f77-w92bj\" (UID: \"a4d26c7d-095d-4f3d-b6cc-c270961e9794\") " pod="openstack-kuttl-tests/keystone-6874444f77-w92bj" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.297264 4558 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a4d26c7d-095d-4f3d-b6cc-c270961e9794-internal-tls-certs\") pod \"keystone-6874444f77-w92bj\" (UID: \"a4d26c7d-095d-4f3d-b6cc-c270961e9794\") " pod="openstack-kuttl-tests/keystone-6874444f77-w92bj" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.297767 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a4d26c7d-095d-4f3d-b6cc-c270961e9794-config-data\") pod \"keystone-6874444f77-w92bj\" (UID: \"a4d26c7d-095d-4f3d-b6cc-c270961e9794\") " pod="openstack-kuttl-tests/keystone-6874444f77-w92bj" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.297979 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a4d26c7d-095d-4f3d-b6cc-c270961e9794-public-tls-certs\") pod \"keystone-6874444f77-w92bj\" (UID: \"a4d26c7d-095d-4f3d-b6cc-c270961e9794\") " pod="openstack-kuttl-tests/keystone-6874444f77-w92bj" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.298671 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/a4d26c7d-095d-4f3d-b6cc-c270961e9794-fernet-keys\") pod \"keystone-6874444f77-w92bj\" (UID: \"a4d26c7d-095d-4f3d-b6cc-c270961e9794\") " pod="openstack-kuttl-tests/keystone-6874444f77-w92bj" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.299184 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a4d26c7d-095d-4f3d-b6cc-c270961e9794-scripts\") pod \"keystone-6874444f77-w92bj\" (UID: \"a4d26c7d-095d-4f3d-b6cc-c270961e9794\") " pod="openstack-kuttl-tests/keystone-6874444f77-w92bj" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.302896 4558 generic.go:334] "Generic (PLEG): container finished" podID="75547bf4-a6ea-45b7-a6a8-ba2955995008" containerID="828468f6f165adb235da38999ede843b78146540c25db7da9e9afb21516374a2" exitCode=0 Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.302962 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"75547bf4-a6ea-45b7-a6a8-ba2955995008","Type":"ContainerDied","Data":"828468f6f165adb235da38999ede843b78146540c25db7da9e9afb21516374a2"} Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.302993 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"75547bf4-a6ea-45b7-a6a8-ba2955995008","Type":"ContainerDied","Data":"d22af631b8e109e08433f436c2272df13fde7b1f43934acf0b74e126442edc45"} Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.303709 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.304791 4558 scope.go:117] "RemoveContainer" containerID="828468f6f165adb235da38999ede843b78146540c25db7da9e9afb21516374a2" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.306435 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pfctj\" (UniqueName: \"kubernetes.io/projected/a4d26c7d-095d-4f3d-b6cc-c270961e9794-kube-api-access-pfctj\") pod \"keystone-6874444f77-w92bj\" (UID: \"a4d26c7d-095d-4f3d-b6cc-c270961e9794\") " pod="openstack-kuttl-tests/keystone-6874444f77-w92bj" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.309030 4558 generic.go:334] "Generic (PLEG): container finished" podID="d78c49cb-72eb-4def-9c40-b16128c74418" containerID="c9fcf8291fd575a7777eae1e5a6b38407c0f8527d5473b4afc554f44d2c8098e" exitCode=143 Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.309085 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"d78c49cb-72eb-4def-9c40-b16128c74418","Type":"ContainerDied","Data":"c9fcf8291fd575a7777eae1e5a6b38407c0f8527d5473b4afc554f44d2c8098e"} Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.312481 4558 generic.go:334] "Generic (PLEG): container finished" podID="21c81b0a-c70b-4d57-bea0-96e4840af7dd" containerID="21722b5c1856dd9260830023e156411f09aabc0fe431ae02c3192f233463c11f" exitCode=143 Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.312535 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"21c81b0a-c70b-4d57-bea0-96e4840af7dd","Type":"ContainerDied","Data":"21722b5c1856dd9260830023e156411f09aabc0fe431ae02c3192f233463c11f"} Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.318484 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-sb-0_637234b1-5373-46ac-99ea-be390202b982/ovsdbserver-sb/0.log" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.318521 4558 generic.go:334] "Generic (PLEG): container finished" podID="637234b1-5373-46ac-99ea-be390202b982" containerID="17df1b24593c3011ada27218df6bb80be57a9d60832634eea50b0c271601a4f4" exitCode=2 Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.318534 4558 generic.go:334] "Generic (PLEG): container finished" podID="637234b1-5373-46ac-99ea-be390202b982" containerID="93e11ff2740df41a59ff853fde3d9b9c61a53b8cc77a0513ff1cbea5527314b8" exitCode=143 Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.318622 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"637234b1-5373-46ac-99ea-be390202b982","Type":"ContainerDied","Data":"17df1b24593c3011ada27218df6bb80be57a9d60832634eea50b0c271601a4f4"} Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.318644 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"637234b1-5373-46ac-99ea-be390202b982","Type":"ContainerDied","Data":"93e11ff2740df41a59ff853fde3d9b9c61a53b8cc77a0513ff1cbea5527314b8"} Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.318655 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"637234b1-5373-46ac-99ea-be390202b982","Type":"ContainerDied","Data":"c11d825b13ee73691aceb00cb004cdfe49668dad4253bed0eb39b62e8eea7707"} Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.319181 4558 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.322892 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-nb-0_dcbebc6e-c10d-4a59-9bdb-cdf2678250f8/ovsdbserver-nb/0.log" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.324391 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.326739 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"dcbebc6e-c10d-4a59-9bdb-cdf2678250f8","Type":"ContainerDied","Data":"0978343ee70408eca9387c793d63efe6988caa6500955173dd4133b2c12d94f9"} Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.358729 4558 scope.go:117] "RemoveContainer" containerID="37e2905fb8733b28399a61b7869ba9f115a3ee071f8f2f9d43f5d0e1ebc3969d" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.364055 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.390224 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.405664 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.406757 4558 scope.go:117] "RemoveContainer" containerID="828468f6f165adb235da38999ede843b78146540c25db7da9e9afb21516374a2" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.407791 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:35:36 crc kubenswrapper[4558]: E0120 17:35:36.409153 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"828468f6f165adb235da38999ede843b78146540c25db7da9e9afb21516374a2\": container with ID starting with 828468f6f165adb235da38999ede843b78146540c25db7da9e9afb21516374a2 not found: ID does not exist" containerID="828468f6f165adb235da38999ede843b78146540c25db7da9e9afb21516374a2" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.409219 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"828468f6f165adb235da38999ede843b78146540c25db7da9e9afb21516374a2"} err="failed to get container status \"828468f6f165adb235da38999ede843b78146540c25db7da9e9afb21516374a2\": rpc error: code = NotFound desc = could not find container \"828468f6f165adb235da38999ede843b78146540c25db7da9e9afb21516374a2\": container with ID starting with 828468f6f165adb235da38999ede843b78146540c25db7da9e9afb21516374a2 not found: ID does not exist" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.409243 4558 scope.go:117] "RemoveContainer" containerID="37e2905fb8733b28399a61b7869ba9f115a3ee071f8f2f9d43f5d0e1ebc3969d" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.409779 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-api-config-data" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.410041 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-cinder-internal-svc" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.410185 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-cinder-public-svc" Jan 20 17:35:36 crc kubenswrapper[4558]: E0120 17:35:36.415872 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"37e2905fb8733b28399a61b7869ba9f115a3ee071f8f2f9d43f5d0e1ebc3969d\": container with ID starting with 37e2905fb8733b28399a61b7869ba9f115a3ee071f8f2f9d43f5d0e1ebc3969d not found: ID does not exist" containerID="37e2905fb8733b28399a61b7869ba9f115a3ee071f8f2f9d43f5d0e1ebc3969d" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.415905 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"37e2905fb8733b28399a61b7869ba9f115a3ee071f8f2f9d43f5d0e1ebc3969d"} err="failed to get container status \"37e2905fb8733b28399a61b7869ba9f115a3ee071f8f2f9d43f5d0e1ebc3969d\": rpc error: code = NotFound desc = could not find container \"37e2905fb8733b28399a61b7869ba9f115a3ee071f8f2f9d43f5d0e1ebc3969d\": container with ID starting with 37e2905fb8733b28399a61b7869ba9f115a3ee071f8f2f9d43f5d0e1ebc3969d not found: ID does not exist" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.415927 4558 scope.go:117] "RemoveContainer" containerID="17df1b24593c3011ada27218df6bb80be57a9d60832634eea50b0c271601a4f4" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.425636 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.433977 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.446066 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.461531 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.467094 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.479219 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.481395 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-6874444f77-w92bj" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.482089 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.485737 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.489730 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ovndbcluster-nb-ovndbs" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.490456 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.495980 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ovncluster-ovndbcluster-nb-dockercfg-djpzb" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.496222 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovndbcluster-nb-config" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.496381 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ovndbcluster-sb-ovndbs" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.496690 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovndbcluster-sb-scripts" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.496792 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovndbcluster-nb-scripts" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.496902 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ovncluster-ovndbcluster-sb-dockercfg-g95pp" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.505949 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovndbcluster-sb-config" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.511215 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.520813 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.559448 4558 scope.go:117] "RemoveContainer" containerID="93e11ff2740df41a59ff853fde3d9b9c61a53b8cc77a0513ff1cbea5527314b8" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.603753 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" 
(UID: \"e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.603793 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.603823 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c-config\") pod \"ovsdbserver-nb-0\" (UID: \"e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.603886 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/532ef149-f32a-4b6e-8d0c-458a04952d34-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"532ef149-f32a-4b6e-8d0c-458a04952d34\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.603910 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/532ef149-f32a-4b6e-8d0c-458a04952d34-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"532ef149-f32a-4b6e-8d0c-458a04952d34\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.603931 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.603973 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.603990 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/532ef149-f32a-4b6e-8d0c-458a04952d34-config\") pod \"ovsdbserver-sb-0\" (UID: \"532ef149-f32a-4b6e-8d0c-458a04952d34\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.604016 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/532ef149-f32a-4b6e-8d0c-458a04952d34-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"532ef149-f32a-4b6e-8d0c-458a04952d34\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.604097 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"ovsdbserver-sb-0\" (UID: 
\"532ef149-f32a-4b6e-8d0c-458a04952d34\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.604129 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e228514c-6d22-4527-b365-913e3ea3cfdb-public-tls-certs\") pod \"cinder-api-0\" (UID: \"e228514c-6d22-4527-b365-913e3ea3cfdb\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.604153 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e228514c-6d22-4527-b365-913e3ea3cfdb-etc-machine-id\") pod \"cinder-api-0\" (UID: \"e228514c-6d22-4527-b365-913e3ea3cfdb\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.604199 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j6z96\" (UniqueName: \"kubernetes.io/projected/e228514c-6d22-4527-b365-913e3ea3cfdb-kube-api-access-j6z96\") pod \"cinder-api-0\" (UID: \"e228514c-6d22-4527-b365-913e3ea3cfdb\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.604245 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nv6cc\" (UniqueName: \"kubernetes.io/projected/e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c-kube-api-access-nv6cc\") pod \"ovsdbserver-nb-0\" (UID: \"e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.604260 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e228514c-6d22-4527-b365-913e3ea3cfdb-logs\") pod \"cinder-api-0\" (UID: \"e228514c-6d22-4527-b365-913e3ea3cfdb\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.604289 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/532ef149-f32a-4b6e-8d0c-458a04952d34-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"532ef149-f32a-4b6e-8d0c-458a04952d34\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.604312 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"ovsdbserver-nb-0\" (UID: \"e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.604331 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e228514c-6d22-4527-b365-913e3ea3cfdb-config-data-custom\") pod \"cinder-api-0\" (UID: \"e228514c-6d22-4527-b365-913e3ea3cfdb\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.604346 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zgqkq\" (UniqueName: \"kubernetes.io/projected/532ef149-f32a-4b6e-8d0c-458a04952d34-kube-api-access-zgqkq\") pod \"ovsdbserver-sb-0\" (UID: 
\"532ef149-f32a-4b6e-8d0c-458a04952d34\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.604372 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.604396 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e228514c-6d22-4527-b365-913e3ea3cfdb-config-data\") pod \"cinder-api-0\" (UID: \"e228514c-6d22-4527-b365-913e3ea3cfdb\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.604415 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e228514c-6d22-4527-b365-913e3ea3cfdb-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"e228514c-6d22-4527-b365-913e3ea3cfdb\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.604435 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e228514c-6d22-4527-b365-913e3ea3cfdb-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"e228514c-6d22-4527-b365-913e3ea3cfdb\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.604451 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e228514c-6d22-4527-b365-913e3ea3cfdb-scripts\") pod \"cinder-api-0\" (UID: \"e228514c-6d22-4527-b365-913e3ea3cfdb\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.604471 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/532ef149-f32a-4b6e-8d0c-458a04952d34-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"532ef149-f32a-4b6e-8d0c-458a04952d34\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.623687 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="637234b1-5373-46ac-99ea-be390202b982" path="/var/lib/kubelet/pods/637234b1-5373-46ac-99ea-be390202b982/volumes" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.624338 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="75547bf4-a6ea-45b7-a6a8-ba2955995008" path="/var/lib/kubelet/pods/75547bf4-a6ea-45b7-a6a8-ba2955995008/volumes" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.634237 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dcbebc6e-c10d-4a59-9bdb-cdf2678250f8" path="/var/lib/kubelet/pods/dcbebc6e-c10d-4a59-9bdb-cdf2678250f8/volumes" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.713273 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e228514c-6d22-4527-b365-913e3ea3cfdb-public-tls-certs\") pod \"cinder-api-0\" (UID: \"e228514c-6d22-4527-b365-913e3ea3cfdb\") " 
pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.713315 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e228514c-6d22-4527-b365-913e3ea3cfdb-etc-machine-id\") pod \"cinder-api-0\" (UID: \"e228514c-6d22-4527-b365-913e3ea3cfdb\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.713350 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j6z96\" (UniqueName: \"kubernetes.io/projected/e228514c-6d22-4527-b365-913e3ea3cfdb-kube-api-access-j6z96\") pod \"cinder-api-0\" (UID: \"e228514c-6d22-4527-b365-913e3ea3cfdb\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.713402 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e228514c-6d22-4527-b365-913e3ea3cfdb-logs\") pod \"cinder-api-0\" (UID: \"e228514c-6d22-4527-b365-913e3ea3cfdb\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.713420 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nv6cc\" (UniqueName: \"kubernetes.io/projected/e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c-kube-api-access-nv6cc\") pod \"ovsdbserver-nb-0\" (UID: \"e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.713451 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/532ef149-f32a-4b6e-8d0c-458a04952d34-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"532ef149-f32a-4b6e-8d0c-458a04952d34\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.713473 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"ovsdbserver-nb-0\" (UID: \"e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.713495 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e228514c-6d22-4527-b365-913e3ea3cfdb-config-data-custom\") pod \"cinder-api-0\" (UID: \"e228514c-6d22-4527-b365-913e3ea3cfdb\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.713512 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zgqkq\" (UniqueName: \"kubernetes.io/projected/532ef149-f32a-4b6e-8d0c-458a04952d34-kube-api-access-zgqkq\") pod \"ovsdbserver-sb-0\" (UID: \"532ef149-f32a-4b6e-8d0c-458a04952d34\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.713535 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.713565 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/e228514c-6d22-4527-b365-913e3ea3cfdb-config-data\") pod \"cinder-api-0\" (UID: \"e228514c-6d22-4527-b365-913e3ea3cfdb\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.713580 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e228514c-6d22-4527-b365-913e3ea3cfdb-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"e228514c-6d22-4527-b365-913e3ea3cfdb\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.713600 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e228514c-6d22-4527-b365-913e3ea3cfdb-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"e228514c-6d22-4527-b365-913e3ea3cfdb\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.713621 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e228514c-6d22-4527-b365-913e3ea3cfdb-scripts\") pod \"cinder-api-0\" (UID: \"e228514c-6d22-4527-b365-913e3ea3cfdb\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.713651 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/532ef149-f32a-4b6e-8d0c-458a04952d34-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"532ef149-f32a-4b6e-8d0c-458a04952d34\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.713678 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.713699 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.713723 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c-config\") pod \"ovsdbserver-nb-0\" (UID: \"e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.713762 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/532ef149-f32a-4b6e-8d0c-458a04952d34-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"532ef149-f32a-4b6e-8d0c-458a04952d34\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.713784 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/532ef149-f32a-4b6e-8d0c-458a04952d34-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"532ef149-f32a-4b6e-8d0c-458a04952d34\") " 
pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.713801 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.713844 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.713871 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/532ef149-f32a-4b6e-8d0c-458a04952d34-config\") pod \"ovsdbserver-sb-0\" (UID: \"532ef149-f32a-4b6e-8d0c-458a04952d34\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.713894 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/532ef149-f32a-4b6e-8d0c-458a04952d34-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"532ef149-f32a-4b6e-8d0c-458a04952d34\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.713980 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"ovsdbserver-sb-0\" (UID: \"532ef149-f32a-4b6e-8d0c-458a04952d34\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.715585 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/532ef149-f32a-4b6e-8d0c-458a04952d34-config\") pod \"ovsdbserver-sb-0\" (UID: \"532ef149-f32a-4b6e-8d0c-458a04952d34\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.717102 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"ovsdbserver-sb-0\" (UID: \"532ef149-f32a-4b6e-8d0c-458a04952d34\") device mount path \"/mnt/openstack/pv04\"" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.717255 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"ovsdbserver-nb-0\" (UID: \"e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c\") device mount path \"/mnt/openstack/pv13\"" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.718852 4558 scope.go:117] "RemoveContainer" containerID="17df1b24593c3011ada27218df6bb80be57a9d60832634eea50b0c271601a4f4" Jan 20 17:35:36 crc kubenswrapper[4558]: E0120 17:35:36.721533 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"17df1b24593c3011ada27218df6bb80be57a9d60832634eea50b0c271601a4f4\": container with ID starting with 
17df1b24593c3011ada27218df6bb80be57a9d60832634eea50b0c271601a4f4 not found: ID does not exist" containerID="17df1b24593c3011ada27218df6bb80be57a9d60832634eea50b0c271601a4f4" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.721595 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"17df1b24593c3011ada27218df6bb80be57a9d60832634eea50b0c271601a4f4"} err="failed to get container status \"17df1b24593c3011ada27218df6bb80be57a9d60832634eea50b0c271601a4f4\": rpc error: code = NotFound desc = could not find container \"17df1b24593c3011ada27218df6bb80be57a9d60832634eea50b0c271601a4f4\": container with ID starting with 17df1b24593c3011ada27218df6bb80be57a9d60832634eea50b0c271601a4f4 not found: ID does not exist" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.721631 4558 scope.go:117] "RemoveContainer" containerID="93e11ff2740df41a59ff853fde3d9b9c61a53b8cc77a0513ff1cbea5527314b8" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.721812 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e228514c-6d22-4527-b365-913e3ea3cfdb-etc-machine-id\") pod \"cinder-api-0\" (UID: \"e228514c-6d22-4527-b365-913e3ea3cfdb\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.725356 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.729280 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e228514c-6d22-4527-b365-913e3ea3cfdb-public-tls-certs\") pod \"cinder-api-0\" (UID: \"e228514c-6d22-4527-b365-913e3ea3cfdb\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.736259 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e228514c-6d22-4527-b365-913e3ea3cfdb-logs\") pod \"cinder-api-0\" (UID: \"e228514c-6d22-4527-b365-913e3ea3cfdb\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.736702 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/532ef149-f32a-4b6e-8d0c-458a04952d34-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"532ef149-f32a-4b6e-8d0c-458a04952d34\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:35:36 crc kubenswrapper[4558]: E0120 17:35:36.736850 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"93e11ff2740df41a59ff853fde3d9b9c61a53b8cc77a0513ff1cbea5527314b8\": container with ID starting with 93e11ff2740df41a59ff853fde3d9b9c61a53b8cc77a0513ff1cbea5527314b8 not found: ID does not exist" containerID="93e11ff2740df41a59ff853fde3d9b9c61a53b8cc77a0513ff1cbea5527314b8" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.736881 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"93e11ff2740df41a59ff853fde3d9b9c61a53b8cc77a0513ff1cbea5527314b8"} err="failed to get container status \"93e11ff2740df41a59ff853fde3d9b9c61a53b8cc77a0513ff1cbea5527314b8\": rpc error: code = 
NotFound desc = could not find container \"93e11ff2740df41a59ff853fde3d9b9c61a53b8cc77a0513ff1cbea5527314b8\": container with ID starting with 93e11ff2740df41a59ff853fde3d9b9c61a53b8cc77a0513ff1cbea5527314b8 not found: ID does not exist" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.736907 4558 scope.go:117] "RemoveContainer" containerID="17df1b24593c3011ada27218df6bb80be57a9d60832634eea50b0c271601a4f4" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.737299 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/532ef149-f32a-4b6e-8d0c-458a04952d34-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"532ef149-f32a-4b6e-8d0c-458a04952d34\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.737563 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c-config\") pod \"ovsdbserver-nb-0\" (UID: \"e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.738016 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e228514c-6d22-4527-b365-913e3ea3cfdb-config-data-custom\") pod \"cinder-api-0\" (UID: \"e228514c-6d22-4527-b365-913e3ea3cfdb\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.738194 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.746662 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e228514c-6d22-4527-b365-913e3ea3cfdb-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"e228514c-6d22-4527-b365-913e3ea3cfdb\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.747384 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/532ef149-f32a-4b6e-8d0c-458a04952d34-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"532ef149-f32a-4b6e-8d0c-458a04952d34\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.747413 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e228514c-6d22-4527-b365-913e3ea3cfdb-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"e228514c-6d22-4527-b365-913e3ea3cfdb\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.748688 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"17df1b24593c3011ada27218df6bb80be57a9d60832634eea50b0c271601a4f4"} err="failed to get container status \"17df1b24593c3011ada27218df6bb80be57a9d60832634eea50b0c271601a4f4\": rpc error: code = NotFound desc = could not find container \"17df1b24593c3011ada27218df6bb80be57a9d60832634eea50b0c271601a4f4\": container with ID starting with 17df1b24593c3011ada27218df6bb80be57a9d60832634eea50b0c271601a4f4 not found: ID does 
not exist" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.748732 4558 scope.go:117] "RemoveContainer" containerID="93e11ff2740df41a59ff853fde3d9b9c61a53b8cc77a0513ff1cbea5527314b8" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.749617 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.750502 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"93e11ff2740df41a59ff853fde3d9b9c61a53b8cc77a0513ff1cbea5527314b8"} err="failed to get container status \"93e11ff2740df41a59ff853fde3d9b9c61a53b8cc77a0513ff1cbea5527314b8\": rpc error: code = NotFound desc = could not find container \"93e11ff2740df41a59ff853fde3d9b9c61a53b8cc77a0513ff1cbea5527314b8\": container with ID starting with 93e11ff2740df41a59ff853fde3d9b9c61a53b8cc77a0513ff1cbea5527314b8 not found: ID does not exist" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.750530 4558 scope.go:117] "RemoveContainer" containerID="bd371b5b829a5f2506757dc4a8010d15cd571ccffeeaa8564edc735f093b7de0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.752209 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.752795 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/532ef149-f32a-4b6e-8d0c-458a04952d34-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"532ef149-f32a-4b6e-8d0c-458a04952d34\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.756091 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j6z96\" (UniqueName: \"kubernetes.io/projected/e228514c-6d22-4527-b365-913e3ea3cfdb-kube-api-access-j6z96\") pod \"cinder-api-0\" (UID: \"e228514c-6d22-4527-b365-913e3ea3cfdb\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.756578 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e228514c-6d22-4527-b365-913e3ea3cfdb-scripts\") pod \"cinder-api-0\" (UID: \"e228514c-6d22-4527-b365-913e3ea3cfdb\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.757948 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.758239 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/532ef149-f32a-4b6e-8d0c-458a04952d34-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"532ef149-f32a-4b6e-8d0c-458a04952d34\") " 
pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.761383 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nv6cc\" (UniqueName: \"kubernetes.io/projected/e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c-kube-api-access-nv6cc\") pod \"ovsdbserver-nb-0\" (UID: \"e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.761573 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zgqkq\" (UniqueName: \"kubernetes.io/projected/532ef149-f32a-4b6e-8d0c-458a04952d34-kube-api-access-zgqkq\") pod \"ovsdbserver-sb-0\" (UID: \"532ef149-f32a-4b6e-8d0c-458a04952d34\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.763413 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e228514c-6d22-4527-b365-913e3ea3cfdb-config-data\") pod \"cinder-api-0\" (UID: \"e228514c-6d22-4527-b365-913e3ea3cfdb\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.770632 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.781596 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"ovsdbserver-nb-0\" (UID: \"e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.793186 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"ovsdbserver-sb-0\" (UID: \"532ef149-f32a-4b6e-8d0c-458a04952d34\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.793388 4558 scope.go:117] "RemoveContainer" containerID="2766083888bd587166d18906030cf31145864ecd777e795cf63b0f41b9063b9e" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.800207 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.822213 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-6f4bb7946d-nl7kt"] Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.823602 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-6f4bb7946d-nl7kt" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.855786 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-6f4bb7946d-nl7kt"] Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.894744 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.915614 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:35:36 crc kubenswrapper[4558]: I0120 17:35:36.959775 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:35:37 crc kubenswrapper[4558]: I0120 17:35:37.025465 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/ba60e66e-45cc-48a7-92dc-126983f4aa43-config\") pod \"neutron-6f4bb7946d-nl7kt\" (UID: \"ba60e66e-45cc-48a7-92dc-126983f4aa43\") " pod="openstack-kuttl-tests/neutron-6f4bb7946d-nl7kt" Jan 20 17:35:37 crc kubenswrapper[4558]: I0120 17:35:37.025503 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ba60e66e-45cc-48a7-92dc-126983f4aa43-public-tls-certs\") pod \"neutron-6f4bb7946d-nl7kt\" (UID: \"ba60e66e-45cc-48a7-92dc-126983f4aa43\") " pod="openstack-kuttl-tests/neutron-6f4bb7946d-nl7kt" Jan 20 17:35:37 crc kubenswrapper[4558]: I0120 17:35:37.025580 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/ba60e66e-45cc-48a7-92dc-126983f4aa43-httpd-config\") pod \"neutron-6f4bb7946d-nl7kt\" (UID: \"ba60e66e-45cc-48a7-92dc-126983f4aa43\") " pod="openstack-kuttl-tests/neutron-6f4bb7946d-nl7kt" Jan 20 17:35:37 crc kubenswrapper[4558]: I0120 17:35:37.025613 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fzxkx\" (UniqueName: \"kubernetes.io/projected/ba60e66e-45cc-48a7-92dc-126983f4aa43-kube-api-access-fzxkx\") pod \"neutron-6f4bb7946d-nl7kt\" (UID: \"ba60e66e-45cc-48a7-92dc-126983f4aa43\") " pod="openstack-kuttl-tests/neutron-6f4bb7946d-nl7kt" Jan 20 17:35:37 crc kubenswrapper[4558]: I0120 17:35:37.025642 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ba60e66e-45cc-48a7-92dc-126983f4aa43-internal-tls-certs\") pod \"neutron-6f4bb7946d-nl7kt\" (UID: \"ba60e66e-45cc-48a7-92dc-126983f4aa43\") " pod="openstack-kuttl-tests/neutron-6f4bb7946d-nl7kt" Jan 20 17:35:37 crc kubenswrapper[4558]: I0120 17:35:37.025692 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba60e66e-45cc-48a7-92dc-126983f4aa43-combined-ca-bundle\") pod \"neutron-6f4bb7946d-nl7kt\" (UID: \"ba60e66e-45cc-48a7-92dc-126983f4aa43\") " pod="openstack-kuttl-tests/neutron-6f4bb7946d-nl7kt" Jan 20 17:35:37 crc kubenswrapper[4558]: I0120 17:35:37.025761 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/ba60e66e-45cc-48a7-92dc-126983f4aa43-ovndb-tls-certs\") pod \"neutron-6f4bb7946d-nl7kt\" (UID: \"ba60e66e-45cc-48a7-92dc-126983f4aa43\") " pod="openstack-kuttl-tests/neutron-6f4bb7946d-nl7kt" Jan 20 17:35:37 crc kubenswrapper[4558]: I0120 17:35:37.129066 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fzxkx\" (UniqueName: \"kubernetes.io/projected/ba60e66e-45cc-48a7-92dc-126983f4aa43-kube-api-access-fzxkx\") pod \"neutron-6f4bb7946d-nl7kt\" (UID: \"ba60e66e-45cc-48a7-92dc-126983f4aa43\") " pod="openstack-kuttl-tests/neutron-6f4bb7946d-nl7kt" Jan 20 17:35:37 crc kubenswrapper[4558]: I0120 17:35:37.139547 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/ba60e66e-45cc-48a7-92dc-126983f4aa43-internal-tls-certs\") pod \"neutron-6f4bb7946d-nl7kt\" (UID: \"ba60e66e-45cc-48a7-92dc-126983f4aa43\") " pod="openstack-kuttl-tests/neutron-6f4bb7946d-nl7kt" Jan 20 17:35:37 crc kubenswrapper[4558]: I0120 17:35:37.139769 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba60e66e-45cc-48a7-92dc-126983f4aa43-combined-ca-bundle\") pod \"neutron-6f4bb7946d-nl7kt\" (UID: \"ba60e66e-45cc-48a7-92dc-126983f4aa43\") " pod="openstack-kuttl-tests/neutron-6f4bb7946d-nl7kt" Jan 20 17:35:37 crc kubenswrapper[4558]: I0120 17:35:37.140607 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/ba60e66e-45cc-48a7-92dc-126983f4aa43-ovndb-tls-certs\") pod \"neutron-6f4bb7946d-nl7kt\" (UID: \"ba60e66e-45cc-48a7-92dc-126983f4aa43\") " pod="openstack-kuttl-tests/neutron-6f4bb7946d-nl7kt" Jan 20 17:35:37 crc kubenswrapper[4558]: I0120 17:35:37.141035 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/ba60e66e-45cc-48a7-92dc-126983f4aa43-config\") pod \"neutron-6f4bb7946d-nl7kt\" (UID: \"ba60e66e-45cc-48a7-92dc-126983f4aa43\") " pod="openstack-kuttl-tests/neutron-6f4bb7946d-nl7kt" Jan 20 17:35:37 crc kubenswrapper[4558]: I0120 17:35:37.142784 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ba60e66e-45cc-48a7-92dc-126983f4aa43-public-tls-certs\") pod \"neutron-6f4bb7946d-nl7kt\" (UID: \"ba60e66e-45cc-48a7-92dc-126983f4aa43\") " pod="openstack-kuttl-tests/neutron-6f4bb7946d-nl7kt" Jan 20 17:35:37 crc kubenswrapper[4558]: I0120 17:35:37.143084 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/ba60e66e-45cc-48a7-92dc-126983f4aa43-httpd-config\") pod \"neutron-6f4bb7946d-nl7kt\" (UID: \"ba60e66e-45cc-48a7-92dc-126983f4aa43\") " pod="openstack-kuttl-tests/neutron-6f4bb7946d-nl7kt" Jan 20 17:35:37 crc kubenswrapper[4558]: I0120 17:35:37.157564 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba60e66e-45cc-48a7-92dc-126983f4aa43-combined-ca-bundle\") pod \"neutron-6f4bb7946d-nl7kt\" (UID: \"ba60e66e-45cc-48a7-92dc-126983f4aa43\") " pod="openstack-kuttl-tests/neutron-6f4bb7946d-nl7kt" Jan 20 17:35:37 crc kubenswrapper[4558]: I0120 17:35:37.196834 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ba60e66e-45cc-48a7-92dc-126983f4aa43-public-tls-certs\") pod \"neutron-6f4bb7946d-nl7kt\" (UID: \"ba60e66e-45cc-48a7-92dc-126983f4aa43\") " pod="openstack-kuttl-tests/neutron-6f4bb7946d-nl7kt" Jan 20 17:35:37 crc kubenswrapper[4558]: I0120 17:35:37.197002 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/ba60e66e-45cc-48a7-92dc-126983f4aa43-ovndb-tls-certs\") pod \"neutron-6f4bb7946d-nl7kt\" (UID: \"ba60e66e-45cc-48a7-92dc-126983f4aa43\") " pod="openstack-kuttl-tests/neutron-6f4bb7946d-nl7kt" Jan 20 17:35:37 crc kubenswrapper[4558]: I0120 17:35:37.197239 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: 
\"kubernetes.io/secret/ba60e66e-45cc-48a7-92dc-126983f4aa43-httpd-config\") pod \"neutron-6f4bb7946d-nl7kt\" (UID: \"ba60e66e-45cc-48a7-92dc-126983f4aa43\") " pod="openstack-kuttl-tests/neutron-6f4bb7946d-nl7kt" Jan 20 17:35:37 crc kubenswrapper[4558]: I0120 17:35:37.197834 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ba60e66e-45cc-48a7-92dc-126983f4aa43-internal-tls-certs\") pod \"neutron-6f4bb7946d-nl7kt\" (UID: \"ba60e66e-45cc-48a7-92dc-126983f4aa43\") " pod="openstack-kuttl-tests/neutron-6f4bb7946d-nl7kt" Jan 20 17:35:37 crc kubenswrapper[4558]: I0120 17:35:37.198573 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fzxkx\" (UniqueName: \"kubernetes.io/projected/ba60e66e-45cc-48a7-92dc-126983f4aa43-kube-api-access-fzxkx\") pod \"neutron-6f4bb7946d-nl7kt\" (UID: \"ba60e66e-45cc-48a7-92dc-126983f4aa43\") " pod="openstack-kuttl-tests/neutron-6f4bb7946d-nl7kt" Jan 20 17:35:37 crc kubenswrapper[4558]: I0120 17:35:37.199959 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/ba60e66e-45cc-48a7-92dc-126983f4aa43-config\") pod \"neutron-6f4bb7946d-nl7kt\" (UID: \"ba60e66e-45cc-48a7-92dc-126983f4aa43\") " pod="openstack-kuttl-tests/neutron-6f4bb7946d-nl7kt" Jan 20 17:35:37 crc kubenswrapper[4558]: I0120 17:35:37.322432 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-6874444f77-w92bj"] Jan 20 17:35:37 crc kubenswrapper[4558]: I0120 17:35:37.336813 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:35:37 crc kubenswrapper[4558]: I0120 17:35:37.366426 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-6874444f77-w92bj" event={"ID":"a4d26c7d-095d-4f3d-b6cc-c270961e9794","Type":"ContainerStarted","Data":"d19d6bcdf0254eb31ac9d803fe49d407f81d95266bfc66f730bfd688e5da9db6"} Jan 20 17:35:37 crc kubenswrapper[4558]: I0120 17:35:37.484123 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-6f4bb7946d-nl7kt" Jan 20 17:35:37 crc kubenswrapper[4558]: W0120 17:35:37.535609 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode8a8d7c2_9fc7_48d9_bd28_2f0416b7c61c.slice/crio-e0768009b6e30ed978a8014343d3fbe81202632e1022bdbfd5c82eef6ba8b41a WatchSource:0}: Error finding container e0768009b6e30ed978a8014343d3fbe81202632e1022bdbfd5c82eef6ba8b41a: Status 404 returned error can't find the container with id e0768009b6e30ed978a8014343d3fbe81202632e1022bdbfd5c82eef6ba8b41a Jan 20 17:35:37 crc kubenswrapper[4558]: I0120 17:35:37.535957 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:35:37 crc kubenswrapper[4558]: I0120 17:35:37.546244 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:35:37 crc kubenswrapper[4558]: W0120 17:35:37.571735 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod532ef149_f32a_4b6e_8d0c_458a04952d34.slice/crio-c12a03e6473f4843b945a7bd727f639e663e7def6ac3ec56127de8b4b910d659 WatchSource:0}: Error finding container c12a03e6473f4843b945a7bd727f639e663e7def6ac3ec56127de8b4b910d659: Status 404 returned error can't find the container with id c12a03e6473f4843b945a7bd727f639e663e7def6ac3ec56127de8b4b910d659 Jan 20 17:35:37 crc kubenswrapper[4558]: I0120 17:35:37.819010 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:35:37 crc kubenswrapper[4558]: I0120 17:35:37.849440 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:35:37 crc kubenswrapper[4558]: W0120 17:35:37.977759 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podba60e66e_45cc_48a7_92dc_126983f4aa43.slice/crio-28b5bc97035406c2509fb60ee1ac0ff1e8441d4a30293b4a1c159195284409ff WatchSource:0}: Error finding container 28b5bc97035406c2509fb60ee1ac0ff1e8441d4a30293b4a1c159195284409ff: Status 404 returned error can't find the container with id 28b5bc97035406c2509fb60ee1ac0ff1e8441d4a30293b4a1c159195284409ff Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:37.983624 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-6f4bb7946d-nl7kt"] Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.227483 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-6f4bb7946d-nl7kt"] Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.247127 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-7d788974f4-sf4l4"] Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.252473 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-7d788974f4-sf4l4" Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.270363 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-7d788974f4-sf4l4"] Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.379303 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/13d3779b-9cf3-47f3-b330-a3adfd5b10f7-internal-tls-certs\") pod \"neutron-7d788974f4-sf4l4\" (UID: \"13d3779b-9cf3-47f3-b330-a3adfd5b10f7\") " pod="openstack-kuttl-tests/neutron-7d788974f4-sf4l4" Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.379380 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/13d3779b-9cf3-47f3-b330-a3adfd5b10f7-httpd-config\") pod \"neutron-7d788974f4-sf4l4\" (UID: \"13d3779b-9cf3-47f3-b330-a3adfd5b10f7\") " pod="openstack-kuttl-tests/neutron-7d788974f4-sf4l4" Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.379475 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8lmnc\" (UniqueName: \"kubernetes.io/projected/13d3779b-9cf3-47f3-b330-a3adfd5b10f7-kube-api-access-8lmnc\") pod \"neutron-7d788974f4-sf4l4\" (UID: \"13d3779b-9cf3-47f3-b330-a3adfd5b10f7\") " pod="openstack-kuttl-tests/neutron-7d788974f4-sf4l4" Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.379573 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13d3779b-9cf3-47f3-b330-a3adfd5b10f7-combined-ca-bundle\") pod \"neutron-7d788974f4-sf4l4\" (UID: \"13d3779b-9cf3-47f3-b330-a3adfd5b10f7\") " pod="openstack-kuttl-tests/neutron-7d788974f4-sf4l4" Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.379596 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/13d3779b-9cf3-47f3-b330-a3adfd5b10f7-public-tls-certs\") pod \"neutron-7d788974f4-sf4l4\" (UID: \"13d3779b-9cf3-47f3-b330-a3adfd5b10f7\") " pod="openstack-kuttl-tests/neutron-7d788974f4-sf4l4" Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.379628 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/13d3779b-9cf3-47f3-b330-a3adfd5b10f7-config\") pod \"neutron-7d788974f4-sf4l4\" (UID: \"13d3779b-9cf3-47f3-b330-a3adfd5b10f7\") " pod="openstack-kuttl-tests/neutron-7d788974f4-sf4l4" Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.379703 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/13d3779b-9cf3-47f3-b330-a3adfd5b10f7-ovndb-tls-certs\") pod \"neutron-7d788974f4-sf4l4\" (UID: \"13d3779b-9cf3-47f3-b330-a3adfd5b10f7\") " pod="openstack-kuttl-tests/neutron-7d788974f4-sf4l4" Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.392602 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovn-northd-0_76c1fbce-d8ff-4469-85cb-11124a82402a/ovn-northd/0.log" Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.392679 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.404640 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-6874444f77-w92bj" event={"ID":"a4d26c7d-095d-4f3d-b6cc-c270961e9794","Type":"ContainerStarted","Data":"964d0ad276f99dd9fa9d8522022840ffe7ce1002dc0bd50778cbc03decd4341c"} Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.405714 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/keystone-6874444f77-w92bj" Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.427706 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c","Type":"ContainerStarted","Data":"ea31e1aad28c22c0366c0c296a61f01044fbf0caf04c9f039be12136bde4291d"} Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.427857 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c","Type":"ContainerStarted","Data":"b5bed1bf3813bd740cfee2d99ab02e9629a1b8a01adb60f10e7219ffeb492771"} Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.427931 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c","Type":"ContainerStarted","Data":"e0768009b6e30ed978a8014343d3fbe81202632e1022bdbfd5c82eef6ba8b41a"} Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.435332 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"532ef149-f32a-4b6e-8d0c-458a04952d34","Type":"ContainerStarted","Data":"93d74d768c827cf81147355c461a6f8bd023a18e684bd30dab48966103e6de7e"} Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.435483 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"532ef149-f32a-4b6e-8d0c-458a04952d34","Type":"ContainerStarted","Data":"7b466bcc44125b12b3728f5c2f0dd6ae9956b945bf663448fef7df702634964d"} Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.435547 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"532ef149-f32a-4b6e-8d0c-458a04952d34","Type":"ContainerStarted","Data":"c12a03e6473f4843b945a7bd727f639e663e7def6ac3ec56127de8b4b910d659"} Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.442749 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovn-northd-0_76c1fbce-d8ff-4469-85cb-11124a82402a/ovn-northd/0.log" Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.442924 4558 generic.go:334] "Generic (PLEG): container finished" podID="76c1fbce-d8ff-4469-85cb-11124a82402a" containerID="79b61ba8ff702ce7bd226d54ae1f42ddd1006b9082d80760f2a349aad7555bc4" exitCode=139 Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.442976 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"76c1fbce-d8ff-4469-85cb-11124a82402a","Type":"ContainerDied","Data":"79b61ba8ff702ce7bd226d54ae1f42ddd1006b9082d80760f2a349aad7555bc4"} Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.443001 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" 
event={"ID":"76c1fbce-d8ff-4469-85cb-11124a82402a","Type":"ContainerDied","Data":"713cb74e1cabebfa7d1cc7a7da54294b672f46e7d0652d75be881bebd0472ef1"} Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.443022 4558 scope.go:117] "RemoveContainer" containerID="610a512ff345e04683281515fbc1b1706ebf44507e8dbe3063d14bccc948cae5" Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.443120 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.443928 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/keystone-6874444f77-w92bj" podStartSLOduration=3.44391504 podStartE2EDuration="3.44391504s" podCreationTimestamp="2026-01-20 17:35:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:35:38.436062354 +0000 UTC m=+3232.196400311" watchObservedRunningTime="2026-01-20 17:35:38.44391504 +0000 UTC m=+3232.204253007" Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.447636 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"e228514c-6d22-4527-b365-913e3ea3cfdb","Type":"ContainerStarted","Data":"8ac7d16d0f78462693261b691f194857cebebf572d8c74e83499e3ff412581c0"} Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.447664 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"e228514c-6d22-4527-b365-913e3ea3cfdb","Type":"ContainerStarted","Data":"c4d8f498217a4761f2808d12b3f68f581b7f216075fc52ef93e3194e0d7de2e2"} Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.461433 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-6f4bb7946d-nl7kt" event={"ID":"ba60e66e-45cc-48a7-92dc-126983f4aa43","Type":"ContainerStarted","Data":"b3cfb60543d88a74e156430e8c9432a06e86b67228b8b014df284600429f9b92"} Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.461521 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-6f4bb7946d-nl7kt" event={"ID":"ba60e66e-45cc-48a7-92dc-126983f4aa43","Type":"ContainerStarted","Data":"28b5bc97035406c2509fb60ee1ac0ff1e8441d4a30293b4a1c159195284409ff"} Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.467531 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ovsdbserver-sb-0" podStartSLOduration=2.467518703 podStartE2EDuration="2.467518703s" podCreationTimestamp="2026-01-20 17:35:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:35:38.462908134 +0000 UTC m=+3232.223246102" watchObservedRunningTime="2026-01-20 17:35:38.467518703 +0000 UTC m=+3232.227856670" Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.481303 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8lmnc\" (UniqueName: \"kubernetes.io/projected/13d3779b-9cf3-47f3-b330-a3adfd5b10f7-kube-api-access-8lmnc\") pod \"neutron-7d788974f4-sf4l4\" (UID: \"13d3779b-9cf3-47f3-b330-a3adfd5b10f7\") " pod="openstack-kuttl-tests/neutron-7d788974f4-sf4l4" Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.481390 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/13d3779b-9cf3-47f3-b330-a3adfd5b10f7-combined-ca-bundle\") pod \"neutron-7d788974f4-sf4l4\" (UID: \"13d3779b-9cf3-47f3-b330-a3adfd5b10f7\") " pod="openstack-kuttl-tests/neutron-7d788974f4-sf4l4" Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.481411 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/13d3779b-9cf3-47f3-b330-a3adfd5b10f7-public-tls-certs\") pod \"neutron-7d788974f4-sf4l4\" (UID: \"13d3779b-9cf3-47f3-b330-a3adfd5b10f7\") " pod="openstack-kuttl-tests/neutron-7d788974f4-sf4l4" Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.481443 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/13d3779b-9cf3-47f3-b330-a3adfd5b10f7-config\") pod \"neutron-7d788974f4-sf4l4\" (UID: \"13d3779b-9cf3-47f3-b330-a3adfd5b10f7\") " pod="openstack-kuttl-tests/neutron-7d788974f4-sf4l4" Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.481501 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/13d3779b-9cf3-47f3-b330-a3adfd5b10f7-ovndb-tls-certs\") pod \"neutron-7d788974f4-sf4l4\" (UID: \"13d3779b-9cf3-47f3-b330-a3adfd5b10f7\") " pod="openstack-kuttl-tests/neutron-7d788974f4-sf4l4" Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.481544 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/13d3779b-9cf3-47f3-b330-a3adfd5b10f7-internal-tls-certs\") pod \"neutron-7d788974f4-sf4l4\" (UID: \"13d3779b-9cf3-47f3-b330-a3adfd5b10f7\") " pod="openstack-kuttl-tests/neutron-7d788974f4-sf4l4" Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.481584 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/13d3779b-9cf3-47f3-b330-a3adfd5b10f7-httpd-config\") pod \"neutron-7d788974f4-sf4l4\" (UID: \"13d3779b-9cf3-47f3-b330-a3adfd5b10f7\") " pod="openstack-kuttl-tests/neutron-7d788974f4-sf4l4" Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.491286 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ovsdbserver-nb-0" podStartSLOduration=2.491274103 podStartE2EDuration="2.491274103s" podCreationTimestamp="2026-01-20 17:35:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:35:38.479598669 +0000 UTC m=+3232.239936635" watchObservedRunningTime="2026-01-20 17:35:38.491274103 +0000 UTC m=+3232.251612070" Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.494862 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/13d3779b-9cf3-47f3-b330-a3adfd5b10f7-httpd-config\") pod \"neutron-7d788974f4-sf4l4\" (UID: \"13d3779b-9cf3-47f3-b330-a3adfd5b10f7\") " pod="openstack-kuttl-tests/neutron-7d788974f4-sf4l4" Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.494875 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/13d3779b-9cf3-47f3-b330-a3adfd5b10f7-public-tls-certs\") pod \"neutron-7d788974f4-sf4l4\" (UID: \"13d3779b-9cf3-47f3-b330-a3adfd5b10f7\") " pod="openstack-kuttl-tests/neutron-7d788974f4-sf4l4" Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 
17:35:38.495242 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/13d3779b-9cf3-47f3-b330-a3adfd5b10f7-config\") pod \"neutron-7d788974f4-sf4l4\" (UID: \"13d3779b-9cf3-47f3-b330-a3adfd5b10f7\") " pod="openstack-kuttl-tests/neutron-7d788974f4-sf4l4" Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.495620 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/13d3779b-9cf3-47f3-b330-a3adfd5b10f7-ovndb-tls-certs\") pod \"neutron-7d788974f4-sf4l4\" (UID: \"13d3779b-9cf3-47f3-b330-a3adfd5b10f7\") " pod="openstack-kuttl-tests/neutron-7d788974f4-sf4l4" Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.498658 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/13d3779b-9cf3-47f3-b330-a3adfd5b10f7-internal-tls-certs\") pod \"neutron-7d788974f4-sf4l4\" (UID: \"13d3779b-9cf3-47f3-b330-a3adfd5b10f7\") " pod="openstack-kuttl-tests/neutron-7d788974f4-sf4l4" Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.499449 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8lmnc\" (UniqueName: \"kubernetes.io/projected/13d3779b-9cf3-47f3-b330-a3adfd5b10f7-kube-api-access-8lmnc\") pod \"neutron-7d788974f4-sf4l4\" (UID: \"13d3779b-9cf3-47f3-b330-a3adfd5b10f7\") " pod="openstack-kuttl-tests/neutron-7d788974f4-sf4l4" Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.499913 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13d3779b-9cf3-47f3-b330-a3adfd5b10f7-combined-ca-bundle\") pod \"neutron-7d788974f4-sf4l4\" (UID: \"13d3779b-9cf3-47f3-b330-a3adfd5b10f7\") " pod="openstack-kuttl-tests/neutron-7d788974f4-sf4l4" Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.508934 4558 scope.go:117] "RemoveContainer" containerID="79b61ba8ff702ce7bd226d54ae1f42ddd1006b9082d80760f2a349aad7555bc4" Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.578397 4558 scope.go:117] "RemoveContainer" containerID="610a512ff345e04683281515fbc1b1706ebf44507e8dbe3063d14bccc948cae5" Jan 20 17:35:38 crc kubenswrapper[4558]: E0120 17:35:38.578857 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"610a512ff345e04683281515fbc1b1706ebf44507e8dbe3063d14bccc948cae5\": container with ID starting with 610a512ff345e04683281515fbc1b1706ebf44507e8dbe3063d14bccc948cae5 not found: ID does not exist" containerID="610a512ff345e04683281515fbc1b1706ebf44507e8dbe3063d14bccc948cae5" Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.578884 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"610a512ff345e04683281515fbc1b1706ebf44507e8dbe3063d14bccc948cae5"} err="failed to get container status \"610a512ff345e04683281515fbc1b1706ebf44507e8dbe3063d14bccc948cae5\": rpc error: code = NotFound desc = could not find container \"610a512ff345e04683281515fbc1b1706ebf44507e8dbe3063d14bccc948cae5\": container with ID starting with 610a512ff345e04683281515fbc1b1706ebf44507e8dbe3063d14bccc948cae5 not found: ID does not exist" Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.578903 4558 scope.go:117] "RemoveContainer" containerID="79b61ba8ff702ce7bd226d54ae1f42ddd1006b9082d80760f2a349aad7555bc4" Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.579442 4558 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-7d788974f4-sf4l4" Jan 20 17:35:38 crc kubenswrapper[4558]: E0120 17:35:38.581748 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"79b61ba8ff702ce7bd226d54ae1f42ddd1006b9082d80760f2a349aad7555bc4\": container with ID starting with 79b61ba8ff702ce7bd226d54ae1f42ddd1006b9082d80760f2a349aad7555bc4 not found: ID does not exist" containerID="79b61ba8ff702ce7bd226d54ae1f42ddd1006b9082d80760f2a349aad7555bc4" Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.581811 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"79b61ba8ff702ce7bd226d54ae1f42ddd1006b9082d80760f2a349aad7555bc4"} err="failed to get container status \"79b61ba8ff702ce7bd226d54ae1f42ddd1006b9082d80760f2a349aad7555bc4\": rpc error: code = NotFound desc = could not find container \"79b61ba8ff702ce7bd226d54ae1f42ddd1006b9082d80760f2a349aad7555bc4\": container with ID starting with 79b61ba8ff702ce7bd226d54ae1f42ddd1006b9082d80760f2a349aad7555bc4 not found: ID does not exist" Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.582295 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76c1fbce-d8ff-4469-85cb-11124a82402a-config\") pod \"76c1fbce-d8ff-4469-85cb-11124a82402a\" (UID: \"76c1fbce-d8ff-4469-85cb-11124a82402a\") " Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.582393 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zsdjq\" (UniqueName: \"kubernetes.io/projected/76c1fbce-d8ff-4469-85cb-11124a82402a-kube-api-access-zsdjq\") pod \"76c1fbce-d8ff-4469-85cb-11124a82402a\" (UID: \"76c1fbce-d8ff-4469-85cb-11124a82402a\") " Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.582474 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/76c1fbce-d8ff-4469-85cb-11124a82402a-ovn-rundir\") pod \"76c1fbce-d8ff-4469-85cb-11124a82402a\" (UID: \"76c1fbce-d8ff-4469-85cb-11124a82402a\") " Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.582561 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76c1fbce-d8ff-4469-85cb-11124a82402a-combined-ca-bundle\") pod \"76c1fbce-d8ff-4469-85cb-11124a82402a\" (UID: \"76c1fbce-d8ff-4469-85cb-11124a82402a\") " Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.582759 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/76c1fbce-d8ff-4469-85cb-11124a82402a-scripts\") pod \"76c1fbce-d8ff-4469-85cb-11124a82402a\" (UID: \"76c1fbce-d8ff-4469-85cb-11124a82402a\") " Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.582855 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/76c1fbce-d8ff-4469-85cb-11124a82402a-ovn-northd-tls-certs\") pod \"76c1fbce-d8ff-4469-85cb-11124a82402a\" (UID: \"76c1fbce-d8ff-4469-85cb-11124a82402a\") " Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.582883 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/76c1fbce-d8ff-4469-85cb-11124a82402a-metrics-certs-tls-certs\") pod \"76c1fbce-d8ff-4469-85cb-11124a82402a\" (UID: \"76c1fbce-d8ff-4469-85cb-11124a82402a\") " Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.583041 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/76c1fbce-d8ff-4469-85cb-11124a82402a-ovn-rundir" (OuterVolumeSpecName: "ovn-rundir") pod "76c1fbce-d8ff-4469-85cb-11124a82402a" (UID: "76c1fbce-d8ff-4469-85cb-11124a82402a"). InnerVolumeSpecName "ovn-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.583189 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/76c1fbce-d8ff-4469-85cb-11124a82402a-config" (OuterVolumeSpecName: "config") pod "76c1fbce-d8ff-4469-85cb-11124a82402a" (UID: "76c1fbce-d8ff-4469-85cb-11124a82402a"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.583770 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/76c1fbce-d8ff-4469-85cb-11124a82402a-scripts" (OuterVolumeSpecName: "scripts") pod "76c1fbce-d8ff-4469-85cb-11124a82402a" (UID: "76c1fbce-d8ff-4469-85cb-11124a82402a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.584643 4558 reconciler_common.go:293] "Volume detached for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/76c1fbce-d8ff-4469-85cb-11124a82402a-ovn-rundir\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.584667 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/76c1fbce-d8ff-4469-85cb-11124a82402a-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.584677 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76c1fbce-d8ff-4469-85cb-11124a82402a-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.586612 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/76c1fbce-d8ff-4469-85cb-11124a82402a-kube-api-access-zsdjq" (OuterVolumeSpecName: "kube-api-access-zsdjq") pod "76c1fbce-d8ff-4469-85cb-11124a82402a" (UID: "76c1fbce-d8ff-4469-85cb-11124a82402a"). InnerVolumeSpecName "kube-api-access-zsdjq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.614487 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/76c1fbce-d8ff-4469-85cb-11124a82402a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "76c1fbce-d8ff-4469-85cb-11124a82402a" (UID: "76c1fbce-d8ff-4469-85cb-11124a82402a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.641256 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/76c1fbce-d8ff-4469-85cb-11124a82402a-ovn-northd-tls-certs" (OuterVolumeSpecName: "ovn-northd-tls-certs") pod "76c1fbce-d8ff-4469-85cb-11124a82402a" (UID: "76c1fbce-d8ff-4469-85cb-11124a82402a"). InnerVolumeSpecName "ovn-northd-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.647630 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/76c1fbce-d8ff-4469-85cb-11124a82402a-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "76c1fbce-d8ff-4469-85cb-11124a82402a" (UID: "76c1fbce-d8ff-4469-85cb-11124a82402a"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.687798 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76c1fbce-d8ff-4469-85cb-11124a82402a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.687824 4558 reconciler_common.go:293] "Volume detached for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/76c1fbce-d8ff-4469-85cb-11124a82402a-ovn-northd-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.687849 4558 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/76c1fbce-d8ff-4469-85cb-11124a82402a-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.687861 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zsdjq\" (UniqueName: \"kubernetes.io/projected/76c1fbce-d8ff-4469-85cb-11124a82402a-kube-api-access-zsdjq\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.789855 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.804213 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.826224 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:35:38 crc kubenswrapper[4558]: E0120 17:35:38.826612 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76c1fbce-d8ff-4469-85cb-11124a82402a" containerName="openstack-network-exporter" Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.826633 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="76c1fbce-d8ff-4469-85cb-11124a82402a" containerName="openstack-network-exporter" Jan 20 17:35:38 crc kubenswrapper[4558]: E0120 17:35:38.826656 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76c1fbce-d8ff-4469-85cb-11124a82402a" containerName="ovn-northd" Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.826663 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="76c1fbce-d8ff-4469-85cb-11124a82402a" containerName="ovn-northd" Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.826833 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="76c1fbce-d8ff-4469-85cb-11124a82402a" containerName="ovn-northd" Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.826870 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="76c1fbce-d8ff-4469-85cb-11124a82402a" containerName="openstack-network-exporter" Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.838535 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.841322 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ovnnorthd-ovndbs" Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.841456 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ovnnorthd-ovnnorthd-dockercfg-dj5lx" Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.841573 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovnnorthd-scripts" Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.843974 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovnnorthd-config" Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.849081 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.995490 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/39600642-1418-4cb4-8617-f2ecc7089e0b-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"39600642-1418-4cb4-8617-f2ecc7089e0b\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.995755 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/39600642-1418-4cb4-8617-f2ecc7089e0b-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"39600642-1418-4cb4-8617-f2ecc7089e0b\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.995872 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/39600642-1418-4cb4-8617-f2ecc7089e0b-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"39600642-1418-4cb4-8617-f2ecc7089e0b\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.995998 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39600642-1418-4cb4-8617-f2ecc7089e0b-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"39600642-1418-4cb4-8617-f2ecc7089e0b\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.996191 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4jfnx\" (UniqueName: \"kubernetes.io/projected/39600642-1418-4cb4-8617-f2ecc7089e0b-kube-api-access-4jfnx\") pod \"ovn-northd-0\" (UID: \"39600642-1418-4cb4-8617-f2ecc7089e0b\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.996267 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/39600642-1418-4cb4-8617-f2ecc7089e0b-scripts\") pod \"ovn-northd-0\" (UID: \"39600642-1418-4cb4-8617-f2ecc7089e0b\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:35:38 crc kubenswrapper[4558]: I0120 17:35:38.996419 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/39600642-1418-4cb4-8617-f2ecc7089e0b-config\") pod \"ovn-northd-0\" (UID: \"39600642-1418-4cb4-8617-f2ecc7089e0b\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.022569 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-7d788974f4-sf4l4"] Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.098873 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/39600642-1418-4cb4-8617-f2ecc7089e0b-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"39600642-1418-4cb4-8617-f2ecc7089e0b\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.099235 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/39600642-1418-4cb4-8617-f2ecc7089e0b-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"39600642-1418-4cb4-8617-f2ecc7089e0b\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.099312 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39600642-1418-4cb4-8617-f2ecc7089e0b-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"39600642-1418-4cb4-8617-f2ecc7089e0b\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.099390 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4jfnx\" (UniqueName: \"kubernetes.io/projected/39600642-1418-4cb4-8617-f2ecc7089e0b-kube-api-access-4jfnx\") pod \"ovn-northd-0\" (UID: \"39600642-1418-4cb4-8617-f2ecc7089e0b\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.099433 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/39600642-1418-4cb4-8617-f2ecc7089e0b-scripts\") pod \"ovn-northd-0\" (UID: \"39600642-1418-4cb4-8617-f2ecc7089e0b\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.099548 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/39600642-1418-4cb4-8617-f2ecc7089e0b-config\") pod \"ovn-northd-0\" (UID: \"39600642-1418-4cb4-8617-f2ecc7089e0b\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.099635 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/39600642-1418-4cb4-8617-f2ecc7089e0b-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"39600642-1418-4cb4-8617-f2ecc7089e0b\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.100128 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/39600642-1418-4cb4-8617-f2ecc7089e0b-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"39600642-1418-4cb4-8617-f2ecc7089e0b\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.100928 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/39600642-1418-4cb4-8617-f2ecc7089e0b-scripts\") pod \"ovn-northd-0\" 
(UID: \"39600642-1418-4cb4-8617-f2ecc7089e0b\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.103411 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/39600642-1418-4cb4-8617-f2ecc7089e0b-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"39600642-1418-4cb4-8617-f2ecc7089e0b\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.103493 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/39600642-1418-4cb4-8617-f2ecc7089e0b-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"39600642-1418-4cb4-8617-f2ecc7089e0b\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.103914 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/39600642-1418-4cb4-8617-f2ecc7089e0b-config\") pod \"ovn-northd-0\" (UID: \"39600642-1418-4cb4-8617-f2ecc7089e0b\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.106954 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39600642-1418-4cb4-8617-f2ecc7089e0b-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"39600642-1418-4cb4-8617-f2ecc7089e0b\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.131877 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4jfnx\" (UniqueName: \"kubernetes.io/projected/39600642-1418-4cb4-8617-f2ecc7089e0b-kube-api-access-4jfnx\") pod \"ovn-northd-0\" (UID: \"39600642-1418-4cb4-8617-f2ecc7089e0b\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.158845 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-7d788974f4-sf4l4"] Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.167113 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.176207 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-6865f4c68b-s7dm7"] Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.177945 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-6865f4c68b-s7dm7" Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.187202 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-6865f4c68b-s7dm7"] Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.306107 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/b6b873be-e034-4d85-b131-548eff8013ed-config\") pod \"neutron-6865f4c68b-s7dm7\" (UID: \"b6b873be-e034-4d85-b131-548eff8013ed\") " pod="openstack-kuttl-tests/neutron-6865f4c68b-s7dm7" Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.306365 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/b6b873be-e034-4d85-b131-548eff8013ed-httpd-config\") pod \"neutron-6865f4c68b-s7dm7\" (UID: \"b6b873be-e034-4d85-b131-548eff8013ed\") " pod="openstack-kuttl-tests/neutron-6865f4c68b-s7dm7" Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.306431 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6b873be-e034-4d85-b131-548eff8013ed-ovndb-tls-certs\") pod \"neutron-6865f4c68b-s7dm7\" (UID: \"b6b873be-e034-4d85-b131-548eff8013ed\") " pod="openstack-kuttl-tests/neutron-6865f4c68b-s7dm7" Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.306535 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6b873be-e034-4d85-b131-548eff8013ed-public-tls-certs\") pod \"neutron-6865f4c68b-s7dm7\" (UID: \"b6b873be-e034-4d85-b131-548eff8013ed\") " pod="openstack-kuttl-tests/neutron-6865f4c68b-s7dm7" Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.306645 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6b873be-e034-4d85-b131-548eff8013ed-internal-tls-certs\") pod \"neutron-6865f4c68b-s7dm7\" (UID: \"b6b873be-e034-4d85-b131-548eff8013ed\") " pod="openstack-kuttl-tests/neutron-6865f4c68b-s7dm7" Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.306675 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6b873be-e034-4d85-b131-548eff8013ed-combined-ca-bundle\") pod \"neutron-6865f4c68b-s7dm7\" (UID: \"b6b873be-e034-4d85-b131-548eff8013ed\") " pod="openstack-kuttl-tests/neutron-6865f4c68b-s7dm7" Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.306697 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-md7lm\" (UniqueName: \"kubernetes.io/projected/b6b873be-e034-4d85-b131-548eff8013ed-kube-api-access-md7lm\") pod \"neutron-6865f4c68b-s7dm7\" (UID: \"b6b873be-e034-4d85-b131-548eff8013ed\") " pod="openstack-kuttl-tests/neutron-6865f4c68b-s7dm7" Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.410536 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6b873be-e034-4d85-b131-548eff8013ed-public-tls-certs\") pod \"neutron-6865f4c68b-s7dm7\" (UID: \"b6b873be-e034-4d85-b131-548eff8013ed\") " pod="openstack-kuttl-tests/neutron-6865f4c68b-s7dm7" Jan 20 17:35:39 crc 
kubenswrapper[4558]: I0120 17:35:39.410656 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6b873be-e034-4d85-b131-548eff8013ed-internal-tls-certs\") pod \"neutron-6865f4c68b-s7dm7\" (UID: \"b6b873be-e034-4d85-b131-548eff8013ed\") " pod="openstack-kuttl-tests/neutron-6865f4c68b-s7dm7" Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.410701 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6b873be-e034-4d85-b131-548eff8013ed-combined-ca-bundle\") pod \"neutron-6865f4c68b-s7dm7\" (UID: \"b6b873be-e034-4d85-b131-548eff8013ed\") " pod="openstack-kuttl-tests/neutron-6865f4c68b-s7dm7" Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.410722 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-md7lm\" (UniqueName: \"kubernetes.io/projected/b6b873be-e034-4d85-b131-548eff8013ed-kube-api-access-md7lm\") pod \"neutron-6865f4c68b-s7dm7\" (UID: \"b6b873be-e034-4d85-b131-548eff8013ed\") " pod="openstack-kuttl-tests/neutron-6865f4c68b-s7dm7" Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.410810 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/b6b873be-e034-4d85-b131-548eff8013ed-config\") pod \"neutron-6865f4c68b-s7dm7\" (UID: \"b6b873be-e034-4d85-b131-548eff8013ed\") " pod="openstack-kuttl-tests/neutron-6865f4c68b-s7dm7" Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.410854 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/b6b873be-e034-4d85-b131-548eff8013ed-httpd-config\") pod \"neutron-6865f4c68b-s7dm7\" (UID: \"b6b873be-e034-4d85-b131-548eff8013ed\") " pod="openstack-kuttl-tests/neutron-6865f4c68b-s7dm7" Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.410885 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6b873be-e034-4d85-b131-548eff8013ed-ovndb-tls-certs\") pod \"neutron-6865f4c68b-s7dm7\" (UID: \"b6b873be-e034-4d85-b131-548eff8013ed\") " pod="openstack-kuttl-tests/neutron-6865f4c68b-s7dm7" Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.416728 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6b873be-e034-4d85-b131-548eff8013ed-combined-ca-bundle\") pod \"neutron-6865f4c68b-s7dm7\" (UID: \"b6b873be-e034-4d85-b131-548eff8013ed\") " pod="openstack-kuttl-tests/neutron-6865f4c68b-s7dm7" Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.416909 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6b873be-e034-4d85-b131-548eff8013ed-internal-tls-certs\") pod \"neutron-6865f4c68b-s7dm7\" (UID: \"b6b873be-e034-4d85-b131-548eff8013ed\") " pod="openstack-kuttl-tests/neutron-6865f4c68b-s7dm7" Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.416963 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6b873be-e034-4d85-b131-548eff8013ed-ovndb-tls-certs\") pod \"neutron-6865f4c68b-s7dm7\" (UID: \"b6b873be-e034-4d85-b131-548eff8013ed\") " pod="openstack-kuttl-tests/neutron-6865f4c68b-s7dm7" Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 
17:35:39.417378 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/b6b873be-e034-4d85-b131-548eff8013ed-config\") pod \"neutron-6865f4c68b-s7dm7\" (UID: \"b6b873be-e034-4d85-b131-548eff8013ed\") " pod="openstack-kuttl-tests/neutron-6865f4c68b-s7dm7" Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.417942 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6b873be-e034-4d85-b131-548eff8013ed-public-tls-certs\") pod \"neutron-6865f4c68b-s7dm7\" (UID: \"b6b873be-e034-4d85-b131-548eff8013ed\") " pod="openstack-kuttl-tests/neutron-6865f4c68b-s7dm7" Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.419123 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/b6b873be-e034-4d85-b131-548eff8013ed-httpd-config\") pod \"neutron-6865f4c68b-s7dm7\" (UID: \"b6b873be-e034-4d85-b131-548eff8013ed\") " pod="openstack-kuttl-tests/neutron-6865f4c68b-s7dm7" Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.428368 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-md7lm\" (UniqueName: \"kubernetes.io/projected/b6b873be-e034-4d85-b131-548eff8013ed-kube-api-access-md7lm\") pod \"neutron-6865f4c68b-s7dm7\" (UID: \"b6b873be-e034-4d85-b131-548eff8013ed\") " pod="openstack-kuttl-tests/neutron-6865f4c68b-s7dm7" Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.472763 4558 generic.go:334] "Generic (PLEG): container finished" podID="21c81b0a-c70b-4d57-bea0-96e4840af7dd" containerID="5f7d9bee57d3863532f7d1c9fc246fa579caa23eddd68f1cd8f774b112683665" exitCode=0 Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.472863 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"21c81b0a-c70b-4d57-bea0-96e4840af7dd","Type":"ContainerDied","Data":"5f7d9bee57d3863532f7d1c9fc246fa579caa23eddd68f1cd8f774b112683665"} Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.480649 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"e228514c-6d22-4527-b365-913e3ea3cfdb","Type":"ContainerStarted","Data":"53088ffac0cd61c3f88675b4eee842375a2868c6b2cbeba471fe06c8d7634d43"} Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.485706 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.508863 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.509146 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="a205fa61-cd83-4d6e-b82b-aba6f9144c71" containerName="ceilometer-central-agent" containerID="cri-o://73a579cd1ed45c429083e3153dc8ac019914451ccabba3eac12a668e6e281974" gracePeriod=30 Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.509554 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="a205fa61-cd83-4d6e-b82b-aba6f9144c71" containerName="proxy-httpd" containerID="cri-o://7674db8d22b702209ebaaadbb855ca6def67d0a91881b8c25b343fb381ebccde" gracePeriod=30 Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.509660 4558 kuberuntime_container.go:808] "Killing container with a 
grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="a205fa61-cd83-4d6e-b82b-aba6f9144c71" containerName="ceilometer-notification-agent" containerID="cri-o://d6ec41d7d0083a1b2dfe623be5f2d5647fc24682bdfeff8a9ef6b95d75507454" gracePeriod=30 Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.509702 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="a205fa61-cd83-4d6e-b82b-aba6f9144c71" containerName="sg-core" containerID="cri-o://9787f119978c69e093f0f9dac43490e32de39c2c966b73ff0ee092a20cb9152f" gracePeriod=30 Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.518618 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-6865f4c68b-s7dm7" Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.521745 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-6f4bb7946d-nl7kt" event={"ID":"ba60e66e-45cc-48a7-92dc-126983f4aa43","Type":"ContainerStarted","Data":"f166cbf6a54dbadf426ff3b23a2dc8fc38d9019e839086e6e93b425c311ae2f8"} Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.521909 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-6f4bb7946d-nl7kt" podUID="ba60e66e-45cc-48a7-92dc-126983f4aa43" containerName="neutron-api" containerID="cri-o://b3cfb60543d88a74e156430e8c9432a06e86b67228b8b014df284600429f9b92" gracePeriod=30 Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.521987 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/neutron-6f4bb7946d-nl7kt" Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.522382 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-6f4bb7946d-nl7kt" podUID="ba60e66e-45cc-48a7-92dc-126983f4aa43" containerName="neutron-httpd" containerID="cri-o://f166cbf6a54dbadf426ff3b23a2dc8fc38d9019e839086e6e93b425c311ae2f8" gracePeriod=30 Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.524647 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/cinder-api-0" podStartSLOduration=3.5246298400000002 podStartE2EDuration="3.52462984s" podCreationTimestamp="2026-01-20 17:35:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:35:39.512352134 +0000 UTC m=+3233.272690101" watchObservedRunningTime="2026-01-20 17:35:39.52462984 +0000 UTC m=+3233.284967807" Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.543933 4558 generic.go:334] "Generic (PLEG): container finished" podID="d78c49cb-72eb-4def-9c40-b16128c74418" containerID="8da85592700c5420bc1d1bbe307f08e0a71e1e162220d3e5dbe3bda34763061f" exitCode=0 Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.543990 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"d78c49cb-72eb-4def-9c40-b16128c74418","Type":"ContainerDied","Data":"8da85592700c5420bc1d1bbe307f08e0a71e1e162220d3e5dbe3bda34763061f"} Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.558248 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-7d788974f4-sf4l4" event={"ID":"13d3779b-9cf3-47f3-b330-a3adfd5b10f7","Type":"ContainerStarted","Data":"78d0d5adf4dba87cab66796976c167974cf7f1657e1f2e864bdb469b33fef266"} Jan 20 17:35:39 crc kubenswrapper[4558]: 
I0120 17:35:39.558306 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-7d788974f4-sf4l4" event={"ID":"13d3779b-9cf3-47f3-b330-a3adfd5b10f7","Type":"ContainerStarted","Data":"d610015d2bf74a42a31afff0f9df8d4dc81ed4fbf3c4e0bca7a1f70d778bab5a"} Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.558430 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-7d788974f4-sf4l4" podUID="13d3779b-9cf3-47f3-b330-a3adfd5b10f7" containerName="neutron-api" containerID="cri-o://78d0d5adf4dba87cab66796976c167974cf7f1657e1f2e864bdb469b33fef266" gracePeriod=30 Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.558523 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/neutron-7d788974f4-sf4l4" Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.558850 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-7d788974f4-sf4l4" podUID="13d3779b-9cf3-47f3-b330-a3adfd5b10f7" containerName="neutron-httpd" containerID="cri-o://8a0c4e15f43d48c5c3bf5cca5d164426d653ec27a6971028af1b9300e881743e" gracePeriod=30 Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.564022 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/neutron-6f4bb7946d-nl7kt" podStartSLOduration=3.564008297 podStartE2EDuration="3.564008297s" podCreationTimestamp="2026-01-20 17:35:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:35:39.545796131 +0000 UTC m=+3233.306134098" watchObservedRunningTime="2026-01-20 17:35:39.564008297 +0000 UTC m=+3233.324346265" Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.582891 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/neutron-7d788974f4-sf4l4" podStartSLOduration=1.582880574 podStartE2EDuration="1.582880574s" podCreationTimestamp="2026-01-20 17:35:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:35:39.580738255 +0000 UTC m=+3233.341076232" watchObservedRunningTime="2026-01-20 17:35:39.582880574 +0000 UTC m=+3233.343218540" Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.639799 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.642705 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.649027 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/ceilometer-0" podUID="a205fa61-cd83-4d6e-b82b-aba6f9144c71" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.111:3000/\": read tcp 10.217.0.2:60086->10.217.0.111:3000: read: connection reset by peer" Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.753456 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.826278 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/21c81b0a-c70b-4d57-bea0-96e4840af7dd-scripts\") pod \"21c81b0a-c70b-4d57-bea0-96e4840af7dd\" (UID: \"21c81b0a-c70b-4d57-bea0-96e4840af7dd\") " Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.826322 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/21c81b0a-c70b-4d57-bea0-96e4840af7dd-internal-tls-certs\") pod \"21c81b0a-c70b-4d57-bea0-96e4840af7dd\" (UID: \"21c81b0a-c70b-4d57-bea0-96e4840af7dd\") " Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.826365 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2jzkg\" (UniqueName: \"kubernetes.io/projected/21c81b0a-c70b-4d57-bea0-96e4840af7dd-kube-api-access-2jzkg\") pod \"21c81b0a-c70b-4d57-bea0-96e4840af7dd\" (UID: \"21c81b0a-c70b-4d57-bea0-96e4840af7dd\") " Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.826404 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21c81b0a-c70b-4d57-bea0-96e4840af7dd-combined-ca-bundle\") pod \"21c81b0a-c70b-4d57-bea0-96e4840af7dd\" (UID: \"21c81b0a-c70b-4d57-bea0-96e4840af7dd\") " Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.826471 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"21c81b0a-c70b-4d57-bea0-96e4840af7dd\" (UID: \"21c81b0a-c70b-4d57-bea0-96e4840af7dd\") " Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.826524 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/21c81b0a-c70b-4d57-bea0-96e4840af7dd-httpd-run\") pod \"21c81b0a-c70b-4d57-bea0-96e4840af7dd\" (UID: \"21c81b0a-c70b-4d57-bea0-96e4840af7dd\") " Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.826744 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21c81b0a-c70b-4d57-bea0-96e4840af7dd-config-data\") pod \"21c81b0a-c70b-4d57-bea0-96e4840af7dd\" (UID: \"21c81b0a-c70b-4d57-bea0-96e4840af7dd\") " Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.826806 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/21c81b0a-c70b-4d57-bea0-96e4840af7dd-logs\") pod \"21c81b0a-c70b-4d57-bea0-96e4840af7dd\" (UID: \"21c81b0a-c70b-4d57-bea0-96e4840af7dd\") " Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.828098 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/21c81b0a-c70b-4d57-bea0-96e4840af7dd-logs" (OuterVolumeSpecName: "logs") pod "21c81b0a-c70b-4d57-bea0-96e4840af7dd" (UID: "21c81b0a-c70b-4d57-bea0-96e4840af7dd"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.828121 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/21c81b0a-c70b-4d57-bea0-96e4840af7dd-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "21c81b0a-c70b-4d57-bea0-96e4840af7dd" (UID: "21c81b0a-c70b-4d57-bea0-96e4840af7dd"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.830617 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/21c81b0a-c70b-4d57-bea0-96e4840af7dd-kube-api-access-2jzkg" (OuterVolumeSpecName: "kube-api-access-2jzkg") pod "21c81b0a-c70b-4d57-bea0-96e4840af7dd" (UID: "21c81b0a-c70b-4d57-bea0-96e4840af7dd"). InnerVolumeSpecName "kube-api-access-2jzkg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.832867 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21c81b0a-c70b-4d57-bea0-96e4840af7dd-scripts" (OuterVolumeSpecName: "scripts") pod "21c81b0a-c70b-4d57-bea0-96e4840af7dd" (UID: "21c81b0a-c70b-4d57-bea0-96e4840af7dd"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.838259 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage15-crc" (OuterVolumeSpecName: "glance") pod "21c81b0a-c70b-4d57-bea0-96e4840af7dd" (UID: "21c81b0a-c70b-4d57-bea0-96e4840af7dd"). InnerVolumeSpecName "local-storage15-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.873347 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21c81b0a-c70b-4d57-bea0-96e4840af7dd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "21c81b0a-c70b-4d57-bea0-96e4840af7dd" (UID: "21c81b0a-c70b-4d57-bea0-96e4840af7dd"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.898270 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.906980 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21c81b0a-c70b-4d57-bea0-96e4840af7dd-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "21c81b0a-c70b-4d57-bea0-96e4840af7dd" (UID: "21c81b0a-c70b-4d57-bea0-96e4840af7dd"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.924270 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21c81b0a-c70b-4d57-bea0-96e4840af7dd-config-data" (OuterVolumeSpecName: "config-data") pod "21c81b0a-c70b-4d57-bea0-96e4840af7dd" (UID: "21c81b0a-c70b-4d57-bea0-96e4840af7dd"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.928262 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d78c49cb-72eb-4def-9c40-b16128c74418-config-data\") pod \"d78c49cb-72eb-4def-9c40-b16128c74418\" (UID: \"d78c49cb-72eb-4def-9c40-b16128c74418\") " Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.928458 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d78c49cb-72eb-4def-9c40-b16128c74418-public-tls-certs\") pod \"d78c49cb-72eb-4def-9c40-b16128c74418\" (UID: \"d78c49cb-72eb-4def-9c40-b16128c74418\") " Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.928551 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d78c49cb-72eb-4def-9c40-b16128c74418-logs\") pod \"d78c49cb-72eb-4def-9c40-b16128c74418\" (UID: \"d78c49cb-72eb-4def-9c40-b16128c74418\") " Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.928592 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage16-crc\") pod \"d78c49cb-72eb-4def-9c40-b16128c74418\" (UID: \"d78c49cb-72eb-4def-9c40-b16128c74418\") " Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.928615 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d78c49cb-72eb-4def-9c40-b16128c74418-combined-ca-bundle\") pod \"d78c49cb-72eb-4def-9c40-b16128c74418\" (UID: \"d78c49cb-72eb-4def-9c40-b16128c74418\") " Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.928745 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d78c49cb-72eb-4def-9c40-b16128c74418-scripts\") pod \"d78c49cb-72eb-4def-9c40-b16128c74418\" (UID: \"d78c49cb-72eb-4def-9c40-b16128c74418\") " Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.928775 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d78c49cb-72eb-4def-9c40-b16128c74418-httpd-run\") pod \"d78c49cb-72eb-4def-9c40-b16128c74418\" (UID: \"d78c49cb-72eb-4def-9c40-b16128c74418\") " Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.929001 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fdln6\" (UniqueName: \"kubernetes.io/projected/d78c49cb-72eb-4def-9c40-b16128c74418-kube-api-access-fdln6\") pod \"d78c49cb-72eb-4def-9c40-b16128c74418\" (UID: \"d78c49cb-72eb-4def-9c40-b16128c74418\") " Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.929846 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/21c81b0a-c70b-4d57-bea0-96e4840af7dd-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.929879 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/21c81b0a-c70b-4d57-bea0-96e4840af7dd-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.929889 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/21c81b0a-c70b-4d57-bea0-96e4840af7dd-scripts\") on node \"crc\" 
DevicePath \"\"" Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.929900 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/21c81b0a-c70b-4d57-bea0-96e4840af7dd-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.929913 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2jzkg\" (UniqueName: \"kubernetes.io/projected/21c81b0a-c70b-4d57-bea0-96e4840af7dd-kube-api-access-2jzkg\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.929922 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21c81b0a-c70b-4d57-bea0-96e4840af7dd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.929958 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") on node \"crc\" " Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.929968 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/21c81b0a-c70b-4d57-bea0-96e4840af7dd-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.931691 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d78c49cb-72eb-4def-9c40-b16128c74418-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "d78c49cb-72eb-4def-9c40-b16128c74418" (UID: "d78c49cb-72eb-4def-9c40-b16128c74418"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.932995 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d78c49cb-72eb-4def-9c40-b16128c74418-logs" (OuterVolumeSpecName: "logs") pod "d78c49cb-72eb-4def-9c40-b16128c74418" (UID: "d78c49cb-72eb-4def-9c40-b16128c74418"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.935918 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage16-crc" (OuterVolumeSpecName: "glance") pod "d78c49cb-72eb-4def-9c40-b16128c74418" (UID: "d78c49cb-72eb-4def-9c40-b16128c74418"). InnerVolumeSpecName "local-storage16-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.944801 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d78c49cb-72eb-4def-9c40-b16128c74418-scripts" (OuterVolumeSpecName: "scripts") pod "d78c49cb-72eb-4def-9c40-b16128c74418" (UID: "d78c49cb-72eb-4def-9c40-b16128c74418"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.947408 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d78c49cb-72eb-4def-9c40-b16128c74418-kube-api-access-fdln6" (OuterVolumeSpecName: "kube-api-access-fdln6") pod "d78c49cb-72eb-4def-9c40-b16128c74418" (UID: "d78c49cb-72eb-4def-9c40-b16128c74418"). InnerVolumeSpecName "kube-api-access-fdln6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.960605 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.977546 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage15-crc" (UniqueName: "kubernetes.io/local-volume/local-storage15-crc") on node "crc" Jan 20 17:35:39 crc kubenswrapper[4558]: I0120 17:35:39.983617 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d78c49cb-72eb-4def-9c40-b16128c74418-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d78c49cb-72eb-4def-9c40-b16128c74418" (UID: "d78c49cb-72eb-4def-9c40-b16128c74418"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.014832 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d78c49cb-72eb-4def-9c40-b16128c74418-config-data" (OuterVolumeSpecName: "config-data") pod "d78c49cb-72eb-4def-9c40-b16128c74418" (UID: "d78c49cb-72eb-4def-9c40-b16128c74418"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.026875 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.027184 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="872e7858-d089-4bb5-89aa-060646e32754" containerName="nova-api-log" containerID="cri-o://9dea7e4de1f3490846ec2c17c47d30837c43228f400cb136eee18c9970b72bb6" gracePeriod=30 Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.027689 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="872e7858-d089-4bb5-89aa-060646e32754" containerName="nova-api-api" containerID="cri-o://52ee2a447d2cae531422324e0f180c5e03fd7fe612731ae435103a62b66fa686" gracePeriod=30 Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.032349 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fdln6\" (UniqueName: \"kubernetes.io/projected/d78c49cb-72eb-4def-9c40-b16128c74418-kube-api-access-fdln6\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.032380 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d78c49cb-72eb-4def-9c40-b16128c74418-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.032391 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d78c49cb-72eb-4def-9c40-b16128c74418-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.032413 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage16-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage16-crc\") on node \"crc\" " Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.032425 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d78c49cb-72eb-4def-9c40-b16128c74418-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:40 crc 
kubenswrapper[4558]: I0120 17:35:40.032433 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d78c49cb-72eb-4def-9c40-b16128c74418-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.032442 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d78c49cb-72eb-4def-9c40-b16128c74418-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.032451 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.055558 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d78c49cb-72eb-4def-9c40-b16128c74418-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "d78c49cb-72eb-4def-9c40-b16128c74418" (UID: "d78c49cb-72eb-4def-9c40-b16128c74418"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.062127 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage16-crc" (UniqueName: "kubernetes.io/local-volume/local-storage16-crc") on node "crc" Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.082186 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-6865f4c68b-s7dm7"] Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.134977 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d78c49cb-72eb-4def-9c40-b16128c74418-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.135113 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage16-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage16-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.572681 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.580032 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="76c1fbce-d8ff-4469-85cb-11124a82402a" path="/var/lib/kubelet/pods/76c1fbce-d8ff-4469-85cb-11124a82402a/volumes" Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.581375 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.581472 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"d78c49cb-72eb-4def-9c40-b16128c74418","Type":"ContainerDied","Data":"ec4550a0ff6fadc406c9437c9c4710fec8bca31865e14b1400128f0f71c62981"} Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.581572 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-7d788974f4-sf4l4" event={"ID":"13d3779b-9cf3-47f3-b330-a3adfd5b10f7","Type":"ContainerStarted","Data":"8a0c4e15f43d48c5c3bf5cca5d164426d653ec27a6971028af1b9300e881743e"} Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.581654 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"39600642-1418-4cb4-8617-f2ecc7089e0b","Type":"ContainerStarted","Data":"838cfcc8e26bf3e11489fea7f7801117e2630d787be77ecdb9b5831a033f4a6c"} Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.581714 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"39600642-1418-4cb4-8617-f2ecc7089e0b","Type":"ContainerStarted","Data":"33b43d304994a43dcb87337ad0c6e08deaf4a2507647b3ba6f756482ec39e182"} Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.581783 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"39600642-1418-4cb4-8617-f2ecc7089e0b","Type":"ContainerStarted","Data":"bdb22742dbe82f2b8c2b2d05c6407feef886a24bb79b2700a3f4555ca2e2cde4"} Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.581617 4558 scope.go:117] "RemoveContainer" containerID="8da85592700c5420bc1d1bbe307f08e0a71e1e162220d3e5dbe3bda34763061f" Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.619110 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"21c81b0a-c70b-4d57-bea0-96e4840af7dd","Type":"ContainerDied","Data":"8da018bd9bfe4c0ed93d768061a960a8f05d5986aebfa42f877a7b9fc4eaca37"} Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.619210 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.639328 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-6865f4c68b-s7dm7" event={"ID":"b6b873be-e034-4d85-b131-548eff8013ed","Type":"ContainerStarted","Data":"813d6d3e667f27b128b13d9ed3b5e2eb7fbd264b4a64cde9c0a7a2ffc3eabf31"} Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.639400 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-6865f4c68b-s7dm7" event={"ID":"b6b873be-e034-4d85-b131-548eff8013ed","Type":"ContainerStarted","Data":"ab9f91ee996ebed5befa70a8ec573471b2011a4fc0f88f09cde407f6a7347062"} Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.639416 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-6865f4c68b-s7dm7" event={"ID":"b6b873be-e034-4d85-b131-548eff8013ed","Type":"ContainerStarted","Data":"3972e8f809e0386263608c53f1401f9b5908912fbbaf03f2a6de873c9c9993fe"} Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.639599 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/neutron-6865f4c68b-s7dm7" Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.640552 4558 scope.go:117] "RemoveContainer" containerID="c9fcf8291fd575a7777eae1e5a6b38407c0f8527d5473b4afc554f44d2c8098e" Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.648478 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ovn-northd-0" podStartSLOduration=2.648462268 podStartE2EDuration="2.648462268s" podCreationTimestamp="2026-01-20 17:35:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:35:40.614470571 +0000 UTC m=+3234.374808539" watchObservedRunningTime="2026-01-20 17:35:40.648462268 +0000 UTC m=+3234.408800235" Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.664428 4558 generic.go:334] "Generic (PLEG): container finished" podID="ba60e66e-45cc-48a7-92dc-126983f4aa43" containerID="f166cbf6a54dbadf426ff3b23a2dc8fc38d9019e839086e6e93b425c311ae2f8" exitCode=0 Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.664516 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-6f4bb7946d-nl7kt" event={"ID":"ba60e66e-45cc-48a7-92dc-126983f4aa43","Type":"ContainerDied","Data":"f166cbf6a54dbadf426ff3b23a2dc8fc38d9019e839086e6e93b425c311ae2f8"} Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.676256 4558 generic.go:334] "Generic (PLEG): container finished" podID="872e7858-d089-4bb5-89aa-060646e32754" containerID="9dea7e4de1f3490846ec2c17c47d30837c43228f400cb136eee18c9970b72bb6" exitCode=143 Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.676333 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"872e7858-d089-4bb5-89aa-060646e32754","Type":"ContainerDied","Data":"9dea7e4de1f3490846ec2c17c47d30837c43228f400cb136eee18c9970b72bb6"} Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.702133 4558 generic.go:334] "Generic (PLEG): container finished" podID="a205fa61-cd83-4d6e-b82b-aba6f9144c71" containerID="7674db8d22b702209ebaaadbb855ca6def67d0a91881b8c25b343fb381ebccde" exitCode=0 Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.702189 4558 generic.go:334] "Generic (PLEG): container finished" podID="a205fa61-cd83-4d6e-b82b-aba6f9144c71" 
containerID="9787f119978c69e093f0f9dac43490e32de39c2c966b73ff0ee092a20cb9152f" exitCode=2 Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.702201 4558 generic.go:334] "Generic (PLEG): container finished" podID="a205fa61-cd83-4d6e-b82b-aba6f9144c71" containerID="73a579cd1ed45c429083e3153dc8ac019914451ccabba3eac12a668e6e281974" exitCode=0 Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.703705 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"a205fa61-cd83-4d6e-b82b-aba6f9144c71","Type":"ContainerDied","Data":"7674db8d22b702209ebaaadbb855ca6def67d0a91881b8c25b343fb381ebccde"} Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.703766 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"a205fa61-cd83-4d6e-b82b-aba6f9144c71","Type":"ContainerDied","Data":"9787f119978c69e093f0f9dac43490e32de39c2c966b73ff0ee092a20cb9152f"} Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.703793 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"a205fa61-cd83-4d6e-b82b-aba6f9144c71","Type":"ContainerDied","Data":"73a579cd1ed45c429083e3153dc8ac019914451ccabba3eac12a668e6e281974"} Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.711403 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.725328 4558 scope.go:117] "RemoveContainer" containerID="5f7d9bee57d3863532f7d1c9fc246fa579caa23eddd68f1cd8f774b112683665" Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.771730 4558 scope.go:117] "RemoveContainer" containerID="21722b5c1856dd9260830023e156411f09aabc0fe431ae02c3192f233463c11f" Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.772730 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.790219 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:35:40 crc kubenswrapper[4558]: E0120 17:35:40.790675 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d78c49cb-72eb-4def-9c40-b16128c74418" containerName="glance-log" Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.790695 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d78c49cb-72eb-4def-9c40-b16128c74418" containerName="glance-log" Jan 20 17:35:40 crc kubenswrapper[4558]: E0120 17:35:40.790704 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d78c49cb-72eb-4def-9c40-b16128c74418" containerName="glance-httpd" Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.790710 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d78c49cb-72eb-4def-9c40-b16128c74418" containerName="glance-httpd" Jan 20 17:35:40 crc kubenswrapper[4558]: E0120 17:35:40.790722 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="21c81b0a-c70b-4d57-bea0-96e4840af7dd" containerName="glance-log" Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.790728 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="21c81b0a-c70b-4d57-bea0-96e4840af7dd" containerName="glance-log" Jan 20 17:35:40 crc kubenswrapper[4558]: E0120 17:35:40.790755 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="21c81b0a-c70b-4d57-bea0-96e4840af7dd" containerName="glance-httpd" Jan 20 17:35:40 crc 
kubenswrapper[4558]: I0120 17:35:40.790761 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="21c81b0a-c70b-4d57-bea0-96e4840af7dd" containerName="glance-httpd" Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.791003 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="21c81b0a-c70b-4d57-bea0-96e4840af7dd" containerName="glance-httpd" Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.791025 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d78c49cb-72eb-4def-9c40-b16128c74418" containerName="glance-httpd" Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.791033 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="21c81b0a-c70b-4d57-bea0-96e4840af7dd" containerName="glance-log" Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.791041 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d78c49cb-72eb-4def-9c40-b16128c74418" containerName="glance-log" Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.792221 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.794492 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-external-config-data" Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.794621 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-scripts" Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.794875 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-glance-dockercfg-bsssw" Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.795308 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-public-svc" Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.795512 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.802913 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/neutron-6865f4c68b-s7dm7" podStartSLOduration=1.802882656 podStartE2EDuration="1.802882656s" podCreationTimestamp="2026-01-20 17:35:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:35:40.676352632 +0000 UTC m=+3234.436690599" watchObservedRunningTime="2026-01-20 17:35:40.802882656 +0000 UTC m=+3234.563220624" Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.813957 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.821220 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.826867 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.829538 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.833004 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-internal-config-data" Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.833335 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.835995 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-internal-svc" Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.979135 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c9ec2559-f7c5-4318-9a3b-4544d222ff8e-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"c9ec2559-f7c5-4318-9a3b-4544d222ff8e\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.979425 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9ec2559-f7c5-4318-9a3b-4544d222ff8e-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"c9ec2559-f7c5-4318-9a3b-4544d222ff8e\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.979587 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/91fac67e-69a9-4d88-9136-0d2484ca0dce-scripts\") pod \"glance-default-internal-api-0\" (UID: \"91fac67e-69a9-4d88-9136-0d2484ca0dce\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.979624 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"glance-default-internal-api-0\" (UID: \"91fac67e-69a9-4d88-9136-0d2484ca0dce\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.979654 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/91fac67e-69a9-4d88-9136-0d2484ca0dce-logs\") pod \"glance-default-internal-api-0\" (UID: \"91fac67e-69a9-4d88-9136-0d2484ca0dce\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.979694 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c9ec2559-f7c5-4318-9a3b-4544d222ff8e-logs\") pod \"glance-default-external-api-0\" (UID: \"c9ec2559-f7c5-4318-9a3b-4544d222ff8e\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.979724 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2fspt\" (UniqueName: \"kubernetes.io/projected/c9ec2559-f7c5-4318-9a3b-4544d222ff8e-kube-api-access-2fspt\") pod \"glance-default-external-api-0\" (UID: \"c9ec2559-f7c5-4318-9a3b-4544d222ff8e\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:35:40 crc 
kubenswrapper[4558]: I0120 17:35:40.979761 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mmm6k\" (UniqueName: \"kubernetes.io/projected/91fac67e-69a9-4d88-9136-0d2484ca0dce-kube-api-access-mmm6k\") pod \"glance-default-internal-api-0\" (UID: \"91fac67e-69a9-4d88-9136-0d2484ca0dce\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.979857 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/91fac67e-69a9-4d88-9136-0d2484ca0dce-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"91fac67e-69a9-4d88-9136-0d2484ca0dce\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.979889 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9ec2559-f7c5-4318-9a3b-4544d222ff8e-config-data\") pod \"glance-default-external-api-0\" (UID: \"c9ec2559-f7c5-4318-9a3b-4544d222ff8e\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.979959 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage16-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage16-crc\") pod \"glance-default-external-api-0\" (UID: \"c9ec2559-f7c5-4318-9a3b-4544d222ff8e\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.980962 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c9ec2559-f7c5-4318-9a3b-4544d222ff8e-scripts\") pod \"glance-default-external-api-0\" (UID: \"c9ec2559-f7c5-4318-9a3b-4544d222ff8e\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.981155 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91fac67e-69a9-4d88-9136-0d2484ca0dce-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"91fac67e-69a9-4d88-9136-0d2484ca0dce\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.981283 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/91fac67e-69a9-4d88-9136-0d2484ca0dce-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"91fac67e-69a9-4d88-9136-0d2484ca0dce\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.981457 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c9ec2559-f7c5-4318-9a3b-4544d222ff8e-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"c9ec2559-f7c5-4318-9a3b-4544d222ff8e\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:35:40 crc kubenswrapper[4558]: I0120 17:35:40.981502 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/91fac67e-69a9-4d88-9136-0d2484ca0dce-config-data\") pod \"glance-default-internal-api-0\" (UID: \"91fac67e-69a9-4d88-9136-0d2484ca0dce\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:35:41 crc kubenswrapper[4558]: I0120 17:35:41.084466 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9ec2559-f7c5-4318-9a3b-4544d222ff8e-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"c9ec2559-f7c5-4318-9a3b-4544d222ff8e\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:35:41 crc kubenswrapper[4558]: I0120 17:35:41.084577 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"glance-default-internal-api-0\" (UID: \"91fac67e-69a9-4d88-9136-0d2484ca0dce\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:35:41 crc kubenswrapper[4558]: I0120 17:35:41.084602 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/91fac67e-69a9-4d88-9136-0d2484ca0dce-scripts\") pod \"glance-default-internal-api-0\" (UID: \"91fac67e-69a9-4d88-9136-0d2484ca0dce\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:35:41 crc kubenswrapper[4558]: I0120 17:35:41.084634 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c9ec2559-f7c5-4318-9a3b-4544d222ff8e-logs\") pod \"glance-default-external-api-0\" (UID: \"c9ec2559-f7c5-4318-9a3b-4544d222ff8e\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:35:41 crc kubenswrapper[4558]: I0120 17:35:41.084651 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/91fac67e-69a9-4d88-9136-0d2484ca0dce-logs\") pod \"glance-default-internal-api-0\" (UID: \"91fac67e-69a9-4d88-9136-0d2484ca0dce\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:35:41 crc kubenswrapper[4558]: I0120 17:35:41.084679 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2fspt\" (UniqueName: \"kubernetes.io/projected/c9ec2559-f7c5-4318-9a3b-4544d222ff8e-kube-api-access-2fspt\") pod \"glance-default-external-api-0\" (UID: \"c9ec2559-f7c5-4318-9a3b-4544d222ff8e\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:35:41 crc kubenswrapper[4558]: I0120 17:35:41.084715 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mmm6k\" (UniqueName: \"kubernetes.io/projected/91fac67e-69a9-4d88-9136-0d2484ca0dce-kube-api-access-mmm6k\") pod \"glance-default-internal-api-0\" (UID: \"91fac67e-69a9-4d88-9136-0d2484ca0dce\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:35:41 crc kubenswrapper[4558]: I0120 17:35:41.084747 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/91fac67e-69a9-4d88-9136-0d2484ca0dce-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"91fac67e-69a9-4d88-9136-0d2484ca0dce\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:35:41 crc kubenswrapper[4558]: I0120 17:35:41.084772 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/c9ec2559-f7c5-4318-9a3b-4544d222ff8e-config-data\") pod \"glance-default-external-api-0\" (UID: \"c9ec2559-f7c5-4318-9a3b-4544d222ff8e\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:35:41 crc kubenswrapper[4558]: I0120 17:35:41.084809 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage16-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage16-crc\") pod \"glance-default-external-api-0\" (UID: \"c9ec2559-f7c5-4318-9a3b-4544d222ff8e\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:35:41 crc kubenswrapper[4558]: I0120 17:35:41.085002 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c9ec2559-f7c5-4318-9a3b-4544d222ff8e-scripts\") pod \"glance-default-external-api-0\" (UID: \"c9ec2559-f7c5-4318-9a3b-4544d222ff8e\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:35:41 crc kubenswrapper[4558]: I0120 17:35:41.085036 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91fac67e-69a9-4d88-9136-0d2484ca0dce-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"91fac67e-69a9-4d88-9136-0d2484ca0dce\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:35:41 crc kubenswrapper[4558]: I0120 17:35:41.085335 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/91fac67e-69a9-4d88-9136-0d2484ca0dce-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"91fac67e-69a9-4d88-9136-0d2484ca0dce\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:35:41 crc kubenswrapper[4558]: I0120 17:35:41.085776 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/91fac67e-69a9-4d88-9136-0d2484ca0dce-logs\") pod \"glance-default-internal-api-0\" (UID: \"91fac67e-69a9-4d88-9136-0d2484ca0dce\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:35:41 crc kubenswrapper[4558]: I0120 17:35:41.086104 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"glance-default-internal-api-0\" (UID: \"91fac67e-69a9-4d88-9136-0d2484ca0dce\") device mount path \"/mnt/openstack/pv15\"" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:35:41 crc kubenswrapper[4558]: I0120 17:35:41.086124 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c9ec2559-f7c5-4318-9a3b-4544d222ff8e-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"c9ec2559-f7c5-4318-9a3b-4544d222ff8e\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:35:41 crc kubenswrapper[4558]: I0120 17:35:41.086151 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/91fac67e-69a9-4d88-9136-0d2484ca0dce-config-data\") pod \"glance-default-internal-api-0\" (UID: \"91fac67e-69a9-4d88-9136-0d2484ca0dce\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:35:41 crc kubenswrapper[4558]: I0120 17:35:41.086458 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume 
\"local-storage16-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage16-crc\") pod \"glance-default-external-api-0\" (UID: \"c9ec2559-f7c5-4318-9a3b-4544d222ff8e\") device mount path \"/mnt/openstack/pv16\"" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:35:41 crc kubenswrapper[4558]: I0120 17:35:41.088533 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c9ec2559-f7c5-4318-9a3b-4544d222ff8e-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"c9ec2559-f7c5-4318-9a3b-4544d222ff8e\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:35:41 crc kubenswrapper[4558]: I0120 17:35:41.089740 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/91fac67e-69a9-4d88-9136-0d2484ca0dce-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"91fac67e-69a9-4d88-9136-0d2484ca0dce\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:35:41 crc kubenswrapper[4558]: I0120 17:35:41.090532 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c9ec2559-f7c5-4318-9a3b-4544d222ff8e-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"c9ec2559-f7c5-4318-9a3b-4544d222ff8e\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:35:41 crc kubenswrapper[4558]: I0120 17:35:41.090626 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c9ec2559-f7c5-4318-9a3b-4544d222ff8e-logs\") pod \"glance-default-external-api-0\" (UID: \"c9ec2559-f7c5-4318-9a3b-4544d222ff8e\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:35:41 crc kubenswrapper[4558]: I0120 17:35:41.090869 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91fac67e-69a9-4d88-9136-0d2484ca0dce-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"91fac67e-69a9-4d88-9136-0d2484ca0dce\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:35:41 crc kubenswrapper[4558]: I0120 17:35:41.090882 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9ec2559-f7c5-4318-9a3b-4544d222ff8e-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"c9ec2559-f7c5-4318-9a3b-4544d222ff8e\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:35:41 crc kubenswrapper[4558]: I0120 17:35:41.093673 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9ec2559-f7c5-4318-9a3b-4544d222ff8e-config-data\") pod \"glance-default-external-api-0\" (UID: \"c9ec2559-f7c5-4318-9a3b-4544d222ff8e\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:35:41 crc kubenswrapper[4558]: I0120 17:35:41.094104 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c9ec2559-f7c5-4318-9a3b-4544d222ff8e-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"c9ec2559-f7c5-4318-9a3b-4544d222ff8e\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:35:41 crc kubenswrapper[4558]: I0120 17:35:41.094359 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" 
(UniqueName: \"kubernetes.io/secret/91fac67e-69a9-4d88-9136-0d2484ca0dce-scripts\") pod \"glance-default-internal-api-0\" (UID: \"91fac67e-69a9-4d88-9136-0d2484ca0dce\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:35:41 crc kubenswrapper[4558]: I0120 17:35:41.097080 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c9ec2559-f7c5-4318-9a3b-4544d222ff8e-scripts\") pod \"glance-default-external-api-0\" (UID: \"c9ec2559-f7c5-4318-9a3b-4544d222ff8e\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:35:41 crc kubenswrapper[4558]: I0120 17:35:41.100313 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/91fac67e-69a9-4d88-9136-0d2484ca0dce-config-data\") pod \"glance-default-internal-api-0\" (UID: \"91fac67e-69a9-4d88-9136-0d2484ca0dce\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:35:41 crc kubenswrapper[4558]: I0120 17:35:41.101003 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/91fac67e-69a9-4d88-9136-0d2484ca0dce-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"91fac67e-69a9-4d88-9136-0d2484ca0dce\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:35:41 crc kubenswrapper[4558]: I0120 17:35:41.104525 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mmm6k\" (UniqueName: \"kubernetes.io/projected/91fac67e-69a9-4d88-9136-0d2484ca0dce-kube-api-access-mmm6k\") pod \"glance-default-internal-api-0\" (UID: \"91fac67e-69a9-4d88-9136-0d2484ca0dce\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:35:41 crc kubenswrapper[4558]: I0120 17:35:41.108256 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2fspt\" (UniqueName: \"kubernetes.io/projected/c9ec2559-f7c5-4318-9a3b-4544d222ff8e-kube-api-access-2fspt\") pod \"glance-default-external-api-0\" (UID: \"c9ec2559-f7c5-4318-9a3b-4544d222ff8e\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:35:41 crc kubenswrapper[4558]: I0120 17:35:41.125190 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"glance-default-internal-api-0\" (UID: \"91fac67e-69a9-4d88-9136-0d2484ca0dce\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:35:41 crc kubenswrapper[4558]: I0120 17:35:41.127291 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage16-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage16-crc\") pod \"glance-default-external-api-0\" (UID: \"c9ec2559-f7c5-4318-9a3b-4544d222ff8e\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:35:41 crc kubenswrapper[4558]: I0120 17:35:41.149229 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:35:41 crc kubenswrapper[4558]: I0120 17:35:41.414780 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:35:41 crc kubenswrapper[4558]: I0120 17:35:41.598978 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:35:41 crc kubenswrapper[4558]: I0120 17:35:41.721656 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"91fac67e-69a9-4d88-9136-0d2484ca0dce","Type":"ContainerStarted","Data":"5551c79462136929334b1ffe858711f0e5df3f4ccfbeeaa390c0be33ddf3d7d4"} Jan 20 17:35:41 crc kubenswrapper[4558]: I0120 17:35:41.832256 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:35:41 crc kubenswrapper[4558]: I0120 17:35:41.896405 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:35:41 crc kubenswrapper[4558]: I0120 17:35:41.960598 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:35:42 crc kubenswrapper[4558]: I0120 17:35:42.584956 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="21c81b0a-c70b-4d57-bea0-96e4840af7dd" path="/var/lib/kubelet/pods/21c81b0a-c70b-4d57-bea0-96e4840af7dd/volumes" Jan 20 17:35:42 crc kubenswrapper[4558]: I0120 17:35:42.585898 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d78c49cb-72eb-4def-9c40-b16128c74418" path="/var/lib/kubelet/pods/d78c49cb-72eb-4def-9c40-b16128c74418/volumes" Jan 20 17:35:42 crc kubenswrapper[4558]: I0120 17:35:42.760346 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"91fac67e-69a9-4d88-9136-0d2484ca0dce","Type":"ContainerStarted","Data":"61659b3cdbe7237e6c49c005bd8d5ec58e23a181bee16a952d93828bb6852edf"} Jan 20 17:35:42 crc kubenswrapper[4558]: I0120 17:35:42.760657 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"91fac67e-69a9-4d88-9136-0d2484ca0dce","Type":"ContainerStarted","Data":"ff3f57bfd0fc845e414a7a24e7f078b2e8b3a550fdb80d3739b2e449bcf27a81"} Jan 20 17:35:42 crc kubenswrapper[4558]: I0120 17:35:42.764007 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"c9ec2559-f7c5-4318-9a3b-4544d222ff8e","Type":"ContainerStarted","Data":"de49ed715fad547662149d1b7edd446a91ec26a644380970de0736db54838406"} Jan 20 17:35:42 crc kubenswrapper[4558]: I0120 17:35:42.764055 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"c9ec2559-f7c5-4318-9a3b-4544d222ff8e","Type":"ContainerStarted","Data":"93b40d8da0c4b87ba8085be29e8668598179c32876925320f6cb4f333502e422"} Jan 20 17:35:42 crc kubenswrapper[4558]: I0120 17:35:42.765956 4558 generic.go:334] "Generic (PLEG): container finished" podID="a205fa61-cd83-4d6e-b82b-aba6f9144c71" containerID="d6ec41d7d0083a1b2dfe623be5f2d5647fc24682bdfeff8a9ef6b95d75507454" exitCode=0 Jan 20 17:35:42 crc kubenswrapper[4558]: I0120 17:35:42.765988 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"a205fa61-cd83-4d6e-b82b-aba6f9144c71","Type":"ContainerDied","Data":"d6ec41d7d0083a1b2dfe623be5f2d5647fc24682bdfeff8a9ef6b95d75507454"} Jan 20 17:35:42 crc kubenswrapper[4558]: I0120 
17:35:42.854510 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:42 crc kubenswrapper[4558]: I0120 17:35:42.884527 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-internal-api-0" podStartSLOduration=2.884508496 podStartE2EDuration="2.884508496s" podCreationTimestamp="2026-01-20 17:35:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:35:42.781904402 +0000 UTC m=+3236.542242369" watchObservedRunningTime="2026-01-20 17:35:42.884508496 +0000 UTC m=+3236.644846464" Jan 20 17:35:42 crc kubenswrapper[4558]: I0120 17:35:42.934916 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:35:42 crc kubenswrapper[4558]: I0120 17:35:42.945374 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a205fa61-cd83-4d6e-b82b-aba6f9144c71-combined-ca-bundle\") pod \"a205fa61-cd83-4d6e-b82b-aba6f9144c71\" (UID: \"a205fa61-cd83-4d6e-b82b-aba6f9144c71\") " Jan 20 17:35:42 crc kubenswrapper[4558]: I0120 17:35:42.945653 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/a205fa61-cd83-4d6e-b82b-aba6f9144c71-ceilometer-tls-certs\") pod \"a205fa61-cd83-4d6e-b82b-aba6f9144c71\" (UID: \"a205fa61-cd83-4d6e-b82b-aba6f9144c71\") " Jan 20 17:35:42 crc kubenswrapper[4558]: I0120 17:35:42.945716 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a205fa61-cd83-4d6e-b82b-aba6f9144c71-run-httpd\") pod \"a205fa61-cd83-4d6e-b82b-aba6f9144c71\" (UID: \"a205fa61-cd83-4d6e-b82b-aba6f9144c71\") " Jan 20 17:35:42 crc kubenswrapper[4558]: I0120 17:35:42.945767 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a205fa61-cd83-4d6e-b82b-aba6f9144c71-log-httpd\") pod \"a205fa61-cd83-4d6e-b82b-aba6f9144c71\" (UID: \"a205fa61-cd83-4d6e-b82b-aba6f9144c71\") " Jan 20 17:35:42 crc kubenswrapper[4558]: I0120 17:35:42.945785 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a205fa61-cd83-4d6e-b82b-aba6f9144c71-scripts\") pod \"a205fa61-cd83-4d6e-b82b-aba6f9144c71\" (UID: \"a205fa61-cd83-4d6e-b82b-aba6f9144c71\") " Jan 20 17:35:42 crc kubenswrapper[4558]: I0120 17:35:42.945822 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sjmnl\" (UniqueName: \"kubernetes.io/projected/a205fa61-cd83-4d6e-b82b-aba6f9144c71-kube-api-access-sjmnl\") pod \"a205fa61-cd83-4d6e-b82b-aba6f9144c71\" (UID: \"a205fa61-cd83-4d6e-b82b-aba6f9144c71\") " Jan 20 17:35:42 crc kubenswrapper[4558]: I0120 17:35:42.945898 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a205fa61-cd83-4d6e-b82b-aba6f9144c71-config-data\") pod \"a205fa61-cd83-4d6e-b82b-aba6f9144c71\" (UID: \"a205fa61-cd83-4d6e-b82b-aba6f9144c71\") " Jan 20 17:35:42 crc kubenswrapper[4558]: I0120 17:35:42.946032 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/a205fa61-cd83-4d6e-b82b-aba6f9144c71-sg-core-conf-yaml\") pod \"a205fa61-cd83-4d6e-b82b-aba6f9144c71\" (UID: \"a205fa61-cd83-4d6e-b82b-aba6f9144c71\") " Jan 20 17:35:42 crc kubenswrapper[4558]: I0120 17:35:42.946677 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a205fa61-cd83-4d6e-b82b-aba6f9144c71-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "a205fa61-cd83-4d6e-b82b-aba6f9144c71" (UID: "a205fa61-cd83-4d6e-b82b-aba6f9144c71"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:35:42 crc kubenswrapper[4558]: I0120 17:35:42.950465 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a205fa61-cd83-4d6e-b82b-aba6f9144c71-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "a205fa61-cd83-4d6e-b82b-aba6f9144c71" (UID: "a205fa61-cd83-4d6e-b82b-aba6f9144c71"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:35:42 crc kubenswrapper[4558]: I0120 17:35:42.952356 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a205fa61-cd83-4d6e-b82b-aba6f9144c71-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:42 crc kubenswrapper[4558]: I0120 17:35:42.952388 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a205fa61-cd83-4d6e-b82b-aba6f9144c71-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:42 crc kubenswrapper[4558]: I0120 17:35:42.959776 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a205fa61-cd83-4d6e-b82b-aba6f9144c71-scripts" (OuterVolumeSpecName: "scripts") pod "a205fa61-cd83-4d6e-b82b-aba6f9144c71" (UID: "a205fa61-cd83-4d6e-b82b-aba6f9144c71"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:42 crc kubenswrapper[4558]: I0120 17:35:42.963300 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a205fa61-cd83-4d6e-b82b-aba6f9144c71-kube-api-access-sjmnl" (OuterVolumeSpecName: "kube-api-access-sjmnl") pod "a205fa61-cd83-4d6e-b82b-aba6f9144c71" (UID: "a205fa61-cd83-4d6e-b82b-aba6f9144c71"). InnerVolumeSpecName "kube-api-access-sjmnl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:35:42 crc kubenswrapper[4558]: I0120 17:35:42.994341 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.006045 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.047898 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a205fa61-cd83-4d6e-b82b-aba6f9144c71-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "a205fa61-cd83-4d6e-b82b-aba6f9144c71" (UID: "a205fa61-cd83-4d6e-b82b-aba6f9144c71"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.056797 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a205fa61-cd83-4d6e-b82b-aba6f9144c71-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.057068 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a205fa61-cd83-4d6e-b82b-aba6f9144c71-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.057082 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sjmnl\" (UniqueName: \"kubernetes.io/projected/a205fa61-cd83-4d6e-b82b-aba6f9144c71-kube-api-access-sjmnl\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.062533 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.062645 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a205fa61-cd83-4d6e-b82b-aba6f9144c71-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a205fa61-cd83-4d6e-b82b-aba6f9144c71" (UID: "a205fa61-cd83-4d6e-b82b-aba6f9144c71"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.109753 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.110004 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" podUID="639521c5-8df8-4c45-a66a-0be7119bb8d2" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://36a455e01433abd0de3acdfe5a36add132aa44e9a3eb34155cfc2b6b22bd9d3c" gracePeriod=30 Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.117849 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a205fa61-cd83-4d6e-b82b-aba6f9144c71-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "a205fa61-cd83-4d6e-b82b-aba6f9144c71" (UID: "a205fa61-cd83-4d6e-b82b-aba6f9144c71"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.141286 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a205fa61-cd83-4d6e-b82b-aba6f9144c71-config-data" (OuterVolumeSpecName: "config-data") pod "a205fa61-cd83-4d6e-b82b-aba6f9144c71" (UID: "a205fa61-cd83-4d6e-b82b-aba6f9144c71"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.165553 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a205fa61-cd83-4d6e-b82b-aba6f9144c71-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.165589 4558 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/a205fa61-cd83-4d6e-b82b-aba6f9144c71-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.165598 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a205fa61-cd83-4d6e-b82b-aba6f9144c71-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.467656 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.569353 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/872e7858-d089-4bb5-89aa-060646e32754-config-data\") pod \"872e7858-d089-4bb5-89aa-060646e32754\" (UID: \"872e7858-d089-4bb5-89aa-060646e32754\") " Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.569451 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/872e7858-d089-4bb5-89aa-060646e32754-internal-tls-certs\") pod \"872e7858-d089-4bb5-89aa-060646e32754\" (UID: \"872e7858-d089-4bb5-89aa-060646e32754\") " Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.569522 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/872e7858-d089-4bb5-89aa-060646e32754-logs\") pod \"872e7858-d089-4bb5-89aa-060646e32754\" (UID: \"872e7858-d089-4bb5-89aa-060646e32754\") " Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.569547 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/872e7858-d089-4bb5-89aa-060646e32754-combined-ca-bundle\") pod \"872e7858-d089-4bb5-89aa-060646e32754\" (UID: \"872e7858-d089-4bb5-89aa-060646e32754\") " Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.569579 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/872e7858-d089-4bb5-89aa-060646e32754-public-tls-certs\") pod \"872e7858-d089-4bb5-89aa-060646e32754\" (UID: \"872e7858-d089-4bb5-89aa-060646e32754\") " Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.569612 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tff9b\" (UniqueName: \"kubernetes.io/projected/872e7858-d089-4bb5-89aa-060646e32754-kube-api-access-tff9b\") pod \"872e7858-d089-4bb5-89aa-060646e32754\" (UID: \"872e7858-d089-4bb5-89aa-060646e32754\") " Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.570264 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/872e7858-d089-4bb5-89aa-060646e32754-logs" (OuterVolumeSpecName: "logs") pod "872e7858-d089-4bb5-89aa-060646e32754" (UID: "872e7858-d089-4bb5-89aa-060646e32754"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.574281 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/872e7858-d089-4bb5-89aa-060646e32754-kube-api-access-tff9b" (OuterVolumeSpecName: "kube-api-access-tff9b") pod "872e7858-d089-4bb5-89aa-060646e32754" (UID: "872e7858-d089-4bb5-89aa-060646e32754"). InnerVolumeSpecName "kube-api-access-tff9b". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.594212 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/872e7858-d089-4bb5-89aa-060646e32754-config-data" (OuterVolumeSpecName: "config-data") pod "872e7858-d089-4bb5-89aa-060646e32754" (UID: "872e7858-d089-4bb5-89aa-060646e32754"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.598122 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/872e7858-d089-4bb5-89aa-060646e32754-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "872e7858-d089-4bb5-89aa-060646e32754" (UID: "872e7858-d089-4bb5-89aa-060646e32754"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.617027 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/872e7858-d089-4bb5-89aa-060646e32754-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "872e7858-d089-4bb5-89aa-060646e32754" (UID: "872e7858-d089-4bb5-89aa-060646e32754"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.617934 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/872e7858-d089-4bb5-89aa-060646e32754-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "872e7858-d089-4bb5-89aa-060646e32754" (UID: "872e7858-d089-4bb5-89aa-060646e32754"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.671029 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/872e7858-d089-4bb5-89aa-060646e32754-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.671060 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/872e7858-d089-4bb5-89aa-060646e32754-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.671074 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/872e7858-d089-4bb5-89aa-060646e32754-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.671085 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/872e7858-d089-4bb5-89aa-060646e32754-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.671096 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tff9b\" (UniqueName: \"kubernetes.io/projected/872e7858-d089-4bb5-89aa-060646e32754-kube-api-access-tff9b\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.671104 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/872e7858-d089-4bb5-89aa-060646e32754-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.788045 4558 generic.go:334] "Generic (PLEG): container finished" podID="872e7858-d089-4bb5-89aa-060646e32754" containerID="52ee2a447d2cae531422324e0f180c5e03fd7fe612731ae435103a62b66fa686" exitCode=0 Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.788309 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.788584 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"872e7858-d089-4bb5-89aa-060646e32754","Type":"ContainerDied","Data":"52ee2a447d2cae531422324e0f180c5e03fd7fe612731ae435103a62b66fa686"} Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.789473 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"872e7858-d089-4bb5-89aa-060646e32754","Type":"ContainerDied","Data":"d58e3b71d614150c0a7b3cd7ba45e6d20229691cd7dd6c2cc8f72d53384bec24"} Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.789525 4558 scope.go:117] "RemoveContainer" containerID="52ee2a447d2cae531422324e0f180c5e03fd7fe612731ae435103a62b66fa686" Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.802503 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"c9ec2559-f7c5-4318-9a3b-4544d222ff8e","Type":"ContainerStarted","Data":"0b6ba9125bd1b63702f6dc81feb7c84047318daad92c337b8f2184e3028bcdfb"} Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.814414 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"a205fa61-cd83-4d6e-b82b-aba6f9144c71","Type":"ContainerDied","Data":"5188af4bebe114136c461026009c42e7f53429599650512ccee2779a56443bfe"} Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.814569 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.832092 4558 scope.go:117] "RemoveContainer" containerID="9dea7e4de1f3490846ec2c17c47d30837c43228f400cb136eee18c9970b72bb6" Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.832405 4558 generic.go:334] "Generic (PLEG): container finished" podID="639521c5-8df8-4c45-a66a-0be7119bb8d2" containerID="36a455e01433abd0de3acdfe5a36add132aa44e9a3eb34155cfc2b6b22bd9d3c" exitCode=0 Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.833373 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"639521c5-8df8-4c45-a66a-0be7119bb8d2","Type":"ContainerDied","Data":"36a455e01433abd0de3acdfe5a36add132aa44e9a3eb34155cfc2b6b22bd9d3c"} Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.839150 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-external-api-0" podStartSLOduration=3.839121108 podStartE2EDuration="3.839121108s" podCreationTimestamp="2026-01-20 17:35:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:35:43.828409015 +0000 UTC m=+3237.588746982" watchObservedRunningTime="2026-01-20 17:35:43.839121108 +0000 UTC m=+3237.599459074" Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.877130 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.902080 4558 scope.go:117] "RemoveContainer" containerID="52ee2a447d2cae531422324e0f180c5e03fd7fe612731ae435103a62b66fa686" Jan 20 17:35:43 crc kubenswrapper[4558]: E0120 17:35:43.902572 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"52ee2a447d2cae531422324e0f180c5e03fd7fe612731ae435103a62b66fa686\": container with ID starting with 52ee2a447d2cae531422324e0f180c5e03fd7fe612731ae435103a62b66fa686 not found: ID does not exist" containerID="52ee2a447d2cae531422324e0f180c5e03fd7fe612731ae435103a62b66fa686" Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.902609 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"52ee2a447d2cae531422324e0f180c5e03fd7fe612731ae435103a62b66fa686"} err="failed to get container status \"52ee2a447d2cae531422324e0f180c5e03fd7fe612731ae435103a62b66fa686\": rpc error: code = NotFound desc = could not find container \"52ee2a447d2cae531422324e0f180c5e03fd7fe612731ae435103a62b66fa686\": container with ID starting with 52ee2a447d2cae531422324e0f180c5e03fd7fe612731ae435103a62b66fa686 not found: ID does not exist" Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.902633 4558 scope.go:117] "RemoveContainer" containerID="9dea7e4de1f3490846ec2c17c47d30837c43228f400cb136eee18c9970b72bb6" Jan 20 17:35:43 crc kubenswrapper[4558]: E0120 17:35:43.902971 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9dea7e4de1f3490846ec2c17c47d30837c43228f400cb136eee18c9970b72bb6\": container with ID starting with 9dea7e4de1f3490846ec2c17c47d30837c43228f400cb136eee18c9970b72bb6 not found: ID does not exist" containerID="9dea7e4de1f3490846ec2c17c47d30837c43228f400cb136eee18c9970b72bb6" Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.903020 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9dea7e4de1f3490846ec2c17c47d30837c43228f400cb136eee18c9970b72bb6"} err="failed to get container status \"9dea7e4de1f3490846ec2c17c47d30837c43228f400cb136eee18c9970b72bb6\": rpc error: code = NotFound desc = could not find container \"9dea7e4de1f3490846ec2c17c47d30837c43228f400cb136eee18c9970b72bb6\": container with ID starting with 9dea7e4de1f3490846ec2c17c47d30837c43228f400cb136eee18c9970b72bb6 not found: ID does not exist" Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.903050 4558 scope.go:117] "RemoveContainer" containerID="7674db8d22b702209ebaaadbb855ca6def67d0a91881b8c25b343fb381ebccde" Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.904379 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.915039 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.927623 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.931826 4558 scope.go:117] "RemoveContainer" containerID="9787f119978c69e093f0f9dac43490e32de39c2c966b73ff0ee092a20cb9152f" Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.957151 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:35:43 crc kubenswrapper[4558]: E0120 17:35:43.964122 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a205fa61-cd83-4d6e-b82b-aba6f9144c71" containerName="proxy-httpd" Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.964145 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a205fa61-cd83-4d6e-b82b-aba6f9144c71" containerName="proxy-httpd" Jan 20 17:35:43 crc kubenswrapper[4558]: E0120 17:35:43.964197 4558 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a205fa61-cd83-4d6e-b82b-aba6f9144c71" containerName="ceilometer-notification-agent" Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.964205 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a205fa61-cd83-4d6e-b82b-aba6f9144c71" containerName="ceilometer-notification-agent" Jan 20 17:35:43 crc kubenswrapper[4558]: E0120 17:35:43.964223 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a205fa61-cd83-4d6e-b82b-aba6f9144c71" containerName="ceilometer-central-agent" Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.964229 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a205fa61-cd83-4d6e-b82b-aba6f9144c71" containerName="ceilometer-central-agent" Jan 20 17:35:43 crc kubenswrapper[4558]: E0120 17:35:43.964250 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a205fa61-cd83-4d6e-b82b-aba6f9144c71" containerName="sg-core" Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.964255 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a205fa61-cd83-4d6e-b82b-aba6f9144c71" containerName="sg-core" Jan 20 17:35:43 crc kubenswrapper[4558]: E0120 17:35:43.964289 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="872e7858-d089-4bb5-89aa-060646e32754" containerName="nova-api-api" Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.964296 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="872e7858-d089-4bb5-89aa-060646e32754" containerName="nova-api-api" Jan 20 17:35:43 crc kubenswrapper[4558]: E0120 17:35:43.964307 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="872e7858-d089-4bb5-89aa-060646e32754" containerName="nova-api-log" Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.964312 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="872e7858-d089-4bb5-89aa-060646e32754" containerName="nova-api-log" Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.964552 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="872e7858-d089-4bb5-89aa-060646e32754" containerName="nova-api-log" Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.964561 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a205fa61-cd83-4d6e-b82b-aba6f9144c71" containerName="sg-core" Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.964575 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a205fa61-cd83-4d6e-b82b-aba6f9144c71" containerName="ceilometer-notification-agent" Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.964591 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a205fa61-cd83-4d6e-b82b-aba6f9144c71" containerName="proxy-httpd" Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.964599 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="872e7858-d089-4bb5-89aa-060646e32754" containerName="nova-api-api" Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.964607 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a205fa61-cd83-4d6e-b82b-aba6f9144c71" containerName="ceilometer-central-agent" Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.965756 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.974611 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-internal-svc" Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.975592 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-public-svc" Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.979592 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-api-config-data" Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.979725 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.986669 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a046f3a-bea5-4aea-8081-1c5994ecdbd9-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"5a046f3a-bea5-4aea-8081-1c5994ecdbd9\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.986782 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5a046f3a-bea5-4aea-8081-1c5994ecdbd9-internal-tls-certs\") pod \"nova-api-0\" (UID: \"5a046f3a-bea5-4aea-8081-1c5994ecdbd9\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.986904 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5a046f3a-bea5-4aea-8081-1c5994ecdbd9-logs\") pod \"nova-api-0\" (UID: \"5a046f3a-bea5-4aea-8081-1c5994ecdbd9\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.986967 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a046f3a-bea5-4aea-8081-1c5994ecdbd9-config-data\") pod \"nova-api-0\" (UID: \"5a046f3a-bea5-4aea-8081-1c5994ecdbd9\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.987002 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5a046f3a-bea5-4aea-8081-1c5994ecdbd9-public-tls-certs\") pod \"nova-api-0\" (UID: \"5a046f3a-bea5-4aea-8081-1c5994ecdbd9\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.987059 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gccht\" (UniqueName: \"kubernetes.io/projected/5a046f3a-bea5-4aea-8081-1c5994ecdbd9-kube-api-access-gccht\") pod \"nova-api-0\" (UID: \"5a046f3a-bea5-4aea-8081-1c5994ecdbd9\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.988428 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.992279 4558 scope.go:117] "RemoveContainer" containerID="d6ec41d7d0083a1b2dfe623be5f2d5647fc24682bdfeff8a9ef6b95d75507454" Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.992681 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ceilometer-internal-svc" Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.993046 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.993219 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.993412 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:35:43 crc kubenswrapper[4558]: I0120 17:35:43.998235 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.050741 4558 scope.go:117] "RemoveContainer" containerID="73a579cd1ed45c429083e3153dc8ac019914451ccabba3eac12a668e6e281974" Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.088265 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gccht\" (UniqueName: \"kubernetes.io/projected/5a046f3a-bea5-4aea-8081-1c5994ecdbd9-kube-api-access-gccht\") pod \"nova-api-0\" (UID: \"5a046f3a-bea5-4aea-8081-1c5994ecdbd9\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.088695 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-js2gg\" (UniqueName: \"kubernetes.io/projected/bbb20173-fee8-4323-afbb-f2bbbec3e978-kube-api-access-js2gg\") pod \"ceilometer-0\" (UID: \"bbb20173-fee8-4323-afbb-f2bbbec3e978\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.088827 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/bbb20173-fee8-4323-afbb-f2bbbec3e978-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"bbb20173-fee8-4323-afbb-f2bbbec3e978\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.088901 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bbb20173-fee8-4323-afbb-f2bbbec3e978-run-httpd\") pod \"ceilometer-0\" (UID: \"bbb20173-fee8-4323-afbb-f2bbbec3e978\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.088989 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bbb20173-fee8-4323-afbb-f2bbbec3e978-config-data\") pod \"ceilometer-0\" (UID: \"bbb20173-fee8-4323-afbb-f2bbbec3e978\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.089048 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bbb20173-fee8-4323-afbb-f2bbbec3e978-log-httpd\") pod \"ceilometer-0\" (UID: \"bbb20173-fee8-4323-afbb-f2bbbec3e978\") " 
pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.089108 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a046f3a-bea5-4aea-8081-1c5994ecdbd9-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"5a046f3a-bea5-4aea-8081-1c5994ecdbd9\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.089216 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5a046f3a-bea5-4aea-8081-1c5994ecdbd9-internal-tls-certs\") pod \"nova-api-0\" (UID: \"5a046f3a-bea5-4aea-8081-1c5994ecdbd9\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.089332 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5a046f3a-bea5-4aea-8081-1c5994ecdbd9-logs\") pod \"nova-api-0\" (UID: \"5a046f3a-bea5-4aea-8081-1c5994ecdbd9\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.089410 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bbb20173-fee8-4323-afbb-f2bbbec3e978-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"bbb20173-fee8-4323-afbb-f2bbbec3e978\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.089469 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a046f3a-bea5-4aea-8081-1c5994ecdbd9-config-data\") pod \"nova-api-0\" (UID: \"5a046f3a-bea5-4aea-8081-1c5994ecdbd9\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.089529 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/bbb20173-fee8-4323-afbb-f2bbbec3e978-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"bbb20173-fee8-4323-afbb-f2bbbec3e978\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.089600 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5a046f3a-bea5-4aea-8081-1c5994ecdbd9-public-tls-certs\") pod \"nova-api-0\" (UID: \"5a046f3a-bea5-4aea-8081-1c5994ecdbd9\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.089666 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bbb20173-fee8-4323-afbb-f2bbbec3e978-scripts\") pod \"ceilometer-0\" (UID: \"bbb20173-fee8-4323-afbb-f2bbbec3e978\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.091011 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5a046f3a-bea5-4aea-8081-1c5994ecdbd9-logs\") pod \"nova-api-0\" (UID: \"5a046f3a-bea5-4aea-8081-1c5994ecdbd9\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.095536 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/5a046f3a-bea5-4aea-8081-1c5994ecdbd9-config-data\") pod \"nova-api-0\" (UID: \"5a046f3a-bea5-4aea-8081-1c5994ecdbd9\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.095932 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5a046f3a-bea5-4aea-8081-1c5994ecdbd9-public-tls-certs\") pod \"nova-api-0\" (UID: \"5a046f3a-bea5-4aea-8081-1c5994ecdbd9\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.096429 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5a046f3a-bea5-4aea-8081-1c5994ecdbd9-internal-tls-certs\") pod \"nova-api-0\" (UID: \"5a046f3a-bea5-4aea-8081-1c5994ecdbd9\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.100619 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a046f3a-bea5-4aea-8081-1c5994ecdbd9-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"5a046f3a-bea5-4aea-8081-1c5994ecdbd9\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.104595 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gccht\" (UniqueName: \"kubernetes.io/projected/5a046f3a-bea5-4aea-8081-1c5994ecdbd9-kube-api-access-gccht\") pod \"nova-api-0\" (UID: \"5a046f3a-bea5-4aea-8081-1c5994ecdbd9\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.171224 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.190703 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/639521c5-8df8-4c45-a66a-0be7119bb8d2-nova-novncproxy-tls-certs\") pod \"639521c5-8df8-4c45-a66a-0be7119bb8d2\" (UID: \"639521c5-8df8-4c45-a66a-0be7119bb8d2\") " Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.190812 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wt5f2\" (UniqueName: \"kubernetes.io/projected/639521c5-8df8-4c45-a66a-0be7119bb8d2-kube-api-access-wt5f2\") pod \"639521c5-8df8-4c45-a66a-0be7119bb8d2\" (UID: \"639521c5-8df8-4c45-a66a-0be7119bb8d2\") " Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.190911 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/639521c5-8df8-4c45-a66a-0be7119bb8d2-vencrypt-tls-certs\") pod \"639521c5-8df8-4c45-a66a-0be7119bb8d2\" (UID: \"639521c5-8df8-4c45-a66a-0be7119bb8d2\") " Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.190956 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/639521c5-8df8-4c45-a66a-0be7119bb8d2-combined-ca-bundle\") pod \"639521c5-8df8-4c45-a66a-0be7119bb8d2\" (UID: \"639521c5-8df8-4c45-a66a-0be7119bb8d2\") " Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.190978 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/639521c5-8df8-4c45-a66a-0be7119bb8d2-config-data\") pod 
\"639521c5-8df8-4c45-a66a-0be7119bb8d2\" (UID: \"639521c5-8df8-4c45-a66a-0be7119bb8d2\") " Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.191336 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bbb20173-fee8-4323-afbb-f2bbbec3e978-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"bbb20173-fee8-4323-afbb-f2bbbec3e978\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.191366 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/bbb20173-fee8-4323-afbb-f2bbbec3e978-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"bbb20173-fee8-4323-afbb-f2bbbec3e978\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.191412 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bbb20173-fee8-4323-afbb-f2bbbec3e978-scripts\") pod \"ceilometer-0\" (UID: \"bbb20173-fee8-4323-afbb-f2bbbec3e978\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.191452 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-js2gg\" (UniqueName: \"kubernetes.io/projected/bbb20173-fee8-4323-afbb-f2bbbec3e978-kube-api-access-js2gg\") pod \"ceilometer-0\" (UID: \"bbb20173-fee8-4323-afbb-f2bbbec3e978\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.191564 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/bbb20173-fee8-4323-afbb-f2bbbec3e978-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"bbb20173-fee8-4323-afbb-f2bbbec3e978\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.191589 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bbb20173-fee8-4323-afbb-f2bbbec3e978-run-httpd\") pod \"ceilometer-0\" (UID: \"bbb20173-fee8-4323-afbb-f2bbbec3e978\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.191634 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bbb20173-fee8-4323-afbb-f2bbbec3e978-config-data\") pod \"ceilometer-0\" (UID: \"bbb20173-fee8-4323-afbb-f2bbbec3e978\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.191653 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bbb20173-fee8-4323-afbb-f2bbbec3e978-log-httpd\") pod \"ceilometer-0\" (UID: \"bbb20173-fee8-4323-afbb-f2bbbec3e978\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.192131 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bbb20173-fee8-4323-afbb-f2bbbec3e978-log-httpd\") pod \"ceilometer-0\" (UID: \"bbb20173-fee8-4323-afbb-f2bbbec3e978\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.199679 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/bbb20173-fee8-4323-afbb-f2bbbec3e978-run-httpd\") pod \"ceilometer-0\" (UID: \"bbb20173-fee8-4323-afbb-f2bbbec3e978\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.201902 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/bbb20173-fee8-4323-afbb-f2bbbec3e978-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"bbb20173-fee8-4323-afbb-f2bbbec3e978\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.206697 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/639521c5-8df8-4c45-a66a-0be7119bb8d2-kube-api-access-wt5f2" (OuterVolumeSpecName: "kube-api-access-wt5f2") pod "639521c5-8df8-4c45-a66a-0be7119bb8d2" (UID: "639521c5-8df8-4c45-a66a-0be7119bb8d2"). InnerVolumeSpecName "kube-api-access-wt5f2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.207301 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bbb20173-fee8-4323-afbb-f2bbbec3e978-scripts\") pod \"ceilometer-0\" (UID: \"bbb20173-fee8-4323-afbb-f2bbbec3e978\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.208178 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bbb20173-fee8-4323-afbb-f2bbbec3e978-config-data\") pod \"ceilometer-0\" (UID: \"bbb20173-fee8-4323-afbb-f2bbbec3e978\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.208754 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/bbb20173-fee8-4323-afbb-f2bbbec3e978-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"bbb20173-fee8-4323-afbb-f2bbbec3e978\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.226890 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bbb20173-fee8-4323-afbb-f2bbbec3e978-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"bbb20173-fee8-4323-afbb-f2bbbec3e978\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.242070 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-js2gg\" (UniqueName: \"kubernetes.io/projected/bbb20173-fee8-4323-afbb-f2bbbec3e978-kube-api-access-js2gg\") pod \"ceilometer-0\" (UID: \"bbb20173-fee8-4323-afbb-f2bbbec3e978\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.258078 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/639521c5-8df8-4c45-a66a-0be7119bb8d2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "639521c5-8df8-4c45-a66a-0be7119bb8d2" (UID: "639521c5-8df8-4c45-a66a-0be7119bb8d2"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.259444 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/639521c5-8df8-4c45-a66a-0be7119bb8d2-config-data" (OuterVolumeSpecName: "config-data") pod "639521c5-8df8-4c45-a66a-0be7119bb8d2" (UID: "639521c5-8df8-4c45-a66a-0be7119bb8d2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.269817 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/639521c5-8df8-4c45-a66a-0be7119bb8d2-nova-novncproxy-tls-certs" (OuterVolumeSpecName: "nova-novncproxy-tls-certs") pod "639521c5-8df8-4c45-a66a-0be7119bb8d2" (UID: "639521c5-8df8-4c45-a66a-0be7119bb8d2"). InnerVolumeSpecName "nova-novncproxy-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.273143 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/639521c5-8df8-4c45-a66a-0be7119bb8d2-vencrypt-tls-certs" (OuterVolumeSpecName: "vencrypt-tls-certs") pod "639521c5-8df8-4c45-a66a-0be7119bb8d2" (UID: "639521c5-8df8-4c45-a66a-0be7119bb8d2"). InnerVolumeSpecName "vencrypt-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.293568 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wt5f2\" (UniqueName: \"kubernetes.io/projected/639521c5-8df8-4c45-a66a-0be7119bb8d2-kube-api-access-wt5f2\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.293598 4558 reconciler_common.go:293] "Volume detached for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/639521c5-8df8-4c45-a66a-0be7119bb8d2-vencrypt-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.293608 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/639521c5-8df8-4c45-a66a-0be7119bb8d2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.293618 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/639521c5-8df8-4c45-a66a-0be7119bb8d2-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.293627 4558 reconciler_common.go:293] "Volume detached for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/639521c5-8df8-4c45-a66a-0be7119bb8d2-nova-novncproxy-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.343536 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.353047 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.575183 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="872e7858-d089-4bb5-89aa-060646e32754" path="/var/lib/kubelet/pods/872e7858-d089-4bb5-89aa-060646e32754/volumes" Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.576207 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a205fa61-cd83-4d6e-b82b-aba6f9144c71" path="/var/lib/kubelet/pods/a205fa61-cd83-4d6e-b82b-aba6f9144c71/volumes" Jan 20 17:35:44 crc kubenswrapper[4558]: W0120 17:35:44.774455 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5a046f3a_bea5_4aea_8081_1c5994ecdbd9.slice/crio-b1d2e1fd3bf44cc2f1cfad03805180162622dea0f9c59f40c177d95250384b8b WatchSource:0}: Error finding container b1d2e1fd3bf44cc2f1cfad03805180162622dea0f9c59f40c177d95250384b8b: Status 404 returned error can't find the container with id b1d2e1fd3bf44cc2f1cfad03805180162622dea0f9c59f40c177d95250384b8b Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.778317 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.808092 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:35:44 crc kubenswrapper[4558]: W0120 17:35:44.821106 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbbb20173_fee8_4323_afbb_f2bbbec3e978.slice/crio-b5bdedf70433a1028c539e44bc993a783930852815b819b981c5d4ae75bba7dd WatchSource:0}: Error finding container b5bdedf70433a1028c539e44bc993a783930852815b819b981c5d4ae75bba7dd: Status 404 returned error can't find the container with id b5bdedf70433a1028c539e44bc993a783930852815b819b981c5d4ae75bba7dd Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.854997 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"bbb20173-fee8-4323-afbb-f2bbbec3e978","Type":"ContainerStarted","Data":"b5bdedf70433a1028c539e44bc993a783930852815b819b981c5d4ae75bba7dd"} Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.856618 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"639521c5-8df8-4c45-a66a-0be7119bb8d2","Type":"ContainerDied","Data":"20e8f42a160445317c6f7a51b7e9e69eb9dc463c3fba20116a2cd1d22263ebe6"} Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.856652 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.856660 4558 scope.go:117] "RemoveContainer" containerID="36a455e01433abd0de3acdfe5a36add132aa44e9a3eb34155cfc2b6b22bd9d3c" Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.860097 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"5a046f3a-bea5-4aea-8081-1c5994ecdbd9","Type":"ContainerStarted","Data":"b1d2e1fd3bf44cc2f1cfad03805180162622dea0f9c59f40c177d95250384b8b"} Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.883630 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.894359 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.896445 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:35:44 crc kubenswrapper[4558]: E0120 17:35:44.896891 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="639521c5-8df8-4c45-a66a-0be7119bb8d2" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.896911 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="639521c5-8df8-4c45-a66a-0be7119bb8d2" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.897132 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="639521c5-8df8-4c45-a66a-0be7119bb8d2" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.898949 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.902119 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-novncproxy-cell1-vencrypt" Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.908210 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-novncproxy-cell1-public-svc" Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.908487 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-novncproxy-config-data" Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.917499 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.943262 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-khsdd\" (UniqueName: \"kubernetes.io/projected/099193f1-dcf1-4c0a-beeb-fac5f0824cb8-kube-api-access-khsdd\") pod \"nova-cell1-novncproxy-0\" (UID: \"099193f1-dcf1-4c0a-beeb-fac5f0824cb8\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.943497 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/099193f1-dcf1-4c0a-beeb-fac5f0824cb8-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"099193f1-dcf1-4c0a-beeb-fac5f0824cb8\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.944389 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/099193f1-dcf1-4c0a-beeb-fac5f0824cb8-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"099193f1-dcf1-4c0a-beeb-fac5f0824cb8\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.944533 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/099193f1-dcf1-4c0a-beeb-fac5f0824cb8-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"099193f1-dcf1-4c0a-beeb-fac5f0824cb8\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:35:44 crc kubenswrapper[4558]: I0120 17:35:44.944745 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/099193f1-dcf1-4c0a-beeb-fac5f0824cb8-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"099193f1-dcf1-4c0a-beeb-fac5f0824cb8\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:35:45 crc kubenswrapper[4558]: I0120 17:35:45.046436 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/099193f1-dcf1-4c0a-beeb-fac5f0824cb8-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"099193f1-dcf1-4c0a-beeb-fac5f0824cb8\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:35:45 crc kubenswrapper[4558]: I0120 17:35:45.046501 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/099193f1-dcf1-4c0a-beeb-fac5f0824cb8-vencrypt-tls-certs\") 
pod \"nova-cell1-novncproxy-0\" (UID: \"099193f1-dcf1-4c0a-beeb-fac5f0824cb8\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:35:45 crc kubenswrapper[4558]: I0120 17:35:45.046538 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/099193f1-dcf1-4c0a-beeb-fac5f0824cb8-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"099193f1-dcf1-4c0a-beeb-fac5f0824cb8\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:35:45 crc kubenswrapper[4558]: I0120 17:35:45.046570 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-khsdd\" (UniqueName: \"kubernetes.io/projected/099193f1-dcf1-4c0a-beeb-fac5f0824cb8-kube-api-access-khsdd\") pod \"nova-cell1-novncproxy-0\" (UID: \"099193f1-dcf1-4c0a-beeb-fac5f0824cb8\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:35:45 crc kubenswrapper[4558]: I0120 17:35:45.046618 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/099193f1-dcf1-4c0a-beeb-fac5f0824cb8-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"099193f1-dcf1-4c0a-beeb-fac5f0824cb8\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:35:45 crc kubenswrapper[4558]: I0120 17:35:45.050014 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/099193f1-dcf1-4c0a-beeb-fac5f0824cb8-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"099193f1-dcf1-4c0a-beeb-fac5f0824cb8\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:35:45 crc kubenswrapper[4558]: I0120 17:35:45.050054 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/099193f1-dcf1-4c0a-beeb-fac5f0824cb8-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"099193f1-dcf1-4c0a-beeb-fac5f0824cb8\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:35:45 crc kubenswrapper[4558]: I0120 17:35:45.051113 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/099193f1-dcf1-4c0a-beeb-fac5f0824cb8-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"099193f1-dcf1-4c0a-beeb-fac5f0824cb8\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:35:45 crc kubenswrapper[4558]: I0120 17:35:45.054633 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/099193f1-dcf1-4c0a-beeb-fac5f0824cb8-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"099193f1-dcf1-4c0a-beeb-fac5f0824cb8\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:35:45 crc kubenswrapper[4558]: I0120 17:35:45.062934 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-khsdd\" (UniqueName: \"kubernetes.io/projected/099193f1-dcf1-4c0a-beeb-fac5f0824cb8-kube-api-access-khsdd\") pod \"nova-cell1-novncproxy-0\" (UID: \"099193f1-dcf1-4c0a-beeb-fac5f0824cb8\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:35:45 crc kubenswrapper[4558]: I0120 17:35:45.229102 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-api-5bd54998d4-vhnpd"] Jan 20 17:35:45 crc kubenswrapper[4558]: I0120 17:35:45.230769 4558 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-api-5bd54998d4-vhnpd" Jan 20 17:35:45 crc kubenswrapper[4558]: I0120 17:35:45.230991 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:35:45 crc kubenswrapper[4558]: I0120 17:35:45.244692 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-api-5bd54998d4-vhnpd"] Jan 20 17:35:45 crc kubenswrapper[4558]: I0120 17:35:45.251689 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kr6b6\" (UniqueName: \"kubernetes.io/projected/b65a4359-8f00-46fd-acd3-02731c1e1389-kube-api-access-kr6b6\") pod \"barbican-api-5bd54998d4-vhnpd\" (UID: \"b65a4359-8f00-46fd-acd3-02731c1e1389\") " pod="openstack-kuttl-tests/barbican-api-5bd54998d4-vhnpd" Jan 20 17:35:45 crc kubenswrapper[4558]: I0120 17:35:45.251733 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b65a4359-8f00-46fd-acd3-02731c1e1389-combined-ca-bundle\") pod \"barbican-api-5bd54998d4-vhnpd\" (UID: \"b65a4359-8f00-46fd-acd3-02731c1e1389\") " pod="openstack-kuttl-tests/barbican-api-5bd54998d4-vhnpd" Jan 20 17:35:45 crc kubenswrapper[4558]: I0120 17:35:45.251794 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b65a4359-8f00-46fd-acd3-02731c1e1389-config-data\") pod \"barbican-api-5bd54998d4-vhnpd\" (UID: \"b65a4359-8f00-46fd-acd3-02731c1e1389\") " pod="openstack-kuttl-tests/barbican-api-5bd54998d4-vhnpd" Jan 20 17:35:45 crc kubenswrapper[4558]: I0120 17:35:45.251833 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b65a4359-8f00-46fd-acd3-02731c1e1389-internal-tls-certs\") pod \"barbican-api-5bd54998d4-vhnpd\" (UID: \"b65a4359-8f00-46fd-acd3-02731c1e1389\") " pod="openstack-kuttl-tests/barbican-api-5bd54998d4-vhnpd" Jan 20 17:35:45 crc kubenswrapper[4558]: I0120 17:35:45.251873 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b65a4359-8f00-46fd-acd3-02731c1e1389-config-data-custom\") pod \"barbican-api-5bd54998d4-vhnpd\" (UID: \"b65a4359-8f00-46fd-acd3-02731c1e1389\") " pod="openstack-kuttl-tests/barbican-api-5bd54998d4-vhnpd" Jan 20 17:35:45 crc kubenswrapper[4558]: I0120 17:35:45.251925 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b65a4359-8f00-46fd-acd3-02731c1e1389-logs\") pod \"barbican-api-5bd54998d4-vhnpd\" (UID: \"b65a4359-8f00-46fd-acd3-02731c1e1389\") " pod="openstack-kuttl-tests/barbican-api-5bd54998d4-vhnpd" Jan 20 17:35:45 crc kubenswrapper[4558]: I0120 17:35:45.251968 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b65a4359-8f00-46fd-acd3-02731c1e1389-public-tls-certs\") pod \"barbican-api-5bd54998d4-vhnpd\" (UID: \"b65a4359-8f00-46fd-acd3-02731c1e1389\") " pod="openstack-kuttl-tests/barbican-api-5bd54998d4-vhnpd" Jan 20 17:35:45 crc kubenswrapper[4558]: I0120 17:35:45.353134 4558 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b65a4359-8f00-46fd-acd3-02731c1e1389-config-data-custom\") pod \"barbican-api-5bd54998d4-vhnpd\" (UID: \"b65a4359-8f00-46fd-acd3-02731c1e1389\") " pod="openstack-kuttl-tests/barbican-api-5bd54998d4-vhnpd" Jan 20 17:35:45 crc kubenswrapper[4558]: I0120 17:35:45.353965 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b65a4359-8f00-46fd-acd3-02731c1e1389-logs\") pod \"barbican-api-5bd54998d4-vhnpd\" (UID: \"b65a4359-8f00-46fd-acd3-02731c1e1389\") " pod="openstack-kuttl-tests/barbican-api-5bd54998d4-vhnpd" Jan 20 17:35:45 crc kubenswrapper[4558]: I0120 17:35:45.354006 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b65a4359-8f00-46fd-acd3-02731c1e1389-public-tls-certs\") pod \"barbican-api-5bd54998d4-vhnpd\" (UID: \"b65a4359-8f00-46fd-acd3-02731c1e1389\") " pod="openstack-kuttl-tests/barbican-api-5bd54998d4-vhnpd" Jan 20 17:35:45 crc kubenswrapper[4558]: I0120 17:35:45.354150 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kr6b6\" (UniqueName: \"kubernetes.io/projected/b65a4359-8f00-46fd-acd3-02731c1e1389-kube-api-access-kr6b6\") pod \"barbican-api-5bd54998d4-vhnpd\" (UID: \"b65a4359-8f00-46fd-acd3-02731c1e1389\") " pod="openstack-kuttl-tests/barbican-api-5bd54998d4-vhnpd" Jan 20 17:35:45 crc kubenswrapper[4558]: I0120 17:35:45.354192 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b65a4359-8f00-46fd-acd3-02731c1e1389-combined-ca-bundle\") pod \"barbican-api-5bd54998d4-vhnpd\" (UID: \"b65a4359-8f00-46fd-acd3-02731c1e1389\") " pod="openstack-kuttl-tests/barbican-api-5bd54998d4-vhnpd" Jan 20 17:35:45 crc kubenswrapper[4558]: I0120 17:35:45.354239 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b65a4359-8f00-46fd-acd3-02731c1e1389-config-data\") pod \"barbican-api-5bd54998d4-vhnpd\" (UID: \"b65a4359-8f00-46fd-acd3-02731c1e1389\") " pod="openstack-kuttl-tests/barbican-api-5bd54998d4-vhnpd" Jan 20 17:35:45 crc kubenswrapper[4558]: I0120 17:35:45.354272 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b65a4359-8f00-46fd-acd3-02731c1e1389-internal-tls-certs\") pod \"barbican-api-5bd54998d4-vhnpd\" (UID: \"b65a4359-8f00-46fd-acd3-02731c1e1389\") " pod="openstack-kuttl-tests/barbican-api-5bd54998d4-vhnpd" Jan 20 17:35:45 crc kubenswrapper[4558]: I0120 17:35:45.354526 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b65a4359-8f00-46fd-acd3-02731c1e1389-logs\") pod \"barbican-api-5bd54998d4-vhnpd\" (UID: \"b65a4359-8f00-46fd-acd3-02731c1e1389\") " pod="openstack-kuttl-tests/barbican-api-5bd54998d4-vhnpd" Jan 20 17:35:45 crc kubenswrapper[4558]: I0120 17:35:45.363848 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b65a4359-8f00-46fd-acd3-02731c1e1389-config-data-custom\") pod \"barbican-api-5bd54998d4-vhnpd\" (UID: \"b65a4359-8f00-46fd-acd3-02731c1e1389\") " pod="openstack-kuttl-tests/barbican-api-5bd54998d4-vhnpd" Jan 20 17:35:45 crc kubenswrapper[4558]: I0120 
17:35:45.363927 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b65a4359-8f00-46fd-acd3-02731c1e1389-combined-ca-bundle\") pod \"barbican-api-5bd54998d4-vhnpd\" (UID: \"b65a4359-8f00-46fd-acd3-02731c1e1389\") " pod="openstack-kuttl-tests/barbican-api-5bd54998d4-vhnpd" Jan 20 17:35:45 crc kubenswrapper[4558]: I0120 17:35:45.364685 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b65a4359-8f00-46fd-acd3-02731c1e1389-internal-tls-certs\") pod \"barbican-api-5bd54998d4-vhnpd\" (UID: \"b65a4359-8f00-46fd-acd3-02731c1e1389\") " pod="openstack-kuttl-tests/barbican-api-5bd54998d4-vhnpd" Jan 20 17:35:45 crc kubenswrapper[4558]: I0120 17:35:45.365716 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b65a4359-8f00-46fd-acd3-02731c1e1389-config-data\") pod \"barbican-api-5bd54998d4-vhnpd\" (UID: \"b65a4359-8f00-46fd-acd3-02731c1e1389\") " pod="openstack-kuttl-tests/barbican-api-5bd54998d4-vhnpd" Jan 20 17:35:45 crc kubenswrapper[4558]: I0120 17:35:45.365932 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b65a4359-8f00-46fd-acd3-02731c1e1389-public-tls-certs\") pod \"barbican-api-5bd54998d4-vhnpd\" (UID: \"b65a4359-8f00-46fd-acd3-02731c1e1389\") " pod="openstack-kuttl-tests/barbican-api-5bd54998d4-vhnpd" Jan 20 17:35:45 crc kubenswrapper[4558]: I0120 17:35:45.370903 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kr6b6\" (UniqueName: \"kubernetes.io/projected/b65a4359-8f00-46fd-acd3-02731c1e1389-kube-api-access-kr6b6\") pod \"barbican-api-5bd54998d4-vhnpd\" (UID: \"b65a4359-8f00-46fd-acd3-02731c1e1389\") " pod="openstack-kuttl-tests/barbican-api-5bd54998d4-vhnpd" Jan 20 17:35:45 crc kubenswrapper[4558]: I0120 17:35:45.539337 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-api-5bd54998d4-vhnpd"] Jan 20 17:35:45 crc kubenswrapper[4558]: I0120 17:35:45.540830 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-api-5bd54998d4-vhnpd" Jan 20 17:35:45 crc kubenswrapper[4558]: I0120 17:35:45.566933 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-api-f45698ff6-c9mtk"] Jan 20 17:35:45 crc kubenswrapper[4558]: I0120 17:35:45.568576 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-api-f45698ff6-c9mtk" Jan 20 17:35:45 crc kubenswrapper[4558]: I0120 17:35:45.578803 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-api-f45698ff6-c9mtk"] Jan 20 17:35:45 crc kubenswrapper[4558]: I0120 17:35:45.668945 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7de6386e-7693-430d-9753-2d7fa8c31b5d-config-data-custom\") pod \"barbican-api-f45698ff6-c9mtk\" (UID: \"7de6386e-7693-430d-9753-2d7fa8c31b5d\") " pod="openstack-kuttl-tests/barbican-api-f45698ff6-c9mtk" Jan 20 17:35:45 crc kubenswrapper[4558]: I0120 17:35:45.669054 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7de6386e-7693-430d-9753-2d7fa8c31b5d-config-data\") pod \"barbican-api-f45698ff6-c9mtk\" (UID: \"7de6386e-7693-430d-9753-2d7fa8c31b5d\") " pod="openstack-kuttl-tests/barbican-api-f45698ff6-c9mtk" Jan 20 17:35:45 crc kubenswrapper[4558]: I0120 17:35:45.669157 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7de6386e-7693-430d-9753-2d7fa8c31b5d-combined-ca-bundle\") pod \"barbican-api-f45698ff6-c9mtk\" (UID: \"7de6386e-7693-430d-9753-2d7fa8c31b5d\") " pod="openstack-kuttl-tests/barbican-api-f45698ff6-c9mtk" Jan 20 17:35:45 crc kubenswrapper[4558]: I0120 17:35:45.669338 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7de6386e-7693-430d-9753-2d7fa8c31b5d-logs\") pod \"barbican-api-f45698ff6-c9mtk\" (UID: \"7de6386e-7693-430d-9753-2d7fa8c31b5d\") " pod="openstack-kuttl-tests/barbican-api-f45698ff6-c9mtk" Jan 20 17:35:45 crc kubenswrapper[4558]: I0120 17:35:45.669405 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nbd52\" (UniqueName: \"kubernetes.io/projected/7de6386e-7693-430d-9753-2d7fa8c31b5d-kube-api-access-nbd52\") pod \"barbican-api-f45698ff6-c9mtk\" (UID: \"7de6386e-7693-430d-9753-2d7fa8c31b5d\") " pod="openstack-kuttl-tests/barbican-api-f45698ff6-c9mtk" Jan 20 17:35:45 crc kubenswrapper[4558]: I0120 17:35:45.669457 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7de6386e-7693-430d-9753-2d7fa8c31b5d-public-tls-certs\") pod \"barbican-api-f45698ff6-c9mtk\" (UID: \"7de6386e-7693-430d-9753-2d7fa8c31b5d\") " pod="openstack-kuttl-tests/barbican-api-f45698ff6-c9mtk" Jan 20 17:35:45 crc kubenswrapper[4558]: I0120 17:35:45.669493 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7de6386e-7693-430d-9753-2d7fa8c31b5d-internal-tls-certs\") pod \"barbican-api-f45698ff6-c9mtk\" (UID: \"7de6386e-7693-430d-9753-2d7fa8c31b5d\") " pod="openstack-kuttl-tests/barbican-api-f45698ff6-c9mtk" Jan 20 17:35:45 crc kubenswrapper[4558]: I0120 17:35:45.688783 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:35:45 crc kubenswrapper[4558]: I0120 17:35:45.771396 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/7de6386e-7693-430d-9753-2d7fa8c31b5d-config-data-custom\") pod \"barbican-api-f45698ff6-c9mtk\" (UID: \"7de6386e-7693-430d-9753-2d7fa8c31b5d\") " pod="openstack-kuttl-tests/barbican-api-f45698ff6-c9mtk" Jan 20 17:35:45 crc kubenswrapper[4558]: I0120 17:35:45.771690 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7de6386e-7693-430d-9753-2d7fa8c31b5d-config-data\") pod \"barbican-api-f45698ff6-c9mtk\" (UID: \"7de6386e-7693-430d-9753-2d7fa8c31b5d\") " pod="openstack-kuttl-tests/barbican-api-f45698ff6-c9mtk" Jan 20 17:35:45 crc kubenswrapper[4558]: I0120 17:35:45.771788 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7de6386e-7693-430d-9753-2d7fa8c31b5d-combined-ca-bundle\") pod \"barbican-api-f45698ff6-c9mtk\" (UID: \"7de6386e-7693-430d-9753-2d7fa8c31b5d\") " pod="openstack-kuttl-tests/barbican-api-f45698ff6-c9mtk" Jan 20 17:35:45 crc kubenswrapper[4558]: I0120 17:35:45.771914 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7de6386e-7693-430d-9753-2d7fa8c31b5d-logs\") pod \"barbican-api-f45698ff6-c9mtk\" (UID: \"7de6386e-7693-430d-9753-2d7fa8c31b5d\") " pod="openstack-kuttl-tests/barbican-api-f45698ff6-c9mtk" Jan 20 17:35:45 crc kubenswrapper[4558]: I0120 17:35:45.771988 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nbd52\" (UniqueName: \"kubernetes.io/projected/7de6386e-7693-430d-9753-2d7fa8c31b5d-kube-api-access-nbd52\") pod \"barbican-api-f45698ff6-c9mtk\" (UID: \"7de6386e-7693-430d-9753-2d7fa8c31b5d\") " pod="openstack-kuttl-tests/barbican-api-f45698ff6-c9mtk" Jan 20 17:35:45 crc kubenswrapper[4558]: I0120 17:35:45.772035 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7de6386e-7693-430d-9753-2d7fa8c31b5d-public-tls-certs\") pod \"barbican-api-f45698ff6-c9mtk\" (UID: \"7de6386e-7693-430d-9753-2d7fa8c31b5d\") " pod="openstack-kuttl-tests/barbican-api-f45698ff6-c9mtk" Jan 20 17:35:45 crc kubenswrapper[4558]: I0120 17:35:45.772074 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7de6386e-7693-430d-9753-2d7fa8c31b5d-internal-tls-certs\") pod \"barbican-api-f45698ff6-c9mtk\" (UID: \"7de6386e-7693-430d-9753-2d7fa8c31b5d\") " pod="openstack-kuttl-tests/barbican-api-f45698ff6-c9mtk" Jan 20 17:35:45 crc kubenswrapper[4558]: I0120 17:35:45.772446 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7de6386e-7693-430d-9753-2d7fa8c31b5d-logs\") pod \"barbican-api-f45698ff6-c9mtk\" (UID: \"7de6386e-7693-430d-9753-2d7fa8c31b5d\") " pod="openstack-kuttl-tests/barbican-api-f45698ff6-c9mtk" Jan 20 17:35:45 crc kubenswrapper[4558]: I0120 17:35:45.775231 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7de6386e-7693-430d-9753-2d7fa8c31b5d-config-data\") pod \"barbican-api-f45698ff6-c9mtk\" (UID: \"7de6386e-7693-430d-9753-2d7fa8c31b5d\") " pod="openstack-kuttl-tests/barbican-api-f45698ff6-c9mtk" Jan 20 17:35:45 crc kubenswrapper[4558]: I0120 17:35:45.775619 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/7de6386e-7693-430d-9753-2d7fa8c31b5d-public-tls-certs\") pod \"barbican-api-f45698ff6-c9mtk\" (UID: \"7de6386e-7693-430d-9753-2d7fa8c31b5d\") " pod="openstack-kuttl-tests/barbican-api-f45698ff6-c9mtk" Jan 20 17:35:45 crc kubenswrapper[4558]: I0120 17:35:45.776696 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7de6386e-7693-430d-9753-2d7fa8c31b5d-internal-tls-certs\") pod \"barbican-api-f45698ff6-c9mtk\" (UID: \"7de6386e-7693-430d-9753-2d7fa8c31b5d\") " pod="openstack-kuttl-tests/barbican-api-f45698ff6-c9mtk" Jan 20 17:35:45 crc kubenswrapper[4558]: I0120 17:35:45.778598 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7de6386e-7693-430d-9753-2d7fa8c31b5d-config-data-custom\") pod \"barbican-api-f45698ff6-c9mtk\" (UID: \"7de6386e-7693-430d-9753-2d7fa8c31b5d\") " pod="openstack-kuttl-tests/barbican-api-f45698ff6-c9mtk" Jan 20 17:35:45 crc kubenswrapper[4558]: I0120 17:35:45.778649 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7de6386e-7693-430d-9753-2d7fa8c31b5d-combined-ca-bundle\") pod \"barbican-api-f45698ff6-c9mtk\" (UID: \"7de6386e-7693-430d-9753-2d7fa8c31b5d\") " pod="openstack-kuttl-tests/barbican-api-f45698ff6-c9mtk" Jan 20 17:35:45 crc kubenswrapper[4558]: I0120 17:35:45.787876 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nbd52\" (UniqueName: \"kubernetes.io/projected/7de6386e-7693-430d-9753-2d7fa8c31b5d-kube-api-access-nbd52\") pod \"barbican-api-f45698ff6-c9mtk\" (UID: \"7de6386e-7693-430d-9753-2d7fa8c31b5d\") " pod="openstack-kuttl-tests/barbican-api-f45698ff6-c9mtk" Jan 20 17:35:45 crc kubenswrapper[4558]: I0120 17:35:45.911668 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"5a046f3a-bea5-4aea-8081-1c5994ecdbd9","Type":"ContainerStarted","Data":"9940485ddf968cc8ec26bafdb2ca664eaf066521e0b63efd3dafb4c251d88a6e"} Jan 20 17:35:45 crc kubenswrapper[4558]: I0120 17:35:45.911732 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"5a046f3a-bea5-4aea-8081-1c5994ecdbd9","Type":"ContainerStarted","Data":"ae2197c02d74d0b7fb6e5576d5e29ab4e2fe41416be7e0d896afd6625b8eed8a"} Jan 20 17:35:45 crc kubenswrapper[4558]: I0120 17:35:45.913770 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-api-f45698ff6-c9mtk" Jan 20 17:35:45 crc kubenswrapper[4558]: I0120 17:35:45.926289 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"099193f1-dcf1-4c0a-beeb-fac5f0824cb8","Type":"ContainerStarted","Data":"53ba20480eaba79bd67435843f2f6a3f80cc9813358a7b7eca6feb2250f3c8b4"} Jan 20 17:35:45 crc kubenswrapper[4558]: I0120 17:35:45.928016 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"bbb20173-fee8-4323-afbb-f2bbbec3e978","Type":"ContainerStarted","Data":"1f1063795318c00e5b19c483c70ac74ca60c9c396081249534c260035e47ad7a"} Jan 20 17:35:45 crc kubenswrapper[4558]: I0120 17:35:45.943265 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-api-0" podStartSLOduration=2.9432506529999998 podStartE2EDuration="2.943250653s" podCreationTimestamp="2026-01-20 17:35:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:35:45.928266377 +0000 UTC m=+3239.688604345" watchObservedRunningTime="2026-01-20 17:35:45.943250653 +0000 UTC m=+3239.703588620" Jan 20 17:35:46 crc kubenswrapper[4558]: I0120 17:35:46.140615 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-api-5bd54998d4-vhnpd"] Jan 20 17:35:46 crc kubenswrapper[4558]: I0120 17:35:46.524137 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-api-f45698ff6-c9mtk"] Jan 20 17:35:46 crc kubenswrapper[4558]: I0120 17:35:46.640745 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="639521c5-8df8-4c45-a66a-0be7119bb8d2" path="/var/lib/kubelet/pods/639521c5-8df8-4c45-a66a-0be7119bb8d2/volumes" Jan 20 17:35:46 crc kubenswrapper[4558]: I0120 17:35:46.943964 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"099193f1-dcf1-4c0a-beeb-fac5f0824cb8","Type":"ContainerStarted","Data":"c2621d3cab78726f6f01fe56a565e7406820ab18caa470c8455fa8fed99663ce"} Jan 20 17:35:46 crc kubenswrapper[4558]: I0120 17:35:46.947874 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-f45698ff6-c9mtk" event={"ID":"7de6386e-7693-430d-9753-2d7fa8c31b5d","Type":"ContainerStarted","Data":"65853762fe6cac36e4de6676f22c99039ab79573b354ec57daadc513c082bba2"} Jan 20 17:35:46 crc kubenswrapper[4558]: I0120 17:35:46.947900 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-f45698ff6-c9mtk" event={"ID":"7de6386e-7693-430d-9753-2d7fa8c31b5d","Type":"ContainerStarted","Data":"da69badc281328054557a9a09ff8ac7156fbf69e9baa4127114183ce9151e616"} Jan 20 17:35:46 crc kubenswrapper[4558]: I0120 17:35:46.949296 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"bbb20173-fee8-4323-afbb-f2bbbec3e978","Type":"ContainerStarted","Data":"f9994a423f0457d16e1857b55313f41375f006cd328176af514da36879ef8b96"} Jan 20 17:35:46 crc kubenswrapper[4558]: I0120 17:35:46.951778 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-5bd54998d4-vhnpd" event={"ID":"b65a4359-8f00-46fd-acd3-02731c1e1389","Type":"ContainerStarted","Data":"e83ed111fb3ee3dc0bd3a5d840dc90b5d39eefacfe46dbeaa76ace5b7cd2f7ba"} Jan 20 17:35:46 crc kubenswrapper[4558]: I0120 
17:35:46.951802 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-5bd54998d4-vhnpd" event={"ID":"b65a4359-8f00-46fd-acd3-02731c1e1389","Type":"ContainerStarted","Data":"e12a28a96c9d0ba545f44cb57fd8a67b5fc9fb1e38c66d21b4f72e12208b5c07"} Jan 20 17:35:46 crc kubenswrapper[4558]: I0120 17:35:46.997966 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" podStartSLOduration=2.997938843 podStartE2EDuration="2.997938843s" podCreationTimestamp="2026-01-20 17:35:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:35:46.98439181 +0000 UTC m=+3240.744729777" watchObservedRunningTime="2026-01-20 17:35:46.997938843 +0000 UTC m=+3240.758276811" Jan 20 17:35:47 crc kubenswrapper[4558]: I0120 17:35:47.962485 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-f45698ff6-c9mtk" event={"ID":"7de6386e-7693-430d-9753-2d7fa8c31b5d","Type":"ContainerStarted","Data":"af363f8fa728755ec981997979783ddd99710b030181fea30ee5db9adb704c8e"} Jan 20 17:35:47 crc kubenswrapper[4558]: I0120 17:35:47.964524 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/barbican-api-f45698ff6-c9mtk" Jan 20 17:35:47 crc kubenswrapper[4558]: I0120 17:35:47.964887 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"bbb20173-fee8-4323-afbb-f2bbbec3e978","Type":"ContainerStarted","Data":"ea6d1ca32a116c466cabb20cadcc9e9af27e7b82039308f7f1df1e6178fa2a36"} Jan 20 17:35:47 crc kubenswrapper[4558]: I0120 17:35:47.967211 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-api-5bd54998d4-vhnpd" podUID="b65a4359-8f00-46fd-acd3-02731c1e1389" containerName="barbican-api-log" containerID="cri-o://e83ed111fb3ee3dc0bd3a5d840dc90b5d39eefacfe46dbeaa76ace5b7cd2f7ba" gracePeriod=30 Jan 20 17:35:47 crc kubenswrapper[4558]: I0120 17:35:47.967487 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-5bd54998d4-vhnpd" event={"ID":"b65a4359-8f00-46fd-acd3-02731c1e1389","Type":"ContainerStarted","Data":"e0ed3312930991cbf925c1b0e5d1e007a5bd9ddca399ef8faa0ede5c15dcce69"} Jan 20 17:35:47 crc kubenswrapper[4558]: I0120 17:35:47.967543 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/barbican-api-5bd54998d4-vhnpd" Jan 20 17:35:47 crc kubenswrapper[4558]: I0120 17:35:47.967564 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/barbican-api-5bd54998d4-vhnpd" Jan 20 17:35:47 crc kubenswrapper[4558]: I0120 17:35:47.967603 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-api-5bd54998d4-vhnpd" podUID="b65a4359-8f00-46fd-acd3-02731c1e1389" containerName="barbican-api" containerID="cri-o://e0ed3312930991cbf925c1b0e5d1e007a5bd9ddca399ef8faa0ede5c15dcce69" gracePeriod=30 Jan 20 17:35:47 crc kubenswrapper[4558]: I0120 17:35:47.998239 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-api-f45698ff6-c9mtk" podStartSLOduration=2.998226731 podStartE2EDuration="2.998226731s" podCreationTimestamp="2026-01-20 17:35:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 
UTC" observedRunningTime="2026-01-20 17:35:47.98166607 +0000 UTC m=+3241.742004037" watchObservedRunningTime="2026-01-20 17:35:47.998226731 +0000 UTC m=+3241.758564698" Jan 20 17:35:48 crc kubenswrapper[4558]: I0120 17:35:48.007893 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-api-5bd54998d4-vhnpd" podStartSLOduration=3.007873371 podStartE2EDuration="3.007873371s" podCreationTimestamp="2026-01-20 17:35:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:35:48.001217675 +0000 UTC m=+3241.761555643" watchObservedRunningTime="2026-01-20 17:35:48.007873371 +0000 UTC m=+3241.768211337" Jan 20 17:35:48 crc kubenswrapper[4558]: I0120 17:35:48.521451 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-api-5bd54998d4-vhnpd" Jan 20 17:35:48 crc kubenswrapper[4558]: I0120 17:35:48.541249 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:35:48 crc kubenswrapper[4558]: I0120 17:35:48.577062 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b65a4359-8f00-46fd-acd3-02731c1e1389-combined-ca-bundle\") pod \"b65a4359-8f00-46fd-acd3-02731c1e1389\" (UID: \"b65a4359-8f00-46fd-acd3-02731c1e1389\") " Jan 20 17:35:48 crc kubenswrapper[4558]: I0120 17:35:48.639476 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b65a4359-8f00-46fd-acd3-02731c1e1389-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b65a4359-8f00-46fd-acd3-02731c1e1389" (UID: "b65a4359-8f00-46fd-acd3-02731c1e1389"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:48 crc kubenswrapper[4558]: I0120 17:35:48.685997 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b65a4359-8f00-46fd-acd3-02731c1e1389-internal-tls-certs\") pod \"b65a4359-8f00-46fd-acd3-02731c1e1389\" (UID: \"b65a4359-8f00-46fd-acd3-02731c1e1389\") " Jan 20 17:35:48 crc kubenswrapper[4558]: I0120 17:35:48.686039 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b65a4359-8f00-46fd-acd3-02731c1e1389-logs\") pod \"b65a4359-8f00-46fd-acd3-02731c1e1389\" (UID: \"b65a4359-8f00-46fd-acd3-02731c1e1389\") " Jan 20 17:35:48 crc kubenswrapper[4558]: I0120 17:35:48.686075 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b65a4359-8f00-46fd-acd3-02731c1e1389-config-data-custom\") pod \"b65a4359-8f00-46fd-acd3-02731c1e1389\" (UID: \"b65a4359-8f00-46fd-acd3-02731c1e1389\") " Jan 20 17:35:48 crc kubenswrapper[4558]: I0120 17:35:48.686136 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b65a4359-8f00-46fd-acd3-02731c1e1389-public-tls-certs\") pod \"b65a4359-8f00-46fd-acd3-02731c1e1389\" (UID: \"b65a4359-8f00-46fd-acd3-02731c1e1389\") " Jan 20 17:35:48 crc kubenswrapper[4558]: I0120 17:35:48.686220 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b65a4359-8f00-46fd-acd3-02731c1e1389-config-data\") pod \"b65a4359-8f00-46fd-acd3-02731c1e1389\" (UID: \"b65a4359-8f00-46fd-acd3-02731c1e1389\") " Jan 20 17:35:48 crc kubenswrapper[4558]: I0120 17:35:48.686253 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kr6b6\" (UniqueName: \"kubernetes.io/projected/b65a4359-8f00-46fd-acd3-02731c1e1389-kube-api-access-kr6b6\") pod \"b65a4359-8f00-46fd-acd3-02731c1e1389\" (UID: \"b65a4359-8f00-46fd-acd3-02731c1e1389\") " Jan 20 17:35:48 crc kubenswrapper[4558]: I0120 17:35:48.686921 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b65a4359-8f00-46fd-acd3-02731c1e1389-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:48 crc kubenswrapper[4558]: I0120 17:35:48.688597 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b65a4359-8f00-46fd-acd3-02731c1e1389-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "b65a4359-8f00-46fd-acd3-02731c1e1389" (UID: "b65a4359-8f00-46fd-acd3-02731c1e1389"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:48 crc kubenswrapper[4558]: I0120 17:35:48.688963 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b65a4359-8f00-46fd-acd3-02731c1e1389-logs" (OuterVolumeSpecName: "logs") pod "b65a4359-8f00-46fd-acd3-02731c1e1389" (UID: "b65a4359-8f00-46fd-acd3-02731c1e1389"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:35:48 crc kubenswrapper[4558]: I0120 17:35:48.698412 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b65a4359-8f00-46fd-acd3-02731c1e1389-kube-api-access-kr6b6" (OuterVolumeSpecName: "kube-api-access-kr6b6") pod "b65a4359-8f00-46fd-acd3-02731c1e1389" (UID: "b65a4359-8f00-46fd-acd3-02731c1e1389"). InnerVolumeSpecName "kube-api-access-kr6b6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:35:48 crc kubenswrapper[4558]: I0120 17:35:48.728081 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b65a4359-8f00-46fd-acd3-02731c1e1389-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "b65a4359-8f00-46fd-acd3-02731c1e1389" (UID: "b65a4359-8f00-46fd-acd3-02731c1e1389"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:48 crc kubenswrapper[4558]: I0120 17:35:48.732185 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b65a4359-8f00-46fd-acd3-02731c1e1389-config-data" (OuterVolumeSpecName: "config-data") pod "b65a4359-8f00-46fd-acd3-02731c1e1389" (UID: "b65a4359-8f00-46fd-acd3-02731c1e1389"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:48 crc kubenswrapper[4558]: I0120 17:35:48.771377 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b65a4359-8f00-46fd-acd3-02731c1e1389-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "b65a4359-8f00-46fd-acd3-02731c1e1389" (UID: "b65a4359-8f00-46fd-acd3-02731c1e1389"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:48 crc kubenswrapper[4558]: I0120 17:35:48.789217 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kr6b6\" (UniqueName: \"kubernetes.io/projected/b65a4359-8f00-46fd-acd3-02731c1e1389-kube-api-access-kr6b6\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:48 crc kubenswrapper[4558]: I0120 17:35:48.789250 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b65a4359-8f00-46fd-acd3-02731c1e1389-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:48 crc kubenswrapper[4558]: I0120 17:35:48.789261 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b65a4359-8f00-46fd-acd3-02731c1e1389-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:48 crc kubenswrapper[4558]: I0120 17:35:48.789272 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b65a4359-8f00-46fd-acd3-02731c1e1389-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:48 crc kubenswrapper[4558]: I0120 17:35:48.789281 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b65a4359-8f00-46fd-acd3-02731c1e1389-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:48 crc kubenswrapper[4558]: I0120 17:35:48.789288 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b65a4359-8f00-46fd-acd3-02731c1e1389-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:48 crc kubenswrapper[4558]: I0120 17:35:48.813068 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:35:48 crc kubenswrapper[4558]: I0120 17:35:48.813409 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovn-northd-0" podUID="39600642-1418-4cb4-8617-f2ecc7089e0b" containerName="ovn-northd" containerID="cri-o://33b43d304994a43dcb87337ad0c6e08deaf4a2507647b3ba6f756482ec39e182" gracePeriod=30 Jan 20 17:35:48 crc kubenswrapper[4558]: I0120 17:35:48.813522 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovn-northd-0" podUID="39600642-1418-4cb4-8617-f2ecc7089e0b" containerName="openstack-network-exporter" containerID="cri-o://838cfcc8e26bf3e11489fea7f7801117e2630d787be77ecdb9b5831a033f4a6c" gracePeriod=30 Jan 20 17:35:48 crc kubenswrapper[4558]: I0120 17:35:48.829140 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-7d7dbcb65b-ttg5w"] Jan 20 17:35:48 crc kubenswrapper[4558]: E0120 17:35:48.829740 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b65a4359-8f00-46fd-acd3-02731c1e1389" containerName="barbican-api" Jan 20 17:35:48 crc kubenswrapper[4558]: I0120 17:35:48.829764 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b65a4359-8f00-46fd-acd3-02731c1e1389" containerName="barbican-api" Jan 20 17:35:48 crc kubenswrapper[4558]: E0120 17:35:48.829785 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b65a4359-8f00-46fd-acd3-02731c1e1389" containerName="barbican-api-log" Jan 20 17:35:48 crc kubenswrapper[4558]: I0120 17:35:48.829793 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b65a4359-8f00-46fd-acd3-02731c1e1389" containerName="barbican-api-log" Jan 20 17:35:48 crc 
kubenswrapper[4558]: I0120 17:35:48.830015 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b65a4359-8f00-46fd-acd3-02731c1e1389" containerName="barbican-api" Jan 20 17:35:48 crc kubenswrapper[4558]: I0120 17:35:48.830043 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b65a4359-8f00-46fd-acd3-02731c1e1389" containerName="barbican-api-log" Jan 20 17:35:48 crc kubenswrapper[4558]: I0120 17:35:48.831228 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-7d7dbcb65b-ttg5w" Jan 20 17:35:48 crc kubenswrapper[4558]: I0120 17:35:48.848572 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:35:48 crc kubenswrapper[4558]: I0120 17:35:48.848783 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/memcached-0" podUID="5391541f-5057-4d20-ae1e-bbc88e6b33a4" containerName="memcached" containerID="cri-o://4f3f925041f116a55af0c01739c0b06976097986bc12562c41828bd96d52c5f4" gracePeriod=30 Jan 20 17:35:48 crc kubenswrapper[4558]: I0120 17:35:48.858334 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-7d7dbcb65b-ttg5w"] Jan 20 17:35:48 crc kubenswrapper[4558]: I0120 17:35:48.880892 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-worker-dd7c4cf57-h4b8r"] Jan 20 17:35:48 crc kubenswrapper[4558]: I0120 17:35:48.883424 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-dd7c4cf57-h4b8r" Jan 20 17:35:48 crc kubenswrapper[4558]: I0120 17:35:48.891756 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4bb0e311-8438-4794-929d-791f0285afaf-config-data\") pod \"barbican-worker-dd7c4cf57-h4b8r\" (UID: \"4bb0e311-8438-4794-929d-791f0285afaf\") " pod="openstack-kuttl-tests/barbican-worker-dd7c4cf57-h4b8r" Jan 20 17:35:48 crc kubenswrapper[4558]: I0120 17:35:48.891868 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ab0a776-6551-4ef8-bd41-0ef0e58d892d-combined-ca-bundle\") pod \"barbican-keystone-listener-7d7dbcb65b-ttg5w\" (UID: \"1ab0a776-6551-4ef8-bd41-0ef0e58d892d\") " pod="openstack-kuttl-tests/barbican-keystone-listener-7d7dbcb65b-ttg5w" Jan 20 17:35:48 crc kubenswrapper[4558]: I0120 17:35:48.891910 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1ab0a776-6551-4ef8-bd41-0ef0e58d892d-logs\") pod \"barbican-keystone-listener-7d7dbcb65b-ttg5w\" (UID: \"1ab0a776-6551-4ef8-bd41-0ef0e58d892d\") " pod="openstack-kuttl-tests/barbican-keystone-listener-7d7dbcb65b-ttg5w" Jan 20 17:35:48 crc kubenswrapper[4558]: I0120 17:35:48.891935 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1ab0a776-6551-4ef8-bd41-0ef0e58d892d-config-data\") pod \"barbican-keystone-listener-7d7dbcb65b-ttg5w\" (UID: \"1ab0a776-6551-4ef8-bd41-0ef0e58d892d\") " pod="openstack-kuttl-tests/barbican-keystone-listener-7d7dbcb65b-ttg5w" Jan 20 17:35:48 crc kubenswrapper[4558]: I0120 17:35:48.891953 4558 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4bb0e311-8438-4794-929d-791f0285afaf-combined-ca-bundle\") pod \"barbican-worker-dd7c4cf57-h4b8r\" (UID: \"4bb0e311-8438-4794-929d-791f0285afaf\") " pod="openstack-kuttl-tests/barbican-worker-dd7c4cf57-h4b8r" Jan 20 17:35:48 crc kubenswrapper[4558]: I0120 17:35:48.891980 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v44jg\" (UniqueName: \"kubernetes.io/projected/1ab0a776-6551-4ef8-bd41-0ef0e58d892d-kube-api-access-v44jg\") pod \"barbican-keystone-listener-7d7dbcb65b-ttg5w\" (UID: \"1ab0a776-6551-4ef8-bd41-0ef0e58d892d\") " pod="openstack-kuttl-tests/barbican-keystone-listener-7d7dbcb65b-ttg5w" Jan 20 17:35:48 crc kubenswrapper[4558]: I0120 17:35:48.892001 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4bb0e311-8438-4794-929d-791f0285afaf-config-data-custom\") pod \"barbican-worker-dd7c4cf57-h4b8r\" (UID: \"4bb0e311-8438-4794-929d-791f0285afaf\") " pod="openstack-kuttl-tests/barbican-worker-dd7c4cf57-h4b8r" Jan 20 17:35:48 crc kubenswrapper[4558]: I0120 17:35:48.892042 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1ab0a776-6551-4ef8-bd41-0ef0e58d892d-config-data-custom\") pod \"barbican-keystone-listener-7d7dbcb65b-ttg5w\" (UID: \"1ab0a776-6551-4ef8-bd41-0ef0e58d892d\") " pod="openstack-kuttl-tests/barbican-keystone-listener-7d7dbcb65b-ttg5w" Jan 20 17:35:48 crc kubenswrapper[4558]: I0120 17:35:48.892073 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4bb0e311-8438-4794-929d-791f0285afaf-logs\") pod \"barbican-worker-dd7c4cf57-h4b8r\" (UID: \"4bb0e311-8438-4794-929d-791f0285afaf\") " pod="openstack-kuttl-tests/barbican-worker-dd7c4cf57-h4b8r" Jan 20 17:35:48 crc kubenswrapper[4558]: I0120 17:35:48.892100 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m86b4\" (UniqueName: \"kubernetes.io/projected/4bb0e311-8438-4794-929d-791f0285afaf-kube-api-access-m86b4\") pod \"barbican-worker-dd7c4cf57-h4b8r\" (UID: \"4bb0e311-8438-4794-929d-791f0285afaf\") " pod="openstack-kuttl-tests/barbican-worker-dd7c4cf57-h4b8r" Jan 20 17:35:48 crc kubenswrapper[4558]: I0120 17:35:48.892235 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:35:48 crc kubenswrapper[4558]: I0120 17:35:48.892636 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovsdbserver-nb-0" podUID="e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c" containerName="openstack-network-exporter" containerID="cri-o://ea31e1aad28c22c0366c0c296a61f01044fbf0caf04c9f039be12136bde4291d" gracePeriod=300 Jan 20 17:35:48 crc kubenswrapper[4558]: I0120 17:35:48.961776 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-worker-dd7c4cf57-h4b8r"] Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:48.994198 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4bb0e311-8438-4794-929d-791f0285afaf-config-data\") pod \"barbican-worker-dd7c4cf57-h4b8r\" (UID: 
\"4bb0e311-8438-4794-929d-791f0285afaf\") " pod="openstack-kuttl-tests/barbican-worker-dd7c4cf57-h4b8r" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:48.994381 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ab0a776-6551-4ef8-bd41-0ef0e58d892d-combined-ca-bundle\") pod \"barbican-keystone-listener-7d7dbcb65b-ttg5w\" (UID: \"1ab0a776-6551-4ef8-bd41-0ef0e58d892d\") " pod="openstack-kuttl-tests/barbican-keystone-listener-7d7dbcb65b-ttg5w" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:48.994463 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1ab0a776-6551-4ef8-bd41-0ef0e58d892d-logs\") pod \"barbican-keystone-listener-7d7dbcb65b-ttg5w\" (UID: \"1ab0a776-6551-4ef8-bd41-0ef0e58d892d\") " pod="openstack-kuttl-tests/barbican-keystone-listener-7d7dbcb65b-ttg5w" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:48.994507 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1ab0a776-6551-4ef8-bd41-0ef0e58d892d-config-data\") pod \"barbican-keystone-listener-7d7dbcb65b-ttg5w\" (UID: \"1ab0a776-6551-4ef8-bd41-0ef0e58d892d\") " pod="openstack-kuttl-tests/barbican-keystone-listener-7d7dbcb65b-ttg5w" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:48.994525 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4bb0e311-8438-4794-929d-791f0285afaf-combined-ca-bundle\") pod \"barbican-worker-dd7c4cf57-h4b8r\" (UID: \"4bb0e311-8438-4794-929d-791f0285afaf\") " pod="openstack-kuttl-tests/barbican-worker-dd7c4cf57-h4b8r" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:48.994579 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v44jg\" (UniqueName: \"kubernetes.io/projected/1ab0a776-6551-4ef8-bd41-0ef0e58d892d-kube-api-access-v44jg\") pod \"barbican-keystone-listener-7d7dbcb65b-ttg5w\" (UID: \"1ab0a776-6551-4ef8-bd41-0ef0e58d892d\") " pod="openstack-kuttl-tests/barbican-keystone-listener-7d7dbcb65b-ttg5w" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:48.994613 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4bb0e311-8438-4794-929d-791f0285afaf-config-data-custom\") pod \"barbican-worker-dd7c4cf57-h4b8r\" (UID: \"4bb0e311-8438-4794-929d-791f0285afaf\") " pod="openstack-kuttl-tests/barbican-worker-dd7c4cf57-h4b8r" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:48.994691 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1ab0a776-6551-4ef8-bd41-0ef0e58d892d-config-data-custom\") pod \"barbican-keystone-listener-7d7dbcb65b-ttg5w\" (UID: \"1ab0a776-6551-4ef8-bd41-0ef0e58d892d\") " pod="openstack-kuttl-tests/barbican-keystone-listener-7d7dbcb65b-ttg5w" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:48.994767 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4bb0e311-8438-4794-929d-791f0285afaf-logs\") pod \"barbican-worker-dd7c4cf57-h4b8r\" (UID: \"4bb0e311-8438-4794-929d-791f0285afaf\") " pod="openstack-kuttl-tests/barbican-worker-dd7c4cf57-h4b8r" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:48.994819 4558 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m86b4\" (UniqueName: \"kubernetes.io/projected/4bb0e311-8438-4794-929d-791f0285afaf-kube-api-access-m86b4\") pod \"barbican-worker-dd7c4cf57-h4b8r\" (UID: \"4bb0e311-8438-4794-929d-791f0285afaf\") " pod="openstack-kuttl-tests/barbican-worker-dd7c4cf57-h4b8r" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.010967 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4bb0e311-8438-4794-929d-791f0285afaf-logs\") pod \"barbican-worker-dd7c4cf57-h4b8r\" (UID: \"4bb0e311-8438-4794-929d-791f0285afaf\") " pod="openstack-kuttl-tests/barbican-worker-dd7c4cf57-h4b8r" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.013510 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1ab0a776-6551-4ef8-bd41-0ef0e58d892d-logs\") pod \"barbican-keystone-listener-7d7dbcb65b-ttg5w\" (UID: \"1ab0a776-6551-4ef8-bd41-0ef0e58d892d\") " pod="openstack-kuttl-tests/barbican-keystone-listener-7d7dbcb65b-ttg5w" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.039467 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1ab0a776-6551-4ef8-bd41-0ef0e58d892d-config-data\") pod \"barbican-keystone-listener-7d7dbcb65b-ttg5w\" (UID: \"1ab0a776-6551-4ef8-bd41-0ef0e58d892d\") " pod="openstack-kuttl-tests/barbican-keystone-listener-7d7dbcb65b-ttg5w" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.040916 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4bb0e311-8438-4794-929d-791f0285afaf-combined-ca-bundle\") pod \"barbican-worker-dd7c4cf57-h4b8r\" (UID: \"4bb0e311-8438-4794-929d-791f0285afaf\") " pod="openstack-kuttl-tests/barbican-worker-dd7c4cf57-h4b8r" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.056710 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1ab0a776-6551-4ef8-bd41-0ef0e58d892d-config-data-custom\") pod \"barbican-keystone-listener-7d7dbcb65b-ttg5w\" (UID: \"1ab0a776-6551-4ef8-bd41-0ef0e58d892d\") " pod="openstack-kuttl-tests/barbican-keystone-listener-7d7dbcb65b-ttg5w" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.056749 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ab0a776-6551-4ef8-bd41-0ef0e58d892d-combined-ca-bundle\") pod \"barbican-keystone-listener-7d7dbcb65b-ttg5w\" (UID: \"1ab0a776-6551-4ef8-bd41-0ef0e58d892d\") " pod="openstack-kuttl-tests/barbican-keystone-listener-7d7dbcb65b-ttg5w" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.057232 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4bb0e311-8438-4794-929d-791f0285afaf-config-data-custom\") pod \"barbican-worker-dd7c4cf57-h4b8r\" (UID: \"4bb0e311-8438-4794-929d-791f0285afaf\") " pod="openstack-kuttl-tests/barbican-worker-dd7c4cf57-h4b8r" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.057288 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v44jg\" (UniqueName: \"kubernetes.io/projected/1ab0a776-6551-4ef8-bd41-0ef0e58d892d-kube-api-access-v44jg\") pod \"barbican-keystone-listener-7d7dbcb65b-ttg5w\" (UID: 
\"1ab0a776-6551-4ef8-bd41-0ef0e58d892d\") " pod="openstack-kuttl-tests/barbican-keystone-listener-7d7dbcb65b-ttg5w" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.058067 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4bb0e311-8438-4794-929d-791f0285afaf-config-data\") pod \"barbican-worker-dd7c4cf57-h4b8r\" (UID: \"4bb0e311-8438-4794-929d-791f0285afaf\") " pod="openstack-kuttl-tests/barbican-worker-dd7c4cf57-h4b8r" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.058363 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m86b4\" (UniqueName: \"kubernetes.io/projected/4bb0e311-8438-4794-929d-791f0285afaf-kube-api-access-m86b4\") pod \"barbican-worker-dd7c4cf57-h4b8r\" (UID: \"4bb0e311-8438-4794-929d-791f0285afaf\") " pod="openstack-kuttl-tests/barbican-worker-dd7c4cf57-h4b8r" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.085469 4558 generic.go:334] "Generic (PLEG): container finished" podID="39600642-1418-4cb4-8617-f2ecc7089e0b" containerID="838cfcc8e26bf3e11489fea7f7801117e2630d787be77ecdb9b5831a033f4a6c" exitCode=2 Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.085586 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"39600642-1418-4cb4-8617-f2ecc7089e0b","Type":"ContainerDied","Data":"838cfcc8e26bf3e11489fea7f7801117e2630d787be77ecdb9b5831a033f4a6c"} Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.109422 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.110244 4558 generic.go:334] "Generic (PLEG): container finished" podID="b65a4359-8f00-46fd-acd3-02731c1e1389" containerID="e0ed3312930991cbf925c1b0e5d1e007a5bd9ddca399ef8faa0ede5c15dcce69" exitCode=0 Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.110371 4558 generic.go:334] "Generic (PLEG): container finished" podID="b65a4359-8f00-46fd-acd3-02731c1e1389" containerID="e83ed111fb3ee3dc0bd3a5d840dc90b5d39eefacfe46dbeaa76ace5b7cd2f7ba" exitCode=143 Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.110613 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" podUID="099193f1-dcf1-4c0a-beeb-fac5f0824cb8" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://c2621d3cab78726f6f01fe56a565e7406820ab18caa470c8455fa8fed99663ce" gracePeriod=30 Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.110891 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-api-5bd54998d4-vhnpd" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.111154 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-5bd54998d4-vhnpd" event={"ID":"b65a4359-8f00-46fd-acd3-02731c1e1389","Type":"ContainerDied","Data":"e0ed3312930991cbf925c1b0e5d1e007a5bd9ddca399ef8faa0ede5c15dcce69"} Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.111253 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-5bd54998d4-vhnpd" event={"ID":"b65a4359-8f00-46fd-acd3-02731c1e1389","Type":"ContainerDied","Data":"e83ed111fb3ee3dc0bd3a5d840dc90b5d39eefacfe46dbeaa76ace5b7cd2f7ba"} Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.111329 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-5bd54998d4-vhnpd" event={"ID":"b65a4359-8f00-46fd-acd3-02731c1e1389","Type":"ContainerDied","Data":"e12a28a96c9d0ba545f44cb57fd8a67b5fc9fb1e38c66d21b4f72e12208b5c07"} Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.111392 4558 scope.go:117] "RemoveContainer" containerID="e0ed3312930991cbf925c1b0e5d1e007a5bd9ddca399ef8faa0ede5c15dcce69" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.113692 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/barbican-api-f45698ff6-c9mtk" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.138246 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.138521 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="c9ec2559-f7c5-4318-9a3b-4544d222ff8e" containerName="glance-log" containerID="cri-o://de49ed715fad547662149d1b7edd446a91ec26a644380970de0736db54838406" gracePeriod=30 Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.138937 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="c9ec2559-f7c5-4318-9a3b-4544d222ff8e" containerName="glance-httpd" containerID="cri-o://0b6ba9125bd1b63702f6dc81feb7c84047318daad92c337b8f2184e3028bcdfb" gracePeriod=30 Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.159808 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-7d7dbcb65b-ttg5w" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.187228 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.217118 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovsdbserver-nb-0" podUID="e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c" containerName="ovsdbserver-nb" containerID="cri-o://b5bed1bf3813bd740cfee2d99ab02e9629a1b8a01adb60f10e7219ffeb492771" gracePeriod=300 Jan 20 17:35:49 crc kubenswrapper[4558]: E0120 17:35:49.217266 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="33b43d304994a43dcb87337ad0c6e08deaf4a2507647b3ba6f756482ec39e182" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.218032 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="a0122392-1c61-4133-b05f-cc6a622abaf9" containerName="nova-metadata-log" containerID="cri-o://d73d4daa0fc88cb61145408b21e4b824ef93c1b31996aa4f8630de0f2161615b" gracePeriod=30 Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.218210 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="a0122392-1c61-4133-b05f-cc6a622abaf9" containerName="nova-metadata-metadata" containerID="cri-o://c5868b91333e3f1c6356506d56295db65bbb6580ddcdb5dfaafeb5e8213164a5" gracePeriod=30 Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.224090 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-dd7c4cf57-h4b8r" Jan 20 17:35:49 crc kubenswrapper[4558]: E0120 17:35:49.224802 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="33b43d304994a43dcb87337ad0c6e08deaf4a2507647b3ba6f756482ec39e182" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.253474 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-api-f45698ff6-c9mtk"] Jan 20 17:35:49 crc kubenswrapper[4558]: E0120 17:35:49.254431 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="33b43d304994a43dcb87337ad0c6e08deaf4a2507647b3ba6f756482ec39e182" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Jan 20 17:35:49 crc kubenswrapper[4558]: E0120 17:35:49.254503 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/ovn-northd-0" podUID="39600642-1418-4cb4-8617-f2ecc7089e0b" containerName="ovn-northd" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.282345 4558 scope.go:117] "RemoveContainer" containerID="e83ed111fb3ee3dc0bd3a5d840dc90b5d39eefacfe46dbeaa76ace5b7cd2f7ba" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.309273 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.309581 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-scheduler-0" podUID="4bd47eac-4090-4fc1-91c4-553ebd84964d" containerName="probe" containerID="cri-o://e1524456582814b161678d93ca35c12161b2ebbf4ff7b988cf13d97b61480ee9" gracePeriod=30 Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.310060 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-scheduler-0" podUID="4bd47eac-4090-4fc1-91c4-553ebd84964d" containerName="cinder-scheduler" containerID="cri-o://8fb64177ca582dd6f77f7548f90b08a99b652fb60686912cfa1ae24e3d6b7093" gracePeriod=30 Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.351867 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.352179 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podUID="9b9c1cf7-0cbb-4bb3-a764-8a8fa446c8a8" containerName="nova-cell1-conductor-conductor" containerID="cri-o://f80866aad1b997b9edf8ab7c0962cd1436981eced3f24d965081cee0a0b92d9a" gracePeriod=30 Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.374017 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.374333 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="833337d4-94d4-4bf6-9d0b-5791b9cc115e" containerName="nova-scheduler-scheduler" 
containerID="cri-o://b138c24e8875ddd5251012c3048e6574ab595b0a17f136458768b1a33c6227cb" gracePeriod=30 Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.395514 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-5577476b98-wgg9z"] Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.395793 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/placement-5577476b98-wgg9z" podUID="06b8e2bb-a133-4a4b-92dd-12cf20ee4300" containerName="placement-log" containerID="cri-o://cb36bad8596228046899d22f8d26fc3bac1a3290c060da3fe4c6163f9ec94ebb" gracePeriod=30 Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.398756 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/placement-5577476b98-wgg9z" podUID="06b8e2bb-a133-4a4b-92dd-12cf20ee4300" containerName="placement-api" containerID="cri-o://1d3016d6f66a0a15d2f54946e32a1f1391b54b015baba44a9003218a3420e045" gracePeriod=30 Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.412637 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.412996 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="5a046f3a-bea5-4aea-8081-1c5994ecdbd9" containerName="nova-api-log" containerID="cri-o://ae2197c02d74d0b7fb6e5576d5e29ab4e2fe41416be7e0d896afd6625b8eed8a" gracePeriod=30 Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.413623 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="5a046f3a-bea5-4aea-8081-1c5994ecdbd9" containerName="nova-api-api" containerID="cri-o://9940485ddf968cc8ec26bafdb2ca664eaf066521e0b63efd3dafb4c251d88a6e" gracePeriod=30 Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.430620 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-api-7cccd8f896-m6rvs"] Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.432360 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-api-7cccd8f896-m6rvs" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.460642 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-api-7cccd8f896-m6rvs"] Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.486972 4558 scope.go:117] "RemoveContainer" containerID="e0ed3312930991cbf925c1b0e5d1e007a5bd9ddca399ef8faa0ede5c15dcce69" Jan 20 17:35:49 crc kubenswrapper[4558]: E0120 17:35:49.501020 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e0ed3312930991cbf925c1b0e5d1e007a5bd9ddca399ef8faa0ede5c15dcce69\": container with ID starting with e0ed3312930991cbf925c1b0e5d1e007a5bd9ddca399ef8faa0ede5c15dcce69 not found: ID does not exist" containerID="e0ed3312930991cbf925c1b0e5d1e007a5bd9ddca399ef8faa0ede5c15dcce69" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.501056 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e0ed3312930991cbf925c1b0e5d1e007a5bd9ddca399ef8faa0ede5c15dcce69"} err="failed to get container status \"e0ed3312930991cbf925c1b0e5d1e007a5bd9ddca399ef8faa0ede5c15dcce69\": rpc error: code = NotFound desc = could not find container \"e0ed3312930991cbf925c1b0e5d1e007a5bd9ddca399ef8faa0ede5c15dcce69\": container with ID starting with e0ed3312930991cbf925c1b0e5d1e007a5bd9ddca399ef8faa0ede5c15dcce69 not found: ID does not exist" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.501078 4558 scope.go:117] "RemoveContainer" containerID="e83ed111fb3ee3dc0bd3a5d840dc90b5d39eefacfe46dbeaa76ace5b7cd2f7ba" Jan 20 17:35:49 crc kubenswrapper[4558]: E0120 17:35:49.502199 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e83ed111fb3ee3dc0bd3a5d840dc90b5d39eefacfe46dbeaa76ace5b7cd2f7ba\": container with ID starting with e83ed111fb3ee3dc0bd3a5d840dc90b5d39eefacfe46dbeaa76ace5b7cd2f7ba not found: ID does not exist" containerID="e83ed111fb3ee3dc0bd3a5d840dc90b5d39eefacfe46dbeaa76ace5b7cd2f7ba" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.502232 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e83ed111fb3ee3dc0bd3a5d840dc90b5d39eefacfe46dbeaa76ace5b7cd2f7ba"} err="failed to get container status \"e83ed111fb3ee3dc0bd3a5d840dc90b5d39eefacfe46dbeaa76ace5b7cd2f7ba\": rpc error: code = NotFound desc = could not find container \"e83ed111fb3ee3dc0bd3a5d840dc90b5d39eefacfe46dbeaa76ace5b7cd2f7ba\": container with ID starting with e83ed111fb3ee3dc0bd3a5d840dc90b5d39eefacfe46dbeaa76ace5b7cd2f7ba not found: ID does not exist" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.502468 4558 scope.go:117] "RemoveContainer" containerID="e0ed3312930991cbf925c1b0e5d1e007a5bd9ddca399ef8faa0ede5c15dcce69" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.509141 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.516392 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/placement-5577476b98-wgg9z" podUID="06b8e2bb-a133-4a4b-92dd-12cf20ee4300" containerName="placement-log" probeResult="failure" output="Get \"https://10.217.0.115:8778/\": EOF" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.516682 4558 prober.go:107] "Probe failed" probeType="Readiness" 
pod="openstack-kuttl-tests/placement-5577476b98-wgg9z" podUID="06b8e2bb-a133-4a4b-92dd-12cf20ee4300" containerName="placement-api" probeResult="failure" output="Get \"https://10.217.0.115:8778/\": EOF" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.516873 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e0ed3312930991cbf925c1b0e5d1e007a5bd9ddca399ef8faa0ede5c15dcce69"} err="failed to get container status \"e0ed3312930991cbf925c1b0e5d1e007a5bd9ddca399ef8faa0ede5c15dcce69\": rpc error: code = NotFound desc = could not find container \"e0ed3312930991cbf925c1b0e5d1e007a5bd9ddca399ef8faa0ede5c15dcce69\": container with ID starting with e0ed3312930991cbf925c1b0e5d1e007a5bd9ddca399ef8faa0ede5c15dcce69 not found: ID does not exist" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.516898 4558 scope.go:117] "RemoveContainer" containerID="e83ed111fb3ee3dc0bd3a5d840dc90b5d39eefacfe46dbeaa76ace5b7cd2f7ba" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.517262 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e83ed111fb3ee3dc0bd3a5d840dc90b5d39eefacfe46dbeaa76ace5b7cd2f7ba"} err="failed to get container status \"e83ed111fb3ee3dc0bd3a5d840dc90b5d39eefacfe46dbeaa76ace5b7cd2f7ba\": rpc error: code = NotFound desc = could not find container \"e83ed111fb3ee3dc0bd3a5d840dc90b5d39eefacfe46dbeaa76ace5b7cd2f7ba\": container with ID starting with e83ed111fb3ee3dc0bd3a5d840dc90b5d39eefacfe46dbeaa76ace5b7cd2f7ba not found: ID does not exist" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.528118 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/placement-67cbb6f96d-89sm2"] Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.531480 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-67cbb6f96d-89sm2" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.546272 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c35b601d-7c76-46a4-9703-7a52ef1bbf73-combined-ca-bundle\") pod \"barbican-api-7cccd8f896-m6rvs\" (UID: \"c35b601d-7c76-46a4-9703-7a52ef1bbf73\") " pod="openstack-kuttl-tests/barbican-api-7cccd8f896-m6rvs" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.546545 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c35b601d-7c76-46a4-9703-7a52ef1bbf73-public-tls-certs\") pod \"barbican-api-7cccd8f896-m6rvs\" (UID: \"c35b601d-7c76-46a4-9703-7a52ef1bbf73\") " pod="openstack-kuttl-tests/barbican-api-7cccd8f896-m6rvs" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.548364 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c35b601d-7c76-46a4-9703-7a52ef1bbf73-config-data\") pod \"barbican-api-7cccd8f896-m6rvs\" (UID: \"c35b601d-7c76-46a4-9703-7a52ef1bbf73\") " pod="openstack-kuttl-tests/barbican-api-7cccd8f896-m6rvs" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.548430 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c35b601d-7c76-46a4-9703-7a52ef1bbf73-config-data-custom\") pod \"barbican-api-7cccd8f896-m6rvs\" (UID: \"c35b601d-7c76-46a4-9703-7a52ef1bbf73\") " pod="openstack-kuttl-tests/barbican-api-7cccd8f896-m6rvs" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.548481 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c35b601d-7c76-46a4-9703-7a52ef1bbf73-logs\") pod \"barbican-api-7cccd8f896-m6rvs\" (UID: \"c35b601d-7c76-46a4-9703-7a52ef1bbf73\") " pod="openstack-kuttl-tests/barbican-api-7cccd8f896-m6rvs" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.548581 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c35b601d-7c76-46a4-9703-7a52ef1bbf73-internal-tls-certs\") pod \"barbican-api-7cccd8f896-m6rvs\" (UID: \"c35b601d-7c76-46a4-9703-7a52ef1bbf73\") " pod="openstack-kuttl-tests/barbican-api-7cccd8f896-m6rvs" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.548810 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7ctxp\" (UniqueName: \"kubernetes.io/projected/c35b601d-7c76-46a4-9703-7a52ef1bbf73-kube-api-access-7ctxp\") pod \"barbican-api-7cccd8f896-m6rvs\" (UID: \"c35b601d-7c76-46a4-9703-7a52ef1bbf73\") " pod="openstack-kuttl-tests/barbican-api-7cccd8f896-m6rvs" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.598630 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.599086 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="91fac67e-69a9-4d88-9136-0d2484ca0dce" containerName="glance-log" 
containerID="cri-o://ff3f57bfd0fc845e414a7a24e7f078b2e8b3a550fdb80d3739b2e449bcf27a81" gracePeriod=30 Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.607041 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="91fac67e-69a9-4d88-9136-0d2484ca0dce" containerName="glance-httpd" containerID="cri-o://61659b3cdbe7237e6c49c005bd8d5ec58e23a181bee16a952d93828bb6852edf" gracePeriod=30 Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.622274 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.651757 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3d958c0e-f259-45a7-9c66-9d2b82c92980-scripts\") pod \"placement-67cbb6f96d-89sm2\" (UID: \"3d958c0e-f259-45a7-9c66-9d2b82c92980\") " pod="openstack-kuttl-tests/placement-67cbb6f96d-89sm2" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.660286 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7ctxp\" (UniqueName: \"kubernetes.io/projected/c35b601d-7c76-46a4-9703-7a52ef1bbf73-kube-api-access-7ctxp\") pod \"barbican-api-7cccd8f896-m6rvs\" (UID: \"c35b601d-7c76-46a4-9703-7a52ef1bbf73\") " pod="openstack-kuttl-tests/barbican-api-7cccd8f896-m6rvs" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.660349 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c35b601d-7c76-46a4-9703-7a52ef1bbf73-combined-ca-bundle\") pod \"barbican-api-7cccd8f896-m6rvs\" (UID: \"c35b601d-7c76-46a4-9703-7a52ef1bbf73\") " pod="openstack-kuttl-tests/barbican-api-7cccd8f896-m6rvs" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.660427 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3d958c0e-f259-45a7-9c66-9d2b82c92980-public-tls-certs\") pod \"placement-67cbb6f96d-89sm2\" (UID: \"3d958c0e-f259-45a7-9c66-9d2b82c92980\") " pod="openstack-kuttl-tests/placement-67cbb6f96d-89sm2" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.660495 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c35b601d-7c76-46a4-9703-7a52ef1bbf73-public-tls-certs\") pod \"barbican-api-7cccd8f896-m6rvs\" (UID: \"c35b601d-7c76-46a4-9703-7a52ef1bbf73\") " pod="openstack-kuttl-tests/barbican-api-7cccd8f896-m6rvs" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.660558 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c35b601d-7c76-46a4-9703-7a52ef1bbf73-config-data\") pod \"barbican-api-7cccd8f896-m6rvs\" (UID: \"c35b601d-7c76-46a4-9703-7a52ef1bbf73\") " pod="openstack-kuttl-tests/barbican-api-7cccd8f896-m6rvs" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.660595 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c35b601d-7c76-46a4-9703-7a52ef1bbf73-config-data-custom\") pod \"barbican-api-7cccd8f896-m6rvs\" (UID: \"c35b601d-7c76-46a4-9703-7a52ef1bbf73\") " pod="openstack-kuttl-tests/barbican-api-7cccd8f896-m6rvs" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 
17:35:49.660632 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c35b601d-7c76-46a4-9703-7a52ef1bbf73-logs\") pod \"barbican-api-7cccd8f896-m6rvs\" (UID: \"c35b601d-7c76-46a4-9703-7a52ef1bbf73\") " pod="openstack-kuttl-tests/barbican-api-7cccd8f896-m6rvs" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.660690 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3d958c0e-f259-45a7-9c66-9d2b82c92980-logs\") pod \"placement-67cbb6f96d-89sm2\" (UID: \"3d958c0e-f259-45a7-9c66-9d2b82c92980\") " pod="openstack-kuttl-tests/placement-67cbb6f96d-89sm2" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.660722 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d958c0e-f259-45a7-9c66-9d2b82c92980-combined-ca-bundle\") pod \"placement-67cbb6f96d-89sm2\" (UID: \"3d958c0e-f259-45a7-9c66-9d2b82c92980\") " pod="openstack-kuttl-tests/placement-67cbb6f96d-89sm2" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.660750 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c35b601d-7c76-46a4-9703-7a52ef1bbf73-internal-tls-certs\") pod \"barbican-api-7cccd8f896-m6rvs\" (UID: \"c35b601d-7c76-46a4-9703-7a52ef1bbf73\") " pod="openstack-kuttl-tests/barbican-api-7cccd8f896-m6rvs" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.664115 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c35b601d-7c76-46a4-9703-7a52ef1bbf73-logs\") pod \"barbican-api-7cccd8f896-m6rvs\" (UID: \"c35b601d-7c76-46a4-9703-7a52ef1bbf73\") " pod="openstack-kuttl-tests/barbican-api-7cccd8f896-m6rvs" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.665213 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-67cbb6f96d-89sm2"] Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.665410 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d958c0e-f259-45a7-9c66-9d2b82c92980-config-data\") pod \"placement-67cbb6f96d-89sm2\" (UID: \"3d958c0e-f259-45a7-9c66-9d2b82c92980\") " pod="openstack-kuttl-tests/placement-67cbb6f96d-89sm2" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.665527 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3d958c0e-f259-45a7-9c66-9d2b82c92980-internal-tls-certs\") pod \"placement-67cbb6f96d-89sm2\" (UID: \"3d958c0e-f259-45a7-9c66-9d2b82c92980\") " pod="openstack-kuttl-tests/placement-67cbb6f96d-89sm2" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.665621 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fcxct\" (UniqueName: \"kubernetes.io/projected/3d958c0e-f259-45a7-9c66-9d2b82c92980-kube-api-access-fcxct\") pod \"placement-67cbb6f96d-89sm2\" (UID: \"3d958c0e-f259-45a7-9c66-9d2b82c92980\") " pod="openstack-kuttl-tests/placement-67cbb6f96d-89sm2" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.680834 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/c35b601d-7c76-46a4-9703-7a52ef1bbf73-internal-tls-certs\") pod \"barbican-api-7cccd8f896-m6rvs\" (UID: \"c35b601d-7c76-46a4-9703-7a52ef1bbf73\") " pod="openstack-kuttl-tests/barbican-api-7cccd8f896-m6rvs" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.691466 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c35b601d-7c76-46a4-9703-7a52ef1bbf73-combined-ca-bundle\") pod \"barbican-api-7cccd8f896-m6rvs\" (UID: \"c35b601d-7c76-46a4-9703-7a52ef1bbf73\") " pod="openstack-kuttl-tests/barbican-api-7cccd8f896-m6rvs" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.692790 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c35b601d-7c76-46a4-9703-7a52ef1bbf73-config-data\") pod \"barbican-api-7cccd8f896-m6rvs\" (UID: \"c35b601d-7c76-46a4-9703-7a52ef1bbf73\") " pod="openstack-kuttl-tests/barbican-api-7cccd8f896-m6rvs" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.695716 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c35b601d-7c76-46a4-9703-7a52ef1bbf73-config-data-custom\") pod \"barbican-api-7cccd8f896-m6rvs\" (UID: \"c35b601d-7c76-46a4-9703-7a52ef1bbf73\") " pod="openstack-kuttl-tests/barbican-api-7cccd8f896-m6rvs" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.698234 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.698562 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-api-0" podUID="e228514c-6d22-4527-b365-913e3ea3cfdb" containerName="cinder-api-log" containerID="cri-o://8ac7d16d0f78462693261b691f194857cebebf572d8c74e83499e3ff412581c0" gracePeriod=30 Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.699236 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-api-0" podUID="e228514c-6d22-4527-b365-913e3ea3cfdb" containerName="cinder-api" containerID="cri-o://53088ffac0cd61c3f88675b4eee842375a2868c6b2cbeba471fe06c8d7634d43" gracePeriod=30 Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.702199 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c35b601d-7c76-46a4-9703-7a52ef1bbf73-public-tls-certs\") pod \"barbican-api-7cccd8f896-m6rvs\" (UID: \"c35b601d-7c76-46a4-9703-7a52ef1bbf73\") " pod="openstack-kuttl-tests/barbican-api-7cccd8f896-m6rvs" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.717120 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack-kuttl-tests/cinder-api-0" podUID="e228514c-6d22-4527-b365-913e3ea3cfdb" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.0.117:8776/healthcheck\": EOF" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.720793 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7ctxp\" (UniqueName: \"kubernetes.io/projected/c35b601d-7c76-46a4-9703-7a52ef1bbf73-kube-api-access-7ctxp\") pod \"barbican-api-7cccd8f896-m6rvs\" (UID: \"c35b601d-7c76-46a4-9703-7a52ef1bbf73\") " pod="openstack-kuttl-tests/barbican-api-7cccd8f896-m6rvs" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.727304 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.727589 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="f4d77869-9132-4170-b50d-88389a33c597" containerName="nova-cell0-conductor-conductor" containerID="cri-o://96c4e3a692bc0dac49e1c6d07e764ff13bc464c6bdc3c29e2e82a3d65c290531" gracePeriod=30 Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.744479 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-api-5bd54998d4-vhnpd"] Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.777389 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-api-5bd54998d4-vhnpd"] Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.782800 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3d958c0e-f259-45a7-9c66-9d2b82c92980-public-tls-certs\") pod \"placement-67cbb6f96d-89sm2\" (UID: \"3d958c0e-f259-45a7-9c66-9d2b82c92980\") " pod="openstack-kuttl-tests/placement-67cbb6f96d-89sm2" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.783060 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3d958c0e-f259-45a7-9c66-9d2b82c92980-logs\") pod \"placement-67cbb6f96d-89sm2\" (UID: \"3d958c0e-f259-45a7-9c66-9d2b82c92980\") " pod="openstack-kuttl-tests/placement-67cbb6f96d-89sm2" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.783095 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d958c0e-f259-45a7-9c66-9d2b82c92980-combined-ca-bundle\") pod \"placement-67cbb6f96d-89sm2\" (UID: \"3d958c0e-f259-45a7-9c66-9d2b82c92980\") " pod="openstack-kuttl-tests/placement-67cbb6f96d-89sm2" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.783190 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d958c0e-f259-45a7-9c66-9d2b82c92980-config-data\") pod \"placement-67cbb6f96d-89sm2\" (UID: \"3d958c0e-f259-45a7-9c66-9d2b82c92980\") " pod="openstack-kuttl-tests/placement-67cbb6f96d-89sm2" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.783217 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3d958c0e-f259-45a7-9c66-9d2b82c92980-internal-tls-certs\") pod \"placement-67cbb6f96d-89sm2\" (UID: \"3d958c0e-f259-45a7-9c66-9d2b82c92980\") " pod="openstack-kuttl-tests/placement-67cbb6f96d-89sm2" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.783239 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fcxct\" (UniqueName: \"kubernetes.io/projected/3d958c0e-f259-45a7-9c66-9d2b82c92980-kube-api-access-fcxct\") pod \"placement-67cbb6f96d-89sm2\" (UID: \"3d958c0e-f259-45a7-9c66-9d2b82c92980\") " pod="openstack-kuttl-tests/placement-67cbb6f96d-89sm2" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.783337 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3d958c0e-f259-45a7-9c66-9d2b82c92980-scripts\") pod \"placement-67cbb6f96d-89sm2\" (UID: \"3d958c0e-f259-45a7-9c66-9d2b82c92980\") " pod="openstack-kuttl-tests/placement-67cbb6f96d-89sm2" Jan 
20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.786270 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3d958c0e-f259-45a7-9c66-9d2b82c92980-logs\") pod \"placement-67cbb6f96d-89sm2\" (UID: \"3d958c0e-f259-45a7-9c66-9d2b82c92980\") " pod="openstack-kuttl-tests/placement-67cbb6f96d-89sm2" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.790610 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-6874444f77-w92bj"] Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.790856 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/keystone-6874444f77-w92bj" podUID="a4d26c7d-095d-4f3d-b6cc-c270961e9794" containerName="keystone-api" containerID="cri-o://964d0ad276f99dd9fa9d8522022840ffe7ce1002dc0bd50778cbc03decd4341c" gracePeriod=30 Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.792795 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d958c0e-f259-45a7-9c66-9d2b82c92980-combined-ca-bundle\") pod \"placement-67cbb6f96d-89sm2\" (UID: \"3d958c0e-f259-45a7-9c66-9d2b82c92980\") " pod="openstack-kuttl-tests/placement-67cbb6f96d-89sm2" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.793249 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3d958c0e-f259-45a7-9c66-9d2b82c92980-internal-tls-certs\") pod \"placement-67cbb6f96d-89sm2\" (UID: \"3d958c0e-f259-45a7-9c66-9d2b82c92980\") " pod="openstack-kuttl-tests/placement-67cbb6f96d-89sm2" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.797491 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d958c0e-f259-45a7-9c66-9d2b82c92980-config-data\") pod \"placement-67cbb6f96d-89sm2\" (UID: \"3d958c0e-f259-45a7-9c66-9d2b82c92980\") " pod="openstack-kuttl-tests/placement-67cbb6f96d-89sm2" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.800683 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3d958c0e-f259-45a7-9c66-9d2b82c92980-scripts\") pod \"placement-67cbb6f96d-89sm2\" (UID: \"3d958c0e-f259-45a7-9c66-9d2b82c92980\") " pod="openstack-kuttl-tests/placement-67cbb6f96d-89sm2" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.800746 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-6865f4c68b-s7dm7"] Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.801008 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-6865f4c68b-s7dm7" podUID="b6b873be-e034-4d85-b131-548eff8013ed" containerName="neutron-api" containerID="cri-o://ab9f91ee996ebed5befa70a8ec573471b2011a4fc0f88f09cde407f6a7347062" gracePeriod=30 Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.801734 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-6865f4c68b-s7dm7" podUID="b6b873be-e034-4d85-b131-548eff8013ed" containerName="neutron-httpd" containerID="cri-o://813d6d3e667f27b128b13d9ed3b5e2eb7fbd264b4a64cde9c0a7a2ffc3eabf31" gracePeriod=30 Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.810725 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-api-7cccd8f896-m6rvs" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.812788 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3d958c0e-f259-45a7-9c66-9d2b82c92980-public-tls-certs\") pod \"placement-67cbb6f96d-89sm2\" (UID: \"3d958c0e-f259-45a7-9c66-9d2b82c92980\") " pod="openstack-kuttl-tests/placement-67cbb6f96d-89sm2" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.814663 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.815671 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovsdbserver-sb-0" podUID="532ef149-f32a-4b6e-8d0c-458a04952d34" containerName="openstack-network-exporter" containerID="cri-o://93d74d768c827cf81147355c461a6f8bd023a18e684bd30dab48966103e6de7e" gracePeriod=300 Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.820343 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fcxct\" (UniqueName: \"kubernetes.io/projected/3d958c0e-f259-45a7-9c66-9d2b82c92980-kube-api-access-fcxct\") pod \"placement-67cbb6f96d-89sm2\" (UID: \"3d958c0e-f259-45a7-9c66-9d2b82c92980\") " pod="openstack-kuttl-tests/placement-67cbb6f96d-89sm2" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.831235 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/keystone-6874444f77-w92bj" podUID="a4d26c7d-095d-4f3d-b6cc-c270961e9794" containerName="keystone-api" probeResult="failure" output="Get \"https://10.217.0.116:5000/v3\": EOF" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.833797 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-77fd7f9ff9-zs7mz"] Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.835261 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-77fd7f9ff9-zs7mz" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.840055 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-77fd7f9ff9-zs7mz"] Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.845385 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-7749bf6d58-dwpbl"] Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.869563 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-7749bf6d58-dwpbl" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.886235 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-7749bf6d58-dwpbl"] Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.887856 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kdhw5\" (UniqueName: \"kubernetes.io/projected/9bca6361-b1d7-4953-b4bd-2bbda0f90581-kube-api-access-kdhw5\") pod \"neutron-7749bf6d58-dwpbl\" (UID: \"9bca6361-b1d7-4953-b4bd-2bbda0f90581\") " pod="openstack-kuttl-tests/neutron-7749bf6d58-dwpbl" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.887883 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/89eb3a4c-0235-4583-ad0a-d015228d8d6b-public-tls-certs\") pod \"keystone-77fd7f9ff9-zs7mz\" (UID: \"89eb3a4c-0235-4583-ad0a-d015228d8d6b\") " pod="openstack-kuttl-tests/keystone-77fd7f9ff9-zs7mz" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.887907 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9bca6361-b1d7-4953-b4bd-2bbda0f90581-combined-ca-bundle\") pod \"neutron-7749bf6d58-dwpbl\" (UID: \"9bca6361-b1d7-4953-b4bd-2bbda0f90581\") " pod="openstack-kuttl-tests/neutron-7749bf6d58-dwpbl" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.887942 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/9bca6361-b1d7-4953-b4bd-2bbda0f90581-httpd-config\") pod \"neutron-7749bf6d58-dwpbl\" (UID: \"9bca6361-b1d7-4953-b4bd-2bbda0f90581\") " pod="openstack-kuttl-tests/neutron-7749bf6d58-dwpbl" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.887959 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/89eb3a4c-0235-4583-ad0a-d015228d8d6b-credential-keys\") pod \"keystone-77fd7f9ff9-zs7mz\" (UID: \"89eb3a4c-0235-4583-ad0a-d015228d8d6b\") " pod="openstack-kuttl-tests/keystone-77fd7f9ff9-zs7mz" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.887980 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9bca6361-b1d7-4953-b4bd-2bbda0f90581-internal-tls-certs\") pod \"neutron-7749bf6d58-dwpbl\" (UID: \"9bca6361-b1d7-4953-b4bd-2bbda0f90581\") " pod="openstack-kuttl-tests/neutron-7749bf6d58-dwpbl" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.888005 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gbhsk\" (UniqueName: \"kubernetes.io/projected/89eb3a4c-0235-4583-ad0a-d015228d8d6b-kube-api-access-gbhsk\") pod \"keystone-77fd7f9ff9-zs7mz\" (UID: \"89eb3a4c-0235-4583-ad0a-d015228d8d6b\") " pod="openstack-kuttl-tests/keystone-77fd7f9ff9-zs7mz" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.888027 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/89eb3a4c-0235-4583-ad0a-d015228d8d6b-config-data\") pod \"keystone-77fd7f9ff9-zs7mz\" (UID: \"89eb3a4c-0235-4583-ad0a-d015228d8d6b\") " 
pod="openstack-kuttl-tests/keystone-77fd7f9ff9-zs7mz" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.888053 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/89eb3a4c-0235-4583-ad0a-d015228d8d6b-internal-tls-certs\") pod \"keystone-77fd7f9ff9-zs7mz\" (UID: \"89eb3a4c-0235-4583-ad0a-d015228d8d6b\") " pod="openstack-kuttl-tests/keystone-77fd7f9ff9-zs7mz" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.888082 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/89eb3a4c-0235-4583-ad0a-d015228d8d6b-scripts\") pod \"keystone-77fd7f9ff9-zs7mz\" (UID: \"89eb3a4c-0235-4583-ad0a-d015228d8d6b\") " pod="openstack-kuttl-tests/keystone-77fd7f9ff9-zs7mz" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.888099 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/9bca6361-b1d7-4953-b4bd-2bbda0f90581-config\") pod \"neutron-7749bf6d58-dwpbl\" (UID: \"9bca6361-b1d7-4953-b4bd-2bbda0f90581\") " pod="openstack-kuttl-tests/neutron-7749bf6d58-dwpbl" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.888120 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/9bca6361-b1d7-4953-b4bd-2bbda0f90581-ovndb-tls-certs\") pod \"neutron-7749bf6d58-dwpbl\" (UID: \"9bca6361-b1d7-4953-b4bd-2bbda0f90581\") " pod="openstack-kuttl-tests/neutron-7749bf6d58-dwpbl" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.888150 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9bca6361-b1d7-4953-b4bd-2bbda0f90581-public-tls-certs\") pod \"neutron-7749bf6d58-dwpbl\" (UID: \"9bca6361-b1d7-4953-b4bd-2bbda0f90581\") " pod="openstack-kuttl-tests/neutron-7749bf6d58-dwpbl" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.888193 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89eb3a4c-0235-4583-ad0a-d015228d8d6b-combined-ca-bundle\") pod \"keystone-77fd7f9ff9-zs7mz\" (UID: \"89eb3a4c-0235-4583-ad0a-d015228d8d6b\") " pod="openstack-kuttl-tests/keystone-77fd7f9ff9-zs7mz" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.888213 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/89eb3a4c-0235-4583-ad0a-d015228d8d6b-fernet-keys\") pod \"keystone-77fd7f9ff9-zs7mz\" (UID: \"89eb3a4c-0235-4583-ad0a-d015228d8d6b\") " pod="openstack-kuttl-tests/keystone-77fd7f9ff9-zs7mz" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.913100 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/neutron-6865f4c68b-s7dm7" podUID="b6b873be-e034-4d85-b131-548eff8013ed" containerName="neutron-httpd" probeResult="failure" output="Get \"https://10.217.0.124:9696/\": read tcp 10.217.0.2:33366->10.217.0.124:9696: read: connection reset by peer" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.931094 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovsdbserver-sb-0" podUID="532ef149-f32a-4b6e-8d0c-458a04952d34" 
containerName="ovsdbserver-sb" containerID="cri-o://7b466bcc44125b12b3728f5c2f0dd6ae9956b945bf663448fef7df702634964d" gracePeriod=300 Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.950829 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/openstack-galera-0" podUID="b1ce826a-c52c-42c6-8b67-6074b78c9fb7" containerName="galera" containerID="cri-o://1115d7fa22eeb3cf4d6e899963fd5f9d10ada47b94c34b2d601965731bba164c" gracePeriod=30 Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.973575 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-67cbb6f96d-89sm2" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.989059 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/89eb3a4c-0235-4583-ad0a-d015228d8d6b-public-tls-certs\") pod \"keystone-77fd7f9ff9-zs7mz\" (UID: \"89eb3a4c-0235-4583-ad0a-d015228d8d6b\") " pod="openstack-kuttl-tests/keystone-77fd7f9ff9-zs7mz" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.989101 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kdhw5\" (UniqueName: \"kubernetes.io/projected/9bca6361-b1d7-4953-b4bd-2bbda0f90581-kube-api-access-kdhw5\") pod \"neutron-7749bf6d58-dwpbl\" (UID: \"9bca6361-b1d7-4953-b4bd-2bbda0f90581\") " pod="openstack-kuttl-tests/neutron-7749bf6d58-dwpbl" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.989125 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9bca6361-b1d7-4953-b4bd-2bbda0f90581-combined-ca-bundle\") pod \"neutron-7749bf6d58-dwpbl\" (UID: \"9bca6361-b1d7-4953-b4bd-2bbda0f90581\") " pod="openstack-kuttl-tests/neutron-7749bf6d58-dwpbl" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.989149 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/9bca6361-b1d7-4953-b4bd-2bbda0f90581-httpd-config\") pod \"neutron-7749bf6d58-dwpbl\" (UID: \"9bca6361-b1d7-4953-b4bd-2bbda0f90581\") " pod="openstack-kuttl-tests/neutron-7749bf6d58-dwpbl" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.989178 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/89eb3a4c-0235-4583-ad0a-d015228d8d6b-credential-keys\") pod \"keystone-77fd7f9ff9-zs7mz\" (UID: \"89eb3a4c-0235-4583-ad0a-d015228d8d6b\") " pod="openstack-kuttl-tests/keystone-77fd7f9ff9-zs7mz" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.989198 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9bca6361-b1d7-4953-b4bd-2bbda0f90581-internal-tls-certs\") pod \"neutron-7749bf6d58-dwpbl\" (UID: \"9bca6361-b1d7-4953-b4bd-2bbda0f90581\") " pod="openstack-kuttl-tests/neutron-7749bf6d58-dwpbl" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.989222 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gbhsk\" (UniqueName: \"kubernetes.io/projected/89eb3a4c-0235-4583-ad0a-d015228d8d6b-kube-api-access-gbhsk\") pod \"keystone-77fd7f9ff9-zs7mz\" (UID: \"89eb3a4c-0235-4583-ad0a-d015228d8d6b\") " pod="openstack-kuttl-tests/keystone-77fd7f9ff9-zs7mz" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.989244 4558 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/89eb3a4c-0235-4583-ad0a-d015228d8d6b-config-data\") pod \"keystone-77fd7f9ff9-zs7mz\" (UID: \"89eb3a4c-0235-4583-ad0a-d015228d8d6b\") " pod="openstack-kuttl-tests/keystone-77fd7f9ff9-zs7mz" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.989266 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/89eb3a4c-0235-4583-ad0a-d015228d8d6b-internal-tls-certs\") pod \"keystone-77fd7f9ff9-zs7mz\" (UID: \"89eb3a4c-0235-4583-ad0a-d015228d8d6b\") " pod="openstack-kuttl-tests/keystone-77fd7f9ff9-zs7mz" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.989289 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/89eb3a4c-0235-4583-ad0a-d015228d8d6b-scripts\") pod \"keystone-77fd7f9ff9-zs7mz\" (UID: \"89eb3a4c-0235-4583-ad0a-d015228d8d6b\") " pod="openstack-kuttl-tests/keystone-77fd7f9ff9-zs7mz" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.989318 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/9bca6361-b1d7-4953-b4bd-2bbda0f90581-config\") pod \"neutron-7749bf6d58-dwpbl\" (UID: \"9bca6361-b1d7-4953-b4bd-2bbda0f90581\") " pod="openstack-kuttl-tests/neutron-7749bf6d58-dwpbl" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.989336 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/9bca6361-b1d7-4953-b4bd-2bbda0f90581-ovndb-tls-certs\") pod \"neutron-7749bf6d58-dwpbl\" (UID: \"9bca6361-b1d7-4953-b4bd-2bbda0f90581\") " pod="openstack-kuttl-tests/neutron-7749bf6d58-dwpbl" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.989367 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9bca6361-b1d7-4953-b4bd-2bbda0f90581-public-tls-certs\") pod \"neutron-7749bf6d58-dwpbl\" (UID: \"9bca6361-b1d7-4953-b4bd-2bbda0f90581\") " pod="openstack-kuttl-tests/neutron-7749bf6d58-dwpbl" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.989393 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89eb3a4c-0235-4583-ad0a-d015228d8d6b-combined-ca-bundle\") pod \"keystone-77fd7f9ff9-zs7mz\" (UID: \"89eb3a4c-0235-4583-ad0a-d015228d8d6b\") " pod="openstack-kuttl-tests/keystone-77fd7f9ff9-zs7mz" Jan 20 17:35:49 crc kubenswrapper[4558]: I0120 17:35:49.989411 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/89eb3a4c-0235-4583-ad0a-d015228d8d6b-fernet-keys\") pod \"keystone-77fd7f9ff9-zs7mz\" (UID: \"89eb3a4c-0235-4583-ad0a-d015228d8d6b\") " pod="openstack-kuttl-tests/keystone-77fd7f9ff9-zs7mz" Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.003752 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/89eb3a4c-0235-4583-ad0a-d015228d8d6b-credential-keys\") pod \"keystone-77fd7f9ff9-zs7mz\" (UID: \"89eb3a4c-0235-4583-ad0a-d015228d8d6b\") " pod="openstack-kuttl-tests/keystone-77fd7f9ff9-zs7mz" Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.003991 4558 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/89eb3a4c-0235-4583-ad0a-d015228d8d6b-fernet-keys\") pod \"keystone-77fd7f9ff9-zs7mz\" (UID: \"89eb3a4c-0235-4583-ad0a-d015228d8d6b\") " pod="openstack-kuttl-tests/keystone-77fd7f9ff9-zs7mz" Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.006886 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/9bca6361-b1d7-4953-b4bd-2bbda0f90581-config\") pod \"neutron-7749bf6d58-dwpbl\" (UID: \"9bca6361-b1d7-4953-b4bd-2bbda0f90581\") " pod="openstack-kuttl-tests/neutron-7749bf6d58-dwpbl" Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.009122 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9bca6361-b1d7-4953-b4bd-2bbda0f90581-combined-ca-bundle\") pod \"neutron-7749bf6d58-dwpbl\" (UID: \"9bca6361-b1d7-4953-b4bd-2bbda0f90581\") " pod="openstack-kuttl-tests/neutron-7749bf6d58-dwpbl" Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.016490 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/89eb3a4c-0235-4583-ad0a-d015228d8d6b-internal-tls-certs\") pod \"keystone-77fd7f9ff9-zs7mz\" (UID: \"89eb3a4c-0235-4583-ad0a-d015228d8d6b\") " pod="openstack-kuttl-tests/keystone-77fd7f9ff9-zs7mz" Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.016945 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gbhsk\" (UniqueName: \"kubernetes.io/projected/89eb3a4c-0235-4583-ad0a-d015228d8d6b-kube-api-access-gbhsk\") pod \"keystone-77fd7f9ff9-zs7mz\" (UID: \"89eb3a4c-0235-4583-ad0a-d015228d8d6b\") " pod="openstack-kuttl-tests/keystone-77fd7f9ff9-zs7mz" Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.018455 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/9bca6361-b1d7-4953-b4bd-2bbda0f90581-httpd-config\") pod \"neutron-7749bf6d58-dwpbl\" (UID: \"9bca6361-b1d7-4953-b4bd-2bbda0f90581\") " pod="openstack-kuttl-tests/neutron-7749bf6d58-dwpbl" Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.020029 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9bca6361-b1d7-4953-b4bd-2bbda0f90581-public-tls-certs\") pod \"neutron-7749bf6d58-dwpbl\" (UID: \"9bca6361-b1d7-4953-b4bd-2bbda0f90581\") " pod="openstack-kuttl-tests/neutron-7749bf6d58-dwpbl" Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.020303 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/89eb3a4c-0235-4583-ad0a-d015228d8d6b-scripts\") pod \"keystone-77fd7f9ff9-zs7mz\" (UID: \"89eb3a4c-0235-4583-ad0a-d015228d8d6b\") " pod="openstack-kuttl-tests/keystone-77fd7f9ff9-zs7mz" Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.021314 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/89eb3a4c-0235-4583-ad0a-d015228d8d6b-public-tls-certs\") pod \"keystone-77fd7f9ff9-zs7mz\" (UID: \"89eb3a4c-0235-4583-ad0a-d015228d8d6b\") " pod="openstack-kuttl-tests/keystone-77fd7f9ff9-zs7mz" Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.023004 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/9bca6361-b1d7-4953-b4bd-2bbda0f90581-internal-tls-certs\") pod \"neutron-7749bf6d58-dwpbl\" (UID: \"9bca6361-b1d7-4953-b4bd-2bbda0f90581\") " pod="openstack-kuttl-tests/neutron-7749bf6d58-dwpbl" Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.023316 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/9bca6361-b1d7-4953-b4bd-2bbda0f90581-ovndb-tls-certs\") pod \"neutron-7749bf6d58-dwpbl\" (UID: \"9bca6361-b1d7-4953-b4bd-2bbda0f90581\") " pod="openstack-kuttl-tests/neutron-7749bf6d58-dwpbl" Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.036221 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89eb3a4c-0235-4583-ad0a-d015228d8d6b-combined-ca-bundle\") pod \"keystone-77fd7f9ff9-zs7mz\" (UID: \"89eb3a4c-0235-4583-ad0a-d015228d8d6b\") " pod="openstack-kuttl-tests/keystone-77fd7f9ff9-zs7mz" Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.037819 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/89eb3a4c-0235-4583-ad0a-d015228d8d6b-config-data\") pod \"keystone-77fd7f9ff9-zs7mz\" (UID: \"89eb3a4c-0235-4583-ad0a-d015228d8d6b\") " pod="openstack-kuttl-tests/keystone-77fd7f9ff9-zs7mz" Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.039032 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kdhw5\" (UniqueName: \"kubernetes.io/projected/9bca6361-b1d7-4953-b4bd-2bbda0f90581-kube-api-access-kdhw5\") pod \"neutron-7749bf6d58-dwpbl\" (UID: \"9bca6361-b1d7-4953-b4bd-2bbda0f90581\") " pod="openstack-kuttl-tests/neutron-7749bf6d58-dwpbl" Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.082788 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-worker-dd7c4cf57-h4b8r"] Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.083373 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/openstack-cell1-galera-0" podUID="db6ef9f1-8a92-4c74-b476-a24b66585268" containerName="galera" containerID="cri-o://6500342a069cbdc40f29bb0f7540a921249de5ac706a7008df5f940aa1c75eb9" gracePeriod=30 Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.154899 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"bbb20173-fee8-4323-afbb-f2bbbec3e978","Type":"ContainerStarted","Data":"3ff2458b3f3311a509b35bc6e23de18f9f9253176e2e50da406ead896867e39d"} Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.156252 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.157737 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-sb-0_532ef149-f32a-4b6e-8d0c-458a04952d34/ovsdbserver-sb/0.log" Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.157766 4558 generic.go:334] "Generic (PLEG): container finished" podID="532ef149-f32a-4b6e-8d0c-458a04952d34" containerID="93d74d768c827cf81147355c461a6f8bd023a18e684bd30dab48966103e6de7e" exitCode=2 Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.157778 4558 generic.go:334] "Generic (PLEG): container finished" podID="532ef149-f32a-4b6e-8d0c-458a04952d34" containerID="7b466bcc44125b12b3728f5c2f0dd6ae9956b945bf663448fef7df702634964d" exitCode=143 Jan 20 17:35:50 crc 
kubenswrapper[4558]: I0120 17:35:50.157823 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"532ef149-f32a-4b6e-8d0c-458a04952d34","Type":"ContainerDied","Data":"93d74d768c827cf81147355c461a6f8bd023a18e684bd30dab48966103e6de7e"} Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.157850 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"532ef149-f32a-4b6e-8d0c-458a04952d34","Type":"ContainerDied","Data":"7b466bcc44125b12b3728f5c2f0dd6ae9956b945bf663448fef7df702634964d"} Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.175622 4558 generic.go:334] "Generic (PLEG): container finished" podID="06b8e2bb-a133-4a4b-92dd-12cf20ee4300" containerID="cb36bad8596228046899d22f8d26fc3bac1a3290c060da3fe4c6163f9ec94ebb" exitCode=143 Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.175677 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-5577476b98-wgg9z" event={"ID":"06b8e2bb-a133-4a4b-92dd-12cf20ee4300","Type":"ContainerDied","Data":"cb36bad8596228046899d22f8d26fc3bac1a3290c060da3fe4c6163f9ec94ebb"} Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.195336 4558 generic.go:334] "Generic (PLEG): container finished" podID="4bd47eac-4090-4fc1-91c4-553ebd84964d" containerID="e1524456582814b161678d93ca35c12161b2ebbf4ff7b988cf13d97b61480ee9" exitCode=0 Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.195426 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"4bd47eac-4090-4fc1-91c4-553ebd84964d","Type":"ContainerDied","Data":"e1524456582814b161678d93ca35c12161b2ebbf4ff7b988cf13d97b61480ee9"} Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.205641 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=3.373447665 podStartE2EDuration="7.205627415s" podCreationTimestamp="2026-01-20 17:35:43 +0000 UTC" firstStartedPulling="2026-01-20 17:35:44.823895735 +0000 UTC m=+3238.584233701" lastFinishedPulling="2026-01-20 17:35:48.656075484 +0000 UTC m=+3242.416413451" observedRunningTime="2026-01-20 17:35:50.197531402 +0000 UTC m=+3243.957869369" watchObservedRunningTime="2026-01-20 17:35:50.205627415 +0000 UTC m=+3243.965965381" Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.217763 4558 generic.go:334] "Generic (PLEG): container finished" podID="099193f1-dcf1-4c0a-beeb-fac5f0824cb8" containerID="c2621d3cab78726f6f01fe56a565e7406820ab18caa470c8455fa8fed99663ce" exitCode=0 Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.217824 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"099193f1-dcf1-4c0a-beeb-fac5f0824cb8","Type":"ContainerDied","Data":"c2621d3cab78726f6f01fe56a565e7406820ab18caa470c8455fa8fed99663ce"} Jan 20 17:35:50 crc kubenswrapper[4558]: E0120 17:35:50.232592 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="96c4e3a692bc0dac49e1c6d07e764ff13bc464c6bdc3c29e2e82a3d65c290531" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.232752 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:35:50 crc 
kubenswrapper[4558]: I0120 17:35:50.233018 4558 generic.go:334] "Generic (PLEG): container finished" podID="a0122392-1c61-4133-b05f-cc6a622abaf9" containerID="d73d4daa0fc88cb61145408b21e4b824ef93c1b31996aa4f8630de0f2161615b" exitCode=143 Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.233054 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"a0122392-1c61-4133-b05f-cc6a622abaf9","Type":"ContainerDied","Data":"d73d4daa0fc88cb61145408b21e4b824ef93c1b31996aa4f8630de0f2161615b"} Jan 20 17:35:50 crc kubenswrapper[4558]: E0120 17:35:50.277564 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="96c4e3a692bc0dac49e1c6d07e764ff13bc464c6bdc3c29e2e82a3d65c290531" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.284401 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-77fd7f9ff9-zs7mz" Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.310762 4558 generic.go:334] "Generic (PLEG): container finished" podID="e228514c-6d22-4527-b365-913e3ea3cfdb" containerID="8ac7d16d0f78462693261b691f194857cebebf572d8c74e83499e3ff412581c0" exitCode=143 Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.310869 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"e228514c-6d22-4527-b365-913e3ea3cfdb","Type":"ContainerDied","Data":"8ac7d16d0f78462693261b691f194857cebebf572d8c74e83499e3ff412581c0"} Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.326231 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-7d7dbcb65b-ttg5w"] Jan 20 17:35:50 crc kubenswrapper[4558]: E0120 17:35:50.347293 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="96c4e3a692bc0dac49e1c6d07e764ff13bc464c6bdc3c29e2e82a3d65c290531" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:35:50 crc kubenswrapper[4558]: E0120 17:35:50.347348 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="f4d77869-9132-4170-b50d-88389a33c597" containerName="nova-cell0-conductor-conductor" Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.347504 4558 generic.go:334] "Generic (PLEG): container finished" podID="5391541f-5057-4d20-ae1e-bbc88e6b33a4" containerID="4f3f925041f116a55af0c01739c0b06976097986bc12562c41828bd96d52c5f4" exitCode=0 Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.347554 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"5391541f-5057-4d20-ae1e-bbc88e6b33a4","Type":"ContainerDied","Data":"4f3f925041f116a55af0c01739c0b06976097986bc12562c41828bd96d52c5f4"} Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.390938 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-7749bf6d58-dwpbl" Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.411230 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-7d7dbcb65b-ttg5w"] Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.455398 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-worker-dd7c4cf57-h4b8r"] Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.511777 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-6d4445bf46-57dqj"] Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.513424 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-6d4445bf46-57dqj" Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.525877 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-nb-0_e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c/ovsdbserver-nb/0.log" Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.525920 4558 generic.go:334] "Generic (PLEG): container finished" podID="e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c" containerID="ea31e1aad28c22c0366c0c296a61f01044fbf0caf04c9f039be12136bde4291d" exitCode=2 Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.525965 4558 generic.go:334] "Generic (PLEG): container finished" podID="e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c" containerID="b5bed1bf3813bd740cfee2d99ab02e9629a1b8a01adb60f10e7219ffeb492771" exitCode=143 Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.526089 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c","Type":"ContainerDied","Data":"ea31e1aad28c22c0366c0c296a61f01044fbf0caf04c9f039be12136bde4291d"} Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.526141 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c","Type":"ContainerDied","Data":"b5bed1bf3813bd740cfee2d99ab02e9629a1b8a01adb60f10e7219ffeb492771"} Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.563857 4558 generic.go:334] "Generic (PLEG): container finished" podID="b6b873be-e034-4d85-b131-548eff8013ed" containerID="813d6d3e667f27b128b13d9ed3b5e2eb7fbd264b4a64cde9c0a7a2ffc3eabf31" exitCode=0 Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.563932 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-6865f4c68b-s7dm7" event={"ID":"b6b873be-e034-4d85-b131-548eff8013ed","Type":"ContainerDied","Data":"813d6d3e667f27b128b13d9ed3b5e2eb7fbd264b4a64cde9c0a7a2ffc3eabf31"} Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.609434 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11d1aa99-639c-451a-876e-59de6098e407-combined-ca-bundle\") pod \"barbican-keystone-listener-6d4445bf46-57dqj\" (UID: \"11d1aa99-639c-451a-876e-59de6098e407\") " pod="openstack-kuttl-tests/barbican-keystone-listener-6d4445bf46-57dqj" Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.609499 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/11d1aa99-639c-451a-876e-59de6098e407-config-data\") pod 
\"barbican-keystone-listener-6d4445bf46-57dqj\" (UID: \"11d1aa99-639c-451a-876e-59de6098e407\") " pod="openstack-kuttl-tests/barbican-keystone-listener-6d4445bf46-57dqj" Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.609538 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ztbb6\" (UniqueName: \"kubernetes.io/projected/11d1aa99-639c-451a-876e-59de6098e407-kube-api-access-ztbb6\") pod \"barbican-keystone-listener-6d4445bf46-57dqj\" (UID: \"11d1aa99-639c-451a-876e-59de6098e407\") " pod="openstack-kuttl-tests/barbican-keystone-listener-6d4445bf46-57dqj" Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.609607 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/11d1aa99-639c-451a-876e-59de6098e407-logs\") pod \"barbican-keystone-listener-6d4445bf46-57dqj\" (UID: \"11d1aa99-639c-451a-876e-59de6098e407\") " pod="openstack-kuttl-tests/barbican-keystone-listener-6d4445bf46-57dqj" Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.609683 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/11d1aa99-639c-451a-876e-59de6098e407-config-data-custom\") pod \"barbican-keystone-listener-6d4445bf46-57dqj\" (UID: \"11d1aa99-639c-451a-876e-59de6098e407\") " pod="openstack-kuttl-tests/barbican-keystone-listener-6d4445bf46-57dqj" Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.638000 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b65a4359-8f00-46fd-acd3-02731c1e1389" path="/var/lib/kubelet/pods/b65a4359-8f00-46fd-acd3-02731c1e1389/volumes" Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.638715 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-6d4445bf46-57dqj"] Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.638746 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-worker-dd549b649-grdrm"] Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.654577 4558 generic.go:334] "Generic (PLEG): container finished" podID="c9ec2559-f7c5-4318-9a3b-4544d222ff8e" containerID="0b6ba9125bd1b63702f6dc81feb7c84047318daad92c337b8f2184e3028bcdfb" exitCode=0 Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.654608 4558 generic.go:334] "Generic (PLEG): container finished" podID="c9ec2559-f7c5-4318-9a3b-4544d222ff8e" containerID="de49ed715fad547662149d1b7edd446a91ec26a644380970de0736db54838406" exitCode=143 Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.671873 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"c9ec2559-f7c5-4318-9a3b-4544d222ff8e","Type":"ContainerDied","Data":"0b6ba9125bd1b63702f6dc81feb7c84047318daad92c337b8f2184e3028bcdfb"} Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.671908 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"c9ec2559-f7c5-4318-9a3b-4544d222ff8e","Type":"ContainerDied","Data":"de49ed715fad547662149d1b7edd446a91ec26a644380970de0736db54838406"} Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.671924 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-api-7cccd8f896-m6rvs"] Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.672007 4558 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-dd549b649-grdrm" Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.690404 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-worker-dd549b649-grdrm"] Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.693408 4558 generic.go:334] "Generic (PLEG): container finished" podID="91fac67e-69a9-4d88-9136-0d2484ca0dce" containerID="ff3f57bfd0fc845e414a7a24e7f078b2e8b3a550fdb80d3739b2e449bcf27a81" exitCode=143 Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.693524 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-67cbb6f96d-89sm2"] Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.693554 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"91fac67e-69a9-4d88-9136-0d2484ca0dce","Type":"ContainerDied","Data":"ff3f57bfd0fc845e414a7a24e7f078b2e8b3a550fdb80d3739b2e449bcf27a81"} Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.714701 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/11d1aa99-639c-451a-876e-59de6098e407-logs\") pod \"barbican-keystone-listener-6d4445bf46-57dqj\" (UID: \"11d1aa99-639c-451a-876e-59de6098e407\") " pod="openstack-kuttl-tests/barbican-keystone-listener-6d4445bf46-57dqj" Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.714807 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/11d1aa99-639c-451a-876e-59de6098e407-config-data-custom\") pod \"barbican-keystone-listener-6d4445bf46-57dqj\" (UID: \"11d1aa99-639c-451a-876e-59de6098e407\") " pod="openstack-kuttl-tests/barbican-keystone-listener-6d4445bf46-57dqj" Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.715079 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11d1aa99-639c-451a-876e-59de6098e407-combined-ca-bundle\") pod \"barbican-keystone-listener-6d4445bf46-57dqj\" (UID: \"11d1aa99-639c-451a-876e-59de6098e407\") " pod="openstack-kuttl-tests/barbican-keystone-listener-6d4445bf46-57dqj" Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.715111 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/11d1aa99-639c-451a-876e-59de6098e407-config-data\") pod \"barbican-keystone-listener-6d4445bf46-57dqj\" (UID: \"11d1aa99-639c-451a-876e-59de6098e407\") " pod="openstack-kuttl-tests/barbican-keystone-listener-6d4445bf46-57dqj" Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.715176 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ztbb6\" (UniqueName: \"kubernetes.io/projected/11d1aa99-639c-451a-876e-59de6098e407-kube-api-access-ztbb6\") pod \"barbican-keystone-listener-6d4445bf46-57dqj\" (UID: \"11d1aa99-639c-451a-876e-59de6098e407\") " pod="openstack-kuttl-tests/barbican-keystone-listener-6d4445bf46-57dqj" Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.716541 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/11d1aa99-639c-451a-876e-59de6098e407-logs\") pod \"barbican-keystone-listener-6d4445bf46-57dqj\" (UID: \"11d1aa99-639c-451a-876e-59de6098e407\") " 
pod="openstack-kuttl-tests/barbican-keystone-listener-6d4445bf46-57dqj" Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.719559 4558 generic.go:334] "Generic (PLEG): container finished" podID="5a046f3a-bea5-4aea-8081-1c5994ecdbd9" containerID="9940485ddf968cc8ec26bafdb2ca664eaf066521e0b63efd3dafb4c251d88a6e" exitCode=0 Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.719576 4558 generic.go:334] "Generic (PLEG): container finished" podID="5a046f3a-bea5-4aea-8081-1c5994ecdbd9" containerID="ae2197c02d74d0b7fb6e5576d5e29ab4e2fe41416be7e0d896afd6625b8eed8a" exitCode=143 Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.720418 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"5a046f3a-bea5-4aea-8081-1c5994ecdbd9","Type":"ContainerDied","Data":"9940485ddf968cc8ec26bafdb2ca664eaf066521e0b63efd3dafb4c251d88a6e"} Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.720440 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"5a046f3a-bea5-4aea-8081-1c5994ecdbd9","Type":"ContainerDied","Data":"ae2197c02d74d0b7fb6e5576d5e29ab4e2fe41416be7e0d896afd6625b8eed8a"} Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.734914 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/11d1aa99-639c-451a-876e-59de6098e407-config-data-custom\") pod \"barbican-keystone-listener-6d4445bf46-57dqj\" (UID: \"11d1aa99-639c-451a-876e-59de6098e407\") " pod="openstack-kuttl-tests/barbican-keystone-listener-6d4445bf46-57dqj" Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.749011 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11d1aa99-639c-451a-876e-59de6098e407-combined-ca-bundle\") pod \"barbican-keystone-listener-6d4445bf46-57dqj\" (UID: \"11d1aa99-639c-451a-876e-59de6098e407\") " pod="openstack-kuttl-tests/barbican-keystone-listener-6d4445bf46-57dqj" Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.755196 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/memcached-0" podUID="5391541f-5057-4d20-ae1e-bbc88e6b33a4" containerName="memcached" probeResult="failure" output="dial tcp 10.217.0.113:11211: connect: connection refused" Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.779607 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ztbb6\" (UniqueName: \"kubernetes.io/projected/11d1aa99-639c-451a-876e-59de6098e407-kube-api-access-ztbb6\") pod \"barbican-keystone-listener-6d4445bf46-57dqj\" (UID: \"11d1aa99-639c-451a-876e-59de6098e407\") " pod="openstack-kuttl-tests/barbican-keystone-listener-6d4445bf46-57dqj" Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.779801 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-api-64bd4857c6-2s9h4"] Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.782213 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/11d1aa99-639c-451a-876e-59de6098e407-config-data\") pod \"barbican-keystone-listener-6d4445bf46-57dqj\" (UID: \"11d1aa99-639c-451a-876e-59de6098e407\") " pod="openstack-kuttl-tests/barbican-keystone-listener-6d4445bf46-57dqj" Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.810535 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-api-64bd4857c6-2s9h4" Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.823111 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/640de3b3-60e1-41b8-ab00-22e375bad65c-config-data-custom\") pod \"barbican-worker-dd549b649-grdrm\" (UID: \"640de3b3-60e1-41b8-ab00-22e375bad65c\") " pod="openstack-kuttl-tests/barbican-worker-dd549b649-grdrm" Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.823524 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/640de3b3-60e1-41b8-ab00-22e375bad65c-config-data\") pod \"barbican-worker-dd549b649-grdrm\" (UID: \"640de3b3-60e1-41b8-ab00-22e375bad65c\") " pod="openstack-kuttl-tests/barbican-worker-dd549b649-grdrm" Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.823610 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/640de3b3-60e1-41b8-ab00-22e375bad65c-combined-ca-bundle\") pod \"barbican-worker-dd549b649-grdrm\" (UID: \"640de3b3-60e1-41b8-ab00-22e375bad65c\") " pod="openstack-kuttl-tests/barbican-worker-dd549b649-grdrm" Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.823867 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k7grk\" (UniqueName: \"kubernetes.io/projected/640de3b3-60e1-41b8-ab00-22e375bad65c-kube-api-access-k7grk\") pod \"barbican-worker-dd549b649-grdrm\" (UID: \"640de3b3-60e1-41b8-ab00-22e375bad65c\") " pod="openstack-kuttl-tests/barbican-worker-dd549b649-grdrm" Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.824027 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/640de3b3-60e1-41b8-ab00-22e375bad65c-logs\") pod \"barbican-worker-dd549b649-grdrm\" (UID: \"640de3b3-60e1-41b8-ab00-22e375bad65c\") " pod="openstack-kuttl-tests/barbican-worker-dd549b649-grdrm" Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.871772 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-api-64bd4857c6-2s9h4"] Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.893744 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-6d4445bf46-57dqj" Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.935124 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/placement-55dfc8964d-kck7c"] Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.943908 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-55dfc8964d-kck7c" Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.946614 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x6ksk\" (UniqueName: \"kubernetes.io/projected/94062d42-28cb-4c8a-afa4-f51458dedc6c-kube-api-access-x6ksk\") pod \"barbican-api-64bd4857c6-2s9h4\" (UID: \"94062d42-28cb-4c8a-afa4-f51458dedc6c\") " pod="openstack-kuttl-tests/barbican-api-64bd4857c6-2s9h4" Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.947350 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94062d42-28cb-4c8a-afa4-f51458dedc6c-combined-ca-bundle\") pod \"barbican-api-64bd4857c6-2s9h4\" (UID: \"94062d42-28cb-4c8a-afa4-f51458dedc6c\") " pod="openstack-kuttl-tests/barbican-api-64bd4857c6-2s9h4" Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.947411 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/640de3b3-60e1-41b8-ab00-22e375bad65c-logs\") pod \"barbican-worker-dd549b649-grdrm\" (UID: \"640de3b3-60e1-41b8-ab00-22e375bad65c\") " pod="openstack-kuttl-tests/barbican-worker-dd549b649-grdrm" Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.947475 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/640de3b3-60e1-41b8-ab00-22e375bad65c-config-data-custom\") pod \"barbican-worker-dd549b649-grdrm\" (UID: \"640de3b3-60e1-41b8-ab00-22e375bad65c\") " pod="openstack-kuttl-tests/barbican-worker-dd549b649-grdrm" Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.947537 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/94062d42-28cb-4c8a-afa4-f51458dedc6c-config-data-custom\") pod \"barbican-api-64bd4857c6-2s9h4\" (UID: \"94062d42-28cb-4c8a-afa4-f51458dedc6c\") " pod="openstack-kuttl-tests/barbican-api-64bd4857c6-2s9h4" Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.947603 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/94062d42-28cb-4c8a-afa4-f51458dedc6c-logs\") pod \"barbican-api-64bd4857c6-2s9h4\" (UID: \"94062d42-28cb-4c8a-afa4-f51458dedc6c\") " pod="openstack-kuttl-tests/barbican-api-64bd4857c6-2s9h4" Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.947700 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/94062d42-28cb-4c8a-afa4-f51458dedc6c-public-tls-certs\") pod \"barbican-api-64bd4857c6-2s9h4\" (UID: \"94062d42-28cb-4c8a-afa4-f51458dedc6c\") " pod="openstack-kuttl-tests/barbican-api-64bd4857c6-2s9h4" Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.947768 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/94062d42-28cb-4c8a-afa4-f51458dedc6c-internal-tls-certs\") pod \"barbican-api-64bd4857c6-2s9h4\" (UID: \"94062d42-28cb-4c8a-afa4-f51458dedc6c\") " pod="openstack-kuttl-tests/barbican-api-64bd4857c6-2s9h4" Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.947798 4558 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/640de3b3-60e1-41b8-ab00-22e375bad65c-config-data\") pod \"barbican-worker-dd549b649-grdrm\" (UID: \"640de3b3-60e1-41b8-ab00-22e375bad65c\") " pod="openstack-kuttl-tests/barbican-worker-dd549b649-grdrm" Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.947868 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94062d42-28cb-4c8a-afa4-f51458dedc6c-config-data\") pod \"barbican-api-64bd4857c6-2s9h4\" (UID: \"94062d42-28cb-4c8a-afa4-f51458dedc6c\") " pod="openstack-kuttl-tests/barbican-api-64bd4857c6-2s9h4" Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.947892 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/640de3b3-60e1-41b8-ab00-22e375bad65c-combined-ca-bundle\") pod \"barbican-worker-dd549b649-grdrm\" (UID: \"640de3b3-60e1-41b8-ab00-22e375bad65c\") " pod="openstack-kuttl-tests/barbican-worker-dd549b649-grdrm" Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.948036 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k7grk\" (UniqueName: \"kubernetes.io/projected/640de3b3-60e1-41b8-ab00-22e375bad65c-kube-api-access-k7grk\") pod \"barbican-worker-dd549b649-grdrm\" (UID: \"640de3b3-60e1-41b8-ab00-22e375bad65c\") " pod="openstack-kuttl-tests/barbican-worker-dd549b649-grdrm" Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.949292 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/640de3b3-60e1-41b8-ab00-22e375bad65c-logs\") pod \"barbican-worker-dd549b649-grdrm\" (UID: \"640de3b3-60e1-41b8-ab00-22e375bad65c\") " pod="openstack-kuttl-tests/barbican-worker-dd549b649-grdrm" Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.955377 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/640de3b3-60e1-41b8-ab00-22e375bad65c-combined-ca-bundle\") pod \"barbican-worker-dd549b649-grdrm\" (UID: \"640de3b3-60e1-41b8-ab00-22e375bad65c\") " pod="openstack-kuttl-tests/barbican-worker-dd549b649-grdrm" Jan 20 17:35:50 crc kubenswrapper[4558]: E0120 17:35:50.969898 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b138c24e8875ddd5251012c3048e6574ab595b0a17f136458768b1a33c6227cb" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.970416 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-nb-0_e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c/ovsdbserver-nb/0.log" Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.970485 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:35:50 crc kubenswrapper[4558]: I0120 17:35:50.971397 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/640de3b3-60e1-41b8-ab00-22e375bad65c-config-data-custom\") pod \"barbican-worker-dd549b649-grdrm\" (UID: \"640de3b3-60e1-41b8-ab00-22e375bad65c\") " pod="openstack-kuttl-tests/barbican-worker-dd549b649-grdrm" Jan 20 17:35:51 crc kubenswrapper[4558]: E0120 17:35:51.025559 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b138c24e8875ddd5251012c3048e6574ab595b0a17f136458768b1a33c6227cb" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.026701 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/640de3b3-60e1-41b8-ab00-22e375bad65c-config-data\") pod \"barbican-worker-dd549b649-grdrm\" (UID: \"640de3b3-60e1-41b8-ab00-22e375bad65c\") " pod="openstack-kuttl-tests/barbican-worker-dd549b649-grdrm" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.036896 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-55dfc8964d-kck7c"] Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.047948 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k7grk\" (UniqueName: \"kubernetes.io/projected/640de3b3-60e1-41b8-ab00-22e375bad65c-kube-api-access-k7grk\") pod \"barbican-worker-dd549b649-grdrm\" (UID: \"640de3b3-60e1-41b8-ab00-22e375bad65c\") " pod="openstack-kuttl-tests/barbican-worker-dd549b649-grdrm" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.059441 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b77441a-39f5-4ed5-bf1b-c29900900242-combined-ca-bundle\") pod \"placement-55dfc8964d-kck7c\" (UID: \"6b77441a-39f5-4ed5-bf1b-c29900900242\") " pod="openstack-kuttl-tests/placement-55dfc8964d-kck7c" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.059507 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/94062d42-28cb-4c8a-afa4-f51458dedc6c-config-data-custom\") pod \"barbican-api-64bd4857c6-2s9h4\" (UID: \"94062d42-28cb-4c8a-afa4-f51458dedc6c\") " pod="openstack-kuttl-tests/barbican-api-64bd4857c6-2s9h4" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.059591 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/94062d42-28cb-4c8a-afa4-f51458dedc6c-logs\") pod \"barbican-api-64bd4857c6-2s9h4\" (UID: \"94062d42-28cb-4c8a-afa4-f51458dedc6c\") " pod="openstack-kuttl-tests/barbican-api-64bd4857c6-2s9h4" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.059656 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b77441a-39f5-4ed5-bf1b-c29900900242-config-data\") pod \"placement-55dfc8964d-kck7c\" (UID: \"6b77441a-39f5-4ed5-bf1b-c29900900242\") " pod="openstack-kuttl-tests/placement-55dfc8964d-kck7c" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.059702 4558 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/94062d42-28cb-4c8a-afa4-f51458dedc6c-public-tls-certs\") pod \"barbican-api-64bd4857c6-2s9h4\" (UID: \"94062d42-28cb-4c8a-afa4-f51458dedc6c\") " pod="openstack-kuttl-tests/barbican-api-64bd4857c6-2s9h4" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.059775 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b77441a-39f5-4ed5-bf1b-c29900900242-internal-tls-certs\") pod \"placement-55dfc8964d-kck7c\" (UID: \"6b77441a-39f5-4ed5-bf1b-c29900900242\") " pod="openstack-kuttl-tests/placement-55dfc8964d-kck7c" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.059805 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b77441a-39f5-4ed5-bf1b-c29900900242-public-tls-certs\") pod \"placement-55dfc8964d-kck7c\" (UID: \"6b77441a-39f5-4ed5-bf1b-c29900900242\") " pod="openstack-kuttl-tests/placement-55dfc8964d-kck7c" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.059829 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6b77441a-39f5-4ed5-bf1b-c29900900242-logs\") pod \"placement-55dfc8964d-kck7c\" (UID: \"6b77441a-39f5-4ed5-bf1b-c29900900242\") " pod="openstack-kuttl-tests/placement-55dfc8964d-kck7c" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.059854 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/94062d42-28cb-4c8a-afa4-f51458dedc6c-internal-tls-certs\") pod \"barbican-api-64bd4857c6-2s9h4\" (UID: \"94062d42-28cb-4c8a-afa4-f51458dedc6c\") " pod="openstack-kuttl-tests/barbican-api-64bd4857c6-2s9h4" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.060021 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94062d42-28cb-4c8a-afa4-f51458dedc6c-config-data\") pod \"barbican-api-64bd4857c6-2s9h4\" (UID: \"94062d42-28cb-4c8a-afa4-f51458dedc6c\") " pod="openstack-kuttl-tests/barbican-api-64bd4857c6-2s9h4" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.060408 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x6ksk\" (UniqueName: \"kubernetes.io/projected/94062d42-28cb-4c8a-afa4-f51458dedc6c-kube-api-access-x6ksk\") pod \"barbican-api-64bd4857c6-2s9h4\" (UID: \"94062d42-28cb-4c8a-afa4-f51458dedc6c\") " pod="openstack-kuttl-tests/barbican-api-64bd4857c6-2s9h4" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.060462 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b77441a-39f5-4ed5-bf1b-c29900900242-scripts\") pod \"placement-55dfc8964d-kck7c\" (UID: \"6b77441a-39f5-4ed5-bf1b-c29900900242\") " pod="openstack-kuttl-tests/placement-55dfc8964d-kck7c" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.060506 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94062d42-28cb-4c8a-afa4-f51458dedc6c-combined-ca-bundle\") pod \"barbican-api-64bd4857c6-2s9h4\" (UID: \"94062d42-28cb-4c8a-afa4-f51458dedc6c\") " 
pod="openstack-kuttl-tests/barbican-api-64bd4857c6-2s9h4" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.060581 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xxs87\" (UniqueName: \"kubernetes.io/projected/6b77441a-39f5-4ed5-bf1b-c29900900242-kube-api-access-xxs87\") pod \"placement-55dfc8964d-kck7c\" (UID: \"6b77441a-39f5-4ed5-bf1b-c29900900242\") " pod="openstack-kuttl-tests/placement-55dfc8964d-kck7c" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.070795 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/94062d42-28cb-4c8a-afa4-f51458dedc6c-logs\") pod \"barbican-api-64bd4857c6-2s9h4\" (UID: \"94062d42-28cb-4c8a-afa4-f51458dedc6c\") " pod="openstack-kuttl-tests/barbican-api-64bd4857c6-2s9h4" Jan 20 17:35:51 crc kubenswrapper[4558]: E0120 17:35:51.077298 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b138c24e8875ddd5251012c3048e6574ab595b0a17f136458768b1a33c6227cb" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:35:51 crc kubenswrapper[4558]: E0120 17:35:51.077401 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="833337d4-94d4-4bf6-9d0b-5791b9cc115e" containerName="nova-scheduler-scheduler" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.141489 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-77fd7f9ff9-zs7mz"] Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.172319 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c-metrics-certs-tls-certs\") pod \"e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c\" (UID: \"e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c\") " Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.172471 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c-scripts\") pod \"e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c\" (UID: \"e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c\") " Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.172570 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c-ovsdbserver-nb-tls-certs\") pod \"e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c\" (UID: \"e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c\") " Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.172706 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nv6cc\" (UniqueName: \"kubernetes.io/projected/e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c-kube-api-access-nv6cc\") pod \"e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c\" (UID: \"e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c\") " Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.172807 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c-combined-ca-bundle\") pod 
\"e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c\" (UID: \"e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c\") " Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.172921 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c-config\") pod \"e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c\" (UID: \"e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c\") " Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.173012 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-nb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c\" (UID: \"e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c\") " Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.173092 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c-ovsdb-rundir\") pod \"e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c\" (UID: \"e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c\") " Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.173851 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b77441a-39f5-4ed5-bf1b-c29900900242-scripts\") pod \"placement-55dfc8964d-kck7c\" (UID: \"6b77441a-39f5-4ed5-bf1b-c29900900242\") " pod="openstack-kuttl-tests/placement-55dfc8964d-kck7c" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.173991 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xxs87\" (UniqueName: \"kubernetes.io/projected/6b77441a-39f5-4ed5-bf1b-c29900900242-kube-api-access-xxs87\") pod \"placement-55dfc8964d-kck7c\" (UID: \"6b77441a-39f5-4ed5-bf1b-c29900900242\") " pod="openstack-kuttl-tests/placement-55dfc8964d-kck7c" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.174081 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b77441a-39f5-4ed5-bf1b-c29900900242-combined-ca-bundle\") pod \"placement-55dfc8964d-kck7c\" (UID: \"6b77441a-39f5-4ed5-bf1b-c29900900242\") " pod="openstack-kuttl-tests/placement-55dfc8964d-kck7c" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.174231 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b77441a-39f5-4ed5-bf1b-c29900900242-config-data\") pod \"placement-55dfc8964d-kck7c\" (UID: \"6b77441a-39f5-4ed5-bf1b-c29900900242\") " pod="openstack-kuttl-tests/placement-55dfc8964d-kck7c" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.174334 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b77441a-39f5-4ed5-bf1b-c29900900242-internal-tls-certs\") pod \"placement-55dfc8964d-kck7c\" (UID: \"6b77441a-39f5-4ed5-bf1b-c29900900242\") " pod="openstack-kuttl-tests/placement-55dfc8964d-kck7c" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.189061 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b77441a-39f5-4ed5-bf1b-c29900900242-public-tls-certs\") pod \"placement-55dfc8964d-kck7c\" (UID: \"6b77441a-39f5-4ed5-bf1b-c29900900242\") " pod="openstack-kuttl-tests/placement-55dfc8964d-kck7c" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 
17:35:51.189185 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6b77441a-39f5-4ed5-bf1b-c29900900242-logs\") pod \"placement-55dfc8964d-kck7c\" (UID: \"6b77441a-39f5-4ed5-bf1b-c29900900242\") " pod="openstack-kuttl-tests/placement-55dfc8964d-kck7c" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.189819 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c-scripts" (OuterVolumeSpecName: "scripts") pod "e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c" (UID: "e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.192876 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6b77441a-39f5-4ed5-bf1b-c29900900242-logs\") pod \"placement-55dfc8964d-kck7c\" (UID: \"6b77441a-39f5-4ed5-bf1b-c29900900242\") " pod="openstack-kuttl-tests/placement-55dfc8964d-kck7c" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.184674 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.195786 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c-config" (OuterVolumeSpecName: "config") pod "e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c" (UID: "e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.196331 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c" (UID: "e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c"). InnerVolumeSpecName "ovsdb-rundir". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.226256 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-7749bf6d58-dwpbl"] Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.259672 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/94062d42-28cb-4c8a-afa4-f51458dedc6c-public-tls-certs\") pod \"barbican-api-64bd4857c6-2s9h4\" (UID: \"94062d42-28cb-4c8a-afa4-f51458dedc6c\") " pod="openstack-kuttl-tests/barbican-api-64bd4857c6-2s9h4" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.271814 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/94062d42-28cb-4c8a-afa4-f51458dedc6c-internal-tls-certs\") pod \"barbican-api-64bd4857c6-2s9h4\" (UID: \"94062d42-28cb-4c8a-afa4-f51458dedc6c\") " pod="openstack-kuttl-tests/barbican-api-64bd4857c6-2s9h4" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.276276 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/94062d42-28cb-4c8a-afa4-f51458dedc6c-config-data-custom\") pod \"barbican-api-64bd4857c6-2s9h4\" (UID: \"94062d42-28cb-4c8a-afa4-f51458dedc6c\") " pod="openstack-kuttl-tests/barbican-api-64bd4857c6-2s9h4" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.281327 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-59b8cc448d-b9wnn"] Jan 20 17:35:51 crc kubenswrapper[4558]: E0120 17:35:51.292697 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c9ec2559-f7c5-4318-9a3b-4544d222ff8e" containerName="glance-log" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.292730 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c9ec2559-f7c5-4318-9a3b-4544d222ff8e" containerName="glance-log" Jan 20 17:35:51 crc kubenswrapper[4558]: E0120 17:35:51.292751 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c" containerName="ovsdbserver-nb" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.292757 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c" containerName="ovsdbserver-nb" Jan 20 17:35:51 crc kubenswrapper[4558]: E0120 17:35:51.292766 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c" containerName="openstack-network-exporter" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.292771 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c" containerName="openstack-network-exporter" Jan 20 17:35:51 crc kubenswrapper[4558]: E0120 17:35:51.292801 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c9ec2559-f7c5-4318-9a3b-4544d222ff8e" containerName="glance-httpd" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.292806 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c9ec2559-f7c5-4318-9a3b-4544d222ff8e" containerName="glance-httpd" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.293064 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c" containerName="openstack-network-exporter" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.293080 4558 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c" containerName="ovsdbserver-nb" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.293091 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c9ec2559-f7c5-4318-9a3b-4544d222ff8e" containerName="glance-log" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.293102 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c9ec2559-f7c5-4318-9a3b-4544d222ff8e" containerName="glance-httpd" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.293895 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-59b8cc448d-b9wnn" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.294127 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c9ec2559-f7c5-4318-9a3b-4544d222ff8e-scripts\") pod \"c9ec2559-f7c5-4318-9a3b-4544d222ff8e\" (UID: \"c9ec2559-f7c5-4318-9a3b-4544d222ff8e\") " Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.294404 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c9ec2559-f7c5-4318-9a3b-4544d222ff8e-public-tls-certs\") pod \"c9ec2559-f7c5-4318-9a3b-4544d222ff8e\" (UID: \"c9ec2559-f7c5-4318-9a3b-4544d222ff8e\") " Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.294425 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage16-crc\") pod \"c9ec2559-f7c5-4318-9a3b-4544d222ff8e\" (UID: \"c9ec2559-f7c5-4318-9a3b-4544d222ff8e\") " Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.294477 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2fspt\" (UniqueName: \"kubernetes.io/projected/c9ec2559-f7c5-4318-9a3b-4544d222ff8e-kube-api-access-2fspt\") pod \"c9ec2559-f7c5-4318-9a3b-4544d222ff8e\" (UID: \"c9ec2559-f7c5-4318-9a3b-4544d222ff8e\") " Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.294623 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c9ec2559-f7c5-4318-9a3b-4544d222ff8e-logs\") pod \"c9ec2559-f7c5-4318-9a3b-4544d222ff8e\" (UID: \"c9ec2559-f7c5-4318-9a3b-4544d222ff8e\") " Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.294671 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9ec2559-f7c5-4318-9a3b-4544d222ff8e-config-data\") pod \"c9ec2559-f7c5-4318-9a3b-4544d222ff8e\" (UID: \"c9ec2559-f7c5-4318-9a3b-4544d222ff8e\") " Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.294706 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c9ec2559-f7c5-4318-9a3b-4544d222ff8e-httpd-run\") pod \"c9ec2559-f7c5-4318-9a3b-4544d222ff8e\" (UID: \"c9ec2559-f7c5-4318-9a3b-4544d222ff8e\") " Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.294731 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9ec2559-f7c5-4318-9a3b-4544d222ff8e-combined-ca-bundle\") pod \"c9ec2559-f7c5-4318-9a3b-4544d222ff8e\" (UID: \"c9ec2559-f7c5-4318-9a3b-4544d222ff8e\") " Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.295469 4558 operation_generator.go:803] UnmountVolume.TearDown 
succeeded for volume "kubernetes.io/empty-dir/c9ec2559-f7c5-4318-9a3b-4544d222ff8e-logs" (OuterVolumeSpecName: "logs") pod "c9ec2559-f7c5-4318-9a3b-4544d222ff8e" (UID: "c9ec2559-f7c5-4318-9a3b-4544d222ff8e"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.312220 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-59b8cc448d-b9wnn"] Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.312719 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xxs87\" (UniqueName: \"kubernetes.io/projected/6b77441a-39f5-4ed5-bf1b-c29900900242-kube-api-access-xxs87\") pod \"placement-55dfc8964d-kck7c\" (UID: \"6b77441a-39f5-4ed5-bf1b-c29900900242\") " pod="openstack-kuttl-tests/placement-55dfc8964d-kck7c" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.313382 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c9ec2559-f7c5-4318-9a3b-4544d222ff8e-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "c9ec2559-f7c5-4318-9a3b-4544d222ff8e" (UID: "c9ec2559-f7c5-4318-9a3b-4544d222ff8e"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.313484 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94062d42-28cb-4c8a-afa4-f51458dedc6c-combined-ca-bundle\") pod \"barbican-api-64bd4857c6-2s9h4\" (UID: \"94062d42-28cb-4c8a-afa4-f51458dedc6c\") " pod="openstack-kuttl-tests/barbican-api-64bd4857c6-2s9h4" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.314086 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94062d42-28cb-4c8a-afa4-f51458dedc6c-config-data\") pod \"barbican-api-64bd4857c6-2s9h4\" (UID: \"94062d42-28cb-4c8a-afa4-f51458dedc6c\") " pod="openstack-kuttl-tests/barbican-api-64bd4857c6-2s9h4" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.314559 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x6ksk\" (UniqueName: \"kubernetes.io/projected/94062d42-28cb-4c8a-afa4-f51458dedc6c-kube-api-access-x6ksk\") pod \"barbican-api-64bd4857c6-2s9h4\" (UID: \"94062d42-28cb-4c8a-afa4-f51458dedc6c\") " pod="openstack-kuttl-tests/barbican-api-64bd4857c6-2s9h4" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.337080 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage16-crc" (OuterVolumeSpecName: "glance") pod "c9ec2559-f7c5-4318-9a3b-4544d222ff8e" (UID: "c9ec2559-f7c5-4318-9a3b-4544d222ff8e"). InnerVolumeSpecName "local-storage16-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.339308 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c9ec2559-f7c5-4318-9a3b-4544d222ff8e-scripts" (OuterVolumeSpecName: "scripts") pod "c9ec2559-f7c5-4318-9a3b-4544d222ff8e" (UID: "c9ec2559-f7c5-4318-9a3b-4544d222ff8e"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.339672 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c9ec2559-f7c5-4318-9a3b-4544d222ff8e-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.339720 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.339761 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage16-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage16-crc\") on node \"crc\" " Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.342282 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c9ec2559-f7c5-4318-9a3b-4544d222ff8e-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.342681 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.343339 4558 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.343354 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c9ec2559-f7c5-4318-9a3b-4544d222ff8e-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.343814 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-dd549b649-grdrm" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.345580 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c9ec2559-f7c5-4318-9a3b-4544d222ff8e-kube-api-access-2fspt" (OuterVolumeSpecName: "kube-api-access-2fspt") pod "c9ec2559-f7c5-4318-9a3b-4544d222ff8e" (UID: "c9ec2559-f7c5-4318-9a3b-4544d222ff8e"). InnerVolumeSpecName "kube-api-access-2fspt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.437935 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-6d8f9956fd-j6jg4"] Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.447055 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/73789760-f9bd-44e0-bf9b-c1864e5f8803-fernet-keys\") pod \"keystone-59b8cc448d-b9wnn\" (UID: \"73789760-f9bd-44e0-bf9b-c1864e5f8803\") " pod="openstack-kuttl-tests/keystone-59b8cc448d-b9wnn" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.447118 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73789760-f9bd-44e0-bf9b-c1864e5f8803-combined-ca-bundle\") pod \"keystone-59b8cc448d-b9wnn\" (UID: \"73789760-f9bd-44e0-bf9b-c1864e5f8803\") " pod="openstack-kuttl-tests/keystone-59b8cc448d-b9wnn" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.447241 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/73789760-f9bd-44e0-bf9b-c1864e5f8803-credential-keys\") pod \"keystone-59b8cc448d-b9wnn\" (UID: \"73789760-f9bd-44e0-bf9b-c1864e5f8803\") " pod="openstack-kuttl-tests/keystone-59b8cc448d-b9wnn" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.447272 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tdnlj\" (UniqueName: \"kubernetes.io/projected/73789760-f9bd-44e0-bf9b-c1864e5f8803-kube-api-access-tdnlj\") pod \"keystone-59b8cc448d-b9wnn\" (UID: \"73789760-f9bd-44e0-bf9b-c1864e5f8803\") " pod="openstack-kuttl-tests/keystone-59b8cc448d-b9wnn" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.447312 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73789760-f9bd-44e0-bf9b-c1864e5f8803-config-data\") pod \"keystone-59b8cc448d-b9wnn\" (UID: \"73789760-f9bd-44e0-bf9b-c1864e5f8803\") " pod="openstack-kuttl-tests/keystone-59b8cc448d-b9wnn" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.447375 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/73789760-f9bd-44e0-bf9b-c1864e5f8803-internal-tls-certs\") pod \"keystone-59b8cc448d-b9wnn\" (UID: \"73789760-f9bd-44e0-bf9b-c1864e5f8803\") " pod="openstack-kuttl-tests/keystone-59b8cc448d-b9wnn" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.447393 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/73789760-f9bd-44e0-bf9b-c1864e5f8803-scripts\") pod \"keystone-59b8cc448d-b9wnn\" (UID: \"73789760-f9bd-44e0-bf9b-c1864e5f8803\") " pod="openstack-kuttl-tests/keystone-59b8cc448d-b9wnn" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.447430 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/73789760-f9bd-44e0-bf9b-c1864e5f8803-public-tls-certs\") pod \"keystone-59b8cc448d-b9wnn\" (UID: \"73789760-f9bd-44e0-bf9b-c1864e5f8803\") " pod="openstack-kuttl-tests/keystone-59b8cc448d-b9wnn" Jan 20 17:35:51 crc 
kubenswrapper[4558]: I0120 17:35:51.448502 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-6d8f9956fd-j6jg4" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.448600 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2fspt\" (UniqueName: \"kubernetes.io/projected/c9ec2559-f7c5-4318-9a3b-4544d222ff8e-kube-api-access-2fspt\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.462686 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b77441a-39f5-4ed5-bf1b-c29900900242-combined-ca-bundle\") pod \"placement-55dfc8964d-kck7c\" (UID: \"6b77441a-39f5-4ed5-bf1b-c29900900242\") " pod="openstack-kuttl-tests/placement-55dfc8964d-kck7c" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.462728 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b77441a-39f5-4ed5-bf1b-c29900900242-scripts\") pod \"placement-55dfc8964d-kck7c\" (UID: \"6b77441a-39f5-4ed5-bf1b-c29900900242\") " pod="openstack-kuttl-tests/placement-55dfc8964d-kck7c" Jan 20 17:35:51 crc kubenswrapper[4558]: E0120 17:35:51.466601 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="f80866aad1b997b9edf8ab7c0962cd1436981eced3f24d965081cee0a0b92d9a" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:35:51 crc kubenswrapper[4558]: E0120 17:35:51.467821 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="f80866aad1b997b9edf8ab7c0962cd1436981eced3f24d965081cee0a0b92d9a" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:35:51 crc kubenswrapper[4558]: E0120 17:35:51.468977 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="f80866aad1b997b9edf8ab7c0962cd1436981eced3f24d965081cee0a0b92d9a" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:35:51 crc kubenswrapper[4558]: E0120 17:35:51.469837 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podUID="9b9c1cf7-0cbb-4bb3-a764-8a8fa446c8a8" containerName="nova-cell1-conductor-conductor" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.476234 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b77441a-39f5-4ed5-bf1b-c29900900242-internal-tls-certs\") pod \"placement-55dfc8964d-kck7c\" (UID: \"6b77441a-39f5-4ed5-bf1b-c29900900242\") " pod="openstack-kuttl-tests/placement-55dfc8964d-kck7c" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.477232 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage13-crc" (OuterVolumeSpecName: "ovndbcluster-nb-etc-ovn") pod "e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c" (UID: "e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c"). 
InnerVolumeSpecName "local-storage13-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.480725 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b77441a-39f5-4ed5-bf1b-c29900900242-public-tls-certs\") pod \"placement-55dfc8964d-kck7c\" (UID: \"6b77441a-39f5-4ed5-bf1b-c29900900242\") " pod="openstack-kuttl-tests/placement-55dfc8964d-kck7c" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.481305 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c-kube-api-access-nv6cc" (OuterVolumeSpecName: "kube-api-access-nv6cc") pod "e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c" (UID: "e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c"). InnerVolumeSpecName "kube-api-access-nv6cc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.481971 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b77441a-39f5-4ed5-bf1b-c29900900242-config-data\") pod \"placement-55dfc8964d-kck7c\" (UID: \"6b77441a-39f5-4ed5-bf1b-c29900900242\") " pod="openstack-kuttl-tests/placement-55dfc8964d-kck7c" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.482866 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-6d8f9956fd-j6jg4"] Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.498716 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-api-64bd4857c6-2s9h4" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.510313 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.553632 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hnmc5\" (UniqueName: \"kubernetes.io/projected/5b57aa66-3c71-422d-b029-86cb0e3a9aef-kube-api-access-hnmc5\") pod \"neutron-6d8f9956fd-j6jg4\" (UID: \"5b57aa66-3c71-422d-b029-86cb0e3a9aef\") " pod="openstack-kuttl-tests/neutron-6d8f9956fd-j6jg4" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.553699 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/73789760-f9bd-44e0-bf9b-c1864e5f8803-internal-tls-certs\") pod \"keystone-59b8cc448d-b9wnn\" (UID: \"73789760-f9bd-44e0-bf9b-c1864e5f8803\") " pod="openstack-kuttl-tests/keystone-59b8cc448d-b9wnn" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.553725 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/73789760-f9bd-44e0-bf9b-c1864e5f8803-scripts\") pod \"keystone-59b8cc448d-b9wnn\" (UID: \"73789760-f9bd-44e0-bf9b-c1864e5f8803\") " pod="openstack-kuttl-tests/keystone-59b8cc448d-b9wnn" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.553759 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b57aa66-3c71-422d-b029-86cb0e3a9aef-combined-ca-bundle\") pod \"neutron-6d8f9956fd-j6jg4\" (UID: \"5b57aa66-3c71-422d-b029-86cb0e3a9aef\") " pod="openstack-kuttl-tests/neutron-6d8f9956fd-j6jg4" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.553787 4558 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/73789760-f9bd-44e0-bf9b-c1864e5f8803-public-tls-certs\") pod \"keystone-59b8cc448d-b9wnn\" (UID: \"73789760-f9bd-44e0-bf9b-c1864e5f8803\") " pod="openstack-kuttl-tests/keystone-59b8cc448d-b9wnn" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.553812 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/5b57aa66-3c71-422d-b029-86cb0e3a9aef-httpd-config\") pod \"neutron-6d8f9956fd-j6jg4\" (UID: \"5b57aa66-3c71-422d-b029-86cb0e3a9aef\") " pod="openstack-kuttl-tests/neutron-6d8f9956fd-j6jg4" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.553866 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/73789760-f9bd-44e0-bf9b-c1864e5f8803-fernet-keys\") pod \"keystone-59b8cc448d-b9wnn\" (UID: \"73789760-f9bd-44e0-bf9b-c1864e5f8803\") " pod="openstack-kuttl-tests/keystone-59b8cc448d-b9wnn" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.553886 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73789760-f9bd-44e0-bf9b-c1864e5f8803-combined-ca-bundle\") pod \"keystone-59b8cc448d-b9wnn\" (UID: \"73789760-f9bd-44e0-bf9b-c1864e5f8803\") " pod="openstack-kuttl-tests/keystone-59b8cc448d-b9wnn" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.553902 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/5b57aa66-3c71-422d-b029-86cb0e3a9aef-config\") pod \"neutron-6d8f9956fd-j6jg4\" (UID: \"5b57aa66-3c71-422d-b029-86cb0e3a9aef\") " pod="openstack-kuttl-tests/neutron-6d8f9956fd-j6jg4" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.553947 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5b57aa66-3c71-422d-b029-86cb0e3a9aef-public-tls-certs\") pod \"neutron-6d8f9956fd-j6jg4\" (UID: \"5b57aa66-3c71-422d-b029-86cb0e3a9aef\") " pod="openstack-kuttl-tests/neutron-6d8f9956fd-j6jg4" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.554047 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/73789760-f9bd-44e0-bf9b-c1864e5f8803-credential-keys\") pod \"keystone-59b8cc448d-b9wnn\" (UID: \"73789760-f9bd-44e0-bf9b-c1864e5f8803\") " pod="openstack-kuttl-tests/keystone-59b8cc448d-b9wnn" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.554080 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tdnlj\" (UniqueName: \"kubernetes.io/projected/73789760-f9bd-44e0-bf9b-c1864e5f8803-kube-api-access-tdnlj\") pod \"keystone-59b8cc448d-b9wnn\" (UID: \"73789760-f9bd-44e0-bf9b-c1864e5f8803\") " pod="openstack-kuttl-tests/keystone-59b8cc448d-b9wnn" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.554115 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73789760-f9bd-44e0-bf9b-c1864e5f8803-config-data\") pod \"keystone-59b8cc448d-b9wnn\" (UID: \"73789760-f9bd-44e0-bf9b-c1864e5f8803\") " pod="openstack-kuttl-tests/keystone-59b8cc448d-b9wnn" Jan 20 17:35:51 crc kubenswrapper[4558]: 
I0120 17:35:51.554132 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5b57aa66-3c71-422d-b029-86cb0e3a9aef-internal-tls-certs\") pod \"neutron-6d8f9956fd-j6jg4\" (UID: \"5b57aa66-3c71-422d-b029-86cb0e3a9aef\") " pod="openstack-kuttl-tests/neutron-6d8f9956fd-j6jg4" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.554154 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/5b57aa66-3c71-422d-b029-86cb0e3a9aef-ovndb-tls-certs\") pod \"neutron-6d8f9956fd-j6jg4\" (UID: \"5b57aa66-3c71-422d-b029-86cb0e3a9aef\") " pod="openstack-kuttl-tests/neutron-6d8f9956fd-j6jg4" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.554528 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") on node \"crc\" " Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.554549 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nv6cc\" (UniqueName: \"kubernetes.io/projected/e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c-kube-api-access-nv6cc\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.575086 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c9ec2559-f7c5-4318-9a3b-4544d222ff8e-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "c9ec2559-f7c5-4318-9a3b-4544d222ff8e" (UID: "c9ec2559-f7c5-4318-9a3b-4544d222ff8e"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.624516 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/73789760-f9bd-44e0-bf9b-c1864e5f8803-credential-keys\") pod \"keystone-59b8cc448d-b9wnn\" (UID: \"73789760-f9bd-44e0-bf9b-c1864e5f8803\") " pod="openstack-kuttl-tests/keystone-59b8cc448d-b9wnn" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.627553 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/73789760-f9bd-44e0-bf9b-c1864e5f8803-scripts\") pod \"keystone-59b8cc448d-b9wnn\" (UID: \"73789760-f9bd-44e0-bf9b-c1864e5f8803\") " pod="openstack-kuttl-tests/keystone-59b8cc448d-b9wnn" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.628941 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73789760-f9bd-44e0-bf9b-c1864e5f8803-combined-ca-bundle\") pod \"keystone-59b8cc448d-b9wnn\" (UID: \"73789760-f9bd-44e0-bf9b-c1864e5f8803\") " pod="openstack-kuttl-tests/keystone-59b8cc448d-b9wnn" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.638667 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/73789760-f9bd-44e0-bf9b-c1864e5f8803-internal-tls-certs\") pod \"keystone-59b8cc448d-b9wnn\" (UID: \"73789760-f9bd-44e0-bf9b-c1864e5f8803\") " pod="openstack-kuttl-tests/keystone-59b8cc448d-b9wnn" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.640413 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/73789760-f9bd-44e0-bf9b-c1864e5f8803-public-tls-certs\") 
pod \"keystone-59b8cc448d-b9wnn\" (UID: \"73789760-f9bd-44e0-bf9b-c1864e5f8803\") " pod="openstack-kuttl-tests/keystone-59b8cc448d-b9wnn" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.644285 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73789760-f9bd-44e0-bf9b-c1864e5f8803-config-data\") pod \"keystone-59b8cc448d-b9wnn\" (UID: \"73789760-f9bd-44e0-bf9b-c1864e5f8803\") " pod="openstack-kuttl-tests/keystone-59b8cc448d-b9wnn" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.647952 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/73789760-f9bd-44e0-bf9b-c1864e5f8803-fernet-keys\") pod \"keystone-59b8cc448d-b9wnn\" (UID: \"73789760-f9bd-44e0-bf9b-c1864e5f8803\") " pod="openstack-kuttl-tests/keystone-59b8cc448d-b9wnn" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.664321 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5b57aa66-3c71-422d-b029-86cb0e3a9aef-internal-tls-certs\") pod \"neutron-6d8f9956fd-j6jg4\" (UID: \"5b57aa66-3c71-422d-b029-86cb0e3a9aef\") " pod="openstack-kuttl-tests/neutron-6d8f9956fd-j6jg4" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.664552 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/5b57aa66-3c71-422d-b029-86cb0e3a9aef-ovndb-tls-certs\") pod \"neutron-6d8f9956fd-j6jg4\" (UID: \"5b57aa66-3c71-422d-b029-86cb0e3a9aef\") " pod="openstack-kuttl-tests/neutron-6d8f9956fd-j6jg4" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.664592 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hnmc5\" (UniqueName: \"kubernetes.io/projected/5b57aa66-3c71-422d-b029-86cb0e3a9aef-kube-api-access-hnmc5\") pod \"neutron-6d8f9956fd-j6jg4\" (UID: \"5b57aa66-3c71-422d-b029-86cb0e3a9aef\") " pod="openstack-kuttl-tests/neutron-6d8f9956fd-j6jg4" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.664655 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b57aa66-3c71-422d-b029-86cb0e3a9aef-combined-ca-bundle\") pod \"neutron-6d8f9956fd-j6jg4\" (UID: \"5b57aa66-3c71-422d-b029-86cb0e3a9aef\") " pod="openstack-kuttl-tests/neutron-6d8f9956fd-j6jg4" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.664695 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/5b57aa66-3c71-422d-b029-86cb0e3a9aef-httpd-config\") pod \"neutron-6d8f9956fd-j6jg4\" (UID: \"5b57aa66-3c71-422d-b029-86cb0e3a9aef\") " pod="openstack-kuttl-tests/neutron-6d8f9956fd-j6jg4" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.664743 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/5b57aa66-3c71-422d-b029-86cb0e3a9aef-config\") pod \"neutron-6d8f9956fd-j6jg4\" (UID: \"5b57aa66-3c71-422d-b029-86cb0e3a9aef\") " pod="openstack-kuttl-tests/neutron-6d8f9956fd-j6jg4" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.664794 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5b57aa66-3c71-422d-b029-86cb0e3a9aef-public-tls-certs\") pod \"neutron-6d8f9956fd-j6jg4\" (UID: 
\"5b57aa66-3c71-422d-b029-86cb0e3a9aef\") " pod="openstack-kuttl-tests/neutron-6d8f9956fd-j6jg4" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.664874 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c9ec2559-f7c5-4318-9a3b-4544d222ff8e-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.668897 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tdnlj\" (UniqueName: \"kubernetes.io/projected/73789760-f9bd-44e0-bf9b-c1864e5f8803-kube-api-access-tdnlj\") pod \"keystone-59b8cc448d-b9wnn\" (UID: \"73789760-f9bd-44e0-bf9b-c1864e5f8803\") " pod="openstack-kuttl-tests/keystone-59b8cc448d-b9wnn" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.679566 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-55dfc8964d-kck7c" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.705373 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b57aa66-3c71-422d-b029-86cb0e3a9aef-combined-ca-bundle\") pod \"neutron-6d8f9956fd-j6jg4\" (UID: \"5b57aa66-3c71-422d-b029-86cb0e3a9aef\") " pod="openstack-kuttl-tests/neutron-6d8f9956fd-j6jg4" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.712760 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hnmc5\" (UniqueName: \"kubernetes.io/projected/5b57aa66-3c71-422d-b029-86cb0e3a9aef-kube-api-access-hnmc5\") pod \"neutron-6d8f9956fd-j6jg4\" (UID: \"5b57aa66-3c71-422d-b029-86cb0e3a9aef\") " pod="openstack-kuttl-tests/neutron-6d8f9956fd-j6jg4" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.740627 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-api-7cccd8f896-m6rvs"] Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.747764 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5b57aa66-3c71-422d-b029-86cb0e3a9aef-public-tls-certs\") pod \"neutron-6d8f9956fd-j6jg4\" (UID: \"5b57aa66-3c71-422d-b029-86cb0e3a9aef\") " pod="openstack-kuttl-tests/neutron-6d8f9956fd-j6jg4" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.748288 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5b57aa66-3c71-422d-b029-86cb0e3a9aef-internal-tls-certs\") pod \"neutron-6d8f9956fd-j6jg4\" (UID: \"5b57aa66-3c71-422d-b029-86cb0e3a9aef\") " pod="openstack-kuttl-tests/neutron-6d8f9956fd-j6jg4" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.748699 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/5b57aa66-3c71-422d-b029-86cb0e3a9aef-ovndb-tls-certs\") pod \"neutron-6d8f9956fd-j6jg4\" (UID: \"5b57aa66-3c71-422d-b029-86cb0e3a9aef\") " pod="openstack-kuttl-tests/neutron-6d8f9956fd-j6jg4" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.761824 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/5b57aa66-3c71-422d-b029-86cb0e3a9aef-httpd-config\") pod \"neutron-6d8f9956fd-j6jg4\" (UID: \"5b57aa66-3c71-422d-b029-86cb0e3a9aef\") " pod="openstack-kuttl-tests/neutron-6d8f9956fd-j6jg4" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.786984 4558 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/5b57aa66-3c71-422d-b029-86cb0e3a9aef-config\") pod \"neutron-6d8f9956fd-j6jg4\" (UID: \"5b57aa66-3c71-422d-b029-86cb0e3a9aef\") " pod="openstack-kuttl-tests/neutron-6d8f9956fd-j6jg4" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.793257 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage13-crc" (UniqueName: "kubernetes.io/local-volume/local-storage13-crc") on node "crc" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.842517 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.843422 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"c9ec2559-f7c5-4318-9a3b-4544d222ff8e","Type":"ContainerDied","Data":"93b40d8da0c4b87ba8085be29e8668598179c32876925320f6cb4f333502e422"} Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.843474 4558 scope.go:117] "RemoveContainer" containerID="0b6ba9125bd1b63702f6dc81feb7c84047318daad92c337b8f2184e3028bcdfb" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.852227 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-dd7c4cf57-h4b8r" event={"ID":"4bb0e311-8438-4794-929d-791f0285afaf","Type":"ContainerStarted","Data":"35397d3a5d4d82dd46f53954cc007539946af832f780d8b2e57180fff3b5a662"} Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.852286 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c9ec2559-f7c5-4318-9a3b-4544d222ff8e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c9ec2559-f7c5-4318-9a3b-4544d222ff8e" (UID: "c9ec2559-f7c5-4318-9a3b-4544d222ff8e"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.862637 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"5391541f-5057-4d20-ae1e-bbc88e6b33a4","Type":"ContainerDied","Data":"7d13902c841d8bbee2f4f76a4ae23a2a6890b13d14ecfc5f4bf5f2aa27e2f967"} Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.862910 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7d13902c841d8bbee2f4f76a4ae23a2a6890b13d14ecfc5f4bf5f2aa27e2f967" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.865465 4558 generic.go:334] "Generic (PLEG): container finished" podID="91fac67e-69a9-4d88-9136-0d2484ca0dce" containerID="61659b3cdbe7237e6c49c005bd8d5ec58e23a181bee16a952d93828bb6852edf" exitCode=0 Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.871433 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"91fac67e-69a9-4d88-9136-0d2484ca0dce","Type":"ContainerDied","Data":"61659b3cdbe7237e6c49c005bd8d5ec58e23a181bee16a952d93828bb6852edf"} Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.874150 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.874193 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9ec2559-f7c5-4318-9a3b-4544d222ff8e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.888803 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"5a046f3a-bea5-4aea-8081-1c5994ecdbd9","Type":"ContainerDied","Data":"b1d2e1fd3bf44cc2f1cfad03805180162622dea0f9c59f40c177d95250384b8b"} Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.888838 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b1d2e1fd3bf44cc2f1cfad03805180162622dea0f9c59f40c177d95250384b8b" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.906283 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c9ec2559-f7c5-4318-9a3b-4544d222ff8e-config-data" (OuterVolumeSpecName: "config-data") pod "c9ec2559-f7c5-4318-9a3b-4544d222ff8e" (UID: "c9ec2559-f7c5-4318-9a3b-4544d222ff8e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.909565 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-nb-0_e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c/ovsdbserver-nb/0.log" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.909666 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c","Type":"ContainerDied","Data":"e0768009b6e30ed978a8014343d3fbe81202632e1022bdbfd5c82eef6ba8b41a"} Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.909768 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:35:51 crc kubenswrapper[4558]: E0120 17:35:51.964083 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 7b466bcc44125b12b3728f5c2f0dd6ae9956b945bf663448fef7df702634964d is running failed: container process not found" containerID="7b466bcc44125b12b3728f5c2f0dd6ae9956b945bf663448fef7df702634964d" cmd=["/usr/bin/pidof","ovsdb-server"] Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.965468 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-sb-0_532ef149-f32a-4b6e-8d0c-458a04952d34/ovsdbserver-sb/0.log" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.965622 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"532ef149-f32a-4b6e-8d0c-458a04952d34","Type":"ContainerDied","Data":"c12a03e6473f4843b945a7bd727f639e663e7def6ac3ec56127de8b4b910d659"} Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.965725 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c12a03e6473f4843b945a7bd727f639e663e7def6ac3ec56127de8b4b910d659" Jan 20 17:35:51 crc kubenswrapper[4558]: E0120 17:35:51.965807 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 7b466bcc44125b12b3728f5c2f0dd6ae9956b945bf663448fef7df702634964d is running failed: container process not found" containerID="7b466bcc44125b12b3728f5c2f0dd6ae9956b945bf663448fef7df702634964d" cmd=["/usr/bin/pidof","ovsdb-server"] Jan 20 17:35:51 crc kubenswrapper[4558]: E0120 17:35:51.968664 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 7b466bcc44125b12b3728f5c2f0dd6ae9956b945bf663448fef7df702634964d is running failed: container process not found" containerID="7b466bcc44125b12b3728f5c2f0dd6ae9956b945bf663448fef7df702634964d" cmd=["/usr/bin/pidof","ovsdb-server"] Jan 20 17:35:51 crc kubenswrapper[4558]: E0120 17:35:51.968773 4558 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 7b466bcc44125b12b3728f5c2f0dd6ae9956b945bf663448fef7df702634964d is running failed: container process not found" probeType="Readiness" pod="openstack-kuttl-tests/ovsdbserver-sb-0" podUID="532ef149-f32a-4b6e-8d0c-458a04952d34" containerName="ovsdbserver-sb" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.968852 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage16-crc" (UniqueName: "kubernetes.io/local-volume/local-storage16-crc") on node "crc" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.971398 4558 generic.go:334] "Generic (PLEG): container finished" podID="b1ce826a-c52c-42c6-8b67-6074b78c9fb7" containerID="1115d7fa22eeb3cf4d6e899963fd5f9d10ada47b94c34b2d601965731bba164c" exitCode=0 Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.971465 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"b1ce826a-c52c-42c6-8b67-6074b78c9fb7","Type":"ContainerDied","Data":"1115d7fa22eeb3cf4d6e899963fd5f9d10ada47b94c34b2d601965731bba164c"} Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.978262 4558 reconciler_common.go:293] "Volume detached for volume 
\"local-storage16-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage16-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.978310 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9ec2559-f7c5-4318-9a3b-4544d222ff8e-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.995498 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c" (UID: "e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:51 crc kubenswrapper[4558]: I0120 17:35:51.996873 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-7d7dbcb65b-ttg5w" event={"ID":"1ab0a776-6551-4ef8-bd41-0ef0e58d892d","Type":"ContainerStarted","Data":"489226669dfe047f0a2a36de4e641f09811f99e9a771910dbd05ac7a99055622"} Jan 20 17:35:52 crc kubenswrapper[4558]: I0120 17:35:51.999971 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-api-f45698ff6-c9mtk" podUID="7de6386e-7693-430d-9753-2d7fa8c31b5d" containerName="barbican-api-log" containerID="cri-o://65853762fe6cac36e4de6676f22c99039ab79573b354ec57daadc513c082bba2" gracePeriod=30 Jan 20 17:35:52 crc kubenswrapper[4558]: I0120 17:35:52.000246 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"099193f1-dcf1-4c0a-beeb-fac5f0824cb8","Type":"ContainerDied","Data":"53ba20480eaba79bd67435843f2f6a3f80cc9813358a7b7eca6feb2250f3c8b4"} Jan 20 17:35:52 crc kubenswrapper[4558]: I0120 17:35:52.000269 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="53ba20480eaba79bd67435843f2f6a3f80cc9813358a7b7eca6feb2250f3c8b4" Jan 20 17:35:52 crc kubenswrapper[4558]: I0120 17:35:52.003081 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-api-f45698ff6-c9mtk" podUID="7de6386e-7693-430d-9753-2d7fa8c31b5d" containerName="barbican-api" containerID="cri-o://af363f8fa728755ec981997979783ddd99710b030181fea30ee5db9adb704c8e" gracePeriod=30 Jan 20 17:35:52 crc kubenswrapper[4558]: I0120 17:35:52.017305 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack-kuttl-tests/barbican-api-f45698ff6-c9mtk" podUID="7de6386e-7693-430d-9753-2d7fa8c31b5d" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.0.131:9311/healthcheck\": EOF" Jan 20 17:35:52 crc kubenswrapper[4558]: I0120 17:35:52.023455 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/barbican-api-f45698ff6-c9mtk" podUID="7de6386e-7693-430d-9753-2d7fa8c31b5d" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.0.131:9311/healthcheck\": EOF" Jan 20 17:35:52 crc kubenswrapper[4558]: I0120 17:35:52.023503 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/barbican-api-f45698ff6-c9mtk" podUID="7de6386e-7693-430d-9753-2d7fa8c31b5d" containerName="barbican-api" probeResult="failure" output="Get \"https://10.217.0.131:9311/healthcheck\": EOF" Jan 20 17:35:52 crc kubenswrapper[4558]: I0120 17:35:52.030670 4558 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c-ovsdbserver-nb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-nb-tls-certs") pod "e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c" (UID: "e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c"). InnerVolumeSpecName "ovsdbserver-nb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:52 crc kubenswrapper[4558]: I0120 17:35:52.082056 4558 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c-ovsdbserver-nb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:52 crc kubenswrapper[4558]: I0120 17:35:52.082372 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:52 crc kubenswrapper[4558]: I0120 17:35:52.098524 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c" (UID: "e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:52 crc kubenswrapper[4558]: I0120 17:35:52.164308 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-67cbb6f96d-89sm2"] Jan 20 17:35:52 crc kubenswrapper[4558]: I0120 17:35:52.186731 4558 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:52 crc kubenswrapper[4558]: I0120 17:35:52.255033 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-7749bf6d58-dwpbl"] Jan 20 17:35:52 crc kubenswrapper[4558]: I0120 17:35:52.636317 4558 scope.go:117] "RemoveContainer" containerID="de49ed715fad547662149d1b7edd446a91ec26a644380970de0736db54838406" Jan 20 17:35:52 crc kubenswrapper[4558]: I0120 17:35:52.752722 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-77fd7f9ff9-zs7mz"] Jan 20 17:35:52 crc kubenswrapper[4558]: I0120 17:35:52.809392 4558 scope.go:117] "RemoveContainer" containerID="ea31e1aad28c22c0366c0c296a61f01044fbf0caf04c9f039be12136bde4291d" Jan 20 17:35:52 crc kubenswrapper[4558]: I0120 17:35:52.843784 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:35:52 crc kubenswrapper[4558]: I0120 17:35:52.869763 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-59b8cc448d-b9wnn" Jan 20 17:35:52 crc kubenswrapper[4558]: I0120 17:35:52.880046 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:35:52 crc kubenswrapper[4558]: I0120 17:35:52.880412 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:35:52 crc kubenswrapper[4558]: I0120 17:35:52.895777 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:35:52 crc kubenswrapper[4558]: E0120 17:35:52.896449 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5391541f-5057-4d20-ae1e-bbc88e6b33a4" containerName="memcached" Jan 20 17:35:52 crc kubenswrapper[4558]: I0120 17:35:52.896522 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="5391541f-5057-4d20-ae1e-bbc88e6b33a4" containerName="memcached" Jan 20 17:35:52 crc kubenswrapper[4558]: I0120 17:35:52.896789 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="5391541f-5057-4d20-ae1e-bbc88e6b33a4" containerName="memcached" Jan 20 17:35:52 crc kubenswrapper[4558]: I0120 17:35:52.898067 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:35:52 crc kubenswrapper[4558]: I0120 17:35:52.914482 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ovndbcluster-nb-ovndbs" Jan 20 17:35:52 crc kubenswrapper[4558]: I0120 17:35:52.914658 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ovncluster-ovndbcluster-nb-dockercfg-djpzb" Jan 20 17:35:52 crc kubenswrapper[4558]: I0120 17:35:52.914773 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovndbcluster-nb-config" Jan 20 17:35:52 crc kubenswrapper[4558]: I0120 17:35:52.924565 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovndbcluster-nb-scripts" Jan 20 17:35:52 crc kubenswrapper[4558]: I0120 17:35:52.939125 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:35:52 crc kubenswrapper[4558]: I0120 17:35:52.986595 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-6d8f9956fd-j6jg4" Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.035388 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-sb-0_532ef149-f32a-4b6e-8d0c-458a04952d34/ovsdbserver-sb/0.log" Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.035476 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.048203 4558 scope.go:117] "RemoveContainer" containerID="b5bed1bf3813bd740cfee2d99ab02e9629a1b8a01adb60f10e7219ffeb492771" Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.054924 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.079037 4558 generic.go:334] "Generic (PLEG): container finished" podID="7de6386e-7693-430d-9753-2d7fa8c31b5d" containerID="65853762fe6cac36e4de6676f22c99039ab79573b354ec57daadc513c082bba2" exitCode=143 Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.079087 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-f45698ff6-c9mtk" event={"ID":"7de6386e-7693-430d-9753-2d7fa8c31b5d","Type":"ContainerDied","Data":"65853762fe6cac36e4de6676f22c99039ab79573b354ec57daadc513c082bba2"} Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.079367 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9x6z5\" (UniqueName: \"kubernetes.io/projected/5391541f-5057-4d20-ae1e-bbc88e6b33a4-kube-api-access-9x6z5\") pod \"5391541f-5057-4d20-ae1e-bbc88e6b33a4\" (UID: \"5391541f-5057-4d20-ae1e-bbc88e6b33a4\") " Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.079482 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5391541f-5057-4d20-ae1e-bbc88e6b33a4-kolla-config\") pod \"5391541f-5057-4d20-ae1e-bbc88e6b33a4\" (UID: \"5391541f-5057-4d20-ae1e-bbc88e6b33a4\") " Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.079566 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/5391541f-5057-4d20-ae1e-bbc88e6b33a4-memcached-tls-certs\") pod \"5391541f-5057-4d20-ae1e-bbc88e6b33a4\" (UID: \"5391541f-5057-4d20-ae1e-bbc88e6b33a4\") " Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.079604 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5391541f-5057-4d20-ae1e-bbc88e6b33a4-combined-ca-bundle\") pod \"5391541f-5057-4d20-ae1e-bbc88e6b33a4\" (UID: \"5391541f-5057-4d20-ae1e-bbc88e6b33a4\") " Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.079675 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5391541f-5057-4d20-ae1e-bbc88e6b33a4-config-data\") pod \"5391541f-5057-4d20-ae1e-bbc88e6b33a4\" (UID: \"5391541f-5057-4d20-ae1e-bbc88e6b33a4\") " Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.080001 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/98f8dc8b-6446-4ba8-b37b-a11ae7414b65-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"98f8dc8b-6446-4ba8-b37b-a11ae7414b65\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.080048 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/98f8dc8b-6446-4ba8-b37b-a11ae7414b65-config\") pod \"ovsdbserver-nb-0\" (UID: \"98f8dc8b-6446-4ba8-b37b-a11ae7414b65\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.080078 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/98f8dc8b-6446-4ba8-b37b-a11ae7414b65-scripts\") pod \"ovsdbserver-nb-0\" (UID: 
\"98f8dc8b-6446-4ba8-b37b-a11ae7414b65\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.080098 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"ovsdbserver-nb-0\" (UID: \"98f8dc8b-6446-4ba8-b37b-a11ae7414b65\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.080134 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/98f8dc8b-6446-4ba8-b37b-a11ae7414b65-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"98f8dc8b-6446-4ba8-b37b-a11ae7414b65\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.080172 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/98f8dc8b-6446-4ba8-b37b-a11ae7414b65-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"98f8dc8b-6446-4ba8-b37b-a11ae7414b65\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.080193 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bzkqr\" (UniqueName: \"kubernetes.io/projected/98f8dc8b-6446-4ba8-b37b-a11ae7414b65-kube-api-access-bzkqr\") pod \"ovsdbserver-nb-0\" (UID: \"98f8dc8b-6446-4ba8-b37b-a11ae7414b65\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.080219 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/98f8dc8b-6446-4ba8-b37b-a11ae7414b65-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"98f8dc8b-6446-4ba8-b37b-a11ae7414b65\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.081308 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5391541f-5057-4d20-ae1e-bbc88e6b33a4-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "5391541f-5057-4d20-ae1e-bbc88e6b33a4" (UID: "5391541f-5057-4d20-ae1e-bbc88e6b33a4"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.082048 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5391541f-5057-4d20-ae1e-bbc88e6b33a4-config-data" (OuterVolumeSpecName: "config-data") pod "5391541f-5057-4d20-ae1e-bbc88e6b33a4" (UID: "5391541f-5057-4d20-ae1e-bbc88e6b33a4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.083717 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.084476 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.093997 4558 generic.go:334] "Generic (PLEG): container finished" podID="a0122392-1c61-4133-b05f-cc6a622abaf9" containerID="c5868b91333e3f1c6356506d56295db65bbb6580ddcdb5dfaafeb5e8213164a5" exitCode=0 Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.094076 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"a0122392-1c61-4133-b05f-cc6a622abaf9","Type":"ContainerDied","Data":"c5868b91333e3f1c6356506d56295db65bbb6580ddcdb5dfaafeb5e8213164a5"} Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.126580 4558 generic.go:334] "Generic (PLEG): container finished" podID="9b9c1cf7-0cbb-4bb3-a764-8a8fa446c8a8" containerID="f80866aad1b997b9edf8ab7c0962cd1436981eced3f24d965081cee0a0b92d9a" exitCode=0 Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.126660 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"9b9c1cf7-0cbb-4bb3-a764-8a8fa446c8a8","Type":"ContainerDied","Data":"f80866aad1b997b9edf8ab7c0962cd1436981eced3f24d965081cee0a0b92d9a"} Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.153076 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-7cccd8f896-m6rvs" event={"ID":"c35b601d-7c76-46a4-9703-7a52ef1bbf73","Type":"ContainerStarted","Data":"0bc7e2c3dbc064ab76b9b4762dd97b35dee711a2da893fa3da5a26872cf6121d"} Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.154290 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-worker-dd549b649-grdrm"] Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.156802 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-7749bf6d58-dwpbl" event={"ID":"9bca6361-b1d7-4953-b4bd-2bbda0f90581","Type":"ContainerStarted","Data":"91f8246f419fbaf90740183bfaf7574ba3d21f60910d06ed9ac2bc540e17f90a"} Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.157546 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5391541f-5057-4d20-ae1e-bbc88e6b33a4-kube-api-access-9x6z5" (OuterVolumeSpecName: "kube-api-access-9x6z5") pod "5391541f-5057-4d20-ae1e-bbc88e6b33a4" (UID: "5391541f-5057-4d20-ae1e-bbc88e6b33a4"). InnerVolumeSpecName "kube-api-access-9x6z5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.183235 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-67cbb6f96d-89sm2" event={"ID":"3d958c0e-f259-45a7-9c66-9d2b82c92980","Type":"ContainerStarted","Data":"ee30ee6d1f48b7280d7892c450e3c16ef850ba6629614ab41753839c5eba2213"} Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.184821 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/099193f1-dcf1-4c0a-beeb-fac5f0824cb8-nova-novncproxy-tls-certs\") pod \"099193f1-dcf1-4c0a-beeb-fac5f0824cb8\" (UID: \"099193f1-dcf1-4c0a-beeb-fac5f0824cb8\") " Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.184874 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/532ef149-f32a-4b6e-8d0c-458a04952d34-metrics-certs-tls-certs\") pod \"532ef149-f32a-4b6e-8d0c-458a04952d34\" (UID: \"532ef149-f32a-4b6e-8d0c-458a04952d34\") " Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.185274 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/532ef149-f32a-4b6e-8d0c-458a04952d34-ovsdb-rundir\") pod \"532ef149-f32a-4b6e-8d0c-458a04952d34\" (UID: \"532ef149-f32a-4b6e-8d0c-458a04952d34\") " Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.185449 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/532ef149-f32a-4b6e-8d0c-458a04952d34-ovsdbserver-sb-tls-certs\") pod \"532ef149-f32a-4b6e-8d0c-458a04952d34\" (UID: \"532ef149-f32a-4b6e-8d0c-458a04952d34\") " Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.185489 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"91fac67e-69a9-4d88-9136-0d2484ca0dce\" (UID: \"91fac67e-69a9-4d88-9136-0d2484ca0dce\") " Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.185535 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-sb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"532ef149-f32a-4b6e-8d0c-458a04952d34\" (UID: \"532ef149-f32a-4b6e-8d0c-458a04952d34\") " Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.185674 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mmm6k\" (UniqueName: \"kubernetes.io/projected/91fac67e-69a9-4d88-9136-0d2484ca0dce-kube-api-access-mmm6k\") pod \"91fac67e-69a9-4d88-9136-0d2484ca0dce\" (UID: \"91fac67e-69a9-4d88-9136-0d2484ca0dce\") " Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.185718 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/91fac67e-69a9-4d88-9136-0d2484ca0dce-scripts\") pod \"91fac67e-69a9-4d88-9136-0d2484ca0dce\" (UID: \"91fac67e-69a9-4d88-9136-0d2484ca0dce\") " Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.185777 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/532ef149-f32a-4b6e-8d0c-458a04952d34-scripts\") pod \"532ef149-f32a-4b6e-8d0c-458a04952d34\" (UID: \"532ef149-f32a-4b6e-8d0c-458a04952d34\") " Jan 20 
17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.185805 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/91fac67e-69a9-4d88-9136-0d2484ca0dce-httpd-run\") pod \"91fac67e-69a9-4d88-9136-0d2484ca0dce\" (UID: \"91fac67e-69a9-4d88-9136-0d2484ca0dce\") " Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.185974 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5a046f3a-bea5-4aea-8081-1c5994ecdbd9-logs\") pod \"5a046f3a-bea5-4aea-8081-1c5994ecdbd9\" (UID: \"5a046f3a-bea5-4aea-8081-1c5994ecdbd9\") " Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.186061 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/099193f1-dcf1-4c0a-beeb-fac5f0824cb8-config-data\") pod \"099193f1-dcf1-4c0a-beeb-fac5f0824cb8\" (UID: \"099193f1-dcf1-4c0a-beeb-fac5f0824cb8\") " Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.186210 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgqkq\" (UniqueName: \"kubernetes.io/projected/532ef149-f32a-4b6e-8d0c-458a04952d34-kube-api-access-zgqkq\") pod \"532ef149-f32a-4b6e-8d0c-458a04952d34\" (UID: \"532ef149-f32a-4b6e-8d0c-458a04952d34\") " Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.186259 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a046f3a-bea5-4aea-8081-1c5994ecdbd9-combined-ca-bundle\") pod \"5a046f3a-bea5-4aea-8081-1c5994ecdbd9\" (UID: \"5a046f3a-bea5-4aea-8081-1c5994ecdbd9\") " Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.186281 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a046f3a-bea5-4aea-8081-1c5994ecdbd9-config-data\") pod \"5a046f3a-bea5-4aea-8081-1c5994ecdbd9\" (UID: \"5a046f3a-bea5-4aea-8081-1c5994ecdbd9\") " Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.186298 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/532ef149-f32a-4b6e-8d0c-458a04952d34-combined-ca-bundle\") pod \"532ef149-f32a-4b6e-8d0c-458a04952d34\" (UID: \"532ef149-f32a-4b6e-8d0c-458a04952d34\") " Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.186418 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-khsdd\" (UniqueName: \"kubernetes.io/projected/099193f1-dcf1-4c0a-beeb-fac5f0824cb8-kube-api-access-khsdd\") pod \"099193f1-dcf1-4c0a-beeb-fac5f0824cb8\" (UID: \"099193f1-dcf1-4c0a-beeb-fac5f0824cb8\") " Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.186479 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/91fac67e-69a9-4d88-9136-0d2484ca0dce-logs\") pod \"91fac67e-69a9-4d88-9136-0d2484ca0dce\" (UID: \"91fac67e-69a9-4d88-9136-0d2484ca0dce\") " Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.186510 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/532ef149-f32a-4b6e-8d0c-458a04952d34-config\") pod \"532ef149-f32a-4b6e-8d0c-458a04952d34\" (UID: \"532ef149-f32a-4b6e-8d0c-458a04952d34\") " Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.186531 4558 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/099193f1-dcf1-4c0a-beeb-fac5f0824cb8-combined-ca-bundle\") pod \"099193f1-dcf1-4c0a-beeb-fac5f0824cb8\" (UID: \"099193f1-dcf1-4c0a-beeb-fac5f0824cb8\") " Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.186569 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5a046f3a-bea5-4aea-8081-1c5994ecdbd9-public-tls-certs\") pod \"5a046f3a-bea5-4aea-8081-1c5994ecdbd9\" (UID: \"5a046f3a-bea5-4aea-8081-1c5994ecdbd9\") " Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.186588 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gccht\" (UniqueName: \"kubernetes.io/projected/5a046f3a-bea5-4aea-8081-1c5994ecdbd9-kube-api-access-gccht\") pod \"5a046f3a-bea5-4aea-8081-1c5994ecdbd9\" (UID: \"5a046f3a-bea5-4aea-8081-1c5994ecdbd9\") " Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.186604 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/099193f1-dcf1-4c0a-beeb-fac5f0824cb8-vencrypt-tls-certs\") pod \"099193f1-dcf1-4c0a-beeb-fac5f0824cb8\" (UID: \"099193f1-dcf1-4c0a-beeb-fac5f0824cb8\") " Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.186640 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5a046f3a-bea5-4aea-8081-1c5994ecdbd9-internal-tls-certs\") pod \"5a046f3a-bea5-4aea-8081-1c5994ecdbd9\" (UID: \"5a046f3a-bea5-4aea-8081-1c5994ecdbd9\") " Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.186670 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/91fac67e-69a9-4d88-9136-0d2484ca0dce-internal-tls-certs\") pod \"91fac67e-69a9-4d88-9136-0d2484ca0dce\" (UID: \"91fac67e-69a9-4d88-9136-0d2484ca0dce\") " Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.186689 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/91fac67e-69a9-4d88-9136-0d2484ca0dce-config-data\") pod \"91fac67e-69a9-4d88-9136-0d2484ca0dce\" (UID: \"91fac67e-69a9-4d88-9136-0d2484ca0dce\") " Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.186721 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91fac67e-69a9-4d88-9136-0d2484ca0dce-combined-ca-bundle\") pod \"91fac67e-69a9-4d88-9136-0d2484ca0dce\" (UID: \"91fac67e-69a9-4d88-9136-0d2484ca0dce\") " Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.187621 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/98f8dc8b-6446-4ba8-b37b-a11ae7414b65-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"98f8dc8b-6446-4ba8-b37b-a11ae7414b65\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.187724 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/98f8dc8b-6446-4ba8-b37b-a11ae7414b65-config\") pod \"ovsdbserver-nb-0\" (UID: \"98f8dc8b-6446-4ba8-b37b-a11ae7414b65\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 
20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.187799 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/98f8dc8b-6446-4ba8-b37b-a11ae7414b65-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"98f8dc8b-6446-4ba8-b37b-a11ae7414b65\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.187829 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"ovsdbserver-nb-0\" (UID: \"98f8dc8b-6446-4ba8-b37b-a11ae7414b65\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.187915 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/98f8dc8b-6446-4ba8-b37b-a11ae7414b65-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"98f8dc8b-6446-4ba8-b37b-a11ae7414b65\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.187956 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/98f8dc8b-6446-4ba8-b37b-a11ae7414b65-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"98f8dc8b-6446-4ba8-b37b-a11ae7414b65\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.187992 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bzkqr\" (UniqueName: \"kubernetes.io/projected/98f8dc8b-6446-4ba8-b37b-a11ae7414b65-kube-api-access-bzkqr\") pod \"ovsdbserver-nb-0\" (UID: \"98f8dc8b-6446-4ba8-b37b-a11ae7414b65\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.188046 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/98f8dc8b-6446-4ba8-b37b-a11ae7414b65-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"98f8dc8b-6446-4ba8-b37b-a11ae7414b65\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.188407 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5391541f-5057-4d20-ae1e-bbc88e6b33a4-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.188420 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9x6z5\" (UniqueName: \"kubernetes.io/projected/5391541f-5057-4d20-ae1e-bbc88e6b33a4-kube-api-access-9x6z5\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.188430 4558 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5391541f-5057-4d20-ae1e-bbc88e6b33a4-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.193029 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/91fac67e-69a9-4d88-9136-0d2484ca0dce-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "91fac67e-69a9-4d88-9136-0d2484ca0dce" (UID: "91fac67e-69a9-4d88-9136-0d2484ca0dce"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.195584 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"ovsdbserver-nb-0\" (UID: \"98f8dc8b-6446-4ba8-b37b-a11ae7414b65\") device mount path \"/mnt/openstack/pv13\"" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.196993 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-77fd7f9ff9-zs7mz" event={"ID":"89eb3a4c-0235-4583-ad0a-d015228d8d6b","Type":"ContainerStarted","Data":"3574ce4b764847fbce0cd2ee876e1f434a17b27120ecaf4e6218396e48f814f0"} Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.208353 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5a046f3a-bea5-4aea-8081-1c5994ecdbd9-logs" (OuterVolumeSpecName: "logs") pod "5a046f3a-bea5-4aea-8081-1c5994ecdbd9" (UID: "5a046f3a-bea5-4aea-8081-1c5994ecdbd9"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.209107 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/98f8dc8b-6446-4ba8-b37b-a11ae7414b65-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"98f8dc8b-6446-4ba8-b37b-a11ae7414b65\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.212236 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5a046f3a-bea5-4aea-8081-1c5994ecdbd9-kube-api-access-gccht" (OuterVolumeSpecName: "kube-api-access-gccht") pod "5a046f3a-bea5-4aea-8081-1c5994ecdbd9" (UID: "5a046f3a-bea5-4aea-8081-1c5994ecdbd9"). InnerVolumeSpecName "kube-api-access-gccht". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.232223 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/98f8dc8b-6446-4ba8-b37b-a11ae7414b65-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"98f8dc8b-6446-4ba8-b37b-a11ae7414b65\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.235875 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage15-crc" (OuterVolumeSpecName: "glance") pod "91fac67e-69a9-4d88-9136-0d2484ca0dce" (UID: "91fac67e-69a9-4d88-9136-0d2484ca0dce"). InnerVolumeSpecName "local-storage15-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.238995 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/98f8dc8b-6446-4ba8-b37b-a11ae7414b65-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"98f8dc8b-6446-4ba8-b37b-a11ae7414b65\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.248245 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.248656 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/98f8dc8b-6446-4ba8-b37b-a11ae7414b65-config\") pod \"ovsdbserver-nb-0\" (UID: \"98f8dc8b-6446-4ba8-b37b-a11ae7414b65\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.249338 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/91fac67e-69a9-4d88-9136-0d2484ca0dce-logs" (OuterVolumeSpecName: "logs") pod "91fac67e-69a9-4d88-9136-0d2484ca0dce" (UID: "91fac67e-69a9-4d88-9136-0d2484ca0dce"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.250899 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/532ef149-f32a-4b6e-8d0c-458a04952d34-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "532ef149-f32a-4b6e-8d0c-458a04952d34" (UID: "532ef149-f32a-4b6e-8d0c-458a04952d34"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.251355 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"91fac67e-69a9-4d88-9136-0d2484ca0dce","Type":"ContainerDied","Data":"5551c79462136929334b1ffe858711f0e5df3f4ccfbeeaa390c0be33ddf3d7d4"} Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.251395 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-6d4445bf46-57dqj"] Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.252078 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/532ef149-f32a-4b6e-8d0c-458a04952d34-scripts" (OuterVolumeSpecName: "scripts") pod "532ef149-f32a-4b6e-8d0c-458a04952d34" (UID: "532ef149-f32a-4b6e-8d0c-458a04952d34"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.252149 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/532ef149-f32a-4b6e-8d0c-458a04952d34-config" (OuterVolumeSpecName: "config") pod "532ef149-f32a-4b6e-8d0c-458a04952d34" (UID: "532ef149-f32a-4b6e-8d0c-458a04952d34"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.252980 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/98f8dc8b-6446-4ba8-b37b-a11ae7414b65-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"98f8dc8b-6446-4ba8-b37b-a11ae7414b65\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.253830 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/98f8dc8b-6446-4ba8-b37b-a11ae7414b65-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"98f8dc8b-6446-4ba8-b37b-a11ae7414b65\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.257961 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-55dfc8964d-kck7c"] Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.259986 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/532ef149-f32a-4b6e-8d0c-458a04952d34-kube-api-access-zgqkq" (OuterVolumeSpecName: "kube-api-access-zgqkq") pod "532ef149-f32a-4b6e-8d0c-458a04952d34" (UID: "532ef149-f32a-4b6e-8d0c-458a04952d34"). InnerVolumeSpecName "kube-api-access-zgqkq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.260458 4558 generic.go:334] "Generic (PLEG): container finished" podID="db6ef9f1-8a92-4c74-b476-a24b66585268" containerID="6500342a069cbdc40f29bb0f7540a921249de5ac706a7008df5f940aa1c75eb9" exitCode=0 Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.260768 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="bbb20173-fee8-4323-afbb-f2bbbec3e978" containerName="ceilometer-central-agent" containerID="cri-o://1f1063795318c00e5b19c483c70ac74ca60c9c396081249534c260035e47ad7a" gracePeriod=30 Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.260999 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"db6ef9f1-8a92-4c74-b476-a24b66585268","Type":"ContainerDied","Data":"6500342a069cbdc40f29bb0f7540a921249de5ac706a7008df5f940aa1c75eb9"} Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.261048 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.261775 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="bbb20173-fee8-4323-afbb-f2bbbec3e978" containerName="proxy-httpd" containerID="cri-o://3ff2458b3f3311a509b35bc6e23de18f9f9253176e2e50da406ead896867e39d" gracePeriod=30 Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.261888 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="bbb20173-fee8-4323-afbb-f2bbbec3e978" containerName="ceilometer-notification-agent" containerID="cri-o://f9994a423f0457d16e1857b55313f41375f006cd328176af514da36879ef8b96" gracePeriod=30 Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.262429 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="bbb20173-fee8-4323-afbb-f2bbbec3e978" containerName="sg-core" containerID="cri-o://ea6d1ca32a116c466cabb20cadcc9e9af27e7b82039308f7f1df1e6178fa2a36" gracePeriod=30 Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.263833 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "ovndbcluster-sb-etc-ovn") pod "532ef149-f32a-4b6e-8d0c-458a04952d34" (UID: "532ef149-f32a-4b6e-8d0c-458a04952d34"). InnerVolumeSpecName "local-storage04-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.285402 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bzkqr\" (UniqueName: \"kubernetes.io/projected/98f8dc8b-6446-4ba8-b37b-a11ae7414b65-kube-api-access-bzkqr\") pod \"ovsdbserver-nb-0\" (UID: \"98f8dc8b-6446-4ba8-b37b-a11ae7414b65\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.294954 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/91fac67e-69a9-4d88-9136-0d2484ca0dce-kube-api-access-mmm6k" (OuterVolumeSpecName: "kube-api-access-mmm6k") pod "91fac67e-69a9-4d88-9136-0d2484ca0dce" (UID: "91fac67e-69a9-4d88-9136-0d2484ca0dce"). InnerVolumeSpecName "kube-api-access-mmm6k". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.296440 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91fac67e-69a9-4d88-9136-0d2484ca0dce-scripts" (OuterVolumeSpecName: "scripts") pod "91fac67e-69a9-4d88-9136-0d2484ca0dce" (UID: "91fac67e-69a9-4d88-9136-0d2484ca0dce"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.297959 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mmm6k\" (UniqueName: \"kubernetes.io/projected/91fac67e-69a9-4d88-9136-0d2484ca0dce-kube-api-access-mmm6k\") pod \"91fac67e-69a9-4d88-9136-0d2484ca0dce\" (UID: \"91fac67e-69a9-4d88-9136-0d2484ca0dce\") " Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.298073 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/91fac67e-69a9-4d88-9136-0d2484ca0dce-scripts\") pod \"91fac67e-69a9-4d88-9136-0d2484ca0dce\" (UID: \"91fac67e-69a9-4d88-9136-0d2484ca0dce\") " Jan 20 17:35:53 crc kubenswrapper[4558]: W0120 17:35:53.299338 4558 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/91fac67e-69a9-4d88-9136-0d2484ca0dce/volumes/kubernetes.io~projected/kube-api-access-mmm6k Jan 20 17:35:53 crc kubenswrapper[4558]: W0120 17:35:53.299381 4558 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/91fac67e-69a9-4d88-9136-0d2484ca0dce/volumes/kubernetes.io~secret/scripts Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.299394 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91fac67e-69a9-4d88-9136-0d2484ca0dce-scripts" (OuterVolumeSpecName: "scripts") pod "91fac67e-69a9-4d88-9136-0d2484ca0dce" (UID: "91fac67e-69a9-4d88-9136-0d2484ca0dce"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.299418 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/91fac67e-69a9-4d88-9136-0d2484ca0dce-kube-api-access-mmm6k" (OuterVolumeSpecName: "kube-api-access-mmm6k") pod "91fac67e-69a9-4d88-9136-0d2484ca0dce" (UID: "91fac67e-69a9-4d88-9136-0d2484ca0dce"). InnerVolumeSpecName "kube-api-access-mmm6k". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.300233 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/91fac67e-69a9-4d88-9136-0d2484ca0dce-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.300531 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/532ef149-f32a-4b6e-8d0c-458a04952d34-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.300543 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gccht\" (UniqueName: \"kubernetes.io/projected/5a046f3a-bea5-4aea-8081-1c5994ecdbd9-kube-api-access-gccht\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.300558 4558 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/532ef149-f32a-4b6e-8d0c-458a04952d34-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.300585 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") on node \"crc\" " Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.300599 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" " Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.300610 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mmm6k\" (UniqueName: \"kubernetes.io/projected/91fac67e-69a9-4d88-9136-0d2484ca0dce-kube-api-access-mmm6k\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.300618 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/91fac67e-69a9-4d88-9136-0d2484ca0dce-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.300627 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/532ef149-f32a-4b6e-8d0c-458a04952d34-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.300636 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/91fac67e-69a9-4d88-9136-0d2484ca0dce-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.300650 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5a046f3a-bea5-4aea-8081-1c5994ecdbd9-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.300658 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgqkq\" (UniqueName: \"kubernetes.io/projected/532ef149-f32a-4b6e-8d0c-458a04952d34-kube-api-access-zgqkq\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.311035 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/099193f1-dcf1-4c0a-beeb-fac5f0824cb8-kube-api-access-khsdd" (OuterVolumeSpecName: "kube-api-access-khsdd") pod "099193f1-dcf1-4c0a-beeb-fac5f0824cb8" (UID: "099193f1-dcf1-4c0a-beeb-fac5f0824cb8"). 
InnerVolumeSpecName "kube-api-access-khsdd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.312722 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-api-64bd4857c6-2s9h4"] Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.390221 4558 scope.go:117] "RemoveContainer" containerID="61659b3cdbe7237e6c49c005bd8d5ec58e23a181bee16a952d93828bb6852edf" Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.403226 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-khsdd\" (UniqueName: \"kubernetes.io/projected/099193f1-dcf1-4c0a-beeb-fac5f0824cb8-kube-api-access-khsdd\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.771406 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91fac67e-69a9-4d88-9136-0d2484ca0dce-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "91fac67e-69a9-4d88-9136-0d2484ca0dce" (UID: "91fac67e-69a9-4d88-9136-0d2484ca0dce"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.802246 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-59b8cc448d-b9wnn"] Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.824461 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91fac67e-69a9-4d88-9136-0d2484ca0dce-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.899820 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"ovsdbserver-nb-0\" (UID: \"98f8dc8b-6446-4ba8-b37b-a11ae7414b65\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.922596 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a046f3a-bea5-4aea-8081-1c5994ecdbd9-config-data" (OuterVolumeSpecName: "config-data") pod "5a046f3a-bea5-4aea-8081-1c5994ecdbd9" (UID: "5a046f3a-bea5-4aea-8081-1c5994ecdbd9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.927733 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5a046f3a-bea5-4aea-8081-1c5994ecdbd9-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.927955 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage15-crc" (UniqueName: "kubernetes.io/local-volume/local-storage15-crc") on node "crc" Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.955894 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/099193f1-dcf1-4c0a-beeb-fac5f0824cb8-config-data" (OuterVolumeSpecName: "config-data") pod "099193f1-dcf1-4c0a-beeb-fac5f0824cb8" (UID: "099193f1-dcf1-4c0a-beeb-fac5f0824cb8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.996886 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:35:53 crc kubenswrapper[4558]: I0120 17:35:53.998233 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/532ef149-f32a-4b6e-8d0c-458a04952d34-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "532ef149-f32a-4b6e-8d0c-458a04952d34" (UID: "532ef149-f32a-4b6e-8d0c-458a04952d34"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.035769 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.035803 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/099193f1-dcf1-4c0a-beeb-fac5f0824cb8-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.035816 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/532ef149-f32a-4b6e-8d0c-458a04952d34-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.103775 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91fac67e-69a9-4d88-9136-0d2484ca0dce-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "91fac67e-69a9-4d88-9136-0d2484ca0dce" (UID: "91fac67e-69a9-4d88-9136-0d2484ca0dce"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.106817 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/099193f1-dcf1-4c0a-beeb-fac5f0824cb8-nova-novncproxy-tls-certs" (OuterVolumeSpecName: "nova-novncproxy-tls-certs") pod "099193f1-dcf1-4c0a-beeb-fac5f0824cb8" (UID: "099193f1-dcf1-4c0a-beeb-fac5f0824cb8"). InnerVolumeSpecName "nova-novncproxy-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.115836 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a046f3a-bea5-4aea-8081-1c5994ecdbd9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5a046f3a-bea5-4aea-8081-1c5994ecdbd9" (UID: "5a046f3a-bea5-4aea-8081-1c5994ecdbd9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.137413 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a046f3a-bea5-4aea-8081-1c5994ecdbd9-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "5a046f3a-bea5-4aea-8081-1c5994ecdbd9" (UID: "5a046f3a-bea5-4aea-8081-1c5994ecdbd9"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.139110 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5a046f3a-bea5-4aea-8081-1c5994ecdbd9-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.139341 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/91fac67e-69a9-4d88-9136-0d2484ca0dce-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.139368 4558 reconciler_common.go:293] "Volume detached for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/099193f1-dcf1-4c0a-beeb-fac5f0824cb8-nova-novncproxy-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.139382 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a046f3a-bea5-4aea-8081-1c5994ecdbd9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.150238 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91fac67e-69a9-4d88-9136-0d2484ca0dce-config-data" (OuterVolumeSpecName: "config-data") pod "91fac67e-69a9-4d88-9136-0d2484ca0dce" (UID: "91fac67e-69a9-4d88-9136-0d2484ca0dce"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.152519 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc" Jan 20 17:35:54 crc kubenswrapper[4558]: E0120 17:35:54.183546 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="33b43d304994a43dcb87337ad0c6e08deaf4a2507647b3ba6f756482ec39e182" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Jan 20 17:35:54 crc kubenswrapper[4558]: E0120 17:35:54.185103 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="33b43d304994a43dcb87337ad0c6e08deaf4a2507647b3ba6f756482ec39e182" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Jan 20 17:35:54 crc kubenswrapper[4558]: E0120 17:35:54.186140 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="33b43d304994a43dcb87337ad0c6e08deaf4a2507647b3ba6f756482ec39e182" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Jan 20 17:35:54 crc kubenswrapper[4558]: E0120 17:35:54.186285 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/ovn-northd-0" podUID="39600642-1418-4cb4-8617-f2ecc7089e0b" containerName="ovn-northd" Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.187206 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/secret/5391541f-5057-4d20-ae1e-bbc88e6b33a4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5391541f-5057-4d20-ae1e-bbc88e6b33a4" (UID: "5391541f-5057-4d20-ae1e-bbc88e6b33a4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.187879 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/099193f1-dcf1-4c0a-beeb-fac5f0824cb8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "099193f1-dcf1-4c0a-beeb-fac5f0824cb8" (UID: "099193f1-dcf1-4c0a-beeb-fac5f0824cb8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.227926 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-6d8f9956fd-j6jg4"] Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.231615 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/532ef149-f32a-4b6e-8d0c-458a04952d34-ovsdbserver-sb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-sb-tls-certs") pod "532ef149-f32a-4b6e-8d0c-458a04952d34" (UID: "532ef149-f32a-4b6e-8d0c-458a04952d34"). InnerVolumeSpecName "ovsdbserver-sb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.243431 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/099193f1-dcf1-4c0a-beeb-fac5f0824cb8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.243457 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/91fac67e-69a9-4d88-9136-0d2484ca0dce-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.243471 4558 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/532ef149-f32a-4b6e-8d0c-458a04952d34-ovsdbserver-sb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.243482 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.243491 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5391541f-5057-4d20-ae1e-bbc88e6b33a4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.304908 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-6d4445bf46-57dqj" event={"ID":"11d1aa99-639c-451a-876e-59de6098e407","Type":"ContainerStarted","Data":"6c93cca8c9437e2f5e4d214be622632393244aadf349ae3d098813341938a3c1"} Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.307298 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-dd549b649-grdrm" event={"ID":"640de3b3-60e1-41b8-ab00-22e375bad65c","Type":"ContainerStarted","Data":"64c5a0b4f6f64a2c7351affbc5ef072d820638d5f937ae2941cbbbdbbb97d6dd"} Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.322395 4558 generic.go:334] "Generic (PLEG): container finished" 
podID="bbb20173-fee8-4323-afbb-f2bbbec3e978" containerID="3ff2458b3f3311a509b35bc6e23de18f9f9253176e2e50da406ead896867e39d" exitCode=0 Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.322514 4558 generic.go:334] "Generic (PLEG): container finished" podID="bbb20173-fee8-4323-afbb-f2bbbec3e978" containerID="ea6d1ca32a116c466cabb20cadcc9e9af27e7b82039308f7f1df1e6178fa2a36" exitCode=2 Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.322572 4558 generic.go:334] "Generic (PLEG): container finished" podID="bbb20173-fee8-4323-afbb-f2bbbec3e978" containerID="f9994a423f0457d16e1857b55313f41375f006cd328176af514da36879ef8b96" exitCode=0 Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.322619 4558 generic.go:334] "Generic (PLEG): container finished" podID="bbb20173-fee8-4323-afbb-f2bbbec3e978" containerID="1f1063795318c00e5b19c483c70ac74ca60c9c396081249534c260035e47ad7a" exitCode=0 Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.322729 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"bbb20173-fee8-4323-afbb-f2bbbec3e978","Type":"ContainerDied","Data":"3ff2458b3f3311a509b35bc6e23de18f9f9253176e2e50da406ead896867e39d"} Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.322830 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"bbb20173-fee8-4323-afbb-f2bbbec3e978","Type":"ContainerDied","Data":"ea6d1ca32a116c466cabb20cadcc9e9af27e7b82039308f7f1df1e6178fa2a36"} Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.322920 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"bbb20173-fee8-4323-afbb-f2bbbec3e978","Type":"ContainerDied","Data":"f9994a423f0457d16e1857b55313f41375f006cd328176af514da36879ef8b96"} Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.322985 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"bbb20173-fee8-4323-afbb-f2bbbec3e978","Type":"ContainerDied","Data":"1f1063795318c00e5b19c483c70ac74ca60c9c396081249534c260035e47ad7a"} Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.330713 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/532ef149-f32a-4b6e-8d0c-458a04952d34-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "532ef149-f32a-4b6e-8d0c-458a04952d34" (UID: "532ef149-f32a-4b6e-8d0c-458a04952d34"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.336620 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"a0122392-1c61-4133-b05f-cc6a622abaf9","Type":"ContainerDied","Data":"3cc11aee3513e5dcd611c51793684a73d892722ce3a04656b00dd8dbb31df8c7"} Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.336758 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3cc11aee3513e5dcd611c51793684a73d892722ce3a04656b00dd8dbb31df8c7" Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.337763 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/099193f1-dcf1-4c0a-beeb-fac5f0824cb8-vencrypt-tls-certs" (OuterVolumeSpecName: "vencrypt-tls-certs") pod "099193f1-dcf1-4c0a-beeb-fac5f0824cb8" (UID: "099193f1-dcf1-4c0a-beeb-fac5f0824cb8"). InnerVolumeSpecName "vencrypt-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.339705 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"9b9c1cf7-0cbb-4bb3-a764-8a8fa446c8a8","Type":"ContainerDied","Data":"76a8468b3bbc48f7fb9efe000d0f26373aebe12ee16ba381522258f93da08ca3"} Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.339832 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="76a8468b3bbc48f7fb9efe000d0f26373aebe12ee16ba381522258f93da08ca3" Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.346556 4558 reconciler_common.go:293] "Volume detached for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/099193f1-dcf1-4c0a-beeb-fac5f0824cb8-vencrypt-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.348838 4558 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/532ef149-f32a-4b6e-8d0c-458a04952d34-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.354216 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-7749bf6d58-dwpbl" event={"ID":"9bca6361-b1d7-4953-b4bd-2bbda0f90581","Type":"ContainerStarted","Data":"1371ec8f64533ee2b1ef24267d239bc9ffc81022e42aaedff8dc1abab24b701a"} Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.362487 4558 generic.go:334] "Generic (PLEG): container finished" podID="f4d77869-9132-4170-b50d-88389a33c597" containerID="96c4e3a692bc0dac49e1c6d07e764ff13bc464c6bdc3c29e2e82a3d65c290531" exitCode=0 Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.362532 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"f4d77869-9132-4170-b50d-88389a33c597","Type":"ContainerDied","Data":"96c4e3a692bc0dac49e1c6d07e764ff13bc464c6bdc3c29e2e82a3d65c290531"} Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.368780 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5a046f3a-bea5-4aea-8081-1c5994ecdbd9-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "5a046f3a-bea5-4aea-8081-1c5994ecdbd9" (UID: "5a046f3a-bea5-4aea-8081-1c5994ecdbd9"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.394065 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-dd7c4cf57-h4b8r" event={"ID":"4bb0e311-8438-4794-929d-791f0285afaf","Type":"ContainerStarted","Data":"999b8d65f2a7e8adcf754626b17414547aff5caec391cc7f682d54acf1ccdb23"} Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.396726 4558 generic.go:334] "Generic (PLEG): container finished" podID="833337d4-94d4-4bf6-9d0b-5791b9cc115e" containerID="b138c24e8875ddd5251012c3048e6574ab595b0a17f136458768b1a33c6227cb" exitCode=0 Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.396783 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"833337d4-94d4-4bf6-9d0b-5791b9cc115e","Type":"ContainerDied","Data":"b138c24e8875ddd5251012c3048e6574ab595b0a17f136458768b1a33c6227cb"} Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.398618 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-59b8cc448d-b9wnn" event={"ID":"73789760-f9bd-44e0-bf9b-c1864e5f8803","Type":"ContainerStarted","Data":"c373c169c88718501a4aca6353f4c3929127c2d9a1ff27e8e3ee3c889f567934"} Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.401261 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-55dfc8964d-kck7c" event={"ID":"6b77441a-39f5-4ed5-bf1b-c29900900242","Type":"ContainerStarted","Data":"a3c5ec3a558b306e30b65b8420b0b19bc99a95aeffaa62621dadeb4185b84438"} Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.405599 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-7cccd8f896-m6rvs" event={"ID":"c35b601d-7c76-46a4-9703-7a52ef1bbf73","Type":"ContainerStarted","Data":"3373f1f257436b290a1f8fd92a670c542b443e2f7b3e9ce8dcf84df8344a869e"} Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.407664 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-7d7dbcb65b-ttg5w" event={"ID":"1ab0a776-6551-4ef8-bd41-0ef0e58d892d","Type":"ContainerStarted","Data":"5ceee282e729d804923fe9fb975a64436f6260d190a2814c82d6d5c4adb2113b"} Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.410934 4558 generic.go:334] "Generic (PLEG): container finished" podID="4bd47eac-4090-4fc1-91c4-553ebd84964d" containerID="8fb64177ca582dd6f77f7548f90b08a99b652fb60686912cfa1ae24e3d6b7093" exitCode=0 Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.411001 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"4bd47eac-4090-4fc1-91c4-553ebd84964d","Type":"ContainerDied","Data":"8fb64177ca582dd6f77f7548f90b08a99b652fb60686912cfa1ae24e3d6b7093"} Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.411024 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"4bd47eac-4090-4fc1-91c4-553ebd84964d","Type":"ContainerDied","Data":"41b3f54e2b9bfb4b0a9c2c2cb167cb97f4161a9d82938df1b6dbaf7803136def"} Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.411035 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="41b3f54e2b9bfb4b0a9c2c2cb167cb97f4161a9d82938df1b6dbaf7803136def" Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.412555 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-64bd4857c6-2s9h4" 
event={"ID":"94062d42-28cb-4c8a-afa4-f51458dedc6c","Type":"ContainerStarted","Data":"59c5e37b9c318b38142437caa43ecc8cb25b5a45951476dadbd6eb66682057b8"} Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.414515 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5391541f-5057-4d20-ae1e-bbc88e6b33a4-memcached-tls-certs" (OuterVolumeSpecName: "memcached-tls-certs") pod "5391541f-5057-4d20-ae1e-bbc88e6b33a4" (UID: "5391541f-5057-4d20-ae1e-bbc88e6b33a4"). InnerVolumeSpecName "memcached-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.419715 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"db6ef9f1-8a92-4c74-b476-a24b66585268","Type":"ContainerDied","Data":"2993fdbda70ff371d15f21c84b8a5240e810ba181d38fa057ea8020b0703d009"} Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.419750 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2993fdbda70ff371d15f21c84b8a5240e810ba181d38fa057ea8020b0703d009" Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.424075 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"b1ce826a-c52c-42c6-8b67-6074b78c9fb7","Type":"ContainerDied","Data":"a1c834964cceaf00e0e994b0ec3af7afd2ea6ab12fe6ebb957debd7cce22686d"} Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.424107 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a1c834964cceaf00e0e994b0ec3af7afd2ea6ab12fe6ebb957debd7cce22686d" Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.427354 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.427392 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.427423 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-67cbb6f96d-89sm2" event={"ID":"3d958c0e-f259-45a7-9c66-9d2b82c92980","Type":"ContainerStarted","Data":"2f3a880c1014f060f55cc93ec2f22fdc0bc281cbb530d9eeb1a0031f82c5f171"} Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.427688 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.452624 4558 reconciler_common.go:293] "Volume detached for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/5391541f-5057-4d20-ae1e-bbc88e6b33a4-memcached-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.452662 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5a046f3a-bea5-4aea-8081-1c5994ecdbd9-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.586716 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c" path="/var/lib/kubelet/pods/e8a8d7c2-9fc7-48d9-bd28-2f0416b7c61c/volumes" Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.599857 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.653434 4558 scope.go:117] "RemoveContainer" containerID="ff3f57bfd0fc845e414a7a24e7f078b2e8b3a550fdb80d3739b2e449bcf27a81" Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.657624 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/db6ef9f1-8a92-4c74-b476-a24b66585268-galera-tls-certs\") pod \"db6ef9f1-8a92-4c74-b476-a24b66585268\" (UID: \"db6ef9f1-8a92-4c74-b476-a24b66585268\") " Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.657687 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/db6ef9f1-8a92-4c74-b476-a24b66585268-kolla-config\") pod \"db6ef9f1-8a92-4c74-b476-a24b66585268\" (UID: \"db6ef9f1-8a92-4c74-b476-a24b66585268\") " Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.657838 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"db6ef9f1-8a92-4c74-b476-a24b66585268\" (UID: \"db6ef9f1-8a92-4c74-b476-a24b66585268\") " Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.657917 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/db6ef9f1-8a92-4c74-b476-a24b66585268-operator-scripts\") pod \"db6ef9f1-8a92-4c74-b476-a24b66585268\" (UID: \"db6ef9f1-8a92-4c74-b476-a24b66585268\") " Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.657969 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db6ef9f1-8a92-4c74-b476-a24b66585268-combined-ca-bundle\") pod \"db6ef9f1-8a92-4c74-b476-a24b66585268\" (UID: \"db6ef9f1-8a92-4c74-b476-a24b66585268\") " Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.658036 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/db6ef9f1-8a92-4c74-b476-a24b66585268-config-data-default\") pod \"db6ef9f1-8a92-4c74-b476-a24b66585268\" (UID: \"db6ef9f1-8a92-4c74-b476-a24b66585268\") " Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.658150 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/db6ef9f1-8a92-4c74-b476-a24b66585268-config-data-generated\") pod \"db6ef9f1-8a92-4c74-b476-a24b66585268\" (UID: \"db6ef9f1-8a92-4c74-b476-a24b66585268\") " Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.658201 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dcl8m\" (UniqueName: \"kubernetes.io/projected/db6ef9f1-8a92-4c74-b476-a24b66585268-kube-api-access-dcl8m\") pod \"db6ef9f1-8a92-4c74-b476-a24b66585268\" (UID: \"db6ef9f1-8a92-4c74-b476-a24b66585268\") " Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.661497 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/db6ef9f1-8a92-4c74-b476-a24b66585268-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "db6ef9f1-8a92-4c74-b476-a24b66585268" (UID: "db6ef9f1-8a92-4c74-b476-a24b66585268"). InnerVolumeSpecName "config-data-generated". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.661718 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/db6ef9f1-8a92-4c74-b476-a24b66585268-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "db6ef9f1-8a92-4c74-b476-a24b66585268" (UID: "db6ef9f1-8a92-4c74-b476-a24b66585268"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.662358 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/db6ef9f1-8a92-4c74-b476-a24b66585268-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "db6ef9f1-8a92-4c74-b476-a24b66585268" (UID: "db6ef9f1-8a92-4c74-b476-a24b66585268"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.662423 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/db6ef9f1-8a92-4c74-b476-a24b66585268-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "db6ef9f1-8a92-4c74-b476-a24b66585268" (UID: "db6ef9f1-8a92-4c74-b476-a24b66585268"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.665328 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/db6ef9f1-8a92-4c74-b476-a24b66585268-kube-api-access-dcl8m" (OuterVolumeSpecName: "kube-api-access-dcl8m") pod "db6ef9f1-8a92-4c74-b476-a24b66585268" (UID: "db6ef9f1-8a92-4c74-b476-a24b66585268"). InnerVolumeSpecName "kube-api-access-dcl8m". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.677403 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage06-crc" (OuterVolumeSpecName: "mysql-db") pod "db6ef9f1-8a92-4c74-b476-a24b66585268" (UID: "db6ef9f1-8a92-4c74-b476-a24b66585268"). InnerVolumeSpecName "local-storage06-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.700448 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db6ef9f1-8a92-4c74-b476-a24b66585268-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "db6ef9f1-8a92-4c74-b476-a24b66585268" (UID: "db6ef9f1-8a92-4c74-b476-a24b66585268"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.717513 4558 scope.go:117] "RemoveContainer" containerID="f5234b6873e40d97dab9fc0f95b0d08db6c6a9b663a804a6d9a45e0c39abccc9" Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.721015 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db6ef9f1-8a92-4c74-b476-a24b66585268-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "db6ef9f1-8a92-4c74-b476-a24b66585268" (UID: "db6ef9f1-8a92-4c74-b476-a24b66585268"). InnerVolumeSpecName "galera-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.758817 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.762525 4558 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/db6ef9f1-8a92-4c74-b476-a24b66585268-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.762560 4558 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/db6ef9f1-8a92-4c74-b476-a24b66585268-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.762605 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" " Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.762620 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/db6ef9f1-8a92-4c74-b476-a24b66585268-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.762634 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db6ef9f1-8a92-4c74-b476-a24b66585268-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.762645 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/db6ef9f1-8a92-4c74-b476-a24b66585268-config-data-default\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.762657 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/db6ef9f1-8a92-4c74-b476-a24b66585268-config-data-generated\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.762671 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dcl8m\" (UniqueName: \"kubernetes.io/projected/db6ef9f1-8a92-4c74-b476-a24b66585268-kube-api-access-dcl8m\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.791569 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage06-crc" (UniqueName: "kubernetes.io/local-volume/local-storage06-crc") on node "crc" Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.864496 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5cn75\" (UniqueName: \"kubernetes.io/projected/9b9c1cf7-0cbb-4bb3-a764-8a8fa446c8a8-kube-api-access-5cn75\") pod \"9b9c1cf7-0cbb-4bb3-a764-8a8fa446c8a8\" (UID: \"9b9c1cf7-0cbb-4bb3-a764-8a8fa446c8a8\") " Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.864586 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b9c1cf7-0cbb-4bb3-a764-8a8fa446c8a8-config-data\") pod \"9b9c1cf7-0cbb-4bb3-a764-8a8fa446c8a8\" (UID: \"9b9c1cf7-0cbb-4bb3-a764-8a8fa446c8a8\") " Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.864643 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b9c1cf7-0cbb-4bb3-a764-8a8fa446c8a8-combined-ca-bundle\") pod \"9b9c1cf7-0cbb-4bb3-a764-8a8fa446c8a8\" (UID: \"9b9c1cf7-0cbb-4bb3-a764-8a8fa446c8a8\") " Jan 
20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.865536 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.875355 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9b9c1cf7-0cbb-4bb3-a764-8a8fa446c8a8-kube-api-access-5cn75" (OuterVolumeSpecName: "kube-api-access-5cn75") pod "9b9c1cf7-0cbb-4bb3-a764-8a8fa446c8a8" (UID: "9b9c1cf7-0cbb-4bb3-a764-8a8fa446c8a8"). InnerVolumeSpecName "kube-api-access-5cn75". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.886927 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.940254 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.946854 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.947963 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9b9c1cf7-0cbb-4bb3-a764-8a8fa446c8a8-config-data" (OuterVolumeSpecName: "config-data") pod "9b9c1cf7-0cbb-4bb3-a764-8a8fa446c8a8" (UID: "9b9c1cf7-0cbb-4bb3-a764-8a8fa446c8a8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.972075 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/b1ce826a-c52c-42c6-8b67-6074b78c9fb7-config-data-default\") pod \"b1ce826a-c52c-42c6-8b67-6074b78c9fb7\" (UID: \"b1ce826a-c52c-42c6-8b67-6074b78c9fb7\") " Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.972132 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a0122392-1c61-4133-b05f-cc6a622abaf9-config-data\") pod \"a0122392-1c61-4133-b05f-cc6a622abaf9\" (UID: \"a0122392-1c61-4133-b05f-cc6a622abaf9\") " Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.972205 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0122392-1c61-4133-b05f-cc6a622abaf9-combined-ca-bundle\") pod \"a0122392-1c61-4133-b05f-cc6a622abaf9\" (UID: \"a0122392-1c61-4133-b05f-cc6a622abaf9\") " Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.972239 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1ce826a-c52c-42c6-8b67-6074b78c9fb7-combined-ca-bundle\") pod \"b1ce826a-c52c-42c6-8b67-6074b78c9fb7\" (UID: \"b1ce826a-c52c-42c6-8b67-6074b78c9fb7\") " Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.972275 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"b1ce826a-c52c-42c6-8b67-6074b78c9fb7\" (UID: \"b1ce826a-c52c-42c6-8b67-6074b78c9fb7\") " Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.972302 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b1ce826a-c52c-42c6-8b67-6074b78c9fb7-operator-scripts\") pod \"b1ce826a-c52c-42c6-8b67-6074b78c9fb7\" (UID: \"b1ce826a-c52c-42c6-8b67-6074b78c9fb7\") " Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.972334 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/b1ce826a-c52c-42c6-8b67-6074b78c9fb7-kolla-config\") pod \"b1ce826a-c52c-42c6-8b67-6074b78c9fb7\" (UID: \"b1ce826a-c52c-42c6-8b67-6074b78c9fb7\") " Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.972374 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/a0122392-1c61-4133-b05f-cc6a622abaf9-nova-metadata-tls-certs\") pod \"a0122392-1c61-4133-b05f-cc6a622abaf9\" (UID: \"a0122392-1c61-4133-b05f-cc6a622abaf9\") " Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.972409 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t2l69\" (UniqueName: \"kubernetes.io/projected/b1ce826a-c52c-42c6-8b67-6074b78c9fb7-kube-api-access-t2l69\") pod \"b1ce826a-c52c-42c6-8b67-6074b78c9fb7\" (UID: \"b1ce826a-c52c-42c6-8b67-6074b78c9fb7\") " Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.972446 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a0122392-1c61-4133-b05f-cc6a622abaf9-logs\") pod \"a0122392-1c61-4133-b05f-cc6a622abaf9\" (UID: \"a0122392-1c61-4133-b05f-cc6a622abaf9\") " Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.972477 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-flg88\" (UniqueName: \"kubernetes.io/projected/a0122392-1c61-4133-b05f-cc6a622abaf9-kube-api-access-flg88\") pod \"a0122392-1c61-4133-b05f-cc6a622abaf9\" (UID: \"a0122392-1c61-4133-b05f-cc6a622abaf9\") " Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.972519 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/b1ce826a-c52c-42c6-8b67-6074b78c9fb7-config-data-generated\") pod \"b1ce826a-c52c-42c6-8b67-6074b78c9fb7\" (UID: \"b1ce826a-c52c-42c6-8b67-6074b78c9fb7\") " Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.972561 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/b1ce826a-c52c-42c6-8b67-6074b78c9fb7-galera-tls-certs\") pod \"b1ce826a-c52c-42c6-8b67-6074b78c9fb7\" (UID: \"b1ce826a-c52c-42c6-8b67-6074b78c9fb7\") " Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.972874 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5cn75\" (UniqueName: \"kubernetes.io/projected/9b9c1cf7-0cbb-4bb3-a764-8a8fa446c8a8-kube-api-access-5cn75\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.972874 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.972887 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9b9c1cf7-0cbb-4bb3-a764-8a8fa446c8a8-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.980490 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b1ce826a-c52c-42c6-8b67-6074b78c9fb7-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b1ce826a-c52c-42c6-8b67-6074b78c9fb7" (UID: "b1ce826a-c52c-42c6-8b67-6074b78c9fb7"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.990943 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b1ce826a-c52c-42c6-8b67-6074b78c9fb7-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "b1ce826a-c52c-42c6-8b67-6074b78c9fb7" (UID: "b1ce826a-c52c-42c6-8b67-6074b78c9fb7"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.992076 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b1ce826a-c52c-42c6-8b67-6074b78c9fb7-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "b1ce826a-c52c-42c6-8b67-6074b78c9fb7" (UID: "b1ce826a-c52c-42c6-8b67-6074b78c9fb7"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:35:54 crc kubenswrapper[4558]: I0120 17:35:54.998632 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a0122392-1c61-4133-b05f-cc6a622abaf9-logs" (OuterVolumeSpecName: "logs") pod "a0122392-1c61-4133-b05f-cc6a622abaf9" (UID: "a0122392-1c61-4133-b05f-cc6a622abaf9"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.000734 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b1ce826a-c52c-42c6-8b67-6074b78c9fb7-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "b1ce826a-c52c-42c6-8b67-6074b78c9fb7" (UID: "b1ce826a-c52c-42c6-8b67-6074b78c9fb7"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.002795 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.005639 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.026132 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.040359 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0122392-1c61-4133-b05f-cc6a622abaf9-kube-api-access-flg88" (OuterVolumeSpecName: "kube-api-access-flg88") pod "a0122392-1c61-4133-b05f-cc6a622abaf9" (UID: "a0122392-1c61-4133-b05f-cc6a622abaf9"). InnerVolumeSpecName "kube-api-access-flg88". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.069535 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.074060 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:35:55 crc kubenswrapper[4558]: E0120 17:35:55.074886 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91fac67e-69a9-4d88-9136-0d2484ca0dce" containerName="glance-httpd" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.075019 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="91fac67e-69a9-4d88-9136-0d2484ca0dce" containerName="glance-httpd" Jan 20 17:35:55 crc kubenswrapper[4558]: E0120 17:35:55.075092 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bbb20173-fee8-4323-afbb-f2bbbec3e978" containerName="proxy-httpd" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.075211 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="bbb20173-fee8-4323-afbb-f2bbbec3e978" containerName="proxy-httpd" Jan 20 17:35:55 crc kubenswrapper[4558]: E0120 17:35:55.075294 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a046f3a-bea5-4aea-8081-1c5994ecdbd9" containerName="nova-api-log" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.075355 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a046f3a-bea5-4aea-8081-1c5994ecdbd9" containerName="nova-api-log" Jan 20 17:35:55 crc kubenswrapper[4558]: E0120 17:35:55.075463 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="532ef149-f32a-4b6e-8d0c-458a04952d34" containerName="openstack-network-exporter" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.075573 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="532ef149-f32a-4b6e-8d0c-458a04952d34" containerName="openstack-network-exporter" Jan 20 17:35:55 crc kubenswrapper[4558]: E0120 17:35:55.075787 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b9c1cf7-0cbb-4bb3-a764-8a8fa446c8a8" containerName="nova-cell1-conductor-conductor" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.075881 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b9c1cf7-0cbb-4bb3-a764-8a8fa446c8a8" containerName="nova-cell1-conductor-conductor" Jan 20 17:35:55 crc kubenswrapper[4558]: E0120 17:35:55.075949 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0122392-1c61-4133-b05f-cc6a622abaf9" containerName="nova-metadata-metadata" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.076007 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0122392-1c61-4133-b05f-cc6a622abaf9" containerName="nova-metadata-metadata" Jan 20 17:35:55 crc kubenswrapper[4558]: E0120 17:35:55.076130 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4bd47eac-4090-4fc1-91c4-553ebd84964d" containerName="cinder-scheduler" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.076198 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4bd47eac-4090-4fc1-91c4-553ebd84964d" containerName="cinder-scheduler" Jan 20 17:35:55 crc kubenswrapper[4558]: E0120 17:35:55.076811 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bbb20173-fee8-4323-afbb-f2bbbec3e978" containerName="ceilometer-notification-agent" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.076941 4558 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="bbb20173-fee8-4323-afbb-f2bbbec3e978" containerName="ceilometer-notification-agent" Jan 20 17:35:55 crc kubenswrapper[4558]: E0120 17:35:55.077095 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1ce826a-c52c-42c6-8b67-6074b78c9fb7" containerName="mysql-bootstrap" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.077368 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1ce826a-c52c-42c6-8b67-6074b78c9fb7" containerName="mysql-bootstrap" Jan 20 17:35:55 crc kubenswrapper[4558]: E0120 17:35:55.077437 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="099193f1-dcf1-4c0a-beeb-fac5f0824cb8" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.077491 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="099193f1-dcf1-4c0a-beeb-fac5f0824cb8" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:35:55 crc kubenswrapper[4558]: E0120 17:35:55.077547 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bbb20173-fee8-4323-afbb-f2bbbec3e978" containerName="ceilometer-central-agent" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.077597 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="bbb20173-fee8-4323-afbb-f2bbbec3e978" containerName="ceilometer-central-agent" Jan 20 17:35:55 crc kubenswrapper[4558]: E0120 17:35:55.077665 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4d77869-9132-4170-b50d-88389a33c597" containerName="nova-cell0-conductor-conductor" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.077715 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4bd47eac-4090-4fc1-91c4-553ebd84964d-config-data-custom\") pod \"4bd47eac-4090-4fc1-91c4-553ebd84964d\" (UID: \"4bd47eac-4090-4fc1-91c4-553ebd84964d\") " Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.077724 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4d77869-9132-4170-b50d-88389a33c597" containerName="nova-cell0-conductor-conductor" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.077880 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4bd47eac-4090-4fc1-91c4-553ebd84964d-combined-ca-bundle\") pod \"4bd47eac-4090-4fc1-91c4-553ebd84964d\" (UID: \"4bd47eac-4090-4fc1-91c4-553ebd84964d\") " Jan 20 17:35:55 crc kubenswrapper[4558]: E0120 17:35:55.077907 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91fac67e-69a9-4d88-9136-0d2484ca0dce" containerName="glance-log" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.077923 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="91fac67e-69a9-4d88-9136-0d2484ca0dce" containerName="glance-log" Jan 20 17:35:55 crc kubenswrapper[4558]: E0120 17:35:55.077947 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="532ef149-f32a-4b6e-8d0c-458a04952d34" containerName="ovsdbserver-sb" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.077954 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="532ef149-f32a-4b6e-8d0c-458a04952d34" containerName="ovsdbserver-sb" Jan 20 17:35:55 crc kubenswrapper[4558]: E0120 17:35:55.077967 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a046f3a-bea5-4aea-8081-1c5994ecdbd9" containerName="nova-api-api" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.077973 4558 
state_mem.go:107] "Deleted CPUSet assignment" podUID="5a046f3a-bea5-4aea-8081-1c5994ecdbd9" containerName="nova-api-api" Jan 20 17:35:55 crc kubenswrapper[4558]: E0120 17:35:55.077985 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1ce826a-c52c-42c6-8b67-6074b78c9fb7" containerName="galera" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.077991 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1ce826a-c52c-42c6-8b67-6074b78c9fb7" containerName="galera" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.077999 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4bd47eac-4090-4fc1-91c4-553ebd84964d-scripts\") pod \"4bd47eac-4090-4fc1-91c4-553ebd84964d\" (UID: \"4bd47eac-4090-4fc1-91c4-553ebd84964d\") " Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.078113 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s5x4k\" (UniqueName: \"kubernetes.io/projected/4bd47eac-4090-4fc1-91c4-553ebd84964d-kube-api-access-s5x4k\") pod \"4bd47eac-4090-4fc1-91c4-553ebd84964d\" (UID: \"4bd47eac-4090-4fc1-91c4-553ebd84964d\") " Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.078350 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xxdh6\" (UniqueName: \"kubernetes.io/projected/f4d77869-9132-4170-b50d-88389a33c597-kube-api-access-xxdh6\") pod \"f4d77869-9132-4170-b50d-88389a33c597\" (UID: \"f4d77869-9132-4170-b50d-88389a33c597\") " Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.078461 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4bd47eac-4090-4fc1-91c4-553ebd84964d-config-data\") pod \"4bd47eac-4090-4fc1-91c4-553ebd84964d\" (UID: \"4bd47eac-4090-4fc1-91c4-553ebd84964d\") " Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.078530 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4d77869-9132-4170-b50d-88389a33c597-combined-ca-bundle\") pod \"f4d77869-9132-4170-b50d-88389a33c597\" (UID: \"f4d77869-9132-4170-b50d-88389a33c597\") " Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.078561 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f4d77869-9132-4170-b50d-88389a33c597-config-data\") pod \"f4d77869-9132-4170-b50d-88389a33c597\" (UID: \"f4d77869-9132-4170-b50d-88389a33c597\") " Jan 20 17:35:55 crc kubenswrapper[4558]: E0120 17:35:55.078006 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db6ef9f1-8a92-4c74-b476-a24b66585268" containerName="galera" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.078606 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="db6ef9f1-8a92-4c74-b476-a24b66585268" containerName="galera" Jan 20 17:35:55 crc kubenswrapper[4558]: E0120 17:35:55.078628 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4bd47eac-4090-4fc1-91c4-553ebd84964d" containerName="probe" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.078635 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4bd47eac-4090-4fc1-91c4-553ebd84964d" containerName="probe" Jan 20 17:35:55 crc kubenswrapper[4558]: E0120 17:35:55.078646 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bbb20173-fee8-4323-afbb-f2bbbec3e978" 
containerName="sg-core" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.078651 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="bbb20173-fee8-4323-afbb-f2bbbec3e978" containerName="sg-core" Jan 20 17:35:55 crc kubenswrapper[4558]: E0120 17:35:55.078663 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0122392-1c61-4133-b05f-cc6a622abaf9" containerName="nova-metadata-log" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.078670 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0122392-1c61-4133-b05f-cc6a622abaf9" containerName="nova-metadata-log" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.078670 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4bd47eac-4090-4fc1-91c4-553ebd84964d-etc-machine-id\") pod \"4bd47eac-4090-4fc1-91c4-553ebd84964d\" (UID: \"4bd47eac-4090-4fc1-91c4-553ebd84964d\") " Jan 20 17:35:55 crc kubenswrapper[4558]: E0120 17:35:55.078691 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db6ef9f1-8a92-4c74-b476-a24b66585268" containerName="mysql-bootstrap" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.078698 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="db6ef9f1-8a92-4c74-b476-a24b66585268" containerName="mysql-bootstrap" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.079054 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="4bd47eac-4090-4fc1-91c4-553ebd84964d" containerName="cinder-scheduler" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.079070 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="bbb20173-fee8-4323-afbb-f2bbbec3e978" containerName="sg-core" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.079081 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="bbb20173-fee8-4323-afbb-f2bbbec3e978" containerName="proxy-httpd" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.079089 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="91fac67e-69a9-4d88-9136-0d2484ca0dce" containerName="glance-httpd" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.079101 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="532ef149-f32a-4b6e-8d0c-458a04952d34" containerName="openstack-network-exporter" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.079112 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4d77869-9132-4170-b50d-88389a33c597" containerName="nova-cell0-conductor-conductor" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.079119 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a0122392-1c61-4133-b05f-cc6a622abaf9" containerName="nova-metadata-log" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.079133 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="9b9c1cf7-0cbb-4bb3-a764-8a8fa446c8a8" containerName="nova-cell1-conductor-conductor" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.079144 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="099193f1-dcf1-4c0a-beeb-fac5f0824cb8" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.079155 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="bbb20173-fee8-4323-afbb-f2bbbec3e978" containerName="ceilometer-central-agent" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.079177 4558 memory_manager.go:354] "RemoveStaleState 
removing state" podUID="5a046f3a-bea5-4aea-8081-1c5994ecdbd9" containerName="nova-api-log" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.079187 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="db6ef9f1-8a92-4c74-b476-a24b66585268" containerName="galera" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.079194 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a0122392-1c61-4133-b05f-cc6a622abaf9" containerName="nova-metadata-metadata" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.079202 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="bbb20173-fee8-4323-afbb-f2bbbec3e978" containerName="ceilometer-notification-agent" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.079213 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="5a046f3a-bea5-4aea-8081-1c5994ecdbd9" containerName="nova-api-api" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.079222 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="4bd47eac-4090-4fc1-91c4-553ebd84964d" containerName="probe" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.079232 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="532ef149-f32a-4b6e-8d0c-458a04952d34" containerName="ovsdbserver-sb" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.079242 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b1ce826a-c52c-42c6-8b67-6074b78c9fb7" containerName="galera" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.079256 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="91fac67e-69a9-4d88-9136-0d2484ca0dce" containerName="glance-log" Jan 20 17:35:55 crc kubenswrapper[4558]: E0120 17:35:55.079473 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4bd47eac-4090-4fc1-91c4-553ebd84964d" containerName="cinder-scheduler" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.079483 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4bd47eac-4090-4fc1-91c4-553ebd84964d" containerName="cinder-scheduler" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.079689 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="4bd47eac-4090-4fc1-91c4-553ebd84964d" containerName="cinder-scheduler" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.079834 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-flg88\" (UniqueName: \"kubernetes.io/projected/a0122392-1c61-4133-b05f-cc6a622abaf9-kube-api-access-flg88\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.079855 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/b1ce826a-c52c-42c6-8b67-6074b78c9fb7-config-data-generated\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.080047 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/b1ce826a-c52c-42c6-8b67-6074b78c9fb7-config-data-default\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.080061 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b1ce826a-c52c-42c6-8b67-6074b78c9fb7-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.080072 4558 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" 
(UniqueName: \"kubernetes.io/configmap/b1ce826a-c52c-42c6-8b67-6074b78c9fb7-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.080083 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4bd47eac-4090-4fc1-91c4-553ebd84964d-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "4bd47eac-4090-4fc1-91c4-553ebd84964d" (UID: "4bd47eac-4090-4fc1-91c4-553ebd84964d"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.080080 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a0122392-1c61-4133-b05f-cc6a622abaf9-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.080417 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.081031 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.081988 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ovncluster-ovndbcluster-sb-dockercfg-g95pp" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.082761 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovndbcluster-sb-scripts" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.083129 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovndbcluster-sb-config" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.083329 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ovndbcluster-sb-ovndbs" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.088311 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4bd47eac-4090-4fc1-91c4-553ebd84964d-scripts" (OuterVolumeSpecName: "scripts") pod "4bd47eac-4090-4fc1-91c4-553ebd84964d" (UID: "4bd47eac-4090-4fc1-91c4-553ebd84964d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.089437 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.090429 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b1ce826a-c52c-42c6-8b67-6074b78c9fb7-kube-api-access-t2l69" (OuterVolumeSpecName: "kube-api-access-t2l69") pod "b1ce826a-c52c-42c6-8b67-6074b78c9fb7" (UID: "b1ce826a-c52c-42c6-8b67-6074b78c9fb7"). InnerVolumeSpecName "kube-api-access-t2l69". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.110406 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9b9c1cf7-0cbb-4bb3-a764-8a8fa446c8a8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9b9c1cf7-0cbb-4bb3-a764-8a8fa446c8a8" (UID: "9b9c1cf7-0cbb-4bb3-a764-8a8fa446c8a8"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.129214 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.143280 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.155873 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f4d77869-9132-4170-b50d-88389a33c597-kube-api-access-xxdh6" (OuterVolumeSpecName: "kube-api-access-xxdh6") pod "f4d77869-9132-4170-b50d-88389a33c597" (UID: "f4d77869-9132-4170-b50d-88389a33c597"). InnerVolumeSpecName "kube-api-access-xxdh6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.156002 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.158079 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4bd47eac-4090-4fc1-91c4-553ebd84964d-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "4bd47eac-4090-4fc1-91c4-553ebd84964d" (UID: "4bd47eac-4090-4fc1-91c4-553ebd84964d"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:55 crc kubenswrapper[4558]: E0120 17:35:55.159475 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="833337d4-94d4-4bf6-9d0b-5791b9cc115e" containerName="nova-scheduler-scheduler" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.159527 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="833337d4-94d4-4bf6-9d0b-5791b9cc115e" containerName="nova-scheduler-scheduler" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.160269 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="833337d4-94d4-4bf6-9d0b-5791b9cc115e" containerName="nova-scheduler-scheduler" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.162124 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.163180 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bd47eac-4090-4fc1-91c4-553ebd84964d-kube-api-access-s5x4k" (OuterVolumeSpecName: "kube-api-access-s5x4k") pod "4bd47eac-4090-4fc1-91c4-553ebd84964d" (UID: "4bd47eac-4090-4fc1-91c4-553ebd84964d"). InnerVolumeSpecName "kube-api-access-s5x4k". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.163459 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage02-crc" (OuterVolumeSpecName: "mysql-db") pod "b1ce826a-c52c-42c6-8b67-6074b78c9fb7" (UID: "b1ce826a-c52c-42c6-8b67-6074b78c9fb7"). InnerVolumeSpecName "local-storage02-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.176774 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-glance-dockercfg-bsssw" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.177182 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-internal-config-data" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.177469 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.177824 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-scripts" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.178001 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-internal-svc" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.186426 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f7881580-f0a5-47a0-9622-3927bcce668a-scripts\") pod \"glance-default-internal-api-0\" (UID: \"f7881580-f0a5-47a0-9622-3927bcce668a\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.186512 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"glance-default-internal-api-0\" (UID: \"f7881580-f0a5-47a0-9622-3927bcce668a\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.186545 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f7881580-f0a5-47a0-9622-3927bcce668a-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"f7881580-f0a5-47a0-9622-3927bcce668a\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.186612 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7881580-f0a5-47a0-9622-3927bcce668a-config-data\") pod \"glance-default-internal-api-0\" (UID: \"f7881580-f0a5-47a0-9622-3927bcce668a\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.186640 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f7881580-f0a5-47a0-9622-3927bcce668a-logs\") pod \"glance-default-internal-api-0\" (UID: \"f7881580-f0a5-47a0-9622-3927bcce668a\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.186739 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7881580-f0a5-47a0-9622-3927bcce668a-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"f7881580-f0a5-47a0-9622-3927bcce668a\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.186771 4558 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xjrx8\" (UniqueName: \"kubernetes.io/projected/f7881580-f0a5-47a0-9622-3927bcce668a-kube-api-access-xjrx8\") pod \"glance-default-internal-api-0\" (UID: \"f7881580-f0a5-47a0-9622-3927bcce668a\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.186833 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f7881580-f0a5-47a0-9622-3927bcce668a-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"f7881580-f0a5-47a0-9622-3927bcce668a\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.186918 4558 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4bd47eac-4090-4fc1-91c4-553ebd84964d-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.186948 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" " Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.186958 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4bd47eac-4090-4fc1-91c4-553ebd84964d-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.186972 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t2l69\" (UniqueName: \"kubernetes.io/projected/b1ce826a-c52c-42c6-8b67-6074b78c9fb7-kube-api-access-t2l69\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.186982 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b9c1cf7-0cbb-4bb3-a764-8a8fa446c8a8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.186990 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4bd47eac-4090-4fc1-91c4-553ebd84964d-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.186998 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s5x4k\" (UniqueName: \"kubernetes.io/projected/4bd47eac-4090-4fc1-91c4-553ebd84964d-kube-api-access-s5x4k\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.187009 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xxdh6\" (UniqueName: \"kubernetes.io/projected/f4d77869-9132-4170-b50d-88389a33c597-kube-api-access-xxdh6\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.292604 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bbb20173-fee8-4323-afbb-f2bbbec3e978-combined-ca-bundle\") pod \"bbb20173-fee8-4323-afbb-f2bbbec3e978\" (UID: \"bbb20173-fee8-4323-afbb-f2bbbec3e978\") " Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.296941 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bbb20173-fee8-4323-afbb-f2bbbec3e978-run-httpd\") pod 
\"bbb20173-fee8-4323-afbb-f2bbbec3e978\" (UID: \"bbb20173-fee8-4323-afbb-f2bbbec3e978\") " Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.297108 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bbb20173-fee8-4323-afbb-f2bbbec3e978-config-data\") pod \"bbb20173-fee8-4323-afbb-f2bbbec3e978\" (UID: \"bbb20173-fee8-4323-afbb-f2bbbec3e978\") " Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.297137 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/bbb20173-fee8-4323-afbb-f2bbbec3e978-ceilometer-tls-certs\") pod \"bbb20173-fee8-4323-afbb-f2bbbec3e978\" (UID: \"bbb20173-fee8-4323-afbb-f2bbbec3e978\") " Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.298593 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bbb20173-fee8-4323-afbb-f2bbbec3e978-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "bbb20173-fee8-4323-afbb-f2bbbec3e978" (UID: "bbb20173-fee8-4323-afbb-f2bbbec3e978"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.298678 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bbb20173-fee8-4323-afbb-f2bbbec3e978-log-httpd\") pod \"bbb20173-fee8-4323-afbb-f2bbbec3e978\" (UID: \"bbb20173-fee8-4323-afbb-f2bbbec3e978\") " Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.298719 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-58rts\" (UniqueName: \"kubernetes.io/projected/833337d4-94d4-4bf6-9d0b-5791b9cc115e-kube-api-access-58rts\") pod \"833337d4-94d4-4bf6-9d0b-5791b9cc115e\" (UID: \"833337d4-94d4-4bf6-9d0b-5791b9cc115e\") " Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.298831 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-js2gg\" (UniqueName: \"kubernetes.io/projected/bbb20173-fee8-4323-afbb-f2bbbec3e978-kube-api-access-js2gg\") pod \"bbb20173-fee8-4323-afbb-f2bbbec3e978\" (UID: \"bbb20173-fee8-4323-afbb-f2bbbec3e978\") " Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.298885 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bbb20173-fee8-4323-afbb-f2bbbec3e978-scripts\") pod \"bbb20173-fee8-4323-afbb-f2bbbec3e978\" (UID: \"bbb20173-fee8-4323-afbb-f2bbbec3e978\") " Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.298981 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/833337d4-94d4-4bf6-9d0b-5791b9cc115e-combined-ca-bundle\") pod \"833337d4-94d4-4bf6-9d0b-5791b9cc115e\" (UID: \"833337d4-94d4-4bf6-9d0b-5791b9cc115e\") " Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.299151 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/833337d4-94d4-4bf6-9d0b-5791b9cc115e-config-data\") pod \"833337d4-94d4-4bf6-9d0b-5791b9cc115e\" (UID: \"833337d4-94d4-4bf6-9d0b-5791b9cc115e\") " Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.299253 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/bbb20173-fee8-4323-afbb-f2bbbec3e978-sg-core-conf-yaml\") pod \"bbb20173-fee8-4323-afbb-f2bbbec3e978\" (UID: \"bbb20173-fee8-4323-afbb-f2bbbec3e978\") " Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.299936 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xjrx8\" (UniqueName: \"kubernetes.io/projected/f7881580-f0a5-47a0-9622-3927bcce668a-kube-api-access-xjrx8\") pod \"glance-default-internal-api-0\" (UID: \"f7881580-f0a5-47a0-9622-3927bcce668a\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.300035 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f7881580-f0a5-47a0-9622-3927bcce668a-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"f7881580-f0a5-47a0-9622-3927bcce668a\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.300136 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f7881580-f0a5-47a0-9622-3927bcce668a-scripts\") pod \"glance-default-internal-api-0\" (UID: \"f7881580-f0a5-47a0-9622-3927bcce668a\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.300196 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8b45e22f-53b9-4228-a28a-a5db18b6e583-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"8b45e22f-53b9-4228-a28a-a5db18b6e583\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.300234 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"glance-default-internal-api-0\" (UID: \"f7881580-f0a5-47a0-9622-3927bcce668a\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.300284 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f7881580-f0a5-47a0-9622-3927bcce668a-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"f7881580-f0a5-47a0-9622-3927bcce668a\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.299136 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bbb20173-fee8-4323-afbb-f2bbbec3e978-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "bbb20173-fee8-4323-afbb-f2bbbec3e978" (UID: "bbb20173-fee8-4323-afbb-f2bbbec3e978"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.300376 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7881580-f0a5-47a0-9622-3927bcce668a-config-data\") pod \"glance-default-internal-api-0\" (UID: \"f7881580-f0a5-47a0-9622-3927bcce668a\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.300410 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b45e22f-53b9-4228-a28a-a5db18b6e583-config\") pod \"ovsdbserver-sb-0\" (UID: \"8b45e22f-53b9-4228-a28a-a5db18b6e583\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.300436 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b45e22f-53b9-4228-a28a-a5db18b6e583-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"8b45e22f-53b9-4228-a28a-a5db18b6e583\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.300493 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b45e22f-53b9-4228-a28a-a5db18b6e583-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"8b45e22f-53b9-4228-a28a-a5db18b6e583\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.300518 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f7881580-f0a5-47a0-9622-3927bcce668a-logs\") pod \"glance-default-internal-api-0\" (UID: \"f7881580-f0a5-47a0-9622-3927bcce668a\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.300584 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-twwcp\" (UniqueName: \"kubernetes.io/projected/8b45e22f-53b9-4228-a28a-a5db18b6e583-kube-api-access-twwcp\") pod \"ovsdbserver-sb-0\" (UID: \"8b45e22f-53b9-4228-a28a-a5db18b6e583\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.300619 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"ovsdbserver-sb-0\" (UID: \"8b45e22f-53b9-4228-a28a-a5db18b6e583\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.300647 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/8b45e22f-53b9-4228-a28a-a5db18b6e583-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"8b45e22f-53b9-4228-a28a-a5db18b6e583\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.300678 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b45e22f-53b9-4228-a28a-a5db18b6e583-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: 
\"8b45e22f-53b9-4228-a28a-a5db18b6e583\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.300716 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7881580-f0a5-47a0-9622-3927bcce668a-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"f7881580-f0a5-47a0-9622-3927bcce668a\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.300887 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bbb20173-fee8-4323-afbb-f2bbbec3e978-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.300910 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bbb20173-fee8-4323-afbb-f2bbbec3e978-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.301005 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"glance-default-internal-api-0\" (UID: \"f7881580-f0a5-47a0-9622-3927bcce668a\") device mount path \"/mnt/openstack/pv15\"" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.301092 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f7881580-f0a5-47a0-9622-3927bcce668a-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"f7881580-f0a5-47a0-9622-3927bcce668a\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.301464 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f7881580-f0a5-47a0-9622-3927bcce668a-logs\") pod \"glance-default-internal-api-0\" (UID: \"f7881580-f0a5-47a0-9622-3927bcce668a\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.318749 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7881580-f0a5-47a0-9622-3927bcce668a-config-data\") pod \"glance-default-internal-api-0\" (UID: \"f7881580-f0a5-47a0-9622-3927bcce668a\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.320456 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xjrx8\" (UniqueName: \"kubernetes.io/projected/f7881580-f0a5-47a0-9622-3927bcce668a-kube-api-access-xjrx8\") pod \"glance-default-internal-api-0\" (UID: \"f7881580-f0a5-47a0-9622-3927bcce668a\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.322259 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7881580-f0a5-47a0-9622-3927bcce668a-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"f7881580-f0a5-47a0-9622-3927bcce668a\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.322710 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" 
(UniqueName: \"kubernetes.io/secret/f7881580-f0a5-47a0-9622-3927bcce668a-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"f7881580-f0a5-47a0-9622-3927bcce668a\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.325605 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f7881580-f0a5-47a0-9622-3927bcce668a-scripts\") pod \"glance-default-internal-api-0\" (UID: \"f7881580-f0a5-47a0-9622-3927bcce668a\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.334246 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bbb20173-fee8-4323-afbb-f2bbbec3e978-kube-api-access-js2gg" (OuterVolumeSpecName: "kube-api-access-js2gg") pod "bbb20173-fee8-4323-afbb-f2bbbec3e978" (UID: "bbb20173-fee8-4323-afbb-f2bbbec3e978"). InnerVolumeSpecName "kube-api-access-js2gg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.334249 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/833337d4-94d4-4bf6-9d0b-5791b9cc115e-kube-api-access-58rts" (OuterVolumeSpecName: "kube-api-access-58rts") pod "833337d4-94d4-4bf6-9d0b-5791b9cc115e" (UID: "833337d4-94d4-4bf6-9d0b-5791b9cc115e"). InnerVolumeSpecName "kube-api-access-58rts". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.338814 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bbb20173-fee8-4323-afbb-f2bbbec3e978-scripts" (OuterVolumeSpecName: "scripts") pod "bbb20173-fee8-4323-afbb-f2bbbec3e978" (UID: "bbb20173-fee8-4323-afbb-f2bbbec3e978"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.352772 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage02-crc" (UniqueName: "kubernetes.io/local-volume/local-storage02-crc") on node "crc" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.403664 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b45e22f-53b9-4228-a28a-a5db18b6e583-config\") pod \"ovsdbserver-sb-0\" (UID: \"8b45e22f-53b9-4228-a28a-a5db18b6e583\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.403974 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b45e22f-53b9-4228-a28a-a5db18b6e583-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"8b45e22f-53b9-4228-a28a-a5db18b6e583\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.404733 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b45e22f-53b9-4228-a28a-a5db18b6e583-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"8b45e22f-53b9-4228-a28a-a5db18b6e583\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.404866 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-twwcp\" (UniqueName: \"kubernetes.io/projected/8b45e22f-53b9-4228-a28a-a5db18b6e583-kube-api-access-twwcp\") pod \"ovsdbserver-sb-0\" (UID: \"8b45e22f-53b9-4228-a28a-a5db18b6e583\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.404942 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"ovsdbserver-sb-0\" (UID: \"8b45e22f-53b9-4228-a28a-a5db18b6e583\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.405019 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/8b45e22f-53b9-4228-a28a-a5db18b6e583-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"8b45e22f-53b9-4228-a28a-a5db18b6e583\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.405224 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b45e22f-53b9-4228-a28a-a5db18b6e583-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"8b45e22f-53b9-4228-a28a-a5db18b6e583\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.405487 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b45e22f-53b9-4228-a28a-a5db18b6e583-config\") pod \"ovsdbserver-sb-0\" (UID: \"8b45e22f-53b9-4228-a28a-a5db18b6e583\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.414364 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8b45e22f-53b9-4228-a28a-a5db18b6e583-scripts\") pod \"ovsdbserver-sb-0\" 
(UID: \"8b45e22f-53b9-4228-a28a-a5db18b6e583\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.414467 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"ovsdbserver-sb-0\" (UID: \"8b45e22f-53b9-4228-a28a-a5db18b6e583\") device mount path \"/mnt/openstack/pv04\"" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.414621 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/8b45e22f-53b9-4228-a28a-a5db18b6e583-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"8b45e22f-53b9-4228-a28a-a5db18b6e583\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.415138 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.415197 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-58rts\" (UniqueName: \"kubernetes.io/projected/833337d4-94d4-4bf6-9d0b-5791b9cc115e-kube-api-access-58rts\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.415211 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-js2gg\" (UniqueName: \"kubernetes.io/projected/bbb20173-fee8-4323-afbb-f2bbbec3e978-kube-api-access-js2gg\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.415220 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bbb20173-fee8-4323-afbb-f2bbbec3e978-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.415905 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8b45e22f-53b9-4228-a28a-a5db18b6e583-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"8b45e22f-53b9-4228-a28a-a5db18b6e583\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.416688 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b45e22f-53b9-4228-a28a-a5db18b6e583-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"8b45e22f-53b9-4228-a28a-a5db18b6e583\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.425884 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b45e22f-53b9-4228-a28a-a5db18b6e583-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"8b45e22f-53b9-4228-a28a-a5db18b6e583\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.431433 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b45e22f-53b9-4228-a28a-a5db18b6e583-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"8b45e22f-53b9-4228-a28a-a5db18b6e583\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.433499 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-twwcp\" (UniqueName: \"kubernetes.io/projected/8b45e22f-53b9-4228-a28a-a5db18b6e583-kube-api-access-twwcp\") pod \"ovsdbserver-sb-0\" (UID: \"8b45e22f-53b9-4228-a28a-a5db18b6e583\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.448885 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.453556 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"bbb20173-fee8-4323-afbb-f2bbbec3e978","Type":"ContainerDied","Data":"b5bdedf70433a1028c539e44bc993a783930852815b819b981c5d4ae75bba7dd"} Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.453594 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.453603 4558 scope.go:117] "RemoveContainer" containerID="3ff2458b3f3311a509b35bc6e23de18f9f9253176e2e50da406ead896867e39d" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.455801 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-6d8f9956fd-j6jg4" event={"ID":"5b57aa66-3c71-422d-b029-86cb0e3a9aef","Type":"ContainerStarted","Data":"bbdfd5ce6843e43c9db75a7afcdc0a1ab6a9db243de4fc167edef10ed383f798"} Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.455850 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-6d8f9956fd-j6jg4" event={"ID":"5b57aa66-3c71-422d-b029-86cb0e3a9aef","Type":"ContainerStarted","Data":"5c598ae289e51b4f694eaad151caaba6baebea9ddd1de5aa5781296530efbf55"} Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.461008 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"f4d77869-9132-4170-b50d-88389a33c597","Type":"ContainerDied","Data":"02995edadd67159839c771c36a6d802eeb43852d8c189cae748c4f00fd0b7c25"} Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.461033 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.467961 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"833337d4-94d4-4bf6-9d0b-5791b9cc115e","Type":"ContainerDied","Data":"f422b0d2120eb762e76d7887687df612b99aa8a3f0d1ecb29f853d086c2939d7"} Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.468002 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.472027 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-67cbb6f96d-89sm2" event={"ID":"3d958c0e-f259-45a7-9c66-9d2b82c92980","Type":"ContainerStarted","Data":"f1f72a7cad9a8e9ebdf3bd8dc8027d327089333501c70a43397912c9f5f104c7"} Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.472203 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/placement-67cbb6f96d-89sm2" podUID="3d958c0e-f259-45a7-9c66-9d2b82c92980" containerName="placement-log" containerID="cri-o://2f3a880c1014f060f55cc93ec2f22fdc0bc281cbb530d9eeb1a0031f82c5f171" gracePeriod=30 Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.472510 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/placement-67cbb6f96d-89sm2" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.472540 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/placement-67cbb6f96d-89sm2" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.472856 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/placement-67cbb6f96d-89sm2" podUID="3d958c0e-f259-45a7-9c66-9d2b82c92980" containerName="placement-api" containerID="cri-o://f1f72a7cad9a8e9ebdf3bd8dc8027d327089333501c70a43397912c9f5f104c7" gracePeriod=30 Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.480320 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-7749bf6d58-dwpbl" event={"ID":"9bca6361-b1d7-4953-b4bd-2bbda0f90581","Type":"ContainerStarted","Data":"f6f5a5e0be8e181ac0a6e571683bbf4717d8a0dc273866a4b41c89d9befcf017"} Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.480518 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-7749bf6d58-dwpbl" podUID="9bca6361-b1d7-4953-b4bd-2bbda0f90581" containerName="neutron-api" containerID="cri-o://1371ec8f64533ee2b1ef24267d239bc9ffc81022e42aaedff8dc1abab24b701a" gracePeriod=30 Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.480656 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/neutron-7749bf6d58-dwpbl" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.480716 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-7749bf6d58-dwpbl" podUID="9bca6361-b1d7-4953-b4bd-2bbda0f90581" containerName="neutron-httpd" containerID="cri-o://f6f5a5e0be8e181ac0a6e571683bbf4717d8a0dc273866a4b41c89d9befcf017" gracePeriod=30 Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.490299 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-7d7dbcb65b-ttg5w" event={"ID":"1ab0a776-6551-4ef8-bd41-0ef0e58d892d","Type":"ContainerStarted","Data":"14383d0552e72e8dcba897054fdb5c936fabfa390440e34f12f122f7b98f85e9"} Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.490461 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-keystone-listener-7d7dbcb65b-ttg5w" podUID="1ab0a776-6551-4ef8-bd41-0ef0e58d892d" containerName="barbican-keystone-listener-log" containerID="cri-o://5ceee282e729d804923fe9fb975a64436f6260d190a2814c82d6d5c4adb2113b" gracePeriod=30 Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 
17:35:55.490542 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-keystone-listener-7d7dbcb65b-ttg5w" podUID="1ab0a776-6551-4ef8-bd41-0ef0e58d892d" containerName="barbican-keystone-listener" containerID="cri-o://14383d0552e72e8dcba897054fdb5c936fabfa390440e34f12f122f7b98f85e9" gracePeriod=30 Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.503342 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"98f8dc8b-6446-4ba8-b37b-a11ae7414b65","Type":"ContainerStarted","Data":"d718cd641de672fcf4206b38f0077f442a339f62d8e331e2436bbb1183faaea9"} Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.523856 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.524267 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.524311 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.524662 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.554067 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/placement-67cbb6f96d-89sm2" podStartSLOduration=6.554054921 podStartE2EDuration="6.554054921s" podCreationTimestamp="2026-01-20 17:35:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:35:55.49926237 +0000 UTC m=+3249.259600357" watchObservedRunningTime="2026-01-20 17:35:55.554054921 +0000 UTC m=+3249.314392888" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.570568 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/neutron-7749bf6d58-dwpbl" podStartSLOduration=6.57055701 podStartE2EDuration="6.57055701s" podCreationTimestamp="2026-01-20 17:35:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:35:55.544757698 +0000 UTC m=+3249.305095655" watchObservedRunningTime="2026-01-20 17:35:55.57055701 +0000 UTC m=+3249.330894968" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.613327 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-keystone-listener-7d7dbcb65b-ttg5w" podStartSLOduration=7.613303721 podStartE2EDuration="7.613303721s" podCreationTimestamp="2026-01-20 17:35:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:35:55.590351522 +0000 UTC m=+3249.350689489" watchObservedRunningTime="2026-01-20 17:35:55.613303721 +0000 UTC m=+3249.373641687" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.969526 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"glance-default-internal-api-0\" (UID: \"f7881580-f0a5-47a0-9622-3927bcce668a\") " 
pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.971728 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0122392-1c61-4133-b05f-cc6a622abaf9-config-data" (OuterVolumeSpecName: "config-data") pod "a0122392-1c61-4133-b05f-cc6a622abaf9" (UID: "a0122392-1c61-4133-b05f-cc6a622abaf9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:55 crc kubenswrapper[4558]: I0120 17:35:55.984701 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b1ce826a-c52c-42c6-8b67-6074b78c9fb7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b1ce826a-c52c-42c6-8b67-6074b78c9fb7" (UID: "b1ce826a-c52c-42c6-8b67-6074b78c9fb7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:56 crc kubenswrapper[4558]: I0120 17:35:56.043466 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a0122392-1c61-4133-b05f-cc6a622abaf9-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:56 crc kubenswrapper[4558]: I0120 17:35:56.043769 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1ce826a-c52c-42c6-8b67-6074b78c9fb7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:56 crc kubenswrapper[4558]: I0120 17:35:56.057830 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/833337d4-94d4-4bf6-9d0b-5791b9cc115e-config-data" (OuterVolumeSpecName: "config-data") pod "833337d4-94d4-4bf6-9d0b-5791b9cc115e" (UID: "833337d4-94d4-4bf6-9d0b-5791b9cc115e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:56 crc kubenswrapper[4558]: I0120 17:35:56.078244 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/833337d4-94d4-4bf6-9d0b-5791b9cc115e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "833337d4-94d4-4bf6-9d0b-5791b9cc115e" (UID: "833337d4-94d4-4bf6-9d0b-5791b9cc115e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:56 crc kubenswrapper[4558]: I0120 17:35:56.102422 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0122392-1c61-4133-b05f-cc6a622abaf9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a0122392-1c61-4133-b05f-cc6a622abaf9" (UID: "a0122392-1c61-4133-b05f-cc6a622abaf9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:56 crc kubenswrapper[4558]: I0120 17:35:56.139026 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"ovsdbserver-sb-0\" (UID: \"8b45e22f-53b9-4228-a28a-a5db18b6e583\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:35:56 crc kubenswrapper[4558]: I0120 17:35:56.140293 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bbb20173-fee8-4323-afbb-f2bbbec3e978-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "bbb20173-fee8-4323-afbb-f2bbbec3e978" (UID: "bbb20173-fee8-4323-afbb-f2bbbec3e978"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:56 crc kubenswrapper[4558]: I0120 17:35:56.145682 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/833337d4-94d4-4bf6-9d0b-5791b9cc115e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:56 crc kubenswrapper[4558]: I0120 17:35:56.145713 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0122392-1c61-4133-b05f-cc6a622abaf9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:56 crc kubenswrapper[4558]: I0120 17:35:56.145725 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/833337d4-94d4-4bf6-9d0b-5791b9cc115e-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:56 crc kubenswrapper[4558]: I0120 17:35:56.145734 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/bbb20173-fee8-4323-afbb-f2bbbec3e978-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:56 crc kubenswrapper[4558]: I0120 17:35:56.187262 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4bd47eac-4090-4fc1-91c4-553ebd84964d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4bd47eac-4090-4fc1-91c4-553ebd84964d" (UID: "4bd47eac-4090-4fc1-91c4-553ebd84964d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:56 crc kubenswrapper[4558]: I0120 17:35:56.227282 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b1ce826a-c52c-42c6-8b67-6074b78c9fb7-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "b1ce826a-c52c-42c6-8b67-6074b78c9fb7" (UID: "b1ce826a-c52c-42c6-8b67-6074b78c9fb7"). InnerVolumeSpecName "galera-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:56 crc kubenswrapper[4558]: I0120 17:35:56.253577 4558 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/b1ce826a-c52c-42c6-8b67-6074b78c9fb7-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:56 crc kubenswrapper[4558]: I0120 17:35:56.253608 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4bd47eac-4090-4fc1-91c4-553ebd84964d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:56 crc kubenswrapper[4558]: I0120 17:35:56.265474 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0122392-1c61-4133-b05f-cc6a622abaf9-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "a0122392-1c61-4133-b05f-cc6a622abaf9" (UID: "a0122392-1c61-4133-b05f-cc6a622abaf9"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:56 crc kubenswrapper[4558]: I0120 17:35:56.267260 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4d77869-9132-4170-b50d-88389a33c597-config-data" (OuterVolumeSpecName: "config-data") pod "f4d77869-9132-4170-b50d-88389a33c597" (UID: "f4d77869-9132-4170-b50d-88389a33c597"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:56 crc kubenswrapper[4558]: I0120 17:35:56.312901 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bbb20173-fee8-4323-afbb-f2bbbec3e978-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "bbb20173-fee8-4323-afbb-f2bbbec3e978" (UID: "bbb20173-fee8-4323-afbb-f2bbbec3e978"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:56 crc kubenswrapper[4558]: I0120 17:35:56.314300 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4d77869-9132-4170-b50d-88389a33c597-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f4d77869-9132-4170-b50d-88389a33c597" (UID: "f4d77869-9132-4170-b50d-88389a33c597"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:56 crc kubenswrapper[4558]: I0120 17:35:56.325290 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bbb20173-fee8-4323-afbb-f2bbbec3e978-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bbb20173-fee8-4323-afbb-f2bbbec3e978" (UID: "bbb20173-fee8-4323-afbb-f2bbbec3e978"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:56 crc kubenswrapper[4558]: I0120 17:35:56.332111 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:35:56 crc kubenswrapper[4558]: I0120 17:35:56.366712 4558 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/bbb20173-fee8-4323-afbb-f2bbbec3e978-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:56 crc kubenswrapper[4558]: I0120 17:35:56.366741 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4d77869-9132-4170-b50d-88389a33c597-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:56 crc kubenswrapper[4558]: I0120 17:35:56.366751 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f4d77869-9132-4170-b50d-88389a33c597-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:56 crc kubenswrapper[4558]: I0120 17:35:56.366759 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bbb20173-fee8-4323-afbb-f2bbbec3e978-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:56 crc kubenswrapper[4558]: I0120 17:35:56.366770 4558 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/a0122392-1c61-4133-b05f-cc6a622abaf9-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:56 crc kubenswrapper[4558]: I0120 17:35:56.373245 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bbb20173-fee8-4323-afbb-f2bbbec3e978-config-data" (OuterVolumeSpecName: "config-data") pod "bbb20173-fee8-4323-afbb-f2bbbec3e978" (UID: "bbb20173-fee8-4323-afbb-f2bbbec3e978"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:56 crc kubenswrapper[4558]: I0120 17:35:56.428253 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4bd47eac-4090-4fc1-91c4-553ebd84964d-config-data" (OuterVolumeSpecName: "config-data") pod "4bd47eac-4090-4fc1-91c4-553ebd84964d" (UID: "4bd47eac-4090-4fc1-91c4-553ebd84964d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:56 crc kubenswrapper[4558]: I0120 17:35:56.476671 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bbb20173-fee8-4323-afbb-f2bbbec3e978-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:56 crc kubenswrapper[4558]: I0120 17:35:56.477131 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4bd47eac-4090-4fc1-91c4-553ebd84964d-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:56 crc kubenswrapper[4558]: I0120 17:35:56.555591 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-59b8cc448d-b9wnn" event={"ID":"73789760-f9bd-44e0-bf9b-c1864e5f8803","Type":"ContainerStarted","Data":"46ea3b795dad740b543a64ff0cb9c02ffc22ebc03e4c397cb2f2f859f67d530b"} Jan 20 17:35:56 crc kubenswrapper[4558]: I0120 17:35:56.557499 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/keystone-59b8cc448d-b9wnn" Jan 20 17:35:56 crc kubenswrapper[4558]: I0120 17:35:56.574355 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/keystone-59b8cc448d-b9wnn" podStartSLOduration=6.574339871 podStartE2EDuration="6.574339871s" podCreationTimestamp="2026-01-20 17:35:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:35:56.568998978 +0000 UTC m=+3250.329336945" watchObservedRunningTime="2026-01-20 17:35:56.574339871 +0000 UTC m=+3250.334677837" Jan 20 17:35:56 crc kubenswrapper[4558]: I0120 17:35:56.590111 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-worker-dd7c4cf57-h4b8r" podUID="4bb0e311-8438-4794-929d-791f0285afaf" containerName="barbican-worker-log" containerID="cri-o://999b8d65f2a7e8adcf754626b17414547aff5caec391cc7f682d54acf1ccdb23" gracePeriod=30 Jan 20 17:35:56 crc kubenswrapper[4558]: I0120 17:35:56.590228 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-worker-dd7c4cf57-h4b8r" podUID="4bb0e311-8438-4794-929d-791f0285afaf" containerName="barbican-worker" containerID="cri-o://58b48bcfefd9b4cc3d847e36e04114fde6f3e780dae5153d04a315662e85320c" gracePeriod=30 Jan 20 17:35:56 crc kubenswrapper[4558]: I0120 17:35:56.591860 4558 generic.go:334] "Generic (PLEG): container finished" podID="3d958c0e-f259-45a7-9c66-9d2b82c92980" containerID="2f3a880c1014f060f55cc93ec2f22fdc0bc281cbb530d9eeb1a0031f82c5f171" exitCode=143 Jan 20 17:35:56 crc kubenswrapper[4558]: I0120 17:35:56.598008 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-api-7cccd8f896-m6rvs" podUID="c35b601d-7c76-46a4-9703-7a52ef1bbf73" containerName="barbican-api-log" containerID="cri-o://3373f1f257436b290a1f8fd92a670c542b443e2f7b3e9ce8dcf84df8344a869e" gracePeriod=30 Jan 20 17:35:56 crc kubenswrapper[4558]: I0120 17:35:56.598099 4558 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-api-7cccd8f896-m6rvs" podUID="c35b601d-7c76-46a4-9703-7a52ef1bbf73" containerName="barbican-api" containerID="cri-o://e0e425bf5a28a6ed179630d953fb4a71b76a9a86fb6c1aee46f9c124aec8a8b5" gracePeriod=30 Jan 20 17:35:56 crc kubenswrapper[4558]: I0120 17:35:56.602814 4558 generic.go:334] "Generic (PLEG): container finished" podID="1ab0a776-6551-4ef8-bd41-0ef0e58d892d" containerID="5ceee282e729d804923fe9fb975a64436f6260d190a2814c82d6d5c4adb2113b" exitCode=143 Jan 20 17:35:56 crc kubenswrapper[4558]: I0120 17:35:56.620476 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/keystone-77fd7f9ff9-zs7mz" podUID="89eb3a4c-0235-4583-ad0a-d015228d8d6b" containerName="keystone-api" containerID="cri-o://ff357d9afa0fdfe50b8131aecfe6bfb5a9652e3416ca436a65179fc6ef691689" gracePeriod=30 Jan 20 17:35:56 crc kubenswrapper[4558]: I0120 17:35:56.668357 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="532ef149-f32a-4b6e-8d0c-458a04952d34" path="/var/lib/kubelet/pods/532ef149-f32a-4b6e-8d0c-458a04952d34/volumes" Jan 20 17:35:56 crc kubenswrapper[4558]: I0120 17:35:56.669230 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="91fac67e-69a9-4d88-9136-0d2484ca0dce" path="/var/lib/kubelet/pods/91fac67e-69a9-4d88-9136-0d2484ca0dce/volumes" Jan 20 17:35:56 crc kubenswrapper[4558]: I0120 17:35:56.686132 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-6d4445bf46-57dqj" event={"ID":"11d1aa99-639c-451a-876e-59de6098e407","Type":"ContainerStarted","Data":"db329d06eb52b93ff0cb7bb5890e9f6c1ce660d1bfa28f5bef02917fc16526c6"} Jan 20 17:35:56 crc kubenswrapper[4558]: I0120 17:35:56.686261 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/barbican-api-7cccd8f896-m6rvs" Jan 20 17:35:56 crc kubenswrapper[4558]: I0120 17:35:56.686278 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-dd7c4cf57-h4b8r" event={"ID":"4bb0e311-8438-4794-929d-791f0285afaf","Type":"ContainerStarted","Data":"58b48bcfefd9b4cc3d847e36e04114fde6f3e780dae5153d04a315662e85320c"} Jan 20 17:35:56 crc kubenswrapper[4558]: I0120 17:35:56.686294 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-55dfc8964d-kck7c" event={"ID":"6b77441a-39f5-4ed5-bf1b-c29900900242","Type":"ContainerStarted","Data":"0eec862e72c84cd059688cd69902bbff28efcdcf452151cfde7339967b5160f1"} Jan 20 17:35:56 crc kubenswrapper[4558]: I0120 17:35:56.686306 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-67cbb6f96d-89sm2" event={"ID":"3d958c0e-f259-45a7-9c66-9d2b82c92980","Type":"ContainerDied","Data":"2f3a880c1014f060f55cc93ec2f22fdc0bc281cbb530d9eeb1a0031f82c5f171"} Jan 20 17:35:56 crc kubenswrapper[4558]: I0120 17:35:56.686352 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/keystone-77fd7f9ff9-zs7mz" Jan 20 17:35:56 crc kubenswrapper[4558]: I0120 17:35:56.686364 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-7cccd8f896-m6rvs" event={"ID":"c35b601d-7c76-46a4-9703-7a52ef1bbf73","Type":"ContainerStarted","Data":"e0e425bf5a28a6ed179630d953fb4a71b76a9a86fb6c1aee46f9c124aec8a8b5"} Jan 20 17:35:56 crc kubenswrapper[4558]: I0120 17:35:56.686374 4558 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-7d7dbcb65b-ttg5w" event={"ID":"1ab0a776-6551-4ef8-bd41-0ef0e58d892d","Type":"ContainerDied","Data":"5ceee282e729d804923fe9fb975a64436f6260d190a2814c82d6d5c4adb2113b"} Jan 20 17:35:56 crc kubenswrapper[4558]: I0120 17:35:56.686389 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-64bd4857c6-2s9h4" event={"ID":"94062d42-28cb-4c8a-afa4-f51458dedc6c","Type":"ContainerStarted","Data":"bd3938888a8a3a7fda5215bb19bd64fb769bd0151d4bb989be309f5b70b3ad7c"} Jan 20 17:35:56 crc kubenswrapper[4558]: I0120 17:35:56.686403 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-77fd7f9ff9-zs7mz" event={"ID":"89eb3a4c-0235-4583-ad0a-d015228d8d6b","Type":"ContainerStarted","Data":"ff357d9afa0fdfe50b8131aecfe6bfb5a9652e3416ca436a65179fc6ef691689"} Jan 20 17:35:56 crc kubenswrapper[4558]: I0120 17:35:56.802006 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/cinder-api-0" podUID="e228514c-6d22-4527-b365-913e3ea3cfdb" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.0.117:8776/healthcheck\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Jan 20 17:35:56 crc kubenswrapper[4558]: I0120 17:35:56.920226 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-api-7cccd8f896-m6rvs" podStartSLOduration=7.920204275 podStartE2EDuration="7.920204275s" podCreationTimestamp="2026-01-20 17:35:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:35:56.834712109 +0000 UTC m=+3250.595050075" watchObservedRunningTime="2026-01-20 17:35:56.920204275 +0000 UTC m=+3250.680542242" Jan 20 17:35:56 crc kubenswrapper[4558]: I0120 17:35:56.958156 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-worker-dd7c4cf57-h4b8r" podStartSLOduration=8.958140771 podStartE2EDuration="8.958140771s" podCreationTimestamp="2026-01-20 17:35:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:35:56.862775117 +0000 UTC m=+3250.623113084" watchObservedRunningTime="2026-01-20 17:35:56.958140771 +0000 UTC m=+3250.718478738" Jan 20 17:35:56 crc kubenswrapper[4558]: I0120 17:35:56.976567 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/keystone-77fd7f9ff9-zs7mz" podStartSLOduration=7.976554174 podStartE2EDuration="7.976554174s" podCreationTimestamp="2026-01-20 17:35:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:35:56.886672282 +0000 UTC m=+3250.647010249" watchObservedRunningTime="2026-01-20 17:35:56.976554174 +0000 UTC m=+3250.736892141" Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.078550 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.083337 4558 scope.go:117] "RemoveContainer" containerID="ea6d1ca32a116c466cabb20cadcc9e9af27e7b82039308f7f1df1e6178fa2a36" Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.200041 4558 scope.go:117] "RemoveContainer" 
containerID="f9994a423f0457d16e1857b55313f41375f006cd328176af514da36879ef8b96" Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.564362 4558 scope.go:117] "RemoveContainer" containerID="1f1063795318c00e5b19c483c70ac74ca60c9c396081249534c260035e47ad7a" Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.614063 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-77fd7f9ff9-zs7mz" Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.638441 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-api-7cccd8f896-m6rvs" Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.654234 4558 scope.go:117] "RemoveContainer" containerID="96c4e3a692bc0dac49e1c6d07e764ff13bc464c6bdc3c29e2e82a3d65c290531" Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.670983 4558 generic.go:334] "Generic (PLEG): container finished" podID="4bb0e311-8438-4794-929d-791f0285afaf" containerID="999b8d65f2a7e8adcf754626b17414547aff5caec391cc7f682d54acf1ccdb23" exitCode=143 Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.671049 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-dd7c4cf57-h4b8r" event={"ID":"4bb0e311-8438-4794-929d-791f0285afaf","Type":"ContainerDied","Data":"999b8d65f2a7e8adcf754626b17414547aff5caec391cc7f682d54acf1ccdb23"} Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.708617 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-dd549b649-grdrm" event={"ID":"640de3b3-60e1-41b8-ab00-22e375bad65c","Type":"ContainerStarted","Data":"6c5d77de3e10ffa19cd49dc2e45df3fcf7c673448b447d8327c2137ebcc78c33"} Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.715982 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovn-northd-0_39600642-1418-4cb4-8617-f2ecc7089e0b/ovn-northd/0.log" Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.716012 4558 generic.go:334] "Generic (PLEG): container finished" podID="39600642-1418-4cb4-8617-f2ecc7089e0b" containerID="33b43d304994a43dcb87337ad0c6e08deaf4a2507647b3ba6f756482ec39e182" exitCode=139 Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.716049 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"39600642-1418-4cb4-8617-f2ecc7089e0b","Type":"ContainerDied","Data":"33b43d304994a43dcb87337ad0c6e08deaf4a2507647b3ba6f756482ec39e182"} Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.729518 4558 generic.go:334] "Generic (PLEG): container finished" podID="c35b601d-7c76-46a4-9703-7a52ef1bbf73" containerID="e0e425bf5a28a6ed179630d953fb4a71b76a9a86fb6c1aee46f9c124aec8a8b5" exitCode=0 Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.729551 4558 generic.go:334] "Generic (PLEG): container finished" podID="c35b601d-7c76-46a4-9703-7a52ef1bbf73" containerID="3373f1f257436b290a1f8fd92a670c542b443e2f7b3e9ce8dcf84df8344a869e" exitCode=143 Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.729620 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-7cccd8f896-m6rvs" event={"ID":"c35b601d-7c76-46a4-9703-7a52ef1bbf73","Type":"ContainerDied","Data":"e0e425bf5a28a6ed179630d953fb4a71b76a9a86fb6c1aee46f9c124aec8a8b5"} Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.729653 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-7cccd8f896-m6rvs" 
event={"ID":"c35b601d-7c76-46a4-9703-7a52ef1bbf73","Type":"ContainerDied","Data":"3373f1f257436b290a1f8fd92a670c542b443e2f7b3e9ce8dcf84df8344a869e"} Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.729662 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-7cccd8f896-m6rvs" event={"ID":"c35b601d-7c76-46a4-9703-7a52ef1bbf73","Type":"ContainerDied","Data":"0bc7e2c3dbc064ab76b9b4762dd97b35dee711a2da893fa3da5a26872cf6121d"} Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.729739 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-api-7cccd8f896-m6rvs" Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.738477 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"8b45e22f-53b9-4228-a28a-a5db18b6e583","Type":"ContainerStarted","Data":"39e249504e929dc695fa7780d0e4dbc5348077f58efd0a1fff4b4a74e8b3a27a"} Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.740537 4558 scope.go:117] "RemoveContainer" containerID="b138c24e8875ddd5251012c3048e6574ab595b0a17f136458768b1a33c6227cb" Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.750102 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7ctxp\" (UniqueName: \"kubernetes.io/projected/c35b601d-7c76-46a4-9703-7a52ef1bbf73-kube-api-access-7ctxp\") pod \"c35b601d-7c76-46a4-9703-7a52ef1bbf73\" (UID: \"c35b601d-7c76-46a4-9703-7a52ef1bbf73\") " Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.750200 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/89eb3a4c-0235-4583-ad0a-d015228d8d6b-credential-keys\") pod \"89eb3a4c-0235-4583-ad0a-d015228d8d6b\" (UID: \"89eb3a4c-0235-4583-ad0a-d015228d8d6b\") " Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.750270 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c35b601d-7c76-46a4-9703-7a52ef1bbf73-config-data\") pod \"c35b601d-7c76-46a4-9703-7a52ef1bbf73\" (UID: \"c35b601d-7c76-46a4-9703-7a52ef1bbf73\") " Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.750304 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/89eb3a4c-0235-4583-ad0a-d015228d8d6b-config-data\") pod \"89eb3a4c-0235-4583-ad0a-d015228d8d6b\" (UID: \"89eb3a4c-0235-4583-ad0a-d015228d8d6b\") " Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.750405 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c35b601d-7c76-46a4-9703-7a52ef1bbf73-internal-tls-certs\") pod \"c35b601d-7c76-46a4-9703-7a52ef1bbf73\" (UID: \"c35b601d-7c76-46a4-9703-7a52ef1bbf73\") " Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.750427 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-6874444f77-w92bj" event={"ID":"a4d26c7d-095d-4f3d-b6cc-c270961e9794","Type":"ContainerDied","Data":"964d0ad276f99dd9fa9d8522022840ffe7ce1002dc0bd50778cbc03decd4341c"} Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.750445 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/89eb3a4c-0235-4583-ad0a-d015228d8d6b-public-tls-certs\") pod 
\"89eb3a4c-0235-4583-ad0a-d015228d8d6b\" (UID: \"89eb3a4c-0235-4583-ad0a-d015228d8d6b\") " Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.750470 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/89eb3a4c-0235-4583-ad0a-d015228d8d6b-fernet-keys\") pod \"89eb3a4c-0235-4583-ad0a-d015228d8d6b\" (UID: \"89eb3a4c-0235-4583-ad0a-d015228d8d6b\") " Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.750397 4558 generic.go:334] "Generic (PLEG): container finished" podID="a4d26c7d-095d-4f3d-b6cc-c270961e9794" containerID="964d0ad276f99dd9fa9d8522022840ffe7ce1002dc0bd50778cbc03decd4341c" exitCode=0 Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.753326 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c35b601d-7c76-46a4-9703-7a52ef1bbf73-logs\") pod \"c35b601d-7c76-46a4-9703-7a52ef1bbf73\" (UID: \"c35b601d-7c76-46a4-9703-7a52ef1bbf73\") " Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.753383 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89eb3a4c-0235-4583-ad0a-d015228d8d6b-combined-ca-bundle\") pod \"89eb3a4c-0235-4583-ad0a-d015228d8d6b\" (UID: \"89eb3a4c-0235-4583-ad0a-d015228d8d6b\") " Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.753453 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/89eb3a4c-0235-4583-ad0a-d015228d8d6b-scripts\") pod \"89eb3a4c-0235-4583-ad0a-d015228d8d6b\" (UID: \"89eb3a4c-0235-4583-ad0a-d015228d8d6b\") " Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.753496 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c35b601d-7c76-46a4-9703-7a52ef1bbf73-public-tls-certs\") pod \"c35b601d-7c76-46a4-9703-7a52ef1bbf73\" (UID: \"c35b601d-7c76-46a4-9703-7a52ef1bbf73\") " Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.753586 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/89eb3a4c-0235-4583-ad0a-d015228d8d6b-internal-tls-certs\") pod \"89eb3a4c-0235-4583-ad0a-d015228d8d6b\" (UID: \"89eb3a4c-0235-4583-ad0a-d015228d8d6b\") " Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.753647 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c35b601d-7c76-46a4-9703-7a52ef1bbf73-combined-ca-bundle\") pod \"c35b601d-7c76-46a4-9703-7a52ef1bbf73\" (UID: \"c35b601d-7c76-46a4-9703-7a52ef1bbf73\") " Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.753670 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c35b601d-7c76-46a4-9703-7a52ef1bbf73-config-data-custom\") pod \"c35b601d-7c76-46a4-9703-7a52ef1bbf73\" (UID: \"c35b601d-7c76-46a4-9703-7a52ef1bbf73\") " Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.753762 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gbhsk\" (UniqueName: \"kubernetes.io/projected/89eb3a4c-0235-4583-ad0a-d015228d8d6b-kube-api-access-gbhsk\") pod \"89eb3a4c-0235-4583-ad0a-d015228d8d6b\" (UID: \"89eb3a4c-0235-4583-ad0a-d015228d8d6b\") " Jan 20 17:35:57 crc 
kubenswrapper[4558]: I0120 17:35:57.755551 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.765317 4558 generic.go:334] "Generic (PLEG): container finished" podID="89eb3a4c-0235-4583-ad0a-d015228d8d6b" containerID="ff357d9afa0fdfe50b8131aecfe6bfb5a9652e3416ca436a65179fc6ef691689" exitCode=0 Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.769418 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-77fd7f9ff9-zs7mz" Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.769811 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-77fd7f9ff9-zs7mz" event={"ID":"89eb3a4c-0235-4583-ad0a-d015228d8d6b","Type":"ContainerDied","Data":"ff357d9afa0fdfe50b8131aecfe6bfb5a9652e3416ca436a65179fc6ef691689"} Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.769931 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-77fd7f9ff9-zs7mz" event={"ID":"89eb3a4c-0235-4583-ad0a-d015228d8d6b","Type":"ContainerDied","Data":"3574ce4b764847fbce0cd2ee876e1f434a17b27120ecaf4e6218396e48f814f0"} Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.771286 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c35b601d-7c76-46a4-9703-7a52ef1bbf73-logs" (OuterVolumeSpecName: "logs") pod "c35b601d-7c76-46a4-9703-7a52ef1bbf73" (UID: "c35b601d-7c76-46a4-9703-7a52ef1bbf73"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.771575 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c35b601d-7c76-46a4-9703-7a52ef1bbf73-kube-api-access-7ctxp" (OuterVolumeSpecName: "kube-api-access-7ctxp") pod "c35b601d-7c76-46a4-9703-7a52ef1bbf73" (UID: "c35b601d-7c76-46a4-9703-7a52ef1bbf73"). InnerVolumeSpecName "kube-api-access-7ctxp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.773286 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/89eb3a4c-0235-4583-ad0a-d015228d8d6b-scripts" (OuterVolumeSpecName: "scripts") pod "89eb3a4c-0235-4583-ad0a-d015228d8d6b" (UID: "89eb3a4c-0235-4583-ad0a-d015228d8d6b"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.786917 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7ctxp\" (UniqueName: \"kubernetes.io/projected/c35b601d-7c76-46a4-9703-7a52ef1bbf73-kube-api-access-7ctxp\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.786947 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c35b601d-7c76-46a4-9703-7a52ef1bbf73-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.786957 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/89eb3a4c-0235-4583-ad0a-d015228d8d6b-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.807393 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/89eb3a4c-0235-4583-ad0a-d015228d8d6b-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "89eb3a4c-0235-4583-ad0a-d015228d8d6b" (UID: "89eb3a4c-0235-4583-ad0a-d015228d8d6b"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.828587 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.858411 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.863872 4558 scope.go:117] "RemoveContainer" containerID="e0e425bf5a28a6ed179630d953fb4a71b76a9a86fb6c1aee46f9c124aec8a8b5" Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.894227 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.896991 4558 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/89eb3a4c-0235-4583-ad0a-d015228d8d6b-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.910561 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c35b601d-7c76-46a4-9703-7a52ef1bbf73-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "c35b601d-7c76-46a4-9703-7a52ef1bbf73" (UID: "c35b601d-7c76-46a4-9703-7a52ef1bbf73"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.923295 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/89eb3a4c-0235-4583-ad0a-d015228d8d6b-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "89eb3a4c-0235-4583-ad0a-d015228d8d6b" (UID: "89eb3a4c-0235-4583-ad0a-d015228d8d6b"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.933659 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/89eb3a4c-0235-4583-ad0a-d015228d8d6b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "89eb3a4c-0235-4583-ad0a-d015228d8d6b" (UID: "89eb3a4c-0235-4583-ad0a-d015228d8d6b"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.943672 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/89eb3a4c-0235-4583-ad0a-d015228d8d6b-kube-api-access-gbhsk" (OuterVolumeSpecName: "kube-api-access-gbhsk") pod "89eb3a4c-0235-4583-ad0a-d015228d8d6b" (UID: "89eb3a4c-0235-4583-ad0a-d015228d8d6b"). InnerVolumeSpecName "kube-api-access-gbhsk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.954754 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.962416 4558 scope.go:117] "RemoveContainer" containerID="3373f1f257436b290a1f8fd92a670c542b443e2f7b3e9ce8dcf84df8344a869e" Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.964803 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-6874444f77-w92bj" Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.967912 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:35:57 crc kubenswrapper[4558]: E0120 17:35:57.968456 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c35b601d-7c76-46a4-9703-7a52ef1bbf73" containerName="barbican-api-log" Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.968471 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c35b601d-7c76-46a4-9703-7a52ef1bbf73" containerName="barbican-api-log" Jan 20 17:35:57 crc kubenswrapper[4558]: E0120 17:35:57.968485 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c35b601d-7c76-46a4-9703-7a52ef1bbf73" containerName="barbican-api" Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.968492 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c35b601d-7c76-46a4-9703-7a52ef1bbf73" containerName="barbican-api" Jan 20 17:35:57 crc kubenswrapper[4558]: E0120 17:35:57.968509 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89eb3a4c-0235-4583-ad0a-d015228d8d6b" containerName="keystone-api" Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.968515 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="89eb3a4c-0235-4583-ad0a-d015228d8d6b" containerName="keystone-api" Jan 20 17:35:57 crc kubenswrapper[4558]: E0120 17:35:57.968530 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4d26c7d-095d-4f3d-b6cc-c270961e9794" containerName="keystone-api" Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.968535 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4d26c7d-095d-4f3d-b6cc-c270961e9794" containerName="keystone-api" Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.968729 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a4d26c7d-095d-4f3d-b6cc-c270961e9794" containerName="keystone-api" Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.968745 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="89eb3a4c-0235-4583-ad0a-d015228d8d6b" containerName="keystone-api" Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.968754 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c35b601d-7c76-46a4-9703-7a52ef1bbf73" containerName="barbican-api-log" Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.968767 4558 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="c35b601d-7c76-46a4-9703-7a52ef1bbf73" containerName="barbican-api" Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.970899 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.974593 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.974746 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ceilometer-internal-svc" Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.974780 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.985646 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.987684 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.993932 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-nova-dockercfg-24bfk" Jan 20 17:35:57 crc kubenswrapper[4558]: I0120 17:35:57.994739 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-conductor-config-data" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.000696 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.002374 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c35b601d-7c76-46a4-9703-7a52ef1bbf73-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.002417 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gbhsk\" (UniqueName: \"kubernetes.io/projected/89eb3a4c-0235-4583-ad0a-d015228d8d6b-kube-api-access-gbhsk\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.002431 4558 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/89eb3a4c-0235-4583-ad0a-d015228d8d6b-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.002461 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89eb3a4c-0235-4583-ad0a-d015228d8d6b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.007488 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c35b601d-7c76-46a4-9703-7a52ef1bbf73-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c35b601d-7c76-46a4-9703-7a52ef1bbf73" (UID: "c35b601d-7c76-46a4-9703-7a52ef1bbf73"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.024889 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.032722 4558 scope.go:117] "RemoveContainer" containerID="e0e425bf5a28a6ed179630d953fb4a71b76a9a86fb6c1aee46f9c124aec8a8b5" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.037932 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.038673 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovn-northd-0_39600642-1418-4cb4-8617-f2ecc7089e0b/ovn-northd/0.log" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.038734 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:35:58 crc kubenswrapper[4558]: E0120 17:35:58.039071 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e0e425bf5a28a6ed179630d953fb4a71b76a9a86fb6c1aee46f9c124aec8a8b5\": container with ID starting with e0e425bf5a28a6ed179630d953fb4a71b76a9a86fb6c1aee46f9c124aec8a8b5 not found: ID does not exist" containerID="e0e425bf5a28a6ed179630d953fb4a71b76a9a86fb6c1aee46f9c124aec8a8b5" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.039094 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e0e425bf5a28a6ed179630d953fb4a71b76a9a86fb6c1aee46f9c124aec8a8b5"} err="failed to get container status \"e0e425bf5a28a6ed179630d953fb4a71b76a9a86fb6c1aee46f9c124aec8a8b5\": rpc error: code = NotFound desc = could not find container \"e0e425bf5a28a6ed179630d953fb4a71b76a9a86fb6c1aee46f9c124aec8a8b5\": container with ID starting with e0e425bf5a28a6ed179630d953fb4a71b76a9a86fb6c1aee46f9c124aec8a8b5 not found: ID does not exist" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.039116 4558 scope.go:117] "RemoveContainer" containerID="3373f1f257436b290a1f8fd92a670c542b443e2f7b3e9ce8dcf84df8344a869e" Jan 20 17:35:58 crc kubenswrapper[4558]: E0120 17:35:58.039390 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3373f1f257436b290a1f8fd92a670c542b443e2f7b3e9ce8dcf84df8344a869e\": container with ID starting with 3373f1f257436b290a1f8fd92a670c542b443e2f7b3e9ce8dcf84df8344a869e not found: ID does not exist" containerID="3373f1f257436b290a1f8fd92a670c542b443e2f7b3e9ce8dcf84df8344a869e" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.039406 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3373f1f257436b290a1f8fd92a670c542b443e2f7b3e9ce8dcf84df8344a869e"} err="failed to get container status \"3373f1f257436b290a1f8fd92a670c542b443e2f7b3e9ce8dcf84df8344a869e\": rpc error: code = NotFound desc = could not find container \"3373f1f257436b290a1f8fd92a670c542b443e2f7b3e9ce8dcf84df8344a869e\": container with ID starting with 3373f1f257436b290a1f8fd92a670c542b443e2f7b3e9ce8dcf84df8344a869e not found: ID does not exist" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.039419 4558 scope.go:117] "RemoveContainer" containerID="e0e425bf5a28a6ed179630d953fb4a71b76a9a86fb6c1aee46f9c124aec8a8b5" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.039595 4558 pod_container_deletor.go:53] "DeleteContainer 
returned error" containerID={"Type":"cri-o","ID":"e0e425bf5a28a6ed179630d953fb4a71b76a9a86fb6c1aee46f9c124aec8a8b5"} err="failed to get container status \"e0e425bf5a28a6ed179630d953fb4a71b76a9a86fb6c1aee46f9c124aec8a8b5\": rpc error: code = NotFound desc = could not find container \"e0e425bf5a28a6ed179630d953fb4a71b76a9a86fb6c1aee46f9c124aec8a8b5\": container with ID starting with e0e425bf5a28a6ed179630d953fb4a71b76a9a86fb6c1aee46f9c124aec8a8b5 not found: ID does not exist" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.039614 4558 scope.go:117] "RemoveContainer" containerID="3373f1f257436b290a1f8fd92a670c542b443e2f7b3e9ce8dcf84df8344a869e" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.039768 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3373f1f257436b290a1f8fd92a670c542b443e2f7b3e9ce8dcf84df8344a869e"} err="failed to get container status \"3373f1f257436b290a1f8fd92a670c542b443e2f7b3e9ce8dcf84df8344a869e\": rpc error: code = NotFound desc = could not find container \"3373f1f257436b290a1f8fd92a670c542b443e2f7b3e9ce8dcf84df8344a869e\": container with ID starting with 3373f1f257436b290a1f8fd92a670c542b443e2f7b3e9ce8dcf84df8344a869e not found: ID does not exist" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.039784 4558 scope.go:117] "RemoveContainer" containerID="ff357d9afa0fdfe50b8131aecfe6bfb5a9652e3416ca436a65179fc6ef691689" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.076707 4558 scope.go:117] "RemoveContainer" containerID="ff357d9afa0fdfe50b8131aecfe6bfb5a9652e3416ca436a65179fc6ef691689" Jan 20 17:35:58 crc kubenswrapper[4558]: E0120 17:35:58.081578 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ff357d9afa0fdfe50b8131aecfe6bfb5a9652e3416ca436a65179fc6ef691689\": container with ID starting with ff357d9afa0fdfe50b8131aecfe6bfb5a9652e3416ca436a65179fc6ef691689 not found: ID does not exist" containerID="ff357d9afa0fdfe50b8131aecfe6bfb5a9652e3416ca436a65179fc6ef691689" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.081632 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ff357d9afa0fdfe50b8131aecfe6bfb5a9652e3416ca436a65179fc6ef691689"} err="failed to get container status \"ff357d9afa0fdfe50b8131aecfe6bfb5a9652e3416ca436a65179fc6ef691689\": rpc error: code = NotFound desc = could not find container \"ff357d9afa0fdfe50b8131aecfe6bfb5a9652e3416ca436a65179fc6ef691689\": container with ID starting with ff357d9afa0fdfe50b8131aecfe6bfb5a9652e3416ca436a65179fc6ef691689 not found: ID does not exist" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.094475 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.110714 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/39600642-1418-4cb4-8617-f2ecc7089e0b-metrics-certs-tls-certs\") pod \"39600642-1418-4cb4-8617-f2ecc7089e0b\" (UID: \"39600642-1418-4cb4-8617-f2ecc7089e0b\") " Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.110774 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/39600642-1418-4cb4-8617-f2ecc7089e0b-scripts\") pod \"39600642-1418-4cb4-8617-f2ecc7089e0b\" (UID: \"39600642-1418-4cb4-8617-f2ecc7089e0b\") " Jan 20 
17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.110850 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a4d26c7d-095d-4f3d-b6cc-c270961e9794-config-data\") pod \"a4d26c7d-095d-4f3d-b6cc-c270961e9794\" (UID: \"a4d26c7d-095d-4f3d-b6cc-c270961e9794\") " Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.110899 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/a4d26c7d-095d-4f3d-b6cc-c270961e9794-fernet-keys\") pod \"a4d26c7d-095d-4f3d-b6cc-c270961e9794\" (UID: \"a4d26c7d-095d-4f3d-b6cc-c270961e9794\") " Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.110943 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a4d26c7d-095d-4f3d-b6cc-c270961e9794-internal-tls-certs\") pod \"a4d26c7d-095d-4f3d-b6cc-c270961e9794\" (UID: \"a4d26c7d-095d-4f3d-b6cc-c270961e9794\") " Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.110964 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/39600642-1418-4cb4-8617-f2ecc7089e0b-ovn-rundir\") pod \"39600642-1418-4cb4-8617-f2ecc7089e0b\" (UID: \"39600642-1418-4cb4-8617-f2ecc7089e0b\") " Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.111042 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4jfnx\" (UniqueName: \"kubernetes.io/projected/39600642-1418-4cb4-8617-f2ecc7089e0b-kube-api-access-4jfnx\") pod \"39600642-1418-4cb4-8617-f2ecc7089e0b\" (UID: \"39600642-1418-4cb4-8617-f2ecc7089e0b\") " Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.111063 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pfctj\" (UniqueName: \"kubernetes.io/projected/a4d26c7d-095d-4f3d-b6cc-c270961e9794-kube-api-access-pfctj\") pod \"a4d26c7d-095d-4f3d-b6cc-c270961e9794\" (UID: \"a4d26c7d-095d-4f3d-b6cc-c270961e9794\") " Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.111090 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/a4d26c7d-095d-4f3d-b6cc-c270961e9794-credential-keys\") pod \"a4d26c7d-095d-4f3d-b6cc-c270961e9794\" (UID: \"a4d26c7d-095d-4f3d-b6cc-c270961e9794\") " Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.111290 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39600642-1418-4cb4-8617-f2ecc7089e0b-combined-ca-bundle\") pod \"39600642-1418-4cb4-8617-f2ecc7089e0b\" (UID: \"39600642-1418-4cb4-8617-f2ecc7089e0b\") " Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.111368 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/39600642-1418-4cb4-8617-f2ecc7089e0b-ovn-northd-tls-certs\") pod \"39600642-1418-4cb4-8617-f2ecc7089e0b\" (UID: \"39600642-1418-4cb4-8617-f2ecc7089e0b\") " Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.111405 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a4d26c7d-095d-4f3d-b6cc-c270961e9794-scripts\") pod \"a4d26c7d-095d-4f3d-b6cc-c270961e9794\" (UID: \"a4d26c7d-095d-4f3d-b6cc-c270961e9794\") " Jan 20 17:35:58 
crc kubenswrapper[4558]: I0120 17:35:58.111405 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/89eb3a4c-0235-4583-ad0a-d015228d8d6b-config-data" (OuterVolumeSpecName: "config-data") pod "89eb3a4c-0235-4583-ad0a-d015228d8d6b" (UID: "89eb3a4c-0235-4583-ad0a-d015228d8d6b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.117444 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/39600642-1418-4cb4-8617-f2ecc7089e0b-ovn-rundir" (OuterVolumeSpecName: "ovn-rundir") pod "39600642-1418-4cb4-8617-f2ecc7089e0b" (UID: "39600642-1418-4cb4-8617-f2ecc7089e0b"). InnerVolumeSpecName "ovn-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.118151 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/39600642-1418-4cb4-8617-f2ecc7089e0b-config\") pod \"39600642-1418-4cb4-8617-f2ecc7089e0b\" (UID: \"39600642-1418-4cb4-8617-f2ecc7089e0b\") " Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.118202 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/89eb3a4c-0235-4583-ad0a-d015228d8d6b-config-data\") pod \"89eb3a4c-0235-4583-ad0a-d015228d8d6b\" (UID: \"89eb3a4c-0235-4583-ad0a-d015228d8d6b\") " Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.118229 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a4d26c7d-095d-4f3d-b6cc-c270961e9794-combined-ca-bundle\") pod \"a4d26c7d-095d-4f3d-b6cc-c270961e9794\" (UID: \"a4d26c7d-095d-4f3d-b6cc-c270961e9794\") " Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.118791 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a4d26c7d-095d-4f3d-b6cc-c270961e9794-public-tls-certs\") pod \"a4d26c7d-095d-4f3d-b6cc-c270961e9794\" (UID: \"a4d26c7d-095d-4f3d-b6cc-c270961e9794\") " Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.119198 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/466e0ec0-043e-49a6-b30d-0272443cb839-log-httpd\") pod \"ceilometer-0\" (UID: \"466e0ec0-043e-49a6-b30d-0272443cb839\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.119259 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/466e0ec0-043e-49a6-b30d-0272443cb839-config-data\") pod \"ceilometer-0\" (UID: \"466e0ec0-043e-49a6-b30d-0272443cb839\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.119606 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d2b6a10-ca95-4d72-a80b-1706822a07a7-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"0d2b6a10-ca95-4d72-a80b-1706822a07a7\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.119639 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"kube-api-access-qmb4z\" (UniqueName: \"kubernetes.io/projected/466e0ec0-043e-49a6-b30d-0272443cb839-kube-api-access-qmb4z\") pod \"ceilometer-0\" (UID: \"466e0ec0-043e-49a6-b30d-0272443cb839\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.119685 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/466e0ec0-043e-49a6-b30d-0272443cb839-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"466e0ec0-043e-49a6-b30d-0272443cb839\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.119775 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c7q7h\" (UniqueName: \"kubernetes.io/projected/0d2b6a10-ca95-4d72-a80b-1706822a07a7-kube-api-access-c7q7h\") pod \"nova-cell0-conductor-0\" (UID: \"0d2b6a10-ca95-4d72-a80b-1706822a07a7\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.119891 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/466e0ec0-043e-49a6-b30d-0272443cb839-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"466e0ec0-043e-49a6-b30d-0272443cb839\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.119970 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/466e0ec0-043e-49a6-b30d-0272443cb839-scripts\") pod \"ceilometer-0\" (UID: \"466e0ec0-043e-49a6-b30d-0272443cb839\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.120010 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/466e0ec0-043e-49a6-b30d-0272443cb839-run-httpd\") pod \"ceilometer-0\" (UID: \"466e0ec0-043e-49a6-b30d-0272443cb839\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.120026 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/466e0ec0-043e-49a6-b30d-0272443cb839-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"466e0ec0-043e-49a6-b30d-0272443cb839\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.120046 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d2b6a10-ca95-4d72-a80b-1706822a07a7-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"0d2b6a10-ca95-4d72-a80b-1706822a07a7\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.123854 4558 reconciler_common.go:293] "Volume detached for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/39600642-1418-4cb4-8617-f2ecc7089e0b-ovn-rundir\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.123880 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c35b601d-7c76-46a4-9703-7a52ef1bbf73-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 
17:35:58 crc kubenswrapper[4558]: W0120 17:35:58.124039 4558 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/89eb3a4c-0235-4583-ad0a-d015228d8d6b/volumes/kubernetes.io~secret/config-data Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.124055 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/89eb3a4c-0235-4583-ad0a-d015228d8d6b-config-data" (OuterVolumeSpecName: "config-data") pod "89eb3a4c-0235-4583-ad0a-d015228d8d6b" (UID: "89eb3a4c-0235-4583-ad0a-d015228d8d6b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.137586 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:35:58 crc kubenswrapper[4558]: E0120 17:35:58.138105 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39600642-1418-4cb4-8617-f2ecc7089e0b" containerName="ovn-northd" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.138119 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="39600642-1418-4cb4-8617-f2ecc7089e0b" containerName="ovn-northd" Jan 20 17:35:58 crc kubenswrapper[4558]: E0120 17:35:58.138132 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39600642-1418-4cb4-8617-f2ecc7089e0b" containerName="openstack-network-exporter" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.138140 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="39600642-1418-4cb4-8617-f2ecc7089e0b" containerName="openstack-network-exporter" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.138389 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="39600642-1418-4cb4-8617-f2ecc7089e0b" containerName="ovn-northd" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.138409 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="39600642-1418-4cb4-8617-f2ecc7089e0b" containerName="openstack-network-exporter" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.139772 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.142880 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-scheduler-config-data" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.144124 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a4d26c7d-095d-4f3d-b6cc-c270961e9794-scripts" (OuterVolumeSpecName: "scripts") pod "a4d26c7d-095d-4f3d-b6cc-c270961e9794" (UID: "a4d26c7d-095d-4f3d-b6cc-c270961e9794"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.144362 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a4d26c7d-095d-4f3d-b6cc-c270961e9794-kube-api-access-pfctj" (OuterVolumeSpecName: "kube-api-access-pfctj") pod "a4d26c7d-095d-4f3d-b6cc-c270961e9794" (UID: "a4d26c7d-095d-4f3d-b6cc-c270961e9794"). InnerVolumeSpecName "kube-api-access-pfctj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.146703 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/39600642-1418-4cb4-8617-f2ecc7089e0b-config" (OuterVolumeSpecName: "config") pod "39600642-1418-4cb4-8617-f2ecc7089e0b" (UID: "39600642-1418-4cb4-8617-f2ecc7089e0b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.147345 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/39600642-1418-4cb4-8617-f2ecc7089e0b-kube-api-access-4jfnx" (OuterVolumeSpecName: "kube-api-access-4jfnx") pod "39600642-1418-4cb4-8617-f2ecc7089e0b" (UID: "39600642-1418-4cb4-8617-f2ecc7089e0b"). InnerVolumeSpecName "kube-api-access-4jfnx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.149155 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/39600642-1418-4cb4-8617-f2ecc7089e0b-scripts" (OuterVolumeSpecName: "scripts") pod "39600642-1418-4cb4-8617-f2ecc7089e0b" (UID: "39600642-1418-4cb4-8617-f2ecc7089e0b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.163787 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.178043 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.180374 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a4d26c7d-095d-4f3d-b6cc-c270961e9794-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "a4d26c7d-095d-4f3d-b6cc-c270961e9794" (UID: "a4d26c7d-095d-4f3d-b6cc-c270961e9794"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.184201 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.190628 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a4d26c7d-095d-4f3d-b6cc-c270961e9794-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "a4d26c7d-095d-4f3d-b6cc-c270961e9794" (UID: "a4d26c7d-095d-4f3d-b6cc-c270961e9794"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.190648 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.194206 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.200151 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.202046 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.206224 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-cell1-config-data" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.206463 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-galera-openstack-cell1-svc" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.206583 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"galera-openstack-cell1-dockercfg-ftflb" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.206907 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-cell1-scripts" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.207509 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.208823 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.212785 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-conductor-config-data" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.217022 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.227760 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/466e0ec0-043e-49a6-b30d-0272443cb839-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"466e0ec0-043e-49a6-b30d-0272443cb839\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.227811 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9800a14c-ebf5-4a5d-b384-c133973b55ff-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"9800a14c-ebf5-4a5d-b384-c133973b55ff\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.227863 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/466e0ec0-043e-49a6-b30d-0272443cb839-scripts\") pod \"ceilometer-0\" (UID: \"466e0ec0-043e-49a6-b30d-0272443cb839\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.227900 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/466e0ec0-043e-49a6-b30d-0272443cb839-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"466e0ec0-043e-49a6-b30d-0272443cb839\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.227917 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9800a14c-ebf5-4a5d-b384-c133973b55ff-scripts\") pod \"cinder-scheduler-0\" (UID: \"9800a14c-ebf5-4a5d-b384-c133973b55ff\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.227936 4558 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/466e0ec0-043e-49a6-b30d-0272443cb839-run-httpd\") pod \"ceilometer-0\" (UID: \"466e0ec0-043e-49a6-b30d-0272443cb839\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.227957 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d2b6a10-ca95-4d72-a80b-1706822a07a7-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"0d2b6a10-ca95-4d72-a80b-1706822a07a7\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.228019 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9800a14c-ebf5-4a5d-b384-c133973b55ff-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"9800a14c-ebf5-4a5d-b384-c133973b55ff\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.228053 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/466e0ec0-043e-49a6-b30d-0272443cb839-log-httpd\") pod \"ceilometer-0\" (UID: \"466e0ec0-043e-49a6-b30d-0272443cb839\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.228091 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/466e0ec0-043e-49a6-b30d-0272443cb839-config-data\") pod \"ceilometer-0\" (UID: \"466e0ec0-043e-49a6-b30d-0272443cb839\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.228184 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6tqjr\" (UniqueName: \"kubernetes.io/projected/9800a14c-ebf5-4a5d-b384-c133973b55ff-kube-api-access-6tqjr\") pod \"cinder-scheduler-0\" (UID: \"9800a14c-ebf5-4a5d-b384-c133973b55ff\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.228208 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9800a14c-ebf5-4a5d-b384-c133973b55ff-config-data\") pod \"cinder-scheduler-0\" (UID: \"9800a14c-ebf5-4a5d-b384-c133973b55ff\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.228237 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9800a14c-ebf5-4a5d-b384-c133973b55ff-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"9800a14c-ebf5-4a5d-b384-c133973b55ff\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.228285 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d2b6a10-ca95-4d72-a80b-1706822a07a7-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"0d2b6a10-ca95-4d72-a80b-1706822a07a7\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.228313 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qmb4z\" (UniqueName: 
\"kubernetes.io/projected/466e0ec0-043e-49a6-b30d-0272443cb839-kube-api-access-qmb4z\") pod \"ceilometer-0\" (UID: \"466e0ec0-043e-49a6-b30d-0272443cb839\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.228350 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/466e0ec0-043e-49a6-b30d-0272443cb839-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"466e0ec0-043e-49a6-b30d-0272443cb839\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.228419 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c7q7h\" (UniqueName: \"kubernetes.io/projected/0d2b6a10-ca95-4d72-a80b-1706822a07a7-kube-api-access-c7q7h\") pod \"nova-cell0-conductor-0\" (UID: \"0d2b6a10-ca95-4d72-a80b-1706822a07a7\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.228515 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/39600642-1418-4cb4-8617-f2ecc7089e0b-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.228528 4558 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/a4d26c7d-095d-4f3d-b6cc-c270961e9794-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.228540 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4jfnx\" (UniqueName: \"kubernetes.io/projected/39600642-1418-4cb4-8617-f2ecc7089e0b-kube-api-access-4jfnx\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.228554 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pfctj\" (UniqueName: \"kubernetes.io/projected/a4d26c7d-095d-4f3d-b6cc-c270961e9794-kube-api-access-pfctj\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.228563 4558 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/a4d26c7d-095d-4f3d-b6cc-c270961e9794-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.228573 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a4d26c7d-095d-4f3d-b6cc-c270961e9794-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.228585 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/39600642-1418-4cb4-8617-f2ecc7089e0b-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.228594 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/89eb3a4c-0235-4583-ad0a-d015228d8d6b-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.229543 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/466e0ec0-043e-49a6-b30d-0272443cb839-log-httpd\") pod \"ceilometer-0\" (UID: \"466e0ec0-043e-49a6-b30d-0272443cb839\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.238701 4558 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/466e0ec0-043e-49a6-b30d-0272443cb839-run-httpd\") pod \"ceilometer-0\" (UID: \"466e0ec0-043e-49a6-b30d-0272443cb839\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.248112 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.249447 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.257386 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/466e0ec0-043e-49a6-b30d-0272443cb839-scripts\") pod \"ceilometer-0\" (UID: \"466e0ec0-043e-49a6-b30d-0272443cb839\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.257763 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d2b6a10-ca95-4d72-a80b-1706822a07a7-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"0d2b6a10-ca95-4d72-a80b-1706822a07a7\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.261761 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/466e0ec0-043e-49a6-b30d-0272443cb839-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"466e0ec0-043e-49a6-b30d-0272443cb839\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.262690 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/466e0ec0-043e-49a6-b30d-0272443cb839-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"466e0ec0-043e-49a6-b30d-0272443cb839\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.263567 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qmb4z\" (UniqueName: \"kubernetes.io/projected/466e0ec0-043e-49a6-b30d-0272443cb839-kube-api-access-qmb4z\") pod \"ceilometer-0\" (UID: \"466e0ec0-043e-49a6-b30d-0272443cb839\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.263752 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c7q7h\" (UniqueName: \"kubernetes.io/projected/0d2b6a10-ca95-4d72-a80b-1706822a07a7-kube-api-access-c7q7h\") pod \"nova-cell0-conductor-0\" (UID: \"0d2b6a10-ca95-4d72-a80b-1706822a07a7\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.263972 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/466e0ec0-043e-49a6-b30d-0272443cb839-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"466e0ec0-043e-49a6-b30d-0272443cb839\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.264886 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d2b6a10-ca95-4d72-a80b-1706822a07a7-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"0d2b6a10-ca95-4d72-a80b-1706822a07a7\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 
17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.266053 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/466e0ec0-043e-49a6-b30d-0272443cb839-config-data\") pod \"ceilometer-0\" (UID: \"466e0ec0-043e-49a6-b30d-0272443cb839\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.271895 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.296326 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.302210 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.304225 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.311484 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-scripts" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.311879 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"galera-openstack-dockercfg-7ct58" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.312024 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-galera-openstack-svc" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.312193 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-config-data" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.318321 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.326535 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.328379 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.330079 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0def1343-9e4b-4f74-84bd-3212688b59ce-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"0def1343-9e4b-4f74-84bd-3212688b59ce\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.330120 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0def1343-9e4b-4f74-84bd-3212688b59ce-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"0def1343-9e4b-4f74-84bd-3212688b59ce\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.330144 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/1fff3747-796d-420a-aff0-846dfc615df9-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"1fff3747-796d-420a-aff0-846dfc615df9\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.330196 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fff3747-796d-420a-aff0-846dfc615df9-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"1fff3747-796d-420a-aff0-846dfc615df9\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.330226 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9800a14c-ebf5-4a5d-b384-c133973b55ff-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"9800a14c-ebf5-4a5d-b384-c133973b55ff\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.330254 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9800a14c-ebf5-4a5d-b384-c133973b55ff-scripts\") pod \"cinder-scheduler-0\" (UID: \"9800a14c-ebf5-4a5d-b384-c133973b55ff\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.330284 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-46zt7\" (UniqueName: \"kubernetes.io/projected/1fff3747-796d-420a-aff0-846dfc615df9-kube-api-access-46zt7\") pod \"openstack-cell1-galera-0\" (UID: \"1fff3747-796d-420a-aff0-846dfc615df9\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.330308 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9800a14c-ebf5-4a5d-b384-c133973b55ff-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"9800a14c-ebf5-4a5d-b384-c133973b55ff\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.330400 4558 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/1fff3747-796d-420a-aff0-846dfc615df9-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"1fff3747-796d-420a-aff0-846dfc615df9\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.330444 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/1fff3747-796d-420a-aff0-846dfc615df9-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"1fff3747-796d-420a-aff0-846dfc615df9\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.330497 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6tqjr\" (UniqueName: \"kubernetes.io/projected/9800a14c-ebf5-4a5d-b384-c133973b55ff-kube-api-access-6tqjr\") pod \"cinder-scheduler-0\" (UID: \"9800a14c-ebf5-4a5d-b384-c133973b55ff\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.330526 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9800a14c-ebf5-4a5d-b384-c133973b55ff-config-data\") pod \"cinder-scheduler-0\" (UID: \"9800a14c-ebf5-4a5d-b384-c133973b55ff\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.330555 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9800a14c-ebf5-4a5d-b384-c133973b55ff-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"9800a14c-ebf5-4a5d-b384-c133973b55ff\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.330611 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"openstack-cell1-galera-0\" (UID: \"1fff3747-796d-420a-aff0-846dfc615df9\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.330652 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1fff3747-796d-420a-aff0-846dfc615df9-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"1fff3747-796d-420a-aff0-846dfc615df9\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.330707 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hcjxf\" (UniqueName: \"kubernetes.io/projected/0def1343-9e4b-4f74-84bd-3212688b59ce-kube-api-access-hcjxf\") pod \"nova-cell1-conductor-0\" (UID: \"0def1343-9e4b-4f74-84bd-3212688b59ce\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.330742 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/1fff3747-796d-420a-aff0-846dfc615df9-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"1fff3747-796d-420a-aff0-846dfc615df9\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:35:58 crc 
kubenswrapper[4558]: I0120 17:35:58.331674 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9800a14c-ebf5-4a5d-b384-c133973b55ff-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"9800a14c-ebf5-4a5d-b384-c133973b55ff\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.338080 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.339213 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9800a14c-ebf5-4a5d-b384-c133973b55ff-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"9800a14c-ebf5-4a5d-b384-c133973b55ff\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.347270 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.347379 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9800a14c-ebf5-4a5d-b384-c133973b55ff-scripts\") pod \"cinder-scheduler-0\" (UID: \"9800a14c-ebf5-4a5d-b384-c133973b55ff\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.347816 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9800a14c-ebf5-4a5d-b384-c133973b55ff-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"9800a14c-ebf5-4a5d-b384-c133973b55ff\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.348460 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9800a14c-ebf5-4a5d-b384-c133973b55ff-config-data\") pod \"cinder-scheduler-0\" (UID: \"9800a14c-ebf5-4a5d-b384-c133973b55ff\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.351553 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.353941 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-scheduler-config-data" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.354208 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.360276 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6tqjr\" (UniqueName: \"kubernetes.io/projected/9800a14c-ebf5-4a5d-b384-c133973b55ff-kube-api-access-6tqjr\") pod \"cinder-scheduler-0\" (UID: \"9800a14c-ebf5-4a5d-b384-c133973b55ff\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.436563 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-77z9r\" (UniqueName: \"kubernetes.io/projected/79a4b689-d81b-44da-8baa-88fc6ce78172-kube-api-access-77z9r\") pod \"openstack-galera-0\" (UID: \"79a4b689-d81b-44da-8baa-88fc6ce78172\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.436853 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/1fff3747-796d-420a-aff0-846dfc615df9-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"1fff3747-796d-420a-aff0-846dfc615df9\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.436878 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/1fff3747-796d-420a-aff0-846dfc615df9-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"1fff3747-796d-420a-aff0-846dfc615df9\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.436898 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/79a4b689-d81b-44da-8baa-88fc6ce78172-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"79a4b689-d81b-44da-8baa-88fc6ce78172\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.436915 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/86db33ff-9888-4ae6-b6f8-48593b3cd2e2-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"86db33ff-9888-4ae6-b6f8-48593b3cd2e2\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.436950 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"openstack-cell1-galera-0\" (UID: \"1fff3747-796d-420a-aff0-846dfc615df9\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.436971 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/79a4b689-d81b-44da-8baa-88fc6ce78172-kolla-config\") pod \"openstack-galera-0\" (UID: \"79a4b689-d81b-44da-8baa-88fc6ce78172\") " 
pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.436988 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1fff3747-796d-420a-aff0-846dfc615df9-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"1fff3747-796d-420a-aff0-846dfc615df9\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.437006 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-0\" (UID: \"79a4b689-d81b-44da-8baa-88fc6ce78172\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.437023 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hcjxf\" (UniqueName: \"kubernetes.io/projected/0def1343-9e4b-4f74-84bd-3212688b59ce-kube-api-access-hcjxf\") pod \"nova-cell1-conductor-0\" (UID: \"0def1343-9e4b-4f74-84bd-3212688b59ce\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.437048 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/1fff3747-796d-420a-aff0-846dfc615df9-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"1fff3747-796d-420a-aff0-846dfc615df9\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.437081 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0def1343-9e4b-4f74-84bd-3212688b59ce-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"0def1343-9e4b-4f74-84bd-3212688b59ce\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.437103 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0def1343-9e4b-4f74-84bd-3212688b59ce-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"0def1343-9e4b-4f74-84bd-3212688b59ce\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.437128 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zgzj5\" (UniqueName: \"kubernetes.io/projected/86db33ff-9888-4ae6-b6f8-48593b3cd2e2-kube-api-access-zgzj5\") pod \"nova-scheduler-0\" (UID: \"86db33ff-9888-4ae6-b6f8-48593b3cd2e2\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.437144 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/79a4b689-d81b-44da-8baa-88fc6ce78172-config-data-generated\") pod \"openstack-galera-0\" (UID: \"79a4b689-d81b-44da-8baa-88fc6ce78172\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.437176 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/1fff3747-796d-420a-aff0-846dfc615df9-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: 
\"1fff3747-796d-420a-aff0-846dfc615df9\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.437208 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79a4b689-d81b-44da-8baa-88fc6ce78172-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"79a4b689-d81b-44da-8baa-88fc6ce78172\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.437226 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fff3747-796d-420a-aff0-846dfc615df9-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"1fff3747-796d-420a-aff0-846dfc615df9\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.437673 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"openstack-cell1-galera-0\" (UID: \"1fff3747-796d-420a-aff0-846dfc615df9\") device mount path \"/mnt/openstack/pv06\"" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.438200 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/1fff3747-796d-420a-aff0-846dfc615df9-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"1fff3747-796d-420a-aff0-846dfc615df9\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.439336 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/1fff3747-796d-420a-aff0-846dfc615df9-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"1fff3747-796d-420a-aff0-846dfc615df9\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.439668 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/1fff3747-796d-420a-aff0-846dfc615df9-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"1fff3747-796d-420a-aff0-846dfc615df9\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.446301 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/86db33ff-9888-4ae6-b6f8-48593b3cd2e2-config-data\") pod \"nova-scheduler-0\" (UID: \"86db33ff-9888-4ae6-b6f8-48593b3cd2e2\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.446370 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/79a4b689-d81b-44da-8baa-88fc6ce78172-operator-scripts\") pod \"openstack-galera-0\" (UID: \"79a4b689-d81b-44da-8baa-88fc6ce78172\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.446475 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-46zt7\" (UniqueName: \"kubernetes.io/projected/1fff3747-796d-420a-aff0-846dfc615df9-kube-api-access-46zt7\") pod 
\"openstack-cell1-galera-0\" (UID: \"1fff3747-796d-420a-aff0-846dfc615df9\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.446618 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/79a4b689-d81b-44da-8baa-88fc6ce78172-config-data-default\") pod \"openstack-galera-0\" (UID: \"79a4b689-d81b-44da-8baa-88fc6ce78172\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.453119 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0def1343-9e4b-4f74-84bd-3212688b59ce-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"0def1343-9e4b-4f74-84bd-3212688b59ce\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.460264 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fff3747-796d-420a-aff0-846dfc615df9-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"1fff3747-796d-420a-aff0-846dfc615df9\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.460400 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/1fff3747-796d-420a-aff0-846dfc615df9-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"1fff3747-796d-420a-aff0-846dfc615df9\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.465322 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0def1343-9e4b-4f74-84bd-3212688b59ce-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"0def1343-9e4b-4f74-84bd-3212688b59ce\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.465350 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.473265 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1fff3747-796d-420a-aff0-846dfc615df9-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"1fff3747-796d-420a-aff0-846dfc615df9\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.473577 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.482751 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hcjxf\" (UniqueName: \"kubernetes.io/projected/0def1343-9e4b-4f74-84bd-3212688b59ce-kube-api-access-hcjxf\") pod \"nova-cell1-conductor-0\" (UID: \"0def1343-9e4b-4f74-84bd-3212688b59ce\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.483979 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-46zt7\" (UniqueName: \"kubernetes.io/projected/1fff3747-796d-420a-aff0-846dfc615df9-kube-api-access-46zt7\") pod \"openstack-cell1-galera-0\" (UID: \"1fff3747-796d-420a-aff0-846dfc615df9\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.549259 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/79a4b689-d81b-44da-8baa-88fc6ce78172-kolla-config\") pod \"openstack-galera-0\" (UID: \"79a4b689-d81b-44da-8baa-88fc6ce78172\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.549333 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-0\" (UID: \"79a4b689-d81b-44da-8baa-88fc6ce78172\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.549430 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zgzj5\" (UniqueName: \"kubernetes.io/projected/86db33ff-9888-4ae6-b6f8-48593b3cd2e2-kube-api-access-zgzj5\") pod \"nova-scheduler-0\" (UID: \"86db33ff-9888-4ae6-b6f8-48593b3cd2e2\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.549454 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/79a4b689-d81b-44da-8baa-88fc6ce78172-config-data-generated\") pod \"openstack-galera-0\" (UID: \"79a4b689-d81b-44da-8baa-88fc6ce78172\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.549524 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79a4b689-d81b-44da-8baa-88fc6ce78172-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"79a4b689-d81b-44da-8baa-88fc6ce78172\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.549580 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/86db33ff-9888-4ae6-b6f8-48593b3cd2e2-config-data\") pod \"nova-scheduler-0\" (UID: \"86db33ff-9888-4ae6-b6f8-48593b3cd2e2\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.549607 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/79a4b689-d81b-44da-8baa-88fc6ce78172-operator-scripts\") pod \"openstack-galera-0\" (UID: \"79a4b689-d81b-44da-8baa-88fc6ce78172\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:35:58 crc 
kubenswrapper[4558]: I0120 17:35:58.549730 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/79a4b689-d81b-44da-8baa-88fc6ce78172-config-data-default\") pod \"openstack-galera-0\" (UID: \"79a4b689-d81b-44da-8baa-88fc6ce78172\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.549773 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-77z9r\" (UniqueName: \"kubernetes.io/projected/79a4b689-d81b-44da-8baa-88fc6ce78172-kube-api-access-77z9r\") pod \"openstack-galera-0\" (UID: \"79a4b689-d81b-44da-8baa-88fc6ce78172\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.550751 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/79a4b689-d81b-44da-8baa-88fc6ce78172-config-data-generated\") pod \"openstack-galera-0\" (UID: \"79a4b689-d81b-44da-8baa-88fc6ce78172\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.551198 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/79a4b689-d81b-44da-8baa-88fc6ce78172-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"79a4b689-d81b-44da-8baa-88fc6ce78172\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.551243 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/86db33ff-9888-4ae6-b6f8-48593b3cd2e2-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"86db33ff-9888-4ae6-b6f8-48593b3cd2e2\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.551957 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/79a4b689-d81b-44da-8baa-88fc6ce78172-kolla-config\") pod \"openstack-galera-0\" (UID: \"79a4b689-d81b-44da-8baa-88fc6ce78172\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.553078 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/79a4b689-d81b-44da-8baa-88fc6ce78172-operator-scripts\") pod \"openstack-galera-0\" (UID: \"79a4b689-d81b-44da-8baa-88fc6ce78172\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.553774 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/79a4b689-d81b-44da-8baa-88fc6ce78172-config-data-default\") pod \"openstack-galera-0\" (UID: \"79a4b689-d81b-44da-8baa-88fc6ce78172\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.554146 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-0\" (UID: \"79a4b689-d81b-44da-8baa-88fc6ce78172\") device mount path \"/mnt/openstack/pv02\"" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.562675 4558 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/86db33ff-9888-4ae6-b6f8-48593b3cd2e2-config-data\") pod \"nova-scheduler-0\" (UID: \"86db33ff-9888-4ae6-b6f8-48593b3cd2e2\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.565951 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79a4b689-d81b-44da-8baa-88fc6ce78172-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"79a4b689-d81b-44da-8baa-88fc6ce78172\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.576543 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/79a4b689-d81b-44da-8baa-88fc6ce78172-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"79a4b689-d81b-44da-8baa-88fc6ce78172\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.586989 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/86db33ff-9888-4ae6-b6f8-48593b3cd2e2-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"86db33ff-9888-4ae6-b6f8-48593b3cd2e2\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.588519 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a4d26c7d-095d-4f3d-b6cc-c270961e9794-config-data" (OuterVolumeSpecName: "config-data") pod "a4d26c7d-095d-4f3d-b6cc-c270961e9794" (UID: "a4d26c7d-095d-4f3d-b6cc-c270961e9794"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.592956 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bd47eac-4090-4fc1-91c4-553ebd84964d" path="/var/lib/kubelet/pods/4bd47eac-4090-4fc1-91c4-553ebd84964d/volumes" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.593730 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="833337d4-94d4-4bf6-9d0b-5791b9cc115e" path="/var/lib/kubelet/pods/833337d4-94d4-4bf6-9d0b-5791b9cc115e/volumes" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.599696 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9b9c1cf7-0cbb-4bb3-a764-8a8fa446c8a8" path="/var/lib/kubelet/pods/9b9c1cf7-0cbb-4bb3-a764-8a8fa446c8a8/volumes" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.600431 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b1ce826a-c52c-42c6-8b67-6074b78c9fb7" path="/var/lib/kubelet/pods/b1ce826a-c52c-42c6-8b67-6074b78c9fb7/volumes" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.602120 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bbb20173-fee8-4323-afbb-f2bbbec3e978" path="/var/lib/kubelet/pods/bbb20173-fee8-4323-afbb-f2bbbec3e978/volumes" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.603854 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="db6ef9f1-8a92-4c74-b476-a24b66585268" path="/var/lib/kubelet/pods/db6ef9f1-8a92-4c74-b476-a24b66585268/volumes" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.606747 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4d77869-9132-4170-b50d-88389a33c597" 
path="/var/lib/kubelet/pods/f4d77869-9132-4170-b50d-88389a33c597/volumes" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.623864 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zgzj5\" (UniqueName: \"kubernetes.io/projected/86db33ff-9888-4ae6-b6f8-48593b3cd2e2-kube-api-access-zgzj5\") pod \"nova-scheduler-0\" (UID: \"86db33ff-9888-4ae6-b6f8-48593b3cd2e2\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.625959 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-77z9r\" (UniqueName: \"kubernetes.io/projected/79a4b689-d81b-44da-8baa-88fc6ce78172-kube-api-access-77z9r\") pod \"openstack-galera-0\" (UID: \"79a4b689-d81b-44da-8baa-88fc6ce78172\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:35:58 crc kubenswrapper[4558]: I0120 17:35:58.630579 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/39600642-1418-4cb4-8617-f2ecc7089e0b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "39600642-1418-4cb4-8617-f2ecc7089e0b" (UID: "39600642-1418-4cb4-8617-f2ecc7089e0b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:58.634945 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/89eb3a4c-0235-4583-ad0a-d015228d8d6b-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "89eb3a4c-0235-4583-ad0a-d015228d8d6b" (UID: "89eb3a4c-0235-4583-ad0a-d015228d8d6b"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:58.674157 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39600642-1418-4cb4-8617-f2ecc7089e0b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:58.674200 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/89eb3a4c-0235-4583-ad0a-d015228d8d6b-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:58.674211 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a4d26c7d-095d-4f3d-b6cc-c270961e9794-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:58.732126 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c35b601d-7c76-46a4-9703-7a52ef1bbf73-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "c35b601d-7c76-46a4-9703-7a52ef1bbf73" (UID: "c35b601d-7c76-46a4-9703-7a52ef1bbf73"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:58.760000 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"openstack-cell1-galera-0\" (UID: \"1fff3747-796d-420a-aff0-846dfc615df9\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:58.784469 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c35b601d-7c76-46a4-9703-7a52ef1bbf73-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:58.809343 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a4d26c7d-095d-4f3d-b6cc-c270961e9794-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a4d26c7d-095d-4f3d-b6cc-c270961e9794" (UID: "a4d26c7d-095d-4f3d-b6cc-c270961e9794"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:58.852871 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-0\" (UID: \"79a4b689-d81b-44da-8baa-88fc6ce78172\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:58.876702 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/nova-metadata-0" podUID="a0122392-1c61-4133-b05f-cc6a622abaf9" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.103:8775/\": dial tcp 10.217.0.103:8775: i/o timeout" Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:58.878332 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/nova-metadata-0" podUID="a0122392-1c61-4133-b05f-cc6a622abaf9" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.103:8775/\": dial tcp 10.217.0.103:8775: i/o timeout" Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:58.886082 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a4d26c7d-095d-4f3d-b6cc-c270961e9794-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "a4d26c7d-095d-4f3d-b6cc-c270961e9794" (UID: "a4d26c7d-095d-4f3d-b6cc-c270961e9794"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:58.887454 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-worker-dd549b649-grdrm" podStartSLOduration=8.887441141 podStartE2EDuration="8.887441141s" podCreationTimestamp="2026-01-20 17:35:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:35:58.8837713 +0000 UTC m=+3252.644109268" watchObservedRunningTime="2026-01-20 17:35:58.887441141 +0000 UTC m=+3252.647779108" Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:58.887559 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a4d26c7d-095d-4f3d-b6cc-c270961e9794-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:58.887575 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a4d26c7d-095d-4f3d-b6cc-c270961e9794-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:58.897108 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/neutron-6d8f9956fd-j6jg4" podStartSLOduration=8.897097689 podStartE2EDuration="8.897097689s" podCreationTimestamp="2026-01-20 17:35:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:35:58.862827899 +0000 UTC m=+3252.623165886" watchObservedRunningTime="2026-01-20 17:35:58.897097689 +0000 UTC m=+3252.657435656" Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:58.920196 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c35b601d-7c76-46a4-9703-7a52ef1bbf73-config-data" (OuterVolumeSpecName: "config-data") pod "c35b601d-7c76-46a4-9703-7a52ef1bbf73" (UID: "c35b601d-7c76-46a4-9703-7a52ef1bbf73"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:58.959712 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/89eb3a4c-0235-4583-ad0a-d015228d8d6b-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "89eb3a4c-0235-4583-ad0a-d015228d8d6b" (UID: "89eb3a4c-0235-4583-ad0a-d015228d8d6b"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:58.960232 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-6874444f77-w92bj" Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:58.977187 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a4d26c7d-095d-4f3d-b6cc-c270961e9794-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "a4d26c7d-095d-4f3d-b6cc-c270961e9794" (UID: "a4d26c7d-095d-4f3d-b6cc-c270961e9794"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:58.984404 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c35b601d-7c76-46a4-9703-7a52ef1bbf73-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "c35b601d-7c76-46a4-9703-7a52ef1bbf73" (UID: "c35b601d-7c76-46a4-9703-7a52ef1bbf73"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:58.992581 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c35b601d-7c76-46a4-9703-7a52ef1bbf73-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:58.992603 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a4d26c7d-095d-4f3d-b6cc-c270961e9794-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:58.992614 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c35b601d-7c76-46a4-9703-7a52ef1bbf73-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:58.992623 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/89eb3a4c-0235-4583-ad0a-d015228d8d6b-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.008875 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovn-northd-0_39600642-1418-4cb4-8617-f2ecc7089e0b/ovn-northd/0.log" Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.009088 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.040660 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-6d8f9956fd-j6jg4" event={"ID":"5b57aa66-3c71-422d-b029-86cb0e3a9aef","Type":"ContainerStarted","Data":"1760926567dc1435e74fb123df5eb6e6a9b5f8ff375b1b59306d1e2e66b1ba69"} Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.040709 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-dd549b649-grdrm" event={"ID":"640de3b3-60e1-41b8-ab00-22e375bad65c","Type":"ContainerStarted","Data":"c97affc0c0f8fba83bdeaf55c35e5f0b460177945ced19a520ae5ea01ea75dfd"} Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.040727 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.041631 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/neutron-6d8f9956fd-j6jg4" Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.041688 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/barbican-api-64bd4857c6-2s9h4" Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.041703 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-worker-5b76bfdfd9-6pwrz"] Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.041718 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/barbican-api-64bd4857c6-2s9h4" Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.041747 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.041763 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-64bd4857c6-2s9h4" event={"ID":"94062d42-28cb-4c8a-afa4-f51458dedc6c","Type":"ContainerStarted","Data":"1aa46d5cbab3879dbc6e24ca32547b207cb8e11bc47b9d499882d8000bc7b577"} Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.041776 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-6d4445bf46-57dqj" event={"ID":"11d1aa99-639c-451a-876e-59de6098e407","Type":"ContainerStarted","Data":"cb6897619dc4891e5845ed9d58dbce37e97a4c36140e8ccd66c2309dcdea8abb"} Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.041792 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/placement-55dfc8964d-kck7c" Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.041804 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/placement-55dfc8964d-kck7c" Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.041848 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-6874444f77-w92bj" event={"ID":"a4d26c7d-095d-4f3d-b6cc-c270961e9794","Type":"ContainerDied","Data":"d19d6bcdf0254eb31ac9d803fe49d407f81d95266bfc66f730bfd688e5da9db6"} Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.041865 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"98f8dc8b-6446-4ba8-b37b-a11ae7414b65","Type":"ContainerStarted","Data":"ed49f3f83c5373eef322ba453c4777bc515d3e9dfcde8de0a5408d13b285222c"} Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.041879 4558 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-55dfc8964d-kck7c" event={"ID":"6b77441a-39f5-4ed5-bf1b-c29900900242","Type":"ContainerStarted","Data":"bbf204899e737785b34e43ac3dcf8c1621d7b4c38d2b11ab4f044288ae0cdd0a"} Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.042016 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"39600642-1418-4cb4-8617-f2ecc7089e0b","Type":"ContainerDied","Data":"bdb22742dbe82f2b8c2b2d05c6407feef886a24bb79b2700a3f4555ca2e2cde4"} Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.042033 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"f7881580-f0a5-47a0-9622-3927bcce668a","Type":"ContainerStarted","Data":"d6d4df9057e443dc8c92a45823eeab8e542aa20fffd7d458a84e248b6b67fa96"} Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.042052 4558 scope.go:117] "RemoveContainer" containerID="964d0ad276f99dd9fa9d8522022840ffe7ce1002dc0bd50778cbc03decd4341c" Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.042613 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-worker-5b76bfdfd9-6pwrz" podUID="377a0d4c-edf2-4912-a04f-0f814ea9efdb" containerName="barbican-worker-log" containerID="cri-o://c2b0747f8229855e5230624f8d196c8c02e4e23423e1c651c0079dce53d51d01" gracePeriod=30 Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.043071 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-worker-5b76bfdfd9-6pwrz" podUID="377a0d4c-edf2-4912-a04f-0f814ea9efdb" containerName="barbican-worker" containerID="cri-o://8508c73a6d33c4073f424676d96dd78cecddaccb095da8154e3ebbb8171a2314" gracePeriod=30 Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.053041 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-api-64bd4857c6-2s9h4" podStartSLOduration=9.053015313 podStartE2EDuration="9.053015313s" podCreationTimestamp="2026-01-20 17:35:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:35:58.945959998 +0000 UTC m=+3252.706297965" watchObservedRunningTime="2026-01-20 17:35:59.053015313 +0000 UTC m=+3252.813353280" Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.062772 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/39600642-1418-4cb4-8617-f2ecc7089e0b-ovn-northd-tls-certs" (OuterVolumeSpecName: "ovn-northd-tls-certs") pod "39600642-1418-4cb4-8617-f2ecc7089e0b" (UID: "39600642-1418-4cb4-8617-f2ecc7089e0b"). InnerVolumeSpecName "ovn-northd-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.066581 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"8b45e22f-53b9-4228-a28a-a5db18b6e583","Type":"ContainerStarted","Data":"f48902bc3180bcf59f02a8f100bbf7e30cefe4513466bce23fe46f25d1cf1438"} Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.067285 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/39600642-1418-4cb4-8617-f2ecc7089e0b-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "39600642-1418-4cb4-8617-f2ecc7089e0b" (UID: "39600642-1418-4cb4-8617-f2ecc7089e0b"). 
InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.075098 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-keystone-listener-6d4445bf46-57dqj" podStartSLOduration=9.0750732 podStartE2EDuration="9.0750732s" podCreationTimestamp="2026-01-20 17:35:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:35:58.98093252 +0000 UTC m=+3252.741270487" watchObservedRunningTime="2026-01-20 17:35:59.0750732 +0000 UTC m=+3252.835411168" Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.096337 4558 reconciler_common.go:293] "Volume detached for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/39600642-1418-4cb4-8617-f2ecc7089e0b-ovn-northd-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.096359 4558 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/39600642-1418-4cb4-8617-f2ecc7089e0b-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.107942 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/placement-55dfc8964d-kck7c" podStartSLOduration=9.107928931 podStartE2EDuration="9.107928931s" podCreationTimestamp="2026-01-20 17:35:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:35:59.009319308 +0000 UTC m=+3252.769657295" watchObservedRunningTime="2026-01-20 17:35:59.107928931 +0000 UTC m=+3252.868266898" Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.108523 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-755c97c57d-cswnd"] Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.108720 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-keystone-listener-755c97c57d-cswnd" podUID="09df5e63-9283-4cae-80fe-cfa88f5a9b72" containerName="barbican-keystone-listener-log" containerID="cri-o://7bd2233090ba7d102c3a1cba643b0d5335cea55cf7e97e49488020c2bb64eaa3" gracePeriod=30 Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.109110 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-keystone-listener-755c97c57d-cswnd" podUID="09df5e63-9283-4cae-80fe-cfa88f5a9b72" containerName="barbican-keystone-listener" containerID="cri-o://de6accc2640fd6714fc093d22da413b576997cc0e51ebcd994892e58839c6118" gracePeriod=30 Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.155520 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-77fd7f9ff9-zs7mz"] Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.169450 4558 scope.go:117] "RemoveContainer" containerID="838cfcc8e26bf3e11489fea7f7801117e2630d787be77ecdb9b5831a033f4a6c" Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.171810 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-77fd7f9ff9-zs7mz"] Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.188572 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.188595 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.349506 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.362067 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.377206 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-api-7cccd8f896-m6rvs"] Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.385350 4558 scope.go:117] "RemoveContainer" containerID="33b43d304994a43dcb87337ad0c6e08deaf4a2507647b3ba6f756482ec39e182" Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.405700 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-api-7cccd8f896-m6rvs"] Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.419514 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.439671 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.449908 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-6874444f77-w92bj"] Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.461026 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-6874444f77-w92bj"] Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.484590 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.490226 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.496115 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ovnnorthd-ovnnorthd-dockercfg-dj5lx" Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.496828 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovnnorthd-config" Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.496987 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ovnnorthd-ovndbs" Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.498469 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.499037 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovnnorthd-scripts" Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.615070 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c570a7ea-d296-44bc-a48f-6dd8be9754d0-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"c570a7ea-d296-44bc-a48f-6dd8be9754d0\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.625032 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c570a7ea-d296-44bc-a48f-6dd8be9754d0-config\") pod \"ovn-northd-0\" (UID: \"c570a7ea-d296-44bc-a48f-6dd8be9754d0\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.626392 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/c570a7ea-d296-44bc-a48f-6dd8be9754d0-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"c570a7ea-d296-44bc-a48f-6dd8be9754d0\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.626475 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/c570a7ea-d296-44bc-a48f-6dd8be9754d0-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"c570a7ea-d296-44bc-a48f-6dd8be9754d0\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.626603 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/c570a7ea-d296-44bc-a48f-6dd8be9754d0-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"c570a7ea-d296-44bc-a48f-6dd8be9754d0\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.626693 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c570a7ea-d296-44bc-a48f-6dd8be9754d0-scripts\") pod \"ovn-northd-0\" (UID: \"c570a7ea-d296-44bc-a48f-6dd8be9754d0\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.626763 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p8cm8\" (UniqueName: 
\"kubernetes.io/projected/c570a7ea-d296-44bc-a48f-6dd8be9754d0-kube-api-access-p8cm8\") pod \"ovn-northd-0\" (UID: \"c570a7ea-d296-44bc-a48f-6dd8be9754d0\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.688023 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.728267 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/c570a7ea-d296-44bc-a48f-6dd8be9754d0-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"c570a7ea-d296-44bc-a48f-6dd8be9754d0\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.728719 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c570a7ea-d296-44bc-a48f-6dd8be9754d0-scripts\") pod \"ovn-northd-0\" (UID: \"c570a7ea-d296-44bc-a48f-6dd8be9754d0\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.728816 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p8cm8\" (UniqueName: \"kubernetes.io/projected/c570a7ea-d296-44bc-a48f-6dd8be9754d0-kube-api-access-p8cm8\") pod \"ovn-northd-0\" (UID: \"c570a7ea-d296-44bc-a48f-6dd8be9754d0\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.729002 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c570a7ea-d296-44bc-a48f-6dd8be9754d0-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"c570a7ea-d296-44bc-a48f-6dd8be9754d0\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.729097 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c570a7ea-d296-44bc-a48f-6dd8be9754d0-config\") pod \"ovn-northd-0\" (UID: \"c570a7ea-d296-44bc-a48f-6dd8be9754d0\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.729308 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/c570a7ea-d296-44bc-a48f-6dd8be9754d0-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"c570a7ea-d296-44bc-a48f-6dd8be9754d0\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.729372 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/c570a7ea-d296-44bc-a48f-6dd8be9754d0-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"c570a7ea-d296-44bc-a48f-6dd8be9754d0\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.731554 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c570a7ea-d296-44bc-a48f-6dd8be9754d0-config\") pod \"ovn-northd-0\" (UID: \"c570a7ea-d296-44bc-a48f-6dd8be9754d0\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.732644 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/c570a7ea-d296-44bc-a48f-6dd8be9754d0-ovn-rundir\") pod 
\"ovn-northd-0\" (UID: \"c570a7ea-d296-44bc-a48f-6dd8be9754d0\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.733556 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c570a7ea-d296-44bc-a48f-6dd8be9754d0-scripts\") pod \"ovn-northd-0\" (UID: \"c570a7ea-d296-44bc-a48f-6dd8be9754d0\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.734693 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/c570a7ea-d296-44bc-a48f-6dd8be9754d0-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"c570a7ea-d296-44bc-a48f-6dd8be9754d0\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.736201 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c570a7ea-d296-44bc-a48f-6dd8be9754d0-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"c570a7ea-d296-44bc-a48f-6dd8be9754d0\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.736710 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/c570a7ea-d296-44bc-a48f-6dd8be9754d0-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"c570a7ea-d296-44bc-a48f-6dd8be9754d0\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.742247 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p8cm8\" (UniqueName: \"kubernetes.io/projected/c570a7ea-d296-44bc-a48f-6dd8be9754d0-kube-api-access-p8cm8\") pod \"ovn-northd-0\" (UID: \"c570a7ea-d296-44bc-a48f-6dd8be9754d0\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.790962 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.847000 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.876767 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.914537 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:35:59 crc kubenswrapper[4558]: I0120 17:35:59.971122 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:36:00 crc kubenswrapper[4558]: I0120 17:36:00.088204 4558 generic.go:334] "Generic (PLEG): container finished" podID="377a0d4c-edf2-4912-a04f-0f814ea9efdb" containerID="c2b0747f8229855e5230624f8d196c8c02e4e23423e1c651c0079dce53d51d01" exitCode=143 Jan 20 17:36:00 crc kubenswrapper[4558]: I0120 17:36:00.088291 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-5b76bfdfd9-6pwrz" event={"ID":"377a0d4c-edf2-4912-a04f-0f814ea9efdb","Type":"ContainerDied","Data":"c2b0747f8229855e5230624f8d196c8c02e4e23423e1c651c0079dce53d51d01"} Jan 20 17:36:00 crc kubenswrapper[4558]: I0120 17:36:00.091268 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"0def1343-9e4b-4f74-84bd-3212688b59ce","Type":"ContainerStarted","Data":"620f238c2702c3c6e8d341adc99ebb5ddd89a8d718eb1447a8506a134b9b1dea"} Jan 20 17:36:00 crc kubenswrapper[4558]: I0120 17:36:00.091320 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:36:00 crc kubenswrapper[4558]: I0120 17:36:00.091332 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"0def1343-9e4b-4f74-84bd-3212688b59ce","Type":"ContainerStarted","Data":"9ce12d640a1a49360840fb100db33b1424f611fbbd9760cea4e53ff395f04875"} Jan 20 17:36:00 crc kubenswrapper[4558]: I0120 17:36:00.094044 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"1fff3747-796d-420a-aff0-846dfc615df9","Type":"ContainerStarted","Data":"99486727b440696ecb3f8577b6d50e4e5a3567bf981fd662768ea579439910d0"} Jan 20 17:36:00 crc kubenswrapper[4558]: I0120 17:36:00.097766 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"466e0ec0-043e-49a6-b30d-0272443cb839","Type":"ContainerStarted","Data":"4b3ea526bd0d6082a8f713f347d305fa594ec7d31b198fcc81b42053f7c829e0"} Jan 20 17:36:00 crc kubenswrapper[4558]: I0120 17:36:00.097811 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"466e0ec0-043e-49a6-b30d-0272443cb839","Type":"ContainerStarted","Data":"18ed28c6a594076520e6c3f1696bd2790369ffc42a8b0b782a7fe3b8e55a695c"} Jan 20 17:36:00 crc kubenswrapper[4558]: I0120 17:36:00.121529 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"86db33ff-9888-4ae6-b6f8-48593b3cd2e2","Type":"ContainerStarted","Data":"55ea30433e3b76c2e5b015dd0e61983f0d4c582fa12d2bcb4a12963788086413"} Jan 20 17:36:00 crc kubenswrapper[4558]: I0120 17:36:00.123405 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podStartSLOduration=3.123391713 podStartE2EDuration="3.123391713s" podCreationTimestamp="2026-01-20 17:35:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:36:00.110453314 +0000 UTC 
m=+3253.870791281" watchObservedRunningTime="2026-01-20 17:36:00.123391713 +0000 UTC m=+3253.883729680" Jan 20 17:36:00 crc kubenswrapper[4558]: I0120 17:36:00.126752 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"79a4b689-d81b-44da-8baa-88fc6ce78172","Type":"ContainerStarted","Data":"dc1f5d11078340aab4c21ec4674412ae459987ad6b30aacb6430d494ae31cb3e"} Jan 20 17:36:00 crc kubenswrapper[4558]: I0120 17:36:00.151691 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"8b45e22f-53b9-4228-a28a-a5db18b6e583","Type":"ContainerStarted","Data":"561616071d5e6e8584628b6c63ccb49a3ccf23901814e4850a47530cd546d416"} Jan 20 17:36:00 crc kubenswrapper[4558]: I0120 17:36:00.172582 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ovsdbserver-sb-0" podStartSLOduration=6.172562533 podStartE2EDuration="6.172562533s" podCreationTimestamp="2026-01-20 17:35:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:36:00.169332068 +0000 UTC m=+3253.929670035" watchObservedRunningTime="2026-01-20 17:36:00.172562533 +0000 UTC m=+3253.932900500" Jan 20 17:36:00 crc kubenswrapper[4558]: I0120 17:36:00.185502 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"98f8dc8b-6446-4ba8-b37b-a11ae7414b65","Type":"ContainerStarted","Data":"f56a163a8189df30e61d8f643eac8347a79e40378fdf5ec2740f041b866096e0"} Jan 20 17:36:00 crc kubenswrapper[4558]: I0120 17:36:00.189343 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"9800a14c-ebf5-4a5d-b384-c133973b55ff","Type":"ContainerStarted","Data":"d400a17cccf44fd0f3413257fdcf4df411c41c45d5b7a8f8edeb7681c60a710b"} Jan 20 17:36:00 crc kubenswrapper[4558]: I0120 17:36:00.193951 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"f7881580-f0a5-47a0-9622-3927bcce668a","Type":"ContainerStarted","Data":"97f89fd69e51892f2e484fb54c15bb385da2a687f24a01aa8d3d2e7c65114191"} Jan 20 17:36:00 crc kubenswrapper[4558]: I0120 17:36:00.202443 4558 generic.go:334] "Generic (PLEG): container finished" podID="09df5e63-9283-4cae-80fe-cfa88f5a9b72" containerID="7bd2233090ba7d102c3a1cba643b0d5335cea55cf7e97e49488020c2bb64eaa3" exitCode=143 Jan 20 17:36:00 crc kubenswrapper[4558]: I0120 17:36:00.202494 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-755c97c57d-cswnd" event={"ID":"09df5e63-9283-4cae-80fe-cfa88f5a9b72","Type":"ContainerDied","Data":"7bd2233090ba7d102c3a1cba643b0d5335cea55cf7e97e49488020c2bb64eaa3"} Jan 20 17:36:00 crc kubenswrapper[4558]: I0120 17:36:00.225832 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ovsdbserver-nb-0" podStartSLOduration=8.225820457 podStartE2EDuration="8.225820457s" podCreationTimestamp="2026-01-20 17:35:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:36:00.211058178 +0000 UTC m=+3253.971396146" watchObservedRunningTime="2026-01-20 17:36:00.225820457 +0000 UTC m=+3253.986158424" Jan 20 17:36:00 crc kubenswrapper[4558]: I0120 17:36:00.233220 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"0d2b6a10-ca95-4d72-a80b-1706822a07a7","Type":"ContainerStarted","Data":"4c8e41d3a2f28510194a679aee0e70d2fe8353517ab636b8947a4bcddafb37df"} Jan 20 17:36:00 crc kubenswrapper[4558]: I0120 17:36:00.233255 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"0d2b6a10-ca95-4d72-a80b-1706822a07a7","Type":"ContainerStarted","Data":"26f4693a90769d4adb810c7a07bab664111d1d6749bae0af629048a3de8a1ac9"} Jan 20 17:36:00 crc kubenswrapper[4558]: I0120 17:36:00.234188 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:36:00 crc kubenswrapper[4558]: I0120 17:36:00.253318 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podStartSLOduration=3.25330029 podStartE2EDuration="3.25330029s" podCreationTimestamp="2026-01-20 17:35:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:36:00.247917448 +0000 UTC m=+3254.008255445" watchObservedRunningTime="2026-01-20 17:36:00.25330029 +0000 UTC m=+3254.013638257" Jan 20 17:36:00 crc kubenswrapper[4558]: I0120 17:36:00.490393 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:36:00 crc kubenswrapper[4558]: I0120 17:36:00.590612 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="39600642-1418-4cb4-8617-f2ecc7089e0b" path="/var/lib/kubelet/pods/39600642-1418-4cb4-8617-f2ecc7089e0b/volumes" Jan 20 17:36:00 crc kubenswrapper[4558]: I0120 17:36:00.591412 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="89eb3a4c-0235-4583-ad0a-d015228d8d6b" path="/var/lib/kubelet/pods/89eb3a4c-0235-4583-ad0a-d015228d8d6b/volumes" Jan 20 17:36:00 crc kubenswrapper[4558]: I0120 17:36:00.591881 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a4d26c7d-095d-4f3d-b6cc-c270961e9794" path="/var/lib/kubelet/pods/a4d26c7d-095d-4f3d-b6cc-c270961e9794/volumes" Jan 20 17:36:00 crc kubenswrapper[4558]: I0120 17:36:00.593179 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c35b601d-7c76-46a4-9703-7a52ef1bbf73" path="/var/lib/kubelet/pods/c35b601d-7c76-46a4-9703-7a52ef1bbf73/volumes" Jan 20 17:36:00 crc kubenswrapper[4558]: E0120 17:36:00.839027 4558 fsHandler.go:119] failed to collect filesystem stats - rootDiskErr: could not stat "/var/lib/containers/storage/overlay/7389305c0decfa4337b135b922b75c0234ec0d1e6284439eb704fa0193074bac/diff" to get inode usage: stat /var/lib/containers/storage/overlay/7389305c0decfa4337b135b922b75c0234ec0d1e6284439eb704fa0193074bac/diff: no such file or directory, extraDiskErr: could not stat "/var/log/pods/openstack-kuttl-tests_nova-cell0-conductor-0_f4d77869-9132-4170-b50d-88389a33c597/nova-cell0-conductor-conductor/0.log" to get inode usage: stat /var/log/pods/openstack-kuttl-tests_nova-cell0-conductor-0_f4d77869-9132-4170-b50d-88389a33c597/nova-cell0-conductor-conductor/0.log: no such file or directory Jan 20 17:36:00 crc kubenswrapper[4558]: I0120 17:36:00.922893 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/barbican-api-f45698ff6-c9mtk" podUID="7de6386e-7693-430d-9753-2d7fa8c31b5d" containerName="barbican-api" probeResult="failure" output="Get \"https://10.217.0.131:9311/healthcheck\": context deadline 
exceeded (Client.Timeout exceeded while awaiting headers)" Jan 20 17:36:00 crc kubenswrapper[4558]: I0120 17:36:00.923924 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/barbican-api-f45698ff6-c9mtk" podUID="7de6386e-7693-430d-9753-2d7fa8c31b5d" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.0.131:9311/healthcheck\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Jan 20 17:36:01 crc kubenswrapper[4558]: I0120 17:36:01.023908 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-755c97c57d-cswnd" Jan 20 17:36:01 crc kubenswrapper[4558]: I0120 17:36:01.067524 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/09df5e63-9283-4cae-80fe-cfa88f5a9b72-logs\") pod \"09df5e63-9283-4cae-80fe-cfa88f5a9b72\" (UID: \"09df5e63-9283-4cae-80fe-cfa88f5a9b72\") " Jan 20 17:36:01 crc kubenswrapper[4558]: I0120 17:36:01.067592 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09df5e63-9283-4cae-80fe-cfa88f5a9b72-combined-ca-bundle\") pod \"09df5e63-9283-4cae-80fe-cfa88f5a9b72\" (UID: \"09df5e63-9283-4cae-80fe-cfa88f5a9b72\") " Jan 20 17:36:01 crc kubenswrapper[4558]: I0120 17:36:01.067735 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/09df5e63-9283-4cae-80fe-cfa88f5a9b72-config-data-custom\") pod \"09df5e63-9283-4cae-80fe-cfa88f5a9b72\" (UID: \"09df5e63-9283-4cae-80fe-cfa88f5a9b72\") " Jan 20 17:36:01 crc kubenswrapper[4558]: I0120 17:36:01.067802 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hnhc4\" (UniqueName: \"kubernetes.io/projected/09df5e63-9283-4cae-80fe-cfa88f5a9b72-kube-api-access-hnhc4\") pod \"09df5e63-9283-4cae-80fe-cfa88f5a9b72\" (UID: \"09df5e63-9283-4cae-80fe-cfa88f5a9b72\") " Jan 20 17:36:01 crc kubenswrapper[4558]: I0120 17:36:01.067826 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/09df5e63-9283-4cae-80fe-cfa88f5a9b72-config-data\") pod \"09df5e63-9283-4cae-80fe-cfa88f5a9b72\" (UID: \"09df5e63-9283-4cae-80fe-cfa88f5a9b72\") " Jan 20 17:36:01 crc kubenswrapper[4558]: I0120 17:36:01.071546 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/09df5e63-9283-4cae-80fe-cfa88f5a9b72-logs" (OuterVolumeSpecName: "logs") pod "09df5e63-9283-4cae-80fe-cfa88f5a9b72" (UID: "09df5e63-9283-4cae-80fe-cfa88f5a9b72"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:36:01 crc kubenswrapper[4558]: I0120 17:36:01.080045 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09df5e63-9283-4cae-80fe-cfa88f5a9b72-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "09df5e63-9283-4cae-80fe-cfa88f5a9b72" (UID: "09df5e63-9283-4cae-80fe-cfa88f5a9b72"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:01 crc kubenswrapper[4558]: I0120 17:36:01.086122 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09df5e63-9283-4cae-80fe-cfa88f5a9b72-kube-api-access-hnhc4" (OuterVolumeSpecName: "kube-api-access-hnhc4") pod "09df5e63-9283-4cae-80fe-cfa88f5a9b72" (UID: "09df5e63-9283-4cae-80fe-cfa88f5a9b72"). InnerVolumeSpecName "kube-api-access-hnhc4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:36:01 crc kubenswrapper[4558]: I0120 17:36:01.121409 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09df5e63-9283-4cae-80fe-cfa88f5a9b72-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "09df5e63-9283-4cae-80fe-cfa88f5a9b72" (UID: "09df5e63-9283-4cae-80fe-cfa88f5a9b72"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:01 crc kubenswrapper[4558]: I0120 17:36:01.169899 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09df5e63-9283-4cae-80fe-cfa88f5a9b72-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:01 crc kubenswrapper[4558]: I0120 17:36:01.169928 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/09df5e63-9283-4cae-80fe-cfa88f5a9b72-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:01 crc kubenswrapper[4558]: I0120 17:36:01.169942 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hnhc4\" (UniqueName: \"kubernetes.io/projected/09df5e63-9283-4cae-80fe-cfa88f5a9b72-kube-api-access-hnhc4\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:01 crc kubenswrapper[4558]: I0120 17:36:01.169953 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/09df5e63-9283-4cae-80fe-cfa88f5a9b72-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:01 crc kubenswrapper[4558]: I0120 17:36:01.180927 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09df5e63-9283-4cae-80fe-cfa88f5a9b72-config-data" (OuterVolumeSpecName: "config-data") pod "09df5e63-9283-4cae-80fe-cfa88f5a9b72" (UID: "09df5e63-9283-4cae-80fe-cfa88f5a9b72"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:01 crc kubenswrapper[4558]: I0120 17:36:01.269671 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"f7881580-f0a5-47a0-9622-3927bcce668a","Type":"ContainerStarted","Data":"7afb4e77f803740421952c0dbe28e7f78cf5d2d3ff6b921fafb8facbbd98325f"} Jan 20 17:36:01 crc kubenswrapper[4558]: I0120 17:36:01.272934 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/09df5e63-9283-4cae-80fe-cfa88f5a9b72-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:01 crc kubenswrapper[4558]: I0120 17:36:01.291401 4558 generic.go:334] "Generic (PLEG): container finished" podID="09df5e63-9283-4cae-80fe-cfa88f5a9b72" containerID="de6accc2640fd6714fc093d22da413b576997cc0e51ebcd994892e58839c6118" exitCode=0 Jan 20 17:36:01 crc kubenswrapper[4558]: I0120 17:36:01.291479 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-755c97c57d-cswnd" event={"ID":"09df5e63-9283-4cae-80fe-cfa88f5a9b72","Type":"ContainerDied","Data":"de6accc2640fd6714fc093d22da413b576997cc0e51ebcd994892e58839c6118"} Jan 20 17:36:01 crc kubenswrapper[4558]: I0120 17:36:01.291520 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-755c97c57d-cswnd" event={"ID":"09df5e63-9283-4cae-80fe-cfa88f5a9b72","Type":"ContainerDied","Data":"89b0d3ea7ad3a30fd9ad478e238eb045ab4c94d8dcbfebf59a338b01371c9018"} Jan 20 17:36:01 crc kubenswrapper[4558]: I0120 17:36:01.291554 4558 scope.go:117] "RemoveContainer" containerID="de6accc2640fd6714fc093d22da413b576997cc0e51ebcd994892e58839c6118" Jan 20 17:36:01 crc kubenswrapper[4558]: I0120 17:36:01.291690 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-755c97c57d-cswnd" Jan 20 17:36:01 crc kubenswrapper[4558]: I0120 17:36:01.299411 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"79a4b689-d81b-44da-8baa-88fc6ce78172","Type":"ContainerStarted","Data":"d2e273ef1d32ff90a2aef248fb8b2c6529baf7e88fc616e72b397d1ef78a7d62"} Jan 20 17:36:01 crc kubenswrapper[4558]: I0120 17:36:01.303565 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-internal-api-0" podStartSLOduration=7.303545407 podStartE2EDuration="7.303545407s" podCreationTimestamp="2026-01-20 17:35:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:36:01.293071181 +0000 UTC m=+3255.053409147" watchObservedRunningTime="2026-01-20 17:36:01.303545407 +0000 UTC m=+3255.063883374" Jan 20 17:36:01 crc kubenswrapper[4558]: I0120 17:36:01.315368 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"c570a7ea-d296-44bc-a48f-6dd8be9754d0","Type":"ContainerStarted","Data":"80fe464dc7cd617b6ebc3164ada8f61f3bec4caca84b82392b77289a475163d7"} Jan 20 17:36:01 crc kubenswrapper[4558]: I0120 17:36:01.315412 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"c570a7ea-d296-44bc-a48f-6dd8be9754d0","Type":"ContainerStarted","Data":"1e1ade780111891ab36a10d37a0917edea1f34135b19b61c709e80ef7ae68b98"} Jan 20 17:36:01 crc kubenswrapper[4558]: I0120 17:36:01.315423 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"c570a7ea-d296-44bc-a48f-6dd8be9754d0","Type":"ContainerStarted","Data":"9eef7e7d47c2973d90654e404fc2609985341dcd776220debaa62a81de5588aa"} Jan 20 17:36:01 crc kubenswrapper[4558]: I0120 17:36:01.316354 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:36:01 crc kubenswrapper[4558]: I0120 17:36:01.320666 4558 scope.go:117] "RemoveContainer" containerID="7bd2233090ba7d102c3a1cba643b0d5335cea55cf7e97e49488020c2bb64eaa3" Jan 20 17:36:01 crc kubenswrapper[4558]: I0120 17:36:01.333233 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:36:01 crc kubenswrapper[4558]: I0120 17:36:01.333492 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"9800a14c-ebf5-4a5d-b384-c133973b55ff","Type":"ContainerStarted","Data":"f203c5705e628d436a58ef25c8ec48e9cd0684833bf7150a4abe881181faec1b"} Jan 20 17:36:01 crc kubenswrapper[4558]: I0120 17:36:01.336677 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"1fff3747-796d-420a-aff0-846dfc615df9","Type":"ContainerStarted","Data":"1adea188750d94315e6ae35bd310f13626df1c915eb5fa791f5e8484767fcbb0"} Jan 20 17:36:01 crc kubenswrapper[4558]: I0120 17:36:01.360529 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"466e0ec0-043e-49a6-b30d-0272443cb839","Type":"ContainerStarted","Data":"078b66eeb03b0f73239ad0cd6bf5aa81ad61f3191161fab43388406215e122a8"} Jan 20 17:36:01 crc kubenswrapper[4558]: I0120 17:36:01.375550 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack-kuttl-tests/ovn-northd-0" podStartSLOduration=2.375522891 podStartE2EDuration="2.375522891s" podCreationTimestamp="2026-01-20 17:35:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:36:01.349555202 +0000 UTC m=+3255.109893170" watchObservedRunningTime="2026-01-20 17:36:01.375522891 +0000 UTC m=+3255.135860858" Jan 20 17:36:01 crc kubenswrapper[4558]: I0120 17:36:01.378284 4558 scope.go:117] "RemoveContainer" containerID="de6accc2640fd6714fc093d22da413b576997cc0e51ebcd994892e58839c6118" Jan 20 17:36:01 crc kubenswrapper[4558]: E0120 17:36:01.382626 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"de6accc2640fd6714fc093d22da413b576997cc0e51ebcd994892e58839c6118\": container with ID starting with de6accc2640fd6714fc093d22da413b576997cc0e51ebcd994892e58839c6118 not found: ID does not exist" containerID="de6accc2640fd6714fc093d22da413b576997cc0e51ebcd994892e58839c6118" Jan 20 17:36:01 crc kubenswrapper[4558]: I0120 17:36:01.382690 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"de6accc2640fd6714fc093d22da413b576997cc0e51ebcd994892e58839c6118"} err="failed to get container status \"de6accc2640fd6714fc093d22da413b576997cc0e51ebcd994892e58839c6118\": rpc error: code = NotFound desc = could not find container \"de6accc2640fd6714fc093d22da413b576997cc0e51ebcd994892e58839c6118\": container with ID starting with de6accc2640fd6714fc093d22da413b576997cc0e51ebcd994892e58839c6118 not found: ID does not exist" Jan 20 17:36:01 crc kubenswrapper[4558]: I0120 17:36:01.382738 4558 scope.go:117] "RemoveContainer" containerID="7bd2233090ba7d102c3a1cba643b0d5335cea55cf7e97e49488020c2bb64eaa3" Jan 20 17:36:01 crc kubenswrapper[4558]: E0120 17:36:01.386693 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7bd2233090ba7d102c3a1cba643b0d5335cea55cf7e97e49488020c2bb64eaa3\": container with ID starting with 7bd2233090ba7d102c3a1cba643b0d5335cea55cf7e97e49488020c2bb64eaa3 not found: ID does not exist" containerID="7bd2233090ba7d102c3a1cba643b0d5335cea55cf7e97e49488020c2bb64eaa3" Jan 20 17:36:01 crc kubenswrapper[4558]: I0120 17:36:01.386728 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7bd2233090ba7d102c3a1cba643b0d5335cea55cf7e97e49488020c2bb64eaa3"} err="failed to get container status \"7bd2233090ba7d102c3a1cba643b0d5335cea55cf7e97e49488020c2bb64eaa3\": rpc error: code = NotFound desc = could not find container \"7bd2233090ba7d102c3a1cba643b0d5335cea55cf7e97e49488020c2bb64eaa3\": container with ID starting with 7bd2233090ba7d102c3a1cba643b0d5335cea55cf7e97e49488020c2bb64eaa3 not found: ID does not exist" Jan 20 17:36:01 crc kubenswrapper[4558]: I0120 17:36:01.386917 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"86db33ff-9888-4ae6-b6f8-48593b3cd2e2","Type":"ContainerStarted","Data":"02edffa36e0ddd93cf579526cab4e0b85b23379d76a1aa96a22d61b92edbed39"} Jan 20 17:36:01 crc kubenswrapper[4558]: I0120 17:36:01.394626 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/cinder-scheduler-0" podStartSLOduration=4.394612998 podStartE2EDuration="4.394612998s" podCreationTimestamp="2026-01-20 17:35:57 +0000 UTC" firstStartedPulling="0001-01-01 
00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:36:01.367581128 +0000 UTC m=+3255.127919094" watchObservedRunningTime="2026-01-20 17:36:01.394612998 +0000 UTC m=+3255.154950964" Jan 20 17:36:01 crc kubenswrapper[4558]: I0120 17:36:01.412219 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-755c97c57d-cswnd"] Jan 20 17:36:01 crc kubenswrapper[4558]: I0120 17:36:01.417516 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-755c97c57d-cswnd"] Jan 20 17:36:01 crc kubenswrapper[4558]: I0120 17:36:01.442276 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-scheduler-0" podStartSLOduration=4.442259059 podStartE2EDuration="4.442259059s" podCreationTimestamp="2026-01-20 17:35:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:36:01.421690073 +0000 UTC m=+3255.182028039" watchObservedRunningTime="2026-01-20 17:36:01.442259059 +0000 UTC m=+3255.202597026" Jan 20 17:36:01 crc kubenswrapper[4558]: I0120 17:36:01.802706 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/cinder-api-0" podUID="e228514c-6d22-4527-b365-913e3ea3cfdb" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.0.117:8776/healthcheck\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Jan 20 17:36:02 crc kubenswrapper[4558]: I0120 17:36:02.045807 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-5b76bfdfd9-6pwrz" Jan 20 17:36:02 crc kubenswrapper[4558]: I0120 17:36:02.196524 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/377a0d4c-edf2-4912-a04f-0f814ea9efdb-config-data-custom\") pod \"377a0d4c-edf2-4912-a04f-0f814ea9efdb\" (UID: \"377a0d4c-edf2-4912-a04f-0f814ea9efdb\") " Jan 20 17:36:02 crc kubenswrapper[4558]: I0120 17:36:02.196574 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/377a0d4c-edf2-4912-a04f-0f814ea9efdb-config-data\") pod \"377a0d4c-edf2-4912-a04f-0f814ea9efdb\" (UID: \"377a0d4c-edf2-4912-a04f-0f814ea9efdb\") " Jan 20 17:36:02 crc kubenswrapper[4558]: I0120 17:36:02.196830 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dzm29\" (UniqueName: \"kubernetes.io/projected/377a0d4c-edf2-4912-a04f-0f814ea9efdb-kube-api-access-dzm29\") pod \"377a0d4c-edf2-4912-a04f-0f814ea9efdb\" (UID: \"377a0d4c-edf2-4912-a04f-0f814ea9efdb\") " Jan 20 17:36:02 crc kubenswrapper[4558]: I0120 17:36:02.196910 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/377a0d4c-edf2-4912-a04f-0f814ea9efdb-logs\") pod \"377a0d4c-edf2-4912-a04f-0f814ea9efdb\" (UID: \"377a0d4c-edf2-4912-a04f-0f814ea9efdb\") " Jan 20 17:36:02 crc kubenswrapper[4558]: I0120 17:36:02.196983 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/377a0d4c-edf2-4912-a04f-0f814ea9efdb-combined-ca-bundle\") pod \"377a0d4c-edf2-4912-a04f-0f814ea9efdb\" (UID: 
\"377a0d4c-edf2-4912-a04f-0f814ea9efdb\") " Jan 20 17:36:02 crc kubenswrapper[4558]: I0120 17:36:02.197631 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/377a0d4c-edf2-4912-a04f-0f814ea9efdb-logs" (OuterVolumeSpecName: "logs") pod "377a0d4c-edf2-4912-a04f-0f814ea9efdb" (UID: "377a0d4c-edf2-4912-a04f-0f814ea9efdb"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:36:02 crc kubenswrapper[4558]: I0120 17:36:02.198037 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/377a0d4c-edf2-4912-a04f-0f814ea9efdb-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:02 crc kubenswrapper[4558]: I0120 17:36:02.204021 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/377a0d4c-edf2-4912-a04f-0f814ea9efdb-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "377a0d4c-edf2-4912-a04f-0f814ea9efdb" (UID: "377a0d4c-edf2-4912-a04f-0f814ea9efdb"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:02 crc kubenswrapper[4558]: I0120 17:36:02.205068 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/377a0d4c-edf2-4912-a04f-0f814ea9efdb-kube-api-access-dzm29" (OuterVolumeSpecName: "kube-api-access-dzm29") pod "377a0d4c-edf2-4912-a04f-0f814ea9efdb" (UID: "377a0d4c-edf2-4912-a04f-0f814ea9efdb"). InnerVolumeSpecName "kube-api-access-dzm29". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:36:02 crc kubenswrapper[4558]: I0120 17:36:02.224534 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/377a0d4c-edf2-4912-a04f-0f814ea9efdb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "377a0d4c-edf2-4912-a04f-0f814ea9efdb" (UID: "377a0d4c-edf2-4912-a04f-0f814ea9efdb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:02 crc kubenswrapper[4558]: I0120 17:36:02.244563 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/377a0d4c-edf2-4912-a04f-0f814ea9efdb-config-data" (OuterVolumeSpecName: "config-data") pod "377a0d4c-edf2-4912-a04f-0f814ea9efdb" (UID: "377a0d4c-edf2-4912-a04f-0f814ea9efdb"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:02 crc kubenswrapper[4558]: I0120 17:36:02.302939 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/377a0d4c-edf2-4912-a04f-0f814ea9efdb-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:02 crc kubenswrapper[4558]: I0120 17:36:02.302985 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/377a0d4c-edf2-4912-a04f-0f814ea9efdb-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:02 crc kubenswrapper[4558]: I0120 17:36:02.303002 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dzm29\" (UniqueName: \"kubernetes.io/projected/377a0d4c-edf2-4912-a04f-0f814ea9efdb-kube-api-access-dzm29\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:02 crc kubenswrapper[4558]: I0120 17:36:02.303018 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/377a0d4c-edf2-4912-a04f-0f814ea9efdb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:02 crc kubenswrapper[4558]: I0120 17:36:02.333099 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:36:02 crc kubenswrapper[4558]: I0120 17:36:02.367250 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:36:02 crc kubenswrapper[4558]: I0120 17:36:02.417300 4558 generic.go:334] "Generic (PLEG): container finished" podID="377a0d4c-edf2-4912-a04f-0f814ea9efdb" containerID="8508c73a6d33c4073f424676d96dd78cecddaccb095da8154e3ebbb8171a2314" exitCode=0 Jan 20 17:36:02 crc kubenswrapper[4558]: I0120 17:36:02.417374 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-5b76bfdfd9-6pwrz" event={"ID":"377a0d4c-edf2-4912-a04f-0f814ea9efdb","Type":"ContainerDied","Data":"8508c73a6d33c4073f424676d96dd78cecddaccb095da8154e3ebbb8171a2314"} Jan 20 17:36:02 crc kubenswrapper[4558]: I0120 17:36:02.417400 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-5b76bfdfd9-6pwrz" event={"ID":"377a0d4c-edf2-4912-a04f-0f814ea9efdb","Type":"ContainerDied","Data":"414f0875a609073a32394286fcda0d02e11c9c0d582bf0952c06f509510acf10"} Jan 20 17:36:02 crc kubenswrapper[4558]: I0120 17:36:02.417419 4558 scope.go:117] "RemoveContainer" containerID="8508c73a6d33c4073f424676d96dd78cecddaccb095da8154e3ebbb8171a2314" Jan 20 17:36:02 crc kubenswrapper[4558]: I0120 17:36:02.417570 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-5b76bfdfd9-6pwrz" Jan 20 17:36:02 crc kubenswrapper[4558]: I0120 17:36:02.433338 4558 generic.go:334] "Generic (PLEG): container finished" podID="0d2b6a10-ca95-4d72-a80b-1706822a07a7" containerID="4c8e41d3a2f28510194a679aee0e70d2fe8353517ab636b8947a4bcddafb37df" exitCode=1 Jan 20 17:36:02 crc kubenswrapper[4558]: I0120 17:36:02.433390 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"0d2b6a10-ca95-4d72-a80b-1706822a07a7","Type":"ContainerDied","Data":"4c8e41d3a2f28510194a679aee0e70d2fe8353517ab636b8947a4bcddafb37df"} Jan 20 17:36:02 crc kubenswrapper[4558]: I0120 17:36:02.433759 4558 scope.go:117] "RemoveContainer" containerID="4c8e41d3a2f28510194a679aee0e70d2fe8353517ab636b8947a4bcddafb37df" Jan 20 17:36:02 crc kubenswrapper[4558]: I0120 17:36:02.441075 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"9800a14c-ebf5-4a5d-b384-c133973b55ff","Type":"ContainerStarted","Data":"f88977b0890e7c6b4326fe8b4078f4b100c6c3f58772736456b0c1ccabf3b4fb"} Jan 20 17:36:02 crc kubenswrapper[4558]: I0120 17:36:02.451907 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"466e0ec0-043e-49a6-b30d-0272443cb839","Type":"ContainerStarted","Data":"bed66136ce5e42c0bdeb7a5aa041c344e04bfab855657e5edb47b7c7bc7a73a1"} Jan 20 17:36:02 crc kubenswrapper[4558]: I0120 17:36:02.453811 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-worker-5b76bfdfd9-6pwrz"] Jan 20 17:36:02 crc kubenswrapper[4558]: E0120 17:36:02.458910 4558 fsHandler.go:119] failed to collect filesystem stats - rootDiskErr: could not stat "/var/lib/containers/storage/overlay/be8aaae091f88350b6e283f226a64d7e628a165f5cc5cf8e3e65732d19a66e5a/diff" to get inode usage: stat /var/lib/containers/storage/overlay/be8aaae091f88350b6e283f226a64d7e628a165f5cc5cf8e3e65732d19a66e5a/diff: no such file or directory, extraDiskErr: could not stat "/var/log/pods/openstack-kuttl-tests_barbican-keystone-listener-755c97c57d-cswnd_09df5e63-9283-4cae-80fe-cfa88f5a9b72/barbican-keystone-listener-log/0.log" to get inode usage: stat /var/log/pods/openstack-kuttl-tests_barbican-keystone-listener-755c97c57d-cswnd_09df5e63-9283-4cae-80fe-cfa88f5a9b72/barbican-keystone-listener-log/0.log: no such file or directory Jan 20 17:36:02 crc kubenswrapper[4558]: I0120 17:36:02.459103 4558 scope.go:117] "RemoveContainer" containerID="c2b0747f8229855e5230624f8d196c8c02e4e23423e1c651c0079dce53d51d01" Jan 20 17:36:02 crc kubenswrapper[4558]: I0120 17:36:02.464604 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-worker-5b76bfdfd9-6pwrz"] Jan 20 17:36:02 crc kubenswrapper[4558]: I0120 17:36:02.486707 4558 scope.go:117] "RemoveContainer" containerID="8508c73a6d33c4073f424676d96dd78cecddaccb095da8154e3ebbb8171a2314" Jan 20 17:36:02 crc kubenswrapper[4558]: E0120 17:36:02.487112 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8508c73a6d33c4073f424676d96dd78cecddaccb095da8154e3ebbb8171a2314\": container with ID starting with 8508c73a6d33c4073f424676d96dd78cecddaccb095da8154e3ebbb8171a2314 not found: ID does not exist" containerID="8508c73a6d33c4073f424676d96dd78cecddaccb095da8154e3ebbb8171a2314" Jan 20 17:36:02 crc kubenswrapper[4558]: I0120 17:36:02.487148 4558 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8508c73a6d33c4073f424676d96dd78cecddaccb095da8154e3ebbb8171a2314"} err="failed to get container status \"8508c73a6d33c4073f424676d96dd78cecddaccb095da8154e3ebbb8171a2314\": rpc error: code = NotFound desc = could not find container \"8508c73a6d33c4073f424676d96dd78cecddaccb095da8154e3ebbb8171a2314\": container with ID starting with 8508c73a6d33c4073f424676d96dd78cecddaccb095da8154e3ebbb8171a2314 not found: ID does not exist" Jan 20 17:36:02 crc kubenswrapper[4558]: I0120 17:36:02.487190 4558 scope.go:117] "RemoveContainer" containerID="c2b0747f8229855e5230624f8d196c8c02e4e23423e1c651c0079dce53d51d01" Jan 20 17:36:02 crc kubenswrapper[4558]: E0120 17:36:02.487584 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c2b0747f8229855e5230624f8d196c8c02e4e23423e1c651c0079dce53d51d01\": container with ID starting with c2b0747f8229855e5230624f8d196c8c02e4e23423e1c651c0079dce53d51d01 not found: ID does not exist" containerID="c2b0747f8229855e5230624f8d196c8c02e4e23423e1c651c0079dce53d51d01" Jan 20 17:36:02 crc kubenswrapper[4558]: I0120 17:36:02.487616 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c2b0747f8229855e5230624f8d196c8c02e4e23423e1c651c0079dce53d51d01"} err="failed to get container status \"c2b0747f8229855e5230624f8d196c8c02e4e23423e1c651c0079dce53d51d01\": rpc error: code = NotFound desc = could not find container \"c2b0747f8229855e5230624f8d196c8c02e4e23423e1c651c0079dce53d51d01\": container with ID starting with c2b0747f8229855e5230624f8d196c8c02e4e23423e1c651c0079dce53d51d01 not found: ID does not exist" Jan 20 17:36:02 crc kubenswrapper[4558]: I0120 17:36:02.577615 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09df5e63-9283-4cae-80fe-cfa88f5a9b72" path="/var/lib/kubelet/pods/09df5e63-9283-4cae-80fe-cfa88f5a9b72/volumes" Jan 20 17:36:02 crc kubenswrapper[4558]: I0120 17:36:02.578282 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="377a0d4c-edf2-4912-a04f-0f814ea9efdb" path="/var/lib/kubelet/pods/377a0d4c-edf2-4912-a04f-0f814ea9efdb/volumes" Jan 20 17:36:02 crc kubenswrapper[4558]: I0120 17:36:02.863394 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/placement-5577476b98-wgg9z" podUID="06b8e2bb-a133-4a4b-92dd-12cf20ee4300" containerName="placement-api" probeResult="failure" output="Get \"https://10.217.0.115:8778/\": read tcp 10.217.0.2:57122->10.217.0.115:8778: read: connection reset by peer" Jan 20 17:36:02 crc kubenswrapper[4558]: I0120 17:36:02.863476 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/placement-5577476b98-wgg9z" podUID="06b8e2bb-a133-4a4b-92dd-12cf20ee4300" containerName="placement-log" probeResult="failure" output="Get \"https://10.217.0.115:8778/\": read tcp 10.217.0.2:57124->10.217.0.115:8778: read: connection reset by peer" Jan 20 17:36:02 crc kubenswrapper[4558]: I0120 17:36:02.998233 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.039183 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.160694 4558 prober.go:107] "Probe failed" probeType="Readiness" 
pod="openstack-kuttl-tests/cinder-api-0" podUID="e228514c-6d22-4527-b365-913e3ea3cfdb" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.0.117:8776/healthcheck\": read tcp 10.217.0.2:39114->10.217.0.117:8776: read: connection reset by peer" Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.160835 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:36:03 crc kubenswrapper[4558]: E0120 17:36:03.209082 4558 fsHandler.go:119] failed to collect filesystem stats - rootDiskErr: could not stat "/var/lib/containers/storage/overlay/bbe814488069f022781c489a599135da76578178852d1ad6af29bf5840e6fcb4/diff" to get inode usage: stat /var/lib/containers/storage/overlay/bbe814488069f022781c489a599135da76578178852d1ad6af29bf5840e6fcb4/diff: no such file or directory, extraDiskErr: could not stat "/var/log/pods/openstack-kuttl-tests_barbican-keystone-listener-755c97c57d-cswnd_09df5e63-9283-4cae-80fe-cfa88f5a9b72/barbican-keystone-listener/0.log" to get inode usage: stat /var/log/pods/openstack-kuttl-tests_barbican-keystone-listener-755c97c57d-cswnd_09df5e63-9283-4cae-80fe-cfa88f5a9b72/barbican-keystone-listener/0.log: no such file or directory Jan 20 17:36:03 crc kubenswrapper[4558]: E0120 17:36:03.215411 4558 fsHandler.go:119] failed to collect filesystem stats - rootDiskErr: could not stat "/var/lib/containers/storage/overlay/a84601742a31b423c6b8a7778cab99d2d74756f3b265d4349b641bc41fffe6f5/diff" to get inode usage: stat /var/lib/containers/storage/overlay/a84601742a31b423c6b8a7778cab99d2d74756f3b265d4349b641bc41fffe6f5/diff: no such file or directory, extraDiskErr: could not stat "/var/log/pods/openstack-kuttl-tests_barbican-worker-5b76bfdfd9-6pwrz_377a0d4c-edf2-4912-a04f-0f814ea9efdb/barbican-worker-log/0.log" to get inode usage: stat /var/log/pods/openstack-kuttl-tests_barbican-worker-5b76bfdfd9-6pwrz_377a0d4c-edf2-4912-a04f-0f814ea9efdb/barbican-worker-log/0.log: no such file or directory Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.287592 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-5577476b98-wgg9z" Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.436931 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/06b8e2bb-a133-4a4b-92dd-12cf20ee4300-logs\") pod \"06b8e2bb-a133-4a4b-92dd-12cf20ee4300\" (UID: \"06b8e2bb-a133-4a4b-92dd-12cf20ee4300\") " Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.437055 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/06b8e2bb-a133-4a4b-92dd-12cf20ee4300-scripts\") pod \"06b8e2bb-a133-4a4b-92dd-12cf20ee4300\" (UID: \"06b8e2bb-a133-4a4b-92dd-12cf20ee4300\") " Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.437187 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/06b8e2bb-a133-4a4b-92dd-12cf20ee4300-public-tls-certs\") pod \"06b8e2bb-a133-4a4b-92dd-12cf20ee4300\" (UID: \"06b8e2bb-a133-4a4b-92dd-12cf20ee4300\") " Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.437370 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/06b8e2bb-a133-4a4b-92dd-12cf20ee4300-logs" (OuterVolumeSpecName: "logs") pod "06b8e2bb-a133-4a4b-92dd-12cf20ee4300" (UID: "06b8e2bb-a133-4a4b-92dd-12cf20ee4300"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.437817 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-77p2s\" (UniqueName: \"kubernetes.io/projected/06b8e2bb-a133-4a4b-92dd-12cf20ee4300-kube-api-access-77p2s\") pod \"06b8e2bb-a133-4a4b-92dd-12cf20ee4300\" (UID: \"06b8e2bb-a133-4a4b-92dd-12cf20ee4300\") " Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.438029 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06b8e2bb-a133-4a4b-92dd-12cf20ee4300-config-data\") pod \"06b8e2bb-a133-4a4b-92dd-12cf20ee4300\" (UID: \"06b8e2bb-a133-4a4b-92dd-12cf20ee4300\") " Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.438123 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06b8e2bb-a133-4a4b-92dd-12cf20ee4300-combined-ca-bundle\") pod \"06b8e2bb-a133-4a4b-92dd-12cf20ee4300\" (UID: \"06b8e2bb-a133-4a4b-92dd-12cf20ee4300\") " Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.438152 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/06b8e2bb-a133-4a4b-92dd-12cf20ee4300-internal-tls-certs\") pod \"06b8e2bb-a133-4a4b-92dd-12cf20ee4300\" (UID: \"06b8e2bb-a133-4a4b-92dd-12cf20ee4300\") " Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.438779 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/06b8e2bb-a133-4a4b-92dd-12cf20ee4300-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.442463 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06b8e2bb-a133-4a4b-92dd-12cf20ee4300-scripts" (OuterVolumeSpecName: "scripts") pod "06b8e2bb-a133-4a4b-92dd-12cf20ee4300" (UID: "06b8e2bb-a133-4a4b-92dd-12cf20ee4300"). 
InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.444293 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/06b8e2bb-a133-4a4b-92dd-12cf20ee4300-kube-api-access-77p2s" (OuterVolumeSpecName: "kube-api-access-77p2s") pod "06b8e2bb-a133-4a4b-92dd-12cf20ee4300" (UID: "06b8e2bb-a133-4a4b-92dd-12cf20ee4300"). InnerVolumeSpecName "kube-api-access-77p2s". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.473135 4558 generic.go:334] "Generic (PLEG): container finished" podID="79a4b689-d81b-44da-8baa-88fc6ce78172" containerID="d2e273ef1d32ff90a2aef248fb8b2c6529baf7e88fc616e72b397d1ef78a7d62" exitCode=0 Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.473316 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"79a4b689-d81b-44da-8baa-88fc6ce78172","Type":"ContainerDied","Data":"d2e273ef1d32ff90a2aef248fb8b2c6529baf7e88fc616e72b397d1ef78a7d62"} Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.494426 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.512888 4558 generic.go:334] "Generic (PLEG): container finished" podID="06b8e2bb-a133-4a4b-92dd-12cf20ee4300" containerID="1d3016d6f66a0a15d2f54946e32a1f1391b54b015baba44a9003218a3420e045" exitCode=0 Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.513034 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-5577476b98-wgg9z" Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.513219 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-5577476b98-wgg9z" event={"ID":"06b8e2bb-a133-4a4b-92dd-12cf20ee4300","Type":"ContainerDied","Data":"1d3016d6f66a0a15d2f54946e32a1f1391b54b015baba44a9003218a3420e045"} Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.513254 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-5577476b98-wgg9z" event={"ID":"06b8e2bb-a133-4a4b-92dd-12cf20ee4300","Type":"ContainerDied","Data":"3b1b219bd9663be2d1a49ac8851cb6f5be898ae04c21007da18b9e4e44ccdd94"} Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.513275 4558 scope.go:117] "RemoveContainer" containerID="1d3016d6f66a0a15d2f54946e32a1f1391b54b015baba44a9003218a3420e045" Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.536137 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06b8e2bb-a133-4a4b-92dd-12cf20ee4300-config-data" (OuterVolumeSpecName: "config-data") pod "06b8e2bb-a133-4a4b-92dd-12cf20ee4300" (UID: "06b8e2bb-a133-4a4b-92dd-12cf20ee4300"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.538994 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"0d2b6a10-ca95-4d72-a80b-1706822a07a7","Type":"ContainerStarted","Data":"99dba2fa739dee263fd622caaa8566eb366703c39dd9dcf2a61dbad75ba8d1e5"} Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.539311 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.540923 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06b8e2bb-a133-4a4b-92dd-12cf20ee4300-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.540951 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/06b8e2bb-a133-4a4b-92dd-12cf20ee4300-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.541020 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-77p2s\" (UniqueName: \"kubernetes.io/projected/06b8e2bb-a133-4a4b-92dd-12cf20ee4300-kube-api-access-77p2s\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.545384 4558 generic.go:334] "Generic (PLEG): container finished" podID="1fff3747-796d-420a-aff0-846dfc615df9" containerID="1adea188750d94315e6ae35bd310f13626df1c915eb5fa791f5e8484767fcbb0" exitCode=0 Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.545463 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"1fff3747-796d-420a-aff0-846dfc615df9","Type":"ContainerDied","Data":"1adea188750d94315e6ae35bd310f13626df1c915eb5fa791f5e8484767fcbb0"} Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.557367 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"466e0ec0-043e-49a6-b30d-0272443cb839","Type":"ContainerStarted","Data":"46bc188726f1c02977bbc17eab4df62a8fd4adc650c33bd2be8f2e4f3435a82c"} Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.557644 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.566384 4558 generic.go:334] "Generic (PLEG): container finished" podID="e228514c-6d22-4527-b365-913e3ea3cfdb" containerID="53088ffac0cd61c3f88675b4eee842375a2868c6b2cbeba471fe06c8d7634d43" exitCode=0 Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.566707 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"e228514c-6d22-4527-b365-913e3ea3cfdb","Type":"ContainerDied","Data":"53088ffac0cd61c3f88675b4eee842375a2868c6b2cbeba471fe06c8d7634d43"} Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.573393 4558 generic.go:334] "Generic (PLEG): container finished" podID="0def1343-9e4b-4f74-84bd-3212688b59ce" containerID="620f238c2702c3c6e8d341adc99ebb5ddd89a8d718eb1447a8506a134b9b1dea" exitCode=1 Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.574633 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"0def1343-9e4b-4f74-84bd-3212688b59ce","Type":"ContainerDied","Data":"620f238c2702c3c6e8d341adc99ebb5ddd89a8d718eb1447a8506a134b9b1dea"} Jan 20 
17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.574927 4558 scope.go:117] "RemoveContainer" containerID="620f238c2702c3c6e8d341adc99ebb5ddd89a8d718eb1447a8506a134b9b1dea" Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.575840 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.589807 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.601242 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06b8e2bb-a133-4a4b-92dd-12cf20ee4300-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "06b8e2bb-a133-4a4b-92dd-12cf20ee4300" (UID: "06b8e2bb-a133-4a4b-92dd-12cf20ee4300"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.606559 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=2.530951362 podStartE2EDuration="6.606539742s" podCreationTimestamp="2026-01-20 17:35:57 +0000 UTC" firstStartedPulling="2026-01-20 17:35:58.868376883 +0000 UTC m=+3252.628714850" lastFinishedPulling="2026-01-20 17:36:02.943965263 +0000 UTC m=+3256.704303230" observedRunningTime="2026-01-20 17:36:03.585806606 +0000 UTC m=+3257.346144573" watchObservedRunningTime="2026-01-20 17:36:03.606539742 +0000 UTC m=+3257.366877709" Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.619262 4558 scope.go:117] "RemoveContainer" containerID="cb36bad8596228046899d22f8d26fc3bac1a3290c060da3fe4c6163f9ec94ebb" Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.629105 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06b8e2bb-a133-4a4b-92dd-12cf20ee4300-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "06b8e2bb-a133-4a4b-92dd-12cf20ee4300" (UID: "06b8e2bb-a133-4a4b-92dd-12cf20ee4300"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.636719 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06b8e2bb-a133-4a4b-92dd-12cf20ee4300-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "06b8e2bb-a133-4a4b-92dd-12cf20ee4300" (UID: "06b8e2bb-a133-4a4b-92dd-12cf20ee4300"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.642441 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/06b8e2bb-a133-4a4b-92dd-12cf20ee4300-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.642470 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06b8e2bb-a133-4a4b-92dd-12cf20ee4300-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.642480 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/06b8e2bb-a133-4a4b-92dd-12cf20ee4300-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.655389 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.655718 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.683344 4558 scope.go:117] "RemoveContainer" containerID="1d3016d6f66a0a15d2f54946e32a1f1391b54b015baba44a9003218a3420e045" Jan 20 17:36:03 crc kubenswrapper[4558]: E0120 17:36:03.683718 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1d3016d6f66a0a15d2f54946e32a1f1391b54b015baba44a9003218a3420e045\": container with ID starting with 1d3016d6f66a0a15d2f54946e32a1f1391b54b015baba44a9003218a3420e045 not found: ID does not exist" containerID="1d3016d6f66a0a15d2f54946e32a1f1391b54b015baba44a9003218a3420e045" Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.683747 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d3016d6f66a0a15d2f54946e32a1f1391b54b015baba44a9003218a3420e045"} err="failed to get container status \"1d3016d6f66a0a15d2f54946e32a1f1391b54b015baba44a9003218a3420e045\": rpc error: code = NotFound desc = could not find container \"1d3016d6f66a0a15d2f54946e32a1f1391b54b015baba44a9003218a3420e045\": container with ID starting with 1d3016d6f66a0a15d2f54946e32a1f1391b54b015baba44a9003218a3420e045 not found: ID does not exist" Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.683766 4558 scope.go:117] "RemoveContainer" containerID="cb36bad8596228046899d22f8d26fc3bac1a3290c060da3fe4c6163f9ec94ebb" Jan 20 17:36:03 crc kubenswrapper[4558]: E0120 17:36:03.684036 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cb36bad8596228046899d22f8d26fc3bac1a3290c060da3fe4c6163f9ec94ebb\": container with ID starting with cb36bad8596228046899d22f8d26fc3bac1a3290c060da3fe4c6163f9ec94ebb not found: ID does not exist" containerID="cb36bad8596228046899d22f8d26fc3bac1a3290c060da3fe4c6163f9ec94ebb" Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.684051 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cb36bad8596228046899d22f8d26fc3bac1a3290c060da3fe4c6163f9ec94ebb"} err="failed to get container status \"cb36bad8596228046899d22f8d26fc3bac1a3290c060da3fe4c6163f9ec94ebb\": rpc error: code = NotFound desc = could not find container 
\"cb36bad8596228046899d22f8d26fc3bac1a3290c060da3fe4c6163f9ec94ebb\": container with ID starting with cb36bad8596228046899d22f8d26fc3bac1a3290c060da3fe4c6163f9ec94ebb not found: ID does not exist" Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.748642 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e228514c-6d22-4527-b365-913e3ea3cfdb-public-tls-certs\") pod \"e228514c-6d22-4527-b365-913e3ea3cfdb\" (UID: \"e228514c-6d22-4527-b365-913e3ea3cfdb\") " Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.749854 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e228514c-6d22-4527-b365-913e3ea3cfdb-etc-machine-id\") pod \"e228514c-6d22-4527-b365-913e3ea3cfdb\" (UID: \"e228514c-6d22-4527-b365-913e3ea3cfdb\") " Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.749881 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e228514c-6d22-4527-b365-913e3ea3cfdb-scripts\") pod \"e228514c-6d22-4527-b365-913e3ea3cfdb\" (UID: \"e228514c-6d22-4527-b365-913e3ea3cfdb\") " Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.749904 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e228514c-6d22-4527-b365-913e3ea3cfdb-config-data\") pod \"e228514c-6d22-4527-b365-913e3ea3cfdb\" (UID: \"e228514c-6d22-4527-b365-913e3ea3cfdb\") " Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.749972 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e228514c-6d22-4527-b365-913e3ea3cfdb-logs\") pod \"e228514c-6d22-4527-b365-913e3ea3cfdb\" (UID: \"e228514c-6d22-4527-b365-913e3ea3cfdb\") " Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.750030 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e228514c-6d22-4527-b365-913e3ea3cfdb-combined-ca-bundle\") pod \"e228514c-6d22-4527-b365-913e3ea3cfdb\" (UID: \"e228514c-6d22-4527-b365-913e3ea3cfdb\") " Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.750095 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j6z96\" (UniqueName: \"kubernetes.io/projected/e228514c-6d22-4527-b365-913e3ea3cfdb-kube-api-access-j6z96\") pod \"e228514c-6d22-4527-b365-913e3ea3cfdb\" (UID: \"e228514c-6d22-4527-b365-913e3ea3cfdb\") " Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.750218 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e228514c-6d22-4527-b365-913e3ea3cfdb-internal-tls-certs\") pod \"e228514c-6d22-4527-b365-913e3ea3cfdb\" (UID: \"e228514c-6d22-4527-b365-913e3ea3cfdb\") " Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.750248 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e228514c-6d22-4527-b365-913e3ea3cfdb-config-data-custom\") pod \"e228514c-6d22-4527-b365-913e3ea3cfdb\" (UID: \"e228514c-6d22-4527-b365-913e3ea3cfdb\") " Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.753789 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/empty-dir/e228514c-6d22-4527-b365-913e3ea3cfdb-logs" (OuterVolumeSpecName: "logs") pod "e228514c-6d22-4527-b365-913e3ea3cfdb" (UID: "e228514c-6d22-4527-b365-913e3ea3cfdb"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.754693 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e228514c-6d22-4527-b365-913e3ea3cfdb-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "e228514c-6d22-4527-b365-913e3ea3cfdb" (UID: "e228514c-6d22-4527-b365-913e3ea3cfdb"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.756276 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e228514c-6d22-4527-b365-913e3ea3cfdb-scripts" (OuterVolumeSpecName: "scripts") pod "e228514c-6d22-4527-b365-913e3ea3cfdb" (UID: "e228514c-6d22-4527-b365-913e3ea3cfdb"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.768808 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e228514c-6d22-4527-b365-913e3ea3cfdb-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "e228514c-6d22-4527-b365-913e3ea3cfdb" (UID: "e228514c-6d22-4527-b365-913e3ea3cfdb"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.776854 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e228514c-6d22-4527-b365-913e3ea3cfdb-kube-api-access-j6z96" (OuterVolumeSpecName: "kube-api-access-j6z96") pod "e228514c-6d22-4527-b365-913e3ea3cfdb" (UID: "e228514c-6d22-4527-b365-913e3ea3cfdb"). InnerVolumeSpecName "kube-api-access-j6z96". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.809226 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e228514c-6d22-4527-b365-913e3ea3cfdb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e228514c-6d22-4527-b365-913e3ea3cfdb" (UID: "e228514c-6d22-4527-b365-913e3ea3cfdb"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.855624 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j6z96\" (UniqueName: \"kubernetes.io/projected/e228514c-6d22-4527-b365-913e3ea3cfdb-kube-api-access-j6z96\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.855655 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e228514c-6d22-4527-b365-913e3ea3cfdb-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.855666 4558 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e228514c-6d22-4527-b365-913e3ea3cfdb-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.855677 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e228514c-6d22-4527-b365-913e3ea3cfdb-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.855689 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e228514c-6d22-4527-b365-913e3ea3cfdb-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.855698 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e228514c-6d22-4527-b365-913e3ea3cfdb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.864827 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-5577476b98-wgg9z"] Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.876145 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/placement-5577476b98-wgg9z"] Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.943432 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e228514c-6d22-4527-b365-913e3ea3cfdb-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "e228514c-6d22-4527-b365-913e3ea3cfdb" (UID: "e228514c-6d22-4527-b365-913e3ea3cfdb"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.945811 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e228514c-6d22-4527-b365-913e3ea3cfdb-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "e228514c-6d22-4527-b365-913e3ea3cfdb" (UID: "e228514c-6d22-4527-b365-913e3ea3cfdb"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.949268 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e228514c-6d22-4527-b365-913e3ea3cfdb-config-data" (OuterVolumeSpecName: "config-data") pod "e228514c-6d22-4527-b365-913e3ea3cfdb" (UID: "e228514c-6d22-4527-b365-913e3ea3cfdb"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.957863 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e228514c-6d22-4527-b365-913e3ea3cfdb-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.957905 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e228514c-6d22-4527-b365-913e3ea3cfdb-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:03 crc kubenswrapper[4558]: I0120 17:36:03.957916 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e228514c-6d22-4527-b365-913e3ea3cfdb-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:04 crc kubenswrapper[4558]: I0120 17:36:04.363538 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:36:04 crc kubenswrapper[4558]: I0120 17:36:04.577291 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="06b8e2bb-a133-4a4b-92dd-12cf20ee4300" path="/var/lib/kubelet/pods/06b8e2bb-a133-4a4b-92dd-12cf20ee4300/volumes" Jan 20 17:36:04 crc kubenswrapper[4558]: I0120 17:36:04.588564 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"1fff3747-796d-420a-aff0-846dfc615df9","Type":"ContainerStarted","Data":"f5120513f44b4adc059dbacc3c6c22162c7eee356a5828f446ded77ed4759a9f"} Jan 20 17:36:04 crc kubenswrapper[4558]: I0120 17:36:04.591013 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"e228514c-6d22-4527-b365-913e3ea3cfdb","Type":"ContainerDied","Data":"c4d8f498217a4761f2808d12b3f68f581b7f216075fc52ef93e3194e0d7de2e2"} Jan 20 17:36:04 crc kubenswrapper[4558]: I0120 17:36:04.591074 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:36:04 crc kubenswrapper[4558]: I0120 17:36:04.591093 4558 scope.go:117] "RemoveContainer" containerID="53088ffac0cd61c3f88675b4eee842375a2868c6b2cbeba471fe06c8d7634d43" Jan 20 17:36:04 crc kubenswrapper[4558]: I0120 17:36:04.597596 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"0def1343-9e4b-4f74-84bd-3212688b59ce","Type":"ContainerStarted","Data":"4777bd55f602e6282b42a942acc808f0478764354d0970617b2e6c1dddd4da85"} Jan 20 17:36:04 crc kubenswrapper[4558]: I0120 17:36:04.597817 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:36:04 crc kubenswrapper[4558]: I0120 17:36:04.604109 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"79a4b689-d81b-44da-8baa-88fc6ce78172","Type":"ContainerStarted","Data":"4937e3357efe6ba2fe8b94eb2c378f889e567ce6926af22853306ab11522be96"} Jan 20 17:36:04 crc kubenswrapper[4558]: I0120 17:36:04.615826 4558 generic.go:334] "Generic (PLEG): container finished" podID="0d2b6a10-ca95-4d72-a80b-1706822a07a7" containerID="99dba2fa739dee263fd622caaa8566eb366703c39dd9dcf2a61dbad75ba8d1e5" exitCode=1 Jan 20 17:36:04 crc kubenswrapper[4558]: I0120 17:36:04.616336 4558 scope.go:117] "RemoveContainer" containerID="99dba2fa739dee263fd622caaa8566eb366703c39dd9dcf2a61dbad75ba8d1e5" Jan 20 17:36:04 crc kubenswrapper[4558]: E0120 17:36:04.616583 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-cell0-conductor-conductor\" with CrashLoopBackOff: \"back-off 10s restarting failed container=nova-cell0-conductor-conductor pod=nova-cell0-conductor-0_openstack-kuttl-tests(0d2b6a10-ca95-4d72-a80b-1706822a07a7)\"" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="0d2b6a10-ca95-4d72-a80b-1706822a07a7" Jan 20 17:36:04 crc kubenswrapper[4558]: I0120 17:36:04.616759 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"0d2b6a10-ca95-4d72-a80b-1706822a07a7","Type":"ContainerDied","Data":"99dba2fa739dee263fd622caaa8566eb366703c39dd9dcf2a61dbad75ba8d1e5"} Jan 20 17:36:04 crc kubenswrapper[4558]: I0120 17:36:04.621946 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/openstack-cell1-galera-0" podStartSLOduration=7.62193094 podStartE2EDuration="7.62193094s" podCreationTimestamp="2026-01-20 17:35:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:36:04.607119498 +0000 UTC m=+3258.367457455" watchObservedRunningTime="2026-01-20 17:36:04.62193094 +0000 UTC m=+3258.382268907" Jan 20 17:36:04 crc kubenswrapper[4558]: I0120 17:36:04.628374 4558 scope.go:117] "RemoveContainer" containerID="8ac7d16d0f78462693261b691f194857cebebf572d8c74e83499e3ff412581c0" Jan 20 17:36:04 crc kubenswrapper[4558]: I0120 17:36:04.646833 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/openstack-galera-0" podStartSLOduration=7.64681472 podStartE2EDuration="7.64681472s" podCreationTimestamp="2026-01-20 17:35:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:36:04.646463639 +0000 UTC m=+3258.406801607" 
watchObservedRunningTime="2026-01-20 17:36:04.64681472 +0000 UTC m=+3258.407152687" Jan 20 17:36:04 crc kubenswrapper[4558]: I0120 17:36:04.665012 4558 scope.go:117] "RemoveContainer" containerID="4c8e41d3a2f28510194a679aee0e70d2fe8353517ab636b8947a4bcddafb37df" Jan 20 17:36:04 crc kubenswrapper[4558]: I0120 17:36:04.668717 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:36:04 crc kubenswrapper[4558]: I0120 17:36:04.678574 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:36:04 crc kubenswrapper[4558]: I0120 17:36:04.689490 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:36:04 crc kubenswrapper[4558]: E0120 17:36:04.691058 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="377a0d4c-edf2-4912-a04f-0f814ea9efdb" containerName="barbican-worker-log" Jan 20 17:36:04 crc kubenswrapper[4558]: I0120 17:36:04.691082 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="377a0d4c-edf2-4912-a04f-0f814ea9efdb" containerName="barbican-worker-log" Jan 20 17:36:04 crc kubenswrapper[4558]: E0120 17:36:04.691102 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06b8e2bb-a133-4a4b-92dd-12cf20ee4300" containerName="placement-log" Jan 20 17:36:04 crc kubenswrapper[4558]: I0120 17:36:04.691109 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="06b8e2bb-a133-4a4b-92dd-12cf20ee4300" containerName="placement-log" Jan 20 17:36:04 crc kubenswrapper[4558]: E0120 17:36:04.691126 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09df5e63-9283-4cae-80fe-cfa88f5a9b72" containerName="barbican-keystone-listener-log" Jan 20 17:36:04 crc kubenswrapper[4558]: I0120 17:36:04.691133 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="09df5e63-9283-4cae-80fe-cfa88f5a9b72" containerName="barbican-keystone-listener-log" Jan 20 17:36:04 crc kubenswrapper[4558]: E0120 17:36:04.691142 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e228514c-6d22-4527-b365-913e3ea3cfdb" containerName="cinder-api-log" Jan 20 17:36:04 crc kubenswrapper[4558]: I0120 17:36:04.691149 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e228514c-6d22-4527-b365-913e3ea3cfdb" containerName="cinder-api-log" Jan 20 17:36:04 crc kubenswrapper[4558]: E0120 17:36:04.691180 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09df5e63-9283-4cae-80fe-cfa88f5a9b72" containerName="barbican-keystone-listener" Jan 20 17:36:04 crc kubenswrapper[4558]: I0120 17:36:04.691186 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="09df5e63-9283-4cae-80fe-cfa88f5a9b72" containerName="barbican-keystone-listener" Jan 20 17:36:04 crc kubenswrapper[4558]: E0120 17:36:04.691197 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06b8e2bb-a133-4a4b-92dd-12cf20ee4300" containerName="placement-api" Jan 20 17:36:04 crc kubenswrapper[4558]: I0120 17:36:04.691202 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="06b8e2bb-a133-4a4b-92dd-12cf20ee4300" containerName="placement-api" Jan 20 17:36:04 crc kubenswrapper[4558]: E0120 17:36:04.691224 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e228514c-6d22-4527-b365-913e3ea3cfdb" containerName="cinder-api" Jan 20 17:36:04 crc kubenswrapper[4558]: I0120 17:36:04.691230 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e228514c-6d22-4527-b365-913e3ea3cfdb" 
containerName="cinder-api" Jan 20 17:36:04 crc kubenswrapper[4558]: E0120 17:36:04.691242 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="377a0d4c-edf2-4912-a04f-0f814ea9efdb" containerName="barbican-worker" Jan 20 17:36:04 crc kubenswrapper[4558]: I0120 17:36:04.691248 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="377a0d4c-edf2-4912-a04f-0f814ea9efdb" containerName="barbican-worker" Jan 20 17:36:04 crc kubenswrapper[4558]: I0120 17:36:04.691450 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e228514c-6d22-4527-b365-913e3ea3cfdb" containerName="cinder-api" Jan 20 17:36:04 crc kubenswrapper[4558]: I0120 17:36:04.691469 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="377a0d4c-edf2-4912-a04f-0f814ea9efdb" containerName="barbican-worker-log" Jan 20 17:36:04 crc kubenswrapper[4558]: I0120 17:36:04.691480 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="09df5e63-9283-4cae-80fe-cfa88f5a9b72" containerName="barbican-keystone-listener-log" Jan 20 17:36:04 crc kubenswrapper[4558]: I0120 17:36:04.691495 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="06b8e2bb-a133-4a4b-92dd-12cf20ee4300" containerName="placement-api" Jan 20 17:36:04 crc kubenswrapper[4558]: I0120 17:36:04.691507 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="06b8e2bb-a133-4a4b-92dd-12cf20ee4300" containerName="placement-log" Jan 20 17:36:04 crc kubenswrapper[4558]: I0120 17:36:04.691518 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="377a0d4c-edf2-4912-a04f-0f814ea9efdb" containerName="barbican-worker" Jan 20 17:36:04 crc kubenswrapper[4558]: I0120 17:36:04.691544 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="09df5e63-9283-4cae-80fe-cfa88f5a9b72" containerName="barbican-keystone-listener" Jan 20 17:36:04 crc kubenswrapper[4558]: I0120 17:36:04.691556 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e228514c-6d22-4527-b365-913e3ea3cfdb" containerName="cinder-api-log" Jan 20 17:36:04 crc kubenswrapper[4558]: I0120 17:36:04.692538 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:36:04 crc kubenswrapper[4558]: I0120 17:36:04.696537 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-api-config-data" Jan 20 17:36:04 crc kubenswrapper[4558]: I0120 17:36:04.696741 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-cinder-internal-svc" Jan 20 17:36:04 crc kubenswrapper[4558]: I0120 17:36:04.696872 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-cinder-public-svc" Jan 20 17:36:04 crc kubenswrapper[4558]: I0120 17:36:04.719516 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:36:04 crc kubenswrapper[4558]: I0120 17:36:04.883200 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gjr4x\" (UniqueName: \"kubernetes.io/projected/e1a1ac97-a530-4ba0-8c9b-835a2b576c8d-kube-api-access-gjr4x\") pod \"cinder-api-0\" (UID: \"e1a1ac97-a530-4ba0-8c9b-835a2b576c8d\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:36:04 crc kubenswrapper[4558]: I0120 17:36:04.883261 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1a1ac97-a530-4ba0-8c9b-835a2b576c8d-config-data\") pod \"cinder-api-0\" (UID: \"e1a1ac97-a530-4ba0-8c9b-835a2b576c8d\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:36:04 crc kubenswrapper[4558]: I0120 17:36:04.883294 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e1a1ac97-a530-4ba0-8c9b-835a2b576c8d-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"e1a1ac97-a530-4ba0-8c9b-835a2b576c8d\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:36:04 crc kubenswrapper[4558]: I0120 17:36:04.883463 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e1a1ac97-a530-4ba0-8c9b-835a2b576c8d-logs\") pod \"cinder-api-0\" (UID: \"e1a1ac97-a530-4ba0-8c9b-835a2b576c8d\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:36:04 crc kubenswrapper[4558]: I0120 17:36:04.883537 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e1a1ac97-a530-4ba0-8c9b-835a2b576c8d-public-tls-certs\") pod \"cinder-api-0\" (UID: \"e1a1ac97-a530-4ba0-8c9b-835a2b576c8d\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:36:04 crc kubenswrapper[4558]: I0120 17:36:04.883572 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e1a1ac97-a530-4ba0-8c9b-835a2b576c8d-etc-machine-id\") pod \"cinder-api-0\" (UID: \"e1a1ac97-a530-4ba0-8c9b-835a2b576c8d\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:36:04 crc kubenswrapper[4558]: I0120 17:36:04.883611 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e1a1ac97-a530-4ba0-8c9b-835a2b576c8d-config-data-custom\") pod \"cinder-api-0\" (UID: \"e1a1ac97-a530-4ba0-8c9b-835a2b576c8d\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:36:04 crc kubenswrapper[4558]: I0120 17:36:04.883713 4558 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e1a1ac97-a530-4ba0-8c9b-835a2b576c8d-scripts\") pod \"cinder-api-0\" (UID: \"e1a1ac97-a530-4ba0-8c9b-835a2b576c8d\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:36:04 crc kubenswrapper[4558]: I0120 17:36:04.884388 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1a1ac97-a530-4ba0-8c9b-835a2b576c8d-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"e1a1ac97-a530-4ba0-8c9b-835a2b576c8d\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:36:04 crc kubenswrapper[4558]: E0120 17:36:04.986225 4558 fsHandler.go:119] failed to collect filesystem stats - rootDiskErr: could not stat "/var/lib/containers/storage/overlay/2f68fc732c70a6bb774e13ccc626e2b2d10561956ffe6fe0b275afd6aba0b13f/diff" to get inode usage: stat /var/lib/containers/storage/overlay/2f68fc732c70a6bb774e13ccc626e2b2d10561956ffe6fe0b275afd6aba0b13f/diff: no such file or directory, extraDiskErr: could not stat "/var/log/pods/openstack-kuttl-tests_barbican-worker-5b76bfdfd9-6pwrz_377a0d4c-edf2-4912-a04f-0f814ea9efdb/barbican-worker/0.log" to get inode usage: stat /var/log/pods/openstack-kuttl-tests_barbican-worker-5b76bfdfd9-6pwrz_377a0d4c-edf2-4912-a04f-0f814ea9efdb/barbican-worker/0.log: no such file or directory Jan 20 17:36:04 crc kubenswrapper[4558]: I0120 17:36:04.987154 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1a1ac97-a530-4ba0-8c9b-835a2b576c8d-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"e1a1ac97-a530-4ba0-8c9b-835a2b576c8d\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:36:04 crc kubenswrapper[4558]: I0120 17:36:04.987295 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gjr4x\" (UniqueName: \"kubernetes.io/projected/e1a1ac97-a530-4ba0-8c9b-835a2b576c8d-kube-api-access-gjr4x\") pod \"cinder-api-0\" (UID: \"e1a1ac97-a530-4ba0-8c9b-835a2b576c8d\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:36:04 crc kubenswrapper[4558]: I0120 17:36:04.987332 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1a1ac97-a530-4ba0-8c9b-835a2b576c8d-config-data\") pod \"cinder-api-0\" (UID: \"e1a1ac97-a530-4ba0-8c9b-835a2b576c8d\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:36:04 crc kubenswrapper[4558]: I0120 17:36:04.987365 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e1a1ac97-a530-4ba0-8c9b-835a2b576c8d-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"e1a1ac97-a530-4ba0-8c9b-835a2b576c8d\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:36:04 crc kubenswrapper[4558]: I0120 17:36:04.987402 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e1a1ac97-a530-4ba0-8c9b-835a2b576c8d-logs\") pod \"cinder-api-0\" (UID: \"e1a1ac97-a530-4ba0-8c9b-835a2b576c8d\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:36:04 crc kubenswrapper[4558]: I0120 17:36:04.987432 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e1a1ac97-a530-4ba0-8c9b-835a2b576c8d-public-tls-certs\") pod 
\"cinder-api-0\" (UID: \"e1a1ac97-a530-4ba0-8c9b-835a2b576c8d\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:36:04 crc kubenswrapper[4558]: I0120 17:36:04.987464 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e1a1ac97-a530-4ba0-8c9b-835a2b576c8d-etc-machine-id\") pod \"cinder-api-0\" (UID: \"e1a1ac97-a530-4ba0-8c9b-835a2b576c8d\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:36:04 crc kubenswrapper[4558]: I0120 17:36:04.987490 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e1a1ac97-a530-4ba0-8c9b-835a2b576c8d-config-data-custom\") pod \"cinder-api-0\" (UID: \"e1a1ac97-a530-4ba0-8c9b-835a2b576c8d\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:36:04 crc kubenswrapper[4558]: I0120 17:36:04.987535 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e1a1ac97-a530-4ba0-8c9b-835a2b576c8d-scripts\") pod \"cinder-api-0\" (UID: \"e1a1ac97-a530-4ba0-8c9b-835a2b576c8d\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:36:04 crc kubenswrapper[4558]: I0120 17:36:04.987681 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e1a1ac97-a530-4ba0-8c9b-835a2b576c8d-etc-machine-id\") pod \"cinder-api-0\" (UID: \"e1a1ac97-a530-4ba0-8c9b-835a2b576c8d\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:36:04 crc kubenswrapper[4558]: I0120 17:36:04.988059 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e1a1ac97-a530-4ba0-8c9b-835a2b576c8d-logs\") pod \"cinder-api-0\" (UID: \"e1a1ac97-a530-4ba0-8c9b-835a2b576c8d\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:36:04 crc kubenswrapper[4558]: I0120 17:36:04.994666 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e1a1ac97-a530-4ba0-8c9b-835a2b576c8d-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"e1a1ac97-a530-4ba0-8c9b-835a2b576c8d\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:36:04 crc kubenswrapper[4558]: I0120 17:36:04.994938 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1a1ac97-a530-4ba0-8c9b-835a2b576c8d-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"e1a1ac97-a530-4ba0-8c9b-835a2b576c8d\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:36:04 crc kubenswrapper[4558]: I0120 17:36:04.995292 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e1a1ac97-a530-4ba0-8c9b-835a2b576c8d-scripts\") pod \"cinder-api-0\" (UID: \"e1a1ac97-a530-4ba0-8c9b-835a2b576c8d\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:36:04 crc kubenswrapper[4558]: I0120 17:36:04.996373 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1a1ac97-a530-4ba0-8c9b-835a2b576c8d-config-data\") pod \"cinder-api-0\" (UID: \"e1a1ac97-a530-4ba0-8c9b-835a2b576c8d\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:36:04 crc kubenswrapper[4558]: I0120 17:36:04.998442 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/e1a1ac97-a530-4ba0-8c9b-835a2b576c8d-public-tls-certs\") pod \"cinder-api-0\" (UID: \"e1a1ac97-a530-4ba0-8c9b-835a2b576c8d\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:36:05 crc kubenswrapper[4558]: I0120 17:36:05.000885 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e1a1ac97-a530-4ba0-8c9b-835a2b576c8d-config-data-custom\") pod \"cinder-api-0\" (UID: \"e1a1ac97-a530-4ba0-8c9b-835a2b576c8d\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:36:05 crc kubenswrapper[4558]: I0120 17:36:05.002568 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gjr4x\" (UniqueName: \"kubernetes.io/projected/e1a1ac97-a530-4ba0-8c9b-835a2b576c8d-kube-api-access-gjr4x\") pod \"cinder-api-0\" (UID: \"e1a1ac97-a530-4ba0-8c9b-835a2b576c8d\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:36:05 crc kubenswrapper[4558]: I0120 17:36:05.034400 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:36:05 crc kubenswrapper[4558]: I0120 17:36:05.242387 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/barbican-api-64bd4857c6-2s9h4" podUID="94062d42-28cb-4c8a-afa4-f51458dedc6c" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.0.140:9311/healthcheck\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:36:05 crc kubenswrapper[4558]: I0120 17:36:05.433451 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/barbican-api-f45698ff6-c9mtk" podUID="7de6386e-7693-430d-9753-2d7fa8c31b5d" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.0.131:9311/healthcheck\": read tcp 10.217.0.2:41902->10.217.0.131:9311: read: connection reset by peer" Jan 20 17:36:05 crc kubenswrapper[4558]: I0120 17:36:05.433946 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/barbican-api-f45698ff6-c9mtk" podUID="7de6386e-7693-430d-9753-2d7fa8c31b5d" containerName="barbican-api" probeResult="failure" output="Get \"https://10.217.0.131:9311/healthcheck\": read tcp 10.217.0.2:41900->10.217.0.131:9311: read: connection reset by peer" Jan 20 17:36:05 crc kubenswrapper[4558]: I0120 17:36:05.476666 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:36:05 crc kubenswrapper[4558]: I0120 17:36:05.507352 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack-kuttl-tests/barbican-api-64bd4857c6-2s9h4" podUID="94062d42-28cb-4c8a-afa4-f51458dedc6c" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.0.140:9311/healthcheck\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:36:05 crc kubenswrapper[4558]: I0120 17:36:05.657119 4558 generic.go:334] "Generic (PLEG): container finished" podID="7de6386e-7693-430d-9753-2d7fa8c31b5d" containerID="af363f8fa728755ec981997979783ddd99710b030181fea30ee5db9adb704c8e" exitCode=0 Jan 20 17:36:05 crc kubenswrapper[4558]: I0120 17:36:05.657212 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-f45698ff6-c9mtk" event={"ID":"7de6386e-7693-430d-9753-2d7fa8c31b5d","Type":"ContainerDied","Data":"af363f8fa728755ec981997979783ddd99710b030181fea30ee5db9adb704c8e"} Jan 20 17:36:05 crc kubenswrapper[4558]: I0120 17:36:05.659565 
4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"e1a1ac97-a530-4ba0-8c9b-835a2b576c8d","Type":"ContainerStarted","Data":"88da74428e97c037a8331206de66e098d70b33fcebb603be2a7417ecafe4363a"} Jan 20 17:36:05 crc kubenswrapper[4558]: I0120 17:36:05.662805 4558 scope.go:117] "RemoveContainer" containerID="99dba2fa739dee263fd622caaa8566eb366703c39dd9dcf2a61dbad75ba8d1e5" Jan 20 17:36:05 crc kubenswrapper[4558]: E0120 17:36:05.663117 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-cell0-conductor-conductor\" with CrashLoopBackOff: \"back-off 10s restarting failed container=nova-cell0-conductor-conductor pod=nova-cell0-conductor-0_openstack-kuttl-tests(0d2b6a10-ca95-4d72-a80b-1706822a07a7)\"" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="0d2b6a10-ca95-4d72-a80b-1706822a07a7" Jan 20 17:36:05 crc kubenswrapper[4558]: I0120 17:36:05.916677 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-api-f45698ff6-c9mtk" Jan 20 17:36:06 crc kubenswrapper[4558]: I0120 17:36:06.014077 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7de6386e-7693-430d-9753-2d7fa8c31b5d-internal-tls-certs\") pod \"7de6386e-7693-430d-9753-2d7fa8c31b5d\" (UID: \"7de6386e-7693-430d-9753-2d7fa8c31b5d\") " Jan 20 17:36:06 crc kubenswrapper[4558]: I0120 17:36:06.014195 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7de6386e-7693-430d-9753-2d7fa8c31b5d-config-data\") pod \"7de6386e-7693-430d-9753-2d7fa8c31b5d\" (UID: \"7de6386e-7693-430d-9753-2d7fa8c31b5d\") " Jan 20 17:36:06 crc kubenswrapper[4558]: I0120 17:36:06.014256 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7de6386e-7693-430d-9753-2d7fa8c31b5d-logs\") pod \"7de6386e-7693-430d-9753-2d7fa8c31b5d\" (UID: \"7de6386e-7693-430d-9753-2d7fa8c31b5d\") " Jan 20 17:36:06 crc kubenswrapper[4558]: I0120 17:36:06.014295 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nbd52\" (UniqueName: \"kubernetes.io/projected/7de6386e-7693-430d-9753-2d7fa8c31b5d-kube-api-access-nbd52\") pod \"7de6386e-7693-430d-9753-2d7fa8c31b5d\" (UID: \"7de6386e-7693-430d-9753-2d7fa8c31b5d\") " Jan 20 17:36:06 crc kubenswrapper[4558]: I0120 17:36:06.014324 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7de6386e-7693-430d-9753-2d7fa8c31b5d-public-tls-certs\") pod \"7de6386e-7693-430d-9753-2d7fa8c31b5d\" (UID: \"7de6386e-7693-430d-9753-2d7fa8c31b5d\") " Jan 20 17:36:06 crc kubenswrapper[4558]: I0120 17:36:06.014441 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7de6386e-7693-430d-9753-2d7fa8c31b5d-combined-ca-bundle\") pod \"7de6386e-7693-430d-9753-2d7fa8c31b5d\" (UID: \"7de6386e-7693-430d-9753-2d7fa8c31b5d\") " Jan 20 17:36:06 crc kubenswrapper[4558]: I0120 17:36:06.014517 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7de6386e-7693-430d-9753-2d7fa8c31b5d-config-data-custom\") pod \"7de6386e-7693-430d-9753-2d7fa8c31b5d\" (UID: 
\"7de6386e-7693-430d-9753-2d7fa8c31b5d\") " Jan 20 17:36:06 crc kubenswrapper[4558]: I0120 17:36:06.018375 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7de6386e-7693-430d-9753-2d7fa8c31b5d-logs" (OuterVolumeSpecName: "logs") pod "7de6386e-7693-430d-9753-2d7fa8c31b5d" (UID: "7de6386e-7693-430d-9753-2d7fa8c31b5d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:36:06 crc kubenswrapper[4558]: I0120 17:36:06.020555 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7de6386e-7693-430d-9753-2d7fa8c31b5d-kube-api-access-nbd52" (OuterVolumeSpecName: "kube-api-access-nbd52") pod "7de6386e-7693-430d-9753-2d7fa8c31b5d" (UID: "7de6386e-7693-430d-9753-2d7fa8c31b5d"). InnerVolumeSpecName "kube-api-access-nbd52". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:36:06 crc kubenswrapper[4558]: I0120 17:36:06.026350 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7de6386e-7693-430d-9753-2d7fa8c31b5d-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "7de6386e-7693-430d-9753-2d7fa8c31b5d" (UID: "7de6386e-7693-430d-9753-2d7fa8c31b5d"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:06 crc kubenswrapper[4558]: I0120 17:36:06.064118 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7de6386e-7693-430d-9753-2d7fa8c31b5d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7de6386e-7693-430d-9753-2d7fa8c31b5d" (UID: "7de6386e-7693-430d-9753-2d7fa8c31b5d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:06 crc kubenswrapper[4558]: I0120 17:36:06.078249 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7de6386e-7693-430d-9753-2d7fa8c31b5d-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "7de6386e-7693-430d-9753-2d7fa8c31b5d" (UID: "7de6386e-7693-430d-9753-2d7fa8c31b5d"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:06 crc kubenswrapper[4558]: I0120 17:36:06.078826 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7de6386e-7693-430d-9753-2d7fa8c31b5d-config-data" (OuterVolumeSpecName: "config-data") pod "7de6386e-7693-430d-9753-2d7fa8c31b5d" (UID: "7de6386e-7693-430d-9753-2d7fa8c31b5d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:06 crc kubenswrapper[4558]: I0120 17:36:06.086091 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7de6386e-7693-430d-9753-2d7fa8c31b5d-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "7de6386e-7693-430d-9753-2d7fa8c31b5d" (UID: "7de6386e-7693-430d-9753-2d7fa8c31b5d"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:06 crc kubenswrapper[4558]: I0120 17:36:06.118064 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7de6386e-7693-430d-9753-2d7fa8c31b5d-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:06 crc kubenswrapper[4558]: I0120 17:36:06.118299 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7de6386e-7693-430d-9753-2d7fa8c31b5d-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:06 crc kubenswrapper[4558]: I0120 17:36:06.118311 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7de6386e-7693-430d-9753-2d7fa8c31b5d-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:06 crc kubenswrapper[4558]: I0120 17:36:06.118322 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nbd52\" (UniqueName: \"kubernetes.io/projected/7de6386e-7693-430d-9753-2d7fa8c31b5d-kube-api-access-nbd52\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:06 crc kubenswrapper[4558]: I0120 17:36:06.118333 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7de6386e-7693-430d-9753-2d7fa8c31b5d-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:06 crc kubenswrapper[4558]: I0120 17:36:06.118342 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7de6386e-7693-430d-9753-2d7fa8c31b5d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:06 crc kubenswrapper[4558]: I0120 17:36:06.118351 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7de6386e-7693-430d-9753-2d7fa8c31b5d-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:06 crc kubenswrapper[4558]: I0120 17:36:06.583236 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e228514c-6d22-4527-b365-913e3ea3cfdb" path="/var/lib/kubelet/pods/e228514c-6d22-4527-b365-913e3ea3cfdb/volumes" Jan 20 17:36:06 crc kubenswrapper[4558]: I0120 17:36:06.680252 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"e1a1ac97-a530-4ba0-8c9b-835a2b576c8d","Type":"ContainerStarted","Data":"f401d7772f0d907bbf33421516a8e607439cd73653fec584dcb05860382f5a83"} Jan 20 17:36:06 crc kubenswrapper[4558]: I0120 17:36:06.682014 4558 generic.go:334] "Generic (PLEG): container finished" podID="0def1343-9e4b-4f74-84bd-3212688b59ce" containerID="4777bd55f602e6282b42a942acc808f0478764354d0970617b2e6c1dddd4da85" exitCode=1 Jan 20 17:36:06 crc kubenswrapper[4558]: I0120 17:36:06.682077 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"0def1343-9e4b-4f74-84bd-3212688b59ce","Type":"ContainerDied","Data":"4777bd55f602e6282b42a942acc808f0478764354d0970617b2e6c1dddd4da85"} Jan 20 17:36:06 crc kubenswrapper[4558]: I0120 17:36:06.682117 4558 scope.go:117] "RemoveContainer" containerID="620f238c2702c3c6e8d341adc99ebb5ddd89a8d718eb1447a8506a134b9b1dea" Jan 20 17:36:06 crc kubenswrapper[4558]: I0120 17:36:06.682776 4558 scope.go:117] "RemoveContainer" containerID="4777bd55f602e6282b42a942acc808f0478764354d0970617b2e6c1dddd4da85" Jan 20 17:36:06 crc kubenswrapper[4558]: E0120 17:36:06.683144 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed 
to \"StartContainer\" for \"nova-cell1-conductor-conductor\" with CrashLoopBackOff: \"back-off 10s restarting failed container=nova-cell1-conductor-conductor pod=nova-cell1-conductor-0_openstack-kuttl-tests(0def1343-9e4b-4f74-84bd-3212688b59ce)\"" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podUID="0def1343-9e4b-4f74-84bd-3212688b59ce" Jan 20 17:36:06 crc kubenswrapper[4558]: I0120 17:36:06.684741 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-f45698ff6-c9mtk" event={"ID":"7de6386e-7693-430d-9753-2d7fa8c31b5d","Type":"ContainerDied","Data":"da69badc281328054557a9a09ff8ac7156fbf69e9baa4127114183ce9151e616"} Jan 20 17:36:06 crc kubenswrapper[4558]: I0120 17:36:06.684854 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-api-f45698ff6-c9mtk" Jan 20 17:36:06 crc kubenswrapper[4558]: I0120 17:36:06.756548 4558 scope.go:117] "RemoveContainer" containerID="af363f8fa728755ec981997979783ddd99710b030181fea30ee5db9adb704c8e" Jan 20 17:36:06 crc kubenswrapper[4558]: I0120 17:36:06.763094 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-api-f45698ff6-c9mtk"] Jan 20 17:36:06 crc kubenswrapper[4558]: I0120 17:36:06.780229 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-api-f45698ff6-c9mtk"] Jan 20 17:36:06 crc kubenswrapper[4558]: I0120 17:36:06.801705 4558 scope.go:117] "RemoveContainer" containerID="65853762fe6cac36e4de6676f22c99039ab79573b354ec57daadc513c082bba2" Jan 20 17:36:07 crc kubenswrapper[4558]: I0120 17:36:07.328350 4558 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:36:07 crc kubenswrapper[4558]: I0120 17:36:07.329384 4558 scope.go:117] "RemoveContainer" containerID="99dba2fa739dee263fd622caaa8566eb366703c39dd9dcf2a61dbad75ba8d1e5" Jan 20 17:36:07 crc kubenswrapper[4558]: E0120 17:36:07.329717 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-cell0-conductor-conductor\" with CrashLoopBackOff: \"back-off 10s restarting failed container=nova-cell0-conductor-conductor pod=nova-cell0-conductor-0_openstack-kuttl-tests(0d2b6a10-ca95-4d72-a80b-1706822a07a7)\"" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="0d2b6a10-ca95-4d72-a80b-1706822a07a7" Jan 20 17:36:07 crc kubenswrapper[4558]: I0120 17:36:07.486280 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/neutron-6f4bb7946d-nl7kt" podUID="ba60e66e-45cc-48a7-92dc-126983f4aa43" containerName="neutron-httpd" probeResult="failure" output="Get \"https://10.217.0.121:9696/\": dial tcp 10.217.0.121:9696: connect: connection refused" Jan 20 17:36:07 crc kubenswrapper[4558]: I0120 17:36:07.699612 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"e1a1ac97-a530-4ba0-8c9b-835a2b576c8d","Type":"ContainerStarted","Data":"cfc0219326dabcde321d0d0fb037a9ecb6ea36b64ee17a015344e0990a28b989"} Jan 20 17:36:07 crc kubenswrapper[4558]: I0120 17:36:07.700825 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:36:07 crc kubenswrapper[4558]: I0120 17:36:07.726651 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/cinder-api-0" podStartSLOduration=3.726639349 podStartE2EDuration="3.726639349s" 
podCreationTimestamp="2026-01-20 17:36:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:36:07.714209937 +0000 UTC m=+3261.474547904" watchObservedRunningTime="2026-01-20 17:36:07.726639349 +0000 UTC m=+3261.486977316" Jan 20 17:36:07 crc kubenswrapper[4558]: I0120 17:36:07.757189 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:36:07 crc kubenswrapper[4558]: I0120 17:36:07.757222 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:36:07 crc kubenswrapper[4558]: I0120 17:36:07.792288 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:36:07 crc kubenswrapper[4558]: I0120 17:36:07.799315 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:36:08 crc kubenswrapper[4558]: I0120 17:36:08.189295 4558 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:36:08 crc kubenswrapper[4558]: I0120 17:36:08.190632 4558 scope.go:117] "RemoveContainer" containerID="4777bd55f602e6282b42a942acc808f0478764354d0970617b2e6c1dddd4da85" Jan 20 17:36:08 crc kubenswrapper[4558]: E0120 17:36:08.191288 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-cell1-conductor-conductor\" with CrashLoopBackOff: \"back-off 10s restarting failed container=nova-cell1-conductor-conductor pod=nova-cell1-conductor-0_openstack-kuttl-tests(0def1343-9e4b-4f74-84bd-3212688b59ce)\"" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podUID="0def1343-9e4b-4f74-84bd-3212688b59ce" Jan 20 17:36:08 crc kubenswrapper[4558]: I0120 17:36:08.515342 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack-kuttl-tests/barbican-api-64bd4857c6-2s9h4" podUID="94062d42-28cb-4c8a-afa4-f51458dedc6c" containerName="barbican-api" probeResult="failure" output="Get \"https://10.217.0.140:9311/healthcheck\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:36:08 crc kubenswrapper[4558]: I0120 17:36:08.580649 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7de6386e-7693-430d-9753-2d7fa8c31b5d" path="/var/lib/kubelet/pods/7de6386e-7693-430d-9753-2d7fa8c31b5d/volumes" Jan 20 17:36:08 crc kubenswrapper[4558]: I0120 17:36:08.592084 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/neutron-7d788974f4-sf4l4" Jan 20 17:36:08 crc kubenswrapper[4558]: I0120 17:36:08.716286 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:36:08 crc kubenswrapper[4558]: I0120 17:36:08.716329 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:36:09 crc kubenswrapper[4558]: I0120 17:36:09.189621 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:36:09 crc kubenswrapper[4558]: I0120 17:36:09.189688 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/openstack-cell1-galera-0" 
Jan 20 17:36:09 crc kubenswrapper[4558]: I0120 17:36:09.267406 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:36:09 crc kubenswrapper[4558]: I0120 17:36:09.350002 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:36:09 crc kubenswrapper[4558]: I0120 17:36:09.350070 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:36:09 crc kubenswrapper[4558]: I0120 17:36:09.363094 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:36:09 crc kubenswrapper[4558]: I0120 17:36:09.390532 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:36:09 crc kubenswrapper[4558]: I0120 17:36:09.426800 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:36:09 crc kubenswrapper[4558]: I0120 17:36:09.522065 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/neutron-6865f4c68b-s7dm7" podUID="b6b873be-e034-4d85-b131-548eff8013ed" containerName="neutron-httpd" probeResult="failure" output="Get \"https://10.217.0.124:9696/\": dial tcp 10.217.0.124:9696: connect: connection refused" Jan 20 17:36:09 crc kubenswrapper[4558]: W0120 17:36:09.644462 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb65a4359_8f00_46fd_acd3_02731c1e1389.slice/crio-e0ed3312930991cbf925c1b0e5d1e007a5bd9ddca399ef8faa0ede5c15dcce69.scope WatchSource:0}: Error finding container e0ed3312930991cbf925c1b0e5d1e007a5bd9ddca399ef8faa0ede5c15dcce69: Status 404 returned error can't find the container with id e0ed3312930991cbf925c1b0e5d1e007a5bd9ddca399ef8faa0ede5c15dcce69 Jan 20 17:36:09 crc kubenswrapper[4558]: W0120 17:36:09.644964 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7de6386e_7693_430d_9753_2d7fa8c31b5d.slice/crio-af363f8fa728755ec981997979783ddd99710b030181fea30ee5db9adb704c8e.scope WatchSource:0}: Error finding container af363f8fa728755ec981997979783ddd99710b030181fea30ee5db9adb704c8e: Status 404 returned error can't find the container with id af363f8fa728755ec981997979783ddd99710b030181fea30ee5db9adb704c8e Jan 20 17:36:09 crc kubenswrapper[4558]: W0120 17:36:09.645454 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbbb20173_fee8_4323_afbb_f2bbbec3e978.slice/crio-ea6d1ca32a116c466cabb20cadcc9e9af27e7b82039308f7f1df1e6178fa2a36.scope WatchSource:0}: Error finding container ea6d1ca32a116c466cabb20cadcc9e9af27e7b82039308f7f1df1e6178fa2a36: Status 404 returned error can't find the container with id ea6d1ca32a116c466cabb20cadcc9e9af27e7b82039308f7f1df1e6178fa2a36 Jan 20 17:36:09 crc kubenswrapper[4558]: W0120 17:36:09.645808 4558 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc35b601d_7c76_46a4_9703_7a52ef1bbf73.slice": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc35b601d_7c76_46a4_9703_7a52ef1bbf73.slice: no such file or 
directory Jan 20 17:36:09 crc kubenswrapper[4558]: W0120 17:36:09.649731 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbbb20173_fee8_4323_afbb_f2bbbec3e978.slice/crio-3ff2458b3f3311a509b35bc6e23de18f9f9253176e2e50da406ead896867e39d.scope WatchSource:0}: Error finding container 3ff2458b3f3311a509b35bc6e23de18f9f9253176e2e50da406ead896867e39d: Status 404 returned error can't find the container with id 3ff2458b3f3311a509b35bc6e23de18f9f9253176e2e50da406ead896867e39d Jan 20 17:36:09 crc kubenswrapper[4558]: W0120 17:36:09.650045 4558 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod89eb3a4c_0235_4583_ad0a_d015228d8d6b.slice": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod89eb3a4c_0235_4583_ad0a_d015228d8d6b.slice: no such file or directory Jan 20 17:36:09 crc kubenswrapper[4558]: I0120 17:36:09.732834 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_neutron-7d788974f4-sf4l4_13d3779b-9cf3-47f3-b330-a3adfd5b10f7/neutron-httpd/0.log" Jan 20 17:36:09 crc kubenswrapper[4558]: I0120 17:36:09.733463 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_neutron-7d788974f4-sf4l4_13d3779b-9cf3-47f3-b330-a3adfd5b10f7/neutron-api/0.log" Jan 20 17:36:09 crc kubenswrapper[4558]: I0120 17:36:09.733511 4558 generic.go:334] "Generic (PLEG): container finished" podID="13d3779b-9cf3-47f3-b330-a3adfd5b10f7" containerID="8a0c4e15f43d48c5c3bf5cca5d164426d653ec27a6971028af1b9300e881743e" exitCode=137 Jan 20 17:36:09 crc kubenswrapper[4558]: I0120 17:36:09.733527 4558 generic.go:334] "Generic (PLEG): container finished" podID="13d3779b-9cf3-47f3-b330-a3adfd5b10f7" containerID="78d0d5adf4dba87cab66796976c167974cf7f1657e1f2e864bdb469b33fef266" exitCode=137 Jan 20 17:36:09 crc kubenswrapper[4558]: I0120 17:36:09.733598 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-7d788974f4-sf4l4" event={"ID":"13d3779b-9cf3-47f3-b330-a3adfd5b10f7","Type":"ContainerDied","Data":"8a0c4e15f43d48c5c3bf5cca5d164426d653ec27a6971028af1b9300e881743e"} Jan 20 17:36:09 crc kubenswrapper[4558]: I0120 17:36:09.733624 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-7d788974f4-sf4l4" event={"ID":"13d3779b-9cf3-47f3-b330-a3adfd5b10f7","Type":"ContainerDied","Data":"78d0d5adf4dba87cab66796976c167974cf7f1657e1f2e864bdb469b33fef266"} Jan 20 17:36:09 crc kubenswrapper[4558]: I0120 17:36:09.736233 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_neutron-6f4bb7946d-nl7kt_ba60e66e-45cc-48a7-92dc-126983f4aa43/neutron-api/0.log" Jan 20 17:36:09 crc kubenswrapper[4558]: I0120 17:36:09.736267 4558 generic.go:334] "Generic (PLEG): container finished" podID="ba60e66e-45cc-48a7-92dc-126983f4aa43" containerID="b3cfb60543d88a74e156430e8c9432a06e86b67228b8b014df284600429f9b92" exitCode=137 Jan 20 17:36:09 crc kubenswrapper[4558]: I0120 17:36:09.736983 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-6f4bb7946d-nl7kt" event={"ID":"ba60e66e-45cc-48a7-92dc-126983f4aa43","Type":"ContainerDied","Data":"b3cfb60543d88a74e156430e8c9432a06e86b67228b8b014df284600429f9b92"} Jan 20 17:36:09 crc kubenswrapper[4558]: I0120 17:36:09.769196 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="ready" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:36:09 crc kubenswrapper[4558]: I0120 17:36:09.815282 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:36:09 crc kubenswrapper[4558]: I0120 17:36:09.828593 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:36:10 crc kubenswrapper[4558]: I0120 17:36:10.075540 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_neutron-6f4bb7946d-nl7kt_ba60e66e-45cc-48a7-92dc-126983f4aa43/neutron-api/0.log" Jan 20 17:36:10 crc kubenswrapper[4558]: I0120 17:36:10.075613 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-6f4bb7946d-nl7kt" Jan 20 17:36:10 crc kubenswrapper[4558]: I0120 17:36:10.082267 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_neutron-7d788974f4-sf4l4_13d3779b-9cf3-47f3-b330-a3adfd5b10f7/neutron-httpd/0.log" Jan 20 17:36:10 crc kubenswrapper[4558]: I0120 17:36:10.082737 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_neutron-7d788974f4-sf4l4_13d3779b-9cf3-47f3-b330-a3adfd5b10f7/neutron-api/0.log" Jan 20 17:36:10 crc kubenswrapper[4558]: I0120 17:36:10.082871 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-7d788974f4-sf4l4" Jan 20 17:36:10 crc kubenswrapper[4558]: I0120 17:36:10.130495 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/ba60e66e-45cc-48a7-92dc-126983f4aa43-httpd-config\") pod \"ba60e66e-45cc-48a7-92dc-126983f4aa43\" (UID: \"ba60e66e-45cc-48a7-92dc-126983f4aa43\") " Jan 20 17:36:10 crc kubenswrapper[4558]: I0120 17:36:10.130593 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ba60e66e-45cc-48a7-92dc-126983f4aa43-internal-tls-certs\") pod \"ba60e66e-45cc-48a7-92dc-126983f4aa43\" (UID: \"ba60e66e-45cc-48a7-92dc-126983f4aa43\") " Jan 20 17:36:10 crc kubenswrapper[4558]: I0120 17:36:10.130658 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ba60e66e-45cc-48a7-92dc-126983f4aa43-public-tls-certs\") pod \"ba60e66e-45cc-48a7-92dc-126983f4aa43\" (UID: \"ba60e66e-45cc-48a7-92dc-126983f4aa43\") " Jan 20 17:36:10 crc kubenswrapper[4558]: I0120 17:36:10.130806 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/ba60e66e-45cc-48a7-92dc-126983f4aa43-ovndb-tls-certs\") pod \"ba60e66e-45cc-48a7-92dc-126983f4aa43\" (UID: \"ba60e66e-45cc-48a7-92dc-126983f4aa43\") " Jan 20 17:36:10 crc kubenswrapper[4558]: I0120 17:36:10.130840 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/13d3779b-9cf3-47f3-b330-a3adfd5b10f7-config\") pod \"13d3779b-9cf3-47f3-b330-a3adfd5b10f7\" (UID: \"13d3779b-9cf3-47f3-b330-a3adfd5b10f7\") " Jan 20 17:36:10 crc kubenswrapper[4558]: I0120 17:36:10.131065 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba60e66e-45cc-48a7-92dc-126983f4aa43-combined-ca-bundle\") pod 
\"ba60e66e-45cc-48a7-92dc-126983f4aa43\" (UID: \"ba60e66e-45cc-48a7-92dc-126983f4aa43\") " Jan 20 17:36:10 crc kubenswrapper[4558]: I0120 17:36:10.131122 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/13d3779b-9cf3-47f3-b330-a3adfd5b10f7-ovndb-tls-certs\") pod \"13d3779b-9cf3-47f3-b330-a3adfd5b10f7\" (UID: \"13d3779b-9cf3-47f3-b330-a3adfd5b10f7\") " Jan 20 17:36:10 crc kubenswrapper[4558]: I0120 17:36:10.131196 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13d3779b-9cf3-47f3-b330-a3adfd5b10f7-combined-ca-bundle\") pod \"13d3779b-9cf3-47f3-b330-a3adfd5b10f7\" (UID: \"13d3779b-9cf3-47f3-b330-a3adfd5b10f7\") " Jan 20 17:36:10 crc kubenswrapper[4558]: I0120 17:36:10.131305 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/ba60e66e-45cc-48a7-92dc-126983f4aa43-config\") pod \"ba60e66e-45cc-48a7-92dc-126983f4aa43\" (UID: \"ba60e66e-45cc-48a7-92dc-126983f4aa43\") " Jan 20 17:36:10 crc kubenswrapper[4558]: I0120 17:36:10.131341 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/13d3779b-9cf3-47f3-b330-a3adfd5b10f7-internal-tls-certs\") pod \"13d3779b-9cf3-47f3-b330-a3adfd5b10f7\" (UID: \"13d3779b-9cf3-47f3-b330-a3adfd5b10f7\") " Jan 20 17:36:10 crc kubenswrapper[4558]: I0120 17:36:10.131367 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/13d3779b-9cf3-47f3-b330-a3adfd5b10f7-httpd-config\") pod \"13d3779b-9cf3-47f3-b330-a3adfd5b10f7\" (UID: \"13d3779b-9cf3-47f3-b330-a3adfd5b10f7\") " Jan 20 17:36:10 crc kubenswrapper[4558]: I0120 17:36:10.131419 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fzxkx\" (UniqueName: \"kubernetes.io/projected/ba60e66e-45cc-48a7-92dc-126983f4aa43-kube-api-access-fzxkx\") pod \"ba60e66e-45cc-48a7-92dc-126983f4aa43\" (UID: \"ba60e66e-45cc-48a7-92dc-126983f4aa43\") " Jan 20 17:36:10 crc kubenswrapper[4558]: I0120 17:36:10.131596 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/13d3779b-9cf3-47f3-b330-a3adfd5b10f7-public-tls-certs\") pod \"13d3779b-9cf3-47f3-b330-a3adfd5b10f7\" (UID: \"13d3779b-9cf3-47f3-b330-a3adfd5b10f7\") " Jan 20 17:36:10 crc kubenswrapper[4558]: I0120 17:36:10.131668 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8lmnc\" (UniqueName: \"kubernetes.io/projected/13d3779b-9cf3-47f3-b330-a3adfd5b10f7-kube-api-access-8lmnc\") pod \"13d3779b-9cf3-47f3-b330-a3adfd5b10f7\" (UID: \"13d3779b-9cf3-47f3-b330-a3adfd5b10f7\") " Jan 20 17:36:10 crc kubenswrapper[4558]: I0120 17:36:10.146751 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba60e66e-45cc-48a7-92dc-126983f4aa43-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "ba60e66e-45cc-48a7-92dc-126983f4aa43" (UID: "ba60e66e-45cc-48a7-92dc-126983f4aa43"). InnerVolumeSpecName "httpd-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:10 crc kubenswrapper[4558]: I0120 17:36:10.148343 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/13d3779b-9cf3-47f3-b330-a3adfd5b10f7-kube-api-access-8lmnc" (OuterVolumeSpecName: "kube-api-access-8lmnc") pod "13d3779b-9cf3-47f3-b330-a3adfd5b10f7" (UID: "13d3779b-9cf3-47f3-b330-a3adfd5b10f7"). InnerVolumeSpecName "kube-api-access-8lmnc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:36:10 crc kubenswrapper[4558]: I0120 17:36:10.150186 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/13d3779b-9cf3-47f3-b330-a3adfd5b10f7-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "13d3779b-9cf3-47f3-b330-a3adfd5b10f7" (UID: "13d3779b-9cf3-47f3-b330-a3adfd5b10f7"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:10 crc kubenswrapper[4558]: I0120 17:36:10.150349 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ba60e66e-45cc-48a7-92dc-126983f4aa43-kube-api-access-fzxkx" (OuterVolumeSpecName: "kube-api-access-fzxkx") pod "ba60e66e-45cc-48a7-92dc-126983f4aa43" (UID: "ba60e66e-45cc-48a7-92dc-126983f4aa43"). InnerVolumeSpecName "kube-api-access-fzxkx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:36:10 crc kubenswrapper[4558]: I0120 17:36:10.196660 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba60e66e-45cc-48a7-92dc-126983f4aa43-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "ba60e66e-45cc-48a7-92dc-126983f4aa43" (UID: "ba60e66e-45cc-48a7-92dc-126983f4aa43"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:10 crc kubenswrapper[4558]: I0120 17:36:10.197220 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba60e66e-45cc-48a7-92dc-126983f4aa43-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "ba60e66e-45cc-48a7-92dc-126983f4aa43" (UID: "ba60e66e-45cc-48a7-92dc-126983f4aa43"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:10 crc kubenswrapper[4558]: I0120 17:36:10.197718 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/13d3779b-9cf3-47f3-b330-a3adfd5b10f7-config" (OuterVolumeSpecName: "config") pod "13d3779b-9cf3-47f3-b330-a3adfd5b10f7" (UID: "13d3779b-9cf3-47f3-b330-a3adfd5b10f7"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:10 crc kubenswrapper[4558]: I0120 17:36:10.208266 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/13d3779b-9cf3-47f3-b330-a3adfd5b10f7-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "13d3779b-9cf3-47f3-b330-a3adfd5b10f7" (UID: "13d3779b-9cf3-47f3-b330-a3adfd5b10f7"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:10 crc kubenswrapper[4558]: I0120 17:36:10.221976 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/13d3779b-9cf3-47f3-b330-a3adfd5b10f7-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "13d3779b-9cf3-47f3-b330-a3adfd5b10f7" (UID: "13d3779b-9cf3-47f3-b330-a3adfd5b10f7"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:10 crc kubenswrapper[4558]: I0120 17:36:10.224327 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/13d3779b-9cf3-47f3-b330-a3adfd5b10f7-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "13d3779b-9cf3-47f3-b330-a3adfd5b10f7" (UID: "13d3779b-9cf3-47f3-b330-a3adfd5b10f7"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:10 crc kubenswrapper[4558]: I0120 17:36:10.224863 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba60e66e-45cc-48a7-92dc-126983f4aa43-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ba60e66e-45cc-48a7-92dc-126983f4aa43" (UID: "ba60e66e-45cc-48a7-92dc-126983f4aa43"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:10 crc kubenswrapper[4558]: I0120 17:36:10.231385 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba60e66e-45cc-48a7-92dc-126983f4aa43-config" (OuterVolumeSpecName: "config") pod "ba60e66e-45cc-48a7-92dc-126983f4aa43" (UID: "ba60e66e-45cc-48a7-92dc-126983f4aa43"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:10 crc kubenswrapper[4558]: I0120 17:36:10.235501 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/13d3779b-9cf3-47f3-b330-a3adfd5b10f7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "13d3779b-9cf3-47f3-b330-a3adfd5b10f7" (UID: "13d3779b-9cf3-47f3-b330-a3adfd5b10f7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:10 crc kubenswrapper[4558]: I0120 17:36:10.240255 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/ba60e66e-45cc-48a7-92dc-126983f4aa43-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:10 crc kubenswrapper[4558]: I0120 17:36:10.240285 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/13d3779b-9cf3-47f3-b330-a3adfd5b10f7-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:10 crc kubenswrapper[4558]: I0120 17:36:10.240300 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/13d3779b-9cf3-47f3-b330-a3adfd5b10f7-httpd-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:10 crc kubenswrapper[4558]: I0120 17:36:10.240313 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fzxkx\" (UniqueName: \"kubernetes.io/projected/ba60e66e-45cc-48a7-92dc-126983f4aa43-kube-api-access-fzxkx\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:10 crc kubenswrapper[4558]: I0120 17:36:10.240323 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/13d3779b-9cf3-47f3-b330-a3adfd5b10f7-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:10 crc kubenswrapper[4558]: I0120 17:36:10.240335 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8lmnc\" (UniqueName: \"kubernetes.io/projected/13d3779b-9cf3-47f3-b330-a3adfd5b10f7-kube-api-access-8lmnc\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:10 crc kubenswrapper[4558]: I0120 17:36:10.240347 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" 
(UniqueName: \"kubernetes.io/secret/ba60e66e-45cc-48a7-92dc-126983f4aa43-httpd-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:10 crc kubenswrapper[4558]: I0120 17:36:10.240356 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ba60e66e-45cc-48a7-92dc-126983f4aa43-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:10 crc kubenswrapper[4558]: I0120 17:36:10.240365 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ba60e66e-45cc-48a7-92dc-126983f4aa43-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:10 crc kubenswrapper[4558]: I0120 17:36:10.240374 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/13d3779b-9cf3-47f3-b330-a3adfd5b10f7-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:10 crc kubenswrapper[4558]: I0120 17:36:10.240382 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba60e66e-45cc-48a7-92dc-126983f4aa43-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:10 crc kubenswrapper[4558]: I0120 17:36:10.240392 4558 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/13d3779b-9cf3-47f3-b330-a3adfd5b10f7-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:10 crc kubenswrapper[4558]: I0120 17:36:10.240400 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/13d3779b-9cf3-47f3-b330-a3adfd5b10f7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:10 crc kubenswrapper[4558]: I0120 17:36:10.240919 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba60e66e-45cc-48a7-92dc-126983f4aa43-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "ba60e66e-45cc-48a7-92dc-126983f4aa43" (UID: "ba60e66e-45cc-48a7-92dc-126983f4aa43"). InnerVolumeSpecName "ovndb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:10 crc kubenswrapper[4558]: I0120 17:36:10.246336 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/barbican-api-64bd4857c6-2s9h4" podUID="94062d42-28cb-4c8a-afa4-f51458dedc6c" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.0.140:9311/healthcheck\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:36:10 crc kubenswrapper[4558]: I0120 17:36:10.342300 4558 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/ba60e66e-45cc-48a7-92dc-126983f4aa43-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:10 crc kubenswrapper[4558]: I0120 17:36:10.512331 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack-kuttl-tests/barbican-api-64bd4857c6-2s9h4" podUID="94062d42-28cb-4c8a-afa4-f51458dedc6c" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.0.140:9311/healthcheck\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:36:10 crc kubenswrapper[4558]: I0120 17:36:10.530565 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:36:10 crc kubenswrapper[4558]: I0120 17:36:10.534969 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:36:10 crc kubenswrapper[4558]: I0120 17:36:10.746669 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_neutron-7d788974f4-sf4l4_13d3779b-9cf3-47f3-b330-a3adfd5b10f7/neutron-httpd/0.log" Jan 20 17:36:10 crc kubenswrapper[4558]: I0120 17:36:10.748923 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_neutron-7d788974f4-sf4l4_13d3779b-9cf3-47f3-b330-a3adfd5b10f7/neutron-api/0.log" Jan 20 17:36:10 crc kubenswrapper[4558]: I0120 17:36:10.748998 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-7d788974f4-sf4l4" event={"ID":"13d3779b-9cf3-47f3-b330-a3adfd5b10f7","Type":"ContainerDied","Data":"d610015d2bf74a42a31afff0f9df8d4dc81ed4fbf3c4e0bca7a1f70d778bab5a"} Jan 20 17:36:10 crc kubenswrapper[4558]: I0120 17:36:10.749041 4558 scope.go:117] "RemoveContainer" containerID="8a0c4e15f43d48c5c3bf5cca5d164426d653ec27a6971028af1b9300e881743e" Jan 20 17:36:10 crc kubenswrapper[4558]: I0120 17:36:10.749212 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-7d788974f4-sf4l4" Jan 20 17:36:10 crc kubenswrapper[4558]: I0120 17:36:10.753609 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_neutron-6f4bb7946d-nl7kt_ba60e66e-45cc-48a7-92dc-126983f4aa43/neutron-api/0.log" Jan 20 17:36:10 crc kubenswrapper[4558]: I0120 17:36:10.754963 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-6f4bb7946d-nl7kt" Jan 20 17:36:10 crc kubenswrapper[4558]: I0120 17:36:10.754991 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-6f4bb7946d-nl7kt" event={"ID":"ba60e66e-45cc-48a7-92dc-126983f4aa43","Type":"ContainerDied","Data":"28b5bc97035406c2509fb60ee1ac0ff1e8441d4a30293b4a1c159195284409ff"} Jan 20 17:36:10 crc kubenswrapper[4558]: I0120 17:36:10.771824 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-7d788974f4-sf4l4"] Jan 20 17:36:10 crc kubenswrapper[4558]: I0120 17:36:10.776673 4558 scope.go:117] "RemoveContainer" containerID="78d0d5adf4dba87cab66796976c167974cf7f1657e1f2e864bdb469b33fef266" Jan 20 17:36:10 crc kubenswrapper[4558]: I0120 17:36:10.781211 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-7d788974f4-sf4l4"] Jan 20 17:36:10 crc kubenswrapper[4558]: I0120 17:36:10.788036 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-6f4bb7946d-nl7kt"] Jan 20 17:36:10 crc kubenswrapper[4558]: I0120 17:36:10.793603 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-6f4bb7946d-nl7kt"] Jan 20 17:36:10 crc kubenswrapper[4558]: I0120 17:36:10.798607 4558 scope.go:117] "RemoveContainer" containerID="f166cbf6a54dbadf426ff3b23a2dc8fc38d9019e839086e6e93b425c311ae2f8" Jan 20 17:36:10 crc kubenswrapper[4558]: I0120 17:36:10.816403 4558 scope.go:117] "RemoveContainer" containerID="b3cfb60543d88a74e156430e8c9432a06e86b67228b8b014df284600429f9b92" Jan 20 17:36:11 crc kubenswrapper[4558]: I0120 17:36:11.503339 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/barbican-api-64bd4857c6-2s9h4" podUID="94062d42-28cb-4c8a-afa4-f51458dedc6c" containerName="barbican-api" probeResult="failure" output="Get \"https://10.217.0.140:9311/healthcheck\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:36:11 crc kubenswrapper[4558]: I0120 17:36:11.503613 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/barbican-api-64bd4857c6-2s9h4" Jan 20 17:36:11 crc kubenswrapper[4558]: I0120 17:36:11.512329 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/barbican-api-64bd4857c6-2s9h4" Jan 20 17:36:11 crc kubenswrapper[4558]: I0120 17:36:11.618640 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-api-55c969d68-6j7rc"] Jan 20 17:36:11 crc kubenswrapper[4558]: I0120 17:36:11.618912 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-api-55c969d68-6j7rc" podUID="44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f" containerName="barbican-api-log" containerID="cri-o://f68d263a2081165f043b77271abdfd4da85864255da0492403c4be2647086b5e" gracePeriod=30 Jan 20 17:36:11 crc kubenswrapper[4558]: I0120 17:36:11.619205 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-api-55c969d68-6j7rc" podUID="44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f" containerName="barbican-api" containerID="cri-o://b689cf6d60478e65385b8ac3d3deae0dc109647d01351e0e9d0bb25a6658a4ae" gracePeriod=30 Jan 20 17:36:11 crc kubenswrapper[4558]: I0120 17:36:11.677111 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/placement-55dfc8964d-kck7c" Jan 20 17:36:11 
crc kubenswrapper[4558]: I0120 17:36:11.764922 4558 generic.go:334] "Generic (PLEG): container finished" podID="44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f" containerID="f68d263a2081165f043b77271abdfd4da85864255da0492403c4be2647086b5e" exitCode=143 Jan 20 17:36:11 crc kubenswrapper[4558]: I0120 17:36:11.765436 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-55c969d68-6j7rc" event={"ID":"44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f","Type":"ContainerDied","Data":"f68d263a2081165f043b77271abdfd4da85864255da0492403c4be2647086b5e"} Jan 20 17:36:12 crc kubenswrapper[4558]: I0120 17:36:12.114938 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:36:12 crc kubenswrapper[4558]: I0120 17:36:12.115261 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="466e0ec0-043e-49a6-b30d-0272443cb839" containerName="ceilometer-central-agent" containerID="cri-o://4b3ea526bd0d6082a8f713f347d305fa594ec7d31b198fcc81b42053f7c829e0" gracePeriod=30 Jan 20 17:36:12 crc kubenswrapper[4558]: I0120 17:36:12.115336 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="466e0ec0-043e-49a6-b30d-0272443cb839" containerName="ceilometer-notification-agent" containerID="cri-o://078b66eeb03b0f73239ad0cd6bf5aa81ad61f3191161fab43388406215e122a8" gracePeriod=30 Jan 20 17:36:12 crc kubenswrapper[4558]: I0120 17:36:12.115431 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="466e0ec0-043e-49a6-b30d-0272443cb839" containerName="proxy-httpd" containerID="cri-o://46bc188726f1c02977bbc17eab4df62a8fd4adc650c33bd2be8f2e4f3435a82c" gracePeriod=30 Jan 20 17:36:12 crc kubenswrapper[4558]: I0120 17:36:12.115339 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="466e0ec0-043e-49a6-b30d-0272443cb839" containerName="sg-core" containerID="cri-o://bed66136ce5e42c0bdeb7a5aa041c344e04bfab855657e5edb47b7c7bc7a73a1" gracePeriod=30 Jan 20 17:36:12 crc kubenswrapper[4558]: I0120 17:36:12.538231 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/placement-55dfc8964d-kck7c" Jan 20 17:36:12 crc kubenswrapper[4558]: I0120 17:36:12.581437 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="13d3779b-9cf3-47f3-b330-a3adfd5b10f7" path="/var/lib/kubelet/pods/13d3779b-9cf3-47f3-b330-a3adfd5b10f7/volumes" Jan 20 17:36:12 crc kubenswrapper[4558]: I0120 17:36:12.582290 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ba60e66e-45cc-48a7-92dc-126983f4aa43" path="/var/lib/kubelet/pods/ba60e66e-45cc-48a7-92dc-126983f4aa43/volumes" Jan 20 17:36:12 crc kubenswrapper[4558]: I0120 17:36:12.611206 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-95f6c56c8-5t8cm"] Jan 20 17:36:12 crc kubenswrapper[4558]: I0120 17:36:12.798294 4558 generic.go:334] "Generic (PLEG): container finished" podID="466e0ec0-043e-49a6-b30d-0272443cb839" containerID="46bc188726f1c02977bbc17eab4df62a8fd4adc650c33bd2be8f2e4f3435a82c" exitCode=0 Jan 20 17:36:12 crc kubenswrapper[4558]: I0120 17:36:12.798639 4558 generic.go:334] "Generic (PLEG): container finished" podID="466e0ec0-043e-49a6-b30d-0272443cb839" containerID="bed66136ce5e42c0bdeb7a5aa041c344e04bfab855657e5edb47b7c7bc7a73a1" exitCode=2 Jan 
20 17:36:12 crc kubenswrapper[4558]: I0120 17:36:12.798372 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"466e0ec0-043e-49a6-b30d-0272443cb839","Type":"ContainerDied","Data":"46bc188726f1c02977bbc17eab4df62a8fd4adc650c33bd2be8f2e4f3435a82c"} Jan 20 17:36:12 crc kubenswrapper[4558]: I0120 17:36:12.798678 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"466e0ec0-043e-49a6-b30d-0272443cb839","Type":"ContainerDied","Data":"bed66136ce5e42c0bdeb7a5aa041c344e04bfab855657e5edb47b7c7bc7a73a1"} Jan 20 17:36:12 crc kubenswrapper[4558]: I0120 17:36:12.798691 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"466e0ec0-043e-49a6-b30d-0272443cb839","Type":"ContainerDied","Data":"4b3ea526bd0d6082a8f713f347d305fa594ec7d31b198fcc81b42053f7c829e0"} Jan 20 17:36:12 crc kubenswrapper[4558]: I0120 17:36:12.798649 4558 generic.go:334] "Generic (PLEG): container finished" podID="466e0ec0-043e-49a6-b30d-0272443cb839" containerID="4b3ea526bd0d6082a8f713f347d305fa594ec7d31b198fcc81b42053f7c829e0" exitCode=0 Jan 20 17:36:12 crc kubenswrapper[4558]: I0120 17:36:12.798901 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/placement-95f6c56c8-5t8cm" podUID="e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a" containerName="placement-log" containerID="cri-o://a23e411ace410c8611d6138e0db0566765ec515acb969fbae9e8e3de65c92841" gracePeriod=30 Jan 20 17:36:12 crc kubenswrapper[4558]: I0120 17:36:12.798935 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/placement-95f6c56c8-5t8cm" podUID="e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a" containerName="placement-api" containerID="cri-o://498b22181051bf7df1d792470910cf1984b67820328f2a3e03b54681be9c8fb8" gracePeriod=30 Jan 20 17:36:13 crc kubenswrapper[4558]: I0120 17:36:13.519882 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/cinder-scheduler-0" podUID="9800a14c-ebf5-4a5d-b384-c133973b55ff" containerName="cinder-scheduler" probeResult="failure" output="Get \"http://10.217.0.149:8080/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 20 17:36:13 crc kubenswrapper[4558]: I0120 17:36:13.524303 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack-kuttl-tests/barbican-api-64bd4857c6-2s9h4" podUID="94062d42-28cb-4c8a-afa4-f51458dedc6c" containerName="barbican-api" probeResult="failure" output="Get \"https://10.217.0.140:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 20 17:36:13 crc kubenswrapper[4558]: I0120 17:36:13.807933 4558 generic.go:334] "Generic (PLEG): container finished" podID="e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a" containerID="a23e411ace410c8611d6138e0db0566765ec515acb969fbae9e8e3de65c92841" exitCode=143 Jan 20 17:36:13 crc kubenswrapper[4558]: I0120 17:36:13.807979 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-95f6c56c8-5t8cm" event={"ID":"e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a","Type":"ContainerDied","Data":"a23e411ace410c8611d6138e0db0566765ec515acb969fbae9e8e3de65c92841"} Jan 20 17:36:14 crc kubenswrapper[4558]: I0120 17:36:14.941316 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:36:15 crc kubenswrapper[4558]: I0120 17:36:15.198321 4558 util.go:48] "No ready 
sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-api-55c969d68-6j7rc" Jan 20 17:36:15 crc kubenswrapper[4558]: I0120 17:36:15.372456 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vs7w5\" (UniqueName: \"kubernetes.io/projected/44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f-kube-api-access-vs7w5\") pod \"44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f\" (UID: \"44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f\") " Jan 20 17:36:15 crc kubenswrapper[4558]: I0120 17:36:15.372633 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f-config-data\") pod \"44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f\" (UID: \"44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f\") " Jan 20 17:36:15 crc kubenswrapper[4558]: I0120 17:36:15.372752 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f-public-tls-certs\") pod \"44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f\" (UID: \"44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f\") " Jan 20 17:36:15 crc kubenswrapper[4558]: I0120 17:36:15.372859 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f-combined-ca-bundle\") pod \"44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f\" (UID: \"44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f\") " Jan 20 17:36:15 crc kubenswrapper[4558]: I0120 17:36:15.372979 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f-config-data-custom\") pod \"44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f\" (UID: \"44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f\") " Jan 20 17:36:15 crc kubenswrapper[4558]: I0120 17:36:15.373074 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f-internal-tls-certs\") pod \"44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f\" (UID: \"44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f\") " Jan 20 17:36:15 crc kubenswrapper[4558]: I0120 17:36:15.373203 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f-logs\") pod \"44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f\" (UID: \"44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f\") " Jan 20 17:36:15 crc kubenswrapper[4558]: I0120 17:36:15.373624 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f-logs" (OuterVolumeSpecName: "logs") pod "44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f" (UID: "44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:36:15 crc kubenswrapper[4558]: I0120 17:36:15.373925 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:15 crc kubenswrapper[4558]: I0120 17:36:15.382377 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f" (UID: "44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:15 crc kubenswrapper[4558]: I0120 17:36:15.383222 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f-kube-api-access-vs7w5" (OuterVolumeSpecName: "kube-api-access-vs7w5") pod "44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f" (UID: "44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f"). InnerVolumeSpecName "kube-api-access-vs7w5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:36:15 crc kubenswrapper[4558]: I0120 17:36:15.416831 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f" (UID: "44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:15 crc kubenswrapper[4558]: I0120 17:36:15.418039 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f" (UID: "44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:15 crc kubenswrapper[4558]: I0120 17:36:15.418460 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f-config-data" (OuterVolumeSpecName: "config-data") pod "44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f" (UID: "44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:15 crc kubenswrapper[4558]: I0120 17:36:15.419883 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f" (UID: "44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:15 crc kubenswrapper[4558]: I0120 17:36:15.477969 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vs7w5\" (UniqueName: \"kubernetes.io/projected/44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f-kube-api-access-vs7w5\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:15 crc kubenswrapper[4558]: I0120 17:36:15.478005 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:15 crc kubenswrapper[4558]: I0120 17:36:15.478022 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:15 crc kubenswrapper[4558]: I0120 17:36:15.478037 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:15 crc kubenswrapper[4558]: I0120 17:36:15.478049 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:15 crc kubenswrapper[4558]: I0120 17:36:15.478060 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:15 crc kubenswrapper[4558]: I0120 17:36:15.887768 4558 generic.go:334] "Generic (PLEG): container finished" podID="44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f" containerID="b689cf6d60478e65385b8ac3d3deae0dc109647d01351e0e9d0bb25a6658a4ae" exitCode=0 Jan 20 17:36:15 crc kubenswrapper[4558]: I0120 17:36:15.887911 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-api-55c969d68-6j7rc" Jan 20 17:36:15 crc kubenswrapper[4558]: I0120 17:36:15.887945 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-55c969d68-6j7rc" event={"ID":"44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f","Type":"ContainerDied","Data":"b689cf6d60478e65385b8ac3d3deae0dc109647d01351e0e9d0bb25a6658a4ae"} Jan 20 17:36:15 crc kubenswrapper[4558]: I0120 17:36:15.888229 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-55c969d68-6j7rc" event={"ID":"44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f","Type":"ContainerDied","Data":"492776aaadfa9ad1a21765295c99ae29e8897befd944da8c131532de9a1a927f"} Jan 20 17:36:15 crc kubenswrapper[4558]: I0120 17:36:15.888265 4558 scope.go:117] "RemoveContainer" containerID="b689cf6d60478e65385b8ac3d3deae0dc109647d01351e0e9d0bb25a6658a4ae" Jan 20 17:36:15 crc kubenswrapper[4558]: I0120 17:36:15.926842 4558 scope.go:117] "RemoveContainer" containerID="f68d263a2081165f043b77271abdfd4da85864255da0492403c4be2647086b5e" Jan 20 17:36:15 crc kubenswrapper[4558]: I0120 17:36:15.953242 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-api-55c969d68-6j7rc"] Jan 20 17:36:15 crc kubenswrapper[4558]: I0120 17:36:15.964994 4558 scope.go:117] "RemoveContainer" containerID="b689cf6d60478e65385b8ac3d3deae0dc109647d01351e0e9d0bb25a6658a4ae" Jan 20 17:36:15 crc kubenswrapper[4558]: E0120 17:36:15.965692 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b689cf6d60478e65385b8ac3d3deae0dc109647d01351e0e9d0bb25a6658a4ae\": container with ID starting with b689cf6d60478e65385b8ac3d3deae0dc109647d01351e0e9d0bb25a6658a4ae not found: ID does not exist" containerID="b689cf6d60478e65385b8ac3d3deae0dc109647d01351e0e9d0bb25a6658a4ae" Jan 20 17:36:15 crc kubenswrapper[4558]: I0120 17:36:15.965827 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b689cf6d60478e65385b8ac3d3deae0dc109647d01351e0e9d0bb25a6658a4ae"} err="failed to get container status \"b689cf6d60478e65385b8ac3d3deae0dc109647d01351e0e9d0bb25a6658a4ae\": rpc error: code = NotFound desc = could not find container \"b689cf6d60478e65385b8ac3d3deae0dc109647d01351e0e9d0bb25a6658a4ae\": container with ID starting with b689cf6d60478e65385b8ac3d3deae0dc109647d01351e0e9d0bb25a6658a4ae not found: ID does not exist" Jan 20 17:36:15 crc kubenswrapper[4558]: I0120 17:36:15.965876 4558 scope.go:117] "RemoveContainer" containerID="f68d263a2081165f043b77271abdfd4da85864255da0492403c4be2647086b5e" Jan 20 17:36:15 crc kubenswrapper[4558]: E0120 17:36:15.966830 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f68d263a2081165f043b77271abdfd4da85864255da0492403c4be2647086b5e\": container with ID starting with f68d263a2081165f043b77271abdfd4da85864255da0492403c4be2647086b5e not found: ID does not exist" containerID="f68d263a2081165f043b77271abdfd4da85864255da0492403c4be2647086b5e" Jan 20 17:36:15 crc kubenswrapper[4558]: I0120 17:36:15.966889 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f68d263a2081165f043b77271abdfd4da85864255da0492403c4be2647086b5e"} err="failed to get container status \"f68d263a2081165f043b77271abdfd4da85864255da0492403c4be2647086b5e\": rpc error: code = NotFound desc = could not find container 
\"f68d263a2081165f043b77271abdfd4da85864255da0492403c4be2647086b5e\": container with ID starting with f68d263a2081165f043b77271abdfd4da85864255da0492403c4be2647086b5e not found: ID does not exist" Jan 20 17:36:15 crc kubenswrapper[4558]: I0120 17:36:15.968025 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-api-55c969d68-6j7rc"] Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.043922 4558 scope.go:117] "RemoveContainer" containerID="30636a3c4260263ef23765c51ceb13ef78993f311ae6ee434f35b1a095aaf86e" Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.120938 4558 scope.go:117] "RemoveContainer" containerID="6a2e345da9c0d3bd2685dcab70d32403d6708298dcd5c04143e21d6e1aaaf4d5" Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.197914 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.222333 4558 scope.go:117] "RemoveContainer" containerID="4899b425b0055fe954b8d69c9913e490586788a42bae1052bff5b0f5accd9abb" Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.304155 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/466e0ec0-043e-49a6-b30d-0272443cb839-log-httpd\") pod \"466e0ec0-043e-49a6-b30d-0272443cb839\" (UID: \"466e0ec0-043e-49a6-b30d-0272443cb839\") " Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.304241 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/466e0ec0-043e-49a6-b30d-0272443cb839-sg-core-conf-yaml\") pod \"466e0ec0-043e-49a6-b30d-0272443cb839\" (UID: \"466e0ec0-043e-49a6-b30d-0272443cb839\") " Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.304332 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/466e0ec0-043e-49a6-b30d-0272443cb839-combined-ca-bundle\") pod \"466e0ec0-043e-49a6-b30d-0272443cb839\" (UID: \"466e0ec0-043e-49a6-b30d-0272443cb839\") " Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.304363 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qmb4z\" (UniqueName: \"kubernetes.io/projected/466e0ec0-043e-49a6-b30d-0272443cb839-kube-api-access-qmb4z\") pod \"466e0ec0-043e-49a6-b30d-0272443cb839\" (UID: \"466e0ec0-043e-49a6-b30d-0272443cb839\") " Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.304433 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/466e0ec0-043e-49a6-b30d-0272443cb839-config-data\") pod \"466e0ec0-043e-49a6-b30d-0272443cb839\" (UID: \"466e0ec0-043e-49a6-b30d-0272443cb839\") " Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.304562 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/466e0ec0-043e-49a6-b30d-0272443cb839-scripts\") pod \"466e0ec0-043e-49a6-b30d-0272443cb839\" (UID: \"466e0ec0-043e-49a6-b30d-0272443cb839\") " Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.304610 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/466e0ec0-043e-49a6-b30d-0272443cb839-ceilometer-tls-certs\") pod \"466e0ec0-043e-49a6-b30d-0272443cb839\" (UID: 
\"466e0ec0-043e-49a6-b30d-0272443cb839\") " Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.304635 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/466e0ec0-043e-49a6-b30d-0272443cb839-run-httpd\") pod \"466e0ec0-043e-49a6-b30d-0272443cb839\" (UID: \"466e0ec0-043e-49a6-b30d-0272443cb839\") " Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.305698 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/466e0ec0-043e-49a6-b30d-0272443cb839-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "466e0ec0-043e-49a6-b30d-0272443cb839" (UID: "466e0ec0-043e-49a6-b30d-0272443cb839"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.312054 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/466e0ec0-043e-49a6-b30d-0272443cb839-kube-api-access-qmb4z" (OuterVolumeSpecName: "kube-api-access-qmb4z") pod "466e0ec0-043e-49a6-b30d-0272443cb839" (UID: "466e0ec0-043e-49a6-b30d-0272443cb839"). InnerVolumeSpecName "kube-api-access-qmb4z". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.312080 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/466e0ec0-043e-49a6-b30d-0272443cb839-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "466e0ec0-043e-49a6-b30d-0272443cb839" (UID: "466e0ec0-043e-49a6-b30d-0272443cb839"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.312665 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/466e0ec0-043e-49a6-b30d-0272443cb839-scripts" (OuterVolumeSpecName: "scripts") pod "466e0ec0-043e-49a6-b30d-0272443cb839" (UID: "466e0ec0-043e-49a6-b30d-0272443cb839"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.331242 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/466e0ec0-043e-49a6-b30d-0272443cb839-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "466e0ec0-043e-49a6-b30d-0272443cb839" (UID: "466e0ec0-043e-49a6-b30d-0272443cb839"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.355506 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/466e0ec0-043e-49a6-b30d-0272443cb839-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "466e0ec0-043e-49a6-b30d-0272443cb839" (UID: "466e0ec0-043e-49a6-b30d-0272443cb839"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.358891 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-95f6c56c8-5t8cm" Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.376985 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/466e0ec0-043e-49a6-b30d-0272443cb839-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "466e0ec0-043e-49a6-b30d-0272443cb839" (UID: "466e0ec0-043e-49a6-b30d-0272443cb839"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.399035 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/466e0ec0-043e-49a6-b30d-0272443cb839-config-data" (OuterVolumeSpecName: "config-data") pod "466e0ec0-043e-49a6-b30d-0272443cb839" (UID: "466e0ec0-043e-49a6-b30d-0272443cb839"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.407525 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a-config-data\") pod \"e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a\" (UID: \"e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a\") " Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.407623 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-26tpl\" (UniqueName: \"kubernetes.io/projected/e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a-kube-api-access-26tpl\") pod \"e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a\" (UID: \"e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a\") " Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.407719 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a-combined-ca-bundle\") pod \"e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a\" (UID: \"e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a\") " Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.407919 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a-internal-tls-certs\") pod \"e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a\" (UID: \"e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a\") " Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.408034 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a-public-tls-certs\") pod \"e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a\" (UID: \"e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a\") " Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.408153 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a-scripts\") pod \"e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a\" (UID: \"e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a\") " Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.408325 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a-logs\") pod \"e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a\" (UID: \"e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a\") " Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.408861 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" 
(UniqueName: \"kubernetes.io/secret/466e0ec0-043e-49a6-b30d-0272443cb839-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.408924 4558 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/466e0ec0-043e-49a6-b30d-0272443cb839-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.408994 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/466e0ec0-043e-49a6-b30d-0272443cb839-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.409048 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/466e0ec0-043e-49a6-b30d-0272443cb839-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.409094 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/466e0ec0-043e-49a6-b30d-0272443cb839-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.409152 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/466e0ec0-043e-49a6-b30d-0272443cb839-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.409239 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qmb4z\" (UniqueName: \"kubernetes.io/projected/466e0ec0-043e-49a6-b30d-0272443cb839-kube-api-access-qmb4z\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.409285 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/466e0ec0-043e-49a6-b30d-0272443cb839-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.409666 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a-logs" (OuterVolumeSpecName: "logs") pod "e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a" (UID: "e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.415409 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a-kube-api-access-26tpl" (OuterVolumeSpecName: "kube-api-access-26tpl") pod "e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a" (UID: "e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a"). InnerVolumeSpecName "kube-api-access-26tpl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.419084 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a-scripts" (OuterVolumeSpecName: "scripts") pod "e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a" (UID: "e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.451579 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a-config-data" (OuterVolumeSpecName: "config-data") pod "e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a" (UID: "e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.458887 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a" (UID: "e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.488154 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a" (UID: "e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.513956 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.513979 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.513990 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-26tpl\" (UniqueName: \"kubernetes.io/projected/e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a-kube-api-access-26tpl\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.514000 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.514009 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.514016 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.515633 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a" (UID: "e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.574817 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f" path="/var/lib/kubelet/pods/44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f/volumes" Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.616609 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.779193 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.901725 4558 generic.go:334] "Generic (PLEG): container finished" podID="466e0ec0-043e-49a6-b30d-0272443cb839" containerID="078b66eeb03b0f73239ad0cd6bf5aa81ad61f3191161fab43388406215e122a8" exitCode=0 Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.901878 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"466e0ec0-043e-49a6-b30d-0272443cb839","Type":"ContainerDied","Data":"078b66eeb03b0f73239ad0cd6bf5aa81ad61f3191161fab43388406215e122a8"} Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.902283 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"466e0ec0-043e-49a6-b30d-0272443cb839","Type":"ContainerDied","Data":"18ed28c6a594076520e6c3f1696bd2790369ffc42a8b0b782a7fe3b8e55a695c"} Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.902080 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.902395 4558 scope.go:117] "RemoveContainer" containerID="46bc188726f1c02977bbc17eab4df62a8fd4adc650c33bd2be8f2e4f3435a82c" Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.905924 4558 generic.go:334] "Generic (PLEG): container finished" podID="e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a" containerID="498b22181051bf7df1d792470910cf1984b67820328f2a3e03b54681be9c8fb8" exitCode=0 Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.906046 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-95f6c56c8-5t8cm" event={"ID":"e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a","Type":"ContainerDied","Data":"498b22181051bf7df1d792470910cf1984b67820328f2a3e03b54681be9c8fb8"} Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.906110 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-95f6c56c8-5t8cm" event={"ID":"e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a","Type":"ContainerDied","Data":"da662d5236ba854a24d04ef2fd7223581f34c137e93a7f81b34b1e201dbeda89"} Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.906256 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-95f6c56c8-5t8cm" Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.932344 4558 scope.go:117] "RemoveContainer" containerID="bed66136ce5e42c0bdeb7a5aa041c344e04bfab855657e5edb47b7c7bc7a73a1" Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.940473 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.949664 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:36:16 crc kubenswrapper[4558]: E0120 17:36:16.959893 4558 fsHandler.go:119] failed to collect filesystem stats - rootDiskErr: could not stat "/var/lib/containers/storage/overlay/897d2d59707cc52e43f6cb9b242e3f4ff771a151567b77ff32ecf69bf1f3276c/diff" to get inode usage: stat /var/lib/containers/storage/overlay/897d2d59707cc52e43f6cb9b242e3f4ff771a151567b77ff32ecf69bf1f3276c/diff: no such file or directory, extraDiskErr: Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.965267 4558 scope.go:117] "RemoveContainer" containerID="078b66eeb03b0f73239ad0cd6bf5aa81ad61f3191161fab43388406215e122a8" Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.966461 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-95f6c56c8-5t8cm"] Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.978221 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/placement-95f6c56c8-5t8cm"] Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.985475 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:36:16 crc kubenswrapper[4558]: E0120 17:36:16.985906 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13d3779b-9cf3-47f3-b330-a3adfd5b10f7" containerName="neutron-httpd" Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.985929 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="13d3779b-9cf3-47f3-b330-a3adfd5b10f7" containerName="neutron-httpd" Jan 20 17:36:16 crc kubenswrapper[4558]: E0120 17:36:16.985950 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f" containerName="barbican-api" Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.985958 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f" containerName="barbican-api" Jan 20 17:36:16 crc kubenswrapper[4558]: E0120 17:36:16.985974 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba60e66e-45cc-48a7-92dc-126983f4aa43" containerName="neutron-httpd" Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.985981 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba60e66e-45cc-48a7-92dc-126983f4aa43" containerName="neutron-httpd" Jan 20 17:36:16 crc kubenswrapper[4558]: E0120 17:36:16.985997 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="466e0ec0-043e-49a6-b30d-0272443cb839" containerName="sg-core" Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.986005 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="466e0ec0-043e-49a6-b30d-0272443cb839" containerName="sg-core" Jan 20 17:36:16 crc kubenswrapper[4558]: E0120 17:36:16.986021 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13d3779b-9cf3-47f3-b330-a3adfd5b10f7" containerName="neutron-api" Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.986027 4558 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="13d3779b-9cf3-47f3-b330-a3adfd5b10f7" containerName="neutron-api" Jan 20 17:36:16 crc kubenswrapper[4558]: E0120 17:36:16.986035 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="466e0ec0-043e-49a6-b30d-0272443cb839" containerName="ceilometer-notification-agent" Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.986041 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="466e0ec0-043e-49a6-b30d-0272443cb839" containerName="ceilometer-notification-agent" Jan 20 17:36:16 crc kubenswrapper[4558]: E0120 17:36:16.986049 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f" containerName="barbican-api-log" Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.986054 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f" containerName="barbican-api-log" Jan 20 17:36:16 crc kubenswrapper[4558]: E0120 17:36:16.986061 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="466e0ec0-043e-49a6-b30d-0272443cb839" containerName="ceilometer-central-agent" Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.986067 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="466e0ec0-043e-49a6-b30d-0272443cb839" containerName="ceilometer-central-agent" Jan 20 17:36:16 crc kubenswrapper[4558]: E0120 17:36:16.986089 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a" containerName="placement-log" Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.986095 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a" containerName="placement-log" Jan 20 17:36:16 crc kubenswrapper[4558]: E0120 17:36:16.986108 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a" containerName="placement-api" Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.986114 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a" containerName="placement-api" Jan 20 17:36:16 crc kubenswrapper[4558]: E0120 17:36:16.986123 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba60e66e-45cc-48a7-92dc-126983f4aa43" containerName="neutron-api" Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.986129 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba60e66e-45cc-48a7-92dc-126983f4aa43" containerName="neutron-api" Jan 20 17:36:16 crc kubenswrapper[4558]: E0120 17:36:16.986143 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="466e0ec0-043e-49a6-b30d-0272443cb839" containerName="proxy-httpd" Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.986149 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="466e0ec0-043e-49a6-b30d-0272443cb839" containerName="proxy-httpd" Jan 20 17:36:16 crc kubenswrapper[4558]: E0120 17:36:16.986159 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7de6386e-7693-430d-9753-2d7fa8c31b5d" containerName="barbican-api-log" Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.986177 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7de6386e-7693-430d-9753-2d7fa8c31b5d" containerName="barbican-api-log" Jan 20 17:36:16 crc kubenswrapper[4558]: E0120 17:36:16.986186 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7de6386e-7693-430d-9753-2d7fa8c31b5d" containerName="barbican-api" Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.986191 
4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7de6386e-7693-430d-9753-2d7fa8c31b5d" containerName="barbican-api" Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.986381 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="13d3779b-9cf3-47f3-b330-a3adfd5b10f7" containerName="neutron-api" Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.986392 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="7de6386e-7693-430d-9753-2d7fa8c31b5d" containerName="barbican-api" Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.986403 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="466e0ec0-043e-49a6-b30d-0272443cb839" containerName="proxy-httpd" Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.986414 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ba60e66e-45cc-48a7-92dc-126983f4aa43" containerName="neutron-api" Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.986425 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="466e0ec0-043e-49a6-b30d-0272443cb839" containerName="sg-core" Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.986440 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f" containerName="barbican-api-log" Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.986450 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="7de6386e-7693-430d-9753-2d7fa8c31b5d" containerName="barbican-api-log" Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.986456 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a" containerName="placement-api" Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.986466 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ba60e66e-45cc-48a7-92dc-126983f4aa43" containerName="neutron-httpd" Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.986474 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="466e0ec0-043e-49a6-b30d-0272443cb839" containerName="ceilometer-notification-agent" Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.986482 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="466e0ec0-043e-49a6-b30d-0272443cb839" containerName="ceilometer-central-agent" Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.986489 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f" containerName="barbican-api" Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.986496 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="13d3779b-9cf3-47f3-b330-a3adfd5b10f7" containerName="neutron-httpd" Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.986504 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a" containerName="placement-log" Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.988073 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.990410 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ceilometer-internal-svc" Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.991205 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.991465 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:36:16 crc kubenswrapper[4558]: I0120 17:36:16.991731 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.000275 4558 scope.go:117] "RemoveContainer" containerID="4b3ea526bd0d6082a8f713f347d305fa594ec7d31b198fcc81b42053f7c829e0" Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.023455 4558 scope.go:117] "RemoveContainer" containerID="46bc188726f1c02977bbc17eab4df62a8fd4adc650c33bd2be8f2e4f3435a82c" Jan 20 17:36:17 crc kubenswrapper[4558]: E0120 17:36:17.023808 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"46bc188726f1c02977bbc17eab4df62a8fd4adc650c33bd2be8f2e4f3435a82c\": container with ID starting with 46bc188726f1c02977bbc17eab4df62a8fd4adc650c33bd2be8f2e4f3435a82c not found: ID does not exist" containerID="46bc188726f1c02977bbc17eab4df62a8fd4adc650c33bd2be8f2e4f3435a82c" Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.023854 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"46bc188726f1c02977bbc17eab4df62a8fd4adc650c33bd2be8f2e4f3435a82c"} err="failed to get container status \"46bc188726f1c02977bbc17eab4df62a8fd4adc650c33bd2be8f2e4f3435a82c\": rpc error: code = NotFound desc = could not find container \"46bc188726f1c02977bbc17eab4df62a8fd4adc650c33bd2be8f2e4f3435a82c\": container with ID starting with 46bc188726f1c02977bbc17eab4df62a8fd4adc650c33bd2be8f2e4f3435a82c not found: ID does not exist" Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.023879 4558 scope.go:117] "RemoveContainer" containerID="bed66136ce5e42c0bdeb7a5aa041c344e04bfab855657e5edb47b7c7bc7a73a1" Jan 20 17:36:17 crc kubenswrapper[4558]: E0120 17:36:17.024357 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bed66136ce5e42c0bdeb7a5aa041c344e04bfab855657e5edb47b7c7bc7a73a1\": container with ID starting with bed66136ce5e42c0bdeb7a5aa041c344e04bfab855657e5edb47b7c7bc7a73a1 not found: ID does not exist" containerID="bed66136ce5e42c0bdeb7a5aa041c344e04bfab855657e5edb47b7c7bc7a73a1" Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.024394 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bed66136ce5e42c0bdeb7a5aa041c344e04bfab855657e5edb47b7c7bc7a73a1"} err="failed to get container status \"bed66136ce5e42c0bdeb7a5aa041c344e04bfab855657e5edb47b7c7bc7a73a1\": rpc error: code = NotFound desc = could not find container \"bed66136ce5e42c0bdeb7a5aa041c344e04bfab855657e5edb47b7c7bc7a73a1\": container with ID starting with bed66136ce5e42c0bdeb7a5aa041c344e04bfab855657e5edb47b7c7bc7a73a1 not found: ID does not exist" Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.024418 4558 scope.go:117] "RemoveContainer" 
containerID="078b66eeb03b0f73239ad0cd6bf5aa81ad61f3191161fab43388406215e122a8" Jan 20 17:36:17 crc kubenswrapper[4558]: E0120 17:36:17.024712 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"078b66eeb03b0f73239ad0cd6bf5aa81ad61f3191161fab43388406215e122a8\": container with ID starting with 078b66eeb03b0f73239ad0cd6bf5aa81ad61f3191161fab43388406215e122a8 not found: ID does not exist" containerID="078b66eeb03b0f73239ad0cd6bf5aa81ad61f3191161fab43388406215e122a8" Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.024734 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"078b66eeb03b0f73239ad0cd6bf5aa81ad61f3191161fab43388406215e122a8"} err="failed to get container status \"078b66eeb03b0f73239ad0cd6bf5aa81ad61f3191161fab43388406215e122a8\": rpc error: code = NotFound desc = could not find container \"078b66eeb03b0f73239ad0cd6bf5aa81ad61f3191161fab43388406215e122a8\": container with ID starting with 078b66eeb03b0f73239ad0cd6bf5aa81ad61f3191161fab43388406215e122a8 not found: ID does not exist" Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.024749 4558 scope.go:117] "RemoveContainer" containerID="4b3ea526bd0d6082a8f713f347d305fa594ec7d31b198fcc81b42053f7c829e0" Jan 20 17:36:17 crc kubenswrapper[4558]: E0120 17:36:17.025025 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4b3ea526bd0d6082a8f713f347d305fa594ec7d31b198fcc81b42053f7c829e0\": container with ID starting with 4b3ea526bd0d6082a8f713f347d305fa594ec7d31b198fcc81b42053f7c829e0 not found: ID does not exist" containerID="4b3ea526bd0d6082a8f713f347d305fa594ec7d31b198fcc81b42053f7c829e0" Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.025048 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4b3ea526bd0d6082a8f713f347d305fa594ec7d31b198fcc81b42053f7c829e0"} err="failed to get container status \"4b3ea526bd0d6082a8f713f347d305fa594ec7d31b198fcc81b42053f7c829e0\": rpc error: code = NotFound desc = could not find container \"4b3ea526bd0d6082a8f713f347d305fa594ec7d31b198fcc81b42053f7c829e0\": container with ID starting with 4b3ea526bd0d6082a8f713f347d305fa594ec7d31b198fcc81b42053f7c829e0 not found: ID does not exist" Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.025068 4558 scope.go:117] "RemoveContainer" containerID="498b22181051bf7df1d792470910cf1984b67820328f2a3e03b54681be9c8fb8" Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.049501 4558 scope.go:117] "RemoveContainer" containerID="a23e411ace410c8611d6138e0db0566765ec515acb969fbae9e8e3de65c92841" Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.068484 4558 scope.go:117] "RemoveContainer" containerID="498b22181051bf7df1d792470910cf1984b67820328f2a3e03b54681be9c8fb8" Jan 20 17:36:17 crc kubenswrapper[4558]: E0120 17:36:17.068938 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"498b22181051bf7df1d792470910cf1984b67820328f2a3e03b54681be9c8fb8\": container with ID starting with 498b22181051bf7df1d792470910cf1984b67820328f2a3e03b54681be9c8fb8 not found: ID does not exist" containerID="498b22181051bf7df1d792470910cf1984b67820328f2a3e03b54681be9c8fb8" Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.069002 4558 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"498b22181051bf7df1d792470910cf1984b67820328f2a3e03b54681be9c8fb8"} err="failed to get container status \"498b22181051bf7df1d792470910cf1984b67820328f2a3e03b54681be9c8fb8\": rpc error: code = NotFound desc = could not find container \"498b22181051bf7df1d792470910cf1984b67820328f2a3e03b54681be9c8fb8\": container with ID starting with 498b22181051bf7df1d792470910cf1984b67820328f2a3e03b54681be9c8fb8 not found: ID does not exist" Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.069044 4558 scope.go:117] "RemoveContainer" containerID="a23e411ace410c8611d6138e0db0566765ec515acb969fbae9e8e3de65c92841" Jan 20 17:36:17 crc kubenswrapper[4558]: E0120 17:36:17.069449 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a23e411ace410c8611d6138e0db0566765ec515acb969fbae9e8e3de65c92841\": container with ID starting with a23e411ace410c8611d6138e0db0566765ec515acb969fbae9e8e3de65c92841 not found: ID does not exist" containerID="a23e411ace410c8611d6138e0db0566765ec515acb969fbae9e8e3de65c92841" Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.069482 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a23e411ace410c8611d6138e0db0566765ec515acb969fbae9e8e3de65c92841"} err="failed to get container status \"a23e411ace410c8611d6138e0db0566765ec515acb969fbae9e8e3de65c92841\": rpc error: code = NotFound desc = could not find container \"a23e411ace410c8611d6138e0db0566765ec515acb969fbae9e8e3de65c92841\": container with ID starting with a23e411ace410c8611d6138e0db0566765ec515acb969fbae9e8e3de65c92841 not found: ID does not exist" Jan 20 17:36:17 crc kubenswrapper[4558]: E0120 17:36:17.092868 4558 fsHandler.go:119] failed to collect filesystem stats - rootDiskErr: could not stat "/var/lib/containers/storage/overlay/653b313c57d4ed8ecc278574f62b8cde5541bb4e32f2bb54273cecd493621931/diff" to get inode usage: stat /var/lib/containers/storage/overlay/653b313c57d4ed8ecc278574f62b8cde5541bb4e32f2bb54273cecd493621931/diff: no such file or directory, extraDiskErr: could not stat "/var/log/pods/openstack-kuttl-tests_barbican-api-55c969d68-6j7rc_44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f/barbican-api-log/0.log" to get inode usage: stat /var/log/pods/openstack-kuttl-tests_barbican-api-55c969d68-6j7rc_44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f/barbican-api-log/0.log: no such file or directory Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.126827 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rgxzh\" (UniqueName: \"kubernetes.io/projected/dc94159e-846c-4207-a4e6-fc877335d179-kube-api-access-rgxzh\") pod \"ceilometer-0\" (UID: \"dc94159e-846c-4207-a4e6-fc877335d179\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.126902 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dc94159e-846c-4207-a4e6-fc877335d179-log-httpd\") pod \"ceilometer-0\" (UID: \"dc94159e-846c-4207-a4e6-fc877335d179\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.126966 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/dc94159e-846c-4207-a4e6-fc877335d179-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: 
\"dc94159e-846c-4207-a4e6-fc877335d179\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.127433 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dc94159e-846c-4207-a4e6-fc877335d179-run-httpd\") pod \"ceilometer-0\" (UID: \"dc94159e-846c-4207-a4e6-fc877335d179\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.127517 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc94159e-846c-4207-a4e6-fc877335d179-config-data\") pod \"ceilometer-0\" (UID: \"dc94159e-846c-4207-a4e6-fc877335d179\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.127699 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/dc94159e-846c-4207-a4e6-fc877335d179-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"dc94159e-846c-4207-a4e6-fc877335d179\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.127806 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc94159e-846c-4207-a4e6-fc877335d179-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"dc94159e-846c-4207-a4e6-fc877335d179\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.127923 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dc94159e-846c-4207-a4e6-fc877335d179-scripts\") pod \"ceilometer-0\" (UID: \"dc94159e-846c-4207-a4e6-fc877335d179\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.228983 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc94159e-846c-4207-a4e6-fc877335d179-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"dc94159e-846c-4207-a4e6-fc877335d179\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.229058 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dc94159e-846c-4207-a4e6-fc877335d179-scripts\") pod \"ceilometer-0\" (UID: \"dc94159e-846c-4207-a4e6-fc877335d179\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.229113 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rgxzh\" (UniqueName: \"kubernetes.io/projected/dc94159e-846c-4207-a4e6-fc877335d179-kube-api-access-rgxzh\") pod \"ceilometer-0\" (UID: \"dc94159e-846c-4207-a4e6-fc877335d179\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.229150 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dc94159e-846c-4207-a4e6-fc877335d179-log-httpd\") pod \"ceilometer-0\" (UID: \"dc94159e-846c-4207-a4e6-fc877335d179\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.229195 4558 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/dc94159e-846c-4207-a4e6-fc877335d179-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"dc94159e-846c-4207-a4e6-fc877335d179\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.229316 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dc94159e-846c-4207-a4e6-fc877335d179-run-httpd\") pod \"ceilometer-0\" (UID: \"dc94159e-846c-4207-a4e6-fc877335d179\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.229349 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc94159e-846c-4207-a4e6-fc877335d179-config-data\") pod \"ceilometer-0\" (UID: \"dc94159e-846c-4207-a4e6-fc877335d179\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.229404 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/dc94159e-846c-4207-a4e6-fc877335d179-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"dc94159e-846c-4207-a4e6-fc877335d179\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.229690 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dc94159e-846c-4207-a4e6-fc877335d179-log-httpd\") pod \"ceilometer-0\" (UID: \"dc94159e-846c-4207-a4e6-fc877335d179\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.229922 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dc94159e-846c-4207-a4e6-fc877335d179-run-httpd\") pod \"ceilometer-0\" (UID: \"dc94159e-846c-4207-a4e6-fc877335d179\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.233387 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/dc94159e-846c-4207-a4e6-fc877335d179-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"dc94159e-846c-4207-a4e6-fc877335d179\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.234541 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc94159e-846c-4207-a4e6-fc877335d179-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"dc94159e-846c-4207-a4e6-fc877335d179\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.234893 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc94159e-846c-4207-a4e6-fc877335d179-config-data\") pod \"ceilometer-0\" (UID: \"dc94159e-846c-4207-a4e6-fc877335d179\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.235491 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/dc94159e-846c-4207-a4e6-fc877335d179-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"dc94159e-846c-4207-a4e6-fc877335d179\") " pod="openstack-kuttl-tests/ceilometer-0" 
Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.238990 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dc94159e-846c-4207-a4e6-fc877335d179-scripts\") pod \"ceilometer-0\" (UID: \"dc94159e-846c-4207-a4e6-fc877335d179\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.257281 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rgxzh\" (UniqueName: \"kubernetes.io/projected/dc94159e-846c-4207-a4e6-fc877335d179-kube-api-access-rgxzh\") pod \"ceilometer-0\" (UID: \"dc94159e-846c-4207-a4e6-fc877335d179\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.304888 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:36:17 crc kubenswrapper[4558]: E0120 17:36:17.359829 4558 fsHandler.go:119] failed to collect filesystem stats - rootDiskErr: could not stat "/var/lib/containers/storage/overlay/06acdea3f305ce2bcc17a7033b94a8c98e4747c9b8873cfd3e8d2470a72272aa/diff" to get inode usage: stat /var/lib/containers/storage/overlay/06acdea3f305ce2bcc17a7033b94a8c98e4747c9b8873cfd3e8d2470a72272aa/diff: no such file or directory, extraDiskErr: could not stat "/var/log/pods/openstack-kuttl-tests_barbican-api-55c969d68-6j7rc_44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f/barbican-api/0.log" to get inode usage: stat /var/log/pods/openstack-kuttl-tests_barbican-api-55c969d68-6j7rc_44bcde0a-cc03-4cd1-bc4f-ab22cf2bb13f/barbican-api/0.log: no such file or directory Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.726596 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-6865f4c68b-s7dm7" Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.745278 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/b6b873be-e034-4d85-b131-548eff8013ed-config\") pod \"b6b873be-e034-4d85-b131-548eff8013ed\" (UID: \"b6b873be-e034-4d85-b131-548eff8013ed\") " Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.745320 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6b873be-e034-4d85-b131-548eff8013ed-internal-tls-certs\") pod \"b6b873be-e034-4d85-b131-548eff8013ed\" (UID: \"b6b873be-e034-4d85-b131-548eff8013ed\") " Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.745360 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6b873be-e034-4d85-b131-548eff8013ed-ovndb-tls-certs\") pod \"b6b873be-e034-4d85-b131-548eff8013ed\" (UID: \"b6b873be-e034-4d85-b131-548eff8013ed\") " Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.745396 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/b6b873be-e034-4d85-b131-548eff8013ed-httpd-config\") pod \"b6b873be-e034-4d85-b131-548eff8013ed\" (UID: \"b6b873be-e034-4d85-b131-548eff8013ed\") " Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.745458 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6b873be-e034-4d85-b131-548eff8013ed-combined-ca-bundle\") pod \"b6b873be-e034-4d85-b131-548eff8013ed\" (UID: 
\"b6b873be-e034-4d85-b131-548eff8013ed\") " Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.745472 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6b873be-e034-4d85-b131-548eff8013ed-public-tls-certs\") pod \"b6b873be-e034-4d85-b131-548eff8013ed\" (UID: \"b6b873be-e034-4d85-b131-548eff8013ed\") " Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.745547 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-md7lm\" (UniqueName: \"kubernetes.io/projected/b6b873be-e034-4d85-b131-548eff8013ed-kube-api-access-md7lm\") pod \"b6b873be-e034-4d85-b131-548eff8013ed\" (UID: \"b6b873be-e034-4d85-b131-548eff8013ed\") " Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.765210 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6b873be-e034-4d85-b131-548eff8013ed-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "b6b873be-e034-4d85-b131-548eff8013ed" (UID: "b6b873be-e034-4d85-b131-548eff8013ed"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.766307 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6b873be-e034-4d85-b131-548eff8013ed-kube-api-access-md7lm" (OuterVolumeSpecName: "kube-api-access-md7lm") pod "b6b873be-e034-4d85-b131-548eff8013ed" (UID: "b6b873be-e034-4d85-b131-548eff8013ed"). InnerVolumeSpecName "kube-api-access-md7lm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.768281 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.835223 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6b873be-e034-4d85-b131-548eff8013ed-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "b6b873be-e034-4d85-b131-548eff8013ed" (UID: "b6b873be-e034-4d85-b131-548eff8013ed"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.837387 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6b873be-e034-4d85-b131-548eff8013ed-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b6b873be-e034-4d85-b131-548eff8013ed" (UID: "b6b873be-e034-4d85-b131-548eff8013ed"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.839404 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6b873be-e034-4d85-b131-548eff8013ed-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "b6b873be-e034-4d85-b131-548eff8013ed" (UID: "b6b873be-e034-4d85-b131-548eff8013ed"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.845042 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6b873be-e034-4d85-b131-548eff8013ed-config" (OuterVolumeSpecName: "config") pod "b6b873be-e034-4d85-b131-548eff8013ed" (UID: "b6b873be-e034-4d85-b131-548eff8013ed"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.847259 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/b6b873be-e034-4d85-b131-548eff8013ed-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.847282 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6b873be-e034-4d85-b131-548eff8013ed-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.847291 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/b6b873be-e034-4d85-b131-548eff8013ed-httpd-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.847303 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6b873be-e034-4d85-b131-548eff8013ed-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.847312 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6b873be-e034-4d85-b131-548eff8013ed-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.847320 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-md7lm\" (UniqueName: \"kubernetes.io/projected/b6b873be-e034-4d85-b131-548eff8013ed-kube-api-access-md7lm\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.857914 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6b873be-e034-4d85-b131-548eff8013ed-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "b6b873be-e034-4d85-b131-548eff8013ed" (UID: "b6b873be-e034-4d85-b131-548eff8013ed"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.922590 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"dc94159e-846c-4207-a4e6-fc877335d179","Type":"ContainerStarted","Data":"931802c7078140a8ddc656e86c086b0574dc8ad1ad02d3bfdce7a2e061fb41d4"} Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.924690 4558 generic.go:334] "Generic (PLEG): container finished" podID="b6b873be-e034-4d85-b131-548eff8013ed" containerID="ab9f91ee996ebed5befa70a8ec573471b2011a4fc0f88f09cde407f6a7347062" exitCode=0 Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.924726 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-6865f4c68b-s7dm7" event={"ID":"b6b873be-e034-4d85-b131-548eff8013ed","Type":"ContainerDied","Data":"ab9f91ee996ebed5befa70a8ec573471b2011a4fc0f88f09cde407f6a7347062"} Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.924762 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-6865f4c68b-s7dm7" event={"ID":"b6b873be-e034-4d85-b131-548eff8013ed","Type":"ContainerDied","Data":"3972e8f809e0386263608c53f1401f9b5908912fbbaf03f2a6de873c9c9993fe"} Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.924766 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-6865f4c68b-s7dm7" Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.924785 4558 scope.go:117] "RemoveContainer" containerID="813d6d3e667f27b128b13d9ed3b5e2eb7fbd264b4a64cde9c0a7a2ffc3eabf31" Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.952371 4558 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6b873be-e034-4d85-b131-548eff8013ed-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.960030 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-6865f4c68b-s7dm7"] Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.963589 4558 scope.go:117] "RemoveContainer" containerID="ab9f91ee996ebed5befa70a8ec573471b2011a4fc0f88f09cde407f6a7347062" Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.969089 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-6865f4c68b-s7dm7"] Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.983877 4558 scope.go:117] "RemoveContainer" containerID="813d6d3e667f27b128b13d9ed3b5e2eb7fbd264b4a64cde9c0a7a2ffc3eabf31" Jan 20 17:36:17 crc kubenswrapper[4558]: E0120 17:36:17.984261 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"813d6d3e667f27b128b13d9ed3b5e2eb7fbd264b4a64cde9c0a7a2ffc3eabf31\": container with ID starting with 813d6d3e667f27b128b13d9ed3b5e2eb7fbd264b4a64cde9c0a7a2ffc3eabf31 not found: ID does not exist" containerID="813d6d3e667f27b128b13d9ed3b5e2eb7fbd264b4a64cde9c0a7a2ffc3eabf31" Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.984404 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"813d6d3e667f27b128b13d9ed3b5e2eb7fbd264b4a64cde9c0a7a2ffc3eabf31"} err="failed to get container status \"813d6d3e667f27b128b13d9ed3b5e2eb7fbd264b4a64cde9c0a7a2ffc3eabf31\": rpc error: code = NotFound desc = could not find container \"813d6d3e667f27b128b13d9ed3b5e2eb7fbd264b4a64cde9c0a7a2ffc3eabf31\": container with ID starting with 813d6d3e667f27b128b13d9ed3b5e2eb7fbd264b4a64cde9c0a7a2ffc3eabf31 not found: ID does not exist" Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.984538 4558 scope.go:117] "RemoveContainer" containerID="ab9f91ee996ebed5befa70a8ec573471b2011a4fc0f88f09cde407f6a7347062" Jan 20 17:36:17 crc kubenswrapper[4558]: E0120 17:36:17.985035 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ab9f91ee996ebed5befa70a8ec573471b2011a4fc0f88f09cde407f6a7347062\": container with ID starting with ab9f91ee996ebed5befa70a8ec573471b2011a4fc0f88f09cde407f6a7347062 not found: ID does not exist" containerID="ab9f91ee996ebed5befa70a8ec573471b2011a4fc0f88f09cde407f6a7347062" Jan 20 17:36:17 crc kubenswrapper[4558]: I0120 17:36:17.985121 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ab9f91ee996ebed5befa70a8ec573471b2011a4fc0f88f09cde407f6a7347062"} err="failed to get container status \"ab9f91ee996ebed5befa70a8ec573471b2011a4fc0f88f09cde407f6a7347062\": rpc error: code = NotFound desc = could not find container \"ab9f91ee996ebed5befa70a8ec573471b2011a4fc0f88f09cde407f6a7347062\": container with ID starting with ab9f91ee996ebed5befa70a8ec573471b2011a4fc0f88f09cde407f6a7347062 not found: ID does not exist" Jan 20 17:36:18 crc 
kubenswrapper[4558]: I0120 17:36:18.561569 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/cinder-scheduler-0" podUID="9800a14c-ebf5-4a5d-b384-c133973b55ff" containerName="cinder-scheduler" probeResult="failure" output="Get \"http://10.217.0.149:8080/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 20 17:36:18 crc kubenswrapper[4558]: I0120 17:36:18.585387 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="466e0ec0-043e-49a6-b30d-0272443cb839" path="/var/lib/kubelet/pods/466e0ec0-043e-49a6-b30d-0272443cb839/volumes" Jan 20 17:36:18 crc kubenswrapper[4558]: I0120 17:36:18.586193 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6b873be-e034-4d85-b131-548eff8013ed" path="/var/lib/kubelet/pods/b6b873be-e034-4d85-b131-548eff8013ed/volumes" Jan 20 17:36:18 crc kubenswrapper[4558]: I0120 17:36:18.586829 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a" path="/var/lib/kubelet/pods/e0b4cbdd-cd9f-4ff7-87c5-b24833dc6e0a/volumes" Jan 20 17:36:18 crc kubenswrapper[4558]: I0120 17:36:18.714790 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:36:18 crc kubenswrapper[4558]: I0120 17:36:18.937369 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"dc94159e-846c-4207-a4e6-fc877335d179","Type":"ContainerStarted","Data":"5bf498b0c5a887fb9070dae794c0855721714041c990374c5706d705de7cdc2d"} Jan 20 17:36:19 crc kubenswrapper[4558]: E0120 17:36:19.251439 4558 fsHandler.go:119] failed to collect filesystem stats - rootDiskErr: , extraDiskErr: could not stat "/var/log/pods/openstack-kuttl-tests_cinder-scheduler-0_4bd47eac-4090-4fc1-91c4-553ebd84964d/probe/0.log" to get inode usage: stat /var/log/pods/openstack-kuttl-tests_cinder-scheduler-0_4bd47eac-4090-4fc1-91c4-553ebd84964d/probe/0.log: no such file or directory Jan 20 17:36:19 crc kubenswrapper[4558]: I0120 17:36:19.566131 4558 scope.go:117] "RemoveContainer" containerID="99dba2fa739dee263fd622caaa8566eb366703c39dd9dcf2a61dbad75ba8d1e5" Jan 20 17:36:19 crc kubenswrapper[4558]: I0120 17:36:19.950518 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"0d2b6a10-ca95-4d72-a80b-1706822a07a7","Type":"ContainerStarted","Data":"29de3130a0b6727cccd3d029a221407989f24f198d0147b0d9d994fbbe61ae64"} Jan 20 17:36:19 crc kubenswrapper[4558]: I0120 17:36:19.950733 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:36:19 crc kubenswrapper[4558]: I0120 17:36:19.952251 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"dc94159e-846c-4207-a4e6-fc877335d179","Type":"ContainerStarted","Data":"83ce987c5b708549cef86a6913ed91475d61f00a374b43c5a0a9d8d6b9e15ad1"} Jan 20 17:36:20 crc kubenswrapper[4558]: I0120 17:36:20.406969 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/neutron-7749bf6d58-dwpbl" Jan 20 17:36:20 crc kubenswrapper[4558]: I0120 17:36:20.565883 4558 scope.go:117] "RemoveContainer" containerID="4777bd55f602e6282b42a942acc808f0478764354d0970617b2e6c1dddd4da85" Jan 20 17:36:20 crc kubenswrapper[4558]: I0120 17:36:20.859539 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack-kuttl-tests/placement-67cbb6f96d-89sm2" Jan 20 17:36:20 crc kubenswrapper[4558]: I0120 17:36:20.860550 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/placement-67cbb6f96d-89sm2" Jan 20 17:36:20 crc kubenswrapper[4558]: I0120 17:36:20.963615 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"dc94159e-846c-4207-a4e6-fc877335d179","Type":"ContainerStarted","Data":"e4b0f89bb58f0ef8658ad1ca88dceb67d6f84e237fdd51276dbf055a76577647"} Jan 20 17:36:20 crc kubenswrapper[4558]: I0120 17:36:20.966571 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"0def1343-9e4b-4f74-84bd-3212688b59ce","Type":"ContainerStarted","Data":"1c79880b61b7b00d3acd244cc4b46331c0b20f7998564a054c816dfa317eab5b"} Jan 20 17:36:20 crc kubenswrapper[4558]: I0120 17:36:20.967509 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:36:21 crc kubenswrapper[4558]: E0120 17:36:21.955695 4558 fsHandler.go:119] failed to collect filesystem stats - rootDiskErr: , extraDiskErr: could not stat "/var/log/pods/openstack-kuttl-tests_nova-cell1-conductor-0_9b9c1cf7-0cbb-4bb3-a764-8a8fa446c8a8/nova-cell1-conductor-conductor/0.log" to get inode usage: stat /var/log/pods/openstack-kuttl-tests_nova-cell1-conductor-0_9b9c1cf7-0cbb-4bb3-a764-8a8fa446c8a8/nova-cell1-conductor-conductor/0.log: no such file or directory Jan 20 17:36:22 crc kubenswrapper[4558]: I0120 17:36:22.616697 4558 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","podc9ec2559-f7c5-4318-9a3b-4544d222ff8e"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort podc9ec2559-f7c5-4318-9a3b-4544d222ff8e] : Timed out while waiting for systemd to remove kubepods-besteffort-podc9ec2559_f7c5_4318_9a3b_4544d222ff8e.slice" Jan 20 17:36:22 crc kubenswrapper[4558]: E0120 17:36:22.617038 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to delete cgroup paths for [kubepods besteffort podc9ec2559-f7c5-4318-9a3b-4544d222ff8e] : unable to destroy cgroup paths for cgroup [kubepods besteffort podc9ec2559-f7c5-4318-9a3b-4544d222ff8e] : Timed out while waiting for systemd to remove kubepods-besteffort-podc9ec2559_f7c5_4318_9a3b_4544d222ff8e.slice" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="c9ec2559-f7c5-4318-9a3b-4544d222ff8e" Jan 20 17:36:22 crc kubenswrapper[4558]: I0120 17:36:22.992018 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:36:22 crc kubenswrapper[4558]: I0120 17:36:22.993356 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"dc94159e-846c-4207-a4e6-fc877335d179","Type":"ContainerStarted","Data":"8d18756d85f771ea1a8e265733bd0f0de8bef3da799fcc5dac7436aa91d96d1b"} Jan 20 17:36:22 crc kubenswrapper[4558]: I0120 17:36:22.993396 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:36:23 crc kubenswrapper[4558]: I0120 17:36:23.001544 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/neutron-6d8f9956fd-j6jg4" Jan 20 17:36:23 crc kubenswrapper[4558]: I0120 17:36:23.052041 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=3.03642372 podStartE2EDuration="7.052015454s" podCreationTimestamp="2026-01-20 17:36:16 +0000 UTC" firstStartedPulling="2026-01-20 17:36:17.775878241 +0000 UTC m=+3271.536216208" lastFinishedPulling="2026-01-20 17:36:21.791469974 +0000 UTC m=+3275.551807942" observedRunningTime="2026-01-20 17:36:23.040528514 +0000 UTC m=+3276.800866471" watchObservedRunningTime="2026-01-20 17:36:23.052015454 +0000 UTC m=+3276.812353421" Jan 20 17:36:23 crc kubenswrapper[4558]: I0120 17:36:23.059582 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:36:23 crc kubenswrapper[4558]: I0120 17:36:23.069283 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:36:23 crc kubenswrapper[4558]: I0120 17:36:23.100012 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:36:23 crc kubenswrapper[4558]: E0120 17:36:23.100384 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6b873be-e034-4d85-b131-548eff8013ed" containerName="neutron-httpd" Jan 20 17:36:23 crc kubenswrapper[4558]: I0120 17:36:23.100400 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6b873be-e034-4d85-b131-548eff8013ed" containerName="neutron-httpd" Jan 20 17:36:23 crc kubenswrapper[4558]: E0120 17:36:23.100438 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6b873be-e034-4d85-b131-548eff8013ed" containerName="neutron-api" Jan 20 17:36:23 crc kubenswrapper[4558]: I0120 17:36:23.100443 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6b873be-e034-4d85-b131-548eff8013ed" containerName="neutron-api" Jan 20 17:36:23 crc kubenswrapper[4558]: I0120 17:36:23.100586 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b6b873be-e034-4d85-b131-548eff8013ed" containerName="neutron-httpd" Jan 20 17:36:23 crc kubenswrapper[4558]: I0120 17:36:23.100609 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b6b873be-e034-4d85-b131-548eff8013ed" containerName="neutron-api" Jan 20 17:36:23 crc kubenswrapper[4558]: I0120 17:36:23.101502 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:36:23 crc kubenswrapper[4558]: I0120 17:36:23.104416 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-public-svc" Jan 20 17:36:23 crc kubenswrapper[4558]: I0120 17:36:23.105132 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-external-config-data" Jan 20 17:36:23 crc kubenswrapper[4558]: I0120 17:36:23.115469 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:36:23 crc kubenswrapper[4558]: I0120 17:36:23.145715 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-5b4c8486cd-bh7qx"] Jan 20 17:36:23 crc kubenswrapper[4558]: I0120 17:36:23.146223 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-5b4c8486cd-bh7qx" podUID="89049a89-bdd9-4d74-bdc0-64002d4f4842" containerName="neutron-httpd" containerID="cri-o://ae7a97185cd9fbb38ff879e0bdbf67d1b5340be215aff23c59be1f3116c969cd" gracePeriod=30 Jan 20 17:36:23 crc kubenswrapper[4558]: I0120 17:36:23.146477 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-5b4c8486cd-bh7qx" podUID="89049a89-bdd9-4d74-bdc0-64002d4f4842" containerName="neutron-api" containerID="cri-o://ea1f141ab42d1923c6ad40bf5bb7a415db52b6566887f2c7238c3eb688cf92ea" gracePeriod=30 Jan 20 17:36:23 crc kubenswrapper[4558]: I0120 17:36:23.184007 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f19d5a0b-6d74-4b46-86a3-9381feb3f158-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"f19d5a0b-6d74-4b46-86a3-9381feb3f158\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:36:23 crc kubenswrapper[4558]: I0120 17:36:23.184042 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f19d5a0b-6d74-4b46-86a3-9381feb3f158-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"f19d5a0b-6d74-4b46-86a3-9381feb3f158\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:36:23 crc kubenswrapper[4558]: I0120 17:36:23.184068 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f19d5a0b-6d74-4b46-86a3-9381feb3f158-scripts\") pod \"glance-default-external-api-0\" (UID: \"f19d5a0b-6d74-4b46-86a3-9381feb3f158\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:36:23 crc kubenswrapper[4558]: I0120 17:36:23.184117 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f19d5a0b-6d74-4b46-86a3-9381feb3f158-config-data\") pod \"glance-default-external-api-0\" (UID: \"f19d5a0b-6d74-4b46-86a3-9381feb3f158\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:36:23 crc kubenswrapper[4558]: I0120 17:36:23.184146 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage16-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage16-crc\") pod \"glance-default-external-api-0\" (UID: \"f19d5a0b-6d74-4b46-86a3-9381feb3f158\") " 
pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:36:23 crc kubenswrapper[4558]: I0120 17:36:23.184203 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f19d5a0b-6d74-4b46-86a3-9381feb3f158-logs\") pod \"glance-default-external-api-0\" (UID: \"f19d5a0b-6d74-4b46-86a3-9381feb3f158\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:36:23 crc kubenswrapper[4558]: I0120 17:36:23.184246 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f19d5a0b-6d74-4b46-86a3-9381feb3f158-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"f19d5a0b-6d74-4b46-86a3-9381feb3f158\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:36:23 crc kubenswrapper[4558]: I0120 17:36:23.184271 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2f625\" (UniqueName: \"kubernetes.io/projected/f19d5a0b-6d74-4b46-86a3-9381feb3f158-kube-api-access-2f625\") pod \"glance-default-external-api-0\" (UID: \"f19d5a0b-6d74-4b46-86a3-9381feb3f158\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:36:23 crc kubenswrapper[4558]: I0120 17:36:23.285488 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f19d5a0b-6d74-4b46-86a3-9381feb3f158-config-data\") pod \"glance-default-external-api-0\" (UID: \"f19d5a0b-6d74-4b46-86a3-9381feb3f158\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:36:23 crc kubenswrapper[4558]: I0120 17:36:23.285601 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage16-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage16-crc\") pod \"glance-default-external-api-0\" (UID: \"f19d5a0b-6d74-4b46-86a3-9381feb3f158\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:36:23 crc kubenswrapper[4558]: I0120 17:36:23.285637 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f19d5a0b-6d74-4b46-86a3-9381feb3f158-logs\") pod \"glance-default-external-api-0\" (UID: \"f19d5a0b-6d74-4b46-86a3-9381feb3f158\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:36:23 crc kubenswrapper[4558]: I0120 17:36:23.285690 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f19d5a0b-6d74-4b46-86a3-9381feb3f158-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"f19d5a0b-6d74-4b46-86a3-9381feb3f158\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:36:23 crc kubenswrapper[4558]: I0120 17:36:23.285735 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2f625\" (UniqueName: \"kubernetes.io/projected/f19d5a0b-6d74-4b46-86a3-9381feb3f158-kube-api-access-2f625\") pod \"glance-default-external-api-0\" (UID: \"f19d5a0b-6d74-4b46-86a3-9381feb3f158\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:36:23 crc kubenswrapper[4558]: I0120 17:36:23.285804 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f19d5a0b-6d74-4b46-86a3-9381feb3f158-combined-ca-bundle\") pod 
\"glance-default-external-api-0\" (UID: \"f19d5a0b-6d74-4b46-86a3-9381feb3f158\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:36:23 crc kubenswrapper[4558]: I0120 17:36:23.285824 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f19d5a0b-6d74-4b46-86a3-9381feb3f158-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"f19d5a0b-6d74-4b46-86a3-9381feb3f158\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:36:23 crc kubenswrapper[4558]: I0120 17:36:23.285857 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f19d5a0b-6d74-4b46-86a3-9381feb3f158-scripts\") pod \"glance-default-external-api-0\" (UID: \"f19d5a0b-6d74-4b46-86a3-9381feb3f158\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:36:23 crc kubenswrapper[4558]: I0120 17:36:23.286507 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f19d5a0b-6d74-4b46-86a3-9381feb3f158-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"f19d5a0b-6d74-4b46-86a3-9381feb3f158\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:36:23 crc kubenswrapper[4558]: I0120 17:36:23.286535 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage16-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage16-crc\") pod \"glance-default-external-api-0\" (UID: \"f19d5a0b-6d74-4b46-86a3-9381feb3f158\") device mount path \"/mnt/openstack/pv16\"" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:36:23 crc kubenswrapper[4558]: I0120 17:36:23.286734 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f19d5a0b-6d74-4b46-86a3-9381feb3f158-logs\") pod \"glance-default-external-api-0\" (UID: \"f19d5a0b-6d74-4b46-86a3-9381feb3f158\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:36:23 crc kubenswrapper[4558]: I0120 17:36:23.290617 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f19d5a0b-6d74-4b46-86a3-9381feb3f158-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"f19d5a0b-6d74-4b46-86a3-9381feb3f158\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:36:23 crc kubenswrapper[4558]: I0120 17:36:23.291895 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f19d5a0b-6d74-4b46-86a3-9381feb3f158-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"f19d5a0b-6d74-4b46-86a3-9381feb3f158\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:36:23 crc kubenswrapper[4558]: I0120 17:36:23.291979 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f19d5a0b-6d74-4b46-86a3-9381feb3f158-scripts\") pod \"glance-default-external-api-0\" (UID: \"f19d5a0b-6d74-4b46-86a3-9381feb3f158\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:36:23 crc kubenswrapper[4558]: I0120 17:36:23.292669 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f19d5a0b-6d74-4b46-86a3-9381feb3f158-config-data\") pod 
\"glance-default-external-api-0\" (UID: \"f19d5a0b-6d74-4b46-86a3-9381feb3f158\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:36:23 crc kubenswrapper[4558]: I0120 17:36:23.304232 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2f625\" (UniqueName: \"kubernetes.io/projected/f19d5a0b-6d74-4b46-86a3-9381feb3f158-kube-api-access-2f625\") pod \"glance-default-external-api-0\" (UID: \"f19d5a0b-6d74-4b46-86a3-9381feb3f158\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:36:23 crc kubenswrapper[4558]: I0120 17:36:23.312924 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage16-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage16-crc\") pod \"glance-default-external-api-0\" (UID: \"f19d5a0b-6d74-4b46-86a3-9381feb3f158\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:36:23 crc kubenswrapper[4558]: I0120 17:36:23.422615 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:36:23 crc kubenswrapper[4558]: I0120 17:36:23.864186 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:36:24 crc kubenswrapper[4558]: I0120 17:36:24.006735 4558 generic.go:334] "Generic (PLEG): container finished" podID="89049a89-bdd9-4d74-bdc0-64002d4f4842" containerID="ae7a97185cd9fbb38ff879e0bdbf67d1b5340be215aff23c59be1f3116c969cd" exitCode=0 Jan 20 17:36:24 crc kubenswrapper[4558]: I0120 17:36:24.006812 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-5b4c8486cd-bh7qx" event={"ID":"89049a89-bdd9-4d74-bdc0-64002d4f4842","Type":"ContainerDied","Data":"ae7a97185cd9fbb38ff879e0bdbf67d1b5340be215aff23c59be1f3116c969cd"} Jan 20 17:36:24 crc kubenswrapper[4558]: I0120 17:36:24.008694 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"f19d5a0b-6d74-4b46-86a3-9381feb3f158","Type":"ContainerStarted","Data":"d4e2f679e54a723449be5a222ad844732a4eba5485e616d791e7b62e71774850"} Jan 20 17:36:24 crc kubenswrapper[4558]: I0120 17:36:24.286004 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/keystone-59b8cc448d-b9wnn" Jan 20 17:36:24 crc kubenswrapper[4558]: I0120 17:36:24.365808 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-c7b8d4458-7ddjq"] Jan 20 17:36:24 crc kubenswrapper[4558]: I0120 17:36:24.366047 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/keystone-c7b8d4458-7ddjq" podUID="ff846ca3-9970-4311-915b-e2ad95aa07b8" containerName="keystone-api" containerID="cri-o://82d446802d7e8d621542fbedc2f7511c1da11e01b6b157d8ef90209c4965a1a1" gracePeriod=30 Jan 20 17:36:24 crc kubenswrapper[4558]: I0120 17:36:24.581536 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c9ec2559-f7c5-4318-9a3b-4544d222ff8e" path="/var/lib/kubelet/pods/c9ec2559-f7c5-4318-9a3b-4544d222ff8e/volumes" Jan 20 17:36:24 crc kubenswrapper[4558]: I0120 17:36:24.676288 4558 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","pod5a046f3a-bea5-4aea-8081-1c5994ecdbd9"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort pod5a046f3a-bea5-4aea-8081-1c5994ecdbd9] : Timed out while waiting for systemd to remove 
kubepods-besteffort-pod5a046f3a_bea5_4aea_8081_1c5994ecdbd9.slice" Jan 20 17:36:24 crc kubenswrapper[4558]: E0120 17:36:24.676341 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to delete cgroup paths for [kubepods besteffort pod5a046f3a-bea5-4aea-8081-1c5994ecdbd9] : unable to destroy cgroup paths for cgroup [kubepods besteffort pod5a046f3a-bea5-4aea-8081-1c5994ecdbd9] : Timed out while waiting for systemd to remove kubepods-besteffort-pod5a046f3a_bea5_4aea_8081_1c5994ecdbd9.slice" pod="openstack-kuttl-tests/nova-api-0" podUID="5a046f3a-bea5-4aea-8081-1c5994ecdbd9" Jan 20 17:36:24 crc kubenswrapper[4558]: I0120 17:36:24.689804 4558 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","pod099193f1-dcf1-4c0a-beeb-fac5f0824cb8"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort pod099193f1-dcf1-4c0a-beeb-fac5f0824cb8] : Timed out while waiting for systemd to remove kubepods-besteffort-pod099193f1_dcf1_4c0a_beeb_fac5f0824cb8.slice" Jan 20 17:36:24 crc kubenswrapper[4558]: E0120 17:36:24.689860 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to delete cgroup paths for [kubepods besteffort pod099193f1-dcf1-4c0a-beeb-fac5f0824cb8] : unable to destroy cgroup paths for cgroup [kubepods besteffort pod099193f1-dcf1-4c0a-beeb-fac5f0824cb8] : Timed out while waiting for systemd to remove kubepods-besteffort-pod099193f1_dcf1_4c0a_beeb_fac5f0824cb8.slice" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" podUID="099193f1-dcf1-4c0a-beeb-fac5f0824cb8" Jan 20 17:36:24 crc kubenswrapper[4558]: I0120 17:36:24.755771 4558 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","pod5391541f-5057-4d20-ae1e-bbc88e6b33a4"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort pod5391541f-5057-4d20-ae1e-bbc88e6b33a4] : Timed out while waiting for systemd to remove kubepods-besteffort-pod5391541f_5057_4d20_ae1e_bbc88e6b33a4.slice" Jan 20 17:36:24 crc kubenswrapper[4558]: E0120 17:36:24.755819 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to delete cgroup paths for [kubepods besteffort pod5391541f-5057-4d20-ae1e-bbc88e6b33a4] : unable to destroy cgroup paths for cgroup [kubepods besteffort pod5391541f-5057-4d20-ae1e-bbc88e6b33a4] : Timed out while waiting for systemd to remove kubepods-besteffort-pod5391541f_5057_4d20_ae1e_bbc88e6b33a4.slice" pod="openstack-kuttl-tests/memcached-0" podUID="5391541f-5057-4d20-ae1e-bbc88e6b33a4" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.020339 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.020597 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"f19d5a0b-6d74-4b46-86a3-9381feb3f158","Type":"ContainerStarted","Data":"caebb498b12b9c8930363d5154873da3243854fe3f120dfec66b79198ba1728b"} Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.020642 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"f19d5a0b-6d74-4b46-86a3-9381feb3f158","Type":"ContainerStarted","Data":"32a0b1b4cb9343de4b8cd2f43bcb90496f890496ba1d6d3239983dac00184a6b"} Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.020678 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.020705 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.039844 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-external-api-0" podStartSLOduration=2.03981278 podStartE2EDuration="2.03981278s" podCreationTimestamp="2026-01-20 17:36:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:36:25.036398821 +0000 UTC m=+3278.796736789" watchObservedRunningTime="2026-01-20 17:36:25.03981278 +0000 UTC m=+3278.800150738" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.084414 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.122575 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.136983 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.143826 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.150002 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.151691 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.153743 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"memcached-config-data" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.153895 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"memcached-memcached-dockercfg-8bzhf" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.158425 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.163894 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-memcached-svc" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.167620 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.174628 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.177157 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.178811 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-internal-svc" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.179832 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-public-svc" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.180062 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-api-config-data" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.181966 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.191412 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.193081 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.194680 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-novncproxy-cell1-public-svc" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.194910 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-novncproxy-cell1-vencrypt" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.198706 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-novncproxy-config-data" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.202673 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.212925 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.334494 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9gj5l\" (UniqueName: \"kubernetes.io/projected/b18f4fcf-eaae-401f-99ec-7b130ad8a6c1-kube-api-access-9gj5l\") pod \"memcached-0\" (UID: \"b18f4fcf-eaae-401f-99ec-7b130ad8a6c1\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.334544 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dzwgx\" (UniqueName: \"kubernetes.io/projected/906beedc-3fa2-4d6e-a6e8-485ca2fb1082-kube-api-access-dzwgx\") pod \"nova-api-0\" (UID: \"906beedc-3fa2-4d6e-a6e8-485ca2fb1082\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.334581 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b18f4fcf-eaae-401f-99ec-7b130ad8a6c1-combined-ca-bundle\") pod \"memcached-0\" (UID: \"b18f4fcf-eaae-401f-99ec-7b130ad8a6c1\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.334611 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/906beedc-3fa2-4d6e-a6e8-485ca2fb1082-combined-ca-bundle\") pod \"nova-api-0\" (UID: 
\"906beedc-3fa2-4d6e-a6e8-485ca2fb1082\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.334631 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mskc9\" (UniqueName: \"kubernetes.io/projected/4fd9bebc-9868-4970-bc1f-d047286980d0-kube-api-access-mskc9\") pod \"nova-cell1-novncproxy-0\" (UID: \"4fd9bebc-9868-4970-bc1f-d047286980d0\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.334655 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/b18f4fcf-eaae-401f-99ec-7b130ad8a6c1-memcached-tls-certs\") pod \"memcached-0\" (UID: \"b18f4fcf-eaae-401f-99ec-7b130ad8a6c1\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.334678 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/b18f4fcf-eaae-401f-99ec-7b130ad8a6c1-kolla-config\") pod \"memcached-0\" (UID: \"b18f4fcf-eaae-401f-99ec-7b130ad8a6c1\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.334712 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4fd9bebc-9868-4970-bc1f-d047286980d0-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"4fd9bebc-9868-4970-bc1f-d047286980d0\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.334744 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/4fd9bebc-9868-4970-bc1f-d047286980d0-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"4fd9bebc-9868-4970-bc1f-d047286980d0\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.334765 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/906beedc-3fa2-4d6e-a6e8-485ca2fb1082-internal-tls-certs\") pod \"nova-api-0\" (UID: \"906beedc-3fa2-4d6e-a6e8-485ca2fb1082\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.334787 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/906beedc-3fa2-4d6e-a6e8-485ca2fb1082-public-tls-certs\") pod \"nova-api-0\" (UID: \"906beedc-3fa2-4d6e-a6e8-485ca2fb1082\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.334811 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fd9bebc-9868-4970-bc1f-d047286980d0-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"4fd9bebc-9868-4970-bc1f-d047286980d0\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.334826 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/configmap/b18f4fcf-eaae-401f-99ec-7b130ad8a6c1-config-data\") pod \"memcached-0\" (UID: \"b18f4fcf-eaae-401f-99ec-7b130ad8a6c1\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.334860 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/906beedc-3fa2-4d6e-a6e8-485ca2fb1082-config-data\") pod \"nova-api-0\" (UID: \"906beedc-3fa2-4d6e-a6e8-485ca2fb1082\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.334883 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/906beedc-3fa2-4d6e-a6e8-485ca2fb1082-logs\") pod \"nova-api-0\" (UID: \"906beedc-3fa2-4d6e-a6e8-485ca2fb1082\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.334923 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/4fd9bebc-9868-4970-bc1f-d047286980d0-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"4fd9bebc-9868-4970-bc1f-d047286980d0\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.436195 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/906beedc-3fa2-4d6e-a6e8-485ca2fb1082-logs\") pod \"nova-api-0\" (UID: \"906beedc-3fa2-4d6e-a6e8-485ca2fb1082\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.436261 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/4fd9bebc-9868-4970-bc1f-d047286980d0-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"4fd9bebc-9868-4970-bc1f-d047286980d0\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.436320 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9gj5l\" (UniqueName: \"kubernetes.io/projected/b18f4fcf-eaae-401f-99ec-7b130ad8a6c1-kube-api-access-9gj5l\") pod \"memcached-0\" (UID: \"b18f4fcf-eaae-401f-99ec-7b130ad8a6c1\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.436354 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dzwgx\" (UniqueName: \"kubernetes.io/projected/906beedc-3fa2-4d6e-a6e8-485ca2fb1082-kube-api-access-dzwgx\") pod \"nova-api-0\" (UID: \"906beedc-3fa2-4d6e-a6e8-485ca2fb1082\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.436386 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b18f4fcf-eaae-401f-99ec-7b130ad8a6c1-combined-ca-bundle\") pod \"memcached-0\" (UID: \"b18f4fcf-eaae-401f-99ec-7b130ad8a6c1\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.436413 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/906beedc-3fa2-4d6e-a6e8-485ca2fb1082-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"906beedc-3fa2-4d6e-a6e8-485ca2fb1082\") " 
pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.436430 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mskc9\" (UniqueName: \"kubernetes.io/projected/4fd9bebc-9868-4970-bc1f-d047286980d0-kube-api-access-mskc9\") pod \"nova-cell1-novncproxy-0\" (UID: \"4fd9bebc-9868-4970-bc1f-d047286980d0\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.436449 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/b18f4fcf-eaae-401f-99ec-7b130ad8a6c1-memcached-tls-certs\") pod \"memcached-0\" (UID: \"b18f4fcf-eaae-401f-99ec-7b130ad8a6c1\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.436473 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/b18f4fcf-eaae-401f-99ec-7b130ad8a6c1-kolla-config\") pod \"memcached-0\" (UID: \"b18f4fcf-eaae-401f-99ec-7b130ad8a6c1\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.436503 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4fd9bebc-9868-4970-bc1f-d047286980d0-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"4fd9bebc-9868-4970-bc1f-d047286980d0\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.436530 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/4fd9bebc-9868-4970-bc1f-d047286980d0-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"4fd9bebc-9868-4970-bc1f-d047286980d0\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.436552 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/906beedc-3fa2-4d6e-a6e8-485ca2fb1082-internal-tls-certs\") pod \"nova-api-0\" (UID: \"906beedc-3fa2-4d6e-a6e8-485ca2fb1082\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.436587 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/906beedc-3fa2-4d6e-a6e8-485ca2fb1082-public-tls-certs\") pod \"nova-api-0\" (UID: \"906beedc-3fa2-4d6e-a6e8-485ca2fb1082\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.436610 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b18f4fcf-eaae-401f-99ec-7b130ad8a6c1-config-data\") pod \"memcached-0\" (UID: \"b18f4fcf-eaae-401f-99ec-7b130ad8a6c1\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.436631 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fd9bebc-9868-4970-bc1f-d047286980d0-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"4fd9bebc-9868-4970-bc1f-d047286980d0\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.436652 4558 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/906beedc-3fa2-4d6e-a6e8-485ca2fb1082-config-data\") pod \"nova-api-0\" (UID: \"906beedc-3fa2-4d6e-a6e8-485ca2fb1082\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.437717 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/906beedc-3fa2-4d6e-a6e8-485ca2fb1082-logs\") pod \"nova-api-0\" (UID: \"906beedc-3fa2-4d6e-a6e8-485ca2fb1082\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.439639 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/b18f4fcf-eaae-401f-99ec-7b130ad8a6c1-kolla-config\") pod \"memcached-0\" (UID: \"b18f4fcf-eaae-401f-99ec-7b130ad8a6c1\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.440061 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b18f4fcf-eaae-401f-99ec-7b130ad8a6c1-config-data\") pod \"memcached-0\" (UID: \"b18f4fcf-eaae-401f-99ec-7b130ad8a6c1\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.444549 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/906beedc-3fa2-4d6e-a6e8-485ca2fb1082-internal-tls-certs\") pod \"nova-api-0\" (UID: \"906beedc-3fa2-4d6e-a6e8-485ca2fb1082\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.445412 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/b18f4fcf-eaae-401f-99ec-7b130ad8a6c1-memcached-tls-certs\") pod \"memcached-0\" (UID: \"b18f4fcf-eaae-401f-99ec-7b130ad8a6c1\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.445967 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b18f4fcf-eaae-401f-99ec-7b130ad8a6c1-combined-ca-bundle\") pod \"memcached-0\" (UID: \"b18f4fcf-eaae-401f-99ec-7b130ad8a6c1\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.446026 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/906beedc-3fa2-4d6e-a6e8-485ca2fb1082-config-data\") pod \"nova-api-0\" (UID: \"906beedc-3fa2-4d6e-a6e8-485ca2fb1082\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.446631 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/906beedc-3fa2-4d6e-a6e8-485ca2fb1082-public-tls-certs\") pod \"nova-api-0\" (UID: \"906beedc-3fa2-4d6e-a6e8-485ca2fb1082\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.446745 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/4fd9bebc-9868-4970-bc1f-d047286980d0-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"4fd9bebc-9868-4970-bc1f-d047286980d0\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:36:25 crc 
kubenswrapper[4558]: I0120 17:36:25.447287 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fd9bebc-9868-4970-bc1f-d047286980d0-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"4fd9bebc-9868-4970-bc1f-d047286980d0\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.449327 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/906beedc-3fa2-4d6e-a6e8-485ca2fb1082-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"906beedc-3fa2-4d6e-a6e8-485ca2fb1082\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.454018 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/4fd9bebc-9868-4970-bc1f-d047286980d0-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"4fd9bebc-9868-4970-bc1f-d047286980d0\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.454521 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mskc9\" (UniqueName: \"kubernetes.io/projected/4fd9bebc-9868-4970-bc1f-d047286980d0-kube-api-access-mskc9\") pod \"nova-cell1-novncproxy-0\" (UID: \"4fd9bebc-9868-4970-bc1f-d047286980d0\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.459749 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4fd9bebc-9868-4970-bc1f-d047286980d0-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"4fd9bebc-9868-4970-bc1f-d047286980d0\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.466823 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9gj5l\" (UniqueName: \"kubernetes.io/projected/b18f4fcf-eaae-401f-99ec-7b130ad8a6c1-kube-api-access-9gj5l\") pod \"memcached-0\" (UID: \"b18f4fcf-eaae-401f-99ec-7b130ad8a6c1\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.468185 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dzwgx\" (UniqueName: \"kubernetes.io/projected/906beedc-3fa2-4d6e-a6e8-485ca2fb1082-kube-api-access-dzwgx\") pod \"nova-api-0\" (UID: \"906beedc-3fa2-4d6e-a6e8-485ca2fb1082\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.493821 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.506910 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.766743 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.884587 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-67cbb6f96d-89sm2" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.938492 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_neutron-7749bf6d58-dwpbl_9bca6361-b1d7-4953-b4bd-2bbda0f90581/neutron-httpd/0.log" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.939084 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_neutron-7749bf6d58-dwpbl_9bca6361-b1d7-4953-b4bd-2bbda0f90581/neutron-api/0.log" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.939185 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-7749bf6d58-dwpbl" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.950974 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9bca6361-b1d7-4953-b4bd-2bbda0f90581-internal-tls-certs\") pod \"9bca6361-b1d7-4953-b4bd-2bbda0f90581\" (UID: \"9bca6361-b1d7-4953-b4bd-2bbda0f90581\") " Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.951314 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/9bca6361-b1d7-4953-b4bd-2bbda0f90581-httpd-config\") pod \"9bca6361-b1d7-4953-b4bd-2bbda0f90581\" (UID: \"9bca6361-b1d7-4953-b4bd-2bbda0f90581\") " Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.951361 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/9bca6361-b1d7-4953-b4bd-2bbda0f90581-ovndb-tls-certs\") pod \"9bca6361-b1d7-4953-b4bd-2bbda0f90581\" (UID: \"9bca6361-b1d7-4953-b4bd-2bbda0f90581\") " Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.951445 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kdhw5\" (UniqueName: \"kubernetes.io/projected/9bca6361-b1d7-4953-b4bd-2bbda0f90581-kube-api-access-kdhw5\") pod \"9bca6361-b1d7-4953-b4bd-2bbda0f90581\" (UID: \"9bca6361-b1d7-4953-b4bd-2bbda0f90581\") " Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.951484 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcxct\" (UniqueName: \"kubernetes.io/projected/3d958c0e-f259-45a7-9c66-9d2b82c92980-kube-api-access-fcxct\") pod \"3d958c0e-f259-45a7-9c66-9d2b82c92980\" (UID: \"3d958c0e-f259-45a7-9c66-9d2b82c92980\") " Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.951714 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d958c0e-f259-45a7-9c66-9d2b82c92980-combined-ca-bundle\") pod \"3d958c0e-f259-45a7-9c66-9d2b82c92980\" (UID: \"3d958c0e-f259-45a7-9c66-9d2b82c92980\") " Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.951764 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3d958c0e-f259-45a7-9c66-9d2b82c92980-logs\") pod \"3d958c0e-f259-45a7-9c66-9d2b82c92980\" (UID: \"3d958c0e-f259-45a7-9c66-9d2b82c92980\") " Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.961576 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9bca6361-b1d7-4953-b4bd-2bbda0f90581-combined-ca-bundle\") pod \"9bca6361-b1d7-4953-b4bd-2bbda0f90581\" (UID: 
\"9bca6361-b1d7-4953-b4bd-2bbda0f90581\") " Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.952202 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3d958c0e-f259-45a7-9c66-9d2b82c92980-logs" (OuterVolumeSpecName: "logs") pod "3d958c0e-f259-45a7-9c66-9d2b82c92980" (UID: "3d958c0e-f259-45a7-9c66-9d2b82c92980"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.961627 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3d958c0e-f259-45a7-9c66-9d2b82c92980-scripts\") pod \"3d958c0e-f259-45a7-9c66-9d2b82c92980\" (UID: \"3d958c0e-f259-45a7-9c66-9d2b82c92980\") " Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.961775 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3d958c0e-f259-45a7-9c66-9d2b82c92980-public-tls-certs\") pod \"3d958c0e-f259-45a7-9c66-9d2b82c92980\" (UID: \"3d958c0e-f259-45a7-9c66-9d2b82c92980\") " Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.961831 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9bca6361-b1d7-4953-b4bd-2bbda0f90581-public-tls-certs\") pod \"9bca6361-b1d7-4953-b4bd-2bbda0f90581\" (UID: \"9bca6361-b1d7-4953-b4bd-2bbda0f90581\") " Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.961880 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-7d7dbcb65b-ttg5w" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.962916 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9bca6361-b1d7-4953-b4bd-2bbda0f90581-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "9bca6361-b1d7-4953-b4bd-2bbda0f90581" (UID: "9bca6361-b1d7-4953-b4bd-2bbda0f90581"). InnerVolumeSpecName "httpd-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.961887 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d958c0e-f259-45a7-9c66-9d2b82c92980-config-data\") pod \"3d958c0e-f259-45a7-9c66-9d2b82c92980\" (UID: \"3d958c0e-f259-45a7-9c66-9d2b82c92980\") " Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.963313 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/9bca6361-b1d7-4953-b4bd-2bbda0f90581-config\") pod \"9bca6361-b1d7-4953-b4bd-2bbda0f90581\" (UID: \"9bca6361-b1d7-4953-b4bd-2bbda0f90581\") " Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.963426 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3d958c0e-f259-45a7-9c66-9d2b82c92980-internal-tls-certs\") pod \"3d958c0e-f259-45a7-9c66-9d2b82c92980\" (UID: \"3d958c0e-f259-45a7-9c66-9d2b82c92980\") " Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.964862 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/9bca6361-b1d7-4953-b4bd-2bbda0f90581-httpd-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.964952 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3d958c0e-f259-45a7-9c66-9d2b82c92980-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.980707 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3d958c0e-f259-45a7-9c66-9d2b82c92980-kube-api-access-fcxct" (OuterVolumeSpecName: "kube-api-access-fcxct") pod "3d958c0e-f259-45a7-9c66-9d2b82c92980" (UID: "3d958c0e-f259-45a7-9c66-9d2b82c92980"). InnerVolumeSpecName "kube-api-access-fcxct". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.981753 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9bca6361-b1d7-4953-b4bd-2bbda0f90581-kube-api-access-kdhw5" (OuterVolumeSpecName: "kube-api-access-kdhw5") pod "9bca6361-b1d7-4953-b4bd-2bbda0f90581" (UID: "9bca6361-b1d7-4953-b4bd-2bbda0f90581"). InnerVolumeSpecName "kube-api-access-kdhw5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:36:25 crc kubenswrapper[4558]: I0120 17:36:25.997750 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3d958c0e-f259-45a7-9c66-9d2b82c92980-scripts" (OuterVolumeSpecName: "scripts") pod "3d958c0e-f259-45a7-9c66-9d2b82c92980" (UID: "3d958c0e-f259-45a7-9c66-9d2b82c92980"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.029789 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_neutron-7749bf6d58-dwpbl_9bca6361-b1d7-4953-b4bd-2bbda0f90581/neutron-httpd/0.log" Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.030243 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_neutron-7749bf6d58-dwpbl_9bca6361-b1d7-4953-b4bd-2bbda0f90581/neutron-api/0.log" Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.030289 4558 generic.go:334] "Generic (PLEG): container finished" podID="9bca6361-b1d7-4953-b4bd-2bbda0f90581" containerID="f6f5a5e0be8e181ac0a6e571683bbf4717d8a0dc273866a4b41c89d9befcf017" exitCode=137 Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.030304 4558 generic.go:334] "Generic (PLEG): container finished" podID="9bca6361-b1d7-4953-b4bd-2bbda0f90581" containerID="1371ec8f64533ee2b1ef24267d239bc9ffc81022e42aaedff8dc1abab24b701a" exitCode=137 Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.030362 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-7749bf6d58-dwpbl" event={"ID":"9bca6361-b1d7-4953-b4bd-2bbda0f90581","Type":"ContainerDied","Data":"f6f5a5e0be8e181ac0a6e571683bbf4717d8a0dc273866a4b41c89d9befcf017"} Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.030392 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-7749bf6d58-dwpbl" event={"ID":"9bca6361-b1d7-4953-b4bd-2bbda0f90581","Type":"ContainerDied","Data":"1371ec8f64533ee2b1ef24267d239bc9ffc81022e42aaedff8dc1abab24b701a"} Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.030420 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-7749bf6d58-dwpbl" event={"ID":"9bca6361-b1d7-4953-b4bd-2bbda0f90581","Type":"ContainerDied","Data":"91f8246f419fbaf90740183bfaf7574ba3d21f60910d06ed9ac2bc540e17f90a"} Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.030437 4558 scope.go:117] "RemoveContainer" containerID="f6f5a5e0be8e181ac0a6e571683bbf4717d8a0dc273866a4b41c89d9befcf017" Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.030667 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-7749bf6d58-dwpbl" Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.032555 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3d958c0e-f259-45a7-9c66-9d2b82c92980-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3d958c0e-f259-45a7-9c66-9d2b82c92980" (UID: "3d958c0e-f259-45a7-9c66-9d2b82c92980"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.037839 4558 generic.go:334] "Generic (PLEG): container finished" podID="1ab0a776-6551-4ef8-bd41-0ef0e58d892d" containerID="14383d0552e72e8dcba897054fdb5c936fabfa390440e34f12f122f7b98f85e9" exitCode=137 Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.037912 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-7d7dbcb65b-ttg5w" Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.037932 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-7d7dbcb65b-ttg5w" event={"ID":"1ab0a776-6551-4ef8-bd41-0ef0e58d892d","Type":"ContainerDied","Data":"14383d0552e72e8dcba897054fdb5c936fabfa390440e34f12f122f7b98f85e9"} Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.037968 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-7d7dbcb65b-ttg5w" event={"ID":"1ab0a776-6551-4ef8-bd41-0ef0e58d892d","Type":"ContainerDied","Data":"489226669dfe047f0a2a36de4e641f09811f99e9a771910dbd05ac7a99055622"} Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.041554 4558 generic.go:334] "Generic (PLEG): container finished" podID="3d958c0e-f259-45a7-9c66-9d2b82c92980" containerID="f1f72a7cad9a8e9ebdf3bd8dc8027d327089333501c70a43397912c9f5f104c7" exitCode=137 Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.042067 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-67cbb6f96d-89sm2" event={"ID":"3d958c0e-f259-45a7-9c66-9d2b82c92980","Type":"ContainerDied","Data":"f1f72a7cad9a8e9ebdf3bd8dc8027d327089333501c70a43397912c9f5f104c7"} Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.042115 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-67cbb6f96d-89sm2" Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.042137 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-67cbb6f96d-89sm2" event={"ID":"3d958c0e-f259-45a7-9c66-9d2b82c92980","Type":"ContainerDied","Data":"ee30ee6d1f48b7280d7892c450e3c16ef850ba6629614ab41753839c5eba2213"} Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.066385 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1ab0a776-6551-4ef8-bd41-0ef0e58d892d-config-data-custom\") pod \"1ab0a776-6551-4ef8-bd41-0ef0e58d892d\" (UID: \"1ab0a776-6551-4ef8-bd41-0ef0e58d892d\") " Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.066507 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1ab0a776-6551-4ef8-bd41-0ef0e58d892d-logs\") pod \"1ab0a776-6551-4ef8-bd41-0ef0e58d892d\" (UID: \"1ab0a776-6551-4ef8-bd41-0ef0e58d892d\") " Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.066568 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1ab0a776-6551-4ef8-bd41-0ef0e58d892d-config-data\") pod \"1ab0a776-6551-4ef8-bd41-0ef0e58d892d\" (UID: \"1ab0a776-6551-4ef8-bd41-0ef0e58d892d\") " Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.066608 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ab0a776-6551-4ef8-bd41-0ef0e58d892d-combined-ca-bundle\") pod \"1ab0a776-6551-4ef8-bd41-0ef0e58d892d\" (UID: \"1ab0a776-6551-4ef8-bd41-0ef0e58d892d\") " Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.066662 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v44jg\" (UniqueName: \"kubernetes.io/projected/1ab0a776-6551-4ef8-bd41-0ef0e58d892d-kube-api-access-v44jg\") pod 
\"1ab0a776-6551-4ef8-bd41-0ef0e58d892d\" (UID: \"1ab0a776-6551-4ef8-bd41-0ef0e58d892d\") " Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.067470 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kdhw5\" (UniqueName: \"kubernetes.io/projected/9bca6361-b1d7-4953-b4bd-2bbda0f90581-kube-api-access-kdhw5\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.067484 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcxct\" (UniqueName: \"kubernetes.io/projected/3d958c0e-f259-45a7-9c66-9d2b82c92980-kube-api-access-fcxct\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.067494 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d958c0e-f259-45a7-9c66-9d2b82c92980-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.067505 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3d958c0e-f259-45a7-9c66-9d2b82c92980-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.068116 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9bca6361-b1d7-4953-b4bd-2bbda0f90581-config" (OuterVolumeSpecName: "config") pod "9bca6361-b1d7-4953-b4bd-2bbda0f90581" (UID: "9bca6361-b1d7-4953-b4bd-2bbda0f90581"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.069883 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1ab0a776-6551-4ef8-bd41-0ef0e58d892d-logs" (OuterVolumeSpecName: "logs") pod "1ab0a776-6551-4ef8-bd41-0ef0e58d892d" (UID: "1ab0a776-6551-4ef8-bd41-0ef0e58d892d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.080285 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9bca6361-b1d7-4953-b4bd-2bbda0f90581-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "9bca6361-b1d7-4953-b4bd-2bbda0f90581" (UID: "9bca6361-b1d7-4953-b4bd-2bbda0f90581"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.080926 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1ab0a776-6551-4ef8-bd41-0ef0e58d892d-kube-api-access-v44jg" (OuterVolumeSpecName: "kube-api-access-v44jg") pod "1ab0a776-6551-4ef8-bd41-0ef0e58d892d" (UID: "1ab0a776-6551-4ef8-bd41-0ef0e58d892d"). InnerVolumeSpecName "kube-api-access-v44jg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.082583 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9bca6361-b1d7-4953-b4bd-2bbda0f90581-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9bca6361-b1d7-4953-b4bd-2bbda0f90581" (UID: "9bca6361-b1d7-4953-b4bd-2bbda0f90581"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.082774 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9bca6361-b1d7-4953-b4bd-2bbda0f90581-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "9bca6361-b1d7-4953-b4bd-2bbda0f90581" (UID: "9bca6361-b1d7-4953-b4bd-2bbda0f90581"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.084791 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ab0a776-6551-4ef8-bd41-0ef0e58d892d-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "1ab0a776-6551-4ef8-bd41-0ef0e58d892d" (UID: "1ab0a776-6551-4ef8-bd41-0ef0e58d892d"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.089545 4558 scope.go:117] "RemoveContainer" containerID="1371ec8f64533ee2b1ef24267d239bc9ffc81022e42aaedff8dc1abab24b701a" Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.090181 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.095197 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3d958c0e-f259-45a7-9c66-9d2b82c92980-config-data" (OuterVolumeSpecName: "config-data") pod "3d958c0e-f259-45a7-9c66-9d2b82c92980" (UID: "3d958c0e-f259-45a7-9c66-9d2b82c92980"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.102954 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9bca6361-b1d7-4953-b4bd-2bbda0f90581-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "9bca6361-b1d7-4953-b4bd-2bbda0f90581" (UID: "9bca6361-b1d7-4953-b4bd-2bbda0f90581"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.108516 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.110696 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ab0a776-6551-4ef8-bd41-0ef0e58d892d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1ab0a776-6551-4ef8-bd41-0ef0e58d892d" (UID: "1ab0a776-6551-4ef8-bd41-0ef0e58d892d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.111961 4558 scope.go:117] "RemoveContainer" containerID="f6f5a5e0be8e181ac0a6e571683bbf4717d8a0dc273866a4b41c89d9befcf017" Jan 20 17:36:26 crc kubenswrapper[4558]: E0120 17:36:26.112405 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f6f5a5e0be8e181ac0a6e571683bbf4717d8a0dc273866a4b41c89d9befcf017\": container with ID starting with f6f5a5e0be8e181ac0a6e571683bbf4717d8a0dc273866a4b41c89d9befcf017 not found: ID does not exist" containerID="f6f5a5e0be8e181ac0a6e571683bbf4717d8a0dc273866a4b41c89d9befcf017" Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.112454 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f6f5a5e0be8e181ac0a6e571683bbf4717d8a0dc273866a4b41c89d9befcf017"} err="failed to get container status \"f6f5a5e0be8e181ac0a6e571683bbf4717d8a0dc273866a4b41c89d9befcf017\": rpc error: code = NotFound desc = could not find container \"f6f5a5e0be8e181ac0a6e571683bbf4717d8a0dc273866a4b41c89d9befcf017\": container with ID starting with f6f5a5e0be8e181ac0a6e571683bbf4717d8a0dc273866a4b41c89d9befcf017 not found: ID does not exist" Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.112485 4558 scope.go:117] "RemoveContainer" containerID="1371ec8f64533ee2b1ef24267d239bc9ffc81022e42aaedff8dc1abab24b701a" Jan 20 17:36:26 crc kubenswrapper[4558]: E0120 17:36:26.112838 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1371ec8f64533ee2b1ef24267d239bc9ffc81022e42aaedff8dc1abab24b701a\": container with ID starting with 1371ec8f64533ee2b1ef24267d239bc9ffc81022e42aaedff8dc1abab24b701a not found: ID does not exist" containerID="1371ec8f64533ee2b1ef24267d239bc9ffc81022e42aaedff8dc1abab24b701a" Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.112881 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1371ec8f64533ee2b1ef24267d239bc9ffc81022e42aaedff8dc1abab24b701a"} err="failed to get container status \"1371ec8f64533ee2b1ef24267d239bc9ffc81022e42aaedff8dc1abab24b701a\": rpc error: code = NotFound desc = could not find container \"1371ec8f64533ee2b1ef24267d239bc9ffc81022e42aaedff8dc1abab24b701a\": container with ID starting with 1371ec8f64533ee2b1ef24267d239bc9ffc81022e42aaedff8dc1abab24b701a not found: ID does not exist" Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.112907 4558 scope.go:117] "RemoveContainer" containerID="f6f5a5e0be8e181ac0a6e571683bbf4717d8a0dc273866a4b41c89d9befcf017" Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.115237 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f6f5a5e0be8e181ac0a6e571683bbf4717d8a0dc273866a4b41c89d9befcf017"} err="failed to get container status \"f6f5a5e0be8e181ac0a6e571683bbf4717d8a0dc273866a4b41c89d9befcf017\": rpc error: code = NotFound desc = could not find container \"f6f5a5e0be8e181ac0a6e571683bbf4717d8a0dc273866a4b41c89d9befcf017\": container with ID starting with f6f5a5e0be8e181ac0a6e571683bbf4717d8a0dc273866a4b41c89d9befcf017 not found: ID does not exist" Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.115264 4558 scope.go:117] "RemoveContainer" containerID="1371ec8f64533ee2b1ef24267d239bc9ffc81022e42aaedff8dc1abab24b701a" Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.115524 4558 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1371ec8f64533ee2b1ef24267d239bc9ffc81022e42aaedff8dc1abab24b701a"} err="failed to get container status \"1371ec8f64533ee2b1ef24267d239bc9ffc81022e42aaedff8dc1abab24b701a\": rpc error: code = NotFound desc = could not find container \"1371ec8f64533ee2b1ef24267d239bc9ffc81022e42aaedff8dc1abab24b701a\": container with ID starting with 1371ec8f64533ee2b1ef24267d239bc9ffc81022e42aaedff8dc1abab24b701a not found: ID does not exist" Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.115555 4558 scope.go:117] "RemoveContainer" containerID="14383d0552e72e8dcba897054fdb5c936fabfa390440e34f12f122f7b98f85e9" Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.136395 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3d958c0e-f259-45a7-9c66-9d2b82c92980-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "3d958c0e-f259-45a7-9c66-9d2b82c92980" (UID: "3d958c0e-f259-45a7-9c66-9d2b82c92980"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.136919 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ab0a776-6551-4ef8-bd41-0ef0e58d892d-config-data" (OuterVolumeSpecName: "config-data") pod "1ab0a776-6551-4ef8-bd41-0ef0e58d892d" (UID: "1ab0a776-6551-4ef8-bd41-0ef0e58d892d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.138746 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3d958c0e-f259-45a7-9c66-9d2b82c92980-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "3d958c0e-f259-45a7-9c66-9d2b82c92980" (UID: "3d958c0e-f259-45a7-9c66-9d2b82c92980"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.139876 4558 scope.go:117] "RemoveContainer" containerID="5ceee282e729d804923fe9fb975a64436f6260d190a2814c82d6d5c4adb2113b" Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.159669 4558 scope.go:117] "RemoveContainer" containerID="14383d0552e72e8dcba897054fdb5c936fabfa390440e34f12f122f7b98f85e9" Jan 20 17:36:26 crc kubenswrapper[4558]: E0120 17:36:26.160001 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"14383d0552e72e8dcba897054fdb5c936fabfa390440e34f12f122f7b98f85e9\": container with ID starting with 14383d0552e72e8dcba897054fdb5c936fabfa390440e34f12f122f7b98f85e9 not found: ID does not exist" containerID="14383d0552e72e8dcba897054fdb5c936fabfa390440e34f12f122f7b98f85e9" Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.160034 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"14383d0552e72e8dcba897054fdb5c936fabfa390440e34f12f122f7b98f85e9"} err="failed to get container status \"14383d0552e72e8dcba897054fdb5c936fabfa390440e34f12f122f7b98f85e9\": rpc error: code = NotFound desc = could not find container \"14383d0552e72e8dcba897054fdb5c936fabfa390440e34f12f122f7b98f85e9\": container with ID starting with 14383d0552e72e8dcba897054fdb5c936fabfa390440e34f12f122f7b98f85e9 not found: ID does not exist" Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.160059 4558 scope.go:117] "RemoveContainer" containerID="5ceee282e729d804923fe9fb975a64436f6260d190a2814c82d6d5c4adb2113b" Jan 20 17:36:26 crc kubenswrapper[4558]: E0120 17:36:26.160312 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5ceee282e729d804923fe9fb975a64436f6260d190a2814c82d6d5c4adb2113b\": container with ID starting with 5ceee282e729d804923fe9fb975a64436f6260d190a2814c82d6d5c4adb2113b not found: ID does not exist" containerID="5ceee282e729d804923fe9fb975a64436f6260d190a2814c82d6d5c4adb2113b" Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.160336 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5ceee282e729d804923fe9fb975a64436f6260d190a2814c82d6d5c4adb2113b"} err="failed to get container status \"5ceee282e729d804923fe9fb975a64436f6260d190a2814c82d6d5c4adb2113b\": rpc error: code = NotFound desc = could not find container \"5ceee282e729d804923fe9fb975a64436f6260d190a2814c82d6d5c4adb2113b\": container with ID starting with 5ceee282e729d804923fe9fb975a64436f6260d190a2814c82d6d5c4adb2113b not found: ID does not exist" Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.160351 4558 scope.go:117] "RemoveContainer" containerID="f1f72a7cad9a8e9ebdf3bd8dc8027d327089333501c70a43397912c9f5f104c7" Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.170724 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3d958c0e-f259-45a7-9c66-9d2b82c92980-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.170753 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9bca6361-b1d7-4953-b4bd-2bbda0f90581-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.170766 4558 reconciler_common.go:293] "Volume detached for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/3d958c0e-f259-45a7-9c66-9d2b82c92980-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.170777 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/9bca6361-b1d7-4953-b4bd-2bbda0f90581-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.170790 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3d958c0e-f259-45a7-9c66-9d2b82c92980-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.170801 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1ab0a776-6551-4ef8-bd41-0ef0e58d892d-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.170812 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9bca6361-b1d7-4953-b4bd-2bbda0f90581-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.170824 4558 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/9bca6361-b1d7-4953-b4bd-2bbda0f90581-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.170832 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1ab0a776-6551-4ef8-bd41-0ef0e58d892d-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.170841 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1ab0a776-6551-4ef8-bd41-0ef0e58d892d-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.170859 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ab0a776-6551-4ef8-bd41-0ef0e58d892d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.170870 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v44jg\" (UniqueName: \"kubernetes.io/projected/1ab0a776-6551-4ef8-bd41-0ef0e58d892d-kube-api-access-v44jg\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.170881 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9bca6361-b1d7-4953-b4bd-2bbda0f90581-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.207785 4558 scope.go:117] "RemoveContainer" containerID="2f3a880c1014f060f55cc93ec2f22fdc0bc281cbb530d9eeb1a0031f82c5f171" Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.233030 4558 scope.go:117] "RemoveContainer" containerID="f1f72a7cad9a8e9ebdf3bd8dc8027d327089333501c70a43397912c9f5f104c7" Jan 20 17:36:26 crc kubenswrapper[4558]: E0120 17:36:26.235555 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f1f72a7cad9a8e9ebdf3bd8dc8027d327089333501c70a43397912c9f5f104c7\": container with ID starting with f1f72a7cad9a8e9ebdf3bd8dc8027d327089333501c70a43397912c9f5f104c7 not found: ID does not exist" 
containerID="f1f72a7cad9a8e9ebdf3bd8dc8027d327089333501c70a43397912c9f5f104c7" Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.235584 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f1f72a7cad9a8e9ebdf3bd8dc8027d327089333501c70a43397912c9f5f104c7"} err="failed to get container status \"f1f72a7cad9a8e9ebdf3bd8dc8027d327089333501c70a43397912c9f5f104c7\": rpc error: code = NotFound desc = could not find container \"f1f72a7cad9a8e9ebdf3bd8dc8027d327089333501c70a43397912c9f5f104c7\": container with ID starting with f1f72a7cad9a8e9ebdf3bd8dc8027d327089333501c70a43397912c9f5f104c7 not found: ID does not exist" Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.235602 4558 scope.go:117] "RemoveContainer" containerID="2f3a880c1014f060f55cc93ec2f22fdc0bc281cbb530d9eeb1a0031f82c5f171" Jan 20 17:36:26 crc kubenswrapper[4558]: E0120 17:36:26.235962 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2f3a880c1014f060f55cc93ec2f22fdc0bc281cbb530d9eeb1a0031f82c5f171\": container with ID starting with 2f3a880c1014f060f55cc93ec2f22fdc0bc281cbb530d9eeb1a0031f82c5f171 not found: ID does not exist" containerID="2f3a880c1014f060f55cc93ec2f22fdc0bc281cbb530d9eeb1a0031f82c5f171" Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.235998 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2f3a880c1014f060f55cc93ec2f22fdc0bc281cbb530d9eeb1a0031f82c5f171"} err="failed to get container status \"2f3a880c1014f060f55cc93ec2f22fdc0bc281cbb530d9eeb1a0031f82c5f171\": rpc error: code = NotFound desc = could not find container \"2f3a880c1014f060f55cc93ec2f22fdc0bc281cbb530d9eeb1a0031f82c5f171\": container with ID starting with 2f3a880c1014f060f55cc93ec2f22fdc0bc281cbb530d9eeb1a0031f82c5f171 not found: ID does not exist" Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.266727 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.381618 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-7749bf6d58-dwpbl"] Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.395825 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-7749bf6d58-dwpbl"] Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.410345 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-7d7dbcb65b-ttg5w"] Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.416802 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-7d7dbcb65b-ttg5w"] Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.422650 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-67cbb6f96d-89sm2"] Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.445763 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/placement-67cbb6f96d-89sm2"] Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.579612 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="099193f1-dcf1-4c0a-beeb-fac5f0824cb8" path="/var/lib/kubelet/pods/099193f1-dcf1-4c0a-beeb-fac5f0824cb8/volumes" Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.580273 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="1ab0a776-6551-4ef8-bd41-0ef0e58d892d" path="/var/lib/kubelet/pods/1ab0a776-6551-4ef8-bd41-0ef0e58d892d/volumes" Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.580894 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3d958c0e-f259-45a7-9c66-9d2b82c92980" path="/var/lib/kubelet/pods/3d958c0e-f259-45a7-9c66-9d2b82c92980/volumes" Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.582023 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5391541f-5057-4d20-ae1e-bbc88e6b33a4" path="/var/lib/kubelet/pods/5391541f-5057-4d20-ae1e-bbc88e6b33a4/volumes" Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.582573 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5a046f3a-bea5-4aea-8081-1c5994ecdbd9" path="/var/lib/kubelet/pods/5a046f3a-bea5-4aea-8081-1c5994ecdbd9/volumes" Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.583127 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9bca6361-b1d7-4953-b4bd-2bbda0f90581" path="/var/lib/kubelet/pods/9bca6361-b1d7-4953-b4bd-2bbda0f90581/volumes" Jan 20 17:36:26 crc kubenswrapper[4558]: W0120 17:36:26.632443 4558 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1ab0a776_6551_4ef8_bd41_0ef0e58d892d.slice/crio-conmon-5ceee282e729d804923fe9fb975a64436f6260d190a2814c82d6d5c4adb2113b.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1ab0a776_6551_4ef8_bd41_0ef0e58d892d.slice/crio-conmon-5ceee282e729d804923fe9fb975a64436f6260d190a2814c82d6d5c4adb2113b.scope: no such file or directory Jan 20 17:36:26 crc kubenswrapper[4558]: W0120 17:36:26.632801 4558 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4bb0e311_8438_4794_929d_791f0285afaf.slice/crio-conmon-999b8d65f2a7e8adcf754626b17414547aff5caec391cc7f682d54acf1ccdb23.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4bb0e311_8438_4794_929d_791f0285afaf.slice/crio-conmon-999b8d65f2a7e8adcf754626b17414547aff5caec391cc7f682d54acf1ccdb23.scope: no such file or directory Jan 20 17:36:26 crc kubenswrapper[4558]: W0120 17:36:26.635583 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9bca6361_b1d7_4953_b4bd_2bbda0f90581.slice/crio-1371ec8f64533ee2b1ef24267d239bc9ffc81022e42aaedff8dc1abab24b701a.scope WatchSource:0}: Error finding container 1371ec8f64533ee2b1ef24267d239bc9ffc81022e42aaedff8dc1abab24b701a: Status 404 returned error can't find the container with id 1371ec8f64533ee2b1ef24267d239bc9ffc81022e42aaedff8dc1abab24b701a Jan 20 17:36:26 crc kubenswrapper[4558]: W0120 17:36:26.641280 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9bca6361_b1d7_4953_b4bd_2bbda0f90581.slice/crio-f6f5a5e0be8e181ac0a6e571683bbf4717d8a0dc273866a4b41c89d9befcf017.scope WatchSource:0}: Error finding container f6f5a5e0be8e181ac0a6e571683bbf4717d8a0dc273866a4b41c89d9befcf017: Status 404 returned error can't find the container with id f6f5a5e0be8e181ac0a6e571683bbf4717d8a0dc273866a4b41c89d9befcf017 Jan 20 17:36:26 crc kubenswrapper[4558]: W0120 17:36:26.662351 4558 watcher.go:93] Error while processing event 
("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1ab0a776_6551_4ef8_bd41_0ef0e58d892d.slice/crio-5ceee282e729d804923fe9fb975a64436f6260d190a2814c82d6d5c4adb2113b.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1ab0a776_6551_4ef8_bd41_0ef0e58d892d.slice/crio-5ceee282e729d804923fe9fb975a64436f6260d190a2814c82d6d5c4adb2113b.scope: no such file or directory Jan 20 17:36:26 crc kubenswrapper[4558]: W0120 17:36:26.662628 4558 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4bb0e311_8438_4794_929d_791f0285afaf.slice/crio-999b8d65f2a7e8adcf754626b17414547aff5caec391cc7f682d54acf1ccdb23.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4bb0e311_8438_4794_929d_791f0285afaf.slice/crio-999b8d65f2a7e8adcf754626b17414547aff5caec391cc7f682d54acf1ccdb23.scope: no such file or directory Jan 20 17:36:26 crc kubenswrapper[4558]: W0120 17:36:26.710034 4558 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1ab0a776_6551_4ef8_bd41_0ef0e58d892d.slice/crio-conmon-14383d0552e72e8dcba897054fdb5c936fabfa390440e34f12f122f7b98f85e9.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1ab0a776_6551_4ef8_bd41_0ef0e58d892d.slice/crio-conmon-14383d0552e72e8dcba897054fdb5c936fabfa390440e34f12f122f7b98f85e9.scope: no such file or directory Jan 20 17:36:26 crc kubenswrapper[4558]: W0120 17:36:26.710224 4558 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4bb0e311_8438_4794_929d_791f0285afaf.slice/crio-conmon-58b48bcfefd9b4cc3d847e36e04114fde6f3e780dae5153d04a315662e85320c.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4bb0e311_8438_4794_929d_791f0285afaf.slice/crio-conmon-58b48bcfefd9b4cc3d847e36e04114fde6f3e780dae5153d04a315662e85320c.scope: no such file or directory Jan 20 17:36:26 crc kubenswrapper[4558]: W0120 17:36:26.712357 4558 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1ab0a776_6551_4ef8_bd41_0ef0e58d892d.slice/crio-14383d0552e72e8dcba897054fdb5c936fabfa390440e34f12f122f7b98f85e9.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1ab0a776_6551_4ef8_bd41_0ef0e58d892d.slice/crio-14383d0552e72e8dcba897054fdb5c936fabfa390440e34f12f122f7b98f85e9.scope: no such file or directory Jan 20 17:36:26 crc kubenswrapper[4558]: W0120 17:36:26.714533 4558 watcher.go:93] Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4bb0e311_8438_4794_929d_791f0285afaf.slice/crio-58b48bcfefd9b4cc3d847e36e04114fde6f3e780dae5153d04a315662e85320c.scope": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4bb0e311_8438_4794_929d_791f0285afaf.slice/crio-58b48bcfefd9b4cc3d847e36e04114fde6f3e780dae5153d04a315662e85320c.scope: no such file or directory Jan 20 17:36:26 crc kubenswrapper[4558]: W0120 17:36:26.733431 4558 watcher.go:93] 
Error while processing event ("/sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod466e0ec0_043e_49a6_b30d_0272443cb839.slice": 0x40000100 == IN_CREATE|IN_ISDIR): inotify_add_watch /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod466e0ec0_043e_49a6_b30d_0272443cb839.slice: no such file or directory Jan 20 17:36:26 crc kubenswrapper[4558]: E0120 17:36:26.803059 4558 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbbb20173_fee8_4323_afbb_f2bbbec3e978.slice/crio-conmon-ea6d1ca32a116c466cabb20cadcc9e9af27e7b82039308f7f1df1e6178fa2a36.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5391541f_5057_4d20_ae1e_bbc88e6b33a4.slice/crio-conmon-4f3f925041f116a55af0c01739c0b06976097986bc12562c41828bd96d52c5f4.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7de6386e_7693_430d_9753_2d7fa8c31b5d.slice/crio-conmon-af363f8fa728755ec981997979783ddd99710b030181fea30ee5db9adb704c8e.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod91fac67e_69a9_4d88_9136_0d2484ca0dce.slice/crio-conmon-61659b3cdbe7237e6c49c005bd8d5ec58e23a181bee16a952d93828bb6852edf.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode8a8d7c2_9fc7_48d9_bd28_2f0416b7c61c.slice/crio-e0768009b6e30ed978a8014343d3fbe81202632e1022bdbfd5c82eef6ba8b41a\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9b9c1cf7_0cbb_4bb3_a764_8a8fa446c8a8.slice/crio-f80866aad1b997b9edf8ab7c0962cd1436981eced3f24d965081cee0a0b92d9a.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbbb20173_fee8_4323_afbb_f2bbbec3e978.slice/crio-conmon-3ff2458b3f3311a509b35bc6e23de18f9f9253176e2e50da406ead896867e39d.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9b9c1cf7_0cbb_4bb3_a764_8a8fa446c8a8.slice/crio-conmon-f80866aad1b997b9edf8ab7c0962cd1436981eced3f24d965081cee0a0b92d9a.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4bd47eac_4090_4fc1_91c4_553ebd84964d.slice/crio-conmon-e1524456582814b161678d93ca35c12161b2ebbf4ff7b988cf13d97b61480ee9.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5a046f3a_bea5_4aea_8081_1c5994ecdbd9.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbbb20173_fee8_4323_afbb_f2bbbec3e978.slice/crio-conmon-f9994a423f0457d16e1857b55313f41375f006cd328176af514da36879ef8b96.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod833337d4_94d4_4bf6_9d0b_5791b9cc115e.slice/crio-b138c24e8875ddd5251012c3048e6574ab595b0a17f136458768b1a33c6227cb.scope\": RecentStats: unable to find data in memory cache], 
[\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4bd47eac_4090_4fc1_91c4_553ebd84964d.slice/crio-conmon-8fb64177ca582dd6f77f7548f90b08a99b652fb60686912cfa1ae24e3d6b7093.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddb6ef9f1_8a92_4c74_b476_a24b66585268.slice/crio-conmon-6500342a069cbdc40f29bb0f7540a921249de5ac706a7008df5f940aa1c75eb9.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc9ec2559_f7c5_4318_9a3b_4544d222ff8e.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod833337d4_94d4_4bf6_9d0b_5791b9cc115e.slice/crio-conmon-b138c24e8875ddd5251012c3048e6574ab595b0a17f136458768b1a33c6227cb.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod09df5e63_9283_4cae_80fe_cfa88f5a9b72.slice/crio-7bd2233090ba7d102c3a1cba643b0d5335cea55cf7e97e49488020c2bb64eaa3.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddb6ef9f1_8a92_4c74_b476_a24b66585268.slice/crio-2993fdbda70ff371d15f21c84b8a5240e810ba181d38fa057ea8020b0703d009\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf4d77869_9132_4170_b50d_88389a33c597.slice/crio-conmon-96c4e3a692bc0dac49e1c6d07e764ff13bc464c6bdc3c29e2e82a3d65c290531.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda0122392_1c61_4133_b05f_cc6a622abaf9.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5391541f_5057_4d20_ae1e_bbc88e6b33a4.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7de6386e_7693_430d_9753_2d7fa8c31b5d.slice/crio-65853762fe6cac36e4de6676f22c99039ab79573b354ec57daadc513c082bba2.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod91fac67e_69a9_4d88_9136_0d2484ca0dce.slice/crio-61659b3cdbe7237e6c49c005bd8d5ec58e23a181bee16a952d93828bb6852edf.scope\": RecentStats: unable to find data in memory cache], [\"/system.slice/system-systemd\\\\x2dcoredump.slice/systemd-coredump@7-281537-0.service\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod833337d4_94d4_4bf6_9d0b_5791b9cc115e.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod099193f1_dcf1_4c0a_beeb_fac5f0824cb8.slice/crio-53ba20480eaba79bd67435843f2f6a3f80cc9813358a7b7eca6feb2250f3c8b4\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode8a8d7c2_9fc7_48d9_bd28_2f0416b7c61c.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbbb20173_fee8_4323_afbb_f2bbbec3e978.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda0122392_1c61_4133_b05f_cc6a622abaf9.slice/crio-c5868b91333e3f1c6356506d56295db65bbb6580ddcdb5dfaafeb5e8213164a5.scope\": RecentStats: unable to find data in memory 
cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf4d77869_9132_4170_b50d_88389a33c597.slice/crio-96c4e3a692bc0dac49e1c6d07e764ff13bc464c6bdc3c29e2e82a3d65c290531.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod91fac67e_69a9_4d88_9136_0d2484ca0dce.slice/crio-5551c79462136929334b1ffe858711f0e5df3f4ccfbeeaa390c0be33ddf3d7d4\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf4d77869_9132_4170_b50d_88389a33c597.slice/crio-02995edadd67159839c771c36a6d802eeb43852d8c189cae748c4f00fd0b7c25\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode228514c_6d22_4527_b365_913e3ea3cfdb.slice/crio-c4d8f498217a4761f2808d12b3f68f581b7f216075fc52ef93e3194e0d7de2e2\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7de6386e_7693_430d_9753_2d7fa8c31b5d.slice/crio-conmon-65853762fe6cac36e4de6676f22c99039ab79573b354ec57daadc513c082bba2.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod06b8e2bb_a133_4a4b_92dd_12cf20ee4300.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4bd47eac_4090_4fc1_91c4_553ebd84964d.slice/crio-41b3f54e2b9bfb4b0a9c2c2cb167cb97f4161a9d82938df1b6dbaf7803136def\": RecentStats: unable to find data in memory cache]" Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.963086 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-dd7c4cf57-h4b8r" Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.991538 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m86b4\" (UniqueName: \"kubernetes.io/projected/4bb0e311-8438-4794-929d-791f0285afaf-kube-api-access-m86b4\") pod \"4bb0e311-8438-4794-929d-791f0285afaf\" (UID: \"4bb0e311-8438-4794-929d-791f0285afaf\") " Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.991665 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4bb0e311-8438-4794-929d-791f0285afaf-config-data-custom\") pod \"4bb0e311-8438-4794-929d-791f0285afaf\" (UID: \"4bb0e311-8438-4794-929d-791f0285afaf\") " Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.991754 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4bb0e311-8438-4794-929d-791f0285afaf-combined-ca-bundle\") pod \"4bb0e311-8438-4794-929d-791f0285afaf\" (UID: \"4bb0e311-8438-4794-929d-791f0285afaf\") " Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.991911 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4bb0e311-8438-4794-929d-791f0285afaf-logs\") pod \"4bb0e311-8438-4794-929d-791f0285afaf\" (UID: \"4bb0e311-8438-4794-929d-791f0285afaf\") " Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.992039 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4bb0e311-8438-4794-929d-791f0285afaf-config-data\") pod \"4bb0e311-8438-4794-929d-791f0285afaf\" (UID: 
\"4bb0e311-8438-4794-929d-791f0285afaf\") " Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.992452 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4bb0e311-8438-4794-929d-791f0285afaf-logs" (OuterVolumeSpecName: "logs") pod "4bb0e311-8438-4794-929d-791f0285afaf" (UID: "4bb0e311-8438-4794-929d-791f0285afaf"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:36:26 crc kubenswrapper[4558]: I0120 17:36:26.993268 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4bb0e311-8438-4794-929d-791f0285afaf-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:27 crc kubenswrapper[4558]: I0120 17:36:27.004845 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb0e311-8438-4794-929d-791f0285afaf-kube-api-access-m86b4" (OuterVolumeSpecName: "kube-api-access-m86b4") pod "4bb0e311-8438-4794-929d-791f0285afaf" (UID: "4bb0e311-8438-4794-929d-791f0285afaf"). InnerVolumeSpecName "kube-api-access-m86b4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:36:27 crc kubenswrapper[4558]: I0120 17:36:27.011509 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4bb0e311-8438-4794-929d-791f0285afaf-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "4bb0e311-8438-4794-929d-791f0285afaf" (UID: "4bb0e311-8438-4794-929d-791f0285afaf"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:27 crc kubenswrapper[4558]: I0120 17:36:27.031089 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4bb0e311-8438-4794-929d-791f0285afaf-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4bb0e311-8438-4794-929d-791f0285afaf" (UID: "4bb0e311-8438-4794-929d-791f0285afaf"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:27 crc kubenswrapper[4558]: I0120 17:36:27.034318 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4bb0e311-8438-4794-929d-791f0285afaf-config-data" (OuterVolumeSpecName: "config-data") pod "4bb0e311-8438-4794-929d-791f0285afaf" (UID: "4bb0e311-8438-4794-929d-791f0285afaf"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:27 crc kubenswrapper[4558]: I0120 17:36:27.052027 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"b18f4fcf-eaae-401f-99ec-7b130ad8a6c1","Type":"ContainerStarted","Data":"1912908256def343892d596509f3c9466765e94f19ab6d147f037bd8d37ce167"} Jan 20 17:36:27 crc kubenswrapper[4558]: I0120 17:36:27.052103 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"b18f4fcf-eaae-401f-99ec-7b130ad8a6c1","Type":"ContainerStarted","Data":"15b24ba0e05dc30e42bec5e997f3d15329eec1a28a5f88a4d0916339e17fd0d9"} Jan 20 17:36:27 crc kubenswrapper[4558]: I0120 17:36:27.052131 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:36:27 crc kubenswrapper[4558]: I0120 17:36:27.053872 4558 generic.go:334] "Generic (PLEG): container finished" podID="4bb0e311-8438-4794-929d-791f0285afaf" containerID="58b48bcfefd9b4cc3d847e36e04114fde6f3e780dae5153d04a315662e85320c" exitCode=137 Jan 20 17:36:27 crc kubenswrapper[4558]: I0120 17:36:27.053921 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-dd7c4cf57-h4b8r" Jan 20 17:36:27 crc kubenswrapper[4558]: I0120 17:36:27.053941 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-dd7c4cf57-h4b8r" event={"ID":"4bb0e311-8438-4794-929d-791f0285afaf","Type":"ContainerDied","Data":"58b48bcfefd9b4cc3d847e36e04114fde6f3e780dae5153d04a315662e85320c"} Jan 20 17:36:27 crc kubenswrapper[4558]: I0120 17:36:27.053974 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-dd7c4cf57-h4b8r" event={"ID":"4bb0e311-8438-4794-929d-791f0285afaf","Type":"ContainerDied","Data":"35397d3a5d4d82dd46f53954cc007539946af832f780d8b2e57180fff3b5a662"} Jan 20 17:36:27 crc kubenswrapper[4558]: I0120 17:36:27.053993 4558 scope.go:117] "RemoveContainer" containerID="58b48bcfefd9b4cc3d847e36e04114fde6f3e780dae5153d04a315662e85320c" Jan 20 17:36:27 crc kubenswrapper[4558]: I0120 17:36:27.055538 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"4fd9bebc-9868-4970-bc1f-d047286980d0","Type":"ContainerStarted","Data":"6517bf0111713477475cc09d3cf93d2400d5d1a9f549d2bfca4f578e113b540a"} Jan 20 17:36:27 crc kubenswrapper[4558]: I0120 17:36:27.055578 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"4fd9bebc-9868-4970-bc1f-d047286980d0","Type":"ContainerStarted","Data":"d99151254e0a3c0a84117324083d366981d38a46a10d9178906321cdbc97ddc3"} Jan 20 17:36:27 crc kubenswrapper[4558]: I0120 17:36:27.060456 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"906beedc-3fa2-4d6e-a6e8-485ca2fb1082","Type":"ContainerStarted","Data":"48ace894e440f9c70ed8c855d176b45725c5d24b103871d2c8046cc04e75abb3"} Jan 20 17:36:27 crc kubenswrapper[4558]: I0120 17:36:27.060483 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"906beedc-3fa2-4d6e-a6e8-485ca2fb1082","Type":"ContainerStarted","Data":"9da02a19e6777029509b4f8838ad4012f0bd07388cadfbfa55cbc40a5673b825"} Jan 20 17:36:27 crc kubenswrapper[4558]: I0120 17:36:27.060492 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-kuttl-tests/nova-api-0" event={"ID":"906beedc-3fa2-4d6e-a6e8-485ca2fb1082","Type":"ContainerStarted","Data":"bdd05f55856c8fd5a792832668c4f4ad2d1ebef99027ecad7d4990a5516ec21a"} Jan 20 17:36:27 crc kubenswrapper[4558]: I0120 17:36:27.074730 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/memcached-0" podStartSLOduration=2.074717217 podStartE2EDuration="2.074717217s" podCreationTimestamp="2026-01-20 17:36:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:36:27.068637124 +0000 UTC m=+3280.828975091" watchObservedRunningTime="2026-01-20 17:36:27.074717217 +0000 UTC m=+3280.835055183" Jan 20 17:36:27 crc kubenswrapper[4558]: I0120 17:36:27.096071 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m86b4\" (UniqueName: \"kubernetes.io/projected/4bb0e311-8438-4794-929d-791f0285afaf-kube-api-access-m86b4\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:27 crc kubenswrapper[4558]: I0120 17:36:27.096145 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4bb0e311-8438-4794-929d-791f0285afaf-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:27 crc kubenswrapper[4558]: I0120 17:36:27.096174 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4bb0e311-8438-4794-929d-791f0285afaf-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:27 crc kubenswrapper[4558]: I0120 17:36:27.096186 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4bb0e311-8438-4794-929d-791f0285afaf-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:27 crc kubenswrapper[4558]: I0120 17:36:27.103366 4558 scope.go:117] "RemoveContainer" containerID="999b8d65f2a7e8adcf754626b17414547aff5caec391cc7f682d54acf1ccdb23" Jan 20 17:36:27 crc kubenswrapper[4558]: I0120 17:36:27.110077 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-api-0" podStartSLOduration=2.110056789 podStartE2EDuration="2.110056789s" podCreationTimestamp="2026-01-20 17:36:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:36:27.098671109 +0000 UTC m=+3280.859009077" watchObservedRunningTime="2026-01-20 17:36:27.110056789 +0000 UTC m=+3280.870394755" Jan 20 17:36:27 crc kubenswrapper[4558]: I0120 17:36:27.141509 4558 scope.go:117] "RemoveContainer" containerID="58b48bcfefd9b4cc3d847e36e04114fde6f3e780dae5153d04a315662e85320c" Jan 20 17:36:27 crc kubenswrapper[4558]: I0120 17:36:27.141958 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" podStartSLOduration=2.141940021 podStartE2EDuration="2.141940021s" podCreationTimestamp="2026-01-20 17:36:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:36:27.122792827 +0000 UTC m=+3280.883130794" watchObservedRunningTime="2026-01-20 17:36:27.141940021 +0000 UTC m=+3280.902277987" Jan 20 17:36:27 crc kubenswrapper[4558]: E0120 17:36:27.142218 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"58b48bcfefd9b4cc3d847e36e04114fde6f3e780dae5153d04a315662e85320c\": container with ID starting with 58b48bcfefd9b4cc3d847e36e04114fde6f3e780dae5153d04a315662e85320c not found: ID does not exist" containerID="58b48bcfefd9b4cc3d847e36e04114fde6f3e780dae5153d04a315662e85320c" Jan 20 17:36:27 crc kubenswrapper[4558]: I0120 17:36:27.142255 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"58b48bcfefd9b4cc3d847e36e04114fde6f3e780dae5153d04a315662e85320c"} err="failed to get container status \"58b48bcfefd9b4cc3d847e36e04114fde6f3e780dae5153d04a315662e85320c\": rpc error: code = NotFound desc = could not find container \"58b48bcfefd9b4cc3d847e36e04114fde6f3e780dae5153d04a315662e85320c\": container with ID starting with 58b48bcfefd9b4cc3d847e36e04114fde6f3e780dae5153d04a315662e85320c not found: ID does not exist" Jan 20 17:36:27 crc kubenswrapper[4558]: I0120 17:36:27.142283 4558 scope.go:117] "RemoveContainer" containerID="999b8d65f2a7e8adcf754626b17414547aff5caec391cc7f682d54acf1ccdb23" Jan 20 17:36:27 crc kubenswrapper[4558]: E0120 17:36:27.142811 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"999b8d65f2a7e8adcf754626b17414547aff5caec391cc7f682d54acf1ccdb23\": container with ID starting with 999b8d65f2a7e8adcf754626b17414547aff5caec391cc7f682d54acf1ccdb23 not found: ID does not exist" containerID="999b8d65f2a7e8adcf754626b17414547aff5caec391cc7f682d54acf1ccdb23" Jan 20 17:36:27 crc kubenswrapper[4558]: I0120 17:36:27.142839 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"999b8d65f2a7e8adcf754626b17414547aff5caec391cc7f682d54acf1ccdb23"} err="failed to get container status \"999b8d65f2a7e8adcf754626b17414547aff5caec391cc7f682d54acf1ccdb23\": rpc error: code = NotFound desc = could not find container \"999b8d65f2a7e8adcf754626b17414547aff5caec391cc7f682d54acf1ccdb23\": container with ID starting with 999b8d65f2a7e8adcf754626b17414547aff5caec391cc7f682d54acf1ccdb23 not found: ID does not exist" Jan 20 17:36:27 crc kubenswrapper[4558]: I0120 17:36:27.149308 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-worker-dd7c4cf57-h4b8r"] Jan 20 17:36:27 crc kubenswrapper[4558]: I0120 17:36:27.155707 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-worker-dd7c4cf57-h4b8r"] Jan 20 17:36:27 crc kubenswrapper[4558]: I0120 17:36:27.179768 4558 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","podbbb20173-fee8-4323-afbb-f2bbbec3e978"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort podbbb20173-fee8-4323-afbb-f2bbbec3e978] : Timed out while waiting for systemd to remove kubepods-besteffort-podbbb20173_fee8_4323_afbb_f2bbbec3e978.slice" Jan 20 17:36:27 crc kubenswrapper[4558]: I0120 17:36:27.181921 4558 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","poda0122392-1c61-4133-b05f-cc6a622abaf9"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort poda0122392-1c61-4133-b05f-cc6a622abaf9] : Timed out while waiting for systemd to remove kubepods-besteffort-poda0122392_1c61_4133_b05f_cc6a622abaf9.slice" Jan 20 17:36:27 crc kubenswrapper[4558]: E0120 17:36:27.181954 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to delete cgroup paths for [kubepods besteffort 
poda0122392-1c61-4133-b05f-cc6a622abaf9] : unable to destroy cgroup paths for cgroup [kubepods besteffort poda0122392-1c61-4133-b05f-cc6a622abaf9] : Timed out while waiting for systemd to remove kubepods-besteffort-poda0122392_1c61_4133_b05f_cc6a622abaf9.slice" pod="openstack-kuttl-tests/nova-metadata-0" podUID="a0122392-1c61-4133-b05f-cc6a622abaf9" Jan 20 17:36:27 crc kubenswrapper[4558]: I0120 17:36:27.329998 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 17:36:27 crc kubenswrapper[4558]: I0120 17:36:27.330103 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 17:36:28 crc kubenswrapper[4558]: I0120 17:36:28.070710 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:36:28 crc kubenswrapper[4558]: I0120 17:36:28.103278 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:36:28 crc kubenswrapper[4558]: I0120 17:36:28.113064 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:36:28 crc kubenswrapper[4558]: I0120 17:36:28.121707 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:36:28 crc kubenswrapper[4558]: E0120 17:36:28.122176 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d958c0e-f259-45a7-9c66-9d2b82c92980" containerName="placement-log" Jan 20 17:36:28 crc kubenswrapper[4558]: I0120 17:36:28.122195 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d958c0e-f259-45a7-9c66-9d2b82c92980" containerName="placement-log" Jan 20 17:36:28 crc kubenswrapper[4558]: E0120 17:36:28.122206 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ab0a776-6551-4ef8-bd41-0ef0e58d892d" containerName="barbican-keystone-listener" Jan 20 17:36:28 crc kubenswrapper[4558]: I0120 17:36:28.122214 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ab0a776-6551-4ef8-bd41-0ef0e58d892d" containerName="barbican-keystone-listener" Jan 20 17:36:28 crc kubenswrapper[4558]: E0120 17:36:28.122232 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4bb0e311-8438-4794-929d-791f0285afaf" containerName="barbican-worker-log" Jan 20 17:36:28 crc kubenswrapper[4558]: I0120 17:36:28.122238 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4bb0e311-8438-4794-929d-791f0285afaf" containerName="barbican-worker-log" Jan 20 17:36:28 crc kubenswrapper[4558]: E0120 17:36:28.122259 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d958c0e-f259-45a7-9c66-9d2b82c92980" containerName="placement-api" Jan 20 17:36:28 crc kubenswrapper[4558]: I0120 17:36:28.122267 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d958c0e-f259-45a7-9c66-9d2b82c92980" containerName="placement-api" Jan 20 17:36:28 crc kubenswrapper[4558]: E0120 17:36:28.122276 4558 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="9bca6361-b1d7-4953-b4bd-2bbda0f90581" containerName="neutron-api" Jan 20 17:36:28 crc kubenswrapper[4558]: I0120 17:36:28.122281 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9bca6361-b1d7-4953-b4bd-2bbda0f90581" containerName="neutron-api" Jan 20 17:36:28 crc kubenswrapper[4558]: E0120 17:36:28.122294 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ab0a776-6551-4ef8-bd41-0ef0e58d892d" containerName="barbican-keystone-listener-log" Jan 20 17:36:28 crc kubenswrapper[4558]: I0120 17:36:28.122301 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ab0a776-6551-4ef8-bd41-0ef0e58d892d" containerName="barbican-keystone-listener-log" Jan 20 17:36:28 crc kubenswrapper[4558]: E0120 17:36:28.122320 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4bb0e311-8438-4794-929d-791f0285afaf" containerName="barbican-worker" Jan 20 17:36:28 crc kubenswrapper[4558]: I0120 17:36:28.122325 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4bb0e311-8438-4794-929d-791f0285afaf" containerName="barbican-worker" Jan 20 17:36:28 crc kubenswrapper[4558]: E0120 17:36:28.122334 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9bca6361-b1d7-4953-b4bd-2bbda0f90581" containerName="neutron-httpd" Jan 20 17:36:28 crc kubenswrapper[4558]: I0120 17:36:28.122340 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9bca6361-b1d7-4953-b4bd-2bbda0f90581" containerName="neutron-httpd" Jan 20 17:36:28 crc kubenswrapper[4558]: I0120 17:36:28.122520 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="9bca6361-b1d7-4953-b4bd-2bbda0f90581" containerName="neutron-httpd" Jan 20 17:36:28 crc kubenswrapper[4558]: I0120 17:36:28.122541 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="1ab0a776-6551-4ef8-bd41-0ef0e58d892d" containerName="barbican-keystone-listener" Jan 20 17:36:28 crc kubenswrapper[4558]: I0120 17:36:28.122550 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="4bb0e311-8438-4794-929d-791f0285afaf" containerName="barbican-worker-log" Jan 20 17:36:28 crc kubenswrapper[4558]: I0120 17:36:28.122564 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d958c0e-f259-45a7-9c66-9d2b82c92980" containerName="placement-log" Jan 20 17:36:28 crc kubenswrapper[4558]: I0120 17:36:28.122574 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="1ab0a776-6551-4ef8-bd41-0ef0e58d892d" containerName="barbican-keystone-listener-log" Jan 20 17:36:28 crc kubenswrapper[4558]: I0120 17:36:28.122584 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="4bb0e311-8438-4794-929d-791f0285afaf" containerName="barbican-worker" Jan 20 17:36:28 crc kubenswrapper[4558]: I0120 17:36:28.122594 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d958c0e-f259-45a7-9c66-9d2b82c92980" containerName="placement-api" Jan 20 17:36:28 crc kubenswrapper[4558]: I0120 17:36:28.122602 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="9bca6361-b1d7-4953-b4bd-2bbda0f90581" containerName="neutron-api" Jan 20 17:36:28 crc kubenswrapper[4558]: I0120 17:36:28.123635 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:36:28 crc kubenswrapper[4558]: I0120 17:36:28.125271 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-metadata-internal-svc" Jan 20 17:36:28 crc kubenswrapper[4558]: I0120 17:36:28.128055 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-metadata-config-data" Jan 20 17:36:28 crc kubenswrapper[4558]: I0120 17:36:28.129829 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7ee90982-23a3-4111-9c4e-159828a51a69-logs\") pod \"nova-metadata-0\" (UID: \"7ee90982-23a3-4111-9c4e-159828a51a69\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:36:28 crc kubenswrapper[4558]: I0120 17:36:28.129883 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ee90982-23a3-4111-9c4e-159828a51a69-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"7ee90982-23a3-4111-9c4e-159828a51a69\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:36:28 crc kubenswrapper[4558]: I0120 17:36:28.129915 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ee90982-23a3-4111-9c4e-159828a51a69-config-data\") pod \"nova-metadata-0\" (UID: \"7ee90982-23a3-4111-9c4e-159828a51a69\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:36:28 crc kubenswrapper[4558]: I0120 17:36:28.129957 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ee90982-23a3-4111-9c4e-159828a51a69-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"7ee90982-23a3-4111-9c4e-159828a51a69\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:36:28 crc kubenswrapper[4558]: I0120 17:36:28.130124 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4dq7s\" (UniqueName: \"kubernetes.io/projected/7ee90982-23a3-4111-9c4e-159828a51a69-kube-api-access-4dq7s\") pod \"nova-metadata-0\" (UID: \"7ee90982-23a3-4111-9c4e-159828a51a69\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:36:28 crc kubenswrapper[4558]: I0120 17:36:28.135579 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:36:28 crc kubenswrapper[4558]: I0120 17:36:28.233446 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4dq7s\" (UniqueName: \"kubernetes.io/projected/7ee90982-23a3-4111-9c4e-159828a51a69-kube-api-access-4dq7s\") pod \"nova-metadata-0\" (UID: \"7ee90982-23a3-4111-9c4e-159828a51a69\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:36:28 crc kubenswrapper[4558]: I0120 17:36:28.233662 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7ee90982-23a3-4111-9c4e-159828a51a69-logs\") pod \"nova-metadata-0\" (UID: \"7ee90982-23a3-4111-9c4e-159828a51a69\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:36:28 crc kubenswrapper[4558]: I0120 17:36:28.233723 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/7ee90982-23a3-4111-9c4e-159828a51a69-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"7ee90982-23a3-4111-9c4e-159828a51a69\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:36:28 crc kubenswrapper[4558]: I0120 17:36:28.233774 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ee90982-23a3-4111-9c4e-159828a51a69-config-data\") pod \"nova-metadata-0\" (UID: \"7ee90982-23a3-4111-9c4e-159828a51a69\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:36:28 crc kubenswrapper[4558]: I0120 17:36:28.233831 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ee90982-23a3-4111-9c4e-159828a51a69-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"7ee90982-23a3-4111-9c4e-159828a51a69\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:36:28 crc kubenswrapper[4558]: I0120 17:36:28.234181 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7ee90982-23a3-4111-9c4e-159828a51a69-logs\") pod \"nova-metadata-0\" (UID: \"7ee90982-23a3-4111-9c4e-159828a51a69\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:36:28 crc kubenswrapper[4558]: I0120 17:36:28.240683 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ee90982-23a3-4111-9c4e-159828a51a69-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"7ee90982-23a3-4111-9c4e-159828a51a69\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:36:28 crc kubenswrapper[4558]: I0120 17:36:28.247585 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ee90982-23a3-4111-9c4e-159828a51a69-config-data\") pod \"nova-metadata-0\" (UID: \"7ee90982-23a3-4111-9c4e-159828a51a69\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:36:28 crc kubenswrapper[4558]: I0120 17:36:28.248433 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ee90982-23a3-4111-9c4e-159828a51a69-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"7ee90982-23a3-4111-9c4e-159828a51a69\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:36:28 crc kubenswrapper[4558]: I0120 17:36:28.248520 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4dq7s\" (UniqueName: \"kubernetes.io/projected/7ee90982-23a3-4111-9c4e-159828a51a69-kube-api-access-4dq7s\") pod \"nova-metadata-0\" (UID: \"7ee90982-23a3-4111-9c4e-159828a51a69\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:36:28 crc kubenswrapper[4558]: I0120 17:36:28.356560 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:36:28 crc kubenswrapper[4558]: I0120 17:36:28.442984 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:36:28 crc kubenswrapper[4558]: I0120 17:36:28.581113 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb0e311-8438-4794-929d-791f0285afaf" path="/var/lib/kubelet/pods/4bb0e311-8438-4794-929d-791f0285afaf/volumes" Jan 20 17:36:28 crc kubenswrapper[4558]: I0120 17:36:28.581991 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0122392-1c61-4133-b05f-cc6a622abaf9" path="/var/lib/kubelet/pods/a0122392-1c61-4133-b05f-cc6a622abaf9/volumes" Jan 20 17:36:28 crc kubenswrapper[4558]: I0120 17:36:28.849615 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:36:29 crc kubenswrapper[4558]: I0120 17:36:29.084078 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"7ee90982-23a3-4111-9c4e-159828a51a69","Type":"ContainerStarted","Data":"a9b864e0c7723915ae39749b96750b7b8695818471060aad197d75c66a8cc364"} Jan 20 17:36:29 crc kubenswrapper[4558]: I0120 17:36:29.084486 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"7ee90982-23a3-4111-9c4e-159828a51a69","Type":"ContainerStarted","Data":"4af9a0c1b7692f786bcc6daedbc066a18d8652934d6ca178b1e24a0324714a28"} Jan 20 17:36:29 crc kubenswrapper[4558]: I0120 17:36:29.089080 4558 generic.go:334] "Generic (PLEG): container finished" podID="ff846ca3-9970-4311-915b-e2ad95aa07b8" containerID="82d446802d7e8d621542fbedc2f7511c1da11e01b6b157d8ef90209c4965a1a1" exitCode=0 Jan 20 17:36:29 crc kubenswrapper[4558]: I0120 17:36:29.089109 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-c7b8d4458-7ddjq" event={"ID":"ff846ca3-9970-4311-915b-e2ad95aa07b8","Type":"ContainerDied","Data":"82d446802d7e8d621542fbedc2f7511c1da11e01b6b157d8ef90209c4965a1a1"} Jan 20 17:36:29 crc kubenswrapper[4558]: I0120 17:36:29.123955 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-c7b8d4458-7ddjq" Jan 20 17:36:29 crc kubenswrapper[4558]: I0120 17:36:29.165825 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff846ca3-9970-4311-915b-e2ad95aa07b8-combined-ca-bundle\") pod \"ff846ca3-9970-4311-915b-e2ad95aa07b8\" (UID: \"ff846ca3-9970-4311-915b-e2ad95aa07b8\") " Jan 20 17:36:29 crc kubenswrapper[4558]: I0120 17:36:29.165900 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ff846ca3-9970-4311-915b-e2ad95aa07b8-credential-keys\") pod \"ff846ca3-9970-4311-915b-e2ad95aa07b8\" (UID: \"ff846ca3-9970-4311-915b-e2ad95aa07b8\") " Jan 20 17:36:29 crc kubenswrapper[4558]: I0120 17:36:29.165929 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ff846ca3-9970-4311-915b-e2ad95aa07b8-internal-tls-certs\") pod \"ff846ca3-9970-4311-915b-e2ad95aa07b8\" (UID: \"ff846ca3-9970-4311-915b-e2ad95aa07b8\") " Jan 20 17:36:29 crc kubenswrapper[4558]: I0120 17:36:29.165992 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ff846ca3-9970-4311-915b-e2ad95aa07b8-public-tls-certs\") pod \"ff846ca3-9970-4311-915b-e2ad95aa07b8\" (UID: \"ff846ca3-9970-4311-915b-e2ad95aa07b8\") " Jan 20 17:36:29 crc kubenswrapper[4558]: I0120 17:36:29.166109 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ff846ca3-9970-4311-915b-e2ad95aa07b8-fernet-keys\") pod \"ff846ca3-9970-4311-915b-e2ad95aa07b8\" (UID: \"ff846ca3-9970-4311-915b-e2ad95aa07b8\") " Jan 20 17:36:29 crc kubenswrapper[4558]: I0120 17:36:29.166157 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c78m6\" (UniqueName: \"kubernetes.io/projected/ff846ca3-9970-4311-915b-e2ad95aa07b8-kube-api-access-c78m6\") pod \"ff846ca3-9970-4311-915b-e2ad95aa07b8\" (UID: \"ff846ca3-9970-4311-915b-e2ad95aa07b8\") " Jan 20 17:36:29 crc kubenswrapper[4558]: I0120 17:36:29.166231 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff846ca3-9970-4311-915b-e2ad95aa07b8-config-data\") pod \"ff846ca3-9970-4311-915b-e2ad95aa07b8\" (UID: \"ff846ca3-9970-4311-915b-e2ad95aa07b8\") " Jan 20 17:36:29 crc kubenswrapper[4558]: I0120 17:36:29.166282 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ff846ca3-9970-4311-915b-e2ad95aa07b8-scripts\") pod \"ff846ca3-9970-4311-915b-e2ad95aa07b8\" (UID: \"ff846ca3-9970-4311-915b-e2ad95aa07b8\") " Jan 20 17:36:29 crc kubenswrapper[4558]: I0120 17:36:29.169503 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff846ca3-9970-4311-915b-e2ad95aa07b8-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "ff846ca3-9970-4311-915b-e2ad95aa07b8" (UID: "ff846ca3-9970-4311-915b-e2ad95aa07b8"). InnerVolumeSpecName "credential-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:29 crc kubenswrapper[4558]: I0120 17:36:29.170029 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff846ca3-9970-4311-915b-e2ad95aa07b8-scripts" (OuterVolumeSpecName: "scripts") pod "ff846ca3-9970-4311-915b-e2ad95aa07b8" (UID: "ff846ca3-9970-4311-915b-e2ad95aa07b8"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:29 crc kubenswrapper[4558]: I0120 17:36:29.171559 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ff846ca3-9970-4311-915b-e2ad95aa07b8-kube-api-access-c78m6" (OuterVolumeSpecName: "kube-api-access-c78m6") pod "ff846ca3-9970-4311-915b-e2ad95aa07b8" (UID: "ff846ca3-9970-4311-915b-e2ad95aa07b8"). InnerVolumeSpecName "kube-api-access-c78m6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:36:29 crc kubenswrapper[4558]: I0120 17:36:29.172140 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff846ca3-9970-4311-915b-e2ad95aa07b8-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "ff846ca3-9970-4311-915b-e2ad95aa07b8" (UID: "ff846ca3-9970-4311-915b-e2ad95aa07b8"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:29 crc kubenswrapper[4558]: I0120 17:36:29.205007 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff846ca3-9970-4311-915b-e2ad95aa07b8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ff846ca3-9970-4311-915b-e2ad95aa07b8" (UID: "ff846ca3-9970-4311-915b-e2ad95aa07b8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:29 crc kubenswrapper[4558]: I0120 17:36:29.207766 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff846ca3-9970-4311-915b-e2ad95aa07b8-config-data" (OuterVolumeSpecName: "config-data") pod "ff846ca3-9970-4311-915b-e2ad95aa07b8" (UID: "ff846ca3-9970-4311-915b-e2ad95aa07b8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:29 crc kubenswrapper[4558]: I0120 17:36:29.235611 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff846ca3-9970-4311-915b-e2ad95aa07b8-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "ff846ca3-9970-4311-915b-e2ad95aa07b8" (UID: "ff846ca3-9970-4311-915b-e2ad95aa07b8"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:29 crc kubenswrapper[4558]: I0120 17:36:29.235803 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff846ca3-9970-4311-915b-e2ad95aa07b8-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "ff846ca3-9970-4311-915b-e2ad95aa07b8" (UID: "ff846ca3-9970-4311-915b-e2ad95aa07b8"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:29 crc kubenswrapper[4558]: I0120 17:36:29.236847 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:36:29 crc kubenswrapper[4558]: I0120 17:36:29.270083 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff846ca3-9970-4311-915b-e2ad95aa07b8-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:29 crc kubenswrapper[4558]: I0120 17:36:29.270364 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ff846ca3-9970-4311-915b-e2ad95aa07b8-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:29 crc kubenswrapper[4558]: I0120 17:36:29.270377 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff846ca3-9970-4311-915b-e2ad95aa07b8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:29 crc kubenswrapper[4558]: I0120 17:36:29.270390 4558 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ff846ca3-9970-4311-915b-e2ad95aa07b8-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:29 crc kubenswrapper[4558]: I0120 17:36:29.270400 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ff846ca3-9970-4311-915b-e2ad95aa07b8-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:29 crc kubenswrapper[4558]: I0120 17:36:29.270410 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ff846ca3-9970-4311-915b-e2ad95aa07b8-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:29 crc kubenswrapper[4558]: I0120 17:36:29.270422 4558 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ff846ca3-9970-4311-915b-e2ad95aa07b8-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:29 crc kubenswrapper[4558]: I0120 17:36:29.270431 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c78m6\" (UniqueName: \"kubernetes.io/projected/ff846ca3-9970-4311-915b-e2ad95aa07b8-kube-api-access-c78m6\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:29 crc kubenswrapper[4558]: I0120 17:36:29.558531 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-5b4c8486cd-bh7qx" Jan 20 17:36:29 crc kubenswrapper[4558]: I0120 17:36:29.575444 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/89049a89-bdd9-4d74-bdc0-64002d4f4842-public-tls-certs\") pod \"89049a89-bdd9-4d74-bdc0-64002d4f4842\" (UID: \"89049a89-bdd9-4d74-bdc0-64002d4f4842\") " Jan 20 17:36:29 crc kubenswrapper[4558]: I0120 17:36:29.575511 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/89049a89-bdd9-4d74-bdc0-64002d4f4842-config\") pod \"89049a89-bdd9-4d74-bdc0-64002d4f4842\" (UID: \"89049a89-bdd9-4d74-bdc0-64002d4f4842\") " Jan 20 17:36:29 crc kubenswrapper[4558]: I0120 17:36:29.575548 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89049a89-bdd9-4d74-bdc0-64002d4f4842-combined-ca-bundle\") pod \"89049a89-bdd9-4d74-bdc0-64002d4f4842\" (UID: \"89049a89-bdd9-4d74-bdc0-64002d4f4842\") " Jan 20 17:36:29 crc kubenswrapper[4558]: I0120 17:36:29.575570 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/89049a89-bdd9-4d74-bdc0-64002d4f4842-ovndb-tls-certs\") pod \"89049a89-bdd9-4d74-bdc0-64002d4f4842\" (UID: \"89049a89-bdd9-4d74-bdc0-64002d4f4842\") " Jan 20 17:36:29 crc kubenswrapper[4558]: I0120 17:36:29.575596 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d8pfp\" (UniqueName: \"kubernetes.io/projected/89049a89-bdd9-4d74-bdc0-64002d4f4842-kube-api-access-d8pfp\") pod \"89049a89-bdd9-4d74-bdc0-64002d4f4842\" (UID: \"89049a89-bdd9-4d74-bdc0-64002d4f4842\") " Jan 20 17:36:29 crc kubenswrapper[4558]: I0120 17:36:29.575628 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/89049a89-bdd9-4d74-bdc0-64002d4f4842-httpd-config\") pod \"89049a89-bdd9-4d74-bdc0-64002d4f4842\" (UID: \"89049a89-bdd9-4d74-bdc0-64002d4f4842\") " Jan 20 17:36:29 crc kubenswrapper[4558]: I0120 17:36:29.575714 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/89049a89-bdd9-4d74-bdc0-64002d4f4842-internal-tls-certs\") pod \"89049a89-bdd9-4d74-bdc0-64002d4f4842\" (UID: \"89049a89-bdd9-4d74-bdc0-64002d4f4842\") " Jan 20 17:36:29 crc kubenswrapper[4558]: I0120 17:36:29.581287 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/89049a89-bdd9-4d74-bdc0-64002d4f4842-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "89049a89-bdd9-4d74-bdc0-64002d4f4842" (UID: "89049a89-bdd9-4d74-bdc0-64002d4f4842"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:29 crc kubenswrapper[4558]: I0120 17:36:29.582829 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/89049a89-bdd9-4d74-bdc0-64002d4f4842-kube-api-access-d8pfp" (OuterVolumeSpecName: "kube-api-access-d8pfp") pod "89049a89-bdd9-4d74-bdc0-64002d4f4842" (UID: "89049a89-bdd9-4d74-bdc0-64002d4f4842"). InnerVolumeSpecName "kube-api-access-d8pfp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:36:29 crc kubenswrapper[4558]: I0120 17:36:29.626745 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/89049a89-bdd9-4d74-bdc0-64002d4f4842-config" (OuterVolumeSpecName: "config") pod "89049a89-bdd9-4d74-bdc0-64002d4f4842" (UID: "89049a89-bdd9-4d74-bdc0-64002d4f4842"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:29 crc kubenswrapper[4558]: I0120 17:36:29.627035 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/89049a89-bdd9-4d74-bdc0-64002d4f4842-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "89049a89-bdd9-4d74-bdc0-64002d4f4842" (UID: "89049a89-bdd9-4d74-bdc0-64002d4f4842"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:29 crc kubenswrapper[4558]: I0120 17:36:29.627501 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/89049a89-bdd9-4d74-bdc0-64002d4f4842-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "89049a89-bdd9-4d74-bdc0-64002d4f4842" (UID: "89049a89-bdd9-4d74-bdc0-64002d4f4842"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:29 crc kubenswrapper[4558]: I0120 17:36:29.632376 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/89049a89-bdd9-4d74-bdc0-64002d4f4842-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "89049a89-bdd9-4d74-bdc0-64002d4f4842" (UID: "89049a89-bdd9-4d74-bdc0-64002d4f4842"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:29 crc kubenswrapper[4558]: I0120 17:36:29.642185 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/89049a89-bdd9-4d74-bdc0-64002d4f4842-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "89049a89-bdd9-4d74-bdc0-64002d4f4842" (UID: "89049a89-bdd9-4d74-bdc0-64002d4f4842"). InnerVolumeSpecName "ovndb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:29 crc kubenswrapper[4558]: I0120 17:36:29.678348 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/89049a89-bdd9-4d74-bdc0-64002d4f4842-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:29 crc kubenswrapper[4558]: I0120 17:36:29.678381 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/89049a89-bdd9-4d74-bdc0-64002d4f4842-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:29 crc kubenswrapper[4558]: I0120 17:36:29.678393 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/89049a89-bdd9-4d74-bdc0-64002d4f4842-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:29 crc kubenswrapper[4558]: I0120 17:36:29.678405 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/89049a89-bdd9-4d74-bdc0-64002d4f4842-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:29 crc kubenswrapper[4558]: I0120 17:36:29.678415 4558 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/89049a89-bdd9-4d74-bdc0-64002d4f4842-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:29 crc kubenswrapper[4558]: I0120 17:36:29.678427 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d8pfp\" (UniqueName: \"kubernetes.io/projected/89049a89-bdd9-4d74-bdc0-64002d4f4842-kube-api-access-d8pfp\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:29 crc kubenswrapper[4558]: I0120 17:36:29.678439 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/89049a89-bdd9-4d74-bdc0-64002d4f4842-httpd-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:30 crc kubenswrapper[4558]: I0120 17:36:30.102328 4558 generic.go:334] "Generic (PLEG): container finished" podID="89049a89-bdd9-4d74-bdc0-64002d4f4842" containerID="ea1f141ab42d1923c6ad40bf5bb7a415db52b6566887f2c7238c3eb688cf92ea" exitCode=0 Jan 20 17:36:30 crc kubenswrapper[4558]: I0120 17:36:30.102402 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-5b4c8486cd-bh7qx" event={"ID":"89049a89-bdd9-4d74-bdc0-64002d4f4842","Type":"ContainerDied","Data":"ea1f141ab42d1923c6ad40bf5bb7a415db52b6566887f2c7238c3eb688cf92ea"} Jan 20 17:36:30 crc kubenswrapper[4558]: I0120 17:36:30.102450 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-5b4c8486cd-bh7qx" event={"ID":"89049a89-bdd9-4d74-bdc0-64002d4f4842","Type":"ContainerDied","Data":"6ac1d2a9274972d0bb14c388776752fcb28b35c9e1abb126096096f4857f41f1"} Jan 20 17:36:30 crc kubenswrapper[4558]: I0120 17:36:30.102477 4558 scope.go:117] "RemoveContainer" containerID="ae7a97185cd9fbb38ff879e0bdbf67d1b5340be215aff23c59be1f3116c969cd" Jan 20 17:36:30 crc kubenswrapper[4558]: I0120 17:36:30.102654 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-5b4c8486cd-bh7qx" Jan 20 17:36:30 crc kubenswrapper[4558]: I0120 17:36:30.111146 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"7ee90982-23a3-4111-9c4e-159828a51a69","Type":"ContainerStarted","Data":"3bb4afac6c286ba59753565000e90ace9365499f3dff7e44738101c6faf5cc39"} Jan 20 17:36:30 crc kubenswrapper[4558]: I0120 17:36:30.113911 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-c7b8d4458-7ddjq" event={"ID":"ff846ca3-9970-4311-915b-e2ad95aa07b8","Type":"ContainerDied","Data":"9761e5f0f23f3fda1bf98680cc96c5f153da35b0b62e442b1989dff912c77a8d"} Jan 20 17:36:30 crc kubenswrapper[4558]: I0120 17:36:30.113962 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-c7b8d4458-7ddjq" Jan 20 17:36:30 crc kubenswrapper[4558]: I0120 17:36:30.132562 4558 scope.go:117] "RemoveContainer" containerID="ea1f141ab42d1923c6ad40bf5bb7a415db52b6566887f2c7238c3eb688cf92ea" Jan 20 17:36:30 crc kubenswrapper[4558]: I0120 17:36:30.135536 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-metadata-0" podStartSLOduration=2.13551535 podStartE2EDuration="2.13551535s" podCreationTimestamp="2026-01-20 17:36:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:36:30.132834779 +0000 UTC m=+3283.893172736" watchObservedRunningTime="2026-01-20 17:36:30.13551535 +0000 UTC m=+3283.895853316" Jan 20 17:36:30 crc kubenswrapper[4558]: I0120 17:36:30.153441 4558 scope.go:117] "RemoveContainer" containerID="ae7a97185cd9fbb38ff879e0bdbf67d1b5340be215aff23c59be1f3116c969cd" Jan 20 17:36:30 crc kubenswrapper[4558]: E0120 17:36:30.153838 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ae7a97185cd9fbb38ff879e0bdbf67d1b5340be215aff23c59be1f3116c969cd\": container with ID starting with ae7a97185cd9fbb38ff879e0bdbf67d1b5340be215aff23c59be1f3116c969cd not found: ID does not exist" containerID="ae7a97185cd9fbb38ff879e0bdbf67d1b5340be215aff23c59be1f3116c969cd" Jan 20 17:36:30 crc kubenswrapper[4558]: I0120 17:36:30.153903 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ae7a97185cd9fbb38ff879e0bdbf67d1b5340be215aff23c59be1f3116c969cd"} err="failed to get container status \"ae7a97185cd9fbb38ff879e0bdbf67d1b5340be215aff23c59be1f3116c969cd\": rpc error: code = NotFound desc = could not find container \"ae7a97185cd9fbb38ff879e0bdbf67d1b5340be215aff23c59be1f3116c969cd\": container with ID starting with ae7a97185cd9fbb38ff879e0bdbf67d1b5340be215aff23c59be1f3116c969cd not found: ID does not exist" Jan 20 17:36:30 crc kubenswrapper[4558]: I0120 17:36:30.153941 4558 scope.go:117] "RemoveContainer" containerID="ea1f141ab42d1923c6ad40bf5bb7a415db52b6566887f2c7238c3eb688cf92ea" Jan 20 17:36:30 crc kubenswrapper[4558]: E0120 17:36:30.154235 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ea1f141ab42d1923c6ad40bf5bb7a415db52b6566887f2c7238c3eb688cf92ea\": container with ID starting with ea1f141ab42d1923c6ad40bf5bb7a415db52b6566887f2c7238c3eb688cf92ea not found: ID does not exist" containerID="ea1f141ab42d1923c6ad40bf5bb7a415db52b6566887f2c7238c3eb688cf92ea" Jan 20 17:36:30 
crc kubenswrapper[4558]: I0120 17:36:30.154283 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ea1f141ab42d1923c6ad40bf5bb7a415db52b6566887f2c7238c3eb688cf92ea"} err="failed to get container status \"ea1f141ab42d1923c6ad40bf5bb7a415db52b6566887f2c7238c3eb688cf92ea\": rpc error: code = NotFound desc = could not find container \"ea1f141ab42d1923c6ad40bf5bb7a415db52b6566887f2c7238c3eb688cf92ea\": container with ID starting with ea1f141ab42d1923c6ad40bf5bb7a415db52b6566887f2c7238c3eb688cf92ea not found: ID does not exist" Jan 20 17:36:30 crc kubenswrapper[4558]: I0120 17:36:30.154300 4558 scope.go:117] "RemoveContainer" containerID="82d446802d7e8d621542fbedc2f7511c1da11e01b6b157d8ef90209c4965a1a1" Jan 20 17:36:30 crc kubenswrapper[4558]: I0120 17:36:30.169391 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-5b4c8486cd-bh7qx"] Jan 20 17:36:30 crc kubenswrapper[4558]: I0120 17:36:30.179823 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-5b4c8486cd-bh7qx"] Jan 20 17:36:30 crc kubenswrapper[4558]: I0120 17:36:30.188739 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-c7b8d4458-7ddjq"] Jan 20 17:36:30 crc kubenswrapper[4558]: I0120 17:36:30.200029 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-c7b8d4458-7ddjq"] Jan 20 17:36:30 crc kubenswrapper[4558]: I0120 17:36:30.507600 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:36:30 crc kubenswrapper[4558]: I0120 17:36:30.580546 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="89049a89-bdd9-4d74-bdc0-64002d4f4842" path="/var/lib/kubelet/pods/89049a89-bdd9-4d74-bdc0-64002d4f4842/volumes" Jan 20 17:36:30 crc kubenswrapper[4558]: I0120 17:36:30.581280 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ff846ca3-9970-4311-915b-e2ad95aa07b8" path="/var/lib/kubelet/pods/ff846ca3-9970-4311-915b-e2ad95aa07b8/volumes" Jan 20 17:36:33 crc kubenswrapper[4558]: I0120 17:36:33.423634 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:36:33 crc kubenswrapper[4558]: I0120 17:36:33.424121 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:36:33 crc kubenswrapper[4558]: I0120 17:36:33.443621 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:36:33 crc kubenswrapper[4558]: I0120 17:36:33.443679 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:36:33 crc kubenswrapper[4558]: I0120 17:36:33.455461 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:36:33 crc kubenswrapper[4558]: I0120 17:36:33.458081 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:36:34 crc kubenswrapper[4558]: I0120 17:36:34.163234 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:36:34 crc kubenswrapper[4558]: I0120 17:36:34.163595 4558 kubelet.go:2542] "SyncLoop 
(probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:36:35 crc kubenswrapper[4558]: I0120 17:36:35.494291 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:36:35 crc kubenswrapper[4558]: I0120 17:36:35.494348 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:36:35 crc kubenswrapper[4558]: I0120 17:36:35.507671 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:36:35 crc kubenswrapper[4558]: I0120 17:36:35.535684 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:36:35 crc kubenswrapper[4558]: I0120 17:36:35.732078 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:36:35 crc kubenswrapper[4558]: I0120 17:36:35.767449 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:36:35 crc kubenswrapper[4558]: I0120 17:36:35.856812 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:36:36 crc kubenswrapper[4558]: I0120 17:36:36.200429 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:36:36 crc kubenswrapper[4558]: I0120 17:36:36.506306 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" podUID="906beedc-3fa2-4d6e-a6e8-485ca2fb1082" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.159:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:36:36 crc kubenswrapper[4558]: I0120 17:36:36.506335 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" podUID="906beedc-3fa2-4d6e-a6e8-485ca2fb1082" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.159:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:36:38 crc kubenswrapper[4558]: I0120 17:36:38.443485 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:36:38 crc kubenswrapper[4558]: I0120 17:36:38.444125 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:36:38 crc kubenswrapper[4558]: I0120 17:36:38.832727 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:36:38 crc kubenswrapper[4558]: I0120 17:36:38.832920 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/openstackclient" podUID="a3058bcc-fe5c-4662-ad3c-982e823eb04a" containerName="openstackclient" containerID="cri-o://83a869e63968cef48fe6ad2503082138083707d2b97eb35d862c937912bd1d3d" gracePeriod=2 Jan 20 17:36:38 crc kubenswrapper[4558]: I0120 17:36:38.841601 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:36:38 crc kubenswrapper[4558]: I0120 17:36:38.869996 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 
20 17:36:38 crc kubenswrapper[4558]: E0120 17:36:38.870452 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3058bcc-fe5c-4662-ad3c-982e823eb04a" containerName="openstackclient" Jan 20 17:36:38 crc kubenswrapper[4558]: I0120 17:36:38.870470 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3058bcc-fe5c-4662-ad3c-982e823eb04a" containerName="openstackclient" Jan 20 17:36:38 crc kubenswrapper[4558]: E0120 17:36:38.870491 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89049a89-bdd9-4d74-bdc0-64002d4f4842" containerName="neutron-httpd" Jan 20 17:36:38 crc kubenswrapper[4558]: I0120 17:36:38.870496 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="89049a89-bdd9-4d74-bdc0-64002d4f4842" containerName="neutron-httpd" Jan 20 17:36:38 crc kubenswrapper[4558]: E0120 17:36:38.870516 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff846ca3-9970-4311-915b-e2ad95aa07b8" containerName="keystone-api" Jan 20 17:36:38 crc kubenswrapper[4558]: I0120 17:36:38.870521 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff846ca3-9970-4311-915b-e2ad95aa07b8" containerName="keystone-api" Jan 20 17:36:38 crc kubenswrapper[4558]: E0120 17:36:38.870535 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89049a89-bdd9-4d74-bdc0-64002d4f4842" containerName="neutron-api" Jan 20 17:36:38 crc kubenswrapper[4558]: I0120 17:36:38.870540 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="89049a89-bdd9-4d74-bdc0-64002d4f4842" containerName="neutron-api" Jan 20 17:36:38 crc kubenswrapper[4558]: I0120 17:36:38.870711 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="89049a89-bdd9-4d74-bdc0-64002d4f4842" containerName="neutron-api" Jan 20 17:36:38 crc kubenswrapper[4558]: I0120 17:36:38.870736 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ff846ca3-9970-4311-915b-e2ad95aa07b8" containerName="keystone-api" Jan 20 17:36:38 crc kubenswrapper[4558]: I0120 17:36:38.870744 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a3058bcc-fe5c-4662-ad3c-982e823eb04a" containerName="openstackclient" Jan 20 17:36:38 crc kubenswrapper[4558]: I0120 17:36:38.870754 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="89049a89-bdd9-4d74-bdc0-64002d4f4842" containerName="neutron-httpd" Jan 20 17:36:38 crc kubenswrapper[4558]: I0120 17:36:38.871370 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstackclient" Jan 20 17:36:38 crc kubenswrapper[4558]: I0120 17:36:38.878771 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:36:38 crc kubenswrapper[4558]: I0120 17:36:38.893112 4558 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack-kuttl-tests/openstackclient" oldPodUID="a3058bcc-fe5c-4662-ad3c-982e823eb04a" podUID="0b511fac-7a17-433d-8baa-9eefa278bd25" Jan 20 17:36:38 crc kubenswrapper[4558]: I0120 17:36:38.977303 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/0b511fac-7a17-433d-8baa-9eefa278bd25-openstack-config\") pod \"openstackclient\" (UID: \"0b511fac-7a17-433d-8baa-9eefa278bd25\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:36:38 crc kubenswrapper[4558]: I0120 17:36:38.977389 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b511fac-7a17-433d-8baa-9eefa278bd25-combined-ca-bundle\") pod \"openstackclient\" (UID: \"0b511fac-7a17-433d-8baa-9eefa278bd25\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:36:38 crc kubenswrapper[4558]: I0120 17:36:38.977878 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/0b511fac-7a17-433d-8baa-9eefa278bd25-openstack-config-secret\") pod \"openstackclient\" (UID: \"0b511fac-7a17-433d-8baa-9eefa278bd25\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:36:38 crc kubenswrapper[4558]: I0120 17:36:38.978018 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b6l2l\" (UniqueName: \"kubernetes.io/projected/0b511fac-7a17-433d-8baa-9eefa278bd25-kube-api-access-b6l2l\") pod \"openstackclient\" (UID: \"0b511fac-7a17-433d-8baa-9eefa278bd25\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:36:39 crc kubenswrapper[4558]: I0120 17:36:39.079956 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/0b511fac-7a17-433d-8baa-9eefa278bd25-openstack-config-secret\") pod \"openstackclient\" (UID: \"0b511fac-7a17-433d-8baa-9eefa278bd25\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:36:39 crc kubenswrapper[4558]: I0120 17:36:39.080030 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b6l2l\" (UniqueName: \"kubernetes.io/projected/0b511fac-7a17-433d-8baa-9eefa278bd25-kube-api-access-b6l2l\") pod \"openstackclient\" (UID: \"0b511fac-7a17-433d-8baa-9eefa278bd25\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:36:39 crc kubenswrapper[4558]: I0120 17:36:39.080301 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/0b511fac-7a17-433d-8baa-9eefa278bd25-openstack-config\") pod \"openstackclient\" (UID: \"0b511fac-7a17-433d-8baa-9eefa278bd25\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:36:39 crc kubenswrapper[4558]: I0120 17:36:39.080331 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/0b511fac-7a17-433d-8baa-9eefa278bd25-combined-ca-bundle\") pod \"openstackclient\" (UID: \"0b511fac-7a17-433d-8baa-9eefa278bd25\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:36:39 crc kubenswrapper[4558]: I0120 17:36:39.081204 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/0b511fac-7a17-433d-8baa-9eefa278bd25-openstack-config\") pod \"openstackclient\" (UID: \"0b511fac-7a17-433d-8baa-9eefa278bd25\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:36:39 crc kubenswrapper[4558]: I0120 17:36:39.086087 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b511fac-7a17-433d-8baa-9eefa278bd25-combined-ca-bundle\") pod \"openstackclient\" (UID: \"0b511fac-7a17-433d-8baa-9eefa278bd25\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:36:39 crc kubenswrapper[4558]: I0120 17:36:39.092567 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/0b511fac-7a17-433d-8baa-9eefa278bd25-openstack-config-secret\") pod \"openstackclient\" (UID: \"0b511fac-7a17-433d-8baa-9eefa278bd25\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:36:39 crc kubenswrapper[4558]: I0120 17:36:39.093525 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b6l2l\" (UniqueName: \"kubernetes.io/projected/0b511fac-7a17-433d-8baa-9eefa278bd25-kube-api-access-b6l2l\") pod \"openstackclient\" (UID: \"0b511fac-7a17-433d-8baa-9eefa278bd25\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:36:39 crc kubenswrapper[4558]: I0120 17:36:39.211545 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstackclient" Jan 20 17:36:39 crc kubenswrapper[4558]: I0120 17:36:39.454318 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-metadata-0" podUID="7ee90982-23a3-4111-9c4e-159828a51a69" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.161:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:36:39 crc kubenswrapper[4558]: I0120 17:36:39.454341 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-metadata-0" podUID="7ee90982-23a3-4111-9c4e-159828a51a69" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.161:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:36:39 crc kubenswrapper[4558]: I0120 17:36:39.674473 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:36:40 crc kubenswrapper[4558]: I0120 17:36:40.238511 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstackclient" event={"ID":"0b511fac-7a17-433d-8baa-9eefa278bd25","Type":"ContainerStarted","Data":"b9d69bec651538e6c3bb7125cf005d0443912c83fa1175fd1642916e1d9181f3"} Jan 20 17:36:40 crc kubenswrapper[4558]: I0120 17:36:40.238888 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstackclient" event={"ID":"0b511fac-7a17-433d-8baa-9eefa278bd25","Type":"ContainerStarted","Data":"ed94a999ca3a11041e0551ee693970fb5ee2739423611c5172c4257c4ba4cdae"} Jan 20 17:36:40 crc kubenswrapper[4558]: I0120 17:36:40.261405 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/openstackclient" podStartSLOduration=2.2613884410000002 podStartE2EDuration="2.261388441s" podCreationTimestamp="2026-01-20 17:36:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:36:40.25229399 +0000 UTC m=+3294.012631958" watchObservedRunningTime="2026-01-20 17:36:40.261388441 +0000 UTC m=+3294.021726408" Jan 20 17:36:41 crc kubenswrapper[4558]: I0120 17:36:41.097674 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstackclient" Jan 20 17:36:41 crc kubenswrapper[4558]: I0120 17:36:41.226191 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/a3058bcc-fe5c-4662-ad3c-982e823eb04a-openstack-config\") pod \"a3058bcc-fe5c-4662-ad3c-982e823eb04a\" (UID: \"a3058bcc-fe5c-4662-ad3c-982e823eb04a\") " Jan 20 17:36:41 crc kubenswrapper[4558]: I0120 17:36:41.226617 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/a3058bcc-fe5c-4662-ad3c-982e823eb04a-openstack-config-secret\") pod \"a3058bcc-fe5c-4662-ad3c-982e823eb04a\" (UID: \"a3058bcc-fe5c-4662-ad3c-982e823eb04a\") " Jan 20 17:36:41 crc kubenswrapper[4558]: I0120 17:36:41.226715 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3058bcc-fe5c-4662-ad3c-982e823eb04a-combined-ca-bundle\") pod \"a3058bcc-fe5c-4662-ad3c-982e823eb04a\" (UID: \"a3058bcc-fe5c-4662-ad3c-982e823eb04a\") " Jan 20 17:36:41 crc kubenswrapper[4558]: I0120 17:36:41.226758 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bcth7\" (UniqueName: \"kubernetes.io/projected/a3058bcc-fe5c-4662-ad3c-982e823eb04a-kube-api-access-bcth7\") pod \"a3058bcc-fe5c-4662-ad3c-982e823eb04a\" (UID: \"a3058bcc-fe5c-4662-ad3c-982e823eb04a\") " Jan 20 17:36:41 crc kubenswrapper[4558]: I0120 17:36:41.233876 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a3058bcc-fe5c-4662-ad3c-982e823eb04a-kube-api-access-bcth7" (OuterVolumeSpecName: "kube-api-access-bcth7") pod "a3058bcc-fe5c-4662-ad3c-982e823eb04a" (UID: "a3058bcc-fe5c-4662-ad3c-982e823eb04a"). InnerVolumeSpecName "kube-api-access-bcth7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:36:41 crc kubenswrapper[4558]: I0120 17:36:41.249612 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a3058bcc-fe5c-4662-ad3c-982e823eb04a-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "a3058bcc-fe5c-4662-ad3c-982e823eb04a" (UID: "a3058bcc-fe5c-4662-ad3c-982e823eb04a"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:36:41 crc kubenswrapper[4558]: I0120 17:36:41.252808 4558 generic.go:334] "Generic (PLEG): container finished" podID="a3058bcc-fe5c-4662-ad3c-982e823eb04a" containerID="83a869e63968cef48fe6ad2503082138083707d2b97eb35d862c937912bd1d3d" exitCode=137 Jan 20 17:36:41 crc kubenswrapper[4558]: I0120 17:36:41.252963 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstackclient" Jan 20 17:36:41 crc kubenswrapper[4558]: I0120 17:36:41.252967 4558 scope.go:117] "RemoveContainer" containerID="83a869e63968cef48fe6ad2503082138083707d2b97eb35d862c937912bd1d3d" Jan 20 17:36:41 crc kubenswrapper[4558]: I0120 17:36:41.257435 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a3058bcc-fe5c-4662-ad3c-982e823eb04a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a3058bcc-fe5c-4662-ad3c-982e823eb04a" (UID: "a3058bcc-fe5c-4662-ad3c-982e823eb04a"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:41 crc kubenswrapper[4558]: I0120 17:36:41.273751 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a3058bcc-fe5c-4662-ad3c-982e823eb04a-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "a3058bcc-fe5c-4662-ad3c-982e823eb04a" (UID: "a3058bcc-fe5c-4662-ad3c-982e823eb04a"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:41 crc kubenswrapper[4558]: I0120 17:36:41.330636 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3058bcc-fe5c-4662-ad3c-982e823eb04a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:41 crc kubenswrapper[4558]: I0120 17:36:41.330673 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bcth7\" (UniqueName: \"kubernetes.io/projected/a3058bcc-fe5c-4662-ad3c-982e823eb04a-kube-api-access-bcth7\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:41 crc kubenswrapper[4558]: I0120 17:36:41.330691 4558 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/a3058bcc-fe5c-4662-ad3c-982e823eb04a-openstack-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:41 crc kubenswrapper[4558]: I0120 17:36:41.330703 4558 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/a3058bcc-fe5c-4662-ad3c-982e823eb04a-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:41 crc kubenswrapper[4558]: I0120 17:36:41.339744 4558 scope.go:117] "RemoveContainer" containerID="83a869e63968cef48fe6ad2503082138083707d2b97eb35d862c937912bd1d3d" Jan 20 17:36:41 crc kubenswrapper[4558]: E0120 17:36:41.340198 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"83a869e63968cef48fe6ad2503082138083707d2b97eb35d862c937912bd1d3d\": container with ID starting with 83a869e63968cef48fe6ad2503082138083707d2b97eb35d862c937912bd1d3d not found: ID does not exist" containerID="83a869e63968cef48fe6ad2503082138083707d2b97eb35d862c937912bd1d3d" Jan 20 17:36:41 crc kubenswrapper[4558]: I0120 17:36:41.340257 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"83a869e63968cef48fe6ad2503082138083707d2b97eb35d862c937912bd1d3d"} err="failed to get container status \"83a869e63968cef48fe6ad2503082138083707d2b97eb35d862c937912bd1d3d\": rpc error: code = NotFound desc = could not find container \"83a869e63968cef48fe6ad2503082138083707d2b97eb35d862c937912bd1d3d\": container with ID starting with 83a869e63968cef48fe6ad2503082138083707d2b97eb35d862c937912bd1d3d not found: ID does not exist" Jan 20 17:36:41 crc kubenswrapper[4558]: I0120 17:36:41.568312 4558 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack-kuttl-tests/openstackclient" oldPodUID="a3058bcc-fe5c-4662-ad3c-982e823eb04a" podUID="0b511fac-7a17-433d-8baa-9eefa278bd25" Jan 20 17:36:42 crc kubenswrapper[4558]: I0120 17:36:42.577543 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a3058bcc-fe5c-4662-ad3c-982e823eb04a" path="/var/lib/kubelet/pods/a3058bcc-fe5c-4662-ad3c-982e823eb04a/volumes" Jan 20 17:36:45 crc kubenswrapper[4558]: I0120 17:36:45.502884 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:36:45 crc kubenswrapper[4558]: I0120 17:36:45.504442 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:36:45 crc kubenswrapper[4558]: I0120 17:36:45.504696 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:36:45 crc kubenswrapper[4558]: I0120 17:36:45.516348 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:36:46 crc kubenswrapper[4558]: I0120 17:36:46.338414 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:36:46 crc kubenswrapper[4558]: I0120 17:36:46.345134 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:36:46 crc kubenswrapper[4558]: I0120 17:36:46.677916 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/swift-proxy-96bd7cb78-txfmk"] Jan 20 17:36:46 crc kubenswrapper[4558]: I0120 17:36:46.681422 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-proxy-96bd7cb78-txfmk" Jan 20 17:36:46 crc kubenswrapper[4558]: I0120 17:36:46.690182 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-proxy-96bd7cb78-txfmk"] Jan 20 17:36:46 crc kubenswrapper[4558]: I0120 17:36:46.866074 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c7ffc3d3-5db6-484c-b9c1-d065e7177eb7-run-httpd\") pod \"swift-proxy-96bd7cb78-txfmk\" (UID: \"c7ffc3d3-5db6-484c-b9c1-d065e7177eb7\") " pod="openstack-kuttl-tests/swift-proxy-96bd7cb78-txfmk" Jan 20 17:36:46 crc kubenswrapper[4558]: I0120 17:36:46.866210 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c7ffc3d3-5db6-484c-b9c1-d065e7177eb7-log-httpd\") pod \"swift-proxy-96bd7cb78-txfmk\" (UID: \"c7ffc3d3-5db6-484c-b9c1-d065e7177eb7\") " pod="openstack-kuttl-tests/swift-proxy-96bd7cb78-txfmk" Jan 20 17:36:46 crc kubenswrapper[4558]: I0120 17:36:46.866260 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7ffc3d3-5db6-484c-b9c1-d065e7177eb7-config-data\") pod \"swift-proxy-96bd7cb78-txfmk\" (UID: \"c7ffc3d3-5db6-484c-b9c1-d065e7177eb7\") " pod="openstack-kuttl-tests/swift-proxy-96bd7cb78-txfmk" Jan 20 17:36:46 crc kubenswrapper[4558]: I0120 17:36:46.866290 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/c7ffc3d3-5db6-484c-b9c1-d065e7177eb7-etc-swift\") pod \"swift-proxy-96bd7cb78-txfmk\" (UID: \"c7ffc3d3-5db6-484c-b9c1-d065e7177eb7\") " pod="openstack-kuttl-tests/swift-proxy-96bd7cb78-txfmk" Jan 20 17:36:46 crc kubenswrapper[4558]: I0120 17:36:46.866335 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c7ffc3d3-5db6-484c-b9c1-d065e7177eb7-internal-tls-certs\") pod \"swift-proxy-96bd7cb78-txfmk\" (UID: \"c7ffc3d3-5db6-484c-b9c1-d065e7177eb7\") " pod="openstack-kuttl-tests/swift-proxy-96bd7cb78-txfmk" Jan 20 17:36:46 crc kubenswrapper[4558]: I0120 
17:36:46.866514 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7ffc3d3-5db6-484c-b9c1-d065e7177eb7-combined-ca-bundle\") pod \"swift-proxy-96bd7cb78-txfmk\" (UID: \"c7ffc3d3-5db6-484c-b9c1-d065e7177eb7\") " pod="openstack-kuttl-tests/swift-proxy-96bd7cb78-txfmk" Jan 20 17:36:46 crc kubenswrapper[4558]: I0120 17:36:46.866543 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c7ffc3d3-5db6-484c-b9c1-d065e7177eb7-public-tls-certs\") pod \"swift-proxy-96bd7cb78-txfmk\" (UID: \"c7ffc3d3-5db6-484c-b9c1-d065e7177eb7\") " pod="openstack-kuttl-tests/swift-proxy-96bd7cb78-txfmk" Jan 20 17:36:46 crc kubenswrapper[4558]: I0120 17:36:46.866621 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wwzwk\" (UniqueName: \"kubernetes.io/projected/c7ffc3d3-5db6-484c-b9c1-d065e7177eb7-kube-api-access-wwzwk\") pod \"swift-proxy-96bd7cb78-txfmk\" (UID: \"c7ffc3d3-5db6-484c-b9c1-d065e7177eb7\") " pod="openstack-kuttl-tests/swift-proxy-96bd7cb78-txfmk" Jan 20 17:36:46 crc kubenswrapper[4558]: I0120 17:36:46.968768 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7ffc3d3-5db6-484c-b9c1-d065e7177eb7-combined-ca-bundle\") pod \"swift-proxy-96bd7cb78-txfmk\" (UID: \"c7ffc3d3-5db6-484c-b9c1-d065e7177eb7\") " pod="openstack-kuttl-tests/swift-proxy-96bd7cb78-txfmk" Jan 20 17:36:46 crc kubenswrapper[4558]: I0120 17:36:46.969347 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c7ffc3d3-5db6-484c-b9c1-d065e7177eb7-public-tls-certs\") pod \"swift-proxy-96bd7cb78-txfmk\" (UID: \"c7ffc3d3-5db6-484c-b9c1-d065e7177eb7\") " pod="openstack-kuttl-tests/swift-proxy-96bd7cb78-txfmk" Jan 20 17:36:46 crc kubenswrapper[4558]: I0120 17:36:46.969502 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wwzwk\" (UniqueName: \"kubernetes.io/projected/c7ffc3d3-5db6-484c-b9c1-d065e7177eb7-kube-api-access-wwzwk\") pod \"swift-proxy-96bd7cb78-txfmk\" (UID: \"c7ffc3d3-5db6-484c-b9c1-d065e7177eb7\") " pod="openstack-kuttl-tests/swift-proxy-96bd7cb78-txfmk" Jan 20 17:36:46 crc kubenswrapper[4558]: I0120 17:36:46.969654 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c7ffc3d3-5db6-484c-b9c1-d065e7177eb7-run-httpd\") pod \"swift-proxy-96bd7cb78-txfmk\" (UID: \"c7ffc3d3-5db6-484c-b9c1-d065e7177eb7\") " pod="openstack-kuttl-tests/swift-proxy-96bd7cb78-txfmk" Jan 20 17:36:46 crc kubenswrapper[4558]: I0120 17:36:46.969783 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c7ffc3d3-5db6-484c-b9c1-d065e7177eb7-log-httpd\") pod \"swift-proxy-96bd7cb78-txfmk\" (UID: \"c7ffc3d3-5db6-484c-b9c1-d065e7177eb7\") " pod="openstack-kuttl-tests/swift-proxy-96bd7cb78-txfmk" Jan 20 17:36:46 crc kubenswrapper[4558]: I0120 17:36:46.969904 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7ffc3d3-5db6-484c-b9c1-d065e7177eb7-config-data\") pod \"swift-proxy-96bd7cb78-txfmk\" (UID: \"c7ffc3d3-5db6-484c-b9c1-d065e7177eb7\") 
" pod="openstack-kuttl-tests/swift-proxy-96bd7cb78-txfmk" Jan 20 17:36:46 crc kubenswrapper[4558]: I0120 17:36:46.969988 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/c7ffc3d3-5db6-484c-b9c1-d065e7177eb7-etc-swift\") pod \"swift-proxy-96bd7cb78-txfmk\" (UID: \"c7ffc3d3-5db6-484c-b9c1-d065e7177eb7\") " pod="openstack-kuttl-tests/swift-proxy-96bd7cb78-txfmk" Jan 20 17:36:46 crc kubenswrapper[4558]: I0120 17:36:46.970085 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c7ffc3d3-5db6-484c-b9c1-d065e7177eb7-internal-tls-certs\") pod \"swift-proxy-96bd7cb78-txfmk\" (UID: \"c7ffc3d3-5db6-484c-b9c1-d065e7177eb7\") " pod="openstack-kuttl-tests/swift-proxy-96bd7cb78-txfmk" Jan 20 17:36:46 crc kubenswrapper[4558]: I0120 17:36:46.970347 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c7ffc3d3-5db6-484c-b9c1-d065e7177eb7-run-httpd\") pod \"swift-proxy-96bd7cb78-txfmk\" (UID: \"c7ffc3d3-5db6-484c-b9c1-d065e7177eb7\") " pod="openstack-kuttl-tests/swift-proxy-96bd7cb78-txfmk" Jan 20 17:36:46 crc kubenswrapper[4558]: I0120 17:36:46.971039 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c7ffc3d3-5db6-484c-b9c1-d065e7177eb7-log-httpd\") pod \"swift-proxy-96bd7cb78-txfmk\" (UID: \"c7ffc3d3-5db6-484c-b9c1-d065e7177eb7\") " pod="openstack-kuttl-tests/swift-proxy-96bd7cb78-txfmk" Jan 20 17:36:46 crc kubenswrapper[4558]: I0120 17:36:46.977652 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/c7ffc3d3-5db6-484c-b9c1-d065e7177eb7-etc-swift\") pod \"swift-proxy-96bd7cb78-txfmk\" (UID: \"c7ffc3d3-5db6-484c-b9c1-d065e7177eb7\") " pod="openstack-kuttl-tests/swift-proxy-96bd7cb78-txfmk" Jan 20 17:36:46 crc kubenswrapper[4558]: I0120 17:36:46.978121 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c7ffc3d3-5db6-484c-b9c1-d065e7177eb7-internal-tls-certs\") pod \"swift-proxy-96bd7cb78-txfmk\" (UID: \"c7ffc3d3-5db6-484c-b9c1-d065e7177eb7\") " pod="openstack-kuttl-tests/swift-proxy-96bd7cb78-txfmk" Jan 20 17:36:46 crc kubenswrapper[4558]: I0120 17:36:46.979057 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7ffc3d3-5db6-484c-b9c1-d065e7177eb7-config-data\") pod \"swift-proxy-96bd7cb78-txfmk\" (UID: \"c7ffc3d3-5db6-484c-b9c1-d065e7177eb7\") " pod="openstack-kuttl-tests/swift-proxy-96bd7cb78-txfmk" Jan 20 17:36:46 crc kubenswrapper[4558]: I0120 17:36:46.979656 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7ffc3d3-5db6-484c-b9c1-d065e7177eb7-combined-ca-bundle\") pod \"swift-proxy-96bd7cb78-txfmk\" (UID: \"c7ffc3d3-5db6-484c-b9c1-d065e7177eb7\") " pod="openstack-kuttl-tests/swift-proxy-96bd7cb78-txfmk" Jan 20 17:36:46 crc kubenswrapper[4558]: I0120 17:36:46.988748 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wwzwk\" (UniqueName: \"kubernetes.io/projected/c7ffc3d3-5db6-484c-b9c1-d065e7177eb7-kube-api-access-wwzwk\") pod \"swift-proxy-96bd7cb78-txfmk\" (UID: \"c7ffc3d3-5db6-484c-b9c1-d065e7177eb7\") " 
pod="openstack-kuttl-tests/swift-proxy-96bd7cb78-txfmk" Jan 20 17:36:46 crc kubenswrapper[4558]: I0120 17:36:46.990109 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c7ffc3d3-5db6-484c-b9c1-d065e7177eb7-public-tls-certs\") pod \"swift-proxy-96bd7cb78-txfmk\" (UID: \"c7ffc3d3-5db6-484c-b9c1-d065e7177eb7\") " pod="openstack-kuttl-tests/swift-proxy-96bd7cb78-txfmk" Jan 20 17:36:47 crc kubenswrapper[4558]: I0120 17:36:47.002159 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-proxy-96bd7cb78-txfmk" Jan 20 17:36:47 crc kubenswrapper[4558]: I0120 17:36:47.316369 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:36:47 crc kubenswrapper[4558]: W0120 17:36:47.419653 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc7ffc3d3_5db6_484c_b9c1_d065e7177eb7.slice/crio-44cace5f75ae4607a7bdba8ebe710db8b5b6bac667a22188c9e242e506829df6 WatchSource:0}: Error finding container 44cace5f75ae4607a7bdba8ebe710db8b5b6bac667a22188c9e242e506829df6: Status 404 returned error can't find the container with id 44cace5f75ae4607a7bdba8ebe710db8b5b6bac667a22188c9e242e506829df6 Jan 20 17:36:47 crc kubenswrapper[4558]: I0120 17:36:47.421319 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-proxy-96bd7cb78-txfmk"] Jan 20 17:36:48 crc kubenswrapper[4558]: I0120 17:36:48.379734 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-96bd7cb78-txfmk" event={"ID":"c7ffc3d3-5db6-484c-b9c1-d065e7177eb7","Type":"ContainerStarted","Data":"cc29bf400e020c3de6a91d31522715a48fb29ae6d1627268b6c2963ee53e1761"} Jan 20 17:36:48 crc kubenswrapper[4558]: I0120 17:36:48.380062 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-96bd7cb78-txfmk" event={"ID":"c7ffc3d3-5db6-484c-b9c1-d065e7177eb7","Type":"ContainerStarted","Data":"1c07e4b28b7cf0b4abe599ad445f1580d0e65d644cc547c83bfc14814b631d5c"} Jan 20 17:36:48 crc kubenswrapper[4558]: I0120 17:36:48.380077 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-96bd7cb78-txfmk" event={"ID":"c7ffc3d3-5db6-484c-b9c1-d065e7177eb7","Type":"ContainerStarted","Data":"44cace5f75ae4607a7bdba8ebe710db8b5b6bac667a22188c9e242e506829df6"} Jan 20 17:36:48 crc kubenswrapper[4558]: I0120 17:36:48.380093 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/swift-proxy-96bd7cb78-txfmk" Jan 20 17:36:48 crc kubenswrapper[4558]: I0120 17:36:48.380106 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/swift-proxy-96bd7cb78-txfmk" Jan 20 17:36:48 crc kubenswrapper[4558]: I0120 17:36:48.400861 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/swift-proxy-96bd7cb78-txfmk" podStartSLOduration=2.400833431 podStartE2EDuration="2.400833431s" podCreationTimestamp="2026-01-20 17:36:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:36:48.398114588 +0000 UTC m=+3302.158452565" watchObservedRunningTime="2026-01-20 17:36:48.400833431 +0000 UTC m=+3302.161171398" Jan 20 17:36:48 crc kubenswrapper[4558]: I0120 17:36:48.447665 4558 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:36:48 crc kubenswrapper[4558]: I0120 17:36:48.449256 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:36:48 crc kubenswrapper[4558]: I0120 17:36:48.469520 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:36:49 crc kubenswrapper[4558]: I0120 17:36:49.401312 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:36:52 crc kubenswrapper[4558]: I0120 17:36:52.011253 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/swift-proxy-96bd7cb78-txfmk" Jan 20 17:36:52 crc kubenswrapper[4558]: I0120 17:36:52.011879 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/swift-proxy-96bd7cb78-txfmk" Jan 20 17:36:52 crc kubenswrapper[4558]: I0120 17:36:52.081189 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-proxy-b45ff9748-dd8kw"] Jan 20 17:36:52 crc kubenswrapper[4558]: I0120 17:36:52.081415 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-proxy-b45ff9748-dd8kw" podUID="0abcf7eb-7e84-4a8e-81e8-a397db63848b" containerName="proxy-httpd" containerID="cri-o://036131de8a482756286de316af4df0fc95119e515057e70e07dc6e69e0faa490" gracePeriod=30 Jan 20 17:36:52 crc kubenswrapper[4558]: I0120 17:36:52.081561 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-proxy-b45ff9748-dd8kw" podUID="0abcf7eb-7e84-4a8e-81e8-a397db63848b" containerName="proxy-server" containerID="cri-o://68b9c7b951138f4587464c2bbf21370065674f896ef8d19de0af36cc2a194b4a" gracePeriod=30 Jan 20 17:36:52 crc kubenswrapper[4558]: I0120 17:36:52.420236 4558 generic.go:334] "Generic (PLEG): container finished" podID="0abcf7eb-7e84-4a8e-81e8-a397db63848b" containerID="036131de8a482756286de316af4df0fc95119e515057e70e07dc6e69e0faa490" exitCode=0 Jan 20 17:36:52 crc kubenswrapper[4558]: I0120 17:36:52.420359 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-b45ff9748-dd8kw" event={"ID":"0abcf7eb-7e84-4a8e-81e8-a397db63848b","Type":"ContainerDied","Data":"036131de8a482756286de316af4df0fc95119e515057e70e07dc6e69e0faa490"} Jan 20 17:36:53 crc kubenswrapper[4558]: I0120 17:36:53.004434 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/swift-proxy-b45ff9748-dd8kw" Jan 20 17:36:53 crc kubenswrapper[4558]: I0120 17:36:53.112793 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0abcf7eb-7e84-4a8e-81e8-a397db63848b-config-data\") pod \"0abcf7eb-7e84-4a8e-81e8-a397db63848b\" (UID: \"0abcf7eb-7e84-4a8e-81e8-a397db63848b\") " Jan 20 17:36:53 crc kubenswrapper[4558]: I0120 17:36:53.112900 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gnw9s\" (UniqueName: \"kubernetes.io/projected/0abcf7eb-7e84-4a8e-81e8-a397db63848b-kube-api-access-gnw9s\") pod \"0abcf7eb-7e84-4a8e-81e8-a397db63848b\" (UID: \"0abcf7eb-7e84-4a8e-81e8-a397db63848b\") " Jan 20 17:36:53 crc kubenswrapper[4558]: I0120 17:36:53.112946 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0abcf7eb-7e84-4a8e-81e8-a397db63848b-log-httpd\") pod \"0abcf7eb-7e84-4a8e-81e8-a397db63848b\" (UID: \"0abcf7eb-7e84-4a8e-81e8-a397db63848b\") " Jan 20 17:36:53 crc kubenswrapper[4558]: I0120 17:36:53.112980 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0abcf7eb-7e84-4a8e-81e8-a397db63848b-internal-tls-certs\") pod \"0abcf7eb-7e84-4a8e-81e8-a397db63848b\" (UID: \"0abcf7eb-7e84-4a8e-81e8-a397db63848b\") " Jan 20 17:36:53 crc kubenswrapper[4558]: I0120 17:36:53.113048 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0abcf7eb-7e84-4a8e-81e8-a397db63848b-run-httpd\") pod \"0abcf7eb-7e84-4a8e-81e8-a397db63848b\" (UID: \"0abcf7eb-7e84-4a8e-81e8-a397db63848b\") " Jan 20 17:36:53 crc kubenswrapper[4558]: I0120 17:36:53.113203 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0abcf7eb-7e84-4a8e-81e8-a397db63848b-combined-ca-bundle\") pod \"0abcf7eb-7e84-4a8e-81e8-a397db63848b\" (UID: \"0abcf7eb-7e84-4a8e-81e8-a397db63848b\") " Jan 20 17:36:53 crc kubenswrapper[4558]: I0120 17:36:53.113251 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/0abcf7eb-7e84-4a8e-81e8-a397db63848b-etc-swift\") pod \"0abcf7eb-7e84-4a8e-81e8-a397db63848b\" (UID: \"0abcf7eb-7e84-4a8e-81e8-a397db63848b\") " Jan 20 17:36:53 crc kubenswrapper[4558]: I0120 17:36:53.113341 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0abcf7eb-7e84-4a8e-81e8-a397db63848b-public-tls-certs\") pod \"0abcf7eb-7e84-4a8e-81e8-a397db63848b\" (UID: \"0abcf7eb-7e84-4a8e-81e8-a397db63848b\") " Jan 20 17:36:53 crc kubenswrapper[4558]: I0120 17:36:53.115472 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0abcf7eb-7e84-4a8e-81e8-a397db63848b-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "0abcf7eb-7e84-4a8e-81e8-a397db63848b" (UID: "0abcf7eb-7e84-4a8e-81e8-a397db63848b"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:36:53 crc kubenswrapper[4558]: I0120 17:36:53.115806 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0abcf7eb-7e84-4a8e-81e8-a397db63848b-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "0abcf7eb-7e84-4a8e-81e8-a397db63848b" (UID: "0abcf7eb-7e84-4a8e-81e8-a397db63848b"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:36:53 crc kubenswrapper[4558]: I0120 17:36:53.125553 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0abcf7eb-7e84-4a8e-81e8-a397db63848b-kube-api-access-gnw9s" (OuterVolumeSpecName: "kube-api-access-gnw9s") pod "0abcf7eb-7e84-4a8e-81e8-a397db63848b" (UID: "0abcf7eb-7e84-4a8e-81e8-a397db63848b"). InnerVolumeSpecName "kube-api-access-gnw9s". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:36:53 crc kubenswrapper[4558]: I0120 17:36:53.126404 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0abcf7eb-7e84-4a8e-81e8-a397db63848b-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "0abcf7eb-7e84-4a8e-81e8-a397db63848b" (UID: "0abcf7eb-7e84-4a8e-81e8-a397db63848b"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:36:53 crc kubenswrapper[4558]: I0120 17:36:53.161500 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0abcf7eb-7e84-4a8e-81e8-a397db63848b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0abcf7eb-7e84-4a8e-81e8-a397db63848b" (UID: "0abcf7eb-7e84-4a8e-81e8-a397db63848b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:53 crc kubenswrapper[4558]: I0120 17:36:53.161431 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0abcf7eb-7e84-4a8e-81e8-a397db63848b-config-data" (OuterVolumeSpecName: "config-data") pod "0abcf7eb-7e84-4a8e-81e8-a397db63848b" (UID: "0abcf7eb-7e84-4a8e-81e8-a397db63848b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:53 crc kubenswrapper[4558]: I0120 17:36:53.166574 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0abcf7eb-7e84-4a8e-81e8-a397db63848b-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "0abcf7eb-7e84-4a8e-81e8-a397db63848b" (UID: "0abcf7eb-7e84-4a8e-81e8-a397db63848b"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:53 crc kubenswrapper[4558]: I0120 17:36:53.167994 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0abcf7eb-7e84-4a8e-81e8-a397db63848b-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "0abcf7eb-7e84-4a8e-81e8-a397db63848b" (UID: "0abcf7eb-7e84-4a8e-81e8-a397db63848b"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:36:53 crc kubenswrapper[4558]: I0120 17:36:53.218024 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0abcf7eb-7e84-4a8e-81e8-a397db63848b-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:53 crc kubenswrapper[4558]: I0120 17:36:53.218393 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0abcf7eb-7e84-4a8e-81e8-a397db63848b-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:53 crc kubenswrapper[4558]: I0120 17:36:53.218414 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0abcf7eb-7e84-4a8e-81e8-a397db63848b-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:53 crc kubenswrapper[4558]: I0120 17:36:53.218426 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0abcf7eb-7e84-4a8e-81e8-a397db63848b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:53 crc kubenswrapper[4558]: I0120 17:36:53.218439 4558 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/0abcf7eb-7e84-4a8e-81e8-a397db63848b-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:53 crc kubenswrapper[4558]: I0120 17:36:53.218452 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0abcf7eb-7e84-4a8e-81e8-a397db63848b-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:53 crc kubenswrapper[4558]: I0120 17:36:53.218463 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0abcf7eb-7e84-4a8e-81e8-a397db63848b-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:53 crc kubenswrapper[4558]: I0120 17:36:53.218478 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gnw9s\" (UniqueName: \"kubernetes.io/projected/0abcf7eb-7e84-4a8e-81e8-a397db63848b-kube-api-access-gnw9s\") on node \"crc\" DevicePath \"\"" Jan 20 17:36:53 crc kubenswrapper[4558]: I0120 17:36:53.437274 4558 generic.go:334] "Generic (PLEG): container finished" podID="0abcf7eb-7e84-4a8e-81e8-a397db63848b" containerID="68b9c7b951138f4587464c2bbf21370065674f896ef8d19de0af36cc2a194b4a" exitCode=0 Jan 20 17:36:53 crc kubenswrapper[4558]: I0120 17:36:53.437355 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/swift-proxy-b45ff9748-dd8kw" Jan 20 17:36:53 crc kubenswrapper[4558]: I0120 17:36:53.437355 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-b45ff9748-dd8kw" event={"ID":"0abcf7eb-7e84-4a8e-81e8-a397db63848b","Type":"ContainerDied","Data":"68b9c7b951138f4587464c2bbf21370065674f896ef8d19de0af36cc2a194b4a"} Jan 20 17:36:53 crc kubenswrapper[4558]: I0120 17:36:53.437414 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-b45ff9748-dd8kw" event={"ID":"0abcf7eb-7e84-4a8e-81e8-a397db63848b","Type":"ContainerDied","Data":"39730bfaeec948fd4e69f6e00d93bc1cefea775bae6a5aff3b1683c00212afa8"} Jan 20 17:36:53 crc kubenswrapper[4558]: I0120 17:36:53.437435 4558 scope.go:117] "RemoveContainer" containerID="68b9c7b951138f4587464c2bbf21370065674f896ef8d19de0af36cc2a194b4a" Jan 20 17:36:53 crc kubenswrapper[4558]: I0120 17:36:53.462543 4558 scope.go:117] "RemoveContainer" containerID="036131de8a482756286de316af4df0fc95119e515057e70e07dc6e69e0faa490" Jan 20 17:36:53 crc kubenswrapper[4558]: I0120 17:36:53.477561 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-proxy-b45ff9748-dd8kw"] Jan 20 17:36:53 crc kubenswrapper[4558]: I0120 17:36:53.484914 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/swift-proxy-b45ff9748-dd8kw"] Jan 20 17:36:53 crc kubenswrapper[4558]: I0120 17:36:53.486064 4558 scope.go:117] "RemoveContainer" containerID="68b9c7b951138f4587464c2bbf21370065674f896ef8d19de0af36cc2a194b4a" Jan 20 17:36:53 crc kubenswrapper[4558]: E0120 17:36:53.486571 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"68b9c7b951138f4587464c2bbf21370065674f896ef8d19de0af36cc2a194b4a\": container with ID starting with 68b9c7b951138f4587464c2bbf21370065674f896ef8d19de0af36cc2a194b4a not found: ID does not exist" containerID="68b9c7b951138f4587464c2bbf21370065674f896ef8d19de0af36cc2a194b4a" Jan 20 17:36:53 crc kubenswrapper[4558]: I0120 17:36:53.486614 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"68b9c7b951138f4587464c2bbf21370065674f896ef8d19de0af36cc2a194b4a"} err="failed to get container status \"68b9c7b951138f4587464c2bbf21370065674f896ef8d19de0af36cc2a194b4a\": rpc error: code = NotFound desc = could not find container \"68b9c7b951138f4587464c2bbf21370065674f896ef8d19de0af36cc2a194b4a\": container with ID starting with 68b9c7b951138f4587464c2bbf21370065674f896ef8d19de0af36cc2a194b4a not found: ID does not exist" Jan 20 17:36:53 crc kubenswrapper[4558]: I0120 17:36:53.486645 4558 scope.go:117] "RemoveContainer" containerID="036131de8a482756286de316af4df0fc95119e515057e70e07dc6e69e0faa490" Jan 20 17:36:53 crc kubenswrapper[4558]: E0120 17:36:53.486995 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"036131de8a482756286de316af4df0fc95119e515057e70e07dc6e69e0faa490\": container with ID starting with 036131de8a482756286de316af4df0fc95119e515057e70e07dc6e69e0faa490 not found: ID does not exist" containerID="036131de8a482756286de316af4df0fc95119e515057e70e07dc6e69e0faa490" Jan 20 17:36:53 crc kubenswrapper[4558]: I0120 17:36:53.487028 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"036131de8a482756286de316af4df0fc95119e515057e70e07dc6e69e0faa490"} 
err="failed to get container status \"036131de8a482756286de316af4df0fc95119e515057e70e07dc6e69e0faa490\": rpc error: code = NotFound desc = could not find container \"036131de8a482756286de316af4df0fc95119e515057e70e07dc6e69e0faa490\": container with ID starting with 036131de8a482756286de316af4df0fc95119e515057e70e07dc6e69e0faa490 not found: ID does not exist" Jan 20 17:36:54 crc kubenswrapper[4558]: I0120 17:36:54.185610 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:36:54 crc kubenswrapper[4558]: I0120 17:36:54.186003 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="dc94159e-846c-4207-a4e6-fc877335d179" containerName="ceilometer-central-agent" containerID="cri-o://5bf498b0c5a887fb9070dae794c0855721714041c990374c5706d705de7cdc2d" gracePeriod=30 Jan 20 17:36:54 crc kubenswrapper[4558]: I0120 17:36:54.186067 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="dc94159e-846c-4207-a4e6-fc877335d179" containerName="proxy-httpd" containerID="cri-o://8d18756d85f771ea1a8e265733bd0f0de8bef3da799fcc5dac7436aa91d96d1b" gracePeriod=30 Jan 20 17:36:54 crc kubenswrapper[4558]: I0120 17:36:54.186202 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="dc94159e-846c-4207-a4e6-fc877335d179" containerName="sg-core" containerID="cri-o://e4b0f89bb58f0ef8658ad1ca88dceb67d6f84e237fdd51276dbf055a76577647" gracePeriod=30 Jan 20 17:36:54 crc kubenswrapper[4558]: I0120 17:36:54.186293 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="dc94159e-846c-4207-a4e6-fc877335d179" containerName="ceilometer-notification-agent" containerID="cri-o://83ce987c5b708549cef86a6913ed91475d61f00a374b43c5a0a9d8d6b9e15ad1" gracePeriod=30 Jan 20 17:36:54 crc kubenswrapper[4558]: I0120 17:36:54.452482 4558 generic.go:334] "Generic (PLEG): container finished" podID="dc94159e-846c-4207-a4e6-fc877335d179" containerID="8d18756d85f771ea1a8e265733bd0f0de8bef3da799fcc5dac7436aa91d96d1b" exitCode=0 Jan 20 17:36:54 crc kubenswrapper[4558]: I0120 17:36:54.452848 4558 generic.go:334] "Generic (PLEG): container finished" podID="dc94159e-846c-4207-a4e6-fc877335d179" containerID="e4b0f89bb58f0ef8658ad1ca88dceb67d6f84e237fdd51276dbf055a76577647" exitCode=2 Jan 20 17:36:54 crc kubenswrapper[4558]: I0120 17:36:54.452657 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"dc94159e-846c-4207-a4e6-fc877335d179","Type":"ContainerDied","Data":"8d18756d85f771ea1a8e265733bd0f0de8bef3da799fcc5dac7436aa91d96d1b"} Jan 20 17:36:54 crc kubenswrapper[4558]: I0120 17:36:54.452911 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"dc94159e-846c-4207-a4e6-fc877335d179","Type":"ContainerDied","Data":"e4b0f89bb58f0ef8658ad1ca88dceb67d6f84e237fdd51276dbf055a76577647"} Jan 20 17:36:54 crc kubenswrapper[4558]: I0120 17:36:54.575425 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0abcf7eb-7e84-4a8e-81e8-a397db63848b" path="/var/lib/kubelet/pods/0abcf7eb-7e84-4a8e-81e8-a397db63848b/volumes" Jan 20 17:36:54 crc kubenswrapper[4558]: E0120 17:36:54.740645 4558 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 192.168.25.8:59950->192.168.25.8:43883: write tcp 
192.168.25.8:59950->192.168.25.8:43883: write: connection reset by peer Jan 20 17:36:55 crc kubenswrapper[4558]: I0120 17:36:55.476649 4558 generic.go:334] "Generic (PLEG): container finished" podID="dc94159e-846c-4207-a4e6-fc877335d179" containerID="5bf498b0c5a887fb9070dae794c0855721714041c990374c5706d705de7cdc2d" exitCode=0 Jan 20 17:36:55 crc kubenswrapper[4558]: I0120 17:36:55.476727 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"dc94159e-846c-4207-a4e6-fc877335d179","Type":"ContainerDied","Data":"5bf498b0c5a887fb9070dae794c0855721714041c990374c5706d705de7cdc2d"} Jan 20 17:36:57 crc kubenswrapper[4558]: I0120 17:36:57.329487 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 17:36:57 crc kubenswrapper[4558]: I0120 17:36:57.330205 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 17:37:01 crc kubenswrapper[4558]: E0120 17:37:01.018442 4558 upgradeaware.go:441] Error proxying data from backend to client: writeto tcp 192.168.25.8:32804->192.168.25.8:43883: read tcp 192.168.25.8:32804->192.168.25.8:43883: read: connection reset by peer Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.222110 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.407904 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dc94159e-846c-4207-a4e6-fc877335d179-scripts\") pod \"dc94159e-846c-4207-a4e6-fc877335d179\" (UID: \"dc94159e-846c-4207-a4e6-fc877335d179\") " Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.408369 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dc94159e-846c-4207-a4e6-fc877335d179-log-httpd\") pod \"dc94159e-846c-4207-a4e6-fc877335d179\" (UID: \"dc94159e-846c-4207-a4e6-fc877335d179\") " Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.408523 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc94159e-846c-4207-a4e6-fc877335d179-combined-ca-bundle\") pod \"dc94159e-846c-4207-a4e6-fc877335d179\" (UID: \"dc94159e-846c-4207-a4e6-fc877335d179\") " Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.408571 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/dc94159e-846c-4207-a4e6-fc877335d179-sg-core-conf-yaml\") pod \"dc94159e-846c-4207-a4e6-fc877335d179\" (UID: \"dc94159e-846c-4207-a4e6-fc877335d179\") " Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.408633 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rgxzh\" (UniqueName: \"kubernetes.io/projected/dc94159e-846c-4207-a4e6-fc877335d179-kube-api-access-rgxzh\") pod \"dc94159e-846c-4207-a4e6-fc877335d179\" (UID: 
\"dc94159e-846c-4207-a4e6-fc877335d179\") " Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.408891 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc94159e-846c-4207-a4e6-fc877335d179-config-data\") pod \"dc94159e-846c-4207-a4e6-fc877335d179\" (UID: \"dc94159e-846c-4207-a4e6-fc877335d179\") " Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.408999 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dc94159e-846c-4207-a4e6-fc877335d179-run-httpd\") pod \"dc94159e-846c-4207-a4e6-fc877335d179\" (UID: \"dc94159e-846c-4207-a4e6-fc877335d179\") " Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.409042 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/dc94159e-846c-4207-a4e6-fc877335d179-ceilometer-tls-certs\") pod \"dc94159e-846c-4207-a4e6-fc877335d179\" (UID: \"dc94159e-846c-4207-a4e6-fc877335d179\") " Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.409246 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dc94159e-846c-4207-a4e6-fc877335d179-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "dc94159e-846c-4207-a4e6-fc877335d179" (UID: "dc94159e-846c-4207-a4e6-fc877335d179"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.409629 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dc94159e-846c-4207-a4e6-fc877335d179-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.409642 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dc94159e-846c-4207-a4e6-fc877335d179-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "dc94159e-846c-4207-a4e6-fc877335d179" (UID: "dc94159e-846c-4207-a4e6-fc877335d179"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.419375 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc94159e-846c-4207-a4e6-fc877335d179-scripts" (OuterVolumeSpecName: "scripts") pod "dc94159e-846c-4207-a4e6-fc877335d179" (UID: "dc94159e-846c-4207-a4e6-fc877335d179"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.429189 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dc94159e-846c-4207-a4e6-fc877335d179-kube-api-access-rgxzh" (OuterVolumeSpecName: "kube-api-access-rgxzh") pod "dc94159e-846c-4207-a4e6-fc877335d179" (UID: "dc94159e-846c-4207-a4e6-fc877335d179"). InnerVolumeSpecName "kube-api-access-rgxzh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.433229 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc94159e-846c-4207-a4e6-fc877335d179-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "dc94159e-846c-4207-a4e6-fc877335d179" (UID: "dc94159e-846c-4207-a4e6-fc877335d179"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.473969 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc94159e-846c-4207-a4e6-fc877335d179-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dc94159e-846c-4207-a4e6-fc877335d179" (UID: "dc94159e-846c-4207-a4e6-fc877335d179"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.479339 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc94159e-846c-4207-a4e6-fc877335d179-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "dc94159e-846c-4207-a4e6-fc877335d179" (UID: "dc94159e-846c-4207-a4e6-fc877335d179"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.499608 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc94159e-846c-4207-a4e6-fc877335d179-config-data" (OuterVolumeSpecName: "config-data") pod "dc94159e-846c-4207-a4e6-fc877335d179" (UID: "dc94159e-846c-4207-a4e6-fc877335d179"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.512240 4558 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/dc94159e-846c-4207-a4e6-fc877335d179-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.512278 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dc94159e-846c-4207-a4e6-fc877335d179-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.512294 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc94159e-846c-4207-a4e6-fc877335d179-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.512308 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/dc94159e-846c-4207-a4e6-fc877335d179-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.512320 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rgxzh\" (UniqueName: \"kubernetes.io/projected/dc94159e-846c-4207-a4e6-fc877335d179-kube-api-access-rgxzh\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.512333 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc94159e-846c-4207-a4e6-fc877335d179-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.512344 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dc94159e-846c-4207-a4e6-fc877335d179-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.541738 4558 generic.go:334] "Generic (PLEG): container finished" podID="dc94159e-846c-4207-a4e6-fc877335d179" containerID="83ce987c5b708549cef86a6913ed91475d61f00a374b43c5a0a9d8d6b9e15ad1" exitCode=0 Jan 20 17:37:01 crc kubenswrapper[4558]: 
I0120 17:37:01.541793 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"dc94159e-846c-4207-a4e6-fc877335d179","Type":"ContainerDied","Data":"83ce987c5b708549cef86a6913ed91475d61f00a374b43c5a0a9d8d6b9e15ad1"} Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.541836 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"dc94159e-846c-4207-a4e6-fc877335d179","Type":"ContainerDied","Data":"931802c7078140a8ddc656e86c086b0574dc8ad1ad02d3bfdce7a2e061fb41d4"} Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.541868 4558 scope.go:117] "RemoveContainer" containerID="8d18756d85f771ea1a8e265733bd0f0de8bef3da799fcc5dac7436aa91d96d1b" Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.542044 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.579107 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.585886 4558 scope.go:117] "RemoveContainer" containerID="e4b0f89bb58f0ef8658ad1ca88dceb67d6f84e237fdd51276dbf055a76577647" Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.586335 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.604749 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:37:01 crc kubenswrapper[4558]: E0120 17:37:01.605249 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc94159e-846c-4207-a4e6-fc877335d179" containerName="sg-core" Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.605270 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc94159e-846c-4207-a4e6-fc877335d179" containerName="sg-core" Jan 20 17:37:01 crc kubenswrapper[4558]: E0120 17:37:01.605281 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0abcf7eb-7e84-4a8e-81e8-a397db63848b" containerName="proxy-server" Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.605287 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0abcf7eb-7e84-4a8e-81e8-a397db63848b" containerName="proxy-server" Jan 20 17:37:01 crc kubenswrapper[4558]: E0120 17:37:01.605301 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc94159e-846c-4207-a4e6-fc877335d179" containerName="ceilometer-central-agent" Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.605307 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc94159e-846c-4207-a4e6-fc877335d179" containerName="ceilometer-central-agent" Jan 20 17:37:01 crc kubenswrapper[4558]: E0120 17:37:01.605318 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc94159e-846c-4207-a4e6-fc877335d179" containerName="proxy-httpd" Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.605324 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc94159e-846c-4207-a4e6-fc877335d179" containerName="proxy-httpd" Jan 20 17:37:01 crc kubenswrapper[4558]: E0120 17:37:01.605349 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0abcf7eb-7e84-4a8e-81e8-a397db63848b" containerName="proxy-httpd" Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.605355 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0abcf7eb-7e84-4a8e-81e8-a397db63848b" 
containerName="proxy-httpd" Jan 20 17:37:01 crc kubenswrapper[4558]: E0120 17:37:01.605370 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc94159e-846c-4207-a4e6-fc877335d179" containerName="ceilometer-notification-agent" Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.605376 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc94159e-846c-4207-a4e6-fc877335d179" containerName="ceilometer-notification-agent" Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.605590 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc94159e-846c-4207-a4e6-fc877335d179" containerName="sg-core" Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.605613 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="0abcf7eb-7e84-4a8e-81e8-a397db63848b" containerName="proxy-server" Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.605621 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc94159e-846c-4207-a4e6-fc877335d179" containerName="ceilometer-notification-agent" Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.605633 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="0abcf7eb-7e84-4a8e-81e8-a397db63848b" containerName="proxy-httpd" Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.605648 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc94159e-846c-4207-a4e6-fc877335d179" containerName="proxy-httpd" Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.605665 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc94159e-846c-4207-a4e6-fc877335d179" containerName="ceilometer-central-agent" Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.607420 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.611132 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.611425 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.617500 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.617682 4558 scope.go:117] "RemoveContainer" containerID="83ce987c5b708549cef86a6913ed91475d61f00a374b43c5a0a9d8d6b9e15ad1" Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.617929 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ceilometer-internal-svc" Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.628655 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pnzfr\" (UniqueName: \"kubernetes.io/projected/1addf16e-1af6-431b-aff1-05c48a952f5a-kube-api-access-pnzfr\") pod \"ceilometer-0\" (UID: \"1addf16e-1af6-431b-aff1-05c48a952f5a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.628705 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1addf16e-1af6-431b-aff1-05c48a952f5a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"1addf16e-1af6-431b-aff1-05c48a952f5a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.628790 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1addf16e-1af6-431b-aff1-05c48a952f5a-log-httpd\") pod \"ceilometer-0\" (UID: \"1addf16e-1af6-431b-aff1-05c48a952f5a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.628872 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/1addf16e-1af6-431b-aff1-05c48a952f5a-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"1addf16e-1af6-431b-aff1-05c48a952f5a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.628919 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1addf16e-1af6-431b-aff1-05c48a952f5a-run-httpd\") pod \"ceilometer-0\" (UID: \"1addf16e-1af6-431b-aff1-05c48a952f5a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.628959 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1addf16e-1af6-431b-aff1-05c48a952f5a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"1addf16e-1af6-431b-aff1-05c48a952f5a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.629091 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1addf16e-1af6-431b-aff1-05c48a952f5a-config-data\") pod 
\"ceilometer-0\" (UID: \"1addf16e-1af6-431b-aff1-05c48a952f5a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.629113 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1addf16e-1af6-431b-aff1-05c48a952f5a-scripts\") pod \"ceilometer-0\" (UID: \"1addf16e-1af6-431b-aff1-05c48a952f5a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.652838 4558 scope.go:117] "RemoveContainer" containerID="5bf498b0c5a887fb9070dae794c0855721714041c990374c5706d705de7cdc2d" Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.672848 4558 scope.go:117] "RemoveContainer" containerID="8d18756d85f771ea1a8e265733bd0f0de8bef3da799fcc5dac7436aa91d96d1b" Jan 20 17:37:01 crc kubenswrapper[4558]: E0120 17:37:01.673199 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8d18756d85f771ea1a8e265733bd0f0de8bef3da799fcc5dac7436aa91d96d1b\": container with ID starting with 8d18756d85f771ea1a8e265733bd0f0de8bef3da799fcc5dac7436aa91d96d1b not found: ID does not exist" containerID="8d18756d85f771ea1a8e265733bd0f0de8bef3da799fcc5dac7436aa91d96d1b" Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.673245 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8d18756d85f771ea1a8e265733bd0f0de8bef3da799fcc5dac7436aa91d96d1b"} err="failed to get container status \"8d18756d85f771ea1a8e265733bd0f0de8bef3da799fcc5dac7436aa91d96d1b\": rpc error: code = NotFound desc = could not find container \"8d18756d85f771ea1a8e265733bd0f0de8bef3da799fcc5dac7436aa91d96d1b\": container with ID starting with 8d18756d85f771ea1a8e265733bd0f0de8bef3da799fcc5dac7436aa91d96d1b not found: ID does not exist" Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.673272 4558 scope.go:117] "RemoveContainer" containerID="e4b0f89bb58f0ef8658ad1ca88dceb67d6f84e237fdd51276dbf055a76577647" Jan 20 17:37:01 crc kubenswrapper[4558]: E0120 17:37:01.673610 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e4b0f89bb58f0ef8658ad1ca88dceb67d6f84e237fdd51276dbf055a76577647\": container with ID starting with e4b0f89bb58f0ef8658ad1ca88dceb67d6f84e237fdd51276dbf055a76577647 not found: ID does not exist" containerID="e4b0f89bb58f0ef8658ad1ca88dceb67d6f84e237fdd51276dbf055a76577647" Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.673654 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e4b0f89bb58f0ef8658ad1ca88dceb67d6f84e237fdd51276dbf055a76577647"} err="failed to get container status \"e4b0f89bb58f0ef8658ad1ca88dceb67d6f84e237fdd51276dbf055a76577647\": rpc error: code = NotFound desc = could not find container \"e4b0f89bb58f0ef8658ad1ca88dceb67d6f84e237fdd51276dbf055a76577647\": container with ID starting with e4b0f89bb58f0ef8658ad1ca88dceb67d6f84e237fdd51276dbf055a76577647 not found: ID does not exist" Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.673686 4558 scope.go:117] "RemoveContainer" containerID="83ce987c5b708549cef86a6913ed91475d61f00a374b43c5a0a9d8d6b9e15ad1" Jan 20 17:37:01 crc kubenswrapper[4558]: E0120 17:37:01.673972 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"83ce987c5b708549cef86a6913ed91475d61f00a374b43c5a0a9d8d6b9e15ad1\": container with ID starting with 83ce987c5b708549cef86a6913ed91475d61f00a374b43c5a0a9d8d6b9e15ad1 not found: ID does not exist" containerID="83ce987c5b708549cef86a6913ed91475d61f00a374b43c5a0a9d8d6b9e15ad1" Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.674006 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"83ce987c5b708549cef86a6913ed91475d61f00a374b43c5a0a9d8d6b9e15ad1"} err="failed to get container status \"83ce987c5b708549cef86a6913ed91475d61f00a374b43c5a0a9d8d6b9e15ad1\": rpc error: code = NotFound desc = could not find container \"83ce987c5b708549cef86a6913ed91475d61f00a374b43c5a0a9d8d6b9e15ad1\": container with ID starting with 83ce987c5b708549cef86a6913ed91475d61f00a374b43c5a0a9d8d6b9e15ad1 not found: ID does not exist" Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.674029 4558 scope.go:117] "RemoveContainer" containerID="5bf498b0c5a887fb9070dae794c0855721714041c990374c5706d705de7cdc2d" Jan 20 17:37:01 crc kubenswrapper[4558]: E0120 17:37:01.674258 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5bf498b0c5a887fb9070dae794c0855721714041c990374c5706d705de7cdc2d\": container with ID starting with 5bf498b0c5a887fb9070dae794c0855721714041c990374c5706d705de7cdc2d not found: ID does not exist" containerID="5bf498b0c5a887fb9070dae794c0855721714041c990374c5706d705de7cdc2d" Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.674284 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5bf498b0c5a887fb9070dae794c0855721714041c990374c5706d705de7cdc2d"} err="failed to get container status \"5bf498b0c5a887fb9070dae794c0855721714041c990374c5706d705de7cdc2d\": rpc error: code = NotFound desc = could not find container \"5bf498b0c5a887fb9070dae794c0855721714041c990374c5706d705de7cdc2d\": container with ID starting with 5bf498b0c5a887fb9070dae794c0855721714041c990374c5706d705de7cdc2d not found: ID does not exist" Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.732235 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1addf16e-1af6-431b-aff1-05c48a952f5a-log-httpd\") pod \"ceilometer-0\" (UID: \"1addf16e-1af6-431b-aff1-05c48a952f5a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.732368 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/1addf16e-1af6-431b-aff1-05c48a952f5a-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"1addf16e-1af6-431b-aff1-05c48a952f5a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.732447 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1addf16e-1af6-431b-aff1-05c48a952f5a-run-httpd\") pod \"ceilometer-0\" (UID: \"1addf16e-1af6-431b-aff1-05c48a952f5a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.732517 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1addf16e-1af6-431b-aff1-05c48a952f5a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"1addf16e-1af6-431b-aff1-05c48a952f5a\") " 
pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.732705 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1addf16e-1af6-431b-aff1-05c48a952f5a-log-httpd\") pod \"ceilometer-0\" (UID: \"1addf16e-1af6-431b-aff1-05c48a952f5a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.732709 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1addf16e-1af6-431b-aff1-05c48a952f5a-config-data\") pod \"ceilometer-0\" (UID: \"1addf16e-1af6-431b-aff1-05c48a952f5a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.732799 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1addf16e-1af6-431b-aff1-05c48a952f5a-scripts\") pod \"ceilometer-0\" (UID: \"1addf16e-1af6-431b-aff1-05c48a952f5a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.732990 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pnzfr\" (UniqueName: \"kubernetes.io/projected/1addf16e-1af6-431b-aff1-05c48a952f5a-kube-api-access-pnzfr\") pod \"ceilometer-0\" (UID: \"1addf16e-1af6-431b-aff1-05c48a952f5a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.733057 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1addf16e-1af6-431b-aff1-05c48a952f5a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"1addf16e-1af6-431b-aff1-05c48a952f5a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.733619 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1addf16e-1af6-431b-aff1-05c48a952f5a-run-httpd\") pod \"ceilometer-0\" (UID: \"1addf16e-1af6-431b-aff1-05c48a952f5a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.737587 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1addf16e-1af6-431b-aff1-05c48a952f5a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"1addf16e-1af6-431b-aff1-05c48a952f5a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.737602 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1addf16e-1af6-431b-aff1-05c48a952f5a-scripts\") pod \"ceilometer-0\" (UID: \"1addf16e-1af6-431b-aff1-05c48a952f5a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.737885 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1addf16e-1af6-431b-aff1-05c48a952f5a-config-data\") pod \"ceilometer-0\" (UID: \"1addf16e-1af6-431b-aff1-05c48a952f5a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.738941 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/1addf16e-1af6-431b-aff1-05c48a952f5a-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: 
\"1addf16e-1af6-431b-aff1-05c48a952f5a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.739029 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1addf16e-1af6-431b-aff1-05c48a952f5a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"1addf16e-1af6-431b-aff1-05c48a952f5a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.749590 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pnzfr\" (UniqueName: \"kubernetes.io/projected/1addf16e-1af6-431b-aff1-05c48a952f5a-kube-api-access-pnzfr\") pod \"ceilometer-0\" (UID: \"1addf16e-1af6-431b-aff1-05c48a952f5a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:37:01 crc kubenswrapper[4558]: I0120 17:37:01.937089 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:37:02 crc kubenswrapper[4558]: W0120 17:37:02.391811 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1addf16e_1af6_431b_aff1_05c48a952f5a.slice/crio-f902a2ceba04dbde9c216501a45d205187ae4274458bc1599d976afd3b962475 WatchSource:0}: Error finding container f902a2ceba04dbde9c216501a45d205187ae4274458bc1599d976afd3b962475: Status 404 returned error can't find the container with id f902a2ceba04dbde9c216501a45d205187ae4274458bc1599d976afd3b962475 Jan 20 17:37:02 crc kubenswrapper[4558]: I0120 17:37:02.392587 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:37:02 crc kubenswrapper[4558]: I0120 17:37:02.561751 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"1addf16e-1af6-431b-aff1-05c48a952f5a","Type":"ContainerStarted","Data":"f902a2ceba04dbde9c216501a45d205187ae4274458bc1599d976afd3b962475"} Jan 20 17:37:02 crc kubenswrapper[4558]: I0120 17:37:02.577962 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dc94159e-846c-4207-a4e6-fc877335d179" path="/var/lib/kubelet/pods/dc94159e-846c-4207-a4e6-fc877335d179/volumes" Jan 20 17:37:03 crc kubenswrapper[4558]: I0120 17:37:03.574283 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"1addf16e-1af6-431b-aff1-05c48a952f5a","Type":"ContainerStarted","Data":"0fde5ea8da11b84a29f2a0ac302c4c48b6a41c64a33f837f39b3bbaaf4487feb"} Jan 20 17:37:04 crc kubenswrapper[4558]: I0120 17:37:04.603021 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"1addf16e-1af6-431b-aff1-05c48a952f5a","Type":"ContainerStarted","Data":"f49f1bac498f7d02524aaee31c0af982140e123e147ff77674e41803999fb5b9"} Jan 20 17:37:05 crc kubenswrapper[4558]: I0120 17:37:05.618539 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"1addf16e-1af6-431b-aff1-05c48a952f5a","Type":"ContainerStarted","Data":"9c1da6a399e8373ade66666f30fdfad18e61c1a4529147d6a10d0385c82a426f"} Jan 20 17:37:06 crc kubenswrapper[4558]: I0120 17:37:06.631244 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"1addf16e-1af6-431b-aff1-05c48a952f5a","Type":"ContainerStarted","Data":"d7f94ffeca974ded0f05a2d4e0bd7bdbcb45172d758621d01d81132148b96b8b"} Jan 20 17:37:06 crc kubenswrapper[4558]: 
I0120 17:37:06.631886 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:37:16 crc kubenswrapper[4558]: I0120 17:37:16.915227 4558 scope.go:117] "RemoveContainer" containerID="947c28f9113ea23ac9a8fe215ee6bfabb2e7509fa98fad8873bc83baf344bdb1" Jan 20 17:37:21 crc kubenswrapper[4558]: E0120 17:37:21.679489 4558 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 192.168.25.8:41282->192.168.25.8:43883: write tcp 192.168.25.8:41282->192.168.25.8:43883: write: connection reset by peer Jan 20 17:37:23 crc kubenswrapper[4558]: E0120 17:37:23.873941 4558 upgradeaware.go:441] Error proxying data from backend to client: writeto tcp 192.168.25.8:41406->192.168.25.8:43883: read tcp 192.168.25.8:41406->192.168.25.8:43883: read: connection reset by peer Jan 20 17:37:27 crc kubenswrapper[4558]: I0120 17:37:27.329933 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 17:37:27 crc kubenswrapper[4558]: I0120 17:37:27.330935 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 17:37:27 crc kubenswrapper[4558]: I0120 17:37:27.331018 4558 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" Jan 20 17:37:27 crc kubenswrapper[4558]: I0120 17:37:27.332393 4558 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"23984d013a068cbbd112be5ba4e29373c221b0f5de1715ba81ff9be27247b9c2"} pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 20 17:37:27 crc kubenswrapper[4558]: I0120 17:37:27.332479 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" containerID="cri-o://23984d013a068cbbd112be5ba4e29373c221b0f5de1715ba81ff9be27247b9c2" gracePeriod=600 Jan 20 17:37:27 crc kubenswrapper[4558]: I0120 17:37:27.859543 4558 generic.go:334] "Generic (PLEG): container finished" podID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerID="23984d013a068cbbd112be5ba4e29373c221b0f5de1715ba81ff9be27247b9c2" exitCode=0 Jan 20 17:37:27 crc kubenswrapper[4558]: I0120 17:37:27.859616 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerDied","Data":"23984d013a068cbbd112be5ba4e29373c221b0f5de1715ba81ff9be27247b9c2"} Jan 20 17:37:27 crc kubenswrapper[4558]: I0120 17:37:27.859933 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerStarted","Data":"2d2a8989ca5a5af98b2c9dc8f801ea0675339ec14800f437c058c5a43c20146b"} 
Jan 20 17:37:27 crc kubenswrapper[4558]: I0120 17:37:27.859973 4558 scope.go:117] "RemoveContainer" containerID="f275e9f5de330b3c79316d5871106c6bcf90b698e0744c5beb28da45c336b36d" Jan 20 17:37:27 crc kubenswrapper[4558]: I0120 17:37:27.882128 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=22.966866375 podStartE2EDuration="26.882109486s" podCreationTimestamp="2026-01-20 17:37:01 +0000 UTC" firstStartedPulling="2026-01-20 17:37:02.394960606 +0000 UTC m=+3316.155298572" lastFinishedPulling="2026-01-20 17:37:06.310203716 +0000 UTC m=+3320.070541683" observedRunningTime="2026-01-20 17:37:06.661553225 +0000 UTC m=+3320.421891192" watchObservedRunningTime="2026-01-20 17:37:27.882109486 +0000 UTC m=+3341.642447454" Jan 20 17:37:30 crc kubenswrapper[4558]: E0120 17:37:30.005556 4558 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 192.168.25.8:60486->192.168.25.8:43883: write tcp 192.168.25.8:60486->192.168.25.8:43883: write: broken pipe Jan 20 17:37:31 crc kubenswrapper[4558]: I0120 17:37:31.943862 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:37:36 crc kubenswrapper[4558]: E0120 17:37:36.518236 4558 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 192.168.25.8:60786->192.168.25.8:43883: write tcp 192.168.25.8:60786->192.168.25.8:43883: write: connection reset by peer Jan 20 17:37:39 crc kubenswrapper[4558]: I0120 17:37:39.513584 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:37:39 crc kubenswrapper[4558]: I0120 17:37:39.566200 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:37:39 crc kubenswrapper[4558]: I0120 17:37:39.566490 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-scheduler-0" podUID="9800a14c-ebf5-4a5d-b384-c133973b55ff" containerName="cinder-scheduler" containerID="cri-o://f203c5705e628d436a58ef25c8ec48e9cd0684833bf7150a4abe881181faec1b" gracePeriod=30 Jan 20 17:37:39 crc kubenswrapper[4558]: I0120 17:37:39.566993 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-scheduler-0" podUID="9800a14c-ebf5-4a5d-b384-c133973b55ff" containerName="probe" containerID="cri-o://f88977b0890e7c6b4326fe8b4078f4b100c6c3f58772736456b0c1ccabf3b4fb" gracePeriod=30 Jan 20 17:37:39 crc kubenswrapper[4558]: I0120 17:37:39.589261 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-7s2th"] Jan 20 17:37:39 crc kubenswrapper[4558]: I0120 17:37:39.605494 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-7s2th"] Jan 20 17:37:39 crc kubenswrapper[4558]: I0120 17:37:39.685239 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:37:39 crc kubenswrapper[4558]: I0120 17:37:39.685468 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-api-0" podUID="e1a1ac97-a530-4ba0-8c9b-835a2b576c8d" containerName="cinder-api-log" containerID="cri-o://f401d7772f0d907bbf33421516a8e607439cd73653fec584dcb05860382f5a83" gracePeriod=30 Jan 20 17:37:39 crc kubenswrapper[4558]: I0120 17:37:39.685875 4558 kuberuntime_container.go:808] "Killing container 
with a grace period" pod="openstack-kuttl-tests/cinder-api-0" podUID="e1a1ac97-a530-4ba0-8c9b-835a2b576c8d" containerName="cinder-api" containerID="cri-o://cfc0219326dabcde321d0d0fb037a9ecb6ea36b64ee17a015344e0990a28b989" gracePeriod=30 Jan 20 17:37:39 crc kubenswrapper[4558]: E0120 17:37:39.691010 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 20 17:37:39 crc kubenswrapper[4558]: E0120 17:37:39.691068 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/0696a635-5dc9-46e9-8502-47fab9ff761c-config-data podName:0696a635-5dc9-46e9-8502-47fab9ff761c nodeName:}" failed. No retries permitted until 2026-01-20 17:37:40.191051341 +0000 UTC m=+3353.951389308 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/0696a635-5dc9-46e9-8502-47fab9ff761c-config-data") pod "rabbitmq-server-0" (UID: "0696a635-5dc9-46e9-8502-47fab9ff761c") : configmap "rabbitmq-config-data" not found Jan 20 17:37:39 crc kubenswrapper[4558]: I0120 17:37:39.757467 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/root-account-create-update-wlsbh"] Jan 20 17:37:39 crc kubenswrapper[4558]: I0120 17:37:39.758861 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-wlsbh" Jan 20 17:37:39 crc kubenswrapper[4558]: I0120 17:37:39.781402 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"openstack-cell1-mariadb-root-db-secret" Jan 20 17:37:39 crc kubenswrapper[4558]: I0120 17:37:39.782650 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-wlsbh"] Jan 20 17:37:39 crc kubenswrapper[4558]: I0120 17:37:39.852402 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-0"] Jan 20 17:37:39 crc kubenswrapper[4558]: I0120 17:37:39.894095 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xgm67\" (UniqueName: \"kubernetes.io/projected/52895fa4-c08b-43b8-9f63-cbc8621db11e-kube-api-access-xgm67\") pod \"root-account-create-update-wlsbh\" (UID: \"52895fa4-c08b-43b8-9f63-cbc8621db11e\") " pod="openstack-kuttl-tests/root-account-create-update-wlsbh" Jan 20 17:37:39 crc kubenswrapper[4558]: I0120 17:37:39.894139 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/52895fa4-c08b-43b8-9f63-cbc8621db11e-operator-scripts\") pod \"root-account-create-update-wlsbh\" (UID: \"52895fa4-c08b-43b8-9f63-cbc8621db11e\") " pod="openstack-kuttl-tests/root-account-create-update-wlsbh" Jan 20 17:37:39 crc kubenswrapper[4558]: I0120 17:37:39.912224 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:37:39 crc kubenswrapper[4558]: I0120 17:37:39.912465 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/openstackclient" podUID="0b511fac-7a17-433d-8baa-9eefa278bd25" containerName="openstackclient" containerID="cri-o://b9d69bec651538e6c3bb7125cf005d0443912c83fa1175fd1642916e1d9181f3" gracePeriod=2 Jan 20 17:37:39 crc kubenswrapper[4558]: I0120 17:37:39.936744 4558 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack-kuttl-tests/neutron-f7c7-account-create-update-hf7rn"] Jan 20 17:37:39 crc kubenswrapper[4558]: I0120 17:37:39.938143 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-f7c7-account-create-update-hf7rn" Jan 20 17:37:39 crc kubenswrapper[4558]: I0120 17:37:39.954906 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:37:39 crc kubenswrapper[4558]: I0120 17:37:39.958443 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-db-secret" Jan 20 17:37:39 crc kubenswrapper[4558]: I0120 17:37:39.973058 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-f7c7-account-create-update-hf7rn"] Jan 20 17:37:39 crc kubenswrapper[4558]: I0120 17:37:39.996435 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xgm67\" (UniqueName: \"kubernetes.io/projected/52895fa4-c08b-43b8-9f63-cbc8621db11e-kube-api-access-xgm67\") pod \"root-account-create-update-wlsbh\" (UID: \"52895fa4-c08b-43b8-9f63-cbc8621db11e\") " pod="openstack-kuttl-tests/root-account-create-update-wlsbh" Jan 20 17:37:39 crc kubenswrapper[4558]: I0120 17:37:39.996480 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/52895fa4-c08b-43b8-9f63-cbc8621db11e-operator-scripts\") pod \"root-account-create-update-wlsbh\" (UID: \"52895fa4-c08b-43b8-9f63-cbc8621db11e\") " pod="openstack-kuttl-tests/root-account-create-update-wlsbh" Jan 20 17:37:39 crc kubenswrapper[4558]: E0120 17:37:39.997442 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Jan 20 17:37:39 crc kubenswrapper[4558]: I0120 17:37:39.997696 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/52895fa4-c08b-43b8-9f63-cbc8621db11e-operator-scripts\") pod \"root-account-create-update-wlsbh\" (UID: \"52895fa4-c08b-43b8-9f63-cbc8621db11e\") " pod="openstack-kuttl-tests/root-account-create-update-wlsbh" Jan 20 17:37:39 crc kubenswrapper[4558]: E0120 17:37:39.997839 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/0c6a6265-cf90-4039-9200-ba478d612baa-config-data podName:0c6a6265-cf90-4039-9200-ba478d612baa nodeName:}" failed. No retries permitted until 2026-01-20 17:37:40.497819701 +0000 UTC m=+3354.258157668 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/0c6a6265-cf90-4039-9200-ba478d612baa-config-data") pod "rabbitmq-cell1-server-0" (UID: "0c6a6265-cf90-4039-9200-ba478d612baa") : configmap "rabbitmq-cell1-config-data" not found Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.019529 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-f7c7-account-create-update-wldkt"] Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.061249 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-f7c7-account-create-update-wldkt"] Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.079530 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xgm67\" (UniqueName: \"kubernetes.io/projected/52895fa4-c08b-43b8-9f63-cbc8621db11e-kube-api-access-xgm67\") pod \"root-account-create-update-wlsbh\" (UID: \"52895fa4-c08b-43b8-9f63-cbc8621db11e\") " pod="openstack-kuttl-tests/root-account-create-update-wlsbh" Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.099187 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-b147-account-create-update-zdjzj"] Jan 20 17:37:40 crc kubenswrapper[4558]: E0120 17:37:40.099932 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b511fac-7a17-433d-8baa-9eefa278bd25" containerName="openstackclient" Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.099950 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b511fac-7a17-433d-8baa-9eefa278bd25" containerName="openstackclient" Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.100112 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="0b511fac-7a17-433d-8baa-9eefa278bd25" containerName="openstackclient" Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.100744 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-b147-account-create-update-zdjzj" Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.104068 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0f5a8541-5b86-4fff-8f94-18b8b5ee3ebf-operator-scripts\") pod \"neutron-f7c7-account-create-update-hf7rn\" (UID: \"0f5a8541-5b86-4fff-8f94-18b8b5ee3ebf\") " pod="openstack-kuttl-tests/neutron-f7c7-account-create-update-hf7rn" Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.104292 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2grqk\" (UniqueName: \"kubernetes.io/projected/0f5a8541-5b86-4fff-8f94-18b8b5ee3ebf-kube-api-access-2grqk\") pod \"neutron-f7c7-account-create-update-hf7rn\" (UID: \"0f5a8541-5b86-4fff-8f94-18b8b5ee3ebf\") " pod="openstack-kuttl-tests/neutron-f7c7-account-create-update-hf7rn" Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.104618 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-wlsbh" Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.124382 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-db-secret" Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.171013 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.171316 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovn-northd-0" podUID="c570a7ea-d296-44bc-a48f-6dd8be9754d0" containerName="ovn-northd" containerID="cri-o://1e1ade780111891ab36a10d37a0917edea1f34135b19b61c709e80ef7ae68b98" gracePeriod=30 Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.171466 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovn-northd-0" podUID="c570a7ea-d296-44bc-a48f-6dd8be9754d0" containerName="openstack-network-exporter" containerID="cri-o://80fe464dc7cd617b6ebc3164ada8f61f3bec4caca84b82392b77289a475163d7" gracePeriod=30 Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.207235 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-b147-account-create-update-zdjzj"] Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.209335 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9mfdq\" (UniqueName: \"kubernetes.io/projected/7ca40caa-67e9-4be0-8697-cf15086736bc-kube-api-access-9mfdq\") pod \"barbican-b147-account-create-update-zdjzj\" (UID: \"7ca40caa-67e9-4be0-8697-cf15086736bc\") " pod="openstack-kuttl-tests/barbican-b147-account-create-update-zdjzj" Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.209407 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2grqk\" (UniqueName: \"kubernetes.io/projected/0f5a8541-5b86-4fff-8f94-18b8b5ee3ebf-kube-api-access-2grqk\") pod \"neutron-f7c7-account-create-update-hf7rn\" (UID: \"0f5a8541-5b86-4fff-8f94-18b8b5ee3ebf\") " pod="openstack-kuttl-tests/neutron-f7c7-account-create-update-hf7rn" Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.209490 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7ca40caa-67e9-4be0-8697-cf15086736bc-operator-scripts\") pod \"barbican-b147-account-create-update-zdjzj\" (UID: \"7ca40caa-67e9-4be0-8697-cf15086736bc\") " pod="openstack-kuttl-tests/barbican-b147-account-create-update-zdjzj" Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.209727 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0f5a8541-5b86-4fff-8f94-18b8b5ee3ebf-operator-scripts\") pod \"neutron-f7c7-account-create-update-hf7rn\" (UID: \"0f5a8541-5b86-4fff-8f94-18b8b5ee3ebf\") " pod="openstack-kuttl-tests/neutron-f7c7-account-create-update-hf7rn" Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.210924 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0f5a8541-5b86-4fff-8f94-18b8b5ee3ebf-operator-scripts\") pod \"neutron-f7c7-account-create-update-hf7rn\" (UID: \"0f5a8541-5b86-4fff-8f94-18b8b5ee3ebf\") " pod="openstack-kuttl-tests/neutron-f7c7-account-create-update-hf7rn" Jan 20 17:37:40 crc 
kubenswrapper[4558]: E0120 17:37:40.211416 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 20 17:37:40 crc kubenswrapper[4558]: E0120 17:37:40.211485 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/0696a635-5dc9-46e9-8502-47fab9ff761c-config-data podName:0696a635-5dc9-46e9-8502-47fab9ff761c nodeName:}" failed. No retries permitted until 2026-01-20 17:37:41.211471236 +0000 UTC m=+3354.971809203 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/0696a635-5dc9-46e9-8502-47fab9ff761c-config-data") pod "rabbitmq-server-0" (UID: "0696a635-5dc9-46e9-8502-47fab9ff761c") : configmap "rabbitmq-config-data" not found Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.243188 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-db-sync-cmhmv"] Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.251681 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2grqk\" (UniqueName: \"kubernetes.io/projected/0f5a8541-5b86-4fff-8f94-18b8b5ee3ebf-kube-api-access-2grqk\") pod \"neutron-f7c7-account-create-update-hf7rn\" (UID: \"0f5a8541-5b86-4fff-8f94-18b8b5ee3ebf\") " pod="openstack-kuttl-tests/neutron-f7c7-account-create-update-hf7rn" Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.272477 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-b147-account-create-update-jnh4d"] Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.291099 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-f7c7-account-create-update-hf7rn" Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.298650 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-b147-account-create-update-jnh4d"] Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.312518 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9mfdq\" (UniqueName: \"kubernetes.io/projected/7ca40caa-67e9-4be0-8697-cf15086736bc-kube-api-access-9mfdq\") pod \"barbican-b147-account-create-update-zdjzj\" (UID: \"7ca40caa-67e9-4be0-8697-cf15086736bc\") " pod="openstack-kuttl-tests/barbican-b147-account-create-update-zdjzj" Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.312587 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7ca40caa-67e9-4be0-8697-cf15086736bc-operator-scripts\") pod \"barbican-b147-account-create-update-zdjzj\" (UID: \"7ca40caa-67e9-4be0-8697-cf15086736bc\") " pod="openstack-kuttl-tests/barbican-b147-account-create-update-zdjzj" Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.313609 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7ca40caa-67e9-4be0-8697-cf15086736bc-operator-scripts\") pod \"barbican-b147-account-create-update-zdjzj\" (UID: \"7ca40caa-67e9-4be0-8697-cf15086736bc\") " pod="openstack-kuttl-tests/barbican-b147-account-create-update-zdjzj" Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.325433 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-db-sync-cmhmv"] Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.352354 4558 kubelet.go:2437] 
"SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.353625 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovsdbserver-nb-0" podUID="98f8dc8b-6446-4ba8-b37b-a11ae7414b65" containerName="openstack-network-exporter" containerID="cri-o://f56a163a8189df30e61d8f643eac8347a79e40378fdf5ec2740f041b866096e0" gracePeriod=300 Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.363883 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9mfdq\" (UniqueName: \"kubernetes.io/projected/7ca40caa-67e9-4be0-8697-cf15086736bc-kube-api-access-9mfdq\") pod \"barbican-b147-account-create-update-zdjzj\" (UID: \"7ca40caa-67e9-4be0-8697-cf15086736bc\") " pod="openstack-kuttl-tests/barbican-b147-account-create-update-zdjzj" Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.382290 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-bf60-account-create-update-v2bzq"] Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.390332 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-bf60-account-create-update-v2bzq"] Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.460924 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-db-sync-b88r8"] Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.482224 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack-kuttl-tests/ovn-northd-0" podUID="c570a7ea-d296-44bc-a48f-6dd8be9754d0" containerName="ovn-northd" probeResult="failure" output=< Jan 20 17:37:40 crc kubenswrapper[4558]: 2026-01-20T17:37:40Z|00001|unixctl|WARN|failed to connect to /tmp/ovn-northd.1.ctl Jan 20 17:37:40 crc kubenswrapper[4558]: ovn-appctl: cannot connect to "/tmp/ovn-northd.1.ctl" (No such file or directory) Jan 20 17:37:40 crc kubenswrapper[4558]: > Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.502793 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-b147-account-create-update-zdjzj" Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.515075 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/placement-db-sync-b88r8"] Jan 20 17:37:40 crc kubenswrapper[4558]: E0120 17:37:40.520504 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Jan 20 17:37:40 crc kubenswrapper[4558]: E0120 17:37:40.520576 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/0c6a6265-cf90-4039-9200-ba478d612baa-config-data podName:0c6a6265-cf90-4039-9200-ba478d612baa nodeName:}" failed. No retries permitted until 2026-01-20 17:37:41.520558586 +0000 UTC m=+3355.280896553 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/0c6a6265-cf90-4039-9200-ba478d612baa-config-data") pod "rabbitmq-cell1-server-0" (UID: "0c6a6265-cf90-4039-9200-ba478d612baa") : configmap "rabbitmq-cell1-config-data" not found Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.607929 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovsdbserver-nb-0" podUID="98f8dc8b-6446-4ba8-b37b-a11ae7414b65" containerName="ovsdbserver-nb" containerID="cri-o://ed49f3f83c5373eef322ba453c4777bc515d3e9dfcde8de0a5408d13b285222c" gracePeriod=300 Jan 20 17:37:40 crc kubenswrapper[4558]: E0120 17:37:40.626403 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-memcached-svc: secret "cert-memcached-svc" not found Jan 20 17:37:40 crc kubenswrapper[4558]: E0120 17:37:40.626482 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b18f4fcf-eaae-401f-99ec-7b130ad8a6c1-memcached-tls-certs podName:b18f4fcf-eaae-401f-99ec-7b130ad8a6c1 nodeName:}" failed. No retries permitted until 2026-01-20 17:37:41.126463628 +0000 UTC m=+3354.886801595 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memcached-tls-certs" (UniqueName: "kubernetes.io/secret/b18f4fcf-eaae-401f-99ec-7b130ad8a6c1-memcached-tls-certs") pod "memcached-0" (UID: "b18f4fcf-eaae-401f-99ec-7b130ad8a6c1") : secret "cert-memcached-svc" not found Jan 20 17:37:40 crc kubenswrapper[4558]: E0120 17:37:40.626714 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:37:40 crc kubenswrapper[4558]: E0120 17:37:40.630009 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b18f4fcf-eaae-401f-99ec-7b130ad8a6c1-combined-ca-bundle podName:b18f4fcf-eaae-401f-99ec-7b130ad8a6c1 nodeName:}" failed. No retries permitted until 2026-01-20 17:37:41.126734046 +0000 UTC m=+3354.887072013 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/b18f4fcf-eaae-401f-99ec-7b130ad8a6c1-combined-ca-bundle") pod "memcached-0" (UID: "b18f4fcf-eaae-401f-99ec-7b130ad8a6c1") : secret "combined-ca-bundle" not found Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.640912 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3d6a4075-3af8-4980-adbc-cbf55fcaf10d" path="/var/lib/kubelet/pods/3d6a4075-3af8-4980-adbc-cbf55fcaf10d/volumes" Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.641785 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="40b11e15-3f6e-4fb0-9a1f-e4f145f9c5b9" path="/var/lib/kubelet/pods/40b11e15-3f6e-4fb0-9a1f-e4f145f9c5b9/volumes" Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.652045 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="48a0dae5-34e3-490e-9cc5-f474f6cd2fad" path="/var/lib/kubelet/pods/48a0dae5-34e3-490e-9cc5-f474f6cd2fad/volumes" Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.652852 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7d991da1-52c2-4010-b467-8341490801b3" path="/var/lib/kubelet/pods/7d991da1-52c2-4010-b467-8341490801b3/volumes" Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.653469 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="838617e6-6285-4b18-9fc1-022ec5989dd2" path="/var/lib/kubelet/pods/838617e6-6285-4b18-9fc1-022ec5989dd2/volumes" Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.726103 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9219712c-6093-4d73-b176-86aa131bb6d2" path="/var/lib/kubelet/pods/9219712c-6093-4d73-b176-86aa131bb6d2/volumes" Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.726888 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-2fe2-account-create-update-t264m"] Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.775079 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-2fe2-account-create-update-t264m"] Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.775406 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell0-41bf-account-create-update-ztpg6"] Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.777113 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-41bf-account-create-update-ztpg6"] Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.777138 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-db-sync-hbj94"] Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.777242 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-41bf-account-create-update-ztpg6" Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.777938 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-2fe2-account-create-update-t264m" Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.794268 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-api-db-secret" Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.794621 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-db-secret" Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.804140 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-2fe2-account-create-update-tmfpb"] Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.831718 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tpp6q\" (UniqueName: \"kubernetes.io/projected/cdf89a68-b15b-4f2d-9016-55fb28011197-kube-api-access-tpp6q\") pod \"nova-api-2fe2-account-create-update-t264m\" (UID: \"cdf89a68-b15b-4f2d-9016-55fb28011197\") " pod="openstack-kuttl-tests/nova-api-2fe2-account-create-update-t264m" Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.831917 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cdf89a68-b15b-4f2d-9016-55fb28011197-operator-scripts\") pod \"nova-api-2fe2-account-create-update-t264m\" (UID: \"cdf89a68-b15b-4f2d-9016-55fb28011197\") " pod="openstack-kuttl-tests/nova-api-2fe2-account-create-update-t264m" Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.832194 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e5f57088-ae0f-4326-89fb-a7957da83477-operator-scripts\") pod \"nova-cell0-41bf-account-create-update-ztpg6\" (UID: \"e5f57088-ae0f-4326-89fb-a7957da83477\") " pod="openstack-kuttl-tests/nova-cell0-41bf-account-create-update-ztpg6" Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.832422 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-246h4\" (UniqueName: \"kubernetes.io/projected/e5f57088-ae0f-4326-89fb-a7957da83477-kube-api-access-246h4\") pod \"nova-cell0-41bf-account-create-update-ztpg6\" (UID: \"e5f57088-ae0f-4326-89fb-a7957da83477\") " pod="openstack-kuttl-tests/nova-cell0-41bf-account-create-update-ztpg6" Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.835557 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-2fe2-account-create-update-tmfpb"] Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.858626 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-db-sync-hbj94"] Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.902312 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.904003 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovsdbserver-sb-0" podUID="8b45e22f-53b9-4228-a28a-a5db18b6e583" containerName="openstack-network-exporter" containerID="cri-o://561616071d5e6e8584628b6c63ccb49a3ccf23901814e4850a47530cd546d416" gracePeriod=300 Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.912832 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-db-sync-kd8pp"] Jan 20 17:37:40 crc 
kubenswrapper[4558]: I0120 17:37:40.921943 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-db-sync-kd8pp"] Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.937213 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-41bf-account-create-update-9f8d4"] Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.938444 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cdf89a68-b15b-4f2d-9016-55fb28011197-operator-scripts\") pod \"nova-api-2fe2-account-create-update-t264m\" (UID: \"cdf89a68-b15b-4f2d-9016-55fb28011197\") " pod="openstack-kuttl-tests/nova-api-2fe2-account-create-update-t264m" Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.938750 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tpp6q\" (UniqueName: \"kubernetes.io/projected/cdf89a68-b15b-4f2d-9016-55fb28011197-kube-api-access-tpp6q\") pod \"nova-api-2fe2-account-create-update-t264m\" (UID: \"cdf89a68-b15b-4f2d-9016-55fb28011197\") " pod="openstack-kuttl-tests/nova-api-2fe2-account-create-update-t264m" Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.938975 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e5f57088-ae0f-4326-89fb-a7957da83477-operator-scripts\") pod \"nova-cell0-41bf-account-create-update-ztpg6\" (UID: \"e5f57088-ae0f-4326-89fb-a7957da83477\") " pod="openstack-kuttl-tests/nova-cell0-41bf-account-create-update-ztpg6" Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.941817 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cdf89a68-b15b-4f2d-9016-55fb28011197-operator-scripts\") pod \"nova-api-2fe2-account-create-update-t264m\" (UID: \"cdf89a68-b15b-4f2d-9016-55fb28011197\") " pod="openstack-kuttl-tests/nova-api-2fe2-account-create-update-t264m" Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.946371 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell0-41bf-account-create-update-9f8d4"] Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.946552 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-246h4\" (UniqueName: \"kubernetes.io/projected/e5f57088-ae0f-4326-89fb-a7957da83477-kube-api-access-246h4\") pod \"nova-cell0-41bf-account-create-update-ztpg6\" (UID: \"e5f57088-ae0f-4326-89fb-a7957da83477\") " pod="openstack-kuttl-tests/nova-cell0-41bf-account-create-update-ztpg6" Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.947316 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e5f57088-ae0f-4326-89fb-a7957da83477-operator-scripts\") pod \"nova-cell0-41bf-account-create-update-ztpg6\" (UID: \"e5f57088-ae0f-4326-89fb-a7957da83477\") " pod="openstack-kuttl-tests/nova-cell0-41bf-account-create-update-ztpg6" Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.951228 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-c055-account-create-update-4rstr"] Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.974826 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tpp6q\" (UniqueName: 
\"kubernetes.io/projected/cdf89a68-b15b-4f2d-9016-55fb28011197-kube-api-access-tpp6q\") pod \"nova-api-2fe2-account-create-update-t264m\" (UID: \"cdf89a68-b15b-4f2d-9016-55fb28011197\") " pod="openstack-kuttl-tests/nova-api-2fe2-account-create-update-t264m" Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.979679 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-246h4\" (UniqueName: \"kubernetes.io/projected/e5f57088-ae0f-4326-89fb-a7957da83477-kube-api-access-246h4\") pod \"nova-cell0-41bf-account-create-update-ztpg6\" (UID: \"e5f57088-ae0f-4326-89fb-a7957da83477\") " pod="openstack-kuttl-tests/nova-cell0-41bf-account-create-update-ztpg6" Jan 20 17:37:40 crc kubenswrapper[4558]: I0120 17:37:40.981217 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-c055-account-create-update-4rstr"] Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.000621 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovsdbserver-sb-0" podUID="8b45e22f-53b9-4228-a28a-a5db18b6e583" containerName="ovsdbserver-sb" containerID="cri-o://f48902bc3180bcf59f02a8f100bbf7e30cefe4513466bce23fe46f25d1cf1438" gracePeriod=300 Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.010366 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-storage-0"] Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.011067 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" containerName="account-server" containerID="cri-o://59a8367d177182b4518509dac1c5fda5058c9891b3fbc80a0b902bff689f902b" gracePeriod=30 Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.011243 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" containerName="container-updater" containerID="cri-o://23ff4d2ea69aec4c032a694e47beda8f08067327b0540a599b22bd0a5e1d7fe1" gracePeriod=30 Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.011310 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" containerName="container-auditor" containerID="cri-o://47797d4b979364b402e28a83de59b30acd8a2031f77af23bdb31c8827f23a687" gracePeriod=30 Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.011353 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" containerName="container-replicator" containerID="cri-o://9f1797db51cad1bf6e1bdae441554b6ce94379733c94a7ad9722e8bdd8202d02" gracePeriod=30 Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.011394 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" containerName="container-server" containerID="cri-o://2b29e3812d37926ee8f85d9b22c2a577e69fa75b6f1695111d8868abfe8319dc" gracePeriod=30 Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.011434 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" containerName="account-reaper" 
containerID="cri-o://d0d49ab0892c850e328fb329607d75c06674dd1862b648f22f4a98fb49a98e23" gracePeriod=30 Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.011468 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" containerName="account-auditor" containerID="cri-o://2539753eba585cbf5573638f6961f2168ad0433d885e5c40c84893ee902e38b5" gracePeriod=30 Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.011509 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" containerName="account-replicator" containerID="cri-o://f84266a8171f0ff80bdcdf75ef52c201b51aa2133964c7208ddf98fa9d157472" gracePeriod=30 Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.011648 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" containerName="object-expirer" containerID="cri-o://e07ad59dc71e1f67f59603c4aac989fb94b2c710bade32305a1dc8101de80423" gracePeriod=30 Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.011814 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" containerName="object-server" containerID="cri-o://2773fa3804e1639afaf6e312e08a0b9f626d09103c779a099e0c1bbf0928a8b9" gracePeriod=30 Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.011989 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" containerName="object-updater" containerID="cri-o://9230e487ce369f41cbd9bc631dd1261a599ee2dacc4825a2345c5d5867719c36" gracePeriod=30 Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.012017 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" containerName="swift-recon-cron" containerID="cri-o://ab686f3cd584a56d6b1aa614ebae97ca9c4f9dcf44febc60af60b85d5951643a" gracePeriod=30 Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.012049 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" containerName="object-auditor" containerID="cri-o://bad7f6a8d29de5c70b1535d6075c42725412283bb958655edcb536d62f10c419" gracePeriod=30 Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.012067 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" containerName="rsync" containerID="cri-o://ed8a91ba58ec453d2111a81153c4d592bd204afe8c8353e52171871827a439ce" gracePeriod=30 Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.012088 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" containerName="object-replicator" containerID="cri-o://3a2576d6f85fa2071ef37c084cdd8c32f7c6ddb702173be6308da0b714ce8400" gracePeriod=30 Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.050031 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-9hlt9"] Jan 20 17:37:41 crc 
kubenswrapper[4558]: I0120 17:37:41.056523 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-9hlt9" Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.087795 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-9hlt9"] Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.108003 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-zzttm"] Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.129489 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-zzttm"] Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.139206 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-41bf-account-create-update-ztpg6" Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.141359 4558 generic.go:334] "Generic (PLEG): container finished" podID="e1a1ac97-a530-4ba0-8c9b-835a2b576c8d" containerID="f401d7772f0d907bbf33421516a8e607439cd73653fec584dcb05860382f5a83" exitCode=143 Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.141438 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"e1a1ac97-a530-4ba0-8c9b-835a2b576c8d","Type":"ContainerDied","Data":"f401d7772f0d907bbf33421516a8e607439cd73653fec584dcb05860382f5a83"} Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.144640 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.145578 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="f19d5a0b-6d74-4b46-86a3-9381feb3f158" containerName="glance-log" containerID="cri-o://32a0b1b4cb9343de4b8cd2f43bcb90496f890496ba1d6d3239983dac00184a6b" gracePeriod=30 Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.145766 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="f19d5a0b-6d74-4b46-86a3-9381feb3f158" containerName="glance-httpd" containerID="cri-o://caebb498b12b9c8930363d5154873da3243854fe3f120dfec66b79198ba1728b" gracePeriod=30 Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.147369 4558 generic.go:334] "Generic (PLEG): container finished" podID="c570a7ea-d296-44bc-a48f-6dd8be9754d0" containerID="80fe464dc7cd617b6ebc3164ada8f61f3bec4caca84b82392b77289a475163d7" exitCode=2 Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.147437 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"c570a7ea-d296-44bc-a48f-6dd8be9754d0","Type":"ContainerDied","Data":"80fe464dc7cd617b6ebc3164ada8f61f3bec4caca84b82392b77289a475163d7"} Jan 20 17:37:41 crc kubenswrapper[4558]: W0120 17:37:41.154879 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod52895fa4_c08b_43b8_9f63_cbc8621db11e.slice/crio-b188a203f3875e52c33c42111e0cc880025085cb9a11a04452bcf06cd6b045e9 WatchSource:0}: Error finding container b188a203f3875e52c33c42111e0cc880025085cb9a11a04452bcf06cd6b045e9: Status 404 returned error can't find the container with id b188a203f3875e52c33c42111e0cc880025085cb9a11a04452bcf06cd6b045e9 Jan 20 17:37:41 crc kubenswrapper[4558]: 
I0120 17:37:41.157065 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/cff9fbf3-9b9a-4350-8589-8bfe5543d79f-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-84b9f45d47-9hlt9\" (UID: \"cff9fbf3-9b9a-4350-8589-8bfe5543d79f\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-9hlt9" Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.157252 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5ghgz\" (UniqueName: \"kubernetes.io/projected/cff9fbf3-9b9a-4350-8589-8bfe5543d79f-kube-api-access-5ghgz\") pod \"dnsmasq-dnsmasq-84b9f45d47-9hlt9\" (UID: \"cff9fbf3-9b9a-4350-8589-8bfe5543d79f\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-9hlt9" Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.157284 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cff9fbf3-9b9a-4350-8589-8bfe5543d79f-config\") pod \"dnsmasq-dnsmasq-84b9f45d47-9hlt9\" (UID: \"cff9fbf3-9b9a-4350-8589-8bfe5543d79f\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-9hlt9" Jan 20 17:37:41 crc kubenswrapper[4558]: E0120 17:37:41.157427 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:37:41 crc kubenswrapper[4558]: E0120 17:37:41.157463 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b18f4fcf-eaae-401f-99ec-7b130ad8a6c1-combined-ca-bundle podName:b18f4fcf-eaae-401f-99ec-7b130ad8a6c1 nodeName:}" failed. No retries permitted until 2026-01-20 17:37:42.15744864 +0000 UTC m=+3355.917786607 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/b18f4fcf-eaae-401f-99ec-7b130ad8a6c1-combined-ca-bundle") pod "memcached-0" (UID: "b18f4fcf-eaae-401f-99ec-7b130ad8a6c1") : secret "combined-ca-bundle" not found Jan 20 17:37:41 crc kubenswrapper[4558]: E0120 17:37:41.157702 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-memcached-svc: secret "cert-memcached-svc" not found Jan 20 17:37:41 crc kubenswrapper[4558]: E0120 17:37:41.157732 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b18f4fcf-eaae-401f-99ec-7b130ad8a6c1-memcached-tls-certs podName:b18f4fcf-eaae-401f-99ec-7b130ad8a6c1 nodeName:}" failed. No retries permitted until 2026-01-20 17:37:42.157724708 +0000 UTC m=+3355.918062675 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "memcached-tls-certs" (UniqueName: "kubernetes.io/secret/b18f4fcf-eaae-401f-99ec-7b130ad8a6c1-memcached-tls-certs") pod "memcached-0" (UID: "b18f4fcf-eaae-401f-99ec-7b130ad8a6c1") : secret "cert-memcached-svc" not found Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.170206 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-2fe2-account-create-update-t264m" Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.185982 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-db-sync-8rp7v"] Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.200909 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-nb-0_98f8dc8b-6446-4ba8-b37b-a11ae7414b65/ovsdbserver-nb/0.log" Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.200954 4558 generic.go:334] "Generic (PLEG): container finished" podID="98f8dc8b-6446-4ba8-b37b-a11ae7414b65" containerID="f56a163a8189df30e61d8f643eac8347a79e40378fdf5ec2740f041b866096e0" exitCode=2 Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.200970 4558 generic.go:334] "Generic (PLEG): container finished" podID="98f8dc8b-6446-4ba8-b37b-a11ae7414b65" containerID="ed49f3f83c5373eef322ba453c4777bc515d3e9dfcde8de0a5408d13b285222c" exitCode=143 Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.200997 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"98f8dc8b-6446-4ba8-b37b-a11ae7414b65","Type":"ContainerDied","Data":"f56a163a8189df30e61d8f643eac8347a79e40378fdf5ec2740f041b866096e0"} Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.201030 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"98f8dc8b-6446-4ba8-b37b-a11ae7414b65","Type":"ContainerDied","Data":"ed49f3f83c5373eef322ba453c4777bc515d3e9dfcde8de0a5408d13b285222c"} Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.213338 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-db-sync-8rp7v"] Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.237796 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.268756 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5ghgz\" (UniqueName: \"kubernetes.io/projected/cff9fbf3-9b9a-4350-8589-8bfe5543d79f-kube-api-access-5ghgz\") pod \"dnsmasq-dnsmasq-84b9f45d47-9hlt9\" (UID: \"cff9fbf3-9b9a-4350-8589-8bfe5543d79f\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-9hlt9" Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.268954 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cff9fbf3-9b9a-4350-8589-8bfe5543d79f-config\") pod \"dnsmasq-dnsmasq-84b9f45d47-9hlt9\" (UID: \"cff9fbf3-9b9a-4350-8589-8bfe5543d79f\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-9hlt9" Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.269086 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/cff9fbf3-9b9a-4350-8589-8bfe5543d79f-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-84b9f45d47-9hlt9\" (UID: \"cff9fbf3-9b9a-4350-8589-8bfe5543d79f\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-9hlt9" Jan 20 17:37:41 crc kubenswrapper[4558]: E0120 17:37:41.269513 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 20 17:37:41 crc kubenswrapper[4558]: E0120 17:37:41.269572 4558 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/configmap/0696a635-5dc9-46e9-8502-47fab9ff761c-config-data podName:0696a635-5dc9-46e9-8502-47fab9ff761c nodeName:}" failed. No retries permitted until 2026-01-20 17:37:43.269554971 +0000 UTC m=+3357.029892938 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/0696a635-5dc9-46e9-8502-47fab9ff761c-config-data") pod "rabbitmq-server-0" (UID: "0696a635-5dc9-46e9-8502-47fab9ff761c") : configmap "rabbitmq-config-data" not found Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.270506 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cff9fbf3-9b9a-4350-8589-8bfe5543d79f-config\") pod \"dnsmasq-dnsmasq-84b9f45d47-9hlt9\" (UID: \"cff9fbf3-9b9a-4350-8589-8bfe5543d79f\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-9hlt9" Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.272854 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/cff9fbf3-9b9a-4350-8589-8bfe5543d79f-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-84b9f45d47-9hlt9\" (UID: \"cff9fbf3-9b9a-4350-8589-8bfe5543d79f\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-9hlt9" Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.294735 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5ghgz\" (UniqueName: \"kubernetes.io/projected/cff9fbf3-9b9a-4350-8589-8bfe5543d79f-kube-api-access-5ghgz\") pod \"dnsmasq-dnsmasq-84b9f45d47-9hlt9\" (UID: \"cff9fbf3-9b9a-4350-8589-8bfe5543d79f\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-9hlt9" Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.305318 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-wlsbh"] Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.323706 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/rabbitmq-server-0" podUID="0696a635-5dc9-46e9-8502-47fab9ff761c" containerName="rabbitmq" containerID="cri-o://9f1296f437298973ce0b0219f1980b151cca677e25fb375f92cfbf98670c7983" gracePeriod=604800 Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.326732 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-db-create-5jmnq"] Jan 20 17:37:41 crc kubenswrapper[4558]: E0120 17:37:41.337557 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f48902bc3180bcf59f02a8f100bbf7e30cefe4513466bce23fe46f25d1cf1438 is running failed: container process not found" containerID="f48902bc3180bcf59f02a8f100bbf7e30cefe4513466bce23fe46f25d1cf1438" cmd=["/usr/bin/pidof","ovsdb-server"] Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.348142 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-cell-mapping-ml6gb"] Jan 20 17:37:41 crc kubenswrapper[4558]: E0120 17:37:41.351407 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f48902bc3180bcf59f02a8f100bbf7e30cefe4513466bce23fe46f25d1cf1438 is running failed: container process not found" containerID="f48902bc3180bcf59f02a8f100bbf7e30cefe4513466bce23fe46f25d1cf1438" cmd=["/usr/bin/pidof","ovsdb-server"] Jan 20 17:37:41 crc kubenswrapper[4558]: E0120 
17:37:41.352355 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f48902bc3180bcf59f02a8f100bbf7e30cefe4513466bce23fe46f25d1cf1438 is running failed: container process not found" containerID="f48902bc3180bcf59f02a8f100bbf7e30cefe4513466bce23fe46f25d1cf1438" cmd=["/usr/bin/pidof","ovsdb-server"] Jan 20 17:37:41 crc kubenswrapper[4558]: E0120 17:37:41.352415 4558 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f48902bc3180bcf59f02a8f100bbf7e30cefe4513466bce23fe46f25d1cf1438 is running failed: container process not found" probeType="Readiness" pod="openstack-kuttl-tests/ovsdbserver-sb-0" podUID="8b45e22f-53b9-4228-a28a-a5db18b6e583" containerName="ovsdbserver-sb" Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.355227 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-9hlt9" Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.361881 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-db-create-5jmnq"] Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.378386 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell0-cell-mapping-ml6gb"] Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.393859 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-55dfc8964d-kck7c"] Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.397686 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/placement-55dfc8964d-kck7c" podUID="6b77441a-39f5-4ed5-bf1b-c29900900242" containerName="placement-log" containerID="cri-o://0eec862e72c84cd059688cd69902bbff28efcdcf452151cfde7339967b5160f1" gracePeriod=30 Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.397811 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/placement-55dfc8964d-kck7c" podUID="6b77441a-39f5-4ed5-bf1b-c29900900242" containerName="placement-api" containerID="cri-o://bbf204899e737785b34e43ac3dcf8c1621d7b4c38d2b11ab4f044288ae0cdd0a" gracePeriod=30 Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.455817 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-cell-mapping-jqwc9"] Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.479380 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-cell-mapping-jqwc9"] Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.484757 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.484984 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="f7881580-f0a5-47a0-9622-3927bcce668a" containerName="glance-log" containerID="cri-o://97f89fd69e51892f2e484fb54c15bb385da2a687f24a01aa8d3d2e7c65114191" gracePeriod=30 Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.485278 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="f7881580-f0a5-47a0-9622-3927bcce668a" containerName="glance-httpd" 
containerID="cri-o://7afb4e77f803740421952c0dbe28e7f78cf5d2d3ff6b921fafb8facbbd98325f" gracePeriod=30 Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.489340 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-f7c7-account-create-update-hf7rn"] Jan 20 17:37:41 crc kubenswrapper[4558]: E0120 17:37:41.493239 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:37:41 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:37:41 crc kubenswrapper[4558]: Jan 20 17:37:41 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:37:41 crc kubenswrapper[4558]: Jan 20 17:37:41 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:37:41 crc kubenswrapper[4558]: Jan 20 17:37:41 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:37:41 crc kubenswrapper[4558]: Jan 20 17:37:41 crc kubenswrapper[4558]: if [ -n "neutron" ]; then Jan 20 17:37:41 crc kubenswrapper[4558]: GRANT_DATABASE="neutron" Jan 20 17:37:41 crc kubenswrapper[4558]: else Jan 20 17:37:41 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:37:41 crc kubenswrapper[4558]: fi Jan 20 17:37:41 crc kubenswrapper[4558]: Jan 20 17:37:41 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:37:41 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:37:41 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:37:41 crc kubenswrapper[4558]: # 3. 
create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:37:41 crc kubenswrapper[4558]: # support updates Jan 20 17:37:41 crc kubenswrapper[4558]: Jan 20 17:37:41 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:37:41 crc kubenswrapper[4558]: E0120 17:37:41.494885 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"neutron-db-secret\\\" not found\"" pod="openstack-kuttl-tests/neutron-f7c7-account-create-update-hf7rn" podUID="0f5a8541-5b86-4fff-8f94-18b8b5ee3ebf" Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.508382 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-6d8f9956fd-j6jg4"] Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.508705 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-6d8f9956fd-j6jg4" podUID="5b57aa66-3c71-422d-b029-86cb0e3a9aef" containerName="neutron-api" containerID="cri-o://bbdfd5ce6843e43c9db75a7afcdc0a1ab6a9db243de4fc167edef10ed383f798" gracePeriod=30 Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.509132 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-6d8f9956fd-j6jg4" podUID="5b57aa66-3c71-422d-b029-86cb0e3a9aef" containerName="neutron-httpd" containerID="cri-o://1760926567dc1435e74fb123df5eb6e6a9b5f8ff375b1b59306d1e2e66b1ba69" gracePeriod=30 Jan 20 17:37:41 crc kubenswrapper[4558]: E0120 17:37:41.523728 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:37:41 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:37:41 crc kubenswrapper[4558]: Jan 20 17:37:41 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:37:41 crc kubenswrapper[4558]: Jan 20 17:37:41 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:37:41 crc kubenswrapper[4558]: Jan 20 17:37:41 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:37:41 crc kubenswrapper[4558]: Jan 20 17:37:41 crc kubenswrapper[4558]: if [ -n "barbican" ]; then Jan 20 17:37:41 crc kubenswrapper[4558]: GRANT_DATABASE="barbican" Jan 20 17:37:41 crc kubenswrapper[4558]: else Jan 20 17:37:41 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:37:41 crc kubenswrapper[4558]: fi Jan 20 17:37:41 crc kubenswrapper[4558]: Jan 20 17:37:41 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:37:41 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:37:41 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:37:41 crc kubenswrapper[4558]: # 3. 
create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:37:41 crc kubenswrapper[4558]: # support updates Jan 20 17:37:41 crc kubenswrapper[4558]: Jan 20 17:37:41 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:37:41 crc kubenswrapper[4558]: E0120 17:37:41.531310 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"barbican-db-secret\\\" not found\"" pod="openstack-kuttl-tests/barbican-b147-account-create-update-zdjzj" podUID="7ca40caa-67e9-4be0-8697-cf15086736bc" Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.531444 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-19e0-account-create-update-x5cqg"] Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.546469 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-19e0-account-create-update-x5cqg"] Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.549236 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-b147-account-create-update-zdjzj"] Jan 20 17:37:41 crc kubenswrapper[4558]: E0120 17:37:41.577875 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Jan 20 17:37:41 crc kubenswrapper[4558]: E0120 17:37:41.578226 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/0c6a6265-cf90-4039-9200-ba478d612baa-config-data podName:0c6a6265-cf90-4039-9200-ba478d612baa nodeName:}" failed. No retries permitted until 2026-01-20 17:37:43.578208937 +0000 UTC m=+3357.338546903 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/0c6a6265-cf90-4039-9200-ba478d612baa-config-data") pod "rabbitmq-cell1-server-0" (UID: "0c6a6265-cf90-4039-9200-ba478d612baa") : configmap "rabbitmq-cell1-config-data" not found Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.596614 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-0"] Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.613795 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-nb-0_98f8dc8b-6446-4ba8-b37b-a11ae7414b65/ovsdbserver-nb/0.log" Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.613877 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.639599 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-db-create-55tnd"] Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.679138 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/98f8dc8b-6446-4ba8-b37b-a11ae7414b65-combined-ca-bundle\") pod \"98f8dc8b-6446-4ba8-b37b-a11ae7414b65\" (UID: \"98f8dc8b-6446-4ba8-b37b-a11ae7414b65\") " Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.679340 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/98f8dc8b-6446-4ba8-b37b-a11ae7414b65-config\") pod \"98f8dc8b-6446-4ba8-b37b-a11ae7414b65\" (UID: \"98f8dc8b-6446-4ba8-b37b-a11ae7414b65\") " Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.679385 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/98f8dc8b-6446-4ba8-b37b-a11ae7414b65-metrics-certs-tls-certs\") pod \"98f8dc8b-6446-4ba8-b37b-a11ae7414b65\" (UID: \"98f8dc8b-6446-4ba8-b37b-a11ae7414b65\") " Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.679410 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/98f8dc8b-6446-4ba8-b37b-a11ae7414b65-ovsdbserver-nb-tls-certs\") pod \"98f8dc8b-6446-4ba8-b37b-a11ae7414b65\" (UID: \"98f8dc8b-6446-4ba8-b37b-a11ae7414b65\") " Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.679468 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/98f8dc8b-6446-4ba8-b37b-a11ae7414b65-scripts\") pod \"98f8dc8b-6446-4ba8-b37b-a11ae7414b65\" (UID: \"98f8dc8b-6446-4ba8-b37b-a11ae7414b65\") " Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.679536 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/98f8dc8b-6446-4ba8-b37b-a11ae7414b65-ovsdb-rundir\") pod \"98f8dc8b-6446-4ba8-b37b-a11ae7414b65\" (UID: \"98f8dc8b-6446-4ba8-b37b-a11ae7414b65\") " Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.679640 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bzkqr\" (UniqueName: \"kubernetes.io/projected/98f8dc8b-6446-4ba8-b37b-a11ae7414b65-kube-api-access-bzkqr\") pod \"98f8dc8b-6446-4ba8-b37b-a11ae7414b65\" (UID: \"98f8dc8b-6446-4ba8-b37b-a11ae7414b65\") " Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.679753 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-nb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"98f8dc8b-6446-4ba8-b37b-a11ae7414b65\" (UID: \"98f8dc8b-6446-4ba8-b37b-a11ae7414b65\") " Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.685643 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/98f8dc8b-6446-4ba8-b37b-a11ae7414b65-config" (OuterVolumeSpecName: "config") pod "98f8dc8b-6446-4ba8-b37b-a11ae7414b65" (UID: "98f8dc8b-6446-4ba8-b37b-a11ae7414b65"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.686413 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/98f8dc8b-6446-4ba8-b37b-a11ae7414b65-scripts" (OuterVolumeSpecName: "scripts") pod "98f8dc8b-6446-4ba8-b37b-a11ae7414b65" (UID: "98f8dc8b-6446-4ba8-b37b-a11ae7414b65"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.695257 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/98f8dc8b-6446-4ba8-b37b-a11ae7414b65-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "98f8dc8b-6446-4ba8-b37b-a11ae7414b65" (UID: "98f8dc8b-6446-4ba8-b37b-a11ae7414b65"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.695508 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage13-crc" (OuterVolumeSpecName: "ovndbcluster-nb-etc-ovn") pod "98f8dc8b-6446-4ba8-b37b-a11ae7414b65" (UID: "98f8dc8b-6446-4ba8-b37b-a11ae7414b65"). InnerVolumeSpecName "local-storage13-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.696582 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/98f8dc8b-6446-4ba8-b37b-a11ae7414b65-kube-api-access-bzkqr" (OuterVolumeSpecName: "kube-api-access-bzkqr") pod "98f8dc8b-6446-4ba8-b37b-a11ae7414b65" (UID: "98f8dc8b-6446-4ba8-b37b-a11ae7414b65"). InnerVolumeSpecName "kube-api-access-bzkqr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.702939 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-db-create-55tnd"] Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.715841 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" podUID="0c6a6265-cf90-4039-9200-ba478d612baa" containerName="rabbitmq" containerID="cri-o://b6873866e3f13dd35b6f2b58c613d3e9843e8b8a9fdc297c58001a3d665529a7" gracePeriod=604800 Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.735720 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-db-create-b82kr"] Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.758874 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/placement-db-create-b82kr"] Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.773395 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-61de-account-create-update-467z8"] Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.774492 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/98f8dc8b-6446-4ba8-b37b-a11ae7414b65-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "98f8dc8b-6446-4ba8-b37b-a11ae7414b65" (UID: "98f8dc8b-6446-4ba8-b37b-a11ae7414b65"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.781020 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/placement-61de-account-create-update-467z8"] Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.795917 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-db-create-cvpdw"] Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.796442 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/98f8dc8b-6446-4ba8-b37b-a11ae7414b65-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.796465 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/98f8dc8b-6446-4ba8-b37b-a11ae7414b65-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.796475 4558 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/98f8dc8b-6446-4ba8-b37b-a11ae7414b65-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.796489 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bzkqr\" (UniqueName: \"kubernetes.io/projected/98f8dc8b-6446-4ba8-b37b-a11ae7414b65-kube-api-access-bzkqr\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.796516 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") on node \"crc\" " Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.796525 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/98f8dc8b-6446-4ba8-b37b-a11ae7414b65-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.799296 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/ovn-northd-0" podUID="c570a7ea-d296-44bc-a48f-6dd8be9754d0" containerName="ovn-northd" probeResult="failure" output="command timed out" Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.812664 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-db-create-cvpdw"] Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.832851 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-f7c7-account-create-update-hf7rn"] Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.838347 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-2fe2-account-create-update-t264m"] Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.848898 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage13-crc" (UniqueName: "kubernetes.io/local-volume/local-storage13-crc") on node "crc" Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.853736 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-db-create-2rxk6"] Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.855684 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-sb-0_8b45e22f-53b9-4228-a28a-a5db18b6e583/ovsdbserver-sb/0.log" Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.855806 4558 util.go:48] "No ready sandbox for 
pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.861671 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-db-create-2rxk6"] Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.872739 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.874315 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="7ee90982-23a3-4111-9c4e-159828a51a69" containerName="nova-metadata-metadata" containerID="cri-o://3bb4afac6c286ba59753565000e90ace9365499f3dff7e44738101c6faf5cc39" gracePeriod=30 Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.874315 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="7ee90982-23a3-4111-9c4e-159828a51a69" containerName="nova-metadata-log" containerID="cri-o://a9b864e0c7723915ae39749b96750b7b8695818471060aad197d75c66a8cc364" gracePeriod=30 Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.897438 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-sb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"8b45e22f-53b9-4228-a28a-a5db18b6e583\" (UID: \"8b45e22f-53b9-4228-a28a-a5db18b6e583\") " Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.897484 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b45e22f-53b9-4228-a28a-a5db18b6e583-ovsdbserver-sb-tls-certs\") pod \"8b45e22f-53b9-4228-a28a-a5db18b6e583\" (UID: \"8b45e22f-53b9-4228-a28a-a5db18b6e583\") " Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.897606 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b45e22f-53b9-4228-a28a-a5db18b6e583-combined-ca-bundle\") pod \"8b45e22f-53b9-4228-a28a-a5db18b6e583\" (UID: \"8b45e22f-53b9-4228-a28a-a5db18b6e583\") " Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.916322 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-twwcp\" (UniqueName: \"kubernetes.io/projected/8b45e22f-53b9-4228-a28a-a5db18b6e583-kube-api-access-twwcp\") pod \"8b45e22f-53b9-4228-a28a-a5db18b6e583\" (UID: \"8b45e22f-53b9-4228-a28a-a5db18b6e583\") " Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.916363 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b45e22f-53b9-4228-a28a-a5db18b6e583-metrics-certs-tls-certs\") pod \"8b45e22f-53b9-4228-a28a-a5db18b6e583\" (UID: \"8b45e22f-53b9-4228-a28a-a5db18b6e583\") " Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.916447 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b45e22f-53b9-4228-a28a-a5db18b6e583-config\") pod \"8b45e22f-53b9-4228-a28a-a5db18b6e583\" (UID: \"8b45e22f-53b9-4228-a28a-a5db18b6e583\") " Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.916483 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8b45e22f-53b9-4228-a28a-a5db18b6e583-scripts\") pod 
\"8b45e22f-53b9-4228-a28a-a5db18b6e583\" (UID: \"8b45e22f-53b9-4228-a28a-a5db18b6e583\") " Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.916571 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/8b45e22f-53b9-4228-a28a-a5db18b6e583-ovsdb-rundir\") pod \"8b45e22f-53b9-4228-a28a-a5db18b6e583\" (UID: \"8b45e22f-53b9-4228-a28a-a5db18b6e583\") " Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.917461 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.918229 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8b45e22f-53b9-4228-a28a-a5db18b6e583-config" (OuterVolumeSpecName: "config") pod "8b45e22f-53b9-4228-a28a-a5db18b6e583" (UID: "8b45e22f-53b9-4228-a28a-a5db18b6e583"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.918678 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-b147-account-create-update-zdjzj"] Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.919305 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8b45e22f-53b9-4228-a28a-a5db18b6e583-scripts" (OuterVolumeSpecName: "scripts") pod "8b45e22f-53b9-4228-a28a-a5db18b6e583" (UID: "8b45e22f-53b9-4228-a28a-a5db18b6e583"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.919727 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8b45e22f-53b9-4228-a28a-a5db18b6e583-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "8b45e22f-53b9-4228-a28a-a5db18b6e583" (UID: "8b45e22f-53b9-4228-a28a-a5db18b6e583"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.934618 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "ovndbcluster-sb-etc-ovn") pod "8b45e22f-53b9-4228-a28a-a5db18b6e583" (UID: "8b45e22f-53b9-4228-a28a-a5db18b6e583"). InnerVolumeSpecName "local-storage04-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.942922 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8b45e22f-53b9-4228-a28a-a5db18b6e583-kube-api-access-twwcp" (OuterVolumeSpecName: "kube-api-access-twwcp") pod "8b45e22f-53b9-4228-a28a-a5db18b6e583" (UID: "8b45e22f-53b9-4228-a28a-a5db18b6e583"). InnerVolumeSpecName "kube-api-access-twwcp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.948539 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/98f8dc8b-6446-4ba8-b37b-a11ae7414b65-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "98f8dc8b-6446-4ba8-b37b-a11ae7414b65" (UID: "98f8dc8b-6446-4ba8-b37b-a11ae7414b65"). InnerVolumeSpecName "metrics-certs-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.964453 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.979934 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-db-create-ph8wn"] Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.984116 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-db-create-ph8wn"] Jan 20 17:37:41 crc kubenswrapper[4558]: I0120 17:37:41.987694 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8b45e22f-53b9-4228-a28a-a5db18b6e583-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8b45e22f-53b9-4228-a28a-a5db18b6e583" (UID: "8b45e22f-53b9-4228-a28a-a5db18b6e583"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.059875 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.060447 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="1addf16e-1af6-431b-aff1-05c48a952f5a" containerName="ceilometer-central-agent" containerID="cri-o://0fde5ea8da11b84a29f2a0ac302c4c48b6a41c64a33f837f39b3bbaaf4487feb" gracePeriod=30 Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.060906 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="1addf16e-1af6-431b-aff1-05c48a952f5a" containerName="proxy-httpd" containerID="cri-o://d7f94ffeca974ded0f05a2d4e0bd7bdbcb45172d758621d01d81132148b96b8b" gracePeriod=30 Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.060961 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="1addf16e-1af6-431b-aff1-05c48a952f5a" containerName="sg-core" containerID="cri-o://9c1da6a399e8373ade66666f30fdfad18e61c1a4529147d6a10d0385c82a426f" gracePeriod=30 Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.061005 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="1addf16e-1af6-431b-aff1-05c48a952f5a" containerName="ceilometer-notification-agent" containerID="cri-o://f49f1bac498f7d02524aaee31c0af982140e123e147ff77674e41803999fb5b9" gracePeriod=30 Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.064024 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-twwcp\" (UniqueName: \"kubernetes.io/projected/8b45e22f-53b9-4228-a28a-a5db18b6e583-kube-api-access-twwcp\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.064667 4558 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/98f8dc8b-6446-4ba8-b37b-a11ae7414b65-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.064772 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b45e22f-53b9-4228-a28a-a5db18b6e583-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.064837 4558 reconciler_common.go:293] "Volume detached 
for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8b45e22f-53b9-4228-a28a-a5db18b6e583-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.064928 4558 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/8b45e22f-53b9-4228-a28a-a5db18b6e583-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.065009 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" " Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.065072 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b45e22f-53b9-4228-a28a-a5db18b6e583-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.077208 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.077401 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="906beedc-3fa2-4d6e-a6e8-485ca2fb1082" containerName="nova-api-log" containerID="cri-o://9da02a19e6777029509b4f8838ad4012f0bd07388cadfbfa55cbc40a5673b825" gracePeriod=30 Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.078780 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="906beedc-3fa2-4d6e-a6e8-485ca2fb1082" containerName="nova-api-api" containerID="cri-o://48ace894e440f9c70ed8c855d176b45725c5d24b103871d2c8046cc04e75abb3" gracePeriod=30 Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.103257 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/98f8dc8b-6446-4ba8-b37b-a11ae7414b65-ovsdbserver-nb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-nb-tls-certs") pod "98f8dc8b-6446-4ba8-b37b-a11ae7414b65" (UID: "98f8dc8b-6446-4ba8-b37b-a11ae7414b65"). InnerVolumeSpecName "ovsdbserver-nb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.116547 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8b45e22f-53b9-4228-a28a-a5db18b6e583-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "8b45e22f-53b9-4228-a28a-a5db18b6e583" (UID: "8b45e22f-53b9-4228-a28a-a5db18b6e583"). InnerVolumeSpecName "metrics-certs-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.155735 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc" Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.158024 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-db-create-ngdbp"] Jan 20 17:37:42 crc kubenswrapper[4558]: W0120 17:37:42.159602 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode5f57088_ae0f_4326_89fb_a7957da83477.slice/crio-959fda3de432efcca43d31c3d21aa1e3d6a27a4e4f6ba00d7a2cc9eeab56411b WatchSource:0}: Error finding container 959fda3de432efcca43d31c3d21aa1e3d6a27a4e4f6ba00d7a2cc9eeab56411b: Status 404 returned error can't find the container with id 959fda3de432efcca43d31c3d21aa1e3d6a27a4e4f6ba00d7a2cc9eeab56411b Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.183219 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-41bf-account-create-update-ztpg6"] Jan 20 17:37:42 crc kubenswrapper[4558]: E0120 17:37:42.186062 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:37:42 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:37:42 crc kubenswrapper[4558]: Jan 20 17:37:42 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:37:42 crc kubenswrapper[4558]: Jan 20 17:37:42 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:37:42 crc kubenswrapper[4558]: Jan 20 17:37:42 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:37:42 crc kubenswrapper[4558]: Jan 20 17:37:42 crc kubenswrapper[4558]: if [ -n "nova_cell0" ]; then Jan 20 17:37:42 crc kubenswrapper[4558]: GRANT_DATABASE="nova_cell0" Jan 20 17:37:42 crc kubenswrapper[4558]: else Jan 20 17:37:42 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:37:42 crc kubenswrapper[4558]: fi Jan 20 17:37:42 crc kubenswrapper[4558]: Jan 20 17:37:42 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:37:42 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:37:42 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:37:42 crc kubenswrapper[4558]: # 3. 
create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:37:42 crc kubenswrapper[4558]: # support updates Jan 20 17:37:42 crc kubenswrapper[4558]: Jan 20 17:37:42 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:37:42 crc kubenswrapper[4558]: E0120 17:37:42.187314 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"nova-cell0-db-secret\\\" not found\"" pod="openstack-kuttl-tests/nova-cell0-41bf-account-create-update-ztpg6" podUID="e5f57088-ae0f-4326-89fb-a7957da83477" Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.195498 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-db-create-45f8v"] Jan 20 17:37:42 crc kubenswrapper[4558]: E0120 17:37:42.204756 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:37:42 crc kubenswrapper[4558]: E0120 17:37:42.204820 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b18f4fcf-eaae-401f-99ec-7b130ad8a6c1-combined-ca-bundle podName:b18f4fcf-eaae-401f-99ec-7b130ad8a6c1 nodeName:}" failed. No retries permitted until 2026-01-20 17:37:44.204801586 +0000 UTC m=+3357.965139544 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/b18f4fcf-eaae-401f-99ec-7b130ad8a6c1-combined-ca-bundle") pod "memcached-0" (UID: "b18f4fcf-eaae-401f-99ec-7b130ad8a6c1") : secret "combined-ca-bundle" not found Jan 20 17:37:42 crc kubenswrapper[4558]: E0120 17:37:42.205154 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-memcached-svc: secret "cert-memcached-svc" not found Jan 20 17:37:42 crc kubenswrapper[4558]: E0120 17:37:42.205244 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b18f4fcf-eaae-401f-99ec-7b130ad8a6c1-memcached-tls-certs podName:b18f4fcf-eaae-401f-99ec-7b130ad8a6c1 nodeName:}" failed. No retries permitted until 2026-01-20 17:37:44.205223189 +0000 UTC m=+3357.965561156 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "memcached-tls-certs" (UniqueName: "kubernetes.io/secret/b18f4fcf-eaae-401f-99ec-7b130ad8a6c1-memcached-tls-certs") pod "memcached-0" (UID: "b18f4fcf-eaae-401f-99ec-7b130ad8a6c1") : secret "cert-memcached-svc" not found Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.205364 4558 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b45e22f-53b9-4228-a28a-a5db18b6e583-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.205453 4558 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/98f8dc8b-6446-4ba8-b37b-a11ae7414b65-ovsdbserver-nb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.210253 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.224217 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell0-db-create-ngdbp"] Jan 20 17:37:42 crc kubenswrapper[4558]: E0120 17:37:42.242214 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:37:42 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:37:42 crc kubenswrapper[4558]: Jan 20 17:37:42 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:37:42 crc kubenswrapper[4558]: Jan 20 17:37:42 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:37:42 crc kubenswrapper[4558]: Jan 20 17:37:42 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:37:42 crc kubenswrapper[4558]: Jan 20 17:37:42 crc kubenswrapper[4558]: if [ -n "nova_api" ]; then Jan 20 17:37:42 crc kubenswrapper[4558]: GRANT_DATABASE="nova_api" Jan 20 17:37:42 crc kubenswrapper[4558]: else Jan 20 17:37:42 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:37:42 crc kubenswrapper[4558]: fi Jan 20 17:37:42 crc kubenswrapper[4558]: Jan 20 17:37:42 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:37:42 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:37:42 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:37:42 crc kubenswrapper[4558]: # 3. 
create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:37:42 crc kubenswrapper[4558]: # support updates Jan 20 17:37:42 crc kubenswrapper[4558]: Jan 20 17:37:42 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:37:42 crc kubenswrapper[4558]: E0120 17:37:42.244425 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"nova-api-db-secret\\\" not found\"" pod="openstack-kuttl-tests/nova-api-2fe2-account-create-update-t264m" podUID="cdf89a68-b15b-4f2d-9016-55fb28011197" Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.256231 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-6d4445bf46-57dqj"] Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.256515 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-keystone-listener-6d4445bf46-57dqj" podUID="11d1aa99-639c-451a-876e-59de6098e407" containerName="barbican-keystone-listener-log" containerID="cri-o://db329d06eb52b93ff0cb7bb5890e9f6c1ce660d1bfa28f5bef02917fc16526c6" gracePeriod=30 Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.256632 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-keystone-listener-6d4445bf46-57dqj" podUID="11d1aa99-639c-451a-876e-59de6098e407" containerName="barbican-keystone-listener" containerID="cri-o://cb6897619dc4891e5845ed9d58dbce37e97a4c36140e8ccd66c2309dcdea8abb" gracePeriod=30 Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.274542 4558 generic.go:334] "Generic (PLEG): container finished" podID="f7881580-f0a5-47a0-9622-3927bcce668a" containerID="97f89fd69e51892f2e484fb54c15bb385da2a687f24a01aa8d3d2e7c65114191" exitCode=143 Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.274617 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"f7881580-f0a5-47a0-9622-3927bcce668a","Type":"ContainerDied","Data":"97f89fd69e51892f2e484fb54c15bb385da2a687f24a01aa8d3d2e7c65114191"} Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.278138 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-db-create-45f8v"] Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.287060 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8b45e22f-53b9-4228-a28a-a5db18b6e583-ovsdbserver-sb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-sb-tls-certs") pod "8b45e22f-53b9-4228-a28a-a5db18b6e583" (UID: "8b45e22f-53b9-4228-a28a-a5db18b6e583"). InnerVolumeSpecName "ovsdbserver-sb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.301758 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-worker-dd549b649-grdrm"] Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.302022 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-worker-dd549b649-grdrm" podUID="640de3b3-60e1-41b8-ab00-22e375bad65c" containerName="barbican-worker-log" containerID="cri-o://6c5d77de3e10ffa19cd49dc2e45df3fcf7c673448b447d8327c2137ebcc78c33" gracePeriod=30 Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.302288 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-worker-dd549b649-grdrm" podUID="640de3b3-60e1-41b8-ab00-22e375bad65c" containerName="barbican-worker" containerID="cri-o://c97affc0c0f8fba83bdeaf55c35e5f0b460177945ced19a520ae5ea01ea75dfd" gracePeriod=30 Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.320563 4558 generic.go:334] "Generic (PLEG): container finished" podID="f19d5a0b-6d74-4b46-86a3-9381feb3f158" containerID="32a0b1b4cb9343de4b8cd2f43bcb90496f890496ba1d6d3239983dac00184a6b" exitCode=143 Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.320645 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"f19d5a0b-6d74-4b46-86a3-9381feb3f158","Type":"ContainerDied","Data":"32a0b1b4cb9343de4b8cd2f43bcb90496f890496ba1d6d3239983dac00184a6b"} Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.325392 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-b147-account-create-update-zdjzj" event={"ID":"7ca40caa-67e9-4be0-8697-cf15086736bc","Type":"ContainerStarted","Data":"92f0d365d5411ae74d15e2181c24da69129cd7f91b296bf5f51e909e71befc8c"} Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.327309 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-api-64bd4857c6-2s9h4"] Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.327511 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-api-64bd4857c6-2s9h4" podUID="94062d42-28cb-4c8a-afa4-f51458dedc6c" containerName="barbican-api-log" containerID="cri-o://bd3938888a8a3a7fda5215bb19bd64fb769bd0151d4bb989be309f5b70b3ad7c" gracePeriod=30 Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.327611 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-api-64bd4857c6-2s9h4" podUID="94062d42-28cb-4c8a-afa4-f51458dedc6c" containerName="barbican-api" containerID="cri-o://1aa46d5cbab3879dbc6e24ca32547b207cb8e11bc47b9d499882d8000bc7b577" gracePeriod=30 Jan 20 17:37:42 crc kubenswrapper[4558]: E0120 17:37:42.331835 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:37:42 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:37:42 crc kubenswrapper[4558]: Jan 20 17:37:42 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:37:42 crc kubenswrapper[4558]: Jan 20 17:37:42 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 
20 17:37:42 crc kubenswrapper[4558]: Jan 20 17:37:42 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:37:42 crc kubenswrapper[4558]: Jan 20 17:37:42 crc kubenswrapper[4558]: if [ -n "barbican" ]; then Jan 20 17:37:42 crc kubenswrapper[4558]: GRANT_DATABASE="barbican" Jan 20 17:37:42 crc kubenswrapper[4558]: else Jan 20 17:37:42 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:37:42 crc kubenswrapper[4558]: fi Jan 20 17:37:42 crc kubenswrapper[4558]: Jan 20 17:37:42 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:37:42 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:37:42 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:37:42 crc kubenswrapper[4558]: # 3. create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:37:42 crc kubenswrapper[4558]: # support updates Jan 20 17:37:42 crc kubenswrapper[4558]: Jan 20 17:37:42 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:37:42 crc kubenswrapper[4558]: E0120 17:37:42.334132 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"barbican-db-secret\\\" not found\"" pod="openstack-kuttl-tests/barbican-b147-account-create-update-zdjzj" podUID="7ca40caa-67e9-4be0-8697-cf15086736bc" Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.344205 4558 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/8b45e22f-53b9-4228-a28a-a5db18b6e583-ovsdbserver-sb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.348700 4558 generic.go:334] "Generic (PLEG): container finished" podID="5b57aa66-3c71-422d-b029-86cb0e3a9aef" containerID="1760926567dc1435e74fb123df5eb6e6a9b5f8ff375b1b59306d1e2e66b1ba69" exitCode=0 Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.348777 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-6d8f9956fd-j6jg4" event={"ID":"5b57aa66-3c71-422d-b029-86cb0e3a9aef","Type":"ContainerDied","Data":"1760926567dc1435e74fb123df5eb6e6a9b5f8ff375b1b59306d1e2e66b1ba69"} Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.352708 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.353023 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" podUID="4fd9bebc-9868-4970-bc1f-d047286980d0" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://6517bf0111713477475cc09d3cf93d2400d5d1a9f549d2bfca4f578e113b540a" gracePeriod=30 Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.380328 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-41bf-account-create-update-ztpg6"] Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.382081 4558 generic.go:334] "Generic (PLEG): container finished" podID="6b77441a-39f5-4ed5-bf1b-c29900900242" containerID="0eec862e72c84cd059688cd69902bbff28efcdcf452151cfde7339967b5160f1" exitCode=143 Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.382191 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-55dfc8964d-kck7c" 
event={"ID":"6b77441a-39f5-4ed5-bf1b-c29900900242","Type":"ContainerDied","Data":"0eec862e72c84cd059688cd69902bbff28efcdcf452151cfde7339967b5160f1"} Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.391745 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-2fe2-account-create-update-t264m"] Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.410208 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-wlsbh"] Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.411584 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/openstack-cell1-galera-0" podUID="1fff3747-796d-420a-aff0-846dfc615df9" containerName="galera" containerID="cri-o://f5120513f44b4adc059dbacc3c6c22162c7eee356a5828f446ded77ed4759a9f" gracePeriod=30 Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.416269 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.416501 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/kube-state-metrics-0" podUID="f09a19a5-d5eb-4466-9cb5-309cd246004e" containerName="kube-state-metrics" containerID="cri-o://e618252041bc2c7c61af3ff12fe2cfdf9bcc601b2bc8675901ba9e059426eff6" gracePeriod=30 Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.424607 4558 generic.go:334] "Generic (PLEG): container finished" podID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" containerID="ed8a91ba58ec453d2111a81153c4d592bd204afe8c8353e52171871827a439ce" exitCode=0 Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.424630 4558 generic.go:334] "Generic (PLEG): container finished" podID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" containerID="e07ad59dc71e1f67f59603c4aac989fb94b2c710bade32305a1dc8101de80423" exitCode=0 Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.424637 4558 generic.go:334] "Generic (PLEG): container finished" podID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" containerID="9230e487ce369f41cbd9bc631dd1261a599ee2dacc4825a2345c5d5867719c36" exitCode=0 Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.424644 4558 generic.go:334] "Generic (PLEG): container finished" podID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" containerID="bad7f6a8d29de5c70b1535d6075c42725412283bb958655edcb536d62f10c419" exitCode=0 Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.424651 4558 generic.go:334] "Generic (PLEG): container finished" podID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" containerID="3a2576d6f85fa2071ef37c084cdd8c32f7c6ddb702173be6308da0b714ce8400" exitCode=0 Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.424657 4558 generic.go:334] "Generic (PLEG): container finished" podID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" containerID="2773fa3804e1639afaf6e312e08a0b9f626d09103c779a099e0c1bbf0928a8b9" exitCode=0 Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.424663 4558 generic.go:334] "Generic (PLEG): container finished" podID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" containerID="23ff4d2ea69aec4c032a694e47beda8f08067327b0540a599b22bd0a5e1d7fe1" exitCode=0 Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.424671 4558 generic.go:334] "Generic (PLEG): container finished" podID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" containerID="47797d4b979364b402e28a83de59b30acd8a2031f77af23bdb31c8827f23a687" exitCode=0 Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.424679 4558 generic.go:334] 
"Generic (PLEG): container finished" podID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" containerID="9f1797db51cad1bf6e1bdae441554b6ce94379733c94a7ad9722e8bdd8202d02" exitCode=0 Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.424686 4558 generic.go:334] "Generic (PLEG): container finished" podID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" containerID="2b29e3812d37926ee8f85d9b22c2a577e69fa75b6f1695111d8868abfe8319dc" exitCode=0 Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.424693 4558 generic.go:334] "Generic (PLEG): container finished" podID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" containerID="d0d49ab0892c850e328fb329607d75c06674dd1862b648f22f4a98fb49a98e23" exitCode=0 Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.424700 4558 generic.go:334] "Generic (PLEG): container finished" podID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" containerID="2539753eba585cbf5573638f6961f2168ad0433d885e5c40c84893ee902e38b5" exitCode=0 Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.424706 4558 generic.go:334] "Generic (PLEG): container finished" podID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" containerID="f84266a8171f0ff80bdcdf75ef52c201b51aa2133964c7208ddf98fa9d157472" exitCode=0 Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.424713 4558 generic.go:334] "Generic (PLEG): container finished" podID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" containerID="59a8367d177182b4518509dac1c5fda5058c9891b3fbc80a0b902bff689f902b" exitCode=0 Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.424768 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"85471d11-01bf-4a15-b6bc-fabbdaa5750b","Type":"ContainerDied","Data":"ed8a91ba58ec453d2111a81153c4d592bd204afe8c8353e52171871827a439ce"} Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.424789 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"85471d11-01bf-4a15-b6bc-fabbdaa5750b","Type":"ContainerDied","Data":"e07ad59dc71e1f67f59603c4aac989fb94b2c710bade32305a1dc8101de80423"} Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.424799 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"85471d11-01bf-4a15-b6bc-fabbdaa5750b","Type":"ContainerDied","Data":"9230e487ce369f41cbd9bc631dd1261a599ee2dacc4825a2345c5d5867719c36"} Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.424808 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"85471d11-01bf-4a15-b6bc-fabbdaa5750b","Type":"ContainerDied","Data":"bad7f6a8d29de5c70b1535d6075c42725412283bb958655edcb536d62f10c419"} Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.424817 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"85471d11-01bf-4a15-b6bc-fabbdaa5750b","Type":"ContainerDied","Data":"3a2576d6f85fa2071ef37c084cdd8c32f7c6ddb702173be6308da0b714ce8400"} Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.424828 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"85471d11-01bf-4a15-b6bc-fabbdaa5750b","Type":"ContainerDied","Data":"2773fa3804e1639afaf6e312e08a0b9f626d09103c779a099e0c1bbf0928a8b9"} Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.424838 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" 
event={"ID":"85471d11-01bf-4a15-b6bc-fabbdaa5750b","Type":"ContainerDied","Data":"23ff4d2ea69aec4c032a694e47beda8f08067327b0540a599b22bd0a5e1d7fe1"} Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.424847 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"85471d11-01bf-4a15-b6bc-fabbdaa5750b","Type":"ContainerDied","Data":"47797d4b979364b402e28a83de59b30acd8a2031f77af23bdb31c8827f23a687"} Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.424856 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"85471d11-01bf-4a15-b6bc-fabbdaa5750b","Type":"ContainerDied","Data":"9f1797db51cad1bf6e1bdae441554b6ce94379733c94a7ad9722e8bdd8202d02"} Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.424877 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"85471d11-01bf-4a15-b6bc-fabbdaa5750b","Type":"ContainerDied","Data":"2b29e3812d37926ee8f85d9b22c2a577e69fa75b6f1695111d8868abfe8319dc"} Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.424886 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"85471d11-01bf-4a15-b6bc-fabbdaa5750b","Type":"ContainerDied","Data":"d0d49ab0892c850e328fb329607d75c06674dd1862b648f22f4a98fb49a98e23"} Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.424894 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"85471d11-01bf-4a15-b6bc-fabbdaa5750b","Type":"ContainerDied","Data":"2539753eba585cbf5573638f6961f2168ad0433d885e5c40c84893ee902e38b5"} Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.424904 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"85471d11-01bf-4a15-b6bc-fabbdaa5750b","Type":"ContainerDied","Data":"f84266a8171f0ff80bdcdf75ef52c201b51aa2133964c7208ddf98fa9d157472"} Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.424913 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"85471d11-01bf-4a15-b6bc-fabbdaa5750b","Type":"ContainerDied","Data":"59a8367d177182b4518509dac1c5fda5058c9891b3fbc80a0b902bff689f902b"} Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.434229 4558 generic.go:334] "Generic (PLEG): container finished" podID="9800a14c-ebf5-4a5d-b384-c133973b55ff" containerID="f88977b0890e7c6b4326fe8b4078f4b100c6c3f58772736456b0c1ccabf3b4fb" exitCode=0 Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.434310 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"9800a14c-ebf5-4a5d-b384-c133973b55ff","Type":"ContainerDied","Data":"f88977b0890e7c6b4326fe8b4078f4b100c6c3f58772736456b0c1ccabf3b4fb"} Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.447720 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-9hlt9"] Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.448664 4558 generic.go:334] "Generic (PLEG): container finished" podID="0b511fac-7a17-433d-8baa-9eefa278bd25" containerID="b9d69bec651538e6c3bb7125cf005d0443912c83fa1175fd1642916e1d9181f3" exitCode=137 Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.453640 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-41bf-account-create-update-ztpg6" 
event={"ID":"e5f57088-ae0f-4326-89fb-a7957da83477","Type":"ContainerStarted","Data":"959fda3de432efcca43d31c3d21aa1e3d6a27a4e4f6ba00d7a2cc9eeab56411b"} Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.474299 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-sb-0_8b45e22f-53b9-4228-a28a-a5db18b6e583/ovsdbserver-sb/0.log" Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.474374 4558 generic.go:334] "Generic (PLEG): container finished" podID="8b45e22f-53b9-4228-a28a-a5db18b6e583" containerID="561616071d5e6e8584628b6c63ccb49a3ccf23901814e4850a47530cd546d416" exitCode=2 Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.474399 4558 generic.go:334] "Generic (PLEG): container finished" podID="8b45e22f-53b9-4228-a28a-a5db18b6e583" containerID="f48902bc3180bcf59f02a8f100bbf7e30cefe4513466bce23fe46f25d1cf1438" exitCode=143 Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.474517 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"8b45e22f-53b9-4228-a28a-a5db18b6e583","Type":"ContainerDied","Data":"561616071d5e6e8584628b6c63ccb49a3ccf23901814e4850a47530cd546d416"} Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.474556 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"8b45e22f-53b9-4228-a28a-a5db18b6e583","Type":"ContainerDied","Data":"f48902bc3180bcf59f02a8f100bbf7e30cefe4513466bce23fe46f25d1cf1438"} Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.474569 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"8b45e22f-53b9-4228-a28a-a5db18b6e583","Type":"ContainerDied","Data":"39e249504e929dc695fa7780d0e4dbc5348077f58efd0a1fff4b4a74e8b3a27a"} Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.474587 4558 scope.go:117] "RemoveContainer" containerID="561616071d5e6e8584628b6c63ccb49a3ccf23901814e4850a47530cd546d416" Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.474786 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.498914 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-nb-0_98f8dc8b-6446-4ba8-b37b-a11ae7414b65/ovsdbserver-nb/0.log" Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.499013 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"98f8dc8b-6446-4ba8-b37b-a11ae7414b65","Type":"ContainerDied","Data":"d718cd641de672fcf4206b38f0077f442a339f62d8e331e2436bbb1183faaea9"} Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.499074 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.501535 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-wlsbh" event={"ID":"52895fa4-c08b-43b8-9f63-cbc8621db11e","Type":"ContainerStarted","Data":"44acec49d98d9ded22b3764b2d7e927cef749f0be77b61d3aa69d8940041d350"} Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.501569 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-wlsbh" event={"ID":"52895fa4-c08b-43b8-9f63-cbc8621db11e","Type":"ContainerStarted","Data":"b188a203f3875e52c33c42111e0cc880025085cb9a11a04452bcf06cd6b045e9"} Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.501801 4558 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openstack-kuttl-tests/root-account-create-update-wlsbh" secret="" err="secret \"galera-openstack-cell1-dockercfg-ftflb\" not found" Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.501846 4558 scope.go:117] "RemoveContainer" containerID="44acec49d98d9ded22b3764b2d7e927cef749f0be77b61d3aa69d8940041d350" Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.515439 4558 generic.go:334] "Generic (PLEG): container finished" podID="7ee90982-23a3-4111-9c4e-159828a51a69" containerID="a9b864e0c7723915ae39749b96750b7b8695818471060aad197d75c66a8cc364" exitCode=143 Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.515501 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"7ee90982-23a3-4111-9c4e-159828a51a69","Type":"ContainerDied","Data":"a9b864e0c7723915ae39749b96750b7b8695818471060aad197d75c66a8cc364"} Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.533594 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-f7c7-account-create-update-hf7rn" event={"ID":"0f5a8541-5b86-4fff-8f94-18b8b5ee3ebf","Type":"ContainerStarted","Data":"94275ce4e738c54ea3dc1f9f5f7f981eb5e6cfb78172d8150a4debc197353c8f"} Jan 20 17:37:42 crc kubenswrapper[4558]: E0120 17:37:42.572534 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:37:42 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:37:42 crc kubenswrapper[4558]: Jan 20 17:37:42 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:37:42 crc kubenswrapper[4558]: Jan 20 17:37:42 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:37:42 crc kubenswrapper[4558]: Jan 20 17:37:42 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:37:42 crc kubenswrapper[4558]: Jan 20 17:37:42 crc kubenswrapper[4558]: if [ -n "neutron" ]; then Jan 20 17:37:42 crc kubenswrapper[4558]: GRANT_DATABASE="neutron" Jan 20 17:37:42 crc kubenswrapper[4558]: else Jan 20 17:37:42 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:37:42 crc kubenswrapper[4558]: fi Jan 20 17:37:42 crc kubenswrapper[4558]: Jan 20 17:37:42 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:37:42 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:37:42 crc kubenswrapper[4558]: # 2. 
MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:37:42 crc kubenswrapper[4558]: # 3. create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:37:42 crc kubenswrapper[4558]: # support updates Jan 20 17:37:42 crc kubenswrapper[4558]: Jan 20 17:37:42 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:37:42 crc kubenswrapper[4558]: E0120 17:37:42.573925 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"neutron-db-secret\\\" not found\"" pod="openstack-kuttl-tests/neutron-f7c7-account-create-update-hf7rn" podUID="0f5a8541-5b86-4fff-8f94-18b8b5ee3ebf" Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.597356 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31071f28-1020-4bbd-b122-c1dcf9a67a14" path="/var/lib/kubelet/pods/31071f28-1020-4bbd-b122-c1dcf9a67a14/volumes" Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.598083 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="323fa0e2-7555-4fd5-bd80-65474fa83b69" path="/var/lib/kubelet/pods/323fa0e2-7555-4fd5-bd80-65474fa83b69/volumes" Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.598704 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3850786b-f9ee-4ecb-a15e-ccad25577f01" path="/var/lib/kubelet/pods/3850786b-f9ee-4ecb-a15e-ccad25577f01/volumes" Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.600366 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="54807f10-de72-4664-a757-a1c048e4d5d7" path="/var/lib/kubelet/pods/54807f10-de72-4664-a757-a1c048e4d5d7/volumes" Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.601111 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6c4c8564-f383-46e0-8bc9-d7b68aaa5bcc" path="/var/lib/kubelet/pods/6c4c8564-f383-46e0-8bc9-d7b68aaa5bcc/volumes" Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.601753 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="70a900e8-163c-4d0e-8273-1697de2dba32" path="/var/lib/kubelet/pods/70a900e8-163c-4d0e-8273-1697de2dba32/volumes" Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.602368 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="91098276-3b20-48eb-a0c6-0126a50d19af" path="/var/lib/kubelet/pods/91098276-3b20-48eb-a0c6-0126a50d19af/volumes" Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.603649 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="957aa771-87eb-4421-b9bd-42f510ce1f8b" path="/var/lib/kubelet/pods/957aa771-87eb-4421-b9bd-42f510ce1f8b/volumes" Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.604265 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="95b44804-20a9-4636-bd47-551f819b8d53" path="/var/lib/kubelet/pods/95b44804-20a9-4636-bd47-551f819b8d53/volumes" Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.604855 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a26d1360-434d-4bc3-b02c-7158daac68d8" path="/var/lib/kubelet/pods/a26d1360-434d-4bc3-b02c-7158daac68d8/volumes" Jan 20 17:37:42 crc kubenswrapper[4558]: E0120 17:37:42.610757 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-cell1-scripts: configmap "openstack-cell1-scripts" not found Jan 20 17:37:42 crc kubenswrapper[4558]: E0120 17:37:42.610859 4558 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/configmap/52895fa4-c08b-43b8-9f63-cbc8621db11e-operator-scripts podName:52895fa4-c08b-43b8-9f63-cbc8621db11e nodeName:}" failed. No retries permitted until 2026-01-20 17:37:43.110841424 +0000 UTC m=+3356.871179392 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/52895fa4-c08b-43b8-9f63-cbc8621db11e-operator-scripts") pod "root-account-create-update-wlsbh" (UID: "52895fa4-c08b-43b8-9f63-cbc8621db11e") : configmap "openstack-cell1-scripts" not found Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.612153 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a9c95d35-61c0-4abb-92a1-8b33d73139ab" path="/var/lib/kubelet/pods/a9c95d35-61c0-4abb-92a1-8b33d73139ab/volumes" Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.615139 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ac18e294-d27c-4012-91e4-16b6b875b781" path="/var/lib/kubelet/pods/ac18e294-d27c-4012-91e4-16b6b875b781/volumes" Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.615845 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c58c8dea-46da-43de-801f-68b8700c27e1" path="/var/lib/kubelet/pods/c58c8dea-46da-43de-801f-68b8700c27e1/volumes" Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.616511 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ce4270c3-42ac-4529-8dad-a9da8b758d43" path="/var/lib/kubelet/pods/ce4270c3-42ac-4529-8dad-a9da8b758d43/volumes" Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.621446 4558 scope.go:117] "RemoveContainer" containerID="f48902bc3180bcf59f02a8f100bbf7e30cefe4513466bce23fe46f25d1cf1438" Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.625482 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cf3515ce-a794-4635-8476-d84e98e393b8" path="/var/lib/kubelet/pods/cf3515ce-a794-4635-8476-d84e98e393b8/volumes" Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.626222 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d656fa11-4336-4ddf-a168-af6f682bfaab" path="/var/lib/kubelet/pods/d656fa11-4336-4ddf-a168-af6f682bfaab/volumes" Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.627290 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e0e37c27-e17c-46c1-9e53-ce2a92c13f02" path="/var/lib/kubelet/pods/e0e37c27-e17c-46c1-9e53-ce2a92c13f02/volumes" Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.627943 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eaa89d12-4192-4a41-b6f5-fffc5e1121e0" path="/var/lib/kubelet/pods/eaa89d12-4192-4a41-b6f5-fffc5e1121e0/volumes" Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.629822 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eb025a4d-55a2-4b4d-becb-6250cdd0055b" path="/var/lib/kubelet/pods/eb025a4d-55a2-4b4d-becb-6250cdd0055b/volumes" Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.633410 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-db-sync-s8ph9"] Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.655878 4558 scope.go:117] "RemoveContainer" containerID="561616071d5e6e8584628b6c63ccb49a3ccf23901814e4850a47530cd546d416" Jan 20 17:37:42 crc kubenswrapper[4558]: E0120 17:37:42.656779 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"561616071d5e6e8584628b6c63ccb49a3ccf23901814e4850a47530cd546d416\": container with ID starting with 561616071d5e6e8584628b6c63ccb49a3ccf23901814e4850a47530cd546d416 not found: ID does not exist" containerID="561616071d5e6e8584628b6c63ccb49a3ccf23901814e4850a47530cd546d416" Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.656891 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"561616071d5e6e8584628b6c63ccb49a3ccf23901814e4850a47530cd546d416"} err="failed to get container status \"561616071d5e6e8584628b6c63ccb49a3ccf23901814e4850a47530cd546d416\": rpc error: code = NotFound desc = could not find container \"561616071d5e6e8584628b6c63ccb49a3ccf23901814e4850a47530cd546d416\": container with ID starting with 561616071d5e6e8584628b6c63ccb49a3ccf23901814e4850a47530cd546d416 not found: ID does not exist" Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.656992 4558 scope.go:117] "RemoveContainer" containerID="f48902bc3180bcf59f02a8f100bbf7e30cefe4513466bce23fe46f25d1cf1438" Jan 20 17:37:42 crc kubenswrapper[4558]: E0120 17:37:42.659978 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f48902bc3180bcf59f02a8f100bbf7e30cefe4513466bce23fe46f25d1cf1438\": container with ID starting with f48902bc3180bcf59f02a8f100bbf7e30cefe4513466bce23fe46f25d1cf1438 not found: ID does not exist" containerID="f48902bc3180bcf59f02a8f100bbf7e30cefe4513466bce23fe46f25d1cf1438" Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.660038 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f48902bc3180bcf59f02a8f100bbf7e30cefe4513466bce23fe46f25d1cf1438"} err="failed to get container status \"f48902bc3180bcf59f02a8f100bbf7e30cefe4513466bce23fe46f25d1cf1438\": rpc error: code = NotFound desc = could not find container \"f48902bc3180bcf59f02a8f100bbf7e30cefe4513466bce23fe46f25d1cf1438\": container with ID starting with f48902bc3180bcf59f02a8f100bbf7e30cefe4513466bce23fe46f25d1cf1438 not found: ID does not exist" Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.660055 4558 scope.go:117] "RemoveContainer" containerID="561616071d5e6e8584628b6c63ccb49a3ccf23901814e4850a47530cd546d416" Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.660992 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"561616071d5e6e8584628b6c63ccb49a3ccf23901814e4850a47530cd546d416"} err="failed to get container status \"561616071d5e6e8584628b6c63ccb49a3ccf23901814e4850a47530cd546d416\": rpc error: code = NotFound desc = could not find container \"561616071d5e6e8584628b6c63ccb49a3ccf23901814e4850a47530cd546d416\": container with ID starting with 561616071d5e6e8584628b6c63ccb49a3ccf23901814e4850a47530cd546d416 not found: ID does not exist" Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.661034 4558 scope.go:117] "RemoveContainer" containerID="f48902bc3180bcf59f02a8f100bbf7e30cefe4513466bce23fe46f25d1cf1438" Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.667745 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f48902bc3180bcf59f02a8f100bbf7e30cefe4513466bce23fe46f25d1cf1438"} err="failed to get container status \"f48902bc3180bcf59f02a8f100bbf7e30cefe4513466bce23fe46f25d1cf1438\": rpc error: code = NotFound desc = could not find container \"f48902bc3180bcf59f02a8f100bbf7e30cefe4513466bce23fe46f25d1cf1438\": container with ID starting with 
f48902bc3180bcf59f02a8f100bbf7e30cefe4513466bce23fe46f25d1cf1438 not found: ID does not exist" Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.667792 4558 scope.go:117] "RemoveContainer" containerID="f56a163a8189df30e61d8f643eac8347a79e40378fdf5ec2740f041b866096e0" Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.685601 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-db-sync-s8ph9"] Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.700983 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstackclient" Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.707921 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.708600 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podUID="0def1343-9e4b-4f74-84bd-3212688b59ce" containerName="nova-cell1-conductor-conductor" containerID="cri-o://1c79880b61b7b00d3acd244cc4b46331c0b20f7998564a054c816dfa317eab5b" gracePeriod=30 Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.712069 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b6l2l\" (UniqueName: \"kubernetes.io/projected/0b511fac-7a17-433d-8baa-9eefa278bd25-kube-api-access-b6l2l\") pod \"0b511fac-7a17-433d-8baa-9eefa278bd25\" (UID: \"0b511fac-7a17-433d-8baa-9eefa278bd25\") " Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.712205 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/0b511fac-7a17-433d-8baa-9eefa278bd25-openstack-config\") pod \"0b511fac-7a17-433d-8baa-9eefa278bd25\" (UID: \"0b511fac-7a17-433d-8baa-9eefa278bd25\") " Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.712356 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b511fac-7a17-433d-8baa-9eefa278bd25-combined-ca-bundle\") pod \"0b511fac-7a17-433d-8baa-9eefa278bd25\" (UID: \"0b511fac-7a17-433d-8baa-9eefa278bd25\") " Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.712387 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/0b511fac-7a17-433d-8baa-9eefa278bd25-openstack-config-secret\") pod \"0b511fac-7a17-433d-8baa-9eefa278bd25\" (UID: \"0b511fac-7a17-433d-8baa-9eefa278bd25\") " Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.715676 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b511fac-7a17-433d-8baa-9eefa278bd25-kube-api-access-b6l2l" (OuterVolumeSpecName: "kube-api-access-b6l2l") pod "0b511fac-7a17-433d-8baa-9eefa278bd25" (UID: "0b511fac-7a17-433d-8baa-9eefa278bd25"). InnerVolumeSpecName "kube-api-access-b6l2l". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.740759 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b511fac-7a17-433d-8baa-9eefa278bd25-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "0b511fac-7a17-433d-8baa-9eefa278bd25" (UID: "0b511fac-7a17-433d-8baa-9eefa278bd25"). InnerVolumeSpecName "openstack-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.747450 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b511fac-7a17-433d-8baa-9eefa278bd25-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0b511fac-7a17-433d-8baa-9eefa278bd25" (UID: "0b511fac-7a17-433d-8baa-9eefa278bd25"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.751389 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.755842 4558 scope.go:117] "RemoveContainer" containerID="ed49f3f83c5373eef322ba453c4777bc515d3e9dfcde8de0a5408d13b285222c" Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.756008 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.761608 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.761848 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="0d2b6a10-ca95-4d72-a80b-1706822a07a7" containerName="nova-cell0-conductor-conductor" containerID="cri-o://29de3130a0b6727cccd3d029a221407989f24f198d0147b0d9d994fbbe61ae64" gracePeriod=30 Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.767042 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.773243 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b511fac-7a17-433d-8baa-9eefa278bd25-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "0b511fac-7a17-433d-8baa-9eefa278bd25" (UID: "0b511fac-7a17-433d-8baa-9eefa278bd25"). InnerVolumeSpecName "openstack-config-secret". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.773307 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.785211 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-db-sync-qwwkb"] Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.786437 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-db-sync-qwwkb"] Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.817949 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b511fac-7a17-433d-8baa-9eefa278bd25-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.817977 4558 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/0b511fac-7a17-433d-8baa-9eefa278bd25-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.817990 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b6l2l\" (UniqueName: \"kubernetes.io/projected/0b511fac-7a17-433d-8baa-9eefa278bd25-kube-api-access-b6l2l\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.818001 4558 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/0b511fac-7a17-433d-8baa-9eefa278bd25-openstack-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.943457 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-proxy-96bd7cb78-txfmk"] Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.943686 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-proxy-96bd7cb78-txfmk" podUID="c7ffc3d3-5db6-484c-b9c1-d065e7177eb7" containerName="proxy-httpd" containerID="cri-o://1c07e4b28b7cf0b4abe599ad445f1580d0e65d644cc547c83bfc14814b631d5c" gracePeriod=30 Jan 20 17:37:42 crc kubenswrapper[4558]: I0120 17:37:42.943747 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-proxy-96bd7cb78-txfmk" podUID="c7ffc3d3-5db6-484c-b9c1-d065e7177eb7" containerName="proxy-server" containerID="cri-o://cc29bf400e020c3de6a91d31522715a48fb29ae6d1627268b6c2963ee53e1761" gracePeriod=30 Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.051826 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-41bf-account-create-update-ztpg6" Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.068641 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.125035 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-246h4\" (UniqueName: \"kubernetes.io/projected/e5f57088-ae0f-4326-89fb-a7957da83477-kube-api-access-246h4\") pod \"e5f57088-ae0f-4326-89fb-a7957da83477\" (UID: \"e5f57088-ae0f-4326-89fb-a7957da83477\") " Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.125103 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/f09a19a5-d5eb-4466-9cb5-309cd246004e-kube-state-metrics-tls-config\") pod \"f09a19a5-d5eb-4466-9cb5-309cd246004e\" (UID: \"f09a19a5-d5eb-4466-9cb5-309cd246004e\") " Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.125251 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f09a19a5-d5eb-4466-9cb5-309cd246004e-combined-ca-bundle\") pod \"f09a19a5-d5eb-4466-9cb5-309cd246004e\" (UID: \"f09a19a5-d5eb-4466-9cb5-309cd246004e\") " Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.125354 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e5f57088-ae0f-4326-89fb-a7957da83477-operator-scripts\") pod \"e5f57088-ae0f-4326-89fb-a7957da83477\" (UID: \"e5f57088-ae0f-4326-89fb-a7957da83477\") " Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.125389 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/f09a19a5-d5eb-4466-9cb5-309cd246004e-kube-state-metrics-tls-certs\") pod \"f09a19a5-d5eb-4466-9cb5-309cd246004e\" (UID: \"f09a19a5-d5eb-4466-9cb5-309cd246004e\") " Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.125436 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hwl42\" (UniqueName: \"kubernetes.io/projected/f09a19a5-d5eb-4466-9cb5-309cd246004e-kube-api-access-hwl42\") pod \"f09a19a5-d5eb-4466-9cb5-309cd246004e\" (UID: \"f09a19a5-d5eb-4466-9cb5-309cd246004e\") " Jan 20 17:37:43 crc kubenswrapper[4558]: E0120 17:37:43.126035 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-cell1-scripts: configmap "openstack-cell1-scripts" not found Jan 20 17:37:43 crc kubenswrapper[4558]: E0120 17:37:43.126102 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/52895fa4-c08b-43b8-9f63-cbc8621db11e-operator-scripts podName:52895fa4-c08b-43b8-9f63-cbc8621db11e nodeName:}" failed. No retries permitted until 2026-01-20 17:37:44.126083083 +0000 UTC m=+3357.886421050 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/52895fa4-c08b-43b8-9f63-cbc8621db11e-operator-scripts") pod "root-account-create-update-wlsbh" (UID: "52895fa4-c08b-43b8-9f63-cbc8621db11e") : configmap "openstack-cell1-scripts" not found Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.126978 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e5f57088-ae0f-4326-89fb-a7957da83477-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e5f57088-ae0f-4326-89fb-a7957da83477" (UID: "e5f57088-ae0f-4326-89fb-a7957da83477"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.131197 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e5f57088-ae0f-4326-89fb-a7957da83477-kube-api-access-246h4" (OuterVolumeSpecName: "kube-api-access-246h4") pod "e5f57088-ae0f-4326-89fb-a7957da83477" (UID: "e5f57088-ae0f-4326-89fb-a7957da83477"). InnerVolumeSpecName "kube-api-access-246h4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.150821 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f09a19a5-d5eb-4466-9cb5-309cd246004e-kube-api-access-hwl42" (OuterVolumeSpecName: "kube-api-access-hwl42") pod "f09a19a5-d5eb-4466-9cb5-309cd246004e" (UID: "f09a19a5-d5eb-4466-9cb5-309cd246004e"). InnerVolumeSpecName "kube-api-access-hwl42". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.154913 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f09a19a5-d5eb-4466-9cb5-309cd246004e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f09a19a5-d5eb-4466-9cb5-309cd246004e" (UID: "f09a19a5-d5eb-4466-9cb5-309cd246004e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.156698 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f09a19a5-d5eb-4466-9cb5-309cd246004e-kube-state-metrics-tls-config" (OuterVolumeSpecName: "kube-state-metrics-tls-config") pod "f09a19a5-d5eb-4466-9cb5-309cd246004e" (UID: "f09a19a5-d5eb-4466-9cb5-309cd246004e"). InnerVolumeSpecName "kube-state-metrics-tls-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.177312 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f09a19a5-d5eb-4466-9cb5-309cd246004e-kube-state-metrics-tls-certs" (OuterVolumeSpecName: "kube-state-metrics-tls-certs") pod "f09a19a5-d5eb-4466-9cb5-309cd246004e" (UID: "f09a19a5-d5eb-4466-9cb5-309cd246004e"). InnerVolumeSpecName "kube-state-metrics-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.228341 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f09a19a5-d5eb-4466-9cb5-309cd246004e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.228368 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e5f57088-ae0f-4326-89fb-a7957da83477-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.228381 4558 reconciler_common.go:293] "Volume detached for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/f09a19a5-d5eb-4466-9cb5-309cd246004e-kube-state-metrics-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.228392 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hwl42\" (UniqueName: \"kubernetes.io/projected/f09a19a5-d5eb-4466-9cb5-309cd246004e-kube-api-access-hwl42\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.228402 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-246h4\" (UniqueName: \"kubernetes.io/projected/e5f57088-ae0f-4326-89fb-a7957da83477-kube-api-access-246h4\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.228412 4558 reconciler_common.go:293] "Volume detached for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/f09a19a5-d5eb-4466-9cb5-309cd246004e-kube-state-metrics-tls-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.249660 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:37:43 crc kubenswrapper[4558]: E0120 17:37:43.330177 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 20 17:37:43 crc kubenswrapper[4558]: E0120 17:37:43.330255 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/0696a635-5dc9-46e9-8502-47fab9ff761c-config-data podName:0696a635-5dc9-46e9-8502-47fab9ff761c nodeName:}" failed. No retries permitted until 2026-01-20 17:37:47.330235045 +0000 UTC m=+3361.090573011 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/0696a635-5dc9-46e9-8502-47fab9ff761c-config-data") pod "rabbitmq-server-0" (UID: "0696a635-5dc9-46e9-8502-47fab9ff761c") : configmap "rabbitmq-config-data" not found Jan 20 17:37:43 crc kubenswrapper[4558]: E0120 17:37:43.330877 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="29de3130a0b6727cccd3d029a221407989f24f198d0147b0d9d994fbbe61ae64" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:37:43 crc kubenswrapper[4558]: E0120 17:37:43.334143 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="29de3130a0b6727cccd3d029a221407989f24f198d0147b0d9d994fbbe61ae64" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:37:43 crc kubenswrapper[4558]: E0120 17:37:43.335541 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="29de3130a0b6727cccd3d029a221407989f24f198d0147b0d9d994fbbe61ae64" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:37:43 crc kubenswrapper[4558]: E0120 17:37:43.335596 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="0d2b6a10-ca95-4d72-a80b-1706822a07a7" containerName="nova-cell0-conductor-conductor" Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.432215 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fd9bebc-9868-4970-bc1f-d047286980d0-combined-ca-bundle\") pod \"4fd9bebc-9868-4970-bc1f-d047286980d0\" (UID: \"4fd9bebc-9868-4970-bc1f-d047286980d0\") " Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.432369 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/4fd9bebc-9868-4970-bc1f-d047286980d0-vencrypt-tls-certs\") pod \"4fd9bebc-9868-4970-bc1f-d047286980d0\" (UID: \"4fd9bebc-9868-4970-bc1f-d047286980d0\") " Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.432548 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4fd9bebc-9868-4970-bc1f-d047286980d0-config-data\") pod \"4fd9bebc-9868-4970-bc1f-d047286980d0\" (UID: \"4fd9bebc-9868-4970-bc1f-d047286980d0\") " Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.432691 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mskc9\" (UniqueName: \"kubernetes.io/projected/4fd9bebc-9868-4970-bc1f-d047286980d0-kube-api-access-mskc9\") pod \"4fd9bebc-9868-4970-bc1f-d047286980d0\" (UID: \"4fd9bebc-9868-4970-bc1f-d047286980d0\") " Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.432836 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/4fd9bebc-9868-4970-bc1f-d047286980d0-nova-novncproxy-tls-certs\") pod 
\"4fd9bebc-9868-4970-bc1f-d047286980d0\" (UID: \"4fd9bebc-9868-4970-bc1f-d047286980d0\") " Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.440489 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4fd9bebc-9868-4970-bc1f-d047286980d0-kube-api-access-mskc9" (OuterVolumeSpecName: "kube-api-access-mskc9") pod "4fd9bebc-9868-4970-bc1f-d047286980d0" (UID: "4fd9bebc-9868-4970-bc1f-d047286980d0"). InnerVolumeSpecName "kube-api-access-mskc9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.477361 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4fd9bebc-9868-4970-bc1f-d047286980d0-config-data" (OuterVolumeSpecName: "config-data") pod "4fd9bebc-9868-4970-bc1f-d047286980d0" (UID: "4fd9bebc-9868-4970-bc1f-d047286980d0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.497220 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4fd9bebc-9868-4970-bc1f-d047286980d0-nova-novncproxy-tls-certs" (OuterVolumeSpecName: "nova-novncproxy-tls-certs") pod "4fd9bebc-9868-4970-bc1f-d047286980d0" (UID: "4fd9bebc-9868-4970-bc1f-d047286980d0"). InnerVolumeSpecName "nova-novncproxy-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.499706 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4fd9bebc-9868-4970-bc1f-d047286980d0-vencrypt-tls-certs" (OuterVolumeSpecName: "vencrypt-tls-certs") pod "4fd9bebc-9868-4970-bc1f-d047286980d0" (UID: "4fd9bebc-9868-4970-bc1f-d047286980d0"). InnerVolumeSpecName "vencrypt-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.507496 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4fd9bebc-9868-4970-bc1f-d047286980d0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4fd9bebc-9868-4970-bc1f-d047286980d0" (UID: "4fd9bebc-9868-4970-bc1f-d047286980d0"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.539795 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fd9bebc-9868-4970-bc1f-d047286980d0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.539838 4558 reconciler_common.go:293] "Volume detached for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/4fd9bebc-9868-4970-bc1f-d047286980d0-vencrypt-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.539852 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4fd9bebc-9868-4970-bc1f-d047286980d0-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.539880 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mskc9\" (UniqueName: \"kubernetes.io/projected/4fd9bebc-9868-4970-bc1f-d047286980d0-kube-api-access-mskc9\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.539893 4558 reconciler_common.go:293] "Volume detached for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/4fd9bebc-9868-4970-bc1f-d047286980d0-nova-novncproxy-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.557369 4558 generic.go:334] "Generic (PLEG): container finished" podID="f09a19a5-d5eb-4466-9cb5-309cd246004e" containerID="e618252041bc2c7c61af3ff12fe2cfdf9bcc601b2bc8675901ba9e059426eff6" exitCode=2 Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.557469 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"f09a19a5-d5eb-4466-9cb5-309cd246004e","Type":"ContainerDied","Data":"e618252041bc2c7c61af3ff12fe2cfdf9bcc601b2bc8675901ba9e059426eff6"} Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.557540 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"f09a19a5-d5eb-4466-9cb5-309cd246004e","Type":"ContainerDied","Data":"4fafc9dc1171b3e13b4b66651e32b5c393c05ec4d403b508afbb604d16363071"} Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.557566 4558 scope.go:117] "RemoveContainer" containerID="e618252041bc2c7c61af3ff12fe2cfdf9bcc601b2bc8675901ba9e059426eff6" Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.557682 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.559716 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-41bf-account-create-update-ztpg6" event={"ID":"e5f57088-ae0f-4326-89fb-a7957da83477","Type":"ContainerDied","Data":"959fda3de432efcca43d31c3d21aa1e3d6a27a4e4f6ba00d7a2cc9eeab56411b"} Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.559780 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-41bf-account-create-update-ztpg6" Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.593819 4558 generic.go:334] "Generic (PLEG): container finished" podID="1addf16e-1af6-431b-aff1-05c48a952f5a" containerID="d7f94ffeca974ded0f05a2d4e0bd7bdbcb45172d758621d01d81132148b96b8b" exitCode=0 Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.593851 4558 generic.go:334] "Generic (PLEG): container finished" podID="1addf16e-1af6-431b-aff1-05c48a952f5a" containerID="9c1da6a399e8373ade66666f30fdfad18e61c1a4529147d6a10d0385c82a426f" exitCode=2 Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.593863 4558 generic.go:334] "Generic (PLEG): container finished" podID="1addf16e-1af6-431b-aff1-05c48a952f5a" containerID="0fde5ea8da11b84a29f2a0ac302c4c48b6a41c64a33f837f39b3bbaaf4487feb" exitCode=0 Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.593891 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"1addf16e-1af6-431b-aff1-05c48a952f5a","Type":"ContainerDied","Data":"d7f94ffeca974ded0f05a2d4e0bd7bdbcb45172d758621d01d81132148b96b8b"} Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.593981 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"1addf16e-1af6-431b-aff1-05c48a952f5a","Type":"ContainerDied","Data":"9c1da6a399e8373ade66666f30fdfad18e61c1a4529147d6a10d0385c82a426f"} Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.593999 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"1addf16e-1af6-431b-aff1-05c48a952f5a","Type":"ContainerDied","Data":"0fde5ea8da11b84a29f2a0ac302c4c48b6a41c64a33f837f39b3bbaaf4487feb"} Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.611198 4558 generic.go:334] "Generic (PLEG): container finished" podID="11d1aa99-639c-451a-876e-59de6098e407" containerID="db329d06eb52b93ff0cb7bb5890e9f6c1ce660d1bfa28f5bef02917fc16526c6" exitCode=143 Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.611375 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-6d4445bf46-57dqj" event={"ID":"11d1aa99-639c-451a-876e-59de6098e407","Type":"ContainerDied","Data":"db329d06eb52b93ff0cb7bb5890e9f6c1ce660d1bfa28f5bef02917fc16526c6"} Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.631510 4558 generic.go:334] "Generic (PLEG): container finished" podID="c7ffc3d3-5db6-484c-b9c1-d065e7177eb7" containerID="1c07e4b28b7cf0b4abe599ad445f1580d0e65d644cc547c83bfc14814b631d5c" exitCode=0 Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.631625 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-96bd7cb78-txfmk" event={"ID":"c7ffc3d3-5db6-484c-b9c1-d065e7177eb7","Type":"ContainerDied","Data":"1c07e4b28b7cf0b4abe599ad445f1580d0e65d644cc547c83bfc14814b631d5c"} Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.636559 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.637493 4558 scope.go:117] "RemoveContainer" containerID="e618252041bc2c7c61af3ff12fe2cfdf9bcc601b2bc8675901ba9e059426eff6" Jan 20 17:37:43 crc kubenswrapper[4558]: E0120 17:37:43.639938 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"e618252041bc2c7c61af3ff12fe2cfdf9bcc601b2bc8675901ba9e059426eff6\": container with ID starting with e618252041bc2c7c61af3ff12fe2cfdf9bcc601b2bc8675901ba9e059426eff6 not found: ID does not exist" containerID="e618252041bc2c7c61af3ff12fe2cfdf9bcc601b2bc8675901ba9e059426eff6" Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.639967 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e618252041bc2c7c61af3ff12fe2cfdf9bcc601b2bc8675901ba9e059426eff6"} err="failed to get container status \"e618252041bc2c7c61af3ff12fe2cfdf9bcc601b2bc8675901ba9e059426eff6\": rpc error: code = NotFound desc = could not find container \"e618252041bc2c7c61af3ff12fe2cfdf9bcc601b2bc8675901ba9e059426eff6\": container with ID starting with e618252041bc2c7c61af3ff12fe2cfdf9bcc601b2bc8675901ba9e059426eff6 not found: ID does not exist" Jan 20 17:37:43 crc kubenswrapper[4558]: E0120 17:37:43.641752 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Jan 20 17:37:43 crc kubenswrapper[4558]: E0120 17:37:43.641795 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/0c6a6265-cf90-4039-9200-ba478d612baa-config-data podName:0c6a6265-cf90-4039-9200-ba478d612baa nodeName:}" failed. No retries permitted until 2026-01-20 17:37:47.641781981 +0000 UTC m=+3361.402119948 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/0c6a6265-cf90-4039-9200-ba478d612baa-config-data") pod "rabbitmq-cell1-server-0" (UID: "0c6a6265-cf90-4039-9200-ba478d612baa") : configmap "rabbitmq-cell1-config-data" not found Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.643605 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstackclient" Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.652963 4558 generic.go:334] "Generic (PLEG): container finished" podID="52895fa4-c08b-43b8-9f63-cbc8621db11e" containerID="44acec49d98d9ded22b3764b2d7e927cef749f0be77b61d3aa69d8940041d350" exitCode=1 Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.652996 4558 generic.go:334] "Generic (PLEG): container finished" podID="52895fa4-c08b-43b8-9f63-cbc8621db11e" containerID="c92d1dec67e80534ab703ca22d222cf507194a905156371d5521a2bcb6e6fdca" exitCode=1 Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.654070 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-wlsbh" event={"ID":"52895fa4-c08b-43b8-9f63-cbc8621db11e","Type":"ContainerDied","Data":"44acec49d98d9ded22b3764b2d7e927cef749f0be77b61d3aa69d8940041d350"} Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.654104 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-wlsbh" event={"ID":"52895fa4-c08b-43b8-9f63-cbc8621db11e","Type":"ContainerDied","Data":"c92d1dec67e80534ab703ca22d222cf507194a905156371d5521a2bcb6e6fdca"} Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.654127 4558 scope.go:117] "RemoveContainer" containerID="b9d69bec651538e6c3bb7125cf005d0443912c83fa1175fd1642916e1d9181f3" Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.664297 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.671909 4558 generic.go:334] "Generic (PLEG): container finished" podID="1fff3747-796d-420a-aff0-846dfc615df9" containerID="f5120513f44b4adc059dbacc3c6c22162c7eee356a5828f446ded77ed4759a9f" exitCode=0 Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.671974 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"1fff3747-796d-420a-aff0-846dfc615df9","Type":"ContainerDied","Data":"f5120513f44b4adc059dbacc3c6c22162c7eee356a5828f446ded77ed4759a9f"} Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.677909 4558 generic.go:334] "Generic (PLEG): container finished" podID="906beedc-3fa2-4d6e-a6e8-485ca2fb1082" containerID="9da02a19e6777029509b4f8838ad4012f0bd07388cadfbfa55cbc40a5673b825" exitCode=143 Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.677978 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"906beedc-3fa2-4d6e-a6e8-485ca2fb1082","Type":"ContainerDied","Data":"9da02a19e6777029509b4f8838ad4012f0bd07388cadfbfa55cbc40a5673b825"} Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.700487 4558 scope.go:117] "RemoveContainer" containerID="44acec49d98d9ded22b3764b2d7e927cef749f0be77b61d3aa69d8940041d350" Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.700876 4558 generic.go:334] "Generic (PLEG): container finished" podID="cff9fbf3-9b9a-4350-8589-8bfe5543d79f" containerID="e6448305eeffbe8d3fb9ec22b8af62dc179b30760ae423dad6ce0772135d6933" exitCode=0 Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.701077 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-41bf-account-create-update-ztpg6"] Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.701106 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-9hlt9" 
event={"ID":"cff9fbf3-9b9a-4350-8589-8bfe5543d79f","Type":"ContainerDied","Data":"e6448305eeffbe8d3fb9ec22b8af62dc179b30760ae423dad6ce0772135d6933"} Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.701141 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-9hlt9" event={"ID":"cff9fbf3-9b9a-4350-8589-8bfe5543d79f","Type":"ContainerStarted","Data":"dab4099481fe459aaa7225feaa6d94286a3cae316d7e9858f7841af3998a6df5"} Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.711456 4558 generic.go:334] "Generic (PLEG): container finished" podID="640de3b3-60e1-41b8-ab00-22e375bad65c" containerID="6c5d77de3e10ffa19cd49dc2e45df3fcf7c673448b447d8327c2137ebcc78c33" exitCode=143 Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.711535 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-dd549b649-grdrm" event={"ID":"640de3b3-60e1-41b8-ab00-22e375bad65c","Type":"ContainerDied","Data":"6c5d77de3e10ffa19cd49dc2e45df3fcf7c673448b447d8327c2137ebcc78c33"} Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.711586 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell0-41bf-account-create-update-ztpg6"] Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.722139 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-2fe2-account-create-update-t264m" event={"ID":"cdf89a68-b15b-4f2d-9016-55fb28011197","Type":"ContainerStarted","Data":"694277bab699d2d3b5f0bd6717228950d8203ba374538cd7f1fad15bc43468a6"} Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.762730 4558 generic.go:334] "Generic (PLEG): container finished" podID="4fd9bebc-9868-4970-bc1f-d047286980d0" containerID="6517bf0111713477475cc09d3cf93d2400d5d1a9f549d2bfca4f578e113b540a" exitCode=0 Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.762958 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"4fd9bebc-9868-4970-bc1f-d047286980d0","Type":"ContainerDied","Data":"6517bf0111713477475cc09d3cf93d2400d5d1a9f549d2bfca4f578e113b540a"} Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.762986 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"4fd9bebc-9868-4970-bc1f-d047286980d0","Type":"ContainerDied","Data":"d99151254e0a3c0a84117324083d366981d38a46a10d9178906321cdbc97ddc3"} Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.763046 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.872437 4558 scope.go:117] "RemoveContainer" containerID="44acec49d98d9ded22b3764b2d7e927cef749f0be77b61d3aa69d8940041d350" Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.872472 4558 generic.go:334] "Generic (PLEG): container finished" podID="94062d42-28cb-4c8a-afa4-f51458dedc6c" containerID="bd3938888a8a3a7fda5215bb19bd64fb769bd0151d4bb989be309f5b70b3ad7c" exitCode=143 Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.872597 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-64bd4857c6-2s9h4" event={"ID":"94062d42-28cb-4c8a-afa4-f51458dedc6c","Type":"ContainerDied","Data":"bd3938888a8a3a7fda5215bb19bd64fb769bd0151d4bb989be309f5b70b3ad7c"} Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.886305 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-2d06-account-create-update-ht2vp"] Jan 20 17:37:43 crc kubenswrapper[4558]: E0120 17:37:43.889885 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"44acec49d98d9ded22b3764b2d7e927cef749f0be77b61d3aa69d8940041d350\": container with ID starting with 44acec49d98d9ded22b3764b2d7e927cef749f0be77b61d3aa69d8940041d350 not found: ID does not exist" containerID="44acec49d98d9ded22b3764b2d7e927cef749f0be77b61d3aa69d8940041d350" Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.889928 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"44acec49d98d9ded22b3764b2d7e927cef749f0be77b61d3aa69d8940041d350"} err="failed to get container status \"44acec49d98d9ded22b3764b2d7e927cef749f0be77b61d3aa69d8940041d350\": rpc error: code = NotFound desc = could not find container \"44acec49d98d9ded22b3764b2d7e927cef749f0be77b61d3aa69d8940041d350\": container with ID starting with 44acec49d98d9ded22b3764b2d7e927cef749f0be77b61d3aa69d8940041d350 not found: ID does not exist" Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.889960 4558 scope.go:117] "RemoveContainer" containerID="6517bf0111713477475cc09d3cf93d2400d5d1a9f549d2bfca4f578e113b540a" Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.896865 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/cinder-api-0" podUID="e1a1ac97-a530-4ba0-8c9b-835a2b576c8d" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.0.155:8776/healthcheck\": read tcp 10.217.0.2:35380->10.217.0.155:8776: read: connection reset by peer" Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.898901 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-2d06-account-create-update-ht2vp"] Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.934095 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-2d06-account-create-update-2wqtq"] Jan 20 17:37:43 crc kubenswrapper[4558]: E0120 17:37:43.934566 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b45e22f-53b9-4228-a28a-a5db18b6e583" containerName="ovsdbserver-sb" Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.934592 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b45e22f-53b9-4228-a28a-a5db18b6e583" containerName="ovsdbserver-sb" Jan 20 17:37:43 crc kubenswrapper[4558]: E0120 17:37:43.934612 4558 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="98f8dc8b-6446-4ba8-b37b-a11ae7414b65" containerName="openstack-network-exporter" Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.934618 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="98f8dc8b-6446-4ba8-b37b-a11ae7414b65" containerName="openstack-network-exporter" Jan 20 17:37:43 crc kubenswrapper[4558]: E0120 17:37:43.934629 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="98f8dc8b-6446-4ba8-b37b-a11ae7414b65" containerName="ovsdbserver-nb" Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.934637 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="98f8dc8b-6446-4ba8-b37b-a11ae7414b65" containerName="ovsdbserver-nb" Jan 20 17:37:43 crc kubenswrapper[4558]: E0120 17:37:43.934644 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f09a19a5-d5eb-4466-9cb5-309cd246004e" containerName="kube-state-metrics" Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.934650 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f09a19a5-d5eb-4466-9cb5-309cd246004e" containerName="kube-state-metrics" Jan 20 17:37:43 crc kubenswrapper[4558]: E0120 17:37:43.934674 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4fd9bebc-9868-4970-bc1f-d047286980d0" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.934681 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4fd9bebc-9868-4970-bc1f-d047286980d0" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:37:43 crc kubenswrapper[4558]: E0120 17:37:43.934707 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b45e22f-53b9-4228-a28a-a5db18b6e583" containerName="openstack-network-exporter" Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.934712 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b45e22f-53b9-4228-a28a-a5db18b6e583" containerName="openstack-network-exporter" Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.934927 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="98f8dc8b-6446-4ba8-b37b-a11ae7414b65" containerName="ovsdbserver-nb" Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.934949 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="98f8dc8b-6446-4ba8-b37b-a11ae7414b65" containerName="openstack-network-exporter" Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.934960 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8b45e22f-53b9-4228-a28a-a5db18b6e583" containerName="openstack-network-exporter" Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.934974 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f09a19a5-d5eb-4466-9cb5-309cd246004e" containerName="kube-state-metrics" Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.934985 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8b45e22f-53b9-4228-a28a-a5db18b6e583" containerName="ovsdbserver-sb" Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.934993 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="4fd9bebc-9868-4970-bc1f-d047286980d0" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.935687 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-2d06-account-create-update-2wqtq" Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.952743 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-db-secret" Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.952913 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.966196 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.977793 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-2d06-account-create-update-2wqtq"] Jan 20 17:37:43 crc kubenswrapper[4558]: I0120 17:37:43.981816 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-db-sync-fgdtt"] Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:43.998446 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-db-sync-fgdtt"] Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.008331 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-s78ts"] Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.015602 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-s78ts"] Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.023131 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-59b8cc448d-b9wnn"] Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.023290 4558 scope.go:117] "RemoveContainer" containerID="6517bf0111713477475cc09d3cf93d2400d5d1a9f549d2bfca4f578e113b540a" Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.023389 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/keystone-59b8cc448d-b9wnn" podUID="73789760-f9bd-44e0-bf9b-c1864e5f8803" containerName="keystone-api" containerID="cri-o://46ea3b795dad740b543a64ff0cb9c02ffc22ebc03e4c397cb2f2f859f67d530b" gracePeriod=30 Jan 20 17:37:44 crc kubenswrapper[4558]: E0120 17:37:44.025633 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6517bf0111713477475cc09d3cf93d2400d5d1a9f549d2bfca4f578e113b540a\": container with ID starting with 6517bf0111713477475cc09d3cf93d2400d5d1a9f549d2bfca4f578e113b540a not found: ID does not exist" containerID="6517bf0111713477475cc09d3cf93d2400d5d1a9f549d2bfca4f578e113b540a" Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.025666 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6517bf0111713477475cc09d3cf93d2400d5d1a9f549d2bfca4f578e113b540a"} err="failed to get container status \"6517bf0111713477475cc09d3cf93d2400d5d1a9f549d2bfca4f578e113b540a\": rpc error: code = NotFound desc = could not find container \"6517bf0111713477475cc09d3cf93d2400d5d1a9f549d2bfca4f578e113b540a\": container with ID starting with 6517bf0111713477475cc09d3cf93d2400d5d1a9f549d2bfca4f578e113b540a not found: ID does not exist" Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.041125 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.065016 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.070205 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"1fff3747-796d-420a-aff0-846dfc615df9\" (UID: \"1fff3747-796d-420a-aff0-846dfc615df9\") " Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.070367 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/1fff3747-796d-420a-aff0-846dfc615df9-kolla-config\") pod \"1fff3747-796d-420a-aff0-846dfc615df9\" (UID: \"1fff3747-796d-420a-aff0-846dfc615df9\") " Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.070565 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-46zt7\" (UniqueName: \"kubernetes.io/projected/1fff3747-796d-420a-aff0-846dfc615df9-kube-api-access-46zt7\") pod \"1fff3747-796d-420a-aff0-846dfc615df9\" (UID: \"1fff3747-796d-420a-aff0-846dfc615df9\") " Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.070762 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/1fff3747-796d-420a-aff0-846dfc615df9-config-data-default\") pod \"1fff3747-796d-420a-aff0-846dfc615df9\" (UID: \"1fff3747-796d-420a-aff0-846dfc615df9\") " Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.070886 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/1fff3747-796d-420a-aff0-846dfc615df9-config-data-generated\") pod \"1fff3747-796d-420a-aff0-846dfc615df9\" (UID: \"1fff3747-796d-420a-aff0-846dfc615df9\") " Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.070964 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1fff3747-796d-420a-aff0-846dfc615df9-operator-scripts\") pod \"1fff3747-796d-420a-aff0-846dfc615df9\" (UID: \"1fff3747-796d-420a-aff0-846dfc615df9\") " Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.071035 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fff3747-796d-420a-aff0-846dfc615df9-combined-ca-bundle\") pod \"1fff3747-796d-420a-aff0-846dfc615df9\" (UID: \"1fff3747-796d-420a-aff0-846dfc615df9\") " Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.071111 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/1fff3747-796d-420a-aff0-846dfc615df9-galera-tls-certs\") pod \"1fff3747-796d-420a-aff0-846dfc615df9\" (UID: \"1fff3747-796d-420a-aff0-846dfc615df9\") " Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.071648 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8rnxx\" (UniqueName: \"kubernetes.io/projected/ccf99999-f746-48ce-a387-642245ef0fd5-kube-api-access-8rnxx\") pod \"keystone-2d06-account-create-update-2wqtq\" (UID: \"ccf99999-f746-48ce-a387-642245ef0fd5\") " pod="openstack-kuttl-tests/keystone-2d06-account-create-update-2wqtq" Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.071794 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ccf99999-f746-48ce-a387-642245ef0fd5-operator-scripts\") pod \"keystone-2d06-account-create-update-2wqtq\" (UID: \"ccf99999-f746-48ce-a387-642245ef0fd5\") " pod="openstack-kuttl-tests/keystone-2d06-account-create-update-2wqtq" Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.073064 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1fff3747-796d-420a-aff0-846dfc615df9-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "1fff3747-796d-420a-aff0-846dfc615df9" (UID: "1fff3747-796d-420a-aff0-846dfc615df9"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.073244 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1fff3747-796d-420a-aff0-846dfc615df9-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "1fff3747-796d-420a-aff0-846dfc615df9" (UID: "1fff3747-796d-420a-aff0-846dfc615df9"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.073919 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1fff3747-796d-420a-aff0-846dfc615df9-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "1fff3747-796d-420a-aff0-846dfc615df9" (UID: "1fff3747-796d-420a-aff0-846dfc615df9"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.074048 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1fff3747-796d-420a-aff0-846dfc615df9-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "1fff3747-796d-420a-aff0-846dfc615df9" (UID: "1fff3747-796d-420a-aff0-846dfc615df9"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.075542 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-db-create-dfmsf"] Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.082075 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-db-create-dfmsf"] Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.106648 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1fff3747-796d-420a-aff0-846dfc615df9-kube-api-access-46zt7" (OuterVolumeSpecName: "kube-api-access-46zt7") pod "1fff3747-796d-420a-aff0-846dfc615df9" (UID: "1fff3747-796d-420a-aff0-846dfc615df9"). InnerVolumeSpecName "kube-api-access-46zt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.135620 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage06-crc" (OuterVolumeSpecName: "mysql-db") pod "1fff3747-796d-420a-aff0-846dfc615df9" (UID: "1fff3747-796d-420a-aff0-846dfc615df9"). InnerVolumeSpecName "local-storage06-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.133930 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1fff3747-796d-420a-aff0-846dfc615df9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1fff3747-796d-420a-aff0-846dfc615df9" (UID: "1fff3747-796d-420a-aff0-846dfc615df9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.168149 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-2d06-account-create-update-2wqtq"] Jan 20 17:37:44 crc kubenswrapper[4558]: E0120 17:37:44.169146 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[kube-api-access-8rnxx operator-scripts], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack-kuttl-tests/keystone-2d06-account-create-update-2wqtq" podUID="ccf99999-f746-48ce-a387-642245ef0fd5" Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.170920 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1fff3747-796d-420a-aff0-846dfc615df9-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "1fff3747-796d-420a-aff0-846dfc615df9" (UID: "1fff3747-796d-420a-aff0-846dfc615df9"). InnerVolumeSpecName "galera-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.185368 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8rnxx\" (UniqueName: \"kubernetes.io/projected/ccf99999-f746-48ce-a387-642245ef0fd5-kube-api-access-8rnxx\") pod \"keystone-2d06-account-create-update-2wqtq\" (UID: \"ccf99999-f746-48ce-a387-642245ef0fd5\") " pod="openstack-kuttl-tests/keystone-2d06-account-create-update-2wqtq" Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.185448 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ccf99999-f746-48ce-a387-642245ef0fd5-operator-scripts\") pod \"keystone-2d06-account-create-update-2wqtq\" (UID: \"ccf99999-f746-48ce-a387-642245ef0fd5\") " pod="openstack-kuttl-tests/keystone-2d06-account-create-update-2wqtq" Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.185605 4558 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/1fff3747-796d-420a-aff0-846dfc615df9-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.185620 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-46zt7\" (UniqueName: \"kubernetes.io/projected/1fff3747-796d-420a-aff0-846dfc615df9-kube-api-access-46zt7\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.185633 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/1fff3747-796d-420a-aff0-846dfc615df9-config-data-default\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.185645 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/1fff3747-796d-420a-aff0-846dfc615df9-config-data-generated\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.185655 4558 
reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1fff3747-796d-420a-aff0-846dfc615df9-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.185665 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fff3747-796d-420a-aff0-846dfc615df9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.185674 4558 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/1fff3747-796d-420a-aff0-846dfc615df9-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.185696 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" " Jan 20 17:37:44 crc kubenswrapper[4558]: E0120 17:37:44.187325 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 17:37:44 crc kubenswrapper[4558]: E0120 17:37:44.187397 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/ccf99999-f746-48ce-a387-642245ef0fd5-operator-scripts podName:ccf99999-f746-48ce-a387-642245ef0fd5 nodeName:}" failed. No retries permitted until 2026-01-20 17:37:44.687377285 +0000 UTC m=+3358.447715253 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/ccf99999-f746-48ce-a387-642245ef0fd5-operator-scripts") pod "keystone-2d06-account-create-update-2wqtq" (UID: "ccf99999-f746-48ce-a387-642245ef0fd5") : configmap "openstack-scripts" not found Jan 20 17:37:44 crc kubenswrapper[4558]: E0120 17:37:44.187550 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-cell1-scripts: configmap "openstack-cell1-scripts" not found Jan 20 17:37:44 crc kubenswrapper[4558]: E0120 17:37:44.187574 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/52895fa4-c08b-43b8-9f63-cbc8621db11e-operator-scripts podName:52895fa4-c08b-43b8-9f63-cbc8621db11e nodeName:}" failed. No retries permitted until 2026-01-20 17:37:46.187566932 +0000 UTC m=+3359.947904899 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/52895fa4-c08b-43b8-9f63-cbc8621db11e-operator-scripts") pod "root-account-create-update-wlsbh" (UID: "52895fa4-c08b-43b8-9f63-cbc8621db11e") : configmap "openstack-cell1-scripts" not found Jan 20 17:37:44 crc kubenswrapper[4558]: E0120 17:37:44.190796 4558 projected.go:194] Error preparing data for projected volume kube-api-access-8rnxx for pod openstack-kuttl-tests/keystone-2d06-account-create-update-2wqtq: failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 17:37:44 crc kubenswrapper[4558]: E0120 17:37:44.190884 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/ccf99999-f746-48ce-a387-642245ef0fd5-kube-api-access-8rnxx podName:ccf99999-f746-48ce-a387-642245ef0fd5 nodeName:}" failed. No retries permitted until 2026-01-20 17:37:44.69085778 +0000 UTC m=+3358.451195746 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-8rnxx" (UniqueName: "kubernetes.io/projected/ccf99999-f746-48ce-a387-642245ef0fd5-kube-api-access-8rnxx") pod "keystone-2d06-account-create-update-2wqtq" (UID: "ccf99999-f746-48ce-a387-642245ef0fd5") : failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 17:37:44 crc kubenswrapper[4558]: E0120 17:37:44.199785 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="1c79880b61b7b00d3acd244cc4b46331c0b20f7998564a054c816dfa317eab5b" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:37:44 crc kubenswrapper[4558]: E0120 17:37:44.202432 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="1c79880b61b7b00d3acd244cc4b46331c0b20f7998564a054c816dfa317eab5b" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:37:44 crc kubenswrapper[4558]: E0120 17:37:44.205151 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="1c79880b61b7b00d3acd244cc4b46331c0b20f7998564a054c816dfa317eab5b" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:37:44 crc kubenswrapper[4558]: E0120 17:37:44.205219 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podUID="0def1343-9e4b-4f74-84bd-3212688b59ce" containerName="nova-cell1-conductor-conductor" Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.212545 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage06-crc" (UniqueName: "kubernetes.io/local-volume/local-storage06-crc") on node "crc" Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.237722 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/swift-proxy-96bd7cb78-txfmk" Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.247551 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/openstack-galera-0" podUID="79a4b689-d81b-44da-8baa-88fc6ce78172" containerName="galera" containerID="cri-o://4937e3357efe6ba2fe8b94eb2c378f889e567ce6926af22853306ab11522be96" gracePeriod=30 Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.287260 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c7ffc3d3-5db6-484c-b9c1-d065e7177eb7-internal-tls-certs\") pod \"c7ffc3d3-5db6-484c-b9c1-d065e7177eb7\" (UID: \"c7ffc3d3-5db6-484c-b9c1-d065e7177eb7\") " Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.287653 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7ffc3d3-5db6-484c-b9c1-d065e7177eb7-config-data\") pod \"c7ffc3d3-5db6-484c-b9c1-d065e7177eb7\" (UID: \"c7ffc3d3-5db6-484c-b9c1-d065e7177eb7\") " Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.287719 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/c7ffc3d3-5db6-484c-b9c1-d065e7177eb7-etc-swift\") pod \"c7ffc3d3-5db6-484c-b9c1-d065e7177eb7\" (UID: \"c7ffc3d3-5db6-484c-b9c1-d065e7177eb7\") " Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.287753 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c7ffc3d3-5db6-484c-b9c1-d065e7177eb7-run-httpd\") pod \"c7ffc3d3-5db6-484c-b9c1-d065e7177eb7\" (UID: \"c7ffc3d3-5db6-484c-b9c1-d065e7177eb7\") " Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.287802 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wwzwk\" (UniqueName: \"kubernetes.io/projected/c7ffc3d3-5db6-484c-b9c1-d065e7177eb7-kube-api-access-wwzwk\") pod \"c7ffc3d3-5db6-484c-b9c1-d065e7177eb7\" (UID: \"c7ffc3d3-5db6-484c-b9c1-d065e7177eb7\") " Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.287825 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c7ffc3d3-5db6-484c-b9c1-d065e7177eb7-public-tls-certs\") pod \"c7ffc3d3-5db6-484c-b9c1-d065e7177eb7\" (UID: \"c7ffc3d3-5db6-484c-b9c1-d065e7177eb7\") " Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.287875 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c7ffc3d3-5db6-484c-b9c1-d065e7177eb7-log-httpd\") pod \"c7ffc3d3-5db6-484c-b9c1-d065e7177eb7\" (UID: \"c7ffc3d3-5db6-484c-b9c1-d065e7177eb7\") " Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.287893 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7ffc3d3-5db6-484c-b9c1-d065e7177eb7-combined-ca-bundle\") pod \"c7ffc3d3-5db6-484c-b9c1-d065e7177eb7\" (UID: \"c7ffc3d3-5db6-484c-b9c1-d065e7177eb7\") " Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.288675 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c7ffc3d3-5db6-484c-b9c1-d065e7177eb7-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "c7ffc3d3-5db6-484c-b9c1-d065e7177eb7" (UID: 
"c7ffc3d3-5db6-484c-b9c1-d065e7177eb7"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.288694 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:44 crc kubenswrapper[4558]: E0120 17:37:44.288773 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-memcached-svc: secret "cert-memcached-svc" not found Jan 20 17:37:44 crc kubenswrapper[4558]: E0120 17:37:44.288819 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b18f4fcf-eaae-401f-99ec-7b130ad8a6c1-memcached-tls-certs podName:b18f4fcf-eaae-401f-99ec-7b130ad8a6c1 nodeName:}" failed. No retries permitted until 2026-01-20 17:37:48.288804847 +0000 UTC m=+3362.049142815 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "memcached-tls-certs" (UniqueName: "kubernetes.io/secret/b18f4fcf-eaae-401f-99ec-7b130ad8a6c1-memcached-tls-certs") pod "memcached-0" (UID: "b18f4fcf-eaae-401f-99ec-7b130ad8a6c1") : secret "cert-memcached-svc" not found Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.289430 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c7ffc3d3-5db6-484c-b9c1-d065e7177eb7-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "c7ffc3d3-5db6-484c-b9c1-d065e7177eb7" (UID: "c7ffc3d3-5db6-484c-b9c1-d065e7177eb7"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:37:44 crc kubenswrapper[4558]: E0120 17:37:44.291403 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:37:44 crc kubenswrapper[4558]: E0120 17:37:44.291470 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b18f4fcf-eaae-401f-99ec-7b130ad8a6c1-combined-ca-bundle podName:b18f4fcf-eaae-401f-99ec-7b130ad8a6c1 nodeName:}" failed. No retries permitted until 2026-01-20 17:37:48.291451935 +0000 UTC m=+3362.051789901 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/b18f4fcf-eaae-401f-99ec-7b130ad8a6c1-combined-ca-bundle") pod "memcached-0" (UID: "b18f4fcf-eaae-401f-99ec-7b130ad8a6c1") : secret "combined-ca-bundle" not found Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.294022 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c7ffc3d3-5db6-484c-b9c1-d065e7177eb7-kube-api-access-wwzwk" (OuterVolumeSpecName: "kube-api-access-wwzwk") pod "c7ffc3d3-5db6-484c-b9c1-d065e7177eb7" (UID: "c7ffc3d3-5db6-484c-b9c1-d065e7177eb7"). InnerVolumeSpecName "kube-api-access-wwzwk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.296085 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c7ffc3d3-5db6-484c-b9c1-d065e7177eb7-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "c7ffc3d3-5db6-484c-b9c1-d065e7177eb7" (UID: "c7ffc3d3-5db6-484c-b9c1-d065e7177eb7"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.371297 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c7ffc3d3-5db6-484c-b9c1-d065e7177eb7-config-data" (OuterVolumeSpecName: "config-data") pod "c7ffc3d3-5db6-484c-b9c1-d065e7177eb7" (UID: "c7ffc3d3-5db6-484c-b9c1-d065e7177eb7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.388891 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c7ffc3d3-5db6-484c-b9c1-d065e7177eb7-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "c7ffc3d3-5db6-484c-b9c1-d065e7177eb7" (UID: "c7ffc3d3-5db6-484c-b9c1-d065e7177eb7"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.389887 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c7ffc3d3-5db6-484c-b9c1-d065e7177eb7-public-tls-certs\") pod \"c7ffc3d3-5db6-484c-b9c1-d065e7177eb7\" (UID: \"c7ffc3d3-5db6-484c-b9c1-d065e7177eb7\") " Jan 20 17:37:44 crc kubenswrapper[4558]: W0120 17:37:44.389977 4558 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/c7ffc3d3-5db6-484c-b9c1-d065e7177eb7/volumes/kubernetes.io~secret/public-tls-certs Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.389998 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c7ffc3d3-5db6-484c-b9c1-d065e7177eb7-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "c7ffc3d3-5db6-484c-b9c1-d065e7177eb7" (UID: "c7ffc3d3-5db6-484c-b9c1-d065e7177eb7"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.391150 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7ffc3d3-5db6-484c-b9c1-d065e7177eb7-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.391238 4558 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/c7ffc3d3-5db6-484c-b9c1-d065e7177eb7-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.391256 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c7ffc3d3-5db6-484c-b9c1-d065e7177eb7-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.391274 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wwzwk\" (UniqueName: \"kubernetes.io/projected/c7ffc3d3-5db6-484c-b9c1-d065e7177eb7-kube-api-access-wwzwk\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.391294 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c7ffc3d3-5db6-484c-b9c1-d065e7177eb7-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.391309 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c7ffc3d3-5db6-484c-b9c1-d065e7177eb7-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.392436 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c7ffc3d3-5db6-484c-b9c1-d065e7177eb7-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "c7ffc3d3-5db6-484c-b9c1-d065e7177eb7" (UID: "c7ffc3d3-5db6-484c-b9c1-d065e7177eb7"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.439044 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c7ffc3d3-5db6-484c-b9c1-d065e7177eb7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c7ffc3d3-5db6-484c-b9c1-d065e7177eb7" (UID: "c7ffc3d3-5db6-484c-b9c1-d065e7177eb7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.494794 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c7ffc3d3-5db6-484c-b9c1-d065e7177eb7-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.494859 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7ffc3d3-5db6-484c-b9c1-d065e7177eb7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.525572 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-wlsbh" Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.532731 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-2fe2-account-create-update-t264m" Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.578813 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b511fac-7a17-433d-8baa-9eefa278bd25" path="/var/lib/kubelet/pods/0b511fac-7a17-433d-8baa-9eefa278bd25/volumes" Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.579468 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="38224f24-100b-4765-b1f3-886f8941593d" path="/var/lib/kubelet/pods/38224f24-100b-4765-b1f3-886f8941593d/volumes" Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.579997 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cf7d899-0457-4a78-9054-56101bd4963a" path="/var/lib/kubelet/pods/3cf7d899-0457-4a78-9054-56101bd4963a/volumes" Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.581184 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4fd9bebc-9868-4970-bc1f-d047286980d0" path="/var/lib/kubelet/pods/4fd9bebc-9868-4970-bc1f-d047286980d0/volumes" Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.582377 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8b45e22f-53b9-4228-a28a-a5db18b6e583" path="/var/lib/kubelet/pods/8b45e22f-53b9-4228-a28a-a5db18b6e583/volumes" Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.583241 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8d3adcfe-4948-4e18-910b-bb3380e977a0" path="/var/lib/kubelet/pods/8d3adcfe-4948-4e18-910b-bb3380e977a0/volumes" Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.584548 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="98f8dc8b-6446-4ba8-b37b-a11ae7414b65" path="/var/lib/kubelet/pods/98f8dc8b-6446-4ba8-b37b-a11ae7414b65/volumes" Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.585130 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c" path="/var/lib/kubelet/pods/a03b6fb7-d712-46bd-a8ed-e47bc25ecc7c/volumes" Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.585621 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="adf0bec9-f1a7-41d3-a4ef-28573981892f" path="/var/lib/kubelet/pods/adf0bec9-f1a7-41d3-a4ef-28573981892f/volumes" Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.586741 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e5f57088-ae0f-4326-89fb-a7957da83477" path="/var/lib/kubelet/pods/e5f57088-ae0f-4326-89fb-a7957da83477/volumes" Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.587279 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ed722c75-6f59-4f89-a7fd-c9e2e6c402b3" path="/var/lib/kubelet/pods/ed722c75-6f59-4f89-a7fd-c9e2e6c402b3/volumes" Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.587985 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f09a19a5-d5eb-4466-9cb5-309cd246004e" path="/var/lib/kubelet/pods/f09a19a5-d5eb-4466-9cb5-309cd246004e/volumes" Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.595808 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tpp6q\" (UniqueName: \"kubernetes.io/projected/cdf89a68-b15b-4f2d-9016-55fb28011197-kube-api-access-tpp6q\") pod \"cdf89a68-b15b-4f2d-9016-55fb28011197\" (UID: \"cdf89a68-b15b-4f2d-9016-55fb28011197\") " Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.595997 4558 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/52895fa4-c08b-43b8-9f63-cbc8621db11e-operator-scripts\") pod \"52895fa4-c08b-43b8-9f63-cbc8621db11e\" (UID: \"52895fa4-c08b-43b8-9f63-cbc8621db11e\") " Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.596306 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xgm67\" (UniqueName: \"kubernetes.io/projected/52895fa4-c08b-43b8-9f63-cbc8621db11e-kube-api-access-xgm67\") pod \"52895fa4-c08b-43b8-9f63-cbc8621db11e\" (UID: \"52895fa4-c08b-43b8-9f63-cbc8621db11e\") " Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.596384 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cdf89a68-b15b-4f2d-9016-55fb28011197-operator-scripts\") pod \"cdf89a68-b15b-4f2d-9016-55fb28011197\" (UID: \"cdf89a68-b15b-4f2d-9016-55fb28011197\") " Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.597256 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cdf89a68-b15b-4f2d-9016-55fb28011197-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "cdf89a68-b15b-4f2d-9016-55fb28011197" (UID: "cdf89a68-b15b-4f2d-9016-55fb28011197"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.597792 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/52895fa4-c08b-43b8-9f63-cbc8621db11e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "52895fa4-c08b-43b8-9f63-cbc8621db11e" (UID: "52895fa4-c08b-43b8-9f63-cbc8621db11e"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.605604 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cdf89a68-b15b-4f2d-9016-55fb28011197-kube-api-access-tpp6q" (OuterVolumeSpecName: "kube-api-access-tpp6q") pod "cdf89a68-b15b-4f2d-9016-55fb28011197" (UID: "cdf89a68-b15b-4f2d-9016-55fb28011197"). InnerVolumeSpecName "kube-api-access-tpp6q". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.606962 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/52895fa4-c08b-43b8-9f63-cbc8621db11e-kube-api-access-xgm67" (OuterVolumeSpecName: "kube-api-access-xgm67") pod "52895fa4-c08b-43b8-9f63-cbc8621db11e" (UID: "52895fa4-c08b-43b8-9f63-cbc8621db11e"). InnerVolumeSpecName "kube-api-access-xgm67". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.698474 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8rnxx\" (UniqueName: \"kubernetes.io/projected/ccf99999-f746-48ce-a387-642245ef0fd5-kube-api-access-8rnxx\") pod \"keystone-2d06-account-create-update-2wqtq\" (UID: \"ccf99999-f746-48ce-a387-642245ef0fd5\") " pod="openstack-kuttl-tests/keystone-2d06-account-create-update-2wqtq" Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.698541 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ccf99999-f746-48ce-a387-642245ef0fd5-operator-scripts\") pod \"keystone-2d06-account-create-update-2wqtq\" (UID: \"ccf99999-f746-48ce-a387-642245ef0fd5\") " pod="openstack-kuttl-tests/keystone-2d06-account-create-update-2wqtq" Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.698658 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xgm67\" (UniqueName: \"kubernetes.io/projected/52895fa4-c08b-43b8-9f63-cbc8621db11e-kube-api-access-xgm67\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.698672 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cdf89a68-b15b-4f2d-9016-55fb28011197-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:44 crc kubenswrapper[4558]: E0120 17:37:44.698734 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.698790 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tpp6q\" (UniqueName: \"kubernetes.io/projected/cdf89a68-b15b-4f2d-9016-55fb28011197-kube-api-access-tpp6q\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.698856 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/52895fa4-c08b-43b8-9f63-cbc8621db11e-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:44 crc kubenswrapper[4558]: E0120 17:37:44.698915 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/ccf99999-f746-48ce-a387-642245ef0fd5-operator-scripts podName:ccf99999-f746-48ce-a387-642245ef0fd5 nodeName:}" failed. No retries permitted until 2026-01-20 17:37:45.698899599 +0000 UTC m=+3359.459237567 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/ccf99999-f746-48ce-a387-642245ef0fd5-operator-scripts") pod "keystone-2d06-account-create-update-2wqtq" (UID: "ccf99999-f746-48ce-a387-642245ef0fd5") : configmap "openstack-scripts" not found Jan 20 17:37:44 crc kubenswrapper[4558]: E0120 17:37:44.703479 4558 projected.go:194] Error preparing data for projected volume kube-api-access-8rnxx for pod openstack-kuttl-tests/keystone-2d06-account-create-update-2wqtq: failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 17:37:44 crc kubenswrapper[4558]: E0120 17:37:44.703556 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/ccf99999-f746-48ce-a387-642245ef0fd5-kube-api-access-8rnxx podName:ccf99999-f746-48ce-a387-642245ef0fd5 nodeName:}" failed. 
No retries permitted until 2026-01-20 17:37:45.703534885 +0000 UTC m=+3359.463872852 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-8rnxx" (UniqueName: "kubernetes.io/projected/ccf99999-f746-48ce-a387-642245ef0fd5-kube-api-access-8rnxx") pod "keystone-2d06-account-create-update-2wqtq" (UID: "ccf99999-f746-48ce-a387-642245ef0fd5") : failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 17:37:44 crc kubenswrapper[4558]: E0120 17:37:44.878796 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 1e1ade780111891ab36a10d37a0917edea1f34135b19b61c709e80ef7ae68b98 is running failed: container process not found" containerID="1e1ade780111891ab36a10d37a0917edea1f34135b19b61c709e80ef7ae68b98" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Jan 20 17:37:44 crc kubenswrapper[4558]: E0120 17:37:44.882390 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 1e1ade780111891ab36a10d37a0917edea1f34135b19b61c709e80ef7ae68b98 is running failed: container process not found" containerID="1e1ade780111891ab36a10d37a0917edea1f34135b19b61c709e80ef7ae68b98" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Jan 20 17:37:44 crc kubenswrapper[4558]: E0120 17:37:44.883008 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 1e1ade780111891ab36a10d37a0917edea1f34135b19b61c709e80ef7ae68b98 is running failed: container process not found" containerID="1e1ade780111891ab36a10d37a0917edea1f34135b19b61c709e80ef7ae68b98" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Jan 20 17:37:44 crc kubenswrapper[4558]: E0120 17:37:44.883036 4558 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 1e1ade780111891ab36a10d37a0917edea1f34135b19b61c709e80ef7ae68b98 is running failed: container process not found" probeType="Readiness" pod="openstack-kuttl-tests/ovn-northd-0" podUID="c570a7ea-d296-44bc-a48f-6dd8be9754d0" containerName="ovn-northd" Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.892990 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-f7c7-account-create-update-hf7rn" Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.910549 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.921981 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-b147-account-create-update-zdjzj" Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.923564 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovn-northd-0_c570a7ea-d296-44bc-a48f-6dd8be9754d0/ovn-northd/0.log" Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.923596 4558 generic.go:334] "Generic (PLEG): container finished" podID="c570a7ea-d296-44bc-a48f-6dd8be9754d0" containerID="1e1ade780111891ab36a10d37a0917edea1f34135b19b61c709e80ef7ae68b98" exitCode=139 Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.923634 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"c570a7ea-d296-44bc-a48f-6dd8be9754d0","Type":"ContainerDied","Data":"1e1ade780111891ab36a10d37a0917edea1f34135b19b61c709e80ef7ae68b98"} Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.927104 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-b147-account-create-update-zdjzj" event={"ID":"7ca40caa-67e9-4be0-8697-cf15086736bc","Type":"ContainerDied","Data":"92f0d365d5411ae74d15e2181c24da69129cd7f91b296bf5f51e909e71befc8c"} Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.927318 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-b147-account-create-update-zdjzj" Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.932354 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.954930 4558 generic.go:334] "Generic (PLEG): container finished" podID="6b77441a-39f5-4ed5-bf1b-c29900900242" containerID="bbf204899e737785b34e43ac3dcf8c1621d7b4c38d2b11ab4f044288ae0cdd0a" exitCode=0 Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.955001 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-55dfc8964d-kck7c" event={"ID":"6b77441a-39f5-4ed5-bf1b-c29900900242","Type":"ContainerDied","Data":"bbf204899e737785b34e43ac3dcf8c1621d7b4c38d2b11ab4f044288ae0cdd0a"} Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.976578 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-wlsbh" event={"ID":"52895fa4-c08b-43b8-9f63-cbc8621db11e","Type":"ContainerDied","Data":"b188a203f3875e52c33c42111e0cc880025085cb9a11a04452bcf06cd6b045e9"} Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.976611 4558 scope.go:117] "RemoveContainer" containerID="c92d1dec67e80534ab703ca22d222cf507194a905156371d5521a2bcb6e6fdca" Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.976723 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-wlsbh" Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.991561 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"f7881580-f0a5-47a0-9622-3927bcce668a","Type":"ContainerDied","Data":"7afb4e77f803740421952c0dbe28e7f78cf5d2d3ff6b921fafb8facbbd98325f"} Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.992347 4558 generic.go:334] "Generic (PLEG): container finished" podID="f7881580-f0a5-47a0-9622-3927bcce668a" containerID="7afb4e77f803740421952c0dbe28e7f78cf5d2d3ff6b921fafb8facbbd98325f" exitCode=0 Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.996204 4558 generic.go:334] "Generic (PLEG): container finished" podID="1addf16e-1af6-431b-aff1-05c48a952f5a" containerID="f49f1bac498f7d02524aaee31c0af982140e123e147ff77674e41803999fb5b9" exitCode=0 Jan 20 17:37:44 crc kubenswrapper[4558]: I0120 17:37:44.996298 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"1addf16e-1af6-431b-aff1-05c48a952f5a","Type":"ContainerDied","Data":"f49f1bac498f7d02524aaee31c0af982140e123e147ff77674e41803999fb5b9"} Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:44.999339 4558 generic.go:334] "Generic (PLEG): container finished" podID="79a4b689-d81b-44da-8baa-88fc6ce78172" containerID="4937e3357efe6ba2fe8b94eb2c378f889e567ce6926af22853306ab11522be96" exitCode=0 Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:44.999393 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"79a4b689-d81b-44da-8baa-88fc6ce78172","Type":"ContainerDied","Data":"4937e3357efe6ba2fe8b94eb2c378f889e567ce6926af22853306ab11522be96"} Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.004265 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7ca40caa-67e9-4be0-8697-cf15086736bc-operator-scripts\") pod \"7ca40caa-67e9-4be0-8697-cf15086736bc\" (UID: \"7ca40caa-67e9-4be0-8697-cf15086736bc\") " Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.004350 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f19d5a0b-6d74-4b46-86a3-9381feb3f158-httpd-run\") pod \"f19d5a0b-6d74-4b46-86a3-9381feb3f158\" (UID: \"f19d5a0b-6d74-4b46-86a3-9381feb3f158\") " Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.004435 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9mfdq\" (UniqueName: \"kubernetes.io/projected/7ca40caa-67e9-4be0-8697-cf15086736bc-kube-api-access-9mfdq\") pod \"7ca40caa-67e9-4be0-8697-cf15086736bc\" (UID: \"7ca40caa-67e9-4be0-8697-cf15086736bc\") " Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.004474 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e1a1ac97-a530-4ba0-8c9b-835a2b576c8d-etc-machine-id\") pod \"e1a1ac97-a530-4ba0-8c9b-835a2b576c8d\" (UID: \"e1a1ac97-a530-4ba0-8c9b-835a2b576c8d\") " Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.004526 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e1a1ac97-a530-4ba0-8c9b-835a2b576c8d-internal-tls-certs\") pod \"e1a1ac97-a530-4ba0-8c9b-835a2b576c8d\" (UID: 
\"e1a1ac97-a530-4ba0-8c9b-835a2b576c8d\") " Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.004546 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f19d5a0b-6d74-4b46-86a3-9381feb3f158-combined-ca-bundle\") pod \"f19d5a0b-6d74-4b46-86a3-9381feb3f158\" (UID: \"f19d5a0b-6d74-4b46-86a3-9381feb3f158\") " Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.004601 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1a1ac97-a530-4ba0-8c9b-835a2b576c8d-config-data\") pod \"e1a1ac97-a530-4ba0-8c9b-835a2b576c8d\" (UID: \"e1a1ac97-a530-4ba0-8c9b-835a2b576c8d\") " Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.004622 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f19d5a0b-6d74-4b46-86a3-9381feb3f158-config-data\") pod \"f19d5a0b-6d74-4b46-86a3-9381feb3f158\" (UID: \"f19d5a0b-6d74-4b46-86a3-9381feb3f158\") " Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.004723 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f19d5a0b-6d74-4b46-86a3-9381feb3f158-scripts\") pod \"f19d5a0b-6d74-4b46-86a3-9381feb3f158\" (UID: \"f19d5a0b-6d74-4b46-86a3-9381feb3f158\") " Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.004774 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage16-crc\") pod \"f19d5a0b-6d74-4b46-86a3-9381feb3f158\" (UID: \"f19d5a0b-6d74-4b46-86a3-9381feb3f158\") " Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.004802 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0f5a8541-5b86-4fff-8f94-18b8b5ee3ebf-operator-scripts\") pod \"0f5a8541-5b86-4fff-8f94-18b8b5ee3ebf\" (UID: \"0f5a8541-5b86-4fff-8f94-18b8b5ee3ebf\") " Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.004849 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1a1ac97-a530-4ba0-8c9b-835a2b576c8d-combined-ca-bundle\") pod \"e1a1ac97-a530-4ba0-8c9b-835a2b576c8d\" (UID: \"e1a1ac97-a530-4ba0-8c9b-835a2b576c8d\") " Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.004872 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2grqk\" (UniqueName: \"kubernetes.io/projected/0f5a8541-5b86-4fff-8f94-18b8b5ee3ebf-kube-api-access-2grqk\") pod \"0f5a8541-5b86-4fff-8f94-18b8b5ee3ebf\" (UID: \"0f5a8541-5b86-4fff-8f94-18b8b5ee3ebf\") " Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.004911 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gjr4x\" (UniqueName: \"kubernetes.io/projected/e1a1ac97-a530-4ba0-8c9b-835a2b576c8d-kube-api-access-gjr4x\") pod \"e1a1ac97-a530-4ba0-8c9b-835a2b576c8d\" (UID: \"e1a1ac97-a530-4ba0-8c9b-835a2b576c8d\") " Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.004931 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e1a1ac97-a530-4ba0-8c9b-835a2b576c8d-public-tls-certs\") pod \"e1a1ac97-a530-4ba0-8c9b-835a2b576c8d\" (UID: \"e1a1ac97-a530-4ba0-8c9b-835a2b576c8d\") " 
Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.004950 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e1a1ac97-a530-4ba0-8c9b-835a2b576c8d-config-data-custom\") pod \"e1a1ac97-a530-4ba0-8c9b-835a2b576c8d\" (UID: \"e1a1ac97-a530-4ba0-8c9b-835a2b576c8d\") " Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.004965 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e1a1ac97-a530-4ba0-8c9b-835a2b576c8d-scripts\") pod \"e1a1ac97-a530-4ba0-8c9b-835a2b576c8d\" (UID: \"e1a1ac97-a530-4ba0-8c9b-835a2b576c8d\") " Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.005004 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2f625\" (UniqueName: \"kubernetes.io/projected/f19d5a0b-6d74-4b46-86a3-9381feb3f158-kube-api-access-2f625\") pod \"f19d5a0b-6d74-4b46-86a3-9381feb3f158\" (UID: \"f19d5a0b-6d74-4b46-86a3-9381feb3f158\") " Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.005035 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f19d5a0b-6d74-4b46-86a3-9381feb3f158-logs\") pod \"f19d5a0b-6d74-4b46-86a3-9381feb3f158\" (UID: \"f19d5a0b-6d74-4b46-86a3-9381feb3f158\") " Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.005078 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7ca40caa-67e9-4be0-8697-cf15086736bc-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7ca40caa-67e9-4be0-8697-cf15086736bc" (UID: "7ca40caa-67e9-4be0-8697-cf15086736bc"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.005124 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f19d5a0b-6d74-4b46-86a3-9381feb3f158-public-tls-certs\") pod \"f19d5a0b-6d74-4b46-86a3-9381feb3f158\" (UID: \"f19d5a0b-6d74-4b46-86a3-9381feb3f158\") " Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.005191 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e1a1ac97-a530-4ba0-8c9b-835a2b576c8d-logs\") pod \"e1a1ac97-a530-4ba0-8c9b-835a2b576c8d\" (UID: \"e1a1ac97-a530-4ba0-8c9b-835a2b576c8d\") " Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.006140 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7ca40caa-67e9-4be0-8697-cf15086736bc-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.006625 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e1a1ac97-a530-4ba0-8c9b-835a2b576c8d-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "e1a1ac97-a530-4ba0-8c9b-835a2b576c8d" (UID: "e1a1ac97-a530-4ba0-8c9b-835a2b576c8d"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.007308 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e1a1ac97-a530-4ba0-8c9b-835a2b576c8d-logs" (OuterVolumeSpecName: "logs") pod "e1a1ac97-a530-4ba0-8c9b-835a2b576c8d" (UID: "e1a1ac97-a530-4ba0-8c9b-835a2b576c8d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.007516 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f19d5a0b-6d74-4b46-86a3-9381feb3f158-logs" (OuterVolumeSpecName: "logs") pod "f19d5a0b-6d74-4b46-86a3-9381feb3f158" (UID: "f19d5a0b-6d74-4b46-86a3-9381feb3f158"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.007838 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f19d5a0b-6d74-4b46-86a3-9381feb3f158-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "f19d5a0b-6d74-4b46-86a3-9381feb3f158" (UID: "f19d5a0b-6d74-4b46-86a3-9381feb3f158"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.008028 4558 generic.go:334] "Generic (PLEG): container finished" podID="e1a1ac97-a530-4ba0-8c9b-835a2b576c8d" containerID="cfc0219326dabcde321d0d0fb037a9ecb6ea36b64ee17a015344e0990a28b989" exitCode=0 Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.008127 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"e1a1ac97-a530-4ba0-8c9b-835a2b576c8d","Type":"ContainerDied","Data":"cfc0219326dabcde321d0d0fb037a9ecb6ea36b64ee17a015344e0990a28b989"} Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.008182 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"e1a1ac97-a530-4ba0-8c9b-835a2b576c8d","Type":"ContainerDied","Data":"88da74428e97c037a8331206de66e098d70b33fcebb603be2a7417ecafe4363a"} Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.008261 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.009625 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0f5a8541-5b86-4fff-8f94-18b8b5ee3ebf-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "0f5a8541-5b86-4fff-8f94-18b8b5ee3ebf" (UID: "0f5a8541-5b86-4fff-8f94-18b8b5ee3ebf"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.013855 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage16-crc" (OuterVolumeSpecName: "glance") pod "f19d5a0b-6d74-4b46-86a3-9381feb3f158" (UID: "f19d5a0b-6d74-4b46-86a3-9381feb3f158"). InnerVolumeSpecName "local-storage16-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.014063 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7ca40caa-67e9-4be0-8697-cf15086736bc-kube-api-access-9mfdq" (OuterVolumeSpecName: "kube-api-access-9mfdq") pod "7ca40caa-67e9-4be0-8697-cf15086736bc" (UID: "7ca40caa-67e9-4be0-8697-cf15086736bc"). InnerVolumeSpecName "kube-api-access-9mfdq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.014093 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-wlsbh"] Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.014568 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e1a1ac97-a530-4ba0-8c9b-835a2b576c8d-kube-api-access-gjr4x" (OuterVolumeSpecName: "kube-api-access-gjr4x") pod "e1a1ac97-a530-4ba0-8c9b-835a2b576c8d" (UID: "e1a1ac97-a530-4ba0-8c9b-835a2b576c8d"). InnerVolumeSpecName "kube-api-access-gjr4x". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.015190 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0f5a8541-5b86-4fff-8f94-18b8b5ee3ebf-kube-api-access-2grqk" (OuterVolumeSpecName: "kube-api-access-2grqk") pod "0f5a8541-5b86-4fff-8f94-18b8b5ee3ebf" (UID: "0f5a8541-5b86-4fff-8f94-18b8b5ee3ebf"). InnerVolumeSpecName "kube-api-access-2grqk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.015954 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1a1ac97-a530-4ba0-8c9b-835a2b576c8d-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "e1a1ac97-a530-4ba0-8c9b-835a2b576c8d" (UID: "e1a1ac97-a530-4ba0-8c9b-835a2b576c8d"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.016448 4558 generic.go:334] "Generic (PLEG): container finished" podID="c7ffc3d3-5db6-484c-b9c1-d065e7177eb7" containerID="cc29bf400e020c3de6a91d31522715a48fb29ae6d1627268b6c2963ee53e1761" exitCode=0 Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.016503 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-96bd7cb78-txfmk" event={"ID":"c7ffc3d3-5db6-484c-b9c1-d065e7177eb7","Type":"ContainerDied","Data":"cc29bf400e020c3de6a91d31522715a48fb29ae6d1627268b6c2963ee53e1761"} Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.016531 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-96bd7cb78-txfmk" event={"ID":"c7ffc3d3-5db6-484c-b9c1-d065e7177eb7","Type":"ContainerDied","Data":"44cace5f75ae4607a7bdba8ebe710db8b5b6bac667a22188c9e242e506829df6"} Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.016590 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/swift-proxy-96bd7cb78-txfmk" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.019583 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-wlsbh"] Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.020383 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1a1ac97-a530-4ba0-8c9b-835a2b576c8d-scripts" (OuterVolumeSpecName: "scripts") pod "e1a1ac97-a530-4ba0-8c9b-835a2b576c8d" (UID: "e1a1ac97-a530-4ba0-8c9b-835a2b576c8d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.021111 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"1fff3747-796d-420a-aff0-846dfc615df9","Type":"ContainerDied","Data":"99486727b440696ecb3f8577b6d50e4e5a3567bf981fd662768ea579439910d0"} Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.021135 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.021941 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/nova-metadata-0" podUID="7ee90982-23a3-4111-9c4e-159828a51a69" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.161:8775/\": read tcp 10.217.0.2:38976->10.217.0.161:8775: read: connection reset by peer" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.022313 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/nova-metadata-0" podUID="7ee90982-23a3-4111-9c4e-159828a51a69" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.161:8775/\": read tcp 10.217.0.2:38978->10.217.0.161:8775: read: connection reset by peer" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.023107 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-2fe2-account-create-update-t264m" event={"ID":"cdf89a68-b15b-4f2d-9016-55fb28011197","Type":"ContainerDied","Data":"694277bab699d2d3b5f0bd6717228950d8203ba374538cd7f1fad15bc43468a6"} Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.023334 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-2fe2-account-create-update-t264m" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.027228 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-f7c7-account-create-update-hf7rn" event={"ID":"0f5a8541-5b86-4fff-8f94-18b8b5ee3ebf","Type":"ContainerDied","Data":"94275ce4e738c54ea3dc1f9f5f7f981eb5e6cfb78172d8150a4debc197353c8f"} Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.027301 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-f7c7-account-create-update-hf7rn" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.035001 4558 scope.go:117] "RemoveContainer" containerID="cfc0219326dabcde321d0d0fb037a9ecb6ea36b64ee17a015344e0990a28b989" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.036247 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f19d5a0b-6d74-4b46-86a3-9381feb3f158-kube-api-access-2f625" (OuterVolumeSpecName: "kube-api-access-2f625") pod "f19d5a0b-6d74-4b46-86a3-9381feb3f158" (UID: "f19d5a0b-6d74-4b46-86a3-9381feb3f158"). InnerVolumeSpecName "kube-api-access-2f625". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.040751 4558 generic.go:334] "Generic (PLEG): container finished" podID="f19d5a0b-6d74-4b46-86a3-9381feb3f158" containerID="caebb498b12b9c8930363d5154873da3243854fe3f120dfec66b79198ba1728b" exitCode=0 Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.040825 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"f19d5a0b-6d74-4b46-86a3-9381feb3f158","Type":"ContainerDied","Data":"caebb498b12b9c8930363d5154873da3243854fe3f120dfec66b79198ba1728b"} Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.040846 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"f19d5a0b-6d74-4b46-86a3-9381feb3f158","Type":"ContainerDied","Data":"d4e2f679e54a723449be5a222ad844732a4eba5485e616d791e7b62e71774850"} Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.040927 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.041515 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-55dfc8964d-kck7c" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.043449 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-2d06-account-create-update-2wqtq" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.043533 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-9hlt9" event={"ID":"cff9fbf3-9b9a-4350-8589-8bfe5543d79f","Type":"ContainerStarted","Data":"6c262a153185d7a397e55746fc98d2a04ef3c29b4b9f5a65d7f6916bcf01f3c6"} Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.043693 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-9hlt9" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.048711 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-proxy-96bd7cb78-txfmk"] Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.056455 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/swift-proxy-96bd7cb78-txfmk"] Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.057312 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f19d5a0b-6d74-4b46-86a3-9381feb3f158-scripts" (OuterVolumeSpecName: "scripts") pod "f19d5a0b-6d74-4b46-86a3-9381feb3f158" (UID: "f19d5a0b-6d74-4b46-86a3-9381feb3f158"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.088899 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1a1ac97-a530-4ba0-8c9b-835a2b576c8d-config-data" (OuterVolumeSpecName: "config-data") pod "e1a1ac97-a530-4ba0-8c9b-835a2b576c8d" (UID: "e1a1ac97-a530-4ba0-8c9b-835a2b576c8d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.093204 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1a1ac97-a530-4ba0-8c9b-835a2b576c8d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e1a1ac97-a530-4ba0-8c9b-835a2b576c8d" (UID: "e1a1ac97-a530-4ba0-8c9b-835a2b576c8d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.107505 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f19d5a0b-6d74-4b46-86a3-9381feb3f158-config-data" (OuterVolumeSpecName: "config-data") pod "f19d5a0b-6d74-4b46-86a3-9381feb3f158" (UID: "f19d5a0b-6d74-4b46-86a3-9381feb3f158"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.107647 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1a1ac97-a530-4ba0-8c9b-835a2b576c8d-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "e1a1ac97-a530-4ba0-8c9b-835a2b576c8d" (UID: "e1a1ac97-a530-4ba0-8c9b-835a2b576c8d"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.107812 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b77441a-39f5-4ed5-bf1b-c29900900242-config-data\") pod \"6b77441a-39f5-4ed5-bf1b-c29900900242\" (UID: \"6b77441a-39f5-4ed5-bf1b-c29900900242\") " Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.107987 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b77441a-39f5-4ed5-bf1b-c29900900242-combined-ca-bundle\") pod \"6b77441a-39f5-4ed5-bf1b-c29900900242\" (UID: \"6b77441a-39f5-4ed5-bf1b-c29900900242\") " Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.108050 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b77441a-39f5-4ed5-bf1b-c29900900242-public-tls-certs\") pod \"6b77441a-39f5-4ed5-bf1b-c29900900242\" (UID: \"6b77441a-39f5-4ed5-bf1b-c29900900242\") " Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.108273 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b77441a-39f5-4ed5-bf1b-c29900900242-scripts\") pod \"6b77441a-39f5-4ed5-bf1b-c29900900242\" (UID: \"6b77441a-39f5-4ed5-bf1b-c29900900242\") " Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.108390 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6b77441a-39f5-4ed5-bf1b-c29900900242-logs\") pod \"6b77441a-39f5-4ed5-bf1b-c29900900242\" (UID: \"6b77441a-39f5-4ed5-bf1b-c29900900242\") " Jan 20 17:37:45 crc 
kubenswrapper[4558]: I0120 17:37:45.108417 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b77441a-39f5-4ed5-bf1b-c29900900242-internal-tls-certs\") pod \"6b77441a-39f5-4ed5-bf1b-c29900900242\" (UID: \"6b77441a-39f5-4ed5-bf1b-c29900900242\") " Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.108481 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xxs87\" (UniqueName: \"kubernetes.io/projected/6b77441a-39f5-4ed5-bf1b-c29900900242-kube-api-access-xxs87\") pod \"6b77441a-39f5-4ed5-bf1b-c29900900242\" (UID: \"6b77441a-39f5-4ed5-bf1b-c29900900242\") " Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.115362 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1a1ac97-a530-4ba0-8c9b-835a2b576c8d-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "e1a1ac97-a530-4ba0-8c9b-835a2b576c8d" (UID: "e1a1ac97-a530-4ba0-8c9b-835a2b576c8d"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.115681 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6b77441a-39f5-4ed5-bf1b-c29900900242-logs" (OuterVolumeSpecName: "logs") pod "6b77441a-39f5-4ed5-bf1b-c29900900242" (UID: "6b77441a-39f5-4ed5-bf1b-c29900900242"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.118765 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6b77441a-39f5-4ed5-bf1b-c29900900242-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.118783 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f19d5a0b-6d74-4b46-86a3-9381feb3f158-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.118794 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9mfdq\" (UniqueName: \"kubernetes.io/projected/7ca40caa-67e9-4be0-8697-cf15086736bc-kube-api-access-9mfdq\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.118813 4558 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/e1a1ac97-a530-4ba0-8c9b-835a2b576c8d-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.118823 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e1a1ac97-a530-4ba0-8c9b-835a2b576c8d-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.118837 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e1a1ac97-a530-4ba0-8c9b-835a2b576c8d-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.118847 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f19d5a0b-6d74-4b46-86a3-9381feb3f158-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.118855 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" 
(UniqueName: \"kubernetes.io/secret/f19d5a0b-6d74-4b46-86a3-9381feb3f158-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.118884 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage16-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage16-crc\") on node \"crc\" " Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.118896 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0f5a8541-5b86-4fff-8f94-18b8b5ee3ebf-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.118906 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e1a1ac97-a530-4ba0-8c9b-835a2b576c8d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.118918 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2grqk\" (UniqueName: \"kubernetes.io/projected/0f5a8541-5b86-4fff-8f94-18b8b5ee3ebf-kube-api-access-2grqk\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.118927 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gjr4x\" (UniqueName: \"kubernetes.io/projected/e1a1ac97-a530-4ba0-8c9b-835a2b576c8d-kube-api-access-gjr4x\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.120875 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b77441a-39f5-4ed5-bf1b-c29900900242-scripts" (OuterVolumeSpecName: "scripts") pod "6b77441a-39f5-4ed5-bf1b-c29900900242" (UID: "6b77441a-39f5-4ed5-bf1b-c29900900242"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.120961 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e1a1ac97-a530-4ba0-8c9b-835a2b576c8d-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.120987 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e1a1ac97-a530-4ba0-8c9b-835a2b576c8d-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.121000 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e1a1ac97-a530-4ba0-8c9b-835a2b576c8d-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.121014 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2f625\" (UniqueName: \"kubernetes.io/projected/f19d5a0b-6d74-4b46-86a3-9381feb3f158-kube-api-access-2f625\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.121027 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f19d5a0b-6d74-4b46-86a3-9381feb3f158-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.121037 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e1a1ac97-a530-4ba0-8c9b-835a2b576c8d-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.125407 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6b77441a-39f5-4ed5-bf1b-c29900900242-kube-api-access-xxs87" (OuterVolumeSpecName: "kube-api-access-xxs87") pod "6b77441a-39f5-4ed5-bf1b-c29900900242" (UID: "6b77441a-39f5-4ed5-bf1b-c29900900242"). InnerVolumeSpecName "kube-api-access-xxs87". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.125473 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-9hlt9" podStartSLOduration=5.12314988 podStartE2EDuration="5.12314988s" podCreationTimestamp="2026-01-20 17:37:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:37:45.113755474 +0000 UTC m=+3358.874093442" watchObservedRunningTime="2026-01-20 17:37:45.12314988 +0000 UTC m=+3358.883487847" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.129732 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f19d5a0b-6d74-4b46-86a3-9381feb3f158-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f19d5a0b-6d74-4b46-86a3-9381feb3f158" (UID: "f19d5a0b-6d74-4b46-86a3-9381feb3f158"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.136884 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage16-crc" (UniqueName: "kubernetes.io/local-volume/local-storage16-crc") on node "crc" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.149328 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f19d5a0b-6d74-4b46-86a3-9381feb3f158-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "f19d5a0b-6d74-4b46-86a3-9381feb3f158" (UID: "f19d5a0b-6d74-4b46-86a3-9381feb3f158"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.176327 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b77441a-39f5-4ed5-bf1b-c29900900242-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6b77441a-39f5-4ed5-bf1b-c29900900242" (UID: "6b77441a-39f5-4ed5-bf1b-c29900900242"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.195544 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b77441a-39f5-4ed5-bf1b-c29900900242-config-data" (OuterVolumeSpecName: "config-data") pod "6b77441a-39f5-4ed5-bf1b-c29900900242" (UID: "6b77441a-39f5-4ed5-bf1b-c29900900242"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.215975 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b77441a-39f5-4ed5-bf1b-c29900900242-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "6b77441a-39f5-4ed5-bf1b-c29900900242" (UID: "6b77441a-39f5-4ed5-bf1b-c29900900242"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.234855 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b77441a-39f5-4ed5-bf1b-c29900900242-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "6b77441a-39f5-4ed5-bf1b-c29900900242" (UID: "6b77441a-39f5-4ed5-bf1b-c29900900242"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.246291 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b77441a-39f5-4ed5-bf1b-c29900900242-internal-tls-certs\") pod \"6b77441a-39f5-4ed5-bf1b-c29900900242\" (UID: \"6b77441a-39f5-4ed5-bf1b-c29900900242\") " Jan 20 17:37:45 crc kubenswrapper[4558]: W0120 17:37:45.246463 4558 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/6b77441a-39f5-4ed5-bf1b-c29900900242/volumes/kubernetes.io~secret/internal-tls-certs Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.246659 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b77441a-39f5-4ed5-bf1b-c29900900242-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "6b77441a-39f5-4ed5-bf1b-c29900900242" (UID: "6b77441a-39f5-4ed5-bf1b-c29900900242"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.252945 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f19d5a0b-6d74-4b46-86a3-9381feb3f158-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.252988 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b77441a-39f5-4ed5-bf1b-c29900900242-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.253002 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b77441a-39f5-4ed5-bf1b-c29900900242-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.253015 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xxs87\" (UniqueName: \"kubernetes.io/projected/6b77441a-39f5-4ed5-bf1b-c29900900242-kube-api-access-xxs87\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.253028 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b77441a-39f5-4ed5-bf1b-c29900900242-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.253042 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f19d5a0b-6d74-4b46-86a3-9381feb3f158-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.253052 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b77441a-39f5-4ed5-bf1b-c29900900242-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.253061 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b77441a-39f5-4ed5-bf1b-c29900900242-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.253070 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage16-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage16-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.292603 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-2d06-account-create-update-2wqtq" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.310206 4558 scope.go:117] "RemoveContainer" containerID="f401d7772f0d907bbf33421516a8e607439cd73653fec584dcb05860382f5a83" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.318928 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovn-northd-0_c570a7ea-d296-44bc-a48f-6dd8be9754d0/ovn-northd/0.log" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.318992 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.320637 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.332282 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-2fe2-account-create-update-t264m"] Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.347798 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-2fe2-account-create-update-t264m"] Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.360084 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.367336 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.393235 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-f7c7-account-create-update-hf7rn"] Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.399642 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-f7c7-account-create-update-hf7rn"] Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.435841 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-b147-account-create-update-zdjzj"] Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.459095 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c570a7ea-d296-44bc-a48f-6dd8be9754d0-config\") pod \"c570a7ea-d296-44bc-a48f-6dd8be9754d0\" (UID: \"c570a7ea-d296-44bc-a48f-6dd8be9754d0\") " Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.459140 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/c570a7ea-d296-44bc-a48f-6dd8be9754d0-metrics-certs-tls-certs\") pod \"c570a7ea-d296-44bc-a48f-6dd8be9754d0\" (UID: \"c570a7ea-d296-44bc-a48f-6dd8be9754d0\") " Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.459190 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"f7881580-f0a5-47a0-9622-3927bcce668a\" (UID: \"f7881580-f0a5-47a0-9622-3927bcce668a\") " Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.459226 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7881580-f0a5-47a0-9622-3927bcce668a-combined-ca-bundle\") pod \"f7881580-f0a5-47a0-9622-3927bcce668a\" (UID: \"f7881580-f0a5-47a0-9622-3927bcce668a\") " Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.459261 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f7881580-f0a5-47a0-9622-3927bcce668a-scripts\") pod \"f7881580-f0a5-47a0-9622-3927bcce668a\" (UID: \"f7881580-f0a5-47a0-9622-3927bcce668a\") " Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.459289 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f7881580-f0a5-47a0-9622-3927bcce668a-httpd-run\") pod \"f7881580-f0a5-47a0-9622-3927bcce668a\" (UID: \"f7881580-f0a5-47a0-9622-3927bcce668a\") " Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.459340 4558 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c570a7ea-d296-44bc-a48f-6dd8be9754d0-combined-ca-bundle\") pod \"c570a7ea-d296-44bc-a48f-6dd8be9754d0\" (UID: \"c570a7ea-d296-44bc-a48f-6dd8be9754d0\") " Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.459367 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c570a7ea-d296-44bc-a48f-6dd8be9754d0-scripts\") pod \"c570a7ea-d296-44bc-a48f-6dd8be9754d0\" (UID: \"c570a7ea-d296-44bc-a48f-6dd8be9754d0\") " Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.459410 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f7881580-f0a5-47a0-9622-3927bcce668a-logs\") pod \"f7881580-f0a5-47a0-9622-3927bcce668a\" (UID: \"f7881580-f0a5-47a0-9622-3927bcce668a\") " Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.459500 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f7881580-f0a5-47a0-9622-3927bcce668a-internal-tls-certs\") pod \"f7881580-f0a5-47a0-9622-3927bcce668a\" (UID: \"f7881580-f0a5-47a0-9622-3927bcce668a\") " Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.459550 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p8cm8\" (UniqueName: \"kubernetes.io/projected/c570a7ea-d296-44bc-a48f-6dd8be9754d0-kube-api-access-p8cm8\") pod \"c570a7ea-d296-44bc-a48f-6dd8be9754d0\" (UID: \"c570a7ea-d296-44bc-a48f-6dd8be9754d0\") " Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.459594 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/c570a7ea-d296-44bc-a48f-6dd8be9754d0-ovn-rundir\") pod \"c570a7ea-d296-44bc-a48f-6dd8be9754d0\" (UID: \"c570a7ea-d296-44bc-a48f-6dd8be9754d0\") " Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.459654 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/c570a7ea-d296-44bc-a48f-6dd8be9754d0-ovn-northd-tls-certs\") pod \"c570a7ea-d296-44bc-a48f-6dd8be9754d0\" (UID: \"c570a7ea-d296-44bc-a48f-6dd8be9754d0\") " Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.459696 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7881580-f0a5-47a0-9622-3927bcce668a-config-data\") pod \"f7881580-f0a5-47a0-9622-3927bcce668a\" (UID: \"f7881580-f0a5-47a0-9622-3927bcce668a\") " Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.459787 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xjrx8\" (UniqueName: \"kubernetes.io/projected/f7881580-f0a5-47a0-9622-3927bcce668a-kube-api-access-xjrx8\") pod \"f7881580-f0a5-47a0-9622-3927bcce668a\" (UID: \"f7881580-f0a5-47a0-9622-3927bcce668a\") " Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.461102 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c570a7ea-d296-44bc-a48f-6dd8be9754d0-config" (OuterVolumeSpecName: "config") pod "c570a7ea-d296-44bc-a48f-6dd8be9754d0" (UID: "c570a7ea-d296-44bc-a48f-6dd8be9754d0"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.470529 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c570a7ea-d296-44bc-a48f-6dd8be9754d0-scripts" (OuterVolumeSpecName: "scripts") pod "c570a7ea-d296-44bc-a48f-6dd8be9754d0" (UID: "c570a7ea-d296-44bc-a48f-6dd8be9754d0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.470955 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f7881580-f0a5-47a0-9622-3927bcce668a-logs" (OuterVolumeSpecName: "logs") pod "f7881580-f0a5-47a0-9622-3927bcce668a" (UID: "f7881580-f0a5-47a0-9622-3927bcce668a"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.472288 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c570a7ea-d296-44bc-a48f-6dd8be9754d0-ovn-rundir" (OuterVolumeSpecName: "ovn-rundir") pod "c570a7ea-d296-44bc-a48f-6dd8be9754d0" (UID: "c570a7ea-d296-44bc-a48f-6dd8be9754d0"). InnerVolumeSpecName "ovn-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.467392 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f7881580-f0a5-47a0-9622-3927bcce668a-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "f7881580-f0a5-47a0-9622-3927bcce668a" (UID: "f7881580-f0a5-47a0-9622-3927bcce668a"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.482331 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c570a7ea-d296-44bc-a48f-6dd8be9754d0-kube-api-access-p8cm8" (OuterVolumeSpecName: "kube-api-access-p8cm8") pod "c570a7ea-d296-44bc-a48f-6dd8be9754d0" (UID: "c570a7ea-d296-44bc-a48f-6dd8be9754d0"). InnerVolumeSpecName "kube-api-access-p8cm8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.491904 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage15-crc" (OuterVolumeSpecName: "glance") pod "f7881580-f0a5-47a0-9622-3927bcce668a" (UID: "f7881580-f0a5-47a0-9622-3927bcce668a"). InnerVolumeSpecName "local-storage15-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.492878 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c570a7ea-d296-44bc-a48f-6dd8be9754d0-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.492910 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") on node \"crc\" " Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.492924 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f7881580-f0a5-47a0-9622-3927bcce668a-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.492946 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c570a7ea-d296-44bc-a48f-6dd8be9754d0-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.492956 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f7881580-f0a5-47a0-9622-3927bcce668a-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.492970 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p8cm8\" (UniqueName: \"kubernetes.io/projected/c570a7ea-d296-44bc-a48f-6dd8be9754d0-kube-api-access-p8cm8\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.492982 4558 reconciler_common.go:293] "Volume detached for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/c570a7ea-d296-44bc-a48f-6dd8be9754d0-ovn-rundir\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.494317 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7881580-f0a5-47a0-9622-3927bcce668a-scripts" (OuterVolumeSpecName: "scripts") pod "f7881580-f0a5-47a0-9622-3927bcce668a" (UID: "f7881580-f0a5-47a0-9622-3927bcce668a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.495144 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-b147-account-create-update-zdjzj"] Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.496551 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f7881580-f0a5-47a0-9622-3927bcce668a-kube-api-access-xjrx8" (OuterVolumeSpecName: "kube-api-access-xjrx8") pod "f7881580-f0a5-47a0-9622-3927bcce668a" (UID: "f7881580-f0a5-47a0-9622-3927bcce668a"). InnerVolumeSpecName "kube-api-access-xjrx8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.497577 4558 scope.go:117] "RemoveContainer" containerID="cfc0219326dabcde321d0d0fb037a9ecb6ea36b64ee17a015344e0990a28b989" Jan 20 17:37:45 crc kubenswrapper[4558]: E0120 17:37:45.499657 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cfc0219326dabcde321d0d0fb037a9ecb6ea36b64ee17a015344e0990a28b989\": container with ID starting with cfc0219326dabcde321d0d0fb037a9ecb6ea36b64ee17a015344e0990a28b989 not found: ID does not exist" containerID="cfc0219326dabcde321d0d0fb037a9ecb6ea36b64ee17a015344e0990a28b989" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.499695 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cfc0219326dabcde321d0d0fb037a9ecb6ea36b64ee17a015344e0990a28b989"} err="failed to get container status \"cfc0219326dabcde321d0d0fb037a9ecb6ea36b64ee17a015344e0990a28b989\": rpc error: code = NotFound desc = could not find container \"cfc0219326dabcde321d0d0fb037a9ecb6ea36b64ee17a015344e0990a28b989\": container with ID starting with cfc0219326dabcde321d0d0fb037a9ecb6ea36b64ee17a015344e0990a28b989 not found: ID does not exist" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.499720 4558 scope.go:117] "RemoveContainer" containerID="f401d7772f0d907bbf33421516a8e607439cd73653fec584dcb05860382f5a83" Jan 20 17:37:45 crc kubenswrapper[4558]: E0120 17:37:45.500830 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f401d7772f0d907bbf33421516a8e607439cd73653fec584dcb05860382f5a83\": container with ID starting with f401d7772f0d907bbf33421516a8e607439cd73653fec584dcb05860382f5a83 not found: ID does not exist" containerID="f401d7772f0d907bbf33421516a8e607439cd73653fec584dcb05860382f5a83" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.500852 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f401d7772f0d907bbf33421516a8e607439cd73653fec584dcb05860382f5a83"} err="failed to get container status \"f401d7772f0d907bbf33421516a8e607439cd73653fec584dcb05860382f5a83\": rpc error: code = NotFound desc = could not find container \"f401d7772f0d907bbf33421516a8e607439cd73653fec584dcb05860382f5a83\": container with ID starting with f401d7772f0d907bbf33421516a8e607439cd73653fec584dcb05860382f5a83 not found: ID does not exist" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.500876 4558 scope.go:117] "RemoveContainer" containerID="cc29bf400e020c3de6a91d31522715a48fb29ae6d1627268b6c2963ee53e1761" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.501513 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.508053 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.542049 4558 scope.go:117] "RemoveContainer" containerID="1c07e4b28b7cf0b4abe599ad445f1580d0e65d644cc547c83bfc14814b631d5c" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.543029 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.550575 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.556294 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.565796 4558 scope.go:117] "RemoveContainer" containerID="cc29bf400e020c3de6a91d31522715a48fb29ae6d1627268b6c2963ee53e1761" Jan 20 17:37:45 crc kubenswrapper[4558]: E0120 17:37:45.566538 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cc29bf400e020c3de6a91d31522715a48fb29ae6d1627268b6c2963ee53e1761\": container with ID starting with cc29bf400e020c3de6a91d31522715a48fb29ae6d1627268b6c2963ee53e1761 not found: ID does not exist" containerID="cc29bf400e020c3de6a91d31522715a48fb29ae6d1627268b6c2963ee53e1761" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.566575 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cc29bf400e020c3de6a91d31522715a48fb29ae6d1627268b6c2963ee53e1761"} err="failed to get container status \"cc29bf400e020c3de6a91d31522715a48fb29ae6d1627268b6c2963ee53e1761\": rpc error: code = NotFound desc = could not find container \"cc29bf400e020c3de6a91d31522715a48fb29ae6d1627268b6c2963ee53e1761\": container with ID starting with cc29bf400e020c3de6a91d31522715a48fb29ae6d1627268b6c2963ee53e1761 not found: ID does not exist" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.566594 4558 scope.go:117] "RemoveContainer" containerID="1c07e4b28b7cf0b4abe599ad445f1580d0e65d644cc547c83bfc14814b631d5c" Jan 20 17:37:45 crc kubenswrapper[4558]: E0120 17:37:45.567474 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1c07e4b28b7cf0b4abe599ad445f1580d0e65d644cc547c83bfc14814b631d5c\": container with ID starting with 1c07e4b28b7cf0b4abe599ad445f1580d0e65d644cc547c83bfc14814b631d5c not found: ID does not exist" containerID="1c07e4b28b7cf0b4abe599ad445f1580d0e65d644cc547c83bfc14814b631d5c" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.567512 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1c07e4b28b7cf0b4abe599ad445f1580d0e65d644cc547c83bfc14814b631d5c"} err="failed to get container status \"1c07e4b28b7cf0b4abe599ad445f1580d0e65d644cc547c83bfc14814b631d5c\": rpc error: code = NotFound desc = could not find container \"1c07e4b28b7cf0b4abe599ad445f1580d0e65d644cc547c83bfc14814b631d5c\": container with ID starting with 1c07e4b28b7cf0b4abe599ad445f1580d0e65d644cc547c83bfc14814b631d5c not found: ID does not exist" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.567535 4558 scope.go:117] "RemoveContainer" containerID="f5120513f44b4adc059dbacc3c6c22162c7eee356a5828f446ded77ed4759a9f" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.573649 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.578416 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c570a7ea-d296-44bc-a48f-6dd8be9754d0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c570a7ea-d296-44bc-a48f-6dd8be9754d0" (UID: 
"c570a7ea-d296-44bc-a48f-6dd8be9754d0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.579379 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage15-crc" (UniqueName: "kubernetes.io/local-volume/local-storage15-crc") on node "crc" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.580444 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.591545 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7881580-f0a5-47a0-9622-3927bcce668a-config-data" (OuterVolumeSpecName: "config-data") pod "f7881580-f0a5-47a0-9622-3927bcce668a" (UID: "f7881580-f0a5-47a0-9622-3927bcce668a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.592366 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7881580-f0a5-47a0-9622-3927bcce668a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f7881580-f0a5-47a0-9622-3927bcce668a" (UID: "f7881580-f0a5-47a0-9622-3927bcce668a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.594859 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7881580-f0a5-47a0-9622-3927bcce668a-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.594895 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xjrx8\" (UniqueName: \"kubernetes.io/projected/f7881580-f0a5-47a0-9622-3927bcce668a-kube-api-access-xjrx8\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.594905 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.594915 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7881580-f0a5-47a0-9622-3927bcce668a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.594924 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f7881580-f0a5-47a0-9622-3927bcce668a-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.594935 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c570a7ea-d296-44bc-a48f-6dd8be9754d0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.608278 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c570a7ea-d296-44bc-a48f-6dd8be9754d0-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "c570a7ea-d296-44bc-a48f-6dd8be9754d0" (UID: "c570a7ea-d296-44bc-a48f-6dd8be9754d0"). InnerVolumeSpecName "metrics-certs-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.617098 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c570a7ea-d296-44bc-a48f-6dd8be9754d0-ovn-northd-tls-certs" (OuterVolumeSpecName: "ovn-northd-tls-certs") pod "c570a7ea-d296-44bc-a48f-6dd8be9754d0" (UID: "c570a7ea-d296-44bc-a48f-6dd8be9754d0"). InnerVolumeSpecName "ovn-northd-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.627952 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7881580-f0a5-47a0-9622-3927bcce668a-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "f7881580-f0a5-47a0-9622-3927bcce668a" (UID: "f7881580-f0a5-47a0-9622-3927bcce668a"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.645976 4558 scope.go:117] "RemoveContainer" containerID="1adea188750d94315e6ae35bd310f13626df1c915eb5fa791f5e8484767fcbb0" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.671560 4558 scope.go:117] "RemoveContainer" containerID="caebb498b12b9c8930363d5154873da3243854fe3f120dfec66b79198ba1728b" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.694159 4558 scope.go:117] "RemoveContainer" containerID="32a0b1b4cb9343de4b8cd2f43bcb90496f890496ba1d6d3239983dac00184a6b" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.695679 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-77z9r\" (UniqueName: \"kubernetes.io/projected/79a4b689-d81b-44da-8baa-88fc6ce78172-kube-api-access-77z9r\") pod \"79a4b689-d81b-44da-8baa-88fc6ce78172\" (UID: \"79a4b689-d81b-44da-8baa-88fc6ce78172\") " Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.695713 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79a4b689-d81b-44da-8baa-88fc6ce78172-combined-ca-bundle\") pod \"79a4b689-d81b-44da-8baa-88fc6ce78172\" (UID: \"79a4b689-d81b-44da-8baa-88fc6ce78172\") " Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.695782 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1addf16e-1af6-431b-aff1-05c48a952f5a-run-httpd\") pod \"1addf16e-1af6-431b-aff1-05c48a952f5a\" (UID: \"1addf16e-1af6-431b-aff1-05c48a952f5a\") " Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.695802 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ee90982-23a3-4111-9c4e-159828a51a69-config-data\") pod \"7ee90982-23a3-4111-9c4e-159828a51a69\" (UID: \"7ee90982-23a3-4111-9c4e-159828a51a69\") " Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.695823 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/79a4b689-d81b-44da-8baa-88fc6ce78172-config-data-default\") pod \"79a4b689-d81b-44da-8baa-88fc6ce78172\" (UID: \"79a4b689-d81b-44da-8baa-88fc6ce78172\") " Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.695857 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ee90982-23a3-4111-9c4e-159828a51a69-combined-ca-bundle\") pod 
\"7ee90982-23a3-4111-9c4e-159828a51a69\" (UID: \"7ee90982-23a3-4111-9c4e-159828a51a69\") " Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.695904 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/79a4b689-d81b-44da-8baa-88fc6ce78172-config-data-generated\") pod \"79a4b689-d81b-44da-8baa-88fc6ce78172\" (UID: \"79a4b689-d81b-44da-8baa-88fc6ce78172\") " Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.695922 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1addf16e-1af6-431b-aff1-05c48a952f5a-log-httpd\") pod \"1addf16e-1af6-431b-aff1-05c48a952f5a\" (UID: \"1addf16e-1af6-431b-aff1-05c48a952f5a\") " Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.695957 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/79a4b689-d81b-44da-8baa-88fc6ce78172-operator-scripts\") pod \"79a4b689-d81b-44da-8baa-88fc6ce78172\" (UID: \"79a4b689-d81b-44da-8baa-88fc6ce78172\") " Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.695978 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1addf16e-1af6-431b-aff1-05c48a952f5a-scripts\") pod \"1addf16e-1af6-431b-aff1-05c48a952f5a\" (UID: \"1addf16e-1af6-431b-aff1-05c48a952f5a\") " Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.696012 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"79a4b689-d81b-44da-8baa-88fc6ce78172\" (UID: \"79a4b689-d81b-44da-8baa-88fc6ce78172\") " Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.696039 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7ee90982-23a3-4111-9c4e-159828a51a69-logs\") pod \"7ee90982-23a3-4111-9c4e-159828a51a69\" (UID: \"7ee90982-23a3-4111-9c4e-159828a51a69\") " Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.696062 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/79a4b689-d81b-44da-8baa-88fc6ce78172-kolla-config\") pod \"79a4b689-d81b-44da-8baa-88fc6ce78172\" (UID: \"79a4b689-d81b-44da-8baa-88fc6ce78172\") " Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.696106 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4dq7s\" (UniqueName: \"kubernetes.io/projected/7ee90982-23a3-4111-9c4e-159828a51a69-kube-api-access-4dq7s\") pod \"7ee90982-23a3-4111-9c4e-159828a51a69\" (UID: \"7ee90982-23a3-4111-9c4e-159828a51a69\") " Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.696233 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1addf16e-1af6-431b-aff1-05c48a952f5a-sg-core-conf-yaml\") pod \"1addf16e-1af6-431b-aff1-05c48a952f5a\" (UID: \"1addf16e-1af6-431b-aff1-05c48a952f5a\") " Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.696268 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ee90982-23a3-4111-9c4e-159828a51a69-nova-metadata-tls-certs\") pod \"7ee90982-23a3-4111-9c4e-159828a51a69\" (UID: 
\"7ee90982-23a3-4111-9c4e-159828a51a69\") " Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.696300 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pnzfr\" (UniqueName: \"kubernetes.io/projected/1addf16e-1af6-431b-aff1-05c48a952f5a-kube-api-access-pnzfr\") pod \"1addf16e-1af6-431b-aff1-05c48a952f5a\" (UID: \"1addf16e-1af6-431b-aff1-05c48a952f5a\") " Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.696327 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/1addf16e-1af6-431b-aff1-05c48a952f5a-ceilometer-tls-certs\") pod \"1addf16e-1af6-431b-aff1-05c48a952f5a\" (UID: \"1addf16e-1af6-431b-aff1-05c48a952f5a\") " Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.696350 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1addf16e-1af6-431b-aff1-05c48a952f5a-combined-ca-bundle\") pod \"1addf16e-1af6-431b-aff1-05c48a952f5a\" (UID: \"1addf16e-1af6-431b-aff1-05c48a952f5a\") " Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.696375 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/79a4b689-d81b-44da-8baa-88fc6ce78172-galera-tls-certs\") pod \"79a4b689-d81b-44da-8baa-88fc6ce78172\" (UID: \"79a4b689-d81b-44da-8baa-88fc6ce78172\") " Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.696445 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1addf16e-1af6-431b-aff1-05c48a952f5a-config-data\") pod \"1addf16e-1af6-431b-aff1-05c48a952f5a\" (UID: \"1addf16e-1af6-431b-aff1-05c48a952f5a\") " Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.696894 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f7881580-f0a5-47a0-9622-3927bcce668a-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.696912 4558 reconciler_common.go:293] "Volume detached for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/c570a7ea-d296-44bc-a48f-6dd8be9754d0-ovn-northd-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.696921 4558 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/c570a7ea-d296-44bc-a48f-6dd8be9754d0-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.697543 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/79a4b689-d81b-44da-8baa-88fc6ce78172-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "79a4b689-d81b-44da-8baa-88fc6ce78172" (UID: "79a4b689-d81b-44da-8baa-88fc6ce78172"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.697553 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1addf16e-1af6-431b-aff1-05c48a952f5a-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "1addf16e-1af6-431b-aff1-05c48a952f5a" (UID: "1addf16e-1af6-431b-aff1-05c48a952f5a"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.698080 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1addf16e-1af6-431b-aff1-05c48a952f5a-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "1addf16e-1af6-431b-aff1-05c48a952f5a" (UID: "1addf16e-1af6-431b-aff1-05c48a952f5a"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.698276 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/79a4b689-d81b-44da-8baa-88fc6ce78172-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "79a4b689-d81b-44da-8baa-88fc6ce78172" (UID: "79a4b689-d81b-44da-8baa-88fc6ce78172"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.698422 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/79a4b689-d81b-44da-8baa-88fc6ce78172-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "79a4b689-d81b-44da-8baa-88fc6ce78172" (UID: "79a4b689-d81b-44da-8baa-88fc6ce78172"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.698919 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7ee90982-23a3-4111-9c4e-159828a51a69-logs" (OuterVolumeSpecName: "logs") pod "7ee90982-23a3-4111-9c4e-159828a51a69" (UID: "7ee90982-23a3-4111-9c4e-159828a51a69"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.699695 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/79a4b689-d81b-44da-8baa-88fc6ce78172-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "79a4b689-d81b-44da-8baa-88fc6ce78172" (UID: "79a4b689-d81b-44da-8baa-88fc6ce78172"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.700028 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1addf16e-1af6-431b-aff1-05c48a952f5a-kube-api-access-pnzfr" (OuterVolumeSpecName: "kube-api-access-pnzfr") pod "1addf16e-1af6-431b-aff1-05c48a952f5a" (UID: "1addf16e-1af6-431b-aff1-05c48a952f5a"). InnerVolumeSpecName "kube-api-access-pnzfr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.703677 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7ee90982-23a3-4111-9c4e-159828a51a69-kube-api-access-4dq7s" (OuterVolumeSpecName: "kube-api-access-4dq7s") pod "7ee90982-23a3-4111-9c4e-159828a51a69" (UID: "7ee90982-23a3-4111-9c4e-159828a51a69"). InnerVolumeSpecName "kube-api-access-4dq7s". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.712621 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1addf16e-1af6-431b-aff1-05c48a952f5a-scripts" (OuterVolumeSpecName: "scripts") pod "1addf16e-1af6-431b-aff1-05c48a952f5a" (UID: "1addf16e-1af6-431b-aff1-05c48a952f5a"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.712657 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/79a4b689-d81b-44da-8baa-88fc6ce78172-kube-api-access-77z9r" (OuterVolumeSpecName: "kube-api-access-77z9r") pod "79a4b689-d81b-44da-8baa-88fc6ce78172" (UID: "79a4b689-d81b-44da-8baa-88fc6ce78172"). InnerVolumeSpecName "kube-api-access-77z9r". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.724595 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage02-crc" (OuterVolumeSpecName: "mysql-db") pod "79a4b689-d81b-44da-8baa-88fc6ce78172" (UID: "79a4b689-d81b-44da-8baa-88fc6ce78172"). InnerVolumeSpecName "local-storage02-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.729441 4558 scope.go:117] "RemoveContainer" containerID="caebb498b12b9c8930363d5154873da3243854fe3f120dfec66b79198ba1728b" Jan 20 17:37:45 crc kubenswrapper[4558]: E0120 17:37:45.730853 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"caebb498b12b9c8930363d5154873da3243854fe3f120dfec66b79198ba1728b\": container with ID starting with caebb498b12b9c8930363d5154873da3243854fe3f120dfec66b79198ba1728b not found: ID does not exist" containerID="caebb498b12b9c8930363d5154873da3243854fe3f120dfec66b79198ba1728b" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.730899 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"caebb498b12b9c8930363d5154873da3243854fe3f120dfec66b79198ba1728b"} err="failed to get container status \"caebb498b12b9c8930363d5154873da3243854fe3f120dfec66b79198ba1728b\": rpc error: code = NotFound desc = could not find container \"caebb498b12b9c8930363d5154873da3243854fe3f120dfec66b79198ba1728b\": container with ID starting with caebb498b12b9c8930363d5154873da3243854fe3f120dfec66b79198ba1728b not found: ID does not exist" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.730924 4558 scope.go:117] "RemoveContainer" containerID="32a0b1b4cb9343de4b8cd2f43bcb90496f890496ba1d6d3239983dac00184a6b" Jan 20 17:37:45 crc kubenswrapper[4558]: E0120 17:37:45.744336 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"32a0b1b4cb9343de4b8cd2f43bcb90496f890496ba1d6d3239983dac00184a6b\": container with ID starting with 32a0b1b4cb9343de4b8cd2f43bcb90496f890496ba1d6d3239983dac00184a6b not found: ID does not exist" containerID="32a0b1b4cb9343de4b8cd2f43bcb90496f890496ba1d6d3239983dac00184a6b" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.744380 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"32a0b1b4cb9343de4b8cd2f43bcb90496f890496ba1d6d3239983dac00184a6b"} err="failed to get container status \"32a0b1b4cb9343de4b8cd2f43bcb90496f890496ba1d6d3239983dac00184a6b\": rpc error: code = NotFound desc = could not find container \"32a0b1b4cb9343de4b8cd2f43bcb90496f890496ba1d6d3239983dac00184a6b\": container with ID starting with 32a0b1b4cb9343de4b8cd2f43bcb90496f890496ba1d6d3239983dac00184a6b not found: ID does not exist" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.754047 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/secret/1addf16e-1af6-431b-aff1-05c48a952f5a-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "1addf16e-1af6-431b-aff1-05c48a952f5a" (UID: "1addf16e-1af6-431b-aff1-05c48a952f5a"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.756696 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.762930 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ee90982-23a3-4111-9c4e-159828a51a69-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7ee90982-23a3-4111-9c4e-159828a51a69" (UID: "7ee90982-23a3-4111-9c4e-159828a51a69"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.772495 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ee90982-23a3-4111-9c4e-159828a51a69-config-data" (OuterVolumeSpecName: "config-data") pod "7ee90982-23a3-4111-9c4e-159828a51a69" (UID: "7ee90982-23a3-4111-9c4e-159828a51a69"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.776951 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ee90982-23a3-4111-9c4e-159828a51a69-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "7ee90982-23a3-4111-9c4e-159828a51a69" (UID: "7ee90982-23a3-4111-9c4e-159828a51a69"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.778745 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/79a4b689-d81b-44da-8baa-88fc6ce78172-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "79a4b689-d81b-44da-8baa-88fc6ce78172" (UID: "79a4b689-d81b-44da-8baa-88fc6ce78172"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.798115 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8rnxx\" (UniqueName: \"kubernetes.io/projected/ccf99999-f746-48ce-a387-642245ef0fd5-kube-api-access-8rnxx\") pod \"keystone-2d06-account-create-update-2wqtq\" (UID: \"ccf99999-f746-48ce-a387-642245ef0fd5\") " pod="openstack-kuttl-tests/keystone-2d06-account-create-update-2wqtq" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.798190 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ccf99999-f746-48ce-a387-642245ef0fd5-operator-scripts\") pod \"keystone-2d06-account-create-update-2wqtq\" (UID: \"ccf99999-f746-48ce-a387-642245ef0fd5\") " pod="openstack-kuttl-tests/keystone-2d06-account-create-update-2wqtq" Jan 20 17:37:45 crc kubenswrapper[4558]: E0120 17:37:45.798275 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 17:37:45 crc kubenswrapper[4558]: E0120 17:37:45.798356 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/ccf99999-f746-48ce-a387-642245ef0fd5-operator-scripts podName:ccf99999-f746-48ce-a387-642245ef0fd5 nodeName:}" failed. No retries permitted until 2026-01-20 17:37:47.798338068 +0000 UTC m=+3361.558676035 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/ccf99999-f746-48ce-a387-642245ef0fd5-operator-scripts") pod "keystone-2d06-account-create-update-2wqtq" (UID: "ccf99999-f746-48ce-a387-642245ef0fd5") : configmap "openstack-scripts" not found Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.798386 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-77z9r\" (UniqueName: \"kubernetes.io/projected/79a4b689-d81b-44da-8baa-88fc6ce78172-kube-api-access-77z9r\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.798402 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/79a4b689-d81b-44da-8baa-88fc6ce78172-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.798412 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1addf16e-1af6-431b-aff1-05c48a952f5a-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.798422 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ee90982-23a3-4111-9c4e-159828a51a69-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.798433 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/79a4b689-d81b-44da-8baa-88fc6ce78172-config-data-default\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.798442 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ee90982-23a3-4111-9c4e-159828a51a69-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.798452 4558 reconciler_common.go:293] "Volume detached for volume 
\"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/79a4b689-d81b-44da-8baa-88fc6ce78172-config-data-generated\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.798461 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1addf16e-1af6-431b-aff1-05c48a952f5a-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.798471 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/79a4b689-d81b-44da-8baa-88fc6ce78172-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.798479 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1addf16e-1af6-431b-aff1-05c48a952f5a-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.798500 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" " Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.798509 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7ee90982-23a3-4111-9c4e-159828a51a69-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.798517 4558 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/79a4b689-d81b-44da-8baa-88fc6ce78172-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.798528 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4dq7s\" (UniqueName: \"kubernetes.io/projected/7ee90982-23a3-4111-9c4e-159828a51a69-kube-api-access-4dq7s\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.798536 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1addf16e-1af6-431b-aff1-05c48a952f5a-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.798544 4558 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ee90982-23a3-4111-9c4e-159828a51a69-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.798552 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pnzfr\" (UniqueName: \"kubernetes.io/projected/1addf16e-1af6-431b-aff1-05c48a952f5a-kube-api-access-pnzfr\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:45 crc kubenswrapper[4558]: E0120 17:37:45.801402 4558 projected.go:194] Error preparing data for projected volume kube-api-access-8rnxx for pod openstack-kuttl-tests/keystone-2d06-account-create-update-2wqtq: failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 17:37:45 crc kubenswrapper[4558]: E0120 17:37:45.801466 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/ccf99999-f746-48ce-a387-642245ef0fd5-kube-api-access-8rnxx podName:ccf99999-f746-48ce-a387-642245ef0fd5 nodeName:}" failed. No retries permitted until 2026-01-20 17:37:47.801446132 +0000 UTC m=+3361.561784109 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-8rnxx" (UniqueName: "kubernetes.io/projected/ccf99999-f746-48ce-a387-642245ef0fd5-kube-api-access-8rnxx") pod "keystone-2d06-account-create-update-2wqtq" (UID: "ccf99999-f746-48ce-a387-642245ef0fd5") : failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.802498 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/79a4b689-d81b-44da-8baa-88fc6ce78172-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "79a4b689-d81b-44da-8baa-88fc6ce78172" (UID: "79a4b689-d81b-44da-8baa-88fc6ce78172"). InnerVolumeSpecName "galera-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.841046 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1addf16e-1af6-431b-aff1-05c48a952f5a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1addf16e-1af6-431b-aff1-05c48a952f5a" (UID: "1addf16e-1af6-431b-aff1-05c48a952f5a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.843950 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage02-crc" (UniqueName: "kubernetes.io/local-volume/local-storage02-crc") on node "crc" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.864576 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1addf16e-1af6-431b-aff1-05c48a952f5a-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "1addf16e-1af6-431b-aff1-05c48a952f5a" (UID: "1addf16e-1af6-431b-aff1-05c48a952f5a"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.886093 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1addf16e-1af6-431b-aff1-05c48a952f5a-config-data" (OuterVolumeSpecName: "config-data") pod "1addf16e-1af6-431b-aff1-05c48a952f5a" (UID: "1addf16e-1af6-431b-aff1-05c48a952f5a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.899775 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0def1343-9e4b-4f74-84bd-3212688b59ce-config-data\") pod \"0def1343-9e4b-4f74-84bd-3212688b59ce\" (UID: \"0def1343-9e4b-4f74-84bd-3212688b59ce\") " Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.900028 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0def1343-9e4b-4f74-84bd-3212688b59ce-combined-ca-bundle\") pod \"0def1343-9e4b-4f74-84bd-3212688b59ce\" (UID: \"0def1343-9e4b-4f74-84bd-3212688b59ce\") " Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.900076 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hcjxf\" (UniqueName: \"kubernetes.io/projected/0def1343-9e4b-4f74-84bd-3212688b59ce-kube-api-access-hcjxf\") pod \"0def1343-9e4b-4f74-84bd-3212688b59ce\" (UID: \"0def1343-9e4b-4f74-84bd-3212688b59ce\") " Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.900785 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.900808 4558 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/1addf16e-1af6-431b-aff1-05c48a952f5a-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.900821 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1addf16e-1af6-431b-aff1-05c48a952f5a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.900831 4558 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/79a4b689-d81b-44da-8baa-88fc6ce78172-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.900840 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1addf16e-1af6-431b-aff1-05c48a952f5a-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.903646 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0def1343-9e4b-4f74-84bd-3212688b59ce-kube-api-access-hcjxf" (OuterVolumeSpecName: "kube-api-access-hcjxf") pod "0def1343-9e4b-4f74-84bd-3212688b59ce" (UID: "0def1343-9e4b-4f74-84bd-3212688b59ce"). InnerVolumeSpecName "kube-api-access-hcjxf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.920062 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0def1343-9e4b-4f74-84bd-3212688b59ce-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0def1343-9e4b-4f74-84bd-3212688b59ce" (UID: "0def1343-9e4b-4f74-84bd-3212688b59ce"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.920223 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0def1343-9e4b-4f74-84bd-3212688b59ce-config-data" (OuterVolumeSpecName: "config-data") pod "0def1343-9e4b-4f74-84bd-3212688b59ce" (UID: "0def1343-9e4b-4f74-84bd-3212688b59ce"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:45 crc kubenswrapper[4558]: I0120 17:37:45.982955 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-api-64bd4857c6-2s9h4" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.006363 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0def1343-9e4b-4f74-84bd-3212688b59ce-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.006392 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0def1343-9e4b-4f74-84bd-3212688b59ce-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.006408 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hcjxf\" (UniqueName: \"kubernetes.io/projected/0def1343-9e4b-4f74-84bd-3212688b59ce-kube-api-access-hcjxf\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.059595 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-55dfc8964d-kck7c" event={"ID":"6b77441a-39f5-4ed5-bf1b-c29900900242","Type":"ContainerDied","Data":"a3c5ec3a558b306e30b65b8420b0b19bc99a95aeffaa62621dadeb4185b84438"} Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.060003 4558 scope.go:117] "RemoveContainer" containerID="bbf204899e737785b34e43ac3dcf8c1621d7b4c38d2b11ab4f044288ae0cdd0a" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.059716 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-55dfc8964d-kck7c" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.065686 4558 generic.go:334] "Generic (PLEG): container finished" podID="94062d42-28cb-4c8a-afa4-f51458dedc6c" containerID="1aa46d5cbab3879dbc6e24ca32547b207cb8e11bc47b9d499882d8000bc7b577" exitCode=0 Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.065746 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-api-64bd4857c6-2s9h4" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.065777 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-64bd4857c6-2s9h4" event={"ID":"94062d42-28cb-4c8a-afa4-f51458dedc6c","Type":"ContainerDied","Data":"1aa46d5cbab3879dbc6e24ca32547b207cb8e11bc47b9d499882d8000bc7b577"} Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.065815 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-64bd4857c6-2s9h4" event={"ID":"94062d42-28cb-4c8a-afa4-f51458dedc6c","Type":"ContainerDied","Data":"59c5e37b9c318b38142437caa43ecc8cb25b5a45951476dadbd6eb66682057b8"} Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.072810 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"79a4b689-d81b-44da-8baa-88fc6ce78172","Type":"ContainerDied","Data":"dc1f5d11078340aab4c21ec4674412ae459987ad6b30aacb6430d494ae31cb3e"} Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.072825 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.082974 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovn-northd-0_c570a7ea-d296-44bc-a48f-6dd8be9754d0/ovn-northd/0.log" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.083036 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"c570a7ea-d296-44bc-a48f-6dd8be9754d0","Type":"ContainerDied","Data":"9eef7e7d47c2973d90654e404fc2609985341dcd776220debaa62a81de5588aa"} Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.083118 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.087557 4558 generic.go:334] "Generic (PLEG): container finished" podID="7ee90982-23a3-4111-9c4e-159828a51a69" containerID="3bb4afac6c286ba59753565000e90ace9365499f3dff7e44738101c6faf5cc39" exitCode=0 Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.087632 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"7ee90982-23a3-4111-9c4e-159828a51a69","Type":"ContainerDied","Data":"3bb4afac6c286ba59753565000e90ace9365499f3dff7e44738101c6faf5cc39"} Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.087656 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"7ee90982-23a3-4111-9c4e-159828a51a69","Type":"ContainerDied","Data":"4af9a0c1b7692f786bcc6daedbc066a18d8652934d6ca178b1e24a0324714a28"} Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.087680 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.093048 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"f7881580-f0a5-47a0-9622-3927bcce668a","Type":"ContainerDied","Data":"d6d4df9057e443dc8c92a45823eeab8e542aa20fffd7d458a84e248b6b67fa96"} Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.093275 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.100957 4558 scope.go:117] "RemoveContainer" containerID="0eec862e72c84cd059688cd69902bbff28efcdcf452151cfde7339967b5160f1" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.101220 4558 generic.go:334] "Generic (PLEG): container finished" podID="0def1343-9e4b-4f74-84bd-3212688b59ce" containerID="1c79880b61b7b00d3acd244cc4b46331c0b20f7998564a054c816dfa317eab5b" exitCode=0 Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.101249 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"0def1343-9e4b-4f74-84bd-3212688b59ce","Type":"ContainerDied","Data":"1c79880b61b7b00d3acd244cc4b46331c0b20f7998564a054c816dfa317eab5b"} Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.101822 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"0def1343-9e4b-4f74-84bd-3212688b59ce","Type":"ContainerDied","Data":"9ce12d640a1a49360840fb100db33b1424f611fbbd9760cea4e53ff395f04875"} Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.101295 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.104856 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-2d06-account-create-update-2wqtq" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.108959 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"1addf16e-1af6-431b-aff1-05c48a952f5a","Type":"ContainerDied","Data":"f902a2ceba04dbde9c216501a45d205187ae4274458bc1599d976afd3b962475"} Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.109099 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.110157 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/94062d42-28cb-4c8a-afa4-f51458dedc6c-public-tls-certs\") pod \"94062d42-28cb-4c8a-afa4-f51458dedc6c\" (UID: \"94062d42-28cb-4c8a-afa4-f51458dedc6c\") " Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.110916 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/94062d42-28cb-4c8a-afa4-f51458dedc6c-logs\") pod \"94062d42-28cb-4c8a-afa4-f51458dedc6c\" (UID: \"94062d42-28cb-4c8a-afa4-f51458dedc6c\") " Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.111025 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/94062d42-28cb-4c8a-afa4-f51458dedc6c-config-data-custom\") pod \"94062d42-28cb-4c8a-afa4-f51458dedc6c\" (UID: \"94062d42-28cb-4c8a-afa4-f51458dedc6c\") " Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.111116 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x6ksk\" (UniqueName: \"kubernetes.io/projected/94062d42-28cb-4c8a-afa4-f51458dedc6c-kube-api-access-x6ksk\") pod \"94062d42-28cb-4c8a-afa4-f51458dedc6c\" (UID: \"94062d42-28cb-4c8a-afa4-f51458dedc6c\") " Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.111231 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/94062d42-28cb-4c8a-afa4-f51458dedc6c-internal-tls-certs\") pod \"94062d42-28cb-4c8a-afa4-f51458dedc6c\" (UID: \"94062d42-28cb-4c8a-afa4-f51458dedc6c\") " Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.111267 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94062d42-28cb-4c8a-afa4-f51458dedc6c-config-data\") pod \"94062d42-28cb-4c8a-afa4-f51458dedc6c\" (UID: \"94062d42-28cb-4c8a-afa4-f51458dedc6c\") " Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.111405 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94062d42-28cb-4c8a-afa4-f51458dedc6c-combined-ca-bundle\") pod \"94062d42-28cb-4c8a-afa4-f51458dedc6c\" (UID: \"94062d42-28cb-4c8a-afa4-f51458dedc6c\") " Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.111415 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-55dfc8964d-kck7c"] Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.112497 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/94062d42-28cb-4c8a-afa4-f51458dedc6c-logs" (OuterVolumeSpecName: "logs") pod "94062d42-28cb-4c8a-afa4-f51458dedc6c" (UID: "94062d42-28cb-4c8a-afa4-f51458dedc6c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.149150 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94062d42-28cb-4c8a-afa4-f51458dedc6c-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "94062d42-28cb-4c8a-afa4-f51458dedc6c" (UID: "94062d42-28cb-4c8a-afa4-f51458dedc6c"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.155289 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/94062d42-28cb-4c8a-afa4-f51458dedc6c-kube-api-access-x6ksk" (OuterVolumeSpecName: "kube-api-access-x6ksk") pod "94062d42-28cb-4c8a-afa4-f51458dedc6c" (UID: "94062d42-28cb-4c8a-afa4-f51458dedc6c"). InnerVolumeSpecName "kube-api-access-x6ksk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.160655 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94062d42-28cb-4c8a-afa4-f51458dedc6c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "94062d42-28cb-4c8a-afa4-f51458dedc6c" (UID: "94062d42-28cb-4c8a-afa4-f51458dedc6c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.166808 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/placement-55dfc8964d-kck7c"] Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.170584 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94062d42-28cb-4c8a-afa4-f51458dedc6c-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "94062d42-28cb-4c8a-afa4-f51458dedc6c" (UID: "94062d42-28cb-4c8a-afa4-f51458dedc6c"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.183323 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94062d42-28cb-4c8a-afa4-f51458dedc6c-config-data" (OuterVolumeSpecName: "config-data") pod "94062d42-28cb-4c8a-afa4-f51458dedc6c" (UID: "94062d42-28cb-4c8a-afa4-f51458dedc6c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.184747 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94062d42-28cb-4c8a-afa4-f51458dedc6c-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "94062d42-28cb-4c8a-afa4-f51458dedc6c" (UID: "94062d42-28cb-4c8a-afa4-f51458dedc6c"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.213854 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/94062d42-28cb-4c8a-afa4-f51458dedc6c-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.216194 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/94062d42-28cb-4c8a-afa4-f51458dedc6c-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.216223 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x6ksk\" (UniqueName: \"kubernetes.io/projected/94062d42-28cb-4c8a-afa4-f51458dedc6c-kube-api-access-x6ksk\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.216237 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/94062d42-28cb-4c8a-afa4-f51458dedc6c-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.216249 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94062d42-28cb-4c8a-afa4-f51458dedc6c-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.216260 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94062d42-28cb-4c8a-afa4-f51458dedc6c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.216271 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/94062d42-28cb-4c8a-afa4-f51458dedc6c-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.261777 4558 scope.go:117] "RemoveContainer" containerID="1aa46d5cbab3879dbc6e24ca32547b207cb8e11bc47b9d499882d8000bc7b577" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.294230 4558 scope.go:117] "RemoveContainer" containerID="bd3938888a8a3a7fda5215bb19bd64fb769bd0151d4bb989be309f5b70b3ad7c" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.296288 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.304714 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.311218 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.315014 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.320683 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.322220 4558 scope.go:117] "RemoveContainer" containerID="1aa46d5cbab3879dbc6e24ca32547b207cb8e11bc47b9d499882d8000bc7b577" Jan 20 17:37:46 crc kubenswrapper[4558]: E0120 17:37:46.324393 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1aa46d5cbab3879dbc6e24ca32547b207cb8e11bc47b9d499882d8000bc7b577\": container with ID 
starting with 1aa46d5cbab3879dbc6e24ca32547b207cb8e11bc47b9d499882d8000bc7b577 not found: ID does not exist" containerID="1aa46d5cbab3879dbc6e24ca32547b207cb8e11bc47b9d499882d8000bc7b577" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.324447 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1aa46d5cbab3879dbc6e24ca32547b207cb8e11bc47b9d499882d8000bc7b577"} err="failed to get container status \"1aa46d5cbab3879dbc6e24ca32547b207cb8e11bc47b9d499882d8000bc7b577\": rpc error: code = NotFound desc = could not find container \"1aa46d5cbab3879dbc6e24ca32547b207cb8e11bc47b9d499882d8000bc7b577\": container with ID starting with 1aa46d5cbab3879dbc6e24ca32547b207cb8e11bc47b9d499882d8000bc7b577 not found: ID does not exist" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.324471 4558 scope.go:117] "RemoveContainer" containerID="bd3938888a8a3a7fda5215bb19bd64fb769bd0151d4bb989be309f5b70b3ad7c" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.324829 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:37:46 crc kubenswrapper[4558]: E0120 17:37:46.325138 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bd3938888a8a3a7fda5215bb19bd64fb769bd0151d4bb989be309f5b70b3ad7c\": container with ID starting with bd3938888a8a3a7fda5215bb19bd64fb769bd0151d4bb989be309f5b70b3ad7c not found: ID does not exist" containerID="bd3938888a8a3a7fda5215bb19bd64fb769bd0151d4bb989be309f5b70b3ad7c" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.325187 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bd3938888a8a3a7fda5215bb19bd64fb769bd0151d4bb989be309f5b70b3ad7c"} err="failed to get container status \"bd3938888a8a3a7fda5215bb19bd64fb769bd0151d4bb989be309f5b70b3ad7c\": rpc error: code = NotFound desc = could not find container \"bd3938888a8a3a7fda5215bb19bd64fb769bd0151d4bb989be309f5b70b3ad7c\": container with ID starting with bd3938888a8a3a7fda5215bb19bd64fb769bd0151d4bb989be309f5b70b3ad7c not found: ID does not exist" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.325214 4558 scope.go:117] "RemoveContainer" containerID="4937e3357efe6ba2fe8b94eb2c378f889e567ce6926af22853306ab11522be96" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.329365 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.333219 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.356663 4558 scope.go:117] "RemoveContainer" containerID="d2e273ef1d32ff90a2aef248fb8b2c6529baf7e88fc616e72b397d1ef78a7d62" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.357448 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.375925 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.381130 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.385038 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.396687 4558 scope.go:117] "RemoveContainer" containerID="80fe464dc7cd617b6ebc3164ada8f61f3bec4caca84b82392b77289a475163d7" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.421767 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-2d06-account-create-update-2wqtq"] Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.430851 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-2d06-account-create-update-2wqtq"] Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.432012 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-api-64bd4857c6-2s9h4"] Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.435523 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-api-64bd4857c6-2s9h4"] Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.441435 4558 scope.go:117] "RemoveContainer" containerID="1e1ade780111891ab36a10d37a0917edea1f34135b19b61c709e80ef7ae68b98" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.466327 4558 scope.go:117] "RemoveContainer" containerID="3bb4afac6c286ba59753565000e90ace9365499f3dff7e44738101c6faf5cc39" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.485051 4558 scope.go:117] "RemoveContainer" containerID="a9b864e0c7723915ae39749b96750b7b8695818471060aad197d75c66a8cc364" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.517129 4558 scope.go:117] "RemoveContainer" containerID="3bb4afac6c286ba59753565000e90ace9365499f3dff7e44738101c6faf5cc39" Jan 20 17:37:46 crc kubenswrapper[4558]: E0120 17:37:46.518219 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3bb4afac6c286ba59753565000e90ace9365499f3dff7e44738101c6faf5cc39\": container with ID starting with 3bb4afac6c286ba59753565000e90ace9365499f3dff7e44738101c6faf5cc39 not found: ID does not exist" containerID="3bb4afac6c286ba59753565000e90ace9365499f3dff7e44738101c6faf5cc39" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.518259 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3bb4afac6c286ba59753565000e90ace9365499f3dff7e44738101c6faf5cc39"} err="failed to get container status \"3bb4afac6c286ba59753565000e90ace9365499f3dff7e44738101c6faf5cc39\": rpc error: code = NotFound desc = could not find container \"3bb4afac6c286ba59753565000e90ace9365499f3dff7e44738101c6faf5cc39\": container with ID starting with 3bb4afac6c286ba59753565000e90ace9365499f3dff7e44738101c6faf5cc39 not found: ID does not exist" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.518310 4558 scope.go:117] "RemoveContainer" containerID="a9b864e0c7723915ae39749b96750b7b8695818471060aad197d75c66a8cc364" Jan 20 17:37:46 crc kubenswrapper[4558]: E0120 17:37:46.518734 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a9b864e0c7723915ae39749b96750b7b8695818471060aad197d75c66a8cc364\": container with ID starting with a9b864e0c7723915ae39749b96750b7b8695818471060aad197d75c66a8cc364 not found: ID does not exist" containerID="a9b864e0c7723915ae39749b96750b7b8695818471060aad197d75c66a8cc364" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.518782 4558 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"a9b864e0c7723915ae39749b96750b7b8695818471060aad197d75c66a8cc364"} err="failed to get container status \"a9b864e0c7723915ae39749b96750b7b8695818471060aad197d75c66a8cc364\": rpc error: code = NotFound desc = could not find container \"a9b864e0c7723915ae39749b96750b7b8695818471060aad197d75c66a8cc364\": container with ID starting with a9b864e0c7723915ae39749b96750b7b8695818471060aad197d75c66a8cc364 not found: ID does not exist" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.518804 4558 scope.go:117] "RemoveContainer" containerID="7afb4e77f803740421952c0dbe28e7f78cf5d2d3ff6b921fafb8facbbd98325f" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.522747 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ccf99999-f746-48ce-a387-642245ef0fd5-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.522778 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8rnxx\" (UniqueName: \"kubernetes.io/projected/ccf99999-f746-48ce-a387-642245ef0fd5-kube-api-access-8rnxx\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.551138 4558 scope.go:117] "RemoveContainer" containerID="97f89fd69e51892f2e484fb54c15bb385da2a687f24a01aa8d3d2e7c65114191" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.615308 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0def1343-9e4b-4f74-84bd-3212688b59ce" path="/var/lib/kubelet/pods/0def1343-9e4b-4f74-84bd-3212688b59ce/volumes" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.617619 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0f5a8541-5b86-4fff-8f94-18b8b5ee3ebf" path="/var/lib/kubelet/pods/0f5a8541-5b86-4fff-8f94-18b8b5ee3ebf/volumes" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.618845 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1addf16e-1af6-431b-aff1-05c48a952f5a" path="/var/lib/kubelet/pods/1addf16e-1af6-431b-aff1-05c48a952f5a/volumes" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.623880 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1fff3747-796d-420a-aff0-846dfc615df9" path="/var/lib/kubelet/pods/1fff3747-796d-420a-aff0-846dfc615df9/volumes" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.626428 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="52895fa4-c08b-43b8-9f63-cbc8621db11e" path="/var/lib/kubelet/pods/52895fa4-c08b-43b8-9f63-cbc8621db11e/volumes" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.627557 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6b77441a-39f5-4ed5-bf1b-c29900900242" path="/var/lib/kubelet/pods/6b77441a-39f5-4ed5-bf1b-c29900900242/volumes" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.628507 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="79a4b689-d81b-44da-8baa-88fc6ce78172" path="/var/lib/kubelet/pods/79a4b689-d81b-44da-8baa-88fc6ce78172/volumes" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.629681 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7ca40caa-67e9-4be0-8697-cf15086736bc" path="/var/lib/kubelet/pods/7ca40caa-67e9-4be0-8697-cf15086736bc/volumes" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.630096 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7ee90982-23a3-4111-9c4e-159828a51a69" 
path="/var/lib/kubelet/pods/7ee90982-23a3-4111-9c4e-159828a51a69/volumes" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.630734 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="94062d42-28cb-4c8a-afa4-f51458dedc6c" path="/var/lib/kubelet/pods/94062d42-28cb-4c8a-afa4-f51458dedc6c/volumes" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.632010 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c570a7ea-d296-44bc-a48f-6dd8be9754d0" path="/var/lib/kubelet/pods/c570a7ea-d296-44bc-a48f-6dd8be9754d0/volumes" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.632839 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c7ffc3d3-5db6-484c-b9c1-d065e7177eb7" path="/var/lib/kubelet/pods/c7ffc3d3-5db6-484c-b9c1-d065e7177eb7/volumes" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.633769 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ccf99999-f746-48ce-a387-642245ef0fd5" path="/var/lib/kubelet/pods/ccf99999-f746-48ce-a387-642245ef0fd5/volumes" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.634703 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cdf89a68-b15b-4f2d-9016-55fb28011197" path="/var/lib/kubelet/pods/cdf89a68-b15b-4f2d-9016-55fb28011197/volumes" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.635188 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e1a1ac97-a530-4ba0-8c9b-835a2b576c8d" path="/var/lib/kubelet/pods/e1a1ac97-a530-4ba0-8c9b-835a2b576c8d/volumes" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.636007 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f19d5a0b-6d74-4b46-86a3-9381feb3f158" path="/var/lib/kubelet/pods/f19d5a0b-6d74-4b46-86a3-9381feb3f158/volumes" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.637096 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f7881580-f0a5-47a0-9622-3927bcce668a" path="/var/lib/kubelet/pods/f7881580-f0a5-47a0-9622-3927bcce668a/volumes" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.670940 4558 scope.go:117] "RemoveContainer" containerID="1c79880b61b7b00d3acd244cc4b46331c0b20f7998564a054c816dfa317eab5b" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.689543 4558 scope.go:117] "RemoveContainer" containerID="4777bd55f602e6282b42a942acc808f0478764354d0970617b2e6c1dddd4da85" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.695213 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" podUID="0c6a6265-cf90-4039-9200-ba478d612baa" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.1.224:5671: connect: connection refused" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.745254 4558 scope.go:117] "RemoveContainer" containerID="1c79880b61b7b00d3acd244cc4b46331c0b20f7998564a054c816dfa317eab5b" Jan 20 17:37:46 crc kubenswrapper[4558]: E0120 17:37:46.745860 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1c79880b61b7b00d3acd244cc4b46331c0b20f7998564a054c816dfa317eab5b\": container with ID starting with 1c79880b61b7b00d3acd244cc4b46331c0b20f7998564a054c816dfa317eab5b not found: ID does not exist" containerID="1c79880b61b7b00d3acd244cc4b46331c0b20f7998564a054c816dfa317eab5b" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.745912 4558 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"1c79880b61b7b00d3acd244cc4b46331c0b20f7998564a054c816dfa317eab5b"} err="failed to get container status \"1c79880b61b7b00d3acd244cc4b46331c0b20f7998564a054c816dfa317eab5b\": rpc error: code = NotFound desc = could not find container \"1c79880b61b7b00d3acd244cc4b46331c0b20f7998564a054c816dfa317eab5b\": container with ID starting with 1c79880b61b7b00d3acd244cc4b46331c0b20f7998564a054c816dfa317eab5b not found: ID does not exist" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.745941 4558 scope.go:117] "RemoveContainer" containerID="4777bd55f602e6282b42a942acc808f0478764354d0970617b2e6c1dddd4da85" Jan 20 17:37:46 crc kubenswrapper[4558]: E0120 17:37:46.746318 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4777bd55f602e6282b42a942acc808f0478764354d0970617b2e6c1dddd4da85\": container with ID starting with 4777bd55f602e6282b42a942acc808f0478764354d0970617b2e6c1dddd4da85 not found: ID does not exist" containerID="4777bd55f602e6282b42a942acc808f0478764354d0970617b2e6c1dddd4da85" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.746340 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4777bd55f602e6282b42a942acc808f0478764354d0970617b2e6c1dddd4da85"} err="failed to get container status \"4777bd55f602e6282b42a942acc808f0478764354d0970617b2e6c1dddd4da85\": rpc error: code = NotFound desc = could not find container \"4777bd55f602e6282b42a942acc808f0478764354d0970617b2e6c1dddd4da85\": container with ID starting with 4777bd55f602e6282b42a942acc808f0478764354d0970617b2e6c1dddd4da85 not found: ID does not exist" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.746353 4558 scope.go:117] "RemoveContainer" containerID="d7f94ffeca974ded0f05a2d4e0bd7bdbcb45172d758621d01d81132148b96b8b" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.770302 4558 scope.go:117] "RemoveContainer" containerID="9c1da6a399e8373ade66666f30fdfad18e61c1a4529147d6a10d0385c82a426f" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.799595 4558 scope.go:117] "RemoveContainer" containerID="f49f1bac498f7d02524aaee31c0af982140e123e147ff77674e41803999fb5b9" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.839741 4558 scope.go:117] "RemoveContainer" containerID="0fde5ea8da11b84a29f2a0ac302c4c48b6a41c64a33f837f39b3bbaaf4487feb" Jan 20 17:37:46 crc kubenswrapper[4558]: I0120 17:37:46.944257 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:37:47 crc kubenswrapper[4558]: I0120 17:37:47.032330 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/906beedc-3fa2-4d6e-a6e8-485ca2fb1082-config-data\") pod \"906beedc-3fa2-4d6e-a6e8-485ca2fb1082\" (UID: \"906beedc-3fa2-4d6e-a6e8-485ca2fb1082\") " Jan 20 17:37:47 crc kubenswrapper[4558]: I0120 17:37:47.032451 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dzwgx\" (UniqueName: \"kubernetes.io/projected/906beedc-3fa2-4d6e-a6e8-485ca2fb1082-kube-api-access-dzwgx\") pod \"906beedc-3fa2-4d6e-a6e8-485ca2fb1082\" (UID: \"906beedc-3fa2-4d6e-a6e8-485ca2fb1082\") " Jan 20 17:37:47 crc kubenswrapper[4558]: I0120 17:37:47.032531 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/906beedc-3fa2-4d6e-a6e8-485ca2fb1082-public-tls-certs\") pod \"906beedc-3fa2-4d6e-a6e8-485ca2fb1082\" (UID: \"906beedc-3fa2-4d6e-a6e8-485ca2fb1082\") " Jan 20 17:37:47 crc kubenswrapper[4558]: I0120 17:37:47.032624 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/906beedc-3fa2-4d6e-a6e8-485ca2fb1082-logs\") pod \"906beedc-3fa2-4d6e-a6e8-485ca2fb1082\" (UID: \"906beedc-3fa2-4d6e-a6e8-485ca2fb1082\") " Jan 20 17:37:47 crc kubenswrapper[4558]: I0120 17:37:47.032781 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/906beedc-3fa2-4d6e-a6e8-485ca2fb1082-combined-ca-bundle\") pod \"906beedc-3fa2-4d6e-a6e8-485ca2fb1082\" (UID: \"906beedc-3fa2-4d6e-a6e8-485ca2fb1082\") " Jan 20 17:37:47 crc kubenswrapper[4558]: I0120 17:37:47.032848 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/906beedc-3fa2-4d6e-a6e8-485ca2fb1082-internal-tls-certs\") pod \"906beedc-3fa2-4d6e-a6e8-485ca2fb1082\" (UID: \"906beedc-3fa2-4d6e-a6e8-485ca2fb1082\") " Jan 20 17:37:47 crc kubenswrapper[4558]: I0120 17:37:47.034837 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/906beedc-3fa2-4d6e-a6e8-485ca2fb1082-logs" (OuterVolumeSpecName: "logs") pod "906beedc-3fa2-4d6e-a6e8-485ca2fb1082" (UID: "906beedc-3fa2-4d6e-a6e8-485ca2fb1082"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:37:47 crc kubenswrapper[4558]: I0120 17:37:47.039019 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/906beedc-3fa2-4d6e-a6e8-485ca2fb1082-kube-api-access-dzwgx" (OuterVolumeSpecName: "kube-api-access-dzwgx") pod "906beedc-3fa2-4d6e-a6e8-485ca2fb1082" (UID: "906beedc-3fa2-4d6e-a6e8-485ca2fb1082"). InnerVolumeSpecName "kube-api-access-dzwgx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:37:47 crc kubenswrapper[4558]: I0120 17:37:47.064963 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/906beedc-3fa2-4d6e-a6e8-485ca2fb1082-config-data" (OuterVolumeSpecName: "config-data") pod "906beedc-3fa2-4d6e-a6e8-485ca2fb1082" (UID: "906beedc-3fa2-4d6e-a6e8-485ca2fb1082"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:47 crc kubenswrapper[4558]: I0120 17:37:47.067044 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/906beedc-3fa2-4d6e-a6e8-485ca2fb1082-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "906beedc-3fa2-4d6e-a6e8-485ca2fb1082" (UID: "906beedc-3fa2-4d6e-a6e8-485ca2fb1082"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:47 crc kubenswrapper[4558]: I0120 17:37:47.077152 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/906beedc-3fa2-4d6e-a6e8-485ca2fb1082-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "906beedc-3fa2-4d6e-a6e8-485ca2fb1082" (UID: "906beedc-3fa2-4d6e-a6e8-485ca2fb1082"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:47 crc kubenswrapper[4558]: I0120 17:37:47.077328 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/906beedc-3fa2-4d6e-a6e8-485ca2fb1082-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "906beedc-3fa2-4d6e-a6e8-485ca2fb1082" (UID: "906beedc-3fa2-4d6e-a6e8-485ca2fb1082"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:47 crc kubenswrapper[4558]: I0120 17:37:47.134650 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/906beedc-3fa2-4d6e-a6e8-485ca2fb1082-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:47 crc kubenswrapper[4558]: I0120 17:37:47.135302 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/906beedc-3fa2-4d6e-a6e8-485ca2fb1082-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:47 crc kubenswrapper[4558]: I0120 17:37:47.135314 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/906beedc-3fa2-4d6e-a6e8-485ca2fb1082-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:47 crc kubenswrapper[4558]: I0120 17:37:47.135328 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dzwgx\" (UniqueName: \"kubernetes.io/projected/906beedc-3fa2-4d6e-a6e8-485ca2fb1082-kube-api-access-dzwgx\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:47 crc kubenswrapper[4558]: I0120 17:37:47.135338 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/906beedc-3fa2-4d6e-a6e8-485ca2fb1082-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:47 crc kubenswrapper[4558]: I0120 17:37:47.135349 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/906beedc-3fa2-4d6e-a6e8-485ca2fb1082-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:47 crc kubenswrapper[4558]: I0120 17:37:47.136534 4558 generic.go:334] "Generic (PLEG): container finished" podID="906beedc-3fa2-4d6e-a6e8-485ca2fb1082" containerID="48ace894e440f9c70ed8c855d176b45725c5d24b103871d2c8046cc04e75abb3" exitCode=0 Jan 20 17:37:47 crc kubenswrapper[4558]: I0120 17:37:47.136596 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" 
event={"ID":"906beedc-3fa2-4d6e-a6e8-485ca2fb1082","Type":"ContainerDied","Data":"48ace894e440f9c70ed8c855d176b45725c5d24b103871d2c8046cc04e75abb3"} Jan 20 17:37:47 crc kubenswrapper[4558]: I0120 17:37:47.136624 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"906beedc-3fa2-4d6e-a6e8-485ca2fb1082","Type":"ContainerDied","Data":"bdd05f55856c8fd5a792832668c4f4ad2d1ebef99027ecad7d4990a5516ec21a"} Jan 20 17:37:47 crc kubenswrapper[4558]: I0120 17:37:47.136651 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:37:47 crc kubenswrapper[4558]: I0120 17:37:47.136643 4558 scope.go:117] "RemoveContainer" containerID="48ace894e440f9c70ed8c855d176b45725c5d24b103871d2c8046cc04e75abb3" Jan 20 17:37:47 crc kubenswrapper[4558]: I0120 17:37:47.162135 4558 scope.go:117] "RemoveContainer" containerID="9da02a19e6777029509b4f8838ad4012f0bd07388cadfbfa55cbc40a5673b825" Jan 20 17:37:47 crc kubenswrapper[4558]: I0120 17:37:47.179712 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:37:47 crc kubenswrapper[4558]: I0120 17:37:47.187310 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:37:47 crc kubenswrapper[4558]: I0120 17:37:47.187952 4558 scope.go:117] "RemoveContainer" containerID="48ace894e440f9c70ed8c855d176b45725c5d24b103871d2c8046cc04e75abb3" Jan 20 17:37:47 crc kubenswrapper[4558]: E0120 17:37:47.188430 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"48ace894e440f9c70ed8c855d176b45725c5d24b103871d2c8046cc04e75abb3\": container with ID starting with 48ace894e440f9c70ed8c855d176b45725c5d24b103871d2c8046cc04e75abb3 not found: ID does not exist" containerID="48ace894e440f9c70ed8c855d176b45725c5d24b103871d2c8046cc04e75abb3" Jan 20 17:37:47 crc kubenswrapper[4558]: I0120 17:37:47.188465 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"48ace894e440f9c70ed8c855d176b45725c5d24b103871d2c8046cc04e75abb3"} err="failed to get container status \"48ace894e440f9c70ed8c855d176b45725c5d24b103871d2c8046cc04e75abb3\": rpc error: code = NotFound desc = could not find container \"48ace894e440f9c70ed8c855d176b45725c5d24b103871d2c8046cc04e75abb3\": container with ID starting with 48ace894e440f9c70ed8c855d176b45725c5d24b103871d2c8046cc04e75abb3 not found: ID does not exist" Jan 20 17:37:47 crc kubenswrapper[4558]: I0120 17:37:47.188493 4558 scope.go:117] "RemoveContainer" containerID="9da02a19e6777029509b4f8838ad4012f0bd07388cadfbfa55cbc40a5673b825" Jan 20 17:37:47 crc kubenswrapper[4558]: E0120 17:37:47.188773 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9da02a19e6777029509b4f8838ad4012f0bd07388cadfbfa55cbc40a5673b825\": container with ID starting with 9da02a19e6777029509b4f8838ad4012f0bd07388cadfbfa55cbc40a5673b825 not found: ID does not exist" containerID="9da02a19e6777029509b4f8838ad4012f0bd07388cadfbfa55cbc40a5673b825" Jan 20 17:37:47 crc kubenswrapper[4558]: I0120 17:37:47.188794 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9da02a19e6777029509b4f8838ad4012f0bd07388cadfbfa55cbc40a5673b825"} err="failed to get container status \"9da02a19e6777029509b4f8838ad4012f0bd07388cadfbfa55cbc40a5673b825\": rpc error: code = 
NotFound desc = could not find container \"9da02a19e6777029509b4f8838ad4012f0bd07388cadfbfa55cbc40a5673b825\": container with ID starting with 9da02a19e6777029509b4f8838ad4012f0bd07388cadfbfa55cbc40a5673b825 not found: ID does not exist" Jan 20 17:37:47 crc kubenswrapper[4558]: E0120 17:37:47.339468 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 20 17:37:47 crc kubenswrapper[4558]: E0120 17:37:47.339538 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/0696a635-5dc9-46e9-8502-47fab9ff761c-config-data podName:0696a635-5dc9-46e9-8502-47fab9ff761c nodeName:}" failed. No retries permitted until 2026-01-20 17:37:55.339522314 +0000 UTC m=+3369.099860281 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/0696a635-5dc9-46e9-8502-47fab9ff761c-config-data") pod "rabbitmq-server-0" (UID: "0696a635-5dc9-46e9-8502-47fab9ff761c") : configmap "rabbitmq-config-data" not found Jan 20 17:37:47 crc kubenswrapper[4558]: I0120 17:37:47.477742 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/rabbitmq-server-0" podUID="0696a635-5dc9-46e9-8502-47fab9ff761c" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.1.225:5671: connect: connection refused" Jan 20 17:37:47 crc kubenswrapper[4558]: I0120 17:37:47.623133 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-59b8cc448d-b9wnn" Jan 20 17:37:47 crc kubenswrapper[4558]: E0120 17:37:47.660452 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Jan 20 17:37:47 crc kubenswrapper[4558]: E0120 17:37:47.660524 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/0c6a6265-cf90-4039-9200-ba478d612baa-config-data podName:0c6a6265-cf90-4039-9200-ba478d612baa nodeName:}" failed. No retries permitted until 2026-01-20 17:37:55.660507727 +0000 UTC m=+3369.420845694 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/0c6a6265-cf90-4039-9200-ba478d612baa-config-data") pod "rabbitmq-cell1-server-0" (UID: "0c6a6265-cf90-4039-9200-ba478d612baa") : configmap "rabbitmq-cell1-config-data" not found Jan 20 17:37:47 crc kubenswrapper[4558]: I0120 17:37:47.759985 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/73789760-f9bd-44e0-bf9b-c1864e5f8803-credential-keys\") pod \"73789760-f9bd-44e0-bf9b-c1864e5f8803\" (UID: \"73789760-f9bd-44e0-bf9b-c1864e5f8803\") " Jan 20 17:37:47 crc kubenswrapper[4558]: I0120 17:37:47.760069 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/73789760-f9bd-44e0-bf9b-c1864e5f8803-scripts\") pod \"73789760-f9bd-44e0-bf9b-c1864e5f8803\" (UID: \"73789760-f9bd-44e0-bf9b-c1864e5f8803\") " Jan 20 17:37:47 crc kubenswrapper[4558]: I0120 17:37:47.760106 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/73789760-f9bd-44e0-bf9b-c1864e5f8803-internal-tls-certs\") pod \"73789760-f9bd-44e0-bf9b-c1864e5f8803\" (UID: \"73789760-f9bd-44e0-bf9b-c1864e5f8803\") " Jan 20 17:37:47 crc kubenswrapper[4558]: I0120 17:37:47.760173 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/73789760-f9bd-44e0-bf9b-c1864e5f8803-public-tls-certs\") pod \"73789760-f9bd-44e0-bf9b-c1864e5f8803\" (UID: \"73789760-f9bd-44e0-bf9b-c1864e5f8803\") " Jan 20 17:37:47 crc kubenswrapper[4558]: I0120 17:37:47.760218 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73789760-f9bd-44e0-bf9b-c1864e5f8803-config-data\") pod \"73789760-f9bd-44e0-bf9b-c1864e5f8803\" (UID: \"73789760-f9bd-44e0-bf9b-c1864e5f8803\") " Jan 20 17:37:47 crc kubenswrapper[4558]: I0120 17:37:47.760257 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tdnlj\" (UniqueName: \"kubernetes.io/projected/73789760-f9bd-44e0-bf9b-c1864e5f8803-kube-api-access-tdnlj\") pod \"73789760-f9bd-44e0-bf9b-c1864e5f8803\" (UID: \"73789760-f9bd-44e0-bf9b-c1864e5f8803\") " Jan 20 17:37:47 crc kubenswrapper[4558]: I0120 17:37:47.760283 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73789760-f9bd-44e0-bf9b-c1864e5f8803-combined-ca-bundle\") pod \"73789760-f9bd-44e0-bf9b-c1864e5f8803\" (UID: \"73789760-f9bd-44e0-bf9b-c1864e5f8803\") " Jan 20 17:37:47 crc kubenswrapper[4558]: I0120 17:37:47.760337 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/73789760-f9bd-44e0-bf9b-c1864e5f8803-fernet-keys\") pod \"73789760-f9bd-44e0-bf9b-c1864e5f8803\" (UID: \"73789760-f9bd-44e0-bf9b-c1864e5f8803\") " Jan 20 17:37:47 crc kubenswrapper[4558]: I0120 17:37:47.764913 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73789760-f9bd-44e0-bf9b-c1864e5f8803-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "73789760-f9bd-44e0-bf9b-c1864e5f8803" (UID: "73789760-f9bd-44e0-bf9b-c1864e5f8803"). InnerVolumeSpecName "fernet-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:47 crc kubenswrapper[4558]: I0120 17:37:47.765362 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73789760-f9bd-44e0-bf9b-c1864e5f8803-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "73789760-f9bd-44e0-bf9b-c1864e5f8803" (UID: "73789760-f9bd-44e0-bf9b-c1864e5f8803"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:47 crc kubenswrapper[4558]: I0120 17:37:47.765410 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/73789760-f9bd-44e0-bf9b-c1864e5f8803-kube-api-access-tdnlj" (OuterVolumeSpecName: "kube-api-access-tdnlj") pod "73789760-f9bd-44e0-bf9b-c1864e5f8803" (UID: "73789760-f9bd-44e0-bf9b-c1864e5f8803"). InnerVolumeSpecName "kube-api-access-tdnlj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:37:47 crc kubenswrapper[4558]: I0120 17:37:47.768529 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73789760-f9bd-44e0-bf9b-c1864e5f8803-scripts" (OuterVolumeSpecName: "scripts") pod "73789760-f9bd-44e0-bf9b-c1864e5f8803" (UID: "73789760-f9bd-44e0-bf9b-c1864e5f8803"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:47 crc kubenswrapper[4558]: I0120 17:37:47.783359 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73789760-f9bd-44e0-bf9b-c1864e5f8803-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "73789760-f9bd-44e0-bf9b-c1864e5f8803" (UID: "73789760-f9bd-44e0-bf9b-c1864e5f8803"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:47 crc kubenswrapper[4558]: I0120 17:37:47.783546 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73789760-f9bd-44e0-bf9b-c1864e5f8803-config-data" (OuterVolumeSpecName: "config-data") pod "73789760-f9bd-44e0-bf9b-c1864e5f8803" (UID: "73789760-f9bd-44e0-bf9b-c1864e5f8803"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:47 crc kubenswrapper[4558]: I0120 17:37:47.795567 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73789760-f9bd-44e0-bf9b-c1864e5f8803-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "73789760-f9bd-44e0-bf9b-c1864e5f8803" (UID: "73789760-f9bd-44e0-bf9b-c1864e5f8803"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:47 crc kubenswrapper[4558]: I0120 17:37:47.805681 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73789760-f9bd-44e0-bf9b-c1864e5f8803-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "73789760-f9bd-44e0-bf9b-c1864e5f8803" (UID: "73789760-f9bd-44e0-bf9b-c1864e5f8803"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:47 crc kubenswrapper[4558]: I0120 17:37:47.863180 4558 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/73789760-f9bd-44e0-bf9b-c1864e5f8803-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:47 crc kubenswrapper[4558]: I0120 17:37:47.863212 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/73789760-f9bd-44e0-bf9b-c1864e5f8803-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:47 crc kubenswrapper[4558]: I0120 17:37:47.863223 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/73789760-f9bd-44e0-bf9b-c1864e5f8803-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:47 crc kubenswrapper[4558]: I0120 17:37:47.863236 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/73789760-f9bd-44e0-bf9b-c1864e5f8803-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:47 crc kubenswrapper[4558]: I0120 17:37:47.863246 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73789760-f9bd-44e0-bf9b-c1864e5f8803-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:47 crc kubenswrapper[4558]: I0120 17:37:47.863255 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tdnlj\" (UniqueName: \"kubernetes.io/projected/73789760-f9bd-44e0-bf9b-c1864e5f8803-kube-api-access-tdnlj\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:47 crc kubenswrapper[4558]: I0120 17:37:47.863269 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73789760-f9bd-44e0-bf9b-c1864e5f8803-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:47 crc kubenswrapper[4558]: I0120 17:37:47.863279 4558 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/73789760-f9bd-44e0-bf9b-c1864e5f8803-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:47 crc kubenswrapper[4558]: I0120 17:37:47.900596 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.066990 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0696a635-5dc9-46e9-8502-47fab9ff761c-config-data\") pod \"0696a635-5dc9-46e9-8502-47fab9ff761c\" (UID: \"0696a635-5dc9-46e9-8502-47fab9ff761c\") " Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.067067 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0696a635-5dc9-46e9-8502-47fab9ff761c-rabbitmq-confd\") pod \"0696a635-5dc9-46e9-8502-47fab9ff761c\" (UID: \"0696a635-5dc9-46e9-8502-47fab9ff761c\") " Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.067121 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/0696a635-5dc9-46e9-8502-47fab9ff761c-server-conf\") pod \"0696a635-5dc9-46e9-8502-47fab9ff761c\" (UID: \"0696a635-5dc9-46e9-8502-47fab9ff761c\") " Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.067177 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"0696a635-5dc9-46e9-8502-47fab9ff761c\" (UID: \"0696a635-5dc9-46e9-8502-47fab9ff761c\") " Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.067201 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0696a635-5dc9-46e9-8502-47fab9ff761c-plugins-conf\") pod \"0696a635-5dc9-46e9-8502-47fab9ff761c\" (UID: \"0696a635-5dc9-46e9-8502-47fab9ff761c\") " Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.067225 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0696a635-5dc9-46e9-8502-47fab9ff761c-pod-info\") pod \"0696a635-5dc9-46e9-8502-47fab9ff761c\" (UID: \"0696a635-5dc9-46e9-8502-47fab9ff761c\") " Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.067249 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/0696a635-5dc9-46e9-8502-47fab9ff761c-rabbitmq-tls\") pod \"0696a635-5dc9-46e9-8502-47fab9ff761c\" (UID: \"0696a635-5dc9-46e9-8502-47fab9ff761c\") " Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.067273 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0696a635-5dc9-46e9-8502-47fab9ff761c-rabbitmq-plugins\") pod \"0696a635-5dc9-46e9-8502-47fab9ff761c\" (UID: \"0696a635-5dc9-46e9-8502-47fab9ff761c\") " Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.067317 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0696a635-5dc9-46e9-8502-47fab9ff761c-rabbitmq-erlang-cookie\") pod \"0696a635-5dc9-46e9-8502-47fab9ff761c\" (UID: \"0696a635-5dc9-46e9-8502-47fab9ff761c\") " Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.067342 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0696a635-5dc9-46e9-8502-47fab9ff761c-erlang-cookie-secret\") pod \"0696a635-5dc9-46e9-8502-47fab9ff761c\" (UID: 
\"0696a635-5dc9-46e9-8502-47fab9ff761c\") " Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.067365 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qkf2f\" (UniqueName: \"kubernetes.io/projected/0696a635-5dc9-46e9-8502-47fab9ff761c-kube-api-access-qkf2f\") pod \"0696a635-5dc9-46e9-8502-47fab9ff761c\" (UID: \"0696a635-5dc9-46e9-8502-47fab9ff761c\") " Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.068914 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0696a635-5dc9-46e9-8502-47fab9ff761c-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "0696a635-5dc9-46e9-8502-47fab9ff761c" (UID: "0696a635-5dc9-46e9-8502-47fab9ff761c"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.068924 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0696a635-5dc9-46e9-8502-47fab9ff761c-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "0696a635-5dc9-46e9-8502-47fab9ff761c" (UID: "0696a635-5dc9-46e9-8502-47fab9ff761c"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.069199 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0696a635-5dc9-46e9-8502-47fab9ff761c-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "0696a635-5dc9-46e9-8502-47fab9ff761c" (UID: "0696a635-5dc9-46e9-8502-47fab9ff761c"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.073333 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0696a635-5dc9-46e9-8502-47fab9ff761c-kube-api-access-qkf2f" (OuterVolumeSpecName: "kube-api-access-qkf2f") pod "0696a635-5dc9-46e9-8502-47fab9ff761c" (UID: "0696a635-5dc9-46e9-8502-47fab9ff761c"). InnerVolumeSpecName "kube-api-access-qkf2f". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.073378 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage12-crc" (OuterVolumeSpecName: "persistence") pod "0696a635-5dc9-46e9-8502-47fab9ff761c" (UID: "0696a635-5dc9-46e9-8502-47fab9ff761c"). InnerVolumeSpecName "local-storage12-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.073621 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/0696a635-5dc9-46e9-8502-47fab9ff761c-pod-info" (OuterVolumeSpecName: "pod-info") pod "0696a635-5dc9-46e9-8502-47fab9ff761c" (UID: "0696a635-5dc9-46e9-8502-47fab9ff761c"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.075005 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0696a635-5dc9-46e9-8502-47fab9ff761c-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "0696a635-5dc9-46e9-8502-47fab9ff761c" (UID: "0696a635-5dc9-46e9-8502-47fab9ff761c"). InnerVolumeSpecName "rabbitmq-tls". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.075336 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0696a635-5dc9-46e9-8502-47fab9ff761c-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "0696a635-5dc9-46e9-8502-47fab9ff761c" (UID: "0696a635-5dc9-46e9-8502-47fab9ff761c"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.096636 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0696a635-5dc9-46e9-8502-47fab9ff761c-config-data" (OuterVolumeSpecName: "config-data") pod "0696a635-5dc9-46e9-8502-47fab9ff761c" (UID: "0696a635-5dc9-46e9-8502-47fab9ff761c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.113835 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0696a635-5dc9-46e9-8502-47fab9ff761c-server-conf" (OuterVolumeSpecName: "server-conf") pod "0696a635-5dc9-46e9-8502-47fab9ff761c" (UID: "0696a635-5dc9-46e9-8502-47fab9ff761c"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.164349 4558 generic.go:334] "Generic (PLEG): container finished" podID="0696a635-5dc9-46e9-8502-47fab9ff761c" containerID="9f1296f437298973ce0b0219f1980b151cca677e25fb375f92cfbf98670c7983" exitCode=0 Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.164419 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-0" event={"ID":"0696a635-5dc9-46e9-8502-47fab9ff761c","Type":"ContainerDied","Data":"9f1296f437298973ce0b0219f1980b151cca677e25fb375f92cfbf98670c7983"} Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.164459 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-0" event={"ID":"0696a635-5dc9-46e9-8502-47fab9ff761c","Type":"ContainerDied","Data":"792a3568e787cfc41cee7d472f88c204c7d8314c5091fb0349e59f2ff91ddae6"} Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.164482 4558 scope.go:117] "RemoveContainer" containerID="9f1296f437298973ce0b0219f1980b151cca677e25fb375f92cfbf98670c7983" Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.164613 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.165432 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0696a635-5dc9-46e9-8502-47fab9ff761c-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "0696a635-5dc9-46e9-8502-47fab9ff761c" (UID: "0696a635-5dc9-46e9-8502-47fab9ff761c"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.166940 4558 generic.go:334] "Generic (PLEG): container finished" podID="73789760-f9bd-44e0-bf9b-c1864e5f8803" containerID="46ea3b795dad740b543a64ff0cb9c02ffc22ebc03e4c397cb2f2f859f67d530b" exitCode=0 Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.166997 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-59b8cc448d-b9wnn" event={"ID":"73789760-f9bd-44e0-bf9b-c1864e5f8803","Type":"ContainerDied","Data":"46ea3b795dad740b543a64ff0cb9c02ffc22ebc03e4c397cb2f2f859f67d530b"} Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.167014 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-59b8cc448d-b9wnn" event={"ID":"73789760-f9bd-44e0-bf9b-c1864e5f8803","Type":"ContainerDied","Data":"c373c169c88718501a4aca6353f4c3929127c2d9a1ff27e8e3ee3c889f567934"} Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.167071 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-59b8cc448d-b9wnn" Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.170065 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0696a635-5dc9-46e9-8502-47fab9ff761c-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.170150 4558 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0696a635-5dc9-46e9-8502-47fab9ff761c-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.170226 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qkf2f\" (UniqueName: \"kubernetes.io/projected/0696a635-5dc9-46e9-8502-47fab9ff761c-kube-api-access-qkf2f\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.170275 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0696a635-5dc9-46e9-8502-47fab9ff761c-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.170337 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0696a635-5dc9-46e9-8502-47fab9ff761c-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.170387 4558 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/0696a635-5dc9-46e9-8502-47fab9ff761c-server-conf\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.170454 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" " Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.170510 4558 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0696a635-5dc9-46e9-8502-47fab9ff761c-plugins-conf\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.170559 4558 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0696a635-5dc9-46e9-8502-47fab9ff761c-pod-info\") on node \"crc\" 
DevicePath \"\"" Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.170605 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/0696a635-5dc9-46e9-8502-47fab9ff761c-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.170657 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0696a635-5dc9-46e9-8502-47fab9ff761c-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.173582 4558 generic.go:334] "Generic (PLEG): container finished" podID="0c6a6265-cf90-4039-9200-ba478d612baa" containerID="b6873866e3f13dd35b6f2b58c613d3e9843e8b8a9fdc297c58001a3d665529a7" exitCode=0 Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.173625 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" event={"ID":"0c6a6265-cf90-4039-9200-ba478d612baa","Type":"ContainerDied","Data":"b6873866e3f13dd35b6f2b58c613d3e9843e8b8a9fdc297c58001a3d665529a7"} Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.193148 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage12-crc" (UniqueName: "kubernetes.io/local-volume/local-storage12-crc") on node "crc" Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.240445 4558 scope.go:117] "RemoveContainer" containerID="89fad12073aaf3b621cce6e9c2e512578ea6f57b9f2d06d4a3ac809c1fe90a00" Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.255511 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-59b8cc448d-b9wnn"] Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.261396 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-59b8cc448d-b9wnn"] Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.272696 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.281207 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.284071 4558 scope.go:117] "RemoveContainer" containerID="9f1296f437298973ce0b0219f1980b151cca677e25fb375f92cfbf98670c7983" Jan 20 17:37:48 crc kubenswrapper[4558]: E0120 17:37:48.284402 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9f1296f437298973ce0b0219f1980b151cca677e25fb375f92cfbf98670c7983\": container with ID starting with 9f1296f437298973ce0b0219f1980b151cca677e25fb375f92cfbf98670c7983 not found: ID does not exist" containerID="9f1296f437298973ce0b0219f1980b151cca677e25fb375f92cfbf98670c7983" Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.284429 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9f1296f437298973ce0b0219f1980b151cca677e25fb375f92cfbf98670c7983"} err="failed to get container status \"9f1296f437298973ce0b0219f1980b151cca677e25fb375f92cfbf98670c7983\": rpc error: code = NotFound desc = could not find container \"9f1296f437298973ce0b0219f1980b151cca677e25fb375f92cfbf98670c7983\": container with ID starting with 9f1296f437298973ce0b0219f1980b151cca677e25fb375f92cfbf98670c7983 not found: ID does not exist" Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.284450 4558 scope.go:117] "RemoveContainer" containerID="89fad12073aaf3b621cce6e9c2e512578ea6f57b9f2d06d4a3ac809c1fe90a00" Jan 20 17:37:48 crc kubenswrapper[4558]: E0120 17:37:48.284932 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"89fad12073aaf3b621cce6e9c2e512578ea6f57b9f2d06d4a3ac809c1fe90a00\": container with ID starting with 89fad12073aaf3b621cce6e9c2e512578ea6f57b9f2d06d4a3ac809c1fe90a00 not found: ID does not exist" containerID="89fad12073aaf3b621cce6e9c2e512578ea6f57b9f2d06d4a3ac809c1fe90a00" Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.284968 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"89fad12073aaf3b621cce6e9c2e512578ea6f57b9f2d06d4a3ac809c1fe90a00"} err="failed to get container status \"89fad12073aaf3b621cce6e9c2e512578ea6f57b9f2d06d4a3ac809c1fe90a00\": rpc error: code = NotFound desc = could not find container \"89fad12073aaf3b621cce6e9c2e512578ea6f57b9f2d06d4a3ac809c1fe90a00\": container with ID starting with 89fad12073aaf3b621cce6e9c2e512578ea6f57b9f2d06d4a3ac809c1fe90a00 not found: ID does not exist" Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.284991 4558 scope.go:117] "RemoveContainer" containerID="46ea3b795dad740b543a64ff0cb9c02ffc22ebc03e4c397cb2f2f859f67d530b" Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.309410 4558 scope.go:117] "RemoveContainer" containerID="46ea3b795dad740b543a64ff0cb9c02ffc22ebc03e4c397cb2f2f859f67d530b" Jan 20 17:37:48 crc kubenswrapper[4558]: E0120 17:37:48.309762 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"46ea3b795dad740b543a64ff0cb9c02ffc22ebc03e4c397cb2f2f859f67d530b\": container with ID starting with 46ea3b795dad740b543a64ff0cb9c02ffc22ebc03e4c397cb2f2f859f67d530b not found: ID does not exist" containerID="46ea3b795dad740b543a64ff0cb9c02ffc22ebc03e4c397cb2f2f859f67d530b" Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.309795 4558 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"46ea3b795dad740b543a64ff0cb9c02ffc22ebc03e4c397cb2f2f859f67d530b"} err="failed to get container status \"46ea3b795dad740b543a64ff0cb9c02ffc22ebc03e4c397cb2f2f859f67d530b\": rpc error: code = NotFound desc = could not find container \"46ea3b795dad740b543a64ff0cb9c02ffc22ebc03e4c397cb2f2f859f67d530b\": container with ID starting with 46ea3b795dad740b543a64ff0cb9c02ffc22ebc03e4c397cb2f2f859f67d530b not found: ID does not exist" Jan 20 17:37:48 crc kubenswrapper[4558]: E0120 17:37:48.329837 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="29de3130a0b6727cccd3d029a221407989f24f198d0147b0d9d994fbbe61ae64" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:37:48 crc kubenswrapper[4558]: E0120 17:37:48.334941 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="29de3130a0b6727cccd3d029a221407989f24f198d0147b0d9d994fbbe61ae64" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:37:48 crc kubenswrapper[4558]: E0120 17:37:48.337837 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="29de3130a0b6727cccd3d029a221407989f24f198d0147b0d9d994fbbe61ae64" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:37:48 crc kubenswrapper[4558]: E0120 17:37:48.337881 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="0d2b6a10-ca95-4d72-a80b-1706822a07a7" containerName="nova-cell0-conductor-conductor" Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.373820 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/0c6a6265-cf90-4039-9200-ba478d612baa-server-conf\") pod \"0c6a6265-cf90-4039-9200-ba478d612baa\" (UID: \"0c6a6265-cf90-4039-9200-ba478d612baa\") " Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.373891 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0c6a6265-cf90-4039-9200-ba478d612baa-rabbitmq-confd\") pod \"0c6a6265-cf90-4039-9200-ba478d612baa\" (UID: \"0c6a6265-cf90-4039-9200-ba478d612baa\") " Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.373949 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"0c6a6265-cf90-4039-9200-ba478d612baa\" (UID: \"0c6a6265-cf90-4039-9200-ba478d612baa\") " Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.373980 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0c6a6265-cf90-4039-9200-ba478d612baa-rabbitmq-plugins\") pod \"0c6a6265-cf90-4039-9200-ba478d612baa\" (UID: \"0c6a6265-cf90-4039-9200-ba478d612baa\") " Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.374002 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0c6a6265-cf90-4039-9200-ba478d612baa-rabbitmq-erlang-cookie\") pod \"0c6a6265-cf90-4039-9200-ba478d612baa\" (UID: \"0c6a6265-cf90-4039-9200-ba478d612baa\") " Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.374021 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0c6a6265-cf90-4039-9200-ba478d612baa-erlang-cookie-secret\") pod \"0c6a6265-cf90-4039-9200-ba478d612baa\" (UID: \"0c6a6265-cf90-4039-9200-ba478d612baa\") " Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.374066 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0c6a6265-cf90-4039-9200-ba478d612baa-pod-info\") pod \"0c6a6265-cf90-4039-9200-ba478d612baa\" (UID: \"0c6a6265-cf90-4039-9200-ba478d612baa\") " Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.374103 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0c6a6265-cf90-4039-9200-ba478d612baa-config-data\") pod \"0c6a6265-cf90-4039-9200-ba478d612baa\" (UID: \"0c6a6265-cf90-4039-9200-ba478d612baa\") " Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.374184 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/0c6a6265-cf90-4039-9200-ba478d612baa-rabbitmq-tls\") pod \"0c6a6265-cf90-4039-9200-ba478d612baa\" (UID: \"0c6a6265-cf90-4039-9200-ba478d612baa\") " Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.374211 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r5k8q\" (UniqueName: \"kubernetes.io/projected/0c6a6265-cf90-4039-9200-ba478d612baa-kube-api-access-r5k8q\") pod \"0c6a6265-cf90-4039-9200-ba478d612baa\" (UID: \"0c6a6265-cf90-4039-9200-ba478d612baa\") " Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.374244 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0c6a6265-cf90-4039-9200-ba478d612baa-plugins-conf\") pod \"0c6a6265-cf90-4039-9200-ba478d612baa\" (UID: \"0c6a6265-cf90-4039-9200-ba478d612baa\") " Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.374572 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0c6a6265-cf90-4039-9200-ba478d612baa-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "0c6a6265-cf90-4039-9200-ba478d612baa" (UID: "0c6a6265-cf90-4039-9200-ba478d612baa"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:37:48 crc kubenswrapper[4558]: E0120 17:37:48.375355 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.375373 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0c6a6265-cf90-4039-9200-ba478d612baa-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "0c6a6265-cf90-4039-9200-ba478d612baa" (UID: "0c6a6265-cf90-4039-9200-ba478d612baa"). InnerVolumeSpecName "rabbitmq-plugins". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:37:48 crc kubenswrapper[4558]: E0120 17:37:48.375539 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-memcached-svc: secret "cert-memcached-svc" not found Jan 20 17:37:48 crc kubenswrapper[4558]: E0120 17:37:48.375633 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b18f4fcf-eaae-401f-99ec-7b130ad8a6c1-combined-ca-bundle podName:b18f4fcf-eaae-401f-99ec-7b130ad8a6c1 nodeName:}" failed. No retries permitted until 2026-01-20 17:37:56.375471268 +0000 UTC m=+3370.135809234 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/b18f4fcf-eaae-401f-99ec-7b130ad8a6c1-combined-ca-bundle") pod "memcached-0" (UID: "b18f4fcf-eaae-401f-99ec-7b130ad8a6c1") : secret "combined-ca-bundle" not found Jan 20 17:37:48 crc kubenswrapper[4558]: E0120 17:37:48.375726 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b18f4fcf-eaae-401f-99ec-7b130ad8a6c1-memcached-tls-certs podName:b18f4fcf-eaae-401f-99ec-7b130ad8a6c1 nodeName:}" failed. No retries permitted until 2026-01-20 17:37:56.375715116 +0000 UTC m=+3370.136053084 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "memcached-tls-certs" (UniqueName: "kubernetes.io/secret/b18f4fcf-eaae-401f-99ec-7b130ad8a6c1-memcached-tls-certs") pod "memcached-0" (UID: "b18f4fcf-eaae-401f-99ec-7b130ad8a6c1") : secret "cert-memcached-svc" not found Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.375731 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0c6a6265-cf90-4039-9200-ba478d612baa-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.376079 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0c6a6265-cf90-4039-9200-ba478d612baa-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "0c6a6265-cf90-4039-9200-ba478d612baa" (UID: "0c6a6265-cf90-4039-9200-ba478d612baa"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.377710 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "persistence") pod "0c6a6265-cf90-4039-9200-ba478d612baa" (UID: "0c6a6265-cf90-4039-9200-ba478d612baa"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.378111 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/0c6a6265-cf90-4039-9200-ba478d612baa-pod-info" (OuterVolumeSpecName: "pod-info") pod "0c6a6265-cf90-4039-9200-ba478d612baa" (UID: "0c6a6265-cf90-4039-9200-ba478d612baa"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.379180 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0c6a6265-cf90-4039-9200-ba478d612baa-kube-api-access-r5k8q" (OuterVolumeSpecName: "kube-api-access-r5k8q") pod "0c6a6265-cf90-4039-9200-ba478d612baa" (UID: "0c6a6265-cf90-4039-9200-ba478d612baa"). InnerVolumeSpecName "kube-api-access-r5k8q". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.379392 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0c6a6265-cf90-4039-9200-ba478d612baa-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "0c6a6265-cf90-4039-9200-ba478d612baa" (UID: "0c6a6265-cf90-4039-9200-ba478d612baa"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.379536 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0c6a6265-cf90-4039-9200-ba478d612baa-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "0c6a6265-cf90-4039-9200-ba478d612baa" (UID: "0c6a6265-cf90-4039-9200-ba478d612baa"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.396458 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0c6a6265-cf90-4039-9200-ba478d612baa-config-data" (OuterVolumeSpecName: "config-data") pod "0c6a6265-cf90-4039-9200-ba478d612baa" (UID: "0c6a6265-cf90-4039-9200-ba478d612baa"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.410640 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0c6a6265-cf90-4039-9200-ba478d612baa-server-conf" (OuterVolumeSpecName: "server-conf") pod "0c6a6265-cf90-4039-9200-ba478d612baa" (UID: "0c6a6265-cf90-4039-9200-ba478d612baa"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.458024 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0c6a6265-cf90-4039-9200-ba478d612baa-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "0c6a6265-cf90-4039-9200-ba478d612baa" (UID: "0c6a6265-cf90-4039-9200-ba478d612baa"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.484885 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.485224 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0c6a6265-cf90-4039-9200-ba478d612baa-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.485268 4558 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0c6a6265-cf90-4039-9200-ba478d612baa-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.485288 4558 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0c6a6265-cf90-4039-9200-ba478d612baa-pod-info\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.485297 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0c6a6265-cf90-4039-9200-ba478d612baa-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.485320 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/0c6a6265-cf90-4039-9200-ba478d612baa-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.485365 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r5k8q\" (UniqueName: \"kubernetes.io/projected/0c6a6265-cf90-4039-9200-ba478d612baa-kube-api-access-r5k8q\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.485389 4558 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0c6a6265-cf90-4039-9200-ba478d612baa-plugins-conf\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.485398 4558 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/0c6a6265-cf90-4039-9200-ba478d612baa-server-conf\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.485419 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0c6a6265-cf90-4039-9200-ba478d612baa-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.495748 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc" Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.498799 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.506657 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:37:48 crc kubenswrapper[4558]: E0120 17:37:48.569799 4558 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: 
[\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0696a635_5dc9_46e9_8502_47fab9ff761c.slice\": RecentStats: unable to find data in memory cache]" Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.576091 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0696a635-5dc9-46e9-8502-47fab9ff761c" path="/var/lib/kubelet/pods/0696a635-5dc9-46e9-8502-47fab9ff761c/volumes" Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.577039 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="73789760-f9bd-44e0-bf9b-c1864e5f8803" path="/var/lib/kubelet/pods/73789760-f9bd-44e0-bf9b-c1864e5f8803/volumes" Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.577832 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="906beedc-3fa2-4d6e-a6e8-485ca2fb1082" path="/var/lib/kubelet/pods/906beedc-3fa2-4d6e-a6e8-485ca2fb1082/volumes" Jan 20 17:37:48 crc kubenswrapper[4558]: I0120 17:37:48.587206 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:49 crc kubenswrapper[4558]: I0120 17:37:49.079326 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-dd549b649-grdrm" Jan 20 17:37:49 crc kubenswrapper[4558]: I0120 17:37:49.168625 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-6d4445bf46-57dqj" Jan 20 17:37:49 crc kubenswrapper[4558]: I0120 17:37:49.193428 4558 generic.go:334] "Generic (PLEG): container finished" podID="11d1aa99-639c-451a-876e-59de6098e407" containerID="cb6897619dc4891e5845ed9d58dbce37e97a4c36140e8ccd66c2309dcdea8abb" exitCode=0 Jan 20 17:37:49 crc kubenswrapper[4558]: I0120 17:37:49.193494 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-6d4445bf46-57dqj" event={"ID":"11d1aa99-639c-451a-876e-59de6098e407","Type":"ContainerDied","Data":"cb6897619dc4891e5845ed9d58dbce37e97a4c36140e8ccd66c2309dcdea8abb"} Jan 20 17:37:49 crc kubenswrapper[4558]: I0120 17:37:49.193525 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-6d4445bf46-57dqj" event={"ID":"11d1aa99-639c-451a-876e-59de6098e407","Type":"ContainerDied","Data":"6c93cca8c9437e2f5e4d214be622632393244aadf349ae3d098813341938a3c1"} Jan 20 17:37:49 crc kubenswrapper[4558]: I0120 17:37:49.193551 4558 scope.go:117] "RemoveContainer" containerID="cb6897619dc4891e5845ed9d58dbce37e97a4c36140e8ccd66c2309dcdea8abb" Jan 20 17:37:49 crc kubenswrapper[4558]: I0120 17:37:49.193688 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-6d4445bf46-57dqj" Jan 20 17:37:49 crc kubenswrapper[4558]: I0120 17:37:49.197150 4558 generic.go:334] "Generic (PLEG): container finished" podID="640de3b3-60e1-41b8-ab00-22e375bad65c" containerID="c97affc0c0f8fba83bdeaf55c35e5f0b460177945ced19a520ae5ea01ea75dfd" exitCode=0 Jan 20 17:37:49 crc kubenswrapper[4558]: I0120 17:37:49.197200 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-dd549b649-grdrm" Jan 20 17:37:49 crc kubenswrapper[4558]: I0120 17:37:49.197198 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-dd549b649-grdrm" event={"ID":"640de3b3-60e1-41b8-ab00-22e375bad65c","Type":"ContainerDied","Data":"c97affc0c0f8fba83bdeaf55c35e5f0b460177945ced19a520ae5ea01ea75dfd"} Jan 20 17:37:49 crc kubenswrapper[4558]: I0120 17:37:49.197312 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-dd549b649-grdrm" event={"ID":"640de3b3-60e1-41b8-ab00-22e375bad65c","Type":"ContainerDied","Data":"64c5a0b4f6f64a2c7351affbc5ef072d820638d5f937ae2941cbbbdbbb97d6dd"} Jan 20 17:37:49 crc kubenswrapper[4558]: I0120 17:37:49.200011 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" event={"ID":"0c6a6265-cf90-4039-9200-ba478d612baa","Type":"ContainerDied","Data":"364bffe31431af105ef6fe2c18d88e8f24a66280d58bb91d9fab9166a07450c1"} Jan 20 17:37:49 crc kubenswrapper[4558]: I0120 17:37:49.200026 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:37:49 crc kubenswrapper[4558]: I0120 17:37:49.206895 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/640de3b3-60e1-41b8-ab00-22e375bad65c-config-data\") pod \"640de3b3-60e1-41b8-ab00-22e375bad65c\" (UID: \"640de3b3-60e1-41b8-ab00-22e375bad65c\") " Jan 20 17:37:49 crc kubenswrapper[4558]: I0120 17:37:49.207719 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k7grk\" (UniqueName: \"kubernetes.io/projected/640de3b3-60e1-41b8-ab00-22e375bad65c-kube-api-access-k7grk\") pod \"640de3b3-60e1-41b8-ab00-22e375bad65c\" (UID: \"640de3b3-60e1-41b8-ab00-22e375bad65c\") " Jan 20 17:37:49 crc kubenswrapper[4558]: I0120 17:37:49.207755 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/640de3b3-60e1-41b8-ab00-22e375bad65c-combined-ca-bundle\") pod \"640de3b3-60e1-41b8-ab00-22e375bad65c\" (UID: \"640de3b3-60e1-41b8-ab00-22e375bad65c\") " Jan 20 17:37:49 crc kubenswrapper[4558]: I0120 17:37:49.207810 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/640de3b3-60e1-41b8-ab00-22e375bad65c-config-data-custom\") pod \"640de3b3-60e1-41b8-ab00-22e375bad65c\" (UID: \"640de3b3-60e1-41b8-ab00-22e375bad65c\") " Jan 20 17:37:49 crc kubenswrapper[4558]: I0120 17:37:49.207862 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/640de3b3-60e1-41b8-ab00-22e375bad65c-logs\") pod \"640de3b3-60e1-41b8-ab00-22e375bad65c\" (UID: \"640de3b3-60e1-41b8-ab00-22e375bad65c\") " Jan 20 17:37:49 crc kubenswrapper[4558]: I0120 17:37:49.208685 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/640de3b3-60e1-41b8-ab00-22e375bad65c-logs" (OuterVolumeSpecName: "logs") pod "640de3b3-60e1-41b8-ab00-22e375bad65c" (UID: "640de3b3-60e1-41b8-ab00-22e375bad65c"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:37:49 crc kubenswrapper[4558]: I0120 17:37:49.216916 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/640de3b3-60e1-41b8-ab00-22e375bad65c-kube-api-access-k7grk" (OuterVolumeSpecName: "kube-api-access-k7grk") pod "640de3b3-60e1-41b8-ab00-22e375bad65c" (UID: "640de3b3-60e1-41b8-ab00-22e375bad65c"). InnerVolumeSpecName "kube-api-access-k7grk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:37:49 crc kubenswrapper[4558]: I0120 17:37:49.225193 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-0"] Jan 20 17:37:49 crc kubenswrapper[4558]: I0120 17:37:49.231814 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/640de3b3-60e1-41b8-ab00-22e375bad65c-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "640de3b3-60e1-41b8-ab00-22e375bad65c" (UID: "640de3b3-60e1-41b8-ab00-22e375bad65c"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:49 crc kubenswrapper[4558]: I0120 17:37:49.233015 4558 scope.go:117] "RemoveContainer" containerID="db329d06eb52b93ff0cb7bb5890e9f6c1ce660d1bfa28f5bef02917fc16526c6" Jan 20 17:37:49 crc kubenswrapper[4558]: I0120 17:37:49.234299 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-0"] Jan 20 17:37:49 crc kubenswrapper[4558]: I0120 17:37:49.245293 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/640de3b3-60e1-41b8-ab00-22e375bad65c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "640de3b3-60e1-41b8-ab00-22e375bad65c" (UID: "640de3b3-60e1-41b8-ab00-22e375bad65c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:49 crc kubenswrapper[4558]: I0120 17:37:49.249594 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/640de3b3-60e1-41b8-ab00-22e375bad65c-config-data" (OuterVolumeSpecName: "config-data") pod "640de3b3-60e1-41b8-ab00-22e375bad65c" (UID: "640de3b3-60e1-41b8-ab00-22e375bad65c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:49 crc kubenswrapper[4558]: I0120 17:37:49.292782 4558 scope.go:117] "RemoveContainer" containerID="cb6897619dc4891e5845ed9d58dbce37e97a4c36140e8ccd66c2309dcdea8abb" Jan 20 17:37:49 crc kubenswrapper[4558]: E0120 17:37:49.293203 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cb6897619dc4891e5845ed9d58dbce37e97a4c36140e8ccd66c2309dcdea8abb\": container with ID starting with cb6897619dc4891e5845ed9d58dbce37e97a4c36140e8ccd66c2309dcdea8abb not found: ID does not exist" containerID="cb6897619dc4891e5845ed9d58dbce37e97a4c36140e8ccd66c2309dcdea8abb" Jan 20 17:37:49 crc kubenswrapper[4558]: I0120 17:37:49.293265 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cb6897619dc4891e5845ed9d58dbce37e97a4c36140e8ccd66c2309dcdea8abb"} err="failed to get container status \"cb6897619dc4891e5845ed9d58dbce37e97a4c36140e8ccd66c2309dcdea8abb\": rpc error: code = NotFound desc = could not find container \"cb6897619dc4891e5845ed9d58dbce37e97a4c36140e8ccd66c2309dcdea8abb\": container with ID starting with cb6897619dc4891e5845ed9d58dbce37e97a4c36140e8ccd66c2309dcdea8abb not found: ID does not exist" Jan 20 17:37:49 crc kubenswrapper[4558]: I0120 17:37:49.293302 4558 scope.go:117] "RemoveContainer" containerID="db329d06eb52b93ff0cb7bb5890e9f6c1ce660d1bfa28f5bef02917fc16526c6" Jan 20 17:37:49 crc kubenswrapper[4558]: E0120 17:37:49.293653 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"db329d06eb52b93ff0cb7bb5890e9f6c1ce660d1bfa28f5bef02917fc16526c6\": container with ID starting with db329d06eb52b93ff0cb7bb5890e9f6c1ce660d1bfa28f5bef02917fc16526c6 not found: ID does not exist" containerID="db329d06eb52b93ff0cb7bb5890e9f6c1ce660d1bfa28f5bef02917fc16526c6" Jan 20 17:37:49 crc kubenswrapper[4558]: I0120 17:37:49.293698 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"db329d06eb52b93ff0cb7bb5890e9f6c1ce660d1bfa28f5bef02917fc16526c6"} err="failed to get container status \"db329d06eb52b93ff0cb7bb5890e9f6c1ce660d1bfa28f5bef02917fc16526c6\": rpc error: code = NotFound desc = could not find container \"db329d06eb52b93ff0cb7bb5890e9f6c1ce660d1bfa28f5bef02917fc16526c6\": container with ID starting with db329d06eb52b93ff0cb7bb5890e9f6c1ce660d1bfa28f5bef02917fc16526c6 not found: ID does not exist" Jan 20 17:37:49 crc kubenswrapper[4558]: I0120 17:37:49.293726 4558 scope.go:117] "RemoveContainer" containerID="c97affc0c0f8fba83bdeaf55c35e5f0b460177945ced19a520ae5ea01ea75dfd" Jan 20 17:37:49 crc kubenswrapper[4558]: I0120 17:37:49.309591 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/11d1aa99-639c-451a-876e-59de6098e407-config-data-custom\") pod \"11d1aa99-639c-451a-876e-59de6098e407\" (UID: \"11d1aa99-639c-451a-876e-59de6098e407\") " Jan 20 17:37:49 crc kubenswrapper[4558]: I0120 17:37:49.309652 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/11d1aa99-639c-451a-876e-59de6098e407-config-data\") pod \"11d1aa99-639c-451a-876e-59de6098e407\" (UID: \"11d1aa99-639c-451a-876e-59de6098e407\") " Jan 20 17:37:49 crc kubenswrapper[4558]: I0120 17:37:49.309678 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/11d1aa99-639c-451a-876e-59de6098e407-logs\") pod \"11d1aa99-639c-451a-876e-59de6098e407\" (UID: \"11d1aa99-639c-451a-876e-59de6098e407\") " Jan 20 17:37:49 crc kubenswrapper[4558]: I0120 17:37:49.309725 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ztbb6\" (UniqueName: \"kubernetes.io/projected/11d1aa99-639c-451a-876e-59de6098e407-kube-api-access-ztbb6\") pod \"11d1aa99-639c-451a-876e-59de6098e407\" (UID: \"11d1aa99-639c-451a-876e-59de6098e407\") " Jan 20 17:37:49 crc kubenswrapper[4558]: I0120 17:37:49.309973 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11d1aa99-639c-451a-876e-59de6098e407-combined-ca-bundle\") pod \"11d1aa99-639c-451a-876e-59de6098e407\" (UID: \"11d1aa99-639c-451a-876e-59de6098e407\") " Jan 20 17:37:49 crc kubenswrapper[4558]: I0120 17:37:49.310136 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/11d1aa99-639c-451a-876e-59de6098e407-logs" (OuterVolumeSpecName: "logs") pod "11d1aa99-639c-451a-876e-59de6098e407" (UID: "11d1aa99-639c-451a-876e-59de6098e407"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:37:49 crc kubenswrapper[4558]: I0120 17:37:49.310232 4558 scope.go:117] "RemoveContainer" containerID="6c5d77de3e10ffa19cd49dc2e45df3fcf7c673448b447d8327c2137ebcc78c33" Jan 20 17:37:49 crc kubenswrapper[4558]: I0120 17:37:49.310509 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/640de3b3-60e1-41b8-ab00-22e375bad65c-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:49 crc kubenswrapper[4558]: I0120 17:37:49.310526 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/11d1aa99-639c-451a-876e-59de6098e407-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:49 crc kubenswrapper[4558]: I0120 17:37:49.310537 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k7grk\" (UniqueName: \"kubernetes.io/projected/640de3b3-60e1-41b8-ab00-22e375bad65c-kube-api-access-k7grk\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:49 crc kubenswrapper[4558]: I0120 17:37:49.310553 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/640de3b3-60e1-41b8-ab00-22e375bad65c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:49 crc kubenswrapper[4558]: I0120 17:37:49.310562 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/640de3b3-60e1-41b8-ab00-22e375bad65c-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:49 crc kubenswrapper[4558]: I0120 17:37:49.310570 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/640de3b3-60e1-41b8-ab00-22e375bad65c-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:49 crc kubenswrapper[4558]: I0120 17:37:49.312349 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/11d1aa99-639c-451a-876e-59de6098e407-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "11d1aa99-639c-451a-876e-59de6098e407" (UID: "11d1aa99-639c-451a-876e-59de6098e407"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:49 crc kubenswrapper[4558]: I0120 17:37:49.313302 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/11d1aa99-639c-451a-876e-59de6098e407-kube-api-access-ztbb6" (OuterVolumeSpecName: "kube-api-access-ztbb6") pod "11d1aa99-639c-451a-876e-59de6098e407" (UID: "11d1aa99-639c-451a-876e-59de6098e407"). InnerVolumeSpecName "kube-api-access-ztbb6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:37:49 crc kubenswrapper[4558]: I0120 17:37:49.325155 4558 scope.go:117] "RemoveContainer" containerID="c97affc0c0f8fba83bdeaf55c35e5f0b460177945ced19a520ae5ea01ea75dfd" Jan 20 17:37:49 crc kubenswrapper[4558]: E0120 17:37:49.325486 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c97affc0c0f8fba83bdeaf55c35e5f0b460177945ced19a520ae5ea01ea75dfd\": container with ID starting with c97affc0c0f8fba83bdeaf55c35e5f0b460177945ced19a520ae5ea01ea75dfd not found: ID does not exist" containerID="c97affc0c0f8fba83bdeaf55c35e5f0b460177945ced19a520ae5ea01ea75dfd" Jan 20 17:37:49 crc kubenswrapper[4558]: I0120 17:37:49.325522 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c97affc0c0f8fba83bdeaf55c35e5f0b460177945ced19a520ae5ea01ea75dfd"} err="failed to get container status \"c97affc0c0f8fba83bdeaf55c35e5f0b460177945ced19a520ae5ea01ea75dfd\": rpc error: code = NotFound desc = could not find container \"c97affc0c0f8fba83bdeaf55c35e5f0b460177945ced19a520ae5ea01ea75dfd\": container with ID starting with c97affc0c0f8fba83bdeaf55c35e5f0b460177945ced19a520ae5ea01ea75dfd not found: ID does not exist" Jan 20 17:37:49 crc kubenswrapper[4558]: I0120 17:37:49.325545 4558 scope.go:117] "RemoveContainer" containerID="6c5d77de3e10ffa19cd49dc2e45df3fcf7c673448b447d8327c2137ebcc78c33" Jan 20 17:37:49 crc kubenswrapper[4558]: E0120 17:37:49.325881 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6c5d77de3e10ffa19cd49dc2e45df3fcf7c673448b447d8327c2137ebcc78c33\": container with ID starting with 6c5d77de3e10ffa19cd49dc2e45df3fcf7c673448b447d8327c2137ebcc78c33 not found: ID does not exist" containerID="6c5d77de3e10ffa19cd49dc2e45df3fcf7c673448b447d8327c2137ebcc78c33" Jan 20 17:37:49 crc kubenswrapper[4558]: I0120 17:37:49.325912 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6c5d77de3e10ffa19cd49dc2e45df3fcf7c673448b447d8327c2137ebcc78c33"} err="failed to get container status \"6c5d77de3e10ffa19cd49dc2e45df3fcf7c673448b447d8327c2137ebcc78c33\": rpc error: code = NotFound desc = could not find container \"6c5d77de3e10ffa19cd49dc2e45df3fcf7c673448b447d8327c2137ebcc78c33\": container with ID starting with 6c5d77de3e10ffa19cd49dc2e45df3fcf7c673448b447d8327c2137ebcc78c33 not found: ID does not exist" Jan 20 17:37:49 crc kubenswrapper[4558]: I0120 17:37:49.325929 4558 scope.go:117] "RemoveContainer" containerID="b6873866e3f13dd35b6f2b58c613d3e9843e8b8a9fdc297c58001a3d665529a7" Jan 20 17:37:49 crc kubenswrapper[4558]: I0120 17:37:49.327366 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/11d1aa99-639c-451a-876e-59de6098e407-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "11d1aa99-639c-451a-876e-59de6098e407" (UID: "11d1aa99-639c-451a-876e-59de6098e407"). 
InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:49 crc kubenswrapper[4558]: I0120 17:37:49.335563 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/11d1aa99-639c-451a-876e-59de6098e407-config-data" (OuterVolumeSpecName: "config-data") pod "11d1aa99-639c-451a-876e-59de6098e407" (UID: "11d1aa99-639c-451a-876e-59de6098e407"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:49 crc kubenswrapper[4558]: I0120 17:37:49.344203 4558 scope.go:117] "RemoveContainer" containerID="44c62c978803560adab011f48335024e3733608109c5e30c32a43f89dc5997fd" Jan 20 17:37:49 crc kubenswrapper[4558]: I0120 17:37:49.412346 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11d1aa99-639c-451a-876e-59de6098e407-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:49 crc kubenswrapper[4558]: I0120 17:37:49.412380 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/11d1aa99-639c-451a-876e-59de6098e407-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:49 crc kubenswrapper[4558]: I0120 17:37:49.412390 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/11d1aa99-639c-451a-876e-59de6098e407-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:49 crc kubenswrapper[4558]: I0120 17:37:49.412403 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ztbb6\" (UniqueName: \"kubernetes.io/projected/11d1aa99-639c-451a-876e-59de6098e407-kube-api-access-ztbb6\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:49 crc kubenswrapper[4558]: I0120 17:37:49.535080 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-6d4445bf46-57dqj"] Jan 20 17:37:49 crc kubenswrapper[4558]: I0120 17:37:49.541825 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-6d4445bf46-57dqj"] Jan 20 17:37:49 crc kubenswrapper[4558]: I0120 17:37:49.546522 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-worker-dd549b649-grdrm"] Jan 20 17:37:49 crc kubenswrapper[4558]: I0120 17:37:49.553196 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-worker-dd549b649-grdrm"] Jan 20 17:37:50 crc kubenswrapper[4558]: I0120 17:37:50.083380 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:37:50 crc kubenswrapper[4558]: I0120 17:37:50.212753 4558 generic.go:334] "Generic (PLEG): container finished" podID="0d2b6a10-ca95-4d72-a80b-1706822a07a7" containerID="29de3130a0b6727cccd3d029a221407989f24f198d0147b0d9d994fbbe61ae64" exitCode=0 Jan 20 17:37:50 crc kubenswrapper[4558]: I0120 17:37:50.212833 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"0d2b6a10-ca95-4d72-a80b-1706822a07a7","Type":"ContainerDied","Data":"29de3130a0b6727cccd3d029a221407989f24f198d0147b0d9d994fbbe61ae64"} Jan 20 17:37:50 crc kubenswrapper[4558]: I0120 17:37:50.212877 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"0d2b6a10-ca95-4d72-a80b-1706822a07a7","Type":"ContainerDied","Data":"26f4693a90769d4adb810c7a07bab664111d1d6749bae0af629048a3de8a1ac9"} Jan 20 17:37:50 crc kubenswrapper[4558]: I0120 17:37:50.212904 4558 scope.go:117] "RemoveContainer" containerID="29de3130a0b6727cccd3d029a221407989f24f198d0147b0d9d994fbbe61ae64" Jan 20 17:37:50 crc kubenswrapper[4558]: I0120 17:37:50.213027 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:37:50 crc kubenswrapper[4558]: I0120 17:37:50.224452 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c7q7h\" (UniqueName: \"kubernetes.io/projected/0d2b6a10-ca95-4d72-a80b-1706822a07a7-kube-api-access-c7q7h\") pod \"0d2b6a10-ca95-4d72-a80b-1706822a07a7\" (UID: \"0d2b6a10-ca95-4d72-a80b-1706822a07a7\") " Jan 20 17:37:50 crc kubenswrapper[4558]: I0120 17:37:50.224563 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d2b6a10-ca95-4d72-a80b-1706822a07a7-combined-ca-bundle\") pod \"0d2b6a10-ca95-4d72-a80b-1706822a07a7\" (UID: \"0d2b6a10-ca95-4d72-a80b-1706822a07a7\") " Jan 20 17:37:50 crc kubenswrapper[4558]: I0120 17:37:50.225079 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d2b6a10-ca95-4d72-a80b-1706822a07a7-config-data\") pod \"0d2b6a10-ca95-4d72-a80b-1706822a07a7\" (UID: \"0d2b6a10-ca95-4d72-a80b-1706822a07a7\") " Jan 20 17:37:50 crc kubenswrapper[4558]: I0120 17:37:50.239524 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0d2b6a10-ca95-4d72-a80b-1706822a07a7-kube-api-access-c7q7h" (OuterVolumeSpecName: "kube-api-access-c7q7h") pod "0d2b6a10-ca95-4d72-a80b-1706822a07a7" (UID: "0d2b6a10-ca95-4d72-a80b-1706822a07a7"). InnerVolumeSpecName "kube-api-access-c7q7h". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:37:50 crc kubenswrapper[4558]: I0120 17:37:50.239702 4558 scope.go:117] "RemoveContainer" containerID="99dba2fa739dee263fd622caaa8566eb366703c39dd9dcf2a61dbad75ba8d1e5" Jan 20 17:37:50 crc kubenswrapper[4558]: I0120 17:37:50.242021 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d2b6a10-ca95-4d72-a80b-1706822a07a7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0d2b6a10-ca95-4d72-a80b-1706822a07a7" (UID: "0d2b6a10-ca95-4d72-a80b-1706822a07a7"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:50 crc kubenswrapper[4558]: I0120 17:37:50.246612 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d2b6a10-ca95-4d72-a80b-1706822a07a7-config-data" (OuterVolumeSpecName: "config-data") pod "0d2b6a10-ca95-4d72-a80b-1706822a07a7" (UID: "0d2b6a10-ca95-4d72-a80b-1706822a07a7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:50 crc kubenswrapper[4558]: I0120 17:37:50.302809 4558 scope.go:117] "RemoveContainer" containerID="29de3130a0b6727cccd3d029a221407989f24f198d0147b0d9d994fbbe61ae64" Jan 20 17:37:50 crc kubenswrapper[4558]: E0120 17:37:50.303269 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"29de3130a0b6727cccd3d029a221407989f24f198d0147b0d9d994fbbe61ae64\": container with ID starting with 29de3130a0b6727cccd3d029a221407989f24f198d0147b0d9d994fbbe61ae64 not found: ID does not exist" containerID="29de3130a0b6727cccd3d029a221407989f24f198d0147b0d9d994fbbe61ae64" Jan 20 17:37:50 crc kubenswrapper[4558]: I0120 17:37:50.303316 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"29de3130a0b6727cccd3d029a221407989f24f198d0147b0d9d994fbbe61ae64"} err="failed to get container status \"29de3130a0b6727cccd3d029a221407989f24f198d0147b0d9d994fbbe61ae64\": rpc error: code = NotFound desc = could not find container \"29de3130a0b6727cccd3d029a221407989f24f198d0147b0d9d994fbbe61ae64\": container with ID starting with 29de3130a0b6727cccd3d029a221407989f24f198d0147b0d9d994fbbe61ae64 not found: ID does not exist" Jan 20 17:37:50 crc kubenswrapper[4558]: I0120 17:37:50.303356 4558 scope.go:117] "RemoveContainer" containerID="99dba2fa739dee263fd622caaa8566eb366703c39dd9dcf2a61dbad75ba8d1e5" Jan 20 17:37:50 crc kubenswrapper[4558]: E0120 17:37:50.303852 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"99dba2fa739dee263fd622caaa8566eb366703c39dd9dcf2a61dbad75ba8d1e5\": container with ID starting with 99dba2fa739dee263fd622caaa8566eb366703c39dd9dcf2a61dbad75ba8d1e5 not found: ID does not exist" containerID="99dba2fa739dee263fd622caaa8566eb366703c39dd9dcf2a61dbad75ba8d1e5" Jan 20 17:37:50 crc kubenswrapper[4558]: I0120 17:37:50.303899 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"99dba2fa739dee263fd622caaa8566eb366703c39dd9dcf2a61dbad75ba8d1e5"} err="failed to get container status \"99dba2fa739dee263fd622caaa8566eb366703c39dd9dcf2a61dbad75ba8d1e5\": rpc error: code = NotFound desc = could not find container \"99dba2fa739dee263fd622caaa8566eb366703c39dd9dcf2a61dbad75ba8d1e5\": container with ID starting with 99dba2fa739dee263fd622caaa8566eb366703c39dd9dcf2a61dbad75ba8d1e5 not found: ID does not exist" Jan 20 17:37:50 crc kubenswrapper[4558]: I0120 17:37:50.326611 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d2b6a10-ca95-4d72-a80b-1706822a07a7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:50 crc kubenswrapper[4558]: I0120 17:37:50.326641 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d2b6a10-ca95-4d72-a80b-1706822a07a7-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:50 crc kubenswrapper[4558]: I0120 17:37:50.326654 4558 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c7q7h\" (UniqueName: \"kubernetes.io/projected/0d2b6a10-ca95-4d72-a80b-1706822a07a7-kube-api-access-c7q7h\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:50 crc kubenswrapper[4558]: I0120 17:37:50.553582 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:37:50 crc kubenswrapper[4558]: I0120 17:37:50.556121 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:37:50 crc kubenswrapper[4558]: I0120 17:37:50.574900 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0c6a6265-cf90-4039-9200-ba478d612baa" path="/var/lib/kubelet/pods/0c6a6265-cf90-4039-9200-ba478d612baa/volumes" Jan 20 17:37:50 crc kubenswrapper[4558]: I0120 17:37:50.575497 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0d2b6a10-ca95-4d72-a80b-1706822a07a7" path="/var/lib/kubelet/pods/0d2b6a10-ca95-4d72-a80b-1706822a07a7/volumes" Jan 20 17:37:50 crc kubenswrapper[4558]: I0120 17:37:50.576034 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="11d1aa99-639c-451a-876e-59de6098e407" path="/var/lib/kubelet/pods/11d1aa99-639c-451a-876e-59de6098e407/volumes" Jan 20 17:37:50 crc kubenswrapper[4558]: I0120 17:37:50.577053 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="640de3b3-60e1-41b8-ab00-22e375bad65c" path="/var/lib/kubelet/pods/640de3b3-60e1-41b8-ab00-22e375bad65c/volumes" Jan 20 17:37:51 crc kubenswrapper[4558]: I0120 17:37:51.357371 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-9hlt9" Jan 20 17:37:51 crc kubenswrapper[4558]: I0120 17:37:51.420301 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-df78f4c65-4ns5m"] Jan 20 17:37:51 crc kubenswrapper[4558]: I0120 17:37:51.420864 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-df78f4c65-4ns5m" podUID="8763c4a4-3d9c-4e31-b33c-3bc9c67c55cf" containerName="dnsmasq-dns" containerID="cri-o://b3d7f402be9f8d06e4e0f9ddd335d93a05f7d8f3602e1aebbde602938c65db9b" gracePeriod=10 Jan 20 17:37:51 crc kubenswrapper[4558]: I0120 17:37:51.853849 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-df78f4c65-4ns5m" Jan 20 17:37:51 crc kubenswrapper[4558]: I0120 17:37:51.956524 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8763c4a4-3d9c-4e31-b33c-3bc9c67c55cf-config\") pod \"8763c4a4-3d9c-4e31-b33c-3bc9c67c55cf\" (UID: \"8763c4a4-3d9c-4e31-b33c-3bc9c67c55cf\") " Jan 20 17:37:51 crc kubenswrapper[4558]: I0120 17:37:51.956599 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8763c4a4-3d9c-4e31-b33c-3bc9c67c55cf-dns-swift-storage-0\") pod \"8763c4a4-3d9c-4e31-b33c-3bc9c67c55cf\" (UID: \"8763c4a4-3d9c-4e31-b33c-3bc9c67c55cf\") " Jan 20 17:37:51 crc kubenswrapper[4558]: I0120 17:37:51.956673 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dht5s\" (UniqueName: \"kubernetes.io/projected/8763c4a4-3d9c-4e31-b33c-3bc9c67c55cf-kube-api-access-dht5s\") pod \"8763c4a4-3d9c-4e31-b33c-3bc9c67c55cf\" (UID: \"8763c4a4-3d9c-4e31-b33c-3bc9c67c55cf\") " Jan 20 17:37:51 crc kubenswrapper[4558]: I0120 17:37:51.956832 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/8763c4a4-3d9c-4e31-b33c-3bc9c67c55cf-dnsmasq-svc\") pod \"8763c4a4-3d9c-4e31-b33c-3bc9c67c55cf\" (UID: \"8763c4a4-3d9c-4e31-b33c-3bc9c67c55cf\") " Jan 20 17:37:51 crc kubenswrapper[4558]: I0120 17:37:51.980396 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8763c4a4-3d9c-4e31-b33c-3bc9c67c55cf-kube-api-access-dht5s" (OuterVolumeSpecName: "kube-api-access-dht5s") pod "8763c4a4-3d9c-4e31-b33c-3bc9c67c55cf" (UID: "8763c4a4-3d9c-4e31-b33c-3bc9c67c55cf"). InnerVolumeSpecName "kube-api-access-dht5s". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:37:51 crc kubenswrapper[4558]: I0120 17:37:51.989281 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8763c4a4-3d9c-4e31-b33c-3bc9c67c55cf-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "8763c4a4-3d9c-4e31-b33c-3bc9c67c55cf" (UID: "8763c4a4-3d9c-4e31-b33c-3bc9c67c55cf"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:37:51 crc kubenswrapper[4558]: I0120 17:37:51.994980 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8763c4a4-3d9c-4e31-b33c-3bc9c67c55cf-config" (OuterVolumeSpecName: "config") pod "8763c4a4-3d9c-4e31-b33c-3bc9c67c55cf" (UID: "8763c4a4-3d9c-4e31-b33c-3bc9c67c55cf"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:37:51 crc kubenswrapper[4558]: I0120 17:37:51.995398 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8763c4a4-3d9c-4e31-b33c-3bc9c67c55cf-dnsmasq-svc" (OuterVolumeSpecName: "dnsmasq-svc") pod "8763c4a4-3d9c-4e31-b33c-3bc9c67c55cf" (UID: "8763c4a4-3d9c-4e31-b33c-3bc9c67c55cf"). InnerVolumeSpecName "dnsmasq-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:37:52 crc kubenswrapper[4558]: I0120 17:37:52.058604 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dht5s\" (UniqueName: \"kubernetes.io/projected/8763c4a4-3d9c-4e31-b33c-3bc9c67c55cf-kube-api-access-dht5s\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:52 crc kubenswrapper[4558]: I0120 17:37:52.058633 4558 reconciler_common.go:293] "Volume detached for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/8763c4a4-3d9c-4e31-b33c-3bc9c67c55cf-dnsmasq-svc\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:52 crc kubenswrapper[4558]: I0120 17:37:52.058646 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8763c4a4-3d9c-4e31-b33c-3bc9c67c55cf-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:52 crc kubenswrapper[4558]: I0120 17:37:52.058657 4558 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8763c4a4-3d9c-4e31-b33c-3bc9c67c55cf-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:52 crc kubenswrapper[4558]: I0120 17:37:52.186046 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:37:52 crc kubenswrapper[4558]: I0120 17:37:52.186352 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/memcached-0" podUID="b18f4fcf-eaae-401f-99ec-7b130ad8a6c1" containerName="memcached" containerID="cri-o://1912908256def343892d596509f3c9466765e94f19ab6d147f037bd8d37ce167" gracePeriod=30 Jan 20 17:37:52 crc kubenswrapper[4558]: I0120 17:37:52.196999 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:37:52 crc kubenswrapper[4558]: I0120 17:37:52.197332 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="86db33ff-9888-4ae6-b6f8-48593b3cd2e2" containerName="nova-scheduler-scheduler" containerID="cri-o://02edffa36e0ddd93cf579526cab4e0b85b23379d76a1aa96a22d61b92edbed39" gracePeriod=30 Jan 20 17:37:52 crc kubenswrapper[4558]: I0120 17:37:52.241712 4558 generic.go:334] "Generic (PLEG): container finished" podID="8763c4a4-3d9c-4e31-b33c-3bc9c67c55cf" containerID="b3d7f402be9f8d06e4e0f9ddd335d93a05f7d8f3602e1aebbde602938c65db9b" exitCode=0 Jan 20 17:37:52 crc kubenswrapper[4558]: I0120 17:37:52.241753 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-df78f4c65-4ns5m" event={"ID":"8763c4a4-3d9c-4e31-b33c-3bc9c67c55cf","Type":"ContainerDied","Data":"b3d7f402be9f8d06e4e0f9ddd335d93a05f7d8f3602e1aebbde602938c65db9b"} Jan 20 17:37:52 crc kubenswrapper[4558]: I0120 17:37:52.241781 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-df78f4c65-4ns5m" event={"ID":"8763c4a4-3d9c-4e31-b33c-3bc9c67c55cf","Type":"ContainerDied","Data":"e218c3452d8595d6da904a9e460b3a71848450d846aa87617211affbfe339500"} Jan 20 17:37:52 crc kubenswrapper[4558]: I0120 17:37:52.241800 4558 scope.go:117] "RemoveContainer" containerID="b3d7f402be9f8d06e4e0f9ddd335d93a05f7d8f3602e1aebbde602938c65db9b" Jan 20 17:37:52 crc kubenswrapper[4558]: I0120 17:37:52.241799 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-df78f4c65-4ns5m" Jan 20 17:37:52 crc kubenswrapper[4558]: I0120 17:37:52.258420 4558 scope.go:117] "RemoveContainer" containerID="7e2a717fb31d664a7b093b633891b75455ed429e27dcd7947ed7830a4c04e595" Jan 20 17:37:52 crc kubenswrapper[4558]: I0120 17:37:52.272397 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-df78f4c65-4ns5m"] Jan 20 17:37:52 crc kubenswrapper[4558]: I0120 17:37:52.285720 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-df78f4c65-4ns5m"] Jan 20 17:37:52 crc kubenswrapper[4558]: I0120 17:37:52.291263 4558 scope.go:117] "RemoveContainer" containerID="b3d7f402be9f8d06e4e0f9ddd335d93a05f7d8f3602e1aebbde602938c65db9b" Jan 20 17:37:52 crc kubenswrapper[4558]: E0120 17:37:52.292758 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b3d7f402be9f8d06e4e0f9ddd335d93a05f7d8f3602e1aebbde602938c65db9b\": container with ID starting with b3d7f402be9f8d06e4e0f9ddd335d93a05f7d8f3602e1aebbde602938c65db9b not found: ID does not exist" containerID="b3d7f402be9f8d06e4e0f9ddd335d93a05f7d8f3602e1aebbde602938c65db9b" Jan 20 17:37:52 crc kubenswrapper[4558]: I0120 17:37:52.292807 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b3d7f402be9f8d06e4e0f9ddd335d93a05f7d8f3602e1aebbde602938c65db9b"} err="failed to get container status \"b3d7f402be9f8d06e4e0f9ddd335d93a05f7d8f3602e1aebbde602938c65db9b\": rpc error: code = NotFound desc = could not find container \"b3d7f402be9f8d06e4e0f9ddd335d93a05f7d8f3602e1aebbde602938c65db9b\": container with ID starting with b3d7f402be9f8d06e4e0f9ddd335d93a05f7d8f3602e1aebbde602938c65db9b not found: ID does not exist" Jan 20 17:37:52 crc kubenswrapper[4558]: I0120 17:37:52.292838 4558 scope.go:117] "RemoveContainer" containerID="7e2a717fb31d664a7b093b633891b75455ed429e27dcd7947ed7830a4c04e595" Jan 20 17:37:52 crc kubenswrapper[4558]: E0120 17:37:52.293492 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7e2a717fb31d664a7b093b633891b75455ed429e27dcd7947ed7830a4c04e595\": container with ID starting with 7e2a717fb31d664a7b093b633891b75455ed429e27dcd7947ed7830a4c04e595 not found: ID does not exist" containerID="7e2a717fb31d664a7b093b633891b75455ed429e27dcd7947ed7830a4c04e595" Jan 20 17:37:52 crc kubenswrapper[4558]: I0120 17:37:52.293525 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7e2a717fb31d664a7b093b633891b75455ed429e27dcd7947ed7830a4c04e595"} err="failed to get container status \"7e2a717fb31d664a7b093b633891b75455ed429e27dcd7947ed7830a4c04e595\": rpc error: code = NotFound desc = could not find container \"7e2a717fb31d664a7b093b633891b75455ed429e27dcd7947ed7830a4c04e595\": container with ID starting with 7e2a717fb31d664a7b093b633891b75455ed429e27dcd7947ed7830a4c04e595 not found: ID does not exist" Jan 20 17:37:52 crc kubenswrapper[4558]: I0120 17:37:52.579663 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8763c4a4-3d9c-4e31-b33c-3bc9c67c55cf" path="/var/lib/kubelet/pods/8763c4a4-3d9c-4e31-b33c-3bc9c67c55cf/volumes" Jan 20 17:37:52 crc kubenswrapper[4558]: I0120 17:37:52.990002 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/neutron-6d8f9956fd-j6jg4" 
podUID="5b57aa66-3c71-422d-b029-86cb0e3a9aef" containerName="neutron-httpd" probeResult="failure" output="Get \"https://10.217.0.143:9696/\": dial tcp 10.217.0.143:9696: connect: connection refused" Jan 20 17:37:53 crc kubenswrapper[4558]: I0120 17:37:53.250136 4558 generic.go:334] "Generic (PLEG): container finished" podID="b18f4fcf-eaae-401f-99ec-7b130ad8a6c1" containerID="1912908256def343892d596509f3c9466765e94f19ab6d147f037bd8d37ce167" exitCode=0 Jan 20 17:37:53 crc kubenswrapper[4558]: I0120 17:37:53.250196 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"b18f4fcf-eaae-401f-99ec-7b130ad8a6c1","Type":"ContainerDied","Data":"1912908256def343892d596509f3c9466765e94f19ab6d147f037bd8d37ce167"} Jan 20 17:37:53 crc kubenswrapper[4558]: I0120 17:37:53.406387 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:37:53 crc kubenswrapper[4558]: I0120 17:37:53.586629 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b18f4fcf-eaae-401f-99ec-7b130ad8a6c1-combined-ca-bundle\") pod \"b18f4fcf-eaae-401f-99ec-7b130ad8a6c1\" (UID: \"b18f4fcf-eaae-401f-99ec-7b130ad8a6c1\") " Jan 20 17:37:53 crc kubenswrapper[4558]: I0120 17:37:53.586807 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/b18f4fcf-eaae-401f-99ec-7b130ad8a6c1-kolla-config\") pod \"b18f4fcf-eaae-401f-99ec-7b130ad8a6c1\" (UID: \"b18f4fcf-eaae-401f-99ec-7b130ad8a6c1\") " Jan 20 17:37:53 crc kubenswrapper[4558]: I0120 17:37:53.586897 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9gj5l\" (UniqueName: \"kubernetes.io/projected/b18f4fcf-eaae-401f-99ec-7b130ad8a6c1-kube-api-access-9gj5l\") pod \"b18f4fcf-eaae-401f-99ec-7b130ad8a6c1\" (UID: \"b18f4fcf-eaae-401f-99ec-7b130ad8a6c1\") " Jan 20 17:37:53 crc kubenswrapper[4558]: I0120 17:37:53.586970 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b18f4fcf-eaae-401f-99ec-7b130ad8a6c1-config-data\") pod \"b18f4fcf-eaae-401f-99ec-7b130ad8a6c1\" (UID: \"b18f4fcf-eaae-401f-99ec-7b130ad8a6c1\") " Jan 20 17:37:53 crc kubenswrapper[4558]: I0120 17:37:53.587018 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/b18f4fcf-eaae-401f-99ec-7b130ad8a6c1-memcached-tls-certs\") pod \"b18f4fcf-eaae-401f-99ec-7b130ad8a6c1\" (UID: \"b18f4fcf-eaae-401f-99ec-7b130ad8a6c1\") " Jan 20 17:37:53 crc kubenswrapper[4558]: I0120 17:37:53.587589 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b18f4fcf-eaae-401f-99ec-7b130ad8a6c1-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "b18f4fcf-eaae-401f-99ec-7b130ad8a6c1" (UID: "b18f4fcf-eaae-401f-99ec-7b130ad8a6c1"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:37:53 crc kubenswrapper[4558]: I0120 17:37:53.588385 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b18f4fcf-eaae-401f-99ec-7b130ad8a6c1-config-data" (OuterVolumeSpecName: "config-data") pod "b18f4fcf-eaae-401f-99ec-7b130ad8a6c1" (UID: "b18f4fcf-eaae-401f-99ec-7b130ad8a6c1"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:37:53 crc kubenswrapper[4558]: I0120 17:37:53.598446 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b18f4fcf-eaae-401f-99ec-7b130ad8a6c1-kube-api-access-9gj5l" (OuterVolumeSpecName: "kube-api-access-9gj5l") pod "b18f4fcf-eaae-401f-99ec-7b130ad8a6c1" (UID: "b18f4fcf-eaae-401f-99ec-7b130ad8a6c1"). InnerVolumeSpecName "kube-api-access-9gj5l". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:37:53 crc kubenswrapper[4558]: I0120 17:37:53.609551 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b18f4fcf-eaae-401f-99ec-7b130ad8a6c1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b18f4fcf-eaae-401f-99ec-7b130ad8a6c1" (UID: "b18f4fcf-eaae-401f-99ec-7b130ad8a6c1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:53 crc kubenswrapper[4558]: I0120 17:37:53.624767 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b18f4fcf-eaae-401f-99ec-7b130ad8a6c1-memcached-tls-certs" (OuterVolumeSpecName: "memcached-tls-certs") pod "b18f4fcf-eaae-401f-99ec-7b130ad8a6c1" (UID: "b18f4fcf-eaae-401f-99ec-7b130ad8a6c1"). InnerVolumeSpecName "memcached-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:53 crc kubenswrapper[4558]: I0120 17:37:53.688327 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b18f4fcf-eaae-401f-99ec-7b130ad8a6c1-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:53 crc kubenswrapper[4558]: I0120 17:37:53.688361 4558 reconciler_common.go:293] "Volume detached for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/b18f4fcf-eaae-401f-99ec-7b130ad8a6c1-memcached-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:53 crc kubenswrapper[4558]: I0120 17:37:53.688376 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b18f4fcf-eaae-401f-99ec-7b130ad8a6c1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:53 crc kubenswrapper[4558]: I0120 17:37:53.688387 4558 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/b18f4fcf-eaae-401f-99ec-7b130ad8a6c1-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:53 crc kubenswrapper[4558]: I0120 17:37:53.688399 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9gj5l\" (UniqueName: \"kubernetes.io/projected/b18f4fcf-eaae-401f-99ec-7b130ad8a6c1-kube-api-access-9gj5l\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:54 crc kubenswrapper[4558]: I0120 17:37:54.258602 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"b18f4fcf-eaae-401f-99ec-7b130ad8a6c1","Type":"ContainerDied","Data":"15b24ba0e05dc30e42bec5e997f3d15329eec1a28a5f88a4d0916339e17fd0d9"} Jan 20 17:37:54 crc kubenswrapper[4558]: I0120 17:37:54.258656 4558 scope.go:117] "RemoveContainer" containerID="1912908256def343892d596509f3c9466765e94f19ab6d147f037bd8d37ce167" Jan 20 17:37:54 crc kubenswrapper[4558]: I0120 17:37:54.258740 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:37:54 crc kubenswrapper[4558]: I0120 17:37:54.289420 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:37:54 crc kubenswrapper[4558]: I0120 17:37:54.295433 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:37:54 crc kubenswrapper[4558]: E0120 17:37:54.364995 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="02edffa36e0ddd93cf579526cab4e0b85b23379d76a1aa96a22d61b92edbed39" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:37:54 crc kubenswrapper[4558]: E0120 17:37:54.366356 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="02edffa36e0ddd93cf579526cab4e0b85b23379d76a1aa96a22d61b92edbed39" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:37:54 crc kubenswrapper[4558]: E0120 17:37:54.367547 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="02edffa36e0ddd93cf579526cab4e0b85b23379d76a1aa96a22d61b92edbed39" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:37:54 crc kubenswrapper[4558]: E0120 17:37:54.367601 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="86db33ff-9888-4ae6-b6f8-48593b3cd2e2" containerName="nova-scheduler-scheduler" Jan 20 17:37:54 crc kubenswrapper[4558]: I0120 17:37:54.573708 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b18f4fcf-eaae-401f-99ec-7b130ad8a6c1" path="/var/lib/kubelet/pods/b18f4fcf-eaae-401f-99ec-7b130ad8a6c1/volumes" Jan 20 17:37:56 crc kubenswrapper[4558]: I0120 17:37:56.933984 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-6d8f9956fd-j6jg4" Jan 20 17:37:57 crc kubenswrapper[4558]: I0120 17:37:57.134821 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hnmc5\" (UniqueName: \"kubernetes.io/projected/5b57aa66-3c71-422d-b029-86cb0e3a9aef-kube-api-access-hnmc5\") pod \"5b57aa66-3c71-422d-b029-86cb0e3a9aef\" (UID: \"5b57aa66-3c71-422d-b029-86cb0e3a9aef\") " Jan 20 17:37:57 crc kubenswrapper[4558]: I0120 17:37:57.136027 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/5b57aa66-3c71-422d-b029-86cb0e3a9aef-httpd-config\") pod \"5b57aa66-3c71-422d-b029-86cb0e3a9aef\" (UID: \"5b57aa66-3c71-422d-b029-86cb0e3a9aef\") " Jan 20 17:37:57 crc kubenswrapper[4558]: I0120 17:37:57.136144 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5b57aa66-3c71-422d-b029-86cb0e3a9aef-public-tls-certs\") pod \"5b57aa66-3c71-422d-b029-86cb0e3a9aef\" (UID: \"5b57aa66-3c71-422d-b029-86cb0e3a9aef\") " Jan 20 17:37:57 crc kubenswrapper[4558]: I0120 17:37:57.136244 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/5b57aa66-3c71-422d-b029-86cb0e3a9aef-config\") pod \"5b57aa66-3c71-422d-b029-86cb0e3a9aef\" (UID: \"5b57aa66-3c71-422d-b029-86cb0e3a9aef\") " Jan 20 17:37:57 crc kubenswrapper[4558]: I0120 17:37:57.136380 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5b57aa66-3c71-422d-b029-86cb0e3a9aef-internal-tls-certs\") pod \"5b57aa66-3c71-422d-b029-86cb0e3a9aef\" (UID: \"5b57aa66-3c71-422d-b029-86cb0e3a9aef\") " Jan 20 17:37:57 crc kubenswrapper[4558]: I0120 17:37:57.136416 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/5b57aa66-3c71-422d-b029-86cb0e3a9aef-ovndb-tls-certs\") pod \"5b57aa66-3c71-422d-b029-86cb0e3a9aef\" (UID: \"5b57aa66-3c71-422d-b029-86cb0e3a9aef\") " Jan 20 17:37:57 crc kubenswrapper[4558]: I0120 17:37:57.136890 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b57aa66-3c71-422d-b029-86cb0e3a9aef-combined-ca-bundle\") pod \"5b57aa66-3c71-422d-b029-86cb0e3a9aef\" (UID: \"5b57aa66-3c71-422d-b029-86cb0e3a9aef\") " Jan 20 17:37:57 crc kubenswrapper[4558]: I0120 17:37:57.142245 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b57aa66-3c71-422d-b029-86cb0e3a9aef-kube-api-access-hnmc5" (OuterVolumeSpecName: "kube-api-access-hnmc5") pod "5b57aa66-3c71-422d-b029-86cb0e3a9aef" (UID: "5b57aa66-3c71-422d-b029-86cb0e3a9aef"). InnerVolumeSpecName "kube-api-access-hnmc5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:37:57 crc kubenswrapper[4558]: I0120 17:37:57.143117 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b57aa66-3c71-422d-b029-86cb0e3a9aef-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "5b57aa66-3c71-422d-b029-86cb0e3a9aef" (UID: "5b57aa66-3c71-422d-b029-86cb0e3a9aef"). InnerVolumeSpecName "httpd-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:57 crc kubenswrapper[4558]: I0120 17:37:57.168776 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b57aa66-3c71-422d-b029-86cb0e3a9aef-config" (OuterVolumeSpecName: "config") pod "5b57aa66-3c71-422d-b029-86cb0e3a9aef" (UID: "5b57aa66-3c71-422d-b029-86cb0e3a9aef"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:57 crc kubenswrapper[4558]: I0120 17:37:57.172469 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b57aa66-3c71-422d-b029-86cb0e3a9aef-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "5b57aa66-3c71-422d-b029-86cb0e3a9aef" (UID: "5b57aa66-3c71-422d-b029-86cb0e3a9aef"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:57 crc kubenswrapper[4558]: I0120 17:37:57.176506 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b57aa66-3c71-422d-b029-86cb0e3a9aef-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5b57aa66-3c71-422d-b029-86cb0e3a9aef" (UID: "5b57aa66-3c71-422d-b029-86cb0e3a9aef"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:57 crc kubenswrapper[4558]: I0120 17:37:57.178879 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b57aa66-3c71-422d-b029-86cb0e3a9aef-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "5b57aa66-3c71-422d-b029-86cb0e3a9aef" (UID: "5b57aa66-3c71-422d-b029-86cb0e3a9aef"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:57 crc kubenswrapper[4558]: I0120 17:37:57.186340 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b57aa66-3c71-422d-b029-86cb0e3a9aef-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "5b57aa66-3c71-422d-b029-86cb0e3a9aef" (UID: "5b57aa66-3c71-422d-b029-86cb0e3a9aef"). InnerVolumeSpecName "ovndb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:57 crc kubenswrapper[4558]: I0120 17:37:57.239975 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5b57aa66-3c71-422d-b029-86cb0e3a9aef-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:57 crc kubenswrapper[4558]: I0120 17:37:57.240005 4558 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/5b57aa66-3c71-422d-b029-86cb0e3a9aef-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:57 crc kubenswrapper[4558]: I0120 17:37:57.240103 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5b57aa66-3c71-422d-b029-86cb0e3a9aef-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:57 crc kubenswrapper[4558]: I0120 17:37:57.240123 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hnmc5\" (UniqueName: \"kubernetes.io/projected/5b57aa66-3c71-422d-b029-86cb0e3a9aef-kube-api-access-hnmc5\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:57 crc kubenswrapper[4558]: I0120 17:37:57.240136 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/5b57aa66-3c71-422d-b029-86cb0e3a9aef-httpd-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:57 crc kubenswrapper[4558]: I0120 17:37:57.240146 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5b57aa66-3c71-422d-b029-86cb0e3a9aef-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:57 crc kubenswrapper[4558]: I0120 17:37:57.240156 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/5b57aa66-3c71-422d-b029-86cb0e3a9aef-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:57 crc kubenswrapper[4558]: I0120 17:37:57.306859 4558 generic.go:334] "Generic (PLEG): container finished" podID="5b57aa66-3c71-422d-b029-86cb0e3a9aef" containerID="bbdfd5ce6843e43c9db75a7afcdc0a1ab6a9db243de4fc167edef10ed383f798" exitCode=0 Jan 20 17:37:57 crc kubenswrapper[4558]: I0120 17:37:57.306940 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-6d8f9956fd-j6jg4" event={"ID":"5b57aa66-3c71-422d-b029-86cb0e3a9aef","Type":"ContainerDied","Data":"bbdfd5ce6843e43c9db75a7afcdc0a1ab6a9db243de4fc167edef10ed383f798"} Jan 20 17:37:57 crc kubenswrapper[4558]: I0120 17:37:57.307137 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-6d8f9956fd-j6jg4" event={"ID":"5b57aa66-3c71-422d-b029-86cb0e3a9aef","Type":"ContainerDied","Data":"5c598ae289e51b4f694eaad151caaba6baebea9ddd1de5aa5781296530efbf55"} Jan 20 17:37:57 crc kubenswrapper[4558]: I0120 17:37:57.307174 4558 scope.go:117] "RemoveContainer" containerID="1760926567dc1435e74fb123df5eb6e6a9b5f8ff375b1b59306d1e2e66b1ba69" Jan 20 17:37:57 crc kubenswrapper[4558]: I0120 17:37:57.306973 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-6d8f9956fd-j6jg4" Jan 20 17:37:57 crc kubenswrapper[4558]: I0120 17:37:57.331071 4558 scope.go:117] "RemoveContainer" containerID="bbdfd5ce6843e43c9db75a7afcdc0a1ab6a9db243de4fc167edef10ed383f798" Jan 20 17:37:57 crc kubenswrapper[4558]: I0120 17:37:57.337395 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-6d8f9956fd-j6jg4"] Jan 20 17:37:57 crc kubenswrapper[4558]: I0120 17:37:57.343435 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-6d8f9956fd-j6jg4"] Jan 20 17:37:57 crc kubenswrapper[4558]: I0120 17:37:57.350199 4558 scope.go:117] "RemoveContainer" containerID="1760926567dc1435e74fb123df5eb6e6a9b5f8ff375b1b59306d1e2e66b1ba69" Jan 20 17:37:57 crc kubenswrapper[4558]: E0120 17:37:57.350587 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1760926567dc1435e74fb123df5eb6e6a9b5f8ff375b1b59306d1e2e66b1ba69\": container with ID starting with 1760926567dc1435e74fb123df5eb6e6a9b5f8ff375b1b59306d1e2e66b1ba69 not found: ID does not exist" containerID="1760926567dc1435e74fb123df5eb6e6a9b5f8ff375b1b59306d1e2e66b1ba69" Jan 20 17:37:57 crc kubenswrapper[4558]: I0120 17:37:57.350640 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1760926567dc1435e74fb123df5eb6e6a9b5f8ff375b1b59306d1e2e66b1ba69"} err="failed to get container status \"1760926567dc1435e74fb123df5eb6e6a9b5f8ff375b1b59306d1e2e66b1ba69\": rpc error: code = NotFound desc = could not find container \"1760926567dc1435e74fb123df5eb6e6a9b5f8ff375b1b59306d1e2e66b1ba69\": container with ID starting with 1760926567dc1435e74fb123df5eb6e6a9b5f8ff375b1b59306d1e2e66b1ba69 not found: ID does not exist" Jan 20 17:37:57 crc kubenswrapper[4558]: I0120 17:37:57.350674 4558 scope.go:117] "RemoveContainer" containerID="bbdfd5ce6843e43c9db75a7afcdc0a1ab6a9db243de4fc167edef10ed383f798" Jan 20 17:37:57 crc kubenswrapper[4558]: E0120 17:37:57.351082 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bbdfd5ce6843e43c9db75a7afcdc0a1ab6a9db243de4fc167edef10ed383f798\": container with ID starting with bbdfd5ce6843e43c9db75a7afcdc0a1ab6a9db243de4fc167edef10ed383f798 not found: ID does not exist" containerID="bbdfd5ce6843e43c9db75a7afcdc0a1ab6a9db243de4fc167edef10ed383f798" Jan 20 17:37:57 crc kubenswrapper[4558]: I0120 17:37:57.351127 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bbdfd5ce6843e43c9db75a7afcdc0a1ab6a9db243de4fc167edef10ed383f798"} err="failed to get container status \"bbdfd5ce6843e43c9db75a7afcdc0a1ab6a9db243de4fc167edef10ed383f798\": rpc error: code = NotFound desc = could not find container \"bbdfd5ce6843e43c9db75a7afcdc0a1ab6a9db243de4fc167edef10ed383f798\": container with ID starting with bbdfd5ce6843e43c9db75a7afcdc0a1ab6a9db243de4fc167edef10ed383f798 not found: ID does not exist" Jan 20 17:37:58 crc kubenswrapper[4558]: I0120 17:37:58.575688 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b57aa66-3c71-422d-b029-86cb0e3a9aef" path="/var/lib/kubelet/pods/5b57aa66-3c71-422d-b029-86cb0e3a9aef/volumes" Jan 20 17:37:59 crc kubenswrapper[4558]: I0120 17:37:59.138029 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:37:59 crc kubenswrapper[4558]: I0120 17:37:59.168431 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/86db33ff-9888-4ae6-b6f8-48593b3cd2e2-combined-ca-bundle\") pod \"86db33ff-9888-4ae6-b6f8-48593b3cd2e2\" (UID: \"86db33ff-9888-4ae6-b6f8-48593b3cd2e2\") " Jan 20 17:37:59 crc kubenswrapper[4558]: I0120 17:37:59.168548 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/86db33ff-9888-4ae6-b6f8-48593b3cd2e2-config-data\") pod \"86db33ff-9888-4ae6-b6f8-48593b3cd2e2\" (UID: \"86db33ff-9888-4ae6-b6f8-48593b3cd2e2\") " Jan 20 17:37:59 crc kubenswrapper[4558]: I0120 17:37:59.168610 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgzj5\" (UniqueName: \"kubernetes.io/projected/86db33ff-9888-4ae6-b6f8-48593b3cd2e2-kube-api-access-zgzj5\") pod \"86db33ff-9888-4ae6-b6f8-48593b3cd2e2\" (UID: \"86db33ff-9888-4ae6-b6f8-48593b3cd2e2\") " Jan 20 17:37:59 crc kubenswrapper[4558]: I0120 17:37:59.175249 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/86db33ff-9888-4ae6-b6f8-48593b3cd2e2-kube-api-access-zgzj5" (OuterVolumeSpecName: "kube-api-access-zgzj5") pod "86db33ff-9888-4ae6-b6f8-48593b3cd2e2" (UID: "86db33ff-9888-4ae6-b6f8-48593b3cd2e2"). InnerVolumeSpecName "kube-api-access-zgzj5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:37:59 crc kubenswrapper[4558]: I0120 17:37:59.187102 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/86db33ff-9888-4ae6-b6f8-48593b3cd2e2-config-data" (OuterVolumeSpecName: "config-data") pod "86db33ff-9888-4ae6-b6f8-48593b3cd2e2" (UID: "86db33ff-9888-4ae6-b6f8-48593b3cd2e2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:59 crc kubenswrapper[4558]: I0120 17:37:59.187758 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/86db33ff-9888-4ae6-b6f8-48593b3cd2e2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "86db33ff-9888-4ae6-b6f8-48593b3cd2e2" (UID: "86db33ff-9888-4ae6-b6f8-48593b3cd2e2"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:37:59 crc kubenswrapper[4558]: I0120 17:37:59.270100 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/86db33ff-9888-4ae6-b6f8-48593b3cd2e2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:59 crc kubenswrapper[4558]: I0120 17:37:59.270130 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/86db33ff-9888-4ae6-b6f8-48593b3cd2e2-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:59 crc kubenswrapper[4558]: I0120 17:37:59.270140 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgzj5\" (UniqueName: \"kubernetes.io/projected/86db33ff-9888-4ae6-b6f8-48593b3cd2e2-kube-api-access-zgzj5\") on node \"crc\" DevicePath \"\"" Jan 20 17:37:59 crc kubenswrapper[4558]: I0120 17:37:59.330718 4558 generic.go:334] "Generic (PLEG): container finished" podID="86db33ff-9888-4ae6-b6f8-48593b3cd2e2" containerID="02edffa36e0ddd93cf579526cab4e0b85b23379d76a1aa96a22d61b92edbed39" exitCode=0 Jan 20 17:37:59 crc kubenswrapper[4558]: I0120 17:37:59.330800 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:37:59 crc kubenswrapper[4558]: I0120 17:37:59.330831 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"86db33ff-9888-4ae6-b6f8-48593b3cd2e2","Type":"ContainerDied","Data":"02edffa36e0ddd93cf579526cab4e0b85b23379d76a1aa96a22d61b92edbed39"} Jan 20 17:37:59 crc kubenswrapper[4558]: I0120 17:37:59.332973 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"86db33ff-9888-4ae6-b6f8-48593b3cd2e2","Type":"ContainerDied","Data":"55ea30433e3b76c2e5b015dd0e61983f0d4c582fa12d2bcb4a12963788086413"} Jan 20 17:37:59 crc kubenswrapper[4558]: I0120 17:37:59.333013 4558 scope.go:117] "RemoveContainer" containerID="02edffa36e0ddd93cf579526cab4e0b85b23379d76a1aa96a22d61b92edbed39" Jan 20 17:37:59 crc kubenswrapper[4558]: I0120 17:37:59.358348 4558 scope.go:117] "RemoveContainer" containerID="02edffa36e0ddd93cf579526cab4e0b85b23379d76a1aa96a22d61b92edbed39" Jan 20 17:37:59 crc kubenswrapper[4558]: E0120 17:37:59.358790 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"02edffa36e0ddd93cf579526cab4e0b85b23379d76a1aa96a22d61b92edbed39\": container with ID starting with 02edffa36e0ddd93cf579526cab4e0b85b23379d76a1aa96a22d61b92edbed39 not found: ID does not exist" containerID="02edffa36e0ddd93cf579526cab4e0b85b23379d76a1aa96a22d61b92edbed39" Jan 20 17:37:59 crc kubenswrapper[4558]: I0120 17:37:59.358840 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"02edffa36e0ddd93cf579526cab4e0b85b23379d76a1aa96a22d61b92edbed39"} err="failed to get container status \"02edffa36e0ddd93cf579526cab4e0b85b23379d76a1aa96a22d61b92edbed39\": rpc error: code = NotFound desc = could not find container \"02edffa36e0ddd93cf579526cab4e0b85b23379d76a1aa96a22d61b92edbed39\": container with ID starting with 02edffa36e0ddd93cf579526cab4e0b85b23379d76a1aa96a22d61b92edbed39 not found: ID does not exist" Jan 20 17:37:59 crc kubenswrapper[4558]: I0120 17:37:59.365297 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:37:59 crc 
kubenswrapper[4558]: I0120 17:37:59.376898 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:38:00 crc kubenswrapper[4558]: I0120 17:38:00.574823 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="86db33ff-9888-4ae6-b6f8-48593b3cd2e2" path="/var/lib/kubelet/pods/86db33ff-9888-4ae6-b6f8-48593b3cd2e2/volumes" Jan 20 17:38:09 crc kubenswrapper[4558]: I0120 17:38:09.928678 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:38:10 crc kubenswrapper[4558]: I0120 17:38:10.111840 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6tqjr\" (UniqueName: \"kubernetes.io/projected/9800a14c-ebf5-4a5d-b384-c133973b55ff-kube-api-access-6tqjr\") pod \"9800a14c-ebf5-4a5d-b384-c133973b55ff\" (UID: \"9800a14c-ebf5-4a5d-b384-c133973b55ff\") " Jan 20 17:38:10 crc kubenswrapper[4558]: I0120 17:38:10.111907 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9800a14c-ebf5-4a5d-b384-c133973b55ff-config-data-custom\") pod \"9800a14c-ebf5-4a5d-b384-c133973b55ff\" (UID: \"9800a14c-ebf5-4a5d-b384-c133973b55ff\") " Jan 20 17:38:10 crc kubenswrapper[4558]: I0120 17:38:10.111960 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9800a14c-ebf5-4a5d-b384-c133973b55ff-combined-ca-bundle\") pod \"9800a14c-ebf5-4a5d-b384-c133973b55ff\" (UID: \"9800a14c-ebf5-4a5d-b384-c133973b55ff\") " Jan 20 17:38:10 crc kubenswrapper[4558]: I0120 17:38:10.111989 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9800a14c-ebf5-4a5d-b384-c133973b55ff-etc-machine-id\") pod \"9800a14c-ebf5-4a5d-b384-c133973b55ff\" (UID: \"9800a14c-ebf5-4a5d-b384-c133973b55ff\") " Jan 20 17:38:10 crc kubenswrapper[4558]: I0120 17:38:10.112085 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9800a14c-ebf5-4a5d-b384-c133973b55ff-scripts\") pod \"9800a14c-ebf5-4a5d-b384-c133973b55ff\" (UID: \"9800a14c-ebf5-4a5d-b384-c133973b55ff\") " Jan 20 17:38:10 crc kubenswrapper[4558]: I0120 17:38:10.112102 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9800a14c-ebf5-4a5d-b384-c133973b55ff-config-data\") pod \"9800a14c-ebf5-4a5d-b384-c133973b55ff\" (UID: \"9800a14c-ebf5-4a5d-b384-c133973b55ff\") " Jan 20 17:38:10 crc kubenswrapper[4558]: I0120 17:38:10.112188 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9800a14c-ebf5-4a5d-b384-c133973b55ff-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "9800a14c-ebf5-4a5d-b384-c133973b55ff" (UID: "9800a14c-ebf5-4a5d-b384-c133973b55ff"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:38:10 crc kubenswrapper[4558]: I0120 17:38:10.113272 4558 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9800a14c-ebf5-4a5d-b384-c133973b55ff-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 20 17:38:10 crc kubenswrapper[4558]: I0120 17:38:10.118052 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9800a14c-ebf5-4a5d-b384-c133973b55ff-kube-api-access-6tqjr" (OuterVolumeSpecName: "kube-api-access-6tqjr") pod "9800a14c-ebf5-4a5d-b384-c133973b55ff" (UID: "9800a14c-ebf5-4a5d-b384-c133973b55ff"). InnerVolumeSpecName "kube-api-access-6tqjr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:38:10 crc kubenswrapper[4558]: I0120 17:38:10.118680 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9800a14c-ebf5-4a5d-b384-c133973b55ff-scripts" (OuterVolumeSpecName: "scripts") pod "9800a14c-ebf5-4a5d-b384-c133973b55ff" (UID: "9800a14c-ebf5-4a5d-b384-c133973b55ff"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:38:10 crc kubenswrapper[4558]: I0120 17:38:10.119570 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9800a14c-ebf5-4a5d-b384-c133973b55ff-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "9800a14c-ebf5-4a5d-b384-c133973b55ff" (UID: "9800a14c-ebf5-4a5d-b384-c133973b55ff"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:38:10 crc kubenswrapper[4558]: I0120 17:38:10.142974 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9800a14c-ebf5-4a5d-b384-c133973b55ff-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9800a14c-ebf5-4a5d-b384-c133973b55ff" (UID: "9800a14c-ebf5-4a5d-b384-c133973b55ff"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:38:10 crc kubenswrapper[4558]: I0120 17:38:10.167013 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9800a14c-ebf5-4a5d-b384-c133973b55ff-config-data" (OuterVolumeSpecName: "config-data") pod "9800a14c-ebf5-4a5d-b384-c133973b55ff" (UID: "9800a14c-ebf5-4a5d-b384-c133973b55ff"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:38:10 crc kubenswrapper[4558]: I0120 17:38:10.214605 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9800a14c-ebf5-4a5d-b384-c133973b55ff-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:38:10 crc kubenswrapper[4558]: I0120 17:38:10.214634 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9800a14c-ebf5-4a5d-b384-c133973b55ff-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:38:10 crc kubenswrapper[4558]: I0120 17:38:10.214645 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9800a14c-ebf5-4a5d-b384-c133973b55ff-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:38:10 crc kubenswrapper[4558]: I0120 17:38:10.214654 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9800a14c-ebf5-4a5d-b384-c133973b55ff-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:38:10 crc kubenswrapper[4558]: I0120 17:38:10.214664 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6tqjr\" (UniqueName: \"kubernetes.io/projected/9800a14c-ebf5-4a5d-b384-c133973b55ff-kube-api-access-6tqjr\") on node \"crc\" DevicePath \"\"" Jan 20 17:38:10 crc kubenswrapper[4558]: I0120 17:38:10.446592 4558 generic.go:334] "Generic (PLEG): container finished" podID="9800a14c-ebf5-4a5d-b384-c133973b55ff" containerID="f203c5705e628d436a58ef25c8ec48e9cd0684833bf7150a4abe881181faec1b" exitCode=137 Jan 20 17:38:10 crc kubenswrapper[4558]: I0120 17:38:10.446634 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"9800a14c-ebf5-4a5d-b384-c133973b55ff","Type":"ContainerDied","Data":"f203c5705e628d436a58ef25c8ec48e9cd0684833bf7150a4abe881181faec1b"} Jan 20 17:38:10 crc kubenswrapper[4558]: I0120 17:38:10.446667 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"9800a14c-ebf5-4a5d-b384-c133973b55ff","Type":"ContainerDied","Data":"d400a17cccf44fd0f3413257fdcf4df411c41c45d5b7a8f8edeb7681c60a710b"} Jan 20 17:38:10 crc kubenswrapper[4558]: I0120 17:38:10.446686 4558 scope.go:117] "RemoveContainer" containerID="f88977b0890e7c6b4326fe8b4078f4b100c6c3f58772736456b0c1ccabf3b4fb" Jan 20 17:38:10 crc kubenswrapper[4558]: I0120 17:38:10.446716 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:38:10 crc kubenswrapper[4558]: I0120 17:38:10.482672 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:38:10 crc kubenswrapper[4558]: I0120 17:38:10.485722 4558 scope.go:117] "RemoveContainer" containerID="f203c5705e628d436a58ef25c8ec48e9cd0684833bf7150a4abe881181faec1b" Jan 20 17:38:10 crc kubenswrapper[4558]: I0120 17:38:10.488304 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:38:10 crc kubenswrapper[4558]: I0120 17:38:10.506517 4558 scope.go:117] "RemoveContainer" containerID="f88977b0890e7c6b4326fe8b4078f4b100c6c3f58772736456b0c1ccabf3b4fb" Jan 20 17:38:10 crc kubenswrapper[4558]: E0120 17:38:10.506849 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f88977b0890e7c6b4326fe8b4078f4b100c6c3f58772736456b0c1ccabf3b4fb\": container with ID starting with f88977b0890e7c6b4326fe8b4078f4b100c6c3f58772736456b0c1ccabf3b4fb not found: ID does not exist" containerID="f88977b0890e7c6b4326fe8b4078f4b100c6c3f58772736456b0c1ccabf3b4fb" Jan 20 17:38:10 crc kubenswrapper[4558]: I0120 17:38:10.506886 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f88977b0890e7c6b4326fe8b4078f4b100c6c3f58772736456b0c1ccabf3b4fb"} err="failed to get container status \"f88977b0890e7c6b4326fe8b4078f4b100c6c3f58772736456b0c1ccabf3b4fb\": rpc error: code = NotFound desc = could not find container \"f88977b0890e7c6b4326fe8b4078f4b100c6c3f58772736456b0c1ccabf3b4fb\": container with ID starting with f88977b0890e7c6b4326fe8b4078f4b100c6c3f58772736456b0c1ccabf3b4fb not found: ID does not exist" Jan 20 17:38:10 crc kubenswrapper[4558]: I0120 17:38:10.506906 4558 scope.go:117] "RemoveContainer" containerID="f203c5705e628d436a58ef25c8ec48e9cd0684833bf7150a4abe881181faec1b" Jan 20 17:38:10 crc kubenswrapper[4558]: E0120 17:38:10.507195 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f203c5705e628d436a58ef25c8ec48e9cd0684833bf7150a4abe881181faec1b\": container with ID starting with f203c5705e628d436a58ef25c8ec48e9cd0684833bf7150a4abe881181faec1b not found: ID does not exist" containerID="f203c5705e628d436a58ef25c8ec48e9cd0684833bf7150a4abe881181faec1b" Jan 20 17:38:10 crc kubenswrapper[4558]: I0120 17:38:10.507230 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f203c5705e628d436a58ef25c8ec48e9cd0684833bf7150a4abe881181faec1b"} err="failed to get container status \"f203c5705e628d436a58ef25c8ec48e9cd0684833bf7150a4abe881181faec1b\": rpc error: code = NotFound desc = could not find container \"f203c5705e628d436a58ef25c8ec48e9cd0684833bf7150a4abe881181faec1b\": container with ID starting with f203c5705e628d436a58ef25c8ec48e9cd0684833bf7150a4abe881181faec1b not found: ID does not exist" Jan 20 17:38:10 crc kubenswrapper[4558]: I0120 17:38:10.572238 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9800a14c-ebf5-4a5d-b384-c133973b55ff" path="/var/lib/kubelet/pods/9800a14c-ebf5-4a5d-b384-c133973b55ff/volumes" Jan 20 17:38:11 crc kubenswrapper[4558]: I0120 17:38:11.417843 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:38:11 crc kubenswrapper[4558]: I0120 17:38:11.474003 4558 generic.go:334] "Generic (PLEG): container finished" podID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" containerID="ab686f3cd584a56d6b1aa614ebae97ca9c4f9dcf44febc60af60b85d5951643a" exitCode=137 Jan 20 17:38:11 crc kubenswrapper[4558]: I0120 17:38:11.474921 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"85471d11-01bf-4a15-b6bc-fabbdaa5750b","Type":"ContainerDied","Data":"ab686f3cd584a56d6b1aa614ebae97ca9c4f9dcf44febc60af60b85d5951643a"} Jan 20 17:38:11 crc kubenswrapper[4558]: I0120 17:38:11.475045 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"85471d11-01bf-4a15-b6bc-fabbdaa5750b","Type":"ContainerDied","Data":"9c9eafe66c53451254720116575d1d481ccd40e2a6e99e2b650e0cbf12c38725"} Jan 20 17:38:11 crc kubenswrapper[4558]: I0120 17:38:11.475122 4558 scope.go:117] "RemoveContainer" containerID="ab686f3cd584a56d6b1aa614ebae97ca9c4f9dcf44febc60af60b85d5951643a" Jan 20 17:38:11 crc kubenswrapper[4558]: I0120 17:38:11.475263 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:38:11 crc kubenswrapper[4558]: I0120 17:38:11.496278 4558 scope.go:117] "RemoveContainer" containerID="ed8a91ba58ec453d2111a81153c4d592bd204afe8c8353e52171871827a439ce" Jan 20 17:38:11 crc kubenswrapper[4558]: I0120 17:38:11.514197 4558 scope.go:117] "RemoveContainer" containerID="e07ad59dc71e1f67f59603c4aac989fb94b2c710bade32305a1dc8101de80423" Jan 20 17:38:11 crc kubenswrapper[4558]: I0120 17:38:11.530210 4558 scope.go:117] "RemoveContainer" containerID="9230e487ce369f41cbd9bc631dd1261a599ee2dacc4825a2345c5d5867719c36" Jan 20 17:38:11 crc kubenswrapper[4558]: I0120 17:38:11.533061 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/85471d11-01bf-4a15-b6bc-fabbdaa5750b-etc-swift\") pod \"85471d11-01bf-4a15-b6bc-fabbdaa5750b\" (UID: \"85471d11-01bf-4a15-b6bc-fabbdaa5750b\") " Jan 20 17:38:11 crc kubenswrapper[4558]: I0120 17:38:11.533134 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gzsnj\" (UniqueName: \"kubernetes.io/projected/85471d11-01bf-4a15-b6bc-fabbdaa5750b-kube-api-access-gzsnj\") pod \"85471d11-01bf-4a15-b6bc-fabbdaa5750b\" (UID: \"85471d11-01bf-4a15-b6bc-fabbdaa5750b\") " Jan 20 17:38:11 crc kubenswrapper[4558]: I0120 17:38:11.533227 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/85471d11-01bf-4a15-b6bc-fabbdaa5750b-lock\") pod \"85471d11-01bf-4a15-b6bc-fabbdaa5750b\" (UID: \"85471d11-01bf-4a15-b6bc-fabbdaa5750b\") " Jan 20 17:38:11 crc kubenswrapper[4558]: I0120 17:38:11.533287 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/85471d11-01bf-4a15-b6bc-fabbdaa5750b-cache\") pod \"85471d11-01bf-4a15-b6bc-fabbdaa5750b\" (UID: \"85471d11-01bf-4a15-b6bc-fabbdaa5750b\") " Jan 20 17:38:11 crc kubenswrapper[4558]: I0120 17:38:11.533327 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swift\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"85471d11-01bf-4a15-b6bc-fabbdaa5750b\" (UID: \"85471d11-01bf-4a15-b6bc-fabbdaa5750b\") " 
Jan 20 17:38:11 crc kubenswrapper[4558]: I0120 17:38:11.533797 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/85471d11-01bf-4a15-b6bc-fabbdaa5750b-lock" (OuterVolumeSpecName: "lock") pod "85471d11-01bf-4a15-b6bc-fabbdaa5750b" (UID: "85471d11-01bf-4a15-b6bc-fabbdaa5750b"). InnerVolumeSpecName "lock". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:38:11 crc kubenswrapper[4558]: I0120 17:38:11.533914 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/85471d11-01bf-4a15-b6bc-fabbdaa5750b-cache" (OuterVolumeSpecName: "cache") pod "85471d11-01bf-4a15-b6bc-fabbdaa5750b" (UID: "85471d11-01bf-4a15-b6bc-fabbdaa5750b"). InnerVolumeSpecName "cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:38:11 crc kubenswrapper[4558]: I0120 17:38:11.537689 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/85471d11-01bf-4a15-b6bc-fabbdaa5750b-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "85471d11-01bf-4a15-b6bc-fabbdaa5750b" (UID: "85471d11-01bf-4a15-b6bc-fabbdaa5750b"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:38:11 crc kubenswrapper[4558]: I0120 17:38:11.538306 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/85471d11-01bf-4a15-b6bc-fabbdaa5750b-kube-api-access-gzsnj" (OuterVolumeSpecName: "kube-api-access-gzsnj") pod "85471d11-01bf-4a15-b6bc-fabbdaa5750b" (UID: "85471d11-01bf-4a15-b6bc-fabbdaa5750b"). InnerVolumeSpecName "kube-api-access-gzsnj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:38:11 crc kubenswrapper[4558]: I0120 17:38:11.538340 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage14-crc" (OuterVolumeSpecName: "swift") pod "85471d11-01bf-4a15-b6bc-fabbdaa5750b" (UID: "85471d11-01bf-4a15-b6bc-fabbdaa5750b"). InnerVolumeSpecName "local-storage14-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:38:11 crc kubenswrapper[4558]: I0120 17:38:11.547053 4558 scope.go:117] "RemoveContainer" containerID="bad7f6a8d29de5c70b1535d6075c42725412283bb958655edcb536d62f10c419" Jan 20 17:38:11 crc kubenswrapper[4558]: I0120 17:38:11.571192 4558 scope.go:117] "RemoveContainer" containerID="3a2576d6f85fa2071ef37c084cdd8c32f7c6ddb702173be6308da0b714ce8400" Jan 20 17:38:11 crc kubenswrapper[4558]: I0120 17:38:11.594768 4558 scope.go:117] "RemoveContainer" containerID="2773fa3804e1639afaf6e312e08a0b9f626d09103c779a099e0c1bbf0928a8b9" Jan 20 17:38:11 crc kubenswrapper[4558]: I0120 17:38:11.609993 4558 scope.go:117] "RemoveContainer" containerID="23ff4d2ea69aec4c032a694e47beda8f08067327b0540a599b22bd0a5e1d7fe1" Jan 20 17:38:11 crc kubenswrapper[4558]: I0120 17:38:11.625513 4558 scope.go:117] "RemoveContainer" containerID="47797d4b979364b402e28a83de59b30acd8a2031f77af23bdb31c8827f23a687" Jan 20 17:38:11 crc kubenswrapper[4558]: I0120 17:38:11.635426 4558 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/85471d11-01bf-4a15-b6bc-fabbdaa5750b-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 20 17:38:11 crc kubenswrapper[4558]: I0120 17:38:11.635462 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gzsnj\" (UniqueName: \"kubernetes.io/projected/85471d11-01bf-4a15-b6bc-fabbdaa5750b-kube-api-access-gzsnj\") on node \"crc\" DevicePath \"\"" Jan 20 17:38:11 crc kubenswrapper[4558]: I0120 17:38:11.635475 4558 reconciler_common.go:293] "Volume detached for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/85471d11-01bf-4a15-b6bc-fabbdaa5750b-lock\") on node \"crc\" DevicePath \"\"" Jan 20 17:38:11 crc kubenswrapper[4558]: I0120 17:38:11.635485 4558 reconciler_common.go:293] "Volume detached for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/85471d11-01bf-4a15-b6bc-fabbdaa5750b-cache\") on node \"crc\" DevicePath \"\"" Jan 20 17:38:11 crc kubenswrapper[4558]: I0120 17:38:11.635518 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") on node \"crc\" " Jan 20 17:38:11 crc kubenswrapper[4558]: I0120 17:38:11.641827 4558 scope.go:117] "RemoveContainer" containerID="9f1797db51cad1bf6e1bdae441554b6ce94379733c94a7ad9722e8bdd8202d02" Jan 20 17:38:11 crc kubenswrapper[4558]: I0120 17:38:11.648031 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage14-crc" (UniqueName: "kubernetes.io/local-volume/local-storage14-crc") on node "crc" Jan 20 17:38:11 crc kubenswrapper[4558]: I0120 17:38:11.657772 4558 scope.go:117] "RemoveContainer" containerID="2b29e3812d37926ee8f85d9b22c2a577e69fa75b6f1695111d8868abfe8319dc" Jan 20 17:38:11 crc kubenswrapper[4558]: I0120 17:38:11.682378 4558 scope.go:117] "RemoveContainer" containerID="d0d49ab0892c850e328fb329607d75c06674dd1862b648f22f4a98fb49a98e23" Jan 20 17:38:11 crc kubenswrapper[4558]: I0120 17:38:11.704536 4558 scope.go:117] "RemoveContainer" containerID="2539753eba585cbf5573638f6961f2168ad0433d885e5c40c84893ee902e38b5" Jan 20 17:38:11 crc kubenswrapper[4558]: I0120 17:38:11.720076 4558 scope.go:117] "RemoveContainer" containerID="f84266a8171f0ff80bdcdf75ef52c201b51aa2133964c7208ddf98fa9d157472" Jan 20 17:38:11 crc kubenswrapper[4558]: I0120 17:38:11.735756 4558 scope.go:117] "RemoveContainer" containerID="59a8367d177182b4518509dac1c5fda5058c9891b3fbc80a0b902bff689f902b" Jan 
20 17:38:11 crc kubenswrapper[4558]: I0120 17:38:11.737837 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:38:11 crc kubenswrapper[4558]: I0120 17:38:11.752763 4558 scope.go:117] "RemoveContainer" containerID="ab686f3cd584a56d6b1aa614ebae97ca9c4f9dcf44febc60af60b85d5951643a" Jan 20 17:38:11 crc kubenswrapper[4558]: E0120 17:38:11.753139 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ab686f3cd584a56d6b1aa614ebae97ca9c4f9dcf44febc60af60b85d5951643a\": container with ID starting with ab686f3cd584a56d6b1aa614ebae97ca9c4f9dcf44febc60af60b85d5951643a not found: ID does not exist" containerID="ab686f3cd584a56d6b1aa614ebae97ca9c4f9dcf44febc60af60b85d5951643a" Jan 20 17:38:11 crc kubenswrapper[4558]: I0120 17:38:11.753194 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ab686f3cd584a56d6b1aa614ebae97ca9c4f9dcf44febc60af60b85d5951643a"} err="failed to get container status \"ab686f3cd584a56d6b1aa614ebae97ca9c4f9dcf44febc60af60b85d5951643a\": rpc error: code = NotFound desc = could not find container \"ab686f3cd584a56d6b1aa614ebae97ca9c4f9dcf44febc60af60b85d5951643a\": container with ID starting with ab686f3cd584a56d6b1aa614ebae97ca9c4f9dcf44febc60af60b85d5951643a not found: ID does not exist" Jan 20 17:38:11 crc kubenswrapper[4558]: I0120 17:38:11.753217 4558 scope.go:117] "RemoveContainer" containerID="ed8a91ba58ec453d2111a81153c4d592bd204afe8c8353e52171871827a439ce" Jan 20 17:38:11 crc kubenswrapper[4558]: E0120 17:38:11.753566 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ed8a91ba58ec453d2111a81153c4d592bd204afe8c8353e52171871827a439ce\": container with ID starting with ed8a91ba58ec453d2111a81153c4d592bd204afe8c8353e52171871827a439ce not found: ID does not exist" containerID="ed8a91ba58ec453d2111a81153c4d592bd204afe8c8353e52171871827a439ce" Jan 20 17:38:11 crc kubenswrapper[4558]: I0120 17:38:11.753601 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ed8a91ba58ec453d2111a81153c4d592bd204afe8c8353e52171871827a439ce"} err="failed to get container status \"ed8a91ba58ec453d2111a81153c4d592bd204afe8c8353e52171871827a439ce\": rpc error: code = NotFound desc = could not find container \"ed8a91ba58ec453d2111a81153c4d592bd204afe8c8353e52171871827a439ce\": container with ID starting with ed8a91ba58ec453d2111a81153c4d592bd204afe8c8353e52171871827a439ce not found: ID does not exist" Jan 20 17:38:11 crc kubenswrapper[4558]: I0120 17:38:11.753627 4558 scope.go:117] "RemoveContainer" containerID="e07ad59dc71e1f67f59603c4aac989fb94b2c710bade32305a1dc8101de80423" Jan 20 17:38:11 crc kubenswrapper[4558]: E0120 17:38:11.754025 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e07ad59dc71e1f67f59603c4aac989fb94b2c710bade32305a1dc8101de80423\": container with ID starting with e07ad59dc71e1f67f59603c4aac989fb94b2c710bade32305a1dc8101de80423 not found: ID does not exist" containerID="e07ad59dc71e1f67f59603c4aac989fb94b2c710bade32305a1dc8101de80423" Jan 20 17:38:11 crc kubenswrapper[4558]: I0120 17:38:11.754049 4558 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"e07ad59dc71e1f67f59603c4aac989fb94b2c710bade32305a1dc8101de80423"} err="failed to get container status \"e07ad59dc71e1f67f59603c4aac989fb94b2c710bade32305a1dc8101de80423\": rpc error: code = NotFound desc = could not find container \"e07ad59dc71e1f67f59603c4aac989fb94b2c710bade32305a1dc8101de80423\": container with ID starting with e07ad59dc71e1f67f59603c4aac989fb94b2c710bade32305a1dc8101de80423 not found: ID does not exist" Jan 20 17:38:11 crc kubenswrapper[4558]: I0120 17:38:11.754071 4558 scope.go:117] "RemoveContainer" containerID="9230e487ce369f41cbd9bc631dd1261a599ee2dacc4825a2345c5d5867719c36" Jan 20 17:38:11 crc kubenswrapper[4558]: E0120 17:38:11.754424 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9230e487ce369f41cbd9bc631dd1261a599ee2dacc4825a2345c5d5867719c36\": container with ID starting with 9230e487ce369f41cbd9bc631dd1261a599ee2dacc4825a2345c5d5867719c36 not found: ID does not exist" containerID="9230e487ce369f41cbd9bc631dd1261a599ee2dacc4825a2345c5d5867719c36" Jan 20 17:38:11 crc kubenswrapper[4558]: I0120 17:38:11.754454 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9230e487ce369f41cbd9bc631dd1261a599ee2dacc4825a2345c5d5867719c36"} err="failed to get container status \"9230e487ce369f41cbd9bc631dd1261a599ee2dacc4825a2345c5d5867719c36\": rpc error: code = NotFound desc = could not find container \"9230e487ce369f41cbd9bc631dd1261a599ee2dacc4825a2345c5d5867719c36\": container with ID starting with 9230e487ce369f41cbd9bc631dd1261a599ee2dacc4825a2345c5d5867719c36 not found: ID does not exist" Jan 20 17:38:11 crc kubenswrapper[4558]: I0120 17:38:11.754472 4558 scope.go:117] "RemoveContainer" containerID="bad7f6a8d29de5c70b1535d6075c42725412283bb958655edcb536d62f10c419" Jan 20 17:38:11 crc kubenswrapper[4558]: E0120 17:38:11.754769 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bad7f6a8d29de5c70b1535d6075c42725412283bb958655edcb536d62f10c419\": container with ID starting with bad7f6a8d29de5c70b1535d6075c42725412283bb958655edcb536d62f10c419 not found: ID does not exist" containerID="bad7f6a8d29de5c70b1535d6075c42725412283bb958655edcb536d62f10c419" Jan 20 17:38:11 crc kubenswrapper[4558]: I0120 17:38:11.754789 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bad7f6a8d29de5c70b1535d6075c42725412283bb958655edcb536d62f10c419"} err="failed to get container status \"bad7f6a8d29de5c70b1535d6075c42725412283bb958655edcb536d62f10c419\": rpc error: code = NotFound desc = could not find container \"bad7f6a8d29de5c70b1535d6075c42725412283bb958655edcb536d62f10c419\": container with ID starting with bad7f6a8d29de5c70b1535d6075c42725412283bb958655edcb536d62f10c419 not found: ID does not exist" Jan 20 17:38:11 crc kubenswrapper[4558]: I0120 17:38:11.754806 4558 scope.go:117] "RemoveContainer" containerID="3a2576d6f85fa2071ef37c084cdd8c32f7c6ddb702173be6308da0b714ce8400" Jan 20 17:38:11 crc kubenswrapper[4558]: E0120 17:38:11.755112 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3a2576d6f85fa2071ef37c084cdd8c32f7c6ddb702173be6308da0b714ce8400\": container with ID starting with 3a2576d6f85fa2071ef37c084cdd8c32f7c6ddb702173be6308da0b714ce8400 not found: ID does not exist" 
containerID="3a2576d6f85fa2071ef37c084cdd8c32f7c6ddb702173be6308da0b714ce8400" Jan 20 17:38:11 crc kubenswrapper[4558]: I0120 17:38:11.755135 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3a2576d6f85fa2071ef37c084cdd8c32f7c6ddb702173be6308da0b714ce8400"} err="failed to get container status \"3a2576d6f85fa2071ef37c084cdd8c32f7c6ddb702173be6308da0b714ce8400\": rpc error: code = NotFound desc = could not find container \"3a2576d6f85fa2071ef37c084cdd8c32f7c6ddb702173be6308da0b714ce8400\": container with ID starting with 3a2576d6f85fa2071ef37c084cdd8c32f7c6ddb702173be6308da0b714ce8400 not found: ID does not exist" Jan 20 17:38:11 crc kubenswrapper[4558]: I0120 17:38:11.755147 4558 scope.go:117] "RemoveContainer" containerID="2773fa3804e1639afaf6e312e08a0b9f626d09103c779a099e0c1bbf0928a8b9" Jan 20 17:38:11 crc kubenswrapper[4558]: E0120 17:38:11.755431 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2773fa3804e1639afaf6e312e08a0b9f626d09103c779a099e0c1bbf0928a8b9\": container with ID starting with 2773fa3804e1639afaf6e312e08a0b9f626d09103c779a099e0c1bbf0928a8b9 not found: ID does not exist" containerID="2773fa3804e1639afaf6e312e08a0b9f626d09103c779a099e0c1bbf0928a8b9" Jan 20 17:38:11 crc kubenswrapper[4558]: I0120 17:38:11.755453 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2773fa3804e1639afaf6e312e08a0b9f626d09103c779a099e0c1bbf0928a8b9"} err="failed to get container status \"2773fa3804e1639afaf6e312e08a0b9f626d09103c779a099e0c1bbf0928a8b9\": rpc error: code = NotFound desc = could not find container \"2773fa3804e1639afaf6e312e08a0b9f626d09103c779a099e0c1bbf0928a8b9\": container with ID starting with 2773fa3804e1639afaf6e312e08a0b9f626d09103c779a099e0c1bbf0928a8b9 not found: ID does not exist" Jan 20 17:38:11 crc kubenswrapper[4558]: I0120 17:38:11.755466 4558 scope.go:117] "RemoveContainer" containerID="23ff4d2ea69aec4c032a694e47beda8f08067327b0540a599b22bd0a5e1d7fe1" Jan 20 17:38:11 crc kubenswrapper[4558]: E0120 17:38:11.755786 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"23ff4d2ea69aec4c032a694e47beda8f08067327b0540a599b22bd0a5e1d7fe1\": container with ID starting with 23ff4d2ea69aec4c032a694e47beda8f08067327b0540a599b22bd0a5e1d7fe1 not found: ID does not exist" containerID="23ff4d2ea69aec4c032a694e47beda8f08067327b0540a599b22bd0a5e1d7fe1" Jan 20 17:38:11 crc kubenswrapper[4558]: I0120 17:38:11.755810 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"23ff4d2ea69aec4c032a694e47beda8f08067327b0540a599b22bd0a5e1d7fe1"} err="failed to get container status \"23ff4d2ea69aec4c032a694e47beda8f08067327b0540a599b22bd0a5e1d7fe1\": rpc error: code = NotFound desc = could not find container \"23ff4d2ea69aec4c032a694e47beda8f08067327b0540a599b22bd0a5e1d7fe1\": container with ID starting with 23ff4d2ea69aec4c032a694e47beda8f08067327b0540a599b22bd0a5e1d7fe1 not found: ID does not exist" Jan 20 17:38:11 crc kubenswrapper[4558]: I0120 17:38:11.755825 4558 scope.go:117] "RemoveContainer" containerID="47797d4b979364b402e28a83de59b30acd8a2031f77af23bdb31c8827f23a687" Jan 20 17:38:11 crc kubenswrapper[4558]: E0120 17:38:11.756108 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"47797d4b979364b402e28a83de59b30acd8a2031f77af23bdb31c8827f23a687\": container with ID starting with 47797d4b979364b402e28a83de59b30acd8a2031f77af23bdb31c8827f23a687 not found: ID does not exist" containerID="47797d4b979364b402e28a83de59b30acd8a2031f77af23bdb31c8827f23a687" Jan 20 17:38:11 crc kubenswrapper[4558]: I0120 17:38:11.756132 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"47797d4b979364b402e28a83de59b30acd8a2031f77af23bdb31c8827f23a687"} err="failed to get container status \"47797d4b979364b402e28a83de59b30acd8a2031f77af23bdb31c8827f23a687\": rpc error: code = NotFound desc = could not find container \"47797d4b979364b402e28a83de59b30acd8a2031f77af23bdb31c8827f23a687\": container with ID starting with 47797d4b979364b402e28a83de59b30acd8a2031f77af23bdb31c8827f23a687 not found: ID does not exist" Jan 20 17:38:11 crc kubenswrapper[4558]: I0120 17:38:11.756146 4558 scope.go:117] "RemoveContainer" containerID="9f1797db51cad1bf6e1bdae441554b6ce94379733c94a7ad9722e8bdd8202d02" Jan 20 17:38:11 crc kubenswrapper[4558]: E0120 17:38:11.756488 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9f1797db51cad1bf6e1bdae441554b6ce94379733c94a7ad9722e8bdd8202d02\": container with ID starting with 9f1797db51cad1bf6e1bdae441554b6ce94379733c94a7ad9722e8bdd8202d02 not found: ID does not exist" containerID="9f1797db51cad1bf6e1bdae441554b6ce94379733c94a7ad9722e8bdd8202d02" Jan 20 17:38:11 crc kubenswrapper[4558]: I0120 17:38:11.756526 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9f1797db51cad1bf6e1bdae441554b6ce94379733c94a7ad9722e8bdd8202d02"} err="failed to get container status \"9f1797db51cad1bf6e1bdae441554b6ce94379733c94a7ad9722e8bdd8202d02\": rpc error: code = NotFound desc = could not find container \"9f1797db51cad1bf6e1bdae441554b6ce94379733c94a7ad9722e8bdd8202d02\": container with ID starting with 9f1797db51cad1bf6e1bdae441554b6ce94379733c94a7ad9722e8bdd8202d02 not found: ID does not exist" Jan 20 17:38:11 crc kubenswrapper[4558]: I0120 17:38:11.756557 4558 scope.go:117] "RemoveContainer" containerID="2b29e3812d37926ee8f85d9b22c2a577e69fa75b6f1695111d8868abfe8319dc" Jan 20 17:38:11 crc kubenswrapper[4558]: E0120 17:38:11.756862 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2b29e3812d37926ee8f85d9b22c2a577e69fa75b6f1695111d8868abfe8319dc\": container with ID starting with 2b29e3812d37926ee8f85d9b22c2a577e69fa75b6f1695111d8868abfe8319dc not found: ID does not exist" containerID="2b29e3812d37926ee8f85d9b22c2a577e69fa75b6f1695111d8868abfe8319dc" Jan 20 17:38:11 crc kubenswrapper[4558]: I0120 17:38:11.756900 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2b29e3812d37926ee8f85d9b22c2a577e69fa75b6f1695111d8868abfe8319dc"} err="failed to get container status \"2b29e3812d37926ee8f85d9b22c2a577e69fa75b6f1695111d8868abfe8319dc\": rpc error: code = NotFound desc = could not find container \"2b29e3812d37926ee8f85d9b22c2a577e69fa75b6f1695111d8868abfe8319dc\": container with ID starting with 2b29e3812d37926ee8f85d9b22c2a577e69fa75b6f1695111d8868abfe8319dc not found: ID does not exist" Jan 20 17:38:11 crc kubenswrapper[4558]: I0120 17:38:11.756913 4558 scope.go:117] "RemoveContainer" containerID="d0d49ab0892c850e328fb329607d75c06674dd1862b648f22f4a98fb49a98e23" Jan 20 17:38:11 crc 
kubenswrapper[4558]: E0120 17:38:11.757193 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d0d49ab0892c850e328fb329607d75c06674dd1862b648f22f4a98fb49a98e23\": container with ID starting with d0d49ab0892c850e328fb329607d75c06674dd1862b648f22f4a98fb49a98e23 not found: ID does not exist" containerID="d0d49ab0892c850e328fb329607d75c06674dd1862b648f22f4a98fb49a98e23" Jan 20 17:38:11 crc kubenswrapper[4558]: I0120 17:38:11.757219 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d0d49ab0892c850e328fb329607d75c06674dd1862b648f22f4a98fb49a98e23"} err="failed to get container status \"d0d49ab0892c850e328fb329607d75c06674dd1862b648f22f4a98fb49a98e23\": rpc error: code = NotFound desc = could not find container \"d0d49ab0892c850e328fb329607d75c06674dd1862b648f22f4a98fb49a98e23\": container with ID starting with d0d49ab0892c850e328fb329607d75c06674dd1862b648f22f4a98fb49a98e23 not found: ID does not exist" Jan 20 17:38:11 crc kubenswrapper[4558]: I0120 17:38:11.757233 4558 scope.go:117] "RemoveContainer" containerID="2539753eba585cbf5573638f6961f2168ad0433d885e5c40c84893ee902e38b5" Jan 20 17:38:11 crc kubenswrapper[4558]: E0120 17:38:11.757484 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2539753eba585cbf5573638f6961f2168ad0433d885e5c40c84893ee902e38b5\": container with ID starting with 2539753eba585cbf5573638f6961f2168ad0433d885e5c40c84893ee902e38b5 not found: ID does not exist" containerID="2539753eba585cbf5573638f6961f2168ad0433d885e5c40c84893ee902e38b5" Jan 20 17:38:11 crc kubenswrapper[4558]: I0120 17:38:11.757506 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2539753eba585cbf5573638f6961f2168ad0433d885e5c40c84893ee902e38b5"} err="failed to get container status \"2539753eba585cbf5573638f6961f2168ad0433d885e5c40c84893ee902e38b5\": rpc error: code = NotFound desc = could not find container \"2539753eba585cbf5573638f6961f2168ad0433d885e5c40c84893ee902e38b5\": container with ID starting with 2539753eba585cbf5573638f6961f2168ad0433d885e5c40c84893ee902e38b5 not found: ID does not exist" Jan 20 17:38:11 crc kubenswrapper[4558]: I0120 17:38:11.757520 4558 scope.go:117] "RemoveContainer" containerID="f84266a8171f0ff80bdcdf75ef52c201b51aa2133964c7208ddf98fa9d157472" Jan 20 17:38:11 crc kubenswrapper[4558]: E0120 17:38:11.757758 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f84266a8171f0ff80bdcdf75ef52c201b51aa2133964c7208ddf98fa9d157472\": container with ID starting with f84266a8171f0ff80bdcdf75ef52c201b51aa2133964c7208ddf98fa9d157472 not found: ID does not exist" containerID="f84266a8171f0ff80bdcdf75ef52c201b51aa2133964c7208ddf98fa9d157472" Jan 20 17:38:11 crc kubenswrapper[4558]: I0120 17:38:11.757788 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f84266a8171f0ff80bdcdf75ef52c201b51aa2133964c7208ddf98fa9d157472"} err="failed to get container status \"f84266a8171f0ff80bdcdf75ef52c201b51aa2133964c7208ddf98fa9d157472\": rpc error: code = NotFound desc = could not find container \"f84266a8171f0ff80bdcdf75ef52c201b51aa2133964c7208ddf98fa9d157472\": container with ID starting with f84266a8171f0ff80bdcdf75ef52c201b51aa2133964c7208ddf98fa9d157472 not found: ID does not exist" Jan 20 17:38:11 crc kubenswrapper[4558]: 
I0120 17:38:11.757799 4558 scope.go:117] "RemoveContainer" containerID="59a8367d177182b4518509dac1c5fda5058c9891b3fbc80a0b902bff689f902b" Jan 20 17:38:11 crc kubenswrapper[4558]: E0120 17:38:11.758084 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"59a8367d177182b4518509dac1c5fda5058c9891b3fbc80a0b902bff689f902b\": container with ID starting with 59a8367d177182b4518509dac1c5fda5058c9891b3fbc80a0b902bff689f902b not found: ID does not exist" containerID="59a8367d177182b4518509dac1c5fda5058c9891b3fbc80a0b902bff689f902b" Jan 20 17:38:11 crc kubenswrapper[4558]: I0120 17:38:11.758105 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"59a8367d177182b4518509dac1c5fda5058c9891b3fbc80a0b902bff689f902b"} err="failed to get container status \"59a8367d177182b4518509dac1c5fda5058c9891b3fbc80a0b902bff689f902b\": rpc error: code = NotFound desc = could not find container \"59a8367d177182b4518509dac1c5fda5058c9891b3fbc80a0b902bff689f902b\": container with ID starting with 59a8367d177182b4518509dac1c5fda5058c9891b3fbc80a0b902bff689f902b not found: ID does not exist" Jan 20 17:38:11 crc kubenswrapper[4558]: I0120 17:38:11.817587 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-storage-0"] Jan 20 17:38:11 crc kubenswrapper[4558]: I0120 17:38:11.825857 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/swift-storage-0"] Jan 20 17:38:12 crc kubenswrapper[4558]: I0120 17:38:12.575268 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" path="/var/lib/kubelet/pods/85471d11-01bf-4a15-b6bc-fabbdaa5750b/volumes" Jan 20 17:38:17 crc kubenswrapper[4558]: I0120 17:38:17.169477 4558 scope.go:117] "RemoveContainer" containerID="29d4e80472a2976b5555e6b99ba389153a2a5584bf4c4181c2a9d783faf098b2" Jan 20 17:38:17 crc kubenswrapper[4558]: I0120 17:38:17.191361 4558 scope.go:117] "RemoveContainer" containerID="8bf3869dd26a4d9967ff8e46c95c2f1f103bd6f215f6faf970d4d2330579f3f1" Jan 20 17:38:17 crc kubenswrapper[4558]: I0120 17:38:17.220230 4558 scope.go:117] "RemoveContainer" containerID="db8e8828d641c74a4518b290f587aa376715ea2305d752194da19a7d7e76d365" Jan 20 17:38:17 crc kubenswrapper[4558]: I0120 17:38:17.268329 4558 scope.go:117] "RemoveContainer" containerID="e5f8385fa34d72587ed1458c9e852fe135326907c6264adfa2ce9be72b329691" Jan 20 17:38:17 crc kubenswrapper[4558]: I0120 17:38:17.299893 4558 scope.go:117] "RemoveContainer" containerID="62fe0620a925c3f6194650b38fc772a458aef217a8b081fff99c16e4e4ed75bf" Jan 20 17:38:17 crc kubenswrapper[4558]: I0120 17:38:17.321799 4558 scope.go:117] "RemoveContainer" containerID="75ebc41ab16927c48f8afc5adcf73afda1095d0f103558bf36ce7dae6248c5c7" Jan 20 17:38:17 crc kubenswrapper[4558]: I0120 17:38:17.344023 4558 scope.go:117] "RemoveContainer" containerID="3a3e340167ccce72bbe411439f7909473c7e135797ac4b0db65fe1584f2e6946" Jan 20 17:38:17 crc kubenswrapper[4558]: I0120 17:38:17.361280 4558 scope.go:117] "RemoveContainer" containerID="76b128f3cb64faa63f2cd117e4ba208a86aca8bf3e631936793f60b7e1161a97" Jan 20 17:38:17 crc kubenswrapper[4558]: I0120 17:38:17.408592 4558 scope.go:117] "RemoveContainer" containerID="c4b9cfb0a2514a56eb0849ff86a8a6cd56fb0b2cf35efee93f6ec0f03d4c6cfa" Jan 20 17:38:17 crc kubenswrapper[4558]: I0120 17:38:17.438432 4558 scope.go:117] "RemoveContainer" 
containerID="ed22a8f15040fdb70a4928a92fa242110b06a6c1df1e015b23c8cb0337987b61" Jan 20 17:38:17 crc kubenswrapper[4558]: I0120 17:38:17.469987 4558 scope.go:117] "RemoveContainer" containerID="22b0bb6d8dfc540e08d68aafb7f363e65e6bf28ecef8a61519f3bd8c6294ee98" Jan 20 17:38:17 crc kubenswrapper[4558]: I0120 17:38:17.490848 4558 scope.go:117] "RemoveContainer" containerID="9f9e4dd6e557284ebc769ce80f9deed9621afea1ebb690cdf6335f93f3210c7d" Jan 20 17:38:17 crc kubenswrapper[4558]: I0120 17:38:17.516650 4558 scope.go:117] "RemoveContainer" containerID="6c4463c91500e6f60b0e412648172b7bc947cb8a2b79b8f1016e559b589b2165" Jan 20 17:38:17 crc kubenswrapper[4558]: I0120 17:38:17.566573 4558 scope.go:117] "RemoveContainer" containerID="a7601c725675bc7987095de4d5932e6397cc2d621e319d01c58442eee53836e0" Jan 20 17:38:17 crc kubenswrapper[4558]: I0120 17:38:17.594024 4558 scope.go:117] "RemoveContainer" containerID="e6dbfcfe37cc1fc1b51a5b0ceb80a37987d1128a5d6564f93754a47973a5a2d5" Jan 20 17:38:17 crc kubenswrapper[4558]: I0120 17:38:17.615930 4558 scope.go:117] "RemoveContainer" containerID="182441449b8343f15df8dd6b8d344ab02e993a5dadb813be2e93069d80443bbe" Jan 20 17:38:17 crc kubenswrapper[4558]: I0120 17:38:17.639481 4558 scope.go:117] "RemoveContainer" containerID="aafa82e0ba7ad650ecae2bee5a0e71bb01982354e592b14191e645d9fc1d07bc" Jan 20 17:38:17 crc kubenswrapper[4558]: I0120 17:38:17.677949 4558 scope.go:117] "RemoveContainer" containerID="86faabac2df50963bf2c0ee255197a15360640acc9dad8b29910c78ef8f18691" Jan 20 17:38:17 crc kubenswrapper[4558]: I0120 17:38:17.695520 4558 scope.go:117] "RemoveContainer" containerID="a43d1649fd01e64f0e6f6c358fe3b57e49f79cb49bee9b16b8082898e6121cb8" Jan 20 17:38:17 crc kubenswrapper[4558]: I0120 17:38:17.739800 4558 scope.go:117] "RemoveContainer" containerID="c1062c77fd9698ee03c8343d7ee7f40ade58e38052555615881b886f6a23d884" Jan 20 17:38:17 crc kubenswrapper[4558]: I0120 17:38:17.759219 4558 scope.go:117] "RemoveContainer" containerID="984dea855e8e7fe42eeed165d3a29ee0a673310ad202b53132d6f313fe890e3d" Jan 20 17:38:17 crc kubenswrapper[4558]: I0120 17:38:17.777060 4558 scope.go:117] "RemoveContainer" containerID="65629aa894ee7068d5ea871d1a1a3d97a8a4dd768f01f9781ead6f9a6b82bfb2" Jan 20 17:38:17 crc kubenswrapper[4558]: I0120 17:38:17.794599 4558 scope.go:117] "RemoveContainer" containerID="10570933c613aadb10b453629851c02d74b4bcbbc5d9a50e439b2b15473a74fc" Jan 20 17:38:17 crc kubenswrapper[4558]: I0120 17:38:17.814157 4558 scope.go:117] "RemoveContainer" containerID="aba10c7094d84cccbde149d948a3e60389448792ee9a5d96fd2796f7883a64b4" Jan 20 17:38:17 crc kubenswrapper[4558]: I0120 17:38:17.835149 4558 scope.go:117] "RemoveContainer" containerID="bc4907b838488fbc10fd9186808e170474f88d3e2b4a74884a7a6935d84328d9" Jan 20 17:38:17 crc kubenswrapper[4558]: I0120 17:38:17.850455 4558 scope.go:117] "RemoveContainer" containerID="8e5c536c7a93e02e95f308947a5b32600cda6ec3942625a79d8f934630ae1c60" Jan 20 17:38:17 crc kubenswrapper[4558]: I0120 17:38:17.884529 4558 scope.go:117] "RemoveContainer" containerID="a8e3fc9bf9c4e7346570c5f357e43cdd9269a735e12bf5a5218a0b8161bcbfa1" Jan 20 17:38:17 crc kubenswrapper[4558]: I0120 17:38:17.902553 4558 scope.go:117] "RemoveContainer" containerID="5bd5fedc19895ba71e1b6d3077a41a79fa078729e40ca74ac886956eafb22003" Jan 20 17:38:17 crc kubenswrapper[4558]: I0120 17:38:17.928219 4558 scope.go:117] "RemoveContainer" containerID="f04b8f0579435de2c14d1baa0b1a6029323eaa4b69328f14b70cb3236804875c" Jan 20 17:38:17 crc kubenswrapper[4558]: I0120 17:38:17.944088 4558 
scope.go:117] "RemoveContainer" containerID="dd7569b8ff01533e3501e65ae5d5c255b180a086f14b419b53f83fd01756e7f2" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.428933 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-mhzjj"] Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.451332 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-mhzjj"] Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.535424 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-fgf24"] Jan 20 17:38:20 crc kubenswrapper[4558]: E0120 17:38:20.535793 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" containerName="account-server" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.535815 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" containerName="account-server" Jan 20 17:38:20 crc kubenswrapper[4558]: E0120 17:38:20.535831 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b57aa66-3c71-422d-b029-86cb0e3a9aef" containerName="neutron-api" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.535838 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b57aa66-3c71-422d-b029-86cb0e3a9aef" containerName="neutron-api" Jan 20 17:38:20 crc kubenswrapper[4558]: E0120 17:38:20.535848 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0def1343-9e4b-4f74-84bd-3212688b59ce" containerName="nova-cell1-conductor-conductor" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.535854 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0def1343-9e4b-4f74-84bd-3212688b59ce" containerName="nova-cell1-conductor-conductor" Jan 20 17:38:20 crc kubenswrapper[4558]: E0120 17:38:20.535862 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1fff3747-796d-420a-aff0-846dfc615df9" containerName="mysql-bootstrap" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.535867 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1fff3747-796d-420a-aff0-846dfc615df9" containerName="mysql-bootstrap" Jan 20 17:38:20 crc kubenswrapper[4558]: E0120 17:38:20.535882 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79a4b689-d81b-44da-8baa-88fc6ce78172" containerName="mysql-bootstrap" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.535888 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="79a4b689-d81b-44da-8baa-88fc6ce78172" containerName="mysql-bootstrap" Jan 20 17:38:20 crc kubenswrapper[4558]: E0120 17:38:20.535896 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" containerName="object-replicator" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.535903 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" containerName="object-replicator" Jan 20 17:38:20 crc kubenswrapper[4558]: E0120 17:38:20.535913 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" containerName="container-auditor" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.535920 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" containerName="container-auditor" Jan 20 17:38:20 crc kubenswrapper[4558]: E0120 17:38:20.535929 4558 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="86db33ff-9888-4ae6-b6f8-48593b3cd2e2" containerName="nova-scheduler-scheduler" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.535935 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="86db33ff-9888-4ae6-b6f8-48593b3cd2e2" containerName="nova-scheduler-scheduler" Jan 20 17:38:20 crc kubenswrapper[4558]: E0120 17:38:20.535947 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52895fa4-c08b-43b8-9f63-cbc8621db11e" containerName="mariadb-account-create-update" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.535953 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="52895fa4-c08b-43b8-9f63-cbc8621db11e" containerName="mariadb-account-create-update" Jan 20 17:38:20 crc kubenswrapper[4558]: E0120 17:38:20.535961 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0696a635-5dc9-46e9-8502-47fab9ff761c" containerName="setup-container" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.535966 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0696a635-5dc9-46e9-8502-47fab9ff761c" containerName="setup-container" Jan 20 17:38:20 crc kubenswrapper[4558]: E0120 17:38:20.535974 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79a4b689-d81b-44da-8baa-88fc6ce78172" containerName="galera" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.535980 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="79a4b689-d81b-44da-8baa-88fc6ce78172" containerName="galera" Jan 20 17:38:20 crc kubenswrapper[4558]: E0120 17:38:20.535988 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" containerName="container-server" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.535993 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" containerName="container-server" Jan 20 17:38:20 crc kubenswrapper[4558]: E0120 17:38:20.536001 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" containerName="container-replicator" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.536006 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" containerName="container-replicator" Jan 20 17:38:20 crc kubenswrapper[4558]: E0120 17:38:20.536016 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c7ffc3d3-5db6-484c-b9c1-d065e7177eb7" containerName="proxy-httpd" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.536021 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c7ffc3d3-5db6-484c-b9c1-d065e7177eb7" containerName="proxy-httpd" Jan 20 17:38:20 crc kubenswrapper[4558]: E0120 17:38:20.536026 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1addf16e-1af6-431b-aff1-05c48a952f5a" containerName="ceilometer-central-agent" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.536032 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1addf16e-1af6-431b-aff1-05c48a952f5a" containerName="ceilometer-central-agent" Jan 20 17:38:20 crc kubenswrapper[4558]: E0120 17:38:20.536042 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="73789760-f9bd-44e0-bf9b-c1864e5f8803" containerName="keystone-api" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.536047 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="73789760-f9bd-44e0-bf9b-c1864e5f8803" containerName="keystone-api" Jan 20 17:38:20 crc kubenswrapper[4558]: E0120 17:38:20.536056 
4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" containerName="container-updater" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.536062 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" containerName="container-updater" Jan 20 17:38:20 crc kubenswrapper[4558]: E0120 17:38:20.536070 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" containerName="object-server" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.536075 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" containerName="object-server" Jan 20 17:38:20 crc kubenswrapper[4558]: E0120 17:38:20.536083 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c6a6265-cf90-4039-9200-ba478d612baa" containerName="rabbitmq" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.536088 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c6a6265-cf90-4039-9200-ba478d612baa" containerName="rabbitmq" Jan 20 17:38:20 crc kubenswrapper[4558]: E0120 17:38:20.536098 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="640de3b3-60e1-41b8-ab00-22e375bad65c" containerName="barbican-worker" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.536103 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="640de3b3-60e1-41b8-ab00-22e375bad65c" containerName="barbican-worker" Jan 20 17:38:20 crc kubenswrapper[4558]: E0120 17:38:20.536111 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0def1343-9e4b-4f74-84bd-3212688b59ce" containerName="nova-cell1-conductor-conductor" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.536117 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0def1343-9e4b-4f74-84bd-3212688b59ce" containerName="nova-cell1-conductor-conductor" Jan 20 17:38:20 crc kubenswrapper[4558]: E0120 17:38:20.536125 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9800a14c-ebf5-4a5d-b384-c133973b55ff" containerName="probe" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.536132 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9800a14c-ebf5-4a5d-b384-c133973b55ff" containerName="probe" Jan 20 17:38:20 crc kubenswrapper[4558]: E0120 17:38:20.536140 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d2b6a10-ca95-4d72-a80b-1706822a07a7" containerName="nova-cell0-conductor-conductor" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.536146 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d2b6a10-ca95-4d72-a80b-1706822a07a7" containerName="nova-cell0-conductor-conductor" Jan 20 17:38:20 crc kubenswrapper[4558]: E0120 17:38:20.536153 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8763c4a4-3d9c-4e31-b33c-3bc9c67c55cf" containerName="init" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.536174 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8763c4a4-3d9c-4e31-b33c-3bc9c67c55cf" containerName="init" Jan 20 17:38:20 crc kubenswrapper[4558]: E0120 17:38:20.536186 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c6a6265-cf90-4039-9200-ba478d612baa" containerName="setup-container" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.536191 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c6a6265-cf90-4039-9200-ba478d612baa" containerName="setup-container" Jan 20 17:38:20 crc kubenswrapper[4558]: 
E0120 17:38:20.536199 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b77441a-39f5-4ed5-bf1b-c29900900242" containerName="placement-log" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.536204 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b77441a-39f5-4ed5-bf1b-c29900900242" containerName="placement-log" Jan 20 17:38:20 crc kubenswrapper[4558]: E0120 17:38:20.536211 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ee90982-23a3-4111-9c4e-159828a51a69" containerName="nova-metadata-log" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.536216 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ee90982-23a3-4111-9c4e-159828a51a69" containerName="nova-metadata-log" Jan 20 17:38:20 crc kubenswrapper[4558]: E0120 17:38:20.536225 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94062d42-28cb-4c8a-afa4-f51458dedc6c" containerName="barbican-api-log" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.536231 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="94062d42-28cb-4c8a-afa4-f51458dedc6c" containerName="barbican-api-log" Jan 20 17:38:20 crc kubenswrapper[4558]: E0120 17:38:20.536238 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" containerName="account-replicator" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.536243 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" containerName="account-replicator" Jan 20 17:38:20 crc kubenswrapper[4558]: E0120 17:38:20.536252 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11d1aa99-639c-451a-876e-59de6098e407" containerName="barbican-keystone-listener-log" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.536257 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="11d1aa99-639c-451a-876e-59de6098e407" containerName="barbican-keystone-listener-log" Jan 20 17:38:20 crc kubenswrapper[4558]: E0120 17:38:20.536267 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ee90982-23a3-4111-9c4e-159828a51a69" containerName="nova-metadata-metadata" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.536273 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ee90982-23a3-4111-9c4e-159828a51a69" containerName="nova-metadata-metadata" Jan 20 17:38:20 crc kubenswrapper[4558]: E0120 17:38:20.536284 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" containerName="account-auditor" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.536290 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" containerName="account-auditor" Jan 20 17:38:20 crc kubenswrapper[4558]: E0120 17:38:20.536300 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c570a7ea-d296-44bc-a48f-6dd8be9754d0" containerName="ovn-northd" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.536307 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c570a7ea-d296-44bc-a48f-6dd8be9754d0" containerName="ovn-northd" Jan 20 17:38:20 crc kubenswrapper[4558]: E0120 17:38:20.536318 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94062d42-28cb-4c8a-afa4-f51458dedc6c" containerName="barbican-api" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.536324 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="94062d42-28cb-4c8a-afa4-f51458dedc6c" 
containerName="barbican-api" Jan 20 17:38:20 crc kubenswrapper[4558]: E0120 17:38:20.536333 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d2b6a10-ca95-4d72-a80b-1706822a07a7" containerName="nova-cell0-conductor-conductor" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.536338 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d2b6a10-ca95-4d72-a80b-1706822a07a7" containerName="nova-cell0-conductor-conductor" Jan 20 17:38:20 crc kubenswrapper[4558]: E0120 17:38:20.536345 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1addf16e-1af6-431b-aff1-05c48a952f5a" containerName="proxy-httpd" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.536350 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1addf16e-1af6-431b-aff1-05c48a952f5a" containerName="proxy-httpd" Jan 20 17:38:20 crc kubenswrapper[4558]: E0120 17:38:20.536360 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f19d5a0b-6d74-4b46-86a3-9381feb3f158" containerName="glance-log" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.536365 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f19d5a0b-6d74-4b46-86a3-9381feb3f158" containerName="glance-log" Jan 20 17:38:20 crc kubenswrapper[4558]: E0120 17:38:20.536374 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="906beedc-3fa2-4d6e-a6e8-485ca2fb1082" containerName="nova-api-log" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.536379 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="906beedc-3fa2-4d6e-a6e8-485ca2fb1082" containerName="nova-api-log" Jan 20 17:38:20 crc kubenswrapper[4558]: E0120 17:38:20.536389 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" containerName="object-auditor" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.536395 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" containerName="object-auditor" Jan 20 17:38:20 crc kubenswrapper[4558]: E0120 17:38:20.536405 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b18f4fcf-eaae-401f-99ec-7b130ad8a6c1" containerName="memcached" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.536410 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b18f4fcf-eaae-401f-99ec-7b130ad8a6c1" containerName="memcached" Jan 20 17:38:20 crc kubenswrapper[4558]: E0120 17:38:20.536418 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0def1343-9e4b-4f74-84bd-3212688b59ce" containerName="nova-cell1-conductor-conductor" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.536423 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0def1343-9e4b-4f74-84bd-3212688b59ce" containerName="nova-cell1-conductor-conductor" Jan 20 17:38:20 crc kubenswrapper[4558]: E0120 17:38:20.536432 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f19d5a0b-6d74-4b46-86a3-9381feb3f158" containerName="glance-httpd" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.536438 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f19d5a0b-6d74-4b46-86a3-9381feb3f158" containerName="glance-httpd" Jan 20 17:38:20 crc kubenswrapper[4558]: E0120 17:38:20.536444 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="640de3b3-60e1-41b8-ab00-22e375bad65c" containerName="barbican-worker-log" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.536451 4558 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="640de3b3-60e1-41b8-ab00-22e375bad65c" containerName="barbican-worker-log" Jan 20 17:38:20 crc kubenswrapper[4558]: E0120 17:38:20.536460 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0696a635-5dc9-46e9-8502-47fab9ff761c" containerName="rabbitmq" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.536466 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0696a635-5dc9-46e9-8502-47fab9ff761c" containerName="rabbitmq" Jan 20 17:38:20 crc kubenswrapper[4558]: E0120 17:38:20.536474 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11d1aa99-639c-451a-876e-59de6098e407" containerName="barbican-keystone-listener" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.536479 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="11d1aa99-639c-451a-876e-59de6098e407" containerName="barbican-keystone-listener" Jan 20 17:38:20 crc kubenswrapper[4558]: E0120 17:38:20.536487 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" containerName="rsync" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.536492 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" containerName="rsync" Jan 20 17:38:20 crc kubenswrapper[4558]: E0120 17:38:20.536498 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" containerName="account-reaper" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.536503 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" containerName="account-reaper" Jan 20 17:38:20 crc kubenswrapper[4558]: E0120 17:38:20.536514 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8763c4a4-3d9c-4e31-b33c-3bc9c67c55cf" containerName="dnsmasq-dns" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.536519 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8763c4a4-3d9c-4e31-b33c-3bc9c67c55cf" containerName="dnsmasq-dns" Jan 20 17:38:20 crc kubenswrapper[4558]: E0120 17:38:20.536525 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c7ffc3d3-5db6-484c-b9c1-d065e7177eb7" containerName="proxy-server" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.536531 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c7ffc3d3-5db6-484c-b9c1-d065e7177eb7" containerName="proxy-server" Jan 20 17:38:20 crc kubenswrapper[4558]: E0120 17:38:20.536537 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" containerName="swift-recon-cron" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.536542 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" containerName="swift-recon-cron" Jan 20 17:38:20 crc kubenswrapper[4558]: E0120 17:38:20.536547 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7881580-f0a5-47a0-9622-3927bcce668a" containerName="glance-log" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.536552 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7881580-f0a5-47a0-9622-3927bcce668a" containerName="glance-log" Jan 20 17:38:20 crc kubenswrapper[4558]: E0120 17:38:20.536559 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1addf16e-1af6-431b-aff1-05c48a952f5a" containerName="sg-core" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.536563 4558 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="1addf16e-1af6-431b-aff1-05c48a952f5a" containerName="sg-core" Jan 20 17:38:20 crc kubenswrapper[4558]: E0120 17:38:20.536571 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" containerName="object-expirer" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.536576 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" containerName="object-expirer" Jan 20 17:38:20 crc kubenswrapper[4558]: E0120 17:38:20.536585 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7881580-f0a5-47a0-9622-3927bcce668a" containerName="glance-httpd" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.536590 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7881580-f0a5-47a0-9622-3927bcce668a" containerName="glance-httpd" Jan 20 17:38:20 crc kubenswrapper[4558]: E0120 17:38:20.536597 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c570a7ea-d296-44bc-a48f-6dd8be9754d0" containerName="openstack-network-exporter" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.536602 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c570a7ea-d296-44bc-a48f-6dd8be9754d0" containerName="openstack-network-exporter" Jan 20 17:38:20 crc kubenswrapper[4558]: E0120 17:38:20.536609 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9800a14c-ebf5-4a5d-b384-c133973b55ff" containerName="cinder-scheduler" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.536614 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9800a14c-ebf5-4a5d-b384-c133973b55ff" containerName="cinder-scheduler" Jan 20 17:38:20 crc kubenswrapper[4558]: E0120 17:38:20.536621 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1fff3747-796d-420a-aff0-846dfc615df9" containerName="galera" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.536626 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1fff3747-796d-420a-aff0-846dfc615df9" containerName="galera" Jan 20 17:38:20 crc kubenswrapper[4558]: E0120 17:38:20.536633 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b57aa66-3c71-422d-b029-86cb0e3a9aef" containerName="neutron-httpd" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.536637 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b57aa66-3c71-422d-b029-86cb0e3a9aef" containerName="neutron-httpd" Jan 20 17:38:20 crc kubenswrapper[4558]: E0120 17:38:20.536644 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1a1ac97-a530-4ba0-8c9b-835a2b576c8d" containerName="cinder-api-log" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.536649 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1a1ac97-a530-4ba0-8c9b-835a2b576c8d" containerName="cinder-api-log" Jan 20 17:38:20 crc kubenswrapper[4558]: E0120 17:38:20.536655 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="906beedc-3fa2-4d6e-a6e8-485ca2fb1082" containerName="nova-api-api" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.536661 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="906beedc-3fa2-4d6e-a6e8-485ca2fb1082" containerName="nova-api-api" Jan 20 17:38:20 crc kubenswrapper[4558]: E0120 17:38:20.536669 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1a1ac97-a530-4ba0-8c9b-835a2b576c8d" containerName="cinder-api" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.536674 4558 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="e1a1ac97-a530-4ba0-8c9b-835a2b576c8d" containerName="cinder-api" Jan 20 17:38:20 crc kubenswrapper[4558]: E0120 17:38:20.536679 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1addf16e-1af6-431b-aff1-05c48a952f5a" containerName="ceilometer-notification-agent" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.536684 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1addf16e-1af6-431b-aff1-05c48a952f5a" containerName="ceilometer-notification-agent" Jan 20 17:38:20 crc kubenswrapper[4558]: E0120 17:38:20.536694 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" containerName="object-updater" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.536699 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" containerName="object-updater" Jan 20 17:38:20 crc kubenswrapper[4558]: E0120 17:38:20.536709 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b77441a-39f5-4ed5-bf1b-c29900900242" containerName="placement-api" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.536714 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b77441a-39f5-4ed5-bf1b-c29900900242" containerName="placement-api" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.536891 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" containerName="object-server" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.536905 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" containerName="account-server" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.536911 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="11d1aa99-639c-451a-876e-59de6098e407" containerName="barbican-keystone-listener" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.536918 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="906beedc-3fa2-4d6e-a6e8-485ca2fb1082" containerName="nova-api-api" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.536926 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" containerName="rsync" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.536933 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d2b6a10-ca95-4d72-a80b-1706822a07a7" containerName="nova-cell0-conductor-conductor" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.536939 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" containerName="swift-recon-cron" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.536947 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" containerName="container-replicator" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.536957 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" containerName="account-auditor" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.536966 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="1addf16e-1af6-431b-aff1-05c48a952f5a" containerName="proxy-httpd" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.536975 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="1addf16e-1af6-431b-aff1-05c48a952f5a" containerName="ceilometer-central-agent" Jan 20 
17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.536983 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="7ee90982-23a3-4111-9c4e-159828a51a69" containerName="nova-metadata-metadata" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.536993 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1a1ac97-a530-4ba0-8c9b-835a2b576c8d" containerName="cinder-api" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.537001 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="11d1aa99-639c-451a-876e-59de6098e407" containerName="barbican-keystone-listener-log" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.537008 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="0def1343-9e4b-4f74-84bd-3212688b59ce" containerName="nova-cell1-conductor-conductor" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.537014 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="0def1343-9e4b-4f74-84bd-3212688b59ce" containerName="nova-cell1-conductor-conductor" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.537021 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c570a7ea-d296-44bc-a48f-6dd8be9754d0" containerName="openstack-network-exporter" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.537031 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" containerName="object-replicator" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.537041 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c7ffc3d3-5db6-484c-b9c1-d065e7177eb7" containerName="proxy-server" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.537047 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="0696a635-5dc9-46e9-8502-47fab9ff761c" containerName="rabbitmq" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.537058 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="94062d42-28cb-4c8a-afa4-f51458dedc6c" containerName="barbican-api" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.537064 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c7ffc3d3-5db6-484c-b9c1-d065e7177eb7" containerName="proxy-httpd" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.537070 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d2b6a10-ca95-4d72-a80b-1706822a07a7" containerName="nova-cell0-conductor-conductor" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.537076 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b18f4fcf-eaae-401f-99ec-7b130ad8a6c1" containerName="memcached" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.537084 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1a1ac97-a530-4ba0-8c9b-835a2b576c8d" containerName="cinder-api-log" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.537093 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="5b57aa66-3c71-422d-b029-86cb0e3a9aef" containerName="neutron-httpd" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.537098 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="906beedc-3fa2-4d6e-a6e8-485ca2fb1082" containerName="nova-api-log" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.537107 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="7ee90982-23a3-4111-9c4e-159828a51a69" containerName="nova-metadata-log" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.537112 
4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="1addf16e-1af6-431b-aff1-05c48a952f5a" containerName="ceilometer-notification-agent" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.537120 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b77441a-39f5-4ed5-bf1b-c29900900242" containerName="placement-log" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.537127 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" containerName="container-auditor" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.537136 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="1fff3747-796d-420a-aff0-846dfc615df9" containerName="galera" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.537144 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" containerName="account-reaper" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.537154 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="5b57aa66-3c71-422d-b029-86cb0e3a9aef" containerName="neutron-api" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.537179 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="0def1343-9e4b-4f74-84bd-3212688b59ce" containerName="nova-cell1-conductor-conductor" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.537189 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="9800a14c-ebf5-4a5d-b384-c133973b55ff" containerName="probe" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.537196 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="94062d42-28cb-4c8a-afa4-f51458dedc6c" containerName="barbican-api-log" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.537202 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" containerName="container-updater" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.537212 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8763c4a4-3d9c-4e31-b33c-3bc9c67c55cf" containerName="dnsmasq-dns" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.537218 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="86db33ff-9888-4ae6-b6f8-48593b3cd2e2" containerName="nova-scheduler-scheduler" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.537225 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" containerName="object-updater" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.537233 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="52895fa4-c08b-43b8-9f63-cbc8621db11e" containerName="mariadb-account-create-update" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.537242 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b77441a-39f5-4ed5-bf1b-c29900900242" containerName="placement-api" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.537248 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" containerName="account-replicator" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.537256 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" containerName="object-expirer" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.537262 4558 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="9800a14c-ebf5-4a5d-b384-c133973b55ff" containerName="cinder-scheduler" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.537270 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="640de3b3-60e1-41b8-ab00-22e375bad65c" containerName="barbican-worker-log" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.537277 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="52895fa4-c08b-43b8-9f63-cbc8621db11e" containerName="mariadb-account-create-update" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.537288 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="640de3b3-60e1-41b8-ab00-22e375bad65c" containerName="barbican-worker" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.537296 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7881580-f0a5-47a0-9622-3927bcce668a" containerName="glance-httpd" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.537305 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="0c6a6265-cf90-4039-9200-ba478d612baa" containerName="rabbitmq" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.537311 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7881580-f0a5-47a0-9622-3927bcce668a" containerName="glance-log" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.537319 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" containerName="object-auditor" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.537326 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="79a4b689-d81b-44da-8baa-88fc6ce78172" containerName="galera" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.537334 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f19d5a0b-6d74-4b46-86a3-9381feb3f158" containerName="glance-httpd" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.537341 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="85471d11-01bf-4a15-b6bc-fabbdaa5750b" containerName="container-server" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.537348 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="73789760-f9bd-44e0-bf9b-c1864e5f8803" containerName="keystone-api" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.537354 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f19d5a0b-6d74-4b46-86a3-9381feb3f158" containerName="glance-log" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.537362 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="1addf16e-1af6-431b-aff1-05c48a952f5a" containerName="sg-core" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.537369 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c570a7ea-d296-44bc-a48f-6dd8be9754d0" containerName="ovn-northd" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.538000 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-fgf24" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.539324 4558 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-k9ht8" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.540767 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.541040 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.541148 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.544320 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-fgf24"] Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.585320 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="996db13b-869c-4880-a4ad-d9d22e5a493b" path="/var/lib/kubelet/pods/996db13b-869c-4880-a4ad-d9d22e5a493b/volumes" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.661586 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/8baa3936-46d1-4b92-8ca3-e433b48ce217-node-mnt\") pod \"crc-storage-crc-fgf24\" (UID: \"8baa3936-46d1-4b92-8ca3-e433b48ce217\") " pod="crc-storage/crc-storage-crc-fgf24" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.661765 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z66zv\" (UniqueName: \"kubernetes.io/projected/8baa3936-46d1-4b92-8ca3-e433b48ce217-kube-api-access-z66zv\") pod \"crc-storage-crc-fgf24\" (UID: \"8baa3936-46d1-4b92-8ca3-e433b48ce217\") " pod="crc-storage/crc-storage-crc-fgf24" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.662127 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/8baa3936-46d1-4b92-8ca3-e433b48ce217-crc-storage\") pod \"crc-storage-crc-fgf24\" (UID: \"8baa3936-46d1-4b92-8ca3-e433b48ce217\") " pod="crc-storage/crc-storage-crc-fgf24" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.763674 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/8baa3936-46d1-4b92-8ca3-e433b48ce217-crc-storage\") pod \"crc-storage-crc-fgf24\" (UID: \"8baa3936-46d1-4b92-8ca3-e433b48ce217\") " pod="crc-storage/crc-storage-crc-fgf24" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.763795 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/8baa3936-46d1-4b92-8ca3-e433b48ce217-node-mnt\") pod \"crc-storage-crc-fgf24\" (UID: \"8baa3936-46d1-4b92-8ca3-e433b48ce217\") " pod="crc-storage/crc-storage-crc-fgf24" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.763842 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z66zv\" (UniqueName: \"kubernetes.io/projected/8baa3936-46d1-4b92-8ca3-e433b48ce217-kube-api-access-z66zv\") pod \"crc-storage-crc-fgf24\" (UID: \"8baa3936-46d1-4b92-8ca3-e433b48ce217\") " pod="crc-storage/crc-storage-crc-fgf24" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.764436 4558 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/8baa3936-46d1-4b92-8ca3-e433b48ce217-crc-storage\") pod \"crc-storage-crc-fgf24\" (UID: \"8baa3936-46d1-4b92-8ca3-e433b48ce217\") " pod="crc-storage/crc-storage-crc-fgf24" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.764514 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/8baa3936-46d1-4b92-8ca3-e433b48ce217-node-mnt\") pod \"crc-storage-crc-fgf24\" (UID: \"8baa3936-46d1-4b92-8ca3-e433b48ce217\") " pod="crc-storage/crc-storage-crc-fgf24" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.781280 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z66zv\" (UniqueName: \"kubernetes.io/projected/8baa3936-46d1-4b92-8ca3-e433b48ce217-kube-api-access-z66zv\") pod \"crc-storage-crc-fgf24\" (UID: \"8baa3936-46d1-4b92-8ca3-e433b48ce217\") " pod="crc-storage/crc-storage-crc-fgf24" Jan 20 17:38:20 crc kubenswrapper[4558]: I0120 17:38:20.860949 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-fgf24" Jan 20 17:38:21 crc kubenswrapper[4558]: I0120 17:38:21.249287 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-fgf24"] Jan 20 17:38:21 crc kubenswrapper[4558]: I0120 17:38:21.628427 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-fgf24" event={"ID":"8baa3936-46d1-4b92-8ca3-e433b48ce217","Type":"ContainerStarted","Data":"d7e16ac3de01885a4def78764230fc5de64d6509c56b6cf0eb7a3d1c48e506f0"} Jan 20 17:38:22 crc kubenswrapper[4558]: I0120 17:38:22.640679 4558 generic.go:334] "Generic (PLEG): container finished" podID="8baa3936-46d1-4b92-8ca3-e433b48ce217" containerID="2229f0a8777c34d3e061821af388151b63ed5e3782ef9ab2ed353af97398eb48" exitCode=0 Jan 20 17:38:22 crc kubenswrapper[4558]: I0120 17:38:22.640743 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-fgf24" event={"ID":"8baa3936-46d1-4b92-8ca3-e433b48ce217","Type":"ContainerDied","Data":"2229f0a8777c34d3e061821af388151b63ed5e3782ef9ab2ed353af97398eb48"} Jan 20 17:38:23 crc kubenswrapper[4558]: I0120 17:38:23.896199 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-fgf24" Jan 20 17:38:24 crc kubenswrapper[4558]: I0120 17:38:24.015484 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z66zv\" (UniqueName: \"kubernetes.io/projected/8baa3936-46d1-4b92-8ca3-e433b48ce217-kube-api-access-z66zv\") pod \"8baa3936-46d1-4b92-8ca3-e433b48ce217\" (UID: \"8baa3936-46d1-4b92-8ca3-e433b48ce217\") " Jan 20 17:38:24 crc kubenswrapper[4558]: I0120 17:38:24.015637 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/8baa3936-46d1-4b92-8ca3-e433b48ce217-crc-storage\") pod \"8baa3936-46d1-4b92-8ca3-e433b48ce217\" (UID: \"8baa3936-46d1-4b92-8ca3-e433b48ce217\") " Jan 20 17:38:24 crc kubenswrapper[4558]: I0120 17:38:24.015669 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/8baa3936-46d1-4b92-8ca3-e433b48ce217-node-mnt\") pod \"8baa3936-46d1-4b92-8ca3-e433b48ce217\" (UID: \"8baa3936-46d1-4b92-8ca3-e433b48ce217\") " Jan 20 17:38:24 crc kubenswrapper[4558]: I0120 17:38:24.016005 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8baa3936-46d1-4b92-8ca3-e433b48ce217-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "8baa3936-46d1-4b92-8ca3-e433b48ce217" (UID: "8baa3936-46d1-4b92-8ca3-e433b48ce217"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:38:24 crc kubenswrapper[4558]: I0120 17:38:24.016373 4558 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/8baa3936-46d1-4b92-8ca3-e433b48ce217-node-mnt\") on node \"crc\" DevicePath \"\"" Jan 20 17:38:24 crc kubenswrapper[4558]: I0120 17:38:24.020800 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8baa3936-46d1-4b92-8ca3-e433b48ce217-kube-api-access-z66zv" (OuterVolumeSpecName: "kube-api-access-z66zv") pod "8baa3936-46d1-4b92-8ca3-e433b48ce217" (UID: "8baa3936-46d1-4b92-8ca3-e433b48ce217"). InnerVolumeSpecName "kube-api-access-z66zv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:38:24 crc kubenswrapper[4558]: I0120 17:38:24.031948 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8baa3936-46d1-4b92-8ca3-e433b48ce217-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "8baa3936-46d1-4b92-8ca3-e433b48ce217" (UID: "8baa3936-46d1-4b92-8ca3-e433b48ce217"). InnerVolumeSpecName "crc-storage". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:38:24 crc kubenswrapper[4558]: I0120 17:38:24.117069 4558 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/8baa3936-46d1-4b92-8ca3-e433b48ce217-crc-storage\") on node \"crc\" DevicePath \"\"" Jan 20 17:38:24 crc kubenswrapper[4558]: I0120 17:38:24.117110 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z66zv\" (UniqueName: \"kubernetes.io/projected/8baa3936-46d1-4b92-8ca3-e433b48ce217-kube-api-access-z66zv\") on node \"crc\" DevicePath \"\"" Jan 20 17:38:24 crc kubenswrapper[4558]: I0120 17:38:24.662250 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-fgf24" event={"ID":"8baa3936-46d1-4b92-8ca3-e433b48ce217","Type":"ContainerDied","Data":"d7e16ac3de01885a4def78764230fc5de64d6509c56b6cf0eb7a3d1c48e506f0"} Jan 20 17:38:24 crc kubenswrapper[4558]: I0120 17:38:24.662601 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d7e16ac3de01885a4def78764230fc5de64d6509c56b6cf0eb7a3d1c48e506f0" Jan 20 17:38:24 crc kubenswrapper[4558]: I0120 17:38:24.662837 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-fgf24" Jan 20 17:38:26 crc kubenswrapper[4558]: I0120 17:38:26.604130 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-fgf24"] Jan 20 17:38:26 crc kubenswrapper[4558]: I0120 17:38:26.610004 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-fgf24"] Jan 20 17:38:26 crc kubenswrapper[4558]: I0120 17:38:26.714451 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-5p9zr"] Jan 20 17:38:26 crc kubenswrapper[4558]: E0120 17:38:26.714798 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8baa3936-46d1-4b92-8ca3-e433b48ce217" containerName="storage" Jan 20 17:38:26 crc kubenswrapper[4558]: I0120 17:38:26.714819 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8baa3936-46d1-4b92-8ca3-e433b48ce217" containerName="storage" Jan 20 17:38:26 crc kubenswrapper[4558]: E0120 17:38:26.714836 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52895fa4-c08b-43b8-9f63-cbc8621db11e" containerName="mariadb-account-create-update" Jan 20 17:38:26 crc kubenswrapper[4558]: I0120 17:38:26.714843 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="52895fa4-c08b-43b8-9f63-cbc8621db11e" containerName="mariadb-account-create-update" Jan 20 17:38:26 crc kubenswrapper[4558]: E0120 17:38:26.714865 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d2b6a10-ca95-4d72-a80b-1706822a07a7" containerName="nova-cell0-conductor-conductor" Jan 20 17:38:26 crc kubenswrapper[4558]: I0120 17:38:26.714880 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d2b6a10-ca95-4d72-a80b-1706822a07a7" containerName="nova-cell0-conductor-conductor" Jan 20 17:38:26 crc kubenswrapper[4558]: I0120 17:38:26.715039 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8baa3936-46d1-4b92-8ca3-e433b48ce217" containerName="storage" Jan 20 17:38:26 crc kubenswrapper[4558]: I0120 17:38:26.715057 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d2b6a10-ca95-4d72-a80b-1706822a07a7" containerName="nova-cell0-conductor-conductor" Jan 20 17:38:26 crc kubenswrapper[4558]: I0120 17:38:26.715571 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-5p9zr" Jan 20 17:38:26 crc kubenswrapper[4558]: I0120 17:38:26.717423 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Jan 20 17:38:26 crc kubenswrapper[4558]: I0120 17:38:26.718678 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Jan 20 17:38:26 crc kubenswrapper[4558]: I0120 17:38:26.718836 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Jan 20 17:38:26 crc kubenswrapper[4558]: I0120 17:38:26.720050 4558 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-k9ht8" Jan 20 17:38:26 crc kubenswrapper[4558]: I0120 17:38:26.728111 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-5p9zr"] Jan 20 17:38:26 crc kubenswrapper[4558]: I0120 17:38:26.755933 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/28001944-650c-4394-a080-dce76faf35bb-node-mnt\") pod \"crc-storage-crc-5p9zr\" (UID: \"28001944-650c-4394-a080-dce76faf35bb\") " pod="crc-storage/crc-storage-crc-5p9zr" Jan 20 17:38:26 crc kubenswrapper[4558]: I0120 17:38:26.755990 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/28001944-650c-4394-a080-dce76faf35bb-crc-storage\") pod \"crc-storage-crc-5p9zr\" (UID: \"28001944-650c-4394-a080-dce76faf35bb\") " pod="crc-storage/crc-storage-crc-5p9zr" Jan 20 17:38:26 crc kubenswrapper[4558]: I0120 17:38:26.756106 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pjz6l\" (UniqueName: \"kubernetes.io/projected/28001944-650c-4394-a080-dce76faf35bb-kube-api-access-pjz6l\") pod \"crc-storage-crc-5p9zr\" (UID: \"28001944-650c-4394-a080-dce76faf35bb\") " pod="crc-storage/crc-storage-crc-5p9zr" Jan 20 17:38:26 crc kubenswrapper[4558]: I0120 17:38:26.857139 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pjz6l\" (UniqueName: \"kubernetes.io/projected/28001944-650c-4394-a080-dce76faf35bb-kube-api-access-pjz6l\") pod \"crc-storage-crc-5p9zr\" (UID: \"28001944-650c-4394-a080-dce76faf35bb\") " pod="crc-storage/crc-storage-crc-5p9zr" Jan 20 17:38:26 crc kubenswrapper[4558]: I0120 17:38:26.857235 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/28001944-650c-4394-a080-dce76faf35bb-node-mnt\") pod \"crc-storage-crc-5p9zr\" (UID: \"28001944-650c-4394-a080-dce76faf35bb\") " pod="crc-storage/crc-storage-crc-5p9zr" Jan 20 17:38:26 crc kubenswrapper[4558]: I0120 17:38:26.857277 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/28001944-650c-4394-a080-dce76faf35bb-crc-storage\") pod \"crc-storage-crc-5p9zr\" (UID: \"28001944-650c-4394-a080-dce76faf35bb\") " pod="crc-storage/crc-storage-crc-5p9zr" Jan 20 17:38:26 crc kubenswrapper[4558]: I0120 17:38:26.857474 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/28001944-650c-4394-a080-dce76faf35bb-node-mnt\") pod \"crc-storage-crc-5p9zr\" (UID: \"28001944-650c-4394-a080-dce76faf35bb\") " 
pod="crc-storage/crc-storage-crc-5p9zr" Jan 20 17:38:26 crc kubenswrapper[4558]: I0120 17:38:26.857861 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/28001944-650c-4394-a080-dce76faf35bb-crc-storage\") pod \"crc-storage-crc-5p9zr\" (UID: \"28001944-650c-4394-a080-dce76faf35bb\") " pod="crc-storage/crc-storage-crc-5p9zr" Jan 20 17:38:26 crc kubenswrapper[4558]: I0120 17:38:26.880742 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pjz6l\" (UniqueName: \"kubernetes.io/projected/28001944-650c-4394-a080-dce76faf35bb-kube-api-access-pjz6l\") pod \"crc-storage-crc-5p9zr\" (UID: \"28001944-650c-4394-a080-dce76faf35bb\") " pod="crc-storage/crc-storage-crc-5p9zr" Jan 20 17:38:27 crc kubenswrapper[4558]: I0120 17:38:27.030991 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-5p9zr" Jan 20 17:38:27 crc kubenswrapper[4558]: I0120 17:38:27.432412 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-5p9zr"] Jan 20 17:38:27 crc kubenswrapper[4558]: I0120 17:38:27.686808 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-5p9zr" event={"ID":"28001944-650c-4394-a080-dce76faf35bb","Type":"ContainerStarted","Data":"f06007fce1956c8030b615ec314efe78d07fd9aff3761ec2bbd0d9bc45e54140"} Jan 20 17:38:28 crc kubenswrapper[4558]: I0120 17:38:28.575444 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8baa3936-46d1-4b92-8ca3-e433b48ce217" path="/var/lib/kubelet/pods/8baa3936-46d1-4b92-8ca3-e433b48ce217/volumes" Jan 20 17:38:28 crc kubenswrapper[4558]: I0120 17:38:28.696414 4558 generic.go:334] "Generic (PLEG): container finished" podID="28001944-650c-4394-a080-dce76faf35bb" containerID="528cd7b9e6027bbc3a400adab21b7705e6cfe2f8368f79f3e85bef1cda4dd1e6" exitCode=0 Jan 20 17:38:28 crc kubenswrapper[4558]: I0120 17:38:28.696455 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-5p9zr" event={"ID":"28001944-650c-4394-a080-dce76faf35bb","Type":"ContainerDied","Data":"528cd7b9e6027bbc3a400adab21b7705e6cfe2f8368f79f3e85bef1cda4dd1e6"} Jan 20 17:38:29 crc kubenswrapper[4558]: I0120 17:38:29.937966 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-5p9zr" Jan 20 17:38:30 crc kubenswrapper[4558]: I0120 17:38:30.105844 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjz6l\" (UniqueName: \"kubernetes.io/projected/28001944-650c-4394-a080-dce76faf35bb-kube-api-access-pjz6l\") pod \"28001944-650c-4394-a080-dce76faf35bb\" (UID: \"28001944-650c-4394-a080-dce76faf35bb\") " Jan 20 17:38:30 crc kubenswrapper[4558]: I0120 17:38:30.106127 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/28001944-650c-4394-a080-dce76faf35bb-crc-storage\") pod \"28001944-650c-4394-a080-dce76faf35bb\" (UID: \"28001944-650c-4394-a080-dce76faf35bb\") " Jan 20 17:38:30 crc kubenswrapper[4558]: I0120 17:38:30.106252 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/28001944-650c-4394-a080-dce76faf35bb-node-mnt\") pod \"28001944-650c-4394-a080-dce76faf35bb\" (UID: \"28001944-650c-4394-a080-dce76faf35bb\") " Jan 20 17:38:30 crc kubenswrapper[4558]: I0120 17:38:30.106304 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/28001944-650c-4394-a080-dce76faf35bb-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "28001944-650c-4394-a080-dce76faf35bb" (UID: "28001944-650c-4394-a080-dce76faf35bb"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:38:30 crc kubenswrapper[4558]: I0120 17:38:30.106798 4558 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/28001944-650c-4394-a080-dce76faf35bb-node-mnt\") on node \"crc\" DevicePath \"\"" Jan 20 17:38:30 crc kubenswrapper[4558]: I0120 17:38:30.113246 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/28001944-650c-4394-a080-dce76faf35bb-kube-api-access-pjz6l" (OuterVolumeSpecName: "kube-api-access-pjz6l") pod "28001944-650c-4394-a080-dce76faf35bb" (UID: "28001944-650c-4394-a080-dce76faf35bb"). InnerVolumeSpecName "kube-api-access-pjz6l". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:38:30 crc kubenswrapper[4558]: I0120 17:38:30.124312 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/28001944-650c-4394-a080-dce76faf35bb-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "28001944-650c-4394-a080-dce76faf35bb" (UID: "28001944-650c-4394-a080-dce76faf35bb"). InnerVolumeSpecName "crc-storage". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:38:30 crc kubenswrapper[4558]: I0120 17:38:30.209078 4558 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/28001944-650c-4394-a080-dce76faf35bb-crc-storage\") on node \"crc\" DevicePath \"\"" Jan 20 17:38:30 crc kubenswrapper[4558]: I0120 17:38:30.209126 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjz6l\" (UniqueName: \"kubernetes.io/projected/28001944-650c-4394-a080-dce76faf35bb-kube-api-access-pjz6l\") on node \"crc\" DevicePath \"\"" Jan 20 17:38:30 crc kubenswrapper[4558]: I0120 17:38:30.712025 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-5p9zr" event={"ID":"28001944-650c-4394-a080-dce76faf35bb","Type":"ContainerDied","Data":"f06007fce1956c8030b615ec314efe78d07fd9aff3761ec2bbd0d9bc45e54140"} Jan 20 17:38:30 crc kubenswrapper[4558]: I0120 17:38:30.712353 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f06007fce1956c8030b615ec314efe78d07fd9aff3761ec2bbd0d9bc45e54140" Jan 20 17:38:30 crc kubenswrapper[4558]: I0120 17:38:30.712094 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-5p9zr" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.038068 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:38:42 crc kubenswrapper[4558]: E0120 17:38:42.038939 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28001944-650c-4394-a080-dce76faf35bb" containerName="storage" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.038954 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="28001944-650c-4394-a080-dce76faf35bb" containerName="storage" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.039103 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="28001944-650c-4394-a080-dce76faf35bb" containerName="storage" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.039908 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.042095 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"rabbitmq-config-data" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.042119 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"rabbitmq-default-user" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.042101 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"rabbitmq-server-conf" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.042182 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"rabbitmq-plugins-conf" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.042349 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"rabbitmq-server-dockercfg-zrnd5" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.042681 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"rabbitmq-erlang-cookie" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.046444 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-rabbitmq-svc" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.049368 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.199445 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-server-0\" (UID: \"4765e529-9729-4d27-a252-c0c9a7b67beb\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.199518 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/4765e529-9729-4d27-a252-c0c9a7b67beb-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"4765e529-9729-4d27-a252-c0c9a7b67beb\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.199583 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/4765e529-9729-4d27-a252-c0c9a7b67beb-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"4765e529-9729-4d27-a252-c0c9a7b67beb\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.199607 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xw7h7\" (UniqueName: \"kubernetes.io/projected/4765e529-9729-4d27-a252-c0c9a7b67beb-kube-api-access-xw7h7\") pod \"rabbitmq-server-0\" (UID: \"4765e529-9729-4d27-a252-c0c9a7b67beb\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.199630 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4765e529-9729-4d27-a252-c0c9a7b67beb-config-data\") pod \"rabbitmq-server-0\" (UID: \"4765e529-9729-4d27-a252-c0c9a7b67beb\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.200076 4558 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/4765e529-9729-4d27-a252-c0c9a7b67beb-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"4765e529-9729-4d27-a252-c0c9a7b67beb\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.200294 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/4765e529-9729-4d27-a252-c0c9a7b67beb-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"4765e529-9729-4d27-a252-c0c9a7b67beb\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.200474 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/4765e529-9729-4d27-a252-c0c9a7b67beb-server-conf\") pod \"rabbitmq-server-0\" (UID: \"4765e529-9729-4d27-a252-c0c9a7b67beb\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.200509 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/4765e529-9729-4d27-a252-c0c9a7b67beb-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"4765e529-9729-4d27-a252-c0c9a7b67beb\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.200566 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/4765e529-9729-4d27-a252-c0c9a7b67beb-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"4765e529-9729-4d27-a252-c0c9a7b67beb\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.200608 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/4765e529-9729-4d27-a252-c0c9a7b67beb-pod-info\") pod \"rabbitmq-server-0\" (UID: \"4765e529-9729-4d27-a252-c0c9a7b67beb\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.302219 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/4765e529-9729-4d27-a252-c0c9a7b67beb-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"4765e529-9729-4d27-a252-c0c9a7b67beb\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.302316 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/4765e529-9729-4d27-a252-c0c9a7b67beb-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"4765e529-9729-4d27-a252-c0c9a7b67beb\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.302386 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/4765e529-9729-4d27-a252-c0c9a7b67beb-server-conf\") pod \"rabbitmq-server-0\" (UID: \"4765e529-9729-4d27-a252-c0c9a7b67beb\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.302412 4558 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/4765e529-9729-4d27-a252-c0c9a7b67beb-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"4765e529-9729-4d27-a252-c0c9a7b67beb\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.302446 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/4765e529-9729-4d27-a252-c0c9a7b67beb-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"4765e529-9729-4d27-a252-c0c9a7b67beb\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.302470 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/4765e529-9729-4d27-a252-c0c9a7b67beb-pod-info\") pod \"rabbitmq-server-0\" (UID: \"4765e529-9729-4d27-a252-c0c9a7b67beb\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.302502 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-server-0\" (UID: \"4765e529-9729-4d27-a252-c0c9a7b67beb\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.302543 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/4765e529-9729-4d27-a252-c0c9a7b67beb-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"4765e529-9729-4d27-a252-c0c9a7b67beb\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.302600 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/4765e529-9729-4d27-a252-c0c9a7b67beb-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"4765e529-9729-4d27-a252-c0c9a7b67beb\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.302624 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xw7h7\" (UniqueName: \"kubernetes.io/projected/4765e529-9729-4d27-a252-c0c9a7b67beb-kube-api-access-xw7h7\") pod \"rabbitmq-server-0\" (UID: \"4765e529-9729-4d27-a252-c0c9a7b67beb\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.302647 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4765e529-9729-4d27-a252-c0c9a7b67beb-config-data\") pod \"rabbitmq-server-0\" (UID: \"4765e529-9729-4d27-a252-c0c9a7b67beb\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.302917 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/4765e529-9729-4d27-a252-c0c9a7b67beb-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"4765e529-9729-4d27-a252-c0c9a7b67beb\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.303239 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: 
\"kubernetes.io/empty-dir/4765e529-9729-4d27-a252-c0c9a7b67beb-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"4765e529-9729-4d27-a252-c0c9a7b67beb\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.303594 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-server-0\" (UID: \"4765e529-9729-4d27-a252-c0c9a7b67beb\") device mount path \"/mnt/openstack/pv05\"" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.304508 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/4765e529-9729-4d27-a252-c0c9a7b67beb-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"4765e529-9729-4d27-a252-c0c9a7b67beb\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.304571 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/4765e529-9729-4d27-a252-c0c9a7b67beb-server-conf\") pod \"rabbitmq-server-0\" (UID: \"4765e529-9729-4d27-a252-c0c9a7b67beb\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.304589 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4765e529-9729-4d27-a252-c0c9a7b67beb-config-data\") pod \"rabbitmq-server-0\" (UID: \"4765e529-9729-4d27-a252-c0c9a7b67beb\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.310284 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/4765e529-9729-4d27-a252-c0c9a7b67beb-pod-info\") pod \"rabbitmq-server-0\" (UID: \"4765e529-9729-4d27-a252-c0c9a7b67beb\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.310800 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/4765e529-9729-4d27-a252-c0c9a7b67beb-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"4765e529-9729-4d27-a252-c0c9a7b67beb\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.311374 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/4765e529-9729-4d27-a252-c0c9a7b67beb-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"4765e529-9729-4d27-a252-c0c9a7b67beb\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.311803 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/4765e529-9729-4d27-a252-c0c9a7b67beb-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"4765e529-9729-4d27-a252-c0c9a7b67beb\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.318075 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xw7h7\" (UniqueName: \"kubernetes.io/projected/4765e529-9729-4d27-a252-c0c9a7b67beb-kube-api-access-xw7h7\") pod \"rabbitmq-server-0\" (UID: \"4765e529-9729-4d27-a252-c0c9a7b67beb\") " 
pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.323367 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"rabbitmq-server-0\" (UID: \"4765e529-9729-4d27-a252-c0c9a7b67beb\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.357544 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.410330 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-0"] Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.411611 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.413718 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"rabbitmq-cell1-erlang-cookie" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.413927 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"rabbitmq-cell1-config-data" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.415318 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"rabbitmq-cell1-server-dockercfg-648z2" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.415497 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-rabbitmq-cell1-svc" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.415519 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"rabbitmq-cell1-default-user" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.415567 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"rabbitmq-cell1-server-conf" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.415629 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"rabbitmq-cell1-plugins-conf" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.426674 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-0"] Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.612091 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/de4d8126-91cf-4149-bec4-4accaf558308-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"de4d8126-91cf-4149-bec4-4accaf558308\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.612500 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/de4d8126-91cf-4149-bec4-4accaf558308-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"de4d8126-91cf-4149-bec4-4accaf558308\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.612566 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/de4d8126-91cf-4149-bec4-4accaf558308-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"de4d8126-91cf-4149-bec4-4accaf558308\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.612587 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/de4d8126-91cf-4149-bec4-4accaf558308-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"de4d8126-91cf-4149-bec4-4accaf558308\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.612654 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/de4d8126-91cf-4149-bec4-4accaf558308-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"de4d8126-91cf-4149-bec4-4accaf558308\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.612780 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/de4d8126-91cf-4149-bec4-4accaf558308-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"de4d8126-91cf-4149-bec4-4accaf558308\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.612853 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/de4d8126-91cf-4149-bec4-4accaf558308-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"de4d8126-91cf-4149-bec4-4accaf558308\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.612936 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/de4d8126-91cf-4149-bec4-4accaf558308-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"de4d8126-91cf-4149-bec4-4accaf558308\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.613023 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/de4d8126-91cf-4149-bec4-4accaf558308-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"de4d8126-91cf-4149-bec4-4accaf558308\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.613074 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"de4d8126-91cf-4149-bec4-4accaf558308\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.613138 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kqptr\" (UniqueName: \"kubernetes.io/projected/de4d8126-91cf-4149-bec4-4accaf558308-kube-api-access-kqptr\") pod \"rabbitmq-cell1-server-0\" (UID: \"de4d8126-91cf-4149-bec4-4accaf558308\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.715471 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" 
(UniqueName: \"kubernetes.io/configmap/de4d8126-91cf-4149-bec4-4accaf558308-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"de4d8126-91cf-4149-bec4-4accaf558308\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.715551 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/de4d8126-91cf-4149-bec4-4accaf558308-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"de4d8126-91cf-4149-bec4-4accaf558308\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.715601 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/de4d8126-91cf-4149-bec4-4accaf558308-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"de4d8126-91cf-4149-bec4-4accaf558308\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.715632 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/de4d8126-91cf-4149-bec4-4accaf558308-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"de4d8126-91cf-4149-bec4-4accaf558308\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.715687 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/de4d8126-91cf-4149-bec4-4accaf558308-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"de4d8126-91cf-4149-bec4-4accaf558308\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.715738 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"de4d8126-91cf-4149-bec4-4accaf558308\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.715770 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kqptr\" (UniqueName: \"kubernetes.io/projected/de4d8126-91cf-4149-bec4-4accaf558308-kube-api-access-kqptr\") pod \"rabbitmq-cell1-server-0\" (UID: \"de4d8126-91cf-4149-bec4-4accaf558308\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.715826 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/de4d8126-91cf-4149-bec4-4accaf558308-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"de4d8126-91cf-4149-bec4-4accaf558308\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.715935 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/de4d8126-91cf-4149-bec4-4accaf558308-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"de4d8126-91cf-4149-bec4-4accaf558308\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.716055 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/configmap/de4d8126-91cf-4149-bec4-4accaf558308-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"de4d8126-91cf-4149-bec4-4accaf558308\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.716089 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/de4d8126-91cf-4149-bec4-4accaf558308-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"de4d8126-91cf-4149-bec4-4accaf558308\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.716755 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/de4d8126-91cf-4149-bec4-4accaf558308-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"de4d8126-91cf-4149-bec4-4accaf558308\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.716992 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/de4d8126-91cf-4149-bec4-4accaf558308-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"de4d8126-91cf-4149-bec4-4accaf558308\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.717082 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"de4d8126-91cf-4149-bec4-4accaf558308\") device mount path \"/mnt/openstack/pv08\"" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.717373 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/de4d8126-91cf-4149-bec4-4accaf558308-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"de4d8126-91cf-4149-bec4-4accaf558308\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.718309 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/de4d8126-91cf-4149-bec4-4accaf558308-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"de4d8126-91cf-4149-bec4-4accaf558308\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.718322 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/de4d8126-91cf-4149-bec4-4accaf558308-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"de4d8126-91cf-4149-bec4-4accaf558308\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.721961 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/de4d8126-91cf-4149-bec4-4accaf558308-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"de4d8126-91cf-4149-bec4-4accaf558308\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.723083 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: 
\"kubernetes.io/secret/de4d8126-91cf-4149-bec4-4accaf558308-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"de4d8126-91cf-4149-bec4-4accaf558308\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.723884 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/de4d8126-91cf-4149-bec4-4accaf558308-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"de4d8126-91cf-4149-bec4-4accaf558308\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.723959 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/de4d8126-91cf-4149-bec4-4accaf558308-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"de4d8126-91cf-4149-bec4-4accaf558308\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.743558 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"de4d8126-91cf-4149-bec4-4accaf558308\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.745551 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kqptr\" (UniqueName: \"kubernetes.io/projected/de4d8126-91cf-4149-bec4-4accaf558308-kube-api-access-kqptr\") pod \"rabbitmq-cell1-server-0\" (UID: \"de4d8126-91cf-4149-bec4-4accaf558308\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.788686 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:38:42 crc kubenswrapper[4558]: I0120 17:38:42.811937 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-0" event={"ID":"4765e529-9729-4d27-a252-c0c9a7b67beb","Type":"ContainerStarted","Data":"4884b3dd50e53b21f38132f924e5c4c791bcc5f669fa27a312699a6c27ce6c02"} Jan 20 17:38:43 crc kubenswrapper[4558]: I0120 17:38:43.038573 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:38:43 crc kubenswrapper[4558]: I0120 17:38:43.464299 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-0"] Jan 20 17:38:43 crc kubenswrapper[4558]: I0120 17:38:43.822745 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" event={"ID":"de4d8126-91cf-4149-bec4-4accaf558308","Type":"ContainerStarted","Data":"e82ed5c52b8d8654bdc167e0ef446792c35695ed1870b83a2674a302926cfaf9"} Jan 20 17:38:44 crc kubenswrapper[4558]: I0120 17:38:44.105012 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:38:44 crc kubenswrapper[4558]: I0120 17:38:44.106225 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:38:44 crc kubenswrapper[4558]: I0120 17:38:44.108602 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"galera-openstack-dockercfg-xjdvx" Jan 20 17:38:44 crc kubenswrapper[4558]: I0120 17:38:44.108775 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-config-data" Jan 20 17:38:44 crc kubenswrapper[4558]: I0120 17:38:44.108816 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-galera-openstack-svc" Jan 20 17:38:44 crc kubenswrapper[4558]: I0120 17:38:44.109065 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-scripts" Jan 20 17:38:44 crc kubenswrapper[4558]: I0120 17:38:44.114037 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"combined-ca-bundle" Jan 20 17:38:44 crc kubenswrapper[4558]: I0120 17:38:44.119550 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:38:44 crc kubenswrapper[4558]: I0120 17:38:44.246305 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4bcc759d-3647-4194-9a91-acac49948173-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"4bcc759d-3647-4194-9a91-acac49948173\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:38:44 crc kubenswrapper[4558]: I0120 17:38:44.246438 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/4bcc759d-3647-4194-9a91-acac49948173-kolla-config\") pod \"openstack-galera-0\" (UID: \"4bcc759d-3647-4194-9a91-acac49948173\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:38:44 crc kubenswrapper[4558]: I0120 17:38:44.246476 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/4bcc759d-3647-4194-9a91-acac49948173-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"4bcc759d-3647-4194-9a91-acac49948173\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:38:44 crc kubenswrapper[4558]: I0120 17:38:44.246501 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x6vsj\" (UniqueName: \"kubernetes.io/projected/4bcc759d-3647-4194-9a91-acac49948173-kube-api-access-x6vsj\") pod \"openstack-galera-0\" (UID: \"4bcc759d-3647-4194-9a91-acac49948173\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:38:44 crc kubenswrapper[4558]: I0120 17:38:44.246529 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"openstack-galera-0\" (UID: \"4bcc759d-3647-4194-9a91-acac49948173\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:38:44 crc kubenswrapper[4558]: I0120 17:38:44.246559 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/4bcc759d-3647-4194-9a91-acac49948173-config-data-default\") pod \"openstack-galera-0\" (UID: \"4bcc759d-3647-4194-9a91-acac49948173\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 
17:38:44 crc kubenswrapper[4558]: I0120 17:38:44.246998 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/4bcc759d-3647-4194-9a91-acac49948173-config-data-generated\") pod \"openstack-galera-0\" (UID: \"4bcc759d-3647-4194-9a91-acac49948173\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:38:44 crc kubenswrapper[4558]: I0120 17:38:44.247083 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4bcc759d-3647-4194-9a91-acac49948173-operator-scripts\") pod \"openstack-galera-0\" (UID: \"4bcc759d-3647-4194-9a91-acac49948173\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:38:44 crc kubenswrapper[4558]: I0120 17:38:44.348990 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4bcc759d-3647-4194-9a91-acac49948173-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"4bcc759d-3647-4194-9a91-acac49948173\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:38:44 crc kubenswrapper[4558]: I0120 17:38:44.349155 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/4bcc759d-3647-4194-9a91-acac49948173-kolla-config\") pod \"openstack-galera-0\" (UID: \"4bcc759d-3647-4194-9a91-acac49948173\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:38:44 crc kubenswrapper[4558]: I0120 17:38:44.349223 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/4bcc759d-3647-4194-9a91-acac49948173-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"4bcc759d-3647-4194-9a91-acac49948173\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:38:44 crc kubenswrapper[4558]: I0120 17:38:44.349245 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x6vsj\" (UniqueName: \"kubernetes.io/projected/4bcc759d-3647-4194-9a91-acac49948173-kube-api-access-x6vsj\") pod \"openstack-galera-0\" (UID: \"4bcc759d-3647-4194-9a91-acac49948173\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:38:44 crc kubenswrapper[4558]: I0120 17:38:44.349290 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"openstack-galera-0\" (UID: \"4bcc759d-3647-4194-9a91-acac49948173\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:38:44 crc kubenswrapper[4558]: I0120 17:38:44.349343 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/4bcc759d-3647-4194-9a91-acac49948173-config-data-default\") pod \"openstack-galera-0\" (UID: \"4bcc759d-3647-4194-9a91-acac49948173\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:38:44 crc kubenswrapper[4558]: I0120 17:38:44.349462 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/4bcc759d-3647-4194-9a91-acac49948173-config-data-generated\") pod \"openstack-galera-0\" (UID: \"4bcc759d-3647-4194-9a91-acac49948173\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:38:44 crc kubenswrapper[4558]: I0120 
17:38:44.349506 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4bcc759d-3647-4194-9a91-acac49948173-operator-scripts\") pod \"openstack-galera-0\" (UID: \"4bcc759d-3647-4194-9a91-acac49948173\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:38:44 crc kubenswrapper[4558]: I0120 17:38:44.349647 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"openstack-galera-0\" (UID: \"4bcc759d-3647-4194-9a91-acac49948173\") device mount path \"/mnt/openstack/pv14\"" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:38:44 crc kubenswrapper[4558]: I0120 17:38:44.350121 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/4bcc759d-3647-4194-9a91-acac49948173-config-data-generated\") pod \"openstack-galera-0\" (UID: \"4bcc759d-3647-4194-9a91-acac49948173\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:38:44 crc kubenswrapper[4558]: I0120 17:38:44.350472 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/4bcc759d-3647-4194-9a91-acac49948173-config-data-default\") pod \"openstack-galera-0\" (UID: \"4bcc759d-3647-4194-9a91-acac49948173\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:38:44 crc kubenswrapper[4558]: I0120 17:38:44.351387 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4bcc759d-3647-4194-9a91-acac49948173-operator-scripts\") pod \"openstack-galera-0\" (UID: \"4bcc759d-3647-4194-9a91-acac49948173\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:38:44 crc kubenswrapper[4558]: I0120 17:38:44.353508 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4bcc759d-3647-4194-9a91-acac49948173-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"4bcc759d-3647-4194-9a91-acac49948173\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:38:44 crc kubenswrapper[4558]: I0120 17:38:44.354651 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/4bcc759d-3647-4194-9a91-acac49948173-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"4bcc759d-3647-4194-9a91-acac49948173\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:38:44 crc kubenswrapper[4558]: I0120 17:38:44.362145 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/4bcc759d-3647-4194-9a91-acac49948173-kolla-config\") pod \"openstack-galera-0\" (UID: \"4bcc759d-3647-4194-9a91-acac49948173\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:38:44 crc kubenswrapper[4558]: I0120 17:38:44.367074 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x6vsj\" (UniqueName: \"kubernetes.io/projected/4bcc759d-3647-4194-9a91-acac49948173-kube-api-access-x6vsj\") pod \"openstack-galera-0\" (UID: \"4bcc759d-3647-4194-9a91-acac49948173\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:38:44 crc kubenswrapper[4558]: I0120 17:38:44.370220 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage14-crc\" 
(UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"openstack-galera-0\" (UID: \"4bcc759d-3647-4194-9a91-acac49948173\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:38:44 crc kubenswrapper[4558]: I0120 17:38:44.421442 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:38:44 crc kubenswrapper[4558]: I0120 17:38:44.833265 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" event={"ID":"de4d8126-91cf-4149-bec4-4accaf558308","Type":"ContainerStarted","Data":"8a98a6a80e25b53e4ba0486d67f1bca724bbd186dd76d5aaf082369a41c0a1d4"} Jan 20 17:38:44 crc kubenswrapper[4558]: I0120 17:38:44.835216 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-0" event={"ID":"4765e529-9729-4d27-a252-c0c9a7b67beb","Type":"ContainerStarted","Data":"d93a1933ae3d32819f090f6a7b2f44526a5ddb9fbca636dcce7b4b06b41e979c"} Jan 20 17:38:44 crc kubenswrapper[4558]: I0120 17:38:44.846748 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:38:45 crc kubenswrapper[4558]: I0120 17:38:45.380724 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:38:45 crc kubenswrapper[4558]: I0120 17:38:45.381967 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:38:45 crc kubenswrapper[4558]: I0120 17:38:45.384198 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-cell1-config-data" Jan 20 17:38:45 crc kubenswrapper[4558]: I0120 17:38:45.384401 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"galera-openstack-cell1-dockercfg-mk4rp" Jan 20 17:38:45 crc kubenswrapper[4558]: I0120 17:38:45.384753 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-cell1-scripts" Jan 20 17:38:45 crc kubenswrapper[4558]: I0120 17:38:45.391833 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:38:45 crc kubenswrapper[4558]: I0120 17:38:45.391967 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-galera-openstack-cell1-svc" Jan 20 17:38:45 crc kubenswrapper[4558]: I0120 17:38:45.476419 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"openstack-cell1-galera-0\" (UID: \"0b56d0cd-1994-4e39-9c78-82b5105222db\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:38:45 crc kubenswrapper[4558]: I0120 17:38:45.476988 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b56d0cd-1994-4e39-9c78-82b5105222db-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"0b56d0cd-1994-4e39-9c78-82b5105222db\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:38:45 crc kubenswrapper[4558]: I0120 17:38:45.477026 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/0b56d0cd-1994-4e39-9c78-82b5105222db-galera-tls-certs\") pod 
\"openstack-cell1-galera-0\" (UID: \"0b56d0cd-1994-4e39-9c78-82b5105222db\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:38:45 crc kubenswrapper[4558]: I0120 17:38:45.477051 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/0b56d0cd-1994-4e39-9c78-82b5105222db-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"0b56d0cd-1994-4e39-9c78-82b5105222db\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:38:45 crc kubenswrapper[4558]: I0120 17:38:45.477078 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/0b56d0cd-1994-4e39-9c78-82b5105222db-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"0b56d0cd-1994-4e39-9c78-82b5105222db\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:38:45 crc kubenswrapper[4558]: I0120 17:38:45.477315 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/0b56d0cd-1994-4e39-9c78-82b5105222db-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"0b56d0cd-1994-4e39-9c78-82b5105222db\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:38:45 crc kubenswrapper[4558]: I0120 17:38:45.477565 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lg4jc\" (UniqueName: \"kubernetes.io/projected/0b56d0cd-1994-4e39-9c78-82b5105222db-kube-api-access-lg4jc\") pod \"openstack-cell1-galera-0\" (UID: \"0b56d0cd-1994-4e39-9c78-82b5105222db\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:38:45 crc kubenswrapper[4558]: I0120 17:38:45.477811 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0b56d0cd-1994-4e39-9c78-82b5105222db-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"0b56d0cd-1994-4e39-9c78-82b5105222db\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:38:45 crc kubenswrapper[4558]: I0120 17:38:45.579138 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b56d0cd-1994-4e39-9c78-82b5105222db-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"0b56d0cd-1994-4e39-9c78-82b5105222db\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:38:45 crc kubenswrapper[4558]: I0120 17:38:45.579192 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/0b56d0cd-1994-4e39-9c78-82b5105222db-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"0b56d0cd-1994-4e39-9c78-82b5105222db\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:38:45 crc kubenswrapper[4558]: I0120 17:38:45.579218 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/0b56d0cd-1994-4e39-9c78-82b5105222db-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"0b56d0cd-1994-4e39-9c78-82b5105222db\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:38:45 crc kubenswrapper[4558]: I0120 17:38:45.579247 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"config-data-default\" (UniqueName: \"kubernetes.io/configmap/0b56d0cd-1994-4e39-9c78-82b5105222db-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"0b56d0cd-1994-4e39-9c78-82b5105222db\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:38:45 crc kubenswrapper[4558]: I0120 17:38:45.579271 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/0b56d0cd-1994-4e39-9c78-82b5105222db-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"0b56d0cd-1994-4e39-9c78-82b5105222db\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:38:45 crc kubenswrapper[4558]: I0120 17:38:45.579306 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lg4jc\" (UniqueName: \"kubernetes.io/projected/0b56d0cd-1994-4e39-9c78-82b5105222db-kube-api-access-lg4jc\") pod \"openstack-cell1-galera-0\" (UID: \"0b56d0cd-1994-4e39-9c78-82b5105222db\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:38:45 crc kubenswrapper[4558]: I0120 17:38:45.579353 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0b56d0cd-1994-4e39-9c78-82b5105222db-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"0b56d0cd-1994-4e39-9c78-82b5105222db\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:38:45 crc kubenswrapper[4558]: I0120 17:38:45.579405 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"openstack-cell1-galera-0\" (UID: \"0b56d0cd-1994-4e39-9c78-82b5105222db\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:38:45 crc kubenswrapper[4558]: I0120 17:38:45.579666 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"openstack-cell1-galera-0\" (UID: \"0b56d0cd-1994-4e39-9c78-82b5105222db\") device mount path \"/mnt/openstack/pv15\"" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:38:45 crc kubenswrapper[4558]: I0120 17:38:45.579815 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/0b56d0cd-1994-4e39-9c78-82b5105222db-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"0b56d0cd-1994-4e39-9c78-82b5105222db\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:38:45 crc kubenswrapper[4558]: I0120 17:38:45.580297 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/0b56d0cd-1994-4e39-9c78-82b5105222db-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"0b56d0cd-1994-4e39-9c78-82b5105222db\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:38:45 crc kubenswrapper[4558]: I0120 17:38:45.580443 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/0b56d0cd-1994-4e39-9c78-82b5105222db-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"0b56d0cd-1994-4e39-9c78-82b5105222db\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:38:45 crc kubenswrapper[4558]: I0120 17:38:45.581279 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0b56d0cd-1994-4e39-9c78-82b5105222db-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"0b56d0cd-1994-4e39-9c78-82b5105222db\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:38:45 crc kubenswrapper[4558]: I0120 17:38:45.586104 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/0b56d0cd-1994-4e39-9c78-82b5105222db-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"0b56d0cd-1994-4e39-9c78-82b5105222db\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:38:45 crc kubenswrapper[4558]: I0120 17:38:45.586951 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b56d0cd-1994-4e39-9c78-82b5105222db-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"0b56d0cd-1994-4e39-9c78-82b5105222db\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:38:45 crc kubenswrapper[4558]: I0120 17:38:45.594492 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lg4jc\" (UniqueName: \"kubernetes.io/projected/0b56d0cd-1994-4e39-9c78-82b5105222db-kube-api-access-lg4jc\") pod \"openstack-cell1-galera-0\" (UID: \"0b56d0cd-1994-4e39-9c78-82b5105222db\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:38:45 crc kubenswrapper[4558]: I0120 17:38:45.598519 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"openstack-cell1-galera-0\" (UID: \"0b56d0cd-1994-4e39-9c78-82b5105222db\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:38:45 crc kubenswrapper[4558]: I0120 17:38:45.696484 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:38:45 crc kubenswrapper[4558]: I0120 17:38:45.843560 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"4bcc759d-3647-4194-9a91-acac49948173","Type":"ContainerStarted","Data":"d8d925f249220118354cc5b256984244efa0db9e1f2f534a041c8fbdf41960e2"} Jan 20 17:38:45 crc kubenswrapper[4558]: I0120 17:38:45.843822 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"4bcc759d-3647-4194-9a91-acac49948173","Type":"ContainerStarted","Data":"6bab203ff066f66d425ed0dfd061e3206febb47ae8836ab8898de6689a2e3105"} Jan 20 17:38:45 crc kubenswrapper[4558]: I0120 17:38:45.893068 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:38:45 crc kubenswrapper[4558]: I0120 17:38:45.894090 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:38:45 crc kubenswrapper[4558]: I0120 17:38:45.897639 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"memcached-config-data" Jan 20 17:38:45 crc kubenswrapper[4558]: I0120 17:38:45.898223 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"memcached-memcached-dockercfg-sqmsc" Jan 20 17:38:45 crc kubenswrapper[4558]: I0120 17:38:45.898324 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-memcached-svc" Jan 20 17:38:45 crc kubenswrapper[4558]: I0120 17:38:45.924782 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:38:45 crc kubenswrapper[4558]: I0120 17:38:45.989980 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/02756298-ea84-4151-8ce2-32d602e2f7a7-memcached-tls-certs\") pod \"memcached-0\" (UID: \"02756298-ea84-4151-8ce2-32d602e2f7a7\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:38:45 crc kubenswrapper[4558]: I0120 17:38:45.990029 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gjnms\" (UniqueName: \"kubernetes.io/projected/02756298-ea84-4151-8ce2-32d602e2f7a7-kube-api-access-gjnms\") pod \"memcached-0\" (UID: \"02756298-ea84-4151-8ce2-32d602e2f7a7\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:38:45 crc kubenswrapper[4558]: I0120 17:38:45.990084 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/02756298-ea84-4151-8ce2-32d602e2f7a7-kolla-config\") pod \"memcached-0\" (UID: \"02756298-ea84-4151-8ce2-32d602e2f7a7\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:38:45 crc kubenswrapper[4558]: I0120 17:38:45.990117 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/02756298-ea84-4151-8ce2-32d602e2f7a7-config-data\") pod \"memcached-0\" (UID: \"02756298-ea84-4151-8ce2-32d602e2f7a7\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:38:45 crc kubenswrapper[4558]: I0120 17:38:45.990323 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02756298-ea84-4151-8ce2-32d602e2f7a7-combined-ca-bundle\") pod \"memcached-0\" (UID: \"02756298-ea84-4151-8ce2-32d602e2f7a7\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:38:46 crc kubenswrapper[4558]: I0120 17:38:46.092963 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/02756298-ea84-4151-8ce2-32d602e2f7a7-kolla-config\") pod \"memcached-0\" (UID: \"02756298-ea84-4151-8ce2-32d602e2f7a7\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:38:46 crc kubenswrapper[4558]: I0120 17:38:46.093022 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/02756298-ea84-4151-8ce2-32d602e2f7a7-config-data\") pod \"memcached-0\" (UID: \"02756298-ea84-4151-8ce2-32d602e2f7a7\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:38:46 crc kubenswrapper[4558]: I0120 17:38:46.093105 4558 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02756298-ea84-4151-8ce2-32d602e2f7a7-combined-ca-bundle\") pod \"memcached-0\" (UID: \"02756298-ea84-4151-8ce2-32d602e2f7a7\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:38:46 crc kubenswrapper[4558]: I0120 17:38:46.093201 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/02756298-ea84-4151-8ce2-32d602e2f7a7-memcached-tls-certs\") pod \"memcached-0\" (UID: \"02756298-ea84-4151-8ce2-32d602e2f7a7\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:38:46 crc kubenswrapper[4558]: I0120 17:38:46.093240 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gjnms\" (UniqueName: \"kubernetes.io/projected/02756298-ea84-4151-8ce2-32d602e2f7a7-kube-api-access-gjnms\") pod \"memcached-0\" (UID: \"02756298-ea84-4151-8ce2-32d602e2f7a7\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:38:46 crc kubenswrapper[4558]: I0120 17:38:46.094045 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/02756298-ea84-4151-8ce2-32d602e2f7a7-kolla-config\") pod \"memcached-0\" (UID: \"02756298-ea84-4151-8ce2-32d602e2f7a7\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:38:46 crc kubenswrapper[4558]: I0120 17:38:46.094108 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/02756298-ea84-4151-8ce2-32d602e2f7a7-config-data\") pod \"memcached-0\" (UID: \"02756298-ea84-4151-8ce2-32d602e2f7a7\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:38:46 crc kubenswrapper[4558]: I0120 17:38:46.098450 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/02756298-ea84-4151-8ce2-32d602e2f7a7-memcached-tls-certs\") pod \"memcached-0\" (UID: \"02756298-ea84-4151-8ce2-32d602e2f7a7\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:38:46 crc kubenswrapper[4558]: I0120 17:38:46.098490 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02756298-ea84-4151-8ce2-32d602e2f7a7-combined-ca-bundle\") pod \"memcached-0\" (UID: \"02756298-ea84-4151-8ce2-32d602e2f7a7\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:38:46 crc kubenswrapper[4558]: I0120 17:38:46.109697 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gjnms\" (UniqueName: \"kubernetes.io/projected/02756298-ea84-4151-8ce2-32d602e2f7a7-kube-api-access-gjnms\") pod \"memcached-0\" (UID: \"02756298-ea84-4151-8ce2-32d602e2f7a7\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:38:46 crc kubenswrapper[4558]: I0120 17:38:46.180983 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:38:46 crc kubenswrapper[4558]: W0120 17:38:46.184906 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0b56d0cd_1994_4e39_9c78_82b5105222db.slice/crio-115e797d7f0e6b91193595613d09f836d34ed6f0c932ce0636d93690f028767f WatchSource:0}: Error finding container 115e797d7f0e6b91193595613d09f836d34ed6f0c932ce0636d93690f028767f: Status 404 returned error can't find the container with id 
115e797d7f0e6b91193595613d09f836d34ed6f0c932ce0636d93690f028767f Jan 20 17:38:46 crc kubenswrapper[4558]: I0120 17:38:46.224903 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:38:46 crc kubenswrapper[4558]: I0120 17:38:46.420596 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:38:46 crc kubenswrapper[4558]: I0120 17:38:46.855896 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"0b56d0cd-1994-4e39-9c78-82b5105222db","Type":"ContainerStarted","Data":"b03018c6cafe6bfe05fbcfd442923f9ba65a46cf60d3b585db1ba6e573cc935a"} Jan 20 17:38:46 crc kubenswrapper[4558]: I0120 17:38:46.856371 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"0b56d0cd-1994-4e39-9c78-82b5105222db","Type":"ContainerStarted","Data":"115e797d7f0e6b91193595613d09f836d34ed6f0c932ce0636d93690f028767f"} Jan 20 17:38:46 crc kubenswrapper[4558]: I0120 17:38:46.858402 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"02756298-ea84-4151-8ce2-32d602e2f7a7","Type":"ContainerStarted","Data":"04b878c52f22dc6b748fcd22473200dd00e366b507cfd99f30210f8c582eb868"} Jan 20 17:38:46 crc kubenswrapper[4558]: I0120 17:38:46.858475 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"02756298-ea84-4151-8ce2-32d602e2f7a7","Type":"ContainerStarted","Data":"618d6b185418a6c40e6fbef551b96474f87263cd7b28bbbd6723b9adc2e0f4b4"} Jan 20 17:38:47 crc kubenswrapper[4558]: I0120 17:38:47.634217 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/memcached-0" podStartSLOduration=2.634199485 podStartE2EDuration="2.634199485s" podCreationTimestamp="2026-01-20 17:38:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:38:46.897683589 +0000 UTC m=+3420.658021556" watchObservedRunningTime="2026-01-20 17:38:47.634199485 +0000 UTC m=+3421.394537452" Jan 20 17:38:47 crc kubenswrapper[4558]: I0120 17:38:47.638218 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:38:47 crc kubenswrapper[4558]: I0120 17:38:47.639097 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:38:47 crc kubenswrapper[4558]: I0120 17:38:47.641031 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"telemetry-ceilometer-dockercfg-77r4q" Jan 20 17:38:47 crc kubenswrapper[4558]: I0120 17:38:47.654568 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:38:47 crc kubenswrapper[4558]: I0120 17:38:47.725819 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bb8mv\" (UniqueName: \"kubernetes.io/projected/d0c8cc0b-d00f-4b57-ab57-c70a68ff043a-kube-api-access-bb8mv\") pod \"kube-state-metrics-0\" (UID: \"d0c8cc0b-d00f-4b57-ab57-c70a68ff043a\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:38:47 crc kubenswrapper[4558]: I0120 17:38:47.827799 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bb8mv\" (UniqueName: \"kubernetes.io/projected/d0c8cc0b-d00f-4b57-ab57-c70a68ff043a-kube-api-access-bb8mv\") pod \"kube-state-metrics-0\" (UID: \"d0c8cc0b-d00f-4b57-ab57-c70a68ff043a\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:38:47 crc kubenswrapper[4558]: I0120 17:38:47.845081 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bb8mv\" (UniqueName: \"kubernetes.io/projected/d0c8cc0b-d00f-4b57-ab57-c70a68ff043a-kube-api-access-bb8mv\") pod \"kube-state-metrics-0\" (UID: \"d0c8cc0b-d00f-4b57-ab57-c70a68ff043a\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:38:47 crc kubenswrapper[4558]: I0120 17:38:47.864372 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:38:47 crc kubenswrapper[4558]: I0120 17:38:47.956073 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:38:48 crc kubenswrapper[4558]: I0120 17:38:48.369535 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:38:48 crc kubenswrapper[4558]: W0120 17:38:48.375239 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd0c8cc0b_d00f_4b57_ab57_c70a68ff043a.slice/crio-b50f2f1e1ea94daf051f102c8a2333bc6f7926df4c7722562f05ec4cea9124bc WatchSource:0}: Error finding container b50f2f1e1ea94daf051f102c8a2333bc6f7926df4c7722562f05ec4cea9124bc: Status 404 returned error can't find the container with id b50f2f1e1ea94daf051f102c8a2333bc6f7926df4c7722562f05ec4cea9124bc Jan 20 17:38:48 crc kubenswrapper[4558]: I0120 17:38:48.876498 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"d0c8cc0b-d00f-4b57-ab57-c70a68ff043a","Type":"ContainerStarted","Data":"14660d2bf1b03430df88f4eb5caeb6c51bea9e40847e3711aa7dcc49d73097fc"} Jan 20 17:38:48 crc kubenswrapper[4558]: I0120 17:38:48.876782 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"d0c8cc0b-d00f-4b57-ab57-c70a68ff043a","Type":"ContainerStarted","Data":"b50f2f1e1ea94daf051f102c8a2333bc6f7926df4c7722562f05ec4cea9124bc"} Jan 20 17:38:48 crc kubenswrapper[4558]: I0120 17:38:48.876806 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:38:48 crc kubenswrapper[4558]: I0120 17:38:48.878366 4558 generic.go:334] "Generic (PLEG): container finished" podID="4bcc759d-3647-4194-9a91-acac49948173" containerID="d8d925f249220118354cc5b256984244efa0db9e1f2f534a041c8fbdf41960e2" exitCode=0 Jan 20 17:38:48 crc kubenswrapper[4558]: I0120 17:38:48.878466 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"4bcc759d-3647-4194-9a91-acac49948173","Type":"ContainerDied","Data":"d8d925f249220118354cc5b256984244efa0db9e1f2f534a041c8fbdf41960e2"} Jan 20 17:38:48 crc kubenswrapper[4558]: I0120 17:38:48.908940 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/kube-state-metrics-0" podStartSLOduration=1.6327854450000001 podStartE2EDuration="1.908915045s" podCreationTimestamp="2026-01-20 17:38:47 +0000 UTC" firstStartedPulling="2026-01-20 17:38:48.377160639 +0000 UTC m=+3422.137498607" lastFinishedPulling="2026-01-20 17:38:48.65329024 +0000 UTC m=+3422.413628207" observedRunningTime="2026-01-20 17:38:48.902980137 +0000 UTC m=+3422.663318104" watchObservedRunningTime="2026-01-20 17:38:48.908915045 +0000 UTC m=+3422.669253012" Jan 20 17:38:49 crc kubenswrapper[4558]: I0120 17:38:49.888907 4558 generic.go:334] "Generic (PLEG): container finished" podID="0b56d0cd-1994-4e39-9c78-82b5105222db" containerID="b03018c6cafe6bfe05fbcfd442923f9ba65a46cf60d3b585db1ba6e573cc935a" exitCode=0 Jan 20 17:38:49 crc kubenswrapper[4558]: I0120 17:38:49.889002 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"0b56d0cd-1994-4e39-9c78-82b5105222db","Type":"ContainerDied","Data":"b03018c6cafe6bfe05fbcfd442923f9ba65a46cf60d3b585db1ba6e573cc935a"} Jan 20 17:38:49 crc kubenswrapper[4558]: I0120 17:38:49.892533 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" 
event={"ID":"4bcc759d-3647-4194-9a91-acac49948173","Type":"ContainerStarted","Data":"1b4d88c338068dde213bad574869f30ce42019618f93ed6c25b82149cf965480"} Jan 20 17:38:49 crc kubenswrapper[4558]: I0120 17:38:49.936554 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/openstack-galera-0" podStartSLOduration=6.936533264 podStartE2EDuration="6.936533264s" podCreationTimestamp="2026-01-20 17:38:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:38:49.93033517 +0000 UTC m=+3423.690673137" watchObservedRunningTime="2026-01-20 17:38:49.936533264 +0000 UTC m=+3423.696871221" Jan 20 17:38:50 crc kubenswrapper[4558]: I0120 17:38:50.766110 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:38:50 crc kubenswrapper[4558]: I0120 17:38:50.767795 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:38:50 crc kubenswrapper[4558]: I0120 17:38:50.769578 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ovn-metrics" Jan 20 17:38:50 crc kubenswrapper[4558]: I0120 17:38:50.769938 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ovncluster-ovndbcluster-nb-dockercfg-jdmxf" Jan 20 17:38:50 crc kubenswrapper[4558]: I0120 17:38:50.770379 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ovndbcluster-nb-ovndbs" Jan 20 17:38:50 crc kubenswrapper[4558]: I0120 17:38:50.771179 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovndbcluster-nb-scripts" Jan 20 17:38:50 crc kubenswrapper[4558]: I0120 17:38:50.771331 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovndbcluster-nb-config" Jan 20 17:38:50 crc kubenswrapper[4558]: I0120 17:38:50.776677 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:38:50 crc kubenswrapper[4558]: I0120 17:38:50.881949 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64c80877-f13c-4a3d-b352-66dc008f4e1b-config\") pod \"ovsdbserver-nb-0\" (UID: \"64c80877-f13c-4a3d-b352-66dc008f4e1b\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:38:50 crc kubenswrapper[4558]: I0120 17:38:50.882042 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64c80877-f13c-4a3d-b352-66dc008f4e1b-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"64c80877-f13c-4a3d-b352-66dc008f4e1b\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:38:50 crc kubenswrapper[4558]: I0120 17:38:50.882141 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/64c80877-f13c-4a3d-b352-66dc008f4e1b-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"64c80877-f13c-4a3d-b352-66dc008f4e1b\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:38:50 crc kubenswrapper[4558]: I0120 17:38:50.882282 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/64c80877-f13c-4a3d-b352-66dc008f4e1b-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"64c80877-f13c-4a3d-b352-66dc008f4e1b\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:38:50 crc kubenswrapper[4558]: I0120 17:38:50.882364 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jqplc\" (UniqueName: \"kubernetes.io/projected/64c80877-f13c-4a3d-b352-66dc008f4e1b-kube-api-access-jqplc\") pod \"ovsdbserver-nb-0\" (UID: \"64c80877-f13c-4a3d-b352-66dc008f4e1b\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:38:50 crc kubenswrapper[4558]: I0120 17:38:50.882441 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/64c80877-f13c-4a3d-b352-66dc008f4e1b-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"64c80877-f13c-4a3d-b352-66dc008f4e1b\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:38:50 crc kubenswrapper[4558]: I0120 17:38:50.882498 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"64c80877-f13c-4a3d-b352-66dc008f4e1b\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:38:50 crc kubenswrapper[4558]: I0120 17:38:50.882548 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/64c80877-f13c-4a3d-b352-66dc008f4e1b-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"64c80877-f13c-4a3d-b352-66dc008f4e1b\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:38:50 crc kubenswrapper[4558]: I0120 17:38:50.905671 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"0b56d0cd-1994-4e39-9c78-82b5105222db","Type":"ContainerStarted","Data":"0155fd8ce44500446dfb48466cd9d3cc3b7b70429fb0d969d7884dee5362f7a9"} Jan 20 17:38:50 crc kubenswrapper[4558]: I0120 17:38:50.930146 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/openstack-cell1-galera-0" podStartSLOduration=6.930114805 podStartE2EDuration="6.930114805s" podCreationTimestamp="2026-01-20 17:38:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:38:50.923505839 +0000 UTC m=+3424.683843805" watchObservedRunningTime="2026-01-20 17:38:50.930114805 +0000 UTC m=+3424.690452772" Jan 20 17:38:50 crc kubenswrapper[4558]: I0120 17:38:50.984576 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64c80877-f13c-4a3d-b352-66dc008f4e1b-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"64c80877-f13c-4a3d-b352-66dc008f4e1b\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:38:50 crc kubenswrapper[4558]: I0120 17:38:50.984660 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/64c80877-f13c-4a3d-b352-66dc008f4e1b-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"64c80877-f13c-4a3d-b352-66dc008f4e1b\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:38:50 crc kubenswrapper[4558]: I0120 17:38:50.984739 4558 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/64c80877-f13c-4a3d-b352-66dc008f4e1b-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"64c80877-f13c-4a3d-b352-66dc008f4e1b\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:38:50 crc kubenswrapper[4558]: I0120 17:38:50.984792 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jqplc\" (UniqueName: \"kubernetes.io/projected/64c80877-f13c-4a3d-b352-66dc008f4e1b-kube-api-access-jqplc\") pod \"ovsdbserver-nb-0\" (UID: \"64c80877-f13c-4a3d-b352-66dc008f4e1b\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:38:50 crc kubenswrapper[4558]: I0120 17:38:50.984858 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/64c80877-f13c-4a3d-b352-66dc008f4e1b-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"64c80877-f13c-4a3d-b352-66dc008f4e1b\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:38:50 crc kubenswrapper[4558]: I0120 17:38:50.984899 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"64c80877-f13c-4a3d-b352-66dc008f4e1b\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:38:50 crc kubenswrapper[4558]: I0120 17:38:50.984968 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/64c80877-f13c-4a3d-b352-66dc008f4e1b-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"64c80877-f13c-4a3d-b352-66dc008f4e1b\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:38:50 crc kubenswrapper[4558]: I0120 17:38:50.985071 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64c80877-f13c-4a3d-b352-66dc008f4e1b-config\") pod \"ovsdbserver-nb-0\" (UID: \"64c80877-f13c-4a3d-b352-66dc008f4e1b\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:38:50 crc kubenswrapper[4558]: I0120 17:38:50.985433 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/64c80877-f13c-4a3d-b352-66dc008f4e1b-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"64c80877-f13c-4a3d-b352-66dc008f4e1b\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:38:50 crc kubenswrapper[4558]: I0120 17:38:50.985896 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"64c80877-f13c-4a3d-b352-66dc008f4e1b\") device mount path \"/mnt/openstack/pv02\"" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:38:50 crc kubenswrapper[4558]: I0120 17:38:50.986384 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64c80877-f13c-4a3d-b352-66dc008f4e1b-config\") pod \"ovsdbserver-nb-0\" (UID: \"64c80877-f13c-4a3d-b352-66dc008f4e1b\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:38:50 crc kubenswrapper[4558]: I0120 17:38:50.987369 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/64c80877-f13c-4a3d-b352-66dc008f4e1b-scripts\") pod \"ovsdbserver-nb-0\" (UID: 
\"64c80877-f13c-4a3d-b352-66dc008f4e1b\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:38:50 crc kubenswrapper[4558]: I0120 17:38:50.991259 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/64c80877-f13c-4a3d-b352-66dc008f4e1b-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"64c80877-f13c-4a3d-b352-66dc008f4e1b\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:38:50 crc kubenswrapper[4558]: I0120 17:38:50.991569 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/64c80877-f13c-4a3d-b352-66dc008f4e1b-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"64c80877-f13c-4a3d-b352-66dc008f4e1b\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:38:50 crc kubenswrapper[4558]: I0120 17:38:50.994191 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64c80877-f13c-4a3d-b352-66dc008f4e1b-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"64c80877-f13c-4a3d-b352-66dc008f4e1b\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:38:51 crc kubenswrapper[4558]: I0120 17:38:51.001251 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jqplc\" (UniqueName: \"kubernetes.io/projected/64c80877-f13c-4a3d-b352-66dc008f4e1b-kube-api-access-jqplc\") pod \"ovsdbserver-nb-0\" (UID: \"64c80877-f13c-4a3d-b352-66dc008f4e1b\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:38:51 crc kubenswrapper[4558]: I0120 17:38:51.008276 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"64c80877-f13c-4a3d-b352-66dc008f4e1b\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:38:51 crc kubenswrapper[4558]: I0120 17:38:51.084113 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:38:51 crc kubenswrapper[4558]: I0120 17:38:51.227001 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:38:51 crc kubenswrapper[4558]: I0120 17:38:51.508346 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:38:51 crc kubenswrapper[4558]: W0120 17:38:51.512773 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod64c80877_f13c_4a3d_b352_66dc008f4e1b.slice/crio-2a03679a313abd285c2b2c5a0fb9ab4133d4834b509b22347420d2ccfbd78825 WatchSource:0}: Error finding container 2a03679a313abd285c2b2c5a0fb9ab4133d4834b509b22347420d2ccfbd78825: Status 404 returned error can't find the container with id 2a03679a313abd285c2b2c5a0fb9ab4133d4834b509b22347420d2ccfbd78825 Jan 20 17:38:51 crc kubenswrapper[4558]: I0120 17:38:51.919427 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"64c80877-f13c-4a3d-b352-66dc008f4e1b","Type":"ContainerStarted","Data":"cbe6e8e1b940e039a8a8149b56ab4e7de324c89518b562816f041b1e4ddc6121"} Jan 20 17:38:51 crc kubenswrapper[4558]: I0120 17:38:51.920933 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"64c80877-f13c-4a3d-b352-66dc008f4e1b","Type":"ContainerStarted","Data":"f112bf24b33d220f86c5571e1b43c010cb9948d3f6d700a30ce27b148b5f96bb"} Jan 20 17:38:51 crc kubenswrapper[4558]: I0120 17:38:51.921029 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"64c80877-f13c-4a3d-b352-66dc008f4e1b","Type":"ContainerStarted","Data":"2a03679a313abd285c2b2c5a0fb9ab4133d4834b509b22347420d2ccfbd78825"} Jan 20 17:38:51 crc kubenswrapper[4558]: I0120 17:38:51.940658 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ovsdbserver-nb-0" podStartSLOduration=2.940646847 podStartE2EDuration="2.940646847s" podCreationTimestamp="2026-01-20 17:38:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:38:51.93689376 +0000 UTC m=+3425.697231727" watchObservedRunningTime="2026-01-20 17:38:51.940646847 +0000 UTC m=+3425.700984814" Jan 20 17:38:53 crc kubenswrapper[4558]: I0120 17:38:53.875064 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:38:53 crc kubenswrapper[4558]: I0120 17:38:53.876234 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:38:53 crc kubenswrapper[4558]: I0120 17:38:53.877983 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovndbcluster-sb-config" Jan 20 17:38:53 crc kubenswrapper[4558]: I0120 17:38:53.878229 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ovndbcluster-sb-ovndbs" Jan 20 17:38:53 crc kubenswrapper[4558]: I0120 17:38:53.879330 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ovncluster-ovndbcluster-sb-dockercfg-b24l8" Jan 20 17:38:53 crc kubenswrapper[4558]: I0120 17:38:53.879852 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovndbcluster-sb-scripts" Jan 20 17:38:53 crc kubenswrapper[4558]: I0120 17:38:53.893972 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:38:54 crc kubenswrapper[4558]: I0120 17:38:54.046403 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/b5ab3ab7-6929-4165-91ad-860f5f109147-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"b5ab3ab7-6929-4165-91ad-860f5f109147\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:38:54 crc kubenswrapper[4558]: I0120 17:38:54.046494 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5ab3ab7-6929-4165-91ad-860f5f109147-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"b5ab3ab7-6929-4165-91ad-860f5f109147\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:38:54 crc kubenswrapper[4558]: I0120 17:38:54.046680 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-sb-0\" (UID: \"b5ab3ab7-6929-4165-91ad-860f5f109147\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:38:54 crc kubenswrapper[4558]: I0120 17:38:54.046741 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pb654\" (UniqueName: \"kubernetes.io/projected/b5ab3ab7-6929-4165-91ad-860f5f109147-kube-api-access-pb654\") pod \"ovsdbserver-sb-0\" (UID: \"b5ab3ab7-6929-4165-91ad-860f5f109147\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:38:54 crc kubenswrapper[4558]: I0120 17:38:54.046783 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/b5ab3ab7-6929-4165-91ad-860f5f109147-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"b5ab3ab7-6929-4165-91ad-860f5f109147\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:38:54 crc kubenswrapper[4558]: I0120 17:38:54.046945 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b5ab3ab7-6929-4165-91ad-860f5f109147-config\") pod \"ovsdbserver-sb-0\" (UID: \"b5ab3ab7-6929-4165-91ad-860f5f109147\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:38:54 crc kubenswrapper[4558]: I0120 17:38:54.047225 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/b5ab3ab7-6929-4165-91ad-860f5f109147-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"b5ab3ab7-6929-4165-91ad-860f5f109147\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:38:54 crc kubenswrapper[4558]: I0120 17:38:54.047267 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b5ab3ab7-6929-4165-91ad-860f5f109147-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"b5ab3ab7-6929-4165-91ad-860f5f109147\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:38:54 crc kubenswrapper[4558]: I0120 17:38:54.084559 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:38:54 crc kubenswrapper[4558]: I0120 17:38:54.116133 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:38:54 crc kubenswrapper[4558]: I0120 17:38:54.149286 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-sb-0\" (UID: \"b5ab3ab7-6929-4165-91ad-860f5f109147\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:38:54 crc kubenswrapper[4558]: I0120 17:38:54.149339 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pb654\" (UniqueName: \"kubernetes.io/projected/b5ab3ab7-6929-4165-91ad-860f5f109147-kube-api-access-pb654\") pod \"ovsdbserver-sb-0\" (UID: \"b5ab3ab7-6929-4165-91ad-860f5f109147\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:38:54 crc kubenswrapper[4558]: I0120 17:38:54.149374 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/b5ab3ab7-6929-4165-91ad-860f5f109147-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"b5ab3ab7-6929-4165-91ad-860f5f109147\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:38:54 crc kubenswrapper[4558]: I0120 17:38:54.149408 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b5ab3ab7-6929-4165-91ad-860f5f109147-config\") pod \"ovsdbserver-sb-0\" (UID: \"b5ab3ab7-6929-4165-91ad-860f5f109147\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:38:54 crc kubenswrapper[4558]: I0120 17:38:54.149558 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b5ab3ab7-6929-4165-91ad-860f5f109147-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"b5ab3ab7-6929-4165-91ad-860f5f109147\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:38:54 crc kubenswrapper[4558]: I0120 17:38:54.149610 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b5ab3ab7-6929-4165-91ad-860f5f109147-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"b5ab3ab7-6929-4165-91ad-860f5f109147\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:38:54 crc kubenswrapper[4558]: I0120 17:38:54.149677 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/b5ab3ab7-6929-4165-91ad-860f5f109147-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"b5ab3ab7-6929-4165-91ad-860f5f109147\") " 
pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:38:54 crc kubenswrapper[4558]: I0120 17:38:54.149755 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-sb-0\" (UID: \"b5ab3ab7-6929-4165-91ad-860f5f109147\") device mount path \"/mnt/openstack/pv03\"" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:38:54 crc kubenswrapper[4558]: I0120 17:38:54.150260 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/b5ab3ab7-6929-4165-91ad-860f5f109147-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"b5ab3ab7-6929-4165-91ad-860f5f109147\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:38:54 crc kubenswrapper[4558]: I0120 17:38:54.150429 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5ab3ab7-6929-4165-91ad-860f5f109147-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"b5ab3ab7-6929-4165-91ad-860f5f109147\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:38:54 crc kubenswrapper[4558]: I0120 17:38:54.150929 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b5ab3ab7-6929-4165-91ad-860f5f109147-config\") pod \"ovsdbserver-sb-0\" (UID: \"b5ab3ab7-6929-4165-91ad-860f5f109147\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:38:54 crc kubenswrapper[4558]: I0120 17:38:54.151038 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b5ab3ab7-6929-4165-91ad-860f5f109147-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"b5ab3ab7-6929-4165-91ad-860f5f109147\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:38:54 crc kubenswrapper[4558]: I0120 17:38:54.157297 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b5ab3ab7-6929-4165-91ad-860f5f109147-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"b5ab3ab7-6929-4165-91ad-860f5f109147\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:38:54 crc kubenswrapper[4558]: I0120 17:38:54.157686 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5ab3ab7-6929-4165-91ad-860f5f109147-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"b5ab3ab7-6929-4165-91ad-860f5f109147\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:38:54 crc kubenswrapper[4558]: I0120 17:38:54.157911 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/b5ab3ab7-6929-4165-91ad-860f5f109147-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"b5ab3ab7-6929-4165-91ad-860f5f109147\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:38:54 crc kubenswrapper[4558]: I0120 17:38:54.164978 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pb654\" (UniqueName: \"kubernetes.io/projected/b5ab3ab7-6929-4165-91ad-860f5f109147-kube-api-access-pb654\") pod \"ovsdbserver-sb-0\" (UID: \"b5ab3ab7-6929-4165-91ad-860f5f109147\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:38:54 crc kubenswrapper[4558]: I0120 17:38:54.171067 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-sb-0\" (UID: \"b5ab3ab7-6929-4165-91ad-860f5f109147\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:38:54 crc kubenswrapper[4558]: I0120 17:38:54.192999 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:38:54 crc kubenswrapper[4558]: I0120 17:38:54.421979 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:38:54 crc kubenswrapper[4558]: I0120 17:38:54.422194 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:38:54 crc kubenswrapper[4558]: I0120 17:38:54.492672 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:38:54 crc kubenswrapper[4558]: I0120 17:38:54.592759 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:38:54 crc kubenswrapper[4558]: W0120 17:38:54.596452 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb5ab3ab7_6929_4165_91ad_860f5f109147.slice/crio-36646009b07943d76f79ec7d2b41ba3a0295d123b4995172e48f7668d169667f WatchSource:0}: Error finding container 36646009b07943d76f79ec7d2b41ba3a0295d123b4995172e48f7668d169667f: Status 404 returned error can't find the container with id 36646009b07943d76f79ec7d2b41ba3a0295d123b4995172e48f7668d169667f Jan 20 17:38:54 crc kubenswrapper[4558]: I0120 17:38:54.945595 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"b5ab3ab7-6929-4165-91ad-860f5f109147","Type":"ContainerStarted","Data":"1c7d263212edab169a57b366b0d7bfd66e4b2a4fcfe052179e753959b5379043"} Jan 20 17:38:54 crc kubenswrapper[4558]: I0120 17:38:54.945943 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"b5ab3ab7-6929-4165-91ad-860f5f109147","Type":"ContainerStarted","Data":"66f66c6ec7d01615c200c8de367e75e38763936522bc9d564df7a1fc052430d3"} Jan 20 17:38:54 crc kubenswrapper[4558]: I0120 17:38:54.945956 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"b5ab3ab7-6929-4165-91ad-860f5f109147","Type":"ContainerStarted","Data":"36646009b07943d76f79ec7d2b41ba3a0295d123b4995172e48f7668d169667f"} Jan 20 17:38:54 crc kubenswrapper[4558]: I0120 17:38:54.946883 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:38:55 crc kubenswrapper[4558]: I0120 17:38:55.011680 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:38:55 crc kubenswrapper[4558]: I0120 17:38:55.025288 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ovsdbserver-sb-0" podStartSLOduration=3.025270999 podStartE2EDuration="3.025270999s" podCreationTimestamp="2026-01-20 17:38:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:38:54.976046555 +0000 UTC m=+3428.736384512" watchObservedRunningTime="2026-01-20 17:38:55.025270999 +0000 UTC m=+3428.785608957" Jan 20 17:38:55 crc 
kubenswrapper[4558]: I0120 17:38:55.697504 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:38:55 crc kubenswrapper[4558]: I0120 17:38:55.697675 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:38:55 crc kubenswrapper[4558]: I0120 17:38:55.763339 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-db-create-hbtdf"] Jan 20 17:38:55 crc kubenswrapper[4558]: I0120 17:38:55.764595 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-db-create-hbtdf" Jan 20 17:38:55 crc kubenswrapper[4558]: I0120 17:38:55.775752 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-db-create-hbtdf"] Jan 20 17:38:55 crc kubenswrapper[4558]: I0120 17:38:55.887202 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-6ed6-account-create-update-vb72p"] Jan 20 17:38:55 crc kubenswrapper[4558]: I0120 17:38:55.888396 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-6ed6-account-create-update-vb72p" Jan 20 17:38:55 crc kubenswrapper[4558]: I0120 17:38:55.889035 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4b507c13-bb43-43c8-b5c6-7194f9355516-operator-scripts\") pod \"keystone-db-create-hbtdf\" (UID: \"4b507c13-bb43-43c8-b5c6-7194f9355516\") " pod="openstack-kuttl-tests/keystone-db-create-hbtdf" Jan 20 17:38:55 crc kubenswrapper[4558]: I0120 17:38:55.889267 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h7fjw\" (UniqueName: \"kubernetes.io/projected/4b507c13-bb43-43c8-b5c6-7194f9355516-kube-api-access-h7fjw\") pod \"keystone-db-create-hbtdf\" (UID: \"4b507c13-bb43-43c8-b5c6-7194f9355516\") " pod="openstack-kuttl-tests/keystone-db-create-hbtdf" Jan 20 17:38:55 crc kubenswrapper[4558]: I0120 17:38:55.890305 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-db-secret" Jan 20 17:38:55 crc kubenswrapper[4558]: I0120 17:38:55.895028 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-6ed6-account-create-update-vb72p"] Jan 20 17:38:55 crc kubenswrapper[4558]: I0120 17:38:55.991399 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4b507c13-bb43-43c8-b5c6-7194f9355516-operator-scripts\") pod \"keystone-db-create-hbtdf\" (UID: \"4b507c13-bb43-43c8-b5c6-7194f9355516\") " pod="openstack-kuttl-tests/keystone-db-create-hbtdf" Jan 20 17:38:55 crc kubenswrapper[4558]: I0120 17:38:55.991784 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h7fjw\" (UniqueName: \"kubernetes.io/projected/4b507c13-bb43-43c8-b5c6-7194f9355516-kube-api-access-h7fjw\") pod \"keystone-db-create-hbtdf\" (UID: \"4b507c13-bb43-43c8-b5c6-7194f9355516\") " pod="openstack-kuttl-tests/keystone-db-create-hbtdf" Jan 20 17:38:55 crc kubenswrapper[4558]: I0120 17:38:55.991885 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/25182339-1f27-4f20-9fee-e132979e83f3-operator-scripts\") pod \"keystone-6ed6-account-create-update-vb72p\" (UID: \"25182339-1f27-4f20-9fee-e132979e83f3\") " pod="openstack-kuttl-tests/keystone-6ed6-account-create-update-vb72p" Jan 20 17:38:55 crc kubenswrapper[4558]: I0120 17:38:55.991925 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pq9jd\" (UniqueName: \"kubernetes.io/projected/25182339-1f27-4f20-9fee-e132979e83f3-kube-api-access-pq9jd\") pod \"keystone-6ed6-account-create-update-vb72p\" (UID: \"25182339-1f27-4f20-9fee-e132979e83f3\") " pod="openstack-kuttl-tests/keystone-6ed6-account-create-update-vb72p" Jan 20 17:38:55 crc kubenswrapper[4558]: I0120 17:38:55.992128 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4b507c13-bb43-43c8-b5c6-7194f9355516-operator-scripts\") pod \"keystone-db-create-hbtdf\" (UID: \"4b507c13-bb43-43c8-b5c6-7194f9355516\") " pod="openstack-kuttl-tests/keystone-db-create-hbtdf" Jan 20 17:38:56 crc kubenswrapper[4558]: I0120 17:38:56.007031 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h7fjw\" (UniqueName: \"kubernetes.io/projected/4b507c13-bb43-43c8-b5c6-7194f9355516-kube-api-access-h7fjw\") pod \"keystone-db-create-hbtdf\" (UID: \"4b507c13-bb43-43c8-b5c6-7194f9355516\") " pod="openstack-kuttl-tests/keystone-db-create-hbtdf" Jan 20 17:38:56 crc kubenswrapper[4558]: I0120 17:38:56.080228 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-db-create-hbtdf" Jan 20 17:38:56 crc kubenswrapper[4558]: I0120 17:38:56.093719 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/25182339-1f27-4f20-9fee-e132979e83f3-operator-scripts\") pod \"keystone-6ed6-account-create-update-vb72p\" (UID: \"25182339-1f27-4f20-9fee-e132979e83f3\") " pod="openstack-kuttl-tests/keystone-6ed6-account-create-update-vb72p" Jan 20 17:38:56 crc kubenswrapper[4558]: I0120 17:38:56.093816 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pq9jd\" (UniqueName: \"kubernetes.io/projected/25182339-1f27-4f20-9fee-e132979e83f3-kube-api-access-pq9jd\") pod \"keystone-6ed6-account-create-update-vb72p\" (UID: \"25182339-1f27-4f20-9fee-e132979e83f3\") " pod="openstack-kuttl-tests/keystone-6ed6-account-create-update-vb72p" Jan 20 17:38:56 crc kubenswrapper[4558]: I0120 17:38:56.096501 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/25182339-1f27-4f20-9fee-e132979e83f3-operator-scripts\") pod \"keystone-6ed6-account-create-update-vb72p\" (UID: \"25182339-1f27-4f20-9fee-e132979e83f3\") " pod="openstack-kuttl-tests/keystone-6ed6-account-create-update-vb72p" Jan 20 17:38:56 crc kubenswrapper[4558]: I0120 17:38:56.102837 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/placement-db-create-jxmgh"] Jan 20 17:38:56 crc kubenswrapper[4558]: I0120 17:38:56.103998 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-db-create-jxmgh" Jan 20 17:38:56 crc kubenswrapper[4558]: I0120 17:38:56.114033 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-db-create-jxmgh"] Jan 20 17:38:56 crc kubenswrapper[4558]: I0120 17:38:56.117598 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pq9jd\" (UniqueName: \"kubernetes.io/projected/25182339-1f27-4f20-9fee-e132979e83f3-kube-api-access-pq9jd\") pod \"keystone-6ed6-account-create-update-vb72p\" (UID: \"25182339-1f27-4f20-9fee-e132979e83f3\") " pod="openstack-kuttl-tests/keystone-6ed6-account-create-update-vb72p" Jan 20 17:38:56 crc kubenswrapper[4558]: I0120 17:38:56.126498 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:38:56 crc kubenswrapper[4558]: I0120 17:38:56.197295 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0a030561-c5c2-4b2b-87fd-677bde229db8-operator-scripts\") pod \"placement-db-create-jxmgh\" (UID: \"0a030561-c5c2-4b2b-87fd-677bde229db8\") " pod="openstack-kuttl-tests/placement-db-create-jxmgh" Jan 20 17:38:56 crc kubenswrapper[4558]: I0120 17:38:56.197889 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l5gww\" (UniqueName: \"kubernetes.io/projected/0a030561-c5c2-4b2b-87fd-677bde229db8-kube-api-access-l5gww\") pod \"placement-db-create-jxmgh\" (UID: \"0a030561-c5c2-4b2b-87fd-677bde229db8\") " pod="openstack-kuttl-tests/placement-db-create-jxmgh" Jan 20 17:38:56 crc kubenswrapper[4558]: I0120 17:38:56.207827 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-6ed6-account-create-update-vb72p" Jan 20 17:38:56 crc kubenswrapper[4558]: I0120 17:38:56.221110 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/placement-12a5-account-create-update-gsb28"] Jan 20 17:38:56 crc kubenswrapper[4558]: I0120 17:38:56.222274 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-12a5-account-create-update-gsb28" Jan 20 17:38:56 crc kubenswrapper[4558]: I0120 17:38:56.226380 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"placement-db-secret" Jan 20 17:38:56 crc kubenswrapper[4558]: I0120 17:38:56.241182 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-12a5-account-create-update-gsb28"] Jan 20 17:38:56 crc kubenswrapper[4558]: I0120 17:38:56.300767 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0a030561-c5c2-4b2b-87fd-677bde229db8-operator-scripts\") pod \"placement-db-create-jxmgh\" (UID: \"0a030561-c5c2-4b2b-87fd-677bde229db8\") " pod="openstack-kuttl-tests/placement-db-create-jxmgh" Jan 20 17:38:56 crc kubenswrapper[4558]: I0120 17:38:56.300845 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l5gww\" (UniqueName: \"kubernetes.io/projected/0a030561-c5c2-4b2b-87fd-677bde229db8-kube-api-access-l5gww\") pod \"placement-db-create-jxmgh\" (UID: \"0a030561-c5c2-4b2b-87fd-677bde229db8\") " pod="openstack-kuttl-tests/placement-db-create-jxmgh" Jan 20 17:38:56 crc kubenswrapper[4558]: I0120 17:38:56.300871 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d4e31f5d-cc80-4318-8249-aa2bef62b79a-operator-scripts\") pod \"placement-12a5-account-create-update-gsb28\" (UID: \"d4e31f5d-cc80-4318-8249-aa2bef62b79a\") " pod="openstack-kuttl-tests/placement-12a5-account-create-update-gsb28" Jan 20 17:38:56 crc kubenswrapper[4558]: I0120 17:38:56.301019 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-762pn\" (UniqueName: \"kubernetes.io/projected/d4e31f5d-cc80-4318-8249-aa2bef62b79a-kube-api-access-762pn\") pod \"placement-12a5-account-create-update-gsb28\" (UID: \"d4e31f5d-cc80-4318-8249-aa2bef62b79a\") " pod="openstack-kuttl-tests/placement-12a5-account-create-update-gsb28" Jan 20 17:38:56 crc kubenswrapper[4558]: I0120 17:38:56.301604 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0a030561-c5c2-4b2b-87fd-677bde229db8-operator-scripts\") pod \"placement-db-create-jxmgh\" (UID: \"0a030561-c5c2-4b2b-87fd-677bde229db8\") " pod="openstack-kuttl-tests/placement-db-create-jxmgh" Jan 20 17:38:56 crc kubenswrapper[4558]: I0120 17:38:56.320864 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l5gww\" (UniqueName: \"kubernetes.io/projected/0a030561-c5c2-4b2b-87fd-677bde229db8-kube-api-access-l5gww\") pod \"placement-db-create-jxmgh\" (UID: \"0a030561-c5c2-4b2b-87fd-677bde229db8\") " pod="openstack-kuttl-tests/placement-db-create-jxmgh" Jan 20 17:38:56 crc kubenswrapper[4558]: I0120 17:38:56.403932 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-762pn\" (UniqueName: \"kubernetes.io/projected/d4e31f5d-cc80-4318-8249-aa2bef62b79a-kube-api-access-762pn\") pod \"placement-12a5-account-create-update-gsb28\" (UID: \"d4e31f5d-cc80-4318-8249-aa2bef62b79a\") " pod="openstack-kuttl-tests/placement-12a5-account-create-update-gsb28" Jan 20 17:38:56 crc kubenswrapper[4558]: I0120 17:38:56.404022 4558 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d4e31f5d-cc80-4318-8249-aa2bef62b79a-operator-scripts\") pod \"placement-12a5-account-create-update-gsb28\" (UID: \"d4e31f5d-cc80-4318-8249-aa2bef62b79a\") " pod="openstack-kuttl-tests/placement-12a5-account-create-update-gsb28" Jan 20 17:38:56 crc kubenswrapper[4558]: I0120 17:38:56.404754 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d4e31f5d-cc80-4318-8249-aa2bef62b79a-operator-scripts\") pod \"placement-12a5-account-create-update-gsb28\" (UID: \"d4e31f5d-cc80-4318-8249-aa2bef62b79a\") " pod="openstack-kuttl-tests/placement-12a5-account-create-update-gsb28" Jan 20 17:38:56 crc kubenswrapper[4558]: I0120 17:38:56.421851 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-762pn\" (UniqueName: \"kubernetes.io/projected/d4e31f5d-cc80-4318-8249-aa2bef62b79a-kube-api-access-762pn\") pod \"placement-12a5-account-create-update-gsb28\" (UID: \"d4e31f5d-cc80-4318-8249-aa2bef62b79a\") " pod="openstack-kuttl-tests/placement-12a5-account-create-update-gsb28" Jan 20 17:38:56 crc kubenswrapper[4558]: I0120 17:38:56.529152 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-db-create-jxmgh" Jan 20 17:38:56 crc kubenswrapper[4558]: I0120 17:38:56.532703 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-db-create-hbtdf"] Jan 20 17:38:56 crc kubenswrapper[4558]: W0120 17:38:56.536021 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4b507c13_bb43_43c8_b5c6_7194f9355516.slice/crio-f46105820f9a6fa789287e9d34c5e7d7ef167a29ad7e023c0316572409299b54 WatchSource:0}: Error finding container f46105820f9a6fa789287e9d34c5e7d7ef167a29ad7e023c0316572409299b54: Status 404 returned error can't find the container with id f46105820f9a6fa789287e9d34c5e7d7ef167a29ad7e023c0316572409299b54 Jan 20 17:38:56 crc kubenswrapper[4558]: I0120 17:38:56.541510 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-12a5-account-create-update-gsb28" Jan 20 17:38:56 crc kubenswrapper[4558]: I0120 17:38:56.634199 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-6ed6-account-create-update-vb72p"] Jan 20 17:38:56 crc kubenswrapper[4558]: W0120 17:38:56.652896 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod25182339_1f27_4f20_9fee_e132979e83f3.slice/crio-8685e7674f8abc592846dd4450ece4a80f957c1f4f09d040ea7232f6c61fccb9 WatchSource:0}: Error finding container 8685e7674f8abc592846dd4450ece4a80f957c1f4f09d040ea7232f6c61fccb9: Status 404 returned error can't find the container with id 8685e7674f8abc592846dd4450ece4a80f957c1f4f09d040ea7232f6c61fccb9 Jan 20 17:38:56 crc kubenswrapper[4558]: I0120 17:38:56.970049 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-6ed6-account-create-update-vb72p" event={"ID":"25182339-1f27-4f20-9fee-e132979e83f3","Type":"ContainerStarted","Data":"6c282fad70062d85972c6adb482f23b17e657cdacbcb499fa61b4c6196a96f01"} Jan 20 17:38:56 crc kubenswrapper[4558]: I0120 17:38:56.970097 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-6ed6-account-create-update-vb72p" event={"ID":"25182339-1f27-4f20-9fee-e132979e83f3","Type":"ContainerStarted","Data":"8685e7674f8abc592846dd4450ece4a80f957c1f4f09d040ea7232f6c61fccb9"} Jan 20 17:38:56 crc kubenswrapper[4558]: I0120 17:38:56.972349 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-create-hbtdf" event={"ID":"4b507c13-bb43-43c8-b5c6-7194f9355516","Type":"ContainerStarted","Data":"5ccb1048b6361c306cfb63ee87e682c10253e16a9c797cf219976e09c2dfdba2"} Jan 20 17:38:56 crc kubenswrapper[4558]: I0120 17:38:56.972406 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-create-hbtdf" event={"ID":"4b507c13-bb43-43c8-b5c6-7194f9355516","Type":"ContainerStarted","Data":"f46105820f9a6fa789287e9d34c5e7d7ef167a29ad7e023c0316572409299b54"} Jan 20 17:38:56 crc kubenswrapper[4558]: I0120 17:38:56.997084 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/keystone-6ed6-account-create-update-vb72p" podStartSLOduration=1.9970604349999999 podStartE2EDuration="1.997060435s" podCreationTimestamp="2026-01-20 17:38:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:38:56.99200533 +0000 UTC m=+3430.752343298" watchObservedRunningTime="2026-01-20 17:38:56.997060435 +0000 UTC m=+3430.757398402" Jan 20 17:38:57 crc kubenswrapper[4558]: I0120 17:38:57.008700 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-db-create-jxmgh"] Jan 20 17:38:57 crc kubenswrapper[4558]: W0120 17:38:57.013321 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0a030561_c5c2_4b2b_87fd_677bde229db8.slice/crio-3c7c2e3da653026d092e81c0a6cc391a631e3b3deb5d760f6909efd81f60deb4 WatchSource:0}: Error finding container 3c7c2e3da653026d092e81c0a6cc391a631e3b3deb5d760f6909efd81f60deb4: Status 404 returned error can't find the container with id 3c7c2e3da653026d092e81c0a6cc391a631e3b3deb5d760f6909efd81f60deb4 Jan 20 17:38:57 crc kubenswrapper[4558]: I0120 17:38:57.020722 4558 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/keystone-db-create-hbtdf" podStartSLOduration=2.020702267 podStartE2EDuration="2.020702267s" podCreationTimestamp="2026-01-20 17:38:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:38:57.019194683 +0000 UTC m=+3430.779532650" watchObservedRunningTime="2026-01-20 17:38:57.020702267 +0000 UTC m=+3430.781040234" Jan 20 17:38:57 crc kubenswrapper[4558]: I0120 17:38:57.080962 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-12a5-account-create-update-gsb28"] Jan 20 17:38:57 crc kubenswrapper[4558]: I0120 17:38:57.193568 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:38:57 crc kubenswrapper[4558]: I0120 17:38:57.228085 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:38:57 crc kubenswrapper[4558]: I0120 17:38:57.963273 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:38:57 crc kubenswrapper[4558]: I0120 17:38:57.986731 4558 generic.go:334] "Generic (PLEG): container finished" podID="25182339-1f27-4f20-9fee-e132979e83f3" containerID="6c282fad70062d85972c6adb482f23b17e657cdacbcb499fa61b4c6196a96f01" exitCode=0 Jan 20 17:38:57 crc kubenswrapper[4558]: I0120 17:38:57.986842 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-6ed6-account-create-update-vb72p" event={"ID":"25182339-1f27-4f20-9fee-e132979e83f3","Type":"ContainerDied","Data":"6c282fad70062d85972c6adb482f23b17e657cdacbcb499fa61b4c6196a96f01"} Jan 20 17:38:57 crc kubenswrapper[4558]: I0120 17:38:57.988790 4558 generic.go:334] "Generic (PLEG): container finished" podID="0a030561-c5c2-4b2b-87fd-677bde229db8" containerID="c6211ef8501688a72f4964e04a066c8f91536f9834ec00cc06f62c4f60798f0c" exitCode=0 Jan 20 17:38:57 crc kubenswrapper[4558]: I0120 17:38:57.988826 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-create-jxmgh" event={"ID":"0a030561-c5c2-4b2b-87fd-677bde229db8","Type":"ContainerDied","Data":"c6211ef8501688a72f4964e04a066c8f91536f9834ec00cc06f62c4f60798f0c"} Jan 20 17:38:57 crc kubenswrapper[4558]: I0120 17:38:57.988864 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-create-jxmgh" event={"ID":"0a030561-c5c2-4b2b-87fd-677bde229db8","Type":"ContainerStarted","Data":"3c7c2e3da653026d092e81c0a6cc391a631e3b3deb5d760f6909efd81f60deb4"} Jan 20 17:38:57 crc kubenswrapper[4558]: I0120 17:38:57.990391 4558 generic.go:334] "Generic (PLEG): container finished" podID="4b507c13-bb43-43c8-b5c6-7194f9355516" containerID="5ccb1048b6361c306cfb63ee87e682c10253e16a9c797cf219976e09c2dfdba2" exitCode=0 Jan 20 17:38:57 crc kubenswrapper[4558]: I0120 17:38:57.990456 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-create-hbtdf" event={"ID":"4b507c13-bb43-43c8-b5c6-7194f9355516","Type":"ContainerDied","Data":"5ccb1048b6361c306cfb63ee87e682c10253e16a9c797cf219976e09c2dfdba2"} Jan 20 17:38:57 crc kubenswrapper[4558]: I0120 17:38:57.992382 4558 generic.go:334] "Generic (PLEG): container finished" podID="d4e31f5d-cc80-4318-8249-aa2bef62b79a" 
containerID="4bebdee4f64ef3c3342aede8317035e0af61503dae7f2812fa875d2a9e4ae32e" exitCode=0 Jan 20 17:38:57 crc kubenswrapper[4558]: I0120 17:38:57.992726 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-12a5-account-create-update-gsb28" event={"ID":"d4e31f5d-cc80-4318-8249-aa2bef62b79a","Type":"ContainerDied","Data":"4bebdee4f64ef3c3342aede8317035e0af61503dae7f2812fa875d2a9e4ae32e"} Jan 20 17:38:57 crc kubenswrapper[4558]: I0120 17:38:57.992772 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-12a5-account-create-update-gsb28" event={"ID":"d4e31f5d-cc80-4318-8249-aa2bef62b79a","Type":"ContainerStarted","Data":"375ab16254f7a1dc55d3686fd61c8e4a06f1703eac087a480501e328fdb0de48"} Jan 20 17:38:57 crc kubenswrapper[4558]: I0120 17:38:57.992917 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:38:57 crc kubenswrapper[4558]: I0120 17:38:57.993114 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:38:58 crc kubenswrapper[4558]: I0120 17:38:58.078761 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:38:58 crc kubenswrapper[4558]: I0120 17:38:58.912410 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/swift-storage-0"] Jan 20 17:38:58 crc kubenswrapper[4558]: I0120 17:38:58.923246 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:38:58 crc kubenswrapper[4558]: I0120 17:38:58.925082 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"swift-conf" Jan 20 17:38:58 crc kubenswrapper[4558]: I0120 17:38:58.925209 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"swift-ring-files" Jan 20 17:38:58 crc kubenswrapper[4558]: I0120 17:38:58.929243 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"swift-storage-config-data" Jan 20 17:38:58 crc kubenswrapper[4558]: I0120 17:38:58.929325 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"swift-swift-dockercfg-fvfsd" Jan 20 17:38:58 crc kubenswrapper[4558]: I0120 17:38:58.937312 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-storage-0"] Jan 20 17:38:58 crc kubenswrapper[4558]: I0120 17:38:58.950391 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/fa551db1-9a4b-45c2-b640-4f3518b162f4-lock\") pod \"swift-storage-0\" (UID: \"fa551db1-9a4b-45c2-b640-4f3518b162f4\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:38:58 crc kubenswrapper[4558]: I0120 17:38:58.950438 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/fa551db1-9a4b-45c2-b640-4f3518b162f4-cache\") pod \"swift-storage-0\" (UID: \"fa551db1-9a4b-45c2-b640-4f3518b162f4\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:38:58 crc kubenswrapper[4558]: I0120 17:38:58.950469 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: 
\"kubernetes.io/projected/fa551db1-9a4b-45c2-b640-4f3518b162f4-etc-swift\") pod \"swift-storage-0\" (UID: \"fa551db1-9a4b-45c2-b640-4f3518b162f4\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:38:58 crc kubenswrapper[4558]: I0120 17:38:58.950585 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"swift-storage-0\" (UID: \"fa551db1-9a4b-45c2-b640-4f3518b162f4\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:38:58 crc kubenswrapper[4558]: I0120 17:38:58.950617 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l6zjs\" (UniqueName: \"kubernetes.io/projected/fa551db1-9a4b-45c2-b640-4f3518b162f4-kube-api-access-l6zjs\") pod \"swift-storage-0\" (UID: \"fa551db1-9a4b-45c2-b640-4f3518b162f4\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:38:59 crc kubenswrapper[4558]: I0120 17:38:59.041516 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:38:59 crc kubenswrapper[4558]: I0120 17:38:59.052281 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/fa551db1-9a4b-45c2-b640-4f3518b162f4-lock\") pod \"swift-storage-0\" (UID: \"fa551db1-9a4b-45c2-b640-4f3518b162f4\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:38:59 crc kubenswrapper[4558]: I0120 17:38:59.052320 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/fa551db1-9a4b-45c2-b640-4f3518b162f4-cache\") pod \"swift-storage-0\" (UID: \"fa551db1-9a4b-45c2-b640-4f3518b162f4\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:38:59 crc kubenswrapper[4558]: I0120 17:38:59.052347 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/fa551db1-9a4b-45c2-b640-4f3518b162f4-etc-swift\") pod \"swift-storage-0\" (UID: \"fa551db1-9a4b-45c2-b640-4f3518b162f4\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:38:59 crc kubenswrapper[4558]: I0120 17:38:59.052414 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"swift-storage-0\" (UID: \"fa551db1-9a4b-45c2-b640-4f3518b162f4\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:38:59 crc kubenswrapper[4558]: I0120 17:38:59.052437 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l6zjs\" (UniqueName: \"kubernetes.io/projected/fa551db1-9a4b-45c2-b640-4f3518b162f4-kube-api-access-l6zjs\") pod \"swift-storage-0\" (UID: \"fa551db1-9a4b-45c2-b640-4f3518b162f4\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:38:59 crc kubenswrapper[4558]: E0120 17:38:59.052609 4558 projected.go:288] Couldn't get configMap openstack-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 20 17:38:59 crc kubenswrapper[4558]: E0120 17:38:59.052640 4558 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Jan 20 17:38:59 crc kubenswrapper[4558]: E0120 17:38:59.052693 4558 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/projected/fa551db1-9a4b-45c2-b640-4f3518b162f4-etc-swift podName:fa551db1-9a4b-45c2-b640-4f3518b162f4 nodeName:}" failed. No retries permitted until 2026-01-20 17:38:59.552673499 +0000 UTC m=+3433.313011466 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/fa551db1-9a4b-45c2-b640-4f3518b162f4-etc-swift") pod "swift-storage-0" (UID: "fa551db1-9a4b-45c2-b640-4f3518b162f4") : configmap "swift-ring-files" not found Jan 20 17:38:59 crc kubenswrapper[4558]: I0120 17:38:59.052771 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"swift-storage-0\" (UID: \"fa551db1-9a4b-45c2-b640-4f3518b162f4\") device mount path \"/mnt/openstack/pv07\"" pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:38:59 crc kubenswrapper[4558]: I0120 17:38:59.052902 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/fa551db1-9a4b-45c2-b640-4f3518b162f4-cache\") pod \"swift-storage-0\" (UID: \"fa551db1-9a4b-45c2-b640-4f3518b162f4\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:38:59 crc kubenswrapper[4558]: I0120 17:38:59.052951 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/fa551db1-9a4b-45c2-b640-4f3518b162f4-lock\") pod \"swift-storage-0\" (UID: \"fa551db1-9a4b-45c2-b640-4f3518b162f4\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:38:59 crc kubenswrapper[4558]: I0120 17:38:59.079967 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l6zjs\" (UniqueName: \"kubernetes.io/projected/fa551db1-9a4b-45c2-b640-4f3518b162f4-kube-api-access-l6zjs\") pod \"swift-storage-0\" (UID: \"fa551db1-9a4b-45c2-b640-4f3518b162f4\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:38:59 crc kubenswrapper[4558]: I0120 17:38:59.087347 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"swift-storage-0\" (UID: \"fa551db1-9a4b-45c2-b640-4f3518b162f4\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:38:59 crc kubenswrapper[4558]: I0120 17:38:59.223133 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:38:59 crc kubenswrapper[4558]: I0120 17:38:59.224667 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:38:59 crc kubenswrapper[4558]: I0120 17:38:59.226886 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovnnorthd-scripts" Jan 20 17:38:59 crc kubenswrapper[4558]: I0120 17:38:59.227112 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ovnnorthd-ovnnorthd-dockercfg-rznpl" Jan 20 17:38:59 crc kubenswrapper[4558]: I0120 17:38:59.227275 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovnnorthd-config" Jan 20 17:38:59 crc kubenswrapper[4558]: I0120 17:38:59.228891 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ovnnorthd-ovndbs" Jan 20 17:38:59 crc kubenswrapper[4558]: I0120 17:38:59.247758 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:38:59 crc kubenswrapper[4558]: I0120 17:38:59.358247 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/4d2b64b6-6310-4348-a016-e7d8317e00d9-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"4d2b64b6-6310-4348-a016-e7d8317e00d9\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:38:59 crc kubenswrapper[4558]: I0120 17:38:59.358554 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/4d2b64b6-6310-4348-a016-e7d8317e00d9-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"4d2b64b6-6310-4348-a016-e7d8317e00d9\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:38:59 crc kubenswrapper[4558]: I0120 17:38:59.358612 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-htf4p\" (UniqueName: \"kubernetes.io/projected/4d2b64b6-6310-4348-a016-e7d8317e00d9-kube-api-access-htf4p\") pod \"ovn-northd-0\" (UID: \"4d2b64b6-6310-4348-a016-e7d8317e00d9\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:38:59 crc kubenswrapper[4558]: I0120 17:38:59.358656 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d2b64b6-6310-4348-a016-e7d8317e00d9-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"4d2b64b6-6310-4348-a016-e7d8317e00d9\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:38:59 crc kubenswrapper[4558]: I0120 17:38:59.358736 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/4d2b64b6-6310-4348-a016-e7d8317e00d9-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"4d2b64b6-6310-4348-a016-e7d8317e00d9\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:38:59 crc kubenswrapper[4558]: I0120 17:38:59.358763 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4d2b64b6-6310-4348-a016-e7d8317e00d9-scripts\") pod \"ovn-northd-0\" (UID: \"4d2b64b6-6310-4348-a016-e7d8317e00d9\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:38:59 crc kubenswrapper[4558]: I0120 17:38:59.358793 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/4d2b64b6-6310-4348-a016-e7d8317e00d9-config\") pod \"ovn-northd-0\" (UID: \"4d2b64b6-6310-4348-a016-e7d8317e00d9\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:38:59 crc kubenswrapper[4558]: I0120 17:38:59.405739 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-6ed6-account-create-update-vb72p" Jan 20 17:38:59 crc kubenswrapper[4558]: I0120 17:38:59.460859 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/4d2b64b6-6310-4348-a016-e7d8317e00d9-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"4d2b64b6-6310-4348-a016-e7d8317e00d9\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:38:59 crc kubenswrapper[4558]: I0120 17:38:59.460933 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/4d2b64b6-6310-4348-a016-e7d8317e00d9-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"4d2b64b6-6310-4348-a016-e7d8317e00d9\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:38:59 crc kubenswrapper[4558]: I0120 17:38:59.460997 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-htf4p\" (UniqueName: \"kubernetes.io/projected/4d2b64b6-6310-4348-a016-e7d8317e00d9-kube-api-access-htf4p\") pod \"ovn-northd-0\" (UID: \"4d2b64b6-6310-4348-a016-e7d8317e00d9\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:38:59 crc kubenswrapper[4558]: I0120 17:38:59.461050 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d2b64b6-6310-4348-a016-e7d8317e00d9-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"4d2b64b6-6310-4348-a016-e7d8317e00d9\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:38:59 crc kubenswrapper[4558]: I0120 17:38:59.461125 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/4d2b64b6-6310-4348-a016-e7d8317e00d9-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"4d2b64b6-6310-4348-a016-e7d8317e00d9\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:38:59 crc kubenswrapper[4558]: I0120 17:38:59.461153 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4d2b64b6-6310-4348-a016-e7d8317e00d9-scripts\") pod \"ovn-northd-0\" (UID: \"4d2b64b6-6310-4348-a016-e7d8317e00d9\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:38:59 crc kubenswrapper[4558]: I0120 17:38:59.461204 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4d2b64b6-6310-4348-a016-e7d8317e00d9-config\") pod \"ovn-northd-0\" (UID: \"4d2b64b6-6310-4348-a016-e7d8317e00d9\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:38:59 crc kubenswrapper[4558]: I0120 17:38:59.462081 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/4d2b64b6-6310-4348-a016-e7d8317e00d9-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"4d2b64b6-6310-4348-a016-e7d8317e00d9\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:38:59 crc kubenswrapper[4558]: I0120 17:38:59.462490 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/4d2b64b6-6310-4348-a016-e7d8317e00d9-config\") pod \"ovn-northd-0\" (UID: \"4d2b64b6-6310-4348-a016-e7d8317e00d9\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:38:59 crc kubenswrapper[4558]: I0120 17:38:59.463553 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4d2b64b6-6310-4348-a016-e7d8317e00d9-scripts\") pod \"ovn-northd-0\" (UID: \"4d2b64b6-6310-4348-a016-e7d8317e00d9\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:38:59 crc kubenswrapper[4558]: I0120 17:38:59.475391 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d2b64b6-6310-4348-a016-e7d8317e00d9-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"4d2b64b6-6310-4348-a016-e7d8317e00d9\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:38:59 crc kubenswrapper[4558]: I0120 17:38:59.478016 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/4d2b64b6-6310-4348-a016-e7d8317e00d9-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"4d2b64b6-6310-4348-a016-e7d8317e00d9\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:38:59 crc kubenswrapper[4558]: I0120 17:38:59.479720 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/4d2b64b6-6310-4348-a016-e7d8317e00d9-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"4d2b64b6-6310-4348-a016-e7d8317e00d9\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:38:59 crc kubenswrapper[4558]: I0120 17:38:59.481526 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-htf4p\" (UniqueName: \"kubernetes.io/projected/4d2b64b6-6310-4348-a016-e7d8317e00d9-kube-api-access-htf4p\") pod \"ovn-northd-0\" (UID: \"4d2b64b6-6310-4348-a016-e7d8317e00d9\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:38:59 crc kubenswrapper[4558]: I0120 17:38:59.548401 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:38:59 crc kubenswrapper[4558]: I0120 17:38:59.550572 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-db-create-jxmgh" Jan 20 17:38:59 crc kubenswrapper[4558]: I0120 17:38:59.562767 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pq9jd\" (UniqueName: \"kubernetes.io/projected/25182339-1f27-4f20-9fee-e132979e83f3-kube-api-access-pq9jd\") pod \"25182339-1f27-4f20-9fee-e132979e83f3\" (UID: \"25182339-1f27-4f20-9fee-e132979e83f3\") " Jan 20 17:38:59 crc kubenswrapper[4558]: I0120 17:38:59.562827 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/25182339-1f27-4f20-9fee-e132979e83f3-operator-scripts\") pod \"25182339-1f27-4f20-9fee-e132979e83f3\" (UID: \"25182339-1f27-4f20-9fee-e132979e83f3\") " Jan 20 17:38:59 crc kubenswrapper[4558]: I0120 17:38:59.565521 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25182339-1f27-4f20-9fee-e132979e83f3-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "25182339-1f27-4f20-9fee-e132979e83f3" (UID: "25182339-1f27-4f20-9fee-e132979e83f3"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:38:59 crc kubenswrapper[4558]: I0120 17:38:59.566935 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/fa551db1-9a4b-45c2-b640-4f3518b162f4-etc-swift\") pod \"swift-storage-0\" (UID: \"fa551db1-9a4b-45c2-b640-4f3518b162f4\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:38:59 crc kubenswrapper[4558]: E0120 17:38:59.567317 4558 projected.go:288] Couldn't get configMap openstack-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 20 17:38:59 crc kubenswrapper[4558]: E0120 17:38:59.567332 4558 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Jan 20 17:38:59 crc kubenswrapper[4558]: E0120 17:38:59.567375 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/fa551db1-9a4b-45c2-b640-4f3518b162f4-etc-swift podName:fa551db1-9a4b-45c2-b640-4f3518b162f4 nodeName:}" failed. No retries permitted until 2026-01-20 17:39:00.567360194 +0000 UTC m=+3434.327698151 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/fa551db1-9a4b-45c2-b640-4f3518b162f4-etc-swift") pod "swift-storage-0" (UID: "fa551db1-9a4b-45c2-b640-4f3518b162f4") : configmap "swift-ring-files" not found Jan 20 17:38:59 crc kubenswrapper[4558]: I0120 17:38:59.570641 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/25182339-1f27-4f20-9fee-e132979e83f3-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:38:59 crc kubenswrapper[4558]: I0120 17:38:59.572095 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25182339-1f27-4f20-9fee-e132979e83f3-kube-api-access-pq9jd" (OuterVolumeSpecName: "kube-api-access-pq9jd") pod "25182339-1f27-4f20-9fee-e132979e83f3" (UID: "25182339-1f27-4f20-9fee-e132979e83f3"). InnerVolumeSpecName "kube-api-access-pq9jd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:38:59 crc kubenswrapper[4558]: I0120 17:38:59.598393 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-db-create-hbtdf" Jan 20 17:38:59 crc kubenswrapper[4558]: I0120 17:38:59.602945 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-12a5-account-create-update-gsb28" Jan 20 17:38:59 crc kubenswrapper[4558]: I0120 17:38:59.675972 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l5gww\" (UniqueName: \"kubernetes.io/projected/0a030561-c5c2-4b2b-87fd-677bde229db8-kube-api-access-l5gww\") pod \"0a030561-c5c2-4b2b-87fd-677bde229db8\" (UID: \"0a030561-c5c2-4b2b-87fd-677bde229db8\") " Jan 20 17:38:59 crc kubenswrapper[4558]: I0120 17:38:59.676373 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0a030561-c5c2-4b2b-87fd-677bde229db8-operator-scripts\") pod \"0a030561-c5c2-4b2b-87fd-677bde229db8\" (UID: \"0a030561-c5c2-4b2b-87fd-677bde229db8\") " Jan 20 17:38:59 crc kubenswrapper[4558]: I0120 17:38:59.679678 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pq9jd\" (UniqueName: \"kubernetes.io/projected/25182339-1f27-4f20-9fee-e132979e83f3-kube-api-access-pq9jd\") on node \"crc\" DevicePath \"\"" Jan 20 17:38:59 crc kubenswrapper[4558]: I0120 17:38:59.681099 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0a030561-c5c2-4b2b-87fd-677bde229db8-kube-api-access-l5gww" (OuterVolumeSpecName: "kube-api-access-l5gww") pod "0a030561-c5c2-4b2b-87fd-677bde229db8" (UID: "0a030561-c5c2-4b2b-87fd-677bde229db8"). InnerVolumeSpecName "kube-api-access-l5gww". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:38:59 crc kubenswrapper[4558]: I0120 17:38:59.682239 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0a030561-c5c2-4b2b-87fd-677bde229db8-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "0a030561-c5c2-4b2b-87fd-677bde229db8" (UID: "0a030561-c5c2-4b2b-87fd-677bde229db8"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:38:59 crc kubenswrapper[4558]: I0120 17:38:59.780761 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4b507c13-bb43-43c8-b5c6-7194f9355516-operator-scripts\") pod \"4b507c13-bb43-43c8-b5c6-7194f9355516\" (UID: \"4b507c13-bb43-43c8-b5c6-7194f9355516\") " Jan 20 17:38:59 crc kubenswrapper[4558]: I0120 17:38:59.780831 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h7fjw\" (UniqueName: \"kubernetes.io/projected/4b507c13-bb43-43c8-b5c6-7194f9355516-kube-api-access-h7fjw\") pod \"4b507c13-bb43-43c8-b5c6-7194f9355516\" (UID: \"4b507c13-bb43-43c8-b5c6-7194f9355516\") " Jan 20 17:38:59 crc kubenswrapper[4558]: I0120 17:38:59.780914 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-762pn\" (UniqueName: \"kubernetes.io/projected/d4e31f5d-cc80-4318-8249-aa2bef62b79a-kube-api-access-762pn\") pod \"d4e31f5d-cc80-4318-8249-aa2bef62b79a\" (UID: \"d4e31f5d-cc80-4318-8249-aa2bef62b79a\") " Jan 20 17:38:59 crc kubenswrapper[4558]: I0120 17:38:59.780948 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d4e31f5d-cc80-4318-8249-aa2bef62b79a-operator-scripts\") pod \"d4e31f5d-cc80-4318-8249-aa2bef62b79a\" (UID: \"d4e31f5d-cc80-4318-8249-aa2bef62b79a\") " Jan 20 17:38:59 crc kubenswrapper[4558]: I0120 17:38:59.781750 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l5gww\" (UniqueName: \"kubernetes.io/projected/0a030561-c5c2-4b2b-87fd-677bde229db8-kube-api-access-l5gww\") on node \"crc\" DevicePath \"\"" Jan 20 17:38:59 crc kubenswrapper[4558]: I0120 17:38:59.781778 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0a030561-c5c2-4b2b-87fd-677bde229db8-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:38:59 crc kubenswrapper[4558]: I0120 17:38:59.782273 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d4e31f5d-cc80-4318-8249-aa2bef62b79a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d4e31f5d-cc80-4318-8249-aa2bef62b79a" (UID: "d4e31f5d-cc80-4318-8249-aa2bef62b79a"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:38:59 crc kubenswrapper[4558]: I0120 17:38:59.782687 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4b507c13-bb43-43c8-b5c6-7194f9355516-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "4b507c13-bb43-43c8-b5c6-7194f9355516" (UID: "4b507c13-bb43-43c8-b5c6-7194f9355516"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:38:59 crc kubenswrapper[4558]: I0120 17:38:59.789807 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d4e31f5d-cc80-4318-8249-aa2bef62b79a-kube-api-access-762pn" (OuterVolumeSpecName: "kube-api-access-762pn") pod "d4e31f5d-cc80-4318-8249-aa2bef62b79a" (UID: "d4e31f5d-cc80-4318-8249-aa2bef62b79a"). InnerVolumeSpecName "kube-api-access-762pn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:38:59 crc kubenswrapper[4558]: I0120 17:38:59.790368 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4b507c13-bb43-43c8-b5c6-7194f9355516-kube-api-access-h7fjw" (OuterVolumeSpecName: "kube-api-access-h7fjw") pod "4b507c13-bb43-43c8-b5c6-7194f9355516" (UID: "4b507c13-bb43-43c8-b5c6-7194f9355516"). InnerVolumeSpecName "kube-api-access-h7fjw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:38:59 crc kubenswrapper[4558]: I0120 17:38:59.883636 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4b507c13-bb43-43c8-b5c6-7194f9355516-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:38:59 crc kubenswrapper[4558]: I0120 17:38:59.883908 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h7fjw\" (UniqueName: \"kubernetes.io/projected/4b507c13-bb43-43c8-b5c6-7194f9355516-kube-api-access-h7fjw\") on node \"crc\" DevicePath \"\"" Jan 20 17:38:59 crc kubenswrapper[4558]: I0120 17:38:59.883922 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-762pn\" (UniqueName: \"kubernetes.io/projected/d4e31f5d-cc80-4318-8249-aa2bef62b79a-kube-api-access-762pn\") on node \"crc\" DevicePath \"\"" Jan 20 17:38:59 crc kubenswrapper[4558]: I0120 17:38:59.883932 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d4e31f5d-cc80-4318-8249-aa2bef62b79a-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:00 crc kubenswrapper[4558]: I0120 17:39:00.008101 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:39:00 crc kubenswrapper[4558]: I0120 17:39:00.016277 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-create-hbtdf" event={"ID":"4b507c13-bb43-43c8-b5c6-7194f9355516","Type":"ContainerDied","Data":"f46105820f9a6fa789287e9d34c5e7d7ef167a29ad7e023c0316572409299b54"} Jan 20 17:39:00 crc kubenswrapper[4558]: I0120 17:39:00.016348 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f46105820f9a6fa789287e9d34c5e7d7ef167a29ad7e023c0316572409299b54" Jan 20 17:39:00 crc kubenswrapper[4558]: I0120 17:39:00.016308 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-db-create-hbtdf" Jan 20 17:39:00 crc kubenswrapper[4558]: I0120 17:39:00.018411 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-12a5-account-create-update-gsb28" event={"ID":"d4e31f5d-cc80-4318-8249-aa2bef62b79a","Type":"ContainerDied","Data":"375ab16254f7a1dc55d3686fd61c8e4a06f1703eac087a480501e328fdb0de48"} Jan 20 17:39:00 crc kubenswrapper[4558]: I0120 17:39:00.018464 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="375ab16254f7a1dc55d3686fd61c8e4a06f1703eac087a480501e328fdb0de48" Jan 20 17:39:00 crc kubenswrapper[4558]: I0120 17:39:00.018528 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-12a5-account-create-update-gsb28" Jan 20 17:39:00 crc kubenswrapper[4558]: I0120 17:39:00.029936 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-6ed6-account-create-update-vb72p" event={"ID":"25182339-1f27-4f20-9fee-e132979e83f3","Type":"ContainerDied","Data":"8685e7674f8abc592846dd4450ece4a80f957c1f4f09d040ea7232f6c61fccb9"} Jan 20 17:39:00 crc kubenswrapper[4558]: I0120 17:39:00.030004 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8685e7674f8abc592846dd4450ece4a80f957c1f4f09d040ea7232f6c61fccb9" Jan 20 17:39:00 crc kubenswrapper[4558]: I0120 17:39:00.029975 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-6ed6-account-create-update-vb72p" Jan 20 17:39:00 crc kubenswrapper[4558]: I0120 17:39:00.032652 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-create-jxmgh" event={"ID":"0a030561-c5c2-4b2b-87fd-677bde229db8","Type":"ContainerDied","Data":"3c7c2e3da653026d092e81c0a6cc391a631e3b3deb5d760f6909efd81f60deb4"} Jan 20 17:39:00 crc kubenswrapper[4558]: I0120 17:39:00.032725 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3c7c2e3da653026d092e81c0a6cc391a631e3b3deb5d760f6909efd81f60deb4" Jan 20 17:39:00 crc kubenswrapper[4558]: I0120 17:39:00.032792 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-db-create-jxmgh" Jan 20 17:39:00 crc kubenswrapper[4558]: I0120 17:39:00.598767 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/fa551db1-9a4b-45c2-b640-4f3518b162f4-etc-swift\") pod \"swift-storage-0\" (UID: \"fa551db1-9a4b-45c2-b640-4f3518b162f4\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:39:00 crc kubenswrapper[4558]: E0120 17:39:00.598985 4558 projected.go:288] Couldn't get configMap openstack-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 20 17:39:00 crc kubenswrapper[4558]: E0120 17:39:00.599808 4558 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Jan 20 17:39:00 crc kubenswrapper[4558]: E0120 17:39:00.599916 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/fa551db1-9a4b-45c2-b640-4f3518b162f4-etc-swift podName:fa551db1-9a4b-45c2-b640-4f3518b162f4 nodeName:}" failed. No retries permitted until 2026-01-20 17:39:02.599899986 +0000 UTC m=+3436.360237953 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/fa551db1-9a4b-45c2-b640-4f3518b162f4-etc-swift") pod "swift-storage-0" (UID: "fa551db1-9a4b-45c2-b640-4f3518b162f4") : configmap "swift-ring-files" not found Jan 20 17:39:01 crc kubenswrapper[4558]: I0120 17:39:01.041810 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"4d2b64b6-6310-4348-a016-e7d8317e00d9","Type":"ContainerStarted","Data":"e7e585f1de0828e950c0e7d6180c3e24f82d1e80ebef4c5a53c3cd4c1b15c7a5"} Jan 20 17:39:01 crc kubenswrapper[4558]: I0120 17:39:01.042197 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:39:01 crc kubenswrapper[4558]: I0120 17:39:01.042215 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"4d2b64b6-6310-4348-a016-e7d8317e00d9","Type":"ContainerStarted","Data":"80d1619f7e0d6d2bffa7bc9443ac70ae2f905fe8d2fed37e22b75d4144f64fe5"} Jan 20 17:39:01 crc kubenswrapper[4558]: I0120 17:39:01.042226 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"4d2b64b6-6310-4348-a016-e7d8317e00d9","Type":"ContainerStarted","Data":"e1d56266d8d5673b7b6627971e1dc6465a7f9a444268dc8898704703a6240197"} Jan 20 17:39:01 crc kubenswrapper[4558]: I0120 17:39:01.061695 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ovn-northd-0" podStartSLOduration=2.061683148 podStartE2EDuration="2.061683148s" podCreationTimestamp="2026-01-20 17:38:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:39:01.056549867 +0000 UTC m=+3434.816887834" watchObservedRunningTime="2026-01-20 17:39:01.061683148 +0000 UTC m=+3434.822021115" Jan 20 17:39:01 crc kubenswrapper[4558]: I0120 17:39:01.353122 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-db-create-fznd5"] Jan 20 17:39:01 crc kubenswrapper[4558]: E0120 17:39:01.353666 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0a030561-c5c2-4b2b-87fd-677bde229db8" containerName="mariadb-database-create" Jan 20 17:39:01 crc kubenswrapper[4558]: I0120 17:39:01.353694 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0a030561-c5c2-4b2b-87fd-677bde229db8" containerName="mariadb-database-create" Jan 20 17:39:01 crc kubenswrapper[4558]: E0120 17:39:01.353724 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4e31f5d-cc80-4318-8249-aa2bef62b79a" containerName="mariadb-account-create-update" Jan 20 17:39:01 crc kubenswrapper[4558]: I0120 17:39:01.353732 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4e31f5d-cc80-4318-8249-aa2bef62b79a" containerName="mariadb-account-create-update" Jan 20 17:39:01 crc kubenswrapper[4558]: E0120 17:39:01.353750 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b507c13-bb43-43c8-b5c6-7194f9355516" containerName="mariadb-database-create" Jan 20 17:39:01 crc kubenswrapper[4558]: I0120 17:39:01.353758 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b507c13-bb43-43c8-b5c6-7194f9355516" containerName="mariadb-database-create" Jan 20 17:39:01 crc kubenswrapper[4558]: E0120 17:39:01.353769 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25182339-1f27-4f20-9fee-e132979e83f3" 
containerName="mariadb-account-create-update" Jan 20 17:39:01 crc kubenswrapper[4558]: I0120 17:39:01.353776 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="25182339-1f27-4f20-9fee-e132979e83f3" containerName="mariadb-account-create-update" Jan 20 17:39:01 crc kubenswrapper[4558]: I0120 17:39:01.354013 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4e31f5d-cc80-4318-8249-aa2bef62b79a" containerName="mariadb-account-create-update" Jan 20 17:39:01 crc kubenswrapper[4558]: I0120 17:39:01.354044 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="25182339-1f27-4f20-9fee-e132979e83f3" containerName="mariadb-account-create-update" Jan 20 17:39:01 crc kubenswrapper[4558]: I0120 17:39:01.354059 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="4b507c13-bb43-43c8-b5c6-7194f9355516" containerName="mariadb-database-create" Jan 20 17:39:01 crc kubenswrapper[4558]: I0120 17:39:01.354076 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="0a030561-c5c2-4b2b-87fd-677bde229db8" containerName="mariadb-database-create" Jan 20 17:39:01 crc kubenswrapper[4558]: I0120 17:39:01.356154 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-db-create-fznd5" Jan 20 17:39:01 crc kubenswrapper[4558]: I0120 17:39:01.375572 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-db-create-fznd5"] Jan 20 17:39:01 crc kubenswrapper[4558]: I0120 17:39:01.456260 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-8c6b-account-create-update-hgzwx"] Jan 20 17:39:01 crc kubenswrapper[4558]: I0120 17:39:01.457944 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-8c6b-account-create-update-hgzwx" Jan 20 17:39:01 crc kubenswrapper[4558]: I0120 17:39:01.459801 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-db-secret" Jan 20 17:39:01 crc kubenswrapper[4558]: I0120 17:39:01.461885 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-8c6b-account-create-update-hgzwx"] Jan 20 17:39:01 crc kubenswrapper[4558]: I0120 17:39:01.516884 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a2428daa-4062-491f-8da0-af1d11e7d327-operator-scripts\") pod \"glance-db-create-fznd5\" (UID: \"a2428daa-4062-491f-8da0-af1d11e7d327\") " pod="openstack-kuttl-tests/glance-db-create-fznd5" Jan 20 17:39:01 crc kubenswrapper[4558]: I0120 17:39:01.517238 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-csf72\" (UniqueName: \"kubernetes.io/projected/a2428daa-4062-491f-8da0-af1d11e7d327-kube-api-access-csf72\") pod \"glance-db-create-fznd5\" (UID: \"a2428daa-4062-491f-8da0-af1d11e7d327\") " pod="openstack-kuttl-tests/glance-db-create-fznd5" Jan 20 17:39:01 crc kubenswrapper[4558]: I0120 17:39:01.619242 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4h2rt\" (UniqueName: \"kubernetes.io/projected/af51a5e8-6b1c-4d83-bcc0-6be598211cad-kube-api-access-4h2rt\") pod \"glance-8c6b-account-create-update-hgzwx\" (UID: \"af51a5e8-6b1c-4d83-bcc0-6be598211cad\") " pod="openstack-kuttl-tests/glance-8c6b-account-create-update-hgzwx" Jan 20 17:39:01 crc kubenswrapper[4558]: 
I0120 17:39:01.619367 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-csf72\" (UniqueName: \"kubernetes.io/projected/a2428daa-4062-491f-8da0-af1d11e7d327-kube-api-access-csf72\") pod \"glance-db-create-fznd5\" (UID: \"a2428daa-4062-491f-8da0-af1d11e7d327\") " pod="openstack-kuttl-tests/glance-db-create-fznd5" Jan 20 17:39:01 crc kubenswrapper[4558]: I0120 17:39:01.619418 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/af51a5e8-6b1c-4d83-bcc0-6be598211cad-operator-scripts\") pod \"glance-8c6b-account-create-update-hgzwx\" (UID: \"af51a5e8-6b1c-4d83-bcc0-6be598211cad\") " pod="openstack-kuttl-tests/glance-8c6b-account-create-update-hgzwx" Jan 20 17:39:01 crc kubenswrapper[4558]: I0120 17:39:01.619582 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a2428daa-4062-491f-8da0-af1d11e7d327-operator-scripts\") pod \"glance-db-create-fznd5\" (UID: \"a2428daa-4062-491f-8da0-af1d11e7d327\") " pod="openstack-kuttl-tests/glance-db-create-fznd5" Jan 20 17:39:01 crc kubenswrapper[4558]: I0120 17:39:01.620469 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a2428daa-4062-491f-8da0-af1d11e7d327-operator-scripts\") pod \"glance-db-create-fznd5\" (UID: \"a2428daa-4062-491f-8da0-af1d11e7d327\") " pod="openstack-kuttl-tests/glance-db-create-fznd5" Jan 20 17:39:01 crc kubenswrapper[4558]: I0120 17:39:01.642356 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-csf72\" (UniqueName: \"kubernetes.io/projected/a2428daa-4062-491f-8da0-af1d11e7d327-kube-api-access-csf72\") pod \"glance-db-create-fznd5\" (UID: \"a2428daa-4062-491f-8da0-af1d11e7d327\") " pod="openstack-kuttl-tests/glance-db-create-fznd5" Jan 20 17:39:01 crc kubenswrapper[4558]: I0120 17:39:01.675360 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-db-create-fznd5" Jan 20 17:39:01 crc kubenswrapper[4558]: I0120 17:39:01.721638 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4h2rt\" (UniqueName: \"kubernetes.io/projected/af51a5e8-6b1c-4d83-bcc0-6be598211cad-kube-api-access-4h2rt\") pod \"glance-8c6b-account-create-update-hgzwx\" (UID: \"af51a5e8-6b1c-4d83-bcc0-6be598211cad\") " pod="openstack-kuttl-tests/glance-8c6b-account-create-update-hgzwx" Jan 20 17:39:01 crc kubenswrapper[4558]: I0120 17:39:01.721740 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/af51a5e8-6b1c-4d83-bcc0-6be598211cad-operator-scripts\") pod \"glance-8c6b-account-create-update-hgzwx\" (UID: \"af51a5e8-6b1c-4d83-bcc0-6be598211cad\") " pod="openstack-kuttl-tests/glance-8c6b-account-create-update-hgzwx" Jan 20 17:39:01 crc kubenswrapper[4558]: I0120 17:39:01.723294 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/af51a5e8-6b1c-4d83-bcc0-6be598211cad-operator-scripts\") pod \"glance-8c6b-account-create-update-hgzwx\" (UID: \"af51a5e8-6b1c-4d83-bcc0-6be598211cad\") " pod="openstack-kuttl-tests/glance-8c6b-account-create-update-hgzwx" Jan 20 17:39:01 crc kubenswrapper[4558]: I0120 17:39:01.740502 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4h2rt\" (UniqueName: \"kubernetes.io/projected/af51a5e8-6b1c-4d83-bcc0-6be598211cad-kube-api-access-4h2rt\") pod \"glance-8c6b-account-create-update-hgzwx\" (UID: \"af51a5e8-6b1c-4d83-bcc0-6be598211cad\") " pod="openstack-kuttl-tests/glance-8c6b-account-create-update-hgzwx" Jan 20 17:39:01 crc kubenswrapper[4558]: I0120 17:39:01.776899 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-8c6b-account-create-update-hgzwx" Jan 20 17:39:02 crc kubenswrapper[4558]: I0120 17:39:02.098947 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-db-create-fznd5"] Jan 20 17:39:02 crc kubenswrapper[4558]: W0120 17:39:02.103973 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda2428daa_4062_491f_8da0_af1d11e7d327.slice/crio-fd81f37fb2d7737c30479cbb4618cc874f25691f222e1fc92a687d7554237ee1 WatchSource:0}: Error finding container fd81f37fb2d7737c30479cbb4618cc874f25691f222e1fc92a687d7554237ee1: Status 404 returned error can't find the container with id fd81f37fb2d7737c30479cbb4618cc874f25691f222e1fc92a687d7554237ee1 Jan 20 17:39:02 crc kubenswrapper[4558]: I0120 17:39:02.231126 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-8c6b-account-create-update-hgzwx"] Jan 20 17:39:02 crc kubenswrapper[4558]: W0120 17:39:02.234703 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaf51a5e8_6b1c_4d83_bcc0_6be598211cad.slice/crio-5e3d164b92828dbad19e5acc82f471f8604c79e0678759a0471c250c9b760200 WatchSource:0}: Error finding container 5e3d164b92828dbad19e5acc82f471f8604c79e0678759a0471c250c9b760200: Status 404 returned error can't find the container with id 5e3d164b92828dbad19e5acc82f471f8604c79e0678759a0471c250c9b760200 Jan 20 17:39:02 crc kubenswrapper[4558]: I0120 17:39:02.645932 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/fa551db1-9a4b-45c2-b640-4f3518b162f4-etc-swift\") pod \"swift-storage-0\" (UID: \"fa551db1-9a4b-45c2-b640-4f3518b162f4\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:39:02 crc kubenswrapper[4558]: E0120 17:39:02.646238 4558 projected.go:288] Couldn't get configMap openstack-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 20 17:39:02 crc kubenswrapper[4558]: E0120 17:39:02.646272 4558 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Jan 20 17:39:02 crc kubenswrapper[4558]: E0120 17:39:02.646361 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/fa551db1-9a4b-45c2-b640-4f3518b162f4-etc-swift podName:fa551db1-9a4b-45c2-b640-4f3518b162f4 nodeName:}" failed. No retries permitted until 2026-01-20 17:39:06.646333871 +0000 UTC m=+3440.406671848 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/fa551db1-9a4b-45c2-b640-4f3518b162f4-etc-swift") pod "swift-storage-0" (UID: "fa551db1-9a4b-45c2-b640-4f3518b162f4") : configmap "swift-ring-files" not found Jan 20 17:39:02 crc kubenswrapper[4558]: I0120 17:39:02.807265 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-dr9kl"] Jan 20 17:39:02 crc kubenswrapper[4558]: I0120 17:39:02.808278 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/swift-ring-rebalance-dr9kl" Jan 20 17:39:02 crc kubenswrapper[4558]: I0120 17:39:02.816694 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"swift-proxy-config-data" Jan 20 17:39:02 crc kubenswrapper[4558]: I0120 17:39:02.816793 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"swift-ring-scripts" Jan 20 17:39:02 crc kubenswrapper[4558]: I0120 17:39:02.816903 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"swift-ring-config-data" Jan 20 17:39:02 crc kubenswrapper[4558]: I0120 17:39:02.832574 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-dr9kl"] Jan 20 17:39:02 crc kubenswrapper[4558]: E0120 17:39:02.833286 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[combined-ca-bundle dispersionconf etc-swift kube-api-access-l9bcq ring-data-devices scripts swiftconf], unattached volumes=[], failed to process volumes=[combined-ca-bundle dispersionconf etc-swift kube-api-access-l9bcq ring-data-devices scripts swiftconf]: context canceled" pod="openstack-kuttl-tests/swift-ring-rebalance-dr9kl" podUID="b9f0d707-92b8-424a-8442-f7c98cc83624" Jan 20 17:39:02 crc kubenswrapper[4558]: I0120 17:39:02.842752 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-5mvzb"] Jan 20 17:39:02 crc kubenswrapper[4558]: I0120 17:39:02.843689 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-ring-rebalance-5mvzb" Jan 20 17:39:02 crc kubenswrapper[4558]: I0120 17:39:02.849994 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-dr9kl"] Jan 20 17:39:02 crc kubenswrapper[4558]: I0120 17:39:02.861607 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-5mvzb"] Jan 20 17:39:02 crc kubenswrapper[4558]: I0120 17:39:02.951906 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l9bcq\" (UniqueName: \"kubernetes.io/projected/b9f0d707-92b8-424a-8442-f7c98cc83624-kube-api-access-l9bcq\") pod \"swift-ring-rebalance-dr9kl\" (UID: \"b9f0d707-92b8-424a-8442-f7c98cc83624\") " pod="openstack-kuttl-tests/swift-ring-rebalance-dr9kl" Jan 20 17:39:02 crc kubenswrapper[4558]: I0120 17:39:02.951999 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9f0d707-92b8-424a-8442-f7c98cc83624-combined-ca-bundle\") pod \"swift-ring-rebalance-dr9kl\" (UID: \"b9f0d707-92b8-424a-8442-f7c98cc83624\") " pod="openstack-kuttl-tests/swift-ring-rebalance-dr9kl" Jan 20 17:39:02 crc kubenswrapper[4558]: I0120 17:39:02.952024 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/b9f0d707-92b8-424a-8442-f7c98cc83624-ring-data-devices\") pod \"swift-ring-rebalance-dr9kl\" (UID: \"b9f0d707-92b8-424a-8442-f7c98cc83624\") " pod="openstack-kuttl-tests/swift-ring-rebalance-dr9kl" Jan 20 17:39:02 crc kubenswrapper[4558]: I0120 17:39:02.952050 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: 
\"kubernetes.io/empty-dir/b9f0d707-92b8-424a-8442-f7c98cc83624-etc-swift\") pod \"swift-ring-rebalance-dr9kl\" (UID: \"b9f0d707-92b8-424a-8442-f7c98cc83624\") " pod="openstack-kuttl-tests/swift-ring-rebalance-dr9kl" Jan 20 17:39:02 crc kubenswrapper[4558]: I0120 17:39:02.952097 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/b9f0d707-92b8-424a-8442-f7c98cc83624-dispersionconf\") pod \"swift-ring-rebalance-dr9kl\" (UID: \"b9f0d707-92b8-424a-8442-f7c98cc83624\") " pod="openstack-kuttl-tests/swift-ring-rebalance-dr9kl" Jan 20 17:39:02 crc kubenswrapper[4558]: I0120 17:39:02.952286 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b9f0d707-92b8-424a-8442-f7c98cc83624-scripts\") pod \"swift-ring-rebalance-dr9kl\" (UID: \"b9f0d707-92b8-424a-8442-f7c98cc83624\") " pod="openstack-kuttl-tests/swift-ring-rebalance-dr9kl" Jan 20 17:39:02 crc kubenswrapper[4558]: I0120 17:39:02.952344 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/8b82b74a-6466-4751-9d8a-7fe34276cde3-ring-data-devices\") pod \"swift-ring-rebalance-5mvzb\" (UID: \"8b82b74a-6466-4751-9d8a-7fe34276cde3\") " pod="openstack-kuttl-tests/swift-ring-rebalance-5mvzb" Jan 20 17:39:02 crc kubenswrapper[4558]: I0120 17:39:02.952412 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8b82b74a-6466-4751-9d8a-7fe34276cde3-scripts\") pod \"swift-ring-rebalance-5mvzb\" (UID: \"8b82b74a-6466-4751-9d8a-7fe34276cde3\") " pod="openstack-kuttl-tests/swift-ring-rebalance-5mvzb" Jan 20 17:39:02 crc kubenswrapper[4558]: I0120 17:39:02.952489 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/8b82b74a-6466-4751-9d8a-7fe34276cde3-swiftconf\") pod \"swift-ring-rebalance-5mvzb\" (UID: \"8b82b74a-6466-4751-9d8a-7fe34276cde3\") " pod="openstack-kuttl-tests/swift-ring-rebalance-5mvzb" Jan 20 17:39:02 crc kubenswrapper[4558]: I0120 17:39:02.952575 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/8b82b74a-6466-4751-9d8a-7fe34276cde3-dispersionconf\") pod \"swift-ring-rebalance-5mvzb\" (UID: \"8b82b74a-6466-4751-9d8a-7fe34276cde3\") " pod="openstack-kuttl-tests/swift-ring-rebalance-5mvzb" Jan 20 17:39:02 crc kubenswrapper[4558]: I0120 17:39:02.952634 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b82b74a-6466-4751-9d8a-7fe34276cde3-combined-ca-bundle\") pod \"swift-ring-rebalance-5mvzb\" (UID: \"8b82b74a-6466-4751-9d8a-7fe34276cde3\") " pod="openstack-kuttl-tests/swift-ring-rebalance-5mvzb" Jan 20 17:39:02 crc kubenswrapper[4558]: I0120 17:39:02.952672 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/8b82b74a-6466-4751-9d8a-7fe34276cde3-etc-swift\") pod \"swift-ring-rebalance-5mvzb\" (UID: \"8b82b74a-6466-4751-9d8a-7fe34276cde3\") " pod="openstack-kuttl-tests/swift-ring-rebalance-5mvzb" Jan 20 17:39:02 crc kubenswrapper[4558]: I0120 
17:39:02.952703 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/b9f0d707-92b8-424a-8442-f7c98cc83624-swiftconf\") pod \"swift-ring-rebalance-dr9kl\" (UID: \"b9f0d707-92b8-424a-8442-f7c98cc83624\") " pod="openstack-kuttl-tests/swift-ring-rebalance-dr9kl" Jan 20 17:39:02 crc kubenswrapper[4558]: I0120 17:39:02.952766 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7lp8k\" (UniqueName: \"kubernetes.io/projected/8b82b74a-6466-4751-9d8a-7fe34276cde3-kube-api-access-7lp8k\") pod \"swift-ring-rebalance-5mvzb\" (UID: \"8b82b74a-6466-4751-9d8a-7fe34276cde3\") " pod="openstack-kuttl-tests/swift-ring-rebalance-5mvzb" Jan 20 17:39:03 crc kubenswrapper[4558]: I0120 17:39:03.053761 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b9f0d707-92b8-424a-8442-f7c98cc83624-scripts\") pod \"swift-ring-rebalance-dr9kl\" (UID: \"b9f0d707-92b8-424a-8442-f7c98cc83624\") " pod="openstack-kuttl-tests/swift-ring-rebalance-dr9kl" Jan 20 17:39:03 crc kubenswrapper[4558]: I0120 17:39:03.054684 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b9f0d707-92b8-424a-8442-f7c98cc83624-scripts\") pod \"swift-ring-rebalance-dr9kl\" (UID: \"b9f0d707-92b8-424a-8442-f7c98cc83624\") " pod="openstack-kuttl-tests/swift-ring-rebalance-dr9kl" Jan 20 17:39:03 crc kubenswrapper[4558]: I0120 17:39:03.054752 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/8b82b74a-6466-4751-9d8a-7fe34276cde3-ring-data-devices\") pod \"swift-ring-rebalance-5mvzb\" (UID: \"8b82b74a-6466-4751-9d8a-7fe34276cde3\") " pod="openstack-kuttl-tests/swift-ring-rebalance-5mvzb" Jan 20 17:39:03 crc kubenswrapper[4558]: I0120 17:39:03.054863 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8b82b74a-6466-4751-9d8a-7fe34276cde3-scripts\") pod \"swift-ring-rebalance-5mvzb\" (UID: \"8b82b74a-6466-4751-9d8a-7fe34276cde3\") " pod="openstack-kuttl-tests/swift-ring-rebalance-5mvzb" Jan 20 17:39:03 crc kubenswrapper[4558]: I0120 17:39:03.054935 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/8b82b74a-6466-4751-9d8a-7fe34276cde3-swiftconf\") pod \"swift-ring-rebalance-5mvzb\" (UID: \"8b82b74a-6466-4751-9d8a-7fe34276cde3\") " pod="openstack-kuttl-tests/swift-ring-rebalance-5mvzb" Jan 20 17:39:03 crc kubenswrapper[4558]: I0120 17:39:03.055673 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/8b82b74a-6466-4751-9d8a-7fe34276cde3-dispersionconf\") pod \"swift-ring-rebalance-5mvzb\" (UID: \"8b82b74a-6466-4751-9d8a-7fe34276cde3\") " pod="openstack-kuttl-tests/swift-ring-rebalance-5mvzb" Jan 20 17:39:03 crc kubenswrapper[4558]: I0120 17:39:03.056296 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b82b74a-6466-4751-9d8a-7fe34276cde3-combined-ca-bundle\") pod \"swift-ring-rebalance-5mvzb\" (UID: \"8b82b74a-6466-4751-9d8a-7fe34276cde3\") " pod="openstack-kuttl-tests/swift-ring-rebalance-5mvzb" Jan 20 17:39:03 crc kubenswrapper[4558]: 
I0120 17:39:03.055609 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8b82b74a-6466-4751-9d8a-7fe34276cde3-scripts\") pod \"swift-ring-rebalance-5mvzb\" (UID: \"8b82b74a-6466-4751-9d8a-7fe34276cde3\") " pod="openstack-kuttl-tests/swift-ring-rebalance-5mvzb" Jan 20 17:39:03 crc kubenswrapper[4558]: I0120 17:39:03.056339 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/8b82b74a-6466-4751-9d8a-7fe34276cde3-etc-swift\") pod \"swift-ring-rebalance-5mvzb\" (UID: \"8b82b74a-6466-4751-9d8a-7fe34276cde3\") " pod="openstack-kuttl-tests/swift-ring-rebalance-5mvzb" Jan 20 17:39:03 crc kubenswrapper[4558]: I0120 17:39:03.056416 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/b9f0d707-92b8-424a-8442-f7c98cc83624-swiftconf\") pod \"swift-ring-rebalance-dr9kl\" (UID: \"b9f0d707-92b8-424a-8442-f7c98cc83624\") " pod="openstack-kuttl-tests/swift-ring-rebalance-dr9kl" Jan 20 17:39:03 crc kubenswrapper[4558]: I0120 17:39:03.055730 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/8b82b74a-6466-4751-9d8a-7fe34276cde3-ring-data-devices\") pod \"swift-ring-rebalance-5mvzb\" (UID: \"8b82b74a-6466-4751-9d8a-7fe34276cde3\") " pod="openstack-kuttl-tests/swift-ring-rebalance-5mvzb" Jan 20 17:39:03 crc kubenswrapper[4558]: I0120 17:39:03.056570 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7lp8k\" (UniqueName: \"kubernetes.io/projected/8b82b74a-6466-4751-9d8a-7fe34276cde3-kube-api-access-7lp8k\") pod \"swift-ring-rebalance-5mvzb\" (UID: \"8b82b74a-6466-4751-9d8a-7fe34276cde3\") " pod="openstack-kuttl-tests/swift-ring-rebalance-5mvzb" Jan 20 17:39:03 crc kubenswrapper[4558]: I0120 17:39:03.056622 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/8b82b74a-6466-4751-9d8a-7fe34276cde3-etc-swift\") pod \"swift-ring-rebalance-5mvzb\" (UID: \"8b82b74a-6466-4751-9d8a-7fe34276cde3\") " pod="openstack-kuttl-tests/swift-ring-rebalance-5mvzb" Jan 20 17:39:03 crc kubenswrapper[4558]: I0120 17:39:03.056700 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l9bcq\" (UniqueName: \"kubernetes.io/projected/b9f0d707-92b8-424a-8442-f7c98cc83624-kube-api-access-l9bcq\") pod \"swift-ring-rebalance-dr9kl\" (UID: \"b9f0d707-92b8-424a-8442-f7c98cc83624\") " pod="openstack-kuttl-tests/swift-ring-rebalance-dr9kl" Jan 20 17:39:03 crc kubenswrapper[4558]: I0120 17:39:03.056814 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9f0d707-92b8-424a-8442-f7c98cc83624-combined-ca-bundle\") pod \"swift-ring-rebalance-dr9kl\" (UID: \"b9f0d707-92b8-424a-8442-f7c98cc83624\") " pod="openstack-kuttl-tests/swift-ring-rebalance-dr9kl" Jan 20 17:39:03 crc kubenswrapper[4558]: I0120 17:39:03.056853 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/b9f0d707-92b8-424a-8442-f7c98cc83624-ring-data-devices\") pod \"swift-ring-rebalance-dr9kl\" (UID: \"b9f0d707-92b8-424a-8442-f7c98cc83624\") " pod="openstack-kuttl-tests/swift-ring-rebalance-dr9kl" Jan 20 17:39:03 crc kubenswrapper[4558]: I0120 
17:39:03.056897 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/b9f0d707-92b8-424a-8442-f7c98cc83624-etc-swift\") pod \"swift-ring-rebalance-dr9kl\" (UID: \"b9f0d707-92b8-424a-8442-f7c98cc83624\") " pod="openstack-kuttl-tests/swift-ring-rebalance-dr9kl" Jan 20 17:39:03 crc kubenswrapper[4558]: I0120 17:39:03.056932 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/b9f0d707-92b8-424a-8442-f7c98cc83624-dispersionconf\") pod \"swift-ring-rebalance-dr9kl\" (UID: \"b9f0d707-92b8-424a-8442-f7c98cc83624\") " pod="openstack-kuttl-tests/swift-ring-rebalance-dr9kl" Jan 20 17:39:03 crc kubenswrapper[4558]: I0120 17:39:03.057916 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/b9f0d707-92b8-424a-8442-f7c98cc83624-etc-swift\") pod \"swift-ring-rebalance-dr9kl\" (UID: \"b9f0d707-92b8-424a-8442-f7c98cc83624\") " pod="openstack-kuttl-tests/swift-ring-rebalance-dr9kl" Jan 20 17:39:03 crc kubenswrapper[4558]: I0120 17:39:03.058018 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/b9f0d707-92b8-424a-8442-f7c98cc83624-ring-data-devices\") pod \"swift-ring-rebalance-dr9kl\" (UID: \"b9f0d707-92b8-424a-8442-f7c98cc83624\") " pod="openstack-kuttl-tests/swift-ring-rebalance-dr9kl" Jan 20 17:39:03 crc kubenswrapper[4558]: I0120 17:39:03.062294 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/b9f0d707-92b8-424a-8442-f7c98cc83624-dispersionconf\") pod \"swift-ring-rebalance-dr9kl\" (UID: \"b9f0d707-92b8-424a-8442-f7c98cc83624\") " pod="openstack-kuttl-tests/swift-ring-rebalance-dr9kl" Jan 20 17:39:03 crc kubenswrapper[4558]: I0120 17:39:03.062804 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/8b82b74a-6466-4751-9d8a-7fe34276cde3-dispersionconf\") pod \"swift-ring-rebalance-5mvzb\" (UID: \"8b82b74a-6466-4751-9d8a-7fe34276cde3\") " pod="openstack-kuttl-tests/swift-ring-rebalance-5mvzb" Jan 20 17:39:03 crc kubenswrapper[4558]: I0120 17:39:03.063805 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/8b82b74a-6466-4751-9d8a-7fe34276cde3-swiftconf\") pod \"swift-ring-rebalance-5mvzb\" (UID: \"8b82b74a-6466-4751-9d8a-7fe34276cde3\") " pod="openstack-kuttl-tests/swift-ring-rebalance-5mvzb" Jan 20 17:39:03 crc kubenswrapper[4558]: I0120 17:39:03.065687 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b82b74a-6466-4751-9d8a-7fe34276cde3-combined-ca-bundle\") pod \"swift-ring-rebalance-5mvzb\" (UID: \"8b82b74a-6466-4751-9d8a-7fe34276cde3\") " pod="openstack-kuttl-tests/swift-ring-rebalance-5mvzb" Jan 20 17:39:03 crc kubenswrapper[4558]: I0120 17:39:03.069286 4558 generic.go:334] "Generic (PLEG): container finished" podID="a2428daa-4062-491f-8da0-af1d11e7d327" containerID="b68f569a935c15b3257c38f101d44f0d8f64b1219ceeefd73b46c7e75346f66b" exitCode=0 Jan 20 17:39:03 crc kubenswrapper[4558]: I0120 17:39:03.069934 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-create-fznd5" 
event={"ID":"a2428daa-4062-491f-8da0-af1d11e7d327","Type":"ContainerDied","Data":"b68f569a935c15b3257c38f101d44f0d8f64b1219ceeefd73b46c7e75346f66b"} Jan 20 17:39:03 crc kubenswrapper[4558]: I0120 17:39:03.069989 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-create-fznd5" event={"ID":"a2428daa-4062-491f-8da0-af1d11e7d327","Type":"ContainerStarted","Data":"fd81f37fb2d7737c30479cbb4618cc874f25691f222e1fc92a687d7554237ee1"} Jan 20 17:39:03 crc kubenswrapper[4558]: I0120 17:39:03.072045 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/b9f0d707-92b8-424a-8442-f7c98cc83624-swiftconf\") pod \"swift-ring-rebalance-dr9kl\" (UID: \"b9f0d707-92b8-424a-8442-f7c98cc83624\") " pod="openstack-kuttl-tests/swift-ring-rebalance-dr9kl" Jan 20 17:39:03 crc kubenswrapper[4558]: I0120 17:39:03.073017 4558 generic.go:334] "Generic (PLEG): container finished" podID="af51a5e8-6b1c-4d83-bcc0-6be598211cad" containerID="450b309d3cda95b42e643d3c9564588a64f72a1b0f850d770b4f59fc19c4efe5" exitCode=0 Jan 20 17:39:03 crc kubenswrapper[4558]: I0120 17:39:03.073099 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-8c6b-account-create-update-hgzwx" event={"ID":"af51a5e8-6b1c-4d83-bcc0-6be598211cad","Type":"ContainerDied","Data":"450b309d3cda95b42e643d3c9564588a64f72a1b0f850d770b4f59fc19c4efe5"} Jan 20 17:39:03 crc kubenswrapper[4558]: I0120 17:39:03.073129 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-8c6b-account-create-update-hgzwx" event={"ID":"af51a5e8-6b1c-4d83-bcc0-6be598211cad","Type":"ContainerStarted","Data":"5e3d164b92828dbad19e5acc82f471f8604c79e0678759a0471c250c9b760200"} Jan 20 17:39:03 crc kubenswrapper[4558]: I0120 17:39:03.073114 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-ring-rebalance-dr9kl" Jan 20 17:39:03 crc kubenswrapper[4558]: I0120 17:39:03.077247 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l9bcq\" (UniqueName: \"kubernetes.io/projected/b9f0d707-92b8-424a-8442-f7c98cc83624-kube-api-access-l9bcq\") pod \"swift-ring-rebalance-dr9kl\" (UID: \"b9f0d707-92b8-424a-8442-f7c98cc83624\") " pod="openstack-kuttl-tests/swift-ring-rebalance-dr9kl" Jan 20 17:39:03 crc kubenswrapper[4558]: I0120 17:39:03.082927 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7lp8k\" (UniqueName: \"kubernetes.io/projected/8b82b74a-6466-4751-9d8a-7fe34276cde3-kube-api-access-7lp8k\") pod \"swift-ring-rebalance-5mvzb\" (UID: \"8b82b74a-6466-4751-9d8a-7fe34276cde3\") " pod="openstack-kuttl-tests/swift-ring-rebalance-5mvzb" Jan 20 17:39:03 crc kubenswrapper[4558]: I0120 17:39:03.093578 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/root-account-create-update-s2cvb"] Jan 20 17:39:03 crc kubenswrapper[4558]: I0120 17:39:03.095592 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-s2cvb" Jan 20 17:39:03 crc kubenswrapper[4558]: I0120 17:39:03.096957 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"openstack-mariadb-root-db-secret" Jan 20 17:39:03 crc kubenswrapper[4558]: I0120 17:39:03.097124 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9f0d707-92b8-424a-8442-f7c98cc83624-combined-ca-bundle\") pod \"swift-ring-rebalance-dr9kl\" (UID: \"b9f0d707-92b8-424a-8442-f7c98cc83624\") " pod="openstack-kuttl-tests/swift-ring-rebalance-dr9kl" Jan 20 17:39:03 crc kubenswrapper[4558]: I0120 17:39:03.114604 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-s2cvb"] Jan 20 17:39:03 crc kubenswrapper[4558]: I0120 17:39:03.140619 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-ring-rebalance-dr9kl" Jan 20 17:39:03 crc kubenswrapper[4558]: I0120 17:39:03.157395 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-ring-rebalance-5mvzb" Jan 20 17:39:03 crc kubenswrapper[4558]: I0120 17:39:03.260939 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/b9f0d707-92b8-424a-8442-f7c98cc83624-swiftconf\") pod \"b9f0d707-92b8-424a-8442-f7c98cc83624\" (UID: \"b9f0d707-92b8-424a-8442-f7c98cc83624\") " Jan 20 17:39:03 crc kubenswrapper[4558]: I0120 17:39:03.260997 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/b9f0d707-92b8-424a-8442-f7c98cc83624-dispersionconf\") pod \"b9f0d707-92b8-424a-8442-f7c98cc83624\" (UID: \"b9f0d707-92b8-424a-8442-f7c98cc83624\") " Jan 20 17:39:03 crc kubenswrapper[4558]: I0120 17:39:03.261059 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b9f0d707-92b8-424a-8442-f7c98cc83624-scripts\") pod \"b9f0d707-92b8-424a-8442-f7c98cc83624\" (UID: \"b9f0d707-92b8-424a-8442-f7c98cc83624\") " Jan 20 17:39:03 crc kubenswrapper[4558]: I0120 17:39:03.261393 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9f0d707-92b8-424a-8442-f7c98cc83624-combined-ca-bundle\") pod \"b9f0d707-92b8-424a-8442-f7c98cc83624\" (UID: \"b9f0d707-92b8-424a-8442-f7c98cc83624\") " Jan 20 17:39:03 crc kubenswrapper[4558]: I0120 17:39:03.261447 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/b9f0d707-92b8-424a-8442-f7c98cc83624-ring-data-devices\") pod \"b9f0d707-92b8-424a-8442-f7c98cc83624\" (UID: \"b9f0d707-92b8-424a-8442-f7c98cc83624\") " Jan 20 17:39:03 crc kubenswrapper[4558]: I0120 17:39:03.261535 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/b9f0d707-92b8-424a-8442-f7c98cc83624-etc-swift\") pod \"b9f0d707-92b8-424a-8442-f7c98cc83624\" (UID: \"b9f0d707-92b8-424a-8442-f7c98cc83624\") " Jan 20 17:39:03 crc kubenswrapper[4558]: I0120 17:39:03.261594 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l9bcq\" (UniqueName: 
\"kubernetes.io/projected/b9f0d707-92b8-424a-8442-f7c98cc83624-kube-api-access-l9bcq\") pod \"b9f0d707-92b8-424a-8442-f7c98cc83624\" (UID: \"b9f0d707-92b8-424a-8442-f7c98cc83624\") " Jan 20 17:39:03 crc kubenswrapper[4558]: I0120 17:39:03.262021 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z56h2\" (UniqueName: \"kubernetes.io/projected/b9e1b497-af23-4b76-a2e2-db0fc24131ca-kube-api-access-z56h2\") pod \"root-account-create-update-s2cvb\" (UID: \"b9e1b497-af23-4b76-a2e2-db0fc24131ca\") " pod="openstack-kuttl-tests/root-account-create-update-s2cvb" Jan 20 17:39:03 crc kubenswrapper[4558]: I0120 17:39:03.262125 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b9f0d707-92b8-424a-8442-f7c98cc83624-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "b9f0d707-92b8-424a-8442-f7c98cc83624" (UID: "b9f0d707-92b8-424a-8442-f7c98cc83624"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:39:03 crc kubenswrapper[4558]: I0120 17:39:03.262191 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b9f0d707-92b8-424a-8442-f7c98cc83624-scripts" (OuterVolumeSpecName: "scripts") pod "b9f0d707-92b8-424a-8442-f7c98cc83624" (UID: "b9f0d707-92b8-424a-8442-f7c98cc83624"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:39:03 crc kubenswrapper[4558]: I0120 17:39:03.262221 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b9e1b497-af23-4b76-a2e2-db0fc24131ca-operator-scripts\") pod \"root-account-create-update-s2cvb\" (UID: \"b9e1b497-af23-4b76-a2e2-db0fc24131ca\") " pod="openstack-kuttl-tests/root-account-create-update-s2cvb" Jan 20 17:39:03 crc kubenswrapper[4558]: I0120 17:39:03.262572 4558 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/b9f0d707-92b8-424a-8442-f7c98cc83624-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:03 crc kubenswrapper[4558]: I0120 17:39:03.262587 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b9f0d707-92b8-424a-8442-f7c98cc83624-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:03 crc kubenswrapper[4558]: I0120 17:39:03.263000 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b9f0d707-92b8-424a-8442-f7c98cc83624-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "b9f0d707-92b8-424a-8442-f7c98cc83624" (UID: "b9f0d707-92b8-424a-8442-f7c98cc83624"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:39:03 crc kubenswrapper[4558]: I0120 17:39:03.264127 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9f0d707-92b8-424a-8442-f7c98cc83624-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "b9f0d707-92b8-424a-8442-f7c98cc83624" (UID: "b9f0d707-92b8-424a-8442-f7c98cc83624"). InnerVolumeSpecName "swiftconf". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:39:03 crc kubenswrapper[4558]: I0120 17:39:03.264186 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9f0d707-92b8-424a-8442-f7c98cc83624-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "b9f0d707-92b8-424a-8442-f7c98cc83624" (UID: "b9f0d707-92b8-424a-8442-f7c98cc83624"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:39:03 crc kubenswrapper[4558]: I0120 17:39:03.264660 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9f0d707-92b8-424a-8442-f7c98cc83624-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b9f0d707-92b8-424a-8442-f7c98cc83624" (UID: "b9f0d707-92b8-424a-8442-f7c98cc83624"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:39:03 crc kubenswrapper[4558]: I0120 17:39:03.264725 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b9f0d707-92b8-424a-8442-f7c98cc83624-kube-api-access-l9bcq" (OuterVolumeSpecName: "kube-api-access-l9bcq") pod "b9f0d707-92b8-424a-8442-f7c98cc83624" (UID: "b9f0d707-92b8-424a-8442-f7c98cc83624"). InnerVolumeSpecName "kube-api-access-l9bcq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:39:03 crc kubenswrapper[4558]: I0120 17:39:03.364761 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b9e1b497-af23-4b76-a2e2-db0fc24131ca-operator-scripts\") pod \"root-account-create-update-s2cvb\" (UID: \"b9e1b497-af23-4b76-a2e2-db0fc24131ca\") " pod="openstack-kuttl-tests/root-account-create-update-s2cvb" Jan 20 17:39:03 crc kubenswrapper[4558]: I0120 17:39:03.365007 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z56h2\" (UniqueName: \"kubernetes.io/projected/b9e1b497-af23-4b76-a2e2-db0fc24131ca-kube-api-access-z56h2\") pod \"root-account-create-update-s2cvb\" (UID: \"b9e1b497-af23-4b76-a2e2-db0fc24131ca\") " pod="openstack-kuttl-tests/root-account-create-update-s2cvb" Jan 20 17:39:03 crc kubenswrapper[4558]: I0120 17:39:03.365283 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9f0d707-92b8-424a-8442-f7c98cc83624-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:03 crc kubenswrapper[4558]: I0120 17:39:03.365303 4558 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/b9f0d707-92b8-424a-8442-f7c98cc83624-ring-data-devices\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:03 crc kubenswrapper[4558]: I0120 17:39:03.365314 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l9bcq\" (UniqueName: \"kubernetes.io/projected/b9f0d707-92b8-424a-8442-f7c98cc83624-kube-api-access-l9bcq\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:03 crc kubenswrapper[4558]: I0120 17:39:03.365326 4558 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/b9f0d707-92b8-424a-8442-f7c98cc83624-swiftconf\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:03 crc kubenswrapper[4558]: I0120 17:39:03.365336 4558 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/b9f0d707-92b8-424a-8442-f7c98cc83624-dispersionconf\") on 
node \"crc\" DevicePath \"\"" Jan 20 17:39:03 crc kubenswrapper[4558]: I0120 17:39:03.366143 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b9e1b497-af23-4b76-a2e2-db0fc24131ca-operator-scripts\") pod \"root-account-create-update-s2cvb\" (UID: \"b9e1b497-af23-4b76-a2e2-db0fc24131ca\") " pod="openstack-kuttl-tests/root-account-create-update-s2cvb" Jan 20 17:39:03 crc kubenswrapper[4558]: I0120 17:39:03.381135 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z56h2\" (UniqueName: \"kubernetes.io/projected/b9e1b497-af23-4b76-a2e2-db0fc24131ca-kube-api-access-z56h2\") pod \"root-account-create-update-s2cvb\" (UID: \"b9e1b497-af23-4b76-a2e2-db0fc24131ca\") " pod="openstack-kuttl-tests/root-account-create-update-s2cvb" Jan 20 17:39:03 crc kubenswrapper[4558]: I0120 17:39:03.452679 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-s2cvb" Jan 20 17:39:03 crc kubenswrapper[4558]: I0120 17:39:03.551031 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-5mvzb"] Jan 20 17:39:03 crc kubenswrapper[4558]: I0120 17:39:03.857825 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-s2cvb"] Jan 20 17:39:04 crc kubenswrapper[4558]: I0120 17:39:04.081385 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-s2cvb" event={"ID":"b9e1b497-af23-4b76-a2e2-db0fc24131ca","Type":"ContainerStarted","Data":"3274979f0e29cb3fdb36de68c7cbda0f264a0e0d285f39a08bf733de6ae50ef0"} Jan 20 17:39:04 crc kubenswrapper[4558]: I0120 17:39:04.081552 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-s2cvb" event={"ID":"b9e1b497-af23-4b76-a2e2-db0fc24131ca","Type":"ContainerStarted","Data":"098d0b6952924d238703e4081cb1977bdde6831fd2fa2db2082d02312946b174"} Jan 20 17:39:04 crc kubenswrapper[4558]: I0120 17:39:04.083178 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-ring-rebalance-5mvzb" event={"ID":"8b82b74a-6466-4751-9d8a-7fe34276cde3","Type":"ContainerStarted","Data":"20d7e0258ccf6ed7848be8e1fccaa5e6f35c7acc697d6f6c239e54a0f6d52637"} Jan 20 17:39:04 crc kubenswrapper[4558]: I0120 17:39:04.083206 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-ring-rebalance-5mvzb" event={"ID":"8b82b74a-6466-4751-9d8a-7fe34276cde3","Type":"ContainerStarted","Data":"795f1f6f2b5c1babd27c958f537abebfb080e46a541eece617bbdbe6654df642"} Jan 20 17:39:04 crc kubenswrapper[4558]: I0120 17:39:04.083346 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/swift-ring-rebalance-dr9kl" Jan 20 17:39:04 crc kubenswrapper[4558]: I0120 17:39:04.099859 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/root-account-create-update-s2cvb" podStartSLOduration=1.099843307 podStartE2EDuration="1.099843307s" podCreationTimestamp="2026-01-20 17:39:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:39:04.093399982 +0000 UTC m=+3437.853737949" watchObservedRunningTime="2026-01-20 17:39:04.099843307 +0000 UTC m=+3437.860181274" Jan 20 17:39:04 crc kubenswrapper[4558]: I0120 17:39:04.110641 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/swift-ring-rebalance-5mvzb" podStartSLOduration=2.110620491 podStartE2EDuration="2.110620491s" podCreationTimestamp="2026-01-20 17:39:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:39:04.106475196 +0000 UTC m=+3437.866813163" watchObservedRunningTime="2026-01-20 17:39:04.110620491 +0000 UTC m=+3437.870958458" Jan 20 17:39:04 crc kubenswrapper[4558]: I0120 17:39:04.148597 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-dr9kl"] Jan 20 17:39:04 crc kubenswrapper[4558]: I0120 17:39:04.160695 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-dr9kl"] Jan 20 17:39:04 crc kubenswrapper[4558]: I0120 17:39:04.517583 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-db-create-fznd5" Jan 20 17:39:04 crc kubenswrapper[4558]: I0120 17:39:04.521693 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-8c6b-account-create-update-hgzwx" Jan 20 17:39:04 crc kubenswrapper[4558]: I0120 17:39:04.585035 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b9f0d707-92b8-424a-8442-f7c98cc83624" path="/var/lib/kubelet/pods/b9f0d707-92b8-424a-8442-f7c98cc83624/volumes" Jan 20 17:39:04 crc kubenswrapper[4558]: I0120 17:39:04.595038 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-csf72\" (UniqueName: \"kubernetes.io/projected/a2428daa-4062-491f-8da0-af1d11e7d327-kube-api-access-csf72\") pod \"a2428daa-4062-491f-8da0-af1d11e7d327\" (UID: \"a2428daa-4062-491f-8da0-af1d11e7d327\") " Jan 20 17:39:04 crc kubenswrapper[4558]: I0120 17:39:04.595158 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a2428daa-4062-491f-8da0-af1d11e7d327-operator-scripts\") pod \"a2428daa-4062-491f-8da0-af1d11e7d327\" (UID: \"a2428daa-4062-491f-8da0-af1d11e7d327\") " Jan 20 17:39:04 crc kubenswrapper[4558]: I0120 17:39:04.595319 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/af51a5e8-6b1c-4d83-bcc0-6be598211cad-operator-scripts\") pod \"af51a5e8-6b1c-4d83-bcc0-6be598211cad\" (UID: \"af51a5e8-6b1c-4d83-bcc0-6be598211cad\") " Jan 20 17:39:04 crc kubenswrapper[4558]: I0120 17:39:04.595369 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4h2rt\" (UniqueName: \"kubernetes.io/projected/af51a5e8-6b1c-4d83-bcc0-6be598211cad-kube-api-access-4h2rt\") pod \"af51a5e8-6b1c-4d83-bcc0-6be598211cad\" (UID: \"af51a5e8-6b1c-4d83-bcc0-6be598211cad\") " Jan 20 17:39:04 crc kubenswrapper[4558]: I0120 17:39:04.598571 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a2428daa-4062-491f-8da0-af1d11e7d327-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a2428daa-4062-491f-8da0-af1d11e7d327" (UID: "a2428daa-4062-491f-8da0-af1d11e7d327"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:39:04 crc kubenswrapper[4558]: I0120 17:39:04.598577 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/af51a5e8-6b1c-4d83-bcc0-6be598211cad-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "af51a5e8-6b1c-4d83-bcc0-6be598211cad" (UID: "af51a5e8-6b1c-4d83-bcc0-6be598211cad"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:39:04 crc kubenswrapper[4558]: I0120 17:39:04.602916 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/af51a5e8-6b1c-4d83-bcc0-6be598211cad-kube-api-access-4h2rt" (OuterVolumeSpecName: "kube-api-access-4h2rt") pod "af51a5e8-6b1c-4d83-bcc0-6be598211cad" (UID: "af51a5e8-6b1c-4d83-bcc0-6be598211cad"). InnerVolumeSpecName "kube-api-access-4h2rt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:39:04 crc kubenswrapper[4558]: I0120 17:39:04.603851 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a2428daa-4062-491f-8da0-af1d11e7d327-kube-api-access-csf72" (OuterVolumeSpecName: "kube-api-access-csf72") pod "a2428daa-4062-491f-8da0-af1d11e7d327" (UID: "a2428daa-4062-491f-8da0-af1d11e7d327"). 
InnerVolumeSpecName "kube-api-access-csf72". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:39:04 crc kubenswrapper[4558]: I0120 17:39:04.697786 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-csf72\" (UniqueName: \"kubernetes.io/projected/a2428daa-4062-491f-8da0-af1d11e7d327-kube-api-access-csf72\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:04 crc kubenswrapper[4558]: I0120 17:39:04.698051 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a2428daa-4062-491f-8da0-af1d11e7d327-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:04 crc kubenswrapper[4558]: I0120 17:39:04.698062 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/af51a5e8-6b1c-4d83-bcc0-6be598211cad-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:04 crc kubenswrapper[4558]: I0120 17:39:04.698074 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4h2rt\" (UniqueName: \"kubernetes.io/projected/af51a5e8-6b1c-4d83-bcc0-6be598211cad-kube-api-access-4h2rt\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:05 crc kubenswrapper[4558]: I0120 17:39:05.092962 4558 generic.go:334] "Generic (PLEG): container finished" podID="b9e1b497-af23-4b76-a2e2-db0fc24131ca" containerID="3274979f0e29cb3fdb36de68c7cbda0f264a0e0d285f39a08bf733de6ae50ef0" exitCode=0 Jan 20 17:39:05 crc kubenswrapper[4558]: I0120 17:39:05.093016 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-s2cvb" event={"ID":"b9e1b497-af23-4b76-a2e2-db0fc24131ca","Type":"ContainerDied","Data":"3274979f0e29cb3fdb36de68c7cbda0f264a0e0d285f39a08bf733de6ae50ef0"} Jan 20 17:39:05 crc kubenswrapper[4558]: I0120 17:39:05.095367 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-create-fznd5" event={"ID":"a2428daa-4062-491f-8da0-af1d11e7d327","Type":"ContainerDied","Data":"fd81f37fb2d7737c30479cbb4618cc874f25691f222e1fc92a687d7554237ee1"} Jan 20 17:39:05 crc kubenswrapper[4558]: I0120 17:39:05.095408 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fd81f37fb2d7737c30479cbb4618cc874f25691f222e1fc92a687d7554237ee1" Jan 20 17:39:05 crc kubenswrapper[4558]: I0120 17:39:05.095475 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-db-create-fznd5" Jan 20 17:39:05 crc kubenswrapper[4558]: I0120 17:39:05.097293 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-8c6b-account-create-update-hgzwx" Jan 20 17:39:05 crc kubenswrapper[4558]: I0120 17:39:05.097277 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-8c6b-account-create-update-hgzwx" event={"ID":"af51a5e8-6b1c-4d83-bcc0-6be598211cad","Type":"ContainerDied","Data":"5e3d164b92828dbad19e5acc82f471f8604c79e0678759a0471c250c9b760200"} Jan 20 17:39:05 crc kubenswrapper[4558]: I0120 17:39:05.097441 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5e3d164b92828dbad19e5acc82f471f8604c79e0678759a0471c250c9b760200" Jan 20 17:39:06 crc kubenswrapper[4558]: I0120 17:39:06.457853 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-s2cvb" Jan 20 17:39:06 crc kubenswrapper[4558]: I0120 17:39:06.592588 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-db-sync-dxp9v"] Jan 20 17:39:06 crc kubenswrapper[4558]: E0120 17:39:06.593056 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af51a5e8-6b1c-4d83-bcc0-6be598211cad" containerName="mariadb-account-create-update" Jan 20 17:39:06 crc kubenswrapper[4558]: I0120 17:39:06.593077 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="af51a5e8-6b1c-4d83-bcc0-6be598211cad" containerName="mariadb-account-create-update" Jan 20 17:39:06 crc kubenswrapper[4558]: E0120 17:39:06.593093 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9e1b497-af23-4b76-a2e2-db0fc24131ca" containerName="mariadb-account-create-update" Jan 20 17:39:06 crc kubenswrapper[4558]: I0120 17:39:06.593102 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9e1b497-af23-4b76-a2e2-db0fc24131ca" containerName="mariadb-account-create-update" Jan 20 17:39:06 crc kubenswrapper[4558]: E0120 17:39:06.593111 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2428daa-4062-491f-8da0-af1d11e7d327" containerName="mariadb-database-create" Jan 20 17:39:06 crc kubenswrapper[4558]: I0120 17:39:06.593117 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2428daa-4062-491f-8da0-af1d11e7d327" containerName="mariadb-database-create" Jan 20 17:39:06 crc kubenswrapper[4558]: I0120 17:39:06.593330 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9e1b497-af23-4b76-a2e2-db0fc24131ca" containerName="mariadb-account-create-update" Jan 20 17:39:06 crc kubenswrapper[4558]: I0120 17:39:06.593348 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a2428daa-4062-491f-8da0-af1d11e7d327" containerName="mariadb-database-create" Jan 20 17:39:06 crc kubenswrapper[4558]: I0120 17:39:06.593361 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="af51a5e8-6b1c-4d83-bcc0-6be598211cad" containerName="mariadb-account-create-update" Jan 20 17:39:06 crc kubenswrapper[4558]: I0120 17:39:06.593957 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-db-sync-dxp9v" Jan 20 17:39:06 crc kubenswrapper[4558]: I0120 17:39:06.595815 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-config-data" Jan 20 17:39:06 crc kubenswrapper[4558]: I0120 17:39:06.598362 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-db-sync-dxp9v"] Jan 20 17:39:06 crc kubenswrapper[4558]: I0120 17:39:06.599491 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-glance-dockercfg-gnzn6" Jan 20 17:39:06 crc kubenswrapper[4558]: I0120 17:39:06.641718 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z56h2\" (UniqueName: \"kubernetes.io/projected/b9e1b497-af23-4b76-a2e2-db0fc24131ca-kube-api-access-z56h2\") pod \"b9e1b497-af23-4b76-a2e2-db0fc24131ca\" (UID: \"b9e1b497-af23-4b76-a2e2-db0fc24131ca\") " Jan 20 17:39:06 crc kubenswrapper[4558]: I0120 17:39:06.642088 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b9e1b497-af23-4b76-a2e2-db0fc24131ca-operator-scripts\") pod \"b9e1b497-af23-4b76-a2e2-db0fc24131ca\" (UID: \"b9e1b497-af23-4b76-a2e2-db0fc24131ca\") " Jan 20 17:39:06 crc kubenswrapper[4558]: I0120 17:39:06.642822 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b9e1b497-af23-4b76-a2e2-db0fc24131ca-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b9e1b497-af23-4b76-a2e2-db0fc24131ca" (UID: "b9e1b497-af23-4b76-a2e2-db0fc24131ca"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:39:06 crc kubenswrapper[4558]: I0120 17:39:06.642910 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57add5a3-3401-425c-aa80-d168c20c4cb9-combined-ca-bundle\") pod \"glance-db-sync-dxp9v\" (UID: \"57add5a3-3401-425c-aa80-d168c20c4cb9\") " pod="openstack-kuttl-tests/glance-db-sync-dxp9v" Jan 20 17:39:06 crc kubenswrapper[4558]: I0120 17:39:06.642975 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/57add5a3-3401-425c-aa80-d168c20c4cb9-config-data\") pod \"glance-db-sync-dxp9v\" (UID: \"57add5a3-3401-425c-aa80-d168c20c4cb9\") " pod="openstack-kuttl-tests/glance-db-sync-dxp9v" Jan 20 17:39:06 crc kubenswrapper[4558]: I0120 17:39:06.643206 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/57add5a3-3401-425c-aa80-d168c20c4cb9-db-sync-config-data\") pod \"glance-db-sync-dxp9v\" (UID: \"57add5a3-3401-425c-aa80-d168c20c4cb9\") " pod="openstack-kuttl-tests/glance-db-sync-dxp9v" Jan 20 17:39:06 crc kubenswrapper[4558]: I0120 17:39:06.643368 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9dff7\" (UniqueName: \"kubernetes.io/projected/57add5a3-3401-425c-aa80-d168c20c4cb9-kube-api-access-9dff7\") pod \"glance-db-sync-dxp9v\" (UID: \"57add5a3-3401-425c-aa80-d168c20c4cb9\") " pod="openstack-kuttl-tests/glance-db-sync-dxp9v" Jan 20 17:39:06 crc kubenswrapper[4558]: I0120 17:39:06.643630 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" 
(UniqueName: \"kubernetes.io/configmap/b9e1b497-af23-4b76-a2e2-db0fc24131ca-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:06 crc kubenswrapper[4558]: I0120 17:39:06.648053 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b9e1b497-af23-4b76-a2e2-db0fc24131ca-kube-api-access-z56h2" (OuterVolumeSpecName: "kube-api-access-z56h2") pod "b9e1b497-af23-4b76-a2e2-db0fc24131ca" (UID: "b9e1b497-af23-4b76-a2e2-db0fc24131ca"). InnerVolumeSpecName "kube-api-access-z56h2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:39:06 crc kubenswrapper[4558]: I0120 17:39:06.745484 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9dff7\" (UniqueName: \"kubernetes.io/projected/57add5a3-3401-425c-aa80-d168c20c4cb9-kube-api-access-9dff7\") pod \"glance-db-sync-dxp9v\" (UID: \"57add5a3-3401-425c-aa80-d168c20c4cb9\") " pod="openstack-kuttl-tests/glance-db-sync-dxp9v" Jan 20 17:39:06 crc kubenswrapper[4558]: I0120 17:39:06.745548 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/fa551db1-9a4b-45c2-b640-4f3518b162f4-etc-swift\") pod \"swift-storage-0\" (UID: \"fa551db1-9a4b-45c2-b640-4f3518b162f4\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:39:06 crc kubenswrapper[4558]: I0120 17:39:06.745720 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57add5a3-3401-425c-aa80-d168c20c4cb9-combined-ca-bundle\") pod \"glance-db-sync-dxp9v\" (UID: \"57add5a3-3401-425c-aa80-d168c20c4cb9\") " pod="openstack-kuttl-tests/glance-db-sync-dxp9v" Jan 20 17:39:06 crc kubenswrapper[4558]: E0120 17:39:06.745743 4558 projected.go:288] Couldn't get configMap openstack-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 20 17:39:06 crc kubenswrapper[4558]: E0120 17:39:06.745771 4558 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Jan 20 17:39:06 crc kubenswrapper[4558]: I0120 17:39:06.745760 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/57add5a3-3401-425c-aa80-d168c20c4cb9-config-data\") pod \"glance-db-sync-dxp9v\" (UID: \"57add5a3-3401-425c-aa80-d168c20c4cb9\") " pod="openstack-kuttl-tests/glance-db-sync-dxp9v" Jan 20 17:39:06 crc kubenswrapper[4558]: E0120 17:39:06.745837 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/fa551db1-9a4b-45c2-b640-4f3518b162f4-etc-swift podName:fa551db1-9a4b-45c2-b640-4f3518b162f4 nodeName:}" failed. No retries permitted until 2026-01-20 17:39:14.745815598 +0000 UTC m=+3448.506153556 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/fa551db1-9a4b-45c2-b640-4f3518b162f4-etc-swift") pod "swift-storage-0" (UID: "fa551db1-9a4b-45c2-b640-4f3518b162f4") : configmap "swift-ring-files" not found Jan 20 17:39:06 crc kubenswrapper[4558]: I0120 17:39:06.745874 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/57add5a3-3401-425c-aa80-d168c20c4cb9-db-sync-config-data\") pod \"glance-db-sync-dxp9v\" (UID: \"57add5a3-3401-425c-aa80-d168c20c4cb9\") " pod="openstack-kuttl-tests/glance-db-sync-dxp9v" Jan 20 17:39:06 crc kubenswrapper[4558]: I0120 17:39:06.746077 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z56h2\" (UniqueName: \"kubernetes.io/projected/b9e1b497-af23-4b76-a2e2-db0fc24131ca-kube-api-access-z56h2\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:06 crc kubenswrapper[4558]: I0120 17:39:06.751138 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/57add5a3-3401-425c-aa80-d168c20c4cb9-db-sync-config-data\") pod \"glance-db-sync-dxp9v\" (UID: \"57add5a3-3401-425c-aa80-d168c20c4cb9\") " pod="openstack-kuttl-tests/glance-db-sync-dxp9v" Jan 20 17:39:06 crc kubenswrapper[4558]: I0120 17:39:06.751220 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57add5a3-3401-425c-aa80-d168c20c4cb9-combined-ca-bundle\") pod \"glance-db-sync-dxp9v\" (UID: \"57add5a3-3401-425c-aa80-d168c20c4cb9\") " pod="openstack-kuttl-tests/glance-db-sync-dxp9v" Jan 20 17:39:06 crc kubenswrapper[4558]: I0120 17:39:06.751251 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/57add5a3-3401-425c-aa80-d168c20c4cb9-config-data\") pod \"glance-db-sync-dxp9v\" (UID: \"57add5a3-3401-425c-aa80-d168c20c4cb9\") " pod="openstack-kuttl-tests/glance-db-sync-dxp9v" Jan 20 17:39:06 crc kubenswrapper[4558]: I0120 17:39:06.766674 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9dff7\" (UniqueName: \"kubernetes.io/projected/57add5a3-3401-425c-aa80-d168c20c4cb9-kube-api-access-9dff7\") pod \"glance-db-sync-dxp9v\" (UID: \"57add5a3-3401-425c-aa80-d168c20c4cb9\") " pod="openstack-kuttl-tests/glance-db-sync-dxp9v" Jan 20 17:39:06 crc kubenswrapper[4558]: I0120 17:39:06.907490 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-db-sync-dxp9v" Jan 20 17:39:07 crc kubenswrapper[4558]: I0120 17:39:07.132724 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-s2cvb" event={"ID":"b9e1b497-af23-4b76-a2e2-db0fc24131ca","Type":"ContainerDied","Data":"098d0b6952924d238703e4081cb1977bdde6831fd2fa2db2082d02312946b174"} Jan 20 17:39:07 crc kubenswrapper[4558]: I0120 17:39:07.133028 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="098d0b6952924d238703e4081cb1977bdde6831fd2fa2db2082d02312946b174" Jan 20 17:39:07 crc kubenswrapper[4558]: I0120 17:39:07.132777 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-s2cvb" Jan 20 17:39:07 crc kubenswrapper[4558]: I0120 17:39:07.337631 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-db-sync-dxp9v"] Jan 20 17:39:07 crc kubenswrapper[4558]: W0120 17:39:07.343280 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod57add5a3_3401_425c_aa80_d168c20c4cb9.slice/crio-d4a5b4c35681100e6e0c997509ac13e7ea6d81b4fa0029130900865946f58eac WatchSource:0}: Error finding container d4a5b4c35681100e6e0c997509ac13e7ea6d81b4fa0029130900865946f58eac: Status 404 returned error can't find the container with id d4a5b4c35681100e6e0c997509ac13e7ea6d81b4fa0029130900865946f58eac Jan 20 17:39:08 crc kubenswrapper[4558]: I0120 17:39:08.147706 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-sync-dxp9v" event={"ID":"57add5a3-3401-425c-aa80-d168c20c4cb9","Type":"ContainerStarted","Data":"1a8261f61c9b6fb46a3461edc080651cc99150b9dbc8b5de2458b82d4f428a9c"} Jan 20 17:39:08 crc kubenswrapper[4558]: I0120 17:39:08.148229 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-sync-dxp9v" event={"ID":"57add5a3-3401-425c-aa80-d168c20c4cb9","Type":"ContainerStarted","Data":"d4a5b4c35681100e6e0c997509ac13e7ea6d81b4fa0029130900865946f58eac"} Jan 20 17:39:09 crc kubenswrapper[4558]: I0120 17:39:09.377256 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-db-sync-dxp9v" podStartSLOduration=3.377237461 podStartE2EDuration="3.377237461s" podCreationTimestamp="2026-01-20 17:39:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:39:08.167478244 +0000 UTC m=+3441.927816212" watchObservedRunningTime="2026-01-20 17:39:09.377237461 +0000 UTC m=+3443.137575428" Jan 20 17:39:09 crc kubenswrapper[4558]: I0120 17:39:09.378435 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-s2cvb"] Jan 20 17:39:09 crc kubenswrapper[4558]: I0120 17:39:09.382186 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-s2cvb"] Jan 20 17:39:10 crc kubenswrapper[4558]: I0120 17:39:10.166755 4558 generic.go:334] "Generic (PLEG): container finished" podID="8b82b74a-6466-4751-9d8a-7fe34276cde3" containerID="20d7e0258ccf6ed7848be8e1fccaa5e6f35c7acc697d6f6c239e54a0f6d52637" exitCode=0 Jan 20 17:39:10 crc kubenswrapper[4558]: I0120 17:39:10.167080 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-ring-rebalance-5mvzb" event={"ID":"8b82b74a-6466-4751-9d8a-7fe34276cde3","Type":"ContainerDied","Data":"20d7e0258ccf6ed7848be8e1fccaa5e6f35c7acc697d6f6c239e54a0f6d52637"} Jan 20 17:39:10 crc kubenswrapper[4558]: I0120 17:39:10.577956 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b9e1b497-af23-4b76-a2e2-db0fc24131ca" path="/var/lib/kubelet/pods/b9e1b497-af23-4b76-a2e2-db0fc24131ca/volumes" Jan 20 17:39:11 crc kubenswrapper[4558]: I0120 17:39:11.476051 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/swift-ring-rebalance-5mvzb" Jan 20 17:39:11 crc kubenswrapper[4558]: I0120 17:39:11.543383 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8b82b74a-6466-4751-9d8a-7fe34276cde3-scripts\") pod \"8b82b74a-6466-4751-9d8a-7fe34276cde3\" (UID: \"8b82b74a-6466-4751-9d8a-7fe34276cde3\") " Jan 20 17:39:11 crc kubenswrapper[4558]: I0120 17:39:11.543531 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/8b82b74a-6466-4751-9d8a-7fe34276cde3-ring-data-devices\") pod \"8b82b74a-6466-4751-9d8a-7fe34276cde3\" (UID: \"8b82b74a-6466-4751-9d8a-7fe34276cde3\") " Jan 20 17:39:11 crc kubenswrapper[4558]: I0120 17:39:11.543677 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/8b82b74a-6466-4751-9d8a-7fe34276cde3-dispersionconf\") pod \"8b82b74a-6466-4751-9d8a-7fe34276cde3\" (UID: \"8b82b74a-6466-4751-9d8a-7fe34276cde3\") " Jan 20 17:39:11 crc kubenswrapper[4558]: I0120 17:39:11.543705 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/8b82b74a-6466-4751-9d8a-7fe34276cde3-swiftconf\") pod \"8b82b74a-6466-4751-9d8a-7fe34276cde3\" (UID: \"8b82b74a-6466-4751-9d8a-7fe34276cde3\") " Jan 20 17:39:11 crc kubenswrapper[4558]: I0120 17:39:11.543732 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/8b82b74a-6466-4751-9d8a-7fe34276cde3-etc-swift\") pod \"8b82b74a-6466-4751-9d8a-7fe34276cde3\" (UID: \"8b82b74a-6466-4751-9d8a-7fe34276cde3\") " Jan 20 17:39:11 crc kubenswrapper[4558]: I0120 17:39:11.543780 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b82b74a-6466-4751-9d8a-7fe34276cde3-combined-ca-bundle\") pod \"8b82b74a-6466-4751-9d8a-7fe34276cde3\" (UID: \"8b82b74a-6466-4751-9d8a-7fe34276cde3\") " Jan 20 17:39:11 crc kubenswrapper[4558]: I0120 17:39:11.543841 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7lp8k\" (UniqueName: \"kubernetes.io/projected/8b82b74a-6466-4751-9d8a-7fe34276cde3-kube-api-access-7lp8k\") pod \"8b82b74a-6466-4751-9d8a-7fe34276cde3\" (UID: \"8b82b74a-6466-4751-9d8a-7fe34276cde3\") " Jan 20 17:39:11 crc kubenswrapper[4558]: I0120 17:39:11.545018 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8b82b74a-6466-4751-9d8a-7fe34276cde3-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "8b82b74a-6466-4751-9d8a-7fe34276cde3" (UID: "8b82b74a-6466-4751-9d8a-7fe34276cde3"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:39:11 crc kubenswrapper[4558]: I0120 17:39:11.545740 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8b82b74a-6466-4751-9d8a-7fe34276cde3-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "8b82b74a-6466-4751-9d8a-7fe34276cde3" (UID: "8b82b74a-6466-4751-9d8a-7fe34276cde3"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:39:11 crc kubenswrapper[4558]: I0120 17:39:11.548504 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8b82b74a-6466-4751-9d8a-7fe34276cde3-kube-api-access-7lp8k" (OuterVolumeSpecName: "kube-api-access-7lp8k") pod "8b82b74a-6466-4751-9d8a-7fe34276cde3" (UID: "8b82b74a-6466-4751-9d8a-7fe34276cde3"). InnerVolumeSpecName "kube-api-access-7lp8k". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:39:11 crc kubenswrapper[4558]: I0120 17:39:11.550309 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8b82b74a-6466-4751-9d8a-7fe34276cde3-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "8b82b74a-6466-4751-9d8a-7fe34276cde3" (UID: "8b82b74a-6466-4751-9d8a-7fe34276cde3"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:39:11 crc kubenswrapper[4558]: I0120 17:39:11.560889 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8b82b74a-6466-4751-9d8a-7fe34276cde3-scripts" (OuterVolumeSpecName: "scripts") pod "8b82b74a-6466-4751-9d8a-7fe34276cde3" (UID: "8b82b74a-6466-4751-9d8a-7fe34276cde3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:39:11 crc kubenswrapper[4558]: I0120 17:39:11.563998 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8b82b74a-6466-4751-9d8a-7fe34276cde3-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "8b82b74a-6466-4751-9d8a-7fe34276cde3" (UID: "8b82b74a-6466-4751-9d8a-7fe34276cde3"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:39:11 crc kubenswrapper[4558]: I0120 17:39:11.566997 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8b82b74a-6466-4751-9d8a-7fe34276cde3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8b82b74a-6466-4751-9d8a-7fe34276cde3" (UID: "8b82b74a-6466-4751-9d8a-7fe34276cde3"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:39:11 crc kubenswrapper[4558]: I0120 17:39:11.647101 4558 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/8b82b74a-6466-4751-9d8a-7fe34276cde3-dispersionconf\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:11 crc kubenswrapper[4558]: I0120 17:39:11.647139 4558 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/8b82b74a-6466-4751-9d8a-7fe34276cde3-swiftconf\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:11 crc kubenswrapper[4558]: I0120 17:39:11.647151 4558 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/8b82b74a-6466-4751-9d8a-7fe34276cde3-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:11 crc kubenswrapper[4558]: I0120 17:39:11.647179 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b82b74a-6466-4751-9d8a-7fe34276cde3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:11 crc kubenswrapper[4558]: I0120 17:39:11.647194 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7lp8k\" (UniqueName: \"kubernetes.io/projected/8b82b74a-6466-4751-9d8a-7fe34276cde3-kube-api-access-7lp8k\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:11 crc kubenswrapper[4558]: I0120 17:39:11.647205 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8b82b74a-6466-4751-9d8a-7fe34276cde3-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:11 crc kubenswrapper[4558]: I0120 17:39:11.647218 4558 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/8b82b74a-6466-4751-9d8a-7fe34276cde3-ring-data-devices\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:12 crc kubenswrapper[4558]: I0120 17:39:12.201351 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-ring-rebalance-5mvzb" Jan 20 17:39:12 crc kubenswrapper[4558]: I0120 17:39:12.201352 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-ring-rebalance-5mvzb" event={"ID":"8b82b74a-6466-4751-9d8a-7fe34276cde3","Type":"ContainerDied","Data":"795f1f6f2b5c1babd27c958f537abebfb080e46a541eece617bbdbe6654df642"} Jan 20 17:39:12 crc kubenswrapper[4558]: I0120 17:39:12.201483 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="795f1f6f2b5c1babd27c958f537abebfb080e46a541eece617bbdbe6654df642" Jan 20 17:39:12 crc kubenswrapper[4558]: I0120 17:39:12.203335 4558 generic.go:334] "Generic (PLEG): container finished" podID="57add5a3-3401-425c-aa80-d168c20c4cb9" containerID="1a8261f61c9b6fb46a3461edc080651cc99150b9dbc8b5de2458b82d4f428a9c" exitCode=0 Jan 20 17:39:12 crc kubenswrapper[4558]: I0120 17:39:12.203417 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-sync-dxp9v" event={"ID":"57add5a3-3401-425c-aa80-d168c20c4cb9","Type":"ContainerDied","Data":"1a8261f61c9b6fb46a3461edc080651cc99150b9dbc8b5de2458b82d4f428a9c"} Jan 20 17:39:13 crc kubenswrapper[4558]: I0120 17:39:13.512371 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-db-sync-dxp9v" Jan 20 17:39:13 crc kubenswrapper[4558]: I0120 17:39:13.580078 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57add5a3-3401-425c-aa80-d168c20c4cb9-combined-ca-bundle\") pod \"57add5a3-3401-425c-aa80-d168c20c4cb9\" (UID: \"57add5a3-3401-425c-aa80-d168c20c4cb9\") " Jan 20 17:39:13 crc kubenswrapper[4558]: I0120 17:39:13.580121 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/57add5a3-3401-425c-aa80-d168c20c4cb9-config-data\") pod \"57add5a3-3401-425c-aa80-d168c20c4cb9\" (UID: \"57add5a3-3401-425c-aa80-d168c20c4cb9\") " Jan 20 17:39:13 crc kubenswrapper[4558]: I0120 17:39:13.580175 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9dff7\" (UniqueName: \"kubernetes.io/projected/57add5a3-3401-425c-aa80-d168c20c4cb9-kube-api-access-9dff7\") pod \"57add5a3-3401-425c-aa80-d168c20c4cb9\" (UID: \"57add5a3-3401-425c-aa80-d168c20c4cb9\") " Jan 20 17:39:13 crc kubenswrapper[4558]: I0120 17:39:13.580264 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/57add5a3-3401-425c-aa80-d168c20c4cb9-db-sync-config-data\") pod \"57add5a3-3401-425c-aa80-d168c20c4cb9\" (UID: \"57add5a3-3401-425c-aa80-d168c20c4cb9\") " Jan 20 17:39:13 crc kubenswrapper[4558]: I0120 17:39:13.584630 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57add5a3-3401-425c-aa80-d168c20c4cb9-kube-api-access-9dff7" (OuterVolumeSpecName: "kube-api-access-9dff7") pod "57add5a3-3401-425c-aa80-d168c20c4cb9" (UID: "57add5a3-3401-425c-aa80-d168c20c4cb9"). InnerVolumeSpecName "kube-api-access-9dff7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:39:13 crc kubenswrapper[4558]: I0120 17:39:13.584947 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/57add5a3-3401-425c-aa80-d168c20c4cb9-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "57add5a3-3401-425c-aa80-d168c20c4cb9" (UID: "57add5a3-3401-425c-aa80-d168c20c4cb9"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:39:13 crc kubenswrapper[4558]: I0120 17:39:13.598088 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/57add5a3-3401-425c-aa80-d168c20c4cb9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "57add5a3-3401-425c-aa80-d168c20c4cb9" (UID: "57add5a3-3401-425c-aa80-d168c20c4cb9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:39:13 crc kubenswrapper[4558]: I0120 17:39:13.610397 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/57add5a3-3401-425c-aa80-d168c20c4cb9-config-data" (OuterVolumeSpecName: "config-data") pod "57add5a3-3401-425c-aa80-d168c20c4cb9" (UID: "57add5a3-3401-425c-aa80-d168c20c4cb9"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:39:13 crc kubenswrapper[4558]: I0120 17:39:13.683118 4558 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/57add5a3-3401-425c-aa80-d168c20c4cb9-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:13 crc kubenswrapper[4558]: I0120 17:39:13.683692 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57add5a3-3401-425c-aa80-d168c20c4cb9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:13 crc kubenswrapper[4558]: I0120 17:39:13.683759 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/57add5a3-3401-425c-aa80-d168c20c4cb9-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:13 crc kubenswrapper[4558]: I0120 17:39:13.683828 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9dff7\" (UniqueName: \"kubernetes.io/projected/57add5a3-3401-425c-aa80-d168c20c4cb9-kube-api-access-9dff7\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:14 crc kubenswrapper[4558]: I0120 17:39:14.224061 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-sync-dxp9v" event={"ID":"57add5a3-3401-425c-aa80-d168c20c4cb9","Type":"ContainerDied","Data":"d4a5b4c35681100e6e0c997509ac13e7ea6d81b4fa0029130900865946f58eac"} Jan 20 17:39:14 crc kubenswrapper[4558]: I0120 17:39:14.224107 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d4a5b4c35681100e6e0c997509ac13e7ea6d81b4fa0029130900865946f58eac" Jan 20 17:39:14 crc kubenswrapper[4558]: I0120 17:39:14.224572 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-db-sync-dxp9v" Jan 20 17:39:14 crc kubenswrapper[4558]: I0120 17:39:14.450908 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/root-account-create-update-tkb42"] Jan 20 17:39:14 crc kubenswrapper[4558]: E0120 17:39:14.451368 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57add5a3-3401-425c-aa80-d168c20c4cb9" containerName="glance-db-sync" Jan 20 17:39:14 crc kubenswrapper[4558]: I0120 17:39:14.451393 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="57add5a3-3401-425c-aa80-d168c20c4cb9" containerName="glance-db-sync" Jan 20 17:39:14 crc kubenswrapper[4558]: E0120 17:39:14.451441 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b82b74a-6466-4751-9d8a-7fe34276cde3" containerName="swift-ring-rebalance" Jan 20 17:39:14 crc kubenswrapper[4558]: I0120 17:39:14.451448 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b82b74a-6466-4751-9d8a-7fe34276cde3" containerName="swift-ring-rebalance" Jan 20 17:39:14 crc kubenswrapper[4558]: I0120 17:39:14.451664 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="57add5a3-3401-425c-aa80-d168c20c4cb9" containerName="glance-db-sync" Jan 20 17:39:14 crc kubenswrapper[4558]: I0120 17:39:14.451685 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8b82b74a-6466-4751-9d8a-7fe34276cde3" containerName="swift-ring-rebalance" Jan 20 17:39:14 crc kubenswrapper[4558]: I0120 17:39:14.453568 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-tkb42" Jan 20 17:39:14 crc kubenswrapper[4558]: I0120 17:39:14.455784 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"openstack-cell1-mariadb-root-db-secret" Jan 20 17:39:14 crc kubenswrapper[4558]: I0120 17:39:14.464265 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-tkb42"] Jan 20 17:39:14 crc kubenswrapper[4558]: I0120 17:39:14.496541 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lsfv2\" (UniqueName: \"kubernetes.io/projected/4f7051ef-1b0c-4147-9dff-9e57de714b50-kube-api-access-lsfv2\") pod \"root-account-create-update-tkb42\" (UID: \"4f7051ef-1b0c-4147-9dff-9e57de714b50\") " pod="openstack-kuttl-tests/root-account-create-update-tkb42" Jan 20 17:39:14 crc kubenswrapper[4558]: I0120 17:39:14.496643 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4f7051ef-1b0c-4147-9dff-9e57de714b50-operator-scripts\") pod \"root-account-create-update-tkb42\" (UID: \"4f7051ef-1b0c-4147-9dff-9e57de714b50\") " pod="openstack-kuttl-tests/root-account-create-update-tkb42" Jan 20 17:39:14 crc kubenswrapper[4558]: I0120 17:39:14.598872 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lsfv2\" (UniqueName: \"kubernetes.io/projected/4f7051ef-1b0c-4147-9dff-9e57de714b50-kube-api-access-lsfv2\") pod \"root-account-create-update-tkb42\" (UID: \"4f7051ef-1b0c-4147-9dff-9e57de714b50\") " pod="openstack-kuttl-tests/root-account-create-update-tkb42" Jan 20 17:39:14 crc kubenswrapper[4558]: I0120 17:39:14.598998 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4f7051ef-1b0c-4147-9dff-9e57de714b50-operator-scripts\") pod \"root-account-create-update-tkb42\" (UID: \"4f7051ef-1b0c-4147-9dff-9e57de714b50\") " pod="openstack-kuttl-tests/root-account-create-update-tkb42" Jan 20 17:39:14 crc kubenswrapper[4558]: I0120 17:39:14.599871 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4f7051ef-1b0c-4147-9dff-9e57de714b50-operator-scripts\") pod \"root-account-create-update-tkb42\" (UID: \"4f7051ef-1b0c-4147-9dff-9e57de714b50\") " pod="openstack-kuttl-tests/root-account-create-update-tkb42" Jan 20 17:39:14 crc kubenswrapper[4558]: I0120 17:39:14.607445 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:39:14 crc kubenswrapper[4558]: I0120 17:39:14.619449 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lsfv2\" (UniqueName: \"kubernetes.io/projected/4f7051ef-1b0c-4147-9dff-9e57de714b50-kube-api-access-lsfv2\") pod \"root-account-create-update-tkb42\" (UID: \"4f7051ef-1b0c-4147-9dff-9e57de714b50\") " pod="openstack-kuttl-tests/root-account-create-update-tkb42" Jan 20 17:39:14 crc kubenswrapper[4558]: I0120 17:39:14.776044 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-tkb42" Jan 20 17:39:14 crc kubenswrapper[4558]: I0120 17:39:14.804000 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/fa551db1-9a4b-45c2-b640-4f3518b162f4-etc-swift\") pod \"swift-storage-0\" (UID: \"fa551db1-9a4b-45c2-b640-4f3518b162f4\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:39:14 crc kubenswrapper[4558]: I0120 17:39:14.811706 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/fa551db1-9a4b-45c2-b640-4f3518b162f4-etc-swift\") pod \"swift-storage-0\" (UID: \"fa551db1-9a4b-45c2-b640-4f3518b162f4\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:39:14 crc kubenswrapper[4558]: I0120 17:39:14.841216 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:39:15 crc kubenswrapper[4558]: I0120 17:39:15.232459 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-tkb42"] Jan 20 17:39:15 crc kubenswrapper[4558]: W0120 17:39:15.234688 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4f7051ef_1b0c_4147_9dff_9e57de714b50.slice/crio-bb7896e37f24aedcef1de4833b223f6b951a9402e81838ac5b72b4b21b0a8035 WatchSource:0}: Error finding container bb7896e37f24aedcef1de4833b223f6b951a9402e81838ac5b72b4b21b0a8035: Status 404 returned error can't find the container with id bb7896e37f24aedcef1de4833b223f6b951a9402e81838ac5b72b4b21b0a8035 Jan 20 17:39:15 crc kubenswrapper[4558]: I0120 17:39:15.351830 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-storage-0"] Jan 20 17:39:15 crc kubenswrapper[4558]: W0120 17:39:15.353818 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfa551db1_9a4b_45c2_b640_4f3518b162f4.slice/crio-c7e8a0f021c02540371f8f70542f7136469c5d2c70392aaa2060efc6c4587260 WatchSource:0}: Error finding container c7e8a0f021c02540371f8f70542f7136469c5d2c70392aaa2060efc6c4587260: Status 404 returned error can't find the container with id c7e8a0f021c02540371f8f70542f7136469c5d2c70392aaa2060efc6c4587260 Jan 20 17:39:16 crc kubenswrapper[4558]: I0120 17:39:16.250043 4558 generic.go:334] "Generic (PLEG): container finished" podID="4f7051ef-1b0c-4147-9dff-9e57de714b50" containerID="64656c843094c952de334495564ba5189df632fc1d101b32c9cd7770f2e7f3be" exitCode=0 Jan 20 17:39:16 crc kubenswrapper[4558]: I0120 17:39:16.251032 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-tkb42" event={"ID":"4f7051ef-1b0c-4147-9dff-9e57de714b50","Type":"ContainerDied","Data":"64656c843094c952de334495564ba5189df632fc1d101b32c9cd7770f2e7f3be"} Jan 20 17:39:16 crc kubenswrapper[4558]: I0120 17:39:16.251110 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-tkb42" event={"ID":"4f7051ef-1b0c-4147-9dff-9e57de714b50","Type":"ContainerStarted","Data":"bb7896e37f24aedcef1de4833b223f6b951a9402e81838ac5b72b4b21b0a8035"} Jan 20 17:39:16 crc kubenswrapper[4558]: I0120 17:39:16.263378 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" 
event={"ID":"fa551db1-9a4b-45c2-b640-4f3518b162f4","Type":"ContainerStarted","Data":"cbaf44dfc0734a14fd6ecab8c4428e033aaee10d6fd6549b9c1af4209a3025c5"} Jan 20 17:39:16 crc kubenswrapper[4558]: I0120 17:39:16.263437 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fa551db1-9a4b-45c2-b640-4f3518b162f4","Type":"ContainerStarted","Data":"967813e267c1874f848c851b80566f8d4b10ddb11ae1e902fcced650a357a740"} Jan 20 17:39:16 crc kubenswrapper[4558]: I0120 17:39:16.263450 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fa551db1-9a4b-45c2-b640-4f3518b162f4","Type":"ContainerStarted","Data":"c873af103469702cab190e42ac22b360e4b1c8addd7ea94631a91965a8840503"} Jan 20 17:39:16 crc kubenswrapper[4558]: I0120 17:39:16.263460 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fa551db1-9a4b-45c2-b640-4f3518b162f4","Type":"ContainerStarted","Data":"7946458eae645c01b2edae8d93ef429443754ef2e0e2732538ed1fcc7aab4131"} Jan 20 17:39:16 crc kubenswrapper[4558]: I0120 17:39:16.263470 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fa551db1-9a4b-45c2-b640-4f3518b162f4","Type":"ContainerStarted","Data":"c7e8a0f021c02540371f8f70542f7136469c5d2c70392aaa2060efc6c4587260"} Jan 20 17:39:16 crc kubenswrapper[4558]: I0120 17:39:16.264814 4558 generic.go:334] "Generic (PLEG): container finished" podID="de4d8126-91cf-4149-bec4-4accaf558308" containerID="8a98a6a80e25b53e4ba0486d67f1bca724bbd186dd76d5aaf082369a41c0a1d4" exitCode=0 Jan 20 17:39:16 crc kubenswrapper[4558]: I0120 17:39:16.264940 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" event={"ID":"de4d8126-91cf-4149-bec4-4accaf558308","Type":"ContainerDied","Data":"8a98a6a80e25b53e4ba0486d67f1bca724bbd186dd76d5aaf082369a41c0a1d4"} Jan 20 17:39:16 crc kubenswrapper[4558]: I0120 17:39:16.265966 4558 generic.go:334] "Generic (PLEG): container finished" podID="4765e529-9729-4d27-a252-c0c9a7b67beb" containerID="d93a1933ae3d32819f090f6a7b2f44526a5ddb9fbca636dcce7b4b06b41e979c" exitCode=0 Jan 20 17:39:16 crc kubenswrapper[4558]: I0120 17:39:16.266014 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-0" event={"ID":"4765e529-9729-4d27-a252-c0c9a7b67beb","Type":"ContainerDied","Data":"d93a1933ae3d32819f090f6a7b2f44526a5ddb9fbca636dcce7b4b06b41e979c"} Jan 20 17:39:17 crc kubenswrapper[4558]: I0120 17:39:17.278920 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-0" event={"ID":"4765e529-9729-4d27-a252-c0c9a7b67beb","Type":"ContainerStarted","Data":"28200cb794147f2ba9210855e02aa06f0b33943f75d1f48cedc60cedc86b6d96"} Jan 20 17:39:17 crc kubenswrapper[4558]: I0120 17:39:17.279239 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:39:17 crc kubenswrapper[4558]: I0120 17:39:17.285445 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fa551db1-9a4b-45c2-b640-4f3518b162f4","Type":"ContainerStarted","Data":"db06ae2f5dcfb82f3b41bda4c320f201a9c03b7b5c3d71c0c19a43661eba2751"} Jan 20 17:39:17 crc kubenswrapper[4558]: I0120 17:39:17.285502 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" 
event={"ID":"fa551db1-9a4b-45c2-b640-4f3518b162f4","Type":"ContainerStarted","Data":"1776b65beebb985be1fcd79b9aa133812c590cf0eb9a13dd5bf41981563e77b5"} Jan 20 17:39:17 crc kubenswrapper[4558]: I0120 17:39:17.285514 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fa551db1-9a4b-45c2-b640-4f3518b162f4","Type":"ContainerStarted","Data":"0b9af6175ca2fe405cf0a757e866a177d1e865ed2977ca637413cf1645033f14"} Jan 20 17:39:17 crc kubenswrapper[4558]: I0120 17:39:17.285525 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fa551db1-9a4b-45c2-b640-4f3518b162f4","Type":"ContainerStarted","Data":"35a4222bca9bef05ef01fd815e0b4f831f1296d08622fc5e89a2a361527c22e6"} Jan 20 17:39:17 crc kubenswrapper[4558]: I0120 17:39:17.285537 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fa551db1-9a4b-45c2-b640-4f3518b162f4","Type":"ContainerStarted","Data":"6a4f662ca1f9347dca843ebc69844d9f6f10b499e6d23197e88dd1c43db718b8"} Jan 20 17:39:17 crc kubenswrapper[4558]: I0120 17:39:17.285547 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fa551db1-9a4b-45c2-b640-4f3518b162f4","Type":"ContainerStarted","Data":"13c87e4a18a86e7c3b8ddb00e81ce8b0da89d4d0c280a7e98c30f1e03817bfc3"} Jan 20 17:39:17 crc kubenswrapper[4558]: I0120 17:39:17.285560 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fa551db1-9a4b-45c2-b640-4f3518b162f4","Type":"ContainerStarted","Data":"c784e598ad58ecd5a81e3bf765a648caa06b6e4d7a98116be1483ea1a1916bd6"} Jan 20 17:39:17 crc kubenswrapper[4558]: I0120 17:39:17.287414 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" event={"ID":"de4d8126-91cf-4149-bec4-4accaf558308","Type":"ContainerStarted","Data":"f79892859b62fff49932e58f16de3c5503fa6029ca060df20454e36b9e9f37db"} Jan 20 17:39:17 crc kubenswrapper[4558]: I0120 17:39:17.287724 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:39:17 crc kubenswrapper[4558]: I0120 17:39:17.313854 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/rabbitmq-server-0" podStartSLOduration=36.313836307 podStartE2EDuration="36.313836307s" podCreationTimestamp="2026-01-20 17:38:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:39:17.30137668 +0000 UTC m=+3451.061714647" watchObservedRunningTime="2026-01-20 17:39:17.313836307 +0000 UTC m=+3451.074174274" Jan 20 17:39:17 crc kubenswrapper[4558]: I0120 17:39:17.331799 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" podStartSLOduration=36.331782509 podStartE2EDuration="36.331782509s" podCreationTimestamp="2026-01-20 17:38:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:39:17.329083725 +0000 UTC m=+3451.089421692" watchObservedRunningTime="2026-01-20 17:39:17.331782509 +0000 UTC m=+3451.092120476" Jan 20 17:39:17 crc kubenswrapper[4558]: I0120 17:39:17.605254 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-tkb42" Jan 20 17:39:17 crc kubenswrapper[4558]: I0120 17:39:17.688575 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4f7051ef-1b0c-4147-9dff-9e57de714b50-operator-scripts\") pod \"4f7051ef-1b0c-4147-9dff-9e57de714b50\" (UID: \"4f7051ef-1b0c-4147-9dff-9e57de714b50\") " Jan 20 17:39:17 crc kubenswrapper[4558]: I0120 17:39:17.688726 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lsfv2\" (UniqueName: \"kubernetes.io/projected/4f7051ef-1b0c-4147-9dff-9e57de714b50-kube-api-access-lsfv2\") pod \"4f7051ef-1b0c-4147-9dff-9e57de714b50\" (UID: \"4f7051ef-1b0c-4147-9dff-9e57de714b50\") " Jan 20 17:39:17 crc kubenswrapper[4558]: I0120 17:39:17.689087 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4f7051ef-1b0c-4147-9dff-9e57de714b50-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "4f7051ef-1b0c-4147-9dff-9e57de714b50" (UID: "4f7051ef-1b0c-4147-9dff-9e57de714b50"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:39:17 crc kubenswrapper[4558]: I0120 17:39:17.689542 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4f7051ef-1b0c-4147-9dff-9e57de714b50-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:17 crc kubenswrapper[4558]: I0120 17:39:17.695617 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4f7051ef-1b0c-4147-9dff-9e57de714b50-kube-api-access-lsfv2" (OuterVolumeSpecName: "kube-api-access-lsfv2") pod "4f7051ef-1b0c-4147-9dff-9e57de714b50" (UID: "4f7051ef-1b0c-4147-9dff-9e57de714b50"). InnerVolumeSpecName "kube-api-access-lsfv2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:39:17 crc kubenswrapper[4558]: I0120 17:39:17.790949 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lsfv2\" (UniqueName: \"kubernetes.io/projected/4f7051ef-1b0c-4147-9dff-9e57de714b50-kube-api-access-lsfv2\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:18 crc kubenswrapper[4558]: I0120 17:39:18.303425 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fa551db1-9a4b-45c2-b640-4f3518b162f4","Type":"ContainerStarted","Data":"0978e6e1ade70cc41df3d45431dd51e1520b653984893d0992b4c5d6e1fb3e47"} Jan 20 17:39:18 crc kubenswrapper[4558]: I0120 17:39:18.303480 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fa551db1-9a4b-45c2-b640-4f3518b162f4","Type":"ContainerStarted","Data":"b5ee244c517bb7529681f18dd5be00352c8b0d2d264f9bf4dd993ae3ee88bc9f"} Jan 20 17:39:18 crc kubenswrapper[4558]: I0120 17:39:18.303495 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fa551db1-9a4b-45c2-b640-4f3518b162f4","Type":"ContainerStarted","Data":"6ff615a38b1de5289443af49e872533c8fb33086c5c5f749a5757153445d80db"} Jan 20 17:39:18 crc kubenswrapper[4558]: I0120 17:39:18.303509 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fa551db1-9a4b-45c2-b640-4f3518b162f4","Type":"ContainerStarted","Data":"730e7a8305ea822bf786ff7759b1b076db78be0ddb28d589685ca62f80722994"} Jan 20 17:39:18 crc kubenswrapper[4558]: I0120 17:39:18.304677 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-tkb42" event={"ID":"4f7051ef-1b0c-4147-9dff-9e57de714b50","Type":"ContainerDied","Data":"bb7896e37f24aedcef1de4833b223f6b951a9402e81838ac5b72b4b21b0a8035"} Jan 20 17:39:18 crc kubenswrapper[4558]: I0120 17:39:18.304727 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bb7896e37f24aedcef1de4833b223f6b951a9402e81838ac5b72b4b21b0a8035" Jan 20 17:39:18 crc kubenswrapper[4558]: I0120 17:39:18.304823 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-tkb42" Jan 20 17:39:18 crc kubenswrapper[4558]: I0120 17:39:18.341988 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/swift-storage-0" podStartSLOduration=21.341963061 podStartE2EDuration="21.341963061s" podCreationTimestamp="2026-01-20 17:38:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:39:18.339253877 +0000 UTC m=+3452.099591844" watchObservedRunningTime="2026-01-20 17:39:18.341963061 +0000 UTC m=+3452.102301028" Jan 20 17:39:18 crc kubenswrapper[4558]: I0120 17:39:18.489333 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-7659b55585-bzrvg"] Jan 20 17:39:18 crc kubenswrapper[4558]: E0120 17:39:18.489705 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f7051ef-1b0c-4147-9dff-9e57de714b50" containerName="mariadb-account-create-update" Jan 20 17:39:18 crc kubenswrapper[4558]: I0120 17:39:18.489725 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f7051ef-1b0c-4147-9dff-9e57de714b50" containerName="mariadb-account-create-update" Jan 20 17:39:18 crc kubenswrapper[4558]: I0120 17:39:18.489937 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f7051ef-1b0c-4147-9dff-9e57de714b50" containerName="mariadb-account-create-update" Jan 20 17:39:18 crc kubenswrapper[4558]: I0120 17:39:18.490732 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7659b55585-bzrvg" Jan 20 17:39:18 crc kubenswrapper[4558]: I0120 17:39:18.502889 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"dns-swift-storage-0" Jan 20 17:39:18 crc kubenswrapper[4558]: I0120 17:39:18.503493 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-7659b55585-bzrvg"] Jan 20 17:39:18 crc kubenswrapper[4558]: I0120 17:39:18.604367 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/0d54110a-6349-45a3-ad58-1bf105f65293-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-7659b55585-bzrvg\" (UID: \"0d54110a-6349-45a3-ad58-1bf105f65293\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7659b55585-bzrvg" Jan 20 17:39:18 crc kubenswrapper[4558]: I0120 17:39:18.604424 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x6j7l\" (UniqueName: \"kubernetes.io/projected/0d54110a-6349-45a3-ad58-1bf105f65293-kube-api-access-x6j7l\") pod \"dnsmasq-dnsmasq-7659b55585-bzrvg\" (UID: \"0d54110a-6349-45a3-ad58-1bf105f65293\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7659b55585-bzrvg" Jan 20 17:39:18 crc kubenswrapper[4558]: I0120 17:39:18.604557 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0d54110a-6349-45a3-ad58-1bf105f65293-dns-swift-storage-0\") pod \"dnsmasq-dnsmasq-7659b55585-bzrvg\" (UID: \"0d54110a-6349-45a3-ad58-1bf105f65293\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7659b55585-bzrvg" Jan 20 17:39:18 crc kubenswrapper[4558]: I0120 17:39:18.604664 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/0d54110a-6349-45a3-ad58-1bf105f65293-config\") pod \"dnsmasq-dnsmasq-7659b55585-bzrvg\" (UID: \"0d54110a-6349-45a3-ad58-1bf105f65293\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7659b55585-bzrvg" Jan 20 17:39:18 crc kubenswrapper[4558]: I0120 17:39:18.698693 4558 scope.go:117] "RemoveContainer" containerID="fcf35382d515efc646c6a18f5a871bb7d56050c4d19ea44e31380d2ddf0a363c" Jan 20 17:39:18 crc kubenswrapper[4558]: I0120 17:39:18.706830 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/0d54110a-6349-45a3-ad58-1bf105f65293-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-7659b55585-bzrvg\" (UID: \"0d54110a-6349-45a3-ad58-1bf105f65293\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7659b55585-bzrvg" Jan 20 17:39:18 crc kubenswrapper[4558]: I0120 17:39:18.706889 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x6j7l\" (UniqueName: \"kubernetes.io/projected/0d54110a-6349-45a3-ad58-1bf105f65293-kube-api-access-x6j7l\") pod \"dnsmasq-dnsmasq-7659b55585-bzrvg\" (UID: \"0d54110a-6349-45a3-ad58-1bf105f65293\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7659b55585-bzrvg" Jan 20 17:39:18 crc kubenswrapper[4558]: I0120 17:39:18.706966 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0d54110a-6349-45a3-ad58-1bf105f65293-dns-swift-storage-0\") pod \"dnsmasq-dnsmasq-7659b55585-bzrvg\" (UID: \"0d54110a-6349-45a3-ad58-1bf105f65293\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7659b55585-bzrvg" Jan 20 17:39:18 crc kubenswrapper[4558]: I0120 17:39:18.707051 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0d54110a-6349-45a3-ad58-1bf105f65293-config\") pod \"dnsmasq-dnsmasq-7659b55585-bzrvg\" (UID: \"0d54110a-6349-45a3-ad58-1bf105f65293\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7659b55585-bzrvg" Jan 20 17:39:18 crc kubenswrapper[4558]: I0120 17:39:18.708059 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0d54110a-6349-45a3-ad58-1bf105f65293-dns-swift-storage-0\") pod \"dnsmasq-dnsmasq-7659b55585-bzrvg\" (UID: \"0d54110a-6349-45a3-ad58-1bf105f65293\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7659b55585-bzrvg" Jan 20 17:39:18 crc kubenswrapper[4558]: I0120 17:39:18.708317 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/0d54110a-6349-45a3-ad58-1bf105f65293-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-7659b55585-bzrvg\" (UID: \"0d54110a-6349-45a3-ad58-1bf105f65293\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7659b55585-bzrvg" Jan 20 17:39:18 crc kubenswrapper[4558]: I0120 17:39:18.708363 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0d54110a-6349-45a3-ad58-1bf105f65293-config\") pod \"dnsmasq-dnsmasq-7659b55585-bzrvg\" (UID: \"0d54110a-6349-45a3-ad58-1bf105f65293\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7659b55585-bzrvg" Jan 20 17:39:18 crc kubenswrapper[4558]: I0120 17:39:18.736699 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x6j7l\" (UniqueName: \"kubernetes.io/projected/0d54110a-6349-45a3-ad58-1bf105f65293-kube-api-access-x6j7l\") pod \"dnsmasq-dnsmasq-7659b55585-bzrvg\" (UID: 
\"0d54110a-6349-45a3-ad58-1bf105f65293\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7659b55585-bzrvg" Jan 20 17:39:18 crc kubenswrapper[4558]: I0120 17:39:18.756746 4558 scope.go:117] "RemoveContainer" containerID="bd9bc91b84b06a59b8b191189a3f6d1dde72408f2e0800a8c07dc2c238aeebb3" Jan 20 17:39:18 crc kubenswrapper[4558]: I0120 17:39:18.780059 4558 scope.go:117] "RemoveContainer" containerID="d7b53b31033927ce6e8736e2b00e96c96f93bd828ee7bf1b9cf354815ad9c843" Jan 20 17:39:18 crc kubenswrapper[4558]: I0120 17:39:18.806769 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7659b55585-bzrvg" Jan 20 17:39:19 crc kubenswrapper[4558]: I0120 17:39:19.232046 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-7659b55585-bzrvg"] Jan 20 17:39:19 crc kubenswrapper[4558]: W0120 17:39:19.235527 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0d54110a_6349_45a3_ad58_1bf105f65293.slice/crio-2c8c5c07e92cb16568efe46870af2accc71746fe3ee4c0639e7cf51293a77b15 WatchSource:0}: Error finding container 2c8c5c07e92cb16568efe46870af2accc71746fe3ee4c0639e7cf51293a77b15: Status 404 returned error can't find the container with id 2c8c5c07e92cb16568efe46870af2accc71746fe3ee4c0639e7cf51293a77b15 Jan 20 17:39:19 crc kubenswrapper[4558]: I0120 17:39:19.313945 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7659b55585-bzrvg" event={"ID":"0d54110a-6349-45a3-ad58-1bf105f65293","Type":"ContainerStarted","Data":"2c8c5c07e92cb16568efe46870af2accc71746fe3ee4c0639e7cf51293a77b15"} Jan 20 17:39:20 crc kubenswrapper[4558]: I0120 17:39:20.324454 4558 generic.go:334] "Generic (PLEG): container finished" podID="0d54110a-6349-45a3-ad58-1bf105f65293" containerID="5647fbffdf9b85bb314b193f8160e0dcd8c0d803273190ff423a9a019f91e1ab" exitCode=0 Jan 20 17:39:20 crc kubenswrapper[4558]: I0120 17:39:20.324550 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7659b55585-bzrvg" event={"ID":"0d54110a-6349-45a3-ad58-1bf105f65293","Type":"ContainerDied","Data":"5647fbffdf9b85bb314b193f8160e0dcd8c0d803273190ff423a9a019f91e1ab"} Jan 20 17:39:21 crc kubenswrapper[4558]: I0120 17:39:21.260039 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-wcmln"] Jan 20 17:39:21 crc kubenswrapper[4558]: I0120 17:39:21.261916 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-wcmln" Jan 20 17:39:21 crc kubenswrapper[4558]: I0120 17:39:21.274135 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-wcmln"] Jan 20 17:39:21 crc kubenswrapper[4558]: I0120 17:39:21.336834 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7659b55585-bzrvg" event={"ID":"0d54110a-6349-45a3-ad58-1bf105f65293","Type":"ContainerStarted","Data":"83708f9e65c27738fe52f73ee0f27f750807ccf23623168fd5a88b2d1f7fefe6"} Jan 20 17:39:21 crc kubenswrapper[4558]: I0120 17:39:21.353806 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9d898536-f5dc-41be-b0c1-e68f52cf8915-utilities\") pod \"redhat-operators-wcmln\" (UID: \"9d898536-f5dc-41be-b0c1-e68f52cf8915\") " pod="openshift-marketplace/redhat-operators-wcmln" Jan 20 17:39:21 crc kubenswrapper[4558]: I0120 17:39:21.354074 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9d898536-f5dc-41be-b0c1-e68f52cf8915-catalog-content\") pod \"redhat-operators-wcmln\" (UID: \"9d898536-f5dc-41be-b0c1-e68f52cf8915\") " pod="openshift-marketplace/redhat-operators-wcmln" Jan 20 17:39:21 crc kubenswrapper[4558]: I0120 17:39:21.354209 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-29jlk\" (UniqueName: \"kubernetes.io/projected/9d898536-f5dc-41be-b0c1-e68f52cf8915-kube-api-access-29jlk\") pod \"redhat-operators-wcmln\" (UID: \"9d898536-f5dc-41be-b0c1-e68f52cf8915\") " pod="openshift-marketplace/redhat-operators-wcmln" Jan 20 17:39:21 crc kubenswrapper[4558]: I0120 17:39:21.362863 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7659b55585-bzrvg" podStartSLOduration=3.362849951 podStartE2EDuration="3.362849951s" podCreationTimestamp="2026-01-20 17:39:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:39:21.357296709 +0000 UTC m=+3455.117634677" watchObservedRunningTime="2026-01-20 17:39:21.362849951 +0000 UTC m=+3455.123187919" Jan 20 17:39:21 crc kubenswrapper[4558]: I0120 17:39:21.455519 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9d898536-f5dc-41be-b0c1-e68f52cf8915-catalog-content\") pod \"redhat-operators-wcmln\" (UID: \"9d898536-f5dc-41be-b0c1-e68f52cf8915\") " pod="openshift-marketplace/redhat-operators-wcmln" Jan 20 17:39:21 crc kubenswrapper[4558]: I0120 17:39:21.455575 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-29jlk\" (UniqueName: \"kubernetes.io/projected/9d898536-f5dc-41be-b0c1-e68f52cf8915-kube-api-access-29jlk\") pod \"redhat-operators-wcmln\" (UID: \"9d898536-f5dc-41be-b0c1-e68f52cf8915\") " pod="openshift-marketplace/redhat-operators-wcmln" Jan 20 17:39:21 crc kubenswrapper[4558]: I0120 17:39:21.455693 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9d898536-f5dc-41be-b0c1-e68f52cf8915-utilities\") pod \"redhat-operators-wcmln\" (UID: \"9d898536-f5dc-41be-b0c1-e68f52cf8915\") " 
pod="openshift-marketplace/redhat-operators-wcmln" Jan 20 17:39:21 crc kubenswrapper[4558]: I0120 17:39:21.456039 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9d898536-f5dc-41be-b0c1-e68f52cf8915-catalog-content\") pod \"redhat-operators-wcmln\" (UID: \"9d898536-f5dc-41be-b0c1-e68f52cf8915\") " pod="openshift-marketplace/redhat-operators-wcmln" Jan 20 17:39:21 crc kubenswrapper[4558]: I0120 17:39:21.456159 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9d898536-f5dc-41be-b0c1-e68f52cf8915-utilities\") pod \"redhat-operators-wcmln\" (UID: \"9d898536-f5dc-41be-b0c1-e68f52cf8915\") " pod="openshift-marketplace/redhat-operators-wcmln" Jan 20 17:39:21 crc kubenswrapper[4558]: I0120 17:39:21.473573 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-29jlk\" (UniqueName: \"kubernetes.io/projected/9d898536-f5dc-41be-b0c1-e68f52cf8915-kube-api-access-29jlk\") pod \"redhat-operators-wcmln\" (UID: \"9d898536-f5dc-41be-b0c1-e68f52cf8915\") " pod="openshift-marketplace/redhat-operators-wcmln" Jan 20 17:39:21 crc kubenswrapper[4558]: I0120 17:39:21.577249 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-wcmln" Jan 20 17:39:21 crc kubenswrapper[4558]: I0120 17:39:21.976411 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-wcmln"] Jan 20 17:39:22 crc kubenswrapper[4558]: I0120 17:39:22.350918 4558 generic.go:334] "Generic (PLEG): container finished" podID="9d898536-f5dc-41be-b0c1-e68f52cf8915" containerID="2cf42ee1b438a58d93b934d2250d626d524b58ad1bcc00d311a3223ab3551ff9" exitCode=0 Jan 20 17:39:22 crc kubenswrapper[4558]: I0120 17:39:22.351509 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wcmln" event={"ID":"9d898536-f5dc-41be-b0c1-e68f52cf8915","Type":"ContainerDied","Data":"2cf42ee1b438a58d93b934d2250d626d524b58ad1bcc00d311a3223ab3551ff9"} Jan 20 17:39:22 crc kubenswrapper[4558]: I0120 17:39:22.351546 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wcmln" event={"ID":"9d898536-f5dc-41be-b0c1-e68f52cf8915","Type":"ContainerStarted","Data":"a42653d97e4b4ee97bcc2a6ea61e25c08f347e9b710bfae0e881201371f77285"} Jan 20 17:39:22 crc kubenswrapper[4558]: I0120 17:39:22.355309 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7659b55585-bzrvg" Jan 20 17:39:22 crc kubenswrapper[4558]: I0120 17:39:22.357368 4558 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 20 17:39:23 crc kubenswrapper[4558]: I0120 17:39:23.362590 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wcmln" event={"ID":"9d898536-f5dc-41be-b0c1-e68f52cf8915","Type":"ContainerStarted","Data":"51ce38844d208430296ae7b4b68ee9fe10adefcd33f1c7e7c7f8f4527a81b581"} Jan 20 17:39:25 crc kubenswrapper[4558]: I0120 17:39:25.380435 4558 generic.go:334] "Generic (PLEG): container finished" podID="9d898536-f5dc-41be-b0c1-e68f52cf8915" containerID="51ce38844d208430296ae7b4b68ee9fe10adefcd33f1c7e7c7f8f4527a81b581" exitCode=0 Jan 20 17:39:25 crc kubenswrapper[4558]: I0120 17:39:25.380518 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-operators-wcmln" event={"ID":"9d898536-f5dc-41be-b0c1-e68f52cf8915","Type":"ContainerDied","Data":"51ce38844d208430296ae7b4b68ee9fe10adefcd33f1c7e7c7f8f4527a81b581"} Jan 20 17:39:26 crc kubenswrapper[4558]: I0120 17:39:26.405150 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wcmln" event={"ID":"9d898536-f5dc-41be-b0c1-e68f52cf8915","Type":"ContainerStarted","Data":"4efd50b328b99386c5850d1d969d17ec798633220f85dfda8f2f14381fa19921"} Jan 20 17:39:26 crc kubenswrapper[4558]: I0120 17:39:26.420152 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-wcmln" podStartSLOduration=1.8215162440000001 podStartE2EDuration="5.420139146s" podCreationTimestamp="2026-01-20 17:39:21 +0000 UTC" firstStartedPulling="2026-01-20 17:39:22.357039646 +0000 UTC m=+3456.117377613" lastFinishedPulling="2026-01-20 17:39:25.955662548 +0000 UTC m=+3459.716000515" observedRunningTime="2026-01-20 17:39:26.418292104 +0000 UTC m=+3460.178630071" watchObservedRunningTime="2026-01-20 17:39:26.420139146 +0000 UTC m=+3460.180477113" Jan 20 17:39:27 crc kubenswrapper[4558]: I0120 17:39:27.330195 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 17:39:27 crc kubenswrapper[4558]: I0120 17:39:27.330276 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 17:39:28 crc kubenswrapper[4558]: I0120 17:39:28.809388 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7659b55585-bzrvg" Jan 20 17:39:28 crc kubenswrapper[4558]: I0120 17:39:28.858296 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-9hlt9"] Jan 20 17:39:28 crc kubenswrapper[4558]: I0120 17:39:28.858537 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-9hlt9" podUID="cff9fbf3-9b9a-4350-8589-8bfe5543d79f" containerName="dnsmasq-dns" containerID="cri-o://6c262a153185d7a397e55746fc98d2a04ef3c29b4b9f5a65d7f6916bcf01f3c6" gracePeriod=10 Jan 20 17:39:29 crc kubenswrapper[4558]: I0120 17:39:29.445968 4558 generic.go:334] "Generic (PLEG): container finished" podID="cff9fbf3-9b9a-4350-8589-8bfe5543d79f" containerID="6c262a153185d7a397e55746fc98d2a04ef3c29b4b9f5a65d7f6916bcf01f3c6" exitCode=0 Jan 20 17:39:29 crc kubenswrapper[4558]: I0120 17:39:29.446015 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-9hlt9" event={"ID":"cff9fbf3-9b9a-4350-8589-8bfe5543d79f","Type":"ContainerDied","Data":"6c262a153185d7a397e55746fc98d2a04ef3c29b4b9f5a65d7f6916bcf01f3c6"} Jan 20 17:39:29 crc kubenswrapper[4558]: I0120 17:39:29.809836 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-9hlt9" Jan 20 17:39:29 crc kubenswrapper[4558]: I0120 17:39:29.932457 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5ghgz\" (UniqueName: \"kubernetes.io/projected/cff9fbf3-9b9a-4350-8589-8bfe5543d79f-kube-api-access-5ghgz\") pod \"cff9fbf3-9b9a-4350-8589-8bfe5543d79f\" (UID: \"cff9fbf3-9b9a-4350-8589-8bfe5543d79f\") " Jan 20 17:39:29 crc kubenswrapper[4558]: I0120 17:39:29.932541 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/cff9fbf3-9b9a-4350-8589-8bfe5543d79f-dnsmasq-svc\") pod \"cff9fbf3-9b9a-4350-8589-8bfe5543d79f\" (UID: \"cff9fbf3-9b9a-4350-8589-8bfe5543d79f\") " Jan 20 17:39:29 crc kubenswrapper[4558]: I0120 17:39:29.933032 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cff9fbf3-9b9a-4350-8589-8bfe5543d79f-config\") pod \"cff9fbf3-9b9a-4350-8589-8bfe5543d79f\" (UID: \"cff9fbf3-9b9a-4350-8589-8bfe5543d79f\") " Jan 20 17:39:29 crc kubenswrapper[4558]: I0120 17:39:29.939989 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cff9fbf3-9b9a-4350-8589-8bfe5543d79f-kube-api-access-5ghgz" (OuterVolumeSpecName: "kube-api-access-5ghgz") pod "cff9fbf3-9b9a-4350-8589-8bfe5543d79f" (UID: "cff9fbf3-9b9a-4350-8589-8bfe5543d79f"). InnerVolumeSpecName "kube-api-access-5ghgz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:39:29 crc kubenswrapper[4558]: I0120 17:39:29.969138 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cff9fbf3-9b9a-4350-8589-8bfe5543d79f-dnsmasq-svc" (OuterVolumeSpecName: "dnsmasq-svc") pod "cff9fbf3-9b9a-4350-8589-8bfe5543d79f" (UID: "cff9fbf3-9b9a-4350-8589-8bfe5543d79f"). InnerVolumeSpecName "dnsmasq-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:39:29 crc kubenswrapper[4558]: I0120 17:39:29.972238 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cff9fbf3-9b9a-4350-8589-8bfe5543d79f-config" (OuterVolumeSpecName: "config") pod "cff9fbf3-9b9a-4350-8589-8bfe5543d79f" (UID: "cff9fbf3-9b9a-4350-8589-8bfe5543d79f"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:39:30 crc kubenswrapper[4558]: I0120 17:39:30.037688 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cff9fbf3-9b9a-4350-8589-8bfe5543d79f-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:30 crc kubenswrapper[4558]: I0120 17:39:30.037742 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5ghgz\" (UniqueName: \"kubernetes.io/projected/cff9fbf3-9b9a-4350-8589-8bfe5543d79f-kube-api-access-5ghgz\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:30 crc kubenswrapper[4558]: I0120 17:39:30.037758 4558 reconciler_common.go:293] "Volume detached for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/cff9fbf3-9b9a-4350-8589-8bfe5543d79f-dnsmasq-svc\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:30 crc kubenswrapper[4558]: I0120 17:39:30.459191 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-9hlt9" event={"ID":"cff9fbf3-9b9a-4350-8589-8bfe5543d79f","Type":"ContainerDied","Data":"dab4099481fe459aaa7225feaa6d94286a3cae316d7e9858f7841af3998a6df5"} Jan 20 17:39:30 crc kubenswrapper[4558]: I0120 17:39:30.459624 4558 scope.go:117] "RemoveContainer" containerID="6c262a153185d7a397e55746fc98d2a04ef3c29b4b9f5a65d7f6916bcf01f3c6" Jan 20 17:39:30 crc kubenswrapper[4558]: I0120 17:39:30.459266 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-9hlt9" Jan 20 17:39:30 crc kubenswrapper[4558]: I0120 17:39:30.486836 4558 scope.go:117] "RemoveContainer" containerID="e6448305eeffbe8d3fb9ec22b8af62dc179b30760ae423dad6ce0772135d6933" Jan 20 17:39:30 crc kubenswrapper[4558]: I0120 17:39:30.491033 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-9hlt9"] Jan 20 17:39:30 crc kubenswrapper[4558]: I0120 17:39:30.495340 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-9hlt9"] Jan 20 17:39:30 crc kubenswrapper[4558]: I0120 17:39:30.574353 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cff9fbf3-9b9a-4350-8589-8bfe5543d79f" path="/var/lib/kubelet/pods/cff9fbf3-9b9a-4350-8589-8bfe5543d79f/volumes" Jan 20 17:39:31 crc kubenswrapper[4558]: I0120 17:39:31.578284 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-wcmln" Jan 20 17:39:31 crc kubenswrapper[4558]: I0120 17:39:31.578346 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-wcmln" Jan 20 17:39:31 crc kubenswrapper[4558]: I0120 17:39:31.617106 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-wcmln" Jan 20 17:39:32 crc kubenswrapper[4558]: I0120 17:39:32.361389 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:39:32 crc kubenswrapper[4558]: I0120 17:39:32.533497 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-wcmln" Jan 20 17:39:32 crc kubenswrapper[4558]: I0120 17:39:32.579043 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-wcmln"] Jan 20 17:39:32 crc kubenswrapper[4558]: I0120 17:39:32.631123 4558 kubelet.go:2421] "SyncLoop 
ADD" source="api" pods=["openstack-kuttl-tests/barbican-db-create-t2lsm"] Jan 20 17:39:32 crc kubenswrapper[4558]: E0120 17:39:32.631587 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cff9fbf3-9b9a-4350-8589-8bfe5543d79f" containerName="init" Jan 20 17:39:32 crc kubenswrapper[4558]: I0120 17:39:32.631607 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="cff9fbf3-9b9a-4350-8589-8bfe5543d79f" containerName="init" Jan 20 17:39:32 crc kubenswrapper[4558]: E0120 17:39:32.631619 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cff9fbf3-9b9a-4350-8589-8bfe5543d79f" containerName="dnsmasq-dns" Jan 20 17:39:32 crc kubenswrapper[4558]: I0120 17:39:32.631625 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="cff9fbf3-9b9a-4350-8589-8bfe5543d79f" containerName="dnsmasq-dns" Jan 20 17:39:32 crc kubenswrapper[4558]: I0120 17:39:32.631798 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="cff9fbf3-9b9a-4350-8589-8bfe5543d79f" containerName="dnsmasq-dns" Jan 20 17:39:32 crc kubenswrapper[4558]: I0120 17:39:32.632481 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-db-create-t2lsm" Jan 20 17:39:32 crc kubenswrapper[4558]: I0120 17:39:32.646960 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-db-create-t2lsm"] Jan 20 17:39:32 crc kubenswrapper[4558]: I0120 17:39:32.701354 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-677c-account-create-update-c2md6"] Jan 20 17:39:32 crc kubenswrapper[4558]: I0120 17:39:32.702396 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-677c-account-create-update-c2md6" Jan 20 17:39:32 crc kubenswrapper[4558]: I0120 17:39:32.703899 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-db-secret" Jan 20 17:39:32 crc kubenswrapper[4558]: I0120 17:39:32.709113 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-677c-account-create-update-c2md6"] Jan 20 17:39:32 crc kubenswrapper[4558]: I0120 17:39:32.782753 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g5fxp\" (UniqueName: \"kubernetes.io/projected/dc2e997b-bfd4-47cc-981e-30db4c1c8ceb-kube-api-access-g5fxp\") pod \"barbican-db-create-t2lsm\" (UID: \"dc2e997b-bfd4-47cc-981e-30db4c1c8ceb\") " pod="openstack-kuttl-tests/barbican-db-create-t2lsm" Jan 20 17:39:32 crc kubenswrapper[4558]: I0120 17:39:32.783087 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dc2e997b-bfd4-47cc-981e-30db4c1c8ceb-operator-scripts\") pod \"barbican-db-create-t2lsm\" (UID: \"dc2e997b-bfd4-47cc-981e-30db4c1c8ceb\") " pod="openstack-kuttl-tests/barbican-db-create-t2lsm" Jan 20 17:39:32 crc kubenswrapper[4558]: I0120 17:39:32.800711 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-db-create-xrj4d"] Jan 20 17:39:32 crc kubenswrapper[4558]: I0120 17:39:32.802442 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-db-create-xrj4d" Jan 20 17:39:32 crc kubenswrapper[4558]: I0120 17:39:32.821270 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-405d-account-create-update-hmk9w"] Jan 20 17:39:32 crc kubenswrapper[4558]: I0120 17:39:32.822425 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-405d-account-create-update-hmk9w" Jan 20 17:39:32 crc kubenswrapper[4558]: I0120 17:39:32.825957 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-db-secret" Jan 20 17:39:32 crc kubenswrapper[4558]: I0120 17:39:32.834034 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-db-create-xrj4d"] Jan 20 17:39:32 crc kubenswrapper[4558]: I0120 17:39:32.842488 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-405d-account-create-update-hmk9w"] Jan 20 17:39:32 crc kubenswrapper[4558]: I0120 17:39:32.884647 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dfw5x\" (UniqueName: \"kubernetes.io/projected/71426e1b-db2d-4e6a-9382-81c6b239f747-kube-api-access-dfw5x\") pod \"barbican-677c-account-create-update-c2md6\" (UID: \"71426e1b-db2d-4e6a-9382-81c6b239f747\") " pod="openstack-kuttl-tests/barbican-677c-account-create-update-c2md6" Jan 20 17:39:32 crc kubenswrapper[4558]: I0120 17:39:32.884728 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dc2e997b-bfd4-47cc-981e-30db4c1c8ceb-operator-scripts\") pod \"barbican-db-create-t2lsm\" (UID: \"dc2e997b-bfd4-47cc-981e-30db4c1c8ceb\") " pod="openstack-kuttl-tests/barbican-db-create-t2lsm" Jan 20 17:39:32 crc kubenswrapper[4558]: I0120 17:39:32.884892 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g5fxp\" (UniqueName: \"kubernetes.io/projected/dc2e997b-bfd4-47cc-981e-30db4c1c8ceb-kube-api-access-g5fxp\") pod \"barbican-db-create-t2lsm\" (UID: \"dc2e997b-bfd4-47cc-981e-30db4c1c8ceb\") " pod="openstack-kuttl-tests/barbican-db-create-t2lsm" Jan 20 17:39:32 crc kubenswrapper[4558]: I0120 17:39:32.885149 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/71426e1b-db2d-4e6a-9382-81c6b239f747-operator-scripts\") pod \"barbican-677c-account-create-update-c2md6\" (UID: \"71426e1b-db2d-4e6a-9382-81c6b239f747\") " pod="openstack-kuttl-tests/barbican-677c-account-create-update-c2md6" Jan 20 17:39:32 crc kubenswrapper[4558]: I0120 17:39:32.885420 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dc2e997b-bfd4-47cc-981e-30db4c1c8ceb-operator-scripts\") pod \"barbican-db-create-t2lsm\" (UID: \"dc2e997b-bfd4-47cc-981e-30db4c1c8ceb\") " pod="openstack-kuttl-tests/barbican-db-create-t2lsm" Jan 20 17:39:32 crc kubenswrapper[4558]: I0120 17:39:32.904254 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-185c-account-create-update-f294p"] Jan 20 17:39:32 crc kubenswrapper[4558]: I0120 17:39:32.905482 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-185c-account-create-update-f294p" Jan 20 17:39:32 crc kubenswrapper[4558]: I0120 17:39:32.908379 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g5fxp\" (UniqueName: \"kubernetes.io/projected/dc2e997b-bfd4-47cc-981e-30db4c1c8ceb-kube-api-access-g5fxp\") pod \"barbican-db-create-t2lsm\" (UID: \"dc2e997b-bfd4-47cc-981e-30db4c1c8ceb\") " pod="openstack-kuttl-tests/barbican-db-create-t2lsm" Jan 20 17:39:32 crc kubenswrapper[4558]: I0120 17:39:32.909795 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-db-secret" Jan 20 17:39:32 crc kubenswrapper[4558]: I0120 17:39:32.916686 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-db-create-p729z"] Jan 20 17:39:32 crc kubenswrapper[4558]: I0120 17:39:32.918708 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-db-create-p729z" Jan 20 17:39:32 crc kubenswrapper[4558]: I0120 17:39:32.922742 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-db-create-p729z"] Jan 20 17:39:32 crc kubenswrapper[4558]: I0120 17:39:32.928440 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-185c-account-create-update-f294p"] Jan 20 17:39:32 crc kubenswrapper[4558]: I0120 17:39:32.948374 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-db-create-t2lsm" Jan 20 17:39:32 crc kubenswrapper[4558]: I0120 17:39:32.986817 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qz2bc\" (UniqueName: \"kubernetes.io/projected/0cb0fadf-7873-4efe-b8b5-965e4482ebf2-kube-api-access-qz2bc\") pod \"cinder-db-create-xrj4d\" (UID: \"0cb0fadf-7873-4efe-b8b5-965e4482ebf2\") " pod="openstack-kuttl-tests/cinder-db-create-xrj4d" Jan 20 17:39:32 crc kubenswrapper[4558]: I0120 17:39:32.986873 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/11c1b0d7-a15e-431e-bcbf-e01ab670c81e-operator-scripts\") pod \"cinder-405d-account-create-update-hmk9w\" (UID: \"11c1b0d7-a15e-431e-bcbf-e01ab670c81e\") " pod="openstack-kuttl-tests/cinder-405d-account-create-update-hmk9w" Jan 20 17:39:32 crc kubenswrapper[4558]: I0120 17:39:32.986922 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/71426e1b-db2d-4e6a-9382-81c6b239f747-operator-scripts\") pod \"barbican-677c-account-create-update-c2md6\" (UID: \"71426e1b-db2d-4e6a-9382-81c6b239f747\") " pod="openstack-kuttl-tests/barbican-677c-account-create-update-c2md6" Jan 20 17:39:32 crc kubenswrapper[4558]: I0120 17:39:32.986954 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dfw5x\" (UniqueName: \"kubernetes.io/projected/71426e1b-db2d-4e6a-9382-81c6b239f747-kube-api-access-dfw5x\") pod \"barbican-677c-account-create-update-c2md6\" (UID: \"71426e1b-db2d-4e6a-9382-81c6b239f747\") " pod="openstack-kuttl-tests/barbican-677c-account-create-update-c2md6" Jan 20 17:39:32 crc kubenswrapper[4558]: I0120 17:39:32.987307 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qcdnw\" (UniqueName: 
\"kubernetes.io/projected/11c1b0d7-a15e-431e-bcbf-e01ab670c81e-kube-api-access-qcdnw\") pod \"cinder-405d-account-create-update-hmk9w\" (UID: \"11c1b0d7-a15e-431e-bcbf-e01ab670c81e\") " pod="openstack-kuttl-tests/cinder-405d-account-create-update-hmk9w" Jan 20 17:39:32 crc kubenswrapper[4558]: I0120 17:39:32.987381 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0cb0fadf-7873-4efe-b8b5-965e4482ebf2-operator-scripts\") pod \"cinder-db-create-xrj4d\" (UID: \"0cb0fadf-7873-4efe-b8b5-965e4482ebf2\") " pod="openstack-kuttl-tests/cinder-db-create-xrj4d" Jan 20 17:39:32 crc kubenswrapper[4558]: I0120 17:39:32.987956 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/71426e1b-db2d-4e6a-9382-81c6b239f747-operator-scripts\") pod \"barbican-677c-account-create-update-c2md6\" (UID: \"71426e1b-db2d-4e6a-9382-81c6b239f747\") " pod="openstack-kuttl-tests/barbican-677c-account-create-update-c2md6" Jan 20 17:39:32 crc kubenswrapper[4558]: I0120 17:39:32.988810 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-db-sync-pq7q9"] Jan 20 17:39:32 crc kubenswrapper[4558]: I0120 17:39:32.990181 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-db-sync-pq7q9" Jan 20 17:39:32 crc kubenswrapper[4558]: I0120 17:39:32.993315 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone" Jan 20 17:39:32 crc kubenswrapper[4558]: I0120 17:39:32.993523 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-scripts" Jan 20 17:39:32 crc kubenswrapper[4558]: I0120 17:39:32.994558 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-config-data" Jan 20 17:39:32 crc kubenswrapper[4558]: I0120 17:39:32.994720 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-keystone-dockercfg-2rkcv" Jan 20 17:39:33 crc kubenswrapper[4558]: I0120 17:39:33.005141 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-db-sync-pq7q9"] Jan 20 17:39:33 crc kubenswrapper[4558]: I0120 17:39:33.019814 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dfw5x\" (UniqueName: \"kubernetes.io/projected/71426e1b-db2d-4e6a-9382-81c6b239f747-kube-api-access-dfw5x\") pod \"barbican-677c-account-create-update-c2md6\" (UID: \"71426e1b-db2d-4e6a-9382-81c6b239f747\") " pod="openstack-kuttl-tests/barbican-677c-account-create-update-c2md6" Jan 20 17:39:33 crc kubenswrapper[4558]: I0120 17:39:33.046290 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:39:33 crc kubenswrapper[4558]: I0120 17:39:33.096707 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m4kv2\" (UniqueName: \"kubernetes.io/projected/1889de01-2407-450e-b839-e4457e27e214-kube-api-access-m4kv2\") pod \"neutron-185c-account-create-update-f294p\" (UID: \"1889de01-2407-450e-b839-e4457e27e214\") " pod="openstack-kuttl-tests/neutron-185c-account-create-update-f294p" Jan 20 17:39:33 crc kubenswrapper[4558]: I0120 17:39:33.096786 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"kube-api-access-jvqgh\" (UniqueName: \"kubernetes.io/projected/f9b71041-5a81-4d86-9e25-45ecfa08fd58-kube-api-access-jvqgh\") pod \"keystone-db-sync-pq7q9\" (UID: \"f9b71041-5a81-4d86-9e25-45ecfa08fd58\") " pod="openstack-kuttl-tests/keystone-db-sync-pq7q9" Jan 20 17:39:33 crc kubenswrapper[4558]: I0120 17:39:33.096875 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9b71041-5a81-4d86-9e25-45ecfa08fd58-combined-ca-bundle\") pod \"keystone-db-sync-pq7q9\" (UID: \"f9b71041-5a81-4d86-9e25-45ecfa08fd58\") " pod="openstack-kuttl-tests/keystone-db-sync-pq7q9" Jan 20 17:39:33 crc kubenswrapper[4558]: I0120 17:39:33.096906 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rz2lb\" (UniqueName: \"kubernetes.io/projected/e68a5661-3dc2-48c0-9f39-d54304c4127d-kube-api-access-rz2lb\") pod \"neutron-db-create-p729z\" (UID: \"e68a5661-3dc2-48c0-9f39-d54304c4127d\") " pod="openstack-kuttl-tests/neutron-db-create-p729z" Jan 20 17:39:33 crc kubenswrapper[4558]: I0120 17:39:33.096947 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9b71041-5a81-4d86-9e25-45ecfa08fd58-config-data\") pod \"keystone-db-sync-pq7q9\" (UID: \"f9b71041-5a81-4d86-9e25-45ecfa08fd58\") " pod="openstack-kuttl-tests/keystone-db-sync-pq7q9" Jan 20 17:39:33 crc kubenswrapper[4558]: I0120 17:39:33.096982 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e68a5661-3dc2-48c0-9f39-d54304c4127d-operator-scripts\") pod \"neutron-db-create-p729z\" (UID: \"e68a5661-3dc2-48c0-9f39-d54304c4127d\") " pod="openstack-kuttl-tests/neutron-db-create-p729z" Jan 20 17:39:33 crc kubenswrapper[4558]: I0120 17:39:33.097022 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qcdnw\" (UniqueName: \"kubernetes.io/projected/11c1b0d7-a15e-431e-bcbf-e01ab670c81e-kube-api-access-qcdnw\") pod \"cinder-405d-account-create-update-hmk9w\" (UID: \"11c1b0d7-a15e-431e-bcbf-e01ab670c81e\") " pod="openstack-kuttl-tests/cinder-405d-account-create-update-hmk9w" Jan 20 17:39:33 crc kubenswrapper[4558]: I0120 17:39:33.097043 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1889de01-2407-450e-b839-e4457e27e214-operator-scripts\") pod \"neutron-185c-account-create-update-f294p\" (UID: \"1889de01-2407-450e-b839-e4457e27e214\") " pod="openstack-kuttl-tests/neutron-185c-account-create-update-f294p" Jan 20 17:39:33 crc kubenswrapper[4558]: I0120 17:39:33.097079 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0cb0fadf-7873-4efe-b8b5-965e4482ebf2-operator-scripts\") pod \"cinder-db-create-xrj4d\" (UID: \"0cb0fadf-7873-4efe-b8b5-965e4482ebf2\") " pod="openstack-kuttl-tests/cinder-db-create-xrj4d" Jan 20 17:39:33 crc kubenswrapper[4558]: I0120 17:39:33.097133 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qz2bc\" (UniqueName: \"kubernetes.io/projected/0cb0fadf-7873-4efe-b8b5-965e4482ebf2-kube-api-access-qz2bc\") pod \"cinder-db-create-xrj4d\" (UID: \"0cb0fadf-7873-4efe-b8b5-965e4482ebf2\") " 
pod="openstack-kuttl-tests/cinder-db-create-xrj4d" Jan 20 17:39:33 crc kubenswrapper[4558]: I0120 17:39:33.097182 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/11c1b0d7-a15e-431e-bcbf-e01ab670c81e-operator-scripts\") pod \"cinder-405d-account-create-update-hmk9w\" (UID: \"11c1b0d7-a15e-431e-bcbf-e01ab670c81e\") " pod="openstack-kuttl-tests/cinder-405d-account-create-update-hmk9w" Jan 20 17:39:33 crc kubenswrapper[4558]: I0120 17:39:33.098226 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/11c1b0d7-a15e-431e-bcbf-e01ab670c81e-operator-scripts\") pod \"cinder-405d-account-create-update-hmk9w\" (UID: \"11c1b0d7-a15e-431e-bcbf-e01ab670c81e\") " pod="openstack-kuttl-tests/cinder-405d-account-create-update-hmk9w" Jan 20 17:39:33 crc kubenswrapper[4558]: I0120 17:39:33.098719 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0cb0fadf-7873-4efe-b8b5-965e4482ebf2-operator-scripts\") pod \"cinder-db-create-xrj4d\" (UID: \"0cb0fadf-7873-4efe-b8b5-965e4482ebf2\") " pod="openstack-kuttl-tests/cinder-db-create-xrj4d" Jan 20 17:39:33 crc kubenswrapper[4558]: I0120 17:39:33.119155 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qz2bc\" (UniqueName: \"kubernetes.io/projected/0cb0fadf-7873-4efe-b8b5-965e4482ebf2-kube-api-access-qz2bc\") pod \"cinder-db-create-xrj4d\" (UID: \"0cb0fadf-7873-4efe-b8b5-965e4482ebf2\") " pod="openstack-kuttl-tests/cinder-db-create-xrj4d" Jan 20 17:39:33 crc kubenswrapper[4558]: I0120 17:39:33.120380 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qcdnw\" (UniqueName: \"kubernetes.io/projected/11c1b0d7-a15e-431e-bcbf-e01ab670c81e-kube-api-access-qcdnw\") pod \"cinder-405d-account-create-update-hmk9w\" (UID: \"11c1b0d7-a15e-431e-bcbf-e01ab670c81e\") " pod="openstack-kuttl-tests/cinder-405d-account-create-update-hmk9w" Jan 20 17:39:33 crc kubenswrapper[4558]: I0120 17:39:33.120531 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-db-create-xrj4d" Jan 20 17:39:33 crc kubenswrapper[4558]: I0120 17:39:33.134275 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-405d-account-create-update-hmk9w" Jan 20 17:39:33 crc kubenswrapper[4558]: I0120 17:39:33.199327 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m4kv2\" (UniqueName: \"kubernetes.io/projected/1889de01-2407-450e-b839-e4457e27e214-kube-api-access-m4kv2\") pod \"neutron-185c-account-create-update-f294p\" (UID: \"1889de01-2407-450e-b839-e4457e27e214\") " pod="openstack-kuttl-tests/neutron-185c-account-create-update-f294p" Jan 20 17:39:33 crc kubenswrapper[4558]: I0120 17:39:33.200470 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jvqgh\" (UniqueName: \"kubernetes.io/projected/f9b71041-5a81-4d86-9e25-45ecfa08fd58-kube-api-access-jvqgh\") pod \"keystone-db-sync-pq7q9\" (UID: \"f9b71041-5a81-4d86-9e25-45ecfa08fd58\") " pod="openstack-kuttl-tests/keystone-db-sync-pq7q9" Jan 20 17:39:33 crc kubenswrapper[4558]: I0120 17:39:33.201043 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9b71041-5a81-4d86-9e25-45ecfa08fd58-combined-ca-bundle\") pod \"keystone-db-sync-pq7q9\" (UID: \"f9b71041-5a81-4d86-9e25-45ecfa08fd58\") " pod="openstack-kuttl-tests/keystone-db-sync-pq7q9" Jan 20 17:39:33 crc kubenswrapper[4558]: I0120 17:39:33.201445 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rz2lb\" (UniqueName: \"kubernetes.io/projected/e68a5661-3dc2-48c0-9f39-d54304c4127d-kube-api-access-rz2lb\") pod \"neutron-db-create-p729z\" (UID: \"e68a5661-3dc2-48c0-9f39-d54304c4127d\") " pod="openstack-kuttl-tests/neutron-db-create-p729z" Jan 20 17:39:33 crc kubenswrapper[4558]: I0120 17:39:33.203046 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9b71041-5a81-4d86-9e25-45ecfa08fd58-config-data\") pod \"keystone-db-sync-pq7q9\" (UID: \"f9b71041-5a81-4d86-9e25-45ecfa08fd58\") " pod="openstack-kuttl-tests/keystone-db-sync-pq7q9" Jan 20 17:39:33 crc kubenswrapper[4558]: I0120 17:39:33.203206 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e68a5661-3dc2-48c0-9f39-d54304c4127d-operator-scripts\") pod \"neutron-db-create-p729z\" (UID: \"e68a5661-3dc2-48c0-9f39-d54304c4127d\") " pod="openstack-kuttl-tests/neutron-db-create-p729z" Jan 20 17:39:33 crc kubenswrapper[4558]: I0120 17:39:33.203296 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1889de01-2407-450e-b839-e4457e27e214-operator-scripts\") pod \"neutron-185c-account-create-update-f294p\" (UID: \"1889de01-2407-450e-b839-e4457e27e214\") " pod="openstack-kuttl-tests/neutron-185c-account-create-update-f294p" Jan 20 17:39:33 crc kubenswrapper[4558]: I0120 17:39:33.205944 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e68a5661-3dc2-48c0-9f39-d54304c4127d-operator-scripts\") pod \"neutron-db-create-p729z\" (UID: \"e68a5661-3dc2-48c0-9f39-d54304c4127d\") " pod="openstack-kuttl-tests/neutron-db-create-p729z" Jan 20 17:39:33 crc kubenswrapper[4558]: I0120 17:39:33.206663 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/1889de01-2407-450e-b839-e4457e27e214-operator-scripts\") pod \"neutron-185c-account-create-update-f294p\" (UID: \"1889de01-2407-450e-b839-e4457e27e214\") " pod="openstack-kuttl-tests/neutron-185c-account-create-update-f294p" Jan 20 17:39:33 crc kubenswrapper[4558]: I0120 17:39:33.210248 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9b71041-5a81-4d86-9e25-45ecfa08fd58-combined-ca-bundle\") pod \"keystone-db-sync-pq7q9\" (UID: \"f9b71041-5a81-4d86-9e25-45ecfa08fd58\") " pod="openstack-kuttl-tests/keystone-db-sync-pq7q9" Jan 20 17:39:33 crc kubenswrapper[4558]: I0120 17:39:33.221100 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rz2lb\" (UniqueName: \"kubernetes.io/projected/e68a5661-3dc2-48c0-9f39-d54304c4127d-kube-api-access-rz2lb\") pod \"neutron-db-create-p729z\" (UID: \"e68a5661-3dc2-48c0-9f39-d54304c4127d\") " pod="openstack-kuttl-tests/neutron-db-create-p729z" Jan 20 17:39:33 crc kubenswrapper[4558]: I0120 17:39:33.225184 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m4kv2\" (UniqueName: \"kubernetes.io/projected/1889de01-2407-450e-b839-e4457e27e214-kube-api-access-m4kv2\") pod \"neutron-185c-account-create-update-f294p\" (UID: \"1889de01-2407-450e-b839-e4457e27e214\") " pod="openstack-kuttl-tests/neutron-185c-account-create-update-f294p" Jan 20 17:39:33 crc kubenswrapper[4558]: I0120 17:39:33.225253 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jvqgh\" (UniqueName: \"kubernetes.io/projected/f9b71041-5a81-4d86-9e25-45ecfa08fd58-kube-api-access-jvqgh\") pod \"keystone-db-sync-pq7q9\" (UID: \"f9b71041-5a81-4d86-9e25-45ecfa08fd58\") " pod="openstack-kuttl-tests/keystone-db-sync-pq7q9" Jan 20 17:39:33 crc kubenswrapper[4558]: I0120 17:39:33.225667 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9b71041-5a81-4d86-9e25-45ecfa08fd58-config-data\") pod \"keystone-db-sync-pq7q9\" (UID: \"f9b71041-5a81-4d86-9e25-45ecfa08fd58\") " pod="openstack-kuttl-tests/keystone-db-sync-pq7q9" Jan 20 17:39:33 crc kubenswrapper[4558]: I0120 17:39:33.256629 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-185c-account-create-update-f294p" Jan 20 17:39:33 crc kubenswrapper[4558]: I0120 17:39:33.260113 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-db-create-p729z" Jan 20 17:39:33 crc kubenswrapper[4558]: I0120 17:39:33.316458 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-677c-account-create-update-c2md6" Jan 20 17:39:33 crc kubenswrapper[4558]: I0120 17:39:33.358390 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-db-sync-pq7q9" Jan 20 17:39:33 crc kubenswrapper[4558]: I0120 17:39:33.444923 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-db-create-t2lsm"] Jan 20 17:39:33 crc kubenswrapper[4558]: I0120 17:39:33.487646 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-create-t2lsm" event={"ID":"dc2e997b-bfd4-47cc-981e-30db4c1c8ceb","Type":"ContainerStarted","Data":"0eec1c1942b51fc4eca1e42154e0785b45fbfa32e01a2fcf28c7ab7029dab94e"} Jan 20 17:39:33 crc kubenswrapper[4558]: I0120 17:39:33.603924 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-405d-account-create-update-hmk9w"] Jan 20 17:39:33 crc kubenswrapper[4558]: I0120 17:39:33.672571 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-db-create-xrj4d"] Jan 20 17:39:33 crc kubenswrapper[4558]: I0120 17:39:33.749946 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-185c-account-create-update-f294p"] Jan 20 17:39:33 crc kubenswrapper[4558]: W0120 17:39:33.759659 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode68a5661_3dc2_48c0_9f39_d54304c4127d.slice/crio-2f279cbc2b3b90ee57b229616ffbdd7bf34391f1e7ac9dccde23afc5ae68727b WatchSource:0}: Error finding container 2f279cbc2b3b90ee57b229616ffbdd7bf34391f1e7ac9dccde23afc5ae68727b: Status 404 returned error can't find the container with id 2f279cbc2b3b90ee57b229616ffbdd7bf34391f1e7ac9dccde23afc5ae68727b Jan 20 17:39:33 crc kubenswrapper[4558]: I0120 17:39:33.759773 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-db-create-p729z"] Jan 20 17:39:33 crc kubenswrapper[4558]: W0120 17:39:33.763225 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1889de01_2407_450e_b839_e4457e27e214.slice/crio-ff81f3d8d4ecfa724e3f4d100ea05336e8e0cde75b274739d70da21bfd6e6516 WatchSource:0}: Error finding container ff81f3d8d4ecfa724e3f4d100ea05336e8e0cde75b274739d70da21bfd6e6516: Status 404 returned error can't find the container with id ff81f3d8d4ecfa724e3f4d100ea05336e8e0cde75b274739d70da21bfd6e6516 Jan 20 17:39:33 crc kubenswrapper[4558]: I0120 17:39:33.854233 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-677c-account-create-update-c2md6"] Jan 20 17:39:33 crc kubenswrapper[4558]: I0120 17:39:33.862009 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-db-sync-pq7q9"] Jan 20 17:39:33 crc kubenswrapper[4558]: W0120 17:39:33.862675 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf9b71041_5a81_4d86_9e25_45ecfa08fd58.slice/crio-e5a23e61ddc2d54434ef9a6de5a4e1941f1404db343524ff467ff480d7e2ed2b WatchSource:0}: Error finding container e5a23e61ddc2d54434ef9a6de5a4e1941f1404db343524ff467ff480d7e2ed2b: Status 404 returned error can't find the container with id e5a23e61ddc2d54434ef9a6de5a4e1941f1404db343524ff467ff480d7e2ed2b Jan 20 17:39:33 crc kubenswrapper[4558]: W0120 17:39:33.869092 4558 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod71426e1b_db2d_4e6a_9382_81c6b239f747.slice/crio-b2b6d70eb15300243d055045f1a0297158491949002cdbc0d5751e1999a8caa0 WatchSource:0}: Error finding container b2b6d70eb15300243d055045f1a0297158491949002cdbc0d5751e1999a8caa0: Status 404 returned error can't find the container with id b2b6d70eb15300243d055045f1a0297158491949002cdbc0d5751e1999a8caa0 Jan 20 17:39:34 crc kubenswrapper[4558]: I0120 17:39:34.499062 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-create-xrj4d" event={"ID":"0cb0fadf-7873-4efe-b8b5-965e4482ebf2","Type":"ContainerStarted","Data":"a1ac1631d78e025acf8dee79c56d1f3f9399f0b3799b9fac1a4445dbdb14b651"} Jan 20 17:39:34 crc kubenswrapper[4558]: I0120 17:39:34.499117 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-create-xrj4d" event={"ID":"0cb0fadf-7873-4efe-b8b5-965e4482ebf2","Type":"ContainerStarted","Data":"a6ec8c147a15dbc9bfc8c33df1c37b0a82a0ba6a6302263457a621108dacb968"} Jan 20 17:39:34 crc kubenswrapper[4558]: I0120 17:39:34.501548 4558 generic.go:334] "Generic (PLEG): container finished" podID="dc2e997b-bfd4-47cc-981e-30db4c1c8ceb" containerID="850ea8d8634e47c9c83b6988812d43198cd155cda048ff84263b6c83e1b13513" exitCode=0 Jan 20 17:39:34 crc kubenswrapper[4558]: I0120 17:39:34.501623 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-create-t2lsm" event={"ID":"dc2e997b-bfd4-47cc-981e-30db4c1c8ceb","Type":"ContainerDied","Data":"850ea8d8634e47c9c83b6988812d43198cd155cda048ff84263b6c83e1b13513"} Jan 20 17:39:34 crc kubenswrapper[4558]: I0120 17:39:34.503849 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-sync-pq7q9" event={"ID":"f9b71041-5a81-4d86-9e25-45ecfa08fd58","Type":"ContainerStarted","Data":"cf58cb958bdccb7801e6bad9e986f8b7ff2da356baccf3f2b510423139d72ab9"} Jan 20 17:39:34 crc kubenswrapper[4558]: I0120 17:39:34.503913 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-sync-pq7q9" event={"ID":"f9b71041-5a81-4d86-9e25-45ecfa08fd58","Type":"ContainerStarted","Data":"e5a23e61ddc2d54434ef9a6de5a4e1941f1404db343524ff467ff480d7e2ed2b"} Jan 20 17:39:34 crc kubenswrapper[4558]: I0120 17:39:34.507330 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-677c-account-create-update-c2md6" event={"ID":"71426e1b-db2d-4e6a-9382-81c6b239f747","Type":"ContainerStarted","Data":"5dc968322d518b4efe9f4ead465577ff861955df682c068ecad0f7cd5f0e3331"} Jan 20 17:39:34 crc kubenswrapper[4558]: I0120 17:39:34.507399 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-677c-account-create-update-c2md6" event={"ID":"71426e1b-db2d-4e6a-9382-81c6b239f747","Type":"ContainerStarted","Data":"b2b6d70eb15300243d055045f1a0297158491949002cdbc0d5751e1999a8caa0"} Jan 20 17:39:34 crc kubenswrapper[4558]: I0120 17:39:34.509437 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-405d-account-create-update-hmk9w" event={"ID":"11c1b0d7-a15e-431e-bcbf-e01ab670c81e","Type":"ContainerStarted","Data":"0aab856afdd76049e42b2a1262000dd8299f32a30b1a1bde280bebad2ed6494f"} Jan 20 17:39:34 crc kubenswrapper[4558]: I0120 17:39:34.509468 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-405d-account-create-update-hmk9w" 
event={"ID":"11c1b0d7-a15e-431e-bcbf-e01ab670c81e","Type":"ContainerStarted","Data":"e816a94ab824c0116cf7d32d8deb4e0faefca5727749d573aa3cee0311315511"} Jan 20 17:39:34 crc kubenswrapper[4558]: I0120 17:39:34.511855 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-185c-account-create-update-f294p" event={"ID":"1889de01-2407-450e-b839-e4457e27e214","Type":"ContainerStarted","Data":"6948f6a3a830cb0a407d66b31b655e5d16e46867673e25b1965dc7c563600f88"} Jan 20 17:39:34 crc kubenswrapper[4558]: I0120 17:39:34.511919 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-185c-account-create-update-f294p" event={"ID":"1889de01-2407-450e-b839-e4457e27e214","Type":"ContainerStarted","Data":"ff81f3d8d4ecfa724e3f4d100ea05336e8e0cde75b274739d70da21bfd6e6516"} Jan 20 17:39:34 crc kubenswrapper[4558]: I0120 17:39:34.516127 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-wcmln" podUID="9d898536-f5dc-41be-b0c1-e68f52cf8915" containerName="registry-server" containerID="cri-o://4efd50b328b99386c5850d1d969d17ec798633220f85dfda8f2f14381fa19921" gracePeriod=2 Jan 20 17:39:34 crc kubenswrapper[4558]: I0120 17:39:34.516216 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-create-p729z" event={"ID":"e68a5661-3dc2-48c0-9f39-d54304c4127d","Type":"ContainerStarted","Data":"873d559ff9be8bc43c9aa2eaec3294dcd239547229c462f8fe2d94e35ae04a73"} Jan 20 17:39:34 crc kubenswrapper[4558]: I0120 17:39:34.516255 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-create-p729z" event={"ID":"e68a5661-3dc2-48c0-9f39-d54304c4127d","Type":"ContainerStarted","Data":"2f279cbc2b3b90ee57b229616ffbdd7bf34391f1e7ac9dccde23afc5ae68727b"} Jan 20 17:39:34 crc kubenswrapper[4558]: I0120 17:39:34.526974 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/cinder-db-create-xrj4d" podStartSLOduration=2.526955011 podStartE2EDuration="2.526955011s" podCreationTimestamp="2026-01-20 17:39:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:39:34.520539527 +0000 UTC m=+3468.280877494" watchObservedRunningTime="2026-01-20 17:39:34.526955011 +0000 UTC m=+3468.287292978" Jan 20 17:39:34 crc kubenswrapper[4558]: I0120 17:39:34.543428 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/cinder-405d-account-create-update-hmk9w" podStartSLOduration=2.543409949 podStartE2EDuration="2.543409949s" podCreationTimestamp="2026-01-20 17:39:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:39:34.541318838 +0000 UTC m=+3468.301656805" watchObservedRunningTime="2026-01-20 17:39:34.543409949 +0000 UTC m=+3468.303747917" Jan 20 17:39:34 crc kubenswrapper[4558]: I0120 17:39:34.558224 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/neutron-185c-account-create-update-f294p" podStartSLOduration=2.558206732 podStartE2EDuration="2.558206732s" podCreationTimestamp="2026-01-20 17:39:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:39:34.555901207 +0000 UTC m=+3468.316239174" 
watchObservedRunningTime="2026-01-20 17:39:34.558206732 +0000 UTC m=+3468.318544699" Jan 20 17:39:34 crc kubenswrapper[4558]: I0120 17:39:34.575013 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/neutron-db-create-p729z" podStartSLOduration=2.574995017 podStartE2EDuration="2.574995017s" podCreationTimestamp="2026-01-20 17:39:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:39:34.568358108 +0000 UTC m=+3468.328696076" watchObservedRunningTime="2026-01-20 17:39:34.574995017 +0000 UTC m=+3468.335332985" Jan 20 17:39:34 crc kubenswrapper[4558]: I0120 17:39:34.585763 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/keystone-db-sync-pq7q9" podStartSLOduration=2.585753155 podStartE2EDuration="2.585753155s" podCreationTimestamp="2026-01-20 17:39:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:39:34.581027099 +0000 UTC m=+3468.341365066" watchObservedRunningTime="2026-01-20 17:39:34.585753155 +0000 UTC m=+3468.346091122" Jan 20 17:39:34 crc kubenswrapper[4558]: I0120 17:39:34.607252 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-677c-account-create-update-c2md6" podStartSLOduration=2.607235868 podStartE2EDuration="2.607235868s" podCreationTimestamp="2026-01-20 17:39:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:39:34.604297653 +0000 UTC m=+3468.364635620" watchObservedRunningTime="2026-01-20 17:39:34.607235868 +0000 UTC m=+3468.367573835" Jan 20 17:39:35 crc kubenswrapper[4558]: I0120 17:39:35.499216 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-wcmln" Jan 20 17:39:35 crc kubenswrapper[4558]: I0120 17:39:35.527781 4558 generic.go:334] "Generic (PLEG): container finished" podID="e68a5661-3dc2-48c0-9f39-d54304c4127d" containerID="873d559ff9be8bc43c9aa2eaec3294dcd239547229c462f8fe2d94e35ae04a73" exitCode=0 Jan 20 17:39:35 crc kubenswrapper[4558]: I0120 17:39:35.527912 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-create-p729z" event={"ID":"e68a5661-3dc2-48c0-9f39-d54304c4127d","Type":"ContainerDied","Data":"873d559ff9be8bc43c9aa2eaec3294dcd239547229c462f8fe2d94e35ae04a73"} Jan 20 17:39:35 crc kubenswrapper[4558]: I0120 17:39:35.532344 4558 generic.go:334] "Generic (PLEG): container finished" podID="9d898536-f5dc-41be-b0c1-e68f52cf8915" containerID="4efd50b328b99386c5850d1d969d17ec798633220f85dfda8f2f14381fa19921" exitCode=0 Jan 20 17:39:35 crc kubenswrapper[4558]: I0120 17:39:35.532385 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wcmln" event={"ID":"9d898536-f5dc-41be-b0c1-e68f52cf8915","Type":"ContainerDied","Data":"4efd50b328b99386c5850d1d969d17ec798633220f85dfda8f2f14381fa19921"} Jan 20 17:39:35 crc kubenswrapper[4558]: I0120 17:39:35.532400 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-wcmln" Jan 20 17:39:35 crc kubenswrapper[4558]: I0120 17:39:35.532428 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wcmln" event={"ID":"9d898536-f5dc-41be-b0c1-e68f52cf8915","Type":"ContainerDied","Data":"a42653d97e4b4ee97bcc2a6ea61e25c08f347e9b710bfae0e881201371f77285"} Jan 20 17:39:35 crc kubenswrapper[4558]: I0120 17:39:35.532450 4558 scope.go:117] "RemoveContainer" containerID="4efd50b328b99386c5850d1d969d17ec798633220f85dfda8f2f14381fa19921" Jan 20 17:39:35 crc kubenswrapper[4558]: I0120 17:39:35.534149 4558 generic.go:334] "Generic (PLEG): container finished" podID="0cb0fadf-7873-4efe-b8b5-965e4482ebf2" containerID="a1ac1631d78e025acf8dee79c56d1f3f9399f0b3799b9fac1a4445dbdb14b651" exitCode=0 Jan 20 17:39:35 crc kubenswrapper[4558]: I0120 17:39:35.534262 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-create-xrj4d" event={"ID":"0cb0fadf-7873-4efe-b8b5-965e4482ebf2","Type":"ContainerDied","Data":"a1ac1631d78e025acf8dee79c56d1f3f9399f0b3799b9fac1a4445dbdb14b651"} Jan 20 17:39:35 crc kubenswrapper[4558]: I0120 17:39:35.539110 4558 generic.go:334] "Generic (PLEG): container finished" podID="71426e1b-db2d-4e6a-9382-81c6b239f747" containerID="5dc968322d518b4efe9f4ead465577ff861955df682c068ecad0f7cd5f0e3331" exitCode=0 Jan 20 17:39:35 crc kubenswrapper[4558]: I0120 17:39:35.539227 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-677c-account-create-update-c2md6" event={"ID":"71426e1b-db2d-4e6a-9382-81c6b239f747","Type":"ContainerDied","Data":"5dc968322d518b4efe9f4ead465577ff861955df682c068ecad0f7cd5f0e3331"} Jan 20 17:39:35 crc kubenswrapper[4558]: I0120 17:39:35.543556 4558 generic.go:334] "Generic (PLEG): container finished" podID="11c1b0d7-a15e-431e-bcbf-e01ab670c81e" containerID="0aab856afdd76049e42b2a1262000dd8299f32a30b1a1bde280bebad2ed6494f" exitCode=0 Jan 20 17:39:35 crc kubenswrapper[4558]: I0120 17:39:35.543632 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-405d-account-create-update-hmk9w" event={"ID":"11c1b0d7-a15e-431e-bcbf-e01ab670c81e","Type":"ContainerDied","Data":"0aab856afdd76049e42b2a1262000dd8299f32a30b1a1bde280bebad2ed6494f"} Jan 20 17:39:35 crc kubenswrapper[4558]: I0120 17:39:35.545241 4558 generic.go:334] "Generic (PLEG): container finished" podID="1889de01-2407-450e-b839-e4457e27e214" containerID="6948f6a3a830cb0a407d66b31b655e5d16e46867673e25b1965dc7c563600f88" exitCode=0 Jan 20 17:39:35 crc kubenswrapper[4558]: I0120 17:39:35.545286 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-185c-account-create-update-f294p" event={"ID":"1889de01-2407-450e-b839-e4457e27e214","Type":"ContainerDied","Data":"6948f6a3a830cb0a407d66b31b655e5d16e46867673e25b1965dc7c563600f88"} Jan 20 17:39:35 crc kubenswrapper[4558]: I0120 17:39:35.564004 4558 scope.go:117] "RemoveContainer" containerID="51ce38844d208430296ae7b4b68ee9fe10adefcd33f1c7e7c7f8f4527a81b581" Jan 20 17:39:35 crc kubenswrapper[4558]: I0120 17:39:35.600004 4558 scope.go:117] "RemoveContainer" containerID="2cf42ee1b438a58d93b934d2250d626d524b58ad1bcc00d311a3223ab3551ff9" Jan 20 17:39:35 crc kubenswrapper[4558]: I0120 17:39:35.640528 4558 scope.go:117] "RemoveContainer" containerID="4efd50b328b99386c5850d1d969d17ec798633220f85dfda8f2f14381fa19921" Jan 20 17:39:35 crc kubenswrapper[4558]: E0120 17:39:35.641283 4558 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4efd50b328b99386c5850d1d969d17ec798633220f85dfda8f2f14381fa19921\": container with ID starting with 4efd50b328b99386c5850d1d969d17ec798633220f85dfda8f2f14381fa19921 not found: ID does not exist" containerID="4efd50b328b99386c5850d1d969d17ec798633220f85dfda8f2f14381fa19921" Jan 20 17:39:35 crc kubenswrapper[4558]: I0120 17:39:35.641324 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4efd50b328b99386c5850d1d969d17ec798633220f85dfda8f2f14381fa19921"} err="failed to get container status \"4efd50b328b99386c5850d1d969d17ec798633220f85dfda8f2f14381fa19921\": rpc error: code = NotFound desc = could not find container \"4efd50b328b99386c5850d1d969d17ec798633220f85dfda8f2f14381fa19921\": container with ID starting with 4efd50b328b99386c5850d1d969d17ec798633220f85dfda8f2f14381fa19921 not found: ID does not exist" Jan 20 17:39:35 crc kubenswrapper[4558]: I0120 17:39:35.641350 4558 scope.go:117] "RemoveContainer" containerID="51ce38844d208430296ae7b4b68ee9fe10adefcd33f1c7e7c7f8f4527a81b581" Jan 20 17:39:35 crc kubenswrapper[4558]: E0120 17:39:35.641718 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"51ce38844d208430296ae7b4b68ee9fe10adefcd33f1c7e7c7f8f4527a81b581\": container with ID starting with 51ce38844d208430296ae7b4b68ee9fe10adefcd33f1c7e7c7f8f4527a81b581 not found: ID does not exist" containerID="51ce38844d208430296ae7b4b68ee9fe10adefcd33f1c7e7c7f8f4527a81b581" Jan 20 17:39:35 crc kubenswrapper[4558]: I0120 17:39:35.641746 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"51ce38844d208430296ae7b4b68ee9fe10adefcd33f1c7e7c7f8f4527a81b581"} err="failed to get container status \"51ce38844d208430296ae7b4b68ee9fe10adefcd33f1c7e7c7f8f4527a81b581\": rpc error: code = NotFound desc = could not find container \"51ce38844d208430296ae7b4b68ee9fe10adefcd33f1c7e7c7f8f4527a81b581\": container with ID starting with 51ce38844d208430296ae7b4b68ee9fe10adefcd33f1c7e7c7f8f4527a81b581 not found: ID does not exist" Jan 20 17:39:35 crc kubenswrapper[4558]: I0120 17:39:35.641761 4558 scope.go:117] "RemoveContainer" containerID="2cf42ee1b438a58d93b934d2250d626d524b58ad1bcc00d311a3223ab3551ff9" Jan 20 17:39:35 crc kubenswrapper[4558]: E0120 17:39:35.642145 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2cf42ee1b438a58d93b934d2250d626d524b58ad1bcc00d311a3223ab3551ff9\": container with ID starting with 2cf42ee1b438a58d93b934d2250d626d524b58ad1bcc00d311a3223ab3551ff9 not found: ID does not exist" containerID="2cf42ee1b438a58d93b934d2250d626d524b58ad1bcc00d311a3223ab3551ff9" Jan 20 17:39:35 crc kubenswrapper[4558]: I0120 17:39:35.642214 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2cf42ee1b438a58d93b934d2250d626d524b58ad1bcc00d311a3223ab3551ff9"} err="failed to get container status \"2cf42ee1b438a58d93b934d2250d626d524b58ad1bcc00d311a3223ab3551ff9\": rpc error: code = NotFound desc = could not find container \"2cf42ee1b438a58d93b934d2250d626d524b58ad1bcc00d311a3223ab3551ff9\": container with ID starting with 2cf42ee1b438a58d93b934d2250d626d524b58ad1bcc00d311a3223ab3551ff9 not found: ID does not exist" Jan 20 17:39:35 crc kubenswrapper[4558]: I0120 17:39:35.657063 4558 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-29jlk\" (UniqueName: \"kubernetes.io/projected/9d898536-f5dc-41be-b0c1-e68f52cf8915-kube-api-access-29jlk\") pod \"9d898536-f5dc-41be-b0c1-e68f52cf8915\" (UID: \"9d898536-f5dc-41be-b0c1-e68f52cf8915\") " Jan 20 17:39:35 crc kubenswrapper[4558]: I0120 17:39:35.657186 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9d898536-f5dc-41be-b0c1-e68f52cf8915-catalog-content\") pod \"9d898536-f5dc-41be-b0c1-e68f52cf8915\" (UID: \"9d898536-f5dc-41be-b0c1-e68f52cf8915\") " Jan 20 17:39:35 crc kubenswrapper[4558]: I0120 17:39:35.657218 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9d898536-f5dc-41be-b0c1-e68f52cf8915-utilities\") pod \"9d898536-f5dc-41be-b0c1-e68f52cf8915\" (UID: \"9d898536-f5dc-41be-b0c1-e68f52cf8915\") " Jan 20 17:39:35 crc kubenswrapper[4558]: I0120 17:39:35.658962 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9d898536-f5dc-41be-b0c1-e68f52cf8915-utilities" (OuterVolumeSpecName: "utilities") pod "9d898536-f5dc-41be-b0c1-e68f52cf8915" (UID: "9d898536-f5dc-41be-b0c1-e68f52cf8915"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:39:35 crc kubenswrapper[4558]: I0120 17:39:35.663969 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d898536-f5dc-41be-b0c1-e68f52cf8915-kube-api-access-29jlk" (OuterVolumeSpecName: "kube-api-access-29jlk") pod "9d898536-f5dc-41be-b0c1-e68f52cf8915" (UID: "9d898536-f5dc-41be-b0c1-e68f52cf8915"). InnerVolumeSpecName "kube-api-access-29jlk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:39:35 crc kubenswrapper[4558]: I0120 17:39:35.763121 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-29jlk\" (UniqueName: \"kubernetes.io/projected/9d898536-f5dc-41be-b0c1-e68f52cf8915-kube-api-access-29jlk\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:35 crc kubenswrapper[4558]: I0120 17:39:35.763153 4558 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9d898536-f5dc-41be-b0c1-e68f52cf8915-utilities\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:35 crc kubenswrapper[4558]: I0120 17:39:35.793534 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9d898536-f5dc-41be-b0c1-e68f52cf8915-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9d898536-f5dc-41be-b0c1-e68f52cf8915" (UID: "9d898536-f5dc-41be-b0c1-e68f52cf8915"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:39:35 crc kubenswrapper[4558]: I0120 17:39:35.872853 4558 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9d898536-f5dc-41be-b0c1-e68f52cf8915-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:35 crc kubenswrapper[4558]: I0120 17:39:35.941484 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-wcmln"] Jan 20 17:39:35 crc kubenswrapper[4558]: I0120 17:39:35.962379 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-wcmln"] Jan 20 17:39:36 crc kubenswrapper[4558]: I0120 17:39:36.031658 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-db-create-t2lsm" Jan 20 17:39:36 crc kubenswrapper[4558]: I0120 17:39:36.177367 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g5fxp\" (UniqueName: \"kubernetes.io/projected/dc2e997b-bfd4-47cc-981e-30db4c1c8ceb-kube-api-access-g5fxp\") pod \"dc2e997b-bfd4-47cc-981e-30db4c1c8ceb\" (UID: \"dc2e997b-bfd4-47cc-981e-30db4c1c8ceb\") " Jan 20 17:39:36 crc kubenswrapper[4558]: I0120 17:39:36.177436 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dc2e997b-bfd4-47cc-981e-30db4c1c8ceb-operator-scripts\") pod \"dc2e997b-bfd4-47cc-981e-30db4c1c8ceb\" (UID: \"dc2e997b-bfd4-47cc-981e-30db4c1c8ceb\") " Jan 20 17:39:36 crc kubenswrapper[4558]: I0120 17:39:36.178246 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dc2e997b-bfd4-47cc-981e-30db4c1c8ceb-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "dc2e997b-bfd4-47cc-981e-30db4c1c8ceb" (UID: "dc2e997b-bfd4-47cc-981e-30db4c1c8ceb"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:39:36 crc kubenswrapper[4558]: I0120 17:39:36.181452 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dc2e997b-bfd4-47cc-981e-30db4c1c8ceb-kube-api-access-g5fxp" (OuterVolumeSpecName: "kube-api-access-g5fxp") pod "dc2e997b-bfd4-47cc-981e-30db4c1c8ceb" (UID: "dc2e997b-bfd4-47cc-981e-30db4c1c8ceb"). InnerVolumeSpecName "kube-api-access-g5fxp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:39:36 crc kubenswrapper[4558]: I0120 17:39:36.280111 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g5fxp\" (UniqueName: \"kubernetes.io/projected/dc2e997b-bfd4-47cc-981e-30db4c1c8ceb-kube-api-access-g5fxp\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:36 crc kubenswrapper[4558]: I0120 17:39:36.280156 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dc2e997b-bfd4-47cc-981e-30db4c1c8ceb-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:36 crc kubenswrapper[4558]: I0120 17:39:36.555954 4558 generic.go:334] "Generic (PLEG): container finished" podID="f9b71041-5a81-4d86-9e25-45ecfa08fd58" containerID="cf58cb958bdccb7801e6bad9e986f8b7ff2da356baccf3f2b510423139d72ab9" exitCode=0 Jan 20 17:39:36 crc kubenswrapper[4558]: I0120 17:39:36.556047 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-sync-pq7q9" event={"ID":"f9b71041-5a81-4d86-9e25-45ecfa08fd58","Type":"ContainerDied","Data":"cf58cb958bdccb7801e6bad9e986f8b7ff2da356baccf3f2b510423139d72ab9"} Jan 20 17:39:36 crc kubenswrapper[4558]: I0120 17:39:36.558255 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-db-create-t2lsm" Jan 20 17:39:36 crc kubenswrapper[4558]: I0120 17:39:36.558303 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-create-t2lsm" event={"ID":"dc2e997b-bfd4-47cc-981e-30db4c1c8ceb","Type":"ContainerDied","Data":"0eec1c1942b51fc4eca1e42154e0785b45fbfa32e01a2fcf28c7ab7029dab94e"} Jan 20 17:39:36 crc kubenswrapper[4558]: I0120 17:39:36.558347 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0eec1c1942b51fc4eca1e42154e0785b45fbfa32e01a2fcf28c7ab7029dab94e" Jan 20 17:39:36 crc kubenswrapper[4558]: I0120 17:39:36.582408 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d898536-f5dc-41be-b0c1-e68f52cf8915" path="/var/lib/kubelet/pods/9d898536-f5dc-41be-b0c1-e68f52cf8915/volumes" Jan 20 17:39:36 crc kubenswrapper[4558]: I0120 17:39:36.884021 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-677c-account-create-update-c2md6" Jan 20 17:39:36 crc kubenswrapper[4558]: I0120 17:39:36.992782 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/71426e1b-db2d-4e6a-9382-81c6b239f747-operator-scripts\") pod \"71426e1b-db2d-4e6a-9382-81c6b239f747\" (UID: \"71426e1b-db2d-4e6a-9382-81c6b239f747\") " Jan 20 17:39:36 crc kubenswrapper[4558]: I0120 17:39:36.993119 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dfw5x\" (UniqueName: \"kubernetes.io/projected/71426e1b-db2d-4e6a-9382-81c6b239f747-kube-api-access-dfw5x\") pod \"71426e1b-db2d-4e6a-9382-81c6b239f747\" (UID: \"71426e1b-db2d-4e6a-9382-81c6b239f747\") " Jan 20 17:39:36 crc kubenswrapper[4558]: I0120 17:39:36.993809 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/71426e1b-db2d-4e6a-9382-81c6b239f747-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "71426e1b-db2d-4e6a-9382-81c6b239f747" (UID: "71426e1b-db2d-4e6a-9382-81c6b239f747"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:39:36 crc kubenswrapper[4558]: I0120 17:39:36.994452 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/71426e1b-db2d-4e6a-9382-81c6b239f747-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:36 crc kubenswrapper[4558]: I0120 17:39:36.998958 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/71426e1b-db2d-4e6a-9382-81c6b239f747-kube-api-access-dfw5x" (OuterVolumeSpecName: "kube-api-access-dfw5x") pod "71426e1b-db2d-4e6a-9382-81c6b239f747" (UID: "71426e1b-db2d-4e6a-9382-81c6b239f747"). InnerVolumeSpecName "kube-api-access-dfw5x". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:39:37 crc kubenswrapper[4558]: I0120 17:39:37.016292 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-db-create-p729z" Jan 20 17:39:37 crc kubenswrapper[4558]: I0120 17:39:37.023395 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-405d-account-create-update-hmk9w" Jan 20 17:39:37 crc kubenswrapper[4558]: I0120 17:39:37.040351 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-185c-account-create-update-f294p" Jan 20 17:39:37 crc kubenswrapper[4558]: I0120 17:39:37.042352 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-db-create-xrj4d" Jan 20 17:39:37 crc kubenswrapper[4558]: I0120 17:39:37.097591 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rz2lb\" (UniqueName: \"kubernetes.io/projected/e68a5661-3dc2-48c0-9f39-d54304c4127d-kube-api-access-rz2lb\") pod \"e68a5661-3dc2-48c0-9f39-d54304c4127d\" (UID: \"e68a5661-3dc2-48c0-9f39-d54304c4127d\") " Jan 20 17:39:37 crc kubenswrapper[4558]: I0120 17:39:37.097847 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e68a5661-3dc2-48c0-9f39-d54304c4127d-operator-scripts\") pod \"e68a5661-3dc2-48c0-9f39-d54304c4127d\" (UID: \"e68a5661-3dc2-48c0-9f39-d54304c4127d\") " Jan 20 17:39:37 crc kubenswrapper[4558]: I0120 17:39:37.098652 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dfw5x\" (UniqueName: \"kubernetes.io/projected/71426e1b-db2d-4e6a-9382-81c6b239f747-kube-api-access-dfw5x\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:37 crc kubenswrapper[4558]: I0120 17:39:37.098952 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e68a5661-3dc2-48c0-9f39-d54304c4127d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e68a5661-3dc2-48c0-9f39-d54304c4127d" (UID: "e68a5661-3dc2-48c0-9f39-d54304c4127d"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:39:37 crc kubenswrapper[4558]: I0120 17:39:37.101718 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e68a5661-3dc2-48c0-9f39-d54304c4127d-kube-api-access-rz2lb" (OuterVolumeSpecName: "kube-api-access-rz2lb") pod "e68a5661-3dc2-48c0-9f39-d54304c4127d" (UID: "e68a5661-3dc2-48c0-9f39-d54304c4127d"). InnerVolumeSpecName "kube-api-access-rz2lb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:39:37 crc kubenswrapper[4558]: I0120 17:39:37.199993 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/11c1b0d7-a15e-431e-bcbf-e01ab670c81e-operator-scripts\") pod \"11c1b0d7-a15e-431e-bcbf-e01ab670c81e\" (UID: \"11c1b0d7-a15e-431e-bcbf-e01ab670c81e\") " Jan 20 17:39:37 crc kubenswrapper[4558]: I0120 17:39:37.200057 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qz2bc\" (UniqueName: \"kubernetes.io/projected/0cb0fadf-7873-4efe-b8b5-965e4482ebf2-kube-api-access-qz2bc\") pod \"0cb0fadf-7873-4efe-b8b5-965e4482ebf2\" (UID: \"0cb0fadf-7873-4efe-b8b5-965e4482ebf2\") " Jan 20 17:39:37 crc kubenswrapper[4558]: I0120 17:39:37.200142 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m4kv2\" (UniqueName: \"kubernetes.io/projected/1889de01-2407-450e-b839-e4457e27e214-kube-api-access-m4kv2\") pod \"1889de01-2407-450e-b839-e4457e27e214\" (UID: \"1889de01-2407-450e-b839-e4457e27e214\") " Jan 20 17:39:37 crc kubenswrapper[4558]: I0120 17:39:37.200197 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qcdnw\" (UniqueName: \"kubernetes.io/projected/11c1b0d7-a15e-431e-bcbf-e01ab670c81e-kube-api-access-qcdnw\") pod \"11c1b0d7-a15e-431e-bcbf-e01ab670c81e\" (UID: \"11c1b0d7-a15e-431e-bcbf-e01ab670c81e\") " Jan 20 17:39:37 crc kubenswrapper[4558]: I0120 17:39:37.200226 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0cb0fadf-7873-4efe-b8b5-965e4482ebf2-operator-scripts\") pod \"0cb0fadf-7873-4efe-b8b5-965e4482ebf2\" (UID: \"0cb0fadf-7873-4efe-b8b5-965e4482ebf2\") " Jan 20 17:39:37 crc kubenswrapper[4558]: I0120 17:39:37.200250 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1889de01-2407-450e-b839-e4457e27e214-operator-scripts\") pod \"1889de01-2407-450e-b839-e4457e27e214\" (UID: \"1889de01-2407-450e-b839-e4457e27e214\") " Jan 20 17:39:37 crc kubenswrapper[4558]: I0120 17:39:37.200503 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rz2lb\" (UniqueName: \"kubernetes.io/projected/e68a5661-3dc2-48c0-9f39-d54304c4127d-kube-api-access-rz2lb\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:37 crc kubenswrapper[4558]: I0120 17:39:37.200527 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e68a5661-3dc2-48c0-9f39-d54304c4127d-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:37 crc kubenswrapper[4558]: I0120 17:39:37.201200 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1889de01-2407-450e-b839-e4457e27e214-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "1889de01-2407-450e-b839-e4457e27e214" (UID: "1889de01-2407-450e-b839-e4457e27e214"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:39:37 crc kubenswrapper[4558]: I0120 17:39:37.201197 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/11c1b0d7-a15e-431e-bcbf-e01ab670c81e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "11c1b0d7-a15e-431e-bcbf-e01ab670c81e" (UID: "11c1b0d7-a15e-431e-bcbf-e01ab670c81e"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:39:37 crc kubenswrapper[4558]: I0120 17:39:37.201372 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0cb0fadf-7873-4efe-b8b5-965e4482ebf2-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "0cb0fadf-7873-4efe-b8b5-965e4482ebf2" (UID: "0cb0fadf-7873-4efe-b8b5-965e4482ebf2"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:39:37 crc kubenswrapper[4558]: I0120 17:39:37.203738 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1889de01-2407-450e-b839-e4457e27e214-kube-api-access-m4kv2" (OuterVolumeSpecName: "kube-api-access-m4kv2") pod "1889de01-2407-450e-b839-e4457e27e214" (UID: "1889de01-2407-450e-b839-e4457e27e214"). InnerVolumeSpecName "kube-api-access-m4kv2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:39:37 crc kubenswrapper[4558]: I0120 17:39:37.204896 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/11c1b0d7-a15e-431e-bcbf-e01ab670c81e-kube-api-access-qcdnw" (OuterVolumeSpecName: "kube-api-access-qcdnw") pod "11c1b0d7-a15e-431e-bcbf-e01ab670c81e" (UID: "11c1b0d7-a15e-431e-bcbf-e01ab670c81e"). InnerVolumeSpecName "kube-api-access-qcdnw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:39:37 crc kubenswrapper[4558]: I0120 17:39:37.205234 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0cb0fadf-7873-4efe-b8b5-965e4482ebf2-kube-api-access-qz2bc" (OuterVolumeSpecName: "kube-api-access-qz2bc") pod "0cb0fadf-7873-4efe-b8b5-965e4482ebf2" (UID: "0cb0fadf-7873-4efe-b8b5-965e4482ebf2"). InnerVolumeSpecName "kube-api-access-qz2bc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:39:37 crc kubenswrapper[4558]: I0120 17:39:37.302055 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qcdnw\" (UniqueName: \"kubernetes.io/projected/11c1b0d7-a15e-431e-bcbf-e01ab670c81e-kube-api-access-qcdnw\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:37 crc kubenswrapper[4558]: I0120 17:39:37.302092 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0cb0fadf-7873-4efe-b8b5-965e4482ebf2-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:37 crc kubenswrapper[4558]: I0120 17:39:37.302105 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1889de01-2407-450e-b839-e4457e27e214-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:37 crc kubenswrapper[4558]: I0120 17:39:37.302117 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/11c1b0d7-a15e-431e-bcbf-e01ab670c81e-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:37 crc kubenswrapper[4558]: I0120 17:39:37.302129 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qz2bc\" (UniqueName: \"kubernetes.io/projected/0cb0fadf-7873-4efe-b8b5-965e4482ebf2-kube-api-access-qz2bc\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:37 crc kubenswrapper[4558]: I0120 17:39:37.302141 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m4kv2\" (UniqueName: \"kubernetes.io/projected/1889de01-2407-450e-b839-e4457e27e214-kube-api-access-m4kv2\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:37 crc kubenswrapper[4558]: I0120 17:39:37.566633 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-db-create-p729z" Jan 20 17:39:37 crc kubenswrapper[4558]: I0120 17:39:37.566667 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-create-p729z" event={"ID":"e68a5661-3dc2-48c0-9f39-d54304c4127d","Type":"ContainerDied","Data":"2f279cbc2b3b90ee57b229616ffbdd7bf34391f1e7ac9dccde23afc5ae68727b"} Jan 20 17:39:37 crc kubenswrapper[4558]: I0120 17:39:37.566701 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2f279cbc2b3b90ee57b229616ffbdd7bf34391f1e7ac9dccde23afc5ae68727b" Jan 20 17:39:37 crc kubenswrapper[4558]: I0120 17:39:37.568026 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-create-xrj4d" event={"ID":"0cb0fadf-7873-4efe-b8b5-965e4482ebf2","Type":"ContainerDied","Data":"a6ec8c147a15dbc9bfc8c33df1c37b0a82a0ba6a6302263457a621108dacb968"} Jan 20 17:39:37 crc kubenswrapper[4558]: I0120 17:39:37.568061 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a6ec8c147a15dbc9bfc8c33df1c37b0a82a0ba6a6302263457a621108dacb968" Jan 20 17:39:37 crc kubenswrapper[4558]: I0120 17:39:37.568041 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-db-create-xrj4d" Jan 20 17:39:37 crc kubenswrapper[4558]: I0120 17:39:37.586776 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-677c-account-create-update-c2md6" event={"ID":"71426e1b-db2d-4e6a-9382-81c6b239f747","Type":"ContainerDied","Data":"b2b6d70eb15300243d055045f1a0297158491949002cdbc0d5751e1999a8caa0"} Jan 20 17:39:37 crc kubenswrapper[4558]: I0120 17:39:37.586818 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b2b6d70eb15300243d055045f1a0297158491949002cdbc0d5751e1999a8caa0" Jan 20 17:39:37 crc kubenswrapper[4558]: I0120 17:39:37.586890 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-677c-account-create-update-c2md6" Jan 20 17:39:37 crc kubenswrapper[4558]: I0120 17:39:37.597959 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-405d-account-create-update-hmk9w" event={"ID":"11c1b0d7-a15e-431e-bcbf-e01ab670c81e","Type":"ContainerDied","Data":"e816a94ab824c0116cf7d32d8deb4e0faefca5727749d573aa3cee0311315511"} Jan 20 17:39:37 crc kubenswrapper[4558]: I0120 17:39:37.598003 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e816a94ab824c0116cf7d32d8deb4e0faefca5727749d573aa3cee0311315511" Jan 20 17:39:37 crc kubenswrapper[4558]: I0120 17:39:37.598102 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-405d-account-create-update-hmk9w" Jan 20 17:39:37 crc kubenswrapper[4558]: I0120 17:39:37.604234 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-185c-account-create-update-f294p" Jan 20 17:39:37 crc kubenswrapper[4558]: I0120 17:39:37.604316 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-185c-account-create-update-f294p" event={"ID":"1889de01-2407-450e-b839-e4457e27e214","Type":"ContainerDied","Data":"ff81f3d8d4ecfa724e3f4d100ea05336e8e0cde75b274739d70da21bfd6e6516"} Jan 20 17:39:37 crc kubenswrapper[4558]: I0120 17:39:37.604366 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ff81f3d8d4ecfa724e3f4d100ea05336e8e0cde75b274739d70da21bfd6e6516" Jan 20 17:39:37 crc kubenswrapper[4558]: I0120 17:39:37.908678 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-db-sync-pq7q9" Jan 20 17:39:38 crc kubenswrapper[4558]: I0120 17:39:38.025638 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9b71041-5a81-4d86-9e25-45ecfa08fd58-config-data\") pod \"f9b71041-5a81-4d86-9e25-45ecfa08fd58\" (UID: \"f9b71041-5a81-4d86-9e25-45ecfa08fd58\") " Jan 20 17:39:38 crc kubenswrapper[4558]: I0120 17:39:38.025819 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jvqgh\" (UniqueName: \"kubernetes.io/projected/f9b71041-5a81-4d86-9e25-45ecfa08fd58-kube-api-access-jvqgh\") pod \"f9b71041-5a81-4d86-9e25-45ecfa08fd58\" (UID: \"f9b71041-5a81-4d86-9e25-45ecfa08fd58\") " Jan 20 17:39:38 crc kubenswrapper[4558]: I0120 17:39:38.026043 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9b71041-5a81-4d86-9e25-45ecfa08fd58-combined-ca-bundle\") pod \"f9b71041-5a81-4d86-9e25-45ecfa08fd58\" (UID: \"f9b71041-5a81-4d86-9e25-45ecfa08fd58\") " Jan 20 17:39:38 crc kubenswrapper[4558]: I0120 17:39:38.030422 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f9b71041-5a81-4d86-9e25-45ecfa08fd58-kube-api-access-jvqgh" (OuterVolumeSpecName: "kube-api-access-jvqgh") pod "f9b71041-5a81-4d86-9e25-45ecfa08fd58" (UID: "f9b71041-5a81-4d86-9e25-45ecfa08fd58"). InnerVolumeSpecName "kube-api-access-jvqgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:39:38 crc kubenswrapper[4558]: I0120 17:39:38.050427 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9b71041-5a81-4d86-9e25-45ecfa08fd58-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f9b71041-5a81-4d86-9e25-45ecfa08fd58" (UID: "f9b71041-5a81-4d86-9e25-45ecfa08fd58"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:39:38 crc kubenswrapper[4558]: I0120 17:39:38.066537 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9b71041-5a81-4d86-9e25-45ecfa08fd58-config-data" (OuterVolumeSpecName: "config-data") pod "f9b71041-5a81-4d86-9e25-45ecfa08fd58" (UID: "f9b71041-5a81-4d86-9e25-45ecfa08fd58"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:39:38 crc kubenswrapper[4558]: I0120 17:39:38.129874 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9b71041-5a81-4d86-9e25-45ecfa08fd58-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:38 crc kubenswrapper[4558]: I0120 17:39:38.129919 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9b71041-5a81-4d86-9e25-45ecfa08fd58-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:38 crc kubenswrapper[4558]: I0120 17:39:38.129936 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jvqgh\" (UniqueName: \"kubernetes.io/projected/f9b71041-5a81-4d86-9e25-45ecfa08fd58-kube-api-access-jvqgh\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:38 crc kubenswrapper[4558]: I0120 17:39:38.615429 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-sync-pq7q9" event={"ID":"f9b71041-5a81-4d86-9e25-45ecfa08fd58","Type":"ContainerDied","Data":"e5a23e61ddc2d54434ef9a6de5a4e1941f1404db343524ff467ff480d7e2ed2b"} Jan 20 17:39:38 crc kubenswrapper[4558]: I0120 17:39:38.615641 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e5a23e61ddc2d54434ef9a6de5a4e1941f1404db343524ff467ff480d7e2ed2b" Jan 20 17:39:38 crc kubenswrapper[4558]: I0120 17:39:38.615516 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-db-sync-pq7q9" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.030861 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-rxwfk"] Jan 20 17:39:39 crc kubenswrapper[4558]: E0120 17:39:39.031326 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1889de01-2407-450e-b839-e4457e27e214" containerName="mariadb-account-create-update" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.031344 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1889de01-2407-450e-b839-e4457e27e214" containerName="mariadb-account-create-update" Jan 20 17:39:39 crc kubenswrapper[4558]: E0120 17:39:39.031363 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11c1b0d7-a15e-431e-bcbf-e01ab670c81e" containerName="mariadb-account-create-update" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.031369 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="11c1b0d7-a15e-431e-bcbf-e01ab670c81e" containerName="mariadb-account-create-update" Jan 20 17:39:39 crc kubenswrapper[4558]: E0120 17:39:39.031381 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0cb0fadf-7873-4efe-b8b5-965e4482ebf2" containerName="mariadb-database-create" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.031386 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0cb0fadf-7873-4efe-b8b5-965e4482ebf2" containerName="mariadb-database-create" Jan 20 17:39:39 crc kubenswrapper[4558]: E0120 17:39:39.031397 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d898536-f5dc-41be-b0c1-e68f52cf8915" containerName="extract-content" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.031402 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d898536-f5dc-41be-b0c1-e68f52cf8915" containerName="extract-content" Jan 20 17:39:39 crc kubenswrapper[4558]: E0120 17:39:39.031413 4558 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="9d898536-f5dc-41be-b0c1-e68f52cf8915" containerName="extract-utilities" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.031421 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d898536-f5dc-41be-b0c1-e68f52cf8915" containerName="extract-utilities" Jan 20 17:39:39 crc kubenswrapper[4558]: E0120 17:39:39.031428 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e68a5661-3dc2-48c0-9f39-d54304c4127d" containerName="mariadb-database-create" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.031433 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e68a5661-3dc2-48c0-9f39-d54304c4127d" containerName="mariadb-database-create" Jan 20 17:39:39 crc kubenswrapper[4558]: E0120 17:39:39.031443 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9b71041-5a81-4d86-9e25-45ecfa08fd58" containerName="keystone-db-sync" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.031448 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9b71041-5a81-4d86-9e25-45ecfa08fd58" containerName="keystone-db-sync" Jan 20 17:39:39 crc kubenswrapper[4558]: E0120 17:39:39.031472 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71426e1b-db2d-4e6a-9382-81c6b239f747" containerName="mariadb-account-create-update" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.031477 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="71426e1b-db2d-4e6a-9382-81c6b239f747" containerName="mariadb-account-create-update" Jan 20 17:39:39 crc kubenswrapper[4558]: E0120 17:39:39.031483 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d898536-f5dc-41be-b0c1-e68f52cf8915" containerName="registry-server" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.031488 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d898536-f5dc-41be-b0c1-e68f52cf8915" containerName="registry-server" Jan 20 17:39:39 crc kubenswrapper[4558]: E0120 17:39:39.031498 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc2e997b-bfd4-47cc-981e-30db4c1c8ceb" containerName="mariadb-database-create" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.031503 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc2e997b-bfd4-47cc-981e-30db4c1c8ceb" containerName="mariadb-database-create" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.031661 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="11c1b0d7-a15e-431e-bcbf-e01ab670c81e" containerName="mariadb-account-create-update" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.031674 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="1889de01-2407-450e-b839-e4457e27e214" containerName="mariadb-account-create-update" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.031681 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="9d898536-f5dc-41be-b0c1-e68f52cf8915" containerName="registry-server" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.031688 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc2e997b-bfd4-47cc-981e-30db4c1c8ceb" containerName="mariadb-database-create" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.031696 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e68a5661-3dc2-48c0-9f39-d54304c4127d" containerName="mariadb-database-create" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.031704 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="71426e1b-db2d-4e6a-9382-81c6b239f747" 
containerName="mariadb-account-create-update" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.031716 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9b71041-5a81-4d86-9e25-45ecfa08fd58" containerName="keystone-db-sync" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.031725 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="0cb0fadf-7873-4efe-b8b5-965e4482ebf2" containerName="mariadb-database-create" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.032318 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-rxwfk" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.038215 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"osp-secret" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.038299 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-config-data" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.038436 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-scripts" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.038493 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-keystone-dockercfg-2rkcv" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.038564 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.042989 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-rxwfk"] Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.147867 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dvz2g\" (UniqueName: \"kubernetes.io/projected/427dd0db-2906-44d3-816e-a09f7cdd3a12-kube-api-access-dvz2g\") pod \"keystone-bootstrap-rxwfk\" (UID: \"427dd0db-2906-44d3-816e-a09f7cdd3a12\") " pod="openstack-kuttl-tests/keystone-bootstrap-rxwfk" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.147915 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/427dd0db-2906-44d3-816e-a09f7cdd3a12-credential-keys\") pod \"keystone-bootstrap-rxwfk\" (UID: \"427dd0db-2906-44d3-816e-a09f7cdd3a12\") " pod="openstack-kuttl-tests/keystone-bootstrap-rxwfk" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.148229 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/427dd0db-2906-44d3-816e-a09f7cdd3a12-fernet-keys\") pod \"keystone-bootstrap-rxwfk\" (UID: \"427dd0db-2906-44d3-816e-a09f7cdd3a12\") " pod="openstack-kuttl-tests/keystone-bootstrap-rxwfk" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.148455 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/427dd0db-2906-44d3-816e-a09f7cdd3a12-scripts\") pod \"keystone-bootstrap-rxwfk\" (UID: \"427dd0db-2906-44d3-816e-a09f7cdd3a12\") " pod="openstack-kuttl-tests/keystone-bootstrap-rxwfk" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.148503 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/427dd0db-2906-44d3-816e-a09f7cdd3a12-config-data\") pod \"keystone-bootstrap-rxwfk\" (UID: \"427dd0db-2906-44d3-816e-a09f7cdd3a12\") " pod="openstack-kuttl-tests/keystone-bootstrap-rxwfk" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.148563 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/427dd0db-2906-44d3-816e-a09f7cdd3a12-combined-ca-bundle\") pod \"keystone-bootstrap-rxwfk\" (UID: \"427dd0db-2906-44d3-816e-a09f7cdd3a12\") " pod="openstack-kuttl-tests/keystone-bootstrap-rxwfk" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.166533 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.168434 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.171747 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.175453 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.178596 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.235055 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-db-sync-2sxrh"] Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.236655 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-db-sync-2sxrh" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.241621 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-config-data" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.243125 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-scripts" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.243310 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-cinder-dockercfg-krqcj" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.249332 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-db-sync-2sxrh"] Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.250215 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dvz2g\" (UniqueName: \"kubernetes.io/projected/427dd0db-2906-44d3-816e-a09f7cdd3a12-kube-api-access-dvz2g\") pod \"keystone-bootstrap-rxwfk\" (UID: \"427dd0db-2906-44d3-816e-a09f7cdd3a12\") " pod="openstack-kuttl-tests/keystone-bootstrap-rxwfk" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.250266 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/427dd0db-2906-44d3-816e-a09f7cdd3a12-credential-keys\") pod \"keystone-bootstrap-rxwfk\" (UID: \"427dd0db-2906-44d3-816e-a09f7cdd3a12\") " pod="openstack-kuttl-tests/keystone-bootstrap-rxwfk" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.250374 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: 
\"kubernetes.io/secret/427dd0db-2906-44d3-816e-a09f7cdd3a12-fernet-keys\") pod \"keystone-bootstrap-rxwfk\" (UID: \"427dd0db-2906-44d3-816e-a09f7cdd3a12\") " pod="openstack-kuttl-tests/keystone-bootstrap-rxwfk" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.250454 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/427dd0db-2906-44d3-816e-a09f7cdd3a12-scripts\") pod \"keystone-bootstrap-rxwfk\" (UID: \"427dd0db-2906-44d3-816e-a09f7cdd3a12\") " pod="openstack-kuttl-tests/keystone-bootstrap-rxwfk" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.250482 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/427dd0db-2906-44d3-816e-a09f7cdd3a12-config-data\") pod \"keystone-bootstrap-rxwfk\" (UID: \"427dd0db-2906-44d3-816e-a09f7cdd3a12\") " pod="openstack-kuttl-tests/keystone-bootstrap-rxwfk" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.250512 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/427dd0db-2906-44d3-816e-a09f7cdd3a12-combined-ca-bundle\") pod \"keystone-bootstrap-rxwfk\" (UID: \"427dd0db-2906-44d3-816e-a09f7cdd3a12\") " pod="openstack-kuttl-tests/keystone-bootstrap-rxwfk" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.254412 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/placement-db-sync-fq8jg"] Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.255756 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-db-sync-fq8jg" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.257621 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"placement-scripts" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.260204 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/427dd0db-2906-44d3-816e-a09f7cdd3a12-scripts\") pod \"keystone-bootstrap-rxwfk\" (UID: \"427dd0db-2906-44d3-816e-a09f7cdd3a12\") " pod="openstack-kuttl-tests/keystone-bootstrap-rxwfk" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.260582 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"placement-placement-dockercfg-tx9t2" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.260768 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"placement-config-data" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.260589 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/427dd0db-2906-44d3-816e-a09f7cdd3a12-credential-keys\") pod \"keystone-bootstrap-rxwfk\" (UID: \"427dd0db-2906-44d3-816e-a09f7cdd3a12\") " pod="openstack-kuttl-tests/keystone-bootstrap-rxwfk" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.263497 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/427dd0db-2906-44d3-816e-a09f7cdd3a12-combined-ca-bundle\") pod \"keystone-bootstrap-rxwfk\" (UID: \"427dd0db-2906-44d3-816e-a09f7cdd3a12\") " pod="openstack-kuttl-tests/keystone-bootstrap-rxwfk" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.266185 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/427dd0db-2906-44d3-816e-a09f7cdd3a12-config-data\") pod \"keystone-bootstrap-rxwfk\" (UID: \"427dd0db-2906-44d3-816e-a09f7cdd3a12\") " pod="openstack-kuttl-tests/keystone-bootstrap-rxwfk" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.267276 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/427dd0db-2906-44d3-816e-a09f7cdd3a12-fernet-keys\") pod \"keystone-bootstrap-rxwfk\" (UID: \"427dd0db-2906-44d3-816e-a09f7cdd3a12\") " pod="openstack-kuttl-tests/keystone-bootstrap-rxwfk" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.268417 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-db-sync-fq8jg"] Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.275517 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dvz2g\" (UniqueName: \"kubernetes.io/projected/427dd0db-2906-44d3-816e-a09f7cdd3a12-kube-api-access-dvz2g\") pod \"keystone-bootstrap-rxwfk\" (UID: \"427dd0db-2906-44d3-816e-a09f7cdd3a12\") " pod="openstack-kuttl-tests/keystone-bootstrap-rxwfk" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.352731 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c48697d3-0295-45c7-89d7-7d2c524f3b20-combined-ca-bundle\") pod \"placement-db-sync-fq8jg\" (UID: \"c48697d3-0295-45c7-89d7-7d2c524f3b20\") " pod="openstack-kuttl-tests/placement-db-sync-fq8jg" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.352790 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-59s64\" (UniqueName: \"kubernetes.io/projected/fd4fc3e6-d57c-4c96-9109-8c8c3c833307-kube-api-access-59s64\") pod \"cinder-db-sync-2sxrh\" (UID: \"fd4fc3e6-d57c-4c96-9109-8c8c3c833307\") " pod="openstack-kuttl-tests/cinder-db-sync-2sxrh" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.352824 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1ae5026e-37c6-453a-943c-9dfd8cebb2c2-scripts\") pod \"ceilometer-0\" (UID: \"1ae5026e-37c6-453a-943c-9dfd8cebb2c2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.352878 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1ae5026e-37c6-453a-943c-9dfd8cebb2c2-config-data\") pod \"ceilometer-0\" (UID: \"1ae5026e-37c6-453a-943c-9dfd8cebb2c2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.352910 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c48697d3-0295-45c7-89d7-7d2c524f3b20-logs\") pod \"placement-db-sync-fq8jg\" (UID: \"c48697d3-0295-45c7-89d7-7d2c524f3b20\") " pod="openstack-kuttl-tests/placement-db-sync-fq8jg" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.352928 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1ae5026e-37c6-453a-943c-9dfd8cebb2c2-log-httpd\") pod \"ceilometer-0\" (UID: \"1ae5026e-37c6-453a-943c-9dfd8cebb2c2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:39:39 
crc kubenswrapper[4558]: I0120 17:39:39.352982 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c48697d3-0295-45c7-89d7-7d2c524f3b20-config-data\") pod \"placement-db-sync-fq8jg\" (UID: \"c48697d3-0295-45c7-89d7-7d2c524f3b20\") " pod="openstack-kuttl-tests/placement-db-sync-fq8jg" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.353011 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c48697d3-0295-45c7-89d7-7d2c524f3b20-scripts\") pod \"placement-db-sync-fq8jg\" (UID: \"c48697d3-0295-45c7-89d7-7d2c524f3b20\") " pod="openstack-kuttl-tests/placement-db-sync-fq8jg" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.353030 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd4fc3e6-d57c-4c96-9109-8c8c3c833307-config-data\") pod \"cinder-db-sync-2sxrh\" (UID: \"fd4fc3e6-d57c-4c96-9109-8c8c3c833307\") " pod="openstack-kuttl-tests/cinder-db-sync-2sxrh" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.353068 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/fd4fc3e6-d57c-4c96-9109-8c8c3c833307-db-sync-config-data\") pod \"cinder-db-sync-2sxrh\" (UID: \"fd4fc3e6-d57c-4c96-9109-8c8c3c833307\") " pod="openstack-kuttl-tests/cinder-db-sync-2sxrh" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.353158 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mbpf4\" (UniqueName: \"kubernetes.io/projected/c48697d3-0295-45c7-89d7-7d2c524f3b20-kube-api-access-mbpf4\") pod \"placement-db-sync-fq8jg\" (UID: \"c48697d3-0295-45c7-89d7-7d2c524f3b20\") " pod="openstack-kuttl-tests/placement-db-sync-fq8jg" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.353199 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1ae5026e-37c6-453a-943c-9dfd8cebb2c2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"1ae5026e-37c6-453a-943c-9dfd8cebb2c2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.353213 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1ae5026e-37c6-453a-943c-9dfd8cebb2c2-run-httpd\") pod \"ceilometer-0\" (UID: \"1ae5026e-37c6-453a-943c-9dfd8cebb2c2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.353233 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tzz8b\" (UniqueName: \"kubernetes.io/projected/1ae5026e-37c6-453a-943c-9dfd8cebb2c2-kube-api-access-tzz8b\") pod \"ceilometer-0\" (UID: \"1ae5026e-37c6-453a-943c-9dfd8cebb2c2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.353252 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fd4fc3e6-d57c-4c96-9109-8c8c3c833307-scripts\") pod \"cinder-db-sync-2sxrh\" (UID: \"fd4fc3e6-d57c-4c96-9109-8c8c3c833307\") " 
pod="openstack-kuttl-tests/cinder-db-sync-2sxrh" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.353270 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ae5026e-37c6-453a-943c-9dfd8cebb2c2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"1ae5026e-37c6-453a-943c-9dfd8cebb2c2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.353289 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd4fc3e6-d57c-4c96-9109-8c8c3c833307-combined-ca-bundle\") pod \"cinder-db-sync-2sxrh\" (UID: \"fd4fc3e6-d57c-4c96-9109-8c8c3c833307\") " pod="openstack-kuttl-tests/cinder-db-sync-2sxrh" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.353318 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/fd4fc3e6-d57c-4c96-9109-8c8c3c833307-etc-machine-id\") pod \"cinder-db-sync-2sxrh\" (UID: \"fd4fc3e6-d57c-4c96-9109-8c8c3c833307\") " pod="openstack-kuttl-tests/cinder-db-sync-2sxrh" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.353482 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-rxwfk" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.455115 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/fd4fc3e6-d57c-4c96-9109-8c8c3c833307-db-sync-config-data\") pod \"cinder-db-sync-2sxrh\" (UID: \"fd4fc3e6-d57c-4c96-9109-8c8c3c833307\") " pod="openstack-kuttl-tests/cinder-db-sync-2sxrh" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.455421 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mbpf4\" (UniqueName: \"kubernetes.io/projected/c48697d3-0295-45c7-89d7-7d2c524f3b20-kube-api-access-mbpf4\") pod \"placement-db-sync-fq8jg\" (UID: \"c48697d3-0295-45c7-89d7-7d2c524f3b20\") " pod="openstack-kuttl-tests/placement-db-sync-fq8jg" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.455448 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1ae5026e-37c6-453a-943c-9dfd8cebb2c2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"1ae5026e-37c6-453a-943c-9dfd8cebb2c2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.455466 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1ae5026e-37c6-453a-943c-9dfd8cebb2c2-run-httpd\") pod \"ceilometer-0\" (UID: \"1ae5026e-37c6-453a-943c-9dfd8cebb2c2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.455491 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tzz8b\" (UniqueName: \"kubernetes.io/projected/1ae5026e-37c6-453a-943c-9dfd8cebb2c2-kube-api-access-tzz8b\") pod \"ceilometer-0\" (UID: \"1ae5026e-37c6-453a-943c-9dfd8cebb2c2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.455511 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/fd4fc3e6-d57c-4c96-9109-8c8c3c833307-scripts\") pod \"cinder-db-sync-2sxrh\" (UID: \"fd4fc3e6-d57c-4c96-9109-8c8c3c833307\") " pod="openstack-kuttl-tests/cinder-db-sync-2sxrh" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.455529 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ae5026e-37c6-453a-943c-9dfd8cebb2c2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"1ae5026e-37c6-453a-943c-9dfd8cebb2c2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.455549 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd4fc3e6-d57c-4c96-9109-8c8c3c833307-combined-ca-bundle\") pod \"cinder-db-sync-2sxrh\" (UID: \"fd4fc3e6-d57c-4c96-9109-8c8c3c833307\") " pod="openstack-kuttl-tests/cinder-db-sync-2sxrh" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.455578 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/fd4fc3e6-d57c-4c96-9109-8c8c3c833307-etc-machine-id\") pod \"cinder-db-sync-2sxrh\" (UID: \"fd4fc3e6-d57c-4c96-9109-8c8c3c833307\") " pod="openstack-kuttl-tests/cinder-db-sync-2sxrh" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.455618 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c48697d3-0295-45c7-89d7-7d2c524f3b20-combined-ca-bundle\") pod \"placement-db-sync-fq8jg\" (UID: \"c48697d3-0295-45c7-89d7-7d2c524f3b20\") " pod="openstack-kuttl-tests/placement-db-sync-fq8jg" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.455658 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-59s64\" (UniqueName: \"kubernetes.io/projected/fd4fc3e6-d57c-4c96-9109-8c8c3c833307-kube-api-access-59s64\") pod \"cinder-db-sync-2sxrh\" (UID: \"fd4fc3e6-d57c-4c96-9109-8c8c3c833307\") " pod="openstack-kuttl-tests/cinder-db-sync-2sxrh" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.455678 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1ae5026e-37c6-453a-943c-9dfd8cebb2c2-scripts\") pod \"ceilometer-0\" (UID: \"1ae5026e-37c6-453a-943c-9dfd8cebb2c2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.455704 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1ae5026e-37c6-453a-943c-9dfd8cebb2c2-config-data\") pod \"ceilometer-0\" (UID: \"1ae5026e-37c6-453a-943c-9dfd8cebb2c2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.455724 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c48697d3-0295-45c7-89d7-7d2c524f3b20-logs\") pod \"placement-db-sync-fq8jg\" (UID: \"c48697d3-0295-45c7-89d7-7d2c524f3b20\") " pod="openstack-kuttl-tests/placement-db-sync-fq8jg" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.455744 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1ae5026e-37c6-453a-943c-9dfd8cebb2c2-log-httpd\") pod \"ceilometer-0\" (UID: \"1ae5026e-37c6-453a-943c-9dfd8cebb2c2\") " 
pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.455803 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c48697d3-0295-45c7-89d7-7d2c524f3b20-config-data\") pod \"placement-db-sync-fq8jg\" (UID: \"c48697d3-0295-45c7-89d7-7d2c524f3b20\") " pod="openstack-kuttl-tests/placement-db-sync-fq8jg" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.455832 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c48697d3-0295-45c7-89d7-7d2c524f3b20-scripts\") pod \"placement-db-sync-fq8jg\" (UID: \"c48697d3-0295-45c7-89d7-7d2c524f3b20\") " pod="openstack-kuttl-tests/placement-db-sync-fq8jg" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.455850 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd4fc3e6-d57c-4c96-9109-8c8c3c833307-config-data\") pod \"cinder-db-sync-2sxrh\" (UID: \"fd4fc3e6-d57c-4c96-9109-8c8c3c833307\") " pod="openstack-kuttl-tests/cinder-db-sync-2sxrh" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.456275 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/fd4fc3e6-d57c-4c96-9109-8c8c3c833307-etc-machine-id\") pod \"cinder-db-sync-2sxrh\" (UID: \"fd4fc3e6-d57c-4c96-9109-8c8c3c833307\") " pod="openstack-kuttl-tests/cinder-db-sync-2sxrh" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.458066 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1ae5026e-37c6-453a-943c-9dfd8cebb2c2-run-httpd\") pod \"ceilometer-0\" (UID: \"1ae5026e-37c6-453a-943c-9dfd8cebb2c2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.458320 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1ae5026e-37c6-453a-943c-9dfd8cebb2c2-log-httpd\") pod \"ceilometer-0\" (UID: \"1ae5026e-37c6-453a-943c-9dfd8cebb2c2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.458666 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c48697d3-0295-45c7-89d7-7d2c524f3b20-logs\") pod \"placement-db-sync-fq8jg\" (UID: \"c48697d3-0295-45c7-89d7-7d2c524f3b20\") " pod="openstack-kuttl-tests/placement-db-sync-fq8jg" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.462670 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fd4fc3e6-d57c-4c96-9109-8c8c3c833307-scripts\") pod \"cinder-db-sync-2sxrh\" (UID: \"fd4fc3e6-d57c-4c96-9109-8c8c3c833307\") " pod="openstack-kuttl-tests/cinder-db-sync-2sxrh" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.462682 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/fd4fc3e6-d57c-4c96-9109-8c8c3c833307-db-sync-config-data\") pod \"cinder-db-sync-2sxrh\" (UID: \"fd4fc3e6-d57c-4c96-9109-8c8c3c833307\") " pod="openstack-kuttl-tests/cinder-db-sync-2sxrh" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.463074 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/fd4fc3e6-d57c-4c96-9109-8c8c3c833307-config-data\") pod \"cinder-db-sync-2sxrh\" (UID: \"fd4fc3e6-d57c-4c96-9109-8c8c3c833307\") " pod="openstack-kuttl-tests/cinder-db-sync-2sxrh" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.463097 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1ae5026e-37c6-453a-943c-9dfd8cebb2c2-config-data\") pod \"ceilometer-0\" (UID: \"1ae5026e-37c6-453a-943c-9dfd8cebb2c2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.463233 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1ae5026e-37c6-453a-943c-9dfd8cebb2c2-scripts\") pod \"ceilometer-0\" (UID: \"1ae5026e-37c6-453a-943c-9dfd8cebb2c2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.466528 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1ae5026e-37c6-453a-943c-9dfd8cebb2c2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"1ae5026e-37c6-453a-943c-9dfd8cebb2c2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.466554 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c48697d3-0295-45c7-89d7-7d2c524f3b20-scripts\") pod \"placement-db-sync-fq8jg\" (UID: \"c48697d3-0295-45c7-89d7-7d2c524f3b20\") " pod="openstack-kuttl-tests/placement-db-sync-fq8jg" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.471131 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c48697d3-0295-45c7-89d7-7d2c524f3b20-config-data\") pod \"placement-db-sync-fq8jg\" (UID: \"c48697d3-0295-45c7-89d7-7d2c524f3b20\") " pod="openstack-kuttl-tests/placement-db-sync-fq8jg" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.471669 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c48697d3-0295-45c7-89d7-7d2c524f3b20-combined-ca-bundle\") pod \"placement-db-sync-fq8jg\" (UID: \"c48697d3-0295-45c7-89d7-7d2c524f3b20\") " pod="openstack-kuttl-tests/placement-db-sync-fq8jg" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.472761 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ae5026e-37c6-453a-943c-9dfd8cebb2c2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"1ae5026e-37c6-453a-943c-9dfd8cebb2c2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.475463 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd4fc3e6-d57c-4c96-9109-8c8c3c833307-combined-ca-bundle\") pod \"cinder-db-sync-2sxrh\" (UID: \"fd4fc3e6-d57c-4c96-9109-8c8c3c833307\") " pod="openstack-kuttl-tests/cinder-db-sync-2sxrh" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.476761 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tzz8b\" (UniqueName: \"kubernetes.io/projected/1ae5026e-37c6-453a-943c-9dfd8cebb2c2-kube-api-access-tzz8b\") pod \"ceilometer-0\" (UID: \"1ae5026e-37c6-453a-943c-9dfd8cebb2c2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:39:39 crc kubenswrapper[4558]: 
I0120 17:39:39.477032 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-59s64\" (UniqueName: \"kubernetes.io/projected/fd4fc3e6-d57c-4c96-9109-8c8c3c833307-kube-api-access-59s64\") pod \"cinder-db-sync-2sxrh\" (UID: \"fd4fc3e6-d57c-4c96-9109-8c8c3c833307\") " pod="openstack-kuttl-tests/cinder-db-sync-2sxrh" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.479511 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mbpf4\" (UniqueName: \"kubernetes.io/projected/c48697d3-0295-45c7-89d7-7d2c524f3b20-kube-api-access-mbpf4\") pod \"placement-db-sync-fq8jg\" (UID: \"c48697d3-0295-45c7-89d7-7d2c524f3b20\") " pod="openstack-kuttl-tests/placement-db-sync-fq8jg" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.483436 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.622292 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-db-sync-2sxrh" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.636566 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-db-sync-fq8jg" Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.778861 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-rxwfk"] Jan 20 17:39:39 crc kubenswrapper[4558]: W0120 17:39:39.786777 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod427dd0db_2906_44d3_816e_a09f7cdd3a12.slice/crio-4c40242c83102c27d69a3eb2bcad687b523095b6c563321b929f90ae76355c4c WatchSource:0}: Error finding container 4c40242c83102c27d69a3eb2bcad687b523095b6c563321b929f90ae76355c4c: Status 404 returned error can't find the container with id 4c40242c83102c27d69a3eb2bcad687b523095b6c563321b929f90ae76355c4c Jan 20 17:39:39 crc kubenswrapper[4558]: W0120 17:39:39.898967 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1ae5026e_37c6_453a_943c_9dfd8cebb2c2.slice/crio-fbd933fbdd457e5313bd1e0b7c95fc1fd3cd0e4461e3949aec7216603544199f WatchSource:0}: Error finding container fbd933fbdd457e5313bd1e0b7c95fc1fd3cd0e4461e3949aec7216603544199f: Status 404 returned error can't find the container with id fbd933fbdd457e5313bd1e0b7c95fc1fd3cd0e4461e3949aec7216603544199f Jan 20 17:39:39 crc kubenswrapper[4558]: I0120 17:39:39.899556 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:39:40 crc kubenswrapper[4558]: I0120 17:39:40.060209 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-db-sync-2sxrh"] Jan 20 17:39:40 crc kubenswrapper[4558]: W0120 17:39:40.065025 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfd4fc3e6_d57c_4c96_9109_8c8c3c833307.slice/crio-f47bd09332bfe7495e8b13874999b60de3ec79cd649675900f4a6331bff4405d WatchSource:0}: Error finding container f47bd09332bfe7495e8b13874999b60de3ec79cd649675900f4a6331bff4405d: Status 404 returned error can't find the container with id f47bd09332bfe7495e8b13874999b60de3ec79cd649675900f4a6331bff4405d Jan 20 17:39:40 crc kubenswrapper[4558]: I0120 17:39:40.117766 4558 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:39:40 crc kubenswrapper[4558]: I0120 17:39:40.119196 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:39:40 crc kubenswrapper[4558]: I0120 17:39:40.127574 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-public-svc" Jan 20 17:39:40 crc kubenswrapper[4558]: I0120 17:39:40.127671 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-glance-dockercfg-gnzn6" Jan 20 17:39:40 crc kubenswrapper[4558]: I0120 17:39:40.127832 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-external-config-data" Jan 20 17:39:40 crc kubenswrapper[4558]: I0120 17:39:40.127915 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-scripts" Jan 20 17:39:40 crc kubenswrapper[4558]: I0120 17:39:40.133964 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:39:40 crc kubenswrapper[4558]: I0120 17:39:40.195570 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-db-sync-fq8jg"] Jan 20 17:39:40 crc kubenswrapper[4558]: I0120 17:39:40.223157 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:39:40 crc kubenswrapper[4558]: I0120 17:39:40.224851 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:39:40 crc kubenswrapper[4558]: I0120 17:39:40.227855 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-internal-svc" Jan 20 17:39:40 crc kubenswrapper[4558]: I0120 17:39:40.231645 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-internal-config-data" Jan 20 17:39:40 crc kubenswrapper[4558]: I0120 17:39:40.246670 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:39:40 crc kubenswrapper[4558]: I0120 17:39:40.283359 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/844b234f-5c4d-4250-911d-cd06f864fbb1-logs\") pod \"glance-default-external-api-0\" (UID: \"844b234f-5c4d-4250-911d-cd06f864fbb1\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:39:40 crc kubenswrapper[4558]: I0120 17:39:40.283434 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/844b234f-5c4d-4250-911d-cd06f864fbb1-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"844b234f-5c4d-4250-911d-cd06f864fbb1\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:39:40 crc kubenswrapper[4558]: I0120 17:39:40.283468 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-stgbn\" (UniqueName: \"kubernetes.io/projected/844b234f-5c4d-4250-911d-cd06f864fbb1-kube-api-access-stgbn\") pod \"glance-default-external-api-0\" (UID: \"844b234f-5c4d-4250-911d-cd06f864fbb1\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:39:40 crc 
kubenswrapper[4558]: I0120 17:39:40.283518 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/844b234f-5c4d-4250-911d-cd06f864fbb1-config-data\") pod \"glance-default-external-api-0\" (UID: \"844b234f-5c4d-4250-911d-cd06f864fbb1\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:39:40 crc kubenswrapper[4558]: I0120 17:39:40.283547 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"844b234f-5c4d-4250-911d-cd06f864fbb1\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:39:40 crc kubenswrapper[4558]: I0120 17:39:40.283690 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/844b234f-5c4d-4250-911d-cd06f864fbb1-scripts\") pod \"glance-default-external-api-0\" (UID: \"844b234f-5c4d-4250-911d-cd06f864fbb1\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:39:40 crc kubenswrapper[4558]: I0120 17:39:40.283712 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/844b234f-5c4d-4250-911d-cd06f864fbb1-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"844b234f-5c4d-4250-911d-cd06f864fbb1\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:39:40 crc kubenswrapper[4558]: I0120 17:39:40.283763 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/844b234f-5c4d-4250-911d-cd06f864fbb1-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"844b234f-5c4d-4250-911d-cd06f864fbb1\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:39:40 crc kubenswrapper[4558]: I0120 17:39:40.405211 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/844b234f-5c4d-4250-911d-cd06f864fbb1-scripts\") pod \"glance-default-external-api-0\" (UID: \"844b234f-5c4d-4250-911d-cd06f864fbb1\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:39:40 crc kubenswrapper[4558]: I0120 17:39:40.405283 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/844b234f-5c4d-4250-911d-cd06f864fbb1-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"844b234f-5c4d-4250-911d-cd06f864fbb1\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:39:40 crc kubenswrapper[4558]: I0120 17:39:40.405342 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/844b234f-5c4d-4250-911d-cd06f864fbb1-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"844b234f-5c4d-4250-911d-cd06f864fbb1\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:39:40 crc kubenswrapper[4558]: I0120 17:39:40.405582 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a90f602d-27b9-4a25-ae05-2129b11e9076-scripts\") pod \"glance-default-internal-api-0\" (UID: 
\"a90f602d-27b9-4a25-ae05-2129b11e9076\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:39:40 crc kubenswrapper[4558]: I0120 17:39:40.405649 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/844b234f-5c4d-4250-911d-cd06f864fbb1-logs\") pod \"glance-default-external-api-0\" (UID: \"844b234f-5c4d-4250-911d-cd06f864fbb1\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:39:40 crc kubenswrapper[4558]: I0120 17:39:40.405712 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/844b234f-5c4d-4250-911d-cd06f864fbb1-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"844b234f-5c4d-4250-911d-cd06f864fbb1\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:39:40 crc kubenswrapper[4558]: I0120 17:39:40.405767 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-stgbn\" (UniqueName: \"kubernetes.io/projected/844b234f-5c4d-4250-911d-cd06f864fbb1-kube-api-access-stgbn\") pod \"glance-default-external-api-0\" (UID: \"844b234f-5c4d-4250-911d-cd06f864fbb1\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:39:40 crc kubenswrapper[4558]: I0120 17:39:40.405792 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a90f602d-27b9-4a25-ae05-2129b11e9076-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"a90f602d-27b9-4a25-ae05-2129b11e9076\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:39:40 crc kubenswrapper[4558]: I0120 17:39:40.405836 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/844b234f-5c4d-4250-911d-cd06f864fbb1-config-data\") pod \"glance-default-external-api-0\" (UID: \"844b234f-5c4d-4250-911d-cd06f864fbb1\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:39:40 crc kubenswrapper[4558]: I0120 17:39:40.405855 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a90f602d-27b9-4a25-ae05-2129b11e9076-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"a90f602d-27b9-4a25-ae05-2129b11e9076\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:39:40 crc kubenswrapper[4558]: I0120 17:39:40.405891 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a90f602d-27b9-4a25-ae05-2129b11e9076-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"a90f602d-27b9-4a25-ae05-2129b11e9076\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:39:40 crc kubenswrapper[4558]: I0120 17:39:40.405918 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l5nqk\" (UniqueName: \"kubernetes.io/projected/a90f602d-27b9-4a25-ae05-2129b11e9076-kube-api-access-l5nqk\") pod \"glance-default-internal-api-0\" (UID: \"a90f602d-27b9-4a25-ae05-2129b11e9076\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:39:40 crc kubenswrapper[4558]: I0120 17:39:40.405943 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/a90f602d-27b9-4a25-ae05-2129b11e9076-config-data\") pod \"glance-default-internal-api-0\" (UID: \"a90f602d-27b9-4a25-ae05-2129b11e9076\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:39:40 crc kubenswrapper[4558]: I0120 17:39:40.405972 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"844b234f-5c4d-4250-911d-cd06f864fbb1\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:39:40 crc kubenswrapper[4558]: I0120 17:39:40.406031 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"a90f602d-27b9-4a25-ae05-2129b11e9076\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:39:40 crc kubenswrapper[4558]: I0120 17:39:40.406050 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a90f602d-27b9-4a25-ae05-2129b11e9076-logs\") pod \"glance-default-internal-api-0\" (UID: \"a90f602d-27b9-4a25-ae05-2129b11e9076\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:39:40 crc kubenswrapper[4558]: I0120 17:39:40.406862 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"844b234f-5c4d-4250-911d-cd06f864fbb1\") device mount path \"/mnt/openstack/pv09\"" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:39:40 crc kubenswrapper[4558]: I0120 17:39:40.413796 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/844b234f-5c4d-4250-911d-cd06f864fbb1-logs\") pod \"glance-default-external-api-0\" (UID: \"844b234f-5c4d-4250-911d-cd06f864fbb1\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:39:40 crc kubenswrapper[4558]: I0120 17:39:40.417700 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/844b234f-5c4d-4250-911d-cd06f864fbb1-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"844b234f-5c4d-4250-911d-cd06f864fbb1\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:39:40 crc kubenswrapper[4558]: I0120 17:39:40.419649 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/844b234f-5c4d-4250-911d-cd06f864fbb1-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"844b234f-5c4d-4250-911d-cd06f864fbb1\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:39:40 crc kubenswrapper[4558]: I0120 17:39:40.420652 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/844b234f-5c4d-4250-911d-cd06f864fbb1-scripts\") pod \"glance-default-external-api-0\" (UID: \"844b234f-5c4d-4250-911d-cd06f864fbb1\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:39:40 crc kubenswrapper[4558]: I0120 17:39:40.420809 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/844b234f-5c4d-4250-911d-cd06f864fbb1-config-data\") pod \"glance-default-external-api-0\" (UID: \"844b234f-5c4d-4250-911d-cd06f864fbb1\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:39:40 crc kubenswrapper[4558]: I0120 17:39:40.433614 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/844b234f-5c4d-4250-911d-cd06f864fbb1-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"844b234f-5c4d-4250-911d-cd06f864fbb1\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:39:40 crc kubenswrapper[4558]: I0120 17:39:40.439075 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-stgbn\" (UniqueName: \"kubernetes.io/projected/844b234f-5c4d-4250-911d-cd06f864fbb1-kube-api-access-stgbn\") pod \"glance-default-external-api-0\" (UID: \"844b234f-5c4d-4250-911d-cd06f864fbb1\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:39:40 crc kubenswrapper[4558]: I0120 17:39:40.455647 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"844b234f-5c4d-4250-911d-cd06f864fbb1\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:39:40 crc kubenswrapper[4558]: I0120 17:39:40.518273 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a90f602d-27b9-4a25-ae05-2129b11e9076-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"a90f602d-27b9-4a25-ae05-2129b11e9076\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:39:40 crc kubenswrapper[4558]: I0120 17:39:40.518357 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a90f602d-27b9-4a25-ae05-2129b11e9076-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"a90f602d-27b9-4a25-ae05-2129b11e9076\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:39:40 crc kubenswrapper[4558]: I0120 17:39:40.518397 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a90f602d-27b9-4a25-ae05-2129b11e9076-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"a90f602d-27b9-4a25-ae05-2129b11e9076\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:39:40 crc kubenswrapper[4558]: I0120 17:39:40.518424 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l5nqk\" (UniqueName: \"kubernetes.io/projected/a90f602d-27b9-4a25-ae05-2129b11e9076-kube-api-access-l5nqk\") pod \"glance-default-internal-api-0\" (UID: \"a90f602d-27b9-4a25-ae05-2129b11e9076\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:39:40 crc kubenswrapper[4558]: I0120 17:39:40.518447 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a90f602d-27b9-4a25-ae05-2129b11e9076-config-data\") pod \"glance-default-internal-api-0\" (UID: \"a90f602d-27b9-4a25-ae05-2129b11e9076\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:39:40 crc kubenswrapper[4558]: I0120 17:39:40.518513 4558 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a90f602d-27b9-4a25-ae05-2129b11e9076-logs\") pod \"glance-default-internal-api-0\" (UID: \"a90f602d-27b9-4a25-ae05-2129b11e9076\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:39:40 crc kubenswrapper[4558]: I0120 17:39:40.518544 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"a90f602d-27b9-4a25-ae05-2129b11e9076\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:39:40 crc kubenswrapper[4558]: I0120 17:39:40.518748 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a90f602d-27b9-4a25-ae05-2129b11e9076-scripts\") pod \"glance-default-internal-api-0\" (UID: \"a90f602d-27b9-4a25-ae05-2129b11e9076\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:39:40 crc kubenswrapper[4558]: I0120 17:39:40.522670 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a90f602d-27b9-4a25-ae05-2129b11e9076-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"a90f602d-27b9-4a25-ae05-2129b11e9076\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:39:40 crc kubenswrapper[4558]: I0120 17:39:40.522960 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a90f602d-27b9-4a25-ae05-2129b11e9076-logs\") pod \"glance-default-internal-api-0\" (UID: \"a90f602d-27b9-4a25-ae05-2129b11e9076\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:39:40 crc kubenswrapper[4558]: I0120 17:39:40.523917 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"a90f602d-27b9-4a25-ae05-2129b11e9076\") device mount path \"/mnt/openstack/pv11\"" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:39:40 crc kubenswrapper[4558]: I0120 17:39:40.533086 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a90f602d-27b9-4a25-ae05-2129b11e9076-scripts\") pod \"glance-default-internal-api-0\" (UID: \"a90f602d-27b9-4a25-ae05-2129b11e9076\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:39:40 crc kubenswrapper[4558]: I0120 17:39:40.546208 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a90f602d-27b9-4a25-ae05-2129b11e9076-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"a90f602d-27b9-4a25-ae05-2129b11e9076\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:39:40 crc kubenswrapper[4558]: I0120 17:39:40.550292 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a90f602d-27b9-4a25-ae05-2129b11e9076-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"a90f602d-27b9-4a25-ae05-2129b11e9076\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:39:40 crc kubenswrapper[4558]: I0120 17:39:40.577375 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l5nqk\" (UniqueName: 
\"kubernetes.io/projected/a90f602d-27b9-4a25-ae05-2129b11e9076-kube-api-access-l5nqk\") pod \"glance-default-internal-api-0\" (UID: \"a90f602d-27b9-4a25-ae05-2129b11e9076\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:39:40 crc kubenswrapper[4558]: I0120 17:39:40.578417 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a90f602d-27b9-4a25-ae05-2129b11e9076-config-data\") pod \"glance-default-internal-api-0\" (UID: \"a90f602d-27b9-4a25-ae05-2129b11e9076\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:39:40 crc kubenswrapper[4558]: I0120 17:39:40.580938 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"a90f602d-27b9-4a25-ae05-2129b11e9076\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:39:40 crc kubenswrapper[4558]: I0120 17:39:40.639353 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-rxwfk" event={"ID":"427dd0db-2906-44d3-816e-a09f7cdd3a12","Type":"ContainerStarted","Data":"f19974f989568d1e6a6c2323bcacffe749a6aded654469aa967eea0f4c6fc9ba"} Jan 20 17:39:40 crc kubenswrapper[4558]: I0120 17:39:40.639601 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-rxwfk" event={"ID":"427dd0db-2906-44d3-816e-a09f7cdd3a12","Type":"ContainerStarted","Data":"4c40242c83102c27d69a3eb2bcad687b523095b6c563321b929f90ae76355c4c"} Jan 20 17:39:40 crc kubenswrapper[4558]: I0120 17:39:40.645194 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-sync-fq8jg" event={"ID":"c48697d3-0295-45c7-89d7-7d2c524f3b20","Type":"ContainerStarted","Data":"b939e3792d1ca4dac4c7681e6b9728b9b8e64bdf4b0beba07b105d7abb383dd6"} Jan 20 17:39:40 crc kubenswrapper[4558]: I0120 17:39:40.645257 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-sync-fq8jg" event={"ID":"c48697d3-0295-45c7-89d7-7d2c524f3b20","Type":"ContainerStarted","Data":"7718980a8e20290bb552139c1fa8bfcfa2247031699c68d3efc50e302eb4c54f"} Jan 20 17:39:40 crc kubenswrapper[4558]: I0120 17:39:40.647149 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"1ae5026e-37c6-453a-943c-9dfd8cebb2c2","Type":"ContainerStarted","Data":"fbd933fbdd457e5313bd1e0b7c95fc1fd3cd0e4461e3949aec7216603544199f"} Jan 20 17:39:40 crc kubenswrapper[4558]: I0120 17:39:40.647931 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-sync-2sxrh" event={"ID":"fd4fc3e6-d57c-4c96-9109-8c8c3c833307","Type":"ContainerStarted","Data":"f47bd09332bfe7495e8b13874999b60de3ec79cd649675900f4a6331bff4405d"} Jan 20 17:39:40 crc kubenswrapper[4558]: I0120 17:39:40.667712 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/keystone-bootstrap-rxwfk" podStartSLOduration=1.667697787 podStartE2EDuration="1.667697787s" podCreationTimestamp="2026-01-20 17:39:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:39:40.656130508 +0000 UTC m=+3474.416468475" watchObservedRunningTime="2026-01-20 17:39:40.667697787 +0000 UTC m=+3474.428035753" Jan 20 17:39:40 crc kubenswrapper[4558]: I0120 17:39:40.684501 
4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/placement-db-sync-fq8jg" podStartSLOduration=1.684482446 podStartE2EDuration="1.684482446s" podCreationTimestamp="2026-01-20 17:39:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:39:40.67753892 +0000 UTC m=+3474.437876878" watchObservedRunningTime="2026-01-20 17:39:40.684482446 +0000 UTC m=+3474.444820413" Jan 20 17:39:40 crc kubenswrapper[4558]: I0120 17:39:40.745330 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:39:40 crc kubenswrapper[4558]: I0120 17:39:40.857030 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:39:41 crc kubenswrapper[4558]: I0120 17:39:41.196901 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:39:41 crc kubenswrapper[4558]: W0120 17:39:41.200705 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod844b234f_5c4d_4250_911d_cd06f864fbb1.slice/crio-216748cd96d2962f732271bd1b02ae2aa122daca7bf4ff4c970ebccb50c4e9b7 WatchSource:0}: Error finding container 216748cd96d2962f732271bd1b02ae2aa122daca7bf4ff4c970ebccb50c4e9b7: Status 404 returned error can't find the container with id 216748cd96d2962f732271bd1b02ae2aa122daca7bf4ff4c970ebccb50c4e9b7 Jan 20 17:39:41 crc kubenswrapper[4558]: I0120 17:39:41.332197 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:39:41 crc kubenswrapper[4558]: W0120 17:39:41.336956 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda90f602d_27b9_4a25_ae05_2129b11e9076.slice/crio-a74dfb98456b10478ee0480ef4b32a5583f033aff61860ffab7f0892d6b8d45b WatchSource:0}: Error finding container a74dfb98456b10478ee0480ef4b32a5583f033aff61860ffab7f0892d6b8d45b: Status 404 returned error can't find the container with id a74dfb98456b10478ee0480ef4b32a5583f033aff61860ffab7f0892d6b8d45b Jan 20 17:39:41 crc kubenswrapper[4558]: I0120 17:39:41.425207 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:39:41 crc kubenswrapper[4558]: I0120 17:39:41.499402 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:39:41 crc kubenswrapper[4558]: I0120 17:39:41.689933 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"a90f602d-27b9-4a25-ae05-2129b11e9076","Type":"ContainerStarted","Data":"a74dfb98456b10478ee0480ef4b32a5583f033aff61860ffab7f0892d6b8d45b"} Jan 20 17:39:41 crc kubenswrapper[4558]: I0120 17:39:41.699542 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"1ae5026e-37c6-453a-943c-9dfd8cebb2c2","Type":"ContainerStarted","Data":"127398378ff58fceaa81c387bc824a1d5d37e0bf66fbdf7b76a1730850e6f86f"} Jan 20 17:39:41 crc kubenswrapper[4558]: I0120 17:39:41.699576 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" 
event={"ID":"1ae5026e-37c6-453a-943c-9dfd8cebb2c2","Type":"ContainerStarted","Data":"5c6cab5622103a6a77631945133b52c00af4530b77736c2bdab0301ddea2e07b"} Jan 20 17:39:41 crc kubenswrapper[4558]: I0120 17:39:41.702331 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-sync-2sxrh" event={"ID":"fd4fc3e6-d57c-4c96-9109-8c8c3c833307","Type":"ContainerStarted","Data":"9ae2d86b2d1cd3b71d7cfe893aa40d27c0a5f18e5bb59818007aa52b54a73e7d"} Jan 20 17:39:41 crc kubenswrapper[4558]: I0120 17:39:41.708634 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"844b234f-5c4d-4250-911d-cd06f864fbb1","Type":"ContainerStarted","Data":"216748cd96d2962f732271bd1b02ae2aa122daca7bf4ff4c970ebccb50c4e9b7"} Jan 20 17:39:41 crc kubenswrapper[4558]: I0120 17:39:41.720710 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/cinder-db-sync-2sxrh" podStartSLOduration=2.7206960049999998 podStartE2EDuration="2.720696005s" podCreationTimestamp="2026-01-20 17:39:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:39:41.716067544 +0000 UTC m=+3475.476405500" watchObservedRunningTime="2026-01-20 17:39:41.720696005 +0000 UTC m=+3475.481033973" Jan 20 17:39:41 crc kubenswrapper[4558]: I0120 17:39:41.870037 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:39:42 crc kubenswrapper[4558]: I0120 17:39:42.746621 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"1ae5026e-37c6-453a-943c-9dfd8cebb2c2","Type":"ContainerStarted","Data":"4179fc735effbae6159a69c9ca68266281ba202cd4f205078b6baf7d7940eb2a"} Jan 20 17:39:42 crc kubenswrapper[4558]: I0120 17:39:42.754639 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"a90f602d-27b9-4a25-ae05-2129b11e9076","Type":"ContainerStarted","Data":"2c1d55af2186b92b9931bdbe12ae3a13127399ea1da682b81eb9b878b31b7f45"} Jan 20 17:39:42 crc kubenswrapper[4558]: I0120 17:39:42.754799 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="a90f602d-27b9-4a25-ae05-2129b11e9076" containerName="glance-log" containerID="cri-o://60bc626a37db39f0a437bd21b829d927e3de167310335ce3e082e6a25ee18a05" gracePeriod=30 Jan 20 17:39:42 crc kubenswrapper[4558]: I0120 17:39:42.754821 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"a90f602d-27b9-4a25-ae05-2129b11e9076","Type":"ContainerStarted","Data":"60bc626a37db39f0a437bd21b829d927e3de167310335ce3e082e6a25ee18a05"} Jan 20 17:39:42 crc kubenswrapper[4558]: I0120 17:39:42.755008 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="a90f602d-27b9-4a25-ae05-2129b11e9076" containerName="glance-httpd" containerID="cri-o://2c1d55af2186b92b9931bdbe12ae3a13127399ea1da682b81eb9b878b31b7f45" gracePeriod=30 Jan 20 17:39:42 crc kubenswrapper[4558]: I0120 17:39:42.761089 4558 generic.go:334] "Generic (PLEG): container finished" podID="c48697d3-0295-45c7-89d7-7d2c524f3b20" containerID="b939e3792d1ca4dac4c7681e6b9728b9b8e64bdf4b0beba07b105d7abb383dd6" exitCode=0 Jan 20 17:39:42 crc kubenswrapper[4558]: I0120 
17:39:42.761173 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-sync-fq8jg" event={"ID":"c48697d3-0295-45c7-89d7-7d2c524f3b20","Type":"ContainerDied","Data":"b939e3792d1ca4dac4c7681e6b9728b9b8e64bdf4b0beba07b105d7abb383dd6"} Jan 20 17:39:42 crc kubenswrapper[4558]: I0120 17:39:42.777753 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-internal-api-0" podStartSLOduration=3.777733018 podStartE2EDuration="3.777733018s" podCreationTimestamp="2026-01-20 17:39:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:39:42.771646484 +0000 UTC m=+3476.531984452" watchObservedRunningTime="2026-01-20 17:39:42.777733018 +0000 UTC m=+3476.538070985" Jan 20 17:39:42 crc kubenswrapper[4558]: I0120 17:39:42.780686 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"844b234f-5c4d-4250-911d-cd06f864fbb1","Type":"ContainerStarted","Data":"b315462cca6ea3017331a9a3b6006923169f3c785b7dc06a60a420f2b2a172ea"} Jan 20 17:39:42 crc kubenswrapper[4558]: I0120 17:39:42.780731 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"844b234f-5c4d-4250-911d-cd06f864fbb1","Type":"ContainerStarted","Data":"c917a83db8d033360cc85bd4df1907090be09ad74c6daace74860b79ca18dd60"} Jan 20 17:39:42 crc kubenswrapper[4558]: I0120 17:39:42.780933 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="844b234f-5c4d-4250-911d-cd06f864fbb1" containerName="glance-log" containerID="cri-o://c917a83db8d033360cc85bd4df1907090be09ad74c6daace74860b79ca18dd60" gracePeriod=30 Jan 20 17:39:42 crc kubenswrapper[4558]: I0120 17:39:42.781067 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="844b234f-5c4d-4250-911d-cd06f864fbb1" containerName="glance-httpd" containerID="cri-o://b315462cca6ea3017331a9a3b6006923169f3c785b7dc06a60a420f2b2a172ea" gracePeriod=30 Jan 20 17:39:42 crc kubenswrapper[4558]: I0120 17:39:42.824982 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-external-api-0" podStartSLOduration=3.824965556 podStartE2EDuration="3.824965556s" podCreationTimestamp="2026-01-20 17:39:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:39:42.821540427 +0000 UTC m=+3476.581878394" watchObservedRunningTime="2026-01-20 17:39:42.824965556 +0000 UTC m=+3476.585303523" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.067888 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-db-sync-ndrzj"] Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.073043 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-db-sync-ndrzj" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.077767 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-barbican-dockercfg-585xp" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.078102 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-config-data" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.136971 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-db-sync-ndrzj"] Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.170378 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pl7ht\" (UniqueName: \"kubernetes.io/projected/86a50da2-ef44-4072-b680-d56cc4ee67c5-kube-api-access-pl7ht\") pod \"barbican-db-sync-ndrzj\" (UID: \"86a50da2-ef44-4072-b680-d56cc4ee67c5\") " pod="openstack-kuttl-tests/barbican-db-sync-ndrzj" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.170589 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/86a50da2-ef44-4072-b680-d56cc4ee67c5-combined-ca-bundle\") pod \"barbican-db-sync-ndrzj\" (UID: \"86a50da2-ef44-4072-b680-d56cc4ee67c5\") " pod="openstack-kuttl-tests/barbican-db-sync-ndrzj" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.170701 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/86a50da2-ef44-4072-b680-d56cc4ee67c5-db-sync-config-data\") pod \"barbican-db-sync-ndrzj\" (UID: \"86a50da2-ef44-4072-b680-d56cc4ee67c5\") " pod="openstack-kuttl-tests/barbican-db-sync-ndrzj" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.250852 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.272901 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pl7ht\" (UniqueName: \"kubernetes.io/projected/86a50da2-ef44-4072-b680-d56cc4ee67c5-kube-api-access-pl7ht\") pod \"barbican-db-sync-ndrzj\" (UID: \"86a50da2-ef44-4072-b680-d56cc4ee67c5\") " pod="openstack-kuttl-tests/barbican-db-sync-ndrzj" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.273028 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/86a50da2-ef44-4072-b680-d56cc4ee67c5-combined-ca-bundle\") pod \"barbican-db-sync-ndrzj\" (UID: \"86a50da2-ef44-4072-b680-d56cc4ee67c5\") " pod="openstack-kuttl-tests/barbican-db-sync-ndrzj" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.273132 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/86a50da2-ef44-4072-b680-d56cc4ee67c5-db-sync-config-data\") pod \"barbican-db-sync-ndrzj\" (UID: \"86a50da2-ef44-4072-b680-d56cc4ee67c5\") " pod="openstack-kuttl-tests/barbican-db-sync-ndrzj" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.279254 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/86a50da2-ef44-4072-b680-d56cc4ee67c5-db-sync-config-data\") pod \"barbican-db-sync-ndrzj\" (UID: \"86a50da2-ef44-4072-b680-d56cc4ee67c5\") " pod="openstack-kuttl-tests/barbican-db-sync-ndrzj" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.279993 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-db-sync-fv6q5"] Jan 20 17:39:43 crc kubenswrapper[4558]: E0120 17:39:43.280439 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a90f602d-27b9-4a25-ae05-2129b11e9076" containerName="glance-httpd" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.280457 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a90f602d-27b9-4a25-ae05-2129b11e9076" containerName="glance-httpd" Jan 20 17:39:43 crc kubenswrapper[4558]: E0120 17:39:43.280473 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a90f602d-27b9-4a25-ae05-2129b11e9076" containerName="glance-log" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.280479 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a90f602d-27b9-4a25-ae05-2129b11e9076" containerName="glance-log" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.280628 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a90f602d-27b9-4a25-ae05-2129b11e9076" containerName="glance-log" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.280650 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a90f602d-27b9-4a25-ae05-2129b11e9076" containerName="glance-httpd" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.281382 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-db-sync-fv6q5" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.284492 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-httpd-config" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.284728 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-neutron-dockercfg-dz7n9" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.285379 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-config" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.290841 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pl7ht\" (UniqueName: \"kubernetes.io/projected/86a50da2-ef44-4072-b680-d56cc4ee67c5-kube-api-access-pl7ht\") pod \"barbican-db-sync-ndrzj\" (UID: \"86a50da2-ef44-4072-b680-d56cc4ee67c5\") " pod="openstack-kuttl-tests/barbican-db-sync-ndrzj" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.291014 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/86a50da2-ef44-4072-b680-d56cc4ee67c5-combined-ca-bundle\") pod \"barbican-db-sync-ndrzj\" (UID: \"86a50da2-ef44-4072-b680-d56cc4ee67c5\") " pod="openstack-kuttl-tests/barbican-db-sync-ndrzj" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.298559 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-db-sync-fv6q5"] Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.317703 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.373991 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a90f602d-27b9-4a25-ae05-2129b11e9076-scripts\") pod \"a90f602d-27b9-4a25-ae05-2129b11e9076\" (UID: \"a90f602d-27b9-4a25-ae05-2129b11e9076\") " Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.374112 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l5nqk\" (UniqueName: \"kubernetes.io/projected/a90f602d-27b9-4a25-ae05-2129b11e9076-kube-api-access-l5nqk\") pod \"a90f602d-27b9-4a25-ae05-2129b11e9076\" (UID: \"a90f602d-27b9-4a25-ae05-2129b11e9076\") " Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.374177 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a90f602d-27b9-4a25-ae05-2129b11e9076-logs\") pod \"a90f602d-27b9-4a25-ae05-2129b11e9076\" (UID: \"a90f602d-27b9-4a25-ae05-2129b11e9076\") " Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.374248 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a90f602d-27b9-4a25-ae05-2129b11e9076-httpd-run\") pod \"a90f602d-27b9-4a25-ae05-2129b11e9076\" (UID: \"a90f602d-27b9-4a25-ae05-2129b11e9076\") " Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.374272 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a90f602d-27b9-4a25-ae05-2129b11e9076-combined-ca-bundle\") pod \"a90f602d-27b9-4a25-ae05-2129b11e9076\" (UID: \"a90f602d-27b9-4a25-ae05-2129b11e9076\") " Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 
17:39:43.374316 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a90f602d-27b9-4a25-ae05-2129b11e9076-config-data\") pod \"a90f602d-27b9-4a25-ae05-2129b11e9076\" (UID: \"a90f602d-27b9-4a25-ae05-2129b11e9076\") " Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.374396 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"a90f602d-27b9-4a25-ae05-2129b11e9076\" (UID: \"a90f602d-27b9-4a25-ae05-2129b11e9076\") " Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.374493 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a90f602d-27b9-4a25-ae05-2129b11e9076-internal-tls-certs\") pod \"a90f602d-27b9-4a25-ae05-2129b11e9076\" (UID: \"a90f602d-27b9-4a25-ae05-2129b11e9076\") " Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.374770 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m48bs\" (UniqueName: \"kubernetes.io/projected/ae3119fb-1355-4da8-a005-3610a1733b90-kube-api-access-m48bs\") pod \"neutron-db-sync-fv6q5\" (UID: \"ae3119fb-1355-4da8-a005-3610a1733b90\") " pod="openstack-kuttl-tests/neutron-db-sync-fv6q5" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.374863 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae3119fb-1355-4da8-a005-3610a1733b90-combined-ca-bundle\") pod \"neutron-db-sync-fv6q5\" (UID: \"ae3119fb-1355-4da8-a005-3610a1733b90\") " pod="openstack-kuttl-tests/neutron-db-sync-fv6q5" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.375052 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/ae3119fb-1355-4da8-a005-3610a1733b90-config\") pod \"neutron-db-sync-fv6q5\" (UID: \"ae3119fb-1355-4da8-a005-3610a1733b90\") " pod="openstack-kuttl-tests/neutron-db-sync-fv6q5" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.377038 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a90f602d-27b9-4a25-ae05-2129b11e9076-logs" (OuterVolumeSpecName: "logs") pod "a90f602d-27b9-4a25-ae05-2129b11e9076" (UID: "a90f602d-27b9-4a25-ae05-2129b11e9076"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.377086 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a90f602d-27b9-4a25-ae05-2129b11e9076-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "a90f602d-27b9-4a25-ae05-2129b11e9076" (UID: "a90f602d-27b9-4a25-ae05-2129b11e9076"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.379475 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a90f602d-27b9-4a25-ae05-2129b11e9076-scripts" (OuterVolumeSpecName: "scripts") pod "a90f602d-27b9-4a25-ae05-2129b11e9076" (UID: "a90f602d-27b9-4a25-ae05-2129b11e9076"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.379482 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a90f602d-27b9-4a25-ae05-2129b11e9076-kube-api-access-l5nqk" (OuterVolumeSpecName: "kube-api-access-l5nqk") pod "a90f602d-27b9-4a25-ae05-2129b11e9076" (UID: "a90f602d-27b9-4a25-ae05-2129b11e9076"). InnerVolumeSpecName "kube-api-access-l5nqk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.379976 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "glance") pod "a90f602d-27b9-4a25-ae05-2129b11e9076" (UID: "a90f602d-27b9-4a25-ae05-2129b11e9076"). InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.392533 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-db-sync-ndrzj" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.398574 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a90f602d-27b9-4a25-ae05-2129b11e9076-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a90f602d-27b9-4a25-ae05-2129b11e9076" (UID: "a90f602d-27b9-4a25-ae05-2129b11e9076"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.410974 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a90f602d-27b9-4a25-ae05-2129b11e9076-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "a90f602d-27b9-4a25-ae05-2129b11e9076" (UID: "a90f602d-27b9-4a25-ae05-2129b11e9076"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.426239 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a90f602d-27b9-4a25-ae05-2129b11e9076-config-data" (OuterVolumeSpecName: "config-data") pod "a90f602d-27b9-4a25-ae05-2129b11e9076" (UID: "a90f602d-27b9-4a25-ae05-2129b11e9076"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.476090 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/844b234f-5c4d-4250-911d-cd06f864fbb1-httpd-run\") pod \"844b234f-5c4d-4250-911d-cd06f864fbb1\" (UID: \"844b234f-5c4d-4250-911d-cd06f864fbb1\") " Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.476186 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/844b234f-5c4d-4250-911d-cd06f864fbb1-combined-ca-bundle\") pod \"844b234f-5c4d-4250-911d-cd06f864fbb1\" (UID: \"844b234f-5c4d-4250-911d-cd06f864fbb1\") " Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.476226 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"844b234f-5c4d-4250-911d-cd06f864fbb1\" (UID: \"844b234f-5c4d-4250-911d-cd06f864fbb1\") " Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.476248 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/844b234f-5c4d-4250-911d-cd06f864fbb1-public-tls-certs\") pod \"844b234f-5c4d-4250-911d-cd06f864fbb1\" (UID: \"844b234f-5c4d-4250-911d-cd06f864fbb1\") " Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.476583 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-stgbn\" (UniqueName: \"kubernetes.io/projected/844b234f-5c4d-4250-911d-cd06f864fbb1-kube-api-access-stgbn\") pod \"844b234f-5c4d-4250-911d-cd06f864fbb1\" (UID: \"844b234f-5c4d-4250-911d-cd06f864fbb1\") " Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.476690 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/844b234f-5c4d-4250-911d-cd06f864fbb1-scripts\") pod \"844b234f-5c4d-4250-911d-cd06f864fbb1\" (UID: \"844b234f-5c4d-4250-911d-cd06f864fbb1\") " Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.476807 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/844b234f-5c4d-4250-911d-cd06f864fbb1-logs\") pod \"844b234f-5c4d-4250-911d-cd06f864fbb1\" (UID: \"844b234f-5c4d-4250-911d-cd06f864fbb1\") " Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.476843 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/844b234f-5c4d-4250-911d-cd06f864fbb1-config-data\") pod \"844b234f-5c4d-4250-911d-cd06f864fbb1\" (UID: \"844b234f-5c4d-4250-911d-cd06f864fbb1\") " Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.477714 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m48bs\" (UniqueName: \"kubernetes.io/projected/ae3119fb-1355-4da8-a005-3610a1733b90-kube-api-access-m48bs\") pod \"neutron-db-sync-fv6q5\" (UID: \"ae3119fb-1355-4da8-a005-3610a1733b90\") " pod="openstack-kuttl-tests/neutron-db-sync-fv6q5" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.477840 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae3119fb-1355-4da8-a005-3610a1733b90-combined-ca-bundle\") pod \"neutron-db-sync-fv6q5\" (UID: \"ae3119fb-1355-4da8-a005-3610a1733b90\") " 
pod="openstack-kuttl-tests/neutron-db-sync-fv6q5" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.478045 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/ae3119fb-1355-4da8-a005-3610a1733b90-config\") pod \"neutron-db-sync-fv6q5\" (UID: \"ae3119fb-1355-4da8-a005-3610a1733b90\") " pod="openstack-kuttl-tests/neutron-db-sync-fv6q5" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.478174 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.478190 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a90f602d-27b9-4a25-ae05-2129b11e9076-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.478203 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a90f602d-27b9-4a25-ae05-2129b11e9076-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.478213 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l5nqk\" (UniqueName: \"kubernetes.io/projected/a90f602d-27b9-4a25-ae05-2129b11e9076-kube-api-access-l5nqk\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.478222 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a90f602d-27b9-4a25-ae05-2129b11e9076-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.478233 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a90f602d-27b9-4a25-ae05-2129b11e9076-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.478244 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a90f602d-27b9-4a25-ae05-2129b11e9076-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.478254 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a90f602d-27b9-4a25-ae05-2129b11e9076-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.483599 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/844b234f-5c4d-4250-911d-cd06f864fbb1-kube-api-access-stgbn" (OuterVolumeSpecName: "kube-api-access-stgbn") pod "844b234f-5c4d-4250-911d-cd06f864fbb1" (UID: "844b234f-5c4d-4250-911d-cd06f864fbb1"). InnerVolumeSpecName "kube-api-access-stgbn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.484252 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "glance") pod "844b234f-5c4d-4250-911d-cd06f864fbb1" (UID: "844b234f-5c4d-4250-911d-cd06f864fbb1"). InnerVolumeSpecName "local-storage09-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.484269 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/844b234f-5c4d-4250-911d-cd06f864fbb1-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "844b234f-5c4d-4250-911d-cd06f864fbb1" (UID: "844b234f-5c4d-4250-911d-cd06f864fbb1"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.485337 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/844b234f-5c4d-4250-911d-cd06f864fbb1-logs" (OuterVolumeSpecName: "logs") pod "844b234f-5c4d-4250-911d-cd06f864fbb1" (UID: "844b234f-5c4d-4250-911d-cd06f864fbb1"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.488282 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/844b234f-5c4d-4250-911d-cd06f864fbb1-scripts" (OuterVolumeSpecName: "scripts") pod "844b234f-5c4d-4250-911d-cd06f864fbb1" (UID: "844b234f-5c4d-4250-911d-cd06f864fbb1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.491398 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/ae3119fb-1355-4da8-a005-3610a1733b90-config\") pod \"neutron-db-sync-fv6q5\" (UID: \"ae3119fb-1355-4da8-a005-3610a1733b90\") " pod="openstack-kuttl-tests/neutron-db-sync-fv6q5" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.496081 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.504007 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae3119fb-1355-4da8-a005-3610a1733b90-combined-ca-bundle\") pod \"neutron-db-sync-fv6q5\" (UID: \"ae3119fb-1355-4da8-a005-3610a1733b90\") " pod="openstack-kuttl-tests/neutron-db-sync-fv6q5" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.508787 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m48bs\" (UniqueName: \"kubernetes.io/projected/ae3119fb-1355-4da8-a005-3610a1733b90-kube-api-access-m48bs\") pod \"neutron-db-sync-fv6q5\" (UID: \"ae3119fb-1355-4da8-a005-3610a1733b90\") " pod="openstack-kuttl-tests/neutron-db-sync-fv6q5" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.512239 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/844b234f-5c4d-4250-911d-cd06f864fbb1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "844b234f-5c4d-4250-911d-cd06f864fbb1" (UID: "844b234f-5c4d-4250-911d-cd06f864fbb1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.541845 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/844b234f-5c4d-4250-911d-cd06f864fbb1-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "844b234f-5c4d-4250-911d-cd06f864fbb1" (UID: "844b234f-5c4d-4250-911d-cd06f864fbb1"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.547022 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/844b234f-5c4d-4250-911d-cd06f864fbb1-config-data" (OuterVolumeSpecName: "config-data") pod "844b234f-5c4d-4250-911d-cd06f864fbb1" (UID: "844b234f-5c4d-4250-911d-cd06f864fbb1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.582463 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/844b234f-5c4d-4250-911d-cd06f864fbb1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.582498 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.582509 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/844b234f-5c4d-4250-911d-cd06f864fbb1-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.582520 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-stgbn\" (UniqueName: \"kubernetes.io/projected/844b234f-5c4d-4250-911d-cd06f864fbb1-kube-api-access-stgbn\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.582532 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/844b234f-5c4d-4250-911d-cd06f864fbb1-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.582541 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/844b234f-5c4d-4250-911d-cd06f864fbb1-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.582550 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/844b234f-5c4d-4250-911d-cd06f864fbb1-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.582559 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.582567 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/844b234f-5c4d-4250-911d-cd06f864fbb1-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.596652 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.598924 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-db-sync-fv6q5" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.686953 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.795692 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-db-sync-ndrzj"] Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.807772 4558 generic.go:334] "Generic (PLEG): container finished" podID="844b234f-5c4d-4250-911d-cd06f864fbb1" containerID="b315462cca6ea3017331a9a3b6006923169f3c785b7dc06a60a420f2b2a172ea" exitCode=0 Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.807805 4558 generic.go:334] "Generic (PLEG): container finished" podID="844b234f-5c4d-4250-911d-cd06f864fbb1" containerID="c917a83db8d033360cc85bd4df1907090be09ad74c6daace74860b79ca18dd60" exitCode=143 Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.807814 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"844b234f-5c4d-4250-911d-cd06f864fbb1","Type":"ContainerDied","Data":"b315462cca6ea3017331a9a3b6006923169f3c785b7dc06a60a420f2b2a172ea"} Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.807860 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"844b234f-5c4d-4250-911d-cd06f864fbb1","Type":"ContainerDied","Data":"c917a83db8d033360cc85bd4df1907090be09ad74c6daace74860b79ca18dd60"} Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.807877 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"844b234f-5c4d-4250-911d-cd06f864fbb1","Type":"ContainerDied","Data":"216748cd96d2962f732271bd1b02ae2aa122daca7bf4ff4c970ebccb50c4e9b7"} Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.807887 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.807903 4558 scope.go:117] "RemoveContainer" containerID="b315462cca6ea3017331a9a3b6006923169f3c785b7dc06a60a420f2b2a172ea" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.810778 4558 generic.go:334] "Generic (PLEG): container finished" podID="a90f602d-27b9-4a25-ae05-2129b11e9076" containerID="2c1d55af2186b92b9931bdbe12ae3a13127399ea1da682b81eb9b878b31b7f45" exitCode=143 Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.810806 4558 generic.go:334] "Generic (PLEG): container finished" podID="a90f602d-27b9-4a25-ae05-2129b11e9076" containerID="60bc626a37db39f0a437bd21b829d927e3de167310335ce3e082e6a25ee18a05" exitCode=143 Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.810931 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.811328 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"a90f602d-27b9-4a25-ae05-2129b11e9076","Type":"ContainerDied","Data":"2c1d55af2186b92b9931bdbe12ae3a13127399ea1da682b81eb9b878b31b7f45"} Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.811380 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"a90f602d-27b9-4a25-ae05-2129b11e9076","Type":"ContainerDied","Data":"60bc626a37db39f0a437bd21b829d927e3de167310335ce3e082e6a25ee18a05"} Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.811391 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"a90f602d-27b9-4a25-ae05-2129b11e9076","Type":"ContainerDied","Data":"a74dfb98456b10478ee0480ef4b32a5583f033aff61860ffab7f0892d6b8d45b"} Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.816109 4558 generic.go:334] "Generic (PLEG): container finished" podID="fd4fc3e6-d57c-4c96-9109-8c8c3c833307" containerID="9ae2d86b2d1cd3b71d7cfe893aa40d27c0a5f18e5bb59818007aa52b54a73e7d" exitCode=0 Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.816193 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-sync-2sxrh" event={"ID":"fd4fc3e6-d57c-4c96-9109-8c8c3c833307","Type":"ContainerDied","Data":"9ae2d86b2d1cd3b71d7cfe893aa40d27c0a5f18e5bb59818007aa52b54a73e7d"} Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.829553 4558 scope.go:117] "RemoveContainer" containerID="c917a83db8d033360cc85bd4df1907090be09ad74c6daace74860b79ca18dd60" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.829711 4558 generic.go:334] "Generic (PLEG): container finished" podID="427dd0db-2906-44d3-816e-a09f7cdd3a12" containerID="f19974f989568d1e6a6c2323bcacffe749a6aded654469aa967eea0f4c6fc9ba" exitCode=0 Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.829914 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-rxwfk" event={"ID":"427dd0db-2906-44d3-816e-a09f7cdd3a12","Type":"ContainerDied","Data":"f19974f989568d1e6a6c2323bcacffe749a6aded654469aa967eea0f4c6fc9ba"} Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.883950 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.885206 4558 scope.go:117] "RemoveContainer" containerID="b315462cca6ea3017331a9a3b6006923169f3c785b7dc06a60a420f2b2a172ea" Jan 20 17:39:43 crc kubenswrapper[4558]: E0120 17:39:43.895546 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b315462cca6ea3017331a9a3b6006923169f3c785b7dc06a60a420f2b2a172ea\": container with ID starting with b315462cca6ea3017331a9a3b6006923169f3c785b7dc06a60a420f2b2a172ea not found: ID does not exist" containerID="b315462cca6ea3017331a9a3b6006923169f3c785b7dc06a60a420f2b2a172ea" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.896936 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b315462cca6ea3017331a9a3b6006923169f3c785b7dc06a60a420f2b2a172ea"} err="failed to get container status \"b315462cca6ea3017331a9a3b6006923169f3c785b7dc06a60a420f2b2a172ea\": rpc error: 
code = NotFound desc = could not find container \"b315462cca6ea3017331a9a3b6006923169f3c785b7dc06a60a420f2b2a172ea\": container with ID starting with b315462cca6ea3017331a9a3b6006923169f3c785b7dc06a60a420f2b2a172ea not found: ID does not exist" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.896967 4558 scope.go:117] "RemoveContainer" containerID="c917a83db8d033360cc85bd4df1907090be09ad74c6daace74860b79ca18dd60" Jan 20 17:39:43 crc kubenswrapper[4558]: E0120 17:39:43.900899 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c917a83db8d033360cc85bd4df1907090be09ad74c6daace74860b79ca18dd60\": container with ID starting with c917a83db8d033360cc85bd4df1907090be09ad74c6daace74860b79ca18dd60 not found: ID does not exist" containerID="c917a83db8d033360cc85bd4df1907090be09ad74c6daace74860b79ca18dd60" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.900928 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c917a83db8d033360cc85bd4df1907090be09ad74c6daace74860b79ca18dd60"} err="failed to get container status \"c917a83db8d033360cc85bd4df1907090be09ad74c6daace74860b79ca18dd60\": rpc error: code = NotFound desc = could not find container \"c917a83db8d033360cc85bd4df1907090be09ad74c6daace74860b79ca18dd60\": container with ID starting with c917a83db8d033360cc85bd4df1907090be09ad74c6daace74860b79ca18dd60 not found: ID does not exist" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.900948 4558 scope.go:117] "RemoveContainer" containerID="b315462cca6ea3017331a9a3b6006923169f3c785b7dc06a60a420f2b2a172ea" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.901354 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b315462cca6ea3017331a9a3b6006923169f3c785b7dc06a60a420f2b2a172ea"} err="failed to get container status \"b315462cca6ea3017331a9a3b6006923169f3c785b7dc06a60a420f2b2a172ea\": rpc error: code = NotFound desc = could not find container \"b315462cca6ea3017331a9a3b6006923169f3c785b7dc06a60a420f2b2a172ea\": container with ID starting with b315462cca6ea3017331a9a3b6006923169f3c785b7dc06a60a420f2b2a172ea not found: ID does not exist" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.901378 4558 scope.go:117] "RemoveContainer" containerID="c917a83db8d033360cc85bd4df1907090be09ad74c6daace74860b79ca18dd60" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.901661 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c917a83db8d033360cc85bd4df1907090be09ad74c6daace74860b79ca18dd60"} err="failed to get container status \"c917a83db8d033360cc85bd4df1907090be09ad74c6daace74860b79ca18dd60\": rpc error: code = NotFound desc = could not find container \"c917a83db8d033360cc85bd4df1907090be09ad74c6daace74860b79ca18dd60\": container with ID starting with c917a83db8d033360cc85bd4df1907090be09ad74c6daace74860b79ca18dd60 not found: ID does not exist" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.901684 4558 scope.go:117] "RemoveContainer" containerID="2c1d55af2186b92b9931bdbe12ae3a13127399ea1da682b81eb9b878b31b7f45" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.902813 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.930903 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.941214 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.945539 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:39:43 crc kubenswrapper[4558]: E0120 17:39:43.945920 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="844b234f-5c4d-4250-911d-cd06f864fbb1" containerName="glance-log" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.945939 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="844b234f-5c4d-4250-911d-cd06f864fbb1" containerName="glance-log" Jan 20 17:39:43 crc kubenswrapper[4558]: E0120 17:39:43.945959 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="844b234f-5c4d-4250-911d-cd06f864fbb1" containerName="glance-httpd" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.945965 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="844b234f-5c4d-4250-911d-cd06f864fbb1" containerName="glance-httpd" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.946116 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="844b234f-5c4d-4250-911d-cd06f864fbb1" containerName="glance-log" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.946140 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="844b234f-5c4d-4250-911d-cd06f864fbb1" containerName="glance-httpd" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.947013 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.950924 4558 scope.go:117] "RemoveContainer" containerID="60bc626a37db39f0a437bd21b829d927e3de167310335ce3e082e6a25ee18a05" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.952366 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-scripts" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.952629 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-internal-config-data" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.953739 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-internal-svc" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.956087 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-glance-dockercfg-gnzn6" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.956329 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.958213 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.972451 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-external-config-data" Jan 20 17:39:43 crc kubenswrapper[4558]: I0120 17:39:43.972449 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-public-svc" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.003223 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.007362 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.037466 4558 scope.go:117] "RemoveContainer" containerID="2c1d55af2186b92b9931bdbe12ae3a13127399ea1da682b81eb9b878b31b7f45" Jan 20 17:39:44 crc kubenswrapper[4558]: E0120 17:39:44.045513 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2c1d55af2186b92b9931bdbe12ae3a13127399ea1da682b81eb9b878b31b7f45\": container with ID starting with 2c1d55af2186b92b9931bdbe12ae3a13127399ea1da682b81eb9b878b31b7f45 not found: ID does not exist" containerID="2c1d55af2186b92b9931bdbe12ae3a13127399ea1da682b81eb9b878b31b7f45" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.045545 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2c1d55af2186b92b9931bdbe12ae3a13127399ea1da682b81eb9b878b31b7f45"} err="failed to get container status \"2c1d55af2186b92b9931bdbe12ae3a13127399ea1da682b81eb9b878b31b7f45\": rpc error: code = NotFound desc = could not find container \"2c1d55af2186b92b9931bdbe12ae3a13127399ea1da682b81eb9b878b31b7f45\": container with ID starting with 2c1d55af2186b92b9931bdbe12ae3a13127399ea1da682b81eb9b878b31b7f45 not found: ID does not exist" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.045568 4558 scope.go:117] "RemoveContainer" containerID="60bc626a37db39f0a437bd21b829d927e3de167310335ce3e082e6a25ee18a05" Jan 20 17:39:44 crc kubenswrapper[4558]: E0120 17:39:44.045745 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"60bc626a37db39f0a437bd21b829d927e3de167310335ce3e082e6a25ee18a05\": container with ID starting with 60bc626a37db39f0a437bd21b829d927e3de167310335ce3e082e6a25ee18a05 not found: ID does not exist" containerID="60bc626a37db39f0a437bd21b829d927e3de167310335ce3e082e6a25ee18a05" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.045771 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"60bc626a37db39f0a437bd21b829d927e3de167310335ce3e082e6a25ee18a05"} err="failed to get container status \"60bc626a37db39f0a437bd21b829d927e3de167310335ce3e082e6a25ee18a05\": rpc error: code = NotFound desc = could not find container \"60bc626a37db39f0a437bd21b829d927e3de167310335ce3e082e6a25ee18a05\": container with ID starting with 60bc626a37db39f0a437bd21b829d927e3de167310335ce3e082e6a25ee18a05 not found: ID does not exist" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.045783 4558 scope.go:117] "RemoveContainer" containerID="2c1d55af2186b92b9931bdbe12ae3a13127399ea1da682b81eb9b878b31b7f45" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.045949 4558 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2c1d55af2186b92b9931bdbe12ae3a13127399ea1da682b81eb9b878b31b7f45"} err="failed to get container status \"2c1d55af2186b92b9931bdbe12ae3a13127399ea1da682b81eb9b878b31b7f45\": rpc error: code = NotFound desc = could not find container \"2c1d55af2186b92b9931bdbe12ae3a13127399ea1da682b81eb9b878b31b7f45\": container with ID starting with 2c1d55af2186b92b9931bdbe12ae3a13127399ea1da682b81eb9b878b31b7f45 not found: ID does not exist" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.045981 4558 scope.go:117] "RemoveContainer" containerID="60bc626a37db39f0a437bd21b829d927e3de167310335ce3e082e6a25ee18a05" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.046132 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"60bc626a37db39f0a437bd21b829d927e3de167310335ce3e082e6a25ee18a05"} err="failed to get container status \"60bc626a37db39f0a437bd21b829d927e3de167310335ce3e082e6a25ee18a05\": rpc error: code = NotFound desc = could not find container \"60bc626a37db39f0a437bd21b829d927e3de167310335ce3e082e6a25ee18a05\": container with ID starting with 60bc626a37db39f0a437bd21b829d927e3de167310335ce3e082e6a25ee18a05 not found: ID does not exist" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.106587 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92c317c8-cf30-4046-af0b-8f6c36f69000-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"92c317c8-cf30-4046-af0b-8f6c36f69000\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.106639 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/92c317c8-cf30-4046-af0b-8f6c36f69000-scripts\") pod \"glance-default-internal-api-0\" (UID: \"92c317c8-cf30-4046-af0b-8f6c36f69000\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.106723 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2328ffd8-4eb7-41b8-abfb-b5ee5a12f117-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"2328ffd8-4eb7-41b8-abfb-b5ee5a12f117\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.106752 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2328ffd8-4eb7-41b8-abfb-b5ee5a12f117-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"2328ffd8-4eb7-41b8-abfb-b5ee5a12f117\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.106789 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2328ffd8-4eb7-41b8-abfb-b5ee5a12f117-logs\") pod \"glance-default-external-api-0\" (UID: \"2328ffd8-4eb7-41b8-abfb-b5ee5a12f117\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.106821 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-4nd8m\" (UniqueName: \"kubernetes.io/projected/2328ffd8-4eb7-41b8-abfb-b5ee5a12f117-kube-api-access-4nd8m\") pod \"glance-default-external-api-0\" (UID: \"2328ffd8-4eb7-41b8-abfb-b5ee5a12f117\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.106893 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2328ffd8-4eb7-41b8-abfb-b5ee5a12f117-config-data\") pod \"glance-default-external-api-0\" (UID: \"2328ffd8-4eb7-41b8-abfb-b5ee5a12f117\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.106916 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2328ffd8-4eb7-41b8-abfb-b5ee5a12f117-scripts\") pod \"glance-default-external-api-0\" (UID: \"2328ffd8-4eb7-41b8-abfb-b5ee5a12f117\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.106945 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"2328ffd8-4eb7-41b8-abfb-b5ee5a12f117\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.106989 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/92c317c8-cf30-4046-af0b-8f6c36f69000-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"92c317c8-cf30-4046-af0b-8f6c36f69000\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.107018 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"92c317c8-cf30-4046-af0b-8f6c36f69000\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.107047 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/92c317c8-cf30-4046-af0b-8f6c36f69000-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"92c317c8-cf30-4046-af0b-8f6c36f69000\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.107076 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8fpts\" (UniqueName: \"kubernetes.io/projected/92c317c8-cf30-4046-af0b-8f6c36f69000-kube-api-access-8fpts\") pod \"glance-default-internal-api-0\" (UID: \"92c317c8-cf30-4046-af0b-8f6c36f69000\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.107098 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92c317c8-cf30-4046-af0b-8f6c36f69000-config-data\") pod \"glance-default-internal-api-0\" (UID: \"92c317c8-cf30-4046-af0b-8f6c36f69000\") " 
pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.107116 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/92c317c8-cf30-4046-af0b-8f6c36f69000-logs\") pod \"glance-default-internal-api-0\" (UID: \"92c317c8-cf30-4046-af0b-8f6c36f69000\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.107141 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/2328ffd8-4eb7-41b8-abfb-b5ee5a12f117-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"2328ffd8-4eb7-41b8-abfb-b5ee5a12f117\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.117144 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-db-sync-fv6q5"] Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.213415 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92c317c8-cf30-4046-af0b-8f6c36f69000-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"92c317c8-cf30-4046-af0b-8f6c36f69000\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.213459 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/92c317c8-cf30-4046-af0b-8f6c36f69000-scripts\") pod \"glance-default-internal-api-0\" (UID: \"92c317c8-cf30-4046-af0b-8f6c36f69000\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.213513 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2328ffd8-4eb7-41b8-abfb-b5ee5a12f117-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"2328ffd8-4eb7-41b8-abfb-b5ee5a12f117\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.213532 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2328ffd8-4eb7-41b8-abfb-b5ee5a12f117-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"2328ffd8-4eb7-41b8-abfb-b5ee5a12f117\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.213555 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2328ffd8-4eb7-41b8-abfb-b5ee5a12f117-logs\") pod \"glance-default-external-api-0\" (UID: \"2328ffd8-4eb7-41b8-abfb-b5ee5a12f117\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.213577 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4nd8m\" (UniqueName: \"kubernetes.io/projected/2328ffd8-4eb7-41b8-abfb-b5ee5a12f117-kube-api-access-4nd8m\") pod \"glance-default-external-api-0\" (UID: \"2328ffd8-4eb7-41b8-abfb-b5ee5a12f117\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.213615 4558 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2328ffd8-4eb7-41b8-abfb-b5ee5a12f117-config-data\") pod \"glance-default-external-api-0\" (UID: \"2328ffd8-4eb7-41b8-abfb-b5ee5a12f117\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.213631 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2328ffd8-4eb7-41b8-abfb-b5ee5a12f117-scripts\") pod \"glance-default-external-api-0\" (UID: \"2328ffd8-4eb7-41b8-abfb-b5ee5a12f117\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.213654 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"2328ffd8-4eb7-41b8-abfb-b5ee5a12f117\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.213680 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/92c317c8-cf30-4046-af0b-8f6c36f69000-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"92c317c8-cf30-4046-af0b-8f6c36f69000\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.213703 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"92c317c8-cf30-4046-af0b-8f6c36f69000\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.213727 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/92c317c8-cf30-4046-af0b-8f6c36f69000-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"92c317c8-cf30-4046-af0b-8f6c36f69000\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.213753 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8fpts\" (UniqueName: \"kubernetes.io/projected/92c317c8-cf30-4046-af0b-8f6c36f69000-kube-api-access-8fpts\") pod \"glance-default-internal-api-0\" (UID: \"92c317c8-cf30-4046-af0b-8f6c36f69000\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.213768 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92c317c8-cf30-4046-af0b-8f6c36f69000-config-data\") pod \"glance-default-internal-api-0\" (UID: \"92c317c8-cf30-4046-af0b-8f6c36f69000\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.213785 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/92c317c8-cf30-4046-af0b-8f6c36f69000-logs\") pod \"glance-default-internal-api-0\" (UID: \"92c317c8-cf30-4046-af0b-8f6c36f69000\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.213803 4558 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/2328ffd8-4eb7-41b8-abfb-b5ee5a12f117-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"2328ffd8-4eb7-41b8-abfb-b5ee5a12f117\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.216860 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2328ffd8-4eb7-41b8-abfb-b5ee5a12f117-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"2328ffd8-4eb7-41b8-abfb-b5ee5a12f117\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.220648 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2328ffd8-4eb7-41b8-abfb-b5ee5a12f117-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"2328ffd8-4eb7-41b8-abfb-b5ee5a12f117\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.221315 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2328ffd8-4eb7-41b8-abfb-b5ee5a12f117-logs\") pod \"glance-default-external-api-0\" (UID: \"2328ffd8-4eb7-41b8-abfb-b5ee5a12f117\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.222419 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2328ffd8-4eb7-41b8-abfb-b5ee5a12f117-scripts\") pod \"glance-default-external-api-0\" (UID: \"2328ffd8-4eb7-41b8-abfb-b5ee5a12f117\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.222630 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/92c317c8-cf30-4046-af0b-8f6c36f69000-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"92c317c8-cf30-4046-af0b-8f6c36f69000\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.223038 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"2328ffd8-4eb7-41b8-abfb-b5ee5a12f117\") device mount path \"/mnt/openstack/pv09\"" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.227355 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2328ffd8-4eb7-41b8-abfb-b5ee5a12f117-config-data\") pod \"glance-default-external-api-0\" (UID: \"2328ffd8-4eb7-41b8-abfb-b5ee5a12f117\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.227626 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/2328ffd8-4eb7-41b8-abfb-b5ee5a12f117-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"2328ffd8-4eb7-41b8-abfb-b5ee5a12f117\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.227756 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded 
for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"92c317c8-cf30-4046-af0b-8f6c36f69000\") device mount path \"/mnt/openstack/pv11\"" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.242290 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/92c317c8-cf30-4046-af0b-8f6c36f69000-logs\") pod \"glance-default-internal-api-0\" (UID: \"92c317c8-cf30-4046-af0b-8f6c36f69000\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.244729 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92c317c8-cf30-4046-af0b-8f6c36f69000-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"92c317c8-cf30-4046-af0b-8f6c36f69000\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.249007 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92c317c8-cf30-4046-af0b-8f6c36f69000-config-data\") pod \"glance-default-internal-api-0\" (UID: \"92c317c8-cf30-4046-af0b-8f6c36f69000\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.259743 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/92c317c8-cf30-4046-af0b-8f6c36f69000-scripts\") pod \"glance-default-internal-api-0\" (UID: \"92c317c8-cf30-4046-af0b-8f6c36f69000\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.261809 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/92c317c8-cf30-4046-af0b-8f6c36f69000-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"92c317c8-cf30-4046-af0b-8f6c36f69000\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.261856 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8fpts\" (UniqueName: \"kubernetes.io/projected/92c317c8-cf30-4046-af0b-8f6c36f69000-kube-api-access-8fpts\") pod \"glance-default-internal-api-0\" (UID: \"92c317c8-cf30-4046-af0b-8f6c36f69000\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.272827 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4nd8m\" (UniqueName: \"kubernetes.io/projected/2328ffd8-4eb7-41b8-abfb-b5ee5a12f117-kube-api-access-4nd8m\") pod \"glance-default-external-api-0\" (UID: \"2328ffd8-4eb7-41b8-abfb-b5ee5a12f117\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.283301 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"2328ffd8-4eb7-41b8-abfb-b5ee5a12f117\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.300901 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.313477 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-db-sync-fq8jg" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.313565 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"92c317c8-cf30-4046-af0b-8f6c36f69000\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.420054 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mbpf4\" (UniqueName: \"kubernetes.io/projected/c48697d3-0295-45c7-89d7-7d2c524f3b20-kube-api-access-mbpf4\") pod \"c48697d3-0295-45c7-89d7-7d2c524f3b20\" (UID: \"c48697d3-0295-45c7-89d7-7d2c524f3b20\") " Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.420230 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c48697d3-0295-45c7-89d7-7d2c524f3b20-logs\") pod \"c48697d3-0295-45c7-89d7-7d2c524f3b20\" (UID: \"c48697d3-0295-45c7-89d7-7d2c524f3b20\") " Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.420281 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c48697d3-0295-45c7-89d7-7d2c524f3b20-combined-ca-bundle\") pod \"c48697d3-0295-45c7-89d7-7d2c524f3b20\" (UID: \"c48697d3-0295-45c7-89d7-7d2c524f3b20\") " Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.420311 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c48697d3-0295-45c7-89d7-7d2c524f3b20-config-data\") pod \"c48697d3-0295-45c7-89d7-7d2c524f3b20\" (UID: \"c48697d3-0295-45c7-89d7-7d2c524f3b20\") " Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.420335 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c48697d3-0295-45c7-89d7-7d2c524f3b20-scripts\") pod \"c48697d3-0295-45c7-89d7-7d2c524f3b20\" (UID: \"c48697d3-0295-45c7-89d7-7d2c524f3b20\") " Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.420718 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c48697d3-0295-45c7-89d7-7d2c524f3b20-logs" (OuterVolumeSpecName: "logs") pod "c48697d3-0295-45c7-89d7-7d2c524f3b20" (UID: "c48697d3-0295-45c7-89d7-7d2c524f3b20"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.427695 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c48697d3-0295-45c7-89d7-7d2c524f3b20-kube-api-access-mbpf4" (OuterVolumeSpecName: "kube-api-access-mbpf4") pod "c48697d3-0295-45c7-89d7-7d2c524f3b20" (UID: "c48697d3-0295-45c7-89d7-7d2c524f3b20"). InnerVolumeSpecName "kube-api-access-mbpf4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.428417 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c48697d3-0295-45c7-89d7-7d2c524f3b20-scripts" (OuterVolumeSpecName: "scripts") pod "c48697d3-0295-45c7-89d7-7d2c524f3b20" (UID: "c48697d3-0295-45c7-89d7-7d2c524f3b20"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.462271 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c48697d3-0295-45c7-89d7-7d2c524f3b20-config-data" (OuterVolumeSpecName: "config-data") pod "c48697d3-0295-45c7-89d7-7d2c524f3b20" (UID: "c48697d3-0295-45c7-89d7-7d2c524f3b20"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.484779 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c48697d3-0295-45c7-89d7-7d2c524f3b20-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c48697d3-0295-45c7-89d7-7d2c524f3b20" (UID: "c48697d3-0295-45c7-89d7-7d2c524f3b20"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.525207 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c48697d3-0295-45c7-89d7-7d2c524f3b20-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.525236 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c48697d3-0295-45c7-89d7-7d2c524f3b20-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.525248 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c48697d3-0295-45c7-89d7-7d2c524f3b20-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.525259 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c48697d3-0295-45c7-89d7-7d2c524f3b20-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.525269 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mbpf4\" (UniqueName: \"kubernetes.io/projected/c48697d3-0295-45c7-89d7-7d2c524f3b20-kube-api-access-mbpf4\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.579347 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="844b234f-5c4d-4250-911d-cd06f864fbb1" path="/var/lib/kubelet/pods/844b234f-5c4d-4250-911d-cd06f864fbb1/volumes" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.580197 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a90f602d-27b9-4a25-ae05-2129b11e9076" path="/var/lib/kubelet/pods/a90f602d-27b9-4a25-ae05-2129b11e9076/volumes" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.589637 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.856957 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-sync-fq8jg" event={"ID":"c48697d3-0295-45c7-89d7-7d2c524f3b20","Type":"ContainerDied","Data":"7718980a8e20290bb552139c1fa8bfcfa2247031699c68d3efc50e302eb4c54f"} Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.858659 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7718980a8e20290bb552139c1fa8bfcfa2247031699c68d3efc50e302eb4c54f" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.857383 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-db-sync-fq8jg" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.859537 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.865362 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"1ae5026e-37c6-453a-943c-9dfd8cebb2c2","Type":"ContainerStarted","Data":"82fae930e0ba2d4c5b5441974cd5c07a1f62d67af7c19b56c390f771baa7b88c"} Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.865549 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="1ae5026e-37c6-453a-943c-9dfd8cebb2c2" containerName="ceilometer-central-agent" containerID="cri-o://5c6cab5622103a6a77631945133b52c00af4530b77736c2bdab0301ddea2e07b" gracePeriod=30 Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.865813 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.865870 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="1ae5026e-37c6-453a-943c-9dfd8cebb2c2" containerName="proxy-httpd" containerID="cri-o://82fae930e0ba2d4c5b5441974cd5c07a1f62d67af7c19b56c390f771baa7b88c" gracePeriod=30 Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.865931 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="1ae5026e-37c6-453a-943c-9dfd8cebb2c2" containerName="sg-core" containerID="cri-o://4179fc735effbae6159a69c9ca68266281ba202cd4f205078b6baf7d7940eb2a" gracePeriod=30 Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.865970 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="1ae5026e-37c6-453a-943c-9dfd8cebb2c2" containerName="ceilometer-notification-agent" containerID="cri-o://127398378ff58fceaa81c387bc824a1d5d37e0bf66fbdf7b76a1730850e6f86f" gracePeriod=30 Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.874329 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-sync-fv6q5" event={"ID":"ae3119fb-1355-4da8-a005-3610a1733b90","Type":"ContainerStarted","Data":"ef33eca59ed0960fac7c18dbc983784852c470d4f156ce3a7db2a36e9f8dde75"} Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.874362 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-sync-fv6q5" 
event={"ID":"ae3119fb-1355-4da8-a005-3610a1733b90","Type":"ContainerStarted","Data":"7d77a5a90bafb070ab764dc73ad08392d0e8fca1442bd472a3965e3294e4bcc1"} Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.877669 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-sync-ndrzj" event={"ID":"86a50da2-ef44-4072-b680-d56cc4ee67c5","Type":"ContainerStarted","Data":"2e74159a59845803699da99c5974e7ed46127faee243f67df4a03b31c6f34d49"} Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.877708 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-sync-ndrzj" event={"ID":"86a50da2-ef44-4072-b680-d56cc4ee67c5","Type":"ContainerStarted","Data":"23f00391047468e4d9278d4c196659b045b6caece849b4789c2b2f0d6d4fecec"} Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.889630 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=1.7225655020000001 podStartE2EDuration="5.8896087s" podCreationTimestamp="2026-01-20 17:39:39 +0000 UTC" firstStartedPulling="2026-01-20 17:39:39.910623046 +0000 UTC m=+3473.670961013" lastFinishedPulling="2026-01-20 17:39:44.077666244 +0000 UTC m=+3477.838004211" observedRunningTime="2026-01-20 17:39:44.885730559 +0000 UTC m=+3478.646068526" watchObservedRunningTime="2026-01-20 17:39:44.8896087 +0000 UTC m=+3478.649946668" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.924311 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/placement-85684d4d5b-ffd28"] Jan 20 17:39:44 crc kubenswrapper[4558]: E0120 17:39:44.924926 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c48697d3-0295-45c7-89d7-7d2c524f3b20" containerName="placement-db-sync" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.924939 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c48697d3-0295-45c7-89d7-7d2c524f3b20" containerName="placement-db-sync" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.925139 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c48697d3-0295-45c7-89d7-7d2c524f3b20" containerName="placement-db-sync" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.925992 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-85684d4d5b-ffd28" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.934595 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"placement-placement-dockercfg-tx9t2" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.934818 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"placement-scripts" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.934984 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"placement-config-data" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.935319 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-85684d4d5b-ffd28"] Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.942060 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-db-sync-ndrzj" podStartSLOduration=1.942040575 podStartE2EDuration="1.942040575s" podCreationTimestamp="2026-01-20 17:39:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:39:44.907896456 +0000 UTC m=+3478.668234423" watchObservedRunningTime="2026-01-20 17:39:44.942040575 +0000 UTC m=+3478.702378541" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.945959 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68263afb-f6b4-4665-bd7d-6f3b622c5640-combined-ca-bundle\") pod \"placement-85684d4d5b-ffd28\" (UID: \"68263afb-f6b4-4665-bd7d-6f3b622c5640\") " pod="openstack-kuttl-tests/placement-85684d4d5b-ffd28" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.946019 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/68263afb-f6b4-4665-bd7d-6f3b622c5640-scripts\") pod \"placement-85684d4d5b-ffd28\" (UID: \"68263afb-f6b4-4665-bd7d-6f3b622c5640\") " pod="openstack-kuttl-tests/placement-85684d4d5b-ffd28" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.946075 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/68263afb-f6b4-4665-bd7d-6f3b622c5640-config-data\") pod \"placement-85684d4d5b-ffd28\" (UID: \"68263afb-f6b4-4665-bd7d-6f3b622c5640\") " pod="openstack-kuttl-tests/placement-85684d4d5b-ffd28" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.946137 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/68263afb-f6b4-4665-bd7d-6f3b622c5640-logs\") pod \"placement-85684d4d5b-ffd28\" (UID: \"68263afb-f6b4-4665-bd7d-6f3b622c5640\") " pod="openstack-kuttl-tests/placement-85684d4d5b-ffd28" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.946290 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-js4h2\" (UniqueName: \"kubernetes.io/projected/68263afb-f6b4-4665-bd7d-6f3b622c5640-kube-api-access-js4h2\") pod \"placement-85684d4d5b-ffd28\" (UID: \"68263afb-f6b4-4665-bd7d-6f3b622c5640\") " pod="openstack-kuttl-tests/placement-85684d4d5b-ffd28" Jan 20 17:39:44 crc kubenswrapper[4558]: I0120 17:39:44.958306 4558 pod_startup_latency_tracker.go:104] "Observed pod startup 
duration" pod="openstack-kuttl-tests/neutron-db-sync-fv6q5" podStartSLOduration=1.9582879640000002 podStartE2EDuration="1.958287964s" podCreationTimestamp="2026-01-20 17:39:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:39:44.92429013 +0000 UTC m=+3478.684628097" watchObservedRunningTime="2026-01-20 17:39:44.958287964 +0000 UTC m=+3478.718625930" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.014212 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:39:45 crc kubenswrapper[4558]: W0120 17:39:45.035001 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod92c317c8_cf30_4046_af0b_8f6c36f69000.slice/crio-060bea66f307028730316adbd8f3bfcf48d15dc279eb87ccc62b037d74642d41 WatchSource:0}: Error finding container 060bea66f307028730316adbd8f3bfcf48d15dc279eb87ccc62b037d74642d41: Status 404 returned error can't find the container with id 060bea66f307028730316adbd8f3bfcf48d15dc279eb87ccc62b037d74642d41 Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.049220 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-js4h2\" (UniqueName: \"kubernetes.io/projected/68263afb-f6b4-4665-bd7d-6f3b622c5640-kube-api-access-js4h2\") pod \"placement-85684d4d5b-ffd28\" (UID: \"68263afb-f6b4-4665-bd7d-6f3b622c5640\") " pod="openstack-kuttl-tests/placement-85684d4d5b-ffd28" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.049278 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68263afb-f6b4-4665-bd7d-6f3b622c5640-combined-ca-bundle\") pod \"placement-85684d4d5b-ffd28\" (UID: \"68263afb-f6b4-4665-bd7d-6f3b622c5640\") " pod="openstack-kuttl-tests/placement-85684d4d5b-ffd28" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.049318 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/68263afb-f6b4-4665-bd7d-6f3b622c5640-scripts\") pod \"placement-85684d4d5b-ffd28\" (UID: \"68263afb-f6b4-4665-bd7d-6f3b622c5640\") " pod="openstack-kuttl-tests/placement-85684d4d5b-ffd28" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.049365 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/68263afb-f6b4-4665-bd7d-6f3b622c5640-config-data\") pod \"placement-85684d4d5b-ffd28\" (UID: \"68263afb-f6b4-4665-bd7d-6f3b622c5640\") " pod="openstack-kuttl-tests/placement-85684d4d5b-ffd28" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.049410 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/68263afb-f6b4-4665-bd7d-6f3b622c5640-logs\") pod \"placement-85684d4d5b-ffd28\" (UID: \"68263afb-f6b4-4665-bd7d-6f3b622c5640\") " pod="openstack-kuttl-tests/placement-85684d4d5b-ffd28" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.049846 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/68263afb-f6b4-4665-bd7d-6f3b622c5640-logs\") pod \"placement-85684d4d5b-ffd28\" (UID: \"68263afb-f6b4-4665-bd7d-6f3b622c5640\") " pod="openstack-kuttl-tests/placement-85684d4d5b-ffd28" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 
17:39:45.056780 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/68263afb-f6b4-4665-bd7d-6f3b622c5640-scripts\") pod \"placement-85684d4d5b-ffd28\" (UID: \"68263afb-f6b4-4665-bd7d-6f3b622c5640\") " pod="openstack-kuttl-tests/placement-85684d4d5b-ffd28" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.060729 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68263afb-f6b4-4665-bd7d-6f3b622c5640-combined-ca-bundle\") pod \"placement-85684d4d5b-ffd28\" (UID: \"68263afb-f6b4-4665-bd7d-6f3b622c5640\") " pod="openstack-kuttl-tests/placement-85684d4d5b-ffd28" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.064334 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-js4h2\" (UniqueName: \"kubernetes.io/projected/68263afb-f6b4-4665-bd7d-6f3b622c5640-kube-api-access-js4h2\") pod \"placement-85684d4d5b-ffd28\" (UID: \"68263afb-f6b4-4665-bd7d-6f3b622c5640\") " pod="openstack-kuttl-tests/placement-85684d4d5b-ffd28" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.064861 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/68263afb-f6b4-4665-bd7d-6f3b622c5640-config-data\") pod \"placement-85684d4d5b-ffd28\" (UID: \"68263afb-f6b4-4665-bd7d-6f3b622c5640\") " pod="openstack-kuttl-tests/placement-85684d4d5b-ffd28" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.267308 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-85684d4d5b-ffd28" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.343126 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-rxwfk" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.355632 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/427dd0db-2906-44d3-816e-a09f7cdd3a12-credential-keys\") pod \"427dd0db-2906-44d3-816e-a09f7cdd3a12\" (UID: \"427dd0db-2906-44d3-816e-a09f7cdd3a12\") " Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.355705 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/427dd0db-2906-44d3-816e-a09f7cdd3a12-scripts\") pod \"427dd0db-2906-44d3-816e-a09f7cdd3a12\" (UID: \"427dd0db-2906-44d3-816e-a09f7cdd3a12\") " Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.355834 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dvz2g\" (UniqueName: \"kubernetes.io/projected/427dd0db-2906-44d3-816e-a09f7cdd3a12-kube-api-access-dvz2g\") pod \"427dd0db-2906-44d3-816e-a09f7cdd3a12\" (UID: \"427dd0db-2906-44d3-816e-a09f7cdd3a12\") " Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.355923 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/427dd0db-2906-44d3-816e-a09f7cdd3a12-config-data\") pod \"427dd0db-2906-44d3-816e-a09f7cdd3a12\" (UID: \"427dd0db-2906-44d3-816e-a09f7cdd3a12\") " Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.355944 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/427dd0db-2906-44d3-816e-a09f7cdd3a12-fernet-keys\") pod \"427dd0db-2906-44d3-816e-a09f7cdd3a12\" (UID: \"427dd0db-2906-44d3-816e-a09f7cdd3a12\") " Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.356005 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/427dd0db-2906-44d3-816e-a09f7cdd3a12-combined-ca-bundle\") pod \"427dd0db-2906-44d3-816e-a09f7cdd3a12\" (UID: \"427dd0db-2906-44d3-816e-a09f7cdd3a12\") " Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.366771 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/427dd0db-2906-44d3-816e-a09f7cdd3a12-kube-api-access-dvz2g" (OuterVolumeSpecName: "kube-api-access-dvz2g") pod "427dd0db-2906-44d3-816e-a09f7cdd3a12" (UID: "427dd0db-2906-44d3-816e-a09f7cdd3a12"). InnerVolumeSpecName "kube-api-access-dvz2g". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.378233 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/427dd0db-2906-44d3-816e-a09f7cdd3a12-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "427dd0db-2906-44d3-816e-a09f7cdd3a12" (UID: "427dd0db-2906-44d3-816e-a09f7cdd3a12"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.383059 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/427dd0db-2906-44d3-816e-a09f7cdd3a12-scripts" (OuterVolumeSpecName: "scripts") pod "427dd0db-2906-44d3-816e-a09f7cdd3a12" (UID: "427dd0db-2906-44d3-816e-a09f7cdd3a12"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.391805 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/427dd0db-2906-44d3-816e-a09f7cdd3a12-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "427dd0db-2906-44d3-816e-a09f7cdd3a12" (UID: "427dd0db-2906-44d3-816e-a09f7cdd3a12"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.413762 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/placement-69d7f65964-r46nm"] Jan 20 17:39:45 crc kubenswrapper[4558]: E0120 17:39:45.414528 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="427dd0db-2906-44d3-816e-a09f7cdd3a12" containerName="keystone-bootstrap" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.414543 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="427dd0db-2906-44d3-816e-a09f7cdd3a12" containerName="keystone-bootstrap" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.414766 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="427dd0db-2906-44d3-816e-a09f7cdd3a12" containerName="keystone-bootstrap" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.418366 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-69d7f65964-r46nm" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.422138 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-placement-public-svc" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.422481 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-placement-internal-svc" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.428939 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-69d7f65964-r46nm"] Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.440027 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/427dd0db-2906-44d3-816e-a09f7cdd3a12-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "427dd0db-2906-44d3-816e-a09f7cdd3a12" (UID: "427dd0db-2906-44d3-816e-a09f7cdd3a12"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.441209 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/427dd0db-2906-44d3-816e-a09f7cdd3a12-config-data" (OuterVolumeSpecName: "config-data") pod "427dd0db-2906-44d3-816e-a09f7cdd3a12" (UID: "427dd0db-2906-44d3-816e-a09f7cdd3a12"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.458634 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-59g5m\" (UniqueName: \"kubernetes.io/projected/bf32468e-b0a7-4b81-bb49-c65a95997903-kube-api-access-59g5m\") pod \"placement-69d7f65964-r46nm\" (UID: \"bf32468e-b0a7-4b81-bb49-c65a95997903\") " pod="openstack-kuttl-tests/placement-69d7f65964-r46nm" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.458678 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf32468e-b0a7-4b81-bb49-c65a95997903-config-data\") pod \"placement-69d7f65964-r46nm\" (UID: \"bf32468e-b0a7-4b81-bb49-c65a95997903\") " pod="openstack-kuttl-tests/placement-69d7f65964-r46nm" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.458719 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bf32468e-b0a7-4b81-bb49-c65a95997903-public-tls-certs\") pod \"placement-69d7f65964-r46nm\" (UID: \"bf32468e-b0a7-4b81-bb49-c65a95997903\") " pod="openstack-kuttl-tests/placement-69d7f65964-r46nm" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.458772 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf32468e-b0a7-4b81-bb49-c65a95997903-combined-ca-bundle\") pod \"placement-69d7f65964-r46nm\" (UID: \"bf32468e-b0a7-4b81-bb49-c65a95997903\") " pod="openstack-kuttl-tests/placement-69d7f65964-r46nm" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.458793 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/bf32468e-b0a7-4b81-bb49-c65a95997903-internal-tls-certs\") pod \"placement-69d7f65964-r46nm\" (UID: \"bf32468e-b0a7-4b81-bb49-c65a95997903\") " pod="openstack-kuttl-tests/placement-69d7f65964-r46nm" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.458821 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bf32468e-b0a7-4b81-bb49-c65a95997903-logs\") pod \"placement-69d7f65964-r46nm\" (UID: \"bf32468e-b0a7-4b81-bb49-c65a95997903\") " pod="openstack-kuttl-tests/placement-69d7f65964-r46nm" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.458838 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bf32468e-b0a7-4b81-bb49-c65a95997903-scripts\") pod \"placement-69d7f65964-r46nm\" (UID: \"bf32468e-b0a7-4b81-bb49-c65a95997903\") " pod="openstack-kuttl-tests/placement-69d7f65964-r46nm" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.458921 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/427dd0db-2906-44d3-816e-a09f7cdd3a12-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.458931 4558 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/427dd0db-2906-44d3-816e-a09f7cdd3a12-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.458940 4558 
reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/427dd0db-2906-44d3-816e-a09f7cdd3a12-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.458950 4558 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/427dd0db-2906-44d3-816e-a09f7cdd3a12-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.458959 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/427dd0db-2906-44d3-816e-a09f7cdd3a12-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.458992 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dvz2g\" (UniqueName: \"kubernetes.io/projected/427dd0db-2906-44d3-816e-a09f7cdd3a12-kube-api-access-dvz2g\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.483445 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-db-sync-2sxrh" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.560605 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-59s64\" (UniqueName: \"kubernetes.io/projected/fd4fc3e6-d57c-4c96-9109-8c8c3c833307-kube-api-access-59s64\") pod \"fd4fc3e6-d57c-4c96-9109-8c8c3c833307\" (UID: \"fd4fc3e6-d57c-4c96-9109-8c8c3c833307\") " Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.560729 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/fd4fc3e6-d57c-4c96-9109-8c8c3c833307-db-sync-config-data\") pod \"fd4fc3e6-d57c-4c96-9109-8c8c3c833307\" (UID: \"fd4fc3e6-d57c-4c96-9109-8c8c3c833307\") " Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.560775 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fd4fc3e6-d57c-4c96-9109-8c8c3c833307-scripts\") pod \"fd4fc3e6-d57c-4c96-9109-8c8c3c833307\" (UID: \"fd4fc3e6-d57c-4c96-9109-8c8c3c833307\") " Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.561000 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd4fc3e6-d57c-4c96-9109-8c8c3c833307-combined-ca-bundle\") pod \"fd4fc3e6-d57c-4c96-9109-8c8c3c833307\" (UID: \"fd4fc3e6-d57c-4c96-9109-8c8c3c833307\") " Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.561038 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd4fc3e6-d57c-4c96-9109-8c8c3c833307-config-data\") pod \"fd4fc3e6-d57c-4c96-9109-8c8c3c833307\" (UID: \"fd4fc3e6-d57c-4c96-9109-8c8c3c833307\") " Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.561073 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/fd4fc3e6-d57c-4c96-9109-8c8c3c833307-etc-machine-id\") pod \"fd4fc3e6-d57c-4c96-9109-8c8c3c833307\" (UID: \"fd4fc3e6-d57c-4c96-9109-8c8c3c833307\") " Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.561472 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/bf32468e-b0a7-4b81-bb49-c65a95997903-combined-ca-bundle\") pod \"placement-69d7f65964-r46nm\" (UID: \"bf32468e-b0a7-4b81-bb49-c65a95997903\") " pod="openstack-kuttl-tests/placement-69d7f65964-r46nm" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.561527 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/bf32468e-b0a7-4b81-bb49-c65a95997903-internal-tls-certs\") pod \"placement-69d7f65964-r46nm\" (UID: \"bf32468e-b0a7-4b81-bb49-c65a95997903\") " pod="openstack-kuttl-tests/placement-69d7f65964-r46nm" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.562581 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bf32468e-b0a7-4b81-bb49-c65a95997903-logs\") pod \"placement-69d7f65964-r46nm\" (UID: \"bf32468e-b0a7-4b81-bb49-c65a95997903\") " pod="openstack-kuttl-tests/placement-69d7f65964-r46nm" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.562641 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bf32468e-b0a7-4b81-bb49-c65a95997903-scripts\") pod \"placement-69d7f65964-r46nm\" (UID: \"bf32468e-b0a7-4b81-bb49-c65a95997903\") " pod="openstack-kuttl-tests/placement-69d7f65964-r46nm" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.562832 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-59g5m\" (UniqueName: \"kubernetes.io/projected/bf32468e-b0a7-4b81-bb49-c65a95997903-kube-api-access-59g5m\") pod \"placement-69d7f65964-r46nm\" (UID: \"bf32468e-b0a7-4b81-bb49-c65a95997903\") " pod="openstack-kuttl-tests/placement-69d7f65964-r46nm" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.562866 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf32468e-b0a7-4b81-bb49-c65a95997903-config-data\") pod \"placement-69d7f65964-r46nm\" (UID: \"bf32468e-b0a7-4b81-bb49-c65a95997903\") " pod="openstack-kuttl-tests/placement-69d7f65964-r46nm" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.562982 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bf32468e-b0a7-4b81-bb49-c65a95997903-public-tls-certs\") pod \"placement-69d7f65964-r46nm\" (UID: \"bf32468e-b0a7-4b81-bb49-c65a95997903\") " pod="openstack-kuttl-tests/placement-69d7f65964-r46nm" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.564944 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fd4fc3e6-d57c-4c96-9109-8c8c3c833307-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "fd4fc3e6-d57c-4c96-9109-8c8c3c833307" (UID: "fd4fc3e6-d57c-4c96-9109-8c8c3c833307"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.565719 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bf32468e-b0a7-4b81-bb49-c65a95997903-logs\") pod \"placement-69d7f65964-r46nm\" (UID: \"bf32468e-b0a7-4b81-bb49-c65a95997903\") " pod="openstack-kuttl-tests/placement-69d7f65964-r46nm" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.569780 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/bf32468e-b0a7-4b81-bb49-c65a95997903-internal-tls-certs\") pod \"placement-69d7f65964-r46nm\" (UID: \"bf32468e-b0a7-4b81-bb49-c65a95997903\") " pod="openstack-kuttl-tests/placement-69d7f65964-r46nm" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.572444 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bf32468e-b0a7-4b81-bb49-c65a95997903-public-tls-certs\") pod \"placement-69d7f65964-r46nm\" (UID: \"bf32468e-b0a7-4b81-bb49-c65a95997903\") " pod="openstack-kuttl-tests/placement-69d7f65964-r46nm" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.575990 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf32468e-b0a7-4b81-bb49-c65a95997903-combined-ca-bundle\") pod \"placement-69d7f65964-r46nm\" (UID: \"bf32468e-b0a7-4b81-bb49-c65a95997903\") " pod="openstack-kuttl-tests/placement-69d7f65964-r46nm" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.577921 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd4fc3e6-d57c-4c96-9109-8c8c3c833307-scripts" (OuterVolumeSpecName: "scripts") pod "fd4fc3e6-d57c-4c96-9109-8c8c3c833307" (UID: "fd4fc3e6-d57c-4c96-9109-8c8c3c833307"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.578011 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd4fc3e6-d57c-4c96-9109-8c8c3c833307-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "fd4fc3e6-d57c-4c96-9109-8c8c3c833307" (UID: "fd4fc3e6-d57c-4c96-9109-8c8c3c833307"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.578617 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bf32468e-b0a7-4b81-bb49-c65a95997903-scripts\") pod \"placement-69d7f65964-r46nm\" (UID: \"bf32468e-b0a7-4b81-bb49-c65a95997903\") " pod="openstack-kuttl-tests/placement-69d7f65964-r46nm" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.579963 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fd4fc3e6-d57c-4c96-9109-8c8c3c833307-kube-api-access-59s64" (OuterVolumeSpecName: "kube-api-access-59s64") pod "fd4fc3e6-d57c-4c96-9109-8c8c3c833307" (UID: "fd4fc3e6-d57c-4c96-9109-8c8c3c833307"). InnerVolumeSpecName "kube-api-access-59s64". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.580573 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf32468e-b0a7-4b81-bb49-c65a95997903-config-data\") pod \"placement-69d7f65964-r46nm\" (UID: \"bf32468e-b0a7-4b81-bb49-c65a95997903\") " pod="openstack-kuttl-tests/placement-69d7f65964-r46nm" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.587699 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-59g5m\" (UniqueName: \"kubernetes.io/projected/bf32468e-b0a7-4b81-bb49-c65a95997903-kube-api-access-59g5m\") pod \"placement-69d7f65964-r46nm\" (UID: \"bf32468e-b0a7-4b81-bb49-c65a95997903\") " pod="openstack-kuttl-tests/placement-69d7f65964-r46nm" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.603987 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd4fc3e6-d57c-4c96-9109-8c8c3c833307-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fd4fc3e6-d57c-4c96-9109-8c8c3c833307" (UID: "fd4fc3e6-d57c-4c96-9109-8c8c3c833307"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.665301 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd4fc3e6-d57c-4c96-9109-8c8c3c833307-config-data" (OuterVolumeSpecName: "config-data") pod "fd4fc3e6-d57c-4c96-9109-8c8c3c833307" (UID: "fd4fc3e6-d57c-4c96-9109-8c8c3c833307"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.666770 4558 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/fd4fc3e6-d57c-4c96-9109-8c8c3c833307-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.666802 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fd4fc3e6-d57c-4c96-9109-8c8c3c833307-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.666816 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd4fc3e6-d57c-4c96-9109-8c8c3c833307-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.666824 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd4fc3e6-d57c-4c96-9109-8c8c3c833307-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.666833 4558 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/fd4fc3e6-d57c-4c96-9109-8c8c3c833307-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.666847 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-59s64\" (UniqueName: \"kubernetes.io/projected/fd4fc3e6-d57c-4c96-9109-8c8c3c833307-kube-api-access-59s64\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.787666 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-85684d4d5b-ffd28"] Jan 20 17:39:45 crc kubenswrapper[4558]: W0120 
17:39:45.811572 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod68263afb_f6b4_4665_bd7d_6f3b622c5640.slice/crio-6586d0345276362475021102ade4402b7f08952f8a3a2a950e33992dfed4af6f WatchSource:0}: Error finding container 6586d0345276362475021102ade4402b7f08952f8a3a2a950e33992dfed4af6f: Status 404 returned error can't find the container with id 6586d0345276362475021102ade4402b7f08952f8a3a2a950e33992dfed4af6f Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.815268 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.816242 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-69d7f65964-r46nm" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.871629 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1ae5026e-37c6-453a-943c-9dfd8cebb2c2-config-data\") pod \"1ae5026e-37c6-453a-943c-9dfd8cebb2c2\" (UID: \"1ae5026e-37c6-453a-943c-9dfd8cebb2c2\") " Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.871708 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1ae5026e-37c6-453a-943c-9dfd8cebb2c2-log-httpd\") pod \"1ae5026e-37c6-453a-943c-9dfd8cebb2c2\" (UID: \"1ae5026e-37c6-453a-943c-9dfd8cebb2c2\") " Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.871730 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1ae5026e-37c6-453a-943c-9dfd8cebb2c2-sg-core-conf-yaml\") pod \"1ae5026e-37c6-453a-943c-9dfd8cebb2c2\" (UID: \"1ae5026e-37c6-453a-943c-9dfd8cebb2c2\") " Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.871774 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1ae5026e-37c6-453a-943c-9dfd8cebb2c2-run-httpd\") pod \"1ae5026e-37c6-453a-943c-9dfd8cebb2c2\" (UID: \"1ae5026e-37c6-453a-943c-9dfd8cebb2c2\") " Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.871795 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tzz8b\" (UniqueName: \"kubernetes.io/projected/1ae5026e-37c6-453a-943c-9dfd8cebb2c2-kube-api-access-tzz8b\") pod \"1ae5026e-37c6-453a-943c-9dfd8cebb2c2\" (UID: \"1ae5026e-37c6-453a-943c-9dfd8cebb2c2\") " Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.871907 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ae5026e-37c6-453a-943c-9dfd8cebb2c2-combined-ca-bundle\") pod \"1ae5026e-37c6-453a-943c-9dfd8cebb2c2\" (UID: \"1ae5026e-37c6-453a-943c-9dfd8cebb2c2\") " Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.871952 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1ae5026e-37c6-453a-943c-9dfd8cebb2c2-scripts\") pod \"1ae5026e-37c6-453a-943c-9dfd8cebb2c2\" (UID: \"1ae5026e-37c6-453a-943c-9dfd8cebb2c2\") " Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.875941 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ae5026e-37c6-453a-943c-9dfd8cebb2c2-scripts" 
(OuterVolumeSpecName: "scripts") pod "1ae5026e-37c6-453a-943c-9dfd8cebb2c2" (UID: "1ae5026e-37c6-453a-943c-9dfd8cebb2c2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.881728 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1ae5026e-37c6-453a-943c-9dfd8cebb2c2-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "1ae5026e-37c6-453a-943c-9dfd8cebb2c2" (UID: "1ae5026e-37c6-453a-943c-9dfd8cebb2c2"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.882099 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1ae5026e-37c6-453a-943c-9dfd8cebb2c2-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "1ae5026e-37c6-453a-943c-9dfd8cebb2c2" (UID: "1ae5026e-37c6-453a-943c-9dfd8cebb2c2"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.893843 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1ae5026e-37c6-453a-943c-9dfd8cebb2c2-kube-api-access-tzz8b" (OuterVolumeSpecName: "kube-api-access-tzz8b") pod "1ae5026e-37c6-453a-943c-9dfd8cebb2c2" (UID: "1ae5026e-37c6-453a-943c-9dfd8cebb2c2"). InnerVolumeSpecName "kube-api-access-tzz8b". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.945921 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ae5026e-37c6-453a-943c-9dfd8cebb2c2-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "1ae5026e-37c6-453a-943c-9dfd8cebb2c2" (UID: "1ae5026e-37c6-453a-943c-9dfd8cebb2c2"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.955617 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-85684d4d5b-ffd28" event={"ID":"68263afb-f6b4-4665-bd7d-6f3b622c5640","Type":"ContainerStarted","Data":"6586d0345276362475021102ade4402b7f08952f8a3a2a950e33992dfed4af6f"} Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.975244 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1ae5026e-37c6-453a-943c-9dfd8cebb2c2-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.975490 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1ae5026e-37c6-453a-943c-9dfd8cebb2c2-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.975501 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1ae5026e-37c6-453a-943c-9dfd8cebb2c2-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.975511 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1ae5026e-37c6-453a-943c-9dfd8cebb2c2-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:45 crc kubenswrapper[4558]: I0120 17:39:45.975522 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tzz8b\" (UniqueName: \"kubernetes.io/projected/1ae5026e-37c6-453a-943c-9dfd8cebb2c2-kube-api-access-tzz8b\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.009605 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"92c317c8-cf30-4046-af0b-8f6c36f69000","Type":"ContainerStarted","Data":"9139dd64147779f4a8760ed5d87509a43e88362aa5356602bf57b77210031913"} Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.009650 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"92c317c8-cf30-4046-af0b-8f6c36f69000","Type":"ContainerStarted","Data":"060bea66f307028730316adbd8f3bfcf48d15dc279eb87ccc62b037d74642d41"} Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.018359 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ae5026e-37c6-453a-943c-9dfd8cebb2c2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1ae5026e-37c6-453a-943c-9dfd8cebb2c2" (UID: "1ae5026e-37c6-453a-943c-9dfd8cebb2c2"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.025367 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-rxwfk"] Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.035738 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-rxwfk"] Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.042757 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-7ww48"] Jan 20 17:39:46 crc kubenswrapper[4558]: E0120 17:39:46.043207 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ae5026e-37c6-453a-943c-9dfd8cebb2c2" containerName="sg-core" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.043226 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ae5026e-37c6-453a-943c-9dfd8cebb2c2" containerName="sg-core" Jan 20 17:39:46 crc kubenswrapper[4558]: E0120 17:39:46.043246 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ae5026e-37c6-453a-943c-9dfd8cebb2c2" containerName="proxy-httpd" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.043254 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ae5026e-37c6-453a-943c-9dfd8cebb2c2" containerName="proxy-httpd" Jan 20 17:39:46 crc kubenswrapper[4558]: E0120 17:39:46.043273 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd4fc3e6-d57c-4c96-9109-8c8c3c833307" containerName="cinder-db-sync" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.043280 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd4fc3e6-d57c-4c96-9109-8c8c3c833307" containerName="cinder-db-sync" Jan 20 17:39:46 crc kubenswrapper[4558]: E0120 17:39:46.043301 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ae5026e-37c6-453a-943c-9dfd8cebb2c2" containerName="ceilometer-central-agent" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.043308 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ae5026e-37c6-453a-943c-9dfd8cebb2c2" containerName="ceilometer-central-agent" Jan 20 17:39:46 crc kubenswrapper[4558]: E0120 17:39:46.043318 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ae5026e-37c6-453a-943c-9dfd8cebb2c2" containerName="ceilometer-notification-agent" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.043325 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ae5026e-37c6-453a-943c-9dfd8cebb2c2" containerName="ceilometer-notification-agent" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.043482 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd4fc3e6-d57c-4c96-9109-8c8c3c833307" containerName="cinder-db-sync" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.043500 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="1ae5026e-37c6-453a-943c-9dfd8cebb2c2" containerName="ceilometer-central-agent" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.043510 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="1ae5026e-37c6-453a-943c-9dfd8cebb2c2" containerName="proxy-httpd" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.043518 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="1ae5026e-37c6-453a-943c-9dfd8cebb2c2" containerName="sg-core" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.043535 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="1ae5026e-37c6-453a-943c-9dfd8cebb2c2" 
containerName="ceilometer-notification-agent" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.044107 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-7ww48" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.052766 4558 generic.go:334] "Generic (PLEG): container finished" podID="1ae5026e-37c6-453a-943c-9dfd8cebb2c2" containerID="82fae930e0ba2d4c5b5441974cd5c07a1f62d67af7c19b56c390f771baa7b88c" exitCode=0 Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.052788 4558 generic.go:334] "Generic (PLEG): container finished" podID="1ae5026e-37c6-453a-943c-9dfd8cebb2c2" containerID="4179fc735effbae6159a69c9ca68266281ba202cd4f205078b6baf7d7940eb2a" exitCode=2 Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.052797 4558 generic.go:334] "Generic (PLEG): container finished" podID="1ae5026e-37c6-453a-943c-9dfd8cebb2c2" containerID="127398378ff58fceaa81c387bc824a1d5d37e0bf66fbdf7b76a1730850e6f86f" exitCode=0 Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.052804 4558 generic.go:334] "Generic (PLEG): container finished" podID="1ae5026e-37c6-453a-943c-9dfd8cebb2c2" containerID="5c6cab5622103a6a77631945133b52c00af4530b77736c2bdab0301ddea2e07b" exitCode=0 Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.052841 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"1ae5026e-37c6-453a-943c-9dfd8cebb2c2","Type":"ContainerDied","Data":"82fae930e0ba2d4c5b5441974cd5c07a1f62d67af7c19b56c390f771baa7b88c"} Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.052862 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"1ae5026e-37c6-453a-943c-9dfd8cebb2c2","Type":"ContainerDied","Data":"4179fc735effbae6159a69c9ca68266281ba202cd4f205078b6baf7d7940eb2a"} Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.052875 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"1ae5026e-37c6-453a-943c-9dfd8cebb2c2","Type":"ContainerDied","Data":"127398378ff58fceaa81c387bc824a1d5d37e0bf66fbdf7b76a1730850e6f86f"} Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.052895 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"1ae5026e-37c6-453a-943c-9dfd8cebb2c2","Type":"ContainerDied","Data":"5c6cab5622103a6a77631945133b52c00af4530b77736c2bdab0301ddea2e07b"} Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.052905 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"1ae5026e-37c6-453a-943c-9dfd8cebb2c2","Type":"ContainerDied","Data":"fbd933fbdd457e5313bd1e0b7c95fc1fd3cd0e4461e3949aec7216603544199f"} Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.052921 4558 scope.go:117] "RemoveContainer" containerID="82fae930e0ba2d4c5b5441974cd5c07a1f62d67af7c19b56c390f771baa7b88c" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.053056 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.055948 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-7ww48"] Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.062691 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.090421 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.094234 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-scheduler-config-data" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.107952 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-sync-2sxrh" event={"ID":"fd4fc3e6-d57c-4c96-9109-8c8c3c833307","Type":"ContainerDied","Data":"f47bd09332bfe7495e8b13874999b60de3ec79cd649675900f4a6331bff4405d"} Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.108174 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f47bd09332bfe7495e8b13874999b60de3ec79cd649675900f4a6331bff4405d" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.108311 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-db-sync-2sxrh" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.119009 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4c40242c83102c27d69a3eb2bcad687b523095b6c563321b929f90ae76355c4c" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.119105 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-rxwfk" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.120452 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ae5026e-37c6-453a-943c-9dfd8cebb2c2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.125378 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"2328ffd8-4eb7-41b8-abfb-b5ee5a12f117","Type":"ContainerStarted","Data":"51fd39b0c7c20748a2724d7df276628b2e6749c56463b33881b466e963a20122"} Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.125458 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"2328ffd8-4eb7-41b8-abfb-b5ee5a12f117","Type":"ContainerStarted","Data":"01c975adf8400e28d35866331bd51462a577f8d7244921c6bb34e10e1ffa8769"} Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.129084 4558 generic.go:334] "Generic (PLEG): container finished" podID="86a50da2-ef44-4072-b680-d56cc4ee67c5" containerID="2e74159a59845803699da99c5974e7ed46127faee243f67df4a03b31c6f34d49" exitCode=0 Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.130034 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-sync-ndrzj" event={"ID":"86a50da2-ef44-4072-b680-d56cc4ee67c5","Type":"ContainerDied","Data":"2e74159a59845803699da99c5974e7ed46127faee243f67df4a03b31c6f34d49"} Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.154481 4558 scope.go:117] "RemoveContainer" containerID="4179fc735effbae6159a69c9ca68266281ba202cd4f205078b6baf7d7940eb2a" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.212666 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ae5026e-37c6-453a-943c-9dfd8cebb2c2-config-data" (OuterVolumeSpecName: "config-data") pod "1ae5026e-37c6-453a-943c-9dfd8cebb2c2" (UID: "1ae5026e-37c6-453a-943c-9dfd8cebb2c2"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.213106 4558 scope.go:117] "RemoveContainer" containerID="127398378ff58fceaa81c387bc824a1d5d37e0bf66fbdf7b76a1730850e6f86f" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.223329 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/81080ef7-fd3d-487f-b6aa-0fb9eaebc855-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"81080ef7-fd3d-487f-b6aa-0fb9eaebc855\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.223450 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81080ef7-fd3d-487f-b6aa-0fb9eaebc855-config-data\") pod \"cinder-scheduler-0\" (UID: \"81080ef7-fd3d-487f-b6aa-0fb9eaebc855\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.223484 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/227c3622-b4eb-4862-9b18-a90df7ea75b9-fernet-keys\") pod \"keystone-bootstrap-7ww48\" (UID: \"227c3622-b4eb-4862-9b18-a90df7ea75b9\") " pod="openstack-kuttl-tests/keystone-bootstrap-7ww48" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.223528 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/227c3622-b4eb-4862-9b18-a90df7ea75b9-config-data\") pod \"keystone-bootstrap-7ww48\" (UID: \"227c3622-b4eb-4862-9b18-a90df7ea75b9\") " pod="openstack-kuttl-tests/keystone-bootstrap-7ww48" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.223579 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/227c3622-b4eb-4862-9b18-a90df7ea75b9-credential-keys\") pod \"keystone-bootstrap-7ww48\" (UID: \"227c3622-b4eb-4862-9b18-a90df7ea75b9\") " pod="openstack-kuttl-tests/keystone-bootstrap-7ww48" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.223603 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/227c3622-b4eb-4862-9b18-a90df7ea75b9-scripts\") pod \"keystone-bootstrap-7ww48\" (UID: \"227c3622-b4eb-4862-9b18-a90df7ea75b9\") " pod="openstack-kuttl-tests/keystone-bootstrap-7ww48" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.223651 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-98t2c\" (UniqueName: \"kubernetes.io/projected/81080ef7-fd3d-487f-b6aa-0fb9eaebc855-kube-api-access-98t2c\") pod \"cinder-scheduler-0\" (UID: \"81080ef7-fd3d-487f-b6aa-0fb9eaebc855\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.223690 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/81080ef7-fd3d-487f-b6aa-0fb9eaebc855-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"81080ef7-fd3d-487f-b6aa-0fb9eaebc855\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.223750 4558 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81080ef7-fd3d-487f-b6aa-0fb9eaebc855-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"81080ef7-fd3d-487f-b6aa-0fb9eaebc855\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.223771 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/81080ef7-fd3d-487f-b6aa-0fb9eaebc855-scripts\") pod \"cinder-scheduler-0\" (UID: \"81080ef7-fd3d-487f-b6aa-0fb9eaebc855\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.223821 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ht6g2\" (UniqueName: \"kubernetes.io/projected/227c3622-b4eb-4862-9b18-a90df7ea75b9-kube-api-access-ht6g2\") pod \"keystone-bootstrap-7ww48\" (UID: \"227c3622-b4eb-4862-9b18-a90df7ea75b9\") " pod="openstack-kuttl-tests/keystone-bootstrap-7ww48" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.223843 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/227c3622-b4eb-4862-9b18-a90df7ea75b9-combined-ca-bundle\") pod \"keystone-bootstrap-7ww48\" (UID: \"227c3622-b4eb-4862-9b18-a90df7ea75b9\") " pod="openstack-kuttl-tests/keystone-bootstrap-7ww48" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.223942 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1ae5026e-37c6-453a-943c-9dfd8cebb2c2-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.239643 4558 scope.go:117] "RemoveContainer" containerID="5c6cab5622103a6a77631945133b52c00af4530b77736c2bdab0301ddea2e07b" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.247811 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.284377 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.288359 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.290828 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-api-config-data" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.301398 4558 scope.go:117] "RemoveContainer" containerID="82fae930e0ba2d4c5b5441974cd5c07a1f62d67af7c19b56c390f771baa7b88c" Jan 20 17:39:46 crc kubenswrapper[4558]: E0120 17:39:46.305072 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"82fae930e0ba2d4c5b5441974cd5c07a1f62d67af7c19b56c390f771baa7b88c\": container with ID starting with 82fae930e0ba2d4c5b5441974cd5c07a1f62d67af7c19b56c390f771baa7b88c not found: ID does not exist" containerID="82fae930e0ba2d4c5b5441974cd5c07a1f62d67af7c19b56c390f771baa7b88c" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.305387 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"82fae930e0ba2d4c5b5441974cd5c07a1f62d67af7c19b56c390f771baa7b88c"} err="failed to get container status \"82fae930e0ba2d4c5b5441974cd5c07a1f62d67af7c19b56c390f771baa7b88c\": rpc error: code = NotFound desc = could not find container \"82fae930e0ba2d4c5b5441974cd5c07a1f62d67af7c19b56c390f771baa7b88c\": container with ID starting with 82fae930e0ba2d4c5b5441974cd5c07a1f62d67af7c19b56c390f771baa7b88c not found: ID does not exist" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.305838 4558 scope.go:117] "RemoveContainer" containerID="4179fc735effbae6159a69c9ca68266281ba202cd4f205078b6baf7d7940eb2a" Jan 20 17:39:46 crc kubenswrapper[4558]: E0120 17:39:46.306472 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4179fc735effbae6159a69c9ca68266281ba202cd4f205078b6baf7d7940eb2a\": container with ID starting with 4179fc735effbae6159a69c9ca68266281ba202cd4f205078b6baf7d7940eb2a not found: ID does not exist" containerID="4179fc735effbae6159a69c9ca68266281ba202cd4f205078b6baf7d7940eb2a" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.306575 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4179fc735effbae6159a69c9ca68266281ba202cd4f205078b6baf7d7940eb2a"} err="failed to get container status \"4179fc735effbae6159a69c9ca68266281ba202cd4f205078b6baf7d7940eb2a\": rpc error: code = NotFound desc = could not find container \"4179fc735effbae6159a69c9ca68266281ba202cd4f205078b6baf7d7940eb2a\": container with ID starting with 4179fc735effbae6159a69c9ca68266281ba202cd4f205078b6baf7d7940eb2a not found: ID does not exist" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.306657 4558 scope.go:117] "RemoveContainer" containerID="127398378ff58fceaa81c387bc824a1d5d37e0bf66fbdf7b76a1730850e6f86f" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.306478 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:39:46 crc kubenswrapper[4558]: E0120 17:39:46.307366 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"127398378ff58fceaa81c387bc824a1d5d37e0bf66fbdf7b76a1730850e6f86f\": container with ID starting with 127398378ff58fceaa81c387bc824a1d5d37e0bf66fbdf7b76a1730850e6f86f not found: ID does not exist" containerID="127398378ff58fceaa81c387bc824a1d5d37e0bf66fbdf7b76a1730850e6f86f" Jan 20 17:39:46 crc 
kubenswrapper[4558]: I0120 17:39:46.307404 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"127398378ff58fceaa81c387bc824a1d5d37e0bf66fbdf7b76a1730850e6f86f"} err="failed to get container status \"127398378ff58fceaa81c387bc824a1d5d37e0bf66fbdf7b76a1730850e6f86f\": rpc error: code = NotFound desc = could not find container \"127398378ff58fceaa81c387bc824a1d5d37e0bf66fbdf7b76a1730850e6f86f\": container with ID starting with 127398378ff58fceaa81c387bc824a1d5d37e0bf66fbdf7b76a1730850e6f86f not found: ID does not exist" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.307446 4558 scope.go:117] "RemoveContainer" containerID="5c6cab5622103a6a77631945133b52c00af4530b77736c2bdab0301ddea2e07b" Jan 20 17:39:46 crc kubenswrapper[4558]: E0120 17:39:46.307877 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5c6cab5622103a6a77631945133b52c00af4530b77736c2bdab0301ddea2e07b\": container with ID starting with 5c6cab5622103a6a77631945133b52c00af4530b77736c2bdab0301ddea2e07b not found: ID does not exist" containerID="5c6cab5622103a6a77631945133b52c00af4530b77736c2bdab0301ddea2e07b" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.307933 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c6cab5622103a6a77631945133b52c00af4530b77736c2bdab0301ddea2e07b"} err="failed to get container status \"5c6cab5622103a6a77631945133b52c00af4530b77736c2bdab0301ddea2e07b\": rpc error: code = NotFound desc = could not find container \"5c6cab5622103a6a77631945133b52c00af4530b77736c2bdab0301ddea2e07b\": container with ID starting with 5c6cab5622103a6a77631945133b52c00af4530b77736c2bdab0301ddea2e07b not found: ID does not exist" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.307948 4558 scope.go:117] "RemoveContainer" containerID="82fae930e0ba2d4c5b5441974cd5c07a1f62d67af7c19b56c390f771baa7b88c" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.308914 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"82fae930e0ba2d4c5b5441974cd5c07a1f62d67af7c19b56c390f771baa7b88c"} err="failed to get container status \"82fae930e0ba2d4c5b5441974cd5c07a1f62d67af7c19b56c390f771baa7b88c\": rpc error: code = NotFound desc = could not find container \"82fae930e0ba2d4c5b5441974cd5c07a1f62d67af7c19b56c390f771baa7b88c\": container with ID starting with 82fae930e0ba2d4c5b5441974cd5c07a1f62d67af7c19b56c390f771baa7b88c not found: ID does not exist" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.308958 4558 scope.go:117] "RemoveContainer" containerID="4179fc735effbae6159a69c9ca68266281ba202cd4f205078b6baf7d7940eb2a" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.309591 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4179fc735effbae6159a69c9ca68266281ba202cd4f205078b6baf7d7940eb2a"} err="failed to get container status \"4179fc735effbae6159a69c9ca68266281ba202cd4f205078b6baf7d7940eb2a\": rpc error: code = NotFound desc = could not find container \"4179fc735effbae6159a69c9ca68266281ba202cd4f205078b6baf7d7940eb2a\": container with ID starting with 4179fc735effbae6159a69c9ca68266281ba202cd4f205078b6baf7d7940eb2a not found: ID does not exist" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.309619 4558 scope.go:117] "RemoveContainer" containerID="127398378ff58fceaa81c387bc824a1d5d37e0bf66fbdf7b76a1730850e6f86f" Jan 20 17:39:46 crc 
kubenswrapper[4558]: I0120 17:39:46.311697 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"127398378ff58fceaa81c387bc824a1d5d37e0bf66fbdf7b76a1730850e6f86f"} err="failed to get container status \"127398378ff58fceaa81c387bc824a1d5d37e0bf66fbdf7b76a1730850e6f86f\": rpc error: code = NotFound desc = could not find container \"127398378ff58fceaa81c387bc824a1d5d37e0bf66fbdf7b76a1730850e6f86f\": container with ID starting with 127398378ff58fceaa81c387bc824a1d5d37e0bf66fbdf7b76a1730850e6f86f not found: ID does not exist" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.311793 4558 scope.go:117] "RemoveContainer" containerID="5c6cab5622103a6a77631945133b52c00af4530b77736c2bdab0301ddea2e07b" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.312213 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c6cab5622103a6a77631945133b52c00af4530b77736c2bdab0301ddea2e07b"} err="failed to get container status \"5c6cab5622103a6a77631945133b52c00af4530b77736c2bdab0301ddea2e07b\": rpc error: code = NotFound desc = could not find container \"5c6cab5622103a6a77631945133b52c00af4530b77736c2bdab0301ddea2e07b\": container with ID starting with 5c6cab5622103a6a77631945133b52c00af4530b77736c2bdab0301ddea2e07b not found: ID does not exist" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.312292 4558 scope.go:117] "RemoveContainer" containerID="82fae930e0ba2d4c5b5441974cd5c07a1f62d67af7c19b56c390f771baa7b88c" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.314332 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"82fae930e0ba2d4c5b5441974cd5c07a1f62d67af7c19b56c390f771baa7b88c"} err="failed to get container status \"82fae930e0ba2d4c5b5441974cd5c07a1f62d67af7c19b56c390f771baa7b88c\": rpc error: code = NotFound desc = could not find container \"82fae930e0ba2d4c5b5441974cd5c07a1f62d67af7c19b56c390f771baa7b88c\": container with ID starting with 82fae930e0ba2d4c5b5441974cd5c07a1f62d67af7c19b56c390f771baa7b88c not found: ID does not exist" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.314405 4558 scope.go:117] "RemoveContainer" containerID="4179fc735effbae6159a69c9ca68266281ba202cd4f205078b6baf7d7940eb2a" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.316030 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4179fc735effbae6159a69c9ca68266281ba202cd4f205078b6baf7d7940eb2a"} err="failed to get container status \"4179fc735effbae6159a69c9ca68266281ba202cd4f205078b6baf7d7940eb2a\": rpc error: code = NotFound desc = could not find container \"4179fc735effbae6159a69c9ca68266281ba202cd4f205078b6baf7d7940eb2a\": container with ID starting with 4179fc735effbae6159a69c9ca68266281ba202cd4f205078b6baf7d7940eb2a not found: ID does not exist" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.316058 4558 scope.go:117] "RemoveContainer" containerID="127398378ff58fceaa81c387bc824a1d5d37e0bf66fbdf7b76a1730850e6f86f" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.320121 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"127398378ff58fceaa81c387bc824a1d5d37e0bf66fbdf7b76a1730850e6f86f"} err="failed to get container status \"127398378ff58fceaa81c387bc824a1d5d37e0bf66fbdf7b76a1730850e6f86f\": rpc error: code = NotFound desc = could not find container \"127398378ff58fceaa81c387bc824a1d5d37e0bf66fbdf7b76a1730850e6f86f\": container with ID 
starting with 127398378ff58fceaa81c387bc824a1d5d37e0bf66fbdf7b76a1730850e6f86f not found: ID does not exist" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.320146 4558 scope.go:117] "RemoveContainer" containerID="5c6cab5622103a6a77631945133b52c00af4530b77736c2bdab0301ddea2e07b" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.320566 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c6cab5622103a6a77631945133b52c00af4530b77736c2bdab0301ddea2e07b"} err="failed to get container status \"5c6cab5622103a6a77631945133b52c00af4530b77736c2bdab0301ddea2e07b\": rpc error: code = NotFound desc = could not find container \"5c6cab5622103a6a77631945133b52c00af4530b77736c2bdab0301ddea2e07b\": container with ID starting with 5c6cab5622103a6a77631945133b52c00af4530b77736c2bdab0301ddea2e07b not found: ID does not exist" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.320591 4558 scope.go:117] "RemoveContainer" containerID="82fae930e0ba2d4c5b5441974cd5c07a1f62d67af7c19b56c390f771baa7b88c" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.321837 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"82fae930e0ba2d4c5b5441974cd5c07a1f62d67af7c19b56c390f771baa7b88c"} err="failed to get container status \"82fae930e0ba2d4c5b5441974cd5c07a1f62d67af7c19b56c390f771baa7b88c\": rpc error: code = NotFound desc = could not find container \"82fae930e0ba2d4c5b5441974cd5c07a1f62d67af7c19b56c390f771baa7b88c\": container with ID starting with 82fae930e0ba2d4c5b5441974cd5c07a1f62d67af7c19b56c390f771baa7b88c not found: ID does not exist" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.321875 4558 scope.go:117] "RemoveContainer" containerID="4179fc735effbae6159a69c9ca68266281ba202cd4f205078b6baf7d7940eb2a" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.322523 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4179fc735effbae6159a69c9ca68266281ba202cd4f205078b6baf7d7940eb2a"} err="failed to get container status \"4179fc735effbae6159a69c9ca68266281ba202cd4f205078b6baf7d7940eb2a\": rpc error: code = NotFound desc = could not find container \"4179fc735effbae6159a69c9ca68266281ba202cd4f205078b6baf7d7940eb2a\": container with ID starting with 4179fc735effbae6159a69c9ca68266281ba202cd4f205078b6baf7d7940eb2a not found: ID does not exist" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.322573 4558 scope.go:117] "RemoveContainer" containerID="127398378ff58fceaa81c387bc824a1d5d37e0bf66fbdf7b76a1730850e6f86f" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.323234 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"127398378ff58fceaa81c387bc824a1d5d37e0bf66fbdf7b76a1730850e6f86f"} err="failed to get container status \"127398378ff58fceaa81c387bc824a1d5d37e0bf66fbdf7b76a1730850e6f86f\": rpc error: code = NotFound desc = could not find container \"127398378ff58fceaa81c387bc824a1d5d37e0bf66fbdf7b76a1730850e6f86f\": container with ID starting with 127398378ff58fceaa81c387bc824a1d5d37e0bf66fbdf7b76a1730850e6f86f not found: ID does not exist" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.323260 4558 scope.go:117] "RemoveContainer" containerID="5c6cab5622103a6a77631945133b52c00af4530b77736c2bdab0301ddea2e07b" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.323965 4558 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"5c6cab5622103a6a77631945133b52c00af4530b77736c2bdab0301ddea2e07b"} err="failed to get container status \"5c6cab5622103a6a77631945133b52c00af4530b77736c2bdab0301ddea2e07b\": rpc error: code = NotFound desc = could not find container \"5c6cab5622103a6a77631945133b52c00af4530b77736c2bdab0301ddea2e07b\": container with ID starting with 5c6cab5622103a6a77631945133b52c00af4530b77736c2bdab0301ddea2e07b not found: ID does not exist" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.326201 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-98t2c\" (UniqueName: \"kubernetes.io/projected/81080ef7-fd3d-487f-b6aa-0fb9eaebc855-kube-api-access-98t2c\") pod \"cinder-scheduler-0\" (UID: \"81080ef7-fd3d-487f-b6aa-0fb9eaebc855\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.326689 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/81080ef7-fd3d-487f-b6aa-0fb9eaebc855-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"81080ef7-fd3d-487f-b6aa-0fb9eaebc855\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.326800 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81080ef7-fd3d-487f-b6aa-0fb9eaebc855-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"81080ef7-fd3d-487f-b6aa-0fb9eaebc855\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.326958 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/81080ef7-fd3d-487f-b6aa-0fb9eaebc855-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"81080ef7-fd3d-487f-b6aa-0fb9eaebc855\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.327077 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/81080ef7-fd3d-487f-b6aa-0fb9eaebc855-scripts\") pod \"cinder-scheduler-0\" (UID: \"81080ef7-fd3d-487f-b6aa-0fb9eaebc855\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.327249 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ht6g2\" (UniqueName: \"kubernetes.io/projected/227c3622-b4eb-4862-9b18-a90df7ea75b9-kube-api-access-ht6g2\") pod \"keystone-bootstrap-7ww48\" (UID: \"227c3622-b4eb-4862-9b18-a90df7ea75b9\") " pod="openstack-kuttl-tests/keystone-bootstrap-7ww48" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.327533 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/227c3622-b4eb-4862-9b18-a90df7ea75b9-combined-ca-bundle\") pod \"keystone-bootstrap-7ww48\" (UID: \"227c3622-b4eb-4862-9b18-a90df7ea75b9\") " pod="openstack-kuttl-tests/keystone-bootstrap-7ww48" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.327611 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/81080ef7-fd3d-487f-b6aa-0fb9eaebc855-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"81080ef7-fd3d-487f-b6aa-0fb9eaebc855\") " pod="openstack-kuttl-tests/cinder-scheduler-0" 
Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.327789 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81080ef7-fd3d-487f-b6aa-0fb9eaebc855-config-data\") pod \"cinder-scheduler-0\" (UID: \"81080ef7-fd3d-487f-b6aa-0fb9eaebc855\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.327829 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/227c3622-b4eb-4862-9b18-a90df7ea75b9-fernet-keys\") pod \"keystone-bootstrap-7ww48\" (UID: \"227c3622-b4eb-4862-9b18-a90df7ea75b9\") " pod="openstack-kuttl-tests/keystone-bootstrap-7ww48" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.327877 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/227c3622-b4eb-4862-9b18-a90df7ea75b9-config-data\") pod \"keystone-bootstrap-7ww48\" (UID: \"227c3622-b4eb-4862-9b18-a90df7ea75b9\") " pod="openstack-kuttl-tests/keystone-bootstrap-7ww48" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.327956 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/227c3622-b4eb-4862-9b18-a90df7ea75b9-credential-keys\") pod \"keystone-bootstrap-7ww48\" (UID: \"227c3622-b4eb-4862-9b18-a90df7ea75b9\") " pod="openstack-kuttl-tests/keystone-bootstrap-7ww48" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.327981 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/227c3622-b4eb-4862-9b18-a90df7ea75b9-scripts\") pod \"keystone-bootstrap-7ww48\" (UID: \"227c3622-b4eb-4862-9b18-a90df7ea75b9\") " pod="openstack-kuttl-tests/keystone-bootstrap-7ww48" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.332770 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81080ef7-fd3d-487f-b6aa-0fb9eaebc855-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"81080ef7-fd3d-487f-b6aa-0fb9eaebc855\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.333532 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/81080ef7-fd3d-487f-b6aa-0fb9eaebc855-scripts\") pod \"cinder-scheduler-0\" (UID: \"81080ef7-fd3d-487f-b6aa-0fb9eaebc855\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.333763 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/227c3622-b4eb-4862-9b18-a90df7ea75b9-fernet-keys\") pod \"keystone-bootstrap-7ww48\" (UID: \"227c3622-b4eb-4862-9b18-a90df7ea75b9\") " pod="openstack-kuttl-tests/keystone-bootstrap-7ww48" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.333844 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/227c3622-b4eb-4862-9b18-a90df7ea75b9-scripts\") pod \"keystone-bootstrap-7ww48\" (UID: \"227c3622-b4eb-4862-9b18-a90df7ea75b9\") " pod="openstack-kuttl-tests/keystone-bootstrap-7ww48" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.336267 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/227c3622-b4eb-4862-9b18-a90df7ea75b9-config-data\") pod \"keystone-bootstrap-7ww48\" (UID: \"227c3622-b4eb-4862-9b18-a90df7ea75b9\") " pod="openstack-kuttl-tests/keystone-bootstrap-7ww48" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.338107 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/81080ef7-fd3d-487f-b6aa-0fb9eaebc855-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"81080ef7-fd3d-487f-b6aa-0fb9eaebc855\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.340796 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/227c3622-b4eb-4862-9b18-a90df7ea75b9-credential-keys\") pod \"keystone-bootstrap-7ww48\" (UID: \"227c3622-b4eb-4862-9b18-a90df7ea75b9\") " pod="openstack-kuttl-tests/keystone-bootstrap-7ww48" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.340910 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81080ef7-fd3d-487f-b6aa-0fb9eaebc855-config-data\") pod \"cinder-scheduler-0\" (UID: \"81080ef7-fd3d-487f-b6aa-0fb9eaebc855\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.341052 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/227c3622-b4eb-4862-9b18-a90df7ea75b9-combined-ca-bundle\") pod \"keystone-bootstrap-7ww48\" (UID: \"227c3622-b4eb-4862-9b18-a90df7ea75b9\") " pod="openstack-kuttl-tests/keystone-bootstrap-7ww48" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.342663 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ht6g2\" (UniqueName: \"kubernetes.io/projected/227c3622-b4eb-4862-9b18-a90df7ea75b9-kube-api-access-ht6g2\") pod \"keystone-bootstrap-7ww48\" (UID: \"227c3622-b4eb-4862-9b18-a90df7ea75b9\") " pod="openstack-kuttl-tests/keystone-bootstrap-7ww48" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.343078 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-98t2c\" (UniqueName: \"kubernetes.io/projected/81080ef7-fd3d-487f-b6aa-0fb9eaebc855-kube-api-access-98t2c\") pod \"cinder-scheduler-0\" (UID: \"81080ef7-fd3d-487f-b6aa-0fb9eaebc855\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.410594 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-7ww48" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.418929 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.430184 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9d8e0386-7f68-436f-8a8a-4d79ec346619-etc-machine-id\") pod \"cinder-api-0\" (UID: \"9d8e0386-7f68-436f-8a8a-4d79ec346619\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.430255 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9d8e0386-7f68-436f-8a8a-4d79ec346619-logs\") pod \"cinder-api-0\" (UID: \"9d8e0386-7f68-436f-8a8a-4d79ec346619\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.430290 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9d8e0386-7f68-436f-8a8a-4d79ec346619-config-data-custom\") pod \"cinder-api-0\" (UID: \"9d8e0386-7f68-436f-8a8a-4d79ec346619\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.430341 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v8b7n\" (UniqueName: \"kubernetes.io/projected/9d8e0386-7f68-436f-8a8a-4d79ec346619-kube-api-access-v8b7n\") pod \"cinder-api-0\" (UID: \"9d8e0386-7f68-436f-8a8a-4d79ec346619\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.430426 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d8e0386-7f68-436f-8a8a-4d79ec346619-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"9d8e0386-7f68-436f-8a8a-4d79ec346619\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.430442 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9d8e0386-7f68-436f-8a8a-4d79ec346619-scripts\") pod \"cinder-api-0\" (UID: \"9d8e0386-7f68-436f-8a8a-4d79ec346619\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.430484 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d8e0386-7f68-436f-8a8a-4d79ec346619-config-data\") pod \"cinder-api-0\" (UID: \"9d8e0386-7f68-436f-8a8a-4d79ec346619\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.441837 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.451231 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.473898 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.476743 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.478501 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.478735 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.482101 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.488083 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-69d7f65964-r46nm"] Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.531992 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d8e0386-7f68-436f-8a8a-4d79ec346619-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"9d8e0386-7f68-436f-8a8a-4d79ec346619\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.532036 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9d8e0386-7f68-436f-8a8a-4d79ec346619-scripts\") pod \"cinder-api-0\" (UID: \"9d8e0386-7f68-436f-8a8a-4d79ec346619\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.532090 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d8e0386-7f68-436f-8a8a-4d79ec346619-config-data\") pod \"cinder-api-0\" (UID: \"9d8e0386-7f68-436f-8a8a-4d79ec346619\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.532208 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9d8e0386-7f68-436f-8a8a-4d79ec346619-etc-machine-id\") pod \"cinder-api-0\" (UID: \"9d8e0386-7f68-436f-8a8a-4d79ec346619\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.532257 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9d8e0386-7f68-436f-8a8a-4d79ec346619-logs\") pod \"cinder-api-0\" (UID: \"9d8e0386-7f68-436f-8a8a-4d79ec346619\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.532286 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9d8e0386-7f68-436f-8a8a-4d79ec346619-config-data-custom\") pod \"cinder-api-0\" (UID: \"9d8e0386-7f68-436f-8a8a-4d79ec346619\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.532345 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v8b7n\" (UniqueName: \"kubernetes.io/projected/9d8e0386-7f68-436f-8a8a-4d79ec346619-kube-api-access-v8b7n\") pod \"cinder-api-0\" (UID: \"9d8e0386-7f68-436f-8a8a-4d79ec346619\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.532943 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: 
\"kubernetes.io/host-path/9d8e0386-7f68-436f-8a8a-4d79ec346619-etc-machine-id\") pod \"cinder-api-0\" (UID: \"9d8e0386-7f68-436f-8a8a-4d79ec346619\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.533790 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9d8e0386-7f68-436f-8a8a-4d79ec346619-logs\") pod \"cinder-api-0\" (UID: \"9d8e0386-7f68-436f-8a8a-4d79ec346619\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.542106 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9d8e0386-7f68-436f-8a8a-4d79ec346619-scripts\") pod \"cinder-api-0\" (UID: \"9d8e0386-7f68-436f-8a8a-4d79ec346619\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.549081 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d8e0386-7f68-436f-8a8a-4d79ec346619-config-data\") pod \"cinder-api-0\" (UID: \"9d8e0386-7f68-436f-8a8a-4d79ec346619\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.553082 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d8e0386-7f68-436f-8a8a-4d79ec346619-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"9d8e0386-7f68-436f-8a8a-4d79ec346619\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.553099 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v8b7n\" (UniqueName: \"kubernetes.io/projected/9d8e0386-7f68-436f-8a8a-4d79ec346619-kube-api-access-v8b7n\") pod \"cinder-api-0\" (UID: \"9d8e0386-7f68-436f-8a8a-4d79ec346619\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.557112 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9d8e0386-7f68-436f-8a8a-4d79ec346619-config-data-custom\") pod \"cinder-api-0\" (UID: \"9d8e0386-7f68-436f-8a8a-4d79ec346619\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.615638 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.630792 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1ae5026e-37c6-453a-943c-9dfd8cebb2c2" path="/var/lib/kubelet/pods/1ae5026e-37c6-453a-943c-9dfd8cebb2c2/volumes" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.631655 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="427dd0db-2906-44d3-816e-a09f7cdd3a12" path="/var/lib/kubelet/pods/427dd0db-2906-44d3-816e-a09f7cdd3a12/volumes" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.634217 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2-run-httpd\") pod \"ceilometer-0\" (UID: \"32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.634264 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t47hk\" (UniqueName: \"kubernetes.io/projected/32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2-kube-api-access-t47hk\") pod \"ceilometer-0\" (UID: \"32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.634347 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.634373 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2-log-httpd\") pod \"ceilometer-0\" (UID: \"32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.634394 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.634425 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2-scripts\") pod \"ceilometer-0\" (UID: \"32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.634470 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2-config-data\") pod \"ceilometer-0\" (UID: \"32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.741447 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: 
\"32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.741508 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2-log-httpd\") pod \"ceilometer-0\" (UID: \"32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.741531 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.741623 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2-scripts\") pod \"ceilometer-0\" (UID: \"32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.742143 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2-log-httpd\") pod \"ceilometer-0\" (UID: \"32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.747284 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.749563 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2-config-data\") pod \"ceilometer-0\" (UID: \"32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.749734 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2-run-httpd\") pod \"ceilometer-0\" (UID: \"32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.749759 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t47hk\" (UniqueName: \"kubernetes.io/projected/32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2-kube-api-access-t47hk\") pod \"ceilometer-0\" (UID: \"32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.750135 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2-run-httpd\") pod \"ceilometer-0\" (UID: \"32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.762181 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:39:46 crc 
kubenswrapper[4558]: I0120 17:39:46.762953 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.772743 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.775009 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2-scripts\") pod \"ceilometer-0\" (UID: \"32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.777903 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t47hk\" (UniqueName: \"kubernetes.io/projected/32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2-kube-api-access-t47hk\") pod \"ceilometer-0\" (UID: \"32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:39:46 crc kubenswrapper[4558]: I0120 17:39:46.780796 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2-config-data\") pod \"ceilometer-0\" (UID: \"32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:39:47 crc kubenswrapper[4558]: I0120 17:39:47.031569 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:39:47 crc kubenswrapper[4558]: I0120 17:39:47.041970 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-7ww48"] Jan 20 17:39:47 crc kubenswrapper[4558]: I0120 17:39:47.124052 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:39:47 crc kubenswrapper[4558]: I0120 17:39:47.142266 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"81080ef7-fd3d-487f-b6aa-0fb9eaebc855","Type":"ContainerStarted","Data":"78f5b8223baae1365eb10c24bd5fee398f1120188db2f2929d2b878c06d4e9b9"} Jan 20 17:39:47 crc kubenswrapper[4558]: I0120 17:39:47.145769 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"2328ffd8-4eb7-41b8-abfb-b5ee5a12f117","Type":"ContainerStarted","Data":"19edd3c6ba732244695f4759d0145fea4e245766ce9f1373b9b7c7005a8cd1a2"} Jan 20 17:39:47 crc kubenswrapper[4558]: I0120 17:39:47.149059 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-7ww48" event={"ID":"227c3622-b4eb-4862-9b18-a90df7ea75b9","Type":"ContainerStarted","Data":"0a7ed0e81119a78626ce3742f6e9e51a55adcd610bda36f42e6739e5bb4253ad"} Jan 20 17:39:47 crc kubenswrapper[4558]: I0120 17:39:47.159920 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-69d7f65964-r46nm" event={"ID":"bf32468e-b0a7-4b81-bb49-c65a95997903","Type":"ContainerStarted","Data":"fc1f3875679e2cd5005493bd3bd21c2f6acc53a59f363480f76d0371a068bda4"} Jan 20 17:39:47 crc kubenswrapper[4558]: I0120 17:39:47.159949 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-kuttl-tests/placement-69d7f65964-r46nm" event={"ID":"bf32468e-b0a7-4b81-bb49-c65a95997903","Type":"ContainerStarted","Data":"1bea9ca407d948182088942a7baaa1288dbc8ed20180d840df071edc53c59c24"} Jan 20 17:39:47 crc kubenswrapper[4558]: I0120 17:39:47.159960 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-69d7f65964-r46nm" event={"ID":"bf32468e-b0a7-4b81-bb49-c65a95997903","Type":"ContainerStarted","Data":"4512eb627e22fd848aff2b908f742418cbcbdb935ba78319df317f4fe0107521"} Jan 20 17:39:47 crc kubenswrapper[4558]: I0120 17:39:47.159974 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/placement-69d7f65964-r46nm" Jan 20 17:39:47 crc kubenswrapper[4558]: I0120 17:39:47.159997 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/placement-69d7f65964-r46nm" Jan 20 17:39:47 crc kubenswrapper[4558]: I0120 17:39:47.167826 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-external-api-0" podStartSLOduration=4.167803101 podStartE2EDuration="4.167803101s" podCreationTimestamp="2026-01-20 17:39:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:39:47.164137398 +0000 UTC m=+3480.924475366" watchObservedRunningTime="2026-01-20 17:39:47.167803101 +0000 UTC m=+3480.928141068" Jan 20 17:39:47 crc kubenswrapper[4558]: I0120 17:39:47.177037 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-85684d4d5b-ffd28" event={"ID":"68263afb-f6b4-4665-bd7d-6f3b622c5640","Type":"ContainerStarted","Data":"6173df580c0d2dc49bd3a6499431a0b48e8fd9a9364b67713c6871a2fee15ab6"} Jan 20 17:39:47 crc kubenswrapper[4558]: I0120 17:39:47.177091 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-85684d4d5b-ffd28" event={"ID":"68263afb-f6b4-4665-bd7d-6f3b622c5640","Type":"ContainerStarted","Data":"51207c618252938d282ce839f6d1b1d1e55bff3602a6cffa6c8e6977a2bf0fc0"} Jan 20 17:39:47 crc kubenswrapper[4558]: I0120 17:39:47.179310 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/placement-85684d4d5b-ffd28" Jan 20 17:39:47 crc kubenswrapper[4558]: I0120 17:39:47.180198 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/placement-85684d4d5b-ffd28" Jan 20 17:39:47 crc kubenswrapper[4558]: I0120 17:39:47.210940 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"92c317c8-cf30-4046-af0b-8f6c36f69000","Type":"ContainerStarted","Data":"0c31383a0ae8e3556df5e5d344c5e4300ff998da9a0dcb05e9d60b095898581e"} Jan 20 17:39:47 crc kubenswrapper[4558]: I0120 17:39:47.220590 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:39:47 crc kubenswrapper[4558]: I0120 17:39:47.221318 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/placement-69d7f65964-r46nm" podStartSLOduration=2.221295299 podStartE2EDuration="2.221295299s" podCreationTimestamp="2026-01-20 17:39:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:39:47.191784461 +0000 UTC m=+3480.952122428" watchObservedRunningTime="2026-01-20 17:39:47.221295299 
+0000 UTC m=+3480.981633266" Jan 20 17:39:47 crc kubenswrapper[4558]: W0120 17:39:47.238616 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9d8e0386_7f68_436f_8a8a_4d79ec346619.slice/crio-ed9e9ae1da41245717b6c23a3a531fa27db15d89dce18231370de36a62198e30 WatchSource:0}: Error finding container ed9e9ae1da41245717b6c23a3a531fa27db15d89dce18231370de36a62198e30: Status 404 returned error can't find the container with id ed9e9ae1da41245717b6c23a3a531fa27db15d89dce18231370de36a62198e30 Jan 20 17:39:47 crc kubenswrapper[4558]: I0120 17:39:47.267922 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/placement-85684d4d5b-ffd28" podStartSLOduration=3.267899526 podStartE2EDuration="3.267899526s" podCreationTimestamp="2026-01-20 17:39:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:39:47.217949268 +0000 UTC m=+3480.978287235" watchObservedRunningTime="2026-01-20 17:39:47.267899526 +0000 UTC m=+3481.028237493" Jan 20 17:39:47 crc kubenswrapper[4558]: I0120 17:39:47.277182 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-internal-api-0" podStartSLOduration=4.277147175 podStartE2EDuration="4.277147175s" podCreationTimestamp="2026-01-20 17:39:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:39:47.238247142 +0000 UTC m=+3480.998585109" watchObservedRunningTime="2026-01-20 17:39:47.277147175 +0000 UTC m=+3481.037485141" Jan 20 17:39:47 crc kubenswrapper[4558]: I0120 17:39:47.574409 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:39:47 crc kubenswrapper[4558]: W0120 17:39:47.580236 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod32ebfd5d_3392_4ab0_b1f7_1c12001b2ef2.slice/crio-33e8449b98d41f9cae4c7cb96d6febee4c0e3a93c6b8ac12297d03f549d1a23b WatchSource:0}: Error finding container 33e8449b98d41f9cae4c7cb96d6febee4c0e3a93c6b8ac12297d03f549d1a23b: Status 404 returned error can't find the container with id 33e8449b98d41f9cae4c7cb96d6febee4c0e3a93c6b8ac12297d03f549d1a23b Jan 20 17:39:47 crc kubenswrapper[4558]: I0120 17:39:47.700517 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-db-sync-ndrzj" Jan 20 17:39:47 crc kubenswrapper[4558]: I0120 17:39:47.773918 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pl7ht\" (UniqueName: \"kubernetes.io/projected/86a50da2-ef44-4072-b680-d56cc4ee67c5-kube-api-access-pl7ht\") pod \"86a50da2-ef44-4072-b680-d56cc4ee67c5\" (UID: \"86a50da2-ef44-4072-b680-d56cc4ee67c5\") " Jan 20 17:39:47 crc kubenswrapper[4558]: I0120 17:39:47.774010 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/86a50da2-ef44-4072-b680-d56cc4ee67c5-db-sync-config-data\") pod \"86a50da2-ef44-4072-b680-d56cc4ee67c5\" (UID: \"86a50da2-ef44-4072-b680-d56cc4ee67c5\") " Jan 20 17:39:47 crc kubenswrapper[4558]: I0120 17:39:47.774121 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/86a50da2-ef44-4072-b680-d56cc4ee67c5-combined-ca-bundle\") pod \"86a50da2-ef44-4072-b680-d56cc4ee67c5\" (UID: \"86a50da2-ef44-4072-b680-d56cc4ee67c5\") " Jan 20 17:39:47 crc kubenswrapper[4558]: I0120 17:39:47.788256 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/86a50da2-ef44-4072-b680-d56cc4ee67c5-kube-api-access-pl7ht" (OuterVolumeSpecName: "kube-api-access-pl7ht") pod "86a50da2-ef44-4072-b680-d56cc4ee67c5" (UID: "86a50da2-ef44-4072-b680-d56cc4ee67c5"). InnerVolumeSpecName "kube-api-access-pl7ht". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:39:47 crc kubenswrapper[4558]: I0120 17:39:47.809340 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/86a50da2-ef44-4072-b680-d56cc4ee67c5-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "86a50da2-ef44-4072-b680-d56cc4ee67c5" (UID: "86a50da2-ef44-4072-b680-d56cc4ee67c5"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:39:47 crc kubenswrapper[4558]: I0120 17:39:47.850326 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/86a50da2-ef44-4072-b680-d56cc4ee67c5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "86a50da2-ef44-4072-b680-d56cc4ee67c5" (UID: "86a50da2-ef44-4072-b680-d56cc4ee67c5"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:39:47 crc kubenswrapper[4558]: I0120 17:39:47.879845 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pl7ht\" (UniqueName: \"kubernetes.io/projected/86a50da2-ef44-4072-b680-d56cc4ee67c5-kube-api-access-pl7ht\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:47 crc kubenswrapper[4558]: I0120 17:39:47.879893 4558 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/86a50da2-ef44-4072-b680-d56cc4ee67c5-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:47 crc kubenswrapper[4558]: I0120 17:39:47.879905 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/86a50da2-ef44-4072-b680-d56cc4ee67c5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:48 crc kubenswrapper[4558]: I0120 17:39:48.186557 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-5f469dd86b-h6dq9"] Jan 20 17:39:48 crc kubenswrapper[4558]: E0120 17:39:48.187270 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86a50da2-ef44-4072-b680-d56cc4ee67c5" containerName="barbican-db-sync" Jan 20 17:39:48 crc kubenswrapper[4558]: I0120 17:39:48.187288 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="86a50da2-ef44-4072-b680-d56cc4ee67c5" containerName="barbican-db-sync" Jan 20 17:39:48 crc kubenswrapper[4558]: I0120 17:39:48.187466 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="86a50da2-ef44-4072-b680-d56cc4ee67c5" containerName="barbican-db-sync" Jan 20 17:39:48 crc kubenswrapper[4558]: I0120 17:39:48.189608 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-5f469dd86b-h6dq9" Jan 20 17:39:48 crc kubenswrapper[4558]: I0120 17:39:48.197604 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-keystone-listener-config-data" Jan 20 17:39:48 crc kubenswrapper[4558]: I0120 17:39:48.205820 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-worker-6667c7bd65-7bw4v"] Jan 20 17:39:48 crc kubenswrapper[4558]: I0120 17:39:48.212278 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-6667c7bd65-7bw4v" Jan 20 17:39:48 crc kubenswrapper[4558]: I0120 17:39:48.218550 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-worker-config-data" Jan 20 17:39:48 crc kubenswrapper[4558]: I0120 17:39:48.229928 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-5f469dd86b-h6dq9"] Jan 20 17:39:48 crc kubenswrapper[4558]: I0120 17:39:48.254215 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-worker-6667c7bd65-7bw4v"] Jan 20 17:39:48 crc kubenswrapper[4558]: I0120 17:39:48.297598 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ba27bcbb-36f0-4575-afba-d6997d667fef-config-data\") pod \"barbican-worker-6667c7bd65-7bw4v\" (UID: \"ba27bcbb-36f0-4575-afba-d6997d667fef\") " pod="openstack-kuttl-tests/barbican-worker-6667c7bd65-7bw4v" Jan 20 17:39:48 crc kubenswrapper[4558]: I0120 17:39:48.297651 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba27bcbb-36f0-4575-afba-d6997d667fef-combined-ca-bundle\") pod \"barbican-worker-6667c7bd65-7bw4v\" (UID: \"ba27bcbb-36f0-4575-afba-d6997d667fef\") " pod="openstack-kuttl-tests/barbican-worker-6667c7bd65-7bw4v" Jan 20 17:39:48 crc kubenswrapper[4558]: I0120 17:39:48.297685 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dfa1a7bb-2900-49f0-94ee-fe789d4ffec8-logs\") pod \"barbican-keystone-listener-5f469dd86b-h6dq9\" (UID: \"dfa1a7bb-2900-49f0-94ee-fe789d4ffec8\") " pod="openstack-kuttl-tests/barbican-keystone-listener-5f469dd86b-h6dq9" Jan 20 17:39:48 crc kubenswrapper[4558]: I0120 17:39:48.297725 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ba27bcbb-36f0-4575-afba-d6997d667fef-config-data-custom\") pod \"barbican-worker-6667c7bd65-7bw4v\" (UID: \"ba27bcbb-36f0-4575-afba-d6997d667fef\") " pod="openstack-kuttl-tests/barbican-worker-6667c7bd65-7bw4v" Jan 20 17:39:48 crc kubenswrapper[4558]: I0120 17:39:48.297792 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dfa1a7bb-2900-49f0-94ee-fe789d4ffec8-combined-ca-bundle\") pod \"barbican-keystone-listener-5f469dd86b-h6dq9\" (UID: \"dfa1a7bb-2900-49f0-94ee-fe789d4ffec8\") " pod="openstack-kuttl-tests/barbican-keystone-listener-5f469dd86b-h6dq9" Jan 20 17:39:48 crc kubenswrapper[4558]: I0120 17:39:48.297889 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dh4jh\" (UniqueName: \"kubernetes.io/projected/ba27bcbb-36f0-4575-afba-d6997d667fef-kube-api-access-dh4jh\") pod \"barbican-worker-6667c7bd65-7bw4v\" (UID: \"ba27bcbb-36f0-4575-afba-d6997d667fef\") " pod="openstack-kuttl-tests/barbican-worker-6667c7bd65-7bw4v" Jan 20 17:39:48 crc kubenswrapper[4558]: I0120 17:39:48.297962 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-44k45\" (UniqueName: \"kubernetes.io/projected/dfa1a7bb-2900-49f0-94ee-fe789d4ffec8-kube-api-access-44k45\") pod 
\"barbican-keystone-listener-5f469dd86b-h6dq9\" (UID: \"dfa1a7bb-2900-49f0-94ee-fe789d4ffec8\") " pod="openstack-kuttl-tests/barbican-keystone-listener-5f469dd86b-h6dq9" Jan 20 17:39:48 crc kubenswrapper[4558]: I0120 17:39:48.297982 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/dfa1a7bb-2900-49f0-94ee-fe789d4ffec8-config-data-custom\") pod \"barbican-keystone-listener-5f469dd86b-h6dq9\" (UID: \"dfa1a7bb-2900-49f0-94ee-fe789d4ffec8\") " pod="openstack-kuttl-tests/barbican-keystone-listener-5f469dd86b-h6dq9" Jan 20 17:39:48 crc kubenswrapper[4558]: I0120 17:39:48.298058 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dfa1a7bb-2900-49f0-94ee-fe789d4ffec8-config-data\") pod \"barbican-keystone-listener-5f469dd86b-h6dq9\" (UID: \"dfa1a7bb-2900-49f0-94ee-fe789d4ffec8\") " pod="openstack-kuttl-tests/barbican-keystone-listener-5f469dd86b-h6dq9" Jan 20 17:39:48 crc kubenswrapper[4558]: I0120 17:39:48.298076 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ba27bcbb-36f0-4575-afba-d6997d667fef-logs\") pod \"barbican-worker-6667c7bd65-7bw4v\" (UID: \"ba27bcbb-36f0-4575-afba-d6997d667fef\") " pod="openstack-kuttl-tests/barbican-worker-6667c7bd65-7bw4v" Jan 20 17:39:48 crc kubenswrapper[4558]: I0120 17:39:48.306459 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-sync-ndrzj" event={"ID":"86a50da2-ef44-4072-b680-d56cc4ee67c5","Type":"ContainerDied","Data":"23f00391047468e4d9278d4c196659b045b6caece849b4789c2b2f0d6d4fecec"} Jan 20 17:39:48 crc kubenswrapper[4558]: I0120 17:39:48.306509 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="23f00391047468e4d9278d4c196659b045b6caece849b4789c2b2f0d6d4fecec" Jan 20 17:39:48 crc kubenswrapper[4558]: I0120 17:39:48.306586 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-db-sync-ndrzj" Jan 20 17:39:48 crc kubenswrapper[4558]: I0120 17:39:48.328189 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-api-7699c67d4d-fsf9d"] Jan 20 17:39:48 crc kubenswrapper[4558]: I0120 17:39:48.329786 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-api-7699c67d4d-fsf9d" Jan 20 17:39:48 crc kubenswrapper[4558]: I0120 17:39:48.333396 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"9d8e0386-7f68-436f-8a8a-4d79ec346619","Type":"ContainerStarted","Data":"559254e40d7dbfa36c2c33588e1128bf691456804f1b56b56b8cf9aa21e0bbde"} Jan 20 17:39:48 crc kubenswrapper[4558]: I0120 17:39:48.333445 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"9d8e0386-7f68-436f-8a8a-4d79ec346619","Type":"ContainerStarted","Data":"ed9e9ae1da41245717b6c23a3a531fa27db15d89dce18231370de36a62198e30"} Jan 20 17:39:48 crc kubenswrapper[4558]: I0120 17:39:48.339476 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-api-config-data" Jan 20 17:39:48 crc kubenswrapper[4558]: I0120 17:39:48.342925 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2","Type":"ContainerStarted","Data":"33e8449b98d41f9cae4c7cb96d6febee4c0e3a93c6b8ac12297d03f549d1a23b"} Jan 20 17:39:48 crc kubenswrapper[4558]: I0120 17:39:48.348117 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-api-7699c67d4d-fsf9d"] Jan 20 17:39:48 crc kubenswrapper[4558]: I0120 17:39:48.369076 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-7ww48" event={"ID":"227c3622-b4eb-4862-9b18-a90df7ea75b9","Type":"ContainerStarted","Data":"17c497a986172b78ad18acace156e9379a29097ef6815bcb71c2e62b15079de6"} Jan 20 17:39:48 crc kubenswrapper[4558]: I0120 17:39:48.402709 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/67e97807-4d7b-4c6c-8e65-148173bfe17c-logs\") pod \"barbican-api-7699c67d4d-fsf9d\" (UID: \"67e97807-4d7b-4c6c-8e65-148173bfe17c\") " pod="openstack-kuttl-tests/barbican-api-7699c67d4d-fsf9d" Jan 20 17:39:48 crc kubenswrapper[4558]: I0120 17:39:48.402761 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-44k45\" (UniqueName: \"kubernetes.io/projected/dfa1a7bb-2900-49f0-94ee-fe789d4ffec8-kube-api-access-44k45\") pod \"barbican-keystone-listener-5f469dd86b-h6dq9\" (UID: \"dfa1a7bb-2900-49f0-94ee-fe789d4ffec8\") " pod="openstack-kuttl-tests/barbican-keystone-listener-5f469dd86b-h6dq9" Jan 20 17:39:48 crc kubenswrapper[4558]: I0120 17:39:48.402789 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/dfa1a7bb-2900-49f0-94ee-fe789d4ffec8-config-data-custom\") pod \"barbican-keystone-listener-5f469dd86b-h6dq9\" (UID: \"dfa1a7bb-2900-49f0-94ee-fe789d4ffec8\") " pod="openstack-kuttl-tests/barbican-keystone-listener-5f469dd86b-h6dq9" Jan 20 17:39:48 crc kubenswrapper[4558]: I0120 17:39:48.402835 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67e97807-4d7b-4c6c-8e65-148173bfe17c-combined-ca-bundle\") pod \"barbican-api-7699c67d4d-fsf9d\" (UID: \"67e97807-4d7b-4c6c-8e65-148173bfe17c\") " pod="openstack-kuttl-tests/barbican-api-7699c67d4d-fsf9d" Jan 20 17:39:48 crc kubenswrapper[4558]: I0120 17:39:48.402871 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/dfa1a7bb-2900-49f0-94ee-fe789d4ffec8-config-data\") pod \"barbican-keystone-listener-5f469dd86b-h6dq9\" (UID: \"dfa1a7bb-2900-49f0-94ee-fe789d4ffec8\") " pod="openstack-kuttl-tests/barbican-keystone-listener-5f469dd86b-h6dq9" Jan 20 17:39:48 crc kubenswrapper[4558]: I0120 17:39:48.402900 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ba27bcbb-36f0-4575-afba-d6997d667fef-logs\") pod \"barbican-worker-6667c7bd65-7bw4v\" (UID: \"ba27bcbb-36f0-4575-afba-d6997d667fef\") " pod="openstack-kuttl-tests/barbican-worker-6667c7bd65-7bw4v" Jan 20 17:39:48 crc kubenswrapper[4558]: I0120 17:39:48.402939 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q77xc\" (UniqueName: \"kubernetes.io/projected/67e97807-4d7b-4c6c-8e65-148173bfe17c-kube-api-access-q77xc\") pod \"barbican-api-7699c67d4d-fsf9d\" (UID: \"67e97807-4d7b-4c6c-8e65-148173bfe17c\") " pod="openstack-kuttl-tests/barbican-api-7699c67d4d-fsf9d" Jan 20 17:39:48 crc kubenswrapper[4558]: I0120 17:39:48.402974 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ba27bcbb-36f0-4575-afba-d6997d667fef-config-data\") pod \"barbican-worker-6667c7bd65-7bw4v\" (UID: \"ba27bcbb-36f0-4575-afba-d6997d667fef\") " pod="openstack-kuttl-tests/barbican-worker-6667c7bd65-7bw4v" Jan 20 17:39:48 crc kubenswrapper[4558]: I0120 17:39:48.402994 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba27bcbb-36f0-4575-afba-d6997d667fef-combined-ca-bundle\") pod \"barbican-worker-6667c7bd65-7bw4v\" (UID: \"ba27bcbb-36f0-4575-afba-d6997d667fef\") " pod="openstack-kuttl-tests/barbican-worker-6667c7bd65-7bw4v" Jan 20 17:39:48 crc kubenswrapper[4558]: I0120 17:39:48.403015 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dfa1a7bb-2900-49f0-94ee-fe789d4ffec8-logs\") pod \"barbican-keystone-listener-5f469dd86b-h6dq9\" (UID: \"dfa1a7bb-2900-49f0-94ee-fe789d4ffec8\") " pod="openstack-kuttl-tests/barbican-keystone-listener-5f469dd86b-h6dq9" Jan 20 17:39:48 crc kubenswrapper[4558]: I0120 17:39:48.403043 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ba27bcbb-36f0-4575-afba-d6997d667fef-config-data-custom\") pod \"barbican-worker-6667c7bd65-7bw4v\" (UID: \"ba27bcbb-36f0-4575-afba-d6997d667fef\") " pod="openstack-kuttl-tests/barbican-worker-6667c7bd65-7bw4v" Jan 20 17:39:48 crc kubenswrapper[4558]: I0120 17:39:48.403069 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/67e97807-4d7b-4c6c-8e65-148173bfe17c-config-data-custom\") pod \"barbican-api-7699c67d4d-fsf9d\" (UID: \"67e97807-4d7b-4c6c-8e65-148173bfe17c\") " pod="openstack-kuttl-tests/barbican-api-7699c67d4d-fsf9d" Jan 20 17:39:48 crc kubenswrapper[4558]: I0120 17:39:48.403104 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dfa1a7bb-2900-49f0-94ee-fe789d4ffec8-combined-ca-bundle\") pod \"barbican-keystone-listener-5f469dd86b-h6dq9\" (UID: \"dfa1a7bb-2900-49f0-94ee-fe789d4ffec8\") " 
pod="openstack-kuttl-tests/barbican-keystone-listener-5f469dd86b-h6dq9" Jan 20 17:39:48 crc kubenswrapper[4558]: I0120 17:39:48.403135 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/67e97807-4d7b-4c6c-8e65-148173bfe17c-config-data\") pod \"barbican-api-7699c67d4d-fsf9d\" (UID: \"67e97807-4d7b-4c6c-8e65-148173bfe17c\") " pod="openstack-kuttl-tests/barbican-api-7699c67d4d-fsf9d" Jan 20 17:39:48 crc kubenswrapper[4558]: I0120 17:39:48.403180 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dh4jh\" (UniqueName: \"kubernetes.io/projected/ba27bcbb-36f0-4575-afba-d6997d667fef-kube-api-access-dh4jh\") pod \"barbican-worker-6667c7bd65-7bw4v\" (UID: \"ba27bcbb-36f0-4575-afba-d6997d667fef\") " pod="openstack-kuttl-tests/barbican-worker-6667c7bd65-7bw4v" Jan 20 17:39:48 crc kubenswrapper[4558]: I0120 17:39:48.404084 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dfa1a7bb-2900-49f0-94ee-fe789d4ffec8-logs\") pod \"barbican-keystone-listener-5f469dd86b-h6dq9\" (UID: \"dfa1a7bb-2900-49f0-94ee-fe789d4ffec8\") " pod="openstack-kuttl-tests/barbican-keystone-listener-5f469dd86b-h6dq9" Jan 20 17:39:48 crc kubenswrapper[4558]: I0120 17:39:48.405555 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ba27bcbb-36f0-4575-afba-d6997d667fef-logs\") pod \"barbican-worker-6667c7bd65-7bw4v\" (UID: \"ba27bcbb-36f0-4575-afba-d6997d667fef\") " pod="openstack-kuttl-tests/barbican-worker-6667c7bd65-7bw4v" Jan 20 17:39:48 crc kubenswrapper[4558]: I0120 17:39:48.414951 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dfa1a7bb-2900-49f0-94ee-fe789d4ffec8-config-data\") pod \"barbican-keystone-listener-5f469dd86b-h6dq9\" (UID: \"dfa1a7bb-2900-49f0-94ee-fe789d4ffec8\") " pod="openstack-kuttl-tests/barbican-keystone-listener-5f469dd86b-h6dq9" Jan 20 17:39:48 crc kubenswrapper[4558]: I0120 17:39:48.416009 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-44k45\" (UniqueName: \"kubernetes.io/projected/dfa1a7bb-2900-49f0-94ee-fe789d4ffec8-kube-api-access-44k45\") pod \"barbican-keystone-listener-5f469dd86b-h6dq9\" (UID: \"dfa1a7bb-2900-49f0-94ee-fe789d4ffec8\") " pod="openstack-kuttl-tests/barbican-keystone-listener-5f469dd86b-h6dq9" Jan 20 17:39:48 crc kubenswrapper[4558]: I0120 17:39:48.416220 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/dfa1a7bb-2900-49f0-94ee-fe789d4ffec8-config-data-custom\") pod \"barbican-keystone-listener-5f469dd86b-h6dq9\" (UID: \"dfa1a7bb-2900-49f0-94ee-fe789d4ffec8\") " pod="openstack-kuttl-tests/barbican-keystone-listener-5f469dd86b-h6dq9" Jan 20 17:39:48 crc kubenswrapper[4558]: I0120 17:39:48.417006 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba27bcbb-36f0-4575-afba-d6997d667fef-combined-ca-bundle\") pod \"barbican-worker-6667c7bd65-7bw4v\" (UID: \"ba27bcbb-36f0-4575-afba-d6997d667fef\") " pod="openstack-kuttl-tests/barbican-worker-6667c7bd65-7bw4v" Jan 20 17:39:48 crc kubenswrapper[4558]: I0120 17:39:48.417420 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/ba27bcbb-36f0-4575-afba-d6997d667fef-config-data-custom\") pod \"barbican-worker-6667c7bd65-7bw4v\" (UID: \"ba27bcbb-36f0-4575-afba-d6997d667fef\") " pod="openstack-kuttl-tests/barbican-worker-6667c7bd65-7bw4v" Jan 20 17:39:48 crc kubenswrapper[4558]: I0120 17:39:48.417622 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ba27bcbb-36f0-4575-afba-d6997d667fef-config-data\") pod \"barbican-worker-6667c7bd65-7bw4v\" (UID: \"ba27bcbb-36f0-4575-afba-d6997d667fef\") " pod="openstack-kuttl-tests/barbican-worker-6667c7bd65-7bw4v" Jan 20 17:39:48 crc kubenswrapper[4558]: I0120 17:39:48.420947 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dfa1a7bb-2900-49f0-94ee-fe789d4ffec8-combined-ca-bundle\") pod \"barbican-keystone-listener-5f469dd86b-h6dq9\" (UID: \"dfa1a7bb-2900-49f0-94ee-fe789d4ffec8\") " pod="openstack-kuttl-tests/barbican-keystone-listener-5f469dd86b-h6dq9" Jan 20 17:39:48 crc kubenswrapper[4558]: I0120 17:39:48.421683 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/keystone-bootstrap-7ww48" podStartSLOduration=3.4216695599999998 podStartE2EDuration="3.42166956s" podCreationTimestamp="2026-01-20 17:39:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:39:48.383907037 +0000 UTC m=+3482.144245005" watchObservedRunningTime="2026-01-20 17:39:48.42166956 +0000 UTC m=+3482.182007528" Jan 20 17:39:48 crc kubenswrapper[4558]: I0120 17:39:48.427650 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dh4jh\" (UniqueName: \"kubernetes.io/projected/ba27bcbb-36f0-4575-afba-d6997d667fef-kube-api-access-dh4jh\") pod \"barbican-worker-6667c7bd65-7bw4v\" (UID: \"ba27bcbb-36f0-4575-afba-d6997d667fef\") " pod="openstack-kuttl-tests/barbican-worker-6667c7bd65-7bw4v" Jan 20 17:39:48 crc kubenswrapper[4558]: I0120 17:39:48.504914 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/67e97807-4d7b-4c6c-8e65-148173bfe17c-config-data\") pod \"barbican-api-7699c67d4d-fsf9d\" (UID: \"67e97807-4d7b-4c6c-8e65-148173bfe17c\") " pod="openstack-kuttl-tests/barbican-api-7699c67d4d-fsf9d" Jan 20 17:39:48 crc kubenswrapper[4558]: I0120 17:39:48.505057 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/67e97807-4d7b-4c6c-8e65-148173bfe17c-logs\") pod \"barbican-api-7699c67d4d-fsf9d\" (UID: \"67e97807-4d7b-4c6c-8e65-148173bfe17c\") " pod="openstack-kuttl-tests/barbican-api-7699c67d4d-fsf9d" Jan 20 17:39:48 crc kubenswrapper[4558]: I0120 17:39:48.505159 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67e97807-4d7b-4c6c-8e65-148173bfe17c-combined-ca-bundle\") pod \"barbican-api-7699c67d4d-fsf9d\" (UID: \"67e97807-4d7b-4c6c-8e65-148173bfe17c\") " pod="openstack-kuttl-tests/barbican-api-7699c67d4d-fsf9d" Jan 20 17:39:48 crc kubenswrapper[4558]: I0120 17:39:48.506065 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/67e97807-4d7b-4c6c-8e65-148173bfe17c-logs\") pod \"barbican-api-7699c67d4d-fsf9d\" (UID: \"67e97807-4d7b-4c6c-8e65-148173bfe17c\") " 
pod="openstack-kuttl-tests/barbican-api-7699c67d4d-fsf9d" Jan 20 17:39:48 crc kubenswrapper[4558]: I0120 17:39:48.506584 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q77xc\" (UniqueName: \"kubernetes.io/projected/67e97807-4d7b-4c6c-8e65-148173bfe17c-kube-api-access-q77xc\") pod \"barbican-api-7699c67d4d-fsf9d\" (UID: \"67e97807-4d7b-4c6c-8e65-148173bfe17c\") " pod="openstack-kuttl-tests/barbican-api-7699c67d4d-fsf9d" Jan 20 17:39:48 crc kubenswrapper[4558]: I0120 17:39:48.508048 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/67e97807-4d7b-4c6c-8e65-148173bfe17c-config-data-custom\") pod \"barbican-api-7699c67d4d-fsf9d\" (UID: \"67e97807-4d7b-4c6c-8e65-148173bfe17c\") " pod="openstack-kuttl-tests/barbican-api-7699c67d4d-fsf9d" Jan 20 17:39:48 crc kubenswrapper[4558]: I0120 17:39:48.519765 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67e97807-4d7b-4c6c-8e65-148173bfe17c-combined-ca-bundle\") pod \"barbican-api-7699c67d4d-fsf9d\" (UID: \"67e97807-4d7b-4c6c-8e65-148173bfe17c\") " pod="openstack-kuttl-tests/barbican-api-7699c67d4d-fsf9d" Jan 20 17:39:48 crc kubenswrapper[4558]: I0120 17:39:48.519874 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/67e97807-4d7b-4c6c-8e65-148173bfe17c-config-data-custom\") pod \"barbican-api-7699c67d4d-fsf9d\" (UID: \"67e97807-4d7b-4c6c-8e65-148173bfe17c\") " pod="openstack-kuttl-tests/barbican-api-7699c67d4d-fsf9d" Jan 20 17:39:48 crc kubenswrapper[4558]: I0120 17:39:48.520211 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/67e97807-4d7b-4c6c-8e65-148173bfe17c-config-data\") pod \"barbican-api-7699c67d4d-fsf9d\" (UID: \"67e97807-4d7b-4c6c-8e65-148173bfe17c\") " pod="openstack-kuttl-tests/barbican-api-7699c67d4d-fsf9d" Jan 20 17:39:48 crc kubenswrapper[4558]: I0120 17:39:48.520535 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-5f469dd86b-h6dq9" Jan 20 17:39:48 crc kubenswrapper[4558]: I0120 17:39:48.523612 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q77xc\" (UniqueName: \"kubernetes.io/projected/67e97807-4d7b-4c6c-8e65-148173bfe17c-kube-api-access-q77xc\") pod \"barbican-api-7699c67d4d-fsf9d\" (UID: \"67e97807-4d7b-4c6c-8e65-148173bfe17c\") " pod="openstack-kuttl-tests/barbican-api-7699c67d4d-fsf9d" Jan 20 17:39:48 crc kubenswrapper[4558]: I0120 17:39:48.528453 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-6667c7bd65-7bw4v" Jan 20 17:39:48 crc kubenswrapper[4558]: I0120 17:39:48.690709 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-api-7699c67d4d-fsf9d" Jan 20 17:39:49 crc kubenswrapper[4558]: I0120 17:39:49.038758 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-5f469dd86b-h6dq9"] Jan 20 17:39:49 crc kubenswrapper[4558]: W0120 17:39:49.082912 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podba27bcbb_36f0_4575_afba_d6997d667fef.slice/crio-fc3acc9f98565078ec2d7658865735dfb1b52da59b3f94de0815d13fac56297b WatchSource:0}: Error finding container fc3acc9f98565078ec2d7658865735dfb1b52da59b3f94de0815d13fac56297b: Status 404 returned error can't find the container with id fc3acc9f98565078ec2d7658865735dfb1b52da59b3f94de0815d13fac56297b Jan 20 17:39:49 crc kubenswrapper[4558]: I0120 17:39:49.088622 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-worker-6667c7bd65-7bw4v"] Jan 20 17:39:49 crc kubenswrapper[4558]: W0120 17:39:49.213866 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod67e97807_4d7b_4c6c_8e65_148173bfe17c.slice/crio-da183d67b9b9b720a1acbbf847a7c17e97698884989329bb3d5164945e6657cb WatchSource:0}: Error finding container da183d67b9b9b720a1acbbf847a7c17e97698884989329bb3d5164945e6657cb: Status 404 returned error can't find the container with id da183d67b9b9b720a1acbbf847a7c17e97698884989329bb3d5164945e6657cb Jan 20 17:39:49 crc kubenswrapper[4558]: I0120 17:39:49.232315 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-api-7699c67d4d-fsf9d"] Jan 20 17:39:49 crc kubenswrapper[4558]: I0120 17:39:49.375385 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:39:49 crc kubenswrapper[4558]: I0120 17:39:49.413660 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-7699c67d4d-fsf9d" event={"ID":"67e97807-4d7b-4c6c-8e65-148173bfe17c","Type":"ContainerStarted","Data":"da183d67b9b9b720a1acbbf847a7c17e97698884989329bb3d5164945e6657cb"} Jan 20 17:39:49 crc kubenswrapper[4558]: I0120 17:39:49.419635 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-5f469dd86b-h6dq9" event={"ID":"dfa1a7bb-2900-49f0-94ee-fe789d4ffec8","Type":"ContainerStarted","Data":"41a835374e97b54b247e3d01bbee52c1fb97c1dfd37e923ebe958c81a0dbe228"} Jan 20 17:39:49 crc kubenswrapper[4558]: I0120 17:39:49.419692 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-5f469dd86b-h6dq9" event={"ID":"dfa1a7bb-2900-49f0-94ee-fe789d4ffec8","Type":"ContainerStarted","Data":"6b5206546241e722df21bc36a7bfbdffafe84c8538c062bc41ea4da1087d3d5e"} Jan 20 17:39:49 crc kubenswrapper[4558]: I0120 17:39:49.422197 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"9d8e0386-7f68-436f-8a8a-4d79ec346619","Type":"ContainerStarted","Data":"76f8bd55a26af960845a639a020f01797699e43d0948244891a6e9a96a1f6f04"} Jan 20 17:39:49 crc kubenswrapper[4558]: I0120 17:39:49.422628 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:39:49 crc kubenswrapper[4558]: I0120 17:39:49.424522 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-6667c7bd65-7bw4v" 
event={"ID":"ba27bcbb-36f0-4575-afba-d6997d667fef","Type":"ContainerStarted","Data":"ce43e3bb6306e86eacbf56d2bb8a010f8f3998ab291973f5ad5e72276c882cdf"} Jan 20 17:39:49 crc kubenswrapper[4558]: I0120 17:39:49.424552 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-6667c7bd65-7bw4v" event={"ID":"ba27bcbb-36f0-4575-afba-d6997d667fef","Type":"ContainerStarted","Data":"fc3acc9f98565078ec2d7658865735dfb1b52da59b3f94de0815d13fac56297b"} Jan 20 17:39:49 crc kubenswrapper[4558]: I0120 17:39:49.426965 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"81080ef7-fd3d-487f-b6aa-0fb9eaebc855","Type":"ContainerStarted","Data":"d0baa8aa43ffe509db57324997a20747c31164ce17f510abfb37beba893c7f2e"} Jan 20 17:39:49 crc kubenswrapper[4558]: I0120 17:39:49.426993 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"81080ef7-fd3d-487f-b6aa-0fb9eaebc855","Type":"ContainerStarted","Data":"983340e3c0b3a463b995c14657063c9f1a54b71dae69af7f610060ce42550225"} Jan 20 17:39:49 crc kubenswrapper[4558]: I0120 17:39:49.431724 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2","Type":"ContainerStarted","Data":"aa0ca993872e33ad8a732487b7655b5b2c29e6e5ca4080a8dec4fd3d9941851a"} Jan 20 17:39:49 crc kubenswrapper[4558]: I0120 17:39:49.449459 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/cinder-api-0" podStartSLOduration=3.449444523 podStartE2EDuration="3.449444523s" podCreationTimestamp="2026-01-20 17:39:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:39:49.440525012 +0000 UTC m=+3483.200862980" watchObservedRunningTime="2026-01-20 17:39:49.449444523 +0000 UTC m=+3483.209782490" Jan 20 17:39:49 crc kubenswrapper[4558]: I0120 17:39:49.476546 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/cinder-scheduler-0" podStartSLOduration=3.4765267140000002 podStartE2EDuration="3.476526714s" podCreationTimestamp="2026-01-20 17:39:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:39:49.457616879 +0000 UTC m=+3483.217954846" watchObservedRunningTime="2026-01-20 17:39:49.476526714 +0000 UTC m=+3483.236864681" Jan 20 17:39:50 crc kubenswrapper[4558]: I0120 17:39:50.446148 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-6667c7bd65-7bw4v" event={"ID":"ba27bcbb-36f0-4575-afba-d6997d667fef","Type":"ContainerStarted","Data":"7a8e9aa388298d08f4da43fd2c878f6be898c08220911c53d8f06152ad76440d"} Jan 20 17:39:50 crc kubenswrapper[4558]: I0120 17:39:50.449841 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2","Type":"ContainerStarted","Data":"cd433ab19101a55fe28a4b5f5f546b83f5d3862cab4f05189daa9c99ec18eb21"} Jan 20 17:39:50 crc kubenswrapper[4558]: I0120 17:39:50.450273 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2","Type":"ContainerStarted","Data":"26f5c1167982dbbb9e0e3adf24d031f01e322c717b64319de2cfccd54308f098"} Jan 20 17:39:50 crc 
kubenswrapper[4558]: I0120 17:39:50.451861 4558 generic.go:334] "Generic (PLEG): container finished" podID="227c3622-b4eb-4862-9b18-a90df7ea75b9" containerID="17c497a986172b78ad18acace156e9379a29097ef6815bcb71c2e62b15079de6" exitCode=0 Jan 20 17:39:50 crc kubenswrapper[4558]: I0120 17:39:50.451953 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-7ww48" event={"ID":"227c3622-b4eb-4862-9b18-a90df7ea75b9","Type":"ContainerDied","Data":"17c497a986172b78ad18acace156e9379a29097ef6815bcb71c2e62b15079de6"} Jan 20 17:39:50 crc kubenswrapper[4558]: I0120 17:39:50.453795 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-7699c67d4d-fsf9d" event={"ID":"67e97807-4d7b-4c6c-8e65-148173bfe17c","Type":"ContainerStarted","Data":"ff2f4d8084422579f65c3d1bff4f2f2acd7aa144e45d23176cd336a0a11fa1f7"} Jan 20 17:39:50 crc kubenswrapper[4558]: I0120 17:39:50.453862 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-7699c67d4d-fsf9d" event={"ID":"67e97807-4d7b-4c6c-8e65-148173bfe17c","Type":"ContainerStarted","Data":"1847a95077e724637eb802a28e7bb0e7273c6c379ffa3f114dc22cb641c18b8e"} Jan 20 17:39:50 crc kubenswrapper[4558]: I0120 17:39:50.453955 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/barbican-api-7699c67d4d-fsf9d" Jan 20 17:39:50 crc kubenswrapper[4558]: I0120 17:39:50.453993 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/barbican-api-7699c67d4d-fsf9d" Jan 20 17:39:50 crc kubenswrapper[4558]: I0120 17:39:50.455990 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-5f469dd86b-h6dq9" event={"ID":"dfa1a7bb-2900-49f0-94ee-fe789d4ffec8","Type":"ContainerStarted","Data":"4918d6999246b14e34bbf966f6c160e07d3a28ecedad21b571990984b63736da"} Jan 20 17:39:50 crc kubenswrapper[4558]: I0120 17:39:50.456403 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-api-0" podUID="9d8e0386-7f68-436f-8a8a-4d79ec346619" containerName="cinder-api-log" containerID="cri-o://559254e40d7dbfa36c2c33588e1128bf691456804f1b56b56b8cf9aa21e0bbde" gracePeriod=30 Jan 20 17:39:50 crc kubenswrapper[4558]: I0120 17:39:50.456416 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-api-0" podUID="9d8e0386-7f68-436f-8a8a-4d79ec346619" containerName="cinder-api" containerID="cri-o://76f8bd55a26af960845a639a020f01797699e43d0948244891a6e9a96a1f6f04" gracePeriod=30 Jan 20 17:39:50 crc kubenswrapper[4558]: I0120 17:39:50.474204 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-worker-6667c7bd65-7bw4v" podStartSLOduration=2.474157709 podStartE2EDuration="2.474157709s" podCreationTimestamp="2026-01-20 17:39:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:39:50.464889002 +0000 UTC m=+3484.225226969" watchObservedRunningTime="2026-01-20 17:39:50.474157709 +0000 UTC m=+3484.234495676" Jan 20 17:39:50 crc kubenswrapper[4558]: I0120 17:39:50.496752 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-api-7699c67d4d-fsf9d" podStartSLOduration=2.496735671 podStartE2EDuration="2.496735671s" podCreationTimestamp="2026-01-20 17:39:48 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:39:50.48704459 +0000 UTC m=+3484.247382557" watchObservedRunningTime="2026-01-20 17:39:50.496735671 +0000 UTC m=+3484.257073639" Jan 20 17:39:50 crc kubenswrapper[4558]: I0120 17:39:50.532277 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-keystone-listener-5f469dd86b-h6dq9" podStartSLOduration=2.532259645 podStartE2EDuration="2.532259645s" podCreationTimestamp="2026-01-20 17:39:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:39:50.526427769 +0000 UTC m=+3484.286765736" watchObservedRunningTime="2026-01-20 17:39:50.532259645 +0000 UTC m=+3484.292597611" Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.013777 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.074769 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d8e0386-7f68-436f-8a8a-4d79ec346619-config-data\") pod \"9d8e0386-7f68-436f-8a8a-4d79ec346619\" (UID: \"9d8e0386-7f68-436f-8a8a-4d79ec346619\") " Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.074974 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9d8e0386-7f68-436f-8a8a-4d79ec346619-logs\") pod \"9d8e0386-7f68-436f-8a8a-4d79ec346619\" (UID: \"9d8e0386-7f68-436f-8a8a-4d79ec346619\") " Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.075090 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9d8e0386-7f68-436f-8a8a-4d79ec346619-scripts\") pod \"9d8e0386-7f68-436f-8a8a-4d79ec346619\" (UID: \"9d8e0386-7f68-436f-8a8a-4d79ec346619\") " Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.075143 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d8e0386-7f68-436f-8a8a-4d79ec346619-combined-ca-bundle\") pod \"9d8e0386-7f68-436f-8a8a-4d79ec346619\" (UID: \"9d8e0386-7f68-436f-8a8a-4d79ec346619\") " Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.075187 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9d8e0386-7f68-436f-8a8a-4d79ec346619-etc-machine-id\") pod \"9d8e0386-7f68-436f-8a8a-4d79ec346619\" (UID: \"9d8e0386-7f68-436f-8a8a-4d79ec346619\") " Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.075344 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9d8e0386-7f68-436f-8a8a-4d79ec346619-config-data-custom\") pod \"9d8e0386-7f68-436f-8a8a-4d79ec346619\" (UID: \"9d8e0386-7f68-436f-8a8a-4d79ec346619\") " Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.075363 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9d8e0386-7f68-436f-8a8a-4d79ec346619-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "9d8e0386-7f68-436f-8a8a-4d79ec346619" (UID: "9d8e0386-7f68-436f-8a8a-4d79ec346619"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.075393 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v8b7n\" (UniqueName: \"kubernetes.io/projected/9d8e0386-7f68-436f-8a8a-4d79ec346619-kube-api-access-v8b7n\") pod \"9d8e0386-7f68-436f-8a8a-4d79ec346619\" (UID: \"9d8e0386-7f68-436f-8a8a-4d79ec346619\") " Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.075438 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9d8e0386-7f68-436f-8a8a-4d79ec346619-logs" (OuterVolumeSpecName: "logs") pod "9d8e0386-7f68-436f-8a8a-4d79ec346619" (UID: "9d8e0386-7f68-436f-8a8a-4d79ec346619"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.076032 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9d8e0386-7f68-436f-8a8a-4d79ec346619-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.076051 4558 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9d8e0386-7f68-436f-8a8a-4d79ec346619-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.080531 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d8e0386-7f68-436f-8a8a-4d79ec346619-kube-api-access-v8b7n" (OuterVolumeSpecName: "kube-api-access-v8b7n") pod "9d8e0386-7f68-436f-8a8a-4d79ec346619" (UID: "9d8e0386-7f68-436f-8a8a-4d79ec346619"). InnerVolumeSpecName "kube-api-access-v8b7n". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.081236 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d8e0386-7f68-436f-8a8a-4d79ec346619-scripts" (OuterVolumeSpecName: "scripts") pod "9d8e0386-7f68-436f-8a8a-4d79ec346619" (UID: "9d8e0386-7f68-436f-8a8a-4d79ec346619"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.083636 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d8e0386-7f68-436f-8a8a-4d79ec346619-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "9d8e0386-7f68-436f-8a8a-4d79ec346619" (UID: "9d8e0386-7f68-436f-8a8a-4d79ec346619"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.113242 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d8e0386-7f68-436f-8a8a-4d79ec346619-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9d8e0386-7f68-436f-8a8a-4d79ec346619" (UID: "9d8e0386-7f68-436f-8a8a-4d79ec346619"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.119765 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d8e0386-7f68-436f-8a8a-4d79ec346619-config-data" (OuterVolumeSpecName: "config-data") pod "9d8e0386-7f68-436f-8a8a-4d79ec346619" (UID: "9d8e0386-7f68-436f-8a8a-4d79ec346619"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.179095 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9d8e0386-7f68-436f-8a8a-4d79ec346619-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.179216 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d8e0386-7f68-436f-8a8a-4d79ec346619-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.179282 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9d8e0386-7f68-436f-8a8a-4d79ec346619-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.179336 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v8b7n\" (UniqueName: \"kubernetes.io/projected/9d8e0386-7f68-436f-8a8a-4d79ec346619-kube-api-access-v8b7n\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.179383 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d8e0386-7f68-436f-8a8a-4d79ec346619-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.443432 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.481952 4558 generic.go:334] "Generic (PLEG): container finished" podID="9d8e0386-7f68-436f-8a8a-4d79ec346619" containerID="76f8bd55a26af960845a639a020f01797699e43d0948244891a6e9a96a1f6f04" exitCode=0 Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.482008 4558 generic.go:334] "Generic (PLEG): container finished" podID="9d8e0386-7f68-436f-8a8a-4d79ec346619" containerID="559254e40d7dbfa36c2c33588e1128bf691456804f1b56b56b8cf9aa21e0bbde" exitCode=143 Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.484878 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.489637 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"9d8e0386-7f68-436f-8a8a-4d79ec346619","Type":"ContainerDied","Data":"76f8bd55a26af960845a639a020f01797699e43d0948244891a6e9a96a1f6f04"} Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.489719 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"9d8e0386-7f68-436f-8a8a-4d79ec346619","Type":"ContainerDied","Data":"559254e40d7dbfa36c2c33588e1128bf691456804f1b56b56b8cf9aa21e0bbde"} Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.489738 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"9d8e0386-7f68-436f-8a8a-4d79ec346619","Type":"ContainerDied","Data":"ed9e9ae1da41245717b6c23a3a531fa27db15d89dce18231370de36a62198e30"} Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.489779 4558 scope.go:117] "RemoveContainer" containerID="76f8bd55a26af960845a639a020f01797699e43d0948244891a6e9a96a1f6f04" Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.544834 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.554474 4558 scope.go:117] "RemoveContainer" containerID="559254e40d7dbfa36c2c33588e1128bf691456804f1b56b56b8cf9aa21e0bbde" Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.570545 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.577921 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:39:51 crc kubenswrapper[4558]: E0120 17:39:51.578502 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d8e0386-7f68-436f-8a8a-4d79ec346619" containerName="cinder-api-log" Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.578518 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d8e0386-7f68-436f-8a8a-4d79ec346619" containerName="cinder-api-log" Jan 20 17:39:51 crc kubenswrapper[4558]: E0120 17:39:51.578541 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d8e0386-7f68-436f-8a8a-4d79ec346619" containerName="cinder-api" Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.578549 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d8e0386-7f68-436f-8a8a-4d79ec346619" containerName="cinder-api" Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.578734 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="9d8e0386-7f68-436f-8a8a-4d79ec346619" containerName="cinder-api" Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.578756 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="9d8e0386-7f68-436f-8a8a-4d79ec346619" containerName="cinder-api-log" Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.581636 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.583954 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-api-config-data" Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.587621 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.590367 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-cinder-public-svc" Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.590619 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-cinder-internal-svc" Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.614875 4558 scope.go:117] "RemoveContainer" containerID="76f8bd55a26af960845a639a020f01797699e43d0948244891a6e9a96a1f6f04" Jan 20 17:39:51 crc kubenswrapper[4558]: E0120 17:39:51.624307 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"76f8bd55a26af960845a639a020f01797699e43d0948244891a6e9a96a1f6f04\": container with ID starting with 76f8bd55a26af960845a639a020f01797699e43d0948244891a6e9a96a1f6f04 not found: ID does not exist" containerID="76f8bd55a26af960845a639a020f01797699e43d0948244891a6e9a96a1f6f04" Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.624356 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"76f8bd55a26af960845a639a020f01797699e43d0948244891a6e9a96a1f6f04"} err="failed to get container status \"76f8bd55a26af960845a639a020f01797699e43d0948244891a6e9a96a1f6f04\": rpc error: code = NotFound desc = could not find container \"76f8bd55a26af960845a639a020f01797699e43d0948244891a6e9a96a1f6f04\": container with ID starting with 76f8bd55a26af960845a639a020f01797699e43d0948244891a6e9a96a1f6f04 not found: ID does not exist" Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.624395 4558 scope.go:117] "RemoveContainer" containerID="559254e40d7dbfa36c2c33588e1128bf691456804f1b56b56b8cf9aa21e0bbde" Jan 20 17:39:51 crc kubenswrapper[4558]: E0120 17:39:51.625315 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"559254e40d7dbfa36c2c33588e1128bf691456804f1b56b56b8cf9aa21e0bbde\": container with ID starting with 559254e40d7dbfa36c2c33588e1128bf691456804f1b56b56b8cf9aa21e0bbde not found: ID does not exist" containerID="559254e40d7dbfa36c2c33588e1128bf691456804f1b56b56b8cf9aa21e0bbde" Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.625363 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"559254e40d7dbfa36c2c33588e1128bf691456804f1b56b56b8cf9aa21e0bbde"} err="failed to get container status \"559254e40d7dbfa36c2c33588e1128bf691456804f1b56b56b8cf9aa21e0bbde\": rpc error: code = NotFound desc = could not find container \"559254e40d7dbfa36c2c33588e1128bf691456804f1b56b56b8cf9aa21e0bbde\": container with ID starting with 559254e40d7dbfa36c2c33588e1128bf691456804f1b56b56b8cf9aa21e0bbde not found: ID does not exist" Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.625393 4558 scope.go:117] "RemoveContainer" containerID="76f8bd55a26af960845a639a020f01797699e43d0948244891a6e9a96a1f6f04" Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.625841 4558 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"76f8bd55a26af960845a639a020f01797699e43d0948244891a6e9a96a1f6f04"} err="failed to get container status \"76f8bd55a26af960845a639a020f01797699e43d0948244891a6e9a96a1f6f04\": rpc error: code = NotFound desc = could not find container \"76f8bd55a26af960845a639a020f01797699e43d0948244891a6e9a96a1f6f04\": container with ID starting with 76f8bd55a26af960845a639a020f01797699e43d0948244891a6e9a96a1f6f04 not found: ID does not exist" Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.625902 4558 scope.go:117] "RemoveContainer" containerID="559254e40d7dbfa36c2c33588e1128bf691456804f1b56b56b8cf9aa21e0bbde" Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.626203 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"559254e40d7dbfa36c2c33588e1128bf691456804f1b56b56b8cf9aa21e0bbde"} err="failed to get container status \"559254e40d7dbfa36c2c33588e1128bf691456804f1b56b56b8cf9aa21e0bbde\": rpc error: code = NotFound desc = could not find container \"559254e40d7dbfa36c2c33588e1128bf691456804f1b56b56b8cf9aa21e0bbde\": container with ID starting with 559254e40d7dbfa36c2c33588e1128bf691456804f1b56b56b8cf9aa21e0bbde not found: ID does not exist" Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.696559 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/70cf1941-8f7a-4328-9338-e5d7739173ac-config-data-custom\") pod \"cinder-api-0\" (UID: \"70cf1941-8f7a-4328-9338-e5d7739173ac\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.696648 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/70cf1941-8f7a-4328-9338-e5d7739173ac-scripts\") pod \"cinder-api-0\" (UID: \"70cf1941-8f7a-4328-9338-e5d7739173ac\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.696693 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bmv29\" (UniqueName: \"kubernetes.io/projected/70cf1941-8f7a-4328-9338-e5d7739173ac-kube-api-access-bmv29\") pod \"cinder-api-0\" (UID: \"70cf1941-8f7a-4328-9338-e5d7739173ac\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.696755 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70cf1941-8f7a-4328-9338-e5d7739173ac-config-data\") pod \"cinder-api-0\" (UID: \"70cf1941-8f7a-4328-9338-e5d7739173ac\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.696784 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70cf1941-8f7a-4328-9338-e5d7739173ac-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"70cf1941-8f7a-4328-9338-e5d7739173ac\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.696822 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/70cf1941-8f7a-4328-9338-e5d7739173ac-logs\") pod \"cinder-api-0\" (UID: \"70cf1941-8f7a-4328-9338-e5d7739173ac\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 
17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.696879 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/70cf1941-8f7a-4328-9338-e5d7739173ac-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"70cf1941-8f7a-4328-9338-e5d7739173ac\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.696904 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/70cf1941-8f7a-4328-9338-e5d7739173ac-etc-machine-id\") pod \"cinder-api-0\" (UID: \"70cf1941-8f7a-4328-9338-e5d7739173ac\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.696972 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/70cf1941-8f7a-4328-9338-e5d7739173ac-public-tls-certs\") pod \"cinder-api-0\" (UID: \"70cf1941-8f7a-4328-9338-e5d7739173ac\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.798493 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/70cf1941-8f7a-4328-9338-e5d7739173ac-config-data-custom\") pod \"cinder-api-0\" (UID: \"70cf1941-8f7a-4328-9338-e5d7739173ac\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.798586 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/70cf1941-8f7a-4328-9338-e5d7739173ac-scripts\") pod \"cinder-api-0\" (UID: \"70cf1941-8f7a-4328-9338-e5d7739173ac\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.798649 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bmv29\" (UniqueName: \"kubernetes.io/projected/70cf1941-8f7a-4328-9338-e5d7739173ac-kube-api-access-bmv29\") pod \"cinder-api-0\" (UID: \"70cf1941-8f7a-4328-9338-e5d7739173ac\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.798692 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70cf1941-8f7a-4328-9338-e5d7739173ac-config-data\") pod \"cinder-api-0\" (UID: \"70cf1941-8f7a-4328-9338-e5d7739173ac\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.798727 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70cf1941-8f7a-4328-9338-e5d7739173ac-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"70cf1941-8f7a-4328-9338-e5d7739173ac\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.798801 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/70cf1941-8f7a-4328-9338-e5d7739173ac-logs\") pod \"cinder-api-0\" (UID: \"70cf1941-8f7a-4328-9338-e5d7739173ac\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.798854 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/70cf1941-8f7a-4328-9338-e5d7739173ac-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"70cf1941-8f7a-4328-9338-e5d7739173ac\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.798899 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/70cf1941-8f7a-4328-9338-e5d7739173ac-etc-machine-id\") pod \"cinder-api-0\" (UID: \"70cf1941-8f7a-4328-9338-e5d7739173ac\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.798993 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/70cf1941-8f7a-4328-9338-e5d7739173ac-public-tls-certs\") pod \"cinder-api-0\" (UID: \"70cf1941-8f7a-4328-9338-e5d7739173ac\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.800634 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/70cf1941-8f7a-4328-9338-e5d7739173ac-logs\") pod \"cinder-api-0\" (UID: \"70cf1941-8f7a-4328-9338-e5d7739173ac\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.803716 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/70cf1941-8f7a-4328-9338-e5d7739173ac-etc-machine-id\") pod \"cinder-api-0\" (UID: \"70cf1941-8f7a-4328-9338-e5d7739173ac\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.953860 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/70cf1941-8f7a-4328-9338-e5d7739173ac-public-tls-certs\") pod \"cinder-api-0\" (UID: \"70cf1941-8f7a-4328-9338-e5d7739173ac\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.954875 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/70cf1941-8f7a-4328-9338-e5d7739173ac-config-data-custom\") pod \"cinder-api-0\" (UID: \"70cf1941-8f7a-4328-9338-e5d7739173ac\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.955074 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/70cf1941-8f7a-4328-9338-e5d7739173ac-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"70cf1941-8f7a-4328-9338-e5d7739173ac\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.955109 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70cf1941-8f7a-4328-9338-e5d7739173ac-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"70cf1941-8f7a-4328-9338-e5d7739173ac\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.955642 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bmv29\" (UniqueName: \"kubernetes.io/projected/70cf1941-8f7a-4328-9338-e5d7739173ac-kube-api-access-bmv29\") pod \"cinder-api-0\" (UID: \"70cf1941-8f7a-4328-9338-e5d7739173ac\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.955728 4558 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/70cf1941-8f7a-4328-9338-e5d7739173ac-scripts\") pod \"cinder-api-0\" (UID: \"70cf1941-8f7a-4328-9338-e5d7739173ac\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.955892 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70cf1941-8f7a-4328-9338-e5d7739173ac-config-data\") pod \"cinder-api-0\" (UID: \"70cf1941-8f7a-4328-9338-e5d7739173ac\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:39:51 crc kubenswrapper[4558]: I0120 17:39:51.967292 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-7ww48" Jan 20 17:39:52 crc kubenswrapper[4558]: I0120 17:39:52.012325 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:39:52 crc kubenswrapper[4558]: I0120 17:39:52.106953 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ht6g2\" (UniqueName: \"kubernetes.io/projected/227c3622-b4eb-4862-9b18-a90df7ea75b9-kube-api-access-ht6g2\") pod \"227c3622-b4eb-4862-9b18-a90df7ea75b9\" (UID: \"227c3622-b4eb-4862-9b18-a90df7ea75b9\") " Jan 20 17:39:52 crc kubenswrapper[4558]: I0120 17:39:52.107248 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/227c3622-b4eb-4862-9b18-a90df7ea75b9-fernet-keys\") pod \"227c3622-b4eb-4862-9b18-a90df7ea75b9\" (UID: \"227c3622-b4eb-4862-9b18-a90df7ea75b9\") " Jan 20 17:39:52 crc kubenswrapper[4558]: I0120 17:39:52.107362 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/227c3622-b4eb-4862-9b18-a90df7ea75b9-credential-keys\") pod \"227c3622-b4eb-4862-9b18-a90df7ea75b9\" (UID: \"227c3622-b4eb-4862-9b18-a90df7ea75b9\") " Jan 20 17:39:52 crc kubenswrapper[4558]: I0120 17:39:52.107447 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/227c3622-b4eb-4862-9b18-a90df7ea75b9-combined-ca-bundle\") pod \"227c3622-b4eb-4862-9b18-a90df7ea75b9\" (UID: \"227c3622-b4eb-4862-9b18-a90df7ea75b9\") " Jan 20 17:39:52 crc kubenswrapper[4558]: I0120 17:39:52.107608 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/227c3622-b4eb-4862-9b18-a90df7ea75b9-scripts\") pod \"227c3622-b4eb-4862-9b18-a90df7ea75b9\" (UID: \"227c3622-b4eb-4862-9b18-a90df7ea75b9\") " Jan 20 17:39:52 crc kubenswrapper[4558]: I0120 17:39:52.107672 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/227c3622-b4eb-4862-9b18-a90df7ea75b9-config-data\") pod \"227c3622-b4eb-4862-9b18-a90df7ea75b9\" (UID: \"227c3622-b4eb-4862-9b18-a90df7ea75b9\") " Jan 20 17:39:52 crc kubenswrapper[4558]: I0120 17:39:52.111342 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/227c3622-b4eb-4862-9b18-a90df7ea75b9-kube-api-access-ht6g2" (OuterVolumeSpecName: "kube-api-access-ht6g2") pod "227c3622-b4eb-4862-9b18-a90df7ea75b9" (UID: "227c3622-b4eb-4862-9b18-a90df7ea75b9"). InnerVolumeSpecName "kube-api-access-ht6g2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:39:52 crc kubenswrapper[4558]: I0120 17:39:52.114297 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/227c3622-b4eb-4862-9b18-a90df7ea75b9-scripts" (OuterVolumeSpecName: "scripts") pod "227c3622-b4eb-4862-9b18-a90df7ea75b9" (UID: "227c3622-b4eb-4862-9b18-a90df7ea75b9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:39:52 crc kubenswrapper[4558]: I0120 17:39:52.116629 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/227c3622-b4eb-4862-9b18-a90df7ea75b9-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "227c3622-b4eb-4862-9b18-a90df7ea75b9" (UID: "227c3622-b4eb-4862-9b18-a90df7ea75b9"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:39:52 crc kubenswrapper[4558]: I0120 17:39:52.117268 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/227c3622-b4eb-4862-9b18-a90df7ea75b9-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "227c3622-b4eb-4862-9b18-a90df7ea75b9" (UID: "227c3622-b4eb-4862-9b18-a90df7ea75b9"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:39:52 crc kubenswrapper[4558]: I0120 17:39:52.135379 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/227c3622-b4eb-4862-9b18-a90df7ea75b9-config-data" (OuterVolumeSpecName: "config-data") pod "227c3622-b4eb-4862-9b18-a90df7ea75b9" (UID: "227c3622-b4eb-4862-9b18-a90df7ea75b9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:39:52 crc kubenswrapper[4558]: I0120 17:39:52.135467 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/227c3622-b4eb-4862-9b18-a90df7ea75b9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "227c3622-b4eb-4862-9b18-a90df7ea75b9" (UID: "227c3622-b4eb-4862-9b18-a90df7ea75b9"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:39:52 crc kubenswrapper[4558]: I0120 17:39:52.211247 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/227c3622-b4eb-4862-9b18-a90df7ea75b9-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:52 crc kubenswrapper[4558]: I0120 17:39:52.211293 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/227c3622-b4eb-4862-9b18-a90df7ea75b9-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:52 crc kubenswrapper[4558]: I0120 17:39:52.211310 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ht6g2\" (UniqueName: \"kubernetes.io/projected/227c3622-b4eb-4862-9b18-a90df7ea75b9-kube-api-access-ht6g2\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:52 crc kubenswrapper[4558]: I0120 17:39:52.211326 4558 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/227c3622-b4eb-4862-9b18-a90df7ea75b9-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:52 crc kubenswrapper[4558]: I0120 17:39:52.211338 4558 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/227c3622-b4eb-4862-9b18-a90df7ea75b9-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:52 crc kubenswrapper[4558]: I0120 17:39:52.211350 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/227c3622-b4eb-4862-9b18-a90df7ea75b9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:52 crc kubenswrapper[4558]: I0120 17:39:52.453929 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:39:52 crc kubenswrapper[4558]: I0120 17:39:52.494329 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2","Type":"ContainerStarted","Data":"bcbdfa37349f1e89eaec79cbd97fa05f054554461a223bbe18052651d462d9db"} Jan 20 17:39:52 crc kubenswrapper[4558]: I0120 17:39:52.494605 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:39:52 crc kubenswrapper[4558]: I0120 17:39:52.496275 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-7ww48" Jan 20 17:39:52 crc kubenswrapper[4558]: I0120 17:39:52.496556 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-7ww48" event={"ID":"227c3622-b4eb-4862-9b18-a90df7ea75b9","Type":"ContainerDied","Data":"0a7ed0e81119a78626ce3742f6e9e51a55adcd610bda36f42e6739e5bb4253ad"} Jan 20 17:39:52 crc kubenswrapper[4558]: I0120 17:39:52.496615 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0a7ed0e81119a78626ce3742f6e9e51a55adcd610bda36f42e6739e5bb4253ad" Jan 20 17:39:52 crc kubenswrapper[4558]: I0120 17:39:52.497941 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"70cf1941-8f7a-4328-9338-e5d7739173ac","Type":"ContainerStarted","Data":"03d011d8bc46061b6c11957a3832fad93110bfd8e9fda1e1907aff3afde5ba04"} Jan 20 17:39:52 crc kubenswrapper[4558]: I0120 17:39:52.521335 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=2.545174149 podStartE2EDuration="6.521314945s" podCreationTimestamp="2026-01-20 17:39:46 +0000 UTC" firstStartedPulling="2026-01-20 17:39:47.593250664 +0000 UTC m=+3481.353588632" lastFinishedPulling="2026-01-20 17:39:51.569391461 +0000 UTC m=+3485.329729428" observedRunningTime="2026-01-20 17:39:52.508609265 +0000 UTC m=+3486.268947232" watchObservedRunningTime="2026-01-20 17:39:52.521314945 +0000 UTC m=+3486.281652911" Jan 20 17:39:52 crc kubenswrapper[4558]: I0120 17:39:52.584479 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d8e0386-7f68-436f-8a8a-4d79ec346619" path="/var/lib/kubelet/pods/9d8e0386-7f68-436f-8a8a-4d79ec346619/volumes" Jan 20 17:39:52 crc kubenswrapper[4558]: I0120 17:39:52.600239 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-7dffbf585-vg2sk"] Jan 20 17:39:52 crc kubenswrapper[4558]: E0120 17:39:52.600602 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="227c3622-b4eb-4862-9b18-a90df7ea75b9" containerName="keystone-bootstrap" Jan 20 17:39:52 crc kubenswrapper[4558]: I0120 17:39:52.600620 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="227c3622-b4eb-4862-9b18-a90df7ea75b9" containerName="keystone-bootstrap" Jan 20 17:39:52 crc kubenswrapper[4558]: I0120 17:39:52.600810 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="227c3622-b4eb-4862-9b18-a90df7ea75b9" containerName="keystone-bootstrap" Jan 20 17:39:52 crc kubenswrapper[4558]: I0120 17:39:52.601435 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-7dffbf585-vg2sk" Jan 20 17:39:52 crc kubenswrapper[4558]: I0120 17:39:52.606330 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-config-data" Jan 20 17:39:52 crc kubenswrapper[4558]: I0120 17:39:52.606500 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-scripts" Jan 20 17:39:52 crc kubenswrapper[4558]: I0120 17:39:52.606665 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone" Jan 20 17:39:52 crc kubenswrapper[4558]: I0120 17:39:52.607380 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-keystone-dockercfg-2rkcv" Jan 20 17:39:52 crc kubenswrapper[4558]: I0120 17:39:52.608419 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-keystone-internal-svc" Jan 20 17:39:52 crc kubenswrapper[4558]: I0120 17:39:52.608478 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-keystone-public-svc" Jan 20 17:39:52 crc kubenswrapper[4558]: I0120 17:39:52.614912 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-7dffbf585-vg2sk"] Jan 20 17:39:52 crc kubenswrapper[4558]: I0120 17:39:52.724274 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8f7d7846-1b56-4ab5-a0ad-63b179a1a797-credential-keys\") pod \"keystone-7dffbf585-vg2sk\" (UID: \"8f7d7846-1b56-4ab5-a0ad-63b179a1a797\") " pod="openstack-kuttl-tests/keystone-7dffbf585-vg2sk" Jan 20 17:39:52 crc kubenswrapper[4558]: I0120 17:39:52.724509 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8f7d7846-1b56-4ab5-a0ad-63b179a1a797-scripts\") pod \"keystone-7dffbf585-vg2sk\" (UID: \"8f7d7846-1b56-4ab5-a0ad-63b179a1a797\") " pod="openstack-kuttl-tests/keystone-7dffbf585-vg2sk" Jan 20 17:39:52 crc kubenswrapper[4558]: I0120 17:39:52.724676 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8f7d7846-1b56-4ab5-a0ad-63b179a1a797-public-tls-certs\") pod \"keystone-7dffbf585-vg2sk\" (UID: \"8f7d7846-1b56-4ab5-a0ad-63b179a1a797\") " pod="openstack-kuttl-tests/keystone-7dffbf585-vg2sk" Jan 20 17:39:52 crc kubenswrapper[4558]: I0120 17:39:52.724852 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8f7d7846-1b56-4ab5-a0ad-63b179a1a797-internal-tls-certs\") pod \"keystone-7dffbf585-vg2sk\" (UID: \"8f7d7846-1b56-4ab5-a0ad-63b179a1a797\") " pod="openstack-kuttl-tests/keystone-7dffbf585-vg2sk" Jan 20 17:39:52 crc kubenswrapper[4558]: I0120 17:39:52.724996 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f7d7846-1b56-4ab5-a0ad-63b179a1a797-config-data\") pod \"keystone-7dffbf585-vg2sk\" (UID: \"8f7d7846-1b56-4ab5-a0ad-63b179a1a797\") " pod="openstack-kuttl-tests/keystone-7dffbf585-vg2sk" Jan 20 17:39:52 crc kubenswrapper[4558]: I0120 17:39:52.725087 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b2dvl\" 
(UniqueName: \"kubernetes.io/projected/8f7d7846-1b56-4ab5-a0ad-63b179a1a797-kube-api-access-b2dvl\") pod \"keystone-7dffbf585-vg2sk\" (UID: \"8f7d7846-1b56-4ab5-a0ad-63b179a1a797\") " pod="openstack-kuttl-tests/keystone-7dffbf585-vg2sk" Jan 20 17:39:52 crc kubenswrapper[4558]: I0120 17:39:52.725120 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8f7d7846-1b56-4ab5-a0ad-63b179a1a797-fernet-keys\") pod \"keystone-7dffbf585-vg2sk\" (UID: \"8f7d7846-1b56-4ab5-a0ad-63b179a1a797\") " pod="openstack-kuttl-tests/keystone-7dffbf585-vg2sk" Jan 20 17:39:52 crc kubenswrapper[4558]: I0120 17:39:52.725312 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f7d7846-1b56-4ab5-a0ad-63b179a1a797-combined-ca-bundle\") pod \"keystone-7dffbf585-vg2sk\" (UID: \"8f7d7846-1b56-4ab5-a0ad-63b179a1a797\") " pod="openstack-kuttl-tests/keystone-7dffbf585-vg2sk" Jan 20 17:39:52 crc kubenswrapper[4558]: I0120 17:39:52.827013 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8f7d7846-1b56-4ab5-a0ad-63b179a1a797-scripts\") pod \"keystone-7dffbf585-vg2sk\" (UID: \"8f7d7846-1b56-4ab5-a0ad-63b179a1a797\") " pod="openstack-kuttl-tests/keystone-7dffbf585-vg2sk" Jan 20 17:39:52 crc kubenswrapper[4558]: I0120 17:39:52.827095 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8f7d7846-1b56-4ab5-a0ad-63b179a1a797-public-tls-certs\") pod \"keystone-7dffbf585-vg2sk\" (UID: \"8f7d7846-1b56-4ab5-a0ad-63b179a1a797\") " pod="openstack-kuttl-tests/keystone-7dffbf585-vg2sk" Jan 20 17:39:52 crc kubenswrapper[4558]: I0120 17:39:52.827191 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8f7d7846-1b56-4ab5-a0ad-63b179a1a797-internal-tls-certs\") pod \"keystone-7dffbf585-vg2sk\" (UID: \"8f7d7846-1b56-4ab5-a0ad-63b179a1a797\") " pod="openstack-kuttl-tests/keystone-7dffbf585-vg2sk" Jan 20 17:39:52 crc kubenswrapper[4558]: I0120 17:39:52.827230 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f7d7846-1b56-4ab5-a0ad-63b179a1a797-config-data\") pod \"keystone-7dffbf585-vg2sk\" (UID: \"8f7d7846-1b56-4ab5-a0ad-63b179a1a797\") " pod="openstack-kuttl-tests/keystone-7dffbf585-vg2sk" Jan 20 17:39:52 crc kubenswrapper[4558]: I0120 17:39:52.827258 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b2dvl\" (UniqueName: \"kubernetes.io/projected/8f7d7846-1b56-4ab5-a0ad-63b179a1a797-kube-api-access-b2dvl\") pod \"keystone-7dffbf585-vg2sk\" (UID: \"8f7d7846-1b56-4ab5-a0ad-63b179a1a797\") " pod="openstack-kuttl-tests/keystone-7dffbf585-vg2sk" Jan 20 17:39:52 crc kubenswrapper[4558]: I0120 17:39:52.827578 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8f7d7846-1b56-4ab5-a0ad-63b179a1a797-fernet-keys\") pod \"keystone-7dffbf585-vg2sk\" (UID: \"8f7d7846-1b56-4ab5-a0ad-63b179a1a797\") " pod="openstack-kuttl-tests/keystone-7dffbf585-vg2sk" Jan 20 17:39:52 crc kubenswrapper[4558]: I0120 17:39:52.827730 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f7d7846-1b56-4ab5-a0ad-63b179a1a797-combined-ca-bundle\") pod \"keystone-7dffbf585-vg2sk\" (UID: \"8f7d7846-1b56-4ab5-a0ad-63b179a1a797\") " pod="openstack-kuttl-tests/keystone-7dffbf585-vg2sk" Jan 20 17:39:52 crc kubenswrapper[4558]: I0120 17:39:52.828173 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8f7d7846-1b56-4ab5-a0ad-63b179a1a797-credential-keys\") pod \"keystone-7dffbf585-vg2sk\" (UID: \"8f7d7846-1b56-4ab5-a0ad-63b179a1a797\") " pod="openstack-kuttl-tests/keystone-7dffbf585-vg2sk" Jan 20 17:39:52 crc kubenswrapper[4558]: I0120 17:39:52.832780 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f7d7846-1b56-4ab5-a0ad-63b179a1a797-combined-ca-bundle\") pod \"keystone-7dffbf585-vg2sk\" (UID: \"8f7d7846-1b56-4ab5-a0ad-63b179a1a797\") " pod="openstack-kuttl-tests/keystone-7dffbf585-vg2sk" Jan 20 17:39:52 crc kubenswrapper[4558]: I0120 17:39:52.833103 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8f7d7846-1b56-4ab5-a0ad-63b179a1a797-scripts\") pod \"keystone-7dffbf585-vg2sk\" (UID: \"8f7d7846-1b56-4ab5-a0ad-63b179a1a797\") " pod="openstack-kuttl-tests/keystone-7dffbf585-vg2sk" Jan 20 17:39:52 crc kubenswrapper[4558]: I0120 17:39:52.834116 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f7d7846-1b56-4ab5-a0ad-63b179a1a797-config-data\") pod \"keystone-7dffbf585-vg2sk\" (UID: \"8f7d7846-1b56-4ab5-a0ad-63b179a1a797\") " pod="openstack-kuttl-tests/keystone-7dffbf585-vg2sk" Jan 20 17:39:52 crc kubenswrapper[4558]: I0120 17:39:52.834144 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8f7d7846-1b56-4ab5-a0ad-63b179a1a797-internal-tls-certs\") pod \"keystone-7dffbf585-vg2sk\" (UID: \"8f7d7846-1b56-4ab5-a0ad-63b179a1a797\") " pod="openstack-kuttl-tests/keystone-7dffbf585-vg2sk" Jan 20 17:39:52 crc kubenswrapper[4558]: I0120 17:39:52.834413 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8f7d7846-1b56-4ab5-a0ad-63b179a1a797-fernet-keys\") pod \"keystone-7dffbf585-vg2sk\" (UID: \"8f7d7846-1b56-4ab5-a0ad-63b179a1a797\") " pod="openstack-kuttl-tests/keystone-7dffbf585-vg2sk" Jan 20 17:39:52 crc kubenswrapper[4558]: I0120 17:39:52.836634 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8f7d7846-1b56-4ab5-a0ad-63b179a1a797-public-tls-certs\") pod \"keystone-7dffbf585-vg2sk\" (UID: \"8f7d7846-1b56-4ab5-a0ad-63b179a1a797\") " pod="openstack-kuttl-tests/keystone-7dffbf585-vg2sk" Jan 20 17:39:52 crc kubenswrapper[4558]: I0120 17:39:52.836968 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8f7d7846-1b56-4ab5-a0ad-63b179a1a797-credential-keys\") pod \"keystone-7dffbf585-vg2sk\" (UID: \"8f7d7846-1b56-4ab5-a0ad-63b179a1a797\") " pod="openstack-kuttl-tests/keystone-7dffbf585-vg2sk" Jan 20 17:39:52 crc kubenswrapper[4558]: I0120 17:39:52.841246 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b2dvl\" (UniqueName: 
\"kubernetes.io/projected/8f7d7846-1b56-4ab5-a0ad-63b179a1a797-kube-api-access-b2dvl\") pod \"keystone-7dffbf585-vg2sk\" (UID: \"8f7d7846-1b56-4ab5-a0ad-63b179a1a797\") " pod="openstack-kuttl-tests/keystone-7dffbf585-vg2sk" Jan 20 17:39:52 crc kubenswrapper[4558]: I0120 17:39:52.923810 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-7dffbf585-vg2sk" Jan 20 17:39:53 crc kubenswrapper[4558]: I0120 17:39:53.364320 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-7dffbf585-vg2sk"] Jan 20 17:39:53 crc kubenswrapper[4558]: I0120 17:39:53.536812 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"70cf1941-8f7a-4328-9338-e5d7739173ac","Type":"ContainerStarted","Data":"9fe05fc8fb03d4b0c68d34c739144ce4f22e171fb9d33638aacaab26959ef775"} Jan 20 17:39:53 crc kubenswrapper[4558]: I0120 17:39:53.539795 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-7dffbf585-vg2sk" event={"ID":"8f7d7846-1b56-4ab5-a0ad-63b179a1a797","Type":"ContainerStarted","Data":"c19ebe9aeb11e93747060a750808dbaf41ffff83c463556fac370fec9e9071b5"} Jan 20 17:39:53 crc kubenswrapper[4558]: I0120 17:39:53.542766 4558 generic.go:334] "Generic (PLEG): container finished" podID="ae3119fb-1355-4da8-a005-3610a1733b90" containerID="ef33eca59ed0960fac7c18dbc983784852c470d4f156ce3a7db2a36e9f8dde75" exitCode=0 Jan 20 17:39:53 crc kubenswrapper[4558]: I0120 17:39:53.542869 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-sync-fv6q5" event={"ID":"ae3119fb-1355-4da8-a005-3610a1733b90","Type":"ContainerDied","Data":"ef33eca59ed0960fac7c18dbc983784852c470d4f156ce3a7db2a36e9f8dde75"} Jan 20 17:39:53 crc kubenswrapper[4558]: I0120 17:39:53.937848 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-api-6575ff8c88-kqv4m"] Jan 20 17:39:53 crc kubenswrapper[4558]: I0120 17:39:53.939609 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-api-6575ff8c88-kqv4m" Jan 20 17:39:53 crc kubenswrapper[4558]: I0120 17:39:53.941820 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-barbican-internal-svc" Jan 20 17:39:53 crc kubenswrapper[4558]: I0120 17:39:53.941993 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-barbican-public-svc" Jan 20 17:39:53 crc kubenswrapper[4558]: I0120 17:39:53.948688 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-api-6575ff8c88-kqv4m"] Jan 20 17:39:54 crc kubenswrapper[4558]: I0120 17:39:54.064627 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f6d8d5b2-af92-43f7-ad6f-74ac3121776c-config-data-custom\") pod \"barbican-api-6575ff8c88-kqv4m\" (UID: \"f6d8d5b2-af92-43f7-ad6f-74ac3121776c\") " pod="openstack-kuttl-tests/barbican-api-6575ff8c88-kqv4m" Jan 20 17:39:54 crc kubenswrapper[4558]: I0120 17:39:54.064714 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f6d8d5b2-af92-43f7-ad6f-74ac3121776c-logs\") pod \"barbican-api-6575ff8c88-kqv4m\" (UID: \"f6d8d5b2-af92-43f7-ad6f-74ac3121776c\") " pod="openstack-kuttl-tests/barbican-api-6575ff8c88-kqv4m" Jan 20 17:39:54 crc kubenswrapper[4558]: I0120 17:39:54.064796 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tl7zd\" (UniqueName: \"kubernetes.io/projected/f6d8d5b2-af92-43f7-ad6f-74ac3121776c-kube-api-access-tl7zd\") pod \"barbican-api-6575ff8c88-kqv4m\" (UID: \"f6d8d5b2-af92-43f7-ad6f-74ac3121776c\") " pod="openstack-kuttl-tests/barbican-api-6575ff8c88-kqv4m" Jan 20 17:39:54 crc kubenswrapper[4558]: I0120 17:39:54.064898 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f6d8d5b2-af92-43f7-ad6f-74ac3121776c-internal-tls-certs\") pod \"barbican-api-6575ff8c88-kqv4m\" (UID: \"f6d8d5b2-af92-43f7-ad6f-74ac3121776c\") " pod="openstack-kuttl-tests/barbican-api-6575ff8c88-kqv4m" Jan 20 17:39:54 crc kubenswrapper[4558]: I0120 17:39:54.064945 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f6d8d5b2-af92-43f7-ad6f-74ac3121776c-config-data\") pod \"barbican-api-6575ff8c88-kqv4m\" (UID: \"f6d8d5b2-af92-43f7-ad6f-74ac3121776c\") " pod="openstack-kuttl-tests/barbican-api-6575ff8c88-kqv4m" Jan 20 17:39:54 crc kubenswrapper[4558]: I0120 17:39:54.064970 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f6d8d5b2-af92-43f7-ad6f-74ac3121776c-public-tls-certs\") pod \"barbican-api-6575ff8c88-kqv4m\" (UID: \"f6d8d5b2-af92-43f7-ad6f-74ac3121776c\") " pod="openstack-kuttl-tests/barbican-api-6575ff8c88-kqv4m" Jan 20 17:39:54 crc kubenswrapper[4558]: I0120 17:39:54.065018 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f6d8d5b2-af92-43f7-ad6f-74ac3121776c-combined-ca-bundle\") pod \"barbican-api-6575ff8c88-kqv4m\" (UID: \"f6d8d5b2-af92-43f7-ad6f-74ac3121776c\") " 
pod="openstack-kuttl-tests/barbican-api-6575ff8c88-kqv4m" Jan 20 17:39:54 crc kubenswrapper[4558]: I0120 17:39:54.167146 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f6d8d5b2-af92-43f7-ad6f-74ac3121776c-config-data-custom\") pod \"barbican-api-6575ff8c88-kqv4m\" (UID: \"f6d8d5b2-af92-43f7-ad6f-74ac3121776c\") " pod="openstack-kuttl-tests/barbican-api-6575ff8c88-kqv4m" Jan 20 17:39:54 crc kubenswrapper[4558]: I0120 17:39:54.167237 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f6d8d5b2-af92-43f7-ad6f-74ac3121776c-logs\") pod \"barbican-api-6575ff8c88-kqv4m\" (UID: \"f6d8d5b2-af92-43f7-ad6f-74ac3121776c\") " pod="openstack-kuttl-tests/barbican-api-6575ff8c88-kqv4m" Jan 20 17:39:54 crc kubenswrapper[4558]: I0120 17:39:54.167305 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tl7zd\" (UniqueName: \"kubernetes.io/projected/f6d8d5b2-af92-43f7-ad6f-74ac3121776c-kube-api-access-tl7zd\") pod \"barbican-api-6575ff8c88-kqv4m\" (UID: \"f6d8d5b2-af92-43f7-ad6f-74ac3121776c\") " pod="openstack-kuttl-tests/barbican-api-6575ff8c88-kqv4m" Jan 20 17:39:54 crc kubenswrapper[4558]: I0120 17:39:54.167339 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f6d8d5b2-af92-43f7-ad6f-74ac3121776c-internal-tls-certs\") pod \"barbican-api-6575ff8c88-kqv4m\" (UID: \"f6d8d5b2-af92-43f7-ad6f-74ac3121776c\") " pod="openstack-kuttl-tests/barbican-api-6575ff8c88-kqv4m" Jan 20 17:39:54 crc kubenswrapper[4558]: I0120 17:39:54.167986 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f6d8d5b2-af92-43f7-ad6f-74ac3121776c-logs\") pod \"barbican-api-6575ff8c88-kqv4m\" (UID: \"f6d8d5b2-af92-43f7-ad6f-74ac3121776c\") " pod="openstack-kuttl-tests/barbican-api-6575ff8c88-kqv4m" Jan 20 17:39:54 crc kubenswrapper[4558]: I0120 17:39:54.168416 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f6d8d5b2-af92-43f7-ad6f-74ac3121776c-config-data\") pod \"barbican-api-6575ff8c88-kqv4m\" (UID: \"f6d8d5b2-af92-43f7-ad6f-74ac3121776c\") " pod="openstack-kuttl-tests/barbican-api-6575ff8c88-kqv4m" Jan 20 17:39:54 crc kubenswrapper[4558]: I0120 17:39:54.168458 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f6d8d5b2-af92-43f7-ad6f-74ac3121776c-public-tls-certs\") pod \"barbican-api-6575ff8c88-kqv4m\" (UID: \"f6d8d5b2-af92-43f7-ad6f-74ac3121776c\") " pod="openstack-kuttl-tests/barbican-api-6575ff8c88-kqv4m" Jan 20 17:39:54 crc kubenswrapper[4558]: I0120 17:39:54.168513 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f6d8d5b2-af92-43f7-ad6f-74ac3121776c-combined-ca-bundle\") pod \"barbican-api-6575ff8c88-kqv4m\" (UID: \"f6d8d5b2-af92-43f7-ad6f-74ac3121776c\") " pod="openstack-kuttl-tests/barbican-api-6575ff8c88-kqv4m" Jan 20 17:39:54 crc kubenswrapper[4558]: I0120 17:39:54.172154 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f6d8d5b2-af92-43f7-ad6f-74ac3121776c-public-tls-certs\") pod \"barbican-api-6575ff8c88-kqv4m\" (UID: 
\"f6d8d5b2-af92-43f7-ad6f-74ac3121776c\") " pod="openstack-kuttl-tests/barbican-api-6575ff8c88-kqv4m" Jan 20 17:39:54 crc kubenswrapper[4558]: I0120 17:39:54.173214 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f6d8d5b2-af92-43f7-ad6f-74ac3121776c-config-data\") pod \"barbican-api-6575ff8c88-kqv4m\" (UID: \"f6d8d5b2-af92-43f7-ad6f-74ac3121776c\") " pod="openstack-kuttl-tests/barbican-api-6575ff8c88-kqv4m" Jan 20 17:39:54 crc kubenswrapper[4558]: I0120 17:39:54.173670 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f6d8d5b2-af92-43f7-ad6f-74ac3121776c-internal-tls-certs\") pod \"barbican-api-6575ff8c88-kqv4m\" (UID: \"f6d8d5b2-af92-43f7-ad6f-74ac3121776c\") " pod="openstack-kuttl-tests/barbican-api-6575ff8c88-kqv4m" Jan 20 17:39:54 crc kubenswrapper[4558]: I0120 17:39:54.174733 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f6d8d5b2-af92-43f7-ad6f-74ac3121776c-config-data-custom\") pod \"barbican-api-6575ff8c88-kqv4m\" (UID: \"f6d8d5b2-af92-43f7-ad6f-74ac3121776c\") " pod="openstack-kuttl-tests/barbican-api-6575ff8c88-kqv4m" Jan 20 17:39:54 crc kubenswrapper[4558]: I0120 17:39:54.181004 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f6d8d5b2-af92-43f7-ad6f-74ac3121776c-combined-ca-bundle\") pod \"barbican-api-6575ff8c88-kqv4m\" (UID: \"f6d8d5b2-af92-43f7-ad6f-74ac3121776c\") " pod="openstack-kuttl-tests/barbican-api-6575ff8c88-kqv4m" Jan 20 17:39:54 crc kubenswrapper[4558]: I0120 17:39:54.183722 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tl7zd\" (UniqueName: \"kubernetes.io/projected/f6d8d5b2-af92-43f7-ad6f-74ac3121776c-kube-api-access-tl7zd\") pod \"barbican-api-6575ff8c88-kqv4m\" (UID: \"f6d8d5b2-af92-43f7-ad6f-74ac3121776c\") " pod="openstack-kuttl-tests/barbican-api-6575ff8c88-kqv4m" Jan 20 17:39:54 crc kubenswrapper[4558]: I0120 17:39:54.255337 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-api-6575ff8c88-kqv4m" Jan 20 17:39:54 crc kubenswrapper[4558]: I0120 17:39:54.301676 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:39:54 crc kubenswrapper[4558]: I0120 17:39:54.301728 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:39:54 crc kubenswrapper[4558]: I0120 17:39:54.348655 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:39:54 crc kubenswrapper[4558]: I0120 17:39:54.348979 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:39:54 crc kubenswrapper[4558]: I0120 17:39:54.559354 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"70cf1941-8f7a-4328-9338-e5d7739173ac","Type":"ContainerStarted","Data":"f6da7d2cb6d5c94ace71a8c784725894f1856e6ba2322d49bb6f31837cd6c608"} Jan 20 17:39:54 crc kubenswrapper[4558]: I0120 17:39:54.559471 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:39:54 crc kubenswrapper[4558]: I0120 17:39:54.560815 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-7dffbf585-vg2sk" event={"ID":"8f7d7846-1b56-4ab5-a0ad-63b179a1a797","Type":"ContainerStarted","Data":"e527f3739bfb93778f627c6e5f5e80d497ddd06b49ce87cb15b8ecf4e23a0bc4"} Jan 20 17:39:54 crc kubenswrapper[4558]: I0120 17:39:54.561460 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:39:54 crc kubenswrapper[4558]: I0120 17:39:54.561507 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:39:54 crc kubenswrapper[4558]: I0120 17:39:54.588862 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/cinder-api-0" podStartSLOduration=3.588846479 podStartE2EDuration="3.588846479s" podCreationTimestamp="2026-01-20 17:39:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:39:54.588207137 +0000 UTC m=+3488.348545104" watchObservedRunningTime="2026-01-20 17:39:54.588846479 +0000 UTC m=+3488.349184445" Jan 20 17:39:54 crc kubenswrapper[4558]: I0120 17:39:54.590747 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:39:54 crc kubenswrapper[4558]: I0120 17:39:54.590788 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:39:54 crc kubenswrapper[4558]: I0120 17:39:54.621640 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/keystone-7dffbf585-vg2sk" podStartSLOduration=2.621623187 podStartE2EDuration="2.621623187s" podCreationTimestamp="2026-01-20 17:39:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:39:54.615262607 +0000 UTC m=+3488.375600574" watchObservedRunningTime="2026-01-20 
17:39:54.621623187 +0000 UTC m=+3488.381961154" Jan 20 17:39:54 crc kubenswrapper[4558]: I0120 17:39:54.630817 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:39:54 crc kubenswrapper[4558]: I0120 17:39:54.631270 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:39:54 crc kubenswrapper[4558]: I0120 17:39:54.655453 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-api-6575ff8c88-kqv4m"] Jan 20 17:39:54 crc kubenswrapper[4558]: I0120 17:39:54.993997 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-db-sync-fv6q5" Jan 20 17:39:55 crc kubenswrapper[4558]: I0120 17:39:55.090023 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/ae3119fb-1355-4da8-a005-3610a1733b90-config\") pod \"ae3119fb-1355-4da8-a005-3610a1733b90\" (UID: \"ae3119fb-1355-4da8-a005-3610a1733b90\") " Jan 20 17:39:55 crc kubenswrapper[4558]: I0120 17:39:55.090152 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m48bs\" (UniqueName: \"kubernetes.io/projected/ae3119fb-1355-4da8-a005-3610a1733b90-kube-api-access-m48bs\") pod \"ae3119fb-1355-4da8-a005-3610a1733b90\" (UID: \"ae3119fb-1355-4da8-a005-3610a1733b90\") " Jan 20 17:39:55 crc kubenswrapper[4558]: I0120 17:39:55.090268 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae3119fb-1355-4da8-a005-3610a1733b90-combined-ca-bundle\") pod \"ae3119fb-1355-4da8-a005-3610a1733b90\" (UID: \"ae3119fb-1355-4da8-a005-3610a1733b90\") " Jan 20 17:39:55 crc kubenswrapper[4558]: I0120 17:39:55.097477 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ae3119fb-1355-4da8-a005-3610a1733b90-kube-api-access-m48bs" (OuterVolumeSpecName: "kube-api-access-m48bs") pod "ae3119fb-1355-4da8-a005-3610a1733b90" (UID: "ae3119fb-1355-4da8-a005-3610a1733b90"). InnerVolumeSpecName "kube-api-access-m48bs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:39:55 crc kubenswrapper[4558]: I0120 17:39:55.128149 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae3119fb-1355-4da8-a005-3610a1733b90-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ae3119fb-1355-4da8-a005-3610a1733b90" (UID: "ae3119fb-1355-4da8-a005-3610a1733b90"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:39:55 crc kubenswrapper[4558]: I0120 17:39:55.136510 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae3119fb-1355-4da8-a005-3610a1733b90-config" (OuterVolumeSpecName: "config") pod "ae3119fb-1355-4da8-a005-3610a1733b90" (UID: "ae3119fb-1355-4da8-a005-3610a1733b90"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:39:55 crc kubenswrapper[4558]: I0120 17:39:55.193298 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m48bs\" (UniqueName: \"kubernetes.io/projected/ae3119fb-1355-4da8-a005-3610a1733b90-kube-api-access-m48bs\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:55 crc kubenswrapper[4558]: I0120 17:39:55.193413 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae3119fb-1355-4da8-a005-3610a1733b90-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:55 crc kubenswrapper[4558]: I0120 17:39:55.193469 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/ae3119fb-1355-4da8-a005-3610a1733b90-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:55 crc kubenswrapper[4558]: I0120 17:39:55.575640 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-sync-fv6q5" event={"ID":"ae3119fb-1355-4da8-a005-3610a1733b90","Type":"ContainerDied","Data":"7d77a5a90bafb070ab764dc73ad08392d0e8fca1442bd472a3965e3294e4bcc1"} Jan 20 17:39:55 crc kubenswrapper[4558]: I0120 17:39:55.576871 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7d77a5a90bafb070ab764dc73ad08392d0e8fca1442bd472a3965e3294e4bcc1" Jan 20 17:39:55 crc kubenswrapper[4558]: I0120 17:39:55.577003 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-db-sync-fv6q5" Jan 20 17:39:55 crc kubenswrapper[4558]: I0120 17:39:55.590340 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-6575ff8c88-kqv4m" event={"ID":"f6d8d5b2-af92-43f7-ad6f-74ac3121776c","Type":"ContainerStarted","Data":"d7d531af3aef1970f34aacda0d423e7527ba5090f37fc6e8a3ca8f6d6677a41c"} Jan 20 17:39:55 crc kubenswrapper[4558]: I0120 17:39:55.590410 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-6575ff8c88-kqv4m" event={"ID":"f6d8d5b2-af92-43f7-ad6f-74ac3121776c","Type":"ContainerStarted","Data":"7f70312ea849351c5fb0d6a0dd1c2e2239b767de0d5e6cefd7d82f31ec5e5f4b"} Jan 20 17:39:55 crc kubenswrapper[4558]: I0120 17:39:55.590423 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-6575ff8c88-kqv4m" event={"ID":"f6d8d5b2-af92-43f7-ad6f-74ac3121776c","Type":"ContainerStarted","Data":"7659e7aeec6184c025b64668f5ed1fc3170af02df30635a3254681e913a849e1"} Jan 20 17:39:55 crc kubenswrapper[4558]: I0120 17:39:55.590893 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:39:55 crc kubenswrapper[4558]: I0120 17:39:55.590921 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/keystone-7dffbf585-vg2sk" Jan 20 17:39:55 crc kubenswrapper[4558]: I0120 17:39:55.590932 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:39:55 crc kubenswrapper[4558]: I0120 17:39:55.590943 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/barbican-api-6575ff8c88-kqv4m" Jan 20 17:39:55 crc kubenswrapper[4558]: I0120 17:39:55.591015 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/barbican-api-6575ff8c88-kqv4m" Jan 20 17:39:55 crc 
kubenswrapper[4558]: I0120 17:39:55.615371 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-api-6575ff8c88-kqv4m" podStartSLOduration=2.615350833 podStartE2EDuration="2.615350833s" podCreationTimestamp="2026-01-20 17:39:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:39:55.606380637 +0000 UTC m=+3489.366718604" watchObservedRunningTime="2026-01-20 17:39:55.615350833 +0000 UTC m=+3489.375688800" Jan 20 17:39:56 crc kubenswrapper[4558]: I0120 17:39:56.013193 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-6ddffc77f7-gjhlj"] Jan 20 17:39:56 crc kubenswrapper[4558]: E0120 17:39:56.013815 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae3119fb-1355-4da8-a005-3610a1733b90" containerName="neutron-db-sync" Jan 20 17:39:56 crc kubenswrapper[4558]: I0120 17:39:56.013834 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae3119fb-1355-4da8-a005-3610a1733b90" containerName="neutron-db-sync" Jan 20 17:39:56 crc kubenswrapper[4558]: I0120 17:39:56.014057 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ae3119fb-1355-4da8-a005-3610a1733b90" containerName="neutron-db-sync" Jan 20 17:39:56 crc kubenswrapper[4558]: I0120 17:39:56.014991 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-6ddffc77f7-gjhlj" Jan 20 17:39:56 crc kubenswrapper[4558]: I0120 17:39:56.020874 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-httpd-config" Jan 20 17:39:56 crc kubenswrapper[4558]: I0120 17:39:56.021038 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-neutron-dockercfg-dz7n9" Jan 20 17:39:56 crc kubenswrapper[4558]: I0120 17:39:56.021152 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-neutron-ovndbs" Jan 20 17:39:56 crc kubenswrapper[4558]: I0120 17:39:56.021292 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-config" Jan 20 17:39:56 crc kubenswrapper[4558]: I0120 17:39:56.045726 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-6ddffc77f7-gjhlj"] Jan 20 17:39:56 crc kubenswrapper[4558]: I0120 17:39:56.115230 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/3a66aaee-5aa8-4eb9-bda7-d66d0e98080a-ovndb-tls-certs\") pod \"neutron-6ddffc77f7-gjhlj\" (UID: \"3a66aaee-5aa8-4eb9-bda7-d66d0e98080a\") " pod="openstack-kuttl-tests/neutron-6ddffc77f7-gjhlj" Jan 20 17:39:56 crc kubenswrapper[4558]: I0120 17:39:56.115302 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l2hpk\" (UniqueName: \"kubernetes.io/projected/3a66aaee-5aa8-4eb9-bda7-d66d0e98080a-kube-api-access-l2hpk\") pod \"neutron-6ddffc77f7-gjhlj\" (UID: \"3a66aaee-5aa8-4eb9-bda7-d66d0e98080a\") " pod="openstack-kuttl-tests/neutron-6ddffc77f7-gjhlj" Jan 20 17:39:56 crc kubenswrapper[4558]: I0120 17:39:56.115350 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/3a66aaee-5aa8-4eb9-bda7-d66d0e98080a-httpd-config\") pod \"neutron-6ddffc77f7-gjhlj\" (UID: 
\"3a66aaee-5aa8-4eb9-bda7-d66d0e98080a\") " pod="openstack-kuttl-tests/neutron-6ddffc77f7-gjhlj" Jan 20 17:39:56 crc kubenswrapper[4558]: I0120 17:39:56.115482 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a66aaee-5aa8-4eb9-bda7-d66d0e98080a-combined-ca-bundle\") pod \"neutron-6ddffc77f7-gjhlj\" (UID: \"3a66aaee-5aa8-4eb9-bda7-d66d0e98080a\") " pod="openstack-kuttl-tests/neutron-6ddffc77f7-gjhlj" Jan 20 17:39:56 crc kubenswrapper[4558]: I0120 17:39:56.115507 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/3a66aaee-5aa8-4eb9-bda7-d66d0e98080a-config\") pod \"neutron-6ddffc77f7-gjhlj\" (UID: \"3a66aaee-5aa8-4eb9-bda7-d66d0e98080a\") " pod="openstack-kuttl-tests/neutron-6ddffc77f7-gjhlj" Jan 20 17:39:56 crc kubenswrapper[4558]: I0120 17:39:56.217075 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/3a66aaee-5aa8-4eb9-bda7-d66d0e98080a-httpd-config\") pod \"neutron-6ddffc77f7-gjhlj\" (UID: \"3a66aaee-5aa8-4eb9-bda7-d66d0e98080a\") " pod="openstack-kuttl-tests/neutron-6ddffc77f7-gjhlj" Jan 20 17:39:56 crc kubenswrapper[4558]: I0120 17:39:56.217283 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a66aaee-5aa8-4eb9-bda7-d66d0e98080a-combined-ca-bundle\") pod \"neutron-6ddffc77f7-gjhlj\" (UID: \"3a66aaee-5aa8-4eb9-bda7-d66d0e98080a\") " pod="openstack-kuttl-tests/neutron-6ddffc77f7-gjhlj" Jan 20 17:39:56 crc kubenswrapper[4558]: I0120 17:39:56.217308 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/3a66aaee-5aa8-4eb9-bda7-d66d0e98080a-config\") pod \"neutron-6ddffc77f7-gjhlj\" (UID: \"3a66aaee-5aa8-4eb9-bda7-d66d0e98080a\") " pod="openstack-kuttl-tests/neutron-6ddffc77f7-gjhlj" Jan 20 17:39:56 crc kubenswrapper[4558]: I0120 17:39:56.217373 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/3a66aaee-5aa8-4eb9-bda7-d66d0e98080a-ovndb-tls-certs\") pod \"neutron-6ddffc77f7-gjhlj\" (UID: \"3a66aaee-5aa8-4eb9-bda7-d66d0e98080a\") " pod="openstack-kuttl-tests/neutron-6ddffc77f7-gjhlj" Jan 20 17:39:56 crc kubenswrapper[4558]: I0120 17:39:56.217428 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l2hpk\" (UniqueName: \"kubernetes.io/projected/3a66aaee-5aa8-4eb9-bda7-d66d0e98080a-kube-api-access-l2hpk\") pod \"neutron-6ddffc77f7-gjhlj\" (UID: \"3a66aaee-5aa8-4eb9-bda7-d66d0e98080a\") " pod="openstack-kuttl-tests/neutron-6ddffc77f7-gjhlj" Jan 20 17:39:56 crc kubenswrapper[4558]: I0120 17:39:56.222408 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/3a66aaee-5aa8-4eb9-bda7-d66d0e98080a-httpd-config\") pod \"neutron-6ddffc77f7-gjhlj\" (UID: \"3a66aaee-5aa8-4eb9-bda7-d66d0e98080a\") " pod="openstack-kuttl-tests/neutron-6ddffc77f7-gjhlj" Jan 20 17:39:56 crc kubenswrapper[4558]: I0120 17:39:56.225824 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a66aaee-5aa8-4eb9-bda7-d66d0e98080a-combined-ca-bundle\") pod \"neutron-6ddffc77f7-gjhlj\" (UID: 
\"3a66aaee-5aa8-4eb9-bda7-d66d0e98080a\") " pod="openstack-kuttl-tests/neutron-6ddffc77f7-gjhlj" Jan 20 17:39:56 crc kubenswrapper[4558]: I0120 17:39:56.233570 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/3a66aaee-5aa8-4eb9-bda7-d66d0e98080a-ovndb-tls-certs\") pod \"neutron-6ddffc77f7-gjhlj\" (UID: \"3a66aaee-5aa8-4eb9-bda7-d66d0e98080a\") " pod="openstack-kuttl-tests/neutron-6ddffc77f7-gjhlj" Jan 20 17:39:56 crc kubenswrapper[4558]: I0120 17:39:56.233871 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/3a66aaee-5aa8-4eb9-bda7-d66d0e98080a-config\") pod \"neutron-6ddffc77f7-gjhlj\" (UID: \"3a66aaee-5aa8-4eb9-bda7-d66d0e98080a\") " pod="openstack-kuttl-tests/neutron-6ddffc77f7-gjhlj" Jan 20 17:39:56 crc kubenswrapper[4558]: I0120 17:39:56.238119 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l2hpk\" (UniqueName: \"kubernetes.io/projected/3a66aaee-5aa8-4eb9-bda7-d66d0e98080a-kube-api-access-l2hpk\") pod \"neutron-6ddffc77f7-gjhlj\" (UID: \"3a66aaee-5aa8-4eb9-bda7-d66d0e98080a\") " pod="openstack-kuttl-tests/neutron-6ddffc77f7-gjhlj" Jan 20 17:39:56 crc kubenswrapper[4558]: I0120 17:39:56.340845 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-6ddffc77f7-gjhlj" Jan 20 17:39:56 crc kubenswrapper[4558]: I0120 17:39:56.498560 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:39:56 crc kubenswrapper[4558]: I0120 17:39:56.610132 4558 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 20 17:39:56 crc kubenswrapper[4558]: I0120 17:39:56.693988 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:39:56 crc kubenswrapper[4558]: I0120 17:39:56.729003 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:39:56 crc kubenswrapper[4558]: I0120 17:39:56.781870 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-6ddffc77f7-gjhlj"] Jan 20 17:39:56 crc kubenswrapper[4558]: W0120 17:39:56.785043 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3a66aaee_5aa8_4eb9_bda7_d66d0e98080a.slice/crio-52af256f1f87c57a8177b8efb5db7d22207fabc65efdd122338fe09e120668c5 WatchSource:0}: Error finding container 52af256f1f87c57a8177b8efb5db7d22207fabc65efdd122338fe09e120668c5: Status 404 returned error can't find the container with id 52af256f1f87c57a8177b8efb5db7d22207fabc65efdd122338fe09e120668c5 Jan 20 17:39:56 crc kubenswrapper[4558]: I0120 17:39:56.931022 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:39:57 crc kubenswrapper[4558]: I0120 17:39:57.329612 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 17:39:57 crc kubenswrapper[4558]: I0120 17:39:57.329946 4558 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 17:39:57 crc kubenswrapper[4558]: I0120 17:39:57.619135 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-6ddffc77f7-gjhlj" event={"ID":"3a66aaee-5aa8-4eb9-bda7-d66d0e98080a","Type":"ContainerStarted","Data":"cc9c674e9ff89ec151bce4ce08e91b67b59f3d76de81cda809415d3d20cf9b1d"} Jan 20 17:39:57 crc kubenswrapper[4558]: I0120 17:39:57.619226 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-6ddffc77f7-gjhlj" event={"ID":"3a66aaee-5aa8-4eb9-bda7-d66d0e98080a","Type":"ContainerStarted","Data":"80ecb05a176d9dd2c2ba2cc06f79d46cd7281bd40c6011f536821eb2b2fed033"} Jan 20 17:39:57 crc kubenswrapper[4558]: I0120 17:39:57.619242 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-6ddffc77f7-gjhlj" event={"ID":"3a66aaee-5aa8-4eb9-bda7-d66d0e98080a","Type":"ContainerStarted","Data":"52af256f1f87c57a8177b8efb5db7d22207fabc65efdd122338fe09e120668c5"} Jan 20 17:39:57 crc kubenswrapper[4558]: I0120 17:39:57.619262 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-scheduler-0" podUID="81080ef7-fd3d-487f-b6aa-0fb9eaebc855" containerName="cinder-scheduler" containerID="cri-o://983340e3c0b3a463b995c14657063c9f1a54b71dae69af7f610060ce42550225" gracePeriod=30 Jan 20 17:39:57 crc kubenswrapper[4558]: I0120 17:39:57.619287 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-scheduler-0" podUID="81080ef7-fd3d-487f-b6aa-0fb9eaebc855" containerName="probe" containerID="cri-o://d0baa8aa43ffe509db57324997a20747c31164ce17f510abfb37beba893c7f2e" gracePeriod=30 Jan 20 17:39:57 crc kubenswrapper[4558]: I0120 17:39:57.638850 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/neutron-6ddffc77f7-gjhlj" podStartSLOduration=2.638830278 podStartE2EDuration="2.638830278s" podCreationTimestamp="2026-01-20 17:39:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:39:57.636925959 +0000 UTC m=+3491.397263925" watchObservedRunningTime="2026-01-20 17:39:57.638830278 +0000 UTC m=+3491.399168245" Jan 20 17:39:57 crc kubenswrapper[4558]: I0120 17:39:57.691227 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:39:57 crc kubenswrapper[4558]: I0120 17:39:57.691313 4558 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 20 17:39:57 crc kubenswrapper[4558]: I0120 17:39:57.698232 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:39:58 crc kubenswrapper[4558]: I0120 17:39:58.060548 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-5c4dc89b5d-qp5xf"] Jan 20 17:39:58 crc kubenswrapper[4558]: I0120 17:39:58.062037 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-5c4dc89b5d-qp5xf" Jan 20 17:39:58 crc kubenswrapper[4558]: I0120 17:39:58.064931 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-neutron-internal-svc" Jan 20 17:39:58 crc kubenswrapper[4558]: I0120 17:39:58.064967 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-neutron-public-svc" Jan 20 17:39:58 crc kubenswrapper[4558]: I0120 17:39:58.077059 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-5c4dc89b5d-qp5xf"] Jan 20 17:39:58 crc kubenswrapper[4558]: I0120 17:39:58.161320 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/7e627450-8edc-44bd-805d-b0034178dff2-httpd-config\") pod \"neutron-5c4dc89b5d-qp5xf\" (UID: \"7e627450-8edc-44bd-805d-b0034178dff2\") " pod="openstack-kuttl-tests/neutron-5c4dc89b5d-qp5xf" Jan 20 17:39:58 crc kubenswrapper[4558]: I0120 17:39:58.161522 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7e627450-8edc-44bd-805d-b0034178dff2-public-tls-certs\") pod \"neutron-5c4dc89b5d-qp5xf\" (UID: \"7e627450-8edc-44bd-805d-b0034178dff2\") " pod="openstack-kuttl-tests/neutron-5c4dc89b5d-qp5xf" Jan 20 17:39:58 crc kubenswrapper[4558]: I0120 17:39:58.161609 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7e627450-8edc-44bd-805d-b0034178dff2-internal-tls-certs\") pod \"neutron-5c4dc89b5d-qp5xf\" (UID: \"7e627450-8edc-44bd-805d-b0034178dff2\") " pod="openstack-kuttl-tests/neutron-5c4dc89b5d-qp5xf" Jan 20 17:39:58 crc kubenswrapper[4558]: I0120 17:39:58.161696 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/7e627450-8edc-44bd-805d-b0034178dff2-ovndb-tls-certs\") pod \"neutron-5c4dc89b5d-qp5xf\" (UID: \"7e627450-8edc-44bd-805d-b0034178dff2\") " pod="openstack-kuttl-tests/neutron-5c4dc89b5d-qp5xf" Jan 20 17:39:58 crc kubenswrapper[4558]: I0120 17:39:58.161780 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/7e627450-8edc-44bd-805d-b0034178dff2-config\") pod \"neutron-5c4dc89b5d-qp5xf\" (UID: \"7e627450-8edc-44bd-805d-b0034178dff2\") " pod="openstack-kuttl-tests/neutron-5c4dc89b5d-qp5xf" Jan 20 17:39:58 crc kubenswrapper[4558]: I0120 17:39:58.161897 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e627450-8edc-44bd-805d-b0034178dff2-combined-ca-bundle\") pod \"neutron-5c4dc89b5d-qp5xf\" (UID: \"7e627450-8edc-44bd-805d-b0034178dff2\") " pod="openstack-kuttl-tests/neutron-5c4dc89b5d-qp5xf" Jan 20 17:39:58 crc kubenswrapper[4558]: I0120 17:39:58.161996 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8d5jv\" (UniqueName: \"kubernetes.io/projected/7e627450-8edc-44bd-805d-b0034178dff2-kube-api-access-8d5jv\") pod \"neutron-5c4dc89b5d-qp5xf\" (UID: \"7e627450-8edc-44bd-805d-b0034178dff2\") " pod="openstack-kuttl-tests/neutron-5c4dc89b5d-qp5xf" Jan 20 17:39:58 crc kubenswrapper[4558]: I0120 
17:39:58.263746 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e627450-8edc-44bd-805d-b0034178dff2-combined-ca-bundle\") pod \"neutron-5c4dc89b5d-qp5xf\" (UID: \"7e627450-8edc-44bd-805d-b0034178dff2\") " pod="openstack-kuttl-tests/neutron-5c4dc89b5d-qp5xf" Jan 20 17:39:58 crc kubenswrapper[4558]: I0120 17:39:58.263828 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8d5jv\" (UniqueName: \"kubernetes.io/projected/7e627450-8edc-44bd-805d-b0034178dff2-kube-api-access-8d5jv\") pod \"neutron-5c4dc89b5d-qp5xf\" (UID: \"7e627450-8edc-44bd-805d-b0034178dff2\") " pod="openstack-kuttl-tests/neutron-5c4dc89b5d-qp5xf" Jan 20 17:39:58 crc kubenswrapper[4558]: I0120 17:39:58.263875 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/7e627450-8edc-44bd-805d-b0034178dff2-httpd-config\") pod \"neutron-5c4dc89b5d-qp5xf\" (UID: \"7e627450-8edc-44bd-805d-b0034178dff2\") " pod="openstack-kuttl-tests/neutron-5c4dc89b5d-qp5xf" Jan 20 17:39:58 crc kubenswrapper[4558]: I0120 17:39:58.263905 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7e627450-8edc-44bd-805d-b0034178dff2-public-tls-certs\") pod \"neutron-5c4dc89b5d-qp5xf\" (UID: \"7e627450-8edc-44bd-805d-b0034178dff2\") " pod="openstack-kuttl-tests/neutron-5c4dc89b5d-qp5xf" Jan 20 17:39:58 crc kubenswrapper[4558]: I0120 17:39:58.263930 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7e627450-8edc-44bd-805d-b0034178dff2-internal-tls-certs\") pod \"neutron-5c4dc89b5d-qp5xf\" (UID: \"7e627450-8edc-44bd-805d-b0034178dff2\") " pod="openstack-kuttl-tests/neutron-5c4dc89b5d-qp5xf" Jan 20 17:39:58 crc kubenswrapper[4558]: I0120 17:39:58.263946 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/7e627450-8edc-44bd-805d-b0034178dff2-ovndb-tls-certs\") pod \"neutron-5c4dc89b5d-qp5xf\" (UID: \"7e627450-8edc-44bd-805d-b0034178dff2\") " pod="openstack-kuttl-tests/neutron-5c4dc89b5d-qp5xf" Jan 20 17:39:58 crc kubenswrapper[4558]: I0120 17:39:58.263968 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/7e627450-8edc-44bd-805d-b0034178dff2-config\") pod \"neutron-5c4dc89b5d-qp5xf\" (UID: \"7e627450-8edc-44bd-805d-b0034178dff2\") " pod="openstack-kuttl-tests/neutron-5c4dc89b5d-qp5xf" Jan 20 17:39:58 crc kubenswrapper[4558]: I0120 17:39:58.269410 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7e627450-8edc-44bd-805d-b0034178dff2-public-tls-certs\") pod \"neutron-5c4dc89b5d-qp5xf\" (UID: \"7e627450-8edc-44bd-805d-b0034178dff2\") " pod="openstack-kuttl-tests/neutron-5c4dc89b5d-qp5xf" Jan 20 17:39:58 crc kubenswrapper[4558]: I0120 17:39:58.269896 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/7e627450-8edc-44bd-805d-b0034178dff2-httpd-config\") pod \"neutron-5c4dc89b5d-qp5xf\" (UID: \"7e627450-8edc-44bd-805d-b0034178dff2\") " pod="openstack-kuttl-tests/neutron-5c4dc89b5d-qp5xf" Jan 20 17:39:58 crc kubenswrapper[4558]: I0120 17:39:58.272189 4558 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/7e627450-8edc-44bd-805d-b0034178dff2-ovndb-tls-certs\") pod \"neutron-5c4dc89b5d-qp5xf\" (UID: \"7e627450-8edc-44bd-805d-b0034178dff2\") " pod="openstack-kuttl-tests/neutron-5c4dc89b5d-qp5xf" Jan 20 17:39:58 crc kubenswrapper[4558]: I0120 17:39:58.276773 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e627450-8edc-44bd-805d-b0034178dff2-combined-ca-bundle\") pod \"neutron-5c4dc89b5d-qp5xf\" (UID: \"7e627450-8edc-44bd-805d-b0034178dff2\") " pod="openstack-kuttl-tests/neutron-5c4dc89b5d-qp5xf" Jan 20 17:39:58 crc kubenswrapper[4558]: I0120 17:39:58.277231 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7e627450-8edc-44bd-805d-b0034178dff2-internal-tls-certs\") pod \"neutron-5c4dc89b5d-qp5xf\" (UID: \"7e627450-8edc-44bd-805d-b0034178dff2\") " pod="openstack-kuttl-tests/neutron-5c4dc89b5d-qp5xf" Jan 20 17:39:58 crc kubenswrapper[4558]: I0120 17:39:58.280943 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/7e627450-8edc-44bd-805d-b0034178dff2-config\") pod \"neutron-5c4dc89b5d-qp5xf\" (UID: \"7e627450-8edc-44bd-805d-b0034178dff2\") " pod="openstack-kuttl-tests/neutron-5c4dc89b5d-qp5xf" Jan 20 17:39:58 crc kubenswrapper[4558]: I0120 17:39:58.284806 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8d5jv\" (UniqueName: \"kubernetes.io/projected/7e627450-8edc-44bd-805d-b0034178dff2-kube-api-access-8d5jv\") pod \"neutron-5c4dc89b5d-qp5xf\" (UID: \"7e627450-8edc-44bd-805d-b0034178dff2\") " pod="openstack-kuttl-tests/neutron-5c4dc89b5d-qp5xf" Jan 20 17:39:58 crc kubenswrapper[4558]: I0120 17:39:58.376590 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-5c4dc89b5d-qp5xf" Jan 20 17:39:58 crc kubenswrapper[4558]: I0120 17:39:58.648596 4558 generic.go:334] "Generic (PLEG): container finished" podID="81080ef7-fd3d-487f-b6aa-0fb9eaebc855" containerID="d0baa8aa43ffe509db57324997a20747c31164ce17f510abfb37beba893c7f2e" exitCode=0 Jan 20 17:39:58 crc kubenswrapper[4558]: I0120 17:39:58.648762 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"81080ef7-fd3d-487f-b6aa-0fb9eaebc855","Type":"ContainerDied","Data":"d0baa8aa43ffe509db57324997a20747c31164ce17f510abfb37beba893c7f2e"} Jan 20 17:39:58 crc kubenswrapper[4558]: I0120 17:39:58.650710 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/neutron-6ddffc77f7-gjhlj" Jan 20 17:39:58 crc kubenswrapper[4558]: I0120 17:39:58.834761 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-5c4dc89b5d-qp5xf"] Jan 20 17:39:59 crc kubenswrapper[4558]: I0120 17:39:59.627346 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:39:59 crc kubenswrapper[4558]: I0120 17:39:59.702876 4558 generic.go:334] "Generic (PLEG): container finished" podID="81080ef7-fd3d-487f-b6aa-0fb9eaebc855" containerID="983340e3c0b3a463b995c14657063c9f1a54b71dae69af7f610060ce42550225" exitCode=0 Jan 20 17:39:59 crc kubenswrapper[4558]: I0120 17:39:59.702964 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"81080ef7-fd3d-487f-b6aa-0fb9eaebc855","Type":"ContainerDied","Data":"983340e3c0b3a463b995c14657063c9f1a54b71dae69af7f610060ce42550225"} Jan 20 17:39:59 crc kubenswrapper[4558]: I0120 17:39:59.702998 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"81080ef7-fd3d-487f-b6aa-0fb9eaebc855","Type":"ContainerDied","Data":"78f5b8223baae1365eb10c24bd5fee398f1120188db2f2929d2b878c06d4e9b9"} Jan 20 17:39:59 crc kubenswrapper[4558]: I0120 17:39:59.703015 4558 scope.go:117] "RemoveContainer" containerID="d0baa8aa43ffe509db57324997a20747c31164ce17f510abfb37beba893c7f2e" Jan 20 17:39:59 crc kubenswrapper[4558]: I0120 17:39:59.703156 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:39:59 crc kubenswrapper[4558]: I0120 17:39:59.711205 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-98t2c\" (UniqueName: \"kubernetes.io/projected/81080ef7-fd3d-487f-b6aa-0fb9eaebc855-kube-api-access-98t2c\") pod \"81080ef7-fd3d-487f-b6aa-0fb9eaebc855\" (UID: \"81080ef7-fd3d-487f-b6aa-0fb9eaebc855\") " Jan 20 17:39:59 crc kubenswrapper[4558]: I0120 17:39:59.711264 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/81080ef7-fd3d-487f-b6aa-0fb9eaebc855-config-data-custom\") pod \"81080ef7-fd3d-487f-b6aa-0fb9eaebc855\" (UID: \"81080ef7-fd3d-487f-b6aa-0fb9eaebc855\") " Jan 20 17:39:59 crc kubenswrapper[4558]: I0120 17:39:59.711462 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/81080ef7-fd3d-487f-b6aa-0fb9eaebc855-scripts\") pod \"81080ef7-fd3d-487f-b6aa-0fb9eaebc855\" (UID: \"81080ef7-fd3d-487f-b6aa-0fb9eaebc855\") " Jan 20 17:39:59 crc kubenswrapper[4558]: I0120 17:39:59.711507 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81080ef7-fd3d-487f-b6aa-0fb9eaebc855-combined-ca-bundle\") pod \"81080ef7-fd3d-487f-b6aa-0fb9eaebc855\" (UID: \"81080ef7-fd3d-487f-b6aa-0fb9eaebc855\") " Jan 20 17:39:59 crc kubenswrapper[4558]: I0120 17:39:59.711557 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81080ef7-fd3d-487f-b6aa-0fb9eaebc855-config-data\") pod \"81080ef7-fd3d-487f-b6aa-0fb9eaebc855\" (UID: \"81080ef7-fd3d-487f-b6aa-0fb9eaebc855\") " Jan 20 17:39:59 crc kubenswrapper[4558]: I0120 17:39:59.711605 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/81080ef7-fd3d-487f-b6aa-0fb9eaebc855-etc-machine-id\") pod \"81080ef7-fd3d-487f-b6aa-0fb9eaebc855\" (UID: \"81080ef7-fd3d-487f-b6aa-0fb9eaebc855\") " Jan 20 17:39:59 crc kubenswrapper[4558]: I0120 17:39:59.712394 4558 operation_generator.go:803] 
UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/81080ef7-fd3d-487f-b6aa-0fb9eaebc855-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "81080ef7-fd3d-487f-b6aa-0fb9eaebc855" (UID: "81080ef7-fd3d-487f-b6aa-0fb9eaebc855"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:39:59 crc kubenswrapper[4558]: I0120 17:39:59.739511 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-5c4dc89b5d-qp5xf" event={"ID":"7e627450-8edc-44bd-805d-b0034178dff2","Type":"ContainerStarted","Data":"5730181f255b51b17148b66b915f2f8f31d4d16baed50889d862d0d3799b5c21"} Jan 20 17:39:59 crc kubenswrapper[4558]: I0120 17:39:59.739567 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-5c4dc89b5d-qp5xf" event={"ID":"7e627450-8edc-44bd-805d-b0034178dff2","Type":"ContainerStarted","Data":"ada568b4f4a7ec3082384c4ebb4a4d6e1590ad65456658486086d4be942b31b7"} Jan 20 17:39:59 crc kubenswrapper[4558]: I0120 17:39:59.739579 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-5c4dc89b5d-qp5xf" event={"ID":"7e627450-8edc-44bd-805d-b0034178dff2","Type":"ContainerStarted","Data":"6d63a2e58dd9a4337b32860657909c10d07d29131a229a2cdf7cba461a126348"} Jan 20 17:39:59 crc kubenswrapper[4558]: I0120 17:39:59.739615 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/neutron-5c4dc89b5d-qp5xf" Jan 20 17:39:59 crc kubenswrapper[4558]: I0120 17:39:59.743258 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/81080ef7-fd3d-487f-b6aa-0fb9eaebc855-scripts" (OuterVolumeSpecName: "scripts") pod "81080ef7-fd3d-487f-b6aa-0fb9eaebc855" (UID: "81080ef7-fd3d-487f-b6aa-0fb9eaebc855"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:39:59 crc kubenswrapper[4558]: I0120 17:39:59.760373 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/81080ef7-fd3d-487f-b6aa-0fb9eaebc855-kube-api-access-98t2c" (OuterVolumeSpecName: "kube-api-access-98t2c") pod "81080ef7-fd3d-487f-b6aa-0fb9eaebc855" (UID: "81080ef7-fd3d-487f-b6aa-0fb9eaebc855"). InnerVolumeSpecName "kube-api-access-98t2c". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:39:59 crc kubenswrapper[4558]: I0120 17:39:59.760556 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/81080ef7-fd3d-487f-b6aa-0fb9eaebc855-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "81080ef7-fd3d-487f-b6aa-0fb9eaebc855" (UID: "81080ef7-fd3d-487f-b6aa-0fb9eaebc855"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:39:59 crc kubenswrapper[4558]: I0120 17:39:59.816094 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-98t2c\" (UniqueName: \"kubernetes.io/projected/81080ef7-fd3d-487f-b6aa-0fb9eaebc855-kube-api-access-98t2c\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:59 crc kubenswrapper[4558]: I0120 17:39:59.816124 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/81080ef7-fd3d-487f-b6aa-0fb9eaebc855-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:59 crc kubenswrapper[4558]: I0120 17:39:59.816134 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/81080ef7-fd3d-487f-b6aa-0fb9eaebc855-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:59 crc kubenswrapper[4558]: I0120 17:39:59.816143 4558 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/81080ef7-fd3d-487f-b6aa-0fb9eaebc855-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:59 crc kubenswrapper[4558]: I0120 17:39:59.843343 4558 scope.go:117] "RemoveContainer" containerID="983340e3c0b3a463b995c14657063c9f1a54b71dae69af7f610060ce42550225" Jan 20 17:39:59 crc kubenswrapper[4558]: I0120 17:39:59.873261 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/81080ef7-fd3d-487f-b6aa-0fb9eaebc855-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "81080ef7-fd3d-487f-b6aa-0fb9eaebc855" (UID: "81080ef7-fd3d-487f-b6aa-0fb9eaebc855"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:39:59 crc kubenswrapper[4558]: I0120 17:39:59.877813 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/81080ef7-fd3d-487f-b6aa-0fb9eaebc855-config-data" (OuterVolumeSpecName: "config-data") pod "81080ef7-fd3d-487f-b6aa-0fb9eaebc855" (UID: "81080ef7-fd3d-487f-b6aa-0fb9eaebc855"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:39:59 crc kubenswrapper[4558]: I0120 17:39:59.923137 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/81080ef7-fd3d-487f-b6aa-0fb9eaebc855-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:59 crc kubenswrapper[4558]: I0120 17:39:59.923190 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/81080ef7-fd3d-487f-b6aa-0fb9eaebc855-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:39:59 crc kubenswrapper[4558]: I0120 17:39:59.935663 4558 scope.go:117] "RemoveContainer" containerID="d0baa8aa43ffe509db57324997a20747c31164ce17f510abfb37beba893c7f2e" Jan 20 17:39:59 crc kubenswrapper[4558]: E0120 17:39:59.936546 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d0baa8aa43ffe509db57324997a20747c31164ce17f510abfb37beba893c7f2e\": container with ID starting with d0baa8aa43ffe509db57324997a20747c31164ce17f510abfb37beba893c7f2e not found: ID does not exist" containerID="d0baa8aa43ffe509db57324997a20747c31164ce17f510abfb37beba893c7f2e" Jan 20 17:39:59 crc kubenswrapper[4558]: I0120 17:39:59.936664 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d0baa8aa43ffe509db57324997a20747c31164ce17f510abfb37beba893c7f2e"} err="failed to get container status \"d0baa8aa43ffe509db57324997a20747c31164ce17f510abfb37beba893c7f2e\": rpc error: code = NotFound desc = could not find container \"d0baa8aa43ffe509db57324997a20747c31164ce17f510abfb37beba893c7f2e\": container with ID starting with d0baa8aa43ffe509db57324997a20747c31164ce17f510abfb37beba893c7f2e not found: ID does not exist" Jan 20 17:39:59 crc kubenswrapper[4558]: I0120 17:39:59.936739 4558 scope.go:117] "RemoveContainer" containerID="983340e3c0b3a463b995c14657063c9f1a54b71dae69af7f610060ce42550225" Jan 20 17:39:59 crc kubenswrapper[4558]: E0120 17:39:59.940442 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"983340e3c0b3a463b995c14657063c9f1a54b71dae69af7f610060ce42550225\": container with ID starting with 983340e3c0b3a463b995c14657063c9f1a54b71dae69af7f610060ce42550225 not found: ID does not exist" containerID="983340e3c0b3a463b995c14657063c9f1a54b71dae69af7f610060ce42550225" Jan 20 17:39:59 crc kubenswrapper[4558]: I0120 17:39:59.940485 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"983340e3c0b3a463b995c14657063c9f1a54b71dae69af7f610060ce42550225"} err="failed to get container status \"983340e3c0b3a463b995c14657063c9f1a54b71dae69af7f610060ce42550225\": rpc error: code = NotFound desc = could not find container \"983340e3c0b3a463b995c14657063c9f1a54b71dae69af7f610060ce42550225\": container with ID starting with 983340e3c0b3a463b995c14657063c9f1a54b71dae69af7f610060ce42550225 not found: ID does not exist" Jan 20 17:40:00 crc kubenswrapper[4558]: I0120 17:40:00.031978 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/neutron-5c4dc89b5d-qp5xf" podStartSLOduration=2.031957911 podStartE2EDuration="2.031957911s" podCreationTimestamp="2026-01-20 17:39:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:39:59.806946326 +0000 UTC m=+3493.567284293" 
watchObservedRunningTime="2026-01-20 17:40:00.031957911 +0000 UTC m=+3493.792295878" Jan 20 17:40:00 crc kubenswrapper[4558]: I0120 17:40:00.032317 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:40:00 crc kubenswrapper[4558]: I0120 17:40:00.037291 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:40:00 crc kubenswrapper[4558]: I0120 17:40:00.049845 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:40:00 crc kubenswrapper[4558]: E0120 17:40:00.050267 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81080ef7-fd3d-487f-b6aa-0fb9eaebc855" containerName="cinder-scheduler" Jan 20 17:40:00 crc kubenswrapper[4558]: I0120 17:40:00.050288 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="81080ef7-fd3d-487f-b6aa-0fb9eaebc855" containerName="cinder-scheduler" Jan 20 17:40:00 crc kubenswrapper[4558]: E0120 17:40:00.050300 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81080ef7-fd3d-487f-b6aa-0fb9eaebc855" containerName="probe" Jan 20 17:40:00 crc kubenswrapper[4558]: I0120 17:40:00.050307 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="81080ef7-fd3d-487f-b6aa-0fb9eaebc855" containerName="probe" Jan 20 17:40:00 crc kubenswrapper[4558]: I0120 17:40:00.050502 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="81080ef7-fd3d-487f-b6aa-0fb9eaebc855" containerName="probe" Jan 20 17:40:00 crc kubenswrapper[4558]: I0120 17:40:00.050527 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="81080ef7-fd3d-487f-b6aa-0fb9eaebc855" containerName="cinder-scheduler" Jan 20 17:40:00 crc kubenswrapper[4558]: I0120 17:40:00.051499 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:40:00 crc kubenswrapper[4558]: I0120 17:40:00.052970 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-scheduler-config-data" Jan 20 17:40:00 crc kubenswrapper[4558]: I0120 17:40:00.066104 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:40:00 crc kubenswrapper[4558]: I0120 17:40:00.126266 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/25cfe04f-2194-405f-a9d7-82181e8ac22a-scripts\") pod \"cinder-scheduler-0\" (UID: \"25cfe04f-2194-405f-a9d7-82181e8ac22a\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:40:00 crc kubenswrapper[4558]: I0120 17:40:00.126551 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/25cfe04f-2194-405f-a9d7-82181e8ac22a-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"25cfe04f-2194-405f-a9d7-82181e8ac22a\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:40:00 crc kubenswrapper[4558]: I0120 17:40:00.126619 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25cfe04f-2194-405f-a9d7-82181e8ac22a-config-data\") pod \"cinder-scheduler-0\" (UID: \"25cfe04f-2194-405f-a9d7-82181e8ac22a\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:40:00 crc kubenswrapper[4558]: I0120 17:40:00.126750 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/25cfe04f-2194-405f-a9d7-82181e8ac22a-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"25cfe04f-2194-405f-a9d7-82181e8ac22a\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:40:00 crc kubenswrapper[4558]: I0120 17:40:00.126983 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vbvr7\" (UniqueName: \"kubernetes.io/projected/25cfe04f-2194-405f-a9d7-82181e8ac22a-kube-api-access-vbvr7\") pod \"cinder-scheduler-0\" (UID: \"25cfe04f-2194-405f-a9d7-82181e8ac22a\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:40:00 crc kubenswrapper[4558]: I0120 17:40:00.127067 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25cfe04f-2194-405f-a9d7-82181e8ac22a-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"25cfe04f-2194-405f-a9d7-82181e8ac22a\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:40:00 crc kubenswrapper[4558]: I0120 17:40:00.153797 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/barbican-api-7699c67d4d-fsf9d" Jan 20 17:40:00 crc kubenswrapper[4558]: I0120 17:40:00.229417 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vbvr7\" (UniqueName: \"kubernetes.io/projected/25cfe04f-2194-405f-a9d7-82181e8ac22a-kube-api-access-vbvr7\") pod \"cinder-scheduler-0\" (UID: \"25cfe04f-2194-405f-a9d7-82181e8ac22a\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:40:00 crc kubenswrapper[4558]: I0120 17:40:00.229505 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25cfe04f-2194-405f-a9d7-82181e8ac22a-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"25cfe04f-2194-405f-a9d7-82181e8ac22a\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:40:00 crc kubenswrapper[4558]: I0120 17:40:00.229556 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/25cfe04f-2194-405f-a9d7-82181e8ac22a-scripts\") pod \"cinder-scheduler-0\" (UID: \"25cfe04f-2194-405f-a9d7-82181e8ac22a\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:40:00 crc kubenswrapper[4558]: I0120 17:40:00.229710 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/25cfe04f-2194-405f-a9d7-82181e8ac22a-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"25cfe04f-2194-405f-a9d7-82181e8ac22a\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:40:00 crc kubenswrapper[4558]: I0120 17:40:00.229733 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25cfe04f-2194-405f-a9d7-82181e8ac22a-config-data\") pod \"cinder-scheduler-0\" (UID: \"25cfe04f-2194-405f-a9d7-82181e8ac22a\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:40:00 crc kubenswrapper[4558]: I0120 17:40:00.229842 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/25cfe04f-2194-405f-a9d7-82181e8ac22a-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"25cfe04f-2194-405f-a9d7-82181e8ac22a\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:40:00 crc kubenswrapper[4558]: I0120 17:40:00.230023 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/25cfe04f-2194-405f-a9d7-82181e8ac22a-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"25cfe04f-2194-405f-a9d7-82181e8ac22a\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:40:00 crc kubenswrapper[4558]: I0120 17:40:00.238607 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25cfe04f-2194-405f-a9d7-82181e8ac22a-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"25cfe04f-2194-405f-a9d7-82181e8ac22a\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:40:00 crc kubenswrapper[4558]: I0120 17:40:00.239582 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/25cfe04f-2194-405f-a9d7-82181e8ac22a-scripts\") pod \"cinder-scheduler-0\" (UID: \"25cfe04f-2194-405f-a9d7-82181e8ac22a\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:40:00 crc kubenswrapper[4558]: I0120 17:40:00.239733 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/25cfe04f-2194-405f-a9d7-82181e8ac22a-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"25cfe04f-2194-405f-a9d7-82181e8ac22a\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:40:00 crc kubenswrapper[4558]: I0120 17:40:00.240984 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25cfe04f-2194-405f-a9d7-82181e8ac22a-config-data\") pod \"cinder-scheduler-0\" (UID: \"25cfe04f-2194-405f-a9d7-82181e8ac22a\") " 
pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:40:00 crc kubenswrapper[4558]: I0120 17:40:00.248760 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vbvr7\" (UniqueName: \"kubernetes.io/projected/25cfe04f-2194-405f-a9d7-82181e8ac22a-kube-api-access-vbvr7\") pod \"cinder-scheduler-0\" (UID: \"25cfe04f-2194-405f-a9d7-82181e8ac22a\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:40:00 crc kubenswrapper[4558]: I0120 17:40:00.348007 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/barbican-api-7699c67d4d-fsf9d" Jan 20 17:40:00 crc kubenswrapper[4558]: I0120 17:40:00.368770 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:40:00 crc kubenswrapper[4558]: I0120 17:40:00.581207 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="81080ef7-fd3d-487f-b6aa-0fb9eaebc855" path="/var/lib/kubelet/pods/81080ef7-fd3d-487f-b6aa-0fb9eaebc855/volumes" Jan 20 17:40:00 crc kubenswrapper[4558]: I0120 17:40:00.863582 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:40:00 crc kubenswrapper[4558]: W0120 17:40:00.869315 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod25cfe04f_2194_405f_a9d7_82181e8ac22a.slice/crio-9ef69efacb42b6ced654f8f8a2280a764154af708fca8dfecf47b47aa31aeedd WatchSource:0}: Error finding container 9ef69efacb42b6ced654f8f8a2280a764154af708fca8dfecf47b47aa31aeedd: Status 404 returned error can't find the container with id 9ef69efacb42b6ced654f8f8a2280a764154af708fca8dfecf47b47aa31aeedd Jan 20 17:40:01 crc kubenswrapper[4558]: I0120 17:40:01.161422 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/barbican-api-6575ff8c88-kqv4m" Jan 20 17:40:01 crc kubenswrapper[4558]: I0120 17:40:01.773993 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"25cfe04f-2194-405f-a9d7-82181e8ac22a","Type":"ContainerStarted","Data":"080004aa669f36ce8bb441200bb1b70a9222d0a13602ec478e6d8d26c1fbf426"} Jan 20 17:40:01 crc kubenswrapper[4558]: I0120 17:40:01.774353 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"25cfe04f-2194-405f-a9d7-82181e8ac22a","Type":"ContainerStarted","Data":"9ef69efacb42b6ced654f8f8a2280a764154af708fca8dfecf47b47aa31aeedd"} Jan 20 17:40:02 crc kubenswrapper[4558]: I0120 17:40:02.609623 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/barbican-api-6575ff8c88-kqv4m" Jan 20 17:40:02 crc kubenswrapper[4558]: I0120 17:40:02.665878 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-api-7699c67d4d-fsf9d"] Jan 20 17:40:02 crc kubenswrapper[4558]: I0120 17:40:02.666107 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-api-7699c67d4d-fsf9d" podUID="67e97807-4d7b-4c6c-8e65-148173bfe17c" containerName="barbican-api-log" containerID="cri-o://ff2f4d8084422579f65c3d1bff4f2f2acd7aa144e45d23176cd336a0a11fa1f7" gracePeriod=30 Jan 20 17:40:02 crc kubenswrapper[4558]: I0120 17:40:02.666209 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-api-7699c67d4d-fsf9d" 
podUID="67e97807-4d7b-4c6c-8e65-148173bfe17c" containerName="barbican-api" containerID="cri-o://1847a95077e724637eb802a28e7bb0e7273c6c379ffa3f114dc22cb641c18b8e" gracePeriod=30 Jan 20 17:40:02 crc kubenswrapper[4558]: I0120 17:40:02.790481 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"25cfe04f-2194-405f-a9d7-82181e8ac22a","Type":"ContainerStarted","Data":"9aec7229608a50e4ca2cab47655081015208900db81e7a5801eb3c70cf100002"} Jan 20 17:40:02 crc kubenswrapper[4558]: I0120 17:40:02.813708 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/cinder-scheduler-0" podStartSLOduration=2.813691587 podStartE2EDuration="2.813691587s" podCreationTimestamp="2026-01-20 17:40:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:40:02.80599768 +0000 UTC m=+3496.566335647" watchObservedRunningTime="2026-01-20 17:40:02.813691587 +0000 UTC m=+3496.574029554" Jan 20 17:40:03 crc kubenswrapper[4558]: I0120 17:40:03.748757 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:40:03 crc kubenswrapper[4558]: I0120 17:40:03.813902 4558 generic.go:334] "Generic (PLEG): container finished" podID="67e97807-4d7b-4c6c-8e65-148173bfe17c" containerID="ff2f4d8084422579f65c3d1bff4f2f2acd7aa144e45d23176cd336a0a11fa1f7" exitCode=143 Jan 20 17:40:03 crc kubenswrapper[4558]: I0120 17:40:03.814771 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-7699c67d4d-fsf9d" event={"ID":"67e97807-4d7b-4c6c-8e65-148173bfe17c","Type":"ContainerDied","Data":"ff2f4d8084422579f65c3d1bff4f2f2acd7aa144e45d23176cd336a0a11fa1f7"} Jan 20 17:40:05 crc kubenswrapper[4558]: I0120 17:40:05.369496 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:40:05 crc kubenswrapper[4558]: I0120 17:40:05.822317 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/barbican-api-7699c67d4d-fsf9d" podUID="67e97807-4d7b-4c6c-8e65-148173bfe17c" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.222:9311/healthcheck\": read tcp 10.217.0.2:50038->10.217.0.222:9311: read: connection reset by peer" Jan 20 17:40:05 crc kubenswrapper[4558]: I0120 17:40:05.822382 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/barbican-api-7699c67d4d-fsf9d" podUID="67e97807-4d7b-4c6c-8e65-148173bfe17c" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.222:9311/healthcheck\": read tcp 10.217.0.2:50040->10.217.0.222:9311: read: connection reset by peer" Jan 20 17:40:06 crc kubenswrapper[4558]: I0120 17:40:06.221836 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-api-7699c67d4d-fsf9d" Jan 20 17:40:06 crc kubenswrapper[4558]: I0120 17:40:06.266408 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/67e97807-4d7b-4c6c-8e65-148173bfe17c-logs\") pod \"67e97807-4d7b-4c6c-8e65-148173bfe17c\" (UID: \"67e97807-4d7b-4c6c-8e65-148173bfe17c\") " Jan 20 17:40:06 crc kubenswrapper[4558]: I0120 17:40:06.266456 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/67e97807-4d7b-4c6c-8e65-148173bfe17c-config-data-custom\") pod \"67e97807-4d7b-4c6c-8e65-148173bfe17c\" (UID: \"67e97807-4d7b-4c6c-8e65-148173bfe17c\") " Jan 20 17:40:06 crc kubenswrapper[4558]: I0120 17:40:06.266528 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67e97807-4d7b-4c6c-8e65-148173bfe17c-combined-ca-bundle\") pod \"67e97807-4d7b-4c6c-8e65-148173bfe17c\" (UID: \"67e97807-4d7b-4c6c-8e65-148173bfe17c\") " Jan 20 17:40:06 crc kubenswrapper[4558]: I0120 17:40:06.266605 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q77xc\" (UniqueName: \"kubernetes.io/projected/67e97807-4d7b-4c6c-8e65-148173bfe17c-kube-api-access-q77xc\") pod \"67e97807-4d7b-4c6c-8e65-148173bfe17c\" (UID: \"67e97807-4d7b-4c6c-8e65-148173bfe17c\") " Jan 20 17:40:06 crc kubenswrapper[4558]: I0120 17:40:06.266645 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/67e97807-4d7b-4c6c-8e65-148173bfe17c-config-data\") pod \"67e97807-4d7b-4c6c-8e65-148173bfe17c\" (UID: \"67e97807-4d7b-4c6c-8e65-148173bfe17c\") " Jan 20 17:40:06 crc kubenswrapper[4558]: I0120 17:40:06.266972 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/67e97807-4d7b-4c6c-8e65-148173bfe17c-logs" (OuterVolumeSpecName: "logs") pod "67e97807-4d7b-4c6c-8e65-148173bfe17c" (UID: "67e97807-4d7b-4c6c-8e65-148173bfe17c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:40:06 crc kubenswrapper[4558]: I0120 17:40:06.267403 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/67e97807-4d7b-4c6c-8e65-148173bfe17c-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:40:06 crc kubenswrapper[4558]: I0120 17:40:06.276743 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/67e97807-4d7b-4c6c-8e65-148173bfe17c-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "67e97807-4d7b-4c6c-8e65-148173bfe17c" (UID: "67e97807-4d7b-4c6c-8e65-148173bfe17c"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:40:06 crc kubenswrapper[4558]: I0120 17:40:06.276794 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/67e97807-4d7b-4c6c-8e65-148173bfe17c-kube-api-access-q77xc" (OuterVolumeSpecName: "kube-api-access-q77xc") pod "67e97807-4d7b-4c6c-8e65-148173bfe17c" (UID: "67e97807-4d7b-4c6c-8e65-148173bfe17c"). InnerVolumeSpecName "kube-api-access-q77xc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:40:06 crc kubenswrapper[4558]: I0120 17:40:06.290204 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/67e97807-4d7b-4c6c-8e65-148173bfe17c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "67e97807-4d7b-4c6c-8e65-148173bfe17c" (UID: "67e97807-4d7b-4c6c-8e65-148173bfe17c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:40:06 crc kubenswrapper[4558]: I0120 17:40:06.304929 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/67e97807-4d7b-4c6c-8e65-148173bfe17c-config-data" (OuterVolumeSpecName: "config-data") pod "67e97807-4d7b-4c6c-8e65-148173bfe17c" (UID: "67e97807-4d7b-4c6c-8e65-148173bfe17c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:40:06 crc kubenswrapper[4558]: I0120 17:40:06.369967 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/67e97807-4d7b-4c6c-8e65-148173bfe17c-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:40:06 crc kubenswrapper[4558]: I0120 17:40:06.370014 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/67e97807-4d7b-4c6c-8e65-148173bfe17c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:40:06 crc kubenswrapper[4558]: I0120 17:40:06.370029 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q77xc\" (UniqueName: \"kubernetes.io/projected/67e97807-4d7b-4c6c-8e65-148173bfe17c-kube-api-access-q77xc\") on node \"crc\" DevicePath \"\"" Jan 20 17:40:06 crc kubenswrapper[4558]: I0120 17:40:06.370045 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/67e97807-4d7b-4c6c-8e65-148173bfe17c-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:40:06 crc kubenswrapper[4558]: I0120 17:40:06.853498 4558 generic.go:334] "Generic (PLEG): container finished" podID="67e97807-4d7b-4c6c-8e65-148173bfe17c" containerID="1847a95077e724637eb802a28e7bb0e7273c6c379ffa3f114dc22cb641c18b8e" exitCode=0 Jan 20 17:40:06 crc kubenswrapper[4558]: I0120 17:40:06.853564 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-7699c67d4d-fsf9d" event={"ID":"67e97807-4d7b-4c6c-8e65-148173bfe17c","Type":"ContainerDied","Data":"1847a95077e724637eb802a28e7bb0e7273c6c379ffa3f114dc22cb641c18b8e"} Jan 20 17:40:06 crc kubenswrapper[4558]: I0120 17:40:06.853573 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-api-7699c67d4d-fsf9d" Jan 20 17:40:06 crc kubenswrapper[4558]: I0120 17:40:06.853616 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-7699c67d4d-fsf9d" event={"ID":"67e97807-4d7b-4c6c-8e65-148173bfe17c","Type":"ContainerDied","Data":"da183d67b9b9b720a1acbbf847a7c17e97698884989329bb3d5164945e6657cb"} Jan 20 17:40:06 crc kubenswrapper[4558]: I0120 17:40:06.853643 4558 scope.go:117] "RemoveContainer" containerID="1847a95077e724637eb802a28e7bb0e7273c6c379ffa3f114dc22cb641c18b8e" Jan 20 17:40:06 crc kubenswrapper[4558]: I0120 17:40:06.879182 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-api-7699c67d4d-fsf9d"] Jan 20 17:40:06 crc kubenswrapper[4558]: I0120 17:40:06.888607 4558 scope.go:117] "RemoveContainer" containerID="ff2f4d8084422579f65c3d1bff4f2f2acd7aa144e45d23176cd336a0a11fa1f7" Jan 20 17:40:06 crc kubenswrapper[4558]: I0120 17:40:06.891541 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-api-7699c67d4d-fsf9d"] Jan 20 17:40:06 crc kubenswrapper[4558]: I0120 17:40:06.904447 4558 scope.go:117] "RemoveContainer" containerID="1847a95077e724637eb802a28e7bb0e7273c6c379ffa3f114dc22cb641c18b8e" Jan 20 17:40:06 crc kubenswrapper[4558]: E0120 17:40:06.904908 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1847a95077e724637eb802a28e7bb0e7273c6c379ffa3f114dc22cb641c18b8e\": container with ID starting with 1847a95077e724637eb802a28e7bb0e7273c6c379ffa3f114dc22cb641c18b8e not found: ID does not exist" containerID="1847a95077e724637eb802a28e7bb0e7273c6c379ffa3f114dc22cb641c18b8e" Jan 20 17:40:06 crc kubenswrapper[4558]: I0120 17:40:06.904951 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1847a95077e724637eb802a28e7bb0e7273c6c379ffa3f114dc22cb641c18b8e"} err="failed to get container status \"1847a95077e724637eb802a28e7bb0e7273c6c379ffa3f114dc22cb641c18b8e\": rpc error: code = NotFound desc = could not find container \"1847a95077e724637eb802a28e7bb0e7273c6c379ffa3f114dc22cb641c18b8e\": container with ID starting with 1847a95077e724637eb802a28e7bb0e7273c6c379ffa3f114dc22cb641c18b8e not found: ID does not exist" Jan 20 17:40:06 crc kubenswrapper[4558]: I0120 17:40:06.904983 4558 scope.go:117] "RemoveContainer" containerID="ff2f4d8084422579f65c3d1bff4f2f2acd7aa144e45d23176cd336a0a11fa1f7" Jan 20 17:40:06 crc kubenswrapper[4558]: E0120 17:40:06.905385 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ff2f4d8084422579f65c3d1bff4f2f2acd7aa144e45d23176cd336a0a11fa1f7\": container with ID starting with ff2f4d8084422579f65c3d1bff4f2f2acd7aa144e45d23176cd336a0a11fa1f7 not found: ID does not exist" containerID="ff2f4d8084422579f65c3d1bff4f2f2acd7aa144e45d23176cd336a0a11fa1f7" Jan 20 17:40:06 crc kubenswrapper[4558]: I0120 17:40:06.905452 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ff2f4d8084422579f65c3d1bff4f2f2acd7aa144e45d23176cd336a0a11fa1f7"} err="failed to get container status \"ff2f4d8084422579f65c3d1bff4f2f2acd7aa144e45d23176cd336a0a11fa1f7\": rpc error: code = NotFound desc = could not find container \"ff2f4d8084422579f65c3d1bff4f2f2acd7aa144e45d23176cd336a0a11fa1f7\": container with ID starting with 
ff2f4d8084422579f65c3d1bff4f2f2acd7aa144e45d23176cd336a0a11fa1f7 not found: ID does not exist" Jan 20 17:40:08 crc kubenswrapper[4558]: I0120 17:40:08.578997 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="67e97807-4d7b-4c6c-8e65-148173bfe17c" path="/var/lib/kubelet/pods/67e97807-4d7b-4c6c-8e65-148173bfe17c/volumes" Jan 20 17:40:10 crc kubenswrapper[4558]: I0120 17:40:10.551921 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:40:16 crc kubenswrapper[4558]: I0120 17:40:16.154745 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/placement-85684d4d5b-ffd28" Jan 20 17:40:16 crc kubenswrapper[4558]: I0120 17:40:16.165237 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/placement-85684d4d5b-ffd28" Jan 20 17:40:16 crc kubenswrapper[4558]: I0120 17:40:16.669766 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/placement-69d7f65964-r46nm" Jan 20 17:40:16 crc kubenswrapper[4558]: I0120 17:40:16.676308 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/placement-69d7f65964-r46nm" Jan 20 17:40:16 crc kubenswrapper[4558]: I0120 17:40:16.748094 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-85684d4d5b-ffd28"] Jan 20 17:40:17 crc kubenswrapper[4558]: I0120 17:40:17.038203 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:17 crc kubenswrapper[4558]: I0120 17:40:17.973114 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/placement-85684d4d5b-ffd28" podUID="68263afb-f6b4-4665-bd7d-6f3b622c5640" containerName="placement-log" containerID="cri-o://51207c618252938d282ce839f6d1b1d1e55bff3602a6cffa6c8e6977a2bf0fc0" gracePeriod=30 Jan 20 17:40:17 crc kubenswrapper[4558]: I0120 17:40:17.973197 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/placement-85684d4d5b-ffd28" podUID="68263afb-f6b4-4665-bd7d-6f3b622c5640" containerName="placement-api" containerID="cri-o://6173df580c0d2dc49bd3a6499431a0b48e8fd9a9364b67713c6871a2fee15ab6" gracePeriod=30 Jan 20 17:40:18 crc kubenswrapper[4558]: I0120 17:40:18.890489 4558 scope.go:117] "RemoveContainer" containerID="50d26ce4c22ef55a6ba6f240018a6850524c10427a08e374734fa5e7aa9279db" Jan 20 17:40:18 crc kubenswrapper[4558]: I0120 17:40:18.930325 4558 scope.go:117] "RemoveContainer" containerID="e1524456582814b161678d93ca35c12161b2ebbf4ff7b988cf13d97b61480ee9" Jan 20 17:40:18 crc kubenswrapper[4558]: I0120 17:40:18.985804 4558 generic.go:334] "Generic (PLEG): container finished" podID="68263afb-f6b4-4665-bd7d-6f3b622c5640" containerID="51207c618252938d282ce839f6d1b1d1e55bff3602a6cffa6c8e6977a2bf0fc0" exitCode=143 Jan 20 17:40:18 crc kubenswrapper[4558]: I0120 17:40:18.985906 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-85684d4d5b-ffd28" event={"ID":"68263afb-f6b4-4665-bd7d-6f3b622c5640","Type":"ContainerDied","Data":"51207c618252938d282ce839f6d1b1d1e55bff3602a6cffa6c8e6977a2bf0fc0"} Jan 20 17:40:21 crc kubenswrapper[4558]: I0120 17:40:21.519080 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-85684d4d5b-ffd28" Jan 20 17:40:21 crc kubenswrapper[4558]: I0120 17:40:21.571535 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/68263afb-f6b4-4665-bd7d-6f3b622c5640-scripts\") pod \"68263afb-f6b4-4665-bd7d-6f3b622c5640\" (UID: \"68263afb-f6b4-4665-bd7d-6f3b622c5640\") " Jan 20 17:40:21 crc kubenswrapper[4558]: I0120 17:40:21.571600 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-js4h2\" (UniqueName: \"kubernetes.io/projected/68263afb-f6b4-4665-bd7d-6f3b622c5640-kube-api-access-js4h2\") pod \"68263afb-f6b4-4665-bd7d-6f3b622c5640\" (UID: \"68263afb-f6b4-4665-bd7d-6f3b622c5640\") " Jan 20 17:40:21 crc kubenswrapper[4558]: I0120 17:40:21.571809 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/68263afb-f6b4-4665-bd7d-6f3b622c5640-logs\") pod \"68263afb-f6b4-4665-bd7d-6f3b622c5640\" (UID: \"68263afb-f6b4-4665-bd7d-6f3b622c5640\") " Jan 20 17:40:21 crc kubenswrapper[4558]: I0120 17:40:21.571864 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/68263afb-f6b4-4665-bd7d-6f3b622c5640-config-data\") pod \"68263afb-f6b4-4665-bd7d-6f3b622c5640\" (UID: \"68263afb-f6b4-4665-bd7d-6f3b622c5640\") " Jan 20 17:40:21 crc kubenswrapper[4558]: I0120 17:40:21.571913 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68263afb-f6b4-4665-bd7d-6f3b622c5640-combined-ca-bundle\") pod \"68263afb-f6b4-4665-bd7d-6f3b622c5640\" (UID: \"68263afb-f6b4-4665-bd7d-6f3b622c5640\") " Jan 20 17:40:21 crc kubenswrapper[4558]: I0120 17:40:21.572440 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/68263afb-f6b4-4665-bd7d-6f3b622c5640-logs" (OuterVolumeSpecName: "logs") pod "68263afb-f6b4-4665-bd7d-6f3b622c5640" (UID: "68263afb-f6b4-4665-bd7d-6f3b622c5640"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:40:21 crc kubenswrapper[4558]: I0120 17:40:21.577472 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/68263afb-f6b4-4665-bd7d-6f3b622c5640-scripts" (OuterVolumeSpecName: "scripts") pod "68263afb-f6b4-4665-bd7d-6f3b622c5640" (UID: "68263afb-f6b4-4665-bd7d-6f3b622c5640"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:40:21 crc kubenswrapper[4558]: I0120 17:40:21.578182 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/68263afb-f6b4-4665-bd7d-6f3b622c5640-kube-api-access-js4h2" (OuterVolumeSpecName: "kube-api-access-js4h2") pod "68263afb-f6b4-4665-bd7d-6f3b622c5640" (UID: "68263afb-f6b4-4665-bd7d-6f3b622c5640"). InnerVolumeSpecName "kube-api-access-js4h2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:40:21 crc kubenswrapper[4558]: I0120 17:40:21.607822 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/68263afb-f6b4-4665-bd7d-6f3b622c5640-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "68263afb-f6b4-4665-bd7d-6f3b622c5640" (UID: "68263afb-f6b4-4665-bd7d-6f3b622c5640"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:40:21 crc kubenswrapper[4558]: I0120 17:40:21.611978 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/68263afb-f6b4-4665-bd7d-6f3b622c5640-config-data" (OuterVolumeSpecName: "config-data") pod "68263afb-f6b4-4665-bd7d-6f3b622c5640" (UID: "68263afb-f6b4-4665-bd7d-6f3b622c5640"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:40:21 crc kubenswrapper[4558]: I0120 17:40:21.673427 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/68263afb-f6b4-4665-bd7d-6f3b622c5640-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:40:21 crc kubenswrapper[4558]: I0120 17:40:21.673459 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/68263afb-f6b4-4665-bd7d-6f3b622c5640-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:40:21 crc kubenswrapper[4558]: I0120 17:40:21.673473 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68263afb-f6b4-4665-bd7d-6f3b622c5640-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:40:21 crc kubenswrapper[4558]: I0120 17:40:21.673485 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/68263afb-f6b4-4665-bd7d-6f3b622c5640-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:40:21 crc kubenswrapper[4558]: I0120 17:40:21.673495 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-js4h2\" (UniqueName: \"kubernetes.io/projected/68263afb-f6b4-4665-bd7d-6f3b622c5640-kube-api-access-js4h2\") on node \"crc\" DevicePath \"\"" Jan 20 17:40:22 crc kubenswrapper[4558]: I0120 17:40:22.017255 4558 generic.go:334] "Generic (PLEG): container finished" podID="68263afb-f6b4-4665-bd7d-6f3b622c5640" containerID="6173df580c0d2dc49bd3a6499431a0b48e8fd9a9364b67713c6871a2fee15ab6" exitCode=0 Jan 20 17:40:22 crc kubenswrapper[4558]: I0120 17:40:22.017364 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-85684d4d5b-ffd28" Jan 20 17:40:22 crc kubenswrapper[4558]: I0120 17:40:22.017340 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-85684d4d5b-ffd28" event={"ID":"68263afb-f6b4-4665-bd7d-6f3b622c5640","Type":"ContainerDied","Data":"6173df580c0d2dc49bd3a6499431a0b48e8fd9a9364b67713c6871a2fee15ab6"} Jan 20 17:40:22 crc kubenswrapper[4558]: I0120 17:40:22.017503 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-85684d4d5b-ffd28" event={"ID":"68263afb-f6b4-4665-bd7d-6f3b622c5640","Type":"ContainerDied","Data":"6586d0345276362475021102ade4402b7f08952f8a3a2a950e33992dfed4af6f"} Jan 20 17:40:22 crc kubenswrapper[4558]: I0120 17:40:22.017531 4558 scope.go:117] "RemoveContainer" containerID="6173df580c0d2dc49bd3a6499431a0b48e8fd9a9364b67713c6871a2fee15ab6" Jan 20 17:40:22 crc kubenswrapper[4558]: I0120 17:40:22.042037 4558 scope.go:117] "RemoveContainer" containerID="51207c618252938d282ce839f6d1b1d1e55bff3602a6cffa6c8e6977a2bf0fc0" Jan 20 17:40:22 crc kubenswrapper[4558]: I0120 17:40:22.055521 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-85684d4d5b-ffd28"] Jan 20 17:40:22 crc kubenswrapper[4558]: I0120 17:40:22.059553 4558 scope.go:117] "RemoveContainer" containerID="6173df580c0d2dc49bd3a6499431a0b48e8fd9a9364b67713c6871a2fee15ab6" Jan 20 17:40:22 crc kubenswrapper[4558]: E0120 17:40:22.060010 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6173df580c0d2dc49bd3a6499431a0b48e8fd9a9364b67713c6871a2fee15ab6\": container with ID starting with 6173df580c0d2dc49bd3a6499431a0b48e8fd9a9364b67713c6871a2fee15ab6 not found: ID does not exist" containerID="6173df580c0d2dc49bd3a6499431a0b48e8fd9a9364b67713c6871a2fee15ab6" Jan 20 17:40:22 crc kubenswrapper[4558]: I0120 17:40:22.060057 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6173df580c0d2dc49bd3a6499431a0b48e8fd9a9364b67713c6871a2fee15ab6"} err="failed to get container status \"6173df580c0d2dc49bd3a6499431a0b48e8fd9a9364b67713c6871a2fee15ab6\": rpc error: code = NotFound desc = could not find container \"6173df580c0d2dc49bd3a6499431a0b48e8fd9a9364b67713c6871a2fee15ab6\": container with ID starting with 6173df580c0d2dc49bd3a6499431a0b48e8fd9a9364b67713c6871a2fee15ab6 not found: ID does not exist" Jan 20 17:40:22 crc kubenswrapper[4558]: I0120 17:40:22.060087 4558 scope.go:117] "RemoveContainer" containerID="51207c618252938d282ce839f6d1b1d1e55bff3602a6cffa6c8e6977a2bf0fc0" Jan 20 17:40:22 crc kubenswrapper[4558]: E0120 17:40:22.060529 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"51207c618252938d282ce839f6d1b1d1e55bff3602a6cffa6c8e6977a2bf0fc0\": container with ID starting with 51207c618252938d282ce839f6d1b1d1e55bff3602a6cffa6c8e6977a2bf0fc0 not found: ID does not exist" containerID="51207c618252938d282ce839f6d1b1d1e55bff3602a6cffa6c8e6977a2bf0fc0" Jan 20 17:40:22 crc kubenswrapper[4558]: I0120 17:40:22.060622 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"51207c618252938d282ce839f6d1b1d1e55bff3602a6cffa6c8e6977a2bf0fc0"} err="failed to get container status \"51207c618252938d282ce839f6d1b1d1e55bff3602a6cffa6c8e6977a2bf0fc0\": rpc error: code = NotFound desc = could not find container 
\"51207c618252938d282ce839f6d1b1d1e55bff3602a6cffa6c8e6977a2bf0fc0\": container with ID starting with 51207c618252938d282ce839f6d1b1d1e55bff3602a6cffa6c8e6977a2bf0fc0 not found: ID does not exist" Jan 20 17:40:22 crc kubenswrapper[4558]: I0120 17:40:22.063710 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/placement-85684d4d5b-ffd28"] Jan 20 17:40:22 crc kubenswrapper[4558]: I0120 17:40:22.576237 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="68263afb-f6b4-4665-bd7d-6f3b622c5640" path="/var/lib/kubelet/pods/68263afb-f6b4-4665-bd7d-6f3b622c5640/volumes" Jan 20 17:40:24 crc kubenswrapper[4558]: I0120 17:40:24.313290 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/keystone-7dffbf585-vg2sk" Jan 20 17:40:26 crc kubenswrapper[4558]: I0120 17:40:26.349443 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/neutron-6ddffc77f7-gjhlj" Jan 20 17:40:27 crc kubenswrapper[4558]: I0120 17:40:27.330007 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 17:40:27 crc kubenswrapper[4558]: I0120 17:40:27.330659 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 17:40:27 crc kubenswrapper[4558]: I0120 17:40:27.330727 4558 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" Jan 20 17:40:27 crc kubenswrapper[4558]: I0120 17:40:27.331553 4558 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"2d2a8989ca5a5af98b2c9dc8f801ea0675339ec14800f437c058c5a43c20146b"} pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 20 17:40:27 crc kubenswrapper[4558]: I0120 17:40:27.331633 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" containerID="cri-o://2d2a8989ca5a5af98b2c9dc8f801ea0675339ec14800f437c058c5a43c20146b" gracePeriod=600 Jan 20 17:40:27 crc kubenswrapper[4558]: I0120 17:40:27.364094 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:40:27 crc kubenswrapper[4558]: E0120 17:40:27.364525 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68263afb-f6b4-4665-bd7d-6f3b622c5640" containerName="placement-api" Jan 20 17:40:27 crc kubenswrapper[4558]: I0120 17:40:27.364546 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="68263afb-f6b4-4665-bd7d-6f3b622c5640" containerName="placement-api" Jan 20 17:40:27 crc kubenswrapper[4558]: E0120 17:40:27.364558 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68263afb-f6b4-4665-bd7d-6f3b622c5640" containerName="placement-log" Jan 20 17:40:27 
crc kubenswrapper[4558]: I0120 17:40:27.364563 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="68263afb-f6b4-4665-bd7d-6f3b622c5640" containerName="placement-log" Jan 20 17:40:27 crc kubenswrapper[4558]: E0120 17:40:27.364581 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67e97807-4d7b-4c6c-8e65-148173bfe17c" containerName="barbican-api-log" Jan 20 17:40:27 crc kubenswrapper[4558]: I0120 17:40:27.364587 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="67e97807-4d7b-4c6c-8e65-148173bfe17c" containerName="barbican-api-log" Jan 20 17:40:27 crc kubenswrapper[4558]: E0120 17:40:27.364604 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67e97807-4d7b-4c6c-8e65-148173bfe17c" containerName="barbican-api" Jan 20 17:40:27 crc kubenswrapper[4558]: I0120 17:40:27.364609 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="67e97807-4d7b-4c6c-8e65-148173bfe17c" containerName="barbican-api" Jan 20 17:40:27 crc kubenswrapper[4558]: I0120 17:40:27.364775 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="67e97807-4d7b-4c6c-8e65-148173bfe17c" containerName="barbican-api-log" Jan 20 17:40:27 crc kubenswrapper[4558]: I0120 17:40:27.364790 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="68263afb-f6b4-4665-bd7d-6f3b622c5640" containerName="placement-log" Jan 20 17:40:27 crc kubenswrapper[4558]: I0120 17:40:27.364802 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="67e97807-4d7b-4c6c-8e65-148173bfe17c" containerName="barbican-api" Jan 20 17:40:27 crc kubenswrapper[4558]: I0120 17:40:27.364813 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="68263afb-f6b4-4665-bd7d-6f3b622c5640" containerName="placement-api" Jan 20 17:40:27 crc kubenswrapper[4558]: I0120 17:40:27.365426 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstackclient" Jan 20 17:40:27 crc kubenswrapper[4558]: I0120 17:40:27.367855 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"openstack-config-secret" Jan 20 17:40:27 crc kubenswrapper[4558]: I0120 17:40:27.368207 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-config" Jan 20 17:40:27 crc kubenswrapper[4558]: I0120 17:40:27.368356 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"openstackclient-openstackclient-dockercfg-zzdj8" Jan 20 17:40:27 crc kubenswrapper[4558]: I0120 17:40:27.374347 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:40:27 crc kubenswrapper[4558]: I0120 17:40:27.383317 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/b5ea5c8f-cbdb-4b97-87b6-f0375393f6b3-openstack-config-secret\") pod \"openstackclient\" (UID: \"b5ea5c8f-cbdb-4b97-87b6-f0375393f6b3\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:40:27 crc kubenswrapper[4558]: I0120 17:40:27.383413 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c75hn\" (UniqueName: \"kubernetes.io/projected/b5ea5c8f-cbdb-4b97-87b6-f0375393f6b3-kube-api-access-c75hn\") pod \"openstackclient\" (UID: \"b5ea5c8f-cbdb-4b97-87b6-f0375393f6b3\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:40:27 crc kubenswrapper[4558]: I0120 17:40:27.383463 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5ea5c8f-cbdb-4b97-87b6-f0375393f6b3-combined-ca-bundle\") pod \"openstackclient\" (UID: \"b5ea5c8f-cbdb-4b97-87b6-f0375393f6b3\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:40:27 crc kubenswrapper[4558]: I0120 17:40:27.383525 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/b5ea5c8f-cbdb-4b97-87b6-f0375393f6b3-openstack-config\") pod \"openstackclient\" (UID: \"b5ea5c8f-cbdb-4b97-87b6-f0375393f6b3\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:40:27 crc kubenswrapper[4558]: E0120 17:40:27.458538 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:40:27 crc kubenswrapper[4558]: I0120 17:40:27.485934 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/b5ea5c8f-cbdb-4b97-87b6-f0375393f6b3-openstack-config-secret\") pod \"openstackclient\" (UID: \"b5ea5c8f-cbdb-4b97-87b6-f0375393f6b3\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:40:27 crc kubenswrapper[4558]: I0120 17:40:27.486192 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c75hn\" (UniqueName: 
\"kubernetes.io/projected/b5ea5c8f-cbdb-4b97-87b6-f0375393f6b3-kube-api-access-c75hn\") pod \"openstackclient\" (UID: \"b5ea5c8f-cbdb-4b97-87b6-f0375393f6b3\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:40:27 crc kubenswrapper[4558]: I0120 17:40:27.486284 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5ea5c8f-cbdb-4b97-87b6-f0375393f6b3-combined-ca-bundle\") pod \"openstackclient\" (UID: \"b5ea5c8f-cbdb-4b97-87b6-f0375393f6b3\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:40:27 crc kubenswrapper[4558]: I0120 17:40:27.486447 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/b5ea5c8f-cbdb-4b97-87b6-f0375393f6b3-openstack-config\") pod \"openstackclient\" (UID: \"b5ea5c8f-cbdb-4b97-87b6-f0375393f6b3\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:40:27 crc kubenswrapper[4558]: I0120 17:40:27.487258 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/b5ea5c8f-cbdb-4b97-87b6-f0375393f6b3-openstack-config\") pod \"openstackclient\" (UID: \"b5ea5c8f-cbdb-4b97-87b6-f0375393f6b3\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:40:27 crc kubenswrapper[4558]: I0120 17:40:27.493693 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/b5ea5c8f-cbdb-4b97-87b6-f0375393f6b3-openstack-config-secret\") pod \"openstackclient\" (UID: \"b5ea5c8f-cbdb-4b97-87b6-f0375393f6b3\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:40:27 crc kubenswrapper[4558]: I0120 17:40:27.499427 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5ea5c8f-cbdb-4b97-87b6-f0375393f6b3-combined-ca-bundle\") pod \"openstackclient\" (UID: \"b5ea5c8f-cbdb-4b97-87b6-f0375393f6b3\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:40:27 crc kubenswrapper[4558]: I0120 17:40:27.502788 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c75hn\" (UniqueName: \"kubernetes.io/projected/b5ea5c8f-cbdb-4b97-87b6-f0375393f6b3-kube-api-access-c75hn\") pod \"openstackclient\" (UID: \"b5ea5c8f-cbdb-4b97-87b6-f0375393f6b3\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:40:27 crc kubenswrapper[4558]: I0120 17:40:27.691693 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstackclient" Jan 20 17:40:28 crc kubenswrapper[4558]: I0120 17:40:28.071911 4558 generic.go:334] "Generic (PLEG): container finished" podID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerID="2d2a8989ca5a5af98b2c9dc8f801ea0675339ec14800f437c058c5a43c20146b" exitCode=0 Jan 20 17:40:28 crc kubenswrapper[4558]: I0120 17:40:28.071960 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerDied","Data":"2d2a8989ca5a5af98b2c9dc8f801ea0675339ec14800f437c058c5a43c20146b"} Jan 20 17:40:28 crc kubenswrapper[4558]: I0120 17:40:28.072007 4558 scope.go:117] "RemoveContainer" containerID="23984d013a068cbbd112be5ba4e29373c221b0f5de1715ba81ff9be27247b9c2" Jan 20 17:40:28 crc kubenswrapper[4558]: I0120 17:40:28.072432 4558 scope.go:117] "RemoveContainer" containerID="2d2a8989ca5a5af98b2c9dc8f801ea0675339ec14800f437c058c5a43c20146b" Jan 20 17:40:28 crc kubenswrapper[4558]: E0120 17:40:28.072694 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:40:28 crc kubenswrapper[4558]: I0120 17:40:28.144269 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:40:28 crc kubenswrapper[4558]: I0120 17:40:28.387294 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/neutron-5c4dc89b5d-qp5xf" Jan 20 17:40:28 crc kubenswrapper[4558]: I0120 17:40:28.451927 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-6ddffc77f7-gjhlj"] Jan 20 17:40:28 crc kubenswrapper[4558]: I0120 17:40:28.452157 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-6ddffc77f7-gjhlj" podUID="3a66aaee-5aa8-4eb9-bda7-d66d0e98080a" containerName="neutron-api" containerID="cri-o://80ecb05a176d9dd2c2ba2cc06f79d46cd7281bd40c6011f536821eb2b2fed033" gracePeriod=30 Jan 20 17:40:28 crc kubenswrapper[4558]: I0120 17:40:28.452191 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-6ddffc77f7-gjhlj" podUID="3a66aaee-5aa8-4eb9-bda7-d66d0e98080a" containerName="neutron-httpd" containerID="cri-o://cc9c674e9ff89ec151bce4ce08e91b67b59f3d76de81cda809415d3d20cf9b1d" gracePeriod=30 Jan 20 17:40:29 crc kubenswrapper[4558]: I0120 17:40:29.083823 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstackclient" event={"ID":"b5ea5c8f-cbdb-4b97-87b6-f0375393f6b3","Type":"ContainerStarted","Data":"4213065b2ba71189cbc96ee29e5a13822ce35b5f3713247bb4eb5798f51eab0d"} Jan 20 17:40:29 crc kubenswrapper[4558]: I0120 17:40:29.084203 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstackclient" event={"ID":"b5ea5c8f-cbdb-4b97-87b6-f0375393f6b3","Type":"ContainerStarted","Data":"bb2855a8b0f98b82a583bfda8fff0dcd9423f5730d28e1bcc7c6692c525a5343"} Jan 20 17:40:29 crc kubenswrapper[4558]: I0120 17:40:29.085637 4558 generic.go:334] "Generic (PLEG): container finished" 
podID="3a66aaee-5aa8-4eb9-bda7-d66d0e98080a" containerID="cc9c674e9ff89ec151bce4ce08e91b67b59f3d76de81cda809415d3d20cf9b1d" exitCode=0 Jan 20 17:40:29 crc kubenswrapper[4558]: I0120 17:40:29.085668 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-6ddffc77f7-gjhlj" event={"ID":"3a66aaee-5aa8-4eb9-bda7-d66d0e98080a","Type":"ContainerDied","Data":"cc9c674e9ff89ec151bce4ce08e91b67b59f3d76de81cda809415d3d20cf9b1d"} Jan 20 17:40:29 crc kubenswrapper[4558]: I0120 17:40:29.108304 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/openstackclient" podStartSLOduration=2.108290337 podStartE2EDuration="2.108290337s" podCreationTimestamp="2026-01-20 17:40:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:40:29.101116267 +0000 UTC m=+3522.861454235" watchObservedRunningTime="2026-01-20 17:40:29.108290337 +0000 UTC m=+3522.868628303" Jan 20 17:40:30 crc kubenswrapper[4558]: I0120 17:40:30.203294 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/swift-proxy-6f98b8f5cb-8bwzs"] Jan 20 17:40:30 crc kubenswrapper[4558]: I0120 17:40:30.208420 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-proxy-6f98b8f5cb-8bwzs" Jan 20 17:40:30 crc kubenswrapper[4558]: I0120 17:40:30.216605 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-swift-internal-svc" Jan 20 17:40:30 crc kubenswrapper[4558]: I0120 17:40:30.216749 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-swift-public-svc" Jan 20 17:40:30 crc kubenswrapper[4558]: I0120 17:40:30.216876 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"swift-proxy-config-data" Jan 20 17:40:30 crc kubenswrapper[4558]: I0120 17:40:30.218820 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-proxy-6f98b8f5cb-8bwzs"] Jan 20 17:40:30 crc kubenswrapper[4558]: I0120 17:40:30.244352 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/858ca371-2b02-46d8-879c-8d859c31d590-log-httpd\") pod \"swift-proxy-6f98b8f5cb-8bwzs\" (UID: \"858ca371-2b02-46d8-879c-8d859c31d590\") " pod="openstack-kuttl-tests/swift-proxy-6f98b8f5cb-8bwzs" Jan 20 17:40:30 crc kubenswrapper[4558]: I0120 17:40:30.244520 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/858ca371-2b02-46d8-879c-8d859c31d590-public-tls-certs\") pod \"swift-proxy-6f98b8f5cb-8bwzs\" (UID: \"858ca371-2b02-46d8-879c-8d859c31d590\") " pod="openstack-kuttl-tests/swift-proxy-6f98b8f5cb-8bwzs" Jan 20 17:40:30 crc kubenswrapper[4558]: I0120 17:40:30.244705 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/858ca371-2b02-46d8-879c-8d859c31d590-run-httpd\") pod \"swift-proxy-6f98b8f5cb-8bwzs\" (UID: \"858ca371-2b02-46d8-879c-8d859c31d590\") " pod="openstack-kuttl-tests/swift-proxy-6f98b8f5cb-8bwzs" Jan 20 17:40:30 crc kubenswrapper[4558]: I0120 17:40:30.244796 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/858ca371-2b02-46d8-879c-8d859c31d590-combined-ca-bundle\") pod \"swift-proxy-6f98b8f5cb-8bwzs\" (UID: \"858ca371-2b02-46d8-879c-8d859c31d590\") " pod="openstack-kuttl-tests/swift-proxy-6f98b8f5cb-8bwzs" Jan 20 17:40:30 crc kubenswrapper[4558]: I0120 17:40:30.244977 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/858ca371-2b02-46d8-879c-8d859c31d590-internal-tls-certs\") pod \"swift-proxy-6f98b8f5cb-8bwzs\" (UID: \"858ca371-2b02-46d8-879c-8d859c31d590\") " pod="openstack-kuttl-tests/swift-proxy-6f98b8f5cb-8bwzs" Jan 20 17:40:30 crc kubenswrapper[4558]: I0120 17:40:30.245225 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ps4km\" (UniqueName: \"kubernetes.io/projected/858ca371-2b02-46d8-879c-8d859c31d590-kube-api-access-ps4km\") pod \"swift-proxy-6f98b8f5cb-8bwzs\" (UID: \"858ca371-2b02-46d8-879c-8d859c31d590\") " pod="openstack-kuttl-tests/swift-proxy-6f98b8f5cb-8bwzs" Jan 20 17:40:30 crc kubenswrapper[4558]: I0120 17:40:30.245352 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/858ca371-2b02-46d8-879c-8d859c31d590-etc-swift\") pod \"swift-proxy-6f98b8f5cb-8bwzs\" (UID: \"858ca371-2b02-46d8-879c-8d859c31d590\") " pod="openstack-kuttl-tests/swift-proxy-6f98b8f5cb-8bwzs" Jan 20 17:40:30 crc kubenswrapper[4558]: I0120 17:40:30.245447 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/858ca371-2b02-46d8-879c-8d859c31d590-config-data\") pod \"swift-proxy-6f98b8f5cb-8bwzs\" (UID: \"858ca371-2b02-46d8-879c-8d859c31d590\") " pod="openstack-kuttl-tests/swift-proxy-6f98b8f5cb-8bwzs" Jan 20 17:40:30 crc kubenswrapper[4558]: I0120 17:40:30.348357 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/858ca371-2b02-46d8-879c-8d859c31d590-config-data\") pod \"swift-proxy-6f98b8f5cb-8bwzs\" (UID: \"858ca371-2b02-46d8-879c-8d859c31d590\") " pod="openstack-kuttl-tests/swift-proxy-6f98b8f5cb-8bwzs" Jan 20 17:40:30 crc kubenswrapper[4558]: I0120 17:40:30.348520 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/858ca371-2b02-46d8-879c-8d859c31d590-log-httpd\") pod \"swift-proxy-6f98b8f5cb-8bwzs\" (UID: \"858ca371-2b02-46d8-879c-8d859c31d590\") " pod="openstack-kuttl-tests/swift-proxy-6f98b8f5cb-8bwzs" Jan 20 17:40:30 crc kubenswrapper[4558]: I0120 17:40:30.348571 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/858ca371-2b02-46d8-879c-8d859c31d590-public-tls-certs\") pod \"swift-proxy-6f98b8f5cb-8bwzs\" (UID: \"858ca371-2b02-46d8-879c-8d859c31d590\") " pod="openstack-kuttl-tests/swift-proxy-6f98b8f5cb-8bwzs" Jan 20 17:40:30 crc kubenswrapper[4558]: I0120 17:40:30.348664 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/858ca371-2b02-46d8-879c-8d859c31d590-run-httpd\") pod \"swift-proxy-6f98b8f5cb-8bwzs\" (UID: \"858ca371-2b02-46d8-879c-8d859c31d590\") " pod="openstack-kuttl-tests/swift-proxy-6f98b8f5cb-8bwzs" Jan 20 17:40:30 crc kubenswrapper[4558]: I0120 17:40:30.348727 4558 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/858ca371-2b02-46d8-879c-8d859c31d590-combined-ca-bundle\") pod \"swift-proxy-6f98b8f5cb-8bwzs\" (UID: \"858ca371-2b02-46d8-879c-8d859c31d590\") " pod="openstack-kuttl-tests/swift-proxy-6f98b8f5cb-8bwzs" Jan 20 17:40:30 crc kubenswrapper[4558]: I0120 17:40:30.349021 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/858ca371-2b02-46d8-879c-8d859c31d590-internal-tls-certs\") pod \"swift-proxy-6f98b8f5cb-8bwzs\" (UID: \"858ca371-2b02-46d8-879c-8d859c31d590\") " pod="openstack-kuttl-tests/swift-proxy-6f98b8f5cb-8bwzs" Jan 20 17:40:30 crc kubenswrapper[4558]: I0120 17:40:30.349155 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ps4km\" (UniqueName: \"kubernetes.io/projected/858ca371-2b02-46d8-879c-8d859c31d590-kube-api-access-ps4km\") pod \"swift-proxy-6f98b8f5cb-8bwzs\" (UID: \"858ca371-2b02-46d8-879c-8d859c31d590\") " pod="openstack-kuttl-tests/swift-proxy-6f98b8f5cb-8bwzs" Jan 20 17:40:30 crc kubenswrapper[4558]: I0120 17:40:30.349247 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/858ca371-2b02-46d8-879c-8d859c31d590-etc-swift\") pod \"swift-proxy-6f98b8f5cb-8bwzs\" (UID: \"858ca371-2b02-46d8-879c-8d859c31d590\") " pod="openstack-kuttl-tests/swift-proxy-6f98b8f5cb-8bwzs" Jan 20 17:40:30 crc kubenswrapper[4558]: I0120 17:40:30.350546 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/858ca371-2b02-46d8-879c-8d859c31d590-log-httpd\") pod \"swift-proxy-6f98b8f5cb-8bwzs\" (UID: \"858ca371-2b02-46d8-879c-8d859c31d590\") " pod="openstack-kuttl-tests/swift-proxy-6f98b8f5cb-8bwzs" Jan 20 17:40:30 crc kubenswrapper[4558]: I0120 17:40:30.350621 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/858ca371-2b02-46d8-879c-8d859c31d590-run-httpd\") pod \"swift-proxy-6f98b8f5cb-8bwzs\" (UID: \"858ca371-2b02-46d8-879c-8d859c31d590\") " pod="openstack-kuttl-tests/swift-proxy-6f98b8f5cb-8bwzs" Jan 20 17:40:30 crc kubenswrapper[4558]: I0120 17:40:30.357731 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/858ca371-2b02-46d8-879c-8d859c31d590-combined-ca-bundle\") pod \"swift-proxy-6f98b8f5cb-8bwzs\" (UID: \"858ca371-2b02-46d8-879c-8d859c31d590\") " pod="openstack-kuttl-tests/swift-proxy-6f98b8f5cb-8bwzs" Jan 20 17:40:30 crc kubenswrapper[4558]: I0120 17:40:30.358955 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/858ca371-2b02-46d8-879c-8d859c31d590-internal-tls-certs\") pod \"swift-proxy-6f98b8f5cb-8bwzs\" (UID: \"858ca371-2b02-46d8-879c-8d859c31d590\") " pod="openstack-kuttl-tests/swift-proxy-6f98b8f5cb-8bwzs" Jan 20 17:40:30 crc kubenswrapper[4558]: I0120 17:40:30.361729 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/858ca371-2b02-46d8-879c-8d859c31d590-config-data\") pod \"swift-proxy-6f98b8f5cb-8bwzs\" (UID: \"858ca371-2b02-46d8-879c-8d859c31d590\") " pod="openstack-kuttl-tests/swift-proxy-6f98b8f5cb-8bwzs" Jan 20 17:40:30 crc kubenswrapper[4558]: I0120 
17:40:30.362438 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/858ca371-2b02-46d8-879c-8d859c31d590-public-tls-certs\") pod \"swift-proxy-6f98b8f5cb-8bwzs\" (UID: \"858ca371-2b02-46d8-879c-8d859c31d590\") " pod="openstack-kuttl-tests/swift-proxy-6f98b8f5cb-8bwzs" Jan 20 17:40:30 crc kubenswrapper[4558]: I0120 17:40:30.363962 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/858ca371-2b02-46d8-879c-8d859c31d590-etc-swift\") pod \"swift-proxy-6f98b8f5cb-8bwzs\" (UID: \"858ca371-2b02-46d8-879c-8d859c31d590\") " pod="openstack-kuttl-tests/swift-proxy-6f98b8f5cb-8bwzs" Jan 20 17:40:30 crc kubenswrapper[4558]: I0120 17:40:30.366048 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ps4km\" (UniqueName: \"kubernetes.io/projected/858ca371-2b02-46d8-879c-8d859c31d590-kube-api-access-ps4km\") pod \"swift-proxy-6f98b8f5cb-8bwzs\" (UID: \"858ca371-2b02-46d8-879c-8d859c31d590\") " pod="openstack-kuttl-tests/swift-proxy-6f98b8f5cb-8bwzs" Jan 20 17:40:30 crc kubenswrapper[4558]: I0120 17:40:30.540356 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-proxy-6f98b8f5cb-8bwzs" Jan 20 17:40:30 crc kubenswrapper[4558]: I0120 17:40:30.965547 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-proxy-6f98b8f5cb-8bwzs"] Jan 20 17:40:30 crc kubenswrapper[4558]: W0120 17:40:30.966449 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod858ca371_2b02_46d8_879c_8d859c31d590.slice/crio-557c7da51a9a4439988eea322c95c064d80eec1313541d8d0ced282f3435d124 WatchSource:0}: Error finding container 557c7da51a9a4439988eea322c95c064d80eec1313541d8d0ced282f3435d124: Status 404 returned error can't find the container with id 557c7da51a9a4439988eea322c95c064d80eec1313541d8d0ced282f3435d124 Jan 20 17:40:31 crc kubenswrapper[4558]: I0120 17:40:31.124831 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-6f98b8f5cb-8bwzs" event={"ID":"858ca371-2b02-46d8-879c-8d859c31d590","Type":"ContainerStarted","Data":"557c7da51a9a4439988eea322c95c064d80eec1313541d8d0ced282f3435d124"} Jan 20 17:40:31 crc kubenswrapper[4558]: I0120 17:40:31.568569 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:40:31 crc kubenswrapper[4558]: I0120 17:40:31.569038 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2" containerName="sg-core" containerID="cri-o://cd433ab19101a55fe28a4b5f5f546b83f5d3862cab4f05189daa9c99ec18eb21" gracePeriod=30 Jan 20 17:40:31 crc kubenswrapper[4558]: I0120 17:40:31.569205 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2" containerName="proxy-httpd" containerID="cri-o://bcbdfa37349f1e89eaec79cbd97fa05f054554461a223bbe18052651d462d9db" gracePeriod=30 Jan 20 17:40:31 crc kubenswrapper[4558]: I0120 17:40:31.568956 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2" containerName="ceilometer-central-agent" 
containerID="cri-o://aa0ca993872e33ad8a732487b7655b5b2c29e6e5ca4080a8dec4fd3d9941851a" gracePeriod=30 Jan 20 17:40:31 crc kubenswrapper[4558]: I0120 17:40:31.569362 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2" containerName="ceilometer-notification-agent" containerID="cri-o://26f5c1167982dbbb9e0e3adf24d031f01e322c717b64319de2cfccd54308f098" gracePeriod=30 Jan 20 17:40:31 crc kubenswrapper[4558]: I0120 17:40:31.739736 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-6ddffc77f7-gjhlj" Jan 20 17:40:31 crc kubenswrapper[4558]: I0120 17:40:31.779999 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/3a66aaee-5aa8-4eb9-bda7-d66d0e98080a-config\") pod \"3a66aaee-5aa8-4eb9-bda7-d66d0e98080a\" (UID: \"3a66aaee-5aa8-4eb9-bda7-d66d0e98080a\") " Jan 20 17:40:31 crc kubenswrapper[4558]: I0120 17:40:31.780065 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/3a66aaee-5aa8-4eb9-bda7-d66d0e98080a-httpd-config\") pod \"3a66aaee-5aa8-4eb9-bda7-d66d0e98080a\" (UID: \"3a66aaee-5aa8-4eb9-bda7-d66d0e98080a\") " Jan 20 17:40:31 crc kubenswrapper[4558]: I0120 17:40:31.780108 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a66aaee-5aa8-4eb9-bda7-d66d0e98080a-combined-ca-bundle\") pod \"3a66aaee-5aa8-4eb9-bda7-d66d0e98080a\" (UID: \"3a66aaee-5aa8-4eb9-bda7-d66d0e98080a\") " Jan 20 17:40:31 crc kubenswrapper[4558]: I0120 17:40:31.780324 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/3a66aaee-5aa8-4eb9-bda7-d66d0e98080a-ovndb-tls-certs\") pod \"3a66aaee-5aa8-4eb9-bda7-d66d0e98080a\" (UID: \"3a66aaee-5aa8-4eb9-bda7-d66d0e98080a\") " Jan 20 17:40:31 crc kubenswrapper[4558]: I0120 17:40:31.780620 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l2hpk\" (UniqueName: \"kubernetes.io/projected/3a66aaee-5aa8-4eb9-bda7-d66d0e98080a-kube-api-access-l2hpk\") pod \"3a66aaee-5aa8-4eb9-bda7-d66d0e98080a\" (UID: \"3a66aaee-5aa8-4eb9-bda7-d66d0e98080a\") " Jan 20 17:40:31 crc kubenswrapper[4558]: I0120 17:40:31.785069 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3a66aaee-5aa8-4eb9-bda7-d66d0e98080a-kube-api-access-l2hpk" (OuterVolumeSpecName: "kube-api-access-l2hpk") pod "3a66aaee-5aa8-4eb9-bda7-d66d0e98080a" (UID: "3a66aaee-5aa8-4eb9-bda7-d66d0e98080a"). InnerVolumeSpecName "kube-api-access-l2hpk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:40:31 crc kubenswrapper[4558]: I0120 17:40:31.788284 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a66aaee-5aa8-4eb9-bda7-d66d0e98080a-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "3a66aaee-5aa8-4eb9-bda7-d66d0e98080a" (UID: "3a66aaee-5aa8-4eb9-bda7-d66d0e98080a"). InnerVolumeSpecName "httpd-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:40:31 crc kubenswrapper[4558]: I0120 17:40:31.822805 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a66aaee-5aa8-4eb9-bda7-d66d0e98080a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3a66aaee-5aa8-4eb9-bda7-d66d0e98080a" (UID: "3a66aaee-5aa8-4eb9-bda7-d66d0e98080a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:40:31 crc kubenswrapper[4558]: I0120 17:40:31.826618 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a66aaee-5aa8-4eb9-bda7-d66d0e98080a-config" (OuterVolumeSpecName: "config") pod "3a66aaee-5aa8-4eb9-bda7-d66d0e98080a" (UID: "3a66aaee-5aa8-4eb9-bda7-d66d0e98080a"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:40:31 crc kubenswrapper[4558]: I0120 17:40:31.838228 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a66aaee-5aa8-4eb9-bda7-d66d0e98080a-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "3a66aaee-5aa8-4eb9-bda7-d66d0e98080a" (UID: "3a66aaee-5aa8-4eb9-bda7-d66d0e98080a"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:40:31 crc kubenswrapper[4558]: I0120 17:40:31.882896 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l2hpk\" (UniqueName: \"kubernetes.io/projected/3a66aaee-5aa8-4eb9-bda7-d66d0e98080a-kube-api-access-l2hpk\") on node \"crc\" DevicePath \"\"" Jan 20 17:40:31 crc kubenswrapper[4558]: I0120 17:40:31.882931 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/3a66aaee-5aa8-4eb9-bda7-d66d0e98080a-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:40:31 crc kubenswrapper[4558]: I0120 17:40:31.882942 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/3a66aaee-5aa8-4eb9-bda7-d66d0e98080a-httpd-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:40:31 crc kubenswrapper[4558]: I0120 17:40:31.882953 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a66aaee-5aa8-4eb9-bda7-d66d0e98080a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:40:31 crc kubenswrapper[4558]: I0120 17:40:31.882961 4558 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/3a66aaee-5aa8-4eb9-bda7-d66d0e98080a-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:40:31 crc kubenswrapper[4558]: E0120 17:40:31.924476 4558 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod32ebfd5d_3392_4ab0_b1f7_1c12001b2ef2.slice/crio-bcbdfa37349f1e89eaec79cbd97fa05f054554461a223bbe18052651d462d9db.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod32ebfd5d_3392_4ab0_b1f7_1c12001b2ef2.slice/crio-conmon-bcbdfa37349f1e89eaec79cbd97fa05f054554461a223bbe18052651d462d9db.scope\": RecentStats: unable to find data in memory cache]" Jan 20 17:40:32 crc kubenswrapper[4558]: I0120 17:40:32.137990 4558 generic.go:334] "Generic (PLEG): container finished" podID="3a66aaee-5aa8-4eb9-bda7-d66d0e98080a" 
containerID="80ecb05a176d9dd2c2ba2cc06f79d46cd7281bd40c6011f536821eb2b2fed033" exitCode=0 Jan 20 17:40:32 crc kubenswrapper[4558]: I0120 17:40:32.138071 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-6ddffc77f7-gjhlj" Jan 20 17:40:32 crc kubenswrapper[4558]: I0120 17:40:32.138109 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-6ddffc77f7-gjhlj" event={"ID":"3a66aaee-5aa8-4eb9-bda7-d66d0e98080a","Type":"ContainerDied","Data":"80ecb05a176d9dd2c2ba2cc06f79d46cd7281bd40c6011f536821eb2b2fed033"} Jan 20 17:40:32 crc kubenswrapper[4558]: I0120 17:40:32.138614 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-6ddffc77f7-gjhlj" event={"ID":"3a66aaee-5aa8-4eb9-bda7-d66d0e98080a","Type":"ContainerDied","Data":"52af256f1f87c57a8177b8efb5db7d22207fabc65efdd122338fe09e120668c5"} Jan 20 17:40:32 crc kubenswrapper[4558]: I0120 17:40:32.138649 4558 scope.go:117] "RemoveContainer" containerID="cc9c674e9ff89ec151bce4ce08e91b67b59f3d76de81cda809415d3d20cf9b1d" Jan 20 17:40:32 crc kubenswrapper[4558]: I0120 17:40:32.154324 4558 generic.go:334] "Generic (PLEG): container finished" podID="32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2" containerID="bcbdfa37349f1e89eaec79cbd97fa05f054554461a223bbe18052651d462d9db" exitCode=0 Jan 20 17:40:32 crc kubenswrapper[4558]: I0120 17:40:32.154354 4558 generic.go:334] "Generic (PLEG): container finished" podID="32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2" containerID="cd433ab19101a55fe28a4b5f5f546b83f5d3862cab4f05189daa9c99ec18eb21" exitCode=2 Jan 20 17:40:32 crc kubenswrapper[4558]: I0120 17:40:32.154363 4558 generic.go:334] "Generic (PLEG): container finished" podID="32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2" containerID="aa0ca993872e33ad8a732487b7655b5b2c29e6e5ca4080a8dec4fd3d9941851a" exitCode=0 Jan 20 17:40:32 crc kubenswrapper[4558]: I0120 17:40:32.154412 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2","Type":"ContainerDied","Data":"bcbdfa37349f1e89eaec79cbd97fa05f054554461a223bbe18052651d462d9db"} Jan 20 17:40:32 crc kubenswrapper[4558]: I0120 17:40:32.154440 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2","Type":"ContainerDied","Data":"cd433ab19101a55fe28a4b5f5f546b83f5d3862cab4f05189daa9c99ec18eb21"} Jan 20 17:40:32 crc kubenswrapper[4558]: I0120 17:40:32.154452 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2","Type":"ContainerDied","Data":"aa0ca993872e33ad8a732487b7655b5b2c29e6e5ca4080a8dec4fd3d9941851a"} Jan 20 17:40:32 crc kubenswrapper[4558]: I0120 17:40:32.159345 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-6f98b8f5cb-8bwzs" event={"ID":"858ca371-2b02-46d8-879c-8d859c31d590","Type":"ContainerStarted","Data":"33a031db5256c97f79957951d874d8efcad761b154e6543e7aeaa0d0878d4ddb"} Jan 20 17:40:32 crc kubenswrapper[4558]: I0120 17:40:32.159387 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-6f98b8f5cb-8bwzs" event={"ID":"858ca371-2b02-46d8-879c-8d859c31d590","Type":"ContainerStarted","Data":"97c43c6886d191e0702cb49b84ebfa79710357c4b83cb249c5fc746e2090b3d7"} Jan 20 17:40:32 crc kubenswrapper[4558]: I0120 17:40:32.159679 4558 kubelet.go:2542] "SyncLoop 
(probe)" probe="readiness" status="" pod="openstack-kuttl-tests/swift-proxy-6f98b8f5cb-8bwzs" Jan 20 17:40:32 crc kubenswrapper[4558]: I0120 17:40:32.161003 4558 scope.go:117] "RemoveContainer" containerID="80ecb05a176d9dd2c2ba2cc06f79d46cd7281bd40c6011f536821eb2b2fed033" Jan 20 17:40:32 crc kubenswrapper[4558]: I0120 17:40:32.185692 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/swift-proxy-6f98b8f5cb-8bwzs" podStartSLOduration=2.185670418 podStartE2EDuration="2.185670418s" podCreationTimestamp="2026-01-20 17:40:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:40:32.17483241 +0000 UTC m=+3525.935170376" watchObservedRunningTime="2026-01-20 17:40:32.185670418 +0000 UTC m=+3525.946008385" Jan 20 17:40:32 crc kubenswrapper[4558]: I0120 17:40:32.216666 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-6ddffc77f7-gjhlj"] Jan 20 17:40:32 crc kubenswrapper[4558]: I0120 17:40:32.218367 4558 scope.go:117] "RemoveContainer" containerID="cc9c674e9ff89ec151bce4ce08e91b67b59f3d76de81cda809415d3d20cf9b1d" Jan 20 17:40:32 crc kubenswrapper[4558]: E0120 17:40:32.223330 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cc9c674e9ff89ec151bce4ce08e91b67b59f3d76de81cda809415d3d20cf9b1d\": container with ID starting with cc9c674e9ff89ec151bce4ce08e91b67b59f3d76de81cda809415d3d20cf9b1d not found: ID does not exist" containerID="cc9c674e9ff89ec151bce4ce08e91b67b59f3d76de81cda809415d3d20cf9b1d" Jan 20 17:40:32 crc kubenswrapper[4558]: I0120 17:40:32.223375 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cc9c674e9ff89ec151bce4ce08e91b67b59f3d76de81cda809415d3d20cf9b1d"} err="failed to get container status \"cc9c674e9ff89ec151bce4ce08e91b67b59f3d76de81cda809415d3d20cf9b1d\": rpc error: code = NotFound desc = could not find container \"cc9c674e9ff89ec151bce4ce08e91b67b59f3d76de81cda809415d3d20cf9b1d\": container with ID starting with cc9c674e9ff89ec151bce4ce08e91b67b59f3d76de81cda809415d3d20cf9b1d not found: ID does not exist" Jan 20 17:40:32 crc kubenswrapper[4558]: I0120 17:40:32.223408 4558 scope.go:117] "RemoveContainer" containerID="80ecb05a176d9dd2c2ba2cc06f79d46cd7281bd40c6011f536821eb2b2fed033" Jan 20 17:40:32 crc kubenswrapper[4558]: E0120 17:40:32.229258 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"80ecb05a176d9dd2c2ba2cc06f79d46cd7281bd40c6011f536821eb2b2fed033\": container with ID starting with 80ecb05a176d9dd2c2ba2cc06f79d46cd7281bd40c6011f536821eb2b2fed033 not found: ID does not exist" containerID="80ecb05a176d9dd2c2ba2cc06f79d46cd7281bd40c6011f536821eb2b2fed033" Jan 20 17:40:32 crc kubenswrapper[4558]: I0120 17:40:32.229292 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"80ecb05a176d9dd2c2ba2cc06f79d46cd7281bd40c6011f536821eb2b2fed033"} err="failed to get container status \"80ecb05a176d9dd2c2ba2cc06f79d46cd7281bd40c6011f536821eb2b2fed033\": rpc error: code = NotFound desc = could not find container \"80ecb05a176d9dd2c2ba2cc06f79d46cd7281bd40c6011f536821eb2b2fed033\": container with ID starting with 80ecb05a176d9dd2c2ba2cc06f79d46cd7281bd40c6011f536821eb2b2fed033 not found: ID does not exist" Jan 20 17:40:32 crc kubenswrapper[4558]: I0120 
17:40:32.251497 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-6ddffc77f7-gjhlj"] Jan 20 17:40:32 crc kubenswrapper[4558]: I0120 17:40:32.578928 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3a66aaee-5aa8-4eb9-bda7-d66d0e98080a" path="/var/lib/kubelet/pods/3a66aaee-5aa8-4eb9-bda7-d66d0e98080a/volumes" Jan 20 17:40:33 crc kubenswrapper[4558]: I0120 17:40:33.209327 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/swift-proxy-6f98b8f5cb-8bwzs" Jan 20 17:40:35 crc kubenswrapper[4558]: I0120 17:40:35.435950 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-db-create-vhnt4"] Jan 20 17:40:35 crc kubenswrapper[4558]: E0120 17:40:35.436777 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a66aaee-5aa8-4eb9-bda7-d66d0e98080a" containerName="neutron-api" Jan 20 17:40:35 crc kubenswrapper[4558]: I0120 17:40:35.436793 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a66aaee-5aa8-4eb9-bda7-d66d0e98080a" containerName="neutron-api" Jan 20 17:40:35 crc kubenswrapper[4558]: E0120 17:40:35.436803 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a66aaee-5aa8-4eb9-bda7-d66d0e98080a" containerName="neutron-httpd" Jan 20 17:40:35 crc kubenswrapper[4558]: I0120 17:40:35.436809 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a66aaee-5aa8-4eb9-bda7-d66d0e98080a" containerName="neutron-httpd" Jan 20 17:40:35 crc kubenswrapper[4558]: I0120 17:40:35.437040 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="3a66aaee-5aa8-4eb9-bda7-d66d0e98080a" containerName="neutron-httpd" Jan 20 17:40:35 crc kubenswrapper[4558]: I0120 17:40:35.437062 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="3a66aaee-5aa8-4eb9-bda7-d66d0e98080a" containerName="neutron-api" Jan 20 17:40:35 crc kubenswrapper[4558]: I0120 17:40:35.437698 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-db-create-vhnt4" Jan 20 17:40:35 crc kubenswrapper[4558]: I0120 17:40:35.446589 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-db-create-vhnt4"] Jan 20 17:40:35 crc kubenswrapper[4558]: I0120 17:40:35.461974 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e451d734-7167-4b92-8c0b-22b1e3f4ef67-operator-scripts\") pod \"nova-api-db-create-vhnt4\" (UID: \"e451d734-7167-4b92-8c0b-22b1e3f4ef67\") " pod="openstack-kuttl-tests/nova-api-db-create-vhnt4" Jan 20 17:40:35 crc kubenswrapper[4558]: I0120 17:40:35.462029 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kjkxj\" (UniqueName: \"kubernetes.io/projected/e451d734-7167-4b92-8c0b-22b1e3f4ef67-kube-api-access-kjkxj\") pod \"nova-api-db-create-vhnt4\" (UID: \"e451d734-7167-4b92-8c0b-22b1e3f4ef67\") " pod="openstack-kuttl-tests/nova-api-db-create-vhnt4" Jan 20 17:40:35 crc kubenswrapper[4558]: I0120 17:40:35.548712 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell0-db-create-75945"] Jan 20 17:40:35 crc kubenswrapper[4558]: I0120 17:40:35.550836 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-db-create-75945" Jan 20 17:40:35 crc kubenswrapper[4558]: I0120 17:40:35.564938 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-db-create-75945"] Jan 20 17:40:35 crc kubenswrapper[4558]: I0120 17:40:35.565363 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2w4gn\" (UniqueName: \"kubernetes.io/projected/a312b2d2-a836-40d2-8de8-9e75ef44ff9a-kube-api-access-2w4gn\") pod \"nova-cell0-db-create-75945\" (UID: \"a312b2d2-a836-40d2-8de8-9e75ef44ff9a\") " pod="openstack-kuttl-tests/nova-cell0-db-create-75945" Jan 20 17:40:35 crc kubenswrapper[4558]: I0120 17:40:35.565490 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a312b2d2-a836-40d2-8de8-9e75ef44ff9a-operator-scripts\") pod \"nova-cell0-db-create-75945\" (UID: \"a312b2d2-a836-40d2-8de8-9e75ef44ff9a\") " pod="openstack-kuttl-tests/nova-cell0-db-create-75945" Jan 20 17:40:35 crc kubenswrapper[4558]: I0120 17:40:35.565924 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e451d734-7167-4b92-8c0b-22b1e3f4ef67-operator-scripts\") pod \"nova-api-db-create-vhnt4\" (UID: \"e451d734-7167-4b92-8c0b-22b1e3f4ef67\") " pod="openstack-kuttl-tests/nova-api-db-create-vhnt4" Jan 20 17:40:35 crc kubenswrapper[4558]: I0120 17:40:35.566050 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kjkxj\" (UniqueName: \"kubernetes.io/projected/e451d734-7167-4b92-8c0b-22b1e3f4ef67-kube-api-access-kjkxj\") pod \"nova-api-db-create-vhnt4\" (UID: \"e451d734-7167-4b92-8c0b-22b1e3f4ef67\") " pod="openstack-kuttl-tests/nova-api-db-create-vhnt4" Jan 20 17:40:35 crc kubenswrapper[4558]: I0120 17:40:35.567025 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e451d734-7167-4b92-8c0b-22b1e3f4ef67-operator-scripts\") pod \"nova-api-db-create-vhnt4\" (UID: \"e451d734-7167-4b92-8c0b-22b1e3f4ef67\") " pod="openstack-kuttl-tests/nova-api-db-create-vhnt4" Jan 20 17:40:35 crc kubenswrapper[4558]: I0120 17:40:35.588751 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kjkxj\" (UniqueName: \"kubernetes.io/projected/e451d734-7167-4b92-8c0b-22b1e3f4ef67-kube-api-access-kjkxj\") pod \"nova-api-db-create-vhnt4\" (UID: \"e451d734-7167-4b92-8c0b-22b1e3f4ef67\") " pod="openstack-kuttl-tests/nova-api-db-create-vhnt4" Jan 20 17:40:35 crc kubenswrapper[4558]: I0120 17:40:35.650241 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-3b8d-account-create-update-s9plf"] Jan 20 17:40:35 crc kubenswrapper[4558]: I0120 17:40:35.651629 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-3b8d-account-create-update-s9plf" Jan 20 17:40:35 crc kubenswrapper[4558]: I0120 17:40:35.653267 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-api-db-secret" Jan 20 17:40:35 crc kubenswrapper[4558]: I0120 17:40:35.657545 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-db-create-2gc5v"] Jan 20 17:40:35 crc kubenswrapper[4558]: I0120 17:40:35.659095 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-db-create-2gc5v" Jan 20 17:40:35 crc kubenswrapper[4558]: I0120 17:40:35.664039 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-db-create-2gc5v"] Jan 20 17:40:35 crc kubenswrapper[4558]: I0120 17:40:35.669081 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-3b8d-account-create-update-s9plf"] Jan 20 17:40:35 crc kubenswrapper[4558]: I0120 17:40:35.669708 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2w4gn\" (UniqueName: \"kubernetes.io/projected/a312b2d2-a836-40d2-8de8-9e75ef44ff9a-kube-api-access-2w4gn\") pod \"nova-cell0-db-create-75945\" (UID: \"a312b2d2-a836-40d2-8de8-9e75ef44ff9a\") " pod="openstack-kuttl-tests/nova-cell0-db-create-75945" Jan 20 17:40:35 crc kubenswrapper[4558]: I0120 17:40:35.669777 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a312b2d2-a836-40d2-8de8-9e75ef44ff9a-operator-scripts\") pod \"nova-cell0-db-create-75945\" (UID: \"a312b2d2-a836-40d2-8de8-9e75ef44ff9a\") " pod="openstack-kuttl-tests/nova-cell0-db-create-75945" Jan 20 17:40:35 crc kubenswrapper[4558]: I0120 17:40:35.670531 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a312b2d2-a836-40d2-8de8-9e75ef44ff9a-operator-scripts\") pod \"nova-cell0-db-create-75945\" (UID: \"a312b2d2-a836-40d2-8de8-9e75ef44ff9a\") " pod="openstack-kuttl-tests/nova-cell0-db-create-75945" Jan 20 17:40:35 crc kubenswrapper[4558]: I0120 17:40:35.686746 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2w4gn\" (UniqueName: \"kubernetes.io/projected/a312b2d2-a836-40d2-8de8-9e75ef44ff9a-kube-api-access-2w4gn\") pod \"nova-cell0-db-create-75945\" (UID: \"a312b2d2-a836-40d2-8de8-9e75ef44ff9a\") " pod="openstack-kuttl-tests/nova-cell0-db-create-75945" Jan 20 17:40:35 crc kubenswrapper[4558]: I0120 17:40:35.753603 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-db-create-vhnt4" Jan 20 17:40:35 crc kubenswrapper[4558]: I0120 17:40:35.771544 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d0eb0bc4-4bc9-4c7a-ac58-c62d4674549d-operator-scripts\") pod \"nova-api-3b8d-account-create-update-s9plf\" (UID: \"d0eb0bc4-4bc9-4c7a-ac58-c62d4674549d\") " pod="openstack-kuttl-tests/nova-api-3b8d-account-create-update-s9plf" Jan 20 17:40:35 crc kubenswrapper[4558]: I0120 17:40:35.772735 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9ccbac5d-4369-44e5-9dfc-add1f7987639-operator-scripts\") pod \"nova-cell1-db-create-2gc5v\" (UID: \"9ccbac5d-4369-44e5-9dfc-add1f7987639\") " pod="openstack-kuttl-tests/nova-cell1-db-create-2gc5v" Jan 20 17:40:35 crc kubenswrapper[4558]: I0120 17:40:35.772865 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wzd95\" (UniqueName: \"kubernetes.io/projected/9ccbac5d-4369-44e5-9dfc-add1f7987639-kube-api-access-wzd95\") pod \"nova-cell1-db-create-2gc5v\" (UID: \"9ccbac5d-4369-44e5-9dfc-add1f7987639\") " pod="openstack-kuttl-tests/nova-cell1-db-create-2gc5v" Jan 20 17:40:35 crc kubenswrapper[4558]: I0120 17:40:35.772953 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rplvm\" (UniqueName: \"kubernetes.io/projected/d0eb0bc4-4bc9-4c7a-ac58-c62d4674549d-kube-api-access-rplvm\") pod \"nova-api-3b8d-account-create-update-s9plf\" (UID: \"d0eb0bc4-4bc9-4c7a-ac58-c62d4674549d\") " pod="openstack-kuttl-tests/nova-api-3b8d-account-create-update-s9plf" Jan 20 17:40:35 crc kubenswrapper[4558]: I0120 17:40:35.860343 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell0-bd94-account-create-update-n5ssg"] Jan 20 17:40:35 crc kubenswrapper[4558]: I0120 17:40:35.866874 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-bd94-account-create-update-n5ssg" Jan 20 17:40:35 crc kubenswrapper[4558]: I0120 17:40:35.868808 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-db-secret" Jan 20 17:40:35 crc kubenswrapper[4558]: I0120 17:40:35.872634 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-bd94-account-create-update-n5ssg"] Jan 20 17:40:35 crc kubenswrapper[4558]: I0120 17:40:35.872965 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-db-create-75945" Jan 20 17:40:35 crc kubenswrapper[4558]: I0120 17:40:35.876392 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9ccbac5d-4369-44e5-9dfc-add1f7987639-operator-scripts\") pod \"nova-cell1-db-create-2gc5v\" (UID: \"9ccbac5d-4369-44e5-9dfc-add1f7987639\") " pod="openstack-kuttl-tests/nova-cell1-db-create-2gc5v" Jan 20 17:40:35 crc kubenswrapper[4558]: I0120 17:40:35.876452 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wzd95\" (UniqueName: \"kubernetes.io/projected/9ccbac5d-4369-44e5-9dfc-add1f7987639-kube-api-access-wzd95\") pod \"nova-cell1-db-create-2gc5v\" (UID: \"9ccbac5d-4369-44e5-9dfc-add1f7987639\") " pod="openstack-kuttl-tests/nova-cell1-db-create-2gc5v" Jan 20 17:40:35 crc kubenswrapper[4558]: I0120 17:40:35.876495 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rplvm\" (UniqueName: \"kubernetes.io/projected/d0eb0bc4-4bc9-4c7a-ac58-c62d4674549d-kube-api-access-rplvm\") pod \"nova-api-3b8d-account-create-update-s9plf\" (UID: \"d0eb0bc4-4bc9-4c7a-ac58-c62d4674549d\") " pod="openstack-kuttl-tests/nova-api-3b8d-account-create-update-s9plf" Jan 20 17:40:35 crc kubenswrapper[4558]: I0120 17:40:35.876562 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/93c34095-c480-40ce-b40e-bf05101bef8f-operator-scripts\") pod \"nova-cell0-bd94-account-create-update-n5ssg\" (UID: \"93c34095-c480-40ce-b40e-bf05101bef8f\") " pod="openstack-kuttl-tests/nova-cell0-bd94-account-create-update-n5ssg" Jan 20 17:40:35 crc kubenswrapper[4558]: I0120 17:40:35.876609 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d0eb0bc4-4bc9-4c7a-ac58-c62d4674549d-operator-scripts\") pod \"nova-api-3b8d-account-create-update-s9plf\" (UID: \"d0eb0bc4-4bc9-4c7a-ac58-c62d4674549d\") " pod="openstack-kuttl-tests/nova-api-3b8d-account-create-update-s9plf" Jan 20 17:40:35 crc kubenswrapper[4558]: I0120 17:40:35.876711 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b5fx4\" (UniqueName: \"kubernetes.io/projected/93c34095-c480-40ce-b40e-bf05101bef8f-kube-api-access-b5fx4\") pod \"nova-cell0-bd94-account-create-update-n5ssg\" (UID: \"93c34095-c480-40ce-b40e-bf05101bef8f\") " pod="openstack-kuttl-tests/nova-cell0-bd94-account-create-update-n5ssg" Jan 20 17:40:35 crc kubenswrapper[4558]: I0120 17:40:35.877988 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9ccbac5d-4369-44e5-9dfc-add1f7987639-operator-scripts\") pod \"nova-cell1-db-create-2gc5v\" (UID: \"9ccbac5d-4369-44e5-9dfc-add1f7987639\") " pod="openstack-kuttl-tests/nova-cell1-db-create-2gc5v" Jan 20 17:40:35 crc kubenswrapper[4558]: I0120 17:40:35.883502 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d0eb0bc4-4bc9-4c7a-ac58-c62d4674549d-operator-scripts\") pod \"nova-api-3b8d-account-create-update-s9plf\" (UID: \"d0eb0bc4-4bc9-4c7a-ac58-c62d4674549d\") " pod="openstack-kuttl-tests/nova-api-3b8d-account-create-update-s9plf" Jan 20 17:40:35 crc kubenswrapper[4558]: I0120 
17:40:35.892402 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wzd95\" (UniqueName: \"kubernetes.io/projected/9ccbac5d-4369-44e5-9dfc-add1f7987639-kube-api-access-wzd95\") pod \"nova-cell1-db-create-2gc5v\" (UID: \"9ccbac5d-4369-44e5-9dfc-add1f7987639\") " pod="openstack-kuttl-tests/nova-cell1-db-create-2gc5v" Jan 20 17:40:35 crc kubenswrapper[4558]: I0120 17:40:35.895613 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rplvm\" (UniqueName: \"kubernetes.io/projected/d0eb0bc4-4bc9-4c7a-ac58-c62d4674549d-kube-api-access-rplvm\") pod \"nova-api-3b8d-account-create-update-s9plf\" (UID: \"d0eb0bc4-4bc9-4c7a-ac58-c62d4674549d\") " pod="openstack-kuttl-tests/nova-api-3b8d-account-create-update-s9plf" Jan 20 17:40:35 crc kubenswrapper[4558]: I0120 17:40:35.976727 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-3b8d-account-create-update-s9plf" Jan 20 17:40:35 crc kubenswrapper[4558]: I0120 17:40:35.978652 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-db-create-2gc5v" Jan 20 17:40:35 crc kubenswrapper[4558]: I0120 17:40:35.980092 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/93c34095-c480-40ce-b40e-bf05101bef8f-operator-scripts\") pod \"nova-cell0-bd94-account-create-update-n5ssg\" (UID: \"93c34095-c480-40ce-b40e-bf05101bef8f\") " pod="openstack-kuttl-tests/nova-cell0-bd94-account-create-update-n5ssg" Jan 20 17:40:35 crc kubenswrapper[4558]: I0120 17:40:35.980214 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b5fx4\" (UniqueName: \"kubernetes.io/projected/93c34095-c480-40ce-b40e-bf05101bef8f-kube-api-access-b5fx4\") pod \"nova-cell0-bd94-account-create-update-n5ssg\" (UID: \"93c34095-c480-40ce-b40e-bf05101bef8f\") " pod="openstack-kuttl-tests/nova-cell0-bd94-account-create-update-n5ssg" Jan 20 17:40:35 crc kubenswrapper[4558]: I0120 17:40:35.981451 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/93c34095-c480-40ce-b40e-bf05101bef8f-operator-scripts\") pod \"nova-cell0-bd94-account-create-update-n5ssg\" (UID: \"93c34095-c480-40ce-b40e-bf05101bef8f\") " pod="openstack-kuttl-tests/nova-cell0-bd94-account-create-update-n5ssg" Jan 20 17:40:35 crc kubenswrapper[4558]: I0120 17:40:35.998605 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b5fx4\" (UniqueName: \"kubernetes.io/projected/93c34095-c480-40ce-b40e-bf05101bef8f-kube-api-access-b5fx4\") pod \"nova-cell0-bd94-account-create-update-n5ssg\" (UID: \"93c34095-c480-40ce-b40e-bf05101bef8f\") " pod="openstack-kuttl-tests/nova-cell0-bd94-account-create-update-n5ssg" Jan 20 17:40:36 crc kubenswrapper[4558]: I0120 17:40:36.047846 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-db-create-vhnt4"] Jan 20 17:40:36 crc kubenswrapper[4558]: I0120 17:40:36.056270 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-51af-account-create-update-96ttf"] Jan 20 17:40:36 crc kubenswrapper[4558]: I0120 17:40:36.058058 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-51af-account-create-update-96ttf" Jan 20 17:40:36 crc kubenswrapper[4558]: I0120 17:40:36.060246 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-db-secret" Jan 20 17:40:36 crc kubenswrapper[4558]: I0120 17:40:36.061826 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-51af-account-create-update-96ttf"] Jan 20 17:40:36 crc kubenswrapper[4558]: I0120 17:40:36.081349 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2fxsw\" (UniqueName: \"kubernetes.io/projected/4150b9b7-ff56-4e21-b3c6-62510856cc94-kube-api-access-2fxsw\") pod \"nova-cell1-51af-account-create-update-96ttf\" (UID: \"4150b9b7-ff56-4e21-b3c6-62510856cc94\") " pod="openstack-kuttl-tests/nova-cell1-51af-account-create-update-96ttf" Jan 20 17:40:36 crc kubenswrapper[4558]: I0120 17:40:36.081453 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4150b9b7-ff56-4e21-b3c6-62510856cc94-operator-scripts\") pod \"nova-cell1-51af-account-create-update-96ttf\" (UID: \"4150b9b7-ff56-4e21-b3c6-62510856cc94\") " pod="openstack-kuttl-tests/nova-cell1-51af-account-create-update-96ttf" Jan 20 17:40:36 crc kubenswrapper[4558]: I0120 17:40:36.183462 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4150b9b7-ff56-4e21-b3c6-62510856cc94-operator-scripts\") pod \"nova-cell1-51af-account-create-update-96ttf\" (UID: \"4150b9b7-ff56-4e21-b3c6-62510856cc94\") " pod="openstack-kuttl-tests/nova-cell1-51af-account-create-update-96ttf" Jan 20 17:40:36 crc kubenswrapper[4558]: I0120 17:40:36.183983 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2fxsw\" (UniqueName: \"kubernetes.io/projected/4150b9b7-ff56-4e21-b3c6-62510856cc94-kube-api-access-2fxsw\") pod \"nova-cell1-51af-account-create-update-96ttf\" (UID: \"4150b9b7-ff56-4e21-b3c6-62510856cc94\") " pod="openstack-kuttl-tests/nova-cell1-51af-account-create-update-96ttf" Jan 20 17:40:36 crc kubenswrapper[4558]: I0120 17:40:36.184123 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4150b9b7-ff56-4e21-b3c6-62510856cc94-operator-scripts\") pod \"nova-cell1-51af-account-create-update-96ttf\" (UID: \"4150b9b7-ff56-4e21-b3c6-62510856cc94\") " pod="openstack-kuttl-tests/nova-cell1-51af-account-create-update-96ttf" Jan 20 17:40:36 crc kubenswrapper[4558]: I0120 17:40:36.203787 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2fxsw\" (UniqueName: \"kubernetes.io/projected/4150b9b7-ff56-4e21-b3c6-62510856cc94-kube-api-access-2fxsw\") pod \"nova-cell1-51af-account-create-update-96ttf\" (UID: \"4150b9b7-ff56-4e21-b3c6-62510856cc94\") " pod="openstack-kuttl-tests/nova-cell1-51af-account-create-update-96ttf" Jan 20 17:40:36 crc kubenswrapper[4558]: I0120 17:40:36.229853 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-bd94-account-create-update-n5ssg" Jan 20 17:40:36 crc kubenswrapper[4558]: I0120 17:40:36.238205 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-db-create-vhnt4" event={"ID":"e451d734-7167-4b92-8c0b-22b1e3f4ef67","Type":"ContainerStarted","Data":"b8073107ad7ea7845349c804ef266cf79a76d892989926d112eb5a0d45a26d7c"} Jan 20 17:40:36 crc kubenswrapper[4558]: I0120 17:40:36.238254 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-db-create-vhnt4" event={"ID":"e451d734-7167-4b92-8c0b-22b1e3f4ef67","Type":"ContainerStarted","Data":"76147b0ab7d2976be2d0455911e27d674bc1ff69393f1873540f6a36aac1d1e3"} Jan 20 17:40:36 crc kubenswrapper[4558]: I0120 17:40:36.254728 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-api-db-create-vhnt4" podStartSLOduration=1.254707552 podStartE2EDuration="1.254707552s" podCreationTimestamp="2026-01-20 17:40:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:40:36.252005111 +0000 UTC m=+3530.012343077" watchObservedRunningTime="2026-01-20 17:40:36.254707552 +0000 UTC m=+3530.015045518" Jan 20 17:40:36 crc kubenswrapper[4558]: I0120 17:40:36.343396 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-db-create-75945"] Jan 20 17:40:36 crc kubenswrapper[4558]: I0120 17:40:36.382145 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-51af-account-create-update-96ttf" Jan 20 17:40:36 crc kubenswrapper[4558]: I0120 17:40:36.440858 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-3b8d-account-create-update-s9plf"] Jan 20 17:40:36 crc kubenswrapper[4558]: I0120 17:40:36.500584 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-db-create-2gc5v"] Jan 20 17:40:36 crc kubenswrapper[4558]: I0120 17:40:36.664230 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-bd94-account-create-update-n5ssg"] Jan 20 17:40:36 crc kubenswrapper[4558]: W0120 17:40:36.671559 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod93c34095_c480_40ce_b40e_bf05101bef8f.slice/crio-a5d11fe0979d62a453a3993e497c60b3e0caa44b4eb29f6f6ce6cf4dc81e7994 WatchSource:0}: Error finding container a5d11fe0979d62a453a3993e497c60b3e0caa44b4eb29f6f6ce6cf4dc81e7994: Status 404 returned error can't find the container with id a5d11fe0979d62a453a3993e497c60b3e0caa44b4eb29f6f6ce6cf4dc81e7994 Jan 20 17:40:36 crc kubenswrapper[4558]: I0120 17:40:36.799328 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-51af-account-create-update-96ttf"] Jan 20 17:40:36 crc kubenswrapper[4558]: W0120 17:40:36.830406 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4150b9b7_ff56_4e21_b3c6_62510856cc94.slice/crio-6ccf91d53c8e64c5a89239afba9b91661a93e658238035ff1d44a3fd54594419 WatchSource:0}: Error finding container 6ccf91d53c8e64c5a89239afba9b91661a93e658238035ff1d44a3fd54594419: Status 404 returned error can't find the container with id 6ccf91d53c8e64c5a89239afba9b91661a93e658238035ff1d44a3fd54594419 Jan 20 17:40:37 crc 
kubenswrapper[4558]: I0120 17:40:37.208084 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:37 crc kubenswrapper[4558]: I0120 17:40:37.212337 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2-log-httpd\") pod \"32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2\" (UID: \"32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2\") " Jan 20 17:40:37 crc kubenswrapper[4558]: I0120 17:40:37.212409 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2-run-httpd\") pod \"32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2\" (UID: \"32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2\") " Jan 20 17:40:37 crc kubenswrapper[4558]: I0120 17:40:37.212474 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2-config-data\") pod \"32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2\" (UID: \"32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2\") " Jan 20 17:40:37 crc kubenswrapper[4558]: I0120 17:40:37.212504 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2-scripts\") pod \"32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2\" (UID: \"32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2\") " Jan 20 17:40:37 crc kubenswrapper[4558]: I0120 17:40:37.212539 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2-sg-core-conf-yaml\") pod \"32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2\" (UID: \"32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2\") " Jan 20 17:40:37 crc kubenswrapper[4558]: I0120 17:40:37.212570 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t47hk\" (UniqueName: \"kubernetes.io/projected/32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2-kube-api-access-t47hk\") pod \"32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2\" (UID: \"32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2\") " Jan 20 17:40:37 crc kubenswrapper[4558]: I0120 17:40:37.212616 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2-combined-ca-bundle\") pod \"32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2\" (UID: \"32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2\") " Jan 20 17:40:37 crc kubenswrapper[4558]: I0120 17:40:37.213064 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2" (UID: "32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:40:37 crc kubenswrapper[4558]: I0120 17:40:37.213307 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:40:37 crc kubenswrapper[4558]: I0120 17:40:37.215507 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2" (UID: "32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:40:37 crc kubenswrapper[4558]: I0120 17:40:37.253747 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2-kube-api-access-t47hk" (OuterVolumeSpecName: "kube-api-access-t47hk") pod "32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2" (UID: "32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2"). InnerVolumeSpecName "kube-api-access-t47hk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:40:37 crc kubenswrapper[4558]: I0120 17:40:37.254284 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2-scripts" (OuterVolumeSpecName: "scripts") pod "32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2" (UID: "32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:40:37 crc kubenswrapper[4558]: I0120 17:40:37.255804 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-51af-account-create-update-96ttf" event={"ID":"4150b9b7-ff56-4e21-b3c6-62510856cc94","Type":"ContainerStarted","Data":"6ccf91d53c8e64c5a89239afba9b91661a93e658238035ff1d44a3fd54594419"} Jan 20 17:40:37 crc kubenswrapper[4558]: I0120 17:40:37.258433 4558 generic.go:334] "Generic (PLEG): container finished" podID="d0eb0bc4-4bc9-4c7a-ac58-c62d4674549d" containerID="ca8088e4b215e716484cafe5dc6cd2427095c82b587c9dd06837d05764131f03" exitCode=0 Jan 20 17:40:37 crc kubenswrapper[4558]: I0120 17:40:37.258508 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-3b8d-account-create-update-s9plf" event={"ID":"d0eb0bc4-4bc9-4c7a-ac58-c62d4674549d","Type":"ContainerDied","Data":"ca8088e4b215e716484cafe5dc6cd2427095c82b587c9dd06837d05764131f03"} Jan 20 17:40:37 crc kubenswrapper[4558]: I0120 17:40:37.258544 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-3b8d-account-create-update-s9plf" event={"ID":"d0eb0bc4-4bc9-4c7a-ac58-c62d4674549d","Type":"ContainerStarted","Data":"84bc272387d3bd53cb64528f046771bf55cb6995ba3726e9699398e887485542"} Jan 20 17:40:37 crc kubenswrapper[4558]: I0120 17:40:37.265175 4558 generic.go:334] "Generic (PLEG): container finished" podID="32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2" containerID="26f5c1167982dbbb9e0e3adf24d031f01e322c717b64319de2cfccd54308f098" exitCode=0 Jan 20 17:40:37 crc kubenswrapper[4558]: I0120 17:40:37.265246 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2","Type":"ContainerDied","Data":"26f5c1167982dbbb9e0e3adf24d031f01e322c717b64319de2cfccd54308f098"} Jan 20 17:40:37 crc kubenswrapper[4558]: I0120 17:40:37.265281 4558 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2","Type":"ContainerDied","Data":"33e8449b98d41f9cae4c7cb96d6febee4c0e3a93c6b8ac12297d03f549d1a23b"} Jan 20 17:40:37 crc kubenswrapper[4558]: I0120 17:40:37.265299 4558 scope.go:117] "RemoveContainer" containerID="bcbdfa37349f1e89eaec79cbd97fa05f054554461a223bbe18052651d462d9db" Jan 20 17:40:37 crc kubenswrapper[4558]: I0120 17:40:37.265312 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:37 crc kubenswrapper[4558]: I0120 17:40:37.268692 4558 generic.go:334] "Generic (PLEG): container finished" podID="a312b2d2-a836-40d2-8de8-9e75ef44ff9a" containerID="4a9490a5c5547167886edaefed1bfe4c918b053d999248c687a66c4bb4b18544" exitCode=0 Jan 20 17:40:37 crc kubenswrapper[4558]: I0120 17:40:37.268968 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-db-create-75945" event={"ID":"a312b2d2-a836-40d2-8de8-9e75ef44ff9a","Type":"ContainerDied","Data":"4a9490a5c5547167886edaefed1bfe4c918b053d999248c687a66c4bb4b18544"} Jan 20 17:40:37 crc kubenswrapper[4558]: I0120 17:40:37.269013 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-db-create-75945" event={"ID":"a312b2d2-a836-40d2-8de8-9e75ef44ff9a","Type":"ContainerStarted","Data":"a05e8eb925c9f1c9278580228108b9bcdca3fc3db06b95dc853f082e1ba2a265"} Jan 20 17:40:37 crc kubenswrapper[4558]: I0120 17:40:37.287958 4558 generic.go:334] "Generic (PLEG): container finished" podID="e451d734-7167-4b92-8c0b-22b1e3f4ef67" containerID="b8073107ad7ea7845349c804ef266cf79a76d892989926d112eb5a0d45a26d7c" exitCode=0 Jan 20 17:40:37 crc kubenswrapper[4558]: I0120 17:40:37.288087 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-db-create-vhnt4" event={"ID":"e451d734-7167-4b92-8c0b-22b1e3f4ef67","Type":"ContainerDied","Data":"b8073107ad7ea7845349c804ef266cf79a76d892989926d112eb5a0d45a26d7c"} Jan 20 17:40:37 crc kubenswrapper[4558]: I0120 17:40:37.309700 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-bd94-account-create-update-n5ssg" event={"ID":"93c34095-c480-40ce-b40e-bf05101bef8f","Type":"ContainerStarted","Data":"a5d11fe0979d62a453a3993e497c60b3e0caa44b4eb29f6f6ce6cf4dc81e7994"} Jan 20 17:40:37 crc kubenswrapper[4558]: I0120 17:40:37.310477 4558 scope.go:117] "RemoveContainer" containerID="cd433ab19101a55fe28a4b5f5f546b83f5d3862cab4f05189daa9c99ec18eb21" Jan 20 17:40:37 crc kubenswrapper[4558]: I0120 17:40:37.311187 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2" (UID: "32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:40:37 crc kubenswrapper[4558]: I0120 17:40:37.316945 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:40:37 crc kubenswrapper[4558]: I0120 17:40:37.316973 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:40:37 crc kubenswrapper[4558]: I0120 17:40:37.316985 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:40:37 crc kubenswrapper[4558]: I0120 17:40:37.316997 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t47hk\" (UniqueName: \"kubernetes.io/projected/32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2-kube-api-access-t47hk\") on node \"crc\" DevicePath \"\"" Jan 20 17:40:37 crc kubenswrapper[4558]: I0120 17:40:37.322111 4558 generic.go:334] "Generic (PLEG): container finished" podID="9ccbac5d-4369-44e5-9dfc-add1f7987639" containerID="9232fe923f5a47f4d2c0139e8bcc5a7439fb78d669404cffd52a472a70adf94f" exitCode=0 Jan 20 17:40:37 crc kubenswrapper[4558]: I0120 17:40:37.322224 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-db-create-2gc5v" event={"ID":"9ccbac5d-4369-44e5-9dfc-add1f7987639","Type":"ContainerDied","Data":"9232fe923f5a47f4d2c0139e8bcc5a7439fb78d669404cffd52a472a70adf94f"} Jan 20 17:40:37 crc kubenswrapper[4558]: I0120 17:40:37.322312 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-db-create-2gc5v" event={"ID":"9ccbac5d-4369-44e5-9dfc-add1f7987639","Type":"ContainerStarted","Data":"ebfb7fa3b7999266d98bb70f20c9f3dce1e2524e4dce865e64c99836bafd50c5"} Jan 20 17:40:37 crc kubenswrapper[4558]: I0120 17:40:37.354312 4558 scope.go:117] "RemoveContainer" containerID="26f5c1167982dbbb9e0e3adf24d031f01e322c717b64319de2cfccd54308f098" Jan 20 17:40:37 crc kubenswrapper[4558]: I0120 17:40:37.392988 4558 scope.go:117] "RemoveContainer" containerID="aa0ca993872e33ad8a732487b7655b5b2c29e6e5ca4080a8dec4fd3d9941851a" Jan 20 17:40:37 crc kubenswrapper[4558]: I0120 17:40:37.399500 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2" (UID: "32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:40:37 crc kubenswrapper[4558]: I0120 17:40:37.408065 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2-config-data" (OuterVolumeSpecName: "config-data") pod "32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2" (UID: "32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:40:37 crc kubenswrapper[4558]: I0120 17:40:37.419220 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:40:37 crc kubenswrapper[4558]: I0120 17:40:37.419252 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:40:37 crc kubenswrapper[4558]: I0120 17:40:37.427333 4558 scope.go:117] "RemoveContainer" containerID="bcbdfa37349f1e89eaec79cbd97fa05f054554461a223bbe18052651d462d9db" Jan 20 17:40:37 crc kubenswrapper[4558]: E0120 17:40:37.433004 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bcbdfa37349f1e89eaec79cbd97fa05f054554461a223bbe18052651d462d9db\": container with ID starting with bcbdfa37349f1e89eaec79cbd97fa05f054554461a223bbe18052651d462d9db not found: ID does not exist" containerID="bcbdfa37349f1e89eaec79cbd97fa05f054554461a223bbe18052651d462d9db" Jan 20 17:40:37 crc kubenswrapper[4558]: I0120 17:40:37.433056 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bcbdfa37349f1e89eaec79cbd97fa05f054554461a223bbe18052651d462d9db"} err="failed to get container status \"bcbdfa37349f1e89eaec79cbd97fa05f054554461a223bbe18052651d462d9db\": rpc error: code = NotFound desc = could not find container \"bcbdfa37349f1e89eaec79cbd97fa05f054554461a223bbe18052651d462d9db\": container with ID starting with bcbdfa37349f1e89eaec79cbd97fa05f054554461a223bbe18052651d462d9db not found: ID does not exist" Jan 20 17:40:37 crc kubenswrapper[4558]: I0120 17:40:37.433087 4558 scope.go:117] "RemoveContainer" containerID="cd433ab19101a55fe28a4b5f5f546b83f5d3862cab4f05189daa9c99ec18eb21" Jan 20 17:40:37 crc kubenswrapper[4558]: E0120 17:40:37.439104 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cd433ab19101a55fe28a4b5f5f546b83f5d3862cab4f05189daa9c99ec18eb21\": container with ID starting with cd433ab19101a55fe28a4b5f5f546b83f5d3862cab4f05189daa9c99ec18eb21 not found: ID does not exist" containerID="cd433ab19101a55fe28a4b5f5f546b83f5d3862cab4f05189daa9c99ec18eb21" Jan 20 17:40:37 crc kubenswrapper[4558]: I0120 17:40:37.439138 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cd433ab19101a55fe28a4b5f5f546b83f5d3862cab4f05189daa9c99ec18eb21"} err="failed to get container status \"cd433ab19101a55fe28a4b5f5f546b83f5d3862cab4f05189daa9c99ec18eb21\": rpc error: code = NotFound desc = could not find container \"cd433ab19101a55fe28a4b5f5f546b83f5d3862cab4f05189daa9c99ec18eb21\": container with ID starting with cd433ab19101a55fe28a4b5f5f546b83f5d3862cab4f05189daa9c99ec18eb21 not found: ID does not exist" Jan 20 17:40:37 crc kubenswrapper[4558]: I0120 17:40:37.439157 4558 scope.go:117] "RemoveContainer" containerID="26f5c1167982dbbb9e0e3adf24d031f01e322c717b64319de2cfccd54308f098" Jan 20 17:40:37 crc kubenswrapper[4558]: E0120 17:40:37.439540 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"26f5c1167982dbbb9e0e3adf24d031f01e322c717b64319de2cfccd54308f098\": container with ID starting with 
26f5c1167982dbbb9e0e3adf24d031f01e322c717b64319de2cfccd54308f098 not found: ID does not exist" containerID="26f5c1167982dbbb9e0e3adf24d031f01e322c717b64319de2cfccd54308f098" Jan 20 17:40:37 crc kubenswrapper[4558]: I0120 17:40:37.439591 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"26f5c1167982dbbb9e0e3adf24d031f01e322c717b64319de2cfccd54308f098"} err="failed to get container status \"26f5c1167982dbbb9e0e3adf24d031f01e322c717b64319de2cfccd54308f098\": rpc error: code = NotFound desc = could not find container \"26f5c1167982dbbb9e0e3adf24d031f01e322c717b64319de2cfccd54308f098\": container with ID starting with 26f5c1167982dbbb9e0e3adf24d031f01e322c717b64319de2cfccd54308f098 not found: ID does not exist" Jan 20 17:40:37 crc kubenswrapper[4558]: I0120 17:40:37.439605 4558 scope.go:117] "RemoveContainer" containerID="aa0ca993872e33ad8a732487b7655b5b2c29e6e5ca4080a8dec4fd3d9941851a" Jan 20 17:40:37 crc kubenswrapper[4558]: E0120 17:40:37.440341 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aa0ca993872e33ad8a732487b7655b5b2c29e6e5ca4080a8dec4fd3d9941851a\": container with ID starting with aa0ca993872e33ad8a732487b7655b5b2c29e6e5ca4080a8dec4fd3d9941851a not found: ID does not exist" containerID="aa0ca993872e33ad8a732487b7655b5b2c29e6e5ca4080a8dec4fd3d9941851a" Jan 20 17:40:37 crc kubenswrapper[4558]: I0120 17:40:37.440403 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aa0ca993872e33ad8a732487b7655b5b2c29e6e5ca4080a8dec4fd3d9941851a"} err="failed to get container status \"aa0ca993872e33ad8a732487b7655b5b2c29e6e5ca4080a8dec4fd3d9941851a\": rpc error: code = NotFound desc = could not find container \"aa0ca993872e33ad8a732487b7655b5b2c29e6e5ca4080a8dec4fd3d9941851a\": container with ID starting with aa0ca993872e33ad8a732487b7655b5b2c29e6e5ca4080a8dec4fd3d9941851a not found: ID does not exist" Jan 20 17:40:37 crc kubenswrapper[4558]: I0120 17:40:37.462966 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:40:37 crc kubenswrapper[4558]: I0120 17:40:37.463238 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="2328ffd8-4eb7-41b8-abfb-b5ee5a12f117" containerName="glance-log" containerID="cri-o://51fd39b0c7c20748a2724d7df276628b2e6749c56463b33881b466e963a20122" gracePeriod=30 Jan 20 17:40:37 crc kubenswrapper[4558]: I0120 17:40:37.463290 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="2328ffd8-4eb7-41b8-abfb-b5ee5a12f117" containerName="glance-httpd" containerID="cri-o://19edd3c6ba732244695f4759d0145fea4e245766ce9f1373b9b7c7005a8cd1a2" gracePeriod=30 Jan 20 17:40:37 crc kubenswrapper[4558]: I0120 17:40:37.705690 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:40:37 crc kubenswrapper[4558]: I0120 17:40:37.733837 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:40:37 crc kubenswrapper[4558]: I0120 17:40:37.742605 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:40:37 crc kubenswrapper[4558]: E0120 17:40:37.743120 4558 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2" containerName="ceilometer-notification-agent" Jan 20 17:40:37 crc kubenswrapper[4558]: I0120 17:40:37.743142 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2" containerName="ceilometer-notification-agent" Jan 20 17:40:37 crc kubenswrapper[4558]: E0120 17:40:37.743179 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2" containerName="ceilometer-central-agent" Jan 20 17:40:37 crc kubenswrapper[4558]: I0120 17:40:37.743187 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2" containerName="ceilometer-central-agent" Jan 20 17:40:37 crc kubenswrapper[4558]: E0120 17:40:37.743203 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2" containerName="proxy-httpd" Jan 20 17:40:37 crc kubenswrapper[4558]: I0120 17:40:37.743211 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2" containerName="proxy-httpd" Jan 20 17:40:37 crc kubenswrapper[4558]: E0120 17:40:37.743250 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2" containerName="sg-core" Jan 20 17:40:37 crc kubenswrapper[4558]: I0120 17:40:37.743257 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2" containerName="sg-core" Jan 20 17:40:37 crc kubenswrapper[4558]: I0120 17:40:37.743470 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2" containerName="sg-core" Jan 20 17:40:37 crc kubenswrapper[4558]: I0120 17:40:37.743490 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2" containerName="ceilometer-central-agent" Jan 20 17:40:37 crc kubenswrapper[4558]: I0120 17:40:37.743505 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2" containerName="proxy-httpd" Jan 20 17:40:37 crc kubenswrapper[4558]: I0120 17:40:37.743521 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2" containerName="ceilometer-notification-agent" Jan 20 17:40:37 crc kubenswrapper[4558]: I0120 17:40:37.745287 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:37 crc kubenswrapper[4558]: I0120 17:40:37.747314 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:40:37 crc kubenswrapper[4558]: I0120 17:40:37.747409 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:40:37 crc kubenswrapper[4558]: I0120 17:40:37.752018 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:40:37 crc kubenswrapper[4558]: I0120 17:40:37.931490 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/708f2028-1ec7-47eb-95a2-3c6dde44fd83-config-data\") pod \"ceilometer-0\" (UID: \"708f2028-1ec7-47eb-95a2-3c6dde44fd83\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:37 crc kubenswrapper[4558]: I0120 17:40:37.931571 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/708f2028-1ec7-47eb-95a2-3c6dde44fd83-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"708f2028-1ec7-47eb-95a2-3c6dde44fd83\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:37 crc kubenswrapper[4558]: I0120 17:40:37.931659 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/708f2028-1ec7-47eb-95a2-3c6dde44fd83-log-httpd\") pod \"ceilometer-0\" (UID: \"708f2028-1ec7-47eb-95a2-3c6dde44fd83\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:37 crc kubenswrapper[4558]: I0120 17:40:37.931772 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/708f2028-1ec7-47eb-95a2-3c6dde44fd83-scripts\") pod \"ceilometer-0\" (UID: \"708f2028-1ec7-47eb-95a2-3c6dde44fd83\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:37 crc kubenswrapper[4558]: I0120 17:40:37.931813 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/708f2028-1ec7-47eb-95a2-3c6dde44fd83-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"708f2028-1ec7-47eb-95a2-3c6dde44fd83\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:37 crc kubenswrapper[4558]: I0120 17:40:37.931988 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gl62m\" (UniqueName: \"kubernetes.io/projected/708f2028-1ec7-47eb-95a2-3c6dde44fd83-kube-api-access-gl62m\") pod \"ceilometer-0\" (UID: \"708f2028-1ec7-47eb-95a2-3c6dde44fd83\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:37 crc kubenswrapper[4558]: I0120 17:40:37.932094 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/708f2028-1ec7-47eb-95a2-3c6dde44fd83-run-httpd\") pod \"ceilometer-0\" (UID: \"708f2028-1ec7-47eb-95a2-3c6dde44fd83\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:38 crc kubenswrapper[4558]: I0120 17:40:38.034156 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/708f2028-1ec7-47eb-95a2-3c6dde44fd83-config-data\") pod \"ceilometer-0\" (UID: 
\"708f2028-1ec7-47eb-95a2-3c6dde44fd83\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:38 crc kubenswrapper[4558]: I0120 17:40:38.034269 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/708f2028-1ec7-47eb-95a2-3c6dde44fd83-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"708f2028-1ec7-47eb-95a2-3c6dde44fd83\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:38 crc kubenswrapper[4558]: I0120 17:40:38.034314 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/708f2028-1ec7-47eb-95a2-3c6dde44fd83-log-httpd\") pod \"ceilometer-0\" (UID: \"708f2028-1ec7-47eb-95a2-3c6dde44fd83\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:38 crc kubenswrapper[4558]: I0120 17:40:38.034363 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/708f2028-1ec7-47eb-95a2-3c6dde44fd83-scripts\") pod \"ceilometer-0\" (UID: \"708f2028-1ec7-47eb-95a2-3c6dde44fd83\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:38 crc kubenswrapper[4558]: I0120 17:40:38.034391 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/708f2028-1ec7-47eb-95a2-3c6dde44fd83-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"708f2028-1ec7-47eb-95a2-3c6dde44fd83\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:38 crc kubenswrapper[4558]: I0120 17:40:38.034556 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gl62m\" (UniqueName: \"kubernetes.io/projected/708f2028-1ec7-47eb-95a2-3c6dde44fd83-kube-api-access-gl62m\") pod \"ceilometer-0\" (UID: \"708f2028-1ec7-47eb-95a2-3c6dde44fd83\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:38 crc kubenswrapper[4558]: I0120 17:40:38.034660 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/708f2028-1ec7-47eb-95a2-3c6dde44fd83-run-httpd\") pod \"ceilometer-0\" (UID: \"708f2028-1ec7-47eb-95a2-3c6dde44fd83\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:38 crc kubenswrapper[4558]: I0120 17:40:38.035363 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/708f2028-1ec7-47eb-95a2-3c6dde44fd83-run-httpd\") pod \"ceilometer-0\" (UID: \"708f2028-1ec7-47eb-95a2-3c6dde44fd83\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:38 crc kubenswrapper[4558]: I0120 17:40:38.035445 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/708f2028-1ec7-47eb-95a2-3c6dde44fd83-log-httpd\") pod \"ceilometer-0\" (UID: \"708f2028-1ec7-47eb-95a2-3c6dde44fd83\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:38 crc kubenswrapper[4558]: I0120 17:40:38.042357 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/708f2028-1ec7-47eb-95a2-3c6dde44fd83-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"708f2028-1ec7-47eb-95a2-3c6dde44fd83\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:38 crc kubenswrapper[4558]: I0120 17:40:38.042953 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/708f2028-1ec7-47eb-95a2-3c6dde44fd83-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"708f2028-1ec7-47eb-95a2-3c6dde44fd83\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:38 crc kubenswrapper[4558]: I0120 17:40:38.043066 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/708f2028-1ec7-47eb-95a2-3c6dde44fd83-scripts\") pod \"ceilometer-0\" (UID: \"708f2028-1ec7-47eb-95a2-3c6dde44fd83\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:38 crc kubenswrapper[4558]: I0120 17:40:38.043815 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/708f2028-1ec7-47eb-95a2-3c6dde44fd83-config-data\") pod \"ceilometer-0\" (UID: \"708f2028-1ec7-47eb-95a2-3c6dde44fd83\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:38 crc kubenswrapper[4558]: I0120 17:40:38.053342 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gl62m\" (UniqueName: \"kubernetes.io/projected/708f2028-1ec7-47eb-95a2-3c6dde44fd83-kube-api-access-gl62m\") pod \"ceilometer-0\" (UID: \"708f2028-1ec7-47eb-95a2-3c6dde44fd83\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:38 crc kubenswrapper[4558]: I0120 17:40:38.061424 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:38 crc kubenswrapper[4558]: I0120 17:40:38.166576 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:40:38 crc kubenswrapper[4558]: I0120 17:40:38.167036 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="92c317c8-cf30-4046-af0b-8f6c36f69000" containerName="glance-log" containerID="cri-o://9139dd64147779f4a8760ed5d87509a43e88362aa5356602bf57b77210031913" gracePeriod=30 Jan 20 17:40:38 crc kubenswrapper[4558]: I0120 17:40:38.167483 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="92c317c8-cf30-4046-af0b-8f6c36f69000" containerName="glance-httpd" containerID="cri-o://0c31383a0ae8e3556df5e5d344c5e4300ff998da9a0dcb05e9d60b095898581e" gracePeriod=30 Jan 20 17:40:38 crc kubenswrapper[4558]: I0120 17:40:38.332603 4558 generic.go:334] "Generic (PLEG): container finished" podID="93c34095-c480-40ce-b40e-bf05101bef8f" containerID="1560e725c8768e491198e715aead8e30fd05538bb7735f6fb6fd333f434e6c3e" exitCode=0 Jan 20 17:40:38 crc kubenswrapper[4558]: I0120 17:40:38.332710 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-bd94-account-create-update-n5ssg" event={"ID":"93c34095-c480-40ce-b40e-bf05101bef8f","Type":"ContainerDied","Data":"1560e725c8768e491198e715aead8e30fd05538bb7735f6fb6fd333f434e6c3e"} Jan 20 17:40:38 crc kubenswrapper[4558]: I0120 17:40:38.334960 4558 generic.go:334] "Generic (PLEG): container finished" podID="92c317c8-cf30-4046-af0b-8f6c36f69000" containerID="9139dd64147779f4a8760ed5d87509a43e88362aa5356602bf57b77210031913" exitCode=143 Jan 20 17:40:38 crc kubenswrapper[4558]: I0120 17:40:38.335051 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"92c317c8-cf30-4046-af0b-8f6c36f69000","Type":"ContainerDied","Data":"9139dd64147779f4a8760ed5d87509a43e88362aa5356602bf57b77210031913"} Jan 20 
17:40:38 crc kubenswrapper[4558]: I0120 17:40:38.336797 4558 generic.go:334] "Generic (PLEG): container finished" podID="4150b9b7-ff56-4e21-b3c6-62510856cc94" containerID="eb8f5e7abf7b6f66d13dcca720c4c8b9939d9a624fccffa8e1235a042aeeec14" exitCode=0 Jan 20 17:40:38 crc kubenswrapper[4558]: I0120 17:40:38.336842 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-51af-account-create-update-96ttf" event={"ID":"4150b9b7-ff56-4e21-b3c6-62510856cc94","Type":"ContainerDied","Data":"eb8f5e7abf7b6f66d13dcca720c4c8b9939d9a624fccffa8e1235a042aeeec14"} Jan 20 17:40:38 crc kubenswrapper[4558]: I0120 17:40:38.338594 4558 generic.go:334] "Generic (PLEG): container finished" podID="2328ffd8-4eb7-41b8-abfb-b5ee5a12f117" containerID="51fd39b0c7c20748a2724d7df276628b2e6749c56463b33881b466e963a20122" exitCode=143 Jan 20 17:40:38 crc kubenswrapper[4558]: I0120 17:40:38.338670 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"2328ffd8-4eb7-41b8-abfb-b5ee5a12f117","Type":"ContainerDied","Data":"51fd39b0c7c20748a2724d7df276628b2e6749c56463b33881b466e963a20122"} Jan 20 17:40:38 crc kubenswrapper[4558]: I0120 17:40:38.498904 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:40:38 crc kubenswrapper[4558]: W0120 17:40:38.503547 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod708f2028_1ec7_47eb_95a2_3c6dde44fd83.slice/crio-8c0001781cf469232029c66008c49fa8039fb2f7584d8ca5b9ed02cd768ef256 WatchSource:0}: Error finding container 8c0001781cf469232029c66008c49fa8039fb2f7584d8ca5b9ed02cd768ef256: Status 404 returned error can't find the container with id 8c0001781cf469232029c66008c49fa8039fb2f7584d8ca5b9ed02cd768ef256 Jan 20 17:40:38 crc kubenswrapper[4558]: I0120 17:40:38.586495 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2" path="/var/lib/kubelet/pods/32ebfd5d-3392-4ab0-b1f7-1c12001b2ef2/volumes" Jan 20 17:40:38 crc kubenswrapper[4558]: I0120 17:40:38.611024 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-3b8d-account-create-update-s9plf" Jan 20 17:40:38 crc kubenswrapper[4558]: I0120 17:40:38.752787 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rplvm\" (UniqueName: \"kubernetes.io/projected/d0eb0bc4-4bc9-4c7a-ac58-c62d4674549d-kube-api-access-rplvm\") pod \"d0eb0bc4-4bc9-4c7a-ac58-c62d4674549d\" (UID: \"d0eb0bc4-4bc9-4c7a-ac58-c62d4674549d\") " Jan 20 17:40:38 crc kubenswrapper[4558]: I0120 17:40:38.753148 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d0eb0bc4-4bc9-4c7a-ac58-c62d4674549d-operator-scripts\") pod \"d0eb0bc4-4bc9-4c7a-ac58-c62d4674549d\" (UID: \"d0eb0bc4-4bc9-4c7a-ac58-c62d4674549d\") " Jan 20 17:40:38 crc kubenswrapper[4558]: I0120 17:40:38.754365 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d0eb0bc4-4bc9-4c7a-ac58-c62d4674549d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d0eb0bc4-4bc9-4c7a-ac58-c62d4674549d" (UID: "d0eb0bc4-4bc9-4c7a-ac58-c62d4674549d"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:40:38 crc kubenswrapper[4558]: I0120 17:40:38.760257 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d0eb0bc4-4bc9-4c7a-ac58-c62d4674549d-kube-api-access-rplvm" (OuterVolumeSpecName: "kube-api-access-rplvm") pod "d0eb0bc4-4bc9-4c7a-ac58-c62d4674549d" (UID: "d0eb0bc4-4bc9-4c7a-ac58-c62d4674549d"). InnerVolumeSpecName "kube-api-access-rplvm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:40:38 crc kubenswrapper[4558]: I0120 17:40:38.835000 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-db-create-vhnt4" Jan 20 17:40:38 crc kubenswrapper[4558]: I0120 17:40:38.839528 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-db-create-2gc5v" Jan 20 17:40:38 crc kubenswrapper[4558]: I0120 17:40:38.844475 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-db-create-75945" Jan 20 17:40:38 crc kubenswrapper[4558]: I0120 17:40:38.856122 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d0eb0bc4-4bc9-4c7a-ac58-c62d4674549d-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:40:38 crc kubenswrapper[4558]: I0120 17:40:38.856155 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rplvm\" (UniqueName: \"kubernetes.io/projected/d0eb0bc4-4bc9-4c7a-ac58-c62d4674549d-kube-api-access-rplvm\") on node \"crc\" DevicePath \"\"" Jan 20 17:40:38 crc kubenswrapper[4558]: I0120 17:40:38.956749 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9ccbac5d-4369-44e5-9dfc-add1f7987639-operator-scripts\") pod \"9ccbac5d-4369-44e5-9dfc-add1f7987639\" (UID: \"9ccbac5d-4369-44e5-9dfc-add1f7987639\") " Jan 20 17:40:38 crc kubenswrapper[4558]: I0120 17:40:38.956832 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e451d734-7167-4b92-8c0b-22b1e3f4ef67-operator-scripts\") pod \"e451d734-7167-4b92-8c0b-22b1e3f4ef67\" (UID: \"e451d734-7167-4b92-8c0b-22b1e3f4ef67\") " Jan 20 17:40:38 crc kubenswrapper[4558]: I0120 17:40:38.956924 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wzd95\" (UniqueName: \"kubernetes.io/projected/9ccbac5d-4369-44e5-9dfc-add1f7987639-kube-api-access-wzd95\") pod \"9ccbac5d-4369-44e5-9dfc-add1f7987639\" (UID: \"9ccbac5d-4369-44e5-9dfc-add1f7987639\") " Jan 20 17:40:38 crc kubenswrapper[4558]: I0120 17:40:38.957031 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w4gn\" (UniqueName: \"kubernetes.io/projected/a312b2d2-a836-40d2-8de8-9e75ef44ff9a-kube-api-access-2w4gn\") pod \"a312b2d2-a836-40d2-8de8-9e75ef44ff9a\" (UID: \"a312b2d2-a836-40d2-8de8-9e75ef44ff9a\") " Jan 20 17:40:38 crc kubenswrapper[4558]: I0120 17:40:38.957096 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a312b2d2-a836-40d2-8de8-9e75ef44ff9a-operator-scripts\") pod \"a312b2d2-a836-40d2-8de8-9e75ef44ff9a\" (UID: \"a312b2d2-a836-40d2-8de8-9e75ef44ff9a\") " Jan 20 17:40:38 crc kubenswrapper[4558]: I0120 17:40:38.957130 4558 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kjkxj\" (UniqueName: \"kubernetes.io/projected/e451d734-7167-4b92-8c0b-22b1e3f4ef67-kube-api-access-kjkxj\") pod \"e451d734-7167-4b92-8c0b-22b1e3f4ef67\" (UID: \"e451d734-7167-4b92-8c0b-22b1e3f4ef67\") " Jan 20 17:40:38 crc kubenswrapper[4558]: I0120 17:40:38.957354 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9ccbac5d-4369-44e5-9dfc-add1f7987639-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "9ccbac5d-4369-44e5-9dfc-add1f7987639" (UID: "9ccbac5d-4369-44e5-9dfc-add1f7987639"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:40:38 crc kubenswrapper[4558]: I0120 17:40:38.957371 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e451d734-7167-4b92-8c0b-22b1e3f4ef67-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e451d734-7167-4b92-8c0b-22b1e3f4ef67" (UID: "e451d734-7167-4b92-8c0b-22b1e3f4ef67"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:40:38 crc kubenswrapper[4558]: I0120 17:40:38.957690 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e451d734-7167-4b92-8c0b-22b1e3f4ef67-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:40:38 crc kubenswrapper[4558]: I0120 17:40:38.957708 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9ccbac5d-4369-44e5-9dfc-add1f7987639-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:40:38 crc kubenswrapper[4558]: I0120 17:40:38.958120 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a312b2d2-a836-40d2-8de8-9e75ef44ff9a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a312b2d2-a836-40d2-8de8-9e75ef44ff9a" (UID: "a312b2d2-a836-40d2-8de8-9e75ef44ff9a"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:40:38 crc kubenswrapper[4558]: I0120 17:40:38.963326 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9ccbac5d-4369-44e5-9dfc-add1f7987639-kube-api-access-wzd95" (OuterVolumeSpecName: "kube-api-access-wzd95") pod "9ccbac5d-4369-44e5-9dfc-add1f7987639" (UID: "9ccbac5d-4369-44e5-9dfc-add1f7987639"). InnerVolumeSpecName "kube-api-access-wzd95". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:40:38 crc kubenswrapper[4558]: I0120 17:40:38.963862 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a312b2d2-a836-40d2-8de8-9e75ef44ff9a-kube-api-access-2w4gn" (OuterVolumeSpecName: "kube-api-access-2w4gn") pod "a312b2d2-a836-40d2-8de8-9e75ef44ff9a" (UID: "a312b2d2-a836-40d2-8de8-9e75ef44ff9a"). InnerVolumeSpecName "kube-api-access-2w4gn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:40:38 crc kubenswrapper[4558]: I0120 17:40:38.964023 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e451d734-7167-4b92-8c0b-22b1e3f4ef67-kube-api-access-kjkxj" (OuterVolumeSpecName: "kube-api-access-kjkxj") pod "e451d734-7167-4b92-8c0b-22b1e3f4ef67" (UID: "e451d734-7167-4b92-8c0b-22b1e3f4ef67"). InnerVolumeSpecName "kube-api-access-kjkxj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:40:39 crc kubenswrapper[4558]: I0120 17:40:39.021675 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:40:39 crc kubenswrapper[4558]: I0120 17:40:39.059655 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wzd95\" (UniqueName: \"kubernetes.io/projected/9ccbac5d-4369-44e5-9dfc-add1f7987639-kube-api-access-wzd95\") on node \"crc\" DevicePath \"\"" Jan 20 17:40:39 crc kubenswrapper[4558]: I0120 17:40:39.059690 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w4gn\" (UniqueName: \"kubernetes.io/projected/a312b2d2-a836-40d2-8de8-9e75ef44ff9a-kube-api-access-2w4gn\") on node \"crc\" DevicePath \"\"" Jan 20 17:40:39 crc kubenswrapper[4558]: I0120 17:40:39.059705 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a312b2d2-a836-40d2-8de8-9e75ef44ff9a-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:40:39 crc kubenswrapper[4558]: I0120 17:40:39.059717 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kjkxj\" (UniqueName: \"kubernetes.io/projected/e451d734-7167-4b92-8c0b-22b1e3f4ef67-kube-api-access-kjkxj\") on node \"crc\" DevicePath \"\"" Jan 20 17:40:39 crc kubenswrapper[4558]: I0120 17:40:39.353982 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-db-create-75945" Jan 20 17:40:39 crc kubenswrapper[4558]: I0120 17:40:39.353971 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-db-create-75945" event={"ID":"a312b2d2-a836-40d2-8de8-9e75ef44ff9a","Type":"ContainerDied","Data":"a05e8eb925c9f1c9278580228108b9bcdca3fc3db06b95dc853f082e1ba2a265"} Jan 20 17:40:39 crc kubenswrapper[4558]: I0120 17:40:39.354194 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a05e8eb925c9f1c9278580228108b9bcdca3fc3db06b95dc853f082e1ba2a265" Jan 20 17:40:39 crc kubenswrapper[4558]: I0120 17:40:39.356847 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-db-create-vhnt4" event={"ID":"e451d734-7167-4b92-8c0b-22b1e3f4ef67","Type":"ContainerDied","Data":"76147b0ab7d2976be2d0455911e27d674bc1ff69393f1873540f6a36aac1d1e3"} Jan 20 17:40:39 crc kubenswrapper[4558]: I0120 17:40:39.356908 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="76147b0ab7d2976be2d0455911e27d674bc1ff69393f1873540f6a36aac1d1e3" Jan 20 17:40:39 crc kubenswrapper[4558]: I0120 17:40:39.357069 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-db-create-vhnt4" Jan 20 17:40:39 crc kubenswrapper[4558]: I0120 17:40:39.358931 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"708f2028-1ec7-47eb-95a2-3c6dde44fd83","Type":"ContainerStarted","Data":"ee91b319b67486aa1a08313a60875e479a6774cc5f6c5c2fd4d4bb4515c32da6"} Jan 20 17:40:39 crc kubenswrapper[4558]: I0120 17:40:39.358987 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"708f2028-1ec7-47eb-95a2-3c6dde44fd83","Type":"ContainerStarted","Data":"8c0001781cf469232029c66008c49fa8039fb2f7584d8ca5b9ed02cd768ef256"} Jan 20 17:40:39 crc kubenswrapper[4558]: I0120 17:40:39.361277 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-db-create-2gc5v" event={"ID":"9ccbac5d-4369-44e5-9dfc-add1f7987639","Type":"ContainerDied","Data":"ebfb7fa3b7999266d98bb70f20c9f3dce1e2524e4dce865e64c99836bafd50c5"} Jan 20 17:40:39 crc kubenswrapper[4558]: I0120 17:40:39.361329 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ebfb7fa3b7999266d98bb70f20c9f3dce1e2524e4dce865e64c99836bafd50c5" Jan 20 17:40:39 crc kubenswrapper[4558]: I0120 17:40:39.361284 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-db-create-2gc5v" Jan 20 17:40:39 crc kubenswrapper[4558]: I0120 17:40:39.363636 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-3b8d-account-create-update-s9plf" Jan 20 17:40:39 crc kubenswrapper[4558]: I0120 17:40:39.364043 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-3b8d-account-create-update-s9plf" event={"ID":"d0eb0bc4-4bc9-4c7a-ac58-c62d4674549d","Type":"ContainerDied","Data":"84bc272387d3bd53cb64528f046771bf55cb6995ba3726e9699398e887485542"} Jan 20 17:40:39 crc kubenswrapper[4558]: I0120 17:40:39.364080 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="84bc272387d3bd53cb64528f046771bf55cb6995ba3726e9699398e887485542" Jan 20 17:40:39 crc kubenswrapper[4558]: I0120 17:40:39.567665 4558 scope.go:117] "RemoveContainer" containerID="2d2a8989ca5a5af98b2c9dc8f801ea0675339ec14800f437c058c5a43c20146b" Jan 20 17:40:39 crc kubenswrapper[4558]: E0120 17:40:39.568424 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:40:39 crc kubenswrapper[4558]: I0120 17:40:39.815486 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-51af-account-create-update-96ttf" Jan 20 17:40:39 crc kubenswrapper[4558]: I0120 17:40:39.819454 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-bd94-account-create-update-n5ssg" Jan 20 17:40:39 crc kubenswrapper[4558]: I0120 17:40:39.984712 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4150b9b7-ff56-4e21-b3c6-62510856cc94-operator-scripts\") pod \"4150b9b7-ff56-4e21-b3c6-62510856cc94\" (UID: \"4150b9b7-ff56-4e21-b3c6-62510856cc94\") " Jan 20 17:40:39 crc kubenswrapper[4558]: I0120 17:40:39.984933 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2fxsw\" (UniqueName: \"kubernetes.io/projected/4150b9b7-ff56-4e21-b3c6-62510856cc94-kube-api-access-2fxsw\") pod \"4150b9b7-ff56-4e21-b3c6-62510856cc94\" (UID: \"4150b9b7-ff56-4e21-b3c6-62510856cc94\") " Jan 20 17:40:39 crc kubenswrapper[4558]: I0120 17:40:39.985045 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b5fx4\" (UniqueName: \"kubernetes.io/projected/93c34095-c480-40ce-b40e-bf05101bef8f-kube-api-access-b5fx4\") pod \"93c34095-c480-40ce-b40e-bf05101bef8f\" (UID: \"93c34095-c480-40ce-b40e-bf05101bef8f\") " Jan 20 17:40:39 crc kubenswrapper[4558]: I0120 17:40:39.985067 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/93c34095-c480-40ce-b40e-bf05101bef8f-operator-scripts\") pod \"93c34095-c480-40ce-b40e-bf05101bef8f\" (UID: \"93c34095-c480-40ce-b40e-bf05101bef8f\") " Jan 20 17:40:39 crc kubenswrapper[4558]: I0120 17:40:39.985519 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4150b9b7-ff56-4e21-b3c6-62510856cc94-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "4150b9b7-ff56-4e21-b3c6-62510856cc94" (UID: "4150b9b7-ff56-4e21-b3c6-62510856cc94"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:40:39 crc kubenswrapper[4558]: I0120 17:40:39.985640 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/93c34095-c480-40ce-b40e-bf05101bef8f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "93c34095-c480-40ce-b40e-bf05101bef8f" (UID: "93c34095-c480-40ce-b40e-bf05101bef8f"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:40:39 crc kubenswrapper[4558]: I0120 17:40:39.986329 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/93c34095-c480-40ce-b40e-bf05101bef8f-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:40:39 crc kubenswrapper[4558]: I0120 17:40:39.986359 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4150b9b7-ff56-4e21-b3c6-62510856cc94-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:40:39 crc kubenswrapper[4558]: I0120 17:40:39.990388 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4150b9b7-ff56-4e21-b3c6-62510856cc94-kube-api-access-2fxsw" (OuterVolumeSpecName: "kube-api-access-2fxsw") pod "4150b9b7-ff56-4e21-b3c6-62510856cc94" (UID: "4150b9b7-ff56-4e21-b3c6-62510856cc94"). InnerVolumeSpecName "kube-api-access-2fxsw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:40:39 crc kubenswrapper[4558]: I0120 17:40:39.990581 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/93c34095-c480-40ce-b40e-bf05101bef8f-kube-api-access-b5fx4" (OuterVolumeSpecName: "kube-api-access-b5fx4") pod "93c34095-c480-40ce-b40e-bf05101bef8f" (UID: "93c34095-c480-40ce-b40e-bf05101bef8f"). InnerVolumeSpecName "kube-api-access-b5fx4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:40:40 crc kubenswrapper[4558]: I0120 17:40:40.088398 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2fxsw\" (UniqueName: \"kubernetes.io/projected/4150b9b7-ff56-4e21-b3c6-62510856cc94-kube-api-access-2fxsw\") on node \"crc\" DevicePath \"\"" Jan 20 17:40:40 crc kubenswrapper[4558]: I0120 17:40:40.088709 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b5fx4\" (UniqueName: \"kubernetes.io/projected/93c34095-c480-40ce-b40e-bf05101bef8f-kube-api-access-b5fx4\") on node \"crc\" DevicePath \"\"" Jan 20 17:40:40 crc kubenswrapper[4558]: I0120 17:40:40.387514 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-51af-account-create-update-96ttf" event={"ID":"4150b9b7-ff56-4e21-b3c6-62510856cc94","Type":"ContainerDied","Data":"6ccf91d53c8e64c5a89239afba9b91661a93e658238035ff1d44a3fd54594419"} Jan 20 17:40:40 crc kubenswrapper[4558]: I0120 17:40:40.387578 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6ccf91d53c8e64c5a89239afba9b91661a93e658238035ff1d44a3fd54594419" Jan 20 17:40:40 crc kubenswrapper[4558]: I0120 17:40:40.387576 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-51af-account-create-update-96ttf" Jan 20 17:40:40 crc kubenswrapper[4558]: I0120 17:40:40.393353 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-bd94-account-create-update-n5ssg" event={"ID":"93c34095-c480-40ce-b40e-bf05101bef8f","Type":"ContainerDied","Data":"a5d11fe0979d62a453a3993e497c60b3e0caa44b4eb29f6f6ce6cf4dc81e7994"} Jan 20 17:40:40 crc kubenswrapper[4558]: I0120 17:40:40.393387 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a5d11fe0979d62a453a3993e497c60b3e0caa44b4eb29f6f6ce6cf4dc81e7994" Jan 20 17:40:40 crc kubenswrapper[4558]: I0120 17:40:40.393462 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-bd94-account-create-update-n5ssg" Jan 20 17:40:40 crc kubenswrapper[4558]: I0120 17:40:40.398916 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"708f2028-1ec7-47eb-95a2-3c6dde44fd83","Type":"ContainerStarted","Data":"96bca6e91cb753e1acdd4ab804a541fab6834cae6c7d3cc56a68ca0bdf4bbcf3"} Jan 20 17:40:40 crc kubenswrapper[4558]: I0120 17:40:40.558398 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/swift-proxy-6f98b8f5cb-8bwzs" Jan 20 17:40:40 crc kubenswrapper[4558]: I0120 17:40:40.563275 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/swift-proxy-6f98b8f5cb-8bwzs" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.011450 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.115700 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2328ffd8-4eb7-41b8-abfb-b5ee5a12f117-logs\") pod \"2328ffd8-4eb7-41b8-abfb-b5ee5a12f117\" (UID: \"2328ffd8-4eb7-41b8-abfb-b5ee5a12f117\") " Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.115818 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2328ffd8-4eb7-41b8-abfb-b5ee5a12f117-public-tls-certs\") pod \"2328ffd8-4eb7-41b8-abfb-b5ee5a12f117\" (UID: \"2328ffd8-4eb7-41b8-abfb-b5ee5a12f117\") " Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.115875 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/2328ffd8-4eb7-41b8-abfb-b5ee5a12f117-httpd-run\") pod \"2328ffd8-4eb7-41b8-abfb-b5ee5a12f117\" (UID: \"2328ffd8-4eb7-41b8-abfb-b5ee5a12f117\") " Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.115922 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"2328ffd8-4eb7-41b8-abfb-b5ee5a12f117\" (UID: \"2328ffd8-4eb7-41b8-abfb-b5ee5a12f117\") " Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.115963 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2328ffd8-4eb7-41b8-abfb-b5ee5a12f117-scripts\") pod \"2328ffd8-4eb7-41b8-abfb-b5ee5a12f117\" (UID: \"2328ffd8-4eb7-41b8-abfb-b5ee5a12f117\") " Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.116056 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2328ffd8-4eb7-41b8-abfb-b5ee5a12f117-config-data\") pod \"2328ffd8-4eb7-41b8-abfb-b5ee5a12f117\" (UID: \"2328ffd8-4eb7-41b8-abfb-b5ee5a12f117\") " Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.116094 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2328ffd8-4eb7-41b8-abfb-b5ee5a12f117-combined-ca-bundle\") pod \"2328ffd8-4eb7-41b8-abfb-b5ee5a12f117\" (UID: \"2328ffd8-4eb7-41b8-abfb-b5ee5a12f117\") " Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.116118 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4nd8m\" (UniqueName: \"kubernetes.io/projected/2328ffd8-4eb7-41b8-abfb-b5ee5a12f117-kube-api-access-4nd8m\") pod \"2328ffd8-4eb7-41b8-abfb-b5ee5a12f117\" (UID: \"2328ffd8-4eb7-41b8-abfb-b5ee5a12f117\") " Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.118224 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2328ffd8-4eb7-41b8-abfb-b5ee5a12f117-logs" (OuterVolumeSpecName: "logs") pod "2328ffd8-4eb7-41b8-abfb-b5ee5a12f117" (UID: "2328ffd8-4eb7-41b8-abfb-b5ee5a12f117"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.118518 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2328ffd8-4eb7-41b8-abfb-b5ee5a12f117-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "2328ffd8-4eb7-41b8-abfb-b5ee5a12f117" (UID: "2328ffd8-4eb7-41b8-abfb-b5ee5a12f117"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.126771 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2328ffd8-4eb7-41b8-abfb-b5ee5a12f117-kube-api-access-4nd8m" (OuterVolumeSpecName: "kube-api-access-4nd8m") pod "2328ffd8-4eb7-41b8-abfb-b5ee5a12f117" (UID: "2328ffd8-4eb7-41b8-abfb-b5ee5a12f117"). InnerVolumeSpecName "kube-api-access-4nd8m". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.134588 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2328ffd8-4eb7-41b8-abfb-b5ee5a12f117-scripts" (OuterVolumeSpecName: "scripts") pod "2328ffd8-4eb7-41b8-abfb-b5ee5a12f117" (UID: "2328ffd8-4eb7-41b8-abfb-b5ee5a12f117"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.134775 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "glance") pod "2328ffd8-4eb7-41b8-abfb-b5ee5a12f117" (UID: "2328ffd8-4eb7-41b8-abfb-b5ee5a12f117"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.169224 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-db-sync-5lxgd"] Jan 20 17:40:41 crc kubenswrapper[4558]: E0120 17:40:41.169729 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ccbac5d-4369-44e5-9dfc-add1f7987639" containerName="mariadb-database-create" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.169748 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ccbac5d-4369-44e5-9dfc-add1f7987639" containerName="mariadb-database-create" Jan 20 17:40:41 crc kubenswrapper[4558]: E0120 17:40:41.169769 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="93c34095-c480-40ce-b40e-bf05101bef8f" containerName="mariadb-account-create-update" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.169776 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="93c34095-c480-40ce-b40e-bf05101bef8f" containerName="mariadb-account-create-update" Jan 20 17:40:41 crc kubenswrapper[4558]: E0120 17:40:41.169786 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a312b2d2-a836-40d2-8de8-9e75ef44ff9a" containerName="mariadb-database-create" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.169793 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a312b2d2-a836-40d2-8de8-9e75ef44ff9a" containerName="mariadb-database-create" Jan 20 17:40:41 crc kubenswrapper[4558]: E0120 17:40:41.169805 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2328ffd8-4eb7-41b8-abfb-b5ee5a12f117" containerName="glance-httpd" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.169811 4558 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="2328ffd8-4eb7-41b8-abfb-b5ee5a12f117" containerName="glance-httpd" Jan 20 17:40:41 crc kubenswrapper[4558]: E0120 17:40:41.169823 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4150b9b7-ff56-4e21-b3c6-62510856cc94" containerName="mariadb-account-create-update" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.169828 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4150b9b7-ff56-4e21-b3c6-62510856cc94" containerName="mariadb-account-create-update" Jan 20 17:40:41 crc kubenswrapper[4558]: E0120 17:40:41.169849 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e451d734-7167-4b92-8c0b-22b1e3f4ef67" containerName="mariadb-database-create" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.169855 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e451d734-7167-4b92-8c0b-22b1e3f4ef67" containerName="mariadb-database-create" Jan 20 17:40:41 crc kubenswrapper[4558]: E0120 17:40:41.169869 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2328ffd8-4eb7-41b8-abfb-b5ee5a12f117" containerName="glance-log" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.169875 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="2328ffd8-4eb7-41b8-abfb-b5ee5a12f117" containerName="glance-log" Jan 20 17:40:41 crc kubenswrapper[4558]: E0120 17:40:41.169884 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d0eb0bc4-4bc9-4c7a-ac58-c62d4674549d" containerName="mariadb-account-create-update" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.169897 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0eb0bc4-4bc9-4c7a-ac58-c62d4674549d" containerName="mariadb-account-create-update" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.170098 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d0eb0bc4-4bc9-4c7a-ac58-c62d4674549d" containerName="mariadb-account-create-update" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.170112 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="93c34095-c480-40ce-b40e-bf05101bef8f" containerName="mariadb-account-create-update" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.170122 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a312b2d2-a836-40d2-8de8-9e75ef44ff9a" containerName="mariadb-database-create" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.170129 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="2328ffd8-4eb7-41b8-abfb-b5ee5a12f117" containerName="glance-httpd" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.170136 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e451d734-7167-4b92-8c0b-22b1e3f4ef67" containerName="mariadb-database-create" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.170146 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="9ccbac5d-4369-44e5-9dfc-add1f7987639" containerName="mariadb-database-create" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.170157 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="4150b9b7-ff56-4e21-b3c6-62510856cc94" containerName="mariadb-account-create-update" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.170189 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="2328ffd8-4eb7-41b8-abfb-b5ee5a12f117" containerName="glance-log" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.170850 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-5lxgd" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.173469 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2328ffd8-4eb7-41b8-abfb-b5ee5a12f117-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2328ffd8-4eb7-41b8-abfb-b5ee5a12f117" (UID: "2328ffd8-4eb7-41b8-abfb-b5ee5a12f117"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.174709 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-nova-dockercfg-6hzq4" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.176156 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-conductor-config-data" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.176387 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-conductor-scripts" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.180075 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-db-sync-5lxgd"] Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.180389 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2328ffd8-4eb7-41b8-abfb-b5ee5a12f117-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "2328ffd8-4eb7-41b8-abfb-b5ee5a12f117" (UID: "2328ffd8-4eb7-41b8-abfb-b5ee5a12f117"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.198994 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2328ffd8-4eb7-41b8-abfb-b5ee5a12f117-config-data" (OuterVolumeSpecName: "config-data") pod "2328ffd8-4eb7-41b8-abfb-b5ee5a12f117" (UID: "2328ffd8-4eb7-41b8-abfb-b5ee5a12f117"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.218702 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2328ffd8-4eb7-41b8-abfb-b5ee5a12f117-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.218732 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/2328ffd8-4eb7-41b8-abfb-b5ee5a12f117-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.218764 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.218774 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2328ffd8-4eb7-41b8-abfb-b5ee5a12f117-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.218784 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2328ffd8-4eb7-41b8-abfb-b5ee5a12f117-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.218795 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2328ffd8-4eb7-41b8-abfb-b5ee5a12f117-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.218805 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4nd8m\" (UniqueName: \"kubernetes.io/projected/2328ffd8-4eb7-41b8-abfb-b5ee5a12f117-kube-api-access-4nd8m\") on node \"crc\" DevicePath \"\"" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.218815 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2328ffd8-4eb7-41b8-abfb-b5ee5a12f117-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.234454 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.321586 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5zptg\" (UniqueName: \"kubernetes.io/projected/dc54928b-9e6c-45f3-b991-2c58d2a7592d-kube-api-access-5zptg\") pod \"nova-cell0-conductor-db-sync-5lxgd\" (UID: \"dc54928b-9e6c-45f3-b991-2c58d2a7592d\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-5lxgd" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.321668 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc54928b-9e6c-45f3-b991-2c58d2a7592d-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-5lxgd\" (UID: \"dc54928b-9e6c-45f3-b991-2c58d2a7592d\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-5lxgd" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.321715 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dc54928b-9e6c-45f3-b991-2c58d2a7592d-scripts\") pod 
\"nova-cell0-conductor-db-sync-5lxgd\" (UID: \"dc54928b-9e6c-45f3-b991-2c58d2a7592d\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-5lxgd" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.322447 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc54928b-9e6c-45f3-b991-2c58d2a7592d-config-data\") pod \"nova-cell0-conductor-db-sync-5lxgd\" (UID: \"dc54928b-9e6c-45f3-b991-2c58d2a7592d\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-5lxgd" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.322636 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.415654 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"708f2028-1ec7-47eb-95a2-3c6dde44fd83","Type":"ContainerStarted","Data":"881f0e2513823cd252247d7fe6133eade39862d80b0e913543572b0aad225157"} Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.418250 4558 generic.go:334] "Generic (PLEG): container finished" podID="92c317c8-cf30-4046-af0b-8f6c36f69000" containerID="0c31383a0ae8e3556df5e5d344c5e4300ff998da9a0dcb05e9d60b095898581e" exitCode=0 Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.418333 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"92c317c8-cf30-4046-af0b-8f6c36f69000","Type":"ContainerDied","Data":"0c31383a0ae8e3556df5e5d344c5e4300ff998da9a0dcb05e9d60b095898581e"} Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.423141 4558 generic.go:334] "Generic (PLEG): container finished" podID="2328ffd8-4eb7-41b8-abfb-b5ee5a12f117" containerID="19edd3c6ba732244695f4759d0145fea4e245766ce9f1373b9b7c7005a8cd1a2" exitCode=0 Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.423334 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.423399 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"2328ffd8-4eb7-41b8-abfb-b5ee5a12f117","Type":"ContainerDied","Data":"19edd3c6ba732244695f4759d0145fea4e245766ce9f1373b9b7c7005a8cd1a2"} Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.423428 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"2328ffd8-4eb7-41b8-abfb-b5ee5a12f117","Type":"ContainerDied","Data":"01c975adf8400e28d35866331bd51462a577f8d7244921c6bb34e10e1ffa8769"} Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.423474 4558 scope.go:117] "RemoveContainer" containerID="19edd3c6ba732244695f4759d0145fea4e245766ce9f1373b9b7c7005a8cd1a2" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.426521 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc54928b-9e6c-45f3-b991-2c58d2a7592d-config-data\") pod \"nova-cell0-conductor-db-sync-5lxgd\" (UID: \"dc54928b-9e6c-45f3-b991-2c58d2a7592d\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-5lxgd" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.427138 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5zptg\" (UniqueName: \"kubernetes.io/projected/dc54928b-9e6c-45f3-b991-2c58d2a7592d-kube-api-access-5zptg\") pod \"nova-cell0-conductor-db-sync-5lxgd\" (UID: \"dc54928b-9e6c-45f3-b991-2c58d2a7592d\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-5lxgd" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.428303 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc54928b-9e6c-45f3-b991-2c58d2a7592d-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-5lxgd\" (UID: \"dc54928b-9e6c-45f3-b991-2c58d2a7592d\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-5lxgd" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.428409 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dc54928b-9e6c-45f3-b991-2c58d2a7592d-scripts\") pod \"nova-cell0-conductor-db-sync-5lxgd\" (UID: \"dc54928b-9e6c-45f3-b991-2c58d2a7592d\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-5lxgd" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.434883 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc54928b-9e6c-45f3-b991-2c58d2a7592d-config-data\") pod \"nova-cell0-conductor-db-sync-5lxgd\" (UID: \"dc54928b-9e6c-45f3-b991-2c58d2a7592d\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-5lxgd" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.437527 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dc54928b-9e6c-45f3-b991-2c58d2a7592d-scripts\") pod \"nova-cell0-conductor-db-sync-5lxgd\" (UID: \"dc54928b-9e6c-45f3-b991-2c58d2a7592d\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-5lxgd" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.437579 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/dc54928b-9e6c-45f3-b991-2c58d2a7592d-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-5lxgd\" (UID: \"dc54928b-9e6c-45f3-b991-2c58d2a7592d\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-5lxgd" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.448667 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5zptg\" (UniqueName: \"kubernetes.io/projected/dc54928b-9e6c-45f3-b991-2c58d2a7592d-kube-api-access-5zptg\") pod \"nova-cell0-conductor-db-sync-5lxgd\" (UID: \"dc54928b-9e6c-45f3-b991-2c58d2a7592d\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-5lxgd" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.467452 4558 scope.go:117] "RemoveContainer" containerID="51fd39b0c7c20748a2724d7df276628b2e6749c56463b33881b466e963a20122" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.480158 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.491262 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.493032 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-5lxgd" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.497756 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.499501 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.501879 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-public-svc" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.503556 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-external-config-data" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.515285 4558 scope.go:117] "RemoveContainer" containerID="19edd3c6ba732244695f4759d0145fea4e245766ce9f1373b9b7c7005a8cd1a2" Jan 20 17:40:41 crc kubenswrapper[4558]: E0120 17:40:41.516930 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"19edd3c6ba732244695f4759d0145fea4e245766ce9f1373b9b7c7005a8cd1a2\": container with ID starting with 19edd3c6ba732244695f4759d0145fea4e245766ce9f1373b9b7c7005a8cd1a2 not found: ID does not exist" containerID="19edd3c6ba732244695f4759d0145fea4e245766ce9f1373b9b7c7005a8cd1a2" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.516973 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"19edd3c6ba732244695f4759d0145fea4e245766ce9f1373b9b7c7005a8cd1a2"} err="failed to get container status \"19edd3c6ba732244695f4759d0145fea4e245766ce9f1373b9b7c7005a8cd1a2\": rpc error: code = NotFound desc = could not find container \"19edd3c6ba732244695f4759d0145fea4e245766ce9f1373b9b7c7005a8cd1a2\": container with ID starting with 19edd3c6ba732244695f4759d0145fea4e245766ce9f1373b9b7c7005a8cd1a2 not found: ID does not exist" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.517004 4558 scope.go:117] "RemoveContainer" 
containerID="51fd39b0c7c20748a2724d7df276628b2e6749c56463b33881b466e963a20122" Jan 20 17:40:41 crc kubenswrapper[4558]: E0120 17:40:41.518486 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"51fd39b0c7c20748a2724d7df276628b2e6749c56463b33881b466e963a20122\": container with ID starting with 51fd39b0c7c20748a2724d7df276628b2e6749c56463b33881b466e963a20122 not found: ID does not exist" containerID="51fd39b0c7c20748a2724d7df276628b2e6749c56463b33881b466e963a20122" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.518518 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"51fd39b0c7c20748a2724d7df276628b2e6749c56463b33881b466e963a20122"} err="failed to get container status \"51fd39b0c7c20748a2724d7df276628b2e6749c56463b33881b466e963a20122\": rpc error: code = NotFound desc = could not find container \"51fd39b0c7c20748a2724d7df276628b2e6749c56463b33881b466e963a20122\": container with ID starting with 51fd39b0c7c20748a2724d7df276628b2e6749c56463b33881b466e963a20122 not found: ID does not exist" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.518663 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.632220 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xw9hj\" (UniqueName: \"kubernetes.io/projected/552b666d-9d00-469a-9692-45a650c89509-kube-api-access-xw9hj\") pod \"glance-default-external-api-0\" (UID: \"552b666d-9d00-469a-9692-45a650c89509\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.632532 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/552b666d-9d00-469a-9692-45a650c89509-config-data\") pod \"glance-default-external-api-0\" (UID: \"552b666d-9d00-469a-9692-45a650c89509\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.632563 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/552b666d-9d00-469a-9692-45a650c89509-scripts\") pod \"glance-default-external-api-0\" (UID: \"552b666d-9d00-469a-9692-45a650c89509\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.632622 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/552b666d-9d00-469a-9692-45a650c89509-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"552b666d-9d00-469a-9692-45a650c89509\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.632687 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/552b666d-9d00-469a-9692-45a650c89509-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"552b666d-9d00-469a-9692-45a650c89509\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.632713 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/552b666d-9d00-469a-9692-45a650c89509-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"552b666d-9d00-469a-9692-45a650c89509\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.632883 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"552b666d-9d00-469a-9692-45a650c89509\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.632956 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/552b666d-9d00-469a-9692-45a650c89509-logs\") pod \"glance-default-external-api-0\" (UID: \"552b666d-9d00-469a-9692-45a650c89509\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.735201 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/552b666d-9d00-469a-9692-45a650c89509-logs\") pod \"glance-default-external-api-0\" (UID: \"552b666d-9d00-469a-9692-45a650c89509\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.735324 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xw9hj\" (UniqueName: \"kubernetes.io/projected/552b666d-9d00-469a-9692-45a650c89509-kube-api-access-xw9hj\") pod \"glance-default-external-api-0\" (UID: \"552b666d-9d00-469a-9692-45a650c89509\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.735384 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/552b666d-9d00-469a-9692-45a650c89509-config-data\") pod \"glance-default-external-api-0\" (UID: \"552b666d-9d00-469a-9692-45a650c89509\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.735404 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/552b666d-9d00-469a-9692-45a650c89509-scripts\") pod \"glance-default-external-api-0\" (UID: \"552b666d-9d00-469a-9692-45a650c89509\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.735471 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/552b666d-9d00-469a-9692-45a650c89509-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"552b666d-9d00-469a-9692-45a650c89509\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.735568 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/552b666d-9d00-469a-9692-45a650c89509-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"552b666d-9d00-469a-9692-45a650c89509\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.735674 4558 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/552b666d-9d00-469a-9692-45a650c89509-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"552b666d-9d00-469a-9692-45a650c89509\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.735733 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"552b666d-9d00-469a-9692-45a650c89509\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.736002 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"552b666d-9d00-469a-9692-45a650c89509\") device mount path \"/mnt/openstack/pv09\"" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.739103 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/552b666d-9d00-469a-9692-45a650c89509-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"552b666d-9d00-469a-9692-45a650c89509\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.741658 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/552b666d-9d00-469a-9692-45a650c89509-logs\") pod \"glance-default-external-api-0\" (UID: \"552b666d-9d00-469a-9692-45a650c89509\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.744002 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/552b666d-9d00-469a-9692-45a650c89509-config-data\") pod \"glance-default-external-api-0\" (UID: \"552b666d-9d00-469a-9692-45a650c89509\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.749677 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/552b666d-9d00-469a-9692-45a650c89509-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"552b666d-9d00-469a-9692-45a650c89509\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.751569 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/552b666d-9d00-469a-9692-45a650c89509-scripts\") pod \"glance-default-external-api-0\" (UID: \"552b666d-9d00-469a-9692-45a650c89509\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.755228 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/552b666d-9d00-469a-9692-45a650c89509-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"552b666d-9d00-469a-9692-45a650c89509\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.764982 4558 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-xw9hj\" (UniqueName: \"kubernetes.io/projected/552b666d-9d00-469a-9692-45a650c89509-kube-api-access-xw9hj\") pod \"glance-default-external-api-0\" (UID: \"552b666d-9d00-469a-9692-45a650c89509\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.771758 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"552b666d-9d00-469a-9692-45a650c89509\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.818451 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.844750 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.938725 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/92c317c8-cf30-4046-af0b-8f6c36f69000-logs\") pod \"92c317c8-cf30-4046-af0b-8f6c36f69000\" (UID: \"92c317c8-cf30-4046-af0b-8f6c36f69000\") " Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.938861 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92c317c8-cf30-4046-af0b-8f6c36f69000-combined-ca-bundle\") pod \"92c317c8-cf30-4046-af0b-8f6c36f69000\" (UID: \"92c317c8-cf30-4046-af0b-8f6c36f69000\") " Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.938934 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8fpts\" (UniqueName: \"kubernetes.io/projected/92c317c8-cf30-4046-af0b-8f6c36f69000-kube-api-access-8fpts\") pod \"92c317c8-cf30-4046-af0b-8f6c36f69000\" (UID: \"92c317c8-cf30-4046-af0b-8f6c36f69000\") " Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.938966 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/92c317c8-cf30-4046-af0b-8f6c36f69000-httpd-run\") pod \"92c317c8-cf30-4046-af0b-8f6c36f69000\" (UID: \"92c317c8-cf30-4046-af0b-8f6c36f69000\") " Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.939038 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92c317c8-cf30-4046-af0b-8f6c36f69000-config-data\") pod \"92c317c8-cf30-4046-af0b-8f6c36f69000\" (UID: \"92c317c8-cf30-4046-af0b-8f6c36f69000\") " Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.939109 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"92c317c8-cf30-4046-af0b-8f6c36f69000\" (UID: \"92c317c8-cf30-4046-af0b-8f6c36f69000\") " Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.939129 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/92c317c8-cf30-4046-af0b-8f6c36f69000-scripts\") pod \"92c317c8-cf30-4046-af0b-8f6c36f69000\" (UID: \"92c317c8-cf30-4046-af0b-8f6c36f69000\") " Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.939204 4558 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/92c317c8-cf30-4046-af0b-8f6c36f69000-internal-tls-certs\") pod \"92c317c8-cf30-4046-af0b-8f6c36f69000\" (UID: \"92c317c8-cf30-4046-af0b-8f6c36f69000\") " Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.939374 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/92c317c8-cf30-4046-af0b-8f6c36f69000-logs" (OuterVolumeSpecName: "logs") pod "92c317c8-cf30-4046-af0b-8f6c36f69000" (UID: "92c317c8-cf30-4046-af0b-8f6c36f69000"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.939595 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/92c317c8-cf30-4046-af0b-8f6c36f69000-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.942432 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/92c317c8-cf30-4046-af0b-8f6c36f69000-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "92c317c8-cf30-4046-af0b-8f6c36f69000" (UID: "92c317c8-cf30-4046-af0b-8f6c36f69000"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.949274 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92c317c8-cf30-4046-af0b-8f6c36f69000-scripts" (OuterVolumeSpecName: "scripts") pod "92c317c8-cf30-4046-af0b-8f6c36f69000" (UID: "92c317c8-cf30-4046-af0b-8f6c36f69000"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.949414 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "glance") pod "92c317c8-cf30-4046-af0b-8f6c36f69000" (UID: "92c317c8-cf30-4046-af0b-8f6c36f69000"). InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.949825 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/92c317c8-cf30-4046-af0b-8f6c36f69000-kube-api-access-8fpts" (OuterVolumeSpecName: "kube-api-access-8fpts") pod "92c317c8-cf30-4046-af0b-8f6c36f69000" (UID: "92c317c8-cf30-4046-af0b-8f6c36f69000"). InnerVolumeSpecName "kube-api-access-8fpts". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.972665 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92c317c8-cf30-4046-af0b-8f6c36f69000-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "92c317c8-cf30-4046-af0b-8f6c36f69000" (UID: "92c317c8-cf30-4046-af0b-8f6c36f69000"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:40:41 crc kubenswrapper[4558]: I0120 17:40:41.986308 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92c317c8-cf30-4046-af0b-8f6c36f69000-config-data" (OuterVolumeSpecName: "config-data") pod "92c317c8-cf30-4046-af0b-8f6c36f69000" (UID: "92c317c8-cf30-4046-af0b-8f6c36f69000"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:40:42 crc kubenswrapper[4558]: I0120 17:40:42.005295 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92c317c8-cf30-4046-af0b-8f6c36f69000-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "92c317c8-cf30-4046-af0b-8f6c36f69000" (UID: "92c317c8-cf30-4046-af0b-8f6c36f69000"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:40:42 crc kubenswrapper[4558]: I0120 17:40:42.030463 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-db-sync-5lxgd"] Jan 20 17:40:42 crc kubenswrapper[4558]: I0120 17:40:42.042534 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Jan 20 17:40:42 crc kubenswrapper[4558]: I0120 17:40:42.042571 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/92c317c8-cf30-4046-af0b-8f6c36f69000-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:40:42 crc kubenswrapper[4558]: I0120 17:40:42.042589 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/92c317c8-cf30-4046-af0b-8f6c36f69000-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:40:42 crc kubenswrapper[4558]: I0120 17:40:42.042602 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92c317c8-cf30-4046-af0b-8f6c36f69000-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:40:42 crc kubenswrapper[4558]: I0120 17:40:42.042613 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8fpts\" (UniqueName: \"kubernetes.io/projected/92c317c8-cf30-4046-af0b-8f6c36f69000-kube-api-access-8fpts\") on node \"crc\" DevicePath \"\"" Jan 20 17:40:42 crc kubenswrapper[4558]: I0120 17:40:42.042625 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/92c317c8-cf30-4046-af0b-8f6c36f69000-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:40:42 crc kubenswrapper[4558]: I0120 17:40:42.042633 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92c317c8-cf30-4046-af0b-8f6c36f69000-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:40:42 crc kubenswrapper[4558]: I0120 17:40:42.066375 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Jan 20 17:40:42 crc kubenswrapper[4558]: I0120 17:40:42.144503 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:40:42 crc kubenswrapper[4558]: I0120 17:40:42.303534 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:40:42 crc kubenswrapper[4558]: I0120 17:40:42.452973 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-5lxgd" event={"ID":"dc54928b-9e6c-45f3-b991-2c58d2a7592d","Type":"ContainerStarted","Data":"034d451a2e550aee8b855a4ea6bfe4909c52aa2e4a24f9e676477858cba19916"} Jan 20 17:40:42 crc 
kubenswrapper[4558]: I0120 17:40:42.453056 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-5lxgd" event={"ID":"dc54928b-9e6c-45f3-b991-2c58d2a7592d","Type":"ContainerStarted","Data":"a621472962912ada52cbe316012f3187b15fab8ccdcfcd704463c38fb3b24aa1"} Jan 20 17:40:42 crc kubenswrapper[4558]: I0120 17:40:42.467374 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:40:42 crc kubenswrapper[4558]: I0120 17:40:42.467638 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"92c317c8-cf30-4046-af0b-8f6c36f69000","Type":"ContainerDied","Data":"060bea66f307028730316adbd8f3bfcf48d15dc279eb87ccc62b037d74642d41"} Jan 20 17:40:42 crc kubenswrapper[4558]: I0120 17:40:42.469238 4558 scope.go:117] "RemoveContainer" containerID="0c31383a0ae8e3556df5e5d344c5e4300ff998da9a0dcb05e9d60b095898581e" Jan 20 17:40:42 crc kubenswrapper[4558]: I0120 17:40:42.497789 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"708f2028-1ec7-47eb-95a2-3c6dde44fd83","Type":"ContainerStarted","Data":"7f360199ac4d131117fb2542204b79d49108b24a1b90bac50104ec7ecc3539bf"} Jan 20 17:40:42 crc kubenswrapper[4558]: I0120 17:40:42.501147 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"552b666d-9d00-469a-9692-45a650c89509","Type":"ContainerStarted","Data":"e5837a923aabd046f6d66689f88391fda1f7af743ca86d27cf061ae01952b409"} Jan 20 17:40:42 crc kubenswrapper[4558]: I0120 17:40:42.501510 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="708f2028-1ec7-47eb-95a2-3c6dde44fd83" containerName="ceilometer-central-agent" containerID="cri-o://ee91b319b67486aa1a08313a60875e479a6774cc5f6c5c2fd4d4bb4515c32da6" gracePeriod=30 Jan 20 17:40:42 crc kubenswrapper[4558]: I0120 17:40:42.502333 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:42 crc kubenswrapper[4558]: I0120 17:40:42.502797 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="708f2028-1ec7-47eb-95a2-3c6dde44fd83" containerName="proxy-httpd" containerID="cri-o://7f360199ac4d131117fb2542204b79d49108b24a1b90bac50104ec7ecc3539bf" gracePeriod=30 Jan 20 17:40:42 crc kubenswrapper[4558]: I0120 17:40:42.502860 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="708f2028-1ec7-47eb-95a2-3c6dde44fd83" containerName="sg-core" containerID="cri-o://881f0e2513823cd252247d7fe6133eade39862d80b0e913543572b0aad225157" gracePeriod=30 Jan 20 17:40:42 crc kubenswrapper[4558]: I0120 17:40:42.503255 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="708f2028-1ec7-47eb-95a2-3c6dde44fd83" containerName="ceilometer-notification-agent" containerID="cri-o://96bca6e91cb753e1acdd4ab804a541fab6834cae6c7d3cc56a68ca0bdf4bbcf3" gracePeriod=30 Jan 20 17:40:42 crc kubenswrapper[4558]: I0120 17:40:42.532969 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-5lxgd" podStartSLOduration=1.532852036 podStartE2EDuration="1.532852036s" 
podCreationTimestamp="2026-01-20 17:40:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:40:42.473761232 +0000 UTC m=+3536.234099199" watchObservedRunningTime="2026-01-20 17:40:42.532852036 +0000 UTC m=+3536.293190003" Jan 20 17:40:42 crc kubenswrapper[4558]: I0120 17:40:42.548953 4558 scope.go:117] "RemoveContainer" containerID="9139dd64147779f4a8760ed5d87509a43e88362aa5356602bf57b77210031913" Jan 20 17:40:42 crc kubenswrapper[4558]: I0120 17:40:42.566461 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=2.342184628 podStartE2EDuration="5.566443567s" podCreationTimestamp="2026-01-20 17:40:37 +0000 UTC" firstStartedPulling="2026-01-20 17:40:38.506811847 +0000 UTC m=+3532.267149813" lastFinishedPulling="2026-01-20 17:40:41.731070786 +0000 UTC m=+3535.491408752" observedRunningTime="2026-01-20 17:40:42.523191142 +0000 UTC m=+3536.283529099" watchObservedRunningTime="2026-01-20 17:40:42.566443567 +0000 UTC m=+3536.326781533" Jan 20 17:40:42 crc kubenswrapper[4558]: I0120 17:40:42.593508 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2328ffd8-4eb7-41b8-abfb-b5ee5a12f117" path="/var/lib/kubelet/pods/2328ffd8-4eb7-41b8-abfb-b5ee5a12f117/volumes" Jan 20 17:40:42 crc kubenswrapper[4558]: I0120 17:40:42.621735 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:40:42 crc kubenswrapper[4558]: I0120 17:40:42.645795 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:40:42 crc kubenswrapper[4558]: I0120 17:40:42.669285 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:40:42 crc kubenswrapper[4558]: E0120 17:40:42.669785 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92c317c8-cf30-4046-af0b-8f6c36f69000" containerName="glance-httpd" Jan 20 17:40:42 crc kubenswrapper[4558]: I0120 17:40:42.669805 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="92c317c8-cf30-4046-af0b-8f6c36f69000" containerName="glance-httpd" Jan 20 17:40:42 crc kubenswrapper[4558]: E0120 17:40:42.669828 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92c317c8-cf30-4046-af0b-8f6c36f69000" containerName="glance-log" Jan 20 17:40:42 crc kubenswrapper[4558]: I0120 17:40:42.669835 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="92c317c8-cf30-4046-af0b-8f6c36f69000" containerName="glance-log" Jan 20 17:40:42 crc kubenswrapper[4558]: I0120 17:40:42.670023 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="92c317c8-cf30-4046-af0b-8f6c36f69000" containerName="glance-httpd" Jan 20 17:40:42 crc kubenswrapper[4558]: I0120 17:40:42.670063 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="92c317c8-cf30-4046-af0b-8f6c36f69000" containerName="glance-log" Jan 20 17:40:42 crc kubenswrapper[4558]: I0120 17:40:42.675632 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:40:42 crc kubenswrapper[4558]: I0120 17:40:42.677876 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-internal-config-data" Jan 20 17:40:42 crc kubenswrapper[4558]: I0120 17:40:42.678794 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-internal-svc" Jan 20 17:40:42 crc kubenswrapper[4558]: I0120 17:40:42.680805 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:40:42 crc kubenswrapper[4558]: I0120 17:40:42.773738 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qhcb7\" (UniqueName: \"kubernetes.io/projected/2206aa45-a065-4965-8540-a2ba7c707155-kube-api-access-qhcb7\") pod \"glance-default-internal-api-0\" (UID: \"2206aa45-a065-4965-8540-a2ba7c707155\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:40:42 crc kubenswrapper[4558]: I0120 17:40:42.773911 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2206aa45-a065-4965-8540-a2ba7c707155-config-data\") pod \"glance-default-internal-api-0\" (UID: \"2206aa45-a065-4965-8540-a2ba7c707155\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:40:42 crc kubenswrapper[4558]: I0120 17:40:42.773989 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2206aa45-a065-4965-8540-a2ba7c707155-logs\") pod \"glance-default-internal-api-0\" (UID: \"2206aa45-a065-4965-8540-a2ba7c707155\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:40:42 crc kubenswrapper[4558]: I0120 17:40:42.774176 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"2206aa45-a065-4965-8540-a2ba7c707155\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:40:42 crc kubenswrapper[4558]: I0120 17:40:42.774214 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/2206aa45-a065-4965-8540-a2ba7c707155-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"2206aa45-a065-4965-8540-a2ba7c707155\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:40:42 crc kubenswrapper[4558]: I0120 17:40:42.774378 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2206aa45-a065-4965-8540-a2ba7c707155-scripts\") pod \"glance-default-internal-api-0\" (UID: \"2206aa45-a065-4965-8540-a2ba7c707155\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:40:42 crc kubenswrapper[4558]: I0120 17:40:42.774441 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2206aa45-a065-4965-8540-a2ba7c707155-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"2206aa45-a065-4965-8540-a2ba7c707155\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:40:42 crc 
kubenswrapper[4558]: I0120 17:40:42.774594 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2206aa45-a065-4965-8540-a2ba7c707155-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"2206aa45-a065-4965-8540-a2ba7c707155\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:40:42 crc kubenswrapper[4558]: I0120 17:40:42.878805 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2206aa45-a065-4965-8540-a2ba7c707155-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"2206aa45-a065-4965-8540-a2ba7c707155\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:40:42 crc kubenswrapper[4558]: I0120 17:40:42.878904 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2206aa45-a065-4965-8540-a2ba7c707155-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"2206aa45-a065-4965-8540-a2ba7c707155\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:40:42 crc kubenswrapper[4558]: I0120 17:40:42.878983 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qhcb7\" (UniqueName: \"kubernetes.io/projected/2206aa45-a065-4965-8540-a2ba7c707155-kube-api-access-qhcb7\") pod \"glance-default-internal-api-0\" (UID: \"2206aa45-a065-4965-8540-a2ba7c707155\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:40:42 crc kubenswrapper[4558]: I0120 17:40:42.880187 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2206aa45-a065-4965-8540-a2ba7c707155-config-data\") pod \"glance-default-internal-api-0\" (UID: \"2206aa45-a065-4965-8540-a2ba7c707155\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:40:42 crc kubenswrapper[4558]: I0120 17:40:42.880263 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2206aa45-a065-4965-8540-a2ba7c707155-logs\") pod \"glance-default-internal-api-0\" (UID: \"2206aa45-a065-4965-8540-a2ba7c707155\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:40:42 crc kubenswrapper[4558]: I0120 17:40:42.880418 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"2206aa45-a065-4965-8540-a2ba7c707155\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:40:42 crc kubenswrapper[4558]: I0120 17:40:42.880435 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/2206aa45-a065-4965-8540-a2ba7c707155-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"2206aa45-a065-4965-8540-a2ba7c707155\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:40:42 crc kubenswrapper[4558]: I0120 17:40:42.880907 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"2206aa45-a065-4965-8540-a2ba7c707155\") device mount path \"/mnt/openstack/pv11\"" 
pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:40:42 crc kubenswrapper[4558]: I0120 17:40:42.881149 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2206aa45-a065-4965-8540-a2ba7c707155-logs\") pod \"glance-default-internal-api-0\" (UID: \"2206aa45-a065-4965-8540-a2ba7c707155\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:40:42 crc kubenswrapper[4558]: I0120 17:40:42.881894 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/2206aa45-a065-4965-8540-a2ba7c707155-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"2206aa45-a065-4965-8540-a2ba7c707155\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:40:42 crc kubenswrapper[4558]: I0120 17:40:42.887055 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2206aa45-a065-4965-8540-a2ba7c707155-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"2206aa45-a065-4965-8540-a2ba7c707155\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:40:42 crc kubenswrapper[4558]: I0120 17:40:42.887510 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2206aa45-a065-4965-8540-a2ba7c707155-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"2206aa45-a065-4965-8540-a2ba7c707155\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:40:42 crc kubenswrapper[4558]: I0120 17:40:42.890575 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2206aa45-a065-4965-8540-a2ba7c707155-scripts\") pod \"glance-default-internal-api-0\" (UID: \"2206aa45-a065-4965-8540-a2ba7c707155\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:40:42 crc kubenswrapper[4558]: I0120 17:40:42.891451 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2206aa45-a065-4965-8540-a2ba7c707155-config-data\") pod \"glance-default-internal-api-0\" (UID: \"2206aa45-a065-4965-8540-a2ba7c707155\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:40:42 crc kubenswrapper[4558]: I0120 17:40:42.896295 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2206aa45-a065-4965-8540-a2ba7c707155-scripts\") pod \"glance-default-internal-api-0\" (UID: \"2206aa45-a065-4965-8540-a2ba7c707155\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:40:42 crc kubenswrapper[4558]: I0120 17:40:42.897486 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qhcb7\" (UniqueName: \"kubernetes.io/projected/2206aa45-a065-4965-8540-a2ba7c707155-kube-api-access-qhcb7\") pod \"glance-default-internal-api-0\" (UID: \"2206aa45-a065-4965-8540-a2ba7c707155\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:40:42 crc kubenswrapper[4558]: I0120 17:40:42.918388 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"2206aa45-a065-4965-8540-a2ba7c707155\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 
17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.022309 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.121499 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.203820 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/708f2028-1ec7-47eb-95a2-3c6dde44fd83-combined-ca-bundle\") pod \"708f2028-1ec7-47eb-95a2-3c6dde44fd83\" (UID: \"708f2028-1ec7-47eb-95a2-3c6dde44fd83\") " Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.203903 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/708f2028-1ec7-47eb-95a2-3c6dde44fd83-config-data\") pod \"708f2028-1ec7-47eb-95a2-3c6dde44fd83\" (UID: \"708f2028-1ec7-47eb-95a2-3c6dde44fd83\") " Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.204043 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/708f2028-1ec7-47eb-95a2-3c6dde44fd83-log-httpd\") pod \"708f2028-1ec7-47eb-95a2-3c6dde44fd83\" (UID: \"708f2028-1ec7-47eb-95a2-3c6dde44fd83\") " Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.204216 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gl62m\" (UniqueName: \"kubernetes.io/projected/708f2028-1ec7-47eb-95a2-3c6dde44fd83-kube-api-access-gl62m\") pod \"708f2028-1ec7-47eb-95a2-3c6dde44fd83\" (UID: \"708f2028-1ec7-47eb-95a2-3c6dde44fd83\") " Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.204292 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/708f2028-1ec7-47eb-95a2-3c6dde44fd83-scripts\") pod \"708f2028-1ec7-47eb-95a2-3c6dde44fd83\" (UID: \"708f2028-1ec7-47eb-95a2-3c6dde44fd83\") " Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.204362 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/708f2028-1ec7-47eb-95a2-3c6dde44fd83-sg-core-conf-yaml\") pod \"708f2028-1ec7-47eb-95a2-3c6dde44fd83\" (UID: \"708f2028-1ec7-47eb-95a2-3c6dde44fd83\") " Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.204384 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/708f2028-1ec7-47eb-95a2-3c6dde44fd83-run-httpd\") pod \"708f2028-1ec7-47eb-95a2-3c6dde44fd83\" (UID: \"708f2028-1ec7-47eb-95a2-3c6dde44fd83\") " Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.208492 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/708f2028-1ec7-47eb-95a2-3c6dde44fd83-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "708f2028-1ec7-47eb-95a2-3c6dde44fd83" (UID: "708f2028-1ec7-47eb-95a2-3c6dde44fd83"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.208801 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/708f2028-1ec7-47eb-95a2-3c6dde44fd83-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "708f2028-1ec7-47eb-95a2-3c6dde44fd83" (UID: "708f2028-1ec7-47eb-95a2-3c6dde44fd83"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.213112 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/708f2028-1ec7-47eb-95a2-3c6dde44fd83-scripts" (OuterVolumeSpecName: "scripts") pod "708f2028-1ec7-47eb-95a2-3c6dde44fd83" (UID: "708f2028-1ec7-47eb-95a2-3c6dde44fd83"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.217402 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/708f2028-1ec7-47eb-95a2-3c6dde44fd83-kube-api-access-gl62m" (OuterVolumeSpecName: "kube-api-access-gl62m") pod "708f2028-1ec7-47eb-95a2-3c6dde44fd83" (UID: "708f2028-1ec7-47eb-95a2-3c6dde44fd83"). InnerVolumeSpecName "kube-api-access-gl62m". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.251826 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/708f2028-1ec7-47eb-95a2-3c6dde44fd83-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "708f2028-1ec7-47eb-95a2-3c6dde44fd83" (UID: "708f2028-1ec7-47eb-95a2-3c6dde44fd83"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.304141 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/708f2028-1ec7-47eb-95a2-3c6dde44fd83-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "708f2028-1ec7-47eb-95a2-3c6dde44fd83" (UID: "708f2028-1ec7-47eb-95a2-3c6dde44fd83"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.309895 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/708f2028-1ec7-47eb-95a2-3c6dde44fd83-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.309921 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/708f2028-1ec7-47eb-95a2-3c6dde44fd83-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.309934 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gl62m\" (UniqueName: \"kubernetes.io/projected/708f2028-1ec7-47eb-95a2-3c6dde44fd83-kube-api-access-gl62m\") on node \"crc\" DevicePath \"\"" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.309946 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/708f2028-1ec7-47eb-95a2-3c6dde44fd83-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.309954 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/708f2028-1ec7-47eb-95a2-3c6dde44fd83-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.309964 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/708f2028-1ec7-47eb-95a2-3c6dde44fd83-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.324280 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/708f2028-1ec7-47eb-95a2-3c6dde44fd83-config-data" (OuterVolumeSpecName: "config-data") pod "708f2028-1ec7-47eb-95a2-3c6dde44fd83" (UID: "708f2028-1ec7-47eb-95a2-3c6dde44fd83"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.413052 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/708f2028-1ec7-47eb-95a2-3c6dde44fd83-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.470623 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.533489 4558 generic.go:334] "Generic (PLEG): container finished" podID="708f2028-1ec7-47eb-95a2-3c6dde44fd83" containerID="7f360199ac4d131117fb2542204b79d49108b24a1b90bac50104ec7ecc3539bf" exitCode=0 Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.533543 4558 generic.go:334] "Generic (PLEG): container finished" podID="708f2028-1ec7-47eb-95a2-3c6dde44fd83" containerID="881f0e2513823cd252247d7fe6133eade39862d80b0e913543572b0aad225157" exitCode=2 Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.533566 4558 generic.go:334] "Generic (PLEG): container finished" podID="708f2028-1ec7-47eb-95a2-3c6dde44fd83" containerID="96bca6e91cb753e1acdd4ab804a541fab6834cae6c7d3cc56a68ca0bdf4bbcf3" exitCode=0 Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.533577 4558 generic.go:334] "Generic (PLEG): container finished" podID="708f2028-1ec7-47eb-95a2-3c6dde44fd83" containerID="ee91b319b67486aa1a08313a60875e479a6774cc5f6c5c2fd4d4bb4515c32da6" exitCode=0 Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.533599 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"708f2028-1ec7-47eb-95a2-3c6dde44fd83","Type":"ContainerDied","Data":"7f360199ac4d131117fb2542204b79d49108b24a1b90bac50104ec7ecc3539bf"} Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.533646 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.533666 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"708f2028-1ec7-47eb-95a2-3c6dde44fd83","Type":"ContainerDied","Data":"881f0e2513823cd252247d7fe6133eade39862d80b0e913543572b0aad225157"} Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.533685 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"708f2028-1ec7-47eb-95a2-3c6dde44fd83","Type":"ContainerDied","Data":"96bca6e91cb753e1acdd4ab804a541fab6834cae6c7d3cc56a68ca0bdf4bbcf3"} Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.533702 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"708f2028-1ec7-47eb-95a2-3c6dde44fd83","Type":"ContainerDied","Data":"ee91b319b67486aa1a08313a60875e479a6774cc5f6c5c2fd4d4bb4515c32da6"} Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.533716 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"708f2028-1ec7-47eb-95a2-3c6dde44fd83","Type":"ContainerDied","Data":"8c0001781cf469232029c66008c49fa8039fb2f7584d8ca5b9ed02cd768ef256"} Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.533735 4558 scope.go:117] "RemoveContainer" containerID="7f360199ac4d131117fb2542204b79d49108b24a1b90bac50104ec7ecc3539bf" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.536694 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"552b666d-9d00-469a-9692-45a650c89509","Type":"ContainerStarted","Data":"14506618325c0d6ec0a676afd800224b20b8f064115011601c645fa282561267"} Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.538260 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"2206aa45-a065-4965-8540-a2ba7c707155","Type":"ContainerStarted","Data":"8799b33ed4ac8956f001e56712aa2a3930b6dab88b0275fc9af715ecd90381ae"} Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.572128 4558 scope.go:117] "RemoveContainer" containerID="881f0e2513823cd252247d7fe6133eade39862d80b0e913543572b0aad225157" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.590243 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.616020 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.631520 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:40:43 crc kubenswrapper[4558]: E0120 17:40:43.632000 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="708f2028-1ec7-47eb-95a2-3c6dde44fd83" containerName="ceilometer-notification-agent" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.632018 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="708f2028-1ec7-47eb-95a2-3c6dde44fd83" containerName="ceilometer-notification-agent" Jan 20 17:40:43 crc kubenswrapper[4558]: E0120 17:40:43.632041 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="708f2028-1ec7-47eb-95a2-3c6dde44fd83" containerName="ceilometer-central-agent" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.632048 4558 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="708f2028-1ec7-47eb-95a2-3c6dde44fd83" containerName="ceilometer-central-agent" Jan 20 17:40:43 crc kubenswrapper[4558]: E0120 17:40:43.632068 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="708f2028-1ec7-47eb-95a2-3c6dde44fd83" containerName="proxy-httpd" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.632073 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="708f2028-1ec7-47eb-95a2-3c6dde44fd83" containerName="proxy-httpd" Jan 20 17:40:43 crc kubenswrapper[4558]: E0120 17:40:43.632091 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="708f2028-1ec7-47eb-95a2-3c6dde44fd83" containerName="sg-core" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.632097 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="708f2028-1ec7-47eb-95a2-3c6dde44fd83" containerName="sg-core" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.632306 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="708f2028-1ec7-47eb-95a2-3c6dde44fd83" containerName="ceilometer-notification-agent" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.632321 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="708f2028-1ec7-47eb-95a2-3c6dde44fd83" containerName="ceilometer-central-agent" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.632336 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="708f2028-1ec7-47eb-95a2-3c6dde44fd83" containerName="sg-core" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.632353 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="708f2028-1ec7-47eb-95a2-3c6dde44fd83" containerName="proxy-httpd" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.634158 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.638591 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.638949 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.673849 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.721348 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/457cc366-8440-4726-a90c-802e898d0f84-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"457cc366-8440-4726-a90c-802e898d0f84\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.721443 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/457cc366-8440-4726-a90c-802e898d0f84-run-httpd\") pod \"ceilometer-0\" (UID: \"457cc366-8440-4726-a90c-802e898d0f84\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.721514 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/457cc366-8440-4726-a90c-802e898d0f84-config-data\") pod \"ceilometer-0\" (UID: \"457cc366-8440-4726-a90c-802e898d0f84\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.721556 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/457cc366-8440-4726-a90c-802e898d0f84-scripts\") pod \"ceilometer-0\" (UID: \"457cc366-8440-4726-a90c-802e898d0f84\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.721801 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/457cc366-8440-4726-a90c-802e898d0f84-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"457cc366-8440-4726-a90c-802e898d0f84\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.722075 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/457cc366-8440-4726-a90c-802e898d0f84-log-httpd\") pod \"ceilometer-0\" (UID: \"457cc366-8440-4726-a90c-802e898d0f84\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.722139 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ck8cl\" (UniqueName: \"kubernetes.io/projected/457cc366-8440-4726-a90c-802e898d0f84-kube-api-access-ck8cl\") pod \"ceilometer-0\" (UID: \"457cc366-8440-4726-a90c-802e898d0f84\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.730621 4558 scope.go:117] "RemoveContainer" containerID="96bca6e91cb753e1acdd4ab804a541fab6834cae6c7d3cc56a68ca0bdf4bbcf3" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.754128 4558 scope.go:117] "RemoveContainer" 
containerID="ee91b319b67486aa1a08313a60875e479a6774cc5f6c5c2fd4d4bb4515c32da6" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.778838 4558 scope.go:117] "RemoveContainer" containerID="7f360199ac4d131117fb2542204b79d49108b24a1b90bac50104ec7ecc3539bf" Jan 20 17:40:43 crc kubenswrapper[4558]: E0120 17:40:43.779373 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7f360199ac4d131117fb2542204b79d49108b24a1b90bac50104ec7ecc3539bf\": container with ID starting with 7f360199ac4d131117fb2542204b79d49108b24a1b90bac50104ec7ecc3539bf not found: ID does not exist" containerID="7f360199ac4d131117fb2542204b79d49108b24a1b90bac50104ec7ecc3539bf" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.779418 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7f360199ac4d131117fb2542204b79d49108b24a1b90bac50104ec7ecc3539bf"} err="failed to get container status \"7f360199ac4d131117fb2542204b79d49108b24a1b90bac50104ec7ecc3539bf\": rpc error: code = NotFound desc = could not find container \"7f360199ac4d131117fb2542204b79d49108b24a1b90bac50104ec7ecc3539bf\": container with ID starting with 7f360199ac4d131117fb2542204b79d49108b24a1b90bac50104ec7ecc3539bf not found: ID does not exist" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.779454 4558 scope.go:117] "RemoveContainer" containerID="881f0e2513823cd252247d7fe6133eade39862d80b0e913543572b0aad225157" Jan 20 17:40:43 crc kubenswrapper[4558]: E0120 17:40:43.779932 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"881f0e2513823cd252247d7fe6133eade39862d80b0e913543572b0aad225157\": container with ID starting with 881f0e2513823cd252247d7fe6133eade39862d80b0e913543572b0aad225157 not found: ID does not exist" containerID="881f0e2513823cd252247d7fe6133eade39862d80b0e913543572b0aad225157" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.779973 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"881f0e2513823cd252247d7fe6133eade39862d80b0e913543572b0aad225157"} err="failed to get container status \"881f0e2513823cd252247d7fe6133eade39862d80b0e913543572b0aad225157\": rpc error: code = NotFound desc = could not find container \"881f0e2513823cd252247d7fe6133eade39862d80b0e913543572b0aad225157\": container with ID starting with 881f0e2513823cd252247d7fe6133eade39862d80b0e913543572b0aad225157 not found: ID does not exist" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.780002 4558 scope.go:117] "RemoveContainer" containerID="96bca6e91cb753e1acdd4ab804a541fab6834cae6c7d3cc56a68ca0bdf4bbcf3" Jan 20 17:40:43 crc kubenswrapper[4558]: E0120 17:40:43.780381 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"96bca6e91cb753e1acdd4ab804a541fab6834cae6c7d3cc56a68ca0bdf4bbcf3\": container with ID starting with 96bca6e91cb753e1acdd4ab804a541fab6834cae6c7d3cc56a68ca0bdf4bbcf3 not found: ID does not exist" containerID="96bca6e91cb753e1acdd4ab804a541fab6834cae6c7d3cc56a68ca0bdf4bbcf3" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.780422 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"96bca6e91cb753e1acdd4ab804a541fab6834cae6c7d3cc56a68ca0bdf4bbcf3"} err="failed to get container status \"96bca6e91cb753e1acdd4ab804a541fab6834cae6c7d3cc56a68ca0bdf4bbcf3\": rpc error: code = 
NotFound desc = could not find container \"96bca6e91cb753e1acdd4ab804a541fab6834cae6c7d3cc56a68ca0bdf4bbcf3\": container with ID starting with 96bca6e91cb753e1acdd4ab804a541fab6834cae6c7d3cc56a68ca0bdf4bbcf3 not found: ID does not exist" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.780447 4558 scope.go:117] "RemoveContainer" containerID="ee91b319b67486aa1a08313a60875e479a6774cc5f6c5c2fd4d4bb4515c32da6" Jan 20 17:40:43 crc kubenswrapper[4558]: E0120 17:40:43.782043 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ee91b319b67486aa1a08313a60875e479a6774cc5f6c5c2fd4d4bb4515c32da6\": container with ID starting with ee91b319b67486aa1a08313a60875e479a6774cc5f6c5c2fd4d4bb4515c32da6 not found: ID does not exist" containerID="ee91b319b67486aa1a08313a60875e479a6774cc5f6c5c2fd4d4bb4515c32da6" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.782074 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ee91b319b67486aa1a08313a60875e479a6774cc5f6c5c2fd4d4bb4515c32da6"} err="failed to get container status \"ee91b319b67486aa1a08313a60875e479a6774cc5f6c5c2fd4d4bb4515c32da6\": rpc error: code = NotFound desc = could not find container \"ee91b319b67486aa1a08313a60875e479a6774cc5f6c5c2fd4d4bb4515c32da6\": container with ID starting with ee91b319b67486aa1a08313a60875e479a6774cc5f6c5c2fd4d4bb4515c32da6 not found: ID does not exist" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.782094 4558 scope.go:117] "RemoveContainer" containerID="7f360199ac4d131117fb2542204b79d49108b24a1b90bac50104ec7ecc3539bf" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.783558 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7f360199ac4d131117fb2542204b79d49108b24a1b90bac50104ec7ecc3539bf"} err="failed to get container status \"7f360199ac4d131117fb2542204b79d49108b24a1b90bac50104ec7ecc3539bf\": rpc error: code = NotFound desc = could not find container \"7f360199ac4d131117fb2542204b79d49108b24a1b90bac50104ec7ecc3539bf\": container with ID starting with 7f360199ac4d131117fb2542204b79d49108b24a1b90bac50104ec7ecc3539bf not found: ID does not exist" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.783593 4558 scope.go:117] "RemoveContainer" containerID="881f0e2513823cd252247d7fe6133eade39862d80b0e913543572b0aad225157" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.783853 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"881f0e2513823cd252247d7fe6133eade39862d80b0e913543572b0aad225157"} err="failed to get container status \"881f0e2513823cd252247d7fe6133eade39862d80b0e913543572b0aad225157\": rpc error: code = NotFound desc = could not find container \"881f0e2513823cd252247d7fe6133eade39862d80b0e913543572b0aad225157\": container with ID starting with 881f0e2513823cd252247d7fe6133eade39862d80b0e913543572b0aad225157 not found: ID does not exist" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.783879 4558 scope.go:117] "RemoveContainer" containerID="96bca6e91cb753e1acdd4ab804a541fab6834cae6c7d3cc56a68ca0bdf4bbcf3" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.786968 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"96bca6e91cb753e1acdd4ab804a541fab6834cae6c7d3cc56a68ca0bdf4bbcf3"} err="failed to get container status \"96bca6e91cb753e1acdd4ab804a541fab6834cae6c7d3cc56a68ca0bdf4bbcf3\": rpc error: code = 
NotFound desc = could not find container \"96bca6e91cb753e1acdd4ab804a541fab6834cae6c7d3cc56a68ca0bdf4bbcf3\": container with ID starting with 96bca6e91cb753e1acdd4ab804a541fab6834cae6c7d3cc56a68ca0bdf4bbcf3 not found: ID does not exist" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.787000 4558 scope.go:117] "RemoveContainer" containerID="ee91b319b67486aa1a08313a60875e479a6774cc5f6c5c2fd4d4bb4515c32da6" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.787432 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ee91b319b67486aa1a08313a60875e479a6774cc5f6c5c2fd4d4bb4515c32da6"} err="failed to get container status \"ee91b319b67486aa1a08313a60875e479a6774cc5f6c5c2fd4d4bb4515c32da6\": rpc error: code = NotFound desc = could not find container \"ee91b319b67486aa1a08313a60875e479a6774cc5f6c5c2fd4d4bb4515c32da6\": container with ID starting with ee91b319b67486aa1a08313a60875e479a6774cc5f6c5c2fd4d4bb4515c32da6 not found: ID does not exist" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.787462 4558 scope.go:117] "RemoveContainer" containerID="7f360199ac4d131117fb2542204b79d49108b24a1b90bac50104ec7ecc3539bf" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.790371 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7f360199ac4d131117fb2542204b79d49108b24a1b90bac50104ec7ecc3539bf"} err="failed to get container status \"7f360199ac4d131117fb2542204b79d49108b24a1b90bac50104ec7ecc3539bf\": rpc error: code = NotFound desc = could not find container \"7f360199ac4d131117fb2542204b79d49108b24a1b90bac50104ec7ecc3539bf\": container with ID starting with 7f360199ac4d131117fb2542204b79d49108b24a1b90bac50104ec7ecc3539bf not found: ID does not exist" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.790399 4558 scope.go:117] "RemoveContainer" containerID="881f0e2513823cd252247d7fe6133eade39862d80b0e913543572b0aad225157" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.795808 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"881f0e2513823cd252247d7fe6133eade39862d80b0e913543572b0aad225157"} err="failed to get container status \"881f0e2513823cd252247d7fe6133eade39862d80b0e913543572b0aad225157\": rpc error: code = NotFound desc = could not find container \"881f0e2513823cd252247d7fe6133eade39862d80b0e913543572b0aad225157\": container with ID starting with 881f0e2513823cd252247d7fe6133eade39862d80b0e913543572b0aad225157 not found: ID does not exist" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.795840 4558 scope.go:117] "RemoveContainer" containerID="96bca6e91cb753e1acdd4ab804a541fab6834cae6c7d3cc56a68ca0bdf4bbcf3" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.796193 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"96bca6e91cb753e1acdd4ab804a541fab6834cae6c7d3cc56a68ca0bdf4bbcf3"} err="failed to get container status \"96bca6e91cb753e1acdd4ab804a541fab6834cae6c7d3cc56a68ca0bdf4bbcf3\": rpc error: code = NotFound desc = could not find container \"96bca6e91cb753e1acdd4ab804a541fab6834cae6c7d3cc56a68ca0bdf4bbcf3\": container with ID starting with 96bca6e91cb753e1acdd4ab804a541fab6834cae6c7d3cc56a68ca0bdf4bbcf3 not found: ID does not exist" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.796222 4558 scope.go:117] "RemoveContainer" containerID="ee91b319b67486aa1a08313a60875e479a6774cc5f6c5c2fd4d4bb4515c32da6" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 
17:40:43.796590 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ee91b319b67486aa1a08313a60875e479a6774cc5f6c5c2fd4d4bb4515c32da6"} err="failed to get container status \"ee91b319b67486aa1a08313a60875e479a6774cc5f6c5c2fd4d4bb4515c32da6\": rpc error: code = NotFound desc = could not find container \"ee91b319b67486aa1a08313a60875e479a6774cc5f6c5c2fd4d4bb4515c32da6\": container with ID starting with ee91b319b67486aa1a08313a60875e479a6774cc5f6c5c2fd4d4bb4515c32da6 not found: ID does not exist" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.796619 4558 scope.go:117] "RemoveContainer" containerID="7f360199ac4d131117fb2542204b79d49108b24a1b90bac50104ec7ecc3539bf" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.796920 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7f360199ac4d131117fb2542204b79d49108b24a1b90bac50104ec7ecc3539bf"} err="failed to get container status \"7f360199ac4d131117fb2542204b79d49108b24a1b90bac50104ec7ecc3539bf\": rpc error: code = NotFound desc = could not find container \"7f360199ac4d131117fb2542204b79d49108b24a1b90bac50104ec7ecc3539bf\": container with ID starting with 7f360199ac4d131117fb2542204b79d49108b24a1b90bac50104ec7ecc3539bf not found: ID does not exist" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.796960 4558 scope.go:117] "RemoveContainer" containerID="881f0e2513823cd252247d7fe6133eade39862d80b0e913543572b0aad225157" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.797277 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"881f0e2513823cd252247d7fe6133eade39862d80b0e913543572b0aad225157"} err="failed to get container status \"881f0e2513823cd252247d7fe6133eade39862d80b0e913543572b0aad225157\": rpc error: code = NotFound desc = could not find container \"881f0e2513823cd252247d7fe6133eade39862d80b0e913543572b0aad225157\": container with ID starting with 881f0e2513823cd252247d7fe6133eade39862d80b0e913543572b0aad225157 not found: ID does not exist" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.797302 4558 scope.go:117] "RemoveContainer" containerID="96bca6e91cb753e1acdd4ab804a541fab6834cae6c7d3cc56a68ca0bdf4bbcf3" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.797556 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"96bca6e91cb753e1acdd4ab804a541fab6834cae6c7d3cc56a68ca0bdf4bbcf3"} err="failed to get container status \"96bca6e91cb753e1acdd4ab804a541fab6834cae6c7d3cc56a68ca0bdf4bbcf3\": rpc error: code = NotFound desc = could not find container \"96bca6e91cb753e1acdd4ab804a541fab6834cae6c7d3cc56a68ca0bdf4bbcf3\": container with ID starting with 96bca6e91cb753e1acdd4ab804a541fab6834cae6c7d3cc56a68ca0bdf4bbcf3 not found: ID does not exist" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.797576 4558 scope.go:117] "RemoveContainer" containerID="ee91b319b67486aa1a08313a60875e479a6774cc5f6c5c2fd4d4bb4515c32da6" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.797867 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ee91b319b67486aa1a08313a60875e479a6774cc5f6c5c2fd4d4bb4515c32da6"} err="failed to get container status \"ee91b319b67486aa1a08313a60875e479a6774cc5f6c5c2fd4d4bb4515c32da6\": rpc error: code = NotFound desc = could not find container \"ee91b319b67486aa1a08313a60875e479a6774cc5f6c5c2fd4d4bb4515c32da6\": container with ID starting with 
ee91b319b67486aa1a08313a60875e479a6774cc5f6c5c2fd4d4bb4515c32da6 not found: ID does not exist" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.823573 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/457cc366-8440-4726-a90c-802e898d0f84-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"457cc366-8440-4726-a90c-802e898d0f84\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.823623 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/457cc366-8440-4726-a90c-802e898d0f84-run-httpd\") pod \"ceilometer-0\" (UID: \"457cc366-8440-4726-a90c-802e898d0f84\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.823663 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/457cc366-8440-4726-a90c-802e898d0f84-config-data\") pod \"ceilometer-0\" (UID: \"457cc366-8440-4726-a90c-802e898d0f84\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.823694 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/457cc366-8440-4726-a90c-802e898d0f84-scripts\") pod \"ceilometer-0\" (UID: \"457cc366-8440-4726-a90c-802e898d0f84\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.823765 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/457cc366-8440-4726-a90c-802e898d0f84-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"457cc366-8440-4726-a90c-802e898d0f84\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.823892 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/457cc366-8440-4726-a90c-802e898d0f84-log-httpd\") pod \"ceilometer-0\" (UID: \"457cc366-8440-4726-a90c-802e898d0f84\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.823928 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ck8cl\" (UniqueName: \"kubernetes.io/projected/457cc366-8440-4726-a90c-802e898d0f84-kube-api-access-ck8cl\") pod \"ceilometer-0\" (UID: \"457cc366-8440-4726-a90c-802e898d0f84\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.824686 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/457cc366-8440-4726-a90c-802e898d0f84-run-httpd\") pod \"ceilometer-0\" (UID: \"457cc366-8440-4726-a90c-802e898d0f84\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.824729 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/457cc366-8440-4726-a90c-802e898d0f84-log-httpd\") pod \"ceilometer-0\" (UID: \"457cc366-8440-4726-a90c-802e898d0f84\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.828690 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/457cc366-8440-4726-a90c-802e898d0f84-scripts\") pod \"ceilometer-0\" (UID: \"457cc366-8440-4726-a90c-802e898d0f84\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.828753 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/457cc366-8440-4726-a90c-802e898d0f84-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"457cc366-8440-4726-a90c-802e898d0f84\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.828857 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/457cc366-8440-4726-a90c-802e898d0f84-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"457cc366-8440-4726-a90c-802e898d0f84\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.838194 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/457cc366-8440-4726-a90c-802e898d0f84-config-data\") pod \"ceilometer-0\" (UID: \"457cc366-8440-4726-a90c-802e898d0f84\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:43 crc kubenswrapper[4558]: I0120 17:40:43.841086 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ck8cl\" (UniqueName: \"kubernetes.io/projected/457cc366-8440-4726-a90c-802e898d0f84-kube-api-access-ck8cl\") pod \"ceilometer-0\" (UID: \"457cc366-8440-4726-a90c-802e898d0f84\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:44 crc kubenswrapper[4558]: I0120 17:40:44.031000 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:44 crc kubenswrapper[4558]: I0120 17:40:44.361274 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:40:44 crc kubenswrapper[4558]: I0120 17:40:44.468805 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:40:44 crc kubenswrapper[4558]: I0120 17:40:44.552481 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"552b666d-9d00-469a-9692-45a650c89509","Type":"ContainerStarted","Data":"5afb53a255c36f87bdaa69785aabcf9ff9ac19df018da6ed05b316bca41bf72e"} Jan 20 17:40:44 crc kubenswrapper[4558]: I0120 17:40:44.558246 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"2206aa45-a065-4965-8540-a2ba7c707155","Type":"ContainerStarted","Data":"55844a34ec7ec510b2a22da96f2d4570e9b9f40732ea28ed1bd947615b0e17b3"} Jan 20 17:40:44 crc kubenswrapper[4558]: I0120 17:40:44.558294 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"2206aa45-a065-4965-8540-a2ba7c707155","Type":"ContainerStarted","Data":"5068001b8f2449f80ce36f3e31ba0522c81e3602c230aa8688078f521d43ddc6"} Jan 20 17:40:44 crc kubenswrapper[4558]: I0120 17:40:44.560442 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"457cc366-8440-4726-a90c-802e898d0f84","Type":"ContainerStarted","Data":"8d32ea3ecc33811499b0e4c0e0dd7cc5caf97de37a52eb5958f5444ee0576bf7"} Jan 20 17:40:44 crc kubenswrapper[4558]: I0120 17:40:44.583998 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod 
volumes dir" podUID="708f2028-1ec7-47eb-95a2-3c6dde44fd83" path="/var/lib/kubelet/pods/708f2028-1ec7-47eb-95a2-3c6dde44fd83/volumes" Jan 20 17:40:44 crc kubenswrapper[4558]: I0120 17:40:44.585851 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="92c317c8-cf30-4046-af0b-8f6c36f69000" path="/var/lib/kubelet/pods/92c317c8-cf30-4046-af0b-8f6c36f69000/volumes" Jan 20 17:40:44 crc kubenswrapper[4558]: I0120 17:40:44.590078 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-external-api-0" podStartSLOduration=3.590043759 podStartE2EDuration="3.590043759s" podCreationTimestamp="2026-01-20 17:40:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:40:44.570596214 +0000 UTC m=+3538.330934180" watchObservedRunningTime="2026-01-20 17:40:44.590043759 +0000 UTC m=+3538.350381726" Jan 20 17:40:44 crc kubenswrapper[4558]: I0120 17:40:44.607285 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-internal-api-0" podStartSLOduration=2.607266723 podStartE2EDuration="2.607266723s" podCreationTimestamp="2026-01-20 17:40:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:40:44.589036937 +0000 UTC m=+3538.349374904" watchObservedRunningTime="2026-01-20 17:40:44.607266723 +0000 UTC m=+3538.367604690" Jan 20 17:40:45 crc kubenswrapper[4558]: I0120 17:40:45.576315 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"457cc366-8440-4726-a90c-802e898d0f84","Type":"ContainerStarted","Data":"d4b0d41811dc6f17f139d4a0f49ae78ef01768feb7955046498d3ea378596a1a"} Jan 20 17:40:46 crc kubenswrapper[4558]: I0120 17:40:46.588042 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"457cc366-8440-4726-a90c-802e898d0f84","Type":"ContainerStarted","Data":"4a801927d2dcbde10e18b933214d32d7d87a4a051dfb55beedea678bc3623803"} Jan 20 17:40:47 crc kubenswrapper[4558]: I0120 17:40:47.599625 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"457cc366-8440-4726-a90c-802e898d0f84","Type":"ContainerStarted","Data":"8a9b68463b99b9617020c5593db8fac279ab4149d91a8414ac8e1be6453c802d"} Jan 20 17:40:48 crc kubenswrapper[4558]: I0120 17:40:48.609864 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"457cc366-8440-4726-a90c-802e898d0f84","Type":"ContainerStarted","Data":"b5233e268751b0797a5b84478b3d5c10f6691e979482bf322a47d667545c4d17"} Jan 20 17:40:48 crc kubenswrapper[4558]: I0120 17:40:48.610265 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:48 crc kubenswrapper[4558]: I0120 17:40:48.610044 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="457cc366-8440-4726-a90c-802e898d0f84" containerName="proxy-httpd" containerID="cri-o://b5233e268751b0797a5b84478b3d5c10f6691e979482bf322a47d667545c4d17" gracePeriod=30 Jan 20 17:40:48 crc kubenswrapper[4558]: I0120 17:40:48.610007 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="457cc366-8440-4726-a90c-802e898d0f84" 
containerName="ceilometer-central-agent" containerID="cri-o://d4b0d41811dc6f17f139d4a0f49ae78ef01768feb7955046498d3ea378596a1a" gracePeriod=30 Jan 20 17:40:48 crc kubenswrapper[4558]: I0120 17:40:48.610124 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="457cc366-8440-4726-a90c-802e898d0f84" containerName="ceilometer-notification-agent" containerID="cri-o://4a801927d2dcbde10e18b933214d32d7d87a4a051dfb55beedea678bc3623803" gracePeriod=30 Jan 20 17:40:48 crc kubenswrapper[4558]: I0120 17:40:48.610073 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="457cc366-8440-4726-a90c-802e898d0f84" containerName="sg-core" containerID="cri-o://8a9b68463b99b9617020c5593db8fac279ab4149d91a8414ac8e1be6453c802d" gracePeriod=30 Jan 20 17:40:48 crc kubenswrapper[4558]: I0120 17:40:48.612414 4558 generic.go:334] "Generic (PLEG): container finished" podID="dc54928b-9e6c-45f3-b991-2c58d2a7592d" containerID="034d451a2e550aee8b855a4ea6bfe4909c52aa2e4a24f9e676477858cba19916" exitCode=0 Jan 20 17:40:48 crc kubenswrapper[4558]: I0120 17:40:48.612449 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-5lxgd" event={"ID":"dc54928b-9e6c-45f3-b991-2c58d2a7592d","Type":"ContainerDied","Data":"034d451a2e550aee8b855a4ea6bfe4909c52aa2e4a24f9e676477858cba19916"} Jan 20 17:40:48 crc kubenswrapper[4558]: I0120 17:40:48.631953 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=2.107383581 podStartE2EDuration="5.631936756s" podCreationTimestamp="2026-01-20 17:40:43 +0000 UTC" firstStartedPulling="2026-01-20 17:40:44.476387439 +0000 UTC m=+3538.236725405" lastFinishedPulling="2026-01-20 17:40:48.000940603 +0000 UTC m=+3541.761278580" observedRunningTime="2026-01-20 17:40:48.625720919 +0000 UTC m=+3542.386058886" watchObservedRunningTime="2026-01-20 17:40:48.631936756 +0000 UTC m=+3542.392274724" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.286851 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.346046 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/457cc366-8440-4726-a90c-802e898d0f84-sg-core-conf-yaml\") pod \"457cc366-8440-4726-a90c-802e898d0f84\" (UID: \"457cc366-8440-4726-a90c-802e898d0f84\") " Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.346111 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/457cc366-8440-4726-a90c-802e898d0f84-config-data\") pod \"457cc366-8440-4726-a90c-802e898d0f84\" (UID: \"457cc366-8440-4726-a90c-802e898d0f84\") " Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.346274 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ck8cl\" (UniqueName: \"kubernetes.io/projected/457cc366-8440-4726-a90c-802e898d0f84-kube-api-access-ck8cl\") pod \"457cc366-8440-4726-a90c-802e898d0f84\" (UID: \"457cc366-8440-4726-a90c-802e898d0f84\") " Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.346313 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/457cc366-8440-4726-a90c-802e898d0f84-log-httpd\") pod \"457cc366-8440-4726-a90c-802e898d0f84\" (UID: \"457cc366-8440-4726-a90c-802e898d0f84\") " Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.347037 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/457cc366-8440-4726-a90c-802e898d0f84-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "457cc366-8440-4726-a90c-802e898d0f84" (UID: "457cc366-8440-4726-a90c-802e898d0f84"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.347269 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/457cc366-8440-4726-a90c-802e898d0f84-scripts\") pod \"457cc366-8440-4726-a90c-802e898d0f84\" (UID: \"457cc366-8440-4726-a90c-802e898d0f84\") " Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.347700 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/457cc366-8440-4726-a90c-802e898d0f84-combined-ca-bundle\") pod \"457cc366-8440-4726-a90c-802e898d0f84\" (UID: \"457cc366-8440-4726-a90c-802e898d0f84\") " Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.347792 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/457cc366-8440-4726-a90c-802e898d0f84-run-httpd\") pod \"457cc366-8440-4726-a90c-802e898d0f84\" (UID: \"457cc366-8440-4726-a90c-802e898d0f84\") " Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.348221 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/457cc366-8440-4726-a90c-802e898d0f84-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "457cc366-8440-4726-a90c-802e898d0f84" (UID: "457cc366-8440-4726-a90c-802e898d0f84"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.348538 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/457cc366-8440-4726-a90c-802e898d0f84-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.348566 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/457cc366-8440-4726-a90c-802e898d0f84-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.357241 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/457cc366-8440-4726-a90c-802e898d0f84-kube-api-access-ck8cl" (OuterVolumeSpecName: "kube-api-access-ck8cl") pod "457cc366-8440-4726-a90c-802e898d0f84" (UID: "457cc366-8440-4726-a90c-802e898d0f84"). InnerVolumeSpecName "kube-api-access-ck8cl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.362684 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/457cc366-8440-4726-a90c-802e898d0f84-scripts" (OuterVolumeSpecName: "scripts") pod "457cc366-8440-4726-a90c-802e898d0f84" (UID: "457cc366-8440-4726-a90c-802e898d0f84"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.372733 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/457cc366-8440-4726-a90c-802e898d0f84-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "457cc366-8440-4726-a90c-802e898d0f84" (UID: "457cc366-8440-4726-a90c-802e898d0f84"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.407924 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/457cc366-8440-4726-a90c-802e898d0f84-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "457cc366-8440-4726-a90c-802e898d0f84" (UID: "457cc366-8440-4726-a90c-802e898d0f84"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.418645 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/457cc366-8440-4726-a90c-802e898d0f84-config-data" (OuterVolumeSpecName: "config-data") pod "457cc366-8440-4726-a90c-802e898d0f84" (UID: "457cc366-8440-4726-a90c-802e898d0f84"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.450861 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/457cc366-8440-4726-a90c-802e898d0f84-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.450902 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/457cc366-8440-4726-a90c-802e898d0f84-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.450915 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/457cc366-8440-4726-a90c-802e898d0f84-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.450926 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/457cc366-8440-4726-a90c-802e898d0f84-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.450940 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ck8cl\" (UniqueName: \"kubernetes.io/projected/457cc366-8440-4726-a90c-802e898d0f84-kube-api-access-ck8cl\") on node \"crc\" DevicePath \"\"" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.626912 4558 generic.go:334] "Generic (PLEG): container finished" podID="457cc366-8440-4726-a90c-802e898d0f84" containerID="b5233e268751b0797a5b84478b3d5c10f6691e979482bf322a47d667545c4d17" exitCode=0 Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.626959 4558 generic.go:334] "Generic (PLEG): container finished" podID="457cc366-8440-4726-a90c-802e898d0f84" containerID="8a9b68463b99b9617020c5593db8fac279ab4149d91a8414ac8e1be6453c802d" exitCode=2 Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.626969 4558 generic.go:334] "Generic (PLEG): container finished" podID="457cc366-8440-4726-a90c-802e898d0f84" containerID="4a801927d2dcbde10e18b933214d32d7d87a4a051dfb55beedea678bc3623803" exitCode=0 Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.626979 4558 generic.go:334] "Generic (PLEG): container finished" podID="457cc366-8440-4726-a90c-802e898d0f84" containerID="d4b0d41811dc6f17f139d4a0f49ae78ef01768feb7955046498d3ea378596a1a" exitCode=0 Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.627021 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.626956 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"457cc366-8440-4726-a90c-802e898d0f84","Type":"ContainerDied","Data":"b5233e268751b0797a5b84478b3d5c10f6691e979482bf322a47d667545c4d17"} Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.627090 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"457cc366-8440-4726-a90c-802e898d0f84","Type":"ContainerDied","Data":"8a9b68463b99b9617020c5593db8fac279ab4149d91a8414ac8e1be6453c802d"} Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.627105 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"457cc366-8440-4726-a90c-802e898d0f84","Type":"ContainerDied","Data":"4a801927d2dcbde10e18b933214d32d7d87a4a051dfb55beedea678bc3623803"} Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.627119 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"457cc366-8440-4726-a90c-802e898d0f84","Type":"ContainerDied","Data":"d4b0d41811dc6f17f139d4a0f49ae78ef01768feb7955046498d3ea378596a1a"} Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.627133 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"457cc366-8440-4726-a90c-802e898d0f84","Type":"ContainerDied","Data":"8d32ea3ecc33811499b0e4c0e0dd7cc5caf97de37a52eb5958f5444ee0576bf7"} Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.627151 4558 scope.go:117] "RemoveContainer" containerID="b5233e268751b0797a5b84478b3d5c10f6691e979482bf322a47d667545c4d17" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.652429 4558 scope.go:117] "RemoveContainer" containerID="8a9b68463b99b9617020c5593db8fac279ab4149d91a8414ac8e1be6453c802d" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.663727 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.671220 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.677217 4558 scope.go:117] "RemoveContainer" containerID="4a801927d2dcbde10e18b933214d32d7d87a4a051dfb55beedea678bc3623803" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.679507 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:40:49 crc kubenswrapper[4558]: E0120 17:40:49.679853 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="457cc366-8440-4726-a90c-802e898d0f84" containerName="ceilometer-notification-agent" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.679867 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="457cc366-8440-4726-a90c-802e898d0f84" containerName="ceilometer-notification-agent" Jan 20 17:40:49 crc kubenswrapper[4558]: E0120 17:40:49.679883 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="457cc366-8440-4726-a90c-802e898d0f84" containerName="sg-core" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.679902 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="457cc366-8440-4726-a90c-802e898d0f84" containerName="sg-core" Jan 20 17:40:49 crc kubenswrapper[4558]: E0120 17:40:49.679915 4558 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="457cc366-8440-4726-a90c-802e898d0f84" containerName="proxy-httpd" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.679921 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="457cc366-8440-4726-a90c-802e898d0f84" containerName="proxy-httpd" Jan 20 17:40:49 crc kubenswrapper[4558]: E0120 17:40:49.679948 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="457cc366-8440-4726-a90c-802e898d0f84" containerName="ceilometer-central-agent" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.679954 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="457cc366-8440-4726-a90c-802e898d0f84" containerName="ceilometer-central-agent" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.680108 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="457cc366-8440-4726-a90c-802e898d0f84" containerName="ceilometer-central-agent" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.680135 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="457cc366-8440-4726-a90c-802e898d0f84" containerName="proxy-httpd" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.680154 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="457cc366-8440-4726-a90c-802e898d0f84" containerName="sg-core" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.681405 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="457cc366-8440-4726-a90c-802e898d0f84" containerName="ceilometer-notification-agent" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.683026 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.684969 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.685239 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.695547 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.718040 4558 scope.go:117] "RemoveContainer" containerID="d4b0d41811dc6f17f139d4a0f49ae78ef01768feb7955046498d3ea378596a1a" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.737733 4558 scope.go:117] "RemoveContainer" containerID="b5233e268751b0797a5b84478b3d5c10f6691e979482bf322a47d667545c4d17" Jan 20 17:40:49 crc kubenswrapper[4558]: E0120 17:40:49.738260 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b5233e268751b0797a5b84478b3d5c10f6691e979482bf322a47d667545c4d17\": container with ID starting with b5233e268751b0797a5b84478b3d5c10f6691e979482bf322a47d667545c4d17 not found: ID does not exist" containerID="b5233e268751b0797a5b84478b3d5c10f6691e979482bf322a47d667545c4d17" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.738294 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b5233e268751b0797a5b84478b3d5c10f6691e979482bf322a47d667545c4d17"} err="failed to get container status \"b5233e268751b0797a5b84478b3d5c10f6691e979482bf322a47d667545c4d17\": rpc error: code = NotFound desc = could not find container \"b5233e268751b0797a5b84478b3d5c10f6691e979482bf322a47d667545c4d17\": container with ID starting with 
b5233e268751b0797a5b84478b3d5c10f6691e979482bf322a47d667545c4d17 not found: ID does not exist" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.738318 4558 scope.go:117] "RemoveContainer" containerID="8a9b68463b99b9617020c5593db8fac279ab4149d91a8414ac8e1be6453c802d" Jan 20 17:40:49 crc kubenswrapper[4558]: E0120 17:40:49.738602 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8a9b68463b99b9617020c5593db8fac279ab4149d91a8414ac8e1be6453c802d\": container with ID starting with 8a9b68463b99b9617020c5593db8fac279ab4149d91a8414ac8e1be6453c802d not found: ID does not exist" containerID="8a9b68463b99b9617020c5593db8fac279ab4149d91a8414ac8e1be6453c802d" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.738654 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8a9b68463b99b9617020c5593db8fac279ab4149d91a8414ac8e1be6453c802d"} err="failed to get container status \"8a9b68463b99b9617020c5593db8fac279ab4149d91a8414ac8e1be6453c802d\": rpc error: code = NotFound desc = could not find container \"8a9b68463b99b9617020c5593db8fac279ab4149d91a8414ac8e1be6453c802d\": container with ID starting with 8a9b68463b99b9617020c5593db8fac279ab4149d91a8414ac8e1be6453c802d not found: ID does not exist" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.738685 4558 scope.go:117] "RemoveContainer" containerID="4a801927d2dcbde10e18b933214d32d7d87a4a051dfb55beedea678bc3623803" Jan 20 17:40:49 crc kubenswrapper[4558]: E0120 17:40:49.739121 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4a801927d2dcbde10e18b933214d32d7d87a4a051dfb55beedea678bc3623803\": container with ID starting with 4a801927d2dcbde10e18b933214d32d7d87a4a051dfb55beedea678bc3623803 not found: ID does not exist" containerID="4a801927d2dcbde10e18b933214d32d7d87a4a051dfb55beedea678bc3623803" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.739154 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4a801927d2dcbde10e18b933214d32d7d87a4a051dfb55beedea678bc3623803"} err="failed to get container status \"4a801927d2dcbde10e18b933214d32d7d87a4a051dfb55beedea678bc3623803\": rpc error: code = NotFound desc = could not find container \"4a801927d2dcbde10e18b933214d32d7d87a4a051dfb55beedea678bc3623803\": container with ID starting with 4a801927d2dcbde10e18b933214d32d7d87a4a051dfb55beedea678bc3623803 not found: ID does not exist" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.739186 4558 scope.go:117] "RemoveContainer" containerID="d4b0d41811dc6f17f139d4a0f49ae78ef01768feb7955046498d3ea378596a1a" Jan 20 17:40:49 crc kubenswrapper[4558]: E0120 17:40:49.739489 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d4b0d41811dc6f17f139d4a0f49ae78ef01768feb7955046498d3ea378596a1a\": container with ID starting with d4b0d41811dc6f17f139d4a0f49ae78ef01768feb7955046498d3ea378596a1a not found: ID does not exist" containerID="d4b0d41811dc6f17f139d4a0f49ae78ef01768feb7955046498d3ea378596a1a" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.739510 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d4b0d41811dc6f17f139d4a0f49ae78ef01768feb7955046498d3ea378596a1a"} err="failed to get container status \"d4b0d41811dc6f17f139d4a0f49ae78ef01768feb7955046498d3ea378596a1a\": rpc 
error: code = NotFound desc = could not find container \"d4b0d41811dc6f17f139d4a0f49ae78ef01768feb7955046498d3ea378596a1a\": container with ID starting with d4b0d41811dc6f17f139d4a0f49ae78ef01768feb7955046498d3ea378596a1a not found: ID does not exist" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.739526 4558 scope.go:117] "RemoveContainer" containerID="b5233e268751b0797a5b84478b3d5c10f6691e979482bf322a47d667545c4d17" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.739751 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b5233e268751b0797a5b84478b3d5c10f6691e979482bf322a47d667545c4d17"} err="failed to get container status \"b5233e268751b0797a5b84478b3d5c10f6691e979482bf322a47d667545c4d17\": rpc error: code = NotFound desc = could not find container \"b5233e268751b0797a5b84478b3d5c10f6691e979482bf322a47d667545c4d17\": container with ID starting with b5233e268751b0797a5b84478b3d5c10f6691e979482bf322a47d667545c4d17 not found: ID does not exist" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.739784 4558 scope.go:117] "RemoveContainer" containerID="8a9b68463b99b9617020c5593db8fac279ab4149d91a8414ac8e1be6453c802d" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.740157 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8a9b68463b99b9617020c5593db8fac279ab4149d91a8414ac8e1be6453c802d"} err="failed to get container status \"8a9b68463b99b9617020c5593db8fac279ab4149d91a8414ac8e1be6453c802d\": rpc error: code = NotFound desc = could not find container \"8a9b68463b99b9617020c5593db8fac279ab4149d91a8414ac8e1be6453c802d\": container with ID starting with 8a9b68463b99b9617020c5593db8fac279ab4149d91a8414ac8e1be6453c802d not found: ID does not exist" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.740194 4558 scope.go:117] "RemoveContainer" containerID="4a801927d2dcbde10e18b933214d32d7d87a4a051dfb55beedea678bc3623803" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.740407 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4a801927d2dcbde10e18b933214d32d7d87a4a051dfb55beedea678bc3623803"} err="failed to get container status \"4a801927d2dcbde10e18b933214d32d7d87a4a051dfb55beedea678bc3623803\": rpc error: code = NotFound desc = could not find container \"4a801927d2dcbde10e18b933214d32d7d87a4a051dfb55beedea678bc3623803\": container with ID starting with 4a801927d2dcbde10e18b933214d32d7d87a4a051dfb55beedea678bc3623803 not found: ID does not exist" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.740429 4558 scope.go:117] "RemoveContainer" containerID="d4b0d41811dc6f17f139d4a0f49ae78ef01768feb7955046498d3ea378596a1a" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.740791 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d4b0d41811dc6f17f139d4a0f49ae78ef01768feb7955046498d3ea378596a1a"} err="failed to get container status \"d4b0d41811dc6f17f139d4a0f49ae78ef01768feb7955046498d3ea378596a1a\": rpc error: code = NotFound desc = could not find container \"d4b0d41811dc6f17f139d4a0f49ae78ef01768feb7955046498d3ea378596a1a\": container with ID starting with d4b0d41811dc6f17f139d4a0f49ae78ef01768feb7955046498d3ea378596a1a not found: ID does not exist" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.740811 4558 scope.go:117] "RemoveContainer" containerID="b5233e268751b0797a5b84478b3d5c10f6691e979482bf322a47d667545c4d17" Jan 20 17:40:49 crc 
kubenswrapper[4558]: I0120 17:40:49.741084 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b5233e268751b0797a5b84478b3d5c10f6691e979482bf322a47d667545c4d17"} err="failed to get container status \"b5233e268751b0797a5b84478b3d5c10f6691e979482bf322a47d667545c4d17\": rpc error: code = NotFound desc = could not find container \"b5233e268751b0797a5b84478b3d5c10f6691e979482bf322a47d667545c4d17\": container with ID starting with b5233e268751b0797a5b84478b3d5c10f6691e979482bf322a47d667545c4d17 not found: ID does not exist" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.741110 4558 scope.go:117] "RemoveContainer" containerID="8a9b68463b99b9617020c5593db8fac279ab4149d91a8414ac8e1be6453c802d" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.741444 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8a9b68463b99b9617020c5593db8fac279ab4149d91a8414ac8e1be6453c802d"} err="failed to get container status \"8a9b68463b99b9617020c5593db8fac279ab4149d91a8414ac8e1be6453c802d\": rpc error: code = NotFound desc = could not find container \"8a9b68463b99b9617020c5593db8fac279ab4149d91a8414ac8e1be6453c802d\": container with ID starting with 8a9b68463b99b9617020c5593db8fac279ab4149d91a8414ac8e1be6453c802d not found: ID does not exist" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.741465 4558 scope.go:117] "RemoveContainer" containerID="4a801927d2dcbde10e18b933214d32d7d87a4a051dfb55beedea678bc3623803" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.741773 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4a801927d2dcbde10e18b933214d32d7d87a4a051dfb55beedea678bc3623803"} err="failed to get container status \"4a801927d2dcbde10e18b933214d32d7d87a4a051dfb55beedea678bc3623803\": rpc error: code = NotFound desc = could not find container \"4a801927d2dcbde10e18b933214d32d7d87a4a051dfb55beedea678bc3623803\": container with ID starting with 4a801927d2dcbde10e18b933214d32d7d87a4a051dfb55beedea678bc3623803 not found: ID does not exist" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.741797 4558 scope.go:117] "RemoveContainer" containerID="d4b0d41811dc6f17f139d4a0f49ae78ef01768feb7955046498d3ea378596a1a" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.742092 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d4b0d41811dc6f17f139d4a0f49ae78ef01768feb7955046498d3ea378596a1a"} err="failed to get container status \"d4b0d41811dc6f17f139d4a0f49ae78ef01768feb7955046498d3ea378596a1a\": rpc error: code = NotFound desc = could not find container \"d4b0d41811dc6f17f139d4a0f49ae78ef01768feb7955046498d3ea378596a1a\": container with ID starting with d4b0d41811dc6f17f139d4a0f49ae78ef01768feb7955046498d3ea378596a1a not found: ID does not exist" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.742112 4558 scope.go:117] "RemoveContainer" containerID="b5233e268751b0797a5b84478b3d5c10f6691e979482bf322a47d667545c4d17" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.742375 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b5233e268751b0797a5b84478b3d5c10f6691e979482bf322a47d667545c4d17"} err="failed to get container status \"b5233e268751b0797a5b84478b3d5c10f6691e979482bf322a47d667545c4d17\": rpc error: code = NotFound desc = could not find container \"b5233e268751b0797a5b84478b3d5c10f6691e979482bf322a47d667545c4d17\": container with ID 
starting with b5233e268751b0797a5b84478b3d5c10f6691e979482bf322a47d667545c4d17 not found: ID does not exist" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.742395 4558 scope.go:117] "RemoveContainer" containerID="8a9b68463b99b9617020c5593db8fac279ab4149d91a8414ac8e1be6453c802d" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.742763 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8a9b68463b99b9617020c5593db8fac279ab4149d91a8414ac8e1be6453c802d"} err="failed to get container status \"8a9b68463b99b9617020c5593db8fac279ab4149d91a8414ac8e1be6453c802d\": rpc error: code = NotFound desc = could not find container \"8a9b68463b99b9617020c5593db8fac279ab4149d91a8414ac8e1be6453c802d\": container with ID starting with 8a9b68463b99b9617020c5593db8fac279ab4149d91a8414ac8e1be6453c802d not found: ID does not exist" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.742781 4558 scope.go:117] "RemoveContainer" containerID="4a801927d2dcbde10e18b933214d32d7d87a4a051dfb55beedea678bc3623803" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.743007 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4a801927d2dcbde10e18b933214d32d7d87a4a051dfb55beedea678bc3623803"} err="failed to get container status \"4a801927d2dcbde10e18b933214d32d7d87a4a051dfb55beedea678bc3623803\": rpc error: code = NotFound desc = could not find container \"4a801927d2dcbde10e18b933214d32d7d87a4a051dfb55beedea678bc3623803\": container with ID starting with 4a801927d2dcbde10e18b933214d32d7d87a4a051dfb55beedea678bc3623803 not found: ID does not exist" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.743025 4558 scope.go:117] "RemoveContainer" containerID="d4b0d41811dc6f17f139d4a0f49ae78ef01768feb7955046498d3ea378596a1a" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.743304 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d4b0d41811dc6f17f139d4a0f49ae78ef01768feb7955046498d3ea378596a1a"} err="failed to get container status \"d4b0d41811dc6f17f139d4a0f49ae78ef01768feb7955046498d3ea378596a1a\": rpc error: code = NotFound desc = could not find container \"d4b0d41811dc6f17f139d4a0f49ae78ef01768feb7955046498d3ea378596a1a\": container with ID starting with d4b0d41811dc6f17f139d4a0f49ae78ef01768feb7955046498d3ea378596a1a not found: ID does not exist" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.861047 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-225t7\" (UniqueName: \"kubernetes.io/projected/06ff5e50-9b15-4254-b6a4-8413e47d13be-kube-api-access-225t7\") pod \"ceilometer-0\" (UID: \"06ff5e50-9b15-4254-b6a4-8413e47d13be\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.861150 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/06ff5e50-9b15-4254-b6a4-8413e47d13be-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"06ff5e50-9b15-4254-b6a4-8413e47d13be\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.861208 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06ff5e50-9b15-4254-b6a4-8413e47d13be-combined-ca-bundle\") pod \"ceilometer-0\" (UID: 
\"06ff5e50-9b15-4254-b6a4-8413e47d13be\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.861276 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/06ff5e50-9b15-4254-b6a4-8413e47d13be-scripts\") pod \"ceilometer-0\" (UID: \"06ff5e50-9b15-4254-b6a4-8413e47d13be\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.861962 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/06ff5e50-9b15-4254-b6a4-8413e47d13be-run-httpd\") pod \"ceilometer-0\" (UID: \"06ff5e50-9b15-4254-b6a4-8413e47d13be\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.862216 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/06ff5e50-9b15-4254-b6a4-8413e47d13be-log-httpd\") pod \"ceilometer-0\" (UID: \"06ff5e50-9b15-4254-b6a4-8413e47d13be\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.862290 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06ff5e50-9b15-4254-b6a4-8413e47d13be-config-data\") pod \"ceilometer-0\" (UID: \"06ff5e50-9b15-4254-b6a4-8413e47d13be\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.884191 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-5lxgd" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.963285 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dc54928b-9e6c-45f3-b991-2c58d2a7592d-scripts\") pod \"dc54928b-9e6c-45f3-b991-2c58d2a7592d\" (UID: \"dc54928b-9e6c-45f3-b991-2c58d2a7592d\") " Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.963335 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc54928b-9e6c-45f3-b991-2c58d2a7592d-combined-ca-bundle\") pod \"dc54928b-9e6c-45f3-b991-2c58d2a7592d\" (UID: \"dc54928b-9e6c-45f3-b991-2c58d2a7592d\") " Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.963579 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc54928b-9e6c-45f3-b991-2c58d2a7592d-config-data\") pod \"dc54928b-9e6c-45f3-b991-2c58d2a7592d\" (UID: \"dc54928b-9e6c-45f3-b991-2c58d2a7592d\") " Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.963625 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5zptg\" (UniqueName: \"kubernetes.io/projected/dc54928b-9e6c-45f3-b991-2c58d2a7592d-kube-api-access-5zptg\") pod \"dc54928b-9e6c-45f3-b991-2c58d2a7592d\" (UID: \"dc54928b-9e6c-45f3-b991-2c58d2a7592d\") " Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.963943 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/06ff5e50-9b15-4254-b6a4-8413e47d13be-log-httpd\") pod \"ceilometer-0\" (UID: \"06ff5e50-9b15-4254-b6a4-8413e47d13be\") " 
pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.963986 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06ff5e50-9b15-4254-b6a4-8413e47d13be-config-data\") pod \"ceilometer-0\" (UID: \"06ff5e50-9b15-4254-b6a4-8413e47d13be\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.964057 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-225t7\" (UniqueName: \"kubernetes.io/projected/06ff5e50-9b15-4254-b6a4-8413e47d13be-kube-api-access-225t7\") pod \"ceilometer-0\" (UID: \"06ff5e50-9b15-4254-b6a4-8413e47d13be\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.964311 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/06ff5e50-9b15-4254-b6a4-8413e47d13be-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"06ff5e50-9b15-4254-b6a4-8413e47d13be\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.964426 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06ff5e50-9b15-4254-b6a4-8413e47d13be-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"06ff5e50-9b15-4254-b6a4-8413e47d13be\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.964474 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/06ff5e50-9b15-4254-b6a4-8413e47d13be-scripts\") pod \"ceilometer-0\" (UID: \"06ff5e50-9b15-4254-b6a4-8413e47d13be\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.964678 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/06ff5e50-9b15-4254-b6a4-8413e47d13be-run-httpd\") pod \"ceilometer-0\" (UID: \"06ff5e50-9b15-4254-b6a4-8413e47d13be\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.965299 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/06ff5e50-9b15-4254-b6a4-8413e47d13be-run-httpd\") pod \"ceilometer-0\" (UID: \"06ff5e50-9b15-4254-b6a4-8413e47d13be\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.965554 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/06ff5e50-9b15-4254-b6a4-8413e47d13be-log-httpd\") pod \"ceilometer-0\" (UID: \"06ff5e50-9b15-4254-b6a4-8413e47d13be\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.969218 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc54928b-9e6c-45f3-b991-2c58d2a7592d-scripts" (OuterVolumeSpecName: "scripts") pod "dc54928b-9e6c-45f3-b991-2c58d2a7592d" (UID: "dc54928b-9e6c-45f3-b991-2c58d2a7592d"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.969266 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dc54928b-9e6c-45f3-b991-2c58d2a7592d-kube-api-access-5zptg" (OuterVolumeSpecName: "kube-api-access-5zptg") pod "dc54928b-9e6c-45f3-b991-2c58d2a7592d" (UID: "dc54928b-9e6c-45f3-b991-2c58d2a7592d"). InnerVolumeSpecName "kube-api-access-5zptg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.970308 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06ff5e50-9b15-4254-b6a4-8413e47d13be-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"06ff5e50-9b15-4254-b6a4-8413e47d13be\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.970727 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06ff5e50-9b15-4254-b6a4-8413e47d13be-config-data\") pod \"ceilometer-0\" (UID: \"06ff5e50-9b15-4254-b6a4-8413e47d13be\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.971701 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/06ff5e50-9b15-4254-b6a4-8413e47d13be-scripts\") pod \"ceilometer-0\" (UID: \"06ff5e50-9b15-4254-b6a4-8413e47d13be\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.976487 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/06ff5e50-9b15-4254-b6a4-8413e47d13be-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"06ff5e50-9b15-4254-b6a4-8413e47d13be\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.979015 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-225t7\" (UniqueName: \"kubernetes.io/projected/06ff5e50-9b15-4254-b6a4-8413e47d13be-kube-api-access-225t7\") pod \"ceilometer-0\" (UID: \"06ff5e50-9b15-4254-b6a4-8413e47d13be\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.987624 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc54928b-9e6c-45f3-b991-2c58d2a7592d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dc54928b-9e6c-45f3-b991-2c58d2a7592d" (UID: "dc54928b-9e6c-45f3-b991-2c58d2a7592d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.992992 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc54928b-9e6c-45f3-b991-2c58d2a7592d-config-data" (OuterVolumeSpecName: "config-data") pod "dc54928b-9e6c-45f3-b991-2c58d2a7592d" (UID: "dc54928b-9e6c-45f3-b991-2c58d2a7592d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:40:49 crc kubenswrapper[4558]: I0120 17:40:49.998702 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:50 crc kubenswrapper[4558]: I0120 17:40:50.067195 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dc54928b-9e6c-45f3-b991-2c58d2a7592d-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:40:50 crc kubenswrapper[4558]: I0120 17:40:50.067243 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc54928b-9e6c-45f3-b991-2c58d2a7592d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:40:50 crc kubenswrapper[4558]: I0120 17:40:50.067257 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc54928b-9e6c-45f3-b991-2c58d2a7592d-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:40:50 crc kubenswrapper[4558]: I0120 17:40:50.067270 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5zptg\" (UniqueName: \"kubernetes.io/projected/dc54928b-9e6c-45f3-b991-2c58d2a7592d-kube-api-access-5zptg\") on node \"crc\" DevicePath \"\"" Jan 20 17:40:50 crc kubenswrapper[4558]: I0120 17:40:50.439621 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:40:50 crc kubenswrapper[4558]: W0120 17:40:50.440252 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod06ff5e50_9b15_4254_b6a4_8413e47d13be.slice/crio-a72b1227999924cea94ce938c84c1733dc77adced1b8898813ac62fe4a6ffa5c WatchSource:0}: Error finding container a72b1227999924cea94ce938c84c1733dc77adced1b8898813ac62fe4a6ffa5c: Status 404 returned error can't find the container with id a72b1227999924cea94ce938c84c1733dc77adced1b8898813ac62fe4a6ffa5c Jan 20 17:40:50 crc kubenswrapper[4558]: I0120 17:40:50.598208 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="457cc366-8440-4726-a90c-802e898d0f84" path="/var/lib/kubelet/pods/457cc366-8440-4726-a90c-802e898d0f84/volumes" Jan 20 17:40:50 crc kubenswrapper[4558]: I0120 17:40:50.636202 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"06ff5e50-9b15-4254-b6a4-8413e47d13be","Type":"ContainerStarted","Data":"a72b1227999924cea94ce938c84c1733dc77adced1b8898813ac62fe4a6ffa5c"} Jan 20 17:40:50 crc kubenswrapper[4558]: I0120 17:40:50.637698 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-5lxgd" event={"ID":"dc54928b-9e6c-45f3-b991-2c58d2a7592d","Type":"ContainerDied","Data":"a621472962912ada52cbe316012f3187b15fab8ccdcfcd704463c38fb3b24aa1"} Jan 20 17:40:50 crc kubenswrapper[4558]: I0120 17:40:50.637759 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a621472962912ada52cbe316012f3187b15fab8ccdcfcd704463c38fb3b24aa1" Jan 20 17:40:50 crc kubenswrapper[4558]: I0120 17:40:50.637726 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-5lxgd" Jan 20 17:40:50 crc kubenswrapper[4558]: I0120 17:40:50.706561 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:40:50 crc kubenswrapper[4558]: E0120 17:40:50.706962 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc54928b-9e6c-45f3-b991-2c58d2a7592d" containerName="nova-cell0-conductor-db-sync" Jan 20 17:40:50 crc kubenswrapper[4558]: I0120 17:40:50.706982 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc54928b-9e6c-45f3-b991-2c58d2a7592d" containerName="nova-cell0-conductor-db-sync" Jan 20 17:40:50 crc kubenswrapper[4558]: I0120 17:40:50.707187 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc54928b-9e6c-45f3-b991-2c58d2a7592d" containerName="nova-cell0-conductor-db-sync" Jan 20 17:40:50 crc kubenswrapper[4558]: I0120 17:40:50.707809 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:40:50 crc kubenswrapper[4558]: I0120 17:40:50.709950 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-nova-dockercfg-6hzq4" Jan 20 17:40:50 crc kubenswrapper[4558]: I0120 17:40:50.710783 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-conductor-config-data" Jan 20 17:40:50 crc kubenswrapper[4558]: I0120 17:40:50.730698 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:40:50 crc kubenswrapper[4558]: I0120 17:40:50.790682 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/365754d1-535b-450b-a80e-1e7402cb28f8-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"365754d1-535b-450b-a80e-1e7402cb28f8\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:40:50 crc kubenswrapper[4558]: I0120 17:40:50.790757 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qnvrs\" (UniqueName: \"kubernetes.io/projected/365754d1-535b-450b-a80e-1e7402cb28f8-kube-api-access-qnvrs\") pod \"nova-cell0-conductor-0\" (UID: \"365754d1-535b-450b-a80e-1e7402cb28f8\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:40:50 crc kubenswrapper[4558]: I0120 17:40:50.790799 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/365754d1-535b-450b-a80e-1e7402cb28f8-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"365754d1-535b-450b-a80e-1e7402cb28f8\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:40:50 crc kubenswrapper[4558]: I0120 17:40:50.893095 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/365754d1-535b-450b-a80e-1e7402cb28f8-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"365754d1-535b-450b-a80e-1e7402cb28f8\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:40:50 crc kubenswrapper[4558]: I0120 17:40:50.893205 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qnvrs\" (UniqueName: \"kubernetes.io/projected/365754d1-535b-450b-a80e-1e7402cb28f8-kube-api-access-qnvrs\") pod \"nova-cell0-conductor-0\" 
(UID: \"365754d1-535b-450b-a80e-1e7402cb28f8\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:40:50 crc kubenswrapper[4558]: I0120 17:40:50.893239 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/365754d1-535b-450b-a80e-1e7402cb28f8-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"365754d1-535b-450b-a80e-1e7402cb28f8\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:40:50 crc kubenswrapper[4558]: I0120 17:40:50.897530 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/365754d1-535b-450b-a80e-1e7402cb28f8-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"365754d1-535b-450b-a80e-1e7402cb28f8\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:40:50 crc kubenswrapper[4558]: I0120 17:40:50.897691 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/365754d1-535b-450b-a80e-1e7402cb28f8-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"365754d1-535b-450b-a80e-1e7402cb28f8\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:40:50 crc kubenswrapper[4558]: I0120 17:40:50.907557 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qnvrs\" (UniqueName: \"kubernetes.io/projected/365754d1-535b-450b-a80e-1e7402cb28f8-kube-api-access-qnvrs\") pod \"nova-cell0-conductor-0\" (UID: \"365754d1-535b-450b-a80e-1e7402cb28f8\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:40:50 crc kubenswrapper[4558]: I0120 17:40:50.957536 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:40:51 crc kubenswrapper[4558]: I0120 17:40:51.047303 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:40:51 crc kubenswrapper[4558]: I0120 17:40:51.532909 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:40:51 crc kubenswrapper[4558]: I0120 17:40:51.655181 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"06ff5e50-9b15-4254-b6a4-8413e47d13be","Type":"ContainerStarted","Data":"6892ceb2647c2ad78634f3b4f9e699894b2fcc258b2d61e8a849317030270554"} Jan 20 17:40:51 crc kubenswrapper[4558]: I0120 17:40:51.656877 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"365754d1-535b-450b-a80e-1e7402cb28f8","Type":"ContainerStarted","Data":"01bba0b887c820027e9ec4400f080e9781ca31ba90a7eca0f8976aa0d03b2d78"} Jan 20 17:40:51 crc kubenswrapper[4558]: I0120 17:40:51.819061 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:40:51 crc kubenswrapper[4558]: I0120 17:40:51.819386 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:40:51 crc kubenswrapper[4558]: I0120 17:40:51.847478 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:40:51 crc kubenswrapper[4558]: I0120 17:40:51.860818 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:40:52 crc kubenswrapper[4558]: I0120 17:40:52.667560 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"06ff5e50-9b15-4254-b6a4-8413e47d13be","Type":"ContainerStarted","Data":"4a299d3a03e68f9af7e70aeb4d630699cd30789a8d0fc310e1cb1720f2d75490"} Jan 20 17:40:52 crc kubenswrapper[4558]: I0120 17:40:52.668596 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"06ff5e50-9b15-4254-b6a4-8413e47d13be","Type":"ContainerStarted","Data":"28f185786c0598eeb790435cd9687bbc69b4417bce8f98e235cb346b871b2014"} Jan 20 17:40:52 crc kubenswrapper[4558]: I0120 17:40:52.673088 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"365754d1-535b-450b-a80e-1e7402cb28f8","Type":"ContainerStarted","Data":"9c38558131507676fb881bd62fab6cc179a513c6227f85a615c9705711240cbc"} Jan 20 17:40:52 crc kubenswrapper[4558]: I0120 17:40:52.673385 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:40:52 crc kubenswrapper[4558]: I0120 17:40:52.673456 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:40:52 crc kubenswrapper[4558]: I0120 17:40:52.673531 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:40:52 crc kubenswrapper[4558]: I0120 17:40:52.690709 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podStartSLOduration=2.690696997 podStartE2EDuration="2.690696997s" podCreationTimestamp="2026-01-20 17:40:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:40:52.688228976 +0000 UTC m=+3546.448566963" watchObservedRunningTime="2026-01-20 17:40:52.690696997 +0000 UTC m=+3546.451034964" Jan 20 17:40:53 crc kubenswrapper[4558]: I0120 17:40:53.023357 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:40:53 crc kubenswrapper[4558]: I0120 17:40:53.023706 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:40:53 crc kubenswrapper[4558]: I0120 17:40:53.057640 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:40:53 crc kubenswrapper[4558]: I0120 17:40:53.061928 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:40:53 crc kubenswrapper[4558]: I0120 17:40:53.681460 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:40:53 crc kubenswrapper[4558]: I0120 17:40:53.681524 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:40:54 crc kubenswrapper[4558]: I0120 17:40:54.291836 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:40:54 crc kubenswrapper[4558]: I0120 17:40:54.294660 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:40:54 crc kubenswrapper[4558]: I0120 17:40:54.567234 4558 scope.go:117] "RemoveContainer" containerID="2d2a8989ca5a5af98b2c9dc8f801ea0675339ec14800f437c058c5a43c20146b" Jan 20 17:40:54 crc kubenswrapper[4558]: E0120 17:40:54.567687 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:40:54 crc kubenswrapper[4558]: I0120 17:40:54.706303 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"06ff5e50-9b15-4254-b6a4-8413e47d13be","Type":"ContainerStarted","Data":"80912960e71c247c67c1e328b5aba452ffd2335c93a1f9721e9b902ee7868ae2"} Jan 20 17:40:54 crc kubenswrapper[4558]: I0120 17:40:54.706886 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="06ff5e50-9b15-4254-b6a4-8413e47d13be" containerName="ceilometer-central-agent" containerID="cri-o://6892ceb2647c2ad78634f3b4f9e699894b2fcc258b2d61e8a849317030270554" gracePeriod=30 Jan 20 17:40:54 crc kubenswrapper[4558]: I0120 17:40:54.707041 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:40:54 crc kubenswrapper[4558]: I0120 17:40:54.707606 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="06ff5e50-9b15-4254-b6a4-8413e47d13be" containerName="sg-core" 
containerID="cri-o://4a299d3a03e68f9af7e70aeb4d630699cd30789a8d0fc310e1cb1720f2d75490" gracePeriod=30 Jan 20 17:40:54 crc kubenswrapper[4558]: I0120 17:40:54.707587 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="06ff5e50-9b15-4254-b6a4-8413e47d13be" containerName="ceilometer-notification-agent" containerID="cri-o://28f185786c0598eeb790435cd9687bbc69b4417bce8f98e235cb346b871b2014" gracePeriod=30 Jan 20 17:40:54 crc kubenswrapper[4558]: I0120 17:40:54.707439 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="06ff5e50-9b15-4254-b6a4-8413e47d13be" containerName="proxy-httpd" containerID="cri-o://80912960e71c247c67c1e328b5aba452ffd2335c93a1f9721e9b902ee7868ae2" gracePeriod=30 Jan 20 17:40:54 crc kubenswrapper[4558]: I0120 17:40:54.737184 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=2.20270742 podStartE2EDuration="5.737152001s" podCreationTimestamp="2026-01-20 17:40:49 +0000 UTC" firstStartedPulling="2026-01-20 17:40:50.443093774 +0000 UTC m=+3544.203431742" lastFinishedPulling="2026-01-20 17:40:53.977538356 +0000 UTC m=+3547.737876323" observedRunningTime="2026-01-20 17:40:54.727486359 +0000 UTC m=+3548.487824325" watchObservedRunningTime="2026-01-20 17:40:54.737152001 +0000 UTC m=+3548.497489969" Jan 20 17:40:55 crc kubenswrapper[4558]: I0120 17:40:55.304852 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:40:55 crc kubenswrapper[4558]: I0120 17:40:55.339713 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:40:55 crc kubenswrapper[4558]: I0120 17:40:55.723966 4558 generic.go:334] "Generic (PLEG): container finished" podID="06ff5e50-9b15-4254-b6a4-8413e47d13be" containerID="80912960e71c247c67c1e328b5aba452ffd2335c93a1f9721e9b902ee7868ae2" exitCode=0 Jan 20 17:40:55 crc kubenswrapper[4558]: I0120 17:40:55.723997 4558 generic.go:334] "Generic (PLEG): container finished" podID="06ff5e50-9b15-4254-b6a4-8413e47d13be" containerID="4a299d3a03e68f9af7e70aeb4d630699cd30789a8d0fc310e1cb1720f2d75490" exitCode=2 Jan 20 17:40:55 crc kubenswrapper[4558]: I0120 17:40:55.724005 4558 generic.go:334] "Generic (PLEG): container finished" podID="06ff5e50-9b15-4254-b6a4-8413e47d13be" containerID="28f185786c0598eeb790435cd9687bbc69b4417bce8f98e235cb346b871b2014" exitCode=0 Jan 20 17:40:55 crc kubenswrapper[4558]: I0120 17:40:55.724522 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"06ff5e50-9b15-4254-b6a4-8413e47d13be","Type":"ContainerDied","Data":"80912960e71c247c67c1e328b5aba452ffd2335c93a1f9721e9b902ee7868ae2"} Jan 20 17:40:55 crc kubenswrapper[4558]: I0120 17:40:55.724552 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"06ff5e50-9b15-4254-b6a4-8413e47d13be","Type":"ContainerDied","Data":"4a299d3a03e68f9af7e70aeb4d630699cd30789a8d0fc310e1cb1720f2d75490"} Jan 20 17:40:55 crc kubenswrapper[4558]: I0120 17:40:55.724561 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"06ff5e50-9b15-4254-b6a4-8413e47d13be","Type":"ContainerDied","Data":"28f185786c0598eeb790435cd9687bbc69b4417bce8f98e235cb346b871b2014"} Jan 20 17:40:56 crc 
kubenswrapper[4558]: I0120 17:40:56.075077 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:40:56 crc kubenswrapper[4558]: I0120 17:40:56.533855 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell0-cell-mapping-c79qh"] Jan 20 17:40:56 crc kubenswrapper[4558]: I0120 17:40:56.537021 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-c79qh" Jan 20 17:40:56 crc kubenswrapper[4558]: I0120 17:40:56.539281 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-manage-scripts" Jan 20 17:40:56 crc kubenswrapper[4558]: I0120 17:40:56.540076 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-manage-config-data" Jan 20 17:40:56 crc kubenswrapper[4558]: I0120 17:40:56.545715 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-cell-mapping-c79qh"] Jan 20 17:40:56 crc kubenswrapper[4558]: I0120 17:40:56.633690 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cddf5\" (UniqueName: \"kubernetes.io/projected/2ffb225d-0c31-4f91-905e-27ce314ee5df-kube-api-access-cddf5\") pod \"nova-cell0-cell-mapping-c79qh\" (UID: \"2ffb225d-0c31-4f91-905e-27ce314ee5df\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-c79qh" Jan 20 17:40:56 crc kubenswrapper[4558]: I0120 17:40:56.636641 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ffb225d-0c31-4f91-905e-27ce314ee5df-config-data\") pod \"nova-cell0-cell-mapping-c79qh\" (UID: \"2ffb225d-0c31-4f91-905e-27ce314ee5df\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-c79qh" Jan 20 17:40:56 crc kubenswrapper[4558]: I0120 17:40:56.636777 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2ffb225d-0c31-4f91-905e-27ce314ee5df-scripts\") pod \"nova-cell0-cell-mapping-c79qh\" (UID: \"2ffb225d-0c31-4f91-905e-27ce314ee5df\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-c79qh" Jan 20 17:40:56 crc kubenswrapper[4558]: I0120 17:40:56.636833 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ffb225d-0c31-4f91-905e-27ce314ee5df-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-c79qh\" (UID: \"2ffb225d-0c31-4f91-905e-27ce314ee5df\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-c79qh" Jan 20 17:40:56 crc kubenswrapper[4558]: I0120 17:40:56.678114 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:40:56 crc kubenswrapper[4558]: I0120 17:40:56.685531 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:40:56 crc kubenswrapper[4558]: I0120 17:40:56.696187 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-scheduler-config-data" Jan 20 17:40:56 crc kubenswrapper[4558]: I0120 17:40:56.730254 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:40:56 crc kubenswrapper[4558]: I0120 17:40:56.738645 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2ffb225d-0c31-4f91-905e-27ce314ee5df-scripts\") pod \"nova-cell0-cell-mapping-c79qh\" (UID: \"2ffb225d-0c31-4f91-905e-27ce314ee5df\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-c79qh" Jan 20 17:40:56 crc kubenswrapper[4558]: I0120 17:40:56.738738 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ffb225d-0c31-4f91-905e-27ce314ee5df-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-c79qh\" (UID: \"2ffb225d-0c31-4f91-905e-27ce314ee5df\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-c79qh" Jan 20 17:40:56 crc kubenswrapper[4558]: I0120 17:40:56.738837 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cddf5\" (UniqueName: \"kubernetes.io/projected/2ffb225d-0c31-4f91-905e-27ce314ee5df-kube-api-access-cddf5\") pod \"nova-cell0-cell-mapping-c79qh\" (UID: \"2ffb225d-0c31-4f91-905e-27ce314ee5df\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-c79qh" Jan 20 17:40:56 crc kubenswrapper[4558]: I0120 17:40:56.739004 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ffb225d-0c31-4f91-905e-27ce314ee5df-config-data\") pod \"nova-cell0-cell-mapping-c79qh\" (UID: \"2ffb225d-0c31-4f91-905e-27ce314ee5df\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-c79qh" Jan 20 17:40:56 crc kubenswrapper[4558]: I0120 17:40:56.751815 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ffb225d-0c31-4f91-905e-27ce314ee5df-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-c79qh\" (UID: \"2ffb225d-0c31-4f91-905e-27ce314ee5df\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-c79qh" Jan 20 17:40:56 crc kubenswrapper[4558]: I0120 17:40:56.756684 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:40:56 crc kubenswrapper[4558]: I0120 17:40:56.757785 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ffb225d-0c31-4f91-905e-27ce314ee5df-config-data\") pod \"nova-cell0-cell-mapping-c79qh\" (UID: \"2ffb225d-0c31-4f91-905e-27ce314ee5df\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-c79qh" Jan 20 17:40:56 crc kubenswrapper[4558]: I0120 17:40:56.759642 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:40:56 crc kubenswrapper[4558]: I0120 17:40:56.763806 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-novncproxy-config-data" Jan 20 17:40:56 crc kubenswrapper[4558]: I0120 17:40:56.765552 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2ffb225d-0c31-4f91-905e-27ce314ee5df-scripts\") pod \"nova-cell0-cell-mapping-c79qh\" (UID: \"2ffb225d-0c31-4f91-905e-27ce314ee5df\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-c79qh" Jan 20 17:40:56 crc kubenswrapper[4558]: I0120 17:40:56.791148 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cddf5\" (UniqueName: \"kubernetes.io/projected/2ffb225d-0c31-4f91-905e-27ce314ee5df-kube-api-access-cddf5\") pod \"nova-cell0-cell-mapping-c79qh\" (UID: \"2ffb225d-0c31-4f91-905e-27ce314ee5df\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-c79qh" Jan 20 17:40:56 crc kubenswrapper[4558]: I0120 17:40:56.798047 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:40:56 crc kubenswrapper[4558]: I0120 17:40:56.840449 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zq9s8\" (UniqueName: \"kubernetes.io/projected/bc31a46f-26e7-465a-b9d4-ac64686608b1-kube-api-access-zq9s8\") pod \"nova-cell1-novncproxy-0\" (UID: \"bc31a46f-26e7-465a-b9d4-ac64686608b1\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:40:56 crc kubenswrapper[4558]: I0120 17:40:56.840516 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc31a46f-26e7-465a-b9d4-ac64686608b1-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"bc31a46f-26e7-465a-b9d4-ac64686608b1\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:40:56 crc kubenswrapper[4558]: I0120 17:40:56.840570 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f5rvt\" (UniqueName: \"kubernetes.io/projected/3d12dcb7-19c5-4ece-a921-ee9aacbcc0ba-kube-api-access-f5rvt\") pod \"nova-scheduler-0\" (UID: \"3d12dcb7-19c5-4ece-a921-ee9aacbcc0ba\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:40:56 crc kubenswrapper[4558]: I0120 17:40:56.840601 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d12dcb7-19c5-4ece-a921-ee9aacbcc0ba-config-data\") pod \"nova-scheduler-0\" (UID: \"3d12dcb7-19c5-4ece-a921-ee9aacbcc0ba\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:40:56 crc kubenswrapper[4558]: I0120 17:40:56.840920 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bc31a46f-26e7-465a-b9d4-ac64686608b1-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"bc31a46f-26e7-465a-b9d4-ac64686608b1\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:40:56 crc kubenswrapper[4558]: I0120 17:40:56.840970 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d12dcb7-19c5-4ece-a921-ee9aacbcc0ba-combined-ca-bundle\") pod 
\"nova-scheduler-0\" (UID: \"3d12dcb7-19c5-4ece-a921-ee9aacbcc0ba\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:40:56 crc kubenswrapper[4558]: I0120 17:40:56.841592 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:40:56 crc kubenswrapper[4558]: I0120 17:40:56.843397 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:40:56 crc kubenswrapper[4558]: I0120 17:40:56.849631 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-metadata-config-data" Jan 20 17:40:56 crc kubenswrapper[4558]: I0120 17:40:56.857442 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-c79qh" Jan 20 17:40:56 crc kubenswrapper[4558]: I0120 17:40:56.861825 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:40:56 crc kubenswrapper[4558]: I0120 17:40:56.875722 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:40:56 crc kubenswrapper[4558]: I0120 17:40:56.877183 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:40:56 crc kubenswrapper[4558]: I0120 17:40:56.878935 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-api-config-data" Jan 20 17:40:56 crc kubenswrapper[4558]: I0120 17:40:56.879634 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:40:56 crc kubenswrapper[4558]: I0120 17:40:56.943462 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7decf56f-896f-4423-90d6-ce028206d5e3-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"7decf56f-896f-4423-90d6-ce028206d5e3\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:40:56 crc kubenswrapper[4558]: I0120 17:40:56.943517 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zq9s8\" (UniqueName: \"kubernetes.io/projected/bc31a46f-26e7-465a-b9d4-ac64686608b1-kube-api-access-zq9s8\") pod \"nova-cell1-novncproxy-0\" (UID: \"bc31a46f-26e7-465a-b9d4-ac64686608b1\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:40:56 crc kubenswrapper[4558]: I0120 17:40:56.943560 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc31a46f-26e7-465a-b9d4-ac64686608b1-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"bc31a46f-26e7-465a-b9d4-ac64686608b1\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:40:56 crc kubenswrapper[4558]: I0120 17:40:56.943614 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bhrs9\" (UniqueName: \"kubernetes.io/projected/7decf56f-896f-4423-90d6-ce028206d5e3-kube-api-access-bhrs9\") pod \"nova-metadata-0\" (UID: \"7decf56f-896f-4423-90d6-ce028206d5e3\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:40:56 crc kubenswrapper[4558]: I0120 17:40:56.943634 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f5rvt\" (UniqueName: \"kubernetes.io/projected/3d12dcb7-19c5-4ece-a921-ee9aacbcc0ba-kube-api-access-f5rvt\") pod 
\"nova-scheduler-0\" (UID: \"3d12dcb7-19c5-4ece-a921-ee9aacbcc0ba\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:40:56 crc kubenswrapper[4558]: I0120 17:40:56.943660 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7decf56f-896f-4423-90d6-ce028206d5e3-logs\") pod \"nova-metadata-0\" (UID: \"7decf56f-896f-4423-90d6-ce028206d5e3\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:40:56 crc kubenswrapper[4558]: I0120 17:40:56.943675 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/140f3226-9a72-4426-963a-8d8b3ad539e0-config-data\") pod \"nova-api-0\" (UID: \"140f3226-9a72-4426-963a-8d8b3ad539e0\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:40:56 crc kubenswrapper[4558]: I0120 17:40:56.943697 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d12dcb7-19c5-4ece-a921-ee9aacbcc0ba-config-data\") pod \"nova-scheduler-0\" (UID: \"3d12dcb7-19c5-4ece-a921-ee9aacbcc0ba\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:40:56 crc kubenswrapper[4558]: I0120 17:40:56.943721 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/140f3226-9a72-4426-963a-8d8b3ad539e0-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"140f3226-9a72-4426-963a-8d8b3ad539e0\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:40:56 crc kubenswrapper[4558]: I0120 17:40:56.943746 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7decf56f-896f-4423-90d6-ce028206d5e3-config-data\") pod \"nova-metadata-0\" (UID: \"7decf56f-896f-4423-90d6-ce028206d5e3\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:40:56 crc kubenswrapper[4558]: I0120 17:40:56.943771 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/140f3226-9a72-4426-963a-8d8b3ad539e0-logs\") pod \"nova-api-0\" (UID: \"140f3226-9a72-4426-963a-8d8b3ad539e0\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:40:56 crc kubenswrapper[4558]: I0120 17:40:56.943789 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bc31a46f-26e7-465a-b9d4-ac64686608b1-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"bc31a46f-26e7-465a-b9d4-ac64686608b1\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:40:56 crc kubenswrapper[4558]: I0120 17:40:56.943807 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d12dcb7-19c5-4ece-a921-ee9aacbcc0ba-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"3d12dcb7-19c5-4ece-a921-ee9aacbcc0ba\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:40:56 crc kubenswrapper[4558]: I0120 17:40:56.943828 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jmm96\" (UniqueName: \"kubernetes.io/projected/140f3226-9a72-4426-963a-8d8b3ad539e0-kube-api-access-jmm96\") pod \"nova-api-0\" (UID: \"140f3226-9a72-4426-963a-8d8b3ad539e0\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:40:56 crc 
kubenswrapper[4558]: I0120 17:40:56.947488 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc31a46f-26e7-465a-b9d4-ac64686608b1-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"bc31a46f-26e7-465a-b9d4-ac64686608b1\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:40:56 crc kubenswrapper[4558]: I0120 17:40:56.950725 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bc31a46f-26e7-465a-b9d4-ac64686608b1-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"bc31a46f-26e7-465a-b9d4-ac64686608b1\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:40:56 crc kubenswrapper[4558]: I0120 17:40:56.951620 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d12dcb7-19c5-4ece-a921-ee9aacbcc0ba-config-data\") pod \"nova-scheduler-0\" (UID: \"3d12dcb7-19c5-4ece-a921-ee9aacbcc0ba\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:40:56 crc kubenswrapper[4558]: I0120 17:40:56.962512 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zq9s8\" (UniqueName: \"kubernetes.io/projected/bc31a46f-26e7-465a-b9d4-ac64686608b1-kube-api-access-zq9s8\") pod \"nova-cell1-novncproxy-0\" (UID: \"bc31a46f-26e7-465a-b9d4-ac64686608b1\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:40:56 crc kubenswrapper[4558]: I0120 17:40:56.964663 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d12dcb7-19c5-4ece-a921-ee9aacbcc0ba-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"3d12dcb7-19c5-4ece-a921-ee9aacbcc0ba\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:40:56 crc kubenswrapper[4558]: I0120 17:40:56.965729 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f5rvt\" (UniqueName: \"kubernetes.io/projected/3d12dcb7-19c5-4ece-a921-ee9aacbcc0ba-kube-api-access-f5rvt\") pod \"nova-scheduler-0\" (UID: \"3d12dcb7-19c5-4ece-a921-ee9aacbcc0ba\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:40:57 crc kubenswrapper[4558]: I0120 17:40:57.046295 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7decf56f-896f-4423-90d6-ce028206d5e3-logs\") pod \"nova-metadata-0\" (UID: \"7decf56f-896f-4423-90d6-ce028206d5e3\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:40:57 crc kubenswrapper[4558]: I0120 17:40:57.046389 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/140f3226-9a72-4426-963a-8d8b3ad539e0-config-data\") pod \"nova-api-0\" (UID: \"140f3226-9a72-4426-963a-8d8b3ad539e0\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:40:57 crc kubenswrapper[4558]: I0120 17:40:57.046448 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/140f3226-9a72-4426-963a-8d8b3ad539e0-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"140f3226-9a72-4426-963a-8d8b3ad539e0\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:40:57 crc kubenswrapper[4558]: I0120 17:40:57.046494 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/7decf56f-896f-4423-90d6-ce028206d5e3-config-data\") pod \"nova-metadata-0\" (UID: \"7decf56f-896f-4423-90d6-ce028206d5e3\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:40:57 crc kubenswrapper[4558]: I0120 17:40:57.046552 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/140f3226-9a72-4426-963a-8d8b3ad539e0-logs\") pod \"nova-api-0\" (UID: \"140f3226-9a72-4426-963a-8d8b3ad539e0\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:40:57 crc kubenswrapper[4558]: I0120 17:40:57.046614 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jmm96\" (UniqueName: \"kubernetes.io/projected/140f3226-9a72-4426-963a-8d8b3ad539e0-kube-api-access-jmm96\") pod \"nova-api-0\" (UID: \"140f3226-9a72-4426-963a-8d8b3ad539e0\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:40:57 crc kubenswrapper[4558]: I0120 17:40:57.046667 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7decf56f-896f-4423-90d6-ce028206d5e3-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"7decf56f-896f-4423-90d6-ce028206d5e3\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:40:57 crc kubenswrapper[4558]: I0120 17:40:57.046767 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bhrs9\" (UniqueName: \"kubernetes.io/projected/7decf56f-896f-4423-90d6-ce028206d5e3-kube-api-access-bhrs9\") pod \"nova-metadata-0\" (UID: \"7decf56f-896f-4423-90d6-ce028206d5e3\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:40:57 crc kubenswrapper[4558]: I0120 17:40:57.047734 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7decf56f-896f-4423-90d6-ce028206d5e3-logs\") pod \"nova-metadata-0\" (UID: \"7decf56f-896f-4423-90d6-ce028206d5e3\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:40:57 crc kubenswrapper[4558]: I0120 17:40:57.047717 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/140f3226-9a72-4426-963a-8d8b3ad539e0-logs\") pod \"nova-api-0\" (UID: \"140f3226-9a72-4426-963a-8d8b3ad539e0\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:40:57 crc kubenswrapper[4558]: I0120 17:40:57.052013 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/140f3226-9a72-4426-963a-8d8b3ad539e0-config-data\") pod \"nova-api-0\" (UID: \"140f3226-9a72-4426-963a-8d8b3ad539e0\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:40:57 crc kubenswrapper[4558]: I0120 17:40:57.052335 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/140f3226-9a72-4426-963a-8d8b3ad539e0-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"140f3226-9a72-4426-963a-8d8b3ad539e0\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:40:57 crc kubenswrapper[4558]: I0120 17:40:57.055126 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:40:57 crc kubenswrapper[4558]: I0120 17:40:57.058788 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7decf56f-896f-4423-90d6-ce028206d5e3-config-data\") pod \"nova-metadata-0\" (UID: \"7decf56f-896f-4423-90d6-ce028206d5e3\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:40:57 crc kubenswrapper[4558]: I0120 17:40:57.058982 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7decf56f-896f-4423-90d6-ce028206d5e3-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"7decf56f-896f-4423-90d6-ce028206d5e3\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:40:57 crc kubenswrapper[4558]: I0120 17:40:57.071645 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bhrs9\" (UniqueName: \"kubernetes.io/projected/7decf56f-896f-4423-90d6-ce028206d5e3-kube-api-access-bhrs9\") pod \"nova-metadata-0\" (UID: \"7decf56f-896f-4423-90d6-ce028206d5e3\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:40:57 crc kubenswrapper[4558]: I0120 17:40:57.075578 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jmm96\" (UniqueName: \"kubernetes.io/projected/140f3226-9a72-4426-963a-8d8b3ad539e0-kube-api-access-jmm96\") pod \"nova-api-0\" (UID: \"140f3226-9a72-4426-963a-8d8b3ad539e0\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:40:57 crc kubenswrapper[4558]: I0120 17:40:57.145428 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:40:57 crc kubenswrapper[4558]: I0120 17:40:57.161088 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:40:57 crc kubenswrapper[4558]: I0120 17:40:57.293837 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:40:57 crc kubenswrapper[4558]: I0120 17:40:57.352618 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-cell-mapping-c79qh"] Jan 20 17:40:57 crc kubenswrapper[4558]: I0120 17:40:57.533813 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:40:57 crc kubenswrapper[4558]: I0120 17:40:57.712051 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:40:57 crc kubenswrapper[4558]: I0120 17:40:57.741346 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-db-sync-m2qbl"] Jan 20 17:40:57 crc kubenswrapper[4558]: I0120 17:40:57.742818 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-m2qbl" Jan 20 17:40:57 crc kubenswrapper[4558]: I0120 17:40:57.746412 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-conductor-scripts" Jan 20 17:40:57 crc kubenswrapper[4558]: I0120 17:40:57.750234 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-db-sync-m2qbl"] Jan 20 17:40:57 crc kubenswrapper[4558]: I0120 17:40:57.750358 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-conductor-config-data" Jan 20 17:40:57 crc kubenswrapper[4558]: I0120 17:40:57.759934 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:40:57 crc kubenswrapper[4558]: I0120 17:40:57.788365 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-c79qh" event={"ID":"2ffb225d-0c31-4f91-905e-27ce314ee5df","Type":"ContainerStarted","Data":"57768a7bbd565b0a91483b9701925edcdb3062ad3e45489a7ff8722dabc67e7e"} Jan 20 17:40:57 crc kubenswrapper[4558]: I0120 17:40:57.788405 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-c79qh" event={"ID":"2ffb225d-0c31-4f91-905e-27ce314ee5df","Type":"ContainerStarted","Data":"8c50f338169e591502829ce8829e70440d64e0507e39a5f32a279a6a303b42f4"} Jan 20 17:40:57 crc kubenswrapper[4558]: I0120 17:40:57.792055 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"bc31a46f-26e7-465a-b9d4-ac64686608b1","Type":"ContainerStarted","Data":"95a796f3d181da22bbbf8c515598b3d28b5d340fed34324dfd34cf40416b0d23"} Jan 20 17:40:57 crc kubenswrapper[4558]: I0120 17:40:57.796280 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"3d12dcb7-19c5-4ece-a921-ee9aacbcc0ba","Type":"ContainerStarted","Data":"c790c84cfffdc26f3d3bbc1a4556e89ca57b94dd0e7983d7c7dfb65f79b98c2e"} Jan 20 17:40:57 crc kubenswrapper[4558]: I0120 17:40:57.796324 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"3d12dcb7-19c5-4ece-a921-ee9aacbcc0ba","Type":"ContainerStarted","Data":"1334879e622039470e3b362259bd09aedf34ff6518ef48543cd21f56a3973156"} Jan 20 17:40:57 crc kubenswrapper[4558]: I0120 17:40:57.797738 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"7decf56f-896f-4423-90d6-ce028206d5e3","Type":"ContainerStarted","Data":"da45866d2c0d3a99890b76e9ce144fa843ecbf2ba78bccff08456a3f608edfc4"} Jan 20 17:40:57 crc kubenswrapper[4558]: I0120 17:40:57.815084 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-c79qh" podStartSLOduration=1.8150442359999999 podStartE2EDuration="1.815044236s" podCreationTimestamp="2026-01-20 17:40:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:40:57.802362832 +0000 UTC m=+3551.562700800" watchObservedRunningTime="2026-01-20 17:40:57.815044236 +0000 UTC m=+3551.575382193" Jan 20 17:40:57 crc kubenswrapper[4558]: I0120 17:40:57.820596 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-scheduler-0" podStartSLOduration=1.8205831799999999 
podStartE2EDuration="1.82058318s" podCreationTimestamp="2026-01-20 17:40:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:40:57.820498051 +0000 UTC m=+3551.580836018" watchObservedRunningTime="2026-01-20 17:40:57.82058318 +0000 UTC m=+3551.580921147" Jan 20 17:40:57 crc kubenswrapper[4558]: I0120 17:40:57.858559 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:40:57 crc kubenswrapper[4558]: I0120 17:40:57.875732 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c18579d1-589a-4752-9f16-30480da3c14f-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-m2qbl\" (UID: \"c18579d1-589a-4752-9f16-30480da3c14f\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-m2qbl" Jan 20 17:40:57 crc kubenswrapper[4558]: I0120 17:40:57.876066 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-27sq5\" (UniqueName: \"kubernetes.io/projected/c18579d1-589a-4752-9f16-30480da3c14f-kube-api-access-27sq5\") pod \"nova-cell1-conductor-db-sync-m2qbl\" (UID: \"c18579d1-589a-4752-9f16-30480da3c14f\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-m2qbl" Jan 20 17:40:57 crc kubenswrapper[4558]: I0120 17:40:57.876354 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c18579d1-589a-4752-9f16-30480da3c14f-config-data\") pod \"nova-cell1-conductor-db-sync-m2qbl\" (UID: \"c18579d1-589a-4752-9f16-30480da3c14f\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-m2qbl" Jan 20 17:40:57 crc kubenswrapper[4558]: I0120 17:40:57.876520 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c18579d1-589a-4752-9f16-30480da3c14f-scripts\") pod \"nova-cell1-conductor-db-sync-m2qbl\" (UID: \"c18579d1-589a-4752-9f16-30480da3c14f\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-m2qbl" Jan 20 17:40:57 crc kubenswrapper[4558]: W0120 17:40:57.878658 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod140f3226_9a72_4426_963a_8d8b3ad539e0.slice/crio-5487f4ea1e164f8665fc3bb114d792e6c230a3019fd2d1bb0939f5078c2a01de WatchSource:0}: Error finding container 5487f4ea1e164f8665fc3bb114d792e6c230a3019fd2d1bb0939f5078c2a01de: Status 404 returned error can't find the container with id 5487f4ea1e164f8665fc3bb114d792e6c230a3019fd2d1bb0939f5078c2a01de Jan 20 17:40:57 crc kubenswrapper[4558]: I0120 17:40:57.978665 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c18579d1-589a-4752-9f16-30480da3c14f-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-m2qbl\" (UID: \"c18579d1-589a-4752-9f16-30480da3c14f\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-m2qbl" Jan 20 17:40:57 crc kubenswrapper[4558]: I0120 17:40:57.978736 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-27sq5\" (UniqueName: \"kubernetes.io/projected/c18579d1-589a-4752-9f16-30480da3c14f-kube-api-access-27sq5\") pod \"nova-cell1-conductor-db-sync-m2qbl\" (UID: \"c18579d1-589a-4752-9f16-30480da3c14f\") 
" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-m2qbl" Jan 20 17:40:57 crc kubenswrapper[4558]: I0120 17:40:57.978859 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c18579d1-589a-4752-9f16-30480da3c14f-config-data\") pod \"nova-cell1-conductor-db-sync-m2qbl\" (UID: \"c18579d1-589a-4752-9f16-30480da3c14f\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-m2qbl" Jan 20 17:40:57 crc kubenswrapper[4558]: I0120 17:40:57.978946 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c18579d1-589a-4752-9f16-30480da3c14f-scripts\") pod \"nova-cell1-conductor-db-sync-m2qbl\" (UID: \"c18579d1-589a-4752-9f16-30480da3c14f\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-m2qbl" Jan 20 17:40:57 crc kubenswrapper[4558]: I0120 17:40:57.986520 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c18579d1-589a-4752-9f16-30480da3c14f-scripts\") pod \"nova-cell1-conductor-db-sync-m2qbl\" (UID: \"c18579d1-589a-4752-9f16-30480da3c14f\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-m2qbl" Jan 20 17:40:57 crc kubenswrapper[4558]: I0120 17:40:57.988737 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c18579d1-589a-4752-9f16-30480da3c14f-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-m2qbl\" (UID: \"c18579d1-589a-4752-9f16-30480da3c14f\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-m2qbl" Jan 20 17:40:57 crc kubenswrapper[4558]: I0120 17:40:57.989392 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c18579d1-589a-4752-9f16-30480da3c14f-config-data\") pod \"nova-cell1-conductor-db-sync-m2qbl\" (UID: \"c18579d1-589a-4752-9f16-30480da3c14f\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-m2qbl" Jan 20 17:40:57 crc kubenswrapper[4558]: I0120 17:40:57.998585 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-27sq5\" (UniqueName: \"kubernetes.io/projected/c18579d1-589a-4752-9f16-30480da3c14f-kube-api-access-27sq5\") pod \"nova-cell1-conductor-db-sync-m2qbl\" (UID: \"c18579d1-589a-4752-9f16-30480da3c14f\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-m2qbl" Jan 20 17:40:58 crc kubenswrapper[4558]: I0120 17:40:58.295958 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-m2qbl" Jan 20 17:40:58 crc kubenswrapper[4558]: I0120 17:40:58.718291 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-db-sync-m2qbl"] Jan 20 17:40:58 crc kubenswrapper[4558]: W0120 17:40:58.731225 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc18579d1_589a_4752_9f16_30480da3c14f.slice/crio-37a4f9064c277d7babef29e555383efc9b0c26ce9295442d2903cb57c6735a5d WatchSource:0}: Error finding container 37a4f9064c277d7babef29e555383efc9b0c26ce9295442d2903cb57c6735a5d: Status 404 returned error can't find the container with id 37a4f9064c277d7babef29e555383efc9b0c26ce9295442d2903cb57c6735a5d Jan 20 17:40:58 crc kubenswrapper[4558]: I0120 17:40:58.813475 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-m2qbl" event={"ID":"c18579d1-589a-4752-9f16-30480da3c14f","Type":"ContainerStarted","Data":"37a4f9064c277d7babef29e555383efc9b0c26ce9295442d2903cb57c6735a5d"} Jan 20 17:40:58 crc kubenswrapper[4558]: I0120 17:40:58.817591 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"bc31a46f-26e7-465a-b9d4-ac64686608b1","Type":"ContainerStarted","Data":"cd2295ce85ab323f2f2ba5f930f93f397889e2818b0608a160989e9e2ad519eb"} Jan 20 17:40:58 crc kubenswrapper[4558]: I0120 17:40:58.823212 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"7decf56f-896f-4423-90d6-ce028206d5e3","Type":"ContainerStarted","Data":"a8bbee4bc041d4b4b5b8b9119ef4887d06529f4f49afcea4e4838d77e40a87f7"} Jan 20 17:40:58 crc kubenswrapper[4558]: I0120 17:40:58.823269 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"7decf56f-896f-4423-90d6-ce028206d5e3","Type":"ContainerStarted","Data":"e166d08bc915a7f76f290b6329cdc3a178d7639f74a3e4040fe52988cee4dc0f"} Jan 20 17:40:58 crc kubenswrapper[4558]: I0120 17:40:58.826571 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"140f3226-9a72-4426-963a-8d8b3ad539e0","Type":"ContainerStarted","Data":"3345b3bd768ee87c19c2c95a125c933f947a8eb6f633265e3341105b9a77c7ed"} Jan 20 17:40:58 crc kubenswrapper[4558]: I0120 17:40:58.826616 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"140f3226-9a72-4426-963a-8d8b3ad539e0","Type":"ContainerStarted","Data":"b02c1231cba0c1220ed256e2fd10998e5ea769532b4f9ac5970957deba6598c9"} Jan 20 17:40:58 crc kubenswrapper[4558]: I0120 17:40:58.826628 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"140f3226-9a72-4426-963a-8d8b3ad539e0","Type":"ContainerStarted","Data":"5487f4ea1e164f8665fc3bb114d792e6c230a3019fd2d1bb0939f5078c2a01de"} Jan 20 17:40:58 crc kubenswrapper[4558]: I0120 17:40:58.852472 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" podStartSLOduration=2.85245137 podStartE2EDuration="2.85245137s" podCreationTimestamp="2026-01-20 17:40:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:40:58.831532508 +0000 UTC m=+3552.591870474" watchObservedRunningTime="2026-01-20 
17:40:58.85245137 +0000 UTC m=+3552.612789338" Jan 20 17:40:58 crc kubenswrapper[4558]: I0120 17:40:58.854312 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-api-0" podStartSLOduration=2.854300787 podStartE2EDuration="2.854300787s" podCreationTimestamp="2026-01-20 17:40:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:40:58.848006503 +0000 UTC m=+3552.608344470" watchObservedRunningTime="2026-01-20 17:40:58.854300787 +0000 UTC m=+3552.614638754" Jan 20 17:40:58 crc kubenswrapper[4558]: I0120 17:40:58.873970 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-metadata-0" podStartSLOduration=2.873951775 podStartE2EDuration="2.873951775s" podCreationTimestamp="2026-01-20 17:40:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:40:58.869480198 +0000 UTC m=+3552.629818165" watchObservedRunningTime="2026-01-20 17:40:58.873951775 +0000 UTC m=+3552.634289742" Jan 20 17:40:59 crc kubenswrapper[4558]: I0120 17:40:59.835093 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-m2qbl" event={"ID":"c18579d1-589a-4752-9f16-30480da3c14f","Type":"ContainerStarted","Data":"30c26a31b65684929c8aa74de9a1153ce13cdc31951b546f411c4c7b676d7e42"} Jan 20 17:40:59 crc kubenswrapper[4558]: I0120 17:40:59.853517 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-m2qbl" podStartSLOduration=2.853497466 podStartE2EDuration="2.853497466s" podCreationTimestamp="2026-01-20 17:40:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:40:59.853145164 +0000 UTC m=+3553.613483131" watchObservedRunningTime="2026-01-20 17:40:59.853497466 +0000 UTC m=+3553.613835433" Jan 20 17:41:00 crc kubenswrapper[4558]: I0120 17:41:00.433457 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:41:00 crc kubenswrapper[4558]: I0120 17:41:00.448950 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:41:00 crc kubenswrapper[4558]: I0120 17:41:00.626102 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:00 crc kubenswrapper[4558]: I0120 17:41:00.755345 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-225t7\" (UniqueName: \"kubernetes.io/projected/06ff5e50-9b15-4254-b6a4-8413e47d13be-kube-api-access-225t7\") pod \"06ff5e50-9b15-4254-b6a4-8413e47d13be\" (UID: \"06ff5e50-9b15-4254-b6a4-8413e47d13be\") " Jan 20 17:41:00 crc kubenswrapper[4558]: I0120 17:41:00.755471 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/06ff5e50-9b15-4254-b6a4-8413e47d13be-scripts\") pod \"06ff5e50-9b15-4254-b6a4-8413e47d13be\" (UID: \"06ff5e50-9b15-4254-b6a4-8413e47d13be\") " Jan 20 17:41:00 crc kubenswrapper[4558]: I0120 17:41:00.755583 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/06ff5e50-9b15-4254-b6a4-8413e47d13be-log-httpd\") pod \"06ff5e50-9b15-4254-b6a4-8413e47d13be\" (UID: \"06ff5e50-9b15-4254-b6a4-8413e47d13be\") " Jan 20 17:41:00 crc kubenswrapper[4558]: I0120 17:41:00.755667 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/06ff5e50-9b15-4254-b6a4-8413e47d13be-sg-core-conf-yaml\") pod \"06ff5e50-9b15-4254-b6a4-8413e47d13be\" (UID: \"06ff5e50-9b15-4254-b6a4-8413e47d13be\") " Jan 20 17:41:00 crc kubenswrapper[4558]: I0120 17:41:00.755709 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06ff5e50-9b15-4254-b6a4-8413e47d13be-config-data\") pod \"06ff5e50-9b15-4254-b6a4-8413e47d13be\" (UID: \"06ff5e50-9b15-4254-b6a4-8413e47d13be\") " Jan 20 17:41:00 crc kubenswrapper[4558]: I0120 17:41:00.755727 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/06ff5e50-9b15-4254-b6a4-8413e47d13be-run-httpd\") pod \"06ff5e50-9b15-4254-b6a4-8413e47d13be\" (UID: \"06ff5e50-9b15-4254-b6a4-8413e47d13be\") " Jan 20 17:41:00 crc kubenswrapper[4558]: I0120 17:41:00.755802 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06ff5e50-9b15-4254-b6a4-8413e47d13be-combined-ca-bundle\") pod \"06ff5e50-9b15-4254-b6a4-8413e47d13be\" (UID: \"06ff5e50-9b15-4254-b6a4-8413e47d13be\") " Jan 20 17:41:00 crc kubenswrapper[4558]: I0120 17:41:00.756117 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/06ff5e50-9b15-4254-b6a4-8413e47d13be-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "06ff5e50-9b15-4254-b6a4-8413e47d13be" (UID: "06ff5e50-9b15-4254-b6a4-8413e47d13be"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:41:00 crc kubenswrapper[4558]: I0120 17:41:00.756145 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/06ff5e50-9b15-4254-b6a4-8413e47d13be-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "06ff5e50-9b15-4254-b6a4-8413e47d13be" (UID: "06ff5e50-9b15-4254-b6a4-8413e47d13be"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:41:00 crc kubenswrapper[4558]: I0120 17:41:00.756717 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/06ff5e50-9b15-4254-b6a4-8413e47d13be-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:00 crc kubenswrapper[4558]: I0120 17:41:00.756739 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/06ff5e50-9b15-4254-b6a4-8413e47d13be-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:00 crc kubenswrapper[4558]: I0120 17:41:00.763036 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06ff5e50-9b15-4254-b6a4-8413e47d13be-scripts" (OuterVolumeSpecName: "scripts") pod "06ff5e50-9b15-4254-b6a4-8413e47d13be" (UID: "06ff5e50-9b15-4254-b6a4-8413e47d13be"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:41:00 crc kubenswrapper[4558]: I0120 17:41:00.764320 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/06ff5e50-9b15-4254-b6a4-8413e47d13be-kube-api-access-225t7" (OuterVolumeSpecName: "kube-api-access-225t7") pod "06ff5e50-9b15-4254-b6a4-8413e47d13be" (UID: "06ff5e50-9b15-4254-b6a4-8413e47d13be"). InnerVolumeSpecName "kube-api-access-225t7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:41:00 crc kubenswrapper[4558]: I0120 17:41:00.790709 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06ff5e50-9b15-4254-b6a4-8413e47d13be-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "06ff5e50-9b15-4254-b6a4-8413e47d13be" (UID: "06ff5e50-9b15-4254-b6a4-8413e47d13be"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:41:00 crc kubenswrapper[4558]: I0120 17:41:00.818316 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06ff5e50-9b15-4254-b6a4-8413e47d13be-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "06ff5e50-9b15-4254-b6a4-8413e47d13be" (UID: "06ff5e50-9b15-4254-b6a4-8413e47d13be"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:41:00 crc kubenswrapper[4558]: I0120 17:41:00.828724 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06ff5e50-9b15-4254-b6a4-8413e47d13be-config-data" (OuterVolumeSpecName: "config-data") pod "06ff5e50-9b15-4254-b6a4-8413e47d13be" (UID: "06ff5e50-9b15-4254-b6a4-8413e47d13be"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:41:00 crc kubenswrapper[4558]: I0120 17:41:00.847325 4558 generic.go:334] "Generic (PLEG): container finished" podID="06ff5e50-9b15-4254-b6a4-8413e47d13be" containerID="6892ceb2647c2ad78634f3b4f9e699894b2fcc258b2d61e8a849317030270554" exitCode=0 Jan 20 17:41:00 crc kubenswrapper[4558]: I0120 17:41:00.847381 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:00 crc kubenswrapper[4558]: I0120 17:41:00.847432 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"06ff5e50-9b15-4254-b6a4-8413e47d13be","Type":"ContainerDied","Data":"6892ceb2647c2ad78634f3b4f9e699894b2fcc258b2d61e8a849317030270554"} Jan 20 17:41:00 crc kubenswrapper[4558]: I0120 17:41:00.847501 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"06ff5e50-9b15-4254-b6a4-8413e47d13be","Type":"ContainerDied","Data":"a72b1227999924cea94ce938c84c1733dc77adced1b8898813ac62fe4a6ffa5c"} Jan 20 17:41:00 crc kubenswrapper[4558]: I0120 17:41:00.847524 4558 scope.go:117] "RemoveContainer" containerID="80912960e71c247c67c1e328b5aba452ffd2335c93a1f9721e9b902ee7868ae2" Jan 20 17:41:00 crc kubenswrapper[4558]: I0120 17:41:00.847977 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" podUID="bc31a46f-26e7-465a-b9d4-ac64686608b1" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://cd2295ce85ab323f2f2ba5f930f93f397889e2818b0608a160989e9e2ad519eb" gracePeriod=30 Jan 20 17:41:00 crc kubenswrapper[4558]: I0120 17:41:00.848119 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="7decf56f-896f-4423-90d6-ce028206d5e3" containerName="nova-metadata-log" containerID="cri-o://e166d08bc915a7f76f290b6329cdc3a178d7639f74a3e4040fe52988cee4dc0f" gracePeriod=30 Jan 20 17:41:00 crc kubenswrapper[4558]: I0120 17:41:00.848429 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="7decf56f-896f-4423-90d6-ce028206d5e3" containerName="nova-metadata-metadata" containerID="cri-o://a8bbee4bc041d4b4b5b8b9119ef4887d06529f4f49afcea4e4838d77e40a87f7" gracePeriod=30 Jan 20 17:41:00 crc kubenswrapper[4558]: I0120 17:41:00.858047 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-225t7\" (UniqueName: \"kubernetes.io/projected/06ff5e50-9b15-4254-b6a4-8413e47d13be-kube-api-access-225t7\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:00 crc kubenswrapper[4558]: I0120 17:41:00.858068 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/06ff5e50-9b15-4254-b6a4-8413e47d13be-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:00 crc kubenswrapper[4558]: I0120 17:41:00.858089 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/06ff5e50-9b15-4254-b6a4-8413e47d13be-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:00 crc kubenswrapper[4558]: I0120 17:41:00.858099 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06ff5e50-9b15-4254-b6a4-8413e47d13be-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:00 crc kubenswrapper[4558]: I0120 17:41:00.858111 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06ff5e50-9b15-4254-b6a4-8413e47d13be-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:00 crc kubenswrapper[4558]: I0120 17:41:00.889445 4558 scope.go:117] "RemoveContainer" containerID="4a299d3a03e68f9af7e70aeb4d630699cd30789a8d0fc310e1cb1720f2d75490" Jan 20 17:41:00 crc kubenswrapper[4558]: I0120 
17:41:00.895507 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:41:00 crc kubenswrapper[4558]: I0120 17:41:00.905028 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:41:00 crc kubenswrapper[4558]: I0120 17:41:00.916335 4558 scope.go:117] "RemoveContainer" containerID="28f185786c0598eeb790435cd9687bbc69b4417bce8f98e235cb346b871b2014" Jan 20 17:41:00 crc kubenswrapper[4558]: I0120 17:41:00.927799 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:41:00 crc kubenswrapper[4558]: E0120 17:41:00.928355 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06ff5e50-9b15-4254-b6a4-8413e47d13be" containerName="proxy-httpd" Jan 20 17:41:00 crc kubenswrapper[4558]: I0120 17:41:00.928375 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="06ff5e50-9b15-4254-b6a4-8413e47d13be" containerName="proxy-httpd" Jan 20 17:41:00 crc kubenswrapper[4558]: E0120 17:41:00.928404 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06ff5e50-9b15-4254-b6a4-8413e47d13be" containerName="ceilometer-notification-agent" Jan 20 17:41:00 crc kubenswrapper[4558]: I0120 17:41:00.928411 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="06ff5e50-9b15-4254-b6a4-8413e47d13be" containerName="ceilometer-notification-agent" Jan 20 17:41:00 crc kubenswrapper[4558]: E0120 17:41:00.928443 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06ff5e50-9b15-4254-b6a4-8413e47d13be" containerName="ceilometer-central-agent" Jan 20 17:41:00 crc kubenswrapper[4558]: I0120 17:41:00.928451 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="06ff5e50-9b15-4254-b6a4-8413e47d13be" containerName="ceilometer-central-agent" Jan 20 17:41:00 crc kubenswrapper[4558]: E0120 17:41:00.928461 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06ff5e50-9b15-4254-b6a4-8413e47d13be" containerName="sg-core" Jan 20 17:41:00 crc kubenswrapper[4558]: I0120 17:41:00.928467 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="06ff5e50-9b15-4254-b6a4-8413e47d13be" containerName="sg-core" Jan 20 17:41:00 crc kubenswrapper[4558]: I0120 17:41:00.928634 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="06ff5e50-9b15-4254-b6a4-8413e47d13be" containerName="ceilometer-notification-agent" Jan 20 17:41:00 crc kubenswrapper[4558]: I0120 17:41:00.928656 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="06ff5e50-9b15-4254-b6a4-8413e47d13be" containerName="ceilometer-central-agent" Jan 20 17:41:00 crc kubenswrapper[4558]: I0120 17:41:00.928666 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="06ff5e50-9b15-4254-b6a4-8413e47d13be" containerName="sg-core" Jan 20 17:41:00 crc kubenswrapper[4558]: I0120 17:41:00.928676 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="06ff5e50-9b15-4254-b6a4-8413e47d13be" containerName="proxy-httpd" Jan 20 17:41:00 crc kubenswrapper[4558]: I0120 17:41:00.931633 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:00 crc kubenswrapper[4558]: I0120 17:41:00.933950 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:41:00 crc kubenswrapper[4558]: I0120 17:41:00.934206 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:41:00 crc kubenswrapper[4558]: I0120 17:41:00.941798 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:41:00 crc kubenswrapper[4558]: I0120 17:41:00.959564 4558 scope.go:117] "RemoveContainer" containerID="6892ceb2647c2ad78634f3b4f9e699894b2fcc258b2d61e8a849317030270554" Jan 20 17:41:00 crc kubenswrapper[4558]: I0120 17:41:00.984604 4558 scope.go:117] "RemoveContainer" containerID="80912960e71c247c67c1e328b5aba452ffd2335c93a1f9721e9b902ee7868ae2" Jan 20 17:41:00 crc kubenswrapper[4558]: E0120 17:41:00.985562 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"80912960e71c247c67c1e328b5aba452ffd2335c93a1f9721e9b902ee7868ae2\": container with ID starting with 80912960e71c247c67c1e328b5aba452ffd2335c93a1f9721e9b902ee7868ae2 not found: ID does not exist" containerID="80912960e71c247c67c1e328b5aba452ffd2335c93a1f9721e9b902ee7868ae2" Jan 20 17:41:00 crc kubenswrapper[4558]: I0120 17:41:00.985607 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"80912960e71c247c67c1e328b5aba452ffd2335c93a1f9721e9b902ee7868ae2"} err="failed to get container status \"80912960e71c247c67c1e328b5aba452ffd2335c93a1f9721e9b902ee7868ae2\": rpc error: code = NotFound desc = could not find container \"80912960e71c247c67c1e328b5aba452ffd2335c93a1f9721e9b902ee7868ae2\": container with ID starting with 80912960e71c247c67c1e328b5aba452ffd2335c93a1f9721e9b902ee7868ae2 not found: ID does not exist" Jan 20 17:41:00 crc kubenswrapper[4558]: I0120 17:41:00.985640 4558 scope.go:117] "RemoveContainer" containerID="4a299d3a03e68f9af7e70aeb4d630699cd30789a8d0fc310e1cb1720f2d75490" Jan 20 17:41:00 crc kubenswrapper[4558]: E0120 17:41:00.986072 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4a299d3a03e68f9af7e70aeb4d630699cd30789a8d0fc310e1cb1720f2d75490\": container with ID starting with 4a299d3a03e68f9af7e70aeb4d630699cd30789a8d0fc310e1cb1720f2d75490 not found: ID does not exist" containerID="4a299d3a03e68f9af7e70aeb4d630699cd30789a8d0fc310e1cb1720f2d75490" Jan 20 17:41:00 crc kubenswrapper[4558]: I0120 17:41:00.986125 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4a299d3a03e68f9af7e70aeb4d630699cd30789a8d0fc310e1cb1720f2d75490"} err="failed to get container status \"4a299d3a03e68f9af7e70aeb4d630699cd30789a8d0fc310e1cb1720f2d75490\": rpc error: code = NotFound desc = could not find container \"4a299d3a03e68f9af7e70aeb4d630699cd30789a8d0fc310e1cb1720f2d75490\": container with ID starting with 4a299d3a03e68f9af7e70aeb4d630699cd30789a8d0fc310e1cb1720f2d75490 not found: ID does not exist" Jan 20 17:41:00 crc kubenswrapper[4558]: I0120 17:41:00.986231 4558 scope.go:117] "RemoveContainer" containerID="28f185786c0598eeb790435cd9687bbc69b4417bce8f98e235cb346b871b2014" Jan 20 17:41:00 crc kubenswrapper[4558]: E0120 17:41:00.986725 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: 
code = NotFound desc = could not find container \"28f185786c0598eeb790435cd9687bbc69b4417bce8f98e235cb346b871b2014\": container with ID starting with 28f185786c0598eeb790435cd9687bbc69b4417bce8f98e235cb346b871b2014 not found: ID does not exist" containerID="28f185786c0598eeb790435cd9687bbc69b4417bce8f98e235cb346b871b2014" Jan 20 17:41:00 crc kubenswrapper[4558]: I0120 17:41:00.986757 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"28f185786c0598eeb790435cd9687bbc69b4417bce8f98e235cb346b871b2014"} err="failed to get container status \"28f185786c0598eeb790435cd9687bbc69b4417bce8f98e235cb346b871b2014\": rpc error: code = NotFound desc = could not find container \"28f185786c0598eeb790435cd9687bbc69b4417bce8f98e235cb346b871b2014\": container with ID starting with 28f185786c0598eeb790435cd9687bbc69b4417bce8f98e235cb346b871b2014 not found: ID does not exist" Jan 20 17:41:00 crc kubenswrapper[4558]: I0120 17:41:00.986777 4558 scope.go:117] "RemoveContainer" containerID="6892ceb2647c2ad78634f3b4f9e699894b2fcc258b2d61e8a849317030270554" Jan 20 17:41:00 crc kubenswrapper[4558]: E0120 17:41:00.987104 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6892ceb2647c2ad78634f3b4f9e699894b2fcc258b2d61e8a849317030270554\": container with ID starting with 6892ceb2647c2ad78634f3b4f9e699894b2fcc258b2d61e8a849317030270554 not found: ID does not exist" containerID="6892ceb2647c2ad78634f3b4f9e699894b2fcc258b2d61e8a849317030270554" Jan 20 17:41:00 crc kubenswrapper[4558]: I0120 17:41:00.987127 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6892ceb2647c2ad78634f3b4f9e699894b2fcc258b2d61e8a849317030270554"} err="failed to get container status \"6892ceb2647c2ad78634f3b4f9e699894b2fcc258b2d61e8a849317030270554\": rpc error: code = NotFound desc = could not find container \"6892ceb2647c2ad78634f3b4f9e699894b2fcc258b2d61e8a849317030270554\": container with ID starting with 6892ceb2647c2ad78634f3b4f9e699894b2fcc258b2d61e8a849317030270554 not found: ID does not exist" Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.061196 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9056548c-189a-4152-9f2f-bc242e3d4065-scripts\") pod \"ceilometer-0\" (UID: \"9056548c-189a-4152-9f2f-bc242e3d4065\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.061270 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9056548c-189a-4152-9f2f-bc242e3d4065-config-data\") pod \"ceilometer-0\" (UID: \"9056548c-189a-4152-9f2f-bc242e3d4065\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.061310 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mfcsr\" (UniqueName: \"kubernetes.io/projected/9056548c-189a-4152-9f2f-bc242e3d4065-kube-api-access-mfcsr\") pod \"ceilometer-0\" (UID: \"9056548c-189a-4152-9f2f-bc242e3d4065\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.061413 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/9056548c-189a-4152-9f2f-bc242e3d4065-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"9056548c-189a-4152-9f2f-bc242e3d4065\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.061469 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9056548c-189a-4152-9f2f-bc242e3d4065-run-httpd\") pod \"ceilometer-0\" (UID: \"9056548c-189a-4152-9f2f-bc242e3d4065\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.061616 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9056548c-189a-4152-9f2f-bc242e3d4065-log-httpd\") pod \"ceilometer-0\" (UID: \"9056548c-189a-4152-9f2f-bc242e3d4065\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.061655 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9056548c-189a-4152-9f2f-bc242e3d4065-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"9056548c-189a-4152-9f2f-bc242e3d4065\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.165456 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9056548c-189a-4152-9f2f-bc242e3d4065-log-httpd\") pod \"ceilometer-0\" (UID: \"9056548c-189a-4152-9f2f-bc242e3d4065\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.165548 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9056548c-189a-4152-9f2f-bc242e3d4065-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"9056548c-189a-4152-9f2f-bc242e3d4065\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.165627 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9056548c-189a-4152-9f2f-bc242e3d4065-scripts\") pod \"ceilometer-0\" (UID: \"9056548c-189a-4152-9f2f-bc242e3d4065\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.165703 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9056548c-189a-4152-9f2f-bc242e3d4065-config-data\") pod \"ceilometer-0\" (UID: \"9056548c-189a-4152-9f2f-bc242e3d4065\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.165772 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mfcsr\" (UniqueName: \"kubernetes.io/projected/9056548c-189a-4152-9f2f-bc242e3d4065-kube-api-access-mfcsr\") pod \"ceilometer-0\" (UID: \"9056548c-189a-4152-9f2f-bc242e3d4065\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.165800 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9056548c-189a-4152-9f2f-bc242e3d4065-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"9056548c-189a-4152-9f2f-bc242e3d4065\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 
17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.165860 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9056548c-189a-4152-9f2f-bc242e3d4065-run-httpd\") pod \"ceilometer-0\" (UID: \"9056548c-189a-4152-9f2f-bc242e3d4065\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.166473 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9056548c-189a-4152-9f2f-bc242e3d4065-log-httpd\") pod \"ceilometer-0\" (UID: \"9056548c-189a-4152-9f2f-bc242e3d4065\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.170086 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9056548c-189a-4152-9f2f-bc242e3d4065-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"9056548c-189a-4152-9f2f-bc242e3d4065\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.170906 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9056548c-189a-4152-9f2f-bc242e3d4065-run-httpd\") pod \"ceilometer-0\" (UID: \"9056548c-189a-4152-9f2f-bc242e3d4065\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.172568 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9056548c-189a-4152-9f2f-bc242e3d4065-scripts\") pod \"ceilometer-0\" (UID: \"9056548c-189a-4152-9f2f-bc242e3d4065\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.173157 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9056548c-189a-4152-9f2f-bc242e3d4065-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"9056548c-189a-4152-9f2f-bc242e3d4065\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.173358 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9056548c-189a-4152-9f2f-bc242e3d4065-config-data\") pod \"ceilometer-0\" (UID: \"9056548c-189a-4152-9f2f-bc242e3d4065\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.183085 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mfcsr\" (UniqueName: \"kubernetes.io/projected/9056548c-189a-4152-9f2f-bc242e3d4065-kube-api-access-mfcsr\") pod \"ceilometer-0\" (UID: \"9056548c-189a-4152-9f2f-bc242e3d4065\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.251264 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.375009 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.471550 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bhrs9\" (UniqueName: \"kubernetes.io/projected/7decf56f-896f-4423-90d6-ce028206d5e3-kube-api-access-bhrs9\") pod \"7decf56f-896f-4423-90d6-ce028206d5e3\" (UID: \"7decf56f-896f-4423-90d6-ce028206d5e3\") " Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.471920 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7decf56f-896f-4423-90d6-ce028206d5e3-config-data\") pod \"7decf56f-896f-4423-90d6-ce028206d5e3\" (UID: \"7decf56f-896f-4423-90d6-ce028206d5e3\") " Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.472002 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7decf56f-896f-4423-90d6-ce028206d5e3-combined-ca-bundle\") pod \"7decf56f-896f-4423-90d6-ce028206d5e3\" (UID: \"7decf56f-896f-4423-90d6-ce028206d5e3\") " Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.472188 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7decf56f-896f-4423-90d6-ce028206d5e3-logs\") pod \"7decf56f-896f-4423-90d6-ce028206d5e3\" (UID: \"7decf56f-896f-4423-90d6-ce028206d5e3\") " Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.472466 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7decf56f-896f-4423-90d6-ce028206d5e3-logs" (OuterVolumeSpecName: "logs") pod "7decf56f-896f-4423-90d6-ce028206d5e3" (UID: "7decf56f-896f-4423-90d6-ce028206d5e3"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.473131 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7decf56f-896f-4423-90d6-ce028206d5e3-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.485039 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7decf56f-896f-4423-90d6-ce028206d5e3-kube-api-access-bhrs9" (OuterVolumeSpecName: "kube-api-access-bhrs9") pod "7decf56f-896f-4423-90d6-ce028206d5e3" (UID: "7decf56f-896f-4423-90d6-ce028206d5e3"). InnerVolumeSpecName "kube-api-access-bhrs9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.569883 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7decf56f-896f-4423-90d6-ce028206d5e3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7decf56f-896f-4423-90d6-ce028206d5e3" (UID: "7decf56f-896f-4423-90d6-ce028206d5e3"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.577842 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bhrs9\" (UniqueName: \"kubernetes.io/projected/7decf56f-896f-4423-90d6-ce028206d5e3-kube-api-access-bhrs9\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.577870 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7decf56f-896f-4423-90d6-ce028206d5e3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.602391 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7decf56f-896f-4423-90d6-ce028206d5e3-config-data" (OuterVolumeSpecName: "config-data") pod "7decf56f-896f-4423-90d6-ce028206d5e3" (UID: "7decf56f-896f-4423-90d6-ce028206d5e3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.637350 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.688744 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7decf56f-896f-4423-90d6-ce028206d5e3-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.749825 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.790229 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bc31a46f-26e7-465a-b9d4-ac64686608b1-config-data\") pod \"bc31a46f-26e7-465a-b9d4-ac64686608b1\" (UID: \"bc31a46f-26e7-465a-b9d4-ac64686608b1\") " Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.790375 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zq9s8\" (UniqueName: \"kubernetes.io/projected/bc31a46f-26e7-465a-b9d4-ac64686608b1-kube-api-access-zq9s8\") pod \"bc31a46f-26e7-465a-b9d4-ac64686608b1\" (UID: \"bc31a46f-26e7-465a-b9d4-ac64686608b1\") " Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.790448 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc31a46f-26e7-465a-b9d4-ac64686608b1-combined-ca-bundle\") pod \"bc31a46f-26e7-465a-b9d4-ac64686608b1\" (UID: \"bc31a46f-26e7-465a-b9d4-ac64686608b1\") " Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.794104 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc31a46f-26e7-465a-b9d4-ac64686608b1-kube-api-access-zq9s8" (OuterVolumeSpecName: "kube-api-access-zq9s8") pod "bc31a46f-26e7-465a-b9d4-ac64686608b1" (UID: "bc31a46f-26e7-465a-b9d4-ac64686608b1"). InnerVolumeSpecName "kube-api-access-zq9s8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.811440 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc31a46f-26e7-465a-b9d4-ac64686608b1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bc31a46f-26e7-465a-b9d4-ac64686608b1" (UID: "bc31a46f-26e7-465a-b9d4-ac64686608b1"). 
InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.812053 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc31a46f-26e7-465a-b9d4-ac64686608b1-config-data" (OuterVolumeSpecName: "config-data") pod "bc31a46f-26e7-465a-b9d4-ac64686608b1" (UID: "bc31a46f-26e7-465a-b9d4-ac64686608b1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.858106 4558 generic.go:334] "Generic (PLEG): container finished" podID="bc31a46f-26e7-465a-b9d4-ac64686608b1" containerID="cd2295ce85ab323f2f2ba5f930f93f397889e2818b0608a160989e9e2ad519eb" exitCode=0 Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.858156 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.858228 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"bc31a46f-26e7-465a-b9d4-ac64686608b1","Type":"ContainerDied","Data":"cd2295ce85ab323f2f2ba5f930f93f397889e2818b0608a160989e9e2ad519eb"} Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.858266 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"bc31a46f-26e7-465a-b9d4-ac64686608b1","Type":"ContainerDied","Data":"95a796f3d181da22bbbf8c515598b3d28b5d340fed34324dfd34cf40416b0d23"} Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.858286 4558 scope.go:117] "RemoveContainer" containerID="cd2295ce85ab323f2f2ba5f930f93f397889e2818b0608a160989e9e2ad519eb" Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.860548 4558 generic.go:334] "Generic (PLEG): container finished" podID="7decf56f-896f-4423-90d6-ce028206d5e3" containerID="a8bbee4bc041d4b4b5b8b9119ef4887d06529f4f49afcea4e4838d77e40a87f7" exitCode=0 Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.860573 4558 generic.go:334] "Generic (PLEG): container finished" podID="7decf56f-896f-4423-90d6-ce028206d5e3" containerID="e166d08bc915a7f76f290b6329cdc3a178d7639f74a3e4040fe52988cee4dc0f" exitCode=143 Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.860612 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"7decf56f-896f-4423-90d6-ce028206d5e3","Type":"ContainerDied","Data":"a8bbee4bc041d4b4b5b8b9119ef4887d06529f4f49afcea4e4838d77e40a87f7"} Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.860633 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"7decf56f-896f-4423-90d6-ce028206d5e3","Type":"ContainerDied","Data":"e166d08bc915a7f76f290b6329cdc3a178d7639f74a3e4040fe52988cee4dc0f"} Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.860646 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"7decf56f-896f-4423-90d6-ce028206d5e3","Type":"ContainerDied","Data":"da45866d2c0d3a99890b76e9ce144fa843ecbf2ba78bccff08456a3f608edfc4"} Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.860704 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.863880 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"9056548c-189a-4152-9f2f-bc242e3d4065","Type":"ContainerStarted","Data":"8f2b5dbae6c64c19e8835dfe696c1cc6670c91916ab96ba2b2f118a5ad7d74b5"} Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.878100 4558 scope.go:117] "RemoveContainer" containerID="cd2295ce85ab323f2f2ba5f930f93f397889e2818b0608a160989e9e2ad519eb" Jan 20 17:41:01 crc kubenswrapper[4558]: E0120 17:41:01.879094 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cd2295ce85ab323f2f2ba5f930f93f397889e2818b0608a160989e9e2ad519eb\": container with ID starting with cd2295ce85ab323f2f2ba5f930f93f397889e2818b0608a160989e9e2ad519eb not found: ID does not exist" containerID="cd2295ce85ab323f2f2ba5f930f93f397889e2818b0608a160989e9e2ad519eb" Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.879244 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cd2295ce85ab323f2f2ba5f930f93f397889e2818b0608a160989e9e2ad519eb"} err="failed to get container status \"cd2295ce85ab323f2f2ba5f930f93f397889e2818b0608a160989e9e2ad519eb\": rpc error: code = NotFound desc = could not find container \"cd2295ce85ab323f2f2ba5f930f93f397889e2818b0608a160989e9e2ad519eb\": container with ID starting with cd2295ce85ab323f2f2ba5f930f93f397889e2818b0608a160989e9e2ad519eb not found: ID does not exist" Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.879328 4558 scope.go:117] "RemoveContainer" containerID="a8bbee4bc041d4b4b5b8b9119ef4887d06529f4f49afcea4e4838d77e40a87f7" Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.893496 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bc31a46f-26e7-465a-b9d4-ac64686608b1-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.893527 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zq9s8\" (UniqueName: \"kubernetes.io/projected/bc31a46f-26e7-465a-b9d4-ac64686608b1-kube-api-access-zq9s8\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.893539 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bc31a46f-26e7-465a-b9d4-ac64686608b1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.899614 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.917967 4558 scope.go:117] "RemoveContainer" containerID="e166d08bc915a7f76f290b6329cdc3a178d7639f74a3e4040fe52988cee4dc0f" Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.923843 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.941290 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.946089 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:41:01 crc kubenswrapper[4558]: E0120 17:41:01.946667 4558 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="bc31a46f-26e7-465a-b9d4-ac64686608b1" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.946690 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc31a46f-26e7-465a-b9d4-ac64686608b1" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:41:01 crc kubenswrapper[4558]: E0120 17:41:01.946702 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7decf56f-896f-4423-90d6-ce028206d5e3" containerName="nova-metadata-log" Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.946711 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7decf56f-896f-4423-90d6-ce028206d5e3" containerName="nova-metadata-log" Jan 20 17:41:01 crc kubenswrapper[4558]: E0120 17:41:01.946740 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7decf56f-896f-4423-90d6-ce028206d5e3" containerName="nova-metadata-metadata" Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.946747 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7decf56f-896f-4423-90d6-ce028206d5e3" containerName="nova-metadata-metadata" Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.946952 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="7decf56f-896f-4423-90d6-ce028206d5e3" containerName="nova-metadata-metadata" Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.946971 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="7decf56f-896f-4423-90d6-ce028206d5e3" containerName="nova-metadata-log" Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.946988 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="bc31a46f-26e7-465a-b9d4-ac64686608b1" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.947377 4558 scope.go:117] "RemoveContainer" containerID="a8bbee4bc041d4b4b5b8b9119ef4887d06529f4f49afcea4e4838d77e40a87f7" Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.947769 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:41:01 crc kubenswrapper[4558]: E0120 17:41:01.947798 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a8bbee4bc041d4b4b5b8b9119ef4887d06529f4f49afcea4e4838d77e40a87f7\": container with ID starting with a8bbee4bc041d4b4b5b8b9119ef4887d06529f4f49afcea4e4838d77e40a87f7 not found: ID does not exist" containerID="a8bbee4bc041d4b4b5b8b9119ef4887d06529f4f49afcea4e4838d77e40a87f7" Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.947824 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a8bbee4bc041d4b4b5b8b9119ef4887d06529f4f49afcea4e4838d77e40a87f7"} err="failed to get container status \"a8bbee4bc041d4b4b5b8b9119ef4887d06529f4f49afcea4e4838d77e40a87f7\": rpc error: code = NotFound desc = could not find container \"a8bbee4bc041d4b4b5b8b9119ef4887d06529f4f49afcea4e4838d77e40a87f7\": container with ID starting with a8bbee4bc041d4b4b5b8b9119ef4887d06529f4f49afcea4e4838d77e40a87f7 not found: ID does not exist" Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.947846 4558 scope.go:117] "RemoveContainer" containerID="e166d08bc915a7f76f290b6329cdc3a178d7639f74a3e4040fe52988cee4dc0f" Jan 20 17:41:01 crc kubenswrapper[4558]: E0120 17:41:01.948498 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e166d08bc915a7f76f290b6329cdc3a178d7639f74a3e4040fe52988cee4dc0f\": container with ID starting with e166d08bc915a7f76f290b6329cdc3a178d7639f74a3e4040fe52988cee4dc0f not found: ID does not exist" containerID="e166d08bc915a7f76f290b6329cdc3a178d7639f74a3e4040fe52988cee4dc0f" Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.948529 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e166d08bc915a7f76f290b6329cdc3a178d7639f74a3e4040fe52988cee4dc0f"} err="failed to get container status \"e166d08bc915a7f76f290b6329cdc3a178d7639f74a3e4040fe52988cee4dc0f\": rpc error: code = NotFound desc = could not find container \"e166d08bc915a7f76f290b6329cdc3a178d7639f74a3e4040fe52988cee4dc0f\": container with ID starting with e166d08bc915a7f76f290b6329cdc3a178d7639f74a3e4040fe52988cee4dc0f not found: ID does not exist" Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.948545 4558 scope.go:117] "RemoveContainer" containerID="a8bbee4bc041d4b4b5b8b9119ef4887d06529f4f49afcea4e4838d77e40a87f7" Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.949120 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a8bbee4bc041d4b4b5b8b9119ef4887d06529f4f49afcea4e4838d77e40a87f7"} err="failed to get container status \"a8bbee4bc041d4b4b5b8b9119ef4887d06529f4f49afcea4e4838d77e40a87f7\": rpc error: code = NotFound desc = could not find container \"a8bbee4bc041d4b4b5b8b9119ef4887d06529f4f49afcea4e4838d77e40a87f7\": container with ID starting with a8bbee4bc041d4b4b5b8b9119ef4887d06529f4f49afcea4e4838d77e40a87f7 not found: ID does not exist" Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.949142 4558 scope.go:117] "RemoveContainer" containerID="e166d08bc915a7f76f290b6329cdc3a178d7639f74a3e4040fe52988cee4dc0f" Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.949962 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e166d08bc915a7f76f290b6329cdc3a178d7639f74a3e4040fe52988cee4dc0f"} 
err="failed to get container status \"e166d08bc915a7f76f290b6329cdc3a178d7639f74a3e4040fe52988cee4dc0f\": rpc error: code = NotFound desc = could not find container \"e166d08bc915a7f76f290b6329cdc3a178d7639f74a3e4040fe52988cee4dc0f\": container with ID starting with e166d08bc915a7f76f290b6329cdc3a178d7639f74a3e4040fe52988cee4dc0f not found: ID does not exist" Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.953540 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.953667 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-novncproxy-cell1-vencrypt" Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.953758 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-novncproxy-cell1-public-svc" Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.954108 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-novncproxy-config-data" Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.960552 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.968440 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.970429 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.973514 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-metadata-internal-svc" Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.973584 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-metadata-config-data" Jan 20 17:41:01 crc kubenswrapper[4558]: I0120 17:41:01.980024 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:41:02 crc kubenswrapper[4558]: I0120 17:41:02.056226 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:41:02 crc kubenswrapper[4558]: I0120 17:41:02.103128 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/16c17336-8a71-4dc0-af6b-4549cb220f1a-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"16c17336-8a71-4dc0-af6b-4549cb220f1a\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:41:02 crc kubenswrapper[4558]: I0120 17:41:02.103227 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/4a701398-e5b1-4706-bfbf-2045ee831509-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"4a701398-e5b1-4706-bfbf-2045ee831509\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:41:02 crc kubenswrapper[4558]: I0120 17:41:02.103273 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16c17336-8a71-4dc0-af6b-4549cb220f1a-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"16c17336-8a71-4dc0-af6b-4549cb220f1a\") " 
pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:41:02 crc kubenswrapper[4558]: I0120 17:41:02.103301 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xd87d\" (UniqueName: \"kubernetes.io/projected/4a701398-e5b1-4706-bfbf-2045ee831509-kube-api-access-xd87d\") pod \"nova-metadata-0\" (UID: \"4a701398-e5b1-4706-bfbf-2045ee831509\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:41:02 crc kubenswrapper[4558]: I0120 17:41:02.103485 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16c17336-8a71-4dc0-af6b-4549cb220f1a-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"16c17336-8a71-4dc0-af6b-4549cb220f1a\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:41:02 crc kubenswrapper[4558]: I0120 17:41:02.103541 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/16c17336-8a71-4dc0-af6b-4549cb220f1a-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"16c17336-8a71-4dc0-af6b-4549cb220f1a\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:41:02 crc kubenswrapper[4558]: I0120 17:41:02.103672 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4a701398-e5b1-4706-bfbf-2045ee831509-logs\") pod \"nova-metadata-0\" (UID: \"4a701398-e5b1-4706-bfbf-2045ee831509\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:41:02 crc kubenswrapper[4558]: I0120 17:41:02.103817 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a701398-e5b1-4706-bfbf-2045ee831509-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"4a701398-e5b1-4706-bfbf-2045ee831509\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:41:02 crc kubenswrapper[4558]: I0120 17:41:02.103934 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a701398-e5b1-4706-bfbf-2045ee831509-config-data\") pod \"nova-metadata-0\" (UID: \"4a701398-e5b1-4706-bfbf-2045ee831509\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:41:02 crc kubenswrapper[4558]: I0120 17:41:02.104069 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7r6sq\" (UniqueName: \"kubernetes.io/projected/16c17336-8a71-4dc0-af6b-4549cb220f1a-kube-api-access-7r6sq\") pod \"nova-cell1-novncproxy-0\" (UID: \"16c17336-8a71-4dc0-af6b-4549cb220f1a\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:41:02 crc kubenswrapper[4558]: I0120 17:41:02.205577 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a701398-e5b1-4706-bfbf-2045ee831509-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"4a701398-e5b1-4706-bfbf-2045ee831509\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:41:02 crc kubenswrapper[4558]: I0120 17:41:02.205658 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a701398-e5b1-4706-bfbf-2045ee831509-config-data\") pod \"nova-metadata-0\" (UID: 
\"4a701398-e5b1-4706-bfbf-2045ee831509\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:41:02 crc kubenswrapper[4558]: I0120 17:41:02.205719 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7r6sq\" (UniqueName: \"kubernetes.io/projected/16c17336-8a71-4dc0-af6b-4549cb220f1a-kube-api-access-7r6sq\") pod \"nova-cell1-novncproxy-0\" (UID: \"16c17336-8a71-4dc0-af6b-4549cb220f1a\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:41:02 crc kubenswrapper[4558]: I0120 17:41:02.205750 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/16c17336-8a71-4dc0-af6b-4549cb220f1a-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"16c17336-8a71-4dc0-af6b-4549cb220f1a\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:41:02 crc kubenswrapper[4558]: I0120 17:41:02.206467 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/4a701398-e5b1-4706-bfbf-2045ee831509-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"4a701398-e5b1-4706-bfbf-2045ee831509\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:41:02 crc kubenswrapper[4558]: I0120 17:41:02.206617 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16c17336-8a71-4dc0-af6b-4549cb220f1a-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"16c17336-8a71-4dc0-af6b-4549cb220f1a\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:41:02 crc kubenswrapper[4558]: I0120 17:41:02.206659 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xd87d\" (UniqueName: \"kubernetes.io/projected/4a701398-e5b1-4706-bfbf-2045ee831509-kube-api-access-xd87d\") pod \"nova-metadata-0\" (UID: \"4a701398-e5b1-4706-bfbf-2045ee831509\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:41:02 crc kubenswrapper[4558]: I0120 17:41:02.206902 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16c17336-8a71-4dc0-af6b-4549cb220f1a-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"16c17336-8a71-4dc0-af6b-4549cb220f1a\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:41:02 crc kubenswrapper[4558]: I0120 17:41:02.206969 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/16c17336-8a71-4dc0-af6b-4549cb220f1a-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"16c17336-8a71-4dc0-af6b-4549cb220f1a\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:41:02 crc kubenswrapper[4558]: I0120 17:41:02.207037 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4a701398-e5b1-4706-bfbf-2045ee831509-logs\") pod \"nova-metadata-0\" (UID: \"4a701398-e5b1-4706-bfbf-2045ee831509\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:41:02 crc kubenswrapper[4558]: I0120 17:41:02.207582 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4a701398-e5b1-4706-bfbf-2045ee831509-logs\") pod \"nova-metadata-0\" (UID: \"4a701398-e5b1-4706-bfbf-2045ee831509\") " 
pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:41:02 crc kubenswrapper[4558]: I0120 17:41:02.212377 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/16c17336-8a71-4dc0-af6b-4549cb220f1a-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"16c17336-8a71-4dc0-af6b-4549cb220f1a\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:41:02 crc kubenswrapper[4558]: I0120 17:41:02.213053 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/4a701398-e5b1-4706-bfbf-2045ee831509-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"4a701398-e5b1-4706-bfbf-2045ee831509\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:41:02 crc kubenswrapper[4558]: I0120 17:41:02.213303 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16c17336-8a71-4dc0-af6b-4549cb220f1a-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"16c17336-8a71-4dc0-af6b-4549cb220f1a\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:41:02 crc kubenswrapper[4558]: I0120 17:41:02.213860 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a701398-e5b1-4706-bfbf-2045ee831509-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"4a701398-e5b1-4706-bfbf-2045ee831509\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:41:02 crc kubenswrapper[4558]: I0120 17:41:02.214126 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16c17336-8a71-4dc0-af6b-4549cb220f1a-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"16c17336-8a71-4dc0-af6b-4549cb220f1a\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:41:02 crc kubenswrapper[4558]: I0120 17:41:02.214361 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/16c17336-8a71-4dc0-af6b-4549cb220f1a-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"16c17336-8a71-4dc0-af6b-4549cb220f1a\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:41:02 crc kubenswrapper[4558]: I0120 17:41:02.214556 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a701398-e5b1-4706-bfbf-2045ee831509-config-data\") pod \"nova-metadata-0\" (UID: \"4a701398-e5b1-4706-bfbf-2045ee831509\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:41:02 crc kubenswrapper[4558]: I0120 17:41:02.226683 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7r6sq\" (UniqueName: \"kubernetes.io/projected/16c17336-8a71-4dc0-af6b-4549cb220f1a-kube-api-access-7r6sq\") pod \"nova-cell1-novncproxy-0\" (UID: \"16c17336-8a71-4dc0-af6b-4549cb220f1a\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:41:02 crc kubenswrapper[4558]: I0120 17:41:02.227835 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xd87d\" (UniqueName: \"kubernetes.io/projected/4a701398-e5b1-4706-bfbf-2045ee831509-kube-api-access-xd87d\") pod \"nova-metadata-0\" (UID: \"4a701398-e5b1-4706-bfbf-2045ee831509\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:41:02 crc kubenswrapper[4558]: I0120 17:41:02.271390 4558 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:41:02 crc kubenswrapper[4558]: I0120 17:41:02.287057 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:41:02 crc kubenswrapper[4558]: I0120 17:41:02.578950 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="06ff5e50-9b15-4254-b6a4-8413e47d13be" path="/var/lib/kubelet/pods/06ff5e50-9b15-4254-b6a4-8413e47d13be/volumes" Jan 20 17:41:02 crc kubenswrapper[4558]: I0120 17:41:02.580468 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7decf56f-896f-4423-90d6-ce028206d5e3" path="/var/lib/kubelet/pods/7decf56f-896f-4423-90d6-ce028206d5e3/volumes" Jan 20 17:41:02 crc kubenswrapper[4558]: I0120 17:41:02.581145 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc31a46f-26e7-465a-b9d4-ac64686608b1" path="/var/lib/kubelet/pods/bc31a46f-26e7-465a-b9d4-ac64686608b1/volumes" Jan 20 17:41:02 crc kubenswrapper[4558]: I0120 17:41:02.687107 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:41:02 crc kubenswrapper[4558]: I0120 17:41:02.763638 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:41:02 crc kubenswrapper[4558]: W0120 17:41:02.767865 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4a701398_e5b1_4706_bfbf_2045ee831509.slice/crio-820d97e9908fa25ecf573d78942a825bca6d83eb9f25d77bec9cee38e30c64fe WatchSource:0}: Error finding container 820d97e9908fa25ecf573d78942a825bca6d83eb9f25d77bec9cee38e30c64fe: Status 404 returned error can't find the container with id 820d97e9908fa25ecf573d78942a825bca6d83eb9f25d77bec9cee38e30c64fe Jan 20 17:41:02 crc kubenswrapper[4558]: I0120 17:41:02.876287 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"16c17336-8a71-4dc0-af6b-4549cb220f1a","Type":"ContainerStarted","Data":"195f7df2dd875000ad3b06f1e140d95338d2648e3e005bdc5cfdfa3e467de32d"} Jan 20 17:41:02 crc kubenswrapper[4558]: I0120 17:41:02.882005 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"4a701398-e5b1-4706-bfbf-2045ee831509","Type":"ContainerStarted","Data":"820d97e9908fa25ecf573d78942a825bca6d83eb9f25d77bec9cee38e30c64fe"} Jan 20 17:41:03 crc kubenswrapper[4558]: I0120 17:41:03.892946 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"9056548c-189a-4152-9f2f-bc242e3d4065","Type":"ContainerStarted","Data":"a4df86dc40ccb68556c1da99ff22e880e7e4c218b8777739f8a44091f82a17ae"} Jan 20 17:41:03 crc kubenswrapper[4558]: I0120 17:41:03.893233 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"9056548c-189a-4152-9f2f-bc242e3d4065","Type":"ContainerStarted","Data":"effecb70e8c8831ca8d03c15fd3b51392f6b4c26b8ce5779c07930278ade516b"} Jan 20 17:41:03 crc kubenswrapper[4558]: I0120 17:41:03.896120 4558 generic.go:334] "Generic (PLEG): container finished" podID="c18579d1-589a-4752-9f16-30480da3c14f" containerID="30c26a31b65684929c8aa74de9a1153ce13cdc31951b546f411c4c7b676d7e42" exitCode=0 Jan 20 17:41:03 crc kubenswrapper[4558]: I0120 17:41:03.896185 4558 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-m2qbl" event={"ID":"c18579d1-589a-4752-9f16-30480da3c14f","Type":"ContainerDied","Data":"30c26a31b65684929c8aa74de9a1153ce13cdc31951b546f411c4c7b676d7e42"} Jan 20 17:41:03 crc kubenswrapper[4558]: I0120 17:41:03.898222 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"4a701398-e5b1-4706-bfbf-2045ee831509","Type":"ContainerStarted","Data":"4aad50a32a09350cc02cf4f74ae61a5194a0da5e08b7d7ab36e701fb0f1e7f3a"} Jan 20 17:41:03 crc kubenswrapper[4558]: I0120 17:41:03.898249 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"4a701398-e5b1-4706-bfbf-2045ee831509","Type":"ContainerStarted","Data":"41e6e89cee1a53a5c73fb4e71f8e25922ee2d4946a461c4c9549fcee2db09ffd"} Jan 20 17:41:03 crc kubenswrapper[4558]: I0120 17:41:03.900324 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"16c17336-8a71-4dc0-af6b-4549cb220f1a","Type":"ContainerStarted","Data":"c3d63a6587188dce074b663f11815f4d05e4f8b5c572279de91c62b99871fc68"} Jan 20 17:41:03 crc kubenswrapper[4558]: I0120 17:41:03.929141 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" podStartSLOduration=2.929123619 podStartE2EDuration="2.929123619s" podCreationTimestamp="2026-01-20 17:41:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:41:03.924682479 +0000 UTC m=+3557.685020446" watchObservedRunningTime="2026-01-20 17:41:03.929123619 +0000 UTC m=+3557.689461587" Jan 20 17:41:03 crc kubenswrapper[4558]: I0120 17:41:03.943382 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-metadata-0" podStartSLOduration=2.943363143 podStartE2EDuration="2.943363143s" podCreationTimestamp="2026-01-20 17:41:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:41:03.940994841 +0000 UTC m=+3557.701332807" watchObservedRunningTime="2026-01-20 17:41:03.943363143 +0000 UTC m=+3557.703701110" Jan 20 17:41:04 crc kubenswrapper[4558]: I0120 17:41:04.919872 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"9056548c-189a-4152-9f2f-bc242e3d4065","Type":"ContainerStarted","Data":"dd5eac1f346485e04a58a6704a4682b1b992d72c5da31bf96b25e9aa2bc3e97d"} Jan 20 17:41:04 crc kubenswrapper[4558]: I0120 17:41:04.921960 4558 generic.go:334] "Generic (PLEG): container finished" podID="2ffb225d-0c31-4f91-905e-27ce314ee5df" containerID="57768a7bbd565b0a91483b9701925edcdb3062ad3e45489a7ff8722dabc67e7e" exitCode=0 Jan 20 17:41:04 crc kubenswrapper[4558]: I0120 17:41:04.921999 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-c79qh" event={"ID":"2ffb225d-0c31-4f91-905e-27ce314ee5df","Type":"ContainerDied","Data":"57768a7bbd565b0a91483b9701925edcdb3062ad3e45489a7ff8722dabc67e7e"} Jan 20 17:41:05 crc kubenswrapper[4558]: I0120 17:41:05.238028 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-m2qbl" Jan 20 17:41:05 crc kubenswrapper[4558]: I0120 17:41:05.382017 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c18579d1-589a-4752-9f16-30480da3c14f-combined-ca-bundle\") pod \"c18579d1-589a-4752-9f16-30480da3c14f\" (UID: \"c18579d1-589a-4752-9f16-30480da3c14f\") " Jan 20 17:41:05 crc kubenswrapper[4558]: I0120 17:41:05.382411 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c18579d1-589a-4752-9f16-30480da3c14f-config-data\") pod \"c18579d1-589a-4752-9f16-30480da3c14f\" (UID: \"c18579d1-589a-4752-9f16-30480da3c14f\") " Jan 20 17:41:05 crc kubenswrapper[4558]: I0120 17:41:05.382580 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-27sq5\" (UniqueName: \"kubernetes.io/projected/c18579d1-589a-4752-9f16-30480da3c14f-kube-api-access-27sq5\") pod \"c18579d1-589a-4752-9f16-30480da3c14f\" (UID: \"c18579d1-589a-4752-9f16-30480da3c14f\") " Jan 20 17:41:05 crc kubenswrapper[4558]: I0120 17:41:05.382648 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c18579d1-589a-4752-9f16-30480da3c14f-scripts\") pod \"c18579d1-589a-4752-9f16-30480da3c14f\" (UID: \"c18579d1-589a-4752-9f16-30480da3c14f\") " Jan 20 17:41:05 crc kubenswrapper[4558]: I0120 17:41:05.388723 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c18579d1-589a-4752-9f16-30480da3c14f-scripts" (OuterVolumeSpecName: "scripts") pod "c18579d1-589a-4752-9f16-30480da3c14f" (UID: "c18579d1-589a-4752-9f16-30480da3c14f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:41:05 crc kubenswrapper[4558]: I0120 17:41:05.401731 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c18579d1-589a-4752-9f16-30480da3c14f-kube-api-access-27sq5" (OuterVolumeSpecName: "kube-api-access-27sq5") pod "c18579d1-589a-4752-9f16-30480da3c14f" (UID: "c18579d1-589a-4752-9f16-30480da3c14f"). InnerVolumeSpecName "kube-api-access-27sq5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:41:05 crc kubenswrapper[4558]: I0120 17:41:05.408637 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c18579d1-589a-4752-9f16-30480da3c14f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c18579d1-589a-4752-9f16-30480da3c14f" (UID: "c18579d1-589a-4752-9f16-30480da3c14f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:41:05 crc kubenswrapper[4558]: I0120 17:41:05.409036 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c18579d1-589a-4752-9f16-30480da3c14f-config-data" (OuterVolumeSpecName: "config-data") pod "c18579d1-589a-4752-9f16-30480da3c14f" (UID: "c18579d1-589a-4752-9f16-30480da3c14f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:41:05 crc kubenswrapper[4558]: I0120 17:41:05.491966 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-27sq5\" (UniqueName: \"kubernetes.io/projected/c18579d1-589a-4752-9f16-30480da3c14f-kube-api-access-27sq5\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:05 crc kubenswrapper[4558]: I0120 17:41:05.492005 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c18579d1-589a-4752-9f16-30480da3c14f-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:05 crc kubenswrapper[4558]: I0120 17:41:05.492019 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c18579d1-589a-4752-9f16-30480da3c14f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:05 crc kubenswrapper[4558]: I0120 17:41:05.492030 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c18579d1-589a-4752-9f16-30480da3c14f-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:05 crc kubenswrapper[4558]: I0120 17:41:05.941503 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"9056548c-189a-4152-9f2f-bc242e3d4065","Type":"ContainerStarted","Data":"9c2a9892c01f04cdd0fc01158c87b79e229ccc934e665e4307e05ca274eb8df6"} Jan 20 17:41:05 crc kubenswrapper[4558]: I0120 17:41:05.941962 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:05 crc kubenswrapper[4558]: I0120 17:41:05.943858 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-m2qbl" Jan 20 17:41:05 crc kubenswrapper[4558]: I0120 17:41:05.943854 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-m2qbl" event={"ID":"c18579d1-589a-4752-9f16-30480da3c14f","Type":"ContainerDied","Data":"37a4f9064c277d7babef29e555383efc9b0c26ce9295442d2903cb57c6735a5d"} Jan 20 17:41:05 crc kubenswrapper[4558]: I0120 17:41:05.944022 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="37a4f9064c277d7babef29e555383efc9b0c26ce9295442d2903cb57c6735a5d" Jan 20 17:41:05 crc kubenswrapper[4558]: I0120 17:41:05.966661 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=2.318328579 podStartE2EDuration="5.96664619s" podCreationTimestamp="2026-01-20 17:41:00 +0000 UTC" firstStartedPulling="2026-01-20 17:41:01.750728204 +0000 UTC m=+3555.511066171" lastFinishedPulling="2026-01-20 17:41:05.399045814 +0000 UTC m=+3559.159383782" observedRunningTime="2026-01-20 17:41:05.960041461 +0000 UTC m=+3559.720379428" watchObservedRunningTime="2026-01-20 17:41:05.96664619 +0000 UTC m=+3559.726984156" Jan 20 17:41:05 crc kubenswrapper[4558]: I0120 17:41:05.996758 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:41:05 crc kubenswrapper[4558]: E0120 17:41:05.997308 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c18579d1-589a-4752-9f16-30480da3c14f" containerName="nova-cell1-conductor-db-sync" Jan 20 17:41:05 crc kubenswrapper[4558]: I0120 17:41:05.997329 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c18579d1-589a-4752-9f16-30480da3c14f" 
containerName="nova-cell1-conductor-db-sync" Jan 20 17:41:05 crc kubenswrapper[4558]: I0120 17:41:05.997509 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c18579d1-589a-4752-9f16-30480da3c14f" containerName="nova-cell1-conductor-db-sync" Jan 20 17:41:05 crc kubenswrapper[4558]: I0120 17:41:05.998268 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:41:06 crc kubenswrapper[4558]: I0120 17:41:06.002663 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-conductor-config-data" Jan 20 17:41:06 crc kubenswrapper[4558]: I0120 17:41:06.010064 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:41:06 crc kubenswrapper[4558]: I0120 17:41:06.104425 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8df2ac8-5e44-4c2d-a1fb-11e7cdeda894-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"a8df2ac8-5e44-4c2d-a1fb-11e7cdeda894\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:41:06 crc kubenswrapper[4558]: I0120 17:41:06.104598 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8df2ac8-5e44-4c2d-a1fb-11e7cdeda894-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"a8df2ac8-5e44-4c2d-a1fb-11e7cdeda894\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:41:06 crc kubenswrapper[4558]: I0120 17:41:06.104664 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jxvrv\" (UniqueName: \"kubernetes.io/projected/a8df2ac8-5e44-4c2d-a1fb-11e7cdeda894-kube-api-access-jxvrv\") pod \"nova-cell1-conductor-0\" (UID: \"a8df2ac8-5e44-4c2d-a1fb-11e7cdeda894\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:41:06 crc kubenswrapper[4558]: I0120 17:41:06.206923 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8df2ac8-5e44-4c2d-a1fb-11e7cdeda894-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"a8df2ac8-5e44-4c2d-a1fb-11e7cdeda894\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:41:06 crc kubenswrapper[4558]: I0120 17:41:06.207041 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8df2ac8-5e44-4c2d-a1fb-11e7cdeda894-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"a8df2ac8-5e44-4c2d-a1fb-11e7cdeda894\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:41:06 crc kubenswrapper[4558]: I0120 17:41:06.207085 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jxvrv\" (UniqueName: \"kubernetes.io/projected/a8df2ac8-5e44-4c2d-a1fb-11e7cdeda894-kube-api-access-jxvrv\") pod \"nova-cell1-conductor-0\" (UID: \"a8df2ac8-5e44-4c2d-a1fb-11e7cdeda894\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:41:06 crc kubenswrapper[4558]: I0120 17:41:06.213392 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8df2ac8-5e44-4c2d-a1fb-11e7cdeda894-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"a8df2ac8-5e44-4c2d-a1fb-11e7cdeda894\") " 
pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:41:06 crc kubenswrapper[4558]: I0120 17:41:06.216827 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8df2ac8-5e44-4c2d-a1fb-11e7cdeda894-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"a8df2ac8-5e44-4c2d-a1fb-11e7cdeda894\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:41:06 crc kubenswrapper[4558]: I0120 17:41:06.229957 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jxvrv\" (UniqueName: \"kubernetes.io/projected/a8df2ac8-5e44-4c2d-a1fb-11e7cdeda894-kube-api-access-jxvrv\") pod \"nova-cell1-conductor-0\" (UID: \"a8df2ac8-5e44-4c2d-a1fb-11e7cdeda894\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:41:06 crc kubenswrapper[4558]: I0120 17:41:06.300135 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-c79qh" Jan 20 17:41:06 crc kubenswrapper[4558]: I0120 17:41:06.313046 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:41:06 crc kubenswrapper[4558]: I0120 17:41:06.411361 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ffb225d-0c31-4f91-905e-27ce314ee5df-combined-ca-bundle\") pod \"2ffb225d-0c31-4f91-905e-27ce314ee5df\" (UID: \"2ffb225d-0c31-4f91-905e-27ce314ee5df\") " Jan 20 17:41:06 crc kubenswrapper[4558]: I0120 17:41:06.411499 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2ffb225d-0c31-4f91-905e-27ce314ee5df-scripts\") pod \"2ffb225d-0c31-4f91-905e-27ce314ee5df\" (UID: \"2ffb225d-0c31-4f91-905e-27ce314ee5df\") " Jan 20 17:41:06 crc kubenswrapper[4558]: I0120 17:41:06.411614 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cddf5\" (UniqueName: \"kubernetes.io/projected/2ffb225d-0c31-4f91-905e-27ce314ee5df-kube-api-access-cddf5\") pod \"2ffb225d-0c31-4f91-905e-27ce314ee5df\" (UID: \"2ffb225d-0c31-4f91-905e-27ce314ee5df\") " Jan 20 17:41:06 crc kubenswrapper[4558]: I0120 17:41:06.411745 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ffb225d-0c31-4f91-905e-27ce314ee5df-config-data\") pod \"2ffb225d-0c31-4f91-905e-27ce314ee5df\" (UID: \"2ffb225d-0c31-4f91-905e-27ce314ee5df\") " Jan 20 17:41:06 crc kubenswrapper[4558]: I0120 17:41:06.418633 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2ffb225d-0c31-4f91-905e-27ce314ee5df-kube-api-access-cddf5" (OuterVolumeSpecName: "kube-api-access-cddf5") pod "2ffb225d-0c31-4f91-905e-27ce314ee5df" (UID: "2ffb225d-0c31-4f91-905e-27ce314ee5df"). InnerVolumeSpecName "kube-api-access-cddf5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:41:06 crc kubenswrapper[4558]: I0120 17:41:06.418833 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ffb225d-0c31-4f91-905e-27ce314ee5df-scripts" (OuterVolumeSpecName: "scripts") pod "2ffb225d-0c31-4f91-905e-27ce314ee5df" (UID: "2ffb225d-0c31-4f91-905e-27ce314ee5df"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:41:06 crc kubenswrapper[4558]: I0120 17:41:06.434765 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ffb225d-0c31-4f91-905e-27ce314ee5df-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2ffb225d-0c31-4f91-905e-27ce314ee5df" (UID: "2ffb225d-0c31-4f91-905e-27ce314ee5df"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:41:06 crc kubenswrapper[4558]: I0120 17:41:06.442770 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ffb225d-0c31-4f91-905e-27ce314ee5df-config-data" (OuterVolumeSpecName: "config-data") pod "2ffb225d-0c31-4f91-905e-27ce314ee5df" (UID: "2ffb225d-0c31-4f91-905e-27ce314ee5df"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:41:06 crc kubenswrapper[4558]: I0120 17:41:06.515379 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2ffb225d-0c31-4f91-905e-27ce314ee5df-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:06 crc kubenswrapper[4558]: I0120 17:41:06.515407 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ffb225d-0c31-4f91-905e-27ce314ee5df-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:06 crc kubenswrapper[4558]: I0120 17:41:06.515420 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2ffb225d-0c31-4f91-905e-27ce314ee5df-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:06 crc kubenswrapper[4558]: I0120 17:41:06.515431 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cddf5\" (UniqueName: \"kubernetes.io/projected/2ffb225d-0c31-4f91-905e-27ce314ee5df-kube-api-access-cddf5\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:06 crc kubenswrapper[4558]: I0120 17:41:06.572489 4558 scope.go:117] "RemoveContainer" containerID="2d2a8989ca5a5af98b2c9dc8f801ea0675339ec14800f437c058c5a43c20146b" Jan 20 17:41:06 crc kubenswrapper[4558]: E0120 17:41:06.572827 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:41:06 crc kubenswrapper[4558]: I0120 17:41:06.715268 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:41:06 crc kubenswrapper[4558]: I0120 17:41:06.957747 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-c79qh" Jan 20 17:41:06 crc kubenswrapper[4558]: I0120 17:41:06.957774 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-c79qh" event={"ID":"2ffb225d-0c31-4f91-905e-27ce314ee5df","Type":"ContainerDied","Data":"8c50f338169e591502829ce8829e70440d64e0507e39a5f32a279a6a303b42f4"} Jan 20 17:41:06 crc kubenswrapper[4558]: I0120 17:41:06.957815 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8c50f338169e591502829ce8829e70440d64e0507e39a5f32a279a6a303b42f4" Jan 20 17:41:06 crc kubenswrapper[4558]: I0120 17:41:06.960987 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"a8df2ac8-5e44-4c2d-a1fb-11e7cdeda894","Type":"ContainerStarted","Data":"5caa9c83e57be3928128e4ffcdc30d264c7d25b5dfa3de5488249ee33e587529"} Jan 20 17:41:06 crc kubenswrapper[4558]: I0120 17:41:06.961055 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"a8df2ac8-5e44-4c2d-a1fb-11e7cdeda894","Type":"ContainerStarted","Data":"2e9dacc275888dedfec5e084ed7bdaf3e2249013808df66a2ec2a59ddb6bbb30"} Jan 20 17:41:06 crc kubenswrapper[4558]: I0120 17:41:06.961073 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:41:06 crc kubenswrapper[4558]: I0120 17:41:06.992253 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podStartSLOduration=1.992234761 podStartE2EDuration="1.992234761s" podCreationTimestamp="2026-01-20 17:41:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:41:06.981120174 +0000 UTC m=+3560.741458141" watchObservedRunningTime="2026-01-20 17:41:06.992234761 +0000 UTC m=+3560.752572728" Jan 20 17:41:07 crc kubenswrapper[4558]: I0120 17:41:07.056192 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:41:07 crc kubenswrapper[4558]: I0120 17:41:07.096188 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:41:07 crc kubenswrapper[4558]: I0120 17:41:07.136063 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:41:07 crc kubenswrapper[4558]: I0120 17:41:07.136332 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="140f3226-9a72-4426-963a-8d8b3ad539e0" containerName="nova-api-log" containerID="cri-o://b02c1231cba0c1220ed256e2fd10998e5ea769532b4f9ac5970957deba6598c9" gracePeriod=30 Jan 20 17:41:07 crc kubenswrapper[4558]: I0120 17:41:07.136385 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="140f3226-9a72-4426-963a-8d8b3ad539e0" containerName="nova-api-api" containerID="cri-o://3345b3bd768ee87c19c2c95a125c933f947a8eb6f633265e3341105b9a77c7ed" gracePeriod=30 Jan 20 17:41:07 crc kubenswrapper[4558]: I0120 17:41:07.146814 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:41:07 crc kubenswrapper[4558]: I0120 17:41:07.166071 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:41:07 crc kubenswrapper[4558]: I0120 17:41:07.166597 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="4a701398-e5b1-4706-bfbf-2045ee831509" containerName="nova-metadata-log" containerID="cri-o://41e6e89cee1a53a5c73fb4e71f8e25922ee2d4946a461c4c9549fcee2db09ffd" gracePeriod=30 Jan 20 17:41:07 crc kubenswrapper[4558]: I0120 17:41:07.166660 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="4a701398-e5b1-4706-bfbf-2045ee831509" containerName="nova-metadata-metadata" containerID="cri-o://4aad50a32a09350cc02cf4f74ae61a5194a0da5e08b7d7ab36e701fb0f1e7f3a" gracePeriod=30 Jan 20 17:41:07 crc kubenswrapper[4558]: I0120 17:41:07.271488 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:41:07 crc kubenswrapper[4558]: I0120 17:41:07.288084 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:41:07 crc kubenswrapper[4558]: I0120 17:41:07.288198 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:41:07 crc kubenswrapper[4558]: I0120 17:41:07.754456 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:41:07 crc kubenswrapper[4558]: I0120 17:41:07.758848 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:41:07 crc kubenswrapper[4558]: I0120 17:41:07.850794 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/4a701398-e5b1-4706-bfbf-2045ee831509-nova-metadata-tls-certs\") pod \"4a701398-e5b1-4706-bfbf-2045ee831509\" (UID: \"4a701398-e5b1-4706-bfbf-2045ee831509\") " Jan 20 17:41:07 crc kubenswrapper[4558]: I0120 17:41:07.851241 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xd87d\" (UniqueName: \"kubernetes.io/projected/4a701398-e5b1-4706-bfbf-2045ee831509-kube-api-access-xd87d\") pod \"4a701398-e5b1-4706-bfbf-2045ee831509\" (UID: \"4a701398-e5b1-4706-bfbf-2045ee831509\") " Jan 20 17:41:07 crc kubenswrapper[4558]: I0120 17:41:07.851353 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/140f3226-9a72-4426-963a-8d8b3ad539e0-config-data\") pod \"140f3226-9a72-4426-963a-8d8b3ad539e0\" (UID: \"140f3226-9a72-4426-963a-8d8b3ad539e0\") " Jan 20 17:41:07 crc kubenswrapper[4558]: I0120 17:41:07.851434 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jmm96\" (UniqueName: \"kubernetes.io/projected/140f3226-9a72-4426-963a-8d8b3ad539e0-kube-api-access-jmm96\") pod \"140f3226-9a72-4426-963a-8d8b3ad539e0\" (UID: \"140f3226-9a72-4426-963a-8d8b3ad539e0\") " Jan 20 17:41:07 crc kubenswrapper[4558]: I0120 17:41:07.851509 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4a701398-e5b1-4706-bfbf-2045ee831509-logs\") pod \"4a701398-e5b1-4706-bfbf-2045ee831509\" (UID: \"4a701398-e5b1-4706-bfbf-2045ee831509\") " Jan 20 17:41:07 crc kubenswrapper[4558]: I0120 17:41:07.851652 4558 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/140f3226-9a72-4426-963a-8d8b3ad539e0-logs\") pod \"140f3226-9a72-4426-963a-8d8b3ad539e0\" (UID: \"140f3226-9a72-4426-963a-8d8b3ad539e0\") " Jan 20 17:41:07 crc kubenswrapper[4558]: I0120 17:41:07.851980 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a701398-e5b1-4706-bfbf-2045ee831509-config-data\") pod \"4a701398-e5b1-4706-bfbf-2045ee831509\" (UID: \"4a701398-e5b1-4706-bfbf-2045ee831509\") " Jan 20 17:41:07 crc kubenswrapper[4558]: I0120 17:41:07.852073 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/140f3226-9a72-4426-963a-8d8b3ad539e0-combined-ca-bundle\") pod \"140f3226-9a72-4426-963a-8d8b3ad539e0\" (UID: \"140f3226-9a72-4426-963a-8d8b3ad539e0\") " Jan 20 17:41:07 crc kubenswrapper[4558]: I0120 17:41:07.852153 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a701398-e5b1-4706-bfbf-2045ee831509-combined-ca-bundle\") pod \"4a701398-e5b1-4706-bfbf-2045ee831509\" (UID: \"4a701398-e5b1-4706-bfbf-2045ee831509\") " Jan 20 17:41:07 crc kubenswrapper[4558]: I0120 17:41:07.852271 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4a701398-e5b1-4706-bfbf-2045ee831509-logs" (OuterVolumeSpecName: "logs") pod "4a701398-e5b1-4706-bfbf-2045ee831509" (UID: "4a701398-e5b1-4706-bfbf-2045ee831509"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:41:07 crc kubenswrapper[4558]: I0120 17:41:07.852352 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/140f3226-9a72-4426-963a-8d8b3ad539e0-logs" (OuterVolumeSpecName: "logs") pod "140f3226-9a72-4426-963a-8d8b3ad539e0" (UID: "140f3226-9a72-4426-963a-8d8b3ad539e0"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:41:07 crc kubenswrapper[4558]: I0120 17:41:07.852623 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4a701398-e5b1-4706-bfbf-2045ee831509-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:07 crc kubenswrapper[4558]: I0120 17:41:07.852701 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/140f3226-9a72-4426-963a-8d8b3ad539e0-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:07 crc kubenswrapper[4558]: I0120 17:41:07.858209 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/140f3226-9a72-4426-963a-8d8b3ad539e0-kube-api-access-jmm96" (OuterVolumeSpecName: "kube-api-access-jmm96") pod "140f3226-9a72-4426-963a-8d8b3ad539e0" (UID: "140f3226-9a72-4426-963a-8d8b3ad539e0"). InnerVolumeSpecName "kube-api-access-jmm96". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:41:07 crc kubenswrapper[4558]: I0120 17:41:07.858952 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4a701398-e5b1-4706-bfbf-2045ee831509-kube-api-access-xd87d" (OuterVolumeSpecName: "kube-api-access-xd87d") pod "4a701398-e5b1-4706-bfbf-2045ee831509" (UID: "4a701398-e5b1-4706-bfbf-2045ee831509"). InnerVolumeSpecName "kube-api-access-xd87d". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:41:07 crc kubenswrapper[4558]: I0120 17:41:07.885229 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/140f3226-9a72-4426-963a-8d8b3ad539e0-config-data" (OuterVolumeSpecName: "config-data") pod "140f3226-9a72-4426-963a-8d8b3ad539e0" (UID: "140f3226-9a72-4426-963a-8d8b3ad539e0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:41:07 crc kubenswrapper[4558]: I0120 17:41:07.887764 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4a701398-e5b1-4706-bfbf-2045ee831509-config-data" (OuterVolumeSpecName: "config-data") pod "4a701398-e5b1-4706-bfbf-2045ee831509" (UID: "4a701398-e5b1-4706-bfbf-2045ee831509"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:41:07 crc kubenswrapper[4558]: I0120 17:41:07.889388 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4a701398-e5b1-4706-bfbf-2045ee831509-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4a701398-e5b1-4706-bfbf-2045ee831509" (UID: "4a701398-e5b1-4706-bfbf-2045ee831509"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:41:07 crc kubenswrapper[4558]: I0120 17:41:07.889619 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/140f3226-9a72-4426-963a-8d8b3ad539e0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "140f3226-9a72-4426-963a-8d8b3ad539e0" (UID: "140f3226-9a72-4426-963a-8d8b3ad539e0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:41:07 crc kubenswrapper[4558]: I0120 17:41:07.899927 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4a701398-e5b1-4706-bfbf-2045ee831509-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "4a701398-e5b1-4706-bfbf-2045ee831509" (UID: "4a701398-e5b1-4706-bfbf-2045ee831509"). InnerVolumeSpecName "nova-metadata-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:41:07 crc kubenswrapper[4558]: I0120 17:41:07.955323 4558 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/4a701398-e5b1-4706-bfbf-2045ee831509-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:07 crc kubenswrapper[4558]: I0120 17:41:07.955368 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xd87d\" (UniqueName: \"kubernetes.io/projected/4a701398-e5b1-4706-bfbf-2045ee831509-kube-api-access-xd87d\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:07 crc kubenswrapper[4558]: I0120 17:41:07.955383 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/140f3226-9a72-4426-963a-8d8b3ad539e0-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:07 crc kubenswrapper[4558]: I0120 17:41:07.955397 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jmm96\" (UniqueName: \"kubernetes.io/projected/140f3226-9a72-4426-963a-8d8b3ad539e0-kube-api-access-jmm96\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:07 crc kubenswrapper[4558]: I0120 17:41:07.955409 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a701398-e5b1-4706-bfbf-2045ee831509-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:07 crc kubenswrapper[4558]: I0120 17:41:07.955423 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/140f3226-9a72-4426-963a-8d8b3ad539e0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:07 crc kubenswrapper[4558]: I0120 17:41:07.955433 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a701398-e5b1-4706-bfbf-2045ee831509-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:07 crc kubenswrapper[4558]: I0120 17:41:07.973990 4558 generic.go:334] "Generic (PLEG): container finished" podID="4a701398-e5b1-4706-bfbf-2045ee831509" containerID="4aad50a32a09350cc02cf4f74ae61a5194a0da5e08b7d7ab36e701fb0f1e7f3a" exitCode=0 Jan 20 17:41:07 crc kubenswrapper[4558]: I0120 17:41:07.974028 4558 generic.go:334] "Generic (PLEG): container finished" podID="4a701398-e5b1-4706-bfbf-2045ee831509" containerID="41e6e89cee1a53a5c73fb4e71f8e25922ee2d4946a461c4c9549fcee2db09ffd" exitCode=143 Jan 20 17:41:07 crc kubenswrapper[4558]: I0120 17:41:07.974057 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:41:07 crc kubenswrapper[4558]: I0120 17:41:07.974106 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"4a701398-e5b1-4706-bfbf-2045ee831509","Type":"ContainerDied","Data":"4aad50a32a09350cc02cf4f74ae61a5194a0da5e08b7d7ab36e701fb0f1e7f3a"} Jan 20 17:41:07 crc kubenswrapper[4558]: I0120 17:41:07.974141 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"4a701398-e5b1-4706-bfbf-2045ee831509","Type":"ContainerDied","Data":"41e6e89cee1a53a5c73fb4e71f8e25922ee2d4946a461c4c9549fcee2db09ffd"} Jan 20 17:41:07 crc kubenswrapper[4558]: I0120 17:41:07.974154 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"4a701398-e5b1-4706-bfbf-2045ee831509","Type":"ContainerDied","Data":"820d97e9908fa25ecf573d78942a825bca6d83eb9f25d77bec9cee38e30c64fe"} Jan 20 17:41:07 crc kubenswrapper[4558]: I0120 17:41:07.974193 4558 scope.go:117] "RemoveContainer" containerID="4aad50a32a09350cc02cf4f74ae61a5194a0da5e08b7d7ab36e701fb0f1e7f3a" Jan 20 17:41:07 crc kubenswrapper[4558]: I0120 17:41:07.978389 4558 generic.go:334] "Generic (PLEG): container finished" podID="140f3226-9a72-4426-963a-8d8b3ad539e0" containerID="3345b3bd768ee87c19c2c95a125c933f947a8eb6f633265e3341105b9a77c7ed" exitCode=0 Jan 20 17:41:07 crc kubenswrapper[4558]: I0120 17:41:07.978479 4558 generic.go:334] "Generic (PLEG): container finished" podID="140f3226-9a72-4426-963a-8d8b3ad539e0" containerID="b02c1231cba0c1220ed256e2fd10998e5ea769532b4f9ac5970957deba6598c9" exitCode=143 Jan 20 17:41:07 crc kubenswrapper[4558]: I0120 17:41:07.979726 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"140f3226-9a72-4426-963a-8d8b3ad539e0","Type":"ContainerDied","Data":"3345b3bd768ee87c19c2c95a125c933f947a8eb6f633265e3341105b9a77c7ed"} Jan 20 17:41:07 crc kubenswrapper[4558]: I0120 17:41:07.979802 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"140f3226-9a72-4426-963a-8d8b3ad539e0","Type":"ContainerDied","Data":"b02c1231cba0c1220ed256e2fd10998e5ea769532b4f9ac5970957deba6598c9"} Jan 20 17:41:07 crc kubenswrapper[4558]: I0120 17:41:07.979817 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"140f3226-9a72-4426-963a-8d8b3ad539e0","Type":"ContainerDied","Data":"5487f4ea1e164f8665fc3bb114d792e6c230a3019fd2d1bb0939f5078c2a01de"} Jan 20 17:41:07 crc kubenswrapper[4558]: I0120 17:41:07.979951 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.009621 4558 scope.go:117] "RemoveContainer" containerID="41e6e89cee1a53a5c73fb4e71f8e25922ee2d4946a461c4c9549fcee2db09ffd" Jan 20 17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.017799 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.035673 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.035726 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.051425 4558 scope.go:117] "RemoveContainer" containerID="4aad50a32a09350cc02cf4f74ae61a5194a0da5e08b7d7ab36e701fb0f1e7f3a" Jan 20 17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.051660 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:41:08 crc kubenswrapper[4558]: E0120 17:41:08.051861 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4aad50a32a09350cc02cf4f74ae61a5194a0da5e08b7d7ab36e701fb0f1e7f3a\": container with ID starting with 4aad50a32a09350cc02cf4f74ae61a5194a0da5e08b7d7ab36e701fb0f1e7f3a not found: ID does not exist" containerID="4aad50a32a09350cc02cf4f74ae61a5194a0da5e08b7d7ab36e701fb0f1e7f3a" Jan 20 17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.051904 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4aad50a32a09350cc02cf4f74ae61a5194a0da5e08b7d7ab36e701fb0f1e7f3a"} err="failed to get container status \"4aad50a32a09350cc02cf4f74ae61a5194a0da5e08b7d7ab36e701fb0f1e7f3a\": rpc error: code = NotFound desc = could not find container \"4aad50a32a09350cc02cf4f74ae61a5194a0da5e08b7d7ab36e701fb0f1e7f3a\": container with ID starting with 4aad50a32a09350cc02cf4f74ae61a5194a0da5e08b7d7ab36e701fb0f1e7f3a not found: ID does not exist" Jan 20 17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.051929 4558 scope.go:117] "RemoveContainer" containerID="41e6e89cee1a53a5c73fb4e71f8e25922ee2d4946a461c4c9549fcee2db09ffd" Jan 20 17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.051865 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:41:08 crc kubenswrapper[4558]: E0120 17:41:08.052260 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"41e6e89cee1a53a5c73fb4e71f8e25922ee2d4946a461c4c9549fcee2db09ffd\": container with ID starting with 41e6e89cee1a53a5c73fb4e71f8e25922ee2d4946a461c4c9549fcee2db09ffd not found: ID does not exist" containerID="41e6e89cee1a53a5c73fb4e71f8e25922ee2d4946a461c4c9549fcee2db09ffd" Jan 20 17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.052283 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"41e6e89cee1a53a5c73fb4e71f8e25922ee2d4946a461c4c9549fcee2db09ffd"} err="failed to get container status \"41e6e89cee1a53a5c73fb4e71f8e25922ee2d4946a461c4c9549fcee2db09ffd\": rpc error: code = NotFound desc = could not find container \"41e6e89cee1a53a5c73fb4e71f8e25922ee2d4946a461c4c9549fcee2db09ffd\": container with ID starting with 41e6e89cee1a53a5c73fb4e71f8e25922ee2d4946a461c4c9549fcee2db09ffd not found: ID does not exist" Jan 20 
17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.052298 4558 scope.go:117] "RemoveContainer" containerID="4aad50a32a09350cc02cf4f74ae61a5194a0da5e08b7d7ab36e701fb0f1e7f3a" Jan 20 17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.052467 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4aad50a32a09350cc02cf4f74ae61a5194a0da5e08b7d7ab36e701fb0f1e7f3a"} err="failed to get container status \"4aad50a32a09350cc02cf4f74ae61a5194a0da5e08b7d7ab36e701fb0f1e7f3a\": rpc error: code = NotFound desc = could not find container \"4aad50a32a09350cc02cf4f74ae61a5194a0da5e08b7d7ab36e701fb0f1e7f3a\": container with ID starting with 4aad50a32a09350cc02cf4f74ae61a5194a0da5e08b7d7ab36e701fb0f1e7f3a not found: ID does not exist" Jan 20 17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.052483 4558 scope.go:117] "RemoveContainer" containerID="41e6e89cee1a53a5c73fb4e71f8e25922ee2d4946a461c4c9549fcee2db09ffd" Jan 20 17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.053607 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"41e6e89cee1a53a5c73fb4e71f8e25922ee2d4946a461c4c9549fcee2db09ffd"} err="failed to get container status \"41e6e89cee1a53a5c73fb4e71f8e25922ee2d4946a461c4c9549fcee2db09ffd\": rpc error: code = NotFound desc = could not find container \"41e6e89cee1a53a5c73fb4e71f8e25922ee2d4946a461c4c9549fcee2db09ffd\": container with ID starting with 41e6e89cee1a53a5c73fb4e71f8e25922ee2d4946a461c4c9549fcee2db09ffd not found: ID does not exist" Jan 20 17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.053626 4558 scope.go:117] "RemoveContainer" containerID="3345b3bd768ee87c19c2c95a125c933f947a8eb6f633265e3341105b9a77c7ed" Jan 20 17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.057577 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:41:08 crc kubenswrapper[4558]: E0120 17:41:08.058116 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ffb225d-0c31-4f91-905e-27ce314ee5df" containerName="nova-manage" Jan 20 17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.058223 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ffb225d-0c31-4f91-905e-27ce314ee5df" containerName="nova-manage" Jan 20 17:41:08 crc kubenswrapper[4558]: E0120 17:41:08.058316 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="140f3226-9a72-4426-963a-8d8b3ad539e0" containerName="nova-api-api" Jan 20 17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.058385 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="140f3226-9a72-4426-963a-8d8b3ad539e0" containerName="nova-api-api" Jan 20 17:41:08 crc kubenswrapper[4558]: E0120 17:41:08.058445 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="140f3226-9a72-4426-963a-8d8b3ad539e0" containerName="nova-api-log" Jan 20 17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.058497 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="140f3226-9a72-4426-963a-8d8b3ad539e0" containerName="nova-api-log" Jan 20 17:41:08 crc kubenswrapper[4558]: E0120 17:41:08.058564 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a701398-e5b1-4706-bfbf-2045ee831509" containerName="nova-metadata-log" Jan 20 17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.058605 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a701398-e5b1-4706-bfbf-2045ee831509" containerName="nova-metadata-log" Jan 20 17:41:08 crc kubenswrapper[4558]: E0120 17:41:08.058668 4558 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="4a701398-e5b1-4706-bfbf-2045ee831509" containerName="nova-metadata-metadata" Jan 20 17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.058719 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a701398-e5b1-4706-bfbf-2045ee831509" containerName="nova-metadata-metadata" Jan 20 17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.058967 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="2ffb225d-0c31-4f91-905e-27ce314ee5df" containerName="nova-manage" Jan 20 17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.059029 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a701398-e5b1-4706-bfbf-2045ee831509" containerName="nova-metadata-log" Jan 20 17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.059077 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="140f3226-9a72-4426-963a-8d8b3ad539e0" containerName="nova-api-api" Jan 20 17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.059123 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a701398-e5b1-4706-bfbf-2045ee831509" containerName="nova-metadata-metadata" Jan 20 17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.059182 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="140f3226-9a72-4426-963a-8d8b3ad539e0" containerName="nova-api-log" Jan 20 17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.060467 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.067575 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-metadata-internal-svc" Jan 20 17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.067647 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-metadata-config-data" Jan 20 17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.087234 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.098369 4558 scope.go:117] "RemoveContainer" containerID="b02c1231cba0c1220ed256e2fd10998e5ea769532b4f9ac5970957deba6598c9" Jan 20 17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.126954 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.132816 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-api-config-data" Jan 20 17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.137993 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.149316 4558 scope.go:117] "RemoveContainer" containerID="3345b3bd768ee87c19c2c95a125c933f947a8eb6f633265e3341105b9a77c7ed" Jan 20 17:41:08 crc kubenswrapper[4558]: E0120 17:41:08.150006 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3345b3bd768ee87c19c2c95a125c933f947a8eb6f633265e3341105b9a77c7ed\": container with ID starting with 3345b3bd768ee87c19c2c95a125c933f947a8eb6f633265e3341105b9a77c7ed not found: ID does not exist" containerID="3345b3bd768ee87c19c2c95a125c933f947a8eb6f633265e3341105b9a77c7ed" Jan 20 17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.150054 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3345b3bd768ee87c19c2c95a125c933f947a8eb6f633265e3341105b9a77c7ed"} err="failed to get container status \"3345b3bd768ee87c19c2c95a125c933f947a8eb6f633265e3341105b9a77c7ed\": rpc error: code = NotFound desc = could not find container \"3345b3bd768ee87c19c2c95a125c933f947a8eb6f633265e3341105b9a77c7ed\": container with ID starting with 3345b3bd768ee87c19c2c95a125c933f947a8eb6f633265e3341105b9a77c7ed not found: ID does not exist" Jan 20 17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.150086 4558 scope.go:117] "RemoveContainer" containerID="b02c1231cba0c1220ed256e2fd10998e5ea769532b4f9ac5970957deba6598c9" Jan 20 17:41:08 crc kubenswrapper[4558]: E0120 17:41:08.151318 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b02c1231cba0c1220ed256e2fd10998e5ea769532b4f9ac5970957deba6598c9\": container with ID starting with b02c1231cba0c1220ed256e2fd10998e5ea769532b4f9ac5970957deba6598c9 not found: ID does not exist" containerID="b02c1231cba0c1220ed256e2fd10998e5ea769532b4f9ac5970957deba6598c9" Jan 20 17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.151374 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b02c1231cba0c1220ed256e2fd10998e5ea769532b4f9ac5970957deba6598c9"} err="failed to get container status \"b02c1231cba0c1220ed256e2fd10998e5ea769532b4f9ac5970957deba6598c9\": rpc error: code = NotFound desc = could not find container \"b02c1231cba0c1220ed256e2fd10998e5ea769532b4f9ac5970957deba6598c9\": container with ID starting with b02c1231cba0c1220ed256e2fd10998e5ea769532b4f9ac5970957deba6598c9 not found: ID does not exist" Jan 20 17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.151407 4558 scope.go:117] "RemoveContainer" containerID="3345b3bd768ee87c19c2c95a125c933f947a8eb6f633265e3341105b9a77c7ed" Jan 20 17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.151783 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3345b3bd768ee87c19c2c95a125c933f947a8eb6f633265e3341105b9a77c7ed"} err="failed to get container status \"3345b3bd768ee87c19c2c95a125c933f947a8eb6f633265e3341105b9a77c7ed\": rpc error: code = NotFound desc = could not find container \"3345b3bd768ee87c19c2c95a125c933f947a8eb6f633265e3341105b9a77c7ed\": container with ID starting with 
3345b3bd768ee87c19c2c95a125c933f947a8eb6f633265e3341105b9a77c7ed not found: ID does not exist" Jan 20 17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.151835 4558 scope.go:117] "RemoveContainer" containerID="b02c1231cba0c1220ed256e2fd10998e5ea769532b4f9ac5970957deba6598c9" Jan 20 17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.152710 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b02c1231cba0c1220ed256e2fd10998e5ea769532b4f9ac5970957deba6598c9"} err="failed to get container status \"b02c1231cba0c1220ed256e2fd10998e5ea769532b4f9ac5970957deba6598c9\": rpc error: code = NotFound desc = could not find container \"b02c1231cba0c1220ed256e2fd10998e5ea769532b4f9ac5970957deba6598c9\": container with ID starting with b02c1231cba0c1220ed256e2fd10998e5ea769532b4f9ac5970957deba6598c9 not found: ID does not exist" Jan 20 17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.153384 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.161246 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab-config-data\") pod \"nova-metadata-0\" (UID: \"9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.161285 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mlvtv\" (UniqueName: \"kubernetes.io/projected/2661e9c6-af76-4c45-89ec-bb2ab34b8c2c-kube-api-access-mlvtv\") pod \"nova-api-0\" (UID: \"2661e9c6-af76-4c45-89ec-bb2ab34b8c2c\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.161606 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.161634 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab-logs\") pod \"nova-metadata-0\" (UID: \"9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.161656 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2661e9c6-af76-4c45-89ec-bb2ab34b8c2c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"2661e9c6-af76-4c45-89ec-bb2ab34b8c2c\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.161672 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2661e9c6-af76-4c45-89ec-bb2ab34b8c2c-config-data\") pod \"nova-api-0\" (UID: \"2661e9c6-af76-4c45-89ec-bb2ab34b8c2c\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.161697 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/2661e9c6-af76-4c45-89ec-bb2ab34b8c2c-logs\") pod \"nova-api-0\" (UID: \"2661e9c6-af76-4c45-89ec-bb2ab34b8c2c\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.161716 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7wvjp\" (UniqueName: \"kubernetes.io/projected/9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab-kube-api-access-7wvjp\") pod \"nova-metadata-0\" (UID: \"9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.161747 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.263661 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.263868 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab-config-data\") pod \"nova-metadata-0\" (UID: \"9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.263986 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mlvtv\" (UniqueName: \"kubernetes.io/projected/2661e9c6-af76-4c45-89ec-bb2ab34b8c2c-kube-api-access-mlvtv\") pod \"nova-api-0\" (UID: \"2661e9c6-af76-4c45-89ec-bb2ab34b8c2c\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.264125 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.264241 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab-logs\") pod \"nova-metadata-0\" (UID: \"9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.264326 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2661e9c6-af76-4c45-89ec-bb2ab34b8c2c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"2661e9c6-af76-4c45-89ec-bb2ab34b8c2c\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.264413 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2661e9c6-af76-4c45-89ec-bb2ab34b8c2c-config-data\") pod \"nova-api-0\" (UID: \"2661e9c6-af76-4c45-89ec-bb2ab34b8c2c\") " 
pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.264483 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2661e9c6-af76-4c45-89ec-bb2ab34b8c2c-logs\") pod \"nova-api-0\" (UID: \"2661e9c6-af76-4c45-89ec-bb2ab34b8c2c\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.264570 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7wvjp\" (UniqueName: \"kubernetes.io/projected/9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab-kube-api-access-7wvjp\") pod \"nova-metadata-0\" (UID: \"9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.265192 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2661e9c6-af76-4c45-89ec-bb2ab34b8c2c-logs\") pod \"nova-api-0\" (UID: \"2661e9c6-af76-4c45-89ec-bb2ab34b8c2c\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.265456 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab-logs\") pod \"nova-metadata-0\" (UID: \"9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.269938 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.270260 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.270652 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab-config-data\") pod \"nova-metadata-0\" (UID: \"9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.271044 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2661e9c6-af76-4c45-89ec-bb2ab34b8c2c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"2661e9c6-af76-4c45-89ec-bb2ab34b8c2c\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.271076 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2661e9c6-af76-4c45-89ec-bb2ab34b8c2c-config-data\") pod \"nova-api-0\" (UID: \"2661e9c6-af76-4c45-89ec-bb2ab34b8c2c\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.281338 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7wvjp\" (UniqueName: \"kubernetes.io/projected/9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab-kube-api-access-7wvjp\") pod 
\"nova-metadata-0\" (UID: \"9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.281981 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mlvtv\" (UniqueName: \"kubernetes.io/projected/2661e9c6-af76-4c45-89ec-bb2ab34b8c2c-kube-api-access-mlvtv\") pod \"nova-api-0\" (UID: \"2661e9c6-af76-4c45-89ec-bb2ab34b8c2c\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.413307 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.445828 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.581821 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="140f3226-9a72-4426-963a-8d8b3ad539e0" path="/var/lib/kubelet/pods/140f3226-9a72-4426-963a-8d8b3ad539e0/volumes" Jan 20 17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.582619 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4a701398-e5b1-4706-bfbf-2045ee831509" path="/var/lib/kubelet/pods/4a701398-e5b1-4706-bfbf-2045ee831509/volumes" Jan 20 17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.908532 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:41:08 crc kubenswrapper[4558]: I0120 17:41:08.986249 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:41:09 crc kubenswrapper[4558]: I0120 17:41:09.002639 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab","Type":"ContainerStarted","Data":"84f0fc1c0b4963fd52dc9d228e38684faff8a417904d4a8c1b1298d1a17f9e21"} Jan 20 17:41:09 crc kubenswrapper[4558]: I0120 17:41:09.005267 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="3d12dcb7-19c5-4ece-a921-ee9aacbcc0ba" containerName="nova-scheduler-scheduler" containerID="cri-o://c790c84cfffdc26f3d3bbc1a4556e89ca57b94dd0e7983d7c7dfb65f79b98c2e" gracePeriod=30 Jan 20 17:41:10 crc kubenswrapper[4558]: I0120 17:41:10.017870 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab","Type":"ContainerStarted","Data":"cb0a2516a41647bb2bb1488a113f812f6e540d08b05595a733b72dbe2a77f2c6"} Jan 20 17:41:10 crc kubenswrapper[4558]: I0120 17:41:10.019119 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab","Type":"ContainerStarted","Data":"7ad1958802b19065958c6eb9a7103c2fd61e01ab422d0552f8116077ffdaf939"} Jan 20 17:41:10 crc kubenswrapper[4558]: I0120 17:41:10.020323 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"2661e9c6-af76-4c45-89ec-bb2ab34b8c2c","Type":"ContainerStarted","Data":"3609e88e7e846a5700ff87aa4de00bdb30b2ed3d38b5a522ac2be7016bcfab4e"} Jan 20 17:41:10 crc kubenswrapper[4558]: I0120 17:41:10.020409 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" 
event={"ID":"2661e9c6-af76-4c45-89ec-bb2ab34b8c2c","Type":"ContainerStarted","Data":"1dbb1da8df3c6566ca04bcf0b706784344cc35acf44b110edf2ebb922a294075"} Jan 20 17:41:10 crc kubenswrapper[4558]: I0120 17:41:10.020488 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"2661e9c6-af76-4c45-89ec-bb2ab34b8c2c","Type":"ContainerStarted","Data":"a7397062f68b5059525a1797a931a6d903ba421943a19351aea8862d7c9b2142"} Jan 20 17:41:10 crc kubenswrapper[4558]: I0120 17:41:10.047257 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-metadata-0" podStartSLOduration=2.047227019 podStartE2EDuration="2.047227019s" podCreationTimestamp="2026-01-20 17:41:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:41:10.03468201 +0000 UTC m=+3563.795019978" watchObservedRunningTime="2026-01-20 17:41:10.047227019 +0000 UTC m=+3563.807564985" Jan 20 17:41:10 crc kubenswrapper[4558]: I0120 17:41:10.059028 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-api-0" podStartSLOduration=2.05900853 podStartE2EDuration="2.05900853s" podCreationTimestamp="2026-01-20 17:41:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:41:10.054063803 +0000 UTC m=+3563.814401770" watchObservedRunningTime="2026-01-20 17:41:10.05900853 +0000 UTC m=+3563.819346497" Jan 20 17:41:11 crc kubenswrapper[4558]: I0120 17:41:11.338986 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:41:12 crc kubenswrapper[4558]: E0120 17:41:12.059849 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="c790c84cfffdc26f3d3bbc1a4556e89ca57b94dd0e7983d7c7dfb65f79b98c2e" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:41:12 crc kubenswrapper[4558]: E0120 17:41:12.062002 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="c790c84cfffdc26f3d3bbc1a4556e89ca57b94dd0e7983d7c7dfb65f79b98c2e" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:41:12 crc kubenswrapper[4558]: E0120 17:41:12.063536 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="c790c84cfffdc26f3d3bbc1a4556e89ca57b94dd0e7983d7c7dfb65f79b98c2e" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:41:12 crc kubenswrapper[4558]: E0120 17:41:12.063623 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="3d12dcb7-19c5-4ece-a921-ee9aacbcc0ba" containerName="nova-scheduler-scheduler" Jan 20 17:41:12 crc kubenswrapper[4558]: I0120 17:41:12.272424 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:41:12 
crc kubenswrapper[4558]: I0120 17:41:12.292796 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:41:13 crc kubenswrapper[4558]: I0120 17:41:13.069437 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:41:13 crc kubenswrapper[4558]: I0120 17:41:13.227696 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-cell-mapping-7m8mg"] Jan 20 17:41:13 crc kubenswrapper[4558]: I0120 17:41:13.229132 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-7m8mg" Jan 20 17:41:13 crc kubenswrapper[4558]: I0120 17:41:13.231238 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-manage-config-data" Jan 20 17:41:13 crc kubenswrapper[4558]: I0120 17:41:13.232093 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-manage-scripts" Jan 20 17:41:13 crc kubenswrapper[4558]: I0120 17:41:13.237701 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-cell-mapping-7m8mg"] Jan 20 17:41:13 crc kubenswrapper[4558]: I0120 17:41:13.263330 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7hnzn\" (UniqueName: \"kubernetes.io/projected/e48cd5c7-5acf-4683-b6eb-6947b7b56706-kube-api-access-7hnzn\") pod \"nova-cell1-cell-mapping-7m8mg\" (UID: \"e48cd5c7-5acf-4683-b6eb-6947b7b56706\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-7m8mg" Jan 20 17:41:13 crc kubenswrapper[4558]: I0120 17:41:13.263432 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e48cd5c7-5acf-4683-b6eb-6947b7b56706-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-7m8mg\" (UID: \"e48cd5c7-5acf-4683-b6eb-6947b7b56706\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-7m8mg" Jan 20 17:41:13 crc kubenswrapper[4558]: I0120 17:41:13.263482 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e48cd5c7-5acf-4683-b6eb-6947b7b56706-scripts\") pod \"nova-cell1-cell-mapping-7m8mg\" (UID: \"e48cd5c7-5acf-4683-b6eb-6947b7b56706\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-7m8mg" Jan 20 17:41:13 crc kubenswrapper[4558]: I0120 17:41:13.263546 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e48cd5c7-5acf-4683-b6eb-6947b7b56706-config-data\") pod \"nova-cell1-cell-mapping-7m8mg\" (UID: \"e48cd5c7-5acf-4683-b6eb-6947b7b56706\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-7m8mg" Jan 20 17:41:13 crc kubenswrapper[4558]: I0120 17:41:13.369078 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e48cd5c7-5acf-4683-b6eb-6947b7b56706-scripts\") pod \"nova-cell1-cell-mapping-7m8mg\" (UID: \"e48cd5c7-5acf-4683-b6eb-6947b7b56706\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-7m8mg" Jan 20 17:41:13 crc kubenswrapper[4558]: I0120 17:41:13.369860 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/e48cd5c7-5acf-4683-b6eb-6947b7b56706-config-data\") pod \"nova-cell1-cell-mapping-7m8mg\" (UID: \"e48cd5c7-5acf-4683-b6eb-6947b7b56706\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-7m8mg" Jan 20 17:41:13 crc kubenswrapper[4558]: I0120 17:41:13.370403 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7hnzn\" (UniqueName: \"kubernetes.io/projected/e48cd5c7-5acf-4683-b6eb-6947b7b56706-kube-api-access-7hnzn\") pod \"nova-cell1-cell-mapping-7m8mg\" (UID: \"e48cd5c7-5acf-4683-b6eb-6947b7b56706\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-7m8mg" Jan 20 17:41:13 crc kubenswrapper[4558]: I0120 17:41:13.371204 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e48cd5c7-5acf-4683-b6eb-6947b7b56706-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-7m8mg\" (UID: \"e48cd5c7-5acf-4683-b6eb-6947b7b56706\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-7m8mg" Jan 20 17:41:13 crc kubenswrapper[4558]: I0120 17:41:13.377118 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e48cd5c7-5acf-4683-b6eb-6947b7b56706-config-data\") pod \"nova-cell1-cell-mapping-7m8mg\" (UID: \"e48cd5c7-5acf-4683-b6eb-6947b7b56706\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-7m8mg" Jan 20 17:41:13 crc kubenswrapper[4558]: I0120 17:41:13.377980 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e48cd5c7-5acf-4683-b6eb-6947b7b56706-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-7m8mg\" (UID: \"e48cd5c7-5acf-4683-b6eb-6947b7b56706\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-7m8mg" Jan 20 17:41:13 crc kubenswrapper[4558]: I0120 17:41:13.382949 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e48cd5c7-5acf-4683-b6eb-6947b7b56706-scripts\") pod \"nova-cell1-cell-mapping-7m8mg\" (UID: \"e48cd5c7-5acf-4683-b6eb-6947b7b56706\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-7m8mg" Jan 20 17:41:13 crc kubenswrapper[4558]: I0120 17:41:13.385932 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7hnzn\" (UniqueName: \"kubernetes.io/projected/e48cd5c7-5acf-4683-b6eb-6947b7b56706-kube-api-access-7hnzn\") pod \"nova-cell1-cell-mapping-7m8mg\" (UID: \"e48cd5c7-5acf-4683-b6eb-6947b7b56706\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-7m8mg" Jan 20 17:41:13 crc kubenswrapper[4558]: I0120 17:41:13.414110 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:41:13 crc kubenswrapper[4558]: I0120 17:41:13.414542 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:41:13 crc kubenswrapper[4558]: I0120 17:41:13.552795 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-7m8mg" Jan 20 17:41:13 crc kubenswrapper[4558]: I0120 17:41:13.968483 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-cell-mapping-7m8mg"] Jan 20 17:41:13 crc kubenswrapper[4558]: I0120 17:41:13.968959 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:41:13 crc kubenswrapper[4558]: W0120 17:41:13.970369 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode48cd5c7_5acf_4683_b6eb_6947b7b56706.slice/crio-fc083f1ede37b41dbb797b3b32bfd8ea3303e36cd90bd35f86e7dbf133a0b73d WatchSource:0}: Error finding container fc083f1ede37b41dbb797b3b32bfd8ea3303e36cd90bd35f86e7dbf133a0b73d: Status 404 returned error can't find the container with id fc083f1ede37b41dbb797b3b32bfd8ea3303e36cd90bd35f86e7dbf133a0b73d Jan 20 17:41:13 crc kubenswrapper[4558]: I0120 17:41:13.982846 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f5rvt\" (UniqueName: \"kubernetes.io/projected/3d12dcb7-19c5-4ece-a921-ee9aacbcc0ba-kube-api-access-f5rvt\") pod \"3d12dcb7-19c5-4ece-a921-ee9aacbcc0ba\" (UID: \"3d12dcb7-19c5-4ece-a921-ee9aacbcc0ba\") " Jan 20 17:41:13 crc kubenswrapper[4558]: I0120 17:41:13.983003 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d12dcb7-19c5-4ece-a921-ee9aacbcc0ba-combined-ca-bundle\") pod \"3d12dcb7-19c5-4ece-a921-ee9aacbcc0ba\" (UID: \"3d12dcb7-19c5-4ece-a921-ee9aacbcc0ba\") " Jan 20 17:41:13 crc kubenswrapper[4558]: I0120 17:41:13.983077 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d12dcb7-19c5-4ece-a921-ee9aacbcc0ba-config-data\") pod \"3d12dcb7-19c5-4ece-a921-ee9aacbcc0ba\" (UID: \"3d12dcb7-19c5-4ece-a921-ee9aacbcc0ba\") " Jan 20 17:41:13 crc kubenswrapper[4558]: I0120 17:41:13.987434 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3d12dcb7-19c5-4ece-a921-ee9aacbcc0ba-kube-api-access-f5rvt" (OuterVolumeSpecName: "kube-api-access-f5rvt") pod "3d12dcb7-19c5-4ece-a921-ee9aacbcc0ba" (UID: "3d12dcb7-19c5-4ece-a921-ee9aacbcc0ba"). InnerVolumeSpecName "kube-api-access-f5rvt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:41:14 crc kubenswrapper[4558]: I0120 17:41:14.008810 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3d12dcb7-19c5-4ece-a921-ee9aacbcc0ba-config-data" (OuterVolumeSpecName: "config-data") pod "3d12dcb7-19c5-4ece-a921-ee9aacbcc0ba" (UID: "3d12dcb7-19c5-4ece-a921-ee9aacbcc0ba"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:41:14 crc kubenswrapper[4558]: I0120 17:41:14.015366 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3d12dcb7-19c5-4ece-a921-ee9aacbcc0ba-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3d12dcb7-19c5-4ece-a921-ee9aacbcc0ba" (UID: "3d12dcb7-19c5-4ece-a921-ee9aacbcc0ba"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:41:14 crc kubenswrapper[4558]: I0120 17:41:14.061513 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-7m8mg" event={"ID":"e48cd5c7-5acf-4683-b6eb-6947b7b56706","Type":"ContainerStarted","Data":"fc083f1ede37b41dbb797b3b32bfd8ea3303e36cd90bd35f86e7dbf133a0b73d"} Jan 20 17:41:14 crc kubenswrapper[4558]: I0120 17:41:14.063182 4558 generic.go:334] "Generic (PLEG): container finished" podID="3d12dcb7-19c5-4ece-a921-ee9aacbcc0ba" containerID="c790c84cfffdc26f3d3bbc1a4556e89ca57b94dd0e7983d7c7dfb65f79b98c2e" exitCode=0 Jan 20 17:41:14 crc kubenswrapper[4558]: I0120 17:41:14.063240 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"3d12dcb7-19c5-4ece-a921-ee9aacbcc0ba","Type":"ContainerDied","Data":"c790c84cfffdc26f3d3bbc1a4556e89ca57b94dd0e7983d7c7dfb65f79b98c2e"} Jan 20 17:41:14 crc kubenswrapper[4558]: I0120 17:41:14.063299 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"3d12dcb7-19c5-4ece-a921-ee9aacbcc0ba","Type":"ContainerDied","Data":"1334879e622039470e3b362259bd09aedf34ff6518ef48543cd21f56a3973156"} Jan 20 17:41:14 crc kubenswrapper[4558]: I0120 17:41:14.063315 4558 scope.go:117] "RemoveContainer" containerID="c790c84cfffdc26f3d3bbc1a4556e89ca57b94dd0e7983d7c7dfb65f79b98c2e" Jan 20 17:41:14 crc kubenswrapper[4558]: I0120 17:41:14.063409 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:41:14 crc kubenswrapper[4558]: I0120 17:41:14.091429 4558 scope.go:117] "RemoveContainer" containerID="c790c84cfffdc26f3d3bbc1a4556e89ca57b94dd0e7983d7c7dfb65f79b98c2e" Jan 20 17:41:14 crc kubenswrapper[4558]: E0120 17:41:14.092478 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c790c84cfffdc26f3d3bbc1a4556e89ca57b94dd0e7983d7c7dfb65f79b98c2e\": container with ID starting with c790c84cfffdc26f3d3bbc1a4556e89ca57b94dd0e7983d7c7dfb65f79b98c2e not found: ID does not exist" containerID="c790c84cfffdc26f3d3bbc1a4556e89ca57b94dd0e7983d7c7dfb65f79b98c2e" Jan 20 17:41:14 crc kubenswrapper[4558]: I0120 17:41:14.092525 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c790c84cfffdc26f3d3bbc1a4556e89ca57b94dd0e7983d7c7dfb65f79b98c2e"} err="failed to get container status \"c790c84cfffdc26f3d3bbc1a4556e89ca57b94dd0e7983d7c7dfb65f79b98c2e\": rpc error: code = NotFound desc = could not find container \"c790c84cfffdc26f3d3bbc1a4556e89ca57b94dd0e7983d7c7dfb65f79b98c2e\": container with ID starting with c790c84cfffdc26f3d3bbc1a4556e89ca57b94dd0e7983d7c7dfb65f79b98c2e not found: ID does not exist" Jan 20 17:41:14 crc kubenswrapper[4558]: I0120 17:41:14.093822 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d12dcb7-19c5-4ece-a921-ee9aacbcc0ba-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:14 crc kubenswrapper[4558]: I0120 17:41:14.094262 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f5rvt\" (UniqueName: \"kubernetes.io/projected/3d12dcb7-19c5-4ece-a921-ee9aacbcc0ba-kube-api-access-f5rvt\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:14 crc kubenswrapper[4558]: I0120 17:41:14.094350 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/3d12dcb7-19c5-4ece-a921-ee9aacbcc0ba-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:14 crc kubenswrapper[4558]: I0120 17:41:14.109097 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:41:14 crc kubenswrapper[4558]: I0120 17:41:14.120428 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:41:14 crc kubenswrapper[4558]: I0120 17:41:14.130008 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:41:14 crc kubenswrapper[4558]: E0120 17:41:14.130761 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d12dcb7-19c5-4ece-a921-ee9aacbcc0ba" containerName="nova-scheduler-scheduler" Jan 20 17:41:14 crc kubenswrapper[4558]: I0120 17:41:14.130798 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d12dcb7-19c5-4ece-a921-ee9aacbcc0ba" containerName="nova-scheduler-scheduler" Jan 20 17:41:14 crc kubenswrapper[4558]: I0120 17:41:14.132112 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d12dcb7-19c5-4ece-a921-ee9aacbcc0ba" containerName="nova-scheduler-scheduler" Jan 20 17:41:14 crc kubenswrapper[4558]: I0120 17:41:14.133175 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:41:14 crc kubenswrapper[4558]: I0120 17:41:14.135784 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:41:14 crc kubenswrapper[4558]: I0120 17:41:14.136103 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-scheduler-config-data" Jan 20 17:41:14 crc kubenswrapper[4558]: I0120 17:41:14.197001 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gj87n\" (UniqueName: \"kubernetes.io/projected/9694d2f6-3a30-4509-be7e-3fe6b84c8b74-kube-api-access-gj87n\") pod \"nova-scheduler-0\" (UID: \"9694d2f6-3a30-4509-be7e-3fe6b84c8b74\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:41:14 crc kubenswrapper[4558]: I0120 17:41:14.197280 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9694d2f6-3a30-4509-be7e-3fe6b84c8b74-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"9694d2f6-3a30-4509-be7e-3fe6b84c8b74\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:41:14 crc kubenswrapper[4558]: I0120 17:41:14.197490 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9694d2f6-3a30-4509-be7e-3fe6b84c8b74-config-data\") pod \"nova-scheduler-0\" (UID: \"9694d2f6-3a30-4509-be7e-3fe6b84c8b74\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:41:14 crc kubenswrapper[4558]: I0120 17:41:14.299299 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gj87n\" (UniqueName: \"kubernetes.io/projected/9694d2f6-3a30-4509-be7e-3fe6b84c8b74-kube-api-access-gj87n\") pod \"nova-scheduler-0\" (UID: \"9694d2f6-3a30-4509-be7e-3fe6b84c8b74\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:41:14 crc kubenswrapper[4558]: I0120 17:41:14.299388 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/9694d2f6-3a30-4509-be7e-3fe6b84c8b74-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"9694d2f6-3a30-4509-be7e-3fe6b84c8b74\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:41:14 crc kubenswrapper[4558]: I0120 17:41:14.299425 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9694d2f6-3a30-4509-be7e-3fe6b84c8b74-config-data\") pod \"nova-scheduler-0\" (UID: \"9694d2f6-3a30-4509-be7e-3fe6b84c8b74\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:41:14 crc kubenswrapper[4558]: I0120 17:41:14.304825 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9694d2f6-3a30-4509-be7e-3fe6b84c8b74-config-data\") pod \"nova-scheduler-0\" (UID: \"9694d2f6-3a30-4509-be7e-3fe6b84c8b74\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:41:14 crc kubenswrapper[4558]: I0120 17:41:14.304934 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9694d2f6-3a30-4509-be7e-3fe6b84c8b74-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"9694d2f6-3a30-4509-be7e-3fe6b84c8b74\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:41:14 crc kubenswrapper[4558]: I0120 17:41:14.318102 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gj87n\" (UniqueName: \"kubernetes.io/projected/9694d2f6-3a30-4509-be7e-3fe6b84c8b74-kube-api-access-gj87n\") pod \"nova-scheduler-0\" (UID: \"9694d2f6-3a30-4509-be7e-3fe6b84c8b74\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:41:14 crc kubenswrapper[4558]: I0120 17:41:14.453703 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:41:14 crc kubenswrapper[4558]: I0120 17:41:14.578059 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3d12dcb7-19c5-4ece-a921-ee9aacbcc0ba" path="/var/lib/kubelet/pods/3d12dcb7-19c5-4ece-a921-ee9aacbcc0ba/volumes" Jan 20 17:41:14 crc kubenswrapper[4558]: I0120 17:41:14.878233 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:41:14 crc kubenswrapper[4558]: W0120 17:41:14.881310 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9694d2f6_3a30_4509_be7e_3fe6b84c8b74.slice/crio-f572af2e7a0c5e22088730e406cbaa7a2a2173452eae223a19633fa8765028c0 WatchSource:0}: Error finding container f572af2e7a0c5e22088730e406cbaa7a2a2173452eae223a19633fa8765028c0: Status 404 returned error can't find the container with id f572af2e7a0c5e22088730e406cbaa7a2a2173452eae223a19633fa8765028c0 Jan 20 17:41:15 crc kubenswrapper[4558]: I0120 17:41:15.077808 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"9694d2f6-3a30-4509-be7e-3fe6b84c8b74","Type":"ContainerStarted","Data":"28b93481b5264a09172ae2999ca4a5c8e1f3d6a68e2da4f930566d30a227619f"} Jan 20 17:41:15 crc kubenswrapper[4558]: I0120 17:41:15.077878 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"9694d2f6-3a30-4509-be7e-3fe6b84c8b74","Type":"ContainerStarted","Data":"f572af2e7a0c5e22088730e406cbaa7a2a2173452eae223a19633fa8765028c0"} Jan 20 17:41:15 crc kubenswrapper[4558]: I0120 17:41:15.080716 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-7m8mg" event={"ID":"e48cd5c7-5acf-4683-b6eb-6947b7b56706","Type":"ContainerStarted","Data":"1b2406ac4844c018e4c181d32e0d4b3ff0b33492aeb8d275a7fb0dd3f2e899cc"} Jan 20 17:41:15 crc kubenswrapper[4558]: I0120 17:41:15.115762 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-scheduler-0" podStartSLOduration=1.115738372 podStartE2EDuration="1.115738372s" podCreationTimestamp="2026-01-20 17:41:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:41:15.098798622 +0000 UTC m=+3568.859136589" watchObservedRunningTime="2026-01-20 17:41:15.115738372 +0000 UTC m=+3568.876076340" Jan 20 17:41:15 crc kubenswrapper[4558]: I0120 17:41:15.123005 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-7m8mg" podStartSLOduration=2.12299667 podStartE2EDuration="2.12299667s" podCreationTimestamp="2026-01-20 17:41:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:41:15.118328042 +0000 UTC m=+3568.878666009" watchObservedRunningTime="2026-01-20 17:41:15.12299667 +0000 UTC m=+3568.883334637" Jan 20 17:41:18 crc kubenswrapper[4558]: I0120 17:41:18.414119 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:41:18 crc kubenswrapper[4558]: I0120 17:41:18.415009 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:41:18 crc kubenswrapper[4558]: I0120 
17:41:18.447021 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:41:18 crc kubenswrapper[4558]: I0120 17:41:18.447561 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:41:19 crc kubenswrapper[4558]: I0120 17:41:19.091749 4558 scope.go:117] "RemoveContainer" containerID="a668618c15e38bf505cbb7753467741c1052b1b1764b31556e8205a83f0c0c3b" Jan 20 17:41:19 crc kubenswrapper[4558]: I0120 17:41:19.116932 4558 scope.go:117] "RemoveContainer" containerID="f80866aad1b997b9edf8ab7c0962cd1436981eced3f24d965081cee0a0b92d9a" Jan 20 17:41:19 crc kubenswrapper[4558]: I0120 17:41:19.148310 4558 generic.go:334] "Generic (PLEG): container finished" podID="e48cd5c7-5acf-4683-b6eb-6947b7b56706" containerID="1b2406ac4844c018e4c181d32e0d4b3ff0b33492aeb8d275a7fb0dd3f2e899cc" exitCode=0 Jan 20 17:41:19 crc kubenswrapper[4558]: I0120 17:41:19.148408 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-7m8mg" event={"ID":"e48cd5c7-5acf-4683-b6eb-6947b7b56706","Type":"ContainerDied","Data":"1b2406ac4844c018e4c181d32e0d4b3ff0b33492aeb8d275a7fb0dd3f2e899cc"} Jan 20 17:41:19 crc kubenswrapper[4558]: I0120 17:41:19.154764 4558 scope.go:117] "RemoveContainer" containerID="60ee7c69684601f241168e9490c910c322ce0ad1bc0d2f8e418a384d04073baa" Jan 20 17:41:19 crc kubenswrapper[4558]: I0120 17:41:19.204581 4558 scope.go:117] "RemoveContainer" containerID="c5868b91333e3f1c6356506d56295db65bbb6580ddcdb5dfaafeb5e8213164a5" Jan 20 17:41:19 crc kubenswrapper[4558]: I0120 17:41:19.224079 4558 scope.go:117] "RemoveContainer" containerID="885f382b361929255e7062afd4f25e3aac93e29a4f55784a8ee8a90d6b2f53d8" Jan 20 17:41:19 crc kubenswrapper[4558]: I0120 17:41:19.242381 4558 scope.go:117] "RemoveContainer" containerID="d73d4daa0fc88cb61145408b21e4b824ef93c1b31996aa4f8630de0f2161615b" Jan 20 17:41:19 crc kubenswrapper[4558]: I0120 17:41:19.260371 4558 scope.go:117] "RemoveContainer" containerID="37f082e53f69b60d82922862c4aabdec9b3cd30d3ff34d5ac037935a9c1bd638" Jan 20 17:41:19 crc kubenswrapper[4558]: I0120 17:41:19.276092 4558 scope.go:117] "RemoveContainer" containerID="c28767df71c970ea5e8653da53296b3b86a33c31261c457782efaa5c0c34b9aa" Jan 20 17:41:19 crc kubenswrapper[4558]: I0120 17:41:19.453772 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:41:19 crc kubenswrapper[4558]: I0120 17:41:19.461300 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-metadata-0" podUID="9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.254:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:41:19 crc kubenswrapper[4558]: I0120 17:41:19.461370 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-metadata-0" podUID="9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.254:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:41:19 crc kubenswrapper[4558]: I0120 17:41:19.544354 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" podUID="2661e9c6-af76-4c45-89ec-bb2ab34b8c2c" containerName="nova-api-api" probeResult="failure" output="Get 
\"http://10.217.0.255:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 20 17:41:19 crc kubenswrapper[4558]: I0120 17:41:19.544367 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" podUID="2661e9c6-af76-4c45-89ec-bb2ab34b8c2c" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.255:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 20 17:41:19 crc kubenswrapper[4558]: I0120 17:41:19.566327 4558 scope.go:117] "RemoveContainer" containerID="2d2a8989ca5a5af98b2c9dc8f801ea0675339ec14800f437c058c5a43c20146b" Jan 20 17:41:19 crc kubenswrapper[4558]: E0120 17:41:19.566586 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:41:20 crc kubenswrapper[4558]: I0120 17:41:20.485793 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-7m8mg" Jan 20 17:41:20 crc kubenswrapper[4558]: I0120 17:41:20.533380 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e48cd5c7-5acf-4683-b6eb-6947b7b56706-combined-ca-bundle\") pod \"e48cd5c7-5acf-4683-b6eb-6947b7b56706\" (UID: \"e48cd5c7-5acf-4683-b6eb-6947b7b56706\") " Jan 20 17:41:20 crc kubenswrapper[4558]: I0120 17:41:20.533482 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e48cd5c7-5acf-4683-b6eb-6947b7b56706-config-data\") pod \"e48cd5c7-5acf-4683-b6eb-6947b7b56706\" (UID: \"e48cd5c7-5acf-4683-b6eb-6947b7b56706\") " Jan 20 17:41:20 crc kubenswrapper[4558]: I0120 17:41:20.533568 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e48cd5c7-5acf-4683-b6eb-6947b7b56706-scripts\") pod \"e48cd5c7-5acf-4683-b6eb-6947b7b56706\" (UID: \"e48cd5c7-5acf-4683-b6eb-6947b7b56706\") " Jan 20 17:41:20 crc kubenswrapper[4558]: I0120 17:41:20.533599 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7hnzn\" (UniqueName: \"kubernetes.io/projected/e48cd5c7-5acf-4683-b6eb-6947b7b56706-kube-api-access-7hnzn\") pod \"e48cd5c7-5acf-4683-b6eb-6947b7b56706\" (UID: \"e48cd5c7-5acf-4683-b6eb-6947b7b56706\") " Jan 20 17:41:20 crc kubenswrapper[4558]: I0120 17:41:20.542026 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e48cd5c7-5acf-4683-b6eb-6947b7b56706-kube-api-access-7hnzn" (OuterVolumeSpecName: "kube-api-access-7hnzn") pod "e48cd5c7-5acf-4683-b6eb-6947b7b56706" (UID: "e48cd5c7-5acf-4683-b6eb-6947b7b56706"). InnerVolumeSpecName "kube-api-access-7hnzn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:41:20 crc kubenswrapper[4558]: I0120 17:41:20.542267 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e48cd5c7-5acf-4683-b6eb-6947b7b56706-scripts" (OuterVolumeSpecName: "scripts") pod "e48cd5c7-5acf-4683-b6eb-6947b7b56706" (UID: "e48cd5c7-5acf-4683-b6eb-6947b7b56706"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:41:20 crc kubenswrapper[4558]: I0120 17:41:20.561317 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e48cd5c7-5acf-4683-b6eb-6947b7b56706-config-data" (OuterVolumeSpecName: "config-data") pod "e48cd5c7-5acf-4683-b6eb-6947b7b56706" (UID: "e48cd5c7-5acf-4683-b6eb-6947b7b56706"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:41:20 crc kubenswrapper[4558]: I0120 17:41:20.562206 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e48cd5c7-5acf-4683-b6eb-6947b7b56706-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e48cd5c7-5acf-4683-b6eb-6947b7b56706" (UID: "e48cd5c7-5acf-4683-b6eb-6947b7b56706"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:41:20 crc kubenswrapper[4558]: I0120 17:41:20.635272 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e48cd5c7-5acf-4683-b6eb-6947b7b56706-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:20 crc kubenswrapper[4558]: I0120 17:41:20.635316 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e48cd5c7-5acf-4683-b6eb-6947b7b56706-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:20 crc kubenswrapper[4558]: I0120 17:41:20.635327 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7hnzn\" (UniqueName: \"kubernetes.io/projected/e48cd5c7-5acf-4683-b6eb-6947b7b56706-kube-api-access-7hnzn\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:20 crc kubenswrapper[4558]: I0120 17:41:20.635337 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e48cd5c7-5acf-4683-b6eb-6947b7b56706-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:21 crc kubenswrapper[4558]: I0120 17:41:21.176927 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-7m8mg" event={"ID":"e48cd5c7-5acf-4683-b6eb-6947b7b56706","Type":"ContainerDied","Data":"fc083f1ede37b41dbb797b3b32bfd8ea3303e36cd90bd35f86e7dbf133a0b73d"} Jan 20 17:41:21 crc kubenswrapper[4558]: I0120 17:41:21.177003 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fc083f1ede37b41dbb797b3b32bfd8ea3303e36cd90bd35f86e7dbf133a0b73d" Jan 20 17:41:21 crc kubenswrapper[4558]: I0120 17:41:21.177355 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-7m8mg" Jan 20 17:41:21 crc kubenswrapper[4558]: I0120 17:41:21.367626 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:41:21 crc kubenswrapper[4558]: I0120 17:41:21.368024 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="2661e9c6-af76-4c45-89ec-bb2ab34b8c2c" containerName="nova-api-log" containerID="cri-o://1dbb1da8df3c6566ca04bcf0b706784344cc35acf44b110edf2ebb922a294075" gracePeriod=30 Jan 20 17:41:21 crc kubenswrapper[4558]: I0120 17:41:21.368192 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="2661e9c6-af76-4c45-89ec-bb2ab34b8c2c" containerName="nova-api-api" containerID="cri-o://3609e88e7e846a5700ff87aa4de00bdb30b2ed3d38b5a522ac2be7016bcfab4e" gracePeriod=30 Jan 20 17:41:21 crc kubenswrapper[4558]: I0120 17:41:21.383104 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:41:21 crc kubenswrapper[4558]: I0120 17:41:21.383591 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="9694d2f6-3a30-4509-be7e-3fe6b84c8b74" containerName="nova-scheduler-scheduler" containerID="cri-o://28b93481b5264a09172ae2999ca4a5c8e1f3d6a68e2da4f930566d30a227619f" gracePeriod=30 Jan 20 17:41:21 crc kubenswrapper[4558]: I0120 17:41:21.431528 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:41:21 crc kubenswrapper[4558]: I0120 17:41:21.432050 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab" containerName="nova-metadata-log" containerID="cri-o://7ad1958802b19065958c6eb9a7103c2fd61e01ab422d0552f8116077ffdaf939" gracePeriod=30 Jan 20 17:41:21 crc kubenswrapper[4558]: I0120 17:41:21.432113 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab" containerName="nova-metadata-metadata" containerID="cri-o://cb0a2516a41647bb2bb1488a113f812f6e540d08b05595a733b72dbe2a77f2c6" gracePeriod=30 Jan 20 17:41:22 crc kubenswrapper[4558]: I0120 17:41:22.190819 4558 generic.go:334] "Generic (PLEG): container finished" podID="9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab" containerID="7ad1958802b19065958c6eb9a7103c2fd61e01ab422d0552f8116077ffdaf939" exitCode=143 Jan 20 17:41:22 crc kubenswrapper[4558]: I0120 17:41:22.190932 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab","Type":"ContainerDied","Data":"7ad1958802b19065958c6eb9a7103c2fd61e01ab422d0552f8116077ffdaf939"} Jan 20 17:41:22 crc kubenswrapper[4558]: I0120 17:41:22.193761 4558 generic.go:334] "Generic (PLEG): container finished" podID="2661e9c6-af76-4c45-89ec-bb2ab34b8c2c" containerID="1dbb1da8df3c6566ca04bcf0b706784344cc35acf44b110edf2ebb922a294075" exitCode=143 Jan 20 17:41:22 crc kubenswrapper[4558]: I0120 17:41:22.193858 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"2661e9c6-af76-4c45-89ec-bb2ab34b8c2c","Type":"ContainerDied","Data":"1dbb1da8df3c6566ca04bcf0b706784344cc35acf44b110edf2ebb922a294075"} Jan 20 17:41:24 crc kubenswrapper[4558]: 
I0120 17:41:24.910561 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:41:24 crc kubenswrapper[4558]: I0120 17:41:24.959041 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.014817 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7wvjp\" (UniqueName: \"kubernetes.io/projected/9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab-kube-api-access-7wvjp\") pod \"9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab\" (UID: \"9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab\") " Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.014906 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mlvtv\" (UniqueName: \"kubernetes.io/projected/2661e9c6-af76-4c45-89ec-bb2ab34b8c2c-kube-api-access-mlvtv\") pod \"2661e9c6-af76-4c45-89ec-bb2ab34b8c2c\" (UID: \"2661e9c6-af76-4c45-89ec-bb2ab34b8c2c\") " Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.014963 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2661e9c6-af76-4c45-89ec-bb2ab34b8c2c-combined-ca-bundle\") pod \"2661e9c6-af76-4c45-89ec-bb2ab34b8c2c\" (UID: \"2661e9c6-af76-4c45-89ec-bb2ab34b8c2c\") " Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.014989 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2661e9c6-af76-4c45-89ec-bb2ab34b8c2c-logs\") pod \"2661e9c6-af76-4c45-89ec-bb2ab34b8c2c\" (UID: \"2661e9c6-af76-4c45-89ec-bb2ab34b8c2c\") " Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.015083 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab-combined-ca-bundle\") pod \"9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab\" (UID: \"9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab\") " Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.015198 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab-config-data\") pod \"9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab\" (UID: \"9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab\") " Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.015250 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2661e9c6-af76-4c45-89ec-bb2ab34b8c2c-config-data\") pod \"2661e9c6-af76-4c45-89ec-bb2ab34b8c2c\" (UID: \"2661e9c6-af76-4c45-89ec-bb2ab34b8c2c\") " Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.015361 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab-logs\") pod \"9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab\" (UID: \"9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab\") " Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.015526 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab-nova-metadata-tls-certs\") pod \"9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab\" (UID: \"9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab\") " Jan 20 17:41:25 crc kubenswrapper[4558]: 
I0120 17:41:25.016933 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab-logs" (OuterVolumeSpecName: "logs") pod "9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab" (UID: "9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.017000 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2661e9c6-af76-4c45-89ec-bb2ab34b8c2c-logs" (OuterVolumeSpecName: "logs") pod "2661e9c6-af76-4c45-89ec-bb2ab34b8c2c" (UID: "2661e9c6-af76-4c45-89ec-bb2ab34b8c2c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.022983 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab-kube-api-access-7wvjp" (OuterVolumeSpecName: "kube-api-access-7wvjp") pod "9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab" (UID: "9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab"). InnerVolumeSpecName "kube-api-access-7wvjp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.023037 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2661e9c6-af76-4c45-89ec-bb2ab34b8c2c-kube-api-access-mlvtv" (OuterVolumeSpecName: "kube-api-access-mlvtv") pod "2661e9c6-af76-4c45-89ec-bb2ab34b8c2c" (UID: "2661e9c6-af76-4c45-89ec-bb2ab34b8c2c"). InnerVolumeSpecName "kube-api-access-mlvtv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:41:25 crc kubenswrapper[4558]: E0120 17:41:25.042040 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/2661e9c6-af76-4c45-89ec-bb2ab34b8c2c-combined-ca-bundle podName:2661e9c6-af76-4c45-89ec-bb2ab34b8c2c nodeName:}" failed. No retries permitted until 2026-01-20 17:41:25.541992997 +0000 UTC m=+3579.302330965 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/2661e9c6-af76-4c45-89ec-bb2ab34b8c2c-combined-ca-bundle") pod "2661e9c6-af76-4c45-89ec-bb2ab34b8c2c" (UID: "2661e9c6-af76-4c45-89ec-bb2ab34b8c2c") : error deleting /var/lib/kubelet/pods/2661e9c6-af76-4c45-89ec-bb2ab34b8c2c/volume-subpaths: remove /var/lib/kubelet/pods/2661e9c6-af76-4c45-89ec-bb2ab34b8c2c/volume-subpaths: no such file or directory Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.045020 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2661e9c6-af76-4c45-89ec-bb2ab34b8c2c-config-data" (OuterVolumeSpecName: "config-data") pod "2661e9c6-af76-4c45-89ec-bb2ab34b8c2c" (UID: "2661e9c6-af76-4c45-89ec-bb2ab34b8c2c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.045082 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab-config-data" (OuterVolumeSpecName: "config-data") pod "9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab" (UID: "9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.047428 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab" (UID: "9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.064368 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab" (UID: "9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.118343 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2661e9c6-af76-4c45-89ec-bb2ab34b8c2c-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.118586 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.118599 4558 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.118614 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7wvjp\" (UniqueName: \"kubernetes.io/projected/9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab-kube-api-access-7wvjp\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.118623 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mlvtv\" (UniqueName: \"kubernetes.io/projected/2661e9c6-af76-4c45-89ec-bb2ab34b8c2c-kube-api-access-mlvtv\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.118632 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2661e9c6-af76-4c45-89ec-bb2ab34b8c2c-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.118641 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.118650 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.226498 4558 generic.go:334] "Generic (PLEG): container finished" podID="2661e9c6-af76-4c45-89ec-bb2ab34b8c2c" containerID="3609e88e7e846a5700ff87aa4de00bdb30b2ed3d38b5a522ac2be7016bcfab4e" exitCode=0 Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.226713 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.226707 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"2661e9c6-af76-4c45-89ec-bb2ab34b8c2c","Type":"ContainerDied","Data":"3609e88e7e846a5700ff87aa4de00bdb30b2ed3d38b5a522ac2be7016bcfab4e"} Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.226797 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"2661e9c6-af76-4c45-89ec-bb2ab34b8c2c","Type":"ContainerDied","Data":"a7397062f68b5059525a1797a931a6d903ba421943a19351aea8862d7c9b2142"} Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.226837 4558 scope.go:117] "RemoveContainer" containerID="3609e88e7e846a5700ff87aa4de00bdb30b2ed3d38b5a522ac2be7016bcfab4e" Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.230724 4558 generic.go:334] "Generic (PLEG): container finished" podID="9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab" containerID="cb0a2516a41647bb2bb1488a113f812f6e540d08b05595a733b72dbe2a77f2c6" exitCode=0 Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.230772 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab","Type":"ContainerDied","Data":"cb0a2516a41647bb2bb1488a113f812f6e540d08b05595a733b72dbe2a77f2c6"} Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.230808 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab","Type":"ContainerDied","Data":"84f0fc1c0b4963fd52dc9d228e38684faff8a417904d4a8c1b1298d1a17f9e21"} Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.230939 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.270269 4558 scope.go:117] "RemoveContainer" containerID="1dbb1da8df3c6566ca04bcf0b706784344cc35acf44b110edf2ebb922a294075" Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.278283 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.298674 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.307101 4558 scope.go:117] "RemoveContainer" containerID="3609e88e7e846a5700ff87aa4de00bdb30b2ed3d38b5a522ac2be7016bcfab4e" Jan 20 17:41:25 crc kubenswrapper[4558]: E0120 17:41:25.308127 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3609e88e7e846a5700ff87aa4de00bdb30b2ed3d38b5a522ac2be7016bcfab4e\": container with ID starting with 3609e88e7e846a5700ff87aa4de00bdb30b2ed3d38b5a522ac2be7016bcfab4e not found: ID does not exist" containerID="3609e88e7e846a5700ff87aa4de00bdb30b2ed3d38b5a522ac2be7016bcfab4e" Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.308203 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3609e88e7e846a5700ff87aa4de00bdb30b2ed3d38b5a522ac2be7016bcfab4e"} err="failed to get container status \"3609e88e7e846a5700ff87aa4de00bdb30b2ed3d38b5a522ac2be7016bcfab4e\": rpc error: code = NotFound desc = could not find container \"3609e88e7e846a5700ff87aa4de00bdb30b2ed3d38b5a522ac2be7016bcfab4e\": container with ID starting with 3609e88e7e846a5700ff87aa4de00bdb30b2ed3d38b5a522ac2be7016bcfab4e not found: ID does not exist" Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.308232 4558 scope.go:117] "RemoveContainer" containerID="1dbb1da8df3c6566ca04bcf0b706784344cc35acf44b110edf2ebb922a294075" Jan 20 17:41:25 crc kubenswrapper[4558]: E0120 17:41:25.308470 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1dbb1da8df3c6566ca04bcf0b706784344cc35acf44b110edf2ebb922a294075\": container with ID starting with 1dbb1da8df3c6566ca04bcf0b706784344cc35acf44b110edf2ebb922a294075 not found: ID does not exist" containerID="1dbb1da8df3c6566ca04bcf0b706784344cc35acf44b110edf2ebb922a294075" Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.308487 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1dbb1da8df3c6566ca04bcf0b706784344cc35acf44b110edf2ebb922a294075"} err="failed to get container status \"1dbb1da8df3c6566ca04bcf0b706784344cc35acf44b110edf2ebb922a294075\": rpc error: code = NotFound desc = could not find container \"1dbb1da8df3c6566ca04bcf0b706784344cc35acf44b110edf2ebb922a294075\": container with ID starting with 1dbb1da8df3c6566ca04bcf0b706784344cc35acf44b110edf2ebb922a294075 not found: ID does not exist" Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.308500 4558 scope.go:117] "RemoveContainer" containerID="cb0a2516a41647bb2bb1488a113f812f6e540d08b05595a733b72dbe2a77f2c6" Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.310492 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:41:25 crc kubenswrapper[4558]: E0120 17:41:25.311222 4558 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab" containerName="nova-metadata-metadata" Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.311246 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab" containerName="nova-metadata-metadata" Jan 20 17:41:25 crc kubenswrapper[4558]: E0120 17:41:25.311289 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab" containerName="nova-metadata-log" Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.311297 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab" containerName="nova-metadata-log" Jan 20 17:41:25 crc kubenswrapper[4558]: E0120 17:41:25.311324 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2661e9c6-af76-4c45-89ec-bb2ab34b8c2c" containerName="nova-api-log" Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.311331 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="2661e9c6-af76-4c45-89ec-bb2ab34b8c2c" containerName="nova-api-log" Jan 20 17:41:25 crc kubenswrapper[4558]: E0120 17:41:25.311345 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2661e9c6-af76-4c45-89ec-bb2ab34b8c2c" containerName="nova-api-api" Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.311352 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="2661e9c6-af76-4c45-89ec-bb2ab34b8c2c" containerName="nova-api-api" Jan 20 17:41:25 crc kubenswrapper[4558]: E0120 17:41:25.311366 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e48cd5c7-5acf-4683-b6eb-6947b7b56706" containerName="nova-manage" Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.311375 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e48cd5c7-5acf-4683-b6eb-6947b7b56706" containerName="nova-manage" Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.311754 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="2661e9c6-af76-4c45-89ec-bb2ab34b8c2c" containerName="nova-api-api" Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.311787 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e48cd5c7-5acf-4683-b6eb-6947b7b56706" containerName="nova-manage" Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.311797 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab" containerName="nova-metadata-log" Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.311820 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="2661e9c6-af76-4c45-89ec-bb2ab34b8c2c" containerName="nova-api-log" Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.311834 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab" containerName="nova-metadata-metadata" Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.313648 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.316147 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-metadata-internal-svc" Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.316639 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-metadata-config-data" Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.322862 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z6lv5\" (UniqueName: \"kubernetes.io/projected/a18f9da2-9c64-4047-ad02-206ac9c8aa61-kube-api-access-z6lv5\") pod \"nova-metadata-0\" (UID: \"a18f9da2-9c64-4047-ad02-206ac9c8aa61\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.323034 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a18f9da2-9c64-4047-ad02-206ac9c8aa61-logs\") pod \"nova-metadata-0\" (UID: \"a18f9da2-9c64-4047-ad02-206ac9c8aa61\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.323071 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/a18f9da2-9c64-4047-ad02-206ac9c8aa61-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"a18f9da2-9c64-4047-ad02-206ac9c8aa61\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.323111 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a18f9da2-9c64-4047-ad02-206ac9c8aa61-config-data\") pod \"nova-metadata-0\" (UID: \"a18f9da2-9c64-4047-ad02-206ac9c8aa61\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.323150 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a18f9da2-9c64-4047-ad02-206ac9c8aa61-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"a18f9da2-9c64-4047-ad02-206ac9c8aa61\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.333216 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.338224 4558 scope.go:117] "RemoveContainer" containerID="7ad1958802b19065958c6eb9a7103c2fd61e01ab422d0552f8116077ffdaf939" Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.361441 4558 scope.go:117] "RemoveContainer" containerID="cb0a2516a41647bb2bb1488a113f812f6e540d08b05595a733b72dbe2a77f2c6" Jan 20 17:41:25 crc kubenswrapper[4558]: E0120 17:41:25.362025 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cb0a2516a41647bb2bb1488a113f812f6e540d08b05595a733b72dbe2a77f2c6\": container with ID starting with cb0a2516a41647bb2bb1488a113f812f6e540d08b05595a733b72dbe2a77f2c6 not found: ID does not exist" containerID="cb0a2516a41647bb2bb1488a113f812f6e540d08b05595a733b72dbe2a77f2c6" Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.362077 4558 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"cb0a2516a41647bb2bb1488a113f812f6e540d08b05595a733b72dbe2a77f2c6"} err="failed to get container status \"cb0a2516a41647bb2bb1488a113f812f6e540d08b05595a733b72dbe2a77f2c6\": rpc error: code = NotFound desc = could not find container \"cb0a2516a41647bb2bb1488a113f812f6e540d08b05595a733b72dbe2a77f2c6\": container with ID starting with cb0a2516a41647bb2bb1488a113f812f6e540d08b05595a733b72dbe2a77f2c6 not found: ID does not exist" Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.362115 4558 scope.go:117] "RemoveContainer" containerID="7ad1958802b19065958c6eb9a7103c2fd61e01ab422d0552f8116077ffdaf939" Jan 20 17:41:25 crc kubenswrapper[4558]: E0120 17:41:25.362546 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7ad1958802b19065958c6eb9a7103c2fd61e01ab422d0552f8116077ffdaf939\": container with ID starting with 7ad1958802b19065958c6eb9a7103c2fd61e01ab422d0552f8116077ffdaf939 not found: ID does not exist" containerID="7ad1958802b19065958c6eb9a7103c2fd61e01ab422d0552f8116077ffdaf939" Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.362596 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7ad1958802b19065958c6eb9a7103c2fd61e01ab422d0552f8116077ffdaf939"} err="failed to get container status \"7ad1958802b19065958c6eb9a7103c2fd61e01ab422d0552f8116077ffdaf939\": rpc error: code = NotFound desc = could not find container \"7ad1958802b19065958c6eb9a7103c2fd61e01ab422d0552f8116077ffdaf939\": container with ID starting with 7ad1958802b19065958c6eb9a7103c2fd61e01ab422d0552f8116077ffdaf939 not found: ID does not exist" Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.424824 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a18f9da2-9c64-4047-ad02-206ac9c8aa61-logs\") pod \"nova-metadata-0\" (UID: \"a18f9da2-9c64-4047-ad02-206ac9c8aa61\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.424868 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/a18f9da2-9c64-4047-ad02-206ac9c8aa61-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"a18f9da2-9c64-4047-ad02-206ac9c8aa61\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.424918 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a18f9da2-9c64-4047-ad02-206ac9c8aa61-config-data\") pod \"nova-metadata-0\" (UID: \"a18f9da2-9c64-4047-ad02-206ac9c8aa61\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.424954 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a18f9da2-9c64-4047-ad02-206ac9c8aa61-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"a18f9da2-9c64-4047-ad02-206ac9c8aa61\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.425096 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z6lv5\" (UniqueName: \"kubernetes.io/projected/a18f9da2-9c64-4047-ad02-206ac9c8aa61-kube-api-access-z6lv5\") pod \"nova-metadata-0\" (UID: \"a18f9da2-9c64-4047-ad02-206ac9c8aa61\") " 
pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.425360 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a18f9da2-9c64-4047-ad02-206ac9c8aa61-logs\") pod \"nova-metadata-0\" (UID: \"a18f9da2-9c64-4047-ad02-206ac9c8aa61\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.431208 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a18f9da2-9c64-4047-ad02-206ac9c8aa61-config-data\") pod \"nova-metadata-0\" (UID: \"a18f9da2-9c64-4047-ad02-206ac9c8aa61\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.431721 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a18f9da2-9c64-4047-ad02-206ac9c8aa61-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"a18f9da2-9c64-4047-ad02-206ac9c8aa61\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.431948 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/a18f9da2-9c64-4047-ad02-206ac9c8aa61-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"a18f9da2-9c64-4047-ad02-206ac9c8aa61\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.441072 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z6lv5\" (UniqueName: \"kubernetes.io/projected/a18f9da2-9c64-4047-ad02-206ac9c8aa61-kube-api-access-z6lv5\") pod \"nova-metadata-0\" (UID: \"a18f9da2-9c64-4047-ad02-206ac9c8aa61\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.629924 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2661e9c6-af76-4c45-89ec-bb2ab34b8c2c-combined-ca-bundle\") pod \"2661e9c6-af76-4c45-89ec-bb2ab34b8c2c\" (UID: \"2661e9c6-af76-4c45-89ec-bb2ab34b8c2c\") " Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.633253 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.634313 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2661e9c6-af76-4c45-89ec-bb2ab34b8c2c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2661e9c6-af76-4c45-89ec-bb2ab34b8c2c" (UID: "2661e9c6-af76-4c45-89ec-bb2ab34b8c2c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.733789 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2661e9c6-af76-4c45-89ec-bb2ab34b8c2c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.877212 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.886778 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.893534 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.901833 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.903716 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-api-config-data" Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.920677 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.937856 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/daadab43-a59b-4a2e-8f07-0e0dc38c2ace-config-data\") pod \"nova-api-0\" (UID: \"daadab43-a59b-4a2e-8f07-0e0dc38c2ace\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.937953 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/daadab43-a59b-4a2e-8f07-0e0dc38c2ace-logs\") pod \"nova-api-0\" (UID: \"daadab43-a59b-4a2e-8f07-0e0dc38c2ace\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.938196 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gn29p\" (UniqueName: \"kubernetes.io/projected/daadab43-a59b-4a2e-8f07-0e0dc38c2ace-kube-api-access-gn29p\") pod \"nova-api-0\" (UID: \"daadab43-a59b-4a2e-8f07-0e0dc38c2ace\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:41:25 crc kubenswrapper[4558]: I0120 17:41:25.938439 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/daadab43-a59b-4a2e-8f07-0e0dc38c2ace-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"daadab43-a59b-4a2e-8f07-0e0dc38c2ace\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:41:26 crc kubenswrapper[4558]: I0120 17:41:26.040415 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gn29p\" (UniqueName: \"kubernetes.io/projected/daadab43-a59b-4a2e-8f07-0e0dc38c2ace-kube-api-access-gn29p\") pod \"nova-api-0\" (UID: \"daadab43-a59b-4a2e-8f07-0e0dc38c2ace\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:41:26 crc kubenswrapper[4558]: I0120 17:41:26.040593 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/daadab43-a59b-4a2e-8f07-0e0dc38c2ace-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"daadab43-a59b-4a2e-8f07-0e0dc38c2ace\") " 
pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:41:26 crc kubenswrapper[4558]: I0120 17:41:26.040635 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/daadab43-a59b-4a2e-8f07-0e0dc38c2ace-config-data\") pod \"nova-api-0\" (UID: \"daadab43-a59b-4a2e-8f07-0e0dc38c2ace\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:41:26 crc kubenswrapper[4558]: I0120 17:41:26.040673 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/daadab43-a59b-4a2e-8f07-0e0dc38c2ace-logs\") pod \"nova-api-0\" (UID: \"daadab43-a59b-4a2e-8f07-0e0dc38c2ace\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:41:26 crc kubenswrapper[4558]: I0120 17:41:26.041091 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/daadab43-a59b-4a2e-8f07-0e0dc38c2ace-logs\") pod \"nova-api-0\" (UID: \"daadab43-a59b-4a2e-8f07-0e0dc38c2ace\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:41:26 crc kubenswrapper[4558]: I0120 17:41:26.048795 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/daadab43-a59b-4a2e-8f07-0e0dc38c2ace-config-data\") pod \"nova-api-0\" (UID: \"daadab43-a59b-4a2e-8f07-0e0dc38c2ace\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:41:26 crc kubenswrapper[4558]: I0120 17:41:26.052748 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/daadab43-a59b-4a2e-8f07-0e0dc38c2ace-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"daadab43-a59b-4a2e-8f07-0e0dc38c2ace\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:41:26 crc kubenswrapper[4558]: I0120 17:41:26.058136 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gn29p\" (UniqueName: \"kubernetes.io/projected/daadab43-a59b-4a2e-8f07-0e0dc38c2ace-kube-api-access-gn29p\") pod \"nova-api-0\" (UID: \"daadab43-a59b-4a2e-8f07-0e0dc38c2ace\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:41:26 crc kubenswrapper[4558]: I0120 17:41:26.059021 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:41:26 crc kubenswrapper[4558]: W0120 17:41:26.059694 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda18f9da2_9c64_4047_ad02_206ac9c8aa61.slice/crio-be02008edfea8ba65046d300ba5e2d6958232a81128fd832badf6ad34ab5cf4e WatchSource:0}: Error finding container be02008edfea8ba65046d300ba5e2d6958232a81128fd832badf6ad34ab5cf4e: Status 404 returned error can't find the container with id be02008edfea8ba65046d300ba5e2d6958232a81128fd832badf6ad34ab5cf4e Jan 20 17:41:26 crc kubenswrapper[4558]: I0120 17:41:26.132359 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:41:26 crc kubenswrapper[4558]: I0120 17:41:26.141663 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gj87n\" (UniqueName: \"kubernetes.io/projected/9694d2f6-3a30-4509-be7e-3fe6b84c8b74-kube-api-access-gj87n\") pod \"9694d2f6-3a30-4509-be7e-3fe6b84c8b74\" (UID: \"9694d2f6-3a30-4509-be7e-3fe6b84c8b74\") " Jan 20 17:41:26 crc kubenswrapper[4558]: I0120 17:41:26.141992 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9694d2f6-3a30-4509-be7e-3fe6b84c8b74-combined-ca-bundle\") pod \"9694d2f6-3a30-4509-be7e-3fe6b84c8b74\" (UID: \"9694d2f6-3a30-4509-be7e-3fe6b84c8b74\") " Jan 20 17:41:26 crc kubenswrapper[4558]: I0120 17:41:26.142034 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9694d2f6-3a30-4509-be7e-3fe6b84c8b74-config-data\") pod \"9694d2f6-3a30-4509-be7e-3fe6b84c8b74\" (UID: \"9694d2f6-3a30-4509-be7e-3fe6b84c8b74\") " Jan 20 17:41:26 crc kubenswrapper[4558]: I0120 17:41:26.146277 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9694d2f6-3a30-4509-be7e-3fe6b84c8b74-kube-api-access-gj87n" (OuterVolumeSpecName: "kube-api-access-gj87n") pod "9694d2f6-3a30-4509-be7e-3fe6b84c8b74" (UID: "9694d2f6-3a30-4509-be7e-3fe6b84c8b74"). InnerVolumeSpecName "kube-api-access-gj87n". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:41:26 crc kubenswrapper[4558]: I0120 17:41:26.174371 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9694d2f6-3a30-4509-be7e-3fe6b84c8b74-config-data" (OuterVolumeSpecName: "config-data") pod "9694d2f6-3a30-4509-be7e-3fe6b84c8b74" (UID: "9694d2f6-3a30-4509-be7e-3fe6b84c8b74"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:41:26 crc kubenswrapper[4558]: I0120 17:41:26.175793 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9694d2f6-3a30-4509-be7e-3fe6b84c8b74-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9694d2f6-3a30-4509-be7e-3fe6b84c8b74" (UID: "9694d2f6-3a30-4509-be7e-3fe6b84c8b74"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:41:26 crc kubenswrapper[4558]: I0120 17:41:26.224364 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:41:26 crc kubenswrapper[4558]: I0120 17:41:26.244900 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9694d2f6-3a30-4509-be7e-3fe6b84c8b74-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:26 crc kubenswrapper[4558]: I0120 17:41:26.244965 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9694d2f6-3a30-4509-be7e-3fe6b84c8b74-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:26 crc kubenswrapper[4558]: I0120 17:41:26.244982 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gj87n\" (UniqueName: \"kubernetes.io/projected/9694d2f6-3a30-4509-be7e-3fe6b84c8b74-kube-api-access-gj87n\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:26 crc kubenswrapper[4558]: I0120 17:41:26.246400 4558 generic.go:334] "Generic (PLEG): container finished" podID="9694d2f6-3a30-4509-be7e-3fe6b84c8b74" containerID="28b93481b5264a09172ae2999ca4a5c8e1f3d6a68e2da4f930566d30a227619f" exitCode=0 Jan 20 17:41:26 crc kubenswrapper[4558]: I0120 17:41:26.246470 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:41:26 crc kubenswrapper[4558]: I0120 17:41:26.246469 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"9694d2f6-3a30-4509-be7e-3fe6b84c8b74","Type":"ContainerDied","Data":"28b93481b5264a09172ae2999ca4a5c8e1f3d6a68e2da4f930566d30a227619f"} Jan 20 17:41:26 crc kubenswrapper[4558]: I0120 17:41:26.246628 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"9694d2f6-3a30-4509-be7e-3fe6b84c8b74","Type":"ContainerDied","Data":"f572af2e7a0c5e22088730e406cbaa7a2a2173452eae223a19633fa8765028c0"} Jan 20 17:41:26 crc kubenswrapper[4558]: I0120 17:41:26.246669 4558 scope.go:117] "RemoveContainer" containerID="28b93481b5264a09172ae2999ca4a5c8e1f3d6a68e2da4f930566d30a227619f" Jan 20 17:41:26 crc kubenswrapper[4558]: I0120 17:41:26.251336 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"a18f9da2-9c64-4047-ad02-206ac9c8aa61","Type":"ContainerStarted","Data":"b1459a5f2896c316432b666d6a44cd9116c1172c341136db96a48899a0dd4884"} Jan 20 17:41:26 crc kubenswrapper[4558]: I0120 17:41:26.251373 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"a18f9da2-9c64-4047-ad02-206ac9c8aa61","Type":"ContainerStarted","Data":"be02008edfea8ba65046d300ba5e2d6958232a81128fd832badf6ad34ab5cf4e"} Jan 20 17:41:26 crc kubenswrapper[4558]: I0120 17:41:26.286831 4558 scope.go:117] "RemoveContainer" containerID="28b93481b5264a09172ae2999ca4a5c8e1f3d6a68e2da4f930566d30a227619f" Jan 20 17:41:26 crc kubenswrapper[4558]: E0120 17:41:26.288470 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"28b93481b5264a09172ae2999ca4a5c8e1f3d6a68e2da4f930566d30a227619f\": container with ID starting with 28b93481b5264a09172ae2999ca4a5c8e1f3d6a68e2da4f930566d30a227619f not found: ID does not exist" containerID="28b93481b5264a09172ae2999ca4a5c8e1f3d6a68e2da4f930566d30a227619f" Jan 20 17:41:26 crc kubenswrapper[4558]: I0120 17:41:26.288504 4558 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"28b93481b5264a09172ae2999ca4a5c8e1f3d6a68e2da4f930566d30a227619f"} err="failed to get container status \"28b93481b5264a09172ae2999ca4a5c8e1f3d6a68e2da4f930566d30a227619f\": rpc error: code = NotFound desc = could not find container \"28b93481b5264a09172ae2999ca4a5c8e1f3d6a68e2da4f930566d30a227619f\": container with ID starting with 28b93481b5264a09172ae2999ca4a5c8e1f3d6a68e2da4f930566d30a227619f not found: ID does not exist" Jan 20 17:41:26 crc kubenswrapper[4558]: I0120 17:41:26.291193 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:41:26 crc kubenswrapper[4558]: I0120 17:41:26.302078 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:41:26 crc kubenswrapper[4558]: I0120 17:41:26.308202 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:41:26 crc kubenswrapper[4558]: E0120 17:41:26.308702 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9694d2f6-3a30-4509-be7e-3fe6b84c8b74" containerName="nova-scheduler-scheduler" Jan 20 17:41:26 crc kubenswrapper[4558]: I0120 17:41:26.308724 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9694d2f6-3a30-4509-be7e-3fe6b84c8b74" containerName="nova-scheduler-scheduler" Jan 20 17:41:26 crc kubenswrapper[4558]: I0120 17:41:26.308973 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="9694d2f6-3a30-4509-be7e-3fe6b84c8b74" containerName="nova-scheduler-scheduler" Jan 20 17:41:26 crc kubenswrapper[4558]: I0120 17:41:26.309782 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:41:26 crc kubenswrapper[4558]: I0120 17:41:26.313208 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-scheduler-config-data" Jan 20 17:41:26 crc kubenswrapper[4558]: I0120 17:41:26.314649 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:41:26 crc kubenswrapper[4558]: I0120 17:41:26.346748 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea5dfbd0-37c4-46a1-afad-77e85e35d7fb-config-data\") pod \"nova-scheduler-0\" (UID: \"ea5dfbd0-37c4-46a1-afad-77e85e35d7fb\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:41:26 crc kubenswrapper[4558]: I0120 17:41:26.346808 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r78kw\" (UniqueName: \"kubernetes.io/projected/ea5dfbd0-37c4-46a1-afad-77e85e35d7fb-kube-api-access-r78kw\") pod \"nova-scheduler-0\" (UID: \"ea5dfbd0-37c4-46a1-afad-77e85e35d7fb\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:41:26 crc kubenswrapper[4558]: I0120 17:41:26.346845 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea5dfbd0-37c4-46a1-afad-77e85e35d7fb-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"ea5dfbd0-37c4-46a1-afad-77e85e35d7fb\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:41:26 crc kubenswrapper[4558]: I0120 17:41:26.448459 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/ea5dfbd0-37c4-46a1-afad-77e85e35d7fb-config-data\") pod \"nova-scheduler-0\" (UID: \"ea5dfbd0-37c4-46a1-afad-77e85e35d7fb\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:41:26 crc kubenswrapper[4558]: I0120 17:41:26.448817 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r78kw\" (UniqueName: \"kubernetes.io/projected/ea5dfbd0-37c4-46a1-afad-77e85e35d7fb-kube-api-access-r78kw\") pod \"nova-scheduler-0\" (UID: \"ea5dfbd0-37c4-46a1-afad-77e85e35d7fb\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:41:26 crc kubenswrapper[4558]: I0120 17:41:26.448848 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea5dfbd0-37c4-46a1-afad-77e85e35d7fb-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"ea5dfbd0-37c4-46a1-afad-77e85e35d7fb\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:41:26 crc kubenswrapper[4558]: I0120 17:41:26.453096 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea5dfbd0-37c4-46a1-afad-77e85e35d7fb-config-data\") pod \"nova-scheduler-0\" (UID: \"ea5dfbd0-37c4-46a1-afad-77e85e35d7fb\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:41:26 crc kubenswrapper[4558]: I0120 17:41:26.460536 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea5dfbd0-37c4-46a1-afad-77e85e35d7fb-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"ea5dfbd0-37c4-46a1-afad-77e85e35d7fb\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:41:26 crc kubenswrapper[4558]: I0120 17:41:26.464996 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r78kw\" (UniqueName: \"kubernetes.io/projected/ea5dfbd0-37c4-46a1-afad-77e85e35d7fb-kube-api-access-r78kw\") pod \"nova-scheduler-0\" (UID: \"ea5dfbd0-37c4-46a1-afad-77e85e35d7fb\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:41:26 crc kubenswrapper[4558]: I0120 17:41:26.586533 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2661e9c6-af76-4c45-89ec-bb2ab34b8c2c" path="/var/lib/kubelet/pods/2661e9c6-af76-4c45-89ec-bb2ab34b8c2c/volumes" Jan 20 17:41:26 crc kubenswrapper[4558]: I0120 17:41:26.587647 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9694d2f6-3a30-4509-be7e-3fe6b84c8b74" path="/var/lib/kubelet/pods/9694d2f6-3a30-4509-be7e-3fe6b84c8b74/volumes" Jan 20 17:41:26 crc kubenswrapper[4558]: I0120 17:41:26.588372 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab" path="/var/lib/kubelet/pods/9c5a6884-1d6c-48c1-ad01-fe2f9e1f32ab/volumes" Jan 20 17:41:26 crc kubenswrapper[4558]: I0120 17:41:26.631527 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:41:26 crc kubenswrapper[4558]: W0120 17:41:26.692856 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddaadab43_a59b_4a2e_8f07_0e0dc38c2ace.slice/crio-ad54fb20edc645dfec3bbeababec236b9df6fe2bf4fa6e6e0d6e93c34426db5a WatchSource:0}: Error finding container ad54fb20edc645dfec3bbeababec236b9df6fe2bf4fa6e6e0d6e93c34426db5a: Status 404 returned error can't find the container with id ad54fb20edc645dfec3bbeababec236b9df6fe2bf4fa6e6e0d6e93c34426db5a Jan 20 17:41:26 crc kubenswrapper[4558]: I0120 17:41:26.693800 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:41:27 crc kubenswrapper[4558]: I0120 17:41:27.047994 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:41:27 crc kubenswrapper[4558]: W0120 17:41:27.049305 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podea5dfbd0_37c4_46a1_afad_77e85e35d7fb.slice/crio-c062dc392074c9545555c522024a0a6b381d8f2010ab4c9aa6cba39a5637d18b WatchSource:0}: Error finding container c062dc392074c9545555c522024a0a6b381d8f2010ab4c9aa6cba39a5637d18b: Status 404 returned error can't find the container with id c062dc392074c9545555c522024a0a6b381d8f2010ab4c9aa6cba39a5637d18b Jan 20 17:41:27 crc kubenswrapper[4558]: I0120 17:41:27.264490 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"ea5dfbd0-37c4-46a1-afad-77e85e35d7fb","Type":"ContainerStarted","Data":"42c29ec80666322d27f85a6e6798fa5fcabfb0b124569ce2fbc426f02c3bf8fe"} Jan 20 17:41:27 crc kubenswrapper[4558]: I0120 17:41:27.264858 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"ea5dfbd0-37c4-46a1-afad-77e85e35d7fb","Type":"ContainerStarted","Data":"c062dc392074c9545555c522024a0a6b381d8f2010ab4c9aa6cba39a5637d18b"} Jan 20 17:41:27 crc kubenswrapper[4558]: I0120 17:41:27.278212 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"daadab43-a59b-4a2e-8f07-0e0dc38c2ace","Type":"ContainerStarted","Data":"38738b311a04b17fa7cb39640d4340a666a6e6d37b6c1d98146a92fd4c467786"} Jan 20 17:41:27 crc kubenswrapper[4558]: I0120 17:41:27.278245 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"daadab43-a59b-4a2e-8f07-0e0dc38c2ace","Type":"ContainerStarted","Data":"32d95cf62a8b0b26c56920fb7a65430e15d38f235bd166479a0e88da90bf77fb"} Jan 20 17:41:27 crc kubenswrapper[4558]: I0120 17:41:27.278255 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"daadab43-a59b-4a2e-8f07-0e0dc38c2ace","Type":"ContainerStarted","Data":"ad54fb20edc645dfec3bbeababec236b9df6fe2bf4fa6e6e0d6e93c34426db5a"} Jan 20 17:41:27 crc kubenswrapper[4558]: I0120 17:41:27.282610 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-scheduler-0" podStartSLOduration=1.282596709 podStartE2EDuration="1.282596709s" podCreationTimestamp="2026-01-20 17:41:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:41:27.28149119 +0000 UTC m=+3581.041829157" watchObservedRunningTime="2026-01-20 
17:41:27.282596709 +0000 UTC m=+3581.042934676" Jan 20 17:41:27 crc kubenswrapper[4558]: I0120 17:41:27.285257 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"a18f9da2-9c64-4047-ad02-206ac9c8aa61","Type":"ContainerStarted","Data":"b93052a48246c97ba463ca4f80d69e9a79fa1ad0762a5e959855ffc2d454361b"} Jan 20 17:41:27 crc kubenswrapper[4558]: I0120 17:41:27.301837 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-api-0" podStartSLOduration=2.301817479 podStartE2EDuration="2.301817479s" podCreationTimestamp="2026-01-20 17:41:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:41:27.294002695 +0000 UTC m=+3581.054340651" watchObservedRunningTime="2026-01-20 17:41:27.301817479 +0000 UTC m=+3581.062155446" Jan 20 17:41:27 crc kubenswrapper[4558]: I0120 17:41:27.319760 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-metadata-0" podStartSLOduration=2.319742903 podStartE2EDuration="2.319742903s" podCreationTimestamp="2026-01-20 17:41:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:41:27.310432898 +0000 UTC m=+3581.070770864" watchObservedRunningTime="2026-01-20 17:41:27.319742903 +0000 UTC m=+3581.080080870" Jan 20 17:41:30 crc kubenswrapper[4558]: I0120 17:41:30.634192 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:41:30 crc kubenswrapper[4558]: I0120 17:41:30.634501 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:41:31 crc kubenswrapper[4558]: I0120 17:41:31.258290 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:31 crc kubenswrapper[4558]: I0120 17:41:31.632426 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:41:34 crc kubenswrapper[4558]: I0120 17:41:34.439069 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:41:34 crc kubenswrapper[4558]: I0120 17:41:34.439593 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/kube-state-metrics-0" podUID="d0c8cc0b-d00f-4b57-ab57-c70a68ff043a" containerName="kube-state-metrics" containerID="cri-o://14660d2bf1b03430df88f4eb5caeb6c51bea9e40847e3711aa7dcc49d73097fc" gracePeriod=30 Jan 20 17:41:34 crc kubenswrapper[4558]: I0120 17:41:34.566891 4558 scope.go:117] "RemoveContainer" containerID="2d2a8989ca5a5af98b2c9dc8f801ea0675339ec14800f437c058c5a43c20146b" Jan 20 17:41:34 crc kubenswrapper[4558]: E0120 17:41:34.567908 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:41:34 crc kubenswrapper[4558]: I0120 17:41:34.857607 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:41:34 crc kubenswrapper[4558]: I0120 17:41:34.948965 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bb8mv\" (UniqueName: \"kubernetes.io/projected/d0c8cc0b-d00f-4b57-ab57-c70a68ff043a-kube-api-access-bb8mv\") pod \"d0c8cc0b-d00f-4b57-ab57-c70a68ff043a\" (UID: \"d0c8cc0b-d00f-4b57-ab57-c70a68ff043a\") " Jan 20 17:41:34 crc kubenswrapper[4558]: I0120 17:41:34.962703 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d0c8cc0b-d00f-4b57-ab57-c70a68ff043a-kube-api-access-bb8mv" (OuterVolumeSpecName: "kube-api-access-bb8mv") pod "d0c8cc0b-d00f-4b57-ab57-c70a68ff043a" (UID: "d0c8cc0b-d00f-4b57-ab57-c70a68ff043a"). InnerVolumeSpecName "kube-api-access-bb8mv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:41:35 crc kubenswrapper[4558]: I0120 17:41:35.053306 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bb8mv\" (UniqueName: \"kubernetes.io/projected/d0c8cc0b-d00f-4b57-ab57-c70a68ff043a-kube-api-access-bb8mv\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:35 crc kubenswrapper[4558]: I0120 17:41:35.358293 4558 generic.go:334] "Generic (PLEG): container finished" podID="d0c8cc0b-d00f-4b57-ab57-c70a68ff043a" containerID="14660d2bf1b03430df88f4eb5caeb6c51bea9e40847e3711aa7dcc49d73097fc" exitCode=2 Jan 20 17:41:35 crc kubenswrapper[4558]: I0120 17:41:35.358342 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"d0c8cc0b-d00f-4b57-ab57-c70a68ff043a","Type":"ContainerDied","Data":"14660d2bf1b03430df88f4eb5caeb6c51bea9e40847e3711aa7dcc49d73097fc"} Jan 20 17:41:35 crc kubenswrapper[4558]: I0120 17:41:35.358373 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"d0c8cc0b-d00f-4b57-ab57-c70a68ff043a","Type":"ContainerDied","Data":"b50f2f1e1ea94daf051f102c8a2333bc6f7926df4c7722562f05ec4cea9124bc"} Jan 20 17:41:35 crc kubenswrapper[4558]: I0120 17:41:35.358398 4558 scope.go:117] "RemoveContainer" containerID="14660d2bf1b03430df88f4eb5caeb6c51bea9e40847e3711aa7dcc49d73097fc" Jan 20 17:41:35 crc kubenswrapper[4558]: I0120 17:41:35.358917 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:41:35 crc kubenswrapper[4558]: I0120 17:41:35.389160 4558 scope.go:117] "RemoveContainer" containerID="14660d2bf1b03430df88f4eb5caeb6c51bea9e40847e3711aa7dcc49d73097fc" Jan 20 17:41:35 crc kubenswrapper[4558]: E0120 17:41:35.389630 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"14660d2bf1b03430df88f4eb5caeb6c51bea9e40847e3711aa7dcc49d73097fc\": container with ID starting with 14660d2bf1b03430df88f4eb5caeb6c51bea9e40847e3711aa7dcc49d73097fc not found: ID does not exist" containerID="14660d2bf1b03430df88f4eb5caeb6c51bea9e40847e3711aa7dcc49d73097fc" Jan 20 17:41:35 crc kubenswrapper[4558]: I0120 17:41:35.389727 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"14660d2bf1b03430df88f4eb5caeb6c51bea9e40847e3711aa7dcc49d73097fc"} err="failed to get container status \"14660d2bf1b03430df88f4eb5caeb6c51bea9e40847e3711aa7dcc49d73097fc\": rpc error: code = NotFound desc = could not find container \"14660d2bf1b03430df88f4eb5caeb6c51bea9e40847e3711aa7dcc49d73097fc\": container with ID starting with 14660d2bf1b03430df88f4eb5caeb6c51bea9e40847e3711aa7dcc49d73097fc not found: ID does not exist" Jan 20 17:41:35 crc kubenswrapper[4558]: I0120 17:41:35.391719 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:41:35 crc kubenswrapper[4558]: I0120 17:41:35.410035 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:41:35 crc kubenswrapper[4558]: I0120 17:41:35.423217 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:41:35 crc kubenswrapper[4558]: E0120 17:41:35.423775 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d0c8cc0b-d00f-4b57-ab57-c70a68ff043a" containerName="kube-state-metrics" Jan 20 17:41:35 crc kubenswrapper[4558]: I0120 17:41:35.423797 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0c8cc0b-d00f-4b57-ab57-c70a68ff043a" containerName="kube-state-metrics" Jan 20 17:41:35 crc kubenswrapper[4558]: I0120 17:41:35.424011 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d0c8cc0b-d00f-4b57-ab57-c70a68ff043a" containerName="kube-state-metrics" Jan 20 17:41:35 crc kubenswrapper[4558]: I0120 17:41:35.424754 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:41:35 crc kubenswrapper[4558]: I0120 17:41:35.427049 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"kube-state-metrics-tls-config" Jan 20 17:41:35 crc kubenswrapper[4558]: I0120 17:41:35.427118 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-kube-state-metrics-svc" Jan 20 17:41:35 crc kubenswrapper[4558]: I0120 17:41:35.427853 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:41:35 crc kubenswrapper[4558]: I0120 17:41:35.462369 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4affc06-2032-4a14-8422-3c3ca984eada-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"f4affc06-2032-4a14-8422-3c3ca984eada\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:41:35 crc kubenswrapper[4558]: I0120 17:41:35.462411 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8xqht\" (UniqueName: \"kubernetes.io/projected/f4affc06-2032-4a14-8422-3c3ca984eada-kube-api-access-8xqht\") pod \"kube-state-metrics-0\" (UID: \"f4affc06-2032-4a14-8422-3c3ca984eada\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:41:35 crc kubenswrapper[4558]: I0120 17:41:35.462435 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/f4affc06-2032-4a14-8422-3c3ca984eada-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"f4affc06-2032-4a14-8422-3c3ca984eada\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:41:35 crc kubenswrapper[4558]: I0120 17:41:35.462460 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/f4affc06-2032-4a14-8422-3c3ca984eada-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"f4affc06-2032-4a14-8422-3c3ca984eada\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:41:35 crc kubenswrapper[4558]: I0120 17:41:35.564768 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4affc06-2032-4a14-8422-3c3ca984eada-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"f4affc06-2032-4a14-8422-3c3ca984eada\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:41:35 crc kubenswrapper[4558]: I0120 17:41:35.564823 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8xqht\" (UniqueName: \"kubernetes.io/projected/f4affc06-2032-4a14-8422-3c3ca984eada-kube-api-access-8xqht\") pod \"kube-state-metrics-0\" (UID: \"f4affc06-2032-4a14-8422-3c3ca984eada\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:41:35 crc kubenswrapper[4558]: I0120 17:41:35.564856 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/f4affc06-2032-4a14-8422-3c3ca984eada-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"f4affc06-2032-4a14-8422-3c3ca984eada\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:41:35 crc kubenswrapper[4558]: I0120 
17:41:35.564909 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/f4affc06-2032-4a14-8422-3c3ca984eada-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"f4affc06-2032-4a14-8422-3c3ca984eada\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:41:35 crc kubenswrapper[4558]: I0120 17:41:35.575056 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4affc06-2032-4a14-8422-3c3ca984eada-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"f4affc06-2032-4a14-8422-3c3ca984eada\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:41:35 crc kubenswrapper[4558]: I0120 17:41:35.575500 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/f4affc06-2032-4a14-8422-3c3ca984eada-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"f4affc06-2032-4a14-8422-3c3ca984eada\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:41:35 crc kubenswrapper[4558]: I0120 17:41:35.578189 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/f4affc06-2032-4a14-8422-3c3ca984eada-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"f4affc06-2032-4a14-8422-3c3ca984eada\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:41:35 crc kubenswrapper[4558]: I0120 17:41:35.584611 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8xqht\" (UniqueName: \"kubernetes.io/projected/f4affc06-2032-4a14-8422-3c3ca984eada-kube-api-access-8xqht\") pod \"kube-state-metrics-0\" (UID: \"f4affc06-2032-4a14-8422-3c3ca984eada\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:41:35 crc kubenswrapper[4558]: I0120 17:41:35.634269 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:41:35 crc kubenswrapper[4558]: I0120 17:41:35.634309 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:41:35 crc kubenswrapper[4558]: I0120 17:41:35.741316 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:41:35 crc kubenswrapper[4558]: I0120 17:41:35.976890 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:41:35 crc kubenswrapper[4558]: I0120 17:41:35.977564 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="9056548c-189a-4152-9f2f-bc242e3d4065" containerName="ceilometer-central-agent" containerID="cri-o://effecb70e8c8831ca8d03c15fd3b51392f6b4c26b8ce5779c07930278ade516b" gracePeriod=30 Jan 20 17:41:35 crc kubenswrapper[4558]: I0120 17:41:35.977633 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="9056548c-189a-4152-9f2f-bc242e3d4065" containerName="proxy-httpd" containerID="cri-o://9c2a9892c01f04cdd0fc01158c87b79e229ccc934e665e4307e05ca274eb8df6" gracePeriod=30 Jan 20 17:41:35 crc kubenswrapper[4558]: I0120 17:41:35.977751 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="9056548c-189a-4152-9f2f-bc242e3d4065" containerName="sg-core" containerID="cri-o://dd5eac1f346485e04a58a6704a4682b1b992d72c5da31bf96b25e9aa2bc3e97d" gracePeriod=30 Jan 20 17:41:35 crc kubenswrapper[4558]: I0120 17:41:35.977796 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="9056548c-189a-4152-9f2f-bc242e3d4065" containerName="ceilometer-notification-agent" containerID="cri-o://a4df86dc40ccb68556c1da99ff22e880e7e4c218b8777739f8a44091f82a17ae" gracePeriod=30 Jan 20 17:41:36 crc kubenswrapper[4558]: I0120 17:41:36.153767 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:41:36 crc kubenswrapper[4558]: I0120 17:41:36.224555 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:41:36 crc kubenswrapper[4558]: I0120 17:41:36.224902 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:41:36 crc kubenswrapper[4558]: I0120 17:41:36.373363 4558 generic.go:334] "Generic (PLEG): container finished" podID="9056548c-189a-4152-9f2f-bc242e3d4065" containerID="9c2a9892c01f04cdd0fc01158c87b79e229ccc934e665e4307e05ca274eb8df6" exitCode=0 Jan 20 17:41:36 crc kubenswrapper[4558]: I0120 17:41:36.373644 4558 generic.go:334] "Generic (PLEG): container finished" podID="9056548c-189a-4152-9f2f-bc242e3d4065" containerID="dd5eac1f346485e04a58a6704a4682b1b992d72c5da31bf96b25e9aa2bc3e97d" exitCode=2 Jan 20 17:41:36 crc kubenswrapper[4558]: I0120 17:41:36.373655 4558 generic.go:334] "Generic (PLEG): container finished" podID="9056548c-189a-4152-9f2f-bc242e3d4065" containerID="effecb70e8c8831ca8d03c15fd3b51392f6b4c26b8ce5779c07930278ade516b" exitCode=0 Jan 20 17:41:36 crc kubenswrapper[4558]: I0120 17:41:36.373446 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"9056548c-189a-4152-9f2f-bc242e3d4065","Type":"ContainerDied","Data":"9c2a9892c01f04cdd0fc01158c87b79e229ccc934e665e4307e05ca274eb8df6"} Jan 20 17:41:36 crc kubenswrapper[4558]: I0120 17:41:36.373755 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" 
event={"ID":"9056548c-189a-4152-9f2f-bc242e3d4065","Type":"ContainerDied","Data":"dd5eac1f346485e04a58a6704a4682b1b992d72c5da31bf96b25e9aa2bc3e97d"} Jan 20 17:41:36 crc kubenswrapper[4558]: I0120 17:41:36.373774 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"9056548c-189a-4152-9f2f-bc242e3d4065","Type":"ContainerDied","Data":"effecb70e8c8831ca8d03c15fd3b51392f6b4c26b8ce5779c07930278ade516b"} Jan 20 17:41:36 crc kubenswrapper[4558]: I0120 17:41:36.375201 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"f4affc06-2032-4a14-8422-3c3ca984eada","Type":"ContainerStarted","Data":"639f935172ba8b884326d86075b718cbd7e3189750b38bbae2060f934ee19043"} Jan 20 17:41:36 crc kubenswrapper[4558]: I0120 17:41:36.586858 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d0c8cc0b-d00f-4b57-ab57-c70a68ff043a" path="/var/lib/kubelet/pods/d0c8cc0b-d00f-4b57-ab57-c70a68ff043a/volumes" Jan 20 17:41:36 crc kubenswrapper[4558]: I0120 17:41:36.632705 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:41:36 crc kubenswrapper[4558]: I0120 17:41:36.657332 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-metadata-0" podUID="a18f9da2-9c64-4047-ad02-206ac9c8aa61" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.1.2:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 20 17:41:36 crc kubenswrapper[4558]: I0120 17:41:36.657693 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-metadata-0" podUID="a18f9da2-9c64-4047-ad02-206ac9c8aa61" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.1.2:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:41:36 crc kubenswrapper[4558]: I0120 17:41:36.683540 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:41:37 crc kubenswrapper[4558]: I0120 17:41:37.307427 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" podUID="daadab43-a59b-4a2e-8f07-0e0dc38c2ace" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.1.3:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 20 17:41:37 crc kubenswrapper[4558]: I0120 17:41:37.307451 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" podUID="daadab43-a59b-4a2e-8f07-0e0dc38c2ace" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.1.3:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 20 17:41:37 crc kubenswrapper[4558]: I0120 17:41:37.386089 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"f4affc06-2032-4a14-8422-3c3ca984eada","Type":"ContainerStarted","Data":"3550557600f4dd7782ea6add771b6cef4f6b4aa0a7058c39c4e8a2f7f0f9a980"} Jan 20 17:41:37 crc kubenswrapper[4558]: I0120 17:41:37.414443 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/kube-state-metrics-0" podStartSLOduration=2.132514001 podStartE2EDuration="2.414423949s" podCreationTimestamp="2026-01-20 17:41:35 +0000 UTC" 
firstStartedPulling="2026-01-20 17:41:36.156332176 +0000 UTC m=+3589.916670143" lastFinishedPulling="2026-01-20 17:41:36.438242125 +0000 UTC m=+3590.198580091" observedRunningTime="2026-01-20 17:41:37.399699133 +0000 UTC m=+3591.160037100" watchObservedRunningTime="2026-01-20 17:41:37.414423949 +0000 UTC m=+3591.174761917" Jan 20 17:41:37 crc kubenswrapper[4558]: I0120 17:41:37.416356 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:41:38 crc kubenswrapper[4558]: I0120 17:41:38.393010 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:41:38 crc kubenswrapper[4558]: I0120 17:41:38.734116 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:38 crc kubenswrapper[4558]: I0120 17:41:38.855649 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mfcsr\" (UniqueName: \"kubernetes.io/projected/9056548c-189a-4152-9f2f-bc242e3d4065-kube-api-access-mfcsr\") pod \"9056548c-189a-4152-9f2f-bc242e3d4065\" (UID: \"9056548c-189a-4152-9f2f-bc242e3d4065\") " Jan 20 17:41:38 crc kubenswrapper[4558]: I0120 17:41:38.855759 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9056548c-189a-4152-9f2f-bc242e3d4065-run-httpd\") pod \"9056548c-189a-4152-9f2f-bc242e3d4065\" (UID: \"9056548c-189a-4152-9f2f-bc242e3d4065\") " Jan 20 17:41:38 crc kubenswrapper[4558]: I0120 17:41:38.855832 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9056548c-189a-4152-9f2f-bc242e3d4065-log-httpd\") pod \"9056548c-189a-4152-9f2f-bc242e3d4065\" (UID: \"9056548c-189a-4152-9f2f-bc242e3d4065\") " Jan 20 17:41:38 crc kubenswrapper[4558]: I0120 17:41:38.855855 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9056548c-189a-4152-9f2f-bc242e3d4065-combined-ca-bundle\") pod \"9056548c-189a-4152-9f2f-bc242e3d4065\" (UID: \"9056548c-189a-4152-9f2f-bc242e3d4065\") " Jan 20 17:41:38 crc kubenswrapper[4558]: I0120 17:41:38.856138 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9056548c-189a-4152-9f2f-bc242e3d4065-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "9056548c-189a-4152-9f2f-bc242e3d4065" (UID: "9056548c-189a-4152-9f2f-bc242e3d4065"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:41:38 crc kubenswrapper[4558]: I0120 17:41:38.856286 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9056548c-189a-4152-9f2f-bc242e3d4065-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "9056548c-189a-4152-9f2f-bc242e3d4065" (UID: "9056548c-189a-4152-9f2f-bc242e3d4065"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:41:38 crc kubenswrapper[4558]: I0120 17:41:38.856358 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9056548c-189a-4152-9f2f-bc242e3d4065-sg-core-conf-yaml\") pod \"9056548c-189a-4152-9f2f-bc242e3d4065\" (UID: \"9056548c-189a-4152-9f2f-bc242e3d4065\") " Jan 20 17:41:38 crc kubenswrapper[4558]: I0120 17:41:38.856392 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9056548c-189a-4152-9f2f-bc242e3d4065-scripts\") pod \"9056548c-189a-4152-9f2f-bc242e3d4065\" (UID: \"9056548c-189a-4152-9f2f-bc242e3d4065\") " Jan 20 17:41:38 crc kubenswrapper[4558]: I0120 17:41:38.856769 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9056548c-189a-4152-9f2f-bc242e3d4065-config-data\") pod \"9056548c-189a-4152-9f2f-bc242e3d4065\" (UID: \"9056548c-189a-4152-9f2f-bc242e3d4065\") " Jan 20 17:41:38 crc kubenswrapper[4558]: I0120 17:41:38.857081 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9056548c-189a-4152-9f2f-bc242e3d4065-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:38 crc kubenswrapper[4558]: I0120 17:41:38.857100 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/9056548c-189a-4152-9f2f-bc242e3d4065-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:38 crc kubenswrapper[4558]: I0120 17:41:38.862343 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9056548c-189a-4152-9f2f-bc242e3d4065-kube-api-access-mfcsr" (OuterVolumeSpecName: "kube-api-access-mfcsr") pod "9056548c-189a-4152-9f2f-bc242e3d4065" (UID: "9056548c-189a-4152-9f2f-bc242e3d4065"). InnerVolumeSpecName "kube-api-access-mfcsr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:41:38 crc kubenswrapper[4558]: I0120 17:41:38.862528 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9056548c-189a-4152-9f2f-bc242e3d4065-scripts" (OuterVolumeSpecName: "scripts") pod "9056548c-189a-4152-9f2f-bc242e3d4065" (UID: "9056548c-189a-4152-9f2f-bc242e3d4065"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:41:38 crc kubenswrapper[4558]: I0120 17:41:38.885213 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9056548c-189a-4152-9f2f-bc242e3d4065-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "9056548c-189a-4152-9f2f-bc242e3d4065" (UID: "9056548c-189a-4152-9f2f-bc242e3d4065"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:41:38 crc kubenswrapper[4558]: I0120 17:41:38.917666 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9056548c-189a-4152-9f2f-bc242e3d4065-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9056548c-189a-4152-9f2f-bc242e3d4065" (UID: "9056548c-189a-4152-9f2f-bc242e3d4065"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:41:38 crc kubenswrapper[4558]: I0120 17:41:38.932685 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9056548c-189a-4152-9f2f-bc242e3d4065-config-data" (OuterVolumeSpecName: "config-data") pod "9056548c-189a-4152-9f2f-bc242e3d4065" (UID: "9056548c-189a-4152-9f2f-bc242e3d4065"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:41:38 crc kubenswrapper[4558]: I0120 17:41:38.960296 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9056548c-189a-4152-9f2f-bc242e3d4065-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:38 crc kubenswrapper[4558]: I0120 17:41:38.960332 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mfcsr\" (UniqueName: \"kubernetes.io/projected/9056548c-189a-4152-9f2f-bc242e3d4065-kube-api-access-mfcsr\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:38 crc kubenswrapper[4558]: I0120 17:41:38.960346 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9056548c-189a-4152-9f2f-bc242e3d4065-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:38 crc kubenswrapper[4558]: I0120 17:41:38.960357 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/9056548c-189a-4152-9f2f-bc242e3d4065-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:38 crc kubenswrapper[4558]: I0120 17:41:38.960370 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9056548c-189a-4152-9f2f-bc242e3d4065-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:39 crc kubenswrapper[4558]: I0120 17:41:39.404495 4558 generic.go:334] "Generic (PLEG): container finished" podID="9056548c-189a-4152-9f2f-bc242e3d4065" containerID="a4df86dc40ccb68556c1da99ff22e880e7e4c218b8777739f8a44091f82a17ae" exitCode=0 Jan 20 17:41:39 crc kubenswrapper[4558]: I0120 17:41:39.404561 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"9056548c-189a-4152-9f2f-bc242e3d4065","Type":"ContainerDied","Data":"a4df86dc40ccb68556c1da99ff22e880e7e4c218b8777739f8a44091f82a17ae"} Jan 20 17:41:39 crc kubenswrapper[4558]: I0120 17:41:39.404931 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"9056548c-189a-4152-9f2f-bc242e3d4065","Type":"ContainerDied","Data":"8f2b5dbae6c64c19e8835dfe696c1cc6670c91916ab96ba2b2f118a5ad7d74b5"} Jan 20 17:41:39 crc kubenswrapper[4558]: I0120 17:41:39.404968 4558 scope.go:117] "RemoveContainer" containerID="9c2a9892c01f04cdd0fc01158c87b79e229ccc934e665e4307e05ca274eb8df6" Jan 20 17:41:39 crc kubenswrapper[4558]: I0120 17:41:39.404596 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:39 crc kubenswrapper[4558]: I0120 17:41:39.441490 4558 scope.go:117] "RemoveContainer" containerID="dd5eac1f346485e04a58a6704a4682b1b992d72c5da31bf96b25e9aa2bc3e97d" Jan 20 17:41:39 crc kubenswrapper[4558]: I0120 17:41:39.451235 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:41:39 crc kubenswrapper[4558]: I0120 17:41:39.460346 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:41:39 crc kubenswrapper[4558]: I0120 17:41:39.470697 4558 scope.go:117] "RemoveContainer" containerID="a4df86dc40ccb68556c1da99ff22e880e7e4c218b8777739f8a44091f82a17ae" Jan 20 17:41:39 crc kubenswrapper[4558]: I0120 17:41:39.488329 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:41:39 crc kubenswrapper[4558]: E0120 17:41:39.488778 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9056548c-189a-4152-9f2f-bc242e3d4065" containerName="sg-core" Jan 20 17:41:39 crc kubenswrapper[4558]: I0120 17:41:39.488799 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9056548c-189a-4152-9f2f-bc242e3d4065" containerName="sg-core" Jan 20 17:41:39 crc kubenswrapper[4558]: E0120 17:41:39.488830 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9056548c-189a-4152-9f2f-bc242e3d4065" containerName="ceilometer-notification-agent" Jan 20 17:41:39 crc kubenswrapper[4558]: I0120 17:41:39.488837 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9056548c-189a-4152-9f2f-bc242e3d4065" containerName="ceilometer-notification-agent" Jan 20 17:41:39 crc kubenswrapper[4558]: E0120 17:41:39.488852 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9056548c-189a-4152-9f2f-bc242e3d4065" containerName="ceilometer-central-agent" Jan 20 17:41:39 crc kubenswrapper[4558]: I0120 17:41:39.488859 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9056548c-189a-4152-9f2f-bc242e3d4065" containerName="ceilometer-central-agent" Jan 20 17:41:39 crc kubenswrapper[4558]: E0120 17:41:39.488877 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9056548c-189a-4152-9f2f-bc242e3d4065" containerName="proxy-httpd" Jan 20 17:41:39 crc kubenswrapper[4558]: I0120 17:41:39.488882 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9056548c-189a-4152-9f2f-bc242e3d4065" containerName="proxy-httpd" Jan 20 17:41:39 crc kubenswrapper[4558]: I0120 17:41:39.489049 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="9056548c-189a-4152-9f2f-bc242e3d4065" containerName="ceilometer-notification-agent" Jan 20 17:41:39 crc kubenswrapper[4558]: I0120 17:41:39.489072 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="9056548c-189a-4152-9f2f-bc242e3d4065" containerName="ceilometer-central-agent" Jan 20 17:41:39 crc kubenswrapper[4558]: I0120 17:41:39.489080 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="9056548c-189a-4152-9f2f-bc242e3d4065" containerName="proxy-httpd" Jan 20 17:41:39 crc kubenswrapper[4558]: I0120 17:41:39.489093 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="9056548c-189a-4152-9f2f-bc242e3d4065" containerName="sg-core" Jan 20 17:41:39 crc kubenswrapper[4558]: I0120 17:41:39.490644 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:39 crc kubenswrapper[4558]: I0120 17:41:39.496631 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:41:39 crc kubenswrapper[4558]: I0120 17:41:39.496958 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:41:39 crc kubenswrapper[4558]: I0120 17:41:39.498690 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ceilometer-internal-svc" Jan 20 17:41:39 crc kubenswrapper[4558]: I0120 17:41:39.505404 4558 scope.go:117] "RemoveContainer" containerID="effecb70e8c8831ca8d03c15fd3b51392f6b4c26b8ce5779c07930278ade516b" Jan 20 17:41:39 crc kubenswrapper[4558]: I0120 17:41:39.511379 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:41:39 crc kubenswrapper[4558]: I0120 17:41:39.529245 4558 scope.go:117] "RemoveContainer" containerID="9c2a9892c01f04cdd0fc01158c87b79e229ccc934e665e4307e05ca274eb8df6" Jan 20 17:41:39 crc kubenswrapper[4558]: E0120 17:41:39.529666 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9c2a9892c01f04cdd0fc01158c87b79e229ccc934e665e4307e05ca274eb8df6\": container with ID starting with 9c2a9892c01f04cdd0fc01158c87b79e229ccc934e665e4307e05ca274eb8df6 not found: ID does not exist" containerID="9c2a9892c01f04cdd0fc01158c87b79e229ccc934e665e4307e05ca274eb8df6" Jan 20 17:41:39 crc kubenswrapper[4558]: I0120 17:41:39.529704 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9c2a9892c01f04cdd0fc01158c87b79e229ccc934e665e4307e05ca274eb8df6"} err="failed to get container status \"9c2a9892c01f04cdd0fc01158c87b79e229ccc934e665e4307e05ca274eb8df6\": rpc error: code = NotFound desc = could not find container \"9c2a9892c01f04cdd0fc01158c87b79e229ccc934e665e4307e05ca274eb8df6\": container with ID starting with 9c2a9892c01f04cdd0fc01158c87b79e229ccc934e665e4307e05ca274eb8df6 not found: ID does not exist" Jan 20 17:41:39 crc kubenswrapper[4558]: I0120 17:41:39.529727 4558 scope.go:117] "RemoveContainer" containerID="dd5eac1f346485e04a58a6704a4682b1b992d72c5da31bf96b25e9aa2bc3e97d" Jan 20 17:41:39 crc kubenswrapper[4558]: E0120 17:41:39.530229 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dd5eac1f346485e04a58a6704a4682b1b992d72c5da31bf96b25e9aa2bc3e97d\": container with ID starting with dd5eac1f346485e04a58a6704a4682b1b992d72c5da31bf96b25e9aa2bc3e97d not found: ID does not exist" containerID="dd5eac1f346485e04a58a6704a4682b1b992d72c5da31bf96b25e9aa2bc3e97d" Jan 20 17:41:39 crc kubenswrapper[4558]: I0120 17:41:39.530286 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dd5eac1f346485e04a58a6704a4682b1b992d72c5da31bf96b25e9aa2bc3e97d"} err="failed to get container status \"dd5eac1f346485e04a58a6704a4682b1b992d72c5da31bf96b25e9aa2bc3e97d\": rpc error: code = NotFound desc = could not find container \"dd5eac1f346485e04a58a6704a4682b1b992d72c5da31bf96b25e9aa2bc3e97d\": container with ID starting with dd5eac1f346485e04a58a6704a4682b1b992d72c5da31bf96b25e9aa2bc3e97d not found: ID does not exist" Jan 20 17:41:39 crc kubenswrapper[4558]: I0120 17:41:39.530320 4558 scope.go:117] "RemoveContainer" 
containerID="a4df86dc40ccb68556c1da99ff22e880e7e4c218b8777739f8a44091f82a17ae" Jan 20 17:41:39 crc kubenswrapper[4558]: E0120 17:41:39.530600 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a4df86dc40ccb68556c1da99ff22e880e7e4c218b8777739f8a44091f82a17ae\": container with ID starting with a4df86dc40ccb68556c1da99ff22e880e7e4c218b8777739f8a44091f82a17ae not found: ID does not exist" containerID="a4df86dc40ccb68556c1da99ff22e880e7e4c218b8777739f8a44091f82a17ae" Jan 20 17:41:39 crc kubenswrapper[4558]: I0120 17:41:39.530627 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a4df86dc40ccb68556c1da99ff22e880e7e4c218b8777739f8a44091f82a17ae"} err="failed to get container status \"a4df86dc40ccb68556c1da99ff22e880e7e4c218b8777739f8a44091f82a17ae\": rpc error: code = NotFound desc = could not find container \"a4df86dc40ccb68556c1da99ff22e880e7e4c218b8777739f8a44091f82a17ae\": container with ID starting with a4df86dc40ccb68556c1da99ff22e880e7e4c218b8777739f8a44091f82a17ae not found: ID does not exist" Jan 20 17:41:39 crc kubenswrapper[4558]: I0120 17:41:39.530647 4558 scope.go:117] "RemoveContainer" containerID="effecb70e8c8831ca8d03c15fd3b51392f6b4c26b8ce5779c07930278ade516b" Jan 20 17:41:39 crc kubenswrapper[4558]: E0120 17:41:39.530920 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"effecb70e8c8831ca8d03c15fd3b51392f6b4c26b8ce5779c07930278ade516b\": container with ID starting with effecb70e8c8831ca8d03c15fd3b51392f6b4c26b8ce5779c07930278ade516b not found: ID does not exist" containerID="effecb70e8c8831ca8d03c15fd3b51392f6b4c26b8ce5779c07930278ade516b" Jan 20 17:41:39 crc kubenswrapper[4558]: I0120 17:41:39.530951 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"effecb70e8c8831ca8d03c15fd3b51392f6b4c26b8ce5779c07930278ade516b"} err="failed to get container status \"effecb70e8c8831ca8d03c15fd3b51392f6b4c26b8ce5779c07930278ade516b\": rpc error: code = NotFound desc = could not find container \"effecb70e8c8831ca8d03c15fd3b51392f6b4c26b8ce5779c07930278ade516b\": container with ID starting with effecb70e8c8831ca8d03c15fd3b51392f6b4c26b8ce5779c07930278ade516b not found: ID does not exist" Jan 20 17:41:39 crc kubenswrapper[4558]: I0120 17:41:39.574762 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db5ab617-437a-468f-bb76-c4c02d96190c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"db5ab617-437a-468f-bb76-c4c02d96190c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:39 crc kubenswrapper[4558]: I0120 17:41:39.574805 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db5ab617-437a-468f-bb76-c4c02d96190c-config-data\") pod \"ceilometer-0\" (UID: \"db5ab617-437a-468f-bb76-c4c02d96190c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:39 crc kubenswrapper[4558]: I0120 17:41:39.574832 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5w77t\" (UniqueName: \"kubernetes.io/projected/db5ab617-437a-468f-bb76-c4c02d96190c-kube-api-access-5w77t\") pod \"ceilometer-0\" (UID: \"db5ab617-437a-468f-bb76-c4c02d96190c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 
17:41:39 crc kubenswrapper[4558]: I0120 17:41:39.574939 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/db5ab617-437a-468f-bb76-c4c02d96190c-log-httpd\") pod \"ceilometer-0\" (UID: \"db5ab617-437a-468f-bb76-c4c02d96190c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:39 crc kubenswrapper[4558]: I0120 17:41:39.574976 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/db5ab617-437a-468f-bb76-c4c02d96190c-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"db5ab617-437a-468f-bb76-c4c02d96190c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:39 crc kubenswrapper[4558]: I0120 17:41:39.575014 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/db5ab617-437a-468f-bb76-c4c02d96190c-run-httpd\") pod \"ceilometer-0\" (UID: \"db5ab617-437a-468f-bb76-c4c02d96190c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:39 crc kubenswrapper[4558]: I0120 17:41:39.575565 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/db5ab617-437a-468f-bb76-c4c02d96190c-scripts\") pod \"ceilometer-0\" (UID: \"db5ab617-437a-468f-bb76-c4c02d96190c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:39 crc kubenswrapper[4558]: I0120 17:41:39.575867 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/db5ab617-437a-468f-bb76-c4c02d96190c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"db5ab617-437a-468f-bb76-c4c02d96190c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:39 crc kubenswrapper[4558]: I0120 17:41:39.677772 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/db5ab617-437a-468f-bb76-c4c02d96190c-scripts\") pod \"ceilometer-0\" (UID: \"db5ab617-437a-468f-bb76-c4c02d96190c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:39 crc kubenswrapper[4558]: I0120 17:41:39.678058 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/db5ab617-437a-468f-bb76-c4c02d96190c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"db5ab617-437a-468f-bb76-c4c02d96190c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:39 crc kubenswrapper[4558]: I0120 17:41:39.678215 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db5ab617-437a-468f-bb76-c4c02d96190c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"db5ab617-437a-468f-bb76-c4c02d96190c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:39 crc kubenswrapper[4558]: I0120 17:41:39.678319 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db5ab617-437a-468f-bb76-c4c02d96190c-config-data\") pod \"ceilometer-0\" (UID: \"db5ab617-437a-468f-bb76-c4c02d96190c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:39 crc kubenswrapper[4558]: I0120 17:41:39.678413 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5w77t\" (UniqueName: 
\"kubernetes.io/projected/db5ab617-437a-468f-bb76-c4c02d96190c-kube-api-access-5w77t\") pod \"ceilometer-0\" (UID: \"db5ab617-437a-468f-bb76-c4c02d96190c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:39 crc kubenswrapper[4558]: I0120 17:41:39.678505 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/db5ab617-437a-468f-bb76-c4c02d96190c-log-httpd\") pod \"ceilometer-0\" (UID: \"db5ab617-437a-468f-bb76-c4c02d96190c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:39 crc kubenswrapper[4558]: I0120 17:41:39.678593 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/db5ab617-437a-468f-bb76-c4c02d96190c-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"db5ab617-437a-468f-bb76-c4c02d96190c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:39 crc kubenswrapper[4558]: I0120 17:41:39.678692 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/db5ab617-437a-468f-bb76-c4c02d96190c-run-httpd\") pod \"ceilometer-0\" (UID: \"db5ab617-437a-468f-bb76-c4c02d96190c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:39 crc kubenswrapper[4558]: I0120 17:41:39.679132 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/db5ab617-437a-468f-bb76-c4c02d96190c-run-httpd\") pod \"ceilometer-0\" (UID: \"db5ab617-437a-468f-bb76-c4c02d96190c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:39 crc kubenswrapper[4558]: I0120 17:41:39.679241 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/db5ab617-437a-468f-bb76-c4c02d96190c-log-httpd\") pod \"ceilometer-0\" (UID: \"db5ab617-437a-468f-bb76-c4c02d96190c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:39 crc kubenswrapper[4558]: I0120 17:41:39.683193 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/db5ab617-437a-468f-bb76-c4c02d96190c-scripts\") pod \"ceilometer-0\" (UID: \"db5ab617-437a-468f-bb76-c4c02d96190c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:39 crc kubenswrapper[4558]: I0120 17:41:39.684067 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/db5ab617-437a-468f-bb76-c4c02d96190c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"db5ab617-437a-468f-bb76-c4c02d96190c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:39 crc kubenswrapper[4558]: I0120 17:41:39.684201 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db5ab617-437a-468f-bb76-c4c02d96190c-config-data\") pod \"ceilometer-0\" (UID: \"db5ab617-437a-468f-bb76-c4c02d96190c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:39 crc kubenswrapper[4558]: I0120 17:41:39.692812 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5w77t\" (UniqueName: \"kubernetes.io/projected/db5ab617-437a-468f-bb76-c4c02d96190c-kube-api-access-5w77t\") pod \"ceilometer-0\" (UID: \"db5ab617-437a-468f-bb76-c4c02d96190c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:39 crc kubenswrapper[4558]: I0120 17:41:39.697521 4558 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/db5ab617-437a-468f-bb76-c4c02d96190c-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"db5ab617-437a-468f-bb76-c4c02d96190c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:39 crc kubenswrapper[4558]: I0120 17:41:39.697714 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db5ab617-437a-468f-bb76-c4c02d96190c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"db5ab617-437a-468f-bb76-c4c02d96190c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:39 crc kubenswrapper[4558]: I0120 17:41:39.807523 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:40 crc kubenswrapper[4558]: I0120 17:41:40.214060 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:41:40 crc kubenswrapper[4558]: W0120 17:41:40.218256 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddb5ab617_437a_468f_bb76_c4c02d96190c.slice/crio-3a0dc90d8869ae097f548e98fb904ace5702d0210a2029359d1ea48175f78eae WatchSource:0}: Error finding container 3a0dc90d8869ae097f548e98fb904ace5702d0210a2029359d1ea48175f78eae: Status 404 returned error can't find the container with id 3a0dc90d8869ae097f548e98fb904ace5702d0210a2029359d1ea48175f78eae Jan 20 17:41:40 crc kubenswrapper[4558]: I0120 17:41:40.418907 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"db5ab617-437a-468f-bb76-c4c02d96190c","Type":"ContainerStarted","Data":"3a0dc90d8869ae097f548e98fb904ace5702d0210a2029359d1ea48175f78eae"} Jan 20 17:41:40 crc kubenswrapper[4558]: I0120 17:41:40.578816 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9056548c-189a-4152-9f2f-bc242e3d4065" path="/var/lib/kubelet/pods/9056548c-189a-4152-9f2f-bc242e3d4065/volumes" Jan 20 17:41:41 crc kubenswrapper[4558]: I0120 17:41:41.442812 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"db5ab617-437a-468f-bb76-c4c02d96190c","Type":"ContainerStarted","Data":"dd38a4fbd4be2117599754c25fd95f1a3aa1c17e4091c3a545f1ef336ba73cd1"} Jan 20 17:41:42 crc kubenswrapper[4558]: I0120 17:41:42.456903 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"db5ab617-437a-468f-bb76-c4c02d96190c","Type":"ContainerStarted","Data":"b6a8a8fe677dca3fb55a3087c114f807e73a3ddcc7414ef0a4614ec1fbdc5f09"} Jan 20 17:41:43 crc kubenswrapper[4558]: I0120 17:41:43.470774 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"db5ab617-437a-468f-bb76-c4c02d96190c","Type":"ContainerStarted","Data":"30b7e39adde9765f41737fc8e5ebdda365d48446701bf0021e92bfc419a85661"} Jan 20 17:41:44 crc kubenswrapper[4558]: I0120 17:41:44.483321 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"db5ab617-437a-468f-bb76-c4c02d96190c","Type":"ContainerStarted","Data":"386f50968d6c537976bd629cd725b50bf2f69e68fdc5fe86153821260b50b17d"} Jan 20 17:41:44 crc kubenswrapper[4558]: I0120 17:41:44.485931 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:44 crc kubenswrapper[4558]: I0120 17:41:44.527840 4558 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=1.937192579 podStartE2EDuration="5.527817352s" podCreationTimestamp="2026-01-20 17:41:39 +0000 UTC" firstStartedPulling="2026-01-20 17:41:40.222224647 +0000 UTC m=+3593.982562615" lastFinishedPulling="2026-01-20 17:41:43.81284942 +0000 UTC m=+3597.573187388" observedRunningTime="2026-01-20 17:41:44.510598567 +0000 UTC m=+3598.270936533" watchObservedRunningTime="2026-01-20 17:41:44.527817352 +0000 UTC m=+3598.288155319" Jan 20 17:41:45 crc kubenswrapper[4558]: I0120 17:41:45.646546 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:41:45 crc kubenswrapper[4558]: I0120 17:41:45.646625 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:41:45 crc kubenswrapper[4558]: I0120 17:41:45.653628 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:41:45 crc kubenswrapper[4558]: I0120 17:41:45.654929 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:41:45 crc kubenswrapper[4558]: I0120 17:41:45.751761 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:41:46 crc kubenswrapper[4558]: I0120 17:41:46.228346 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:41:46 crc kubenswrapper[4558]: I0120 17:41:46.228426 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:41:46 crc kubenswrapper[4558]: I0120 17:41:46.228929 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:41:46 crc kubenswrapper[4558]: I0120 17:41:46.228980 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:41:46 crc kubenswrapper[4558]: I0120 17:41:46.232235 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:41:46 crc kubenswrapper[4558]: I0120 17:41:46.235054 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:41:47 crc kubenswrapper[4558]: I0120 17:41:47.365027 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:41:47 crc kubenswrapper[4558]: I0120 17:41:47.533962 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="db5ab617-437a-468f-bb76-c4c02d96190c" containerName="ceilometer-central-agent" containerID="cri-o://dd38a4fbd4be2117599754c25fd95f1a3aa1c17e4091c3a545f1ef336ba73cd1" gracePeriod=30 Jan 20 17:41:47 crc kubenswrapper[4558]: I0120 17:41:47.534326 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="db5ab617-437a-468f-bb76-c4c02d96190c" containerName="sg-core" containerID="cri-o://30b7e39adde9765f41737fc8e5ebdda365d48446701bf0021e92bfc419a85661" gracePeriod=30 Jan 20 17:41:47 crc kubenswrapper[4558]: I0120 17:41:47.534519 4558 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack-kuttl-tests/ceilometer-0" podUID="db5ab617-437a-468f-bb76-c4c02d96190c" containerName="proxy-httpd" containerID="cri-o://386f50968d6c537976bd629cd725b50bf2f69e68fdc5fe86153821260b50b17d" gracePeriod=30 Jan 20 17:41:47 crc kubenswrapper[4558]: I0120 17:41:47.534655 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="db5ab617-437a-468f-bb76-c4c02d96190c" containerName="ceilometer-notification-agent" containerID="cri-o://b6a8a8fe677dca3fb55a3087c114f807e73a3ddcc7414ef0a4614ec1fbdc5f09" gracePeriod=30 Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.348458 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.482017 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/db5ab617-437a-468f-bb76-c4c02d96190c-run-httpd\") pod \"db5ab617-437a-468f-bb76-c4c02d96190c\" (UID: \"db5ab617-437a-468f-bb76-c4c02d96190c\") " Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.482132 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/db5ab617-437a-468f-bb76-c4c02d96190c-log-httpd\") pod \"db5ab617-437a-468f-bb76-c4c02d96190c\" (UID: \"db5ab617-437a-468f-bb76-c4c02d96190c\") " Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.482202 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/db5ab617-437a-468f-bb76-c4c02d96190c-scripts\") pod \"db5ab617-437a-468f-bb76-c4c02d96190c\" (UID: \"db5ab617-437a-468f-bb76-c4c02d96190c\") " Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.482268 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db5ab617-437a-468f-bb76-c4c02d96190c-config-data\") pod \"db5ab617-437a-468f-bb76-c4c02d96190c\" (UID: \"db5ab617-437a-468f-bb76-c4c02d96190c\") " Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.482301 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db5ab617-437a-468f-bb76-c4c02d96190c-combined-ca-bundle\") pod \"db5ab617-437a-468f-bb76-c4c02d96190c\" (UID: \"db5ab617-437a-468f-bb76-c4c02d96190c\") " Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.482489 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/db5ab617-437a-468f-bb76-c4c02d96190c-sg-core-conf-yaml\") pod \"db5ab617-437a-468f-bb76-c4c02d96190c\" (UID: \"db5ab617-437a-468f-bb76-c4c02d96190c\") " Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.482557 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5w77t\" (UniqueName: \"kubernetes.io/projected/db5ab617-437a-468f-bb76-c4c02d96190c-kube-api-access-5w77t\") pod \"db5ab617-437a-468f-bb76-c4c02d96190c\" (UID: \"db5ab617-437a-468f-bb76-c4c02d96190c\") " Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.482618 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/db5ab617-437a-468f-bb76-c4c02d96190c-ceilometer-tls-certs\") pod \"db5ab617-437a-468f-bb76-c4c02d96190c\" (UID: 
\"db5ab617-437a-468f-bb76-c4c02d96190c\") " Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.483641 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/db5ab617-437a-468f-bb76-c4c02d96190c-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "db5ab617-437a-468f-bb76-c4c02d96190c" (UID: "db5ab617-437a-468f-bb76-c4c02d96190c"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.483729 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/db5ab617-437a-468f-bb76-c4c02d96190c-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "db5ab617-437a-468f-bb76-c4c02d96190c" (UID: "db5ab617-437a-468f-bb76-c4c02d96190c"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.490525 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db5ab617-437a-468f-bb76-c4c02d96190c-scripts" (OuterVolumeSpecName: "scripts") pod "db5ab617-437a-468f-bb76-c4c02d96190c" (UID: "db5ab617-437a-468f-bb76-c4c02d96190c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.491328 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/db5ab617-437a-468f-bb76-c4c02d96190c-kube-api-access-5w77t" (OuterVolumeSpecName: "kube-api-access-5w77t") pod "db5ab617-437a-468f-bb76-c4c02d96190c" (UID: "db5ab617-437a-468f-bb76-c4c02d96190c"). InnerVolumeSpecName "kube-api-access-5w77t". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.512253 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db5ab617-437a-468f-bb76-c4c02d96190c-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "db5ab617-437a-468f-bb76-c4c02d96190c" (UID: "db5ab617-437a-468f-bb76-c4c02d96190c"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.537962 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db5ab617-437a-468f-bb76-c4c02d96190c-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "db5ab617-437a-468f-bb76-c4c02d96190c" (UID: "db5ab617-437a-468f-bb76-c4c02d96190c"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.548292 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db5ab617-437a-468f-bb76-c4c02d96190c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "db5ab617-437a-468f-bb76-c4c02d96190c" (UID: "db5ab617-437a-468f-bb76-c4c02d96190c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.548789 4558 generic.go:334] "Generic (PLEG): container finished" podID="db5ab617-437a-468f-bb76-c4c02d96190c" containerID="386f50968d6c537976bd629cd725b50bf2f69e68fdc5fe86153821260b50b17d" exitCode=0 Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.548816 4558 generic.go:334] "Generic (PLEG): container finished" podID="db5ab617-437a-468f-bb76-c4c02d96190c" containerID="30b7e39adde9765f41737fc8e5ebdda365d48446701bf0021e92bfc419a85661" exitCode=2 Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.548826 4558 generic.go:334] "Generic (PLEG): container finished" podID="db5ab617-437a-468f-bb76-c4c02d96190c" containerID="b6a8a8fe677dca3fb55a3087c114f807e73a3ddcc7414ef0a4614ec1fbdc5f09" exitCode=0 Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.548833 4558 generic.go:334] "Generic (PLEG): container finished" podID="db5ab617-437a-468f-bb76-c4c02d96190c" containerID="dd38a4fbd4be2117599754c25fd95f1a3aa1c17e4091c3a545f1ef336ba73cd1" exitCode=0 Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.548863 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"db5ab617-437a-468f-bb76-c4c02d96190c","Type":"ContainerDied","Data":"386f50968d6c537976bd629cd725b50bf2f69e68fdc5fe86153821260b50b17d"} Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.548912 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"db5ab617-437a-468f-bb76-c4c02d96190c","Type":"ContainerDied","Data":"30b7e39adde9765f41737fc8e5ebdda365d48446701bf0021e92bfc419a85661"} Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.548931 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"db5ab617-437a-468f-bb76-c4c02d96190c","Type":"ContainerDied","Data":"b6a8a8fe677dca3fb55a3087c114f807e73a3ddcc7414ef0a4614ec1fbdc5f09"} Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.548948 4558 scope.go:117] "RemoveContainer" containerID="386f50968d6c537976bd629cd725b50bf2f69e68fdc5fe86153821260b50b17d" Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.548955 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"db5ab617-437a-468f-bb76-c4c02d96190c","Type":"ContainerDied","Data":"dd38a4fbd4be2117599754c25fd95f1a3aa1c17e4091c3a545f1ef336ba73cd1"} Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.549143 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"db5ab617-437a-468f-bb76-c4c02d96190c","Type":"ContainerDied","Data":"3a0dc90d8869ae097f548e98fb904ace5702d0210a2029359d1ea48175f78eae"} Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.548906 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.566772 4558 scope.go:117] "RemoveContainer" containerID="2d2a8989ca5a5af98b2c9dc8f801ea0675339ec14800f437c058c5a43c20146b" Jan 20 17:41:48 crc kubenswrapper[4558]: E0120 17:41:48.567242 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.569565 4558 scope.go:117] "RemoveContainer" containerID="30b7e39adde9765f41737fc8e5ebdda365d48446701bf0021e92bfc419a85661" Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.575106 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db5ab617-437a-468f-bb76-c4c02d96190c-config-data" (OuterVolumeSpecName: "config-data") pod "db5ab617-437a-468f-bb76-c4c02d96190c" (UID: "db5ab617-437a-468f-bb76-c4c02d96190c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.587004 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db5ab617-437a-468f-bb76-c4c02d96190c-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.587116 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db5ab617-437a-468f-bb76-c4c02d96190c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.587202 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/db5ab617-437a-468f-bb76-c4c02d96190c-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.587270 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5w77t\" (UniqueName: \"kubernetes.io/projected/db5ab617-437a-468f-bb76-c4c02d96190c-kube-api-access-5w77t\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.587325 4558 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/db5ab617-437a-468f-bb76-c4c02d96190c-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.587376 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/db5ab617-437a-468f-bb76-c4c02d96190c-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.587424 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/db5ab617-437a-468f-bb76-c4c02d96190c-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.587485 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/db5ab617-437a-468f-bb76-c4c02d96190c-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.615186 4558 scope.go:117] 
"RemoveContainer" containerID="b6a8a8fe677dca3fb55a3087c114f807e73a3ddcc7414ef0a4614ec1fbdc5f09" Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.674307 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.674598 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="daadab43-a59b-4a2e-8f07-0e0dc38c2ace" containerName="nova-api-log" containerID="cri-o://32d95cf62a8b0b26c56920fb7a65430e15d38f235bd166479a0e88da90bf77fb" gracePeriod=30 Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.674730 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="daadab43-a59b-4a2e-8f07-0e0dc38c2ace" containerName="nova-api-api" containerID="cri-o://38738b311a04b17fa7cb39640d4340a666a6e6d37b6c1d98146a92fd4c467786" gracePeriod=30 Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.678363 4558 scope.go:117] "RemoveContainer" containerID="dd38a4fbd4be2117599754c25fd95f1a3aa1c17e4091c3a545f1ef336ba73cd1" Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.737378 4558 scope.go:117] "RemoveContainer" containerID="386f50968d6c537976bd629cd725b50bf2f69e68fdc5fe86153821260b50b17d" Jan 20 17:41:48 crc kubenswrapper[4558]: E0120 17:41:48.738130 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"386f50968d6c537976bd629cd725b50bf2f69e68fdc5fe86153821260b50b17d\": container with ID starting with 386f50968d6c537976bd629cd725b50bf2f69e68fdc5fe86153821260b50b17d not found: ID does not exist" containerID="386f50968d6c537976bd629cd725b50bf2f69e68fdc5fe86153821260b50b17d" Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.738202 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"386f50968d6c537976bd629cd725b50bf2f69e68fdc5fe86153821260b50b17d"} err="failed to get container status \"386f50968d6c537976bd629cd725b50bf2f69e68fdc5fe86153821260b50b17d\": rpc error: code = NotFound desc = could not find container \"386f50968d6c537976bd629cd725b50bf2f69e68fdc5fe86153821260b50b17d\": container with ID starting with 386f50968d6c537976bd629cd725b50bf2f69e68fdc5fe86153821260b50b17d not found: ID does not exist" Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.738234 4558 scope.go:117] "RemoveContainer" containerID="30b7e39adde9765f41737fc8e5ebdda365d48446701bf0021e92bfc419a85661" Jan 20 17:41:48 crc kubenswrapper[4558]: E0120 17:41:48.738641 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"30b7e39adde9765f41737fc8e5ebdda365d48446701bf0021e92bfc419a85661\": container with ID starting with 30b7e39adde9765f41737fc8e5ebdda365d48446701bf0021e92bfc419a85661 not found: ID does not exist" containerID="30b7e39adde9765f41737fc8e5ebdda365d48446701bf0021e92bfc419a85661" Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.738682 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"30b7e39adde9765f41737fc8e5ebdda365d48446701bf0021e92bfc419a85661"} err="failed to get container status \"30b7e39adde9765f41737fc8e5ebdda365d48446701bf0021e92bfc419a85661\": rpc error: code = NotFound desc = could not find container \"30b7e39adde9765f41737fc8e5ebdda365d48446701bf0021e92bfc419a85661\": container with ID starting with 
30b7e39adde9765f41737fc8e5ebdda365d48446701bf0021e92bfc419a85661 not found: ID does not exist" Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.738708 4558 scope.go:117] "RemoveContainer" containerID="b6a8a8fe677dca3fb55a3087c114f807e73a3ddcc7414ef0a4614ec1fbdc5f09" Jan 20 17:41:48 crc kubenswrapper[4558]: E0120 17:41:48.739034 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b6a8a8fe677dca3fb55a3087c114f807e73a3ddcc7414ef0a4614ec1fbdc5f09\": container with ID starting with b6a8a8fe677dca3fb55a3087c114f807e73a3ddcc7414ef0a4614ec1fbdc5f09 not found: ID does not exist" containerID="b6a8a8fe677dca3fb55a3087c114f807e73a3ddcc7414ef0a4614ec1fbdc5f09" Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.739072 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b6a8a8fe677dca3fb55a3087c114f807e73a3ddcc7414ef0a4614ec1fbdc5f09"} err="failed to get container status \"b6a8a8fe677dca3fb55a3087c114f807e73a3ddcc7414ef0a4614ec1fbdc5f09\": rpc error: code = NotFound desc = could not find container \"b6a8a8fe677dca3fb55a3087c114f807e73a3ddcc7414ef0a4614ec1fbdc5f09\": container with ID starting with b6a8a8fe677dca3fb55a3087c114f807e73a3ddcc7414ef0a4614ec1fbdc5f09 not found: ID does not exist" Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.739102 4558 scope.go:117] "RemoveContainer" containerID="dd38a4fbd4be2117599754c25fd95f1a3aa1c17e4091c3a545f1ef336ba73cd1" Jan 20 17:41:48 crc kubenswrapper[4558]: E0120 17:41:48.739432 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dd38a4fbd4be2117599754c25fd95f1a3aa1c17e4091c3a545f1ef336ba73cd1\": container with ID starting with dd38a4fbd4be2117599754c25fd95f1a3aa1c17e4091c3a545f1ef336ba73cd1 not found: ID does not exist" containerID="dd38a4fbd4be2117599754c25fd95f1a3aa1c17e4091c3a545f1ef336ba73cd1" Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.739479 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dd38a4fbd4be2117599754c25fd95f1a3aa1c17e4091c3a545f1ef336ba73cd1"} err="failed to get container status \"dd38a4fbd4be2117599754c25fd95f1a3aa1c17e4091c3a545f1ef336ba73cd1\": rpc error: code = NotFound desc = could not find container \"dd38a4fbd4be2117599754c25fd95f1a3aa1c17e4091c3a545f1ef336ba73cd1\": container with ID starting with dd38a4fbd4be2117599754c25fd95f1a3aa1c17e4091c3a545f1ef336ba73cd1 not found: ID does not exist" Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.739499 4558 scope.go:117] "RemoveContainer" containerID="386f50968d6c537976bd629cd725b50bf2f69e68fdc5fe86153821260b50b17d" Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.739877 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"386f50968d6c537976bd629cd725b50bf2f69e68fdc5fe86153821260b50b17d"} err="failed to get container status \"386f50968d6c537976bd629cd725b50bf2f69e68fdc5fe86153821260b50b17d\": rpc error: code = NotFound desc = could not find container \"386f50968d6c537976bd629cd725b50bf2f69e68fdc5fe86153821260b50b17d\": container with ID starting with 386f50968d6c537976bd629cd725b50bf2f69e68fdc5fe86153821260b50b17d not found: ID does not exist" Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.739914 4558 scope.go:117] "RemoveContainer" containerID="30b7e39adde9765f41737fc8e5ebdda365d48446701bf0021e92bfc419a85661" Jan 20 17:41:48 crc 
kubenswrapper[4558]: I0120 17:41:48.740212 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"30b7e39adde9765f41737fc8e5ebdda365d48446701bf0021e92bfc419a85661"} err="failed to get container status \"30b7e39adde9765f41737fc8e5ebdda365d48446701bf0021e92bfc419a85661\": rpc error: code = NotFound desc = could not find container \"30b7e39adde9765f41737fc8e5ebdda365d48446701bf0021e92bfc419a85661\": container with ID starting with 30b7e39adde9765f41737fc8e5ebdda365d48446701bf0021e92bfc419a85661 not found: ID does not exist" Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.740242 4558 scope.go:117] "RemoveContainer" containerID="b6a8a8fe677dca3fb55a3087c114f807e73a3ddcc7414ef0a4614ec1fbdc5f09" Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.740510 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b6a8a8fe677dca3fb55a3087c114f807e73a3ddcc7414ef0a4614ec1fbdc5f09"} err="failed to get container status \"b6a8a8fe677dca3fb55a3087c114f807e73a3ddcc7414ef0a4614ec1fbdc5f09\": rpc error: code = NotFound desc = could not find container \"b6a8a8fe677dca3fb55a3087c114f807e73a3ddcc7414ef0a4614ec1fbdc5f09\": container with ID starting with b6a8a8fe677dca3fb55a3087c114f807e73a3ddcc7414ef0a4614ec1fbdc5f09 not found: ID does not exist" Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.740529 4558 scope.go:117] "RemoveContainer" containerID="dd38a4fbd4be2117599754c25fd95f1a3aa1c17e4091c3a545f1ef336ba73cd1" Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.740759 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dd38a4fbd4be2117599754c25fd95f1a3aa1c17e4091c3a545f1ef336ba73cd1"} err="failed to get container status \"dd38a4fbd4be2117599754c25fd95f1a3aa1c17e4091c3a545f1ef336ba73cd1\": rpc error: code = NotFound desc = could not find container \"dd38a4fbd4be2117599754c25fd95f1a3aa1c17e4091c3a545f1ef336ba73cd1\": container with ID starting with dd38a4fbd4be2117599754c25fd95f1a3aa1c17e4091c3a545f1ef336ba73cd1 not found: ID does not exist" Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.740779 4558 scope.go:117] "RemoveContainer" containerID="386f50968d6c537976bd629cd725b50bf2f69e68fdc5fe86153821260b50b17d" Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.741042 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"386f50968d6c537976bd629cd725b50bf2f69e68fdc5fe86153821260b50b17d"} err="failed to get container status \"386f50968d6c537976bd629cd725b50bf2f69e68fdc5fe86153821260b50b17d\": rpc error: code = NotFound desc = could not find container \"386f50968d6c537976bd629cd725b50bf2f69e68fdc5fe86153821260b50b17d\": container with ID starting with 386f50968d6c537976bd629cd725b50bf2f69e68fdc5fe86153821260b50b17d not found: ID does not exist" Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.741061 4558 scope.go:117] "RemoveContainer" containerID="30b7e39adde9765f41737fc8e5ebdda365d48446701bf0021e92bfc419a85661" Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.741363 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"30b7e39adde9765f41737fc8e5ebdda365d48446701bf0021e92bfc419a85661"} err="failed to get container status \"30b7e39adde9765f41737fc8e5ebdda365d48446701bf0021e92bfc419a85661\": rpc error: code = NotFound desc = could not find container \"30b7e39adde9765f41737fc8e5ebdda365d48446701bf0021e92bfc419a85661\": container with ID 
starting with 30b7e39adde9765f41737fc8e5ebdda365d48446701bf0021e92bfc419a85661 not found: ID does not exist" Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.741399 4558 scope.go:117] "RemoveContainer" containerID="b6a8a8fe677dca3fb55a3087c114f807e73a3ddcc7414ef0a4614ec1fbdc5f09" Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.741716 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b6a8a8fe677dca3fb55a3087c114f807e73a3ddcc7414ef0a4614ec1fbdc5f09"} err="failed to get container status \"b6a8a8fe677dca3fb55a3087c114f807e73a3ddcc7414ef0a4614ec1fbdc5f09\": rpc error: code = NotFound desc = could not find container \"b6a8a8fe677dca3fb55a3087c114f807e73a3ddcc7414ef0a4614ec1fbdc5f09\": container with ID starting with b6a8a8fe677dca3fb55a3087c114f807e73a3ddcc7414ef0a4614ec1fbdc5f09 not found: ID does not exist" Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.741735 4558 scope.go:117] "RemoveContainer" containerID="dd38a4fbd4be2117599754c25fd95f1a3aa1c17e4091c3a545f1ef336ba73cd1" Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.741992 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dd38a4fbd4be2117599754c25fd95f1a3aa1c17e4091c3a545f1ef336ba73cd1"} err="failed to get container status \"dd38a4fbd4be2117599754c25fd95f1a3aa1c17e4091c3a545f1ef336ba73cd1\": rpc error: code = NotFound desc = could not find container \"dd38a4fbd4be2117599754c25fd95f1a3aa1c17e4091c3a545f1ef336ba73cd1\": container with ID starting with dd38a4fbd4be2117599754c25fd95f1a3aa1c17e4091c3a545f1ef336ba73cd1 not found: ID does not exist" Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.742020 4558 scope.go:117] "RemoveContainer" containerID="386f50968d6c537976bd629cd725b50bf2f69e68fdc5fe86153821260b50b17d" Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.742297 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"386f50968d6c537976bd629cd725b50bf2f69e68fdc5fe86153821260b50b17d"} err="failed to get container status \"386f50968d6c537976bd629cd725b50bf2f69e68fdc5fe86153821260b50b17d\": rpc error: code = NotFound desc = could not find container \"386f50968d6c537976bd629cd725b50bf2f69e68fdc5fe86153821260b50b17d\": container with ID starting with 386f50968d6c537976bd629cd725b50bf2f69e68fdc5fe86153821260b50b17d not found: ID does not exist" Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.742326 4558 scope.go:117] "RemoveContainer" containerID="30b7e39adde9765f41737fc8e5ebdda365d48446701bf0021e92bfc419a85661" Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.742594 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"30b7e39adde9765f41737fc8e5ebdda365d48446701bf0021e92bfc419a85661"} err="failed to get container status \"30b7e39adde9765f41737fc8e5ebdda365d48446701bf0021e92bfc419a85661\": rpc error: code = NotFound desc = could not find container \"30b7e39adde9765f41737fc8e5ebdda365d48446701bf0021e92bfc419a85661\": container with ID starting with 30b7e39adde9765f41737fc8e5ebdda365d48446701bf0021e92bfc419a85661 not found: ID does not exist" Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.742618 4558 scope.go:117] "RemoveContainer" containerID="b6a8a8fe677dca3fb55a3087c114f807e73a3ddcc7414ef0a4614ec1fbdc5f09" Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.743013 4558 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"b6a8a8fe677dca3fb55a3087c114f807e73a3ddcc7414ef0a4614ec1fbdc5f09"} err="failed to get container status \"b6a8a8fe677dca3fb55a3087c114f807e73a3ddcc7414ef0a4614ec1fbdc5f09\": rpc error: code = NotFound desc = could not find container \"b6a8a8fe677dca3fb55a3087c114f807e73a3ddcc7414ef0a4614ec1fbdc5f09\": container with ID starting with b6a8a8fe677dca3fb55a3087c114f807e73a3ddcc7414ef0a4614ec1fbdc5f09 not found: ID does not exist" Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.743034 4558 scope.go:117] "RemoveContainer" containerID="dd38a4fbd4be2117599754c25fd95f1a3aa1c17e4091c3a545f1ef336ba73cd1" Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.743318 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dd38a4fbd4be2117599754c25fd95f1a3aa1c17e4091c3a545f1ef336ba73cd1"} err="failed to get container status \"dd38a4fbd4be2117599754c25fd95f1a3aa1c17e4091c3a545f1ef336ba73cd1\": rpc error: code = NotFound desc = could not find container \"dd38a4fbd4be2117599754c25fd95f1a3aa1c17e4091c3a545f1ef336ba73cd1\": container with ID starting with dd38a4fbd4be2117599754c25fd95f1a3aa1c17e4091c3a545f1ef336ba73cd1 not found: ID does not exist" Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.868666 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.877319 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.890464 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:41:48 crc kubenswrapper[4558]: E0120 17:41:48.890817 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db5ab617-437a-468f-bb76-c4c02d96190c" containerName="proxy-httpd" Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.890835 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="db5ab617-437a-468f-bb76-c4c02d96190c" containerName="proxy-httpd" Jan 20 17:41:48 crc kubenswrapper[4558]: E0120 17:41:48.890860 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db5ab617-437a-468f-bb76-c4c02d96190c" containerName="ceilometer-central-agent" Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.890867 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="db5ab617-437a-468f-bb76-c4c02d96190c" containerName="ceilometer-central-agent" Jan 20 17:41:48 crc kubenswrapper[4558]: E0120 17:41:48.890880 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db5ab617-437a-468f-bb76-c4c02d96190c" containerName="ceilometer-notification-agent" Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.890888 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="db5ab617-437a-468f-bb76-c4c02d96190c" containerName="ceilometer-notification-agent" Jan 20 17:41:48 crc kubenswrapper[4558]: E0120 17:41:48.890906 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db5ab617-437a-468f-bb76-c4c02d96190c" containerName="sg-core" Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.890912 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="db5ab617-437a-468f-bb76-c4c02d96190c" containerName="sg-core" Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.891075 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="db5ab617-437a-468f-bb76-c4c02d96190c" containerName="ceilometer-central-agent" Jan 20 17:41:48 crc 
kubenswrapper[4558]: I0120 17:41:48.891089 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="db5ab617-437a-468f-bb76-c4c02d96190c" containerName="ceilometer-notification-agent" Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.891111 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="db5ab617-437a-468f-bb76-c4c02d96190c" containerName="sg-core" Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.891125 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="db5ab617-437a-468f-bb76-c4c02d96190c" containerName="proxy-httpd" Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.892686 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.895273 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.896224 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ceilometer-internal-svc" Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.896268 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:41:48 crc kubenswrapper[4558]: I0120 17:41:48.901414 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:41:49 crc kubenswrapper[4558]: I0120 17:41:49.003525 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c9014af3-2918-4f4c-b2e6-0cbda06f341b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c9014af3-2918-4f4c-b2e6-0cbda06f341b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:49 crc kubenswrapper[4558]: I0120 17:41:49.003663 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/c9014af3-2918-4f4c-b2e6-0cbda06f341b-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"c9014af3-2918-4f4c-b2e6-0cbda06f341b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:49 crc kubenswrapper[4558]: I0120 17:41:49.003692 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9014af3-2918-4f4c-b2e6-0cbda06f341b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c9014af3-2918-4f4c-b2e6-0cbda06f341b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:49 crc kubenswrapper[4558]: I0120 17:41:49.003726 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9014af3-2918-4f4c-b2e6-0cbda06f341b-config-data\") pod \"ceilometer-0\" (UID: \"c9014af3-2918-4f4c-b2e6-0cbda06f341b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:49 crc kubenswrapper[4558]: I0120 17:41:49.003752 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zkrkh\" (UniqueName: \"kubernetes.io/projected/c9014af3-2918-4f4c-b2e6-0cbda06f341b-kube-api-access-zkrkh\") pod \"ceilometer-0\" (UID: \"c9014af3-2918-4f4c-b2e6-0cbda06f341b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:49 crc kubenswrapper[4558]: I0120 17:41:49.003900 4558 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c9014af3-2918-4f4c-b2e6-0cbda06f341b-scripts\") pod \"ceilometer-0\" (UID: \"c9014af3-2918-4f4c-b2e6-0cbda06f341b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:49 crc kubenswrapper[4558]: I0120 17:41:49.003928 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c9014af3-2918-4f4c-b2e6-0cbda06f341b-log-httpd\") pod \"ceilometer-0\" (UID: \"c9014af3-2918-4f4c-b2e6-0cbda06f341b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:49 crc kubenswrapper[4558]: I0120 17:41:49.003963 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c9014af3-2918-4f4c-b2e6-0cbda06f341b-run-httpd\") pod \"ceilometer-0\" (UID: \"c9014af3-2918-4f4c-b2e6-0cbda06f341b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:49 crc kubenswrapper[4558]: I0120 17:41:49.107153 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c9014af3-2918-4f4c-b2e6-0cbda06f341b-run-httpd\") pod \"ceilometer-0\" (UID: \"c9014af3-2918-4f4c-b2e6-0cbda06f341b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:49 crc kubenswrapper[4558]: I0120 17:41:49.107270 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c9014af3-2918-4f4c-b2e6-0cbda06f341b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c9014af3-2918-4f4c-b2e6-0cbda06f341b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:49 crc kubenswrapper[4558]: I0120 17:41:49.107344 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/c9014af3-2918-4f4c-b2e6-0cbda06f341b-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"c9014af3-2918-4f4c-b2e6-0cbda06f341b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:49 crc kubenswrapper[4558]: I0120 17:41:49.107373 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9014af3-2918-4f4c-b2e6-0cbda06f341b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c9014af3-2918-4f4c-b2e6-0cbda06f341b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:49 crc kubenswrapper[4558]: I0120 17:41:49.107419 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9014af3-2918-4f4c-b2e6-0cbda06f341b-config-data\") pod \"ceilometer-0\" (UID: \"c9014af3-2918-4f4c-b2e6-0cbda06f341b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:49 crc kubenswrapper[4558]: I0120 17:41:49.107456 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zkrkh\" (UniqueName: \"kubernetes.io/projected/c9014af3-2918-4f4c-b2e6-0cbda06f341b-kube-api-access-zkrkh\") pod \"ceilometer-0\" (UID: \"c9014af3-2918-4f4c-b2e6-0cbda06f341b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:49 crc kubenswrapper[4558]: I0120 17:41:49.107572 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c9014af3-2918-4f4c-b2e6-0cbda06f341b-scripts\") pod \"ceilometer-0\" (UID: 
\"c9014af3-2918-4f4c-b2e6-0cbda06f341b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:49 crc kubenswrapper[4558]: I0120 17:41:49.107602 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c9014af3-2918-4f4c-b2e6-0cbda06f341b-log-httpd\") pod \"ceilometer-0\" (UID: \"c9014af3-2918-4f4c-b2e6-0cbda06f341b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:49 crc kubenswrapper[4558]: I0120 17:41:49.107648 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c9014af3-2918-4f4c-b2e6-0cbda06f341b-run-httpd\") pod \"ceilometer-0\" (UID: \"c9014af3-2918-4f4c-b2e6-0cbda06f341b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:49 crc kubenswrapper[4558]: I0120 17:41:49.108118 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c9014af3-2918-4f4c-b2e6-0cbda06f341b-log-httpd\") pod \"ceilometer-0\" (UID: \"c9014af3-2918-4f4c-b2e6-0cbda06f341b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:49 crc kubenswrapper[4558]: I0120 17:41:49.114763 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c9014af3-2918-4f4c-b2e6-0cbda06f341b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c9014af3-2918-4f4c-b2e6-0cbda06f341b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:49 crc kubenswrapper[4558]: I0120 17:41:49.115369 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9014af3-2918-4f4c-b2e6-0cbda06f341b-config-data\") pod \"ceilometer-0\" (UID: \"c9014af3-2918-4f4c-b2e6-0cbda06f341b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:49 crc kubenswrapper[4558]: I0120 17:41:49.115688 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/c9014af3-2918-4f4c-b2e6-0cbda06f341b-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"c9014af3-2918-4f4c-b2e6-0cbda06f341b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:49 crc kubenswrapper[4558]: I0120 17:41:49.116037 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9014af3-2918-4f4c-b2e6-0cbda06f341b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c9014af3-2918-4f4c-b2e6-0cbda06f341b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:49 crc kubenswrapper[4558]: I0120 17:41:49.123916 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c9014af3-2918-4f4c-b2e6-0cbda06f341b-scripts\") pod \"ceilometer-0\" (UID: \"c9014af3-2918-4f4c-b2e6-0cbda06f341b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:49 crc kubenswrapper[4558]: I0120 17:41:49.124569 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zkrkh\" (UniqueName: \"kubernetes.io/projected/c9014af3-2918-4f4c-b2e6-0cbda06f341b-kube-api-access-zkrkh\") pod \"ceilometer-0\" (UID: \"c9014af3-2918-4f4c-b2e6-0cbda06f341b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:49 crc kubenswrapper[4558]: I0120 17:41:49.209969 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:49 crc kubenswrapper[4558]: I0120 17:41:49.567730 4558 generic.go:334] "Generic (PLEG): container finished" podID="daadab43-a59b-4a2e-8f07-0e0dc38c2ace" containerID="32d95cf62a8b0b26c56920fb7a65430e15d38f235bd166479a0e88da90bf77fb" exitCode=143 Jan 20 17:41:49 crc kubenswrapper[4558]: I0120 17:41:49.568407 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"daadab43-a59b-4a2e-8f07-0e0dc38c2ace","Type":"ContainerDied","Data":"32d95cf62a8b0b26c56920fb7a65430e15d38f235bd166479a0e88da90bf77fb"} Jan 20 17:41:49 crc kubenswrapper[4558]: W0120 17:41:49.688309 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc9014af3_2918_4f4c_b2e6_0cbda06f341b.slice/crio-79bdcf4de01927299f773decaa8d00d840d3b37f8513aaf847207c171de8ca8e WatchSource:0}: Error finding container 79bdcf4de01927299f773decaa8d00d840d3b37f8513aaf847207c171de8ca8e: Status 404 returned error can't find the container with id 79bdcf4de01927299f773decaa8d00d840d3b37f8513aaf847207c171de8ca8e Jan 20 17:41:49 crc kubenswrapper[4558]: I0120 17:41:49.689734 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:41:50 crc kubenswrapper[4558]: I0120 17:41:50.577341 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="db5ab617-437a-468f-bb76-c4c02d96190c" path="/var/lib/kubelet/pods/db5ab617-437a-468f-bb76-c4c02d96190c/volumes" Jan 20 17:41:50 crc kubenswrapper[4558]: I0120 17:41:50.582154 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"c9014af3-2918-4f4c-b2e6-0cbda06f341b","Type":"ContainerStarted","Data":"f97b4e29f1e26182ff23d63a77976bee966ee5de4fbd05d044c364232dd96a98"} Jan 20 17:41:50 crc kubenswrapper[4558]: I0120 17:41:50.582222 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"c9014af3-2918-4f4c-b2e6-0cbda06f341b","Type":"ContainerStarted","Data":"79bdcf4de01927299f773decaa8d00d840d3b37f8513aaf847207c171de8ca8e"} Jan 20 17:41:50 crc kubenswrapper[4558]: I0120 17:41:50.756329 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:41:51 crc kubenswrapper[4558]: I0120 17:41:51.596497 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"c9014af3-2918-4f4c-b2e6-0cbda06f341b","Type":"ContainerStarted","Data":"b43209642693ee4cc8523f7585ac5ad962688ea7597d1070911a31cf4a28c244"} Jan 20 17:41:52 crc kubenswrapper[4558]: I0120 17:41:52.200395 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:41:52 crc kubenswrapper[4558]: I0120 17:41:52.383902 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/daadab43-a59b-4a2e-8f07-0e0dc38c2ace-logs\") pod \"daadab43-a59b-4a2e-8f07-0e0dc38c2ace\" (UID: \"daadab43-a59b-4a2e-8f07-0e0dc38c2ace\") " Jan 20 17:41:52 crc kubenswrapper[4558]: I0120 17:41:52.384280 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/daadab43-a59b-4a2e-8f07-0e0dc38c2ace-config-data\") pod \"daadab43-a59b-4a2e-8f07-0e0dc38c2ace\" (UID: \"daadab43-a59b-4a2e-8f07-0e0dc38c2ace\") " Jan 20 17:41:52 crc kubenswrapper[4558]: I0120 17:41:52.384438 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/daadab43-a59b-4a2e-8f07-0e0dc38c2ace-logs" (OuterVolumeSpecName: "logs") pod "daadab43-a59b-4a2e-8f07-0e0dc38c2ace" (UID: "daadab43-a59b-4a2e-8f07-0e0dc38c2ace"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:41:52 crc kubenswrapper[4558]: I0120 17:41:52.384573 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gn29p\" (UniqueName: \"kubernetes.io/projected/daadab43-a59b-4a2e-8f07-0e0dc38c2ace-kube-api-access-gn29p\") pod \"daadab43-a59b-4a2e-8f07-0e0dc38c2ace\" (UID: \"daadab43-a59b-4a2e-8f07-0e0dc38c2ace\") " Jan 20 17:41:52 crc kubenswrapper[4558]: I0120 17:41:52.384604 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/daadab43-a59b-4a2e-8f07-0e0dc38c2ace-combined-ca-bundle\") pod \"daadab43-a59b-4a2e-8f07-0e0dc38c2ace\" (UID: \"daadab43-a59b-4a2e-8f07-0e0dc38c2ace\") " Jan 20 17:41:52 crc kubenswrapper[4558]: I0120 17:41:52.385416 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/daadab43-a59b-4a2e-8f07-0e0dc38c2ace-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:52 crc kubenswrapper[4558]: I0120 17:41:52.389374 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/daadab43-a59b-4a2e-8f07-0e0dc38c2ace-kube-api-access-gn29p" (OuterVolumeSpecName: "kube-api-access-gn29p") pod "daadab43-a59b-4a2e-8f07-0e0dc38c2ace" (UID: "daadab43-a59b-4a2e-8f07-0e0dc38c2ace"). InnerVolumeSpecName "kube-api-access-gn29p". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:41:52 crc kubenswrapper[4558]: I0120 17:41:52.417852 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/daadab43-a59b-4a2e-8f07-0e0dc38c2ace-config-data" (OuterVolumeSpecName: "config-data") pod "daadab43-a59b-4a2e-8f07-0e0dc38c2ace" (UID: "daadab43-a59b-4a2e-8f07-0e0dc38c2ace"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:41:52 crc kubenswrapper[4558]: I0120 17:41:52.417906 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/daadab43-a59b-4a2e-8f07-0e0dc38c2ace-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "daadab43-a59b-4a2e-8f07-0e0dc38c2ace" (UID: "daadab43-a59b-4a2e-8f07-0e0dc38c2ace"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:41:52 crc kubenswrapper[4558]: I0120 17:41:52.489848 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/daadab43-a59b-4a2e-8f07-0e0dc38c2ace-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:52 crc kubenswrapper[4558]: I0120 17:41:52.490023 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gn29p\" (UniqueName: \"kubernetes.io/projected/daadab43-a59b-4a2e-8f07-0e0dc38c2ace-kube-api-access-gn29p\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:52 crc kubenswrapper[4558]: I0120 17:41:52.490091 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/daadab43-a59b-4a2e-8f07-0e0dc38c2ace-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:52 crc kubenswrapper[4558]: I0120 17:41:52.612263 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"c9014af3-2918-4f4c-b2e6-0cbda06f341b","Type":"ContainerStarted","Data":"a0e2963e31a43f16b2fb75fbf0d0c78351e54b488c59c5286b4daae9b7480769"} Jan 20 17:41:52 crc kubenswrapper[4558]: I0120 17:41:52.614771 4558 generic.go:334] "Generic (PLEG): container finished" podID="daadab43-a59b-4a2e-8f07-0e0dc38c2ace" containerID="38738b311a04b17fa7cb39640d4340a666a6e6d37b6c1d98146a92fd4c467786" exitCode=0 Jan 20 17:41:52 crc kubenswrapper[4558]: I0120 17:41:52.614835 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"daadab43-a59b-4a2e-8f07-0e0dc38c2ace","Type":"ContainerDied","Data":"38738b311a04b17fa7cb39640d4340a666a6e6d37b6c1d98146a92fd4c467786"} Jan 20 17:41:52 crc kubenswrapper[4558]: I0120 17:41:52.614854 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:41:52 crc kubenswrapper[4558]: I0120 17:41:52.614888 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"daadab43-a59b-4a2e-8f07-0e0dc38c2ace","Type":"ContainerDied","Data":"ad54fb20edc645dfec3bbeababec236b9df6fe2bf4fa6e6e0d6e93c34426db5a"} Jan 20 17:41:52 crc kubenswrapper[4558]: I0120 17:41:52.614923 4558 scope.go:117] "RemoveContainer" containerID="38738b311a04b17fa7cb39640d4340a666a6e6d37b6c1d98146a92fd4c467786" Jan 20 17:41:52 crc kubenswrapper[4558]: I0120 17:41:52.646026 4558 scope.go:117] "RemoveContainer" containerID="32d95cf62a8b0b26c56920fb7a65430e15d38f235bd166479a0e88da90bf77fb" Jan 20 17:41:52 crc kubenswrapper[4558]: I0120 17:41:52.661701 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:41:52 crc kubenswrapper[4558]: I0120 17:41:52.672714 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:41:52 crc kubenswrapper[4558]: I0120 17:41:52.677255 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:41:52 crc kubenswrapper[4558]: E0120 17:41:52.677801 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="daadab43-a59b-4a2e-8f07-0e0dc38c2ace" containerName="nova-api-api" Jan 20 17:41:52 crc kubenswrapper[4558]: I0120 17:41:52.677819 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="daadab43-a59b-4a2e-8f07-0e0dc38c2ace" containerName="nova-api-api" Jan 20 17:41:52 crc kubenswrapper[4558]: E0120 17:41:52.677833 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="daadab43-a59b-4a2e-8f07-0e0dc38c2ace" containerName="nova-api-log" Jan 20 17:41:52 crc kubenswrapper[4558]: I0120 17:41:52.677840 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="daadab43-a59b-4a2e-8f07-0e0dc38c2ace" containerName="nova-api-log" Jan 20 17:41:52 crc kubenswrapper[4558]: I0120 17:41:52.678032 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="daadab43-a59b-4a2e-8f07-0e0dc38c2ace" containerName="nova-api-api" Jan 20 17:41:52 crc kubenswrapper[4558]: I0120 17:41:52.678056 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="daadab43-a59b-4a2e-8f07-0e0dc38c2ace" containerName="nova-api-log" Jan 20 17:41:52 crc kubenswrapper[4558]: I0120 17:41:52.679103 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:41:52 crc kubenswrapper[4558]: I0120 17:41:52.680944 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-internal-svc" Jan 20 17:41:52 crc kubenswrapper[4558]: I0120 17:41:52.681196 4558 scope.go:117] "RemoveContainer" containerID="38738b311a04b17fa7cb39640d4340a666a6e6d37b6c1d98146a92fd4c467786" Jan 20 17:41:52 crc kubenswrapper[4558]: I0120 17:41:52.681310 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-public-svc" Jan 20 17:41:52 crc kubenswrapper[4558]: I0120 17:41:52.681494 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-api-config-data" Jan 20 17:41:52 crc kubenswrapper[4558]: E0120 17:41:52.683698 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"38738b311a04b17fa7cb39640d4340a666a6e6d37b6c1d98146a92fd4c467786\": container with ID starting with 38738b311a04b17fa7cb39640d4340a666a6e6d37b6c1d98146a92fd4c467786 not found: ID does not exist" containerID="38738b311a04b17fa7cb39640d4340a666a6e6d37b6c1d98146a92fd4c467786" Jan 20 17:41:52 crc kubenswrapper[4558]: I0120 17:41:52.683844 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"38738b311a04b17fa7cb39640d4340a666a6e6d37b6c1d98146a92fd4c467786"} err="failed to get container status \"38738b311a04b17fa7cb39640d4340a666a6e6d37b6c1d98146a92fd4c467786\": rpc error: code = NotFound desc = could not find container \"38738b311a04b17fa7cb39640d4340a666a6e6d37b6c1d98146a92fd4c467786\": container with ID starting with 38738b311a04b17fa7cb39640d4340a666a6e6d37b6c1d98146a92fd4c467786 not found: ID does not exist" Jan 20 17:41:52 crc kubenswrapper[4558]: I0120 17:41:52.683879 4558 scope.go:117] "RemoveContainer" containerID="32d95cf62a8b0b26c56920fb7a65430e15d38f235bd166479a0e88da90bf77fb" Jan 20 17:41:52 crc kubenswrapper[4558]: E0120 17:41:52.685333 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"32d95cf62a8b0b26c56920fb7a65430e15d38f235bd166479a0e88da90bf77fb\": container with ID starting with 32d95cf62a8b0b26c56920fb7a65430e15d38f235bd166479a0e88da90bf77fb not found: ID does not exist" containerID="32d95cf62a8b0b26c56920fb7a65430e15d38f235bd166479a0e88da90bf77fb" Jan 20 17:41:52 crc kubenswrapper[4558]: I0120 17:41:52.685392 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"32d95cf62a8b0b26c56920fb7a65430e15d38f235bd166479a0e88da90bf77fb"} err="failed to get container status \"32d95cf62a8b0b26c56920fb7a65430e15d38f235bd166479a0e88da90bf77fb\": rpc error: code = NotFound desc = could not find container \"32d95cf62a8b0b26c56920fb7a65430e15d38f235bd166479a0e88da90bf77fb\": container with ID starting with 32d95cf62a8b0b26c56920fb7a65430e15d38f235bd166479a0e88da90bf77fb not found: ID does not exist" Jan 20 17:41:52 crc kubenswrapper[4558]: I0120 17:41:52.685570 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:41:52 crc kubenswrapper[4558]: I0120 17:41:52.802095 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0f6996d-f07f-4cf5-8227-6bfed15e1370-config-data\") pod \"nova-api-0\" (UID: 
\"d0f6996d-f07f-4cf5-8227-6bfed15e1370\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:41:52 crc kubenswrapper[4558]: I0120 17:41:52.802147 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d0f6996d-f07f-4cf5-8227-6bfed15e1370-public-tls-certs\") pod \"nova-api-0\" (UID: \"d0f6996d-f07f-4cf5-8227-6bfed15e1370\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:41:52 crc kubenswrapper[4558]: I0120 17:41:52.802197 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mgts5\" (UniqueName: \"kubernetes.io/projected/d0f6996d-f07f-4cf5-8227-6bfed15e1370-kube-api-access-mgts5\") pod \"nova-api-0\" (UID: \"d0f6996d-f07f-4cf5-8227-6bfed15e1370\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:41:52 crc kubenswrapper[4558]: I0120 17:41:52.802227 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d0f6996d-f07f-4cf5-8227-6bfed15e1370-internal-tls-certs\") pod \"nova-api-0\" (UID: \"d0f6996d-f07f-4cf5-8227-6bfed15e1370\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:41:52 crc kubenswrapper[4558]: I0120 17:41:52.802641 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d0f6996d-f07f-4cf5-8227-6bfed15e1370-logs\") pod \"nova-api-0\" (UID: \"d0f6996d-f07f-4cf5-8227-6bfed15e1370\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:41:52 crc kubenswrapper[4558]: I0120 17:41:52.802718 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0f6996d-f07f-4cf5-8227-6bfed15e1370-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"d0f6996d-f07f-4cf5-8227-6bfed15e1370\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:41:52 crc kubenswrapper[4558]: I0120 17:41:52.905137 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d0f6996d-f07f-4cf5-8227-6bfed15e1370-logs\") pod \"nova-api-0\" (UID: \"d0f6996d-f07f-4cf5-8227-6bfed15e1370\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:41:52 crc kubenswrapper[4558]: I0120 17:41:52.905206 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0f6996d-f07f-4cf5-8227-6bfed15e1370-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"d0f6996d-f07f-4cf5-8227-6bfed15e1370\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:41:52 crc kubenswrapper[4558]: I0120 17:41:52.905294 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0f6996d-f07f-4cf5-8227-6bfed15e1370-config-data\") pod \"nova-api-0\" (UID: \"d0f6996d-f07f-4cf5-8227-6bfed15e1370\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:41:52 crc kubenswrapper[4558]: I0120 17:41:52.905318 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d0f6996d-f07f-4cf5-8227-6bfed15e1370-public-tls-certs\") pod \"nova-api-0\" (UID: \"d0f6996d-f07f-4cf5-8227-6bfed15e1370\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:41:52 crc kubenswrapper[4558]: I0120 17:41:52.905354 4558 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-mgts5\" (UniqueName: \"kubernetes.io/projected/d0f6996d-f07f-4cf5-8227-6bfed15e1370-kube-api-access-mgts5\") pod \"nova-api-0\" (UID: \"d0f6996d-f07f-4cf5-8227-6bfed15e1370\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:41:52 crc kubenswrapper[4558]: I0120 17:41:52.905374 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d0f6996d-f07f-4cf5-8227-6bfed15e1370-internal-tls-certs\") pod \"nova-api-0\" (UID: \"d0f6996d-f07f-4cf5-8227-6bfed15e1370\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:41:52 crc kubenswrapper[4558]: I0120 17:41:52.906064 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d0f6996d-f07f-4cf5-8227-6bfed15e1370-logs\") pod \"nova-api-0\" (UID: \"d0f6996d-f07f-4cf5-8227-6bfed15e1370\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:41:52 crc kubenswrapper[4558]: I0120 17:41:52.911423 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d0f6996d-f07f-4cf5-8227-6bfed15e1370-public-tls-certs\") pod \"nova-api-0\" (UID: \"d0f6996d-f07f-4cf5-8227-6bfed15e1370\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:41:52 crc kubenswrapper[4558]: I0120 17:41:52.916199 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d0f6996d-f07f-4cf5-8227-6bfed15e1370-internal-tls-certs\") pod \"nova-api-0\" (UID: \"d0f6996d-f07f-4cf5-8227-6bfed15e1370\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:41:52 crc kubenswrapper[4558]: I0120 17:41:52.920030 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0f6996d-f07f-4cf5-8227-6bfed15e1370-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"d0f6996d-f07f-4cf5-8227-6bfed15e1370\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:41:52 crc kubenswrapper[4558]: I0120 17:41:52.920249 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0f6996d-f07f-4cf5-8227-6bfed15e1370-config-data\") pod \"nova-api-0\" (UID: \"d0f6996d-f07f-4cf5-8227-6bfed15e1370\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:41:52 crc kubenswrapper[4558]: I0120 17:41:52.920654 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mgts5\" (UniqueName: \"kubernetes.io/projected/d0f6996d-f07f-4cf5-8227-6bfed15e1370-kube-api-access-mgts5\") pod \"nova-api-0\" (UID: \"d0f6996d-f07f-4cf5-8227-6bfed15e1370\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:41:52 crc kubenswrapper[4558]: I0120 17:41:52.997327 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:41:53 crc kubenswrapper[4558]: I0120 17:41:53.464482 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:41:53 crc kubenswrapper[4558]: I0120 17:41:53.629330 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"d0f6996d-f07f-4cf5-8227-6bfed15e1370","Type":"ContainerStarted","Data":"a69487bda3f88f157ce0be8641de7cad991146d7e796fea8bda97415ea84ae41"} Jan 20 17:41:54 crc kubenswrapper[4558]: I0120 17:41:54.579798 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="daadab43-a59b-4a2e-8f07-0e0dc38c2ace" path="/var/lib/kubelet/pods/daadab43-a59b-4a2e-8f07-0e0dc38c2ace/volumes" Jan 20 17:41:54 crc kubenswrapper[4558]: I0120 17:41:54.643957 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"d0f6996d-f07f-4cf5-8227-6bfed15e1370","Type":"ContainerStarted","Data":"39b250fe2c01ede889dca2fd85bb4ba7ecb4ff567dec04dccfbfad33fc9e2ce1"} Jan 20 17:41:54 crc kubenswrapper[4558]: I0120 17:41:54.644031 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"d0f6996d-f07f-4cf5-8227-6bfed15e1370","Type":"ContainerStarted","Data":"acf769aacf60dcf25c180f7db8bbc004fddf751cbab36c2bcd5d1eac22d68fe8"} Jan 20 17:41:54 crc kubenswrapper[4558]: I0120 17:41:54.648473 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"c9014af3-2918-4f4c-b2e6-0cbda06f341b","Type":"ContainerStarted","Data":"c33fdcd3d341d1a1a1b440812f67c35ea95868909cb2776050a4363cfd9fd3d7"} Jan 20 17:41:54 crc kubenswrapper[4558]: I0120 17:41:54.648674 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:54 crc kubenswrapper[4558]: I0120 17:41:54.648677 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="c9014af3-2918-4f4c-b2e6-0cbda06f341b" containerName="ceilometer-central-agent" containerID="cri-o://f97b4e29f1e26182ff23d63a77976bee966ee5de4fbd05d044c364232dd96a98" gracePeriod=30 Jan 20 17:41:54 crc kubenswrapper[4558]: I0120 17:41:54.648745 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="c9014af3-2918-4f4c-b2e6-0cbda06f341b" containerName="ceilometer-notification-agent" containerID="cri-o://b43209642693ee4cc8523f7585ac5ad962688ea7597d1070911a31cf4a28c244" gracePeriod=30 Jan 20 17:41:54 crc kubenswrapper[4558]: I0120 17:41:54.648714 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="c9014af3-2918-4f4c-b2e6-0cbda06f341b" containerName="proxy-httpd" containerID="cri-o://c33fdcd3d341d1a1a1b440812f67c35ea95868909cb2776050a4363cfd9fd3d7" gracePeriod=30 Jan 20 17:41:54 crc kubenswrapper[4558]: I0120 17:41:54.648753 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="c9014af3-2918-4f4c-b2e6-0cbda06f341b" containerName="sg-core" containerID="cri-o://a0e2963e31a43f16b2fb75fbf0d0c78351e54b488c59c5286b4daae9b7480769" gracePeriod=30 Jan 20 17:41:54 crc kubenswrapper[4558]: I0120 17:41:54.667385 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-api-0" podStartSLOduration=2.667366402 
podStartE2EDuration="2.667366402s" podCreationTimestamp="2026-01-20 17:41:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:41:54.662370908 +0000 UTC m=+3608.422708876" watchObservedRunningTime="2026-01-20 17:41:54.667366402 +0000 UTC m=+3608.427704368" Jan 20 17:41:54 crc kubenswrapper[4558]: I0120 17:41:54.701617 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=2.69265389 podStartE2EDuration="6.701585441s" podCreationTimestamp="2026-01-20 17:41:48 +0000 UTC" firstStartedPulling="2026-01-20 17:41:49.692389202 +0000 UTC m=+3603.452727170" lastFinishedPulling="2026-01-20 17:41:53.701320754 +0000 UTC m=+3607.461658721" observedRunningTime="2026-01-20 17:41:54.691016198 +0000 UTC m=+3608.451354166" watchObservedRunningTime="2026-01-20 17:41:54.701585441 +0000 UTC m=+3608.461923399" Jan 20 17:41:55 crc kubenswrapper[4558]: I0120 17:41:55.662942 4558 generic.go:334] "Generic (PLEG): container finished" podID="c9014af3-2918-4f4c-b2e6-0cbda06f341b" containerID="c33fdcd3d341d1a1a1b440812f67c35ea95868909cb2776050a4363cfd9fd3d7" exitCode=0 Jan 20 17:41:55 crc kubenswrapper[4558]: I0120 17:41:55.663331 4558 generic.go:334] "Generic (PLEG): container finished" podID="c9014af3-2918-4f4c-b2e6-0cbda06f341b" containerID="a0e2963e31a43f16b2fb75fbf0d0c78351e54b488c59c5286b4daae9b7480769" exitCode=2 Jan 20 17:41:55 crc kubenswrapper[4558]: I0120 17:41:55.663345 4558 generic.go:334] "Generic (PLEG): container finished" podID="c9014af3-2918-4f4c-b2e6-0cbda06f341b" containerID="b43209642693ee4cc8523f7585ac5ad962688ea7597d1070911a31cf4a28c244" exitCode=0 Jan 20 17:41:55 crc kubenswrapper[4558]: I0120 17:41:55.663010 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"c9014af3-2918-4f4c-b2e6-0cbda06f341b","Type":"ContainerDied","Data":"c33fdcd3d341d1a1a1b440812f67c35ea95868909cb2776050a4363cfd9fd3d7"} Jan 20 17:41:55 crc kubenswrapper[4558]: I0120 17:41:55.663446 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"c9014af3-2918-4f4c-b2e6-0cbda06f341b","Type":"ContainerDied","Data":"a0e2963e31a43f16b2fb75fbf0d0c78351e54b488c59c5286b4daae9b7480769"} Jan 20 17:41:55 crc kubenswrapper[4558]: I0120 17:41:55.663463 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"c9014af3-2918-4f4c-b2e6-0cbda06f341b","Type":"ContainerDied","Data":"b43209642693ee4cc8523f7585ac5ad962688ea7597d1070911a31cf4a28c244"} Jan 20 17:41:56 crc kubenswrapper[4558]: I0120 17:41:56.477615 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:56 crc kubenswrapper[4558]: I0120 17:41:56.602053 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c9014af3-2918-4f4c-b2e6-0cbda06f341b-scripts\") pod \"c9014af3-2918-4f4c-b2e6-0cbda06f341b\" (UID: \"c9014af3-2918-4f4c-b2e6-0cbda06f341b\") " Jan 20 17:41:56 crc kubenswrapper[4558]: I0120 17:41:56.602124 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkrkh\" (UniqueName: \"kubernetes.io/projected/c9014af3-2918-4f4c-b2e6-0cbda06f341b-kube-api-access-zkrkh\") pod \"c9014af3-2918-4f4c-b2e6-0cbda06f341b\" (UID: \"c9014af3-2918-4f4c-b2e6-0cbda06f341b\") " Jan 20 17:41:56 crc kubenswrapper[4558]: I0120 17:41:56.602383 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9014af3-2918-4f4c-b2e6-0cbda06f341b-config-data\") pod \"c9014af3-2918-4f4c-b2e6-0cbda06f341b\" (UID: \"c9014af3-2918-4f4c-b2e6-0cbda06f341b\") " Jan 20 17:41:56 crc kubenswrapper[4558]: I0120 17:41:56.602439 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c9014af3-2918-4f4c-b2e6-0cbda06f341b-run-httpd\") pod \"c9014af3-2918-4f4c-b2e6-0cbda06f341b\" (UID: \"c9014af3-2918-4f4c-b2e6-0cbda06f341b\") " Jan 20 17:41:56 crc kubenswrapper[4558]: I0120 17:41:56.602514 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c9014af3-2918-4f4c-b2e6-0cbda06f341b-sg-core-conf-yaml\") pod \"c9014af3-2918-4f4c-b2e6-0cbda06f341b\" (UID: \"c9014af3-2918-4f4c-b2e6-0cbda06f341b\") " Jan 20 17:41:56 crc kubenswrapper[4558]: I0120 17:41:56.602701 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c9014af3-2918-4f4c-b2e6-0cbda06f341b-log-httpd\") pod \"c9014af3-2918-4f4c-b2e6-0cbda06f341b\" (UID: \"c9014af3-2918-4f4c-b2e6-0cbda06f341b\") " Jan 20 17:41:56 crc kubenswrapper[4558]: I0120 17:41:56.602735 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9014af3-2918-4f4c-b2e6-0cbda06f341b-combined-ca-bundle\") pod \"c9014af3-2918-4f4c-b2e6-0cbda06f341b\" (UID: \"c9014af3-2918-4f4c-b2e6-0cbda06f341b\") " Jan 20 17:41:56 crc kubenswrapper[4558]: I0120 17:41:56.603232 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/c9014af3-2918-4f4c-b2e6-0cbda06f341b-ceilometer-tls-certs\") pod \"c9014af3-2918-4f4c-b2e6-0cbda06f341b\" (UID: \"c9014af3-2918-4f4c-b2e6-0cbda06f341b\") " Jan 20 17:41:56 crc kubenswrapper[4558]: I0120 17:41:56.602736 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c9014af3-2918-4f4c-b2e6-0cbda06f341b-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "c9014af3-2918-4f4c-b2e6-0cbda06f341b" (UID: "c9014af3-2918-4f4c-b2e6-0cbda06f341b"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:41:56 crc kubenswrapper[4558]: I0120 17:41:56.603153 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c9014af3-2918-4f4c-b2e6-0cbda06f341b-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "c9014af3-2918-4f4c-b2e6-0cbda06f341b" (UID: "c9014af3-2918-4f4c-b2e6-0cbda06f341b"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:41:56 crc kubenswrapper[4558]: I0120 17:41:56.605247 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c9014af3-2918-4f4c-b2e6-0cbda06f341b-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:56 crc kubenswrapper[4558]: I0120 17:41:56.605287 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c9014af3-2918-4f4c-b2e6-0cbda06f341b-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:56 crc kubenswrapper[4558]: I0120 17:41:56.609825 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c9014af3-2918-4f4c-b2e6-0cbda06f341b-kube-api-access-zkrkh" (OuterVolumeSpecName: "kube-api-access-zkrkh") pod "c9014af3-2918-4f4c-b2e6-0cbda06f341b" (UID: "c9014af3-2918-4f4c-b2e6-0cbda06f341b"). InnerVolumeSpecName "kube-api-access-zkrkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:41:56 crc kubenswrapper[4558]: I0120 17:41:56.610003 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c9014af3-2918-4f4c-b2e6-0cbda06f341b-scripts" (OuterVolumeSpecName: "scripts") pod "c9014af3-2918-4f4c-b2e6-0cbda06f341b" (UID: "c9014af3-2918-4f4c-b2e6-0cbda06f341b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:41:56 crc kubenswrapper[4558]: I0120 17:41:56.631694 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c9014af3-2918-4f4c-b2e6-0cbda06f341b-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "c9014af3-2918-4f4c-b2e6-0cbda06f341b" (UID: "c9014af3-2918-4f4c-b2e6-0cbda06f341b"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:41:56 crc kubenswrapper[4558]: I0120 17:41:56.650463 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c9014af3-2918-4f4c-b2e6-0cbda06f341b-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "c9014af3-2918-4f4c-b2e6-0cbda06f341b" (UID: "c9014af3-2918-4f4c-b2e6-0cbda06f341b"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:41:56 crc kubenswrapper[4558]: I0120 17:41:56.670747 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c9014af3-2918-4f4c-b2e6-0cbda06f341b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c9014af3-2918-4f4c-b2e6-0cbda06f341b" (UID: "c9014af3-2918-4f4c-b2e6-0cbda06f341b"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:41:56 crc kubenswrapper[4558]: I0120 17:41:56.679363 4558 generic.go:334] "Generic (PLEG): container finished" podID="c9014af3-2918-4f4c-b2e6-0cbda06f341b" containerID="f97b4e29f1e26182ff23d63a77976bee966ee5de4fbd05d044c364232dd96a98" exitCode=0 Jan 20 17:41:56 crc kubenswrapper[4558]: I0120 17:41:56.679419 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"c9014af3-2918-4f4c-b2e6-0cbda06f341b","Type":"ContainerDied","Data":"f97b4e29f1e26182ff23d63a77976bee966ee5de4fbd05d044c364232dd96a98"} Jan 20 17:41:56 crc kubenswrapper[4558]: I0120 17:41:56.679479 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"c9014af3-2918-4f4c-b2e6-0cbda06f341b","Type":"ContainerDied","Data":"79bdcf4de01927299f773decaa8d00d840d3b37f8513aaf847207c171de8ca8e"} Jan 20 17:41:56 crc kubenswrapper[4558]: I0120 17:41:56.679510 4558 scope.go:117] "RemoveContainer" containerID="c33fdcd3d341d1a1a1b440812f67c35ea95868909cb2776050a4363cfd9fd3d7" Jan 20 17:41:56 crc kubenswrapper[4558]: I0120 17:41:56.679792 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:56 crc kubenswrapper[4558]: I0120 17:41:56.683238 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c9014af3-2918-4f4c-b2e6-0cbda06f341b-config-data" (OuterVolumeSpecName: "config-data") pod "c9014af3-2918-4f4c-b2e6-0cbda06f341b" (UID: "c9014af3-2918-4f4c-b2e6-0cbda06f341b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:41:56 crc kubenswrapper[4558]: I0120 17:41:56.700035 4558 scope.go:117] "RemoveContainer" containerID="a0e2963e31a43f16b2fb75fbf0d0c78351e54b488c59c5286b4daae9b7480769" Jan 20 17:41:56 crc kubenswrapper[4558]: I0120 17:41:56.708017 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c9014af3-2918-4f4c-b2e6-0cbda06f341b-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:56 crc kubenswrapper[4558]: I0120 17:41:56.708054 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9014af3-2918-4f4c-b2e6-0cbda06f341b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:56 crc kubenswrapper[4558]: I0120 17:41:56.708070 4558 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/c9014af3-2918-4f4c-b2e6-0cbda06f341b-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:56 crc kubenswrapper[4558]: I0120 17:41:56.708085 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c9014af3-2918-4f4c-b2e6-0cbda06f341b-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:56 crc kubenswrapper[4558]: I0120 17:41:56.708097 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkrkh\" (UniqueName: \"kubernetes.io/projected/c9014af3-2918-4f4c-b2e6-0cbda06f341b-kube-api-access-zkrkh\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:56 crc kubenswrapper[4558]: I0120 17:41:56.708108 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9014af3-2918-4f4c-b2e6-0cbda06f341b-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:41:56 crc kubenswrapper[4558]: 
I0120 17:41:56.720913 4558 scope.go:117] "RemoveContainer" containerID="b43209642693ee4cc8523f7585ac5ad962688ea7597d1070911a31cf4a28c244" Jan 20 17:41:56 crc kubenswrapper[4558]: I0120 17:41:56.739950 4558 scope.go:117] "RemoveContainer" containerID="f97b4e29f1e26182ff23d63a77976bee966ee5de4fbd05d044c364232dd96a98" Jan 20 17:41:56 crc kubenswrapper[4558]: I0120 17:41:56.757214 4558 scope.go:117] "RemoveContainer" containerID="c33fdcd3d341d1a1a1b440812f67c35ea95868909cb2776050a4363cfd9fd3d7" Jan 20 17:41:56 crc kubenswrapper[4558]: E0120 17:41:56.757802 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c33fdcd3d341d1a1a1b440812f67c35ea95868909cb2776050a4363cfd9fd3d7\": container with ID starting with c33fdcd3d341d1a1a1b440812f67c35ea95868909cb2776050a4363cfd9fd3d7 not found: ID does not exist" containerID="c33fdcd3d341d1a1a1b440812f67c35ea95868909cb2776050a4363cfd9fd3d7" Jan 20 17:41:56 crc kubenswrapper[4558]: I0120 17:41:56.757855 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c33fdcd3d341d1a1a1b440812f67c35ea95868909cb2776050a4363cfd9fd3d7"} err="failed to get container status \"c33fdcd3d341d1a1a1b440812f67c35ea95868909cb2776050a4363cfd9fd3d7\": rpc error: code = NotFound desc = could not find container \"c33fdcd3d341d1a1a1b440812f67c35ea95868909cb2776050a4363cfd9fd3d7\": container with ID starting with c33fdcd3d341d1a1a1b440812f67c35ea95868909cb2776050a4363cfd9fd3d7 not found: ID does not exist" Jan 20 17:41:56 crc kubenswrapper[4558]: I0120 17:41:56.757893 4558 scope.go:117] "RemoveContainer" containerID="a0e2963e31a43f16b2fb75fbf0d0c78351e54b488c59c5286b4daae9b7480769" Jan 20 17:41:56 crc kubenswrapper[4558]: E0120 17:41:56.758372 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a0e2963e31a43f16b2fb75fbf0d0c78351e54b488c59c5286b4daae9b7480769\": container with ID starting with a0e2963e31a43f16b2fb75fbf0d0c78351e54b488c59c5286b4daae9b7480769 not found: ID does not exist" containerID="a0e2963e31a43f16b2fb75fbf0d0c78351e54b488c59c5286b4daae9b7480769" Jan 20 17:41:56 crc kubenswrapper[4558]: I0120 17:41:56.758408 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a0e2963e31a43f16b2fb75fbf0d0c78351e54b488c59c5286b4daae9b7480769"} err="failed to get container status \"a0e2963e31a43f16b2fb75fbf0d0c78351e54b488c59c5286b4daae9b7480769\": rpc error: code = NotFound desc = could not find container \"a0e2963e31a43f16b2fb75fbf0d0c78351e54b488c59c5286b4daae9b7480769\": container with ID starting with a0e2963e31a43f16b2fb75fbf0d0c78351e54b488c59c5286b4daae9b7480769 not found: ID does not exist" Jan 20 17:41:56 crc kubenswrapper[4558]: I0120 17:41:56.758426 4558 scope.go:117] "RemoveContainer" containerID="b43209642693ee4cc8523f7585ac5ad962688ea7597d1070911a31cf4a28c244" Jan 20 17:41:56 crc kubenswrapper[4558]: E0120 17:41:56.758788 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b43209642693ee4cc8523f7585ac5ad962688ea7597d1070911a31cf4a28c244\": container with ID starting with b43209642693ee4cc8523f7585ac5ad962688ea7597d1070911a31cf4a28c244 not found: ID does not exist" containerID="b43209642693ee4cc8523f7585ac5ad962688ea7597d1070911a31cf4a28c244" Jan 20 17:41:56 crc kubenswrapper[4558]: I0120 17:41:56.758823 4558 pod_container_deletor.go:53] "DeleteContainer 
returned error" containerID={"Type":"cri-o","ID":"b43209642693ee4cc8523f7585ac5ad962688ea7597d1070911a31cf4a28c244"} err="failed to get container status \"b43209642693ee4cc8523f7585ac5ad962688ea7597d1070911a31cf4a28c244\": rpc error: code = NotFound desc = could not find container \"b43209642693ee4cc8523f7585ac5ad962688ea7597d1070911a31cf4a28c244\": container with ID starting with b43209642693ee4cc8523f7585ac5ad962688ea7597d1070911a31cf4a28c244 not found: ID does not exist" Jan 20 17:41:56 crc kubenswrapper[4558]: I0120 17:41:56.758852 4558 scope.go:117] "RemoveContainer" containerID="f97b4e29f1e26182ff23d63a77976bee966ee5de4fbd05d044c364232dd96a98" Jan 20 17:41:56 crc kubenswrapper[4558]: E0120 17:41:56.759273 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f97b4e29f1e26182ff23d63a77976bee966ee5de4fbd05d044c364232dd96a98\": container with ID starting with f97b4e29f1e26182ff23d63a77976bee966ee5de4fbd05d044c364232dd96a98 not found: ID does not exist" containerID="f97b4e29f1e26182ff23d63a77976bee966ee5de4fbd05d044c364232dd96a98" Jan 20 17:41:56 crc kubenswrapper[4558]: I0120 17:41:56.759314 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f97b4e29f1e26182ff23d63a77976bee966ee5de4fbd05d044c364232dd96a98"} err="failed to get container status \"f97b4e29f1e26182ff23d63a77976bee966ee5de4fbd05d044c364232dd96a98\": rpc error: code = NotFound desc = could not find container \"f97b4e29f1e26182ff23d63a77976bee966ee5de4fbd05d044c364232dd96a98\": container with ID starting with f97b4e29f1e26182ff23d63a77976bee966ee5de4fbd05d044c364232dd96a98 not found: ID does not exist" Jan 20 17:41:57 crc kubenswrapper[4558]: I0120 17:41:57.011883 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:41:57 crc kubenswrapper[4558]: I0120 17:41:57.021406 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:41:57 crc kubenswrapper[4558]: I0120 17:41:57.035691 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:41:57 crc kubenswrapper[4558]: E0120 17:41:57.036460 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c9014af3-2918-4f4c-b2e6-0cbda06f341b" containerName="proxy-httpd" Jan 20 17:41:57 crc kubenswrapper[4558]: I0120 17:41:57.036495 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c9014af3-2918-4f4c-b2e6-0cbda06f341b" containerName="proxy-httpd" Jan 20 17:41:57 crc kubenswrapper[4558]: E0120 17:41:57.036578 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c9014af3-2918-4f4c-b2e6-0cbda06f341b" containerName="ceilometer-central-agent" Jan 20 17:41:57 crc kubenswrapper[4558]: I0120 17:41:57.036586 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c9014af3-2918-4f4c-b2e6-0cbda06f341b" containerName="ceilometer-central-agent" Jan 20 17:41:57 crc kubenswrapper[4558]: E0120 17:41:57.036598 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c9014af3-2918-4f4c-b2e6-0cbda06f341b" containerName="ceilometer-notification-agent" Jan 20 17:41:57 crc kubenswrapper[4558]: I0120 17:41:57.036605 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c9014af3-2918-4f4c-b2e6-0cbda06f341b" containerName="ceilometer-notification-agent" Jan 20 17:41:57 crc kubenswrapper[4558]: E0120 17:41:57.036626 4558 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="c9014af3-2918-4f4c-b2e6-0cbda06f341b" containerName="sg-core" Jan 20 17:41:57 crc kubenswrapper[4558]: I0120 17:41:57.036634 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c9014af3-2918-4f4c-b2e6-0cbda06f341b" containerName="sg-core" Jan 20 17:41:57 crc kubenswrapper[4558]: I0120 17:41:57.036923 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c9014af3-2918-4f4c-b2e6-0cbda06f341b" containerName="ceilometer-central-agent" Jan 20 17:41:57 crc kubenswrapper[4558]: I0120 17:41:57.036980 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c9014af3-2918-4f4c-b2e6-0cbda06f341b" containerName="proxy-httpd" Jan 20 17:41:57 crc kubenswrapper[4558]: I0120 17:41:57.036996 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c9014af3-2918-4f4c-b2e6-0cbda06f341b" containerName="sg-core" Jan 20 17:41:57 crc kubenswrapper[4558]: I0120 17:41:57.037007 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c9014af3-2918-4f4c-b2e6-0cbda06f341b" containerName="ceilometer-notification-agent" Jan 20 17:41:57 crc kubenswrapper[4558]: I0120 17:41:57.039300 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:57 crc kubenswrapper[4558]: I0120 17:41:57.041206 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ceilometer-internal-svc" Jan 20 17:41:57 crc kubenswrapper[4558]: I0120 17:41:57.041607 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:41:57 crc kubenswrapper[4558]: I0120 17:41:57.042509 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:41:57 crc kubenswrapper[4558]: I0120 17:41:57.047607 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:41:57 crc kubenswrapper[4558]: I0120 17:41:57.219256 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6lg2c\" (UniqueName: \"kubernetes.io/projected/e43647af-b3c8-423f-b88c-d3a9b1aacef2-kube-api-access-6lg2c\") pod \"ceilometer-0\" (UID: \"e43647af-b3c8-423f-b88c-d3a9b1aacef2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:57 crc kubenswrapper[4558]: I0120 17:41:57.219443 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e43647af-b3c8-423f-b88c-d3a9b1aacef2-config-data\") pod \"ceilometer-0\" (UID: \"e43647af-b3c8-423f-b88c-d3a9b1aacef2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:57 crc kubenswrapper[4558]: I0120 17:41:57.219535 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e43647af-b3c8-423f-b88c-d3a9b1aacef2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e43647af-b3c8-423f-b88c-d3a9b1aacef2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:57 crc kubenswrapper[4558]: I0120 17:41:57.219683 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/e43647af-b3c8-423f-b88c-d3a9b1aacef2-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"e43647af-b3c8-423f-b88c-d3a9b1aacef2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:57 crc 
kubenswrapper[4558]: I0120 17:41:57.219822 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e43647af-b3c8-423f-b88c-d3a9b1aacef2-log-httpd\") pod \"ceilometer-0\" (UID: \"e43647af-b3c8-423f-b88c-d3a9b1aacef2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:57 crc kubenswrapper[4558]: I0120 17:41:57.219963 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e43647af-b3c8-423f-b88c-d3a9b1aacef2-scripts\") pod \"ceilometer-0\" (UID: \"e43647af-b3c8-423f-b88c-d3a9b1aacef2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:57 crc kubenswrapper[4558]: I0120 17:41:57.220061 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e43647af-b3c8-423f-b88c-d3a9b1aacef2-run-httpd\") pod \"ceilometer-0\" (UID: \"e43647af-b3c8-423f-b88c-d3a9b1aacef2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:57 crc kubenswrapper[4558]: I0120 17:41:57.220177 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e43647af-b3c8-423f-b88c-d3a9b1aacef2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e43647af-b3c8-423f-b88c-d3a9b1aacef2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:57 crc kubenswrapper[4558]: I0120 17:41:57.325315 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/e43647af-b3c8-423f-b88c-d3a9b1aacef2-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"e43647af-b3c8-423f-b88c-d3a9b1aacef2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:57 crc kubenswrapper[4558]: I0120 17:41:57.325674 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e43647af-b3c8-423f-b88c-d3a9b1aacef2-log-httpd\") pod \"ceilometer-0\" (UID: \"e43647af-b3c8-423f-b88c-d3a9b1aacef2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:57 crc kubenswrapper[4558]: I0120 17:41:57.325767 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e43647af-b3c8-423f-b88c-d3a9b1aacef2-scripts\") pod \"ceilometer-0\" (UID: \"e43647af-b3c8-423f-b88c-d3a9b1aacef2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:57 crc kubenswrapper[4558]: I0120 17:41:57.325806 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e43647af-b3c8-423f-b88c-d3a9b1aacef2-run-httpd\") pod \"ceilometer-0\" (UID: \"e43647af-b3c8-423f-b88c-d3a9b1aacef2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:57 crc kubenswrapper[4558]: I0120 17:41:57.325888 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e43647af-b3c8-423f-b88c-d3a9b1aacef2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e43647af-b3c8-423f-b88c-d3a9b1aacef2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:57 crc kubenswrapper[4558]: I0120 17:41:57.326042 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6lg2c\" (UniqueName: 
\"kubernetes.io/projected/e43647af-b3c8-423f-b88c-d3a9b1aacef2-kube-api-access-6lg2c\") pod \"ceilometer-0\" (UID: \"e43647af-b3c8-423f-b88c-d3a9b1aacef2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:57 crc kubenswrapper[4558]: I0120 17:41:57.326182 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e43647af-b3c8-423f-b88c-d3a9b1aacef2-config-data\") pod \"ceilometer-0\" (UID: \"e43647af-b3c8-423f-b88c-d3a9b1aacef2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:57 crc kubenswrapper[4558]: I0120 17:41:57.326264 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e43647af-b3c8-423f-b88c-d3a9b1aacef2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e43647af-b3c8-423f-b88c-d3a9b1aacef2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:57 crc kubenswrapper[4558]: I0120 17:41:57.326319 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e43647af-b3c8-423f-b88c-d3a9b1aacef2-log-httpd\") pod \"ceilometer-0\" (UID: \"e43647af-b3c8-423f-b88c-d3a9b1aacef2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:57 crc kubenswrapper[4558]: I0120 17:41:57.326927 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e43647af-b3c8-423f-b88c-d3a9b1aacef2-run-httpd\") pod \"ceilometer-0\" (UID: \"e43647af-b3c8-423f-b88c-d3a9b1aacef2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:57 crc kubenswrapper[4558]: I0120 17:41:57.331826 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e43647af-b3c8-423f-b88c-d3a9b1aacef2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e43647af-b3c8-423f-b88c-d3a9b1aacef2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:57 crc kubenswrapper[4558]: I0120 17:41:57.332849 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/e43647af-b3c8-423f-b88c-d3a9b1aacef2-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"e43647af-b3c8-423f-b88c-d3a9b1aacef2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:57 crc kubenswrapper[4558]: I0120 17:41:57.332939 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e43647af-b3c8-423f-b88c-d3a9b1aacef2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e43647af-b3c8-423f-b88c-d3a9b1aacef2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:57 crc kubenswrapper[4558]: I0120 17:41:57.333907 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e43647af-b3c8-423f-b88c-d3a9b1aacef2-config-data\") pod \"ceilometer-0\" (UID: \"e43647af-b3c8-423f-b88c-d3a9b1aacef2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:57 crc kubenswrapper[4558]: I0120 17:41:57.341419 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e43647af-b3c8-423f-b88c-d3a9b1aacef2-scripts\") pod \"ceilometer-0\" (UID: \"e43647af-b3c8-423f-b88c-d3a9b1aacef2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:57 crc kubenswrapper[4558]: I0120 17:41:57.343568 4558 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-6lg2c\" (UniqueName: \"kubernetes.io/projected/e43647af-b3c8-423f-b88c-d3a9b1aacef2-kube-api-access-6lg2c\") pod \"ceilometer-0\" (UID: \"e43647af-b3c8-423f-b88c-d3a9b1aacef2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:57 crc kubenswrapper[4558]: I0120 17:41:57.360465 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:41:57 crc kubenswrapper[4558]: I0120 17:41:57.764467 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:41:57 crc kubenswrapper[4558]: W0120 17:41:57.769382 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode43647af_b3c8_423f_b88c_d3a9b1aacef2.slice/crio-928bc3cf1457b7bb905c7b0d4ce7e8590f2c8e7e706290de1e5b59a2fa0403d2 WatchSource:0}: Error finding container 928bc3cf1457b7bb905c7b0d4ce7e8590f2c8e7e706290de1e5b59a2fa0403d2: Status 404 returned error can't find the container with id 928bc3cf1457b7bb905c7b0d4ce7e8590f2c8e7e706290de1e5b59a2fa0403d2 Jan 20 17:41:58 crc kubenswrapper[4558]: I0120 17:41:58.578333 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c9014af3-2918-4f4c-b2e6-0cbda06f341b" path="/var/lib/kubelet/pods/c9014af3-2918-4f4c-b2e6-0cbda06f341b/volumes" Jan 20 17:41:58 crc kubenswrapper[4558]: I0120 17:41:58.722691 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"e43647af-b3c8-423f-b88c-d3a9b1aacef2","Type":"ContainerStarted","Data":"bb7f865b92b1c710c22f66a0b1afc2b13b7529e74d6bbbbdbce454bf11a3c1a0"} Jan 20 17:41:58 crc kubenswrapper[4558]: I0120 17:41:58.722758 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"e43647af-b3c8-423f-b88c-d3a9b1aacef2","Type":"ContainerStarted","Data":"928bc3cf1457b7bb905c7b0d4ce7e8590f2c8e7e706290de1e5b59a2fa0403d2"} Jan 20 17:41:59 crc kubenswrapper[4558]: I0120 17:41:59.732923 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"e43647af-b3c8-423f-b88c-d3a9b1aacef2","Type":"ContainerStarted","Data":"4842b1e75542f7bbd13f59782fc4bdc7732e7ba2586beb53f10328006641cd69"} Jan 20 17:42:00 crc kubenswrapper[4558]: I0120 17:42:00.742807 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"e43647af-b3c8-423f-b88c-d3a9b1aacef2","Type":"ContainerStarted","Data":"c72b716d9037758e9baeb566c5ac04381df6b5dc61728fdf1da64996d680ebeb"} Jan 20 17:42:02 crc kubenswrapper[4558]: I0120 17:42:02.763436 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"e43647af-b3c8-423f-b88c-d3a9b1aacef2","Type":"ContainerStarted","Data":"8caf6b3ac50eb079a954e2fde982cd0bb14aa26097a6d791646bafef2d44e268"} Jan 20 17:42:02 crc kubenswrapper[4558]: I0120 17:42:02.765068 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:42:02 crc kubenswrapper[4558]: I0120 17:42:02.793862 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=1.787584418 podStartE2EDuration="5.793848653s" podCreationTimestamp="2026-01-20 17:41:57 +0000 UTC" firstStartedPulling="2026-01-20 17:41:57.771517488 +0000 UTC m=+3611.531855455" lastFinishedPulling="2026-01-20 17:42:01.777781724 +0000 
UTC m=+3615.538119690" observedRunningTime="2026-01-20 17:42:02.787568575 +0000 UTC m=+3616.547906543" watchObservedRunningTime="2026-01-20 17:42:02.793848653 +0000 UTC m=+3616.554186620" Jan 20 17:42:02 crc kubenswrapper[4558]: I0120 17:42:02.998031 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:42:02 crc kubenswrapper[4558]: I0120 17:42:02.998091 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:42:03 crc kubenswrapper[4558]: I0120 17:42:03.566805 4558 scope.go:117] "RemoveContainer" containerID="2d2a8989ca5a5af98b2c9dc8f801ea0675339ec14800f437c058c5a43c20146b" Jan 20 17:42:03 crc kubenswrapper[4558]: E0120 17:42:03.567123 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:42:04 crc kubenswrapper[4558]: I0120 17:42:04.046611 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" podUID="d0f6996d-f07f-4cf5-8227-6bfed15e1370" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.1.8:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:42:04 crc kubenswrapper[4558]: I0120 17:42:04.047024 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" podUID="d0f6996d-f07f-4cf5-8227-6bfed15e1370" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.1.8:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:42:13 crc kubenswrapper[4558]: I0120 17:42:13.004668 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:42:13 crc kubenswrapper[4558]: I0120 17:42:13.005445 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:42:13 crc kubenswrapper[4558]: I0120 17:42:13.005729 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:42:13 crc kubenswrapper[4558]: I0120 17:42:13.005955 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:42:13 crc kubenswrapper[4558]: I0120 17:42:13.011695 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:42:13 crc kubenswrapper[4558]: I0120 17:42:13.011911 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:42:17 crc kubenswrapper[4558]: I0120 17:42:17.566053 4558 scope.go:117] "RemoveContainer" containerID="2d2a8989ca5a5af98b2c9dc8f801ea0675339ec14800f437c058c5a43c20146b" Jan 20 17:42:17 crc kubenswrapper[4558]: E0120 17:42:17.566968 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:42:19 crc kubenswrapper[4558]: I0120 17:42:19.586226 4558 scope.go:117] "RemoveContainer" containerID="4bd125dcc99d626eaea56657ad3f9225c43b7d93f078669af7ed4447f91d3330" Jan 20 17:42:19 crc kubenswrapper[4558]: I0120 17:42:19.609576 4558 scope.go:117] "RemoveContainer" containerID="1115d7fa22eeb3cf4d6e899963fd5f9d10ada47b94c34b2d601965731bba164c" Jan 20 17:42:19 crc kubenswrapper[4558]: I0120 17:42:19.652529 4558 scope.go:117] "RemoveContainer" containerID="4f3f925041f116a55af0c01739c0b06976097986bc12562c41828bd96d52c5f4" Jan 20 17:42:19 crc kubenswrapper[4558]: I0120 17:42:19.670984 4558 scope.go:117] "RemoveContainer" containerID="93d74d768c827cf81147355c461a6f8bd023a18e684bd30dab48966103e6de7e" Jan 20 17:42:19 crc kubenswrapper[4558]: I0120 17:42:19.690605 4558 scope.go:117] "RemoveContainer" containerID="c2621d3cab78726f6f01fe56a565e7406820ab18caa470c8455fa8fed99663ce" Jan 20 17:42:19 crc kubenswrapper[4558]: I0120 17:42:19.706942 4558 scope.go:117] "RemoveContainer" containerID="9940485ddf968cc8ec26bafdb2ca664eaf066521e0b63efd3dafb4c251d88a6e" Jan 20 17:42:19 crc kubenswrapper[4558]: I0120 17:42:19.722291 4558 scope.go:117] "RemoveContainer" containerID="ae2197c02d74d0b7fb6e5576d5e29ab4e2fe41416be7e0d896afd6625b8eed8a" Jan 20 17:42:19 crc kubenswrapper[4558]: I0120 17:42:19.735145 4558 scope.go:117] "RemoveContainer" containerID="7b466bcc44125b12b3728f5c2f0dd6ae9956b945bf663448fef7df702634964d" Jan 20 17:42:19 crc kubenswrapper[4558]: I0120 17:42:19.753547 4558 scope.go:117] "RemoveContainer" containerID="10f10d689a9d323a86b2d378fb03cb6850503e157f7b9f55a3eb04a8d068973c" Jan 20 17:42:19 crc kubenswrapper[4558]: I0120 17:42:19.777837 4558 scope.go:117] "RemoveContainer" containerID="6500342a069cbdc40f29bb0f7540a921249de5ac706a7008df5f940aa1c75eb9" Jan 20 17:42:19 crc kubenswrapper[4558]: I0120 17:42:19.810415 4558 scope.go:117] "RemoveContainer" containerID="8fb64177ca582dd6f77f7548f90b08a99b652fb60686912cfa1ae24e3d6b7093" Jan 20 17:42:27 crc kubenswrapper[4558]: I0120 17:42:27.369319 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:42:28 crc kubenswrapper[4558]: I0120 17:42:28.566070 4558 scope.go:117] "RemoveContainer" containerID="2d2a8989ca5a5af98b2c9dc8f801ea0675339ec14800f437c058c5a43c20146b" Jan 20 17:42:28 crc kubenswrapper[4558]: E0120 17:42:28.574832 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:42:30 crc kubenswrapper[4558]: E0120 17:42:30.689977 4558 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 192.168.25.8:60262->192.168.25.8:43883: write tcp 192.168.25.8:60262->192.168.25.8:43883: write: broken pipe Jan 20 17:42:31 crc kubenswrapper[4558]: E0120 17:42:31.080987 4558 upgradeaware.go:441] Error proxying data from backend to client: writeto tcp 192.168.25.8:60298->192.168.25.8:43883: read tcp 192.168.25.8:60298->192.168.25.8:43883: read: connection reset by 
peer Jan 20 17:42:31 crc kubenswrapper[4558]: E0120 17:42:31.477481 4558 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 192.168.25.8:60324->192.168.25.8:43883: write tcp 192.168.25.8:60324->192.168.25.8:43883: write: broken pipe Jan 20 17:42:41 crc kubenswrapper[4558]: I0120 17:42:41.566628 4558 scope.go:117] "RemoveContainer" containerID="2d2a8989ca5a5af98b2c9dc8f801ea0675339ec14800f437c058c5a43c20146b" Jan 20 17:42:41 crc kubenswrapper[4558]: E0120 17:42:41.567488 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:42:43 crc kubenswrapper[4558]: E0120 17:42:43.747466 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-ovn-metrics: secret "cert-ovn-metrics" not found Jan 20 17:42:43 crc kubenswrapper[4558]: E0120 17:42:43.747880 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b5ab3ab7-6929-4165-91ad-860f5f109147-metrics-certs-tls-certs podName:b5ab3ab7-6929-4165-91ad-860f5f109147 nodeName:}" failed. No retries permitted until 2026-01-20 17:42:44.247858819 +0000 UTC m=+3658.008196786 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs-tls-certs" (UniqueName: "kubernetes.io/secret/b5ab3ab7-6929-4165-91ad-860f5f109147-metrics-certs-tls-certs") pod "ovsdbserver-sb-0" (UID: "b5ab3ab7-6929-4165-91ad-860f5f109147") : secret "cert-ovn-metrics" not found Jan 20 17:42:43 crc kubenswrapper[4558]: E0120 17:42:43.748229 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-ovndbcluster-sb-ovndbs: secret "cert-ovndbcluster-sb-ovndbs" not found Jan 20 17:42:43 crc kubenswrapper[4558]: E0120 17:42:43.748288 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b5ab3ab7-6929-4165-91ad-860f5f109147-ovsdbserver-sb-tls-certs podName:b5ab3ab7-6929-4165-91ad-860f5f109147 nodeName:}" failed. No retries permitted until 2026-01-20 17:42:44.248271404 +0000 UTC m=+3658.008609371 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "ovsdbserver-sb-tls-certs" (UniqueName: "kubernetes.io/secret/b5ab3ab7-6929-4165-91ad-860f5f109147-ovsdbserver-sb-tls-certs") pod "ovsdbserver-sb-0" (UID: "b5ab3ab7-6929-4165-91ad-860f5f109147") : secret "cert-ovndbcluster-sb-ovndbs" not found Jan 20 17:42:44 crc kubenswrapper[4558]: E0120 17:42:44.259693 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-ovn-metrics: secret "cert-ovn-metrics" not found Jan 20 17:42:44 crc kubenswrapper[4558]: E0120 17:42:44.259779 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-ovndbcluster-sb-ovndbs: secret "cert-ovndbcluster-sb-ovndbs" not found Jan 20 17:42:44 crc kubenswrapper[4558]: E0120 17:42:44.259809 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b5ab3ab7-6929-4165-91ad-860f5f109147-metrics-certs-tls-certs podName:b5ab3ab7-6929-4165-91ad-860f5f109147 nodeName:}" failed. No retries permitted until 2026-01-20 17:42:45.259784482 +0000 UTC m=+3659.020122449 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs-tls-certs" (UniqueName: "kubernetes.io/secret/b5ab3ab7-6929-4165-91ad-860f5f109147-metrics-certs-tls-certs") pod "ovsdbserver-sb-0" (UID: "b5ab3ab7-6929-4165-91ad-860f5f109147") : secret "cert-ovn-metrics" not found Jan 20 17:42:44 crc kubenswrapper[4558]: E0120 17:42:44.259857 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b5ab3ab7-6929-4165-91ad-860f5f109147-ovsdbserver-sb-tls-certs podName:b5ab3ab7-6929-4165-91ad-860f5f109147 nodeName:}" failed. No retries permitted until 2026-01-20 17:42:45.259836691 +0000 UTC m=+3659.020174658 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "ovsdbserver-sb-tls-certs" (UniqueName: "kubernetes.io/secret/b5ab3ab7-6929-4165-91ad-860f5f109147-ovsdbserver-sb-tls-certs") pod "ovsdbserver-sb-0" (UID: "b5ab3ab7-6929-4165-91ad-860f5f109147") : secret "cert-ovndbcluster-sb-ovndbs" not found Jan 20 17:42:45 crc kubenswrapper[4558]: E0120 17:42:45.287133 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-ovn-metrics: secret "cert-ovn-metrics" not found Jan 20 17:42:45 crc kubenswrapper[4558]: E0120 17:42:45.287261 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b5ab3ab7-6929-4165-91ad-860f5f109147-metrics-certs-tls-certs podName:b5ab3ab7-6929-4165-91ad-860f5f109147 nodeName:}" failed. No retries permitted until 2026-01-20 17:42:47.287239313 +0000 UTC m=+3661.047577280 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs-tls-certs" (UniqueName: "kubernetes.io/secret/b5ab3ab7-6929-4165-91ad-860f5f109147-metrics-certs-tls-certs") pod "ovsdbserver-sb-0" (UID: "b5ab3ab7-6929-4165-91ad-860f5f109147") : secret "cert-ovn-metrics" not found Jan 20 17:42:45 crc kubenswrapper[4558]: E0120 17:42:45.287268 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-ovndbcluster-sb-ovndbs: secret "cert-ovndbcluster-sb-ovndbs" not found Jan 20 17:42:45 crc kubenswrapper[4558]: E0120 17:42:45.287344 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b5ab3ab7-6929-4165-91ad-860f5f109147-ovsdbserver-sb-tls-certs podName:b5ab3ab7-6929-4165-91ad-860f5f109147 nodeName:}" failed. No retries permitted until 2026-01-20 17:42:47.287324543 +0000 UTC m=+3661.047662510 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "ovsdbserver-sb-tls-certs" (UniqueName: "kubernetes.io/secret/b5ab3ab7-6929-4165-91ad-860f5f109147-ovsdbserver-sb-tls-certs") pod "ovsdbserver-sb-0" (UID: "b5ab3ab7-6929-4165-91ad-860f5f109147") : secret "cert-ovndbcluster-sb-ovndbs" not found Jan 20 17:42:46 crc kubenswrapper[4558]: E0120 17:42:46.617091 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-kube-state-metrics-svc: secret "cert-kube-state-metrics-svc" not found Jan 20 17:42:46 crc kubenswrapper[4558]: E0120 17:42:46.617195 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f4affc06-2032-4a14-8422-3c3ca984eada-kube-state-metrics-tls-certs podName:f4affc06-2032-4a14-8422-3c3ca984eada nodeName:}" failed. No retries permitted until 2026-01-20 17:42:47.117141347 +0000 UTC m=+3660.877479314 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-state-metrics-tls-certs" (UniqueName: "kubernetes.io/secret/f4affc06-2032-4a14-8422-3c3ca984eada-kube-state-metrics-tls-certs") pod "kube-state-metrics-0" (UID: "f4affc06-2032-4a14-8422-3c3ca984eada") : secret "cert-kube-state-metrics-svc" not found Jan 20 17:42:47 crc kubenswrapper[4558]: E0120 17:42:47.125511 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-kube-state-metrics-svc: secret "cert-kube-state-metrics-svc" not found Jan 20 17:42:47 crc kubenswrapper[4558]: E0120 17:42:47.125891 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f4affc06-2032-4a14-8422-3c3ca984eada-kube-state-metrics-tls-certs podName:f4affc06-2032-4a14-8422-3c3ca984eada nodeName:}" failed. No retries permitted until 2026-01-20 17:42:48.125869669 +0000 UTC m=+3661.886207635 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-state-metrics-tls-certs" (UniqueName: "kubernetes.io/secret/f4affc06-2032-4a14-8422-3c3ca984eada-kube-state-metrics-tls-certs") pod "kube-state-metrics-0" (UID: "f4affc06-2032-4a14-8422-3c3ca984eada") : secret "cert-kube-state-metrics-svc" not found Jan 20 17:42:47 crc kubenswrapper[4558]: E0120 17:42:47.330809 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-ovn-metrics: secret "cert-ovn-metrics" not found Jan 20 17:42:47 crc kubenswrapper[4558]: E0120 17:42:47.331373 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b5ab3ab7-6929-4165-91ad-860f5f109147-metrics-certs-tls-certs podName:b5ab3ab7-6929-4165-91ad-860f5f109147 nodeName:}" failed. No retries permitted until 2026-01-20 17:42:51.33133395 +0000 UTC m=+3665.091671917 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs-tls-certs" (UniqueName: "kubernetes.io/secret/b5ab3ab7-6929-4165-91ad-860f5f109147-metrics-certs-tls-certs") pod "ovsdbserver-sb-0" (UID: "b5ab3ab7-6929-4165-91ad-860f5f109147") : secret "cert-ovn-metrics" not found Jan 20 17:42:47 crc kubenswrapper[4558]: E0120 17:42:47.331471 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-ovndbcluster-sb-ovndbs: secret "cert-ovndbcluster-sb-ovndbs" not found Jan 20 17:42:47 crc kubenswrapper[4558]: E0120 17:42:47.331630 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b5ab3ab7-6929-4165-91ad-860f5f109147-ovsdbserver-sb-tls-certs podName:b5ab3ab7-6929-4165-91ad-860f5f109147 nodeName:}" failed. No retries permitted until 2026-01-20 17:42:51.331603027 +0000 UTC m=+3665.091940994 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "ovsdbserver-sb-tls-certs" (UniqueName: "kubernetes.io/secret/b5ab3ab7-6929-4165-91ad-860f5f109147-ovsdbserver-sb-tls-certs") pod "ovsdbserver-sb-0" (UID: "b5ab3ab7-6929-4165-91ad-860f5f109147") : secret "cert-ovndbcluster-sb-ovndbs" not found Jan 20 17:42:48 crc kubenswrapper[4558]: E0120 17:42:48.147663 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-kube-state-metrics-svc: secret "cert-kube-state-metrics-svc" not found Jan 20 17:42:48 crc kubenswrapper[4558]: E0120 17:42:48.148460 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f4affc06-2032-4a14-8422-3c3ca984eada-kube-state-metrics-tls-certs podName:f4affc06-2032-4a14-8422-3c3ca984eada nodeName:}" failed. No retries permitted until 2026-01-20 17:42:50.148409047 +0000 UTC m=+3663.908747025 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-state-metrics-tls-certs" (UniqueName: "kubernetes.io/secret/f4affc06-2032-4a14-8422-3c3ca984eada-kube-state-metrics-tls-certs") pod "kube-state-metrics-0" (UID: "f4affc06-2032-4a14-8422-3c3ca984eada") : secret "cert-kube-state-metrics-svc" not found Jan 20 17:42:50 crc kubenswrapper[4558]: E0120 17:42:50.186321 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-kube-state-metrics-svc: secret "cert-kube-state-metrics-svc" not found Jan 20 17:42:50 crc kubenswrapper[4558]: E0120 17:42:50.186416 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f4affc06-2032-4a14-8422-3c3ca984eada-kube-state-metrics-tls-certs podName:f4affc06-2032-4a14-8422-3c3ca984eada nodeName:}" failed. No retries permitted until 2026-01-20 17:42:54.186392814 +0000 UTC m=+3667.946730782 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-state-metrics-tls-certs" (UniqueName: "kubernetes.io/secret/f4affc06-2032-4a14-8422-3c3ca984eada-kube-state-metrics-tls-certs") pod "kube-state-metrics-0" (UID: "f4affc06-2032-4a14-8422-3c3ca984eada") : secret "cert-kube-state-metrics-svc" not found Jan 20 17:42:51 crc kubenswrapper[4558]: I0120 17:42:51.379758 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:42:51 crc kubenswrapper[4558]: E0120 17:42:51.418310 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-ovndbcluster-sb-ovndbs: secret "cert-ovndbcluster-sb-ovndbs" not found Jan 20 17:42:51 crc kubenswrapper[4558]: E0120 17:42:51.418393 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b5ab3ab7-6929-4165-91ad-860f5f109147-ovsdbserver-sb-tls-certs podName:b5ab3ab7-6929-4165-91ad-860f5f109147 nodeName:}" failed. No retries permitted until 2026-01-20 17:42:59.418373724 +0000 UTC m=+3673.178711690 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "ovsdbserver-sb-tls-certs" (UniqueName: "kubernetes.io/secret/b5ab3ab7-6929-4165-91ad-860f5f109147-ovsdbserver-sb-tls-certs") pod "ovsdbserver-sb-0" (UID: "b5ab3ab7-6929-4165-91ad-860f5f109147") : secret "cert-ovndbcluster-sb-ovndbs" not found Jan 20 17:42:51 crc kubenswrapper[4558]: E0120 17:42:51.418523 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-ovn-metrics: secret "cert-ovn-metrics" not found Jan 20 17:42:51 crc kubenswrapper[4558]: E0120 17:42:51.418595 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b5ab3ab7-6929-4165-91ad-860f5f109147-metrics-certs-tls-certs podName:b5ab3ab7-6929-4165-91ad-860f5f109147 nodeName:}" failed. No retries permitted until 2026-01-20 17:42:59.418575022 +0000 UTC m=+3673.178912989 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs-tls-certs" (UniqueName: "kubernetes.io/secret/b5ab3ab7-6929-4165-91ad-860f5f109147-metrics-certs-tls-certs") pod "ovsdbserver-sb-0" (UID: "b5ab3ab7-6929-4165-91ad-860f5f109147") : secret "cert-ovn-metrics" not found Jan 20 17:42:51 crc kubenswrapper[4558]: I0120 17:42:51.488615 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/openstack-galera-0" podUID="4bcc759d-3647-4194-9a91-acac49948173" containerName="galera" containerID="cri-o://1b4d88c338068dde213bad574869f30ce42019618f93ed6c25b82149cf965480" gracePeriod=30 Jan 20 17:42:51 crc kubenswrapper[4558]: I0120 17:42:51.584109 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:42:51 crc kubenswrapper[4558]: I0120 17:42:51.681285 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/openstack-cell1-galera-0" podUID="0b56d0cd-1994-4e39-9c78-82b5105222db" containerName="galera" containerID="cri-o://0155fd8ce44500446dfb48466cd9d3cc3b7b70429fb0d969d7884dee5362f7a9" gracePeriod=30 Jan 20 17:42:52 crc kubenswrapper[4558]: I0120 17:42:52.244995 4558 generic.go:334] "Generic (PLEG): container finished" podID="0b56d0cd-1994-4e39-9c78-82b5105222db" containerID="0155fd8ce44500446dfb48466cd9d3cc3b7b70429fb0d969d7884dee5362f7a9" exitCode=0 Jan 20 17:42:52 crc kubenswrapper[4558]: I0120 17:42:52.245065 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"0b56d0cd-1994-4e39-9c78-82b5105222db","Type":"ContainerDied","Data":"0155fd8ce44500446dfb48466cd9d3cc3b7b70429fb0d969d7884dee5362f7a9"} Jan 20 17:42:52 crc kubenswrapper[4558]: I0120 17:42:52.516523 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:42:52 crc kubenswrapper[4558]: I0120 17:42:52.610222 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:42:52 crc kubenswrapper[4558]: I0120 17:42:52.647489 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/0b56d0cd-1994-4e39-9c78-82b5105222db-kolla-config\") pod \"0b56d0cd-1994-4e39-9c78-82b5105222db\" (UID: \"0b56d0cd-1994-4e39-9c78-82b5105222db\") " Jan 20 17:42:52 crc kubenswrapper[4558]: I0120 17:42:52.647582 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/0b56d0cd-1994-4e39-9c78-82b5105222db-config-data-default\") pod \"0b56d0cd-1994-4e39-9c78-82b5105222db\" (UID: \"0b56d0cd-1994-4e39-9c78-82b5105222db\") " Jan 20 17:42:52 crc kubenswrapper[4558]: I0120 17:42:52.647651 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0b56d0cd-1994-4e39-9c78-82b5105222db-operator-scripts\") pod \"0b56d0cd-1994-4e39-9c78-82b5105222db\" (UID: \"0b56d0cd-1994-4e39-9c78-82b5105222db\") " Jan 20 17:42:52 crc kubenswrapper[4558]: I0120 17:42:52.647715 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/0b56d0cd-1994-4e39-9c78-82b5105222db-galera-tls-certs\") pod \"0b56d0cd-1994-4e39-9c78-82b5105222db\" (UID: \"0b56d0cd-1994-4e39-9c78-82b5105222db\") " Jan 20 17:42:52 crc kubenswrapper[4558]: I0120 17:42:52.647823 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b56d0cd-1994-4e39-9c78-82b5105222db-combined-ca-bundle\") pod \"0b56d0cd-1994-4e39-9c78-82b5105222db\" (UID: \"0b56d0cd-1994-4e39-9c78-82b5105222db\") " Jan 20 17:42:52 crc kubenswrapper[4558]: I0120 17:42:52.647884 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"0b56d0cd-1994-4e39-9c78-82b5105222db\" (UID: \"0b56d0cd-1994-4e39-9c78-82b5105222db\") " Jan 20 17:42:52 crc kubenswrapper[4558]: I0120 17:42:52.647940 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lg4jc\" (UniqueName: \"kubernetes.io/projected/0b56d0cd-1994-4e39-9c78-82b5105222db-kube-api-access-lg4jc\") pod \"0b56d0cd-1994-4e39-9c78-82b5105222db\" (UID: \"0b56d0cd-1994-4e39-9c78-82b5105222db\") " Jan 20 17:42:52 crc kubenswrapper[4558]: I0120 17:42:52.647994 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/0b56d0cd-1994-4e39-9c78-82b5105222db-config-data-generated\") pod \"0b56d0cd-1994-4e39-9c78-82b5105222db\" (UID: \"0b56d0cd-1994-4e39-9c78-82b5105222db\") " Jan 20 17:42:52 crc kubenswrapper[4558]: I0120 17:42:52.648669 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b56d0cd-1994-4e39-9c78-82b5105222db-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "0b56d0cd-1994-4e39-9c78-82b5105222db" (UID: "0b56d0cd-1994-4e39-9c78-82b5105222db"). InnerVolumeSpecName "config-data-default". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:42:52 crc kubenswrapper[4558]: I0120 17:42:52.648751 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b56d0cd-1994-4e39-9c78-82b5105222db-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "0b56d0cd-1994-4e39-9c78-82b5105222db" (UID: "0b56d0cd-1994-4e39-9c78-82b5105222db"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:42:52 crc kubenswrapper[4558]: I0120 17:42:52.648912 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/0b56d0cd-1994-4e39-9c78-82b5105222db-config-data-default\") on node \"crc\" DevicePath \"\"" Jan 20 17:42:52 crc kubenswrapper[4558]: I0120 17:42:52.649068 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0b56d0cd-1994-4e39-9c78-82b5105222db-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "0b56d0cd-1994-4e39-9c78-82b5105222db" (UID: "0b56d0cd-1994-4e39-9c78-82b5105222db"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:42:52 crc kubenswrapper[4558]: I0120 17:42:52.649348 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b56d0cd-1994-4e39-9c78-82b5105222db-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "0b56d0cd-1994-4e39-9c78-82b5105222db" (UID: "0b56d0cd-1994-4e39-9c78-82b5105222db"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:42:52 crc kubenswrapper[4558]: I0120 17:42:52.653738 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b56d0cd-1994-4e39-9c78-82b5105222db-kube-api-access-lg4jc" (OuterVolumeSpecName: "kube-api-access-lg4jc") pod "0b56d0cd-1994-4e39-9c78-82b5105222db" (UID: "0b56d0cd-1994-4e39-9c78-82b5105222db"). InnerVolumeSpecName "kube-api-access-lg4jc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:42:52 crc kubenswrapper[4558]: I0120 17:42:52.664751 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage15-crc" (OuterVolumeSpecName: "mysql-db") pod "0b56d0cd-1994-4e39-9c78-82b5105222db" (UID: "0b56d0cd-1994-4e39-9c78-82b5105222db"). InnerVolumeSpecName "local-storage15-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:42:52 crc kubenswrapper[4558]: I0120 17:42:52.674064 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b56d0cd-1994-4e39-9c78-82b5105222db-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0b56d0cd-1994-4e39-9c78-82b5105222db" (UID: "0b56d0cd-1994-4e39-9c78-82b5105222db"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:42:52 crc kubenswrapper[4558]: I0120 17:42:52.695250 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b56d0cd-1994-4e39-9c78-82b5105222db-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "0b56d0cd-1994-4e39-9c78-82b5105222db" (UID: "0b56d0cd-1994-4e39-9c78-82b5105222db"). InnerVolumeSpecName "galera-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:42:52 crc kubenswrapper[4558]: I0120 17:42:52.750566 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/4bcc759d-3647-4194-9a91-acac49948173-kolla-config\") pod \"4bcc759d-3647-4194-9a91-acac49948173\" (UID: \"4bcc759d-3647-4194-9a91-acac49948173\") " Jan 20 17:42:52 crc kubenswrapper[4558]: I0120 17:42:52.750612 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4bcc759d-3647-4194-9a91-acac49948173-combined-ca-bundle\") pod \"4bcc759d-3647-4194-9a91-acac49948173\" (UID: \"4bcc759d-3647-4194-9a91-acac49948173\") " Jan 20 17:42:52 crc kubenswrapper[4558]: I0120 17:42:52.750663 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/4bcc759d-3647-4194-9a91-acac49948173-galera-tls-certs\") pod \"4bcc759d-3647-4194-9a91-acac49948173\" (UID: \"4bcc759d-3647-4194-9a91-acac49948173\") " Jan 20 17:42:52 crc kubenswrapper[4558]: I0120 17:42:52.750710 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"4bcc759d-3647-4194-9a91-acac49948173\" (UID: \"4bcc759d-3647-4194-9a91-acac49948173\") " Jan 20 17:42:52 crc kubenswrapper[4558]: I0120 17:42:52.750865 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/4bcc759d-3647-4194-9a91-acac49948173-config-data-default\") pod \"4bcc759d-3647-4194-9a91-acac49948173\" (UID: \"4bcc759d-3647-4194-9a91-acac49948173\") " Jan 20 17:42:52 crc kubenswrapper[4558]: I0120 17:42:52.750924 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x6vsj\" (UniqueName: \"kubernetes.io/projected/4bcc759d-3647-4194-9a91-acac49948173-kube-api-access-x6vsj\") pod \"4bcc759d-3647-4194-9a91-acac49948173\" (UID: \"4bcc759d-3647-4194-9a91-acac49948173\") " Jan 20 17:42:52 crc kubenswrapper[4558]: I0120 17:42:52.751019 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4bcc759d-3647-4194-9a91-acac49948173-operator-scripts\") pod \"4bcc759d-3647-4194-9a91-acac49948173\" (UID: \"4bcc759d-3647-4194-9a91-acac49948173\") " Jan 20 17:42:52 crc kubenswrapper[4558]: I0120 17:42:52.751106 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/4bcc759d-3647-4194-9a91-acac49948173-config-data-generated\") pod \"4bcc759d-3647-4194-9a91-acac49948173\" (UID: \"4bcc759d-3647-4194-9a91-acac49948173\") " Jan 20 17:42:52 crc kubenswrapper[4558]: I0120 17:42:52.751760 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4bcc759d-3647-4194-9a91-acac49948173-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "4bcc759d-3647-4194-9a91-acac49948173" (UID: "4bcc759d-3647-4194-9a91-acac49948173"). InnerVolumeSpecName "config-data-generated". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:42:52 crc kubenswrapper[4558]: I0120 17:42:52.752001 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bcc759d-3647-4194-9a91-acac49948173-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "4bcc759d-3647-4194-9a91-acac49948173" (UID: "4bcc759d-3647-4194-9a91-acac49948173"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:42:52 crc kubenswrapper[4558]: I0120 17:42:52.752673 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") on node \"crc\" " Jan 20 17:42:52 crc kubenswrapper[4558]: I0120 17:42:52.752711 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lg4jc\" (UniqueName: \"kubernetes.io/projected/0b56d0cd-1994-4e39-9c78-82b5105222db-kube-api-access-lg4jc\") on node \"crc\" DevicePath \"\"" Jan 20 17:42:52 crc kubenswrapper[4558]: I0120 17:42:52.752724 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/0b56d0cd-1994-4e39-9c78-82b5105222db-config-data-generated\") on node \"crc\" DevicePath \"\"" Jan 20 17:42:52 crc kubenswrapper[4558]: I0120 17:42:52.752737 4558 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/0b56d0cd-1994-4e39-9c78-82b5105222db-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:42:52 crc kubenswrapper[4558]: I0120 17:42:52.752751 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0b56d0cd-1994-4e39-9c78-82b5105222db-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:42:52 crc kubenswrapper[4558]: I0120 17:42:52.752762 4558 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/0b56d0cd-1994-4e39-9c78-82b5105222db-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:42:52 crc kubenswrapper[4558]: I0120 17:42:52.752773 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4bcc759d-3647-4194-9a91-acac49948173-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:42:52 crc kubenswrapper[4558]: I0120 17:42:52.752782 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/4bcc759d-3647-4194-9a91-acac49948173-config-data-generated\") on node \"crc\" DevicePath \"\"" Jan 20 17:42:52 crc kubenswrapper[4558]: I0120 17:42:52.752793 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b56d0cd-1994-4e39-9c78-82b5105222db-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:42:52 crc kubenswrapper[4558]: I0120 17:42:52.753153 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bcc759d-3647-4194-9a91-acac49948173-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "4bcc759d-3647-4194-9a91-acac49948173" (UID: "4bcc759d-3647-4194-9a91-acac49948173"). InnerVolumeSpecName "config-data-default". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:42:52 crc kubenswrapper[4558]: I0120 17:42:52.753474 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bcc759d-3647-4194-9a91-acac49948173-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "4bcc759d-3647-4194-9a91-acac49948173" (UID: "4bcc759d-3647-4194-9a91-acac49948173"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:42:52 crc kubenswrapper[4558]: I0120 17:42:52.755529 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bcc759d-3647-4194-9a91-acac49948173-kube-api-access-x6vsj" (OuterVolumeSpecName: "kube-api-access-x6vsj") pod "4bcc759d-3647-4194-9a91-acac49948173" (UID: "4bcc759d-3647-4194-9a91-acac49948173"). InnerVolumeSpecName "kube-api-access-x6vsj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:42:52 crc kubenswrapper[4558]: I0120 17:42:52.762393 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage14-crc" (OuterVolumeSpecName: "mysql-db") pod "4bcc759d-3647-4194-9a91-acac49948173" (UID: "4bcc759d-3647-4194-9a91-acac49948173"). InnerVolumeSpecName "local-storage14-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:42:52 crc kubenswrapper[4558]: I0120 17:42:52.772247 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage15-crc" (UniqueName: "kubernetes.io/local-volume/local-storage15-crc") on node "crc" Jan 20 17:42:52 crc kubenswrapper[4558]: I0120 17:42:52.778439 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4bcc759d-3647-4194-9a91-acac49948173-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4bcc759d-3647-4194-9a91-acac49948173" (UID: "4bcc759d-3647-4194-9a91-acac49948173"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:42:52 crc kubenswrapper[4558]: I0120 17:42:52.795128 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4bcc759d-3647-4194-9a91-acac49948173-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "4bcc759d-3647-4194-9a91-acac49948173" (UID: "4bcc759d-3647-4194-9a91-acac49948173"). InnerVolumeSpecName "galera-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:42:52 crc kubenswrapper[4558]: I0120 17:42:52.856145 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:42:52 crc kubenswrapper[4558]: I0120 17:42:52.856235 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4bcc759d-3647-4194-9a91-acac49948173-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:42:52 crc kubenswrapper[4558]: I0120 17:42:52.856253 4558 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/4bcc759d-3647-4194-9a91-acac49948173-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:42:52 crc kubenswrapper[4558]: I0120 17:42:52.856266 4558 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/4bcc759d-3647-4194-9a91-acac49948173-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:42:52 crc kubenswrapper[4558]: I0120 17:42:52.856320 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") on node \"crc\" " Jan 20 17:42:52 crc kubenswrapper[4558]: I0120 17:42:52.856331 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/4bcc759d-3647-4194-9a91-acac49948173-config-data-default\") on node \"crc\" DevicePath \"\"" Jan 20 17:42:52 crc kubenswrapper[4558]: I0120 17:42:52.856344 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x6vsj\" (UniqueName: \"kubernetes.io/projected/4bcc759d-3647-4194-9a91-acac49948173-kube-api-access-x6vsj\") on node \"crc\" DevicePath \"\"" Jan 20 17:42:52 crc kubenswrapper[4558]: I0120 17:42:52.876836 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage14-crc" (UniqueName: "kubernetes.io/local-volume/local-storage14-crc") on node "crc" Jan 20 17:42:52 crc kubenswrapper[4558]: I0120 17:42:52.958888 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.257539 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"0b56d0cd-1994-4e39-9c78-82b5105222db","Type":"ContainerDied","Data":"115e797d7f0e6b91193595613d09f836d34ed6f0c932ce0636d93690f028767f"} Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.257620 4558 scope.go:117] "RemoveContainer" containerID="0155fd8ce44500446dfb48466cd9d3cc3b7b70429fb0d969d7884dee5362f7a9" Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.257569 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.259791 4558 generic.go:334] "Generic (PLEG): container finished" podID="4bcc759d-3647-4194-9a91-acac49948173" containerID="1b4d88c338068dde213bad574869f30ce42019618f93ed6c25b82149cf965480" exitCode=0 Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.259870 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"4bcc759d-3647-4194-9a91-acac49948173","Type":"ContainerDied","Data":"1b4d88c338068dde213bad574869f30ce42019618f93ed6c25b82149cf965480"} Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.259889 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.259964 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"4bcc759d-3647-4194-9a91-acac49948173","Type":"ContainerDied","Data":"6bab203ff066f66d425ed0dfd061e3206febb47ae8836ab8898de6689a2e3105"} Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.323851 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.334139 4558 scope.go:117] "RemoveContainer" containerID="b03018c6cafe6bfe05fbcfd442923f9ba65a46cf60d3b585db1ba6e573cc935a" Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.339058 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.361651 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.382731 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.390774 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.391470 4558 scope.go:117] "RemoveContainer" containerID="1b4d88c338068dde213bad574869f30ce42019618f93ed6c25b82149cf965480" Jan 20 17:42:53 crc kubenswrapper[4558]: E0120 17:42:53.391599 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4bcc759d-3647-4194-9a91-acac49948173" containerName="galera" Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.391625 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4bcc759d-3647-4194-9a91-acac49948173" containerName="galera" Jan 20 17:42:53 crc kubenswrapper[4558]: E0120 17:42:53.391650 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b56d0cd-1994-4e39-9c78-82b5105222db" containerName="galera" Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.391658 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b56d0cd-1994-4e39-9c78-82b5105222db" containerName="galera" Jan 20 17:42:53 crc kubenswrapper[4558]: E0120 17:42:53.391670 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4bcc759d-3647-4194-9a91-acac49948173" containerName="mysql-bootstrap" Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.391678 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4bcc759d-3647-4194-9a91-acac49948173" containerName="mysql-bootstrap" Jan 20 17:42:53 crc kubenswrapper[4558]: E0120 
17:42:53.391699 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b56d0cd-1994-4e39-9c78-82b5105222db" containerName="mysql-bootstrap" Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.391706 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b56d0cd-1994-4e39-9c78-82b5105222db" containerName="mysql-bootstrap" Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.391890 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="0b56d0cd-1994-4e39-9c78-82b5105222db" containerName="galera" Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.391919 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="4bcc759d-3647-4194-9a91-acac49948173" containerName="galera" Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.395134 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.399196 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"galera-openstack-dockercfg-xjdvx" Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.399533 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.399673 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-galera-openstack-svc" Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.399895 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-config-data" Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.400052 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-scripts" Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.408605 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.416325 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.418404 4558 scope.go:117] "RemoveContainer" containerID="d8d925f249220118354cc5b256984244efa0db9e1f2f534a041c8fbdf41960e2" Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.419620 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-cell1-scripts" Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.419810 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"galera-openstack-cell1-dockercfg-mk4rp" Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.420074 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-cell1-config-data" Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.420893 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-galera-openstack-cell1-svc" Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.432877 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.445333 4558 scope.go:117] "RemoveContainer" containerID="1b4d88c338068dde213bad574869f30ce42019618f93ed6c25b82149cf965480" Jan 20 17:42:53 crc kubenswrapper[4558]: E0120 17:42:53.446051 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1b4d88c338068dde213bad574869f30ce42019618f93ed6c25b82149cf965480\": container with ID starting with 1b4d88c338068dde213bad574869f30ce42019618f93ed6c25b82149cf965480 not found: ID does not exist" containerID="1b4d88c338068dde213bad574869f30ce42019618f93ed6c25b82149cf965480" Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.446101 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1b4d88c338068dde213bad574869f30ce42019618f93ed6c25b82149cf965480"} err="failed to get container status \"1b4d88c338068dde213bad574869f30ce42019618f93ed6c25b82149cf965480\": rpc error: code = NotFound desc = could not find container \"1b4d88c338068dde213bad574869f30ce42019618f93ed6c25b82149cf965480\": container with ID starting with 1b4d88c338068dde213bad574869f30ce42019618f93ed6c25b82149cf965480 not found: ID does not exist" Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.446129 4558 scope.go:117] "RemoveContainer" containerID="d8d925f249220118354cc5b256984244efa0db9e1f2f534a041c8fbdf41960e2" Jan 20 17:42:53 crc kubenswrapper[4558]: E0120 17:42:53.447035 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d8d925f249220118354cc5b256984244efa0db9e1f2f534a041c8fbdf41960e2\": container with ID starting with d8d925f249220118354cc5b256984244efa0db9e1f2f534a041c8fbdf41960e2 not found: ID does not exist" containerID="d8d925f249220118354cc5b256984244efa0db9e1f2f534a041c8fbdf41960e2" Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.447080 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d8d925f249220118354cc5b256984244efa0db9e1f2f534a041c8fbdf41960e2"} err="failed to get container status \"d8d925f249220118354cc5b256984244efa0db9e1f2f534a041c8fbdf41960e2\": rpc error: code = NotFound desc = could not find container \"d8d925f249220118354cc5b256984244efa0db9e1f2f534a041c8fbdf41960e2\": container with ID 
starting with d8d925f249220118354cc5b256984244efa0db9e1f2f534a041c8fbdf41960e2 not found: ID does not exist" Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.573414 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f68fbac2-1677-4664-a344-77a41044ea2a-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"f68fbac2-1677-4664-a344-77a41044ea2a\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.573490 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sttlg\" (UniqueName: \"kubernetes.io/projected/25fe26b1-d308-452c-b15f-b1b272de6869-kube-api-access-sttlg\") pod \"openstack-galera-0\" (UID: \"25fe26b1-d308-452c-b15f-b1b272de6869\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.573523 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"openstack-galera-0\" (UID: \"25fe26b1-d308-452c-b15f-b1b272de6869\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.573552 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/f68fbac2-1677-4664-a344-77a41044ea2a-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"f68fbac2-1677-4664-a344-77a41044ea2a\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.573586 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/f68fbac2-1677-4664-a344-77a41044ea2a-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"f68fbac2-1677-4664-a344-77a41044ea2a\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.573623 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/25fe26b1-d308-452c-b15f-b1b272de6869-kolla-config\") pod \"openstack-galera-0\" (UID: \"25fe26b1-d308-452c-b15f-b1b272de6869\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.573649 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/f68fbac2-1677-4664-a344-77a41044ea2a-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"f68fbac2-1677-4664-a344-77a41044ea2a\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.573700 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/25fe26b1-d308-452c-b15f-b1b272de6869-config-data-default\") pod \"openstack-galera-0\" (UID: \"25fe26b1-d308-452c-b15f-b1b272de6869\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.573939 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/25fe26b1-d308-452c-b15f-b1b272de6869-config-data-generated\") pod \"openstack-galera-0\" (UID: \"25fe26b1-d308-452c-b15f-b1b272de6869\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.574084 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/f68fbac2-1677-4664-a344-77a41044ea2a-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"f68fbac2-1677-4664-a344-77a41044ea2a\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.574196 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"openstack-cell1-galera-0\" (UID: \"f68fbac2-1677-4664-a344-77a41044ea2a\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.574273 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b2xql\" (UniqueName: \"kubernetes.io/projected/f68fbac2-1677-4664-a344-77a41044ea2a-kube-api-access-b2xql\") pod \"openstack-cell1-galera-0\" (UID: \"f68fbac2-1677-4664-a344-77a41044ea2a\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.574337 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f68fbac2-1677-4664-a344-77a41044ea2a-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"f68fbac2-1677-4664-a344-77a41044ea2a\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.574383 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25fe26b1-d308-452c-b15f-b1b272de6869-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"25fe26b1-d308-452c-b15f-b1b272de6869\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.574492 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/25fe26b1-d308-452c-b15f-b1b272de6869-operator-scripts\") pod \"openstack-galera-0\" (UID: \"25fe26b1-d308-452c-b15f-b1b272de6869\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.574536 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/25fe26b1-d308-452c-b15f-b1b272de6869-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"25fe26b1-d308-452c-b15f-b1b272de6869\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.676509 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/25fe26b1-d308-452c-b15f-b1b272de6869-config-data-default\") pod \"openstack-galera-0\" (UID: \"25fe26b1-d308-452c-b15f-b1b272de6869\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 
17:42:53.676566 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/25fe26b1-d308-452c-b15f-b1b272de6869-config-data-generated\") pod \"openstack-galera-0\" (UID: \"25fe26b1-d308-452c-b15f-b1b272de6869\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.676609 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/f68fbac2-1677-4664-a344-77a41044ea2a-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"f68fbac2-1677-4664-a344-77a41044ea2a\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.676639 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"openstack-cell1-galera-0\" (UID: \"f68fbac2-1677-4664-a344-77a41044ea2a\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:42:53 crc kubenswrapper[4558]: E0120 17:42:53.676685 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-placement-public-svc: secret "cert-placement-public-svc" not found Jan 20 17:42:53 crc kubenswrapper[4558]: E0120 17:42:53.676780 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bf32468e-b0a7-4b81-bb49-c65a95997903-public-tls-certs podName:bf32468e-b0a7-4b81-bb49-c65a95997903 nodeName:}" failed. No retries permitted until 2026-01-20 17:42:54.176756835 +0000 UTC m=+3667.937094802 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/bf32468e-b0a7-4b81-bb49-c65a95997903-public-tls-certs") pod "placement-69d7f65964-r46nm" (UID: "bf32468e-b0a7-4b81-bb49-c65a95997903") : secret "cert-placement-public-svc" not found Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.676885 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"openstack-cell1-galera-0\" (UID: \"f68fbac2-1677-4664-a344-77a41044ea2a\") device mount path \"/mnt/openstack/pv15\"" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.677245 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/25fe26b1-d308-452c-b15f-b1b272de6869-config-data-generated\") pod \"openstack-galera-0\" (UID: \"25fe26b1-d308-452c-b15f-b1b272de6869\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.677534 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b2xql\" (UniqueName: \"kubernetes.io/projected/f68fbac2-1677-4664-a344-77a41044ea2a-kube-api-access-b2xql\") pod \"openstack-cell1-galera-0\" (UID: \"f68fbac2-1677-4664-a344-77a41044ea2a\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.677605 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f68fbac2-1677-4664-a344-77a41044ea2a-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"f68fbac2-1677-4664-a344-77a41044ea2a\") " 
pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.677667 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25fe26b1-d308-452c-b15f-b1b272de6869-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"25fe26b1-d308-452c-b15f-b1b272de6869\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.677705 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/25fe26b1-d308-452c-b15f-b1b272de6869-operator-scripts\") pod \"openstack-galera-0\" (UID: \"25fe26b1-d308-452c-b15f-b1b272de6869\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.677730 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/25fe26b1-d308-452c-b15f-b1b272de6869-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"25fe26b1-d308-452c-b15f-b1b272de6869\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.677827 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f68fbac2-1677-4664-a344-77a41044ea2a-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"f68fbac2-1677-4664-a344-77a41044ea2a\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.677876 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sttlg\" (UniqueName: \"kubernetes.io/projected/25fe26b1-d308-452c-b15f-b1b272de6869-kube-api-access-sttlg\") pod \"openstack-galera-0\" (UID: \"25fe26b1-d308-452c-b15f-b1b272de6869\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.677937 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"openstack-galera-0\" (UID: \"25fe26b1-d308-452c-b15f-b1b272de6869\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.677973 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/f68fbac2-1677-4664-a344-77a41044ea2a-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"f68fbac2-1677-4664-a344-77a41044ea2a\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.678024 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/f68fbac2-1677-4664-a344-77a41044ea2a-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"f68fbac2-1677-4664-a344-77a41044ea2a\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.678152 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/25fe26b1-d308-452c-b15f-b1b272de6869-kolla-config\") pod \"openstack-galera-0\" (UID: \"25fe26b1-d308-452c-b15f-b1b272de6869\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:42:53 crc 
kubenswrapper[4558]: I0120 17:42:53.678229 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/f68fbac2-1677-4664-a344-77a41044ea2a-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"f68fbac2-1677-4664-a344-77a41044ea2a\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.678568 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"openstack-galera-0\" (UID: \"25fe26b1-d308-452c-b15f-b1b272de6869\") device mount path \"/mnt/openstack/pv14\"" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:42:53 crc kubenswrapper[4558]: E0120 17:42:53.679186 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-placement-internal-svc: secret "cert-placement-internal-svc" not found Jan 20 17:42:53 crc kubenswrapper[4558]: E0120 17:42:53.679391 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bf32468e-b0a7-4b81-bb49-c65a95997903-internal-tls-certs podName:bf32468e-b0a7-4b81-bb49-c65a95997903 nodeName:}" failed. No retries permitted until 2026-01-20 17:42:54.179369517 +0000 UTC m=+3667.939707484 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/bf32468e-b0a7-4b81-bb49-c65a95997903-internal-tls-certs") pod "placement-69d7f65964-r46nm" (UID: "bf32468e-b0a7-4b81-bb49-c65a95997903") : secret "cert-placement-internal-svc" not found Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.679288 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/25fe26b1-d308-452c-b15f-b1b272de6869-config-data-default\") pod \"openstack-galera-0\" (UID: \"25fe26b1-d308-452c-b15f-b1b272de6869\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:42:53 crc kubenswrapper[4558]: E0120 17:42:53.679267 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-keystone-internal-svc: secret "cert-keystone-internal-svc" not found Jan 20 17:42:53 crc kubenswrapper[4558]: E0120 17:42:53.679486 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8f7d7846-1b56-4ab5-a0ad-63b179a1a797-internal-tls-certs podName:8f7d7846-1b56-4ab5-a0ad-63b179a1a797 nodeName:}" failed. No retries permitted until 2026-01-20 17:42:54.17947729 +0000 UTC m=+3667.939815257 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/8f7d7846-1b56-4ab5-a0ad-63b179a1a797-internal-tls-certs") pod "keystone-7dffbf585-vg2sk" (UID: "8f7d7846-1b56-4ab5-a0ad-63b179a1a797") : secret "cert-keystone-internal-svc" not found Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.680099 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/f68fbac2-1677-4664-a344-77a41044ea2a-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"f68fbac2-1677-4664-a344-77a41044ea2a\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.680098 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/f68fbac2-1677-4664-a344-77a41044ea2a-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"f68fbac2-1677-4664-a344-77a41044ea2a\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.680102 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/f68fbac2-1677-4664-a344-77a41044ea2a-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"f68fbac2-1677-4664-a344-77a41044ea2a\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:42:53 crc kubenswrapper[4558]: E0120 17:42:53.680157 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-keystone-public-svc: secret "cert-keystone-public-svc" not found Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.680291 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/25fe26b1-d308-452c-b15f-b1b272de6869-operator-scripts\") pod \"openstack-galera-0\" (UID: \"25fe26b1-d308-452c-b15f-b1b272de6869\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:42:53 crc kubenswrapper[4558]: E0120 17:42:53.680419 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8f7d7846-1b56-4ab5-a0ad-63b179a1a797-public-tls-certs podName:8f7d7846-1b56-4ab5-a0ad-63b179a1a797 nodeName:}" failed. No retries permitted until 2026-01-20 17:42:54.180402529 +0000 UTC m=+3667.940740496 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/8f7d7846-1b56-4ab5-a0ad-63b179a1a797-public-tls-certs") pod "keystone-7dffbf585-vg2sk" (UID: "8f7d7846-1b56-4ab5-a0ad-63b179a1a797") : secret "cert-keystone-public-svc" not found Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.680477 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/25fe26b1-d308-452c-b15f-b1b272de6869-kolla-config\") pod \"openstack-galera-0\" (UID: \"25fe26b1-d308-452c-b15f-b1b272de6869\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.680748 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f68fbac2-1677-4664-a344-77a41044ea2a-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"f68fbac2-1677-4664-a344-77a41044ea2a\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.682545 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f68fbac2-1677-4664-a344-77a41044ea2a-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"f68fbac2-1677-4664-a344-77a41044ea2a\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.684315 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/f68fbac2-1677-4664-a344-77a41044ea2a-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"f68fbac2-1677-4664-a344-77a41044ea2a\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.686524 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25fe26b1-d308-452c-b15f-b1b272de6869-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"25fe26b1-d308-452c-b15f-b1b272de6869\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.688529 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/25fe26b1-d308-452c-b15f-b1b272de6869-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"25fe26b1-d308-452c-b15f-b1b272de6869\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.694215 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sttlg\" (UniqueName: \"kubernetes.io/projected/25fe26b1-d308-452c-b15f-b1b272de6869-kube-api-access-sttlg\") pod \"openstack-galera-0\" (UID: \"25fe26b1-d308-452c-b15f-b1b272de6869\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.695694 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b2xql\" (UniqueName: \"kubernetes.io/projected/f68fbac2-1677-4664-a344-77a41044ea2a-kube-api-access-b2xql\") pod \"openstack-cell1-galera-0\" (UID: \"f68fbac2-1677-4664-a344-77a41044ea2a\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.706638 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage15-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage15-crc\") pod \"openstack-cell1-galera-0\" (UID: \"f68fbac2-1677-4664-a344-77a41044ea2a\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.715401 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"openstack-galera-0\" (UID: \"25fe26b1-d308-452c-b15f-b1b272de6869\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:42:53 crc kubenswrapper[4558]: I0120 17:42:53.768388 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:42:54 crc kubenswrapper[4558]: I0120 17:42:54.017061 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:42:54 crc kubenswrapper[4558]: E0120 17:42:54.194667 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-keystone-internal-svc: secret "cert-keystone-internal-svc" not found Jan 20 17:42:54 crc kubenswrapper[4558]: E0120 17:42:54.195158 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8f7d7846-1b56-4ab5-a0ad-63b179a1a797-internal-tls-certs podName:8f7d7846-1b56-4ab5-a0ad-63b179a1a797 nodeName:}" failed. No retries permitted until 2026-01-20 17:42:55.195131513 +0000 UTC m=+3668.955469480 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/8f7d7846-1b56-4ab5-a0ad-63b179a1a797-internal-tls-certs") pod "keystone-7dffbf585-vg2sk" (UID: "8f7d7846-1b56-4ab5-a0ad-63b179a1a797") : secret "cert-keystone-internal-svc" not found Jan 20 17:42:54 crc kubenswrapper[4558]: E0120 17:42:54.194804 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-kube-state-metrics-svc: secret "cert-kube-state-metrics-svc" not found Jan 20 17:42:54 crc kubenswrapper[4558]: E0120 17:42:54.195257 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-keystone-public-svc: secret "cert-keystone-public-svc" not found Jan 20 17:42:54 crc kubenswrapper[4558]: E0120 17:42:54.195287 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f4affc06-2032-4a14-8422-3c3ca984eada-kube-state-metrics-tls-certs podName:f4affc06-2032-4a14-8422-3c3ca984eada nodeName:}" failed. No retries permitted until 2026-01-20 17:43:02.195279291 +0000 UTC m=+3675.955617247 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-state-metrics-tls-certs" (UniqueName: "kubernetes.io/secret/f4affc06-2032-4a14-8422-3c3ca984eada-kube-state-metrics-tls-certs") pod "kube-state-metrics-0" (UID: "f4affc06-2032-4a14-8422-3c3ca984eada") : secret "cert-kube-state-metrics-svc" not found Jan 20 17:42:54 crc kubenswrapper[4558]: E0120 17:42:54.194985 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-placement-internal-svc: secret "cert-placement-internal-svc" not found Jan 20 17:42:54 crc kubenswrapper[4558]: E0120 17:42:54.195389 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8f7d7846-1b56-4ab5-a0ad-63b179a1a797-public-tls-certs podName:8f7d7846-1b56-4ab5-a0ad-63b179a1a797 nodeName:}" failed. No retries permitted until 2026-01-20 17:42:55.195377665 +0000 UTC m=+3668.955715633 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/8f7d7846-1b56-4ab5-a0ad-63b179a1a797-public-tls-certs") pod "keystone-7dffbf585-vg2sk" (UID: "8f7d7846-1b56-4ab5-a0ad-63b179a1a797") : secret "cert-keystone-public-svc" not found Jan 20 17:42:54 crc kubenswrapper[4558]: E0120 17:42:54.194932 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-placement-public-svc: secret "cert-placement-public-svc" not found Jan 20 17:42:54 crc kubenswrapper[4558]: E0120 17:42:54.195451 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bf32468e-b0a7-4b81-bb49-c65a95997903-internal-tls-certs podName:bf32468e-b0a7-4b81-bb49-c65a95997903 nodeName:}" failed. No retries permitted until 2026-01-20 17:42:55.195425967 +0000 UTC m=+3668.955763923 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/bf32468e-b0a7-4b81-bb49-c65a95997903-internal-tls-certs") pod "placement-69d7f65964-r46nm" (UID: "bf32468e-b0a7-4b81-bb49-c65a95997903") : secret "cert-placement-internal-svc" not found Jan 20 17:42:54 crc kubenswrapper[4558]: E0120 17:42:54.196786 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bf32468e-b0a7-4b81-bb49-c65a95997903-public-tls-certs podName:bf32468e-b0a7-4b81-bb49-c65a95997903 nodeName:}" failed. No retries permitted until 2026-01-20 17:42:55.196738244 +0000 UTC m=+3668.957076211 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/bf32468e-b0a7-4b81-bb49-c65a95997903-public-tls-certs") pod "placement-69d7f65964-r46nm" (UID: "bf32468e-b0a7-4b81-bb49-c65a95997903") : secret "cert-placement-public-svc" not found Jan 20 17:42:54 crc kubenswrapper[4558]: I0120 17:42:54.221353 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:42:54 crc kubenswrapper[4558]: I0120 17:42:54.271316 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"f68fbac2-1677-4664-a344-77a41044ea2a","Type":"ContainerStarted","Data":"73ae02bb95ce2d6c9c3af2a3518478e8076c9da2ea396d56b8256107bc44689a"} Jan 20 17:42:54 crc kubenswrapper[4558]: I0120 17:42:54.396490 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack-kuttl-tests/cinder-scheduler-0" podUID="25cfe04f-2194-405f-a9d7-82181e8ac22a" containerName="cinder-scheduler" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 20 17:42:54 crc kubenswrapper[4558]: I0120 17:42:54.410721 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:42:54 crc kubenswrapper[4558]: W0120 17:42:54.417539 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod25fe26b1_d308_452c_b15f_b1b272de6869.slice/crio-2edec83ce852863bc2afc3e9bc1b9c0f6514797150c2774031d55f522cbcdf7d WatchSource:0}: Error finding container 2edec83ce852863bc2afc3e9bc1b9c0f6514797150c2774031d55f522cbcdf7d: Status 404 returned error can't find the container with id 2edec83ce852863bc2afc3e9bc1b9c0f6514797150c2774031d55f522cbcdf7d Jan 20 17:42:54 crc kubenswrapper[4558]: I0120 17:42:54.575861 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b56d0cd-1994-4e39-9c78-82b5105222db" 
path="/var/lib/kubelet/pods/0b56d0cd-1994-4e39-9c78-82b5105222db/volumes" Jan 20 17:42:54 crc kubenswrapper[4558]: I0120 17:42:54.576847 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bcc759d-3647-4194-9a91-acac49948173" path="/var/lib/kubelet/pods/4bcc759d-3647-4194-9a91-acac49948173/volumes" Jan 20 17:42:54 crc kubenswrapper[4558]: E0120 17:42:54.707279 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-ovnnorthd-ovndbs: secret "cert-ovnnorthd-ovndbs" not found Jan 20 17:42:54 crc kubenswrapper[4558]: E0120 17:42:54.707364 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4d2b64b6-6310-4348-a016-e7d8317e00d9-ovn-northd-tls-certs podName:4d2b64b6-6310-4348-a016-e7d8317e00d9 nodeName:}" failed. No retries permitted until 2026-01-20 17:42:55.207340438 +0000 UTC m=+3668.967678405 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "ovn-northd-tls-certs" (UniqueName: "kubernetes.io/secret/4d2b64b6-6310-4348-a016-e7d8317e00d9-ovn-northd-tls-certs") pod "ovn-northd-0" (UID: "4d2b64b6-6310-4348-a016-e7d8317e00d9") : secret "cert-ovnnorthd-ovndbs" not found Jan 20 17:42:54 crc kubenswrapper[4558]: E0120 17:42:54.707563 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-ovn-metrics: secret "cert-ovn-metrics" not found Jan 20 17:42:54 crc kubenswrapper[4558]: E0120 17:42:54.707664 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4d2b64b6-6310-4348-a016-e7d8317e00d9-metrics-certs-tls-certs podName:4d2b64b6-6310-4348-a016-e7d8317e00d9 nodeName:}" failed. No retries permitted until 2026-01-20 17:42:55.207638038 +0000 UTC m=+3668.967976005 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs-tls-certs" (UniqueName: "kubernetes.io/secret/4d2b64b6-6310-4348-a016-e7d8317e00d9-metrics-certs-tls-certs") pod "ovn-northd-0" (UID: "4d2b64b6-6310-4348-a016-e7d8317e00d9") : secret "cert-ovn-metrics" not found Jan 20 17:42:55 crc kubenswrapper[4558]: E0120 17:42:55.221786 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-keystone-public-svc: secret "cert-keystone-public-svc" not found Jan 20 17:42:55 crc kubenswrapper[4558]: E0120 17:42:55.222223 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8f7d7846-1b56-4ab5-a0ad-63b179a1a797-public-tls-certs podName:8f7d7846-1b56-4ab5-a0ad-63b179a1a797 nodeName:}" failed. No retries permitted until 2026-01-20 17:42:57.222193084 +0000 UTC m=+3670.982531051 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/8f7d7846-1b56-4ab5-a0ad-63b179a1a797-public-tls-certs") pod "keystone-7dffbf585-vg2sk" (UID: "8f7d7846-1b56-4ab5-a0ad-63b179a1a797") : secret "cert-keystone-public-svc" not found Jan 20 17:42:55 crc kubenswrapper[4558]: E0120 17:42:55.221801 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-ovnnorthd-ovndbs: secret "cert-ovnnorthd-ovndbs" not found Jan 20 17:42:55 crc kubenswrapper[4558]: E0120 17:42:55.222311 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4d2b64b6-6310-4348-a016-e7d8317e00d9-ovn-northd-tls-certs podName:4d2b64b6-6310-4348-a016-e7d8317e00d9 nodeName:}" failed. No retries permitted until 2026-01-20 17:42:56.222302811 +0000 UTC m=+3669.982640778 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "ovn-northd-tls-certs" (UniqueName: "kubernetes.io/secret/4d2b64b6-6310-4348-a016-e7d8317e00d9-ovn-northd-tls-certs") pod "ovn-northd-0" (UID: "4d2b64b6-6310-4348-a016-e7d8317e00d9") : secret "cert-ovnnorthd-ovndbs" not found Jan 20 17:42:55 crc kubenswrapper[4558]: E0120 17:42:55.221846 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-ovn-metrics: secret "cert-ovn-metrics" not found Jan 20 17:42:55 crc kubenswrapper[4558]: E0120 17:42:55.222457 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4d2b64b6-6310-4348-a016-e7d8317e00d9-metrics-certs-tls-certs podName:4d2b64b6-6310-4348-a016-e7d8317e00d9 nodeName:}" failed. No retries permitted until 2026-01-20 17:42:56.22242532 +0000 UTC m=+3669.982763288 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs-tls-certs" (UniqueName: "kubernetes.io/secret/4d2b64b6-6310-4348-a016-e7d8317e00d9-metrics-certs-tls-certs") pod "ovn-northd-0" (UID: "4d2b64b6-6310-4348-a016-e7d8317e00d9") : secret "cert-ovn-metrics" not found Jan 20 17:42:55 crc kubenswrapper[4558]: E0120 17:42:55.221937 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-placement-public-svc: secret "cert-placement-public-svc" not found Jan 20 17:42:55 crc kubenswrapper[4558]: E0120 17:42:55.222514 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bf32468e-b0a7-4b81-bb49-c65a95997903-public-tls-certs podName:bf32468e-b0a7-4b81-bb49-c65a95997903 nodeName:}" failed. No retries permitted until 2026-01-20 17:42:57.222506374 +0000 UTC m=+3670.982844341 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/bf32468e-b0a7-4b81-bb49-c65a95997903-public-tls-certs") pod "placement-69d7f65964-r46nm" (UID: "bf32468e-b0a7-4b81-bb49-c65a95997903") : secret "cert-placement-public-svc" not found Jan 20 17:42:55 crc kubenswrapper[4558]: E0120 17:42:55.222623 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-placement-internal-svc: secret "cert-placement-internal-svc" not found Jan 20 17:42:55 crc kubenswrapper[4558]: E0120 17:42:55.222703 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bf32468e-b0a7-4b81-bb49-c65a95997903-internal-tls-certs podName:bf32468e-b0a7-4b81-bb49-c65a95997903 nodeName:}" failed. No retries permitted until 2026-01-20 17:42:57.22267986 +0000 UTC m=+3670.983017827 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/bf32468e-b0a7-4b81-bb49-c65a95997903-internal-tls-certs") pod "placement-69d7f65964-r46nm" (UID: "bf32468e-b0a7-4b81-bb49-c65a95997903") : secret "cert-placement-internal-svc" not found Jan 20 17:42:55 crc kubenswrapper[4558]: I0120 17:42:55.282368 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"f68fbac2-1677-4664-a344-77a41044ea2a","Type":"ContainerStarted","Data":"25d54c191dc33c7fed19acdcd580fbd43bb913261f66f192926427656b325ef2"} Jan 20 17:42:55 crc kubenswrapper[4558]: I0120 17:42:55.285191 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"25fe26b1-d308-452c-b15f-b1b272de6869","Type":"ContainerStarted","Data":"dcf9b9124b71d8258a72a33535b49292f6fb9585d02e200aa73d63e0ff9b6a33"} Jan 20 17:42:55 crc kubenswrapper[4558]: I0120 17:42:55.285251 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"25fe26b1-d308-452c-b15f-b1b272de6869","Type":"ContainerStarted","Data":"2edec83ce852863bc2afc3e9bc1b9c0f6514797150c2774031d55f522cbcdf7d"} Jan 20 17:42:55 crc kubenswrapper[4558]: E0120 17:42:55.738110 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-swift-internal-svc: secret "cert-swift-internal-svc" not found Jan 20 17:42:55 crc kubenswrapper[4558]: E0120 17:42:55.738448 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/858ca371-2b02-46d8-879c-8d859c31d590-internal-tls-certs podName:858ca371-2b02-46d8-879c-8d859c31d590 nodeName:}" failed. No retries permitted until 2026-01-20 17:42:56.23842278 +0000 UTC m=+3669.998760747 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/858ca371-2b02-46d8-879c-8d859c31d590-internal-tls-certs") pod "swift-proxy-6f98b8f5cb-8bwzs" (UID: "858ca371-2b02-46d8-879c-8d859c31d590") : secret "cert-swift-internal-svc" not found Jan 20 17:42:55 crc kubenswrapper[4558]: E0120 17:42:55.738531 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-neutron-ovndbs: secret "cert-neutron-ovndbs" not found Jan 20 17:42:55 crc kubenswrapper[4558]: E0120 17:42:55.738599 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7e627450-8edc-44bd-805d-b0034178dff2-ovndb-tls-certs podName:7e627450-8edc-44bd-805d-b0034178dff2 nodeName:}" failed. No retries permitted until 2026-01-20 17:42:56.238572211 +0000 UTC m=+3669.998910177 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "ovndb-tls-certs" (UniqueName: "kubernetes.io/secret/7e627450-8edc-44bd-805d-b0034178dff2-ovndb-tls-certs") pod "neutron-5c4dc89b5d-qp5xf" (UID: "7e627450-8edc-44bd-805d-b0034178dff2") : secret "cert-neutron-ovndbs" not found Jan 20 17:42:55 crc kubenswrapper[4558]: E0120 17:42:55.739213 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-neutron-public-svc: secret "cert-neutron-public-svc" not found Jan 20 17:42:55 crc kubenswrapper[4558]: E0120 17:42:55.739252 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7e627450-8edc-44bd-805d-b0034178dff2-public-tls-certs podName:7e627450-8edc-44bd-805d-b0034178dff2 nodeName:}" failed. No retries permitted until 2026-01-20 17:42:56.239240938 +0000 UTC m=+3669.999578906 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/7e627450-8edc-44bd-805d-b0034178dff2-public-tls-certs") pod "neutron-5c4dc89b5d-qp5xf" (UID: "7e627450-8edc-44bd-805d-b0034178dff2") : secret "cert-neutron-public-svc" not found Jan 20 17:42:55 crc kubenswrapper[4558]: E0120 17:42:55.740004 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-neutron-internal-svc: secret "cert-neutron-internal-svc" not found Jan 20 17:42:55 crc kubenswrapper[4558]: E0120 17:42:55.740044 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7e627450-8edc-44bd-805d-b0034178dff2-internal-tls-certs podName:7e627450-8edc-44bd-805d-b0034178dff2 nodeName:}" failed. No retries permitted until 2026-01-20 17:42:56.240036174 +0000 UTC m=+3670.000374141 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/7e627450-8edc-44bd-805d-b0034178dff2-internal-tls-certs") pod "neutron-5c4dc89b5d-qp5xf" (UID: "7e627450-8edc-44bd-805d-b0034178dff2") : secret "cert-neutron-internal-svc" not found Jan 20 17:42:55 crc kubenswrapper[4558]: E0120 17:42:55.740408 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-swift-public-svc: secret "cert-swift-public-svc" not found Jan 20 17:42:55 crc kubenswrapper[4558]: E0120 17:42:55.740447 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/858ca371-2b02-46d8-879c-8d859c31d590-public-tls-certs podName:858ca371-2b02-46d8-879c-8d859c31d590 nodeName:}" failed. No retries permitted until 2026-01-20 17:42:56.240437809 +0000 UTC m=+3670.000775776 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/858ca371-2b02-46d8-879c-8d859c31d590-public-tls-certs") pod "swift-proxy-6f98b8f5cb-8bwzs" (UID: "858ca371-2b02-46d8-879c-8d859c31d590") : secret "cert-swift-public-svc" not found Jan 20 17:42:56 crc kubenswrapper[4558]: E0120 17:42:56.251291 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-ovnnorthd-ovndbs: secret "cert-ovnnorthd-ovndbs" not found Jan 20 17:42:56 crc kubenswrapper[4558]: E0120 17:42:56.251681 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-neutron-internal-svc: secret "cert-neutron-internal-svc" not found Jan 20 17:42:56 crc kubenswrapper[4558]: E0120 17:42:56.251752 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4d2b64b6-6310-4348-a016-e7d8317e00d9-ovn-northd-tls-certs podName:4d2b64b6-6310-4348-a016-e7d8317e00d9 nodeName:}" failed. No retries permitted until 2026-01-20 17:42:58.251725943 +0000 UTC m=+3672.012063909 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "ovn-northd-tls-certs" (UniqueName: "kubernetes.io/secret/4d2b64b6-6310-4348-a016-e7d8317e00d9-ovn-northd-tls-certs") pod "ovn-northd-0" (UID: "4d2b64b6-6310-4348-a016-e7d8317e00d9") : secret "cert-ovnnorthd-ovndbs" not found Jan 20 17:42:56 crc kubenswrapper[4558]: E0120 17:42:56.251776 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-swift-public-svc: secret "cert-swift-public-svc" not found Jan 20 17:42:56 crc kubenswrapper[4558]: E0120 17:42:56.251848 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7e627450-8edc-44bd-805d-b0034178dff2-internal-tls-certs podName:7e627450-8edc-44bd-805d-b0034178dff2 nodeName:}" failed. 
No retries permitted until 2026-01-20 17:42:57.251782158 +0000 UTC m=+3671.012120126 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/7e627450-8edc-44bd-805d-b0034178dff2-internal-tls-certs") pod "neutron-5c4dc89b5d-qp5xf" (UID: "7e627450-8edc-44bd-805d-b0034178dff2") : secret "cert-neutron-internal-svc" not found Jan 20 17:42:56 crc kubenswrapper[4558]: E0120 17:42:56.251388 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-swift-internal-svc: secret "cert-swift-internal-svc" not found Jan 20 17:42:56 crc kubenswrapper[4558]: E0120 17:42:56.251615 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-ovn-metrics: secret "cert-ovn-metrics" not found Jan 20 17:42:56 crc kubenswrapper[4558]: E0120 17:42:56.251656 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-neutron-ovndbs: secret "cert-neutron-ovndbs" not found Jan 20 17:42:56 crc kubenswrapper[4558]: E0120 17:42:56.251897 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/858ca371-2b02-46d8-879c-8d859c31d590-public-tls-certs podName:858ca371-2b02-46d8-879c-8d859c31d590 nodeName:}" failed. No retries permitted until 2026-01-20 17:42:57.251886485 +0000 UTC m=+3671.012224451 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/858ca371-2b02-46d8-879c-8d859c31d590-public-tls-certs") pod "swift-proxy-6f98b8f5cb-8bwzs" (UID: "858ca371-2b02-46d8-879c-8d859c31d590") : secret "cert-swift-public-svc" not found Jan 20 17:42:56 crc kubenswrapper[4558]: E0120 17:42:56.252000 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/858ca371-2b02-46d8-879c-8d859c31d590-internal-tls-certs podName:858ca371-2b02-46d8-879c-8d859c31d590 nodeName:}" failed. No retries permitted until 2026-01-20 17:42:57.251974029 +0000 UTC m=+3671.012311997 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/858ca371-2b02-46d8-879c-8d859c31d590-internal-tls-certs") pod "swift-proxy-6f98b8f5cb-8bwzs" (UID: "858ca371-2b02-46d8-879c-8d859c31d590") : secret "cert-swift-internal-svc" not found Jan 20 17:42:56 crc kubenswrapper[4558]: E0120 17:42:56.252015 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4d2b64b6-6310-4348-a016-e7d8317e00d9-metrics-certs-tls-certs podName:4d2b64b6-6310-4348-a016-e7d8317e00d9 nodeName:}" failed. No retries permitted until 2026-01-20 17:42:58.252008274 +0000 UTC m=+3672.012346231 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs-tls-certs" (UniqueName: "kubernetes.io/secret/4d2b64b6-6310-4348-a016-e7d8317e00d9-metrics-certs-tls-certs") pod "ovn-northd-0" (UID: "4d2b64b6-6310-4348-a016-e7d8317e00d9") : secret "cert-ovn-metrics" not found Jan 20 17:42:56 crc kubenswrapper[4558]: E0120 17:42:56.252037 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7e627450-8edc-44bd-805d-b0034178dff2-ovndb-tls-certs podName:7e627450-8edc-44bd-805d-b0034178dff2 nodeName:}" failed. No retries permitted until 2026-01-20 17:42:57.252031146 +0000 UTC m=+3671.012369114 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "ovndb-tls-certs" (UniqueName: "kubernetes.io/secret/7e627450-8edc-44bd-805d-b0034178dff2-ovndb-tls-certs") pod "neutron-5c4dc89b5d-qp5xf" (UID: "7e627450-8edc-44bd-805d-b0034178dff2") : secret "cert-neutron-ovndbs" not found Jan 20 17:42:56 crc kubenswrapper[4558]: E0120 17:42:56.252350 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-neutron-public-svc: secret "cert-neutron-public-svc" not found Jan 20 17:42:56 crc kubenswrapper[4558]: E0120 17:42:56.252493 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7e627450-8edc-44bd-805d-b0034178dff2-public-tls-certs podName:7e627450-8edc-44bd-805d-b0034178dff2 nodeName:}" failed. No retries permitted until 2026-01-20 17:42:57.252462497 +0000 UTC m=+3671.012800454 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/7e627450-8edc-44bd-805d-b0034178dff2-public-tls-certs") pod "neutron-5c4dc89b5d-qp5xf" (UID: "7e627450-8edc-44bd-805d-b0034178dff2") : secret "cert-neutron-public-svc" not found Jan 20 17:42:56 crc kubenswrapper[4558]: I0120 17:42:56.573069 4558 scope.go:117] "RemoveContainer" containerID="2d2a8989ca5a5af98b2c9dc8f801ea0675339ec14800f437c058c5a43c20146b" Jan 20 17:42:56 crc kubenswrapper[4558]: E0120 17:42:56.573436 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:42:57 crc kubenswrapper[4558]: E0120 17:42:57.275870 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-placement-public-svc: secret "cert-placement-public-svc" not found Jan 20 17:42:57 crc kubenswrapper[4558]: E0120 17:42:57.275999 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bf32468e-b0a7-4b81-bb49-c65a95997903-public-tls-certs podName:bf32468e-b0a7-4b81-bb49-c65a95997903 nodeName:}" failed. No retries permitted until 2026-01-20 17:43:01.275971759 +0000 UTC m=+3675.036309727 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/bf32468e-b0a7-4b81-bb49-c65a95997903-public-tls-certs") pod "placement-69d7f65964-r46nm" (UID: "bf32468e-b0a7-4b81-bb49-c65a95997903") : secret "cert-placement-public-svc" not found Jan 20 17:42:57 crc kubenswrapper[4558]: E0120 17:42:57.276001 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-neutron-public-svc: secret "cert-neutron-public-svc" not found Jan 20 17:42:57 crc kubenswrapper[4558]: E0120 17:42:57.276036 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-neutron-internal-svc: secret "cert-neutron-internal-svc" not found Jan 20 17:42:57 crc kubenswrapper[4558]: E0120 17:42:57.276090 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7e627450-8edc-44bd-805d-b0034178dff2-public-tls-certs podName:7e627450-8edc-44bd-805d-b0034178dff2 nodeName:}" failed. No retries permitted until 2026-01-20 17:42:59.276064484 +0000 UTC m=+3673.036402452 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/7e627450-8edc-44bd-805d-b0034178dff2-public-tls-certs") pod "neutron-5c4dc89b5d-qp5xf" (UID: "7e627450-8edc-44bd-805d-b0034178dff2") : secret "cert-neutron-public-svc" not found Jan 20 17:42:57 crc kubenswrapper[4558]: E0120 17:42:57.276144 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-placement-internal-svc: secret "cert-placement-internal-svc" not found Jan 20 17:42:57 crc kubenswrapper[4558]: E0120 17:42:57.276180 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-swift-public-svc: secret "cert-swift-public-svc" not found Jan 20 17:42:57 crc kubenswrapper[4558]: E0120 17:42:57.276211 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bf32468e-b0a7-4b81-bb49-c65a95997903-internal-tls-certs podName:bf32468e-b0a7-4b81-bb49-c65a95997903 nodeName:}" failed. No retries permitted until 2026-01-20 17:43:01.276185601 +0000 UTC m=+3675.036523569 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/bf32468e-b0a7-4b81-bb49-c65a95997903-internal-tls-certs") pod "placement-69d7f65964-r46nm" (UID: "bf32468e-b0a7-4b81-bb49-c65a95997903") : secret "cert-placement-internal-svc" not found Jan 20 17:42:57 crc kubenswrapper[4558]: E0120 17:42:57.276254 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/858ca371-2b02-46d8-879c-8d859c31d590-public-tls-certs podName:858ca371-2b02-46d8-879c-8d859c31d590 nodeName:}" failed. No retries permitted until 2026-01-20 17:42:59.276235536 +0000 UTC m=+3673.036573503 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/858ca371-2b02-46d8-879c-8d859c31d590-public-tls-certs") pod "swift-proxy-6f98b8f5cb-8bwzs" (UID: "858ca371-2b02-46d8-879c-8d859c31d590") : secret "cert-swift-public-svc" not found Jan 20 17:42:57 crc kubenswrapper[4558]: E0120 17:42:57.276266 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-neutron-ovndbs: secret "cert-neutron-ovndbs" not found Jan 20 17:42:57 crc kubenswrapper[4558]: E0120 17:42:57.276295 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7e627450-8edc-44bd-805d-b0034178dff2-ovndb-tls-certs podName:7e627450-8edc-44bd-805d-b0034178dff2 nodeName:}" failed. No retries permitted until 2026-01-20 17:42:59.276288646 +0000 UTC m=+3673.036626603 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "ovndb-tls-certs" (UniqueName: "kubernetes.io/secret/7e627450-8edc-44bd-805d-b0034178dff2-ovndb-tls-certs") pod "neutron-5c4dc89b5d-qp5xf" (UID: "7e627450-8edc-44bd-805d-b0034178dff2") : secret "cert-neutron-ovndbs" not found Jan 20 17:42:57 crc kubenswrapper[4558]: E0120 17:42:57.276307 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-keystone-public-svc: secret "cert-keystone-public-svc" not found Jan 20 17:42:57 crc kubenswrapper[4558]: E0120 17:42:57.276341 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8f7d7846-1b56-4ab5-a0ad-63b179a1a797-public-tls-certs podName:8f7d7846-1b56-4ab5-a0ad-63b179a1a797 nodeName:}" failed. No retries permitted until 2026-01-20 17:43:01.276330975 +0000 UTC m=+3675.036668932 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/8f7d7846-1b56-4ab5-a0ad-63b179a1a797-public-tls-certs") pod "keystone-7dffbf585-vg2sk" (UID: "8f7d7846-1b56-4ab5-a0ad-63b179a1a797") : secret "cert-keystone-public-svc" not found Jan 20 17:42:57 crc kubenswrapper[4558]: E0120 17:42:57.276436 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-swift-internal-svc: secret "cert-swift-internal-svc" not found Jan 20 17:42:57 crc kubenswrapper[4558]: E0120 17:42:57.276445 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7e627450-8edc-44bd-805d-b0034178dff2-internal-tls-certs podName:7e627450-8edc-44bd-805d-b0034178dff2 nodeName:}" failed. No retries permitted until 2026-01-20 17:42:59.276435662 +0000 UTC m=+3673.036773630 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/7e627450-8edc-44bd-805d-b0034178dff2-internal-tls-certs") pod "neutron-5c4dc89b5d-qp5xf" (UID: "7e627450-8edc-44bd-805d-b0034178dff2") : secret "cert-neutron-internal-svc" not found Jan 20 17:42:57 crc kubenswrapper[4558]: E0120 17:42:57.276764 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/858ca371-2b02-46d8-879c-8d859c31d590-internal-tls-certs podName:858ca371-2b02-46d8-879c-8d859c31d590 nodeName:}" failed. No retries permitted until 2026-01-20 17:42:59.276567259 +0000 UTC m=+3673.036905227 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/858ca371-2b02-46d8-879c-8d859c31d590-internal-tls-certs") pod "swift-proxy-6f98b8f5cb-8bwzs" (UID: "858ca371-2b02-46d8-879c-8d859c31d590") : secret "cert-swift-internal-svc" not found Jan 20 17:42:57 crc kubenswrapper[4558]: I0120 17:42:57.311374 4558 generic.go:334] "Generic (PLEG): container finished" podID="25fe26b1-d308-452c-b15f-b1b272de6869" containerID="dcf9b9124b71d8258a72a33535b49292f6fb9585d02e200aa73d63e0ff9b6a33" exitCode=0 Jan 20 17:42:57 crc kubenswrapper[4558]: I0120 17:42:57.311445 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"25fe26b1-d308-452c-b15f-b1b272de6869","Type":"ContainerDied","Data":"dcf9b9124b71d8258a72a33535b49292f6fb9585d02e200aa73d63e0ff9b6a33"} Jan 20 17:42:57 crc kubenswrapper[4558]: I0120 17:42:57.318692 4558 generic.go:334] "Generic (PLEG): container finished" podID="f68fbac2-1677-4664-a344-77a41044ea2a" containerID="25d54c191dc33c7fed19acdcd580fbd43bb913261f66f192926427656b325ef2" exitCode=0 Jan 20 17:42:57 crc kubenswrapper[4558]: I0120 17:42:57.318732 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"f68fbac2-1677-4664-a344-77a41044ea2a","Type":"ContainerDied","Data":"25d54c191dc33c7fed19acdcd580fbd43bb913261f66f192926427656b325ef2"} Jan 20 17:42:57 crc kubenswrapper[4558]: I0120 17:42:57.386978 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack-kuttl-tests/cinder-scheduler-0" podUID="25cfe04f-2194-405f-a9d7-82181e8ac22a" containerName="cinder-scheduler" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 20 17:42:58 crc kubenswrapper[4558]: E0120 17:42:58.302422 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-ovn-metrics: secret "cert-ovn-metrics" not found Jan 20 17:42:58 crc kubenswrapper[4558]: E0120 17:42:58.302549 4558 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/secret/4d2b64b6-6310-4348-a016-e7d8317e00d9-metrics-certs-tls-certs podName:4d2b64b6-6310-4348-a016-e7d8317e00d9 nodeName:}" failed. No retries permitted until 2026-01-20 17:43:02.302525026 +0000 UTC m=+3676.062862993 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs-tls-certs" (UniqueName: "kubernetes.io/secret/4d2b64b6-6310-4348-a016-e7d8317e00d9-metrics-certs-tls-certs") pod "ovn-northd-0" (UID: "4d2b64b6-6310-4348-a016-e7d8317e00d9") : secret "cert-ovn-metrics" not found Jan 20 17:42:58 crc kubenswrapper[4558]: E0120 17:42:58.302451 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-ovnnorthd-ovndbs: secret "cert-ovnnorthd-ovndbs" not found Jan 20 17:42:58 crc kubenswrapper[4558]: E0120 17:42:58.302645 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4d2b64b6-6310-4348-a016-e7d8317e00d9-ovn-northd-tls-certs podName:4d2b64b6-6310-4348-a016-e7d8317e00d9 nodeName:}" failed. No retries permitted until 2026-01-20 17:43:02.302627619 +0000 UTC m=+3676.062965586 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "ovn-northd-tls-certs" (UniqueName: "kubernetes.io/secret/4d2b64b6-6310-4348-a016-e7d8317e00d9-ovn-northd-tls-certs") pod "ovn-northd-0" (UID: "4d2b64b6-6310-4348-a016-e7d8317e00d9") : secret "cert-ovnnorthd-ovndbs" not found Jan 20 17:42:58 crc kubenswrapper[4558]: I0120 17:42:58.331709 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"25fe26b1-d308-452c-b15f-b1b272de6869","Type":"ContainerStarted","Data":"5c3fbeabe3ae0665acc4caf4f15bfaffd0a0fffe254192c44ae1df936c59f917"} Jan 20 17:42:58 crc kubenswrapper[4558]: I0120 17:42:58.338113 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"f68fbac2-1677-4664-a344-77a41044ea2a","Type":"ContainerStarted","Data":"a154b825c6001b834fad5ef3b1acf5a64fc98844395614a8310c4485a3d01a62"} Jan 20 17:42:58 crc kubenswrapper[4558]: I0120 17:42:58.353543 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/openstack-galera-0" podStartSLOduration=5.353529777 podStartE2EDuration="5.353529777s" podCreationTimestamp="2026-01-20 17:42:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:42:58.350775679 +0000 UTC m=+3672.111113646" watchObservedRunningTime="2026-01-20 17:42:58.353529777 +0000 UTC m=+3672.113867744" Jan 20 17:42:58 crc kubenswrapper[4558]: I0120 17:42:58.368977 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/openstack-cell1-galera-0" podStartSLOduration=5.368953798 podStartE2EDuration="5.368953798s" podCreationTimestamp="2026-01-20 17:42:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:42:58.366039618 +0000 UTC m=+3672.126377585" watchObservedRunningTime="2026-01-20 17:42:58.368953798 +0000 UTC m=+3672.129291764" Jan 20 17:42:59 crc kubenswrapper[4558]: E0120 17:42:59.347007 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-neutron-public-svc: secret "cert-neutron-public-svc" not found Jan 20 17:42:59 crc kubenswrapper[4558]: E0120 17:42:59.347516 4558 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/secret/7e627450-8edc-44bd-805d-b0034178dff2-public-tls-certs podName:7e627450-8edc-44bd-805d-b0034178dff2 nodeName:}" failed. No retries permitted until 2026-01-20 17:43:03.34748991 +0000 UTC m=+3677.107827877 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/7e627450-8edc-44bd-805d-b0034178dff2-public-tls-certs") pod "neutron-5c4dc89b5d-qp5xf" (UID: "7e627450-8edc-44bd-805d-b0034178dff2") : secret "cert-neutron-public-svc" not found Jan 20 17:42:59 crc kubenswrapper[4558]: E0120 17:42:59.347024 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-neutron-internal-svc: secret "cert-neutron-internal-svc" not found Jan 20 17:42:59 crc kubenswrapper[4558]: E0120 17:42:59.347634 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7e627450-8edc-44bd-805d-b0034178dff2-internal-tls-certs podName:7e627450-8edc-44bd-805d-b0034178dff2 nodeName:}" failed. No retries permitted until 2026-01-20 17:43:03.347610817 +0000 UTC m=+3677.107948795 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/7e627450-8edc-44bd-805d-b0034178dff2-internal-tls-certs") pod "neutron-5c4dc89b5d-qp5xf" (UID: "7e627450-8edc-44bd-805d-b0034178dff2") : secret "cert-neutron-internal-svc" not found Jan 20 17:42:59 crc kubenswrapper[4558]: E0120 17:42:59.347205 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-swift-public-svc: secret "cert-swift-public-svc" not found Jan 20 17:42:59 crc kubenswrapper[4558]: E0120 17:42:59.347682 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/858ca371-2b02-46d8-879c-8d859c31d590-public-tls-certs podName:858ca371-2b02-46d8-879c-8d859c31d590 nodeName:}" failed. No retries permitted until 2026-01-20 17:43:03.347673014 +0000 UTC m=+3677.108010991 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/858ca371-2b02-46d8-879c-8d859c31d590-public-tls-certs") pod "swift-proxy-6f98b8f5cb-8bwzs" (UID: "858ca371-2b02-46d8-879c-8d859c31d590") : secret "cert-swift-public-svc" not found Jan 20 17:42:59 crc kubenswrapper[4558]: E0120 17:42:59.347241 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-neutron-ovndbs: secret "cert-neutron-ovndbs" not found Jan 20 17:42:59 crc kubenswrapper[4558]: E0120 17:42:59.347721 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7e627450-8edc-44bd-805d-b0034178dff2-ovndb-tls-certs podName:7e627450-8edc-44bd-805d-b0034178dff2 nodeName:}" failed. No retries permitted until 2026-01-20 17:43:03.347714422 +0000 UTC m=+3677.108052399 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "ovndb-tls-certs" (UniqueName: "kubernetes.io/secret/7e627450-8edc-44bd-805d-b0034178dff2-ovndb-tls-certs") pod "neutron-5c4dc89b5d-qp5xf" (UID: "7e627450-8edc-44bd-805d-b0034178dff2") : secret "cert-neutron-ovndbs" not found Jan 20 17:42:59 crc kubenswrapper[4558]: E0120 17:42:59.347249 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-swift-internal-svc: secret "cert-swift-internal-svc" not found Jan 20 17:42:59 crc kubenswrapper[4558]: E0120 17:42:59.347810 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/858ca371-2b02-46d8-879c-8d859c31d590-internal-tls-certs podName:858ca371-2b02-46d8-879c-8d859c31d590 nodeName:}" failed. 
No retries permitted until 2026-01-20 17:43:03.347791938 +0000 UTC m=+3677.108129906 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/858ca371-2b02-46d8-879c-8d859c31d590-internal-tls-certs") pod "swift-proxy-6f98b8f5cb-8bwzs" (UID: "858ca371-2b02-46d8-879c-8d859c31d590") : secret "cert-swift-internal-svc" not found Jan 20 17:42:59 crc kubenswrapper[4558]: E0120 17:42:59.449331 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-ovn-metrics: secret "cert-ovn-metrics" not found Jan 20 17:42:59 crc kubenswrapper[4558]: E0120 17:42:59.449431 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b5ab3ab7-6929-4165-91ad-860f5f109147-metrics-certs-tls-certs podName:b5ab3ab7-6929-4165-91ad-860f5f109147 nodeName:}" failed. No retries permitted until 2026-01-20 17:43:15.449406839 +0000 UTC m=+3689.209744806 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "metrics-certs-tls-certs" (UniqueName: "kubernetes.io/secret/b5ab3ab7-6929-4165-91ad-860f5f109147-metrics-certs-tls-certs") pod "ovsdbserver-sb-0" (UID: "b5ab3ab7-6929-4165-91ad-860f5f109147") : secret "cert-ovn-metrics" not found Jan 20 17:42:59 crc kubenswrapper[4558]: E0120 17:42:59.449678 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-ovndbcluster-sb-ovndbs: secret "cert-ovndbcluster-sb-ovndbs" not found Jan 20 17:42:59 crc kubenswrapper[4558]: E0120 17:42:59.449762 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b5ab3ab7-6929-4165-91ad-860f5f109147-ovsdbserver-sb-tls-certs podName:b5ab3ab7-6929-4165-91ad-860f5f109147 nodeName:}" failed. No retries permitted until 2026-01-20 17:43:15.449743171 +0000 UTC m=+3689.210081128 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "ovsdbserver-sb-tls-certs" (UniqueName: "kubernetes.io/secret/b5ab3ab7-6929-4165-91ad-860f5f109147-ovsdbserver-sb-tls-certs") pod "ovsdbserver-sb-0" (UID: "b5ab3ab7-6929-4165-91ad-860f5f109147") : secret "cert-ovndbcluster-sb-ovndbs" not found Jan 20 17:42:59 crc kubenswrapper[4558]: I0120 17:42:59.815777 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:42:59 crc kubenswrapper[4558]: I0120 17:42:59.816000 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/memcached-0" podUID="02756298-ea84-4151-8ce2-32d602e2f7a7" containerName="memcached" containerID="cri-o://04b878c52f22dc6b748fcd22473200dd00e366b507cfd99f30210f8c582eb868" gracePeriod=30 Jan 20 17:42:59 crc kubenswrapper[4558]: I0120 17:42:59.871100 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:42:59 crc kubenswrapper[4558]: I0120 17:42:59.871364 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="552b666d-9d00-469a-9692-45a650c89509" containerName="glance-log" containerID="cri-o://14506618325c0d6ec0a676afd800224b20b8f064115011601c645fa282561267" gracePeriod=30 Jan 20 17:42:59 crc kubenswrapper[4558]: I0120 17:42:59.871440 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="552b666d-9d00-469a-9692-45a650c89509" containerName="glance-httpd" containerID="cri-o://5afb53a255c36f87bdaa69785aabcf9ff9ac19df018da6ed05b316bca41bf72e" gracePeriod=30 Jan 20 17:42:59 crc kubenswrapper[4558]: I0120 17:42:59.930126 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:42:59 crc kubenswrapper[4558]: I0120 17:42:59.930375 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="2206aa45-a065-4965-8540-a2ba7c707155" containerName="glance-log" containerID="cri-o://5068001b8f2449f80ce36f3e31ba0522c81e3602c230aa8688078f521d43ddc6" gracePeriod=30 Jan 20 17:42:59 crc kubenswrapper[4558]: I0120 17:42:59.930494 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="2206aa45-a065-4965-8540-a2ba7c707155" containerName="glance-httpd" containerID="cri-o://55844a34ec7ec510b2a22da96f2d4570e9b9f40732ea28ed1bd947615b0e17b3" gracePeriod=30 Jan 20 17:43:00 crc kubenswrapper[4558]: I0120 17:43:00.079959 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-6bb454b456-wjhln"] Jan 20 17:43:00 crc kubenswrapper[4558]: I0120 17:43:00.084464 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-6bb454b456-wjhln" Jan 20 17:43:00 crc kubenswrapper[4558]: I0120 17:43:00.092873 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-6bb454b456-wjhln"] Jan 20 17:43:00 crc kubenswrapper[4558]: I0120 17:43:00.268718 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7935bdc8-1434-418a-a3ad-b165ee8be23e-config-data\") pod \"keystone-6bb454b456-wjhln\" (UID: \"7935bdc8-1434-418a-a3ad-b165ee8be23e\") " pod="openstack-kuttl-tests/keystone-6bb454b456-wjhln" Jan 20 17:43:00 crc kubenswrapper[4558]: I0120 17:43:00.268788 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7935bdc8-1434-418a-a3ad-b165ee8be23e-scripts\") pod \"keystone-6bb454b456-wjhln\" (UID: \"7935bdc8-1434-418a-a3ad-b165ee8be23e\") " pod="openstack-kuttl-tests/keystone-6bb454b456-wjhln" Jan 20 17:43:00 crc kubenswrapper[4558]: I0120 17:43:00.268811 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/7935bdc8-1434-418a-a3ad-b165ee8be23e-credential-keys\") pod \"keystone-6bb454b456-wjhln\" (UID: \"7935bdc8-1434-418a-a3ad-b165ee8be23e\") " pod="openstack-kuttl-tests/keystone-6bb454b456-wjhln" Jan 20 17:43:00 crc kubenswrapper[4558]: I0120 17:43:00.268858 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2xmfb\" (UniqueName: \"kubernetes.io/projected/7935bdc8-1434-418a-a3ad-b165ee8be23e-kube-api-access-2xmfb\") pod \"keystone-6bb454b456-wjhln\" (UID: \"7935bdc8-1434-418a-a3ad-b165ee8be23e\") " pod="openstack-kuttl-tests/keystone-6bb454b456-wjhln" Jan 20 17:43:00 crc kubenswrapper[4558]: I0120 17:43:00.268909 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7935bdc8-1434-418a-a3ad-b165ee8be23e-combined-ca-bundle\") pod \"keystone-6bb454b456-wjhln\" (UID: \"7935bdc8-1434-418a-a3ad-b165ee8be23e\") " pod="openstack-kuttl-tests/keystone-6bb454b456-wjhln" Jan 20 17:43:00 crc kubenswrapper[4558]: I0120 17:43:00.268956 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7935bdc8-1434-418a-a3ad-b165ee8be23e-fernet-keys\") pod \"keystone-6bb454b456-wjhln\" (UID: \"7935bdc8-1434-418a-a3ad-b165ee8be23e\") " pod="openstack-kuttl-tests/keystone-6bb454b456-wjhln" Jan 20 17:43:00 crc kubenswrapper[4558]: I0120 17:43:00.269037 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7935bdc8-1434-418a-a3ad-b165ee8be23e-internal-tls-certs\") pod \"keystone-6bb454b456-wjhln\" (UID: \"7935bdc8-1434-418a-a3ad-b165ee8be23e\") " pod="openstack-kuttl-tests/keystone-6bb454b456-wjhln" Jan 20 17:43:00 crc kubenswrapper[4558]: I0120 17:43:00.269102 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7935bdc8-1434-418a-a3ad-b165ee8be23e-public-tls-certs\") pod \"keystone-6bb454b456-wjhln\" (UID: \"7935bdc8-1434-418a-a3ad-b165ee8be23e\") " pod="openstack-kuttl-tests/keystone-6bb454b456-wjhln" 
Jan 20 17:43:00 crc kubenswrapper[4558]: I0120 17:43:00.358689 4558 generic.go:334] "Generic (PLEG): container finished" podID="552b666d-9d00-469a-9692-45a650c89509" containerID="14506618325c0d6ec0a676afd800224b20b8f064115011601c645fa282561267" exitCode=143 Jan 20 17:43:00 crc kubenswrapper[4558]: I0120 17:43:00.358784 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"552b666d-9d00-469a-9692-45a650c89509","Type":"ContainerDied","Data":"14506618325c0d6ec0a676afd800224b20b8f064115011601c645fa282561267"} Jan 20 17:43:00 crc kubenswrapper[4558]: I0120 17:43:00.361660 4558 generic.go:334] "Generic (PLEG): container finished" podID="2206aa45-a065-4965-8540-a2ba7c707155" containerID="5068001b8f2449f80ce36f3e31ba0522c81e3602c230aa8688078f521d43ddc6" exitCode=143 Jan 20 17:43:00 crc kubenswrapper[4558]: I0120 17:43:00.361720 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"2206aa45-a065-4965-8540-a2ba7c707155","Type":"ContainerDied","Data":"5068001b8f2449f80ce36f3e31ba0522c81e3602c230aa8688078f521d43ddc6"} Jan 20 17:43:00 crc kubenswrapper[4558]: I0120 17:43:00.371337 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7935bdc8-1434-418a-a3ad-b165ee8be23e-combined-ca-bundle\") pod \"keystone-6bb454b456-wjhln\" (UID: \"7935bdc8-1434-418a-a3ad-b165ee8be23e\") " pod="openstack-kuttl-tests/keystone-6bb454b456-wjhln" Jan 20 17:43:00 crc kubenswrapper[4558]: I0120 17:43:00.371381 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7935bdc8-1434-418a-a3ad-b165ee8be23e-fernet-keys\") pod \"keystone-6bb454b456-wjhln\" (UID: \"7935bdc8-1434-418a-a3ad-b165ee8be23e\") " pod="openstack-kuttl-tests/keystone-6bb454b456-wjhln" Jan 20 17:43:00 crc kubenswrapper[4558]: I0120 17:43:00.371454 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7935bdc8-1434-418a-a3ad-b165ee8be23e-internal-tls-certs\") pod \"keystone-6bb454b456-wjhln\" (UID: \"7935bdc8-1434-418a-a3ad-b165ee8be23e\") " pod="openstack-kuttl-tests/keystone-6bb454b456-wjhln" Jan 20 17:43:00 crc kubenswrapper[4558]: I0120 17:43:00.371520 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7935bdc8-1434-418a-a3ad-b165ee8be23e-public-tls-certs\") pod \"keystone-6bb454b456-wjhln\" (UID: \"7935bdc8-1434-418a-a3ad-b165ee8be23e\") " pod="openstack-kuttl-tests/keystone-6bb454b456-wjhln" Jan 20 17:43:00 crc kubenswrapper[4558]: I0120 17:43:00.371641 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7935bdc8-1434-418a-a3ad-b165ee8be23e-config-data\") pod \"keystone-6bb454b456-wjhln\" (UID: \"7935bdc8-1434-418a-a3ad-b165ee8be23e\") " pod="openstack-kuttl-tests/keystone-6bb454b456-wjhln" Jan 20 17:43:00 crc kubenswrapper[4558]: I0120 17:43:00.371686 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7935bdc8-1434-418a-a3ad-b165ee8be23e-scripts\") pod \"keystone-6bb454b456-wjhln\" (UID: \"7935bdc8-1434-418a-a3ad-b165ee8be23e\") " pod="openstack-kuttl-tests/keystone-6bb454b456-wjhln" Jan 20 17:43:00 crc kubenswrapper[4558]: 
I0120 17:43:00.371711 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/7935bdc8-1434-418a-a3ad-b165ee8be23e-credential-keys\") pod \"keystone-6bb454b456-wjhln\" (UID: \"7935bdc8-1434-418a-a3ad-b165ee8be23e\") " pod="openstack-kuttl-tests/keystone-6bb454b456-wjhln" Jan 20 17:43:00 crc kubenswrapper[4558]: I0120 17:43:00.371747 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2xmfb\" (UniqueName: \"kubernetes.io/projected/7935bdc8-1434-418a-a3ad-b165ee8be23e-kube-api-access-2xmfb\") pod \"keystone-6bb454b456-wjhln\" (UID: \"7935bdc8-1434-418a-a3ad-b165ee8be23e\") " pod="openstack-kuttl-tests/keystone-6bb454b456-wjhln" Jan 20 17:43:00 crc kubenswrapper[4558]: I0120 17:43:00.377712 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/7935bdc8-1434-418a-a3ad-b165ee8be23e-credential-keys\") pod \"keystone-6bb454b456-wjhln\" (UID: \"7935bdc8-1434-418a-a3ad-b165ee8be23e\") " pod="openstack-kuttl-tests/keystone-6bb454b456-wjhln" Jan 20 17:43:00 crc kubenswrapper[4558]: I0120 17:43:00.378489 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7935bdc8-1434-418a-a3ad-b165ee8be23e-config-data\") pod \"keystone-6bb454b456-wjhln\" (UID: \"7935bdc8-1434-418a-a3ad-b165ee8be23e\") " pod="openstack-kuttl-tests/keystone-6bb454b456-wjhln" Jan 20 17:43:00 crc kubenswrapper[4558]: I0120 17:43:00.379033 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7935bdc8-1434-418a-a3ad-b165ee8be23e-scripts\") pod \"keystone-6bb454b456-wjhln\" (UID: \"7935bdc8-1434-418a-a3ad-b165ee8be23e\") " pod="openstack-kuttl-tests/keystone-6bb454b456-wjhln" Jan 20 17:43:00 crc kubenswrapper[4558]: I0120 17:43:00.379039 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7935bdc8-1434-418a-a3ad-b165ee8be23e-internal-tls-certs\") pod \"keystone-6bb454b456-wjhln\" (UID: \"7935bdc8-1434-418a-a3ad-b165ee8be23e\") " pod="openstack-kuttl-tests/keystone-6bb454b456-wjhln" Jan 20 17:43:00 crc kubenswrapper[4558]: I0120 17:43:00.379241 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7935bdc8-1434-418a-a3ad-b165ee8be23e-combined-ca-bundle\") pod \"keystone-6bb454b456-wjhln\" (UID: \"7935bdc8-1434-418a-a3ad-b165ee8be23e\") " pod="openstack-kuttl-tests/keystone-6bb454b456-wjhln" Jan 20 17:43:00 crc kubenswrapper[4558]: I0120 17:43:00.379989 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7935bdc8-1434-418a-a3ad-b165ee8be23e-fernet-keys\") pod \"keystone-6bb454b456-wjhln\" (UID: \"7935bdc8-1434-418a-a3ad-b165ee8be23e\") " pod="openstack-kuttl-tests/keystone-6bb454b456-wjhln" Jan 20 17:43:00 crc kubenswrapper[4558]: I0120 17:43:00.380573 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7935bdc8-1434-418a-a3ad-b165ee8be23e-public-tls-certs\") pod \"keystone-6bb454b456-wjhln\" (UID: \"7935bdc8-1434-418a-a3ad-b165ee8be23e\") " pod="openstack-kuttl-tests/keystone-6bb454b456-wjhln" Jan 20 17:43:00 crc kubenswrapper[4558]: I0120 17:43:00.390143 4558 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-2xmfb\" (UniqueName: \"kubernetes.io/projected/7935bdc8-1434-418a-a3ad-b165ee8be23e-kube-api-access-2xmfb\") pod \"keystone-6bb454b456-wjhln\" (UID: \"7935bdc8-1434-418a-a3ad-b165ee8be23e\") " pod="openstack-kuttl-tests/keystone-6bb454b456-wjhln" Jan 20 17:43:00 crc kubenswrapper[4558]: I0120 17:43:00.400970 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack-kuttl-tests/cinder-scheduler-0" podUID="25cfe04f-2194-405f-a9d7-82181e8ac22a" containerName="cinder-scheduler" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 20 17:43:00 crc kubenswrapper[4558]: I0120 17:43:00.401201 4558 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:43:00 crc kubenswrapper[4558]: I0120 17:43:00.402441 4558 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="cinder-scheduler" containerStatusID={"Type":"cri-o","ID":"080004aa669f36ce8bb441200bb1b70a9222d0a13602ec478e6d8d26c1fbf426"} pod="openstack-kuttl-tests/cinder-scheduler-0" containerMessage="Container cinder-scheduler failed liveness probe, will be restarted" Jan 20 17:43:00 crc kubenswrapper[4558]: I0120 17:43:00.402590 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-scheduler-0" podUID="25cfe04f-2194-405f-a9d7-82181e8ac22a" containerName="cinder-scheduler" containerID="cri-o://080004aa669f36ce8bb441200bb1b70a9222d0a13602ec478e6d8d26c1fbf426" gracePeriod=30 Jan 20 17:43:00 crc kubenswrapper[4558]: I0120 17:43:00.406354 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-6bb454b456-wjhln" Jan 20 17:43:00 crc kubenswrapper[4558]: I0120 17:43:00.818476 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-6bb454b456-wjhln"] Jan 20 17:43:00 crc kubenswrapper[4558]: W0120 17:43:00.823811 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7935bdc8_1434_418a_a3ad_b165ee8be23e.slice/crio-e925a826ed08dc1090056b5b6c39cc5d3e191110012fc2e1ee085377502021e6 WatchSource:0}: Error finding container e925a826ed08dc1090056b5b6c39cc5d3e191110012fc2e1ee085377502021e6: Status 404 returned error can't find the container with id e925a826ed08dc1090056b5b6c39cc5d3e191110012fc2e1ee085377502021e6 Jan 20 17:43:01 crc kubenswrapper[4558]: E0120 17:43:01.297512 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-placement-public-svc: secret "cert-placement-public-svc" not found Jan 20 17:43:01 crc kubenswrapper[4558]: E0120 17:43:01.297592 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bf32468e-b0a7-4b81-bb49-c65a95997903-public-tls-certs podName:bf32468e-b0a7-4b81-bb49-c65a95997903 nodeName:}" failed. No retries permitted until 2026-01-20 17:43:09.297574627 +0000 UTC m=+3683.057912593 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/bf32468e-b0a7-4b81-bb49-c65a95997903-public-tls-certs") pod "placement-69d7f65964-r46nm" (UID: "bf32468e-b0a7-4b81-bb49-c65a95997903") : secret "cert-placement-public-svc" not found Jan 20 17:43:01 crc kubenswrapper[4558]: E0120 17:43:01.297976 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-placement-internal-svc: secret "cert-placement-internal-svc" not found Jan 20 17:43:01 crc kubenswrapper[4558]: E0120 17:43:01.298013 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bf32468e-b0a7-4b81-bb49-c65a95997903-internal-tls-certs podName:bf32468e-b0a7-4b81-bb49-c65a95997903 nodeName:}" failed. No retries permitted until 2026-01-20 17:43:09.298005086 +0000 UTC m=+3683.058343053 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/bf32468e-b0a7-4b81-bb49-c65a95997903-internal-tls-certs") pod "placement-69d7f65964-r46nm" (UID: "bf32468e-b0a7-4b81-bb49-c65a95997903") : secret "cert-placement-internal-svc" not found Jan 20 17:43:01 crc kubenswrapper[4558]: I0120 17:43:01.382078 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-6bb454b456-wjhln" event={"ID":"7935bdc8-1434-418a-a3ad-b165ee8be23e","Type":"ContainerStarted","Data":"d6a76d69efdb8d9de2921accc3eae11c4dd079ceb576dcf301178d78b9a86d25"} Jan 20 17:43:01 crc kubenswrapper[4558]: I0120 17:43:01.382125 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-6bb454b456-wjhln" event={"ID":"7935bdc8-1434-418a-a3ad-b165ee8be23e","Type":"ContainerStarted","Data":"e925a826ed08dc1090056b5b6c39cc5d3e191110012fc2e1ee085377502021e6"} Jan 20 17:43:01 crc kubenswrapper[4558]: I0120 17:43:01.383550 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/keystone-6bb454b456-wjhln" Jan 20 17:43:01 crc kubenswrapper[4558]: I0120 17:43:01.385627 4558 generic.go:334] "Generic (PLEG): container finished" podID="02756298-ea84-4151-8ce2-32d602e2f7a7" containerID="04b878c52f22dc6b748fcd22473200dd00e366b507cfd99f30210f8c582eb868" exitCode=0 Jan 20 17:43:01 crc kubenswrapper[4558]: I0120 17:43:01.385680 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"02756298-ea84-4151-8ce2-32d602e2f7a7","Type":"ContainerDied","Data":"04b878c52f22dc6b748fcd22473200dd00e366b507cfd99f30210f8c582eb868"} Jan 20 17:43:01 crc kubenswrapper[4558]: I0120 17:43:01.414266 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/keystone-6bb454b456-wjhln" podStartSLOduration=1.414238763 podStartE2EDuration="1.414238763s" podCreationTimestamp="2026-01-20 17:43:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:43:01.400072066 +0000 UTC m=+3675.160410023" watchObservedRunningTime="2026-01-20 17:43:01.414238763 +0000 UTC m=+3675.174576730" Jan 20 17:43:01 crc kubenswrapper[4558]: I0120 17:43:01.626608 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:43:01 crc kubenswrapper[4558]: I0120 17:43:01.755617 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:43:01 crc kubenswrapper[4558]: I0120 17:43:01.756022 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="e43647af-b3c8-423f-b88c-d3a9b1aacef2" containerName="ceilometer-central-agent" containerID="cri-o://bb7f865b92b1c710c22f66a0b1afc2b13b7529e74d6bbbbdbce454bf11a3c1a0" gracePeriod=30 Jan 20 17:43:01 crc kubenswrapper[4558]: I0120 17:43:01.756120 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="e43647af-b3c8-423f-b88c-d3a9b1aacef2" containerName="sg-core" containerID="cri-o://c72b716d9037758e9baeb566c5ac04381df6b5dc61728fdf1da64996d680ebeb" gracePeriod=30 Jan 20 17:43:01 crc kubenswrapper[4558]: I0120 17:43:01.756199 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="e43647af-b3c8-423f-b88c-d3a9b1aacef2" containerName="proxy-httpd" containerID="cri-o://8caf6b3ac50eb079a954e2fde982cd0bb14aa26097a6d791646bafef2d44e268" gracePeriod=30 Jan 20 17:43:01 crc kubenswrapper[4558]: I0120 17:43:01.756226 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="e43647af-b3c8-423f-b88c-d3a9b1aacef2" containerName="ceilometer-notification-agent" containerID="cri-o://4842b1e75542f7bbd13f59782fc4bdc7732e7ba2586beb53f10328006641cd69" gracePeriod=30 Jan 20 17:43:01 crc kubenswrapper[4558]: I0120 17:43:01.811831 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/02756298-ea84-4151-8ce2-32d602e2f7a7-memcached-tls-certs\") pod \"02756298-ea84-4151-8ce2-32d602e2f7a7\" (UID: \"02756298-ea84-4151-8ce2-32d602e2f7a7\") " Jan 20 17:43:01 crc kubenswrapper[4558]: I0120 17:43:01.811931 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02756298-ea84-4151-8ce2-32d602e2f7a7-combined-ca-bundle\") pod \"02756298-ea84-4151-8ce2-32d602e2f7a7\" (UID: \"02756298-ea84-4151-8ce2-32d602e2f7a7\") " Jan 20 17:43:01 crc kubenswrapper[4558]: I0120 17:43:01.811981 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/02756298-ea84-4151-8ce2-32d602e2f7a7-config-data\") pod \"02756298-ea84-4151-8ce2-32d602e2f7a7\" (UID: \"02756298-ea84-4151-8ce2-32d602e2f7a7\") " Jan 20 17:43:01 crc kubenswrapper[4558]: I0120 17:43:01.812031 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gjnms\" (UniqueName: \"kubernetes.io/projected/02756298-ea84-4151-8ce2-32d602e2f7a7-kube-api-access-gjnms\") pod \"02756298-ea84-4151-8ce2-32d602e2f7a7\" (UID: \"02756298-ea84-4151-8ce2-32d602e2f7a7\") " Jan 20 17:43:01 crc kubenswrapper[4558]: I0120 17:43:01.812673 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/02756298-ea84-4151-8ce2-32d602e2f7a7-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "02756298-ea84-4151-8ce2-32d602e2f7a7" (UID: "02756298-ea84-4151-8ce2-32d602e2f7a7"). InnerVolumeSpecName "kolla-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:43:01 crc kubenswrapper[4558]: I0120 17:43:01.812709 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/02756298-ea84-4151-8ce2-32d602e2f7a7-config-data" (OuterVolumeSpecName: "config-data") pod "02756298-ea84-4151-8ce2-32d602e2f7a7" (UID: "02756298-ea84-4151-8ce2-32d602e2f7a7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:43:01 crc kubenswrapper[4558]: I0120 17:43:01.812827 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/02756298-ea84-4151-8ce2-32d602e2f7a7-kolla-config\") pod \"02756298-ea84-4151-8ce2-32d602e2f7a7\" (UID: \"02756298-ea84-4151-8ce2-32d602e2f7a7\") " Jan 20 17:43:01 crc kubenswrapper[4558]: I0120 17:43:01.814023 4558 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/02756298-ea84-4151-8ce2-32d602e2f7a7-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:01 crc kubenswrapper[4558]: I0120 17:43:01.814145 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/02756298-ea84-4151-8ce2-32d602e2f7a7-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:01 crc kubenswrapper[4558]: I0120 17:43:01.821425 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/02756298-ea84-4151-8ce2-32d602e2f7a7-kube-api-access-gjnms" (OuterVolumeSpecName: "kube-api-access-gjnms") pod "02756298-ea84-4151-8ce2-32d602e2f7a7" (UID: "02756298-ea84-4151-8ce2-32d602e2f7a7"). InnerVolumeSpecName "kube-api-access-gjnms". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:43:01 crc kubenswrapper[4558]: I0120 17:43:01.851124 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02756298-ea84-4151-8ce2-32d602e2f7a7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "02756298-ea84-4151-8ce2-32d602e2f7a7" (UID: "02756298-ea84-4151-8ce2-32d602e2f7a7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:01 crc kubenswrapper[4558]: I0120 17:43:01.866818 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02756298-ea84-4151-8ce2-32d602e2f7a7-memcached-tls-certs" (OuterVolumeSpecName: "memcached-tls-certs") pod "02756298-ea84-4151-8ce2-32d602e2f7a7" (UID: "02756298-ea84-4151-8ce2-32d602e2f7a7"). InnerVolumeSpecName "memcached-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:01 crc kubenswrapper[4558]: I0120 17:43:01.916298 4558 reconciler_common.go:293] "Volume detached for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/02756298-ea84-4151-8ce2-32d602e2f7a7-memcached-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:01 crc kubenswrapper[4558]: I0120 17:43:01.916337 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02756298-ea84-4151-8ce2-32d602e2f7a7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:01 crc kubenswrapper[4558]: I0120 17:43:01.916347 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gjnms\" (UniqueName: \"kubernetes.io/projected/02756298-ea84-4151-8ce2-32d602e2f7a7-kube-api-access-gjnms\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:02 crc kubenswrapper[4558]: E0120 17:43:02.328771 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-ovnnorthd-ovndbs: secret "cert-ovnnorthd-ovndbs" not found Jan 20 17:43:02 crc kubenswrapper[4558]: E0120 17:43:02.328991 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-ovn-metrics: secret "cert-ovn-metrics" not found Jan 20 17:43:02 crc kubenswrapper[4558]: E0120 17:43:02.329001 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4d2b64b6-6310-4348-a016-e7d8317e00d9-ovn-northd-tls-certs podName:4d2b64b6-6310-4348-a016-e7d8317e00d9 nodeName:}" failed. No retries permitted until 2026-01-20 17:43:10.328980116 +0000 UTC m=+3684.089318083 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "ovn-northd-tls-certs" (UniqueName: "kubernetes.io/secret/4d2b64b6-6310-4348-a016-e7d8317e00d9-ovn-northd-tls-certs") pod "ovn-northd-0" (UID: "4d2b64b6-6310-4348-a016-e7d8317e00d9") : secret "cert-ovnnorthd-ovndbs" not found Jan 20 17:43:02 crc kubenswrapper[4558]: E0120 17:43:02.329044 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4d2b64b6-6310-4348-a016-e7d8317e00d9-metrics-certs-tls-certs podName:4d2b64b6-6310-4348-a016-e7d8317e00d9 nodeName:}" failed. No retries permitted until 2026-01-20 17:43:10.329028436 +0000 UTC m=+3684.089366404 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs-tls-certs" (UniqueName: "kubernetes.io/secret/4d2b64b6-6310-4348-a016-e7d8317e00d9-metrics-certs-tls-certs") pod "ovn-northd-0" (UID: "4d2b64b6-6310-4348-a016-e7d8317e00d9") : secret "cert-ovn-metrics" not found Jan 20 17:43:02 crc kubenswrapper[4558]: I0120 17:43:02.394969 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"02756298-ea84-4151-8ce2-32d602e2f7a7","Type":"ContainerDied","Data":"618d6b185418a6c40e6fbef551b96474f87263cd7b28bbbd6723b9adc2e0f4b4"} Jan 20 17:43:02 crc kubenswrapper[4558]: I0120 17:43:02.395039 4558 scope.go:117] "RemoveContainer" containerID="04b878c52f22dc6b748fcd22473200dd00e366b507cfd99f30210f8c582eb868" Jan 20 17:43:02 crc kubenswrapper[4558]: I0120 17:43:02.395042 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:43:02 crc kubenswrapper[4558]: I0120 17:43:02.398115 4558 generic.go:334] "Generic (PLEG): container finished" podID="e43647af-b3c8-423f-b88c-d3a9b1aacef2" containerID="8caf6b3ac50eb079a954e2fde982cd0bb14aa26097a6d791646bafef2d44e268" exitCode=0 Jan 20 17:43:02 crc kubenswrapper[4558]: I0120 17:43:02.398142 4558 generic.go:334] "Generic (PLEG): container finished" podID="e43647af-b3c8-423f-b88c-d3a9b1aacef2" containerID="c72b716d9037758e9baeb566c5ac04381df6b5dc61728fdf1da64996d680ebeb" exitCode=2 Jan 20 17:43:02 crc kubenswrapper[4558]: I0120 17:43:02.398150 4558 generic.go:334] "Generic (PLEG): container finished" podID="e43647af-b3c8-423f-b88c-d3a9b1aacef2" containerID="bb7f865b92b1c710c22f66a0b1afc2b13b7529e74d6bbbbdbce454bf11a3c1a0" exitCode=0 Jan 20 17:43:02 crc kubenswrapper[4558]: I0120 17:43:02.398189 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"e43647af-b3c8-423f-b88c-d3a9b1aacef2","Type":"ContainerDied","Data":"8caf6b3ac50eb079a954e2fde982cd0bb14aa26097a6d791646bafef2d44e268"} Jan 20 17:43:02 crc kubenswrapper[4558]: I0120 17:43:02.398224 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"e43647af-b3c8-423f-b88c-d3a9b1aacef2","Type":"ContainerDied","Data":"c72b716d9037758e9baeb566c5ac04381df6b5dc61728fdf1da64996d680ebeb"} Jan 20 17:43:02 crc kubenswrapper[4558]: I0120 17:43:02.398236 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"e43647af-b3c8-423f-b88c-d3a9b1aacef2","Type":"ContainerDied","Data":"bb7f865b92b1c710c22f66a0b1afc2b13b7529e74d6bbbbdbce454bf11a3c1a0"} Jan 20 17:43:02 crc kubenswrapper[4558]: I0120 17:43:02.422021 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:43:02 crc kubenswrapper[4558]: I0120 17:43:02.433409 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:43:02 crc kubenswrapper[4558]: I0120 17:43:02.443087 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:43:02 crc kubenswrapper[4558]: E0120 17:43:02.443525 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02756298-ea84-4151-8ce2-32d602e2f7a7" containerName="memcached" Jan 20 17:43:02 crc kubenswrapper[4558]: I0120 17:43:02.443543 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="02756298-ea84-4151-8ce2-32d602e2f7a7" containerName="memcached" Jan 20 17:43:02 crc kubenswrapper[4558]: I0120 17:43:02.443768 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="02756298-ea84-4151-8ce2-32d602e2f7a7" containerName="memcached" Jan 20 17:43:02 crc kubenswrapper[4558]: I0120 17:43:02.444456 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:43:02 crc kubenswrapper[4558]: I0120 17:43:02.446205 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"memcached-config-data" Jan 20 17:43:02 crc kubenswrapper[4558]: I0120 17:43:02.446303 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"memcached-memcached-dockercfg-sqmsc" Jan 20 17:43:02 crc kubenswrapper[4558]: I0120 17:43:02.446328 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-memcached-svc" Jan 20 17:43:02 crc kubenswrapper[4558]: I0120 17:43:02.451100 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:43:02 crc kubenswrapper[4558]: I0120 17:43:02.533730 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a5db7bd5-e0c6-4645-ab63-3a2a21fdc854-config-data\") pod \"memcached-0\" (UID: \"a5db7bd5-e0c6-4645-ab63-3a2a21fdc854\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:43:02 crc kubenswrapper[4558]: I0120 17:43:02.533838 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5db7bd5-e0c6-4645-ab63-3a2a21fdc854-combined-ca-bundle\") pod \"memcached-0\" (UID: \"a5db7bd5-e0c6-4645-ab63-3a2a21fdc854\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:43:02 crc kubenswrapper[4558]: I0120 17:43:02.533865 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tqthz\" (UniqueName: \"kubernetes.io/projected/a5db7bd5-e0c6-4645-ab63-3a2a21fdc854-kube-api-access-tqthz\") pod \"memcached-0\" (UID: \"a5db7bd5-e0c6-4645-ab63-3a2a21fdc854\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:43:02 crc kubenswrapper[4558]: I0120 17:43:02.533899 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/a5db7bd5-e0c6-4645-ab63-3a2a21fdc854-kolla-config\") pod \"memcached-0\" (UID: \"a5db7bd5-e0c6-4645-ab63-3a2a21fdc854\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:43:02 crc kubenswrapper[4558]: I0120 17:43:02.533935 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/a5db7bd5-e0c6-4645-ab63-3a2a21fdc854-memcached-tls-certs\") pod \"memcached-0\" (UID: \"a5db7bd5-e0c6-4645-ab63-3a2a21fdc854\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:43:02 crc kubenswrapper[4558]: I0120 17:43:02.576514 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="02756298-ea84-4151-8ce2-32d602e2f7a7" path="/var/lib/kubelet/pods/02756298-ea84-4151-8ce2-32d602e2f7a7/volumes" Jan 20 17:43:02 crc kubenswrapper[4558]: I0120 17:43:02.636552 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a5db7bd5-e0c6-4645-ab63-3a2a21fdc854-config-data\") pod \"memcached-0\" (UID: \"a5db7bd5-e0c6-4645-ab63-3a2a21fdc854\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:43:02 crc kubenswrapper[4558]: I0120 17:43:02.636678 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/a5db7bd5-e0c6-4645-ab63-3a2a21fdc854-combined-ca-bundle\") pod \"memcached-0\" (UID: \"a5db7bd5-e0c6-4645-ab63-3a2a21fdc854\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:43:02 crc kubenswrapper[4558]: I0120 17:43:02.636712 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tqthz\" (UniqueName: \"kubernetes.io/projected/a5db7bd5-e0c6-4645-ab63-3a2a21fdc854-kube-api-access-tqthz\") pod \"memcached-0\" (UID: \"a5db7bd5-e0c6-4645-ab63-3a2a21fdc854\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:43:02 crc kubenswrapper[4558]: I0120 17:43:02.636744 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/a5db7bd5-e0c6-4645-ab63-3a2a21fdc854-kolla-config\") pod \"memcached-0\" (UID: \"a5db7bd5-e0c6-4645-ab63-3a2a21fdc854\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:43:02 crc kubenswrapper[4558]: I0120 17:43:02.636799 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/a5db7bd5-e0c6-4645-ab63-3a2a21fdc854-memcached-tls-certs\") pod \"memcached-0\" (UID: \"a5db7bd5-e0c6-4645-ab63-3a2a21fdc854\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:43:02 crc kubenswrapper[4558]: I0120 17:43:02.640220 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/a5db7bd5-e0c6-4645-ab63-3a2a21fdc854-kolla-config\") pod \"memcached-0\" (UID: \"a5db7bd5-e0c6-4645-ab63-3a2a21fdc854\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:43:02 crc kubenswrapper[4558]: I0120 17:43:02.640236 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a5db7bd5-e0c6-4645-ab63-3a2a21fdc854-config-data\") pod \"memcached-0\" (UID: \"a5db7bd5-e0c6-4645-ab63-3a2a21fdc854\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:43:02 crc kubenswrapper[4558]: I0120 17:43:02.644700 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5db7bd5-e0c6-4645-ab63-3a2a21fdc854-combined-ca-bundle\") pod \"memcached-0\" (UID: \"a5db7bd5-e0c6-4645-ab63-3a2a21fdc854\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:43:02 crc kubenswrapper[4558]: I0120 17:43:02.650465 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/a5db7bd5-e0c6-4645-ab63-3a2a21fdc854-memcached-tls-certs\") pod \"memcached-0\" (UID: \"a5db7bd5-e0c6-4645-ab63-3a2a21fdc854\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:43:02 crc kubenswrapper[4558]: I0120 17:43:02.652884 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tqthz\" (UniqueName: \"kubernetes.io/projected/a5db7bd5-e0c6-4645-ab63-3a2a21fdc854-kube-api-access-tqthz\") pod \"memcached-0\" (UID: \"a5db7bd5-e0c6-4645-ab63-3a2a21fdc854\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:43:02 crc kubenswrapper[4558]: I0120 17:43:02.759957 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.173700 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:43:03 crc kubenswrapper[4558]: E0120 17:43:03.353394 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-neutron-public-svc: secret "cert-neutron-public-svc" not found Jan 20 17:43:03 crc kubenswrapper[4558]: E0120 17:43:03.353538 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-swift-public-svc: secret "cert-swift-public-svc" not found Jan 20 17:43:03 crc kubenswrapper[4558]: E0120 17:43:03.353816 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7e627450-8edc-44bd-805d-b0034178dff2-public-tls-certs podName:7e627450-8edc-44bd-805d-b0034178dff2 nodeName:}" failed. No retries permitted until 2026-01-20 17:43:11.353789583 +0000 UTC m=+3685.114127541 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/7e627450-8edc-44bd-805d-b0034178dff2-public-tls-certs") pod "neutron-5c4dc89b5d-qp5xf" (UID: "7e627450-8edc-44bd-805d-b0034178dff2") : secret "cert-neutron-public-svc" not found Jan 20 17:43:03 crc kubenswrapper[4558]: E0120 17:43:03.353924 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/858ca371-2b02-46d8-879c-8d859c31d590-public-tls-certs podName:858ca371-2b02-46d8-879c-8d859c31d590 nodeName:}" failed. No retries permitted until 2026-01-20 17:43:11.353892957 +0000 UTC m=+3685.114230924 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/858ca371-2b02-46d8-879c-8d859c31d590-public-tls-certs") pod "swift-proxy-6f98b8f5cb-8bwzs" (UID: "858ca371-2b02-46d8-879c-8d859c31d590") : secret "cert-swift-public-svc" not found Jan 20 17:43:03 crc kubenswrapper[4558]: E0120 17:43:03.354439 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-swift-internal-svc: secret "cert-swift-internal-svc" not found Jan 20 17:43:03 crc kubenswrapper[4558]: E0120 17:43:03.354827 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/858ca371-2b02-46d8-879c-8d859c31d590-internal-tls-certs podName:858ca371-2b02-46d8-879c-8d859c31d590 nodeName:}" failed. No retries permitted until 2026-01-20 17:43:11.354815682 +0000 UTC m=+3685.115153649 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/858ca371-2b02-46d8-879c-8d859c31d590-internal-tls-certs") pod "swift-proxy-6f98b8f5cb-8bwzs" (UID: "858ca371-2b02-46d8-879c-8d859c31d590") : secret "cert-swift-internal-svc" not found Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.359356 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.415794 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"a5db7bd5-e0c6-4645-ab63-3a2a21fdc854","Type":"ContainerStarted","Data":"a50b4edbabca8fd422d0afd87910c0ef1f309e8061f08ee60f8118612b4fe104"} Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.419262 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.419296 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"a5db7bd5-e0c6-4645-ab63-3a2a21fdc854","Type":"ContainerStarted","Data":"61933e699ad2f2864803ebb3f142ff1880d56b79c49386c63c0295f41e90b399"} Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.419429 4558 generic.go:334] "Generic (PLEG): container finished" podID="552b666d-9d00-469a-9692-45a650c89509" containerID="5afb53a255c36f87bdaa69785aabcf9ff9ac19df018da6ed05b316bca41bf72e" exitCode=0 Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.419488 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"552b666d-9d00-469a-9692-45a650c89509","Type":"ContainerDied","Data":"5afb53a255c36f87bdaa69785aabcf9ff9ac19df018da6ed05b316bca41bf72e"} Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.419513 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"552b666d-9d00-469a-9692-45a650c89509","Type":"ContainerDied","Data":"e5837a923aabd046f6d66689f88391fda1f7af743ca86d27cf061ae01952b409"} Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.419534 4558 scope.go:117] "RemoveContainer" containerID="5afb53a255c36f87bdaa69785aabcf9ff9ac19df018da6ed05b316bca41bf72e" Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.419651 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.430088 4558 generic.go:334] "Generic (PLEG): container finished" podID="2206aa45-a065-4965-8540-a2ba7c707155" containerID="55844a34ec7ec510b2a22da96f2d4570e9b9f40732ea28ed1bd947615b0e17b3" exitCode=0 Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.430176 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"2206aa45-a065-4965-8540-a2ba7c707155","Type":"ContainerDied","Data":"55844a34ec7ec510b2a22da96f2d4570e9b9f40732ea28ed1bd947615b0e17b3"} Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.432525 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/memcached-0" podStartSLOduration=1.432508674 podStartE2EDuration="1.432508674s" podCreationTimestamp="2026-01-20 17:43:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:43:03.429528921 +0000 UTC m=+3677.189866888" watchObservedRunningTime="2026-01-20 17:43:03.432508674 +0000 UTC m=+3677.192846631" Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.449643 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.454685 4558 scope.go:117] "RemoveContainer" containerID="14506618325c0d6ec0a676afd800224b20b8f064115011601c645fa282561267" Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.454970 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/552b666d-9d00-469a-9692-45a650c89509-public-tls-certs\") pod \"552b666d-9d00-469a-9692-45a650c89509\" (UID: \"552b666d-9d00-469a-9692-45a650c89509\") " Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.455048 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/552b666d-9d00-469a-9692-45a650c89509-logs\") pod \"552b666d-9d00-469a-9692-45a650c89509\" (UID: \"552b666d-9d00-469a-9692-45a650c89509\") " Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.455157 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/552b666d-9d00-469a-9692-45a650c89509-scripts\") pod \"552b666d-9d00-469a-9692-45a650c89509\" (UID: \"552b666d-9d00-469a-9692-45a650c89509\") " Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.455475 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"552b666d-9d00-469a-9692-45a650c89509\" (UID: \"552b666d-9d00-469a-9692-45a650c89509\") " Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.455543 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/552b666d-9d00-469a-9692-45a650c89509-combined-ca-bundle\") pod \"552b666d-9d00-469a-9692-45a650c89509\" (UID: \"552b666d-9d00-469a-9692-45a650c89509\") " Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.455573 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/552b666d-9d00-469a-9692-45a650c89509-httpd-run\") pod \"552b666d-9d00-469a-9692-45a650c89509\" (UID: \"552b666d-9d00-469a-9692-45a650c89509\") " Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.455637 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/552b666d-9d00-469a-9692-45a650c89509-config-data\") pod \"552b666d-9d00-469a-9692-45a650c89509\" (UID: \"552b666d-9d00-469a-9692-45a650c89509\") " Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.455643 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/552b666d-9d00-469a-9692-45a650c89509-logs" (OuterVolumeSpecName: "logs") pod "552b666d-9d00-469a-9692-45a650c89509" (UID: "552b666d-9d00-469a-9692-45a650c89509"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.455726 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xw9hj\" (UniqueName: \"kubernetes.io/projected/552b666d-9d00-469a-9692-45a650c89509-kube-api-access-xw9hj\") pod \"552b666d-9d00-469a-9692-45a650c89509\" (UID: \"552b666d-9d00-469a-9692-45a650c89509\") " Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.456411 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/552b666d-9d00-469a-9692-45a650c89509-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "552b666d-9d00-469a-9692-45a650c89509" (UID: "552b666d-9d00-469a-9692-45a650c89509"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.457389 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/552b666d-9d00-469a-9692-45a650c89509-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.458224 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/552b666d-9d00-469a-9692-45a650c89509-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.461507 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/552b666d-9d00-469a-9692-45a650c89509-scripts" (OuterVolumeSpecName: "scripts") pod "552b666d-9d00-469a-9692-45a650c89509" (UID: "552b666d-9d00-469a-9692-45a650c89509"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.462391 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/552b666d-9d00-469a-9692-45a650c89509-kube-api-access-xw9hj" (OuterVolumeSpecName: "kube-api-access-xw9hj") pod "552b666d-9d00-469a-9692-45a650c89509" (UID: "552b666d-9d00-469a-9692-45a650c89509"). InnerVolumeSpecName "kube-api-access-xw9hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.463326 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "glance") pod "552b666d-9d00-469a-9692-45a650c89509" (UID: "552b666d-9d00-469a-9692-45a650c89509"). InnerVolumeSpecName "local-storage09-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.481420 4558 scope.go:117] "RemoveContainer" containerID="5afb53a255c36f87bdaa69785aabcf9ff9ac19df018da6ed05b316bca41bf72e" Jan 20 17:43:03 crc kubenswrapper[4558]: E0120 17:43:03.483386 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5afb53a255c36f87bdaa69785aabcf9ff9ac19df018da6ed05b316bca41bf72e\": container with ID starting with 5afb53a255c36f87bdaa69785aabcf9ff9ac19df018da6ed05b316bca41bf72e not found: ID does not exist" containerID="5afb53a255c36f87bdaa69785aabcf9ff9ac19df018da6ed05b316bca41bf72e" Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.483439 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5afb53a255c36f87bdaa69785aabcf9ff9ac19df018da6ed05b316bca41bf72e"} err="failed to get container status \"5afb53a255c36f87bdaa69785aabcf9ff9ac19df018da6ed05b316bca41bf72e\": rpc error: code = NotFound desc = could not find container \"5afb53a255c36f87bdaa69785aabcf9ff9ac19df018da6ed05b316bca41bf72e\": container with ID starting with 5afb53a255c36f87bdaa69785aabcf9ff9ac19df018da6ed05b316bca41bf72e not found: ID does not exist" Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.483466 4558 scope.go:117] "RemoveContainer" containerID="14506618325c0d6ec0a676afd800224b20b8f064115011601c645fa282561267" Jan 20 17:43:03 crc kubenswrapper[4558]: E0120 17:43:03.483885 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"14506618325c0d6ec0a676afd800224b20b8f064115011601c645fa282561267\": container with ID starting with 14506618325c0d6ec0a676afd800224b20b8f064115011601c645fa282561267 not found: ID does not exist" containerID="14506618325c0d6ec0a676afd800224b20b8f064115011601c645fa282561267" Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.483922 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"14506618325c0d6ec0a676afd800224b20b8f064115011601c645fa282561267"} err="failed to get container status \"14506618325c0d6ec0a676afd800224b20b8f064115011601c645fa282561267\": rpc error: code = NotFound desc = could not find container \"14506618325c0d6ec0a676afd800224b20b8f064115011601c645fa282561267\": container with ID starting with 14506618325c0d6ec0a676afd800224b20b8f064115011601c645fa282561267 not found: ID does not exist" Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.494140 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/552b666d-9d00-469a-9692-45a650c89509-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "552b666d-9d00-469a-9692-45a650c89509" (UID: "552b666d-9d00-469a-9692-45a650c89509"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.509227 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/552b666d-9d00-469a-9692-45a650c89509-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "552b666d-9d00-469a-9692-45a650c89509" (UID: "552b666d-9d00-469a-9692-45a650c89509"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.517322 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/552b666d-9d00-469a-9692-45a650c89509-config-data" (OuterVolumeSpecName: "config-data") pod "552b666d-9d00-469a-9692-45a650c89509" (UID: "552b666d-9d00-469a-9692-45a650c89509"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.560264 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2206aa45-a065-4965-8540-a2ba7c707155-config-data\") pod \"2206aa45-a065-4965-8540-a2ba7c707155\" (UID: \"2206aa45-a065-4965-8540-a2ba7c707155\") " Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.560439 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qhcb7\" (UniqueName: \"kubernetes.io/projected/2206aa45-a065-4965-8540-a2ba7c707155-kube-api-access-qhcb7\") pod \"2206aa45-a065-4965-8540-a2ba7c707155\" (UID: \"2206aa45-a065-4965-8540-a2ba7c707155\") " Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.560520 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2206aa45-a065-4965-8540-a2ba7c707155-combined-ca-bundle\") pod \"2206aa45-a065-4965-8540-a2ba7c707155\" (UID: \"2206aa45-a065-4965-8540-a2ba7c707155\") " Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.560572 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/2206aa45-a065-4965-8540-a2ba7c707155-httpd-run\") pod \"2206aa45-a065-4965-8540-a2ba7c707155\" (UID: \"2206aa45-a065-4965-8540-a2ba7c707155\") " Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.560598 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2206aa45-a065-4965-8540-a2ba7c707155-logs\") pod \"2206aa45-a065-4965-8540-a2ba7c707155\" (UID: \"2206aa45-a065-4965-8540-a2ba7c707155\") " Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.560659 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2206aa45-a065-4965-8540-a2ba7c707155-internal-tls-certs\") pod \"2206aa45-a065-4965-8540-a2ba7c707155\" (UID: \"2206aa45-a065-4965-8540-a2ba7c707155\") " Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.560714 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"2206aa45-a065-4965-8540-a2ba7c707155\" (UID: \"2206aa45-a065-4965-8540-a2ba7c707155\") " Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.560843 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2206aa45-a065-4965-8540-a2ba7c707155-scripts\") pod \"2206aa45-a065-4965-8540-a2ba7c707155\" (UID: \"2206aa45-a065-4965-8540-a2ba7c707155\") " Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.561204 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2206aa45-a065-4965-8540-a2ba7c707155-logs" (OuterVolumeSpecName: "logs") pod "2206aa45-a065-4965-8540-a2ba7c707155" (UID: 
"2206aa45-a065-4965-8540-a2ba7c707155"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.561237 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2206aa45-a065-4965-8540-a2ba7c707155-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "2206aa45-a065-4965-8540-a2ba7c707155" (UID: "2206aa45-a065-4965-8540-a2ba7c707155"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.561670 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/552b666d-9d00-469a-9692-45a650c89509-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.561723 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.561738 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/552b666d-9d00-469a-9692-45a650c89509-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.561754 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/552b666d-9d00-469a-9692-45a650c89509-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.561765 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xw9hj\" (UniqueName: \"kubernetes.io/projected/552b666d-9d00-469a-9692-45a650c89509-kube-api-access-xw9hj\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.561776 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/552b666d-9d00-469a-9692-45a650c89509-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.561788 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/2206aa45-a065-4965-8540-a2ba7c707155-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.561802 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2206aa45-a065-4965-8540-a2ba7c707155-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.564045 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "glance") pod "2206aa45-a065-4965-8540-a2ba7c707155" (UID: "2206aa45-a065-4965-8540-a2ba7c707155"). InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.564620 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2206aa45-a065-4965-8540-a2ba7c707155-scripts" (OuterVolumeSpecName: "scripts") pod "2206aa45-a065-4965-8540-a2ba7c707155" (UID: "2206aa45-a065-4965-8540-a2ba7c707155"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.573224 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2206aa45-a065-4965-8540-a2ba7c707155-kube-api-access-qhcb7" (OuterVolumeSpecName: "kube-api-access-qhcb7") pod "2206aa45-a065-4965-8540-a2ba7c707155" (UID: "2206aa45-a065-4965-8540-a2ba7c707155"). InnerVolumeSpecName "kube-api-access-qhcb7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.580588 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.586178 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2206aa45-a065-4965-8540-a2ba7c707155-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2206aa45-a065-4965-8540-a2ba7c707155" (UID: "2206aa45-a065-4965-8540-a2ba7c707155"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.600715 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2206aa45-a065-4965-8540-a2ba7c707155-config-data" (OuterVolumeSpecName: "config-data") pod "2206aa45-a065-4965-8540-a2ba7c707155" (UID: "2206aa45-a065-4965-8540-a2ba7c707155"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.602421 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2206aa45-a065-4965-8540-a2ba7c707155-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "2206aa45-a065-4965-8540-a2ba7c707155" (UID: "2206aa45-a065-4965-8540-a2ba7c707155"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.663661 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qhcb7\" (UniqueName: \"kubernetes.io/projected/2206aa45-a065-4965-8540-a2ba7c707155-kube-api-access-qhcb7\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.663689 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2206aa45-a065-4965-8540-a2ba7c707155-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.663701 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2206aa45-a065-4965-8540-a2ba7c707155-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.663727 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.663740 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.663750 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2206aa45-a065-4965-8540-a2ba7c707155-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.663763 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2206aa45-a065-4965-8540-a2ba7c707155-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.677130 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.758044 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.769757 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.769847 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.770669 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.782240 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.796131 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:43:03 crc kubenswrapper[4558]: E0120 17:43:03.799273 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2206aa45-a065-4965-8540-a2ba7c707155" containerName="glance-httpd" Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.799309 4558 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="2206aa45-a065-4965-8540-a2ba7c707155" containerName="glance-httpd" Jan 20 17:43:03 crc kubenswrapper[4558]: E0120 17:43:03.799331 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="552b666d-9d00-469a-9692-45a650c89509" containerName="glance-log" Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.799338 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="552b666d-9d00-469a-9692-45a650c89509" containerName="glance-log" Jan 20 17:43:03 crc kubenswrapper[4558]: E0120 17:43:03.799352 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="552b666d-9d00-469a-9692-45a650c89509" containerName="glance-httpd" Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.799361 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="552b666d-9d00-469a-9692-45a650c89509" containerName="glance-httpd" Jan 20 17:43:03 crc kubenswrapper[4558]: E0120 17:43:03.799403 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2206aa45-a065-4965-8540-a2ba7c707155" containerName="glance-log" Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.799409 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="2206aa45-a065-4965-8540-a2ba7c707155" containerName="glance-log" Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.799833 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="552b666d-9d00-469a-9692-45a650c89509" containerName="glance-httpd" Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.799848 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="2206aa45-a065-4965-8540-a2ba7c707155" containerName="glance-httpd" Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.799859 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="552b666d-9d00-469a-9692-45a650c89509" containerName="glance-log" Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.799870 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="2206aa45-a065-4965-8540-a2ba7c707155" containerName="glance-log" Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.801678 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.804721 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.808875 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-public-svc" Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.810056 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-external-config-data" Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.856746 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.973671 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c62cee5b-4dd8-4a07-995c-e1d0530d695b-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"c62cee5b-4dd8-4a07-995c-e1d0530d695b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.973923 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"c62cee5b-4dd8-4a07-995c-e1d0530d695b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.973961 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c62cee5b-4dd8-4a07-995c-e1d0530d695b-scripts\") pod \"glance-default-external-api-0\" (UID: \"c62cee5b-4dd8-4a07-995c-e1d0530d695b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.974279 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c62cee5b-4dd8-4a07-995c-e1d0530d695b-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"c62cee5b-4dd8-4a07-995c-e1d0530d695b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.974661 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c62cee5b-4dd8-4a07-995c-e1d0530d695b-config-data\") pod \"glance-default-external-api-0\" (UID: \"c62cee5b-4dd8-4a07-995c-e1d0530d695b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.974952 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c62cee5b-4dd8-4a07-995c-e1d0530d695b-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"c62cee5b-4dd8-4a07-995c-e1d0530d695b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.975132 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/c62cee5b-4dd8-4a07-995c-e1d0530d695b-logs\") pod \"glance-default-external-api-0\" (UID: \"c62cee5b-4dd8-4a07-995c-e1d0530d695b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:03 crc kubenswrapper[4558]: I0120 17:43:03.975317 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mwtgp\" (UniqueName: \"kubernetes.io/projected/c62cee5b-4dd8-4a07-995c-e1d0530d695b-kube-api-access-mwtgp\") pod \"glance-default-external-api-0\" (UID: \"c62cee5b-4dd8-4a07-995c-e1d0530d695b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:04 crc kubenswrapper[4558]: I0120 17:43:04.017912 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:43:04 crc kubenswrapper[4558]: I0120 17:43:04.017966 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:43:04 crc kubenswrapper[4558]: I0120 17:43:04.104050 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:43:04 crc kubenswrapper[4558]: I0120 17:43:04.104348 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c62cee5b-4dd8-4a07-995c-e1d0530d695b-config-data\") pod \"glance-default-external-api-0\" (UID: \"c62cee5b-4dd8-4a07-995c-e1d0530d695b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:04 crc kubenswrapper[4558]: I0120 17:43:04.104577 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c62cee5b-4dd8-4a07-995c-e1d0530d695b-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"c62cee5b-4dd8-4a07-995c-e1d0530d695b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:04 crc kubenswrapper[4558]: I0120 17:43:04.105157 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c62cee5b-4dd8-4a07-995c-e1d0530d695b-logs\") pod \"glance-default-external-api-0\" (UID: \"c62cee5b-4dd8-4a07-995c-e1d0530d695b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:04 crc kubenswrapper[4558]: I0120 17:43:04.105365 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mwtgp\" (UniqueName: \"kubernetes.io/projected/c62cee5b-4dd8-4a07-995c-e1d0530d695b-kube-api-access-mwtgp\") pod \"glance-default-external-api-0\" (UID: \"c62cee5b-4dd8-4a07-995c-e1d0530d695b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:04 crc kubenswrapper[4558]: I0120 17:43:04.105473 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c62cee5b-4dd8-4a07-995c-e1d0530d695b-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"c62cee5b-4dd8-4a07-995c-e1d0530d695b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:04 crc kubenswrapper[4558]: I0120 17:43:04.105607 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c62cee5b-4dd8-4a07-995c-e1d0530d695b-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"c62cee5b-4dd8-4a07-995c-e1d0530d695b\") " 
pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:04 crc kubenswrapper[4558]: I0120 17:43:04.105672 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c62cee5b-4dd8-4a07-995c-e1d0530d695b-logs\") pod \"glance-default-external-api-0\" (UID: \"c62cee5b-4dd8-4a07-995c-e1d0530d695b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:04 crc kubenswrapper[4558]: I0120 17:43:04.105856 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"c62cee5b-4dd8-4a07-995c-e1d0530d695b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:04 crc kubenswrapper[4558]: I0120 17:43:04.105889 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c62cee5b-4dd8-4a07-995c-e1d0530d695b-scripts\") pod \"glance-default-external-api-0\" (UID: \"c62cee5b-4dd8-4a07-995c-e1d0530d695b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:04 crc kubenswrapper[4558]: I0120 17:43:04.106101 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c62cee5b-4dd8-4a07-995c-e1d0530d695b-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"c62cee5b-4dd8-4a07-995c-e1d0530d695b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:04 crc kubenswrapper[4558]: I0120 17:43:04.106115 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"c62cee5b-4dd8-4a07-995c-e1d0530d695b\") device mount path \"/mnt/openstack/pv09\"" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:04 crc kubenswrapper[4558]: I0120 17:43:04.110056 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c62cee5b-4dd8-4a07-995c-e1d0530d695b-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"c62cee5b-4dd8-4a07-995c-e1d0530d695b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:04 crc kubenswrapper[4558]: I0120 17:43:04.110820 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c62cee5b-4dd8-4a07-995c-e1d0530d695b-scripts\") pod \"glance-default-external-api-0\" (UID: \"c62cee5b-4dd8-4a07-995c-e1d0530d695b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:04 crc kubenswrapper[4558]: I0120 17:43:04.111312 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c62cee5b-4dd8-4a07-995c-e1d0530d695b-config-data\") pod \"glance-default-external-api-0\" (UID: \"c62cee5b-4dd8-4a07-995c-e1d0530d695b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:04 crc kubenswrapper[4558]: I0120 17:43:04.116632 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c62cee5b-4dd8-4a07-995c-e1d0530d695b-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"c62cee5b-4dd8-4a07-995c-e1d0530d695b\") " 
pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:04 crc kubenswrapper[4558]: I0120 17:43:04.126694 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mwtgp\" (UniqueName: \"kubernetes.io/projected/c62cee5b-4dd8-4a07-995c-e1d0530d695b-kube-api-access-mwtgp\") pod \"glance-default-external-api-0\" (UID: \"c62cee5b-4dd8-4a07-995c-e1d0530d695b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:04 crc kubenswrapper[4558]: I0120 17:43:04.130441 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"c62cee5b-4dd8-4a07-995c-e1d0530d695b\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:04 crc kubenswrapper[4558]: I0120 17:43:04.422727 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:04 crc kubenswrapper[4558]: I0120 17:43:04.446239 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"2206aa45-a065-4965-8540-a2ba7c707155","Type":"ContainerDied","Data":"8799b33ed4ac8956f001e56712aa2a3930b6dab88b0275fc9af715ecd90381ae"} Jan 20 17:43:04 crc kubenswrapper[4558]: I0120 17:43:04.446311 4558 scope.go:117] "RemoveContainer" containerID="55844a34ec7ec510b2a22da96f2d4570e9b9f40732ea28ed1bd947615b0e17b3" Jan 20 17:43:04 crc kubenswrapper[4558]: I0120 17:43:04.446448 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:04 crc kubenswrapper[4558]: I0120 17:43:04.536156 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:43:04 crc kubenswrapper[4558]: I0120 17:43:04.538761 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:43:04 crc kubenswrapper[4558]: I0120 17:43:04.544224 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:43:04 crc kubenswrapper[4558]: I0120 17:43:04.550232 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:43:04 crc kubenswrapper[4558]: I0120 17:43:04.550526 4558 scope.go:117] "RemoveContainer" containerID="5068001b8f2449f80ce36f3e31ba0522c81e3602c230aa8688078f521d43ddc6" Jan 20 17:43:04 crc kubenswrapper[4558]: I0120 17:43:04.556279 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:43:04 crc kubenswrapper[4558]: I0120 17:43:04.557980 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:04 crc kubenswrapper[4558]: I0120 17:43:04.562139 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-internal-config-data" Jan 20 17:43:04 crc kubenswrapper[4558]: I0120 17:43:04.562573 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-internal-svc" Jan 20 17:43:04 crc kubenswrapper[4558]: I0120 17:43:04.587926 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2206aa45-a065-4965-8540-a2ba7c707155" path="/var/lib/kubelet/pods/2206aa45-a065-4965-8540-a2ba7c707155/volumes" Jan 20 17:43:04 crc kubenswrapper[4558]: I0120 17:43:04.588691 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="552b666d-9d00-469a-9692-45a650c89509" path="/var/lib/kubelet/pods/552b666d-9d00-469a-9692-45a650c89509/volumes" Jan 20 17:43:04 crc kubenswrapper[4558]: I0120 17:43:04.592475 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:43:04 crc kubenswrapper[4558]: I0120 17:43:04.723224 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5fb29028-042a-4108-a63d-a6cd215a6c31-config-data\") pod \"glance-default-internal-api-0\" (UID: \"5fb29028-042a-4108-a63d-a6cd215a6c31\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:04 crc kubenswrapper[4558]: I0120 17:43:04.723536 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rrkw8\" (UniqueName: \"kubernetes.io/projected/5fb29028-042a-4108-a63d-a6cd215a6c31-kube-api-access-rrkw8\") pod \"glance-default-internal-api-0\" (UID: \"5fb29028-042a-4108-a63d-a6cd215a6c31\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:04 crc kubenswrapper[4558]: I0120 17:43:04.723570 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5fb29028-042a-4108-a63d-a6cd215a6c31-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"5fb29028-042a-4108-a63d-a6cd215a6c31\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:04 crc kubenswrapper[4558]: I0120 17:43:04.723787 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5fb29028-042a-4108-a63d-a6cd215a6c31-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"5fb29028-042a-4108-a63d-a6cd215a6c31\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:04 crc kubenswrapper[4558]: I0120 17:43:04.723866 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"5fb29028-042a-4108-a63d-a6cd215a6c31\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:04 crc kubenswrapper[4558]: I0120 17:43:04.723888 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5fb29028-042a-4108-a63d-a6cd215a6c31-httpd-run\") pod \"glance-default-internal-api-0\" (UID: 
\"5fb29028-042a-4108-a63d-a6cd215a6c31\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:04 crc kubenswrapper[4558]: I0120 17:43:04.723976 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5fb29028-042a-4108-a63d-a6cd215a6c31-scripts\") pod \"glance-default-internal-api-0\" (UID: \"5fb29028-042a-4108-a63d-a6cd215a6c31\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:04 crc kubenswrapper[4558]: I0120 17:43:04.724305 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5fb29028-042a-4108-a63d-a6cd215a6c31-logs\") pod \"glance-default-internal-api-0\" (UID: \"5fb29028-042a-4108-a63d-a6cd215a6c31\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:04 crc kubenswrapper[4558]: I0120 17:43:04.826919 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5fb29028-042a-4108-a63d-a6cd215a6c31-logs\") pod \"glance-default-internal-api-0\" (UID: \"5fb29028-042a-4108-a63d-a6cd215a6c31\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:04 crc kubenswrapper[4558]: I0120 17:43:04.827001 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5fb29028-042a-4108-a63d-a6cd215a6c31-config-data\") pod \"glance-default-internal-api-0\" (UID: \"5fb29028-042a-4108-a63d-a6cd215a6c31\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:04 crc kubenswrapper[4558]: I0120 17:43:04.827061 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rrkw8\" (UniqueName: \"kubernetes.io/projected/5fb29028-042a-4108-a63d-a6cd215a6c31-kube-api-access-rrkw8\") pod \"glance-default-internal-api-0\" (UID: \"5fb29028-042a-4108-a63d-a6cd215a6c31\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:04 crc kubenswrapper[4558]: I0120 17:43:04.827097 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5fb29028-042a-4108-a63d-a6cd215a6c31-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"5fb29028-042a-4108-a63d-a6cd215a6c31\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:04 crc kubenswrapper[4558]: I0120 17:43:04.827158 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5fb29028-042a-4108-a63d-a6cd215a6c31-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"5fb29028-042a-4108-a63d-a6cd215a6c31\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:04 crc kubenswrapper[4558]: I0120 17:43:04.827214 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"5fb29028-042a-4108-a63d-a6cd215a6c31\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:04 crc kubenswrapper[4558]: I0120 17:43:04.827239 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5fb29028-042a-4108-a63d-a6cd215a6c31-httpd-run\") pod 
\"glance-default-internal-api-0\" (UID: \"5fb29028-042a-4108-a63d-a6cd215a6c31\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:04 crc kubenswrapper[4558]: I0120 17:43:04.827281 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5fb29028-042a-4108-a63d-a6cd215a6c31-scripts\") pod \"glance-default-internal-api-0\" (UID: \"5fb29028-042a-4108-a63d-a6cd215a6c31\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:04 crc kubenswrapper[4558]: I0120 17:43:04.827556 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5fb29028-042a-4108-a63d-a6cd215a6c31-logs\") pod \"glance-default-internal-api-0\" (UID: \"5fb29028-042a-4108-a63d-a6cd215a6c31\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:04 crc kubenswrapper[4558]: I0120 17:43:04.827815 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5fb29028-042a-4108-a63d-a6cd215a6c31-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"5fb29028-042a-4108-a63d-a6cd215a6c31\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:04 crc kubenswrapper[4558]: I0120 17:43:04.828259 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"5fb29028-042a-4108-a63d-a6cd215a6c31\") device mount path \"/mnt/openstack/pv11\"" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:04 crc kubenswrapper[4558]: I0120 17:43:04.833320 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5fb29028-042a-4108-a63d-a6cd215a6c31-scripts\") pod \"glance-default-internal-api-0\" (UID: \"5fb29028-042a-4108-a63d-a6cd215a6c31\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:04 crc kubenswrapper[4558]: I0120 17:43:04.834318 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5fb29028-042a-4108-a63d-a6cd215a6c31-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"5fb29028-042a-4108-a63d-a6cd215a6c31\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:04 crc kubenswrapper[4558]: I0120 17:43:04.834368 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5fb29028-042a-4108-a63d-a6cd215a6c31-config-data\") pod \"glance-default-internal-api-0\" (UID: \"5fb29028-042a-4108-a63d-a6cd215a6c31\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:04 crc kubenswrapper[4558]: I0120 17:43:04.840227 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5fb29028-042a-4108-a63d-a6cd215a6c31-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"5fb29028-042a-4108-a63d-a6cd215a6c31\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:04 crc kubenswrapper[4558]: I0120 17:43:04.842962 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rrkw8\" (UniqueName: \"kubernetes.io/projected/5fb29028-042a-4108-a63d-a6cd215a6c31-kube-api-access-rrkw8\") pod 
\"glance-default-internal-api-0\" (UID: \"5fb29028-042a-4108-a63d-a6cd215a6c31\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:04 crc kubenswrapper[4558]: I0120 17:43:04.854897 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"5fb29028-042a-4108-a63d-a6cd215a6c31\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:04 crc kubenswrapper[4558]: W0120 17:43:04.881544 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc62cee5b_4dd8_4a07_995c_e1d0530d695b.slice/crio-f33ff773b75cf39ed89448dc8f541ef8ba419a59df23280ad0a9a8e0b4815a34 WatchSource:0}: Error finding container f33ff773b75cf39ed89448dc8f541ef8ba419a59df23280ad0a9a8e0b4815a34: Status 404 returned error can't find the container with id f33ff773b75cf39ed89448dc8f541ef8ba419a59df23280ad0a9a8e0b4815a34 Jan 20 17:43:04 crc kubenswrapper[4558]: I0120 17:43:04.884927 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:43:04 crc kubenswrapper[4558]: I0120 17:43:04.887461 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:05 crc kubenswrapper[4558]: I0120 17:43:05.306852 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:43:05 crc kubenswrapper[4558]: I0120 17:43:05.457356 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"5fb29028-042a-4108-a63d-a6cd215a6c31","Type":"ContainerStarted","Data":"b21900be3b919c3bb7fc99727d394d7a9b4ad2d3e34d5e4d78d620329e20cc86"} Jan 20 17:43:05 crc kubenswrapper[4558]: I0120 17:43:05.460580 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"c62cee5b-4dd8-4a07-995c-e1d0530d695b","Type":"ContainerStarted","Data":"5b28187d77dd8f75fd99f025ba3fddc2ac465afeab96893a305bda76c6c1c56c"} Jan 20 17:43:05 crc kubenswrapper[4558]: I0120 17:43:05.460659 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"c62cee5b-4dd8-4a07-995c-e1d0530d695b","Type":"ContainerStarted","Data":"f33ff773b75cf39ed89448dc8f541ef8ba419a59df23280ad0a9a8e0b4815a34"} Jan 20 17:43:06 crc kubenswrapper[4558]: I0120 17:43:06.482489 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"5fb29028-042a-4108-a63d-a6cd215a6c31","Type":"ContainerStarted","Data":"c5a1a31136c32563a3ef2c47370606c762a9054e380e6b74ad6d51b5c145876e"} Jan 20 17:43:06 crc kubenswrapper[4558]: I0120 17:43:06.482987 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"5fb29028-042a-4108-a63d-a6cd215a6c31","Type":"ContainerStarted","Data":"aa17c31d66a9fe9d9ba2ad19998790f47fa790295400fa6c7baccd8f6d22aa0e"} Jan 20 17:43:06 crc kubenswrapper[4558]: I0120 17:43:06.484656 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" 
event={"ID":"c62cee5b-4dd8-4a07-995c-e1d0530d695b","Type":"ContainerStarted","Data":"9cd39e12244c6e213ae6f15945a045746c5236b6784d7e40141838e71fa71966"} Jan 20 17:43:06 crc kubenswrapper[4558]: I0120 17:43:06.503200 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-internal-api-0" podStartSLOduration=2.5031846890000002 podStartE2EDuration="2.503184689s" podCreationTimestamp="2026-01-20 17:43:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:43:06.500071235 +0000 UTC m=+3680.260409202" watchObservedRunningTime="2026-01-20 17:43:06.503184689 +0000 UTC m=+3680.263522656" Jan 20 17:43:06 crc kubenswrapper[4558]: I0120 17:43:06.525775 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-external-api-0" podStartSLOduration=3.525750538 podStartE2EDuration="3.525750538s" podCreationTimestamp="2026-01-20 17:43:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:43:06.519393175 +0000 UTC m=+3680.279731142" watchObservedRunningTime="2026-01-20 17:43:06.525750538 +0000 UTC m=+3680.286088506" Jan 20 17:43:09 crc kubenswrapper[4558]: I0120 17:43:09.063583 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:43:09 crc kubenswrapper[4558]: I0120 17:43:09.064397 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovsdbserver-nb-0" podUID="64c80877-f13c-4a3d-b352-66dc008f4e1b" containerName="openstack-network-exporter" containerID="cri-o://cbe6e8e1b940e039a8a8149b56ab4e7de324c89518b562816f041b1e4ddc6121" gracePeriod=300 Jan 20 17:43:09 crc kubenswrapper[4558]: I0120 17:43:09.111357 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovsdbserver-nb-0" podUID="64c80877-f13c-4a3d-b352-66dc008f4e1b" containerName="ovsdbserver-nb" containerID="cri-o://f112bf24b33d220f86c5571e1b43c010cb9948d3f6d700a30ce27b148b5f96bb" gracePeriod=300 Jan 20 17:43:09 crc kubenswrapper[4558]: E0120 17:43:09.361300 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-placement-public-svc: secret "cert-placement-public-svc" not found Jan 20 17:43:09 crc kubenswrapper[4558]: E0120 17:43:09.361589 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bf32468e-b0a7-4b81-bb49-c65a95997903-public-tls-certs podName:bf32468e-b0a7-4b81-bb49-c65a95997903 nodeName:}" failed. No retries permitted until 2026-01-20 17:43:25.361569588 +0000 UTC m=+3699.121907554 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/bf32468e-b0a7-4b81-bb49-c65a95997903-public-tls-certs") pod "placement-69d7f65964-r46nm" (UID: "bf32468e-b0a7-4b81-bb49-c65a95997903") : secret "cert-placement-public-svc" not found Jan 20 17:43:09 crc kubenswrapper[4558]: E0120 17:43:09.361610 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-placement-internal-svc: secret "cert-placement-internal-svc" not found Jan 20 17:43:09 crc kubenswrapper[4558]: E0120 17:43:09.362077 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bf32468e-b0a7-4b81-bb49-c65a95997903-internal-tls-certs podName:bf32468e-b0a7-4b81-bb49-c65a95997903 nodeName:}" failed. 
No retries permitted until 2026-01-20 17:43:25.362067174 +0000 UTC m=+3699.122405141 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/bf32468e-b0a7-4b81-bb49-c65a95997903-internal-tls-certs") pod "placement-69d7f65964-r46nm" (UID: "bf32468e-b0a7-4b81-bb49-c65a95997903") : secret "cert-placement-internal-svc" not found Jan 20 17:43:09 crc kubenswrapper[4558]: I0120 17:43:09.402284 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:43:09 crc kubenswrapper[4558]: I0120 17:43:09.402658 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovsdbserver-sb-0" podUID="b5ab3ab7-6929-4165-91ad-860f5f109147" containerName="openstack-network-exporter" containerID="cri-o://1c7d263212edab169a57b366b0d7bfd66e4b2a4fcfe052179e753959b5379043" gracePeriod=300 Jan 20 17:43:09 crc kubenswrapper[4558]: I0120 17:43:09.470715 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovsdbserver-sb-0" podUID="b5ab3ab7-6929-4165-91ad-860f5f109147" containerName="ovsdbserver-sb" containerID="cri-o://66f66c6ec7d01615c200c8de367e75e38763936522bc9d564df7a1fc052430d3" gracePeriod=300 Jan 20 17:43:09 crc kubenswrapper[4558]: I0120 17:43:09.530597 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-nb-0_64c80877-f13c-4a3d-b352-66dc008f4e1b/ovsdbserver-nb/0.log" Jan 20 17:43:09 crc kubenswrapper[4558]: I0120 17:43:09.530636 4558 generic.go:334] "Generic (PLEG): container finished" podID="64c80877-f13c-4a3d-b352-66dc008f4e1b" containerID="cbe6e8e1b940e039a8a8149b56ab4e7de324c89518b562816f041b1e4ddc6121" exitCode=2 Jan 20 17:43:09 crc kubenswrapper[4558]: I0120 17:43:09.530652 4558 generic.go:334] "Generic (PLEG): container finished" podID="64c80877-f13c-4a3d-b352-66dc008f4e1b" containerID="f112bf24b33d220f86c5571e1b43c010cb9948d3f6d700a30ce27b148b5f96bb" exitCode=143 Jan 20 17:43:09 crc kubenswrapper[4558]: I0120 17:43:09.530706 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"64c80877-f13c-4a3d-b352-66dc008f4e1b","Type":"ContainerDied","Data":"cbe6e8e1b940e039a8a8149b56ab4e7de324c89518b562816f041b1e4ddc6121"} Jan 20 17:43:09 crc kubenswrapper[4558]: I0120 17:43:09.530737 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"64c80877-f13c-4a3d-b352-66dc008f4e1b","Type":"ContainerDied","Data":"f112bf24b33d220f86c5571e1b43c010cb9948d3f6d700a30ce27b148b5f96bb"} Jan 20 17:43:09 crc kubenswrapper[4558]: I0120 17:43:09.530746 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"64c80877-f13c-4a3d-b352-66dc008f4e1b","Type":"ContainerDied","Data":"2a03679a313abd285c2b2c5a0fb9ab4133d4834b509b22347420d2ccfbd78825"} Jan 20 17:43:09 crc kubenswrapper[4558]: I0120 17:43:09.530756 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2a03679a313abd285c2b2c5a0fb9ab4133d4834b509b22347420d2ccfbd78825" Jan 20 17:43:09 crc kubenswrapper[4558]: I0120 17:43:09.533189 4558 generic.go:334] "Generic (PLEG): container finished" podID="25cfe04f-2194-405f-a9d7-82181e8ac22a" containerID="080004aa669f36ce8bb441200bb1b70a9222d0a13602ec478e6d8d26c1fbf426" exitCode=0 Jan 20 17:43:09 crc kubenswrapper[4558]: I0120 17:43:09.533216 4558 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"25cfe04f-2194-405f-a9d7-82181e8ac22a","Type":"ContainerDied","Data":"080004aa669f36ce8bb441200bb1b70a9222d0a13602ec478e6d8d26c1fbf426"} Jan 20 17:43:09 crc kubenswrapper[4558]: I0120 17:43:09.592534 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-nb-0_64c80877-f13c-4a3d-b352-66dc008f4e1b/ovsdbserver-nb/0.log" Jan 20 17:43:09 crc kubenswrapper[4558]: I0120 17:43:09.592694 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:43:09 crc kubenswrapper[4558]: I0120 17:43:09.772701 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jqplc\" (UniqueName: \"kubernetes.io/projected/64c80877-f13c-4a3d-b352-66dc008f4e1b-kube-api-access-jqplc\") pod \"64c80877-f13c-4a3d-b352-66dc008f4e1b\" (UID: \"64c80877-f13c-4a3d-b352-66dc008f4e1b\") " Jan 20 17:43:09 crc kubenswrapper[4558]: I0120 17:43:09.773016 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64c80877-f13c-4a3d-b352-66dc008f4e1b-combined-ca-bundle\") pod \"64c80877-f13c-4a3d-b352-66dc008f4e1b\" (UID: \"64c80877-f13c-4a3d-b352-66dc008f4e1b\") " Jan 20 17:43:09 crc kubenswrapper[4558]: I0120 17:43:09.773049 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/64c80877-f13c-4a3d-b352-66dc008f4e1b-metrics-certs-tls-certs\") pod \"64c80877-f13c-4a3d-b352-66dc008f4e1b\" (UID: \"64c80877-f13c-4a3d-b352-66dc008f4e1b\") " Jan 20 17:43:09 crc kubenswrapper[4558]: I0120 17:43:09.773141 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/64c80877-f13c-4a3d-b352-66dc008f4e1b-ovsdb-rundir\") pod \"64c80877-f13c-4a3d-b352-66dc008f4e1b\" (UID: \"64c80877-f13c-4a3d-b352-66dc008f4e1b\") " Jan 20 17:43:09 crc kubenswrapper[4558]: I0120 17:43:09.773210 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64c80877-f13c-4a3d-b352-66dc008f4e1b-config\") pod \"64c80877-f13c-4a3d-b352-66dc008f4e1b\" (UID: \"64c80877-f13c-4a3d-b352-66dc008f4e1b\") " Jan 20 17:43:09 crc kubenswrapper[4558]: I0120 17:43:09.773278 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/64c80877-f13c-4a3d-b352-66dc008f4e1b-ovsdbserver-nb-tls-certs\") pod \"64c80877-f13c-4a3d-b352-66dc008f4e1b\" (UID: \"64c80877-f13c-4a3d-b352-66dc008f4e1b\") " Jan 20 17:43:09 crc kubenswrapper[4558]: I0120 17:43:09.773308 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-nb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"64c80877-f13c-4a3d-b352-66dc008f4e1b\" (UID: \"64c80877-f13c-4a3d-b352-66dc008f4e1b\") " Jan 20 17:43:09 crc kubenswrapper[4558]: I0120 17:43:09.773334 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/64c80877-f13c-4a3d-b352-66dc008f4e1b-scripts\") pod \"64c80877-f13c-4a3d-b352-66dc008f4e1b\" (UID: \"64c80877-f13c-4a3d-b352-66dc008f4e1b\") " Jan 20 17:43:09 crc kubenswrapper[4558]: I0120 17:43:09.773712 4558 operation_generator.go:803] 
UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/64c80877-f13c-4a3d-b352-66dc008f4e1b-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "64c80877-f13c-4a3d-b352-66dc008f4e1b" (UID: "64c80877-f13c-4a3d-b352-66dc008f4e1b"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:43:09 crc kubenswrapper[4558]: I0120 17:43:09.774146 4558 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/64c80877-f13c-4a3d-b352-66dc008f4e1b-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:09 crc kubenswrapper[4558]: I0120 17:43:09.775367 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/64c80877-f13c-4a3d-b352-66dc008f4e1b-scripts" (OuterVolumeSpecName: "scripts") pod "64c80877-f13c-4a3d-b352-66dc008f4e1b" (UID: "64c80877-f13c-4a3d-b352-66dc008f4e1b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:43:09 crc kubenswrapper[4558]: I0120 17:43:09.776053 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/64c80877-f13c-4a3d-b352-66dc008f4e1b-config" (OuterVolumeSpecName: "config") pod "64c80877-f13c-4a3d-b352-66dc008f4e1b" (UID: "64c80877-f13c-4a3d-b352-66dc008f4e1b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:43:09 crc kubenswrapper[4558]: I0120 17:43:09.780584 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage02-crc" (OuterVolumeSpecName: "ovndbcluster-nb-etc-ovn") pod "64c80877-f13c-4a3d-b352-66dc008f4e1b" (UID: "64c80877-f13c-4a3d-b352-66dc008f4e1b"). InnerVolumeSpecName "local-storage02-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:43:09 crc kubenswrapper[4558]: I0120 17:43:09.780650 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/64c80877-f13c-4a3d-b352-66dc008f4e1b-kube-api-access-jqplc" (OuterVolumeSpecName: "kube-api-access-jqplc") pod "64c80877-f13c-4a3d-b352-66dc008f4e1b" (UID: "64c80877-f13c-4a3d-b352-66dc008f4e1b"). InnerVolumeSpecName "kube-api-access-jqplc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:43:09 crc kubenswrapper[4558]: I0120 17:43:09.812358 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64c80877-f13c-4a3d-b352-66dc008f4e1b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "64c80877-f13c-4a3d-b352-66dc008f4e1b" (UID: "64c80877-f13c-4a3d-b352-66dc008f4e1b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:09 crc kubenswrapper[4558]: I0120 17:43:09.844659 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64c80877-f13c-4a3d-b352-66dc008f4e1b-ovsdbserver-nb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-nb-tls-certs") pod "64c80877-f13c-4a3d-b352-66dc008f4e1b" (UID: "64c80877-f13c-4a3d-b352-66dc008f4e1b"). InnerVolumeSpecName "ovsdbserver-nb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:09 crc kubenswrapper[4558]: I0120 17:43:09.844865 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64c80877-f13c-4a3d-b352-66dc008f4e1b-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "64c80877-f13c-4a3d-b352-66dc008f4e1b" (UID: "64c80877-f13c-4a3d-b352-66dc008f4e1b"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:09 crc kubenswrapper[4558]: I0120 17:43:09.876812 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64c80877-f13c-4a3d-b352-66dc008f4e1b-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:09 crc kubenswrapper[4558]: I0120 17:43:09.876835 4558 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/64c80877-f13c-4a3d-b352-66dc008f4e1b-ovsdbserver-nb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:09 crc kubenswrapper[4558]: I0120 17:43:09.876862 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" " Jan 20 17:43:09 crc kubenswrapper[4558]: I0120 17:43:09.876873 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/64c80877-f13c-4a3d-b352-66dc008f4e1b-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:09 crc kubenswrapper[4558]: I0120 17:43:09.876882 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jqplc\" (UniqueName: \"kubernetes.io/projected/64c80877-f13c-4a3d-b352-66dc008f4e1b-kube-api-access-jqplc\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:09 crc kubenswrapper[4558]: I0120 17:43:09.876893 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64c80877-f13c-4a3d-b352-66dc008f4e1b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:09 crc kubenswrapper[4558]: I0120 17:43:09.876924 4558 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/64c80877-f13c-4a3d-b352-66dc008f4e1b-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:09 crc kubenswrapper[4558]: I0120 17:43:09.919958 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage02-crc" (UniqueName: "kubernetes.io/local-volume/local-storage02-crc") on node "crc" Jan 20 17:43:09 crc kubenswrapper[4558]: I0120 17:43:09.979143 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.132045 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-sb-0_b5ab3ab7-6929-4165-91ad-860f5f109147/ovsdbserver-sb/0.log" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.132129 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.285616 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/b5ab3ab7-6929-4165-91ad-860f5f109147-metrics-certs-tls-certs\") pod \"b5ab3ab7-6929-4165-91ad-860f5f109147\" (UID: \"b5ab3ab7-6929-4165-91ad-860f5f109147\") " Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.286116 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b5ab3ab7-6929-4165-91ad-860f5f109147-config\") pod \"b5ab3ab7-6929-4165-91ad-860f5f109147\" (UID: \"b5ab3ab7-6929-4165-91ad-860f5f109147\") " Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.286216 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-sb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"b5ab3ab7-6929-4165-91ad-860f5f109147\" (UID: \"b5ab3ab7-6929-4165-91ad-860f5f109147\") " Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.286277 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b5ab3ab7-6929-4165-91ad-860f5f109147-scripts\") pod \"b5ab3ab7-6929-4165-91ad-860f5f109147\" (UID: \"b5ab3ab7-6929-4165-91ad-860f5f109147\") " Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.286356 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b5ab3ab7-6929-4165-91ad-860f5f109147-ovsdbserver-sb-tls-certs\") pod \"b5ab3ab7-6929-4165-91ad-860f5f109147\" (UID: \"b5ab3ab7-6929-4165-91ad-860f5f109147\") " Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.286931 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b5ab3ab7-6929-4165-91ad-860f5f109147-scripts" (OuterVolumeSpecName: "scripts") pod "b5ab3ab7-6929-4165-91ad-860f5f109147" (UID: "b5ab3ab7-6929-4165-91ad-860f5f109147"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.287043 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b5ab3ab7-6929-4165-91ad-860f5f109147-config" (OuterVolumeSpecName: "config") pod "b5ab3ab7-6929-4165-91ad-860f5f109147" (UID: "b5ab3ab7-6929-4165-91ad-860f5f109147"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.287080 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5ab3ab7-6929-4165-91ad-860f5f109147-combined-ca-bundle\") pod \"b5ab3ab7-6929-4165-91ad-860f5f109147\" (UID: \"b5ab3ab7-6929-4165-91ad-860f5f109147\") " Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.287285 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pb654\" (UniqueName: \"kubernetes.io/projected/b5ab3ab7-6929-4165-91ad-860f5f109147-kube-api-access-pb654\") pod \"b5ab3ab7-6929-4165-91ad-860f5f109147\" (UID: \"b5ab3ab7-6929-4165-91ad-860f5f109147\") " Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.287378 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/b5ab3ab7-6929-4165-91ad-860f5f109147-ovsdb-rundir\") pod \"b5ab3ab7-6929-4165-91ad-860f5f109147\" (UID: \"b5ab3ab7-6929-4165-91ad-860f5f109147\") " Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.288231 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b5ab3ab7-6929-4165-91ad-860f5f109147-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "b5ab3ab7-6929-4165-91ad-860f5f109147" (UID: "b5ab3ab7-6929-4165-91ad-860f5f109147"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.289141 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b5ab3ab7-6929-4165-91ad-860f5f109147-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.289220 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b5ab3ab7-6929-4165-91ad-860f5f109147-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.289267 4558 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/b5ab3ab7-6929-4165-91ad-860f5f109147-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.292408 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "ovndbcluster-sb-etc-ovn") pod "b5ab3ab7-6929-4165-91ad-860f5f109147" (UID: "b5ab3ab7-6929-4165-91ad-860f5f109147"). InnerVolumeSpecName "local-storage03-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.301396 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b5ab3ab7-6929-4165-91ad-860f5f109147-kube-api-access-pb654" (OuterVolumeSpecName: "kube-api-access-pb654") pod "b5ab3ab7-6929-4165-91ad-860f5f109147" (UID: "b5ab3ab7-6929-4165-91ad-860f5f109147"). InnerVolumeSpecName "kube-api-access-pb654". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.316388 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b5ab3ab7-6929-4165-91ad-860f5f109147-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b5ab3ab7-6929-4165-91ad-860f5f109147" (UID: "b5ab3ab7-6929-4165-91ad-860f5f109147"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.349452 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b5ab3ab7-6929-4165-91ad-860f5f109147-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "b5ab3ab7-6929-4165-91ad-860f5f109147" (UID: "b5ab3ab7-6929-4165-91ad-860f5f109147"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.352392 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b5ab3ab7-6929-4165-91ad-860f5f109147-ovsdbserver-sb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-sb-tls-certs") pod "b5ab3ab7-6929-4165-91ad-860f5f109147" (UID: "b5ab3ab7-6929-4165-91ad-860f5f109147"). InnerVolumeSpecName "ovsdbserver-sb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.391281 4558 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/b5ab3ab7-6929-4165-91ad-860f5f109147-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.391508 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" " Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.395275 4558 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b5ab3ab7-6929-4165-91ad-860f5f109147-ovsdbserver-sb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.395369 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5ab3ab7-6929-4165-91ad-860f5f109147-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.395425 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pb654\" (UniqueName: \"kubernetes.io/projected/b5ab3ab7-6929-4165-91ad-860f5f109147-kube-api-access-pb654\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:10 crc kubenswrapper[4558]: E0120 17:43:10.391466 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-ovnnorthd-ovndbs: secret "cert-ovnnorthd-ovndbs" not found Jan 20 17:43:10 crc kubenswrapper[4558]: E0120 17:43:10.395587 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4d2b64b6-6310-4348-a016-e7d8317e00d9-ovn-northd-tls-certs podName:4d2b64b6-6310-4348-a016-e7d8317e00d9 nodeName:}" failed. No retries permitted until 2026-01-20 17:43:26.395563774 +0000 UTC m=+3700.155901741 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "ovn-northd-tls-certs" (UniqueName: "kubernetes.io/secret/4d2b64b6-6310-4348-a016-e7d8317e00d9-ovn-northd-tls-certs") pod "ovn-northd-0" (UID: "4d2b64b6-6310-4348-a016-e7d8317e00d9") : secret "cert-ovnnorthd-ovndbs" not found Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.411365 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.497513 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.545219 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-sb-0_b5ab3ab7-6929-4165-91ad-860f5f109147/ovsdbserver-sb/0.log" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.545330 4558 generic.go:334] "Generic (PLEG): container finished" podID="b5ab3ab7-6929-4165-91ad-860f5f109147" containerID="1c7d263212edab169a57b366b0d7bfd66e4b2a4fcfe052179e753959b5379043" exitCode=2 Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.545387 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"b5ab3ab7-6929-4165-91ad-860f5f109147","Type":"ContainerDied","Data":"1c7d263212edab169a57b366b0d7bfd66e4b2a4fcfe052179e753959b5379043"} Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.545415 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.545455 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"b5ab3ab7-6929-4165-91ad-860f5f109147","Type":"ContainerDied","Data":"66f66c6ec7d01615c200c8de367e75e38763936522bc9d564df7a1fc052430d3"} Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.545486 4558 scope.go:117] "RemoveContainer" containerID="1c7d263212edab169a57b366b0d7bfd66e4b2a4fcfe052179e753959b5379043" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.545406 4558 generic.go:334] "Generic (PLEG): container finished" podID="b5ab3ab7-6929-4165-91ad-860f5f109147" containerID="66f66c6ec7d01615c200c8de367e75e38763936522bc9d564df7a1fc052430d3" exitCode=143 Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.545808 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"b5ab3ab7-6929-4165-91ad-860f5f109147","Type":"ContainerDied","Data":"36646009b07943d76f79ec7d2b41ba3a0295d123b4995172e48f7668d169667f"} Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.549884 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.551200 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"25cfe04f-2194-405f-a9d7-82181e8ac22a","Type":"ContainerStarted","Data":"d856400d868c5a18aeb6d59839565030a9d6d928c1563362781a0530b884b4e7"} Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.638340 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.649558 4558 scope.go:117] "RemoveContainer" containerID="66f66c6ec7d01615c200c8de367e75e38763936522bc9d564df7a1fc052430d3" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.653923 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.666552 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.674470 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.684258 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:43:10 crc kubenswrapper[4558]: E0120 17:43:10.684771 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64c80877-f13c-4a3d-b352-66dc008f4e1b" containerName="ovsdbserver-nb" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.684787 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="64c80877-f13c-4a3d-b352-66dc008f4e1b" containerName="ovsdbserver-nb" Jan 20 17:43:10 crc kubenswrapper[4558]: E0120 17:43:10.684812 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5ab3ab7-6929-4165-91ad-860f5f109147" containerName="openstack-network-exporter" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.684818 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5ab3ab7-6929-4165-91ad-860f5f109147" containerName="openstack-network-exporter" Jan 20 17:43:10 crc kubenswrapper[4558]: E0120 17:43:10.684830 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64c80877-f13c-4a3d-b352-66dc008f4e1b" containerName="openstack-network-exporter" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.684836 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="64c80877-f13c-4a3d-b352-66dc008f4e1b" containerName="openstack-network-exporter" Jan 20 17:43:10 crc kubenswrapper[4558]: E0120 17:43:10.684872 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5ab3ab7-6929-4165-91ad-860f5f109147" containerName="ovsdbserver-sb" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.684877 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5ab3ab7-6929-4165-91ad-860f5f109147" containerName="ovsdbserver-sb" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.685082 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="64c80877-f13c-4a3d-b352-66dc008f4e1b" containerName="ovsdbserver-nb" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.685106 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b5ab3ab7-6929-4165-91ad-860f5f109147" containerName="openstack-network-exporter" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.685114 4558 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="b5ab3ab7-6929-4165-91ad-860f5f109147" containerName="ovsdbserver-sb" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.685122 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="64c80877-f13c-4a3d-b352-66dc008f4e1b" containerName="openstack-network-exporter" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.686219 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.688990 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovndbcluster-sb-config" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.689142 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ovndbcluster-sb-ovndbs" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.689346 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ovncluster-ovndbcluster-sb-dockercfg-b24l8" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.689424 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovndbcluster-sb-scripts" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.690627 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.691966 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.695808 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.696480 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovndbcluster-nb-config" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.696806 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ovndbcluster-nb-ovndbs" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.696938 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ovncluster-ovndbcluster-nb-dockercfg-jdmxf" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.697061 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovndbcluster-nb-scripts" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.708421 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.718287 4558 scope.go:117] "RemoveContainer" containerID="1c7d263212edab169a57b366b0d7bfd66e4b2a4fcfe052179e753959b5379043" Jan 20 17:43:10 crc kubenswrapper[4558]: E0120 17:43:10.721762 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1c7d263212edab169a57b366b0d7bfd66e4b2a4fcfe052179e753959b5379043\": container with ID starting with 1c7d263212edab169a57b366b0d7bfd66e4b2a4fcfe052179e753959b5379043 not found: ID does not exist" containerID="1c7d263212edab169a57b366b0d7bfd66e4b2a4fcfe052179e753959b5379043" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.721802 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1c7d263212edab169a57b366b0d7bfd66e4b2a4fcfe052179e753959b5379043"} err="failed to get container status 
\"1c7d263212edab169a57b366b0d7bfd66e4b2a4fcfe052179e753959b5379043\": rpc error: code = NotFound desc = could not find container \"1c7d263212edab169a57b366b0d7bfd66e4b2a4fcfe052179e753959b5379043\": container with ID starting with 1c7d263212edab169a57b366b0d7bfd66e4b2a4fcfe052179e753959b5379043 not found: ID does not exist" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.721828 4558 scope.go:117] "RemoveContainer" containerID="66f66c6ec7d01615c200c8de367e75e38763936522bc9d564df7a1fc052430d3" Jan 20 17:43:10 crc kubenswrapper[4558]: E0120 17:43:10.724682 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"66f66c6ec7d01615c200c8de367e75e38763936522bc9d564df7a1fc052430d3\": container with ID starting with 66f66c6ec7d01615c200c8de367e75e38763936522bc9d564df7a1fc052430d3 not found: ID does not exist" containerID="66f66c6ec7d01615c200c8de367e75e38763936522bc9d564df7a1fc052430d3" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.724720 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"66f66c6ec7d01615c200c8de367e75e38763936522bc9d564df7a1fc052430d3"} err="failed to get container status \"66f66c6ec7d01615c200c8de367e75e38763936522bc9d564df7a1fc052430d3\": rpc error: code = NotFound desc = could not find container \"66f66c6ec7d01615c200c8de367e75e38763936522bc9d564df7a1fc052430d3\": container with ID starting with 66f66c6ec7d01615c200c8de367e75e38763936522bc9d564df7a1fc052430d3 not found: ID does not exist" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.724745 4558 scope.go:117] "RemoveContainer" containerID="1c7d263212edab169a57b366b0d7bfd66e4b2a4fcfe052179e753959b5379043" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.728436 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1c7d263212edab169a57b366b0d7bfd66e4b2a4fcfe052179e753959b5379043"} err="failed to get container status \"1c7d263212edab169a57b366b0d7bfd66e4b2a4fcfe052179e753959b5379043\": rpc error: code = NotFound desc = could not find container \"1c7d263212edab169a57b366b0d7bfd66e4b2a4fcfe052179e753959b5379043\": container with ID starting with 1c7d263212edab169a57b366b0d7bfd66e4b2a4fcfe052179e753959b5379043 not found: ID does not exist" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.728467 4558 scope.go:117] "RemoveContainer" containerID="66f66c6ec7d01615c200c8de367e75e38763936522bc9d564df7a1fc052430d3" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.731337 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"66f66c6ec7d01615c200c8de367e75e38763936522bc9d564df7a1fc052430d3"} err="failed to get container status \"66f66c6ec7d01615c200c8de367e75e38763936522bc9d564df7a1fc052430d3\": rpc error: code = NotFound desc = could not find container \"66f66c6ec7d01615c200c8de367e75e38763936522bc9d564df7a1fc052430d3\": container with ID starting with 66f66c6ec7d01615c200c8de367e75e38763936522bc9d564df7a1fc052430d3 not found: ID does not exist" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.803578 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"5f087f7c-913a-4ab8-b905-5caa84469c77\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.803687 4558 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x6tnw\" (UniqueName: \"kubernetes.io/projected/5f087f7c-913a-4ab8-b905-5caa84469c77-kube-api-access-x6tnw\") pod \"ovsdbserver-nb-0\" (UID: \"5f087f7c-913a-4ab8-b905-5caa84469c77\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.803734 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5f087f7c-913a-4ab8-b905-5caa84469c77-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"5f087f7c-913a-4ab8-b905-5caa84469c77\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.803755 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/5f087f7c-913a-4ab8-b905-5caa84469c77-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"5f087f7c-913a-4ab8-b905-5caa84469c77\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.803772 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-sb-0\" (UID: \"0394e10b-3811-4eef-a9ef-a785e1574649\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.803791 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/5f087f7c-913a-4ab8-b905-5caa84469c77-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"5f087f7c-913a-4ab8-b905-5caa84469c77\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.803858 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/5f087f7c-913a-4ab8-b905-5caa84469c77-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"5f087f7c-913a-4ab8-b905-5caa84469c77\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.803879 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/0394e10b-3811-4eef-a9ef-a785e1574649-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"0394e10b-3811-4eef-a9ef-a785e1574649\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.803893 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6mj6z\" (UniqueName: \"kubernetes.io/projected/0394e10b-3811-4eef-a9ef-a785e1574649-kube-api-access-6mj6z\") pod \"ovsdbserver-sb-0\" (UID: \"0394e10b-3811-4eef-a9ef-a785e1574649\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.803916 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f087f7c-913a-4ab8-b905-5caa84469c77-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"5f087f7c-913a-4ab8-b905-5caa84469c77\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:43:10 crc 
kubenswrapper[4558]: I0120 17:43:10.803944 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0394e10b-3811-4eef-a9ef-a785e1574649-config\") pod \"ovsdbserver-sb-0\" (UID: \"0394e10b-3811-4eef-a9ef-a785e1574649\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.803963 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0394e10b-3811-4eef-a9ef-a785e1574649-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"0394e10b-3811-4eef-a9ef-a785e1574649\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.803990 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5f087f7c-913a-4ab8-b905-5caa84469c77-config\") pod \"ovsdbserver-nb-0\" (UID: \"5f087f7c-913a-4ab8-b905-5caa84469c77\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.804012 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0394e10b-3811-4eef-a9ef-a785e1574649-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"0394e10b-3811-4eef-a9ef-a785e1574649\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.804037 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/0394e10b-3811-4eef-a9ef-a785e1574649-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"0394e10b-3811-4eef-a9ef-a785e1574649\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.804059 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/0394e10b-3811-4eef-a9ef-a785e1574649-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"0394e10b-3811-4eef-a9ef-a785e1574649\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.847026 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.906038 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0394e10b-3811-4eef-a9ef-a785e1574649-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"0394e10b-3811-4eef-a9ef-a785e1574649\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.906084 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/0394e10b-3811-4eef-a9ef-a785e1574649-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"0394e10b-3811-4eef-a9ef-a785e1574649\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.906115 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/0394e10b-3811-4eef-a9ef-a785e1574649-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"0394e10b-3811-4eef-a9ef-a785e1574649\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.907614 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0394e10b-3811-4eef-a9ef-a785e1574649-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"0394e10b-3811-4eef-a9ef-a785e1574649\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.909981 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"5f087f7c-913a-4ab8-b905-5caa84469c77\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.910245 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"5f087f7c-913a-4ab8-b905-5caa84469c77\") device mount path \"/mnt/openstack/pv02\"" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.910493 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/0394e10b-3811-4eef-a9ef-a785e1574649-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"0394e10b-3811-4eef-a9ef-a785e1574649\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.910791 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/0394e10b-3811-4eef-a9ef-a785e1574649-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"0394e10b-3811-4eef-a9ef-a785e1574649\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.914374 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x6tnw\" (UniqueName: \"kubernetes.io/projected/5f087f7c-913a-4ab8-b905-5caa84469c77-kube-api-access-x6tnw\") pod \"ovsdbserver-nb-0\" (UID: \"5f087f7c-913a-4ab8-b905-5caa84469c77\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.914432 4558 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5f087f7c-913a-4ab8-b905-5caa84469c77-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"5f087f7c-913a-4ab8-b905-5caa84469c77\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.914451 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/5f087f7c-913a-4ab8-b905-5caa84469c77-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"5f087f7c-913a-4ab8-b905-5caa84469c77\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.914468 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-sb-0\" (UID: \"0394e10b-3811-4eef-a9ef-a785e1574649\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.914489 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/5f087f7c-913a-4ab8-b905-5caa84469c77-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"5f087f7c-913a-4ab8-b905-5caa84469c77\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.914563 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/5f087f7c-913a-4ab8-b905-5caa84469c77-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"5f087f7c-913a-4ab8-b905-5caa84469c77\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.914581 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/0394e10b-3811-4eef-a9ef-a785e1574649-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"0394e10b-3811-4eef-a9ef-a785e1574649\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.914597 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6mj6z\" (UniqueName: \"kubernetes.io/projected/0394e10b-3811-4eef-a9ef-a785e1574649-kube-api-access-6mj6z\") pod \"ovsdbserver-sb-0\" (UID: \"0394e10b-3811-4eef-a9ef-a785e1574649\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.914614 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f087f7c-913a-4ab8-b905-5caa84469c77-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"5f087f7c-913a-4ab8-b905-5caa84469c77\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.914640 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0394e10b-3811-4eef-a9ef-a785e1574649-config\") pod \"ovsdbserver-sb-0\" (UID: \"0394e10b-3811-4eef-a9ef-a785e1574649\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.914660 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/0394e10b-3811-4eef-a9ef-a785e1574649-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"0394e10b-3811-4eef-a9ef-a785e1574649\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.914690 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5f087f7c-913a-4ab8-b905-5caa84469c77-config\") pod \"ovsdbserver-nb-0\" (UID: \"5f087f7c-913a-4ab8-b905-5caa84469c77\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.915368 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5f087f7c-913a-4ab8-b905-5caa84469c77-config\") pod \"ovsdbserver-nb-0\" (UID: \"5f087f7c-913a-4ab8-b905-5caa84469c77\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.916157 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/0394e10b-3811-4eef-a9ef-a785e1574649-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"0394e10b-3811-4eef-a9ef-a785e1574649\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.916284 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-sb-0\" (UID: \"0394e10b-3811-4eef-a9ef-a785e1574649\") device mount path \"/mnt/openstack/pv03\"" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.916397 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5f087f7c-913a-4ab8-b905-5caa84469c77-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"5f087f7c-913a-4ab8-b905-5caa84469c77\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.916602 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/5f087f7c-913a-4ab8-b905-5caa84469c77-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"5f087f7c-913a-4ab8-b905-5caa84469c77\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.918054 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0394e10b-3811-4eef-a9ef-a785e1574649-config\") pod \"ovsdbserver-sb-0\" (UID: \"0394e10b-3811-4eef-a9ef-a785e1574649\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.920686 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/5f087f7c-913a-4ab8-b905-5caa84469c77-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"5f087f7c-913a-4ab8-b905-5caa84469c77\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.921831 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/5f087f7c-913a-4ab8-b905-5caa84469c77-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"5f087f7c-913a-4ab8-b905-5caa84469c77\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:43:10 crc 
kubenswrapper[4558]: I0120 17:43:10.924416 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0394e10b-3811-4eef-a9ef-a785e1574649-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"0394e10b-3811-4eef-a9ef-a785e1574649\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.930059 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x6tnw\" (UniqueName: \"kubernetes.io/projected/5f087f7c-913a-4ab8-b905-5caa84469c77-kube-api-access-x6tnw\") pod \"ovsdbserver-nb-0\" (UID: \"5f087f7c-913a-4ab8-b905-5caa84469c77\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.931576 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f087f7c-913a-4ab8-b905-5caa84469c77-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"5f087f7c-913a-4ab8-b905-5caa84469c77\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.932883 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6mj6z\" (UniqueName: \"kubernetes.io/projected/0394e10b-3811-4eef-a9ef-a785e1574649-kube-api-access-6mj6z\") pod \"ovsdbserver-sb-0\" (UID: \"0394e10b-3811-4eef-a9ef-a785e1574649\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.944924 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"5f087f7c-913a-4ab8-b905-5caa84469c77\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:43:10 crc kubenswrapper[4558]: I0120 17:43:10.949370 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-sb-0\" (UID: \"0394e10b-3811-4eef-a9ef-a785e1574649\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.012195 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.016036 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e43647af-b3c8-423f-b88c-d3a9b1aacef2-config-data\") pod \"e43647af-b3c8-423f-b88c-d3a9b1aacef2\" (UID: \"e43647af-b3c8-423f-b88c-d3a9b1aacef2\") " Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.016190 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e43647af-b3c8-423f-b88c-d3a9b1aacef2-scripts\") pod \"e43647af-b3c8-423f-b88c-d3a9b1aacef2\" (UID: \"e43647af-b3c8-423f-b88c-d3a9b1aacef2\") " Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.016309 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e43647af-b3c8-423f-b88c-d3a9b1aacef2-sg-core-conf-yaml\") pod \"e43647af-b3c8-423f-b88c-d3a9b1aacef2\" (UID: \"e43647af-b3c8-423f-b88c-d3a9b1aacef2\") " Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.016376 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e43647af-b3c8-423f-b88c-d3a9b1aacef2-combined-ca-bundle\") pod \"e43647af-b3c8-423f-b88c-d3a9b1aacef2\" (UID: \"e43647af-b3c8-423f-b88c-d3a9b1aacef2\") " Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.016653 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/e43647af-b3c8-423f-b88c-d3a9b1aacef2-ceilometer-tls-certs\") pod \"e43647af-b3c8-423f-b88c-d3a9b1aacef2\" (UID: \"e43647af-b3c8-423f-b88c-d3a9b1aacef2\") " Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.016695 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e43647af-b3c8-423f-b88c-d3a9b1aacef2-log-httpd\") pod \"e43647af-b3c8-423f-b88c-d3a9b1aacef2\" (UID: \"e43647af-b3c8-423f-b88c-d3a9b1aacef2\") " Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.016746 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6lg2c\" (UniqueName: \"kubernetes.io/projected/e43647af-b3c8-423f-b88c-d3a9b1aacef2-kube-api-access-6lg2c\") pod \"e43647af-b3c8-423f-b88c-d3a9b1aacef2\" (UID: \"e43647af-b3c8-423f-b88c-d3a9b1aacef2\") " Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.016825 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e43647af-b3c8-423f-b88c-d3a9b1aacef2-run-httpd\") pod \"e43647af-b3c8-423f-b88c-d3a9b1aacef2\" (UID: \"e43647af-b3c8-423f-b88c-d3a9b1aacef2\") " Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.017391 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e43647af-b3c8-423f-b88c-d3a9b1aacef2-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "e43647af-b3c8-423f-b88c-d3a9b1aacef2" (UID: "e43647af-b3c8-423f-b88c-d3a9b1aacef2"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.017854 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e43647af-b3c8-423f-b88c-d3a9b1aacef2-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.019308 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e43647af-b3c8-423f-b88c-d3a9b1aacef2-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "e43647af-b3c8-423f-b88c-d3a9b1aacef2" (UID: "e43647af-b3c8-423f-b88c-d3a9b1aacef2"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.020080 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.021461 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e43647af-b3c8-423f-b88c-d3a9b1aacef2-scripts" (OuterVolumeSpecName: "scripts") pod "e43647af-b3c8-423f-b88c-d3a9b1aacef2" (UID: "e43647af-b3c8-423f-b88c-d3a9b1aacef2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.021582 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e43647af-b3c8-423f-b88c-d3a9b1aacef2-kube-api-access-6lg2c" (OuterVolumeSpecName: "kube-api-access-6lg2c") pod "e43647af-b3c8-423f-b88c-d3a9b1aacef2" (UID: "e43647af-b3c8-423f-b88c-d3a9b1aacef2"). InnerVolumeSpecName "kube-api-access-6lg2c". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.051130 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e43647af-b3c8-423f-b88c-d3a9b1aacef2-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "e43647af-b3c8-423f-b88c-d3a9b1aacef2" (UID: "e43647af-b3c8-423f-b88c-d3a9b1aacef2"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.070263 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e43647af-b3c8-423f-b88c-d3a9b1aacef2-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "e43647af-b3c8-423f-b88c-d3a9b1aacef2" (UID: "e43647af-b3c8-423f-b88c-d3a9b1aacef2"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.102145 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e43647af-b3c8-423f-b88c-d3a9b1aacef2-config-data" (OuterVolumeSpecName: "config-data") pod "e43647af-b3c8-423f-b88c-d3a9b1aacef2" (UID: "e43647af-b3c8-423f-b88c-d3a9b1aacef2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.105262 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e43647af-b3c8-423f-b88c-d3a9b1aacef2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e43647af-b3c8-423f-b88c-d3a9b1aacef2" (UID: "e43647af-b3c8-423f-b88c-d3a9b1aacef2"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.121027 4558 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/e43647af-b3c8-423f-b88c-d3a9b1aacef2-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.121298 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6lg2c\" (UniqueName: \"kubernetes.io/projected/e43647af-b3c8-423f-b88c-d3a9b1aacef2-kube-api-access-6lg2c\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.121312 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e43647af-b3c8-423f-b88c-d3a9b1aacef2-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.121321 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e43647af-b3c8-423f-b88c-d3a9b1aacef2-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.121331 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e43647af-b3c8-423f-b88c-d3a9b1aacef2-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.121339 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e43647af-b3c8-423f-b88c-d3a9b1aacef2-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.121349 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e43647af-b3c8-423f-b88c-d3a9b1aacef2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:11 crc kubenswrapper[4558]: E0120 17:43:11.430832 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-swift-internal-svc: secret "cert-swift-internal-svc" not found Jan 20 17:43:11 crc kubenswrapper[4558]: E0120 17:43:11.430927 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/858ca371-2b02-46d8-879c-8d859c31d590-internal-tls-certs podName:858ca371-2b02-46d8-879c-8d859c31d590 nodeName:}" failed. No retries permitted until 2026-01-20 17:43:27.43089815 +0000 UTC m=+3701.191236118 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/858ca371-2b02-46d8-879c-8d859c31d590-internal-tls-certs") pod "swift-proxy-6f98b8f5cb-8bwzs" (UID: "858ca371-2b02-46d8-879c-8d859c31d590") : secret "cert-swift-internal-svc" not found Jan 20 17:43:11 crc kubenswrapper[4558]: E0120 17:43:11.433049 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-swift-public-svc: secret "cert-swift-public-svc" not found Jan 20 17:43:11 crc kubenswrapper[4558]: E0120 17:43:11.433122 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/858ca371-2b02-46d8-879c-8d859c31d590-public-tls-certs podName:858ca371-2b02-46d8-879c-8d859c31d590 nodeName:}" failed. No retries permitted until 2026-01-20 17:43:27.433104369 +0000 UTC m=+3701.193442336 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/858ca371-2b02-46d8-879c-8d859c31d590-public-tls-certs") pod "swift-proxy-6f98b8f5cb-8bwzs" (UID: "858ca371-2b02-46d8-879c-8d859c31d590") : secret "cert-swift-public-svc" not found Jan 20 17:43:11 crc kubenswrapper[4558]: W0120 17:43:11.443472 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0394e10b_3811_4eef_a9ef_a785e1574649.slice/crio-70d870022e9e4c73f1789d3ea2e0a19e7d3c37bc077bf1720b078f21c8b49d8c WatchSource:0}: Error finding container 70d870022e9e4c73f1789d3ea2e0a19e7d3c37bc077bf1720b078f21c8b49d8c: Status 404 returned error can't find the container with id 70d870022e9e4c73f1789d3ea2e0a19e7d3c37bc077bf1720b078f21c8b49d8c Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.449252 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.517466 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:43:11 crc kubenswrapper[4558]: W0120 17:43:11.521381 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5f087f7c_913a_4ab8_b905_5caa84469c77.slice/crio-912edc3f837ed8a890443ab315c041a927764484baaca34a5caeaf8eaef57250 WatchSource:0}: Error finding container 912edc3f837ed8a890443ab315c041a927764484baaca34a5caeaf8eaef57250: Status 404 returned error can't find the container with id 912edc3f837ed8a890443ab315c041a927764484baaca34a5caeaf8eaef57250 Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.566295 4558 scope.go:117] "RemoveContainer" containerID="2d2a8989ca5a5af98b2c9dc8f801ea0675339ec14800f437c058c5a43c20146b" Jan 20 17:43:11 crc kubenswrapper[4558]: E0120 17:43:11.566722 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.570105 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"5f087f7c-913a-4ab8-b905-5caa84469c77","Type":"ContainerStarted","Data":"912edc3f837ed8a890443ab315c041a927764484baaca34a5caeaf8eaef57250"} Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.574299 4558 generic.go:334] "Generic (PLEG): container finished" podID="e43647af-b3c8-423f-b88c-d3a9b1aacef2" containerID="4842b1e75542f7bbd13f59782fc4bdc7732e7ba2586beb53f10328006641cd69" exitCode=0 Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.574388 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"e43647af-b3c8-423f-b88c-d3a9b1aacef2","Type":"ContainerDied","Data":"4842b1e75542f7bbd13f59782fc4bdc7732e7ba2586beb53f10328006641cd69"} Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.574432 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.574471 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"e43647af-b3c8-423f-b88c-d3a9b1aacef2","Type":"ContainerDied","Data":"928bc3cf1457b7bb905c7b0d4ce7e8590f2c8e7e706290de1e5b59a2fa0403d2"} Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.574500 4558 scope.go:117] "RemoveContainer" containerID="8caf6b3ac50eb079a954e2fde982cd0bb14aa26097a6d791646bafef2d44e268" Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.579361 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"0394e10b-3811-4eef-a9ef-a785e1574649","Type":"ContainerStarted","Data":"70d870022e9e4c73f1789d3ea2e0a19e7d3c37bc077bf1720b078f21c8b49d8c"} Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.631198 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.635630 4558 scope.go:117] "RemoveContainer" containerID="c72b716d9037758e9baeb566c5ac04381df6b5dc61728fdf1da64996d680ebeb" Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.644957 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.674660 4558 scope.go:117] "RemoveContainer" containerID="4842b1e75542f7bbd13f59782fc4bdc7732e7ba2586beb53f10328006641cd69" Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.674750 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:43:11 crc kubenswrapper[4558]: E0120 17:43:11.675915 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e43647af-b3c8-423f-b88c-d3a9b1aacef2" containerName="proxy-httpd" Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.675938 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e43647af-b3c8-423f-b88c-d3a9b1aacef2" containerName="proxy-httpd" Jan 20 17:43:11 crc kubenswrapper[4558]: E0120 17:43:11.675970 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e43647af-b3c8-423f-b88c-d3a9b1aacef2" containerName="sg-core" Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.675977 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e43647af-b3c8-423f-b88c-d3a9b1aacef2" containerName="sg-core" Jan 20 17:43:11 crc kubenswrapper[4558]: E0120 17:43:11.676001 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e43647af-b3c8-423f-b88c-d3a9b1aacef2" containerName="ceilometer-central-agent" Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.676007 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e43647af-b3c8-423f-b88c-d3a9b1aacef2" containerName="ceilometer-central-agent" Jan 20 17:43:11 crc kubenswrapper[4558]: E0120 17:43:11.676033 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e43647af-b3c8-423f-b88c-d3a9b1aacef2" containerName="ceilometer-notification-agent" Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.676041 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e43647af-b3c8-423f-b88c-d3a9b1aacef2" containerName="ceilometer-notification-agent" Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.676454 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e43647af-b3c8-423f-b88c-d3a9b1aacef2" containerName="proxy-httpd" Jan 20 17:43:11 crc kubenswrapper[4558]: 
I0120 17:43:11.676477 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e43647af-b3c8-423f-b88c-d3a9b1aacef2" containerName="ceilometer-central-agent" Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.676494 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e43647af-b3c8-423f-b88c-d3a9b1aacef2" containerName="sg-core" Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.676515 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e43647af-b3c8-423f-b88c-d3a9b1aacef2" containerName="ceilometer-notification-agent" Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.681186 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.684925 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.685117 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.685394 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ceilometer-internal-svc" Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.689607 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.703237 4558 scope.go:117] "RemoveContainer" containerID="bb7f865b92b1c710c22f66a0b1afc2b13b7529e74d6bbbbdbce454bf11a3c1a0" Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.731704 4558 scope.go:117] "RemoveContainer" containerID="8caf6b3ac50eb079a954e2fde982cd0bb14aa26097a6d791646bafef2d44e268" Jan 20 17:43:11 crc kubenswrapper[4558]: E0120 17:43:11.732027 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8caf6b3ac50eb079a954e2fde982cd0bb14aa26097a6d791646bafef2d44e268\": container with ID starting with 8caf6b3ac50eb079a954e2fde982cd0bb14aa26097a6d791646bafef2d44e268 not found: ID does not exist" containerID="8caf6b3ac50eb079a954e2fde982cd0bb14aa26097a6d791646bafef2d44e268" Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.732068 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8caf6b3ac50eb079a954e2fde982cd0bb14aa26097a6d791646bafef2d44e268"} err="failed to get container status \"8caf6b3ac50eb079a954e2fde982cd0bb14aa26097a6d791646bafef2d44e268\": rpc error: code = NotFound desc = could not find container \"8caf6b3ac50eb079a954e2fde982cd0bb14aa26097a6d791646bafef2d44e268\": container with ID starting with 8caf6b3ac50eb079a954e2fde982cd0bb14aa26097a6d791646bafef2d44e268 not found: ID does not exist" Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.732094 4558 scope.go:117] "RemoveContainer" containerID="c72b716d9037758e9baeb566c5ac04381df6b5dc61728fdf1da64996d680ebeb" Jan 20 17:43:11 crc kubenswrapper[4558]: E0120 17:43:11.732675 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c72b716d9037758e9baeb566c5ac04381df6b5dc61728fdf1da64996d680ebeb\": container with ID starting with c72b716d9037758e9baeb566c5ac04381df6b5dc61728fdf1da64996d680ebeb not found: ID does not exist" containerID="c72b716d9037758e9baeb566c5ac04381df6b5dc61728fdf1da64996d680ebeb" Jan 20 17:43:11 crc 
kubenswrapper[4558]: I0120 17:43:11.732708 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c72b716d9037758e9baeb566c5ac04381df6b5dc61728fdf1da64996d680ebeb"} err="failed to get container status \"c72b716d9037758e9baeb566c5ac04381df6b5dc61728fdf1da64996d680ebeb\": rpc error: code = NotFound desc = could not find container \"c72b716d9037758e9baeb566c5ac04381df6b5dc61728fdf1da64996d680ebeb\": container with ID starting with c72b716d9037758e9baeb566c5ac04381df6b5dc61728fdf1da64996d680ebeb not found: ID does not exist" Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.732730 4558 scope.go:117] "RemoveContainer" containerID="4842b1e75542f7bbd13f59782fc4bdc7732e7ba2586beb53f10328006641cd69" Jan 20 17:43:11 crc kubenswrapper[4558]: E0120 17:43:11.733046 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4842b1e75542f7bbd13f59782fc4bdc7732e7ba2586beb53f10328006641cd69\": container with ID starting with 4842b1e75542f7bbd13f59782fc4bdc7732e7ba2586beb53f10328006641cd69 not found: ID does not exist" containerID="4842b1e75542f7bbd13f59782fc4bdc7732e7ba2586beb53f10328006641cd69" Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.733071 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4842b1e75542f7bbd13f59782fc4bdc7732e7ba2586beb53f10328006641cd69"} err="failed to get container status \"4842b1e75542f7bbd13f59782fc4bdc7732e7ba2586beb53f10328006641cd69\": rpc error: code = NotFound desc = could not find container \"4842b1e75542f7bbd13f59782fc4bdc7732e7ba2586beb53f10328006641cd69\": container with ID starting with 4842b1e75542f7bbd13f59782fc4bdc7732e7ba2586beb53f10328006641cd69 not found: ID does not exist" Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.733087 4558 scope.go:117] "RemoveContainer" containerID="bb7f865b92b1c710c22f66a0b1afc2b13b7529e74d6bbbbdbce454bf11a3c1a0" Jan 20 17:43:11 crc kubenswrapper[4558]: E0120 17:43:11.733348 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bb7f865b92b1c710c22f66a0b1afc2b13b7529e74d6bbbbdbce454bf11a3c1a0\": container with ID starting with bb7f865b92b1c710c22f66a0b1afc2b13b7529e74d6bbbbdbce454bf11a3c1a0 not found: ID does not exist" containerID="bb7f865b92b1c710c22f66a0b1afc2b13b7529e74d6bbbbdbce454bf11a3c1a0" Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.733376 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bb7f865b92b1c710c22f66a0b1afc2b13b7529e74d6bbbbdbce454bf11a3c1a0"} err="failed to get container status \"bb7f865b92b1c710c22f66a0b1afc2b13b7529e74d6bbbbdbce454bf11a3c1a0\": rpc error: code = NotFound desc = could not find container \"bb7f865b92b1c710c22f66a0b1afc2b13b7529e74d6bbbbdbce454bf11a3c1a0\": container with ID starting with bb7f865b92b1c710c22f66a0b1afc2b13b7529e74d6bbbbdbce454bf11a3c1a0 not found: ID does not exist" Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.838208 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d791ceee-e87d-4238-9267-c2d2c53faf96-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d791ceee-e87d-4238-9267-c2d2c53faf96\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.838296 4558 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l8h4j\" (UniqueName: \"kubernetes.io/projected/d791ceee-e87d-4238-9267-c2d2c53faf96-kube-api-access-l8h4j\") pod \"ceilometer-0\" (UID: \"d791ceee-e87d-4238-9267-c2d2c53faf96\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.838471 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d791ceee-e87d-4238-9267-c2d2c53faf96-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d791ceee-e87d-4238-9267-c2d2c53faf96\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.838561 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/d791ceee-e87d-4238-9267-c2d2c53faf96-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"d791ceee-e87d-4238-9267-c2d2c53faf96\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.838660 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d791ceee-e87d-4238-9267-c2d2c53faf96-scripts\") pod \"ceilometer-0\" (UID: \"d791ceee-e87d-4238-9267-c2d2c53faf96\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.838913 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d791ceee-e87d-4238-9267-c2d2c53faf96-config-data\") pod \"ceilometer-0\" (UID: \"d791ceee-e87d-4238-9267-c2d2c53faf96\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.839099 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d791ceee-e87d-4238-9267-c2d2c53faf96-run-httpd\") pod \"ceilometer-0\" (UID: \"d791ceee-e87d-4238-9267-c2d2c53faf96\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.839228 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d791ceee-e87d-4238-9267-c2d2c53faf96-log-httpd\") pod \"ceilometer-0\" (UID: \"d791ceee-e87d-4238-9267-c2d2c53faf96\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.941453 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d791ceee-e87d-4238-9267-c2d2c53faf96-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d791ceee-e87d-4238-9267-c2d2c53faf96\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.942127 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l8h4j\" (UniqueName: \"kubernetes.io/projected/d791ceee-e87d-4238-9267-c2d2c53faf96-kube-api-access-l8h4j\") pod \"ceilometer-0\" (UID: \"d791ceee-e87d-4238-9267-c2d2c53faf96\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.942223 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/d791ceee-e87d-4238-9267-c2d2c53faf96-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d791ceee-e87d-4238-9267-c2d2c53faf96\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.942251 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/d791ceee-e87d-4238-9267-c2d2c53faf96-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"d791ceee-e87d-4238-9267-c2d2c53faf96\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.942272 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d791ceee-e87d-4238-9267-c2d2c53faf96-scripts\") pod \"ceilometer-0\" (UID: \"d791ceee-e87d-4238-9267-c2d2c53faf96\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.942371 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d791ceee-e87d-4238-9267-c2d2c53faf96-config-data\") pod \"ceilometer-0\" (UID: \"d791ceee-e87d-4238-9267-c2d2c53faf96\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.942442 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d791ceee-e87d-4238-9267-c2d2c53faf96-run-httpd\") pod \"ceilometer-0\" (UID: \"d791ceee-e87d-4238-9267-c2d2c53faf96\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.942474 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d791ceee-e87d-4238-9267-c2d2c53faf96-log-httpd\") pod \"ceilometer-0\" (UID: \"d791ceee-e87d-4238-9267-c2d2c53faf96\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.942866 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d791ceee-e87d-4238-9267-c2d2c53faf96-log-httpd\") pod \"ceilometer-0\" (UID: \"d791ceee-e87d-4238-9267-c2d2c53faf96\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.943117 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d791ceee-e87d-4238-9267-c2d2c53faf96-run-httpd\") pod \"ceilometer-0\" (UID: \"d791ceee-e87d-4238-9267-c2d2c53faf96\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.946150 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d791ceee-e87d-4238-9267-c2d2c53faf96-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d791ceee-e87d-4238-9267-c2d2c53faf96\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.946157 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d791ceee-e87d-4238-9267-c2d2c53faf96-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d791ceee-e87d-4238-9267-c2d2c53faf96\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.946573 4558 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/d791ceee-e87d-4238-9267-c2d2c53faf96-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"d791ceee-e87d-4238-9267-c2d2c53faf96\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.947331 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d791ceee-e87d-4238-9267-c2d2c53faf96-scripts\") pod \"ceilometer-0\" (UID: \"d791ceee-e87d-4238-9267-c2d2c53faf96\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.948189 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d791ceee-e87d-4238-9267-c2d2c53faf96-config-data\") pod \"ceilometer-0\" (UID: \"d791ceee-e87d-4238-9267-c2d2c53faf96\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:11 crc kubenswrapper[4558]: I0120 17:43:11.957554 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l8h4j\" (UniqueName: \"kubernetes.io/projected/d791ceee-e87d-4238-9267-c2d2c53faf96-kube-api-access-l8h4j\") pod \"ceilometer-0\" (UID: \"d791ceee-e87d-4238-9267-c2d2c53faf96\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:12 crc kubenswrapper[4558]: I0120 17:43:12.011707 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:12 crc kubenswrapper[4558]: I0120 17:43:12.447619 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:43:12 crc kubenswrapper[4558]: I0120 17:43:12.581315 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="64c80877-f13c-4a3d-b352-66dc008f4e1b" path="/var/lib/kubelet/pods/64c80877-f13c-4a3d-b352-66dc008f4e1b/volumes" Jan 20 17:43:12 crc kubenswrapper[4558]: I0120 17:43:12.582226 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b5ab3ab7-6929-4165-91ad-860f5f109147" path="/var/lib/kubelet/pods/b5ab3ab7-6929-4165-91ad-860f5f109147/volumes" Jan 20 17:43:12 crc kubenswrapper[4558]: I0120 17:43:12.586477 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e43647af-b3c8-423f-b88c-d3a9b1aacef2" path="/var/lib/kubelet/pods/e43647af-b3c8-423f-b88c-d3a9b1aacef2/volumes" Jan 20 17:43:12 crc kubenswrapper[4558]: I0120 17:43:12.601362 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"5f087f7c-913a-4ab8-b905-5caa84469c77","Type":"ContainerStarted","Data":"4dd20571883b253e3f573e46ad0ecb50522ba2e589eece64ceebece4a1df813e"} Jan 20 17:43:12 crc kubenswrapper[4558]: I0120 17:43:12.601453 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"5f087f7c-913a-4ab8-b905-5caa84469c77","Type":"ContainerStarted","Data":"1abb2f52a1084fb284622dfe078fa9cc2613944319be12588b9b290276f35a83"} Jan 20 17:43:12 crc kubenswrapper[4558]: I0120 17:43:12.612999 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"0394e10b-3811-4eef-a9ef-a785e1574649","Type":"ContainerStarted","Data":"45124db692168b507b34896779f420550426a3011e2b00b8912ca2208f58799e"} Jan 20 17:43:12 crc kubenswrapper[4558]: I0120 17:43:12.613067 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" 
event={"ID":"0394e10b-3811-4eef-a9ef-a785e1574649","Type":"ContainerStarted","Data":"baba51653eb144a4334b8651e3e9970d529abaea40101e2e03249e068d4b3b9b"} Jan 20 17:43:12 crc kubenswrapper[4558]: I0120 17:43:12.616383 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"d791ceee-e87d-4238-9267-c2d2c53faf96","Type":"ContainerStarted","Data":"ec03225994940c8abc9d0676a20add78ed6d76579350199ed0d95343231a9e01"} Jan 20 17:43:12 crc kubenswrapper[4558]: I0120 17:43:12.631271 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ovsdbserver-nb-0" podStartSLOduration=2.631251623 podStartE2EDuration="2.631251623s" podCreationTimestamp="2026-01-20 17:43:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:43:12.621802927 +0000 UTC m=+3686.382140893" watchObservedRunningTime="2026-01-20 17:43:12.631251623 +0000 UTC m=+3686.391589590" Jan 20 17:43:12 crc kubenswrapper[4558]: I0120 17:43:12.644607 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ovsdbserver-sb-0" podStartSLOduration=2.644593548 podStartE2EDuration="2.644593548s" podCreationTimestamp="2026-01-20 17:43:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:43:12.639917717 +0000 UTC m=+3686.400255684" watchObservedRunningTime="2026-01-20 17:43:12.644593548 +0000 UTC m=+3686.404931515" Jan 20 17:43:12 crc kubenswrapper[4558]: I0120 17:43:12.699841 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:43:12 crc kubenswrapper[4558]: I0120 17:43:12.700086 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovn-northd-0" podUID="4d2b64b6-6310-4348-a016-e7d8317e00d9" containerName="ovn-northd" containerID="cri-o://80d1619f7e0d6d2bffa7bc9443ac70ae2f905fe8d2fed37e22b75d4144f64fe5" gracePeriod=30 Jan 20 17:43:12 crc kubenswrapper[4558]: I0120 17:43:12.700233 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovn-northd-0" podUID="4d2b64b6-6310-4348-a016-e7d8317e00d9" containerName="openstack-network-exporter" containerID="cri-o://e7e585f1de0828e950c0e7d6180c3e24f82d1e80ebef4c5a53c3cd4c1b15c7a5" gracePeriod=30 Jan 20 17:43:12 crc kubenswrapper[4558]: I0120 17:43:12.764523 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:43:12 crc kubenswrapper[4558]: I0120 17:43:12.874327 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:43:12 crc kubenswrapper[4558]: I0120 17:43:12.874887 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="d0f6996d-f07f-4cf5-8227-6bfed15e1370" containerName="nova-api-log" containerID="cri-o://acf769aacf60dcf25c180f7db8bbc004fddf751cbab36c2bcd5d1eac22d68fe8" gracePeriod=30 Jan 20 17:43:12 crc kubenswrapper[4558]: I0120 17:43:12.874979 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="d0f6996d-f07f-4cf5-8227-6bfed15e1370" containerName="nova-api-api" containerID="cri-o://39b250fe2c01ede889dca2fd85bb4ba7ecb4ff567dec04dccfbfad33fc9e2ce1" gracePeriod=30 Jan 20 17:43:12 crc 
kubenswrapper[4558]: I0120 17:43:12.934727 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:43:12 crc kubenswrapper[4558]: I0120 17:43:12.934986 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" podUID="16c17336-8a71-4dc0-af6b-4549cb220f1a" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://c3d63a6587188dce074b663f11815f4d05e4f8b5c572279de91c62b99871fc68" gracePeriod=30 Jan 20 17:43:12 crc kubenswrapper[4558]: I0120 17:43:12.947010 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/placement-7b8cd49c48-k928p"] Jan 20 17:43:12 crc kubenswrapper[4558]: I0120 17:43:12.948695 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-7b8cd49c48-k928p" Jan 20 17:43:12 crc kubenswrapper[4558]: I0120 17:43:12.953571 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-7b8cd49c48-k928p"] Jan 20 17:43:13 crc kubenswrapper[4558]: I0120 17:43:13.075554 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/feedfa19-f182-4f7f-8b50-3d16c11e3510-config-data\") pod \"placement-7b8cd49c48-k928p\" (UID: \"feedfa19-f182-4f7f-8b50-3d16c11e3510\") " pod="openstack-kuttl-tests/placement-7b8cd49c48-k928p" Jan 20 17:43:13 crc kubenswrapper[4558]: I0120 17:43:13.075618 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fzns4\" (UniqueName: \"kubernetes.io/projected/feedfa19-f182-4f7f-8b50-3d16c11e3510-kube-api-access-fzns4\") pod \"placement-7b8cd49c48-k928p\" (UID: \"feedfa19-f182-4f7f-8b50-3d16c11e3510\") " pod="openstack-kuttl-tests/placement-7b8cd49c48-k928p" Jan 20 17:43:13 crc kubenswrapper[4558]: I0120 17:43:13.075657 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/feedfa19-f182-4f7f-8b50-3d16c11e3510-internal-tls-certs\") pod \"placement-7b8cd49c48-k928p\" (UID: \"feedfa19-f182-4f7f-8b50-3d16c11e3510\") " pod="openstack-kuttl-tests/placement-7b8cd49c48-k928p" Jan 20 17:43:13 crc kubenswrapper[4558]: I0120 17:43:13.075781 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/feedfa19-f182-4f7f-8b50-3d16c11e3510-combined-ca-bundle\") pod \"placement-7b8cd49c48-k928p\" (UID: \"feedfa19-f182-4f7f-8b50-3d16c11e3510\") " pod="openstack-kuttl-tests/placement-7b8cd49c48-k928p" Jan 20 17:43:13 crc kubenswrapper[4558]: I0120 17:43:13.075835 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/feedfa19-f182-4f7f-8b50-3d16c11e3510-public-tls-certs\") pod \"placement-7b8cd49c48-k928p\" (UID: \"feedfa19-f182-4f7f-8b50-3d16c11e3510\") " pod="openstack-kuttl-tests/placement-7b8cd49c48-k928p" Jan 20 17:43:13 crc kubenswrapper[4558]: I0120 17:43:13.075943 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/feedfa19-f182-4f7f-8b50-3d16c11e3510-logs\") pod \"placement-7b8cd49c48-k928p\" (UID: \"feedfa19-f182-4f7f-8b50-3d16c11e3510\") " 
pod="openstack-kuttl-tests/placement-7b8cd49c48-k928p" Jan 20 17:43:13 crc kubenswrapper[4558]: I0120 17:43:13.075988 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/feedfa19-f182-4f7f-8b50-3d16c11e3510-scripts\") pod \"placement-7b8cd49c48-k928p\" (UID: \"feedfa19-f182-4f7f-8b50-3d16c11e3510\") " pod="openstack-kuttl-tests/placement-7b8cd49c48-k928p" Jan 20 17:43:13 crc kubenswrapper[4558]: I0120 17:43:13.131215 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-5676986846-92lbc"] Jan 20 17:43:13 crc kubenswrapper[4558]: I0120 17:43:13.134007 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-5676986846-92lbc" Jan 20 17:43:13 crc kubenswrapper[4558]: I0120 17:43:13.158252 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-5676986846-92lbc"] Jan 20 17:43:13 crc kubenswrapper[4558]: I0120 17:43:13.176894 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/feedfa19-f182-4f7f-8b50-3d16c11e3510-logs\") pod \"placement-7b8cd49c48-k928p\" (UID: \"feedfa19-f182-4f7f-8b50-3d16c11e3510\") " pod="openstack-kuttl-tests/placement-7b8cd49c48-k928p" Jan 20 17:43:13 crc kubenswrapper[4558]: I0120 17:43:13.177176 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/feedfa19-f182-4f7f-8b50-3d16c11e3510-scripts\") pod \"placement-7b8cd49c48-k928p\" (UID: \"feedfa19-f182-4f7f-8b50-3d16c11e3510\") " pod="openstack-kuttl-tests/placement-7b8cd49c48-k928p" Jan 20 17:43:13 crc kubenswrapper[4558]: I0120 17:43:13.177262 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/feedfa19-f182-4f7f-8b50-3d16c11e3510-config-data\") pod \"placement-7b8cd49c48-k928p\" (UID: \"feedfa19-f182-4f7f-8b50-3d16c11e3510\") " pod="openstack-kuttl-tests/placement-7b8cd49c48-k928p" Jan 20 17:43:13 crc kubenswrapper[4558]: I0120 17:43:13.177300 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fzns4\" (UniqueName: \"kubernetes.io/projected/feedfa19-f182-4f7f-8b50-3d16c11e3510-kube-api-access-fzns4\") pod \"placement-7b8cd49c48-k928p\" (UID: \"feedfa19-f182-4f7f-8b50-3d16c11e3510\") " pod="openstack-kuttl-tests/placement-7b8cd49c48-k928p" Jan 20 17:43:13 crc kubenswrapper[4558]: I0120 17:43:13.177325 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/feedfa19-f182-4f7f-8b50-3d16c11e3510-internal-tls-certs\") pod \"placement-7b8cd49c48-k928p\" (UID: \"feedfa19-f182-4f7f-8b50-3d16c11e3510\") " pod="openstack-kuttl-tests/placement-7b8cd49c48-k928p" Jan 20 17:43:13 crc kubenswrapper[4558]: I0120 17:43:13.177389 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/feedfa19-f182-4f7f-8b50-3d16c11e3510-combined-ca-bundle\") pod \"placement-7b8cd49c48-k928p\" (UID: \"feedfa19-f182-4f7f-8b50-3d16c11e3510\") " pod="openstack-kuttl-tests/placement-7b8cd49c48-k928p" Jan 20 17:43:13 crc kubenswrapper[4558]: I0120 17:43:13.177431 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/feedfa19-f182-4f7f-8b50-3d16c11e3510-public-tls-certs\") pod \"placement-7b8cd49c48-k928p\" (UID: \"feedfa19-f182-4f7f-8b50-3d16c11e3510\") " pod="openstack-kuttl-tests/placement-7b8cd49c48-k928p" Jan 20 17:43:13 crc kubenswrapper[4558]: I0120 17:43:13.178741 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/feedfa19-f182-4f7f-8b50-3d16c11e3510-logs\") pod \"placement-7b8cd49c48-k928p\" (UID: \"feedfa19-f182-4f7f-8b50-3d16c11e3510\") " pod="openstack-kuttl-tests/placement-7b8cd49c48-k928p" Jan 20 17:43:13 crc kubenswrapper[4558]: I0120 17:43:13.181345 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/feedfa19-f182-4f7f-8b50-3d16c11e3510-public-tls-certs\") pod \"placement-7b8cd49c48-k928p\" (UID: \"feedfa19-f182-4f7f-8b50-3d16c11e3510\") " pod="openstack-kuttl-tests/placement-7b8cd49c48-k928p" Jan 20 17:43:13 crc kubenswrapper[4558]: I0120 17:43:13.182854 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/feedfa19-f182-4f7f-8b50-3d16c11e3510-combined-ca-bundle\") pod \"placement-7b8cd49c48-k928p\" (UID: \"feedfa19-f182-4f7f-8b50-3d16c11e3510\") " pod="openstack-kuttl-tests/placement-7b8cd49c48-k928p" Jan 20 17:43:13 crc kubenswrapper[4558]: I0120 17:43:13.184730 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/feedfa19-f182-4f7f-8b50-3d16c11e3510-config-data\") pod \"placement-7b8cd49c48-k928p\" (UID: \"feedfa19-f182-4f7f-8b50-3d16c11e3510\") " pod="openstack-kuttl-tests/placement-7b8cd49c48-k928p" Jan 20 17:43:13 crc kubenswrapper[4558]: I0120 17:43:13.199453 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/feedfa19-f182-4f7f-8b50-3d16c11e3510-scripts\") pod \"placement-7b8cd49c48-k928p\" (UID: \"feedfa19-f182-4f7f-8b50-3d16c11e3510\") " pod="openstack-kuttl-tests/placement-7b8cd49c48-k928p" Jan 20 17:43:13 crc kubenswrapper[4558]: I0120 17:43:13.199525 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/feedfa19-f182-4f7f-8b50-3d16c11e3510-internal-tls-certs\") pod \"placement-7b8cd49c48-k928p\" (UID: \"feedfa19-f182-4f7f-8b50-3d16c11e3510\") " pod="openstack-kuttl-tests/placement-7b8cd49c48-k928p" Jan 20 17:43:13 crc kubenswrapper[4558]: I0120 17:43:13.202462 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fzns4\" (UniqueName: \"kubernetes.io/projected/feedfa19-f182-4f7f-8b50-3d16c11e3510-kube-api-access-fzns4\") pod \"placement-7b8cd49c48-k928p\" (UID: \"feedfa19-f182-4f7f-8b50-3d16c11e3510\") " pod="openstack-kuttl-tests/placement-7b8cd49c48-k928p" Jan 20 17:43:13 crc kubenswrapper[4558]: I0120 17:43:13.276208 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-7b8cd49c48-k928p" Jan 20 17:43:13 crc kubenswrapper[4558]: I0120 17:43:13.287788 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/8877ab44-638b-4880-97fb-305726a5c1a6-httpd-config\") pod \"neutron-5676986846-92lbc\" (UID: \"8877ab44-638b-4880-97fb-305726a5c1a6\") " pod="openstack-kuttl-tests/neutron-5676986846-92lbc" Jan 20 17:43:13 crc kubenswrapper[4558]: I0120 17:43:13.287838 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8877ab44-638b-4880-97fb-305726a5c1a6-internal-tls-certs\") pod \"neutron-5676986846-92lbc\" (UID: \"8877ab44-638b-4880-97fb-305726a5c1a6\") " pod="openstack-kuttl-tests/neutron-5676986846-92lbc" Jan 20 17:43:13 crc kubenswrapper[4558]: I0120 17:43:13.287881 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8877ab44-638b-4880-97fb-305726a5c1a6-public-tls-certs\") pod \"neutron-5676986846-92lbc\" (UID: \"8877ab44-638b-4880-97fb-305726a5c1a6\") " pod="openstack-kuttl-tests/neutron-5676986846-92lbc" Jan 20 17:43:13 crc kubenswrapper[4558]: I0120 17:43:13.287942 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/8877ab44-638b-4880-97fb-305726a5c1a6-config\") pod \"neutron-5676986846-92lbc\" (UID: \"8877ab44-638b-4880-97fb-305726a5c1a6\") " pod="openstack-kuttl-tests/neutron-5676986846-92lbc" Jan 20 17:43:13 crc kubenswrapper[4558]: I0120 17:43:13.288010 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4cpgm\" (UniqueName: \"kubernetes.io/projected/8877ab44-638b-4880-97fb-305726a5c1a6-kube-api-access-4cpgm\") pod \"neutron-5676986846-92lbc\" (UID: \"8877ab44-638b-4880-97fb-305726a5c1a6\") " pod="openstack-kuttl-tests/neutron-5676986846-92lbc" Jan 20 17:43:13 crc kubenswrapper[4558]: I0120 17:43:13.288119 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8877ab44-638b-4880-97fb-305726a5c1a6-combined-ca-bundle\") pod \"neutron-5676986846-92lbc\" (UID: \"8877ab44-638b-4880-97fb-305726a5c1a6\") " pod="openstack-kuttl-tests/neutron-5676986846-92lbc" Jan 20 17:43:13 crc kubenswrapper[4558]: I0120 17:43:13.288134 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/8877ab44-638b-4880-97fb-305726a5c1a6-ovndb-tls-certs\") pod \"neutron-5676986846-92lbc\" (UID: \"8877ab44-638b-4880-97fb-305726a5c1a6\") " pod="openstack-kuttl-tests/neutron-5676986846-92lbc" Jan 20 17:43:13 crc kubenswrapper[4558]: I0120 17:43:13.389610 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/8877ab44-638b-4880-97fb-305726a5c1a6-httpd-config\") pod \"neutron-5676986846-92lbc\" (UID: \"8877ab44-638b-4880-97fb-305726a5c1a6\") " pod="openstack-kuttl-tests/neutron-5676986846-92lbc" Jan 20 17:43:13 crc kubenswrapper[4558]: I0120 17:43:13.389661 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/8877ab44-638b-4880-97fb-305726a5c1a6-internal-tls-certs\") pod \"neutron-5676986846-92lbc\" (UID: \"8877ab44-638b-4880-97fb-305726a5c1a6\") " pod="openstack-kuttl-tests/neutron-5676986846-92lbc" Jan 20 17:43:13 crc kubenswrapper[4558]: I0120 17:43:13.389696 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8877ab44-638b-4880-97fb-305726a5c1a6-public-tls-certs\") pod \"neutron-5676986846-92lbc\" (UID: \"8877ab44-638b-4880-97fb-305726a5c1a6\") " pod="openstack-kuttl-tests/neutron-5676986846-92lbc" Jan 20 17:43:13 crc kubenswrapper[4558]: I0120 17:43:13.389736 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/8877ab44-638b-4880-97fb-305726a5c1a6-config\") pod \"neutron-5676986846-92lbc\" (UID: \"8877ab44-638b-4880-97fb-305726a5c1a6\") " pod="openstack-kuttl-tests/neutron-5676986846-92lbc" Jan 20 17:43:13 crc kubenswrapper[4558]: I0120 17:43:13.389779 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4cpgm\" (UniqueName: \"kubernetes.io/projected/8877ab44-638b-4880-97fb-305726a5c1a6-kube-api-access-4cpgm\") pod \"neutron-5676986846-92lbc\" (UID: \"8877ab44-638b-4880-97fb-305726a5c1a6\") " pod="openstack-kuttl-tests/neutron-5676986846-92lbc" Jan 20 17:43:13 crc kubenswrapper[4558]: I0120 17:43:13.389858 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8877ab44-638b-4880-97fb-305726a5c1a6-combined-ca-bundle\") pod \"neutron-5676986846-92lbc\" (UID: \"8877ab44-638b-4880-97fb-305726a5c1a6\") " pod="openstack-kuttl-tests/neutron-5676986846-92lbc" Jan 20 17:43:13 crc kubenswrapper[4558]: I0120 17:43:13.389877 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/8877ab44-638b-4880-97fb-305726a5c1a6-ovndb-tls-certs\") pod \"neutron-5676986846-92lbc\" (UID: \"8877ab44-638b-4880-97fb-305726a5c1a6\") " pod="openstack-kuttl-tests/neutron-5676986846-92lbc" Jan 20 17:43:13 crc kubenswrapper[4558]: I0120 17:43:13.395027 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/8877ab44-638b-4880-97fb-305726a5c1a6-httpd-config\") pod \"neutron-5676986846-92lbc\" (UID: \"8877ab44-638b-4880-97fb-305726a5c1a6\") " pod="openstack-kuttl-tests/neutron-5676986846-92lbc" Jan 20 17:43:13 crc kubenswrapper[4558]: I0120 17:43:13.395212 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8877ab44-638b-4880-97fb-305726a5c1a6-public-tls-certs\") pod \"neutron-5676986846-92lbc\" (UID: \"8877ab44-638b-4880-97fb-305726a5c1a6\") " pod="openstack-kuttl-tests/neutron-5676986846-92lbc" Jan 20 17:43:13 crc kubenswrapper[4558]: I0120 17:43:13.398785 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8877ab44-638b-4880-97fb-305726a5c1a6-internal-tls-certs\") pod \"neutron-5676986846-92lbc\" (UID: \"8877ab44-638b-4880-97fb-305726a5c1a6\") " pod="openstack-kuttl-tests/neutron-5676986846-92lbc" Jan 20 17:43:13 crc kubenswrapper[4558]: I0120 17:43:13.399686 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/8877ab44-638b-4880-97fb-305726a5c1a6-ovndb-tls-certs\") pod \"neutron-5676986846-92lbc\" (UID: \"8877ab44-638b-4880-97fb-305726a5c1a6\") " pod="openstack-kuttl-tests/neutron-5676986846-92lbc" Jan 20 17:43:13 crc kubenswrapper[4558]: I0120 17:43:13.410759 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/8877ab44-638b-4880-97fb-305726a5c1a6-config\") pod \"neutron-5676986846-92lbc\" (UID: \"8877ab44-638b-4880-97fb-305726a5c1a6\") " pod="openstack-kuttl-tests/neutron-5676986846-92lbc" Jan 20 17:43:13 crc kubenswrapper[4558]: I0120 17:43:13.415804 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8877ab44-638b-4880-97fb-305726a5c1a6-combined-ca-bundle\") pod \"neutron-5676986846-92lbc\" (UID: \"8877ab44-638b-4880-97fb-305726a5c1a6\") " pod="openstack-kuttl-tests/neutron-5676986846-92lbc" Jan 20 17:43:13 crc kubenswrapper[4558]: I0120 17:43:13.422790 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4cpgm\" (UniqueName: \"kubernetes.io/projected/8877ab44-638b-4880-97fb-305726a5c1a6-kube-api-access-4cpgm\") pod \"neutron-5676986846-92lbc\" (UID: \"8877ab44-638b-4880-97fb-305726a5c1a6\") " pod="openstack-kuttl-tests/neutron-5676986846-92lbc" Jan 20 17:43:13 crc kubenswrapper[4558]: I0120 17:43:13.477831 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-5676986846-92lbc" Jan 20 17:43:13 crc kubenswrapper[4558]: I0120 17:43:13.629975 4558 generic.go:334] "Generic (PLEG): container finished" podID="d0f6996d-f07f-4cf5-8227-6bfed15e1370" containerID="acf769aacf60dcf25c180f7db8bbc004fddf751cbab36c2bcd5d1eac22d68fe8" exitCode=143 Jan 20 17:43:13 crc kubenswrapper[4558]: I0120 17:43:13.630058 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"d0f6996d-f07f-4cf5-8227-6bfed15e1370","Type":"ContainerDied","Data":"acf769aacf60dcf25c180f7db8bbc004fddf751cbab36c2bcd5d1eac22d68fe8"} Jan 20 17:43:13 crc kubenswrapper[4558]: I0120 17:43:13.631956 4558 generic.go:334] "Generic (PLEG): container finished" podID="4d2b64b6-6310-4348-a016-e7d8317e00d9" containerID="e7e585f1de0828e950c0e7d6180c3e24f82d1e80ebef4c5a53c3cd4c1b15c7a5" exitCode=2 Jan 20 17:43:13 crc kubenswrapper[4558]: I0120 17:43:13.632007 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"4d2b64b6-6310-4348-a016-e7d8317e00d9","Type":"ContainerDied","Data":"e7e585f1de0828e950c0e7d6180c3e24f82d1e80ebef4c5a53c3cd4c1b15c7a5"} Jan 20 17:43:13 crc kubenswrapper[4558]: I0120 17:43:13.634352 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"d791ceee-e87d-4238-9267-c2d2c53faf96","Type":"ContainerStarted","Data":"dceb063becafd3ab16d08d70734f823d57aed693a7d29459757378db7e430997"} Jan 20 17:43:13 crc kubenswrapper[4558]: I0120 17:43:13.818747 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-7b8cd49c48-k928p"] Jan 20 17:43:13 crc kubenswrapper[4558]: I0120 17:43:13.937555 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-5676986846-92lbc"] Jan 20 17:43:13 crc kubenswrapper[4558]: I0120 17:43:13.945138 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:43:14 crc kubenswrapper[4558]: I0120 17:43:14.015782 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:43:14 crc kubenswrapper[4558]: I0120 17:43:14.020581 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:43:14 crc kubenswrapper[4558]: I0120 17:43:14.023991 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/16c17336-8a71-4dc0-af6b-4549cb220f1a-vencrypt-tls-certs\") pod \"16c17336-8a71-4dc0-af6b-4549cb220f1a\" (UID: \"16c17336-8a71-4dc0-af6b-4549cb220f1a\") " Jan 20 17:43:14 crc kubenswrapper[4558]: I0120 17:43:14.026054 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16c17336-8a71-4dc0-af6b-4549cb220f1a-config-data\") pod \"16c17336-8a71-4dc0-af6b-4549cb220f1a\" (UID: \"16c17336-8a71-4dc0-af6b-4549cb220f1a\") " Jan 20 17:43:14 crc kubenswrapper[4558]: I0120 17:43:14.056841 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:43:14 crc kubenswrapper[4558]: I0120 17:43:14.064324 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/16c17336-8a71-4dc0-af6b-4549cb220f1a-config-data" (OuterVolumeSpecName: "config-data") pod "16c17336-8a71-4dc0-af6b-4549cb220f1a" (UID: "16c17336-8a71-4dc0-af6b-4549cb220f1a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:14 crc kubenswrapper[4558]: I0120 17:43:14.085276 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:43:14 crc kubenswrapper[4558]: I0120 17:43:14.119634 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/16c17336-8a71-4dc0-af6b-4549cb220f1a-vencrypt-tls-certs" (OuterVolumeSpecName: "vencrypt-tls-certs") pod "16c17336-8a71-4dc0-af6b-4549cb220f1a" (UID: "16c17336-8a71-4dc0-af6b-4549cb220f1a"). InnerVolumeSpecName "vencrypt-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:14 crc kubenswrapper[4558]: I0120 17:43:14.131363 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16c17336-8a71-4dc0-af6b-4549cb220f1a-combined-ca-bundle\") pod \"16c17336-8a71-4dc0-af6b-4549cb220f1a\" (UID: \"16c17336-8a71-4dc0-af6b-4549cb220f1a\") " Jan 20 17:43:14 crc kubenswrapper[4558]: I0120 17:43:14.131403 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/16c17336-8a71-4dc0-af6b-4549cb220f1a-nova-novncproxy-tls-certs\") pod \"16c17336-8a71-4dc0-af6b-4549cb220f1a\" (UID: \"16c17336-8a71-4dc0-af6b-4549cb220f1a\") " Jan 20 17:43:14 crc kubenswrapper[4558]: I0120 17:43:14.131759 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7r6sq\" (UniqueName: \"kubernetes.io/projected/16c17336-8a71-4dc0-af6b-4549cb220f1a-kube-api-access-7r6sq\") pod \"16c17336-8a71-4dc0-af6b-4549cb220f1a\" (UID: \"16c17336-8a71-4dc0-af6b-4549cb220f1a\") " Jan 20 17:43:14 crc kubenswrapper[4558]: I0120 17:43:14.132552 4558 reconciler_common.go:293] "Volume detached for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/16c17336-8a71-4dc0-af6b-4549cb220f1a-vencrypt-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:14 crc kubenswrapper[4558]: I0120 17:43:14.132574 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16c17336-8a71-4dc0-af6b-4549cb220f1a-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:14 crc kubenswrapper[4558]: I0120 17:43:14.134476 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/16c17336-8a71-4dc0-af6b-4549cb220f1a-kube-api-access-7r6sq" (OuterVolumeSpecName: "kube-api-access-7r6sq") pod "16c17336-8a71-4dc0-af6b-4549cb220f1a" (UID: "16c17336-8a71-4dc0-af6b-4549cb220f1a"). InnerVolumeSpecName "kube-api-access-7r6sq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:43:14 crc kubenswrapper[4558]: I0120 17:43:14.165433 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/16c17336-8a71-4dc0-af6b-4549cb220f1a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "16c17336-8a71-4dc0-af6b-4549cb220f1a" (UID: "16c17336-8a71-4dc0-af6b-4549cb220f1a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:14 crc kubenswrapper[4558]: I0120 17:43:14.190656 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/16c17336-8a71-4dc0-af6b-4549cb220f1a-nova-novncproxy-tls-certs" (OuterVolumeSpecName: "nova-novncproxy-tls-certs") pod "16c17336-8a71-4dc0-af6b-4549cb220f1a" (UID: "16c17336-8a71-4dc0-af6b-4549cb220f1a"). InnerVolumeSpecName "nova-novncproxy-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:14 crc kubenswrapper[4558]: I0120 17:43:14.234264 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7r6sq\" (UniqueName: \"kubernetes.io/projected/16c17336-8a71-4dc0-af6b-4549cb220f1a-kube-api-access-7r6sq\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:14 crc kubenswrapper[4558]: I0120 17:43:14.234294 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16c17336-8a71-4dc0-af6b-4549cb220f1a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:14 crc kubenswrapper[4558]: I0120 17:43:14.234305 4558 reconciler_common.go:293] "Volume detached for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/16c17336-8a71-4dc0-af6b-4549cb220f1a-nova-novncproxy-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:14 crc kubenswrapper[4558]: I0120 17:43:14.423000 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:14 crc kubenswrapper[4558]: I0120 17:43:14.423336 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:14 crc kubenswrapper[4558]: I0120 17:43:14.456327 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:14 crc kubenswrapper[4558]: I0120 17:43:14.480668 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:14 crc kubenswrapper[4558]: E0120 17:43:14.559492 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="80d1619f7e0d6d2bffa7bc9443ac70ae2f905fe8d2fed37e22b75d4144f64fe5" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Jan 20 17:43:14 crc kubenswrapper[4558]: E0120 17:43:14.566934 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="80d1619f7e0d6d2bffa7bc9443ac70ae2f905fe8d2fed37e22b75d4144f64fe5" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Jan 20 17:43:14 crc kubenswrapper[4558]: E0120 17:43:14.570084 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="80d1619f7e0d6d2bffa7bc9443ac70ae2f905fe8d2fed37e22b75d4144f64fe5" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Jan 20 17:43:14 crc kubenswrapper[4558]: E0120 17:43:14.570123 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/ovn-northd-0" podUID="4d2b64b6-6310-4348-a016-e7d8317e00d9" containerName="ovn-northd" Jan 20 17:43:14 crc kubenswrapper[4558]: I0120 17:43:14.649369 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-5676986846-92lbc" 
event={"ID":"8877ab44-638b-4880-97fb-305726a5c1a6","Type":"ContainerStarted","Data":"99d260241a6e2248384349724b544d3292e8cbd5470d0de4208392622b91d851"} Jan 20 17:43:14 crc kubenswrapper[4558]: I0120 17:43:14.649439 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-5676986846-92lbc" event={"ID":"8877ab44-638b-4880-97fb-305726a5c1a6","Type":"ContainerStarted","Data":"418895b26e5117800f2a53dae040b43bb1b03510640c0a1a0ca6b67577a9df79"} Jan 20 17:43:14 crc kubenswrapper[4558]: I0120 17:43:14.649453 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-5676986846-92lbc" event={"ID":"8877ab44-638b-4880-97fb-305726a5c1a6","Type":"ContainerStarted","Data":"998da092b0ef60bbbaa87e0a678e6c3a3d5f8713be4b0cb67d0a9a3e1a65c932"} Jan 20 17:43:14 crc kubenswrapper[4558]: I0120 17:43:14.650378 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/neutron-5676986846-92lbc" Jan 20 17:43:14 crc kubenswrapper[4558]: I0120 17:43:14.653983 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-7b8cd49c48-k928p" event={"ID":"feedfa19-f182-4f7f-8b50-3d16c11e3510","Type":"ContainerStarted","Data":"4a06fdd89fd78aff5dd0753c417b307f7a680d4b1887883dc3eccee2ed84c9ec"} Jan 20 17:43:14 crc kubenswrapper[4558]: I0120 17:43:14.654018 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-7b8cd49c48-k928p" event={"ID":"feedfa19-f182-4f7f-8b50-3d16c11e3510","Type":"ContainerStarted","Data":"2238e5e4c9121a6f35ac99b7eefaed26a7b72dd8f5d91d2775ab73bb36849ee2"} Jan 20 17:43:14 crc kubenswrapper[4558]: I0120 17:43:14.654031 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-7b8cd49c48-k928p" event={"ID":"feedfa19-f182-4f7f-8b50-3d16c11e3510","Type":"ContainerStarted","Data":"0f4ba83b39effab14da1c98c5454784b0b707d6bde7057ceb9ddd18be15d6501"} Jan 20 17:43:14 crc kubenswrapper[4558]: I0120 17:43:14.654963 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/placement-7b8cd49c48-k928p" Jan 20 17:43:14 crc kubenswrapper[4558]: I0120 17:43:14.654993 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/placement-7b8cd49c48-k928p" Jan 20 17:43:14 crc kubenswrapper[4558]: I0120 17:43:14.663831 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"d791ceee-e87d-4238-9267-c2d2c53faf96","Type":"ContainerStarted","Data":"b225f9e53aaf9806843cb0212349c3e9e4362df912776534664c2dac9cdadb6b"} Jan 20 17:43:14 crc kubenswrapper[4558]: I0120 17:43:14.670956 4558 generic.go:334] "Generic (PLEG): container finished" podID="16c17336-8a71-4dc0-af6b-4549cb220f1a" containerID="c3d63a6587188dce074b663f11815f4d05e4f8b5c572279de91c62b99871fc68" exitCode=0 Jan 20 17:43:14 crc kubenswrapper[4558]: I0120 17:43:14.671274 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"16c17336-8a71-4dc0-af6b-4549cb220f1a","Type":"ContainerDied","Data":"c3d63a6587188dce074b663f11815f4d05e4f8b5c572279de91c62b99871fc68"} Jan 20 17:43:14 crc kubenswrapper[4558]: I0120 17:43:14.671348 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"16c17336-8a71-4dc0-af6b-4549cb220f1a","Type":"ContainerDied","Data":"195f7df2dd875000ad3b06f1e140d95338d2648e3e005bdc5cfdfa3e467de32d"} Jan 20 17:43:14 
crc kubenswrapper[4558]: I0120 17:43:14.671372 4558 scope.go:117] "RemoveContainer" containerID="c3d63a6587188dce074b663f11815f4d05e4f8b5c572279de91c62b99871fc68" Jan 20 17:43:14 crc kubenswrapper[4558]: I0120 17:43:14.672367 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:43:14 crc kubenswrapper[4558]: I0120 17:43:14.672434 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:14 crc kubenswrapper[4558]: I0120 17:43:14.672449 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:14 crc kubenswrapper[4558]: I0120 17:43:14.672458 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:43:14 crc kubenswrapper[4558]: I0120 17:43:14.678592 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:43:14 crc kubenswrapper[4558]: I0120 17:43:14.691215 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/neutron-5676986846-92lbc" podStartSLOduration=1.691189729 podStartE2EDuration="1.691189729s" podCreationTimestamp="2026-01-20 17:43:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:43:14.680866268 +0000 UTC m=+3688.441204235" watchObservedRunningTime="2026-01-20 17:43:14.691189729 +0000 UTC m=+3688.451527687" Jan 20 17:43:14 crc kubenswrapper[4558]: I0120 17:43:14.709894 4558 scope.go:117] "RemoveContainer" containerID="c3d63a6587188dce074b663f11815f4d05e4f8b5c572279de91c62b99871fc68" Jan 20 17:43:14 crc kubenswrapper[4558]: E0120 17:43:14.711180 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c3d63a6587188dce074b663f11815f4d05e4f8b5c572279de91c62b99871fc68\": container with ID starting with c3d63a6587188dce074b663f11815f4d05e4f8b5c572279de91c62b99871fc68 not found: ID does not exist" containerID="c3d63a6587188dce074b663f11815f4d05e4f8b5c572279de91c62b99871fc68" Jan 20 17:43:14 crc kubenswrapper[4558]: I0120 17:43:14.711228 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c3d63a6587188dce074b663f11815f4d05e4f8b5c572279de91c62b99871fc68"} err="failed to get container status \"c3d63a6587188dce074b663f11815f4d05e4f8b5c572279de91c62b99871fc68\": rpc error: code = NotFound desc = could not find container \"c3d63a6587188dce074b663f11815f4d05e4f8b5c572279de91c62b99871fc68\": container with ID starting with c3d63a6587188dce074b663f11815f4d05e4f8b5c572279de91c62b99871fc68 not found: ID does not exist" Jan 20 17:43:14 crc kubenswrapper[4558]: I0120 17:43:14.733624 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/placement-7b8cd49c48-k928p" podStartSLOduration=2.733603317 podStartE2EDuration="2.733603317s" podCreationTimestamp="2026-01-20 17:43:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:43:14.707036294 +0000 UTC m=+3688.467374261" watchObservedRunningTime="2026-01-20 17:43:14.733603317 +0000 UTC m=+3688.493941284" Jan 20 17:43:14 crc kubenswrapper[4558]: I0120 17:43:14.739834 
4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:43:14 crc kubenswrapper[4558]: I0120 17:43:14.752243 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:43:14 crc kubenswrapper[4558]: I0120 17:43:14.762143 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:43:14 crc kubenswrapper[4558]: E0120 17:43:14.762632 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16c17336-8a71-4dc0-af6b-4549cb220f1a" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:43:14 crc kubenswrapper[4558]: I0120 17:43:14.762647 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="16c17336-8a71-4dc0-af6b-4549cb220f1a" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:43:14 crc kubenswrapper[4558]: I0120 17:43:14.762888 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="16c17336-8a71-4dc0-af6b-4549cb220f1a" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:43:14 crc kubenswrapper[4558]: I0120 17:43:14.763650 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:43:14 crc kubenswrapper[4558]: I0120 17:43:14.767421 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-novncproxy-config-data" Jan 20 17:43:14 crc kubenswrapper[4558]: I0120 17:43:14.767607 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-novncproxy-cell1-vencrypt" Jan 20 17:43:14 crc kubenswrapper[4558]: I0120 17:43:14.767793 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-novncproxy-cell1-public-svc" Jan 20 17:43:14 crc kubenswrapper[4558]: I0120 17:43:14.777801 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:43:14 crc kubenswrapper[4558]: I0120 17:43:14.888090 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:14 crc kubenswrapper[4558]: I0120 17:43:14.888227 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:14 crc kubenswrapper[4558]: I0120 17:43:14.939382 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:14 crc kubenswrapper[4558]: I0120 17:43:14.942146 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:14 crc kubenswrapper[4558]: I0120 17:43:14.953246 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd5b8a4a-4f31-4b17-95a3-669d1d7661c4-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"fd5b8a4a-4f31-4b17-95a3-669d1d7661c4\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:43:14 crc kubenswrapper[4558]: I0120 17:43:14.953535 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd5b8a4a-4f31-4b17-95a3-669d1d7661c4-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: 
\"fd5b8a4a-4f31-4b17-95a3-669d1d7661c4\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:43:14 crc kubenswrapper[4558]: I0120 17:43:14.953674 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/fd5b8a4a-4f31-4b17-95a3-669d1d7661c4-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"fd5b8a4a-4f31-4b17-95a3-669d1d7661c4\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:43:14 crc kubenswrapper[4558]: I0120 17:43:14.953771 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/fd5b8a4a-4f31-4b17-95a3-669d1d7661c4-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"fd5b8a4a-4f31-4b17-95a3-669d1d7661c4\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:43:14 crc kubenswrapper[4558]: I0120 17:43:14.953880 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-plbhs\" (UniqueName: \"kubernetes.io/projected/fd5b8a4a-4f31-4b17-95a3-669d1d7661c4-kube-api-access-plbhs\") pod \"nova-cell1-novncproxy-0\" (UID: \"fd5b8a4a-4f31-4b17-95a3-669d1d7661c4\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:43:15 crc kubenswrapper[4558]: I0120 17:43:15.057799 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-plbhs\" (UniqueName: \"kubernetes.io/projected/fd5b8a4a-4f31-4b17-95a3-669d1d7661c4-kube-api-access-plbhs\") pod \"nova-cell1-novncproxy-0\" (UID: \"fd5b8a4a-4f31-4b17-95a3-669d1d7661c4\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:43:15 crc kubenswrapper[4558]: I0120 17:43:15.059055 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd5b8a4a-4f31-4b17-95a3-669d1d7661c4-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"fd5b8a4a-4f31-4b17-95a3-669d1d7661c4\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:43:15 crc kubenswrapper[4558]: I0120 17:43:15.061901 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd5b8a4a-4f31-4b17-95a3-669d1d7661c4-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"fd5b8a4a-4f31-4b17-95a3-669d1d7661c4\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:43:15 crc kubenswrapper[4558]: I0120 17:43:15.062110 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/fd5b8a4a-4f31-4b17-95a3-669d1d7661c4-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"fd5b8a4a-4f31-4b17-95a3-669d1d7661c4\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:43:15 crc kubenswrapper[4558]: I0120 17:43:15.062356 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/fd5b8a4a-4f31-4b17-95a3-669d1d7661c4-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"fd5b8a4a-4f31-4b17-95a3-669d1d7661c4\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:43:15 crc kubenswrapper[4558]: I0120 17:43:15.069777 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/fd5b8a4a-4f31-4b17-95a3-669d1d7661c4-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"fd5b8a4a-4f31-4b17-95a3-669d1d7661c4\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:43:15 crc kubenswrapper[4558]: I0120 17:43:15.073837 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/fd5b8a4a-4f31-4b17-95a3-669d1d7661c4-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"fd5b8a4a-4f31-4b17-95a3-669d1d7661c4\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:43:15 crc kubenswrapper[4558]: I0120 17:43:15.076219 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd5b8a4a-4f31-4b17-95a3-669d1d7661c4-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"fd5b8a4a-4f31-4b17-95a3-669d1d7661c4\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:43:15 crc kubenswrapper[4558]: I0120 17:43:15.076791 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-plbhs\" (UniqueName: \"kubernetes.io/projected/fd5b8a4a-4f31-4b17-95a3-669d1d7661c4-kube-api-access-plbhs\") pod \"nova-cell1-novncproxy-0\" (UID: \"fd5b8a4a-4f31-4b17-95a3-669d1d7661c4\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:43:15 crc kubenswrapper[4558]: I0120 17:43:15.085889 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/fd5b8a4a-4f31-4b17-95a3-669d1d7661c4-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"fd5b8a4a-4f31-4b17-95a3-669d1d7661c4\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:43:15 crc kubenswrapper[4558]: I0120 17:43:15.099049 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:43:15 crc kubenswrapper[4558]: I0120 17:43:15.369836 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:43:15 crc kubenswrapper[4558]: I0120 17:43:15.413431 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:43:15 crc kubenswrapper[4558]: I0120 17:43:15.657746 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:43:15 crc kubenswrapper[4558]: I0120 17:43:15.707858 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"d791ceee-e87d-4238-9267-c2d2c53faf96","Type":"ContainerStarted","Data":"335e25f3fbe4b65eb8180e118c21059bde36ef7ba7c2a18579f83e6691932e37"} Jan 20 17:43:15 crc kubenswrapper[4558]: I0120 17:43:15.724637 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"fd5b8a4a-4f31-4b17-95a3-669d1d7661c4","Type":"ContainerStarted","Data":"66f57cede9d940bc8f1d33c78e7888e002b1e2515352aa0c41ad245fb4ffba99"} Jan 20 17:43:15 crc kubenswrapper[4558]: I0120 17:43:15.734234 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:15 crc kubenswrapper[4558]: I0120 17:43:15.737727 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:16 crc kubenswrapper[4558]: I0120 17:43:16.052116 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:43:16 crc kubenswrapper[4558]: I0120 17:43:16.060268 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:43:16 crc kubenswrapper[4558]: I0120 17:43:16.581177 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="16c17336-8a71-4dc0-af6b-4549cb220f1a" path="/var/lib/kubelet/pods/16c17336-8a71-4dc0-af6b-4549cb220f1a/volumes" Jan 20 17:43:16 crc kubenswrapper[4558]: I0120 17:43:16.719873 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovn-northd-0_4d2b64b6-6310-4348-a016-e7d8317e00d9/ovn-northd/0.log" Jan 20 17:43:16 crc kubenswrapper[4558]: I0120 17:43:16.719958 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:43:16 crc kubenswrapper[4558]: I0120 17:43:16.734022 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:43:16 crc kubenswrapper[4558]: I0120 17:43:16.770503 4558 generic.go:334] "Generic (PLEG): container finished" podID="d0f6996d-f07f-4cf5-8227-6bfed15e1370" containerID="39b250fe2c01ede889dca2fd85bb4ba7ecb4ff567dec04dccfbfad33fc9e2ce1" exitCode=0 Jan 20 17:43:16 crc kubenswrapper[4558]: I0120 17:43:16.770604 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"d0f6996d-f07f-4cf5-8227-6bfed15e1370","Type":"ContainerDied","Data":"39b250fe2c01ede889dca2fd85bb4ba7ecb4ff567dec04dccfbfad33fc9e2ce1"} Jan 20 17:43:16 crc kubenswrapper[4558]: I0120 17:43:16.770612 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:43:16 crc kubenswrapper[4558]: I0120 17:43:16.770634 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"d0f6996d-f07f-4cf5-8227-6bfed15e1370","Type":"ContainerDied","Data":"a69487bda3f88f157ce0be8641de7cad991146d7e796fea8bda97415ea84ae41"} Jan 20 17:43:16 crc kubenswrapper[4558]: I0120 17:43:16.770653 4558 scope.go:117] "RemoveContainer" containerID="39b250fe2c01ede889dca2fd85bb4ba7ecb4ff567dec04dccfbfad33fc9e2ce1" Jan 20 17:43:16 crc kubenswrapper[4558]: I0120 17:43:16.788788 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovn-northd-0_4d2b64b6-6310-4348-a016-e7d8317e00d9/ovn-northd/0.log" Jan 20 17:43:16 crc kubenswrapper[4558]: I0120 17:43:16.788824 4558 generic.go:334] "Generic (PLEG): container finished" podID="4d2b64b6-6310-4348-a016-e7d8317e00d9" containerID="80d1619f7e0d6d2bffa7bc9443ac70ae2f905fe8d2fed37e22b75d4144f64fe5" exitCode=139 Jan 20 17:43:16 crc kubenswrapper[4558]: I0120 17:43:16.788866 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"4d2b64b6-6310-4348-a016-e7d8317e00d9","Type":"ContainerDied","Data":"80d1619f7e0d6d2bffa7bc9443ac70ae2f905fe8d2fed37e22b75d4144f64fe5"} Jan 20 17:43:16 crc kubenswrapper[4558]: I0120 17:43:16.788884 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"4d2b64b6-6310-4348-a016-e7d8317e00d9","Type":"ContainerDied","Data":"e1d56266d8d5673b7b6627971e1dc6465a7f9a444268dc8898704703a6240197"} Jan 20 17:43:16 crc kubenswrapper[4558]: I0120 17:43:16.788959 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:43:16 crc kubenswrapper[4558]: I0120 17:43:16.803394 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"fd5b8a4a-4f31-4b17-95a3-669d1d7661c4","Type":"ContainerStarted","Data":"a03dfb5ac0209a3bc1e4b1831999713e3583bebd07521f3e1596b419cc04ea21"} Jan 20 17:43:16 crc kubenswrapper[4558]: I0120 17:43:16.803470 4558 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 20 17:43:16 crc kubenswrapper[4558]: I0120 17:43:16.803482 4558 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 20 17:43:16 crc kubenswrapper[4558]: I0120 17:43:16.821807 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" podStartSLOduration=2.821788555 podStartE2EDuration="2.821788555s" podCreationTimestamp="2026-01-20 17:43:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:43:16.820969264 +0000 UTC m=+3690.581307232" watchObservedRunningTime="2026-01-20 17:43:16.821788555 +0000 UTC m=+3690.582126522" Jan 20 17:43:16 crc kubenswrapper[4558]: I0120 17:43:16.828742 4558 scope.go:117] "RemoveContainer" containerID="acf769aacf60dcf25c180f7db8bbc004fddf751cbab36c2bcd5d1eac22d68fe8" Jan 20 17:43:16 crc kubenswrapper[4558]: I0120 17:43:16.846900 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/4d2b64b6-6310-4348-a016-e7d8317e00d9-ovn-rundir\") pod \"4d2b64b6-6310-4348-a016-e7d8317e00d9\" (UID: \"4d2b64b6-6310-4348-a016-e7d8317e00d9\") " Jan 20 17:43:16 crc 
kubenswrapper[4558]: I0120 17:43:16.846956 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0f6996d-f07f-4cf5-8227-6bfed15e1370-combined-ca-bundle\") pod \"d0f6996d-f07f-4cf5-8227-6bfed15e1370\" (UID: \"d0f6996d-f07f-4cf5-8227-6bfed15e1370\") " Jan 20 17:43:16 crc kubenswrapper[4558]: I0120 17:43:16.847309 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d0f6996d-f07f-4cf5-8227-6bfed15e1370-public-tls-certs\") pod \"d0f6996d-f07f-4cf5-8227-6bfed15e1370\" (UID: \"d0f6996d-f07f-4cf5-8227-6bfed15e1370\") " Jan 20 17:43:16 crc kubenswrapper[4558]: I0120 17:43:16.847342 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d0f6996d-f07f-4cf5-8227-6bfed15e1370-internal-tls-certs\") pod \"d0f6996d-f07f-4cf5-8227-6bfed15e1370\" (UID: \"d0f6996d-f07f-4cf5-8227-6bfed15e1370\") " Jan 20 17:43:16 crc kubenswrapper[4558]: I0120 17:43:16.847426 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d0f6996d-f07f-4cf5-8227-6bfed15e1370-logs\") pod \"d0f6996d-f07f-4cf5-8227-6bfed15e1370\" (UID: \"d0f6996d-f07f-4cf5-8227-6bfed15e1370\") " Jan 20 17:43:16 crc kubenswrapper[4558]: I0120 17:43:16.847461 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mgts5\" (UniqueName: \"kubernetes.io/projected/d0f6996d-f07f-4cf5-8227-6bfed15e1370-kube-api-access-mgts5\") pod \"d0f6996d-f07f-4cf5-8227-6bfed15e1370\" (UID: \"d0f6996d-f07f-4cf5-8227-6bfed15e1370\") " Jan 20 17:43:16 crc kubenswrapper[4558]: I0120 17:43:16.847501 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4d2b64b6-6310-4348-a016-e7d8317e00d9-scripts\") pod \"4d2b64b6-6310-4348-a016-e7d8317e00d9\" (UID: \"4d2b64b6-6310-4348-a016-e7d8317e00d9\") " Jan 20 17:43:16 crc kubenswrapper[4558]: I0120 17:43:16.847532 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d2b64b6-6310-4348-a016-e7d8317e00d9-combined-ca-bundle\") pod \"4d2b64b6-6310-4348-a016-e7d8317e00d9\" (UID: \"4d2b64b6-6310-4348-a016-e7d8317e00d9\") " Jan 20 17:43:16 crc kubenswrapper[4558]: I0120 17:43:16.847576 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4d2b64b6-6310-4348-a016-e7d8317e00d9-config\") pod \"4d2b64b6-6310-4348-a016-e7d8317e00d9\" (UID: \"4d2b64b6-6310-4348-a016-e7d8317e00d9\") " Jan 20 17:43:16 crc kubenswrapper[4558]: I0120 17:43:16.847605 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0f6996d-f07f-4cf5-8227-6bfed15e1370-config-data\") pod \"d0f6996d-f07f-4cf5-8227-6bfed15e1370\" (UID: \"d0f6996d-f07f-4cf5-8227-6bfed15e1370\") " Jan 20 17:43:16 crc kubenswrapper[4558]: I0120 17:43:16.847645 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htf4p\" (UniqueName: \"kubernetes.io/projected/4d2b64b6-6310-4348-a016-e7d8317e00d9-kube-api-access-htf4p\") pod \"4d2b64b6-6310-4348-a016-e7d8317e00d9\" (UID: \"4d2b64b6-6310-4348-a016-e7d8317e00d9\") " Jan 20 17:43:16 crc kubenswrapper[4558]: I0120 
17:43:16.847690 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/4d2b64b6-6310-4348-a016-e7d8317e00d9-ovn-northd-tls-certs\") pod \"4d2b64b6-6310-4348-a016-e7d8317e00d9\" (UID: \"4d2b64b6-6310-4348-a016-e7d8317e00d9\") " Jan 20 17:43:16 crc kubenswrapper[4558]: I0120 17:43:16.847735 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/4d2b64b6-6310-4348-a016-e7d8317e00d9-metrics-certs-tls-certs\") pod \"4d2b64b6-6310-4348-a016-e7d8317e00d9\" (UID: \"4d2b64b6-6310-4348-a016-e7d8317e00d9\") " Jan 20 17:43:16 crc kubenswrapper[4558]: I0120 17:43:16.848149 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4d2b64b6-6310-4348-a016-e7d8317e00d9-ovn-rundir" (OuterVolumeSpecName: "ovn-rundir") pod "4d2b64b6-6310-4348-a016-e7d8317e00d9" (UID: "4d2b64b6-6310-4348-a016-e7d8317e00d9"). InnerVolumeSpecName "ovn-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:43:16 crc kubenswrapper[4558]: I0120 17:43:16.850050 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4d2b64b6-6310-4348-a016-e7d8317e00d9-config" (OuterVolumeSpecName: "config") pod "4d2b64b6-6310-4348-a016-e7d8317e00d9" (UID: "4d2b64b6-6310-4348-a016-e7d8317e00d9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:43:16 crc kubenswrapper[4558]: I0120 17:43:16.850395 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4d2b64b6-6310-4348-a016-e7d8317e00d9-scripts" (OuterVolumeSpecName: "scripts") pod "4d2b64b6-6310-4348-a016-e7d8317e00d9" (UID: "4d2b64b6-6310-4348-a016-e7d8317e00d9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:43:16 crc kubenswrapper[4558]: I0120 17:43:16.852654 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d0f6996d-f07f-4cf5-8227-6bfed15e1370-kube-api-access-mgts5" (OuterVolumeSpecName: "kube-api-access-mgts5") pod "d0f6996d-f07f-4cf5-8227-6bfed15e1370" (UID: "d0f6996d-f07f-4cf5-8227-6bfed15e1370"). InnerVolumeSpecName "kube-api-access-mgts5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:43:16 crc kubenswrapper[4558]: I0120 17:43:16.853323 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d0f6996d-f07f-4cf5-8227-6bfed15e1370-logs" (OuterVolumeSpecName: "logs") pod "d0f6996d-f07f-4cf5-8227-6bfed15e1370" (UID: "d0f6996d-f07f-4cf5-8227-6bfed15e1370"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:43:16 crc kubenswrapper[4558]: I0120 17:43:16.853647 4558 reconciler_common.go:293] "Volume detached for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/4d2b64b6-6310-4348-a016-e7d8317e00d9-ovn-rundir\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:16 crc kubenswrapper[4558]: I0120 17:43:16.853670 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d0f6996d-f07f-4cf5-8227-6bfed15e1370-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:16 crc kubenswrapper[4558]: I0120 17:43:16.853681 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mgts5\" (UniqueName: \"kubernetes.io/projected/d0f6996d-f07f-4cf5-8227-6bfed15e1370-kube-api-access-mgts5\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:16 crc kubenswrapper[4558]: I0120 17:43:16.853691 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/4d2b64b6-6310-4348-a016-e7d8317e00d9-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:16 crc kubenswrapper[4558]: I0120 17:43:16.853699 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4d2b64b6-6310-4348-a016-e7d8317e00d9-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:16 crc kubenswrapper[4558]: I0120 17:43:16.855569 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4d2b64b6-6310-4348-a016-e7d8317e00d9-kube-api-access-htf4p" (OuterVolumeSpecName: "kube-api-access-htf4p") pod "4d2b64b6-6310-4348-a016-e7d8317e00d9" (UID: "4d2b64b6-6310-4348-a016-e7d8317e00d9"). InnerVolumeSpecName "kube-api-access-htf4p". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:43:16 crc kubenswrapper[4558]: I0120 17:43:16.878428 4558 scope.go:117] "RemoveContainer" containerID="39b250fe2c01ede889dca2fd85bb4ba7ecb4ff567dec04dccfbfad33fc9e2ce1" Jan 20 17:43:16 crc kubenswrapper[4558]: E0120 17:43:16.879351 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"39b250fe2c01ede889dca2fd85bb4ba7ecb4ff567dec04dccfbfad33fc9e2ce1\": container with ID starting with 39b250fe2c01ede889dca2fd85bb4ba7ecb4ff567dec04dccfbfad33fc9e2ce1 not found: ID does not exist" containerID="39b250fe2c01ede889dca2fd85bb4ba7ecb4ff567dec04dccfbfad33fc9e2ce1" Jan 20 17:43:16 crc kubenswrapper[4558]: I0120 17:43:16.879386 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"39b250fe2c01ede889dca2fd85bb4ba7ecb4ff567dec04dccfbfad33fc9e2ce1"} err="failed to get container status \"39b250fe2c01ede889dca2fd85bb4ba7ecb4ff567dec04dccfbfad33fc9e2ce1\": rpc error: code = NotFound desc = could not find container \"39b250fe2c01ede889dca2fd85bb4ba7ecb4ff567dec04dccfbfad33fc9e2ce1\": container with ID starting with 39b250fe2c01ede889dca2fd85bb4ba7ecb4ff567dec04dccfbfad33fc9e2ce1 not found: ID does not exist" Jan 20 17:43:16 crc kubenswrapper[4558]: I0120 17:43:16.879411 4558 scope.go:117] "RemoveContainer" containerID="acf769aacf60dcf25c180f7db8bbc004fddf751cbab36c2bcd5d1eac22d68fe8" Jan 20 17:43:16 crc kubenswrapper[4558]: E0120 17:43:16.884184 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"acf769aacf60dcf25c180f7db8bbc004fddf751cbab36c2bcd5d1eac22d68fe8\": container with ID starting with 
acf769aacf60dcf25c180f7db8bbc004fddf751cbab36c2bcd5d1eac22d68fe8 not found: ID does not exist" containerID="acf769aacf60dcf25c180f7db8bbc004fddf751cbab36c2bcd5d1eac22d68fe8" Jan 20 17:43:16 crc kubenswrapper[4558]: I0120 17:43:16.884223 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"acf769aacf60dcf25c180f7db8bbc004fddf751cbab36c2bcd5d1eac22d68fe8"} err="failed to get container status \"acf769aacf60dcf25c180f7db8bbc004fddf751cbab36c2bcd5d1eac22d68fe8\": rpc error: code = NotFound desc = could not find container \"acf769aacf60dcf25c180f7db8bbc004fddf751cbab36c2bcd5d1eac22d68fe8\": container with ID starting with acf769aacf60dcf25c180f7db8bbc004fddf751cbab36c2bcd5d1eac22d68fe8 not found: ID does not exist" Jan 20 17:43:16 crc kubenswrapper[4558]: I0120 17:43:16.884252 4558 scope.go:117] "RemoveContainer" containerID="e7e585f1de0828e950c0e7d6180c3e24f82d1e80ebef4c5a53c3cd4c1b15c7a5" Jan 20 17:43:16 crc kubenswrapper[4558]: I0120 17:43:16.887265 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4d2b64b6-6310-4348-a016-e7d8317e00d9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4d2b64b6-6310-4348-a016-e7d8317e00d9" (UID: "4d2b64b6-6310-4348-a016-e7d8317e00d9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:16 crc kubenswrapper[4558]: I0120 17:43:16.890316 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d0f6996d-f07f-4cf5-8227-6bfed15e1370-config-data" (OuterVolumeSpecName: "config-data") pod "d0f6996d-f07f-4cf5-8227-6bfed15e1370" (UID: "d0f6996d-f07f-4cf5-8227-6bfed15e1370"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:16 crc kubenswrapper[4558]: I0120 17:43:16.904508 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d0f6996d-f07f-4cf5-8227-6bfed15e1370-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d0f6996d-f07f-4cf5-8227-6bfed15e1370" (UID: "d0f6996d-f07f-4cf5-8227-6bfed15e1370"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:16 crc kubenswrapper[4558]: I0120 17:43:16.908186 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:16 crc kubenswrapper[4558]: I0120 17:43:16.908636 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:16 crc kubenswrapper[4558]: I0120 17:43:16.934614 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d0f6996d-f07f-4cf5-8227-6bfed15e1370-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "d0f6996d-f07f-4cf5-8227-6bfed15e1370" (UID: "d0f6996d-f07f-4cf5-8227-6bfed15e1370"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:16 crc kubenswrapper[4558]: I0120 17:43:16.966650 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0f6996d-f07f-4cf5-8227-6bfed15e1370-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:16 crc kubenswrapper[4558]: I0120 17:43:16.966679 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d0f6996d-f07f-4cf5-8227-6bfed15e1370-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:16 crc kubenswrapper[4558]: I0120 17:43:16.966689 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d2b64b6-6310-4348-a016-e7d8317e00d9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:16 crc kubenswrapper[4558]: I0120 17:43:16.966702 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0f6996d-f07f-4cf5-8227-6bfed15e1370-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:16 crc kubenswrapper[4558]: I0120 17:43:16.966712 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htf4p\" (UniqueName: \"kubernetes.io/projected/4d2b64b6-6310-4348-a016-e7d8317e00d9-kube-api-access-htf4p\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:16 crc kubenswrapper[4558]: I0120 17:43:16.967370 4558 scope.go:117] "RemoveContainer" containerID="80d1619f7e0d6d2bffa7bc9443ac70ae2f905fe8d2fed37e22b75d4144f64fe5" Jan 20 17:43:16 crc kubenswrapper[4558]: I0120 17:43:16.970465 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d0f6996d-f07f-4cf5-8227-6bfed15e1370-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "d0f6996d-f07f-4cf5-8227-6bfed15e1370" (UID: "d0f6996d-f07f-4cf5-8227-6bfed15e1370"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:16 crc kubenswrapper[4558]: I0120 17:43:16.984287 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4d2b64b6-6310-4348-a016-e7d8317e00d9-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "4d2b64b6-6310-4348-a016-e7d8317e00d9" (UID: "4d2b64b6-6310-4348-a016-e7d8317e00d9"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:17 crc kubenswrapper[4558]: I0120 17:43:17.000347 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4d2b64b6-6310-4348-a016-e7d8317e00d9-ovn-northd-tls-certs" (OuterVolumeSpecName: "ovn-northd-tls-certs") pod "4d2b64b6-6310-4348-a016-e7d8317e00d9" (UID: "4d2b64b6-6310-4348-a016-e7d8317e00d9"). InnerVolumeSpecName "ovn-northd-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:17 crc kubenswrapper[4558]: I0120 17:43:17.000508 4558 scope.go:117] "RemoveContainer" containerID="e7e585f1de0828e950c0e7d6180c3e24f82d1e80ebef4c5a53c3cd4c1b15c7a5" Jan 20 17:43:17 crc kubenswrapper[4558]: E0120 17:43:17.004318 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e7e585f1de0828e950c0e7d6180c3e24f82d1e80ebef4c5a53c3cd4c1b15c7a5\": container with ID starting with e7e585f1de0828e950c0e7d6180c3e24f82d1e80ebef4c5a53c3cd4c1b15c7a5 not found: ID does not exist" containerID="e7e585f1de0828e950c0e7d6180c3e24f82d1e80ebef4c5a53c3cd4c1b15c7a5" Jan 20 17:43:17 crc kubenswrapper[4558]: I0120 17:43:17.004367 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e7e585f1de0828e950c0e7d6180c3e24f82d1e80ebef4c5a53c3cd4c1b15c7a5"} err="failed to get container status \"e7e585f1de0828e950c0e7d6180c3e24f82d1e80ebef4c5a53c3cd4c1b15c7a5\": rpc error: code = NotFound desc = could not find container \"e7e585f1de0828e950c0e7d6180c3e24f82d1e80ebef4c5a53c3cd4c1b15c7a5\": container with ID starting with e7e585f1de0828e950c0e7d6180c3e24f82d1e80ebef4c5a53c3cd4c1b15c7a5 not found: ID does not exist" Jan 20 17:43:17 crc kubenswrapper[4558]: I0120 17:43:17.004393 4558 scope.go:117] "RemoveContainer" containerID="80d1619f7e0d6d2bffa7bc9443ac70ae2f905fe8d2fed37e22b75d4144f64fe5" Jan 20 17:43:17 crc kubenswrapper[4558]: E0120 17:43:17.004708 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"80d1619f7e0d6d2bffa7bc9443ac70ae2f905fe8d2fed37e22b75d4144f64fe5\": container with ID starting with 80d1619f7e0d6d2bffa7bc9443ac70ae2f905fe8d2fed37e22b75d4144f64fe5 not found: ID does not exist" containerID="80d1619f7e0d6d2bffa7bc9443ac70ae2f905fe8d2fed37e22b75d4144f64fe5" Jan 20 17:43:17 crc kubenswrapper[4558]: I0120 17:43:17.004746 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"80d1619f7e0d6d2bffa7bc9443ac70ae2f905fe8d2fed37e22b75d4144f64fe5"} err="failed to get container status \"80d1619f7e0d6d2bffa7bc9443ac70ae2f905fe8d2fed37e22b75d4144f64fe5\": rpc error: code = NotFound desc = could not find container \"80d1619f7e0d6d2bffa7bc9443ac70ae2f905fe8d2fed37e22b75d4144f64fe5\": container with ID starting with 80d1619f7e0d6d2bffa7bc9443ac70ae2f905fe8d2fed37e22b75d4144f64fe5 not found: ID does not exist" Jan 20 17:43:17 crc kubenswrapper[4558]: I0120 17:43:17.067646 4558 reconciler_common.go:293] "Volume detached for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/4d2b64b6-6310-4348-a016-e7d8317e00d9-ovn-northd-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:17 crc kubenswrapper[4558]: I0120 17:43:17.067676 4558 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/4d2b64b6-6310-4348-a016-e7d8317e00d9-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:17 crc kubenswrapper[4558]: I0120 17:43:17.067689 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d0f6996d-f07f-4cf5-8227-6bfed15e1370-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:17 crc kubenswrapper[4558]: I0120 17:43:17.112606 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:43:17 crc kubenswrapper[4558]: 
I0120 17:43:17.118748 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:43:17 crc kubenswrapper[4558]: I0120 17:43:17.124758 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:43:17 crc kubenswrapper[4558]: E0120 17:43:17.125221 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d2b64b6-6310-4348-a016-e7d8317e00d9" containerName="openstack-network-exporter" Jan 20 17:43:17 crc kubenswrapper[4558]: I0120 17:43:17.125240 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d2b64b6-6310-4348-a016-e7d8317e00d9" containerName="openstack-network-exporter" Jan 20 17:43:17 crc kubenswrapper[4558]: E0120 17:43:17.125264 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d0f6996d-f07f-4cf5-8227-6bfed15e1370" containerName="nova-api-api" Jan 20 17:43:17 crc kubenswrapper[4558]: I0120 17:43:17.125271 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0f6996d-f07f-4cf5-8227-6bfed15e1370" containerName="nova-api-api" Jan 20 17:43:17 crc kubenswrapper[4558]: E0120 17:43:17.125296 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d2b64b6-6310-4348-a016-e7d8317e00d9" containerName="ovn-northd" Jan 20 17:43:17 crc kubenswrapper[4558]: I0120 17:43:17.125302 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d2b64b6-6310-4348-a016-e7d8317e00d9" containerName="ovn-northd" Jan 20 17:43:17 crc kubenswrapper[4558]: E0120 17:43:17.125323 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d0f6996d-f07f-4cf5-8227-6bfed15e1370" containerName="nova-api-log" Jan 20 17:43:17 crc kubenswrapper[4558]: I0120 17:43:17.125329 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0f6996d-f07f-4cf5-8227-6bfed15e1370" containerName="nova-api-log" Jan 20 17:43:17 crc kubenswrapper[4558]: I0120 17:43:17.125518 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="4d2b64b6-6310-4348-a016-e7d8317e00d9" containerName="ovn-northd" Jan 20 17:43:17 crc kubenswrapper[4558]: I0120 17:43:17.125536 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="4d2b64b6-6310-4348-a016-e7d8317e00d9" containerName="openstack-network-exporter" Jan 20 17:43:17 crc kubenswrapper[4558]: I0120 17:43:17.125561 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d0f6996d-f07f-4cf5-8227-6bfed15e1370" containerName="nova-api-api" Jan 20 17:43:17 crc kubenswrapper[4558]: I0120 17:43:17.125572 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d0f6996d-f07f-4cf5-8227-6bfed15e1370" containerName="nova-api-log" Jan 20 17:43:17 crc kubenswrapper[4558]: I0120 17:43:17.127508 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:43:17 crc kubenswrapper[4558]: I0120 17:43:17.132832 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-api-config-data" Jan 20 17:43:17 crc kubenswrapper[4558]: I0120 17:43:17.133028 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-internal-svc" Jan 20 17:43:17 crc kubenswrapper[4558]: I0120 17:43:17.133151 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-public-svc" Jan 20 17:43:17 crc kubenswrapper[4558]: I0120 17:43:17.145701 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:43:17 crc kubenswrapper[4558]: I0120 17:43:17.164198 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:43:17 crc kubenswrapper[4558]: I0120 17:43:17.171750 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kwb8p\" (UniqueName: \"kubernetes.io/projected/0a246013-4fe7-4d92-a517-414b1c235db0-kube-api-access-kwb8p\") pod \"nova-api-0\" (UID: \"0a246013-4fe7-4d92-a517-414b1c235db0\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:43:17 crc kubenswrapper[4558]: I0120 17:43:17.171787 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a246013-4fe7-4d92-a517-414b1c235db0-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"0a246013-4fe7-4d92-a517-414b1c235db0\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:43:17 crc kubenswrapper[4558]: I0120 17:43:17.171810 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0a246013-4fe7-4d92-a517-414b1c235db0-logs\") pod \"nova-api-0\" (UID: \"0a246013-4fe7-4d92-a517-414b1c235db0\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:43:17 crc kubenswrapper[4558]: I0120 17:43:17.171838 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0a246013-4fe7-4d92-a517-414b1c235db0-config-data\") pod \"nova-api-0\" (UID: \"0a246013-4fe7-4d92-a517-414b1c235db0\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:43:17 crc kubenswrapper[4558]: I0120 17:43:17.171890 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0a246013-4fe7-4d92-a517-414b1c235db0-public-tls-certs\") pod \"nova-api-0\" (UID: \"0a246013-4fe7-4d92-a517-414b1c235db0\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:43:17 crc kubenswrapper[4558]: I0120 17:43:17.171937 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0a246013-4fe7-4d92-a517-414b1c235db0-internal-tls-certs\") pod \"nova-api-0\" (UID: \"0a246013-4fe7-4d92-a517-414b1c235db0\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:43:17 crc kubenswrapper[4558]: I0120 17:43:17.181616 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:43:17 crc kubenswrapper[4558]: I0120 17:43:17.205196 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:43:17 crc 
kubenswrapper[4558]: I0120 17:43:17.206703 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:43:17 crc kubenswrapper[4558]: I0120 17:43:17.233645 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovnnorthd-scripts" Jan 20 17:43:17 crc kubenswrapper[4558]: I0120 17:43:17.233923 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovnnorthd-config" Jan 20 17:43:17 crc kubenswrapper[4558]: I0120 17:43:17.234090 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ovnnorthd-ovnnorthd-dockercfg-rznpl" Jan 20 17:43:17 crc kubenswrapper[4558]: I0120 17:43:17.258301 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ovnnorthd-ovndbs" Jan 20 17:43:17 crc kubenswrapper[4558]: I0120 17:43:17.258490 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:43:17 crc kubenswrapper[4558]: I0120 17:43:17.277222 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0a246013-4fe7-4d92-a517-414b1c235db0-logs\") pod \"nova-api-0\" (UID: \"0a246013-4fe7-4d92-a517-414b1c235db0\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:43:17 crc kubenswrapper[4558]: I0120 17:43:17.277427 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0a246013-4fe7-4d92-a517-414b1c235db0-config-data\") pod \"nova-api-0\" (UID: \"0a246013-4fe7-4d92-a517-414b1c235db0\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:43:17 crc kubenswrapper[4558]: I0120 17:43:17.277625 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0a246013-4fe7-4d92-a517-414b1c235db0-public-tls-certs\") pod \"nova-api-0\" (UID: \"0a246013-4fe7-4d92-a517-414b1c235db0\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:43:17 crc kubenswrapper[4558]: I0120 17:43:17.277669 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/9db31a7a-b4bb-4c3b-99f4-b9855af99342-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"9db31a7a-b4bb-4c3b-99f4-b9855af99342\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:43:17 crc kubenswrapper[4558]: I0120 17:43:17.277767 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0a246013-4fe7-4d92-a517-414b1c235db0-internal-tls-certs\") pod \"nova-api-0\" (UID: \"0a246013-4fe7-4d92-a517-414b1c235db0\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:43:17 crc kubenswrapper[4558]: I0120 17:43:17.277819 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9db31a7a-b4bb-4c3b-99f4-b9855af99342-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"9db31a7a-b4bb-4c3b-99f4-b9855af99342\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:43:17 crc kubenswrapper[4558]: I0120 17:43:17.277868 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9db31a7a-b4bb-4c3b-99f4-b9855af99342-scripts\") pod \"ovn-northd-0\" (UID: 
\"9db31a7a-b4bb-4c3b-99f4-b9855af99342\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:43:17 crc kubenswrapper[4558]: I0120 17:43:17.277927 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/9db31a7a-b4bb-4c3b-99f4-b9855af99342-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"9db31a7a-b4bb-4c3b-99f4-b9855af99342\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:43:17 crc kubenswrapper[4558]: I0120 17:43:17.278051 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9db31a7a-b4bb-4c3b-99f4-b9855af99342-config\") pod \"ovn-northd-0\" (UID: \"9db31a7a-b4bb-4c3b-99f4-b9855af99342\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:43:17 crc kubenswrapper[4558]: I0120 17:43:17.278122 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kwb8p\" (UniqueName: \"kubernetes.io/projected/0a246013-4fe7-4d92-a517-414b1c235db0-kube-api-access-kwb8p\") pod \"nova-api-0\" (UID: \"0a246013-4fe7-4d92-a517-414b1c235db0\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:43:17 crc kubenswrapper[4558]: I0120 17:43:17.278149 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zpf52\" (UniqueName: \"kubernetes.io/projected/9db31a7a-b4bb-4c3b-99f4-b9855af99342-kube-api-access-zpf52\") pod \"ovn-northd-0\" (UID: \"9db31a7a-b4bb-4c3b-99f4-b9855af99342\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:43:17 crc kubenswrapper[4558]: I0120 17:43:17.278288 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a246013-4fe7-4d92-a517-414b1c235db0-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"0a246013-4fe7-4d92-a517-414b1c235db0\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:43:17 crc kubenswrapper[4558]: I0120 17:43:17.278319 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/9db31a7a-b4bb-4c3b-99f4-b9855af99342-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"9db31a7a-b4bb-4c3b-99f4-b9855af99342\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:43:17 crc kubenswrapper[4558]: I0120 17:43:17.279596 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0a246013-4fe7-4d92-a517-414b1c235db0-logs\") pod \"nova-api-0\" (UID: \"0a246013-4fe7-4d92-a517-414b1c235db0\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:43:17 crc kubenswrapper[4558]: I0120 17:43:17.283330 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0a246013-4fe7-4d92-a517-414b1c235db0-public-tls-certs\") pod \"nova-api-0\" (UID: \"0a246013-4fe7-4d92-a517-414b1c235db0\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:43:17 crc kubenswrapper[4558]: I0120 17:43:17.286659 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a246013-4fe7-4d92-a517-414b1c235db0-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"0a246013-4fe7-4d92-a517-414b1c235db0\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:43:17 crc kubenswrapper[4558]: I0120 17:43:17.292590 4558 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0a246013-4fe7-4d92-a517-414b1c235db0-config-data\") pod \"nova-api-0\" (UID: \"0a246013-4fe7-4d92-a517-414b1c235db0\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:43:17 crc kubenswrapper[4558]: I0120 17:43:17.308710 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kwb8p\" (UniqueName: \"kubernetes.io/projected/0a246013-4fe7-4d92-a517-414b1c235db0-kube-api-access-kwb8p\") pod \"nova-api-0\" (UID: \"0a246013-4fe7-4d92-a517-414b1c235db0\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:43:17 crc kubenswrapper[4558]: I0120 17:43:17.309518 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0a246013-4fe7-4d92-a517-414b1c235db0-internal-tls-certs\") pod \"nova-api-0\" (UID: \"0a246013-4fe7-4d92-a517-414b1c235db0\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:43:17 crc kubenswrapper[4558]: I0120 17:43:17.380847 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/9db31a7a-b4bb-4c3b-99f4-b9855af99342-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"9db31a7a-b4bb-4c3b-99f4-b9855af99342\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:43:17 crc kubenswrapper[4558]: I0120 17:43:17.380973 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9db31a7a-b4bb-4c3b-99f4-b9855af99342-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"9db31a7a-b4bb-4c3b-99f4-b9855af99342\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:43:17 crc kubenswrapper[4558]: I0120 17:43:17.381010 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9db31a7a-b4bb-4c3b-99f4-b9855af99342-scripts\") pod \"ovn-northd-0\" (UID: \"9db31a7a-b4bb-4c3b-99f4-b9855af99342\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:43:17 crc kubenswrapper[4558]: I0120 17:43:17.381042 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/9db31a7a-b4bb-4c3b-99f4-b9855af99342-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"9db31a7a-b4bb-4c3b-99f4-b9855af99342\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:43:17 crc kubenswrapper[4558]: I0120 17:43:17.381101 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9db31a7a-b4bb-4c3b-99f4-b9855af99342-config\") pod \"ovn-northd-0\" (UID: \"9db31a7a-b4bb-4c3b-99f4-b9855af99342\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:43:17 crc kubenswrapper[4558]: I0120 17:43:17.381140 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zpf52\" (UniqueName: \"kubernetes.io/projected/9db31a7a-b4bb-4c3b-99f4-b9855af99342-kube-api-access-zpf52\") pod \"ovn-northd-0\" (UID: \"9db31a7a-b4bb-4c3b-99f4-b9855af99342\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:43:17 crc kubenswrapper[4558]: I0120 17:43:17.381191 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/9db31a7a-b4bb-4c3b-99f4-b9855af99342-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: 
\"9db31a7a-b4bb-4c3b-99f4-b9855af99342\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:43:17 crc kubenswrapper[4558]: I0120 17:43:17.381519 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/9db31a7a-b4bb-4c3b-99f4-b9855af99342-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"9db31a7a-b4bb-4c3b-99f4-b9855af99342\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:43:17 crc kubenswrapper[4558]: I0120 17:43:17.382519 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9db31a7a-b4bb-4c3b-99f4-b9855af99342-config\") pod \"ovn-northd-0\" (UID: \"9db31a7a-b4bb-4c3b-99f4-b9855af99342\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:43:17 crc kubenswrapper[4558]: I0120 17:43:17.382931 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9db31a7a-b4bb-4c3b-99f4-b9855af99342-scripts\") pod \"ovn-northd-0\" (UID: \"9db31a7a-b4bb-4c3b-99f4-b9855af99342\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:43:17 crc kubenswrapper[4558]: I0120 17:43:17.385860 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/9db31a7a-b4bb-4c3b-99f4-b9855af99342-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"9db31a7a-b4bb-4c3b-99f4-b9855af99342\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:43:17 crc kubenswrapper[4558]: I0120 17:43:17.397379 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9db31a7a-b4bb-4c3b-99f4-b9855af99342-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"9db31a7a-b4bb-4c3b-99f4-b9855af99342\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:43:17 crc kubenswrapper[4558]: I0120 17:43:17.399571 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zpf52\" (UniqueName: \"kubernetes.io/projected/9db31a7a-b4bb-4c3b-99f4-b9855af99342-kube-api-access-zpf52\") pod \"ovn-northd-0\" (UID: \"9db31a7a-b4bb-4c3b-99f4-b9855af99342\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:43:17 crc kubenswrapper[4558]: I0120 17:43:17.399840 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/9db31a7a-b4bb-4c3b-99f4-b9855af99342-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"9db31a7a-b4bb-4c3b-99f4-b9855af99342\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:43:17 crc kubenswrapper[4558]: I0120 17:43:17.468150 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:43:17 crc kubenswrapper[4558]: I0120 17:43:17.545845 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:43:17 crc kubenswrapper[4558]: I0120 17:43:17.801288 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:17 crc kubenswrapper[4558]: I0120 17:43:17.813722 4558 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 20 17:43:17 crc kubenswrapper[4558]: I0120 17:43:17.920130 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:43:18 crc kubenswrapper[4558]: I0120 17:43:18.053088 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:43:18 crc kubenswrapper[4558]: I0120 17:43:18.057833 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:18 crc kubenswrapper[4558]: I0120 17:43:18.235934 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-api-5796d4c4d8-2xz7t"] Jan 20 17:43:18 crc kubenswrapper[4558]: I0120 17:43:18.239355 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-api-5796d4c4d8-2xz7t" Jan 20 17:43:18 crc kubenswrapper[4558]: I0120 17:43:18.257114 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-api-5796d4c4d8-2xz7t"] Jan 20 17:43:18 crc kubenswrapper[4558]: I0120 17:43:18.304987 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8738e339-fd1b-4122-899c-7b9521688aba-config-data-custom\") pod \"barbican-api-5796d4c4d8-2xz7t\" (UID: \"8738e339-fd1b-4122-899c-7b9521688aba\") " pod="openstack-kuttl-tests/barbican-api-5796d4c4d8-2xz7t" Jan 20 17:43:18 crc kubenswrapper[4558]: I0120 17:43:18.305030 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8738e339-fd1b-4122-899c-7b9521688aba-combined-ca-bundle\") pod \"barbican-api-5796d4c4d8-2xz7t\" (UID: \"8738e339-fd1b-4122-899c-7b9521688aba\") " pod="openstack-kuttl-tests/barbican-api-5796d4c4d8-2xz7t" Jan 20 17:43:18 crc kubenswrapper[4558]: I0120 17:43:18.305063 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8kfcr\" (UniqueName: \"kubernetes.io/projected/8738e339-fd1b-4122-899c-7b9521688aba-kube-api-access-8kfcr\") pod \"barbican-api-5796d4c4d8-2xz7t\" (UID: \"8738e339-fd1b-4122-899c-7b9521688aba\") " pod="openstack-kuttl-tests/barbican-api-5796d4c4d8-2xz7t" Jan 20 17:43:18 crc kubenswrapper[4558]: I0120 17:43:18.305448 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8738e339-fd1b-4122-899c-7b9521688aba-public-tls-certs\") pod \"barbican-api-5796d4c4d8-2xz7t\" (UID: \"8738e339-fd1b-4122-899c-7b9521688aba\") " pod="openstack-kuttl-tests/barbican-api-5796d4c4d8-2xz7t" Jan 20 17:43:18 crc kubenswrapper[4558]: I0120 17:43:18.305737 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8738e339-fd1b-4122-899c-7b9521688aba-config-data\") pod \"barbican-api-5796d4c4d8-2xz7t\" (UID: \"8738e339-fd1b-4122-899c-7b9521688aba\") " 
pod="openstack-kuttl-tests/barbican-api-5796d4c4d8-2xz7t" Jan 20 17:43:18 crc kubenswrapper[4558]: I0120 17:43:18.305832 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8738e339-fd1b-4122-899c-7b9521688aba-logs\") pod \"barbican-api-5796d4c4d8-2xz7t\" (UID: \"8738e339-fd1b-4122-899c-7b9521688aba\") " pod="openstack-kuttl-tests/barbican-api-5796d4c4d8-2xz7t" Jan 20 17:43:18 crc kubenswrapper[4558]: I0120 17:43:18.305963 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8738e339-fd1b-4122-899c-7b9521688aba-internal-tls-certs\") pod \"barbican-api-5796d4c4d8-2xz7t\" (UID: \"8738e339-fd1b-4122-899c-7b9521688aba\") " pod="openstack-kuttl-tests/barbican-api-5796d4c4d8-2xz7t" Jan 20 17:43:18 crc kubenswrapper[4558]: I0120 17:43:18.407853 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8738e339-fd1b-4122-899c-7b9521688aba-config-data\") pod \"barbican-api-5796d4c4d8-2xz7t\" (UID: \"8738e339-fd1b-4122-899c-7b9521688aba\") " pod="openstack-kuttl-tests/barbican-api-5796d4c4d8-2xz7t" Jan 20 17:43:18 crc kubenswrapper[4558]: I0120 17:43:18.407930 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8738e339-fd1b-4122-899c-7b9521688aba-logs\") pod \"barbican-api-5796d4c4d8-2xz7t\" (UID: \"8738e339-fd1b-4122-899c-7b9521688aba\") " pod="openstack-kuttl-tests/barbican-api-5796d4c4d8-2xz7t" Jan 20 17:43:18 crc kubenswrapper[4558]: I0120 17:43:18.407978 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8738e339-fd1b-4122-899c-7b9521688aba-internal-tls-certs\") pod \"barbican-api-5796d4c4d8-2xz7t\" (UID: \"8738e339-fd1b-4122-899c-7b9521688aba\") " pod="openstack-kuttl-tests/barbican-api-5796d4c4d8-2xz7t" Jan 20 17:43:18 crc kubenswrapper[4558]: I0120 17:43:18.408019 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8738e339-fd1b-4122-899c-7b9521688aba-config-data-custom\") pod \"barbican-api-5796d4c4d8-2xz7t\" (UID: \"8738e339-fd1b-4122-899c-7b9521688aba\") " pod="openstack-kuttl-tests/barbican-api-5796d4c4d8-2xz7t" Jan 20 17:43:18 crc kubenswrapper[4558]: I0120 17:43:18.408039 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8738e339-fd1b-4122-899c-7b9521688aba-combined-ca-bundle\") pod \"barbican-api-5796d4c4d8-2xz7t\" (UID: \"8738e339-fd1b-4122-899c-7b9521688aba\") " pod="openstack-kuttl-tests/barbican-api-5796d4c4d8-2xz7t" Jan 20 17:43:18 crc kubenswrapper[4558]: I0120 17:43:18.408655 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8kfcr\" (UniqueName: \"kubernetes.io/projected/8738e339-fd1b-4122-899c-7b9521688aba-kube-api-access-8kfcr\") pod \"barbican-api-5796d4c4d8-2xz7t\" (UID: \"8738e339-fd1b-4122-899c-7b9521688aba\") " pod="openstack-kuttl-tests/barbican-api-5796d4c4d8-2xz7t" Jan 20 17:43:18 crc kubenswrapper[4558]: I0120 17:43:18.408473 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8738e339-fd1b-4122-899c-7b9521688aba-logs\") pod 
\"barbican-api-5796d4c4d8-2xz7t\" (UID: \"8738e339-fd1b-4122-899c-7b9521688aba\") " pod="openstack-kuttl-tests/barbican-api-5796d4c4d8-2xz7t" Jan 20 17:43:18 crc kubenswrapper[4558]: I0120 17:43:18.408872 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8738e339-fd1b-4122-899c-7b9521688aba-public-tls-certs\") pod \"barbican-api-5796d4c4d8-2xz7t\" (UID: \"8738e339-fd1b-4122-899c-7b9521688aba\") " pod="openstack-kuttl-tests/barbican-api-5796d4c4d8-2xz7t" Jan 20 17:43:18 crc kubenswrapper[4558]: I0120 17:43:18.413265 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8738e339-fd1b-4122-899c-7b9521688aba-config-data\") pod \"barbican-api-5796d4c4d8-2xz7t\" (UID: \"8738e339-fd1b-4122-899c-7b9521688aba\") " pod="openstack-kuttl-tests/barbican-api-5796d4c4d8-2xz7t" Jan 20 17:43:18 crc kubenswrapper[4558]: I0120 17:43:18.415741 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8738e339-fd1b-4122-899c-7b9521688aba-config-data-custom\") pod \"barbican-api-5796d4c4d8-2xz7t\" (UID: \"8738e339-fd1b-4122-899c-7b9521688aba\") " pod="openstack-kuttl-tests/barbican-api-5796d4c4d8-2xz7t" Jan 20 17:43:18 crc kubenswrapper[4558]: I0120 17:43:18.415967 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8738e339-fd1b-4122-899c-7b9521688aba-internal-tls-certs\") pod \"barbican-api-5796d4c4d8-2xz7t\" (UID: \"8738e339-fd1b-4122-899c-7b9521688aba\") " pod="openstack-kuttl-tests/barbican-api-5796d4c4d8-2xz7t" Jan 20 17:43:18 crc kubenswrapper[4558]: I0120 17:43:18.416091 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8738e339-fd1b-4122-899c-7b9521688aba-combined-ca-bundle\") pod \"barbican-api-5796d4c4d8-2xz7t\" (UID: \"8738e339-fd1b-4122-899c-7b9521688aba\") " pod="openstack-kuttl-tests/barbican-api-5796d4c4d8-2xz7t" Jan 20 17:43:18 crc kubenswrapper[4558]: I0120 17:43:18.422618 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8738e339-fd1b-4122-899c-7b9521688aba-public-tls-certs\") pod \"barbican-api-5796d4c4d8-2xz7t\" (UID: \"8738e339-fd1b-4122-899c-7b9521688aba\") " pod="openstack-kuttl-tests/barbican-api-5796d4c4d8-2xz7t" Jan 20 17:43:18 crc kubenswrapper[4558]: I0120 17:43:18.428290 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8kfcr\" (UniqueName: \"kubernetes.io/projected/8738e339-fd1b-4122-899c-7b9521688aba-kube-api-access-8kfcr\") pod \"barbican-api-5796d4c4d8-2xz7t\" (UID: \"8738e339-fd1b-4122-899c-7b9521688aba\") " pod="openstack-kuttl-tests/barbican-api-5796d4c4d8-2xz7t" Jan 20 17:43:18 crc kubenswrapper[4558]: I0120 17:43:18.575304 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4d2b64b6-6310-4348-a016-e7d8317e00d9" path="/var/lib/kubelet/pods/4d2b64b6-6310-4348-a016-e7d8317e00d9/volumes" Jan 20 17:43:18 crc kubenswrapper[4558]: I0120 17:43:18.576073 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d0f6996d-f07f-4cf5-8227-6bfed15e1370" path="/var/lib/kubelet/pods/d0f6996d-f07f-4cf5-8227-6bfed15e1370/volumes" Jan 20 17:43:18 crc kubenswrapper[4558]: I0120 17:43:18.588145 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:43:18 crc kubenswrapper[4558]: I0120 17:43:18.588225 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-api-5796d4c4d8-2xz7t" Jan 20 17:43:18 crc kubenswrapper[4558]: I0120 17:43:18.588484 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-api-0" podUID="70cf1941-8f7a-4328-9338-e5d7739173ac" containerName="cinder-api" containerID="cri-o://f6da7d2cb6d5c94ace71a8c784725894f1856e6ba2322d49bb6f31837cd6c608" gracePeriod=30 Jan 20 17:43:18 crc kubenswrapper[4558]: I0120 17:43:18.588395 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-api-0" podUID="70cf1941-8f7a-4328-9338-e5d7739173ac" containerName="cinder-api-log" containerID="cri-o://9fe05fc8fb03d4b0c68d34c739144ce4f22e171fb9d33638aacaab26959ef775" gracePeriod=30 Jan 20 17:43:18 crc kubenswrapper[4558]: I0120 17:43:18.827129 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"9db31a7a-b4bb-4c3b-99f4-b9855af99342","Type":"ContainerStarted","Data":"a0ad02a3d302f3830db2cc15f3d92955d193e02b01825c96bcf3760f81f92872"} Jan 20 17:43:18 crc kubenswrapper[4558]: I0120 17:43:18.827609 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"9db31a7a-b4bb-4c3b-99f4-b9855af99342","Type":"ContainerStarted","Data":"4dca8ca5509273f800ab7c5cde1eec1ca33f8167df3366715098e139aa83d898"} Jan 20 17:43:18 crc kubenswrapper[4558]: I0120 17:43:18.827621 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"9db31a7a-b4bb-4c3b-99f4-b9855af99342","Type":"ContainerStarted","Data":"27cdc17700d66aa3bf250ce6fc169806feb0e709db51d96cb587c820f6435dbc"} Jan 20 17:43:18 crc kubenswrapper[4558]: I0120 17:43:18.827763 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:43:18 crc kubenswrapper[4558]: I0120 17:43:18.833503 4558 generic.go:334] "Generic (PLEG): container finished" podID="70cf1941-8f7a-4328-9338-e5d7739173ac" containerID="9fe05fc8fb03d4b0c68d34c739144ce4f22e171fb9d33638aacaab26959ef775" exitCode=143 Jan 20 17:43:18 crc kubenswrapper[4558]: I0120 17:43:18.833544 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"70cf1941-8f7a-4328-9338-e5d7739173ac","Type":"ContainerDied","Data":"9fe05fc8fb03d4b0c68d34c739144ce4f22e171fb9d33638aacaab26959ef775"} Jan 20 17:43:18 crc kubenswrapper[4558]: I0120 17:43:18.835253 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"0a246013-4fe7-4d92-a517-414b1c235db0","Type":"ContainerStarted","Data":"f073a0f7710dadab5f69a3b61d534dd16273339ca1f705b805b71959affdaedb"} Jan 20 17:43:18 crc kubenswrapper[4558]: I0120 17:43:18.835303 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"0a246013-4fe7-4d92-a517-414b1c235db0","Type":"ContainerStarted","Data":"3bca3c440d169db0b20179eec25960ffaa1be1f116d15347d51dd6ac49e4557f"} Jan 20 17:43:18 crc kubenswrapper[4558]: I0120 17:43:18.835316 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"0a246013-4fe7-4d92-a517-414b1c235db0","Type":"ContainerStarted","Data":"6841f72195dc77d3ac9b0b3acd8ea9360e5cf6e492f27fe2d62f58793c3de722"} 
Jan 20 17:43:18 crc kubenswrapper[4558]: I0120 17:43:18.847849 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ovn-northd-0" podStartSLOduration=1.847832613 podStartE2EDuration="1.847832613s" podCreationTimestamp="2026-01-20 17:43:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:43:18.844416099 +0000 UTC m=+3692.604754066" watchObservedRunningTime="2026-01-20 17:43:18.847832613 +0000 UTC m=+3692.608170580" Jan 20 17:43:18 crc kubenswrapper[4558]: I0120 17:43:18.866601 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-api-0" podStartSLOduration=1.866584731 podStartE2EDuration="1.866584731s" podCreationTimestamp="2026-01-20 17:43:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:43:18.862468411 +0000 UTC m=+3692.622806379" watchObservedRunningTime="2026-01-20 17:43:18.866584731 +0000 UTC m=+3692.626922698" Jan 20 17:43:19 crc kubenswrapper[4558]: I0120 17:43:19.011119 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-api-5796d4c4d8-2xz7t"] Jan 20 17:43:19 crc kubenswrapper[4558]: I0120 17:43:19.850835 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-5796d4c4d8-2xz7t" event={"ID":"8738e339-fd1b-4122-899c-7b9521688aba","Type":"ContainerStarted","Data":"1ef3a181c7498e05935b57f64f9e58a17d10768de064a519dd385d5add3525db"} Jan 20 17:43:19 crc kubenswrapper[4558]: I0120 17:43:19.851235 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-5796d4c4d8-2xz7t" event={"ID":"8738e339-fd1b-4122-899c-7b9521688aba","Type":"ContainerStarted","Data":"49eac06a5bbf59d10a57d3ba182a98399ea7f8d86e783046aa38fe5883748c5e"} Jan 20 17:43:19 crc kubenswrapper[4558]: I0120 17:43:19.851264 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-5796d4c4d8-2xz7t" event={"ID":"8738e339-fd1b-4122-899c-7b9521688aba","Type":"ContainerStarted","Data":"ab8da78490180f79fecc1c88007f9f076d9c4858abe2a0bc5d26311929546c92"} Jan 20 17:43:19 crc kubenswrapper[4558]: I0120 17:43:19.871441 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-api-5796d4c4d8-2xz7t" podStartSLOduration=1.8714255419999999 podStartE2EDuration="1.871425542s" podCreationTimestamp="2026-01-20 17:43:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:43:19.867825464 +0000 UTC m=+3693.628163430" watchObservedRunningTime="2026-01-20 17:43:19.871425542 +0000 UTC m=+3693.631763509" Jan 20 17:43:20 crc kubenswrapper[4558]: I0120 17:43:20.102951 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:43:20 crc kubenswrapper[4558]: I0120 17:43:20.860464 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/barbican-api-5796d4c4d8-2xz7t" Jan 20 17:43:20 crc kubenswrapper[4558]: I0120 17:43:20.860893 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/barbican-api-5796d4c4d8-2xz7t" Jan 20 17:43:21 crc kubenswrapper[4558]: I0120 17:43:21.870936 4558 generic.go:334] "Generic (PLEG): 
container finished" podID="70cf1941-8f7a-4328-9338-e5d7739173ac" containerID="f6da7d2cb6d5c94ace71a8c784725894f1856e6ba2322d49bb6f31837cd6c608" exitCode=0 Jan 20 17:43:21 crc kubenswrapper[4558]: I0120 17:43:21.871020 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"70cf1941-8f7a-4328-9338-e5d7739173ac","Type":"ContainerDied","Data":"f6da7d2cb6d5c94ace71a8c784725894f1856e6ba2322d49bb6f31837cd6c608"} Jan 20 17:43:22 crc kubenswrapper[4558]: I0120 17:43:22.158584 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:22 crc kubenswrapper[4558]: I0120 17:43:22.185763 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70cf1941-8f7a-4328-9338-e5d7739173ac-config-data\") pod \"70cf1941-8f7a-4328-9338-e5d7739173ac\" (UID: \"70cf1941-8f7a-4328-9338-e5d7739173ac\") " Jan 20 17:43:22 crc kubenswrapper[4558]: I0120 17:43:22.185835 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/70cf1941-8f7a-4328-9338-e5d7739173ac-public-tls-certs\") pod \"70cf1941-8f7a-4328-9338-e5d7739173ac\" (UID: \"70cf1941-8f7a-4328-9338-e5d7739173ac\") " Jan 20 17:43:22 crc kubenswrapper[4558]: I0120 17:43:22.185902 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/70cf1941-8f7a-4328-9338-e5d7739173ac-etc-machine-id\") pod \"70cf1941-8f7a-4328-9338-e5d7739173ac\" (UID: \"70cf1941-8f7a-4328-9338-e5d7739173ac\") " Jan 20 17:43:22 crc kubenswrapper[4558]: I0120 17:43:22.185951 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70cf1941-8f7a-4328-9338-e5d7739173ac-combined-ca-bundle\") pod \"70cf1941-8f7a-4328-9338-e5d7739173ac\" (UID: \"70cf1941-8f7a-4328-9338-e5d7739173ac\") " Jan 20 17:43:22 crc kubenswrapper[4558]: I0120 17:43:22.186016 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/70cf1941-8f7a-4328-9338-e5d7739173ac-config-data-custom\") pod \"70cf1941-8f7a-4328-9338-e5d7739173ac\" (UID: \"70cf1941-8f7a-4328-9338-e5d7739173ac\") " Jan 20 17:43:22 crc kubenswrapper[4558]: I0120 17:43:22.186041 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/70cf1941-8f7a-4328-9338-e5d7739173ac-internal-tls-certs\") pod \"70cf1941-8f7a-4328-9338-e5d7739173ac\" (UID: \"70cf1941-8f7a-4328-9338-e5d7739173ac\") " Jan 20 17:43:22 crc kubenswrapper[4558]: I0120 17:43:22.186132 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bmv29\" (UniqueName: \"kubernetes.io/projected/70cf1941-8f7a-4328-9338-e5d7739173ac-kube-api-access-bmv29\") pod \"70cf1941-8f7a-4328-9338-e5d7739173ac\" (UID: \"70cf1941-8f7a-4328-9338-e5d7739173ac\") " Jan 20 17:43:22 crc kubenswrapper[4558]: I0120 17:43:22.186105 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/70cf1941-8f7a-4328-9338-e5d7739173ac-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "70cf1941-8f7a-4328-9338-e5d7739173ac" (UID: "70cf1941-8f7a-4328-9338-e5d7739173ac"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:43:22 crc kubenswrapper[4558]: I0120 17:43:22.186191 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/70cf1941-8f7a-4328-9338-e5d7739173ac-logs\") pod \"70cf1941-8f7a-4328-9338-e5d7739173ac\" (UID: \"70cf1941-8f7a-4328-9338-e5d7739173ac\") " Jan 20 17:43:22 crc kubenswrapper[4558]: I0120 17:43:22.186223 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/70cf1941-8f7a-4328-9338-e5d7739173ac-scripts\") pod \"70cf1941-8f7a-4328-9338-e5d7739173ac\" (UID: \"70cf1941-8f7a-4328-9338-e5d7739173ac\") " Jan 20 17:43:22 crc kubenswrapper[4558]: I0120 17:43:22.186607 4558 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/70cf1941-8f7a-4328-9338-e5d7739173ac-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:22 crc kubenswrapper[4558]: I0120 17:43:22.187881 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/70cf1941-8f7a-4328-9338-e5d7739173ac-logs" (OuterVolumeSpecName: "logs") pod "70cf1941-8f7a-4328-9338-e5d7739173ac" (UID: "70cf1941-8f7a-4328-9338-e5d7739173ac"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:43:22 crc kubenswrapper[4558]: I0120 17:43:22.194249 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/70cf1941-8f7a-4328-9338-e5d7739173ac-kube-api-access-bmv29" (OuterVolumeSpecName: "kube-api-access-bmv29") pod "70cf1941-8f7a-4328-9338-e5d7739173ac" (UID: "70cf1941-8f7a-4328-9338-e5d7739173ac"). InnerVolumeSpecName "kube-api-access-bmv29". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:43:22 crc kubenswrapper[4558]: I0120 17:43:22.208304 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70cf1941-8f7a-4328-9338-e5d7739173ac-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "70cf1941-8f7a-4328-9338-e5d7739173ac" (UID: "70cf1941-8f7a-4328-9338-e5d7739173ac"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:22 crc kubenswrapper[4558]: I0120 17:43:22.230199 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70cf1941-8f7a-4328-9338-e5d7739173ac-scripts" (OuterVolumeSpecName: "scripts") pod "70cf1941-8f7a-4328-9338-e5d7739173ac" (UID: "70cf1941-8f7a-4328-9338-e5d7739173ac"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:22 crc kubenswrapper[4558]: I0120 17:43:22.243315 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70cf1941-8f7a-4328-9338-e5d7739173ac-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "70cf1941-8f7a-4328-9338-e5d7739173ac" (UID: "70cf1941-8f7a-4328-9338-e5d7739173ac"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:22 crc kubenswrapper[4558]: I0120 17:43:22.279247 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70cf1941-8f7a-4328-9338-e5d7739173ac-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "70cf1941-8f7a-4328-9338-e5d7739173ac" (UID: "70cf1941-8f7a-4328-9338-e5d7739173ac"). 
InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:22 crc kubenswrapper[4558]: I0120 17:43:22.282914 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70cf1941-8f7a-4328-9338-e5d7739173ac-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "70cf1941-8f7a-4328-9338-e5d7739173ac" (UID: "70cf1941-8f7a-4328-9338-e5d7739173ac"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:22 crc kubenswrapper[4558]: I0120 17:43:22.286207 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70cf1941-8f7a-4328-9338-e5d7739173ac-config-data" (OuterVolumeSpecName: "config-data") pod "70cf1941-8f7a-4328-9338-e5d7739173ac" (UID: "70cf1941-8f7a-4328-9338-e5d7739173ac"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:22 crc kubenswrapper[4558]: I0120 17:43:22.289142 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70cf1941-8f7a-4328-9338-e5d7739173ac-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:22 crc kubenswrapper[4558]: I0120 17:43:22.289193 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/70cf1941-8f7a-4328-9338-e5d7739173ac-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:22 crc kubenswrapper[4558]: I0120 17:43:22.289209 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/70cf1941-8f7a-4328-9338-e5d7739173ac-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:22 crc kubenswrapper[4558]: I0120 17:43:22.289223 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bmv29\" (UniqueName: \"kubernetes.io/projected/70cf1941-8f7a-4328-9338-e5d7739173ac-kube-api-access-bmv29\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:22 crc kubenswrapper[4558]: I0120 17:43:22.289264 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/70cf1941-8f7a-4328-9338-e5d7739173ac-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:22 crc kubenswrapper[4558]: I0120 17:43:22.289275 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/70cf1941-8f7a-4328-9338-e5d7739173ac-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:22 crc kubenswrapper[4558]: I0120 17:43:22.289284 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70cf1941-8f7a-4328-9338-e5d7739173ac-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:22 crc kubenswrapper[4558]: I0120 17:43:22.289295 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/70cf1941-8f7a-4328-9338-e5d7739173ac-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:22 crc kubenswrapper[4558]: I0120 17:43:22.888122 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:43:22 crc kubenswrapper[4558]: I0120 17:43:22.888615 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="ea5dfbd0-37c4-46a1-afad-77e85e35d7fb" containerName="nova-scheduler-scheduler" 
containerID="cri-o://42c29ec80666322d27f85a6e6798fa5fcabfb0b124569ce2fbc426f02c3bf8fe" gracePeriod=30 Jan 20 17:43:22 crc kubenswrapper[4558]: I0120 17:43:22.901421 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"70cf1941-8f7a-4328-9338-e5d7739173ac","Type":"ContainerDied","Data":"03d011d8bc46061b6c11957a3832fad93110bfd8e9fda1e1907aff3afde5ba04"} Jan 20 17:43:22 crc kubenswrapper[4558]: I0120 17:43:22.901475 4558 scope.go:117] "RemoveContainer" containerID="f6da7d2cb6d5c94ace71a8c784725894f1856e6ba2322d49bb6f31837cd6c608" Jan 20 17:43:22 crc kubenswrapper[4558]: I0120 17:43:22.901514 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:22 crc kubenswrapper[4558]: I0120 17:43:22.938248 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:43:22 crc kubenswrapper[4558]: I0120 17:43:22.938434 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="0a246013-4fe7-4d92-a517-414b1c235db0" containerName="nova-api-log" containerID="cri-o://3bca3c440d169db0b20179eec25960ffaa1be1f116d15347d51dd6ac49e4557f" gracePeriod=30 Jan 20 17:43:22 crc kubenswrapper[4558]: I0120 17:43:22.938594 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="0a246013-4fe7-4d92-a517-414b1c235db0" containerName="nova-api-api" containerID="cri-o://f073a0f7710dadab5f69a3b61d534dd16273339ca1f705b805b71959affdaedb" gracePeriod=30 Jan 20 17:43:22 crc kubenswrapper[4558]: I0120 17:43:22.959238 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-57496b565d-kwb5f"] Jan 20 17:43:22 crc kubenswrapper[4558]: E0120 17:43:22.959755 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70cf1941-8f7a-4328-9338-e5d7739173ac" containerName="cinder-api" Jan 20 17:43:22 crc kubenswrapper[4558]: I0120 17:43:22.959771 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="70cf1941-8f7a-4328-9338-e5d7739173ac" containerName="cinder-api" Jan 20 17:43:22 crc kubenswrapper[4558]: E0120 17:43:22.959794 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70cf1941-8f7a-4328-9338-e5d7739173ac" containerName="cinder-api-log" Jan 20 17:43:22 crc kubenswrapper[4558]: I0120 17:43:22.959801 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="70cf1941-8f7a-4328-9338-e5d7739173ac" containerName="cinder-api-log" Jan 20 17:43:22 crc kubenswrapper[4558]: I0120 17:43:22.960020 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="70cf1941-8f7a-4328-9338-e5d7739173ac" containerName="cinder-api" Jan 20 17:43:22 crc kubenswrapper[4558]: I0120 17:43:22.960055 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="70cf1941-8f7a-4328-9338-e5d7739173ac" containerName="cinder-api-log" Jan 20 17:43:22 crc kubenswrapper[4558]: I0120 17:43:22.961064 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-57496b565d-kwb5f" Jan 20 17:43:22 crc kubenswrapper[4558]: I0120 17:43:22.990230 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:43:22 crc kubenswrapper[4558]: I0120 17:43:22.990462 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovn-northd-0" podUID="9db31a7a-b4bb-4c3b-99f4-b9855af99342" containerName="ovn-northd" containerID="cri-o://4dca8ca5509273f800ab7c5cde1eec1ca33f8167df3366715098e139aa83d898" gracePeriod=30 Jan 20 17:43:22 crc kubenswrapper[4558]: I0120 17:43:22.990620 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovn-northd-0" podUID="9db31a7a-b4bb-4c3b-99f4-b9855af99342" containerName="openstack-network-exporter" containerID="cri-o://a0ad02a3d302f3830db2cc15f3d92955d193e02b01825c96bcf3760f81f92872" gracePeriod=30 Jan 20 17:43:22 crc kubenswrapper[4558]: I0120 17:43:22.991638 4558 scope.go:117] "RemoveContainer" containerID="9fe05fc8fb03d4b0c68d34c739144ce4f22e171fb9d33638aacaab26959ef775" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.028248 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd5cfc55-8cd9-4a76-87b8-4050b8f1030f-config-data\") pod \"barbican-keystone-listener-57496b565d-kwb5f\" (UID: \"fd5cfc55-8cd9-4a76-87b8-4050b8f1030f\") " pod="openstack-kuttl-tests/barbican-keystone-listener-57496b565d-kwb5f" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.028297 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fd5cfc55-8cd9-4a76-87b8-4050b8f1030f-config-data-custom\") pod \"barbican-keystone-listener-57496b565d-kwb5f\" (UID: \"fd5cfc55-8cd9-4a76-87b8-4050b8f1030f\") " pod="openstack-kuttl-tests/barbican-keystone-listener-57496b565d-kwb5f" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.028455 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ssgsh\" (UniqueName: \"kubernetes.io/projected/fd5cfc55-8cd9-4a76-87b8-4050b8f1030f-kube-api-access-ssgsh\") pod \"barbican-keystone-listener-57496b565d-kwb5f\" (UID: \"fd5cfc55-8cd9-4a76-87b8-4050b8f1030f\") " pod="openstack-kuttl-tests/barbican-keystone-listener-57496b565d-kwb5f" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.028516 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fd5cfc55-8cd9-4a76-87b8-4050b8f1030f-logs\") pod \"barbican-keystone-listener-57496b565d-kwb5f\" (UID: \"fd5cfc55-8cd9-4a76-87b8-4050b8f1030f\") " pod="openstack-kuttl-tests/barbican-keystone-listener-57496b565d-kwb5f" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.028538 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd5cfc55-8cd9-4a76-87b8-4050b8f1030f-combined-ca-bundle\") pod \"barbican-keystone-listener-57496b565d-kwb5f\" (UID: \"fd5cfc55-8cd9-4a76-87b8-4050b8f1030f\") " pod="openstack-kuttl-tests/barbican-keystone-listener-57496b565d-kwb5f" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.032412 4558 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack-kuttl-tests/barbican-worker-7975b7ff57-c5czg"] Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.034318 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-7975b7ff57-c5czg" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.118087 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-57496b565d-kwb5f"] Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.141497 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd5cfc55-8cd9-4a76-87b8-4050b8f1030f-config-data\") pod \"barbican-keystone-listener-57496b565d-kwb5f\" (UID: \"fd5cfc55-8cd9-4a76-87b8-4050b8f1030f\") " pod="openstack-kuttl-tests/barbican-keystone-listener-57496b565d-kwb5f" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.141601 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fd5cfc55-8cd9-4a76-87b8-4050b8f1030f-config-data-custom\") pod \"barbican-keystone-listener-57496b565d-kwb5f\" (UID: \"fd5cfc55-8cd9-4a76-87b8-4050b8f1030f\") " pod="openstack-kuttl-tests/barbican-keystone-listener-57496b565d-kwb5f" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.141776 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2-combined-ca-bundle\") pod \"barbican-worker-7975b7ff57-c5czg\" (UID: \"1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2\") " pod="openstack-kuttl-tests/barbican-worker-7975b7ff57-c5czg" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.141898 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2-logs\") pod \"barbican-worker-7975b7ff57-c5czg\" (UID: \"1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2\") " pod="openstack-kuttl-tests/barbican-worker-7975b7ff57-c5czg" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.142054 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-prc26\" (UniqueName: \"kubernetes.io/projected/1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2-kube-api-access-prc26\") pod \"barbican-worker-7975b7ff57-c5czg\" (UID: \"1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2\") " pod="openstack-kuttl-tests/barbican-worker-7975b7ff57-c5czg" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.142077 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2-config-data\") pod \"barbican-worker-7975b7ff57-c5czg\" (UID: \"1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2\") " pod="openstack-kuttl-tests/barbican-worker-7975b7ff57-c5czg" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.146480 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ssgsh\" (UniqueName: \"kubernetes.io/projected/fd5cfc55-8cd9-4a76-87b8-4050b8f1030f-kube-api-access-ssgsh\") pod \"barbican-keystone-listener-57496b565d-kwb5f\" (UID: \"fd5cfc55-8cd9-4a76-87b8-4050b8f1030f\") " pod="openstack-kuttl-tests/barbican-keystone-listener-57496b565d-kwb5f" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.146550 4558 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2-config-data-custom\") pod \"barbican-worker-7975b7ff57-c5czg\" (UID: \"1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2\") " pod="openstack-kuttl-tests/barbican-worker-7975b7ff57-c5czg" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.147023 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fd5cfc55-8cd9-4a76-87b8-4050b8f1030f-logs\") pod \"barbican-keystone-listener-57496b565d-kwb5f\" (UID: \"fd5cfc55-8cd9-4a76-87b8-4050b8f1030f\") " pod="openstack-kuttl-tests/barbican-keystone-listener-57496b565d-kwb5f" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.147075 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd5cfc55-8cd9-4a76-87b8-4050b8f1030f-combined-ca-bundle\") pod \"barbican-keystone-listener-57496b565d-kwb5f\" (UID: \"fd5cfc55-8cd9-4a76-87b8-4050b8f1030f\") " pod="openstack-kuttl-tests/barbican-keystone-listener-57496b565d-kwb5f" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.149217 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fd5cfc55-8cd9-4a76-87b8-4050b8f1030f-logs\") pod \"barbican-keystone-listener-57496b565d-kwb5f\" (UID: \"fd5cfc55-8cd9-4a76-87b8-4050b8f1030f\") " pod="openstack-kuttl-tests/barbican-keystone-listener-57496b565d-kwb5f" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.155965 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fd5cfc55-8cd9-4a76-87b8-4050b8f1030f-config-data-custom\") pod \"barbican-keystone-listener-57496b565d-kwb5f\" (UID: \"fd5cfc55-8cd9-4a76-87b8-4050b8f1030f\") " pod="openstack-kuttl-tests/barbican-keystone-listener-57496b565d-kwb5f" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.156006 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd5cfc55-8cd9-4a76-87b8-4050b8f1030f-combined-ca-bundle\") pod \"barbican-keystone-listener-57496b565d-kwb5f\" (UID: \"fd5cfc55-8cd9-4a76-87b8-4050b8f1030f\") " pod="openstack-kuttl-tests/barbican-keystone-listener-57496b565d-kwb5f" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.194439 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-worker-7975b7ff57-c5czg"] Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.214416 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd5cfc55-8cd9-4a76-87b8-4050b8f1030f-config-data\") pod \"barbican-keystone-listener-57496b565d-kwb5f\" (UID: \"fd5cfc55-8cd9-4a76-87b8-4050b8f1030f\") " pod="openstack-kuttl-tests/barbican-keystone-listener-57496b565d-kwb5f" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.237299 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.251561 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2-logs\") pod \"barbican-worker-7975b7ff57-c5czg\" (UID: \"1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2\") " 
pod="openstack-kuttl-tests/barbican-worker-7975b7ff57-c5czg" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.252234 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-prc26\" (UniqueName: \"kubernetes.io/projected/1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2-kube-api-access-prc26\") pod \"barbican-worker-7975b7ff57-c5czg\" (UID: \"1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2\") " pod="openstack-kuttl-tests/barbican-worker-7975b7ff57-c5czg" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.252302 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2-config-data\") pod \"barbican-worker-7975b7ff57-c5czg\" (UID: \"1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2\") " pod="openstack-kuttl-tests/barbican-worker-7975b7ff57-c5czg" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.252365 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2-config-data-custom\") pod \"barbican-worker-7975b7ff57-c5czg\" (UID: \"1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2\") " pod="openstack-kuttl-tests/barbican-worker-7975b7ff57-c5czg" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.253338 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2-combined-ca-bundle\") pod \"barbican-worker-7975b7ff57-c5czg\" (UID: \"1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2\") " pod="openstack-kuttl-tests/barbican-worker-7975b7ff57-c5czg" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.258365 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2-combined-ca-bundle\") pod \"barbican-worker-7975b7ff57-c5czg\" (UID: \"1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2\") " pod="openstack-kuttl-tests/barbican-worker-7975b7ff57-c5czg" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.259620 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2-logs\") pod \"barbican-worker-7975b7ff57-c5czg\" (UID: \"1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2\") " pod="openstack-kuttl-tests/barbican-worker-7975b7ff57-c5czg" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.264079 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ssgsh\" (UniqueName: \"kubernetes.io/projected/fd5cfc55-8cd9-4a76-87b8-4050b8f1030f-kube-api-access-ssgsh\") pod \"barbican-keystone-listener-57496b565d-kwb5f\" (UID: \"fd5cfc55-8cd9-4a76-87b8-4050b8f1030f\") " pod="openstack-kuttl-tests/barbican-keystone-listener-57496b565d-kwb5f" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.278189 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2-config-data\") pod \"barbican-worker-7975b7ff57-c5czg\" (UID: \"1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2\") " pod="openstack-kuttl-tests/barbican-worker-7975b7ff57-c5czg" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.298220 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.303069 
4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2-config-data-custom\") pod \"barbican-worker-7975b7ff57-c5czg\" (UID: \"1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2\") " pod="openstack-kuttl-tests/barbican-worker-7975b7ff57-c5czg" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.311390 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.311678 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/memcached-0" podUID="a5db7bd5-e0c6-4645-ab63-3a2a21fdc854" containerName="memcached" containerID="cri-o://a50b4edbabca8fd422d0afd87910c0ef1f309e8061f08ee60f8118612b4fe104" gracePeriod=30 Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.337642 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-57496b565d-kwb5f" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.349845 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.350094 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-scheduler-0" podUID="25cfe04f-2194-405f-a9d7-82181e8ac22a" containerName="probe" containerID="cri-o://9aec7229608a50e4ca2cab47655081015208900db81e7a5801eb3c70cf100002" gracePeriod=30 Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.350297 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-scheduler-0" podUID="25cfe04f-2194-405f-a9d7-82181e8ac22a" containerName="cinder-scheduler" containerID="cri-o://d856400d868c5a18aeb6d59839565030a9d6d928c1563362781a0530b884b4e7" gracePeriod=30 Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.368454 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-prc26\" (UniqueName: \"kubernetes.io/projected/1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2-kube-api-access-prc26\") pod \"barbican-worker-7975b7ff57-c5czg\" (UID: \"1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2\") " pod="openstack-kuttl-tests/barbican-worker-7975b7ff57-c5czg" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.377559 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.379500 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.383936 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-cinder-internal-svc" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.384356 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-api-config-data" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.384505 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.384736 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="5fb29028-042a-4108-a63d-a6cd215a6c31" containerName="glance-log" containerID="cri-o://aa17c31d66a9fe9d9ba2ad19998790f47fa790295400fa6c7baccd8f6d22aa0e" gracePeriod=30 Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.384989 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="5fb29028-042a-4108-a63d-a6cd215a6c31" containerName="glance-httpd" containerID="cri-o://c5a1a31136c32563a3ef2c47370606c762a9054e380e6b74ad6d51b5c145876e" gracePeriod=30 Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.392218 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-cinder-public-svc" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.396010 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-api-5796d4c4d8-2xz7t"] Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.396178 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-api-5796d4c4d8-2xz7t" podUID="8738e339-fd1b-4122-899c-7b9521688aba" containerName="barbican-api-log" containerID="cri-o://49eac06a5bbf59d10a57d3ba182a98399ea7f8d86e783046aa38fe5883748c5e" gracePeriod=30 Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.396273 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-api-5796d4c4d8-2xz7t" podUID="8738e339-fd1b-4122-899c-7b9521688aba" containerName="barbican-api" containerID="cri-o://1ef3a181c7498e05935b57f64f9e58a17d10768de064a519dd385d5add3525db" gracePeriod=30 Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.428347 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.461283 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a651cf04-6082-49fc-813c-f9de65420553-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"a651cf04-6082-49fc-813c-f9de65420553\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.461372 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a651cf04-6082-49fc-813c-f9de65420553-public-tls-certs\") pod \"cinder-api-0\" (UID: \"a651cf04-6082-49fc-813c-f9de65420553\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.461402 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a651cf04-6082-49fc-813c-f9de65420553-config-data\") pod \"cinder-api-0\" (UID: \"a651cf04-6082-49fc-813c-f9de65420553\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.461544 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a651cf04-6082-49fc-813c-f9de65420553-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"a651cf04-6082-49fc-813c-f9de65420553\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.461611 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a651cf04-6082-49fc-813c-f9de65420553-config-data-custom\") pod \"cinder-api-0\" (UID: \"a651cf04-6082-49fc-813c-f9de65420553\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.461653 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a651cf04-6082-49fc-813c-f9de65420553-logs\") pod \"cinder-api-0\" (UID: \"a651cf04-6082-49fc-813c-f9de65420553\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.461860 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vscgj\" (UniqueName: \"kubernetes.io/projected/a651cf04-6082-49fc-813c-f9de65420553-kube-api-access-vscgj\") pod \"cinder-api-0\" (UID: \"a651cf04-6082-49fc-813c-f9de65420553\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.461953 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a651cf04-6082-49fc-813c-f9de65420553-scripts\") pod \"cinder-api-0\" (UID: \"a651cf04-6082-49fc-813c-f9de65420553\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.462113 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a651cf04-6082-49fc-813c-f9de65420553-etc-machine-id\") pod \"cinder-api-0\" (UID: \"a651cf04-6082-49fc-813c-f9de65420553\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.462308 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.462638 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovsdbserver-sb-0" podUID="0394e10b-3811-4eef-a9ef-a785e1574649" containerName="openstack-network-exporter" containerID="cri-o://45124db692168b507b34896779f420550426a3011e2b00b8912ca2208f58799e" gracePeriod=300 Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.496541 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-api-7bc45756fb-278vp"] Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.499672 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-api-7bc45756fb-278vp" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.502131 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.502726 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="a18f9da2-9c64-4047-ad02-206ac9c8aa61" containerName="nova-metadata-log" containerID="cri-o://b1459a5f2896c316432b666d6a44cd9116c1172c341136db96a48899a0dd4884" gracePeriod=30 Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.503436 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="a18f9da2-9c64-4047-ad02-206ac9c8aa61" containerName="nova-metadata-metadata" containerID="cri-o://b93052a48246c97ba463ca4f80d69e9a79fa1ad0762a5e959855ffc2d454361b" gracePeriod=30 Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.514627 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-7975b7ff57-c5czg" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.540238 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.540473 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" podUID="fd5b8a4a-4f31-4b17-95a3-669d1d7661c4" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://a03dfb5ac0209a3bc1e4b1831999713e3583bebd07521f3e1596b419cc04ea21" gracePeriod=30 Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.561338 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.563882 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovsdbserver-sb-0" podUID="0394e10b-3811-4eef-a9ef-a785e1574649" containerName="ovsdbserver-sb" containerID="cri-o://baba51653eb144a4334b8651e3e9970d529abaea40101e2e03249e068d4b3b9b" gracePeriod=300 Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.566924 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-api-7bc45756fb-278vp"] Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.571370 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a651cf04-6082-49fc-813c-f9de65420553-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"a651cf04-6082-49fc-813c-f9de65420553\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.571426 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/432ec648-18a7-416a-85cc-409a30976a67-logs\") pod \"barbican-api-7bc45756fb-278vp\" (UID: \"432ec648-18a7-416a-85cc-409a30976a67\") " pod="openstack-kuttl-tests/barbican-api-7bc45756fb-278vp" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.571459 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/432ec648-18a7-416a-85cc-409a30976a67-config-data-custom\") pod \"barbican-api-7bc45756fb-278vp\" (UID: 
\"432ec648-18a7-416a-85cc-409a30976a67\") " pod="openstack-kuttl-tests/barbican-api-7bc45756fb-278vp" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.571494 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a651cf04-6082-49fc-813c-f9de65420553-config-data-custom\") pod \"cinder-api-0\" (UID: \"a651cf04-6082-49fc-813c-f9de65420553\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.571537 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a651cf04-6082-49fc-813c-f9de65420553-logs\") pod \"cinder-api-0\" (UID: \"a651cf04-6082-49fc-813c-f9de65420553\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.571564 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/432ec648-18a7-416a-85cc-409a30976a67-internal-tls-certs\") pod \"barbican-api-7bc45756fb-278vp\" (UID: \"432ec648-18a7-416a-85cc-409a30976a67\") " pod="openstack-kuttl-tests/barbican-api-7bc45756fb-278vp" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.571593 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/432ec648-18a7-416a-85cc-409a30976a67-public-tls-certs\") pod \"barbican-api-7bc45756fb-278vp\" (UID: \"432ec648-18a7-416a-85cc-409a30976a67\") " pod="openstack-kuttl-tests/barbican-api-7bc45756fb-278vp" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.571628 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vscgj\" (UniqueName: \"kubernetes.io/projected/a651cf04-6082-49fc-813c-f9de65420553-kube-api-access-vscgj\") pod \"cinder-api-0\" (UID: \"a651cf04-6082-49fc-813c-f9de65420553\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.571668 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/432ec648-18a7-416a-85cc-409a30976a67-combined-ca-bundle\") pod \"barbican-api-7bc45756fb-278vp\" (UID: \"432ec648-18a7-416a-85cc-409a30976a67\") " pod="openstack-kuttl-tests/barbican-api-7bc45756fb-278vp" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.571810 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a651cf04-6082-49fc-813c-f9de65420553-scripts\") pod \"cinder-api-0\" (UID: \"a651cf04-6082-49fc-813c-f9de65420553\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.571886 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a651cf04-6082-49fc-813c-f9de65420553-etc-machine-id\") pod \"cinder-api-0\" (UID: \"a651cf04-6082-49fc-813c-f9de65420553\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.571935 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/432ec648-18a7-416a-85cc-409a30976a67-config-data\") pod \"barbican-api-7bc45756fb-278vp\" (UID: 
\"432ec648-18a7-416a-85cc-409a30976a67\") " pod="openstack-kuttl-tests/barbican-api-7bc45756fb-278vp" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.571952 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-78lmx\" (UniqueName: \"kubernetes.io/projected/432ec648-18a7-416a-85cc-409a30976a67-kube-api-access-78lmx\") pod \"barbican-api-7bc45756fb-278vp\" (UID: \"432ec648-18a7-416a-85cc-409a30976a67\") " pod="openstack-kuttl-tests/barbican-api-7bc45756fb-278vp" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.571984 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a651cf04-6082-49fc-813c-f9de65420553-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"a651cf04-6082-49fc-813c-f9de65420553\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.572013 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a651cf04-6082-49fc-813c-f9de65420553-public-tls-certs\") pod \"cinder-api-0\" (UID: \"a651cf04-6082-49fc-813c-f9de65420553\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.572041 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a651cf04-6082-49fc-813c-f9de65420553-config-data\") pod \"cinder-api-0\" (UID: \"a651cf04-6082-49fc-813c-f9de65420553\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.573801 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-7b8cd49c48-k928p"] Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.574088 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/placement-7b8cd49c48-k928p" podUID="feedfa19-f182-4f7f-8b50-3d16c11e3510" containerName="placement-log" containerID="cri-o://2238e5e4c9121a6f35ac99b7eefaed26a7b72dd8f5d91d2775ab73bb36849ee2" gracePeriod=30 Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.574896 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/placement-7b8cd49c48-k928p" podUID="feedfa19-f182-4f7f-8b50-3d16c11e3510" containerName="placement-api" containerID="cri-o://4a06fdd89fd78aff5dd0753c417b307f7a680d4b1887883dc3eccee2ed84c9ec" gracePeriod=30 Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.577800 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a651cf04-6082-49fc-813c-f9de65420553-etc-machine-id\") pod \"cinder-api-0\" (UID: \"a651cf04-6082-49fc-813c-f9de65420553\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.578191 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a651cf04-6082-49fc-813c-f9de65420553-logs\") pod \"cinder-api-0\" (UID: \"a651cf04-6082-49fc-813c-f9de65420553\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.578230 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.578397 4558 kuberuntime_container.go:808] "Killing container 
with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="c62cee5b-4dd8-4a07-995c-e1d0530d695b" containerName="glance-log" containerID="cri-o://5b28187d77dd8f75fd99f025ba3fddc2ac465afeab96893a305bda76c6c1c56c" gracePeriod=30 Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.578528 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="c62cee5b-4dd8-4a07-995c-e1d0530d695b" containerName="glance-httpd" containerID="cri-o://9cd39e12244c6e213ae6f15945a045746c5236b6784d7e40141838e71fa71966" gracePeriod=30 Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.584830 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-5676986846-92lbc"] Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.585308 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-5676986846-92lbc" podUID="8877ab44-638b-4880-97fb-305726a5c1a6" containerName="neutron-api" containerID="cri-o://418895b26e5117800f2a53dae040b43bb1b03510640c0a1a0ca6b67577a9df79" gracePeriod=30 Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.586503 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-5676986846-92lbc" podUID="8877ab44-638b-4880-97fb-305726a5c1a6" containerName="neutron-httpd" containerID="cri-o://99d260241a6e2248384349724b544d3292e8cbd5470d0de4208392622b91d851" gracePeriod=30 Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.589375 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a651cf04-6082-49fc-813c-f9de65420553-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"a651cf04-6082-49fc-813c-f9de65420553\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.590382 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a651cf04-6082-49fc-813c-f9de65420553-public-tls-certs\") pod \"cinder-api-0\" (UID: \"a651cf04-6082-49fc-813c-f9de65420553\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.594901 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a651cf04-6082-49fc-813c-f9de65420553-scripts\") pod \"cinder-api-0\" (UID: \"a651cf04-6082-49fc-813c-f9de65420553\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.595629 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a651cf04-6082-49fc-813c-f9de65420553-config-data\") pod \"cinder-api-0\" (UID: \"a651cf04-6082-49fc-813c-f9de65420553\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.601343 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a651cf04-6082-49fc-813c-f9de65420553-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"a651cf04-6082-49fc-813c-f9de65420553\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.603239 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/a651cf04-6082-49fc-813c-f9de65420553-config-data-custom\") pod \"cinder-api-0\" (UID: \"a651cf04-6082-49fc-813c-f9de65420553\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.605347 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:43:23 crc kubenswrapper[4558]: E0120 17:43:23.606376 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[kube-api-access-vscgj], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack-kuttl-tests/cinder-api-0" podUID="a651cf04-6082-49fc-813c-f9de65420553" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.610555 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/neutron-5676986846-92lbc" podUID="8877ab44-638b-4880-97fb-305726a5c1a6" containerName="neutron-httpd" probeResult="failure" output="Get \"https://10.217.1.20:9696/\": EOF" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.617782 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vscgj\" (UniqueName: \"kubernetes.io/projected/a651cf04-6082-49fc-813c-f9de65420553-kube-api-access-vscgj\") pod \"cinder-api-0\" (UID: \"a651cf04-6082-49fc-813c-f9de65420553\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.642493 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.650856 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/placement-976c48458-wzswq"] Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.652702 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-976c48458-wzswq" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.656734 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-6bb454b456-wjhln"] Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.656956 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/keystone-6bb454b456-wjhln" podUID="7935bdc8-1434-418a-a3ad-b165ee8be23e" containerName="keystone-api" containerID="cri-o://d6a76d69efdb8d9de2921accc3eae11c4dd079ceb576dcf301178d78b9a86d25" gracePeriod=30 Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.674716 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/keystone-6bb454b456-wjhln" podUID="7935bdc8-1434-418a-a3ad-b165ee8be23e" containerName="keystone-api" probeResult="failure" output="Get \"https://10.217.1.12:5000/v3\": read tcp 10.217.0.2:52698->10.217.1.12:5000: read: connection reset by peer" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.676734 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/432ec648-18a7-416a-85cc-409a30976a67-combined-ca-bundle\") pod \"barbican-api-7bc45756fb-278vp\" (UID: \"432ec648-18a7-416a-85cc-409a30976a67\") " pod="openstack-kuttl-tests/barbican-api-7bc45756fb-278vp" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.676874 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/432ec648-18a7-416a-85cc-409a30976a67-config-data\") pod \"barbican-api-7bc45756fb-278vp\" (UID: \"432ec648-18a7-416a-85cc-409a30976a67\") " pod="openstack-kuttl-tests/barbican-api-7bc45756fb-278vp" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.676902 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-78lmx\" (UniqueName: \"kubernetes.io/projected/432ec648-18a7-416a-85cc-409a30976a67-kube-api-access-78lmx\") pod \"barbican-api-7bc45756fb-278vp\" (UID: \"432ec648-18a7-416a-85cc-409a30976a67\") " pod="openstack-kuttl-tests/barbican-api-7bc45756fb-278vp" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.677038 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/432ec648-18a7-416a-85cc-409a30976a67-logs\") pod \"barbican-api-7bc45756fb-278vp\" (UID: \"432ec648-18a7-416a-85cc-409a30976a67\") " pod="openstack-kuttl-tests/barbican-api-7bc45756fb-278vp" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.677067 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/432ec648-18a7-416a-85cc-409a30976a67-config-data-custom\") pod \"barbican-api-7bc45756fb-278vp\" (UID: \"432ec648-18a7-416a-85cc-409a30976a67\") " pod="openstack-kuttl-tests/barbican-api-7bc45756fb-278vp" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.677147 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/432ec648-18a7-416a-85cc-409a30976a67-internal-tls-certs\") pod \"barbican-api-7bc45756fb-278vp\" (UID: \"432ec648-18a7-416a-85cc-409a30976a67\") " pod="openstack-kuttl-tests/barbican-api-7bc45756fb-278vp" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.677198 4558 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/432ec648-18a7-416a-85cc-409a30976a67-public-tls-certs\") pod \"barbican-api-7bc45756fb-278vp\" (UID: \"432ec648-18a7-416a-85cc-409a30976a67\") " pod="openstack-kuttl-tests/barbican-api-7bc45756fb-278vp" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.685770 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/432ec648-18a7-416a-85cc-409a30976a67-logs\") pod \"barbican-api-7bc45756fb-278vp\" (UID: \"432ec648-18a7-416a-85cc-409a30976a67\") " pod="openstack-kuttl-tests/barbican-api-7bc45756fb-278vp" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.689988 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/432ec648-18a7-416a-85cc-409a30976a67-combined-ca-bundle\") pod \"barbican-api-7bc45756fb-278vp\" (UID: \"432ec648-18a7-416a-85cc-409a30976a67\") " pod="openstack-kuttl-tests/barbican-api-7bc45756fb-278vp" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.697366 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-976c48458-wzswq"] Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.714623 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-66bfc8789d-nsc64"] Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.716436 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-66bfc8789d-nsc64" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.719692 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/432ec648-18a7-416a-85cc-409a30976a67-internal-tls-certs\") pod \"barbican-api-7bc45756fb-278vp\" (UID: \"432ec648-18a7-416a-85cc-409a30976a67\") " pod="openstack-kuttl-tests/barbican-api-7bc45756fb-278vp" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.719752 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/432ec648-18a7-416a-85cc-409a30976a67-public-tls-certs\") pod \"barbican-api-7bc45756fb-278vp\" (UID: \"432ec648-18a7-416a-85cc-409a30976a67\") " pod="openstack-kuttl-tests/barbican-api-7bc45756fb-278vp" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.719804 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/432ec648-18a7-416a-85cc-409a30976a67-config-data-custom\") pod \"barbican-api-7bc45756fb-278vp\" (UID: \"432ec648-18a7-416a-85cc-409a30976a67\") " pod="openstack-kuttl-tests/barbican-api-7bc45756fb-278vp" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.719873 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-685599669b-nv6cc"] Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.720237 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-78lmx\" (UniqueName: \"kubernetes.io/projected/432ec648-18a7-416a-85cc-409a30976a67-kube-api-access-78lmx\") pod \"barbican-api-7bc45756fb-278vp\" (UID: \"432ec648-18a7-416a-85cc-409a30976a67\") " pod="openstack-kuttl-tests/barbican-api-7bc45756fb-278vp" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.720404 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" 
(UniqueName: \"kubernetes.io/secret/432ec648-18a7-416a-85cc-409a30976a67-config-data\") pod \"barbican-api-7bc45756fb-278vp\" (UID: \"432ec648-18a7-416a-85cc-409a30976a67\") " pod="openstack-kuttl-tests/barbican-api-7bc45756fb-278vp" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.722259 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-685599669b-nv6cc" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.723108 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/placement-7b8cd49c48-k928p" podUID="feedfa19-f182-4f7f-8b50-3d16c11e3510" containerName="placement-api" probeResult="failure" output="Get \"https://10.217.1.19:8778/\": read tcp 10.217.0.2:55534->10.217.1.19:8778: read: connection reset by peer" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.723196 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/placement-7b8cd49c48-k928p" podUID="feedfa19-f182-4f7f-8b50-3d16c11e3510" containerName="placement-log" probeResult="failure" output="Get \"https://10.217.1.19:8778/\": read tcp 10.217.0.2:55544->10.217.1.19:8778: read: connection reset by peer" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.726546 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-66bfc8789d-nsc64"] Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.736597 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-685599669b-nv6cc"] Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.752194 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.753067 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovsdbserver-nb-0" podUID="5f087f7c-913a-4ab8-b905-5caa84469c77" containerName="openstack-network-exporter" containerID="cri-o://4dd20571883b253e3f573e46ad0ecb50522ba2e589eece64ceebece4a1df813e" gracePeriod=300 Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.779146 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b6ba5f2a-6486-4a84-bf85-23c00e907701-scripts\") pod \"placement-976c48458-wzswq\" (UID: \"b6ba5f2a-6486-4a84-bf85-23c00e907701\") " pod="openstack-kuttl-tests/placement-976c48458-wzswq" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.779210 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/33f7229d-bcae-434a-805e-530801e79b20-credential-keys\") pod \"keystone-685599669b-nv6cc\" (UID: \"33f7229d-bcae-434a-805e-530801e79b20\") " pod="openstack-kuttl-tests/keystone-685599669b-nv6cc" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.779284 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/33f7229d-bcae-434a-805e-530801e79b20-fernet-keys\") pod \"keystone-685599669b-nv6cc\" (UID: \"33f7229d-bcae-434a-805e-530801e79b20\") " pod="openstack-kuttl-tests/keystone-685599669b-nv6cc" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.779349 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xxgml\" (UniqueName: 
\"kubernetes.io/projected/33f7229d-bcae-434a-805e-530801e79b20-kube-api-access-xxgml\") pod \"keystone-685599669b-nv6cc\" (UID: \"33f7229d-bcae-434a-805e-530801e79b20\") " pod="openstack-kuttl-tests/keystone-685599669b-nv6cc" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.779377 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d9whj\" (UniqueName: \"kubernetes.io/projected/b6ba5f2a-6486-4a84-bf85-23c00e907701-kube-api-access-d9whj\") pod \"placement-976c48458-wzswq\" (UID: \"b6ba5f2a-6486-4a84-bf85-23c00e907701\") " pod="openstack-kuttl-tests/placement-976c48458-wzswq" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.779399 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6ba5f2a-6486-4a84-bf85-23c00e907701-internal-tls-certs\") pod \"placement-976c48458-wzswq\" (UID: \"b6ba5f2a-6486-4a84-bf85-23c00e907701\") " pod="openstack-kuttl-tests/placement-976c48458-wzswq" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.779423 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/596f2da7-db14-44e3-8c93-5809b27e0cc6-combined-ca-bundle\") pod \"neutron-66bfc8789d-nsc64\" (UID: \"596f2da7-db14-44e3-8c93-5809b27e0cc6\") " pod="openstack-kuttl-tests/neutron-66bfc8789d-nsc64" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.779456 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6ba5f2a-6486-4a84-bf85-23c00e907701-combined-ca-bundle\") pod \"placement-976c48458-wzswq\" (UID: \"b6ba5f2a-6486-4a84-bf85-23c00e907701\") " pod="openstack-kuttl-tests/placement-976c48458-wzswq" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.779477 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/596f2da7-db14-44e3-8c93-5809b27e0cc6-ovndb-tls-certs\") pod \"neutron-66bfc8789d-nsc64\" (UID: \"596f2da7-db14-44e3-8c93-5809b27e0cc6\") " pod="openstack-kuttl-tests/neutron-66bfc8789d-nsc64" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.779496 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/33f7229d-bcae-434a-805e-530801e79b20-scripts\") pod \"keystone-685599669b-nv6cc\" (UID: \"33f7229d-bcae-434a-805e-530801e79b20\") " pod="openstack-kuttl-tests/keystone-685599669b-nv6cc" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.779515 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33f7229d-bcae-434a-805e-530801e79b20-combined-ca-bundle\") pod \"keystone-685599669b-nv6cc\" (UID: \"33f7229d-bcae-434a-805e-530801e79b20\") " pod="openstack-kuttl-tests/keystone-685599669b-nv6cc" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.779532 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/596f2da7-db14-44e3-8c93-5809b27e0cc6-config\") pod \"neutron-66bfc8789d-nsc64\" (UID: \"596f2da7-db14-44e3-8c93-5809b27e0cc6\") " pod="openstack-kuttl-tests/neutron-66bfc8789d-nsc64" Jan 20 17:43:23 crc 
kubenswrapper[4558]: I0120 17:43:23.779576 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/596f2da7-db14-44e3-8c93-5809b27e0cc6-public-tls-certs\") pod \"neutron-66bfc8789d-nsc64\" (UID: \"596f2da7-db14-44e3-8c93-5809b27e0cc6\") " pod="openstack-kuttl-tests/neutron-66bfc8789d-nsc64" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.779601 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b6ba5f2a-6486-4a84-bf85-23c00e907701-logs\") pod \"placement-976c48458-wzswq\" (UID: \"b6ba5f2a-6486-4a84-bf85-23c00e907701\") " pod="openstack-kuttl-tests/placement-976c48458-wzswq" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.779630 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6ba5f2a-6486-4a84-bf85-23c00e907701-config-data\") pod \"placement-976c48458-wzswq\" (UID: \"b6ba5f2a-6486-4a84-bf85-23c00e907701\") " pod="openstack-kuttl-tests/placement-976c48458-wzswq" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.779672 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/33f7229d-bcae-434a-805e-530801e79b20-public-tls-certs\") pod \"keystone-685599669b-nv6cc\" (UID: \"33f7229d-bcae-434a-805e-530801e79b20\") " pod="openstack-kuttl-tests/keystone-685599669b-nv6cc" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.779691 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/596f2da7-db14-44e3-8c93-5809b27e0cc6-httpd-config\") pod \"neutron-66bfc8789d-nsc64\" (UID: \"596f2da7-db14-44e3-8c93-5809b27e0cc6\") " pod="openstack-kuttl-tests/neutron-66bfc8789d-nsc64" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.779721 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/596f2da7-db14-44e3-8c93-5809b27e0cc6-internal-tls-certs\") pod \"neutron-66bfc8789d-nsc64\" (UID: \"596f2da7-db14-44e3-8c93-5809b27e0cc6\") " pod="openstack-kuttl-tests/neutron-66bfc8789d-nsc64" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.779743 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6ba5f2a-6486-4a84-bf85-23c00e907701-public-tls-certs\") pod \"placement-976c48458-wzswq\" (UID: \"b6ba5f2a-6486-4a84-bf85-23c00e907701\") " pod="openstack-kuttl-tests/placement-976c48458-wzswq" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.779768 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/33f7229d-bcae-434a-805e-530801e79b20-internal-tls-certs\") pod \"keystone-685599669b-nv6cc\" (UID: \"33f7229d-bcae-434a-805e-530801e79b20\") " pod="openstack-kuttl-tests/keystone-685599669b-nv6cc" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.779787 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5cffh\" (UniqueName: \"kubernetes.io/projected/596f2da7-db14-44e3-8c93-5809b27e0cc6-kube-api-access-5cffh\") 
pod \"neutron-66bfc8789d-nsc64\" (UID: \"596f2da7-db14-44e3-8c93-5809b27e0cc6\") " pod="openstack-kuttl-tests/neutron-66bfc8789d-nsc64" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.779843 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/33f7229d-bcae-434a-805e-530801e79b20-config-data\") pod \"keystone-685599669b-nv6cc\" (UID: \"33f7229d-bcae-434a-805e-530801e79b20\") " pod="openstack-kuttl-tests/keystone-685599669b-nv6cc" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.831111 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-api-7bc45756fb-278vp" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.885741 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6ba5f2a-6486-4a84-bf85-23c00e907701-internal-tls-certs\") pod \"placement-976c48458-wzswq\" (UID: \"b6ba5f2a-6486-4a84-bf85-23c00e907701\") " pod="openstack-kuttl-tests/placement-976c48458-wzswq" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.885810 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/596f2da7-db14-44e3-8c93-5809b27e0cc6-combined-ca-bundle\") pod \"neutron-66bfc8789d-nsc64\" (UID: \"596f2da7-db14-44e3-8c93-5809b27e0cc6\") " pod="openstack-kuttl-tests/neutron-66bfc8789d-nsc64" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.885878 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6ba5f2a-6486-4a84-bf85-23c00e907701-combined-ca-bundle\") pod \"placement-976c48458-wzswq\" (UID: \"b6ba5f2a-6486-4a84-bf85-23c00e907701\") " pod="openstack-kuttl-tests/placement-976c48458-wzswq" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.885902 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/596f2da7-db14-44e3-8c93-5809b27e0cc6-ovndb-tls-certs\") pod \"neutron-66bfc8789d-nsc64\" (UID: \"596f2da7-db14-44e3-8c93-5809b27e0cc6\") " pod="openstack-kuttl-tests/neutron-66bfc8789d-nsc64" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.885950 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/33f7229d-bcae-434a-805e-530801e79b20-scripts\") pod \"keystone-685599669b-nv6cc\" (UID: \"33f7229d-bcae-434a-805e-530801e79b20\") " pod="openstack-kuttl-tests/keystone-685599669b-nv6cc" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.885975 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33f7229d-bcae-434a-805e-530801e79b20-combined-ca-bundle\") pod \"keystone-685599669b-nv6cc\" (UID: \"33f7229d-bcae-434a-805e-530801e79b20\") " pod="openstack-kuttl-tests/keystone-685599669b-nv6cc" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.885994 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/596f2da7-db14-44e3-8c93-5809b27e0cc6-config\") pod \"neutron-66bfc8789d-nsc64\" (UID: \"596f2da7-db14-44e3-8c93-5809b27e0cc6\") " pod="openstack-kuttl-tests/neutron-66bfc8789d-nsc64" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.886064 4558 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/596f2da7-db14-44e3-8c93-5809b27e0cc6-public-tls-certs\") pod \"neutron-66bfc8789d-nsc64\" (UID: \"596f2da7-db14-44e3-8c93-5809b27e0cc6\") " pod="openstack-kuttl-tests/neutron-66bfc8789d-nsc64" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.886110 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b6ba5f2a-6486-4a84-bf85-23c00e907701-logs\") pod \"placement-976c48458-wzswq\" (UID: \"b6ba5f2a-6486-4a84-bf85-23c00e907701\") " pod="openstack-kuttl-tests/placement-976c48458-wzswq" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.886141 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6ba5f2a-6486-4a84-bf85-23c00e907701-config-data\") pod \"placement-976c48458-wzswq\" (UID: \"b6ba5f2a-6486-4a84-bf85-23c00e907701\") " pod="openstack-kuttl-tests/placement-976c48458-wzswq" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.886196 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/33f7229d-bcae-434a-805e-530801e79b20-public-tls-certs\") pod \"keystone-685599669b-nv6cc\" (UID: \"33f7229d-bcae-434a-805e-530801e79b20\") " pod="openstack-kuttl-tests/keystone-685599669b-nv6cc" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.886216 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/596f2da7-db14-44e3-8c93-5809b27e0cc6-httpd-config\") pod \"neutron-66bfc8789d-nsc64\" (UID: \"596f2da7-db14-44e3-8c93-5809b27e0cc6\") " pod="openstack-kuttl-tests/neutron-66bfc8789d-nsc64" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.886236 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/596f2da7-db14-44e3-8c93-5809b27e0cc6-internal-tls-certs\") pod \"neutron-66bfc8789d-nsc64\" (UID: \"596f2da7-db14-44e3-8c93-5809b27e0cc6\") " pod="openstack-kuttl-tests/neutron-66bfc8789d-nsc64" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.886275 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6ba5f2a-6486-4a84-bf85-23c00e907701-public-tls-certs\") pod \"placement-976c48458-wzswq\" (UID: \"b6ba5f2a-6486-4a84-bf85-23c00e907701\") " pod="openstack-kuttl-tests/placement-976c48458-wzswq" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.886302 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/33f7229d-bcae-434a-805e-530801e79b20-internal-tls-certs\") pod \"keystone-685599669b-nv6cc\" (UID: \"33f7229d-bcae-434a-805e-530801e79b20\") " pod="openstack-kuttl-tests/keystone-685599669b-nv6cc" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.886321 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5cffh\" (UniqueName: \"kubernetes.io/projected/596f2da7-db14-44e3-8c93-5809b27e0cc6-kube-api-access-5cffh\") pod \"neutron-66bfc8789d-nsc64\" (UID: \"596f2da7-db14-44e3-8c93-5809b27e0cc6\") " pod="openstack-kuttl-tests/neutron-66bfc8789d-nsc64" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.886538 4558 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/33f7229d-bcae-434a-805e-530801e79b20-config-data\") pod \"keystone-685599669b-nv6cc\" (UID: \"33f7229d-bcae-434a-805e-530801e79b20\") " pod="openstack-kuttl-tests/keystone-685599669b-nv6cc" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.886575 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b6ba5f2a-6486-4a84-bf85-23c00e907701-scripts\") pod \"placement-976c48458-wzswq\" (UID: \"b6ba5f2a-6486-4a84-bf85-23c00e907701\") " pod="openstack-kuttl-tests/placement-976c48458-wzswq" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.886621 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/33f7229d-bcae-434a-805e-530801e79b20-credential-keys\") pod \"keystone-685599669b-nv6cc\" (UID: \"33f7229d-bcae-434a-805e-530801e79b20\") " pod="openstack-kuttl-tests/keystone-685599669b-nv6cc" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.886647 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/33f7229d-bcae-434a-805e-530801e79b20-fernet-keys\") pod \"keystone-685599669b-nv6cc\" (UID: \"33f7229d-bcae-434a-805e-530801e79b20\") " pod="openstack-kuttl-tests/keystone-685599669b-nv6cc" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.886699 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xxgml\" (UniqueName: \"kubernetes.io/projected/33f7229d-bcae-434a-805e-530801e79b20-kube-api-access-xxgml\") pod \"keystone-685599669b-nv6cc\" (UID: \"33f7229d-bcae-434a-805e-530801e79b20\") " pod="openstack-kuttl-tests/keystone-685599669b-nv6cc" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.886718 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d9whj\" (UniqueName: \"kubernetes.io/projected/b6ba5f2a-6486-4a84-bf85-23c00e907701-kube-api-access-d9whj\") pod \"placement-976c48458-wzswq\" (UID: \"b6ba5f2a-6486-4a84-bf85-23c00e907701\") " pod="openstack-kuttl-tests/placement-976c48458-wzswq" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.891890 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/596f2da7-db14-44e3-8c93-5809b27e0cc6-public-tls-certs\") pod \"neutron-66bfc8789d-nsc64\" (UID: \"596f2da7-db14-44e3-8c93-5809b27e0cc6\") " pod="openstack-kuttl-tests/neutron-66bfc8789d-nsc64" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.892215 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b6ba5f2a-6486-4a84-bf85-23c00e907701-logs\") pod \"placement-976c48458-wzswq\" (UID: \"b6ba5f2a-6486-4a84-bf85-23c00e907701\") " pod="openstack-kuttl-tests/placement-976c48458-wzswq" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.892992 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6ba5f2a-6486-4a84-bf85-23c00e907701-internal-tls-certs\") pod \"placement-976c48458-wzswq\" (UID: \"b6ba5f2a-6486-4a84-bf85-23c00e907701\") " pod="openstack-kuttl-tests/placement-976c48458-wzswq" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.897599 4558 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33f7229d-bcae-434a-805e-530801e79b20-combined-ca-bundle\") pod \"keystone-685599669b-nv6cc\" (UID: \"33f7229d-bcae-434a-805e-530801e79b20\") " pod="openstack-kuttl-tests/keystone-685599669b-nv6cc" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.910013 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovsdbserver-nb-0" podUID="5f087f7c-913a-4ab8-b905-5caa84469c77" containerName="ovsdbserver-nb" containerID="cri-o://1abb2f52a1084fb284622dfe078fa9cc2613944319be12588b9b290276f35a83" gracePeriod=300 Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.918008 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/596f2da7-db14-44e3-8c93-5809b27e0cc6-config\") pod \"neutron-66bfc8789d-nsc64\" (UID: \"596f2da7-db14-44e3-8c93-5809b27e0cc6\") " pod="openstack-kuttl-tests/neutron-66bfc8789d-nsc64" Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.942713 4558 generic.go:334] "Generic (PLEG): container finished" podID="5fb29028-042a-4108-a63d-a6cd215a6c31" containerID="aa17c31d66a9fe9d9ba2ad19998790f47fa790295400fa6c7baccd8f6d22aa0e" exitCode=143 Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.942780 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"5fb29028-042a-4108-a63d-a6cd215a6c31","Type":"ContainerDied","Data":"aa17c31d66a9fe9d9ba2ad19998790f47fa790295400fa6c7baccd8f6d22aa0e"} Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.962776 4558 generic.go:334] "Generic (PLEG): container finished" podID="c62cee5b-4dd8-4a07-995c-e1d0530d695b" containerID="5b28187d77dd8f75fd99f025ba3fddc2ac465afeab96893a305bda76c6c1c56c" exitCode=143 Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.962850 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"c62cee5b-4dd8-4a07-995c-e1d0530d695b","Type":"ContainerDied","Data":"5b28187d77dd8f75fd99f025ba3fddc2ac465afeab96893a305bda76c6c1c56c"} Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.968474 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/openstack-galera-0" podUID="25fe26b1-d308-452c-b15f-b1b272de6869" containerName="galera" containerID="cri-o://5c3fbeabe3ae0665acc4caf4f15bfaffd0a0fffe254192c44ae1df936c59f917" gracePeriod=30 Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.975571 4558 generic.go:334] "Generic (PLEG): container finished" podID="5f087f7c-913a-4ab8-b905-5caa84469c77" containerID="4dd20571883b253e3f573e46ad0ecb50522ba2e589eece64ceebece4a1df813e" exitCode=2 Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.975647 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"5f087f7c-913a-4ab8-b905-5caa84469c77","Type":"ContainerDied","Data":"4dd20571883b253e3f573e46ad0ecb50522ba2e589eece64ceebece4a1df813e"} Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.980187 4558 generic.go:334] "Generic (PLEG): container finished" podID="8877ab44-638b-4880-97fb-305726a5c1a6" containerID="99d260241a6e2248384349724b544d3292e8cbd5470d0de4208392622b91d851" exitCode=0 Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.980288 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-5676986846-92lbc" 
event={"ID":"8877ab44-638b-4880-97fb-305726a5c1a6","Type":"ContainerDied","Data":"99d260241a6e2248384349724b544d3292e8cbd5470d0de4208392622b91d851"} Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.982121 4558 generic.go:334] "Generic (PLEG): container finished" podID="feedfa19-f182-4f7f-8b50-3d16c11e3510" containerID="4a06fdd89fd78aff5dd0753c417b307f7a680d4b1887883dc3eccee2ed84c9ec" exitCode=0 Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.982138 4558 generic.go:334] "Generic (PLEG): container finished" podID="feedfa19-f182-4f7f-8b50-3d16c11e3510" containerID="2238e5e4c9121a6f35ac99b7eefaed26a7b72dd8f5d91d2775ab73bb36849ee2" exitCode=143 Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.982200 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-7b8cd49c48-k928p" event={"ID":"feedfa19-f182-4f7f-8b50-3d16c11e3510","Type":"ContainerDied","Data":"4a06fdd89fd78aff5dd0753c417b307f7a680d4b1887883dc3eccee2ed84c9ec"} Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.982217 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-7b8cd49c48-k928p" event={"ID":"feedfa19-f182-4f7f-8b50-3d16c11e3510","Type":"ContainerDied","Data":"2238e5e4c9121a6f35ac99b7eefaed26a7b72dd8f5d91d2775ab73bb36849ee2"} Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.995232 4558 generic.go:334] "Generic (PLEG): container finished" podID="8738e339-fd1b-4122-899c-7b9521688aba" containerID="49eac06a5bbf59d10a57d3ba182a98399ea7f8d86e783046aa38fe5883748c5e" exitCode=143 Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.995305 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-5796d4c4d8-2xz7t" event={"ID":"8738e339-fd1b-4122-899c-7b9521688aba","Type":"ContainerDied","Data":"49eac06a5bbf59d10a57d3ba182a98399ea7f8d86e783046aa38fe5883748c5e"} Jan 20 17:43:23 crc kubenswrapper[4558]: I0120 17:43:23.996597 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/33f7229d-bcae-434a-805e-530801e79b20-scripts\") pod \"keystone-685599669b-nv6cc\" (UID: \"33f7229d-bcae-434a-805e-530801e79b20\") " pod="openstack-kuttl-tests/keystone-685599669b-nv6cc" Jan 20 17:43:24 crc kubenswrapper[4558]: I0120 17:43:24.000890 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/596f2da7-db14-44e3-8c93-5809b27e0cc6-internal-tls-certs\") pod \"neutron-66bfc8789d-nsc64\" (UID: \"596f2da7-db14-44e3-8c93-5809b27e0cc6\") " pod="openstack-kuttl-tests/neutron-66bfc8789d-nsc64" Jan 20 17:43:24 crc kubenswrapper[4558]: I0120 17:43:24.001902 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/33f7229d-bcae-434a-805e-530801e79b20-credential-keys\") pod \"keystone-685599669b-nv6cc\" (UID: \"33f7229d-bcae-434a-805e-530801e79b20\") " pod="openstack-kuttl-tests/keystone-685599669b-nv6cc" Jan 20 17:43:24 crc kubenswrapper[4558]: I0120 17:43:24.001916 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/33f7229d-bcae-434a-805e-530801e79b20-fernet-keys\") pod \"keystone-685599669b-nv6cc\" (UID: \"33f7229d-bcae-434a-805e-530801e79b20\") " pod="openstack-kuttl-tests/keystone-685599669b-nv6cc" Jan 20 17:43:24 crc kubenswrapper[4558]: I0120 17:43:24.002204 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6ba5f2a-6486-4a84-bf85-23c00e907701-combined-ca-bundle\") pod \"placement-976c48458-wzswq\" (UID: \"b6ba5f2a-6486-4a84-bf85-23c00e907701\") " pod="openstack-kuttl-tests/placement-976c48458-wzswq" Jan 20 17:43:24 crc kubenswrapper[4558]: I0120 17:43:24.002329 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/33f7229d-bcae-434a-805e-530801e79b20-internal-tls-certs\") pod \"keystone-685599669b-nv6cc\" (UID: \"33f7229d-bcae-434a-805e-530801e79b20\") " pod="openstack-kuttl-tests/keystone-685599669b-nv6cc" Jan 20 17:43:24 crc kubenswrapper[4558]: I0120 17:43:24.002486 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6ba5f2a-6486-4a84-bf85-23c00e907701-config-data\") pod \"placement-976c48458-wzswq\" (UID: \"b6ba5f2a-6486-4a84-bf85-23c00e907701\") " pod="openstack-kuttl-tests/placement-976c48458-wzswq" Jan 20 17:43:24 crc kubenswrapper[4558]: I0120 17:43:24.002531 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/596f2da7-db14-44e3-8c93-5809b27e0cc6-ovndb-tls-certs\") pod \"neutron-66bfc8789d-nsc64\" (UID: \"596f2da7-db14-44e3-8c93-5809b27e0cc6\") " pod="openstack-kuttl-tests/neutron-66bfc8789d-nsc64" Jan 20 17:43:24 crc kubenswrapper[4558]: I0120 17:43:24.002861 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6ba5f2a-6486-4a84-bf85-23c00e907701-public-tls-certs\") pod \"placement-976c48458-wzswq\" (UID: \"b6ba5f2a-6486-4a84-bf85-23c00e907701\") " pod="openstack-kuttl-tests/placement-976c48458-wzswq" Jan 20 17:43:24 crc kubenswrapper[4558]: I0120 17:43:24.003274 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/33f7229d-bcae-434a-805e-530801e79b20-config-data\") pod \"keystone-685599669b-nv6cc\" (UID: \"33f7229d-bcae-434a-805e-530801e79b20\") " pod="openstack-kuttl-tests/keystone-685599669b-nv6cc" Jan 20 17:43:24 crc kubenswrapper[4558]: I0120 17:43:24.003355 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/596f2da7-db14-44e3-8c93-5809b27e0cc6-combined-ca-bundle\") pod \"neutron-66bfc8789d-nsc64\" (UID: \"596f2da7-db14-44e3-8c93-5809b27e0cc6\") " pod="openstack-kuttl-tests/neutron-66bfc8789d-nsc64" Jan 20 17:43:24 crc kubenswrapper[4558]: I0120 17:43:24.003984 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b6ba5f2a-6486-4a84-bf85-23c00e907701-scripts\") pod \"placement-976c48458-wzswq\" (UID: \"b6ba5f2a-6486-4a84-bf85-23c00e907701\") " pod="openstack-kuttl-tests/placement-976c48458-wzswq" Jan 20 17:43:24 crc kubenswrapper[4558]: I0120 17:43:24.003993 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-sb-0_0394e10b-3811-4eef-a9ef-a785e1574649/ovsdbserver-sb/0.log" Jan 20 17:43:24 crc kubenswrapper[4558]: I0120 17:43:24.004057 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/596f2da7-db14-44e3-8c93-5809b27e0cc6-httpd-config\") pod \"neutron-66bfc8789d-nsc64\" (UID: \"596f2da7-db14-44e3-8c93-5809b27e0cc6\") " pod="openstack-kuttl-tests/neutron-66bfc8789d-nsc64" Jan 20 17:43:24 
crc kubenswrapper[4558]: I0120 17:43:24.004081 4558 generic.go:334] "Generic (PLEG): container finished" podID="0394e10b-3811-4eef-a9ef-a785e1574649" containerID="45124db692168b507b34896779f420550426a3011e2b00b8912ca2208f58799e" exitCode=2 Jan 20 17:43:24 crc kubenswrapper[4558]: I0120 17:43:24.004099 4558 generic.go:334] "Generic (PLEG): container finished" podID="0394e10b-3811-4eef-a9ef-a785e1574649" containerID="baba51653eb144a4334b8651e3e9970d529abaea40101e2e03249e068d4b3b9b" exitCode=143 Jan 20 17:43:24 crc kubenswrapper[4558]: I0120 17:43:24.004105 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"0394e10b-3811-4eef-a9ef-a785e1574649","Type":"ContainerDied","Data":"45124db692168b507b34896779f420550426a3011e2b00b8912ca2208f58799e"} Jan 20 17:43:24 crc kubenswrapper[4558]: I0120 17:43:24.004144 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"0394e10b-3811-4eef-a9ef-a785e1574649","Type":"ContainerDied","Data":"baba51653eb144a4334b8651e3e9970d529abaea40101e2e03249e068d4b3b9b"} Jan 20 17:43:24 crc kubenswrapper[4558]: I0120 17:43:24.004838 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/33f7229d-bcae-434a-805e-530801e79b20-public-tls-certs\") pod \"keystone-685599669b-nv6cc\" (UID: \"33f7229d-bcae-434a-805e-530801e79b20\") " pod="openstack-kuttl-tests/keystone-685599669b-nv6cc" Jan 20 17:43:24 crc kubenswrapper[4558]: I0120 17:43:24.008393 4558 generic.go:334] "Generic (PLEG): container finished" podID="0a246013-4fe7-4d92-a517-414b1c235db0" containerID="3bca3c440d169db0b20179eec25960ffaa1be1f116d15347d51dd6ac49e4557f" exitCode=143 Jan 20 17:43:24 crc kubenswrapper[4558]: I0120 17:43:24.008471 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"0a246013-4fe7-4d92-a517-414b1c235db0","Type":"ContainerDied","Data":"3bca3c440d169db0b20179eec25960ffaa1be1f116d15347d51dd6ac49e4557f"} Jan 20 17:43:24 crc kubenswrapper[4558]: I0120 17:43:24.009467 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5cffh\" (UniqueName: \"kubernetes.io/projected/596f2da7-db14-44e3-8c93-5809b27e0cc6-kube-api-access-5cffh\") pod \"neutron-66bfc8789d-nsc64\" (UID: \"596f2da7-db14-44e3-8c93-5809b27e0cc6\") " pod="openstack-kuttl-tests/neutron-66bfc8789d-nsc64" Jan 20 17:43:24 crc kubenswrapper[4558]: I0120 17:43:24.009533 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xxgml\" (UniqueName: \"kubernetes.io/projected/33f7229d-bcae-434a-805e-530801e79b20-kube-api-access-xxgml\") pod \"keystone-685599669b-nv6cc\" (UID: \"33f7229d-bcae-434a-805e-530801e79b20\") " pod="openstack-kuttl-tests/keystone-685599669b-nv6cc" Jan 20 17:43:24 crc kubenswrapper[4558]: I0120 17:43:24.009693 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d9whj\" (UniqueName: \"kubernetes.io/projected/b6ba5f2a-6486-4a84-bf85-23c00e907701-kube-api-access-d9whj\") pod \"placement-976c48458-wzswq\" (UID: \"b6ba5f2a-6486-4a84-bf85-23c00e907701\") " pod="openstack-kuttl-tests/placement-976c48458-wzswq" Jan 20 17:43:24 crc kubenswrapper[4558]: I0120 17:43:24.010506 4558 generic.go:334] "Generic (PLEG): container finished" podID="9db31a7a-b4bb-4c3b-99f4-b9855af99342" containerID="a0ad02a3d302f3830db2cc15f3d92955d193e02b01825c96bcf3760f81f92872" exitCode=2 Jan 20 17:43:24 crc 
kubenswrapper[4558]: I0120 17:43:24.010562 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"9db31a7a-b4bb-4c3b-99f4-b9855af99342","Type":"ContainerDied","Data":"a0ad02a3d302f3830db2cc15f3d92955d193e02b01825c96bcf3760f81f92872"} Jan 20 17:43:24 crc kubenswrapper[4558]: E0120 17:43:24.040613 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="5c3fbeabe3ae0665acc4caf4f15bfaffd0a0fffe254192c44ae1df936c59f917" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Jan 20 17:43:24 crc kubenswrapper[4558]: E0120 17:43:24.042534 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="5c3fbeabe3ae0665acc4caf4f15bfaffd0a0fffe254192c44ae1df936c59f917" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Jan 20 17:43:24 crc kubenswrapper[4558]: I0120 17:43:24.042929 4558 generic.go:334] "Generic (PLEG): container finished" podID="a18f9da2-9c64-4047-ad02-206ac9c8aa61" containerID="b1459a5f2896c316432b666d6a44cd9116c1172c341136db96a48899a0dd4884" exitCode=143 Jan 20 17:43:24 crc kubenswrapper[4558]: I0120 17:43:24.042997 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:24 crc kubenswrapper[4558]: I0120 17:43:24.043462 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"a18f9da2-9c64-4047-ad02-206ac9c8aa61","Type":"ContainerDied","Data":"b1459a5f2896c316432b666d6a44cd9116c1172c341136db96a48899a0dd4884"} Jan 20 17:43:24 crc kubenswrapper[4558]: E0120 17:43:24.044140 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="5c3fbeabe3ae0665acc4caf4f15bfaffd0a0fffe254192c44ae1df936c59f917" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Jan 20 17:43:24 crc kubenswrapper[4558]: E0120 17:43:24.044185 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/openstack-galera-0" podUID="25fe26b1-d308-452c-b15f-b1b272de6869" containerName="galera" Jan 20 17:43:24 crc kubenswrapper[4558]: I0120 17:43:24.076457 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:43:24 crc kubenswrapper[4558]: I0120 17:43:24.076674 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podUID="a8df2ac8-5e44-4c2d-a1fb-11e7cdeda894" containerName="nova-cell1-conductor-conductor" containerID="cri-o://5caa9c83e57be3928128e4ffcdc30d264c7d25b5dfa3de5488249ee33e587529" gracePeriod=30 Jan 20 17:43:24 crc kubenswrapper[4558]: I0120 17:43:24.162040 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/openstack-cell1-galera-0" podUID="f68fbac2-1677-4664-a344-77a41044ea2a" containerName="galera" containerID="cri-o://a154b825c6001b834fad5ef3b1acf5a64fc98844395614a8310c4485a3d01a62" 
gracePeriod=30 Jan 20 17:43:24 crc kubenswrapper[4558]: I0120 17:43:24.195364 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:43:24 crc kubenswrapper[4558]: I0120 17:43:24.195652 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="365754d1-535b-450b-a80e-1e7402cb28f8" containerName="nova-cell0-conductor-conductor" containerID="cri-o://9c38558131507676fb881bd62fab6cc179a513c6227f85a615c9705711240cbc" gracePeriod=30 Jan 20 17:43:24 crc kubenswrapper[4558]: I0120 17:43:24.218556 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-57496b565d-kwb5f"] Jan 20 17:43:24 crc kubenswrapper[4558]: I0120 17:43:24.235409 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:43:24 crc kubenswrapper[4558]: I0120 17:43:24.592590 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="70cf1941-8f7a-4328-9338-e5d7739173ac" path="/var/lib/kubelet/pods/70cf1941-8f7a-4328-9338-e5d7739173ac/volumes" Jan 20 17:43:24 crc kubenswrapper[4558]: I0120 17:43:24.799206 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-worker-7975b7ff57-c5czg"] Jan 20 17:43:24 crc kubenswrapper[4558]: I0120 17:43:24.826466 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-api-7bc45756fb-278vp"] Jan 20 17:43:24 crc kubenswrapper[4558]: W0120 17:43:24.846505 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod432ec648_18a7_416a_85cc_409a30976a67.slice/crio-e5df2a06a2815159e994faa556cca8f0fe0201d5baebedaccc83bd16ca88a66a WatchSource:0}: Error finding container e5df2a06a2815159e994faa556cca8f0fe0201d5baebedaccc83bd16ca88a66a: Status 404 returned error can't find the container with id e5df2a06a2815159e994faa556cca8f0fe0201d5baebedaccc83bd16ca88a66a Jan 20 17:43:25 crc kubenswrapper[4558]: I0120 17:43:25.054981 4558 generic.go:334] "Generic (PLEG): container finished" podID="8738e339-fd1b-4122-899c-7b9521688aba" containerID="1ef3a181c7498e05935b57f64f9e58a17d10768de064a519dd385d5add3525db" exitCode=0 Jan 20 17:43:25 crc kubenswrapper[4558]: I0120 17:43:25.055057 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-5796d4c4d8-2xz7t" event={"ID":"8738e339-fd1b-4122-899c-7b9521688aba","Type":"ContainerDied","Data":"1ef3a181c7498e05935b57f64f9e58a17d10768de064a519dd385d5add3525db"} Jan 20 17:43:25 crc kubenswrapper[4558]: I0120 17:43:25.055087 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-5796d4c4d8-2xz7t" event={"ID":"8738e339-fd1b-4122-899c-7b9521688aba","Type":"ContainerDied","Data":"ab8da78490180f79fecc1c88007f9f076d9c4858abe2a0bc5d26311929546c92"} Jan 20 17:43:25 crc kubenswrapper[4558]: I0120 17:43:25.055099 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ab8da78490180f79fecc1c88007f9f076d9c4858abe2a0bc5d26311929546c92" Jan 20 17:43:25 crc kubenswrapper[4558]: I0120 17:43:25.057561 4558 generic.go:334] "Generic (PLEG): container finished" podID="a5db7bd5-e0c6-4645-ab63-3a2a21fdc854" containerID="a50b4edbabca8fd422d0afd87910c0ef1f309e8061f08ee60f8118612b4fe104" exitCode=0 Jan 20 17:43:25 crc kubenswrapper[4558]: I0120 17:43:25.057611 4558 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"a5db7bd5-e0c6-4645-ab63-3a2a21fdc854","Type":"ContainerDied","Data":"a50b4edbabca8fd422d0afd87910c0ef1f309e8061f08ee60f8118612b4fe104"} Jan 20 17:43:25 crc kubenswrapper[4558]: I0120 17:43:25.057632 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"a5db7bd5-e0c6-4645-ab63-3a2a21fdc854","Type":"ContainerDied","Data":"61933e699ad2f2864803ebb3f142ff1880d56b79c49386c63c0295f41e90b399"} Jan 20 17:43:25 crc kubenswrapper[4558]: I0120 17:43:25.057643 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="61933e699ad2f2864803ebb3f142ff1880d56b79c49386c63c0295f41e90b399" Jan 20 17:43:25 crc kubenswrapper[4558]: I0120 17:43:25.057728 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/openstack-cell1-galera-0" podUID="f68fbac2-1677-4664-a344-77a41044ea2a" containerName="galera" probeResult="failure" output="" Jan 20 17:43:25 crc kubenswrapper[4558]: I0120 17:43:25.068476 4558 generic.go:334] "Generic (PLEG): container finished" podID="25cfe04f-2194-405f-a9d7-82181e8ac22a" containerID="9aec7229608a50e4ca2cab47655081015208900db81e7a5801eb3c70cf100002" exitCode=0 Jan 20 17:43:25 crc kubenswrapper[4558]: I0120 17:43:25.068535 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"25cfe04f-2194-405f-a9d7-82181e8ac22a","Type":"ContainerDied","Data":"9aec7229608a50e4ca2cab47655081015208900db81e7a5801eb3c70cf100002"} Jan 20 17:43:25 crc kubenswrapper[4558]: I0120 17:43:25.070844 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-7bc45756fb-278vp" event={"ID":"432ec648-18a7-416a-85cc-409a30976a67","Type":"ContainerStarted","Data":"e5df2a06a2815159e994faa556cca8f0fe0201d5baebedaccc83bd16ca88a66a"} Jan 20 17:43:25 crc kubenswrapper[4558]: I0120 17:43:25.072325 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-7975b7ff57-c5czg" event={"ID":"1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2","Type":"ContainerStarted","Data":"fda6ea175768811455e1d8110992e145c45cdc20ae2763c7a7fc819b10fdc9aa"} Jan 20 17:43:25 crc kubenswrapper[4558]: I0120 17:43:25.075232 4558 generic.go:334] "Generic (PLEG): container finished" podID="ea5dfbd0-37c4-46a1-afad-77e85e35d7fb" containerID="42c29ec80666322d27f85a6e6798fa5fcabfb0b124569ce2fbc426f02c3bf8fe" exitCode=0 Jan 20 17:43:25 crc kubenswrapper[4558]: I0120 17:43:25.075344 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"ea5dfbd0-37c4-46a1-afad-77e85e35d7fb","Type":"ContainerDied","Data":"42c29ec80666322d27f85a6e6798fa5fcabfb0b124569ce2fbc426f02c3bf8fe"} Jan 20 17:43:25 crc kubenswrapper[4558]: I0120 17:43:25.077465 4558 generic.go:334] "Generic (PLEG): container finished" podID="7935bdc8-1434-418a-a3ad-b165ee8be23e" containerID="d6a76d69efdb8d9de2921accc3eae11c4dd079ceb576dcf301178d78b9a86d25" exitCode=0 Jan 20 17:43:25 crc kubenswrapper[4558]: I0120 17:43:25.077488 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-6bb454b456-wjhln" event={"ID":"7935bdc8-1434-418a-a3ad-b165ee8be23e","Type":"ContainerDied","Data":"d6a76d69efdb8d9de2921accc3eae11c4dd079ceb576dcf301178d78b9a86d25"} Jan 20 17:43:25 crc kubenswrapper[4558]: I0120 17:43:25.077516 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-6bb454b456-wjhln" 
event={"ID":"7935bdc8-1434-418a-a3ad-b165ee8be23e","Type":"ContainerDied","Data":"e925a826ed08dc1090056b5b6c39cc5d3e191110012fc2e1ee085377502021e6"} Jan 20 17:43:25 crc kubenswrapper[4558]: I0120 17:43:25.077529 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e925a826ed08dc1090056b5b6c39cc5d3e191110012fc2e1ee085377502021e6" Jan 20 17:43:25 crc kubenswrapper[4558]: I0120 17:43:25.079306 4558 generic.go:334] "Generic (PLEG): container finished" podID="a8df2ac8-5e44-4c2d-a1fb-11e7cdeda894" containerID="5caa9c83e57be3928128e4ffcdc30d264c7d25b5dfa3de5488249ee33e587529" exitCode=0 Jan 20 17:43:25 crc kubenswrapper[4558]: I0120 17:43:25.079387 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"a8df2ac8-5e44-4c2d-a1fb-11e7cdeda894","Type":"ContainerDied","Data":"5caa9c83e57be3928128e4ffcdc30d264c7d25b5dfa3de5488249ee33e587529"} Jan 20 17:43:25 crc kubenswrapper[4558]: I0120 17:43:25.080989 4558 generic.go:334] "Generic (PLEG): container finished" podID="25fe26b1-d308-452c-b15f-b1b272de6869" containerID="5c3fbeabe3ae0665acc4caf4f15bfaffd0a0fffe254192c44ae1df936c59f917" exitCode=0 Jan 20 17:43:25 crc kubenswrapper[4558]: I0120 17:43:25.081070 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"25fe26b1-d308-452c-b15f-b1b272de6869","Type":"ContainerDied","Data":"5c3fbeabe3ae0665acc4caf4f15bfaffd0a0fffe254192c44ae1df936c59f917"} Jan 20 17:43:25 crc kubenswrapper[4558]: I0120 17:43:25.082429 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-57496b565d-kwb5f" event={"ID":"fd5cfc55-8cd9-4a76-87b8-4050b8f1030f","Type":"ContainerStarted","Data":"9311298f7cfe04f1831aea792862986c0c1088cce20c3e8d99e421655af78143"} Jan 20 17:43:25 crc kubenswrapper[4558]: I0120 17:43:25.084320 4558 generic.go:334] "Generic (PLEG): container finished" podID="0a246013-4fe7-4d92-a517-414b1c235db0" containerID="f073a0f7710dadab5f69a3b61d534dd16273339ca1f705b805b71959affdaedb" exitCode=0 Jan 20 17:43:25 crc kubenswrapper[4558]: I0120 17:43:25.084375 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"0a246013-4fe7-4d92-a517-414b1c235db0","Type":"ContainerDied","Data":"f073a0f7710dadab5f69a3b61d534dd16273339ca1f705b805b71959affdaedb"} Jan 20 17:43:25 crc kubenswrapper[4558]: I0120 17:43:25.086270 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-nb-0_5f087f7c-913a-4ab8-b905-5caa84469c77/ovsdbserver-nb/0.log" Jan 20 17:43:25 crc kubenswrapper[4558]: I0120 17:43:25.086308 4558 generic.go:334] "Generic (PLEG): container finished" podID="5f087f7c-913a-4ab8-b905-5caa84469c77" containerID="1abb2f52a1084fb284622dfe078fa9cc2613944319be12588b9b290276f35a83" exitCode=143 Jan 20 17:43:25 crc kubenswrapper[4558]: I0120 17:43:25.086327 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"5f087f7c-913a-4ab8-b905-5caa84469c77","Type":"ContainerDied","Data":"1abb2f52a1084fb284622dfe078fa9cc2613944319be12588b9b290276f35a83"} Jan 20 17:43:25 crc kubenswrapper[4558]: I0120 17:43:25.086350 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"5f087f7c-913a-4ab8-b905-5caa84469c77","Type":"ContainerDied","Data":"912edc3f837ed8a890443ab315c041a927764484baaca34a5caeaf8eaef57250"} Jan 20 17:43:25 crc 
kubenswrapper[4558]: I0120 17:43:25.086362 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="912edc3f837ed8a890443ab315c041a927764484baaca34a5caeaf8eaef57250" Jan 20 17:43:25 crc kubenswrapper[4558]: I0120 17:43:25.088279 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-7b8cd49c48-k928p" event={"ID":"feedfa19-f182-4f7f-8b50-3d16c11e3510","Type":"ContainerDied","Data":"0f4ba83b39effab14da1c98c5454784b0b707d6bde7057ceb9ddd18be15d6501"} Jan 20 17:43:25 crc kubenswrapper[4558]: I0120 17:43:25.088307 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0f4ba83b39effab14da1c98c5454784b0b707d6bde7057ceb9ddd18be15d6501" Jan 20 17:43:25 crc kubenswrapper[4558]: I0120 17:43:25.090113 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-sb-0_0394e10b-3811-4eef-a9ef-a785e1574649/ovsdbserver-sb/0.log" Jan 20 17:43:25 crc kubenswrapper[4558]: I0120 17:43:25.090224 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"0394e10b-3811-4eef-a9ef-a785e1574649","Type":"ContainerDied","Data":"70d870022e9e4c73f1789d3ea2e0a19e7d3c37bc077bf1720b078f21c8b49d8c"} Jan 20 17:43:25 crc kubenswrapper[4558]: I0120 17:43:25.090272 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="70d870022e9e4c73f1789d3ea2e0a19e7d3c37bc077bf1720b078f21c8b49d8c" Jan 20 17:43:25 crc kubenswrapper[4558]: I0120 17:43:25.092281 4558 generic.go:334] "Generic (PLEG): container finished" podID="fd5b8a4a-4f31-4b17-95a3-669d1d7661c4" containerID="a03dfb5ac0209a3bc1e4b1831999713e3583bebd07521f3e1596b419cc04ea21" exitCode=0 Jan 20 17:43:25 crc kubenswrapper[4558]: I0120 17:43:25.092328 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"fd5b8a4a-4f31-4b17-95a3-669d1d7661c4","Type":"ContainerDied","Data":"a03dfb5ac0209a3bc1e4b1831999713e3583bebd07521f3e1596b419cc04ea21"} Jan 20 17:43:25 crc kubenswrapper[4558]: I0120 17:43:25.793982 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-976c48458-wzswq" Jan 20 17:43:25 crc kubenswrapper[4558]: E0120 17:43:25.798708 4558 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0394e10b_3811_4eef_a9ef_a785e1574649.slice/crio-baba51653eb144a4334b8651e3e9970d529abaea40101e2e03249e068d4b3b9b.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc62cee5b_4dd8_4a07_995c_e1d0530d695b.slice/crio-5b28187d77dd8f75fd99f025ba3fddc2ac465afeab96893a305bda76c6c1c56c.scope\": RecentStats: unable to find data in memory cache]" Jan 20 17:43:25 crc kubenswrapper[4558]: I0120 17:43:25.819514 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-66bfc8789d-nsc64" Jan 20 17:43:26 crc kubenswrapper[4558]: E0120 17:43:26.015739 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of baba51653eb144a4334b8651e3e9970d529abaea40101e2e03249e068d4b3b9b is running failed: container process not found" containerID="baba51653eb144a4334b8651e3e9970d529abaea40101e2e03249e068d4b3b9b" cmd=["/usr/bin/pidof","ovsdb-server"] Jan 20 17:43:26 crc kubenswrapper[4558]: E0120 17:43:26.016536 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of baba51653eb144a4334b8651e3e9970d529abaea40101e2e03249e068d4b3b9b is running failed: container process not found" containerID="baba51653eb144a4334b8651e3e9970d529abaea40101e2e03249e068d4b3b9b" cmd=["/usr/bin/pidof","ovsdb-server"] Jan 20 17:43:26 crc kubenswrapper[4558]: E0120 17:43:26.016921 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of baba51653eb144a4334b8651e3e9970d529abaea40101e2e03249e068d4b3b9b is running failed: container process not found" containerID="baba51653eb144a4334b8651e3e9970d529abaea40101e2e03249e068d4b3b9b" cmd=["/usr/bin/pidof","ovsdb-server"] Jan 20 17:43:26 crc kubenswrapper[4558]: E0120 17:43:26.016968 4558 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of baba51653eb144a4334b8651e3e9970d529abaea40101e2e03249e068d4b3b9b is running failed: container process not found" probeType="Readiness" pod="openstack-kuttl-tests/ovsdbserver-sb-0" podUID="0394e10b-3811-4eef-a9ef-a785e1574649" containerName="ovsdbserver-sb" Jan 20 17:43:26 crc kubenswrapper[4558]: E0120 17:43:26.020698 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 1abb2f52a1084fb284622dfe078fa9cc2613944319be12588b9b290276f35a83 is running failed: container process not found" containerID="1abb2f52a1084fb284622dfe078fa9cc2613944319be12588b9b290276f35a83" cmd=["/usr/bin/pidof","ovsdb-server"] Jan 20 17:43:26 crc kubenswrapper[4558]: E0120 17:43:26.020916 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 1abb2f52a1084fb284622dfe078fa9cc2613944319be12588b9b290276f35a83 is running failed: container process not found" containerID="1abb2f52a1084fb284622dfe078fa9cc2613944319be12588b9b290276f35a83" cmd=["/usr/bin/pidof","ovsdb-server"] Jan 20 17:43:26 crc kubenswrapper[4558]: E0120 17:43:26.021199 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 1abb2f52a1084fb284622dfe078fa9cc2613944319be12588b9b290276f35a83 is running failed: container process not found" containerID="1abb2f52a1084fb284622dfe078fa9cc2613944319be12588b9b290276f35a83" cmd=["/usr/bin/pidof","ovsdb-server"] Jan 20 17:43:26 crc kubenswrapper[4558]: E0120 17:43:26.021239 4558 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 1abb2f52a1084fb284622dfe078fa9cc2613944319be12588b9b290276f35a83 is running failed: container process not found" probeType="Readiness" 
pod="openstack-kuttl-tests/ovsdbserver-nb-0" podUID="5f087f7c-913a-4ab8-b905-5caa84469c77" containerName="ovsdbserver-nb" Jan 20 17:43:26 crc kubenswrapper[4558]: E0120 17:43:26.053834 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="9c38558131507676fb881bd62fab6cc179a513c6227f85a615c9705711240cbc" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:43:26 crc kubenswrapper[4558]: E0120 17:43:26.057328 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="9c38558131507676fb881bd62fab6cc179a513c6227f85a615c9705711240cbc" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:43:26 crc kubenswrapper[4558]: E0120 17:43:26.067540 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="9c38558131507676fb881bd62fab6cc179a513c6227f85a615c9705711240cbc" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:43:26 crc kubenswrapper[4558]: E0120 17:43:26.067818 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="365754d1-535b-450b-a80e-1e7402cb28f8" containerName="nova-cell0-conductor-conductor" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.075246 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-685599669b-nv6cc" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.079357 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.100477 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-sb-0_0394e10b-3811-4eef-a9ef-a785e1574649/ovsdbserver-sb/0.log" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.100556 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.110485 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-7975b7ff57-c5czg" event={"ID":"1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2","Type":"ContainerStarted","Data":"ddb2774f5a646ced66660bb7d2fc7db683928585fea6955ddbda9cb09d41a20e"} Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.113362 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"ea5dfbd0-37c4-46a1-afad-77e85e35d7fb","Type":"ContainerDied","Data":"c062dc392074c9545555c522024a0a6b381d8f2010ab4c9aa6cba39a5637d18b"} Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.113412 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c062dc392074c9545555c522024a0a6b381d8f2010ab4c9aa6cba39a5637d18b" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.115239 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"a8df2ac8-5e44-4c2d-a1fb-11e7cdeda894","Type":"ContainerDied","Data":"2e9dacc275888dedfec5e084ed7bdaf3e2249013808df66a2ec2a59ddb6bbb30"} Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.115262 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2e9dacc275888dedfec5e084ed7bdaf3e2249013808df66a2ec2a59ddb6bbb30" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.129105 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"fd5b8a4a-4f31-4b17-95a3-669d1d7661c4","Type":"ContainerDied","Data":"66f57cede9d940bc8f1d33c78e7888e002b1e2515352aa0c41ad245fb4ffba99"} Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.129132 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="66f57cede9d940bc8f1d33c78e7888e002b1e2515352aa0c41ad245fb4ffba99" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.133988 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"25fe26b1-d308-452c-b15f-b1b272de6869","Type":"ContainerDied","Data":"2edec83ce852863bc2afc3e9bc1b9c0f6514797150c2774031d55f522cbcdf7d"} Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.134010 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2edec83ce852863bc2afc3e9bc1b9c0f6514797150c2774031d55f522cbcdf7d" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.137709 4558 generic.go:334] "Generic (PLEG): container finished" podID="25cfe04f-2194-405f-a9d7-82181e8ac22a" containerID="d856400d868c5a18aeb6d59839565030a9d6d928c1563362781a0530b884b4e7" exitCode=0 Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.137760 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"25cfe04f-2194-405f-a9d7-82181e8ac22a","Type":"ContainerDied","Data":"d856400d868c5a18aeb6d59839565030a9d6d928c1563362781a0530b884b4e7"} Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.137780 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"25cfe04f-2194-405f-a9d7-82181e8ac22a","Type":"ContainerDied","Data":"9ef69efacb42b6ced654f8f8a2280a764154af708fca8dfecf47b47aa31aeedd"} Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.137789 4558 pod_container_deletor.go:80] "Container not found in pod's 
containers" containerID="9ef69efacb42b6ced654f8f8a2280a764154af708fca8dfecf47b47aa31aeedd" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.137804 4558 scope.go:117] "RemoveContainer" containerID="080004aa669f36ce8bb441200bb1b70a9222d0a13602ec478e6d8d26c1fbf426" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.142628 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"0a246013-4fe7-4d92-a517-414b1c235db0","Type":"ContainerDied","Data":"6841f72195dc77d3ac9b0b3acd8ea9360e5cf6e492f27fe2d62f58793c3de722"} Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.142672 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6841f72195dc77d3ac9b0b3acd8ea9360e5cf6e492f27fe2d62f58793c3de722" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.151509 4558 generic.go:334] "Generic (PLEG): container finished" podID="f68fbac2-1677-4664-a344-77a41044ea2a" containerID="a154b825c6001b834fad5ef3b1acf5a64fc98844395614a8310c4485a3d01a62" exitCode=0 Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.151625 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"f68fbac2-1677-4664-a344-77a41044ea2a","Type":"ContainerDied","Data":"a154b825c6001b834fad5ef3b1acf5a64fc98844395614a8310c4485a3d01a62"} Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.151661 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.151668 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"f68fbac2-1677-4664-a344-77a41044ea2a","Type":"ContainerDied","Data":"73ae02bb95ce2d6c9c3af2a3518478e8076c9da2ea396d56b8256107bc44689a"} Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.151684 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="73ae02bb95ce2d6c9c3af2a3518478e8076c9da2ea396d56b8256107bc44689a" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.245193 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-7b8cd49c48-k928p" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.256550 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a651cf04-6082-49fc-813c-f9de65420553-internal-tls-certs\") pod \"a651cf04-6082-49fc-813c-f9de65420553\" (UID: \"a651cf04-6082-49fc-813c-f9de65420553\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.257380 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a651cf04-6082-49fc-813c-f9de65420553-config-data\") pod \"a651cf04-6082-49fc-813c-f9de65420553\" (UID: \"a651cf04-6082-49fc-813c-f9de65420553\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.257430 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a651cf04-6082-49fc-813c-f9de65420553-logs\") pod \"a651cf04-6082-49fc-813c-f9de65420553\" (UID: \"a651cf04-6082-49fc-813c-f9de65420553\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.257473 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0394e10b-3811-4eef-a9ef-a785e1574649-config\") pod \"0394e10b-3811-4eef-a9ef-a785e1574649\" (UID: \"0394e10b-3811-4eef-a9ef-a785e1574649\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.257502 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vscgj\" (UniqueName: \"kubernetes.io/projected/a651cf04-6082-49fc-813c-f9de65420553-kube-api-access-vscgj\") pod \"a651cf04-6082-49fc-813c-f9de65420553\" (UID: \"a651cf04-6082-49fc-813c-f9de65420553\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.257554 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a651cf04-6082-49fc-813c-f9de65420553-etc-machine-id\") pod \"a651cf04-6082-49fc-813c-f9de65420553\" (UID: \"a651cf04-6082-49fc-813c-f9de65420553\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.257576 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0394e10b-3811-4eef-a9ef-a785e1574649-combined-ca-bundle\") pod \"0394e10b-3811-4eef-a9ef-a785e1574649\" (UID: \"0394e10b-3811-4eef-a9ef-a785e1574649\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.257612 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/0394e10b-3811-4eef-a9ef-a785e1574649-ovsdbserver-sb-tls-certs\") pod \"0394e10b-3811-4eef-a9ef-a785e1574649\" (UID: \"0394e10b-3811-4eef-a9ef-a785e1574649\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.257667 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6mj6z\" (UniqueName: \"kubernetes.io/projected/0394e10b-3811-4eef-a9ef-a785e1574649-kube-api-access-6mj6z\") pod \"0394e10b-3811-4eef-a9ef-a785e1574649\" (UID: \"0394e10b-3811-4eef-a9ef-a785e1574649\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.257740 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a651cf04-6082-49fc-813c-f9de65420553-public-tls-certs\") pod 
\"a651cf04-6082-49fc-813c-f9de65420553\" (UID: \"a651cf04-6082-49fc-813c-f9de65420553\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.257758 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0394e10b-3811-4eef-a9ef-a785e1574649-scripts\") pod \"0394e10b-3811-4eef-a9ef-a785e1574649\" (UID: \"0394e10b-3811-4eef-a9ef-a785e1574649\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.257794 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a651cf04-6082-49fc-813c-f9de65420553-config-data-custom\") pod \"a651cf04-6082-49fc-813c-f9de65420553\" (UID: \"a651cf04-6082-49fc-813c-f9de65420553\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.257821 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/0394e10b-3811-4eef-a9ef-a785e1574649-ovsdb-rundir\") pod \"0394e10b-3811-4eef-a9ef-a785e1574649\" (UID: \"0394e10b-3811-4eef-a9ef-a785e1574649\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.257861 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a651cf04-6082-49fc-813c-f9de65420553-combined-ca-bundle\") pod \"a651cf04-6082-49fc-813c-f9de65420553\" (UID: \"a651cf04-6082-49fc-813c-f9de65420553\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.257879 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-sb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"0394e10b-3811-4eef-a9ef-a785e1574649\" (UID: \"0394e10b-3811-4eef-a9ef-a785e1574649\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.257941 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a651cf04-6082-49fc-813c-f9de65420553-scripts\") pod \"a651cf04-6082-49fc-813c-f9de65420553\" (UID: \"a651cf04-6082-49fc-813c-f9de65420553\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.257969 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/0394e10b-3811-4eef-a9ef-a785e1574649-metrics-certs-tls-certs\") pod \"0394e10b-3811-4eef-a9ef-a785e1574649\" (UID: \"0394e10b-3811-4eef-a9ef-a785e1574649\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.263118 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0394e10b-3811-4eef-a9ef-a785e1574649-config" (OuterVolumeSpecName: "config") pod "0394e10b-3811-4eef-a9ef-a785e1574649" (UID: "0394e10b-3811-4eef-a9ef-a785e1574649"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.264228 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a651cf04-6082-49fc-813c-f9de65420553-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "a651cf04-6082-49fc-813c-f9de65420553" (UID: "a651cf04-6082-49fc-813c-f9de65420553"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.264450 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a651cf04-6082-49fc-813c-f9de65420553-logs" (OuterVolumeSpecName: "logs") pod "a651cf04-6082-49fc-813c-f9de65420553" (UID: "a651cf04-6082-49fc-813c-f9de65420553"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.267563 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-976c48458-wzswq"] Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.270723 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0394e10b-3811-4eef-a9ef-a785e1574649-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "0394e10b-3811-4eef-a9ef-a785e1574649" (UID: "0394e10b-3811-4eef-a9ef-a785e1574649"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.276401 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a651cf04-6082-49fc-813c-f9de65420553-config-data" (OuterVolumeSpecName: "config-data") pod "a651cf04-6082-49fc-813c-f9de65420553" (UID: "a651cf04-6082-49fc-813c-f9de65420553"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.284461 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-6bb454b456-wjhln" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.285923 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "ovndbcluster-sb-etc-ovn") pod "0394e10b-3811-4eef-a9ef-a785e1574649" (UID: "0394e10b-3811-4eef-a9ef-a785e1574649"). InnerVolumeSpecName "local-storage03-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.287446 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-api-5796d4c4d8-2xz7t" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.287781 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0394e10b-3811-4eef-a9ef-a785e1574649-scripts" (OuterVolumeSpecName: "scripts") pod "0394e10b-3811-4eef-a9ef-a785e1574649" (UID: "0394e10b-3811-4eef-a9ef-a785e1574649"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.292916 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a651cf04-6082-49fc-813c-f9de65420553-scripts" (OuterVolumeSpecName: "scripts") pod "a651cf04-6082-49fc-813c-f9de65420553" (UID: "a651cf04-6082-49fc-813c-f9de65420553"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.298568 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a651cf04-6082-49fc-813c-f9de65420553-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a651cf04-6082-49fc-813c-f9de65420553" (UID: "a651cf04-6082-49fc-813c-f9de65420553"). 
InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.298621 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0394e10b-3811-4eef-a9ef-a785e1574649-kube-api-access-6mj6z" (OuterVolumeSpecName: "kube-api-access-6mj6z") pod "0394e10b-3811-4eef-a9ef-a785e1574649" (UID: "0394e10b-3811-4eef-a9ef-a785e1574649"). InnerVolumeSpecName "kube-api-access-6mj6z". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.298686 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a651cf04-6082-49fc-813c-f9de65420553-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "a651cf04-6082-49fc-813c-f9de65420553" (UID: "a651cf04-6082-49fc-813c-f9de65420553"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.298778 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a651cf04-6082-49fc-813c-f9de65420553-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "a651cf04-6082-49fc-813c-f9de65420553" (UID: "a651cf04-6082-49fc-813c-f9de65420553"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.299037 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.299672 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a651cf04-6082-49fc-813c-f9de65420553-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "a651cf04-6082-49fc-813c-f9de65420553" (UID: "a651cf04-6082-49fc-813c-f9de65420553"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.306075 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a651cf04-6082-49fc-813c-f9de65420553-kube-api-access-vscgj" (OuterVolumeSpecName: "kube-api-access-vscgj") pod "a651cf04-6082-49fc-813c-f9de65420553" (UID: "a651cf04-6082-49fc-813c-f9de65420553"). InnerVolumeSpecName "kube-api-access-vscgj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.307612 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.334180 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-nb-0_5f087f7c-913a-4ab8-b905-5caa84469c77/ovsdbserver-nb/0.log" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.334235 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.338000 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.338196 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.338662 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.338791 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.353274 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.359682 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/feedfa19-f182-4f7f-8b50-3d16c11e3510-config-data\") pod \"feedfa19-f182-4f7f-8b50-3d16c11e3510\" (UID: \"feedfa19-f182-4f7f-8b50-3d16c11e3510\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.359732 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8738e339-fd1b-4122-899c-7b9521688aba-public-tls-certs\") pod \"8738e339-fd1b-4122-899c-7b9521688aba\" (UID: \"8738e339-fd1b-4122-899c-7b9521688aba\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.359796 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/feedfa19-f182-4f7f-8b50-3d16c11e3510-scripts\") pod \"feedfa19-f182-4f7f-8b50-3d16c11e3510\" (UID: \"feedfa19-f182-4f7f-8b50-3d16c11e3510\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.359866 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/feedfa19-f182-4f7f-8b50-3d16c11e3510-combined-ca-bundle\") pod \"feedfa19-f182-4f7f-8b50-3d16c11e3510\" (UID: \"feedfa19-f182-4f7f-8b50-3d16c11e3510\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.359941 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8738e339-fd1b-4122-899c-7b9521688aba-logs\") pod \"8738e339-fd1b-4122-899c-7b9521688aba\" (UID: \"8738e339-fd1b-4122-899c-7b9521688aba\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.359970 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8kfcr\" (UniqueName: \"kubernetes.io/projected/8738e339-fd1b-4122-899c-7b9521688aba-kube-api-access-8kfcr\") pod \"8738e339-fd1b-4122-899c-7b9521688aba\" (UID: \"8738e339-fd1b-4122-899c-7b9521688aba\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.359989 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8738e339-fd1b-4122-899c-7b9521688aba-config-data\") pod \"8738e339-fd1b-4122-899c-7b9521688aba\" (UID: \"8738e339-fd1b-4122-899c-7b9521688aba\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.360012 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8738e339-fd1b-4122-899c-7b9521688aba-internal-tls-certs\") pod \"8738e339-fd1b-4122-899c-7b9521688aba\" (UID: \"8738e339-fd1b-4122-899c-7b9521688aba\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.360029 4558 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/feedfa19-f182-4f7f-8b50-3d16c11e3510-internal-tls-certs\") pod \"feedfa19-f182-4f7f-8b50-3d16c11e3510\" (UID: \"feedfa19-f182-4f7f-8b50-3d16c11e3510\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.360053 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7935bdc8-1434-418a-a3ad-b165ee8be23e-combined-ca-bundle\") pod \"7935bdc8-1434-418a-a3ad-b165ee8be23e\" (UID: \"7935bdc8-1434-418a-a3ad-b165ee8be23e\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.360097 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2xmfb\" (UniqueName: \"kubernetes.io/projected/7935bdc8-1434-418a-a3ad-b165ee8be23e-kube-api-access-2xmfb\") pod \"7935bdc8-1434-418a-a3ad-b165ee8be23e\" (UID: \"7935bdc8-1434-418a-a3ad-b165ee8be23e\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.360147 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7935bdc8-1434-418a-a3ad-b165ee8be23e-internal-tls-certs\") pod \"7935bdc8-1434-418a-a3ad-b165ee8be23e\" (UID: \"7935bdc8-1434-418a-a3ad-b165ee8be23e\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.360197 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8738e339-fd1b-4122-899c-7b9521688aba-config-data-custom\") pod \"8738e339-fd1b-4122-899c-7b9521688aba\" (UID: \"8738e339-fd1b-4122-899c-7b9521688aba\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.360232 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8738e339-fd1b-4122-899c-7b9521688aba-combined-ca-bundle\") pod \"8738e339-fd1b-4122-899c-7b9521688aba\" (UID: \"8738e339-fd1b-4122-899c-7b9521688aba\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.360252 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7935bdc8-1434-418a-a3ad-b165ee8be23e-public-tls-certs\") pod \"7935bdc8-1434-418a-a3ad-b165ee8be23e\" (UID: \"7935bdc8-1434-418a-a3ad-b165ee8be23e\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.360281 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/feedfa19-f182-4f7f-8b50-3d16c11e3510-public-tls-certs\") pod \"feedfa19-f182-4f7f-8b50-3d16c11e3510\" (UID: \"feedfa19-f182-4f7f-8b50-3d16c11e3510\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.360342 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7935bdc8-1434-418a-a3ad-b165ee8be23e-config-data\") pod \"7935bdc8-1434-418a-a3ad-b165ee8be23e\" (UID: \"7935bdc8-1434-418a-a3ad-b165ee8be23e\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.360378 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fzns4\" (UniqueName: \"kubernetes.io/projected/feedfa19-f182-4f7f-8b50-3d16c11e3510-kube-api-access-fzns4\") pod \"feedfa19-f182-4f7f-8b50-3d16c11e3510\" (UID: \"feedfa19-f182-4f7f-8b50-3d16c11e3510\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.360399 
4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/7935bdc8-1434-418a-a3ad-b165ee8be23e-credential-keys\") pod \"7935bdc8-1434-418a-a3ad-b165ee8be23e\" (UID: \"7935bdc8-1434-418a-a3ad-b165ee8be23e\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.360433 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/feedfa19-f182-4f7f-8b50-3d16c11e3510-logs\") pod \"feedfa19-f182-4f7f-8b50-3d16c11e3510\" (UID: \"feedfa19-f182-4f7f-8b50-3d16c11e3510\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.360508 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7935bdc8-1434-418a-a3ad-b165ee8be23e-scripts\") pod \"7935bdc8-1434-418a-a3ad-b165ee8be23e\" (UID: \"7935bdc8-1434-418a-a3ad-b165ee8be23e\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.360571 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7935bdc8-1434-418a-a3ad-b165ee8be23e-fernet-keys\") pod \"7935bdc8-1434-418a-a3ad-b165ee8be23e\" (UID: \"7935bdc8-1434-418a-a3ad-b165ee8be23e\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.361524 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a651cf04-6082-49fc-813c-f9de65420553-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.361550 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a651cf04-6082-49fc-813c-f9de65420553-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.361563 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a651cf04-6082-49fc-813c-f9de65420553-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.361571 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a651cf04-6082-49fc-813c-f9de65420553-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.361615 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0394e10b-3811-4eef-a9ef-a785e1574649-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.361624 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vscgj\" (UniqueName: \"kubernetes.io/projected/a651cf04-6082-49fc-813c-f9de65420553-kube-api-access-vscgj\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.361633 4558 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a651cf04-6082-49fc-813c-f9de65420553-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.361642 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6mj6z\" (UniqueName: \"kubernetes.io/projected/0394e10b-3811-4eef-a9ef-a785e1574649-kube-api-access-6mj6z\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.361651 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" 
(UniqueName: \"kubernetes.io/configmap/0394e10b-3811-4eef-a9ef-a785e1574649-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.361662 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a651cf04-6082-49fc-813c-f9de65420553-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.361682 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a651cf04-6082-49fc-813c-f9de65420553-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.361691 4558 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/0394e10b-3811-4eef-a9ef-a785e1574649-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.361699 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a651cf04-6082-49fc-813c-f9de65420553-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.361721 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.365489 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8738e339-fd1b-4122-899c-7b9521688aba-logs" (OuterVolumeSpecName: "logs") pod "8738e339-fd1b-4122-899c-7b9521688aba" (UID: "8738e339-fd1b-4122-899c-7b9521688aba"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.368811 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-66bfc8789d-nsc64"] Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.374313 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7935bdc8-1434-418a-a3ad-b165ee8be23e-kube-api-access-2xmfb" (OuterVolumeSpecName: "kube-api-access-2xmfb") pod "7935bdc8-1434-418a-a3ad-b165ee8be23e" (UID: "7935bdc8-1434-418a-a3ad-b165ee8be23e"). InnerVolumeSpecName "kube-api-access-2xmfb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.374885 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.379069 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/feedfa19-f182-4f7f-8b50-3d16c11e3510-logs" (OuterVolumeSpecName: "logs") pod "feedfa19-f182-4f7f-8b50-3d16c11e3510" (UID: "feedfa19-f182-4f7f-8b50-3d16c11e3510"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.437135 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/feedfa19-f182-4f7f-8b50-3d16c11e3510-scripts" (OuterVolumeSpecName: "scripts") pod "feedfa19-f182-4f7f-8b50-3d16c11e3510" (UID: "feedfa19-f182-4f7f-8b50-3d16c11e3510"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.437761 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/feedfa19-f182-4f7f-8b50-3d16c11e3510-kube-api-access-fzns4" (OuterVolumeSpecName: "kube-api-access-fzns4") pod "feedfa19-f182-4f7f-8b50-3d16c11e3510" (UID: "feedfa19-f182-4f7f-8b50-3d16c11e3510"). InnerVolumeSpecName "kube-api-access-fzns4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.438025 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8738e339-fd1b-4122-899c-7b9521688aba-kube-api-access-8kfcr" (OuterVolumeSpecName: "kube-api-access-8kfcr") pod "8738e339-fd1b-4122-899c-7b9521688aba" (UID: "8738e339-fd1b-4122-899c-7b9521688aba"). InnerVolumeSpecName "kube-api-access-8kfcr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.438157 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8738e339-fd1b-4122-899c-7b9521688aba-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "8738e339-fd1b-4122-899c-7b9521688aba" (UID: "8738e339-fd1b-4122-899c-7b9521688aba"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.440434 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7935bdc8-1434-418a-a3ad-b165ee8be23e-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "7935bdc8-1434-418a-a3ad-b165ee8be23e" (UID: "7935bdc8-1434-418a-a3ad-b165ee8be23e"). InnerVolumeSpecName "credential-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.463708 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/25fe26b1-d308-452c-b15f-b1b272de6869-config-data-default\") pod \"25fe26b1-d308-452c-b15f-b1b272de6869\" (UID: \"25fe26b1-d308-452c-b15f-b1b272de6869\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.463757 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/a5db7bd5-e0c6-4645-ab63-3a2a21fdc854-kolla-config\") pod \"a5db7bd5-e0c6-4645-ab63-3a2a21fdc854\" (UID: \"a5db7bd5-e0c6-4645-ab63-3a2a21fdc854\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.463798 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sttlg\" (UniqueName: \"kubernetes.io/projected/25fe26b1-d308-452c-b15f-b1b272de6869-kube-api-access-sttlg\") pod \"25fe26b1-d308-452c-b15f-b1b272de6869\" (UID: \"25fe26b1-d308-452c-b15f-b1b272de6869\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.463824 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25cfe04f-2194-405f-a9d7-82181e8ac22a-config-data\") pod \"25cfe04f-2194-405f-a9d7-82181e8ac22a\" (UID: \"25cfe04f-2194-405f-a9d7-82181e8ac22a\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.463851 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vbvr7\" (UniqueName: \"kubernetes.io/projected/25cfe04f-2194-405f-a9d7-82181e8ac22a-kube-api-access-vbvr7\") pod \"25cfe04f-2194-405f-a9d7-82181e8ac22a\" (UID: \"25cfe04f-2194-405f-a9d7-82181e8ac22a\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.463885 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/f68fbac2-1677-4664-a344-77a41044ea2a-config-data-generated\") pod \"f68fbac2-1677-4664-a344-77a41044ea2a\" (UID: \"f68fbac2-1677-4664-a344-77a41044ea2a\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.463940 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a246013-4fe7-4d92-a517-414b1c235db0-combined-ca-bundle\") pod \"0a246013-4fe7-4d92-a517-414b1c235db0\" (UID: \"0a246013-4fe7-4d92-a517-414b1c235db0\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.463963 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25fe26b1-d308-452c-b15f-b1b272de6869-combined-ca-bundle\") pod \"25fe26b1-d308-452c-b15f-b1b272de6869\" (UID: \"25fe26b1-d308-452c-b15f-b1b272de6869\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.463991 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/25cfe04f-2194-405f-a9d7-82181e8ac22a-config-data-custom\") pod \"25cfe04f-2194-405f-a9d7-82181e8ac22a\" (UID: \"25cfe04f-2194-405f-a9d7-82181e8ac22a\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.464027 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/f68fbac2-1677-4664-a344-77a41044ea2a-galera-tls-certs\") 
pod \"f68fbac2-1677-4664-a344-77a41044ea2a\" (UID: \"f68fbac2-1677-4664-a344-77a41044ea2a\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.464058 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5f087f7c-913a-4ab8-b905-5caa84469c77-config\") pod \"5f087f7c-913a-4ab8-b905-5caa84469c77\" (UID: \"5f087f7c-913a-4ab8-b905-5caa84469c77\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.464081 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/25fe26b1-d308-452c-b15f-b1b272de6869-operator-scripts\") pod \"25fe26b1-d308-452c-b15f-b1b272de6869\" (UID: \"25fe26b1-d308-452c-b15f-b1b272de6869\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.464103 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/5f087f7c-913a-4ab8-b905-5caa84469c77-ovsdb-rundir\") pod \"5f087f7c-913a-4ab8-b905-5caa84469c77\" (UID: \"5f087f7c-913a-4ab8-b905-5caa84469c77\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.464128 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r78kw\" (UniqueName: \"kubernetes.io/projected/ea5dfbd0-37c4-46a1-afad-77e85e35d7fb-kube-api-access-r78kw\") pod \"ea5dfbd0-37c4-46a1-afad-77e85e35d7fb\" (UID: \"ea5dfbd0-37c4-46a1-afad-77e85e35d7fb\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.464188 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f087f7c-913a-4ab8-b905-5caa84469c77-combined-ca-bundle\") pod \"5f087f7c-913a-4ab8-b905-5caa84469c77\" (UID: \"5f087f7c-913a-4ab8-b905-5caa84469c77\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.467197 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25fe26b1-d308-452c-b15f-b1b272de6869-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "25fe26b1-d308-452c-b15f-b1b272de6869" (UID: "25fe26b1-d308-452c-b15f-b1b272de6869"). InnerVolumeSpecName "config-data-default". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.468857 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"f68fbac2-1677-4664-a344-77a41044ea2a\" (UID: \"f68fbac2-1677-4664-a344-77a41044ea2a\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.468900 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/f68fbac2-1677-4664-a344-77a41044ea2a-kolla-config\") pod \"f68fbac2-1677-4664-a344-77a41044ea2a\" (UID: \"f68fbac2-1677-4664-a344-77a41044ea2a\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.468943 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-plbhs\" (UniqueName: \"kubernetes.io/projected/fd5b8a4a-4f31-4b17-95a3-669d1d7661c4-kube-api-access-plbhs\") pod \"fd5b8a4a-4f31-4b17-95a3-669d1d7661c4\" (UID: \"fd5b8a4a-4f31-4b17-95a3-669d1d7661c4\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.468978 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f68fbac2-1677-4664-a344-77a41044ea2a-combined-ca-bundle\") pod \"f68fbac2-1677-4664-a344-77a41044ea2a\" (UID: \"f68fbac2-1677-4664-a344-77a41044ea2a\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.469020 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/5f087f7c-913a-4ab8-b905-5caa84469c77-metrics-certs-tls-certs\") pod \"5f087f7c-913a-4ab8-b905-5caa84469c77\" (UID: \"5f087f7c-913a-4ab8-b905-5caa84469c77\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.469044 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd5b8a4a-4f31-4b17-95a3-669d1d7661c4-config-data\") pod \"fd5b8a4a-4f31-4b17-95a3-669d1d7661c4\" (UID: \"fd5b8a4a-4f31-4b17-95a3-669d1d7661c4\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.469034 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f68fbac2-1677-4664-a344-77a41044ea2a-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "f68fbac2-1677-4664-a344-77a41044ea2a" (UID: "f68fbac2-1677-4664-a344-77a41044ea2a"). InnerVolumeSpecName "config-data-generated". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.469069 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/25fe26b1-d308-452c-b15f-b1b272de6869-galera-tls-certs\") pod \"25fe26b1-d308-452c-b15f-b1b272de6869\" (UID: \"25fe26b1-d308-452c-b15f-b1b272de6869\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.469120 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8df2ac8-5e44-4c2d-a1fb-11e7cdeda894-config-data\") pod \"a8df2ac8-5e44-4c2d-a1fb-11e7cdeda894\" (UID: \"a8df2ac8-5e44-4c2d-a1fb-11e7cdeda894\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.469190 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f68fbac2-1677-4664-a344-77a41044ea2a-operator-scripts\") pod \"f68fbac2-1677-4664-a344-77a41044ea2a\" (UID: \"f68fbac2-1677-4664-a344-77a41044ea2a\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.469213 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5db7bd5-e0c6-4645-ab63-3a2a21fdc854-combined-ca-bundle\") pod \"a5db7bd5-e0c6-4645-ab63-3a2a21fdc854\" (UID: \"a5db7bd5-e0c6-4645-ab63-3a2a21fdc854\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.469235 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b2xql\" (UniqueName: \"kubernetes.io/projected/f68fbac2-1677-4664-a344-77a41044ea2a-kube-api-access-b2xql\") pod \"f68fbac2-1677-4664-a344-77a41044ea2a\" (UID: \"f68fbac2-1677-4664-a344-77a41044ea2a\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.469260 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a5db7bd5-e0c6-4645-ab63-3a2a21fdc854-config-data\") pod \"a5db7bd5-e0c6-4645-ab63-3a2a21fdc854\" (UID: \"a5db7bd5-e0c6-4645-ab63-3a2a21fdc854\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.469300 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"25fe26b1-d308-452c-b15f-b1b272de6869\" (UID: \"25fe26b1-d308-452c-b15f-b1b272de6869\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.469336 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25cfe04f-2194-405f-a9d7-82181e8ac22a-combined-ca-bundle\") pod \"25cfe04f-2194-405f-a9d7-82181e8ac22a\" (UID: \"25cfe04f-2194-405f-a9d7-82181e8ac22a\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.469369 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/a5db7bd5-e0c6-4645-ab63-3a2a21fdc854-memcached-tls-certs\") pod \"a5db7bd5-e0c6-4645-ab63-3a2a21fdc854\" (UID: \"a5db7bd5-e0c6-4645-ab63-3a2a21fdc854\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.469390 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd5b8a4a-4f31-4b17-95a3-669d1d7661c4-combined-ca-bundle\") pod \"fd5b8a4a-4f31-4b17-95a3-669d1d7661c4\" (UID: 
\"fd5b8a4a-4f31-4b17-95a3-669d1d7661c4\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.469418 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0a246013-4fe7-4d92-a517-414b1c235db0-config-data\") pod \"0a246013-4fe7-4d92-a517-414b1c235db0\" (UID: \"0a246013-4fe7-4d92-a517-414b1c235db0\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.469462 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jxvrv\" (UniqueName: \"kubernetes.io/projected/a8df2ac8-5e44-4c2d-a1fb-11e7cdeda894-kube-api-access-jxvrv\") pod \"a8df2ac8-5e44-4c2d-a1fb-11e7cdeda894\" (UID: \"a8df2ac8-5e44-4c2d-a1fb-11e7cdeda894\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.469483 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea5dfbd0-37c4-46a1-afad-77e85e35d7fb-config-data\") pod \"ea5dfbd0-37c4-46a1-afad-77e85e35d7fb\" (UID: \"ea5dfbd0-37c4-46a1-afad-77e85e35d7fb\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.469504 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kwb8p\" (UniqueName: \"kubernetes.io/projected/0a246013-4fe7-4d92-a517-414b1c235db0-kube-api-access-kwb8p\") pod \"0a246013-4fe7-4d92-a517-414b1c235db0\" (UID: \"0a246013-4fe7-4d92-a517-414b1c235db0\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.469540 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/25fe26b1-d308-452c-b15f-b1b272de6869-config-data-generated\") pod \"25fe26b1-d308-452c-b15f-b1b272de6869\" (UID: \"25fe26b1-d308-452c-b15f-b1b272de6869\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.469592 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0a246013-4fe7-4d92-a517-414b1c235db0-logs\") pod \"0a246013-4fe7-4d92-a517-414b1c235db0\" (UID: \"0a246013-4fe7-4d92-a517-414b1c235db0\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.469624 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0a246013-4fe7-4d92-a517-414b1c235db0-public-tls-certs\") pod \"0a246013-4fe7-4d92-a517-414b1c235db0\" (UID: \"0a246013-4fe7-4d92-a517-414b1c235db0\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.469647 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/f68fbac2-1677-4664-a344-77a41044ea2a-config-data-default\") pod \"f68fbac2-1677-4664-a344-77a41044ea2a\" (UID: \"f68fbac2-1677-4664-a344-77a41044ea2a\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.469683 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/fd5b8a4a-4f31-4b17-95a3-669d1d7661c4-nova-novncproxy-tls-certs\") pod \"fd5b8a4a-4f31-4b17-95a3-669d1d7661c4\" (UID: \"fd5b8a4a-4f31-4b17-95a3-669d1d7661c4\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.469712 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/25cfe04f-2194-405f-a9d7-82181e8ac22a-etc-machine-id\") pod 
\"25cfe04f-2194-405f-a9d7-82181e8ac22a\" (UID: \"25cfe04f-2194-405f-a9d7-82181e8ac22a\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.469733 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/25cfe04f-2194-405f-a9d7-82181e8ac22a-scripts\") pod \"25cfe04f-2194-405f-a9d7-82181e8ac22a\" (UID: \"25cfe04f-2194-405f-a9d7-82181e8ac22a\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.469802 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/fd5b8a4a-4f31-4b17-95a3-669d1d7661c4-vencrypt-tls-certs\") pod \"fd5b8a4a-4f31-4b17-95a3-669d1d7661c4\" (UID: \"fd5b8a4a-4f31-4b17-95a3-669d1d7661c4\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.469829 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea5dfbd0-37c4-46a1-afad-77e85e35d7fb-combined-ca-bundle\") pod \"ea5dfbd0-37c4-46a1-afad-77e85e35d7fb\" (UID: \"ea5dfbd0-37c4-46a1-afad-77e85e35d7fb\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.469855 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-nb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"5f087f7c-913a-4ab8-b905-5caa84469c77\" (UID: \"5f087f7c-913a-4ab8-b905-5caa84469c77\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.469878 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tqthz\" (UniqueName: \"kubernetes.io/projected/a5db7bd5-e0c6-4645-ab63-3a2a21fdc854-kube-api-access-tqthz\") pod \"a5db7bd5-e0c6-4645-ab63-3a2a21fdc854\" (UID: \"a5db7bd5-e0c6-4645-ab63-3a2a21fdc854\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.469901 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/25fe26b1-d308-452c-b15f-b1b272de6869-kolla-config\") pod \"25fe26b1-d308-452c-b15f-b1b272de6869\" (UID: \"25fe26b1-d308-452c-b15f-b1b272de6869\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.469931 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/5f087f7c-913a-4ab8-b905-5caa84469c77-ovsdbserver-nb-tls-certs\") pod \"5f087f7c-913a-4ab8-b905-5caa84469c77\" (UID: \"5f087f7c-913a-4ab8-b905-5caa84469c77\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.469952 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5f087f7c-913a-4ab8-b905-5caa84469c77-scripts\") pod \"5f087f7c-913a-4ab8-b905-5caa84469c77\" (UID: \"5f087f7c-913a-4ab8-b905-5caa84469c77\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.469978 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8df2ac8-5e44-4c2d-a1fb-11e7cdeda894-combined-ca-bundle\") pod \"a8df2ac8-5e44-4c2d-a1fb-11e7cdeda894\" (UID: \"a8df2ac8-5e44-4c2d-a1fb-11e7cdeda894\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.470002 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x6tnw\" (UniqueName: \"kubernetes.io/projected/5f087f7c-913a-4ab8-b905-5caa84469c77-kube-api-access-x6tnw\") pod 
\"5f087f7c-913a-4ab8-b905-5caa84469c77\" (UID: \"5f087f7c-913a-4ab8-b905-5caa84469c77\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.470022 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0a246013-4fe7-4d92-a517-414b1c235db0-internal-tls-certs\") pod \"0a246013-4fe7-4d92-a517-414b1c235db0\" (UID: \"0a246013-4fe7-4d92-a517-414b1c235db0\") " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.470611 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a5db7bd5-e0c6-4645-ab63-3a2a21fdc854-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "a5db7bd5-e0c6-4645-ab63-3a2a21fdc854" (UID: "a5db7bd5-e0c6-4645-ab63-3a2a21fdc854"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.471029 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/feedfa19-f182-4f7f-8b50-3d16c11e3510-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.471056 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/feedfa19-f182-4f7f-8b50-3d16c11e3510-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.471068 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/25fe26b1-d308-452c-b15f-b1b272de6869-config-data-default\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.471085 4558 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/a5db7bd5-e0c6-4645-ab63-3a2a21fdc854-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.471095 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/f68fbac2-1677-4664-a344-77a41044ea2a-config-data-generated\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.471106 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8738e339-fd1b-4122-899c-7b9521688aba-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.471116 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8kfcr\" (UniqueName: \"kubernetes.io/projected/8738e339-fd1b-4122-899c-7b9521688aba-kube-api-access-8kfcr\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.471129 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2xmfb\" (UniqueName: \"kubernetes.io/projected/7935bdc8-1434-418a-a3ad-b165ee8be23e-kube-api-access-2xmfb\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.471141 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8738e339-fd1b-4122-899c-7b9521688aba-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.471152 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fzns4\" (UniqueName: \"kubernetes.io/projected/feedfa19-f182-4f7f-8b50-3d16c11e3510-kube-api-access-fzns4\") on node 
\"crc\" DevicePath \"\"" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.471178 4558 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/7935bdc8-1434-418a-a3ad-b165ee8be23e-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.471628 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5f087f7c-913a-4ab8-b905-5caa84469c77-config" (OuterVolumeSpecName: "config") pod "5f087f7c-913a-4ab8-b905-5caa84469c77" (UID: "5f087f7c-913a-4ab8-b905-5caa84469c77"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.472236 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5f087f7c-913a-4ab8-b905-5caa84469c77-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "5f087f7c-913a-4ab8-b905-5caa84469c77" (UID: "5f087f7c-913a-4ab8-b905-5caa84469c77"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.472283 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25fe26b1-d308-452c-b15f-b1b272de6869-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "25fe26b1-d308-452c-b15f-b1b272de6869" (UID: "25fe26b1-d308-452c-b15f-b1b272de6869"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.473626 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a5db7bd5-e0c6-4645-ab63-3a2a21fdc854-config-data" (OuterVolumeSpecName: "config-data") pod "a5db7bd5-e0c6-4645-ab63-3a2a21fdc854" (UID: "a5db7bd5-e0c6-4645-ab63-3a2a21fdc854"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.474110 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f68fbac2-1677-4664-a344-77a41044ea2a-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "f68fbac2-1677-4664-a344-77a41044ea2a" (UID: "f68fbac2-1677-4664-a344-77a41044ea2a"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.474568 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/25fe26b1-d308-452c-b15f-b1b272de6869-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "25fe26b1-d308-452c-b15f-b1b272de6869" (UID: "25fe26b1-d308-452c-b15f-b1b272de6869"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.479384 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0a246013-4fe7-4d92-a517-414b1c235db0-logs" (OuterVolumeSpecName: "logs") pod "0a246013-4fe7-4d92-a517-414b1c235db0" (UID: "0a246013-4fe7-4d92-a517-414b1c235db0"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.490391 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f68fbac2-1677-4664-a344-77a41044ea2a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f68fbac2-1677-4664-a344-77a41044ea2a" (UID: "f68fbac2-1677-4664-a344-77a41044ea2a"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.500844 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7935bdc8-1434-418a-a3ad-b165ee8be23e-scripts" (OuterVolumeSpecName: "scripts") pod "7935bdc8-1434-418a-a3ad-b165ee8be23e" (UID: "7935bdc8-1434-418a-a3ad-b165ee8be23e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.519550 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a8df2ac8-5e44-4c2d-a1fb-11e7cdeda894-kube-api-access-jxvrv" (OuterVolumeSpecName: "kube-api-access-jxvrv") pod "a8df2ac8-5e44-4c2d-a1fb-11e7cdeda894" (UID: "a8df2ac8-5e44-4c2d-a1fb-11e7cdeda894"). InnerVolumeSpecName "kube-api-access-jxvrv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.520486 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/25cfe04f-2194-405f-a9d7-82181e8ac22a-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "25cfe04f-2194-405f-a9d7-82181e8ac22a" (UID: "25cfe04f-2194-405f-a9d7-82181e8ac22a"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.520980 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f68fbac2-1677-4664-a344-77a41044ea2a-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "f68fbac2-1677-4664-a344-77a41044ea2a" (UID: "f68fbac2-1677-4664-a344-77a41044ea2a"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.528346 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage02-crc" (OuterVolumeSpecName: "ovndbcluster-nb-etc-ovn") pod "5f087f7c-913a-4ab8-b905-5caa84469c77" (UID: "5f087f7c-913a-4ab8-b905-5caa84469c77"). InnerVolumeSpecName "local-storage02-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.528743 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0394e10b-3811-4eef-a9ef-a785e1574649-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0394e10b-3811-4eef-a9ef-a785e1574649" (UID: "0394e10b-3811-4eef-a9ef-a785e1574649"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.529495 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5f087f7c-913a-4ab8-b905-5caa84469c77-scripts" (OuterVolumeSpecName: "scripts") pod "5f087f7c-913a-4ab8-b905-5caa84469c77" (UID: "5f087f7c-913a-4ab8-b905-5caa84469c77"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.530198 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25fe26b1-d308-452c-b15f-b1b272de6869-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "25fe26b1-d308-452c-b15f-b1b272de6869" (UID: "25fe26b1-d308-452c-b15f-b1b272de6869"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.534283 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a5db7bd5-e0c6-4645-ab63-3a2a21fdc854-kube-api-access-tqthz" (OuterVolumeSpecName: "kube-api-access-tqthz") pod "a5db7bd5-e0c6-4645-ab63-3a2a21fdc854" (UID: "a5db7bd5-e0c6-4645-ab63-3a2a21fdc854"). InnerVolumeSpecName "kube-api-access-tqthz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.535772 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25fe26b1-d308-452c-b15f-b1b272de6869-kube-api-access-sttlg" (OuterVolumeSpecName: "kube-api-access-sttlg") pod "25fe26b1-d308-452c-b15f-b1b272de6869" (UID: "25fe26b1-d308-452c-b15f-b1b272de6869"). InnerVolumeSpecName "kube-api-access-sttlg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.535885 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25cfe04f-2194-405f-a9d7-82181e8ac22a-kube-api-access-vbvr7" (OuterVolumeSpecName: "kube-api-access-vbvr7") pod "25cfe04f-2194-405f-a9d7-82181e8ac22a" (UID: "25cfe04f-2194-405f-a9d7-82181e8ac22a"). InnerVolumeSpecName "kube-api-access-vbvr7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.535961 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7935bdc8-1434-418a-a3ad-b165ee8be23e-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "7935bdc8-1434-418a-a3ad-b165ee8be23e" (UID: "7935bdc8-1434-418a-a3ad-b165ee8be23e"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.541263 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25cfe04f-2194-405f-a9d7-82181e8ac22a-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "25cfe04f-2194-405f-a9d7-82181e8ac22a" (UID: "25cfe04f-2194-405f-a9d7-82181e8ac22a"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.545235 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f087f7c-913a-4ab8-b905-5caa84469c77-kube-api-access-x6tnw" (OuterVolumeSpecName: "kube-api-access-x6tnw") pod "5f087f7c-913a-4ab8-b905-5caa84469c77" (UID: "5f087f7c-913a-4ab8-b905-5caa84469c77"). InnerVolumeSpecName "kube-api-access-x6tnw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.545463 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ea5dfbd0-37c4-46a1-afad-77e85e35d7fb-kube-api-access-r78kw" (OuterVolumeSpecName: "kube-api-access-r78kw") pod "ea5dfbd0-37c4-46a1-afad-77e85e35d7fb" (UID: "ea5dfbd0-37c4-46a1-afad-77e85e35d7fb"). InnerVolumeSpecName "kube-api-access-r78kw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.545520 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f68fbac2-1677-4664-a344-77a41044ea2a-kube-api-access-b2xql" (OuterVolumeSpecName: "kube-api-access-b2xql") pod "f68fbac2-1677-4664-a344-77a41044ea2a" (UID: "f68fbac2-1677-4664-a344-77a41044ea2a"). InnerVolumeSpecName "kube-api-access-b2xql". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.560876 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fd5b8a4a-4f31-4b17-95a3-669d1d7661c4-kube-api-access-plbhs" (OuterVolumeSpecName: "kube-api-access-plbhs") pod "fd5b8a4a-4f31-4b17-95a3-669d1d7661c4" (UID: "fd5b8a4a-4f31-4b17-95a3-669d1d7661c4"). InnerVolumeSpecName "kube-api-access-plbhs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.562478 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0a246013-4fe7-4d92-a517-414b1c235db0-kube-api-access-kwb8p" (OuterVolumeSpecName: "kube-api-access-kwb8p") pod "0a246013-4fe7-4d92-a517-414b1c235db0" (UID: "0a246013-4fe7-4d92-a517-414b1c235db0"). InnerVolumeSpecName "kube-api-access-kwb8p". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.570488 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25cfe04f-2194-405f-a9d7-82181e8ac22a-scripts" (OuterVolumeSpecName: "scripts") pod "25cfe04f-2194-405f-a9d7-82181e8ac22a" (UID: "25cfe04f-2194-405f-a9d7-82181e8ac22a"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.572360 4558 scope.go:117] "RemoveContainer" containerID="2d2a8989ca5a5af98b2c9dc8f801ea0675339ec14800f437c058c5a43c20146b" Jan 20 17:43:26 crc kubenswrapper[4558]: E0120 17:43:26.572867 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.593751 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sttlg\" (UniqueName: \"kubernetes.io/projected/25fe26b1-d308-452c-b15f-b1b272de6869-kube-api-access-sttlg\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.593782 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vbvr7\" (UniqueName: \"kubernetes.io/projected/25cfe04f-2194-405f-a9d7-82181e8ac22a-kube-api-access-vbvr7\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.593794 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/25cfe04f-2194-405f-a9d7-82181e8ac22a-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.593804 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5f087f7c-913a-4ab8-b905-5caa84469c77-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.593815 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/25fe26b1-d308-452c-b15f-b1b272de6869-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.593824 4558 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/5f087f7c-913a-4ab8-b905-5caa84469c77-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.593834 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r78kw\" (UniqueName: \"kubernetes.io/projected/ea5dfbd0-37c4-46a1-afad-77e85e35d7fb-kube-api-access-r78kw\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.593845 4558 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/f68fbac2-1677-4664-a344-77a41044ea2a-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.593854 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-plbhs\" (UniqueName: \"kubernetes.io/projected/fd5b8a4a-4f31-4b17-95a3-669d1d7661c4-kube-api-access-plbhs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.593863 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f68fbac2-1677-4664-a344-77a41044ea2a-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.593873 4558 reconciler_common.go:293] "Volume 
detached for volume \"kube-api-access-b2xql\" (UniqueName: \"kubernetes.io/projected/f68fbac2-1677-4664-a344-77a41044ea2a-kube-api-access-b2xql\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.593883 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a5db7bd5-e0c6-4645-ab63-3a2a21fdc854-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.593892 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jxvrv\" (UniqueName: \"kubernetes.io/projected/a8df2ac8-5e44-4c2d-a1fb-11e7cdeda894-kube-api-access-jxvrv\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.593901 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kwb8p\" (UniqueName: \"kubernetes.io/projected/0a246013-4fe7-4d92-a517-414b1c235db0-kube-api-access-kwb8p\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.593921 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7935bdc8-1434-418a-a3ad-b165ee8be23e-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.593930 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/25fe26b1-d308-452c-b15f-b1b272de6869-config-data-generated\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.593940 4558 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7935bdc8-1434-418a-a3ad-b165ee8be23e-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.593949 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0a246013-4fe7-4d92-a517-414b1c235db0-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.593959 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0394e10b-3811-4eef-a9ef-a785e1574649-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.593969 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/f68fbac2-1677-4664-a344-77a41044ea2a-config-data-default\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.593978 4558 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/25cfe04f-2194-405f-a9d7-82181e8ac22a-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.593985 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/25cfe04f-2194-405f-a9d7-82181e8ac22a-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.594008 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.594017 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tqthz\" (UniqueName: 
\"kubernetes.io/projected/a5db7bd5-e0c6-4645-ab63-3a2a21fdc854-kube-api-access-tqthz\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.594026 4558 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/25fe26b1-d308-452c-b15f-b1b272de6869-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.594034 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5f087f7c-913a-4ab8-b905-5caa84469c77-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.596843 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x6tnw\" (UniqueName: \"kubernetes.io/projected/5f087f7c-913a-4ab8-b905-5caa84469c77-kube-api-access-x6tnw\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.628497 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage15-crc" (OuterVolumeSpecName: "mysql-db") pod "f68fbac2-1677-4664-a344-77a41044ea2a" (UID: "f68fbac2-1677-4664-a344-77a41044ea2a"). InnerVolumeSpecName "local-storage15-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.632347 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage14-crc" (OuterVolumeSpecName: "mysql-db") pod "25fe26b1-d308-452c-b15f-b1b272de6869" (UID: "25fe26b1-d308-452c-b15f-b1b272de6869"). InnerVolumeSpecName "local-storage14-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.687078 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-685599669b-nv6cc"] Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.702442 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") on node \"crc\" " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.702479 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") on node \"crc\" " Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.803302 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8738e339-fd1b-4122-899c-7b9521688aba-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8738e339-fd1b-4122-899c-7b9521688aba" (UID: "8738e339-fd1b-4122-899c-7b9521688aba"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.805711 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8738e339-fd1b-4122-899c-7b9521688aba-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.850497 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7935bdc8-1434-418a-a3ad-b165ee8be23e-config-data" (OuterVolumeSpecName: "config-data") pod "7935bdc8-1434-418a-a3ad-b165ee8be23e" (UID: "7935bdc8-1434-418a-a3ad-b165ee8be23e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:26 crc kubenswrapper[4558]: I0120 17:43:26.922463 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7935bdc8-1434-418a-a3ad-b165ee8be23e-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.018576 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/cinder-api-0" podUID="70cf1941-8f7a-4328-9338-e5d7739173ac" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.0.223:8776/healthcheck\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.021449 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.035227 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.039215 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:43:27 crc kubenswrapper[4558]: E0120 17:43:27.039732 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25cfe04f-2194-405f-a9d7-82181e8ac22a" containerName="cinder-scheduler" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.039750 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="25cfe04f-2194-405f-a9d7-82181e8ac22a" containerName="cinder-scheduler" Jan 20 17:43:27 crc kubenswrapper[4558]: E0120 17:43:27.039763 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0394e10b-3811-4eef-a9ef-a785e1574649" containerName="ovsdbserver-sb" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.039770 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0394e10b-3811-4eef-a9ef-a785e1574649" containerName="ovsdbserver-sb" Jan 20 17:43:27 crc kubenswrapper[4558]: E0120 17:43:27.039781 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="feedfa19-f182-4f7f-8b50-3d16c11e3510" containerName="placement-api" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.039788 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="feedfa19-f182-4f7f-8b50-3d16c11e3510" containerName="placement-api" Jan 20 17:43:27 crc kubenswrapper[4558]: E0120 17:43:27.039798 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f68fbac2-1677-4664-a344-77a41044ea2a" containerName="galera" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.039804 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f68fbac2-1677-4664-a344-77a41044ea2a" containerName="galera" Jan 20 17:43:27 crc kubenswrapper[4558]: E0120 17:43:27.039812 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8df2ac8-5e44-4c2d-a1fb-11e7cdeda894" containerName="nova-cell1-conductor-conductor" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.039818 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8df2ac8-5e44-4c2d-a1fb-11e7cdeda894" containerName="nova-cell1-conductor-conductor" Jan 20 17:43:27 crc kubenswrapper[4558]: E0120 17:43:27.039827 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea5dfbd0-37c4-46a1-afad-77e85e35d7fb" containerName="nova-scheduler-scheduler" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.039833 4558 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="ea5dfbd0-37c4-46a1-afad-77e85e35d7fb" containerName="nova-scheduler-scheduler" Jan 20 17:43:27 crc kubenswrapper[4558]: E0120 17:43:27.039842 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f68fbac2-1677-4664-a344-77a41044ea2a" containerName="mysql-bootstrap" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.039848 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f68fbac2-1677-4664-a344-77a41044ea2a" containerName="mysql-bootstrap" Jan 20 17:43:27 crc kubenswrapper[4558]: E0120 17:43:27.039855 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f087f7c-913a-4ab8-b905-5caa84469c77" containerName="openstack-network-exporter" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.039862 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f087f7c-913a-4ab8-b905-5caa84469c77" containerName="openstack-network-exporter" Jan 20 17:43:27 crc kubenswrapper[4558]: E0120 17:43:27.039873 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25cfe04f-2194-405f-a9d7-82181e8ac22a" containerName="cinder-scheduler" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.039880 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="25cfe04f-2194-405f-a9d7-82181e8ac22a" containerName="cinder-scheduler" Jan 20 17:43:27 crc kubenswrapper[4558]: E0120 17:43:27.039892 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0a246013-4fe7-4d92-a517-414b1c235db0" containerName="nova-api-log" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.039898 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0a246013-4fe7-4d92-a517-414b1c235db0" containerName="nova-api-log" Jan 20 17:43:27 crc kubenswrapper[4558]: E0120 17:43:27.039915 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25cfe04f-2194-405f-a9d7-82181e8ac22a" containerName="probe" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.039922 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="25cfe04f-2194-405f-a9d7-82181e8ac22a" containerName="probe" Jan 20 17:43:27 crc kubenswrapper[4558]: E0120 17:43:27.039940 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5db7bd5-e0c6-4645-ab63-3a2a21fdc854" containerName="memcached" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.039947 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5db7bd5-e0c6-4645-ab63-3a2a21fdc854" containerName="memcached" Jan 20 17:43:27 crc kubenswrapper[4558]: E0120 17:43:27.039957 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0394e10b-3811-4eef-a9ef-a785e1574649" containerName="openstack-network-exporter" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.039964 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0394e10b-3811-4eef-a9ef-a785e1574649" containerName="openstack-network-exporter" Jan 20 17:43:27 crc kubenswrapper[4558]: E0120 17:43:27.039980 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f087f7c-913a-4ab8-b905-5caa84469c77" containerName="ovsdbserver-nb" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.039986 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f087f7c-913a-4ab8-b905-5caa84469c77" containerName="ovsdbserver-nb" Jan 20 17:43:27 crc kubenswrapper[4558]: E0120 17:43:27.039996 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25fe26b1-d308-452c-b15f-b1b272de6869" containerName="mysql-bootstrap" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.040002 4558 
state_mem.go:107] "Deleted CPUSet assignment" podUID="25fe26b1-d308-452c-b15f-b1b272de6869" containerName="mysql-bootstrap" Jan 20 17:43:27 crc kubenswrapper[4558]: E0120 17:43:27.040014 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="feedfa19-f182-4f7f-8b50-3d16c11e3510" containerName="placement-log" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.040021 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="feedfa19-f182-4f7f-8b50-3d16c11e3510" containerName="placement-log" Jan 20 17:43:27 crc kubenswrapper[4558]: E0120 17:43:27.040027 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25fe26b1-d308-452c-b15f-b1b272de6869" containerName="galera" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.040032 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="25fe26b1-d308-452c-b15f-b1b272de6869" containerName="galera" Jan 20 17:43:27 crc kubenswrapper[4558]: E0120 17:43:27.040042 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd5b8a4a-4f31-4b17-95a3-669d1d7661c4" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.040048 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd5b8a4a-4f31-4b17-95a3-669d1d7661c4" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:43:27 crc kubenswrapper[4558]: E0120 17:43:27.040059 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0a246013-4fe7-4d92-a517-414b1c235db0" containerName="nova-api-api" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.040064 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0a246013-4fe7-4d92-a517-414b1c235db0" containerName="nova-api-api" Jan 20 17:43:27 crc kubenswrapper[4558]: E0120 17:43:27.040073 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8738e339-fd1b-4122-899c-7b9521688aba" containerName="barbican-api-log" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.040079 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8738e339-fd1b-4122-899c-7b9521688aba" containerName="barbican-api-log" Jan 20 17:43:27 crc kubenswrapper[4558]: E0120 17:43:27.040089 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8738e339-fd1b-4122-899c-7b9521688aba" containerName="barbican-api" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.040095 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8738e339-fd1b-4122-899c-7b9521688aba" containerName="barbican-api" Jan 20 17:43:27 crc kubenswrapper[4558]: E0120 17:43:27.040105 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7935bdc8-1434-418a-a3ad-b165ee8be23e" containerName="keystone-api" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.040110 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7935bdc8-1434-418a-a3ad-b165ee8be23e" containerName="keystone-api" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.040359 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="25fe26b1-d308-452c-b15f-b1b272de6869" containerName="galera" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.040373 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8738e339-fd1b-4122-899c-7b9521688aba" containerName="barbican-api-log" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.040387 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="0a246013-4fe7-4d92-a517-414b1c235db0" containerName="nova-api-log" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 
17:43:27.040396 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a8df2ac8-5e44-4c2d-a1fb-11e7cdeda894" containerName="nova-cell1-conductor-conductor" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.040405 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd5b8a4a-4f31-4b17-95a3-669d1d7661c4" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.040429 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea5dfbd0-37c4-46a1-afad-77e85e35d7fb" containerName="nova-scheduler-scheduler" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.040438 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f087f7c-913a-4ab8-b905-5caa84469c77" containerName="openstack-network-exporter" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.040448 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="0394e10b-3811-4eef-a9ef-a785e1574649" containerName="openstack-network-exporter" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.040459 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a5db7bd5-e0c6-4645-ab63-3a2a21fdc854" containerName="memcached" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.040465 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="feedfa19-f182-4f7f-8b50-3d16c11e3510" containerName="placement-log" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.040475 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="25cfe04f-2194-405f-a9d7-82181e8ac22a" containerName="cinder-scheduler" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.040484 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="feedfa19-f182-4f7f-8b50-3d16c11e3510" containerName="placement-api" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.040494 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="25cfe04f-2194-405f-a9d7-82181e8ac22a" containerName="cinder-scheduler" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.040501 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f68fbac2-1677-4664-a344-77a41044ea2a" containerName="galera" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.040509 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="0394e10b-3811-4eef-a9ef-a785e1574649" containerName="ovsdbserver-sb" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.040521 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8738e339-fd1b-4122-899c-7b9521688aba" containerName="barbican-api" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.040530 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="25cfe04f-2194-405f-a9d7-82181e8ac22a" containerName="probe" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.040542 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f087f7c-913a-4ab8-b905-5caa84469c77" containerName="ovsdbserver-nb" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.040548 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="0a246013-4fe7-4d92-a517-414b1c235db0" containerName="nova-api-api" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.040558 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="7935bdc8-1434-418a-a3ad-b165ee8be23e" containerName="keystone-api" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.041688 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.054600 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-cinder-internal-svc" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.054757 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-cinder-public-svc" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.055391 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-api-config-data" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.056611 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.072050 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd5b8a4a-4f31-4b17-95a3-669d1d7661c4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fd5b8a4a-4f31-4b17-95a3-669d1d7661c4" (UID: "fd5b8a4a-4f31-4b17-95a3-669d1d7661c4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.086282 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/nova-metadata-0" podUID="a18f9da2-9c64-4047-ad02-206ac9c8aa61" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.1.2:8775/\": read tcp 10.217.0.2:44688->10.217.1.2:8775: read: connection reset by peer" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.086546 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/nova-metadata-0" podUID="a18f9da2-9c64-4047-ad02-206ac9c8aa61" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.1.2:8775/\": read tcp 10.217.0.2:44686->10.217.1.2:8775: read: connection reset by peer" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.104498 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0394e10b-3811-4eef-a9ef-a785e1574649-ovsdbserver-sb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-sb-tls-certs") pod "0394e10b-3811-4eef-a9ef-a785e1574649" (UID: "0394e10b-3811-4eef-a9ef-a785e1574649"). InnerVolumeSpecName "ovsdbserver-sb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.130106 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56d39f72-48d3-4690-a20a-099cb41daa7e-config-data\") pod \"cinder-api-0\" (UID: \"56d39f72-48d3-4690-a20a-099cb41daa7e\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.130156 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56d39f72-48d3-4690-a20a-099cb41daa7e-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"56d39f72-48d3-4690-a20a-099cb41daa7e\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.130194 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/56d39f72-48d3-4690-a20a-099cb41daa7e-scripts\") pod \"cinder-api-0\" (UID: \"56d39f72-48d3-4690-a20a-099cb41daa7e\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.130220 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/56d39f72-48d3-4690-a20a-099cb41daa7e-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"56d39f72-48d3-4690-a20a-099cb41daa7e\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.130249 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/56d39f72-48d3-4690-a20a-099cb41daa7e-config-data-custom\") pod \"cinder-api-0\" (UID: \"56d39f72-48d3-4690-a20a-099cb41daa7e\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.130279 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/56d39f72-48d3-4690-a20a-099cb41daa7e-logs\") pod \"cinder-api-0\" (UID: \"56d39f72-48d3-4690-a20a-099cb41daa7e\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.130304 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/56d39f72-48d3-4690-a20a-099cb41daa7e-etc-machine-id\") pod \"cinder-api-0\" (UID: \"56d39f72-48d3-4690-a20a-099cb41daa7e\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.130346 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/56d39f72-48d3-4690-a20a-099cb41daa7e-public-tls-certs\") pod \"cinder-api-0\" (UID: \"56d39f72-48d3-4690-a20a-099cb41daa7e\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.130452 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ntbmp\" (UniqueName: \"kubernetes.io/projected/56d39f72-48d3-4690-a20a-099cb41daa7e-kube-api-access-ntbmp\") pod \"cinder-api-0\" (UID: \"56d39f72-48d3-4690-a20a-099cb41daa7e\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:27 crc 
kubenswrapper[4558]: I0120 17:43:27.130732 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd5b8a4a-4f31-4b17-95a3-669d1d7661c4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.130751 4558 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/0394e10b-3811-4eef-a9ef-a785e1574649-ovsdbserver-sb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.134591 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.157285 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8738e339-fd1b-4122-899c-7b9521688aba-config-data" (OuterVolumeSpecName: "config-data") pod "8738e339-fd1b-4122-899c-7b9521688aba" (UID: "8738e339-fd1b-4122-899c-7b9521688aba"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.171091 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0a246013-4fe7-4d92-a517-414b1c235db0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0a246013-4fe7-4d92-a517-414b1c235db0" (UID: "0a246013-4fe7-4d92-a517-414b1c235db0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.179697 4558 generic.go:334] "Generic (PLEG): container finished" podID="c62cee5b-4dd8-4a07-995c-e1d0530d695b" containerID="9cd39e12244c6e213ae6f15945a045746c5236b6784d7e40141838e71fa71966" exitCode=0 Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.179764 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"c62cee5b-4dd8-4a07-995c-e1d0530d695b","Type":"ContainerDied","Data":"9cd39e12244c6e213ae6f15945a045746c5236b6784d7e40141838e71fa71966"} Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.183050 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-57496b565d-kwb5f" event={"ID":"fd5cfc55-8cd9-4a76-87b8-4050b8f1030f","Type":"ContainerStarted","Data":"3ffb7d59fade4765056a811588a9c3857b85a1ef4b82cfda814dd74069ed2bf9"} Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.188880 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-7975b7ff57-c5czg" event={"ID":"1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2","Type":"ContainerStarted","Data":"ec57a9b625bd2a7169cdebad1646b567fad8da5b766be344abba4af1c17ab765"} Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.200271 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/feedfa19-f182-4f7f-8b50-3d16c11e3510-config-data" (OuterVolumeSpecName: "config-data") pod "feedfa19-f182-4f7f-8b50-3d16c11e3510" (UID: "feedfa19-f182-4f7f-8b50-3d16c11e3510"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.201411 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-66bfc8789d-nsc64" event={"ID":"596f2da7-db14-44e3-8c93-5809b27e0cc6","Type":"ContainerStarted","Data":"61ba0b2fbb9343023136e7477f26eb137e81910dc996a6b2e8469ad51cae7b89"} Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.214999 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-worker-7975b7ff57-c5czg" podStartSLOduration=5.214988298 podStartE2EDuration="5.214988298s" podCreationTimestamp="2026-01-20 17:43:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:43:27.208025737 +0000 UTC m=+3700.968363704" watchObservedRunningTime="2026-01-20 17:43:27.214988298 +0000 UTC m=+3700.975326266" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.221394 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"d791ceee-e87d-4238-9267-c2d2c53faf96","Type":"ContainerStarted","Data":"5b7ae61c3b22dd588e6b8c80eec1302b76e65100b06918983ae9e73032d1d9d3"} Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.221703 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="d791ceee-e87d-4238-9267-c2d2c53faf96" containerName="ceilometer-central-agent" containerID="cri-o://dceb063becafd3ab16d08d70734f823d57aed693a7d29459757378db7e430997" gracePeriod=30 Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.221978 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.222242 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="d791ceee-e87d-4238-9267-c2d2c53faf96" containerName="sg-core" containerID="cri-o://335e25f3fbe4b65eb8180e118c21059bde36ef7ba7c2a18579f83e6691932e37" gracePeriod=30 Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.222250 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="d791ceee-e87d-4238-9267-c2d2c53faf96" containerName="proxy-httpd" containerID="cri-o://5b7ae61c3b22dd588e6b8c80eec1302b76e65100b06918983ae9e73032d1d9d3" gracePeriod=30 Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.222311 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="d791ceee-e87d-4238-9267-c2d2c53faf96" containerName="ceilometer-notification-agent" containerID="cri-o://b225f9e53aaf9806843cb0212349c3e9e4362df912776534664c2dac9cdadb6b" gracePeriod=30 Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.246832 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56d39f72-48d3-4690-a20a-099cb41daa7e-config-data\") pod \"cinder-api-0\" (UID: \"56d39f72-48d3-4690-a20a-099cb41daa7e\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.246886 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56d39f72-48d3-4690-a20a-099cb41daa7e-combined-ca-bundle\") pod \"cinder-api-0\" (UID: 
\"56d39f72-48d3-4690-a20a-099cb41daa7e\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.246911 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/56d39f72-48d3-4690-a20a-099cb41daa7e-scripts\") pod \"cinder-api-0\" (UID: \"56d39f72-48d3-4690-a20a-099cb41daa7e\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.246936 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/56d39f72-48d3-4690-a20a-099cb41daa7e-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"56d39f72-48d3-4690-a20a-099cb41daa7e\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.246965 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/56d39f72-48d3-4690-a20a-099cb41daa7e-config-data-custom\") pod \"cinder-api-0\" (UID: \"56d39f72-48d3-4690-a20a-099cb41daa7e\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.247002 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/56d39f72-48d3-4690-a20a-099cb41daa7e-logs\") pod \"cinder-api-0\" (UID: \"56d39f72-48d3-4690-a20a-099cb41daa7e\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.247024 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/56d39f72-48d3-4690-a20a-099cb41daa7e-etc-machine-id\") pod \"cinder-api-0\" (UID: \"56d39f72-48d3-4690-a20a-099cb41daa7e\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.247070 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/56d39f72-48d3-4690-a20a-099cb41daa7e-public-tls-certs\") pod \"cinder-api-0\" (UID: \"56d39f72-48d3-4690-a20a-099cb41daa7e\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.247114 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ntbmp\" (UniqueName: \"kubernetes.io/projected/56d39f72-48d3-4690-a20a-099cb41daa7e-kube-api-access-ntbmp\") pod \"cinder-api-0\" (UID: \"56d39f72-48d3-4690-a20a-099cb41daa7e\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.247349 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/feedfa19-f182-4f7f-8b50-3d16c11e3510-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.247363 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a246013-4fe7-4d92-a517-414b1c235db0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.247378 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.247388 4558 reconciler_common.go:293] "Volume detached for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/8738e339-fd1b-4122-899c-7b9521688aba-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.248234 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/56d39f72-48d3-4690-a20a-099cb41daa7e-etc-machine-id\") pod \"cinder-api-0\" (UID: \"56d39f72-48d3-4690-a20a-099cb41daa7e\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.251349 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/56d39f72-48d3-4690-a20a-099cb41daa7e-logs\") pod \"cinder-api-0\" (UID: \"56d39f72-48d3-4690-a20a-099cb41daa7e\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.251749 4558 generic.go:334] "Generic (PLEG): container finished" podID="a18f9da2-9c64-4047-ad02-206ac9c8aa61" containerID="b93052a48246c97ba463ca4f80d69e9a79fa1ad0762a5e959855ffc2d454361b" exitCode=0 Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.251826 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"a18f9da2-9c64-4047-ad02-206ac9c8aa61","Type":"ContainerDied","Data":"b93052a48246c97ba463ca4f80d69e9a79fa1ad0762a5e959855ffc2d454361b"} Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.273451 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ntbmp\" (UniqueName: \"kubernetes.io/projected/56d39f72-48d3-4690-a20a-099cb41daa7e-kube-api-access-ntbmp\") pod \"cinder-api-0\" (UID: \"56d39f72-48d3-4690-a20a-099cb41daa7e\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.276104 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-worker-6667c7bd65-7bw4v"] Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.276239 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=4.921893014 podStartE2EDuration="16.276218485s" podCreationTimestamp="2026-01-20 17:43:11 +0000 UTC" firstStartedPulling="2026-01-20 17:43:12.450538229 +0000 UTC m=+3686.210876185" lastFinishedPulling="2026-01-20 17:43:23.804863688 +0000 UTC m=+3697.565201656" observedRunningTime="2026-01-20 17:43:27.262037011 +0000 UTC m=+3701.022374978" watchObservedRunningTime="2026-01-20 17:43:27.276218485 +0000 UTC m=+3701.036556452" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.276352 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-worker-6667c7bd65-7bw4v" podUID="ba27bcbb-36f0-4575-afba-d6997d667fef" containerName="barbican-worker-log" containerID="cri-o://ce43e3bb6306e86eacbf56d2bb8a010f8f3998ab291973f5ad5e72276c882cdf" gracePeriod=30 Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.276662 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56d39f72-48d3-4690-a20a-099cb41daa7e-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"56d39f72-48d3-4690-a20a-099cb41daa7e\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.276657 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-worker-6667c7bd65-7bw4v" 
podUID="ba27bcbb-36f0-4575-afba-d6997d667fef" containerName="barbican-worker" containerID="cri-o://7a8e9aa388298d08f4da43fd2c878f6be898c08220911c53d8f06152ad76440d" gracePeriod=30 Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.277310 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-976c48458-wzswq" event={"ID":"b6ba5f2a-6486-4a84-bf85-23c00e907701","Type":"ContainerStarted","Data":"40f50103f3b6fefc0e60b4ba58c1c864622943da91e82cbad5dd4a5d1cd1a4ce"} Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.285148 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-685599669b-nv6cc" event={"ID":"33f7229d-bcae-434a-805e-530801e79b20","Type":"ContainerStarted","Data":"635027f6932f791a8f9f3dd8d9ed7cf74692fd163bf3a523455b64501df6fdc7"} Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.285308 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/56d39f72-48d3-4690-a20a-099cb41daa7e-public-tls-certs\") pod \"cinder-api-0\" (UID: \"56d39f72-48d3-4690-a20a-099cb41daa7e\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.291963 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/56d39f72-48d3-4690-a20a-099cb41daa7e-scripts\") pod \"cinder-api-0\" (UID: \"56d39f72-48d3-4690-a20a-099cb41daa7e\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.292479 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/56d39f72-48d3-4690-a20a-099cb41daa7e-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"56d39f72-48d3-4690-a20a-099cb41daa7e\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.296501 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25fe26b1-d308-452c-b15f-b1b272de6869-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "25fe26b1-d308-452c-b15f-b1b272de6869" (UID: "25fe26b1-d308-452c-b15f-b1b272de6869"). InnerVolumeSpecName "galera-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.297400 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56d39f72-48d3-4690-a20a-099cb41daa7e-config-data\") pod \"cinder-api-0\" (UID: \"56d39f72-48d3-4690-a20a-099cb41daa7e\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.306517 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/56d39f72-48d3-4690-a20a-099cb41daa7e-config-data-custom\") pod \"cinder-api-0\" (UID: \"56d39f72-48d3-4690-a20a-099cb41daa7e\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.306758 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.343271 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.344265 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-7bc45756fb-278vp" event={"ID":"432ec648-18a7-416a-85cc-409a30976a67","Type":"ContainerStarted","Data":"37abef38ac07dd4f0829b9b454166615d0d813a32d985cccdcab65f0ba9f6534"} Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.344332 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.345287 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-6bb454b456-wjhln" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.345778 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.346036 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.346265 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.347247 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.348208 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.351742 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-api-5796d4c4d8-2xz7t" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.352137 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.352351 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-7b8cd49c48-k928p" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.363244 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.368287 4558 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/25fe26b1-d308-452c-b15f-b1b272de6869-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.391539 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7935bdc8-1434-418a-a3ad-b165ee8be23e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7935bdc8-1434-418a-a3ad-b165ee8be23e" (UID: "7935bdc8-1434-418a-a3ad-b165ee8be23e"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.398843 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0a246013-4fe7-4d92-a517-414b1c235db0-config-data" (OuterVolumeSpecName: "config-data") pod "0a246013-4fe7-4d92-a517-414b1c235db0" (UID: "0a246013-4fe7-4d92-a517-414b1c235db0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.401796 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea5dfbd0-37c4-46a1-afad-77e85e35d7fb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ea5dfbd0-37c4-46a1-afad-77e85e35d7fb" (UID: "ea5dfbd0-37c4-46a1-afad-77e85e35d7fb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.410392 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8df2ac8-5e44-4c2d-a1fb-11e7cdeda894-config-data" (OuterVolumeSpecName: "config-data") pod "a8df2ac8-5e44-4c2d-a1fb-11e7cdeda894" (UID: "a8df2ac8-5e44-4c2d-a1fb-11e7cdeda894"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.422387 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8df2ac8-5e44-4c2d-a1fb-11e7cdeda894-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a8df2ac8-5e44-4c2d-a1fb-11e7cdeda894" (UID: "a8df2ac8-5e44-4c2d-a1fb-11e7cdeda894"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.444306 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f087f7c-913a-4ab8-b905-5caa84469c77-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5f087f7c-913a-4ab8-b905-5caa84469c77" (UID: "5f087f7c-913a-4ab8-b905-5caa84469c77"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.444429 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f68fbac2-1677-4664-a344-77a41044ea2a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f68fbac2-1677-4664-a344-77a41044ea2a" (UID: "f68fbac2-1677-4664-a344-77a41044ea2a"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.470355 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f087f7c-913a-4ab8-b905-5caa84469c77-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.470383 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7935bdc8-1434-418a-a3ad-b165ee8be23e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.470394 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f68fbac2-1677-4664-a344-77a41044ea2a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.470404 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8df2ac8-5e44-4c2d-a1fb-11e7cdeda894-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.470415 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0a246013-4fe7-4d92-a517-414b1c235db0-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.470424 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea5dfbd0-37c4-46a1-afad-77e85e35d7fb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.470434 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8df2ac8-5e44-4c2d-a1fb-11e7cdeda894-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.489425 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd5b8a4a-4f31-4b17-95a3-669d1d7661c4-config-data" (OuterVolumeSpecName: "config-data") pod "fd5b8a4a-4f31-4b17-95a3-669d1d7661c4" (UID: "fd5b8a4a-4f31-4b17-95a3-669d1d7661c4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.490122 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd5b8a4a-4f31-4b17-95a3-669d1d7661c4-vencrypt-tls-certs" (OuterVolumeSpecName: "vencrypt-tls-certs") pod "fd5b8a4a-4f31-4b17-95a3-669d1d7661c4" (UID: "fd5b8a4a-4f31-4b17-95a3-669d1d7661c4"). InnerVolumeSpecName "vencrypt-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.516766 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0394e10b-3811-4eef-a9ef-a785e1574649-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "0394e10b-3811-4eef-a9ef-a785e1574649" (UID: "0394e10b-3811-4eef-a9ef-a785e1574649"). InnerVolumeSpecName "metrics-certs-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.521224 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage14-crc" (UniqueName: "kubernetes.io/local-volume/local-storage14-crc") on node "crc" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.522769 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7935bdc8-1434-418a-a3ad-b165ee8be23e-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "7935bdc8-1434-418a-a3ad-b165ee8be23e" (UID: "7935bdc8-1434-418a-a3ad-b165ee8be23e"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.533260 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage02-crc" (UniqueName: "kubernetes.io/local-volume/local-storage02-crc") on node "crc" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.535629 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea5dfbd0-37c4-46a1-afad-77e85e35d7fb-config-data" (OuterVolumeSpecName: "config-data") pod "ea5dfbd0-37c4-46a1-afad-77e85e35d7fb" (UID: "ea5dfbd0-37c4-46a1-afad-77e85e35d7fb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.538639 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage15-crc" (UniqueName: "kubernetes.io/local-volume/local-storage15-crc") on node "crc" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.544060 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25fe26b1-d308-452c-b15f-b1b272de6869-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "25fe26b1-d308-452c-b15f-b1b272de6869" (UID: "25fe26b1-d308-452c-b15f-b1b272de6869"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.556305 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a5db7bd5-e0c6-4645-ab63-3a2a21fdc854-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a5db7bd5-e0c6-4645-ab63-3a2a21fdc854" (UID: "a5db7bd5-e0c6-4645-ab63-3a2a21fdc854"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.563016 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25cfe04f-2194-405f-a9d7-82181e8ac22a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "25cfe04f-2194-405f-a9d7-82181e8ac22a" (UID: "25cfe04f-2194-405f-a9d7-82181e8ac22a"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.591640 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.591670 4558 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/0394e10b-3811-4eef-a9ef-a785e1574649-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.591686 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7935bdc8-1434-418a-a3ad-b165ee8be23e-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.591699 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd5b8a4a-4f31-4b17-95a3-669d1d7661c4-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.591713 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a5db7bd5-e0c6-4645-ab63-3a2a21fdc854-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.591723 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.591734 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25cfe04f-2194-405f-a9d7-82181e8ac22a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.592293 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea5dfbd0-37c4-46a1-afad-77e85e35d7fb-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.592315 4558 reconciler_common.go:293] "Volume detached for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/fd5b8a4a-4f31-4b17-95a3-669d1d7661c4-vencrypt-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.592327 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.592339 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25fe26b1-d308-452c-b15f-b1b272de6869-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.594878 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0a246013-4fe7-4d92-a517-414b1c235db0-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "0a246013-4fe7-4d92-a517-414b1c235db0" (UID: "0a246013-4fe7-4d92-a517-414b1c235db0"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.596868 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd5b8a4a-4f31-4b17-95a3-669d1d7661c4-nova-novncproxy-tls-certs" (OuterVolumeSpecName: "nova-novncproxy-tls-certs") pod "fd5b8a4a-4f31-4b17-95a3-669d1d7661c4" (UID: "fd5b8a4a-4f31-4b17-95a3-669d1d7661c4"). InnerVolumeSpecName "nova-novncproxy-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.610456 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8738e339-fd1b-4122-899c-7b9521688aba-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "8738e339-fd1b-4122-899c-7b9521688aba" (UID: "8738e339-fd1b-4122-899c-7b9521688aba"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.623327 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/feedfa19-f182-4f7f-8b50-3d16c11e3510-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "feedfa19-f182-4f7f-8b50-3d16c11e3510" (UID: "feedfa19-f182-4f7f-8b50-3d16c11e3510"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.624568 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7935bdc8-1434-418a-a3ad-b165ee8be23e-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "7935bdc8-1434-418a-a3ad-b165ee8be23e" (UID: "7935bdc8-1434-418a-a3ad-b165ee8be23e"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.629335 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0a246013-4fe7-4d92-a517-414b1c235db0-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "0a246013-4fe7-4d92-a517-414b1c235db0" (UID: "0a246013-4fe7-4d92-a517-414b1c235db0"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.639309 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f68fbac2-1677-4664-a344-77a41044ea2a-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "f68fbac2-1677-4664-a344-77a41044ea2a" (UID: "f68fbac2-1677-4664-a344-77a41044ea2a"). InnerVolumeSpecName "galera-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.644151 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/feedfa19-f182-4f7f-8b50-3d16c11e3510-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "feedfa19-f182-4f7f-8b50-3d16c11e3510" (UID: "feedfa19-f182-4f7f-8b50-3d16c11e3510"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.654255 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a5db7bd5-e0c6-4645-ab63-3a2a21fdc854-memcached-tls-certs" (OuterVolumeSpecName: "memcached-tls-certs") pod "a5db7bd5-e0c6-4645-ab63-3a2a21fdc854" (UID: "a5db7bd5-e0c6-4645-ab63-3a2a21fdc854"). 
InnerVolumeSpecName "memcached-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.680600 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/feedfa19-f182-4f7f-8b50-3d16c11e3510-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "feedfa19-f182-4f7f-8b50-3d16c11e3510" (UID: "feedfa19-f182-4f7f-8b50-3d16c11e3510"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.693980 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7935bdc8-1434-418a-a3ad-b165ee8be23e-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.694003 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/feedfa19-f182-4f7f-8b50-3d16c11e3510-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.694017 4558 reconciler_common.go:293] "Volume detached for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/a5db7bd5-e0c6-4645-ab63-3a2a21fdc854-memcached-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.694026 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0a246013-4fe7-4d92-a517-414b1c235db0-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.694035 4558 reconciler_common.go:293] "Volume detached for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/fd5b8a4a-4f31-4b17-95a3-669d1d7661c4-nova-novncproxy-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.694045 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0a246013-4fe7-4d92-a517-414b1c235db0-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.694054 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/feedfa19-f182-4f7f-8b50-3d16c11e3510-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.694064 4558 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/f68fbac2-1677-4664-a344-77a41044ea2a-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.694073 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8738e339-fd1b-4122-899c-7b9521688aba-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.694084 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/feedfa19-f182-4f7f-8b50-3d16c11e3510-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.715422 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f087f7c-913a-4ab8-b905-5caa84469c77-ovsdbserver-nb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-nb-tls-certs") pod 
"5f087f7c-913a-4ab8-b905-5caa84469c77" (UID: "5f087f7c-913a-4ab8-b905-5caa84469c77"). InnerVolumeSpecName "ovsdbserver-nb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.734373 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8738e339-fd1b-4122-899c-7b9521688aba-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "8738e339-fd1b-4122-899c-7b9521688aba" (UID: "8738e339-fd1b-4122-899c-7b9521688aba"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.751810 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f087f7c-913a-4ab8-b905-5caa84469c77-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "5f087f7c-913a-4ab8-b905-5caa84469c77" (UID: "5f087f7c-913a-4ab8-b905-5caa84469c77"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.787371 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25cfe04f-2194-405f-a9d7-82181e8ac22a-config-data" (OuterVolumeSpecName: "config-data") pod "25cfe04f-2194-405f-a9d7-82181e8ac22a" (UID: "25cfe04f-2194-405f-a9d7-82181e8ac22a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.797386 4558 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/5f087f7c-913a-4ab8-b905-5caa84469c77-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.797417 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8738e339-fd1b-4122-899c-7b9521688aba-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.797433 4558 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/5f087f7c-913a-4ab8-b905-5caa84469c77-ovsdbserver-nb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.797446 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25cfe04f-2194-405f-a9d7-82181e8ac22a-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.825369 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.825898 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:27 crc kubenswrapper[4558]: I0120 17:43:27.855872 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.001478 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rrkw8\" (UniqueName: \"kubernetes.io/projected/5fb29028-042a-4108-a63d-a6cd215a6c31-kube-api-access-rrkw8\") pod \"5fb29028-042a-4108-a63d-a6cd215a6c31\" (UID: \"5fb29028-042a-4108-a63d-a6cd215a6c31\") " Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.001754 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5fb29028-042a-4108-a63d-a6cd215a6c31-config-data\") pod \"5fb29028-042a-4108-a63d-a6cd215a6c31\" (UID: \"5fb29028-042a-4108-a63d-a6cd215a6c31\") " Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.001808 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5fb29028-042a-4108-a63d-a6cd215a6c31-httpd-run\") pod \"5fb29028-042a-4108-a63d-a6cd215a6c31\" (UID: \"5fb29028-042a-4108-a63d-a6cd215a6c31\") " Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.001831 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5fb29028-042a-4108-a63d-a6cd215a6c31-internal-tls-certs\") pod \"5fb29028-042a-4108-a63d-a6cd215a6c31\" (UID: \"5fb29028-042a-4108-a63d-a6cd215a6c31\") " Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.001856 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a18f9da2-9c64-4047-ad02-206ac9c8aa61-logs\") pod \"a18f9da2-9c64-4047-ad02-206ac9c8aa61\" (UID: \"a18f9da2-9c64-4047-ad02-206ac9c8aa61\") " Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.001890 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5fb29028-042a-4108-a63d-a6cd215a6c31-scripts\") pod \"5fb29028-042a-4108-a63d-a6cd215a6c31\" (UID: \"5fb29028-042a-4108-a63d-a6cd215a6c31\") " Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.002092 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5fb29028-042a-4108-a63d-a6cd215a6c31-combined-ca-bundle\") pod \"5fb29028-042a-4108-a63d-a6cd215a6c31\" (UID: \"5fb29028-042a-4108-a63d-a6cd215a6c31\") " Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.002136 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a18f9da2-9c64-4047-ad02-206ac9c8aa61-combined-ca-bundle\") pod \"a18f9da2-9c64-4047-ad02-206ac9c8aa61\" (UID: \"a18f9da2-9c64-4047-ad02-206ac9c8aa61\") " Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.002201 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5fb29028-042a-4108-a63d-a6cd215a6c31-logs\") pod \"5fb29028-042a-4108-a63d-a6cd215a6c31\" (UID: \"5fb29028-042a-4108-a63d-a6cd215a6c31\") " Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.002226 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a18f9da2-9c64-4047-ad02-206ac9c8aa61-config-data\") pod \"a18f9da2-9c64-4047-ad02-206ac9c8aa61\" (UID: 
\"a18f9da2-9c64-4047-ad02-206ac9c8aa61\") " Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.002250 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"5fb29028-042a-4108-a63d-a6cd215a6c31\" (UID: \"5fb29028-042a-4108-a63d-a6cd215a6c31\") " Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.002335 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z6lv5\" (UniqueName: \"kubernetes.io/projected/a18f9da2-9c64-4047-ad02-206ac9c8aa61-kube-api-access-z6lv5\") pod \"a18f9da2-9c64-4047-ad02-206ac9c8aa61\" (UID: \"a18f9da2-9c64-4047-ad02-206ac9c8aa61\") " Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.002404 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/a18f9da2-9c64-4047-ad02-206ac9c8aa61-nova-metadata-tls-certs\") pod \"a18f9da2-9c64-4047-ad02-206ac9c8aa61\" (UID: \"a18f9da2-9c64-4047-ad02-206ac9c8aa61\") " Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.005655 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5fb29028-042a-4108-a63d-a6cd215a6c31-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "5fb29028-042a-4108-a63d-a6cd215a6c31" (UID: "5fb29028-042a-4108-a63d-a6cd215a6c31"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.007667 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "glance") pod "5fb29028-042a-4108-a63d-a6cd215a6c31" (UID: "5fb29028-042a-4108-a63d-a6cd215a6c31"). InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.010972 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5fb29028-042a-4108-a63d-a6cd215a6c31-logs" (OuterVolumeSpecName: "logs") pod "5fb29028-042a-4108-a63d-a6cd215a6c31" (UID: "5fb29028-042a-4108-a63d-a6cd215a6c31"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.022790 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a18f9da2-9c64-4047-ad02-206ac9c8aa61-logs" (OuterVolumeSpecName: "logs") pod "a18f9da2-9c64-4047-ad02-206ac9c8aa61" (UID: "a18f9da2-9c64-4047-ad02-206ac9c8aa61"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.024724 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fb29028-042a-4108-a63d-a6cd215a6c31-scripts" (OuterVolumeSpecName: "scripts") pod "5fb29028-042a-4108-a63d-a6cd215a6c31" (UID: "5fb29028-042a-4108-a63d-a6cd215a6c31"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.042391 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a18f9da2-9c64-4047-ad02-206ac9c8aa61-kube-api-access-z6lv5" (OuterVolumeSpecName: "kube-api-access-z6lv5") pod "a18f9da2-9c64-4047-ad02-206ac9c8aa61" (UID: "a18f9da2-9c64-4047-ad02-206ac9c8aa61"). 
InnerVolumeSpecName "kube-api-access-z6lv5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.042616 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fb29028-042a-4108-a63d-a6cd215a6c31-kube-api-access-rrkw8" (OuterVolumeSpecName: "kube-api-access-rrkw8") pod "5fb29028-042a-4108-a63d-a6cd215a6c31" (UID: "5fb29028-042a-4108-a63d-a6cd215a6c31"). InnerVolumeSpecName "kube-api-access-rrkw8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.042677 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a18f9da2-9c64-4047-ad02-206ac9c8aa61-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a18f9da2-9c64-4047-ad02-206ac9c8aa61" (UID: "a18f9da2-9c64-4047-ad02-206ac9c8aa61"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.078444 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fb29028-042a-4108-a63d-a6cd215a6c31-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5fb29028-042a-4108-a63d-a6cd215a6c31" (UID: "5fb29028-042a-4108-a63d-a6cd215a6c31"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.108561 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5fb29028-042a-4108-a63d-a6cd215a6c31-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.108588 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a18f9da2-9c64-4047-ad02-206ac9c8aa61-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.108599 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5fb29028-042a-4108-a63d-a6cd215a6c31-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.108638 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.108652 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z6lv5\" (UniqueName: \"kubernetes.io/projected/a18f9da2-9c64-4047-ad02-206ac9c8aa61-kube-api-access-z6lv5\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.108664 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rrkw8\" (UniqueName: \"kubernetes.io/projected/5fb29028-042a-4108-a63d-a6cd215a6c31-kube-api-access-rrkw8\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.108674 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5fb29028-042a-4108-a63d-a6cd215a6c31-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.108684 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a18f9da2-9c64-4047-ad02-206ac9c8aa61-logs\") on node 
\"crc\" DevicePath \"\"" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.108693 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5fb29028-042a-4108-a63d-a6cd215a6c31-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.118119 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a18f9da2-9c64-4047-ad02-206ac9c8aa61-config-data" (OuterVolumeSpecName: "config-data") pod "a18f9da2-9c64-4047-ad02-206ac9c8aa61" (UID: "a18f9da2-9c64-4047-ad02-206ac9c8aa61"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.124631 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fb29028-042a-4108-a63d-a6cd215a6c31-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "5fb29028-042a-4108-a63d-a6cd215a6c31" (UID: "5fb29028-042a-4108-a63d-a6cd215a6c31"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.130879 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.144799 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a18f9da2-9c64-4047-ad02-206ac9c8aa61-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "a18f9da2-9c64-4047-ad02-206ac9c8aa61" (UID: "a18f9da2-9c64-4047-ad02-206ac9c8aa61"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.151902 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fb29028-042a-4108-a63d-a6cd215a6c31-config-data" (OuterVolumeSpecName: "config-data") pod "5fb29028-042a-4108-a63d-a6cd215a6c31" (UID: "5fb29028-042a-4108-a63d-a6cd215a6c31"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.211148 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a18f9da2-9c64-4047-ad02-206ac9c8aa61-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.211333 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.211403 4558 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/a18f9da2-9c64-4047-ad02-206ac9c8aa61-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.211460 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5fb29028-042a-4108-a63d-a6cd215a6c31-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.211513 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5fb29028-042a-4108-a63d-a6cd215a6c31-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.247131 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.263930 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.285212 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.299145 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-6bb454b456-wjhln"] Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.361569 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-6bb454b456-wjhln"] Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.377225 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:43:28 crc kubenswrapper[4558]: E0120 17:43:28.377666 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a18f9da2-9c64-4047-ad02-206ac9c8aa61" containerName="nova-metadata-log" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.377680 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a18f9da2-9c64-4047-ad02-206ac9c8aa61" containerName="nova-metadata-log" Jan 20 17:43:28 crc kubenswrapper[4558]: E0120 17:43:28.377699 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a18f9da2-9c64-4047-ad02-206ac9c8aa61" containerName="nova-metadata-metadata" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.377706 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a18f9da2-9c64-4047-ad02-206ac9c8aa61" containerName="nova-metadata-metadata" Jan 20 17:43:28 crc kubenswrapper[4558]: E0120 17:43:28.377717 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5fb29028-042a-4108-a63d-a6cd215a6c31" containerName="glance-log" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.377723 4558 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="5fb29028-042a-4108-a63d-a6cd215a6c31" containerName="glance-log" Jan 20 17:43:28 crc kubenswrapper[4558]: E0120 17:43:28.377738 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c62cee5b-4dd8-4a07-995c-e1d0530d695b" containerName="glance-log" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.377744 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c62cee5b-4dd8-4a07-995c-e1d0530d695b" containerName="glance-log" Jan 20 17:43:28 crc kubenswrapper[4558]: E0120 17:43:28.377758 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c62cee5b-4dd8-4a07-995c-e1d0530d695b" containerName="glance-httpd" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.377764 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c62cee5b-4dd8-4a07-995c-e1d0530d695b" containerName="glance-httpd" Jan 20 17:43:28 crc kubenswrapper[4558]: E0120 17:43:28.377771 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5fb29028-042a-4108-a63d-a6cd215a6c31" containerName="glance-httpd" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.377777 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="5fb29028-042a-4108-a63d-a6cd215a6c31" containerName="glance-httpd" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.377973 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="5fb29028-042a-4108-a63d-a6cd215a6c31" containerName="glance-httpd" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.377982 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="5fb29028-042a-4108-a63d-a6cd215a6c31" containerName="glance-log" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.377997 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a18f9da2-9c64-4047-ad02-206ac9c8aa61" containerName="nova-metadata-log" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.378007 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c62cee5b-4dd8-4a07-995c-e1d0530d695b" containerName="glance-httpd" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.378016 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a18f9da2-9c64-4047-ad02-206ac9c8aa61" containerName="nova-metadata-metadata" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.378025 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c62cee5b-4dd8-4a07-995c-e1d0530d695b" containerName="glance-log" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.378752 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.391158 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"c62cee5b-4dd8-4a07-995c-e1d0530d695b","Type":"ContainerDied","Data":"f33ff773b75cf39ed89448dc8f541ef8ba419a59df23280ad0a9a8e0b4815a34"} Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.391286 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.391578 4558 scope.go:117] "RemoveContainer" containerID="9cd39e12244c6e213ae6f15945a045746c5236b6784d7e40141838e71fa71966" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.393537 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-scheduler-config-data" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.421134 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c62cee5b-4dd8-4a07-995c-e1d0530d695b-logs\") pod \"c62cee5b-4dd8-4a07-995c-e1d0530d695b\" (UID: \"c62cee5b-4dd8-4a07-995c-e1d0530d695b\") " Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.422269 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c62cee5b-4dd8-4a07-995c-e1d0530d695b-logs" (OuterVolumeSpecName: "logs") pod "c62cee5b-4dd8-4a07-995c-e1d0530d695b" (UID: "c62cee5b-4dd8-4a07-995c-e1d0530d695b"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.422489 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mwtgp\" (UniqueName: \"kubernetes.io/projected/c62cee5b-4dd8-4a07-995c-e1d0530d695b-kube-api-access-mwtgp\") pod \"c62cee5b-4dd8-4a07-995c-e1d0530d695b\" (UID: \"c62cee5b-4dd8-4a07-995c-e1d0530d695b\") " Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.422582 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c62cee5b-4dd8-4a07-995c-e1d0530d695b-config-data\") pod \"c62cee5b-4dd8-4a07-995c-e1d0530d695b\" (UID: \"c62cee5b-4dd8-4a07-995c-e1d0530d695b\") " Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.422674 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c62cee5b-4dd8-4a07-995c-e1d0530d695b-httpd-run\") pod \"c62cee5b-4dd8-4a07-995c-e1d0530d695b\" (UID: \"c62cee5b-4dd8-4a07-995c-e1d0530d695b\") " Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.422695 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"c62cee5b-4dd8-4a07-995c-e1d0530d695b\" (UID: \"c62cee5b-4dd8-4a07-995c-e1d0530d695b\") " Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.422724 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c62cee5b-4dd8-4a07-995c-e1d0530d695b-scripts\") pod \"c62cee5b-4dd8-4a07-995c-e1d0530d695b\" (UID: \"c62cee5b-4dd8-4a07-995c-e1d0530d695b\") " Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.422756 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c62cee5b-4dd8-4a07-995c-e1d0530d695b-public-tls-certs\") pod \"c62cee5b-4dd8-4a07-995c-e1d0530d695b\" (UID: \"c62cee5b-4dd8-4a07-995c-e1d0530d695b\") " Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.422824 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c62cee5b-4dd8-4a07-995c-e1d0530d695b-combined-ca-bundle\") pod 
\"c62cee5b-4dd8-4a07-995c-e1d0530d695b\" (UID: \"c62cee5b-4dd8-4a07-995c-e1d0530d695b\") " Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.423668 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c62cee5b-4dd8-4a07-995c-e1d0530d695b-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.437922 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.437967 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-7bc45756fb-278vp" event={"ID":"432ec648-18a7-416a-85cc-409a30976a67","Type":"ContainerStarted","Data":"0dfc6d5c9d8e2b433cbe0bd6d9c9a1260fd3286262cd3972d6f5db23fdc3d982"} Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.438674 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c62cee5b-4dd8-4a07-995c-e1d0530d695b-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "c62cee5b-4dd8-4a07-995c-e1d0530d695b" (UID: "c62cee5b-4dd8-4a07-995c-e1d0530d695b"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.438811 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/barbican-api-7bc45756fb-278vp" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.438855 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/barbican-api-7bc45756fb-278vp" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.452919 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.463577 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c62cee5b-4dd8-4a07-995c-e1d0530d695b-scripts" (OuterVolumeSpecName: "scripts") pod "c62cee5b-4dd8-4a07-995c-e1d0530d695b" (UID: "c62cee5b-4dd8-4a07-995c-e1d0530d695b"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.464083 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-66bfc8789d-nsc64" event={"ID":"596f2da7-db14-44e3-8c93-5809b27e0cc6","Type":"ContainerStarted","Data":"6f7e169a9c1c7c3746b3c5209448d3d9efd46fcac41efa29c9cdafd00ee4a9c7"} Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.466917 4558 generic.go:334] "Generic (PLEG): container finished" podID="d791ceee-e87d-4238-9267-c2d2c53faf96" containerID="5b7ae61c3b22dd588e6b8c80eec1302b76e65100b06918983ae9e73032d1d9d3" exitCode=0 Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.466945 4558 generic.go:334] "Generic (PLEG): container finished" podID="d791ceee-e87d-4238-9267-c2d2c53faf96" containerID="335e25f3fbe4b65eb8180e118c21059bde36ef7ba7c2a18579f83e6691932e37" exitCode=2 Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.466953 4558 generic.go:334] "Generic (PLEG): container finished" podID="d791ceee-e87d-4238-9267-c2d2c53faf96" containerID="dceb063becafd3ab16d08d70734f823d57aed693a7d29459757378db7e430997" exitCode=0 Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.466993 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"d791ceee-e87d-4238-9267-c2d2c53faf96","Type":"ContainerDied","Data":"5b7ae61c3b22dd588e6b8c80eec1302b76e65100b06918983ae9e73032d1d9d3"} Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.467017 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"d791ceee-e87d-4238-9267-c2d2c53faf96","Type":"ContainerDied","Data":"335e25f3fbe4b65eb8180e118c21059bde36ef7ba7c2a18579f83e6691932e37"} Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.467027 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"d791ceee-e87d-4238-9267-c2d2c53faf96","Type":"ContainerDied","Data":"dceb063becafd3ab16d08d70734f823d57aed693a7d29459757378db7e430997"} Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.470279 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"a18f9da2-9c64-4047-ad02-206ac9c8aa61","Type":"ContainerDied","Data":"be02008edfea8ba65046d300ba5e2d6958232a81128fd832badf6ad34ab5cf4e"} Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.470367 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.473929 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "glance") pod "c62cee5b-4dd8-4a07-995c-e1d0530d695b" (UID: "c62cee5b-4dd8-4a07-995c-e1d0530d695b"). InnerVolumeSpecName "local-storage09-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.482985 4558 scope.go:117] "RemoveContainer" containerID="5b28187d77dd8f75fd99f025ba3fddc2ac465afeab96893a305bda76c6c1c56c" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.510913 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.517976 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c62cee5b-4dd8-4a07-995c-e1d0530d695b-kube-api-access-mwtgp" (OuterVolumeSpecName: "kube-api-access-mwtgp") pod "c62cee5b-4dd8-4a07-995c-e1d0530d695b" (UID: "c62cee5b-4dd8-4a07-995c-e1d0530d695b"). InnerVolumeSpecName "kube-api-access-mwtgp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.519008 4558 generic.go:334] "Generic (PLEG): container finished" podID="5fb29028-042a-4108-a63d-a6cd215a6c31" containerID="c5a1a31136c32563a3ef2c47370606c762a9054e380e6b74ad6d51b5c145876e" exitCode=0 Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.519068 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"5fb29028-042a-4108-a63d-a6cd215a6c31","Type":"ContainerDied","Data":"c5a1a31136c32563a3ef2c47370606c762a9054e380e6b74ad6d51b5c145876e"} Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.519955 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"5fb29028-042a-4108-a63d-a6cd215a6c31","Type":"ContainerDied","Data":"b21900be3b919c3bb7fc99727d394d7a9b4ad2d3e34d5e4d78d620329e20cc86"} Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.520062 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.527782 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ddd57d3b-c9de-46e3-897e-1a50ae49630e-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"ddd57d3b-c9de-46e3-897e-1a50ae49630e\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.527822 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v7wft\" (UniqueName: \"kubernetes.io/projected/ddd57d3b-c9de-46e3-897e-1a50ae49630e-kube-api-access-v7wft\") pod \"nova-scheduler-0\" (UID: \"ddd57d3b-c9de-46e3-897e-1a50ae49630e\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.527861 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ddd57d3b-c9de-46e3-897e-1a50ae49630e-config-data\") pod \"nova-scheduler-0\" (UID: \"ddd57d3b-c9de-46e3-897e-1a50ae49630e\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.527968 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c62cee5b-4dd8-4a07-995c-e1d0530d695b-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.527996 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.528007 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c62cee5b-4dd8-4a07-995c-e1d0530d695b-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.528017 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mwtgp\" (UniqueName: \"kubernetes.io/projected/c62cee5b-4dd8-4a07-995c-e1d0530d695b-kube-api-access-mwtgp\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.558860 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-57496b565d-kwb5f" event={"ID":"fd5cfc55-8cd9-4a76-87b8-4050b8f1030f","Type":"ContainerStarted","Data":"08599f71319860b69b6674beac66044f825719a01bdca428dd565ea148a25bb0"} Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.593442 4558 generic.go:334] "Generic (PLEG): container finished" podID="ba27bcbb-36f0-4575-afba-d6997d667fef" containerID="ce43e3bb6306e86eacbf56d2bb8a010f8f3998ab291973f5ad5e72276c882cdf" exitCode=143 Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.602094 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0a246013-4fe7-4d92-a517-414b1c235db0" path="/var/lib/kubelet/pods/0a246013-4fe7-4d92-a517-414b1c235db0/volumes" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.603496 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7935bdc8-1434-418a-a3ad-b165ee8be23e" path="/var/lib/kubelet/pods/7935bdc8-1434-418a-a3ad-b165ee8be23e/volumes" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.610245 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="a651cf04-6082-49fc-813c-f9de65420553" path="/var/lib/kubelet/pods/a651cf04-6082-49fc-813c-f9de65420553/volumes" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.610770 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ea5dfbd0-37c4-46a1-afad-77e85e35d7fb" path="/var/lib/kubelet/pods/ea5dfbd0-37c4-46a1-afad-77e85e35d7fb/volumes" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.620086 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-6667c7bd65-7bw4v" event={"ID":"ba27bcbb-36f0-4575-afba-d6997d667fef","Type":"ContainerDied","Data":"ce43e3bb6306e86eacbf56d2bb8a010f8f3998ab291973f5ad5e72276c882cdf"} Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.620150 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.621817 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.621837 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-api-5796d4c4d8-2xz7t"] Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.621931 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.624017 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-api-config-data" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.624216 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-public-svc" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.624345 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-internal-svc" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.631557 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ddd57d3b-c9de-46e3-897e-1a50ae49630e-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"ddd57d3b-c9de-46e3-897e-1a50ae49630e\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.631594 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v7wft\" (UniqueName: \"kubernetes.io/projected/ddd57d3b-c9de-46e3-897e-1a50ae49630e-kube-api-access-v7wft\") pod \"nova-scheduler-0\" (UID: \"ddd57d3b-c9de-46e3-897e-1a50ae49630e\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.631636 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ddd57d3b-c9de-46e3-897e-1a50ae49630e-config-data\") pod \"nova-scheduler-0\" (UID: \"ddd57d3b-c9de-46e3-897e-1a50ae49630e\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.637463 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-api-5796d4c4d8-2xz7t"] Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.666383 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ddd57d3b-c9de-46e3-897e-1a50ae49630e-config-data\") pod \"nova-scheduler-0\" (UID: \"ddd57d3b-c9de-46e3-897e-1a50ae49630e\") " 
pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.671385 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ddd57d3b-c9de-46e3-897e-1a50ae49630e-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"ddd57d3b-c9de-46e3-897e-1a50ae49630e\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.674221 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-7b8cd49c48-k928p"] Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.682062 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.685495 4558 scope.go:117] "RemoveContainer" containerID="b93052a48246c97ba463ca4f80d69e9a79fa1ad0762a5e959855ffc2d454361b" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.691181 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/placement-7b8cd49c48-k928p"] Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.691708 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-976c48458-wzswq" event={"ID":"b6ba5f2a-6486-4a84-bf85-23c00e907701","Type":"ContainerStarted","Data":"793b7e60b027092ef8139183c3e6dd4b54469caf6cf02f94955f48fe3bbfbd9f"} Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.691954 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/placement-976c48458-wzswq" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.693218 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/placement-976c48458-wzswq" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.695233 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-685599669b-nv6cc" event={"ID":"33f7229d-bcae-434a-805e-530801e79b20","Type":"ContainerStarted","Data":"d3f01f262533ef464619db0861fecd0a8366a9c544f5d180d20689cd0e292a37"} Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.695290 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/keystone-685599669b-nv6cc" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.714643 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.727964 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v7wft\" (UniqueName: \"kubernetes.io/projected/ddd57d3b-c9de-46e3-897e-1a50ae49630e-kube-api-access-v7wft\") pod \"nova-scheduler-0\" (UID: \"ddd57d3b-c9de-46e3-897e-1a50ae49630e\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.744861 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4ea608e7-9a19-47fb-8e14-a629451e7c03-config-data\") pod \"nova-api-0\" (UID: \"4ea608e7-9a19-47fb-8e14-a629451e7c03\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.744987 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/4ea608e7-9a19-47fb-8e14-a629451e7c03-logs\") pod \"nova-api-0\" (UID: \"4ea608e7-9a19-47fb-8e14-a629451e7c03\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.745111 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vb7nd\" (UniqueName: \"kubernetes.io/projected/4ea608e7-9a19-47fb-8e14-a629451e7c03-kube-api-access-vb7nd\") pod \"nova-api-0\" (UID: \"4ea608e7-9a19-47fb-8e14-a629451e7c03\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.747087 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.750300 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.751966 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ea608e7-9a19-47fb-8e14-a629451e7c03-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"4ea608e7-9a19-47fb-8e14-a629451e7c03\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.752030 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4ea608e7-9a19-47fb-8e14-a629451e7c03-internal-tls-certs\") pod \"nova-api-0\" (UID: \"4ea608e7-9a19-47fb-8e14-a629451e7c03\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.752099 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4ea608e7-9a19-47fb-8e14-a629451e7c03-public-tls-certs\") pod \"nova-api-0\" (UID: \"4ea608e7-9a19-47fb-8e14-a629451e7c03\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.752640 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.766230 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.784215 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.786088 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.787733 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-galera-openstack-svc" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.790698 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-config-data" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.790839 4558 scope.go:117] "RemoveContainer" containerID="b1459a5f2896c316432b666d6a44cd9116c1172c341136db96a48899a0dd4884" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.790932 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"galera-openstack-dockercfg-xjdvx" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.791063 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-scripts" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.806831 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.818268 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.836019 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.840011 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c62cee5b-4dd8-4a07-995c-e1d0530d695b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c62cee5b-4dd8-4a07-995c-e1d0530d695b" (UID: "c62cee5b-4dd8-4a07-995c-e1d0530d695b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.844223 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.846741 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.851067 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.851343 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c62cee5b-4dd8-4a07-995c-e1d0530d695b-config-data" (OuterVolumeSpecName: "config-data") pod "c62cee5b-4dd8-4a07-995c-e1d0530d695b" (UID: "c62cee5b-4dd8-4a07-995c-e1d0530d695b"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.851668 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-novncproxy-cell1-vencrypt" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.852413 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-novncproxy-config-data" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.852574 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-novncproxy-cell1-public-svc" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.855620 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ea608e7-9a19-47fb-8e14-a629451e7c03-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"4ea608e7-9a19-47fb-8e14-a629451e7c03\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.855982 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4ea608e7-9a19-47fb-8e14-a629451e7c03-internal-tls-certs\") pod \"nova-api-0\" (UID: \"4ea608e7-9a19-47fb-8e14-a629451e7c03\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.856667 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4ea608e7-9a19-47fb-8e14-a629451e7c03-public-tls-certs\") pod \"nova-api-0\" (UID: \"4ea608e7-9a19-47fb-8e14-a629451e7c03\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.857776 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4ea608e7-9a19-47fb-8e14-a629451e7c03-config-data\") pod \"nova-api-0\" (UID: \"4ea608e7-9a19-47fb-8e14-a629451e7c03\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.857926 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4ea608e7-9a19-47fb-8e14-a629451e7c03-logs\") pod \"nova-api-0\" (UID: \"4ea608e7-9a19-47fb-8e14-a629451e7c03\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.858105 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vb7nd\" (UniqueName: \"kubernetes.io/projected/4ea608e7-9a19-47fb-8e14-a629451e7c03-kube-api-access-vb7nd\") pod \"nova-api-0\" (UID: \"4ea608e7-9a19-47fb-8e14-a629451e7c03\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.858277 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c62cee5b-4dd8-4a07-995c-e1d0530d695b-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.858349 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c62cee5b-4dd8-4a07-995c-e1d0530d695b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.859735 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/4ea608e7-9a19-47fb-8e14-a629451e7c03-logs\") pod \"nova-api-0\" (UID: \"4ea608e7-9a19-47fb-8e14-a629451e7c03\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.861186 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ea608e7-9a19-47fb-8e14-a629451e7c03-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"4ea608e7-9a19-47fb-8e14-a629451e7c03\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.872108 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4ea608e7-9a19-47fb-8e14-a629451e7c03-internal-tls-certs\") pod \"nova-api-0\" (UID: \"4ea608e7-9a19-47fb-8e14-a629451e7c03\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.872237 4558 scope.go:117] "RemoveContainer" containerID="c5a1a31136c32563a3ef2c47370606c762a9054e380e6b74ad6d51b5c145876e" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.876685 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.884286 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c62cee5b-4dd8-4a07-995c-e1d0530d695b-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "c62cee5b-4dd8-4a07-995c-e1d0530d695b" (UID: "c62cee5b-4dd8-4a07-995c-e1d0530d695b"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.885730 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.886013 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4ea608e7-9a19-47fb-8e14-a629451e7c03-config-data\") pod \"nova-api-0\" (UID: \"4ea608e7-9a19-47fb-8e14-a629451e7c03\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.886587 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4ea608e7-9a19-47fb-8e14-a629451e7c03-public-tls-certs\") pod \"nova-api-0\" (UID: \"4ea608e7-9a19-47fb-8e14-a629451e7c03\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.891615 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vb7nd\" (UniqueName: \"kubernetes.io/projected/4ea608e7-9a19-47fb-8e14-a629451e7c03-kube-api-access-vb7nd\") pod \"nova-api-0\" (UID: \"4ea608e7-9a19-47fb-8e14-a629451e7c03\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.897724 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.917294 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.918678 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.921430 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"memcached-config-data" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.921697 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-memcached-svc" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.921885 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"memcached-memcached-dockercfg-sqmsc" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.934989 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.935261 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.942267 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.951891 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.955613 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.959416 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-scheduler-config-data" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.961097 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02038ce5-8236-4304-922b-39d5c9e2459e-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"02038ce5-8236-4304-922b-39d5c9e2459e\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.961147 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-76xbv\" (UniqueName: \"kubernetes.io/projected/02038ce5-8236-4304-922b-39d5c9e2459e-kube-api-access-76xbv\") pod \"nova-cell1-novncproxy-0\" (UID: \"02038ce5-8236-4304-922b-39d5c9e2459e\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.961207 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/02038ce5-8236-4304-922b-39d5c9e2459e-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"02038ce5-8236-4304-922b-39d5c9e2459e\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.961263 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/38a5808f-1fa5-49cb-afb7-d7676416cd26-operator-scripts\") pod \"openstack-galera-0\" (UID: \"38a5808f-1fa5-49cb-afb7-d7676416cd26\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.961373 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: 
\"kubernetes.io/configmap/38a5808f-1fa5-49cb-afb7-d7676416cd26-config-data-default\") pod \"openstack-galera-0\" (UID: \"38a5808f-1fa5-49cb-afb7-d7676416cd26\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.961406 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02038ce5-8236-4304-922b-39d5c9e2459e-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"02038ce5-8236-4304-922b-39d5c9e2459e\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.961423 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/38a5808f-1fa5-49cb-afb7-d7676416cd26-config-data-generated\") pod \"openstack-galera-0\" (UID: \"38a5808f-1fa5-49cb-afb7-d7676416cd26\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.961472 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gk8sj\" (UniqueName: \"kubernetes.io/projected/38a5808f-1fa5-49cb-afb7-d7676416cd26-kube-api-access-gk8sj\") pod \"openstack-galera-0\" (UID: \"38a5808f-1fa5-49cb-afb7-d7676416cd26\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.961495 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38a5808f-1fa5-49cb-afb7-d7676416cd26-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"38a5808f-1fa5-49cb-afb7-d7676416cd26\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.961550 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/38a5808f-1fa5-49cb-afb7-d7676416cd26-kolla-config\") pod \"openstack-galera-0\" (UID: \"38a5808f-1fa5-49cb-afb7-d7676416cd26\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.961573 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/02038ce5-8236-4304-922b-39d5c9e2459e-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"02038ce5-8236-4304-922b-39d5c9e2459e\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.961620 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/38a5808f-1fa5-49cb-afb7-d7676416cd26-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"38a5808f-1fa5-49cb-afb7-d7676416cd26\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.961642 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"openstack-galera-0\" (UID: \"38a5808f-1fa5-49cb-afb7-d7676416cd26\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.961717 4558 reconciler_common.go:293] "Volume 
detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c62cee5b-4dd8-4a07-995c-e1d0530d695b-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.972777 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.982114 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.984285 4558 scope.go:117] "RemoveContainer" containerID="aa17c31d66a9fe9d9ba2ad19998790f47fa790295400fa6c7baccd8f6d22aa0e" Jan 20 17:43:28 crc kubenswrapper[4558]: I0120 17:43:28.991884 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.016514 4558 scope.go:117] "RemoveContainer" containerID="c5a1a31136c32563a3ef2c47370606c762a9054e380e6b74ad6d51b5c145876e" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.022218 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:43:29 crc kubenswrapper[4558]: E0120 17:43:29.023971 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c5a1a31136c32563a3ef2c47370606c762a9054e380e6b74ad6d51b5c145876e\": container with ID starting with c5a1a31136c32563a3ef2c47370606c762a9054e380e6b74ad6d51b5c145876e not found: ID does not exist" containerID="c5a1a31136c32563a3ef2c47370606c762a9054e380e6b74ad6d51b5c145876e" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.024008 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c5a1a31136c32563a3ef2c47370606c762a9054e380e6b74ad6d51b5c145876e"} err="failed to get container status \"c5a1a31136c32563a3ef2c47370606c762a9054e380e6b74ad6d51b5c145876e\": rpc error: code = NotFound desc = could not find container \"c5a1a31136c32563a3ef2c47370606c762a9054e380e6b74ad6d51b5c145876e\": container with ID starting with c5a1a31136c32563a3ef2c47370606c762a9054e380e6b74ad6d51b5c145876e not found: ID does not exist" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.024035 4558 scope.go:117] "RemoveContainer" containerID="aa17c31d66a9fe9d9ba2ad19998790f47fa790295400fa6c7baccd8f6d22aa0e" Jan 20 17:43:29 crc kubenswrapper[4558]: E0120 17:43:29.024840 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aa17c31d66a9fe9d9ba2ad19998790f47fa790295400fa6c7baccd8f6d22aa0e\": container with ID starting with aa17c31d66a9fe9d9ba2ad19998790f47fa790295400fa6c7baccd8f6d22aa0e not found: ID does not exist" containerID="aa17c31d66a9fe9d9ba2ad19998790f47fa790295400fa6c7baccd8f6d22aa0e" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.024868 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aa17c31d66a9fe9d9ba2ad19998790f47fa790295400fa6c7baccd8f6d22aa0e"} err="failed to get container status \"aa17c31d66a9fe9d9ba2ad19998790f47fa790295400fa6c7baccd8f6d22aa0e\": rpc error: code = NotFound desc = could not find container \"aa17c31d66a9fe9d9ba2ad19998790f47fa790295400fa6c7baccd8f6d22aa0e\": container with ID starting with aa17c31d66a9fe9d9ba2ad19998790f47fa790295400fa6c7baccd8f6d22aa0e not found: ID does not exist" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 
17:43:29.030415 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.032188 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.034689 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"galera-openstack-cell1-dockercfg-mk4rp" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.034849 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-galera-openstack-cell1-svc" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.035032 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-cell1-config-data" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.035476 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-cell1-scripts" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.042335 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.051232 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.069273 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.073332 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.072518 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/38a5808f-1fa5-49cb-afb7-d7676416cd26-operator-scripts\") pod \"openstack-galera-0\" (UID: \"38a5808f-1fa5-49cb-afb7-d7676416cd26\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.073449 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7faabe16-9de5-49dc-bab6-44e173f4403c-config-data\") pod \"cinder-scheduler-0\" (UID: \"7faabe16-9de5-49dc-bab6-44e173f4403c\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.073532 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/38a5808f-1fa5-49cb-afb7-d7676416cd26-config-data-default\") pod \"openstack-galera-0\" (UID: \"38a5808f-1fa5-49cb-afb7-d7676416cd26\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.073549 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7faabe16-9de5-49dc-bab6-44e173f4403c-scripts\") pod \"cinder-scheduler-0\" (UID: \"7faabe16-9de5-49dc-bab6-44e173f4403c\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.073581 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/7faabe16-9de5-49dc-bab6-44e173f4403c-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"7faabe16-9de5-49dc-bab6-44e173f4403c\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.073618 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/38a5808f-1fa5-49cb-afb7-d7676416cd26-config-data-generated\") pod \"openstack-galera-0\" (UID: \"38a5808f-1fa5-49cb-afb7-d7676416cd26\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.073652 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02038ce5-8236-4304-922b-39d5c9e2459e-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"02038ce5-8236-4304-922b-39d5c9e2459e\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.073731 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gk8sj\" (UniqueName: \"kubernetes.io/projected/38a5808f-1fa5-49cb-afb7-d7676416cd26-kube-api-access-gk8sj\") pod \"openstack-galera-0\" (UID: \"38a5808f-1fa5-49cb-afb7-d7676416cd26\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.073735 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/38a5808f-1fa5-49cb-afb7-d7676416cd26-operator-scripts\") pod \"openstack-galera-0\" (UID: \"38a5808f-1fa5-49cb-afb7-d7676416cd26\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.073768 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38a5808f-1fa5-49cb-afb7-d7676416cd26-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"38a5808f-1fa5-49cb-afb7-d7676416cd26\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.073818 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z8cj9\" (UniqueName: \"kubernetes.io/projected/b544295f-900d-4bff-adb5-fe0aaf262026-kube-api-access-z8cj9\") pod \"memcached-0\" (UID: \"b544295f-900d-4bff-adb5-fe0aaf262026\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.073864 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7faabe16-9de5-49dc-bab6-44e173f4403c-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"7faabe16-9de5-49dc-bab6-44e173f4403c\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.073898 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/38a5808f-1fa5-49cb-afb7-d7676416cd26-kolla-config\") pod \"openstack-galera-0\" (UID: \"38a5808f-1fa5-49cb-afb7-d7676416cd26\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.073928 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: 
\"kubernetes.io/configmap/b544295f-900d-4bff-adb5-fe0aaf262026-kolla-config\") pod \"memcached-0\" (UID: \"b544295f-900d-4bff-adb5-fe0aaf262026\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.073951 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/02038ce5-8236-4304-922b-39d5c9e2459e-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"02038ce5-8236-4304-922b-39d5c9e2459e\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.073969 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7faabe16-9de5-49dc-bab6-44e173f4403c-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"7faabe16-9de5-49dc-bab6-44e173f4403c\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.074043 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/38a5808f-1fa5-49cb-afb7-d7676416cd26-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"38a5808f-1fa5-49cb-afb7-d7676416cd26\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.074072 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"openstack-galera-0\" (UID: \"38a5808f-1fa5-49cb-afb7-d7676416cd26\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.074136 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/b544295f-900d-4bff-adb5-fe0aaf262026-memcached-tls-certs\") pod \"memcached-0\" (UID: \"b544295f-900d-4bff-adb5-fe0aaf262026\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.074277 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02038ce5-8236-4304-922b-39d5c9e2459e-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"02038ce5-8236-4304-922b-39d5c9e2459e\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.074308 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b544295f-900d-4bff-adb5-fe0aaf262026-combined-ca-bundle\") pod \"memcached-0\" (UID: \"b544295f-900d-4bff-adb5-fe0aaf262026\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.074328 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-76xbv\" (UniqueName: \"kubernetes.io/projected/02038ce5-8236-4304-922b-39d5c9e2459e-kube-api-access-76xbv\") pod \"nova-cell1-novncproxy-0\" (UID: \"02038ce5-8236-4304-922b-39d5c9e2459e\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.074372 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/02038ce5-8236-4304-922b-39d5c9e2459e-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"02038ce5-8236-4304-922b-39d5c9e2459e\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.074393 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fmn5j\" (UniqueName: \"kubernetes.io/projected/7faabe16-9de5-49dc-bab6-44e173f4403c-kube-api-access-fmn5j\") pod \"cinder-scheduler-0\" (UID: \"7faabe16-9de5-49dc-bab6-44e173f4403c\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.074407 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b544295f-900d-4bff-adb5-fe0aaf262026-config-data\") pod \"memcached-0\" (UID: \"b544295f-900d-4bff-adb5-fe0aaf262026\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.075146 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/38a5808f-1fa5-49cb-afb7-d7676416cd26-config-data-default\") pod \"openstack-galera-0\" (UID: \"38a5808f-1fa5-49cb-afb7-d7676416cd26\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.075435 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/38a5808f-1fa5-49cb-afb7-d7676416cd26-config-data-generated\") pod \"openstack-galera-0\" (UID: \"38a5808f-1fa5-49cb-afb7-d7676416cd26\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.093558 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02038ce5-8236-4304-922b-39d5c9e2459e-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"02038ce5-8236-4304-922b-39d5c9e2459e\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.103312 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/38a5808f-1fa5-49cb-afb7-d7676416cd26-kolla-config\") pod \"openstack-galera-0\" (UID: \"38a5808f-1fa5-49cb-afb7-d7676416cd26\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.103693 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"openstack-galera-0\" (UID: \"38a5808f-1fa5-49cb-afb7-d7676416cd26\") device mount path \"/mnt/openstack/pv14\"" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.107338 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02038ce5-8236-4304-922b-39d5c9e2459e-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"02038ce5-8236-4304-922b-39d5c9e2459e\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.107981 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/02038ce5-8236-4304-922b-39d5c9e2459e-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"02038ce5-8236-4304-922b-39d5c9e2459e\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.117840 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gk8sj\" (UniqueName: \"kubernetes.io/projected/38a5808f-1fa5-49cb-afb7-d7676416cd26-kube-api-access-gk8sj\") pod \"openstack-galera-0\" (UID: \"38a5808f-1fa5-49cb-afb7-d7676416cd26\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.118426 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38a5808f-1fa5-49cb-afb7-d7676416cd26-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"38a5808f-1fa5-49cb-afb7-d7676416cd26\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.124950 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/02038ce5-8236-4304-922b-39d5c9e2459e-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"02038ce5-8236-4304-922b-39d5c9e2459e\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.125560 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-76xbv\" (UniqueName: \"kubernetes.io/projected/02038ce5-8236-4304-922b-39d5c9e2459e-kube-api-access-76xbv\") pod \"nova-cell1-novncproxy-0\" (UID: \"02038ce5-8236-4304-922b-39d5c9e2459e\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.126641 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/38a5808f-1fa5-49cb-afb7-d7676416cd26-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"38a5808f-1fa5-49cb-afb7-d7676416cd26\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.137195 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"openstack-galera-0\" (UID: \"38a5808f-1fa5-49cb-afb7-d7676416cd26\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.154569 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.156577 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.160385 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-conductor-config-data" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.183022 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z8cj9\" (UniqueName: \"kubernetes.io/projected/b544295f-900d-4bff-adb5-fe0aaf262026-kube-api-access-z8cj9\") pod \"memcached-0\" (UID: \"b544295f-900d-4bff-adb5-fe0aaf262026\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.183078 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/34a4b6ee-5830-4532-b829-a91cedcf8069-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"34a4b6ee-5830-4532-b829-a91cedcf8069\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.183109 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7faabe16-9de5-49dc-bab6-44e173f4403c-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"7faabe16-9de5-49dc-bab6-44e173f4403c\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.183136 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/b544295f-900d-4bff-adb5-fe0aaf262026-kolla-config\") pod \"memcached-0\" (UID: \"b544295f-900d-4bff-adb5-fe0aaf262026\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.183158 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7faabe16-9de5-49dc-bab6-44e173f4403c-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"7faabe16-9de5-49dc-bab6-44e173f4403c\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.183225 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/b544295f-900d-4bff-adb5-fe0aaf262026-memcached-tls-certs\") pod \"memcached-0\" (UID: \"b544295f-900d-4bff-adb5-fe0aaf262026\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.183269 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/34a4b6ee-5830-4532-b829-a91cedcf8069-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"34a4b6ee-5830-4532-b829-a91cedcf8069\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.183298 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b544295f-900d-4bff-adb5-fe0aaf262026-combined-ca-bundle\") pod \"memcached-0\" (UID: \"b544295f-900d-4bff-adb5-fe0aaf262026\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.183315 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/34a4b6ee-5830-4532-b829-a91cedcf8069-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"34a4b6ee-5830-4532-b829-a91cedcf8069\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.183336 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/34a4b6ee-5830-4532-b829-a91cedcf8069-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"34a4b6ee-5830-4532-b829-a91cedcf8069\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.183353 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7bk69\" (UniqueName: \"kubernetes.io/projected/34a4b6ee-5830-4532-b829-a91cedcf8069-kube-api-access-7bk69\") pod \"openstack-cell1-galera-0\" (UID: \"34a4b6ee-5830-4532-b829-a91cedcf8069\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.183378 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fmn5j\" (UniqueName: \"kubernetes.io/projected/7faabe16-9de5-49dc-bab6-44e173f4403c-kube-api-access-fmn5j\") pod \"cinder-scheduler-0\" (UID: \"7faabe16-9de5-49dc-bab6-44e173f4403c\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.183395 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b544295f-900d-4bff-adb5-fe0aaf262026-config-data\") pod \"memcached-0\" (UID: \"b544295f-900d-4bff-adb5-fe0aaf262026\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.183417 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/34a4b6ee-5830-4532-b829-a91cedcf8069-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"34a4b6ee-5830-4532-b829-a91cedcf8069\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.183450 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7faabe16-9de5-49dc-bab6-44e173f4403c-config-data\") pod \"cinder-scheduler-0\" (UID: \"7faabe16-9de5-49dc-bab6-44e173f4403c\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.183471 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/34a4b6ee-5830-4532-b829-a91cedcf8069-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"34a4b6ee-5830-4532-b829-a91cedcf8069\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.183496 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7faabe16-9de5-49dc-bab6-44e173f4403c-scripts\") pod \"cinder-scheduler-0\" (UID: \"7faabe16-9de5-49dc-bab6-44e173f4403c\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.183518 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7faabe16-9de5-49dc-bab6-44e173f4403c-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"7faabe16-9de5-49dc-bab6-44e173f4403c\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.183541 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"openstack-cell1-galera-0\" (UID: \"34a4b6ee-5830-4532-b829-a91cedcf8069\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.187123 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7faabe16-9de5-49dc-bab6-44e173f4403c-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"7faabe16-9de5-49dc-bab6-44e173f4403c\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.188974 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b544295f-900d-4bff-adb5-fe0aaf262026-config-data\") pod \"memcached-0\" (UID: \"b544295f-900d-4bff-adb5-fe0aaf262026\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.189111 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/b544295f-900d-4bff-adb5-fe0aaf262026-kolla-config\") pod \"memcached-0\" (UID: \"b544295f-900d-4bff-adb5-fe0aaf262026\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.190382 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.192664 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7faabe16-9de5-49dc-bab6-44e173f4403c-scripts\") pod \"cinder-scheduler-0\" (UID: \"7faabe16-9de5-49dc-bab6-44e173f4403c\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.196046 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7faabe16-9de5-49dc-bab6-44e173f4403c-config-data\") pod \"cinder-scheduler-0\" (UID: \"7faabe16-9de5-49dc-bab6-44e173f4403c\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.198640 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7faabe16-9de5-49dc-bab6-44e173f4403c-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"7faabe16-9de5-49dc-bab6-44e173f4403c\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.198661 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/b544295f-900d-4bff-adb5-fe0aaf262026-memcached-tls-certs\") pod \"memcached-0\" (UID: \"b544295f-900d-4bff-adb5-fe0aaf262026\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.201743 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/b544295f-900d-4bff-adb5-fe0aaf262026-combined-ca-bundle\") pod \"memcached-0\" (UID: \"b544295f-900d-4bff-adb5-fe0aaf262026\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.203242 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7faabe16-9de5-49dc-bab6-44e173f4403c-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"7faabe16-9de5-49dc-bab6-44e173f4403c\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.206244 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fmn5j\" (UniqueName: \"kubernetes.io/projected/7faabe16-9de5-49dc-bab6-44e173f4403c-kube-api-access-fmn5j\") pod \"cinder-scheduler-0\" (UID: \"7faabe16-9de5-49dc-bab6-44e173f4403c\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.219155 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z8cj9\" (UniqueName: \"kubernetes.io/projected/b544295f-900d-4bff-adb5-fe0aaf262026-kube-api-access-z8cj9\") pod \"memcached-0\" (UID: \"b544295f-900d-4bff-adb5-fe0aaf262026\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.221248 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.230283 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.231976 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.235402 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovndbcluster-nb-config" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.235407 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ovndbcluster-nb-ovndbs" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.236481 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovndbcluster-nb-scripts" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.236680 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ovncluster-ovndbcluster-nb-dockercfg-jdmxf" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.236683 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.243239 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.260225 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.262116 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.272671 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ovncluster-ovndbcluster-sb-dockercfg-b24l8" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.272953 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ovndbcluster-sb-ovndbs" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.273186 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovndbcluster-sb-config" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.274315 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovndbcluster-sb-scripts" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.282022 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.292877 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.295068 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/34a4b6ee-5830-4532-b829-a91cedcf8069-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"34a4b6ee-5830-4532-b829-a91cedcf8069\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.295188 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/34a4b6ee-5830-4532-b829-a91cedcf8069-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"34a4b6ee-5830-4532-b829-a91cedcf8069\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.295258 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"openstack-cell1-galera-0\" (UID: \"34a4b6ee-5830-4532-b829-a91cedcf8069\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.295362 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/34a4b6ee-5830-4532-b829-a91cedcf8069-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"34a4b6ee-5830-4532-b829-a91cedcf8069\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.295464 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea72c008-3a66-4577-8042-4b1e0ed1cca6-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"ea72c008-3a66-4577-8042-4b1e0ed1cca6\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.295563 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/34a4b6ee-5830-4532-b829-a91cedcf8069-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"34a4b6ee-5830-4532-b829-a91cedcf8069\") " 
pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.295612 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34a4b6ee-5830-4532-b829-a91cedcf8069-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"34a4b6ee-5830-4532-b829-a91cedcf8069\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.295637 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea72c008-3a66-4577-8042-4b1e0ed1cca6-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"ea72c008-3a66-4577-8042-4b1e0ed1cca6\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.295661 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b9gz4\" (UniqueName: \"kubernetes.io/projected/ea72c008-3a66-4577-8042-4b1e0ed1cca6-kube-api-access-b9gz4\") pod \"nova-cell1-conductor-0\" (UID: \"ea72c008-3a66-4577-8042-4b1e0ed1cca6\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.295679 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/34a4b6ee-5830-4532-b829-a91cedcf8069-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"34a4b6ee-5830-4532-b829-a91cedcf8069\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.295699 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7bk69\" (UniqueName: \"kubernetes.io/projected/34a4b6ee-5830-4532-b829-a91cedcf8069-kube-api-access-7bk69\") pod \"openstack-cell1-galera-0\" (UID: \"34a4b6ee-5830-4532-b829-a91cedcf8069\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.296506 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/34a4b6ee-5830-4532-b829-a91cedcf8069-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"34a4b6ee-5830-4532-b829-a91cedcf8069\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.298656 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/34a4b6ee-5830-4532-b829-a91cedcf8069-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"34a4b6ee-5830-4532-b829-a91cedcf8069\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.299150 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.302221 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/34a4b6ee-5830-4532-b829-a91cedcf8069-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"34a4b6ee-5830-4532-b829-a91cedcf8069\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.304879 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34a4b6ee-5830-4532-b829-a91cedcf8069-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"34a4b6ee-5830-4532-b829-a91cedcf8069\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.305408 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/34a4b6ee-5830-4532-b829-a91cedcf8069-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"34a4b6ee-5830-4532-b829-a91cedcf8069\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.305542 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"openstack-cell1-galera-0\" (UID: \"34a4b6ee-5830-4532-b829-a91cedcf8069\") device mount path \"/mnt/openstack/pv15\"" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.305564 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.313274 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7bk69\" (UniqueName: \"kubernetes.io/projected/34a4b6ee-5830-4532-b829-a91cedcf8069-kube-api-access-7bk69\") pod \"openstack-cell1-galera-0\" (UID: \"34a4b6ee-5830-4532-b829-a91cedcf8069\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.333336 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-api-7bc45756fb-278vp" podStartSLOduration=6.333321973 podStartE2EDuration="6.333321973s" podCreationTimestamp="2026-01-20 17:43:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:43:28.630095338 +0000 UTC m=+3702.390433305" watchObservedRunningTime="2026-01-20 17:43:29.333321973 +0000 UTC m=+3703.093659940" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.338920 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/34a4b6ee-5830-4532-b829-a91cedcf8069-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"34a4b6ee-5830-4532-b829-a91cedcf8069\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.341564 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"openstack-cell1-galera-0\" (UID: \"34a4b6ee-5830-4532-b829-a91cedcf8069\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:43:29 
crc kubenswrapper[4558]: I0120 17:43:29.371814 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.390201 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.397741 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea72c008-3a66-4577-8042-4b1e0ed1cca6-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"ea72c008-3a66-4577-8042-4b1e0ed1cca6\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.397838 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7qm5z\" (UniqueName: \"kubernetes.io/projected/924d7506-6027-4d31-b57a-19fc787ba356-kube-api-access-7qm5z\") pod \"ovsdbserver-sb-0\" (UID: \"924d7506-6027-4d31-b57a-19fc787ba356\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.397889 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/b2fb8e70-15aa-4b38-a44d-5f818dfc755c-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"b2fb8e70-15aa-4b38-a44d-5f818dfc755c\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.397927 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6dchb\" (UniqueName: \"kubernetes.io/projected/b2fb8e70-15aa-4b38-a44d-5f818dfc755c-kube-api-access-6dchb\") pod \"ovsdbserver-nb-0\" (UID: \"b2fb8e70-15aa-4b38-a44d-5f818dfc755c\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.397980 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b2fb8e70-15aa-4b38-a44d-5f818dfc755c-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"b2fb8e70-15aa-4b38-a44d-5f818dfc755c\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.398008 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea72c008-3a66-4577-8042-4b1e0ed1cca6-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"ea72c008-3a66-4577-8042-4b1e0ed1cca6\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.398053 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b9gz4\" (UniqueName: \"kubernetes.io/projected/ea72c008-3a66-4577-8042-4b1e0ed1cca6-kube-api-access-b9gz4\") pod \"nova-cell1-conductor-0\" (UID: \"ea72c008-3a66-4577-8042-4b1e0ed1cca6\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.398096 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/924d7506-6027-4d31-b57a-19fc787ba356-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"924d7506-6027-4d31-b57a-19fc787ba356\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.398147 4558 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2fb8e70-15aa-4b38-a44d-5f818dfc755c-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"b2fb8e70-15aa-4b38-a44d-5f818dfc755c\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.398208 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b2fb8e70-15aa-4b38-a44d-5f818dfc755c-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"b2fb8e70-15aa-4b38-a44d-5f818dfc755c\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.398233 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/924d7506-6027-4d31-b57a-19fc787ba356-config\") pod \"ovsdbserver-sb-0\" (UID: \"924d7506-6027-4d31-b57a-19fc787ba356\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.398291 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/b2fb8e70-15aa-4b38-a44d-5f818dfc755c-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"b2fb8e70-15aa-4b38-a44d-5f818dfc755c\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.398318 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"b2fb8e70-15aa-4b38-a44d-5f818dfc755c\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.398367 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/924d7506-6027-4d31-b57a-19fc787ba356-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"924d7506-6027-4d31-b57a-19fc787ba356\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.398425 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/924d7506-6027-4d31-b57a-19fc787ba356-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"924d7506-6027-4d31-b57a-19fc787ba356\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.398456 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/924d7506-6027-4d31-b57a-19fc787ba356-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"924d7506-6027-4d31-b57a-19fc787ba356\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.398477 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b2fb8e70-15aa-4b38-a44d-5f818dfc755c-config\") pod \"ovsdbserver-nb-0\" (UID: \"b2fb8e70-15aa-4b38-a44d-5f818dfc755c\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 
17:43:29.398518 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-sb-0\" (UID: \"924d7506-6027-4d31-b57a-19fc787ba356\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.398536 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/924d7506-6027-4d31-b57a-19fc787ba356-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"924d7506-6027-4d31-b57a-19fc787ba356\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.400291 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.402135 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.402382 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea72c008-3a66-4577-8042-4b1e0ed1cca6-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"ea72c008-3a66-4577-8042-4b1e0ed1cca6\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.406448 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-metadata-internal-svc" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.406830 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-metadata-config-data" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.407657 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea72c008-3a66-4577-8042-4b1e0ed1cca6-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"ea72c008-3a66-4577-8042-4b1e0ed1cca6\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.412299 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.427479 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.431668 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b9gz4\" (UniqueName: \"kubernetes.io/projected/ea72c008-3a66-4577-8042-4b1e0ed1cca6-kube-api-access-b9gz4\") pod \"nova-cell1-conductor-0\" (UID: \"ea72c008-3a66-4577-8042-4b1e0ed1cca6\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.437756 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.471099 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.482089 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.485439 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-keystone-listener-57496b565d-kwb5f" podStartSLOduration=7.485420915 podStartE2EDuration="7.485420915s" podCreationTimestamp="2026-01-20 17:43:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:43:28.71600046 +0000 UTC m=+3702.476338427" watchObservedRunningTime="2026-01-20 17:43:29.485420915 +0000 UTC m=+3703.245758883" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.492209 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.493808 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.499281 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.500074 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-internal-svc" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.500375 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-scripts" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.500630 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-internal-config-data" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.500801 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2fb8e70-15aa-4b38-a44d-5f818dfc755c-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"b2fb8e70-15aa-4b38-a44d-5f818dfc755c\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.500845 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b2fb8e70-15aa-4b38-a44d-5f818dfc755c-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"b2fb8e70-15aa-4b38-a44d-5f818dfc755c\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.500877 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/924d7506-6027-4d31-b57a-19fc787ba356-config\") pod \"ovsdbserver-sb-0\" (UID: \"924d7506-6027-4d31-b57a-19fc787ba356\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.500937 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/b2fb8e70-15aa-4b38-a44d-5f818dfc755c-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"b2fb8e70-15aa-4b38-a44d-5f818dfc755c\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.500963 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"b2fb8e70-15aa-4b38-a44d-5f818dfc755c\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.500990 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/924d7506-6027-4d31-b57a-19fc787ba356-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"924d7506-6027-4d31-b57a-19fc787ba356\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.501029 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/924d7506-6027-4d31-b57a-19fc787ba356-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"924d7506-6027-4d31-b57a-19fc787ba356\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.501053 4558 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/924d7506-6027-4d31-b57a-19fc787ba356-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"924d7506-6027-4d31-b57a-19fc787ba356\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.501076 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/572d7a53-9a17-43e3-bc12-a04f994eb857-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"572d7a53-9a17-43e3-bc12-a04f994eb857\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.501098 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b2fb8e70-15aa-4b38-a44d-5f818dfc755c-config\") pod \"ovsdbserver-nb-0\" (UID: \"b2fb8e70-15aa-4b38-a44d-5f818dfc755c\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.501116 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-sb-0\" (UID: \"924d7506-6027-4d31-b57a-19fc787ba356\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.501135 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/924d7506-6027-4d31-b57a-19fc787ba356-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"924d7506-6027-4d31-b57a-19fc787ba356\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.501183 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/572d7a53-9a17-43e3-bc12-a04f994eb857-logs\") pod \"nova-metadata-0\" (UID: \"572d7a53-9a17-43e3-bc12-a04f994eb857\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.501214 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nt989\" (UniqueName: \"kubernetes.io/projected/572d7a53-9a17-43e3-bc12-a04f994eb857-kube-api-access-nt989\") pod \"nova-metadata-0\" (UID: \"572d7a53-9a17-43e3-bc12-a04f994eb857\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.501235 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7qm5z\" (UniqueName: \"kubernetes.io/projected/924d7506-6027-4d31-b57a-19fc787ba356-kube-api-access-7qm5z\") pod \"ovsdbserver-sb-0\" (UID: \"924d7506-6027-4d31-b57a-19fc787ba356\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.501251 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/b2fb8e70-15aa-4b38-a44d-5f818dfc755c-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"b2fb8e70-15aa-4b38-a44d-5f818dfc755c\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.501274 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6dchb\" (UniqueName: 
\"kubernetes.io/projected/b2fb8e70-15aa-4b38-a44d-5f818dfc755c-kube-api-access-6dchb\") pod \"ovsdbserver-nb-0\" (UID: \"b2fb8e70-15aa-4b38-a44d-5f818dfc755c\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.501309 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b2fb8e70-15aa-4b38-a44d-5f818dfc755c-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"b2fb8e70-15aa-4b38-a44d-5f818dfc755c\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.501337 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/572d7a53-9a17-43e3-bc12-a04f994eb857-config-data\") pod \"nova-metadata-0\" (UID: \"572d7a53-9a17-43e3-bc12-a04f994eb857\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.501357 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/572d7a53-9a17-43e3-bc12-a04f994eb857-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"572d7a53-9a17-43e3-bc12-a04f994eb857\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.501383 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/924d7506-6027-4d31-b57a-19fc787ba356-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"924d7506-6027-4d31-b57a-19fc787ba356\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.502297 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-glance-dockercfg-gnzn6" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.502440 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/924d7506-6027-4d31-b57a-19fc787ba356-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"924d7506-6027-4d31-b57a-19fc787ba356\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.502764 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-sb-0\" (UID: \"924d7506-6027-4d31-b57a-19fc787ba356\") device mount path \"/mnt/openstack/pv03\"" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.503505 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/924d7506-6027-4d31-b57a-19fc787ba356-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"924d7506-6027-4d31-b57a-19fc787ba356\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.505142 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/924d7506-6027-4d31-b57a-19fc787ba356-config\") pod \"ovsdbserver-sb-0\" (UID: \"924d7506-6027-4d31-b57a-19fc787ba356\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.510290 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: 
\"kubernetes.io/empty-dir/b2fb8e70-15aa-4b38-a44d-5f818dfc755c-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"b2fb8e70-15aa-4b38-a44d-5f818dfc755c\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.512557 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"b2fb8e70-15aa-4b38-a44d-5f818dfc755c\") device mount path \"/mnt/openstack/pv02\"" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.513250 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b2fb8e70-15aa-4b38-a44d-5f818dfc755c-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"b2fb8e70-15aa-4b38-a44d-5f818dfc755c\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.513828 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/924d7506-6027-4d31-b57a-19fc787ba356-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"924d7506-6027-4d31-b57a-19fc787ba356\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.514445 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b2fb8e70-15aa-4b38-a44d-5f818dfc755c-config\") pod \"ovsdbserver-nb-0\" (UID: \"b2fb8e70-15aa-4b38-a44d-5f818dfc755c\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.519422 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.522186 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/924d7506-6027-4d31-b57a-19fc787ba356-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"924d7506-6027-4d31-b57a-19fc787ba356\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.522296 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b2fb8e70-15aa-4b38-a44d-5f818dfc755c-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"b2fb8e70-15aa-4b38-a44d-5f818dfc755c\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.533589 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/b2fb8e70-15aa-4b38-a44d-5f818dfc755c-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"b2fb8e70-15aa-4b38-a44d-5f818dfc755c\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.534511 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2fb8e70-15aa-4b38-a44d-5f818dfc755c-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"b2fb8e70-15aa-4b38-a44d-5f818dfc755c\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.537711 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6dchb\" (UniqueName: \"kubernetes.io/projected/b2fb8e70-15aa-4b38-a44d-5f818dfc755c-kube-api-access-6dchb\") pod \"ovsdbserver-nb-0\" (UID: \"b2fb8e70-15aa-4b38-a44d-5f818dfc755c\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.538000 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/924d7506-6027-4d31-b57a-19fc787ba356-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"924d7506-6027-4d31-b57a-19fc787ba356\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.548742 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-5f469dd86b-h6dq9"] Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.549023 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-keystone-listener-5f469dd86b-h6dq9" podUID="dfa1a7bb-2900-49f0-94ee-fe789d4ffec8" containerName="barbican-keystone-listener-log" containerID="cri-o://41a835374e97b54b247e3d01bbee52c1fb97c1dfd37e923ebe958c81a0dbe228" gracePeriod=30 Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.550987 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-keystone-listener-5f469dd86b-h6dq9" podUID="dfa1a7bb-2900-49f0-94ee-fe789d4ffec8" containerName="barbican-keystone-listener" containerID="cri-o://4918d6999246b14e34bbf966f6c160e07d3a28ecedad21b571990984b63736da" gracePeriod=30 Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.563892 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7qm5z\" (UniqueName: 
\"kubernetes.io/projected/924d7506-6027-4d31-b57a-19fc787ba356-kube-api-access-7qm5z\") pod \"ovsdbserver-sb-0\" (UID: \"924d7506-6027-4d31-b57a-19fc787ba356\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.566658 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/placement-976c48458-wzswq" podStartSLOduration=6.566621201 podStartE2EDuration="6.566621201s" podCreationTimestamp="2026-01-20 17:43:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:43:28.777274401 +0000 UTC m=+3702.537612367" watchObservedRunningTime="2026-01-20 17:43:29.566621201 +0000 UTC m=+3703.326959167" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.569682 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-sb-0\" (UID: \"924d7506-6027-4d31-b57a-19fc787ba356\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:43:29 crc kubenswrapper[4558]: W0120 17:43:29.582304 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4ea608e7_9a19_47fb_8e14_a629451e7c03.slice/crio-c8f702c47f61d39e56d80f4f6a07a49ca90831318685ac858718c1b72c2f66c2 WatchSource:0}: Error finding container c8f702c47f61d39e56d80f4f6a07a49ca90831318685ac858718c1b72c2f66c2: Status 404 returned error can't find the container with id c8f702c47f61d39e56d80f4f6a07a49ca90831318685ac858718c1b72c2f66c2 Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.588097 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/keystone-685599669b-nv6cc" podStartSLOduration=6.588083265 podStartE2EDuration="6.588083265s" podCreationTimestamp="2026-01-20 17:43:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:43:28.809210818 +0000 UTC m=+3702.569548796" watchObservedRunningTime="2026-01-20 17:43:29.588083265 +0000 UTC m=+3703.348421232" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.592561 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.604481 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5e0d404f-75d1-4be8-9c12-803d8db759e6-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"5e0d404f-75d1-4be8-9c12-803d8db759e6\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.604528 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/572d7a53-9a17-43e3-bc12-a04f994eb857-logs\") pod \"nova-metadata-0\" (UID: \"572d7a53-9a17-43e3-bc12-a04f994eb857\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.604565 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"5e0d404f-75d1-4be8-9c12-803d8db759e6\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.604595 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nt989\" (UniqueName: \"kubernetes.io/projected/572d7a53-9a17-43e3-bc12-a04f994eb857-kube-api-access-nt989\") pod \"nova-metadata-0\" (UID: \"572d7a53-9a17-43e3-bc12-a04f994eb857\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.604635 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/572d7a53-9a17-43e3-bc12-a04f994eb857-config-data\") pod \"nova-metadata-0\" (UID: \"572d7a53-9a17-43e3-bc12-a04f994eb857\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.604651 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5e0d404f-75d1-4be8-9c12-803d8db759e6-logs\") pod \"glance-default-internal-api-0\" (UID: \"5e0d404f-75d1-4be8-9c12-803d8db759e6\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.604674 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/572d7a53-9a17-43e3-bc12-a04f994eb857-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"572d7a53-9a17-43e3-bc12-a04f994eb857\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.604744 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5e0d404f-75d1-4be8-9c12-803d8db759e6-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"5e0d404f-75d1-4be8-9c12-803d8db759e6\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.604762 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e0d404f-75d1-4be8-9c12-803d8db759e6-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: 
\"5e0d404f-75d1-4be8-9c12-803d8db759e6\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.604800 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xzxfn\" (UniqueName: \"kubernetes.io/projected/5e0d404f-75d1-4be8-9c12-803d8db759e6-kube-api-access-xzxfn\") pod \"glance-default-internal-api-0\" (UID: \"5e0d404f-75d1-4be8-9c12-803d8db759e6\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.604832 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e0d404f-75d1-4be8-9c12-803d8db759e6-config-data\") pod \"glance-default-internal-api-0\" (UID: \"5e0d404f-75d1-4be8-9c12-803d8db759e6\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.604864 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/572d7a53-9a17-43e3-bc12-a04f994eb857-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"572d7a53-9a17-43e3-bc12-a04f994eb857\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.604881 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5e0d404f-75d1-4be8-9c12-803d8db759e6-scripts\") pod \"glance-default-internal-api-0\" (UID: \"5e0d404f-75d1-4be8-9c12-803d8db759e6\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.605527 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/572d7a53-9a17-43e3-bc12-a04f994eb857-logs\") pod \"nova-metadata-0\" (UID: \"572d7a53-9a17-43e3-bc12-a04f994eb857\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.609565 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/572d7a53-9a17-43e3-bc12-a04f994eb857-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"572d7a53-9a17-43e3-bc12-a04f994eb857\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.613622 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/572d7a53-9a17-43e3-bc12-a04f994eb857-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"572d7a53-9a17-43e3-bc12-a04f994eb857\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.615614 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/572d7a53-9a17-43e3-bc12-a04f994eb857-config-data\") pod \"nova-metadata-0\" (UID: \"572d7a53-9a17-43e3-bc12-a04f994eb857\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.622304 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"b2fb8e70-15aa-4b38-a44d-5f818dfc755c\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 
20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.624533 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nt989\" (UniqueName: \"kubernetes.io/projected/572d7a53-9a17-43e3-bc12-a04f994eb857-kube-api-access-nt989\") pod \"nova-metadata-0\" (UID: \"572d7a53-9a17-43e3-bc12-a04f994eb857\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.652511 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.658263 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.668647 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.672118 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.675854 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-external-config-data" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.676042 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-public-svc" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.679418 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.706767 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5e0d404f-75d1-4be8-9c12-803d8db759e6-logs\") pod \"glance-default-internal-api-0\" (UID: \"5e0d404f-75d1-4be8-9c12-803d8db759e6\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.706813 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a226d25c-395c-4744-a42e-46beee40d8e4-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"a226d25c-395c-4744-a42e-46beee40d8e4\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.706851 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a226d25c-395c-4744-a42e-46beee40d8e4-logs\") pod \"glance-default-external-api-0\" (UID: \"a226d25c-395c-4744-a42e-46beee40d8e4\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.706888 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a226d25c-395c-4744-a42e-46beee40d8e4-config-data\") pod \"glance-default-external-api-0\" (UID: \"a226d25c-395c-4744-a42e-46beee40d8e4\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.706937 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a226d25c-395c-4744-a42e-46beee40d8e4-public-tls-certs\") pod 
\"glance-default-external-api-0\" (UID: \"a226d25c-395c-4744-a42e-46beee40d8e4\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.706967 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5e0d404f-75d1-4be8-9c12-803d8db759e6-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"5e0d404f-75d1-4be8-9c12-803d8db759e6\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.706987 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e0d404f-75d1-4be8-9c12-803d8db759e6-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"5e0d404f-75d1-4be8-9c12-803d8db759e6\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.707025 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xzxfn\" (UniqueName: \"kubernetes.io/projected/5e0d404f-75d1-4be8-9c12-803d8db759e6-kube-api-access-xzxfn\") pod \"glance-default-internal-api-0\" (UID: \"5e0d404f-75d1-4be8-9c12-803d8db759e6\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.707076 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e0d404f-75d1-4be8-9c12-803d8db759e6-config-data\") pod \"glance-default-internal-api-0\" (UID: \"5e0d404f-75d1-4be8-9c12-803d8db759e6\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.707136 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rdzjm\" (UniqueName: \"kubernetes.io/projected/a226d25c-395c-4744-a42e-46beee40d8e4-kube-api-access-rdzjm\") pod \"glance-default-external-api-0\" (UID: \"a226d25c-395c-4744-a42e-46beee40d8e4\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.707182 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5e0d404f-75d1-4be8-9c12-803d8db759e6-scripts\") pod \"glance-default-internal-api-0\" (UID: \"5e0d404f-75d1-4be8-9c12-803d8db759e6\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.707222 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5e0d404f-75d1-4be8-9c12-803d8db759e6-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"5e0d404f-75d1-4be8-9c12-803d8db759e6\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.707254 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"a226d25c-395c-4744-a42e-46beee40d8e4\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.707292 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" 
(UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"5e0d404f-75d1-4be8-9c12-803d8db759e6\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.707316 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a226d25c-395c-4744-a42e-46beee40d8e4-scripts\") pod \"glance-default-external-api-0\" (UID: \"a226d25c-395c-4744-a42e-46beee40d8e4\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.707354 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a226d25c-395c-4744-a42e-46beee40d8e4-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"a226d25c-395c-4744-a42e-46beee40d8e4\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.707792 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"5e0d404f-75d1-4be8-9c12-803d8db759e6\") device mount path \"/mnt/openstack/pv11\"" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.711531 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5e0d404f-75d1-4be8-9c12-803d8db759e6-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"5e0d404f-75d1-4be8-9c12-803d8db759e6\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.711759 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5e0d404f-75d1-4be8-9c12-803d8db759e6-logs\") pod \"glance-default-internal-api-0\" (UID: \"5e0d404f-75d1-4be8-9c12-803d8db759e6\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.718621 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e0d404f-75d1-4be8-9c12-803d8db759e6-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"5e0d404f-75d1-4be8-9c12-803d8db759e6\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.720585 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5e0d404f-75d1-4be8-9c12-803d8db759e6-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"5e0d404f-75d1-4be8-9c12-803d8db759e6\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.726133 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e0d404f-75d1-4be8-9c12-803d8db759e6-config-data\") pod \"glance-default-internal-api-0\" (UID: \"5e0d404f-75d1-4be8-9c12-803d8db759e6\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.726487 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.732967 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5e0d404f-75d1-4be8-9c12-803d8db759e6-scripts\") pod \"glance-default-internal-api-0\" (UID: \"5e0d404f-75d1-4be8-9c12-803d8db759e6\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.734535 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.737196 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"56d39f72-48d3-4690-a20a-099cb41daa7e","Type":"ContainerStarted","Data":"94d3c9d43f52c47d691791acedc0a34da7662d53b02a2453679cb72210409391"} Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.746285 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xzxfn\" (UniqueName: \"kubernetes.io/projected/5e0d404f-75d1-4be8-9c12-803d8db759e6-kube-api-access-xzxfn\") pod \"glance-default-internal-api-0\" (UID: \"5e0d404f-75d1-4be8-9c12-803d8db759e6\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.768421 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"5e0d404f-75d1-4be8-9c12-803d8db759e6\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.774397 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"ddd57d3b-c9de-46e3-897e-1a50ae49630e","Type":"ContainerStarted","Data":"5f2f816ffd1e3e218743f1527560baf025cde6fe3d755067facb36d4394755c3"} Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.780976 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.812932 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-66bfc8789d-nsc64" event={"ID":"596f2da7-db14-44e3-8c93-5809b27e0cc6","Type":"ContainerStarted","Data":"d2b6e31358d80bd665915aa4bc6aae644adca0fde94572c0a18379f3e0ce34e1"} Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.814546 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/neutron-66bfc8789d-nsc64" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.817893 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdzjm\" (UniqueName: \"kubernetes.io/projected/a226d25c-395c-4744-a42e-46beee40d8e4-kube-api-access-rdzjm\") pod \"glance-default-external-api-0\" (UID: \"a226d25c-395c-4744-a42e-46beee40d8e4\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.817973 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"a226d25c-395c-4744-a42e-46beee40d8e4\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.818021 4558 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a226d25c-395c-4744-a42e-46beee40d8e4-scripts\") pod \"glance-default-external-api-0\" (UID: \"a226d25c-395c-4744-a42e-46beee40d8e4\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.818059 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a226d25c-395c-4744-a42e-46beee40d8e4-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"a226d25c-395c-4744-a42e-46beee40d8e4\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.818119 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a226d25c-395c-4744-a42e-46beee40d8e4-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"a226d25c-395c-4744-a42e-46beee40d8e4\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.818155 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a226d25c-395c-4744-a42e-46beee40d8e4-logs\") pod \"glance-default-external-api-0\" (UID: \"a226d25c-395c-4744-a42e-46beee40d8e4\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.818206 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a226d25c-395c-4744-a42e-46beee40d8e4-config-data\") pod \"glance-default-external-api-0\" (UID: \"a226d25c-395c-4744-a42e-46beee40d8e4\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.818250 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a226d25c-395c-4744-a42e-46beee40d8e4-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"a226d25c-395c-4744-a42e-46beee40d8e4\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.819140 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"a226d25c-395c-4744-a42e-46beee40d8e4\") device mount path \"/mnt/openstack/pv09\"" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.819234 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a226d25c-395c-4744-a42e-46beee40d8e4-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"a226d25c-395c-4744-a42e-46beee40d8e4\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.850376 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a226d25c-395c-4744-a42e-46beee40d8e4-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"a226d25c-395c-4744-a42e-46beee40d8e4\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 
17:43:29.851023 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a226d25c-395c-4744-a42e-46beee40d8e4-scripts\") pod \"glance-default-external-api-0\" (UID: \"a226d25c-395c-4744-a42e-46beee40d8e4\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.851894 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a226d25c-395c-4744-a42e-46beee40d8e4-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"a226d25c-395c-4744-a42e-46beee40d8e4\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.853525 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.855854 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a226d25c-395c-4744-a42e-46beee40d8e4-logs\") pod \"glance-default-external-api-0\" (UID: \"a226d25c-395c-4744-a42e-46beee40d8e4\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.857587 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.878881 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-976c48458-wzswq" event={"ID":"b6ba5f2a-6486-4a84-bf85-23c00e907701","Type":"ContainerStarted","Data":"76ef1a400330d7e58be6b4ac9281c05593aa3360863f0b8430f1369f13006b6c"} Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.888331 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/neutron-66bfc8789d-nsc64" podStartSLOduration=6.888308961 podStartE2EDuration="6.888308961s" podCreationTimestamp="2026-01-20 17:43:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:43:29.853797001 +0000 UTC m=+3703.614134968" watchObservedRunningTime="2026-01-20 17:43:29.888308961 +0000 UTC m=+3703.648646928" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.895698 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"4ea608e7-9a19-47fb-8e14-a629451e7c03","Type":"ContainerStarted","Data":"c8f702c47f61d39e56d80f4f6a07a49ca90831318685ac858718c1b72c2f66c2"} Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.910467 4558 generic.go:334] "Generic (PLEG): container finished" podID="dfa1a7bb-2900-49f0-94ee-fe789d4ffec8" containerID="41a835374e97b54b247e3d01bbee52c1fb97c1dfd37e923ebe958c81a0dbe228" exitCode=143 Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.910755 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-5f469dd86b-h6dq9" event={"ID":"dfa1a7bb-2900-49f0-94ee-fe789d4ffec8","Type":"ContainerDied","Data":"41a835374e97b54b247e3d01bbee52c1fb97c1dfd37e923ebe958c81a0dbe228"} Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.913951 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a226d25c-395c-4744-a42e-46beee40d8e4-config-data\") pod 
\"glance-default-external-api-0\" (UID: \"a226d25c-395c-4744-a42e-46beee40d8e4\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.926948 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdzjm\" (UniqueName: \"kubernetes.io/projected/a226d25c-395c-4744-a42e-46beee40d8e4-kube-api-access-rdzjm\") pod \"glance-default-external-api-0\" (UID: \"a226d25c-395c-4744-a42e-46beee40d8e4\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.979105 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:43:29 crc kubenswrapper[4558]: W0120 17:43:29.987358 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb544295f_900d_4bff_adb5_fe0aaf262026.slice/crio-14757c210cdd3d7beaa4bfde2e424ee26319af9cd6fbb62fc3225ff25b1cf0d6 WatchSource:0}: Error finding container 14757c210cdd3d7beaa4bfde2e424ee26319af9cd6fbb62fc3225ff25b1cf0d6: Status 404 returned error can't find the container with id 14757c210cdd3d7beaa4bfde2e424ee26319af9cd6fbb62fc3225ff25b1cf0d6 Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.994846 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"a226d25c-395c-4744-a42e-46beee40d8e4\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:29 crc kubenswrapper[4558]: I0120 17:43:29.998592 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:43:30 crc kubenswrapper[4558]: I0120 17:43:30.044676 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:43:30 crc kubenswrapper[4558]: I0120 17:43:30.062283 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:30 crc kubenswrapper[4558]: I0120 17:43:30.213216 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:43:30 crc kubenswrapper[4558]: W0120 17:43:30.220282 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod38a5808f_1fa5_49cb_afb7_d7676416cd26.slice/crio-b29534492808dfe227690795f19ae32104cb9487956b5ac584f7fd62850c9707 WatchSource:0}: Error finding container b29534492808dfe227690795f19ae32104cb9487956b5ac584f7fd62850c9707: Status 404 returned error can't find the container with id b29534492808dfe227690795f19ae32104cb9487956b5ac584f7fd62850c9707 Jan 20 17:43:30 crc kubenswrapper[4558]: I0120 17:43:30.395946 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:43:30 crc kubenswrapper[4558]: I0120 17:43:30.420237 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:43:30 crc kubenswrapper[4558]: I0120 17:43:30.451087 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovn-northd-0_9db31a7a-b4bb-4c3b-99f4-b9855af99342/ovn-northd/0.log" Jan 20 17:43:30 crc kubenswrapper[4558]: I0120 17:43:30.451187 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:43:30 crc kubenswrapper[4558]: I0120 17:43:30.475412 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:43:30 crc kubenswrapper[4558]: I0120 17:43:30.589136 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0394e10b-3811-4eef-a9ef-a785e1574649" path="/var/lib/kubelet/pods/0394e10b-3811-4eef-a9ef-a785e1574649/volumes" Jan 20 17:43:30 crc kubenswrapper[4558]: I0120 17:43:30.589780 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25cfe04f-2194-405f-a9d7-82181e8ac22a" path="/var/lib/kubelet/pods/25cfe04f-2194-405f-a9d7-82181e8ac22a/volumes" Jan 20 17:43:30 crc kubenswrapper[4558]: I0120 17:43:30.591115 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25fe26b1-d308-452c-b15f-b1b272de6869" path="/var/lib/kubelet/pods/25fe26b1-d308-452c-b15f-b1b272de6869/volumes" Jan 20 17:43:30 crc kubenswrapper[4558]: I0120 17:43:30.597725 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5f087f7c-913a-4ab8-b905-5caa84469c77" path="/var/lib/kubelet/pods/5f087f7c-913a-4ab8-b905-5caa84469c77/volumes" Jan 20 17:43:30 crc kubenswrapper[4558]: I0120 17:43:30.598785 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fb29028-042a-4108-a63d-a6cd215a6c31" path="/var/lib/kubelet/pods/5fb29028-042a-4108-a63d-a6cd215a6c31/volumes" Jan 20 17:43:30 crc kubenswrapper[4558]: I0120 17:43:30.606316 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8738e339-fd1b-4122-899c-7b9521688aba" path="/var/lib/kubelet/pods/8738e339-fd1b-4122-899c-7b9521688aba/volumes" Jan 20 17:43:30 crc kubenswrapper[4558]: I0120 17:43:30.609473 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a18f9da2-9c64-4047-ad02-206ac9c8aa61" path="/var/lib/kubelet/pods/a18f9da2-9c64-4047-ad02-206ac9c8aa61/volumes" Jan 20 17:43:30 crc kubenswrapper[4558]: I0120 17:43:30.616306 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a5db7bd5-e0c6-4645-ab63-3a2a21fdc854" path="/var/lib/kubelet/pods/a5db7bd5-e0c6-4645-ab63-3a2a21fdc854/volumes" Jan 20 17:43:30 crc kubenswrapper[4558]: I0120 17:43:30.616991 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a8df2ac8-5e44-4c2d-a1fb-11e7cdeda894" path="/var/lib/kubelet/pods/a8df2ac8-5e44-4c2d-a1fb-11e7cdeda894/volumes" Jan 20 17:43:30 crc kubenswrapper[4558]: I0120 17:43:30.617560 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c62cee5b-4dd8-4a07-995c-e1d0530d695b" path="/var/lib/kubelet/pods/c62cee5b-4dd8-4a07-995c-e1d0530d695b/volumes" Jan 20 17:43:30 crc kubenswrapper[4558]: I0120 17:43:30.619886 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f68fbac2-1677-4664-a344-77a41044ea2a" path="/var/lib/kubelet/pods/f68fbac2-1677-4664-a344-77a41044ea2a/volumes" Jan 20 17:43:30 crc kubenswrapper[4558]: I0120 17:43:30.620848 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fd5b8a4a-4f31-4b17-95a3-669d1d7661c4" path="/var/lib/kubelet/pods/fd5b8a4a-4f31-4b17-95a3-669d1d7661c4/volumes" Jan 20 17:43:30 crc kubenswrapper[4558]: I0120 17:43:30.622599 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="feedfa19-f182-4f7f-8b50-3d16c11e3510" path="/var/lib/kubelet/pods/feedfa19-f182-4f7f-8b50-3d16c11e3510/volumes" Jan 20 17:43:30 crc kubenswrapper[4558]: 
I0120 17:43:30.646083 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/365754d1-535b-450b-a80e-1e7402cb28f8-combined-ca-bundle\") pod \"365754d1-535b-450b-a80e-1e7402cb28f8\" (UID: \"365754d1-535b-450b-a80e-1e7402cb28f8\") " Jan 20 17:43:30 crc kubenswrapper[4558]: I0120 17:43:30.646138 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9db31a7a-b4bb-4c3b-99f4-b9855af99342-scripts\") pod \"9db31a7a-b4bb-4c3b-99f4-b9855af99342\" (UID: \"9db31a7a-b4bb-4c3b-99f4-b9855af99342\") " Jan 20 17:43:30 crc kubenswrapper[4558]: I0120 17:43:30.646296 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/9db31a7a-b4bb-4c3b-99f4-b9855af99342-ovn-northd-tls-certs\") pod \"9db31a7a-b4bb-4c3b-99f4-b9855af99342\" (UID: \"9db31a7a-b4bb-4c3b-99f4-b9855af99342\") " Jan 20 17:43:30 crc kubenswrapper[4558]: I0120 17:43:30.646326 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zpf52\" (UniqueName: \"kubernetes.io/projected/9db31a7a-b4bb-4c3b-99f4-b9855af99342-kube-api-access-zpf52\") pod \"9db31a7a-b4bb-4c3b-99f4-b9855af99342\" (UID: \"9db31a7a-b4bb-4c3b-99f4-b9855af99342\") " Jan 20 17:43:30 crc kubenswrapper[4558]: I0120 17:43:30.646359 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9db31a7a-b4bb-4c3b-99f4-b9855af99342-combined-ca-bundle\") pod \"9db31a7a-b4bb-4c3b-99f4-b9855af99342\" (UID: \"9db31a7a-b4bb-4c3b-99f4-b9855af99342\") " Jan 20 17:43:30 crc kubenswrapper[4558]: I0120 17:43:30.646406 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/9db31a7a-b4bb-4c3b-99f4-b9855af99342-metrics-certs-tls-certs\") pod \"9db31a7a-b4bb-4c3b-99f4-b9855af99342\" (UID: \"9db31a7a-b4bb-4c3b-99f4-b9855af99342\") " Jan 20 17:43:30 crc kubenswrapper[4558]: I0120 17:43:30.646454 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/9db31a7a-b4bb-4c3b-99f4-b9855af99342-ovn-rundir\") pod \"9db31a7a-b4bb-4c3b-99f4-b9855af99342\" (UID: \"9db31a7a-b4bb-4c3b-99f4-b9855af99342\") " Jan 20 17:43:30 crc kubenswrapper[4558]: I0120 17:43:30.646493 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qnvrs\" (UniqueName: \"kubernetes.io/projected/365754d1-535b-450b-a80e-1e7402cb28f8-kube-api-access-qnvrs\") pod \"365754d1-535b-450b-a80e-1e7402cb28f8\" (UID: \"365754d1-535b-450b-a80e-1e7402cb28f8\") " Jan 20 17:43:30 crc kubenswrapper[4558]: I0120 17:43:30.646824 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9db31a7a-b4bb-4c3b-99f4-b9855af99342-config\") pod \"9db31a7a-b4bb-4c3b-99f4-b9855af99342\" (UID: \"9db31a7a-b4bb-4c3b-99f4-b9855af99342\") " Jan 20 17:43:30 crc kubenswrapper[4558]: I0120 17:43:30.646912 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/365754d1-535b-450b-a80e-1e7402cb28f8-config-data\") pod \"365754d1-535b-450b-a80e-1e7402cb28f8\" (UID: \"365754d1-535b-450b-a80e-1e7402cb28f8\") " Jan 20 17:43:30 crc kubenswrapper[4558]: 
I0120 17:43:30.647158 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9db31a7a-b4bb-4c3b-99f4-b9855af99342-scripts" (OuterVolumeSpecName: "scripts") pod "9db31a7a-b4bb-4c3b-99f4-b9855af99342" (UID: "9db31a7a-b4bb-4c3b-99f4-b9855af99342"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:43:30 crc kubenswrapper[4558]: I0120 17:43:30.647458 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9db31a7a-b4bb-4c3b-99f4-b9855af99342-ovn-rundir" (OuterVolumeSpecName: "ovn-rundir") pod "9db31a7a-b4bb-4c3b-99f4-b9855af99342" (UID: "9db31a7a-b4bb-4c3b-99f4-b9855af99342"). InnerVolumeSpecName "ovn-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:43:30 crc kubenswrapper[4558]: I0120 17:43:30.647530 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9db31a7a-b4bb-4c3b-99f4-b9855af99342-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:30 crc kubenswrapper[4558]: I0120 17:43:30.661515 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9db31a7a-b4bb-4c3b-99f4-b9855af99342-config" (OuterVolumeSpecName: "config") pod "9db31a7a-b4bb-4c3b-99f4-b9855af99342" (UID: "9db31a7a-b4bb-4c3b-99f4-b9855af99342"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:43:30 crc kubenswrapper[4558]: I0120 17:43:30.683311 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9db31a7a-b4bb-4c3b-99f4-b9855af99342-kube-api-access-zpf52" (OuterVolumeSpecName: "kube-api-access-zpf52") pod "9db31a7a-b4bb-4c3b-99f4-b9855af99342" (UID: "9db31a7a-b4bb-4c3b-99f4-b9855af99342"). InnerVolumeSpecName "kube-api-access-zpf52". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:43:30 crc kubenswrapper[4558]: I0120 17:43:30.683404 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/365754d1-535b-450b-a80e-1e7402cb28f8-kube-api-access-qnvrs" (OuterVolumeSpecName: "kube-api-access-qnvrs") pod "365754d1-535b-450b-a80e-1e7402cb28f8" (UID: "365754d1-535b-450b-a80e-1e7402cb28f8"). InnerVolumeSpecName "kube-api-access-qnvrs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:43:30 crc kubenswrapper[4558]: I0120 17:43:30.707373 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/365754d1-535b-450b-a80e-1e7402cb28f8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "365754d1-535b-450b-a80e-1e7402cb28f8" (UID: "365754d1-535b-450b-a80e-1e7402cb28f8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:30 crc kubenswrapper[4558]: I0120 17:43:30.718649 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/365754d1-535b-450b-a80e-1e7402cb28f8-config-data" (OuterVolumeSpecName: "config-data") pod "365754d1-535b-450b-a80e-1e7402cb28f8" (UID: "365754d1-535b-450b-a80e-1e7402cb28f8"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:30 crc kubenswrapper[4558]: I0120 17:43:30.718760 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9db31a7a-b4bb-4c3b-99f4-b9855af99342-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9db31a7a-b4bb-4c3b-99f4-b9855af99342" (UID: "9db31a7a-b4bb-4c3b-99f4-b9855af99342"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:30 crc kubenswrapper[4558]: I0120 17:43:30.751669 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/365754d1-535b-450b-a80e-1e7402cb28f8-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:30 crc kubenswrapper[4558]: I0120 17:43:30.751703 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/365754d1-535b-450b-a80e-1e7402cb28f8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:30 crc kubenswrapper[4558]: I0120 17:43:30.751716 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zpf52\" (UniqueName: \"kubernetes.io/projected/9db31a7a-b4bb-4c3b-99f4-b9855af99342-kube-api-access-zpf52\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:30 crc kubenswrapper[4558]: I0120 17:43:30.751725 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9db31a7a-b4bb-4c3b-99f4-b9855af99342-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:30 crc kubenswrapper[4558]: I0120 17:43:30.751734 4558 reconciler_common.go:293] "Volume detached for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/9db31a7a-b4bb-4c3b-99f4-b9855af99342-ovn-rundir\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:30 crc kubenswrapper[4558]: I0120 17:43:30.751900 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qnvrs\" (UniqueName: \"kubernetes.io/projected/365754d1-535b-450b-a80e-1e7402cb28f8-kube-api-access-qnvrs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:30 crc kubenswrapper[4558]: I0120 17:43:30.751921 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9db31a7a-b4bb-4c3b-99f4-b9855af99342-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:30 crc kubenswrapper[4558]: I0120 17:43:30.790117 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:43:30 crc kubenswrapper[4558]: I0120 17:43:30.799235 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:43:30 crc kubenswrapper[4558]: I0120 17:43:30.839727 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:43:30 crc kubenswrapper[4558]: I0120 17:43:30.870556 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:43:30 crc kubenswrapper[4558]: W0120 17:43:30.903699 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb2fb8e70_15aa_4b38_a44d_5f818dfc755c.slice/crio-efdde4f4bbe82a8186d4a66013c6519838164ff5a56531e54a7751d623251a61 WatchSource:0}: Error finding container efdde4f4bbe82a8186d4a66013c6519838164ff5a56531e54a7751d623251a61: Status 404 returned error can't find the container with id 
efdde4f4bbe82a8186d4a66013c6519838164ff5a56531e54a7751d623251a61 Jan 20 17:43:30 crc kubenswrapper[4558]: I0120 17:43:30.950667 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"b2fb8e70-15aa-4b38-a44d-5f818dfc755c","Type":"ContainerStarted","Data":"efdde4f4bbe82a8186d4a66013c6519838164ff5a56531e54a7751d623251a61"} Jan 20 17:43:30 crc kubenswrapper[4558]: I0120 17:43:30.966752 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"924d7506-6027-4d31-b57a-19fc787ba356","Type":"ContainerStarted","Data":"6d6366514407e5e8a1279936e2b130c692e531342e856e378104f547943b1fa3"} Jan 20 17:43:30 crc kubenswrapper[4558]: I0120 17:43:30.969428 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"34a4b6ee-5830-4532-b829-a91cedcf8069","Type":"ContainerStarted","Data":"f3945a9151d35b7fd0f1d79dc1260eebc823fc3581aa44238a46f808b13f16d3"} Jan 20 17:43:30 crc kubenswrapper[4558]: I0120 17:43:30.969885 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:43:30 crc kubenswrapper[4558]: I0120 17:43:30.976504 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"572d7a53-9a17-43e3-bc12-a04f994eb857","Type":"ContainerStarted","Data":"63594f53fd7e8175292564ee259baa322ec9835e023668dfc739e433a47e39df"} Jan 20 17:43:30 crc kubenswrapper[4558]: I0120 17:43:30.982818 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"b544295f-900d-4bff-adb5-fe0aaf262026","Type":"ContainerStarted","Data":"2c5efb7f4785f24a40ae2c302910d84d294fabe818863ae49001cc8161f95672"} Jan 20 17:43:30 crc kubenswrapper[4558]: I0120 17:43:30.982867 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"b544295f-900d-4bff-adb5-fe0aaf262026","Type":"ContainerStarted","Data":"14757c210cdd3d7beaa4bfde2e424ee26319af9cd6fbb62fc3225ff25b1cf0d6"} Jan 20 17:43:30 crc kubenswrapper[4558]: I0120 17:43:30.984316 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:43:31 crc kubenswrapper[4558]: I0120 17:43:30.999516 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"ddd57d3b-c9de-46e3-897e-1a50ae49630e","Type":"ContainerStarted","Data":"bfedd515a4cd08318345cf62dd830e7f182ab82a54ada6e840a63a36d085cdc9"} Jan 20 17:43:31 crc kubenswrapper[4558]: I0120 17:43:31.007516 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovn-northd-0_9db31a7a-b4bb-4c3b-99f4-b9855af99342/ovn-northd/0.log" Jan 20 17:43:31 crc kubenswrapper[4558]: I0120 17:43:31.007594 4558 generic.go:334] "Generic (PLEG): container finished" podID="9db31a7a-b4bb-4c3b-99f4-b9855af99342" containerID="4dca8ca5509273f800ab7c5cde1eec1ca33f8167df3366715098e139aa83d898" exitCode=139 Jan 20 17:43:31 crc kubenswrapper[4558]: I0120 17:43:31.007647 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"9db31a7a-b4bb-4c3b-99f4-b9855af99342","Type":"ContainerDied","Data":"4dca8ca5509273f800ab7c5cde1eec1ca33f8167df3366715098e139aa83d898"} Jan 20 17:43:31 crc kubenswrapper[4558]: I0120 17:43:31.007676 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"9db31a7a-b4bb-4c3b-99f4-b9855af99342","Type":"ContainerDied","Data":"27cdc17700d66aa3bf250ce6fc169806feb0e709db51d96cb587c820f6435dbc"} Jan 20 17:43:31 crc kubenswrapper[4558]: I0120 17:43:31.007696 4558 scope.go:117] "RemoveContainer" containerID="a0ad02a3d302f3830db2cc15f3d92955d193e02b01825c96bcf3760f81f92872" Jan 20 17:43:31 crc kubenswrapper[4558]: I0120 17:43:31.007829 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:43:31 crc kubenswrapper[4558]: I0120 17:43:31.010729 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/memcached-0" podStartSLOduration=3.010717749 podStartE2EDuration="3.010717749s" podCreationTimestamp="2026-01-20 17:43:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:43:31.00079916 +0000 UTC m=+3704.761137137" watchObservedRunningTime="2026-01-20 17:43:31.010717749 +0000 UTC m=+3704.771055716" Jan 20 17:43:31 crc kubenswrapper[4558]: I0120 17:43:31.027411 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-scheduler-0" podStartSLOduration=3.027388102 podStartE2EDuration="3.027388102s" podCreationTimestamp="2026-01-20 17:43:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:43:31.02737642 +0000 UTC m=+3704.787714388" watchObservedRunningTime="2026-01-20 17:43:31.027388102 +0000 UTC m=+3704.787726069" Jan 20 17:43:31 crc kubenswrapper[4558]: W0120 17:43:31.057274 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda226d25c_395c_4744_a42e_46beee40d8e4.slice/crio-c2d5f332e14bf6025abb08a1b2a602143e272b9ab74340dd7eb94b7c005b01d8 WatchSource:0}: Error finding container c2d5f332e14bf6025abb08a1b2a602143e272b9ab74340dd7eb94b7c005b01d8: Status 404 returned error can't find the container with id c2d5f332e14bf6025abb08a1b2a602143e272b9ab74340dd7eb94b7c005b01d8 Jan 20 17:43:31 crc kubenswrapper[4558]: I0120 17:43:31.071297 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"02038ce5-8236-4304-922b-39d5c9e2459e","Type":"ContainerStarted","Data":"3061bc017f3dd32adfde1ca3ae3220a662a18da25acbd815f7220bcb27f5d3e5"} Jan 20 17:43:31 crc kubenswrapper[4558]: I0120 17:43:31.071335 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"02038ce5-8236-4304-922b-39d5c9e2459e","Type":"ContainerStarted","Data":"ae3e4535b918315e06d397772c071a730e991089e1ec0d98521e62d3b2eac170"} Jan 20 17:43:31 crc kubenswrapper[4558]: I0120 17:43:31.105072 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"7faabe16-9de5-49dc-bab6-44e173f4403c","Type":"ContainerStarted","Data":"c3d11385387f16b6eaa78c51b6857cdc17ebee2005dc5130a7df1bd854cc3254"} Jan 20 17:43:31 crc kubenswrapper[4558]: I0120 17:43:31.124311 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"4ea608e7-9a19-47fb-8e14-a629451e7c03","Type":"ContainerStarted","Data":"b63f6fc151b46c87b40591ac97182073165528cb2330e3173933ce4b52b43f72"} Jan 20 17:43:31 crc kubenswrapper[4558]: I0120 
17:43:31.134156 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" podStartSLOduration=3.13412831 podStartE2EDuration="3.13412831s" podCreationTimestamp="2026-01-20 17:43:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:43:31.105581055 +0000 UTC m=+3704.865919021" watchObservedRunningTime="2026-01-20 17:43:31.13412831 +0000 UTC m=+3704.894466277" Jan 20 17:43:31 crc kubenswrapper[4558]: I0120 17:43:31.164480 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"ea72c008-3a66-4577-8042-4b1e0ed1cca6","Type":"ContainerStarted","Data":"1b9d5de1f46ed508a2caef11a3592fd5ba3a1a309d20922561e292e00539d3fb"} Jan 20 17:43:31 crc kubenswrapper[4558]: I0120 17:43:31.165995 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:43:31 crc kubenswrapper[4558]: I0120 17:43:31.181818 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"56d39f72-48d3-4690-a20a-099cb41daa7e","Type":"ContainerStarted","Data":"9d8c0d2d57ebdacd4f483efdc5e6ebefff5d373523eeebba687917ca86c856be"} Jan 20 17:43:31 crc kubenswrapper[4558]: I0120 17:43:31.189057 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"38a5808f-1fa5-49cb-afb7-d7676416cd26","Type":"ContainerStarted","Data":"b29534492808dfe227690795f19ae32104cb9487956b5ac584f7fd62850c9707"} Jan 20 17:43:31 crc kubenswrapper[4558]: I0120 17:43:31.190514 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podStartSLOduration=3.190499972 podStartE2EDuration="3.190499972s" podCreationTimestamp="2026-01-20 17:43:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:43:31.181596731 +0000 UTC m=+3704.941934699" watchObservedRunningTime="2026-01-20 17:43:31.190499972 +0000 UTC m=+3704.950837939" Jan 20 17:43:31 crc kubenswrapper[4558]: I0120 17:43:31.198265 4558 generic.go:334] "Generic (PLEG): container finished" podID="365754d1-535b-450b-a80e-1e7402cb28f8" containerID="9c38558131507676fb881bd62fab6cc179a513c6227f85a615c9705711240cbc" exitCode=0 Jan 20 17:43:31 crc kubenswrapper[4558]: I0120 17:43:31.198469 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"365754d1-535b-450b-a80e-1e7402cb28f8","Type":"ContainerDied","Data":"9c38558131507676fb881bd62fab6cc179a513c6227f85a615c9705711240cbc"} Jan 20 17:43:31 crc kubenswrapper[4558]: I0120 17:43:31.198520 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"365754d1-535b-450b-a80e-1e7402cb28f8","Type":"ContainerDied","Data":"01bba0b887c820027e9ec4400f080e9781ca31ba90a7eca0f8976aa0d03b2d78"} Jan 20 17:43:31 crc kubenswrapper[4558]: I0120 17:43:31.199128 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:43:31 crc kubenswrapper[4558]: I0120 17:43:31.199341 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9db31a7a-b4bb-4c3b-99f4-b9855af99342-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "9db31a7a-b4bb-4c3b-99f4-b9855af99342" (UID: "9db31a7a-b4bb-4c3b-99f4-b9855af99342"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:31 crc kubenswrapper[4558]: I0120 17:43:31.276457 4558 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/9db31a7a-b4bb-4c3b-99f4-b9855af99342-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:31 crc kubenswrapper[4558]: I0120 17:43:31.436691 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9db31a7a-b4bb-4c3b-99f4-b9855af99342-ovn-northd-tls-certs" (OuterVolumeSpecName: "ovn-northd-tls-certs") pod "9db31a7a-b4bb-4c3b-99f4-b9855af99342" (UID: "9db31a7a-b4bb-4c3b-99f4-b9855af99342"). InnerVolumeSpecName "ovn-northd-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:31 crc kubenswrapper[4558]: I0120 17:43:31.485971 4558 reconciler_common.go:293] "Volume detached for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/9db31a7a-b4bb-4c3b-99f4-b9855af99342-ovn-northd-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:31 crc kubenswrapper[4558]: I0120 17:43:31.762432 4558 scope.go:117] "RemoveContainer" containerID="4dca8ca5509273f800ab7c5cde1eec1ca33f8167df3366715098e139aa83d898" Jan 20 17:43:31 crc kubenswrapper[4558]: I0120 17:43:31.834557 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-5f469dd86b-h6dq9" Jan 20 17:43:31 crc kubenswrapper[4558]: I0120 17:43:31.869575 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:43:31 crc kubenswrapper[4558]: I0120 17:43:31.878829 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:43:31 crc kubenswrapper[4558]: I0120 17:43:31.889222 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:43:31 crc kubenswrapper[4558]: E0120 17:43:31.900499 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dfa1a7bb-2900-49f0-94ee-fe789d4ffec8" containerName="barbican-keystone-listener-log" Jan 20 17:43:31 crc kubenswrapper[4558]: I0120 17:43:31.900526 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="dfa1a7bb-2900-49f0-94ee-fe789d4ffec8" containerName="barbican-keystone-listener-log" Jan 20 17:43:31 crc kubenswrapper[4558]: E0120 17:43:31.900539 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9db31a7a-b4bb-4c3b-99f4-b9855af99342" containerName="ovn-northd" Jan 20 17:43:31 crc kubenswrapper[4558]: I0120 17:43:31.900547 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9db31a7a-b4bb-4c3b-99f4-b9855af99342" containerName="ovn-northd" Jan 20 17:43:31 crc kubenswrapper[4558]: E0120 17:43:31.900561 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="365754d1-535b-450b-a80e-1e7402cb28f8" containerName="nova-cell0-conductor-conductor" Jan 20 17:43:31 crc kubenswrapper[4558]: I0120 17:43:31.900566 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="365754d1-535b-450b-a80e-1e7402cb28f8" containerName="nova-cell0-conductor-conductor" Jan 20 17:43:31 crc kubenswrapper[4558]: E0120 17:43:31.900584 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9db31a7a-b4bb-4c3b-99f4-b9855af99342" containerName="openstack-network-exporter" Jan 20 17:43:31 crc kubenswrapper[4558]: I0120 17:43:31.900589 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9db31a7a-b4bb-4c3b-99f4-b9855af99342" containerName="openstack-network-exporter" Jan 20 17:43:31 crc kubenswrapper[4558]: E0120 17:43:31.900602 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dfa1a7bb-2900-49f0-94ee-fe789d4ffec8" containerName="barbican-keystone-listener" Jan 20 17:43:31 crc kubenswrapper[4558]: I0120 17:43:31.900608 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="dfa1a7bb-2900-49f0-94ee-fe789d4ffec8" containerName="barbican-keystone-listener" Jan 20 17:43:31 crc kubenswrapper[4558]: I0120 17:43:31.900771 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="9db31a7a-b4bb-4c3b-99f4-b9855af99342" containerName="openstack-network-exporter" Jan 20 17:43:31 crc kubenswrapper[4558]: I0120 17:43:31.900784 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="dfa1a7bb-2900-49f0-94ee-fe789d4ffec8" containerName="barbican-keystone-listener-log" Jan 20 17:43:31 crc kubenswrapper[4558]: I0120 17:43:31.900795 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="9db31a7a-b4bb-4c3b-99f4-b9855af99342" containerName="ovn-northd" Jan 20 17:43:31 crc kubenswrapper[4558]: I0120 17:43:31.900806 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="dfa1a7bb-2900-49f0-94ee-fe789d4ffec8" containerName="barbican-keystone-listener" Jan 20 17:43:31 crc kubenswrapper[4558]: I0120 
17:43:31.900815 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="365754d1-535b-450b-a80e-1e7402cb28f8" containerName="nova-cell0-conductor-conductor" Jan 20 17:43:31 crc kubenswrapper[4558]: I0120 17:43:31.901950 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:43:31 crc kubenswrapper[4558]: I0120 17:43:31.903317 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:43:31 crc kubenswrapper[4558]: I0120 17:43:31.909704 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:43:31 crc kubenswrapper[4558]: I0120 17:43:31.912939 4558 scope.go:117] "RemoveContainer" containerID="a0ad02a3d302f3830db2cc15f3d92955d193e02b01825c96bcf3760f81f92872" Jan 20 17:43:31 crc kubenswrapper[4558]: E0120 17:43:31.913986 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a0ad02a3d302f3830db2cc15f3d92955d193e02b01825c96bcf3760f81f92872\": container with ID starting with a0ad02a3d302f3830db2cc15f3d92955d193e02b01825c96bcf3760f81f92872 not found: ID does not exist" containerID="a0ad02a3d302f3830db2cc15f3d92955d193e02b01825c96bcf3760f81f92872" Jan 20 17:43:31 crc kubenswrapper[4558]: I0120 17:43:31.914014 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a0ad02a3d302f3830db2cc15f3d92955d193e02b01825c96bcf3760f81f92872"} err="failed to get container status \"a0ad02a3d302f3830db2cc15f3d92955d193e02b01825c96bcf3760f81f92872\": rpc error: code = NotFound desc = could not find container \"a0ad02a3d302f3830db2cc15f3d92955d193e02b01825c96bcf3760f81f92872\": container with ID starting with a0ad02a3d302f3830db2cc15f3d92955d193e02b01825c96bcf3760f81f92872 not found: ID does not exist" Jan 20 17:43:31 crc kubenswrapper[4558]: I0120 17:43:31.914035 4558 scope.go:117] "RemoveContainer" containerID="4dca8ca5509273f800ab7c5cde1eec1ca33f8167df3366715098e139aa83d898" Jan 20 17:43:31 crc kubenswrapper[4558]: E0120 17:43:31.914319 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4dca8ca5509273f800ab7c5cde1eec1ca33f8167df3366715098e139aa83d898\": container with ID starting with 4dca8ca5509273f800ab7c5cde1eec1ca33f8167df3366715098e139aa83d898 not found: ID does not exist" containerID="4dca8ca5509273f800ab7c5cde1eec1ca33f8167df3366715098e139aa83d898" Jan 20 17:43:31 crc kubenswrapper[4558]: I0120 17:43:31.914355 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4dca8ca5509273f800ab7c5cde1eec1ca33f8167df3366715098e139aa83d898"} err="failed to get container status \"4dca8ca5509273f800ab7c5cde1eec1ca33f8167df3366715098e139aa83d898\": rpc error: code = NotFound desc = could not find container \"4dca8ca5509273f800ab7c5cde1eec1ca33f8167df3366715098e139aa83d898\": container with ID starting with 4dca8ca5509273f800ab7c5cde1eec1ca33f8167df3366715098e139aa83d898 not found: ID does not exist" Jan 20 17:43:31 crc kubenswrapper[4558]: I0120 17:43:31.914368 4558 scope.go:117] "RemoveContainer" containerID="9c38558131507676fb881bd62fab6cc179a513c6227f85a615c9705711240cbc" Jan 20 17:43:31 crc kubenswrapper[4558]: I0120 17:43:31.915504 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovnnorthd-config" Jan 20 17:43:31 crc kubenswrapper[4558]: 
I0120 17:43:31.915834 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ovnnorthd-ovnnorthd-dockercfg-rznpl" Jan 20 17:43:31 crc kubenswrapper[4558]: I0120 17:43:31.915999 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ovnnorthd-ovndbs" Jan 20 17:43:31 crc kubenswrapper[4558]: I0120 17:43:31.916281 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovnnorthd-scripts" Jan 20 17:43:31 crc kubenswrapper[4558]: I0120 17:43:31.920663 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:43:31 crc kubenswrapper[4558]: I0120 17:43:31.929761 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-6667c7bd65-7bw4v" Jan 20 17:43:31 crc kubenswrapper[4558]: I0120 17:43:31.951620 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:43:31 crc kubenswrapper[4558]: E0120 17:43:31.951985 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba27bcbb-36f0-4575-afba-d6997d667fef" containerName="barbican-worker" Jan 20 17:43:31 crc kubenswrapper[4558]: I0120 17:43:31.951997 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba27bcbb-36f0-4575-afba-d6997d667fef" containerName="barbican-worker" Jan 20 17:43:31 crc kubenswrapper[4558]: E0120 17:43:31.952024 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba27bcbb-36f0-4575-afba-d6997d667fef" containerName="barbican-worker-log" Jan 20 17:43:31 crc kubenswrapper[4558]: I0120 17:43:31.952030 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba27bcbb-36f0-4575-afba-d6997d667fef" containerName="barbican-worker-log" Jan 20 17:43:31 crc kubenswrapper[4558]: I0120 17:43:31.952217 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ba27bcbb-36f0-4575-afba-d6997d667fef" containerName="barbican-worker" Jan 20 17:43:31 crc kubenswrapper[4558]: I0120 17:43:31.952234 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ba27bcbb-36f0-4575-afba-d6997d667fef" containerName="barbican-worker-log" Jan 20 17:43:31 crc kubenswrapper[4558]: I0120 17:43:31.965629 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:43:31 crc kubenswrapper[4558]: I0120 17:43:31.968462 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-conductor-config-data" Jan 20 17:43:31 crc kubenswrapper[4558]: I0120 17:43:31.983453 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.006156 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ba27bcbb-36f0-4575-afba-d6997d667fef-config-data-custom\") pod \"ba27bcbb-36f0-4575-afba-d6997d667fef\" (UID: \"ba27bcbb-36f0-4575-afba-d6997d667fef\") " Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.006238 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba27bcbb-36f0-4575-afba-d6997d667fef-combined-ca-bundle\") pod \"ba27bcbb-36f0-4575-afba-d6997d667fef\" (UID: \"ba27bcbb-36f0-4575-afba-d6997d667fef\") " Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.006270 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dfa1a7bb-2900-49f0-94ee-fe789d4ffec8-config-data\") pod \"dfa1a7bb-2900-49f0-94ee-fe789d4ffec8\" (UID: \"dfa1a7bb-2900-49f0-94ee-fe789d4ffec8\") " Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.006314 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ba27bcbb-36f0-4575-afba-d6997d667fef-logs\") pod \"ba27bcbb-36f0-4575-afba-d6997d667fef\" (UID: \"ba27bcbb-36f0-4575-afba-d6997d667fef\") " Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.006365 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dh4jh\" (UniqueName: \"kubernetes.io/projected/ba27bcbb-36f0-4575-afba-d6997d667fef-kube-api-access-dh4jh\") pod \"ba27bcbb-36f0-4575-afba-d6997d667fef\" (UID: \"ba27bcbb-36f0-4575-afba-d6997d667fef\") " Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.006439 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dfa1a7bb-2900-49f0-94ee-fe789d4ffec8-combined-ca-bundle\") pod \"dfa1a7bb-2900-49f0-94ee-fe789d4ffec8\" (UID: \"dfa1a7bb-2900-49f0-94ee-fe789d4ffec8\") " Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.006483 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/dfa1a7bb-2900-49f0-94ee-fe789d4ffec8-config-data-custom\") pod \"dfa1a7bb-2900-49f0-94ee-fe789d4ffec8\" (UID: \"dfa1a7bb-2900-49f0-94ee-fe789d4ffec8\") " Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.006579 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dfa1a7bb-2900-49f0-94ee-fe789d4ffec8-logs\") pod \"dfa1a7bb-2900-49f0-94ee-fe789d4ffec8\" (UID: \"dfa1a7bb-2900-49f0-94ee-fe789d4ffec8\") " Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.006607 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ba27bcbb-36f0-4575-afba-d6997d667fef-config-data\") pod \"ba27bcbb-36f0-4575-afba-d6997d667fef\" (UID: 
\"ba27bcbb-36f0-4575-afba-d6997d667fef\") " Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.006637 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-44k45\" (UniqueName: \"kubernetes.io/projected/dfa1a7bb-2900-49f0-94ee-fe789d4ffec8-kube-api-access-44k45\") pod \"dfa1a7bb-2900-49f0-94ee-fe789d4ffec8\" (UID: \"dfa1a7bb-2900-49f0-94ee-fe789d4ffec8\") " Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.006964 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6wn7h\" (UniqueName: \"kubernetes.io/projected/d6e6a03e-c496-4589-8d51-f6d9e89e14ae-kube-api-access-6wn7h\") pod \"ovn-northd-0\" (UID: \"d6e6a03e-c496-4589-8d51-f6d9e89e14ae\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.007027 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d6e6a03e-c496-4589-8d51-f6d9e89e14ae-scripts\") pod \"ovn-northd-0\" (UID: \"d6e6a03e-c496-4589-8d51-f6d9e89e14ae\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.007053 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/d6e6a03e-c496-4589-8d51-f6d9e89e14ae-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"d6e6a03e-c496-4589-8d51-f6d9e89e14ae\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.007091 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6e6a03e-c496-4589-8d51-f6d9e89e14ae-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"d6e6a03e-c496-4589-8d51-f6d9e89e14ae\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.007151 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d6e6a03e-c496-4589-8d51-f6d9e89e14ae-config\") pod \"ovn-northd-0\" (UID: \"d6e6a03e-c496-4589-8d51-f6d9e89e14ae\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.007216 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/d6e6a03e-c496-4589-8d51-f6d9e89e14ae-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"d6e6a03e-c496-4589-8d51-f6d9e89e14ae\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.007245 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/d6e6a03e-c496-4589-8d51-f6d9e89e14ae-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"d6e6a03e-c496-4589-8d51-f6d9e89e14ae\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.014266 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba27bcbb-36f0-4575-afba-d6997d667fef-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "ba27bcbb-36f0-4575-afba-d6997d667fef" (UID: "ba27bcbb-36f0-4575-afba-d6997d667fef"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.029307 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dfa1a7bb-2900-49f0-94ee-fe789d4ffec8-kube-api-access-44k45" (OuterVolumeSpecName: "kube-api-access-44k45") pod "dfa1a7bb-2900-49f0-94ee-fe789d4ffec8" (UID: "dfa1a7bb-2900-49f0-94ee-fe789d4ffec8"). InnerVolumeSpecName "kube-api-access-44k45". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.031286 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dfa1a7bb-2900-49f0-94ee-fe789d4ffec8-logs" (OuterVolumeSpecName: "logs") pod "dfa1a7bb-2900-49f0-94ee-fe789d4ffec8" (UID: "dfa1a7bb-2900-49f0-94ee-fe789d4ffec8"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.031425 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ba27bcbb-36f0-4575-afba-d6997d667fef-logs" (OuterVolumeSpecName: "logs") pod "ba27bcbb-36f0-4575-afba-d6997d667fef" (UID: "ba27bcbb-36f0-4575-afba-d6997d667fef"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.045580 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ba27bcbb-36f0-4575-afba-d6997d667fef-kube-api-access-dh4jh" (OuterVolumeSpecName: "kube-api-access-dh4jh") pod "ba27bcbb-36f0-4575-afba-d6997d667fef" (UID: "ba27bcbb-36f0-4575-afba-d6997d667fef"). InnerVolumeSpecName "kube-api-access-dh4jh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.049875 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dfa1a7bb-2900-49f0-94ee-fe789d4ffec8-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "dfa1a7bb-2900-49f0-94ee-fe789d4ffec8" (UID: "dfa1a7bb-2900-49f0-94ee-fe789d4ffec8"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.054015 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dfa1a7bb-2900-49f0-94ee-fe789d4ffec8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dfa1a7bb-2900-49f0-94ee-fe789d4ffec8" (UID: "dfa1a7bb-2900-49f0-94ee-fe789d4ffec8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.067632 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba27bcbb-36f0-4575-afba-d6997d667fef-config-data" (OuterVolumeSpecName: "config-data") pod "ba27bcbb-36f0-4575-afba-d6997d667fef" (UID: "ba27bcbb-36f0-4575-afba-d6997d667fef"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.081346 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba27bcbb-36f0-4575-afba-d6997d667fef-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ba27bcbb-36f0-4575-afba-d6997d667fef" (UID: "ba27bcbb-36f0-4575-afba-d6997d667fef"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.095382 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dfa1a7bb-2900-49f0-94ee-fe789d4ffec8-config-data" (OuterVolumeSpecName: "config-data") pod "dfa1a7bb-2900-49f0-94ee-fe789d4ffec8" (UID: "dfa1a7bb-2900-49f0-94ee-fe789d4ffec8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.111134 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50cacddd-ebea-477f-af64-6e96a09a242e-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"50cacddd-ebea-477f-af64-6e96a09a242e\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.111203 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6wn7h\" (UniqueName: \"kubernetes.io/projected/d6e6a03e-c496-4589-8d51-f6d9e89e14ae-kube-api-access-6wn7h\") pod \"ovn-northd-0\" (UID: \"d6e6a03e-c496-4589-8d51-f6d9e89e14ae\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.111273 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d6e6a03e-c496-4589-8d51-f6d9e89e14ae-scripts\") pod \"ovn-northd-0\" (UID: \"d6e6a03e-c496-4589-8d51-f6d9e89e14ae\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.111299 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/d6e6a03e-c496-4589-8d51-f6d9e89e14ae-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"d6e6a03e-c496-4589-8d51-f6d9e89e14ae\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.111343 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6e6a03e-c496-4589-8d51-f6d9e89e14ae-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"d6e6a03e-c496-4589-8d51-f6d9e89e14ae\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.111382 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50cacddd-ebea-477f-af64-6e96a09a242e-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"50cacddd-ebea-477f-af64-6e96a09a242e\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.111422 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d6e6a03e-c496-4589-8d51-f6d9e89e14ae-config\") pod \"ovn-northd-0\" (UID: \"d6e6a03e-c496-4589-8d51-f6d9e89e14ae\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.111470 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/d6e6a03e-c496-4589-8d51-f6d9e89e14ae-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"d6e6a03e-c496-4589-8d51-f6d9e89e14ae\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 
17:43:32.112009 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/d6e6a03e-c496-4589-8d51-f6d9e89e14ae-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"d6e6a03e-c496-4589-8d51-f6d9e89e14ae\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.112706 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d6e6a03e-c496-4589-8d51-f6d9e89e14ae-scripts\") pod \"ovn-northd-0\" (UID: \"d6e6a03e-c496-4589-8d51-f6d9e89e14ae\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.112758 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8slx9\" (UniqueName: \"kubernetes.io/projected/50cacddd-ebea-477f-af64-6e96a09a242e-kube-api-access-8slx9\") pod \"nova-cell0-conductor-0\" (UID: \"50cacddd-ebea-477f-af64-6e96a09a242e\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.112803 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/d6e6a03e-c496-4589-8d51-f6d9e89e14ae-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"d6e6a03e-c496-4589-8d51-f6d9e89e14ae\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.112867 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dfa1a7bb-2900-49f0-94ee-fe789d4ffec8-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.112884 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ba27bcbb-36f0-4575-afba-d6997d667fef-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.112897 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-44k45\" (UniqueName: \"kubernetes.io/projected/dfa1a7bb-2900-49f0-94ee-fe789d4ffec8-kube-api-access-44k45\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.112917 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ba27bcbb-36f0-4575-afba-d6997d667fef-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.112929 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ba27bcbb-36f0-4575-afba-d6997d667fef-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.112938 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dfa1a7bb-2900-49f0-94ee-fe789d4ffec8-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.112947 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ba27bcbb-36f0-4575-afba-d6997d667fef-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.112956 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dh4jh\" (UniqueName: \"kubernetes.io/projected/ba27bcbb-36f0-4575-afba-d6997d667fef-kube-api-access-dh4jh\") on 
node \"crc\" DevicePath \"\"" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.112966 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dfa1a7bb-2900-49f0-94ee-fe789d4ffec8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.112977 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/dfa1a7bb-2900-49f0-94ee-fe789d4ffec8-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.113668 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d6e6a03e-c496-4589-8d51-f6d9e89e14ae-config\") pod \"ovn-northd-0\" (UID: \"d6e6a03e-c496-4589-8d51-f6d9e89e14ae\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.116067 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6e6a03e-c496-4589-8d51-f6d9e89e14ae-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"d6e6a03e-c496-4589-8d51-f6d9e89e14ae\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.116292 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/d6e6a03e-c496-4589-8d51-f6d9e89e14ae-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"d6e6a03e-c496-4589-8d51-f6d9e89e14ae\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.116543 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/d6e6a03e-c496-4589-8d51-f6d9e89e14ae-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"d6e6a03e-c496-4589-8d51-f6d9e89e14ae\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.127755 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6wn7h\" (UniqueName: \"kubernetes.io/projected/d6e6a03e-c496-4589-8d51-f6d9e89e14ae-kube-api-access-6wn7h\") pod \"ovn-northd-0\" (UID: \"d6e6a03e-c496-4589-8d51-f6d9e89e14ae\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.182195 4558 scope.go:117] "RemoveContainer" containerID="9c38558131507676fb881bd62fab6cc179a513c6227f85a615c9705711240cbc" Jan 20 17:43:32 crc kubenswrapper[4558]: E0120 17:43:32.183049 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9c38558131507676fb881bd62fab6cc179a513c6227f85a615c9705711240cbc\": container with ID starting with 9c38558131507676fb881bd62fab6cc179a513c6227f85a615c9705711240cbc not found: ID does not exist" containerID="9c38558131507676fb881bd62fab6cc179a513c6227f85a615c9705711240cbc" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.183107 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9c38558131507676fb881bd62fab6cc179a513c6227f85a615c9705711240cbc"} err="failed to get container status \"9c38558131507676fb881bd62fab6cc179a513c6227f85a615c9705711240cbc\": rpc error: code = NotFound desc = could not find container \"9c38558131507676fb881bd62fab6cc179a513c6227f85a615c9705711240cbc\": container with ID starting with 
9c38558131507676fb881bd62fab6cc179a513c6227f85a615c9705711240cbc not found: ID does not exist" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.212785 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"34a4b6ee-5830-4532-b829-a91cedcf8069","Type":"ContainerStarted","Data":"6142a118577f4983494f4e5bded80a60eb86bde1b0e2aabfeb8afaeda6b2b894"} Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.215357 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8slx9\" (UniqueName: \"kubernetes.io/projected/50cacddd-ebea-477f-af64-6e96a09a242e-kube-api-access-8slx9\") pod \"nova-cell0-conductor-0\" (UID: \"50cacddd-ebea-477f-af64-6e96a09a242e\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.215477 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50cacddd-ebea-477f-af64-6e96a09a242e-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"50cacddd-ebea-477f-af64-6e96a09a242e\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.219229 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50cacddd-ebea-477f-af64-6e96a09a242e-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"50cacddd-ebea-477f-af64-6e96a09a242e\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.223301 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"4ea608e7-9a19-47fb-8e14-a629451e7c03","Type":"ContainerStarted","Data":"2268bf37f5b535b0402d4e5d51dbdeabae160ee08d31b99321666536df207266"} Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.234530 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50cacddd-ebea-477f-af64-6e96a09a242e-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"50cacddd-ebea-477f-af64-6e96a09a242e\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.235568 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50cacddd-ebea-477f-af64-6e96a09a242e-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"50cacddd-ebea-477f-af64-6e96a09a242e\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.239266 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"38a5808f-1fa5-49cb-afb7-d7676416cd26","Type":"ContainerStarted","Data":"9ce99dfe9c0f0478fe1e3bc7eb8d4a9fb46628bf1aaf1c9dfa808ff005dc5efc"} Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.241645 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"ea72c008-3a66-4577-8042-4b1e0ed1cca6","Type":"ContainerStarted","Data":"f78a0d7e8dd2168ce02d05fa6fef206d25ece8bae681f33345521bfe6b0ff1e2"} Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.241720 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8slx9\" (UniqueName: 
\"kubernetes.io/projected/50cacddd-ebea-477f-af64-6e96a09a242e-kube-api-access-8slx9\") pod \"nova-cell0-conductor-0\" (UID: \"50cacddd-ebea-477f-af64-6e96a09a242e\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.246801 4558 generic.go:334] "Generic (PLEG): container finished" podID="ba27bcbb-36f0-4575-afba-d6997d667fef" containerID="7a8e9aa388298d08f4da43fd2c878f6be898c08220911c53d8f06152ad76440d" exitCode=0 Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.246882 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-6667c7bd65-7bw4v" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.246887 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-6667c7bd65-7bw4v" event={"ID":"ba27bcbb-36f0-4575-afba-d6997d667fef","Type":"ContainerDied","Data":"7a8e9aa388298d08f4da43fd2c878f6be898c08220911c53d8f06152ad76440d"} Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.247052 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-6667c7bd65-7bw4v" event={"ID":"ba27bcbb-36f0-4575-afba-d6997d667fef","Type":"ContainerDied","Data":"fc3acc9f98565078ec2d7658865735dfb1b52da59b3f94de0815d13fac56297b"} Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.247097 4558 scope.go:117] "RemoveContainer" containerID="7a8e9aa388298d08f4da43fd2c878f6be898c08220911c53d8f06152ad76440d" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.255071 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-api-0" podStartSLOduration=4.255057795 podStartE2EDuration="4.255057795s" podCreationTimestamp="2026-01-20 17:43:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:43:32.250056572 +0000 UTC m=+3706.010394539" watchObservedRunningTime="2026-01-20 17:43:32.255057795 +0000 UTC m=+3706.015395762" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.261627 4558 generic.go:334] "Generic (PLEG): container finished" podID="d791ceee-e87d-4238-9267-c2d2c53faf96" containerID="b225f9e53aaf9806843cb0212349c3e9e4362df912776534664c2dac9cdadb6b" exitCode=0 Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.261697 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"d791ceee-e87d-4238-9267-c2d2c53faf96","Type":"ContainerDied","Data":"b225f9e53aaf9806843cb0212349c3e9e4362df912776534664c2dac9cdadb6b"} Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.261729 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"d791ceee-e87d-4238-9267-c2d2c53faf96","Type":"ContainerDied","Data":"ec03225994940c8abc9d0676a20add78ed6d76579350199ed0d95343231a9e01"} Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.261741 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ec03225994940c8abc9d0676a20add78ed6d76579350199ed0d95343231a9e01" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.264369 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"a226d25c-395c-4744-a42e-46beee40d8e4","Type":"ContainerStarted","Data":"c2d5f332e14bf6025abb08a1b2a602143e272b9ab74340dd7eb94b7c005b01d8"} Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 
17:43:32.265856 4558 generic.go:334] "Generic (PLEG): container finished" podID="dfa1a7bb-2900-49f0-94ee-fe789d4ffec8" containerID="4918d6999246b14e34bbf966f6c160e07d3a28ecedad21b571990984b63736da" exitCode=0 Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.265903 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-5f469dd86b-h6dq9" event={"ID":"dfa1a7bb-2900-49f0-94ee-fe789d4ffec8","Type":"ContainerDied","Data":"4918d6999246b14e34bbf966f6c160e07d3a28ecedad21b571990984b63736da"} Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.265932 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-5f469dd86b-h6dq9" event={"ID":"dfa1a7bb-2900-49f0-94ee-fe789d4ffec8","Type":"ContainerDied","Data":"6b5206546241e722df21bc36a7bfbdffafe84c8538c062bc41ea4da1087d3d5e"} Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.266006 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-5f469dd86b-h6dq9" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.267306 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"5e0d404f-75d1-4be8-9c12-803d8db759e6","Type":"ContainerStarted","Data":"bab504222f0097ec4e1c41baba39adba55d431a6848496b810190dd205c98a8a"} Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.277084 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.295582 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.300901 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.311309 4558 scope.go:117] "RemoveContainer" containerID="ce43e3bb6306e86eacbf56d2bb8a010f8f3998ab291973f5ad5e72276c882cdf" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.317674 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-worker-6667c7bd65-7bw4v"] Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.328214 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-worker-6667c7bd65-7bw4v"] Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.353611 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-5f469dd86b-h6dq9"] Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.364186 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-5f469dd86b-h6dq9"] Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.406435 4558 scope.go:117] "RemoveContainer" containerID="7a8e9aa388298d08f4da43fd2c878f6be898c08220911c53d8f06152ad76440d" Jan 20 17:43:32 crc kubenswrapper[4558]: E0120 17:43:32.407543 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7a8e9aa388298d08f4da43fd2c878f6be898c08220911c53d8f06152ad76440d\": container with ID starting with 7a8e9aa388298d08f4da43fd2c878f6be898c08220911c53d8f06152ad76440d not found: ID does not exist" containerID="7a8e9aa388298d08f4da43fd2c878f6be898c08220911c53d8f06152ad76440d" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.407566 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7a8e9aa388298d08f4da43fd2c878f6be898c08220911c53d8f06152ad76440d"} err="failed to get container status \"7a8e9aa388298d08f4da43fd2c878f6be898c08220911c53d8f06152ad76440d\": rpc error: code = NotFound desc = could not find container \"7a8e9aa388298d08f4da43fd2c878f6be898c08220911c53d8f06152ad76440d\": container with ID starting with 7a8e9aa388298d08f4da43fd2c878f6be898c08220911c53d8f06152ad76440d not found: ID does not exist" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.407594 4558 scope.go:117] "RemoveContainer" containerID="ce43e3bb6306e86eacbf56d2bb8a010f8f3998ab291973f5ad5e72276c882cdf" Jan 20 17:43:32 crc kubenswrapper[4558]: E0120 17:43:32.407785 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ce43e3bb6306e86eacbf56d2bb8a010f8f3998ab291973f5ad5e72276c882cdf\": container with ID starting with ce43e3bb6306e86eacbf56d2bb8a010f8f3998ab291973f5ad5e72276c882cdf not found: ID does not exist" containerID="ce43e3bb6306e86eacbf56d2bb8a010f8f3998ab291973f5ad5e72276c882cdf" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.407808 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ce43e3bb6306e86eacbf56d2bb8a010f8f3998ab291973f5ad5e72276c882cdf"} err="failed to get container status \"ce43e3bb6306e86eacbf56d2bb8a010f8f3998ab291973f5ad5e72276c882cdf\": rpc error: code = NotFound desc = could not find container \"ce43e3bb6306e86eacbf56d2bb8a010f8f3998ab291973f5ad5e72276c882cdf\": container with ID starting with ce43e3bb6306e86eacbf56d2bb8a010f8f3998ab291973f5ad5e72276c882cdf not found: ID does not exist" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.407823 4558 scope.go:117] 
"RemoveContainer" containerID="4918d6999246b14e34bbf966f6c160e07d3a28ecedad21b571990984b63736da" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.423269 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d791ceee-e87d-4238-9267-c2d2c53faf96-run-httpd\") pod \"d791ceee-e87d-4238-9267-c2d2c53faf96\" (UID: \"d791ceee-e87d-4238-9267-c2d2c53faf96\") " Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.423325 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d791ceee-e87d-4238-9267-c2d2c53faf96-combined-ca-bundle\") pod \"d791ceee-e87d-4238-9267-c2d2c53faf96\" (UID: \"d791ceee-e87d-4238-9267-c2d2c53faf96\") " Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.423429 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d791ceee-e87d-4238-9267-c2d2c53faf96-log-httpd\") pod \"d791ceee-e87d-4238-9267-c2d2c53faf96\" (UID: \"d791ceee-e87d-4238-9267-c2d2c53faf96\") " Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.423523 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/d791ceee-e87d-4238-9267-c2d2c53faf96-ceilometer-tls-certs\") pod \"d791ceee-e87d-4238-9267-c2d2c53faf96\" (UID: \"d791ceee-e87d-4238-9267-c2d2c53faf96\") " Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.423586 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d791ceee-e87d-4238-9267-c2d2c53faf96-sg-core-conf-yaml\") pod \"d791ceee-e87d-4238-9267-c2d2c53faf96\" (UID: \"d791ceee-e87d-4238-9267-c2d2c53faf96\") " Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.423678 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d791ceee-e87d-4238-9267-c2d2c53faf96-config-data\") pod \"d791ceee-e87d-4238-9267-c2d2c53faf96\" (UID: \"d791ceee-e87d-4238-9267-c2d2c53faf96\") " Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.423759 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d791ceee-e87d-4238-9267-c2d2c53faf96-scripts\") pod \"d791ceee-e87d-4238-9267-c2d2c53faf96\" (UID: \"d791ceee-e87d-4238-9267-c2d2c53faf96\") " Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.423792 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l8h4j\" (UniqueName: \"kubernetes.io/projected/d791ceee-e87d-4238-9267-c2d2c53faf96-kube-api-access-l8h4j\") pod \"d791ceee-e87d-4238-9267-c2d2c53faf96\" (UID: \"d791ceee-e87d-4238-9267-c2d2c53faf96\") " Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.423798 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d791ceee-e87d-4238-9267-c2d2c53faf96-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "d791ceee-e87d-4238-9267-c2d2c53faf96" (UID: "d791ceee-e87d-4238-9267-c2d2c53faf96"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.425204 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d791ceee-e87d-4238-9267-c2d2c53faf96-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "d791ceee-e87d-4238-9267-c2d2c53faf96" (UID: "d791ceee-e87d-4238-9267-c2d2c53faf96"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.426417 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d791ceee-e87d-4238-9267-c2d2c53faf96-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.430719 4558 scope.go:117] "RemoveContainer" containerID="41a835374e97b54b247e3d01bbee52c1fb97c1dfd37e923ebe958c81a0dbe228" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.440221 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d791ceee-e87d-4238-9267-c2d2c53faf96-kube-api-access-l8h4j" (OuterVolumeSpecName: "kube-api-access-l8h4j") pod "d791ceee-e87d-4238-9267-c2d2c53faf96" (UID: "d791ceee-e87d-4238-9267-c2d2c53faf96"). InnerVolumeSpecName "kube-api-access-l8h4j". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.444861 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d791ceee-e87d-4238-9267-c2d2c53faf96-scripts" (OuterVolumeSpecName: "scripts") pod "d791ceee-e87d-4238-9267-c2d2c53faf96" (UID: "d791ceee-e87d-4238-9267-c2d2c53faf96"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.486953 4558 scope.go:117] "RemoveContainer" containerID="4918d6999246b14e34bbf966f6c160e07d3a28ecedad21b571990984b63736da" Jan 20 17:43:32 crc kubenswrapper[4558]: E0120 17:43:32.491262 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4918d6999246b14e34bbf966f6c160e07d3a28ecedad21b571990984b63736da\": container with ID starting with 4918d6999246b14e34bbf966f6c160e07d3a28ecedad21b571990984b63736da not found: ID does not exist" containerID="4918d6999246b14e34bbf966f6c160e07d3a28ecedad21b571990984b63736da" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.491292 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4918d6999246b14e34bbf966f6c160e07d3a28ecedad21b571990984b63736da"} err="failed to get container status \"4918d6999246b14e34bbf966f6c160e07d3a28ecedad21b571990984b63736da\": rpc error: code = NotFound desc = could not find container \"4918d6999246b14e34bbf966f6c160e07d3a28ecedad21b571990984b63736da\": container with ID starting with 4918d6999246b14e34bbf966f6c160e07d3a28ecedad21b571990984b63736da not found: ID does not exist" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.491324 4558 scope.go:117] "RemoveContainer" containerID="41a835374e97b54b247e3d01bbee52c1fb97c1dfd37e923ebe958c81a0dbe228" Jan 20 17:43:32 crc kubenswrapper[4558]: E0120 17:43:32.495293 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"41a835374e97b54b247e3d01bbee52c1fb97c1dfd37e923ebe958c81a0dbe228\": container with ID starting with 
41a835374e97b54b247e3d01bbee52c1fb97c1dfd37e923ebe958c81a0dbe228 not found: ID does not exist" containerID="41a835374e97b54b247e3d01bbee52c1fb97c1dfd37e923ebe958c81a0dbe228" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.495332 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"41a835374e97b54b247e3d01bbee52c1fb97c1dfd37e923ebe958c81a0dbe228"} err="failed to get container status \"41a835374e97b54b247e3d01bbee52c1fb97c1dfd37e923ebe958c81a0dbe228\": rpc error: code = NotFound desc = could not find container \"41a835374e97b54b247e3d01bbee52c1fb97c1dfd37e923ebe958c81a0dbe228\": container with ID starting with 41a835374e97b54b247e3d01bbee52c1fb97c1dfd37e923ebe958c81a0dbe228 not found: ID does not exist" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.528847 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d791ceee-e87d-4238-9267-c2d2c53faf96-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.528874 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d791ceee-e87d-4238-9267-c2d2c53faf96-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.528886 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l8h4j\" (UniqueName: \"kubernetes.io/projected/d791ceee-e87d-4238-9267-c2d2c53faf96-kube-api-access-l8h4j\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.577242 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="365754d1-535b-450b-a80e-1e7402cb28f8" path="/var/lib/kubelet/pods/365754d1-535b-450b-a80e-1e7402cb28f8/volumes" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.577814 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9db31a7a-b4bb-4c3b-99f4-b9855af99342" path="/var/lib/kubelet/pods/9db31a7a-b4bb-4c3b-99f4-b9855af99342/volumes" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.578522 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ba27bcbb-36f0-4575-afba-d6997d667fef" path="/var/lib/kubelet/pods/ba27bcbb-36f0-4575-afba-d6997d667fef/volumes" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.583651 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dfa1a7bb-2900-49f0-94ee-fe789d4ffec8" path="/var/lib/kubelet/pods/dfa1a7bb-2900-49f0-94ee-fe789d4ffec8/volumes" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.677085 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d791ceee-e87d-4238-9267-c2d2c53faf96-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "d791ceee-e87d-4238-9267-c2d2c53faf96" (UID: "d791ceee-e87d-4238-9267-c2d2c53faf96"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.750958 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d791ceee-e87d-4238-9267-c2d2c53faf96-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.772210 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d791ceee-e87d-4238-9267-c2d2c53faf96-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "d791ceee-e87d-4238-9267-c2d2c53faf96" (UID: "d791ceee-e87d-4238-9267-c2d2c53faf96"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.781409 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d791ceee-e87d-4238-9267-c2d2c53faf96-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d791ceee-e87d-4238-9267-c2d2c53faf96" (UID: "d791ceee-e87d-4238-9267-c2d2c53faf96"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:32 crc kubenswrapper[4558]: W0120 17:43:32.806403 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd6e6a03e_c496_4589_8d51_f6d9e89e14ae.slice/crio-3bdcd97cc1aef4d310fabb378441cbbc789ba14dafc8d7966bbd82791291aed5 WatchSource:0}: Error finding container 3bdcd97cc1aef4d310fabb378441cbbc789ba14dafc8d7966bbd82791291aed5: Status 404 returned error can't find the container with id 3bdcd97cc1aef4d310fabb378441cbbc789ba14dafc8d7966bbd82791291aed5 Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.829638 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.881931 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d791ceee-e87d-4238-9267-c2d2c53faf96-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.881969 4558 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/d791ceee-e87d-4238-9267-c2d2c53faf96-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:32 crc kubenswrapper[4558]: I0120 17:43:32.894039 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:43:33 crc kubenswrapper[4558]: I0120 17:43:33.001320 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d791ceee-e87d-4238-9267-c2d2c53faf96-config-data" (OuterVolumeSpecName: "config-data") pod "d791ceee-e87d-4238-9267-c2d2c53faf96" (UID: "d791ceee-e87d-4238-9267-c2d2c53faf96"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:33 crc kubenswrapper[4558]: I0120 17:43:33.088646 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d791ceee-e87d-4238-9267-c2d2c53faf96-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:33 crc kubenswrapper[4558]: I0120 17:43:33.288458 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"50cacddd-ebea-477f-af64-6e96a09a242e","Type":"ContainerStarted","Data":"730ae867cde8505304ebc18f48a08da8136835e4837c2c2e24d35a2c01d439a6"} Jan 20 17:43:33 crc kubenswrapper[4558]: I0120 17:43:33.293490 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"572d7a53-9a17-43e3-bc12-a04f994eb857","Type":"ContainerStarted","Data":"3df9ef85a677a70aa8631b76f358d8c7876e88242945b5d72d5b5a6a3a549fd0"} Jan 20 17:43:33 crc kubenswrapper[4558]: I0120 17:43:33.293546 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"572d7a53-9a17-43e3-bc12-a04f994eb857","Type":"ContainerStarted","Data":"a919d8c90acccccbc8402af93925861e0da53f38a61a01f63b79f7cffbdcd991"} Jan 20 17:43:33 crc kubenswrapper[4558]: I0120 17:43:33.311406 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"924d7506-6027-4d31-b57a-19fc787ba356","Type":"ContainerStarted","Data":"925992b2eef4db1ac2a2537f57dad717cf3ee3ea67cbe9cec01c652878bf3f97"} Jan 20 17:43:33 crc kubenswrapper[4558]: I0120 17:43:33.311444 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"924d7506-6027-4d31-b57a-19fc787ba356","Type":"ContainerStarted","Data":"0b23cac82af3ec3ee44a48b3126c8df9ad27e49f292999b18a09db1d3481441e"} Jan 20 17:43:33 crc kubenswrapper[4558]: I0120 17:43:33.319095 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-metadata-0" podStartSLOduration=5.319075874 podStartE2EDuration="5.319075874s" podCreationTimestamp="2026-01-20 17:43:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:43:33.312393751 +0000 UTC m=+3707.072731717" watchObservedRunningTime="2026-01-20 17:43:33.319075874 +0000 UTC m=+3707.079413841" Jan 20 17:43:33 crc kubenswrapper[4558]: I0120 17:43:33.337692 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ovsdbserver-sb-0" podStartSLOduration=5.33767741 podStartE2EDuration="5.33767741s" podCreationTimestamp="2026-01-20 17:43:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:43:33.336100304 +0000 UTC m=+3707.096438272" watchObservedRunningTime="2026-01-20 17:43:33.33767741 +0000 UTC m=+3707.098015376" Jan 20 17:43:33 crc kubenswrapper[4558]: I0120 17:43:33.352451 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"d6e6a03e-c496-4589-8d51-f6d9e89e14ae","Type":"ContainerStarted","Data":"3bdcd97cc1aef4d310fabb378441cbbc789ba14dafc8d7966bbd82791291aed5"} Jan 20 17:43:33 crc kubenswrapper[4558]: I0120 17:43:33.371067 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" 
event={"ID":"b2fb8e70-15aa-4b38-a44d-5f818dfc755c","Type":"ContainerStarted","Data":"ead17a0d58c199bc2ba003661375148f8b9c61941971a8e8d61f318e35d6d553"} Jan 20 17:43:33 crc kubenswrapper[4558]: I0120 17:43:33.371111 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"b2fb8e70-15aa-4b38-a44d-5f818dfc755c","Type":"ContainerStarted","Data":"54155d91bfb6e6c18d3eb5afc095843ec5b7f0df04d6716cdc4199d6b21aa7d6"} Jan 20 17:43:33 crc kubenswrapper[4558]: I0120 17:43:33.377821 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"5e0d404f-75d1-4be8-9c12-803d8db759e6","Type":"ContainerStarted","Data":"a28f5d4301df9252556516004020ebc32a69c49a2a100beb6220ce8b200a5a7b"} Jan 20 17:43:33 crc kubenswrapper[4558]: I0120 17:43:33.379712 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"56d39f72-48d3-4690-a20a-099cb41daa7e","Type":"ContainerStarted","Data":"3358b6c2c0a492ce2df7675567ba772d472b5f09db162d54f169def48d48b722"} Jan 20 17:43:33 crc kubenswrapper[4558]: I0120 17:43:33.381105 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:33 crc kubenswrapper[4558]: I0120 17:43:33.383285 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"7faabe16-9de5-49dc-bab6-44e173f4403c","Type":"ContainerStarted","Data":"88e09b52c88812df27321ee3632982258e7318dfda1d7dfa673a282b0d3aea31"} Jan 20 17:43:33 crc kubenswrapper[4558]: I0120 17:43:33.386004 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:33 crc kubenswrapper[4558]: I0120 17:43:33.387734 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"a226d25c-395c-4744-a42e-46beee40d8e4","Type":"ContainerStarted","Data":"219c134e0aca9d6bec94b18c2506bf6bb9bb0bf7e39c133a7d64764bf6f7e5e2"} Jan 20 17:43:33 crc kubenswrapper[4558]: I0120 17:43:33.403832 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ovsdbserver-nb-0" podStartSLOduration=5.403809413 podStartE2EDuration="5.403809413s" podCreationTimestamp="2026-01-20 17:43:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:43:33.396234511 +0000 UTC m=+3707.156572477" watchObservedRunningTime="2026-01-20 17:43:33.403809413 +0000 UTC m=+3707.164147380" Jan 20 17:43:33 crc kubenswrapper[4558]: I0120 17:43:33.418859 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/cinder-api-0" podStartSLOduration=7.41883752 podStartE2EDuration="7.41883752s" podCreationTimestamp="2026-01-20 17:43:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:43:33.417371203 +0000 UTC m=+3707.177709170" watchObservedRunningTime="2026-01-20 17:43:33.41883752 +0000 UTC m=+3707.179175487" Jan 20 17:43:33 crc kubenswrapper[4558]: I0120 17:43:33.481128 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:43:33 crc kubenswrapper[4558]: I0120 17:43:33.498456 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:43:33 crc kubenswrapper[4558]: I0120 17:43:33.528385 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:43:33 crc kubenswrapper[4558]: E0120 17:43:33.529037 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d791ceee-e87d-4238-9267-c2d2c53faf96" containerName="sg-core" Jan 20 17:43:33 crc kubenswrapper[4558]: I0120 17:43:33.529054 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d791ceee-e87d-4238-9267-c2d2c53faf96" containerName="sg-core" Jan 20 17:43:33 crc kubenswrapper[4558]: E0120 17:43:33.529095 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d791ceee-e87d-4238-9267-c2d2c53faf96" containerName="proxy-httpd" Jan 20 17:43:33 crc kubenswrapper[4558]: I0120 17:43:33.529101 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d791ceee-e87d-4238-9267-c2d2c53faf96" containerName="proxy-httpd" Jan 20 17:43:33 crc kubenswrapper[4558]: E0120 17:43:33.529112 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d791ceee-e87d-4238-9267-c2d2c53faf96" containerName="ceilometer-central-agent" Jan 20 17:43:33 crc kubenswrapper[4558]: I0120 17:43:33.529117 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d791ceee-e87d-4238-9267-c2d2c53faf96" containerName="ceilometer-central-agent" Jan 20 17:43:33 crc kubenswrapper[4558]: E0120 17:43:33.529127 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d791ceee-e87d-4238-9267-c2d2c53faf96" containerName="ceilometer-notification-agent" Jan 20 17:43:33 crc kubenswrapper[4558]: I0120 17:43:33.529134 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d791ceee-e87d-4238-9267-c2d2c53faf96" containerName="ceilometer-notification-agent" Jan 20 17:43:33 crc kubenswrapper[4558]: I0120 17:43:33.529304 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d791ceee-e87d-4238-9267-c2d2c53faf96" containerName="sg-core" Jan 20 17:43:33 crc kubenswrapper[4558]: I0120 17:43:33.529321 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d791ceee-e87d-4238-9267-c2d2c53faf96" containerName="ceilometer-central-agent" Jan 20 17:43:33 crc kubenswrapper[4558]: I0120 17:43:33.529331 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d791ceee-e87d-4238-9267-c2d2c53faf96" containerName="proxy-httpd" Jan 20 17:43:33 crc kubenswrapper[4558]: I0120 17:43:33.529342 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d791ceee-e87d-4238-9267-c2d2c53faf96" containerName="ceilometer-notification-agent" Jan 20 17:43:33 crc kubenswrapper[4558]: I0120 17:43:33.531513 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:33 crc kubenswrapper[4558]: I0120 17:43:33.536539 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:43:33 crc kubenswrapper[4558]: I0120 17:43:33.536938 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:43:33 crc kubenswrapper[4558]: I0120 17:43:33.537236 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ceilometer-internal-svc" Jan 20 17:43:33 crc kubenswrapper[4558]: I0120 17:43:33.544824 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:43:33 crc kubenswrapper[4558]: I0120 17:43:33.614081 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/48a3e4b6-ae17-449f-81f8-e521d7f16b7f-log-httpd\") pod \"ceilometer-0\" (UID: \"48a3e4b6-ae17-449f-81f8-e521d7f16b7f\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:33 crc kubenswrapper[4558]: I0120 17:43:33.614196 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/48a3e4b6-ae17-449f-81f8-e521d7f16b7f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"48a3e4b6-ae17-449f-81f8-e521d7f16b7f\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:33 crc kubenswrapper[4558]: I0120 17:43:33.614229 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/48a3e4b6-ae17-449f-81f8-e521d7f16b7f-run-httpd\") pod \"ceilometer-0\" (UID: \"48a3e4b6-ae17-449f-81f8-e521d7f16b7f\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:33 crc kubenswrapper[4558]: I0120 17:43:33.614339 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48a3e4b6-ae17-449f-81f8-e521d7f16b7f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"48a3e4b6-ae17-449f-81f8-e521d7f16b7f\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:33 crc kubenswrapper[4558]: I0120 17:43:33.614362 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/48a3e4b6-ae17-449f-81f8-e521d7f16b7f-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"48a3e4b6-ae17-449f-81f8-e521d7f16b7f\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:33 crc kubenswrapper[4558]: I0120 17:43:33.614576 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f6ztb\" (UniqueName: \"kubernetes.io/projected/48a3e4b6-ae17-449f-81f8-e521d7f16b7f-kube-api-access-f6ztb\") pod \"ceilometer-0\" (UID: \"48a3e4b6-ae17-449f-81f8-e521d7f16b7f\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:33 crc kubenswrapper[4558]: I0120 17:43:33.614631 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/48a3e4b6-ae17-449f-81f8-e521d7f16b7f-config-data\") pod \"ceilometer-0\" (UID: \"48a3e4b6-ae17-449f-81f8-e521d7f16b7f\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:33 crc kubenswrapper[4558]: I0120 17:43:33.614678 4558 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/48a3e4b6-ae17-449f-81f8-e521d7f16b7f-scripts\") pod \"ceilometer-0\" (UID: \"48a3e4b6-ae17-449f-81f8-e521d7f16b7f\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:33 crc kubenswrapper[4558]: I0120 17:43:33.718253 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48a3e4b6-ae17-449f-81f8-e521d7f16b7f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"48a3e4b6-ae17-449f-81f8-e521d7f16b7f\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:33 crc kubenswrapper[4558]: I0120 17:43:33.718294 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/48a3e4b6-ae17-449f-81f8-e521d7f16b7f-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"48a3e4b6-ae17-449f-81f8-e521d7f16b7f\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:33 crc kubenswrapper[4558]: I0120 17:43:33.718373 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f6ztb\" (UniqueName: \"kubernetes.io/projected/48a3e4b6-ae17-449f-81f8-e521d7f16b7f-kube-api-access-f6ztb\") pod \"ceilometer-0\" (UID: \"48a3e4b6-ae17-449f-81f8-e521d7f16b7f\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:33 crc kubenswrapper[4558]: I0120 17:43:33.718406 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/48a3e4b6-ae17-449f-81f8-e521d7f16b7f-config-data\") pod \"ceilometer-0\" (UID: \"48a3e4b6-ae17-449f-81f8-e521d7f16b7f\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:33 crc kubenswrapper[4558]: I0120 17:43:33.718434 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/48a3e4b6-ae17-449f-81f8-e521d7f16b7f-scripts\") pod \"ceilometer-0\" (UID: \"48a3e4b6-ae17-449f-81f8-e521d7f16b7f\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:33 crc kubenswrapper[4558]: I0120 17:43:33.718474 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/48a3e4b6-ae17-449f-81f8-e521d7f16b7f-log-httpd\") pod \"ceilometer-0\" (UID: \"48a3e4b6-ae17-449f-81f8-e521d7f16b7f\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:33 crc kubenswrapper[4558]: I0120 17:43:33.718496 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/48a3e4b6-ae17-449f-81f8-e521d7f16b7f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"48a3e4b6-ae17-449f-81f8-e521d7f16b7f\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:33 crc kubenswrapper[4558]: I0120 17:43:33.718515 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/48a3e4b6-ae17-449f-81f8-e521d7f16b7f-run-httpd\") pod \"ceilometer-0\" (UID: \"48a3e4b6-ae17-449f-81f8-e521d7f16b7f\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:33 crc kubenswrapper[4558]: I0120 17:43:33.718994 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/48a3e4b6-ae17-449f-81f8-e521d7f16b7f-log-httpd\") pod \"ceilometer-0\" (UID: \"48a3e4b6-ae17-449f-81f8-e521d7f16b7f\") " 
pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:33 crc kubenswrapper[4558]: I0120 17:43:33.719050 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/48a3e4b6-ae17-449f-81f8-e521d7f16b7f-run-httpd\") pod \"ceilometer-0\" (UID: \"48a3e4b6-ae17-449f-81f8-e521d7f16b7f\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:33 crc kubenswrapper[4558]: I0120 17:43:33.725570 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48a3e4b6-ae17-449f-81f8-e521d7f16b7f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"48a3e4b6-ae17-449f-81f8-e521d7f16b7f\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:33 crc kubenswrapper[4558]: I0120 17:43:33.726192 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/48a3e4b6-ae17-449f-81f8-e521d7f16b7f-config-data\") pod \"ceilometer-0\" (UID: \"48a3e4b6-ae17-449f-81f8-e521d7f16b7f\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:33 crc kubenswrapper[4558]: I0120 17:43:33.726650 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/48a3e4b6-ae17-449f-81f8-e521d7f16b7f-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"48a3e4b6-ae17-449f-81f8-e521d7f16b7f\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:33 crc kubenswrapper[4558]: I0120 17:43:33.727597 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/48a3e4b6-ae17-449f-81f8-e521d7f16b7f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"48a3e4b6-ae17-449f-81f8-e521d7f16b7f\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:33 crc kubenswrapper[4558]: I0120 17:43:33.727823 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/48a3e4b6-ae17-449f-81f8-e521d7f16b7f-scripts\") pod \"ceilometer-0\" (UID: \"48a3e4b6-ae17-449f-81f8-e521d7f16b7f\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:33 crc kubenswrapper[4558]: I0120 17:43:33.734727 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f6ztb\" (UniqueName: \"kubernetes.io/projected/48a3e4b6-ae17-449f-81f8-e521d7f16b7f-kube-api-access-f6ztb\") pod \"ceilometer-0\" (UID: \"48a3e4b6-ae17-449f-81f8-e521d7f16b7f\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:33 crc kubenswrapper[4558]: I0120 17:43:33.747956 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:43:33 crc kubenswrapper[4558]: I0120 17:43:33.868680 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:34 crc kubenswrapper[4558]: I0120 17:43:34.282869 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:43:34 crc kubenswrapper[4558]: I0120 17:43:34.397780 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:43:34 crc kubenswrapper[4558]: W0120 17:43:34.403865 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod48a3e4b6_ae17_449f_81f8_e521d7f16b7f.slice/crio-2721b2581c108773c46b6cc13a2582297ea62b63b7e3b16a3dafaae823677fbe WatchSource:0}: Error finding container 2721b2581c108773c46b6cc13a2582297ea62b63b7e3b16a3dafaae823677fbe: Status 404 returned error can't find the container with id 2721b2581c108773c46b6cc13a2582297ea62b63b7e3b16a3dafaae823677fbe Jan 20 17:43:34 crc kubenswrapper[4558]: I0120 17:43:34.404532 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"7faabe16-9de5-49dc-bab6-44e173f4403c","Type":"ContainerStarted","Data":"3988e808e906ec72c1e2c7130ece7e18110316238273aacbead8355ff8099aa5"} Jan 20 17:43:34 crc kubenswrapper[4558]: I0120 17:43:34.409428 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"a226d25c-395c-4744-a42e-46beee40d8e4","Type":"ContainerStarted","Data":"6cd4eed6d850e065b4e90baa84aa2f605080fb6620a3f314df9331e9a7480679"} Jan 20 17:43:34 crc kubenswrapper[4558]: I0120 17:43:34.412145 4558 generic.go:334] "Generic (PLEG): container finished" podID="ea72c008-3a66-4577-8042-4b1e0ed1cca6" containerID="f78a0d7e8dd2168ce02d05fa6fef206d25ece8bae681f33345521bfe6b0ff1e2" exitCode=1 Jan 20 17:43:34 crc kubenswrapper[4558]: I0120 17:43:34.412227 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"ea72c008-3a66-4577-8042-4b1e0ed1cca6","Type":"ContainerDied","Data":"f78a0d7e8dd2168ce02d05fa6fef206d25ece8bae681f33345521bfe6b0ff1e2"} Jan 20 17:43:34 crc kubenswrapper[4558]: I0120 17:43:34.412802 4558 scope.go:117] "RemoveContainer" containerID="f78a0d7e8dd2168ce02d05fa6fef206d25ece8bae681f33345521bfe6b0ff1e2" Jan 20 17:43:34 crc kubenswrapper[4558]: I0120 17:43:34.416957 4558 generic.go:334] "Generic (PLEG): container finished" podID="38a5808f-1fa5-49cb-afb7-d7676416cd26" containerID="9ce99dfe9c0f0478fe1e3bc7eb8d4a9fb46628bf1aaf1c9dfa808ff005dc5efc" exitCode=0 Jan 20 17:43:34 crc kubenswrapper[4558]: I0120 17:43:34.416989 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"38a5808f-1fa5-49cb-afb7-d7676416cd26","Type":"ContainerDied","Data":"9ce99dfe9c0f0478fe1e3bc7eb8d4a9fb46628bf1aaf1c9dfa808ff005dc5efc"} Jan 20 17:43:34 crc kubenswrapper[4558]: I0120 17:43:34.423499 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"50cacddd-ebea-477f-af64-6e96a09a242e","Type":"ContainerStarted","Data":"3614cef23b8586e57abf934d633380dd75e1dc52694d97ee527c65e3ded2f54c"} Jan 20 17:43:34 crc kubenswrapper[4558]: I0120 17:43:34.425264 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:43:34 crc kubenswrapper[4558]: I0120 17:43:34.431294 4558 pod_startup_latency_tracker.go:104] "Observed pod startup 
duration" pod="openstack-kuttl-tests/cinder-scheduler-0" podStartSLOduration=6.431273131 podStartE2EDuration="6.431273131s" podCreationTimestamp="2026-01-20 17:43:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:43:34.420899156 +0000 UTC m=+3708.181237123" watchObservedRunningTime="2026-01-20 17:43:34.431273131 +0000 UTC m=+3708.191611099" Jan 20 17:43:34 crc kubenswrapper[4558]: I0120 17:43:34.442809 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"5e0d404f-75d1-4be8-9c12-803d8db759e6","Type":"ContainerStarted","Data":"97feb272063e6cb10279ddbd244ed9abed0f4fe2c8c6974b1016d61ab6e14c8d"} Jan 20 17:43:34 crc kubenswrapper[4558]: I0120 17:43:34.447811 4558 generic.go:334] "Generic (PLEG): container finished" podID="34a4b6ee-5830-4532-b829-a91cedcf8069" containerID="6142a118577f4983494f4e5bded80a60eb86bde1b0e2aabfeb8afaeda6b2b894" exitCode=0 Jan 20 17:43:34 crc kubenswrapper[4558]: I0120 17:43:34.447882 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"34a4b6ee-5830-4532-b829-a91cedcf8069","Type":"ContainerDied","Data":"6142a118577f4983494f4e5bded80a60eb86bde1b0e2aabfeb8afaeda6b2b894"} Jan 20 17:43:34 crc kubenswrapper[4558]: I0120 17:43:34.453927 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"d6e6a03e-c496-4589-8d51-f6d9e89e14ae","Type":"ContainerStarted","Data":"391bf6661c1200e7d892ab027500c3b569ef3b9e9fbd3bdab1b7ee5442423861"} Jan 20 17:43:34 crc kubenswrapper[4558]: I0120 17:43:34.453980 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"d6e6a03e-c496-4589-8d51-f6d9e89e14ae","Type":"ContainerStarted","Data":"7e87cd2bf2bf667d667a6643f4f7c164cbd3aeeebc96908debb2fae778b18645"} Jan 20 17:43:34 crc kubenswrapper[4558]: I0120 17:43:34.464701 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-external-api-0" podStartSLOduration=5.464685835 podStartE2EDuration="5.464685835s" podCreationTimestamp="2026-01-20 17:43:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:43:34.462118509 +0000 UTC m=+3708.222456475" watchObservedRunningTime="2026-01-20 17:43:34.464685835 +0000 UTC m=+3708.225023803" Jan 20 17:43:34 crc kubenswrapper[4558]: I0120 17:43:34.559626 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podStartSLOduration=3.559597571 podStartE2EDuration="3.559597571s" podCreationTimestamp="2026-01-20 17:43:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:43:34.508958649 +0000 UTC m=+3708.269296616" watchObservedRunningTime="2026-01-20 17:43:34.559597571 +0000 UTC m=+3708.319935539" Jan 20 17:43:34 crc kubenswrapper[4558]: I0120 17:43:34.590027 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ovn-northd-0" podStartSLOduration=3.590003763 podStartE2EDuration="3.590003763s" podCreationTimestamp="2026-01-20 17:43:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2026-01-20 17:43:34.520928985 +0000 UTC m=+3708.281266953" watchObservedRunningTime="2026-01-20 17:43:34.590003763 +0000 UTC m=+3708.350341901" Jan 20 17:43:34 crc kubenswrapper[4558]: I0120 17:43:34.613117 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-internal-api-0" podStartSLOduration=6.612875137 podStartE2EDuration="6.612875137s" podCreationTimestamp="2026-01-20 17:43:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:43:34.559695887 +0000 UTC m=+3708.320033854" watchObservedRunningTime="2026-01-20 17:43:34.612875137 +0000 UTC m=+3708.373213104" Jan 20 17:43:34 crc kubenswrapper[4558]: I0120 17:43:34.641796 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d791ceee-e87d-4238-9267-c2d2c53faf96" path="/var/lib/kubelet/pods/d791ceee-e87d-4238-9267-c2d2c53faf96/volumes" Jan 20 17:43:34 crc kubenswrapper[4558]: I0120 17:43:34.643153 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:43:34 crc kubenswrapper[4558]: I0120 17:43:34.727285 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:43:34 crc kubenswrapper[4558]: I0120 17:43:34.727384 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:43:34 crc kubenswrapper[4558]: I0120 17:43:34.856487 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:43:35 crc kubenswrapper[4558]: I0120 17:43:35.467209 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"38a5808f-1fa5-49cb-afb7-d7676416cd26","Type":"ContainerStarted","Data":"18a3a7a71ea0a749f520d355bf36542849b6f575c3537150265c17afa8aafef5"} Jan 20 17:43:35 crc kubenswrapper[4558]: I0120 17:43:35.468751 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"ea72c008-3a66-4577-8042-4b1e0ed1cca6","Type":"ContainerStarted","Data":"84973fad4d843f75c78030000ec0fa0ce538b6a078241540dc44a79641a6a858"} Jan 20 17:43:35 crc kubenswrapper[4558]: I0120 17:43:35.468945 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:43:35 crc kubenswrapper[4558]: I0120 17:43:35.471301 4558 generic.go:334] "Generic (PLEG): container finished" podID="50cacddd-ebea-477f-af64-6e96a09a242e" containerID="3614cef23b8586e57abf934d633380dd75e1dc52694d97ee527c65e3ded2f54c" exitCode=1 Jan 20 17:43:35 crc kubenswrapper[4558]: I0120 17:43:35.471373 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"50cacddd-ebea-477f-af64-6e96a09a242e","Type":"ContainerDied","Data":"3614cef23b8586e57abf934d633380dd75e1dc52694d97ee527c65e3ded2f54c"} Jan 20 17:43:35 crc kubenswrapper[4558]: I0120 17:43:35.471604 4558 scope.go:117] "RemoveContainer" containerID="3614cef23b8586e57abf934d633380dd75e1dc52694d97ee527c65e3ded2f54c" Jan 20 17:43:35 crc kubenswrapper[4558]: I0120 17:43:35.474956 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" 
event={"ID":"34a4b6ee-5830-4532-b829-a91cedcf8069","Type":"ContainerStarted","Data":"a66c79daa262ed9158df917bb1b14c58d89db5c7e59a38fe08277a4bfae9eb16"} Jan 20 17:43:35 crc kubenswrapper[4558]: I0120 17:43:35.476319 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"48a3e4b6-ae17-449f-81f8-e521d7f16b7f","Type":"ContainerStarted","Data":"2fe2d6033ceb5c948ed32e5c9455381d57dd67590fa46b232c29a099df3a1f5e"} Jan 20 17:43:35 crc kubenswrapper[4558]: I0120 17:43:35.476346 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"48a3e4b6-ae17-449f-81f8-e521d7f16b7f","Type":"ContainerStarted","Data":"2721b2581c108773c46b6cc13a2582297ea62b63b7e3b16a3dafaae823677fbe"} Jan 20 17:43:35 crc kubenswrapper[4558]: I0120 17:43:35.477234 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:43:35 crc kubenswrapper[4558]: I0120 17:43:35.497728 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/openstack-galera-0" podStartSLOduration=7.497706862 podStartE2EDuration="7.497706862s" podCreationTimestamp="2026-01-20 17:43:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:43:35.489562649 +0000 UTC m=+3709.249900616" watchObservedRunningTime="2026-01-20 17:43:35.497706862 +0000 UTC m=+3709.258044829" Jan 20 17:43:35 crc kubenswrapper[4558]: I0120 17:43:35.535009 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/openstack-cell1-galera-0" podStartSLOduration=7.534988701 podStartE2EDuration="7.534988701s" podCreationTimestamp="2026-01-20 17:43:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:43:35.519829798 +0000 UTC m=+3709.280167766" watchObservedRunningTime="2026-01-20 17:43:35.534988701 +0000 UTC m=+3709.295326658" Jan 20 17:43:35 crc kubenswrapper[4558]: I0120 17:43:35.593670 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:43:35 crc kubenswrapper[4558]: I0120 17:43:35.650097 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:43:35 crc kubenswrapper[4558]: I0120 17:43:35.856230 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:43:35 crc kubenswrapper[4558]: I0120 17:43:35.898764 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:43:36 crc kubenswrapper[4558]: I0120 17:43:36.207337 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/barbican-api-7bc45756fb-278vp" podUID="432ec648-18a7-416a-85cc-409a30976a67" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.1.28:9311/healthcheck\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:43:36 crc kubenswrapper[4558]: I0120 17:43:36.491424 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"48a3e4b6-ae17-449f-81f8-e521d7f16b7f","Type":"ContainerStarted","Data":"4c0d28cc228b6021507306e977f98df789f246ebd560ca7ac4eed76b974bca68"} Jan 
20 17:43:36 crc kubenswrapper[4558]: I0120 17:43:36.493864 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"50cacddd-ebea-477f-af64-6e96a09a242e","Type":"ContainerStarted","Data":"bb8488b5aee706047405a606b3a23b8bd6228cb32059c06329cbdb7eefcf16f2"} Jan 20 17:43:37 crc kubenswrapper[4558]: I0120 17:43:37.296796 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:43:37 crc kubenswrapper[4558]: I0120 17:43:37.503447 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"48a3e4b6-ae17-449f-81f8-e521d7f16b7f","Type":"ContainerStarted","Data":"6291ffd1b39b779da16b44b8132159644811a98d6ee34190d04a465d8cafa3e8"} Jan 20 17:43:37 crc kubenswrapper[4558]: I0120 17:43:37.505779 4558 generic.go:334] "Generic (PLEG): container finished" podID="ea72c008-3a66-4577-8042-4b1e0ed1cca6" containerID="84973fad4d843f75c78030000ec0fa0ce538b6a078241540dc44a79641a6a858" exitCode=1 Jan 20 17:43:37 crc kubenswrapper[4558]: I0120 17:43:37.507074 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"ea72c008-3a66-4577-8042-4b1e0ed1cca6","Type":"ContainerDied","Data":"84973fad4d843f75c78030000ec0fa0ce538b6a078241540dc44a79641a6a858"} Jan 20 17:43:37 crc kubenswrapper[4558]: I0120 17:43:37.507117 4558 scope.go:117] "RemoveContainer" containerID="f78a0d7e8dd2168ce02d05fa6fef206d25ece8bae681f33345521bfe6b0ff1e2" Jan 20 17:43:37 crc kubenswrapper[4558]: I0120 17:43:37.507556 4558 scope.go:117] "RemoveContainer" containerID="84973fad4d843f75c78030000ec0fa0ce538b6a078241540dc44a79641a6a858" Jan 20 17:43:37 crc kubenswrapper[4558]: E0120 17:43:37.507831 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-cell1-conductor-conductor\" with CrashLoopBackOff: \"back-off 10s restarting failed container=nova-cell1-conductor-conductor pod=nova-cell1-conductor-0_openstack-kuttl-tests(ea72c008-3a66-4577-8042-4b1e0ed1cca6)\"" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podUID="ea72c008-3a66-4577-8042-4b1e0ed1cca6" Jan 20 17:43:37 crc kubenswrapper[4558]: I0120 17:43:37.544840 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:43:37 crc kubenswrapper[4558]: I0120 17:43:37.551004 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:43:37 crc kubenswrapper[4558]: I0120 17:43:37.836459 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack-kuttl-tests/barbican-api-7bc45756fb-278vp" podUID="432ec648-18a7-416a-85cc-409a30976a67" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.1.28:9311/healthcheck\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:43:38 crc kubenswrapper[4558]: E0120 17:43:38.297801 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of bb8488b5aee706047405a606b3a23b8bd6228cb32059c06329cbdb7eefcf16f2 is running failed: container process not found" containerID="bb8488b5aee706047405a606b3a23b8bd6228cb32059c06329cbdb7eefcf16f2" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:43:38 crc kubenswrapper[4558]: E0120 17:43:38.298665 4558 log.go:32] "ExecSync cmd from 
runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of bb8488b5aee706047405a606b3a23b8bd6228cb32059c06329cbdb7eefcf16f2 is running failed: container process not found" containerID="bb8488b5aee706047405a606b3a23b8bd6228cb32059c06329cbdb7eefcf16f2" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:43:38 crc kubenswrapper[4558]: E0120 17:43:38.298967 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of bb8488b5aee706047405a606b3a23b8bd6228cb32059c06329cbdb7eefcf16f2 is running failed: container process not found" containerID="bb8488b5aee706047405a606b3a23b8bd6228cb32059c06329cbdb7eefcf16f2" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:43:38 crc kubenswrapper[4558]: E0120 17:43:38.298997 4558 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of bb8488b5aee706047405a606b3a23b8bd6228cb32059c06329cbdb7eefcf16f2 is running failed: container process not found" probeType="Liveness" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="50cacddd-ebea-477f-af64-6e96a09a242e" containerName="nova-cell0-conductor-conductor" Jan 20 17:43:38 crc kubenswrapper[4558]: I0120 17:43:38.520991 4558 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:43:38 crc kubenswrapper[4558]: I0120 17:43:38.525235 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"48a3e4b6-ae17-449f-81f8-e521d7f16b7f","Type":"ContainerStarted","Data":"1f580fae1864e3169dd64566d04d693dc377a53c846267d9e12639cffaa324e9"} Jan 20 17:43:38 crc kubenswrapper[4558]: I0120 17:43:38.525403 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:38 crc kubenswrapper[4558]: I0120 17:43:38.531516 4558 scope.go:117] "RemoveContainer" containerID="84973fad4d843f75c78030000ec0fa0ce538b6a078241540dc44a79641a6a858" Jan 20 17:43:38 crc kubenswrapper[4558]: E0120 17:43:38.532025 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-cell1-conductor-conductor\" with CrashLoopBackOff: \"back-off 10s restarting failed container=nova-cell1-conductor-conductor pod=nova-cell1-conductor-0_openstack-kuttl-tests(ea72c008-3a66-4577-8042-4b1e0ed1cca6)\"" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podUID="ea72c008-3a66-4577-8042-4b1e0ed1cca6" Jan 20 17:43:38 crc kubenswrapper[4558]: I0120 17:43:38.535647 4558 generic.go:334] "Generic (PLEG): container finished" podID="50cacddd-ebea-477f-af64-6e96a09a242e" containerID="bb8488b5aee706047405a606b3a23b8bd6228cb32059c06329cbdb7eefcf16f2" exitCode=1 Jan 20 17:43:38 crc kubenswrapper[4558]: I0120 17:43:38.535729 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"50cacddd-ebea-477f-af64-6e96a09a242e","Type":"ContainerDied","Data":"bb8488b5aee706047405a606b3a23b8bd6228cb32059c06329cbdb7eefcf16f2"} Jan 20 17:43:38 crc kubenswrapper[4558]: I0120 17:43:38.535782 4558 scope.go:117] "RemoveContainer" containerID="3614cef23b8586e57abf934d633380dd75e1dc52694d97ee527c65e3ded2f54c" Jan 20 17:43:38 crc kubenswrapper[4558]: I0120 17:43:38.536095 4558 scope.go:117] "RemoveContainer" 
containerID="bb8488b5aee706047405a606b3a23b8bd6228cb32059c06329cbdb7eefcf16f2" Jan 20 17:43:38 crc kubenswrapper[4558]: E0120 17:43:38.536439 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-cell0-conductor-conductor\" with CrashLoopBackOff: \"back-off 10s restarting failed container=nova-cell0-conductor-conductor pod=nova-cell0-conductor-0_openstack-kuttl-tests(50cacddd-ebea-477f-af64-6e96a09a242e)\"" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="50cacddd-ebea-477f-af64-6e96a09a242e" Jan 20 17:43:38 crc kubenswrapper[4558]: I0120 17:43:38.551666 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=2.256256107 podStartE2EDuration="5.551638675s" podCreationTimestamp="2026-01-20 17:43:33 +0000 UTC" firstStartedPulling="2026-01-20 17:43:34.408304194 +0000 UTC m=+3708.168642162" lastFinishedPulling="2026-01-20 17:43:37.703686763 +0000 UTC m=+3711.464024730" observedRunningTime="2026-01-20 17:43:38.546252017 +0000 UTC m=+3712.306589984" watchObservedRunningTime="2026-01-20 17:43:38.551638675 +0000 UTC m=+3712.311976642" Jan 20 17:43:38 crc kubenswrapper[4558]: I0120 17:43:38.663238 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:43:38 crc kubenswrapper[4558]: I0120 17:43:38.748696 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:43:38 crc kubenswrapper[4558]: I0120 17:43:38.775625 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:43:38 crc kubenswrapper[4558]: I0120 17:43:38.804899 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/openstack-cell1-galera-0" podUID="34a4b6ee-5830-4532-b829-a91cedcf8069" containerName="galera" containerID="cri-o://a66c79daa262ed9158df917bb1b14c58d89db5c7e59a38fe08277a4bfae9eb16" gracePeriod=30 Jan 20 17:43:38 crc kubenswrapper[4558]: I0120 17:43:38.839342 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/barbican-api-7bc45756fb-278vp" podUID="432ec648-18a7-416a-85cc-409a30976a67" containerName="barbican-api" probeResult="failure" output="Get \"https://10.217.1.28:9311/healthcheck\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:43:38 crc kubenswrapper[4558]: I0120 17:43:38.973923 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:43:38 crc kubenswrapper[4558]: I0120 17:43:38.973985 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:43:39 crc kubenswrapper[4558]: I0120 17:43:39.281803 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:43:39 crc kubenswrapper[4558]: I0120 17:43:39.282274 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-api-0" podUID="56d39f72-48d3-4690-a20a-099cb41daa7e" containerName="cinder-api-log" containerID="cri-o://9d8c0d2d57ebdacd4f483efdc5e6ebefff5d373523eeebba687917ca86c856be" gracePeriod=30 Jan 20 17:43:39 crc kubenswrapper[4558]: I0120 17:43:39.282701 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-api-0" 
podUID="56d39f72-48d3-4690-a20a-099cb41daa7e" containerName="cinder-api" containerID="cri-o://3358b6c2c0a492ce2df7675567ba772d472b5f09db162d54f169def48d48b722" gracePeriod=30 Jan 20 17:43:39 crc kubenswrapper[4558]: I0120 17:43:39.283408 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:43:39 crc kubenswrapper[4558]: I0120 17:43:39.295332 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:43:39 crc kubenswrapper[4558]: I0120 17:43:39.301315 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/cinder-api-0" podUID="56d39f72-48d3-4690-a20a-099cb41daa7e" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.1.32:8776/healthcheck\": EOF" Jan 20 17:43:39 crc kubenswrapper[4558]: I0120 17:43:39.305965 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:43:39 crc kubenswrapper[4558]: I0120 17:43:39.343946 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:43:39 crc kubenswrapper[4558]: I0120 17:43:39.415971 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:43:39 crc kubenswrapper[4558]: I0120 17:43:39.416012 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:43:39 crc kubenswrapper[4558]: I0120 17:43:39.431020 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:43:39 crc kubenswrapper[4558]: I0120 17:43:39.437035 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:43:39 crc kubenswrapper[4558]: I0120 17:43:39.437292 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="a226d25c-395c-4744-a42e-46beee40d8e4" containerName="glance-log" containerID="cri-o://219c134e0aca9d6bec94b18c2506bf6bb9bb0bf7e39c133a7d64764bf6f7e5e2" gracePeriod=30 Jan 20 17:43:39 crc kubenswrapper[4558]: I0120 17:43:39.437432 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="a226d25c-395c-4744-a42e-46beee40d8e4" containerName="glance-httpd" containerID="cri-o://6cd4eed6d850e065b4e90baa84aa2f605080fb6620a3f314df9331e9a7480679" gracePeriod=30 Jan 20 17:43:39 crc kubenswrapper[4558]: I0120 17:43:39.549386 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:43:39 crc kubenswrapper[4558]: I0120 17:43:39.549644 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="5e0d404f-75d1-4be8-9c12-803d8db759e6" containerName="glance-log" containerID="cri-o://a28f5d4301df9252556516004020ebc32a69c49a2a100beb6220ce8b200a5a7b" gracePeriod=30 Jan 20 17:43:39 crc kubenswrapper[4558]: I0120 17:43:39.550109 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="5e0d404f-75d1-4be8-9c12-803d8db759e6" containerName="glance-httpd" 
containerID="cri-o://97feb272063e6cb10279ddbd244ed9abed0f4fe2c8c6974b1016d61ab6e14c8d" gracePeriod=30 Jan 20 17:43:39 crc kubenswrapper[4558]: I0120 17:43:39.567188 4558 scope.go:117] "RemoveContainer" containerID="bb8488b5aee706047405a606b3a23b8bd6228cb32059c06329cbdb7eefcf16f2" Jan 20 17:43:39 crc kubenswrapper[4558]: E0120 17:43:39.567512 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-cell0-conductor-conductor\" with CrashLoopBackOff: \"back-off 10s restarting failed container=nova-cell0-conductor-conductor pod=nova-cell0-conductor-0_openstack-kuttl-tests(50cacddd-ebea-477f-af64-6e96a09a242e)\"" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="50cacddd-ebea-477f-af64-6e96a09a242e" Jan 20 17:43:39 crc kubenswrapper[4558]: I0120 17:43:39.600474 4558 generic.go:334] "Generic (PLEG): container finished" podID="56d39f72-48d3-4690-a20a-099cb41daa7e" containerID="9d8c0d2d57ebdacd4f483efdc5e6ebefff5d373523eeebba687917ca86c856be" exitCode=143 Jan 20 17:43:39 crc kubenswrapper[4558]: I0120 17:43:39.600544 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"56d39f72-48d3-4690-a20a-099cb41daa7e","Type":"ContainerDied","Data":"9d8c0d2d57ebdacd4f483efdc5e6ebefff5d373523eeebba687917ca86c856be"} Jan 20 17:43:39 crc kubenswrapper[4558]: I0120 17:43:39.682362 4558 generic.go:334] "Generic (PLEG): container finished" podID="34a4b6ee-5830-4532-b829-a91cedcf8069" containerID="a66c79daa262ed9158df917bb1b14c58d89db5c7e59a38fe08277a4bfae9eb16" exitCode=0 Jan 20 17:43:39 crc kubenswrapper[4558]: I0120 17:43:39.683704 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"34a4b6ee-5830-4532-b829-a91cedcf8069","Type":"ContainerDied","Data":"a66c79daa262ed9158df917bb1b14c58d89db5c7e59a38fe08277a4bfae9eb16"} Jan 20 17:43:39 crc kubenswrapper[4558]: I0120 17:43:39.683743 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"34a4b6ee-5830-4532-b829-a91cedcf8069","Type":"ContainerDied","Data":"f3945a9151d35b7fd0f1d79dc1260eebc823fc3581aa44238a46f808b13f16d3"} Jan 20 17:43:39 crc kubenswrapper[4558]: I0120 17:43:39.683757 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f3945a9151d35b7fd0f1d79dc1260eebc823fc3581aa44238a46f808b13f16d3" Jan 20 17:43:39 crc kubenswrapper[4558]: I0120 17:43:39.717422 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:43:39 crc kubenswrapper[4558]: I0120 17:43:39.728432 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:43:39 crc kubenswrapper[4558]: I0120 17:43:39.728483 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:43:39 crc kubenswrapper[4558]: I0120 17:43:39.733427 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:43:39 crc kubenswrapper[4558]: I0120 17:43:39.824006 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/34a4b6ee-5830-4532-b829-a91cedcf8069-config-data-default\") pod \"34a4b6ee-5830-4532-b829-a91cedcf8069\" (UID: \"34a4b6ee-5830-4532-b829-a91cedcf8069\") " Jan 20 17:43:39 crc kubenswrapper[4558]: I0120 17:43:39.824127 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/34a4b6ee-5830-4532-b829-a91cedcf8069-operator-scripts\") pod \"34a4b6ee-5830-4532-b829-a91cedcf8069\" (UID: \"34a4b6ee-5830-4532-b829-a91cedcf8069\") " Jan 20 17:43:39 crc kubenswrapper[4558]: I0120 17:43:39.824189 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/34a4b6ee-5830-4532-b829-a91cedcf8069-kolla-config\") pod \"34a4b6ee-5830-4532-b829-a91cedcf8069\" (UID: \"34a4b6ee-5830-4532-b829-a91cedcf8069\") " Jan 20 17:43:39 crc kubenswrapper[4558]: I0120 17:43:39.824253 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/34a4b6ee-5830-4532-b829-a91cedcf8069-config-data-generated\") pod \"34a4b6ee-5830-4532-b829-a91cedcf8069\" (UID: \"34a4b6ee-5830-4532-b829-a91cedcf8069\") " Jan 20 17:43:39 crc kubenswrapper[4558]: I0120 17:43:39.824373 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7bk69\" (UniqueName: \"kubernetes.io/projected/34a4b6ee-5830-4532-b829-a91cedcf8069-kube-api-access-7bk69\") pod \"34a4b6ee-5830-4532-b829-a91cedcf8069\" (UID: \"34a4b6ee-5830-4532-b829-a91cedcf8069\") " Jan 20 17:43:39 crc kubenswrapper[4558]: I0120 17:43:39.824461 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"34a4b6ee-5830-4532-b829-a91cedcf8069\" (UID: \"34a4b6ee-5830-4532-b829-a91cedcf8069\") " Jan 20 17:43:39 crc kubenswrapper[4558]: I0120 17:43:39.824499 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34a4b6ee-5830-4532-b829-a91cedcf8069-combined-ca-bundle\") pod \"34a4b6ee-5830-4532-b829-a91cedcf8069\" (UID: \"34a4b6ee-5830-4532-b829-a91cedcf8069\") " Jan 20 17:43:39 crc kubenswrapper[4558]: I0120 17:43:39.824546 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/34a4b6ee-5830-4532-b829-a91cedcf8069-galera-tls-certs\") pod \"34a4b6ee-5830-4532-b829-a91cedcf8069\" (UID: \"34a4b6ee-5830-4532-b829-a91cedcf8069\") " Jan 20 17:43:39 crc kubenswrapper[4558]: I0120 17:43:39.826438 4558 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/34a4b6ee-5830-4532-b829-a91cedcf8069-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "34a4b6ee-5830-4532-b829-a91cedcf8069" (UID: "34a4b6ee-5830-4532-b829-a91cedcf8069"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:43:39 crc kubenswrapper[4558]: I0120 17:43:39.827651 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/34a4b6ee-5830-4532-b829-a91cedcf8069-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "34a4b6ee-5830-4532-b829-a91cedcf8069" (UID: "34a4b6ee-5830-4532-b829-a91cedcf8069"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:43:39 crc kubenswrapper[4558]: I0120 17:43:39.828099 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/34a4b6ee-5830-4532-b829-a91cedcf8069-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "34a4b6ee-5830-4532-b829-a91cedcf8069" (UID: "34a4b6ee-5830-4532-b829-a91cedcf8069"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:43:39 crc kubenswrapper[4558]: I0120 17:43:39.834375 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/34a4b6ee-5830-4532-b829-a91cedcf8069-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "34a4b6ee-5830-4532-b829-a91cedcf8069" (UID: "34a4b6ee-5830-4532-b829-a91cedcf8069"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:43:39 crc kubenswrapper[4558]: I0120 17:43:39.859350 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/34a4b6ee-5830-4532-b829-a91cedcf8069-kube-api-access-7bk69" (OuterVolumeSpecName: "kube-api-access-7bk69") pod "34a4b6ee-5830-4532-b829-a91cedcf8069" (UID: "34a4b6ee-5830-4532-b829-a91cedcf8069"). InnerVolumeSpecName "kube-api-access-7bk69". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:43:39 crc kubenswrapper[4558]: I0120 17:43:39.890294 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/34a4b6ee-5830-4532-b829-a91cedcf8069-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "34a4b6ee-5830-4532-b829-a91cedcf8069" (UID: "34a4b6ee-5830-4532-b829-a91cedcf8069"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:39 crc kubenswrapper[4558]: I0120 17:43:39.923307 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage15-crc" (OuterVolumeSpecName: "mysql-db") pod "34a4b6ee-5830-4532-b829-a91cedcf8069" (UID: "34a4b6ee-5830-4532-b829-a91cedcf8069"). InnerVolumeSpecName "local-storage15-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:43:39 crc kubenswrapper[4558]: I0120 17:43:39.938464 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/34a4b6ee-5830-4532-b829-a91cedcf8069-config-data-default\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:39 crc kubenswrapper[4558]: I0120 17:43:39.938499 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/34a4b6ee-5830-4532-b829-a91cedcf8069-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:39 crc kubenswrapper[4558]: I0120 17:43:39.938510 4558 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/34a4b6ee-5830-4532-b829-a91cedcf8069-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:39 crc kubenswrapper[4558]: I0120 17:43:39.938521 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/34a4b6ee-5830-4532-b829-a91cedcf8069-config-data-generated\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:39 crc kubenswrapper[4558]: I0120 17:43:39.938533 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7bk69\" (UniqueName: \"kubernetes.io/projected/34a4b6ee-5830-4532-b829-a91cedcf8069-kube-api-access-7bk69\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:39 crc kubenswrapper[4558]: I0120 17:43:39.938556 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") on node \"crc\" " Jan 20 17:43:39 crc kubenswrapper[4558]: I0120 17:43:39.938567 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34a4b6ee-5830-4532-b829-a91cedcf8069-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:39 crc kubenswrapper[4558]: I0120 17:43:39.961414 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/34a4b6ee-5830-4532-b829-a91cedcf8069-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "34a4b6ee-5830-4532-b829-a91cedcf8069" (UID: "34a4b6ee-5830-4532-b829-a91cedcf8069"). InnerVolumeSpecName "galera-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:39 crc kubenswrapper[4558]: I0120 17:43:39.990607 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" podUID="4ea608e7-9a19-47fb-8e14-a629451e7c03" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.1.34:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:43:39 crc kubenswrapper[4558]: I0120 17:43:39.991104 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" podUID="4ea608e7-9a19-47fb-8e14-a629451e7c03" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.1.34:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.000876 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage15-crc" (UniqueName: "kubernetes.io/local-volume/local-storage15-crc") on node "crc" Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.018647 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.041524 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.041567 4558 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/34a4b6ee-5830-4532-b829-a91cedcf8069-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.080684 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.198061 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.703440 4558 generic.go:334] "Generic (PLEG): container finished" podID="5e0d404f-75d1-4be8-9c12-803d8db759e6" containerID="a28f5d4301df9252556516004020ebc32a69c49a2a100beb6220ce8b200a5a7b" exitCode=143 Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.703737 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"5e0d404f-75d1-4be8-9c12-803d8db759e6","Type":"ContainerDied","Data":"a28f5d4301df9252556516004020ebc32a69c49a2a100beb6220ce8b200a5a7b"} Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.703947 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.710038 4558 generic.go:334] "Generic (PLEG): container finished" podID="a226d25c-395c-4744-a42e-46beee40d8e4" containerID="6cd4eed6d850e065b4e90baa84aa2f605080fb6620a3f314df9331e9a7480679" exitCode=0 Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.710058 4558 generic.go:334] "Generic (PLEG): container finished" podID="a226d25c-395c-4744-a42e-46beee40d8e4" containerID="219c134e0aca9d6bec94b18c2506bf6bb9bb0bf7e39c133a7d64764bf6f7e5e2" exitCode=143 Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.710122 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.711212 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"a226d25c-395c-4744-a42e-46beee40d8e4","Type":"ContainerDied","Data":"6cd4eed6d850e065b4e90baa84aa2f605080fb6620a3f314df9331e9a7480679"} Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.711269 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"a226d25c-395c-4744-a42e-46beee40d8e4","Type":"ContainerDied","Data":"219c134e0aca9d6bec94b18c2506bf6bb9bb0bf7e39c133a7d64764bf6f7e5e2"} Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.711284 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"a226d25c-395c-4744-a42e-46beee40d8e4","Type":"ContainerDied","Data":"c2d5f332e14bf6025abb08a1b2a602143e272b9ab74340dd7eb94b7c005b01d8"} Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.711305 4558 scope.go:117] "RemoveContainer" containerID="6cd4eed6d850e065b4e90baa84aa2f605080fb6620a3f314df9331e9a7480679" Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.760844 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rdzjm\" (UniqueName: \"kubernetes.io/projected/a226d25c-395c-4744-a42e-46beee40d8e4-kube-api-access-rdzjm\") pod \"a226d25c-395c-4744-a42e-46beee40d8e4\" (UID: \"a226d25c-395c-4744-a42e-46beee40d8e4\") " Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.760940 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a226d25c-395c-4744-a42e-46beee40d8e4-config-data\") pod \"a226d25c-395c-4744-a42e-46beee40d8e4\" (UID: \"a226d25c-395c-4744-a42e-46beee40d8e4\") " Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.760989 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a226d25c-395c-4744-a42e-46beee40d8e4-public-tls-certs\") pod \"a226d25c-395c-4744-a42e-46beee40d8e4\" (UID: \"a226d25c-395c-4744-a42e-46beee40d8e4\") " Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.761016 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"a226d25c-395c-4744-a42e-46beee40d8e4\" (UID: \"a226d25c-395c-4744-a42e-46beee40d8e4\") " Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.761069 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a226d25c-395c-4744-a42e-46beee40d8e4-combined-ca-bundle\") pod \"a226d25c-395c-4744-a42e-46beee40d8e4\" (UID: \"a226d25c-395c-4744-a42e-46beee40d8e4\") " Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.761122 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a226d25c-395c-4744-a42e-46beee40d8e4-httpd-run\") pod \"a226d25c-395c-4744-a42e-46beee40d8e4\" (UID: \"a226d25c-395c-4744-a42e-46beee40d8e4\") " Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.761349 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/a226d25c-395c-4744-a42e-46beee40d8e4-scripts\") pod \"a226d25c-395c-4744-a42e-46beee40d8e4\" (UID: \"a226d25c-395c-4744-a42e-46beee40d8e4\") " Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.761371 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a226d25c-395c-4744-a42e-46beee40d8e4-logs\") pod \"a226d25c-395c-4744-a42e-46beee40d8e4\" (UID: \"a226d25c-395c-4744-a42e-46beee40d8e4\") " Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.765700 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a226d25c-395c-4744-a42e-46beee40d8e4-logs" (OuterVolumeSpecName: "logs") pod "a226d25c-395c-4744-a42e-46beee40d8e4" (UID: "a226d25c-395c-4744-a42e-46beee40d8e4"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.766033 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a226d25c-395c-4744-a42e-46beee40d8e4-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "a226d25c-395c-4744-a42e-46beee40d8e4" (UID: "a226d25c-395c-4744-a42e-46beee40d8e4"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.767599 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-metadata-0" podUID="572d7a53-9a17-43e3-bc12-a04f994eb857" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.1.43:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.767878 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-metadata-0" podUID="572d7a53-9a17-43e3-bc12-a04f994eb857" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.1.43:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.768627 4558 scope.go:117] "RemoveContainer" containerID="219c134e0aca9d6bec94b18c2506bf6bb9bb0bf7e39c133a7d64764bf6f7e5e2" Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.778422 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a226d25c-395c-4744-a42e-46beee40d8e4-kube-api-access-rdzjm" (OuterVolumeSpecName: "kube-api-access-rdzjm") pod "a226d25c-395c-4744-a42e-46beee40d8e4" (UID: "a226d25c-395c-4744-a42e-46beee40d8e4"). InnerVolumeSpecName "kube-api-access-rdzjm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.798324 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "glance") pod "a226d25c-395c-4744-a42e-46beee40d8e4" (UID: "a226d25c-395c-4744-a42e-46beee40d8e4"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.806095 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a226d25c-395c-4744-a42e-46beee40d8e4-scripts" (OuterVolumeSpecName: "scripts") pod "a226d25c-395c-4744-a42e-46beee40d8e4" (UID: "a226d25c-395c-4744-a42e-46beee40d8e4"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.838706 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.841468 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack-kuttl-tests/barbican-api-7bc45756fb-278vp" podUID="432ec648-18a7-416a-85cc-409a30976a67" containerName="barbican-api" probeResult="failure" output="Get \"https://10.217.1.28:9311/healthcheck\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.844466 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.850187 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:43:40 crc kubenswrapper[4558]: E0120 17:43:40.850618 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="34a4b6ee-5830-4532-b829-a91cedcf8069" containerName="galera" Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.850631 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="34a4b6ee-5830-4532-b829-a91cedcf8069" containerName="galera" Jan 20 17:43:40 crc kubenswrapper[4558]: E0120 17:43:40.850653 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a226d25c-395c-4744-a42e-46beee40d8e4" containerName="glance-httpd" Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.850659 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a226d25c-395c-4744-a42e-46beee40d8e4" containerName="glance-httpd" Jan 20 17:43:40 crc kubenswrapper[4558]: E0120 17:43:40.850672 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="34a4b6ee-5830-4532-b829-a91cedcf8069" containerName="mysql-bootstrap" Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.850678 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="34a4b6ee-5830-4532-b829-a91cedcf8069" containerName="mysql-bootstrap" Jan 20 17:43:40 crc kubenswrapper[4558]: E0120 17:43:40.850701 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a226d25c-395c-4744-a42e-46beee40d8e4" containerName="glance-log" Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.850706 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a226d25c-395c-4744-a42e-46beee40d8e4" containerName="glance-log" Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.850894 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="34a4b6ee-5830-4532-b829-a91cedcf8069" containerName="galera" Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.850925 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a226d25c-395c-4744-a42e-46beee40d8e4" containerName="glance-httpd" Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.850938 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a226d25c-395c-4744-a42e-46beee40d8e4" containerName="glance-log" Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.851999 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.854032 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.856192 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-cell1-scripts" Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.856436 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"galera-openstack-cell1-dockercfg-mk4rp" Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.856555 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-galera-openstack-cell1-svc" Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.856676 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-cell1-config-data" Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.864013 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a226d25c-395c-4744-a42e-46beee40d8e4-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.864035 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a226d25c-395c-4744-a42e-46beee40d8e4-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.864046 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rdzjm\" (UniqueName: \"kubernetes.io/projected/a226d25c-395c-4744-a42e-46beee40d8e4-kube-api-access-rdzjm\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.864069 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.864082 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a226d25c-395c-4744-a42e-46beee40d8e4-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.891585 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.896088 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a226d25c-395c-4744-a42e-46beee40d8e4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a226d25c-395c-4744-a42e-46beee40d8e4" (UID: "a226d25c-395c-4744-a42e-46beee40d8e4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.898392 4558 scope.go:117] "RemoveContainer" containerID="6cd4eed6d850e065b4e90baa84aa2f605080fb6620a3f314df9331e9a7480679" Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.899389 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a226d25c-395c-4744-a42e-46beee40d8e4-config-data" (OuterVolumeSpecName: "config-data") pod "a226d25c-395c-4744-a42e-46beee40d8e4" (UID: "a226d25c-395c-4744-a42e-46beee40d8e4"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:40 crc kubenswrapper[4558]: E0120 17:43:40.899588 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6cd4eed6d850e065b4e90baa84aa2f605080fb6620a3f314df9331e9a7480679\": container with ID starting with 6cd4eed6d850e065b4e90baa84aa2f605080fb6620a3f314df9331e9a7480679 not found: ID does not exist" containerID="6cd4eed6d850e065b4e90baa84aa2f605080fb6620a3f314df9331e9a7480679" Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.899619 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6cd4eed6d850e065b4e90baa84aa2f605080fb6620a3f314df9331e9a7480679"} err="failed to get container status \"6cd4eed6d850e065b4e90baa84aa2f605080fb6620a3f314df9331e9a7480679\": rpc error: code = NotFound desc = could not find container \"6cd4eed6d850e065b4e90baa84aa2f605080fb6620a3f314df9331e9a7480679\": container with ID starting with 6cd4eed6d850e065b4e90baa84aa2f605080fb6620a3f314df9331e9a7480679 not found: ID does not exist" Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.899639 4558 scope.go:117] "RemoveContainer" containerID="219c134e0aca9d6bec94b18c2506bf6bb9bb0bf7e39c133a7d64764bf6f7e5e2" Jan 20 17:43:40 crc kubenswrapper[4558]: E0120 17:43:40.902888 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"219c134e0aca9d6bec94b18c2506bf6bb9bb0bf7e39c133a7d64764bf6f7e5e2\": container with ID starting with 219c134e0aca9d6bec94b18c2506bf6bb9bb0bf7e39c133a7d64764bf6f7e5e2 not found: ID does not exist" containerID="219c134e0aca9d6bec94b18c2506bf6bb9bb0bf7e39c133a7d64764bf6f7e5e2" Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.902918 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"219c134e0aca9d6bec94b18c2506bf6bb9bb0bf7e39c133a7d64764bf6f7e5e2"} err="failed to get container status \"219c134e0aca9d6bec94b18c2506bf6bb9bb0bf7e39c133a7d64764bf6f7e5e2\": rpc error: code = NotFound desc = could not find container \"219c134e0aca9d6bec94b18c2506bf6bb9bb0bf7e39c133a7d64764bf6f7e5e2\": container with ID starting with 219c134e0aca9d6bec94b18c2506bf6bb9bb0bf7e39c133a7d64764bf6f7e5e2 not found: ID does not exist" Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.902933 4558 scope.go:117] "RemoveContainer" containerID="6cd4eed6d850e065b4e90baa84aa2f605080fb6620a3f314df9331e9a7480679" Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.907376 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6cd4eed6d850e065b4e90baa84aa2f605080fb6620a3f314df9331e9a7480679"} err="failed to get container status \"6cd4eed6d850e065b4e90baa84aa2f605080fb6620a3f314df9331e9a7480679\": rpc error: code = NotFound desc = could not find container \"6cd4eed6d850e065b4e90baa84aa2f605080fb6620a3f314df9331e9a7480679\": container with ID starting with 6cd4eed6d850e065b4e90baa84aa2f605080fb6620a3f314df9331e9a7480679 not found: ID does not exist" Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.907398 4558 scope.go:117] "RemoveContainer" containerID="219c134e0aca9d6bec94b18c2506bf6bb9bb0bf7e39c133a7d64764bf6f7e5e2" Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.908203 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"219c134e0aca9d6bec94b18c2506bf6bb9bb0bf7e39c133a7d64764bf6f7e5e2"} err="failed to get 
container status \"219c134e0aca9d6bec94b18c2506bf6bb9bb0bf7e39c133a7d64764bf6f7e5e2\": rpc error: code = NotFound desc = could not find container \"219c134e0aca9d6bec94b18c2506bf6bb9bb0bf7e39c133a7d64764bf6f7e5e2\": container with ID starting with 219c134e0aca9d6bec94b18c2506bf6bb9bb0bf7e39c133a7d64764bf6f7e5e2 not found: ID does not exist" Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.912243 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a226d25c-395c-4744-a42e-46beee40d8e4-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "a226d25c-395c-4744-a42e-46beee40d8e4" (UID: "a226d25c-395c-4744-a42e-46beee40d8e4"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.968626 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/5bca4373-5f64-4803-94e7-28deeb5caad3-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"5bca4373-5f64-4803-94e7-28deeb5caad3\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.968691 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/5bca4373-5f64-4803-94e7-28deeb5caad3-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"5bca4373-5f64-4803-94e7-28deeb5caad3\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.968756 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/5bca4373-5f64-4803-94e7-28deeb5caad3-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"5bca4373-5f64-4803-94e7-28deeb5caad3\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.968801 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"openstack-cell1-galera-0\" (UID: \"5bca4373-5f64-4803-94e7-28deeb5caad3\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.968844 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g6qhw\" (UniqueName: \"kubernetes.io/projected/5bca4373-5f64-4803-94e7-28deeb5caad3-kube-api-access-g6qhw\") pod \"openstack-cell1-galera-0\" (UID: \"5bca4373-5f64-4803-94e7-28deeb5caad3\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.968963 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5bca4373-5f64-4803-94e7-28deeb5caad3-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"5bca4373-5f64-4803-94e7-28deeb5caad3\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.969045 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5bca4373-5f64-4803-94e7-28deeb5caad3-combined-ca-bundle\") pod 
\"openstack-cell1-galera-0\" (UID: \"5bca4373-5f64-4803-94e7-28deeb5caad3\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.969102 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5bca4373-5f64-4803-94e7-28deeb5caad3-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"5bca4373-5f64-4803-94e7-28deeb5caad3\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.969198 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a226d25c-395c-4744-a42e-46beee40d8e4-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.969215 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a226d25c-395c-4744-a42e-46beee40d8e4-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.969227 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:40 crc kubenswrapper[4558]: I0120 17:43:40.969238 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a226d25c-395c-4744-a42e-46beee40d8e4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.071378 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g6qhw\" (UniqueName: \"kubernetes.io/projected/5bca4373-5f64-4803-94e7-28deeb5caad3-kube-api-access-g6qhw\") pod \"openstack-cell1-galera-0\" (UID: \"5bca4373-5f64-4803-94e7-28deeb5caad3\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.071502 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5bca4373-5f64-4803-94e7-28deeb5caad3-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"5bca4373-5f64-4803-94e7-28deeb5caad3\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.071582 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5bca4373-5f64-4803-94e7-28deeb5caad3-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"5bca4373-5f64-4803-94e7-28deeb5caad3\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.071621 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5bca4373-5f64-4803-94e7-28deeb5caad3-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"5bca4373-5f64-4803-94e7-28deeb5caad3\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.073541 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/5bca4373-5f64-4803-94e7-28deeb5caad3-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"5bca4373-5f64-4803-94e7-28deeb5caad3\") " 
pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.072268 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5bca4373-5f64-4803-94e7-28deeb5caad3-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"5bca4373-5f64-4803-94e7-28deeb5caad3\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.073848 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/5bca4373-5f64-4803-94e7-28deeb5caad3-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"5bca4373-5f64-4803-94e7-28deeb5caad3\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.073185 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5bca4373-5f64-4803-94e7-28deeb5caad3-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"5bca4373-5f64-4803-94e7-28deeb5caad3\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.075201 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/5bca4373-5f64-4803-94e7-28deeb5caad3-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"5bca4373-5f64-4803-94e7-28deeb5caad3\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.075251 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/5bca4373-5f64-4803-94e7-28deeb5caad3-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"5bca4373-5f64-4803-94e7-28deeb5caad3\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.075284 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"openstack-cell1-galera-0\" (UID: \"5bca4373-5f64-4803-94e7-28deeb5caad3\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.075397 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"openstack-cell1-galera-0\" (UID: \"5bca4373-5f64-4803-94e7-28deeb5caad3\") device mount path \"/mnt/openstack/pv15\"" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.077794 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/5bca4373-5f64-4803-94e7-28deeb5caad3-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"5bca4373-5f64-4803-94e7-28deeb5caad3\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.078562 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/5bca4373-5f64-4803-94e7-28deeb5caad3-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"5bca4373-5f64-4803-94e7-28deeb5caad3\") " 
pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.079501 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5bca4373-5f64-4803-94e7-28deeb5caad3-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"5bca4373-5f64-4803-94e7-28deeb5caad3\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.098452 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g6qhw\" (UniqueName: \"kubernetes.io/projected/5bca4373-5f64-4803-94e7-28deeb5caad3-kube-api-access-g6qhw\") pod \"openstack-cell1-galera-0\" (UID: \"5bca4373-5f64-4803-94e7-28deeb5caad3\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.111269 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"openstack-cell1-galera-0\" (UID: \"5bca4373-5f64-4803-94e7-28deeb5caad3\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.179560 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.213833 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/barbican-api-7bc45756fb-278vp" podUID="432ec648-18a7-416a-85cc-409a30976a67" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.1.28:9311/healthcheck\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.297019 4558 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.297570 4558 scope.go:117] "RemoveContainer" containerID="bb8488b5aee706047405a606b3a23b8bd6228cb32059c06329cbdb7eefcf16f2" Jan 20 17:43:41 crc kubenswrapper[4558]: E0120 17:43:41.297881 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-cell0-conductor-conductor\" with CrashLoopBackOff: \"back-off 10s restarting failed container=nova-cell0-conductor-conductor pod=nova-cell0-conductor-0_openstack-kuttl-tests(50cacddd-ebea-477f-af64-6e96a09a242e)\"" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="50cacddd-ebea-477f-af64-6e96a09a242e" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.373261 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/barbican-api-7bc45756fb-278vp" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.380272 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/barbican-api-7bc45756fb-278vp" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.488936 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.491378 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-api-6575ff8c88-kqv4m"] Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.491656 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-api-6575ff8c88-kqv4m" podUID="f6d8d5b2-af92-43f7-ad6f-74ac3121776c" containerName="barbican-api-log" containerID="cri-o://7f70312ea849351c5fb0d6a0dd1c2e2239b767de0d5e6cefd7d82f31ec5e5f4b" gracePeriod=30 Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.491817 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-api-6575ff8c88-kqv4m" podUID="f6d8d5b2-af92-43f7-ad6f-74ac3121776c" containerName="barbican-api" containerID="cri-o://d7d531af3aef1970f34aacda0d423e7527ba5090f37fc6e8a3ca8f6d6677a41c" gracePeriod=30 Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.566553 4558 scope.go:117] "RemoveContainer" containerID="2d2a8989ca5a5af98b2c9dc8f801ea0675339ec14800f437c058c5a43c20146b" Jan 20 17:43:41 crc kubenswrapper[4558]: E0120 17:43:41.566882 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.592892 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5e0d404f-75d1-4be8-9c12-803d8db759e6-scripts\") pod \"5e0d404f-75d1-4be8-9c12-803d8db759e6\" (UID: \"5e0d404f-75d1-4be8-9c12-803d8db759e6\") " Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.592956 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5e0d404f-75d1-4be8-9c12-803d8db759e6-httpd-run\") pod \"5e0d404f-75d1-4be8-9c12-803d8db759e6\" (UID: \"5e0d404f-75d1-4be8-9c12-803d8db759e6\") " Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.593090 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5e0d404f-75d1-4be8-9c12-803d8db759e6-internal-tls-certs\") pod \"5e0d404f-75d1-4be8-9c12-803d8db759e6\" (UID: \"5e0d404f-75d1-4be8-9c12-803d8db759e6\") " Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.593122 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xzxfn\" (UniqueName: \"kubernetes.io/projected/5e0d404f-75d1-4be8-9c12-803d8db759e6-kube-api-access-xzxfn\") pod \"5e0d404f-75d1-4be8-9c12-803d8db759e6\" (UID: \"5e0d404f-75d1-4be8-9c12-803d8db759e6\") " Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.593146 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5e0d404f-75d1-4be8-9c12-803d8db759e6-logs\") pod \"5e0d404f-75d1-4be8-9c12-803d8db759e6\" (UID: \"5e0d404f-75d1-4be8-9c12-803d8db759e6\") " Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.593179 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e0d404f-75d1-4be8-9c12-803d8db759e6-config-data\") pod \"5e0d404f-75d1-4be8-9c12-803d8db759e6\" (UID: \"5e0d404f-75d1-4be8-9c12-803d8db759e6\") " Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.593251 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e0d404f-75d1-4be8-9c12-803d8db759e6-combined-ca-bundle\") pod \"5e0d404f-75d1-4be8-9c12-803d8db759e6\" (UID: \"5e0d404f-75d1-4be8-9c12-803d8db759e6\") " Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.593332 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"5e0d404f-75d1-4be8-9c12-803d8db759e6\" (UID: \"5e0d404f-75d1-4be8-9c12-803d8db759e6\") " Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.594448 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5e0d404f-75d1-4be8-9c12-803d8db759e6-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "5e0d404f-75d1-4be8-9c12-803d8db759e6" (UID: "5e0d404f-75d1-4be8-9c12-803d8db759e6"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.596004 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5e0d404f-75d1-4be8-9c12-803d8db759e6-logs" (OuterVolumeSpecName: "logs") pod "5e0d404f-75d1-4be8-9c12-803d8db759e6" (UID: "5e0d404f-75d1-4be8-9c12-803d8db759e6"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.601829 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5e0d404f-75d1-4be8-9c12-803d8db759e6-kube-api-access-xzxfn" (OuterVolumeSpecName: "kube-api-access-xzxfn") pod "5e0d404f-75d1-4be8-9c12-803d8db759e6" (UID: "5e0d404f-75d1-4be8-9c12-803d8db759e6"). InnerVolumeSpecName "kube-api-access-xzxfn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.601917 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "glance") pod "5e0d404f-75d1-4be8-9c12-803d8db759e6" (UID: "5e0d404f-75d1-4be8-9c12-803d8db759e6"). InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.613798 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5e0d404f-75d1-4be8-9c12-803d8db759e6-scripts" (OuterVolumeSpecName: "scripts") pod "5e0d404f-75d1-4be8-9c12-803d8db759e6" (UID: "5e0d404f-75d1-4be8-9c12-803d8db759e6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.655219 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5e0d404f-75d1-4be8-9c12-803d8db759e6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5e0d404f-75d1-4be8-9c12-803d8db759e6" (UID: "5e0d404f-75d1-4be8-9c12-803d8db759e6"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.697395 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5e0d404f-75d1-4be8-9c12-803d8db759e6-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.697422 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5e0d404f-75d1-4be8-9c12-803d8db759e6-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.697432 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xzxfn\" (UniqueName: \"kubernetes.io/projected/5e0d404f-75d1-4be8-9c12-803d8db759e6-kube-api-access-xzxfn\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.697441 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5e0d404f-75d1-4be8-9c12-803d8db759e6-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.697449 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e0d404f-75d1-4be8-9c12-803d8db759e6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.697467 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.733403 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-685599669b-nv6cc"] Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.734075 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/keystone-685599669b-nv6cc" podUID="33f7229d-bcae-434a-805e-530801e79b20" containerName="keystone-api" containerID="cri-o://d3f01f262533ef464619db0861fecd0a8366a9c544f5d180d20689cd0e292a37" gracePeriod=30 Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.736252 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5e0d404f-75d1-4be8-9c12-803d8db759e6-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "5e0d404f-75d1-4be8-9c12-803d8db759e6" (UID: "5e0d404f-75d1-4be8-9c12-803d8db759e6"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.750774 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-7f8885c56d-wsxms"] Jan 20 17:43:41 crc kubenswrapper[4558]: E0120 17:43:41.751415 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e0d404f-75d1-4be8-9c12-803d8db759e6" containerName="glance-log" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.751433 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e0d404f-75d1-4be8-9c12-803d8db759e6" containerName="glance-log" Jan 20 17:43:41 crc kubenswrapper[4558]: E0120 17:43:41.751444 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e0d404f-75d1-4be8-9c12-803d8db759e6" containerName="glance-httpd" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.751452 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e0d404f-75d1-4be8-9c12-803d8db759e6" containerName="glance-httpd" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.751681 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="5e0d404f-75d1-4be8-9c12-803d8db759e6" containerName="glance-log" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.751725 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="5e0d404f-75d1-4be8-9c12-803d8db759e6" containerName="glance-httpd" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.752425 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-7f8885c56d-wsxms" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.756505 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5e0d404f-75d1-4be8-9c12-803d8db759e6-config-data" (OuterVolumeSpecName: "config-data") pod "5e0d404f-75d1-4be8-9c12-803d8db759e6" (UID: "5e0d404f-75d1-4be8-9c12-803d8db759e6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.756620 4558 generic.go:334] "Generic (PLEG): container finished" podID="5e0d404f-75d1-4be8-9c12-803d8db759e6" containerID="97feb272063e6cb10279ddbd244ed9abed0f4fe2c8c6974b1016d61ab6e14c8d" exitCode=0 Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.756697 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"5e0d404f-75d1-4be8-9c12-803d8db759e6","Type":"ContainerDied","Data":"97feb272063e6cb10279ddbd244ed9abed0f4fe2c8c6974b1016d61ab6e14c8d"} Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.756735 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"5e0d404f-75d1-4be8-9c12-803d8db759e6","Type":"ContainerDied","Data":"bab504222f0097ec4e1c41baba39adba55d431a6848496b810190dd205c98a8a"} Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.756755 4558 scope.go:117] "RemoveContainer" containerID="97feb272063e6cb10279ddbd244ed9abed0f4fe2c8c6974b1016d61ab6e14c8d" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.756699 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.776785 4558 generic.go:334] "Generic (PLEG): container finished" podID="f6d8d5b2-af92-43f7-ad6f-74ac3121776c" containerID="7f70312ea849351c5fb0d6a0dd1c2e2239b767de0d5e6cefd7d82f31ec5e5f4b" exitCode=143 Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.776869 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-6575ff8c88-kqv4m" event={"ID":"f6d8d5b2-af92-43f7-ad6f-74ac3121776c","Type":"ContainerDied","Data":"7f70312ea849351c5fb0d6a0dd1c2e2239b767de0d5e6cefd7d82f31ec5e5f4b"} Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.787408 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-7f8885c56d-wsxms"] Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.795233 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.801803 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b911f497-5616-416f-8aa2-73c7cce172eb-combined-ca-bundle\") pod \"keystone-7f8885c56d-wsxms\" (UID: \"b911f497-5616-416f-8aa2-73c7cce172eb\") " pod="openstack-kuttl-tests/keystone-7f8885c56d-wsxms" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.801872 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b911f497-5616-416f-8aa2-73c7cce172eb-scripts\") pod \"keystone-7f8885c56d-wsxms\" (UID: \"b911f497-5616-416f-8aa2-73c7cce172eb\") " pod="openstack-kuttl-tests/keystone-7f8885c56d-wsxms" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.801939 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b911f497-5616-416f-8aa2-73c7cce172eb-public-tls-certs\") pod \"keystone-7f8885c56d-wsxms\" (UID: \"b911f497-5616-416f-8aa2-73c7cce172eb\") " pod="openstack-kuttl-tests/keystone-7f8885c56d-wsxms" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.802030 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b911f497-5616-416f-8aa2-73c7cce172eb-config-data\") pod \"keystone-7f8885c56d-wsxms\" (UID: \"b911f497-5616-416f-8aa2-73c7cce172eb\") " pod="openstack-kuttl-tests/keystone-7f8885c56d-wsxms" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.802065 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rdh88\" (UniqueName: \"kubernetes.io/projected/b911f497-5616-416f-8aa2-73c7cce172eb-kube-api-access-rdh88\") pod \"keystone-7f8885c56d-wsxms\" (UID: \"b911f497-5616-416f-8aa2-73c7cce172eb\") " pod="openstack-kuttl-tests/keystone-7f8885c56d-wsxms" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.802096 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b911f497-5616-416f-8aa2-73c7cce172eb-internal-tls-certs\") pod \"keystone-7f8885c56d-wsxms\" (UID: \"b911f497-5616-416f-8aa2-73c7cce172eb\") " pod="openstack-kuttl-tests/keystone-7f8885c56d-wsxms" Jan 20 17:43:41 crc 
kubenswrapper[4558]: I0120 17:43:41.802262 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b911f497-5616-416f-8aa2-73c7cce172eb-credential-keys\") pod \"keystone-7f8885c56d-wsxms\" (UID: \"b911f497-5616-416f-8aa2-73c7cce172eb\") " pod="openstack-kuttl-tests/keystone-7f8885c56d-wsxms" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.802307 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b911f497-5616-416f-8aa2-73c7cce172eb-fernet-keys\") pod \"keystone-7f8885c56d-wsxms\" (UID: \"b911f497-5616-416f-8aa2-73c7cce172eb\") " pod="openstack-kuttl-tests/keystone-7f8885c56d-wsxms" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.805445 4558 scope.go:117] "RemoveContainer" containerID="a28f5d4301df9252556516004020ebc32a69c49a2a100beb6220ce8b200a5a7b" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.805459 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.820461 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5e0d404f-75d1-4be8-9c12-803d8db759e6-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.821214 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e0d404f-75d1-4be8-9c12-803d8db759e6-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.850337 4558 scope.go:117] "RemoveContainer" containerID="97feb272063e6cb10279ddbd244ed9abed0f4fe2c8c6974b1016d61ab6e14c8d" Jan 20 17:43:41 crc kubenswrapper[4558]: E0120 17:43:41.850765 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"97feb272063e6cb10279ddbd244ed9abed0f4fe2c8c6974b1016d61ab6e14c8d\": container with ID starting with 97feb272063e6cb10279ddbd244ed9abed0f4fe2c8c6974b1016d61ab6e14c8d not found: ID does not exist" containerID="97feb272063e6cb10279ddbd244ed9abed0f4fe2c8c6974b1016d61ab6e14c8d" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.850816 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"97feb272063e6cb10279ddbd244ed9abed0f4fe2c8c6974b1016d61ab6e14c8d"} err="failed to get container status \"97feb272063e6cb10279ddbd244ed9abed0f4fe2c8c6974b1016d61ab6e14c8d\": rpc error: code = NotFound desc = could not find container \"97feb272063e6cb10279ddbd244ed9abed0f4fe2c8c6974b1016d61ab6e14c8d\": container with ID starting with 97feb272063e6cb10279ddbd244ed9abed0f4fe2c8c6974b1016d61ab6e14c8d not found: ID does not exist" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.850849 4558 scope.go:117] "RemoveContainer" containerID="a28f5d4301df9252556516004020ebc32a69c49a2a100beb6220ce8b200a5a7b" Jan 20 17:43:41 crc kubenswrapper[4558]: E0120 17:43:41.854381 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a28f5d4301df9252556516004020ebc32a69c49a2a100beb6220ce8b200a5a7b\": container with ID starting with a28f5d4301df9252556516004020ebc32a69c49a2a100beb6220ce8b200a5a7b not found: ID does not exist" 
containerID="a28f5d4301df9252556516004020ebc32a69c49a2a100beb6220ce8b200a5a7b" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.854437 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a28f5d4301df9252556516004020ebc32a69c49a2a100beb6220ce8b200a5a7b"} err="failed to get container status \"a28f5d4301df9252556516004020ebc32a69c49a2a100beb6220ce8b200a5a7b\": rpc error: code = NotFound desc = could not find container \"a28f5d4301df9252556516004020ebc32a69c49a2a100beb6220ce8b200a5a7b\": container with ID starting with a28f5d4301df9252556516004020ebc32a69c49a2a100beb6220ce8b200a5a7b not found: ID does not exist" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.893377 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.915033 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:43:41 crc kubenswrapper[4558]: W0120 17:43:41.916097 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5bca4373_5f64_4803_94e7_28deeb5caad3.slice/crio-425b81dc5be769f40d2f4467240b30bd8fda9e28ac5df99280cec7c1c36eee5b WatchSource:0}: Error finding container 425b81dc5be769f40d2f4467240b30bd8fda9e28ac5df99280cec7c1c36eee5b: Status 404 returned error can't find the container with id 425b81dc5be769f40d2f4467240b30bd8fda9e28ac5df99280cec7c1c36eee5b Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.923711 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b911f497-5616-416f-8aa2-73c7cce172eb-public-tls-certs\") pod \"keystone-7f8885c56d-wsxms\" (UID: \"b911f497-5616-416f-8aa2-73c7cce172eb\") " pod="openstack-kuttl-tests/keystone-7f8885c56d-wsxms" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.923828 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b911f497-5616-416f-8aa2-73c7cce172eb-config-data\") pod \"keystone-7f8885c56d-wsxms\" (UID: \"b911f497-5616-416f-8aa2-73c7cce172eb\") " pod="openstack-kuttl-tests/keystone-7f8885c56d-wsxms" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.923851 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdh88\" (UniqueName: \"kubernetes.io/projected/b911f497-5616-416f-8aa2-73c7cce172eb-kube-api-access-rdh88\") pod \"keystone-7f8885c56d-wsxms\" (UID: \"b911f497-5616-416f-8aa2-73c7cce172eb\") " pod="openstack-kuttl-tests/keystone-7f8885c56d-wsxms" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.923916 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b911f497-5616-416f-8aa2-73c7cce172eb-internal-tls-certs\") pod \"keystone-7f8885c56d-wsxms\" (UID: \"b911f497-5616-416f-8aa2-73c7cce172eb\") " pod="openstack-kuttl-tests/keystone-7f8885c56d-wsxms" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.924047 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b911f497-5616-416f-8aa2-73c7cce172eb-credential-keys\") pod \"keystone-7f8885c56d-wsxms\" (UID: \"b911f497-5616-416f-8aa2-73c7cce172eb\") " pod="openstack-kuttl-tests/keystone-7f8885c56d-wsxms" 
Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.924091 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b911f497-5616-416f-8aa2-73c7cce172eb-fernet-keys\") pod \"keystone-7f8885c56d-wsxms\" (UID: \"b911f497-5616-416f-8aa2-73c7cce172eb\") " pod="openstack-kuttl-tests/keystone-7f8885c56d-wsxms" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.924218 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b911f497-5616-416f-8aa2-73c7cce172eb-combined-ca-bundle\") pod \"keystone-7f8885c56d-wsxms\" (UID: \"b911f497-5616-416f-8aa2-73c7cce172eb\") " pod="openstack-kuttl-tests/keystone-7f8885c56d-wsxms" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.924260 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b911f497-5616-416f-8aa2-73c7cce172eb-scripts\") pod \"keystone-7f8885c56d-wsxms\" (UID: \"b911f497-5616-416f-8aa2-73c7cce172eb\") " pod="openstack-kuttl-tests/keystone-7f8885c56d-wsxms" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.924345 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.928926 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b911f497-5616-416f-8aa2-73c7cce172eb-scripts\") pod \"keystone-7f8885c56d-wsxms\" (UID: \"b911f497-5616-416f-8aa2-73c7cce172eb\") " pod="openstack-kuttl-tests/keystone-7f8885c56d-wsxms" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.929593 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.931701 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b911f497-5616-416f-8aa2-73c7cce172eb-public-tls-certs\") pod \"keystone-7f8885c56d-wsxms\" (UID: \"b911f497-5616-416f-8aa2-73c7cce172eb\") " pod="openstack-kuttl-tests/keystone-7f8885c56d-wsxms" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.935710 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.935967 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.938947 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-internal-config-data" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.939878 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-scripts" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.939997 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.940079 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-glance-dockercfg-gnzn6" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.940328 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-internal-svc" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.943865 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b911f497-5616-416f-8aa2-73c7cce172eb-combined-ca-bundle\") pod \"keystone-7f8885c56d-wsxms\" (UID: \"b911f497-5616-416f-8aa2-73c7cce172eb\") " pod="openstack-kuttl-tests/keystone-7f8885c56d-wsxms" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.945463 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b911f497-5616-416f-8aa2-73c7cce172eb-fernet-keys\") pod \"keystone-7f8885c56d-wsxms\" (UID: \"b911f497-5616-416f-8aa2-73c7cce172eb\") " pod="openstack-kuttl-tests/keystone-7f8885c56d-wsxms" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.948654 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b911f497-5616-416f-8aa2-73c7cce172eb-internal-tls-certs\") pod \"keystone-7f8885c56d-wsxms\" (UID: \"b911f497-5616-416f-8aa2-73c7cce172eb\") " pod="openstack-kuttl-tests/keystone-7f8885c56d-wsxms" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.949671 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdh88\" (UniqueName: \"kubernetes.io/projected/b911f497-5616-416f-8aa2-73c7cce172eb-kube-api-access-rdh88\") pod \"keystone-7f8885c56d-wsxms\" (UID: \"b911f497-5616-416f-8aa2-73c7cce172eb\") " pod="openstack-kuttl-tests/keystone-7f8885c56d-wsxms" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.950311 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b911f497-5616-416f-8aa2-73c7cce172eb-credential-keys\") pod \"keystone-7f8885c56d-wsxms\" (UID: \"b911f497-5616-416f-8aa2-73c7cce172eb\") " pod="openstack-kuttl-tests/keystone-7f8885c56d-wsxms" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.952477 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b911f497-5616-416f-8aa2-73c7cce172eb-config-data\") pod \"keystone-7f8885c56d-wsxms\" (UID: \"b911f497-5616-416f-8aa2-73c7cce172eb\") " pod="openstack-kuttl-tests/keystone-7f8885c56d-wsxms" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.964871 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 
17:43:41.981696 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.986045 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.989533 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-external-config-data" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.990012 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-public-svc" Jan 20 17:43:41 crc kubenswrapper[4558]: I0120 17:43:41.997812 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.006552 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.027401 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/33474adf-661b-42e9-bb7c-b629e9f0f7c8-config-data\") pod \"glance-default-internal-api-0\" (UID: \"33474adf-661b-42e9-bb7c-b629e9f0f7c8\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.027465 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/33474adf-661b-42e9-bb7c-b629e9f0f7c8-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"33474adf-661b-42e9-bb7c-b629e9f0f7c8\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.027520 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"33474adf-661b-42e9-bb7c-b629e9f0f7c8\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.027547 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/33474adf-661b-42e9-bb7c-b629e9f0f7c8-logs\") pod \"glance-default-internal-api-0\" (UID: \"33474adf-661b-42e9-bb7c-b629e9f0f7c8\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.027575 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-drlpd\" (UniqueName: \"kubernetes.io/projected/33474adf-661b-42e9-bb7c-b629e9f0f7c8-kube-api-access-drlpd\") pod \"glance-default-internal-api-0\" (UID: \"33474adf-661b-42e9-bb7c-b629e9f0f7c8\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.027626 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33474adf-661b-42e9-bb7c-b629e9f0f7c8-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"33474adf-661b-42e9-bb7c-b629e9f0f7c8\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 
17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.027662 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/33474adf-661b-42e9-bb7c-b629e9f0f7c8-scripts\") pod \"glance-default-internal-api-0\" (UID: \"33474adf-661b-42e9-bb7c-b629e9f0f7c8\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.027683 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/33474adf-661b-42e9-bb7c-b629e9f0f7c8-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"33474adf-661b-42e9-bb7c-b629e9f0f7c8\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.027984 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:43:42 crc kubenswrapper[4558]: E0120 17:43:42.028335 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[combined-ca-bundle config-data glance httpd-run kube-api-access-h2lnv logs public-tls-certs scripts], unattached volumes=[], failed to process volumes=[combined-ca-bundle config-data glance httpd-run kube-api-access-h2lnv logs public-tls-certs scripts]: context canceled" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="f2491246-d56e-4aa7-beeb-c5e45b0ec55a" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.095272 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-7f8885c56d-wsxms" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.126324 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.129609 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f2491246-d56e-4aa7-beeb-c5e45b0ec55a-scripts\") pod \"glance-default-external-api-0\" (UID: \"f2491246-d56e-4aa7-beeb-c5e45b0ec55a\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.129703 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2491246-d56e-4aa7-beeb-c5e45b0ec55a-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"f2491246-d56e-4aa7-beeb-c5e45b0ec55a\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.129763 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/33474adf-661b-42e9-bb7c-b629e9f0f7c8-config-data\") pod \"glance-default-internal-api-0\" (UID: \"33474adf-661b-42e9-bb7c-b629e9f0f7c8\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.129828 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/33474adf-661b-42e9-bb7c-b629e9f0f7c8-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"33474adf-661b-42e9-bb7c-b629e9f0f7c8\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:42 crc kubenswrapper[4558]: 
I0120 17:43:42.129886 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"f2491246-d56e-4aa7-beeb-c5e45b0ec55a\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.129929 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f2491246-d56e-4aa7-beeb-c5e45b0ec55a-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"f2491246-d56e-4aa7-beeb-c5e45b0ec55a\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.129965 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"33474adf-661b-42e9-bb7c-b629e9f0f7c8\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.129989 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f2491246-d56e-4aa7-beeb-c5e45b0ec55a-logs\") pod \"glance-default-external-api-0\" (UID: \"f2491246-d56e-4aa7-beeb-c5e45b0ec55a\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.130009 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h2lnv\" (UniqueName: \"kubernetes.io/projected/f2491246-d56e-4aa7-beeb-c5e45b0ec55a-kube-api-access-h2lnv\") pod \"glance-default-external-api-0\" (UID: \"f2491246-d56e-4aa7-beeb-c5e45b0ec55a\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.130048 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/33474adf-661b-42e9-bb7c-b629e9f0f7c8-logs\") pod \"glance-default-internal-api-0\" (UID: \"33474adf-661b-42e9-bb7c-b629e9f0f7c8\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.130069 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2491246-d56e-4aa7-beeb-c5e45b0ec55a-config-data\") pod \"glance-default-external-api-0\" (UID: \"f2491246-d56e-4aa7-beeb-c5e45b0ec55a\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.130094 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-drlpd\" (UniqueName: \"kubernetes.io/projected/33474adf-661b-42e9-bb7c-b629e9f0f7c8-kube-api-access-drlpd\") pod \"glance-default-internal-api-0\" (UID: \"33474adf-661b-42e9-bb7c-b629e9f0f7c8\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.130153 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f2491246-d56e-4aa7-beeb-c5e45b0ec55a-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: 
\"f2491246-d56e-4aa7-beeb-c5e45b0ec55a\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.130689 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/33474adf-661b-42e9-bb7c-b629e9f0f7c8-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"33474adf-661b-42e9-bb7c-b629e9f0f7c8\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.131294 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"33474adf-661b-42e9-bb7c-b629e9f0f7c8\") device mount path \"/mnt/openstack/pv11\"" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.131356 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33474adf-661b-42e9-bb7c-b629e9f0f7c8-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"33474adf-661b-42e9-bb7c-b629e9f0f7c8\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:42 crc kubenswrapper[4558]: E0120 17:43:42.131561 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[combined-ca-bundle config-data glance httpd-run internal-tls-certs kube-api-access-drlpd logs scripts], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="33474adf-661b-42e9-bb7c-b629e9f0f7c8" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.131633 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/33474adf-661b-42e9-bb7c-b629e9f0f7c8-logs\") pod \"glance-default-internal-api-0\" (UID: \"33474adf-661b-42e9-bb7c-b629e9f0f7c8\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.131755 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/33474adf-661b-42e9-bb7c-b629e9f0f7c8-scripts\") pod \"glance-default-internal-api-0\" (UID: \"33474adf-661b-42e9-bb7c-b629e9f0f7c8\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.131789 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/33474adf-661b-42e9-bb7c-b629e9f0f7c8-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"33474adf-661b-42e9-bb7c-b629e9f0f7c8\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.142541 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33474adf-661b-42e9-bb7c-b629e9f0f7c8-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"33474adf-661b-42e9-bb7c-b629e9f0f7c8\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.142661 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/33474adf-661b-42e9-bb7c-b629e9f0f7c8-config-data\") pod 
\"glance-default-internal-api-0\" (UID: \"33474adf-661b-42e9-bb7c-b629e9f0f7c8\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.146407 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/33474adf-661b-42e9-bb7c-b629e9f0f7c8-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"33474adf-661b-42e9-bb7c-b629e9f0f7c8\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.150648 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/33474adf-661b-42e9-bb7c-b629e9f0f7c8-scripts\") pod \"glance-default-internal-api-0\" (UID: \"33474adf-661b-42e9-bb7c-b629e9f0f7c8\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.156665 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-drlpd\" (UniqueName: \"kubernetes.io/projected/33474adf-661b-42e9-bb7c-b629e9f0f7c8-kube-api-access-drlpd\") pod \"glance-default-internal-api-0\" (UID: \"33474adf-661b-42e9-bb7c-b629e9f0f7c8\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.200903 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"33474adf-661b-42e9-bb7c-b629e9f0f7c8\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.235610 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2491246-d56e-4aa7-beeb-c5e45b0ec55a-config-data\") pod \"glance-default-external-api-0\" (UID: \"f2491246-d56e-4aa7-beeb-c5e45b0ec55a\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.236427 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f2491246-d56e-4aa7-beeb-c5e45b0ec55a-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"f2491246-d56e-4aa7-beeb-c5e45b0ec55a\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.236682 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f2491246-d56e-4aa7-beeb-c5e45b0ec55a-scripts\") pod \"glance-default-external-api-0\" (UID: \"f2491246-d56e-4aa7-beeb-c5e45b0ec55a\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.236771 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2491246-d56e-4aa7-beeb-c5e45b0ec55a-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"f2491246-d56e-4aa7-beeb-c5e45b0ec55a\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.237022 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod 
\"glance-default-external-api-0\" (UID: \"f2491246-d56e-4aa7-beeb-c5e45b0ec55a\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.237079 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f2491246-d56e-4aa7-beeb-c5e45b0ec55a-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"f2491246-d56e-4aa7-beeb-c5e45b0ec55a\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.237122 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f2491246-d56e-4aa7-beeb-c5e45b0ec55a-logs\") pod \"glance-default-external-api-0\" (UID: \"f2491246-d56e-4aa7-beeb-c5e45b0ec55a\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.237142 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h2lnv\" (UniqueName: \"kubernetes.io/projected/f2491246-d56e-4aa7-beeb-c5e45b0ec55a-kube-api-access-h2lnv\") pod \"glance-default-external-api-0\" (UID: \"f2491246-d56e-4aa7-beeb-c5e45b0ec55a\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.238792 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f2491246-d56e-4aa7-beeb-c5e45b0ec55a-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"f2491246-d56e-4aa7-beeb-c5e45b0ec55a\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.239154 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f2491246-d56e-4aa7-beeb-c5e45b0ec55a-logs\") pod \"glance-default-external-api-0\" (UID: \"f2491246-d56e-4aa7-beeb-c5e45b0ec55a\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.239260 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"f2491246-d56e-4aa7-beeb-c5e45b0ec55a\") device mount path \"/mnt/openstack/pv09\"" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.243422 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2491246-d56e-4aa7-beeb-c5e45b0ec55a-config-data\") pod \"glance-default-external-api-0\" (UID: \"f2491246-d56e-4aa7-beeb-c5e45b0ec55a\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.246661 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f2491246-d56e-4aa7-beeb-c5e45b0ec55a-scripts\") pod \"glance-default-external-api-0\" (UID: \"f2491246-d56e-4aa7-beeb-c5e45b0ec55a\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.247214 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2491246-d56e-4aa7-beeb-c5e45b0ec55a-combined-ca-bundle\") pod 
\"glance-default-external-api-0\" (UID: \"f2491246-d56e-4aa7-beeb-c5e45b0ec55a\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.247650 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f2491246-d56e-4aa7-beeb-c5e45b0ec55a-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"f2491246-d56e-4aa7-beeb-c5e45b0ec55a\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.254724 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h2lnv\" (UniqueName: \"kubernetes.io/projected/f2491246-d56e-4aa7-beeb-c5e45b0ec55a-kube-api-access-h2lnv\") pod \"glance-default-external-api-0\" (UID: \"f2491246-d56e-4aa7-beeb-c5e45b0ec55a\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.291843 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"f2491246-d56e-4aa7-beeb-c5e45b0ec55a\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.403998 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.588626 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="34a4b6ee-5830-4532-b829-a91cedcf8069" path="/var/lib/kubelet/pods/34a4b6ee-5830-4532-b829-a91cedcf8069/volumes" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.589998 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5e0d404f-75d1-4be8-9c12-803d8db759e6" path="/var/lib/kubelet/pods/5e0d404f-75d1-4be8-9c12-803d8db759e6/volumes" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.590857 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a226d25c-395c-4744-a42e-46beee40d8e4" path="/var/lib/kubelet/pods/a226d25c-395c-4744-a42e-46beee40d8e4/volumes" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.592381 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-7f8885c56d-wsxms"] Jan 20 17:43:42 crc kubenswrapper[4558]: W0120 17:43:42.599933 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb911f497_5616_416f_8aa2_73c7cce172eb.slice/crio-343e48f725b8296c7153d9b50c1479be2bc4ff75469989667da521990a0d8174 WatchSource:0}: Error finding container 343e48f725b8296c7153d9b50c1479be2bc4ff75469989667da521990a0d8174: Status 404 returned error can't find the container with id 343e48f725b8296c7153d9b50c1479be2bc4ff75469989667da521990a0d8174 Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.803375 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-7f8885c56d-wsxms" event={"ID":"b911f497-5616-416f-8aa2-73c7cce172eb","Type":"ContainerStarted","Data":"2d6106f888df62c14a5adbf7426959067a2a5587b57672909aebbf2d13dffda8"} Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.803424 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-7f8885c56d-wsxms" 
event={"ID":"b911f497-5616-416f-8aa2-73c7cce172eb","Type":"ContainerStarted","Data":"343e48f725b8296c7153d9b50c1479be2bc4ff75469989667da521990a0d8174"} Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.803639 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/keystone-7f8885c56d-wsxms" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.805303 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"5bca4373-5f64-4803-94e7-28deeb5caad3","Type":"ContainerStarted","Data":"0b3041e3ccb3e21db251ca0a2a8a3e1254c945a681f499b2db394f2eaa02be66"} Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.805330 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"5bca4373-5f64-4803-94e7-28deeb5caad3","Type":"ContainerStarted","Data":"425b81dc5be769f40d2f4467240b30bd8fda9e28ac5df99280cec7c1c36eee5b"} Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.818870 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.818923 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.828193 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.831894 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/keystone-7f8885c56d-wsxms" podStartSLOduration=1.831876657 podStartE2EDuration="1.831876657s" podCreationTimestamp="2026-01-20 17:43:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:43:42.827855366 +0000 UTC m=+3716.588193333" watchObservedRunningTime="2026-01-20 17:43:42.831876657 +0000 UTC m=+3716.592214623" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.840315 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack-kuttl-tests/barbican-api-7bc45756fb-278vp" podUID="432ec648-18a7-416a-85cc-409a30976a67" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.1.28:9311/healthcheck\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.841867 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.962426 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/33474adf-661b-42e9-bb7c-b629e9f0f7c8-internal-tls-certs\") pod \"33474adf-661b-42e9-bb7c-b629e9f0f7c8\" (UID: \"33474adf-661b-42e9-bb7c-b629e9f0f7c8\") " Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.962514 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f2491246-d56e-4aa7-beeb-c5e45b0ec55a-logs\") pod \"f2491246-d56e-4aa7-beeb-c5e45b0ec55a\" (UID: \"f2491246-d56e-4aa7-beeb-c5e45b0ec55a\") " Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.962547 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/33474adf-661b-42e9-bb7c-b629e9f0f7c8-logs\") pod \"33474adf-661b-42e9-bb7c-b629e9f0f7c8\" (UID: \"33474adf-661b-42e9-bb7c-b629e9f0f7c8\") " Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.962569 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f2491246-d56e-4aa7-beeb-c5e45b0ec55a-scripts\") pod \"f2491246-d56e-4aa7-beeb-c5e45b0ec55a\" (UID: \"f2491246-d56e-4aa7-beeb-c5e45b0ec55a\") " Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.962605 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f2491246-d56e-4aa7-beeb-c5e45b0ec55a-httpd-run\") pod \"f2491246-d56e-4aa7-beeb-c5e45b0ec55a\" (UID: \"f2491246-d56e-4aa7-beeb-c5e45b0ec55a\") " Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.962650 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33474adf-661b-42e9-bb7c-b629e9f0f7c8-combined-ca-bundle\") pod \"33474adf-661b-42e9-bb7c-b629e9f0f7c8\" (UID: \"33474adf-661b-42e9-bb7c-b629e9f0f7c8\") " Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.962671 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/33474adf-661b-42e9-bb7c-b629e9f0f7c8-config-data\") pod \"33474adf-661b-42e9-bb7c-b629e9f0f7c8\" (UID: \"33474adf-661b-42e9-bb7c-b629e9f0f7c8\") " Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.962738 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"33474adf-661b-42e9-bb7c-b629e9f0f7c8\" (UID: \"33474adf-661b-42e9-bb7c-b629e9f0f7c8\") " Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.962767 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2491246-d56e-4aa7-beeb-c5e45b0ec55a-combined-ca-bundle\") pod \"f2491246-d56e-4aa7-beeb-c5e45b0ec55a\" (UID: \"f2491246-d56e-4aa7-beeb-c5e45b0ec55a\") " Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.962802 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f2491246-d56e-4aa7-beeb-c5e45b0ec55a-public-tls-certs\") pod \"f2491246-d56e-4aa7-beeb-c5e45b0ec55a\" (UID: \"f2491246-d56e-4aa7-beeb-c5e45b0ec55a\") " Jan 20 17:43:42 crc 
kubenswrapper[4558]: I0120 17:43:42.962826 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"f2491246-d56e-4aa7-beeb-c5e45b0ec55a\" (UID: \"f2491246-d56e-4aa7-beeb-c5e45b0ec55a\") " Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.962868 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f2491246-d56e-4aa7-beeb-c5e45b0ec55a-logs" (OuterVolumeSpecName: "logs") pod "f2491246-d56e-4aa7-beeb-c5e45b0ec55a" (UID: "f2491246-d56e-4aa7-beeb-c5e45b0ec55a"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.962920 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/33474adf-661b-42e9-bb7c-b629e9f0f7c8-logs" (OuterVolumeSpecName: "logs") pod "33474adf-661b-42e9-bb7c-b629e9f0f7c8" (UID: "33474adf-661b-42e9-bb7c-b629e9f0f7c8"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.962949 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/33474adf-661b-42e9-bb7c-b629e9f0f7c8-scripts\") pod \"33474adf-661b-42e9-bb7c-b629e9f0f7c8\" (UID: \"33474adf-661b-42e9-bb7c-b629e9f0f7c8\") " Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.963083 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-drlpd\" (UniqueName: \"kubernetes.io/projected/33474adf-661b-42e9-bb7c-b629e9f0f7c8-kube-api-access-drlpd\") pod \"33474adf-661b-42e9-bb7c-b629e9f0f7c8\" (UID: \"33474adf-661b-42e9-bb7c-b629e9f0f7c8\") " Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.963122 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/33474adf-661b-42e9-bb7c-b629e9f0f7c8-httpd-run\") pod \"33474adf-661b-42e9-bb7c-b629e9f0f7c8\" (UID: \"33474adf-661b-42e9-bb7c-b629e9f0f7c8\") " Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.963149 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2491246-d56e-4aa7-beeb-c5e45b0ec55a-config-data\") pod \"f2491246-d56e-4aa7-beeb-c5e45b0ec55a\" (UID: \"f2491246-d56e-4aa7-beeb-c5e45b0ec55a\") " Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.963214 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h2lnv\" (UniqueName: \"kubernetes.io/projected/f2491246-d56e-4aa7-beeb-c5e45b0ec55a-kube-api-access-h2lnv\") pod \"f2491246-d56e-4aa7-beeb-c5e45b0ec55a\" (UID: \"f2491246-d56e-4aa7-beeb-c5e45b0ec55a\") " Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.964511 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f2491246-d56e-4aa7-beeb-c5e45b0ec55a-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.964533 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/33474adf-661b-42e9-bb7c-b629e9f0f7c8-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.966783 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/empty-dir/f2491246-d56e-4aa7-beeb-c5e45b0ec55a-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "f2491246-d56e-4aa7-beeb-c5e45b0ec55a" (UID: "f2491246-d56e-4aa7-beeb-c5e45b0ec55a"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.966854 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/33474adf-661b-42e9-bb7c-b629e9f0f7c8-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "33474adf-661b-42e9-bb7c-b629e9f0f7c8" (UID: "33474adf-661b-42e9-bb7c-b629e9f0f7c8"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.977265 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f2491246-d56e-4aa7-beeb-c5e45b0ec55a-scripts" (OuterVolumeSpecName: "scripts") pod "f2491246-d56e-4aa7-beeb-c5e45b0ec55a" (UID: "f2491246-d56e-4aa7-beeb-c5e45b0ec55a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.977547 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f2491246-d56e-4aa7-beeb-c5e45b0ec55a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f2491246-d56e-4aa7-beeb-c5e45b0ec55a" (UID: "f2491246-d56e-4aa7-beeb-c5e45b0ec55a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.977926 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/33474adf-661b-42e9-bb7c-b629e9f0f7c8-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "33474adf-661b-42e9-bb7c-b629e9f0f7c8" (UID: "33474adf-661b-42e9-bb7c-b629e9f0f7c8"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.977972 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/33474adf-661b-42e9-bb7c-b629e9f0f7c8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "33474adf-661b-42e9-bb7c-b629e9f0f7c8" (UID: "33474adf-661b-42e9-bb7c-b629e9f0f7c8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.979072 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "glance") pod "33474adf-661b-42e9-bb7c-b629e9f0f7c8" (UID: "33474adf-661b-42e9-bb7c-b629e9f0f7c8"). InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.979365 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/33474adf-661b-42e9-bb7c-b629e9f0f7c8-kube-api-access-drlpd" (OuterVolumeSpecName: "kube-api-access-drlpd") pod "33474adf-661b-42e9-bb7c-b629e9f0f7c8" (UID: "33474adf-661b-42e9-bb7c-b629e9f0f7c8"). InnerVolumeSpecName "kube-api-access-drlpd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.979425 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/33474adf-661b-42e9-bb7c-b629e9f0f7c8-config-data" (OuterVolumeSpecName: "config-data") pod "33474adf-661b-42e9-bb7c-b629e9f0f7c8" (UID: "33474adf-661b-42e9-bb7c-b629e9f0f7c8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.980314 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/33474adf-661b-42e9-bb7c-b629e9f0f7c8-scripts" (OuterVolumeSpecName: "scripts") pod "33474adf-661b-42e9-bb7c-b629e9f0f7c8" (UID: "33474adf-661b-42e9-bb7c-b629e9f0f7c8"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.983851 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f2491246-d56e-4aa7-beeb-c5e45b0ec55a-kube-api-access-h2lnv" (OuterVolumeSpecName: "kube-api-access-h2lnv") pod "f2491246-d56e-4aa7-beeb-c5e45b0ec55a" (UID: "f2491246-d56e-4aa7-beeb-c5e45b0ec55a"). InnerVolumeSpecName "kube-api-access-h2lnv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.984001 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "glance") pod "f2491246-d56e-4aa7-beeb-c5e45b0ec55a" (UID: "f2491246-d56e-4aa7-beeb-c5e45b0ec55a"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.984751 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f2491246-d56e-4aa7-beeb-c5e45b0ec55a-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "f2491246-d56e-4aa7-beeb-c5e45b0ec55a" (UID: "f2491246-d56e-4aa7-beeb-c5e45b0ec55a"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:42 crc kubenswrapper[4558]: I0120 17:43:42.994268 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f2491246-d56e-4aa7-beeb-c5e45b0ec55a-config-data" (OuterVolumeSpecName: "config-data") pod "f2491246-d56e-4aa7-beeb-c5e45b0ec55a" (UID: "f2491246-d56e-4aa7-beeb-c5e45b0ec55a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:43 crc kubenswrapper[4558]: I0120 17:43:43.066085 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/33474adf-661b-42e9-bb7c-b629e9f0f7c8-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:43 crc kubenswrapper[4558]: I0120 17:43:43.066240 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f2491246-d56e-4aa7-beeb-c5e45b0ec55a-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:43 crc kubenswrapper[4558]: I0120 17:43:43.066322 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f2491246-d56e-4aa7-beeb-c5e45b0ec55a-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:43 crc kubenswrapper[4558]: I0120 17:43:43.066378 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33474adf-661b-42e9-bb7c-b629e9f0f7c8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:43 crc kubenswrapper[4558]: I0120 17:43:43.066434 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/33474adf-661b-42e9-bb7c-b629e9f0f7c8-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:43 crc kubenswrapper[4558]: I0120 17:43:43.066512 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Jan 20 17:43:43 crc kubenswrapper[4558]: I0120 17:43:43.066565 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2491246-d56e-4aa7-beeb-c5e45b0ec55a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:43 crc kubenswrapper[4558]: I0120 17:43:43.066614 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f2491246-d56e-4aa7-beeb-c5e45b0ec55a-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:43 crc kubenswrapper[4558]: I0120 17:43:43.066679 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Jan 20 17:43:43 crc kubenswrapper[4558]: I0120 17:43:43.066729 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/33474adf-661b-42e9-bb7c-b629e9f0f7c8-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:43 crc kubenswrapper[4558]: I0120 17:43:43.066790 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-drlpd\" (UniqueName: \"kubernetes.io/projected/33474adf-661b-42e9-bb7c-b629e9f0f7c8-kube-api-access-drlpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:43 crc kubenswrapper[4558]: I0120 17:43:43.066849 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/33474adf-661b-42e9-bb7c-b629e9f0f7c8-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:43 crc kubenswrapper[4558]: I0120 17:43:43.066900 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2491246-d56e-4aa7-beeb-c5e45b0ec55a-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:43 crc kubenswrapper[4558]: I0120 17:43:43.066958 4558 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h2lnv\" (UniqueName: \"kubernetes.io/projected/f2491246-d56e-4aa7-beeb-c5e45b0ec55a-kube-api-access-h2lnv\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:43 crc kubenswrapper[4558]: I0120 17:43:43.091075 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Jan 20 17:43:43 crc kubenswrapper[4558]: I0120 17:43:43.091276 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Jan 20 17:43:43 crc kubenswrapper[4558]: I0120 17:43:43.169142 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:43 crc kubenswrapper[4558]: I0120 17:43:43.169196 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:43 crc kubenswrapper[4558]: I0120 17:43:43.171973 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:43:43 crc kubenswrapper[4558]: I0120 17:43:43.172201 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/memcached-0" podUID="b544295f-900d-4bff-adb5-fe0aaf262026" containerName="memcached" containerID="cri-o://2c5efb7f4785f24a40ae2c302910d84d294fabe818863ae49001cc8161f95672" gracePeriod=30 Jan 20 17:43:43 crc kubenswrapper[4558]: I0120 17:43:43.480022 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/neutron-5676986846-92lbc" podUID="8877ab44-638b-4880-97fb-305726a5c1a6" containerName="neutron-httpd" probeResult="failure" output="Get \"https://10.217.1.20:9696/\": dial tcp 10.217.1.20:9696: connect: connection refused" Jan 20 17:43:43 crc kubenswrapper[4558]: I0120 17:43:43.827030 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:43 crc kubenswrapper[4558]: I0120 17:43:43.827195 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:43 crc kubenswrapper[4558]: I0120 17:43:43.876540 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:43:43 crc kubenswrapper[4558]: I0120 17:43:43.889598 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:43:43 crc kubenswrapper[4558]: I0120 17:43:43.929023 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:43:43 crc kubenswrapper[4558]: I0120 17:43:43.932870 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:43 crc kubenswrapper[4558]: I0120 17:43:43.938387 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-internal-config-data" Jan 20 17:43:43 crc kubenswrapper[4558]: I0120 17:43:43.938651 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-scripts" Jan 20 17:43:43 crc kubenswrapper[4558]: I0120 17:43:43.940939 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-internal-svc" Jan 20 17:43:43 crc kubenswrapper[4558]: I0120 17:43:43.941144 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-glance-dockercfg-gnzn6" Jan 20 17:43:43 crc kubenswrapper[4558]: I0120 17:43:43.946618 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:43:43 crc kubenswrapper[4558]: I0120 17:43:43.967467 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:43:43 crc kubenswrapper[4558]: I0120 17:43:43.974047 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:43:43 crc kubenswrapper[4558]: I0120 17:43:43.979223 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:43:43 crc kubenswrapper[4558]: I0120 17:43:43.981105 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:43 crc kubenswrapper[4558]: I0120 17:43:43.983371 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-external-config-data" Jan 20 17:43:43 crc kubenswrapper[4558]: I0120 17:43:43.983608 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-public-svc" Jan 20 17:43:43 crc kubenswrapper[4558]: I0120 17:43:43.985307 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:43:43 crc kubenswrapper[4558]: I0120 17:43:43.999552 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f87490e8-557d-41e3-a07b-8fe12147b315-config-data\") pod \"glance-default-internal-api-0\" (UID: \"f87490e8-557d-41e3-a07b-8fe12147b315\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:43 crc kubenswrapper[4558]: I0120 17:43:43.999617 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f87490e8-557d-41e3-a07b-8fe12147b315-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"f87490e8-557d-41e3-a07b-8fe12147b315\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:43 crc kubenswrapper[4558]: I0120 17:43:43.999759 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f87490e8-557d-41e3-a07b-8fe12147b315-scripts\") pod \"glance-default-internal-api-0\" (UID: \"f87490e8-557d-41e3-a07b-8fe12147b315\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:44 crc 
kubenswrapper[4558]: I0120 17:43:44.000003 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f87490e8-557d-41e3-a07b-8fe12147b315-logs\") pod \"glance-default-internal-api-0\" (UID: \"f87490e8-557d-41e3-a07b-8fe12147b315\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.000075 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f87490e8-557d-41e3-a07b-8fe12147b315-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"f87490e8-557d-41e3-a07b-8fe12147b315\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.000113 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6q8rh\" (UniqueName: \"kubernetes.io/projected/f87490e8-557d-41e3-a07b-8fe12147b315-kube-api-access-6q8rh\") pod \"glance-default-internal-api-0\" (UID: \"f87490e8-557d-41e3-a07b-8fe12147b315\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.000217 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"f87490e8-557d-41e3-a07b-8fe12147b315\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.000251 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f87490e8-557d-41e3-a07b-8fe12147b315-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"f87490e8-557d-41e3-a07b-8fe12147b315\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.102489 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k55s4\" (UniqueName: \"kubernetes.io/projected/56173817-8246-4f37-b157-3890912004ca-kube-api-access-k55s4\") pod \"glance-default-external-api-0\" (UID: \"56173817-8246-4f37-b157-3890912004ca\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.102533 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"f87490e8-557d-41e3-a07b-8fe12147b315\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.102562 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f87490e8-557d-41e3-a07b-8fe12147b315-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"f87490e8-557d-41e3-a07b-8fe12147b315\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.102594 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/56173817-8246-4f37-b157-3890912004ca-scripts\") pod 
\"glance-default-external-api-0\" (UID: \"56173817-8246-4f37-b157-3890912004ca\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.102621 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f87490e8-557d-41e3-a07b-8fe12147b315-config-data\") pod \"glance-default-internal-api-0\" (UID: \"f87490e8-557d-41e3-a07b-8fe12147b315\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.102659 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/56173817-8246-4f37-b157-3890912004ca-logs\") pod \"glance-default-external-api-0\" (UID: \"56173817-8246-4f37-b157-3890912004ca\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.102685 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f87490e8-557d-41e3-a07b-8fe12147b315-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"f87490e8-557d-41e3-a07b-8fe12147b315\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.102714 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f87490e8-557d-41e3-a07b-8fe12147b315-scripts\") pod \"glance-default-internal-api-0\" (UID: \"f87490e8-557d-41e3-a07b-8fe12147b315\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.102746 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56173817-8246-4f37-b157-3890912004ca-config-data\") pod \"glance-default-external-api-0\" (UID: \"56173817-8246-4f37-b157-3890912004ca\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.102765 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/56173817-8246-4f37-b157-3890912004ca-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"56173817-8246-4f37-b157-3890912004ca\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.102810 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56173817-8246-4f37-b157-3890912004ca-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"56173817-8246-4f37-b157-3890912004ca\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.102827 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f87490e8-557d-41e3-a07b-8fe12147b315-logs\") pod \"glance-default-internal-api-0\" (UID: \"f87490e8-557d-41e3-a07b-8fe12147b315\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.102857 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: 
\"kubernetes.io/empty-dir/f87490e8-557d-41e3-a07b-8fe12147b315-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"f87490e8-557d-41e3-a07b-8fe12147b315\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.102879 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6q8rh\" (UniqueName: \"kubernetes.io/projected/f87490e8-557d-41e3-a07b-8fe12147b315-kube-api-access-6q8rh\") pod \"glance-default-internal-api-0\" (UID: \"f87490e8-557d-41e3-a07b-8fe12147b315\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.102899 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"56173817-8246-4f37-b157-3890912004ca\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.102920 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/56173817-8246-4f37-b157-3890912004ca-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"56173817-8246-4f37-b157-3890912004ca\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.103542 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"f87490e8-557d-41e3-a07b-8fe12147b315\") device mount path \"/mnt/openstack/pv11\"" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.107325 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f87490e8-557d-41e3-a07b-8fe12147b315-logs\") pod \"glance-default-internal-api-0\" (UID: \"f87490e8-557d-41e3-a07b-8fe12147b315\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.107401 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f87490e8-557d-41e3-a07b-8fe12147b315-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"f87490e8-557d-41e3-a07b-8fe12147b315\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.108437 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f87490e8-557d-41e3-a07b-8fe12147b315-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"f87490e8-557d-41e3-a07b-8fe12147b315\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.109623 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f87490e8-557d-41e3-a07b-8fe12147b315-config-data\") pod \"glance-default-internal-api-0\" (UID: \"f87490e8-557d-41e3-a07b-8fe12147b315\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.109819 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f87490e8-557d-41e3-a07b-8fe12147b315-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"f87490e8-557d-41e3-a07b-8fe12147b315\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.117681 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f87490e8-557d-41e3-a07b-8fe12147b315-scripts\") pod \"glance-default-internal-api-0\" (UID: \"f87490e8-557d-41e3-a07b-8fe12147b315\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.119790 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6q8rh\" (UniqueName: \"kubernetes.io/projected/f87490e8-557d-41e3-a07b-8fe12147b315-kube-api-access-6q8rh\") pod \"glance-default-internal-api-0\" (UID: \"f87490e8-557d-41e3-a07b-8fe12147b315\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.127680 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"f87490e8-557d-41e3-a07b-8fe12147b315\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.160728 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/keystone-685599669b-nv6cc" Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.205763 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/56173817-8246-4f37-b157-3890912004ca-logs\") pod \"glance-default-external-api-0\" (UID: \"56173817-8246-4f37-b157-3890912004ca\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.205937 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56173817-8246-4f37-b157-3890912004ca-config-data\") pod \"glance-default-external-api-0\" (UID: \"56173817-8246-4f37-b157-3890912004ca\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.205964 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/56173817-8246-4f37-b157-3890912004ca-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"56173817-8246-4f37-b157-3890912004ca\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.206070 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56173817-8246-4f37-b157-3890912004ca-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"56173817-8246-4f37-b157-3890912004ca\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.206174 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"56173817-8246-4f37-b157-3890912004ca\") " 
pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.206203 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/56173817-8246-4f37-b157-3890912004ca-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"56173817-8246-4f37-b157-3890912004ca\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.206215 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/56173817-8246-4f37-b157-3890912004ca-logs\") pod \"glance-default-external-api-0\" (UID: \"56173817-8246-4f37-b157-3890912004ca\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.206293 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k55s4\" (UniqueName: \"kubernetes.io/projected/56173817-8246-4f37-b157-3890912004ca-kube-api-access-k55s4\") pod \"glance-default-external-api-0\" (UID: \"56173817-8246-4f37-b157-3890912004ca\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.206387 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/56173817-8246-4f37-b157-3890912004ca-scripts\") pod \"glance-default-external-api-0\" (UID: \"56173817-8246-4f37-b157-3890912004ca\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.206445 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/56173817-8246-4f37-b157-3890912004ca-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"56173817-8246-4f37-b157-3890912004ca\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.206748 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"56173817-8246-4f37-b157-3890912004ca\") device mount path \"/mnt/openstack/pv09\"" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.210622 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/56173817-8246-4f37-b157-3890912004ca-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"56173817-8246-4f37-b157-3890912004ca\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.211856 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56173817-8246-4f37-b157-3890912004ca-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"56173817-8246-4f37-b157-3890912004ca\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.212392 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/56173817-8246-4f37-b157-3890912004ca-scripts\") pod \"glance-default-external-api-0\" (UID: 
\"56173817-8246-4f37-b157-3890912004ca\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.213090 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56173817-8246-4f37-b157-3890912004ca-config-data\") pod \"glance-default-external-api-0\" (UID: \"56173817-8246-4f37-b157-3890912004ca\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.221268 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k55s4\" (UniqueName: \"kubernetes.io/projected/56173817-8246-4f37-b157-3890912004ca-kube-api-access-k55s4\") pod \"glance-default-external-api-0\" (UID: \"56173817-8246-4f37-b157-3890912004ca\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.228598 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"56173817-8246-4f37-b157-3890912004ca\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.249480 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.305846 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.347354 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/cinder-scheduler-0" podUID="7faabe16-9de5-49dc-bab6-44e173f4403c" containerName="cinder-scheduler" probeResult="failure" output="Get \"http://10.217.1.38:8080/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.580351 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="33474adf-661b-42e9-bb7c-b629e9f0f7c8" path="/var/lib/kubelet/pods/33474adf-661b-42e9-bb7c-b629e9f0f7c8/volumes" Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.580801 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f2491246-d56e-4aa7-beeb-c5e45b0ec55a" path="/var/lib/kubelet/pods/f2491246-d56e-4aa7-beeb-c5e45b0ec55a/volumes" Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.622821 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.631372 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="48a3e4b6-ae17-449f-81f8-e521d7f16b7f" containerName="ceilometer-central-agent" containerID="cri-o://2fe2d6033ceb5c948ed32e5c9455381d57dd67590fa46b232c29a099df3a1f5e" gracePeriod=30 Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.632376 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="48a3e4b6-ae17-449f-81f8-e521d7f16b7f" containerName="sg-core" containerID="cri-o://6291ffd1b39b779da16b44b8132159644811a98d6ee34190d04a465d8cafa3e8" gracePeriod=30 Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.632733 4558 kuberuntime_container.go:808] "Killing container with a grace 
period" pod="openstack-kuttl-tests/ceilometer-0" podUID="48a3e4b6-ae17-449f-81f8-e521d7f16b7f" containerName="ceilometer-notification-agent" containerID="cri-o://4c0d28cc228b6021507306e977f98df789f246ebd560ca7ac4eed76b974bca68" gracePeriod=30 Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.633116 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="48a3e4b6-ae17-449f-81f8-e521d7f16b7f" containerName="proxy-httpd" containerID="cri-o://1f580fae1864e3169dd64566d04d693dc377a53c846267d9e12639cffaa324e9" gracePeriod=30 Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.699732 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:43:44 crc kubenswrapper[4558]: W0120 17:43:44.705728 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf87490e8_557d_41e3_a07b_8fe12147b315.slice/crio-9a68e1fbd923ab0acd23acc52effcf2d9054b760cec4c8b2e69e6dc2ff7323d5 WatchSource:0}: Error finding container 9a68e1fbd923ab0acd23acc52effcf2d9054b760cec4c8b2e69e6dc2ff7323d5: Status 404 returned error can't find the container with id 9a68e1fbd923ab0acd23acc52effcf2d9054b760cec4c8b2e69e6dc2ff7323d5 Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.780896 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:43:44 crc kubenswrapper[4558]: W0120 17:43:44.786933 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod56173817_8246_4f37_b157_3890912004ca.slice/crio-e902ae63d5f168b0370ac50e98e7f85ed43d5a6e8c5a145738789c24c17611f1 WatchSource:0}: Error finding container e902ae63d5f168b0370ac50e98e7f85ed43d5a6e8c5a145738789c24c17611f1: Status 404 returned error can't find the container with id e902ae63d5f168b0370ac50e98e7f85ed43d5a6e8c5a145738789c24c17611f1 Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.795367 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.837560 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b544295f-900d-4bff-adb5-fe0aaf262026-config-data\") pod \"b544295f-900d-4bff-adb5-fe0aaf262026\" (UID: \"b544295f-900d-4bff-adb5-fe0aaf262026\") " Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.837760 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z8cj9\" (UniqueName: \"kubernetes.io/projected/b544295f-900d-4bff-adb5-fe0aaf262026-kube-api-access-z8cj9\") pod \"b544295f-900d-4bff-adb5-fe0aaf262026\" (UID: \"b544295f-900d-4bff-adb5-fe0aaf262026\") " Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.837818 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/b544295f-900d-4bff-adb5-fe0aaf262026-kolla-config\") pod \"b544295f-900d-4bff-adb5-fe0aaf262026\" (UID: \"b544295f-900d-4bff-adb5-fe0aaf262026\") " Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.837857 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b544295f-900d-4bff-adb5-fe0aaf262026-combined-ca-bundle\") pod 
\"b544295f-900d-4bff-adb5-fe0aaf262026\" (UID: \"b544295f-900d-4bff-adb5-fe0aaf262026\") " Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.837927 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/b544295f-900d-4bff-adb5-fe0aaf262026-memcached-tls-certs\") pod \"b544295f-900d-4bff-adb5-fe0aaf262026\" (UID: \"b544295f-900d-4bff-adb5-fe0aaf262026\") " Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.841930 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b544295f-900d-4bff-adb5-fe0aaf262026-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "b544295f-900d-4bff-adb5-fe0aaf262026" (UID: "b544295f-900d-4bff-adb5-fe0aaf262026"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.841966 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b544295f-900d-4bff-adb5-fe0aaf262026-config-data" (OuterVolumeSpecName: "config-data") pod "b544295f-900d-4bff-adb5-fe0aaf262026" (UID: "b544295f-900d-4bff-adb5-fe0aaf262026"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.854805 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b544295f-900d-4bff-adb5-fe0aaf262026-kube-api-access-z8cj9" (OuterVolumeSpecName: "kube-api-access-z8cj9") pod "b544295f-900d-4bff-adb5-fe0aaf262026" (UID: "b544295f-900d-4bff-adb5-fe0aaf262026"). InnerVolumeSpecName "kube-api-access-z8cj9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.904475 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b544295f-900d-4bff-adb5-fe0aaf262026-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b544295f-900d-4bff-adb5-fe0aaf262026" (UID: "b544295f-900d-4bff-adb5-fe0aaf262026"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.925453 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b544295f-900d-4bff-adb5-fe0aaf262026-memcached-tls-certs" (OuterVolumeSpecName: "memcached-tls-certs") pod "b544295f-900d-4bff-adb5-fe0aaf262026" (UID: "b544295f-900d-4bff-adb5-fe0aaf262026"). InnerVolumeSpecName "memcached-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.927791 4558 generic.go:334] "Generic (PLEG): container finished" podID="48a3e4b6-ae17-449f-81f8-e521d7f16b7f" containerID="1f580fae1864e3169dd64566d04d693dc377a53c846267d9e12639cffaa324e9" exitCode=0 Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.927816 4558 generic.go:334] "Generic (PLEG): container finished" podID="48a3e4b6-ae17-449f-81f8-e521d7f16b7f" containerID="6291ffd1b39b779da16b44b8132159644811a98d6ee34190d04a465d8cafa3e8" exitCode=2 Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.927862 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"48a3e4b6-ae17-449f-81f8-e521d7f16b7f","Type":"ContainerDied","Data":"1f580fae1864e3169dd64566d04d693dc377a53c846267d9e12639cffaa324e9"} Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.927893 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"48a3e4b6-ae17-449f-81f8-e521d7f16b7f","Type":"ContainerDied","Data":"6291ffd1b39b779da16b44b8132159644811a98d6ee34190d04a465d8cafa3e8"} Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.934685 4558 generic.go:334] "Generic (PLEG): container finished" podID="b544295f-900d-4bff-adb5-fe0aaf262026" containerID="2c5efb7f4785f24a40ae2c302910d84d294fabe818863ae49001cc8161f95672" exitCode=0 Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.934754 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"b544295f-900d-4bff-adb5-fe0aaf262026","Type":"ContainerDied","Data":"2c5efb7f4785f24a40ae2c302910d84d294fabe818863ae49001cc8161f95672"} Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.934789 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"b544295f-900d-4bff-adb5-fe0aaf262026","Type":"ContainerDied","Data":"14757c210cdd3d7beaa4bfde2e424ee26319af9cd6fbb62fc3225ff25b1cf0d6"} Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.934806 4558 scope.go:117] "RemoveContainer" containerID="2c5efb7f4785f24a40ae2c302910d84d294fabe818863ae49001cc8161f95672" Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.934940 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.944084 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z8cj9\" (UniqueName: \"kubernetes.io/projected/b544295f-900d-4bff-adb5-fe0aaf262026-kube-api-access-z8cj9\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.944110 4558 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/b544295f-900d-4bff-adb5-fe0aaf262026-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.944123 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b544295f-900d-4bff-adb5-fe0aaf262026-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.944134 4558 reconciler_common.go:293] "Volume detached for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/b544295f-900d-4bff-adb5-fe0aaf262026-memcached-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.944146 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b544295f-900d-4bff-adb5-fe0aaf262026-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.945381 4558 generic.go:334] "Generic (PLEG): container finished" podID="5bca4373-5f64-4803-94e7-28deeb5caad3" containerID="0b3041e3ccb3e21db251ca0a2a8a3e1254c945a681f499b2db394f2eaa02be66" exitCode=0 Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.945412 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"5bca4373-5f64-4803-94e7-28deeb5caad3","Type":"ContainerDied","Data":"0b3041e3ccb3e21db251ca0a2a8a3e1254c945a681f499b2db394f2eaa02be66"} Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.950973 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"f87490e8-557d-41e3-a07b-8fe12147b315","Type":"ContainerStarted","Data":"9a68e1fbd923ab0acd23acc52effcf2d9054b760cec4c8b2e69e6dc2ff7323d5"} Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.953781 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"56173817-8246-4f37-b157-3890912004ca","Type":"ContainerStarted","Data":"e902ae63d5f168b0370ac50e98e7f85ed43d5a6e8c5a145738789c24c17611f1"} Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.962470 4558 scope.go:117] "RemoveContainer" containerID="2c5efb7f4785f24a40ae2c302910d84d294fabe818863ae49001cc8161f95672" Jan 20 17:43:44 crc kubenswrapper[4558]: E0120 17:43:44.963079 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2c5efb7f4785f24a40ae2c302910d84d294fabe818863ae49001cc8161f95672\": container with ID starting with 2c5efb7f4785f24a40ae2c302910d84d294fabe818863ae49001cc8161f95672 not found: ID does not exist" containerID="2c5efb7f4785f24a40ae2c302910d84d294fabe818863ae49001cc8161f95672" Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.963127 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2c5efb7f4785f24a40ae2c302910d84d294fabe818863ae49001cc8161f95672"} err="failed to get 
container status \"2c5efb7f4785f24a40ae2c302910d84d294fabe818863ae49001cc8161f95672\": rpc error: code = NotFound desc = could not find container \"2c5efb7f4785f24a40ae2c302910d84d294fabe818863ae49001cc8161f95672\": container with ID starting with 2c5efb7f4785f24a40ae2c302910d84d294fabe818863ae49001cc8161f95672 not found: ID does not exist" Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.991700 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/barbican-api-6575ff8c88-kqv4m" podUID="f6d8d5b2-af92-43f7-ad6f-74ac3121776c" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.0.225:9311/healthcheck\": read tcp 10.217.0.2:50654->10.217.0.225:9311: read: connection reset by peer" Jan 20 17:43:44 crc kubenswrapper[4558]: I0120 17:43:44.992011 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/barbican-api-6575ff8c88-kqv4m" podUID="f6d8d5b2-af92-43f7-ad6f-74ac3121776c" containerName="barbican-api" probeResult="failure" output="Get \"https://10.217.0.225:9311/healthcheck\": read tcp 10.217.0.2:50656->10.217.0.225:9311: read: connection reset by peer" Jan 20 17:43:45 crc kubenswrapper[4558]: I0120 17:43:45.005009 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:43:45 crc kubenswrapper[4558]: I0120 17:43:45.024667 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:43:45 crc kubenswrapper[4558]: I0120 17:43:45.035306 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:43:45 crc kubenswrapper[4558]: E0120 17:43:45.035860 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b544295f-900d-4bff-adb5-fe0aaf262026" containerName="memcached" Jan 20 17:43:45 crc kubenswrapper[4558]: I0120 17:43:45.035880 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b544295f-900d-4bff-adb5-fe0aaf262026" containerName="memcached" Jan 20 17:43:45 crc kubenswrapper[4558]: I0120 17:43:45.036093 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b544295f-900d-4bff-adb5-fe0aaf262026" containerName="memcached" Jan 20 17:43:45 crc kubenswrapper[4558]: I0120 17:43:45.036877 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:43:45 crc kubenswrapper[4558]: I0120 17:43:45.040274 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-memcached-svc" Jan 20 17:43:45 crc kubenswrapper[4558]: I0120 17:43:45.041412 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"memcached-config-data" Jan 20 17:43:45 crc kubenswrapper[4558]: I0120 17:43:45.041417 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"memcached-memcached-dockercfg-sqmsc" Jan 20 17:43:45 crc kubenswrapper[4558]: I0120 17:43:45.042441 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:43:45 crc kubenswrapper[4558]: I0120 17:43:45.148325 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p65cv\" (UniqueName: \"kubernetes.io/projected/131735a5-8043-40d7-a15d-f0024356e584-kube-api-access-p65cv\") pod \"memcached-0\" (UID: \"131735a5-8043-40d7-a15d-f0024356e584\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:43:45 crc kubenswrapper[4558]: I0120 17:43:45.148566 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/131735a5-8043-40d7-a15d-f0024356e584-kolla-config\") pod \"memcached-0\" (UID: \"131735a5-8043-40d7-a15d-f0024356e584\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:43:45 crc kubenswrapper[4558]: I0120 17:43:45.148711 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/131735a5-8043-40d7-a15d-f0024356e584-memcached-tls-certs\") pod \"memcached-0\" (UID: \"131735a5-8043-40d7-a15d-f0024356e584\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:43:45 crc kubenswrapper[4558]: I0120 17:43:45.148845 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/131735a5-8043-40d7-a15d-f0024356e584-combined-ca-bundle\") pod \"memcached-0\" (UID: \"131735a5-8043-40d7-a15d-f0024356e584\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:43:45 crc kubenswrapper[4558]: I0120 17:43:45.149000 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/131735a5-8043-40d7-a15d-f0024356e584-config-data\") pod \"memcached-0\" (UID: \"131735a5-8043-40d7-a15d-f0024356e584\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:43:45 crc kubenswrapper[4558]: I0120 17:43:45.251766 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/131735a5-8043-40d7-a15d-f0024356e584-config-data\") pod \"memcached-0\" (UID: \"131735a5-8043-40d7-a15d-f0024356e584\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:43:45 crc kubenswrapper[4558]: I0120 17:43:45.252124 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p65cv\" (UniqueName: \"kubernetes.io/projected/131735a5-8043-40d7-a15d-f0024356e584-kube-api-access-p65cv\") pod \"memcached-0\" (UID: \"131735a5-8043-40d7-a15d-f0024356e584\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:43:45 crc kubenswrapper[4558]: I0120 17:43:45.252209 4558 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/131735a5-8043-40d7-a15d-f0024356e584-kolla-config\") pod \"memcached-0\" (UID: \"131735a5-8043-40d7-a15d-f0024356e584\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:43:45 crc kubenswrapper[4558]: I0120 17:43:45.252346 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/131735a5-8043-40d7-a15d-f0024356e584-memcached-tls-certs\") pod \"memcached-0\" (UID: \"131735a5-8043-40d7-a15d-f0024356e584\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:43:45 crc kubenswrapper[4558]: I0120 17:43:45.252433 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/131735a5-8043-40d7-a15d-f0024356e584-combined-ca-bundle\") pod \"memcached-0\" (UID: \"131735a5-8043-40d7-a15d-f0024356e584\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:43:45 crc kubenswrapper[4558]: I0120 17:43:45.254118 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/131735a5-8043-40d7-a15d-f0024356e584-config-data\") pod \"memcached-0\" (UID: \"131735a5-8043-40d7-a15d-f0024356e584\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:43:45 crc kubenswrapper[4558]: I0120 17:43:45.254300 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/131735a5-8043-40d7-a15d-f0024356e584-kolla-config\") pod \"memcached-0\" (UID: \"131735a5-8043-40d7-a15d-f0024356e584\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:43:45 crc kubenswrapper[4558]: I0120 17:43:45.258033 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/131735a5-8043-40d7-a15d-f0024356e584-memcached-tls-certs\") pod \"memcached-0\" (UID: \"131735a5-8043-40d7-a15d-f0024356e584\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:43:45 crc kubenswrapper[4558]: I0120 17:43:45.272346 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/131735a5-8043-40d7-a15d-f0024356e584-combined-ca-bundle\") pod \"memcached-0\" (UID: \"131735a5-8043-40d7-a15d-f0024356e584\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:43:45 crc kubenswrapper[4558]: I0120 17:43:45.279300 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p65cv\" (UniqueName: \"kubernetes.io/projected/131735a5-8043-40d7-a15d-f0024356e584-kube-api-access-p65cv\") pod \"memcached-0\" (UID: \"131735a5-8043-40d7-a15d-f0024356e584\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:43:45 crc kubenswrapper[4558]: I0120 17:43:45.389611 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:43:45 crc kubenswrapper[4558]: I0120 17:43:45.483460 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-api-6575ff8c88-kqv4m" Jan 20 17:43:45 crc kubenswrapper[4558]: I0120 17:43:45.560376 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f6d8d5b2-af92-43f7-ad6f-74ac3121776c-public-tls-certs\") pod \"f6d8d5b2-af92-43f7-ad6f-74ac3121776c\" (UID: \"f6d8d5b2-af92-43f7-ad6f-74ac3121776c\") " Jan 20 17:43:45 crc kubenswrapper[4558]: I0120 17:43:45.560458 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tl7zd\" (UniqueName: \"kubernetes.io/projected/f6d8d5b2-af92-43f7-ad6f-74ac3121776c-kube-api-access-tl7zd\") pod \"f6d8d5b2-af92-43f7-ad6f-74ac3121776c\" (UID: \"f6d8d5b2-af92-43f7-ad6f-74ac3121776c\") " Jan 20 17:43:45 crc kubenswrapper[4558]: I0120 17:43:45.560509 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f6d8d5b2-af92-43f7-ad6f-74ac3121776c-config-data\") pod \"f6d8d5b2-af92-43f7-ad6f-74ac3121776c\" (UID: \"f6d8d5b2-af92-43f7-ad6f-74ac3121776c\") " Jan 20 17:43:45 crc kubenswrapper[4558]: I0120 17:43:45.560543 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f6d8d5b2-af92-43f7-ad6f-74ac3121776c-combined-ca-bundle\") pod \"f6d8d5b2-af92-43f7-ad6f-74ac3121776c\" (UID: \"f6d8d5b2-af92-43f7-ad6f-74ac3121776c\") " Jan 20 17:43:45 crc kubenswrapper[4558]: I0120 17:43:45.560781 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f6d8d5b2-af92-43f7-ad6f-74ac3121776c-config-data-custom\") pod \"f6d8d5b2-af92-43f7-ad6f-74ac3121776c\" (UID: \"f6d8d5b2-af92-43f7-ad6f-74ac3121776c\") " Jan 20 17:43:45 crc kubenswrapper[4558]: I0120 17:43:45.560961 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f6d8d5b2-af92-43f7-ad6f-74ac3121776c-internal-tls-certs\") pod \"f6d8d5b2-af92-43f7-ad6f-74ac3121776c\" (UID: \"f6d8d5b2-af92-43f7-ad6f-74ac3121776c\") " Jan 20 17:43:45 crc kubenswrapper[4558]: I0120 17:43:45.560996 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f6d8d5b2-af92-43f7-ad6f-74ac3121776c-logs\") pod \"f6d8d5b2-af92-43f7-ad6f-74ac3121776c\" (UID: \"f6d8d5b2-af92-43f7-ad6f-74ac3121776c\") " Jan 20 17:43:45 crc kubenswrapper[4558]: I0120 17:43:45.569606 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f6d8d5b2-af92-43f7-ad6f-74ac3121776c-logs" (OuterVolumeSpecName: "logs") pod "f6d8d5b2-af92-43f7-ad6f-74ac3121776c" (UID: "f6d8d5b2-af92-43f7-ad6f-74ac3121776c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:43:45 crc kubenswrapper[4558]: I0120 17:43:45.576919 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f6d8d5b2-af92-43f7-ad6f-74ac3121776c-kube-api-access-tl7zd" (OuterVolumeSpecName: "kube-api-access-tl7zd") pod "f6d8d5b2-af92-43f7-ad6f-74ac3121776c" (UID: "f6d8d5b2-af92-43f7-ad6f-74ac3121776c"). InnerVolumeSpecName "kube-api-access-tl7zd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:43:45 crc kubenswrapper[4558]: I0120 17:43:45.589358 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f6d8d5b2-af92-43f7-ad6f-74ac3121776c-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "f6d8d5b2-af92-43f7-ad6f-74ac3121776c" (UID: "f6d8d5b2-af92-43f7-ad6f-74ac3121776c"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:45 crc kubenswrapper[4558]: I0120 17:43:45.627777 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f6d8d5b2-af92-43f7-ad6f-74ac3121776c-config-data" (OuterVolumeSpecName: "config-data") pod "f6d8d5b2-af92-43f7-ad6f-74ac3121776c" (UID: "f6d8d5b2-af92-43f7-ad6f-74ac3121776c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:45 crc kubenswrapper[4558]: I0120 17:43:45.636368 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f6d8d5b2-af92-43f7-ad6f-74ac3121776c-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "f6d8d5b2-af92-43f7-ad6f-74ac3121776c" (UID: "f6d8d5b2-af92-43f7-ad6f-74ac3121776c"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:45 crc kubenswrapper[4558]: I0120 17:43:45.641991 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f6d8d5b2-af92-43f7-ad6f-74ac3121776c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f6d8d5b2-af92-43f7-ad6f-74ac3121776c" (UID: "f6d8d5b2-af92-43f7-ad6f-74ac3121776c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:45 crc kubenswrapper[4558]: I0120 17:43:45.660110 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f6d8d5b2-af92-43f7-ad6f-74ac3121776c-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "f6d8d5b2-af92-43f7-ad6f-74ac3121776c" (UID: "f6d8d5b2-af92-43f7-ad6f-74ac3121776c"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:45 crc kubenswrapper[4558]: I0120 17:43:45.667827 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f6d8d5b2-af92-43f7-ad6f-74ac3121776c-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:45 crc kubenswrapper[4558]: I0120 17:43:45.667854 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f6d8d5b2-af92-43f7-ad6f-74ac3121776c-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:45 crc kubenswrapper[4558]: I0120 17:43:45.667866 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f6d8d5b2-af92-43f7-ad6f-74ac3121776c-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:45 crc kubenswrapper[4558]: I0120 17:43:45.667876 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tl7zd\" (UniqueName: \"kubernetes.io/projected/f6d8d5b2-af92-43f7-ad6f-74ac3121776c-kube-api-access-tl7zd\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:45 crc kubenswrapper[4558]: I0120 17:43:45.667887 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f6d8d5b2-af92-43f7-ad6f-74ac3121776c-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:45 crc kubenswrapper[4558]: I0120 17:43:45.667895 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f6d8d5b2-af92-43f7-ad6f-74ac3121776c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:45 crc kubenswrapper[4558]: I0120 17:43:45.667904 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f6d8d5b2-af92-43f7-ad6f-74ac3121776c-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:45 crc kubenswrapper[4558]: I0120 17:43:45.856853 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:43:45 crc kubenswrapper[4558]: I0120 17:43:45.994895 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"5bca4373-5f64-4803-94e7-28deeb5caad3","Type":"ContainerStarted","Data":"7905c465186edb142be037f724712184999953459f1d4e54418a375cbfe42e59"} Jan 20 17:43:45 crc kubenswrapper[4558]: I0120 17:43:45.999658 4558 generic.go:334] "Generic (PLEG): container finished" podID="f6d8d5b2-af92-43f7-ad6f-74ac3121776c" containerID="d7d531af3aef1970f34aacda0d423e7527ba5090f37fc6e8a3ca8f6d6677a41c" exitCode=0 Jan 20 17:43:45 crc kubenswrapper[4558]: I0120 17:43:45.999716 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-6575ff8c88-kqv4m" event={"ID":"f6d8d5b2-af92-43f7-ad6f-74ac3121776c","Type":"ContainerDied","Data":"d7d531af3aef1970f34aacda0d423e7527ba5090f37fc6e8a3ca8f6d6677a41c"} Jan 20 17:43:45 crc kubenswrapper[4558]: I0120 17:43:45.999749 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-6575ff8c88-kqv4m" event={"ID":"f6d8d5b2-af92-43f7-ad6f-74ac3121776c","Type":"ContainerDied","Data":"7659e7aeec6184c025b64668f5ed1fc3170af02df30635a3254681e913a849e1"} Jan 20 17:43:45 crc kubenswrapper[4558]: I0120 17:43:45.999771 4558 scope.go:117] "RemoveContainer" containerID="d7d531af3aef1970f34aacda0d423e7527ba5090f37fc6e8a3ca8f6d6677a41c" Jan 20 17:43:45 crc kubenswrapper[4558]: I0120 
17:43:45.999869 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-api-6575ff8c88-kqv4m" Jan 20 17:43:46 crc kubenswrapper[4558]: I0120 17:43:46.022983 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/openstack-cell1-galera-0" podStartSLOduration=6.022963294 podStartE2EDuration="6.022963294s" podCreationTimestamp="2026-01-20 17:43:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:43:46.017130698 +0000 UTC m=+3719.777468664" watchObservedRunningTime="2026-01-20 17:43:46.022963294 +0000 UTC m=+3719.783301251" Jan 20 17:43:46 crc kubenswrapper[4558]: I0120 17:43:46.028709 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"f87490e8-557d-41e3-a07b-8fe12147b315","Type":"ContainerStarted","Data":"dea187ec3d51aa9fd2fd3318b66ce5429ee704c1f761dae601b4234a786d24c7"} Jan 20 17:43:46 crc kubenswrapper[4558]: I0120 17:43:46.037356 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"131735a5-8043-40d7-a15d-f0024356e584","Type":"ContainerStarted","Data":"00887ddcbc7ce052ace0ca68010332c4c0342d0819780f60c62b3f04ba6d24fd"} Jan 20 17:43:46 crc kubenswrapper[4558]: I0120 17:43:46.041390 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"56173817-8246-4f37-b157-3890912004ca","Type":"ContainerStarted","Data":"65d88d20f8ddd45d36845fd06af8255069c3338120c182ed2e8bfd0b56484bec"} Jan 20 17:43:46 crc kubenswrapper[4558]: I0120 17:43:46.065108 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-api-6575ff8c88-kqv4m"] Jan 20 17:43:46 crc kubenswrapper[4558]: I0120 17:43:46.065411 4558 scope.go:117] "RemoveContainer" containerID="7f70312ea849351c5fb0d6a0dd1c2e2239b767de0d5e6cefd7d82f31ec5e5f4b" Jan 20 17:43:46 crc kubenswrapper[4558]: I0120 17:43:46.065652 4558 generic.go:334] "Generic (PLEG): container finished" podID="48a3e4b6-ae17-449f-81f8-e521d7f16b7f" containerID="4c0d28cc228b6021507306e977f98df789f246ebd560ca7ac4eed76b974bca68" exitCode=0 Jan 20 17:43:46 crc kubenswrapper[4558]: I0120 17:43:46.065675 4558 generic.go:334] "Generic (PLEG): container finished" podID="48a3e4b6-ae17-449f-81f8-e521d7f16b7f" containerID="2fe2d6033ceb5c948ed32e5c9455381d57dd67590fa46b232c29a099df3a1f5e" exitCode=0 Jan 20 17:43:46 crc kubenswrapper[4558]: I0120 17:43:46.065706 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"48a3e4b6-ae17-449f-81f8-e521d7f16b7f","Type":"ContainerDied","Data":"4c0d28cc228b6021507306e977f98df789f246ebd560ca7ac4eed76b974bca68"} Jan 20 17:43:46 crc kubenswrapper[4558]: I0120 17:43:46.065735 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"48a3e4b6-ae17-449f-81f8-e521d7f16b7f","Type":"ContainerDied","Data":"2fe2d6033ceb5c948ed32e5c9455381d57dd67590fa46b232c29a099df3a1f5e"} Jan 20 17:43:46 crc kubenswrapper[4558]: I0120 17:43:46.091558 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-api-6575ff8c88-kqv4m"] Jan 20 17:43:46 crc kubenswrapper[4558]: I0120 17:43:46.093246 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:46 crc kubenswrapper[4558]: I0120 17:43:46.100674 4558 scope.go:117] "RemoveContainer" containerID="d7d531af3aef1970f34aacda0d423e7527ba5090f37fc6e8a3ca8f6d6677a41c" Jan 20 17:43:46 crc kubenswrapper[4558]: E0120 17:43:46.102500 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d7d531af3aef1970f34aacda0d423e7527ba5090f37fc6e8a3ca8f6d6677a41c\": container with ID starting with d7d531af3aef1970f34aacda0d423e7527ba5090f37fc6e8a3ca8f6d6677a41c not found: ID does not exist" containerID="d7d531af3aef1970f34aacda0d423e7527ba5090f37fc6e8a3ca8f6d6677a41c" Jan 20 17:43:46 crc kubenswrapper[4558]: I0120 17:43:46.102533 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d7d531af3aef1970f34aacda0d423e7527ba5090f37fc6e8a3ca8f6d6677a41c"} err="failed to get container status \"d7d531af3aef1970f34aacda0d423e7527ba5090f37fc6e8a3ca8f6d6677a41c\": rpc error: code = NotFound desc = could not find container \"d7d531af3aef1970f34aacda0d423e7527ba5090f37fc6e8a3ca8f6d6677a41c\": container with ID starting with d7d531af3aef1970f34aacda0d423e7527ba5090f37fc6e8a3ca8f6d6677a41c not found: ID does not exist" Jan 20 17:43:46 crc kubenswrapper[4558]: I0120 17:43:46.102559 4558 scope.go:117] "RemoveContainer" containerID="7f70312ea849351c5fb0d6a0dd1c2e2239b767de0d5e6cefd7d82f31ec5e5f4b" Jan 20 17:43:46 crc kubenswrapper[4558]: E0120 17:43:46.103118 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7f70312ea849351c5fb0d6a0dd1c2e2239b767de0d5e6cefd7d82f31ec5e5f4b\": container with ID starting with 7f70312ea849351c5fb0d6a0dd1c2e2239b767de0d5e6cefd7d82f31ec5e5f4b not found: ID does not exist" containerID="7f70312ea849351c5fb0d6a0dd1c2e2239b767de0d5e6cefd7d82f31ec5e5f4b" Jan 20 17:43:46 crc kubenswrapper[4558]: I0120 17:43:46.103140 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7f70312ea849351c5fb0d6a0dd1c2e2239b767de0d5e6cefd7d82f31ec5e5f4b"} err="failed to get container status \"7f70312ea849351c5fb0d6a0dd1c2e2239b767de0d5e6cefd7d82f31ec5e5f4b\": rpc error: code = NotFound desc = could not find container \"7f70312ea849351c5fb0d6a0dd1c2e2239b767de0d5e6cefd7d82f31ec5e5f4b\": container with ID starting with 7f70312ea849351c5fb0d6a0dd1c2e2239b767de0d5e6cefd7d82f31ec5e5f4b not found: ID does not exist" Jan 20 17:43:46 crc kubenswrapper[4558]: I0120 17:43:46.185173 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/48a3e4b6-ae17-449f-81f8-e521d7f16b7f-log-httpd\") pod \"48a3e4b6-ae17-449f-81f8-e521d7f16b7f\" (UID: \"48a3e4b6-ae17-449f-81f8-e521d7f16b7f\") " Jan 20 17:43:46 crc kubenswrapper[4558]: I0120 17:43:46.185277 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f6ztb\" (UniqueName: \"kubernetes.io/projected/48a3e4b6-ae17-449f-81f8-e521d7f16b7f-kube-api-access-f6ztb\") pod \"48a3e4b6-ae17-449f-81f8-e521d7f16b7f\" (UID: \"48a3e4b6-ae17-449f-81f8-e521d7f16b7f\") " Jan 20 17:43:46 crc kubenswrapper[4558]: I0120 17:43:46.185338 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/48a3e4b6-ae17-449f-81f8-e521d7f16b7f-config-data\") pod \"48a3e4b6-ae17-449f-81f8-e521d7f16b7f\" (UID: 
\"48a3e4b6-ae17-449f-81f8-e521d7f16b7f\") " Jan 20 17:43:46 crc kubenswrapper[4558]: I0120 17:43:46.185376 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/48a3e4b6-ae17-449f-81f8-e521d7f16b7f-run-httpd\") pod \"48a3e4b6-ae17-449f-81f8-e521d7f16b7f\" (UID: \"48a3e4b6-ae17-449f-81f8-e521d7f16b7f\") " Jan 20 17:43:46 crc kubenswrapper[4558]: I0120 17:43:46.187013 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/48a3e4b6-ae17-449f-81f8-e521d7f16b7f-scripts\") pod \"48a3e4b6-ae17-449f-81f8-e521d7f16b7f\" (UID: \"48a3e4b6-ae17-449f-81f8-e521d7f16b7f\") " Jan 20 17:43:46 crc kubenswrapper[4558]: I0120 17:43:46.187083 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/48a3e4b6-ae17-449f-81f8-e521d7f16b7f-ceilometer-tls-certs\") pod \"48a3e4b6-ae17-449f-81f8-e521d7f16b7f\" (UID: \"48a3e4b6-ae17-449f-81f8-e521d7f16b7f\") " Jan 20 17:43:46 crc kubenswrapper[4558]: I0120 17:43:46.187143 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/48a3e4b6-ae17-449f-81f8-e521d7f16b7f-sg-core-conf-yaml\") pod \"48a3e4b6-ae17-449f-81f8-e521d7f16b7f\" (UID: \"48a3e4b6-ae17-449f-81f8-e521d7f16b7f\") " Jan 20 17:43:46 crc kubenswrapper[4558]: I0120 17:43:46.187216 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48a3e4b6-ae17-449f-81f8-e521d7f16b7f-combined-ca-bundle\") pod \"48a3e4b6-ae17-449f-81f8-e521d7f16b7f\" (UID: \"48a3e4b6-ae17-449f-81f8-e521d7f16b7f\") " Jan 20 17:43:46 crc kubenswrapper[4558]: I0120 17:43:46.189960 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/48a3e4b6-ae17-449f-81f8-e521d7f16b7f-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "48a3e4b6-ae17-449f-81f8-e521d7f16b7f" (UID: "48a3e4b6-ae17-449f-81f8-e521d7f16b7f"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:43:46 crc kubenswrapper[4558]: I0120 17:43:46.194950 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/48a3e4b6-ae17-449f-81f8-e521d7f16b7f-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:46 crc kubenswrapper[4558]: I0120 17:43:46.195841 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/48a3e4b6-ae17-449f-81f8-e521d7f16b7f-kube-api-access-f6ztb" (OuterVolumeSpecName: "kube-api-access-f6ztb") pod "48a3e4b6-ae17-449f-81f8-e521d7f16b7f" (UID: "48a3e4b6-ae17-449f-81f8-e521d7f16b7f"). InnerVolumeSpecName "kube-api-access-f6ztb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:43:46 crc kubenswrapper[4558]: I0120 17:43:46.197629 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/48a3e4b6-ae17-449f-81f8-e521d7f16b7f-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "48a3e4b6-ae17-449f-81f8-e521d7f16b7f" (UID: "48a3e4b6-ae17-449f-81f8-e521d7f16b7f"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:43:46 crc kubenswrapper[4558]: I0120 17:43:46.207015 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/48a3e4b6-ae17-449f-81f8-e521d7f16b7f-scripts" (OuterVolumeSpecName: "scripts") pod "48a3e4b6-ae17-449f-81f8-e521d7f16b7f" (UID: "48a3e4b6-ae17-449f-81f8-e521d7f16b7f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:46 crc kubenswrapper[4558]: I0120 17:43:46.246836 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/48a3e4b6-ae17-449f-81f8-e521d7f16b7f-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "48a3e4b6-ae17-449f-81f8-e521d7f16b7f" (UID: "48a3e4b6-ae17-449f-81f8-e521d7f16b7f"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:46 crc kubenswrapper[4558]: I0120 17:43:46.296775 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/48a3e4b6-ae17-449f-81f8-e521d7f16b7f-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:46 crc kubenswrapper[4558]: I0120 17:43:46.296807 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/48a3e4b6-ae17-449f-81f8-e521d7f16b7f-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:46 crc kubenswrapper[4558]: I0120 17:43:46.296820 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/48a3e4b6-ae17-449f-81f8-e521d7f16b7f-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:46 crc kubenswrapper[4558]: I0120 17:43:46.296832 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f6ztb\" (UniqueName: \"kubernetes.io/projected/48a3e4b6-ae17-449f-81f8-e521d7f16b7f-kube-api-access-f6ztb\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:46 crc kubenswrapper[4558]: I0120 17:43:46.300248 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/48a3e4b6-ae17-449f-81f8-e521d7f16b7f-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "48a3e4b6-ae17-449f-81f8-e521d7f16b7f" (UID: "48a3e4b6-ae17-449f-81f8-e521d7f16b7f"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:46 crc kubenswrapper[4558]: I0120 17:43:46.319230 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/48a3e4b6-ae17-449f-81f8-e521d7f16b7f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "48a3e4b6-ae17-449f-81f8-e521d7f16b7f" (UID: "48a3e4b6-ae17-449f-81f8-e521d7f16b7f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:46 crc kubenswrapper[4558]: I0120 17:43:46.356242 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/48a3e4b6-ae17-449f-81f8-e521d7f16b7f-config-data" (OuterVolumeSpecName: "config-data") pod "48a3e4b6-ae17-449f-81f8-e521d7f16b7f" (UID: "48a3e4b6-ae17-449f-81f8-e521d7f16b7f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:46 crc kubenswrapper[4558]: I0120 17:43:46.399568 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/48a3e4b6-ae17-449f-81f8-e521d7f16b7f-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:46 crc kubenswrapper[4558]: I0120 17:43:46.399610 4558 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/48a3e4b6-ae17-449f-81f8-e521d7f16b7f-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:46 crc kubenswrapper[4558]: I0120 17:43:46.399625 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48a3e4b6-ae17-449f-81f8-e521d7f16b7f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:46 crc kubenswrapper[4558]: I0120 17:43:46.590379 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b544295f-900d-4bff-adb5-fe0aaf262026" path="/var/lib/kubelet/pods/b544295f-900d-4bff-adb5-fe0aaf262026/volumes" Jan 20 17:43:46 crc kubenswrapper[4558]: I0120 17:43:46.592257 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f6d8d5b2-af92-43f7-ad6f-74ac3121776c" path="/var/lib/kubelet/pods/f6d8d5b2-af92-43f7-ad6f-74ac3121776c/volumes" Jan 20 17:43:47 crc kubenswrapper[4558]: I0120 17:43:47.078995 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"f87490e8-557d-41e3-a07b-8fe12147b315","Type":"ContainerStarted","Data":"72f0e46d318dc78a9e49d3aa7f3c509c43a243da567f0f20b70c09a82e812667"} Jan 20 17:43:47 crc kubenswrapper[4558]: I0120 17:43:47.081779 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"131735a5-8043-40d7-a15d-f0024356e584","Type":"ContainerStarted","Data":"63bcb2a3acf1f845b98608c468529b05f04d46a72a3434b91049daf4e38d78b3"} Jan 20 17:43:47 crc kubenswrapper[4558]: I0120 17:43:47.082359 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:43:47 crc kubenswrapper[4558]: I0120 17:43:47.084354 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"56173817-8246-4f37-b157-3890912004ca","Type":"ContainerStarted","Data":"26fcabccd365630b0df7a2013b030db7a8a52ed18fb890da73b7a737f80da0ba"} Jan 20 17:43:47 crc kubenswrapper[4558]: I0120 17:43:47.087112 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"48a3e4b6-ae17-449f-81f8-e521d7f16b7f","Type":"ContainerDied","Data":"2721b2581c108773c46b6cc13a2582297ea62b63b7e3b16a3dafaae823677fbe"} Jan 20 17:43:47 crc kubenswrapper[4558]: I0120 17:43:47.087148 4558 scope.go:117] "RemoveContainer" containerID="1f580fae1864e3169dd64566d04d693dc377a53c846267d9e12639cffaa324e9" Jan 20 17:43:47 crc kubenswrapper[4558]: I0120 17:43:47.087282 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:47 crc kubenswrapper[4558]: I0120 17:43:47.103887 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-internal-api-0" podStartSLOduration=4.103870087 podStartE2EDuration="4.103870087s" podCreationTimestamp="2026-01-20 17:43:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:43:47.102751434 +0000 UTC m=+3720.863089401" watchObservedRunningTime="2026-01-20 17:43:47.103870087 +0000 UTC m=+3720.864208054" Jan 20 17:43:47 crc kubenswrapper[4558]: I0120 17:43:47.126702 4558 scope.go:117] "RemoveContainer" containerID="6291ffd1b39b779da16b44b8132159644811a98d6ee34190d04a465d8cafa3e8" Jan 20 17:43:47 crc kubenswrapper[4558]: I0120 17:43:47.126990 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:43:47 crc kubenswrapper[4558]: I0120 17:43:47.141117 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/memcached-0" podStartSLOduration=3.141098355 podStartE2EDuration="3.141098355s" podCreationTimestamp="2026-01-20 17:43:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:43:47.126840598 +0000 UTC m=+3720.887178565" watchObservedRunningTime="2026-01-20 17:43:47.141098355 +0000 UTC m=+3720.901436323" Jan 20 17:43:47 crc kubenswrapper[4558]: I0120 17:43:47.142590 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:43:47 crc kubenswrapper[4558]: I0120 17:43:47.161370 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:43:47 crc kubenswrapper[4558]: E0120 17:43:47.161759 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6d8d5b2-af92-43f7-ad6f-74ac3121776c" containerName="barbican-api" Jan 20 17:43:47 crc kubenswrapper[4558]: I0120 17:43:47.161776 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6d8d5b2-af92-43f7-ad6f-74ac3121776c" containerName="barbican-api" Jan 20 17:43:47 crc kubenswrapper[4558]: E0120 17:43:47.161806 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6d8d5b2-af92-43f7-ad6f-74ac3121776c" containerName="barbican-api-log" Jan 20 17:43:47 crc kubenswrapper[4558]: I0120 17:43:47.161813 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6d8d5b2-af92-43f7-ad6f-74ac3121776c" containerName="barbican-api-log" Jan 20 17:43:47 crc kubenswrapper[4558]: E0120 17:43:47.161822 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48a3e4b6-ae17-449f-81f8-e521d7f16b7f" containerName="ceilometer-central-agent" Jan 20 17:43:47 crc kubenswrapper[4558]: I0120 17:43:47.161828 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="48a3e4b6-ae17-449f-81f8-e521d7f16b7f" containerName="ceilometer-central-agent" Jan 20 17:43:47 crc kubenswrapper[4558]: E0120 17:43:47.161843 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48a3e4b6-ae17-449f-81f8-e521d7f16b7f" containerName="proxy-httpd" Jan 20 17:43:47 crc kubenswrapper[4558]: I0120 17:43:47.161849 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="48a3e4b6-ae17-449f-81f8-e521d7f16b7f" containerName="proxy-httpd" Jan 20 17:43:47 crc kubenswrapper[4558]: E0120 17:43:47.161858 4558 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="48a3e4b6-ae17-449f-81f8-e521d7f16b7f" containerName="sg-core" Jan 20 17:43:47 crc kubenswrapper[4558]: I0120 17:43:47.161863 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="48a3e4b6-ae17-449f-81f8-e521d7f16b7f" containerName="sg-core" Jan 20 17:43:47 crc kubenswrapper[4558]: E0120 17:43:47.161876 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48a3e4b6-ae17-449f-81f8-e521d7f16b7f" containerName="ceilometer-notification-agent" Jan 20 17:43:47 crc kubenswrapper[4558]: I0120 17:43:47.161881 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="48a3e4b6-ae17-449f-81f8-e521d7f16b7f" containerName="ceilometer-notification-agent" Jan 20 17:43:47 crc kubenswrapper[4558]: I0120 17:43:47.162041 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f6d8d5b2-af92-43f7-ad6f-74ac3121776c" containerName="barbican-api-log" Jan 20 17:43:47 crc kubenswrapper[4558]: I0120 17:43:47.162054 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="48a3e4b6-ae17-449f-81f8-e521d7f16b7f" containerName="ceilometer-central-agent" Jan 20 17:43:47 crc kubenswrapper[4558]: I0120 17:43:47.162062 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="48a3e4b6-ae17-449f-81f8-e521d7f16b7f" containerName="proxy-httpd" Jan 20 17:43:47 crc kubenswrapper[4558]: I0120 17:43:47.162078 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="48a3e4b6-ae17-449f-81f8-e521d7f16b7f" containerName="sg-core" Jan 20 17:43:47 crc kubenswrapper[4558]: I0120 17:43:47.162085 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f6d8d5b2-af92-43f7-ad6f-74ac3121776c" containerName="barbican-api" Jan 20 17:43:47 crc kubenswrapper[4558]: I0120 17:43:47.162093 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="48a3e4b6-ae17-449f-81f8-e521d7f16b7f" containerName="ceilometer-notification-agent" Jan 20 17:43:47 crc kubenswrapper[4558]: I0120 17:43:47.168681 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:47 crc kubenswrapper[4558]: I0120 17:43:47.172768 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:43:47 crc kubenswrapper[4558]: I0120 17:43:47.173014 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ceilometer-internal-svc" Jan 20 17:43:47 crc kubenswrapper[4558]: I0120 17:43:47.173156 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:43:47 crc kubenswrapper[4558]: I0120 17:43:47.181794 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:43:47 crc kubenswrapper[4558]: I0120 17:43:47.184348 4558 scope.go:117] "RemoveContainer" containerID="4c0d28cc228b6021507306e977f98df789f246ebd560ca7ac4eed76b974bca68" Jan 20 17:43:47 crc kubenswrapper[4558]: I0120 17:43:47.214421 4558 scope.go:117] "RemoveContainer" containerID="2fe2d6033ceb5c948ed32e5c9455381d57dd67590fa46b232c29a099df3a1f5e" Jan 20 17:43:47 crc kubenswrapper[4558]: I0120 17:43:47.222631 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-external-api-0" podStartSLOduration=4.222606549 podStartE2EDuration="4.222606549s" podCreationTimestamp="2026-01-20 17:43:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:43:47.156560177 +0000 UTC m=+3720.916898143" watchObservedRunningTime="2026-01-20 17:43:47.222606549 +0000 UTC m=+3720.982944517" Jan 20 17:43:47 crc kubenswrapper[4558]: I0120 17:43:47.324620 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/046d6bef-b034-4093-9a77-f23074beaf20-config-data\") pod \"ceilometer-0\" (UID: \"046d6bef-b034-4093-9a77-f23074beaf20\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:47 crc kubenswrapper[4558]: I0120 17:43:47.324668 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/046d6bef-b034-4093-9a77-f23074beaf20-log-httpd\") pod \"ceilometer-0\" (UID: \"046d6bef-b034-4093-9a77-f23074beaf20\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:47 crc kubenswrapper[4558]: I0120 17:43:47.324706 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xgqsl\" (UniqueName: \"kubernetes.io/projected/046d6bef-b034-4093-9a77-f23074beaf20-kube-api-access-xgqsl\") pod \"ceilometer-0\" (UID: \"046d6bef-b034-4093-9a77-f23074beaf20\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:47 crc kubenswrapper[4558]: I0120 17:43:47.324845 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/046d6bef-b034-4093-9a77-f23074beaf20-run-httpd\") pod \"ceilometer-0\" (UID: \"046d6bef-b034-4093-9a77-f23074beaf20\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:47 crc kubenswrapper[4558]: I0120 17:43:47.324934 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/046d6bef-b034-4093-9a77-f23074beaf20-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: 
\"046d6bef-b034-4093-9a77-f23074beaf20\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:47 crc kubenswrapper[4558]: I0120 17:43:47.324992 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/046d6bef-b034-4093-9a77-f23074beaf20-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"046d6bef-b034-4093-9a77-f23074beaf20\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:47 crc kubenswrapper[4558]: I0120 17:43:47.325057 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/046d6bef-b034-4093-9a77-f23074beaf20-scripts\") pod \"ceilometer-0\" (UID: \"046d6bef-b034-4093-9a77-f23074beaf20\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:47 crc kubenswrapper[4558]: I0120 17:43:47.325126 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/046d6bef-b034-4093-9a77-f23074beaf20-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"046d6bef-b034-4093-9a77-f23074beaf20\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:47 crc kubenswrapper[4558]: I0120 17:43:47.341298 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:43:47 crc kubenswrapper[4558]: I0120 17:43:47.427625 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/046d6bef-b034-4093-9a77-f23074beaf20-config-data\") pod \"ceilometer-0\" (UID: \"046d6bef-b034-4093-9a77-f23074beaf20\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:47 crc kubenswrapper[4558]: I0120 17:43:47.427673 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/046d6bef-b034-4093-9a77-f23074beaf20-log-httpd\") pod \"ceilometer-0\" (UID: \"046d6bef-b034-4093-9a77-f23074beaf20\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:47 crc kubenswrapper[4558]: I0120 17:43:47.427752 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xgqsl\" (UniqueName: \"kubernetes.io/projected/046d6bef-b034-4093-9a77-f23074beaf20-kube-api-access-xgqsl\") pod \"ceilometer-0\" (UID: \"046d6bef-b034-4093-9a77-f23074beaf20\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:47 crc kubenswrapper[4558]: I0120 17:43:47.427917 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/046d6bef-b034-4093-9a77-f23074beaf20-run-httpd\") pod \"ceilometer-0\" (UID: \"046d6bef-b034-4093-9a77-f23074beaf20\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:47 crc kubenswrapper[4558]: I0120 17:43:47.427969 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/046d6bef-b034-4093-9a77-f23074beaf20-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"046d6bef-b034-4093-9a77-f23074beaf20\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:47 crc kubenswrapper[4558]: I0120 17:43:47.427989 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/046d6bef-b034-4093-9a77-f23074beaf20-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: 
\"046d6bef-b034-4093-9a77-f23074beaf20\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:47 crc kubenswrapper[4558]: I0120 17:43:47.428050 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/046d6bef-b034-4093-9a77-f23074beaf20-scripts\") pod \"ceilometer-0\" (UID: \"046d6bef-b034-4093-9a77-f23074beaf20\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:47 crc kubenswrapper[4558]: I0120 17:43:47.428095 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/046d6bef-b034-4093-9a77-f23074beaf20-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"046d6bef-b034-4093-9a77-f23074beaf20\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:47 crc kubenswrapper[4558]: I0120 17:43:47.428639 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/046d6bef-b034-4093-9a77-f23074beaf20-run-httpd\") pod \"ceilometer-0\" (UID: \"046d6bef-b034-4093-9a77-f23074beaf20\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:47 crc kubenswrapper[4558]: I0120 17:43:47.428904 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/046d6bef-b034-4093-9a77-f23074beaf20-log-httpd\") pod \"ceilometer-0\" (UID: \"046d6bef-b034-4093-9a77-f23074beaf20\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:47 crc kubenswrapper[4558]: I0120 17:43:47.436996 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/046d6bef-b034-4093-9a77-f23074beaf20-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"046d6bef-b034-4093-9a77-f23074beaf20\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:47 crc kubenswrapper[4558]: I0120 17:43:47.442044 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/046d6bef-b034-4093-9a77-f23074beaf20-config-data\") pod \"ceilometer-0\" (UID: \"046d6bef-b034-4093-9a77-f23074beaf20\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:47 crc kubenswrapper[4558]: I0120 17:43:47.442636 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/046d6bef-b034-4093-9a77-f23074beaf20-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"046d6bef-b034-4093-9a77-f23074beaf20\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:47 crc kubenswrapper[4558]: I0120 17:43:47.442643 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/046d6bef-b034-4093-9a77-f23074beaf20-scripts\") pod \"ceilometer-0\" (UID: \"046d6bef-b034-4093-9a77-f23074beaf20\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:47 crc kubenswrapper[4558]: I0120 17:43:47.467807 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/046d6bef-b034-4093-9a77-f23074beaf20-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"046d6bef-b034-4093-9a77-f23074beaf20\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:47 crc kubenswrapper[4558]: I0120 17:43:47.497270 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xgqsl\" (UniqueName: 
\"kubernetes.io/projected/046d6bef-b034-4093-9a77-f23074beaf20-kube-api-access-xgqsl\") pod \"ceilometer-0\" (UID: \"046d6bef-b034-4093-9a77-f23074beaf20\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:47 crc kubenswrapper[4558]: I0120 17:43:47.795275 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:47 crc kubenswrapper[4558]: I0120 17:43:47.827334 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/cinder-api-0" podUID="56d39f72-48d3-4690-a20a-099cb41daa7e" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.1.32:8776/healthcheck\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Jan 20 17:43:48 crc kubenswrapper[4558]: I0120 17:43:48.208627 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:43:48 crc kubenswrapper[4558]: W0120 17:43:48.220729 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod046d6bef_b034_4093_9a77_f23074beaf20.slice/crio-8022a11c8ceeb8f4996ac1e42c93e327ff75465dbfaaa8ae9b6589d7405a0803 WatchSource:0}: Error finding container 8022a11c8ceeb8f4996ac1e42c93e327ff75465dbfaaa8ae9b6589d7405a0803: Status 404 returned error can't find the container with id 8022a11c8ceeb8f4996ac1e42c93e327ff75465dbfaaa8ae9b6589d7405a0803 Jan 20 17:43:48 crc kubenswrapper[4558]: I0120 17:43:48.576479 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="48a3e4b6-ae17-449f-81f8-e521d7f16b7f" path="/var/lib/kubelet/pods/48a3e4b6-ae17-449f-81f8-e521d7f16b7f/volumes" Jan 20 17:43:48 crc kubenswrapper[4558]: I0120 17:43:48.599462 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-685599669b-nv6cc" Jan 20 17:43:48 crc kubenswrapper[4558]: I0120 17:43:48.636607 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-5676986846-92lbc" Jan 20 17:43:48 crc kubenswrapper[4558]: I0120 17:43:48.662692 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/33f7229d-bcae-434a-805e-530801e79b20-internal-tls-certs\") pod \"33f7229d-bcae-434a-805e-530801e79b20\" (UID: \"33f7229d-bcae-434a-805e-530801e79b20\") " Jan 20 17:43:48 crc kubenswrapper[4558]: I0120 17:43:48.662739 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33f7229d-bcae-434a-805e-530801e79b20-combined-ca-bundle\") pod \"33f7229d-bcae-434a-805e-530801e79b20\" (UID: \"33f7229d-bcae-434a-805e-530801e79b20\") " Jan 20 17:43:48 crc kubenswrapper[4558]: I0120 17:43:48.662862 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/33f7229d-bcae-434a-805e-530801e79b20-credential-keys\") pod \"33f7229d-bcae-434a-805e-530801e79b20\" (UID: \"33f7229d-bcae-434a-805e-530801e79b20\") " Jan 20 17:43:48 crc kubenswrapper[4558]: I0120 17:43:48.662940 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xxgml\" (UniqueName: \"kubernetes.io/projected/33f7229d-bcae-434a-805e-530801e79b20-kube-api-access-xxgml\") pod \"33f7229d-bcae-434a-805e-530801e79b20\" (UID: \"33f7229d-bcae-434a-805e-530801e79b20\") " Jan 20 17:43:48 crc kubenswrapper[4558]: I0120 17:43:48.663047 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/33f7229d-bcae-434a-805e-530801e79b20-fernet-keys\") pod \"33f7229d-bcae-434a-805e-530801e79b20\" (UID: \"33f7229d-bcae-434a-805e-530801e79b20\") " Jan 20 17:43:48 crc kubenswrapper[4558]: I0120 17:43:48.663104 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/33f7229d-bcae-434a-805e-530801e79b20-public-tls-certs\") pod \"33f7229d-bcae-434a-805e-530801e79b20\" (UID: \"33f7229d-bcae-434a-805e-530801e79b20\") " Jan 20 17:43:48 crc kubenswrapper[4558]: I0120 17:43:48.663135 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/33f7229d-bcae-434a-805e-530801e79b20-config-data\") pod \"33f7229d-bcae-434a-805e-530801e79b20\" (UID: \"33f7229d-bcae-434a-805e-530801e79b20\") " Jan 20 17:43:48 crc kubenswrapper[4558]: I0120 17:43:48.663244 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/33f7229d-bcae-434a-805e-530801e79b20-scripts\") pod \"33f7229d-bcae-434a-805e-530801e79b20\" (UID: \"33f7229d-bcae-434a-805e-530801e79b20\") " Jan 20 17:43:48 crc kubenswrapper[4558]: I0120 17:43:48.677769 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/33f7229d-bcae-434a-805e-530801e79b20-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "33f7229d-bcae-434a-805e-530801e79b20" (UID: "33f7229d-bcae-434a-805e-530801e79b20"). InnerVolumeSpecName "fernet-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:48 crc kubenswrapper[4558]: I0120 17:43:48.677796 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/33f7229d-bcae-434a-805e-530801e79b20-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "33f7229d-bcae-434a-805e-530801e79b20" (UID: "33f7229d-bcae-434a-805e-530801e79b20"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:48 crc kubenswrapper[4558]: I0120 17:43:48.679035 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/33f7229d-bcae-434a-805e-530801e79b20-kube-api-access-xxgml" (OuterVolumeSpecName: "kube-api-access-xxgml") pod "33f7229d-bcae-434a-805e-530801e79b20" (UID: "33f7229d-bcae-434a-805e-530801e79b20"). InnerVolumeSpecName "kube-api-access-xxgml". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:43:48 crc kubenswrapper[4558]: I0120 17:43:48.679256 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/33f7229d-bcae-434a-805e-530801e79b20-scripts" (OuterVolumeSpecName: "scripts") pod "33f7229d-bcae-434a-805e-530801e79b20" (UID: "33f7229d-bcae-434a-805e-530801e79b20"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:48 crc kubenswrapper[4558]: I0120 17:43:48.699287 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/33f7229d-bcae-434a-805e-530801e79b20-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "33f7229d-bcae-434a-805e-530801e79b20" (UID: "33f7229d-bcae-434a-805e-530801e79b20"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:48 crc kubenswrapper[4558]: I0120 17:43:48.717459 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/33f7229d-bcae-434a-805e-530801e79b20-config-data" (OuterVolumeSpecName: "config-data") pod "33f7229d-bcae-434a-805e-530801e79b20" (UID: "33f7229d-bcae-434a-805e-530801e79b20"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:48 crc kubenswrapper[4558]: I0120 17:43:48.724810 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/33f7229d-bcae-434a-805e-530801e79b20-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "33f7229d-bcae-434a-805e-530801e79b20" (UID: "33f7229d-bcae-434a-805e-530801e79b20"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:48 crc kubenswrapper[4558]: I0120 17:43:48.729864 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/33f7229d-bcae-434a-805e-530801e79b20-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "33f7229d-bcae-434a-805e-530801e79b20" (UID: "33f7229d-bcae-434a-805e-530801e79b20"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:48 crc kubenswrapper[4558]: I0120 17:43:48.766859 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4cpgm\" (UniqueName: \"kubernetes.io/projected/8877ab44-638b-4880-97fb-305726a5c1a6-kube-api-access-4cpgm\") pod \"8877ab44-638b-4880-97fb-305726a5c1a6\" (UID: \"8877ab44-638b-4880-97fb-305726a5c1a6\") " Jan 20 17:43:48 crc kubenswrapper[4558]: I0120 17:43:48.766936 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8877ab44-638b-4880-97fb-305726a5c1a6-public-tls-certs\") pod \"8877ab44-638b-4880-97fb-305726a5c1a6\" (UID: \"8877ab44-638b-4880-97fb-305726a5c1a6\") " Jan 20 17:43:48 crc kubenswrapper[4558]: I0120 17:43:48.767122 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/8877ab44-638b-4880-97fb-305726a5c1a6-config\") pod \"8877ab44-638b-4880-97fb-305726a5c1a6\" (UID: \"8877ab44-638b-4880-97fb-305726a5c1a6\") " Jan 20 17:43:48 crc kubenswrapper[4558]: I0120 17:43:48.767190 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8877ab44-638b-4880-97fb-305726a5c1a6-combined-ca-bundle\") pod \"8877ab44-638b-4880-97fb-305726a5c1a6\" (UID: \"8877ab44-638b-4880-97fb-305726a5c1a6\") " Jan 20 17:43:48 crc kubenswrapper[4558]: I0120 17:43:48.767279 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/8877ab44-638b-4880-97fb-305726a5c1a6-httpd-config\") pod \"8877ab44-638b-4880-97fb-305726a5c1a6\" (UID: \"8877ab44-638b-4880-97fb-305726a5c1a6\") " Jan 20 17:43:48 crc kubenswrapper[4558]: I0120 17:43:48.767340 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8877ab44-638b-4880-97fb-305726a5c1a6-internal-tls-certs\") pod \"8877ab44-638b-4880-97fb-305726a5c1a6\" (UID: \"8877ab44-638b-4880-97fb-305726a5c1a6\") " Jan 20 17:43:48 crc kubenswrapper[4558]: I0120 17:43:48.767389 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/8877ab44-638b-4880-97fb-305726a5c1a6-ovndb-tls-certs\") pod \"8877ab44-638b-4880-97fb-305726a5c1a6\" (UID: \"8877ab44-638b-4880-97fb-305726a5c1a6\") " Jan 20 17:43:48 crc kubenswrapper[4558]: I0120 17:43:48.768267 4558 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/33f7229d-bcae-434a-805e-530801e79b20-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:48 crc kubenswrapper[4558]: I0120 17:43:48.768284 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xxgml\" (UniqueName: \"kubernetes.io/projected/33f7229d-bcae-434a-805e-530801e79b20-kube-api-access-xxgml\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:48 crc kubenswrapper[4558]: I0120 17:43:48.768296 4558 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/33f7229d-bcae-434a-805e-530801e79b20-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:48 crc kubenswrapper[4558]: I0120 17:43:48.768305 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/33f7229d-bcae-434a-805e-530801e79b20-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:48 crc kubenswrapper[4558]: I0120 17:43:48.768314 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/33f7229d-bcae-434a-805e-530801e79b20-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:48 crc kubenswrapper[4558]: I0120 17:43:48.768322 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/33f7229d-bcae-434a-805e-530801e79b20-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:48 crc kubenswrapper[4558]: I0120 17:43:48.768332 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/33f7229d-bcae-434a-805e-530801e79b20-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:48 crc kubenswrapper[4558]: I0120 17:43:48.768344 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33f7229d-bcae-434a-805e-530801e79b20-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:48 crc kubenswrapper[4558]: I0120 17:43:48.771038 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8877ab44-638b-4880-97fb-305726a5c1a6-kube-api-access-4cpgm" (OuterVolumeSpecName: "kube-api-access-4cpgm") pod "8877ab44-638b-4880-97fb-305726a5c1a6" (UID: "8877ab44-638b-4880-97fb-305726a5c1a6"). InnerVolumeSpecName "kube-api-access-4cpgm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:43:48 crc kubenswrapper[4558]: I0120 17:43:48.772609 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8877ab44-638b-4880-97fb-305726a5c1a6-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "8877ab44-638b-4880-97fb-305726a5c1a6" (UID: "8877ab44-638b-4880-97fb-305726a5c1a6"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:48 crc kubenswrapper[4558]: I0120 17:43:48.812250 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8877ab44-638b-4880-97fb-305726a5c1a6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8877ab44-638b-4880-97fb-305726a5c1a6" (UID: "8877ab44-638b-4880-97fb-305726a5c1a6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:48 crc kubenswrapper[4558]: I0120 17:43:48.822704 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8877ab44-638b-4880-97fb-305726a5c1a6-config" (OuterVolumeSpecName: "config") pod "8877ab44-638b-4880-97fb-305726a5c1a6" (UID: "8877ab44-638b-4880-97fb-305726a5c1a6"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:48 crc kubenswrapper[4558]: I0120 17:43:48.824946 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8877ab44-638b-4880-97fb-305726a5c1a6-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "8877ab44-638b-4880-97fb-305726a5c1a6" (UID: "8877ab44-638b-4880-97fb-305726a5c1a6"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:48 crc kubenswrapper[4558]: I0120 17:43:48.838827 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8877ab44-638b-4880-97fb-305726a5c1a6-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "8877ab44-638b-4880-97fb-305726a5c1a6" (UID: "8877ab44-638b-4880-97fb-305726a5c1a6"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:48 crc kubenswrapper[4558]: I0120 17:43:48.843372 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8877ab44-638b-4880-97fb-305726a5c1a6-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "8877ab44-638b-4880-97fb-305726a5c1a6" (UID: "8877ab44-638b-4880-97fb-305726a5c1a6"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:48 crc kubenswrapper[4558]: I0120 17:43:48.870304 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8877ab44-638b-4880-97fb-305726a5c1a6-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:48 crc kubenswrapper[4558]: I0120 17:43:48.870391 4558 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/8877ab44-638b-4880-97fb-305726a5c1a6-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:48 crc kubenswrapper[4558]: I0120 17:43:48.870452 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4cpgm\" (UniqueName: \"kubernetes.io/projected/8877ab44-638b-4880-97fb-305726a5c1a6-kube-api-access-4cpgm\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:48 crc kubenswrapper[4558]: I0120 17:43:48.870508 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8877ab44-638b-4880-97fb-305726a5c1a6-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:48 crc kubenswrapper[4558]: I0120 17:43:48.870557 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/8877ab44-638b-4880-97fb-305726a5c1a6-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:48 crc kubenswrapper[4558]: I0120 17:43:48.870622 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8877ab44-638b-4880-97fb-305726a5c1a6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:48 crc kubenswrapper[4558]: I0120 17:43:48.870679 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/8877ab44-638b-4880-97fb-305726a5c1a6-httpd-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:49 crc kubenswrapper[4558]: I0120 17:43:49.122586 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"046d6bef-b034-4093-9a77-f23074beaf20","Type":"ContainerStarted","Data":"77777fba94586efb6610ba10279160fb83ea9a44be2ecca14f8d05427010707c"} Jan 20 17:43:49 crc kubenswrapper[4558]: I0120 17:43:49.122636 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"046d6bef-b034-4093-9a77-f23074beaf20","Type":"ContainerStarted","Data":"8022a11c8ceeb8f4996ac1e42c93e327ff75465dbfaaa8ae9b6589d7405a0803"} Jan 20 17:43:49 crc kubenswrapper[4558]: I0120 17:43:49.124641 4558 generic.go:334] "Generic (PLEG): container finished" 
podID="8877ab44-638b-4880-97fb-305726a5c1a6" containerID="418895b26e5117800f2a53dae040b43bb1b03510640c0a1a0ca6b67577a9df79" exitCode=0 Jan 20 17:43:49 crc kubenswrapper[4558]: I0120 17:43:49.124713 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-5676986846-92lbc" Jan 20 17:43:49 crc kubenswrapper[4558]: I0120 17:43:49.124732 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-5676986846-92lbc" event={"ID":"8877ab44-638b-4880-97fb-305726a5c1a6","Type":"ContainerDied","Data":"418895b26e5117800f2a53dae040b43bb1b03510640c0a1a0ca6b67577a9df79"} Jan 20 17:43:49 crc kubenswrapper[4558]: I0120 17:43:49.124767 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-5676986846-92lbc" event={"ID":"8877ab44-638b-4880-97fb-305726a5c1a6","Type":"ContainerDied","Data":"998da092b0ef60bbbaa87e0a678e6c3a3d5f8713be4b0cb67d0a9a3e1a65c932"} Jan 20 17:43:49 crc kubenswrapper[4558]: I0120 17:43:49.124784 4558 scope.go:117] "RemoveContainer" containerID="99d260241a6e2248384349724b544d3292e8cbd5470d0de4208392622b91d851" Jan 20 17:43:49 crc kubenswrapper[4558]: I0120 17:43:49.126139 4558 generic.go:334] "Generic (PLEG): container finished" podID="33f7229d-bcae-434a-805e-530801e79b20" containerID="d3f01f262533ef464619db0861fecd0a8366a9c544f5d180d20689cd0e292a37" exitCode=0 Jan 20 17:43:49 crc kubenswrapper[4558]: I0120 17:43:49.126193 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-685599669b-nv6cc" event={"ID":"33f7229d-bcae-434a-805e-530801e79b20","Type":"ContainerDied","Data":"d3f01f262533ef464619db0861fecd0a8366a9c544f5d180d20689cd0e292a37"} Jan 20 17:43:49 crc kubenswrapper[4558]: I0120 17:43:49.126240 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-685599669b-nv6cc" event={"ID":"33f7229d-bcae-434a-805e-530801e79b20","Type":"ContainerDied","Data":"635027f6932f791a8f9f3dd8d9ed7cf74692fd163bf3a523455b64501df6fdc7"} Jan 20 17:43:49 crc kubenswrapper[4558]: I0120 17:43:49.126366 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-685599669b-nv6cc" Jan 20 17:43:49 crc kubenswrapper[4558]: I0120 17:43:49.196132 4558 scope.go:117] "RemoveContainer" containerID="418895b26e5117800f2a53dae040b43bb1b03510640c0a1a0ca6b67577a9df79" Jan 20 17:43:49 crc kubenswrapper[4558]: I0120 17:43:49.203556 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-5676986846-92lbc"] Jan 20 17:43:49 crc kubenswrapper[4558]: I0120 17:43:49.215158 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-5676986846-92lbc"] Jan 20 17:43:49 crc kubenswrapper[4558]: I0120 17:43:49.219667 4558 scope.go:117] "RemoveContainer" containerID="99d260241a6e2248384349724b544d3292e8cbd5470d0de4208392622b91d851" Jan 20 17:43:49 crc kubenswrapper[4558]: E0120 17:43:49.220135 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"99d260241a6e2248384349724b544d3292e8cbd5470d0de4208392622b91d851\": container with ID starting with 99d260241a6e2248384349724b544d3292e8cbd5470d0de4208392622b91d851 not found: ID does not exist" containerID="99d260241a6e2248384349724b544d3292e8cbd5470d0de4208392622b91d851" Jan 20 17:43:49 crc kubenswrapper[4558]: I0120 17:43:49.220252 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"99d260241a6e2248384349724b544d3292e8cbd5470d0de4208392622b91d851"} err="failed to get container status \"99d260241a6e2248384349724b544d3292e8cbd5470d0de4208392622b91d851\": rpc error: code = NotFound desc = could not find container \"99d260241a6e2248384349724b544d3292e8cbd5470d0de4208392622b91d851\": container with ID starting with 99d260241a6e2248384349724b544d3292e8cbd5470d0de4208392622b91d851 not found: ID does not exist" Jan 20 17:43:49 crc kubenswrapper[4558]: I0120 17:43:49.220328 4558 scope.go:117] "RemoveContainer" containerID="418895b26e5117800f2a53dae040b43bb1b03510640c0a1a0ca6b67577a9df79" Jan 20 17:43:49 crc kubenswrapper[4558]: I0120 17:43:49.220330 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-685599669b-nv6cc"] Jan 20 17:43:49 crc kubenswrapper[4558]: E0120 17:43:49.220803 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"418895b26e5117800f2a53dae040b43bb1b03510640c0a1a0ca6b67577a9df79\": container with ID starting with 418895b26e5117800f2a53dae040b43bb1b03510640c0a1a0ca6b67577a9df79 not found: ID does not exist" containerID="418895b26e5117800f2a53dae040b43bb1b03510640c0a1a0ca6b67577a9df79" Jan 20 17:43:49 crc kubenswrapper[4558]: I0120 17:43:49.220882 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"418895b26e5117800f2a53dae040b43bb1b03510640c0a1a0ca6b67577a9df79"} err="failed to get container status \"418895b26e5117800f2a53dae040b43bb1b03510640c0a1a0ca6b67577a9df79\": rpc error: code = NotFound desc = could not find container \"418895b26e5117800f2a53dae040b43bb1b03510640c0a1a0ca6b67577a9df79\": container with ID starting with 418895b26e5117800f2a53dae040b43bb1b03510640c0a1a0ca6b67577a9df79 not found: ID does not exist" Jan 20 17:43:49 crc kubenswrapper[4558]: I0120 17:43:49.220961 4558 scope.go:117] "RemoveContainer" containerID="d3f01f262533ef464619db0861fecd0a8366a9c544f5d180d20689cd0e292a37" Jan 20 17:43:49 crc kubenswrapper[4558]: I0120 17:43:49.227249 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack-kuttl-tests/keystone-685599669b-nv6cc"] Jan 20 17:43:49 crc kubenswrapper[4558]: I0120 17:43:49.241849 4558 scope.go:117] "RemoveContainer" containerID="d3f01f262533ef464619db0861fecd0a8366a9c544f5d180d20689cd0e292a37" Jan 20 17:43:49 crc kubenswrapper[4558]: E0120 17:43:49.242465 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d3f01f262533ef464619db0861fecd0a8366a9c544f5d180d20689cd0e292a37\": container with ID starting with d3f01f262533ef464619db0861fecd0a8366a9c544f5d180d20689cd0e292a37 not found: ID does not exist" containerID="d3f01f262533ef464619db0861fecd0a8366a9c544f5d180d20689cd0e292a37" Jan 20 17:43:49 crc kubenswrapper[4558]: I0120 17:43:49.242516 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d3f01f262533ef464619db0861fecd0a8366a9c544f5d180d20689cd0e292a37"} err="failed to get container status \"d3f01f262533ef464619db0861fecd0a8366a9c544f5d180d20689cd0e292a37\": rpc error: code = NotFound desc = could not find container \"d3f01f262533ef464619db0861fecd0a8366a9c544f5d180d20689cd0e292a37\": container with ID starting with d3f01f262533ef464619db0861fecd0a8366a9c544f5d180d20689cd0e292a37 not found: ID does not exist" Jan 20 17:43:49 crc kubenswrapper[4558]: I0120 17:43:49.389345 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/cinder-scheduler-0" podUID="7faabe16-9de5-49dc-bab6-44e173f4403c" containerName="cinder-scheduler" probeResult="failure" output="Get \"http://10.217.1.38:8080/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 20 17:43:49 crc kubenswrapper[4558]: I0120 17:43:49.887508 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-976c48458-wzswq"] Jan 20 17:43:49 crc kubenswrapper[4558]: I0120 17:43:49.888225 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/placement-976c48458-wzswq" podUID="b6ba5f2a-6486-4a84-bf85-23c00e907701" containerName="placement-log" containerID="cri-o://793b7e60b027092ef8139183c3e6dd4b54469caf6cf02f94955f48fe3bbfbd9f" gracePeriod=30 Jan 20 17:43:49 crc kubenswrapper[4558]: I0120 17:43:49.888335 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/placement-976c48458-wzswq" podUID="b6ba5f2a-6486-4a84-bf85-23c00e907701" containerName="placement-api" containerID="cri-o://76ef1a400330d7e58be6b4ac9281c05593aa3360863f0b8430f1369f13006b6c" gracePeriod=30 Jan 20 17:43:49 crc kubenswrapper[4558]: I0120 17:43:49.914815 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/placement-976c48458-wzswq" podUID="b6ba5f2a-6486-4a84-bf85-23c00e907701" containerName="placement-log" probeResult="failure" output="Get \"https://10.217.1.29:8778/\": EOF" Jan 20 17:43:49 crc kubenswrapper[4558]: I0120 17:43:49.915462 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/placement-976c48458-wzswq" podUID="b6ba5f2a-6486-4a84-bf85-23c00e907701" containerName="placement-api" probeResult="failure" output="Get \"https://10.217.1.29:8778/\": EOF" Jan 20 17:43:49 crc kubenswrapper[4558]: I0120 17:43:49.924248 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/placement-7f9f9c9d78-s5gpm"] Jan 20 17:43:49 crc kubenswrapper[4558]: E0120 17:43:49.924838 4558 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="33f7229d-bcae-434a-805e-530801e79b20" containerName="keystone-api" Jan 20 17:43:49 crc kubenswrapper[4558]: I0120 17:43:49.924860 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="33f7229d-bcae-434a-805e-530801e79b20" containerName="keystone-api" Jan 20 17:43:49 crc kubenswrapper[4558]: E0120 17:43:49.924872 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8877ab44-638b-4880-97fb-305726a5c1a6" containerName="neutron-api" Jan 20 17:43:49 crc kubenswrapper[4558]: I0120 17:43:49.924879 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8877ab44-638b-4880-97fb-305726a5c1a6" containerName="neutron-api" Jan 20 17:43:49 crc kubenswrapper[4558]: E0120 17:43:49.924898 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8877ab44-638b-4880-97fb-305726a5c1a6" containerName="neutron-httpd" Jan 20 17:43:49 crc kubenswrapper[4558]: I0120 17:43:49.924906 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8877ab44-638b-4880-97fb-305726a5c1a6" containerName="neutron-httpd" Jan 20 17:43:49 crc kubenswrapper[4558]: I0120 17:43:49.925189 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="33f7229d-bcae-434a-805e-530801e79b20" containerName="keystone-api" Jan 20 17:43:49 crc kubenswrapper[4558]: I0120 17:43:49.925210 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8877ab44-638b-4880-97fb-305726a5c1a6" containerName="neutron-httpd" Jan 20 17:43:49 crc kubenswrapper[4558]: I0120 17:43:49.925228 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8877ab44-638b-4880-97fb-305726a5c1a6" containerName="neutron-api" Jan 20 17:43:49 crc kubenswrapper[4558]: I0120 17:43:49.926433 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-7f9f9c9d78-s5gpm" Jan 20 17:43:49 crc kubenswrapper[4558]: I0120 17:43:49.942154 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-7f9f9c9d78-s5gpm"] Jan 20 17:43:49 crc kubenswrapper[4558]: I0120 17:43:49.984450 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" podUID="4ea608e7-9a19-47fb-8e14-a629451e7c03" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.1.34:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:43:49 crc kubenswrapper[4558]: I0120 17:43:49.984455 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" podUID="4ea608e7-9a19-47fb-8e14-a629451e7c03" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.1.34:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 20 17:43:49 crc kubenswrapper[4558]: I0120 17:43:49.995151 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3872f3be-7968-4412-8140-c8251291a7ed-scripts\") pod \"placement-7f9f9c9d78-s5gpm\" (UID: \"3872f3be-7968-4412-8140-c8251291a7ed\") " pod="openstack-kuttl-tests/placement-7f9f9c9d78-s5gpm" Jan 20 17:43:49 crc kubenswrapper[4558]: I0120 17:43:49.995459 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3872f3be-7968-4412-8140-c8251291a7ed-combined-ca-bundle\") pod \"placement-7f9f9c9d78-s5gpm\" (UID: \"3872f3be-7968-4412-8140-c8251291a7ed\") " 
pod="openstack-kuttl-tests/placement-7f9f9c9d78-s5gpm" Jan 20 17:43:49 crc kubenswrapper[4558]: I0120 17:43:49.995604 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3872f3be-7968-4412-8140-c8251291a7ed-internal-tls-certs\") pod \"placement-7f9f9c9d78-s5gpm\" (UID: \"3872f3be-7968-4412-8140-c8251291a7ed\") " pod="openstack-kuttl-tests/placement-7f9f9c9d78-s5gpm" Jan 20 17:43:49 crc kubenswrapper[4558]: I0120 17:43:49.995751 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3872f3be-7968-4412-8140-c8251291a7ed-public-tls-certs\") pod \"placement-7f9f9c9d78-s5gpm\" (UID: \"3872f3be-7968-4412-8140-c8251291a7ed\") " pod="openstack-kuttl-tests/placement-7f9f9c9d78-s5gpm" Jan 20 17:43:49 crc kubenswrapper[4558]: I0120 17:43:49.995954 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3872f3be-7968-4412-8140-c8251291a7ed-logs\") pod \"placement-7f9f9c9d78-s5gpm\" (UID: \"3872f3be-7968-4412-8140-c8251291a7ed\") " pod="openstack-kuttl-tests/placement-7f9f9c9d78-s5gpm" Jan 20 17:43:49 crc kubenswrapper[4558]: I0120 17:43:49.996256 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3872f3be-7968-4412-8140-c8251291a7ed-config-data\") pod \"placement-7f9f9c9d78-s5gpm\" (UID: \"3872f3be-7968-4412-8140-c8251291a7ed\") " pod="openstack-kuttl-tests/placement-7f9f9c9d78-s5gpm" Jan 20 17:43:49 crc kubenswrapper[4558]: I0120 17:43:49.996371 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dxkkj\" (UniqueName: \"kubernetes.io/projected/3872f3be-7968-4412-8140-c8251291a7ed-kube-api-access-dxkkj\") pod \"placement-7f9f9c9d78-s5gpm\" (UID: \"3872f3be-7968-4412-8140-c8251291a7ed\") " pod="openstack-kuttl-tests/placement-7f9f9c9d78-s5gpm" Jan 20 17:43:50 crc kubenswrapper[4558]: I0120 17:43:50.098135 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3872f3be-7968-4412-8140-c8251291a7ed-logs\") pod \"placement-7f9f9c9d78-s5gpm\" (UID: \"3872f3be-7968-4412-8140-c8251291a7ed\") " pod="openstack-kuttl-tests/placement-7f9f9c9d78-s5gpm" Jan 20 17:43:50 crc kubenswrapper[4558]: I0120 17:43:50.098289 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3872f3be-7968-4412-8140-c8251291a7ed-config-data\") pod \"placement-7f9f9c9d78-s5gpm\" (UID: \"3872f3be-7968-4412-8140-c8251291a7ed\") " pod="openstack-kuttl-tests/placement-7f9f9c9d78-s5gpm" Jan 20 17:43:50 crc kubenswrapper[4558]: I0120 17:43:50.098323 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dxkkj\" (UniqueName: \"kubernetes.io/projected/3872f3be-7968-4412-8140-c8251291a7ed-kube-api-access-dxkkj\") pod \"placement-7f9f9c9d78-s5gpm\" (UID: \"3872f3be-7968-4412-8140-c8251291a7ed\") " pod="openstack-kuttl-tests/placement-7f9f9c9d78-s5gpm" Jan 20 17:43:50 crc kubenswrapper[4558]: I0120 17:43:50.098937 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3872f3be-7968-4412-8140-c8251291a7ed-logs\") pod 
\"placement-7f9f9c9d78-s5gpm\" (UID: \"3872f3be-7968-4412-8140-c8251291a7ed\") " pod="openstack-kuttl-tests/placement-7f9f9c9d78-s5gpm" Jan 20 17:43:50 crc kubenswrapper[4558]: I0120 17:43:50.099587 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3872f3be-7968-4412-8140-c8251291a7ed-scripts\") pod \"placement-7f9f9c9d78-s5gpm\" (UID: \"3872f3be-7968-4412-8140-c8251291a7ed\") " pod="openstack-kuttl-tests/placement-7f9f9c9d78-s5gpm" Jan 20 17:43:50 crc kubenswrapper[4558]: I0120 17:43:50.099786 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3872f3be-7968-4412-8140-c8251291a7ed-combined-ca-bundle\") pod \"placement-7f9f9c9d78-s5gpm\" (UID: \"3872f3be-7968-4412-8140-c8251291a7ed\") " pod="openstack-kuttl-tests/placement-7f9f9c9d78-s5gpm" Jan 20 17:43:50 crc kubenswrapper[4558]: I0120 17:43:50.099828 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3872f3be-7968-4412-8140-c8251291a7ed-internal-tls-certs\") pod \"placement-7f9f9c9d78-s5gpm\" (UID: \"3872f3be-7968-4412-8140-c8251291a7ed\") " pod="openstack-kuttl-tests/placement-7f9f9c9d78-s5gpm" Jan 20 17:43:50 crc kubenswrapper[4558]: I0120 17:43:50.099854 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3872f3be-7968-4412-8140-c8251291a7ed-public-tls-certs\") pod \"placement-7f9f9c9d78-s5gpm\" (UID: \"3872f3be-7968-4412-8140-c8251291a7ed\") " pod="openstack-kuttl-tests/placement-7f9f9c9d78-s5gpm" Jan 20 17:43:50 crc kubenswrapper[4558]: I0120 17:43:50.104090 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3872f3be-7968-4412-8140-c8251291a7ed-public-tls-certs\") pod \"placement-7f9f9c9d78-s5gpm\" (UID: \"3872f3be-7968-4412-8140-c8251291a7ed\") " pod="openstack-kuttl-tests/placement-7f9f9c9d78-s5gpm" Jan 20 17:43:50 crc kubenswrapper[4558]: I0120 17:43:50.104238 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3872f3be-7968-4412-8140-c8251291a7ed-combined-ca-bundle\") pod \"placement-7f9f9c9d78-s5gpm\" (UID: \"3872f3be-7968-4412-8140-c8251291a7ed\") " pod="openstack-kuttl-tests/placement-7f9f9c9d78-s5gpm" Jan 20 17:43:50 crc kubenswrapper[4558]: I0120 17:43:50.107433 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3872f3be-7968-4412-8140-c8251291a7ed-config-data\") pod \"placement-7f9f9c9d78-s5gpm\" (UID: \"3872f3be-7968-4412-8140-c8251291a7ed\") " pod="openstack-kuttl-tests/placement-7f9f9c9d78-s5gpm" Jan 20 17:43:50 crc kubenswrapper[4558]: I0120 17:43:50.108553 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3872f3be-7968-4412-8140-c8251291a7ed-internal-tls-certs\") pod \"placement-7f9f9c9d78-s5gpm\" (UID: \"3872f3be-7968-4412-8140-c8251291a7ed\") " pod="openstack-kuttl-tests/placement-7f9f9c9d78-s5gpm" Jan 20 17:43:50 crc kubenswrapper[4558]: I0120 17:43:50.110969 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3872f3be-7968-4412-8140-c8251291a7ed-scripts\") pod \"placement-7f9f9c9d78-s5gpm\" (UID: 
\"3872f3be-7968-4412-8140-c8251291a7ed\") " pod="openstack-kuttl-tests/placement-7f9f9c9d78-s5gpm" Jan 20 17:43:50 crc kubenswrapper[4558]: I0120 17:43:50.119378 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dxkkj\" (UniqueName: \"kubernetes.io/projected/3872f3be-7968-4412-8140-c8251291a7ed-kube-api-access-dxkkj\") pod \"placement-7f9f9c9d78-s5gpm\" (UID: \"3872f3be-7968-4412-8140-c8251291a7ed\") " pod="openstack-kuttl-tests/placement-7f9f9c9d78-s5gpm" Jan 20 17:43:50 crc kubenswrapper[4558]: I0120 17:43:50.136855 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"046d6bef-b034-4093-9a77-f23074beaf20","Type":"ContainerStarted","Data":"0341e4fb184d688b59bd82831888ea8b9d6cff92bd2c69cad322485ad1a47834"} Jan 20 17:43:50 crc kubenswrapper[4558]: I0120 17:43:50.139547 4558 generic.go:334] "Generic (PLEG): container finished" podID="b6ba5f2a-6486-4a84-bf85-23c00e907701" containerID="793b7e60b027092ef8139183c3e6dd4b54469caf6cf02f94955f48fe3bbfbd9f" exitCode=143 Jan 20 17:43:50 crc kubenswrapper[4558]: I0120 17:43:50.139609 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-976c48458-wzswq" event={"ID":"b6ba5f2a-6486-4a84-bf85-23c00e907701","Type":"ContainerDied","Data":"793b7e60b027092ef8139183c3e6dd4b54469caf6cf02f94955f48fe3bbfbd9f"} Jan 20 17:43:50 crc kubenswrapper[4558]: I0120 17:43:50.151840 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:43:50 crc kubenswrapper[4558]: I0120 17:43:50.264297 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-7f9f9c9d78-s5gpm" Jan 20 17:43:50 crc kubenswrapper[4558]: I0120 17:43:50.571768 4558 scope.go:117] "RemoveContainer" containerID="84973fad4d843f75c78030000ec0fa0ce538b6a078241540dc44a79641a6a858" Jan 20 17:43:50 crc kubenswrapper[4558]: I0120 17:43:50.575317 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="33f7229d-bcae-434a-805e-530801e79b20" path="/var/lib/kubelet/pods/33f7229d-bcae-434a-805e-530801e79b20/volumes" Jan 20 17:43:50 crc kubenswrapper[4558]: I0120 17:43:50.575854 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8877ab44-638b-4880-97fb-305726a5c1a6" path="/var/lib/kubelet/pods/8877ab44-638b-4880-97fb-305726a5c1a6/volumes" Jan 20 17:43:50 crc kubenswrapper[4558]: W0120 17:43:50.699619 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3872f3be_7968_4412_8140_c8251291a7ed.slice/crio-ad8a009f4c19c959ce255125d1526b193f8f6d30dc44f751cdceb0ed7b0ec4de WatchSource:0}: Error finding container ad8a009f4c19c959ce255125d1526b193f8f6d30dc44f751cdceb0ed7b0ec4de: Status 404 returned error can't find the container with id ad8a009f4c19c959ce255125d1526b193f8f6d30dc44f751cdceb0ed7b0ec4de Jan 20 17:43:50 crc kubenswrapper[4558]: I0120 17:43:50.701343 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-7f9f9c9d78-s5gpm"] Jan 20 17:43:50 crc kubenswrapper[4558]: I0120 17:43:50.711104 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-7f9f9c9d78-s5gpm"] Jan 20 17:43:50 crc kubenswrapper[4558]: I0120 17:43:50.723609 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/placement-5b5f5cd6d-6rk7z"] Jan 20 17:43:50 crc kubenswrapper[4558]: I0120 
17:43:50.725107 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-5b5f5cd6d-6rk7z" Jan 20 17:43:50 crc kubenswrapper[4558]: I0120 17:43:50.739002 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-metadata-0" podUID="572d7a53-9a17-43e3-bc12-a04f994eb857" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.1.43:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:43:50 crc kubenswrapper[4558]: I0120 17:43:50.739410 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-metadata-0" podUID="572d7a53-9a17-43e3-bc12-a04f994eb857" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.1.43:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:43:50 crc kubenswrapper[4558]: I0120 17:43:50.745122 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-5b5f5cd6d-6rk7z"] Jan 20 17:43:50 crc kubenswrapper[4558]: I0120 17:43:50.814212 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f856q\" (UniqueName: \"kubernetes.io/projected/50804fa8-d09d-49f8-a143-d7ec24ec542a-kube-api-access-f856q\") pod \"placement-5b5f5cd6d-6rk7z\" (UID: \"50804fa8-d09d-49f8-a143-d7ec24ec542a\") " pod="openstack-kuttl-tests/placement-5b5f5cd6d-6rk7z" Jan 20 17:43:50 crc kubenswrapper[4558]: I0120 17:43:50.814293 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/50804fa8-d09d-49f8-a143-d7ec24ec542a-scripts\") pod \"placement-5b5f5cd6d-6rk7z\" (UID: \"50804fa8-d09d-49f8-a143-d7ec24ec542a\") " pod="openstack-kuttl-tests/placement-5b5f5cd6d-6rk7z" Jan 20 17:43:50 crc kubenswrapper[4558]: I0120 17:43:50.814422 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/50804fa8-d09d-49f8-a143-d7ec24ec542a-logs\") pod \"placement-5b5f5cd6d-6rk7z\" (UID: \"50804fa8-d09d-49f8-a143-d7ec24ec542a\") " pod="openstack-kuttl-tests/placement-5b5f5cd6d-6rk7z" Jan 20 17:43:50 crc kubenswrapper[4558]: I0120 17:43:50.814447 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50804fa8-d09d-49f8-a143-d7ec24ec542a-combined-ca-bundle\") pod \"placement-5b5f5cd6d-6rk7z\" (UID: \"50804fa8-d09d-49f8-a143-d7ec24ec542a\") " pod="openstack-kuttl-tests/placement-5b5f5cd6d-6rk7z" Jan 20 17:43:50 crc kubenswrapper[4558]: I0120 17:43:50.814496 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/50804fa8-d09d-49f8-a143-d7ec24ec542a-internal-tls-certs\") pod \"placement-5b5f5cd6d-6rk7z\" (UID: \"50804fa8-d09d-49f8-a143-d7ec24ec542a\") " pod="openstack-kuttl-tests/placement-5b5f5cd6d-6rk7z" Jan 20 17:43:50 crc kubenswrapper[4558]: I0120 17:43:50.814586 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/50804fa8-d09d-49f8-a143-d7ec24ec542a-public-tls-certs\") pod \"placement-5b5f5cd6d-6rk7z\" (UID: \"50804fa8-d09d-49f8-a143-d7ec24ec542a\") " 
pod="openstack-kuttl-tests/placement-5b5f5cd6d-6rk7z" Jan 20 17:43:50 crc kubenswrapper[4558]: I0120 17:43:50.814627 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50804fa8-d09d-49f8-a143-d7ec24ec542a-config-data\") pod \"placement-5b5f5cd6d-6rk7z\" (UID: \"50804fa8-d09d-49f8-a143-d7ec24ec542a\") " pod="openstack-kuttl-tests/placement-5b5f5cd6d-6rk7z" Jan 20 17:43:50 crc kubenswrapper[4558]: I0120 17:43:50.917057 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/50804fa8-d09d-49f8-a143-d7ec24ec542a-public-tls-certs\") pod \"placement-5b5f5cd6d-6rk7z\" (UID: \"50804fa8-d09d-49f8-a143-d7ec24ec542a\") " pod="openstack-kuttl-tests/placement-5b5f5cd6d-6rk7z" Jan 20 17:43:50 crc kubenswrapper[4558]: I0120 17:43:50.917337 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50804fa8-d09d-49f8-a143-d7ec24ec542a-config-data\") pod \"placement-5b5f5cd6d-6rk7z\" (UID: \"50804fa8-d09d-49f8-a143-d7ec24ec542a\") " pod="openstack-kuttl-tests/placement-5b5f5cd6d-6rk7z" Jan 20 17:43:50 crc kubenswrapper[4558]: I0120 17:43:50.917518 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f856q\" (UniqueName: \"kubernetes.io/projected/50804fa8-d09d-49f8-a143-d7ec24ec542a-kube-api-access-f856q\") pod \"placement-5b5f5cd6d-6rk7z\" (UID: \"50804fa8-d09d-49f8-a143-d7ec24ec542a\") " pod="openstack-kuttl-tests/placement-5b5f5cd6d-6rk7z" Jan 20 17:43:50 crc kubenswrapper[4558]: I0120 17:43:50.917647 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/50804fa8-d09d-49f8-a143-d7ec24ec542a-scripts\") pod \"placement-5b5f5cd6d-6rk7z\" (UID: \"50804fa8-d09d-49f8-a143-d7ec24ec542a\") " pod="openstack-kuttl-tests/placement-5b5f5cd6d-6rk7z" Jan 20 17:43:50 crc kubenswrapper[4558]: I0120 17:43:50.917860 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/50804fa8-d09d-49f8-a143-d7ec24ec542a-logs\") pod \"placement-5b5f5cd6d-6rk7z\" (UID: \"50804fa8-d09d-49f8-a143-d7ec24ec542a\") " pod="openstack-kuttl-tests/placement-5b5f5cd6d-6rk7z" Jan 20 17:43:50 crc kubenswrapper[4558]: I0120 17:43:50.917942 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50804fa8-d09d-49f8-a143-d7ec24ec542a-combined-ca-bundle\") pod \"placement-5b5f5cd6d-6rk7z\" (UID: \"50804fa8-d09d-49f8-a143-d7ec24ec542a\") " pod="openstack-kuttl-tests/placement-5b5f5cd6d-6rk7z" Jan 20 17:43:50 crc kubenswrapper[4558]: I0120 17:43:50.918052 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/50804fa8-d09d-49f8-a143-d7ec24ec542a-internal-tls-certs\") pod \"placement-5b5f5cd6d-6rk7z\" (UID: \"50804fa8-d09d-49f8-a143-d7ec24ec542a\") " pod="openstack-kuttl-tests/placement-5b5f5cd6d-6rk7z" Jan 20 17:43:50 crc kubenswrapper[4558]: I0120 17:43:50.918326 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/50804fa8-d09d-49f8-a143-d7ec24ec542a-logs\") pod \"placement-5b5f5cd6d-6rk7z\" (UID: \"50804fa8-d09d-49f8-a143-d7ec24ec542a\") " 
pod="openstack-kuttl-tests/placement-5b5f5cd6d-6rk7z" Jan 20 17:43:50 crc kubenswrapper[4558]: I0120 17:43:50.921644 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/50804fa8-d09d-49f8-a143-d7ec24ec542a-internal-tls-certs\") pod \"placement-5b5f5cd6d-6rk7z\" (UID: \"50804fa8-d09d-49f8-a143-d7ec24ec542a\") " pod="openstack-kuttl-tests/placement-5b5f5cd6d-6rk7z" Jan 20 17:43:50 crc kubenswrapper[4558]: I0120 17:43:50.922312 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/50804fa8-d09d-49f8-a143-d7ec24ec542a-public-tls-certs\") pod \"placement-5b5f5cd6d-6rk7z\" (UID: \"50804fa8-d09d-49f8-a143-d7ec24ec542a\") " pod="openstack-kuttl-tests/placement-5b5f5cd6d-6rk7z" Jan 20 17:43:50 crc kubenswrapper[4558]: I0120 17:43:50.923891 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50804fa8-d09d-49f8-a143-d7ec24ec542a-combined-ca-bundle\") pod \"placement-5b5f5cd6d-6rk7z\" (UID: \"50804fa8-d09d-49f8-a143-d7ec24ec542a\") " pod="openstack-kuttl-tests/placement-5b5f5cd6d-6rk7z" Jan 20 17:43:50 crc kubenswrapper[4558]: I0120 17:43:50.925246 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/50804fa8-d09d-49f8-a143-d7ec24ec542a-scripts\") pod \"placement-5b5f5cd6d-6rk7z\" (UID: \"50804fa8-d09d-49f8-a143-d7ec24ec542a\") " pod="openstack-kuttl-tests/placement-5b5f5cd6d-6rk7z" Jan 20 17:43:50 crc kubenswrapper[4558]: I0120 17:43:50.934642 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f856q\" (UniqueName: \"kubernetes.io/projected/50804fa8-d09d-49f8-a143-d7ec24ec542a-kube-api-access-f856q\") pod \"placement-5b5f5cd6d-6rk7z\" (UID: \"50804fa8-d09d-49f8-a143-d7ec24ec542a\") " pod="openstack-kuttl-tests/placement-5b5f5cd6d-6rk7z" Jan 20 17:43:50 crc kubenswrapper[4558]: I0120 17:43:50.935394 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50804fa8-d09d-49f8-a143-d7ec24ec542a-config-data\") pod \"placement-5b5f5cd6d-6rk7z\" (UID: \"50804fa8-d09d-49f8-a143-d7ec24ec542a\") " pod="openstack-kuttl-tests/placement-5b5f5cd6d-6rk7z" Jan 20 17:43:51 crc kubenswrapper[4558]: I0120 17:43:51.077800 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-5b5f5cd6d-6rk7z" Jan 20 17:43:51 crc kubenswrapper[4558]: I0120 17:43:51.173317 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-7f9f9c9d78-s5gpm" event={"ID":"3872f3be-7968-4412-8140-c8251291a7ed","Type":"ContainerStarted","Data":"4e45332938c1fc9ba8d1484b89f589b471179ae20f172f3c0d3e3c976ac49898"} Jan 20 17:43:51 crc kubenswrapper[4558]: I0120 17:43:51.173366 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-7f9f9c9d78-s5gpm" event={"ID":"3872f3be-7968-4412-8140-c8251291a7ed","Type":"ContainerStarted","Data":"ad8a009f4c19c959ce255125d1526b193f8f6d30dc44f751cdceb0ed7b0ec4de"} Jan 20 17:43:51 crc kubenswrapper[4558]: I0120 17:43:51.181299 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:43:51 crc kubenswrapper[4558]: I0120 17:43:51.181330 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:43:51 crc kubenswrapper[4558]: I0120 17:43:51.239628 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"ea72c008-3a66-4577-8042-4b1e0ed1cca6","Type":"ContainerStarted","Data":"f53b402a7de8ccb9267cb06efb993b2c9befd769af338febfa1cab472df02511"} Jan 20 17:43:51 crc kubenswrapper[4558]: I0120 17:43:51.240222 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:43:51 crc kubenswrapper[4558]: I0120 17:43:51.276504 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"046d6bef-b034-4093-9a77-f23074beaf20","Type":"ContainerStarted","Data":"cee97a0e46fb53a7035606e81d6282fb0ba2c0e098ff4e08ca4586445e9122cb"} Jan 20 17:43:51 crc kubenswrapper[4558]: I0120 17:43:51.497046 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:43:51 crc kubenswrapper[4558]: I0120 17:43:51.770289 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-5b5f5cd6d-6rk7z"] Jan 20 17:43:52 crc kubenswrapper[4558]: I0120 17:43:52.231216 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-api-69995768b6-hpdg2"] Jan 20 17:43:52 crc kubenswrapper[4558]: I0120 17:43:52.233107 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-api-69995768b6-hpdg2" Jan 20 17:43:52 crc kubenswrapper[4558]: I0120 17:43:52.241599 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-api-69995768b6-hpdg2"] Jan 20 17:43:52 crc kubenswrapper[4558]: I0120 17:43:52.310507 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-7f9f9c9d78-s5gpm" event={"ID":"3872f3be-7968-4412-8140-c8251291a7ed","Type":"ContainerStarted","Data":"5afcab9d11643826a55d42d96c24ef19bea486baf52a014b64095a4ea1de5fc1"} Jan 20 17:43:52 crc kubenswrapper[4558]: I0120 17:43:52.310768 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/placement-7f9f9c9d78-s5gpm" podUID="3872f3be-7968-4412-8140-c8251291a7ed" containerName="placement-log" containerID="cri-o://4e45332938c1fc9ba8d1484b89f589b471179ae20f172f3c0d3e3c976ac49898" gracePeriod=30 Jan 20 17:43:52 crc kubenswrapper[4558]: I0120 17:43:52.310821 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/placement-7f9f9c9d78-s5gpm" Jan 20 17:43:52 crc kubenswrapper[4558]: I0120 17:43:52.310894 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/placement-7f9f9c9d78-s5gpm" podUID="3872f3be-7968-4412-8140-c8251291a7ed" containerName="placement-api" containerID="cri-o://5afcab9d11643826a55d42d96c24ef19bea486baf52a014b64095a4ea1de5fc1" gracePeriod=30 Jan 20 17:43:52 crc kubenswrapper[4558]: I0120 17:43:52.310902 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/placement-7f9f9c9d78-s5gpm" Jan 20 17:43:52 crc kubenswrapper[4558]: I0120 17:43:52.317631 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-5b5f5cd6d-6rk7z" event={"ID":"50804fa8-d09d-49f8-a143-d7ec24ec542a","Type":"ContainerStarted","Data":"15c901b90eae1d8b686fb50b0222bcb667a3a9f6b6a35faa944d0560ae58cf3a"} Jan 20 17:43:52 crc kubenswrapper[4558]: I0120 17:43:52.317666 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-5b5f5cd6d-6rk7z" event={"ID":"50804fa8-d09d-49f8-a143-d7ec24ec542a","Type":"ContainerStarted","Data":"661b71951f10fd7eb454cf2a5617695ac503cf4d432c4017c7980d3e23a068f2"} Jan 20 17:43:52 crc kubenswrapper[4558]: I0120 17:43:52.317678 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-5b5f5cd6d-6rk7z" event={"ID":"50804fa8-d09d-49f8-a143-d7ec24ec542a","Type":"ContainerStarted","Data":"22fbf6e912d7ee94bdcb3da7c63db26b6dd11dfe2bf5d1deadf4c59e1be94239"} Jan 20 17:43:52 crc kubenswrapper[4558]: I0120 17:43:52.317872 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/placement-5b5f5cd6d-6rk7z" Jan 20 17:43:52 crc kubenswrapper[4558]: I0120 17:43:52.317935 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/placement-5b5f5cd6d-6rk7z" Jan 20 17:43:52 crc kubenswrapper[4558]: I0120 17:43:52.345036 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"046d6bef-b034-4093-9a77-f23074beaf20","Type":"ContainerStarted","Data":"d00d835dc1dbc2461d0c91743b94e5fad21abda74168aa06d622f30e4cb362b4"} Jan 20 17:43:52 crc kubenswrapper[4558]: I0120 17:43:52.346353 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:52 crc 
kubenswrapper[4558]: I0120 17:43:52.352652 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a6d1823f-8885-40f1-b85d-5ee22197dc09-public-tls-certs\") pod \"barbican-api-69995768b6-hpdg2\" (UID: \"a6d1823f-8885-40f1-b85d-5ee22197dc09\") " pod="openstack-kuttl-tests/barbican-api-69995768b6-hpdg2" Jan 20 17:43:52 crc kubenswrapper[4558]: I0120 17:43:52.352778 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gnnhz\" (UniqueName: \"kubernetes.io/projected/a6d1823f-8885-40f1-b85d-5ee22197dc09-kube-api-access-gnnhz\") pod \"barbican-api-69995768b6-hpdg2\" (UID: \"a6d1823f-8885-40f1-b85d-5ee22197dc09\") " pod="openstack-kuttl-tests/barbican-api-69995768b6-hpdg2" Jan 20 17:43:52 crc kubenswrapper[4558]: I0120 17:43:52.352834 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a6d1823f-8885-40f1-b85d-5ee22197dc09-logs\") pod \"barbican-api-69995768b6-hpdg2\" (UID: \"a6d1823f-8885-40f1-b85d-5ee22197dc09\") " pod="openstack-kuttl-tests/barbican-api-69995768b6-hpdg2" Jan 20 17:43:52 crc kubenswrapper[4558]: I0120 17:43:52.352866 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a6d1823f-8885-40f1-b85d-5ee22197dc09-config-data-custom\") pod \"barbican-api-69995768b6-hpdg2\" (UID: \"a6d1823f-8885-40f1-b85d-5ee22197dc09\") " pod="openstack-kuttl-tests/barbican-api-69995768b6-hpdg2" Jan 20 17:43:52 crc kubenswrapper[4558]: I0120 17:43:52.352904 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a6d1823f-8885-40f1-b85d-5ee22197dc09-combined-ca-bundle\") pod \"barbican-api-69995768b6-hpdg2\" (UID: \"a6d1823f-8885-40f1-b85d-5ee22197dc09\") " pod="openstack-kuttl-tests/barbican-api-69995768b6-hpdg2" Jan 20 17:43:52 crc kubenswrapper[4558]: I0120 17:43:52.352941 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a6d1823f-8885-40f1-b85d-5ee22197dc09-config-data\") pod \"barbican-api-69995768b6-hpdg2\" (UID: \"a6d1823f-8885-40f1-b85d-5ee22197dc09\") " pod="openstack-kuttl-tests/barbican-api-69995768b6-hpdg2" Jan 20 17:43:52 crc kubenswrapper[4558]: I0120 17:43:52.352962 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a6d1823f-8885-40f1-b85d-5ee22197dc09-internal-tls-certs\") pod \"barbican-api-69995768b6-hpdg2\" (UID: \"a6d1823f-8885-40f1-b85d-5ee22197dc09\") " pod="openstack-kuttl-tests/barbican-api-69995768b6-hpdg2" Jan 20 17:43:52 crc kubenswrapper[4558]: I0120 17:43:52.367882 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/placement-7f9f9c9d78-s5gpm" podStartSLOduration=3.367863324 podStartE2EDuration="3.367863324s" podCreationTimestamp="2026-01-20 17:43:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:43:52.344885832 +0000 UTC m=+3726.105223798" watchObservedRunningTime="2026-01-20 17:43:52.367863324 +0000 UTC m=+3726.128201281" Jan 20 17:43:52 crc 
kubenswrapper[4558]: I0120 17:43:52.384812 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/placement-5b5f5cd6d-6rk7z" podStartSLOduration=2.384794479 podStartE2EDuration="2.384794479s" podCreationTimestamp="2026-01-20 17:43:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:43:52.370994271 +0000 UTC m=+3726.131332238" watchObservedRunningTime="2026-01-20 17:43:52.384794479 +0000 UTC m=+3726.145132446" Jan 20 17:43:52 crc kubenswrapper[4558]: I0120 17:43:52.395107 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=1.913505765 podStartE2EDuration="5.39509159s" podCreationTimestamp="2026-01-20 17:43:47 +0000 UTC" firstStartedPulling="2026-01-20 17:43:48.223307217 +0000 UTC m=+3721.983645184" lastFinishedPulling="2026-01-20 17:43:51.704893042 +0000 UTC m=+3725.465231009" observedRunningTime="2026-01-20 17:43:52.392636955 +0000 UTC m=+3726.152974922" watchObservedRunningTime="2026-01-20 17:43:52.39509159 +0000 UTC m=+3726.155429557" Jan 20 17:43:52 crc kubenswrapper[4558]: I0120 17:43:52.454596 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gnnhz\" (UniqueName: \"kubernetes.io/projected/a6d1823f-8885-40f1-b85d-5ee22197dc09-kube-api-access-gnnhz\") pod \"barbican-api-69995768b6-hpdg2\" (UID: \"a6d1823f-8885-40f1-b85d-5ee22197dc09\") " pod="openstack-kuttl-tests/barbican-api-69995768b6-hpdg2" Jan 20 17:43:52 crc kubenswrapper[4558]: I0120 17:43:52.454712 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a6d1823f-8885-40f1-b85d-5ee22197dc09-logs\") pod \"barbican-api-69995768b6-hpdg2\" (UID: \"a6d1823f-8885-40f1-b85d-5ee22197dc09\") " pod="openstack-kuttl-tests/barbican-api-69995768b6-hpdg2" Jan 20 17:43:52 crc kubenswrapper[4558]: I0120 17:43:52.454756 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a6d1823f-8885-40f1-b85d-5ee22197dc09-config-data-custom\") pod \"barbican-api-69995768b6-hpdg2\" (UID: \"a6d1823f-8885-40f1-b85d-5ee22197dc09\") " pod="openstack-kuttl-tests/barbican-api-69995768b6-hpdg2" Jan 20 17:43:52 crc kubenswrapper[4558]: I0120 17:43:52.454802 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a6d1823f-8885-40f1-b85d-5ee22197dc09-combined-ca-bundle\") pod \"barbican-api-69995768b6-hpdg2\" (UID: \"a6d1823f-8885-40f1-b85d-5ee22197dc09\") " pod="openstack-kuttl-tests/barbican-api-69995768b6-hpdg2" Jan 20 17:43:52 crc kubenswrapper[4558]: I0120 17:43:52.454856 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a6d1823f-8885-40f1-b85d-5ee22197dc09-config-data\") pod \"barbican-api-69995768b6-hpdg2\" (UID: \"a6d1823f-8885-40f1-b85d-5ee22197dc09\") " pod="openstack-kuttl-tests/barbican-api-69995768b6-hpdg2" Jan 20 17:43:52 crc kubenswrapper[4558]: I0120 17:43:52.454896 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a6d1823f-8885-40f1-b85d-5ee22197dc09-internal-tls-certs\") pod \"barbican-api-69995768b6-hpdg2\" (UID: \"a6d1823f-8885-40f1-b85d-5ee22197dc09\") " 
pod="openstack-kuttl-tests/barbican-api-69995768b6-hpdg2" Jan 20 17:43:52 crc kubenswrapper[4558]: I0120 17:43:52.455029 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a6d1823f-8885-40f1-b85d-5ee22197dc09-public-tls-certs\") pod \"barbican-api-69995768b6-hpdg2\" (UID: \"a6d1823f-8885-40f1-b85d-5ee22197dc09\") " pod="openstack-kuttl-tests/barbican-api-69995768b6-hpdg2" Jan 20 17:43:52 crc kubenswrapper[4558]: I0120 17:43:52.460760 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a6d1823f-8885-40f1-b85d-5ee22197dc09-logs\") pod \"barbican-api-69995768b6-hpdg2\" (UID: \"a6d1823f-8885-40f1-b85d-5ee22197dc09\") " pod="openstack-kuttl-tests/barbican-api-69995768b6-hpdg2" Jan 20 17:43:52 crc kubenswrapper[4558]: I0120 17:43:52.460868 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a6d1823f-8885-40f1-b85d-5ee22197dc09-internal-tls-certs\") pod \"barbican-api-69995768b6-hpdg2\" (UID: \"a6d1823f-8885-40f1-b85d-5ee22197dc09\") " pod="openstack-kuttl-tests/barbican-api-69995768b6-hpdg2" Jan 20 17:43:52 crc kubenswrapper[4558]: I0120 17:43:52.461801 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a6d1823f-8885-40f1-b85d-5ee22197dc09-public-tls-certs\") pod \"barbican-api-69995768b6-hpdg2\" (UID: \"a6d1823f-8885-40f1-b85d-5ee22197dc09\") " pod="openstack-kuttl-tests/barbican-api-69995768b6-hpdg2" Jan 20 17:43:52 crc kubenswrapper[4558]: I0120 17:43:52.462632 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a6d1823f-8885-40f1-b85d-5ee22197dc09-config-data\") pod \"barbican-api-69995768b6-hpdg2\" (UID: \"a6d1823f-8885-40f1-b85d-5ee22197dc09\") " pod="openstack-kuttl-tests/barbican-api-69995768b6-hpdg2" Jan 20 17:43:52 crc kubenswrapper[4558]: I0120 17:43:52.467431 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a6d1823f-8885-40f1-b85d-5ee22197dc09-config-data-custom\") pod \"barbican-api-69995768b6-hpdg2\" (UID: \"a6d1823f-8885-40f1-b85d-5ee22197dc09\") " pod="openstack-kuttl-tests/barbican-api-69995768b6-hpdg2" Jan 20 17:43:52 crc kubenswrapper[4558]: I0120 17:43:52.469407 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a6d1823f-8885-40f1-b85d-5ee22197dc09-combined-ca-bundle\") pod \"barbican-api-69995768b6-hpdg2\" (UID: \"a6d1823f-8885-40f1-b85d-5ee22197dc09\") " pod="openstack-kuttl-tests/barbican-api-69995768b6-hpdg2" Jan 20 17:43:52 crc kubenswrapper[4558]: I0120 17:43:52.479027 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gnnhz\" (UniqueName: \"kubernetes.io/projected/a6d1823f-8885-40f1-b85d-5ee22197dc09-kube-api-access-gnnhz\") pod \"barbican-api-69995768b6-hpdg2\" (UID: \"a6d1823f-8885-40f1-b85d-5ee22197dc09\") " pod="openstack-kuttl-tests/barbican-api-69995768b6-hpdg2" Jan 20 17:43:52 crc kubenswrapper[4558]: I0120 17:43:52.483662 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:43:52 crc kubenswrapper[4558]: I0120 17:43:52.549860 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-api-69995768b6-hpdg2" Jan 20 17:43:52 crc kubenswrapper[4558]: I0120 17:43:52.814628 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/cinder-api-0" podUID="56d39f72-48d3-4690-a20a-099cb41daa7e" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.1.32:8776/healthcheck\": read tcp 10.217.0.2:59074->10.217.1.32:8776: read: connection reset by peer" Jan 20 17:43:52 crc kubenswrapper[4558]: I0120 17:43:52.827077 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/cinder-api-0" podUID="56d39f72-48d3-4690-a20a-099cb41daa7e" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.1.32:8776/healthcheck\": dial tcp 10.217.1.32:8776: connect: connection refused" Jan 20 17:43:52 crc kubenswrapper[4558]: I0120 17:43:52.827224 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:52 crc kubenswrapper[4558]: I0120 17:43:52.875825 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-7f9f9c9d78-s5gpm" Jan 20 17:43:52 crc kubenswrapper[4558]: I0120 17:43:52.968964 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3872f3be-7968-4412-8140-c8251291a7ed-config-data\") pod \"3872f3be-7968-4412-8140-c8251291a7ed\" (UID: \"3872f3be-7968-4412-8140-c8251291a7ed\") " Jan 20 17:43:52 crc kubenswrapper[4558]: I0120 17:43:52.969047 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3872f3be-7968-4412-8140-c8251291a7ed-combined-ca-bundle\") pod \"3872f3be-7968-4412-8140-c8251291a7ed\" (UID: \"3872f3be-7968-4412-8140-c8251291a7ed\") " Jan 20 17:43:52 crc kubenswrapper[4558]: I0120 17:43:52.969645 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3872f3be-7968-4412-8140-c8251291a7ed-public-tls-certs\") pod \"3872f3be-7968-4412-8140-c8251291a7ed\" (UID: \"3872f3be-7968-4412-8140-c8251291a7ed\") " Jan 20 17:43:52 crc kubenswrapper[4558]: I0120 17:43:52.969745 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dxkkj\" (UniqueName: \"kubernetes.io/projected/3872f3be-7968-4412-8140-c8251291a7ed-kube-api-access-dxkkj\") pod \"3872f3be-7968-4412-8140-c8251291a7ed\" (UID: \"3872f3be-7968-4412-8140-c8251291a7ed\") " Jan 20 17:43:52 crc kubenswrapper[4558]: I0120 17:43:52.969771 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3872f3be-7968-4412-8140-c8251291a7ed-logs\") pod \"3872f3be-7968-4412-8140-c8251291a7ed\" (UID: \"3872f3be-7968-4412-8140-c8251291a7ed\") " Jan 20 17:43:52 crc kubenswrapper[4558]: I0120 17:43:52.969975 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3872f3be-7968-4412-8140-c8251291a7ed-scripts\") pod \"3872f3be-7968-4412-8140-c8251291a7ed\" (UID: \"3872f3be-7968-4412-8140-c8251291a7ed\") " Jan 20 17:43:52 crc kubenswrapper[4558]: I0120 17:43:52.970025 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/3872f3be-7968-4412-8140-c8251291a7ed-internal-tls-certs\") pod \"3872f3be-7968-4412-8140-c8251291a7ed\" (UID: \"3872f3be-7968-4412-8140-c8251291a7ed\") " Jan 20 17:43:52 crc kubenswrapper[4558]: I0120 17:43:52.970440 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3872f3be-7968-4412-8140-c8251291a7ed-logs" (OuterVolumeSpecName: "logs") pod "3872f3be-7968-4412-8140-c8251291a7ed" (UID: "3872f3be-7968-4412-8140-c8251291a7ed"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:43:52 crc kubenswrapper[4558]: I0120 17:43:52.971121 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3872f3be-7968-4412-8140-c8251291a7ed-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:52 crc kubenswrapper[4558]: I0120 17:43:52.974961 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3872f3be-7968-4412-8140-c8251291a7ed-kube-api-access-dxkkj" (OuterVolumeSpecName: "kube-api-access-dxkkj") pod "3872f3be-7968-4412-8140-c8251291a7ed" (UID: "3872f3be-7968-4412-8140-c8251291a7ed"). InnerVolumeSpecName "kube-api-access-dxkkj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:43:52 crc kubenswrapper[4558]: I0120 17:43:52.975870 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3872f3be-7968-4412-8140-c8251291a7ed-scripts" (OuterVolumeSpecName: "scripts") pod "3872f3be-7968-4412-8140-c8251291a7ed" (UID: "3872f3be-7968-4412-8140-c8251291a7ed"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:53 crc kubenswrapper[4558]: I0120 17:43:53.020904 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3872f3be-7968-4412-8140-c8251291a7ed-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3872f3be-7968-4412-8140-c8251291a7ed" (UID: "3872f3be-7968-4412-8140-c8251291a7ed"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:53 crc kubenswrapper[4558]: I0120 17:43:53.052420 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3872f3be-7968-4412-8140-c8251291a7ed-config-data" (OuterVolumeSpecName: "config-data") pod "3872f3be-7968-4412-8140-c8251291a7ed" (UID: "3872f3be-7968-4412-8140-c8251291a7ed"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:53 crc kubenswrapper[4558]: I0120 17:43:53.053763 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-api-69995768b6-hpdg2"] Jan 20 17:43:53 crc kubenswrapper[4558]: I0120 17:43:53.074305 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3872f3be-7968-4412-8140-c8251291a7ed-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "3872f3be-7968-4412-8140-c8251291a7ed" (UID: "3872f3be-7968-4412-8140-c8251291a7ed"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:53 crc kubenswrapper[4558]: I0120 17:43:53.075937 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3872f3be-7968-4412-8140-c8251291a7ed-public-tls-certs\") pod \"3872f3be-7968-4412-8140-c8251291a7ed\" (UID: \"3872f3be-7968-4412-8140-c8251291a7ed\") " Jan 20 17:43:53 crc kubenswrapper[4558]: I0120 17:43:53.076891 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3872f3be-7968-4412-8140-c8251291a7ed-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:53 crc kubenswrapper[4558]: I0120 17:43:53.076920 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3872f3be-7968-4412-8140-c8251291a7ed-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:53 crc kubenswrapper[4558]: I0120 17:43:53.076933 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dxkkj\" (UniqueName: \"kubernetes.io/projected/3872f3be-7968-4412-8140-c8251291a7ed-kube-api-access-dxkkj\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:53 crc kubenswrapper[4558]: I0120 17:43:53.076943 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3872f3be-7968-4412-8140-c8251291a7ed-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:53 crc kubenswrapper[4558]: W0120 17:43:53.077026 4558 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/3872f3be-7968-4412-8140-c8251291a7ed/volumes/kubernetes.io~secret/public-tls-certs Jan 20 17:43:53 crc kubenswrapper[4558]: I0120 17:43:53.077042 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3872f3be-7968-4412-8140-c8251291a7ed-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "3872f3be-7968-4412-8140-c8251291a7ed" (UID: "3872f3be-7968-4412-8140-c8251291a7ed"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:53 crc kubenswrapper[4558]: I0120 17:43:53.091582 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3872f3be-7968-4412-8140-c8251291a7ed-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "3872f3be-7968-4412-8140-c8251291a7ed" (UID: "3872f3be-7968-4412-8140-c8251291a7ed"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:53 crc kubenswrapper[4558]: I0120 17:43:53.184498 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3872f3be-7968-4412-8140-c8251291a7ed-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:53 crc kubenswrapper[4558]: I0120 17:43:53.184644 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3872f3be-7968-4412-8140-c8251291a7ed-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:53 crc kubenswrapper[4558]: I0120 17:43:53.360497 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-69995768b6-hpdg2" event={"ID":"a6d1823f-8885-40f1-b85d-5ee22197dc09","Type":"ContainerStarted","Data":"18bf3ef21bb245e310dd8ac4c714bcd9fa323f6c0767dc644e8509693385ec1f"} Jan 20 17:43:53 crc kubenswrapper[4558]: I0120 17:43:53.360896 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-69995768b6-hpdg2" event={"ID":"a6d1823f-8885-40f1-b85d-5ee22197dc09","Type":"ContainerStarted","Data":"ea1419644ca2911289535338721264e2f99e8dcc7356d4bafe1793b6983dd157"} Jan 20 17:43:53 crc kubenswrapper[4558]: I0120 17:43:53.363791 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:53 crc kubenswrapper[4558]: I0120 17:43:53.365929 4558 generic.go:334] "Generic (PLEG): container finished" podID="56d39f72-48d3-4690-a20a-099cb41daa7e" containerID="3358b6c2c0a492ce2df7675567ba772d472b5f09db162d54f169def48d48b722" exitCode=0 Jan 20 17:43:53 crc kubenswrapper[4558]: I0120 17:43:53.366024 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"56d39f72-48d3-4690-a20a-099cb41daa7e","Type":"ContainerDied","Data":"3358b6c2c0a492ce2df7675567ba772d472b5f09db162d54f169def48d48b722"} Jan 20 17:43:53 crc kubenswrapper[4558]: I0120 17:43:53.366077 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"56d39f72-48d3-4690-a20a-099cb41daa7e","Type":"ContainerDied","Data":"94d3c9d43f52c47d691791acedc0a34da7662d53b02a2453679cb72210409391"} Jan 20 17:43:53 crc kubenswrapper[4558]: I0120 17:43:53.366103 4558 scope.go:117] "RemoveContainer" containerID="3358b6c2c0a492ce2df7675567ba772d472b5f09db162d54f169def48d48b722" Jan 20 17:43:53 crc kubenswrapper[4558]: I0120 17:43:53.379955 4558 generic.go:334] "Generic (PLEG): container finished" podID="3872f3be-7968-4412-8140-c8251291a7ed" containerID="5afcab9d11643826a55d42d96c24ef19bea486baf52a014b64095a4ea1de5fc1" exitCode=0 Jan 20 17:43:53 crc kubenswrapper[4558]: I0120 17:43:53.379978 4558 generic.go:334] "Generic (PLEG): container finished" podID="3872f3be-7968-4412-8140-c8251291a7ed" containerID="4e45332938c1fc9ba8d1484b89f589b471179ae20f172f3c0d3e3c976ac49898" exitCode=143 Jan 20 17:43:53 crc kubenswrapper[4558]: I0120 17:43:53.381540 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-7f9f9c9d78-s5gpm" Jan 20 17:43:53 crc kubenswrapper[4558]: I0120 17:43:53.382944 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-7f9f9c9d78-s5gpm" event={"ID":"3872f3be-7968-4412-8140-c8251291a7ed","Type":"ContainerDied","Data":"5afcab9d11643826a55d42d96c24ef19bea486baf52a014b64095a4ea1de5fc1"} Jan 20 17:43:53 crc kubenswrapper[4558]: I0120 17:43:53.383011 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-7f9f9c9d78-s5gpm" event={"ID":"3872f3be-7968-4412-8140-c8251291a7ed","Type":"ContainerDied","Data":"4e45332938c1fc9ba8d1484b89f589b471179ae20f172f3c0d3e3c976ac49898"} Jan 20 17:43:53 crc kubenswrapper[4558]: I0120 17:43:53.383027 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-7f9f9c9d78-s5gpm" event={"ID":"3872f3be-7968-4412-8140-c8251291a7ed","Type":"ContainerDied","Data":"ad8a009f4c19c959ce255125d1526b193f8f6d30dc44f751cdceb0ed7b0ec4de"} Jan 20 17:43:53 crc kubenswrapper[4558]: I0120 17:43:53.416120 4558 scope.go:117] "RemoveContainer" containerID="9d8c0d2d57ebdacd4f483efdc5e6ebefff5d373523eeebba687917ca86c856be" Jan 20 17:43:53 crc kubenswrapper[4558]: I0120 17:43:53.452527 4558 scope.go:117] "RemoveContainer" containerID="3358b6c2c0a492ce2df7675567ba772d472b5f09db162d54f169def48d48b722" Jan 20 17:43:53 crc kubenswrapper[4558]: I0120 17:43:53.452632 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-7f9f9c9d78-s5gpm"] Jan 20 17:43:53 crc kubenswrapper[4558]: E0120 17:43:53.453021 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3358b6c2c0a492ce2df7675567ba772d472b5f09db162d54f169def48d48b722\": container with ID starting with 3358b6c2c0a492ce2df7675567ba772d472b5f09db162d54f169def48d48b722 not found: ID does not exist" containerID="3358b6c2c0a492ce2df7675567ba772d472b5f09db162d54f169def48d48b722" Jan 20 17:43:53 crc kubenswrapper[4558]: I0120 17:43:53.453052 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3358b6c2c0a492ce2df7675567ba772d472b5f09db162d54f169def48d48b722"} err="failed to get container status \"3358b6c2c0a492ce2df7675567ba772d472b5f09db162d54f169def48d48b722\": rpc error: code = NotFound desc = could not find container \"3358b6c2c0a492ce2df7675567ba772d472b5f09db162d54f169def48d48b722\": container with ID starting with 3358b6c2c0a492ce2df7675567ba772d472b5f09db162d54f169def48d48b722 not found: ID does not exist" Jan 20 17:43:53 crc kubenswrapper[4558]: I0120 17:43:53.453073 4558 scope.go:117] "RemoveContainer" containerID="9d8c0d2d57ebdacd4f483efdc5e6ebefff5d373523eeebba687917ca86c856be" Jan 20 17:43:53 crc kubenswrapper[4558]: E0120 17:43:53.453818 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9d8c0d2d57ebdacd4f483efdc5e6ebefff5d373523eeebba687917ca86c856be\": container with ID starting with 9d8c0d2d57ebdacd4f483efdc5e6ebefff5d373523eeebba687917ca86c856be not found: ID does not exist" containerID="9d8c0d2d57ebdacd4f483efdc5e6ebefff5d373523eeebba687917ca86c856be" Jan 20 17:43:53 crc kubenswrapper[4558]: I0120 17:43:53.453842 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9d8c0d2d57ebdacd4f483efdc5e6ebefff5d373523eeebba687917ca86c856be"} err="failed to get container status 
\"9d8c0d2d57ebdacd4f483efdc5e6ebefff5d373523eeebba687917ca86c856be\": rpc error: code = NotFound desc = could not find container \"9d8c0d2d57ebdacd4f483efdc5e6ebefff5d373523eeebba687917ca86c856be\": container with ID starting with 9d8c0d2d57ebdacd4f483efdc5e6ebefff5d373523eeebba687917ca86c856be not found: ID does not exist" Jan 20 17:43:53 crc kubenswrapper[4558]: I0120 17:43:53.453856 4558 scope.go:117] "RemoveContainer" containerID="5afcab9d11643826a55d42d96c24ef19bea486baf52a014b64095a4ea1de5fc1" Jan 20 17:43:53 crc kubenswrapper[4558]: I0120 17:43:53.491411 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/56d39f72-48d3-4690-a20a-099cb41daa7e-scripts\") pod \"56d39f72-48d3-4690-a20a-099cb41daa7e\" (UID: \"56d39f72-48d3-4690-a20a-099cb41daa7e\") " Jan 20 17:43:53 crc kubenswrapper[4558]: I0120 17:43:53.491586 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/56d39f72-48d3-4690-a20a-099cb41daa7e-logs\") pod \"56d39f72-48d3-4690-a20a-099cb41daa7e\" (UID: \"56d39f72-48d3-4690-a20a-099cb41daa7e\") " Jan 20 17:43:53 crc kubenswrapper[4558]: I0120 17:43:53.491618 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/56d39f72-48d3-4690-a20a-099cb41daa7e-etc-machine-id\") pod \"56d39f72-48d3-4690-a20a-099cb41daa7e\" (UID: \"56d39f72-48d3-4690-a20a-099cb41daa7e\") " Jan 20 17:43:53 crc kubenswrapper[4558]: I0120 17:43:53.491830 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56d39f72-48d3-4690-a20a-099cb41daa7e-combined-ca-bundle\") pod \"56d39f72-48d3-4690-a20a-099cb41daa7e\" (UID: \"56d39f72-48d3-4690-a20a-099cb41daa7e\") " Jan 20 17:43:53 crc kubenswrapper[4558]: I0120 17:43:53.491931 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ntbmp\" (UniqueName: \"kubernetes.io/projected/56d39f72-48d3-4690-a20a-099cb41daa7e-kube-api-access-ntbmp\") pod \"56d39f72-48d3-4690-a20a-099cb41daa7e\" (UID: \"56d39f72-48d3-4690-a20a-099cb41daa7e\") " Jan 20 17:43:53 crc kubenswrapper[4558]: I0120 17:43:53.491969 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/56d39f72-48d3-4690-a20a-099cb41daa7e-public-tls-certs\") pod \"56d39f72-48d3-4690-a20a-099cb41daa7e\" (UID: \"56d39f72-48d3-4690-a20a-099cb41daa7e\") " Jan 20 17:43:53 crc kubenswrapper[4558]: I0120 17:43:53.492087 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56d39f72-48d3-4690-a20a-099cb41daa7e-config-data\") pod \"56d39f72-48d3-4690-a20a-099cb41daa7e\" (UID: \"56d39f72-48d3-4690-a20a-099cb41daa7e\") " Jan 20 17:43:53 crc kubenswrapper[4558]: I0120 17:43:53.492130 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/56d39f72-48d3-4690-a20a-099cb41daa7e-config-data-custom\") pod \"56d39f72-48d3-4690-a20a-099cb41daa7e\" (UID: \"56d39f72-48d3-4690-a20a-099cb41daa7e\") " Jan 20 17:43:53 crc kubenswrapper[4558]: I0120 17:43:53.492219 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/56d39f72-48d3-4690-a20a-099cb41daa7e-internal-tls-certs\") pod \"56d39f72-48d3-4690-a20a-099cb41daa7e\" (UID: \"56d39f72-48d3-4690-a20a-099cb41daa7e\") " Jan 20 17:43:53 crc kubenswrapper[4558]: I0120 17:43:53.496555 4558 scope.go:117] "RemoveContainer" containerID="4e45332938c1fc9ba8d1484b89f589b471179ae20f172f3c0d3e3c976ac49898" Jan 20 17:43:53 crc kubenswrapper[4558]: I0120 17:43:53.496581 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56d39f72-48d3-4690-a20a-099cb41daa7e-scripts" (OuterVolumeSpecName: "scripts") pod "56d39f72-48d3-4690-a20a-099cb41daa7e" (UID: "56d39f72-48d3-4690-a20a-099cb41daa7e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:53 crc kubenswrapper[4558]: I0120 17:43:53.496566 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/placement-7f9f9c9d78-s5gpm"] Jan 20 17:43:53 crc kubenswrapper[4558]: I0120 17:43:53.497129 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/56d39f72-48d3-4690-a20a-099cb41daa7e-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "56d39f72-48d3-4690-a20a-099cb41daa7e" (UID: "56d39f72-48d3-4690-a20a-099cb41daa7e"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:43:53 crc kubenswrapper[4558]: I0120 17:43:53.497512 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/56d39f72-48d3-4690-a20a-099cb41daa7e-logs" (OuterVolumeSpecName: "logs") pod "56d39f72-48d3-4690-a20a-099cb41daa7e" (UID: "56d39f72-48d3-4690-a20a-099cb41daa7e"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:43:53 crc kubenswrapper[4558]: I0120 17:43:53.497630 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/56d39f72-48d3-4690-a20a-099cb41daa7e-kube-api-access-ntbmp" (OuterVolumeSpecName: "kube-api-access-ntbmp") pod "56d39f72-48d3-4690-a20a-099cb41daa7e" (UID: "56d39f72-48d3-4690-a20a-099cb41daa7e"). InnerVolumeSpecName "kube-api-access-ntbmp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:43:53 crc kubenswrapper[4558]: I0120 17:43:53.500441 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56d39f72-48d3-4690-a20a-099cb41daa7e-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "56d39f72-48d3-4690-a20a-099cb41daa7e" (UID: "56d39f72-48d3-4690-a20a-099cb41daa7e"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:53 crc kubenswrapper[4558]: I0120 17:43:53.533026 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56d39f72-48d3-4690-a20a-099cb41daa7e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "56d39f72-48d3-4690-a20a-099cb41daa7e" (UID: "56d39f72-48d3-4690-a20a-099cb41daa7e"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:53 crc kubenswrapper[4558]: I0120 17:43:53.538658 4558 scope.go:117] "RemoveContainer" containerID="5afcab9d11643826a55d42d96c24ef19bea486baf52a014b64095a4ea1de5fc1" Jan 20 17:43:53 crc kubenswrapper[4558]: E0120 17:43:53.539391 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5afcab9d11643826a55d42d96c24ef19bea486baf52a014b64095a4ea1de5fc1\": container with ID starting with 5afcab9d11643826a55d42d96c24ef19bea486baf52a014b64095a4ea1de5fc1 not found: ID does not exist" containerID="5afcab9d11643826a55d42d96c24ef19bea486baf52a014b64095a4ea1de5fc1" Jan 20 17:43:53 crc kubenswrapper[4558]: I0120 17:43:53.539423 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5afcab9d11643826a55d42d96c24ef19bea486baf52a014b64095a4ea1de5fc1"} err="failed to get container status \"5afcab9d11643826a55d42d96c24ef19bea486baf52a014b64095a4ea1de5fc1\": rpc error: code = NotFound desc = could not find container \"5afcab9d11643826a55d42d96c24ef19bea486baf52a014b64095a4ea1de5fc1\": container with ID starting with 5afcab9d11643826a55d42d96c24ef19bea486baf52a014b64095a4ea1de5fc1 not found: ID does not exist" Jan 20 17:43:53 crc kubenswrapper[4558]: I0120 17:43:53.539445 4558 scope.go:117] "RemoveContainer" containerID="4e45332938c1fc9ba8d1484b89f589b471179ae20f172f3c0d3e3c976ac49898" Jan 20 17:43:53 crc kubenswrapper[4558]: E0120 17:43:53.539713 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4e45332938c1fc9ba8d1484b89f589b471179ae20f172f3c0d3e3c976ac49898\": container with ID starting with 4e45332938c1fc9ba8d1484b89f589b471179ae20f172f3c0d3e3c976ac49898 not found: ID does not exist" containerID="4e45332938c1fc9ba8d1484b89f589b471179ae20f172f3c0d3e3c976ac49898" Jan 20 17:43:53 crc kubenswrapper[4558]: I0120 17:43:53.539754 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4e45332938c1fc9ba8d1484b89f589b471179ae20f172f3c0d3e3c976ac49898"} err="failed to get container status \"4e45332938c1fc9ba8d1484b89f589b471179ae20f172f3c0d3e3c976ac49898\": rpc error: code = NotFound desc = could not find container \"4e45332938c1fc9ba8d1484b89f589b471179ae20f172f3c0d3e3c976ac49898\": container with ID starting with 4e45332938c1fc9ba8d1484b89f589b471179ae20f172f3c0d3e3c976ac49898 not found: ID does not exist" Jan 20 17:43:53 crc kubenswrapper[4558]: I0120 17:43:53.539780 4558 scope.go:117] "RemoveContainer" containerID="5afcab9d11643826a55d42d96c24ef19bea486baf52a014b64095a4ea1de5fc1" Jan 20 17:43:53 crc kubenswrapper[4558]: I0120 17:43:53.540080 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5afcab9d11643826a55d42d96c24ef19bea486baf52a014b64095a4ea1de5fc1"} err="failed to get container status \"5afcab9d11643826a55d42d96c24ef19bea486baf52a014b64095a4ea1de5fc1\": rpc error: code = NotFound desc = could not find container \"5afcab9d11643826a55d42d96c24ef19bea486baf52a014b64095a4ea1de5fc1\": container with ID starting with 5afcab9d11643826a55d42d96c24ef19bea486baf52a014b64095a4ea1de5fc1 not found: ID does not exist" Jan 20 17:43:53 crc kubenswrapper[4558]: I0120 17:43:53.540104 4558 scope.go:117] "RemoveContainer" containerID="4e45332938c1fc9ba8d1484b89f589b471179ae20f172f3c0d3e3c976ac49898" Jan 20 17:43:53 crc kubenswrapper[4558]: I0120 17:43:53.540371 4558 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4e45332938c1fc9ba8d1484b89f589b471179ae20f172f3c0d3e3c976ac49898"} err="failed to get container status \"4e45332938c1fc9ba8d1484b89f589b471179ae20f172f3c0d3e3c976ac49898\": rpc error: code = NotFound desc = could not find container \"4e45332938c1fc9ba8d1484b89f589b471179ae20f172f3c0d3e3c976ac49898\": container with ID starting with 4e45332938c1fc9ba8d1484b89f589b471179ae20f172f3c0d3e3c976ac49898 not found: ID does not exist" Jan 20 17:43:53 crc kubenswrapper[4558]: I0120 17:43:53.557285 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56d39f72-48d3-4690-a20a-099cb41daa7e-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "56d39f72-48d3-4690-a20a-099cb41daa7e" (UID: "56d39f72-48d3-4690-a20a-099cb41daa7e"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:53 crc kubenswrapper[4558]: I0120 17:43:53.557476 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56d39f72-48d3-4690-a20a-099cb41daa7e-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "56d39f72-48d3-4690-a20a-099cb41daa7e" (UID: "56d39f72-48d3-4690-a20a-099cb41daa7e"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:53 crc kubenswrapper[4558]: I0120 17:43:53.564965 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56d39f72-48d3-4690-a20a-099cb41daa7e-config-data" (OuterVolumeSpecName: "config-data") pod "56d39f72-48d3-4690-a20a-099cb41daa7e" (UID: "56d39f72-48d3-4690-a20a-099cb41daa7e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:53 crc kubenswrapper[4558]: I0120 17:43:53.596538 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56d39f72-48d3-4690-a20a-099cb41daa7e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:53 crc kubenswrapper[4558]: I0120 17:43:53.596654 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ntbmp\" (UniqueName: \"kubernetes.io/projected/56d39f72-48d3-4690-a20a-099cb41daa7e-kube-api-access-ntbmp\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:53 crc kubenswrapper[4558]: I0120 17:43:53.596733 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/56d39f72-48d3-4690-a20a-099cb41daa7e-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:53 crc kubenswrapper[4558]: I0120 17:43:53.596824 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56d39f72-48d3-4690-a20a-099cb41daa7e-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:53 crc kubenswrapper[4558]: I0120 17:43:53.596876 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/56d39f72-48d3-4690-a20a-099cb41daa7e-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:53 crc kubenswrapper[4558]: I0120 17:43:53.596960 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/56d39f72-48d3-4690-a20a-099cb41daa7e-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:53 crc kubenswrapper[4558]: I0120 17:43:53.597028 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/56d39f72-48d3-4690-a20a-099cb41daa7e-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:53 crc kubenswrapper[4558]: I0120 17:43:53.597084 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/56d39f72-48d3-4690-a20a-099cb41daa7e-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:53 crc kubenswrapper[4558]: I0120 17:43:53.597138 4558 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/56d39f72-48d3-4690-a20a-099cb41daa7e-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:54 crc kubenswrapper[4558]: I0120 17:43:54.250059 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:54 crc kubenswrapper[4558]: I0120 17:43:54.250332 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:54 crc kubenswrapper[4558]: I0120 17:43:54.277807 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:54 crc kubenswrapper[4558]: I0120 17:43:54.289267 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:54 crc kubenswrapper[4558]: I0120 17:43:54.306871 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:54 crc kubenswrapper[4558]: I0120 17:43:54.306924 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:54 crc kubenswrapper[4558]: I0120 17:43:54.345417 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:54 crc kubenswrapper[4558]: I0120 17:43:54.351377 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:54 crc kubenswrapper[4558]: I0120 17:43:54.392674 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-69995768b6-hpdg2" event={"ID":"a6d1823f-8885-40f1-b85d-5ee22197dc09","Type":"ContainerStarted","Data":"73b904a1c9cb3b18a20eeaf72083790b7ae09f43aad729dd0878ee2ad744f046"} Jan 20 17:43:54 crc kubenswrapper[4558]: I0120 17:43:54.392738 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/barbican-api-69995768b6-hpdg2" Jan 20 17:43:54 crc kubenswrapper[4558]: I0120 17:43:54.392755 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/barbican-api-69995768b6-hpdg2" Jan 20 17:43:54 crc kubenswrapper[4558]: I0120 17:43:54.395531 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:54 crc kubenswrapper[4558]: I0120 17:43:54.398811 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:54 crc kubenswrapper[4558]: I0120 17:43:54.398839 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:54 crc kubenswrapper[4558]: I0120 17:43:54.398856 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:54 crc kubenswrapper[4558]: I0120 17:43:54.398865 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:54 crc kubenswrapper[4558]: I0120 17:43:54.443744 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-api-69995768b6-hpdg2" podStartSLOduration=2.443705424 podStartE2EDuration="2.443705424s" podCreationTimestamp="2026-01-20 17:43:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:43:54.416582025 +0000 UTC m=+3728.176919993" watchObservedRunningTime="2026-01-20 17:43:54.443705424 +0000 UTC m=+3728.204043392" Jan 20 17:43:54 crc kubenswrapper[4558]: I0120 17:43:54.464069 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:43:54 crc kubenswrapper[4558]: I0120 17:43:54.493432 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:43:54 crc kubenswrapper[4558]: I0120 17:43:54.549766 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:43:54 crc kubenswrapper[4558]: E0120 17:43:54.550521 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56d39f72-48d3-4690-a20a-099cb41daa7e" containerName="cinder-api-log" Jan 20 17:43:54 crc kubenswrapper[4558]: I0120 17:43:54.550583 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="56d39f72-48d3-4690-a20a-099cb41daa7e" 
containerName="cinder-api-log" Jan 20 17:43:54 crc kubenswrapper[4558]: E0120 17:43:54.550655 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3872f3be-7968-4412-8140-c8251291a7ed" containerName="placement-log" Jan 20 17:43:54 crc kubenswrapper[4558]: I0120 17:43:54.550704 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="3872f3be-7968-4412-8140-c8251291a7ed" containerName="placement-log" Jan 20 17:43:54 crc kubenswrapper[4558]: E0120 17:43:54.550762 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56d39f72-48d3-4690-a20a-099cb41daa7e" containerName="cinder-api" Jan 20 17:43:54 crc kubenswrapper[4558]: I0120 17:43:54.550805 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="56d39f72-48d3-4690-a20a-099cb41daa7e" containerName="cinder-api" Jan 20 17:43:54 crc kubenswrapper[4558]: E0120 17:43:54.550863 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3872f3be-7968-4412-8140-c8251291a7ed" containerName="placement-api" Jan 20 17:43:54 crc kubenswrapper[4558]: I0120 17:43:54.550920 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="3872f3be-7968-4412-8140-c8251291a7ed" containerName="placement-api" Jan 20 17:43:54 crc kubenswrapper[4558]: I0120 17:43:54.551185 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="3872f3be-7968-4412-8140-c8251291a7ed" containerName="placement-api" Jan 20 17:43:54 crc kubenswrapper[4558]: I0120 17:43:54.551256 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="56d39f72-48d3-4690-a20a-099cb41daa7e" containerName="cinder-api-log" Jan 20 17:43:54 crc kubenswrapper[4558]: I0120 17:43:54.551313 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="3872f3be-7968-4412-8140-c8251291a7ed" containerName="placement-log" Jan 20 17:43:54 crc kubenswrapper[4558]: I0120 17:43:54.551373 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="56d39f72-48d3-4690-a20a-099cb41daa7e" containerName="cinder-api" Jan 20 17:43:54 crc kubenswrapper[4558]: I0120 17:43:54.552740 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:54 crc kubenswrapper[4558]: I0120 17:43:54.559118 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-cinder-internal-svc" Jan 20 17:43:54 crc kubenswrapper[4558]: I0120 17:43:54.559352 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-cinder-public-svc" Jan 20 17:43:54 crc kubenswrapper[4558]: I0120 17:43:54.559425 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-api-config-data" Jan 20 17:43:54 crc kubenswrapper[4558]: I0120 17:43:54.577074 4558 scope.go:117] "RemoveContainer" containerID="bb8488b5aee706047405a606b3a23b8bd6228cb32059c06329cbdb7eefcf16f2" Jan 20 17:43:54 crc kubenswrapper[4558]: I0120 17:43:54.595091 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3872f3be-7968-4412-8140-c8251291a7ed" path="/var/lib/kubelet/pods/3872f3be-7968-4412-8140-c8251291a7ed/volumes" Jan 20 17:43:54 crc kubenswrapper[4558]: I0120 17:43:54.595828 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="56d39f72-48d3-4690-a20a-099cb41daa7e" path="/var/lib/kubelet/pods/56d39f72-48d3-4690-a20a-099cb41daa7e/volumes" Jan 20 17:43:54 crc kubenswrapper[4558]: I0120 17:43:54.598103 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:43:54 crc kubenswrapper[4558]: I0120 17:43:54.720640 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b752af55-9c06-469f-9353-e1042300de3c-public-tls-certs\") pod \"cinder-api-0\" (UID: \"b752af55-9c06-469f-9353-e1042300de3c\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:54 crc kubenswrapper[4558]: I0120 17:43:54.720713 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b752af55-9c06-469f-9353-e1042300de3c-config-data\") pod \"cinder-api-0\" (UID: \"b752af55-9c06-469f-9353-e1042300de3c\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:54 crc kubenswrapper[4558]: I0120 17:43:54.720772 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b752af55-9c06-469f-9353-e1042300de3c-etc-machine-id\") pod \"cinder-api-0\" (UID: \"b752af55-9c06-469f-9353-e1042300de3c\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:54 crc kubenswrapper[4558]: I0120 17:43:54.720840 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b752af55-9c06-469f-9353-e1042300de3c-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"b752af55-9c06-469f-9353-e1042300de3c\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:54 crc kubenswrapper[4558]: I0120 17:43:54.720963 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b752af55-9c06-469f-9353-e1042300de3c-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"b752af55-9c06-469f-9353-e1042300de3c\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:54 crc kubenswrapper[4558]: I0120 17:43:54.721019 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-hn2ts\" (UniqueName: \"kubernetes.io/projected/b752af55-9c06-469f-9353-e1042300de3c-kube-api-access-hn2ts\") pod \"cinder-api-0\" (UID: \"b752af55-9c06-469f-9353-e1042300de3c\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:54 crc kubenswrapper[4558]: I0120 17:43:54.721068 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b752af55-9c06-469f-9353-e1042300de3c-scripts\") pod \"cinder-api-0\" (UID: \"b752af55-9c06-469f-9353-e1042300de3c\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:54 crc kubenswrapper[4558]: I0120 17:43:54.721122 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b752af55-9c06-469f-9353-e1042300de3c-logs\") pod \"cinder-api-0\" (UID: \"b752af55-9c06-469f-9353-e1042300de3c\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:54 crc kubenswrapper[4558]: I0120 17:43:54.721437 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b752af55-9c06-469f-9353-e1042300de3c-config-data-custom\") pod \"cinder-api-0\" (UID: \"b752af55-9c06-469f-9353-e1042300de3c\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:54 crc kubenswrapper[4558]: I0120 17:43:54.823371 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hn2ts\" (UniqueName: \"kubernetes.io/projected/b752af55-9c06-469f-9353-e1042300de3c-kube-api-access-hn2ts\") pod \"cinder-api-0\" (UID: \"b752af55-9c06-469f-9353-e1042300de3c\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:54 crc kubenswrapper[4558]: I0120 17:43:54.823745 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b752af55-9c06-469f-9353-e1042300de3c-scripts\") pod \"cinder-api-0\" (UID: \"b752af55-9c06-469f-9353-e1042300de3c\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:54 crc kubenswrapper[4558]: I0120 17:43:54.823861 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b752af55-9c06-469f-9353-e1042300de3c-logs\") pod \"cinder-api-0\" (UID: \"b752af55-9c06-469f-9353-e1042300de3c\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:54 crc kubenswrapper[4558]: I0120 17:43:54.824157 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b752af55-9c06-469f-9353-e1042300de3c-config-data-custom\") pod \"cinder-api-0\" (UID: \"b752af55-9c06-469f-9353-e1042300de3c\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:54 crc kubenswrapper[4558]: I0120 17:43:54.824268 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b752af55-9c06-469f-9353-e1042300de3c-public-tls-certs\") pod \"cinder-api-0\" (UID: \"b752af55-9c06-469f-9353-e1042300de3c\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:54 crc kubenswrapper[4558]: I0120 17:43:54.824428 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b752af55-9c06-469f-9353-e1042300de3c-config-data\") pod \"cinder-api-0\" (UID: \"b752af55-9c06-469f-9353-e1042300de3c\") " 
pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:54 crc kubenswrapper[4558]: I0120 17:43:54.824496 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b752af55-9c06-469f-9353-e1042300de3c-etc-machine-id\") pod \"cinder-api-0\" (UID: \"b752af55-9c06-469f-9353-e1042300de3c\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:54 crc kubenswrapper[4558]: I0120 17:43:54.824575 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b752af55-9c06-469f-9353-e1042300de3c-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"b752af55-9c06-469f-9353-e1042300de3c\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:54 crc kubenswrapper[4558]: I0120 17:43:54.824690 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b752af55-9c06-469f-9353-e1042300de3c-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"b752af55-9c06-469f-9353-e1042300de3c\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:54 crc kubenswrapper[4558]: I0120 17:43:54.824749 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b752af55-9c06-469f-9353-e1042300de3c-logs\") pod \"cinder-api-0\" (UID: \"b752af55-9c06-469f-9353-e1042300de3c\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:54 crc kubenswrapper[4558]: I0120 17:43:54.824896 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b752af55-9c06-469f-9353-e1042300de3c-etc-machine-id\") pod \"cinder-api-0\" (UID: \"b752af55-9c06-469f-9353-e1042300de3c\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:54 crc kubenswrapper[4558]: I0120 17:43:54.829561 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b752af55-9c06-469f-9353-e1042300de3c-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"b752af55-9c06-469f-9353-e1042300de3c\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:54 crc kubenswrapper[4558]: I0120 17:43:54.837390 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b752af55-9c06-469f-9353-e1042300de3c-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"b752af55-9c06-469f-9353-e1042300de3c\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:54 crc kubenswrapper[4558]: I0120 17:43:54.838452 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b752af55-9c06-469f-9353-e1042300de3c-scripts\") pod \"cinder-api-0\" (UID: \"b752af55-9c06-469f-9353-e1042300de3c\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:54 crc kubenswrapper[4558]: I0120 17:43:54.838558 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b752af55-9c06-469f-9353-e1042300de3c-config-data-custom\") pod \"cinder-api-0\" (UID: \"b752af55-9c06-469f-9353-e1042300de3c\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:54 crc kubenswrapper[4558]: I0120 17:43:54.838881 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b752af55-9c06-469f-9353-e1042300de3c-public-tls-certs\") 
pod \"cinder-api-0\" (UID: \"b752af55-9c06-469f-9353-e1042300de3c\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:54 crc kubenswrapper[4558]: I0120 17:43:54.839011 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b752af55-9c06-469f-9353-e1042300de3c-config-data\") pod \"cinder-api-0\" (UID: \"b752af55-9c06-469f-9353-e1042300de3c\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:54 crc kubenswrapper[4558]: I0120 17:43:54.842032 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hn2ts\" (UniqueName: \"kubernetes.io/projected/b752af55-9c06-469f-9353-e1042300de3c-kube-api-access-hn2ts\") pod \"cinder-api-0\" (UID: \"b752af55-9c06-469f-9353-e1042300de3c\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:54 crc kubenswrapper[4558]: I0120 17:43:54.876776 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-976c48458-wzswq" Jan 20 17:43:54 crc kubenswrapper[4558]: I0120 17:43:54.880054 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.030765 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b6ba5f2a-6486-4a84-bf85-23c00e907701-scripts\") pod \"b6ba5f2a-6486-4a84-bf85-23c00e907701\" (UID: \"b6ba5f2a-6486-4a84-bf85-23c00e907701\") " Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.031046 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d9whj\" (UniqueName: \"kubernetes.io/projected/b6ba5f2a-6486-4a84-bf85-23c00e907701-kube-api-access-d9whj\") pod \"b6ba5f2a-6486-4a84-bf85-23c00e907701\" (UID: \"b6ba5f2a-6486-4a84-bf85-23c00e907701\") " Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.031142 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b6ba5f2a-6486-4a84-bf85-23c00e907701-logs\") pod \"b6ba5f2a-6486-4a84-bf85-23c00e907701\" (UID: \"b6ba5f2a-6486-4a84-bf85-23c00e907701\") " Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.031207 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6ba5f2a-6486-4a84-bf85-23c00e907701-config-data\") pod \"b6ba5f2a-6486-4a84-bf85-23c00e907701\" (UID: \"b6ba5f2a-6486-4a84-bf85-23c00e907701\") " Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.031250 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6ba5f2a-6486-4a84-bf85-23c00e907701-public-tls-certs\") pod \"b6ba5f2a-6486-4a84-bf85-23c00e907701\" (UID: \"b6ba5f2a-6486-4a84-bf85-23c00e907701\") " Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.031276 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6ba5f2a-6486-4a84-bf85-23c00e907701-internal-tls-certs\") pod \"b6ba5f2a-6486-4a84-bf85-23c00e907701\" (UID: \"b6ba5f2a-6486-4a84-bf85-23c00e907701\") " Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.031448 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/b6ba5f2a-6486-4a84-bf85-23c00e907701-combined-ca-bundle\") pod \"b6ba5f2a-6486-4a84-bf85-23c00e907701\" (UID: \"b6ba5f2a-6486-4a84-bf85-23c00e907701\") " Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.031670 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b6ba5f2a-6486-4a84-bf85-23c00e907701-logs" (OuterVolumeSpecName: "logs") pod "b6ba5f2a-6486-4a84-bf85-23c00e907701" (UID: "b6ba5f2a-6486-4a84-bf85-23c00e907701"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.032047 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b6ba5f2a-6486-4a84-bf85-23c00e907701-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.037517 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6ba5f2a-6486-4a84-bf85-23c00e907701-kube-api-access-d9whj" (OuterVolumeSpecName: "kube-api-access-d9whj") pod "b6ba5f2a-6486-4a84-bf85-23c00e907701" (UID: "b6ba5f2a-6486-4a84-bf85-23c00e907701"). InnerVolumeSpecName "kube-api-access-d9whj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.042289 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6ba5f2a-6486-4a84-bf85-23c00e907701-scripts" (OuterVolumeSpecName: "scripts") pod "b6ba5f2a-6486-4a84-bf85-23c00e907701" (UID: "b6ba5f2a-6486-4a84-bf85-23c00e907701"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.103278 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6ba5f2a-6486-4a84-bf85-23c00e907701-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b6ba5f2a-6486-4a84-bf85-23c00e907701" (UID: "b6ba5f2a-6486-4a84-bf85-23c00e907701"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.104519 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6ba5f2a-6486-4a84-bf85-23c00e907701-config-data" (OuterVolumeSpecName: "config-data") pod "b6ba5f2a-6486-4a84-bf85-23c00e907701" (UID: "b6ba5f2a-6486-4a84-bf85-23c00e907701"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.135958 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6ba5f2a-6486-4a84-bf85-23c00e907701-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.135987 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b6ba5f2a-6486-4a84-bf85-23c00e907701-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.136000 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d9whj\" (UniqueName: \"kubernetes.io/projected/b6ba5f2a-6486-4a84-bf85-23c00e907701-kube-api-access-d9whj\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.136010 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6ba5f2a-6486-4a84-bf85-23c00e907701-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.164874 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6ba5f2a-6486-4a84-bf85-23c00e907701-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "b6ba5f2a-6486-4a84-bf85-23c00e907701" (UID: "b6ba5f2a-6486-4a84-bf85-23c00e907701"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.206415 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6ba5f2a-6486-4a84-bf85-23c00e907701-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "b6ba5f2a-6486-4a84-bf85-23c00e907701" (UID: "b6ba5f2a-6486-4a84-bf85-23c00e907701"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.230880 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-api-69995768b6-hpdg2"] Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.245760 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6ba5f2a-6486-4a84-bf85-23c00e907701-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.245790 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6ba5f2a-6486-4a84-bf85-23c00e907701-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.328127 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-api-74bb4f5f4-5ppmg"] Jan 20 17:43:55 crc kubenswrapper[4558]: E0120 17:43:55.328540 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6ba5f2a-6486-4a84-bf85-23c00e907701" containerName="placement-log" Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.328561 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6ba5f2a-6486-4a84-bf85-23c00e907701" containerName="placement-log" Jan 20 17:43:55 crc kubenswrapper[4558]: E0120 17:43:55.328588 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6ba5f2a-6486-4a84-bf85-23c00e907701" containerName="placement-api" Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.328595 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6ba5f2a-6486-4a84-bf85-23c00e907701" containerName="placement-api" Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.328794 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b6ba5f2a-6486-4a84-bf85-23c00e907701" containerName="placement-log" Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.328811 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b6ba5f2a-6486-4a84-bf85-23c00e907701" containerName="placement-api" Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.329766 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-api-74bb4f5f4-5ppmg" Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.376066 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-api-74bb4f5f4-5ppmg"] Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.392318 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.417223 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.456675 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d619cd37-a474-4965-b382-749ed6d55d6d-combined-ca-bundle\") pod \"barbican-api-74bb4f5f4-5ppmg\" (UID: \"d619cd37-a474-4965-b382-749ed6d55d6d\") " pod="openstack-kuttl-tests/barbican-api-74bb4f5f4-5ppmg" Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.456719 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d619cd37-a474-4965-b382-749ed6d55d6d-logs\") pod \"barbican-api-74bb4f5f4-5ppmg\" (UID: \"d619cd37-a474-4965-b382-749ed6d55d6d\") " pod="openstack-kuttl-tests/barbican-api-74bb4f5f4-5ppmg" Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.456747 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d619cd37-a474-4965-b382-749ed6d55d6d-public-tls-certs\") pod \"barbican-api-74bb4f5f4-5ppmg\" (UID: \"d619cd37-a474-4965-b382-749ed6d55d6d\") " pod="openstack-kuttl-tests/barbican-api-74bb4f5f4-5ppmg" Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.456780 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d619cd37-a474-4965-b382-749ed6d55d6d-config-data-custom\") pod \"barbican-api-74bb4f5f4-5ppmg\" (UID: \"d619cd37-a474-4965-b382-749ed6d55d6d\") " pod="openstack-kuttl-tests/barbican-api-74bb4f5f4-5ppmg" Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.456947 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d619cd37-a474-4965-b382-749ed6d55d6d-config-data\") pod \"barbican-api-74bb4f5f4-5ppmg\" (UID: \"d619cd37-a474-4965-b382-749ed6d55d6d\") " pod="openstack-kuttl-tests/barbican-api-74bb4f5f4-5ppmg" Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.457242 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d619cd37-a474-4965-b382-749ed6d55d6d-internal-tls-certs\") pod \"barbican-api-74bb4f5f4-5ppmg\" (UID: \"d619cd37-a474-4965-b382-749ed6d55d6d\") " pod="openstack-kuttl-tests/barbican-api-74bb4f5f4-5ppmg" Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.458034 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4kqlf\" (UniqueName: \"kubernetes.io/projected/d619cd37-a474-4965-b382-749ed6d55d6d-kube-api-access-4kqlf\") pod \"barbican-api-74bb4f5f4-5ppmg\" (UID: \"d619cd37-a474-4965-b382-749ed6d55d6d\") " pod="openstack-kuttl-tests/barbican-api-74bb4f5f4-5ppmg" Jan 20 17:43:55 
crc kubenswrapper[4558]: I0120 17:43:55.462323 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"b752af55-9c06-469f-9353-e1042300de3c","Type":"ContainerStarted","Data":"bccce61b5ba6d2aba1d38b7c559ba654190565ff2cbc1a51e114282f3188cc47"} Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.484724 4558 generic.go:334] "Generic (PLEG): container finished" podID="b6ba5f2a-6486-4a84-bf85-23c00e907701" containerID="76ef1a400330d7e58be6b4ac9281c05593aa3360863f0b8430f1369f13006b6c" exitCode=0 Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.484781 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-976c48458-wzswq" event={"ID":"b6ba5f2a-6486-4a84-bf85-23c00e907701","Type":"ContainerDied","Data":"76ef1a400330d7e58be6b4ac9281c05593aa3360863f0b8430f1369f13006b6c"} Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.484807 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-976c48458-wzswq" event={"ID":"b6ba5f2a-6486-4a84-bf85-23c00e907701","Type":"ContainerDied","Data":"40f50103f3b6fefc0e60b4ba58c1c864622943da91e82cbad5dd4a5d1cd1a4ce"} Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.484825 4558 scope.go:117] "RemoveContainer" containerID="76ef1a400330d7e58be6b4ac9281c05593aa3360863f0b8430f1369f13006b6c" Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.484986 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-976c48458-wzswq" Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.518356 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"50cacddd-ebea-477f-af64-6e96a09a242e","Type":"ContainerStarted","Data":"696eca395715bd4cf46b8dd0ddb53ef0ad8c84c94f7e91863731528e53ac125e"} Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.518811 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.541207 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-976c48458-wzswq"] Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.545699 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/placement-976c48458-wzswq"] Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.550407 4558 scope.go:117] "RemoveContainer" containerID="793b7e60b027092ef8139183c3e6dd4b54469caf6cf02f94955f48fe3bbfbd9f" Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.560695 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d619cd37-a474-4965-b382-749ed6d55d6d-combined-ca-bundle\") pod \"barbican-api-74bb4f5f4-5ppmg\" (UID: \"d619cd37-a474-4965-b382-749ed6d55d6d\") " pod="openstack-kuttl-tests/barbican-api-74bb4f5f4-5ppmg" Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.560738 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d619cd37-a474-4965-b382-749ed6d55d6d-logs\") pod \"barbican-api-74bb4f5f4-5ppmg\" (UID: \"d619cd37-a474-4965-b382-749ed6d55d6d\") " pod="openstack-kuttl-tests/barbican-api-74bb4f5f4-5ppmg" Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.560765 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" 
(UniqueName: \"kubernetes.io/secret/d619cd37-a474-4965-b382-749ed6d55d6d-config-data-custom\") pod \"barbican-api-74bb4f5f4-5ppmg\" (UID: \"d619cd37-a474-4965-b382-749ed6d55d6d\") " pod="openstack-kuttl-tests/barbican-api-74bb4f5f4-5ppmg" Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.560784 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d619cd37-a474-4965-b382-749ed6d55d6d-public-tls-certs\") pod \"barbican-api-74bb4f5f4-5ppmg\" (UID: \"d619cd37-a474-4965-b382-749ed6d55d6d\") " pod="openstack-kuttl-tests/barbican-api-74bb4f5f4-5ppmg" Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.560970 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d619cd37-a474-4965-b382-749ed6d55d6d-config-data\") pod \"barbican-api-74bb4f5f4-5ppmg\" (UID: \"d619cd37-a474-4965-b382-749ed6d55d6d\") " pod="openstack-kuttl-tests/barbican-api-74bb4f5f4-5ppmg" Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.561109 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d619cd37-a474-4965-b382-749ed6d55d6d-internal-tls-certs\") pod \"barbican-api-74bb4f5f4-5ppmg\" (UID: \"d619cd37-a474-4965-b382-749ed6d55d6d\") " pod="openstack-kuttl-tests/barbican-api-74bb4f5f4-5ppmg" Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.561138 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4kqlf\" (UniqueName: \"kubernetes.io/projected/d619cd37-a474-4965-b382-749ed6d55d6d-kube-api-access-4kqlf\") pod \"barbican-api-74bb4f5f4-5ppmg\" (UID: \"d619cd37-a474-4965-b382-749ed6d55d6d\") " pod="openstack-kuttl-tests/barbican-api-74bb4f5f4-5ppmg" Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.561670 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d619cd37-a474-4965-b382-749ed6d55d6d-logs\") pod \"barbican-api-74bb4f5f4-5ppmg\" (UID: \"d619cd37-a474-4965-b382-749ed6d55d6d\") " pod="openstack-kuttl-tests/barbican-api-74bb4f5f4-5ppmg" Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.567126 4558 scope.go:117] "RemoveContainer" containerID="2d2a8989ca5a5af98b2c9dc8f801ea0675339ec14800f437c058c5a43c20146b" Jan 20 17:43:55 crc kubenswrapper[4558]: E0120 17:43:55.567548 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.574727 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d619cd37-a474-4965-b382-749ed6d55d6d-combined-ca-bundle\") pod \"barbican-api-74bb4f5f4-5ppmg\" (UID: \"d619cd37-a474-4965-b382-749ed6d55d6d\") " pod="openstack-kuttl-tests/barbican-api-74bb4f5f4-5ppmg" Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.575273 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d619cd37-a474-4965-b382-749ed6d55d6d-config-data-custom\") pod 
\"barbican-api-74bb4f5f4-5ppmg\" (UID: \"d619cd37-a474-4965-b382-749ed6d55d6d\") " pod="openstack-kuttl-tests/barbican-api-74bb4f5f4-5ppmg" Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.581826 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d619cd37-a474-4965-b382-749ed6d55d6d-internal-tls-certs\") pod \"barbican-api-74bb4f5f4-5ppmg\" (UID: \"d619cd37-a474-4965-b382-749ed6d55d6d\") " pod="openstack-kuttl-tests/barbican-api-74bb4f5f4-5ppmg" Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.584141 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d619cd37-a474-4965-b382-749ed6d55d6d-config-data\") pod \"barbican-api-74bb4f5f4-5ppmg\" (UID: \"d619cd37-a474-4965-b382-749ed6d55d6d\") " pod="openstack-kuttl-tests/barbican-api-74bb4f5f4-5ppmg" Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.585559 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d619cd37-a474-4965-b382-749ed6d55d6d-public-tls-certs\") pod \"barbican-api-74bb4f5f4-5ppmg\" (UID: \"d619cd37-a474-4965-b382-749ed6d55d6d\") " pod="openstack-kuttl-tests/barbican-api-74bb4f5f4-5ppmg" Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.637659 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4kqlf\" (UniqueName: \"kubernetes.io/projected/d619cd37-a474-4965-b382-749ed6d55d6d-kube-api-access-4kqlf\") pod \"barbican-api-74bb4f5f4-5ppmg\" (UID: \"d619cd37-a474-4965-b382-749ed6d55d6d\") " pod="openstack-kuttl-tests/barbican-api-74bb4f5f4-5ppmg" Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.654151 4558 scope.go:117] "RemoveContainer" containerID="76ef1a400330d7e58be6b4ac9281c05593aa3360863f0b8430f1369f13006b6c" Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.669661 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-api-74bb4f5f4-5ppmg" Jan 20 17:43:55 crc kubenswrapper[4558]: E0120 17:43:55.673863 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"76ef1a400330d7e58be6b4ac9281c05593aa3360863f0b8430f1369f13006b6c\": container with ID starting with 76ef1a400330d7e58be6b4ac9281c05593aa3360863f0b8430f1369f13006b6c not found: ID does not exist" containerID="76ef1a400330d7e58be6b4ac9281c05593aa3360863f0b8430f1369f13006b6c" Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.673904 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"76ef1a400330d7e58be6b4ac9281c05593aa3360863f0b8430f1369f13006b6c"} err="failed to get container status \"76ef1a400330d7e58be6b4ac9281c05593aa3360863f0b8430f1369f13006b6c\": rpc error: code = NotFound desc = could not find container \"76ef1a400330d7e58be6b4ac9281c05593aa3360863f0b8430f1369f13006b6c\": container with ID starting with 76ef1a400330d7e58be6b4ac9281c05593aa3360863f0b8430f1369f13006b6c not found: ID does not exist" Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.673939 4558 scope.go:117] "RemoveContainer" containerID="793b7e60b027092ef8139183c3e6dd4b54469caf6cf02f94955f48fe3bbfbd9f" Jan 20 17:43:55 crc kubenswrapper[4558]: E0120 17:43:55.689278 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"793b7e60b027092ef8139183c3e6dd4b54469caf6cf02f94955f48fe3bbfbd9f\": container with ID starting with 793b7e60b027092ef8139183c3e6dd4b54469caf6cf02f94955f48fe3bbfbd9f not found: ID does not exist" containerID="793b7e60b027092ef8139183c3e6dd4b54469caf6cf02f94955f48fe3bbfbd9f" Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.689319 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"793b7e60b027092ef8139183c3e6dd4b54469caf6cf02f94955f48fe3bbfbd9f"} err="failed to get container status \"793b7e60b027092ef8139183c3e6dd4b54469caf6cf02f94955f48fe3bbfbd9f\": rpc error: code = NotFound desc = could not find container \"793b7e60b027092ef8139183c3e6dd4b54469caf6cf02f94955f48fe3bbfbd9f\": container with ID starting with 793b7e60b027092ef8139183c3e6dd4b54469caf6cf02f94955f48fe3bbfbd9f not found: ID does not exist" Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.704642 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.704876 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="572d7a53-9a17-43e3-bc12-a04f994eb857" containerName="nova-metadata-log" containerID="cri-o://a919d8c90acccccbc8402af93925861e0da53f38a61a01f63b79f7cffbdcd991" gracePeriod=30 Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.705344 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="572d7a53-9a17-43e3-bc12-a04f994eb857" containerName="nova-metadata-metadata" containerID="cri-o://3df9ef85a677a70aa8631b76f358d8c7876e88242945b5d72d5b5a6a3a549fd0" gracePeriod=30 Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.720286 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.720530 4558 kuberuntime_container.go:808] "Killing container with a 
grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="4ea608e7-9a19-47fb-8e14-a629451e7c03" containerName="nova-api-log" containerID="cri-o://b63f6fc151b46c87b40591ac97182073165528cb2330e3173933ce4b52b43f72" gracePeriod=30 Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.720902 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="4ea608e7-9a19-47fb-8e14-a629451e7c03" containerName="nova-api-api" containerID="cri-o://2268bf37f5b535b0402d4e5d51dbdeabae160ee08d31b99321666536df207266" gracePeriod=30 Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.867462 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/neutron-66bfc8789d-nsc64" Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.979430 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-5c4dc89b5d-qp5xf"] Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.979680 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-5c4dc89b5d-qp5xf" podUID="7e627450-8edc-44bd-805d-b0034178dff2" containerName="neutron-api" containerID="cri-o://ada568b4f4a7ec3082384c4ebb4a4d6e1590ad65456658486086d4be942b31b7" gracePeriod=30 Jan 20 17:43:55 crc kubenswrapper[4558]: I0120 17:43:55.979809 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-5c4dc89b5d-qp5xf" podUID="7e627450-8edc-44bd-805d-b0034178dff2" containerName="neutron-httpd" containerID="cri-o://5730181f255b51b17148b66b915f2f8f31d4d16baed50889d862d0d3799b5c21" gracePeriod=30 Jan 20 17:43:56 crc kubenswrapper[4558]: I0120 17:43:56.383285 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-api-74bb4f5f4-5ppmg"] Jan 20 17:43:56 crc kubenswrapper[4558]: I0120 17:43:56.560572 4558 generic.go:334] "Generic (PLEG): container finished" podID="572d7a53-9a17-43e3-bc12-a04f994eb857" containerID="a919d8c90acccccbc8402af93925861e0da53f38a61a01f63b79f7cffbdcd991" exitCode=143 Jan 20 17:43:56 crc kubenswrapper[4558]: I0120 17:43:56.560900 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"572d7a53-9a17-43e3-bc12-a04f994eb857","Type":"ContainerDied","Data":"a919d8c90acccccbc8402af93925861e0da53f38a61a01f63b79f7cffbdcd991"} Jan 20 17:43:56 crc kubenswrapper[4558]: I0120 17:43:56.579146 4558 generic.go:334] "Generic (PLEG): container finished" podID="7e627450-8edc-44bd-805d-b0034178dff2" containerID="5730181f255b51b17148b66b915f2f8f31d4d16baed50889d862d0d3799b5c21" exitCode=0 Jan 20 17:43:56 crc kubenswrapper[4558]: I0120 17:43:56.610321 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6ba5f2a-6486-4a84-bf85-23c00e907701" path="/var/lib/kubelet/pods/b6ba5f2a-6486-4a84-bf85-23c00e907701/volumes" Jan 20 17:43:56 crc kubenswrapper[4558]: I0120 17:43:56.611079 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-5c4dc89b5d-qp5xf" event={"ID":"7e627450-8edc-44bd-805d-b0034178dff2","Type":"ContainerDied","Data":"5730181f255b51b17148b66b915f2f8f31d4d16baed50889d862d0d3799b5c21"} Jan 20 17:43:56 crc kubenswrapper[4558]: I0120 17:43:56.619124 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" 
event={"ID":"b752af55-9c06-469f-9353-e1042300de3c","Type":"ContainerStarted","Data":"93d7994154007967abeffaef97cbdbfea9fe0c9a249a8ebbcb1a6bb563d31477"} Jan 20 17:43:56 crc kubenswrapper[4558]: I0120 17:43:56.638532 4558 generic.go:334] "Generic (PLEG): container finished" podID="4ea608e7-9a19-47fb-8e14-a629451e7c03" containerID="b63f6fc151b46c87b40591ac97182073165528cb2330e3173933ce4b52b43f72" exitCode=143 Jan 20 17:43:56 crc kubenswrapper[4558]: I0120 17:43:56.638665 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"4ea608e7-9a19-47fb-8e14-a629451e7c03","Type":"ContainerDied","Data":"b63f6fc151b46c87b40591ac97182073165528cb2330e3173933ce4b52b43f72"} Jan 20 17:43:56 crc kubenswrapper[4558]: I0120 17:43:56.647469 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-api-69995768b6-hpdg2" podUID="a6d1823f-8885-40f1-b85d-5ee22197dc09" containerName="barbican-api-log" containerID="cri-o://18bf3ef21bb245e310dd8ac4c714bcd9fa323f6c0767dc644e8509693385ec1f" gracePeriod=30 Jan 20 17:43:56 crc kubenswrapper[4558]: I0120 17:43:56.647616 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-74bb4f5f4-5ppmg" event={"ID":"d619cd37-a474-4965-b382-749ed6d55d6d","Type":"ContainerStarted","Data":"aed528881c58fa0aa03f936e5977026a6585daedac4a872f81b019f155c31984"} Jan 20 17:43:56 crc kubenswrapper[4558]: I0120 17:43:56.648131 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-api-69995768b6-hpdg2" podUID="a6d1823f-8885-40f1-b85d-5ee22197dc09" containerName="barbican-api" containerID="cri-o://73b904a1c9cb3b18a20eeaf72083790b7ae09f43aad729dd0878ee2ad744f046" gracePeriod=30 Jan 20 17:43:56 crc kubenswrapper[4558]: I0120 17:43:56.648383 4558 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 20 17:43:56 crc kubenswrapper[4558]: I0120 17:43:56.648397 4558 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 20 17:43:56 crc kubenswrapper[4558]: E0120 17:43:56.795866 4558 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7e627450_8edc_44bd_805d_b0034178dff2.slice/crio-conmon-5730181f255b51b17148b66b915f2f8f31d4d16baed50889d862d0d3799b5c21.scope\": RecentStats: unable to find data in memory cache]" Jan 20 17:43:56 crc kubenswrapper[4558]: I0120 17:43:56.989962 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:56 crc kubenswrapper[4558]: I0120 17:43:56.990387 4558 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 20 17:43:56 crc kubenswrapper[4558]: I0120 17:43:56.994141 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:43:57 crc kubenswrapper[4558]: I0120 17:43:57.428816 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:57 crc kubenswrapper[4558]: I0120 17:43:57.430250 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:43:57 crc kubenswrapper[4558]: I0120 17:43:57.516190 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-api-69995768b6-hpdg2" Jan 20 17:43:57 crc kubenswrapper[4558]: I0120 17:43:57.618778 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:43:57 crc kubenswrapper[4558]: I0120 17:43:57.649950 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a6d1823f-8885-40f1-b85d-5ee22197dc09-logs\") pod \"a6d1823f-8885-40f1-b85d-5ee22197dc09\" (UID: \"a6d1823f-8885-40f1-b85d-5ee22197dc09\") " Jan 20 17:43:57 crc kubenswrapper[4558]: I0120 17:43:57.650131 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a6d1823f-8885-40f1-b85d-5ee22197dc09-public-tls-certs\") pod \"a6d1823f-8885-40f1-b85d-5ee22197dc09\" (UID: \"a6d1823f-8885-40f1-b85d-5ee22197dc09\") " Jan 20 17:43:57 crc kubenswrapper[4558]: I0120 17:43:57.650330 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a6d1823f-8885-40f1-b85d-5ee22197dc09-config-data\") pod \"a6d1823f-8885-40f1-b85d-5ee22197dc09\" (UID: \"a6d1823f-8885-40f1-b85d-5ee22197dc09\") " Jan 20 17:43:57 crc kubenswrapper[4558]: I0120 17:43:57.650418 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a6d1823f-8885-40f1-b85d-5ee22197dc09-config-data-custom\") pod \"a6d1823f-8885-40f1-b85d-5ee22197dc09\" (UID: \"a6d1823f-8885-40f1-b85d-5ee22197dc09\") " Jan 20 17:43:57 crc kubenswrapper[4558]: I0120 17:43:57.650592 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a6d1823f-8885-40f1-b85d-5ee22197dc09-internal-tls-certs\") pod \"a6d1823f-8885-40f1-b85d-5ee22197dc09\" (UID: \"a6d1823f-8885-40f1-b85d-5ee22197dc09\") " Jan 20 17:43:57 crc kubenswrapper[4558]: I0120 17:43:57.650701 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gnnhz\" (UniqueName: \"kubernetes.io/projected/a6d1823f-8885-40f1-b85d-5ee22197dc09-kube-api-access-gnnhz\") pod \"a6d1823f-8885-40f1-b85d-5ee22197dc09\" (UID: \"a6d1823f-8885-40f1-b85d-5ee22197dc09\") " Jan 20 17:43:57 crc kubenswrapper[4558]: I0120 17:43:57.650813 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a6d1823f-8885-40f1-b85d-5ee22197dc09-combined-ca-bundle\") pod \"a6d1823f-8885-40f1-b85d-5ee22197dc09\" (UID: \"a6d1823f-8885-40f1-b85d-5ee22197dc09\") " Jan 20 17:43:57 crc kubenswrapper[4558]: I0120 17:43:57.652838 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a6d1823f-8885-40f1-b85d-5ee22197dc09-logs" (OuterVolumeSpecName: "logs") pod "a6d1823f-8885-40f1-b85d-5ee22197dc09" (UID: "a6d1823f-8885-40f1-b85d-5ee22197dc09"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:43:57 crc kubenswrapper[4558]: I0120 17:43:57.674748 4558 generic.go:334] "Generic (PLEG): container finished" podID="a6d1823f-8885-40f1-b85d-5ee22197dc09" containerID="73b904a1c9cb3b18a20eeaf72083790b7ae09f43aad729dd0878ee2ad744f046" exitCode=0 Jan 20 17:43:57 crc kubenswrapper[4558]: I0120 17:43:57.674777 4558 generic.go:334] "Generic (PLEG): container finished" podID="a6d1823f-8885-40f1-b85d-5ee22197dc09" containerID="18bf3ef21bb245e310dd8ac4c714bcd9fa323f6c0767dc644e8509693385ec1f" exitCode=143 Jan 20 17:43:57 crc kubenswrapper[4558]: I0120 17:43:57.674816 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-69995768b6-hpdg2" event={"ID":"a6d1823f-8885-40f1-b85d-5ee22197dc09","Type":"ContainerDied","Data":"73b904a1c9cb3b18a20eeaf72083790b7ae09f43aad729dd0878ee2ad744f046"} Jan 20 17:43:57 crc kubenswrapper[4558]: I0120 17:43:57.674846 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-69995768b6-hpdg2" event={"ID":"a6d1823f-8885-40f1-b85d-5ee22197dc09","Type":"ContainerDied","Data":"18bf3ef21bb245e310dd8ac4c714bcd9fa323f6c0767dc644e8509693385ec1f"} Jan 20 17:43:57 crc kubenswrapper[4558]: I0120 17:43:57.674858 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-69995768b6-hpdg2" event={"ID":"a6d1823f-8885-40f1-b85d-5ee22197dc09","Type":"ContainerDied","Data":"ea1419644ca2911289535338721264e2f99e8dcc7356d4bafe1793b6983dd157"} Jan 20 17:43:57 crc kubenswrapper[4558]: I0120 17:43:57.674875 4558 scope.go:117] "RemoveContainer" containerID="73b904a1c9cb3b18a20eeaf72083790b7ae09f43aad729dd0878ee2ad744f046" Jan 20 17:43:57 crc kubenswrapper[4558]: I0120 17:43:57.675012 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-api-69995768b6-hpdg2" Jan 20 17:43:57 crc kubenswrapper[4558]: I0120 17:43:57.689292 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a6d1823f-8885-40f1-b85d-5ee22197dc09-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "a6d1823f-8885-40f1-b85d-5ee22197dc09" (UID: "a6d1823f-8885-40f1-b85d-5ee22197dc09"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:57 crc kubenswrapper[4558]: I0120 17:43:57.689659 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a6d1823f-8885-40f1-b85d-5ee22197dc09-kube-api-access-gnnhz" (OuterVolumeSpecName: "kube-api-access-gnnhz") pod "a6d1823f-8885-40f1-b85d-5ee22197dc09" (UID: "a6d1823f-8885-40f1-b85d-5ee22197dc09"). InnerVolumeSpecName "kube-api-access-gnnhz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:43:57 crc kubenswrapper[4558]: I0120 17:43:57.692806 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"b752af55-9c06-469f-9353-e1042300de3c","Type":"ContainerStarted","Data":"43f53e7f4f862cf970fa692178ae00cd40fe6c655fd001e46383dd72770d5cb5"} Jan 20 17:43:57 crc kubenswrapper[4558]: I0120 17:43:57.692854 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:43:57 crc kubenswrapper[4558]: I0120 17:43:57.723530 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-74bb4f5f4-5ppmg" event={"ID":"d619cd37-a474-4965-b382-749ed6d55d6d","Type":"ContainerStarted","Data":"dcd025269e0c4cae451a75917b3ffb9392b169d6ba1b4f275051e1022e3b09b6"} Jan 20 17:43:57 crc kubenswrapper[4558]: I0120 17:43:57.723590 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-74bb4f5f4-5ppmg" event={"ID":"d619cd37-a474-4965-b382-749ed6d55d6d","Type":"ContainerStarted","Data":"985673ff538dfdfff39427adadbf62ca67de938e3a41cbb308d4b6035782e705"} Jan 20 17:43:57 crc kubenswrapper[4558]: I0120 17:43:57.724482 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/barbican-api-74bb4f5f4-5ppmg" Jan 20 17:43:57 crc kubenswrapper[4558]: I0120 17:43:57.725502 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/barbican-api-74bb4f5f4-5ppmg" Jan 20 17:43:57 crc kubenswrapper[4558]: I0120 17:43:57.728121 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/cinder-api-0" podStartSLOduration=3.7281049509999997 podStartE2EDuration="3.728104951s" podCreationTimestamp="2026-01-20 17:43:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:43:57.712607082 +0000 UTC m=+3731.472945049" watchObservedRunningTime="2026-01-20 17:43:57.728104951 +0000 UTC m=+3731.488442919" Jan 20 17:43:57 crc kubenswrapper[4558]: I0120 17:43:57.742154 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a6d1823f-8885-40f1-b85d-5ee22197dc09-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a6d1823f-8885-40f1-b85d-5ee22197dc09" (UID: "a6d1823f-8885-40f1-b85d-5ee22197dc09"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:57 crc kubenswrapper[4558]: I0120 17:43:57.753801 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a6d1823f-8885-40f1-b85d-5ee22197dc09-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:57 crc kubenswrapper[4558]: I0120 17:43:57.753828 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gnnhz\" (UniqueName: \"kubernetes.io/projected/a6d1823f-8885-40f1-b85d-5ee22197dc09-kube-api-access-gnnhz\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:57 crc kubenswrapper[4558]: I0120 17:43:57.753841 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a6d1823f-8885-40f1-b85d-5ee22197dc09-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:57 crc kubenswrapper[4558]: I0120 17:43:57.753850 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a6d1823f-8885-40f1-b85d-5ee22197dc09-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:57 crc kubenswrapper[4558]: I0120 17:43:57.778322 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a6d1823f-8885-40f1-b85d-5ee22197dc09-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "a6d1823f-8885-40f1-b85d-5ee22197dc09" (UID: "a6d1823f-8885-40f1-b85d-5ee22197dc09"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:57 crc kubenswrapper[4558]: I0120 17:43:57.785324 4558 scope.go:117] "RemoveContainer" containerID="18bf3ef21bb245e310dd8ac4c714bcd9fa323f6c0767dc644e8509693385ec1f" Jan 20 17:43:57 crc kubenswrapper[4558]: I0120 17:43:57.785465 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a6d1823f-8885-40f1-b85d-5ee22197dc09-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "a6d1823f-8885-40f1-b85d-5ee22197dc09" (UID: "a6d1823f-8885-40f1-b85d-5ee22197dc09"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:57 crc kubenswrapper[4558]: I0120 17:43:57.799313 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/openstack-galera-0" podUID="38a5808f-1fa5-49cb-afb7-d7676416cd26" containerName="galera" containerID="cri-o://18a3a7a71ea0a749f520d355bf36542849b6f575c3537150265c17afa8aafef5" gracePeriod=30 Jan 20 17:43:57 crc kubenswrapper[4558]: I0120 17:43:57.805403 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a6d1823f-8885-40f1-b85d-5ee22197dc09-config-data" (OuterVolumeSpecName: "config-data") pod "a6d1823f-8885-40f1-b85d-5ee22197dc09" (UID: "a6d1823f-8885-40f1-b85d-5ee22197dc09"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:57 crc kubenswrapper[4558]: I0120 17:43:57.822752 4558 scope.go:117] "RemoveContainer" containerID="73b904a1c9cb3b18a20eeaf72083790b7ae09f43aad729dd0878ee2ad744f046" Jan 20 17:43:57 crc kubenswrapper[4558]: E0120 17:43:57.826276 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"73b904a1c9cb3b18a20eeaf72083790b7ae09f43aad729dd0878ee2ad744f046\": container with ID starting with 73b904a1c9cb3b18a20eeaf72083790b7ae09f43aad729dd0878ee2ad744f046 not found: ID does not exist" containerID="73b904a1c9cb3b18a20eeaf72083790b7ae09f43aad729dd0878ee2ad744f046" Jan 20 17:43:57 crc kubenswrapper[4558]: I0120 17:43:57.826384 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"73b904a1c9cb3b18a20eeaf72083790b7ae09f43aad729dd0878ee2ad744f046"} err="failed to get container status \"73b904a1c9cb3b18a20eeaf72083790b7ae09f43aad729dd0878ee2ad744f046\": rpc error: code = NotFound desc = could not find container \"73b904a1c9cb3b18a20eeaf72083790b7ae09f43aad729dd0878ee2ad744f046\": container with ID starting with 73b904a1c9cb3b18a20eeaf72083790b7ae09f43aad729dd0878ee2ad744f046 not found: ID does not exist" Jan 20 17:43:57 crc kubenswrapper[4558]: I0120 17:43:57.826465 4558 scope.go:117] "RemoveContainer" containerID="18bf3ef21bb245e310dd8ac4c714bcd9fa323f6c0767dc644e8509693385ec1f" Jan 20 17:43:57 crc kubenswrapper[4558]: E0120 17:43:57.827092 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"18bf3ef21bb245e310dd8ac4c714bcd9fa323f6c0767dc644e8509693385ec1f\": container with ID starting with 18bf3ef21bb245e310dd8ac4c714bcd9fa323f6c0767dc644e8509693385ec1f not found: ID does not exist" containerID="18bf3ef21bb245e310dd8ac4c714bcd9fa323f6c0767dc644e8509693385ec1f" Jan 20 17:43:57 crc kubenswrapper[4558]: I0120 17:43:57.827143 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"18bf3ef21bb245e310dd8ac4c714bcd9fa323f6c0767dc644e8509693385ec1f"} err="failed to get container status \"18bf3ef21bb245e310dd8ac4c714bcd9fa323f6c0767dc644e8509693385ec1f\": rpc error: code = NotFound desc = could not find container \"18bf3ef21bb245e310dd8ac4c714bcd9fa323f6c0767dc644e8509693385ec1f\": container with ID starting with 18bf3ef21bb245e310dd8ac4c714bcd9fa323f6c0767dc644e8509693385ec1f not found: ID does not exist" Jan 20 17:43:57 crc kubenswrapper[4558]: I0120 17:43:57.827199 4558 scope.go:117] "RemoveContainer" containerID="73b904a1c9cb3b18a20eeaf72083790b7ae09f43aad729dd0878ee2ad744f046" Jan 20 17:43:57 crc kubenswrapper[4558]: I0120 17:43:57.828598 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"73b904a1c9cb3b18a20eeaf72083790b7ae09f43aad729dd0878ee2ad744f046"} err="failed to get container status \"73b904a1c9cb3b18a20eeaf72083790b7ae09f43aad729dd0878ee2ad744f046\": rpc error: code = NotFound desc = could not find container \"73b904a1c9cb3b18a20eeaf72083790b7ae09f43aad729dd0878ee2ad744f046\": container with ID starting with 73b904a1c9cb3b18a20eeaf72083790b7ae09f43aad729dd0878ee2ad744f046 not found: ID does not exist" Jan 20 17:43:57 crc kubenswrapper[4558]: I0120 17:43:57.828644 4558 scope.go:117] "RemoveContainer" containerID="18bf3ef21bb245e310dd8ac4c714bcd9fa323f6c0767dc644e8509693385ec1f" Jan 20 17:43:57 crc kubenswrapper[4558]: I0120 17:43:57.832257 4558 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"18bf3ef21bb245e310dd8ac4c714bcd9fa323f6c0767dc644e8509693385ec1f"} err="failed to get container status \"18bf3ef21bb245e310dd8ac4c714bcd9fa323f6c0767dc644e8509693385ec1f\": rpc error: code = NotFound desc = could not find container \"18bf3ef21bb245e310dd8ac4c714bcd9fa323f6c0767dc644e8509693385ec1f\": container with ID starting with 18bf3ef21bb245e310dd8ac4c714bcd9fa323f6c0767dc644e8509693385ec1f not found: ID does not exist" Jan 20 17:43:57 crc kubenswrapper[4558]: I0120 17:43:57.856449 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a6d1823f-8885-40f1-b85d-5ee22197dc09-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:57 crc kubenswrapper[4558]: I0120 17:43:57.856475 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a6d1823f-8885-40f1-b85d-5ee22197dc09-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:57 crc kubenswrapper[4558]: I0120 17:43:57.856486 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a6d1823f-8885-40f1-b85d-5ee22197dc09-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:57 crc kubenswrapper[4558]: I0120 17:43:57.936994 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-api-74bb4f5f4-5ppmg" podStartSLOduration=2.936977592 podStartE2EDuration="2.936977592s" podCreationTimestamp="2026-01-20 17:43:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:43:57.769553195 +0000 UTC m=+3731.529891162" watchObservedRunningTime="2026-01-20 17:43:57.936977592 +0000 UTC m=+3731.697315559" Jan 20 17:43:57 crc kubenswrapper[4558]: I0120 17:43:57.938946 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:43:57 crc kubenswrapper[4558]: I0120 17:43:57.939348 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="046d6bef-b034-4093-9a77-f23074beaf20" containerName="proxy-httpd" containerID="cri-o://d00d835dc1dbc2461d0c91743b94e5fad21abda74168aa06d622f30e4cb362b4" gracePeriod=30 Jan 20 17:43:57 crc kubenswrapper[4558]: I0120 17:43:57.939434 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="046d6bef-b034-4093-9a77-f23074beaf20" containerName="ceilometer-notification-agent" containerID="cri-o://0341e4fb184d688b59bd82831888ea8b9d6cff92bd2c69cad322485ad1a47834" gracePeriod=30 Jan 20 17:43:57 crc kubenswrapper[4558]: I0120 17:43:57.939309 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="046d6bef-b034-4093-9a77-f23074beaf20" containerName="ceilometer-central-agent" containerID="cri-o://77777fba94586efb6610ba10279160fb83ea9a44be2ecca14f8d05427010707c" gracePeriod=30 Jan 20 17:43:57 crc kubenswrapper[4558]: I0120 17:43:57.940028 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="046d6bef-b034-4093-9a77-f23074beaf20" containerName="sg-core" containerID="cri-o://cee97a0e46fb53a7035606e81d6282fb0ba2c0e098ff4e08ca4586445e9122cb" gracePeriod=30 Jan 20 17:43:58 crc 
kubenswrapper[4558]: I0120 17:43:58.002618 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-api-69995768b6-hpdg2"] Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.012878 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-api-69995768b6-hpdg2"] Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.380262 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/neutron-5c4dc89b5d-qp5xf" podUID="7e627450-8edc-44bd-805d-b0034178dff2" containerName="neutron-httpd" probeResult="failure" output="Get \"https://10.217.0.227:9696/\": dial tcp 10.217.0.227:9696: connect: connection refused" Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.471725 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.573869 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/38a5808f-1fa5-49cb-afb7-d7676416cd26-config-data-generated\") pod \"38a5808f-1fa5-49cb-afb7-d7676416cd26\" (UID: \"38a5808f-1fa5-49cb-afb7-d7676416cd26\") " Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.574630 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/38a5808f-1fa5-49cb-afb7-d7676416cd26-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "38a5808f-1fa5-49cb-afb7-d7676416cd26" (UID: "38a5808f-1fa5-49cb-afb7-d7676416cd26"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.576802 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/38a5808f-1fa5-49cb-afb7-d7676416cd26-galera-tls-certs\") pod \"38a5808f-1fa5-49cb-afb7-d7676416cd26\" (UID: \"38a5808f-1fa5-49cb-afb7-d7676416cd26\") " Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.576887 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/38a5808f-1fa5-49cb-afb7-d7676416cd26-kolla-config\") pod \"38a5808f-1fa5-49cb-afb7-d7676416cd26\" (UID: \"38a5808f-1fa5-49cb-afb7-d7676416cd26\") " Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.576983 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gk8sj\" (UniqueName: \"kubernetes.io/projected/38a5808f-1fa5-49cb-afb7-d7676416cd26-kube-api-access-gk8sj\") pod \"38a5808f-1fa5-49cb-afb7-d7676416cd26\" (UID: \"38a5808f-1fa5-49cb-afb7-d7676416cd26\") " Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.577054 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/38a5808f-1fa5-49cb-afb7-d7676416cd26-operator-scripts\") pod \"38a5808f-1fa5-49cb-afb7-d7676416cd26\" (UID: \"38a5808f-1fa5-49cb-afb7-d7676416cd26\") " Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.577133 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/38a5808f-1fa5-49cb-afb7-d7676416cd26-config-data-default\") pod \"38a5808f-1fa5-49cb-afb7-d7676416cd26\" (UID: \"38a5808f-1fa5-49cb-afb7-d7676416cd26\") " Jan 20 17:43:58 crc 
kubenswrapper[4558]: I0120 17:43:58.577203 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38a5808f-1fa5-49cb-afb7-d7676416cd26-combined-ca-bundle\") pod \"38a5808f-1fa5-49cb-afb7-d7676416cd26\" (UID: \"38a5808f-1fa5-49cb-afb7-d7676416cd26\") " Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.577280 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"38a5808f-1fa5-49cb-afb7-d7676416cd26\" (UID: \"38a5808f-1fa5-49cb-afb7-d7676416cd26\") " Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.578239 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/38a5808f-1fa5-49cb-afb7-d7676416cd26-config-data-generated\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.580268 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a6d1823f-8885-40f1-b85d-5ee22197dc09" path="/var/lib/kubelet/pods/a6d1823f-8885-40f1-b85d-5ee22197dc09/volumes" Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.581065 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/38a5808f-1fa5-49cb-afb7-d7676416cd26-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "38a5808f-1fa5-49cb-afb7-d7676416cd26" (UID: "38a5808f-1fa5-49cb-afb7-d7676416cd26"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.581990 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/38a5808f-1fa5-49cb-afb7-d7676416cd26-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "38a5808f-1fa5-49cb-afb7-d7676416cd26" (UID: "38a5808f-1fa5-49cb-afb7-d7676416cd26"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.584289 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/38a5808f-1fa5-49cb-afb7-d7676416cd26-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "38a5808f-1fa5-49cb-afb7-d7676416cd26" (UID: "38a5808f-1fa5-49cb-afb7-d7676416cd26"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.587557 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/38a5808f-1fa5-49cb-afb7-d7676416cd26-kube-api-access-gk8sj" (OuterVolumeSpecName: "kube-api-access-gk8sj") pod "38a5808f-1fa5-49cb-afb7-d7676416cd26" (UID: "38a5808f-1fa5-49cb-afb7-d7676416cd26"). InnerVolumeSpecName "kube-api-access-gk8sj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.601264 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage14-crc" (OuterVolumeSpecName: "mysql-db") pod "38a5808f-1fa5-49cb-afb7-d7676416cd26" (UID: "38a5808f-1fa5-49cb-afb7-d7676416cd26"). InnerVolumeSpecName "local-storage14-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.628967 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/38a5808f-1fa5-49cb-afb7-d7676416cd26-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "38a5808f-1fa5-49cb-afb7-d7676416cd26" (UID: "38a5808f-1fa5-49cb-afb7-d7676416cd26"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.645244 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/38a5808f-1fa5-49cb-afb7-d7676416cd26-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "38a5808f-1fa5-49cb-afb7-d7676416cd26" (UID: "38a5808f-1fa5-49cb-afb7-d7676416cd26"). InnerVolumeSpecName "galera-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.680659 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gk8sj\" (UniqueName: \"kubernetes.io/projected/38a5808f-1fa5-49cb-afb7-d7676416cd26-kube-api-access-gk8sj\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.680693 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/38a5808f-1fa5-49cb-afb7-d7676416cd26-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.680704 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/38a5808f-1fa5-49cb-afb7-d7676416cd26-config-data-default\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.680715 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38a5808f-1fa5-49cb-afb7-d7676416cd26-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.680752 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") on node \"crc\" " Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.680763 4558 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/38a5808f-1fa5-49cb-afb7-d7676416cd26-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.680774 4558 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/38a5808f-1fa5-49cb-afb7-d7676416cd26-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.698951 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage14-crc" (UniqueName: "kubernetes.io/local-volume/local-storage14-crc") on node "crc" Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.702833 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.769887 4558 generic.go:334] "Generic (PLEG): container finished" podID="38a5808f-1fa5-49cb-afb7-d7676416cd26" containerID="18a3a7a71ea0a749f520d355bf36542849b6f575c3537150265c17afa8aafef5" exitCode=0 Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.770078 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.770079 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"38a5808f-1fa5-49cb-afb7-d7676416cd26","Type":"ContainerDied","Data":"18a3a7a71ea0a749f520d355bf36542849b6f575c3537150265c17afa8aafef5"} Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.770133 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"38a5808f-1fa5-49cb-afb7-d7676416cd26","Type":"ContainerDied","Data":"b29534492808dfe227690795f19ae32104cb9487956b5ac584f7fd62850c9707"} Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.770157 4558 scope.go:117] "RemoveContainer" containerID="18a3a7a71ea0a749f520d355bf36542849b6f575c3537150265c17afa8aafef5" Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.776630 4558 generic.go:334] "Generic (PLEG): container finished" podID="50cacddd-ebea-477f-af64-6e96a09a242e" containerID="696eca395715bd4cf46b8dd0ddb53ef0ad8c84c94f7e91863731528e53ac125e" exitCode=1 Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.776711 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"50cacddd-ebea-477f-af64-6e96a09a242e","Type":"ContainerDied","Data":"696eca395715bd4cf46b8dd0ddb53ef0ad8c84c94f7e91863731528e53ac125e"} Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.777569 4558 scope.go:117] "RemoveContainer" containerID="696eca395715bd4cf46b8dd0ddb53ef0ad8c84c94f7e91863731528e53ac125e" Jan 20 17:43:58 crc kubenswrapper[4558]: E0120 17:43:58.779919 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-cell0-conductor-conductor\" with CrashLoopBackOff: \"back-off 20s restarting failed container=nova-cell0-conductor-conductor pod=nova-cell0-conductor-0_openstack-kuttl-tests(50cacddd-ebea-477f-af64-6e96a09a242e)\"" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="50cacddd-ebea-477f-af64-6e96a09a242e" Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.781937 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/046d6bef-b034-4093-9a77-f23074beaf20-log-httpd\") pod \"046d6bef-b034-4093-9a77-f23074beaf20\" (UID: \"046d6bef-b034-4093-9a77-f23074beaf20\") " Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.781986 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/046d6bef-b034-4093-9a77-f23074beaf20-ceilometer-tls-certs\") pod \"046d6bef-b034-4093-9a77-f23074beaf20\" (UID: \"046d6bef-b034-4093-9a77-f23074beaf20\") " Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.782024 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/046d6bef-b034-4093-9a77-f23074beaf20-config-data\") pod \"046d6bef-b034-4093-9a77-f23074beaf20\" 
(UID: \"046d6bef-b034-4093-9a77-f23074beaf20\") " Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.782100 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/046d6bef-b034-4093-9a77-f23074beaf20-combined-ca-bundle\") pod \"046d6bef-b034-4093-9a77-f23074beaf20\" (UID: \"046d6bef-b034-4093-9a77-f23074beaf20\") " Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.782185 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/046d6bef-b034-4093-9a77-f23074beaf20-scripts\") pod \"046d6bef-b034-4093-9a77-f23074beaf20\" (UID: \"046d6bef-b034-4093-9a77-f23074beaf20\") " Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.782246 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/046d6bef-b034-4093-9a77-f23074beaf20-run-httpd\") pod \"046d6bef-b034-4093-9a77-f23074beaf20\" (UID: \"046d6bef-b034-4093-9a77-f23074beaf20\") " Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.782296 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xgqsl\" (UniqueName: \"kubernetes.io/projected/046d6bef-b034-4093-9a77-f23074beaf20-kube-api-access-xgqsl\") pod \"046d6bef-b034-4093-9a77-f23074beaf20\" (UID: \"046d6bef-b034-4093-9a77-f23074beaf20\") " Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.782410 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/046d6bef-b034-4093-9a77-f23074beaf20-sg-core-conf-yaml\") pod \"046d6bef-b034-4093-9a77-f23074beaf20\" (UID: \"046d6bef-b034-4093-9a77-f23074beaf20\") " Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.784003 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.784573 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/046d6bef-b034-4093-9a77-f23074beaf20-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "046d6bef-b034-4093-9a77-f23074beaf20" (UID: "046d6bef-b034-4093-9a77-f23074beaf20"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.785271 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/046d6bef-b034-4093-9a77-f23074beaf20-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "046d6bef-b034-4093-9a77-f23074beaf20" (UID: "046d6bef-b034-4093-9a77-f23074beaf20"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.788939 4558 generic.go:334] "Generic (PLEG): container finished" podID="046d6bef-b034-4093-9a77-f23074beaf20" containerID="d00d835dc1dbc2461d0c91743b94e5fad21abda74168aa06d622f30e4cb362b4" exitCode=0 Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.788962 4558 generic.go:334] "Generic (PLEG): container finished" podID="046d6bef-b034-4093-9a77-f23074beaf20" containerID="cee97a0e46fb53a7035606e81d6282fb0ba2c0e098ff4e08ca4586445e9122cb" exitCode=2 Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.788970 4558 generic.go:334] "Generic (PLEG): container finished" podID="046d6bef-b034-4093-9a77-f23074beaf20" containerID="0341e4fb184d688b59bd82831888ea8b9d6cff92bd2c69cad322485ad1a47834" exitCode=0 Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.788979 4558 generic.go:334] "Generic (PLEG): container finished" podID="046d6bef-b034-4093-9a77-f23074beaf20" containerID="77777fba94586efb6610ba10279160fb83ea9a44be2ecca14f8d05427010707c" exitCode=0 Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.790902 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.792351 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"046d6bef-b034-4093-9a77-f23074beaf20","Type":"ContainerDied","Data":"d00d835dc1dbc2461d0c91743b94e5fad21abda74168aa06d622f30e4cb362b4"} Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.792413 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"046d6bef-b034-4093-9a77-f23074beaf20","Type":"ContainerDied","Data":"cee97a0e46fb53a7035606e81d6282fb0ba2c0e098ff4e08ca4586445e9122cb"} Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.792429 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"046d6bef-b034-4093-9a77-f23074beaf20","Type":"ContainerDied","Data":"0341e4fb184d688b59bd82831888ea8b9d6cff92bd2c69cad322485ad1a47834"} Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.792440 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"046d6bef-b034-4093-9a77-f23074beaf20","Type":"ContainerDied","Data":"77777fba94586efb6610ba10279160fb83ea9a44be2ecca14f8d05427010707c"} Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.792458 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"046d6bef-b034-4093-9a77-f23074beaf20","Type":"ContainerDied","Data":"8022a11c8ceeb8f4996ac1e42c93e327ff75465dbfaaa8ae9b6589d7405a0803"} Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.799958 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/046d6bef-b034-4093-9a77-f23074beaf20-kube-api-access-xgqsl" (OuterVolumeSpecName: "kube-api-access-xgqsl") pod "046d6bef-b034-4093-9a77-f23074beaf20" (UID: "046d6bef-b034-4093-9a77-f23074beaf20"). InnerVolumeSpecName "kube-api-access-xgqsl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.809730 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/046d6bef-b034-4093-9a77-f23074beaf20-scripts" (OuterVolumeSpecName: "scripts") pod "046d6bef-b034-4093-9a77-f23074beaf20" (UID: "046d6bef-b034-4093-9a77-f23074beaf20"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.849850 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.852898 4558 scope.go:117] "RemoveContainer" containerID="9ce99dfe9c0f0478fe1e3bc7eb8d4a9fb46628bf1aaf1c9dfa808ff005dc5efc" Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.860592 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.865356 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/046d6bef-b034-4093-9a77-f23074beaf20-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "046d6bef-b034-4093-9a77-f23074beaf20" (UID: "046d6bef-b034-4093-9a77-f23074beaf20"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.874437 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:43:58 crc kubenswrapper[4558]: E0120 17:43:58.875060 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38a5808f-1fa5-49cb-afb7-d7676416cd26" containerName="mysql-bootstrap" Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.875079 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="38a5808f-1fa5-49cb-afb7-d7676416cd26" containerName="mysql-bootstrap" Jan 20 17:43:58 crc kubenswrapper[4558]: E0120 17:43:58.875095 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6d1823f-8885-40f1-b85d-5ee22197dc09" containerName="barbican-api" Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.875100 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6d1823f-8885-40f1-b85d-5ee22197dc09" containerName="barbican-api" Jan 20 17:43:58 crc kubenswrapper[4558]: E0120 17:43:58.875138 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="046d6bef-b034-4093-9a77-f23074beaf20" containerName="ceilometer-central-agent" Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.875144 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="046d6bef-b034-4093-9a77-f23074beaf20" containerName="ceilometer-central-agent" Jan 20 17:43:58 crc kubenswrapper[4558]: E0120 17:43:58.875156 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="046d6bef-b034-4093-9a77-f23074beaf20" containerName="sg-core" Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.875185 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="046d6bef-b034-4093-9a77-f23074beaf20" containerName="sg-core" Jan 20 17:43:58 crc kubenswrapper[4558]: E0120 17:43:58.875205 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="046d6bef-b034-4093-9a77-f23074beaf20" containerName="ceilometer-notification-agent" Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.875210 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="046d6bef-b034-4093-9a77-f23074beaf20" 
containerName="ceilometer-notification-agent" Jan 20 17:43:58 crc kubenswrapper[4558]: E0120 17:43:58.875223 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38a5808f-1fa5-49cb-afb7-d7676416cd26" containerName="galera" Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.875228 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="38a5808f-1fa5-49cb-afb7-d7676416cd26" containerName="galera" Jan 20 17:43:58 crc kubenswrapper[4558]: E0120 17:43:58.875238 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="046d6bef-b034-4093-9a77-f23074beaf20" containerName="proxy-httpd" Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.875242 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="046d6bef-b034-4093-9a77-f23074beaf20" containerName="proxy-httpd" Jan 20 17:43:58 crc kubenswrapper[4558]: E0120 17:43:58.875268 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6d1823f-8885-40f1-b85d-5ee22197dc09" containerName="barbican-api-log" Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.875275 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6d1823f-8885-40f1-b85d-5ee22197dc09" containerName="barbican-api-log" Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.875511 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="046d6bef-b034-4093-9a77-f23074beaf20" containerName="ceilometer-notification-agent" Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.875525 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a6d1823f-8885-40f1-b85d-5ee22197dc09" containerName="barbican-api" Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.875534 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="046d6bef-b034-4093-9a77-f23074beaf20" containerName="ceilometer-central-agent" Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.875545 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a6d1823f-8885-40f1-b85d-5ee22197dc09" containerName="barbican-api-log" Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.875555 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="046d6bef-b034-4093-9a77-f23074beaf20" containerName="proxy-httpd" Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.875580 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="38a5808f-1fa5-49cb-afb7-d7676416cd26" containerName="galera" Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.875590 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="046d6bef-b034-4093-9a77-f23074beaf20" containerName="sg-core" Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.877094 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.880325 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-config-data" Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.880819 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"galera-openstack-dockercfg-xjdvx" Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.881267 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-galera-openstack-svc" Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.881735 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-scripts" Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.884214 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.885824 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/046d6bef-b034-4093-9a77-f23074beaf20-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.885844 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/046d6bef-b034-4093-9a77-f23074beaf20-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.885858 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/046d6bef-b034-4093-9a77-f23074beaf20-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.885868 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xgqsl\" (UniqueName: \"kubernetes.io/projected/046d6bef-b034-4093-9a77-f23074beaf20-kube-api-access-xgqsl\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.885877 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/046d6bef-b034-4093-9a77-f23074beaf20-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.902016 4558 scope.go:117] "RemoveContainer" containerID="18a3a7a71ea0a749f520d355bf36542849b6f575c3537150265c17afa8aafef5" Jan 20 17:43:58 crc kubenswrapper[4558]: E0120 17:43:58.905811 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"18a3a7a71ea0a749f520d355bf36542849b6f575c3537150265c17afa8aafef5\": container with ID starting with 18a3a7a71ea0a749f520d355bf36542849b6f575c3537150265c17afa8aafef5 not found: ID does not exist" containerID="18a3a7a71ea0a749f520d355bf36542849b6f575c3537150265c17afa8aafef5" Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.905850 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"18a3a7a71ea0a749f520d355bf36542849b6f575c3537150265c17afa8aafef5"} err="failed to get container status \"18a3a7a71ea0a749f520d355bf36542849b6f575c3537150265c17afa8aafef5\": rpc error: code = NotFound desc = could not find container \"18a3a7a71ea0a749f520d355bf36542849b6f575c3537150265c17afa8aafef5\": container with ID starting with 18a3a7a71ea0a749f520d355bf36542849b6f575c3537150265c17afa8aafef5 not found: ID does not exist" Jan 20 17:43:58 crc 
kubenswrapper[4558]: I0120 17:43:58.905880 4558 scope.go:117] "RemoveContainer" containerID="9ce99dfe9c0f0478fe1e3bc7eb8d4a9fb46628bf1aaf1c9dfa808ff005dc5efc" Jan 20 17:43:58 crc kubenswrapper[4558]: E0120 17:43:58.906313 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9ce99dfe9c0f0478fe1e3bc7eb8d4a9fb46628bf1aaf1c9dfa808ff005dc5efc\": container with ID starting with 9ce99dfe9c0f0478fe1e3bc7eb8d4a9fb46628bf1aaf1c9dfa808ff005dc5efc not found: ID does not exist" containerID="9ce99dfe9c0f0478fe1e3bc7eb8d4a9fb46628bf1aaf1c9dfa808ff005dc5efc" Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.906359 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9ce99dfe9c0f0478fe1e3bc7eb8d4a9fb46628bf1aaf1c9dfa808ff005dc5efc"} err="failed to get container status \"9ce99dfe9c0f0478fe1e3bc7eb8d4a9fb46628bf1aaf1c9dfa808ff005dc5efc\": rpc error: code = NotFound desc = could not find container \"9ce99dfe9c0f0478fe1e3bc7eb8d4a9fb46628bf1aaf1c9dfa808ff005dc5efc\": container with ID starting with 9ce99dfe9c0f0478fe1e3bc7eb8d4a9fb46628bf1aaf1c9dfa808ff005dc5efc not found: ID does not exist" Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.906392 4558 scope.go:117] "RemoveContainer" containerID="bb8488b5aee706047405a606b3a23b8bd6228cb32059c06329cbdb7eefcf16f2" Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.909879 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/046d6bef-b034-4093-9a77-f23074beaf20-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "046d6bef-b034-4093-9a77-f23074beaf20" (UID: "046d6bef-b034-4093-9a77-f23074beaf20"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:58 crc kubenswrapper[4558]: E0120 17:43:58.926391 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/046d6bef-b034-4093-9a77-f23074beaf20-combined-ca-bundle podName:046d6bef-b034-4093-9a77-f23074beaf20 nodeName:}" failed. No retries permitted until 2026-01-20 17:43:59.426371199 +0000 UTC m=+3733.186709166 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/046d6bef-b034-4093-9a77-f23074beaf20-combined-ca-bundle") pod "046d6bef-b034-4093-9a77-f23074beaf20" (UID: "046d6bef-b034-4093-9a77-f23074beaf20") : error deleting /var/lib/kubelet/pods/046d6bef-b034-4093-9a77-f23074beaf20/volume-subpaths: remove /var/lib/kubelet/pods/046d6bef-b034-4093-9a77-f23074beaf20/volume-subpaths: no such file or directory Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.928887 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/046d6bef-b034-4093-9a77-f23074beaf20-config-data" (OuterVolumeSpecName: "config-data") pod "046d6bef-b034-4093-9a77-f23074beaf20" (UID: "046d6bef-b034-4093-9a77-f23074beaf20"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.943256 4558 scope.go:117] "RemoveContainer" containerID="d00d835dc1dbc2461d0c91743b94e5fad21abda74168aa06d622f30e4cb362b4" Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.964459 4558 scope.go:117] "RemoveContainer" containerID="cee97a0e46fb53a7035606e81d6282fb0ba2c0e098ff4e08ca4586445e9122cb" Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.974020 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.974085 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.988600 4558 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/046d6bef-b034-4093-9a77-f23074beaf20-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.988633 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/046d6bef-b034-4093-9a77-f23074beaf20-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:58 crc kubenswrapper[4558]: I0120 17:43:58.994894 4558 scope.go:117] "RemoveContainer" containerID="0341e4fb184d688b59bd82831888ea8b9d6cff92bd2c69cad322485ad1a47834" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.013211 4558 scope.go:117] "RemoveContainer" containerID="77777fba94586efb6610ba10279160fb83ea9a44be2ecca14f8d05427010707c" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.029649 4558 scope.go:117] "RemoveContainer" containerID="d00d835dc1dbc2461d0c91743b94e5fad21abda74168aa06d622f30e4cb362b4" Jan 20 17:43:59 crc kubenswrapper[4558]: E0120 17:43:59.029970 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d00d835dc1dbc2461d0c91743b94e5fad21abda74168aa06d622f30e4cb362b4\": container with ID starting with d00d835dc1dbc2461d0c91743b94e5fad21abda74168aa06d622f30e4cb362b4 not found: ID does not exist" containerID="d00d835dc1dbc2461d0c91743b94e5fad21abda74168aa06d622f30e4cb362b4" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.030007 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d00d835dc1dbc2461d0c91743b94e5fad21abda74168aa06d622f30e4cb362b4"} err="failed to get container status \"d00d835dc1dbc2461d0c91743b94e5fad21abda74168aa06d622f30e4cb362b4\": rpc error: code = NotFound desc = could not find container \"d00d835dc1dbc2461d0c91743b94e5fad21abda74168aa06d622f30e4cb362b4\": container with ID starting with d00d835dc1dbc2461d0c91743b94e5fad21abda74168aa06d622f30e4cb362b4 not found: ID does not exist" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.030035 4558 scope.go:117] "RemoveContainer" containerID="cee97a0e46fb53a7035606e81d6282fb0ba2c0e098ff4e08ca4586445e9122cb" Jan 20 17:43:59 crc kubenswrapper[4558]: E0120 17:43:59.030341 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cee97a0e46fb53a7035606e81d6282fb0ba2c0e098ff4e08ca4586445e9122cb\": container with ID starting with cee97a0e46fb53a7035606e81d6282fb0ba2c0e098ff4e08ca4586445e9122cb not found: ID does not exist" containerID="cee97a0e46fb53a7035606e81d6282fb0ba2c0e098ff4e08ca4586445e9122cb" Jan 20 17:43:59 crc 
kubenswrapper[4558]: I0120 17:43:59.030379 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cee97a0e46fb53a7035606e81d6282fb0ba2c0e098ff4e08ca4586445e9122cb"} err="failed to get container status \"cee97a0e46fb53a7035606e81d6282fb0ba2c0e098ff4e08ca4586445e9122cb\": rpc error: code = NotFound desc = could not find container \"cee97a0e46fb53a7035606e81d6282fb0ba2c0e098ff4e08ca4586445e9122cb\": container with ID starting with cee97a0e46fb53a7035606e81d6282fb0ba2c0e098ff4e08ca4586445e9122cb not found: ID does not exist" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.030409 4558 scope.go:117] "RemoveContainer" containerID="0341e4fb184d688b59bd82831888ea8b9d6cff92bd2c69cad322485ad1a47834" Jan 20 17:43:59 crc kubenswrapper[4558]: E0120 17:43:59.030660 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0341e4fb184d688b59bd82831888ea8b9d6cff92bd2c69cad322485ad1a47834\": container with ID starting with 0341e4fb184d688b59bd82831888ea8b9d6cff92bd2c69cad322485ad1a47834 not found: ID does not exist" containerID="0341e4fb184d688b59bd82831888ea8b9d6cff92bd2c69cad322485ad1a47834" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.030685 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0341e4fb184d688b59bd82831888ea8b9d6cff92bd2c69cad322485ad1a47834"} err="failed to get container status \"0341e4fb184d688b59bd82831888ea8b9d6cff92bd2c69cad322485ad1a47834\": rpc error: code = NotFound desc = could not find container \"0341e4fb184d688b59bd82831888ea8b9d6cff92bd2c69cad322485ad1a47834\": container with ID starting with 0341e4fb184d688b59bd82831888ea8b9d6cff92bd2c69cad322485ad1a47834 not found: ID does not exist" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.030700 4558 scope.go:117] "RemoveContainer" containerID="77777fba94586efb6610ba10279160fb83ea9a44be2ecca14f8d05427010707c" Jan 20 17:43:59 crc kubenswrapper[4558]: E0120 17:43:59.030901 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"77777fba94586efb6610ba10279160fb83ea9a44be2ecca14f8d05427010707c\": container with ID starting with 77777fba94586efb6610ba10279160fb83ea9a44be2ecca14f8d05427010707c not found: ID does not exist" containerID="77777fba94586efb6610ba10279160fb83ea9a44be2ecca14f8d05427010707c" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.030930 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"77777fba94586efb6610ba10279160fb83ea9a44be2ecca14f8d05427010707c"} err="failed to get container status \"77777fba94586efb6610ba10279160fb83ea9a44be2ecca14f8d05427010707c\": rpc error: code = NotFound desc = could not find container \"77777fba94586efb6610ba10279160fb83ea9a44be2ecca14f8d05427010707c\": container with ID starting with 77777fba94586efb6610ba10279160fb83ea9a44be2ecca14f8d05427010707c not found: ID does not exist" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.030945 4558 scope.go:117] "RemoveContainer" containerID="d00d835dc1dbc2461d0c91743b94e5fad21abda74168aa06d622f30e4cb362b4" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.031216 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d00d835dc1dbc2461d0c91743b94e5fad21abda74168aa06d622f30e4cb362b4"} err="failed to get container status 
\"d00d835dc1dbc2461d0c91743b94e5fad21abda74168aa06d622f30e4cb362b4\": rpc error: code = NotFound desc = could not find container \"d00d835dc1dbc2461d0c91743b94e5fad21abda74168aa06d622f30e4cb362b4\": container with ID starting with d00d835dc1dbc2461d0c91743b94e5fad21abda74168aa06d622f30e4cb362b4 not found: ID does not exist" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.031238 4558 scope.go:117] "RemoveContainer" containerID="cee97a0e46fb53a7035606e81d6282fb0ba2c0e098ff4e08ca4586445e9122cb" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.031440 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cee97a0e46fb53a7035606e81d6282fb0ba2c0e098ff4e08ca4586445e9122cb"} err="failed to get container status \"cee97a0e46fb53a7035606e81d6282fb0ba2c0e098ff4e08ca4586445e9122cb\": rpc error: code = NotFound desc = could not find container \"cee97a0e46fb53a7035606e81d6282fb0ba2c0e098ff4e08ca4586445e9122cb\": container with ID starting with cee97a0e46fb53a7035606e81d6282fb0ba2c0e098ff4e08ca4586445e9122cb not found: ID does not exist" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.031460 4558 scope.go:117] "RemoveContainer" containerID="0341e4fb184d688b59bd82831888ea8b9d6cff92bd2c69cad322485ad1a47834" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.031692 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0341e4fb184d688b59bd82831888ea8b9d6cff92bd2c69cad322485ad1a47834"} err="failed to get container status \"0341e4fb184d688b59bd82831888ea8b9d6cff92bd2c69cad322485ad1a47834\": rpc error: code = NotFound desc = could not find container \"0341e4fb184d688b59bd82831888ea8b9d6cff92bd2c69cad322485ad1a47834\": container with ID starting with 0341e4fb184d688b59bd82831888ea8b9d6cff92bd2c69cad322485ad1a47834 not found: ID does not exist" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.031713 4558 scope.go:117] "RemoveContainer" containerID="77777fba94586efb6610ba10279160fb83ea9a44be2ecca14f8d05427010707c" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.032061 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"77777fba94586efb6610ba10279160fb83ea9a44be2ecca14f8d05427010707c"} err="failed to get container status \"77777fba94586efb6610ba10279160fb83ea9a44be2ecca14f8d05427010707c\": rpc error: code = NotFound desc = could not find container \"77777fba94586efb6610ba10279160fb83ea9a44be2ecca14f8d05427010707c\": container with ID starting with 77777fba94586efb6610ba10279160fb83ea9a44be2ecca14f8d05427010707c not found: ID does not exist" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.032081 4558 scope.go:117] "RemoveContainer" containerID="d00d835dc1dbc2461d0c91743b94e5fad21abda74168aa06d622f30e4cb362b4" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.032351 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d00d835dc1dbc2461d0c91743b94e5fad21abda74168aa06d622f30e4cb362b4"} err="failed to get container status \"d00d835dc1dbc2461d0c91743b94e5fad21abda74168aa06d622f30e4cb362b4\": rpc error: code = NotFound desc = could not find container \"d00d835dc1dbc2461d0c91743b94e5fad21abda74168aa06d622f30e4cb362b4\": container with ID starting with d00d835dc1dbc2461d0c91743b94e5fad21abda74168aa06d622f30e4cb362b4 not found: ID does not exist" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.032370 4558 scope.go:117] "RemoveContainer" 
containerID="cee97a0e46fb53a7035606e81d6282fb0ba2c0e098ff4e08ca4586445e9122cb" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.032609 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cee97a0e46fb53a7035606e81d6282fb0ba2c0e098ff4e08ca4586445e9122cb"} err="failed to get container status \"cee97a0e46fb53a7035606e81d6282fb0ba2c0e098ff4e08ca4586445e9122cb\": rpc error: code = NotFound desc = could not find container \"cee97a0e46fb53a7035606e81d6282fb0ba2c0e098ff4e08ca4586445e9122cb\": container with ID starting with cee97a0e46fb53a7035606e81d6282fb0ba2c0e098ff4e08ca4586445e9122cb not found: ID does not exist" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.032628 4558 scope.go:117] "RemoveContainer" containerID="0341e4fb184d688b59bd82831888ea8b9d6cff92bd2c69cad322485ad1a47834" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.032835 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0341e4fb184d688b59bd82831888ea8b9d6cff92bd2c69cad322485ad1a47834"} err="failed to get container status \"0341e4fb184d688b59bd82831888ea8b9d6cff92bd2c69cad322485ad1a47834\": rpc error: code = NotFound desc = could not find container \"0341e4fb184d688b59bd82831888ea8b9d6cff92bd2c69cad322485ad1a47834\": container with ID starting with 0341e4fb184d688b59bd82831888ea8b9d6cff92bd2c69cad322485ad1a47834 not found: ID does not exist" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.032854 4558 scope.go:117] "RemoveContainer" containerID="77777fba94586efb6610ba10279160fb83ea9a44be2ecca14f8d05427010707c" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.033063 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"77777fba94586efb6610ba10279160fb83ea9a44be2ecca14f8d05427010707c"} err="failed to get container status \"77777fba94586efb6610ba10279160fb83ea9a44be2ecca14f8d05427010707c\": rpc error: code = NotFound desc = could not find container \"77777fba94586efb6610ba10279160fb83ea9a44be2ecca14f8d05427010707c\": container with ID starting with 77777fba94586efb6610ba10279160fb83ea9a44be2ecca14f8d05427010707c not found: ID does not exist" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.033091 4558 scope.go:117] "RemoveContainer" containerID="d00d835dc1dbc2461d0c91743b94e5fad21abda74168aa06d622f30e4cb362b4" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.033323 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d00d835dc1dbc2461d0c91743b94e5fad21abda74168aa06d622f30e4cb362b4"} err="failed to get container status \"d00d835dc1dbc2461d0c91743b94e5fad21abda74168aa06d622f30e4cb362b4\": rpc error: code = NotFound desc = could not find container \"d00d835dc1dbc2461d0c91743b94e5fad21abda74168aa06d622f30e4cb362b4\": container with ID starting with d00d835dc1dbc2461d0c91743b94e5fad21abda74168aa06d622f30e4cb362b4 not found: ID does not exist" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.033342 4558 scope.go:117] "RemoveContainer" containerID="cee97a0e46fb53a7035606e81d6282fb0ba2c0e098ff4e08ca4586445e9122cb" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.033559 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cee97a0e46fb53a7035606e81d6282fb0ba2c0e098ff4e08ca4586445e9122cb"} err="failed to get container status \"cee97a0e46fb53a7035606e81d6282fb0ba2c0e098ff4e08ca4586445e9122cb\": rpc error: code = NotFound desc = could not find 
container \"cee97a0e46fb53a7035606e81d6282fb0ba2c0e098ff4e08ca4586445e9122cb\": container with ID starting with cee97a0e46fb53a7035606e81d6282fb0ba2c0e098ff4e08ca4586445e9122cb not found: ID does not exist" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.033578 4558 scope.go:117] "RemoveContainer" containerID="0341e4fb184d688b59bd82831888ea8b9d6cff92bd2c69cad322485ad1a47834" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.033782 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0341e4fb184d688b59bd82831888ea8b9d6cff92bd2c69cad322485ad1a47834"} err="failed to get container status \"0341e4fb184d688b59bd82831888ea8b9d6cff92bd2c69cad322485ad1a47834\": rpc error: code = NotFound desc = could not find container \"0341e4fb184d688b59bd82831888ea8b9d6cff92bd2c69cad322485ad1a47834\": container with ID starting with 0341e4fb184d688b59bd82831888ea8b9d6cff92bd2c69cad322485ad1a47834 not found: ID does not exist" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.033801 4558 scope.go:117] "RemoveContainer" containerID="77777fba94586efb6610ba10279160fb83ea9a44be2ecca14f8d05427010707c" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.034098 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"77777fba94586efb6610ba10279160fb83ea9a44be2ecca14f8d05427010707c"} err="failed to get container status \"77777fba94586efb6610ba10279160fb83ea9a44be2ecca14f8d05427010707c\": rpc error: code = NotFound desc = could not find container \"77777fba94586efb6610ba10279160fb83ea9a44be2ecca14f8d05427010707c\": container with ID starting with 77777fba94586efb6610ba10279160fb83ea9a44be2ecca14f8d05427010707c not found: ID does not exist" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.092197 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2vzk4\" (UniqueName: \"kubernetes.io/projected/86aff100-d474-48ed-b673-4dae7c0722cf-kube-api-access-2vzk4\") pod \"openstack-galera-0\" (UID: \"86aff100-d474-48ed-b673-4dae7c0722cf\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.092345 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/86aff100-d474-48ed-b673-4dae7c0722cf-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"86aff100-d474-48ed-b673-4dae7c0722cf\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.092373 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/86aff100-d474-48ed-b673-4dae7c0722cf-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"86aff100-d474-48ed-b673-4dae7c0722cf\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.092553 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"openstack-galera-0\" (UID: \"86aff100-d474-48ed-b673-4dae7c0722cf\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.092713 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/86aff100-d474-48ed-b673-4dae7c0722cf-operator-scripts\") pod \"openstack-galera-0\" (UID: \"86aff100-d474-48ed-b673-4dae7c0722cf\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.092846 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/86aff100-d474-48ed-b673-4dae7c0722cf-config-data-generated\") pod \"openstack-galera-0\" (UID: \"86aff100-d474-48ed-b673-4dae7c0722cf\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.092875 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/86aff100-d474-48ed-b673-4dae7c0722cf-config-data-default\") pod \"openstack-galera-0\" (UID: \"86aff100-d474-48ed-b673-4dae7c0722cf\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.092974 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/86aff100-d474-48ed-b673-4dae7c0722cf-kolla-config\") pod \"openstack-galera-0\" (UID: \"86aff100-d474-48ed-b673-4dae7c0722cf\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.195414 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"openstack-galera-0\" (UID: \"86aff100-d474-48ed-b673-4dae7c0722cf\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.195519 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/86aff100-d474-48ed-b673-4dae7c0722cf-operator-scripts\") pod \"openstack-galera-0\" (UID: \"86aff100-d474-48ed-b673-4dae7c0722cf\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.195573 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/86aff100-d474-48ed-b673-4dae7c0722cf-config-data-generated\") pod \"openstack-galera-0\" (UID: \"86aff100-d474-48ed-b673-4dae7c0722cf\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.195598 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/86aff100-d474-48ed-b673-4dae7c0722cf-config-data-default\") pod \"openstack-galera-0\" (UID: \"86aff100-d474-48ed-b673-4dae7c0722cf\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.195634 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/86aff100-d474-48ed-b673-4dae7c0722cf-kolla-config\") pod \"openstack-galera-0\" (UID: \"86aff100-d474-48ed-b673-4dae7c0722cf\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.195678 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2vzk4\" (UniqueName: 
\"kubernetes.io/projected/86aff100-d474-48ed-b673-4dae7c0722cf-kube-api-access-2vzk4\") pod \"openstack-galera-0\" (UID: \"86aff100-d474-48ed-b673-4dae7c0722cf\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.195707 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"openstack-galera-0\" (UID: \"86aff100-d474-48ed-b673-4dae7c0722cf\") device mount path \"/mnt/openstack/pv14\"" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.195729 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/86aff100-d474-48ed-b673-4dae7c0722cf-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"86aff100-d474-48ed-b673-4dae7c0722cf\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.195958 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/86aff100-d474-48ed-b673-4dae7c0722cf-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"86aff100-d474-48ed-b673-4dae7c0722cf\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.196455 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/86aff100-d474-48ed-b673-4dae7c0722cf-config-data-generated\") pod \"openstack-galera-0\" (UID: \"86aff100-d474-48ed-b673-4dae7c0722cf\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.197390 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/86aff100-d474-48ed-b673-4dae7c0722cf-kolla-config\") pod \"openstack-galera-0\" (UID: \"86aff100-d474-48ed-b673-4dae7c0722cf\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.197538 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/86aff100-d474-48ed-b673-4dae7c0722cf-config-data-default\") pod \"openstack-galera-0\" (UID: \"86aff100-d474-48ed-b673-4dae7c0722cf\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.198706 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/86aff100-d474-48ed-b673-4dae7c0722cf-operator-scripts\") pod \"openstack-galera-0\" (UID: \"86aff100-d474-48ed-b673-4dae7c0722cf\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.200587 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/86aff100-d474-48ed-b673-4dae7c0722cf-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"86aff100-d474-48ed-b673-4dae7c0722cf\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.202645 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/86aff100-d474-48ed-b673-4dae7c0722cf-galera-tls-certs\") pod \"openstack-galera-0\" (UID: 
\"86aff100-d474-48ed-b673-4dae7c0722cf\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.215998 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2vzk4\" (UniqueName: \"kubernetes.io/projected/86aff100-d474-48ed-b673-4dae7c0722cf-kube-api-access-2vzk4\") pod \"openstack-galera-0\" (UID: \"86aff100-d474-48ed-b673-4dae7c0722cf\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.224520 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"openstack-galera-0\" (UID: \"86aff100-d474-48ed-b673-4dae7c0722cf\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.296474 4558 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.326130 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack-kuttl-tests/cinder-scheduler-0" podUID="7faabe16-9de5-49dc-bab6-44e173f4403c" containerName="cinder-scheduler" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.502571 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/046d6bef-b034-4093-9a77-f23074beaf20-combined-ca-bundle\") pod \"046d6bef-b034-4093-9a77-f23074beaf20\" (UID: \"046d6bef-b034-4093-9a77-f23074beaf20\") " Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.506673 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/046d6bef-b034-4093-9a77-f23074beaf20-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "046d6bef-b034-4093-9a77-f23074beaf20" (UID: "046d6bef-b034-4093-9a77-f23074beaf20"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.509464 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.553233 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.593304 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.605382 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/046d6bef-b034-4093-9a77-f23074beaf20-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.707046 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/572d7a53-9a17-43e3-bc12-a04f994eb857-combined-ca-bundle\") pod \"572d7a53-9a17-43e3-bc12-a04f994eb857\" (UID: \"572d7a53-9a17-43e3-bc12-a04f994eb857\") " Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.707128 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/572d7a53-9a17-43e3-bc12-a04f994eb857-config-data\") pod \"572d7a53-9a17-43e3-bc12-a04f994eb857\" (UID: \"572d7a53-9a17-43e3-bc12-a04f994eb857\") " Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.707207 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nt989\" (UniqueName: \"kubernetes.io/projected/572d7a53-9a17-43e3-bc12-a04f994eb857-kube-api-access-nt989\") pod \"572d7a53-9a17-43e3-bc12-a04f994eb857\" (UID: \"572d7a53-9a17-43e3-bc12-a04f994eb857\") " Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.707281 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/572d7a53-9a17-43e3-bc12-a04f994eb857-nova-metadata-tls-certs\") pod \"572d7a53-9a17-43e3-bc12-a04f994eb857\" (UID: \"572d7a53-9a17-43e3-bc12-a04f994eb857\") " Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.707357 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/572d7a53-9a17-43e3-bc12-a04f994eb857-logs\") pod \"572d7a53-9a17-43e3-bc12-a04f994eb857\" (UID: \"572d7a53-9a17-43e3-bc12-a04f994eb857\") " Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.710109 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/572d7a53-9a17-43e3-bc12-a04f994eb857-logs" (OuterVolumeSpecName: "logs") pod "572d7a53-9a17-43e3-bc12-a04f994eb857" (UID: "572d7a53-9a17-43e3-bc12-a04f994eb857"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.712977 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/572d7a53-9a17-43e3-bc12-a04f994eb857-kube-api-access-nt989" (OuterVolumeSpecName: "kube-api-access-nt989") pod "572d7a53-9a17-43e3-bc12-a04f994eb857" (UID: "572d7a53-9a17-43e3-bc12-a04f994eb857"). InnerVolumeSpecName "kube-api-access-nt989". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.736862 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.741752 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/572d7a53-9a17-43e3-bc12-a04f994eb857-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "572d7a53-9a17-43e3-bc12-a04f994eb857" (UID: "572d7a53-9a17-43e3-bc12-a04f994eb857"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.744656 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/572d7a53-9a17-43e3-bc12-a04f994eb857-config-data" (OuterVolumeSpecName: "config-data") pod "572d7a53-9a17-43e3-bc12-a04f994eb857" (UID: "572d7a53-9a17-43e3-bc12-a04f994eb857"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.744739 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.759119 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:43:59 crc kubenswrapper[4558]: E0120 17:43:59.762194 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="572d7a53-9a17-43e3-bc12-a04f994eb857" containerName="nova-metadata-log" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.762296 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="572d7a53-9a17-43e3-bc12-a04f994eb857" containerName="nova-metadata-log" Jan 20 17:43:59 crc kubenswrapper[4558]: E0120 17:43:59.762513 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="572d7a53-9a17-43e3-bc12-a04f994eb857" containerName="nova-metadata-metadata" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.762618 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="572d7a53-9a17-43e3-bc12-a04f994eb857" containerName="nova-metadata-metadata" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.763024 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="572d7a53-9a17-43e3-bc12-a04f994eb857" containerName="nova-metadata-metadata" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.763136 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="572d7a53-9a17-43e3-bc12-a04f994eb857" containerName="nova-metadata-log" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.771049 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.773530 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.777546 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.777737 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ceilometer-internal-svc" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.782922 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.799338 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/572d7a53-9a17-43e3-bc12-a04f994eb857-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "572d7a53-9a17-43e3-bc12-a04f994eb857" (UID: "572d7a53-9a17-43e3-bc12-a04f994eb857"). InnerVolumeSpecName "nova-metadata-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.813026 4558 scope.go:117] "RemoveContainer" containerID="696eca395715bd4cf46b8dd0ddb53ef0ad8c84c94f7e91863731528e53ac125e" Jan 20 17:43:59 crc kubenswrapper[4558]: E0120 17:43:59.813435 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-cell0-conductor-conductor\" with CrashLoopBackOff: \"back-off 20s restarting failed container=nova-cell0-conductor-conductor pod=nova-cell0-conductor-0_openstack-kuttl-tests(50cacddd-ebea-477f-af64-6e96a09a242e)\"" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="50cacddd-ebea-477f-af64-6e96a09a242e" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.813551 4558 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/572d7a53-9a17-43e3-bc12-a04f994eb857-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.813586 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/572d7a53-9a17-43e3-bc12-a04f994eb857-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.813600 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/572d7a53-9a17-43e3-bc12-a04f994eb857-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.813612 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/572d7a53-9a17-43e3-bc12-a04f994eb857-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.813623 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nt989\" (UniqueName: \"kubernetes.io/projected/572d7a53-9a17-43e3-bc12-a04f994eb857-kube-api-access-nt989\") on node \"crc\" DevicePath \"\"" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.817474 4558 generic.go:334] "Generic (PLEG): container finished" podID="572d7a53-9a17-43e3-bc12-a04f994eb857" containerID="3df9ef85a677a70aa8631b76f358d8c7876e88242945b5d72d5b5a6a3a549fd0" exitCode=0 Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.817584 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.818220 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"572d7a53-9a17-43e3-bc12-a04f994eb857","Type":"ContainerDied","Data":"3df9ef85a677a70aa8631b76f358d8c7876e88242945b5d72d5b5a6a3a549fd0"} Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.818262 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"572d7a53-9a17-43e3-bc12-a04f994eb857","Type":"ContainerDied","Data":"63594f53fd7e8175292564ee259baa322ec9835e023668dfc739e433a47e39df"} Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.818295 4558 scope.go:117] "RemoveContainer" containerID="3df9ef85a677a70aa8631b76f358d8c7876e88242945b5d72d5b5a6a3a549fd0" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.848931 4558 scope.go:117] "RemoveContainer" containerID="a919d8c90acccccbc8402af93925861e0da53f38a61a01f63b79f7cffbdcd991" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.854042 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.872745 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.876882 4558 scope.go:117] "RemoveContainer" containerID="3df9ef85a677a70aa8631b76f358d8c7876e88242945b5d72d5b5a6a3a549fd0" Jan 20 17:43:59 crc kubenswrapper[4558]: E0120 17:43:59.877319 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3df9ef85a677a70aa8631b76f358d8c7876e88242945b5d72d5b5a6a3a549fd0\": container with ID starting with 3df9ef85a677a70aa8631b76f358d8c7876e88242945b5d72d5b5a6a3a549fd0 not found: ID does not exist" containerID="3df9ef85a677a70aa8631b76f358d8c7876e88242945b5d72d5b5a6a3a549fd0" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.877358 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3df9ef85a677a70aa8631b76f358d8c7876e88242945b5d72d5b5a6a3a549fd0"} err="failed to get container status \"3df9ef85a677a70aa8631b76f358d8c7876e88242945b5d72d5b5a6a3a549fd0\": rpc error: code = NotFound desc = could not find container \"3df9ef85a677a70aa8631b76f358d8c7876e88242945b5d72d5b5a6a3a549fd0\": container with ID starting with 3df9ef85a677a70aa8631b76f358d8c7876e88242945b5d72d5b5a6a3a549fd0 not found: ID does not exist" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.877380 4558 scope.go:117] "RemoveContainer" containerID="a919d8c90acccccbc8402af93925861e0da53f38a61a01f63b79f7cffbdcd991" Jan 20 17:43:59 crc kubenswrapper[4558]: E0120 17:43:59.877678 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a919d8c90acccccbc8402af93925861e0da53f38a61a01f63b79f7cffbdcd991\": container with ID starting with a919d8c90acccccbc8402af93925861e0da53f38a61a01f63b79f7cffbdcd991 not found: ID does not exist" containerID="a919d8c90acccccbc8402af93925861e0da53f38a61a01f63b79f7cffbdcd991" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.877703 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a919d8c90acccccbc8402af93925861e0da53f38a61a01f63b79f7cffbdcd991"} err="failed to get container status 
\"a919d8c90acccccbc8402af93925861e0da53f38a61a01f63b79f7cffbdcd991\": rpc error: code = NotFound desc = could not find container \"a919d8c90acccccbc8402af93925861e0da53f38a61a01f63b79f7cffbdcd991\": container with ID starting with a919d8c90acccccbc8402af93925861e0da53f38a61a01f63b79f7cffbdcd991 not found: ID does not exist" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.892225 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.894021 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.898873 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.899355 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-metadata-config-data" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.902748 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-metadata-internal-svc" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.916815 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sn479\" (UniqueName: \"kubernetes.io/projected/bda29809-3f10-45ff-8fea-3d72f5182e7a-kube-api-access-sn479\") pod \"ceilometer-0\" (UID: \"bda29809-3f10-45ff-8fea-3d72f5182e7a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.916977 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bda29809-3f10-45ff-8fea-3d72f5182e7a-run-httpd\") pod \"ceilometer-0\" (UID: \"bda29809-3f10-45ff-8fea-3d72f5182e7a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.917017 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bda29809-3f10-45ff-8fea-3d72f5182e7a-scripts\") pod \"ceilometer-0\" (UID: \"bda29809-3f10-45ff-8fea-3d72f5182e7a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.917079 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bda29809-3f10-45ff-8fea-3d72f5182e7a-log-httpd\") pod \"ceilometer-0\" (UID: \"bda29809-3f10-45ff-8fea-3d72f5182e7a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.917100 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/bda29809-3f10-45ff-8fea-3d72f5182e7a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"bda29809-3f10-45ff-8fea-3d72f5182e7a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.917190 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/bda29809-3f10-45ff-8fea-3d72f5182e7a-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"bda29809-3f10-45ff-8fea-3d72f5182e7a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:59 crc 
kubenswrapper[4558]: I0120 17:43:59.917919 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bda29809-3f10-45ff-8fea-3d72f5182e7a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"bda29809-3f10-45ff-8fea-3d72f5182e7a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.917954 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bda29809-3f10-45ff-8fea-3d72f5182e7a-config-data\") pod \"ceilometer-0\" (UID: \"bda29809-3f10-45ff-8fea-3d72f5182e7a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.991398 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-7f8885c56d-wsxms"] Jan 20 17:43:59 crc kubenswrapper[4558]: I0120 17:43:59.991757 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/keystone-7f8885c56d-wsxms" podUID="b911f497-5616-416f-8aa2-73c7cce172eb" containerName="keystone-api" containerID="cri-o://2d6106f888df62c14a5adbf7426959067a2a5587b57672909aebbf2d13dffda8" gracePeriod=30 Jan 20 17:44:00 crc kubenswrapper[4558]: I0120 17:44:00.020726 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-scd9s\" (UniqueName: \"kubernetes.io/projected/ff0d0703-8173-4ac0-afc0-e673feaef286-kube-api-access-scd9s\") pod \"nova-metadata-0\" (UID: \"ff0d0703-8173-4ac0-afc0-e673feaef286\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:44:00 crc kubenswrapper[4558]: I0120 17:44:00.020933 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sn479\" (UniqueName: \"kubernetes.io/projected/bda29809-3f10-45ff-8fea-3d72f5182e7a-kube-api-access-sn479\") pod \"ceilometer-0\" (UID: \"bda29809-3f10-45ff-8fea-3d72f5182e7a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:44:00 crc kubenswrapper[4558]: I0120 17:44:00.021046 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff0d0703-8173-4ac0-afc0-e673feaef286-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"ff0d0703-8173-4ac0-afc0-e673feaef286\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:44:00 crc kubenswrapper[4558]: I0120 17:44:00.021187 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bda29809-3f10-45ff-8fea-3d72f5182e7a-run-httpd\") pod \"ceilometer-0\" (UID: \"bda29809-3f10-45ff-8fea-3d72f5182e7a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:44:00 crc kubenswrapper[4558]: I0120 17:44:00.021279 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/ff0d0703-8173-4ac0-afc0-e673feaef286-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"ff0d0703-8173-4ac0-afc0-e673feaef286\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:44:00 crc kubenswrapper[4558]: I0120 17:44:00.021345 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff0d0703-8173-4ac0-afc0-e673feaef286-config-data\") pod \"nova-metadata-0\" (UID: 
\"ff0d0703-8173-4ac0-afc0-e673feaef286\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:44:00 crc kubenswrapper[4558]: I0120 17:44:00.021405 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ff0d0703-8173-4ac0-afc0-e673feaef286-logs\") pod \"nova-metadata-0\" (UID: \"ff0d0703-8173-4ac0-afc0-e673feaef286\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:44:00 crc kubenswrapper[4558]: I0120 17:44:00.021463 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bda29809-3f10-45ff-8fea-3d72f5182e7a-scripts\") pod \"ceilometer-0\" (UID: \"bda29809-3f10-45ff-8fea-3d72f5182e7a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:44:00 crc kubenswrapper[4558]: I0120 17:44:00.021579 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bda29809-3f10-45ff-8fea-3d72f5182e7a-log-httpd\") pod \"ceilometer-0\" (UID: \"bda29809-3f10-45ff-8fea-3d72f5182e7a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:44:00 crc kubenswrapper[4558]: I0120 17:44:00.021638 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/bda29809-3f10-45ff-8fea-3d72f5182e7a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"bda29809-3f10-45ff-8fea-3d72f5182e7a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:44:00 crc kubenswrapper[4558]: I0120 17:44:00.021816 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/bda29809-3f10-45ff-8fea-3d72f5182e7a-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"bda29809-3f10-45ff-8fea-3d72f5182e7a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:44:00 crc kubenswrapper[4558]: I0120 17:44:00.021981 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bda29809-3f10-45ff-8fea-3d72f5182e7a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"bda29809-3f10-45ff-8fea-3d72f5182e7a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:44:00 crc kubenswrapper[4558]: I0120 17:44:00.022063 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bda29809-3f10-45ff-8fea-3d72f5182e7a-config-data\") pod \"ceilometer-0\" (UID: \"bda29809-3f10-45ff-8fea-3d72f5182e7a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:44:00 crc kubenswrapper[4558]: I0120 17:44:00.033016 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bda29809-3f10-45ff-8fea-3d72f5182e7a-log-httpd\") pod \"ceilometer-0\" (UID: \"bda29809-3f10-45ff-8fea-3d72f5182e7a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:44:00 crc kubenswrapper[4558]: I0120 17:44:00.033556 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bda29809-3f10-45ff-8fea-3d72f5182e7a-run-httpd\") pod \"ceilometer-0\" (UID: \"bda29809-3f10-45ff-8fea-3d72f5182e7a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:44:00 crc kubenswrapper[4558]: I0120 17:44:00.041324 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/bda29809-3f10-45ff-8fea-3d72f5182e7a-config-data\") pod \"ceilometer-0\" (UID: \"bda29809-3f10-45ff-8fea-3d72f5182e7a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:44:00 crc kubenswrapper[4558]: I0120 17:44:00.041785 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/bda29809-3f10-45ff-8fea-3d72f5182e7a-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"bda29809-3f10-45ff-8fea-3d72f5182e7a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:44:00 crc kubenswrapper[4558]: I0120 17:44:00.042517 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-f7d8994df-dzxmh"] Jan 20 17:44:00 crc kubenswrapper[4558]: I0120 17:44:00.043974 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-f7d8994df-dzxmh" Jan 20 17:44:00 crc kubenswrapper[4558]: I0120 17:44:00.045524 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bda29809-3f10-45ff-8fea-3d72f5182e7a-scripts\") pod \"ceilometer-0\" (UID: \"bda29809-3f10-45ff-8fea-3d72f5182e7a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:44:00 crc kubenswrapper[4558]: I0120 17:44:00.045955 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/bda29809-3f10-45ff-8fea-3d72f5182e7a-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"bda29809-3f10-45ff-8fea-3d72f5182e7a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:44:00 crc kubenswrapper[4558]: I0120 17:44:00.060541 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sn479\" (UniqueName: \"kubernetes.io/projected/bda29809-3f10-45ff-8fea-3d72f5182e7a-kube-api-access-sn479\") pod \"ceilometer-0\" (UID: \"bda29809-3f10-45ff-8fea-3d72f5182e7a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:44:00 crc kubenswrapper[4558]: I0120 17:44:00.065801 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bda29809-3f10-45ff-8fea-3d72f5182e7a-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"bda29809-3f10-45ff-8fea-3d72f5182e7a\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:44:00 crc kubenswrapper[4558]: I0120 17:44:00.074966 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:44:00 crc kubenswrapper[4558]: I0120 17:44:00.101248 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:44:00 crc kubenswrapper[4558]: I0120 17:44:00.118235 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-f7d8994df-dzxmh"] Jan 20 17:44:00 crc kubenswrapper[4558]: I0120 17:44:00.126250 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-scd9s\" (UniqueName: \"kubernetes.io/projected/ff0d0703-8173-4ac0-afc0-e673feaef286-kube-api-access-scd9s\") pod \"nova-metadata-0\" (UID: \"ff0d0703-8173-4ac0-afc0-e673feaef286\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:44:00 crc kubenswrapper[4558]: I0120 17:44:00.126350 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff0d0703-8173-4ac0-afc0-e673feaef286-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"ff0d0703-8173-4ac0-afc0-e673feaef286\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:44:00 crc kubenswrapper[4558]: I0120 17:44:00.126444 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/ff0d0703-8173-4ac0-afc0-e673feaef286-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"ff0d0703-8173-4ac0-afc0-e673feaef286\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:44:00 crc kubenswrapper[4558]: I0120 17:44:00.126465 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff0d0703-8173-4ac0-afc0-e673feaef286-config-data\") pod \"nova-metadata-0\" (UID: \"ff0d0703-8173-4ac0-afc0-e673feaef286\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:44:00 crc kubenswrapper[4558]: I0120 17:44:00.126501 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ff0d0703-8173-4ac0-afc0-e673feaef286-logs\") pod \"nova-metadata-0\" (UID: \"ff0d0703-8173-4ac0-afc0-e673feaef286\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:44:00 crc kubenswrapper[4558]: I0120 17:44:00.127127 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ff0d0703-8173-4ac0-afc0-e673feaef286-logs\") pod \"nova-metadata-0\" (UID: \"ff0d0703-8173-4ac0-afc0-e673feaef286\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:44:00 crc kubenswrapper[4558]: I0120 17:44:00.135209 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff0d0703-8173-4ac0-afc0-e673feaef286-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"ff0d0703-8173-4ac0-afc0-e673feaef286\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:44:00 crc kubenswrapper[4558]: I0120 17:44:00.137520 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/ff0d0703-8173-4ac0-afc0-e673feaef286-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"ff0d0703-8173-4ac0-afc0-e673feaef286\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:44:00 crc kubenswrapper[4558]: I0120 17:44:00.175396 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-scd9s\" (UniqueName: \"kubernetes.io/projected/ff0d0703-8173-4ac0-afc0-e673feaef286-kube-api-access-scd9s\") pod \"nova-metadata-0\" (UID: \"ff0d0703-8173-4ac0-afc0-e673feaef286\") 
" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:44:00 crc kubenswrapper[4558]: I0120 17:44:00.181582 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff0d0703-8173-4ac0-afc0-e673feaef286-config-data\") pod \"nova-metadata-0\" (UID: \"ff0d0703-8173-4ac0-afc0-e673feaef286\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:44:00 crc kubenswrapper[4558]: I0120 17:44:00.217863 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:44:00 crc kubenswrapper[4558]: I0120 17:44:00.240993 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/5cbfd26c-728d-420c-9d40-b6f7870cff60-credential-keys\") pod \"keystone-f7d8994df-dzxmh\" (UID: \"5cbfd26c-728d-420c-9d40-b6f7870cff60\") " pod="openstack-kuttl-tests/keystone-f7d8994df-dzxmh" Jan 20 17:44:00 crc kubenswrapper[4558]: I0120 17:44:00.241065 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5cbfd26c-728d-420c-9d40-b6f7870cff60-internal-tls-certs\") pod \"keystone-f7d8994df-dzxmh\" (UID: \"5cbfd26c-728d-420c-9d40-b6f7870cff60\") " pod="openstack-kuttl-tests/keystone-f7d8994df-dzxmh" Jan 20 17:44:00 crc kubenswrapper[4558]: I0120 17:44:00.241095 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5cbfd26c-728d-420c-9d40-b6f7870cff60-combined-ca-bundle\") pod \"keystone-f7d8994df-dzxmh\" (UID: \"5cbfd26c-728d-420c-9d40-b6f7870cff60\") " pod="openstack-kuttl-tests/keystone-f7d8994df-dzxmh" Jan 20 17:44:00 crc kubenswrapper[4558]: I0120 17:44:00.241119 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5cbfd26c-728d-420c-9d40-b6f7870cff60-scripts\") pod \"keystone-f7d8994df-dzxmh\" (UID: \"5cbfd26c-728d-420c-9d40-b6f7870cff60\") " pod="openstack-kuttl-tests/keystone-f7d8994df-dzxmh" Jan 20 17:44:00 crc kubenswrapper[4558]: I0120 17:44:00.241219 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j4z9c\" (UniqueName: \"kubernetes.io/projected/5cbfd26c-728d-420c-9d40-b6f7870cff60-kube-api-access-j4z9c\") pod \"keystone-f7d8994df-dzxmh\" (UID: \"5cbfd26c-728d-420c-9d40-b6f7870cff60\") " pod="openstack-kuttl-tests/keystone-f7d8994df-dzxmh" Jan 20 17:44:00 crc kubenswrapper[4558]: I0120 17:44:00.241252 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/5cbfd26c-728d-420c-9d40-b6f7870cff60-fernet-keys\") pod \"keystone-f7d8994df-dzxmh\" (UID: \"5cbfd26c-728d-420c-9d40-b6f7870cff60\") " pod="openstack-kuttl-tests/keystone-f7d8994df-dzxmh" Jan 20 17:44:00 crc kubenswrapper[4558]: I0120 17:44:00.241268 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5cbfd26c-728d-420c-9d40-b6f7870cff60-public-tls-certs\") pod \"keystone-f7d8994df-dzxmh\" (UID: \"5cbfd26c-728d-420c-9d40-b6f7870cff60\") " pod="openstack-kuttl-tests/keystone-f7d8994df-dzxmh" Jan 20 17:44:00 crc kubenswrapper[4558]: I0120 17:44:00.241300 4558 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5cbfd26c-728d-420c-9d40-b6f7870cff60-config-data\") pod \"keystone-f7d8994df-dzxmh\" (UID: \"5cbfd26c-728d-420c-9d40-b6f7870cff60\") " pod="openstack-kuttl-tests/keystone-f7d8994df-dzxmh" Jan 20 17:44:00 crc kubenswrapper[4558]: I0120 17:44:00.348950 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j4z9c\" (UniqueName: \"kubernetes.io/projected/5cbfd26c-728d-420c-9d40-b6f7870cff60-kube-api-access-j4z9c\") pod \"keystone-f7d8994df-dzxmh\" (UID: \"5cbfd26c-728d-420c-9d40-b6f7870cff60\") " pod="openstack-kuttl-tests/keystone-f7d8994df-dzxmh" Jan 20 17:44:00 crc kubenswrapper[4558]: I0120 17:44:00.349024 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/5cbfd26c-728d-420c-9d40-b6f7870cff60-fernet-keys\") pod \"keystone-f7d8994df-dzxmh\" (UID: \"5cbfd26c-728d-420c-9d40-b6f7870cff60\") " pod="openstack-kuttl-tests/keystone-f7d8994df-dzxmh" Jan 20 17:44:00 crc kubenswrapper[4558]: I0120 17:44:00.349049 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5cbfd26c-728d-420c-9d40-b6f7870cff60-public-tls-certs\") pod \"keystone-f7d8994df-dzxmh\" (UID: \"5cbfd26c-728d-420c-9d40-b6f7870cff60\") " pod="openstack-kuttl-tests/keystone-f7d8994df-dzxmh" Jan 20 17:44:00 crc kubenswrapper[4558]: I0120 17:44:00.349089 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5cbfd26c-728d-420c-9d40-b6f7870cff60-config-data\") pod \"keystone-f7d8994df-dzxmh\" (UID: \"5cbfd26c-728d-420c-9d40-b6f7870cff60\") " pod="openstack-kuttl-tests/keystone-f7d8994df-dzxmh" Jan 20 17:44:00 crc kubenswrapper[4558]: I0120 17:44:00.349159 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/5cbfd26c-728d-420c-9d40-b6f7870cff60-credential-keys\") pod \"keystone-f7d8994df-dzxmh\" (UID: \"5cbfd26c-728d-420c-9d40-b6f7870cff60\") " pod="openstack-kuttl-tests/keystone-f7d8994df-dzxmh" Jan 20 17:44:00 crc kubenswrapper[4558]: I0120 17:44:00.349242 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5cbfd26c-728d-420c-9d40-b6f7870cff60-internal-tls-certs\") pod \"keystone-f7d8994df-dzxmh\" (UID: \"5cbfd26c-728d-420c-9d40-b6f7870cff60\") " pod="openstack-kuttl-tests/keystone-f7d8994df-dzxmh" Jan 20 17:44:00 crc kubenswrapper[4558]: I0120 17:44:00.349276 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5cbfd26c-728d-420c-9d40-b6f7870cff60-combined-ca-bundle\") pod \"keystone-f7d8994df-dzxmh\" (UID: \"5cbfd26c-728d-420c-9d40-b6f7870cff60\") " pod="openstack-kuttl-tests/keystone-f7d8994df-dzxmh" Jan 20 17:44:00 crc kubenswrapper[4558]: I0120 17:44:00.349311 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5cbfd26c-728d-420c-9d40-b6f7870cff60-scripts\") pod \"keystone-f7d8994df-dzxmh\" (UID: \"5cbfd26c-728d-420c-9d40-b6f7870cff60\") " pod="openstack-kuttl-tests/keystone-f7d8994df-dzxmh" Jan 20 17:44:00 crc kubenswrapper[4558]: I0120 17:44:00.357207 4558 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5cbfd26c-728d-420c-9d40-b6f7870cff60-internal-tls-certs\") pod \"keystone-f7d8994df-dzxmh\" (UID: \"5cbfd26c-728d-420c-9d40-b6f7870cff60\") " pod="openstack-kuttl-tests/keystone-f7d8994df-dzxmh" Jan 20 17:44:00 crc kubenswrapper[4558]: I0120 17:44:00.360788 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5cbfd26c-728d-420c-9d40-b6f7870cff60-config-data\") pod \"keystone-f7d8994df-dzxmh\" (UID: \"5cbfd26c-728d-420c-9d40-b6f7870cff60\") " pod="openstack-kuttl-tests/keystone-f7d8994df-dzxmh" Jan 20 17:44:00 crc kubenswrapper[4558]: I0120 17:44:00.362869 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/5cbfd26c-728d-420c-9d40-b6f7870cff60-fernet-keys\") pod \"keystone-f7d8994df-dzxmh\" (UID: \"5cbfd26c-728d-420c-9d40-b6f7870cff60\") " pod="openstack-kuttl-tests/keystone-f7d8994df-dzxmh" Jan 20 17:44:00 crc kubenswrapper[4558]: I0120 17:44:00.363526 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5cbfd26c-728d-420c-9d40-b6f7870cff60-combined-ca-bundle\") pod \"keystone-f7d8994df-dzxmh\" (UID: \"5cbfd26c-728d-420c-9d40-b6f7870cff60\") " pod="openstack-kuttl-tests/keystone-f7d8994df-dzxmh" Jan 20 17:44:00 crc kubenswrapper[4558]: I0120 17:44:00.363877 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5cbfd26c-728d-420c-9d40-b6f7870cff60-public-tls-certs\") pod \"keystone-f7d8994df-dzxmh\" (UID: \"5cbfd26c-728d-420c-9d40-b6f7870cff60\") " pod="openstack-kuttl-tests/keystone-f7d8994df-dzxmh" Jan 20 17:44:00 crc kubenswrapper[4558]: I0120 17:44:00.366585 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5cbfd26c-728d-420c-9d40-b6f7870cff60-scripts\") pod \"keystone-f7d8994df-dzxmh\" (UID: \"5cbfd26c-728d-420c-9d40-b6f7870cff60\") " pod="openstack-kuttl-tests/keystone-f7d8994df-dzxmh" Jan 20 17:44:00 crc kubenswrapper[4558]: I0120 17:44:00.375110 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j4z9c\" (UniqueName: \"kubernetes.io/projected/5cbfd26c-728d-420c-9d40-b6f7870cff60-kube-api-access-j4z9c\") pod \"keystone-f7d8994df-dzxmh\" (UID: \"5cbfd26c-728d-420c-9d40-b6f7870cff60\") " pod="openstack-kuttl-tests/keystone-f7d8994df-dzxmh" Jan 20 17:44:00 crc kubenswrapper[4558]: I0120 17:44:00.378614 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/5cbfd26c-728d-420c-9d40-b6f7870cff60-credential-keys\") pod \"keystone-f7d8994df-dzxmh\" (UID: \"5cbfd26c-728d-420c-9d40-b6f7870cff60\") " pod="openstack-kuttl-tests/keystone-f7d8994df-dzxmh" Jan 20 17:44:00 crc kubenswrapper[4558]: I0120 17:44:00.404835 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-f7d8994df-dzxmh" Jan 20 17:44:00 crc kubenswrapper[4558]: I0120 17:44:00.576683 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="046d6bef-b034-4093-9a77-f23074beaf20" path="/var/lib/kubelet/pods/046d6bef-b034-4093-9a77-f23074beaf20/volumes" Jan 20 17:44:00 crc kubenswrapper[4558]: I0120 17:44:00.578139 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="38a5808f-1fa5-49cb-afb7-d7676416cd26" path="/var/lib/kubelet/pods/38a5808f-1fa5-49cb-afb7-d7676416cd26/volumes" Jan 20 17:44:00 crc kubenswrapper[4558]: I0120 17:44:00.579290 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="572d7a53-9a17-43e3-bc12-a04f994eb857" path="/var/lib/kubelet/pods/572d7a53-9a17-43e3-bc12-a04f994eb857/volumes" Jan 20 17:44:00 crc kubenswrapper[4558]: W0120 17:44:00.796009 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbda29809_3f10_45ff_8fea_3d72f5182e7a.slice/crio-bb74eaf3d9d63fe4bbfb3bcb86df029a7b879488573a5e69be6648db054cfb49 WatchSource:0}: Error finding container bb74eaf3d9d63fe4bbfb3bcb86df029a7b879488573a5e69be6648db054cfb49: Status 404 returned error can't find the container with id bb74eaf3d9d63fe4bbfb3bcb86df029a7b879488573a5e69be6648db054cfb49 Jan 20 17:44:00 crc kubenswrapper[4558]: I0120 17:44:00.797025 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:44:00 crc kubenswrapper[4558]: I0120 17:44:00.834396 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"86aff100-d474-48ed-b673-4dae7c0722cf","Type":"ContainerStarted","Data":"67cda9ffae9461498a3181421c8f34810fd6c215622487c92fe9030c49337aa1"} Jan 20 17:44:00 crc kubenswrapper[4558]: I0120 17:44:00.834447 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"86aff100-d474-48ed-b673-4dae7c0722cf","Type":"ContainerStarted","Data":"eb396e0181b1779a1ab284f1bdeb43ca4f283e2a736a3210e70deeb9de80c4c1"} Jan 20 17:44:00 crc kubenswrapper[4558]: I0120 17:44:00.840298 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"bda29809-3f10-45ff-8fea-3d72f5182e7a","Type":"ContainerStarted","Data":"bb74eaf3d9d63fe4bbfb3bcb86df029a7b879488573a5e69be6648db054cfb49"} Jan 20 17:44:00 crc kubenswrapper[4558]: W0120 17:44:00.901614 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podff0d0703_8173_4ac0_afc0_e673feaef286.slice/crio-fe15b8466909629ec54bddf0b729e623676456b8d764dc2e8e08622acf62dd38 WatchSource:0}: Error finding container fe15b8466909629ec54bddf0b729e623676456b8d764dc2e8e08622acf62dd38: Status 404 returned error can't find the container with id fe15b8466909629ec54bddf0b729e623676456b8d764dc2e8e08622acf62dd38 Jan 20 17:44:00 crc kubenswrapper[4558]: I0120 17:44:00.903419 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:44:00 crc kubenswrapper[4558]: I0120 17:44:00.983238 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-f7d8994df-dzxmh"] Jan 20 17:44:01 crc kubenswrapper[4558]: I0120 17:44:01.044696 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-7bf45b8998-2828m"] Jan 20 17:44:01 crc 
kubenswrapper[4558]: I0120 17:44:01.046578 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-7bf45b8998-2828m" Jan 20 17:44:01 crc kubenswrapper[4558]: I0120 17:44:01.059932 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-7bf45b8998-2828m"] Jan 20 17:44:01 crc kubenswrapper[4558]: I0120 17:44:01.178713 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/afc2fbf0-3153-4896-bfc2-bb2382600893-internal-tls-certs\") pod \"neutron-7bf45b8998-2828m\" (UID: \"afc2fbf0-3153-4896-bfc2-bb2382600893\") " pod="openstack-kuttl-tests/neutron-7bf45b8998-2828m" Jan 20 17:44:01 crc kubenswrapper[4558]: I0120 17:44:01.178755 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/afc2fbf0-3153-4896-bfc2-bb2382600893-httpd-config\") pod \"neutron-7bf45b8998-2828m\" (UID: \"afc2fbf0-3153-4896-bfc2-bb2382600893\") " pod="openstack-kuttl-tests/neutron-7bf45b8998-2828m" Jan 20 17:44:01 crc kubenswrapper[4558]: I0120 17:44:01.178775 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/afc2fbf0-3153-4896-bfc2-bb2382600893-combined-ca-bundle\") pod \"neutron-7bf45b8998-2828m\" (UID: \"afc2fbf0-3153-4896-bfc2-bb2382600893\") " pod="openstack-kuttl-tests/neutron-7bf45b8998-2828m" Jan 20 17:44:01 crc kubenswrapper[4558]: I0120 17:44:01.178822 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4t6pg\" (UniqueName: \"kubernetes.io/projected/afc2fbf0-3153-4896-bfc2-bb2382600893-kube-api-access-4t6pg\") pod \"neutron-7bf45b8998-2828m\" (UID: \"afc2fbf0-3153-4896-bfc2-bb2382600893\") " pod="openstack-kuttl-tests/neutron-7bf45b8998-2828m" Jan 20 17:44:01 crc kubenswrapper[4558]: I0120 17:44:01.178849 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/afc2fbf0-3153-4896-bfc2-bb2382600893-public-tls-certs\") pod \"neutron-7bf45b8998-2828m\" (UID: \"afc2fbf0-3153-4896-bfc2-bb2382600893\") " pod="openstack-kuttl-tests/neutron-7bf45b8998-2828m" Jan 20 17:44:01 crc kubenswrapper[4558]: I0120 17:44:01.178925 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/afc2fbf0-3153-4896-bfc2-bb2382600893-config\") pod \"neutron-7bf45b8998-2828m\" (UID: \"afc2fbf0-3153-4896-bfc2-bb2382600893\") " pod="openstack-kuttl-tests/neutron-7bf45b8998-2828m" Jan 20 17:44:01 crc kubenswrapper[4558]: I0120 17:44:01.178944 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/afc2fbf0-3153-4896-bfc2-bb2382600893-ovndb-tls-certs\") pod \"neutron-7bf45b8998-2828m\" (UID: \"afc2fbf0-3153-4896-bfc2-bb2382600893\") " pod="openstack-kuttl-tests/neutron-7bf45b8998-2828m" Jan 20 17:44:01 crc kubenswrapper[4558]: I0120 17:44:01.281125 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4t6pg\" (UniqueName: \"kubernetes.io/projected/afc2fbf0-3153-4896-bfc2-bb2382600893-kube-api-access-4t6pg\") pod \"neutron-7bf45b8998-2828m\" (UID: 
\"afc2fbf0-3153-4896-bfc2-bb2382600893\") " pod="openstack-kuttl-tests/neutron-7bf45b8998-2828m" Jan 20 17:44:01 crc kubenswrapper[4558]: I0120 17:44:01.281269 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/afc2fbf0-3153-4896-bfc2-bb2382600893-public-tls-certs\") pod \"neutron-7bf45b8998-2828m\" (UID: \"afc2fbf0-3153-4896-bfc2-bb2382600893\") " pod="openstack-kuttl-tests/neutron-7bf45b8998-2828m" Jan 20 17:44:01 crc kubenswrapper[4558]: I0120 17:44:01.281357 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/afc2fbf0-3153-4896-bfc2-bb2382600893-config\") pod \"neutron-7bf45b8998-2828m\" (UID: \"afc2fbf0-3153-4896-bfc2-bb2382600893\") " pod="openstack-kuttl-tests/neutron-7bf45b8998-2828m" Jan 20 17:44:01 crc kubenswrapper[4558]: I0120 17:44:01.281408 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/afc2fbf0-3153-4896-bfc2-bb2382600893-ovndb-tls-certs\") pod \"neutron-7bf45b8998-2828m\" (UID: \"afc2fbf0-3153-4896-bfc2-bb2382600893\") " pod="openstack-kuttl-tests/neutron-7bf45b8998-2828m" Jan 20 17:44:01 crc kubenswrapper[4558]: I0120 17:44:01.281489 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/afc2fbf0-3153-4896-bfc2-bb2382600893-internal-tls-certs\") pod \"neutron-7bf45b8998-2828m\" (UID: \"afc2fbf0-3153-4896-bfc2-bb2382600893\") " pod="openstack-kuttl-tests/neutron-7bf45b8998-2828m" Jan 20 17:44:01 crc kubenswrapper[4558]: I0120 17:44:01.281520 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/afc2fbf0-3153-4896-bfc2-bb2382600893-combined-ca-bundle\") pod \"neutron-7bf45b8998-2828m\" (UID: \"afc2fbf0-3153-4896-bfc2-bb2382600893\") " pod="openstack-kuttl-tests/neutron-7bf45b8998-2828m" Jan 20 17:44:01 crc kubenswrapper[4558]: I0120 17:44:01.282304 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/afc2fbf0-3153-4896-bfc2-bb2382600893-httpd-config\") pod \"neutron-7bf45b8998-2828m\" (UID: \"afc2fbf0-3153-4896-bfc2-bb2382600893\") " pod="openstack-kuttl-tests/neutron-7bf45b8998-2828m" Jan 20 17:44:01 crc kubenswrapper[4558]: I0120 17:44:01.288594 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/afc2fbf0-3153-4896-bfc2-bb2382600893-internal-tls-certs\") pod \"neutron-7bf45b8998-2828m\" (UID: \"afc2fbf0-3153-4896-bfc2-bb2382600893\") " pod="openstack-kuttl-tests/neutron-7bf45b8998-2828m" Jan 20 17:44:01 crc kubenswrapper[4558]: I0120 17:44:01.288934 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/afc2fbf0-3153-4896-bfc2-bb2382600893-ovndb-tls-certs\") pod \"neutron-7bf45b8998-2828m\" (UID: \"afc2fbf0-3153-4896-bfc2-bb2382600893\") " pod="openstack-kuttl-tests/neutron-7bf45b8998-2828m" Jan 20 17:44:01 crc kubenswrapper[4558]: I0120 17:44:01.297727 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/afc2fbf0-3153-4896-bfc2-bb2382600893-config\") pod \"neutron-7bf45b8998-2828m\" (UID: \"afc2fbf0-3153-4896-bfc2-bb2382600893\") " 
pod="openstack-kuttl-tests/neutron-7bf45b8998-2828m" Jan 20 17:44:01 crc kubenswrapper[4558]: I0120 17:44:01.297880 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/afc2fbf0-3153-4896-bfc2-bb2382600893-public-tls-certs\") pod \"neutron-7bf45b8998-2828m\" (UID: \"afc2fbf0-3153-4896-bfc2-bb2382600893\") " pod="openstack-kuttl-tests/neutron-7bf45b8998-2828m" Jan 20 17:44:01 crc kubenswrapper[4558]: I0120 17:44:01.298661 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/afc2fbf0-3153-4896-bfc2-bb2382600893-httpd-config\") pod \"neutron-7bf45b8998-2828m\" (UID: \"afc2fbf0-3153-4896-bfc2-bb2382600893\") " pod="openstack-kuttl-tests/neutron-7bf45b8998-2828m" Jan 20 17:44:01 crc kubenswrapper[4558]: I0120 17:44:01.300632 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/afc2fbf0-3153-4896-bfc2-bb2382600893-combined-ca-bundle\") pod \"neutron-7bf45b8998-2828m\" (UID: \"afc2fbf0-3153-4896-bfc2-bb2382600893\") " pod="openstack-kuttl-tests/neutron-7bf45b8998-2828m" Jan 20 17:44:01 crc kubenswrapper[4558]: I0120 17:44:01.301779 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4t6pg\" (UniqueName: \"kubernetes.io/projected/afc2fbf0-3153-4896-bfc2-bb2382600893-kube-api-access-4t6pg\") pod \"neutron-7bf45b8998-2828m\" (UID: \"afc2fbf0-3153-4896-bfc2-bb2382600893\") " pod="openstack-kuttl-tests/neutron-7bf45b8998-2828m" Jan 20 17:44:01 crc kubenswrapper[4558]: I0120 17:44:01.377301 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-7bf45b8998-2828m" Jan 20 17:44:01 crc kubenswrapper[4558]: I0120 17:44:01.822822 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/keystone-7f8885c56d-wsxms" Jan 20 17:44:01 crc kubenswrapper[4558]: I0120 17:44:01.851861 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"bda29809-3f10-45ff-8fea-3d72f5182e7a","Type":"ContainerStarted","Data":"668a1ae92bc40563b48efc1b28beb17af8908f733ffd3f92479647bd7d0d6282"} Jan 20 17:44:01 crc kubenswrapper[4558]: I0120 17:44:01.853327 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-f7d8994df-dzxmh" event={"ID":"5cbfd26c-728d-420c-9d40-b6f7870cff60","Type":"ContainerStarted","Data":"ffacb5c26d85f9daa0accf5b48ad054e1df3c95a5c4bb991f93025c2fc91b9b0"} Jan 20 17:44:01 crc kubenswrapper[4558]: I0120 17:44:01.853373 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-f7d8994df-dzxmh" event={"ID":"5cbfd26c-728d-420c-9d40-b6f7870cff60","Type":"ContainerStarted","Data":"14969461ef1c49e3f6027ed7a315f54b58ba88d461278fb5b80f7de2ea26a3e6"} Jan 20 17:44:01 crc kubenswrapper[4558]: I0120 17:44:01.853786 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/keystone-f7d8994df-dzxmh" Jan 20 17:44:01 crc kubenswrapper[4558]: I0120 17:44:01.855638 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"ff0d0703-8173-4ac0-afc0-e673feaef286","Type":"ContainerStarted","Data":"29a14fecb73a5e8e41c942ef00e6566817b5c2f5ec120abe410dce21f8142ccd"} Jan 20 17:44:01 crc kubenswrapper[4558]: I0120 17:44:01.855677 4558 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"ff0d0703-8173-4ac0-afc0-e673feaef286","Type":"ContainerStarted","Data":"eebdfbd12d120bd4d4adeb0cbba616d24ce5f4438b3c54b84c67c895df587e6d"} Jan 20 17:44:01 crc kubenswrapper[4558]: I0120 17:44:01.855690 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"ff0d0703-8173-4ac0-afc0-e673feaef286","Type":"ContainerStarted","Data":"fe15b8466909629ec54bddf0b729e623676456b8d764dc2e8e08622acf62dd38"} Jan 20 17:44:01 crc kubenswrapper[4558]: I0120 17:44:01.874600 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-7bf45b8998-2828m"] Jan 20 17:44:01 crc kubenswrapper[4558]: W0120 17:44:01.878898 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podafc2fbf0_3153_4896_bfc2_bb2382600893.slice/crio-ae80761ff7322eea37b8609645a45d0abd1c5268d3a0acf9da88e996c4933ea6 WatchSource:0}: Error finding container ae80761ff7322eea37b8609645a45d0abd1c5268d3a0acf9da88e996c4933ea6: Status 404 returned error can't find the container with id ae80761ff7322eea37b8609645a45d0abd1c5268d3a0acf9da88e996c4933ea6 Jan 20 17:44:01 crc kubenswrapper[4558]: I0120 17:44:01.880501 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/keystone-f7d8994df-dzxmh" podStartSLOduration=2.8804890370000003 podStartE2EDuration="2.880489037s" podCreationTimestamp="2026-01-20 17:43:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:44:01.867060178 +0000 UTC m=+3735.627398145" watchObservedRunningTime="2026-01-20 17:44:01.880489037 +0000 UTC m=+3735.640827004" Jan 20 17:44:01 crc kubenswrapper[4558]: I0120 17:44:01.891801 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-metadata-0" podStartSLOduration=2.891783405 podStartE2EDuration="2.891783405s" podCreationTimestamp="2026-01-20 17:43:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:44:01.890586924 +0000 UTC m=+3735.650924891" watchObservedRunningTime="2026-01-20 17:44:01.891783405 +0000 UTC m=+3735.652121371" Jan 20 17:44:02 crc kubenswrapper[4558]: I0120 17:44:02.154349 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-7bf45b8998-2828m"] Jan 20 17:44:02 crc kubenswrapper[4558]: I0120 17:44:02.196223 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-b9f9b6874-4cxdp"] Jan 20 17:44:02 crc kubenswrapper[4558]: I0120 17:44:02.198123 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-b9f9b6874-4cxdp" Jan 20 17:44:02 crc kubenswrapper[4558]: I0120 17:44:02.207810 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-b9f9b6874-4cxdp"] Jan 20 17:44:02 crc kubenswrapper[4558]: I0120 17:44:02.313426 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/8361ba4f-e976-4c04-82ad-81e9412ba84c-httpd-config\") pod \"neutron-b9f9b6874-4cxdp\" (UID: \"8361ba4f-e976-4c04-82ad-81e9412ba84c\") " pod="openstack-kuttl-tests/neutron-b9f9b6874-4cxdp" Jan 20 17:44:02 crc kubenswrapper[4558]: I0120 17:44:02.313544 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8361ba4f-e976-4c04-82ad-81e9412ba84c-combined-ca-bundle\") pod \"neutron-b9f9b6874-4cxdp\" (UID: \"8361ba4f-e976-4c04-82ad-81e9412ba84c\") " pod="openstack-kuttl-tests/neutron-b9f9b6874-4cxdp" Jan 20 17:44:02 crc kubenswrapper[4558]: I0120 17:44:02.313605 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8361ba4f-e976-4c04-82ad-81e9412ba84c-public-tls-certs\") pod \"neutron-b9f9b6874-4cxdp\" (UID: \"8361ba4f-e976-4c04-82ad-81e9412ba84c\") " pod="openstack-kuttl-tests/neutron-b9f9b6874-4cxdp" Jan 20 17:44:02 crc kubenswrapper[4558]: I0120 17:44:02.313663 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bkhrf\" (UniqueName: \"kubernetes.io/projected/8361ba4f-e976-4c04-82ad-81e9412ba84c-kube-api-access-bkhrf\") pod \"neutron-b9f9b6874-4cxdp\" (UID: \"8361ba4f-e976-4c04-82ad-81e9412ba84c\") " pod="openstack-kuttl-tests/neutron-b9f9b6874-4cxdp" Jan 20 17:44:02 crc kubenswrapper[4558]: I0120 17:44:02.313695 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/8361ba4f-e976-4c04-82ad-81e9412ba84c-ovndb-tls-certs\") pod \"neutron-b9f9b6874-4cxdp\" (UID: \"8361ba4f-e976-4c04-82ad-81e9412ba84c\") " pod="openstack-kuttl-tests/neutron-b9f9b6874-4cxdp" Jan 20 17:44:02 crc kubenswrapper[4558]: I0120 17:44:02.313769 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/8361ba4f-e976-4c04-82ad-81e9412ba84c-config\") pod \"neutron-b9f9b6874-4cxdp\" (UID: \"8361ba4f-e976-4c04-82ad-81e9412ba84c\") " pod="openstack-kuttl-tests/neutron-b9f9b6874-4cxdp" Jan 20 17:44:02 crc kubenswrapper[4558]: I0120 17:44:02.313809 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8361ba4f-e976-4c04-82ad-81e9412ba84c-internal-tls-certs\") pod \"neutron-b9f9b6874-4cxdp\" (UID: \"8361ba4f-e976-4c04-82ad-81e9412ba84c\") " pod="openstack-kuttl-tests/neutron-b9f9b6874-4cxdp" Jan 20 17:44:02 crc kubenswrapper[4558]: I0120 17:44:02.364482 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack-kuttl-tests/cinder-scheduler-0" podUID="7faabe16-9de5-49dc-bab6-44e173f4403c" containerName="cinder-scheduler" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 20 17:44:02 crc kubenswrapper[4558]: I0120 17:44:02.422436 4558 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8361ba4f-e976-4c04-82ad-81e9412ba84c-combined-ca-bundle\") pod \"neutron-b9f9b6874-4cxdp\" (UID: \"8361ba4f-e976-4c04-82ad-81e9412ba84c\") " pod="openstack-kuttl-tests/neutron-b9f9b6874-4cxdp" Jan 20 17:44:02 crc kubenswrapper[4558]: I0120 17:44:02.422558 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8361ba4f-e976-4c04-82ad-81e9412ba84c-public-tls-certs\") pod \"neutron-b9f9b6874-4cxdp\" (UID: \"8361ba4f-e976-4c04-82ad-81e9412ba84c\") " pod="openstack-kuttl-tests/neutron-b9f9b6874-4cxdp" Jan 20 17:44:02 crc kubenswrapper[4558]: I0120 17:44:02.422640 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bkhrf\" (UniqueName: \"kubernetes.io/projected/8361ba4f-e976-4c04-82ad-81e9412ba84c-kube-api-access-bkhrf\") pod \"neutron-b9f9b6874-4cxdp\" (UID: \"8361ba4f-e976-4c04-82ad-81e9412ba84c\") " pod="openstack-kuttl-tests/neutron-b9f9b6874-4cxdp" Jan 20 17:44:02 crc kubenswrapper[4558]: I0120 17:44:02.422738 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/8361ba4f-e976-4c04-82ad-81e9412ba84c-ovndb-tls-certs\") pod \"neutron-b9f9b6874-4cxdp\" (UID: \"8361ba4f-e976-4c04-82ad-81e9412ba84c\") " pod="openstack-kuttl-tests/neutron-b9f9b6874-4cxdp" Jan 20 17:44:02 crc kubenswrapper[4558]: I0120 17:44:02.422856 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/8361ba4f-e976-4c04-82ad-81e9412ba84c-config\") pod \"neutron-b9f9b6874-4cxdp\" (UID: \"8361ba4f-e976-4c04-82ad-81e9412ba84c\") " pod="openstack-kuttl-tests/neutron-b9f9b6874-4cxdp" Jan 20 17:44:02 crc kubenswrapper[4558]: I0120 17:44:02.422892 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8361ba4f-e976-4c04-82ad-81e9412ba84c-internal-tls-certs\") pod \"neutron-b9f9b6874-4cxdp\" (UID: \"8361ba4f-e976-4c04-82ad-81e9412ba84c\") " pod="openstack-kuttl-tests/neutron-b9f9b6874-4cxdp" Jan 20 17:44:02 crc kubenswrapper[4558]: I0120 17:44:02.423398 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/8361ba4f-e976-4c04-82ad-81e9412ba84c-httpd-config\") pod \"neutron-b9f9b6874-4cxdp\" (UID: \"8361ba4f-e976-4c04-82ad-81e9412ba84c\") " pod="openstack-kuttl-tests/neutron-b9f9b6874-4cxdp" Jan 20 17:44:02 crc kubenswrapper[4558]: I0120 17:44:02.429345 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8361ba4f-e976-4c04-82ad-81e9412ba84c-combined-ca-bundle\") pod \"neutron-b9f9b6874-4cxdp\" (UID: \"8361ba4f-e976-4c04-82ad-81e9412ba84c\") " pod="openstack-kuttl-tests/neutron-b9f9b6874-4cxdp" Jan 20 17:44:02 crc kubenswrapper[4558]: I0120 17:44:02.433241 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/8361ba4f-e976-4c04-82ad-81e9412ba84c-ovndb-tls-certs\") pod \"neutron-b9f9b6874-4cxdp\" (UID: \"8361ba4f-e976-4c04-82ad-81e9412ba84c\") " pod="openstack-kuttl-tests/neutron-b9f9b6874-4cxdp" Jan 20 17:44:02 crc kubenswrapper[4558]: I0120 17:44:02.438212 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8361ba4f-e976-4c04-82ad-81e9412ba84c-internal-tls-certs\") pod \"neutron-b9f9b6874-4cxdp\" (UID: \"8361ba4f-e976-4c04-82ad-81e9412ba84c\") " pod="openstack-kuttl-tests/neutron-b9f9b6874-4cxdp" Jan 20 17:44:02 crc kubenswrapper[4558]: I0120 17:44:02.438931 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/8361ba4f-e976-4c04-82ad-81e9412ba84c-httpd-config\") pod \"neutron-b9f9b6874-4cxdp\" (UID: \"8361ba4f-e976-4c04-82ad-81e9412ba84c\") " pod="openstack-kuttl-tests/neutron-b9f9b6874-4cxdp" Jan 20 17:44:02 crc kubenswrapper[4558]: I0120 17:44:02.440552 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/8361ba4f-e976-4c04-82ad-81e9412ba84c-config\") pod \"neutron-b9f9b6874-4cxdp\" (UID: \"8361ba4f-e976-4c04-82ad-81e9412ba84c\") " pod="openstack-kuttl-tests/neutron-b9f9b6874-4cxdp" Jan 20 17:44:02 crc kubenswrapper[4558]: I0120 17:44:02.441923 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8361ba4f-e976-4c04-82ad-81e9412ba84c-public-tls-certs\") pod \"neutron-b9f9b6874-4cxdp\" (UID: \"8361ba4f-e976-4c04-82ad-81e9412ba84c\") " pod="openstack-kuttl-tests/neutron-b9f9b6874-4cxdp" Jan 20 17:44:02 crc kubenswrapper[4558]: I0120 17:44:02.447643 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bkhrf\" (UniqueName: \"kubernetes.io/projected/8361ba4f-e976-4c04-82ad-81e9412ba84c-kube-api-access-bkhrf\") pod \"neutron-b9f9b6874-4cxdp\" (UID: \"8361ba4f-e976-4c04-82ad-81e9412ba84c\") " pod="openstack-kuttl-tests/neutron-b9f9b6874-4cxdp" Jan 20 17:44:02 crc kubenswrapper[4558]: I0120 17:44:02.643953 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-b9f9b6874-4cxdp" Jan 20 17:44:02 crc kubenswrapper[4558]: I0120 17:44:02.885990 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-7bf45b8998-2828m" event={"ID":"afc2fbf0-3153-4896-bfc2-bb2382600893","Type":"ContainerStarted","Data":"cf3321bb362cd3f716ec0caf474addff8cb6ca6125cea6c7cf914ee7d57f8965"} Jan 20 17:44:02 crc kubenswrapper[4558]: I0120 17:44:02.886072 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-7bf45b8998-2828m" podUID="afc2fbf0-3153-4896-bfc2-bb2382600893" containerName="neutron-api" containerID="cri-o://8a25668d015893accd83bc05a496d2095af9e2191988393567d5b69588227076" gracePeriod=30 Jan 20 17:44:02 crc kubenswrapper[4558]: I0120 17:44:02.886335 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-7bf45b8998-2828m" podUID="afc2fbf0-3153-4896-bfc2-bb2382600893" containerName="neutron-httpd" containerID="cri-o://cf3321bb362cd3f716ec0caf474addff8cb6ca6125cea6c7cf914ee7d57f8965" gracePeriod=30 Jan 20 17:44:02 crc kubenswrapper[4558]: I0120 17:44:02.886567 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/neutron-7bf45b8998-2828m" Jan 20 17:44:02 crc kubenswrapper[4558]: I0120 17:44:02.886622 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-7bf45b8998-2828m" event={"ID":"afc2fbf0-3153-4896-bfc2-bb2382600893","Type":"ContainerStarted","Data":"8a25668d015893accd83bc05a496d2095af9e2191988393567d5b69588227076"} Jan 20 17:44:02 crc kubenswrapper[4558]: I0120 17:44:02.886645 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-7bf45b8998-2828m" event={"ID":"afc2fbf0-3153-4896-bfc2-bb2382600893","Type":"ContainerStarted","Data":"ae80761ff7322eea37b8609645a45d0abd1c5268d3a0acf9da88e996c4933ea6"} Jan 20 17:44:02 crc kubenswrapper[4558]: I0120 17:44:02.900823 4558 generic.go:334] "Generic (PLEG): container finished" podID="86aff100-d474-48ed-b673-4dae7c0722cf" containerID="67cda9ffae9461498a3181421c8f34810fd6c215622487c92fe9030c49337aa1" exitCode=0 Jan 20 17:44:02 crc kubenswrapper[4558]: I0120 17:44:02.900889 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"86aff100-d474-48ed-b673-4dae7c0722cf","Type":"ContainerDied","Data":"67cda9ffae9461498a3181421c8f34810fd6c215622487c92fe9030c49337aa1"} Jan 20 17:44:02 crc kubenswrapper[4558]: I0120 17:44:02.915361 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"bda29809-3f10-45ff-8fea-3d72f5182e7a","Type":"ContainerStarted","Data":"0ad6dbb2fa84d1311b6f039beff159bd8b238efc373ccc984d847df4c505d640"} Jan 20 17:44:02 crc kubenswrapper[4558]: I0120 17:44:02.921977 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/neutron-7bf45b8998-2828m" podStartSLOduration=1.9219575880000002 podStartE2EDuration="1.921957588s" podCreationTimestamp="2026-01-20 17:44:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:44:02.903704588 +0000 UTC m=+3736.664042555" watchObservedRunningTime="2026-01-20 17:44:02.921957588 +0000 UTC m=+3736.682295555" Jan 20 17:44:03 crc kubenswrapper[4558]: I0120 17:44:03.105641 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack-kuttl-tests/neutron-b9f9b6874-4cxdp"] Jan 20 17:44:03 crc kubenswrapper[4558]: I0120 17:44:03.400448 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:44:03 crc kubenswrapper[4558]: I0120 17:44:03.400757 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" podUID="02038ce5-8236-4304-922b-39d5c9e2459e" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://3061bc017f3dd32adfde1ca3ae3220a662a18da25acbd815f7220bcb27f5d3e5" gracePeriod=30 Jan 20 17:44:03 crc kubenswrapper[4558]: I0120 17:44:03.964490 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"86aff100-d474-48ed-b673-4dae7c0722cf","Type":"ContainerStarted","Data":"f0fb0d6dfc95147b44b7be05faeba0d259d63b173ea99ddb9b134ce40f87cb33"} Jan 20 17:44:03 crc kubenswrapper[4558]: I0120 17:44:03.987644 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"bda29809-3f10-45ff-8fea-3d72f5182e7a","Type":"ContainerStarted","Data":"402c846f2a2392a0dd6b1cc212dd5c6cf7c9c1691b1c0f3fc55e7fb3430cf054"} Jan 20 17:44:04 crc kubenswrapper[4558]: I0120 17:44:04.000268 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/openstack-galera-0" podStartSLOduration=6.000256358 podStartE2EDuration="6.000256358s" podCreationTimestamp="2026-01-20 17:43:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:44:03.990601995 +0000 UTC m=+3737.750939963" watchObservedRunningTime="2026-01-20 17:44:04.000256358 +0000 UTC m=+3737.760594324" Jan 20 17:44:04 crc kubenswrapper[4558]: I0120 17:44:04.010635 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-b9f9b6874-4cxdp" event={"ID":"8361ba4f-e976-4c04-82ad-81e9412ba84c","Type":"ContainerStarted","Data":"df990974fa4d7bdeba24386c7a7548b9c42f83f4e37902446477a1561329d066"} Jan 20 17:44:04 crc kubenswrapper[4558]: I0120 17:44:04.010691 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-b9f9b6874-4cxdp" event={"ID":"8361ba4f-e976-4c04-82ad-81e9412ba84c","Type":"ContainerStarted","Data":"988e9fa129b4be2f46d004a547d691ae8c9171ce34c21f6c5e3dc859fbe3c5da"} Jan 20 17:44:04 crc kubenswrapper[4558]: I0120 17:44:04.010703 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-b9f9b6874-4cxdp" event={"ID":"8361ba4f-e976-4c04-82ad-81e9412ba84c","Type":"ContainerStarted","Data":"f84eb3dc0ed3f72a5dfdbe61bf42c4f075af06b8d4634598cdd8ae6cf3ba9956"} Jan 20 17:44:04 crc kubenswrapper[4558]: I0120 17:44:04.011261 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/neutron-b9f9b6874-4cxdp" Jan 20 17:44:04 crc kubenswrapper[4558]: I0120 17:44:04.045107 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/neutron-b9f9b6874-4cxdp" podStartSLOduration=2.04509207 podStartE2EDuration="2.04509207s" podCreationTimestamp="2026-01-20 17:44:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:44:04.040521866 +0000 UTC m=+3737.800859833" watchObservedRunningTime="2026-01-20 17:44:04.04509207 +0000 UTC m=+3737.805430037" Jan 20 17:44:04 
crc kubenswrapper[4558]: I0120 17:44:04.048819 4558 generic.go:334] "Generic (PLEG): container finished" podID="afc2fbf0-3153-4896-bfc2-bb2382600893" containerID="cf3321bb362cd3f716ec0caf474addff8cb6ca6125cea6c7cf914ee7d57f8965" exitCode=0 Jan 20 17:44:04 crc kubenswrapper[4558]: I0120 17:44:04.048871 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-7bf45b8998-2828m" event={"ID":"afc2fbf0-3153-4896-bfc2-bb2382600893","Type":"ContainerDied","Data":"cf3321bb362cd3f716ec0caf474addff8cb6ca6125cea6c7cf914ee7d57f8965"} Jan 20 17:44:04 crc kubenswrapper[4558]: I0120 17:44:04.283902 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" podUID="02038ce5-8236-4304-922b-39d5c9e2459e" containerName="nova-cell1-novncproxy-novncproxy" probeResult="failure" output="Get \"https://10.217.1.36:6080/vnc_lite.html\": dial tcp 10.217.1.36:6080: connect: connection refused" Jan 20 17:44:04 crc kubenswrapper[4558]: I0120 17:44:04.510218 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:44:04 crc kubenswrapper[4558]: I0120 17:44:04.682529 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02038ce5-8236-4304-922b-39d5c9e2459e-config-data\") pod \"02038ce5-8236-4304-922b-39d5c9e2459e\" (UID: \"02038ce5-8236-4304-922b-39d5c9e2459e\") " Jan 20 17:44:04 crc kubenswrapper[4558]: I0120 17:44:04.683761 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-76xbv\" (UniqueName: \"kubernetes.io/projected/02038ce5-8236-4304-922b-39d5c9e2459e-kube-api-access-76xbv\") pod \"02038ce5-8236-4304-922b-39d5c9e2459e\" (UID: \"02038ce5-8236-4304-922b-39d5c9e2459e\") " Jan 20 17:44:04 crc kubenswrapper[4558]: I0120 17:44:04.683824 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02038ce5-8236-4304-922b-39d5c9e2459e-combined-ca-bundle\") pod \"02038ce5-8236-4304-922b-39d5c9e2459e\" (UID: \"02038ce5-8236-4304-922b-39d5c9e2459e\") " Jan 20 17:44:04 crc kubenswrapper[4558]: I0120 17:44:04.683867 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/02038ce5-8236-4304-922b-39d5c9e2459e-nova-novncproxy-tls-certs\") pod \"02038ce5-8236-4304-922b-39d5c9e2459e\" (UID: \"02038ce5-8236-4304-922b-39d5c9e2459e\") " Jan 20 17:44:04 crc kubenswrapper[4558]: I0120 17:44:04.684003 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/02038ce5-8236-4304-922b-39d5c9e2459e-vencrypt-tls-certs\") pod \"02038ce5-8236-4304-922b-39d5c9e2459e\" (UID: \"02038ce5-8236-4304-922b-39d5c9e2459e\") " Jan 20 17:44:04 crc kubenswrapper[4558]: I0120 17:44:04.735849 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/02038ce5-8236-4304-922b-39d5c9e2459e-kube-api-access-76xbv" (OuterVolumeSpecName: "kube-api-access-76xbv") pod "02038ce5-8236-4304-922b-39d5c9e2459e" (UID: "02038ce5-8236-4304-922b-39d5c9e2459e"). InnerVolumeSpecName "kube-api-access-76xbv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:44:04 crc kubenswrapper[4558]: I0120 17:44:04.748715 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02038ce5-8236-4304-922b-39d5c9e2459e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "02038ce5-8236-4304-922b-39d5c9e2459e" (UID: "02038ce5-8236-4304-922b-39d5c9e2459e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:44:04 crc kubenswrapper[4558]: I0120 17:44:04.749304 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02038ce5-8236-4304-922b-39d5c9e2459e-config-data" (OuterVolumeSpecName: "config-data") pod "02038ce5-8236-4304-922b-39d5c9e2459e" (UID: "02038ce5-8236-4304-922b-39d5c9e2459e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:44:04 crc kubenswrapper[4558]: I0120 17:44:04.772307 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02038ce5-8236-4304-922b-39d5c9e2459e-vencrypt-tls-certs" (OuterVolumeSpecName: "vencrypt-tls-certs") pod "02038ce5-8236-4304-922b-39d5c9e2459e" (UID: "02038ce5-8236-4304-922b-39d5c9e2459e"). InnerVolumeSpecName "vencrypt-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:44:04 crc kubenswrapper[4558]: I0120 17:44:04.787739 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02038ce5-8236-4304-922b-39d5c9e2459e-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:04 crc kubenswrapper[4558]: I0120 17:44:04.787767 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-76xbv\" (UniqueName: \"kubernetes.io/projected/02038ce5-8236-4304-922b-39d5c9e2459e-kube-api-access-76xbv\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:04 crc kubenswrapper[4558]: I0120 17:44:04.787780 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02038ce5-8236-4304-922b-39d5c9e2459e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:04 crc kubenswrapper[4558]: I0120 17:44:04.787810 4558 reconciler_common.go:293] "Volume detached for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/02038ce5-8236-4304-922b-39d5c9e2459e-vencrypt-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:04 crc kubenswrapper[4558]: I0120 17:44:04.790328 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02038ce5-8236-4304-922b-39d5c9e2459e-nova-novncproxy-tls-certs" (OuterVolumeSpecName: "nova-novncproxy-tls-certs") pod "02038ce5-8236-4304-922b-39d5c9e2459e" (UID: "02038ce5-8236-4304-922b-39d5c9e2459e"). InnerVolumeSpecName "nova-novncproxy-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:44:04 crc kubenswrapper[4558]: I0120 17:44:04.889872 4558 reconciler_common.go:293] "Volume detached for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/02038ce5-8236-4304-922b-39d5c9e2459e-nova-novncproxy-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:05 crc kubenswrapper[4558]: I0120 17:44:05.060089 4558 generic.go:334] "Generic (PLEG): container finished" podID="02038ce5-8236-4304-922b-39d5c9e2459e" containerID="3061bc017f3dd32adfde1ca3ae3220a662a18da25acbd815f7220bcb27f5d3e5" exitCode=0 Jan 20 17:44:05 crc kubenswrapper[4558]: I0120 17:44:05.060197 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:44:05 crc kubenswrapper[4558]: I0120 17:44:05.060217 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"02038ce5-8236-4304-922b-39d5c9e2459e","Type":"ContainerDied","Data":"3061bc017f3dd32adfde1ca3ae3220a662a18da25acbd815f7220bcb27f5d3e5"} Jan 20 17:44:05 crc kubenswrapper[4558]: I0120 17:44:05.060488 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"02038ce5-8236-4304-922b-39d5c9e2459e","Type":"ContainerDied","Data":"ae3e4535b918315e06d397772c071a730e991089e1ec0d98521e62d3b2eac170"} Jan 20 17:44:05 crc kubenswrapper[4558]: I0120 17:44:05.060515 4558 scope.go:117] "RemoveContainer" containerID="3061bc017f3dd32adfde1ca3ae3220a662a18da25acbd815f7220bcb27f5d3e5" Jan 20 17:44:05 crc kubenswrapper[4558]: I0120 17:44:05.063647 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"bda29809-3f10-45ff-8fea-3d72f5182e7a","Type":"ContainerStarted","Data":"8389290c9818f3ab0ed996cf1df18a17f4bc86f2ddee6a28823c5e3d138e7667"} Jan 20 17:44:05 crc kubenswrapper[4558]: I0120 17:44:05.082499 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=2.53562462 podStartE2EDuration="6.082478144s" podCreationTimestamp="2026-01-20 17:43:59 +0000 UTC" firstStartedPulling="2026-01-20 17:44:00.798513454 +0000 UTC m=+3734.558851422" lastFinishedPulling="2026-01-20 17:44:04.345366979 +0000 UTC m=+3738.105704946" observedRunningTime="2026-01-20 17:44:05.081262798 +0000 UTC m=+3738.841600766" watchObservedRunningTime="2026-01-20 17:44:05.082478144 +0000 UTC m=+3738.842816111" Jan 20 17:44:05 crc kubenswrapper[4558]: I0120 17:44:05.102338 4558 scope.go:117] "RemoveContainer" containerID="3061bc017f3dd32adfde1ca3ae3220a662a18da25acbd815f7220bcb27f5d3e5" Jan 20 17:44:05 crc kubenswrapper[4558]: E0120 17:44:05.110349 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3061bc017f3dd32adfde1ca3ae3220a662a18da25acbd815f7220bcb27f5d3e5\": container with ID starting with 3061bc017f3dd32adfde1ca3ae3220a662a18da25acbd815f7220bcb27f5d3e5 not found: ID does not exist" containerID="3061bc017f3dd32adfde1ca3ae3220a662a18da25acbd815f7220bcb27f5d3e5" Jan 20 17:44:05 crc kubenswrapper[4558]: I0120 17:44:05.110407 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3061bc017f3dd32adfde1ca3ae3220a662a18da25acbd815f7220bcb27f5d3e5"} err="failed to get container status \"3061bc017f3dd32adfde1ca3ae3220a662a18da25acbd815f7220bcb27f5d3e5\": rpc error: code = NotFound desc 
= could not find container \"3061bc017f3dd32adfde1ca3ae3220a662a18da25acbd815f7220bcb27f5d3e5\": container with ID starting with 3061bc017f3dd32adfde1ca3ae3220a662a18da25acbd815f7220bcb27f5d3e5 not found: ID does not exist" Jan 20 17:44:05 crc kubenswrapper[4558]: I0120 17:44:05.114218 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:44:05 crc kubenswrapper[4558]: I0120 17:44:05.171675 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:44:05 crc kubenswrapper[4558]: I0120 17:44:05.183895 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:44:05 crc kubenswrapper[4558]: E0120 17:44:05.184399 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02038ce5-8236-4304-922b-39d5c9e2459e" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:44:05 crc kubenswrapper[4558]: I0120 17:44:05.184419 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="02038ce5-8236-4304-922b-39d5c9e2459e" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:44:05 crc kubenswrapper[4558]: I0120 17:44:05.184652 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="02038ce5-8236-4304-922b-39d5c9e2459e" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:44:05 crc kubenswrapper[4558]: I0120 17:44:05.185549 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:44:05 crc kubenswrapper[4558]: I0120 17:44:05.187543 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-novncproxy-cell1-public-svc" Jan 20 17:44:05 crc kubenswrapper[4558]: I0120 17:44:05.187730 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-novncproxy-config-data" Jan 20 17:44:05 crc kubenswrapper[4558]: I0120 17:44:05.187867 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-novncproxy-cell1-vencrypt" Jan 20 17:44:05 crc kubenswrapper[4558]: I0120 17:44:05.195201 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:44:05 crc kubenswrapper[4558]: I0120 17:44:05.219497 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:44:05 crc kubenswrapper[4558]: I0120 17:44:05.219628 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:44:05 crc kubenswrapper[4558]: I0120 17:44:05.298891 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/423502e9-61bd-4da9-953c-48ab84b4f004-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"423502e9-61bd-4da9-953c-48ab84b4f004\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:44:05 crc kubenswrapper[4558]: I0120 17:44:05.299109 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/423502e9-61bd-4da9-953c-48ab84b4f004-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"423502e9-61bd-4da9-953c-48ab84b4f004\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:44:05 crc 
kubenswrapper[4558]: I0120 17:44:05.299232 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/423502e9-61bd-4da9-953c-48ab84b4f004-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"423502e9-61bd-4da9-953c-48ab84b4f004\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:44:05 crc kubenswrapper[4558]: I0120 17:44:05.299484 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/423502e9-61bd-4da9-953c-48ab84b4f004-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"423502e9-61bd-4da9-953c-48ab84b4f004\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:44:05 crc kubenswrapper[4558]: I0120 17:44:05.300439 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s4rpp\" (UniqueName: \"kubernetes.io/projected/423502e9-61bd-4da9-953c-48ab84b4f004-kube-api-access-s4rpp\") pod \"nova-cell1-novncproxy-0\" (UID: \"423502e9-61bd-4da9-953c-48ab84b4f004\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:44:05 crc kubenswrapper[4558]: I0120 17:44:05.330819 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack-kuttl-tests/cinder-scheduler-0" podUID="7faabe16-9de5-49dc-bab6-44e173f4403c" containerName="cinder-scheduler" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 20 17:44:05 crc kubenswrapper[4558]: I0120 17:44:05.330931 4558 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:44:05 crc kubenswrapper[4558]: I0120 17:44:05.332074 4558 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="cinder-scheduler" containerStatusID={"Type":"cri-o","ID":"88e09b52c88812df27321ee3632982258e7318dfda1d7dfa673a282b0d3aea31"} pod="openstack-kuttl-tests/cinder-scheduler-0" containerMessage="Container cinder-scheduler failed liveness probe, will be restarted" Jan 20 17:44:05 crc kubenswrapper[4558]: I0120 17:44:05.332142 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-scheduler-0" podUID="7faabe16-9de5-49dc-bab6-44e173f4403c" containerName="cinder-scheduler" containerID="cri-o://88e09b52c88812df27321ee3632982258e7318dfda1d7dfa673a282b0d3aea31" gracePeriod=30 Jan 20 17:44:05 crc kubenswrapper[4558]: I0120 17:44:05.403210 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/423502e9-61bd-4da9-953c-48ab84b4f004-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"423502e9-61bd-4da9-953c-48ab84b4f004\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:44:05 crc kubenswrapper[4558]: I0120 17:44:05.403326 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/423502e9-61bd-4da9-953c-48ab84b4f004-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"423502e9-61bd-4da9-953c-48ab84b4f004\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:44:05 crc kubenswrapper[4558]: I0120 17:44:05.403386 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/423502e9-61bd-4da9-953c-48ab84b4f004-config-data\") pod 
\"nova-cell1-novncproxy-0\" (UID: \"423502e9-61bd-4da9-953c-48ab84b4f004\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:44:05 crc kubenswrapper[4558]: I0120 17:44:05.403588 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/423502e9-61bd-4da9-953c-48ab84b4f004-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"423502e9-61bd-4da9-953c-48ab84b4f004\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:44:05 crc kubenswrapper[4558]: I0120 17:44:05.403758 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s4rpp\" (UniqueName: \"kubernetes.io/projected/423502e9-61bd-4da9-953c-48ab84b4f004-kube-api-access-s4rpp\") pod \"nova-cell1-novncproxy-0\" (UID: \"423502e9-61bd-4da9-953c-48ab84b4f004\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:44:05 crc kubenswrapper[4558]: I0120 17:44:05.408150 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/423502e9-61bd-4da9-953c-48ab84b4f004-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"423502e9-61bd-4da9-953c-48ab84b4f004\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:44:05 crc kubenswrapper[4558]: I0120 17:44:05.410638 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/423502e9-61bd-4da9-953c-48ab84b4f004-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"423502e9-61bd-4da9-953c-48ab84b4f004\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:44:05 crc kubenswrapper[4558]: I0120 17:44:05.415681 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/423502e9-61bd-4da9-953c-48ab84b4f004-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"423502e9-61bd-4da9-953c-48ab84b4f004\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:44:05 crc kubenswrapper[4558]: I0120 17:44:05.416637 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/423502e9-61bd-4da9-953c-48ab84b4f004-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"423502e9-61bd-4da9-953c-48ab84b4f004\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:44:05 crc kubenswrapper[4558]: I0120 17:44:05.418811 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s4rpp\" (UniqueName: \"kubernetes.io/projected/423502e9-61bd-4da9-953c-48ab84b4f004-kube-api-access-s4rpp\") pod \"nova-cell1-novncproxy-0\" (UID: \"423502e9-61bd-4da9-953c-48ab84b4f004\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:44:05 crc kubenswrapper[4558]: I0120 17:44:05.506507 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:44:05 crc kubenswrapper[4558]: I0120 17:44:05.807666 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:44:06 crc kubenswrapper[4558]: I0120 17:44:06.081559 4558 generic.go:334] "Generic (PLEG): container finished" podID="b911f497-5616-416f-8aa2-73c7cce172eb" containerID="2d6106f888df62c14a5adbf7426959067a2a5587b57672909aebbf2d13dffda8" exitCode=0 Jan 20 17:44:06 crc kubenswrapper[4558]: I0120 17:44:06.081895 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-7f8885c56d-wsxms" event={"ID":"b911f497-5616-416f-8aa2-73c7cce172eb","Type":"ContainerDied","Data":"2d6106f888df62c14a5adbf7426959067a2a5587b57672909aebbf2d13dffda8"} Jan 20 17:44:06 crc kubenswrapper[4558]: I0120 17:44:06.088643 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-7f8885c56d-wsxms" Jan 20 17:44:06 crc kubenswrapper[4558]: I0120 17:44:06.089522 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"423502e9-61bd-4da9-953c-48ab84b4f004","Type":"ContainerStarted","Data":"5bb50dc1e160060e179121c814e65b7d5b022719607f19c151a7253e11a56151"} Jan 20 17:44:06 crc kubenswrapper[4558]: I0120 17:44:06.089652 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"423502e9-61bd-4da9-953c-48ab84b4f004","Type":"ContainerStarted","Data":"e265a3027052578336421a314a64da3b23502ab81bfed63b767f27088c202463"} Jan 20 17:44:06 crc kubenswrapper[4558]: I0120 17:44:06.091718 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:44:06 crc kubenswrapper[4558]: I0120 17:44:06.135619 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" podStartSLOduration=1.135604323 podStartE2EDuration="1.135604323s" podCreationTimestamp="2026-01-20 17:44:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:44:06.119279318 +0000 UTC m=+3739.879617275" watchObservedRunningTime="2026-01-20 17:44:06.135604323 +0000 UTC m=+3739.895942290" Jan 20 17:44:06 crc kubenswrapper[4558]: I0120 17:44:06.227528 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b911f497-5616-416f-8aa2-73c7cce172eb-credential-keys\") pod \"b911f497-5616-416f-8aa2-73c7cce172eb\" (UID: \"b911f497-5616-416f-8aa2-73c7cce172eb\") " Jan 20 17:44:06 crc kubenswrapper[4558]: I0120 17:44:06.227594 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b911f497-5616-416f-8aa2-73c7cce172eb-config-data\") pod \"b911f497-5616-416f-8aa2-73c7cce172eb\" (UID: \"b911f497-5616-416f-8aa2-73c7cce172eb\") " Jan 20 17:44:06 crc kubenswrapper[4558]: I0120 17:44:06.227705 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b911f497-5616-416f-8aa2-73c7cce172eb-public-tls-certs\") pod \"b911f497-5616-416f-8aa2-73c7cce172eb\" (UID: \"b911f497-5616-416f-8aa2-73c7cce172eb\") " Jan 20 17:44:06 crc kubenswrapper[4558]: I0120 17:44:06.227762 4558 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b911f497-5616-416f-8aa2-73c7cce172eb-combined-ca-bundle\") pod \"b911f497-5616-416f-8aa2-73c7cce172eb\" (UID: \"b911f497-5616-416f-8aa2-73c7cce172eb\") " Jan 20 17:44:06 crc kubenswrapper[4558]: I0120 17:44:06.227797 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b911f497-5616-416f-8aa2-73c7cce172eb-scripts\") pod \"b911f497-5616-416f-8aa2-73c7cce172eb\" (UID: \"b911f497-5616-416f-8aa2-73c7cce172eb\") " Jan 20 17:44:06 crc kubenswrapper[4558]: I0120 17:44:06.227847 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rdh88\" (UniqueName: \"kubernetes.io/projected/b911f497-5616-416f-8aa2-73c7cce172eb-kube-api-access-rdh88\") pod \"b911f497-5616-416f-8aa2-73c7cce172eb\" (UID: \"b911f497-5616-416f-8aa2-73c7cce172eb\") " Jan 20 17:44:06 crc kubenswrapper[4558]: I0120 17:44:06.227965 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b911f497-5616-416f-8aa2-73c7cce172eb-internal-tls-certs\") pod \"b911f497-5616-416f-8aa2-73c7cce172eb\" (UID: \"b911f497-5616-416f-8aa2-73c7cce172eb\") " Jan 20 17:44:06 crc kubenswrapper[4558]: I0120 17:44:06.228050 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b911f497-5616-416f-8aa2-73c7cce172eb-fernet-keys\") pod \"b911f497-5616-416f-8aa2-73c7cce172eb\" (UID: \"b911f497-5616-416f-8aa2-73c7cce172eb\") " Jan 20 17:44:06 crc kubenswrapper[4558]: I0120 17:44:06.239555 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b911f497-5616-416f-8aa2-73c7cce172eb-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "b911f497-5616-416f-8aa2-73c7cce172eb" (UID: "b911f497-5616-416f-8aa2-73c7cce172eb"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:44:06 crc kubenswrapper[4558]: I0120 17:44:06.240615 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b911f497-5616-416f-8aa2-73c7cce172eb-kube-api-access-rdh88" (OuterVolumeSpecName: "kube-api-access-rdh88") pod "b911f497-5616-416f-8aa2-73c7cce172eb" (UID: "b911f497-5616-416f-8aa2-73c7cce172eb"). InnerVolumeSpecName "kube-api-access-rdh88". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:44:06 crc kubenswrapper[4558]: I0120 17:44:06.240722 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b911f497-5616-416f-8aa2-73c7cce172eb-scripts" (OuterVolumeSpecName: "scripts") pod "b911f497-5616-416f-8aa2-73c7cce172eb" (UID: "b911f497-5616-416f-8aa2-73c7cce172eb"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:44:06 crc kubenswrapper[4558]: I0120 17:44:06.244250 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b911f497-5616-416f-8aa2-73c7cce172eb-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "b911f497-5616-416f-8aa2-73c7cce172eb" (UID: "b911f497-5616-416f-8aa2-73c7cce172eb"). InnerVolumeSpecName "fernet-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:44:06 crc kubenswrapper[4558]: I0120 17:44:06.266428 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b911f497-5616-416f-8aa2-73c7cce172eb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b911f497-5616-416f-8aa2-73c7cce172eb" (UID: "b911f497-5616-416f-8aa2-73c7cce172eb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:44:06 crc kubenswrapper[4558]: I0120 17:44:06.268199 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b911f497-5616-416f-8aa2-73c7cce172eb-config-data" (OuterVolumeSpecName: "config-data") pod "b911f497-5616-416f-8aa2-73c7cce172eb" (UID: "b911f497-5616-416f-8aa2-73c7cce172eb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:44:06 crc kubenswrapper[4558]: I0120 17:44:06.287377 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b911f497-5616-416f-8aa2-73c7cce172eb-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "b911f497-5616-416f-8aa2-73c7cce172eb" (UID: "b911f497-5616-416f-8aa2-73c7cce172eb"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:44:06 crc kubenswrapper[4558]: I0120 17:44:06.299012 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b911f497-5616-416f-8aa2-73c7cce172eb-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "b911f497-5616-416f-8aa2-73c7cce172eb" (UID: "b911f497-5616-416f-8aa2-73c7cce172eb"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:44:06 crc kubenswrapper[4558]: I0120 17:44:06.331830 4558 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b911f497-5616-416f-8aa2-73c7cce172eb-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:06 crc kubenswrapper[4558]: I0120 17:44:06.331862 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b911f497-5616-416f-8aa2-73c7cce172eb-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:06 crc kubenswrapper[4558]: I0120 17:44:06.331872 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b911f497-5616-416f-8aa2-73c7cce172eb-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:06 crc kubenswrapper[4558]: I0120 17:44:06.331884 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b911f497-5616-416f-8aa2-73c7cce172eb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:06 crc kubenswrapper[4558]: I0120 17:44:06.331892 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b911f497-5616-416f-8aa2-73c7cce172eb-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:06 crc kubenswrapper[4558]: I0120 17:44:06.331902 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rdh88\" (UniqueName: \"kubernetes.io/projected/b911f497-5616-416f-8aa2-73c7cce172eb-kube-api-access-rdh88\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:06 crc kubenswrapper[4558]: I0120 17:44:06.331923 4558 reconciler_common.go:293] "Volume detached for volume 
\"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b911f497-5616-416f-8aa2-73c7cce172eb-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:06 crc kubenswrapper[4558]: I0120 17:44:06.331931 4558 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b911f497-5616-416f-8aa2-73c7cce172eb-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:06 crc kubenswrapper[4558]: I0120 17:44:06.577210 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="02038ce5-8236-4304-922b-39d5c9e2459e" path="/var/lib/kubelet/pods/02038ce5-8236-4304-922b-39d5c9e2459e/volumes" Jan 20 17:44:06 crc kubenswrapper[4558]: I0120 17:44:06.694332 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack-kuttl-tests/barbican-api-74bb4f5f4-5ppmg" podUID="d619cd37-a474-4965-b382-749ed6d55d6d" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.1.61:9311/healthcheck\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:44:06 crc kubenswrapper[4558]: I0120 17:44:06.694362 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack-kuttl-tests/barbican-api-74bb4f5f4-5ppmg" podUID="d619cd37-a474-4965-b382-749ed6d55d6d" containerName="barbican-api" probeResult="failure" output="Get \"https://10.217.1.61:9311/healthcheck\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:44:07 crc kubenswrapper[4558]: I0120 17:44:07.111375 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-7f8885c56d-wsxms" event={"ID":"b911f497-5616-416f-8aa2-73c7cce172eb","Type":"ContainerDied","Data":"343e48f725b8296c7153d9b50c1479be2bc4ff75469989667da521990a0d8174"} Jan 20 17:44:07 crc kubenswrapper[4558]: I0120 17:44:07.111445 4558 scope.go:117] "RemoveContainer" containerID="2d6106f888df62c14a5adbf7426959067a2a5587b57672909aebbf2d13dffda8" Jan 20 17:44:07 crc kubenswrapper[4558]: I0120 17:44:07.111635 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-7f8885c56d-wsxms" Jan 20 17:44:07 crc kubenswrapper[4558]: I0120 17:44:07.138746 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-7f8885c56d-wsxms"] Jan 20 17:44:07 crc kubenswrapper[4558]: I0120 17:44:07.147790 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-7f8885c56d-wsxms"] Jan 20 17:44:08 crc kubenswrapper[4558]: I0120 17:44:08.576556 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b911f497-5616-416f-8aa2-73c7cce172eb" path="/var/lib/kubelet/pods/b911f497-5616-416f-8aa2-73c7cce172eb/volumes" Jan 20 17:44:08 crc kubenswrapper[4558]: I0120 17:44:08.890370 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack-kuttl-tests/cinder-api-0" podUID="b752af55-9c06-469f-9353-e1042300de3c" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.1.60:8776/healthcheck\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:44:09 crc kubenswrapper[4558]: I0120 17:44:09.136254 4558 generic.go:334] "Generic (PLEG): container finished" podID="7faabe16-9de5-49dc-bab6-44e173f4403c" containerID="88e09b52c88812df27321ee3632982258e7318dfda1d7dfa673a282b0d3aea31" exitCode=0 Jan 20 17:44:09 crc kubenswrapper[4558]: I0120 17:44:09.136312 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"7faabe16-9de5-49dc-bab6-44e173f4403c","Type":"ContainerDied","Data":"88e09b52c88812df27321ee3632982258e7318dfda1d7dfa673a282b0d3aea31"} Jan 20 17:44:09 crc kubenswrapper[4558]: I0120 17:44:09.511124 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:44:09 crc kubenswrapper[4558]: I0120 17:44:09.511502 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:44:09 crc kubenswrapper[4558]: I0120 17:44:09.567262 4558 scope.go:117] "RemoveContainer" containerID="2d2a8989ca5a5af98b2c9dc8f801ea0675339ec14800f437c058c5a43c20146b" Jan 20 17:44:09 crc kubenswrapper[4558]: E0120 17:44:09.567632 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:44:09 crc kubenswrapper[4558]: I0120 17:44:09.593050 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:44:09 crc kubenswrapper[4558]: I0120 17:44:09.770007 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:44:09 crc kubenswrapper[4558]: I0120 17:44:09.889301 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/cinder-api-0" podUID="b752af55-9c06-469f-9353-e1042300de3c" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.1.60:8776/healthcheck\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:44:09 crc kubenswrapper[4558]: I0120 17:44:09.937525 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4ea608e7-9a19-47fb-8e14-a629451e7c03-logs\") pod \"4ea608e7-9a19-47fb-8e14-a629451e7c03\" (UID: \"4ea608e7-9a19-47fb-8e14-a629451e7c03\") " Jan 20 17:44:09 crc kubenswrapper[4558]: I0120 17:44:09.937646 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vb7nd\" (UniqueName: \"kubernetes.io/projected/4ea608e7-9a19-47fb-8e14-a629451e7c03-kube-api-access-vb7nd\") pod \"4ea608e7-9a19-47fb-8e14-a629451e7c03\" (UID: \"4ea608e7-9a19-47fb-8e14-a629451e7c03\") " Jan 20 17:44:09 crc kubenswrapper[4558]: I0120 17:44:09.937805 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4ea608e7-9a19-47fb-8e14-a629451e7c03-internal-tls-certs\") pod \"4ea608e7-9a19-47fb-8e14-a629451e7c03\" (UID: \"4ea608e7-9a19-47fb-8e14-a629451e7c03\") " Jan 20 17:44:09 crc kubenswrapper[4558]: I0120 17:44:09.937888 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4ea608e7-9a19-47fb-8e14-a629451e7c03-public-tls-certs\") pod \"4ea608e7-9a19-47fb-8e14-a629451e7c03\" (UID: \"4ea608e7-9a19-47fb-8e14-a629451e7c03\") " Jan 20 17:44:09 crc kubenswrapper[4558]: I0120 17:44:09.938063 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ea608e7-9a19-47fb-8e14-a629451e7c03-combined-ca-bundle\") pod \"4ea608e7-9a19-47fb-8e14-a629451e7c03\" (UID: \"4ea608e7-9a19-47fb-8e14-a629451e7c03\") " Jan 20 17:44:09 crc kubenswrapper[4558]: I0120 17:44:09.938183 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4ea608e7-9a19-47fb-8e14-a629451e7c03-config-data\") pod \"4ea608e7-9a19-47fb-8e14-a629451e7c03\" (UID: \"4ea608e7-9a19-47fb-8e14-a629451e7c03\") " Jan 20 17:44:09 crc kubenswrapper[4558]: I0120 17:44:09.939851 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4ea608e7-9a19-47fb-8e14-a629451e7c03-logs" (OuterVolumeSpecName: "logs") pod "4ea608e7-9a19-47fb-8e14-a629451e7c03" (UID: "4ea608e7-9a19-47fb-8e14-a629451e7c03"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:44:09 crc kubenswrapper[4558]: I0120 17:44:09.947463 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4ea608e7-9a19-47fb-8e14-a629451e7c03-kube-api-access-vb7nd" (OuterVolumeSpecName: "kube-api-access-vb7nd") pod "4ea608e7-9a19-47fb-8e14-a629451e7c03" (UID: "4ea608e7-9a19-47fb-8e14-a629451e7c03"). InnerVolumeSpecName "kube-api-access-vb7nd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:44:09 crc kubenswrapper[4558]: I0120 17:44:09.970287 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4ea608e7-9a19-47fb-8e14-a629451e7c03-config-data" (OuterVolumeSpecName: "config-data") pod "4ea608e7-9a19-47fb-8e14-a629451e7c03" (UID: "4ea608e7-9a19-47fb-8e14-a629451e7c03"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:44:09 crc kubenswrapper[4558]: I0120 17:44:09.973723 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4ea608e7-9a19-47fb-8e14-a629451e7c03-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4ea608e7-9a19-47fb-8e14-a629451e7c03" (UID: "4ea608e7-9a19-47fb-8e14-a629451e7c03"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:44:09 crc kubenswrapper[4558]: E0120 17:44:09.994750 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4ea608e7-9a19-47fb-8e14-a629451e7c03-public-tls-certs podName:4ea608e7-9a19-47fb-8e14-a629451e7c03 nodeName:}" failed. No retries permitted until 2026-01-20 17:44:10.494717029 +0000 UTC m=+3744.255054996 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/4ea608e7-9a19-47fb-8e14-a629451e7c03-public-tls-certs") pod "4ea608e7-9a19-47fb-8e14-a629451e7c03" (UID: "4ea608e7-9a19-47fb-8e14-a629451e7c03") : error deleting /var/lib/kubelet/pods/4ea608e7-9a19-47fb-8e14-a629451e7c03/volume-subpaths: remove /var/lib/kubelet/pods/4ea608e7-9a19-47fb-8e14-a629451e7c03/volume-subpaths: no such file or directory Jan 20 17:44:09 crc kubenswrapper[4558]: I0120 17:44:09.997217 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4ea608e7-9a19-47fb-8e14-a629451e7c03-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "4ea608e7-9a19-47fb-8e14-a629451e7c03" (UID: "4ea608e7-9a19-47fb-8e14-a629451e7c03"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:44:10 crc kubenswrapper[4558]: I0120 17:44:10.041696 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ea608e7-9a19-47fb-8e14-a629451e7c03-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:10 crc kubenswrapper[4558]: I0120 17:44:10.041734 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4ea608e7-9a19-47fb-8e14-a629451e7c03-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:10 crc kubenswrapper[4558]: I0120 17:44:10.041745 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4ea608e7-9a19-47fb-8e14-a629451e7c03-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:10 crc kubenswrapper[4558]: I0120 17:44:10.041756 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vb7nd\" (UniqueName: \"kubernetes.io/projected/4ea608e7-9a19-47fb-8e14-a629451e7c03-kube-api-access-vb7nd\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:10 crc kubenswrapper[4558]: I0120 17:44:10.041767 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4ea608e7-9a19-47fb-8e14-a629451e7c03-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:10 crc kubenswrapper[4558]: I0120 17:44:10.162684 4558 generic.go:334] "Generic (PLEG): container finished" podID="4ea608e7-9a19-47fb-8e14-a629451e7c03" containerID="2268bf37f5b535b0402d4e5d51dbdeabae160ee08d31b99321666536df207266" exitCode=0 Jan 20 17:44:10 crc kubenswrapper[4558]: I0120 17:44:10.162757 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:44:10 crc kubenswrapper[4558]: I0120 17:44:10.162768 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"4ea608e7-9a19-47fb-8e14-a629451e7c03","Type":"ContainerDied","Data":"2268bf37f5b535b0402d4e5d51dbdeabae160ee08d31b99321666536df207266"} Jan 20 17:44:10 crc kubenswrapper[4558]: I0120 17:44:10.162802 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"4ea608e7-9a19-47fb-8e14-a629451e7c03","Type":"ContainerDied","Data":"c8f702c47f61d39e56d80f4f6a07a49ca90831318685ac858718c1b72c2f66c2"} Jan 20 17:44:10 crc kubenswrapper[4558]: I0120 17:44:10.162822 4558 scope.go:117] "RemoveContainer" containerID="2268bf37f5b535b0402d4e5d51dbdeabae160ee08d31b99321666536df207266" Jan 20 17:44:10 crc kubenswrapper[4558]: I0120 17:44:10.169656 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"7faabe16-9de5-49dc-bab6-44e173f4403c","Type":"ContainerStarted","Data":"7dfa9f0fec16a0a5a1903b314539ce49eb8943fbf0152c319ba83f4512000ab4"} Jan 20 17:44:10 crc kubenswrapper[4558]: I0120 17:44:10.199697 4558 scope.go:117] "RemoveContainer" containerID="b63f6fc151b46c87b40591ac97182073165528cb2330e3173933ce4b52b43f72" Jan 20 17:44:10 crc kubenswrapper[4558]: I0120 17:44:10.219881 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:44:10 crc kubenswrapper[4558]: I0120 17:44:10.219928 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:44:10 crc kubenswrapper[4558]: I0120 17:44:10.227105 4558 
scope.go:117] "RemoveContainer" containerID="2268bf37f5b535b0402d4e5d51dbdeabae160ee08d31b99321666536df207266" Jan 20 17:44:10 crc kubenswrapper[4558]: E0120 17:44:10.227502 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2268bf37f5b535b0402d4e5d51dbdeabae160ee08d31b99321666536df207266\": container with ID starting with 2268bf37f5b535b0402d4e5d51dbdeabae160ee08d31b99321666536df207266 not found: ID does not exist" containerID="2268bf37f5b535b0402d4e5d51dbdeabae160ee08d31b99321666536df207266" Jan 20 17:44:10 crc kubenswrapper[4558]: I0120 17:44:10.227528 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2268bf37f5b535b0402d4e5d51dbdeabae160ee08d31b99321666536df207266"} err="failed to get container status \"2268bf37f5b535b0402d4e5d51dbdeabae160ee08d31b99321666536df207266\": rpc error: code = NotFound desc = could not find container \"2268bf37f5b535b0402d4e5d51dbdeabae160ee08d31b99321666536df207266\": container with ID starting with 2268bf37f5b535b0402d4e5d51dbdeabae160ee08d31b99321666536df207266 not found: ID does not exist" Jan 20 17:44:10 crc kubenswrapper[4558]: I0120 17:44:10.227569 4558 scope.go:117] "RemoveContainer" containerID="b63f6fc151b46c87b40591ac97182073165528cb2330e3173933ce4b52b43f72" Jan 20 17:44:10 crc kubenswrapper[4558]: E0120 17:44:10.228203 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b63f6fc151b46c87b40591ac97182073165528cb2330e3173933ce4b52b43f72\": container with ID starting with b63f6fc151b46c87b40591ac97182073165528cb2330e3173933ce4b52b43f72 not found: ID does not exist" containerID="b63f6fc151b46c87b40591ac97182073165528cb2330e3173933ce4b52b43f72" Jan 20 17:44:10 crc kubenswrapper[4558]: I0120 17:44:10.228220 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b63f6fc151b46c87b40591ac97182073165528cb2330e3173933ce4b52b43f72"} err="failed to get container status \"b63f6fc151b46c87b40591ac97182073165528cb2330e3173933ce4b52b43f72\": rpc error: code = NotFound desc = could not find container \"b63f6fc151b46c87b40591ac97182073165528cb2330e3173933ce4b52b43f72\": container with ID starting with b63f6fc151b46c87b40591ac97182073165528cb2330e3173933ce4b52b43f72 not found: ID does not exist" Jan 20 17:44:10 crc kubenswrapper[4558]: I0120 17:44:10.263064 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:44:10 crc kubenswrapper[4558]: I0120 17:44:10.507600 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:44:10 crc kubenswrapper[4558]: I0120 17:44:10.555940 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4ea608e7-9a19-47fb-8e14-a629451e7c03-public-tls-certs\") pod \"4ea608e7-9a19-47fb-8e14-a629451e7c03\" (UID: \"4ea608e7-9a19-47fb-8e14-a629451e7c03\") " Jan 20 17:44:10 crc kubenswrapper[4558]: I0120 17:44:10.561063 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4ea608e7-9a19-47fb-8e14-a629451e7c03-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "4ea608e7-9a19-47fb-8e14-a629451e7c03" (UID: "4ea608e7-9a19-47fb-8e14-a629451e7c03"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:44:10 crc kubenswrapper[4558]: I0120 17:44:10.659745 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4ea608e7-9a19-47fb-8e14-a629451e7c03-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:10 crc kubenswrapper[4558]: I0120 17:44:10.675447 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-5c4dc89b5d-qp5xf" Jan 20 17:44:10 crc kubenswrapper[4558]: I0120 17:44:10.682517 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/barbican-api-74bb4f5f4-5ppmg" podUID="d619cd37-a474-4965-b382-749ed6d55d6d" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.1.61:9311/healthcheck\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:44:10 crc kubenswrapper[4558]: I0120 17:44:10.682520 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/barbican-api-74bb4f5f4-5ppmg" podUID="d619cd37-a474-4965-b382-749ed6d55d6d" containerName="barbican-api" probeResult="failure" output="Get \"https://10.217.1.61:9311/healthcheck\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:44:10 crc kubenswrapper[4558]: I0120 17:44:10.792045 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:44:10 crc kubenswrapper[4558]: I0120 17:44:10.800051 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:44:10 crc kubenswrapper[4558]: I0120 17:44:10.806800 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:44:10 crc kubenswrapper[4558]: E0120 17:44:10.807225 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e627450-8edc-44bd-805d-b0034178dff2" containerName="neutron-api" Jan 20 17:44:10 crc kubenswrapper[4558]: I0120 17:44:10.807244 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e627450-8edc-44bd-805d-b0034178dff2" containerName="neutron-api" Jan 20 17:44:10 crc kubenswrapper[4558]: E0120 17:44:10.807264 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ea608e7-9a19-47fb-8e14-a629451e7c03" containerName="nova-api-log" Jan 20 17:44:10 crc kubenswrapper[4558]: I0120 17:44:10.807271 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ea608e7-9a19-47fb-8e14-a629451e7c03" containerName="nova-api-log" Jan 20 17:44:10 crc kubenswrapper[4558]: E0120 17:44:10.807281 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b911f497-5616-416f-8aa2-73c7cce172eb" containerName="keystone-api" Jan 20 17:44:10 crc kubenswrapper[4558]: I0120 17:44:10.807287 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b911f497-5616-416f-8aa2-73c7cce172eb" containerName="keystone-api" Jan 20 17:44:10 crc kubenswrapper[4558]: E0120 17:44:10.807303 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ea608e7-9a19-47fb-8e14-a629451e7c03" containerName="nova-api-api" Jan 20 17:44:10 crc kubenswrapper[4558]: I0120 17:44:10.807309 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ea608e7-9a19-47fb-8e14-a629451e7c03" containerName="nova-api-api" Jan 20 17:44:10 crc kubenswrapper[4558]: E0120 17:44:10.807325 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e627450-8edc-44bd-805d-b0034178dff2" 
containerName="neutron-httpd" Jan 20 17:44:10 crc kubenswrapper[4558]: I0120 17:44:10.807330 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e627450-8edc-44bd-805d-b0034178dff2" containerName="neutron-httpd" Jan 20 17:44:10 crc kubenswrapper[4558]: I0120 17:44:10.807493 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="4ea608e7-9a19-47fb-8e14-a629451e7c03" containerName="nova-api-log" Jan 20 17:44:10 crc kubenswrapper[4558]: I0120 17:44:10.807518 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="4ea608e7-9a19-47fb-8e14-a629451e7c03" containerName="nova-api-api" Jan 20 17:44:10 crc kubenswrapper[4558]: I0120 17:44:10.807528 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="7e627450-8edc-44bd-805d-b0034178dff2" containerName="neutron-api" Jan 20 17:44:10 crc kubenswrapper[4558]: I0120 17:44:10.807540 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b911f497-5616-416f-8aa2-73c7cce172eb" containerName="keystone-api" Jan 20 17:44:10 crc kubenswrapper[4558]: I0120 17:44:10.807553 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="7e627450-8edc-44bd-805d-b0034178dff2" containerName="neutron-httpd" Jan 20 17:44:10 crc kubenswrapper[4558]: I0120 17:44:10.808530 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:44:10 crc kubenswrapper[4558]: I0120 17:44:10.810592 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-public-svc" Jan 20 17:44:10 crc kubenswrapper[4558]: I0120 17:44:10.810648 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-api-config-data" Jan 20 17:44:10 crc kubenswrapper[4558]: I0120 17:44:10.811079 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-internal-svc" Jan 20 17:44:10 crc kubenswrapper[4558]: I0120 17:44:10.816705 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:44:10 crc kubenswrapper[4558]: I0120 17:44:10.865440 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7e627450-8edc-44bd-805d-b0034178dff2-public-tls-certs\") pod \"7e627450-8edc-44bd-805d-b0034178dff2\" (UID: \"7e627450-8edc-44bd-805d-b0034178dff2\") " Jan 20 17:44:10 crc kubenswrapper[4558]: I0120 17:44:10.865556 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/7e627450-8edc-44bd-805d-b0034178dff2-config\") pod \"7e627450-8edc-44bd-805d-b0034178dff2\" (UID: \"7e627450-8edc-44bd-805d-b0034178dff2\") " Jan 20 17:44:10 crc kubenswrapper[4558]: I0120 17:44:10.865615 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/7e627450-8edc-44bd-805d-b0034178dff2-httpd-config\") pod \"7e627450-8edc-44bd-805d-b0034178dff2\" (UID: \"7e627450-8edc-44bd-805d-b0034178dff2\") " Jan 20 17:44:10 crc kubenswrapper[4558]: I0120 17:44:10.865647 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/7e627450-8edc-44bd-805d-b0034178dff2-ovndb-tls-certs\") pod \"7e627450-8edc-44bd-805d-b0034178dff2\" (UID: \"7e627450-8edc-44bd-805d-b0034178dff2\") " Jan 20 17:44:10 crc kubenswrapper[4558]: I0120 17:44:10.865857 4558 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7e627450-8edc-44bd-805d-b0034178dff2-internal-tls-certs\") pod \"7e627450-8edc-44bd-805d-b0034178dff2\" (UID: \"7e627450-8edc-44bd-805d-b0034178dff2\") " Jan 20 17:44:10 crc kubenswrapper[4558]: I0120 17:44:10.865883 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8d5jv\" (UniqueName: \"kubernetes.io/projected/7e627450-8edc-44bd-805d-b0034178dff2-kube-api-access-8d5jv\") pod \"7e627450-8edc-44bd-805d-b0034178dff2\" (UID: \"7e627450-8edc-44bd-805d-b0034178dff2\") " Jan 20 17:44:10 crc kubenswrapper[4558]: I0120 17:44:10.865963 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e627450-8edc-44bd-805d-b0034178dff2-combined-ca-bundle\") pod \"7e627450-8edc-44bd-805d-b0034178dff2\" (UID: \"7e627450-8edc-44bd-805d-b0034178dff2\") " Jan 20 17:44:10 crc kubenswrapper[4558]: I0120 17:44:10.875043 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7e627450-8edc-44bd-805d-b0034178dff2-kube-api-access-8d5jv" (OuterVolumeSpecName: "kube-api-access-8d5jv") pod "7e627450-8edc-44bd-805d-b0034178dff2" (UID: "7e627450-8edc-44bd-805d-b0034178dff2"). InnerVolumeSpecName "kube-api-access-8d5jv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:44:10 crc kubenswrapper[4558]: I0120 17:44:10.885264 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e627450-8edc-44bd-805d-b0034178dff2-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "7e627450-8edc-44bd-805d-b0034178dff2" (UID: "7e627450-8edc-44bd-805d-b0034178dff2"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:44:10 crc kubenswrapper[4558]: I0120 17:44:10.946758 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e627450-8edc-44bd-805d-b0034178dff2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7e627450-8edc-44bd-805d-b0034178dff2" (UID: "7e627450-8edc-44bd-805d-b0034178dff2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:44:10 crc kubenswrapper[4558]: I0120 17:44:10.953770 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e627450-8edc-44bd-805d-b0034178dff2-config" (OuterVolumeSpecName: "config") pod "7e627450-8edc-44bd-805d-b0034178dff2" (UID: "7e627450-8edc-44bd-805d-b0034178dff2"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:44:10 crc kubenswrapper[4558]: I0120 17:44:10.969243 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e627450-8edc-44bd-805d-b0034178dff2-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "7e627450-8edc-44bd-805d-b0034178dff2" (UID: "7e627450-8edc-44bd-805d-b0034178dff2"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:44:10 crc kubenswrapper[4558]: I0120 17:44:10.969349 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6rmww\" (UniqueName: \"kubernetes.io/projected/70182fd8-a242-40f5-b20d-8ff4dd33e9b1-kube-api-access-6rmww\") pod \"nova-api-0\" (UID: \"70182fd8-a242-40f5-b20d-8ff4dd33e9b1\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:44:10 crc kubenswrapper[4558]: I0120 17:44:10.969526 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/70182fd8-a242-40f5-b20d-8ff4dd33e9b1-logs\") pod \"nova-api-0\" (UID: \"70182fd8-a242-40f5-b20d-8ff4dd33e9b1\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:44:10 crc kubenswrapper[4558]: I0120 17:44:10.969566 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/70182fd8-a242-40f5-b20d-8ff4dd33e9b1-public-tls-certs\") pod \"nova-api-0\" (UID: \"70182fd8-a242-40f5-b20d-8ff4dd33e9b1\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:44:10 crc kubenswrapper[4558]: I0120 17:44:10.969808 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70182fd8-a242-40f5-b20d-8ff4dd33e9b1-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"70182fd8-a242-40f5-b20d-8ff4dd33e9b1\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:44:10 crc kubenswrapper[4558]: I0120 17:44:10.969904 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/70182fd8-a242-40f5-b20d-8ff4dd33e9b1-internal-tls-certs\") pod \"nova-api-0\" (UID: \"70182fd8-a242-40f5-b20d-8ff4dd33e9b1\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:44:10 crc kubenswrapper[4558]: I0120 17:44:10.969936 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70182fd8-a242-40f5-b20d-8ff4dd33e9b1-config-data\") pod \"nova-api-0\" (UID: \"70182fd8-a242-40f5-b20d-8ff4dd33e9b1\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:44:10 crc kubenswrapper[4558]: I0120 17:44:10.970262 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8d5jv\" (UniqueName: \"kubernetes.io/projected/7e627450-8edc-44bd-805d-b0034178dff2-kube-api-access-8d5jv\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:10 crc kubenswrapper[4558]: I0120 17:44:10.970285 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e627450-8edc-44bd-805d-b0034178dff2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:10 crc kubenswrapper[4558]: I0120 17:44:10.970294 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7e627450-8edc-44bd-805d-b0034178dff2-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:10 crc kubenswrapper[4558]: I0120 17:44:10.970303 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/7e627450-8edc-44bd-805d-b0034178dff2-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:10 crc kubenswrapper[4558]: I0120 17:44:10.970313 4558 reconciler_common.go:293] "Volume detached for volume 
\"httpd-config\" (UniqueName: \"kubernetes.io/secret/7e627450-8edc-44bd-805d-b0034178dff2-httpd-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:10 crc kubenswrapper[4558]: I0120 17:44:10.986303 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e627450-8edc-44bd-805d-b0034178dff2-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "7e627450-8edc-44bd-805d-b0034178dff2" (UID: "7e627450-8edc-44bd-805d-b0034178dff2"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:44:11 crc kubenswrapper[4558]: I0120 17:44:11.023865 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e627450-8edc-44bd-805d-b0034178dff2-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "7e627450-8edc-44bd-805d-b0034178dff2" (UID: "7e627450-8edc-44bd-805d-b0034178dff2"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:44:11 crc kubenswrapper[4558]: I0120 17:44:11.072951 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6rmww\" (UniqueName: \"kubernetes.io/projected/70182fd8-a242-40f5-b20d-8ff4dd33e9b1-kube-api-access-6rmww\") pod \"nova-api-0\" (UID: \"70182fd8-a242-40f5-b20d-8ff4dd33e9b1\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:44:11 crc kubenswrapper[4558]: I0120 17:44:11.073120 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/70182fd8-a242-40f5-b20d-8ff4dd33e9b1-logs\") pod \"nova-api-0\" (UID: \"70182fd8-a242-40f5-b20d-8ff4dd33e9b1\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:44:11 crc kubenswrapper[4558]: I0120 17:44:11.073201 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/70182fd8-a242-40f5-b20d-8ff4dd33e9b1-public-tls-certs\") pod \"nova-api-0\" (UID: \"70182fd8-a242-40f5-b20d-8ff4dd33e9b1\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:44:11 crc kubenswrapper[4558]: I0120 17:44:11.073376 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70182fd8-a242-40f5-b20d-8ff4dd33e9b1-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"70182fd8-a242-40f5-b20d-8ff4dd33e9b1\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:44:11 crc kubenswrapper[4558]: I0120 17:44:11.073460 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/70182fd8-a242-40f5-b20d-8ff4dd33e9b1-internal-tls-certs\") pod \"nova-api-0\" (UID: \"70182fd8-a242-40f5-b20d-8ff4dd33e9b1\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:44:11 crc kubenswrapper[4558]: I0120 17:44:11.073502 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70182fd8-a242-40f5-b20d-8ff4dd33e9b1-config-data\") pod \"nova-api-0\" (UID: \"70182fd8-a242-40f5-b20d-8ff4dd33e9b1\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:44:11 crc kubenswrapper[4558]: I0120 17:44:11.073706 4558 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/7e627450-8edc-44bd-805d-b0034178dff2-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:11 crc kubenswrapper[4558]: I0120 17:44:11.073730 4558 
reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7e627450-8edc-44bd-805d-b0034178dff2-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:11 crc kubenswrapper[4558]: I0120 17:44:11.073500 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/70182fd8-a242-40f5-b20d-8ff4dd33e9b1-logs\") pod \"nova-api-0\" (UID: \"70182fd8-a242-40f5-b20d-8ff4dd33e9b1\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:44:11 crc kubenswrapper[4558]: I0120 17:44:11.077937 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70182fd8-a242-40f5-b20d-8ff4dd33e9b1-config-data\") pod \"nova-api-0\" (UID: \"70182fd8-a242-40f5-b20d-8ff4dd33e9b1\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:44:11 crc kubenswrapper[4558]: I0120 17:44:11.078810 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/70182fd8-a242-40f5-b20d-8ff4dd33e9b1-internal-tls-certs\") pod \"nova-api-0\" (UID: \"70182fd8-a242-40f5-b20d-8ff4dd33e9b1\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:44:11 crc kubenswrapper[4558]: I0120 17:44:11.078948 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70182fd8-a242-40f5-b20d-8ff4dd33e9b1-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"70182fd8-a242-40f5-b20d-8ff4dd33e9b1\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:44:11 crc kubenswrapper[4558]: I0120 17:44:11.082617 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/70182fd8-a242-40f5-b20d-8ff4dd33e9b1-public-tls-certs\") pod \"nova-api-0\" (UID: \"70182fd8-a242-40f5-b20d-8ff4dd33e9b1\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:44:11 crc kubenswrapper[4558]: I0120 17:44:11.088731 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6rmww\" (UniqueName: \"kubernetes.io/projected/70182fd8-a242-40f5-b20d-8ff4dd33e9b1-kube-api-access-6rmww\") pod \"nova-api-0\" (UID: \"70182fd8-a242-40f5-b20d-8ff4dd33e9b1\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:44:11 crc kubenswrapper[4558]: I0120 17:44:11.123315 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:44:11 crc kubenswrapper[4558]: I0120 17:44:11.191154 4558 generic.go:334] "Generic (PLEG): container finished" podID="7e627450-8edc-44bd-805d-b0034178dff2" containerID="ada568b4f4a7ec3082384c4ebb4a4d6e1590ad65456658486086d4be942b31b7" exitCode=0 Jan 20 17:44:11 crc kubenswrapper[4558]: I0120 17:44:11.192341 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-5c4dc89b5d-qp5xf" Jan 20 17:44:11 crc kubenswrapper[4558]: I0120 17:44:11.196330 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-5c4dc89b5d-qp5xf" event={"ID":"7e627450-8edc-44bd-805d-b0034178dff2","Type":"ContainerDied","Data":"ada568b4f4a7ec3082384c4ebb4a4d6e1590ad65456658486086d4be942b31b7"} Jan 20 17:44:11 crc kubenswrapper[4558]: I0120 17:44:11.196382 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-5c4dc89b5d-qp5xf" event={"ID":"7e627450-8edc-44bd-805d-b0034178dff2","Type":"ContainerDied","Data":"6d63a2e58dd9a4337b32860657909c10d07d29131a229a2cdf7cba461a126348"} Jan 20 17:44:11 crc kubenswrapper[4558]: I0120 17:44:11.196405 4558 scope.go:117] "RemoveContainer" containerID="5730181f255b51b17148b66b915f2f8f31d4d16baed50889d862d0d3799b5c21" Jan 20 17:44:11 crc kubenswrapper[4558]: I0120 17:44:11.249192 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-5c4dc89b5d-qp5xf"] Jan 20 17:44:11 crc kubenswrapper[4558]: I0120 17:44:11.267323 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-metadata-0" podUID="ff0d0703-8173-4ac0-afc0-e673feaef286" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.1.64:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:44:11 crc kubenswrapper[4558]: I0120 17:44:11.267575 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-metadata-0" podUID="ff0d0703-8173-4ac0-afc0-e673feaef286" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.1.64:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:44:11 crc kubenswrapper[4558]: I0120 17:44:11.267834 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-5c4dc89b5d-qp5xf"] Jan 20 17:44:11 crc kubenswrapper[4558]: I0120 17:44:11.274423 4558 scope.go:117] "RemoveContainer" containerID="ada568b4f4a7ec3082384c4ebb4a4d6e1590ad65456658486086d4be942b31b7" Jan 20 17:44:11 crc kubenswrapper[4558]: I0120 17:44:11.335320 4558 scope.go:117] "RemoveContainer" containerID="5730181f255b51b17148b66b915f2f8f31d4d16baed50889d862d0d3799b5c21" Jan 20 17:44:11 crc kubenswrapper[4558]: E0120 17:44:11.336077 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5730181f255b51b17148b66b915f2f8f31d4d16baed50889d862d0d3799b5c21\": container with ID starting with 5730181f255b51b17148b66b915f2f8f31d4d16baed50889d862d0d3799b5c21 not found: ID does not exist" containerID="5730181f255b51b17148b66b915f2f8f31d4d16baed50889d862d0d3799b5c21" Jan 20 17:44:11 crc kubenswrapper[4558]: I0120 17:44:11.336108 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5730181f255b51b17148b66b915f2f8f31d4d16baed50889d862d0d3799b5c21"} err="failed to get container status \"5730181f255b51b17148b66b915f2f8f31d4d16baed50889d862d0d3799b5c21\": rpc error: code = NotFound desc = could not find container \"5730181f255b51b17148b66b915f2f8f31d4d16baed50889d862d0d3799b5c21\": container with ID starting with 5730181f255b51b17148b66b915f2f8f31d4d16baed50889d862d0d3799b5c21 not found: ID does not exist" Jan 20 17:44:11 crc kubenswrapper[4558]: I0120 17:44:11.336128 4558 scope.go:117] "RemoveContainer" 
containerID="ada568b4f4a7ec3082384c4ebb4a4d6e1590ad65456658486086d4be942b31b7" Jan 20 17:44:11 crc kubenswrapper[4558]: E0120 17:44:11.336381 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ada568b4f4a7ec3082384c4ebb4a4d6e1590ad65456658486086d4be942b31b7\": container with ID starting with ada568b4f4a7ec3082384c4ebb4a4d6e1590ad65456658486086d4be942b31b7 not found: ID does not exist" containerID="ada568b4f4a7ec3082384c4ebb4a4d6e1590ad65456658486086d4be942b31b7" Jan 20 17:44:11 crc kubenswrapper[4558]: I0120 17:44:11.336399 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ada568b4f4a7ec3082384c4ebb4a4d6e1590ad65456658486086d4be942b31b7"} err="failed to get container status \"ada568b4f4a7ec3082384c4ebb4a4d6e1590ad65456658486086d4be942b31b7\": rpc error: code = NotFound desc = could not find container \"ada568b4f4a7ec3082384c4ebb4a4d6e1590ad65456658486086d4be942b31b7\": container with ID starting with ada568b4f4a7ec3082384c4ebb4a4d6e1590ad65456658486086d4be942b31b7 not found: ID does not exist" Jan 20 17:44:11 crc kubenswrapper[4558]: I0120 17:44:11.581888 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:44:11 crc kubenswrapper[4558]: I0120 17:44:11.597472 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:44:11 crc kubenswrapper[4558]: W0120 17:44:11.604422 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod70182fd8_a242_40f5_b20d_8ff4dd33e9b1.slice/crio-ffbd520c8e0621c07e5bc545a014fc083db0874a597b335950d739345e51a889 WatchSource:0}: Error finding container ffbd520c8e0621c07e5bc545a014fc083db0874a597b335950d739345e51a889: Status 404 returned error can't find the container with id ffbd520c8e0621c07e5bc545a014fc083db0874a597b335950d739345e51a889 Jan 20 17:44:11 crc kubenswrapper[4558]: I0120 17:44:11.703284 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack-kuttl-tests/barbican-api-74bb4f5f4-5ppmg" podUID="d619cd37-a474-4965-b382-749ed6d55d6d" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.1.61:9311/healthcheck\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:44:11 crc kubenswrapper[4558]: I0120 17:44:11.703332 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack-kuttl-tests/barbican-api-74bb4f5f4-5ppmg" podUID="d619cd37-a474-4965-b382-749ed6d55d6d" containerName="barbican-api" probeResult="failure" output="Get \"https://10.217.1.61:9311/healthcheck\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:44:12 crc kubenswrapper[4558]: I0120 17:44:12.076866 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/barbican-api-74bb4f5f4-5ppmg" Jan 20 17:44:12 crc kubenswrapper[4558]: I0120 17:44:12.152346 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/barbican-api-74bb4f5f4-5ppmg" Jan 20 17:44:12 crc kubenswrapper[4558]: I0120 17:44:12.217543 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"70182fd8-a242-40f5-b20d-8ff4dd33e9b1","Type":"ContainerStarted","Data":"b3f67b171f63171671e95039a6c39cf3571069f1e72ec88de6862170e74914f6"} Jan 20 17:44:12 crc 
kubenswrapper[4558]: I0120 17:44:12.217585 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"70182fd8-a242-40f5-b20d-8ff4dd33e9b1","Type":"ContainerStarted","Data":"69302d70e78ca0d36f4a72bfbbc9246b0e5fece0f0fdbf172a8875985eda6a7c"} Jan 20 17:44:12 crc kubenswrapper[4558]: I0120 17:44:12.217596 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"70182fd8-a242-40f5-b20d-8ff4dd33e9b1","Type":"ContainerStarted","Data":"ffbd520c8e0621c07e5bc545a014fc083db0874a597b335950d739345e51a889"} Jan 20 17:44:12 crc kubenswrapper[4558]: I0120 17:44:12.223473 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-api-7bc45756fb-278vp"] Jan 20 17:44:12 crc kubenswrapper[4558]: I0120 17:44:12.223664 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-api-7bc45756fb-278vp" podUID="432ec648-18a7-416a-85cc-409a30976a67" containerName="barbican-api-log" containerID="cri-o://37abef38ac07dd4f0829b9b454166615d0d813a32d985cccdcab65f0ba9f6534" gracePeriod=30 Jan 20 17:44:12 crc kubenswrapper[4558]: I0120 17:44:12.223752 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-api-7bc45756fb-278vp" podUID="432ec648-18a7-416a-85cc-409a30976a67" containerName="barbican-api" containerID="cri-o://0dfc6d5c9d8e2b433cbe0bd6d9c9a1260fd3286262cd3972d6f5db23fdc3d982" gracePeriod=30 Jan 20 17:44:12 crc kubenswrapper[4558]: I0120 17:44:12.566621 4558 scope.go:117] "RemoveContainer" containerID="696eca395715bd4cf46b8dd0ddb53ef0ad8c84c94f7e91863731528e53ac125e" Jan 20 17:44:12 crc kubenswrapper[4558]: E0120 17:44:12.567311 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-cell0-conductor-conductor\" with CrashLoopBackOff: \"back-off 20s restarting failed container=nova-cell0-conductor-conductor pod=nova-cell0-conductor-0_openstack-kuttl-tests(50cacddd-ebea-477f-af64-6e96a09a242e)\"" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="50cacddd-ebea-477f-af64-6e96a09a242e" Jan 20 17:44:12 crc kubenswrapper[4558]: I0120 17:44:12.575797 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4ea608e7-9a19-47fb-8e14-a629451e7c03" path="/var/lib/kubelet/pods/4ea608e7-9a19-47fb-8e14-a629451e7c03/volumes" Jan 20 17:44:12 crc kubenswrapper[4558]: I0120 17:44:12.576630 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7e627450-8edc-44bd-805d-b0034178dff2" path="/var/lib/kubelet/pods/7e627450-8edc-44bd-805d-b0034178dff2/volumes" Jan 20 17:44:13 crc kubenswrapper[4558]: I0120 17:44:13.228154 4558 generic.go:334] "Generic (PLEG): container finished" podID="432ec648-18a7-416a-85cc-409a30976a67" containerID="37abef38ac07dd4f0829b9b454166615d0d813a32d985cccdcab65f0ba9f6534" exitCode=143 Jan 20 17:44:13 crc kubenswrapper[4558]: I0120 17:44:13.228242 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-7bc45756fb-278vp" event={"ID":"432ec648-18a7-416a-85cc-409a30976a67","Type":"ContainerDied","Data":"37abef38ac07dd4f0829b9b454166615d0d813a32d985cccdcab65f0ba9f6534"} Jan 20 17:44:14 crc kubenswrapper[4558]: I0120 17:44:14.305816 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:44:14 crc kubenswrapper[4558]: I0120 17:44:14.336821 4558 kubelet.go:2542] "SyncLoop (probe)" 
probe="startup" status="started" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:44:14 crc kubenswrapper[4558]: I0120 17:44:14.360547 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-api-0" podStartSLOduration=4.360530893 podStartE2EDuration="4.360530893s" podCreationTimestamp="2026-01-20 17:44:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:44:12.252683646 +0000 UTC m=+3746.013021613" watchObservedRunningTime="2026-01-20 17:44:14.360530893 +0000 UTC m=+3748.120868860" Jan 20 17:44:15 crc kubenswrapper[4558]: I0120 17:44:15.394214 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/barbican-api-7bc45756fb-278vp" podUID="432ec648-18a7-416a-85cc-409a30976a67" containerName="barbican-api" probeResult="failure" output="Get \"https://10.217.1.28:9311/healthcheck\": read tcp 10.217.0.2:36204->10.217.1.28:9311: read: connection reset by peer" Jan 20 17:44:15 crc kubenswrapper[4558]: I0120 17:44:15.394368 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/barbican-api-7bc45756fb-278vp" podUID="432ec648-18a7-416a-85cc-409a30976a67" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.1.28:9311/healthcheck\": read tcp 10.217.0.2:36210->10.217.1.28:9311: read: connection reset by peer" Jan 20 17:44:15 crc kubenswrapper[4558]: I0120 17:44:15.507878 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:44:15 crc kubenswrapper[4558]: I0120 17:44:15.534507 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:44:15 crc kubenswrapper[4558]: I0120 17:44:15.768715 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-api-7bc45756fb-278vp" Jan 20 17:44:15 crc kubenswrapper[4558]: I0120 17:44:15.912769 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/432ec648-18a7-416a-85cc-409a30976a67-internal-tls-certs\") pod \"432ec648-18a7-416a-85cc-409a30976a67\" (UID: \"432ec648-18a7-416a-85cc-409a30976a67\") " Jan 20 17:44:15 crc kubenswrapper[4558]: I0120 17:44:15.913048 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/432ec648-18a7-416a-85cc-409a30976a67-config-data-custom\") pod \"432ec648-18a7-416a-85cc-409a30976a67\" (UID: \"432ec648-18a7-416a-85cc-409a30976a67\") " Jan 20 17:44:15 crc kubenswrapper[4558]: I0120 17:44:15.913213 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/432ec648-18a7-416a-85cc-409a30976a67-public-tls-certs\") pod \"432ec648-18a7-416a-85cc-409a30976a67\" (UID: \"432ec648-18a7-416a-85cc-409a30976a67\") " Jan 20 17:44:15 crc kubenswrapper[4558]: I0120 17:44:15.913440 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-78lmx\" (UniqueName: \"kubernetes.io/projected/432ec648-18a7-416a-85cc-409a30976a67-kube-api-access-78lmx\") pod \"432ec648-18a7-416a-85cc-409a30976a67\" (UID: \"432ec648-18a7-416a-85cc-409a30976a67\") " Jan 20 17:44:15 crc kubenswrapper[4558]: I0120 17:44:15.913547 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/432ec648-18a7-416a-85cc-409a30976a67-logs\") pod \"432ec648-18a7-416a-85cc-409a30976a67\" (UID: \"432ec648-18a7-416a-85cc-409a30976a67\") " Jan 20 17:44:15 crc kubenswrapper[4558]: I0120 17:44:15.913682 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/432ec648-18a7-416a-85cc-409a30976a67-config-data\") pod \"432ec648-18a7-416a-85cc-409a30976a67\" (UID: \"432ec648-18a7-416a-85cc-409a30976a67\") " Jan 20 17:44:15 crc kubenswrapper[4558]: I0120 17:44:15.913769 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/432ec648-18a7-416a-85cc-409a30976a67-combined-ca-bundle\") pod \"432ec648-18a7-416a-85cc-409a30976a67\" (UID: \"432ec648-18a7-416a-85cc-409a30976a67\") " Jan 20 17:44:15 crc kubenswrapper[4558]: I0120 17:44:15.914267 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/432ec648-18a7-416a-85cc-409a30976a67-logs" (OuterVolumeSpecName: "logs") pod "432ec648-18a7-416a-85cc-409a30976a67" (UID: "432ec648-18a7-416a-85cc-409a30976a67"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:44:15 crc kubenswrapper[4558]: I0120 17:44:15.914719 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/432ec648-18a7-416a-85cc-409a30976a67-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:15 crc kubenswrapper[4558]: I0120 17:44:15.918799 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/432ec648-18a7-416a-85cc-409a30976a67-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "432ec648-18a7-416a-85cc-409a30976a67" (UID: "432ec648-18a7-416a-85cc-409a30976a67"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:44:15 crc kubenswrapper[4558]: I0120 17:44:15.922288 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/432ec648-18a7-416a-85cc-409a30976a67-kube-api-access-78lmx" (OuterVolumeSpecName: "kube-api-access-78lmx") pod "432ec648-18a7-416a-85cc-409a30976a67" (UID: "432ec648-18a7-416a-85cc-409a30976a67"). InnerVolumeSpecName "kube-api-access-78lmx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:44:15 crc kubenswrapper[4558]: I0120 17:44:15.941286 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/432ec648-18a7-416a-85cc-409a30976a67-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "432ec648-18a7-416a-85cc-409a30976a67" (UID: "432ec648-18a7-416a-85cc-409a30976a67"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:44:15 crc kubenswrapper[4558]: I0120 17:44:15.970269 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/432ec648-18a7-416a-85cc-409a30976a67-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "432ec648-18a7-416a-85cc-409a30976a67" (UID: "432ec648-18a7-416a-85cc-409a30976a67"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:44:15 crc kubenswrapper[4558]: I0120 17:44:15.979546 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/432ec648-18a7-416a-85cc-409a30976a67-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "432ec648-18a7-416a-85cc-409a30976a67" (UID: "432ec648-18a7-416a-85cc-409a30976a67"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:44:15 crc kubenswrapper[4558]: I0120 17:44:15.981994 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/432ec648-18a7-416a-85cc-409a30976a67-config-data" (OuterVolumeSpecName: "config-data") pod "432ec648-18a7-416a-85cc-409a30976a67" (UID: "432ec648-18a7-416a-85cc-409a30976a67"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:44:16 crc kubenswrapper[4558]: I0120 17:44:16.016565 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/432ec648-18a7-416a-85cc-409a30976a67-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:16 crc kubenswrapper[4558]: I0120 17:44:16.016592 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/432ec648-18a7-416a-85cc-409a30976a67-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:16 crc kubenswrapper[4558]: I0120 17:44:16.016604 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/432ec648-18a7-416a-85cc-409a30976a67-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:16 crc kubenswrapper[4558]: I0120 17:44:16.016617 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/432ec648-18a7-416a-85cc-409a30976a67-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:16 crc kubenswrapper[4558]: I0120 17:44:16.016628 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/432ec648-18a7-416a-85cc-409a30976a67-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:16 crc kubenswrapper[4558]: I0120 17:44:16.016641 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-78lmx\" (UniqueName: \"kubernetes.io/projected/432ec648-18a7-416a-85cc-409a30976a67-kube-api-access-78lmx\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:16 crc kubenswrapper[4558]: I0120 17:44:16.260759 4558 generic.go:334] "Generic (PLEG): container finished" podID="432ec648-18a7-416a-85cc-409a30976a67" containerID="0dfc6d5c9d8e2b433cbe0bd6d9c9a1260fd3286262cd3972d6f5db23fdc3d982" exitCode=0 Jan 20 17:44:16 crc kubenswrapper[4558]: I0120 17:44:16.261813 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-api-7bc45756fb-278vp" Jan 20 17:44:16 crc kubenswrapper[4558]: I0120 17:44:16.261967 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-7bc45756fb-278vp" event={"ID":"432ec648-18a7-416a-85cc-409a30976a67","Type":"ContainerDied","Data":"0dfc6d5c9d8e2b433cbe0bd6d9c9a1260fd3286262cd3972d6f5db23fdc3d982"} Jan 20 17:44:16 crc kubenswrapper[4558]: I0120 17:44:16.262003 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-7bc45756fb-278vp" event={"ID":"432ec648-18a7-416a-85cc-409a30976a67","Type":"ContainerDied","Data":"e5df2a06a2815159e994faa556cca8f0fe0201d5baebedaccc83bd16ca88a66a"} Jan 20 17:44:16 crc kubenswrapper[4558]: I0120 17:44:16.262023 4558 scope.go:117] "RemoveContainer" containerID="0dfc6d5c9d8e2b433cbe0bd6d9c9a1260fd3286262cd3972d6f5db23fdc3d982" Jan 20 17:44:16 crc kubenswrapper[4558]: I0120 17:44:16.279252 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:44:16 crc kubenswrapper[4558]: I0120 17:44:16.291542 4558 scope.go:117] "RemoveContainer" containerID="37abef38ac07dd4f0829b9b454166615d0d813a32d985cccdcab65f0ba9f6534" Jan 20 17:44:16 crc kubenswrapper[4558]: I0120 17:44:16.308220 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-api-7bc45756fb-278vp"] Jan 20 17:44:16 crc kubenswrapper[4558]: I0120 17:44:16.310097 4558 scope.go:117] "RemoveContainer" containerID="0dfc6d5c9d8e2b433cbe0bd6d9c9a1260fd3286262cd3972d6f5db23fdc3d982" Jan 20 17:44:16 crc kubenswrapper[4558]: E0120 17:44:16.310539 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0dfc6d5c9d8e2b433cbe0bd6d9c9a1260fd3286262cd3972d6f5db23fdc3d982\": container with ID starting with 0dfc6d5c9d8e2b433cbe0bd6d9c9a1260fd3286262cd3972d6f5db23fdc3d982 not found: ID does not exist" containerID="0dfc6d5c9d8e2b433cbe0bd6d9c9a1260fd3286262cd3972d6f5db23fdc3d982" Jan 20 17:44:16 crc kubenswrapper[4558]: I0120 17:44:16.310571 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0dfc6d5c9d8e2b433cbe0bd6d9c9a1260fd3286262cd3972d6f5db23fdc3d982"} err="failed to get container status \"0dfc6d5c9d8e2b433cbe0bd6d9c9a1260fd3286262cd3972d6f5db23fdc3d982\": rpc error: code = NotFound desc = could not find container \"0dfc6d5c9d8e2b433cbe0bd6d9c9a1260fd3286262cd3972d6f5db23fdc3d982\": container with ID starting with 0dfc6d5c9d8e2b433cbe0bd6d9c9a1260fd3286262cd3972d6f5db23fdc3d982 not found: ID does not exist" Jan 20 17:44:16 crc kubenswrapper[4558]: I0120 17:44:16.310594 4558 scope.go:117] "RemoveContainer" containerID="37abef38ac07dd4f0829b9b454166615d0d813a32d985cccdcab65f0ba9f6534" Jan 20 17:44:16 crc kubenswrapper[4558]: E0120 17:44:16.311054 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"37abef38ac07dd4f0829b9b454166615d0d813a32d985cccdcab65f0ba9f6534\": container with ID starting with 37abef38ac07dd4f0829b9b454166615d0d813a32d985cccdcab65f0ba9f6534 not found: ID does not exist" containerID="37abef38ac07dd4f0829b9b454166615d0d813a32d985cccdcab65f0ba9f6534" Jan 20 17:44:16 crc kubenswrapper[4558]: I0120 17:44:16.311091 4558 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"37abef38ac07dd4f0829b9b454166615d0d813a32d985cccdcab65f0ba9f6534"} err="failed to get container status \"37abef38ac07dd4f0829b9b454166615d0d813a32d985cccdcab65f0ba9f6534\": rpc error: code = NotFound desc = could not find container \"37abef38ac07dd4f0829b9b454166615d0d813a32d985cccdcab65f0ba9f6534\": container with ID starting with 37abef38ac07dd4f0829b9b454166615d0d813a32d985cccdcab65f0ba9f6534 not found: ID does not exist" Jan 20 17:44:16 crc kubenswrapper[4558]: I0120 17:44:16.319096 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-api-7bc45756fb-278vp"] Jan 20 17:44:16 crc kubenswrapper[4558]: I0120 17:44:16.604450 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="432ec648-18a7-416a-85cc-409a30976a67" path="/var/lib/kubelet/pods/432ec648-18a7-416a-85cc-409a30976a67/volumes" Jan 20 17:44:20 crc kubenswrapper[4558]: I0120 17:44:20.225987 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:44:20 crc kubenswrapper[4558]: I0120 17:44:20.226319 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:44:20 crc kubenswrapper[4558]: I0120 17:44:20.230599 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:44:20 crc kubenswrapper[4558]: I0120 17:44:20.231478 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:44:21 crc kubenswrapper[4558]: I0120 17:44:21.124684 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:44:21 crc kubenswrapper[4558]: I0120 17:44:21.124765 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:44:22 crc kubenswrapper[4558]: I0120 17:44:22.134321 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" podUID="70182fd8-a242-40f5-b20d-8ff4dd33e9b1" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.1.69:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:44:22 crc kubenswrapper[4558]: I0120 17:44:22.134339 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" podUID="70182fd8-a242-40f5-b20d-8ff4dd33e9b1" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.1.69:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:44:22 crc kubenswrapper[4558]: I0120 17:44:22.214359 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/placement-5b5f5cd6d-6rk7z" Jan 20 17:44:22 crc kubenswrapper[4558]: I0120 17:44:22.214796 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/placement-5b5f5cd6d-6rk7z" Jan 20 17:44:22 crc kubenswrapper[4558]: I0120 17:44:22.308950 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-69d7f65964-r46nm"] Jan 20 17:44:22 crc kubenswrapper[4558]: I0120 17:44:22.309442 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/placement-69d7f65964-r46nm" podUID="bf32468e-b0a7-4b81-bb49-c65a95997903" containerName="placement-log" 
containerID="cri-o://1bea9ca407d948182088942a7baaa1288dbc8ed20180d840df071edc53c59c24" gracePeriod=30 Jan 20 17:44:22 crc kubenswrapper[4558]: I0120 17:44:22.309847 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/placement-69d7f65964-r46nm" podUID="bf32468e-b0a7-4b81-bb49-c65a95997903" containerName="placement-api" containerID="cri-o://fc1f3875679e2cd5005493bd3bd21c2f6acc53a59f363480f76d0371a068bda4" gracePeriod=30 Jan 20 17:44:22 crc kubenswrapper[4558]: I0120 17:44:22.568970 4558 scope.go:117] "RemoveContainer" containerID="2d2a8989ca5a5af98b2c9dc8f801ea0675339ec14800f437c058c5a43c20146b" Jan 20 17:44:22 crc kubenswrapper[4558]: E0120 17:44:22.569334 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:44:23 crc kubenswrapper[4558]: I0120 17:44:23.342769 4558 generic.go:334] "Generic (PLEG): container finished" podID="bf32468e-b0a7-4b81-bb49-c65a95997903" containerID="1bea9ca407d948182088942a7baaa1288dbc8ed20180d840df071edc53c59c24" exitCode=143 Jan 20 17:44:23 crc kubenswrapper[4558]: I0120 17:44:23.342831 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-69d7f65964-r46nm" event={"ID":"bf32468e-b0a7-4b81-bb49-c65a95997903","Type":"ContainerDied","Data":"1bea9ca407d948182088942a7baaa1288dbc8ed20180d840df071edc53c59c24"} Jan 20 17:44:23 crc kubenswrapper[4558]: I0120 17:44:23.566809 4558 scope.go:117] "RemoveContainer" containerID="696eca395715bd4cf46b8dd0ddb53ef0ad8c84c94f7e91863731528e53ac125e" Jan 20 17:44:24 crc kubenswrapper[4558]: I0120 17:44:24.354432 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"50cacddd-ebea-477f-af64-6e96a09a242e","Type":"ContainerStarted","Data":"f175857b081b6702705f8a9a0de77c7f1bd59b4f0ad1848d4abf509a9f1c1ae7"} Jan 20 17:44:24 crc kubenswrapper[4558]: I0120 17:44:24.355039 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:44:25 crc kubenswrapper[4558]: I0120 17:44:25.853832 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-69d7f65964-r46nm" Jan 20 17:44:26 crc kubenswrapper[4558]: I0120 17:44:26.051523 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bf32468e-b0a7-4b81-bb49-c65a95997903-public-tls-certs\") pod \"bf32468e-b0a7-4b81-bb49-c65a95997903\" (UID: \"bf32468e-b0a7-4b81-bb49-c65a95997903\") " Jan 20 17:44:26 crc kubenswrapper[4558]: I0120 17:44:26.051656 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/bf32468e-b0a7-4b81-bb49-c65a95997903-internal-tls-certs\") pod \"bf32468e-b0a7-4b81-bb49-c65a95997903\" (UID: \"bf32468e-b0a7-4b81-bb49-c65a95997903\") " Jan 20 17:44:26 crc kubenswrapper[4558]: I0120 17:44:26.051717 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-59g5m\" (UniqueName: \"kubernetes.io/projected/bf32468e-b0a7-4b81-bb49-c65a95997903-kube-api-access-59g5m\") pod \"bf32468e-b0a7-4b81-bb49-c65a95997903\" (UID: \"bf32468e-b0a7-4b81-bb49-c65a95997903\") " Jan 20 17:44:26 crc kubenswrapper[4558]: I0120 17:44:26.051781 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bf32468e-b0a7-4b81-bb49-c65a95997903-logs\") pod \"bf32468e-b0a7-4b81-bb49-c65a95997903\" (UID: \"bf32468e-b0a7-4b81-bb49-c65a95997903\") " Jan 20 17:44:26 crc kubenswrapper[4558]: I0120 17:44:26.051841 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bf32468e-b0a7-4b81-bb49-c65a95997903-scripts\") pod \"bf32468e-b0a7-4b81-bb49-c65a95997903\" (UID: \"bf32468e-b0a7-4b81-bb49-c65a95997903\") " Jan 20 17:44:26 crc kubenswrapper[4558]: I0120 17:44:26.051887 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf32468e-b0a7-4b81-bb49-c65a95997903-config-data\") pod \"bf32468e-b0a7-4b81-bb49-c65a95997903\" (UID: \"bf32468e-b0a7-4b81-bb49-c65a95997903\") " Jan 20 17:44:26 crc kubenswrapper[4558]: I0120 17:44:26.051922 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf32468e-b0a7-4b81-bb49-c65a95997903-combined-ca-bundle\") pod \"bf32468e-b0a7-4b81-bb49-c65a95997903\" (UID: \"bf32468e-b0a7-4b81-bb49-c65a95997903\") " Jan 20 17:44:26 crc kubenswrapper[4558]: I0120 17:44:26.052459 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bf32468e-b0a7-4b81-bb49-c65a95997903-logs" (OuterVolumeSpecName: "logs") pod "bf32468e-b0a7-4b81-bb49-c65a95997903" (UID: "bf32468e-b0a7-4b81-bb49-c65a95997903"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:44:26 crc kubenswrapper[4558]: I0120 17:44:26.054055 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bf32468e-b0a7-4b81-bb49-c65a95997903-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:26 crc kubenswrapper[4558]: I0120 17:44:26.059605 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf32468e-b0a7-4b81-bb49-c65a95997903-scripts" (OuterVolumeSpecName: "scripts") pod "bf32468e-b0a7-4b81-bb49-c65a95997903" (UID: "bf32468e-b0a7-4b81-bb49-c65a95997903"). 
InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:44:26 crc kubenswrapper[4558]: I0120 17:44:26.059804 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf32468e-b0a7-4b81-bb49-c65a95997903-kube-api-access-59g5m" (OuterVolumeSpecName: "kube-api-access-59g5m") pod "bf32468e-b0a7-4b81-bb49-c65a95997903" (UID: "bf32468e-b0a7-4b81-bb49-c65a95997903"). InnerVolumeSpecName "kube-api-access-59g5m". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:44:26 crc kubenswrapper[4558]: I0120 17:44:26.104575 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf32468e-b0a7-4b81-bb49-c65a95997903-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bf32468e-b0a7-4b81-bb49-c65a95997903" (UID: "bf32468e-b0a7-4b81-bb49-c65a95997903"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:44:26 crc kubenswrapper[4558]: I0120 17:44:26.110111 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf32468e-b0a7-4b81-bb49-c65a95997903-config-data" (OuterVolumeSpecName: "config-data") pod "bf32468e-b0a7-4b81-bb49-c65a95997903" (UID: "bf32468e-b0a7-4b81-bb49-c65a95997903"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:44:26 crc kubenswrapper[4558]: I0120 17:44:26.141687 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf32468e-b0a7-4b81-bb49-c65a95997903-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "bf32468e-b0a7-4b81-bb49-c65a95997903" (UID: "bf32468e-b0a7-4b81-bb49-c65a95997903"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:44:26 crc kubenswrapper[4558]: I0120 17:44:26.143998 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf32468e-b0a7-4b81-bb49-c65a95997903-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "bf32468e-b0a7-4b81-bb49-c65a95997903" (UID: "bf32468e-b0a7-4b81-bb49-c65a95997903"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:44:26 crc kubenswrapper[4558]: I0120 17:44:26.156414 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bf32468e-b0a7-4b81-bb49-c65a95997903-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:26 crc kubenswrapper[4558]: I0120 17:44:26.156451 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/bf32468e-b0a7-4b81-bb49-c65a95997903-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:26 crc kubenswrapper[4558]: I0120 17:44:26.156467 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-59g5m\" (UniqueName: \"kubernetes.io/projected/bf32468e-b0a7-4b81-bb49-c65a95997903-kube-api-access-59g5m\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:26 crc kubenswrapper[4558]: I0120 17:44:26.156482 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bf32468e-b0a7-4b81-bb49-c65a95997903-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:26 crc kubenswrapper[4558]: I0120 17:44:26.156493 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf32468e-b0a7-4b81-bb49-c65a95997903-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:26 crc kubenswrapper[4558]: I0120 17:44:26.156501 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf32468e-b0a7-4b81-bb49-c65a95997903-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:26 crc kubenswrapper[4558]: E0120 17:44:26.297422 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f175857b081b6702705f8a9a0de77c7f1bd59b4f0ad1848d4abf509a9f1c1ae7 is running failed: container process not found" containerID="f175857b081b6702705f8a9a0de77c7f1bd59b4f0ad1848d4abf509a9f1c1ae7" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:44:26 crc kubenswrapper[4558]: E0120 17:44:26.298200 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f175857b081b6702705f8a9a0de77c7f1bd59b4f0ad1848d4abf509a9f1c1ae7 is running failed: container process not found" containerID="f175857b081b6702705f8a9a0de77c7f1bd59b4f0ad1848d4abf509a9f1c1ae7" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:44:26 crc kubenswrapper[4558]: E0120 17:44:26.298660 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f175857b081b6702705f8a9a0de77c7f1bd59b4f0ad1848d4abf509a9f1c1ae7 is running failed: container process not found" containerID="f175857b081b6702705f8a9a0de77c7f1bd59b4f0ad1848d4abf509a9f1c1ae7" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:44:26 crc kubenswrapper[4558]: E0120 17:44:26.298731 4558 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f175857b081b6702705f8a9a0de77c7f1bd59b4f0ad1848d4abf509a9f1c1ae7 is running failed: container process not found" probeType="Liveness" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="50cacddd-ebea-477f-af64-6e96a09a242e" containerName="nova-cell0-conductor-conductor" Jan 20 17:44:26 crc 
kubenswrapper[4558]: I0120 17:44:26.377780 4558 generic.go:334] "Generic (PLEG): container finished" podID="bf32468e-b0a7-4b81-bb49-c65a95997903" containerID="fc1f3875679e2cd5005493bd3bd21c2f6acc53a59f363480f76d0371a068bda4" exitCode=0 Jan 20 17:44:26 crc kubenswrapper[4558]: I0120 17:44:26.377862 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-69d7f65964-r46nm" Jan 20 17:44:26 crc kubenswrapper[4558]: I0120 17:44:26.377885 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-69d7f65964-r46nm" event={"ID":"bf32468e-b0a7-4b81-bb49-c65a95997903","Type":"ContainerDied","Data":"fc1f3875679e2cd5005493bd3bd21c2f6acc53a59f363480f76d0371a068bda4"} Jan 20 17:44:26 crc kubenswrapper[4558]: I0120 17:44:26.377972 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-69d7f65964-r46nm" event={"ID":"bf32468e-b0a7-4b81-bb49-c65a95997903","Type":"ContainerDied","Data":"4512eb627e22fd848aff2b908f742418cbcbdb935ba78319df317f4fe0107521"} Jan 20 17:44:26 crc kubenswrapper[4558]: I0120 17:44:26.378011 4558 scope.go:117] "RemoveContainer" containerID="fc1f3875679e2cd5005493bd3bd21c2f6acc53a59f363480f76d0371a068bda4" Jan 20 17:44:26 crc kubenswrapper[4558]: I0120 17:44:26.381906 4558 generic.go:334] "Generic (PLEG): container finished" podID="50cacddd-ebea-477f-af64-6e96a09a242e" containerID="f175857b081b6702705f8a9a0de77c7f1bd59b4f0ad1848d4abf509a9f1c1ae7" exitCode=1 Jan 20 17:44:26 crc kubenswrapper[4558]: I0120 17:44:26.381933 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"50cacddd-ebea-477f-af64-6e96a09a242e","Type":"ContainerDied","Data":"f175857b081b6702705f8a9a0de77c7f1bd59b4f0ad1848d4abf509a9f1c1ae7"} Jan 20 17:44:26 crc kubenswrapper[4558]: I0120 17:44:26.382816 4558 scope.go:117] "RemoveContainer" containerID="f175857b081b6702705f8a9a0de77c7f1bd59b4f0ad1848d4abf509a9f1c1ae7" Jan 20 17:44:26 crc kubenswrapper[4558]: E0120 17:44:26.383767 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-cell0-conductor-conductor\" with CrashLoopBackOff: \"back-off 40s restarting failed container=nova-cell0-conductor-conductor pod=nova-cell0-conductor-0_openstack-kuttl-tests(50cacddd-ebea-477f-af64-6e96a09a242e)\"" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="50cacddd-ebea-477f-af64-6e96a09a242e" Jan 20 17:44:26 crc kubenswrapper[4558]: I0120 17:44:26.411741 4558 scope.go:117] "RemoveContainer" containerID="1bea9ca407d948182088942a7baaa1288dbc8ed20180d840df071edc53c59c24" Jan 20 17:44:26 crc kubenswrapper[4558]: I0120 17:44:26.432139 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-69d7f65964-r46nm"] Jan 20 17:44:26 crc kubenswrapper[4558]: I0120 17:44:26.438063 4558 scope.go:117] "RemoveContainer" containerID="fc1f3875679e2cd5005493bd3bd21c2f6acc53a59f363480f76d0371a068bda4" Jan 20 17:44:26 crc kubenswrapper[4558]: I0120 17:44:26.438629 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/placement-69d7f65964-r46nm"] Jan 20 17:44:26 crc kubenswrapper[4558]: E0120 17:44:26.438674 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fc1f3875679e2cd5005493bd3bd21c2f6acc53a59f363480f76d0371a068bda4\": container with ID starting with fc1f3875679e2cd5005493bd3bd21c2f6acc53a59f363480f76d0371a068bda4 not 
found: ID does not exist" containerID="fc1f3875679e2cd5005493bd3bd21c2f6acc53a59f363480f76d0371a068bda4" Jan 20 17:44:26 crc kubenswrapper[4558]: I0120 17:44:26.438725 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fc1f3875679e2cd5005493bd3bd21c2f6acc53a59f363480f76d0371a068bda4"} err="failed to get container status \"fc1f3875679e2cd5005493bd3bd21c2f6acc53a59f363480f76d0371a068bda4\": rpc error: code = NotFound desc = could not find container \"fc1f3875679e2cd5005493bd3bd21c2f6acc53a59f363480f76d0371a068bda4\": container with ID starting with fc1f3875679e2cd5005493bd3bd21c2f6acc53a59f363480f76d0371a068bda4 not found: ID does not exist" Jan 20 17:44:26 crc kubenswrapper[4558]: I0120 17:44:26.438761 4558 scope.go:117] "RemoveContainer" containerID="1bea9ca407d948182088942a7baaa1288dbc8ed20180d840df071edc53c59c24" Jan 20 17:44:26 crc kubenswrapper[4558]: E0120 17:44:26.439118 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1bea9ca407d948182088942a7baaa1288dbc8ed20180d840df071edc53c59c24\": container with ID starting with 1bea9ca407d948182088942a7baaa1288dbc8ed20180d840df071edc53c59c24 not found: ID does not exist" containerID="1bea9ca407d948182088942a7baaa1288dbc8ed20180d840df071edc53c59c24" Jan 20 17:44:26 crc kubenswrapper[4558]: I0120 17:44:26.439145 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1bea9ca407d948182088942a7baaa1288dbc8ed20180d840df071edc53c59c24"} err="failed to get container status \"1bea9ca407d948182088942a7baaa1288dbc8ed20180d840df071edc53c59c24\": rpc error: code = NotFound desc = could not find container \"1bea9ca407d948182088942a7baaa1288dbc8ed20180d840df071edc53c59c24\": container with ID starting with 1bea9ca407d948182088942a7baaa1288dbc8ed20180d840df071edc53c59c24 not found: ID does not exist" Jan 20 17:44:26 crc kubenswrapper[4558]: I0120 17:44:26.439174 4558 scope.go:117] "RemoveContainer" containerID="696eca395715bd4cf46b8dd0ddb53ef0ad8c84c94f7e91863731528e53ac125e" Jan 20 17:44:26 crc kubenswrapper[4558]: I0120 17:44:26.578646 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf32468e-b0a7-4b81-bb49-c65a95997903" path="/var/lib/kubelet/pods/bf32468e-b0a7-4b81-bb49-c65a95997903/volumes" Jan 20 17:44:29 crc kubenswrapper[4558]: I0120 17:44:29.296592 4558 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:44:29 crc kubenswrapper[4558]: I0120 17:44:29.297649 4558 scope.go:117] "RemoveContainer" containerID="f175857b081b6702705f8a9a0de77c7f1bd59b4f0ad1848d4abf509a9f1c1ae7" Jan 20 17:44:29 crc kubenswrapper[4558]: E0120 17:44:29.298002 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-cell0-conductor-conductor\" with CrashLoopBackOff: \"back-off 40s restarting failed container=nova-cell0-conductor-conductor pod=nova-cell0-conductor-0_openstack-kuttl-tests(50cacddd-ebea-477f-af64-6e96a09a242e)\"" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="50cacddd-ebea-477f-af64-6e96a09a242e" Jan 20 17:44:30 crc kubenswrapper[4558]: I0120 17:44:30.110096 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:44:31 crc kubenswrapper[4558]: I0120 17:44:31.134220 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:44:31 crc kubenswrapper[4558]: I0120 17:44:31.136319 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:44:31 crc kubenswrapper[4558]: I0120 17:44:31.138312 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:44:31 crc kubenswrapper[4558]: I0120 17:44:31.143868 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:44:31 crc kubenswrapper[4558]: I0120 17:44:31.271832 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:44:31 crc kubenswrapper[4558]: I0120 17:44:31.272330 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="bda29809-3f10-45ff-8fea-3d72f5182e7a" containerName="ceilometer-central-agent" containerID="cri-o://668a1ae92bc40563b48efc1b28beb17af8908f733ffd3f92479647bd7d0d6282" gracePeriod=30 Jan 20 17:44:31 crc kubenswrapper[4558]: I0120 17:44:31.272421 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="bda29809-3f10-45ff-8fea-3d72f5182e7a" containerName="sg-core" containerID="cri-o://402c846f2a2392a0dd6b1cc212dd5c6cf7c9c1691b1c0f3fc55e7fb3430cf054" gracePeriod=30 Jan 20 17:44:31 crc kubenswrapper[4558]: I0120 17:44:31.272440 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="bda29809-3f10-45ff-8fea-3d72f5182e7a" containerName="ceilometer-notification-agent" containerID="cri-o://0ad6dbb2fa84d1311b6f039beff159bd8b238efc373ccc984d847df4c505d640" gracePeriod=30 Jan 20 17:44:31 crc kubenswrapper[4558]: I0120 17:44:31.272383 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="bda29809-3f10-45ff-8fea-3d72f5182e7a" containerName="proxy-httpd" containerID="cri-o://8389290c9818f3ab0ed996cf1df18a17f4bc86f2ddee6a28823c5e3d138e7667" gracePeriod=30 Jan 20 17:44:31 crc kubenswrapper[4558]: I0120 17:44:31.381017 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/neutron-7bf45b8998-2828m" podUID="afc2fbf0-3153-4896-bfc2-bb2382600893" containerName="neutron-httpd" probeResult="failure" output="Get \"https://10.217.1.66:9696/\": dial tcp 10.217.1.66:9696: connect: connection refused" Jan 20 17:44:31 crc kubenswrapper[4558]: I0120 17:44:31.476525 4558 generic.go:334] "Generic (PLEG): container finished" podID="bda29809-3f10-45ff-8fea-3d72f5182e7a" containerID="8389290c9818f3ab0ed996cf1df18a17f4bc86f2ddee6a28823c5e3d138e7667" exitCode=0 Jan 20 17:44:31 crc kubenswrapper[4558]: I0120 17:44:31.476923 4558 generic.go:334] "Generic (PLEG): container finished" podID="bda29809-3f10-45ff-8fea-3d72f5182e7a" containerID="402c846f2a2392a0dd6b1cc212dd5c6cf7c9c1691b1c0f3fc55e7fb3430cf054" exitCode=2 Jan 20 17:44:31 crc kubenswrapper[4558]: I0120 17:44:31.476570 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"bda29809-3f10-45ff-8fea-3d72f5182e7a","Type":"ContainerDied","Data":"8389290c9818f3ab0ed996cf1df18a17f4bc86f2ddee6a28823c5e3d138e7667"} Jan 20 17:44:31 crc kubenswrapper[4558]: I0120 17:44:31.477037 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" 
event={"ID":"bda29809-3f10-45ff-8fea-3d72f5182e7a","Type":"ContainerDied","Data":"402c846f2a2392a0dd6b1cc212dd5c6cf7c9c1691b1c0f3fc55e7fb3430cf054"} Jan 20 17:44:31 crc kubenswrapper[4558]: I0120 17:44:31.477425 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:44:31 crc kubenswrapper[4558]: I0120 17:44:31.483718 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:44:31 crc kubenswrapper[4558]: I0120 17:44:31.844480 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/keystone-f7d8994df-dzxmh" Jan 20 17:44:31 crc kubenswrapper[4558]: I0120 17:44:31.897817 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-7dffbf585-vg2sk"] Jan 20 17:44:31 crc kubenswrapper[4558]: I0120 17:44:31.898047 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/keystone-7dffbf585-vg2sk" podUID="8f7d7846-1b56-4ab5-a0ad-63b179a1a797" containerName="keystone-api" containerID="cri-o://e527f3739bfb93778f627c6e5f5e80d497ddd06b49ce87cb15b8ecf4e23a0bc4" gracePeriod=30 Jan 20 17:44:32 crc kubenswrapper[4558]: I0120 17:44:32.487887 4558 generic.go:334] "Generic (PLEG): container finished" podID="bda29809-3f10-45ff-8fea-3d72f5182e7a" containerID="668a1ae92bc40563b48efc1b28beb17af8908f733ffd3f92479647bd7d0d6282" exitCode=0 Jan 20 17:44:32 crc kubenswrapper[4558]: I0120 17:44:32.487932 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"bda29809-3f10-45ff-8fea-3d72f5182e7a","Type":"ContainerDied","Data":"668a1ae92bc40563b48efc1b28beb17af8908f733ffd3f92479647bd7d0d6282"} Jan 20 17:44:32 crc kubenswrapper[4558]: I0120 17:44:32.720648 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/neutron-b9f9b6874-4cxdp" Jan 20 17:44:32 crc kubenswrapper[4558]: I0120 17:44:32.793703 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-66bfc8789d-nsc64"] Jan 20 17:44:32 crc kubenswrapper[4558]: I0120 17:44:32.794025 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-66bfc8789d-nsc64" podUID="596f2da7-db14-44e3-8c93-5809b27e0cc6" containerName="neutron-api" containerID="cri-o://6f7e169a9c1c7c3746b3c5209448d3d9efd46fcac41efa29c9cdafd00ee4a9c7" gracePeriod=30 Jan 20 17:44:32 crc kubenswrapper[4558]: I0120 17:44:32.794063 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-66bfc8789d-nsc64" podUID="596f2da7-db14-44e3-8c93-5809b27e0cc6" containerName="neutron-httpd" containerID="cri-o://d2b6e31358d80bd665915aa4bc6aae644adca0fde94572c0a18379f3e0ce34e1" gracePeriod=30 Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.112268 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.112837 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/openstackclient" podUID="b5ea5c8f-cbdb-4b97-87b6-f0375393f6b3" containerName="openstackclient" containerID="cri-o://4213065b2ba71189cbc96ee29e5a13822ce35b5f3713247bb4eb5798f51eab0d" gracePeriod=2 Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.119759 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.146415 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:44:33 crc kubenswrapper[4558]: E0120 17:44:33.146961 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf32468e-b0a7-4b81-bb49-c65a95997903" containerName="placement-log" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.146975 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf32468e-b0a7-4b81-bb49-c65a95997903" containerName="placement-log" Jan 20 17:44:33 crc kubenswrapper[4558]: E0120 17:44:33.146992 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5ea5c8f-cbdb-4b97-87b6-f0375393f6b3" containerName="openstackclient" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.147000 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5ea5c8f-cbdb-4b97-87b6-f0375393f6b3" containerName="openstackclient" Jan 20 17:44:33 crc kubenswrapper[4558]: E0120 17:44:33.147012 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf32468e-b0a7-4b81-bb49-c65a95997903" containerName="placement-api" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.147019 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf32468e-b0a7-4b81-bb49-c65a95997903" containerName="placement-api" Jan 20 17:44:33 crc kubenswrapper[4558]: E0120 17:44:33.147049 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="432ec648-18a7-416a-85cc-409a30976a67" containerName="barbican-api" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.147055 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="432ec648-18a7-416a-85cc-409a30976a67" containerName="barbican-api" Jan 20 17:44:33 crc kubenswrapper[4558]: E0120 17:44:33.147071 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="432ec648-18a7-416a-85cc-409a30976a67" containerName="barbican-api-log" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.147076 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="432ec648-18a7-416a-85cc-409a30976a67" containerName="barbican-api-log" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.147254 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="432ec648-18a7-416a-85cc-409a30976a67" containerName="barbican-api" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.147268 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="bf32468e-b0a7-4b81-bb49-c65a95997903" containerName="placement-api" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.147279 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="432ec648-18a7-416a-85cc-409a30976a67" containerName="barbican-api-log" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.147289 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="bf32468e-b0a7-4b81-bb49-c65a95997903" containerName="placement-log" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.147297 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b5ea5c8f-cbdb-4b97-87b6-f0375393f6b3" containerName="openstackclient" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.148033 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstackclient" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.154589 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.164515 4558 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack-kuttl-tests/openstackclient" oldPodUID="b5ea5c8f-cbdb-4b97-87b6-f0375393f6b3" podUID="2bfab6e6-f4a2-4948-9715-43c6f13d88fd" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.197950 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.238663 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bda29809-3f10-45ff-8fea-3d72f5182e7a-combined-ca-bundle\") pod \"bda29809-3f10-45ff-8fea-3d72f5182e7a\" (UID: \"bda29809-3f10-45ff-8fea-3d72f5182e7a\") " Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.238739 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/bda29809-3f10-45ff-8fea-3d72f5182e7a-ceilometer-tls-certs\") pod \"bda29809-3f10-45ff-8fea-3d72f5182e7a\" (UID: \"bda29809-3f10-45ff-8fea-3d72f5182e7a\") " Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.238766 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sn479\" (UniqueName: \"kubernetes.io/projected/bda29809-3f10-45ff-8fea-3d72f5182e7a-kube-api-access-sn479\") pod \"bda29809-3f10-45ff-8fea-3d72f5182e7a\" (UID: \"bda29809-3f10-45ff-8fea-3d72f5182e7a\") " Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.238916 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bda29809-3f10-45ff-8fea-3d72f5182e7a-config-data\") pod \"bda29809-3f10-45ff-8fea-3d72f5182e7a\" (UID: \"bda29809-3f10-45ff-8fea-3d72f5182e7a\") " Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.238962 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bda29809-3f10-45ff-8fea-3d72f5182e7a-run-httpd\") pod \"bda29809-3f10-45ff-8fea-3d72f5182e7a\" (UID: \"bda29809-3f10-45ff-8fea-3d72f5182e7a\") " Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.239077 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/bda29809-3f10-45ff-8fea-3d72f5182e7a-sg-core-conf-yaml\") pod \"bda29809-3f10-45ff-8fea-3d72f5182e7a\" (UID: \"bda29809-3f10-45ff-8fea-3d72f5182e7a\") " Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.239189 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bda29809-3f10-45ff-8fea-3d72f5182e7a-log-httpd\") pod \"bda29809-3f10-45ff-8fea-3d72f5182e7a\" (UID: \"bda29809-3f10-45ff-8fea-3d72f5182e7a\") " Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.239329 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bda29809-3f10-45ff-8fea-3d72f5182e7a-scripts\") pod \"bda29809-3f10-45ff-8fea-3d72f5182e7a\" (UID: \"bda29809-3f10-45ff-8fea-3d72f5182e7a\") " Jan 20 17:44:33 crc 
kubenswrapper[4558]: I0120 17:44:33.239909 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/2bfab6e6-f4a2-4948-9715-43c6f13d88fd-openstack-config-secret\") pod \"openstackclient\" (UID: \"2bfab6e6-f4a2-4948-9715-43c6f13d88fd\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.240126 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2bfab6e6-f4a2-4948-9715-43c6f13d88fd-combined-ca-bundle\") pod \"openstackclient\" (UID: \"2bfab6e6-f4a2-4948-9715-43c6f13d88fd\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.240154 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rf49t\" (UniqueName: \"kubernetes.io/projected/2bfab6e6-f4a2-4948-9715-43c6f13d88fd-kube-api-access-rf49t\") pod \"openstackclient\" (UID: \"2bfab6e6-f4a2-4948-9715-43c6f13d88fd\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.240273 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/2bfab6e6-f4a2-4948-9715-43c6f13d88fd-openstack-config\") pod \"openstackclient\" (UID: \"2bfab6e6-f4a2-4948-9715-43c6f13d88fd\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.244074 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bda29809-3f10-45ff-8fea-3d72f5182e7a-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "bda29809-3f10-45ff-8fea-3d72f5182e7a" (UID: "bda29809-3f10-45ff-8fea-3d72f5182e7a"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.248966 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bda29809-3f10-45ff-8fea-3d72f5182e7a-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "bda29809-3f10-45ff-8fea-3d72f5182e7a" (UID: "bda29809-3f10-45ff-8fea-3d72f5182e7a"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.261300 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bda29809-3f10-45ff-8fea-3d72f5182e7a-scripts" (OuterVolumeSpecName: "scripts") pod "bda29809-3f10-45ff-8fea-3d72f5182e7a" (UID: "bda29809-3f10-45ff-8fea-3d72f5182e7a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.262333 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bda29809-3f10-45ff-8fea-3d72f5182e7a-kube-api-access-sn479" (OuterVolumeSpecName: "kube-api-access-sn479") pod "bda29809-3f10-45ff-8fea-3d72f5182e7a" (UID: "bda29809-3f10-45ff-8fea-3d72f5182e7a"). InnerVolumeSpecName "kube-api-access-sn479". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.297510 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bda29809-3f10-45ff-8fea-3d72f5182e7a-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "bda29809-3f10-45ff-8fea-3d72f5182e7a" (UID: "bda29809-3f10-45ff-8fea-3d72f5182e7a"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.310437 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bda29809-3f10-45ff-8fea-3d72f5182e7a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bda29809-3f10-45ff-8fea-3d72f5182e7a" (UID: "bda29809-3f10-45ff-8fea-3d72f5182e7a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.324871 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bda29809-3f10-45ff-8fea-3d72f5182e7a-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "bda29809-3f10-45ff-8fea-3d72f5182e7a" (UID: "bda29809-3f10-45ff-8fea-3d72f5182e7a"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.326458 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_neutron-7bf45b8998-2828m_afc2fbf0-3153-4896-bfc2-bb2382600893/neutron-api/0.log" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.326644 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-7bf45b8998-2828m" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.342064 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/afc2fbf0-3153-4896-bfc2-bb2382600893-internal-tls-certs\") pod \"afc2fbf0-3153-4896-bfc2-bb2382600893\" (UID: \"afc2fbf0-3153-4896-bfc2-bb2382600893\") " Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.342151 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/afc2fbf0-3153-4896-bfc2-bb2382600893-combined-ca-bundle\") pod \"afc2fbf0-3153-4896-bfc2-bb2382600893\" (UID: \"afc2fbf0-3153-4896-bfc2-bb2382600893\") " Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.342248 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/afc2fbf0-3153-4896-bfc2-bb2382600893-httpd-config\") pod \"afc2fbf0-3153-4896-bfc2-bb2382600893\" (UID: \"afc2fbf0-3153-4896-bfc2-bb2382600893\") " Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.342360 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4t6pg\" (UniqueName: \"kubernetes.io/projected/afc2fbf0-3153-4896-bfc2-bb2382600893-kube-api-access-4t6pg\") pod \"afc2fbf0-3153-4896-bfc2-bb2382600893\" (UID: \"afc2fbf0-3153-4896-bfc2-bb2382600893\") " Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.342396 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/afc2fbf0-3153-4896-bfc2-bb2382600893-ovndb-tls-certs\") pod 
\"afc2fbf0-3153-4896-bfc2-bb2382600893\" (UID: \"afc2fbf0-3153-4896-bfc2-bb2382600893\") " Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.342446 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/afc2fbf0-3153-4896-bfc2-bb2382600893-public-tls-certs\") pod \"afc2fbf0-3153-4896-bfc2-bb2382600893\" (UID: \"afc2fbf0-3153-4896-bfc2-bb2382600893\") " Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.342489 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/afc2fbf0-3153-4896-bfc2-bb2382600893-config\") pod \"afc2fbf0-3153-4896-bfc2-bb2382600893\" (UID: \"afc2fbf0-3153-4896-bfc2-bb2382600893\") " Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.343239 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/2bfab6e6-f4a2-4948-9715-43c6f13d88fd-openstack-config-secret\") pod \"openstackclient\" (UID: \"2bfab6e6-f4a2-4948-9715-43c6f13d88fd\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.343325 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2bfab6e6-f4a2-4948-9715-43c6f13d88fd-combined-ca-bundle\") pod \"openstackclient\" (UID: \"2bfab6e6-f4a2-4948-9715-43c6f13d88fd\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.343353 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rf49t\" (UniqueName: \"kubernetes.io/projected/2bfab6e6-f4a2-4948-9715-43c6f13d88fd-kube-api-access-rf49t\") pod \"openstackclient\" (UID: \"2bfab6e6-f4a2-4948-9715-43c6f13d88fd\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.343394 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/2bfab6e6-f4a2-4948-9715-43c6f13d88fd-openstack-config\") pod \"openstackclient\" (UID: \"2bfab6e6-f4a2-4948-9715-43c6f13d88fd\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.343481 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bda29809-3f10-45ff-8fea-3d72f5182e7a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.343493 4558 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/bda29809-3f10-45ff-8fea-3d72f5182e7a-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.343511 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sn479\" (UniqueName: \"kubernetes.io/projected/bda29809-3f10-45ff-8fea-3d72f5182e7a-kube-api-access-sn479\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.343523 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bda29809-3f10-45ff-8fea-3d72f5182e7a-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.343533 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" 
(UniqueName: \"kubernetes.io/secret/bda29809-3f10-45ff-8fea-3d72f5182e7a-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.343542 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bda29809-3f10-45ff-8fea-3d72f5182e7a-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.343551 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bda29809-3f10-45ff-8fea-3d72f5182e7a-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.345819 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/2bfab6e6-f4a2-4948-9715-43c6f13d88fd-openstack-config\") pod \"openstackclient\" (UID: \"2bfab6e6-f4a2-4948-9715-43c6f13d88fd\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.353268 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/afc2fbf0-3153-4896-bfc2-bb2382600893-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "afc2fbf0-3153-4896-bfc2-bb2382600893" (UID: "afc2fbf0-3153-4896-bfc2-bb2382600893"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.353536 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bda29809-3f10-45ff-8fea-3d72f5182e7a-config-data" (OuterVolumeSpecName: "config-data") pod "bda29809-3f10-45ff-8fea-3d72f5182e7a" (UID: "bda29809-3f10-45ff-8fea-3d72f5182e7a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.356216 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2bfab6e6-f4a2-4948-9715-43c6f13d88fd-combined-ca-bundle\") pod \"openstackclient\" (UID: \"2bfab6e6-f4a2-4948-9715-43c6f13d88fd\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.357521 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/2bfab6e6-f4a2-4948-9715-43c6f13d88fd-openstack-config-secret\") pod \"openstackclient\" (UID: \"2bfab6e6-f4a2-4948-9715-43c6f13d88fd\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.366805 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rf49t\" (UniqueName: \"kubernetes.io/projected/2bfab6e6-f4a2-4948-9715-43c6f13d88fd-kube-api-access-rf49t\") pod \"openstackclient\" (UID: \"2bfab6e6-f4a2-4948-9715-43c6f13d88fd\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.372348 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/afc2fbf0-3153-4896-bfc2-bb2382600893-kube-api-access-4t6pg" (OuterVolumeSpecName: "kube-api-access-4t6pg") pod "afc2fbf0-3153-4896-bfc2-bb2382600893" (UID: "afc2fbf0-3153-4896-bfc2-bb2382600893"). InnerVolumeSpecName "kube-api-access-4t6pg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.402234 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/afc2fbf0-3153-4896-bfc2-bb2382600893-config" (OuterVolumeSpecName: "config") pod "afc2fbf0-3153-4896-bfc2-bb2382600893" (UID: "afc2fbf0-3153-4896-bfc2-bb2382600893"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.405029 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/afc2fbf0-3153-4896-bfc2-bb2382600893-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "afc2fbf0-3153-4896-bfc2-bb2382600893" (UID: "afc2fbf0-3153-4896-bfc2-bb2382600893"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.407674 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/afc2fbf0-3153-4896-bfc2-bb2382600893-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "afc2fbf0-3153-4896-bfc2-bb2382600893" (UID: "afc2fbf0-3153-4896-bfc2-bb2382600893"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.411034 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/afc2fbf0-3153-4896-bfc2-bb2382600893-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "afc2fbf0-3153-4896-bfc2-bb2382600893" (UID: "afc2fbf0-3153-4896-bfc2-bb2382600893"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.426372 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/afc2fbf0-3153-4896-bfc2-bb2382600893-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "afc2fbf0-3153-4896-bfc2-bb2382600893" (UID: "afc2fbf0-3153-4896-bfc2-bb2382600893"). InnerVolumeSpecName "ovndb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.444669 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/afc2fbf0-3153-4896-bfc2-bb2382600893-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.444700 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/afc2fbf0-3153-4896-bfc2-bb2382600893-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.444711 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/afc2fbf0-3153-4896-bfc2-bb2382600893-httpd-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.444725 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4t6pg\" (UniqueName: \"kubernetes.io/projected/afc2fbf0-3153-4896-bfc2-bb2382600893-kube-api-access-4t6pg\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.444737 4558 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/afc2fbf0-3153-4896-bfc2-bb2382600893-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.444746 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/afc2fbf0-3153-4896-bfc2-bb2382600893-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.444754 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/afc2fbf0-3153-4896-bfc2-bb2382600893-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.444766 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bda29809-3f10-45ff-8fea-3d72f5182e7a-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.501977 4558 generic.go:334] "Generic (PLEG): container finished" podID="596f2da7-db14-44e3-8c93-5809b27e0cc6" containerID="d2b6e31358d80bd665915aa4bc6aae644adca0fde94572c0a18379f3e0ce34e1" exitCode=0 Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.502051 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-66bfc8789d-nsc64" event={"ID":"596f2da7-db14-44e3-8c93-5809b27e0cc6","Type":"ContainerDied","Data":"d2b6e31358d80bd665915aa4bc6aae644adca0fde94572c0a18379f3e0ce34e1"} Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.505726 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_neutron-7bf45b8998-2828m_afc2fbf0-3153-4896-bfc2-bb2382600893/neutron-api/0.log" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.505765 4558 generic.go:334] "Generic (PLEG): container finished" podID="afc2fbf0-3153-4896-bfc2-bb2382600893" containerID="8a25668d015893accd83bc05a496d2095af9e2191988393567d5b69588227076" exitCode=137 Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.505811 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-7bf45b8998-2828m" 
event={"ID":"afc2fbf0-3153-4896-bfc2-bb2382600893","Type":"ContainerDied","Data":"8a25668d015893accd83bc05a496d2095af9e2191988393567d5b69588227076"} Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.505835 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-7bf45b8998-2828m" event={"ID":"afc2fbf0-3153-4896-bfc2-bb2382600893","Type":"ContainerDied","Data":"ae80761ff7322eea37b8609645a45d0abd1c5268d3a0acf9da88e996c4933ea6"} Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.505853 4558 scope.go:117] "RemoveContainer" containerID="cf3321bb362cd3f716ec0caf474addff8cb6ca6125cea6c7cf914ee7d57f8965" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.505987 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-7bf45b8998-2828m" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.508865 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstackclient" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.511082 4558 generic.go:334] "Generic (PLEG): container finished" podID="bda29809-3f10-45ff-8fea-3d72f5182e7a" containerID="0ad6dbb2fa84d1311b6f039beff159bd8b238efc373ccc984d847df4c505d640" exitCode=0 Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.511239 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.511336 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"bda29809-3f10-45ff-8fea-3d72f5182e7a","Type":"ContainerDied","Data":"0ad6dbb2fa84d1311b6f039beff159bd8b238efc373ccc984d847df4c505d640"} Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.511450 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"bda29809-3f10-45ff-8fea-3d72f5182e7a","Type":"ContainerDied","Data":"bb74eaf3d9d63fe4bbfb3bcb86df029a7b879488573a5e69be6648db054cfb49"} Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.542188 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-7bf45b8998-2828m"] Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.564991 4558 scope.go:117] "RemoveContainer" containerID="8a25668d015893accd83bc05a496d2095af9e2191988393567d5b69588227076" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.569455 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-7bf45b8998-2828m"] Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.596607 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.609792 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.618412 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:44:33 crc kubenswrapper[4558]: E0120 17:44:33.618865 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bda29809-3f10-45ff-8fea-3d72f5182e7a" containerName="proxy-httpd" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.618885 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="bda29809-3f10-45ff-8fea-3d72f5182e7a" containerName="proxy-httpd" Jan 20 17:44:33 crc kubenswrapper[4558]: E0120 17:44:33.618910 4558 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="afc2fbf0-3153-4896-bfc2-bb2382600893" containerName="neutron-api" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.618925 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="afc2fbf0-3153-4896-bfc2-bb2382600893" containerName="neutron-api" Jan 20 17:44:33 crc kubenswrapper[4558]: E0120 17:44:33.618935 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bda29809-3f10-45ff-8fea-3d72f5182e7a" containerName="sg-core" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.618942 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="bda29809-3f10-45ff-8fea-3d72f5182e7a" containerName="sg-core" Jan 20 17:44:33 crc kubenswrapper[4558]: E0120 17:44:33.618965 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bda29809-3f10-45ff-8fea-3d72f5182e7a" containerName="ceilometer-central-agent" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.618972 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="bda29809-3f10-45ff-8fea-3d72f5182e7a" containerName="ceilometer-central-agent" Jan 20 17:44:33 crc kubenswrapper[4558]: E0120 17:44:33.618986 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bda29809-3f10-45ff-8fea-3d72f5182e7a" containerName="ceilometer-notification-agent" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.618994 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="bda29809-3f10-45ff-8fea-3d72f5182e7a" containerName="ceilometer-notification-agent" Jan 20 17:44:33 crc kubenswrapper[4558]: E0120 17:44:33.619025 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="afc2fbf0-3153-4896-bfc2-bb2382600893" containerName="neutron-httpd" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.619031 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="afc2fbf0-3153-4896-bfc2-bb2382600893" containerName="neutron-httpd" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.619228 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="bda29809-3f10-45ff-8fea-3d72f5182e7a" containerName="proxy-httpd" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.619251 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="afc2fbf0-3153-4896-bfc2-bb2382600893" containerName="neutron-api" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.619259 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="afc2fbf0-3153-4896-bfc2-bb2382600893" containerName="neutron-httpd" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.619275 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="bda29809-3f10-45ff-8fea-3d72f5182e7a" containerName="ceilometer-central-agent" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.619287 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="bda29809-3f10-45ff-8fea-3d72f5182e7a" containerName="ceilometer-notification-agent" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.619296 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="bda29809-3f10-45ff-8fea-3d72f5182e7a" containerName="sg-core" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.620891 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.624016 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ceilometer-internal-svc" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.624361 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.624535 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.640854 4558 scope.go:117] "RemoveContainer" containerID="cf3321bb362cd3f716ec0caf474addff8cb6ca6125cea6c7cf914ee7d57f8965" Jan 20 17:44:33 crc kubenswrapper[4558]: E0120 17:44:33.647264 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cf3321bb362cd3f716ec0caf474addff8cb6ca6125cea6c7cf914ee7d57f8965\": container with ID starting with cf3321bb362cd3f716ec0caf474addff8cb6ca6125cea6c7cf914ee7d57f8965 not found: ID does not exist" containerID="cf3321bb362cd3f716ec0caf474addff8cb6ca6125cea6c7cf914ee7d57f8965" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.647298 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cf3321bb362cd3f716ec0caf474addff8cb6ca6125cea6c7cf914ee7d57f8965"} err="failed to get container status \"cf3321bb362cd3f716ec0caf474addff8cb6ca6125cea6c7cf914ee7d57f8965\": rpc error: code = NotFound desc = could not find container \"cf3321bb362cd3f716ec0caf474addff8cb6ca6125cea6c7cf914ee7d57f8965\": container with ID starting with cf3321bb362cd3f716ec0caf474addff8cb6ca6125cea6c7cf914ee7d57f8965 not found: ID does not exist" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.647319 4558 scope.go:117] "RemoveContainer" containerID="8a25668d015893accd83bc05a496d2095af9e2191988393567d5b69588227076" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.649696 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/53683629-694a-4431-9fed-acc76d3f7cd3-run-httpd\") pod \"ceilometer-0\" (UID: \"53683629-694a-4431-9fed-acc76d3f7cd3\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.649729 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53683629-694a-4431-9fed-acc76d3f7cd3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"53683629-694a-4431-9fed-acc76d3f7cd3\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.649807 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/53683629-694a-4431-9fed-acc76d3f7cd3-scripts\") pod \"ceilometer-0\" (UID: \"53683629-694a-4431-9fed-acc76d3f7cd3\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.649845 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/53683629-694a-4431-9fed-acc76d3f7cd3-config-data\") pod \"ceilometer-0\" (UID: \"53683629-694a-4431-9fed-acc76d3f7cd3\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 
17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.649888 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/53683629-694a-4431-9fed-acc76d3f7cd3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"53683629-694a-4431-9fed-acc76d3f7cd3\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.649909 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8zlng\" (UniqueName: \"kubernetes.io/projected/53683629-694a-4431-9fed-acc76d3f7cd3-kube-api-access-8zlng\") pod \"ceilometer-0\" (UID: \"53683629-694a-4431-9fed-acc76d3f7cd3\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.649945 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/53683629-694a-4431-9fed-acc76d3f7cd3-log-httpd\") pod \"ceilometer-0\" (UID: \"53683629-694a-4431-9fed-acc76d3f7cd3\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.649967 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/53683629-694a-4431-9fed-acc76d3f7cd3-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"53683629-694a-4431-9fed-acc76d3f7cd3\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.652296 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:44:33 crc kubenswrapper[4558]: E0120 17:44:33.659770 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8a25668d015893accd83bc05a496d2095af9e2191988393567d5b69588227076\": container with ID starting with 8a25668d015893accd83bc05a496d2095af9e2191988393567d5b69588227076 not found: ID does not exist" containerID="8a25668d015893accd83bc05a496d2095af9e2191988393567d5b69588227076" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.659936 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8a25668d015893accd83bc05a496d2095af9e2191988393567d5b69588227076"} err="failed to get container status \"8a25668d015893accd83bc05a496d2095af9e2191988393567d5b69588227076\": rpc error: code = NotFound desc = could not find container \"8a25668d015893accd83bc05a496d2095af9e2191988393567d5b69588227076\": container with ID starting with 8a25668d015893accd83bc05a496d2095af9e2191988393567d5b69588227076 not found: ID does not exist" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.660046 4558 scope.go:117] "RemoveContainer" containerID="8389290c9818f3ab0ed996cf1df18a17f4bc86f2ddee6a28823c5e3d138e7667" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.700224 4558 scope.go:117] "RemoveContainer" containerID="402c846f2a2392a0dd6b1cc212dd5c6cf7c9c1691b1c0f3fc55e7fb3430cf054" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.728397 4558 scope.go:117] "RemoveContainer" containerID="0ad6dbb2fa84d1311b6f039beff159bd8b238efc373ccc984d847df4c505d640" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.751065 4558 scope.go:117] "RemoveContainer" containerID="668a1ae92bc40563b48efc1b28beb17af8908f733ffd3f92479647bd7d0d6282" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 
17:44:33.753627 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/53683629-694a-4431-9fed-acc76d3f7cd3-config-data\") pod \"ceilometer-0\" (UID: \"53683629-694a-4431-9fed-acc76d3f7cd3\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.753724 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/53683629-694a-4431-9fed-acc76d3f7cd3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"53683629-694a-4431-9fed-acc76d3f7cd3\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.753770 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8zlng\" (UniqueName: \"kubernetes.io/projected/53683629-694a-4431-9fed-acc76d3f7cd3-kube-api-access-8zlng\") pod \"ceilometer-0\" (UID: \"53683629-694a-4431-9fed-acc76d3f7cd3\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.753828 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/53683629-694a-4431-9fed-acc76d3f7cd3-log-httpd\") pod \"ceilometer-0\" (UID: \"53683629-694a-4431-9fed-acc76d3f7cd3\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.753853 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/53683629-694a-4431-9fed-acc76d3f7cd3-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"53683629-694a-4431-9fed-acc76d3f7cd3\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.753922 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/53683629-694a-4431-9fed-acc76d3f7cd3-run-httpd\") pod \"ceilometer-0\" (UID: \"53683629-694a-4431-9fed-acc76d3f7cd3\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.753949 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53683629-694a-4431-9fed-acc76d3f7cd3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"53683629-694a-4431-9fed-acc76d3f7cd3\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.754079 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/53683629-694a-4431-9fed-acc76d3f7cd3-scripts\") pod \"ceilometer-0\" (UID: \"53683629-694a-4431-9fed-acc76d3f7cd3\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.756008 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/53683629-694a-4431-9fed-acc76d3f7cd3-run-httpd\") pod \"ceilometer-0\" (UID: \"53683629-694a-4431-9fed-acc76d3f7cd3\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.756761 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/53683629-694a-4431-9fed-acc76d3f7cd3-log-httpd\") pod \"ceilometer-0\" (UID: \"53683629-694a-4431-9fed-acc76d3f7cd3\") " 
pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.759462 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/53683629-694a-4431-9fed-acc76d3f7cd3-scripts\") pod \"ceilometer-0\" (UID: \"53683629-694a-4431-9fed-acc76d3f7cd3\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.761904 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/53683629-694a-4431-9fed-acc76d3f7cd3-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"53683629-694a-4431-9fed-acc76d3f7cd3\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.763642 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/53683629-694a-4431-9fed-acc76d3f7cd3-config-data\") pod \"ceilometer-0\" (UID: \"53683629-694a-4431-9fed-acc76d3f7cd3\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.767981 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53683629-694a-4431-9fed-acc76d3f7cd3-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"53683629-694a-4431-9fed-acc76d3f7cd3\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.769639 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/53683629-694a-4431-9fed-acc76d3f7cd3-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"53683629-694a-4431-9fed-acc76d3f7cd3\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.772368 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8zlng\" (UniqueName: \"kubernetes.io/projected/53683629-694a-4431-9fed-acc76d3f7cd3-kube-api-access-8zlng\") pod \"ceilometer-0\" (UID: \"53683629-694a-4431-9fed-acc76d3f7cd3\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.785343 4558 scope.go:117] "RemoveContainer" containerID="8389290c9818f3ab0ed996cf1df18a17f4bc86f2ddee6a28823c5e3d138e7667" Jan 20 17:44:33 crc kubenswrapper[4558]: E0120 17:44:33.786046 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8389290c9818f3ab0ed996cf1df18a17f4bc86f2ddee6a28823c5e3d138e7667\": container with ID starting with 8389290c9818f3ab0ed996cf1df18a17f4bc86f2ddee6a28823c5e3d138e7667 not found: ID does not exist" containerID="8389290c9818f3ab0ed996cf1df18a17f4bc86f2ddee6a28823c5e3d138e7667" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.786086 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8389290c9818f3ab0ed996cf1df18a17f4bc86f2ddee6a28823c5e3d138e7667"} err="failed to get container status \"8389290c9818f3ab0ed996cf1df18a17f4bc86f2ddee6a28823c5e3d138e7667\": rpc error: code = NotFound desc = could not find container \"8389290c9818f3ab0ed996cf1df18a17f4bc86f2ddee6a28823c5e3d138e7667\": container with ID starting with 8389290c9818f3ab0ed996cf1df18a17f4bc86f2ddee6a28823c5e3d138e7667 not found: ID does not exist" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.786110 4558 scope.go:117] "RemoveContainer" 
containerID="402c846f2a2392a0dd6b1cc212dd5c6cf7c9c1691b1c0f3fc55e7fb3430cf054" Jan 20 17:44:33 crc kubenswrapper[4558]: E0120 17:44:33.786471 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"402c846f2a2392a0dd6b1cc212dd5c6cf7c9c1691b1c0f3fc55e7fb3430cf054\": container with ID starting with 402c846f2a2392a0dd6b1cc212dd5c6cf7c9c1691b1c0f3fc55e7fb3430cf054 not found: ID does not exist" containerID="402c846f2a2392a0dd6b1cc212dd5c6cf7c9c1691b1c0f3fc55e7fb3430cf054" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.786515 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"402c846f2a2392a0dd6b1cc212dd5c6cf7c9c1691b1c0f3fc55e7fb3430cf054"} err="failed to get container status \"402c846f2a2392a0dd6b1cc212dd5c6cf7c9c1691b1c0f3fc55e7fb3430cf054\": rpc error: code = NotFound desc = could not find container \"402c846f2a2392a0dd6b1cc212dd5c6cf7c9c1691b1c0f3fc55e7fb3430cf054\": container with ID starting with 402c846f2a2392a0dd6b1cc212dd5c6cf7c9c1691b1c0f3fc55e7fb3430cf054 not found: ID does not exist" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.786542 4558 scope.go:117] "RemoveContainer" containerID="0ad6dbb2fa84d1311b6f039beff159bd8b238efc373ccc984d847df4c505d640" Jan 20 17:44:33 crc kubenswrapper[4558]: E0120 17:44:33.786890 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0ad6dbb2fa84d1311b6f039beff159bd8b238efc373ccc984d847df4c505d640\": container with ID starting with 0ad6dbb2fa84d1311b6f039beff159bd8b238efc373ccc984d847df4c505d640 not found: ID does not exist" containerID="0ad6dbb2fa84d1311b6f039beff159bd8b238efc373ccc984d847df4c505d640" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.786968 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0ad6dbb2fa84d1311b6f039beff159bd8b238efc373ccc984d847df4c505d640"} err="failed to get container status \"0ad6dbb2fa84d1311b6f039beff159bd8b238efc373ccc984d847df4c505d640\": rpc error: code = NotFound desc = could not find container \"0ad6dbb2fa84d1311b6f039beff159bd8b238efc373ccc984d847df4c505d640\": container with ID starting with 0ad6dbb2fa84d1311b6f039beff159bd8b238efc373ccc984d847df4c505d640 not found: ID does not exist" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.787002 4558 scope.go:117] "RemoveContainer" containerID="668a1ae92bc40563b48efc1b28beb17af8908f733ffd3f92479647bd7d0d6282" Jan 20 17:44:33 crc kubenswrapper[4558]: E0120 17:44:33.787992 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"668a1ae92bc40563b48efc1b28beb17af8908f733ffd3f92479647bd7d0d6282\": container with ID starting with 668a1ae92bc40563b48efc1b28beb17af8908f733ffd3f92479647bd7d0d6282 not found: ID does not exist" containerID="668a1ae92bc40563b48efc1b28beb17af8908f733ffd3f92479647bd7d0d6282" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.788026 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"668a1ae92bc40563b48efc1b28beb17af8908f733ffd3f92479647bd7d0d6282"} err="failed to get container status \"668a1ae92bc40563b48efc1b28beb17af8908f733ffd3f92479647bd7d0d6282\": rpc error: code = NotFound desc = could not find container \"668a1ae92bc40563b48efc1b28beb17af8908f733ffd3f92479647bd7d0d6282\": container with ID starting with 
668a1ae92bc40563b48efc1b28beb17af8908f733ffd3f92479647bd7d0d6282 not found: ID does not exist" Jan 20 17:44:33 crc kubenswrapper[4558]: I0120 17:44:33.955549 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:44:34 crc kubenswrapper[4558]: I0120 17:44:34.096450 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:44:34 crc kubenswrapper[4558]: W0120 17:44:34.101827 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2bfab6e6_f4a2_4948_9715_43c6f13d88fd.slice/crio-8efc5cf80c9939ae2c1c92d82e0f20a76f5c5cc57c82219242a3e9ee620c5962 WatchSource:0}: Error finding container 8efc5cf80c9939ae2c1c92d82e0f20a76f5c5cc57c82219242a3e9ee620c5962: Status 404 returned error can't find the container with id 8efc5cf80c9939ae2c1c92d82e0f20a76f5c5cc57c82219242a3e9ee620c5962 Jan 20 17:44:34 crc kubenswrapper[4558]: W0120 17:44:34.424864 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod53683629_694a_4431_9fed_acc76d3f7cd3.slice/crio-5ea8b20d7425543b777b1bb32feb285c6be726f17128048186682846afe6d667 WatchSource:0}: Error finding container 5ea8b20d7425543b777b1bb32feb285c6be726f17128048186682846afe6d667: Status 404 returned error can't find the container with id 5ea8b20d7425543b777b1bb32feb285c6be726f17128048186682846afe6d667 Jan 20 17:44:34 crc kubenswrapper[4558]: I0120 17:44:34.429148 4558 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 20 17:44:34 crc kubenswrapper[4558]: I0120 17:44:34.429580 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:44:34 crc kubenswrapper[4558]: I0120 17:44:34.521057 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"53683629-694a-4431-9fed-acc76d3f7cd3","Type":"ContainerStarted","Data":"5ea8b20d7425543b777b1bb32feb285c6be726f17128048186682846afe6d667"} Jan 20 17:44:34 crc kubenswrapper[4558]: I0120 17:44:34.523928 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstackclient" event={"ID":"2bfab6e6-f4a2-4948-9715-43c6f13d88fd","Type":"ContainerStarted","Data":"dfb7f1325b9ebbf067b631d2aff33318d0cc9724f90cfc14dfab1a1ce2b6b6da"} Jan 20 17:44:34 crc kubenswrapper[4558]: I0120 17:44:34.523976 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstackclient" event={"ID":"2bfab6e6-f4a2-4948-9715-43c6f13d88fd","Type":"ContainerStarted","Data":"8efc5cf80c9939ae2c1c92d82e0f20a76f5c5cc57c82219242a3e9ee620c5962"} Jan 20 17:44:34 crc kubenswrapper[4558]: I0120 17:44:34.575623 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="afc2fbf0-3153-4896-bfc2-bb2382600893" path="/var/lib/kubelet/pods/afc2fbf0-3153-4896-bfc2-bb2382600893/volumes" Jan 20 17:44:34 crc kubenswrapper[4558]: I0120 17:44:34.576438 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bda29809-3f10-45ff-8fea-3d72f5182e7a" path="/var/lib/kubelet/pods/bda29809-3f10-45ff-8fea-3d72f5182e7a/volumes" Jan 20 17:44:35 crc kubenswrapper[4558]: I0120 17:44:35.369972 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstackclient" Jan 20 17:44:35 crc kubenswrapper[4558]: I0120 17:44:35.392445 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-7dffbf585-vg2sk" Jan 20 17:44:35 crc kubenswrapper[4558]: I0120 17:44:35.417399 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/openstackclient" podStartSLOduration=2.417376345 podStartE2EDuration="2.417376345s" podCreationTimestamp="2026-01-20 17:44:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:44:34.547316353 +0000 UTC m=+3768.307654320" watchObservedRunningTime="2026-01-20 17:44:35.417376345 +0000 UTC m=+3769.177714311" Jan 20 17:44:35 crc kubenswrapper[4558]: I0120 17:44:35.493822 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b2dvl\" (UniqueName: \"kubernetes.io/projected/8f7d7846-1b56-4ab5-a0ad-63b179a1a797-kube-api-access-b2dvl\") pod \"8f7d7846-1b56-4ab5-a0ad-63b179a1a797\" (UID: \"8f7d7846-1b56-4ab5-a0ad-63b179a1a797\") " Jan 20 17:44:35 crc kubenswrapper[4558]: I0120 17:44:35.494222 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8f7d7846-1b56-4ab5-a0ad-63b179a1a797-fernet-keys\") pod \"8f7d7846-1b56-4ab5-a0ad-63b179a1a797\" (UID: \"8f7d7846-1b56-4ab5-a0ad-63b179a1a797\") " Jan 20 17:44:35 crc kubenswrapper[4558]: I0120 17:44:35.494274 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5ea5c8f-cbdb-4b97-87b6-f0375393f6b3-combined-ca-bundle\") pod \"b5ea5c8f-cbdb-4b97-87b6-f0375393f6b3\" (UID: \"b5ea5c8f-cbdb-4b97-87b6-f0375393f6b3\") " Jan 20 17:44:35 crc kubenswrapper[4558]: I0120 17:44:35.494315 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8f7d7846-1b56-4ab5-a0ad-63b179a1a797-public-tls-certs\") pod \"8f7d7846-1b56-4ab5-a0ad-63b179a1a797\" (UID: \"8f7d7846-1b56-4ab5-a0ad-63b179a1a797\") " Jan 20 17:44:35 crc kubenswrapper[4558]: I0120 17:44:35.494339 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8f7d7846-1b56-4ab5-a0ad-63b179a1a797-credential-keys\") pod \"8f7d7846-1b56-4ab5-a0ad-63b179a1a797\" (UID: \"8f7d7846-1b56-4ab5-a0ad-63b179a1a797\") " Jan 20 17:44:35 crc kubenswrapper[4558]: I0120 17:44:35.494361 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/b5ea5c8f-cbdb-4b97-87b6-f0375393f6b3-openstack-config\") pod \"b5ea5c8f-cbdb-4b97-87b6-f0375393f6b3\" (UID: \"b5ea5c8f-cbdb-4b97-87b6-f0375393f6b3\") " Jan 20 17:44:35 crc kubenswrapper[4558]: I0120 17:44:35.494431 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f7d7846-1b56-4ab5-a0ad-63b179a1a797-combined-ca-bundle\") pod \"8f7d7846-1b56-4ab5-a0ad-63b179a1a797\" (UID: \"8f7d7846-1b56-4ab5-a0ad-63b179a1a797\") " Jan 20 17:44:35 crc kubenswrapper[4558]: I0120 17:44:35.494454 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/8f7d7846-1b56-4ab5-a0ad-63b179a1a797-scripts\") pod \"8f7d7846-1b56-4ab5-a0ad-63b179a1a797\" (UID: \"8f7d7846-1b56-4ab5-a0ad-63b179a1a797\") " Jan 20 17:44:35 crc kubenswrapper[4558]: I0120 17:44:35.494540 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f7d7846-1b56-4ab5-a0ad-63b179a1a797-config-data\") pod \"8f7d7846-1b56-4ab5-a0ad-63b179a1a797\" (UID: \"8f7d7846-1b56-4ab5-a0ad-63b179a1a797\") " Jan 20 17:44:35 crc kubenswrapper[4558]: I0120 17:44:35.494561 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/b5ea5c8f-cbdb-4b97-87b6-f0375393f6b3-openstack-config-secret\") pod \"b5ea5c8f-cbdb-4b97-87b6-f0375393f6b3\" (UID: \"b5ea5c8f-cbdb-4b97-87b6-f0375393f6b3\") " Jan 20 17:44:35 crc kubenswrapper[4558]: I0120 17:44:35.494626 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8f7d7846-1b56-4ab5-a0ad-63b179a1a797-internal-tls-certs\") pod \"8f7d7846-1b56-4ab5-a0ad-63b179a1a797\" (UID: \"8f7d7846-1b56-4ab5-a0ad-63b179a1a797\") " Jan 20 17:44:35 crc kubenswrapper[4558]: I0120 17:44:35.494654 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c75hn\" (UniqueName: \"kubernetes.io/projected/b5ea5c8f-cbdb-4b97-87b6-f0375393f6b3-kube-api-access-c75hn\") pod \"b5ea5c8f-cbdb-4b97-87b6-f0375393f6b3\" (UID: \"b5ea5c8f-cbdb-4b97-87b6-f0375393f6b3\") " Jan 20 17:44:35 crc kubenswrapper[4558]: I0120 17:44:35.500406 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f7d7846-1b56-4ab5-a0ad-63b179a1a797-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "8f7d7846-1b56-4ab5-a0ad-63b179a1a797" (UID: "8f7d7846-1b56-4ab5-a0ad-63b179a1a797"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:44:35 crc kubenswrapper[4558]: I0120 17:44:35.500526 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f7d7846-1b56-4ab5-a0ad-63b179a1a797-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "8f7d7846-1b56-4ab5-a0ad-63b179a1a797" (UID: "8f7d7846-1b56-4ab5-a0ad-63b179a1a797"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:44:35 crc kubenswrapper[4558]: I0120 17:44:35.500793 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b5ea5c8f-cbdb-4b97-87b6-f0375393f6b3-kube-api-access-c75hn" (OuterVolumeSpecName: "kube-api-access-c75hn") pod "b5ea5c8f-cbdb-4b97-87b6-f0375393f6b3" (UID: "b5ea5c8f-cbdb-4b97-87b6-f0375393f6b3"). InnerVolumeSpecName "kube-api-access-c75hn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:44:35 crc kubenswrapper[4558]: I0120 17:44:35.502728 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f7d7846-1b56-4ab5-a0ad-63b179a1a797-kube-api-access-b2dvl" (OuterVolumeSpecName: "kube-api-access-b2dvl") pod "8f7d7846-1b56-4ab5-a0ad-63b179a1a797" (UID: "8f7d7846-1b56-4ab5-a0ad-63b179a1a797"). InnerVolumeSpecName "kube-api-access-b2dvl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:44:35 crc kubenswrapper[4558]: I0120 17:44:35.503833 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f7d7846-1b56-4ab5-a0ad-63b179a1a797-scripts" (OuterVolumeSpecName: "scripts") pod "8f7d7846-1b56-4ab5-a0ad-63b179a1a797" (UID: "8f7d7846-1b56-4ab5-a0ad-63b179a1a797"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:44:35 crc kubenswrapper[4558]: I0120 17:44:35.524057 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b5ea5c8f-cbdb-4b97-87b6-f0375393f6b3-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "b5ea5c8f-cbdb-4b97-87b6-f0375393f6b3" (UID: "b5ea5c8f-cbdb-4b97-87b6-f0375393f6b3"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:44:35 crc kubenswrapper[4558]: I0120 17:44:35.527295 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f7d7846-1b56-4ab5-a0ad-63b179a1a797-config-data" (OuterVolumeSpecName: "config-data") pod "8f7d7846-1b56-4ab5-a0ad-63b179a1a797" (UID: "8f7d7846-1b56-4ab5-a0ad-63b179a1a797"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:44:35 crc kubenswrapper[4558]: I0120 17:44:35.529627 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b5ea5c8f-cbdb-4b97-87b6-f0375393f6b3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b5ea5c8f-cbdb-4b97-87b6-f0375393f6b3" (UID: "b5ea5c8f-cbdb-4b97-87b6-f0375393f6b3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:44:35 crc kubenswrapper[4558]: I0120 17:44:35.530074 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f7d7846-1b56-4ab5-a0ad-63b179a1a797-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8f7d7846-1b56-4ab5-a0ad-63b179a1a797" (UID: "8f7d7846-1b56-4ab5-a0ad-63b179a1a797"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:44:35 crc kubenswrapper[4558]: I0120 17:44:35.537241 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"53683629-694a-4431-9fed-acc76d3f7cd3","Type":"ContainerStarted","Data":"32905e44c09ace66c00f1ca2fd0fb9d8ed1543f669176d4879e968a7824d5015"} Jan 20 17:44:35 crc kubenswrapper[4558]: I0120 17:44:35.539366 4558 generic.go:334] "Generic (PLEG): container finished" podID="8f7d7846-1b56-4ab5-a0ad-63b179a1a797" containerID="e527f3739bfb93778f627c6e5f5e80d497ddd06b49ce87cb15b8ecf4e23a0bc4" exitCode=0 Jan 20 17:44:35 crc kubenswrapper[4558]: I0120 17:44:35.539439 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-7dffbf585-vg2sk" Jan 20 17:44:35 crc kubenswrapper[4558]: I0120 17:44:35.539442 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-7dffbf585-vg2sk" event={"ID":"8f7d7846-1b56-4ab5-a0ad-63b179a1a797","Type":"ContainerDied","Data":"e527f3739bfb93778f627c6e5f5e80d497ddd06b49ce87cb15b8ecf4e23a0bc4"} Jan 20 17:44:35 crc kubenswrapper[4558]: I0120 17:44:35.539554 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-7dffbf585-vg2sk" event={"ID":"8f7d7846-1b56-4ab5-a0ad-63b179a1a797","Type":"ContainerDied","Data":"c19ebe9aeb11e93747060a750808dbaf41ffff83c463556fac370fec9e9071b5"} Jan 20 17:44:35 crc kubenswrapper[4558]: I0120 17:44:35.539584 4558 scope.go:117] "RemoveContainer" containerID="e527f3739bfb93778f627c6e5f5e80d497ddd06b49ce87cb15b8ecf4e23a0bc4" Jan 20 17:44:35 crc kubenswrapper[4558]: I0120 17:44:35.541085 4558 generic.go:334] "Generic (PLEG): container finished" podID="b5ea5c8f-cbdb-4b97-87b6-f0375393f6b3" containerID="4213065b2ba71189cbc96ee29e5a13822ce35b5f3713247bb4eb5798f51eab0d" exitCode=137 Jan 20 17:44:35 crc kubenswrapper[4558]: I0120 17:44:35.541189 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstackclient" Jan 20 17:44:35 crc kubenswrapper[4558]: I0120 17:44:35.550313 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f7d7846-1b56-4ab5-a0ad-63b179a1a797-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "8f7d7846-1b56-4ab5-a0ad-63b179a1a797" (UID: "8f7d7846-1b56-4ab5-a0ad-63b179a1a797"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:44:35 crc kubenswrapper[4558]: I0120 17:44:35.557306 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b5ea5c8f-cbdb-4b97-87b6-f0375393f6b3-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "b5ea5c8f-cbdb-4b97-87b6-f0375393f6b3" (UID: "b5ea5c8f-cbdb-4b97-87b6-f0375393f6b3"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:44:35 crc kubenswrapper[4558]: I0120 17:44:35.557539 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f7d7846-1b56-4ab5-a0ad-63b179a1a797-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "8f7d7846-1b56-4ab5-a0ad-63b179a1a797" (UID: "8f7d7846-1b56-4ab5-a0ad-63b179a1a797"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:44:35 crc kubenswrapper[4558]: I0120 17:44:35.566155 4558 scope.go:117] "RemoveContainer" containerID="e527f3739bfb93778f627c6e5f5e80d497ddd06b49ce87cb15b8ecf4e23a0bc4" Jan 20 17:44:35 crc kubenswrapper[4558]: I0120 17:44:35.566269 4558 scope.go:117] "RemoveContainer" containerID="2d2a8989ca5a5af98b2c9dc8f801ea0675339ec14800f437c058c5a43c20146b" Jan 20 17:44:35 crc kubenswrapper[4558]: E0120 17:44:35.566606 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e527f3739bfb93778f627c6e5f5e80d497ddd06b49ce87cb15b8ecf4e23a0bc4\": container with ID starting with e527f3739bfb93778f627c6e5f5e80d497ddd06b49ce87cb15b8ecf4e23a0bc4 not found: ID does not exist" containerID="e527f3739bfb93778f627c6e5f5e80d497ddd06b49ce87cb15b8ecf4e23a0bc4" Jan 20 17:44:35 crc kubenswrapper[4558]: I0120 17:44:35.566645 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e527f3739bfb93778f627c6e5f5e80d497ddd06b49ce87cb15b8ecf4e23a0bc4"} err="failed to get container status \"e527f3739bfb93778f627c6e5f5e80d497ddd06b49ce87cb15b8ecf4e23a0bc4\": rpc error: code = NotFound desc = could not find container \"e527f3739bfb93778f627c6e5f5e80d497ddd06b49ce87cb15b8ecf4e23a0bc4\": container with ID starting with e527f3739bfb93778f627c6e5f5e80d497ddd06b49ce87cb15b8ecf4e23a0bc4 not found: ID does not exist" Jan 20 17:44:35 crc kubenswrapper[4558]: I0120 17:44:35.566672 4558 scope.go:117] "RemoveContainer" containerID="4213065b2ba71189cbc96ee29e5a13822ce35b5f3713247bb4eb5798f51eab0d" Jan 20 17:44:35 crc kubenswrapper[4558]: E0120 17:44:35.566757 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:44:35 crc kubenswrapper[4558]: I0120 17:44:35.588447 4558 scope.go:117] "RemoveContainer" containerID="4213065b2ba71189cbc96ee29e5a13822ce35b5f3713247bb4eb5798f51eab0d" Jan 20 17:44:35 crc kubenswrapper[4558]: E0120 17:44:35.588957 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4213065b2ba71189cbc96ee29e5a13822ce35b5f3713247bb4eb5798f51eab0d\": container with ID starting with 4213065b2ba71189cbc96ee29e5a13822ce35b5f3713247bb4eb5798f51eab0d not found: ID does not exist" containerID="4213065b2ba71189cbc96ee29e5a13822ce35b5f3713247bb4eb5798f51eab0d" Jan 20 17:44:35 crc kubenswrapper[4558]: I0120 17:44:35.589013 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4213065b2ba71189cbc96ee29e5a13822ce35b5f3713247bb4eb5798f51eab0d"} err="failed to get container status \"4213065b2ba71189cbc96ee29e5a13822ce35b5f3713247bb4eb5798f51eab0d\": rpc error: code = NotFound desc = could not find container \"4213065b2ba71189cbc96ee29e5a13822ce35b5f3713247bb4eb5798f51eab0d\": container with ID starting with 4213065b2ba71189cbc96ee29e5a13822ce35b5f3713247bb4eb5798f51eab0d not found: ID does not exist" Jan 20 17:44:35 crc kubenswrapper[4558]: I0120 17:44:35.598629 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/8f7d7846-1b56-4ab5-a0ad-63b179a1a797-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:35 crc kubenswrapper[4558]: I0120 17:44:35.598656 4558 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8f7d7846-1b56-4ab5-a0ad-63b179a1a797-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:35 crc kubenswrapper[4558]: I0120 17:44:35.598669 4558 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/b5ea5c8f-cbdb-4b97-87b6-f0375393f6b3-openstack-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:35 crc kubenswrapper[4558]: I0120 17:44:35.598680 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f7d7846-1b56-4ab5-a0ad-63b179a1a797-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:35 crc kubenswrapper[4558]: I0120 17:44:35.598689 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8f7d7846-1b56-4ab5-a0ad-63b179a1a797-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:35 crc kubenswrapper[4558]: I0120 17:44:35.598698 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f7d7846-1b56-4ab5-a0ad-63b179a1a797-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:35 crc kubenswrapper[4558]: I0120 17:44:35.598709 4558 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/b5ea5c8f-cbdb-4b97-87b6-f0375393f6b3-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:35 crc kubenswrapper[4558]: I0120 17:44:35.598722 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8f7d7846-1b56-4ab5-a0ad-63b179a1a797-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:35 crc kubenswrapper[4558]: I0120 17:44:35.598731 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c75hn\" (UniqueName: \"kubernetes.io/projected/b5ea5c8f-cbdb-4b97-87b6-f0375393f6b3-kube-api-access-c75hn\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:35 crc kubenswrapper[4558]: I0120 17:44:35.598740 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b2dvl\" (UniqueName: \"kubernetes.io/projected/8f7d7846-1b56-4ab5-a0ad-63b179a1a797-kube-api-access-b2dvl\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:35 crc kubenswrapper[4558]: I0120 17:44:35.598748 4558 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8f7d7846-1b56-4ab5-a0ad-63b179a1a797-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:35 crc kubenswrapper[4558]: I0120 17:44:35.598756 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5ea5c8f-cbdb-4b97-87b6-f0375393f6b3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:35 crc kubenswrapper[4558]: I0120 17:44:35.872271 4558 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack-kuttl-tests/openstackclient" oldPodUID="b5ea5c8f-cbdb-4b97-87b6-f0375393f6b3" podUID="2bfab6e6-f4a2-4948-9715-43c6f13d88fd" Jan 20 17:44:35 crc kubenswrapper[4558]: I0120 17:44:35.878079 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-7dffbf585-vg2sk"] Jan 
20 17:44:35 crc kubenswrapper[4558]: I0120 17:44:35.883838 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-7dffbf585-vg2sk"] Jan 20 17:44:36 crc kubenswrapper[4558]: I0120 17:44:36.556982 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"53683629-694a-4431-9fed-acc76d3f7cd3","Type":"ContainerStarted","Data":"59fd501aa121cd918ab03737e2526cf05575ca3810eb4c9a6e0ef760b735a526"} Jan 20 17:44:36 crc kubenswrapper[4558]: I0120 17:44:36.578598 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f7d7846-1b56-4ab5-a0ad-63b179a1a797" path="/var/lib/kubelet/pods/8f7d7846-1b56-4ab5-a0ad-63b179a1a797/volumes" Jan 20 17:44:36 crc kubenswrapper[4558]: I0120 17:44:36.579402 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b5ea5c8f-cbdb-4b97-87b6-f0375393f6b3" path="/var/lib/kubelet/pods/b5ea5c8f-cbdb-4b97-87b6-f0375393f6b3/volumes" Jan 20 17:44:37 crc kubenswrapper[4558]: I0120 17:44:37.009460 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-66bfc8789d-nsc64" Jan 20 17:44:37 crc kubenswrapper[4558]: I0120 17:44:37.034274 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:44:37 crc kubenswrapper[4558]: I0120 17:44:37.124693 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5cffh\" (UniqueName: \"kubernetes.io/projected/596f2da7-db14-44e3-8c93-5809b27e0cc6-kube-api-access-5cffh\") pod \"596f2da7-db14-44e3-8c93-5809b27e0cc6\" (UID: \"596f2da7-db14-44e3-8c93-5809b27e0cc6\") " Jan 20 17:44:37 crc kubenswrapper[4558]: I0120 17:44:37.124765 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/596f2da7-db14-44e3-8c93-5809b27e0cc6-internal-tls-certs\") pod \"596f2da7-db14-44e3-8c93-5809b27e0cc6\" (UID: \"596f2da7-db14-44e3-8c93-5809b27e0cc6\") " Jan 20 17:44:37 crc kubenswrapper[4558]: I0120 17:44:37.125070 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/596f2da7-db14-44e3-8c93-5809b27e0cc6-httpd-config\") pod \"596f2da7-db14-44e3-8c93-5809b27e0cc6\" (UID: \"596f2da7-db14-44e3-8c93-5809b27e0cc6\") " Jan 20 17:44:37 crc kubenswrapper[4558]: I0120 17:44:37.125180 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/596f2da7-db14-44e3-8c93-5809b27e0cc6-combined-ca-bundle\") pod \"596f2da7-db14-44e3-8c93-5809b27e0cc6\" (UID: \"596f2da7-db14-44e3-8c93-5809b27e0cc6\") " Jan 20 17:44:37 crc kubenswrapper[4558]: I0120 17:44:37.125215 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/596f2da7-db14-44e3-8c93-5809b27e0cc6-config\") pod \"596f2da7-db14-44e3-8c93-5809b27e0cc6\" (UID: \"596f2da7-db14-44e3-8c93-5809b27e0cc6\") " Jan 20 17:44:37 crc kubenswrapper[4558]: I0120 17:44:37.125259 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/596f2da7-db14-44e3-8c93-5809b27e0cc6-public-tls-certs\") pod \"596f2da7-db14-44e3-8c93-5809b27e0cc6\" (UID: \"596f2da7-db14-44e3-8c93-5809b27e0cc6\") " Jan 20 17:44:37 crc kubenswrapper[4558]: I0120 17:44:37.125294 4558 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/596f2da7-db14-44e3-8c93-5809b27e0cc6-ovndb-tls-certs\") pod \"596f2da7-db14-44e3-8c93-5809b27e0cc6\" (UID: \"596f2da7-db14-44e3-8c93-5809b27e0cc6\") " Jan 20 17:44:37 crc kubenswrapper[4558]: I0120 17:44:37.132116 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/596f2da7-db14-44e3-8c93-5809b27e0cc6-kube-api-access-5cffh" (OuterVolumeSpecName: "kube-api-access-5cffh") pod "596f2da7-db14-44e3-8c93-5809b27e0cc6" (UID: "596f2da7-db14-44e3-8c93-5809b27e0cc6"). InnerVolumeSpecName "kube-api-access-5cffh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:44:37 crc kubenswrapper[4558]: I0120 17:44:37.178778 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/596f2da7-db14-44e3-8c93-5809b27e0cc6-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "596f2da7-db14-44e3-8c93-5809b27e0cc6" (UID: "596f2da7-db14-44e3-8c93-5809b27e0cc6"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:44:37 crc kubenswrapper[4558]: I0120 17:44:37.234319 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/596f2da7-db14-44e3-8c93-5809b27e0cc6-httpd-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:37 crc kubenswrapper[4558]: I0120 17:44:37.234605 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5cffh\" (UniqueName: \"kubernetes.io/projected/596f2da7-db14-44e3-8c93-5809b27e0cc6-kube-api-access-5cffh\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:37 crc kubenswrapper[4558]: I0120 17:44:37.252284 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/596f2da7-db14-44e3-8c93-5809b27e0cc6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "596f2da7-db14-44e3-8c93-5809b27e0cc6" (UID: "596f2da7-db14-44e3-8c93-5809b27e0cc6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:44:37 crc kubenswrapper[4558]: I0120 17:44:37.284490 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/596f2da7-db14-44e3-8c93-5809b27e0cc6-config" (OuterVolumeSpecName: "config") pod "596f2da7-db14-44e3-8c93-5809b27e0cc6" (UID: "596f2da7-db14-44e3-8c93-5809b27e0cc6"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:44:37 crc kubenswrapper[4558]: I0120 17:44:37.284894 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/596f2da7-db14-44e3-8c93-5809b27e0cc6-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "596f2da7-db14-44e3-8c93-5809b27e0cc6" (UID: "596f2da7-db14-44e3-8c93-5809b27e0cc6"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:44:37 crc kubenswrapper[4558]: I0120 17:44:37.310306 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/596f2da7-db14-44e3-8c93-5809b27e0cc6-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "596f2da7-db14-44e3-8c93-5809b27e0cc6" (UID: "596f2da7-db14-44e3-8c93-5809b27e0cc6"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:44:37 crc kubenswrapper[4558]: I0120 17:44:37.338387 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/596f2da7-db14-44e3-8c93-5809b27e0cc6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:37 crc kubenswrapper[4558]: I0120 17:44:37.338427 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/596f2da7-db14-44e3-8c93-5809b27e0cc6-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:37 crc kubenswrapper[4558]: I0120 17:44:37.338439 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/596f2da7-db14-44e3-8c93-5809b27e0cc6-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:37 crc kubenswrapper[4558]: I0120 17:44:37.338451 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/596f2da7-db14-44e3-8c93-5809b27e0cc6-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:37 crc kubenswrapper[4558]: I0120 17:44:37.353305 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/596f2da7-db14-44e3-8c93-5809b27e0cc6-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "596f2da7-db14-44e3-8c93-5809b27e0cc6" (UID: "596f2da7-db14-44e3-8c93-5809b27e0cc6"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:44:37 crc kubenswrapper[4558]: I0120 17:44:37.440858 4558 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/596f2da7-db14-44e3-8c93-5809b27e0cc6-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:37 crc kubenswrapper[4558]: I0120 17:44:37.570942 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"53683629-694a-4431-9fed-acc76d3f7cd3","Type":"ContainerStarted","Data":"d26bfed01215b568d7b48379890381cce82188851203405cdabf1c45feeba58c"} Jan 20 17:44:37 crc kubenswrapper[4558]: I0120 17:44:37.573996 4558 generic.go:334] "Generic (PLEG): container finished" podID="596f2da7-db14-44e3-8c93-5809b27e0cc6" containerID="6f7e169a9c1c7c3746b3c5209448d3d9efd46fcac41efa29c9cdafd00ee4a9c7" exitCode=0 Jan 20 17:44:37 crc kubenswrapper[4558]: I0120 17:44:37.574056 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-66bfc8789d-nsc64" Jan 20 17:44:37 crc kubenswrapper[4558]: I0120 17:44:37.574061 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-66bfc8789d-nsc64" event={"ID":"596f2da7-db14-44e3-8c93-5809b27e0cc6","Type":"ContainerDied","Data":"6f7e169a9c1c7c3746b3c5209448d3d9efd46fcac41efa29c9cdafd00ee4a9c7"} Jan 20 17:44:37 crc kubenswrapper[4558]: I0120 17:44:37.574223 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-66bfc8789d-nsc64" event={"ID":"596f2da7-db14-44e3-8c93-5809b27e0cc6","Type":"ContainerDied","Data":"61ba0b2fbb9343023136e7477f26eb137e81910dc996a6b2e8469ad51cae7b89"} Jan 20 17:44:37 crc kubenswrapper[4558]: I0120 17:44:37.574273 4558 scope.go:117] "RemoveContainer" containerID="d2b6e31358d80bd665915aa4bc6aae644adca0fde94572c0a18379f3e0ce34e1" Jan 20 17:44:37 crc kubenswrapper[4558]: I0120 17:44:37.608111 4558 scope.go:117] "RemoveContainer" containerID="6f7e169a9c1c7c3746b3c5209448d3d9efd46fcac41efa29c9cdafd00ee4a9c7" Jan 20 17:44:37 crc kubenswrapper[4558]: I0120 17:44:37.625182 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-66bfc8789d-nsc64"] Jan 20 17:44:37 crc kubenswrapper[4558]: I0120 17:44:37.633192 4558 scope.go:117] "RemoveContainer" containerID="d2b6e31358d80bd665915aa4bc6aae644adca0fde94572c0a18379f3e0ce34e1" Jan 20 17:44:37 crc kubenswrapper[4558]: E0120 17:44:37.633611 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d2b6e31358d80bd665915aa4bc6aae644adca0fde94572c0a18379f3e0ce34e1\": container with ID starting with d2b6e31358d80bd665915aa4bc6aae644adca0fde94572c0a18379f3e0ce34e1 not found: ID does not exist" containerID="d2b6e31358d80bd665915aa4bc6aae644adca0fde94572c0a18379f3e0ce34e1" Jan 20 17:44:37 crc kubenswrapper[4558]: I0120 17:44:37.633656 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d2b6e31358d80bd665915aa4bc6aae644adca0fde94572c0a18379f3e0ce34e1"} err="failed to get container status \"d2b6e31358d80bd665915aa4bc6aae644adca0fde94572c0a18379f3e0ce34e1\": rpc error: code = NotFound desc = could not find container \"d2b6e31358d80bd665915aa4bc6aae644adca0fde94572c0a18379f3e0ce34e1\": container with ID starting with d2b6e31358d80bd665915aa4bc6aae644adca0fde94572c0a18379f3e0ce34e1 not found: ID does not exist" Jan 20 17:44:37 crc kubenswrapper[4558]: I0120 17:44:37.633685 4558 scope.go:117] "RemoveContainer" containerID="6f7e169a9c1c7c3746b3c5209448d3d9efd46fcac41efa29c9cdafd00ee4a9c7" Jan 20 17:44:37 crc kubenswrapper[4558]: I0120 17:44:37.634291 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-66bfc8789d-nsc64"] Jan 20 17:44:37 crc kubenswrapper[4558]: E0120 17:44:37.634443 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6f7e169a9c1c7c3746b3c5209448d3d9efd46fcac41efa29c9cdafd00ee4a9c7\": container with ID starting with 6f7e169a9c1c7c3746b3c5209448d3d9efd46fcac41efa29c9cdafd00ee4a9c7 not found: ID does not exist" containerID="6f7e169a9c1c7c3746b3c5209448d3d9efd46fcac41efa29c9cdafd00ee4a9c7" Jan 20 17:44:37 crc kubenswrapper[4558]: I0120 17:44:37.634491 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6f7e169a9c1c7c3746b3c5209448d3d9efd46fcac41efa29c9cdafd00ee4a9c7"} err="failed to get 
container status \"6f7e169a9c1c7c3746b3c5209448d3d9efd46fcac41efa29c9cdafd00ee4a9c7\": rpc error: code = NotFound desc = could not find container \"6f7e169a9c1c7c3746b3c5209448d3d9efd46fcac41efa29c9cdafd00ee4a9c7\": container with ID starting with 6f7e169a9c1c7c3746b3c5209448d3d9efd46fcac41efa29c9cdafd00ee4a9c7 not found: ID does not exist" Jan 20 17:44:37 crc kubenswrapper[4558]: E0120 17:44:37.680884 4558 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod596f2da7_db14_44e3_8c93_5809b27e0cc6.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod596f2da7_db14_44e3_8c93_5809b27e0cc6.slice/crio-61ba0b2fbb9343023136e7477f26eb137e81910dc996a6b2e8469ad51cae7b89\": RecentStats: unable to find data in memory cache]" Jan 20 17:44:37 crc kubenswrapper[4558]: I0120 17:44:37.918191 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/swift-proxy-6645dd7478-nfmw9"] Jan 20 17:44:37 crc kubenswrapper[4558]: E0120 17:44:37.918905 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="596f2da7-db14-44e3-8c93-5809b27e0cc6" containerName="neutron-api" Jan 20 17:44:37 crc kubenswrapper[4558]: I0120 17:44:37.918933 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="596f2da7-db14-44e3-8c93-5809b27e0cc6" containerName="neutron-api" Jan 20 17:44:37 crc kubenswrapper[4558]: E0120 17:44:37.918974 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8f7d7846-1b56-4ab5-a0ad-63b179a1a797" containerName="keystone-api" Jan 20 17:44:37 crc kubenswrapper[4558]: I0120 17:44:37.918981 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8f7d7846-1b56-4ab5-a0ad-63b179a1a797" containerName="keystone-api" Jan 20 17:44:37 crc kubenswrapper[4558]: E0120 17:44:37.918999 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="596f2da7-db14-44e3-8c93-5809b27e0cc6" containerName="neutron-httpd" Jan 20 17:44:37 crc kubenswrapper[4558]: I0120 17:44:37.919005 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="596f2da7-db14-44e3-8c93-5809b27e0cc6" containerName="neutron-httpd" Jan 20 17:44:37 crc kubenswrapper[4558]: I0120 17:44:37.919216 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="596f2da7-db14-44e3-8c93-5809b27e0cc6" containerName="neutron-api" Jan 20 17:44:37 crc kubenswrapper[4558]: I0120 17:44:37.919240 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8f7d7846-1b56-4ab5-a0ad-63b179a1a797" containerName="keystone-api" Jan 20 17:44:37 crc kubenswrapper[4558]: I0120 17:44:37.919251 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="596f2da7-db14-44e3-8c93-5809b27e0cc6" containerName="neutron-httpd" Jan 20 17:44:37 crc kubenswrapper[4558]: I0120 17:44:37.920437 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/swift-proxy-6645dd7478-nfmw9" Jan 20 17:44:37 crc kubenswrapper[4558]: I0120 17:44:37.927036 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-proxy-6645dd7478-nfmw9"] Jan 20 17:44:37 crc kubenswrapper[4558]: I0120 17:44:37.952702 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/321b2cf0-a7dd-4339-ab97-6327272614b2-log-httpd\") pod \"swift-proxy-6645dd7478-nfmw9\" (UID: \"321b2cf0-a7dd-4339-ab97-6327272614b2\") " pod="openstack-kuttl-tests/swift-proxy-6645dd7478-nfmw9" Jan 20 17:44:37 crc kubenswrapper[4558]: I0120 17:44:37.952789 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/321b2cf0-a7dd-4339-ab97-6327272614b2-config-data\") pod \"swift-proxy-6645dd7478-nfmw9\" (UID: \"321b2cf0-a7dd-4339-ab97-6327272614b2\") " pod="openstack-kuttl-tests/swift-proxy-6645dd7478-nfmw9" Jan 20 17:44:37 crc kubenswrapper[4558]: I0120 17:44:37.953013 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7thqn\" (UniqueName: \"kubernetes.io/projected/321b2cf0-a7dd-4339-ab97-6327272614b2-kube-api-access-7thqn\") pod \"swift-proxy-6645dd7478-nfmw9\" (UID: \"321b2cf0-a7dd-4339-ab97-6327272614b2\") " pod="openstack-kuttl-tests/swift-proxy-6645dd7478-nfmw9" Jan 20 17:44:37 crc kubenswrapper[4558]: I0120 17:44:37.953091 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/321b2cf0-a7dd-4339-ab97-6327272614b2-run-httpd\") pod \"swift-proxy-6645dd7478-nfmw9\" (UID: \"321b2cf0-a7dd-4339-ab97-6327272614b2\") " pod="openstack-kuttl-tests/swift-proxy-6645dd7478-nfmw9" Jan 20 17:44:37 crc kubenswrapper[4558]: I0120 17:44:37.953123 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/321b2cf0-a7dd-4339-ab97-6327272614b2-internal-tls-certs\") pod \"swift-proxy-6645dd7478-nfmw9\" (UID: \"321b2cf0-a7dd-4339-ab97-6327272614b2\") " pod="openstack-kuttl-tests/swift-proxy-6645dd7478-nfmw9" Jan 20 17:44:37 crc kubenswrapper[4558]: I0120 17:44:37.953155 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/321b2cf0-a7dd-4339-ab97-6327272614b2-etc-swift\") pod \"swift-proxy-6645dd7478-nfmw9\" (UID: \"321b2cf0-a7dd-4339-ab97-6327272614b2\") " pod="openstack-kuttl-tests/swift-proxy-6645dd7478-nfmw9" Jan 20 17:44:37 crc kubenswrapper[4558]: I0120 17:44:37.953229 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/321b2cf0-a7dd-4339-ab97-6327272614b2-public-tls-certs\") pod \"swift-proxy-6645dd7478-nfmw9\" (UID: \"321b2cf0-a7dd-4339-ab97-6327272614b2\") " pod="openstack-kuttl-tests/swift-proxy-6645dd7478-nfmw9" Jan 20 17:44:37 crc kubenswrapper[4558]: I0120 17:44:37.953263 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/321b2cf0-a7dd-4339-ab97-6327272614b2-combined-ca-bundle\") pod \"swift-proxy-6645dd7478-nfmw9\" (UID: \"321b2cf0-a7dd-4339-ab97-6327272614b2\") " 
pod="openstack-kuttl-tests/swift-proxy-6645dd7478-nfmw9" Jan 20 17:44:38 crc kubenswrapper[4558]: I0120 17:44:38.055832 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/321b2cf0-a7dd-4339-ab97-6327272614b2-log-httpd\") pod \"swift-proxy-6645dd7478-nfmw9\" (UID: \"321b2cf0-a7dd-4339-ab97-6327272614b2\") " pod="openstack-kuttl-tests/swift-proxy-6645dd7478-nfmw9" Jan 20 17:44:38 crc kubenswrapper[4558]: I0120 17:44:38.055899 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/321b2cf0-a7dd-4339-ab97-6327272614b2-config-data\") pod \"swift-proxy-6645dd7478-nfmw9\" (UID: \"321b2cf0-a7dd-4339-ab97-6327272614b2\") " pod="openstack-kuttl-tests/swift-proxy-6645dd7478-nfmw9" Jan 20 17:44:38 crc kubenswrapper[4558]: I0120 17:44:38.055982 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7thqn\" (UniqueName: \"kubernetes.io/projected/321b2cf0-a7dd-4339-ab97-6327272614b2-kube-api-access-7thqn\") pod \"swift-proxy-6645dd7478-nfmw9\" (UID: \"321b2cf0-a7dd-4339-ab97-6327272614b2\") " pod="openstack-kuttl-tests/swift-proxy-6645dd7478-nfmw9" Jan 20 17:44:38 crc kubenswrapper[4558]: I0120 17:44:38.056013 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/321b2cf0-a7dd-4339-ab97-6327272614b2-run-httpd\") pod \"swift-proxy-6645dd7478-nfmw9\" (UID: \"321b2cf0-a7dd-4339-ab97-6327272614b2\") " pod="openstack-kuttl-tests/swift-proxy-6645dd7478-nfmw9" Jan 20 17:44:38 crc kubenswrapper[4558]: I0120 17:44:38.056040 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/321b2cf0-a7dd-4339-ab97-6327272614b2-internal-tls-certs\") pod \"swift-proxy-6645dd7478-nfmw9\" (UID: \"321b2cf0-a7dd-4339-ab97-6327272614b2\") " pod="openstack-kuttl-tests/swift-proxy-6645dd7478-nfmw9" Jan 20 17:44:38 crc kubenswrapper[4558]: I0120 17:44:38.056064 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/321b2cf0-a7dd-4339-ab97-6327272614b2-etc-swift\") pod \"swift-proxy-6645dd7478-nfmw9\" (UID: \"321b2cf0-a7dd-4339-ab97-6327272614b2\") " pod="openstack-kuttl-tests/swift-proxy-6645dd7478-nfmw9" Jan 20 17:44:38 crc kubenswrapper[4558]: I0120 17:44:38.056104 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/321b2cf0-a7dd-4339-ab97-6327272614b2-public-tls-certs\") pod \"swift-proxy-6645dd7478-nfmw9\" (UID: \"321b2cf0-a7dd-4339-ab97-6327272614b2\") " pod="openstack-kuttl-tests/swift-proxy-6645dd7478-nfmw9" Jan 20 17:44:38 crc kubenswrapper[4558]: I0120 17:44:38.056129 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/321b2cf0-a7dd-4339-ab97-6327272614b2-combined-ca-bundle\") pod \"swift-proxy-6645dd7478-nfmw9\" (UID: \"321b2cf0-a7dd-4339-ab97-6327272614b2\") " pod="openstack-kuttl-tests/swift-proxy-6645dd7478-nfmw9" Jan 20 17:44:38 crc kubenswrapper[4558]: I0120 17:44:38.057197 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/321b2cf0-a7dd-4339-ab97-6327272614b2-run-httpd\") pod \"swift-proxy-6645dd7478-nfmw9\" (UID: 
\"321b2cf0-a7dd-4339-ab97-6327272614b2\") " pod="openstack-kuttl-tests/swift-proxy-6645dd7478-nfmw9" Jan 20 17:44:38 crc kubenswrapper[4558]: I0120 17:44:38.057500 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/321b2cf0-a7dd-4339-ab97-6327272614b2-log-httpd\") pod \"swift-proxy-6645dd7478-nfmw9\" (UID: \"321b2cf0-a7dd-4339-ab97-6327272614b2\") " pod="openstack-kuttl-tests/swift-proxy-6645dd7478-nfmw9" Jan 20 17:44:38 crc kubenswrapper[4558]: I0120 17:44:38.062250 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/321b2cf0-a7dd-4339-ab97-6327272614b2-combined-ca-bundle\") pod \"swift-proxy-6645dd7478-nfmw9\" (UID: \"321b2cf0-a7dd-4339-ab97-6327272614b2\") " pod="openstack-kuttl-tests/swift-proxy-6645dd7478-nfmw9" Jan 20 17:44:38 crc kubenswrapper[4558]: I0120 17:44:38.062507 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/321b2cf0-a7dd-4339-ab97-6327272614b2-etc-swift\") pod \"swift-proxy-6645dd7478-nfmw9\" (UID: \"321b2cf0-a7dd-4339-ab97-6327272614b2\") " pod="openstack-kuttl-tests/swift-proxy-6645dd7478-nfmw9" Jan 20 17:44:38 crc kubenswrapper[4558]: I0120 17:44:38.066472 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/321b2cf0-a7dd-4339-ab97-6327272614b2-config-data\") pod \"swift-proxy-6645dd7478-nfmw9\" (UID: \"321b2cf0-a7dd-4339-ab97-6327272614b2\") " pod="openstack-kuttl-tests/swift-proxy-6645dd7478-nfmw9" Jan 20 17:44:38 crc kubenswrapper[4558]: I0120 17:44:38.073604 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/321b2cf0-a7dd-4339-ab97-6327272614b2-internal-tls-certs\") pod \"swift-proxy-6645dd7478-nfmw9\" (UID: \"321b2cf0-a7dd-4339-ab97-6327272614b2\") " pod="openstack-kuttl-tests/swift-proxy-6645dd7478-nfmw9" Jan 20 17:44:38 crc kubenswrapper[4558]: I0120 17:44:38.074665 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7thqn\" (UniqueName: \"kubernetes.io/projected/321b2cf0-a7dd-4339-ab97-6327272614b2-kube-api-access-7thqn\") pod \"swift-proxy-6645dd7478-nfmw9\" (UID: \"321b2cf0-a7dd-4339-ab97-6327272614b2\") " pod="openstack-kuttl-tests/swift-proxy-6645dd7478-nfmw9" Jan 20 17:44:38 crc kubenswrapper[4558]: I0120 17:44:38.079675 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/321b2cf0-a7dd-4339-ab97-6327272614b2-public-tls-certs\") pod \"swift-proxy-6645dd7478-nfmw9\" (UID: \"321b2cf0-a7dd-4339-ab97-6327272614b2\") " pod="openstack-kuttl-tests/swift-proxy-6645dd7478-nfmw9" Jan 20 17:44:38 crc kubenswrapper[4558]: I0120 17:44:38.269312 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/swift-proxy-6645dd7478-nfmw9" Jan 20 17:44:38 crc kubenswrapper[4558]: I0120 17:44:38.579545 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="596f2da7-db14-44e3-8c93-5809b27e0cc6" path="/var/lib/kubelet/pods/596f2da7-db14-44e3-8c93-5809b27e0cc6/volumes" Jan 20 17:44:38 crc kubenswrapper[4558]: I0120 17:44:38.707840 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-proxy-6645dd7478-nfmw9"] Jan 20 17:44:38 crc kubenswrapper[4558]: W0120 17:44:38.717255 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod321b2cf0_a7dd_4339_ab97_6327272614b2.slice/crio-d4f96769815610a64430516a58fff20bd40a14044f4b04afdee697d28c6ab063 WatchSource:0}: Error finding container d4f96769815610a64430516a58fff20bd40a14044f4b04afdee697d28c6ab063: Status 404 returned error can't find the container with id d4f96769815610a64430516a58fff20bd40a14044f4b04afdee697d28c6ab063 Jan 20 17:44:39 crc kubenswrapper[4558]: I0120 17:44:39.602940 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-6645dd7478-nfmw9" event={"ID":"321b2cf0-a7dd-4339-ab97-6327272614b2","Type":"ContainerStarted","Data":"649e44f608af14cfa1fc698291c5750acd272dbe43c21d8bf64cbc026551b71b"} Jan 20 17:44:39 crc kubenswrapper[4558]: I0120 17:44:39.604756 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/swift-proxy-6645dd7478-nfmw9" Jan 20 17:44:39 crc kubenswrapper[4558]: I0120 17:44:39.605442 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/swift-proxy-6645dd7478-nfmw9" Jan 20 17:44:39 crc kubenswrapper[4558]: I0120 17:44:39.605548 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-6645dd7478-nfmw9" event={"ID":"321b2cf0-a7dd-4339-ab97-6327272614b2","Type":"ContainerStarted","Data":"d555fffc777c20179c07649221e67cf87d58b574768c17302a0db151ff0fe7f8"} Jan 20 17:44:39 crc kubenswrapper[4558]: I0120 17:44:39.605666 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-6645dd7478-nfmw9" event={"ID":"321b2cf0-a7dd-4339-ab97-6327272614b2","Type":"ContainerStarted","Data":"d4f96769815610a64430516a58fff20bd40a14044f4b04afdee697d28c6ab063"} Jan 20 17:44:39 crc kubenswrapper[4558]: I0120 17:44:39.611945 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"53683629-694a-4431-9fed-acc76d3f7cd3","Type":"ContainerStarted","Data":"bcea1c3d9e0a9d5d8596268b11e75b6a72ec481cef6fa080ea60730405b966df"} Jan 20 17:44:39 crc kubenswrapper[4558]: I0120 17:44:39.612128 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:44:39 crc kubenswrapper[4558]: I0120 17:44:39.612116 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="53683629-694a-4431-9fed-acc76d3f7cd3" containerName="ceilometer-central-agent" containerID="cri-o://32905e44c09ace66c00f1ca2fd0fb9d8ed1543f669176d4879e968a7824d5015" gracePeriod=30 Jan 20 17:44:39 crc kubenswrapper[4558]: I0120 17:44:39.612191 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="53683629-694a-4431-9fed-acc76d3f7cd3" containerName="sg-core" 
containerID="cri-o://d26bfed01215b568d7b48379890381cce82188851203405cdabf1c45feeba58c" gracePeriod=30 Jan 20 17:44:39 crc kubenswrapper[4558]: I0120 17:44:39.612218 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="53683629-694a-4431-9fed-acc76d3f7cd3" containerName="proxy-httpd" containerID="cri-o://bcea1c3d9e0a9d5d8596268b11e75b6a72ec481cef6fa080ea60730405b966df" gracePeriod=30 Jan 20 17:44:39 crc kubenswrapper[4558]: I0120 17:44:39.612237 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="53683629-694a-4431-9fed-acc76d3f7cd3" containerName="ceilometer-notification-agent" containerID="cri-o://59fd501aa121cd918ab03737e2526cf05575ca3810eb4c9a6e0ef760b735a526" gracePeriod=30 Jan 20 17:44:39 crc kubenswrapper[4558]: I0120 17:44:39.629145 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/swift-proxy-6645dd7478-nfmw9" podStartSLOduration=2.629122344 podStartE2EDuration="2.629122344s" podCreationTimestamp="2026-01-20 17:44:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:44:39.625152921 +0000 UTC m=+3773.385490888" watchObservedRunningTime="2026-01-20 17:44:39.629122344 +0000 UTC m=+3773.389460311" Jan 20 17:44:39 crc kubenswrapper[4558]: I0120 17:44:39.649853 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=2.660560829 podStartE2EDuration="6.649822215s" podCreationTimestamp="2026-01-20 17:44:33 +0000 UTC" firstStartedPulling="2026-01-20 17:44:34.428847403 +0000 UTC m=+3768.189185370" lastFinishedPulling="2026-01-20 17:44:38.418108789 +0000 UTC m=+3772.178446756" observedRunningTime="2026-01-20 17:44:39.642475873 +0000 UTC m=+3773.402813829" watchObservedRunningTime="2026-01-20 17:44:39.649822215 +0000 UTC m=+3773.410160183" Jan 20 17:44:40 crc kubenswrapper[4558]: I0120 17:44:40.624273 4558 generic.go:334] "Generic (PLEG): container finished" podID="53683629-694a-4431-9fed-acc76d3f7cd3" containerID="bcea1c3d9e0a9d5d8596268b11e75b6a72ec481cef6fa080ea60730405b966df" exitCode=0 Jan 20 17:44:40 crc kubenswrapper[4558]: I0120 17:44:40.624319 4558 generic.go:334] "Generic (PLEG): container finished" podID="53683629-694a-4431-9fed-acc76d3f7cd3" containerID="d26bfed01215b568d7b48379890381cce82188851203405cdabf1c45feeba58c" exitCode=2 Jan 20 17:44:40 crc kubenswrapper[4558]: I0120 17:44:40.624329 4558 generic.go:334] "Generic (PLEG): container finished" podID="53683629-694a-4431-9fed-acc76d3f7cd3" containerID="59fd501aa121cd918ab03737e2526cf05575ca3810eb4c9a6e0ef760b735a526" exitCode=0 Jan 20 17:44:40 crc kubenswrapper[4558]: I0120 17:44:40.624374 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"53683629-694a-4431-9fed-acc76d3f7cd3","Type":"ContainerDied","Data":"bcea1c3d9e0a9d5d8596268b11e75b6a72ec481cef6fa080ea60730405b966df"} Jan 20 17:44:40 crc kubenswrapper[4558]: I0120 17:44:40.624460 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"53683629-694a-4431-9fed-acc76d3f7cd3","Type":"ContainerDied","Data":"d26bfed01215b568d7b48379890381cce82188851203405cdabf1c45feeba58c"} Jan 20 17:44:40 crc kubenswrapper[4558]: I0120 17:44:40.624473 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"53683629-694a-4431-9fed-acc76d3f7cd3","Type":"ContainerDied","Data":"59fd501aa121cd918ab03737e2526cf05575ca3810eb4c9a6e0ef760b735a526"} Jan 20 17:44:42 crc kubenswrapper[4558]: I0120 17:44:42.566549 4558 scope.go:117] "RemoveContainer" containerID="f175857b081b6702705f8a9a0de77c7f1bd59b4f0ad1848d4abf509a9f1c1ae7" Jan 20 17:44:42 crc kubenswrapper[4558]: E0120 17:44:42.566850 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-cell0-conductor-conductor\" with CrashLoopBackOff: \"back-off 40s restarting failed container=nova-cell0-conductor-conductor pod=nova-cell0-conductor-0_openstack-kuttl-tests(50cacddd-ebea-477f-af64-6e96a09a242e)\"" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="50cacddd-ebea-477f-af64-6e96a09a242e" Jan 20 17:44:43 crc kubenswrapper[4558]: I0120 17:44:43.281361 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/swift-proxy-6645dd7478-nfmw9" Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.159359 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.210049 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/53683629-694a-4431-9fed-acc76d3f7cd3-ceilometer-tls-certs\") pod \"53683629-694a-4431-9fed-acc76d3f7cd3\" (UID: \"53683629-694a-4431-9fed-acc76d3f7cd3\") " Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.210262 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/53683629-694a-4431-9fed-acc76d3f7cd3-sg-core-conf-yaml\") pod \"53683629-694a-4431-9fed-acc76d3f7cd3\" (UID: \"53683629-694a-4431-9fed-acc76d3f7cd3\") " Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.210322 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53683629-694a-4431-9fed-acc76d3f7cd3-combined-ca-bundle\") pod \"53683629-694a-4431-9fed-acc76d3f7cd3\" (UID: \"53683629-694a-4431-9fed-acc76d3f7cd3\") " Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.210355 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/53683629-694a-4431-9fed-acc76d3f7cd3-scripts\") pod \"53683629-694a-4431-9fed-acc76d3f7cd3\" (UID: \"53683629-694a-4431-9fed-acc76d3f7cd3\") " Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.210432 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/53683629-694a-4431-9fed-acc76d3f7cd3-run-httpd\") pod \"53683629-694a-4431-9fed-acc76d3f7cd3\" (UID: \"53683629-694a-4431-9fed-acc76d3f7cd3\") " Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.210522 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/53683629-694a-4431-9fed-acc76d3f7cd3-config-data\") pod \"53683629-694a-4431-9fed-acc76d3f7cd3\" (UID: \"53683629-694a-4431-9fed-acc76d3f7cd3\") " Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.210548 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/53683629-694a-4431-9fed-acc76d3f7cd3-log-httpd\") pod \"53683629-694a-4431-9fed-acc76d3f7cd3\" (UID: \"53683629-694a-4431-9fed-acc76d3f7cd3\") " Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.210618 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8zlng\" (UniqueName: \"kubernetes.io/projected/53683629-694a-4431-9fed-acc76d3f7cd3-kube-api-access-8zlng\") pod \"53683629-694a-4431-9fed-acc76d3f7cd3\" (UID: \"53683629-694a-4431-9fed-acc76d3f7cd3\") " Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.210995 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/53683629-694a-4431-9fed-acc76d3f7cd3-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "53683629-694a-4431-9fed-acc76d3f7cd3" (UID: "53683629-694a-4431-9fed-acc76d3f7cd3"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.211081 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/53683629-694a-4431-9fed-acc76d3f7cd3-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "53683629-694a-4431-9fed-acc76d3f7cd3" (UID: "53683629-694a-4431-9fed-acc76d3f7cd3"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.218087 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/53683629-694a-4431-9fed-acc76d3f7cd3-kube-api-access-8zlng" (OuterVolumeSpecName: "kube-api-access-8zlng") pod "53683629-694a-4431-9fed-acc76d3f7cd3" (UID: "53683629-694a-4431-9fed-acc76d3f7cd3"). InnerVolumeSpecName "kube-api-access-8zlng". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.225800 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/53683629-694a-4431-9fed-acc76d3f7cd3-scripts" (OuterVolumeSpecName: "scripts") pod "53683629-694a-4431-9fed-acc76d3f7cd3" (UID: "53683629-694a-4431-9fed-acc76d3f7cd3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.235781 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/53683629-694a-4431-9fed-acc76d3f7cd3-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "53683629-694a-4431-9fed-acc76d3f7cd3" (UID: "53683629-694a-4431-9fed-acc76d3f7cd3"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.255031 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/53683629-694a-4431-9fed-acc76d3f7cd3-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "53683629-694a-4431-9fed-acc76d3f7cd3" (UID: "53683629-694a-4431-9fed-acc76d3f7cd3"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.268096 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/53683629-694a-4431-9fed-acc76d3f7cd3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "53683629-694a-4431-9fed-acc76d3f7cd3" (UID: "53683629-694a-4431-9fed-acc76d3f7cd3"). 
InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.290278 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/53683629-694a-4431-9fed-acc76d3f7cd3-config-data" (OuterVolumeSpecName: "config-data") pod "53683629-694a-4431-9fed-acc76d3f7cd3" (UID: "53683629-694a-4431-9fed-acc76d3f7cd3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.313784 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/53683629-694a-4431-9fed-acc76d3f7cd3-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.313820 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53683629-694a-4431-9fed-acc76d3f7cd3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.313832 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/53683629-694a-4431-9fed-acc76d3f7cd3-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.313842 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/53683629-694a-4431-9fed-acc76d3f7cd3-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.313853 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/53683629-694a-4431-9fed-acc76d3f7cd3-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.313862 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/53683629-694a-4431-9fed-acc76d3f7cd3-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.313874 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8zlng\" (UniqueName: \"kubernetes.io/projected/53683629-694a-4431-9fed-acc76d3f7cd3-kube-api-access-8zlng\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.313887 4558 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/53683629-694a-4431-9fed-acc76d3f7cd3-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.670667 4558 generic.go:334] "Generic (PLEG): container finished" podID="53683629-694a-4431-9fed-acc76d3f7cd3" containerID="32905e44c09ace66c00f1ca2fd0fb9d8ed1543f669176d4879e968a7824d5015" exitCode=0 Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.670737 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"53683629-694a-4431-9fed-acc76d3f7cd3","Type":"ContainerDied","Data":"32905e44c09ace66c00f1ca2fd0fb9d8ed1543f669176d4879e968a7824d5015"} Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.670790 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"53683629-694a-4431-9fed-acc76d3f7cd3","Type":"ContainerDied","Data":"5ea8b20d7425543b777b1bb32feb285c6be726f17128048186682846afe6d667"} Jan 20 17:44:44 crc 
kubenswrapper[4558]: I0120 17:44:44.670804 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.670810 4558 scope.go:117] "RemoveContainer" containerID="bcea1c3d9e0a9d5d8596268b11e75b6a72ec481cef6fa080ea60730405b966df" Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.701136 4558 scope.go:117] "RemoveContainer" containerID="d26bfed01215b568d7b48379890381cce82188851203405cdabf1c45feeba58c" Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.707749 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.721310 4558 scope.go:117] "RemoveContainer" containerID="59fd501aa121cd918ab03737e2526cf05575ca3810eb4c9a6e0ef760b735a526" Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.722886 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.729868 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:44:44 crc kubenswrapper[4558]: E0120 17:44:44.730331 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53683629-694a-4431-9fed-acc76d3f7cd3" containerName="ceilometer-notification-agent" Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.730350 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="53683629-694a-4431-9fed-acc76d3f7cd3" containerName="ceilometer-notification-agent" Jan 20 17:44:44 crc kubenswrapper[4558]: E0120 17:44:44.730377 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53683629-694a-4431-9fed-acc76d3f7cd3" containerName="sg-core" Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.730383 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="53683629-694a-4431-9fed-acc76d3f7cd3" containerName="sg-core" Jan 20 17:44:44 crc kubenswrapper[4558]: E0120 17:44:44.730396 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53683629-694a-4431-9fed-acc76d3f7cd3" containerName="ceilometer-central-agent" Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.730402 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="53683629-694a-4431-9fed-acc76d3f7cd3" containerName="ceilometer-central-agent" Jan 20 17:44:44 crc kubenswrapper[4558]: E0120 17:44:44.730426 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53683629-694a-4431-9fed-acc76d3f7cd3" containerName="proxy-httpd" Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.730432 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="53683629-694a-4431-9fed-acc76d3f7cd3" containerName="proxy-httpd" Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.730592 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="53683629-694a-4431-9fed-acc76d3f7cd3" containerName="ceilometer-central-agent" Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.730620 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="53683629-694a-4431-9fed-acc76d3f7cd3" containerName="sg-core" Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.730629 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="53683629-694a-4431-9fed-acc76d3f7cd3" containerName="ceilometer-notification-agent" Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.730638 4558 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="53683629-694a-4431-9fed-acc76d3f7cd3" containerName="proxy-httpd" Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.733783 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.736825 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.737083 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ceilometer-internal-svc" Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.737250 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.740316 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.740427 4558 scope.go:117] "RemoveContainer" containerID="32905e44c09ace66c00f1ca2fd0fb9d8ed1543f669176d4879e968a7824d5015" Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.766430 4558 scope.go:117] "RemoveContainer" containerID="bcea1c3d9e0a9d5d8596268b11e75b6a72ec481cef6fa080ea60730405b966df" Jan 20 17:44:44 crc kubenswrapper[4558]: E0120 17:44:44.770306 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bcea1c3d9e0a9d5d8596268b11e75b6a72ec481cef6fa080ea60730405b966df\": container with ID starting with bcea1c3d9e0a9d5d8596268b11e75b6a72ec481cef6fa080ea60730405b966df not found: ID does not exist" containerID="bcea1c3d9e0a9d5d8596268b11e75b6a72ec481cef6fa080ea60730405b966df" Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.770362 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bcea1c3d9e0a9d5d8596268b11e75b6a72ec481cef6fa080ea60730405b966df"} err="failed to get container status \"bcea1c3d9e0a9d5d8596268b11e75b6a72ec481cef6fa080ea60730405b966df\": rpc error: code = NotFound desc = could not find container \"bcea1c3d9e0a9d5d8596268b11e75b6a72ec481cef6fa080ea60730405b966df\": container with ID starting with bcea1c3d9e0a9d5d8596268b11e75b6a72ec481cef6fa080ea60730405b966df not found: ID does not exist" Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.770392 4558 scope.go:117] "RemoveContainer" containerID="d26bfed01215b568d7b48379890381cce82188851203405cdabf1c45feeba58c" Jan 20 17:44:44 crc kubenswrapper[4558]: E0120 17:44:44.770800 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d26bfed01215b568d7b48379890381cce82188851203405cdabf1c45feeba58c\": container with ID starting with d26bfed01215b568d7b48379890381cce82188851203405cdabf1c45feeba58c not found: ID does not exist" containerID="d26bfed01215b568d7b48379890381cce82188851203405cdabf1c45feeba58c" Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.770836 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d26bfed01215b568d7b48379890381cce82188851203405cdabf1c45feeba58c"} err="failed to get container status \"d26bfed01215b568d7b48379890381cce82188851203405cdabf1c45feeba58c\": rpc error: code = NotFound desc = could not find container \"d26bfed01215b568d7b48379890381cce82188851203405cdabf1c45feeba58c\": container with ID starting with 
d26bfed01215b568d7b48379890381cce82188851203405cdabf1c45feeba58c not found: ID does not exist" Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.770860 4558 scope.go:117] "RemoveContainer" containerID="59fd501aa121cd918ab03737e2526cf05575ca3810eb4c9a6e0ef760b735a526" Jan 20 17:44:44 crc kubenswrapper[4558]: E0120 17:44:44.771222 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"59fd501aa121cd918ab03737e2526cf05575ca3810eb4c9a6e0ef760b735a526\": container with ID starting with 59fd501aa121cd918ab03737e2526cf05575ca3810eb4c9a6e0ef760b735a526 not found: ID does not exist" containerID="59fd501aa121cd918ab03737e2526cf05575ca3810eb4c9a6e0ef760b735a526" Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.771270 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"59fd501aa121cd918ab03737e2526cf05575ca3810eb4c9a6e0ef760b735a526"} err="failed to get container status \"59fd501aa121cd918ab03737e2526cf05575ca3810eb4c9a6e0ef760b735a526\": rpc error: code = NotFound desc = could not find container \"59fd501aa121cd918ab03737e2526cf05575ca3810eb4c9a6e0ef760b735a526\": container with ID starting with 59fd501aa121cd918ab03737e2526cf05575ca3810eb4c9a6e0ef760b735a526 not found: ID does not exist" Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.771301 4558 scope.go:117] "RemoveContainer" containerID="32905e44c09ace66c00f1ca2fd0fb9d8ed1543f669176d4879e968a7824d5015" Jan 20 17:44:44 crc kubenswrapper[4558]: E0120 17:44:44.771621 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"32905e44c09ace66c00f1ca2fd0fb9d8ed1543f669176d4879e968a7824d5015\": container with ID starting with 32905e44c09ace66c00f1ca2fd0fb9d8ed1543f669176d4879e968a7824d5015 not found: ID does not exist" containerID="32905e44c09ace66c00f1ca2fd0fb9d8ed1543f669176d4879e968a7824d5015" Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.771648 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"32905e44c09ace66c00f1ca2fd0fb9d8ed1543f669176d4879e968a7824d5015"} err="failed to get container status \"32905e44c09ace66c00f1ca2fd0fb9d8ed1543f669176d4879e968a7824d5015\": rpc error: code = NotFound desc = could not find container \"32905e44c09ace66c00f1ca2fd0fb9d8ed1543f669176d4879e968a7824d5015\": container with ID starting with 32905e44c09ace66c00f1ca2fd0fb9d8ed1543f669176d4879e968a7824d5015 not found: ID does not exist" Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.823117 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c77f6fd-736b-49e4-aa1c-ddffa451f3dc-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"2c77f6fd-736b-49e4-aa1c-ddffa451f3dc\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.823201 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/2c77f6fd-736b-49e4-aa1c-ddffa451f3dc-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"2c77f6fd-736b-49e4-aa1c-ddffa451f3dc\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.823483 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l7tcl\" 
(UniqueName: \"kubernetes.io/projected/2c77f6fd-736b-49e4-aa1c-ddffa451f3dc-kube-api-access-l7tcl\") pod \"ceilometer-0\" (UID: \"2c77f6fd-736b-49e4-aa1c-ddffa451f3dc\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.823684 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2c77f6fd-736b-49e4-aa1c-ddffa451f3dc-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"2c77f6fd-736b-49e4-aa1c-ddffa451f3dc\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.823746 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2c77f6fd-736b-49e4-aa1c-ddffa451f3dc-log-httpd\") pod \"ceilometer-0\" (UID: \"2c77f6fd-736b-49e4-aa1c-ddffa451f3dc\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.823879 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2c77f6fd-736b-49e4-aa1c-ddffa451f3dc-config-data\") pod \"ceilometer-0\" (UID: \"2c77f6fd-736b-49e4-aa1c-ddffa451f3dc\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.823979 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2c77f6fd-736b-49e4-aa1c-ddffa451f3dc-scripts\") pod \"ceilometer-0\" (UID: \"2c77f6fd-736b-49e4-aa1c-ddffa451f3dc\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.824339 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2c77f6fd-736b-49e4-aa1c-ddffa451f3dc-run-httpd\") pod \"ceilometer-0\" (UID: \"2c77f6fd-736b-49e4-aa1c-ddffa451f3dc\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.926119 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2c77f6fd-736b-49e4-aa1c-ddffa451f3dc-run-httpd\") pod \"ceilometer-0\" (UID: \"2c77f6fd-736b-49e4-aa1c-ddffa451f3dc\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.926394 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c77f6fd-736b-49e4-aa1c-ddffa451f3dc-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"2c77f6fd-736b-49e4-aa1c-ddffa451f3dc\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.926421 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/2c77f6fd-736b-49e4-aa1c-ddffa451f3dc-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"2c77f6fd-736b-49e4-aa1c-ddffa451f3dc\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.926464 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l7tcl\" (UniqueName: \"kubernetes.io/projected/2c77f6fd-736b-49e4-aa1c-ddffa451f3dc-kube-api-access-l7tcl\") pod \"ceilometer-0\" (UID: 
\"2c77f6fd-736b-49e4-aa1c-ddffa451f3dc\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.926500 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2c77f6fd-736b-49e4-aa1c-ddffa451f3dc-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"2c77f6fd-736b-49e4-aa1c-ddffa451f3dc\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.926522 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2c77f6fd-736b-49e4-aa1c-ddffa451f3dc-log-httpd\") pod \"ceilometer-0\" (UID: \"2c77f6fd-736b-49e4-aa1c-ddffa451f3dc\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.926545 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2c77f6fd-736b-49e4-aa1c-ddffa451f3dc-config-data\") pod \"ceilometer-0\" (UID: \"2c77f6fd-736b-49e4-aa1c-ddffa451f3dc\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.926569 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2c77f6fd-736b-49e4-aa1c-ddffa451f3dc-scripts\") pod \"ceilometer-0\" (UID: \"2c77f6fd-736b-49e4-aa1c-ddffa451f3dc\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.926713 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2c77f6fd-736b-49e4-aa1c-ddffa451f3dc-run-httpd\") pod \"ceilometer-0\" (UID: \"2c77f6fd-736b-49e4-aa1c-ddffa451f3dc\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.928306 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2c77f6fd-736b-49e4-aa1c-ddffa451f3dc-log-httpd\") pod \"ceilometer-0\" (UID: \"2c77f6fd-736b-49e4-aa1c-ddffa451f3dc\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.931347 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c77f6fd-736b-49e4-aa1c-ddffa451f3dc-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"2c77f6fd-736b-49e4-aa1c-ddffa451f3dc\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.931888 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/2c77f6fd-736b-49e4-aa1c-ddffa451f3dc-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"2c77f6fd-736b-49e4-aa1c-ddffa451f3dc\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.932215 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2c77f6fd-736b-49e4-aa1c-ddffa451f3dc-scripts\") pod \"ceilometer-0\" (UID: \"2c77f6fd-736b-49e4-aa1c-ddffa451f3dc\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.940695 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2c77f6fd-736b-49e4-aa1c-ddffa451f3dc-config-data\") pod 
\"ceilometer-0\" (UID: \"2c77f6fd-736b-49e4-aa1c-ddffa451f3dc\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.941538 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2c77f6fd-736b-49e4-aa1c-ddffa451f3dc-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"2c77f6fd-736b-49e4-aa1c-ddffa451f3dc\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:44:44 crc kubenswrapper[4558]: I0120 17:44:44.942049 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l7tcl\" (UniqueName: \"kubernetes.io/projected/2c77f6fd-736b-49e4-aa1c-ddffa451f3dc-kube-api-access-l7tcl\") pod \"ceilometer-0\" (UID: \"2c77f6fd-736b-49e4-aa1c-ddffa451f3dc\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:44:45 crc kubenswrapper[4558]: I0120 17:44:45.058764 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:44:45 crc kubenswrapper[4558]: I0120 17:44:45.688488 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:44:46 crc kubenswrapper[4558]: I0120 17:44:46.580033 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="53683629-694a-4431-9fed-acc76d3f7cd3" path="/var/lib/kubelet/pods/53683629-694a-4431-9fed-acc76d3f7cd3/volumes" Jan 20 17:44:46 crc kubenswrapper[4558]: I0120 17:44:46.697349 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"2c77f6fd-736b-49e4-aa1c-ddffa451f3dc","Type":"ContainerStarted","Data":"592aa64db3a21b522ec99255c04163435c7f2ca3c55140c0e0521af26785d506"} Jan 20 17:44:46 crc kubenswrapper[4558]: I0120 17:44:46.697742 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"2c77f6fd-736b-49e4-aa1c-ddffa451f3dc","Type":"ContainerStarted","Data":"22812fda67cb9c71ccd4f7d25fafd12b6c841f77325fa0fa6b2065bc9ef9e53a"} Jan 20 17:44:47 crc kubenswrapper[4558]: I0120 17:44:47.711079 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"2c77f6fd-736b-49e4-aa1c-ddffa451f3dc","Type":"ContainerStarted","Data":"4bb502fc98a21930b2123d685d6ca4b42bbac87bd71f98a6fe668e8dfce8ed68"} Jan 20 17:44:48 crc kubenswrapper[4558]: I0120 17:44:48.279677 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/swift-proxy-6645dd7478-nfmw9" Jan 20 17:44:48 crc kubenswrapper[4558]: I0120 17:44:48.346244 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-proxy-6f98b8f5cb-8bwzs"] Jan 20 17:44:48 crc kubenswrapper[4558]: I0120 17:44:48.346843 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-proxy-6f98b8f5cb-8bwzs" podUID="858ca371-2b02-46d8-879c-8d859c31d590" containerName="proxy-httpd" containerID="cri-o://97c43c6886d191e0702cb49b84ebfa79710357c4b83cb249c5fc746e2090b3d7" gracePeriod=30 Jan 20 17:44:48 crc kubenswrapper[4558]: I0120 17:44:48.346928 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-proxy-6f98b8f5cb-8bwzs" podUID="858ca371-2b02-46d8-879c-8d859c31d590" containerName="proxy-server" containerID="cri-o://33a031db5256c97f79957951d874d8efcad761b154e6543e7aeaa0d0878d4ddb" gracePeriod=30 Jan 20 17:44:48 crc kubenswrapper[4558]: I0120 17:44:48.756704 4558 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"2c77f6fd-736b-49e4-aa1c-ddffa451f3dc","Type":"ContainerStarted","Data":"720bb601be9d0a73599645a6bba6fa27fea9bddfb2119833260b2abb7695bc2c"} Jan 20 17:44:48 crc kubenswrapper[4558]: I0120 17:44:48.761620 4558 generic.go:334] "Generic (PLEG): container finished" podID="858ca371-2b02-46d8-879c-8d859c31d590" containerID="33a031db5256c97f79957951d874d8efcad761b154e6543e7aeaa0d0878d4ddb" exitCode=0 Jan 20 17:44:48 crc kubenswrapper[4558]: I0120 17:44:48.761710 4558 generic.go:334] "Generic (PLEG): container finished" podID="858ca371-2b02-46d8-879c-8d859c31d590" containerID="97c43c6886d191e0702cb49b84ebfa79710357c4b83cb249c5fc746e2090b3d7" exitCode=0 Jan 20 17:44:48 crc kubenswrapper[4558]: I0120 17:44:48.761740 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-6f98b8f5cb-8bwzs" event={"ID":"858ca371-2b02-46d8-879c-8d859c31d590","Type":"ContainerDied","Data":"33a031db5256c97f79957951d874d8efcad761b154e6543e7aeaa0d0878d4ddb"} Jan 20 17:44:48 crc kubenswrapper[4558]: I0120 17:44:48.761820 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-6f98b8f5cb-8bwzs" event={"ID":"858ca371-2b02-46d8-879c-8d859c31d590","Type":"ContainerDied","Data":"97c43c6886d191e0702cb49b84ebfa79710357c4b83cb249c5fc746e2090b3d7"} Jan 20 17:44:49 crc kubenswrapper[4558]: I0120 17:44:49.020049 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-proxy-6f98b8f5cb-8bwzs" Jan 20 17:44:49 crc kubenswrapper[4558]: I0120 17:44:49.125385 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ps4km\" (UniqueName: \"kubernetes.io/projected/858ca371-2b02-46d8-879c-8d859c31d590-kube-api-access-ps4km\") pod \"858ca371-2b02-46d8-879c-8d859c31d590\" (UID: \"858ca371-2b02-46d8-879c-8d859c31d590\") " Jan 20 17:44:49 crc kubenswrapper[4558]: I0120 17:44:49.125561 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/858ca371-2b02-46d8-879c-8d859c31d590-etc-swift\") pod \"858ca371-2b02-46d8-879c-8d859c31d590\" (UID: \"858ca371-2b02-46d8-879c-8d859c31d590\") " Jan 20 17:44:49 crc kubenswrapper[4558]: I0120 17:44:49.125721 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/858ca371-2b02-46d8-879c-8d859c31d590-config-data\") pod \"858ca371-2b02-46d8-879c-8d859c31d590\" (UID: \"858ca371-2b02-46d8-879c-8d859c31d590\") " Jan 20 17:44:49 crc kubenswrapper[4558]: I0120 17:44:49.125773 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/858ca371-2b02-46d8-879c-8d859c31d590-log-httpd\") pod \"858ca371-2b02-46d8-879c-8d859c31d590\" (UID: \"858ca371-2b02-46d8-879c-8d859c31d590\") " Jan 20 17:44:49 crc kubenswrapper[4558]: I0120 17:44:49.125802 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/858ca371-2b02-46d8-879c-8d859c31d590-run-httpd\") pod \"858ca371-2b02-46d8-879c-8d859c31d590\" (UID: \"858ca371-2b02-46d8-879c-8d859c31d590\") " Jan 20 17:44:49 crc kubenswrapper[4558]: I0120 17:44:49.125839 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/858ca371-2b02-46d8-879c-8d859c31d590-public-tls-certs\") pod \"858ca371-2b02-46d8-879c-8d859c31d590\" (UID: \"858ca371-2b02-46d8-879c-8d859c31d590\") " Jan 20 17:44:49 crc kubenswrapper[4558]: I0120 17:44:49.125867 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/858ca371-2b02-46d8-879c-8d859c31d590-combined-ca-bundle\") pod \"858ca371-2b02-46d8-879c-8d859c31d590\" (UID: \"858ca371-2b02-46d8-879c-8d859c31d590\") " Jan 20 17:44:49 crc kubenswrapper[4558]: I0120 17:44:49.126044 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/858ca371-2b02-46d8-879c-8d859c31d590-internal-tls-certs\") pod \"858ca371-2b02-46d8-879c-8d859c31d590\" (UID: \"858ca371-2b02-46d8-879c-8d859c31d590\") " Jan 20 17:44:49 crc kubenswrapper[4558]: I0120 17:44:49.127704 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/858ca371-2b02-46d8-879c-8d859c31d590-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "858ca371-2b02-46d8-879c-8d859c31d590" (UID: "858ca371-2b02-46d8-879c-8d859c31d590"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:44:49 crc kubenswrapper[4558]: I0120 17:44:49.128220 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/858ca371-2b02-46d8-879c-8d859c31d590-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "858ca371-2b02-46d8-879c-8d859c31d590" (UID: "858ca371-2b02-46d8-879c-8d859c31d590"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:44:49 crc kubenswrapper[4558]: I0120 17:44:49.128699 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/858ca371-2b02-46d8-879c-8d859c31d590-kube-api-access-ps4km" (OuterVolumeSpecName: "kube-api-access-ps4km") pod "858ca371-2b02-46d8-879c-8d859c31d590" (UID: "858ca371-2b02-46d8-879c-8d859c31d590"). InnerVolumeSpecName "kube-api-access-ps4km". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:44:49 crc kubenswrapper[4558]: I0120 17:44:49.136420 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/858ca371-2b02-46d8-879c-8d859c31d590-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "858ca371-2b02-46d8-879c-8d859c31d590" (UID: "858ca371-2b02-46d8-879c-8d859c31d590"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:44:49 crc kubenswrapper[4558]: I0120 17:44:49.189463 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/858ca371-2b02-46d8-879c-8d859c31d590-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "858ca371-2b02-46d8-879c-8d859c31d590" (UID: "858ca371-2b02-46d8-879c-8d859c31d590"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:44:49 crc kubenswrapper[4558]: I0120 17:44:49.192766 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/858ca371-2b02-46d8-879c-8d859c31d590-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "858ca371-2b02-46d8-879c-8d859c31d590" (UID: "858ca371-2b02-46d8-879c-8d859c31d590"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:44:49 crc kubenswrapper[4558]: I0120 17:44:49.199760 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/858ca371-2b02-46d8-879c-8d859c31d590-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "858ca371-2b02-46d8-879c-8d859c31d590" (UID: "858ca371-2b02-46d8-879c-8d859c31d590"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:44:49 crc kubenswrapper[4558]: I0120 17:44:49.219298 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/858ca371-2b02-46d8-879c-8d859c31d590-config-data" (OuterVolumeSpecName: "config-data") pod "858ca371-2b02-46d8-879c-8d859c31d590" (UID: "858ca371-2b02-46d8-879c-8d859c31d590"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:44:49 crc kubenswrapper[4558]: I0120 17:44:49.228504 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ps4km\" (UniqueName: \"kubernetes.io/projected/858ca371-2b02-46d8-879c-8d859c31d590-kube-api-access-ps4km\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:49 crc kubenswrapper[4558]: I0120 17:44:49.228536 4558 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/858ca371-2b02-46d8-879c-8d859c31d590-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:49 crc kubenswrapper[4558]: I0120 17:44:49.228549 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/858ca371-2b02-46d8-879c-8d859c31d590-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:49 crc kubenswrapper[4558]: I0120 17:44:49.228561 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/858ca371-2b02-46d8-879c-8d859c31d590-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:49 crc kubenswrapper[4558]: I0120 17:44:49.228570 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/858ca371-2b02-46d8-879c-8d859c31d590-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:49 crc kubenswrapper[4558]: I0120 17:44:49.228592 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/858ca371-2b02-46d8-879c-8d859c31d590-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:49 crc kubenswrapper[4558]: I0120 17:44:49.228603 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/858ca371-2b02-46d8-879c-8d859c31d590-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:49 crc kubenswrapper[4558]: I0120 17:44:49.228612 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/858ca371-2b02-46d8-879c-8d859c31d590-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:44:49 crc kubenswrapper[4558]: I0120 17:44:49.773278 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"2c77f6fd-736b-49e4-aa1c-ddffa451f3dc","Type":"ContainerStarted","Data":"245499fc8d9dd24531eb844ac444830c3cbb3a8f2341bb67bd362bf2fe1577af"} Jan 20 17:44:49 crc kubenswrapper[4558]: I0120 17:44:49.774104 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 
17:44:49 crc kubenswrapper[4558]: I0120 17:44:49.776371 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-6f98b8f5cb-8bwzs" event={"ID":"858ca371-2b02-46d8-879c-8d859c31d590","Type":"ContainerDied","Data":"557c7da51a9a4439988eea322c95c064d80eec1313541d8d0ced282f3435d124"} Jan 20 17:44:49 crc kubenswrapper[4558]: I0120 17:44:49.776510 4558 scope.go:117] "RemoveContainer" containerID="33a031db5256c97f79957951d874d8efcad761b154e6543e7aeaa0d0878d4ddb" Jan 20 17:44:49 crc kubenswrapper[4558]: I0120 17:44:49.776429 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-proxy-6f98b8f5cb-8bwzs" Jan 20 17:44:49 crc kubenswrapper[4558]: I0120 17:44:49.801994 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=2.380082105 podStartE2EDuration="5.8019736s" podCreationTimestamp="2026-01-20 17:44:44 +0000 UTC" firstStartedPulling="2026-01-20 17:44:45.695391203 +0000 UTC m=+3779.455729170" lastFinishedPulling="2026-01-20 17:44:49.117282698 +0000 UTC m=+3782.877620665" observedRunningTime="2026-01-20 17:44:49.795832814 +0000 UTC m=+3783.556170770" watchObservedRunningTime="2026-01-20 17:44:49.8019736 +0000 UTC m=+3783.562311567" Jan 20 17:44:49 crc kubenswrapper[4558]: I0120 17:44:49.806666 4558 scope.go:117] "RemoveContainer" containerID="97c43c6886d191e0702cb49b84ebfa79710357c4b83cb249c5fc746e2090b3d7" Jan 20 17:44:49 crc kubenswrapper[4558]: I0120 17:44:49.832720 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-proxy-6f98b8f5cb-8bwzs"] Jan 20 17:44:49 crc kubenswrapper[4558]: I0120 17:44:49.842596 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/swift-proxy-6f98b8f5cb-8bwzs"] Jan 20 17:44:50 crc kubenswrapper[4558]: I0120 17:44:50.566415 4558 scope.go:117] "RemoveContainer" containerID="2d2a8989ca5a5af98b2c9dc8f801ea0675339ec14800f437c058c5a43c20146b" Jan 20 17:44:50 crc kubenswrapper[4558]: E0120 17:44:50.567269 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:44:50 crc kubenswrapper[4558]: I0120 17:44:50.575878 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="858ca371-2b02-46d8-879c-8d859c31d590" path="/var/lib/kubelet/pods/858ca371-2b02-46d8-879c-8d859c31d590/volumes" Jan 20 17:44:57 crc kubenswrapper[4558]: I0120 17:44:57.566796 4558 scope.go:117] "RemoveContainer" containerID="f175857b081b6702705f8a9a0de77c7f1bd59b4f0ad1848d4abf509a9f1c1ae7" Jan 20 17:44:57 crc kubenswrapper[4558]: E0120 17:44:57.568079 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-cell0-conductor-conductor\" with CrashLoopBackOff: \"back-off 40s restarting failed container=nova-cell0-conductor-conductor pod=nova-cell0-conductor-0_openstack-kuttl-tests(50cacddd-ebea-477f-af64-6e96a09a242e)\"" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="50cacddd-ebea-477f-af64-6e96a09a242e" Jan 20 17:45:00 crc kubenswrapper[4558]: I0120 17:45:00.147549 4558 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-operator-lifecycle-manager/collect-profiles-29482185-zcxm2"] Jan 20 17:45:00 crc kubenswrapper[4558]: E0120 17:45:00.148324 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="858ca371-2b02-46d8-879c-8d859c31d590" containerName="proxy-server" Jan 20 17:45:00 crc kubenswrapper[4558]: I0120 17:45:00.148346 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="858ca371-2b02-46d8-879c-8d859c31d590" containerName="proxy-server" Jan 20 17:45:00 crc kubenswrapper[4558]: E0120 17:45:00.148385 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="858ca371-2b02-46d8-879c-8d859c31d590" containerName="proxy-httpd" Jan 20 17:45:00 crc kubenswrapper[4558]: I0120 17:45:00.148391 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="858ca371-2b02-46d8-879c-8d859c31d590" containerName="proxy-httpd" Jan 20 17:45:00 crc kubenswrapper[4558]: I0120 17:45:00.148689 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="858ca371-2b02-46d8-879c-8d859c31d590" containerName="proxy-httpd" Jan 20 17:45:00 crc kubenswrapper[4558]: I0120 17:45:00.148704 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="858ca371-2b02-46d8-879c-8d859c31d590" containerName="proxy-server" Jan 20 17:45:00 crc kubenswrapper[4558]: I0120 17:45:00.149786 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29482185-zcxm2" Jan 20 17:45:00 crc kubenswrapper[4558]: I0120 17:45:00.152526 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 20 17:45:00 crc kubenswrapper[4558]: I0120 17:45:00.152687 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 20 17:45:00 crc kubenswrapper[4558]: I0120 17:45:00.157477 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29482185-zcxm2"] Jan 20 17:45:00 crc kubenswrapper[4558]: I0120 17:45:00.241789 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/39ad7d98-b1fa-4d75-bbbb-75b74a26170c-config-volume\") pod \"collect-profiles-29482185-zcxm2\" (UID: \"39ad7d98-b1fa-4d75-bbbb-75b74a26170c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482185-zcxm2" Jan 20 17:45:00 crc kubenswrapper[4558]: I0120 17:45:00.242080 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/39ad7d98-b1fa-4d75-bbbb-75b74a26170c-secret-volume\") pod \"collect-profiles-29482185-zcxm2\" (UID: \"39ad7d98-b1fa-4d75-bbbb-75b74a26170c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482185-zcxm2" Jan 20 17:45:00 crc kubenswrapper[4558]: I0120 17:45:00.242373 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wmblm\" (UniqueName: \"kubernetes.io/projected/39ad7d98-b1fa-4d75-bbbb-75b74a26170c-kube-api-access-wmblm\") pod \"collect-profiles-29482185-zcxm2\" (UID: \"39ad7d98-b1fa-4d75-bbbb-75b74a26170c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482185-zcxm2" Jan 20 17:45:00 crc kubenswrapper[4558]: I0120 17:45:00.345085 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-wmblm\" (UniqueName: \"kubernetes.io/projected/39ad7d98-b1fa-4d75-bbbb-75b74a26170c-kube-api-access-wmblm\") pod \"collect-profiles-29482185-zcxm2\" (UID: \"39ad7d98-b1fa-4d75-bbbb-75b74a26170c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482185-zcxm2" Jan 20 17:45:00 crc kubenswrapper[4558]: I0120 17:45:00.345401 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/39ad7d98-b1fa-4d75-bbbb-75b74a26170c-config-volume\") pod \"collect-profiles-29482185-zcxm2\" (UID: \"39ad7d98-b1fa-4d75-bbbb-75b74a26170c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482185-zcxm2" Jan 20 17:45:00 crc kubenswrapper[4558]: I0120 17:45:00.345531 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/39ad7d98-b1fa-4d75-bbbb-75b74a26170c-secret-volume\") pod \"collect-profiles-29482185-zcxm2\" (UID: \"39ad7d98-b1fa-4d75-bbbb-75b74a26170c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482185-zcxm2" Jan 20 17:45:00 crc kubenswrapper[4558]: I0120 17:45:00.346406 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/39ad7d98-b1fa-4d75-bbbb-75b74a26170c-config-volume\") pod \"collect-profiles-29482185-zcxm2\" (UID: \"39ad7d98-b1fa-4d75-bbbb-75b74a26170c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482185-zcxm2" Jan 20 17:45:00 crc kubenswrapper[4558]: I0120 17:45:00.359089 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/39ad7d98-b1fa-4d75-bbbb-75b74a26170c-secret-volume\") pod \"collect-profiles-29482185-zcxm2\" (UID: \"39ad7d98-b1fa-4d75-bbbb-75b74a26170c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482185-zcxm2" Jan 20 17:45:00 crc kubenswrapper[4558]: I0120 17:45:00.363807 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wmblm\" (UniqueName: \"kubernetes.io/projected/39ad7d98-b1fa-4d75-bbbb-75b74a26170c-kube-api-access-wmblm\") pod \"collect-profiles-29482185-zcxm2\" (UID: \"39ad7d98-b1fa-4d75-bbbb-75b74a26170c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482185-zcxm2" Jan 20 17:45:00 crc kubenswrapper[4558]: I0120 17:45:00.471609 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29482185-zcxm2" Jan 20 17:45:00 crc kubenswrapper[4558]: I0120 17:45:00.885573 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29482185-zcxm2"] Jan 20 17:45:00 crc kubenswrapper[4558]: I0120 17:45:00.897743 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29482185-zcxm2" event={"ID":"39ad7d98-b1fa-4d75-bbbb-75b74a26170c","Type":"ContainerStarted","Data":"67f90ed392c78cf49dc1004c59a96ae8b76535f3c31a530dd289cabd70518ae9"} Jan 20 17:45:01 crc kubenswrapper[4558]: I0120 17:45:01.910261 4558 generic.go:334] "Generic (PLEG): container finished" podID="39ad7d98-b1fa-4d75-bbbb-75b74a26170c" containerID="9f1fd3eb3fe362feea3c236618713670db2e56630ed7a68938966713c5f2b528" exitCode=0 Jan 20 17:45:01 crc kubenswrapper[4558]: I0120 17:45:01.910445 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29482185-zcxm2" event={"ID":"39ad7d98-b1fa-4d75-bbbb-75b74a26170c","Type":"ContainerDied","Data":"9f1fd3eb3fe362feea3c236618713670db2e56630ed7a68938966713c5f2b528"} Jan 20 17:45:02 crc kubenswrapper[4558]: I0120 17:45:02.566220 4558 scope.go:117] "RemoveContainer" containerID="2d2a8989ca5a5af98b2c9dc8f801ea0675339ec14800f437c058c5a43c20146b" Jan 20 17:45:02 crc kubenswrapper[4558]: E0120 17:45:02.566729 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:45:03 crc kubenswrapper[4558]: I0120 17:45:03.225800 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29482185-zcxm2" Jan 20 17:45:03 crc kubenswrapper[4558]: I0120 17:45:03.315200 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/39ad7d98-b1fa-4d75-bbbb-75b74a26170c-secret-volume\") pod \"39ad7d98-b1fa-4d75-bbbb-75b74a26170c\" (UID: \"39ad7d98-b1fa-4d75-bbbb-75b74a26170c\") " Jan 20 17:45:03 crc kubenswrapper[4558]: I0120 17:45:03.315263 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/39ad7d98-b1fa-4d75-bbbb-75b74a26170c-config-volume\") pod \"39ad7d98-b1fa-4d75-bbbb-75b74a26170c\" (UID: \"39ad7d98-b1fa-4d75-bbbb-75b74a26170c\") " Jan 20 17:45:03 crc kubenswrapper[4558]: I0120 17:45:03.315348 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wmblm\" (UniqueName: \"kubernetes.io/projected/39ad7d98-b1fa-4d75-bbbb-75b74a26170c-kube-api-access-wmblm\") pod \"39ad7d98-b1fa-4d75-bbbb-75b74a26170c\" (UID: \"39ad7d98-b1fa-4d75-bbbb-75b74a26170c\") " Jan 20 17:45:03 crc kubenswrapper[4558]: I0120 17:45:03.315983 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/39ad7d98-b1fa-4d75-bbbb-75b74a26170c-config-volume" (OuterVolumeSpecName: "config-volume") pod "39ad7d98-b1fa-4d75-bbbb-75b74a26170c" (UID: "39ad7d98-b1fa-4d75-bbbb-75b74a26170c"). 
InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:45:03 crc kubenswrapper[4558]: I0120 17:45:03.321182 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/39ad7d98-b1fa-4d75-bbbb-75b74a26170c-kube-api-access-wmblm" (OuterVolumeSpecName: "kube-api-access-wmblm") pod "39ad7d98-b1fa-4d75-bbbb-75b74a26170c" (UID: "39ad7d98-b1fa-4d75-bbbb-75b74a26170c"). InnerVolumeSpecName "kube-api-access-wmblm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:45:03 crc kubenswrapper[4558]: I0120 17:45:03.322580 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/39ad7d98-b1fa-4d75-bbbb-75b74a26170c-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "39ad7d98-b1fa-4d75-bbbb-75b74a26170c" (UID: "39ad7d98-b1fa-4d75-bbbb-75b74a26170c"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:03 crc kubenswrapper[4558]: I0120 17:45:03.418048 4558 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/39ad7d98-b1fa-4d75-bbbb-75b74a26170c-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:03 crc kubenswrapper[4558]: I0120 17:45:03.418085 4558 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/39ad7d98-b1fa-4d75-bbbb-75b74a26170c-config-volume\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:03 crc kubenswrapper[4558]: I0120 17:45:03.418100 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wmblm\" (UniqueName: \"kubernetes.io/projected/39ad7d98-b1fa-4d75-bbbb-75b74a26170c-kube-api-access-wmblm\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:03 crc kubenswrapper[4558]: I0120 17:45:03.933246 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29482185-zcxm2" event={"ID":"39ad7d98-b1fa-4d75-bbbb-75b74a26170c","Type":"ContainerDied","Data":"67f90ed392c78cf49dc1004c59a96ae8b76535f3c31a530dd289cabd70518ae9"} Jan 20 17:45:03 crc kubenswrapper[4558]: I0120 17:45:03.933565 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="67f90ed392c78cf49dc1004c59a96ae8b76535f3c31a530dd289cabd70518ae9" Jan 20 17:45:03 crc kubenswrapper[4558]: I0120 17:45:03.933302 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29482185-zcxm2" Jan 20 17:45:04 crc kubenswrapper[4558]: I0120 17:45:04.296897 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29482140-szzm6"] Jan 20 17:45:04 crc kubenswrapper[4558]: I0120 17:45:04.305986 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29482140-szzm6"] Jan 20 17:45:04 crc kubenswrapper[4558]: I0120 17:45:04.578059 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e1aaf016-20be-4047-919d-cda81437437a" path="/var/lib/kubelet/pods/e1aaf016-20be-4047-919d-cda81437437a/volumes" Jan 20 17:45:12 crc kubenswrapper[4558]: I0120 17:45:12.566873 4558 scope.go:117] "RemoveContainer" containerID="f175857b081b6702705f8a9a0de77c7f1bd59b4f0ad1848d4abf509a9f1c1ae7" Jan 20 17:45:13 crc kubenswrapper[4558]: I0120 17:45:13.027366 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"50cacddd-ebea-477f-af64-6e96a09a242e","Type":"ContainerStarted","Data":"4568dafb1cb4a5468a42a5482158e5c167d869bcb8c626b55be898275037c53d"} Jan 20 17:45:13 crc kubenswrapper[4558]: I0120 17:45:13.027982 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:45:14 crc kubenswrapper[4558]: I0120 17:45:14.732529 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-xdqfg"] Jan 20 17:45:14 crc kubenswrapper[4558]: E0120 17:45:14.732979 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39ad7d98-b1fa-4d75-bbbb-75b74a26170c" containerName="collect-profiles" Jan 20 17:45:14 crc kubenswrapper[4558]: I0120 17:45:14.732993 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="39ad7d98-b1fa-4d75-bbbb-75b74a26170c" containerName="collect-profiles" Jan 20 17:45:14 crc kubenswrapper[4558]: I0120 17:45:14.733230 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="39ad7d98-b1fa-4d75-bbbb-75b74a26170c" containerName="collect-profiles" Jan 20 17:45:14 crc kubenswrapper[4558]: I0120 17:45:14.734545 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xdqfg" Jan 20 17:45:14 crc kubenswrapper[4558]: I0120 17:45:14.747207 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mwzqh\" (UniqueName: \"kubernetes.io/projected/33b38636-2daf-4067-b29c-ed41dcbd49d7-kube-api-access-mwzqh\") pod \"redhat-marketplace-xdqfg\" (UID: \"33b38636-2daf-4067-b29c-ed41dcbd49d7\") " pod="openshift-marketplace/redhat-marketplace-xdqfg" Jan 20 17:45:14 crc kubenswrapper[4558]: I0120 17:45:14.747418 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/33b38636-2daf-4067-b29c-ed41dcbd49d7-catalog-content\") pod \"redhat-marketplace-xdqfg\" (UID: \"33b38636-2daf-4067-b29c-ed41dcbd49d7\") " pod="openshift-marketplace/redhat-marketplace-xdqfg" Jan 20 17:45:14 crc kubenswrapper[4558]: I0120 17:45:14.747468 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/33b38636-2daf-4067-b29c-ed41dcbd49d7-utilities\") pod \"redhat-marketplace-xdqfg\" (UID: \"33b38636-2daf-4067-b29c-ed41dcbd49d7\") " pod="openshift-marketplace/redhat-marketplace-xdqfg" Jan 20 17:45:14 crc kubenswrapper[4558]: I0120 17:45:14.756737 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-xdqfg"] Jan 20 17:45:14 crc kubenswrapper[4558]: I0120 17:45:14.849547 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mwzqh\" (UniqueName: \"kubernetes.io/projected/33b38636-2daf-4067-b29c-ed41dcbd49d7-kube-api-access-mwzqh\") pod \"redhat-marketplace-xdqfg\" (UID: \"33b38636-2daf-4067-b29c-ed41dcbd49d7\") " pod="openshift-marketplace/redhat-marketplace-xdqfg" Jan 20 17:45:14 crc kubenswrapper[4558]: I0120 17:45:14.849773 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/33b38636-2daf-4067-b29c-ed41dcbd49d7-catalog-content\") pod \"redhat-marketplace-xdqfg\" (UID: \"33b38636-2daf-4067-b29c-ed41dcbd49d7\") " pod="openshift-marketplace/redhat-marketplace-xdqfg" Jan 20 17:45:14 crc kubenswrapper[4558]: I0120 17:45:14.849873 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/33b38636-2daf-4067-b29c-ed41dcbd49d7-utilities\") pod \"redhat-marketplace-xdqfg\" (UID: \"33b38636-2daf-4067-b29c-ed41dcbd49d7\") " pod="openshift-marketplace/redhat-marketplace-xdqfg" Jan 20 17:45:14 crc kubenswrapper[4558]: I0120 17:45:14.850300 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/33b38636-2daf-4067-b29c-ed41dcbd49d7-catalog-content\") pod \"redhat-marketplace-xdqfg\" (UID: \"33b38636-2daf-4067-b29c-ed41dcbd49d7\") " pod="openshift-marketplace/redhat-marketplace-xdqfg" Jan 20 17:45:14 crc kubenswrapper[4558]: I0120 17:45:14.850350 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/33b38636-2daf-4067-b29c-ed41dcbd49d7-utilities\") pod \"redhat-marketplace-xdqfg\" (UID: \"33b38636-2daf-4067-b29c-ed41dcbd49d7\") " pod="openshift-marketplace/redhat-marketplace-xdqfg" Jan 20 17:45:14 crc kubenswrapper[4558]: I0120 17:45:14.875088 4558 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-mwzqh\" (UniqueName: \"kubernetes.io/projected/33b38636-2daf-4067-b29c-ed41dcbd49d7-kube-api-access-mwzqh\") pod \"redhat-marketplace-xdqfg\" (UID: \"33b38636-2daf-4067-b29c-ed41dcbd49d7\") " pod="openshift-marketplace/redhat-marketplace-xdqfg" Jan 20 17:45:15 crc kubenswrapper[4558]: I0120 17:45:15.061139 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xdqfg" Jan 20 17:45:15 crc kubenswrapper[4558]: I0120 17:45:15.079951 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:45:15 crc kubenswrapper[4558]: I0120 17:45:15.524736 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-xdqfg"] Jan 20 17:45:15 crc kubenswrapper[4558]: I0120 17:45:15.566126 4558 scope.go:117] "RemoveContainer" containerID="2d2a8989ca5a5af98b2c9dc8f801ea0675339ec14800f437c058c5a43c20146b" Jan 20 17:45:15 crc kubenswrapper[4558]: E0120 17:45:15.566477 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:45:16 crc kubenswrapper[4558]: I0120 17:45:16.069646 4558 generic.go:334] "Generic (PLEG): container finished" podID="33b38636-2daf-4067-b29c-ed41dcbd49d7" containerID="741130739b0460d46603e2634a37e6587acafb775777dbcb6fc30713d1b02c54" exitCode=0 Jan 20 17:45:16 crc kubenswrapper[4558]: I0120 17:45:16.069707 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xdqfg" event={"ID":"33b38636-2daf-4067-b29c-ed41dcbd49d7","Type":"ContainerDied","Data":"741130739b0460d46603e2634a37e6587acafb775777dbcb6fc30713d1b02c54"} Jan 20 17:45:16 crc kubenswrapper[4558]: I0120 17:45:16.069743 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xdqfg" event={"ID":"33b38636-2daf-4067-b29c-ed41dcbd49d7","Type":"ContainerStarted","Data":"c63261a2a73af2c61c790a9f08a8fe647919ec6edd5792b562c8b5fb4f4d8dd8"} Jan 20 17:45:17 crc kubenswrapper[4558]: I0120 17:45:17.081359 4558 generic.go:334] "Generic (PLEG): container finished" podID="33b38636-2daf-4067-b29c-ed41dcbd49d7" containerID="97bc558595838ee4e0deddf6fe63d3195d6972eade1554acebca9bc2e26b1328" exitCode=0 Jan 20 17:45:17 crc kubenswrapper[4558]: I0120 17:45:17.081419 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xdqfg" event={"ID":"33b38636-2daf-4067-b29c-ed41dcbd49d7","Type":"ContainerDied","Data":"97bc558595838ee4e0deddf6fe63d3195d6972eade1554acebca9bc2e26b1328"} Jan 20 17:45:17 crc kubenswrapper[4558]: I0120 17:45:17.325583 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:45:18 crc kubenswrapper[4558]: I0120 17:45:18.094604 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xdqfg" event={"ID":"33b38636-2daf-4067-b29c-ed41dcbd49d7","Type":"ContainerStarted","Data":"9c6dd7cd1b5a87ce417cd16d1b6b6543238170bf7e7099a15726d0b3520193b0"} Jan 20 17:45:18 crc kubenswrapper[4558]: I0120 17:45:18.115316 4558 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-xdqfg" podStartSLOduration=2.414573378 podStartE2EDuration="4.115294521s" podCreationTimestamp="2026-01-20 17:45:14 +0000 UTC" firstStartedPulling="2026-01-20 17:45:16.071943392 +0000 UTC m=+3809.832281359" lastFinishedPulling="2026-01-20 17:45:17.772664535 +0000 UTC m=+3811.533002502" observedRunningTime="2026-01-20 17:45:18.111010366 +0000 UTC m=+3811.871348333" watchObservedRunningTime="2026-01-20 17:45:18.115294521 +0000 UTC m=+3811.875632489" Jan 20 17:45:20 crc kubenswrapper[4558]: I0120 17:45:20.689106 4558 scope.go:117] "RemoveContainer" containerID="cbe6e8e1b940e039a8a8149b56ab4e7de324c89518b562816f041b1e4ddc6121" Jan 20 17:45:20 crc kubenswrapper[4558]: I0120 17:45:20.715188 4558 scope.go:117] "RemoveContainer" containerID="f112bf24b33d220f86c5571e1b43c010cb9948d3f6d700a30ce27b148b5f96bb" Jan 20 17:45:20 crc kubenswrapper[4558]: I0120 17:45:20.737469 4558 scope.go:117] "RemoveContainer" containerID="3274979f0e29cb3fdb36de68c7cbda0f264a0e0d285f39a08bf733de6ae50ef0" Jan 20 17:45:20 crc kubenswrapper[4558]: I0120 17:45:20.759088 4558 scope.go:117] "RemoveContainer" containerID="2229f0a8777c34d3e061821af388151b63ed5e3782ef9ab2ed353af97398eb48" Jan 20 17:45:20 crc kubenswrapper[4558]: I0120 17:45:20.807775 4558 scope.go:117] "RemoveContainer" containerID="1bc3200f596051008ebbe5f7b24025a7b896079f1476665b72b344bebfe77bf2" Jan 20 17:45:25 crc kubenswrapper[4558]: I0120 17:45:25.066064 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-xdqfg" Jan 20 17:45:25 crc kubenswrapper[4558]: I0120 17:45:25.066537 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-xdqfg" Jan 20 17:45:25 crc kubenswrapper[4558]: I0120 17:45:25.109134 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-xdqfg" Jan 20 17:45:25 crc kubenswrapper[4558]: I0120 17:45:25.212022 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-xdqfg" Jan 20 17:45:25 crc kubenswrapper[4558]: I0120 17:45:25.342954 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-xdqfg"] Jan 20 17:45:27 crc kubenswrapper[4558]: I0120 17:45:27.188264 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-xdqfg" podUID="33b38636-2daf-4067-b29c-ed41dcbd49d7" containerName="registry-server" containerID="cri-o://9c6dd7cd1b5a87ce417cd16d1b6b6543238170bf7e7099a15726d0b3520193b0" gracePeriod=2 Jan 20 17:45:27 crc kubenswrapper[4558]: I0120 17:45:27.605257 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xdqfg" Jan 20 17:45:27 crc kubenswrapper[4558]: I0120 17:45:27.654814 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mwzqh\" (UniqueName: \"kubernetes.io/projected/33b38636-2daf-4067-b29c-ed41dcbd49d7-kube-api-access-mwzqh\") pod \"33b38636-2daf-4067-b29c-ed41dcbd49d7\" (UID: \"33b38636-2daf-4067-b29c-ed41dcbd49d7\") " Jan 20 17:45:27 crc kubenswrapper[4558]: I0120 17:45:27.654906 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/33b38636-2daf-4067-b29c-ed41dcbd49d7-catalog-content\") pod \"33b38636-2daf-4067-b29c-ed41dcbd49d7\" (UID: \"33b38636-2daf-4067-b29c-ed41dcbd49d7\") " Jan 20 17:45:27 crc kubenswrapper[4558]: I0120 17:45:27.654976 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/33b38636-2daf-4067-b29c-ed41dcbd49d7-utilities\") pod \"33b38636-2daf-4067-b29c-ed41dcbd49d7\" (UID: \"33b38636-2daf-4067-b29c-ed41dcbd49d7\") " Jan 20 17:45:27 crc kubenswrapper[4558]: I0120 17:45:27.656037 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/33b38636-2daf-4067-b29c-ed41dcbd49d7-utilities" (OuterVolumeSpecName: "utilities") pod "33b38636-2daf-4067-b29c-ed41dcbd49d7" (UID: "33b38636-2daf-4067-b29c-ed41dcbd49d7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:45:27 crc kubenswrapper[4558]: I0120 17:45:27.656733 4558 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/33b38636-2daf-4067-b29c-ed41dcbd49d7-utilities\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:27 crc kubenswrapper[4558]: I0120 17:45:27.664246 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/33b38636-2daf-4067-b29c-ed41dcbd49d7-kube-api-access-mwzqh" (OuterVolumeSpecName: "kube-api-access-mwzqh") pod "33b38636-2daf-4067-b29c-ed41dcbd49d7" (UID: "33b38636-2daf-4067-b29c-ed41dcbd49d7"). InnerVolumeSpecName "kube-api-access-mwzqh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:45:27 crc kubenswrapper[4558]: I0120 17:45:27.672442 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/33b38636-2daf-4067-b29c-ed41dcbd49d7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "33b38636-2daf-4067-b29c-ed41dcbd49d7" (UID: "33b38636-2daf-4067-b29c-ed41dcbd49d7"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:45:27 crc kubenswrapper[4558]: I0120 17:45:27.759053 4558 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/33b38636-2daf-4067-b29c-ed41dcbd49d7-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:27 crc kubenswrapper[4558]: I0120 17:45:27.759089 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mwzqh\" (UniqueName: \"kubernetes.io/projected/33b38636-2daf-4067-b29c-ed41dcbd49d7-kube-api-access-mwzqh\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:28 crc kubenswrapper[4558]: I0120 17:45:28.201227 4558 generic.go:334] "Generic (PLEG): container finished" podID="33b38636-2daf-4067-b29c-ed41dcbd49d7" containerID="9c6dd7cd1b5a87ce417cd16d1b6b6543238170bf7e7099a15726d0b3520193b0" exitCode=0 Jan 20 17:45:28 crc kubenswrapper[4558]: I0120 17:45:28.201344 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xdqfg" Jan 20 17:45:28 crc kubenswrapper[4558]: I0120 17:45:28.201377 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xdqfg" event={"ID":"33b38636-2daf-4067-b29c-ed41dcbd49d7","Type":"ContainerDied","Data":"9c6dd7cd1b5a87ce417cd16d1b6b6543238170bf7e7099a15726d0b3520193b0"} Jan 20 17:45:28 crc kubenswrapper[4558]: I0120 17:45:28.202405 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xdqfg" event={"ID":"33b38636-2daf-4067-b29c-ed41dcbd49d7","Type":"ContainerDied","Data":"c63261a2a73af2c61c790a9f08a8fe647919ec6edd5792b562c8b5fb4f4d8dd8"} Jan 20 17:45:28 crc kubenswrapper[4558]: I0120 17:45:28.202428 4558 scope.go:117] "RemoveContainer" containerID="9c6dd7cd1b5a87ce417cd16d1b6b6543238170bf7e7099a15726d0b3520193b0" Jan 20 17:45:28 crc kubenswrapper[4558]: I0120 17:45:28.235017 4558 scope.go:117] "RemoveContainer" containerID="97bc558595838ee4e0deddf6fe63d3195d6972eade1554acebca9bc2e26b1328" Jan 20 17:45:28 crc kubenswrapper[4558]: I0120 17:45:28.240605 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-xdqfg"] Jan 20 17:45:28 crc kubenswrapper[4558]: I0120 17:45:28.248147 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-xdqfg"] Jan 20 17:45:28 crc kubenswrapper[4558]: I0120 17:45:28.260013 4558 scope.go:117] "RemoveContainer" containerID="741130739b0460d46603e2634a37e6587acafb775777dbcb6fc30713d1b02c54" Jan 20 17:45:28 crc kubenswrapper[4558]: I0120 17:45:28.287533 4558 scope.go:117] "RemoveContainer" containerID="9c6dd7cd1b5a87ce417cd16d1b6b6543238170bf7e7099a15726d0b3520193b0" Jan 20 17:45:28 crc kubenswrapper[4558]: E0120 17:45:28.288147 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9c6dd7cd1b5a87ce417cd16d1b6b6543238170bf7e7099a15726d0b3520193b0\": container with ID starting with 9c6dd7cd1b5a87ce417cd16d1b6b6543238170bf7e7099a15726d0b3520193b0 not found: ID does not exist" containerID="9c6dd7cd1b5a87ce417cd16d1b6b6543238170bf7e7099a15726d0b3520193b0" Jan 20 17:45:28 crc kubenswrapper[4558]: I0120 17:45:28.288206 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9c6dd7cd1b5a87ce417cd16d1b6b6543238170bf7e7099a15726d0b3520193b0"} err="failed to get container status 
\"9c6dd7cd1b5a87ce417cd16d1b6b6543238170bf7e7099a15726d0b3520193b0\": rpc error: code = NotFound desc = could not find container \"9c6dd7cd1b5a87ce417cd16d1b6b6543238170bf7e7099a15726d0b3520193b0\": container with ID starting with 9c6dd7cd1b5a87ce417cd16d1b6b6543238170bf7e7099a15726d0b3520193b0 not found: ID does not exist" Jan 20 17:45:28 crc kubenswrapper[4558]: I0120 17:45:28.288230 4558 scope.go:117] "RemoveContainer" containerID="97bc558595838ee4e0deddf6fe63d3195d6972eade1554acebca9bc2e26b1328" Jan 20 17:45:28 crc kubenswrapper[4558]: E0120 17:45:28.288561 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"97bc558595838ee4e0deddf6fe63d3195d6972eade1554acebca9bc2e26b1328\": container with ID starting with 97bc558595838ee4e0deddf6fe63d3195d6972eade1554acebca9bc2e26b1328 not found: ID does not exist" containerID="97bc558595838ee4e0deddf6fe63d3195d6972eade1554acebca9bc2e26b1328" Jan 20 17:45:28 crc kubenswrapper[4558]: I0120 17:45:28.288600 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"97bc558595838ee4e0deddf6fe63d3195d6972eade1554acebca9bc2e26b1328"} err="failed to get container status \"97bc558595838ee4e0deddf6fe63d3195d6972eade1554acebca9bc2e26b1328\": rpc error: code = NotFound desc = could not find container \"97bc558595838ee4e0deddf6fe63d3195d6972eade1554acebca9bc2e26b1328\": container with ID starting with 97bc558595838ee4e0deddf6fe63d3195d6972eade1554acebca9bc2e26b1328 not found: ID does not exist" Jan 20 17:45:28 crc kubenswrapper[4558]: I0120 17:45:28.288612 4558 scope.go:117] "RemoveContainer" containerID="741130739b0460d46603e2634a37e6587acafb775777dbcb6fc30713d1b02c54" Jan 20 17:45:28 crc kubenswrapper[4558]: E0120 17:45:28.288966 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"741130739b0460d46603e2634a37e6587acafb775777dbcb6fc30713d1b02c54\": container with ID starting with 741130739b0460d46603e2634a37e6587acafb775777dbcb6fc30713d1b02c54 not found: ID does not exist" containerID="741130739b0460d46603e2634a37e6587acafb775777dbcb6fc30713d1b02c54" Jan 20 17:45:28 crc kubenswrapper[4558]: I0120 17:45:28.288984 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"741130739b0460d46603e2634a37e6587acafb775777dbcb6fc30713d1b02c54"} err="failed to get container status \"741130739b0460d46603e2634a37e6587acafb775777dbcb6fc30713d1b02c54\": rpc error: code = NotFound desc = could not find container \"741130739b0460d46603e2634a37e6587acafb775777dbcb6fc30713d1b02c54\": container with ID starting with 741130739b0460d46603e2634a37e6587acafb775777dbcb6fc30713d1b02c54 not found: ID does not exist" Jan 20 17:45:28 crc kubenswrapper[4558]: I0120 17:45:28.578356 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="33b38636-2daf-4067-b29c-ed41dcbd49d7" path="/var/lib/kubelet/pods/33b38636-2daf-4067-b29c-ed41dcbd49d7/volumes" Jan 20 17:45:29 crc kubenswrapper[4558]: I0120 17:45:29.566604 4558 scope.go:117] "RemoveContainer" containerID="2d2a8989ca5a5af98b2c9dc8f801ea0675339ec14800f437c058c5a43c20146b" Jan 20 17:45:30 crc kubenswrapper[4558]: I0120 17:45:30.242754 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" 
event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerStarted","Data":"cf4cc5411787db46440d23f9870d3296fdb39888c50ddb23f460003bc8a70072"} Jan 20 17:45:31 crc kubenswrapper[4558]: I0120 17:45:31.705508 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:45:31 crc kubenswrapper[4558]: I0120 17:45:31.706028 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/openstackclient" podUID="2bfab6e6-f4a2-4948-9715-43c6f13d88fd" containerName="openstackclient" containerID="cri-o://dfb7f1325b9ebbf067b631d2aff33318d0cc9724f90cfc14dfab1a1ce2b6b6da" gracePeriod=2 Jan 20 17:45:31 crc kubenswrapper[4558]: I0120 17:45:31.720567 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:45:31 crc kubenswrapper[4558]: I0120 17:45:31.952073 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.116151 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-8c6b-account-create-update-qcnqx"] Jan 20 17:45:32 crc kubenswrapper[4558]: E0120 17:45:32.116857 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="33b38636-2daf-4067-b29c-ed41dcbd49d7" containerName="registry-server" Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.116873 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="33b38636-2daf-4067-b29c-ed41dcbd49d7" containerName="registry-server" Jan 20 17:45:32 crc kubenswrapper[4558]: E0120 17:45:32.116895 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="33b38636-2daf-4067-b29c-ed41dcbd49d7" containerName="extract-content" Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.116901 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="33b38636-2daf-4067-b29c-ed41dcbd49d7" containerName="extract-content" Jan 20 17:45:32 crc kubenswrapper[4558]: E0120 17:45:32.116924 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2bfab6e6-f4a2-4948-9715-43c6f13d88fd" containerName="openstackclient" Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.116930 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="2bfab6e6-f4a2-4948-9715-43c6f13d88fd" containerName="openstackclient" Jan 20 17:45:32 crc kubenswrapper[4558]: E0120 17:45:32.116940 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="33b38636-2daf-4067-b29c-ed41dcbd49d7" containerName="extract-utilities" Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.116947 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="33b38636-2daf-4067-b29c-ed41dcbd49d7" containerName="extract-utilities" Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.117141 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="33b38636-2daf-4067-b29c-ed41dcbd49d7" containerName="registry-server" Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.117176 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="2bfab6e6-f4a2-4948-9715-43c6f13d88fd" containerName="openstackclient" Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.117823 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-8c6b-account-create-update-qcnqx" Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.121699 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-db-secret" Jan 20 17:45:32 crc kubenswrapper[4558]: E0120 17:45:32.145866 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 20 17:45:32 crc kubenswrapper[4558]: E0120 17:45:32.145946 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/4765e529-9729-4d27-a252-c0c9a7b67beb-config-data podName:4765e529-9729-4d27-a252-c0c9a7b67beb nodeName:}" failed. No retries permitted until 2026-01-20 17:45:32.645926939 +0000 UTC m=+3826.406264906 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/4765e529-9729-4d27-a252-c0c9a7b67beb-config-data") pod "rabbitmq-server-0" (UID: "4765e529-9729-4d27-a252-c0c9a7b67beb") : configmap "rabbitmq-config-data" not found Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.150249 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-3b8d-account-create-update-75ws5"] Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.151306 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-3b8d-account-create-update-75ws5" Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.157348 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-api-db-secret" Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.181024 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/root-account-create-update-fhpsv"] Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.182621 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-fhpsv" Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.188266 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/placement-12a5-account-create-update-6g9q5"] Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.189382 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"openstack-cell1-mariadb-root-db-secret" Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.205638 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-8c6b-account-create-update-qcnqx"] Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.205733 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-12a5-account-create-update-6g9q5" Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.212241 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-fhpsv"] Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.215516 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"placement-db-secret" Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.247861 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r4mrk\" (UniqueName: \"kubernetes.io/projected/d7ca0a7f-fd93-48de-b71b-b23d1aef2af7-kube-api-access-r4mrk\") pod \"nova-api-3b8d-account-create-update-75ws5\" (UID: \"d7ca0a7f-fd93-48de-b71b-b23d1aef2af7\") " pod="openstack-kuttl-tests/nova-api-3b8d-account-create-update-75ws5" Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.247943 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d7ca0a7f-fd93-48de-b71b-b23d1aef2af7-operator-scripts\") pod \"nova-api-3b8d-account-create-update-75ws5\" (UID: \"d7ca0a7f-fd93-48de-b71b-b23d1aef2af7\") " pod="openstack-kuttl-tests/nova-api-3b8d-account-create-update-75ws5" Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.247976 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/16a1e55d-37ca-4488-adc3-2602e7416d55-operator-scripts\") pod \"glance-8c6b-account-create-update-qcnqx\" (UID: \"16a1e55d-37ca-4488-adc3-2602e7416d55\") " pod="openstack-kuttl-tests/glance-8c6b-account-create-update-qcnqx" Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.247995 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rh7sw\" (UniqueName: \"kubernetes.io/projected/16a1e55d-37ca-4488-adc3-2602e7416d55-kube-api-access-rh7sw\") pod \"glance-8c6b-account-create-update-qcnqx\" (UID: \"16a1e55d-37ca-4488-adc3-2602e7416d55\") " pod="openstack-kuttl-tests/glance-8c6b-account-create-update-qcnqx" Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.251222 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-tkb42"] Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.297314 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell0-bd94-account-create-update-stnv4"] Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.303739 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-bd94-account-create-update-stnv4" Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.309145 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-db-secret" Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.372475 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wk6rd\" (UniqueName: \"kubernetes.io/projected/03ca5cbb-ca72-46f1-a44f-42ac0fb7cf17-kube-api-access-wk6rd\") pod \"root-account-create-update-fhpsv\" (UID: \"03ca5cbb-ca72-46f1-a44f-42ac0fb7cf17\") " pod="openstack-kuttl-tests/root-account-create-update-fhpsv" Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.372538 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/03ca5cbb-ca72-46f1-a44f-42ac0fb7cf17-operator-scripts\") pod \"root-account-create-update-fhpsv\" (UID: \"03ca5cbb-ca72-46f1-a44f-42ac0fb7cf17\") " pod="openstack-kuttl-tests/root-account-create-update-fhpsv" Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.372642 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a8cb168e-9c96-449b-a8d6-bcd8511f1a53-operator-scripts\") pod \"placement-12a5-account-create-update-6g9q5\" (UID: \"a8cb168e-9c96-449b-a8d6-bcd8511f1a53\") " pod="openstack-kuttl-tests/placement-12a5-account-create-update-6g9q5" Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.373541 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qwxtd\" (UniqueName: \"kubernetes.io/projected/a8cb168e-9c96-449b-a8d6-bcd8511f1a53-kube-api-access-qwxtd\") pod \"placement-12a5-account-create-update-6g9q5\" (UID: \"a8cb168e-9c96-449b-a8d6-bcd8511f1a53\") " pod="openstack-kuttl-tests/placement-12a5-account-create-update-6g9q5" Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.373663 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r4mrk\" (UniqueName: \"kubernetes.io/projected/d7ca0a7f-fd93-48de-b71b-b23d1aef2af7-kube-api-access-r4mrk\") pod \"nova-api-3b8d-account-create-update-75ws5\" (UID: \"d7ca0a7f-fd93-48de-b71b-b23d1aef2af7\") " pod="openstack-kuttl-tests/nova-api-3b8d-account-create-update-75ws5" Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.373748 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d7ca0a7f-fd93-48de-b71b-b23d1aef2af7-operator-scripts\") pod \"nova-api-3b8d-account-create-update-75ws5\" (UID: \"d7ca0a7f-fd93-48de-b71b-b23d1aef2af7\") " pod="openstack-kuttl-tests/nova-api-3b8d-account-create-update-75ws5" Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.373789 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/16a1e55d-37ca-4488-adc3-2602e7416d55-operator-scripts\") pod \"glance-8c6b-account-create-update-qcnqx\" (UID: \"16a1e55d-37ca-4488-adc3-2602e7416d55\") " pod="openstack-kuttl-tests/glance-8c6b-account-create-update-qcnqx" Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.373812 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rh7sw\" (UniqueName: 
\"kubernetes.io/projected/16a1e55d-37ca-4488-adc3-2602e7416d55-kube-api-access-rh7sw\") pod \"glance-8c6b-account-create-update-qcnqx\" (UID: \"16a1e55d-37ca-4488-adc3-2602e7416d55\") " pod="openstack-kuttl-tests/glance-8c6b-account-create-update-qcnqx" Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.375133 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d7ca0a7f-fd93-48de-b71b-b23d1aef2af7-operator-scripts\") pod \"nova-api-3b8d-account-create-update-75ws5\" (UID: \"d7ca0a7f-fd93-48de-b71b-b23d1aef2af7\") " pod="openstack-kuttl-tests/nova-api-3b8d-account-create-update-75ws5" Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.375196 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-tkb42"] Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.375784 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/16a1e55d-37ca-4488-adc3-2602e7416d55-operator-scripts\") pod \"glance-8c6b-account-create-update-qcnqx\" (UID: \"16a1e55d-37ca-4488-adc3-2602e7416d55\") " pod="openstack-kuttl-tests/glance-8c6b-account-create-update-qcnqx" Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.429294 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r4mrk\" (UniqueName: \"kubernetes.io/projected/d7ca0a7f-fd93-48de-b71b-b23d1aef2af7-kube-api-access-r4mrk\") pod \"nova-api-3b8d-account-create-update-75ws5\" (UID: \"d7ca0a7f-fd93-48de-b71b-b23d1aef2af7\") " pod="openstack-kuttl-tests/nova-api-3b8d-account-create-update-75ws5" Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.435785 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rh7sw\" (UniqueName: \"kubernetes.io/projected/16a1e55d-37ca-4488-adc3-2602e7416d55-kube-api-access-rh7sw\") pod \"glance-8c6b-account-create-update-qcnqx\" (UID: \"16a1e55d-37ca-4488-adc3-2602e7416d55\") " pod="openstack-kuttl-tests/glance-8c6b-account-create-update-qcnqx" Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.438455 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-12a5-account-create-update-6g9q5"] Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.453064 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-8c6b-account-create-update-qcnqx" Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.478318 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-3b8d-account-create-update-75ws5" Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.478776 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-bd94-account-create-update-stnv4"] Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.479467 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qwxtd\" (UniqueName: \"kubernetes.io/projected/a8cb168e-9c96-449b-a8d6-bcd8511f1a53-kube-api-access-qwxtd\") pod \"placement-12a5-account-create-update-6g9q5\" (UID: \"a8cb168e-9c96-449b-a8d6-bcd8511f1a53\") " pod="openstack-kuttl-tests/placement-12a5-account-create-update-6g9q5" Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.479583 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/44f856be-bb11-468b-a157-1f2e2851d04d-operator-scripts\") pod \"nova-cell0-bd94-account-create-update-stnv4\" (UID: \"44f856be-bb11-468b-a157-1f2e2851d04d\") " pod="openstack-kuttl-tests/nova-cell0-bd94-account-create-update-stnv4" Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.479667 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wk6rd\" (UniqueName: \"kubernetes.io/projected/03ca5cbb-ca72-46f1-a44f-42ac0fb7cf17-kube-api-access-wk6rd\") pod \"root-account-create-update-fhpsv\" (UID: \"03ca5cbb-ca72-46f1-a44f-42ac0fb7cf17\") " pod="openstack-kuttl-tests/root-account-create-update-fhpsv" Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.479691 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/03ca5cbb-ca72-46f1-a44f-42ac0fb7cf17-operator-scripts\") pod \"root-account-create-update-fhpsv\" (UID: \"03ca5cbb-ca72-46f1-a44f-42ac0fb7cf17\") " pod="openstack-kuttl-tests/root-account-create-update-fhpsv" Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.479738 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a8cb168e-9c96-449b-a8d6-bcd8511f1a53-operator-scripts\") pod \"placement-12a5-account-create-update-6g9q5\" (UID: \"a8cb168e-9c96-449b-a8d6-bcd8511f1a53\") " pod="openstack-kuttl-tests/placement-12a5-account-create-update-6g9q5" Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.479763 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4tw6s\" (UniqueName: \"kubernetes.io/projected/44f856be-bb11-468b-a157-1f2e2851d04d-kube-api-access-4tw6s\") pod \"nova-cell0-bd94-account-create-update-stnv4\" (UID: \"44f856be-bb11-468b-a157-1f2e2851d04d\") " pod="openstack-kuttl-tests/nova-cell0-bd94-account-create-update-stnv4" Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.480808 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/03ca5cbb-ca72-46f1-a44f-42ac0fb7cf17-operator-scripts\") pod \"root-account-create-update-fhpsv\" (UID: \"03ca5cbb-ca72-46f1-a44f-42ac0fb7cf17\") " pod="openstack-kuttl-tests/root-account-create-update-fhpsv" Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.487521 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/a8cb168e-9c96-449b-a8d6-bcd8511f1a53-operator-scripts\") pod \"placement-12a5-account-create-update-6g9q5\" (UID: \"a8cb168e-9c96-449b-a8d6-bcd8511f1a53\") " pod="openstack-kuttl-tests/placement-12a5-account-create-update-6g9q5" Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.525420 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qwxtd\" (UniqueName: \"kubernetes.io/projected/a8cb168e-9c96-449b-a8d6-bcd8511f1a53-kube-api-access-qwxtd\") pod \"placement-12a5-account-create-update-6g9q5\" (UID: \"a8cb168e-9c96-449b-a8d6-bcd8511f1a53\") " pod="openstack-kuttl-tests/placement-12a5-account-create-update-6g9q5" Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.526729 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wk6rd\" (UniqueName: \"kubernetes.io/projected/03ca5cbb-ca72-46f1-a44f-42ac0fb7cf17-kube-api-access-wk6rd\") pod \"root-account-create-update-fhpsv\" (UID: \"03ca5cbb-ca72-46f1-a44f-42ac0fb7cf17\") " pod="openstack-kuttl-tests/root-account-create-update-fhpsv" Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.528557 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-3b8d-account-create-update-75ws5"] Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.557110 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-12a5-account-create-update-6g9q5" Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.585189 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/44f856be-bb11-468b-a157-1f2e2851d04d-operator-scripts\") pod \"nova-cell0-bd94-account-create-update-stnv4\" (UID: \"44f856be-bb11-468b-a157-1f2e2851d04d\") " pod="openstack-kuttl-tests/nova-cell0-bd94-account-create-update-stnv4" Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.585309 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4tw6s\" (UniqueName: \"kubernetes.io/projected/44f856be-bb11-468b-a157-1f2e2851d04d-kube-api-access-4tw6s\") pod \"nova-cell0-bd94-account-create-update-stnv4\" (UID: \"44f856be-bb11-468b-a157-1f2e2851d04d\") " pod="openstack-kuttl-tests/nova-cell0-bd94-account-create-update-stnv4" Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.585972 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/44f856be-bb11-468b-a157-1f2e2851d04d-operator-scripts\") pod \"nova-cell0-bd94-account-create-update-stnv4\" (UID: \"44f856be-bb11-468b-a157-1f2e2851d04d\") " pod="openstack-kuttl-tests/nova-cell0-bd94-account-create-update-stnv4" Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.608509 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4f7051ef-1b0c-4147-9dff-9e57de714b50" path="/var/lib/kubelet/pods/4f7051ef-1b0c-4147-9dff-9e57de714b50/volumes" Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.611973 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4tw6s\" (UniqueName: \"kubernetes.io/projected/44f856be-bb11-468b-a157-1f2e2851d04d-kube-api-access-4tw6s\") pod \"nova-cell0-bd94-account-create-update-stnv4\" (UID: \"44f856be-bb11-468b-a157-1f2e2851d04d\") " pod="openstack-kuttl-tests/nova-cell0-bd94-account-create-update-stnv4" Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 
17:45:32.612608 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-12a5-account-create-update-gsb28"] Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.640850 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.641205 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovsdbserver-sb-0" podUID="924d7506-6027-4d31-b57a-19fc787ba356" containerName="openstack-network-exporter" containerID="cri-o://925992b2eef4db1ac2a2537f57dad717cf3ee3ea67cbe9cec01c652878bf3f97" gracePeriod=300 Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.659991 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-bd94-account-create-update-stnv4" Jan 20 17:45:32 crc kubenswrapper[4558]: E0120 17:45:32.688965 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 20 17:45:32 crc kubenswrapper[4558]: E0120 17:45:32.689049 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/4765e529-9729-4d27-a252-c0c9a7b67beb-config-data podName:4765e529-9729-4d27-a252-c0c9a7b67beb nodeName:}" failed. No retries permitted until 2026-01-20 17:45:33.689027898 +0000 UTC m=+3827.449365865 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/4765e529-9729-4d27-a252-c0c9a7b67beb-config-data") pod "rabbitmq-server-0" (UID: "4765e529-9729-4d27-a252-c0c9a7b67beb") : configmap "rabbitmq-config-data" not found Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.696892 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/placement-12a5-account-create-update-gsb28"] Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.727362 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-3b8d-account-create-update-s9plf"] Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.751370 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-3b8d-account-create-update-s9plf"] Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.804096 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-51af-account-create-update-52f7p"] Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.805530 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-51af-account-create-update-52f7p" Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.808497 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovsdbserver-sb-0" podUID="924d7506-6027-4d31-b57a-19fc787ba356" containerName="ovsdbserver-sb" containerID="cri-o://0b23cac82af3ec3ee44a48b3126c8df9ad27e49f292999b18a09db1d3481441e" gracePeriod=300 Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.813605 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-db-secret" Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.814322 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-fhpsv" Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.865724 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-51af-account-create-update-52f7p"] Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.877218 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.877476 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovn-northd-0" podUID="d6e6a03e-c496-4589-8d51-f6d9e89e14ae" containerName="ovn-northd" containerID="cri-o://391bf6661c1200e7d892ab027500c3b569ef3b9e9fbd3bdab1b7ee5442423861" gracePeriod=30 Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.877597 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovn-northd-0" podUID="d6e6a03e-c496-4589-8d51-f6d9e89e14ae" containerName="openstack-network-exporter" containerID="cri-o://7e87cd2bf2bf667d667a6643f4f7c164cbd3aeeebc96908debb2fae778b18645" gracePeriod=30 Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.883226 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-8c6b-account-create-update-hgzwx"] Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.894411 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-8c6b-account-create-update-hgzwx"] Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.900145 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-bd94-account-create-update-n5ssg"] Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.910944 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell0-bd94-account-create-update-n5ssg"] Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.919774 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-0"] Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.925634 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.926041 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovsdbserver-nb-0" podUID="b2fb8e70-15aa-4b38-a44d-5f818dfc755c" containerName="openstack-network-exporter" containerID="cri-o://ead17a0d58c199bc2ba003661375148f8b9c61941971a8e8d61f318e35d6d553" gracePeriod=300 Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.942561 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-db-sync-2sxrh"] Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.979823 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-db-sync-2sxrh"] Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.987995 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-db-sync-fv6q5"] Jan 20 17:45:32 crc kubenswrapper[4558]: I0120 17:45:32.993897 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-db-sync-fv6q5"] Jan 20 17:45:33 crc kubenswrapper[4558]: I0120 17:45:33.000292 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/89fd10f3-4598-47c6-ad0e-59aa59d41894-operator-scripts\") pod \"nova-cell1-51af-account-create-update-52f7p\" (UID: \"89fd10f3-4598-47c6-ad0e-59aa59d41894\") " pod="openstack-kuttl-tests/nova-cell1-51af-account-create-update-52f7p" Jan 20 17:45:33 crc kubenswrapper[4558]: I0120 17:45:33.000368 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gl67k\" (UniqueName: \"kubernetes.io/projected/89fd10f3-4598-47c6-ad0e-59aa59d41894-kube-api-access-gl67k\") pod \"nova-cell1-51af-account-create-update-52f7p\" (UID: \"89fd10f3-4598-47c6-ad0e-59aa59d41894\") " pod="openstack-kuttl-tests/nova-cell1-51af-account-create-update-52f7p" Jan 20 17:45:33 crc kubenswrapper[4558]: I0120 17:45:33.000607 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-51af-account-create-update-96ttf"] Jan 20 17:45:33 crc kubenswrapper[4558]: I0120 17:45:33.006998 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-db-sync-ndrzj"] Jan 20 17:45:33 crc kubenswrapper[4558]: I0120 17:45:33.012768 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-51af-account-create-update-96ttf"] Jan 20 17:45:33 crc kubenswrapper[4558]: I0120 17:45:33.027416 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-db-sync-ndrzj"] Jan 20 17:45:33 crc kubenswrapper[4558]: I0120 17:45:33.064036 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-db-sync-fq8jg"] Jan 20 17:45:33 crc kubenswrapper[4558]: I0120 17:45:33.064045 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovsdbserver-nb-0" podUID="b2fb8e70-15aa-4b38-a44d-5f818dfc755c" containerName="ovsdbserver-nb" containerID="cri-o://54155d91bfb6e6c18d3eb5afc095843ec5b7f0df04d6716cdc4199d6b21aa7d6" gracePeriod=300 Jan 20 17:45:33 crc kubenswrapper[4558]: I0120 17:45:33.098550 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/placement-db-sync-fq8jg"] Jan 20 17:45:33 crc kubenswrapper[4558]: I0120 17:45:33.112095 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/89fd10f3-4598-47c6-ad0e-59aa59d41894-operator-scripts\") pod \"nova-cell1-51af-account-create-update-52f7p\" (UID: \"89fd10f3-4598-47c6-ad0e-59aa59d41894\") " pod="openstack-kuttl-tests/nova-cell1-51af-account-create-update-52f7p" Jan 20 17:45:33 crc kubenswrapper[4558]: I0120 17:45:33.112176 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gl67k\" (UniqueName: \"kubernetes.io/projected/89fd10f3-4598-47c6-ad0e-59aa59d41894-kube-api-access-gl67k\") pod \"nova-cell1-51af-account-create-update-52f7p\" (UID: \"89fd10f3-4598-47c6-ad0e-59aa59d41894\") " pod="openstack-kuttl-tests/nova-cell1-51af-account-create-update-52f7p" Jan 20 17:45:33 crc kubenswrapper[4558]: E0120 17:45:33.112807 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Jan 20 17:45:33 crc kubenswrapper[4558]: E0120 17:45:33.112858 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/de4d8126-91cf-4149-bec4-4accaf558308-config-data podName:de4d8126-91cf-4149-bec4-4accaf558308 nodeName:}" failed. 
No retries permitted until 2026-01-20 17:45:33.612840367 +0000 UTC m=+3827.373178335 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/de4d8126-91cf-4149-bec4-4accaf558308-config-data") pod "rabbitmq-cell1-server-0" (UID: "de4d8126-91cf-4149-bec4-4accaf558308") : configmap "rabbitmq-cell1-config-data" not found Jan 20 17:45:33 crc kubenswrapper[4558]: I0120 17:45:33.113612 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/89fd10f3-4598-47c6-ad0e-59aa59d41894-operator-scripts\") pod \"nova-cell1-51af-account-create-update-52f7p\" (UID: \"89fd10f3-4598-47c6-ad0e-59aa59d41894\") " pod="openstack-kuttl-tests/nova-cell1-51af-account-create-update-52f7p" Jan 20 17:45:33 crc kubenswrapper[4558]: I0120 17:45:33.137400 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-db-sync-dxp9v"] Jan 20 17:45:33 crc kubenswrapper[4558]: I0120 17:45:33.137442 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-db-sync-dxp9v"] Jan 20 17:45:33 crc kubenswrapper[4558]: I0120 17:45:33.197873 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-cell-mapping-7m8mg"] Jan 20 17:45:33 crc kubenswrapper[4558]: I0120 17:45:33.198479 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gl67k\" (UniqueName: \"kubernetes.io/projected/89fd10f3-4598-47c6-ad0e-59aa59d41894-kube-api-access-gl67k\") pod \"nova-cell1-51af-account-create-update-52f7p\" (UID: \"89fd10f3-4598-47c6-ad0e-59aa59d41894\") " pod="openstack-kuttl-tests/nova-cell1-51af-account-create-update-52f7p" Jan 20 17:45:33 crc kubenswrapper[4558]: I0120 17:45:33.289817 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-cell-mapping-7m8mg"] Jan 20 17:45:33 crc kubenswrapper[4558]: E0120 17:45:33.320713 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:45:33 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:45:33 crc kubenswrapper[4558]: Jan 20 17:45:33 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:45:33 crc kubenswrapper[4558]: Jan 20 17:45:33 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:45:33 crc kubenswrapper[4558]: Jan 20 17:45:33 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:45:33 crc kubenswrapper[4558]: Jan 20 17:45:33 crc kubenswrapper[4558]: if [ -n "glance" ]; then Jan 20 17:45:33 crc kubenswrapper[4558]: GRANT_DATABASE="glance" Jan 20 17:45:33 crc kubenswrapper[4558]: else Jan 20 17:45:33 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:45:33 crc kubenswrapper[4558]: fi Jan 20 17:45:33 crc kubenswrapper[4558]: Jan 20 17:45:33 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:45:33 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:45:33 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:45:33 crc kubenswrapper[4558]: # 3. 
create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:45:33 crc kubenswrapper[4558]: # support updates Jan 20 17:45:33 crc kubenswrapper[4558]: Jan 20 17:45:33 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:45:33 crc kubenswrapper[4558]: E0120 17:45:33.324475 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"glance-db-secret\\\" not found\"" pod="openstack-kuttl-tests/glance-8c6b-account-create-update-qcnqx" podUID="16a1e55d-37ca-4488-adc3-2602e7416d55" Jan 20 17:45:33 crc kubenswrapper[4558]: I0120 17:45:33.349064 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-8c6b-account-create-update-qcnqx"] Jan 20 17:45:33 crc kubenswrapper[4558]: I0120 17:45:33.427244 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-cell-mapping-c79qh"] Jan 20 17:45:33 crc kubenswrapper[4558]: I0120 17:45:33.444683 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-51af-account-create-update-52f7p" Jan 20 17:45:33 crc kubenswrapper[4558]: I0120 17:45:33.496312 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-sb-0_924d7506-6027-4d31-b57a-19fc787ba356/ovsdbserver-sb/0.log" Jan 20 17:45:33 crc kubenswrapper[4558]: I0120 17:45:33.496369 4558 generic.go:334] "Generic (PLEG): container finished" podID="924d7506-6027-4d31-b57a-19fc787ba356" containerID="925992b2eef4db1ac2a2537f57dad717cf3ee3ea67cbe9cec01c652878bf3f97" exitCode=2 Jan 20 17:45:33 crc kubenswrapper[4558]: I0120 17:45:33.496389 4558 generic.go:334] "Generic (PLEG): container finished" podID="924d7506-6027-4d31-b57a-19fc787ba356" containerID="0b23cac82af3ec3ee44a48b3126c8df9ad27e49f292999b18a09db1d3481441e" exitCode=143 Jan 20 17:45:33 crc kubenswrapper[4558]: I0120 17:45:33.496435 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"924d7506-6027-4d31-b57a-19fc787ba356","Type":"ContainerDied","Data":"925992b2eef4db1ac2a2537f57dad717cf3ee3ea67cbe9cec01c652878bf3f97"} Jan 20 17:45:33 crc kubenswrapper[4558]: I0120 17:45:33.496466 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"924d7506-6027-4d31-b57a-19fc787ba356","Type":"ContainerDied","Data":"0b23cac82af3ec3ee44a48b3126c8df9ad27e49f292999b18a09db1d3481441e"} Jan 20 17:45:33 crc kubenswrapper[4558]: I0120 17:45:33.513018 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell0-cell-mapping-c79qh"] Jan 20 17:45:33 crc kubenswrapper[4558]: I0120 17:45:33.523350 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovn-northd-0_d6e6a03e-c496-4589-8d51-f6d9e89e14ae/ovn-northd/0.log" Jan 20 17:45:33 crc kubenswrapper[4558]: I0120 17:45:33.523410 4558 generic.go:334] "Generic (PLEG): container finished" podID="d6e6a03e-c496-4589-8d51-f6d9e89e14ae" containerID="7e87cd2bf2bf667d667a6643f4f7c164cbd3aeeebc96908debb2fae778b18645" exitCode=2 Jan 20 17:45:33 crc kubenswrapper[4558]: I0120 17:45:33.523429 4558 generic.go:334] "Generic (PLEG): container finished" podID="d6e6a03e-c496-4589-8d51-f6d9e89e14ae" containerID="391bf6661c1200e7d892ab027500c3b569ef3b9e9fbd3bdab1b7ee5442423861" exitCode=143 Jan 20 17:45:33 crc kubenswrapper[4558]: I0120 17:45:33.523506 4558 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"d6e6a03e-c496-4589-8d51-f6d9e89e14ae","Type":"ContainerDied","Data":"7e87cd2bf2bf667d667a6643f4f7c164cbd3aeeebc96908debb2fae778b18645"} Jan 20 17:45:33 crc kubenswrapper[4558]: I0120 17:45:33.523539 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"d6e6a03e-c496-4589-8d51-f6d9e89e14ae","Type":"ContainerDied","Data":"391bf6661c1200e7d892ab027500c3b569ef3b9e9fbd3bdab1b7ee5442423861"} Jan 20 17:45:33 crc kubenswrapper[4558]: E0120 17:45:33.538932 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:45:33 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:45:33 crc kubenswrapper[4558]: Jan 20 17:45:33 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:45:33 crc kubenswrapper[4558]: Jan 20 17:45:33 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:45:33 crc kubenswrapper[4558]: Jan 20 17:45:33 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:45:33 crc kubenswrapper[4558]: Jan 20 17:45:33 crc kubenswrapper[4558]: if [ -n "placement" ]; then Jan 20 17:45:33 crc kubenswrapper[4558]: GRANT_DATABASE="placement" Jan 20 17:45:33 crc kubenswrapper[4558]: else Jan 20 17:45:33 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:45:33 crc kubenswrapper[4558]: fi Jan 20 17:45:33 crc kubenswrapper[4558]: Jan 20 17:45:33 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:45:33 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:45:33 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:45:33 crc kubenswrapper[4558]: # 3. 
create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:45:33 crc kubenswrapper[4558]: # support updates Jan 20 17:45:33 crc kubenswrapper[4558]: Jan 20 17:45:33 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:45:33 crc kubenswrapper[4558]: E0120 17:45:33.540947 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"placement-db-secret\\\" not found\"" pod="openstack-kuttl-tests/placement-12a5-account-create-update-6g9q5" podUID="a8cb168e-9c96-449b-a8d6-bcd8511f1a53" Jan 20 17:45:33 crc kubenswrapper[4558]: I0120 17:45:33.577329 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-8c6b-account-create-update-qcnqx" event={"ID":"16a1e55d-37ca-4488-adc3-2602e7416d55","Type":"ContainerStarted","Data":"388c5fc5f4d043d14eeaeb654f79dfd70495e80d866450bff22b5151712f8716"} Jan 20 17:45:33 crc kubenswrapper[4558]: E0120 17:45:33.585621 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:45:33 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:45:33 crc kubenswrapper[4558]: Jan 20 17:45:33 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:45:33 crc kubenswrapper[4558]: Jan 20 17:45:33 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:45:33 crc kubenswrapper[4558]: Jan 20 17:45:33 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:45:33 crc kubenswrapper[4558]: Jan 20 17:45:33 crc kubenswrapper[4558]: if [ -n "nova_api" ]; then Jan 20 17:45:33 crc kubenswrapper[4558]: GRANT_DATABASE="nova_api" Jan 20 17:45:33 crc kubenswrapper[4558]: else Jan 20 17:45:33 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:45:33 crc kubenswrapper[4558]: fi Jan 20 17:45:33 crc kubenswrapper[4558]: Jan 20 17:45:33 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:45:33 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:45:33 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:45:33 crc kubenswrapper[4558]: # 3. 
create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:45:33 crc kubenswrapper[4558]: # support updates Jan 20 17:45:33 crc kubenswrapper[4558]: Jan 20 17:45:33 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:45:33 crc kubenswrapper[4558]: E0120 17:45:33.586944 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:45:33 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:45:33 crc kubenswrapper[4558]: Jan 20 17:45:33 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:45:33 crc kubenswrapper[4558]: Jan 20 17:45:33 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:45:33 crc kubenswrapper[4558]: Jan 20 17:45:33 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:45:33 crc kubenswrapper[4558]: Jan 20 17:45:33 crc kubenswrapper[4558]: if [ -n "glance" ]; then Jan 20 17:45:33 crc kubenswrapper[4558]: GRANT_DATABASE="glance" Jan 20 17:45:33 crc kubenswrapper[4558]: else Jan 20 17:45:33 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:45:33 crc kubenswrapper[4558]: fi Jan 20 17:45:33 crc kubenswrapper[4558]: Jan 20 17:45:33 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:45:33 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:45:33 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:45:33 crc kubenswrapper[4558]: # 3. create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:45:33 crc kubenswrapper[4558]: # support updates Jan 20 17:45:33 crc kubenswrapper[4558]: Jan 20 17:45:33 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:45:33 crc kubenswrapper[4558]: E0120 17:45:33.587994 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:45:33 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:45:33 crc kubenswrapper[4558]: Jan 20 17:45:33 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:45:33 crc kubenswrapper[4558]: Jan 20 17:45:33 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:45:33 crc kubenswrapper[4558]: Jan 20 17:45:33 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:45:33 crc kubenswrapper[4558]: Jan 20 17:45:33 crc kubenswrapper[4558]: if [ -n "nova_cell0" ]; then Jan 20 17:45:33 crc kubenswrapper[4558]: GRANT_DATABASE="nova_cell0" Jan 20 17:45:33 crc kubenswrapper[4558]: else Jan 20 17:45:33 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:45:33 crc kubenswrapper[4558]: fi Jan 20 17:45:33 crc kubenswrapper[4558]: Jan 20 17:45:33 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:45:33 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:45:33 crc kubenswrapper[4558]: # 2. 
MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:45:33 crc kubenswrapper[4558]: # 3. create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:45:33 crc kubenswrapper[4558]: # support updates Jan 20 17:45:33 crc kubenswrapper[4558]: Jan 20 17:45:33 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:45:33 crc kubenswrapper[4558]: E0120 17:45:33.589122 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"glance-db-secret\\\" not found\"" pod="openstack-kuttl-tests/glance-8c6b-account-create-update-qcnqx" podUID="16a1e55d-37ca-4488-adc3-2602e7416d55" Jan 20 17:45:33 crc kubenswrapper[4558]: E0120 17:45:33.588288 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"nova-api-db-secret\\\" not found\"" pod="openstack-kuttl-tests/nova-api-3b8d-account-create-update-75ws5" podUID="d7ca0a7f-fd93-48de-b71b-b23d1aef2af7" Jan 20 17:45:33 crc kubenswrapper[4558]: E0120 17:45:33.595764 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"nova-cell0-db-secret\\\" not found\"" pod="openstack-kuttl-tests/nova-cell0-bd94-account-create-update-stnv4" podUID="44f856be-bb11-468b-a157-1f2e2851d04d" Jan 20 17:45:33 crc kubenswrapper[4558]: I0120 17:45:33.607645 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-nb-0_b2fb8e70-15aa-4b38-a44d-5f818dfc755c/ovsdbserver-nb/0.log" Jan 20 17:45:33 crc kubenswrapper[4558]: I0120 17:45:33.607742 4558 generic.go:334] "Generic (PLEG): container finished" podID="b2fb8e70-15aa-4b38-a44d-5f818dfc755c" containerID="ead17a0d58c199bc2ba003661375148f8b9c61941971a8e8d61f318e35d6d553" exitCode=2 Jan 20 17:45:33 crc kubenswrapper[4558]: I0120 17:45:33.607804 4558 generic.go:334] "Generic (PLEG): container finished" podID="b2fb8e70-15aa-4b38-a44d-5f818dfc755c" containerID="54155d91bfb6e6c18d3eb5afc095843ec5b7f0df04d6716cdc4199d6b21aa7d6" exitCode=143 Jan 20 17:45:33 crc kubenswrapper[4558]: I0120 17:45:33.607844 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"b2fb8e70-15aa-4b38-a44d-5f818dfc755c","Type":"ContainerDied","Data":"ead17a0d58c199bc2ba003661375148f8b9c61941971a8e8d61f318e35d6d553"} Jan 20 17:45:33 crc kubenswrapper[4558]: I0120 17:45:33.607910 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"b2fb8e70-15aa-4b38-a44d-5f818dfc755c","Type":"ContainerDied","Data":"54155d91bfb6e6c18d3eb5afc095843ec5b7f0df04d6716cdc4199d6b21aa7d6"} Jan 20 17:45:33 crc kubenswrapper[4558]: E0120 17:45:33.627060 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Jan 20 17:45:33 crc kubenswrapper[4558]: E0120 17:45:33.627105 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/de4d8126-91cf-4149-bec4-4accaf558308-config-data podName:de4d8126-91cf-4149-bec4-4accaf558308 nodeName:}" failed. No retries permitted until 2026-01-20 17:45:34.6270897 +0000 UTC m=+3828.387427666 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/de4d8126-91cf-4149-bec4-4accaf558308-config-data") pod "rabbitmq-cell1-server-0" (UID: "de4d8126-91cf-4149-bec4-4accaf558308") : configmap "rabbitmq-cell1-config-data" not found Jan 20 17:45:33 crc kubenswrapper[4558]: I0120 17:45:33.650973 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-12a5-account-create-update-6g9q5"] Jan 20 17:45:33 crc kubenswrapper[4558]: I0120 17:45:33.696615 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-3b8d-account-create-update-75ws5"] Jan 20 17:45:33 crc kubenswrapper[4558]: I0120 17:45:33.716853 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-bd94-account-create-update-stnv4"] Jan 20 17:45:33 crc kubenswrapper[4558]: E0120 17:45:33.729368 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 20 17:45:33 crc kubenswrapper[4558]: E0120 17:45:33.729451 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/4765e529-9729-4d27-a252-c0c9a7b67beb-config-data podName:4765e529-9729-4d27-a252-c0c9a7b67beb nodeName:}" failed. No retries permitted until 2026-01-20 17:45:35.729431216 +0000 UTC m=+3829.489769184 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/4765e529-9729-4d27-a252-c0c9a7b67beb-config-data") pod "rabbitmq-server-0" (UID: "4765e529-9729-4d27-a252-c0c9a7b67beb") : configmap "rabbitmq-config-data" not found Jan 20 17:45:33 crc kubenswrapper[4558]: I0120 17:45:33.741243 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:45:33 crc kubenswrapper[4558]: I0120 17:45:33.741576 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-api-0" podUID="b752af55-9c06-469f-9353-e1042300de3c" containerName="cinder-api-log" containerID="cri-o://93d7994154007967abeffaef97cbdbfea9fe0c9a249a8ebbcb1a6bb563d31477" gracePeriod=30 Jan 20 17:45:33 crc kubenswrapper[4558]: I0120 17:45:33.742118 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-api-0" podUID="b752af55-9c06-469f-9353-e1042300de3c" containerName="cinder-api" containerID="cri-o://43f53e7f4f862cf970fa692178ae00cd40fe6c655fd001e46383dd72770d5cb5" gracePeriod=30 Jan 20 17:45:33 crc kubenswrapper[4558]: I0120 17:45:33.782053 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:45:33 crc kubenswrapper[4558]: I0120 17:45:33.782620 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-scheduler-0" podUID="7faabe16-9de5-49dc-bab6-44e173f4403c" containerName="probe" containerID="cri-o://3988e808e906ec72c1e2c7130ece7e18110316238273aacbead8355ff8099aa5" gracePeriod=30 Jan 20 17:45:33 crc kubenswrapper[4558]: I0120 17:45:33.783137 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-scheduler-0" podUID="7faabe16-9de5-49dc-bab6-44e173f4403c" containerName="cinder-scheduler" containerID="cri-o://7dfa9f0fec16a0a5a1903b314539ce49eb8943fbf0152c319ba83f4512000ab4" gracePeriod=30 Jan 20 17:45:33 crc kubenswrapper[4558]: I0120 17:45:33.800236 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack-kuttl-tests/neutron-b9f9b6874-4cxdp"] Jan 20 17:45:33 crc kubenswrapper[4558]: I0120 17:45:33.800502 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-b9f9b6874-4cxdp" podUID="8361ba4f-e976-4c04-82ad-81e9412ba84c" containerName="neutron-api" containerID="cri-o://988e9fa129b4be2f46d004a547d691ae8c9171ce34c21f6c5e3dc859fbe3c5da" gracePeriod=30 Jan 20 17:45:33 crc kubenswrapper[4558]: I0120 17:45:33.800904 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-b9f9b6874-4cxdp" podUID="8361ba4f-e976-4c04-82ad-81e9412ba84c" containerName="neutron-httpd" containerID="cri-o://df990974fa4d7bdeba24386c7a7548b9c42f83f4e37902446477a1561329d066" gracePeriod=30 Jan 20 17:45:33 crc kubenswrapper[4558]: I0120 17:45:33.806224 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-storage-0"] Jan 20 17:45:33 crc kubenswrapper[4558]: I0120 17:45:33.806710 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="fa551db1-9a4b-45c2-b640-4f3518b162f4" containerName="account-server" containerID="cri-o://7946458eae645c01b2edae8d93ef429443754ef2e0e2732538ed1fcc7aab4131" gracePeriod=30 Jan 20 17:45:33 crc kubenswrapper[4558]: I0120 17:45:33.806742 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="fa551db1-9a4b-45c2-b640-4f3518b162f4" containerName="object-auditor" containerID="cri-o://db06ae2f5dcfb82f3b41bda4c320f201a9c03b7b5c3d71c0c19a43661eba2751" gracePeriod=30 Jan 20 17:45:33 crc kubenswrapper[4558]: I0120 17:45:33.806848 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="fa551db1-9a4b-45c2-b640-4f3518b162f4" containerName="object-replicator" containerID="cri-o://1776b65beebb985be1fcd79b9aa133812c590cf0eb9a13dd5bf41981563e77b5" gracePeriod=30 Jan 20 17:45:33 crc kubenswrapper[4558]: I0120 17:45:33.806884 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="fa551db1-9a4b-45c2-b640-4f3518b162f4" containerName="object-server" containerID="cri-o://0b9af6175ca2fe405cf0a757e866a177d1e865ed2977ca637413cf1645033f14" gracePeriod=30 Jan 20 17:45:33 crc kubenswrapper[4558]: I0120 17:45:33.806927 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="fa551db1-9a4b-45c2-b640-4f3518b162f4" containerName="swift-recon-cron" containerID="cri-o://0978e6e1ade70cc41df3d45431dd51e1520b653984893d0992b4c5d6e1fb3e47" gracePeriod=30 Jan 20 17:45:33 crc kubenswrapper[4558]: I0120 17:45:33.806944 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="fa551db1-9a4b-45c2-b640-4f3518b162f4" containerName="container-auditor" containerID="cri-o://6a4f662ca1f9347dca843ebc69844d9f6f10b499e6d23197e88dd1c43db718b8" gracePeriod=30 Jan 20 17:45:33 crc kubenswrapper[4558]: I0120 17:45:33.806981 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="fa551db1-9a4b-45c2-b640-4f3518b162f4" containerName="container-replicator" containerID="cri-o://13c87e4a18a86e7c3b8ddb00e81ce8b0da89d4d0c280a7e98c30f1e03817bfc3" gracePeriod=30 Jan 20 17:45:33 crc kubenswrapper[4558]: I0120 17:45:33.806934 
4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="fa551db1-9a4b-45c2-b640-4f3518b162f4" containerName="container-updater" containerID="cri-o://35a4222bca9bef05ef01fd815e0b4f831f1296d08622fc5e89a2a361527c22e6" gracePeriod=30 Jan 20 17:45:33 crc kubenswrapper[4558]: I0120 17:45:33.807022 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="fa551db1-9a4b-45c2-b640-4f3518b162f4" containerName="container-server" containerID="cri-o://c784e598ad58ecd5a81e3bf765a648caa06b6e4d7a98116be1483ea1a1916bd6" gracePeriod=30 Jan 20 17:45:33 crc kubenswrapper[4558]: I0120 17:45:33.807057 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="fa551db1-9a4b-45c2-b640-4f3518b162f4" containerName="account-reaper" containerID="cri-o://cbaf44dfc0734a14fd6ecab8c4428e033aaee10d6fd6549b9c1af4209a3025c5" gracePeriod=30 Jan 20 17:45:33 crc kubenswrapper[4558]: I0120 17:45:33.807072 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="fa551db1-9a4b-45c2-b640-4f3518b162f4" containerName="object-expirer" containerID="cri-o://6ff615a38b1de5289443af49e872533c8fb33086c5c5f749a5757153445d80db" gracePeriod=30 Jan 20 17:45:33 crc kubenswrapper[4558]: I0120 17:45:33.807057 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="fa551db1-9a4b-45c2-b640-4f3518b162f4" containerName="rsync" containerID="cri-o://b5ee244c517bb7529681f18dd5be00352c8b0d2d264f9bf4dd993ae3ee88bc9f" gracePeriod=30 Jan 20 17:45:33 crc kubenswrapper[4558]: I0120 17:45:33.807117 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="fa551db1-9a4b-45c2-b640-4f3518b162f4" containerName="account-auditor" containerID="cri-o://967813e267c1874f848c851b80566f8d4b10ddb11ae1e902fcced650a357a740" gracePeriod=30 Jan 20 17:45:33 crc kubenswrapper[4558]: I0120 17:45:33.807113 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="fa551db1-9a4b-45c2-b640-4f3518b162f4" containerName="object-updater" containerID="cri-o://730e7a8305ea822bf786ff7759b1b076db78be0ddb28d589685ca62f80722994" gracePeriod=30 Jan 20 17:45:33 crc kubenswrapper[4558]: I0120 17:45:33.807292 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="fa551db1-9a4b-45c2-b640-4f3518b162f4" containerName="account-replicator" containerID="cri-o://c873af103469702cab190e42ac22b360e4b1c8addd7ea94631a91965a8840503" gracePeriod=30 Jan 20 17:45:33 crc kubenswrapper[4558]: I0120 17:45:33.810447 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-db-create-xrj4d"] Jan 20 17:45:33 crc kubenswrapper[4558]: I0120 17:45:33.857285 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-db-create-xrj4d"] Jan 20 17:45:33 crc kubenswrapper[4558]: I0120 17:45:33.904358 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-2lft9"] Jan 20 17:45:33 crc kubenswrapper[4558]: I0120 17:45:33.906280 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-2lft9" Jan 20 17:45:33 crc kubenswrapper[4558]: I0120 17:45:33.928549 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-2lft9"] Jan 20 17:45:33 crc kubenswrapper[4558]: I0120 17:45:33.947044 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-405d-account-create-update-hmk9w"] Jan 20 17:45:33 crc kubenswrapper[4558]: I0120 17:45:33.957967 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-405d-account-create-update-hmk9w"] Jan 20 17:45:33 crc kubenswrapper[4558]: I0120 17:45:33.975311 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-5mvzb"] Jan 20 17:45:33 crc kubenswrapper[4558]: I0120 17:45:33.991038 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-5mvzb"] Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:33.996873 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-sb-0_924d7506-6027-4d31-b57a-19fc787ba356/ovsdbserver-sb/0.log" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:33.996967 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.009896 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovn-northd-0_d6e6a03e-c496-4589-8d51-f6d9e89e14ae/ovn-northd/0.log" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.009972 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.014132 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-5b5f5cd6d-6rk7z"] Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.014423 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/placement-5b5f5cd6d-6rk7z" podUID="50804fa8-d09d-49f8-a143-d7ec24ec542a" containerName="placement-log" containerID="cri-o://661b71951f10fd7eb454cf2a5617695ac503cf4d432c4017c7980d3e23a068f2" gracePeriod=30 Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.014571 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/placement-5b5f5cd6d-6rk7z" podUID="50804fa8-d09d-49f8-a143-d7ec24ec542a" containerName="placement-api" containerID="cri-o://15c901b90eae1d8b686fb50b0222bcb667a3a9f6b6a35faa944d0560ae58cf3a" gracePeriod=30 Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.050228 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d7f1ecf4-9630-44e7-a627-cc63de361385-config\") pod \"dnsmasq-dnsmasq-84b9f45d47-2lft9\" (UID: \"d7f1ecf4-9630-44e7-a627-cc63de361385\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-2lft9" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.050407 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/d7f1ecf4-9630-44e7-a627-cc63de361385-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-84b9f45d47-2lft9\" (UID: \"d7f1ecf4-9630-44e7-a627-cc63de361385\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-2lft9" 
Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.053881 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q5625\" (UniqueName: \"kubernetes.io/projected/d7f1ecf4-9630-44e7-a627-cc63de361385-kube-api-access-q5625\") pod \"dnsmasq-dnsmasq-84b9f45d47-2lft9\" (UID: \"d7f1ecf4-9630-44e7-a627-cc63de361385\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-2lft9" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.053990 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-db-create-p729z"] Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.064636 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-db-create-p729z"] Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.075680 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-185c-account-create-update-f294p"] Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.090354 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-185c-account-create-update-f294p"] Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.107218 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.107497 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="56173817-8246-4f37-b157-3890912004ca" containerName="glance-log" containerID="cri-o://65d88d20f8ddd45d36845fd06af8255069c3338120c182ed2e8bfd0b56484bec" gracePeriod=30 Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.107801 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="56173817-8246-4f37-b157-3890912004ca" containerName="glance-httpd" containerID="cri-o://26fcabccd365630b0df7a2013b030db7a8a52ed18fb890da73b7a737f80da0ba" gracePeriod=30 Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.124271 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-db-create-t2lsm"] Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.136466 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-db-create-t2lsm"] Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.136505 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.136745 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="f87490e8-557d-41e3-a07b-8fe12147b315" containerName="glance-log" containerID="cri-o://dea187ec3d51aa9fd2fd3318b66ce5429ee704c1f761dae601b4234a786d24c7" gracePeriod=30 Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.137241 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="f87490e8-557d-41e3-a07b-8fe12147b315" containerName="glance-httpd" containerID="cri-o://72f0e46d318dc78a9e49d3aa7f3c509c43a243da567f0f20b70c09a82e812667" gracePeriod=30 Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.144616 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-db-create-jxmgh"] Jan 20 17:45:34 crc 
kubenswrapper[4558]: I0120 17:45:34.150401 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-677c-account-create-update-c2md6"] Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.157675 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/placement-db-create-jxmgh"] Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.159246 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/924d7506-6027-4d31-b57a-19fc787ba356-metrics-certs-tls-certs\") pod \"924d7506-6027-4d31-b57a-19fc787ba356\" (UID: \"924d7506-6027-4d31-b57a-19fc787ba356\") " Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.159286 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-sb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"924d7506-6027-4d31-b57a-19fc787ba356\" (UID: \"924d7506-6027-4d31-b57a-19fc787ba356\") " Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.159340 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/924d7506-6027-4d31-b57a-19fc787ba356-combined-ca-bundle\") pod \"924d7506-6027-4d31-b57a-19fc787ba356\" (UID: \"924d7506-6027-4d31-b57a-19fc787ba356\") " Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.159389 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6e6a03e-c496-4589-8d51-f6d9e89e14ae-combined-ca-bundle\") pod \"d6e6a03e-c496-4589-8d51-f6d9e89e14ae\" (UID: \"d6e6a03e-c496-4589-8d51-f6d9e89e14ae\") " Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.159452 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/924d7506-6027-4d31-b57a-19fc787ba356-config\") pod \"924d7506-6027-4d31-b57a-19fc787ba356\" (UID: \"924d7506-6027-4d31-b57a-19fc787ba356\") " Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.159484 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/924d7506-6027-4d31-b57a-19fc787ba356-ovsdb-rundir\") pod \"924d7506-6027-4d31-b57a-19fc787ba356\" (UID: \"924d7506-6027-4d31-b57a-19fc787ba356\") " Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.159537 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d6e6a03e-c496-4589-8d51-f6d9e89e14ae-config\") pod \"d6e6a03e-c496-4589-8d51-f6d9e89e14ae\" (UID: \"d6e6a03e-c496-4589-8d51-f6d9e89e14ae\") " Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.159558 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7qm5z\" (UniqueName: \"kubernetes.io/projected/924d7506-6027-4d31-b57a-19fc787ba356-kube-api-access-7qm5z\") pod \"924d7506-6027-4d31-b57a-19fc787ba356\" (UID: \"924d7506-6027-4d31-b57a-19fc787ba356\") " Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.159618 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/924d7506-6027-4d31-b57a-19fc787ba356-ovsdbserver-sb-tls-certs\") pod \"924d7506-6027-4d31-b57a-19fc787ba356\" (UID: \"924d7506-6027-4d31-b57a-19fc787ba356\") " Jan 20 17:45:34 crc 
kubenswrapper[4558]: I0120 17:45:34.159649 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/924d7506-6027-4d31-b57a-19fc787ba356-scripts\") pod \"924d7506-6027-4d31-b57a-19fc787ba356\" (UID: \"924d7506-6027-4d31-b57a-19fc787ba356\") " Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.159684 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d6e6a03e-c496-4589-8d51-f6d9e89e14ae-scripts\") pod \"d6e6a03e-c496-4589-8d51-f6d9e89e14ae\" (UID: \"d6e6a03e-c496-4589-8d51-f6d9e89e14ae\") " Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.159764 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/d6e6a03e-c496-4589-8d51-f6d9e89e14ae-metrics-certs-tls-certs\") pod \"d6e6a03e-c496-4589-8d51-f6d9e89e14ae\" (UID: \"d6e6a03e-c496-4589-8d51-f6d9e89e14ae\") " Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.160059 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/d6e6a03e-c496-4589-8d51-f6d9e89e14ae-ovn-rundir\") pod \"d6e6a03e-c496-4589-8d51-f6d9e89e14ae\" (UID: \"d6e6a03e-c496-4589-8d51-f6d9e89e14ae\") " Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.160173 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/d6e6a03e-c496-4589-8d51-f6d9e89e14ae-ovn-northd-tls-certs\") pod \"d6e6a03e-c496-4589-8d51-f6d9e89e14ae\" (UID: \"d6e6a03e-c496-4589-8d51-f6d9e89e14ae\") " Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.160192 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6wn7h\" (UniqueName: \"kubernetes.io/projected/d6e6a03e-c496-4589-8d51-f6d9e89e14ae-kube-api-access-6wn7h\") pod \"d6e6a03e-c496-4589-8d51-f6d9e89e14ae\" (UID: \"d6e6a03e-c496-4589-8d51-f6d9e89e14ae\") " Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.160629 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/d7f1ecf4-9630-44e7-a627-cc63de361385-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-84b9f45d47-2lft9\" (UID: \"d7f1ecf4-9630-44e7-a627-cc63de361385\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-2lft9" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.160744 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q5625\" (UniqueName: \"kubernetes.io/projected/d7f1ecf4-9630-44e7-a627-cc63de361385-kube-api-access-q5625\") pod \"dnsmasq-dnsmasq-84b9f45d47-2lft9\" (UID: \"d7f1ecf4-9630-44e7-a627-cc63de361385\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-2lft9" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.160939 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d7f1ecf4-9630-44e7-a627-cc63de361385-config\") pod \"dnsmasq-dnsmasq-84b9f45d47-2lft9\" (UID: \"d7f1ecf4-9630-44e7-a627-cc63de361385\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-2lft9" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.161334 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-677c-account-create-update-c2md6"] Jan 20 17:45:34 crc kubenswrapper[4558]: 
I0120 17:45:34.161935 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d7f1ecf4-9630-44e7-a627-cc63de361385-config\") pod \"dnsmasq-dnsmasq-84b9f45d47-2lft9\" (UID: \"d7f1ecf4-9630-44e7-a627-cc63de361385\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-2lft9" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.162610 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/924d7506-6027-4d31-b57a-19fc787ba356-scripts" (OuterVolumeSpecName: "scripts") pod "924d7506-6027-4d31-b57a-19fc787ba356" (UID: "924d7506-6027-4d31-b57a-19fc787ba356"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.164202 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-12a5-account-create-update-6g9q5"] Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.168579 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/924d7506-6027-4d31-b57a-19fc787ba356-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "924d7506-6027-4d31-b57a-19fc787ba356" (UID: "924d7506-6027-4d31-b57a-19fc787ba356"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.169418 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d6e6a03e-c496-4589-8d51-f6d9e89e14ae-scripts" (OuterVolumeSpecName: "scripts") pod "d6e6a03e-c496-4589-8d51-f6d9e89e14ae" (UID: "d6e6a03e-c496-4589-8d51-f6d9e89e14ae"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.174960 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/d7f1ecf4-9630-44e7-a627-cc63de361385-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-84b9f45d47-2lft9\" (UID: \"d7f1ecf4-9630-44e7-a627-cc63de361385\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-2lft9" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.175570 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d6e6a03e-c496-4589-8d51-f6d9e89e14ae-config" (OuterVolumeSpecName: "config") pod "d6e6a03e-c496-4589-8d51-f6d9e89e14ae" (UID: "d6e6a03e-c496-4589-8d51-f6d9e89e14ae"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.175681 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/924d7506-6027-4d31-b57a-19fc787ba356-config" (OuterVolumeSpecName: "config") pod "924d7506-6027-4d31-b57a-19fc787ba356" (UID: "924d7506-6027-4d31-b57a-19fc787ba356"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.175998 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d6e6a03e-c496-4589-8d51-f6d9e89e14ae-ovn-rundir" (OuterVolumeSpecName: "ovn-rundir") pod "d6e6a03e-c496-4589-8d51-f6d9e89e14ae" (UID: "d6e6a03e-c496-4589-8d51-f6d9e89e14ae"). InnerVolumeSpecName "ovn-rundir". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.191990 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.220672 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q5625\" (UniqueName: \"kubernetes.io/projected/d7f1ecf4-9630-44e7-a627-cc63de361385-kube-api-access-q5625\") pod \"dnsmasq-dnsmasq-84b9f45d47-2lft9\" (UID: \"d7f1ecf4-9630-44e7-a627-cc63de361385\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-2lft9" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.220854 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "ovndbcluster-sb-etc-ovn") pod "924d7506-6027-4d31-b57a-19fc787ba356" (UID: "924d7506-6027-4d31-b57a-19fc787ba356"). InnerVolumeSpecName "local-storage03-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.229393 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d6e6a03e-c496-4589-8d51-f6d9e89e14ae-kube-api-access-6wn7h" (OuterVolumeSpecName: "kube-api-access-6wn7h") pod "d6e6a03e-c496-4589-8d51-f6d9e89e14ae" (UID: "d6e6a03e-c496-4589-8d51-f6d9e89e14ae"). InnerVolumeSpecName "kube-api-access-6wn7h". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.260678 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-2lft9" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.262849 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d6e6a03e-c496-4589-8d51-f6d9e89e14ae-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.262872 4558 reconciler_common.go:293] "Volume detached for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/d6e6a03e-c496-4589-8d51-f6d9e89e14ae-ovn-rundir\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.262886 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6wn7h\" (UniqueName: \"kubernetes.io/projected/d6e6a03e-c496-4589-8d51-f6d9e89e14ae-kube-api-access-6wn7h\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.262915 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" " Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.262933 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/924d7506-6027-4d31-b57a-19fc787ba356-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.262944 4558 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/924d7506-6027-4d31-b57a-19fc787ba356-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.262955 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d6e6a03e-c496-4589-8d51-f6d9e89e14ae-config\") on node \"crc\" DevicePath \"\"" 
Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.262967 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/924d7506-6027-4d31-b57a-19fc787ba356-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.272742 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-db-create-fznd5"] Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.290681 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-db-create-fznd5"] Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.314358 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/924d7506-6027-4d31-b57a-19fc787ba356-kube-api-access-7qm5z" (OuterVolumeSpecName: "kube-api-access-7qm5z") pod "924d7506-6027-4d31-b57a-19fc787ba356" (UID: "924d7506-6027-4d31-b57a-19fc787ba356"). InnerVolumeSpecName "kube-api-access-7qm5z". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.368133 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7qm5z\" (UniqueName: \"kubernetes.io/projected/924d7506-6027-4d31-b57a-19fc787ba356-kube-api-access-7qm5z\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.378876 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-nb-0_b2fb8e70-15aa-4b38-a44d-5f818dfc755c/ovsdbserver-nb/0.log" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.379003 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.379398 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/924d7506-6027-4d31-b57a-19fc787ba356-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "924d7506-6027-4d31-b57a-19fc787ba356" (UID: "924d7506-6027-4d31-b57a-19fc787ba356"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.401073 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d6e6a03e-c496-4589-8d51-f6d9e89e14ae-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d6e6a03e-c496-4589-8d51-f6d9e89e14ae" (UID: "d6e6a03e-c496-4589-8d51-f6d9e89e14ae"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.436202 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.451370 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-8c6b-account-create-update-qcnqx"] Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.478880 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6dchb\" (UniqueName: \"kubernetes.io/projected/b2fb8e70-15aa-4b38-a44d-5f818dfc755c-kube-api-access-6dchb\") pod \"b2fb8e70-15aa-4b38-a44d-5f818dfc755c\" (UID: \"b2fb8e70-15aa-4b38-a44d-5f818dfc755c\") " Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.479190 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-nb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"b2fb8e70-15aa-4b38-a44d-5f818dfc755c\" (UID: \"b2fb8e70-15aa-4b38-a44d-5f818dfc755c\") " Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.480211 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b2fb8e70-15aa-4b38-a44d-5f818dfc755c-ovsdbserver-nb-tls-certs\") pod \"b2fb8e70-15aa-4b38-a44d-5f818dfc755c\" (UID: \"b2fb8e70-15aa-4b38-a44d-5f818dfc755c\") " Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.480440 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/b2fb8e70-15aa-4b38-a44d-5f818dfc755c-metrics-certs-tls-certs\") pod \"b2fb8e70-15aa-4b38-a44d-5f818dfc755c\" (UID: \"b2fb8e70-15aa-4b38-a44d-5f818dfc755c\") " Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.480562 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b2fb8e70-15aa-4b38-a44d-5f818dfc755c-scripts\") pod \"b2fb8e70-15aa-4b38-a44d-5f818dfc755c\" (UID: \"b2fb8e70-15aa-4b38-a44d-5f818dfc755c\") " Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.480868 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2fb8e70-15aa-4b38-a44d-5f818dfc755c-combined-ca-bundle\") pod \"b2fb8e70-15aa-4b38-a44d-5f818dfc755c\" (UID: \"b2fb8e70-15aa-4b38-a44d-5f818dfc755c\") " Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.480988 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b2fb8e70-15aa-4b38-a44d-5f818dfc755c-config\") pod \"b2fb8e70-15aa-4b38-a44d-5f818dfc755c\" (UID: \"b2fb8e70-15aa-4b38-a44d-5f818dfc755c\") " Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.485929 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-db-create-75945"] Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.486158 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b2fb8e70-15aa-4b38-a44d-5f818dfc755c-scripts" (OuterVolumeSpecName: "scripts") pod "b2fb8e70-15aa-4b38-a44d-5f818dfc755c" (UID: "b2fb8e70-15aa-4b38-a44d-5f818dfc755c"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.490223 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/b2fb8e70-15aa-4b38-a44d-5f818dfc755c-ovsdb-rundir\") pod \"b2fb8e70-15aa-4b38-a44d-5f818dfc755c\" (UID: \"b2fb8e70-15aa-4b38-a44d-5f818dfc755c\") " Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.491281 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b2fb8e70-15aa-4b38-a44d-5f818dfc755c-config" (OuterVolumeSpecName: "config") pod "b2fb8e70-15aa-4b38-a44d-5f818dfc755c" (UID: "b2fb8e70-15aa-4b38-a44d-5f818dfc755c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.491643 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/924d7506-6027-4d31-b57a-19fc787ba356-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.491675 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6e6a03e-c496-4589-8d51-f6d9e89e14ae-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.491688 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b2fb8e70-15aa-4b38-a44d-5f818dfc755c-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.491698 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b2fb8e70-15aa-4b38-a44d-5f818dfc755c-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.491708 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.510532 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage02-crc" (OuterVolumeSpecName: "ovndbcluster-nb-etc-ovn") pod "b2fb8e70-15aa-4b38-a44d-5f818dfc755c" (UID: "b2fb8e70-15aa-4b38-a44d-5f818dfc755c"). InnerVolumeSpecName "local-storage02-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.510633 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b2fb8e70-15aa-4b38-a44d-5f818dfc755c-kube-api-access-6dchb" (OuterVolumeSpecName: "kube-api-access-6dchb") pod "b2fb8e70-15aa-4b38-a44d-5f818dfc755c" (UID: "b2fb8e70-15aa-4b38-a44d-5f818dfc755c"). InnerVolumeSpecName "kube-api-access-6dchb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.510803 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b2fb8e70-15aa-4b38-a44d-5f818dfc755c-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "b2fb8e70-15aa-4b38-a44d-5f818dfc755c" (UID: "b2fb8e70-15aa-4b38-a44d-5f818dfc755c"). InnerVolumeSpecName "ovsdb-rundir". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.517558 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell0-db-create-75945"] Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.527447 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-bd94-account-create-update-stnv4"] Jan 20 17:45:34 crc kubenswrapper[4558]: E0120 17:45:34.545127 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:45:34 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:45:34 crc kubenswrapper[4558]: Jan 20 17:45:34 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:45:34 crc kubenswrapper[4558]: Jan 20 17:45:34 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:45:34 crc kubenswrapper[4558]: Jan 20 17:45:34 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:45:34 crc kubenswrapper[4558]: Jan 20 17:45:34 crc kubenswrapper[4558]: if [ -n "" ]; then Jan 20 17:45:34 crc kubenswrapper[4558]: GRANT_DATABASE="" Jan 20 17:45:34 crc kubenswrapper[4558]: else Jan 20 17:45:34 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:45:34 crc kubenswrapper[4558]: fi Jan 20 17:45:34 crc kubenswrapper[4558]: Jan 20 17:45:34 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:45:34 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:45:34 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:45:34 crc kubenswrapper[4558]: # 3. 
create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:45:34 crc kubenswrapper[4558]: # support updates Jan 20 17:45:34 crc kubenswrapper[4558]: Jan 20 17:45:34 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:45:34 crc kubenswrapper[4558]: E0120 17:45:34.547245 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"openstack-cell1-mariadb-root-db-secret\\\" not found\"" pod="openstack-kuttl-tests/root-account-create-update-fhpsv" podUID="03ca5cbb-ca72-46f1-a44f-42ac0fb7cf17" Jan 20 17:45:34 crc kubenswrapper[4558]: E0120 17:45:34.585145 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:45:34 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:45:34 crc kubenswrapper[4558]: Jan 20 17:45:34 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:45:34 crc kubenswrapper[4558]: Jan 20 17:45:34 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:45:34 crc kubenswrapper[4558]: Jan 20 17:45:34 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:45:34 crc kubenswrapper[4558]: Jan 20 17:45:34 crc kubenswrapper[4558]: if [ -n "nova_cell1" ]; then Jan 20 17:45:34 crc kubenswrapper[4558]: GRANT_DATABASE="nova_cell1" Jan 20 17:45:34 crc kubenswrapper[4558]: else Jan 20 17:45:34 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:45:34 crc kubenswrapper[4558]: fi Jan 20 17:45:34 crc kubenswrapper[4558]: Jan 20 17:45:34 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:45:34 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:45:34 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:45:34 crc kubenswrapper[4558]: # 3. create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:45:34 crc kubenswrapper[4558]: # support updates Jan 20 17:45:34 crc kubenswrapper[4558]: Jan 20 17:45:34 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:45:34 crc kubenswrapper[4558]: E0120 17:45:34.586538 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"nova-cell1-db-secret\\\" not found\"" pod="openstack-kuttl-tests/nova-cell1-51af-account-create-update-52f7p" podUID="89fd10f3-4598-47c6-ad0e-59aa59d41894" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.589121 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0a030561-c5c2-4b2b-87fd-677bde229db8" path="/var/lib/kubelet/pods/0a030561-c5c2-4b2b-87fd-677bde229db8/volumes" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.599298 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b2fb8e70-15aa-4b38-a44d-5f818dfc755c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b2fb8e70-15aa-4b38-a44d-5f818dfc755c" (UID: "b2fb8e70-15aa-4b38-a44d-5f818dfc755c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.621202 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0cb0fadf-7873-4efe-b8b5-965e4482ebf2" path="/var/lib/kubelet/pods/0cb0fadf-7873-4efe-b8b5-965e4482ebf2/volumes" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.625536 4558 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/b2fb8e70-15aa-4b38-a44d-5f818dfc755c-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.625577 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6dchb\" (UniqueName: \"kubernetes.io/projected/b2fb8e70-15aa-4b38-a44d-5f818dfc755c-kube-api-access-6dchb\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.625611 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" " Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.625623 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2fb8e70-15aa-4b38-a44d-5f818dfc755c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.627100 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="11c1b0d7-a15e-431e-bcbf-e01ab670c81e" path="/var/lib/kubelet/pods/11c1b0d7-a15e-431e-bcbf-e01ab670c81e/volumes" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.628239 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1889de01-2407-450e-b839-e4457e27e214" path="/var/lib/kubelet/pods/1889de01-2407-450e-b839-e4457e27e214/volumes" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.651559 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2ffb225d-0c31-4f91-905e-27ce314ee5df" path="/var/lib/kubelet/pods/2ffb225d-0c31-4f91-905e-27ce314ee5df/volumes" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.652210 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4150b9b7-ff56-4e21-b3c6-62510856cc94" path="/var/lib/kubelet/pods/4150b9b7-ff56-4e21-b3c6-62510856cc94/volumes" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.667851 4558 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." 
pod="openstack-kuttl-tests/nova-cell1-51af-account-create-update-52f7p" secret="" err="secret \"galera-openstack-cell1-dockercfg-mk4rp\" not found" Jan 20 17:45:34 crc kubenswrapper[4558]: E0120 17:45:34.676055 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:45:34 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:45:34 crc kubenswrapper[4558]: Jan 20 17:45:34 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:45:34 crc kubenswrapper[4558]: Jan 20 17:45:34 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:45:34 crc kubenswrapper[4558]: Jan 20 17:45:34 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:45:34 crc kubenswrapper[4558]: Jan 20 17:45:34 crc kubenswrapper[4558]: if [ -n "nova_cell1" ]; then Jan 20 17:45:34 crc kubenswrapper[4558]: GRANT_DATABASE="nova_cell1" Jan 20 17:45:34 crc kubenswrapper[4558]: else Jan 20 17:45:34 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:45:34 crc kubenswrapper[4558]: fi Jan 20 17:45:34 crc kubenswrapper[4558]: Jan 20 17:45:34 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:45:34 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:45:34 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:45:34 crc kubenswrapper[4558]: # 3. create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:45:34 crc kubenswrapper[4558]: # support updates Jan 20 17:45:34 crc kubenswrapper[4558]: Jan 20 17:45:34 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:45:34 crc kubenswrapper[4558]: E0120 17:45:34.678573 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"nova-cell1-db-secret\\\" not found\"" pod="openstack-kuttl-tests/nova-cell1-51af-account-create-update-52f7p" podUID="89fd10f3-4598-47c6-ad0e-59aa59d41894" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.689330 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57add5a3-3401-425c-aa80-d168c20c4cb9" path="/var/lib/kubelet/pods/57add5a3-3401-425c-aa80-d168c20c4cb9/volumes" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.690332 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="71426e1b-db2d-4e6a-9382-81c6b239f747" path="/var/lib/kubelet/pods/71426e1b-db2d-4e6a-9382-81c6b239f747/volumes" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.690826 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="86a50da2-ef44-4072-b680-d56cc4ee67c5" path="/var/lib/kubelet/pods/86a50da2-ef44-4072-b680-d56cc4ee67c5/volumes" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.691932 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8b82b74a-6466-4751-9d8a-7fe34276cde3" path="/var/lib/kubelet/pods/8b82b74a-6466-4751-9d8a-7fe34276cde3/volumes" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.692012 4558 generic.go:334] "Generic (PLEG): container finished" podID="f87490e8-557d-41e3-a07b-8fe12147b315" 
containerID="dea187ec3d51aa9fd2fd3318b66ce5429ee704c1f761dae601b4234a786d24c7" exitCode=143 Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.692451 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="93c34095-c480-40ce-b40e-bf05101bef8f" path="/var/lib/kubelet/pods/93c34095-c480-40ce-b40e-bf05101bef8f/volumes" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.693227 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a2428daa-4062-491f-8da0-af1d11e7d327" path="/var/lib/kubelet/pods/a2428daa-4062-491f-8da0-af1d11e7d327/volumes" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.693717 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a312b2d2-a836-40d2-8de8-9e75ef44ff9a" path="/var/lib/kubelet/pods/a312b2d2-a836-40d2-8de8-9e75ef44ff9a/volumes" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.694257 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ae3119fb-1355-4da8-a005-3610a1733b90" path="/var/lib/kubelet/pods/ae3119fb-1355-4da8-a005-3610a1733b90/volumes" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.694731 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="af51a5e8-6b1c-4d83-bcc0-6be598211cad" path="/var/lib/kubelet/pods/af51a5e8-6b1c-4d83-bcc0-6be598211cad/volumes" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.695683 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c48697d3-0295-45c7-89d7-7d2c524f3b20" path="/var/lib/kubelet/pods/c48697d3-0295-45c7-89d7-7d2c524f3b20/volumes" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.696184 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d0eb0bc4-4bc9-4c7a-ac58-c62d4674549d" path="/var/lib/kubelet/pods/d0eb0bc4-4bc9-4c7a-ac58-c62d4674549d/volumes" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.696673 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d4e31f5d-cc80-4318-8249-aa2bef62b79a" path="/var/lib/kubelet/pods/d4e31f5d-cc80-4318-8249-aa2bef62b79a/volumes" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.697552 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dc2e997b-bfd4-47cc-981e-30db4c1c8ceb" path="/var/lib/kubelet/pods/dc2e997b-bfd4-47cc-981e-30db4c1c8ceb/volumes" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.698042 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e48cd5c7-5acf-4683-b6eb-6947b7b56706" path="/var/lib/kubelet/pods/e48cd5c7-5acf-4683-b6eb-6947b7b56706/volumes" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.698548 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e68a5661-3dc2-48c0-9f39-d54304c4127d" path="/var/lib/kubelet/pods/e68a5661-3dc2-48c0-9f39-d54304c4127d/volumes" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.699444 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fd4fc3e6-d57c-4c96-9109-8c8c3c833307" path="/var/lib/kubelet/pods/fd4fc3e6-d57c-4c96-9109-8c8c3c833307/volumes" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.713948 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-51af-account-create-update-52f7p" event={"ID":"89fd10f3-4598-47c6-ad0e-59aa59d41894","Type":"ContainerStarted","Data":"7476dbbb0b15ae786d05551c50a9dfde849a39e3f040548072e414cf83856c34"} Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.714073 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"f87490e8-557d-41e3-a07b-8fe12147b315","Type":"ContainerDied","Data":"dea187ec3d51aa9fd2fd3318b66ce5429ee704c1f761dae601b4234a786d24c7"} Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.714460 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.714551 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-db-create-2gc5v"] Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.714613 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-db-create-2gc5v"] Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.714670 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-51af-account-create-update-52f7p"] Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.714732 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.714801 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.715045 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="ff0d0703-8173-4ac0-afc0-e673feaef286" containerName="nova-metadata-log" containerID="cri-o://eebdfbd12d120bd4d4adeb0cbba616d24ce5f4438b3c54b84c67c895df587e6d" gracePeriod=30 Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.715248 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="ff0d0703-8173-4ac0-afc0-e673feaef286" containerName="nova-metadata-metadata" containerID="cri-o://29a14fecb73a5e8e41c942ef00e6566817b5c2f5ec120abe410dce21f8142ccd" gracePeriod=30 Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.715465 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="70182fd8-a242-40f5-b20d-8ff4dd33e9b1" containerName="nova-api-log" containerID="cri-o://69302d70e78ca0d36f4a72bfbbc9246b0e5fece0f0fdbf172a8875985eda6a7c" gracePeriod=30 Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.715660 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="70182fd8-a242-40f5-b20d-8ff4dd33e9b1" containerName="nova-api-api" containerID="cri-o://b3f67b171f63171671e95039a6c39cf3571069f1e72ec88de6862170e74914f6" gracePeriod=30 Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.720547 4558 generic.go:334] "Generic (PLEG): container finished" podID="56173817-8246-4f37-b157-3890912004ca" containerID="65d88d20f8ddd45d36845fd06af8255069c3338120c182ed2e8bfd0b56484bec" exitCode=143 Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.720986 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstackclient" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.720682 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"56173817-8246-4f37-b157-3890912004ca","Type":"ContainerDied","Data":"65d88d20f8ddd45d36845fd06af8255069c3338120c182ed2e8bfd0b56484bec"} Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.725370 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-fhpsv" event={"ID":"03ca5cbb-ca72-46f1-a44f-42ac0fb7cf17","Type":"ContainerStarted","Data":"c208f9601a516395e4d9d65bc5d53e905f17f21efe78faa30eb47cc79340b2c3"} Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.726052 4558 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openstack-kuttl-tests/root-account-create-update-fhpsv" secret="" err="secret \"galera-openstack-cell1-dockercfg-mk4rp\" not found" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.728824 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d6e6a03e-c496-4589-8d51-f6d9e89e14ae-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "d6e6a03e-c496-4589-8d51-f6d9e89e14ae" (UID: "d6e6a03e-c496-4589-8d51-f6d9e89e14ae"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:34 crc kubenswrapper[4558]: E0120 17:45:34.732764 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:45:34 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:45:34 crc kubenswrapper[4558]: Jan 20 17:45:34 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:45:34 crc kubenswrapper[4558]: Jan 20 17:45:34 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:45:34 crc kubenswrapper[4558]: Jan 20 17:45:34 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:45:34 crc kubenswrapper[4558]: Jan 20 17:45:34 crc kubenswrapper[4558]: if [ -n "" ]; then Jan 20 17:45:34 crc kubenswrapper[4558]: GRANT_DATABASE="" Jan 20 17:45:34 crc kubenswrapper[4558]: else Jan 20 17:45:34 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:45:34 crc kubenswrapper[4558]: fi Jan 20 17:45:34 crc kubenswrapper[4558]: Jan 20 17:45:34 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:45:34 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:45:34 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:45:34 crc kubenswrapper[4558]: # 3. 
create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:45:34 crc kubenswrapper[4558]: # support updates Jan 20 17:45:34 crc kubenswrapper[4558]: Jan 20 17:45:34 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:45:34 crc kubenswrapper[4558]: E0120 17:45:34.732829 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Jan 20 17:45:34 crc kubenswrapper[4558]: E0120 17:45:34.732871 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/de4d8126-91cf-4149-bec4-4accaf558308-config-data podName:de4d8126-91cf-4149-bec4-4accaf558308 nodeName:}" failed. No retries permitted until 2026-01-20 17:45:36.732855142 +0000 UTC m=+3830.493193110 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/de4d8126-91cf-4149-bec4-4accaf558308-config-data") pod "rabbitmq-cell1-server-0" (UID: "de4d8126-91cf-4149-bec4-4accaf558308") : configmap "rabbitmq-cell1-config-data" not found Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.733096 4558 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/d6e6a03e-c496-4589-8d51-f6d9e89e14ae-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:34 crc kubenswrapper[4558]: E0120 17:45:34.734634 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"openstack-cell1-mariadb-root-db-secret\\\" not found\"" pod="openstack-kuttl-tests/root-account-create-update-fhpsv" podUID="03ca5cbb-ca72-46f1-a44f-42ac0fb7cf17" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.740928 4558 generic.go:334] "Generic (PLEG): container finished" podID="2bfab6e6-f4a2-4948-9715-43c6f13d88fd" containerID="dfb7f1325b9ebbf067b631d2aff33318d0cc9724f90cfc14dfab1a1ce2b6b6da" exitCode=137 Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.741114 4558 scope.go:117] "RemoveContainer" containerID="dfb7f1325b9ebbf067b631d2aff33318d0cc9724f90cfc14dfab1a1ce2b6b6da" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.741412 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstackclient" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.747826 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d6e6a03e-c496-4589-8d51-f6d9e89e14ae-ovn-northd-tls-certs" (OuterVolumeSpecName: "ovn-northd-tls-certs") pod "d6e6a03e-c496-4589-8d51-f6d9e89e14ae" (UID: "d6e6a03e-c496-4589-8d51-f6d9e89e14ae"). InnerVolumeSpecName "ovn-northd-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.748495 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-db-create-vhnt4"] Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.756844 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-db-create-vhnt4"] Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.756945 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-3b8d-account-create-update-75ws5" event={"ID":"d7ca0a7f-fd93-48de-b71b-b23d1aef2af7","Type":"ContainerStarted","Data":"f6b00365b958d533781cfbfb7fe87400a2e32d0f216458594a27ba81cf37d8d2"} Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.763105 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage02-crc" (UniqueName: "kubernetes.io/local-volume/local-storage02-crc") on node "crc" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.766759 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-3b8d-account-create-update-75ws5"] Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.771774 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-57496b565d-kwb5f"] Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.771985 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-keystone-listener-57496b565d-kwb5f" podUID="fd5cfc55-8cd9-4a76-87b8-4050b8f1030f" containerName="barbican-keystone-listener-log" containerID="cri-o://3ffb7d59fade4765056a811588a9c3857b85a1ef4b82cfda814dd74069ed2bf9" gracePeriod=30 Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.772317 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-keystone-listener-57496b565d-kwb5f" podUID="fd5cfc55-8cd9-4a76-87b8-4050b8f1030f" containerName="barbican-keystone-listener" containerID="cri-o://08599f71319860b69b6674beac66044f825719a01bdca428dd565ea148a25bb0" gracePeriod=30 Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.772806 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/924d7506-6027-4d31-b57a-19fc787ba356-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "924d7506-6027-4d31-b57a-19fc787ba356" (UID: "924d7506-6027-4d31-b57a-19fc787ba356"). InnerVolumeSpecName "metrics-certs-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.776886 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-12a5-account-create-update-6g9q5" event={"ID":"a8cb168e-9c96-449b-a8d6-bcd8511f1a53","Type":"ContainerStarted","Data":"c2ec65a49b0bd294556601dbcfb95af2053f3b1750bc11886392d0e084111241"} Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.780735 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-worker-7975b7ff57-c5czg"] Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.781086 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-worker-7975b7ff57-c5czg" podUID="1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2" containerName="barbican-worker-log" containerID="cri-o://ddb2774f5a646ced66660bb7d2fc7db683928585fea6955ddbda9cb09d41a20e" gracePeriod=30 Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.781251 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-worker-7975b7ff57-c5czg" podUID="1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2" containerName="barbican-worker" containerID="cri-o://ec57a9b625bd2a7169cdebad1646b567fad8da5b766be344abba4af1c17ab765" gracePeriod=30 Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.786738 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-proxy-6645dd7478-nfmw9"] Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.787229 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-proxy-6645dd7478-nfmw9" podUID="321b2cf0-a7dd-4339-ab97-6327272614b2" containerName="proxy-httpd" containerID="cri-o://d555fffc777c20179c07649221e67cf87d58b574768c17302a0db151ff0fe7f8" gracePeriod=30 Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.787342 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-proxy-6645dd7478-nfmw9" podUID="321b2cf0-a7dd-4339-ab97-6327272614b2" containerName="proxy-server" containerID="cri-o://649e44f608af14cfa1fc698291c5750acd272dbe43c21d8bf64cbc026551b71b" gracePeriod=30 Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.794901 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-api-74bb4f5f4-5ppmg"] Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.795080 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-api-74bb4f5f4-5ppmg" podUID="d619cd37-a474-4965-b382-749ed6d55d6d" containerName="barbican-api-log" containerID="cri-o://985673ff538dfdfff39427adadbf62ca67de938e3a41cbb308d4b6035782e705" gracePeriod=30 Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.795157 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-api-74bb4f5f4-5ppmg" podUID="d619cd37-a474-4965-b382-749ed6d55d6d" containerName="barbican-api" containerID="cri-o://dcd025269e0c4cae451a75917b3ffb9392b169d6ba1b4f275051e1022e3b09b6" gracePeriod=30 Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.797146 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-fhpsv"] Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.798433 4558 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-nb-0_b2fb8e70-15aa-4b38-a44d-5f818dfc755c/ovsdbserver-nb/0.log" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.798489 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"b2fb8e70-15aa-4b38-a44d-5f818dfc755c","Type":"ContainerDied","Data":"efdde4f4bbe82a8186d4a66013c6519838164ff5a56531e54a7751d623251a61"} Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.798547 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.803660 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/924d7506-6027-4d31-b57a-19fc787ba356-ovsdbserver-sb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-sb-tls-certs") pod "924d7506-6027-4d31-b57a-19fc787ba356" (UID: "924d7506-6027-4d31-b57a-19fc787ba356"). InnerVolumeSpecName "ovsdbserver-sb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.805460 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-0"] Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.810686 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-fhpsv"] Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.819799 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-sb-0_924d7506-6027-4d31-b57a-19fc787ba356/ovsdbserver-sb/0.log" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.819861 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"924d7506-6027-4d31-b57a-19fc787ba356","Type":"ContainerDied","Data":"6d6366514407e5e8a1279936e2b130c692e531342e856e378104f547943b1fa3"} Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.819935 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.829520 4558 generic.go:334] "Generic (PLEG): container finished" podID="50804fa8-d09d-49f8-a143-d7ec24ec542a" containerID="661b71951f10fd7eb454cf2a5617695ac503cf4d432c4017c7980d3e23a068f2" exitCode=143 Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.829631 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-5b5f5cd6d-6rk7z" event={"ID":"50804fa8-d09d-49f8-a143-d7ec24ec542a","Type":"ContainerDied","Data":"661b71951f10fd7eb454cf2a5617695ac503cf4d432c4017c7980d3e23a068f2"} Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.830316 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.830565 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" podUID="423502e9-61bd-4da9-953c-48ab84b4f004" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://5bb50dc1e160060e179121c814e65b7d5b022719607f19c151a7253e11a56151" gracePeriod=30 Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.834750 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b2fb8e70-15aa-4b38-a44d-5f818dfc755c-ovsdbserver-nb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-nb-tls-certs") pod "b2fb8e70-15aa-4b38-a44d-5f818dfc755c" (UID: "b2fb8e70-15aa-4b38-a44d-5f818dfc755c"). InnerVolumeSpecName "ovsdbserver-nb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.835719 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/2bfab6e6-f4a2-4948-9715-43c6f13d88fd-openstack-config-secret\") pod \"2bfab6e6-f4a2-4948-9715-43c6f13d88fd\" (UID: \"2bfab6e6-f4a2-4948-9715-43c6f13d88fd\") " Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.835807 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/2bfab6e6-f4a2-4948-9715-43c6f13d88fd-openstack-config\") pod \"2bfab6e6-f4a2-4948-9715-43c6f13d88fd\" (UID: \"2bfab6e6-f4a2-4948-9715-43c6f13d88fd\") " Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.836013 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rf49t\" (UniqueName: \"kubernetes.io/projected/2bfab6e6-f4a2-4948-9715-43c6f13d88fd-kube-api-access-rf49t\") pod \"2bfab6e6-f4a2-4948-9715-43c6f13d88fd\" (UID: \"2bfab6e6-f4a2-4948-9715-43c6f13d88fd\") " Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.836124 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2bfab6e6-f4a2-4948-9715-43c6f13d88fd-combined-ca-bundle\") pod \"2bfab6e6-f4a2-4948-9715-43c6f13d88fd\" (UID: \"2bfab6e6-f4a2-4948-9715-43c6f13d88fd\") " Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.841313 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-51af-account-create-update-52f7p"] Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.846282 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/openstack-cell1-galera-0" podUID="5bca4373-5f64-4803-94e7-28deeb5caad3" 
containerName="galera" containerID="cri-o://7905c465186edb142be037f724712184999953459f1d4e54418a375cbfe42e59" gracePeriod=30 Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.848226 4558 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/924d7506-6027-4d31-b57a-19fc787ba356-ovsdbserver-sb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.848245 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.848255 4558 reconciler_common.go:293] "Volume detached for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/d6e6a03e-c496-4589-8d51-f6d9e89e14ae-ovn-northd-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.848265 4558 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/924d7506-6027-4d31-b57a-19fc787ba356-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.848275 4558 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b2fb8e70-15aa-4b38-a44d-5f818dfc755c-ovsdbserver-nb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:34 crc kubenswrapper[4558]: E0120 17:45:34.849916 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-cell1-scripts: configmap "openstack-cell1-scripts" not found Jan 20 17:45:34 crc kubenswrapper[4558]: E0120 17:45:34.849981 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/89fd10f3-4598-47c6-ad0e-59aa59d41894-operator-scripts podName:89fd10f3-4598-47c6-ad0e-59aa59d41894 nodeName:}" failed. No retries permitted until 2026-01-20 17:45:35.349965137 +0000 UTC m=+3829.110303105 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/89fd10f3-4598-47c6-ad0e-59aa59d41894-operator-scripts") pod "nova-cell1-51af-account-create-update-52f7p" (UID: "89fd10f3-4598-47c6-ad0e-59aa59d41894") : configmap "openstack-cell1-scripts" not found Jan 20 17:45:34 crc kubenswrapper[4558]: E0120 17:45:34.850239 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-cell1-scripts: configmap "openstack-cell1-scripts" not found Jan 20 17:45:34 crc kubenswrapper[4558]: E0120 17:45:34.850272 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/03ca5cbb-ca72-46f1-a44f-42ac0fb7cf17-operator-scripts podName:03ca5cbb-ca72-46f1-a44f-42ac0fb7cf17 nodeName:}" failed. No retries permitted until 2026-01-20 17:45:35.350262507 +0000 UTC m=+3829.110600464 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/03ca5cbb-ca72-46f1-a44f-42ac0fb7cf17-operator-scripts") pod "root-account-create-update-fhpsv" (UID: "03ca5cbb-ca72-46f1-a44f-42ac0fb7cf17") : configmap "openstack-cell1-scripts" not found Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.855610 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2bfab6e6-f4a2-4948-9715-43c6f13d88fd-kube-api-access-rf49t" (OuterVolumeSpecName: "kube-api-access-rf49t") pod "2bfab6e6-f4a2-4948-9715-43c6f13d88fd" (UID: "2bfab6e6-f4a2-4948-9715-43c6f13d88fd"). InnerVolumeSpecName "kube-api-access-rf49t". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.867854 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" podUID="de4d8126-91cf-4149-bec4-4accaf558308" containerName="rabbitmq" containerID="cri-o://f79892859b62fff49932e58f16de3c5503fa6029ca060df20454e36b9e9f37db" gracePeriod=604800 Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.871037 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/rabbitmq-server-0" podUID="4765e529-9729-4d27-a252-c0c9a7b67beb" containerName="rabbitmq" containerID="cri-o://28200cb794147f2ba9210855e02aa06f0b33943f75d1f48cedc60cedc86b6d96" gracePeriod=604800 Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.876085 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.876415 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="50cacddd-ebea-477f-af64-6e96a09a242e" containerName="nova-cell0-conductor-conductor" containerID="cri-o://4568dafb1cb4a5468a42a5482158e5c167d869bcb8c626b55be898275037c53d" gracePeriod=30 Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.876513 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b2fb8e70-15aa-4b38-a44d-5f818dfc755c-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "b2fb8e70-15aa-4b38-a44d-5f818dfc755c" (UID: "b2fb8e70-15aa-4b38-a44d-5f818dfc755c"). InnerVolumeSpecName "metrics-certs-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.882827 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-db-sync-5lxgd"] Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.893607 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-db-sync-5lxgd"] Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.901309 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.904272 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podUID="ea72c008-3a66-4577-8042-4b1e0ed1cca6" containerName="nova-cell1-conductor-conductor" containerID="cri-o://f53b402a7de8ccb9267cb06efb993b2c9befd769af338febfa1cab472df02511" gracePeriod=30 Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.906900 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-db-sync-m2qbl"] Jan 20 17:45:34 crc kubenswrapper[4558]: E0120 17:45:34.908840 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:45:34 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:45:34 crc kubenswrapper[4558]: Jan 20 17:45:34 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:45:34 crc kubenswrapper[4558]: Jan 20 17:45:34 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:45:34 crc kubenswrapper[4558]: Jan 20 17:45:34 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:45:34 crc kubenswrapper[4558]: Jan 20 17:45:34 crc kubenswrapper[4558]: if [ -n "nova_api" ]; then Jan 20 17:45:34 crc kubenswrapper[4558]: GRANT_DATABASE="nova_api" Jan 20 17:45:34 crc kubenswrapper[4558]: else Jan 20 17:45:34 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:45:34 crc kubenswrapper[4558]: fi Jan 20 17:45:34 crc kubenswrapper[4558]: Jan 20 17:45:34 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:45:34 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:45:34 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:45:34 crc kubenswrapper[4558]: # 3. 
create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:45:34 crc kubenswrapper[4558]: # support updates Jan 20 17:45:34 crc kubenswrapper[4558]: Jan 20 17:45:34 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:45:34 crc kubenswrapper[4558]: E0120 17:45:34.910936 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"nova-api-db-secret\\\" not found\"" pod="openstack-kuttl-tests/nova-api-3b8d-account-create-update-75ws5" podUID="d7ca0a7f-fd93-48de-b71b-b23d1aef2af7" Jan 20 17:45:34 crc kubenswrapper[4558]: E0120 17:45:34.912332 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:45:34 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:45:34 crc kubenswrapper[4558]: Jan 20 17:45:34 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:45:34 crc kubenswrapper[4558]: Jan 20 17:45:34 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:45:34 crc kubenswrapper[4558]: Jan 20 17:45:34 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:45:34 crc kubenswrapper[4558]: Jan 20 17:45:34 crc kubenswrapper[4558]: if [ -n "placement" ]; then Jan 20 17:45:34 crc kubenswrapper[4558]: GRANT_DATABASE="placement" Jan 20 17:45:34 crc kubenswrapper[4558]: else Jan 20 17:45:34 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:45:34 crc kubenswrapper[4558]: fi Jan 20 17:45:34 crc kubenswrapper[4558]: Jan 20 17:45:34 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:45:34 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:45:34 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:45:34 crc kubenswrapper[4558]: # 3. 
create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:45:34 crc kubenswrapper[4558]: # support updates Jan 20 17:45:34 crc kubenswrapper[4558]: Jan 20 17:45:34 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:45:34 crc kubenswrapper[4558]: E0120 17:45:34.914264 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"placement-db-secret\\\" not found\"" pod="openstack-kuttl-tests/placement-12a5-account-create-update-6g9q5" podUID="a8cb168e-9c96-449b-a8d6-bcd8511f1a53" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.922263 4558 generic.go:334] "Generic (PLEG): container finished" podID="fa551db1-9a4b-45c2-b640-4f3518b162f4" containerID="b5ee244c517bb7529681f18dd5be00352c8b0d2d264f9bf4dd993ae3ee88bc9f" exitCode=0 Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.922281 4558 generic.go:334] "Generic (PLEG): container finished" podID="fa551db1-9a4b-45c2-b640-4f3518b162f4" containerID="6ff615a38b1de5289443af49e872533c8fb33086c5c5f749a5757153445d80db" exitCode=0 Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.922290 4558 generic.go:334] "Generic (PLEG): container finished" podID="fa551db1-9a4b-45c2-b640-4f3518b162f4" containerID="730e7a8305ea822bf786ff7759b1b076db78be0ddb28d589685ca62f80722994" exitCode=0 Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.922297 4558 generic.go:334] "Generic (PLEG): container finished" podID="fa551db1-9a4b-45c2-b640-4f3518b162f4" containerID="db06ae2f5dcfb82f3b41bda4c320f201a9c03b7b5c3d71c0c19a43661eba2751" exitCode=0 Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.922303 4558 generic.go:334] "Generic (PLEG): container finished" podID="fa551db1-9a4b-45c2-b640-4f3518b162f4" containerID="1776b65beebb985be1fcd79b9aa133812c590cf0eb9a13dd5bf41981563e77b5" exitCode=0 Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.922309 4558 generic.go:334] "Generic (PLEG): container finished" podID="fa551db1-9a4b-45c2-b640-4f3518b162f4" containerID="0b9af6175ca2fe405cf0a757e866a177d1e865ed2977ca637413cf1645033f14" exitCode=0 Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.922317 4558 generic.go:334] "Generic (PLEG): container finished" podID="fa551db1-9a4b-45c2-b640-4f3518b162f4" containerID="35a4222bca9bef05ef01fd815e0b4f831f1296d08622fc5e89a2a361527c22e6" exitCode=0 Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.922326 4558 generic.go:334] "Generic (PLEG): container finished" podID="fa551db1-9a4b-45c2-b640-4f3518b162f4" containerID="6a4f662ca1f9347dca843ebc69844d9f6f10b499e6d23197e88dd1c43db718b8" exitCode=0 Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.922333 4558 generic.go:334] "Generic (PLEG): container finished" podID="fa551db1-9a4b-45c2-b640-4f3518b162f4" containerID="13c87e4a18a86e7c3b8ddb00e81ce8b0da89d4d0c280a7e98c30f1e03817bfc3" exitCode=0 Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.922339 4558 generic.go:334] "Generic (PLEG): container finished" podID="fa551db1-9a4b-45c2-b640-4f3518b162f4" containerID="c784e598ad58ecd5a81e3bf765a648caa06b6e4d7a98116be1483ea1a1916bd6" exitCode=0 Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.922347 4558 generic.go:334] "Generic (PLEG): container finished" podID="fa551db1-9a4b-45c2-b640-4f3518b162f4" containerID="cbaf44dfc0734a14fd6ecab8c4428e033aaee10d6fd6549b9c1af4209a3025c5" exitCode=0 Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.922353 4558 generic.go:334] "Generic 
(PLEG): container finished" podID="fa551db1-9a4b-45c2-b640-4f3518b162f4" containerID="967813e267c1874f848c851b80566f8d4b10ddb11ae1e902fcced650a357a740" exitCode=0 Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.922359 4558 generic.go:334] "Generic (PLEG): container finished" podID="fa551db1-9a4b-45c2-b640-4f3518b162f4" containerID="c873af103469702cab190e42ac22b360e4b1c8addd7ea94631a91965a8840503" exitCode=0 Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.922365 4558 generic.go:334] "Generic (PLEG): container finished" podID="fa551db1-9a4b-45c2-b640-4f3518b162f4" containerID="7946458eae645c01b2edae8d93ef429443754ef2e0e2732538ed1fcc7aab4131" exitCode=0 Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.922413 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fa551db1-9a4b-45c2-b640-4f3518b162f4","Type":"ContainerDied","Data":"b5ee244c517bb7529681f18dd5be00352c8b0d2d264f9bf4dd993ae3ee88bc9f"} Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.922445 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fa551db1-9a4b-45c2-b640-4f3518b162f4","Type":"ContainerDied","Data":"6ff615a38b1de5289443af49e872533c8fb33086c5c5f749a5757153445d80db"} Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.922458 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fa551db1-9a4b-45c2-b640-4f3518b162f4","Type":"ContainerDied","Data":"730e7a8305ea822bf786ff7759b1b076db78be0ddb28d589685ca62f80722994"} Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.922468 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fa551db1-9a4b-45c2-b640-4f3518b162f4","Type":"ContainerDied","Data":"db06ae2f5dcfb82f3b41bda4c320f201a9c03b7b5c3d71c0c19a43661eba2751"} Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.922476 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fa551db1-9a4b-45c2-b640-4f3518b162f4","Type":"ContainerDied","Data":"1776b65beebb985be1fcd79b9aa133812c590cf0eb9a13dd5bf41981563e77b5"} Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.922486 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fa551db1-9a4b-45c2-b640-4f3518b162f4","Type":"ContainerDied","Data":"0b9af6175ca2fe405cf0a757e866a177d1e865ed2977ca637413cf1645033f14"} Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.922494 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fa551db1-9a4b-45c2-b640-4f3518b162f4","Type":"ContainerDied","Data":"35a4222bca9bef05ef01fd815e0b4f831f1296d08622fc5e89a2a361527c22e6"} Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.922506 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fa551db1-9a4b-45c2-b640-4f3518b162f4","Type":"ContainerDied","Data":"6a4f662ca1f9347dca843ebc69844d9f6f10b499e6d23197e88dd1c43db718b8"} Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.922515 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fa551db1-9a4b-45c2-b640-4f3518b162f4","Type":"ContainerDied","Data":"13c87e4a18a86e7c3b8ddb00e81ce8b0da89d4d0c280a7e98c30f1e03817bfc3"} Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.922523 4558 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fa551db1-9a4b-45c2-b640-4f3518b162f4","Type":"ContainerDied","Data":"c784e598ad58ecd5a81e3bf765a648caa06b6e4d7a98116be1483ea1a1916bd6"} Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.922532 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fa551db1-9a4b-45c2-b640-4f3518b162f4","Type":"ContainerDied","Data":"cbaf44dfc0734a14fd6ecab8c4428e033aaee10d6fd6549b9c1af4209a3025c5"} Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.922540 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fa551db1-9a4b-45c2-b640-4f3518b162f4","Type":"ContainerDied","Data":"967813e267c1874f848c851b80566f8d4b10ddb11ae1e902fcced650a357a740"} Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.922552 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fa551db1-9a4b-45c2-b640-4f3518b162f4","Type":"ContainerDied","Data":"c873af103469702cab190e42ac22b360e4b1c8addd7ea94631a91965a8840503"} Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.922562 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fa551db1-9a4b-45c2-b640-4f3518b162f4","Type":"ContainerDied","Data":"7946458eae645c01b2edae8d93ef429443754ef2e0e2732538ed1fcc7aab4131"} Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.923864 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-db-sync-m2qbl"] Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.935035 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovn-northd-0_d6e6a03e-c496-4589-8d51-f6d9e89e14ae/ovn-northd/0.log" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.935114 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"d6e6a03e-c496-4589-8d51-f6d9e89e14ae","Type":"ContainerDied","Data":"3bdcd97cc1aef4d310fabb378441cbbc789ba14dafc8d7966bbd82791291aed5"} Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.935240 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.937075 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2bfab6e6-f4a2-4948-9715-43c6f13d88fd-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "2bfab6e6-f4a2-4948-9715-43c6f13d88fd" (UID: "2bfab6e6-f4a2-4948-9715-43c6f13d88fd"). InnerVolumeSpecName "openstack-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.949573 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rf49t\" (UniqueName: \"kubernetes.io/projected/2bfab6e6-f4a2-4948-9715-43c6f13d88fd-kube-api-access-rf49t\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.949603 4558 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/2bfab6e6-f4a2-4948-9715-43c6f13d88fd-openstack-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.949615 4558 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/b2fb8e70-15aa-4b38-a44d-5f818dfc755c-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.949779 4558 scope.go:117] "RemoveContainer" containerID="dfb7f1325b9ebbf067b631d2aff33318d0cc9724f90cfc14dfab1a1ce2b6b6da" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.952191 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2bfab6e6-f4a2-4948-9715-43c6f13d88fd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2bfab6e6-f4a2-4948-9715-43c6f13d88fd" (UID: "2bfab6e6-f4a2-4948-9715-43c6f13d88fd"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.952341 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.954811 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="ddd57d3b-c9de-46e3-897e-1a50ae49630e" containerName="nova-scheduler-scheduler" containerID="cri-o://bfedd515a4cd08318345cf62dd830e7f182ab82a54ada6e840a63a36d085cdc9" gracePeriod=30 Jan 20 17:45:34 crc kubenswrapper[4558]: E0120 17:45:34.955415 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dfb7f1325b9ebbf067b631d2aff33318d0cc9724f90cfc14dfab1a1ce2b6b6da\": container with ID starting with dfb7f1325b9ebbf067b631d2aff33318d0cc9724f90cfc14dfab1a1ce2b6b6da not found: ID does not exist" containerID="dfb7f1325b9ebbf067b631d2aff33318d0cc9724f90cfc14dfab1a1ce2b6b6da" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.955449 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dfb7f1325b9ebbf067b631d2aff33318d0cc9724f90cfc14dfab1a1ce2b6b6da"} err="failed to get container status \"dfb7f1325b9ebbf067b631d2aff33318d0cc9724f90cfc14dfab1a1ce2b6b6da\": rpc error: code = NotFound desc = could not find container \"dfb7f1325b9ebbf067b631d2aff33318d0cc9724f90cfc14dfab1a1ce2b6b6da\": container with ID starting with dfb7f1325b9ebbf067b631d2aff33318d0cc9724f90cfc14dfab1a1ce2b6b6da not found: ID does not exist" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.955477 4558 scope.go:117] "RemoveContainer" containerID="ead17a0d58c199bc2ba003661375148f8b9c61941971a8e8d61f318e35d6d553" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.957068 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-bd94-account-create-update-stnv4" 
event={"ID":"44f856be-bb11-468b-a157-1f2e2851d04d","Type":"ContainerStarted","Data":"69fbfb4b1319c3318255f6bddfd202a219fbe12d20ef86fff4bbc0a61137df21"} Jan 20 17:45:34 crc kubenswrapper[4558]: E0120 17:45:34.960803 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:45:34 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:45:34 crc kubenswrapper[4558]: Jan 20 17:45:34 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:45:34 crc kubenswrapper[4558]: Jan 20 17:45:34 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:45:34 crc kubenswrapper[4558]: Jan 20 17:45:34 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:45:34 crc kubenswrapper[4558]: Jan 20 17:45:34 crc kubenswrapper[4558]: if [ -n "nova_cell0" ]; then Jan 20 17:45:34 crc kubenswrapper[4558]: GRANT_DATABASE="nova_cell0" Jan 20 17:45:34 crc kubenswrapper[4558]: else Jan 20 17:45:34 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:45:34 crc kubenswrapper[4558]: fi Jan 20 17:45:34 crc kubenswrapper[4558]: Jan 20 17:45:34 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:45:34 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:45:34 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:45:34 crc kubenswrapper[4558]: # 3. create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:45:34 crc kubenswrapper[4558]: # support updates Jan 20 17:45:34 crc kubenswrapper[4558]: Jan 20 17:45:34 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.961328 4558 generic.go:334] "Generic (PLEG): container finished" podID="8361ba4f-e976-4c04-82ad-81e9412ba84c" containerID="df990974fa4d7bdeba24386c7a7548b9c42f83f4e37902446477a1561329d066" exitCode=0 Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.961405 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-b9f9b6874-4cxdp" event={"ID":"8361ba4f-e976-4c04-82ad-81e9412ba84c","Type":"ContainerDied","Data":"df990974fa4d7bdeba24386c7a7548b9c42f83f4e37902446477a1561329d066"} Jan 20 17:45:34 crc kubenswrapper[4558]: E0120 17:45:34.962131 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"nova-cell0-db-secret\\\" not found\"" pod="openstack-kuttl-tests/nova-cell0-bd94-account-create-update-stnv4" podUID="44f856be-bb11-468b-a157-1f2e2851d04d" Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.976222 4558 generic.go:334] "Generic (PLEG): container finished" podID="b752af55-9c06-469f-9353-e1042300de3c" containerID="93d7994154007967abeffaef97cbdbfea9fe0c9a249a8ebbcb1a6bb563d31477" exitCode=143 Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.977224 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"b752af55-9c06-469f-9353-e1042300de3c","Type":"ContainerDied","Data":"93d7994154007967abeffaef97cbdbfea9fe0c9a249a8ebbcb1a6bb563d31477"} Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.980593 4558 kubelet.go:2437] 
"SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:45:34 crc kubenswrapper[4558]: I0120 17:45:34.980765 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/kube-state-metrics-0" podUID="f4affc06-2032-4a14-8422-3c3ca984eada" containerName="kube-state-metrics" containerID="cri-o://3550557600f4dd7782ea6add771b6cef4f6b4aa0a7058c39c4e8a2f7f0f9a980" gracePeriod=30 Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.009036 4558 scope.go:117] "RemoveContainer" containerID="54155d91bfb6e6c18d3eb5afc095843ec5b7f0df04d6716cdc4199d6b21aa7d6" Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.013829 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.015288 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="2c77f6fd-736b-49e4-aa1c-ddffa451f3dc" containerName="proxy-httpd" containerID="cri-o://245499fc8d9dd24531eb844ac444830c3cbb3a8f2341bb67bd362bf2fe1577af" gracePeriod=30 Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.015544 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="2c77f6fd-736b-49e4-aa1c-ddffa451f3dc" containerName="sg-core" containerID="cri-o://720bb601be9d0a73599645a6bba6fa27fea9bddfb2119833260b2abb7695bc2c" gracePeriod=30 Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.015786 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="2c77f6fd-736b-49e4-aa1c-ddffa451f3dc" containerName="ceilometer-notification-agent" containerID="cri-o://4bb502fc98a21930b2123d685d6ca4b42bbac87bd71f98a6fe668e8dfce8ed68" gracePeriod=30 Jan 20 17:45:35 crc kubenswrapper[4558]: E0120 17:45:35.018985 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:45:35 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:45:35 crc kubenswrapper[4558]: Jan 20 17:45:35 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:45:35 crc kubenswrapper[4558]: Jan 20 17:45:35 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:45:35 crc kubenswrapper[4558]: Jan 20 17:45:35 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:45:35 crc kubenswrapper[4558]: Jan 20 17:45:35 crc kubenswrapper[4558]: if [ -n "glance" ]; then Jan 20 17:45:35 crc kubenswrapper[4558]: GRANT_DATABASE="glance" Jan 20 17:45:35 crc kubenswrapper[4558]: else Jan 20 17:45:35 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:45:35 crc kubenswrapper[4558]: fi Jan 20 17:45:35 crc kubenswrapper[4558]: Jan 20 17:45:35 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:45:35 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:45:35 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:45:35 crc kubenswrapper[4558]: # 3. 
create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:45:35 crc kubenswrapper[4558]: # support updates Jan 20 17:45:35 crc kubenswrapper[4558]: Jan 20 17:45:35 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:45:35 crc kubenswrapper[4558]: E0120 17:45:35.020254 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"glance-db-secret\\\" not found\"" pod="openstack-kuttl-tests/glance-8c6b-account-create-update-qcnqx" podUID="16a1e55d-37ca-4488-adc3-2602e7416d55" Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.022267 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="2c77f6fd-736b-49e4-aa1c-ddffa451f3dc" containerName="ceilometer-central-agent" containerID="cri-o://592aa64db3a21b522ec99255c04163435c7f2ca3c55140c0e0521af26785d506" gracePeriod=30 Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.042667 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2bfab6e6-f4a2-4948-9715-43c6f13d88fd-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "2bfab6e6-f4a2-4948-9715-43c6f13d88fd" (UID: "2bfab6e6-f4a2-4948-9715-43c6f13d88fd"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.063349 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2bfab6e6-f4a2-4948-9715-43c6f13d88fd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.063472 4558 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/2bfab6e6-f4a2-4948-9715-43c6f13d88fd-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.072717 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.087009 4558 scope.go:117] "RemoveContainer" containerID="925992b2eef4db1ac2a2537f57dad717cf3ee3ea67cbe9cec01c652878bf3f97" Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.102216 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.107989 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.116968 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.137008 4558 scope.go:117] "RemoveContainer" containerID="0b23cac82af3ec3ee44a48b3126c8df9ad27e49f292999b18a09db1d3481441e" Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.182758 4558 scope.go:117] "RemoveContainer" containerID="7e87cd2bf2bf667d667a6643f4f7c164cbd3aeeebc96908debb2fae778b18645" Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.199606 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.218337 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:45:35 crc 
kubenswrapper[4558]: I0120 17:45:35.222099 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-2lft9"] Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.253736 4558 scope.go:117] "RemoveContainer" containerID="391bf6661c1200e7d892ab027500c3b569ef3b9e9fbd3bdab1b7ee5442423861" Jan 20 17:45:35 crc kubenswrapper[4558]: E0120 17:45:35.386654 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-cell1-scripts: configmap "openstack-cell1-scripts" not found Jan 20 17:45:35 crc kubenswrapper[4558]: E0120 17:45:35.386729 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/89fd10f3-4598-47c6-ad0e-59aa59d41894-operator-scripts podName:89fd10f3-4598-47c6-ad0e-59aa59d41894 nodeName:}" failed. No retries permitted until 2026-01-20 17:45:36.38671182 +0000 UTC m=+3830.147049787 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/89fd10f3-4598-47c6-ad0e-59aa59d41894-operator-scripts") pod "nova-cell1-51af-account-create-update-52f7p" (UID: "89fd10f3-4598-47c6-ad0e-59aa59d41894") : configmap "openstack-cell1-scripts" not found Jan 20 17:45:35 crc kubenswrapper[4558]: E0120 17:45:35.387848 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-cell1-scripts: configmap "openstack-cell1-scripts" not found Jan 20 17:45:35 crc kubenswrapper[4558]: E0120 17:45:35.387887 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/03ca5cbb-ca72-46f1-a44f-42ac0fb7cf17-operator-scripts podName:03ca5cbb-ca72-46f1-a44f-42ac0fb7cf17 nodeName:}" failed. No retries permitted until 2026-01-20 17:45:36.38787679 +0000 UTC m=+3830.148214757 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/03ca5cbb-ca72-46f1-a44f-42ac0fb7cf17-operator-scripts") pod "root-account-create-update-fhpsv" (UID: "03ca5cbb-ca72-46f1-a44f-42ac0fb7cf17") : configmap "openstack-cell1-scripts" not found Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.508695 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" podUID="423502e9-61bd-4da9-953c-48ab84b4f004" containerName="nova-cell1-novncproxy-novncproxy" probeResult="failure" output="Get \"https://10.217.1.68:6080/vnc_lite.html\": dial tcp 10.217.1.68:6080: connect: connection refused" Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.609865 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.684342 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/swift-proxy-6645dd7478-nfmw9" Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.695453 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7thqn\" (UniqueName: \"kubernetes.io/projected/321b2cf0-a7dd-4339-ab97-6327272614b2-kube-api-access-7thqn\") pod \"321b2cf0-a7dd-4339-ab97-6327272614b2\" (UID: \"321b2cf0-a7dd-4339-ab97-6327272614b2\") " Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.695578 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/321b2cf0-a7dd-4339-ab97-6327272614b2-combined-ca-bundle\") pod \"321b2cf0-a7dd-4339-ab97-6327272614b2\" (UID: \"321b2cf0-a7dd-4339-ab97-6327272614b2\") " Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.695653 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/321b2cf0-a7dd-4339-ab97-6327272614b2-etc-swift\") pod \"321b2cf0-a7dd-4339-ab97-6327272614b2\" (UID: \"321b2cf0-a7dd-4339-ab97-6327272614b2\") " Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.695678 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4affc06-2032-4a14-8422-3c3ca984eada-combined-ca-bundle\") pod \"f4affc06-2032-4a14-8422-3c3ca984eada\" (UID: \"f4affc06-2032-4a14-8422-3c3ca984eada\") " Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.696423 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8xqht\" (UniqueName: \"kubernetes.io/projected/f4affc06-2032-4a14-8422-3c3ca984eada-kube-api-access-8xqht\") pod \"f4affc06-2032-4a14-8422-3c3ca984eada\" (UID: \"f4affc06-2032-4a14-8422-3c3ca984eada\") " Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.696465 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/321b2cf0-a7dd-4339-ab97-6327272614b2-internal-tls-certs\") pod \"321b2cf0-a7dd-4339-ab97-6327272614b2\" (UID: \"321b2cf0-a7dd-4339-ab97-6327272614b2\") " Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.696564 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/321b2cf0-a7dd-4339-ab97-6327272614b2-public-tls-certs\") pod \"321b2cf0-a7dd-4339-ab97-6327272614b2\" (UID: \"321b2cf0-a7dd-4339-ab97-6327272614b2\") " Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.696625 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/321b2cf0-a7dd-4339-ab97-6327272614b2-config-data\") pod \"321b2cf0-a7dd-4339-ab97-6327272614b2\" (UID: \"321b2cf0-a7dd-4339-ab97-6327272614b2\") " Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.696723 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/f4affc06-2032-4a14-8422-3c3ca984eada-kube-state-metrics-tls-certs\") pod \"f4affc06-2032-4a14-8422-3c3ca984eada\" (UID: \"f4affc06-2032-4a14-8422-3c3ca984eada\") " Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.696796 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/321b2cf0-a7dd-4339-ab97-6327272614b2-run-httpd\") pod \"321b2cf0-a7dd-4339-ab97-6327272614b2\" (UID: \"321b2cf0-a7dd-4339-ab97-6327272614b2\") " Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.696886 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/321b2cf0-a7dd-4339-ab97-6327272614b2-log-httpd\") pod \"321b2cf0-a7dd-4339-ab97-6327272614b2\" (UID: \"321b2cf0-a7dd-4339-ab97-6327272614b2\") " Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.698198 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/321b2cf0-a7dd-4339-ab97-6327272614b2-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "321b2cf0-a7dd-4339-ab97-6327272614b2" (UID: "321b2cf0-a7dd-4339-ab97-6327272614b2"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.698457 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/321b2cf0-a7dd-4339-ab97-6327272614b2-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "321b2cf0-a7dd-4339-ab97-6327272614b2" (UID: "321b2cf0-a7dd-4339-ab97-6327272614b2"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.709636 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/321b2cf0-a7dd-4339-ab97-6327272614b2-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "321b2cf0-a7dd-4339-ab97-6327272614b2" (UID: "321b2cf0-a7dd-4339-ab97-6327272614b2"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.712451 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/321b2cf0-a7dd-4339-ab97-6327272614b2-kube-api-access-7thqn" (OuterVolumeSpecName: "kube-api-access-7thqn") pod "321b2cf0-a7dd-4339-ab97-6327272614b2" (UID: "321b2cf0-a7dd-4339-ab97-6327272614b2"). InnerVolumeSpecName "kube-api-access-7thqn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.717336 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f4affc06-2032-4a14-8422-3c3ca984eada-kube-api-access-8xqht" (OuterVolumeSpecName: "kube-api-access-8xqht") pod "f4affc06-2032-4a14-8422-3c3ca984eada" (UID: "f4affc06-2032-4a14-8422-3c3ca984eada"). InnerVolumeSpecName "kube-api-access-8xqht". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.741575 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4affc06-2032-4a14-8422-3c3ca984eada-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f4affc06-2032-4a14-8422-3c3ca984eada" (UID: "f4affc06-2032-4a14-8422-3c3ca984eada"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.795451 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/321b2cf0-a7dd-4339-ab97-6327272614b2-config-data" (OuterVolumeSpecName: "config-data") pod "321b2cf0-a7dd-4339-ab97-6327272614b2" (UID: "321b2cf0-a7dd-4339-ab97-6327272614b2"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.796675 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/321b2cf0-a7dd-4339-ab97-6327272614b2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "321b2cf0-a7dd-4339-ab97-6327272614b2" (UID: "321b2cf0-a7dd-4339-ab97-6327272614b2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.799464 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/f4affc06-2032-4a14-8422-3c3ca984eada-kube-state-metrics-tls-config\") pod \"f4affc06-2032-4a14-8422-3c3ca984eada\" (UID: \"f4affc06-2032-4a14-8422-3c3ca984eada\") " Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.800319 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/321b2cf0-a7dd-4339-ab97-6327272614b2-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.800342 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7thqn\" (UniqueName: \"kubernetes.io/projected/321b2cf0-a7dd-4339-ab97-6327272614b2-kube-api-access-7thqn\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.800352 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/321b2cf0-a7dd-4339-ab97-6327272614b2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.800361 4558 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/321b2cf0-a7dd-4339-ab97-6327272614b2-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.800370 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f4affc06-2032-4a14-8422-3c3ca984eada-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.800378 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8xqht\" (UniqueName: \"kubernetes.io/projected/f4affc06-2032-4a14-8422-3c3ca984eada-kube-api-access-8xqht\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.800387 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/321b2cf0-a7dd-4339-ab97-6327272614b2-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.800395 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/321b2cf0-a7dd-4339-ab97-6327272614b2-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:35 crc kubenswrapper[4558]: E0120 17:45:35.800460 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 20 17:45:35 crc kubenswrapper[4558]: E0120 17:45:35.800506 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/4765e529-9729-4d27-a252-c0c9a7b67beb-config-data podName:4765e529-9729-4d27-a252-c0c9a7b67beb nodeName:}" failed. 
No retries permitted until 2026-01-20 17:45:39.800489421 +0000 UTC m=+3833.560827389 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/4765e529-9729-4d27-a252-c0c9a7b67beb-config-data") pod "rabbitmq-server-0" (UID: "4765e529-9729-4d27-a252-c0c9a7b67beb") : configmap "rabbitmq-config-data" not found Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.809626 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/321b2cf0-a7dd-4339-ab97-6327272614b2-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "321b2cf0-a7dd-4339-ab97-6327272614b2" (UID: "321b2cf0-a7dd-4339-ab97-6327272614b2"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.809804 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.810649 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4affc06-2032-4a14-8422-3c3ca984eada-kube-state-metrics-tls-certs" (OuterVolumeSpecName: "kube-state-metrics-tls-certs") pod "f4affc06-2032-4a14-8422-3c3ca984eada" (UID: "f4affc06-2032-4a14-8422-3c3ca984eada"). InnerVolumeSpecName "kube-state-metrics-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.817011 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/321b2cf0-a7dd-4339-ab97-6327272614b2-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "321b2cf0-a7dd-4339-ab97-6327272614b2" (UID: "321b2cf0-a7dd-4339-ab97-6327272614b2"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.817570 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.822276 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f4affc06-2032-4a14-8422-3c3ca984eada-kube-state-metrics-tls-config" (OuterVolumeSpecName: "kube-state-metrics-tls-config") pod "f4affc06-2032-4a14-8422-3c3ca984eada" (UID: "f4affc06-2032-4a14-8422-3c3ca984eada"). InnerVolumeSpecName "kube-state-metrics-tls-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.901362 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/423502e9-61bd-4da9-953c-48ab84b4f004-config-data\") pod \"423502e9-61bd-4da9-953c-48ab84b4f004\" (UID: \"423502e9-61bd-4da9-953c-48ab84b4f004\") " Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.901739 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5bca4373-5f64-4803-94e7-28deeb5caad3-combined-ca-bundle\") pod \"5bca4373-5f64-4803-94e7-28deeb5caad3\" (UID: \"5bca4373-5f64-4803-94e7-28deeb5caad3\") " Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.901776 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/423502e9-61bd-4da9-953c-48ab84b4f004-vencrypt-tls-certs\") pod \"423502e9-61bd-4da9-953c-48ab84b4f004\" (UID: \"423502e9-61bd-4da9-953c-48ab84b4f004\") " Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.901873 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/5bca4373-5f64-4803-94e7-28deeb5caad3-galera-tls-certs\") pod \"5bca4373-5f64-4803-94e7-28deeb5caad3\" (UID: \"5bca4373-5f64-4803-94e7-28deeb5caad3\") " Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.901931 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g6qhw\" (UniqueName: \"kubernetes.io/projected/5bca4373-5f64-4803-94e7-28deeb5caad3-kube-api-access-g6qhw\") pod \"5bca4373-5f64-4803-94e7-28deeb5caad3\" (UID: \"5bca4373-5f64-4803-94e7-28deeb5caad3\") " Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.901999 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5bca4373-5f64-4803-94e7-28deeb5caad3-operator-scripts\") pod \"5bca4373-5f64-4803-94e7-28deeb5caad3\" (UID: \"5bca4373-5f64-4803-94e7-28deeb5caad3\") " Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.902030 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/5bca4373-5f64-4803-94e7-28deeb5caad3-config-data-default\") pod \"5bca4373-5f64-4803-94e7-28deeb5caad3\" (UID: \"5bca4373-5f64-4803-94e7-28deeb5caad3\") " Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.902074 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/423502e9-61bd-4da9-953c-48ab84b4f004-nova-novncproxy-tls-certs\") pod \"423502e9-61bd-4da9-953c-48ab84b4f004\" (UID: \"423502e9-61bd-4da9-953c-48ab84b4f004\") " Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.902340 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4rpp\" (UniqueName: \"kubernetes.io/projected/423502e9-61bd-4da9-953c-48ab84b4f004-kube-api-access-s4rpp\") pod \"423502e9-61bd-4da9-953c-48ab84b4f004\" (UID: \"423502e9-61bd-4da9-953c-48ab84b4f004\") " Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.902368 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod 
\"5bca4373-5f64-4803-94e7-28deeb5caad3\" (UID: \"5bca4373-5f64-4803-94e7-28deeb5caad3\") " Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.902386 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5bca4373-5f64-4803-94e7-28deeb5caad3-kolla-config\") pod \"5bca4373-5f64-4803-94e7-28deeb5caad3\" (UID: \"5bca4373-5f64-4803-94e7-28deeb5caad3\") " Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.902438 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/423502e9-61bd-4da9-953c-48ab84b4f004-combined-ca-bundle\") pod \"423502e9-61bd-4da9-953c-48ab84b4f004\" (UID: \"423502e9-61bd-4da9-953c-48ab84b4f004\") " Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.902475 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/5bca4373-5f64-4803-94e7-28deeb5caad3-config-data-generated\") pod \"5bca4373-5f64-4803-94e7-28deeb5caad3\" (UID: \"5bca4373-5f64-4803-94e7-28deeb5caad3\") " Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.903199 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5bca4373-5f64-4803-94e7-28deeb5caad3-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "5bca4373-5f64-4803-94e7-28deeb5caad3" (UID: "5bca4373-5f64-4803-94e7-28deeb5caad3"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.903228 4558 reconciler_common.go:293] "Volume detached for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/f4affc06-2032-4a14-8422-3c3ca984eada-kube-state-metrics-tls-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.903244 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/321b2cf0-a7dd-4339-ab97-6327272614b2-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.903256 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/321b2cf0-a7dd-4339-ab97-6327272614b2-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.903266 4558 reconciler_common.go:293] "Volume detached for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/f4affc06-2032-4a14-8422-3c3ca984eada-kube-state-metrics-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.903639 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5bca4373-5f64-4803-94e7-28deeb5caad3-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "5bca4373-5f64-4803-94e7-28deeb5caad3" (UID: "5bca4373-5f64-4803-94e7-28deeb5caad3"). InnerVolumeSpecName "config-data-generated". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.904617 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5bca4373-5f64-4803-94e7-28deeb5caad3-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "5bca4373-5f64-4803-94e7-28deeb5caad3" (UID: "5bca4373-5f64-4803-94e7-28deeb5caad3"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.906226 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5bca4373-5f64-4803-94e7-28deeb5caad3-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "5bca4373-5f64-4803-94e7-28deeb5caad3" (UID: "5bca4373-5f64-4803-94e7-28deeb5caad3"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.914885 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5bca4373-5f64-4803-94e7-28deeb5caad3-kube-api-access-g6qhw" (OuterVolumeSpecName: "kube-api-access-g6qhw") pod "5bca4373-5f64-4803-94e7-28deeb5caad3" (UID: "5bca4373-5f64-4803-94e7-28deeb5caad3"). InnerVolumeSpecName "kube-api-access-g6qhw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.918369 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/423502e9-61bd-4da9-953c-48ab84b4f004-kube-api-access-s4rpp" (OuterVolumeSpecName: "kube-api-access-s4rpp") pod "423502e9-61bd-4da9-953c-48ab84b4f004" (UID: "423502e9-61bd-4da9-953c-48ab84b4f004"). InnerVolumeSpecName "kube-api-access-s4rpp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.932400 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage15-crc" (OuterVolumeSpecName: "mysql-db") pod "5bca4373-5f64-4803-94e7-28deeb5caad3" (UID: "5bca4373-5f64-4803-94e7-28deeb5caad3"). InnerVolumeSpecName "local-storage15-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.959859 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/423502e9-61bd-4da9-953c-48ab84b4f004-config-data" (OuterVolumeSpecName: "config-data") pod "423502e9-61bd-4da9-953c-48ab84b4f004" (UID: "423502e9-61bd-4da9-953c-48ab84b4f004"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.960151 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5bca4373-5f64-4803-94e7-28deeb5caad3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5bca4373-5f64-4803-94e7-28deeb5caad3" (UID: "5bca4373-5f64-4803-94e7-28deeb5caad3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.967912 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/423502e9-61bd-4da9-953c-48ab84b4f004-nova-novncproxy-tls-certs" (OuterVolumeSpecName: "nova-novncproxy-tls-certs") pod "423502e9-61bd-4da9-953c-48ab84b4f004" (UID: "423502e9-61bd-4da9-953c-48ab84b4f004"). 
InnerVolumeSpecName "nova-novncproxy-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.972102 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/423502e9-61bd-4da9-953c-48ab84b4f004-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "423502e9-61bd-4da9-953c-48ab84b4f004" (UID: "423502e9-61bd-4da9-953c-48ab84b4f004"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.974338 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/423502e9-61bd-4da9-953c-48ab84b4f004-vencrypt-tls-certs" (OuterVolumeSpecName: "vencrypt-tls-certs") pod "423502e9-61bd-4da9-953c-48ab84b4f004" (UID: "423502e9-61bd-4da9-953c-48ab84b4f004"). InnerVolumeSpecName "vencrypt-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.976616 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5bca4373-5f64-4803-94e7-28deeb5caad3-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "5bca4373-5f64-4803-94e7-28deeb5caad3" (UID: "5bca4373-5f64-4803-94e7-28deeb5caad3"). InnerVolumeSpecName "galera-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.997497 4558 generic.go:334] "Generic (PLEG): container finished" podID="ff0d0703-8173-4ac0-afc0-e673feaef286" containerID="eebdfbd12d120bd4d4adeb0cbba616d24ce5f4438b3c54b84c67c895df587e6d" exitCode=143 Jan 20 17:45:35 crc kubenswrapper[4558]: I0120 17:45:35.997567 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"ff0d0703-8173-4ac0-afc0-e673feaef286","Type":"ContainerDied","Data":"eebdfbd12d120bd4d4adeb0cbba616d24ce5f4438b3c54b84c67c895df587e6d"} Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.000495 4558 generic.go:334] "Generic (PLEG): container finished" podID="321b2cf0-a7dd-4339-ab97-6327272614b2" containerID="649e44f608af14cfa1fc698291c5750acd272dbe43c21d8bf64cbc026551b71b" exitCode=0 Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.000518 4558 generic.go:334] "Generic (PLEG): container finished" podID="321b2cf0-a7dd-4339-ab97-6327272614b2" containerID="d555fffc777c20179c07649221e67cf87d58b574768c17302a0db151ff0fe7f8" exitCode=0 Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.000562 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-6645dd7478-nfmw9" event={"ID":"321b2cf0-a7dd-4339-ab97-6327272614b2","Type":"ContainerDied","Data":"649e44f608af14cfa1fc698291c5750acd272dbe43c21d8bf64cbc026551b71b"} Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.000598 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-6645dd7478-nfmw9" event={"ID":"321b2cf0-a7dd-4339-ab97-6327272614b2","Type":"ContainerDied","Data":"d555fffc777c20179c07649221e67cf87d58b574768c17302a0db151ff0fe7f8"} Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.000612 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-6645dd7478-nfmw9" event={"ID":"321b2cf0-a7dd-4339-ab97-6327272614b2","Type":"ContainerDied","Data":"d4f96769815610a64430516a58fff20bd40a14044f4b04afdee697d28c6ab063"} Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 
17:45:36.000633 4558 scope.go:117] "RemoveContainer" containerID="649e44f608af14cfa1fc698291c5750acd272dbe43c21d8bf64cbc026551b71b" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.000832 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-proxy-6645dd7478-nfmw9" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.005741 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/5bca4373-5f64-4803-94e7-28deeb5caad3-config-data-default\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.005762 4558 reconciler_common.go:293] "Volume detached for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/423502e9-61bd-4da9-953c-48ab84b4f004-nova-novncproxy-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.005778 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4rpp\" (UniqueName: \"kubernetes.io/projected/423502e9-61bd-4da9-953c-48ab84b4f004-kube-api-access-s4rpp\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.005810 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") on node \"crc\" " Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.005822 4558 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5bca4373-5f64-4803-94e7-28deeb5caad3-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.005834 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/423502e9-61bd-4da9-953c-48ab84b4f004-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.005844 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/5bca4373-5f64-4803-94e7-28deeb5caad3-config-data-generated\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.005853 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/423502e9-61bd-4da9-953c-48ab84b4f004-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.005864 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5bca4373-5f64-4803-94e7-28deeb5caad3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.005875 4558 reconciler_common.go:293] "Volume detached for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/423502e9-61bd-4da9-953c-48ab84b4f004-vencrypt-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.005884 4558 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/5bca4373-5f64-4803-94e7-28deeb5caad3-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.005895 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g6qhw\" (UniqueName: 
\"kubernetes.io/projected/5bca4373-5f64-4803-94e7-28deeb5caad3-kube-api-access-g6qhw\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.005905 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5bca4373-5f64-4803-94e7-28deeb5caad3-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.007959 4558 generic.go:334] "Generic (PLEG): container finished" podID="70182fd8-a242-40f5-b20d-8ff4dd33e9b1" containerID="69302d70e78ca0d36f4a72bfbbc9246b0e5fece0f0fdbf172a8875985eda6a7c" exitCode=143 Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.008024 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"70182fd8-a242-40f5-b20d-8ff4dd33e9b1","Type":"ContainerDied","Data":"69302d70e78ca0d36f4a72bfbbc9246b0e5fece0f0fdbf172a8875985eda6a7c"} Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.011983 4558 generic.go:334] "Generic (PLEG): container finished" podID="2c77f6fd-736b-49e4-aa1c-ddffa451f3dc" containerID="245499fc8d9dd24531eb844ac444830c3cbb3a8f2341bb67bd362bf2fe1577af" exitCode=0 Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.012010 4558 generic.go:334] "Generic (PLEG): container finished" podID="2c77f6fd-736b-49e4-aa1c-ddffa451f3dc" containerID="720bb601be9d0a73599645a6bba6fa27fea9bddfb2119833260b2abb7695bc2c" exitCode=2 Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.012020 4558 generic.go:334] "Generic (PLEG): container finished" podID="2c77f6fd-736b-49e4-aa1c-ddffa451f3dc" containerID="592aa64db3a21b522ec99255c04163435c7f2ca3c55140c0e0521af26785d506" exitCode=0 Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.012087 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"2c77f6fd-736b-49e4-aa1c-ddffa451f3dc","Type":"ContainerDied","Data":"245499fc8d9dd24531eb844ac444830c3cbb3a8f2341bb67bd362bf2fe1577af"} Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.012116 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"2c77f6fd-736b-49e4-aa1c-ddffa451f3dc","Type":"ContainerDied","Data":"720bb601be9d0a73599645a6bba6fa27fea9bddfb2119833260b2abb7695bc2c"} Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.012131 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"2c77f6fd-736b-49e4-aa1c-ddffa451f3dc","Type":"ContainerDied","Data":"592aa64db3a21b522ec99255c04163435c7f2ca3c55140c0e0521af26785d506"} Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.026457 4558 generic.go:334] "Generic (PLEG): container finished" podID="1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2" containerID="ddb2774f5a646ced66660bb7d2fc7db683928585fea6955ddbda9cb09d41a20e" exitCode=143 Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.026536 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-7975b7ff57-c5czg" event={"ID":"1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2","Type":"ContainerDied","Data":"ddb2774f5a646ced66660bb7d2fc7db683928585fea6955ddbda9cb09d41a20e"} Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.031452 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage15-crc" (UniqueName: "kubernetes.io/local-volume/local-storage15-crc") on node "crc" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.034439 4558 scope.go:117] 
"RemoveContainer" containerID="d555fffc777c20179c07649221e67cf87d58b574768c17302a0db151ff0fe7f8" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.036433 4558 generic.go:334] "Generic (PLEG): container finished" podID="fd5cfc55-8cd9-4a76-87b8-4050b8f1030f" containerID="3ffb7d59fade4765056a811588a9c3857b85a1ef4b82cfda814dd74069ed2bf9" exitCode=143 Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.036497 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-57496b565d-kwb5f" event={"ID":"fd5cfc55-8cd9-4a76-87b8-4050b8f1030f","Type":"ContainerDied","Data":"3ffb7d59fade4765056a811588a9c3857b85a1ef4b82cfda814dd74069ed2bf9"} Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.051234 4558 generic.go:334] "Generic (PLEG): container finished" podID="d7f1ecf4-9630-44e7-a627-cc63de361385" containerID="7286e2ef42d2b070bcebf08a04bdbce2188bb19a50dbfcd899a140b4bd8fc836" exitCode=0 Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.051303 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-2lft9" event={"ID":"d7f1ecf4-9630-44e7-a627-cc63de361385","Type":"ContainerDied","Data":"7286e2ef42d2b070bcebf08a04bdbce2188bb19a50dbfcd899a140b4bd8fc836"} Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.053073 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-2lft9" event={"ID":"d7f1ecf4-9630-44e7-a627-cc63de361385","Type":"ContainerStarted","Data":"5fd11ab40b18250ee0480851315eaa81fc8b968068accc8994df3c21d7aa3474"} Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.053095 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-proxy-6645dd7478-nfmw9"] Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.055312 4558 generic.go:334] "Generic (PLEG): container finished" podID="f4affc06-2032-4a14-8422-3c3ca984eada" containerID="3550557600f4dd7782ea6add771b6cef4f6b4aa0a7058c39c4e8a2f7f0f9a980" exitCode=2 Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.055453 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"f4affc06-2032-4a14-8422-3c3ca984eada","Type":"ContainerDied","Data":"3550557600f4dd7782ea6add771b6cef4f6b4aa0a7058c39c4e8a2f7f0f9a980"} Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.055553 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"f4affc06-2032-4a14-8422-3c3ca984eada","Type":"ContainerDied","Data":"639f935172ba8b884326d86075b718cbd7e3189750b38bbae2060f934ee19043"} Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.055678 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.058897 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/swift-proxy-6645dd7478-nfmw9"] Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.059993 4558 generic.go:334] "Generic (PLEG): container finished" podID="7faabe16-9de5-49dc-bab6-44e173f4403c" containerID="3988e808e906ec72c1e2c7130ece7e18110316238273aacbead8355ff8099aa5" exitCode=0 Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.060067 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"7faabe16-9de5-49dc-bab6-44e173f4403c","Type":"ContainerDied","Data":"3988e808e906ec72c1e2c7130ece7e18110316238273aacbead8355ff8099aa5"} Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.061739 4558 generic.go:334] "Generic (PLEG): container finished" podID="d619cd37-a474-4965-b382-749ed6d55d6d" containerID="985673ff538dfdfff39427adadbf62ca67de938e3a41cbb308d4b6035782e705" exitCode=143 Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.061782 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-74bb4f5f4-5ppmg" event={"ID":"d619cd37-a474-4965-b382-749ed6d55d6d","Type":"ContainerDied","Data":"985673ff538dfdfff39427adadbf62ca67de938e3a41cbb308d4b6035782e705"} Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.063003 4558 scope.go:117] "RemoveContainer" containerID="649e44f608af14cfa1fc698291c5750acd272dbe43c21d8bf64cbc026551b71b" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.063427 4558 generic.go:334] "Generic (PLEG): container finished" podID="423502e9-61bd-4da9-953c-48ab84b4f004" containerID="5bb50dc1e160060e179121c814e65b7d5b022719607f19c151a7253e11a56151" exitCode=0 Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.063467 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"423502e9-61bd-4da9-953c-48ab84b4f004","Type":"ContainerDied","Data":"5bb50dc1e160060e179121c814e65b7d5b022719607f19c151a7253e11a56151"} Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.063484 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"423502e9-61bd-4da9-953c-48ab84b4f004","Type":"ContainerDied","Data":"e265a3027052578336421a314a64da3b23502ab81bfed63b767f27088c202463"} Jan 20 17:45:36 crc kubenswrapper[4558]: E0120 17:45:36.063526 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"649e44f608af14cfa1fc698291c5750acd272dbe43c21d8bf64cbc026551b71b\": container with ID starting with 649e44f608af14cfa1fc698291c5750acd272dbe43c21d8bf64cbc026551b71b not found: ID does not exist" containerID="649e44f608af14cfa1fc698291c5750acd272dbe43c21d8bf64cbc026551b71b" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.063548 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"649e44f608af14cfa1fc698291c5750acd272dbe43c21d8bf64cbc026551b71b"} err="failed to get container status \"649e44f608af14cfa1fc698291c5750acd272dbe43c21d8bf64cbc026551b71b\": rpc error: code = NotFound desc = could not find container \"649e44f608af14cfa1fc698291c5750acd272dbe43c21d8bf64cbc026551b71b\": container with ID starting with 649e44f608af14cfa1fc698291c5750acd272dbe43c21d8bf64cbc026551b71b not found: ID does not exist" Jan 20 17:45:36 crc 
kubenswrapper[4558]: I0120 17:45:36.063569 4558 scope.go:117] "RemoveContainer" containerID="d555fffc777c20179c07649221e67cf87d58b574768c17302a0db151ff0fe7f8" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.063620 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:45:36 crc kubenswrapper[4558]: E0120 17:45:36.064833 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d555fffc777c20179c07649221e67cf87d58b574768c17302a0db151ff0fe7f8\": container with ID starting with d555fffc777c20179c07649221e67cf87d58b574768c17302a0db151ff0fe7f8 not found: ID does not exist" containerID="d555fffc777c20179c07649221e67cf87d58b574768c17302a0db151ff0fe7f8" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.064869 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d555fffc777c20179c07649221e67cf87d58b574768c17302a0db151ff0fe7f8"} err="failed to get container status \"d555fffc777c20179c07649221e67cf87d58b574768c17302a0db151ff0fe7f8\": rpc error: code = NotFound desc = could not find container \"d555fffc777c20179c07649221e67cf87d58b574768c17302a0db151ff0fe7f8\": container with ID starting with d555fffc777c20179c07649221e67cf87d58b574768c17302a0db151ff0fe7f8 not found: ID does not exist" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.064897 4558 scope.go:117] "RemoveContainer" containerID="649e44f608af14cfa1fc698291c5750acd272dbe43c21d8bf64cbc026551b71b" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.065245 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"649e44f608af14cfa1fc698291c5750acd272dbe43c21d8bf64cbc026551b71b"} err="failed to get container status \"649e44f608af14cfa1fc698291c5750acd272dbe43c21d8bf64cbc026551b71b\": rpc error: code = NotFound desc = could not find container \"649e44f608af14cfa1fc698291c5750acd272dbe43c21d8bf64cbc026551b71b\": container with ID starting with 649e44f608af14cfa1fc698291c5750acd272dbe43c21d8bf64cbc026551b71b not found: ID does not exist" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.065274 4558 scope.go:117] "RemoveContainer" containerID="d555fffc777c20179c07649221e67cf87d58b574768c17302a0db151ff0fe7f8" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.065808 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d555fffc777c20179c07649221e67cf87d58b574768c17302a0db151ff0fe7f8"} err="failed to get container status \"d555fffc777c20179c07649221e67cf87d58b574768c17302a0db151ff0fe7f8\": rpc error: code = NotFound desc = could not find container \"d555fffc777c20179c07649221e67cf87d58b574768c17302a0db151ff0fe7f8\": container with ID starting with d555fffc777c20179c07649221e67cf87d58b574768c17302a0db151ff0fe7f8 not found: ID does not exist" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.065837 4558 scope.go:117] "RemoveContainer" containerID="3550557600f4dd7782ea6add771b6cef4f6b4aa0a7058c39c4e8a2f7f0f9a980" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.068109 4558 generic.go:334] "Generic (PLEG): container finished" podID="5bca4373-5f64-4803-94e7-28deeb5caad3" containerID="7905c465186edb142be037f724712184999953459f1d4e54418a375cbfe42e59" exitCode=0 Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.068902 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.069352 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"5bca4373-5f64-4803-94e7-28deeb5caad3","Type":"ContainerDied","Data":"7905c465186edb142be037f724712184999953459f1d4e54418a375cbfe42e59"} Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.069395 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"5bca4373-5f64-4803-94e7-28deeb5caad3","Type":"ContainerDied","Data":"425b81dc5be769f40d2f4467240b30bd8fda9e28ac5df99280cec7c1c36eee5b"} Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.073492 4558 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openstack-kuttl-tests/root-account-create-update-fhpsv" secret="" err="secret \"galera-openstack-cell1-dockercfg-mk4rp\" not found" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.073684 4558 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openstack-kuttl-tests/nova-cell1-51af-account-create-update-52f7p" secret="" err="secret \"galera-openstack-cell1-dockercfg-mk4rp\" not found" Jan 20 17:45:36 crc kubenswrapper[4558]: E0120 17:45:36.110604 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:45:36 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:45:36 crc kubenswrapper[4558]: Jan 20 17:45:36 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:45:36 crc kubenswrapper[4558]: Jan 20 17:45:36 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:45:36 crc kubenswrapper[4558]: Jan 20 17:45:36 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:45:36 crc kubenswrapper[4558]: Jan 20 17:45:36 crc kubenswrapper[4558]: if [ -n "nova_cell1" ]; then Jan 20 17:45:36 crc kubenswrapper[4558]: GRANT_DATABASE="nova_cell1" Jan 20 17:45:36 crc kubenswrapper[4558]: else Jan 20 17:45:36 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:45:36 crc kubenswrapper[4558]: fi Jan 20 17:45:36 crc kubenswrapper[4558]: Jan 20 17:45:36 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:45:36 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:45:36 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:45:36 crc kubenswrapper[4558]: # 3. 
create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:45:36 crc kubenswrapper[4558]: # support updates Jan 20 17:45:36 crc kubenswrapper[4558]: Jan 20 17:45:36 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:45:36 crc kubenswrapper[4558]: E0120 17:45:36.110868 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:45:36 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:45:36 crc kubenswrapper[4558]: Jan 20 17:45:36 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:45:36 crc kubenswrapper[4558]: Jan 20 17:45:36 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:45:36 crc kubenswrapper[4558]: Jan 20 17:45:36 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:45:36 crc kubenswrapper[4558]: Jan 20 17:45:36 crc kubenswrapper[4558]: if [ -n "" ]; then Jan 20 17:45:36 crc kubenswrapper[4558]: GRANT_DATABASE="" Jan 20 17:45:36 crc kubenswrapper[4558]: else Jan 20 17:45:36 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:45:36 crc kubenswrapper[4558]: fi Jan 20 17:45:36 crc kubenswrapper[4558]: Jan 20 17:45:36 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:45:36 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:45:36 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:45:36 crc kubenswrapper[4558]: # 3. create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:45:36 crc kubenswrapper[4558]: # support updates Jan 20 17:45:36 crc kubenswrapper[4558]: Jan 20 17:45:36 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:45:36 crc kubenswrapper[4558]: E0120 17:45:36.116359 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"openstack-cell1-mariadb-root-db-secret\\\" not found\"" pod="openstack-kuttl-tests/root-account-create-update-fhpsv" podUID="03ca5cbb-ca72-46f1-a44f-42ac0fb7cf17" Jan 20 17:45:36 crc kubenswrapper[4558]: E0120 17:45:36.116423 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"nova-cell1-db-secret\\\" not found\"" pod="openstack-kuttl-tests/nova-cell1-51af-account-create-update-52f7p" podUID="89fd10f3-4598-47c6-ad0e-59aa59d41894" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.118855 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.141400 4558 scope.go:117] "RemoveContainer" containerID="3550557600f4dd7782ea6add771b6cef4f6b4aa0a7058c39c4e8a2f7f0f9a980" Jan 20 17:45:36 crc kubenswrapper[4558]: E0120 17:45:36.143087 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3550557600f4dd7782ea6add771b6cef4f6b4aa0a7058c39c4e8a2f7f0f9a980\": container with ID starting with 
3550557600f4dd7782ea6add771b6cef4f6b4aa0a7058c39c4e8a2f7f0f9a980 not found: ID does not exist" containerID="3550557600f4dd7782ea6add771b6cef4f6b4aa0a7058c39c4e8a2f7f0f9a980" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.143118 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3550557600f4dd7782ea6add771b6cef4f6b4aa0a7058c39c4e8a2f7f0f9a980"} err="failed to get container status \"3550557600f4dd7782ea6add771b6cef4f6b4aa0a7058c39c4e8a2f7f0f9a980\": rpc error: code = NotFound desc = could not find container \"3550557600f4dd7782ea6add771b6cef4f6b4aa0a7058c39c4e8a2f7f0f9a980\": container with ID starting with 3550557600f4dd7782ea6add771b6cef4f6b4aa0a7058c39c4e8a2f7f0f9a980 not found: ID does not exist" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.143137 4558 scope.go:117] "RemoveContainer" containerID="5bb50dc1e160060e179121c814e65b7d5b022719607f19c151a7253e11a56151" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.213355 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.225204 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.262905 4558 scope.go:117] "RemoveContainer" containerID="5bb50dc1e160060e179121c814e65b7d5b022719607f19c151a7253e11a56151" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.263027 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.271219 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.277335 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.277371 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:45:36 crc kubenswrapper[4558]: E0120 17:45:36.279691 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5bb50dc1e160060e179121c814e65b7d5b022719607f19c151a7253e11a56151\": container with ID starting with 5bb50dc1e160060e179121c814e65b7d5b022719607f19c151a7253e11a56151 not found: ID does not exist" containerID="5bb50dc1e160060e179121c814e65b7d5b022719607f19c151a7253e11a56151" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.279723 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5bb50dc1e160060e179121c814e65b7d5b022719607f19c151a7253e11a56151"} err="failed to get container status \"5bb50dc1e160060e179121c814e65b7d5b022719607f19c151a7253e11a56151\": rpc error: code = NotFound desc = could not find container \"5bb50dc1e160060e179121c814e65b7d5b022719607f19c151a7253e11a56151\": container with ID starting with 5bb50dc1e160060e179121c814e65b7d5b022719607f19c151a7253e11a56151 not found: ID does not exist" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.279747 4558 scope.go:117] "RemoveContainer" containerID="7905c465186edb142be037f724712184999953459f1d4e54418a375cbfe42e59" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.337878 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack-kuttl-tests/keystone-6ed6-account-create-update-vb72p"] Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.339227 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.339398 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/memcached-0" podUID="131735a5-8043-40d7-a15d-f0024356e584" containerName="memcached" containerID="cri-o://63bcb2a3acf1f845b98608c468529b05f04d46a72a3434b91049daf4e38d78b3" gracePeriod=30 Jan 20 17:45:36 crc kubenswrapper[4558]: E0120 17:45:36.450953 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-cell1-scripts: configmap "openstack-cell1-scripts" not found Jan 20 17:45:36 crc kubenswrapper[4558]: E0120 17:45:36.451296 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/03ca5cbb-ca72-46f1-a44f-42ac0fb7cf17-operator-scripts podName:03ca5cbb-ca72-46f1-a44f-42ac0fb7cf17 nodeName:}" failed. No retries permitted until 2026-01-20 17:45:38.451269063 +0000 UTC m=+3832.211607030 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/03ca5cbb-ca72-46f1-a44f-42ac0fb7cf17-operator-scripts") pod "root-account-create-update-fhpsv" (UID: "03ca5cbb-ca72-46f1-a44f-42ac0fb7cf17") : configmap "openstack-cell1-scripts" not found Jan 20 17:45:36 crc kubenswrapper[4558]: E0120 17:45:36.456692 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-cell1-scripts: configmap "openstack-cell1-scripts" not found Jan 20 17:45:36 crc kubenswrapper[4558]: E0120 17:45:36.456794 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/89fd10f3-4598-47c6-ad0e-59aa59d41894-operator-scripts podName:89fd10f3-4598-47c6-ad0e-59aa59d41894 nodeName:}" failed. No retries permitted until 2026-01-20 17:45:38.456774524 +0000 UTC m=+3832.217112491 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/89fd10f3-4598-47c6-ad0e-59aa59d41894-operator-scripts") pod "nova-cell1-51af-account-create-update-52f7p" (UID: "89fd10f3-4598-47c6-ad0e-59aa59d41894") : configmap "openstack-cell1-scripts" not found Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.501042 4558 scope.go:117] "RemoveContainer" containerID="0b3041e3ccb3e21db251ca0a2a8a3e1254c945a681f499b2db394f2eaa02be66" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.501790 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-6ed6-account-create-update-vb72p"] Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.510934 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-6ed6-account-create-update-c7zxj"] Jan 20 17:45:36 crc kubenswrapper[4558]: E0120 17:45:36.511391 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6e6a03e-c496-4589-8d51-f6d9e89e14ae" containerName="openstack-network-exporter" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.511413 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6e6a03e-c496-4589-8d51-f6d9e89e14ae" containerName="openstack-network-exporter" Jan 20 17:45:36 crc kubenswrapper[4558]: E0120 17:45:36.511432 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="321b2cf0-a7dd-4339-ab97-6327272614b2" containerName="proxy-httpd" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.511438 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="321b2cf0-a7dd-4339-ab97-6327272614b2" containerName="proxy-httpd" Jan 20 17:45:36 crc kubenswrapper[4558]: E0120 17:45:36.511445 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5bca4373-5f64-4803-94e7-28deeb5caad3" containerName="galera" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.511451 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="5bca4373-5f64-4803-94e7-28deeb5caad3" containerName="galera" Jan 20 17:45:36 crc kubenswrapper[4558]: E0120 17:45:36.511459 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2fb8e70-15aa-4b38-a44d-5f818dfc755c" containerName="ovsdbserver-nb" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.511465 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2fb8e70-15aa-4b38-a44d-5f818dfc755c" containerName="ovsdbserver-nb" Jan 20 17:45:36 crc kubenswrapper[4558]: E0120 17:45:36.511479 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="321b2cf0-a7dd-4339-ab97-6327272614b2" containerName="proxy-server" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.511485 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="321b2cf0-a7dd-4339-ab97-6327272614b2" containerName="proxy-server" Jan 20 17:45:36 crc kubenswrapper[4558]: E0120 17:45:36.511492 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5bca4373-5f64-4803-94e7-28deeb5caad3" containerName="mysql-bootstrap" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.511498 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="5bca4373-5f64-4803-94e7-28deeb5caad3" containerName="mysql-bootstrap" Jan 20 17:45:36 crc kubenswrapper[4558]: E0120 17:45:36.511511 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="423502e9-61bd-4da9-953c-48ab84b4f004" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.511517 4558 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="423502e9-61bd-4da9-953c-48ab84b4f004" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:45:36 crc kubenswrapper[4558]: E0120 17:45:36.511526 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4affc06-2032-4a14-8422-3c3ca984eada" containerName="kube-state-metrics" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.511533 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4affc06-2032-4a14-8422-3c3ca984eada" containerName="kube-state-metrics" Jan 20 17:45:36 crc kubenswrapper[4558]: E0120 17:45:36.511541 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="924d7506-6027-4d31-b57a-19fc787ba356" containerName="openstack-network-exporter" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.511547 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="924d7506-6027-4d31-b57a-19fc787ba356" containerName="openstack-network-exporter" Jan 20 17:45:36 crc kubenswrapper[4558]: E0120 17:45:36.511558 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6e6a03e-c496-4589-8d51-f6d9e89e14ae" containerName="ovn-northd" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.511563 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6e6a03e-c496-4589-8d51-f6d9e89e14ae" containerName="ovn-northd" Jan 20 17:45:36 crc kubenswrapper[4558]: E0120 17:45:36.511584 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2fb8e70-15aa-4b38-a44d-5f818dfc755c" containerName="openstack-network-exporter" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.511590 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2fb8e70-15aa-4b38-a44d-5f818dfc755c" containerName="openstack-network-exporter" Jan 20 17:45:36 crc kubenswrapper[4558]: E0120 17:45:36.511600 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="924d7506-6027-4d31-b57a-19fc787ba356" containerName="ovsdbserver-sb" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.511605 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="924d7506-6027-4d31-b57a-19fc787ba356" containerName="ovsdbserver-sb" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.511800 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d6e6a03e-c496-4589-8d51-f6d9e89e14ae" containerName="ovn-northd" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.511813 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="321b2cf0-a7dd-4339-ab97-6327272614b2" containerName="proxy-server" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.511824 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b2fb8e70-15aa-4b38-a44d-5f818dfc755c" containerName="openstack-network-exporter" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.511831 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4affc06-2032-4a14-8422-3c3ca984eada" containerName="kube-state-metrics" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.511841 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="5bca4373-5f64-4803-94e7-28deeb5caad3" containerName="galera" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.511850 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d6e6a03e-c496-4589-8d51-f6d9e89e14ae" containerName="openstack-network-exporter" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.511856 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="423502e9-61bd-4da9-953c-48ab84b4f004" 
containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.511868 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="924d7506-6027-4d31-b57a-19fc787ba356" containerName="openstack-network-exporter" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.511874 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="924d7506-6027-4d31-b57a-19fc787ba356" containerName="ovsdbserver-sb" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.511882 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b2fb8e70-15aa-4b38-a44d-5f818dfc755c" containerName="ovsdbserver-nb" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.511890 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="321b2cf0-a7dd-4339-ab97-6327272614b2" containerName="proxy-httpd" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.512556 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-6ed6-account-create-update-c7zxj" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.516309 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-db-secret" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.522798 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-6ed6-account-create-update-c7zxj"] Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.532409 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-7ww48"] Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.538455 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-db-sync-pq7q9"] Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.549217 4558 scope.go:117] "RemoveContainer" containerID="7905c465186edb142be037f724712184999953459f1d4e54418a375cbfe42e59" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.549386 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-7ww48"] Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.549443 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-db-sync-pq7q9"] Jan 20 17:45:36 crc kubenswrapper[4558]: E0120 17:45:36.549875 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7905c465186edb142be037f724712184999953459f1d4e54418a375cbfe42e59\": container with ID starting with 7905c465186edb142be037f724712184999953459f1d4e54418a375cbfe42e59 not found: ID does not exist" containerID="7905c465186edb142be037f724712184999953459f1d4e54418a375cbfe42e59" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.549993 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7905c465186edb142be037f724712184999953459f1d4e54418a375cbfe42e59"} err="failed to get container status \"7905c465186edb142be037f724712184999953459f1d4e54418a375cbfe42e59\": rpc error: code = NotFound desc = could not find container \"7905c465186edb142be037f724712184999953459f1d4e54418a375cbfe42e59\": container with ID starting with 7905c465186edb142be037f724712184999953459f1d4e54418a375cbfe42e59 not found: ID does not exist" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.550041 4558 scope.go:117] "RemoveContainer" containerID="0b3041e3ccb3e21db251ca0a2a8a3e1254c945a681f499b2db394f2eaa02be66" Jan 20 17:45:36 
crc kubenswrapper[4558]: E0120 17:45:36.550635 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0b3041e3ccb3e21db251ca0a2a8a3e1254c945a681f499b2db394f2eaa02be66\": container with ID starting with 0b3041e3ccb3e21db251ca0a2a8a3e1254c945a681f499b2db394f2eaa02be66 not found: ID does not exist" containerID="0b3041e3ccb3e21db251ca0a2a8a3e1254c945a681f499b2db394f2eaa02be66" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.550679 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0b3041e3ccb3e21db251ca0a2a8a3e1254c945a681f499b2db394f2eaa02be66"} err="failed to get container status \"0b3041e3ccb3e21db251ca0a2a8a3e1254c945a681f499b2db394f2eaa02be66\": rpc error: code = NotFound desc = could not find container \"0b3041e3ccb3e21db251ca0a2a8a3e1254c945a681f499b2db394f2eaa02be66\": container with ID starting with 0b3041e3ccb3e21db251ca0a2a8a3e1254c945a681f499b2db394f2eaa02be66 not found: ID does not exist" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.552559 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-db-create-hbtdf"] Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.557854 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-f7d8994df-dzxmh"] Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.558247 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/keystone-f7d8994df-dzxmh" podUID="5cbfd26c-728d-420c-9d40-b6f7870cff60" containerName="keystone-api" containerID="cri-o://ffacb5c26d85f9daa0accf5b48ad054e1df3c95a5c4bb991f93025c2fc91b9b0" gracePeriod=30 Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.565190 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.590603 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="227c3622-b4eb-4862-9b18-a90df7ea75b9" path="/var/lib/kubelet/pods/227c3622-b4eb-4862-9b18-a90df7ea75b9/volumes" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.591551 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25182339-1f27-4f20-9fee-e132979e83f3" path="/var/lib/kubelet/pods/25182339-1f27-4f20-9fee-e132979e83f3/volumes" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.592275 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2bfab6e6-f4a2-4948-9715-43c6f13d88fd" path="/var/lib/kubelet/pods/2bfab6e6-f4a2-4948-9715-43c6f13d88fd/volumes" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.593802 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="321b2cf0-a7dd-4339-ab97-6327272614b2" path="/var/lib/kubelet/pods/321b2cf0-a7dd-4339-ab97-6327272614b2/volumes" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.595189 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="423502e9-61bd-4da9-953c-48ab84b4f004" path="/var/lib/kubelet/pods/423502e9-61bd-4da9-953c-48ab84b4f004/volumes" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.596646 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5bca4373-5f64-4803-94e7-28deeb5caad3" path="/var/lib/kubelet/pods/5bca4373-5f64-4803-94e7-28deeb5caad3/volumes" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.598211 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="924d7506-6027-4d31-b57a-19fc787ba356" path="/var/lib/kubelet/pods/924d7506-6027-4d31-b57a-19fc787ba356/volumes" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.598807 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9ccbac5d-4369-44e5-9dfc-add1f7987639" path="/var/lib/kubelet/pods/9ccbac5d-4369-44e5-9dfc-add1f7987639/volumes" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.599372 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b2fb8e70-15aa-4b38-a44d-5f818dfc755c" path="/var/lib/kubelet/pods/b2fb8e70-15aa-4b38-a44d-5f818dfc755c/volumes" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.600931 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c18579d1-589a-4752-9f16-30480da3c14f" path="/var/lib/kubelet/pods/c18579d1-589a-4752-9f16-30480da3c14f/volumes" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.601554 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d6e6a03e-c496-4589-8d51-f6d9e89e14ae" path="/var/lib/kubelet/pods/d6e6a03e-c496-4589-8d51-f6d9e89e14ae/volumes" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.602436 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dc54928b-9e6c-45f3-b991-2c58d2a7592d" path="/var/lib/kubelet/pods/dc54928b-9e6c-45f3-b991-2c58d2a7592d/volumes" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.602975 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e451d734-7167-4b92-8c0b-22b1e3f4ef67" path="/var/lib/kubelet/pods/e451d734-7167-4b92-8c0b-22b1e3f4ef67/volumes" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.603507 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4affc06-2032-4a14-8422-3c3ca984eada" path="/var/lib/kubelet/pods/f4affc06-2032-4a14-8422-3c3ca984eada/volumes" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.603941 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f9b71041-5a81-4d86-9e25-45ecfa08fd58" path="/var/lib/kubelet/pods/f9b71041-5a81-4d86-9e25-45ecfa08fd58/volumes" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.604837 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-db-create-hbtdf"] Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.604867 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-6ed6-account-create-update-c7zxj"] Jan 20 17:45:36 crc kubenswrapper[4558]: E0120 17:45:36.621301 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[kube-api-access-2r6wv operator-scripts], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack-kuttl-tests/keystone-6ed6-account-create-update-c7zxj" podUID="8666f449-91a3-489d-bbc5-e7811619ea74" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.623292 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-12a5-account-create-update-6g9q5" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.675679 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8666f449-91a3-489d-bbc5-e7811619ea74-operator-scripts\") pod \"keystone-6ed6-account-create-update-c7zxj\" (UID: \"8666f449-91a3-489d-bbc5-e7811619ea74\") " pod="openstack-kuttl-tests/keystone-6ed6-account-create-update-c7zxj" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.675847 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2r6wv\" (UniqueName: \"kubernetes.io/projected/8666f449-91a3-489d-bbc5-e7811619ea74-kube-api-access-2r6wv\") pod \"keystone-6ed6-account-create-update-c7zxj\" (UID: \"8666f449-91a3-489d-bbc5-e7811619ea74\") " pod="openstack-kuttl-tests/keystone-6ed6-account-create-update-c7zxj" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.738560 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/openstack-galera-0" podUID="86aff100-d474-48ed-b673-4dae7c0722cf" containerName="galera" containerID="cri-o://f0fb0d6dfc95147b44b7be05faeba0d259d63b173ea99ddb9b134ce40f87cb33" gracePeriod=30 Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.777294 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a8cb168e-9c96-449b-a8d6-bcd8511f1a53-operator-scripts\") pod \"a8cb168e-9c96-449b-a8d6-bcd8511f1a53\" (UID: \"a8cb168e-9c96-449b-a8d6-bcd8511f1a53\") " Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.777575 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qwxtd\" (UniqueName: \"kubernetes.io/projected/a8cb168e-9c96-449b-a8d6-bcd8511f1a53-kube-api-access-qwxtd\") pod \"a8cb168e-9c96-449b-a8d6-bcd8511f1a53\" (UID: \"a8cb168e-9c96-449b-a8d6-bcd8511f1a53\") " Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.777945 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8666f449-91a3-489d-bbc5-e7811619ea74-operator-scripts\") pod \"keystone-6ed6-account-create-update-c7zxj\" (UID: \"8666f449-91a3-489d-bbc5-e7811619ea74\") " pod="openstack-kuttl-tests/keystone-6ed6-account-create-update-c7zxj" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.778064 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2r6wv\" (UniqueName: \"kubernetes.io/projected/8666f449-91a3-489d-bbc5-e7811619ea74-kube-api-access-2r6wv\") pod \"keystone-6ed6-account-create-update-c7zxj\" (UID: \"8666f449-91a3-489d-bbc5-e7811619ea74\") " pod="openstack-kuttl-tests/keystone-6ed6-account-create-update-c7zxj" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.779003 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a8cb168e-9c96-449b-a8d6-bcd8511f1a53-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a8cb168e-9c96-449b-a8d6-bcd8511f1a53" (UID: "a8cb168e-9c96-449b-a8d6-bcd8511f1a53"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:45:36 crc kubenswrapper[4558]: E0120 17:45:36.779559 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 17:45:36 crc kubenswrapper[4558]: E0120 17:45:36.779581 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Jan 20 17:45:36 crc kubenswrapper[4558]: E0120 17:45:36.779647 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/8666f449-91a3-489d-bbc5-e7811619ea74-operator-scripts podName:8666f449-91a3-489d-bbc5-e7811619ea74 nodeName:}" failed. No retries permitted until 2026-01-20 17:45:37.279626033 +0000 UTC m=+3831.039964000 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/8666f449-91a3-489d-bbc5-e7811619ea74-operator-scripts") pod "keystone-6ed6-account-create-update-c7zxj" (UID: "8666f449-91a3-489d-bbc5-e7811619ea74") : configmap "openstack-scripts" not found Jan 20 17:45:36 crc kubenswrapper[4558]: E0120 17:45:36.779720 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/de4d8126-91cf-4149-bec4-4accaf558308-config-data podName:de4d8126-91cf-4149-bec4-4accaf558308 nodeName:}" failed. No retries permitted until 2026-01-20 17:45:40.779700733 +0000 UTC m=+3834.540038700 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/de4d8126-91cf-4149-bec4-4accaf558308-config-data") pod "rabbitmq-cell1-server-0" (UID: "de4d8126-91cf-4149-bec4-4accaf558308") : configmap "rabbitmq-cell1-config-data" not found Jan 20 17:45:36 crc kubenswrapper[4558]: E0120 17:45:36.784564 4558 projected.go:194] Error preparing data for projected volume kube-api-access-2r6wv for pod openstack-kuttl-tests/keystone-6ed6-account-create-update-c7zxj: failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 17:45:36 crc kubenswrapper[4558]: E0120 17:45:36.784905 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/8666f449-91a3-489d-bbc5-e7811619ea74-kube-api-access-2r6wv podName:8666f449-91a3-489d-bbc5-e7811619ea74 nodeName:}" failed. No retries permitted until 2026-01-20 17:45:37.284886874 +0000 UTC m=+3831.045224841 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-2r6wv" (UniqueName: "kubernetes.io/projected/8666f449-91a3-489d-bbc5-e7811619ea74-kube-api-access-2r6wv") pod "keystone-6ed6-account-create-update-c7zxj" (UID: "8666f449-91a3-489d-bbc5-e7811619ea74") : failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.788215 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a8cb168e-9c96-449b-a8d6-bcd8511f1a53-kube-api-access-qwxtd" (OuterVolumeSpecName: "kube-api-access-qwxtd") pod "a8cb168e-9c96-449b-a8d6-bcd8511f1a53" (UID: "a8cb168e-9c96-449b-a8d6-bcd8511f1a53"). InnerVolumeSpecName "kube-api-access-qwxtd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.844136 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-bd94-account-create-update-stnv4" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.856712 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-8c6b-account-create-update-qcnqx" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.863446 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-3b8d-account-create-update-75ws5" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.875390 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-57496b565d-kwb5f" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.882596 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a8cb168e-9c96-449b-a8d6-bcd8511f1a53-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.882648 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qwxtd\" (UniqueName: \"kubernetes.io/projected/a8cb168e-9c96-449b-a8d6-bcd8511f1a53-kube-api-access-qwxtd\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.983787 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d7ca0a7f-fd93-48de-b71b-b23d1aef2af7-operator-scripts\") pod \"d7ca0a7f-fd93-48de-b71b-b23d1aef2af7\" (UID: \"d7ca0a7f-fd93-48de-b71b-b23d1aef2af7\") " Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.983829 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r4mrk\" (UniqueName: \"kubernetes.io/projected/d7ca0a7f-fd93-48de-b71b-b23d1aef2af7-kube-api-access-r4mrk\") pod \"d7ca0a7f-fd93-48de-b71b-b23d1aef2af7\" (UID: \"d7ca0a7f-fd93-48de-b71b-b23d1aef2af7\") " Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.983913 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fd5cfc55-8cd9-4a76-87b8-4050b8f1030f-logs\") pod \"fd5cfc55-8cd9-4a76-87b8-4050b8f1030f\" (UID: \"fd5cfc55-8cd9-4a76-87b8-4050b8f1030f\") " Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.983958 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4tw6s\" (UniqueName: \"kubernetes.io/projected/44f856be-bb11-468b-a157-1f2e2851d04d-kube-api-access-4tw6s\") pod \"44f856be-bb11-468b-a157-1f2e2851d04d\" (UID: \"44f856be-bb11-468b-a157-1f2e2851d04d\") " Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.983995 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ssgsh\" (UniqueName: \"kubernetes.io/projected/fd5cfc55-8cd9-4a76-87b8-4050b8f1030f-kube-api-access-ssgsh\") pod \"fd5cfc55-8cd9-4a76-87b8-4050b8f1030f\" (UID: \"fd5cfc55-8cd9-4a76-87b8-4050b8f1030f\") " Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.984013 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd5cfc55-8cd9-4a76-87b8-4050b8f1030f-config-data\") pod \"fd5cfc55-8cd9-4a76-87b8-4050b8f1030f\" (UID: \"fd5cfc55-8cd9-4a76-87b8-4050b8f1030f\") " Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.984054 4558 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd5cfc55-8cd9-4a76-87b8-4050b8f1030f-combined-ca-bundle\") pod \"fd5cfc55-8cd9-4a76-87b8-4050b8f1030f\" (UID: \"fd5cfc55-8cd9-4a76-87b8-4050b8f1030f\") " Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.984076 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rh7sw\" (UniqueName: \"kubernetes.io/projected/16a1e55d-37ca-4488-adc3-2602e7416d55-kube-api-access-rh7sw\") pod \"16a1e55d-37ca-4488-adc3-2602e7416d55\" (UID: \"16a1e55d-37ca-4488-adc3-2602e7416d55\") " Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.984139 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/16a1e55d-37ca-4488-adc3-2602e7416d55-operator-scripts\") pod \"16a1e55d-37ca-4488-adc3-2602e7416d55\" (UID: \"16a1e55d-37ca-4488-adc3-2602e7416d55\") " Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.984197 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fd5cfc55-8cd9-4a76-87b8-4050b8f1030f-config-data-custom\") pod \"fd5cfc55-8cd9-4a76-87b8-4050b8f1030f\" (UID: \"fd5cfc55-8cd9-4a76-87b8-4050b8f1030f\") " Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.984242 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/44f856be-bb11-468b-a157-1f2e2851d04d-operator-scripts\") pod \"44f856be-bb11-468b-a157-1f2e2851d04d\" (UID: \"44f856be-bb11-468b-a157-1f2e2851d04d\") " Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.984270 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d7ca0a7f-fd93-48de-b71b-b23d1aef2af7-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d7ca0a7f-fd93-48de-b71b-b23d1aef2af7" (UID: "d7ca0a7f-fd93-48de-b71b-b23d1aef2af7"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.984892 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d7ca0a7f-fd93-48de-b71b-b23d1aef2af7-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.986784 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/16a1e55d-37ca-4488-adc3-2602e7416d55-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "16a1e55d-37ca-4488-adc3-2602e7416d55" (UID: "16a1e55d-37ca-4488-adc3-2602e7416d55"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.986783 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fd5cfc55-8cd9-4a76-87b8-4050b8f1030f-logs" (OuterVolumeSpecName: "logs") pod "fd5cfc55-8cd9-4a76-87b8-4050b8f1030f" (UID: "fd5cfc55-8cd9-4a76-87b8-4050b8f1030f"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.986818 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/44f856be-bb11-468b-a157-1f2e2851d04d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "44f856be-bb11-468b-a157-1f2e2851d04d" (UID: "44f856be-bb11-468b-a157-1f2e2851d04d"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.988374 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d7ca0a7f-fd93-48de-b71b-b23d1aef2af7-kube-api-access-r4mrk" (OuterVolumeSpecName: "kube-api-access-r4mrk") pod "d7ca0a7f-fd93-48de-b71b-b23d1aef2af7" (UID: "d7ca0a7f-fd93-48de-b71b-b23d1aef2af7"). InnerVolumeSpecName "kube-api-access-r4mrk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.989338 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fd5cfc55-8cd9-4a76-87b8-4050b8f1030f-kube-api-access-ssgsh" (OuterVolumeSpecName: "kube-api-access-ssgsh") pod "fd5cfc55-8cd9-4a76-87b8-4050b8f1030f" (UID: "fd5cfc55-8cd9-4a76-87b8-4050b8f1030f"). InnerVolumeSpecName "kube-api-access-ssgsh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.990311 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44f856be-bb11-468b-a157-1f2e2851d04d-kube-api-access-4tw6s" (OuterVolumeSpecName: "kube-api-access-4tw6s") pod "44f856be-bb11-468b-a157-1f2e2851d04d" (UID: "44f856be-bb11-468b-a157-1f2e2851d04d"). InnerVolumeSpecName "kube-api-access-4tw6s". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.990489 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd5cfc55-8cd9-4a76-87b8-4050b8f1030f-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "fd5cfc55-8cd9-4a76-87b8-4050b8f1030f" (UID: "fd5cfc55-8cd9-4a76-87b8-4050b8f1030f"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:36 crc kubenswrapper[4558]: I0120 17:45:36.991253 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/16a1e55d-37ca-4488-adc3-2602e7416d55-kube-api-access-rh7sw" (OuterVolumeSpecName: "kube-api-access-rh7sw") pod "16a1e55d-37ca-4488-adc3-2602e7416d55" (UID: "16a1e55d-37ca-4488-adc3-2602e7416d55"). InnerVolumeSpecName "kube-api-access-rh7sw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.011994 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd5cfc55-8cd9-4a76-87b8-4050b8f1030f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fd5cfc55-8cd9-4a76-87b8-4050b8f1030f" (UID: "fd5cfc55-8cd9-4a76-87b8-4050b8f1030f"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.070042 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd5cfc55-8cd9-4a76-87b8-4050b8f1030f-config-data" (OuterVolumeSpecName: "config-data") pod "fd5cfc55-8cd9-4a76-87b8-4050b8f1030f" (UID: "fd5cfc55-8cd9-4a76-87b8-4050b8f1030f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.079840 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-8c6b-account-create-update-qcnqx" event={"ID":"16a1e55d-37ca-4488-adc3-2602e7416d55","Type":"ContainerDied","Data":"388c5fc5f4d043d14eeaeb654f79dfd70495e80d866450bff22b5151712f8716"} Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.079868 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-8c6b-account-create-update-qcnqx" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.082895 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-3b8d-account-create-update-75ws5" event={"ID":"d7ca0a7f-fd93-48de-b71b-b23d1aef2af7","Type":"ContainerDied","Data":"f6b00365b958d533781cfbfb7fe87400a2e32d0f216458594a27ba81cf37d8d2"} Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.083001 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-3b8d-account-create-update-75ws5" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.089819 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r4mrk\" (UniqueName: \"kubernetes.io/projected/d7ca0a7f-fd93-48de-b71b-b23d1aef2af7-kube-api-access-r4mrk\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.089850 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fd5cfc55-8cd9-4a76-87b8-4050b8f1030f-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.089862 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4tw6s\" (UniqueName: \"kubernetes.io/projected/44f856be-bb11-468b-a157-1f2e2851d04d-kube-api-access-4tw6s\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.089873 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ssgsh\" (UniqueName: \"kubernetes.io/projected/fd5cfc55-8cd9-4a76-87b8-4050b8f1030f-kube-api-access-ssgsh\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.089883 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd5cfc55-8cd9-4a76-87b8-4050b8f1030f-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.089894 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd5cfc55-8cd9-4a76-87b8-4050b8f1030f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.089902 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rh7sw\" (UniqueName: \"kubernetes.io/projected/16a1e55d-37ca-4488-adc3-2602e7416d55-kube-api-access-rh7sw\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.089911 
4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/16a1e55d-37ca-4488-adc3-2602e7416d55-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.089930 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fd5cfc55-8cd9-4a76-87b8-4050b8f1030f-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.089939 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/44f856be-bb11-468b-a157-1f2e2851d04d-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.090971 4558 generic.go:334] "Generic (PLEG): container finished" podID="131735a5-8043-40d7-a15d-f0024356e584" containerID="63bcb2a3acf1f845b98608c468529b05f04d46a72a3434b91049daf4e38d78b3" exitCode=0 Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.091007 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"131735a5-8043-40d7-a15d-f0024356e584","Type":"ContainerDied","Data":"63bcb2a3acf1f845b98608c468529b05f04d46a72a3434b91049daf4e38d78b3"} Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.092430 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-12a5-account-create-update-6g9q5" event={"ID":"a8cb168e-9c96-449b-a8d6-bcd8511f1a53","Type":"ContainerDied","Data":"c2ec65a49b0bd294556601dbcfb95af2053f3b1750bc11886392d0e084111241"} Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.092484 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-12a5-account-create-update-6g9q5" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.094661 4558 generic.go:334] "Generic (PLEG): container finished" podID="fd5cfc55-8cd9-4a76-87b8-4050b8f1030f" containerID="08599f71319860b69b6674beac66044f825719a01bdca428dd565ea148a25bb0" exitCode=0 Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.094717 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-57496b565d-kwb5f" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.094747 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-57496b565d-kwb5f" event={"ID":"fd5cfc55-8cd9-4a76-87b8-4050b8f1030f","Type":"ContainerDied","Data":"08599f71319860b69b6674beac66044f825719a01bdca428dd565ea148a25bb0"} Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.094777 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-57496b565d-kwb5f" event={"ID":"fd5cfc55-8cd9-4a76-87b8-4050b8f1030f","Type":"ContainerDied","Data":"9311298f7cfe04f1831aea792862986c0c1088cce20c3e8d99e421655af78143"} Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.094796 4558 scope.go:117] "RemoveContainer" containerID="08599f71319860b69b6674beac66044f825719a01bdca428dd565ea148a25bb0" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.098195 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-2lft9" event={"ID":"d7f1ecf4-9630-44e7-a627-cc63de361385","Type":"ContainerStarted","Data":"e9587547b29543008af942c8543b23bbceae4368b19e52eef1b4d97570b7dd31"} Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.098345 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-2lft9" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.102540 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-6ed6-account-create-update-c7zxj" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.103222 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-bd94-account-create-update-stnv4" event={"ID":"44f856be-bb11-468b-a157-1f2e2851d04d","Type":"ContainerDied","Data":"69fbfb4b1319c3318255f6bddfd202a219fbe12d20ef86fff4bbc0a61137df21"} Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.103232 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-bd94-account-create-update-stnv4" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.124019 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-6ed6-account-create-update-c7zxj" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.133766 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-2lft9" podStartSLOduration=4.133738128 podStartE2EDuration="4.133738128s" podCreationTimestamp="2026-01-20 17:45:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:45:37.118609994 +0000 UTC m=+3830.878947961" watchObservedRunningTime="2026-01-20 17:45:37.133738128 +0000 UTC m=+3830.894076095" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.134971 4558 scope.go:117] "RemoveContainer" containerID="3ffb7d59fade4765056a811588a9c3857b85a1ef4b82cfda814dd74069ed2bf9" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.177892 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-8c6b-account-create-update-qcnqx"] Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.195229 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/cinder-api-0" podUID="b752af55-9c06-469f-9353-e1042300de3c" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.1.60:8776/healthcheck\": read tcp 10.217.0.2:53308->10.217.1.60:8776: read: connection reset by peer" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.198051 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-8c6b-account-create-update-qcnqx"] Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.203186 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-57496b565d-kwb5f"] Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.207435 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-57496b565d-kwb5f"] Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.229593 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-3b8d-account-create-update-75ws5"] Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.241360 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-3b8d-account-create-update-75ws5"] Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.281093 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-bd94-account-create-update-stnv4"] Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.284976 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.289563 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell0-bd94-account-create-update-stnv4"] Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.294917 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p65cv\" (UniqueName: \"kubernetes.io/projected/131735a5-8043-40d7-a15d-f0024356e584-kube-api-access-p65cv\") pod \"131735a5-8043-40d7-a15d-f0024356e584\" (UID: \"131735a5-8043-40d7-a15d-f0024356e584\") " Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.295133 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/131735a5-8043-40d7-a15d-f0024356e584-memcached-tls-certs\") pod \"131735a5-8043-40d7-a15d-f0024356e584\" (UID: \"131735a5-8043-40d7-a15d-f0024356e584\") " Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.295530 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/131735a5-8043-40d7-a15d-f0024356e584-combined-ca-bundle\") pod \"131735a5-8043-40d7-a15d-f0024356e584\" (UID: \"131735a5-8043-40d7-a15d-f0024356e584\") " Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.295774 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/131735a5-8043-40d7-a15d-f0024356e584-kolla-config\") pod \"131735a5-8043-40d7-a15d-f0024356e584\" (UID: \"131735a5-8043-40d7-a15d-f0024356e584\") " Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.295799 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/131735a5-8043-40d7-a15d-f0024356e584-config-data\") pod \"131735a5-8043-40d7-a15d-f0024356e584\" (UID: \"131735a5-8043-40d7-a15d-f0024356e584\") " Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.296570 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/131735a5-8043-40d7-a15d-f0024356e584-config-data" (OuterVolumeSpecName: "config-data") pod "131735a5-8043-40d7-a15d-f0024356e584" (UID: "131735a5-8043-40d7-a15d-f0024356e584"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.297059 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/131735a5-8043-40d7-a15d-f0024356e584-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "131735a5-8043-40d7-a15d-f0024356e584" (UID: "131735a5-8043-40d7-a15d-f0024356e584"). InnerVolumeSpecName "kolla-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.297646 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8666f449-91a3-489d-bbc5-e7811619ea74-operator-scripts\") pod \"keystone-6ed6-account-create-update-c7zxj\" (UID: \"8666f449-91a3-489d-bbc5-e7811619ea74\") " pod="openstack-kuttl-tests/keystone-6ed6-account-create-update-c7zxj" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.297893 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2r6wv\" (UniqueName: \"kubernetes.io/projected/8666f449-91a3-489d-bbc5-e7811619ea74-kube-api-access-2r6wv\") pod \"keystone-6ed6-account-create-update-c7zxj\" (UID: \"8666f449-91a3-489d-bbc5-e7811619ea74\") " pod="openstack-kuttl-tests/keystone-6ed6-account-create-update-c7zxj" Jan 20 17:45:37 crc kubenswrapper[4558]: E0120 17:45:37.298870 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.298890 4558 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/131735a5-8043-40d7-a15d-f0024356e584-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.298915 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/131735a5-8043-40d7-a15d-f0024356e584-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:37 crc kubenswrapper[4558]: E0120 17:45:37.298944 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/8666f449-91a3-489d-bbc5-e7811619ea74-operator-scripts podName:8666f449-91a3-489d-bbc5-e7811619ea74 nodeName:}" failed. No retries permitted until 2026-01-20 17:45:38.298926892 +0000 UTC m=+3832.059264860 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/8666f449-91a3-489d-bbc5-e7811619ea74-operator-scripts") pod "keystone-6ed6-account-create-update-c7zxj" (UID: "8666f449-91a3-489d-bbc5-e7811619ea74") : configmap "openstack-scripts" not found Jan 20 17:45:37 crc kubenswrapper[4558]: E0120 17:45:37.302027 4558 projected.go:194] Error preparing data for projected volume kube-api-access-2r6wv for pod openstack-kuttl-tests/keystone-6ed6-account-create-update-c7zxj: failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 17:45:37 crc kubenswrapper[4558]: E0120 17:45:37.302123 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/8666f449-91a3-489d-bbc5-e7811619ea74-kube-api-access-2r6wv podName:8666f449-91a3-489d-bbc5-e7811619ea74 nodeName:}" failed. No retries permitted until 2026-01-20 17:45:38.30209566 +0000 UTC m=+3832.062433627 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-2r6wv" (UniqueName: "kubernetes.io/projected/8666f449-91a3-489d-bbc5-e7811619ea74-kube-api-access-2r6wv") pod "keystone-6ed6-account-create-update-c7zxj" (UID: "8666f449-91a3-489d-bbc5-e7811619ea74") : failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 17:45:37 crc kubenswrapper[4558]: E0120 17:45:37.299497 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="4568dafb1cb4a5468a42a5482158e5c167d869bcb8c626b55be898275037c53d" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.314305 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/131735a5-8043-40d7-a15d-f0024356e584-kube-api-access-p65cv" (OuterVolumeSpecName: "kube-api-access-p65cv") pod "131735a5-8043-40d7-a15d-f0024356e584" (UID: "131735a5-8043-40d7-a15d-f0024356e584"). InnerVolumeSpecName "kube-api-access-p65cv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.314881 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-12a5-account-create-update-6g9q5"] Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.321790 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/placement-12a5-account-create-update-6g9q5"] Jan 20 17:45:37 crc kubenswrapper[4558]: E0120 17:45:37.325817 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="4568dafb1cb4a5468a42a5482158e5c167d869bcb8c626b55be898275037c53d" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:45:37 crc kubenswrapper[4558]: E0120 17:45:37.327341 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="4568dafb1cb4a5468a42a5482158e5c167d869bcb8c626b55be898275037c53d" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:45:37 crc kubenswrapper[4558]: E0120 17:45:37.327378 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="50cacddd-ebea-477f-af64-6e96a09a242e" containerName="nova-cell0-conductor-conductor" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.329876 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/131735a5-8043-40d7-a15d-f0024356e584-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "131735a5-8043-40d7-a15d-f0024356e584" (UID: "131735a5-8043-40d7-a15d-f0024356e584"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.351401 4558 scope.go:117] "RemoveContainer" containerID="08599f71319860b69b6674beac66044f825719a01bdca428dd565ea148a25bb0" Jan 20 17:45:37 crc kubenswrapper[4558]: E0120 17:45:37.353206 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"08599f71319860b69b6674beac66044f825719a01bdca428dd565ea148a25bb0\": container with ID starting with 08599f71319860b69b6674beac66044f825719a01bdca428dd565ea148a25bb0 not found: ID does not exist" containerID="08599f71319860b69b6674beac66044f825719a01bdca428dd565ea148a25bb0" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.353259 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"08599f71319860b69b6674beac66044f825719a01bdca428dd565ea148a25bb0"} err="failed to get container status \"08599f71319860b69b6674beac66044f825719a01bdca428dd565ea148a25bb0\": rpc error: code = NotFound desc = could not find container \"08599f71319860b69b6674beac66044f825719a01bdca428dd565ea148a25bb0\": container with ID starting with 08599f71319860b69b6674beac66044f825719a01bdca428dd565ea148a25bb0 not found: ID does not exist" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.353282 4558 scope.go:117] "RemoveContainer" containerID="3ffb7d59fade4765056a811588a9c3857b85a1ef4b82cfda814dd74069ed2bf9" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.356237 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/131735a5-8043-40d7-a15d-f0024356e584-memcached-tls-certs" (OuterVolumeSpecName: "memcached-tls-certs") pod "131735a5-8043-40d7-a15d-f0024356e584" (UID: "131735a5-8043-40d7-a15d-f0024356e584"). InnerVolumeSpecName "memcached-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:37 crc kubenswrapper[4558]: E0120 17:45:37.358966 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3ffb7d59fade4765056a811588a9c3857b85a1ef4b82cfda814dd74069ed2bf9\": container with ID starting with 3ffb7d59fade4765056a811588a9c3857b85a1ef4b82cfda814dd74069ed2bf9 not found: ID does not exist" containerID="3ffb7d59fade4765056a811588a9c3857b85a1ef4b82cfda814dd74069ed2bf9" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.359015 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3ffb7d59fade4765056a811588a9c3857b85a1ef4b82cfda814dd74069ed2bf9"} err="failed to get container status \"3ffb7d59fade4765056a811588a9c3857b85a1ef4b82cfda814dd74069ed2bf9\": rpc error: code = NotFound desc = could not find container \"3ffb7d59fade4765056a811588a9c3857b85a1ef4b82cfda814dd74069ed2bf9\": container with ID starting with 3ffb7d59fade4765056a811588a9c3857b85a1ef4b82cfda814dd74069ed2bf9 not found: ID does not exist" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.401227 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p65cv\" (UniqueName: \"kubernetes.io/projected/131735a5-8043-40d7-a15d-f0024356e584-kube-api-access-p65cv\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.401531 4558 reconciler_common.go:293] "Volume detached for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/131735a5-8043-40d7-a15d-f0024356e584-memcached-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.401543 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/131735a5-8043-40d7-a15d-f0024356e584-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.746459 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-51af-account-create-update-52f7p" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.751841 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-fhpsv" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.762732 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.767147 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.774209 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.810050 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2vzk4\" (UniqueName: \"kubernetes.io/projected/86aff100-d474-48ed-b673-4dae7c0722cf-kube-api-access-2vzk4\") pod \"86aff100-d474-48ed-b673-4dae7c0722cf\" (UID: \"86aff100-d474-48ed-b673-4dae7c0722cf\") " Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.810102 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b752af55-9c06-469f-9353-e1042300de3c-config-data-custom\") pod \"b752af55-9c06-469f-9353-e1042300de3c\" (UID: \"b752af55-9c06-469f-9353-e1042300de3c\") " Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.810144 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"f87490e8-557d-41e3-a07b-8fe12147b315\" (UID: \"f87490e8-557d-41e3-a07b-8fe12147b315\") " Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.810188 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b752af55-9c06-469f-9353-e1042300de3c-internal-tls-certs\") pod \"b752af55-9c06-469f-9353-e1042300de3c\" (UID: \"b752af55-9c06-469f-9353-e1042300de3c\") " Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.810210 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b752af55-9c06-469f-9353-e1042300de3c-etc-machine-id\") pod \"b752af55-9c06-469f-9353-e1042300de3c\" (UID: \"b752af55-9c06-469f-9353-e1042300de3c\") " Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.810248 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/86aff100-d474-48ed-b673-4dae7c0722cf-config-data-generated\") pod \"86aff100-d474-48ed-b673-4dae7c0722cf\" (UID: \"86aff100-d474-48ed-b673-4dae7c0722cf\") " Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.810269 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6q8rh\" (UniqueName: \"kubernetes.io/projected/f87490e8-557d-41e3-a07b-8fe12147b315-kube-api-access-6q8rh\") pod \"f87490e8-557d-41e3-a07b-8fe12147b315\" (UID: \"f87490e8-557d-41e3-a07b-8fe12147b315\") " Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.810294 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b752af55-9c06-469f-9353-e1042300de3c-public-tls-certs\") pod \"b752af55-9c06-469f-9353-e1042300de3c\" (UID: \"b752af55-9c06-469f-9353-e1042300de3c\") " Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.810322 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/86aff100-d474-48ed-b673-4dae7c0722cf-galera-tls-certs\") pod \"86aff100-d474-48ed-b673-4dae7c0722cf\" (UID: \"86aff100-d474-48ed-b673-4dae7c0722cf\") " Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.810349 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/03ca5cbb-ca72-46f1-a44f-42ac0fb7cf17-operator-scripts\") pod 
\"03ca5cbb-ca72-46f1-a44f-42ac0fb7cf17\" (UID: \"03ca5cbb-ca72-46f1-a44f-42ac0fb7cf17\") " Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.810382 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b752af55-9c06-469f-9353-e1042300de3c-logs\") pod \"b752af55-9c06-469f-9353-e1042300de3c\" (UID: \"b752af55-9c06-469f-9353-e1042300de3c\") " Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.810397 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/86aff100-d474-48ed-b673-4dae7c0722cf-kolla-config\") pod \"86aff100-d474-48ed-b673-4dae7c0722cf\" (UID: \"86aff100-d474-48ed-b673-4dae7c0722cf\") " Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.810415 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/86aff100-d474-48ed-b673-4dae7c0722cf-operator-scripts\") pod \"86aff100-d474-48ed-b673-4dae7c0722cf\" (UID: \"86aff100-d474-48ed-b673-4dae7c0722cf\") " Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.810436 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f87490e8-557d-41e3-a07b-8fe12147b315-combined-ca-bundle\") pod \"f87490e8-557d-41e3-a07b-8fe12147b315\" (UID: \"f87490e8-557d-41e3-a07b-8fe12147b315\") " Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.810811 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/86aff100-d474-48ed-b673-4dae7c0722cf-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "86aff100-d474-48ed-b673-4dae7c0722cf" (UID: "86aff100-d474-48ed-b673-4dae7c0722cf"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.811359 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b752af55-9c06-469f-9353-e1042300de3c-logs" (OuterVolumeSpecName: "logs") pod "b752af55-9c06-469f-9353-e1042300de3c" (UID: "b752af55-9c06-469f-9353-e1042300de3c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.811737 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/03ca5cbb-ca72-46f1-a44f-42ac0fb7cf17-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "03ca5cbb-ca72-46f1-a44f-42ac0fb7cf17" (UID: "03ca5cbb-ca72-46f1-a44f-42ac0fb7cf17"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.812254 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/86aff100-d474-48ed-b673-4dae7c0722cf-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "86aff100-d474-48ed-b673-4dae7c0722cf" (UID: "86aff100-d474-48ed-b673-4dae7c0722cf"). InnerVolumeSpecName "kolla-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.812303 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b752af55-9c06-469f-9353-e1042300de3c-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "b752af55-9c06-469f-9353-e1042300de3c" (UID: "b752af55-9c06-469f-9353-e1042300de3c"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.812673 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gl67k\" (UniqueName: \"kubernetes.io/projected/89fd10f3-4598-47c6-ad0e-59aa59d41894-kube-api-access-gl67k\") pod \"89fd10f3-4598-47c6-ad0e-59aa59d41894\" (UID: \"89fd10f3-4598-47c6-ad0e-59aa59d41894\") " Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.812744 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f87490e8-557d-41e3-a07b-8fe12147b315-config-data\") pod \"f87490e8-557d-41e3-a07b-8fe12147b315\" (UID: \"f87490e8-557d-41e3-a07b-8fe12147b315\") " Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.812800 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f87490e8-557d-41e3-a07b-8fe12147b315-logs\") pod \"f87490e8-557d-41e3-a07b-8fe12147b315\" (UID: \"f87490e8-557d-41e3-a07b-8fe12147b315\") " Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.812858 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f87490e8-557d-41e3-a07b-8fe12147b315-internal-tls-certs\") pod \"f87490e8-557d-41e3-a07b-8fe12147b315\" (UID: \"f87490e8-557d-41e3-a07b-8fe12147b315\") " Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.812886 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b752af55-9c06-469f-9353-e1042300de3c-config-data\") pod \"b752af55-9c06-469f-9353-e1042300de3c\" (UID: \"b752af55-9c06-469f-9353-e1042300de3c\") " Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.812914 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f87490e8-557d-41e3-a07b-8fe12147b315-scripts\") pod \"f87490e8-557d-41e3-a07b-8fe12147b315\" (UID: \"f87490e8-557d-41e3-a07b-8fe12147b315\") " Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.812951 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/86aff100-d474-48ed-b673-4dae7c0722cf-combined-ca-bundle\") pod \"86aff100-d474-48ed-b673-4dae7c0722cf\" (UID: \"86aff100-d474-48ed-b673-4dae7c0722cf\") " Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.812981 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b752af55-9c06-469f-9353-e1042300de3c-combined-ca-bundle\") pod \"b752af55-9c06-469f-9353-e1042300de3c\" (UID: \"b752af55-9c06-469f-9353-e1042300de3c\") " Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.813009 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/89fd10f3-4598-47c6-ad0e-59aa59d41894-operator-scripts\") pod \"89fd10f3-4598-47c6-ad0e-59aa59d41894\" (UID: \"89fd10f3-4598-47c6-ad0e-59aa59d41894\") " Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.813045 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"86aff100-d474-48ed-b673-4dae7c0722cf\" (UID: \"86aff100-d474-48ed-b673-4dae7c0722cf\") " Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.813067 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hn2ts\" (UniqueName: \"kubernetes.io/projected/b752af55-9c06-469f-9353-e1042300de3c-kube-api-access-hn2ts\") pod \"b752af55-9c06-469f-9353-e1042300de3c\" (UID: \"b752af55-9c06-469f-9353-e1042300de3c\") " Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.813088 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b752af55-9c06-469f-9353-e1042300de3c-scripts\") pod \"b752af55-9c06-469f-9353-e1042300de3c\" (UID: \"b752af55-9c06-469f-9353-e1042300de3c\") " Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.813106 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/86aff100-d474-48ed-b673-4dae7c0722cf-config-data-default\") pod \"86aff100-d474-48ed-b673-4dae7c0722cf\" (UID: \"86aff100-d474-48ed-b673-4dae7c0722cf\") " Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.813128 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/86aff100-d474-48ed-b673-4dae7c0722cf-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "86aff100-d474-48ed-b673-4dae7c0722cf" (UID: "86aff100-d474-48ed-b673-4dae7c0722cf"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.813142 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wk6rd\" (UniqueName: \"kubernetes.io/projected/03ca5cbb-ca72-46f1-a44f-42ac0fb7cf17-kube-api-access-wk6rd\") pod \"03ca5cbb-ca72-46f1-a44f-42ac0fb7cf17\" (UID: \"03ca5cbb-ca72-46f1-a44f-42ac0fb7cf17\") " Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.813228 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f87490e8-557d-41e3-a07b-8fe12147b315-httpd-run\") pod \"f87490e8-557d-41e3-a07b-8fe12147b315\" (UID: \"f87490e8-557d-41e3-a07b-8fe12147b315\") " Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.814761 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f87490e8-557d-41e3-a07b-8fe12147b315-kube-api-access-6q8rh" (OuterVolumeSpecName: "kube-api-access-6q8rh") pod "f87490e8-557d-41e3-a07b-8fe12147b315" (UID: "f87490e8-557d-41e3-a07b-8fe12147b315"). InnerVolumeSpecName "kube-api-access-6q8rh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.815455 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f87490e8-557d-41e3-a07b-8fe12147b315-logs" (OuterVolumeSpecName: "logs") pod "f87490e8-557d-41e3-a07b-8fe12147b315" (UID: "f87490e8-557d-41e3-a07b-8fe12147b315"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.815851 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/03ca5cbb-ca72-46f1-a44f-42ac0fb7cf17-kube-api-access-wk6rd" (OuterVolumeSpecName: "kube-api-access-wk6rd") pod "03ca5cbb-ca72-46f1-a44f-42ac0fb7cf17" (UID: "03ca5cbb-ca72-46f1-a44f-42ac0fb7cf17"). InnerVolumeSpecName "kube-api-access-wk6rd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.816798 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/86aff100-d474-48ed-b673-4dae7c0722cf-kube-api-access-2vzk4" (OuterVolumeSpecName: "kube-api-access-2vzk4") pod "86aff100-d474-48ed-b673-4dae7c0722cf" (UID: "86aff100-d474-48ed-b673-4dae7c0722cf"). InnerVolumeSpecName "kube-api-access-2vzk4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.817514 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f87490e8-557d-41e3-a07b-8fe12147b315-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "f87490e8-557d-41e3-a07b-8fe12147b315" (UID: "f87490e8-557d-41e3-a07b-8fe12147b315"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.817880 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b752af55-9c06-469f-9353-e1042300de3c-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.817898 4558 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/86aff100-d474-48ed-b673-4dae7c0722cf-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.817909 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/86aff100-d474-48ed-b673-4dae7c0722cf-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.817926 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f87490e8-557d-41e3-a07b-8fe12147b315-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.817937 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wk6rd\" (UniqueName: \"kubernetes.io/projected/03ca5cbb-ca72-46f1-a44f-42ac0fb7cf17-kube-api-access-wk6rd\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.817946 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f87490e8-557d-41e3-a07b-8fe12147b315-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.817956 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2vzk4\" (UniqueName: \"kubernetes.io/projected/86aff100-d474-48ed-b673-4dae7c0722cf-kube-api-access-2vzk4\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.817964 4558 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b752af55-9c06-469f-9353-e1042300de3c-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:37 crc 
kubenswrapper[4558]: I0120 17:45:37.817974 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/86aff100-d474-48ed-b673-4dae7c0722cf-config-data-generated\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.817983 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6q8rh\" (UniqueName: \"kubernetes.io/projected/f87490e8-557d-41e3-a07b-8fe12147b315-kube-api-access-6q8rh\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.817992 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/03ca5cbb-ca72-46f1-a44f-42ac0fb7cf17-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.821703 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/89fd10f3-4598-47c6-ad0e-59aa59d41894-kube-api-access-gl67k" (OuterVolumeSpecName: "kube-api-access-gl67k") pod "89fd10f3-4598-47c6-ad0e-59aa59d41894" (UID: "89fd10f3-4598-47c6-ad0e-59aa59d41894"). InnerVolumeSpecName "kube-api-access-gl67k". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.824850 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/89fd10f3-4598-47c6-ad0e-59aa59d41894-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "89fd10f3-4598-47c6-ad0e-59aa59d41894" (UID: "89fd10f3-4598-47c6-ad0e-59aa59d41894"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.824940 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/86aff100-d474-48ed-b673-4dae7c0722cf-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "86aff100-d474-48ed-b673-4dae7c0722cf" (UID: "86aff100-d474-48ed-b673-4dae7c0722cf"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.825442 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "glance") pod "f87490e8-557d-41e3-a07b-8fe12147b315" (UID: "f87490e8-557d-41e3-a07b-8fe12147b315"). InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.829481 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b752af55-9c06-469f-9353-e1042300de3c-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "b752af55-9c06-469f-9353-e1042300de3c" (UID: "b752af55-9c06-469f-9353-e1042300de3c"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.829839 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b752af55-9c06-469f-9353-e1042300de3c-kube-api-access-hn2ts" (OuterVolumeSpecName: "kube-api-access-hn2ts") pod "b752af55-9c06-469f-9353-e1042300de3c" (UID: "b752af55-9c06-469f-9353-e1042300de3c"). InnerVolumeSpecName "kube-api-access-hn2ts". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.830008 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b752af55-9c06-469f-9353-e1042300de3c-scripts" (OuterVolumeSpecName: "scripts") pod "b752af55-9c06-469f-9353-e1042300de3c" (UID: "b752af55-9c06-469f-9353-e1042300de3c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.850709 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f87490e8-557d-41e3-a07b-8fe12147b315-scripts" (OuterVolumeSpecName: "scripts") pod "f87490e8-557d-41e3-a07b-8fe12147b315" (UID: "f87490e8-557d-41e3-a07b-8fe12147b315"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.856763 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage14-crc" (OuterVolumeSpecName: "mysql-db") pod "86aff100-d474-48ed-b673-4dae7c0722cf" (UID: "86aff100-d474-48ed-b673-4dae7c0722cf"). InnerVolumeSpecName "local-storage14-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.862532 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/nova-metadata-0" podUID="ff0d0703-8173-4ac0-afc0-e673feaef286" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.1.64:8775/\": read tcp 10.217.0.2:53290->10.217.1.64:8775: read: connection reset by peer" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.862568 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/nova-metadata-0" podUID="ff0d0703-8173-4ac0-afc0-e673feaef286" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.1.64:8775/\": read tcp 10.217.0.2:53286->10.217.1.64:8775: read: connection reset by peer" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.869969 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/86aff100-d474-48ed-b673-4dae7c0722cf-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "86aff100-d474-48ed-b673-4dae7c0722cf" (UID: "86aff100-d474-48ed-b673-4dae7c0722cf"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.889682 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-5b5f5cd6d-6rk7z" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.901284 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/86aff100-d474-48ed-b673-4dae7c0722cf-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "86aff100-d474-48ed-b673-4dae7c0722cf" (UID: "86aff100-d474-48ed-b673-4dae7c0722cf"). InnerVolumeSpecName "galera-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.904635 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b752af55-9c06-469f-9353-e1042300de3c-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "b752af55-9c06-469f-9353-e1042300de3c" (UID: "b752af55-9c06-469f-9353-e1042300de3c"). 
InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.907361 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f87490e8-557d-41e3-a07b-8fe12147b315-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f87490e8-557d-41e3-a07b-8fe12147b315" (UID: "f87490e8-557d-41e3-a07b-8fe12147b315"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.911203 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b752af55-9c06-469f-9353-e1042300de3c-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "b752af55-9c06-469f-9353-e1042300de3c" (UID: "b752af55-9c06-469f-9353-e1042300de3c"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.913069 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b752af55-9c06-469f-9353-e1042300de3c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b752af55-9c06-469f-9353-e1042300de3c" (UID: "b752af55-9c06-469f-9353-e1042300de3c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.917769 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f87490e8-557d-41e3-a07b-8fe12147b315-config-data" (OuterVolumeSpecName: "config-data") pod "f87490e8-557d-41e3-a07b-8fe12147b315" (UID: "f87490e8-557d-41e3-a07b-8fe12147b315"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.918967 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/50804fa8-d09d-49f8-a143-d7ec24ec542a-logs\") pod \"50804fa8-d09d-49f8-a143-d7ec24ec542a\" (UID: \"50804fa8-d09d-49f8-a143-d7ec24ec542a\") " Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.919146 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/50804fa8-d09d-49f8-a143-d7ec24ec542a-scripts\") pod \"50804fa8-d09d-49f8-a143-d7ec24ec542a\" (UID: \"50804fa8-d09d-49f8-a143-d7ec24ec542a\") " Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.919223 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/50804fa8-d09d-49f8-a143-d7ec24ec542a-internal-tls-certs\") pod \"50804fa8-d09d-49f8-a143-d7ec24ec542a\" (UID: \"50804fa8-d09d-49f8-a143-d7ec24ec542a\") " Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.919310 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50804fa8-d09d-49f8-a143-d7ec24ec542a-combined-ca-bundle\") pod \"50804fa8-d09d-49f8-a143-d7ec24ec542a\" (UID: \"50804fa8-d09d-49f8-a143-d7ec24ec542a\") " Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.919374 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f856q\" (UniqueName: \"kubernetes.io/projected/50804fa8-d09d-49f8-a143-d7ec24ec542a-kube-api-access-f856q\") pod \"50804fa8-d09d-49f8-a143-d7ec24ec542a\" (UID: \"50804fa8-d09d-49f8-a143-d7ec24ec542a\") " Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.919404 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50804fa8-d09d-49f8-a143-d7ec24ec542a-config-data\") pod \"50804fa8-d09d-49f8-a143-d7ec24ec542a\" (UID: \"50804fa8-d09d-49f8-a143-d7ec24ec542a\") " Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.919449 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/50804fa8-d09d-49f8-a143-d7ec24ec542a-public-tls-certs\") pod \"50804fa8-d09d-49f8-a143-d7ec24ec542a\" (UID: \"50804fa8-d09d-49f8-a143-d7ec24ec542a\") " Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.919867 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/86aff100-d474-48ed-b673-4dae7c0722cf-config-data-default\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.919884 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b752af55-9c06-469f-9353-e1042300de3c-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.919906 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.919915 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b752af55-9c06-469f-9353-e1042300de3c-internal-tls-certs\") 
on node \"crc\" DevicePath \"\"" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.919933 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b752af55-9c06-469f-9353-e1042300de3c-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.919943 4558 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/86aff100-d474-48ed-b673-4dae7c0722cf-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.919955 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f87490e8-557d-41e3-a07b-8fe12147b315-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.919965 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gl67k\" (UniqueName: \"kubernetes.io/projected/89fd10f3-4598-47c6-ad0e-59aa59d41894-kube-api-access-gl67k\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.919975 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f87490e8-557d-41e3-a07b-8fe12147b315-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.919983 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f87490e8-557d-41e3-a07b-8fe12147b315-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.919992 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/86aff100-d474-48ed-b673-4dae7c0722cf-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.920000 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b752af55-9c06-469f-9353-e1042300de3c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.920009 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/89fd10f3-4598-47c6-ad0e-59aa59d41894-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.920023 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") on node \"crc\" " Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.920032 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hn2ts\" (UniqueName: \"kubernetes.io/projected/b752af55-9c06-469f-9353-e1042300de3c-kube-api-access-hn2ts\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.920039 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b752af55-9c06-469f-9353-e1042300de3c-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.920583 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/50804fa8-d09d-49f8-a143-d7ec24ec542a-logs" (OuterVolumeSpecName: "logs") pod "50804fa8-d09d-49f8-a143-d7ec24ec542a" (UID: 
"50804fa8-d09d-49f8-a143-d7ec24ec542a"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.929031 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/50804fa8-d09d-49f8-a143-d7ec24ec542a-kube-api-access-f856q" (OuterVolumeSpecName: "kube-api-access-f856q") pod "50804fa8-d09d-49f8-a143-d7ec24ec542a" (UID: "50804fa8-d09d-49f8-a143-d7ec24ec542a"). InnerVolumeSpecName "kube-api-access-f856q". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.933355 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/50804fa8-d09d-49f8-a143-d7ec24ec542a-scripts" (OuterVolumeSpecName: "scripts") pod "50804fa8-d09d-49f8-a143-d7ec24ec542a" (UID: "50804fa8-d09d-49f8-a143-d7ec24ec542a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.934868 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage14-crc" (UniqueName: "kubernetes.io/local-volume/local-storage14-crc") on node "crc" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.937403 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b752af55-9c06-469f-9353-e1042300de3c-config-data" (OuterVolumeSpecName: "config-data") pod "b752af55-9c06-469f-9353-e1042300de3c" (UID: "b752af55-9c06-469f-9353-e1042300de3c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.938541 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f87490e8-557d-41e3-a07b-8fe12147b315-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "f87490e8-557d-41e3-a07b-8fe12147b315" (UID: "f87490e8-557d-41e3-a07b-8fe12147b315"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.948516 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.965863 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/50804fa8-d09d-49f8-a143-d7ec24ec542a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "50804fa8-d09d-49f8-a143-d7ec24ec542a" (UID: "50804fa8-d09d-49f8-a143-d7ec24ec542a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:37 crc kubenswrapper[4558]: I0120 17:45:37.973376 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/50804fa8-d09d-49f8-a143-d7ec24ec542a-config-data" (OuterVolumeSpecName: "config-data") pod "50804fa8-d09d-49f8-a143-d7ec24ec542a" (UID: "50804fa8-d09d-49f8-a143-d7ec24ec542a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.000796 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/50804fa8-d09d-49f8-a143-d7ec24ec542a-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "50804fa8-d09d-49f8-a143-d7ec24ec542a" (UID: "50804fa8-d09d-49f8-a143-d7ec24ec542a"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.014825 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/50804fa8-d09d-49f8-a143-d7ec24ec542a-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "50804fa8-d09d-49f8-a143-d7ec24ec542a" (UID: "50804fa8-d09d-49f8-a143-d7ec24ec542a"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.022585 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/50804fa8-d09d-49f8-a143-d7ec24ec542a-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.022615 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.022627 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50804fa8-d09d-49f8-a143-d7ec24ec542a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.022653 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f856q\" (UniqueName: \"kubernetes.io/projected/50804fa8-d09d-49f8-a143-d7ec24ec542a-kube-api-access-f856q\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.022663 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50804fa8-d09d-49f8-a143-d7ec24ec542a-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.022673 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/50804fa8-d09d-49f8-a143-d7ec24ec542a-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.022680 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/50804fa8-d09d-49f8-a143-d7ec24ec542a-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.022689 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f87490e8-557d-41e3-a07b-8fe12147b315-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.022698 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b752af55-9c06-469f-9353-e1042300de3c-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.022707 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") on node \"crc\" 
DevicePath \"\"" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.022731 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/50804fa8-d09d-49f8-a143-d7ec24ec542a-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.028282 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.123532 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-51af-account-create-update-52f7p" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.123571 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-51af-account-create-update-52f7p" event={"ID":"89fd10f3-4598-47c6-ad0e-59aa59d41894","Type":"ContainerDied","Data":"7476dbbb0b15ae786d05551c50a9dfde849a39e3f040548072e414cf83856c34"} Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.124244 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/56173817-8246-4f37-b157-3890912004ca-logs\") pod \"56173817-8246-4f37-b157-3890912004ca\" (UID: \"56173817-8246-4f37-b157-3890912004ca\") " Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.124340 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/56173817-8246-4f37-b157-3890912004ca-scripts\") pod \"56173817-8246-4f37-b157-3890912004ca\" (UID: \"56173817-8246-4f37-b157-3890912004ca\") " Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.124373 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56173817-8246-4f37-b157-3890912004ca-combined-ca-bundle\") pod \"56173817-8246-4f37-b157-3890912004ca\" (UID: \"56173817-8246-4f37-b157-3890912004ca\") " Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.124421 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/56173817-8246-4f37-b157-3890912004ca-public-tls-certs\") pod \"56173817-8246-4f37-b157-3890912004ca\" (UID: \"56173817-8246-4f37-b157-3890912004ca\") " Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.124487 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56173817-8246-4f37-b157-3890912004ca-config-data\") pod \"56173817-8246-4f37-b157-3890912004ca\" (UID: \"56173817-8246-4f37-b157-3890912004ca\") " Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.124591 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"56173817-8246-4f37-b157-3890912004ca\" (UID: \"56173817-8246-4f37-b157-3890912004ca\") " Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.124624 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/56173817-8246-4f37-b157-3890912004ca-httpd-run\") pod \"56173817-8246-4f37-b157-3890912004ca\" (UID: \"56173817-8246-4f37-b157-3890912004ca\") " Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.124660 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started 
for volume \"kube-api-access-k55s4\" (UniqueName: \"kubernetes.io/projected/56173817-8246-4f37-b157-3890912004ca-kube-api-access-k55s4\") pod \"56173817-8246-4f37-b157-3890912004ca\" (UID: \"56173817-8246-4f37-b157-3890912004ca\") " Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.124960 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/56173817-8246-4f37-b157-3890912004ca-logs" (OuterVolumeSpecName: "logs") pod "56173817-8246-4f37-b157-3890912004ca" (UID: "56173817-8246-4f37-b157-3890912004ca"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.125636 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/56173817-8246-4f37-b157-3890912004ca-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "56173817-8246-4f37-b157-3890912004ca" (UID: "56173817-8246-4f37-b157-3890912004ca"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.126594 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/56173817-8246-4f37-b157-3890912004ca-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.126615 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/56173817-8246-4f37-b157-3890912004ca-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.127522 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56173817-8246-4f37-b157-3890912004ca-scripts" (OuterVolumeSpecName: "scripts") pod "56173817-8246-4f37-b157-3890912004ca" (UID: "56173817-8246-4f37-b157-3890912004ca"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.128372 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "glance") pod "56173817-8246-4f37-b157-3890912004ca" (UID: "56173817-8246-4f37-b157-3890912004ca"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.129221 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/56173817-8246-4f37-b157-3890912004ca-kube-api-access-k55s4" (OuterVolumeSpecName: "kube-api-access-k55s4") pod "56173817-8246-4f37-b157-3890912004ca" (UID: "56173817-8246-4f37-b157-3890912004ca"). InnerVolumeSpecName "kube-api-access-k55s4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.130053 4558 generic.go:334] "Generic (PLEG): container finished" podID="b752af55-9c06-469f-9353-e1042300de3c" containerID="43f53e7f4f862cf970fa692178ae00cd40fe6c655fd001e46383dd72770d5cb5" exitCode=0 Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.130111 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"b752af55-9c06-469f-9353-e1042300de3c","Type":"ContainerDied","Data":"43f53e7f4f862cf970fa692178ae00cd40fe6c655fd001e46383dd72770d5cb5"} Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.130143 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"b752af55-9c06-469f-9353-e1042300de3c","Type":"ContainerDied","Data":"bccce61b5ba6d2aba1d38b7c559ba654190565ff2cbc1a51e114282f3188cc47"} Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.130178 4558 scope.go:117] "RemoveContainer" containerID="43f53e7f4f862cf970fa692178ae00cd40fe6c655fd001e46383dd72770d5cb5" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.130274 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.145041 4558 generic.go:334] "Generic (PLEG): container finished" podID="50804fa8-d09d-49f8-a143-d7ec24ec542a" containerID="15c901b90eae1d8b686fb50b0222bcb667a3a9f6b6a35faa944d0560ae58cf3a" exitCode=0 Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.145134 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-5b5f5cd6d-6rk7z" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.146839 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-5b5f5cd6d-6rk7z" event={"ID":"50804fa8-d09d-49f8-a143-d7ec24ec542a","Type":"ContainerDied","Data":"15c901b90eae1d8b686fb50b0222bcb667a3a9f6b6a35faa944d0560ae58cf3a"} Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.147154 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-5b5f5cd6d-6rk7z" event={"ID":"50804fa8-d09d-49f8-a143-d7ec24ec542a","Type":"ContainerDied","Data":"22fbf6e912d7ee94bdcb3da7c63db26b6dd11dfe2bf5d1deadf4c59e1be94239"} Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.147860 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56173817-8246-4f37-b157-3890912004ca-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "56173817-8246-4f37-b157-3890912004ca" (UID: "56173817-8246-4f37-b157-3890912004ca"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.177110 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.177131 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"131735a5-8043-40d7-a15d-f0024356e584","Type":"ContainerDied","Data":"00887ddcbc7ce052ace0ca68010332c4c0342d0819780f60c62b3f04ba6d24fd"} Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.180806 4558 generic.go:334] "Generic (PLEG): container finished" podID="56173817-8246-4f37-b157-3890912004ca" containerID="26fcabccd365630b0df7a2013b030db7a8a52ed18fb890da73b7a737f80da0ba" exitCode=0 Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.180869 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.180890 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"56173817-8246-4f37-b157-3890912004ca","Type":"ContainerDied","Data":"26fcabccd365630b0df7a2013b030db7a8a52ed18fb890da73b7a737f80da0ba"} Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.180981 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"56173817-8246-4f37-b157-3890912004ca","Type":"ContainerDied","Data":"e902ae63d5f168b0370ac50e98e7f85ed43d5a6e8c5a145738789c24c17611f1"} Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.191387 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-fhpsv" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.196519 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-fhpsv" event={"ID":"03ca5cbb-ca72-46f1-a44f-42ac0fb7cf17","Type":"ContainerDied","Data":"c208f9601a516395e4d9d65bc5d53e905f17f21efe78faa30eb47cc79340b2c3"} Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.202646 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-51af-account-create-update-52f7p"] Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.202970 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56173817-8246-4f37-b157-3890912004ca-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "56173817-8246-4f37-b157-3890912004ca" (UID: "56173817-8246-4f37-b157-3890912004ca"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.203212 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56173817-8246-4f37-b157-3890912004ca-config-data" (OuterVolumeSpecName: "config-data") pod "56173817-8246-4f37-b157-3890912004ca" (UID: "56173817-8246-4f37-b157-3890912004ca"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.203220 4558 scope.go:117] "RemoveContainer" containerID="93d7994154007967abeffaef97cbdbfea9fe0c9a249a8ebbcb1a6bb563d31477" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.208200 4558 generic.go:334] "Generic (PLEG): container finished" podID="86aff100-d474-48ed-b673-4dae7c0722cf" containerID="f0fb0d6dfc95147b44b7be05faeba0d259d63b173ea99ddb9b134ce40f87cb33" exitCode=0 Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.208261 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"86aff100-d474-48ed-b673-4dae7c0722cf","Type":"ContainerDied","Data":"f0fb0d6dfc95147b44b7be05faeba0d259d63b173ea99ddb9b134ce40f87cb33"} Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.208313 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"86aff100-d474-48ed-b673-4dae7c0722cf","Type":"ContainerDied","Data":"eb396e0181b1779a1ab284f1bdeb43ca4f283e2a736a3210e70deeb9de80c4c1"} Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.208367 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.209328 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-51af-account-create-update-52f7p"] Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.212745 4558 generic.go:334] "Generic (PLEG): container finished" podID="ff0d0703-8173-4ac0-afc0-e673feaef286" containerID="29a14fecb73a5e8e41c942ef00e6566817b5c2f5ec120abe410dce21f8142ccd" exitCode=0 Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.212788 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"ff0d0703-8173-4ac0-afc0-e673feaef286","Type":"ContainerDied","Data":"29a14fecb73a5e8e41c942ef00e6566817b5c2f5ec120abe410dce21f8142ccd"} Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.217197 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.219942 4558 generic.go:334] "Generic (PLEG): container finished" podID="70182fd8-a242-40f5-b20d-8ff4dd33e9b1" containerID="b3f67b171f63171671e95039a6c39cf3571069f1e72ec88de6862170e74914f6" exitCode=0 Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.220010 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"70182fd8-a242-40f5-b20d-8ff4dd33e9b1","Type":"ContainerDied","Data":"b3f67b171f63171671e95039a6c39cf3571069f1e72ec88de6862170e74914f6"} Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.224268 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.228144 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56173817-8246-4f37-b157-3890912004ca-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.228195 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.228206 4558 reconciler_common.go:293] 
"Volume detached for volume \"kube-api-access-k55s4\" (UniqueName: \"kubernetes.io/projected/56173817-8246-4f37-b157-3890912004ca-kube-api-access-k55s4\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.228216 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/56173817-8246-4f37-b157-3890912004ca-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.228227 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56173817-8246-4f37-b157-3890912004ca-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.228235 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/56173817-8246-4f37-b157-3890912004ca-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.228386 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-5b5f5cd6d-6rk7z"] Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.234650 4558 generic.go:334] "Generic (PLEG): container finished" podID="f87490e8-557d-41e3-a07b-8fe12147b315" containerID="72f0e46d318dc78a9e49d3aa7f3c509c43a243da567f0f20b70c09a82e812667" exitCode=0 Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.234764 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.234834 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-6ed6-account-create-update-c7zxj" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.235156 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"f87490e8-557d-41e3-a07b-8fe12147b315","Type":"ContainerDied","Data":"72f0e46d318dc78a9e49d3aa7f3c509c43a243da567f0f20b70c09a82e812667"} Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.235199 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"f87490e8-557d-41e3-a07b-8fe12147b315","Type":"ContainerDied","Data":"9a68e1fbd923ab0acd23acc52effcf2d9054b760cec4c8b2e69e6dc2ff7323d5"} Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.245611 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.245629 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/placement-5b5f5cd6d-6rk7z"] Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.265989 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/barbican-api-74bb4f5f4-5ppmg" podUID="d619cd37-a474-4965-b382-749ed6d55d6d" containerName="barbican-api" probeResult="failure" output="Get \"https://10.217.1.61:9311/healthcheck\": read tcp 10.217.0.2:60652->10.217.1.61:9311: read: connection reset by peer" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.266009 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/barbican-api-74bb4f5f4-5ppmg" podUID="d619cd37-a474-4965-b382-749ed6d55d6d" containerName="barbican-api-log" 
probeResult="failure" output="Get \"https://10.217.1.61:9311/healthcheck\": read tcp 10.217.0.2:60656->10.217.1.61:9311: read: connection reset by peer" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.329974 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8666f449-91a3-489d-bbc5-e7811619ea74-operator-scripts\") pod \"keystone-6ed6-account-create-update-c7zxj\" (UID: \"8666f449-91a3-489d-bbc5-e7811619ea74\") " pod="openstack-kuttl-tests/keystone-6ed6-account-create-update-c7zxj" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.330068 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2r6wv\" (UniqueName: \"kubernetes.io/projected/8666f449-91a3-489d-bbc5-e7811619ea74-kube-api-access-2r6wv\") pod \"keystone-6ed6-account-create-update-c7zxj\" (UID: \"8666f449-91a3-489d-bbc5-e7811619ea74\") " pod="openstack-kuttl-tests/keystone-6ed6-account-create-update-c7zxj" Jan 20 17:45:38 crc kubenswrapper[4558]: E0120 17:45:38.330202 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-scripts: object "openstack-kuttl-tests"/"openstack-scripts" not registered Jan 20 17:45:38 crc kubenswrapper[4558]: E0120 17:45:38.331347 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/8666f449-91a3-489d-bbc5-e7811619ea74-operator-scripts podName:8666f449-91a3-489d-bbc5-e7811619ea74 nodeName:}" failed. No retries permitted until 2026-01-20 17:45:40.331301083 +0000 UTC m=+3834.091639050 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/8666f449-91a3-489d-bbc5-e7811619ea74-operator-scripts") pod "keystone-6ed6-account-create-update-c7zxj" (UID: "8666f449-91a3-489d-bbc5-e7811619ea74") : object "openstack-kuttl-tests"/"openstack-scripts" not registered Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.331562 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:38 crc kubenswrapper[4558]: E0120 17:45:38.335723 4558 projected.go:194] Error preparing data for projected volume kube-api-access-2r6wv for pod openstack-kuttl-tests/keystone-6ed6-account-create-update-c7zxj: failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 17:45:38 crc kubenswrapper[4558]: E0120 17:45:38.335789 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/8666f449-91a3-489d-bbc5-e7811619ea74-kube-api-access-2r6wv podName:8666f449-91a3-489d-bbc5-e7811619ea74 nodeName:}" failed. No retries permitted until 2026-01-20 17:45:40.335774725 +0000 UTC m=+3834.096112692 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-2r6wv" (UniqueName: "kubernetes.io/projected/8666f449-91a3-489d-bbc5-e7811619ea74-kube-api-access-2r6wv") pod "keystone-6ed6-account-create-update-c7zxj" (UID: "8666f449-91a3-489d-bbc5-e7811619ea74") : failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.415803 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.426849 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.430419 4558 scope.go:117] "RemoveContainer" containerID="43f53e7f4f862cf970fa692178ae00cd40fe6c655fd001e46383dd72770d5cb5" Jan 20 17:45:38 crc kubenswrapper[4558]: E0120 17:45:38.430827 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"43f53e7f4f862cf970fa692178ae00cd40fe6c655fd001e46383dd72770d5cb5\": container with ID starting with 43f53e7f4f862cf970fa692178ae00cd40fe6c655fd001e46383dd72770d5cb5 not found: ID does not exist" containerID="43f53e7f4f862cf970fa692178ae00cd40fe6c655fd001e46383dd72770d5cb5" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.430856 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"43f53e7f4f862cf970fa692178ae00cd40fe6c655fd001e46383dd72770d5cb5"} err="failed to get container status \"43f53e7f4f862cf970fa692178ae00cd40fe6c655fd001e46383dd72770d5cb5\": rpc error: code = NotFound desc = could not find container \"43f53e7f4f862cf970fa692178ae00cd40fe6c655fd001e46383dd72770d5cb5\": container with ID starting with 43f53e7f4f862cf970fa692178ae00cd40fe6c655fd001e46383dd72770d5cb5 not found: ID does not exist" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.430881 4558 scope.go:117] "RemoveContainer" containerID="93d7994154007967abeffaef97cbdbfea9fe0c9a249a8ebbcb1a6bb563d31477" Jan 20 17:45:38 crc kubenswrapper[4558]: E0120 17:45:38.431077 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"93d7994154007967abeffaef97cbdbfea9fe0c9a249a8ebbcb1a6bb563d31477\": container with ID starting with 93d7994154007967abeffaef97cbdbfea9fe0c9a249a8ebbcb1a6bb563d31477 not found: ID does not exist" containerID="93d7994154007967abeffaef97cbdbfea9fe0c9a249a8ebbcb1a6bb563d31477" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.431095 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"93d7994154007967abeffaef97cbdbfea9fe0c9a249a8ebbcb1a6bb563d31477"} err="failed to get container status \"93d7994154007967abeffaef97cbdbfea9fe0c9a249a8ebbcb1a6bb563d31477\": rpc error: code = NotFound desc = could not find container \"93d7994154007967abeffaef97cbdbfea9fe0c9a249a8ebbcb1a6bb563d31477\": container with ID starting with 93d7994154007967abeffaef97cbdbfea9fe0c9a249a8ebbcb1a6bb563d31477 not found: ID does not exist" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.431113 4558 scope.go:117] "RemoveContainer" containerID="15c901b90eae1d8b686fb50b0222bcb667a3a9f6b6a35faa944d0560ae58cf3a" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.437954 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6rmww\" (UniqueName: \"kubernetes.io/projected/70182fd8-a242-40f5-b20d-8ff4dd33e9b1-kube-api-access-6rmww\") pod \"70182fd8-a242-40f5-b20d-8ff4dd33e9b1\" (UID: \"70182fd8-a242-40f5-b20d-8ff4dd33e9b1\") " Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.438002 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70182fd8-a242-40f5-b20d-8ff4dd33e9b1-config-data\") pod \"70182fd8-a242-40f5-b20d-8ff4dd33e9b1\" (UID: \"70182fd8-a242-40f5-b20d-8ff4dd33e9b1\") " Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.439106 4558 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70182fd8-a242-40f5-b20d-8ff4dd33e9b1-combined-ca-bundle\") pod \"70182fd8-a242-40f5-b20d-8ff4dd33e9b1\" (UID: \"70182fd8-a242-40f5-b20d-8ff4dd33e9b1\") " Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.439245 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/70182fd8-a242-40f5-b20d-8ff4dd33e9b1-public-tls-certs\") pod \"70182fd8-a242-40f5-b20d-8ff4dd33e9b1\" (UID: \"70182fd8-a242-40f5-b20d-8ff4dd33e9b1\") " Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.439339 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/70182fd8-a242-40f5-b20d-8ff4dd33e9b1-internal-tls-certs\") pod \"70182fd8-a242-40f5-b20d-8ff4dd33e9b1\" (UID: \"70182fd8-a242-40f5-b20d-8ff4dd33e9b1\") " Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.439389 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/70182fd8-a242-40f5-b20d-8ff4dd33e9b1-logs\") pod \"70182fd8-a242-40f5-b20d-8ff4dd33e9b1\" (UID: \"70182fd8-a242-40f5-b20d-8ff4dd33e9b1\") " Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.441941 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/70182fd8-a242-40f5-b20d-8ff4dd33e9b1-logs" (OuterVolumeSpecName: "logs") pod "70182fd8-a242-40f5-b20d-8ff4dd33e9b1" (UID: "70182fd8-a242-40f5-b20d-8ff4dd33e9b1"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.449517 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/70182fd8-a242-40f5-b20d-8ff4dd33e9b1-kube-api-access-6rmww" (OuterVolumeSpecName: "kube-api-access-6rmww") pod "70182fd8-a242-40f5-b20d-8ff4dd33e9b1" (UID: "70182fd8-a242-40f5-b20d-8ff4dd33e9b1"). InnerVolumeSpecName "kube-api-access-6rmww". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.477669 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.489752 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70182fd8-a242-40f5-b20d-8ff4dd33e9b1-config-data" (OuterVolumeSpecName: "config-data") pod "70182fd8-a242-40f5-b20d-8ff4dd33e9b1" (UID: "70182fd8-a242-40f5-b20d-8ff4dd33e9b1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.511377 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70182fd8-a242-40f5-b20d-8ff4dd33e9b1-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "70182fd8-a242-40f5-b20d-8ff4dd33e9b1" (UID: "70182fd8-a242-40f5-b20d-8ff4dd33e9b1"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.519268 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.525299 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70182fd8-a242-40f5-b20d-8ff4dd33e9b1-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "70182fd8-a242-40f5-b20d-8ff4dd33e9b1" (UID: "70182fd8-a242-40f5-b20d-8ff4dd33e9b1"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.544597 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff0d0703-8173-4ac0-afc0-e673feaef286-config-data\") pod \"ff0d0703-8173-4ac0-afc0-e673feaef286\" (UID: \"ff0d0703-8173-4ac0-afc0-e673feaef286\") " Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.544706 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-scd9s\" (UniqueName: \"kubernetes.io/projected/ff0d0703-8173-4ac0-afc0-e673feaef286-kube-api-access-scd9s\") pod \"ff0d0703-8173-4ac0-afc0-e673feaef286\" (UID: \"ff0d0703-8173-4ac0-afc0-e673feaef286\") " Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.545049 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ff0d0703-8173-4ac0-afc0-e673feaef286-logs\") pod \"ff0d0703-8173-4ac0-afc0-e673feaef286\" (UID: \"ff0d0703-8173-4ac0-afc0-e673feaef286\") " Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.545272 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/ff0d0703-8173-4ac0-afc0-e673feaef286-nova-metadata-tls-certs\") pod \"ff0d0703-8173-4ac0-afc0-e673feaef286\" (UID: \"ff0d0703-8173-4ac0-afc0-e673feaef286\") " Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.545307 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff0d0703-8173-4ac0-afc0-e673feaef286-combined-ca-bundle\") pod \"ff0d0703-8173-4ac0-afc0-e673feaef286\" (UID: \"ff0d0703-8173-4ac0-afc0-e673feaef286\") " Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.546203 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/70182fd8-a242-40f5-b20d-8ff4dd33e9b1-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.546218 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/70182fd8-a242-40f5-b20d-8ff4dd33e9b1-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.546230 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6rmww\" (UniqueName: \"kubernetes.io/projected/70182fd8-a242-40f5-b20d-8ff4dd33e9b1-kube-api-access-6rmww\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.546241 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70182fd8-a242-40f5-b20d-8ff4dd33e9b1-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.546266 
4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/70182fd8-a242-40f5-b20d-8ff4dd33e9b1-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.546738 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ff0d0703-8173-4ac0-afc0-e673feaef286-logs" (OuterVolumeSpecName: "logs") pod "ff0d0703-8173-4ac0-afc0-e673feaef286" (UID: "ff0d0703-8173-4ac0-afc0-e673feaef286"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.550486 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ff0d0703-8173-4ac0-afc0-e673feaef286-kube-api-access-scd9s" (OuterVolumeSpecName: "kube-api-access-scd9s") pod "ff0d0703-8173-4ac0-afc0-e673feaef286" (UID: "ff0d0703-8173-4ac0-afc0-e673feaef286"). InnerVolumeSpecName "kube-api-access-scd9s". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.551132 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70182fd8-a242-40f5-b20d-8ff4dd33e9b1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "70182fd8-a242-40f5-b20d-8ff4dd33e9b1" (UID: "70182fd8-a242-40f5-b20d-8ff4dd33e9b1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.571372 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff0d0703-8173-4ac0-afc0-e673feaef286-config-data" (OuterVolumeSpecName: "config-data") pod "ff0d0703-8173-4ac0-afc0-e673feaef286" (UID: "ff0d0703-8173-4ac0-afc0-e673feaef286"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.573406 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff0d0703-8173-4ac0-afc0-e673feaef286-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ff0d0703-8173-4ac0-afc0-e673feaef286" (UID: "ff0d0703-8173-4ac0-afc0-e673feaef286"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.584314 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="131735a5-8043-40d7-a15d-f0024356e584" path="/var/lib/kubelet/pods/131735a5-8043-40d7-a15d-f0024356e584/volumes" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.584960 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="16a1e55d-37ca-4488-adc3-2602e7416d55" path="/var/lib/kubelet/pods/16a1e55d-37ca-4488-adc3-2602e7416d55/volumes" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.585520 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44f856be-bb11-468b-a157-1f2e2851d04d" path="/var/lib/kubelet/pods/44f856be-bb11-468b-a157-1f2e2851d04d/volumes" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.586037 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4b507c13-bb43-43c8-b5c6-7194f9355516" path="/var/lib/kubelet/pods/4b507c13-bb43-43c8-b5c6-7194f9355516/volumes" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.587374 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="50804fa8-d09d-49f8-a143-d7ec24ec542a" path="/var/lib/kubelet/pods/50804fa8-d09d-49f8-a143-d7ec24ec542a/volumes" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.587967 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="89fd10f3-4598-47c6-ad0e-59aa59d41894" path="/var/lib/kubelet/pods/89fd10f3-4598-47c6-ad0e-59aa59d41894/volumes" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.588434 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a8cb168e-9c96-449b-a8d6-bcd8511f1a53" path="/var/lib/kubelet/pods/a8cb168e-9c96-449b-a8d6-bcd8511f1a53/volumes" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.588829 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b752af55-9c06-469f-9353-e1042300de3c" path="/var/lib/kubelet/pods/b752af55-9c06-469f-9353-e1042300de3c/volumes" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.589973 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d7ca0a7f-fd93-48de-b71b-b23d1aef2af7" path="/var/lib/kubelet/pods/d7ca0a7f-fd93-48de-b71b-b23d1aef2af7/volumes" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.590571 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fd5cfc55-8cd9-4a76-87b8-4050b8f1030f" path="/var/lib/kubelet/pods/fd5cfc55-8cd9-4a76-87b8-4050b8f1030f/volumes" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.597882 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff0d0703-8173-4ac0-afc0-e673feaef286-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "ff0d0703-8173-4ac0-afc0-e673feaef286" (UID: "ff0d0703-8173-4ac0-afc0-e673feaef286"). InnerVolumeSpecName "nova-metadata-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.636056 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-fhpsv"] Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.636088 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-fhpsv"] Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.636103 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.636115 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.636130 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-6ed6-account-create-update-c7zxj"] Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.636141 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-6ed6-account-create-update-c7zxj"] Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.636151 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.636180 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.652692 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-scd9s\" (UniqueName: \"kubernetes.io/projected/ff0d0703-8173-4ac0-afc0-e673feaef286-kube-api-access-scd9s\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.652825 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ff0d0703-8173-4ac0-afc0-e673feaef286-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.652842 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70182fd8-a242-40f5-b20d-8ff4dd33e9b1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.652851 4558 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/ff0d0703-8173-4ac0-afc0-e673feaef286-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.652859 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff0d0703-8173-4ac0-afc0-e673feaef286-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.652868 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff0d0703-8173-4ac0-afc0-e673feaef286-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.656150 4558 scope.go:117] "RemoveContainer" containerID="661b71951f10fd7eb454cf2a5617695ac503cf4d432c4017c7980d3e23a068f2" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.658557 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.665390 4558 kubelet.go:2431] "SyncLoop REMOVE" 
source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.677760 4558 scope.go:117] "RemoveContainer" containerID="15c901b90eae1d8b686fb50b0222bcb667a3a9f6b6a35faa944d0560ae58cf3a" Jan 20 17:45:38 crc kubenswrapper[4558]: E0120 17:45:38.678194 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"15c901b90eae1d8b686fb50b0222bcb667a3a9f6b6a35faa944d0560ae58cf3a\": container with ID starting with 15c901b90eae1d8b686fb50b0222bcb667a3a9f6b6a35faa944d0560ae58cf3a not found: ID does not exist" containerID="15c901b90eae1d8b686fb50b0222bcb667a3a9f6b6a35faa944d0560ae58cf3a" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.678232 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"15c901b90eae1d8b686fb50b0222bcb667a3a9f6b6a35faa944d0560ae58cf3a"} err="failed to get container status \"15c901b90eae1d8b686fb50b0222bcb667a3a9f6b6a35faa944d0560ae58cf3a\": rpc error: code = NotFound desc = could not find container \"15c901b90eae1d8b686fb50b0222bcb667a3a9f6b6a35faa944d0560ae58cf3a\": container with ID starting with 15c901b90eae1d8b686fb50b0222bcb667a3a9f6b6a35faa944d0560ae58cf3a not found: ID does not exist" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.678267 4558 scope.go:117] "RemoveContainer" containerID="661b71951f10fd7eb454cf2a5617695ac503cf4d432c4017c7980d3e23a068f2" Jan 20 17:45:38 crc kubenswrapper[4558]: E0120 17:45:38.678554 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"661b71951f10fd7eb454cf2a5617695ac503cf4d432c4017c7980d3e23a068f2\": container with ID starting with 661b71951f10fd7eb454cf2a5617695ac503cf4d432c4017c7980d3e23a068f2 not found: ID does not exist" containerID="661b71951f10fd7eb454cf2a5617695ac503cf4d432c4017c7980d3e23a068f2" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.678610 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"661b71951f10fd7eb454cf2a5617695ac503cf4d432c4017c7980d3e23a068f2"} err="failed to get container status \"661b71951f10fd7eb454cf2a5617695ac503cf4d432c4017c7980d3e23a068f2\": rpc error: code = NotFound desc = could not find container \"661b71951f10fd7eb454cf2a5617695ac503cf4d432c4017c7980d3e23a068f2\": container with ID starting with 661b71951f10fd7eb454cf2a5617695ac503cf4d432c4017c7980d3e23a068f2 not found: ID does not exist" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.678638 4558 scope.go:117] "RemoveContainer" containerID="63bcb2a3acf1f845b98608c468529b05f04d46a72a3434b91049daf4e38d78b3" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.697967 4558 scope.go:117] "RemoveContainer" containerID="26fcabccd365630b0df7a2013b030db7a8a52ed18fb890da73b7a737f80da0ba" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.714404 4558 scope.go:117] "RemoveContainer" containerID="65d88d20f8ddd45d36845fd06af8255069c3338120c182ed2e8bfd0b56484bec" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.741359 4558 scope.go:117] "RemoveContainer" containerID="26fcabccd365630b0df7a2013b030db7a8a52ed18fb890da73b7a737f80da0ba" Jan 20 17:45:38 crc kubenswrapper[4558]: E0120 17:45:38.746290 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"26fcabccd365630b0df7a2013b030db7a8a52ed18fb890da73b7a737f80da0ba\": container with ID starting with 
26fcabccd365630b0df7a2013b030db7a8a52ed18fb890da73b7a737f80da0ba not found: ID does not exist" containerID="26fcabccd365630b0df7a2013b030db7a8a52ed18fb890da73b7a737f80da0ba" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.746335 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"26fcabccd365630b0df7a2013b030db7a8a52ed18fb890da73b7a737f80da0ba"} err="failed to get container status \"26fcabccd365630b0df7a2013b030db7a8a52ed18fb890da73b7a737f80da0ba\": rpc error: code = NotFound desc = could not find container \"26fcabccd365630b0df7a2013b030db7a8a52ed18fb890da73b7a737f80da0ba\": container with ID starting with 26fcabccd365630b0df7a2013b030db7a8a52ed18fb890da73b7a737f80da0ba not found: ID does not exist" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.746377 4558 scope.go:117] "RemoveContainer" containerID="65d88d20f8ddd45d36845fd06af8255069c3338120c182ed2e8bfd0b56484bec" Jan 20 17:45:38 crc kubenswrapper[4558]: E0120 17:45:38.746885 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"65d88d20f8ddd45d36845fd06af8255069c3338120c182ed2e8bfd0b56484bec\": container with ID starting with 65d88d20f8ddd45d36845fd06af8255069c3338120c182ed2e8bfd0b56484bec not found: ID does not exist" containerID="65d88d20f8ddd45d36845fd06af8255069c3338120c182ed2e8bfd0b56484bec" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.746931 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"65d88d20f8ddd45d36845fd06af8255069c3338120c182ed2e8bfd0b56484bec"} err="failed to get container status \"65d88d20f8ddd45d36845fd06af8255069c3338120c182ed2e8bfd0b56484bec\": rpc error: code = NotFound desc = could not find container \"65d88d20f8ddd45d36845fd06af8255069c3338120c182ed2e8bfd0b56484bec\": container with ID starting with 65d88d20f8ddd45d36845fd06af8255069c3338120c182ed2e8bfd0b56484bec not found: ID does not exist" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.746960 4558 scope.go:117] "RemoveContainer" containerID="f0fb0d6dfc95147b44b7be05faeba0d259d63b173ea99ddb9b134ce40f87cb33" Jan 20 17:45:38 crc kubenswrapper[4558]: E0120 17:45:38.753615 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="bfedd515a4cd08318345cf62dd830e7f182ab82a54ada6e840a63a36d085cdc9" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.755290 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8666f449-91a3-489d-bbc5-e7811619ea74-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.755313 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2r6wv\" (UniqueName: \"kubernetes.io/projected/8666f449-91a3-489d-bbc5-e7811619ea74-kube-api-access-2r6wv\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:38 crc kubenswrapper[4558]: E0120 17:45:38.755492 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="bfedd515a4cd08318345cf62dd830e7f182ab82a54ada6e840a63a36d085cdc9" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:45:38 crc 
kubenswrapper[4558]: E0120 17:45:38.758591 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="bfedd515a4cd08318345cf62dd830e7f182ab82a54ada6e840a63a36d085cdc9" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:45:38 crc kubenswrapper[4558]: E0120 17:45:38.758643 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="ddd57d3b-c9de-46e3-897e-1a50ae49630e" containerName="nova-scheduler-scheduler" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.773367 4558 scope.go:117] "RemoveContainer" containerID="67cda9ffae9461498a3181421c8f34810fd6c215622487c92fe9030c49337aa1" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.804142 4558 scope.go:117] "RemoveContainer" containerID="f0fb0d6dfc95147b44b7be05faeba0d259d63b173ea99ddb9b134ce40f87cb33" Jan 20 17:45:38 crc kubenswrapper[4558]: E0120 17:45:38.804447 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f0fb0d6dfc95147b44b7be05faeba0d259d63b173ea99ddb9b134ce40f87cb33\": container with ID starting with f0fb0d6dfc95147b44b7be05faeba0d259d63b173ea99ddb9b134ce40f87cb33 not found: ID does not exist" containerID="f0fb0d6dfc95147b44b7be05faeba0d259d63b173ea99ddb9b134ce40f87cb33" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.804470 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f0fb0d6dfc95147b44b7be05faeba0d259d63b173ea99ddb9b134ce40f87cb33"} err="failed to get container status \"f0fb0d6dfc95147b44b7be05faeba0d259d63b173ea99ddb9b134ce40f87cb33\": rpc error: code = NotFound desc = could not find container \"f0fb0d6dfc95147b44b7be05faeba0d259d63b173ea99ddb9b134ce40f87cb33\": container with ID starting with f0fb0d6dfc95147b44b7be05faeba0d259d63b173ea99ddb9b134ce40f87cb33 not found: ID does not exist" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.804493 4558 scope.go:117] "RemoveContainer" containerID="67cda9ffae9461498a3181421c8f34810fd6c215622487c92fe9030c49337aa1" Jan 20 17:45:38 crc kubenswrapper[4558]: E0120 17:45:38.804723 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"67cda9ffae9461498a3181421c8f34810fd6c215622487c92fe9030c49337aa1\": container with ID starting with 67cda9ffae9461498a3181421c8f34810fd6c215622487c92fe9030c49337aa1 not found: ID does not exist" containerID="67cda9ffae9461498a3181421c8f34810fd6c215622487c92fe9030c49337aa1" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.804741 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"67cda9ffae9461498a3181421c8f34810fd6c215622487c92fe9030c49337aa1"} err="failed to get container status \"67cda9ffae9461498a3181421c8f34810fd6c215622487c92fe9030c49337aa1\": rpc error: code = NotFound desc = could not find container \"67cda9ffae9461498a3181421c8f34810fd6c215622487c92fe9030c49337aa1\": container with ID starting with 67cda9ffae9461498a3181421c8f34810fd6c215622487c92fe9030c49337aa1 not found: ID does not exist" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.804755 4558 scope.go:117] "RemoveContainer" 
containerID="72f0e46d318dc78a9e49d3aa7f3c509c43a243da567f0f20b70c09a82e812667" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.826227 4558 scope.go:117] "RemoveContainer" containerID="dea187ec3d51aa9fd2fd3318b66ce5429ee704c1f761dae601b4234a786d24c7" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.841311 4558 scope.go:117] "RemoveContainer" containerID="72f0e46d318dc78a9e49d3aa7f3c509c43a243da567f0f20b70c09a82e812667" Jan 20 17:45:38 crc kubenswrapper[4558]: E0120 17:45:38.841668 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"72f0e46d318dc78a9e49d3aa7f3c509c43a243da567f0f20b70c09a82e812667\": container with ID starting with 72f0e46d318dc78a9e49d3aa7f3c509c43a243da567f0f20b70c09a82e812667 not found: ID does not exist" containerID="72f0e46d318dc78a9e49d3aa7f3c509c43a243da567f0f20b70c09a82e812667" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.841691 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"72f0e46d318dc78a9e49d3aa7f3c509c43a243da567f0f20b70c09a82e812667"} err="failed to get container status \"72f0e46d318dc78a9e49d3aa7f3c509c43a243da567f0f20b70c09a82e812667\": rpc error: code = NotFound desc = could not find container \"72f0e46d318dc78a9e49d3aa7f3c509c43a243da567f0f20b70c09a82e812667\": container with ID starting with 72f0e46d318dc78a9e49d3aa7f3c509c43a243da567f0f20b70c09a82e812667 not found: ID does not exist" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.841712 4558 scope.go:117] "RemoveContainer" containerID="dea187ec3d51aa9fd2fd3318b66ce5429ee704c1f761dae601b4234a786d24c7" Jan 20 17:45:38 crc kubenswrapper[4558]: E0120 17:45:38.842011 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dea187ec3d51aa9fd2fd3318b66ce5429ee704c1f761dae601b4234a786d24c7\": container with ID starting with dea187ec3d51aa9fd2fd3318b66ce5429ee704c1f761dae601b4234a786d24c7 not found: ID does not exist" containerID="dea187ec3d51aa9fd2fd3318b66ce5429ee704c1f761dae601b4234a786d24c7" Jan 20 17:45:38 crc kubenswrapper[4558]: I0120 17:45:38.842029 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dea187ec3d51aa9fd2fd3318b66ce5429ee704c1f761dae601b4234a786d24c7"} err="failed to get container status \"dea187ec3d51aa9fd2fd3318b66ce5429ee704c1f761dae601b4234a786d24c7\": rpc error: code = NotFound desc = could not find container \"dea187ec3d51aa9fd2fd3318b66ce5429ee704c1f761dae601b4234a786d24c7\": container with ID starting with dea187ec3d51aa9fd2fd3318b66ce5429ee704c1f761dae601b4234a786d24c7 not found: ID does not exist" Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.241236 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-7975b7ff57-c5czg" Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.247565 4558 generic.go:334] "Generic (PLEG): container finished" podID="d619cd37-a474-4965-b382-749ed6d55d6d" containerID="dcd025269e0c4cae451a75917b3ffb9392b169d6ba1b4f275051e1022e3b09b6" exitCode=0 Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.247655 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-74bb4f5f4-5ppmg" event={"ID":"d619cd37-a474-4965-b382-749ed6d55d6d","Type":"ContainerDied","Data":"dcd025269e0c4cae451a75917b3ffb9392b169d6ba1b4f275051e1022e3b09b6"} Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.249203 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"ff0d0703-8173-4ac0-afc0-e673feaef286","Type":"ContainerDied","Data":"fe15b8466909629ec54bddf0b729e623676456b8d764dc2e8e08622acf62dd38"} Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.249251 4558 scope.go:117] "RemoveContainer" containerID="29a14fecb73a5e8e41c942ef00e6566817b5c2f5ec120abe410dce21f8142ccd" Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.249313 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.251687 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"70182fd8-a242-40f5-b20d-8ff4dd33e9b1","Type":"ContainerDied","Data":"ffbd520c8e0621c07e5bc545a014fc083db0874a597b335950d739345e51a889"} Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.251781 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.263580 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2-combined-ca-bundle\") pod \"1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2\" (UID: \"1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2\") " Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.263678 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2-config-data-custom\") pod \"1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2\" (UID: \"1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2\") " Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.263759 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2-config-data\") pod \"1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2\" (UID: \"1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2\") " Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.263824 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-prc26\" (UniqueName: \"kubernetes.io/projected/1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2-kube-api-access-prc26\") pod \"1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2\" (UID: \"1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2\") " Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.263896 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2-logs\") pod \"1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2\" 
(UID: \"1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2\") " Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.265025 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2-logs" (OuterVolumeSpecName: "logs") pod "1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2" (UID: "1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.265127 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.268816 4558 generic.go:334] "Generic (PLEG): container finished" podID="1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2" containerID="ec57a9b625bd2a7169cdebad1646b567fad8da5b766be344abba4af1c17ab765" exitCode=0 Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.268887 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-7975b7ff57-c5czg" event={"ID":"1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2","Type":"ContainerDied","Data":"ec57a9b625bd2a7169cdebad1646b567fad8da5b766be344abba4af1c17ab765"} Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.268906 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-7975b7ff57-c5czg" event={"ID":"1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2","Type":"ContainerDied","Data":"fda6ea175768811455e1d8110992e145c45cdc20ae2763c7a7fc819b10fdc9aa"} Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.268971 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-7975b7ff57-c5czg" Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.274488 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2" (UID: "1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.278084 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2-kube-api-access-prc26" (OuterVolumeSpecName: "kube-api-access-prc26") pod "1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2" (UID: "1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2"). InnerVolumeSpecName "kube-api-access-prc26". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.278111 4558 scope.go:117] "RemoveContainer" containerID="eebdfbd12d120bd4d4adeb0cbba616d24ce5f4438b3c54b84c67c895df587e6d" Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.284193 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.286552 4558 generic.go:334] "Generic (PLEG): container finished" podID="7faabe16-9de5-49dc-bab6-44e173f4403c" containerID="7dfa9f0fec16a0a5a1903b314539ce49eb8943fbf0152c319ba83f4512000ab4" exitCode=0 Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.286620 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"7faabe16-9de5-49dc-bab6-44e173f4403c","Type":"ContainerDied","Data":"7dfa9f0fec16a0a5a1903b314539ce49eb8943fbf0152c319ba83f4512000ab4"} Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.301148 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.307901 4558 scope.go:117] "RemoveContainer" containerID="b3f67b171f63171671e95039a6c39cf3571069f1e72ec88de6862170e74914f6" Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.308726 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.315195 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.321594 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2" (UID: "1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.338069 4558 scope.go:117] "RemoveContainer" containerID="69302d70e78ca0d36f4a72bfbbc9246b0e5fece0f0fdbf172a8875985eda6a7c" Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.347474 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2-config-data" (OuterVolumeSpecName: "config-data") pod "1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2" (UID: "1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.361738 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.366454 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-api-74bb4f5f4-5ppmg" Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.366533 4558 scope.go:117] "RemoveContainer" containerID="ec57a9b625bd2a7169cdebad1646b567fad8da5b766be344abba4af1c17ab765" Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.368340 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.368362 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.368374 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.368387 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-prc26\" (UniqueName: \"kubernetes.io/projected/1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2-kube-api-access-prc26\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.401728 4558 scope.go:117] "RemoveContainer" containerID="ddb2774f5a646ced66660bb7d2fc7db683928585fea6955ddbda9cb09d41a20e" Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.424575 4558 scope.go:117] "RemoveContainer" containerID="ec57a9b625bd2a7169cdebad1646b567fad8da5b766be344abba4af1c17ab765" Jan 20 17:45:39 crc kubenswrapper[4558]: E0120 17:45:39.425196 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ec57a9b625bd2a7169cdebad1646b567fad8da5b766be344abba4af1c17ab765\": container with ID starting with ec57a9b625bd2a7169cdebad1646b567fad8da5b766be344abba4af1c17ab765 not found: ID does not exist" containerID="ec57a9b625bd2a7169cdebad1646b567fad8da5b766be344abba4af1c17ab765" Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.425239 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ec57a9b625bd2a7169cdebad1646b567fad8da5b766be344abba4af1c17ab765"} err="failed to get container status \"ec57a9b625bd2a7169cdebad1646b567fad8da5b766be344abba4af1c17ab765\": rpc error: code = NotFound desc = could not find container \"ec57a9b625bd2a7169cdebad1646b567fad8da5b766be344abba4af1c17ab765\": container with ID starting with ec57a9b625bd2a7169cdebad1646b567fad8da5b766be344abba4af1c17ab765 not found: ID does not exist" Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.425267 4558 scope.go:117] "RemoveContainer" containerID="ddb2774f5a646ced66660bb7d2fc7db683928585fea6955ddbda9cb09d41a20e" Jan 20 17:45:39 crc kubenswrapper[4558]: E0120 17:45:39.426077 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ddb2774f5a646ced66660bb7d2fc7db683928585fea6955ddbda9cb09d41a20e\": container with ID starting with ddb2774f5a646ced66660bb7d2fc7db683928585fea6955ddbda9cb09d41a20e not found: ID does not exist" containerID="ddb2774f5a646ced66660bb7d2fc7db683928585fea6955ddbda9cb09d41a20e" Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.426128 4558 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"ddb2774f5a646ced66660bb7d2fc7db683928585fea6955ddbda9cb09d41a20e"} err="failed to get container status \"ddb2774f5a646ced66660bb7d2fc7db683928585fea6955ddbda9cb09d41a20e\": rpc error: code = NotFound desc = could not find container \"ddb2774f5a646ced66660bb7d2fc7db683928585fea6955ddbda9cb09d41a20e\": container with ID starting with ddb2774f5a646ced66660bb7d2fc7db683928585fea6955ddbda9cb09d41a20e not found: ID does not exist" Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.426156 4558 scope.go:117] "RemoveContainer" containerID="88e09b52c88812df27321ee3632982258e7318dfda1d7dfa673a282b0d3aea31" Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.468598 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d619cd37-a474-4965-b382-749ed6d55d6d-config-data-custom\") pod \"d619cd37-a474-4965-b382-749ed6d55d6d\" (UID: \"d619cd37-a474-4965-b382-749ed6d55d6d\") " Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.468640 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d619cd37-a474-4965-b382-749ed6d55d6d-logs\") pod \"d619cd37-a474-4965-b382-749ed6d55d6d\" (UID: \"d619cd37-a474-4965-b382-749ed6d55d6d\") " Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.468663 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7faabe16-9de5-49dc-bab6-44e173f4403c-combined-ca-bundle\") pod \"7faabe16-9de5-49dc-bab6-44e173f4403c\" (UID: \"7faabe16-9de5-49dc-bab6-44e173f4403c\") " Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.468686 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7faabe16-9de5-49dc-bab6-44e173f4403c-config-data\") pod \"7faabe16-9de5-49dc-bab6-44e173f4403c\" (UID: \"7faabe16-9de5-49dc-bab6-44e173f4403c\") " Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.469325 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d619cd37-a474-4965-b382-749ed6d55d6d-logs" (OuterVolumeSpecName: "logs") pod "d619cd37-a474-4965-b382-749ed6d55d6d" (UID: "d619cd37-a474-4965-b382-749ed6d55d6d"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.469492 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fmn5j\" (UniqueName: \"kubernetes.io/projected/7faabe16-9de5-49dc-bab6-44e173f4403c-kube-api-access-fmn5j\") pod \"7faabe16-9de5-49dc-bab6-44e173f4403c\" (UID: \"7faabe16-9de5-49dc-bab6-44e173f4403c\") " Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.469566 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d619cd37-a474-4965-b382-749ed6d55d6d-public-tls-certs\") pod \"d619cd37-a474-4965-b382-749ed6d55d6d\" (UID: \"d619cd37-a474-4965-b382-749ed6d55d6d\") " Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.469658 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7faabe16-9de5-49dc-bab6-44e173f4403c-scripts\") pod \"7faabe16-9de5-49dc-bab6-44e173f4403c\" (UID: \"7faabe16-9de5-49dc-bab6-44e173f4403c\") " Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.469765 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d619cd37-a474-4965-b382-749ed6d55d6d-config-data\") pod \"d619cd37-a474-4965-b382-749ed6d55d6d\" (UID: \"d619cd37-a474-4965-b382-749ed6d55d6d\") " Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.470191 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d619cd37-a474-4965-b382-749ed6d55d6d-internal-tls-certs\") pod \"d619cd37-a474-4965-b382-749ed6d55d6d\" (UID: \"d619cd37-a474-4965-b382-749ed6d55d6d\") " Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.470242 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7faabe16-9de5-49dc-bab6-44e173f4403c-etc-machine-id\") pod \"7faabe16-9de5-49dc-bab6-44e173f4403c\" (UID: \"7faabe16-9de5-49dc-bab6-44e173f4403c\") " Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.470281 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7faabe16-9de5-49dc-bab6-44e173f4403c-config-data-custom\") pod \"7faabe16-9de5-49dc-bab6-44e173f4403c\" (UID: \"7faabe16-9de5-49dc-bab6-44e173f4403c\") " Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.470327 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4kqlf\" (UniqueName: \"kubernetes.io/projected/d619cd37-a474-4965-b382-749ed6d55d6d-kube-api-access-4kqlf\") pod \"d619cd37-a474-4965-b382-749ed6d55d6d\" (UID: \"d619cd37-a474-4965-b382-749ed6d55d6d\") " Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.470445 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d619cd37-a474-4965-b382-749ed6d55d6d-combined-ca-bundle\") pod \"d619cd37-a474-4965-b382-749ed6d55d6d\" (UID: \"d619cd37-a474-4965-b382-749ed6d55d6d\") " Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.472424 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d619cd37-a474-4965-b382-749ed6d55d6d-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 
17:45:39.473356 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d619cd37-a474-4965-b382-749ed6d55d6d-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "d619cd37-a474-4965-b382-749ed6d55d6d" (UID: "d619cd37-a474-4965-b382-749ed6d55d6d"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.473623 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7faabe16-9de5-49dc-bab6-44e173f4403c-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "7faabe16-9de5-49dc-bab6-44e173f4403c" (UID: "7faabe16-9de5-49dc-bab6-44e173f4403c"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.476682 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7faabe16-9de5-49dc-bab6-44e173f4403c-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "7faabe16-9de5-49dc-bab6-44e173f4403c" (UID: "7faabe16-9de5-49dc-bab6-44e173f4403c"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.477537 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d619cd37-a474-4965-b382-749ed6d55d6d-kube-api-access-4kqlf" (OuterVolumeSpecName: "kube-api-access-4kqlf") pod "d619cd37-a474-4965-b382-749ed6d55d6d" (UID: "d619cd37-a474-4965-b382-749ed6d55d6d"). InnerVolumeSpecName "kube-api-access-4kqlf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.478648 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7faabe16-9de5-49dc-bab6-44e173f4403c-kube-api-access-fmn5j" (OuterVolumeSpecName: "kube-api-access-fmn5j") pod "7faabe16-9de5-49dc-bab6-44e173f4403c" (UID: "7faabe16-9de5-49dc-bab6-44e173f4403c"). InnerVolumeSpecName "kube-api-access-fmn5j". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.487118 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7faabe16-9de5-49dc-bab6-44e173f4403c-scripts" (OuterVolumeSpecName: "scripts") pod "7faabe16-9de5-49dc-bab6-44e173f4403c" (UID: "7faabe16-9de5-49dc-bab6-44e173f4403c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.499803 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d619cd37-a474-4965-b382-749ed6d55d6d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d619cd37-a474-4965-b382-749ed6d55d6d" (UID: "d619cd37-a474-4965-b382-749ed6d55d6d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.515603 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d619cd37-a474-4965-b382-749ed6d55d6d-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "d619cd37-a474-4965-b382-749ed6d55d6d" (UID: "d619cd37-a474-4965-b382-749ed6d55d6d"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.518300 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d619cd37-a474-4965-b382-749ed6d55d6d-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "d619cd37-a474-4965-b382-749ed6d55d6d" (UID: "d619cd37-a474-4965-b382-749ed6d55d6d"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.520014 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7faabe16-9de5-49dc-bab6-44e173f4403c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7faabe16-9de5-49dc-bab6-44e173f4403c" (UID: "7faabe16-9de5-49dc-bab6-44e173f4403c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:39 crc kubenswrapper[4558]: E0120 17:45:39.523040 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="f53b402a7de8ccb9267cb06efb993b2c9befd769af338febfa1cab472df02511" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:45:39 crc kubenswrapper[4558]: E0120 17:45:39.525552 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="f53b402a7de8ccb9267cb06efb993b2c9befd769af338febfa1cab472df02511" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:45:39 crc kubenswrapper[4558]: E0120 17:45:39.527323 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="f53b402a7de8ccb9267cb06efb993b2c9befd769af338febfa1cab472df02511" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:45:39 crc kubenswrapper[4558]: E0120 17:45:39.527412 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podUID="ea72c008-3a66-4577-8042-4b1e0ed1cca6" containerName="nova-cell1-conductor-conductor" Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.530339 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d619cd37-a474-4965-b382-749ed6d55d6d-config-data" (OuterVolumeSpecName: "config-data") pod "d619cd37-a474-4965-b382-749ed6d55d6d" (UID: "d619cd37-a474-4965-b382-749ed6d55d6d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.548651 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7faabe16-9de5-49dc-bab6-44e173f4403c-config-data" (OuterVolumeSpecName: "config-data") pod "7faabe16-9de5-49dc-bab6-44e173f4403c" (UID: "7faabe16-9de5-49dc-bab6-44e173f4403c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.574767 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d619cd37-a474-4965-b382-749ed6d55d6d-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.574803 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d619cd37-a474-4965-b382-749ed6d55d6d-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.574819 4558 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7faabe16-9de5-49dc-bab6-44e173f4403c-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.574832 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7faabe16-9de5-49dc-bab6-44e173f4403c-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.574846 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4kqlf\" (UniqueName: \"kubernetes.io/projected/d619cd37-a474-4965-b382-749ed6d55d6d-kube-api-access-4kqlf\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.574855 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d619cd37-a474-4965-b382-749ed6d55d6d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.574865 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d619cd37-a474-4965-b382-749ed6d55d6d-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.574874 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7faabe16-9de5-49dc-bab6-44e173f4403c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.574885 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7faabe16-9de5-49dc-bab6-44e173f4403c-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.574897 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fmn5j\" (UniqueName: \"kubernetes.io/projected/7faabe16-9de5-49dc-bab6-44e173f4403c-kube-api-access-fmn5j\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.574907 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d619cd37-a474-4965-b382-749ed6d55d6d-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.574916 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7faabe16-9de5-49dc-bab6-44e173f4403c-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.610404 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-worker-7975b7ff57-c5czg"] Jan 20 17:45:39 crc kubenswrapper[4558]: I0120 17:45:39.619053 4558 kubelet.go:2431] 
"SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-worker-7975b7ff57-c5czg"] Jan 20 17:45:39 crc kubenswrapper[4558]: E0120 17:45:39.893011 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 20 17:45:39 crc kubenswrapper[4558]: E0120 17:45:39.893156 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/4765e529-9729-4d27-a252-c0c9a7b67beb-config-data podName:4765e529-9729-4d27-a252-c0c9a7b67beb nodeName:}" failed. No retries permitted until 2026-01-20 17:45:47.893129004 +0000 UTC m=+3841.653466971 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/4765e529-9729-4d27-a252-c0c9a7b67beb-config-data") pod "rabbitmq-server-0" (UID: "4765e529-9729-4d27-a252-c0c9a7b67beb") : configmap "rabbitmq-config-data" not found Jan 20 17:45:40 crc kubenswrapper[4558]: I0120 17:45:40.038884 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-f7d8994df-dzxmh" Jan 20 17:45:40 crc kubenswrapper[4558]: I0120 17:45:40.094935 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5cbfd26c-728d-420c-9d40-b6f7870cff60-config-data\") pod \"5cbfd26c-728d-420c-9d40-b6f7870cff60\" (UID: \"5cbfd26c-728d-420c-9d40-b6f7870cff60\") " Jan 20 17:45:40 crc kubenswrapper[4558]: I0120 17:45:40.095706 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/5cbfd26c-728d-420c-9d40-b6f7870cff60-credential-keys\") pod \"5cbfd26c-728d-420c-9d40-b6f7870cff60\" (UID: \"5cbfd26c-728d-420c-9d40-b6f7870cff60\") " Jan 20 17:45:40 crc kubenswrapper[4558]: I0120 17:45:40.095836 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5cbfd26c-728d-420c-9d40-b6f7870cff60-public-tls-certs\") pod \"5cbfd26c-728d-420c-9d40-b6f7870cff60\" (UID: \"5cbfd26c-728d-420c-9d40-b6f7870cff60\") " Jan 20 17:45:40 crc kubenswrapper[4558]: I0120 17:45:40.095929 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j4z9c\" (UniqueName: \"kubernetes.io/projected/5cbfd26c-728d-420c-9d40-b6f7870cff60-kube-api-access-j4z9c\") pod \"5cbfd26c-728d-420c-9d40-b6f7870cff60\" (UID: \"5cbfd26c-728d-420c-9d40-b6f7870cff60\") " Jan 20 17:45:40 crc kubenswrapper[4558]: I0120 17:45:40.096021 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5cbfd26c-728d-420c-9d40-b6f7870cff60-scripts\") pod \"5cbfd26c-728d-420c-9d40-b6f7870cff60\" (UID: \"5cbfd26c-728d-420c-9d40-b6f7870cff60\") " Jan 20 17:45:40 crc kubenswrapper[4558]: I0120 17:45:40.096106 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5cbfd26c-728d-420c-9d40-b6f7870cff60-internal-tls-certs\") pod \"5cbfd26c-728d-420c-9d40-b6f7870cff60\" (UID: \"5cbfd26c-728d-420c-9d40-b6f7870cff60\") " Jan 20 17:45:40 crc kubenswrapper[4558]: I0120 17:45:40.096219 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/5cbfd26c-728d-420c-9d40-b6f7870cff60-fernet-keys\") pod \"5cbfd26c-728d-420c-9d40-b6f7870cff60\" (UID: 
\"5cbfd26c-728d-420c-9d40-b6f7870cff60\") " Jan 20 17:45:40 crc kubenswrapper[4558]: I0120 17:45:40.096290 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5cbfd26c-728d-420c-9d40-b6f7870cff60-combined-ca-bundle\") pod \"5cbfd26c-728d-420c-9d40-b6f7870cff60\" (UID: \"5cbfd26c-728d-420c-9d40-b6f7870cff60\") " Jan 20 17:45:40 crc kubenswrapper[4558]: I0120 17:45:40.101593 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5cbfd26c-728d-420c-9d40-b6f7870cff60-scripts" (OuterVolumeSpecName: "scripts") pod "5cbfd26c-728d-420c-9d40-b6f7870cff60" (UID: "5cbfd26c-728d-420c-9d40-b6f7870cff60"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:40 crc kubenswrapper[4558]: I0120 17:45:40.101712 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5cbfd26c-728d-420c-9d40-b6f7870cff60-kube-api-access-j4z9c" (OuterVolumeSpecName: "kube-api-access-j4z9c") pod "5cbfd26c-728d-420c-9d40-b6f7870cff60" (UID: "5cbfd26c-728d-420c-9d40-b6f7870cff60"). InnerVolumeSpecName "kube-api-access-j4z9c". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:45:40 crc kubenswrapper[4558]: I0120 17:45:40.102699 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5cbfd26c-728d-420c-9d40-b6f7870cff60-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "5cbfd26c-728d-420c-9d40-b6f7870cff60" (UID: "5cbfd26c-728d-420c-9d40-b6f7870cff60"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:40 crc kubenswrapper[4558]: I0120 17:45:40.105093 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5cbfd26c-728d-420c-9d40-b6f7870cff60-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "5cbfd26c-728d-420c-9d40-b6f7870cff60" (UID: "5cbfd26c-728d-420c-9d40-b6f7870cff60"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:40 crc kubenswrapper[4558]: I0120 17:45:40.118787 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5cbfd26c-728d-420c-9d40-b6f7870cff60-config-data" (OuterVolumeSpecName: "config-data") pod "5cbfd26c-728d-420c-9d40-b6f7870cff60" (UID: "5cbfd26c-728d-420c-9d40-b6f7870cff60"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:40 crc kubenswrapper[4558]: I0120 17:45:40.120882 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5cbfd26c-728d-420c-9d40-b6f7870cff60-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5cbfd26c-728d-420c-9d40-b6f7870cff60" (UID: "5cbfd26c-728d-420c-9d40-b6f7870cff60"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:40 crc kubenswrapper[4558]: I0120 17:45:40.133757 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5cbfd26c-728d-420c-9d40-b6f7870cff60-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "5cbfd26c-728d-420c-9d40-b6f7870cff60" (UID: "5cbfd26c-728d-420c-9d40-b6f7870cff60"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:40 crc kubenswrapper[4558]: I0120 17:45:40.135327 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5cbfd26c-728d-420c-9d40-b6f7870cff60-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "5cbfd26c-728d-420c-9d40-b6f7870cff60" (UID: "5cbfd26c-728d-420c-9d40-b6f7870cff60"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:40 crc kubenswrapper[4558]: I0120 17:45:40.198295 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5cbfd26c-728d-420c-9d40-b6f7870cff60-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:40 crc kubenswrapper[4558]: I0120 17:45:40.198387 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j4z9c\" (UniqueName: \"kubernetes.io/projected/5cbfd26c-728d-420c-9d40-b6f7870cff60-kube-api-access-j4z9c\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:40 crc kubenswrapper[4558]: I0120 17:45:40.198442 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5cbfd26c-728d-420c-9d40-b6f7870cff60-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:40 crc kubenswrapper[4558]: I0120 17:45:40.198492 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5cbfd26c-728d-420c-9d40-b6f7870cff60-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:40 crc kubenswrapper[4558]: I0120 17:45:40.198537 4558 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/5cbfd26c-728d-420c-9d40-b6f7870cff60-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:40 crc kubenswrapper[4558]: I0120 17:45:40.198598 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5cbfd26c-728d-420c-9d40-b6f7870cff60-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:40 crc kubenswrapper[4558]: I0120 17:45:40.198645 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5cbfd26c-728d-420c-9d40-b6f7870cff60-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:40 crc kubenswrapper[4558]: I0120 17:45:40.198689 4558 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/5cbfd26c-728d-420c-9d40-b6f7870cff60-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:40 crc kubenswrapper[4558]: I0120 17:45:40.305288 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"7faabe16-9de5-49dc-bab6-44e173f4403c","Type":"ContainerDied","Data":"c3d11385387f16b6eaa78c51b6857cdc17ebee2005dc5130a7df1bd854cc3254"} Jan 20 17:45:40 crc kubenswrapper[4558]: I0120 17:45:40.305360 4558 scope.go:117] "RemoveContainer" containerID="7dfa9f0fec16a0a5a1903b314539ce49eb8943fbf0152c319ba83f4512000ab4" Jan 20 17:45:40 crc kubenswrapper[4558]: I0120 17:45:40.305364 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:45:40 crc kubenswrapper[4558]: I0120 17:45:40.310220 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-api-74bb4f5f4-5ppmg" Jan 20 17:45:40 crc kubenswrapper[4558]: I0120 17:45:40.310238 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-74bb4f5f4-5ppmg" event={"ID":"d619cd37-a474-4965-b382-749ed6d55d6d","Type":"ContainerDied","Data":"aed528881c58fa0aa03f936e5977026a6585daedac4a872f81b019f155c31984"} Jan 20 17:45:40 crc kubenswrapper[4558]: I0120 17:45:40.317723 4558 generic.go:334] "Generic (PLEG): container finished" podID="5cbfd26c-728d-420c-9d40-b6f7870cff60" containerID="ffacb5c26d85f9daa0accf5b48ad054e1df3c95a5c4bb991f93025c2fc91b9b0" exitCode=0 Jan 20 17:45:40 crc kubenswrapper[4558]: I0120 17:45:40.318381 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-f7d8994df-dzxmh" Jan 20 17:45:40 crc kubenswrapper[4558]: I0120 17:45:40.318462 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-f7d8994df-dzxmh" event={"ID":"5cbfd26c-728d-420c-9d40-b6f7870cff60","Type":"ContainerDied","Data":"ffacb5c26d85f9daa0accf5b48ad054e1df3c95a5c4bb991f93025c2fc91b9b0"} Jan 20 17:45:40 crc kubenswrapper[4558]: I0120 17:45:40.318536 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-f7d8994df-dzxmh" event={"ID":"5cbfd26c-728d-420c-9d40-b6f7870cff60","Type":"ContainerDied","Data":"14969461ef1c49e3f6027ed7a315f54b58ba88d461278fb5b80f7de2ea26a3e6"} Jan 20 17:45:40 crc kubenswrapper[4558]: I0120 17:45:40.333668 4558 scope.go:117] "RemoveContainer" containerID="3988e808e906ec72c1e2c7130ece7e18110316238273aacbead8355ff8099aa5" Jan 20 17:45:40 crc kubenswrapper[4558]: I0120 17:45:40.354198 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:45:40 crc kubenswrapper[4558]: I0120 17:45:40.362058 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:45:40 crc kubenswrapper[4558]: I0120 17:45:40.369976 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-api-74bb4f5f4-5ppmg"] Jan 20 17:45:40 crc kubenswrapper[4558]: I0120 17:45:40.370044 4558 scope.go:117] "RemoveContainer" containerID="dcd025269e0c4cae451a75917b3ffb9392b169d6ba1b4f275051e1022e3b09b6" Jan 20 17:45:40 crc kubenswrapper[4558]: I0120 17:45:40.376242 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-api-74bb4f5f4-5ppmg"] Jan 20 17:45:40 crc kubenswrapper[4558]: I0120 17:45:40.381506 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-f7d8994df-dzxmh"] Jan 20 17:45:40 crc kubenswrapper[4558]: I0120 17:45:40.385549 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-f7d8994df-dzxmh"] Jan 20 17:45:40 crc kubenswrapper[4558]: I0120 17:45:40.396835 4558 scope.go:117] "RemoveContainer" containerID="985673ff538dfdfff39427adadbf62ca67de938e3a41cbb308d4b6035782e705" Jan 20 17:45:40 crc kubenswrapper[4558]: I0120 17:45:40.420027 4558 scope.go:117] "RemoveContainer" containerID="ffacb5c26d85f9daa0accf5b48ad054e1df3c95a5c4bb991f93025c2fc91b9b0" Jan 20 17:45:40 crc kubenswrapper[4558]: I0120 17:45:40.446886 4558 scope.go:117] "RemoveContainer" containerID="ffacb5c26d85f9daa0accf5b48ad054e1df3c95a5c4bb991f93025c2fc91b9b0" Jan 20 17:45:40 crc kubenswrapper[4558]: E0120 17:45:40.455671 4558 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"ffacb5c26d85f9daa0accf5b48ad054e1df3c95a5c4bb991f93025c2fc91b9b0\": container with ID starting with ffacb5c26d85f9daa0accf5b48ad054e1df3c95a5c4bb991f93025c2fc91b9b0 not found: ID does not exist" containerID="ffacb5c26d85f9daa0accf5b48ad054e1df3c95a5c4bb991f93025c2fc91b9b0" Jan 20 17:45:40 crc kubenswrapper[4558]: I0120 17:45:40.455730 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ffacb5c26d85f9daa0accf5b48ad054e1df3c95a5c4bb991f93025c2fc91b9b0"} err="failed to get container status \"ffacb5c26d85f9daa0accf5b48ad054e1df3c95a5c4bb991f93025c2fc91b9b0\": rpc error: code = NotFound desc = could not find container \"ffacb5c26d85f9daa0accf5b48ad054e1df3c95a5c4bb991f93025c2fc91b9b0\": container with ID starting with ffacb5c26d85f9daa0accf5b48ad054e1df3c95a5c4bb991f93025c2fc91b9b0 not found: ID does not exist" Jan 20 17:45:40 crc kubenswrapper[4558]: I0120 17:45:40.578998 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="03ca5cbb-ca72-46f1-a44f-42ac0fb7cf17" path="/var/lib/kubelet/pods/03ca5cbb-ca72-46f1-a44f-42ac0fb7cf17/volumes" Jan 20 17:45:40 crc kubenswrapper[4558]: I0120 17:45:40.579735 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2" path="/var/lib/kubelet/pods/1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2/volumes" Jan 20 17:45:40 crc kubenswrapper[4558]: I0120 17:45:40.580723 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="56173817-8246-4f37-b157-3890912004ca" path="/var/lib/kubelet/pods/56173817-8246-4f37-b157-3890912004ca/volumes" Jan 20 17:45:40 crc kubenswrapper[4558]: I0120 17:45:40.582339 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5cbfd26c-728d-420c-9d40-b6f7870cff60" path="/var/lib/kubelet/pods/5cbfd26c-728d-420c-9d40-b6f7870cff60/volumes" Jan 20 17:45:40 crc kubenswrapper[4558]: I0120 17:45:40.583104 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="70182fd8-a242-40f5-b20d-8ff4dd33e9b1" path="/var/lib/kubelet/pods/70182fd8-a242-40f5-b20d-8ff4dd33e9b1/volumes" Jan 20 17:45:40 crc kubenswrapper[4558]: I0120 17:45:40.584195 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7faabe16-9de5-49dc-bab6-44e173f4403c" path="/var/lib/kubelet/pods/7faabe16-9de5-49dc-bab6-44e173f4403c/volumes" Jan 20 17:45:40 crc kubenswrapper[4558]: I0120 17:45:40.587357 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8666f449-91a3-489d-bbc5-e7811619ea74" path="/var/lib/kubelet/pods/8666f449-91a3-489d-bbc5-e7811619ea74/volumes" Jan 20 17:45:40 crc kubenswrapper[4558]: I0120 17:45:40.588638 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="86aff100-d474-48ed-b673-4dae7c0722cf" path="/var/lib/kubelet/pods/86aff100-d474-48ed-b673-4dae7c0722cf/volumes" Jan 20 17:45:40 crc kubenswrapper[4558]: I0120 17:45:40.589254 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d619cd37-a474-4965-b382-749ed6d55d6d" path="/var/lib/kubelet/pods/d619cd37-a474-4965-b382-749ed6d55d6d/volumes" Jan 20 17:45:40 crc kubenswrapper[4558]: I0120 17:45:40.590312 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f87490e8-557d-41e3-a07b-8fe12147b315" path="/var/lib/kubelet/pods/f87490e8-557d-41e3-a07b-8fe12147b315/volumes" Jan 20 17:45:40 crc kubenswrapper[4558]: I0120 17:45:40.590979 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod 
volumes dir" podUID="ff0d0703-8173-4ac0-afc0-e673feaef286" path="/var/lib/kubelet/pods/ff0d0703-8173-4ac0-afc0-e673feaef286/volumes" Jan 20 17:45:40 crc kubenswrapper[4558]: E0120 17:45:40.808730 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Jan 20 17:45:40 crc kubenswrapper[4558]: E0120 17:45:40.808816 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/de4d8126-91cf-4149-bec4-4accaf558308-config-data podName:de4d8126-91cf-4149-bec4-4accaf558308 nodeName:}" failed. No retries permitted until 2026-01-20 17:45:48.808791117 +0000 UTC m=+3842.569129085 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/de4d8126-91cf-4149-bec4-4accaf558308-config-data") pod "rabbitmq-cell1-server-0" (UID: "de4d8126-91cf-4149-bec4-4accaf558308") : configmap "rabbitmq-cell1-config-data" not found Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.355248 4558 generic.go:334] "Generic (PLEG): container finished" podID="de4d8126-91cf-4149-bec4-4accaf558308" containerID="f79892859b62fff49932e58f16de3c5503fa6029ca060df20454e36b9e9f37db" exitCode=0 Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.355305 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" event={"ID":"de4d8126-91cf-4149-bec4-4accaf558308","Type":"ContainerDied","Data":"f79892859b62fff49932e58f16de3c5503fa6029ca060df20454e36b9e9f37db"} Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.355376 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" event={"ID":"de4d8126-91cf-4149-bec4-4accaf558308","Type":"ContainerDied","Data":"e82ed5c52b8d8654bdc167e0ef446792c35695ed1870b83a2674a302926cfaf9"} Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.355393 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e82ed5c52b8d8654bdc167e0ef446792c35695ed1870b83a2674a302926cfaf9" Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.357560 4558 generic.go:334] "Generic (PLEG): container finished" podID="4765e529-9729-4d27-a252-c0c9a7b67beb" containerID="28200cb794147f2ba9210855e02aa06f0b33943f75d1f48cedc60cedc86b6d96" exitCode=0 Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.357628 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-0" event={"ID":"4765e529-9729-4d27-a252-c0c9a7b67beb","Type":"ContainerDied","Data":"28200cb794147f2ba9210855e02aa06f0b33943f75d1f48cedc60cedc86b6d96"} Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.373151 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.423912 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/de4d8126-91cf-4149-bec4-4accaf558308-server-conf\") pod \"de4d8126-91cf-4149-bec4-4accaf558308\" (UID: \"de4d8126-91cf-4149-bec4-4accaf558308\") " Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.424088 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/de4d8126-91cf-4149-bec4-4accaf558308-rabbitmq-erlang-cookie\") pod \"de4d8126-91cf-4149-bec4-4accaf558308\" (UID: \"de4d8126-91cf-4149-bec4-4accaf558308\") " Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.424143 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/de4d8126-91cf-4149-bec4-4accaf558308-rabbitmq-confd\") pod \"de4d8126-91cf-4149-bec4-4accaf558308\" (UID: \"de4d8126-91cf-4149-bec4-4accaf558308\") " Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.424274 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/de4d8126-91cf-4149-bec4-4accaf558308-erlang-cookie-secret\") pod \"de4d8126-91cf-4149-bec4-4accaf558308\" (UID: \"de4d8126-91cf-4149-bec4-4accaf558308\") " Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.424322 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kqptr\" (UniqueName: \"kubernetes.io/projected/de4d8126-91cf-4149-bec4-4accaf558308-kube-api-access-kqptr\") pod \"de4d8126-91cf-4149-bec4-4accaf558308\" (UID: \"de4d8126-91cf-4149-bec4-4accaf558308\") " Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.424353 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/de4d8126-91cf-4149-bec4-4accaf558308-config-data\") pod \"de4d8126-91cf-4149-bec4-4accaf558308\" (UID: \"de4d8126-91cf-4149-bec4-4accaf558308\") " Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.424392 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"de4d8126-91cf-4149-bec4-4accaf558308\" (UID: \"de4d8126-91cf-4149-bec4-4accaf558308\") " Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.424461 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/de4d8126-91cf-4149-bec4-4accaf558308-rabbitmq-plugins\") pod \"de4d8126-91cf-4149-bec4-4accaf558308\" (UID: \"de4d8126-91cf-4149-bec4-4accaf558308\") " Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.424491 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/de4d8126-91cf-4149-bec4-4accaf558308-plugins-conf\") pod \"de4d8126-91cf-4149-bec4-4accaf558308\" (UID: \"de4d8126-91cf-4149-bec4-4accaf558308\") " Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.424509 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/de4d8126-91cf-4149-bec4-4accaf558308-rabbitmq-tls\") pod 
\"de4d8126-91cf-4149-bec4-4accaf558308\" (UID: \"de4d8126-91cf-4149-bec4-4accaf558308\") " Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.424595 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/de4d8126-91cf-4149-bec4-4accaf558308-pod-info\") pod \"de4d8126-91cf-4149-bec4-4accaf558308\" (UID: \"de4d8126-91cf-4149-bec4-4accaf558308\") " Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.424797 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/de4d8126-91cf-4149-bec4-4accaf558308-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "de4d8126-91cf-4149-bec4-4accaf558308" (UID: "de4d8126-91cf-4149-bec4-4accaf558308"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.425111 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/de4d8126-91cf-4149-bec4-4accaf558308-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.425601 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/de4d8126-91cf-4149-bec4-4accaf558308-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "de4d8126-91cf-4149-bec4-4accaf558308" (UID: "de4d8126-91cf-4149-bec4-4accaf558308"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.426281 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/de4d8126-91cf-4149-bec4-4accaf558308-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "de4d8126-91cf-4149-bec4-4accaf558308" (UID: "de4d8126-91cf-4149-bec4-4accaf558308"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.430771 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/de4d8126-91cf-4149-bec4-4accaf558308-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "de4d8126-91cf-4149-bec4-4accaf558308" (UID: "de4d8126-91cf-4149-bec4-4accaf558308"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.431899 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/de4d8126-91cf-4149-bec4-4accaf558308-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "de4d8126-91cf-4149-bec4-4accaf558308" (UID: "de4d8126-91cf-4149-bec4-4accaf558308"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.431948 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/de4d8126-91cf-4149-bec4-4accaf558308-pod-info" (OuterVolumeSpecName: "pod-info") pod "de4d8126-91cf-4149-bec4-4accaf558308" (UID: "de4d8126-91cf-4149-bec4-4accaf558308"). InnerVolumeSpecName "pod-info". 
PluginName "kubernetes.io/downward-api", VolumeGidValue "" Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.432441 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/de4d8126-91cf-4149-bec4-4accaf558308-kube-api-access-kqptr" (OuterVolumeSpecName: "kube-api-access-kqptr") pod "de4d8126-91cf-4149-bec4-4accaf558308" (UID: "de4d8126-91cf-4149-bec4-4accaf558308"). InnerVolumeSpecName "kube-api-access-kqptr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.432906 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "persistence") pod "de4d8126-91cf-4149-bec4-4accaf558308" (UID: "de4d8126-91cf-4149-bec4-4accaf558308"). InnerVolumeSpecName "local-storage08-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.445655 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/de4d8126-91cf-4149-bec4-4accaf558308-config-data" (OuterVolumeSpecName: "config-data") pod "de4d8126-91cf-4149-bec4-4accaf558308" (UID: "de4d8126-91cf-4149-bec4-4accaf558308"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.457208 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/de4d8126-91cf-4149-bec4-4accaf558308-server-conf" (OuterVolumeSpecName: "server-conf") pod "de4d8126-91cf-4149-bec4-4accaf558308" (UID: "de4d8126-91cf-4149-bec4-4accaf558308"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.487360 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/de4d8126-91cf-4149-bec4-4accaf558308-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "de4d8126-91cf-4149-bec4-4accaf558308" (UID: "de4d8126-91cf-4149-bec4-4accaf558308"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.524033 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.527595 4558 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/de4d8126-91cf-4149-bec4-4accaf558308-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.527628 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kqptr\" (UniqueName: \"kubernetes.io/projected/de4d8126-91cf-4149-bec4-4accaf558308-kube-api-access-kqptr\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.527663 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/de4d8126-91cf-4149-bec4-4accaf558308-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.527702 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.527716 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/de4d8126-91cf-4149-bec4-4accaf558308-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.527727 4558 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/de4d8126-91cf-4149-bec4-4accaf558308-plugins-conf\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.527737 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/de4d8126-91cf-4149-bec4-4accaf558308-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.527749 4558 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/de4d8126-91cf-4149-bec4-4accaf558308-pod-info\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.527763 4558 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/de4d8126-91cf-4149-bec4-4accaf558308-server-conf\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.527772 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/de4d8126-91cf-4149-bec4-4accaf558308-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.544351 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc" Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.628673 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/4765e529-9729-4d27-a252-c0c9a7b67beb-rabbitmq-erlang-cookie\") pod \"4765e529-9729-4d27-a252-c0c9a7b67beb\" (UID: \"4765e529-9729-4d27-a252-c0c9a7b67beb\") " Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.628819 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: 
\"kubernetes.io/projected/4765e529-9729-4d27-a252-c0c9a7b67beb-rabbitmq-confd\") pod \"4765e529-9729-4d27-a252-c0c9a7b67beb\" (UID: \"4765e529-9729-4d27-a252-c0c9a7b67beb\") " Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.628894 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/4765e529-9729-4d27-a252-c0c9a7b67beb-rabbitmq-plugins\") pod \"4765e529-9729-4d27-a252-c0c9a7b67beb\" (UID: \"4765e529-9729-4d27-a252-c0c9a7b67beb\") " Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.628996 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/4765e529-9729-4d27-a252-c0c9a7b67beb-plugins-conf\") pod \"4765e529-9729-4d27-a252-c0c9a7b67beb\" (UID: \"4765e529-9729-4d27-a252-c0c9a7b67beb\") " Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.629089 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/4765e529-9729-4d27-a252-c0c9a7b67beb-rabbitmq-tls\") pod \"4765e529-9729-4d27-a252-c0c9a7b67beb\" (UID: \"4765e529-9729-4d27-a252-c0c9a7b67beb\") " Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.629216 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4765e529-9729-4d27-a252-c0c9a7b67beb-config-data\") pod \"4765e529-9729-4d27-a252-c0c9a7b67beb\" (UID: \"4765e529-9729-4d27-a252-c0c9a7b67beb\") " Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.629319 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/4765e529-9729-4d27-a252-c0c9a7b67beb-erlang-cookie-secret\") pod \"4765e529-9729-4d27-a252-c0c9a7b67beb\" (UID: \"4765e529-9729-4d27-a252-c0c9a7b67beb\") " Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.629423 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xw7h7\" (UniqueName: \"kubernetes.io/projected/4765e529-9729-4d27-a252-c0c9a7b67beb-kube-api-access-xw7h7\") pod \"4765e529-9729-4d27-a252-c0c9a7b67beb\" (UID: \"4765e529-9729-4d27-a252-c0c9a7b67beb\") " Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.629502 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"4765e529-9729-4d27-a252-c0c9a7b67beb\" (UID: \"4765e529-9729-4d27-a252-c0c9a7b67beb\") " Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.629571 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4765e529-9729-4d27-a252-c0c9a7b67beb-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "4765e529-9729-4d27-a252-c0c9a7b67beb" (UID: "4765e529-9729-4d27-a252-c0c9a7b67beb"). InnerVolumeSpecName "rabbitmq-plugins". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.629591 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/4765e529-9729-4d27-a252-c0c9a7b67beb-server-conf\") pod \"4765e529-9729-4d27-a252-c0c9a7b67beb\" (UID: \"4765e529-9729-4d27-a252-c0c9a7b67beb\") " Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.629695 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4765e529-9729-4d27-a252-c0c9a7b67beb-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "4765e529-9729-4d27-a252-c0c9a7b67beb" (UID: "4765e529-9729-4d27-a252-c0c9a7b67beb"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.629774 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/4765e529-9729-4d27-a252-c0c9a7b67beb-pod-info\") pod \"4765e529-9729-4d27-a252-c0c9a7b67beb\" (UID: \"4765e529-9729-4d27-a252-c0c9a7b67beb\") " Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.630821 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.630865 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/4765e529-9729-4d27-a252-c0c9a7b67beb-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.630877 4558 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/4765e529-9729-4d27-a252-c0c9a7b67beb-plugins-conf\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.631382 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4765e529-9729-4d27-a252-c0c9a7b67beb-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "4765e529-9729-4d27-a252-c0c9a7b67beb" (UID: "4765e529-9729-4d27-a252-c0c9a7b67beb"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.632888 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4765e529-9729-4d27-a252-c0c9a7b67beb-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "4765e529-9729-4d27-a252-c0c9a7b67beb" (UID: "4765e529-9729-4d27-a252-c0c9a7b67beb"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.635005 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4765e529-9729-4d27-a252-c0c9a7b67beb-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "4765e529-9729-4d27-a252-c0c9a7b67beb" (UID: "4765e529-9729-4d27-a252-c0c9a7b67beb"). InnerVolumeSpecName "rabbitmq-tls". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.635112 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "persistence") pod "4765e529-9729-4d27-a252-c0c9a7b67beb" (UID: "4765e529-9729-4d27-a252-c0c9a7b67beb"). InnerVolumeSpecName "local-storage05-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.635284 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/4765e529-9729-4d27-a252-c0c9a7b67beb-pod-info" (OuterVolumeSpecName: "pod-info") pod "4765e529-9729-4d27-a252-c0c9a7b67beb" (UID: "4765e529-9729-4d27-a252-c0c9a7b67beb"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.636309 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4765e529-9729-4d27-a252-c0c9a7b67beb-kube-api-access-xw7h7" (OuterVolumeSpecName: "kube-api-access-xw7h7") pod "4765e529-9729-4d27-a252-c0c9a7b67beb" (UID: "4765e529-9729-4d27-a252-c0c9a7b67beb"). InnerVolumeSpecName "kube-api-access-xw7h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.658013 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4765e529-9729-4d27-a252-c0c9a7b67beb-server-conf" (OuterVolumeSpecName: "server-conf") pod "4765e529-9729-4d27-a252-c0c9a7b67beb" (UID: "4765e529-9729-4d27-a252-c0c9a7b67beb"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.661417 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4765e529-9729-4d27-a252-c0c9a7b67beb-config-data" (OuterVolumeSpecName: "config-data") pod "4765e529-9729-4d27-a252-c0c9a7b67beb" (UID: "4765e529-9729-4d27-a252-c0c9a7b67beb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.685619 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4765e529-9729-4d27-a252-c0c9a7b67beb-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "4765e529-9729-4d27-a252-c0c9a7b67beb" (UID: "4765e529-9729-4d27-a252-c0c9a7b67beb"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.732902 4558 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/4765e529-9729-4d27-a252-c0c9a7b67beb-pod-info\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.732948 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/4765e529-9729-4d27-a252-c0c9a7b67beb-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.732965 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/4765e529-9729-4d27-a252-c0c9a7b67beb-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.732977 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/4765e529-9729-4d27-a252-c0c9a7b67beb-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.732987 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4765e529-9729-4d27-a252-c0c9a7b67beb-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.732996 4558 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/4765e529-9729-4d27-a252-c0c9a7b67beb-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.733007 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xw7h7\" (UniqueName: \"kubernetes.io/projected/4765e529-9729-4d27-a252-c0c9a7b67beb-kube-api-access-xw7h7\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.733047 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" " Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.733060 4558 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/4765e529-9729-4d27-a252-c0c9a7b67beb-server-conf\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.745385 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc" Jan 20 17:45:41 crc kubenswrapper[4558]: I0120 17:45:41.834531 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:42 crc kubenswrapper[4558]: E0120 17:45:42.297404 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4568dafb1cb4a5468a42a5482158e5c167d869bcb8c626b55be898275037c53d is running failed: container process not found" containerID="4568dafb1cb4a5468a42a5482158e5c167d869bcb8c626b55be898275037c53d" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:45:42 crc kubenswrapper[4558]: E0120 17:45:42.297750 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = 
NotFound desc = container is not created or running: checking if PID of 4568dafb1cb4a5468a42a5482158e5c167d869bcb8c626b55be898275037c53d is running failed: container process not found" containerID="4568dafb1cb4a5468a42a5482158e5c167d869bcb8c626b55be898275037c53d" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:45:42 crc kubenswrapper[4558]: E0120 17:45:42.298125 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4568dafb1cb4a5468a42a5482158e5c167d869bcb8c626b55be898275037c53d is running failed: container process not found" containerID="4568dafb1cb4a5468a42a5482158e5c167d869bcb8c626b55be898275037c53d" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:45:42 crc kubenswrapper[4558]: E0120 17:45:42.298204 4558 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4568dafb1cb4a5468a42a5482158e5c167d869bcb8c626b55be898275037c53d is running failed: container process not found" probeType="Readiness" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="50cacddd-ebea-477f-af64-6e96a09a242e" containerName="nova-cell0-conductor-conductor" Jan 20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.396314 4558 generic.go:334] "Generic (PLEG): container finished" podID="2c77f6fd-736b-49e4-aa1c-ddffa451f3dc" containerID="4bb502fc98a21930b2123d685d6ca4b42bbac87bd71f98a6fe668e8dfce8ed68" exitCode=0 Jan 20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.396377 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"2c77f6fd-736b-49e4-aa1c-ddffa451f3dc","Type":"ContainerDied","Data":"4bb502fc98a21930b2123d685d6ca4b42bbac87bd71f98a6fe668e8dfce8ed68"} Jan 20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.398090 4558 generic.go:334] "Generic (PLEG): container finished" podID="ea72c008-3a66-4577-8042-4b1e0ed1cca6" containerID="f53b402a7de8ccb9267cb06efb993b2c9befd769af338febfa1cab472df02511" exitCode=0 Jan 20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.398133 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"ea72c008-3a66-4577-8042-4b1e0ed1cca6","Type":"ContainerDied","Data":"f53b402a7de8ccb9267cb06efb993b2c9befd769af338febfa1cab472df02511"} Jan 20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.398177 4558 scope.go:117] "RemoveContainer" containerID="84973fad4d843f75c78030000ec0fa0ce538b6a078241540dc44a79641a6a858" Jan 20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.401369 4558 generic.go:334] "Generic (PLEG): container finished" podID="50cacddd-ebea-477f-af64-6e96a09a242e" containerID="4568dafb1cb4a5468a42a5482158e5c167d869bcb8c626b55be898275037c53d" exitCode=0 Jan 20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.401426 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"50cacddd-ebea-477f-af64-6e96a09a242e","Type":"ContainerDied","Data":"4568dafb1cb4a5468a42a5482158e5c167d869bcb8c626b55be898275037c53d"} Jan 20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.403179 4558 generic.go:334] "Generic (PLEG): container finished" podID="ddd57d3b-c9de-46e3-897e-1a50ae49630e" containerID="bfedd515a4cd08318345cf62dd830e7f182ab82a54ada6e840a63a36d085cdc9" exitCode=0 Jan 20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.403220 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"ddd57d3b-c9de-46e3-897e-1a50ae49630e","Type":"ContainerDied","Data":"bfedd515a4cd08318345cf62dd830e7f182ab82a54ada6e840a63a36d085cdc9"} Jan 20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.405681 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.405703 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.405674 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-0" event={"ID":"4765e529-9729-4d27-a252-c0c9a7b67beb","Type":"ContainerDied","Data":"4884b3dd50e53b21f38132f924e5c4c791bcc5f669fa27a312699a6c27ce6c02"} Jan 20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.442147 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.447825 4558 scope.go:117] "RemoveContainer" containerID="f175857b081b6702705f8a9a0de77c7f1bd59b4f0ad1848d4abf509a9f1c1ae7" Jan 20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.536971 4558 scope.go:117] "RemoveContainer" containerID="28200cb794147f2ba9210855e02aa06f0b33943f75d1f48cedc60cedc86b6d96" Jan 20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.542003 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.544382 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c77f6fd-736b-49e4-aa1c-ddffa451f3dc-combined-ca-bundle\") pod \"2c77f6fd-736b-49e4-aa1c-ddffa451f3dc\" (UID: \"2c77f6fd-736b-49e4-aa1c-ddffa451f3dc\") " Jan 20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.544504 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2c77f6fd-736b-49e4-aa1c-ddffa451f3dc-config-data\") pod \"2c77f6fd-736b-49e4-aa1c-ddffa451f3dc\" (UID: \"2c77f6fd-736b-49e4-aa1c-ddffa451f3dc\") " Jan 20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.544646 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2c77f6fd-736b-49e4-aa1c-ddffa451f3dc-run-httpd\") pod \"2c77f6fd-736b-49e4-aa1c-ddffa451f3dc\" (UID: \"2c77f6fd-736b-49e4-aa1c-ddffa451f3dc\") " Jan 20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.544710 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2c77f6fd-736b-49e4-aa1c-ddffa451f3dc-scripts\") pod \"2c77f6fd-736b-49e4-aa1c-ddffa451f3dc\" (UID: \"2c77f6fd-736b-49e4-aa1c-ddffa451f3dc\") " Jan 20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.544834 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/2c77f6fd-736b-49e4-aa1c-ddffa451f3dc-ceilometer-tls-certs\") pod \"2c77f6fd-736b-49e4-aa1c-ddffa451f3dc\" (UID: \"2c77f6fd-736b-49e4-aa1c-ddffa451f3dc\") " Jan 20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.545254 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/2c77f6fd-736b-49e4-aa1c-ddffa451f3dc-log-httpd\") pod \"2c77f6fd-736b-49e4-aa1c-ddffa451f3dc\" (UID: \"2c77f6fd-736b-49e4-aa1c-ddffa451f3dc\") " Jan 20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.545339 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2c77f6fd-736b-49e4-aa1c-ddffa451f3dc-sg-core-conf-yaml\") pod \"2c77f6fd-736b-49e4-aa1c-ddffa451f3dc\" (UID: \"2c77f6fd-736b-49e4-aa1c-ddffa451f3dc\") " Jan 20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.545523 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l7tcl\" (UniqueName: \"kubernetes.io/projected/2c77f6fd-736b-49e4-aa1c-ddffa451f3dc-kube-api-access-l7tcl\") pod \"2c77f6fd-736b-49e4-aa1c-ddffa451f3dc\" (UID: \"2c77f6fd-736b-49e4-aa1c-ddffa451f3dc\") " Jan 20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.545918 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2c77f6fd-736b-49e4-aa1c-ddffa451f3dc-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "2c77f6fd-736b-49e4-aa1c-ddffa451f3dc" (UID: "2c77f6fd-736b-49e4-aa1c-ddffa451f3dc"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.546295 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2c77f6fd-736b-49e4-aa1c-ddffa451f3dc-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "2c77f6fd-736b-49e4-aa1c-ddffa451f3dc" (UID: "2c77f6fd-736b-49e4-aa1c-ddffa451f3dc"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.548543 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.553867 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-0"] Jan 20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.557146 4558 scope.go:117] "RemoveContainer" containerID="d93a1933ae3d32819f090f6a7b2f44526a5ddb9fbca636dcce7b4b06b41e979c" Jan 20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.561032 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-0"] Jan 20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.562992 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c77f6fd-736b-49e4-aa1c-ddffa451f3dc-scripts" (OuterVolumeSpecName: "scripts") pod "2c77f6fd-736b-49e4-aa1c-ddffa451f3dc" (UID: "2c77f6fd-736b-49e4-aa1c-ddffa451f3dc"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.563373 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2c77f6fd-736b-49e4-aa1c-ddffa451f3dc-kube-api-access-l7tcl" (OuterVolumeSpecName: "kube-api-access-l7tcl") pod "2c77f6fd-736b-49e4-aa1c-ddffa451f3dc" (UID: "2c77f6fd-736b-49e4-aa1c-ddffa451f3dc"). InnerVolumeSpecName "kube-api-access-l7tcl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.573663 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4765e529-9729-4d27-a252-c0c9a7b67beb" path="/var/lib/kubelet/pods/4765e529-9729-4d27-a252-c0c9a7b67beb/volumes" Jan 20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.574729 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="de4d8126-91cf-4149-bec4-4accaf558308" path="/var/lib/kubelet/pods/de4d8126-91cf-4149-bec4-4accaf558308/volumes" Jan 20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.596654 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c77f6fd-736b-49e4-aa1c-ddffa451f3dc-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "2c77f6fd-736b-49e4-aa1c-ddffa451f3dc" (UID: "2c77f6fd-736b-49e4-aa1c-ddffa451f3dc"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.597610 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c77f6fd-736b-49e4-aa1c-ddffa451f3dc-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "2c77f6fd-736b-49e4-aa1c-ddffa451f3dc" (UID: "2c77f6fd-736b-49e4-aa1c-ddffa451f3dc"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.612755 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c77f6fd-736b-49e4-aa1c-ddffa451f3dc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2c77f6fd-736b-49e4-aa1c-ddffa451f3dc" (UID: "2c77f6fd-736b-49e4-aa1c-ddffa451f3dc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.644574 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.646738 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c77f6fd-736b-49e4-aa1c-ddffa451f3dc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.646774 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2c77f6fd-736b-49e4-aa1c-ddffa451f3dc-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.646783 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2c77f6fd-736b-49e4-aa1c-ddffa451f3dc-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.646795 4558 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/2c77f6fd-736b-49e4-aa1c-ddffa451f3dc-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.646806 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2c77f6fd-736b-49e4-aa1c-ddffa451f3dc-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.646816 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/2c77f6fd-736b-49e4-aa1c-ddffa451f3dc-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.646826 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l7tcl\" (UniqueName: \"kubernetes.io/projected/2c77f6fd-736b-49e4-aa1c-ddffa451f3dc-kube-api-access-l7tcl\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.665545 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c77f6fd-736b-49e4-aa1c-ddffa451f3dc-config-data" (OuterVolumeSpecName: "config-data") pod "2c77f6fd-736b-49e4-aa1c-ddffa451f3dc" (UID: "2c77f6fd-736b-49e4-aa1c-ddffa451f3dc"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.695547 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.697591 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.747841 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea72c008-3a66-4577-8042-4b1e0ed1cca6-combined-ca-bundle\") pod \"ea72c008-3a66-4577-8042-4b1e0ed1cca6\" (UID: \"ea72c008-3a66-4577-8042-4b1e0ed1cca6\") " Jan 20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.747911 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b9gz4\" (UniqueName: \"kubernetes.io/projected/ea72c008-3a66-4577-8042-4b1e0ed1cca6-kube-api-access-b9gz4\") pod \"ea72c008-3a66-4577-8042-4b1e0ed1cca6\" (UID: \"ea72c008-3a66-4577-8042-4b1e0ed1cca6\") " Jan 20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.748086 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ddd57d3b-c9de-46e3-897e-1a50ae49630e-config-data\") pod \"ddd57d3b-c9de-46e3-897e-1a50ae49630e\" (UID: \"ddd57d3b-c9de-46e3-897e-1a50ae49630e\") " Jan 20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.748133 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ddd57d3b-c9de-46e3-897e-1a50ae49630e-combined-ca-bundle\") pod \"ddd57d3b-c9de-46e3-897e-1a50ae49630e\" (UID: \"ddd57d3b-c9de-46e3-897e-1a50ae49630e\") " Jan 20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.748186 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v7wft\" (UniqueName: \"kubernetes.io/projected/ddd57d3b-c9de-46e3-897e-1a50ae49630e-kube-api-access-v7wft\") pod \"ddd57d3b-c9de-46e3-897e-1a50ae49630e\" (UID: \"ddd57d3b-c9de-46e3-897e-1a50ae49630e\") " Jan 20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.748235 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50cacddd-ebea-477f-af64-6e96a09a242e-config-data\") pod \"50cacddd-ebea-477f-af64-6e96a09a242e\" (UID: \"50cacddd-ebea-477f-af64-6e96a09a242e\") " Jan 20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.748299 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50cacddd-ebea-477f-af64-6e96a09a242e-combined-ca-bundle\") pod \"50cacddd-ebea-477f-af64-6e96a09a242e\" (UID: \"50cacddd-ebea-477f-af64-6e96a09a242e\") " Jan 20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.748371 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea72c008-3a66-4577-8042-4b1e0ed1cca6-config-data\") pod \"ea72c008-3a66-4577-8042-4b1e0ed1cca6\" (UID: \"ea72c008-3a66-4577-8042-4b1e0ed1cca6\") " Jan 20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.748421 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8slx9\" (UniqueName: \"kubernetes.io/projected/50cacddd-ebea-477f-af64-6e96a09a242e-kube-api-access-8slx9\") pod \"50cacddd-ebea-477f-af64-6e96a09a242e\" (UID: \"50cacddd-ebea-477f-af64-6e96a09a242e\") " Jan 20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.748961 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2c77f6fd-736b-49e4-aa1c-ddffa451f3dc-config-data\") on node \"crc\" DevicePath \"\"" Jan 
20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.752714 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ddd57d3b-c9de-46e3-897e-1a50ae49630e-kube-api-access-v7wft" (OuterVolumeSpecName: "kube-api-access-v7wft") pod "ddd57d3b-c9de-46e3-897e-1a50ae49630e" (UID: "ddd57d3b-c9de-46e3-897e-1a50ae49630e"). InnerVolumeSpecName "kube-api-access-v7wft". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.753179 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/50cacddd-ebea-477f-af64-6e96a09a242e-kube-api-access-8slx9" (OuterVolumeSpecName: "kube-api-access-8slx9") pod "50cacddd-ebea-477f-af64-6e96a09a242e" (UID: "50cacddd-ebea-477f-af64-6e96a09a242e"). InnerVolumeSpecName "kube-api-access-8slx9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.754937 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ea72c008-3a66-4577-8042-4b1e0ed1cca6-kube-api-access-b9gz4" (OuterVolumeSpecName: "kube-api-access-b9gz4") pod "ea72c008-3a66-4577-8042-4b1e0ed1cca6" (UID: "ea72c008-3a66-4577-8042-4b1e0ed1cca6"). InnerVolumeSpecName "kube-api-access-b9gz4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.768573 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea72c008-3a66-4577-8042-4b1e0ed1cca6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ea72c008-3a66-4577-8042-4b1e0ed1cca6" (UID: "ea72c008-3a66-4577-8042-4b1e0ed1cca6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.770053 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/50cacddd-ebea-477f-af64-6e96a09a242e-config-data" (OuterVolumeSpecName: "config-data") pod "50cacddd-ebea-477f-af64-6e96a09a242e" (UID: "50cacddd-ebea-477f-af64-6e96a09a242e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.770560 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/50cacddd-ebea-477f-af64-6e96a09a242e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "50cacddd-ebea-477f-af64-6e96a09a242e" (UID: "50cacddd-ebea-477f-af64-6e96a09a242e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.770984 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ddd57d3b-c9de-46e3-897e-1a50ae49630e-config-data" (OuterVolumeSpecName: "config-data") pod "ddd57d3b-c9de-46e3-897e-1a50ae49630e" (UID: "ddd57d3b-c9de-46e3-897e-1a50ae49630e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.771552 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ddd57d3b-c9de-46e3-897e-1a50ae49630e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ddd57d3b-c9de-46e3-897e-1a50ae49630e" (UID: "ddd57d3b-c9de-46e3-897e-1a50ae49630e"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.773439 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea72c008-3a66-4577-8042-4b1e0ed1cca6-config-data" (OuterVolumeSpecName: "config-data") pod "ea72c008-3a66-4577-8042-4b1e0ed1cca6" (UID: "ea72c008-3a66-4577-8042-4b1e0ed1cca6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.850844 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50cacddd-ebea-477f-af64-6e96a09a242e-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.850936 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50cacddd-ebea-477f-af64-6e96a09a242e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.851006 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea72c008-3a66-4577-8042-4b1e0ed1cca6-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.851067 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8slx9\" (UniqueName: \"kubernetes.io/projected/50cacddd-ebea-477f-af64-6e96a09a242e-kube-api-access-8slx9\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.851113 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea72c008-3a66-4577-8042-4b1e0ed1cca6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.851155 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b9gz4\" (UniqueName: \"kubernetes.io/projected/ea72c008-3a66-4577-8042-4b1e0ed1cca6-kube-api-access-b9gz4\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.851227 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ddd57d3b-c9de-46e3-897e-1a50ae49630e-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.851269 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ddd57d3b-c9de-46e3-897e-1a50ae49630e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:42 crc kubenswrapper[4558]: I0120 17:45:42.851319 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v7wft\" (UniqueName: \"kubernetes.io/projected/ddd57d3b-c9de-46e3-897e-1a50ae49630e-kube-api-access-v7wft\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:43 crc kubenswrapper[4558]: I0120 17:45:43.420193 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"2c77f6fd-736b-49e4-aa1c-ddffa451f3dc","Type":"ContainerDied","Data":"22812fda67cb9c71ccd4f7d25fafd12b6c841f77325fa0fa6b2065bc9ef9e53a"} Jan 20 17:45:43 crc kubenswrapper[4558]: I0120 17:45:43.420992 4558 scope.go:117] "RemoveContainer" containerID="245499fc8d9dd24531eb844ac444830c3cbb3a8f2341bb67bd362bf2fe1577af" Jan 20 17:45:43 crc kubenswrapper[4558]: I0120 17:45:43.420627 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:45:43 crc kubenswrapper[4558]: I0120 17:45:43.421886 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"ea72c008-3a66-4577-8042-4b1e0ed1cca6","Type":"ContainerDied","Data":"1b9d5de1f46ed508a2caef11a3592fd5ba3a1a309d20922561e292e00539d3fb"} Jan 20 17:45:43 crc kubenswrapper[4558]: I0120 17:45:43.422003 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:45:43 crc kubenswrapper[4558]: I0120 17:45:43.425284 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"50cacddd-ebea-477f-af64-6e96a09a242e","Type":"ContainerDied","Data":"730ae867cde8505304ebc18f48a08da8136835e4837c2c2e24d35a2c01d439a6"} Jan 20 17:45:43 crc kubenswrapper[4558]: I0120 17:45:43.425297 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:45:43 crc kubenswrapper[4558]: I0120 17:45:43.428131 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:45:43 crc kubenswrapper[4558]: I0120 17:45:43.428156 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"ddd57d3b-c9de-46e3-897e-1a50ae49630e","Type":"ContainerDied","Data":"5f2f816ffd1e3e218743f1527560baf025cde6fe3d755067facb36d4394755c3"} Jan 20 17:45:43 crc kubenswrapper[4558]: I0120 17:45:43.456314 4558 scope.go:117] "RemoveContainer" containerID="720bb601be9d0a73599645a6bba6fa27fea9bddfb2119833260b2abb7695bc2c" Jan 20 17:45:43 crc kubenswrapper[4558]: I0120 17:45:43.464063 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:45:43 crc kubenswrapper[4558]: I0120 17:45:43.471930 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:45:43 crc kubenswrapper[4558]: I0120 17:45:43.529277 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:45:43 crc kubenswrapper[4558]: I0120 17:45:43.536510 4558 scope.go:117] "RemoveContainer" containerID="4bb502fc98a21930b2123d685d6ca4b42bbac87bd71f98a6fe668e8dfce8ed68" Jan 20 17:45:43 crc kubenswrapper[4558]: I0120 17:45:43.538137 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:45:43 crc kubenswrapper[4558]: I0120 17:45:43.545099 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:45:43 crc kubenswrapper[4558]: I0120 17:45:43.549206 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:45:43 crc kubenswrapper[4558]: I0120 17:45:43.552685 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:45:43 crc kubenswrapper[4558]: I0120 17:45:43.556017 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:45:43 crc kubenswrapper[4558]: I0120 17:45:43.558517 4558 scope.go:117] "RemoveContainer" containerID="592aa64db3a21b522ec99255c04163435c7f2ca3c55140c0e0521af26785d506" Jan 20 17:45:43 crc kubenswrapper[4558]: I0120 17:45:43.575050 4558 scope.go:117] 
"RemoveContainer" containerID="f53b402a7de8ccb9267cb06efb993b2c9befd769af338febfa1cab472df02511" Jan 20 17:45:43 crc kubenswrapper[4558]: I0120 17:45:43.591693 4558 scope.go:117] "RemoveContainer" containerID="4568dafb1cb4a5468a42a5482158e5c167d869bcb8c626b55be898275037c53d" Jan 20 17:45:43 crc kubenswrapper[4558]: I0120 17:45:43.609753 4558 scope.go:117] "RemoveContainer" containerID="bfedd515a4cd08318345cf62dd830e7f182ab82a54ada6e840a63a36d085cdc9" Jan 20 17:45:44 crc kubenswrapper[4558]: I0120 17:45:44.264074 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-2lft9" Jan 20 17:45:44 crc kubenswrapper[4558]: I0120 17:45:44.309425 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-7659b55585-bzrvg"] Jan 20 17:45:44 crc kubenswrapper[4558]: I0120 17:45:44.309670 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7659b55585-bzrvg" podUID="0d54110a-6349-45a3-ad58-1bf105f65293" containerName="dnsmasq-dns" containerID="cri-o://83708f9e65c27738fe52f73ee0f27f750807ccf23623168fd5a88b2d1f7fefe6" gracePeriod=10 Jan 20 17:45:44 crc kubenswrapper[4558]: I0120 17:45:44.447695 4558 generic.go:334] "Generic (PLEG): container finished" podID="0d54110a-6349-45a3-ad58-1bf105f65293" containerID="83708f9e65c27738fe52f73ee0f27f750807ccf23623168fd5a88b2d1f7fefe6" exitCode=0 Jan 20 17:45:44 crc kubenswrapper[4558]: I0120 17:45:44.447778 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7659b55585-bzrvg" event={"ID":"0d54110a-6349-45a3-ad58-1bf105f65293","Type":"ContainerDied","Data":"83708f9e65c27738fe52f73ee0f27f750807ccf23623168fd5a88b2d1f7fefe6"} Jan 20 17:45:44 crc kubenswrapper[4558]: I0120 17:45:44.577336 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2c77f6fd-736b-49e4-aa1c-ddffa451f3dc" path="/var/lib/kubelet/pods/2c77f6fd-736b-49e4-aa1c-ddffa451f3dc/volumes" Jan 20 17:45:44 crc kubenswrapper[4558]: I0120 17:45:44.578636 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="50cacddd-ebea-477f-af64-6e96a09a242e" path="/var/lib/kubelet/pods/50cacddd-ebea-477f-af64-6e96a09a242e/volumes" Jan 20 17:45:44 crc kubenswrapper[4558]: I0120 17:45:44.580308 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ddd57d3b-c9de-46e3-897e-1a50ae49630e" path="/var/lib/kubelet/pods/ddd57d3b-c9de-46e3-897e-1a50ae49630e/volumes" Jan 20 17:45:44 crc kubenswrapper[4558]: I0120 17:45:44.580799 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ea72c008-3a66-4577-8042-4b1e0ed1cca6" path="/var/lib/kubelet/pods/ea72c008-3a66-4577-8042-4b1e0ed1cca6/volumes" Jan 20 17:45:44 crc kubenswrapper[4558]: I0120 17:45:44.757557 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7659b55585-bzrvg" Jan 20 17:45:44 crc kubenswrapper[4558]: I0120 17:45:44.782333 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0d54110a-6349-45a3-ad58-1bf105f65293-dns-swift-storage-0\") pod \"0d54110a-6349-45a3-ad58-1bf105f65293\" (UID: \"0d54110a-6349-45a3-ad58-1bf105f65293\") " Jan 20 17:45:44 crc kubenswrapper[4558]: I0120 17:45:44.783144 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/0d54110a-6349-45a3-ad58-1bf105f65293-dnsmasq-svc\") pod \"0d54110a-6349-45a3-ad58-1bf105f65293\" (UID: \"0d54110a-6349-45a3-ad58-1bf105f65293\") " Jan 20 17:45:44 crc kubenswrapper[4558]: I0120 17:45:44.783313 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x6j7l\" (UniqueName: \"kubernetes.io/projected/0d54110a-6349-45a3-ad58-1bf105f65293-kube-api-access-x6j7l\") pod \"0d54110a-6349-45a3-ad58-1bf105f65293\" (UID: \"0d54110a-6349-45a3-ad58-1bf105f65293\") " Jan 20 17:45:44 crc kubenswrapper[4558]: I0120 17:45:44.783441 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0d54110a-6349-45a3-ad58-1bf105f65293-config\") pod \"0d54110a-6349-45a3-ad58-1bf105f65293\" (UID: \"0d54110a-6349-45a3-ad58-1bf105f65293\") " Jan 20 17:45:44 crc kubenswrapper[4558]: I0120 17:45:44.791940 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0d54110a-6349-45a3-ad58-1bf105f65293-kube-api-access-x6j7l" (OuterVolumeSpecName: "kube-api-access-x6j7l") pod "0d54110a-6349-45a3-ad58-1bf105f65293" (UID: "0d54110a-6349-45a3-ad58-1bf105f65293"). InnerVolumeSpecName "kube-api-access-x6j7l". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:45:44 crc kubenswrapper[4558]: I0120 17:45:44.814679 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0d54110a-6349-45a3-ad58-1bf105f65293-config" (OuterVolumeSpecName: "config") pod "0d54110a-6349-45a3-ad58-1bf105f65293" (UID: "0d54110a-6349-45a3-ad58-1bf105f65293"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:45:44 crc kubenswrapper[4558]: I0120 17:45:44.822932 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0d54110a-6349-45a3-ad58-1bf105f65293-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "0d54110a-6349-45a3-ad58-1bf105f65293" (UID: "0d54110a-6349-45a3-ad58-1bf105f65293"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:45:44 crc kubenswrapper[4558]: I0120 17:45:44.828768 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0d54110a-6349-45a3-ad58-1bf105f65293-dnsmasq-svc" (OuterVolumeSpecName: "dnsmasq-svc") pod "0d54110a-6349-45a3-ad58-1bf105f65293" (UID: "0d54110a-6349-45a3-ad58-1bf105f65293"). InnerVolumeSpecName "dnsmasq-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:45:44 crc kubenswrapper[4558]: I0120 17:45:44.889483 4558 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0d54110a-6349-45a3-ad58-1bf105f65293-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:44 crc kubenswrapper[4558]: I0120 17:45:44.889611 4558 reconciler_common.go:293] "Volume detached for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/0d54110a-6349-45a3-ad58-1bf105f65293-dnsmasq-svc\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:44 crc kubenswrapper[4558]: I0120 17:45:44.889694 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x6j7l\" (UniqueName: \"kubernetes.io/projected/0d54110a-6349-45a3-ad58-1bf105f65293-kube-api-access-x6j7l\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:44 crc kubenswrapper[4558]: I0120 17:45:44.889797 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0d54110a-6349-45a3-ad58-1bf105f65293-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:45 crc kubenswrapper[4558]: I0120 17:45:45.465305 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7659b55585-bzrvg" event={"ID":"0d54110a-6349-45a3-ad58-1bf105f65293","Type":"ContainerDied","Data":"2c8c5c07e92cb16568efe46870af2accc71746fe3ee4c0639e7cf51293a77b15"} Jan 20 17:45:45 crc kubenswrapper[4558]: I0120 17:45:45.465877 4558 scope.go:117] "RemoveContainer" containerID="83708f9e65c27738fe52f73ee0f27f750807ccf23623168fd5a88b2d1f7fefe6" Jan 20 17:45:45 crc kubenswrapper[4558]: I0120 17:45:45.465413 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7659b55585-bzrvg" Jan 20 17:45:45 crc kubenswrapper[4558]: I0120 17:45:45.505101 4558 scope.go:117] "RemoveContainer" containerID="5647fbffdf9b85bb314b193f8160e0dcd8c0d803273190ff423a9a019f91e1ab" Jan 20 17:45:45 crc kubenswrapper[4558]: I0120 17:45:45.512772 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-7659b55585-bzrvg"] Jan 20 17:45:45 crc kubenswrapper[4558]: I0120 17:45:45.519408 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-7659b55585-bzrvg"] Jan 20 17:45:46 crc kubenswrapper[4558]: I0120 17:45:46.576427 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0d54110a-6349-45a3-ad58-1bf105f65293" path="/var/lib/kubelet/pods/0d54110a-6349-45a3-ad58-1bf105f65293/volumes" Jan 20 17:45:50 crc kubenswrapper[4558]: I0120 17:45:50.462544 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-b9f9b6874-4cxdp" Jan 20 17:45:50 crc kubenswrapper[4558]: I0120 17:45:50.471730 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/8361ba4f-e976-4c04-82ad-81e9412ba84c-ovndb-tls-certs\") pod \"8361ba4f-e976-4c04-82ad-81e9412ba84c\" (UID: \"8361ba4f-e976-4c04-82ad-81e9412ba84c\") " Jan 20 17:45:50 crc kubenswrapper[4558]: I0120 17:45:50.471861 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8361ba4f-e976-4c04-82ad-81e9412ba84c-combined-ca-bundle\") pod \"8361ba4f-e976-4c04-82ad-81e9412ba84c\" (UID: \"8361ba4f-e976-4c04-82ad-81e9412ba84c\") " Jan 20 17:45:50 crc kubenswrapper[4558]: I0120 17:45:50.471944 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8361ba4f-e976-4c04-82ad-81e9412ba84c-public-tls-certs\") pod \"8361ba4f-e976-4c04-82ad-81e9412ba84c\" (UID: \"8361ba4f-e976-4c04-82ad-81e9412ba84c\") " Jan 20 17:45:50 crc kubenswrapper[4558]: I0120 17:45:50.471971 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/8361ba4f-e976-4c04-82ad-81e9412ba84c-httpd-config\") pod \"8361ba4f-e976-4c04-82ad-81e9412ba84c\" (UID: \"8361ba4f-e976-4c04-82ad-81e9412ba84c\") " Jan 20 17:45:50 crc kubenswrapper[4558]: I0120 17:45:50.471993 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8361ba4f-e976-4c04-82ad-81e9412ba84c-internal-tls-certs\") pod \"8361ba4f-e976-4c04-82ad-81e9412ba84c\" (UID: \"8361ba4f-e976-4c04-82ad-81e9412ba84c\") " Jan 20 17:45:50 crc kubenswrapper[4558]: I0120 17:45:50.472016 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bkhrf\" (UniqueName: \"kubernetes.io/projected/8361ba4f-e976-4c04-82ad-81e9412ba84c-kube-api-access-bkhrf\") pod \"8361ba4f-e976-4c04-82ad-81e9412ba84c\" (UID: \"8361ba4f-e976-4c04-82ad-81e9412ba84c\") " Jan 20 17:45:50 crc kubenswrapper[4558]: I0120 17:45:50.472040 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/8361ba4f-e976-4c04-82ad-81e9412ba84c-config\") pod \"8361ba4f-e976-4c04-82ad-81e9412ba84c\" (UID: \"8361ba4f-e976-4c04-82ad-81e9412ba84c\") " Jan 20 17:45:50 crc kubenswrapper[4558]: I0120 17:45:50.478431 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8361ba4f-e976-4c04-82ad-81e9412ba84c-kube-api-access-bkhrf" (OuterVolumeSpecName: "kube-api-access-bkhrf") pod "8361ba4f-e976-4c04-82ad-81e9412ba84c" (UID: "8361ba4f-e976-4c04-82ad-81e9412ba84c"). InnerVolumeSpecName "kube-api-access-bkhrf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:45:50 crc kubenswrapper[4558]: I0120 17:45:50.479282 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8361ba4f-e976-4c04-82ad-81e9412ba84c-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "8361ba4f-e976-4c04-82ad-81e9412ba84c" (UID: "8361ba4f-e976-4c04-82ad-81e9412ba84c"). InnerVolumeSpecName "httpd-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:50 crc kubenswrapper[4558]: I0120 17:45:50.515670 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8361ba4f-e976-4c04-82ad-81e9412ba84c-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "8361ba4f-e976-4c04-82ad-81e9412ba84c" (UID: "8361ba4f-e976-4c04-82ad-81e9412ba84c"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:50 crc kubenswrapper[4558]: I0120 17:45:50.519455 4558 generic.go:334] "Generic (PLEG): container finished" podID="8361ba4f-e976-4c04-82ad-81e9412ba84c" containerID="988e9fa129b4be2f46d004a547d691ae8c9171ce34c21f6c5e3dc859fbe3c5da" exitCode=0 Jan 20 17:45:50 crc kubenswrapper[4558]: I0120 17:45:50.519518 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-b9f9b6874-4cxdp" event={"ID":"8361ba4f-e976-4c04-82ad-81e9412ba84c","Type":"ContainerDied","Data":"988e9fa129b4be2f46d004a547d691ae8c9171ce34c21f6c5e3dc859fbe3c5da"} Jan 20 17:45:50 crc kubenswrapper[4558]: I0120 17:45:50.519539 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-b9f9b6874-4cxdp" Jan 20 17:45:50 crc kubenswrapper[4558]: I0120 17:45:50.519554 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-b9f9b6874-4cxdp" event={"ID":"8361ba4f-e976-4c04-82ad-81e9412ba84c","Type":"ContainerDied","Data":"f84eb3dc0ed3f72a5dfdbe61bf42c4f075af06b8d4634598cdd8ae6cf3ba9956"} Jan 20 17:45:50 crc kubenswrapper[4558]: I0120 17:45:50.519585 4558 scope.go:117] "RemoveContainer" containerID="df990974fa4d7bdeba24386c7a7548b9c42f83f4e37902446477a1561329d066" Jan 20 17:45:50 crc kubenswrapper[4558]: I0120 17:45:50.522071 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8361ba4f-e976-4c04-82ad-81e9412ba84c-config" (OuterVolumeSpecName: "config") pod "8361ba4f-e976-4c04-82ad-81e9412ba84c" (UID: "8361ba4f-e976-4c04-82ad-81e9412ba84c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:50 crc kubenswrapper[4558]: I0120 17:45:50.525815 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8361ba4f-e976-4c04-82ad-81e9412ba84c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8361ba4f-e976-4c04-82ad-81e9412ba84c" (UID: "8361ba4f-e976-4c04-82ad-81e9412ba84c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:50 crc kubenswrapper[4558]: I0120 17:45:50.532881 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8361ba4f-e976-4c04-82ad-81e9412ba84c-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "8361ba4f-e976-4c04-82ad-81e9412ba84c" (UID: "8361ba4f-e976-4c04-82ad-81e9412ba84c"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:50 crc kubenswrapper[4558]: I0120 17:45:50.539482 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8361ba4f-e976-4c04-82ad-81e9412ba84c-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "8361ba4f-e976-4c04-82ad-81e9412ba84c" (UID: "8361ba4f-e976-4c04-82ad-81e9412ba84c"). InnerVolumeSpecName "ovndb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:45:50 crc kubenswrapper[4558]: I0120 17:45:50.543580 4558 scope.go:117] "RemoveContainer" containerID="988e9fa129b4be2f46d004a547d691ae8c9171ce34c21f6c5e3dc859fbe3c5da" Jan 20 17:45:50 crc kubenswrapper[4558]: I0120 17:45:50.563851 4558 scope.go:117] "RemoveContainer" containerID="df990974fa4d7bdeba24386c7a7548b9c42f83f4e37902446477a1561329d066" Jan 20 17:45:50 crc kubenswrapper[4558]: E0120 17:45:50.564368 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"df990974fa4d7bdeba24386c7a7548b9c42f83f4e37902446477a1561329d066\": container with ID starting with df990974fa4d7bdeba24386c7a7548b9c42f83f4e37902446477a1561329d066 not found: ID does not exist" containerID="df990974fa4d7bdeba24386c7a7548b9c42f83f4e37902446477a1561329d066" Jan 20 17:45:50 crc kubenswrapper[4558]: I0120 17:45:50.564412 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"df990974fa4d7bdeba24386c7a7548b9c42f83f4e37902446477a1561329d066"} err="failed to get container status \"df990974fa4d7bdeba24386c7a7548b9c42f83f4e37902446477a1561329d066\": rpc error: code = NotFound desc = could not find container \"df990974fa4d7bdeba24386c7a7548b9c42f83f4e37902446477a1561329d066\": container with ID starting with df990974fa4d7bdeba24386c7a7548b9c42f83f4e37902446477a1561329d066 not found: ID does not exist" Jan 20 17:45:50 crc kubenswrapper[4558]: I0120 17:45:50.564441 4558 scope.go:117] "RemoveContainer" containerID="988e9fa129b4be2f46d004a547d691ae8c9171ce34c21f6c5e3dc859fbe3c5da" Jan 20 17:45:50 crc kubenswrapper[4558]: E0120 17:45:50.564837 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"988e9fa129b4be2f46d004a547d691ae8c9171ce34c21f6c5e3dc859fbe3c5da\": container with ID starting with 988e9fa129b4be2f46d004a547d691ae8c9171ce34c21f6c5e3dc859fbe3c5da not found: ID does not exist" containerID="988e9fa129b4be2f46d004a547d691ae8c9171ce34c21f6c5e3dc859fbe3c5da" Jan 20 17:45:50 crc kubenswrapper[4558]: I0120 17:45:50.564929 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"988e9fa129b4be2f46d004a547d691ae8c9171ce34c21f6c5e3dc859fbe3c5da"} err="failed to get container status \"988e9fa129b4be2f46d004a547d691ae8c9171ce34c21f6c5e3dc859fbe3c5da\": rpc error: code = NotFound desc = could not find container \"988e9fa129b4be2f46d004a547d691ae8c9171ce34c21f6c5e3dc859fbe3c5da\": container with ID starting with 988e9fa129b4be2f46d004a547d691ae8c9171ce34c21f6c5e3dc859fbe3c5da not found: ID does not exist" Jan 20 17:45:50 crc kubenswrapper[4558]: I0120 17:45:50.574202 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8361ba4f-e976-4c04-82ad-81e9412ba84c-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:50 crc kubenswrapper[4558]: I0120 17:45:50.574230 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/8361ba4f-e976-4c04-82ad-81e9412ba84c-httpd-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:50 crc kubenswrapper[4558]: I0120 17:45:50.574242 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8361ba4f-e976-4c04-82ad-81e9412ba84c-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:50 crc kubenswrapper[4558]: 
I0120 17:45:50.574254 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bkhrf\" (UniqueName: \"kubernetes.io/projected/8361ba4f-e976-4c04-82ad-81e9412ba84c-kube-api-access-bkhrf\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:50 crc kubenswrapper[4558]: I0120 17:45:50.574265 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/8361ba4f-e976-4c04-82ad-81e9412ba84c-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:50 crc kubenswrapper[4558]: I0120 17:45:50.574275 4558 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/8361ba4f-e976-4c04-82ad-81e9412ba84c-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:50 crc kubenswrapper[4558]: I0120 17:45:50.574284 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8361ba4f-e976-4c04-82ad-81e9412ba84c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:45:50 crc kubenswrapper[4558]: I0120 17:45:50.839969 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-b9f9b6874-4cxdp"] Jan 20 17:45:50 crc kubenswrapper[4558]: I0120 17:45:50.844815 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-b9f9b6874-4cxdp"] Jan 20 17:45:52 crc kubenswrapper[4558]: I0120 17:45:52.575508 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8361ba4f-e976-4c04-82ad-81e9412ba84c" path="/var/lib/kubelet/pods/8361ba4f-e976-4c04-82ad-81e9412ba84c/volumes" Jan 20 17:46:04 crc kubenswrapper[4558]: I0120 17:46:04.157843 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:46:04 crc kubenswrapper[4558]: I0120 17:46:04.284181 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/fa551db1-9a4b-45c2-b640-4f3518b162f4-cache\") pod \"fa551db1-9a4b-45c2-b640-4f3518b162f4\" (UID: \"fa551db1-9a4b-45c2-b640-4f3518b162f4\") " Jan 20 17:46:04 crc kubenswrapper[4558]: I0120 17:46:04.284298 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l6zjs\" (UniqueName: \"kubernetes.io/projected/fa551db1-9a4b-45c2-b640-4f3518b162f4-kube-api-access-l6zjs\") pod \"fa551db1-9a4b-45c2-b640-4f3518b162f4\" (UID: \"fa551db1-9a4b-45c2-b640-4f3518b162f4\") " Jan 20 17:46:04 crc kubenswrapper[4558]: I0120 17:46:04.284325 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/fa551db1-9a4b-45c2-b640-4f3518b162f4-etc-swift\") pod \"fa551db1-9a4b-45c2-b640-4f3518b162f4\" (UID: \"fa551db1-9a4b-45c2-b640-4f3518b162f4\") " Jan 20 17:46:04 crc kubenswrapper[4558]: I0120 17:46:04.284348 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/fa551db1-9a4b-45c2-b640-4f3518b162f4-lock\") pod \"fa551db1-9a4b-45c2-b640-4f3518b162f4\" (UID: \"fa551db1-9a4b-45c2-b640-4f3518b162f4\") " Jan 20 17:46:04 crc kubenswrapper[4558]: I0120 17:46:04.284438 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swift\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"fa551db1-9a4b-45c2-b640-4f3518b162f4\" (UID: \"fa551db1-9a4b-45c2-b640-4f3518b162f4\") " Jan 20 17:46:04 crc kubenswrapper[4558]: 
I0120 17:46:04.284830 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fa551db1-9a4b-45c2-b640-4f3518b162f4-lock" (OuterVolumeSpecName: "lock") pod "fa551db1-9a4b-45c2-b640-4f3518b162f4" (UID: "fa551db1-9a4b-45c2-b640-4f3518b162f4"). InnerVolumeSpecName "lock". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:46:04 crc kubenswrapper[4558]: I0120 17:46:04.284967 4558 reconciler_common.go:293] "Volume detached for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/fa551db1-9a4b-45c2-b640-4f3518b162f4-lock\") on node \"crc\" DevicePath \"\"" Jan 20 17:46:04 crc kubenswrapper[4558]: I0120 17:46:04.285255 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fa551db1-9a4b-45c2-b640-4f3518b162f4-cache" (OuterVolumeSpecName: "cache") pod "fa551db1-9a4b-45c2-b640-4f3518b162f4" (UID: "fa551db1-9a4b-45c2-b640-4f3518b162f4"). InnerVolumeSpecName "cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:46:04 crc kubenswrapper[4558]: I0120 17:46:04.289217 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "swift") pod "fa551db1-9a4b-45c2-b640-4f3518b162f4" (UID: "fa551db1-9a4b-45c2-b640-4f3518b162f4"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:46:04 crc kubenswrapper[4558]: I0120 17:46:04.289467 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fa551db1-9a4b-45c2-b640-4f3518b162f4-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "fa551db1-9a4b-45c2-b640-4f3518b162f4" (UID: "fa551db1-9a4b-45c2-b640-4f3518b162f4"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:46:04 crc kubenswrapper[4558]: I0120 17:46:04.290127 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fa551db1-9a4b-45c2-b640-4f3518b162f4-kube-api-access-l6zjs" (OuterVolumeSpecName: "kube-api-access-l6zjs") pod "fa551db1-9a4b-45c2-b640-4f3518b162f4" (UID: "fa551db1-9a4b-45c2-b640-4f3518b162f4"). InnerVolumeSpecName "kube-api-access-l6zjs". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:46:04 crc kubenswrapper[4558]: I0120 17:46:04.387019 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l6zjs\" (UniqueName: \"kubernetes.io/projected/fa551db1-9a4b-45c2-b640-4f3518b162f4-kube-api-access-l6zjs\") on node \"crc\" DevicePath \"\"" Jan 20 17:46:04 crc kubenswrapper[4558]: I0120 17:46:04.387122 4558 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/fa551db1-9a4b-45c2-b640-4f3518b162f4-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 20 17:46:04 crc kubenswrapper[4558]: I0120 17:46:04.387237 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Jan 20 17:46:04 crc kubenswrapper[4558]: I0120 17:46:04.387294 4558 reconciler_common.go:293] "Volume detached for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/fa551db1-9a4b-45c2-b640-4f3518b162f4-cache\") on node \"crc\" DevicePath \"\"" Jan 20 17:46:04 crc kubenswrapper[4558]: I0120 17:46:04.399820 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Jan 20 17:46:04 crc kubenswrapper[4558]: I0120 17:46:04.488429 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:46:04 crc kubenswrapper[4558]: I0120 17:46:04.662535 4558 generic.go:334] "Generic (PLEG): container finished" podID="fa551db1-9a4b-45c2-b640-4f3518b162f4" containerID="0978e6e1ade70cc41df3d45431dd51e1520b653984893d0992b4c5d6e1fb3e47" exitCode=137 Jan 20 17:46:04 crc kubenswrapper[4558]: I0120 17:46:04.662712 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fa551db1-9a4b-45c2-b640-4f3518b162f4","Type":"ContainerDied","Data":"0978e6e1ade70cc41df3d45431dd51e1520b653984893d0992b4c5d6e1fb3e47"} Jan 20 17:46:04 crc kubenswrapper[4558]: I0120 17:46:04.662945 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"fa551db1-9a4b-45c2-b640-4f3518b162f4","Type":"ContainerDied","Data":"c7e8a0f021c02540371f8f70542f7136469c5d2c70392aaa2060efc6c4587260"} Jan 20 17:46:04 crc kubenswrapper[4558]: I0120 17:46:04.662796 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:46:04 crc kubenswrapper[4558]: I0120 17:46:04.663040 4558 scope.go:117] "RemoveContainer" containerID="0978e6e1ade70cc41df3d45431dd51e1520b653984893d0992b4c5d6e1fb3e47" Jan 20 17:46:04 crc kubenswrapper[4558]: I0120 17:46:04.692673 4558 scope.go:117] "RemoveContainer" containerID="b5ee244c517bb7529681f18dd5be00352c8b0d2d264f9bf4dd993ae3ee88bc9f" Jan 20 17:46:04 crc kubenswrapper[4558]: I0120 17:46:04.696442 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-storage-0"] Jan 20 17:46:04 crc kubenswrapper[4558]: I0120 17:46:04.701647 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/swift-storage-0"] Jan 20 17:46:04 crc kubenswrapper[4558]: I0120 17:46:04.708764 4558 scope.go:117] "RemoveContainer" containerID="6ff615a38b1de5289443af49e872533c8fb33086c5c5f749a5757153445d80db" Jan 20 17:46:04 crc kubenswrapper[4558]: I0120 17:46:04.725950 4558 scope.go:117] "RemoveContainer" containerID="730e7a8305ea822bf786ff7759b1b076db78be0ddb28d589685ca62f80722994" Jan 20 17:46:04 crc kubenswrapper[4558]: I0120 17:46:04.739893 4558 scope.go:117] "RemoveContainer" containerID="db06ae2f5dcfb82f3b41bda4c320f201a9c03b7b5c3d71c0c19a43661eba2751" Jan 20 17:46:04 crc kubenswrapper[4558]: I0120 17:46:04.754228 4558 scope.go:117] "RemoveContainer" containerID="1776b65beebb985be1fcd79b9aa133812c590cf0eb9a13dd5bf41981563e77b5" Jan 20 17:46:04 crc kubenswrapper[4558]: I0120 17:46:04.770533 4558 scope.go:117] "RemoveContainer" containerID="0b9af6175ca2fe405cf0a757e866a177d1e865ed2977ca637413cf1645033f14" Jan 20 17:46:04 crc kubenswrapper[4558]: I0120 17:46:04.789519 4558 scope.go:117] "RemoveContainer" containerID="35a4222bca9bef05ef01fd815e0b4f831f1296d08622fc5e89a2a361527c22e6" Jan 20 17:46:04 crc kubenswrapper[4558]: I0120 17:46:04.810033 4558 scope.go:117] "RemoveContainer" containerID="6a4f662ca1f9347dca843ebc69844d9f6f10b499e6d23197e88dd1c43db718b8" Jan 20 17:46:04 crc kubenswrapper[4558]: I0120 17:46:04.835820 4558 scope.go:117] "RemoveContainer" containerID="13c87e4a18a86e7c3b8ddb00e81ce8b0da89d4d0c280a7e98c30f1e03817bfc3" Jan 20 17:46:04 crc kubenswrapper[4558]: I0120 17:46:04.851945 4558 scope.go:117] "RemoveContainer" containerID="c784e598ad58ecd5a81e3bf765a648caa06b6e4d7a98116be1483ea1a1916bd6" Jan 20 17:46:04 crc kubenswrapper[4558]: I0120 17:46:04.867056 4558 scope.go:117] "RemoveContainer" containerID="cbaf44dfc0734a14fd6ecab8c4428e033aaee10d6fd6549b9c1af4209a3025c5" Jan 20 17:46:04 crc kubenswrapper[4558]: I0120 17:46:04.881668 4558 scope.go:117] "RemoveContainer" containerID="967813e267c1874f848c851b80566f8d4b10ddb11ae1e902fcced650a357a740" Jan 20 17:46:04 crc kubenswrapper[4558]: I0120 17:46:04.909531 4558 scope.go:117] "RemoveContainer" containerID="c873af103469702cab190e42ac22b360e4b1c8addd7ea94631a91965a8840503" Jan 20 17:46:04 crc kubenswrapper[4558]: I0120 17:46:04.929531 4558 scope.go:117] "RemoveContainer" containerID="7946458eae645c01b2edae8d93ef429443754ef2e0e2732538ed1fcc7aab4131" Jan 20 17:46:04 crc kubenswrapper[4558]: I0120 17:46:04.950775 4558 scope.go:117] "RemoveContainer" containerID="0978e6e1ade70cc41df3d45431dd51e1520b653984893d0992b4c5d6e1fb3e47" Jan 20 17:46:04 crc kubenswrapper[4558]: E0120 17:46:04.951461 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0978e6e1ade70cc41df3d45431dd51e1520b653984893d0992b4c5d6e1fb3e47\": container with ID starting with 
0978e6e1ade70cc41df3d45431dd51e1520b653984893d0992b4c5d6e1fb3e47 not found: ID does not exist" containerID="0978e6e1ade70cc41df3d45431dd51e1520b653984893d0992b4c5d6e1fb3e47" Jan 20 17:46:04 crc kubenswrapper[4558]: I0120 17:46:04.951572 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0978e6e1ade70cc41df3d45431dd51e1520b653984893d0992b4c5d6e1fb3e47"} err="failed to get container status \"0978e6e1ade70cc41df3d45431dd51e1520b653984893d0992b4c5d6e1fb3e47\": rpc error: code = NotFound desc = could not find container \"0978e6e1ade70cc41df3d45431dd51e1520b653984893d0992b4c5d6e1fb3e47\": container with ID starting with 0978e6e1ade70cc41df3d45431dd51e1520b653984893d0992b4c5d6e1fb3e47 not found: ID does not exist" Jan 20 17:46:04 crc kubenswrapper[4558]: I0120 17:46:04.951670 4558 scope.go:117] "RemoveContainer" containerID="b5ee244c517bb7529681f18dd5be00352c8b0d2d264f9bf4dd993ae3ee88bc9f" Jan 20 17:46:04 crc kubenswrapper[4558]: E0120 17:46:04.952130 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b5ee244c517bb7529681f18dd5be00352c8b0d2d264f9bf4dd993ae3ee88bc9f\": container with ID starting with b5ee244c517bb7529681f18dd5be00352c8b0d2d264f9bf4dd993ae3ee88bc9f not found: ID does not exist" containerID="b5ee244c517bb7529681f18dd5be00352c8b0d2d264f9bf4dd993ae3ee88bc9f" Jan 20 17:46:04 crc kubenswrapper[4558]: I0120 17:46:04.952227 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b5ee244c517bb7529681f18dd5be00352c8b0d2d264f9bf4dd993ae3ee88bc9f"} err="failed to get container status \"b5ee244c517bb7529681f18dd5be00352c8b0d2d264f9bf4dd993ae3ee88bc9f\": rpc error: code = NotFound desc = could not find container \"b5ee244c517bb7529681f18dd5be00352c8b0d2d264f9bf4dd993ae3ee88bc9f\": container with ID starting with b5ee244c517bb7529681f18dd5be00352c8b0d2d264f9bf4dd993ae3ee88bc9f not found: ID does not exist" Jan 20 17:46:04 crc kubenswrapper[4558]: I0120 17:46:04.952286 4558 scope.go:117] "RemoveContainer" containerID="6ff615a38b1de5289443af49e872533c8fb33086c5c5f749a5757153445d80db" Jan 20 17:46:04 crc kubenswrapper[4558]: E0120 17:46:04.952800 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6ff615a38b1de5289443af49e872533c8fb33086c5c5f749a5757153445d80db\": container with ID starting with 6ff615a38b1de5289443af49e872533c8fb33086c5c5f749a5757153445d80db not found: ID does not exist" containerID="6ff615a38b1de5289443af49e872533c8fb33086c5c5f749a5757153445d80db" Jan 20 17:46:04 crc kubenswrapper[4558]: I0120 17:46:04.952846 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6ff615a38b1de5289443af49e872533c8fb33086c5c5f749a5757153445d80db"} err="failed to get container status \"6ff615a38b1de5289443af49e872533c8fb33086c5c5f749a5757153445d80db\": rpc error: code = NotFound desc = could not find container \"6ff615a38b1de5289443af49e872533c8fb33086c5c5f749a5757153445d80db\": container with ID starting with 6ff615a38b1de5289443af49e872533c8fb33086c5c5f749a5757153445d80db not found: ID does not exist" Jan 20 17:46:04 crc kubenswrapper[4558]: I0120 17:46:04.952880 4558 scope.go:117] "RemoveContainer" containerID="730e7a8305ea822bf786ff7759b1b076db78be0ddb28d589685ca62f80722994" Jan 20 17:46:04 crc kubenswrapper[4558]: E0120 17:46:04.953286 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc 
error: code = NotFound desc = could not find container \"730e7a8305ea822bf786ff7759b1b076db78be0ddb28d589685ca62f80722994\": container with ID starting with 730e7a8305ea822bf786ff7759b1b076db78be0ddb28d589685ca62f80722994 not found: ID does not exist" containerID="730e7a8305ea822bf786ff7759b1b076db78be0ddb28d589685ca62f80722994" Jan 20 17:46:04 crc kubenswrapper[4558]: I0120 17:46:04.953310 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"730e7a8305ea822bf786ff7759b1b076db78be0ddb28d589685ca62f80722994"} err="failed to get container status \"730e7a8305ea822bf786ff7759b1b076db78be0ddb28d589685ca62f80722994\": rpc error: code = NotFound desc = could not find container \"730e7a8305ea822bf786ff7759b1b076db78be0ddb28d589685ca62f80722994\": container with ID starting with 730e7a8305ea822bf786ff7759b1b076db78be0ddb28d589685ca62f80722994 not found: ID does not exist" Jan 20 17:46:04 crc kubenswrapper[4558]: I0120 17:46:04.953327 4558 scope.go:117] "RemoveContainer" containerID="db06ae2f5dcfb82f3b41bda4c320f201a9c03b7b5c3d71c0c19a43661eba2751" Jan 20 17:46:04 crc kubenswrapper[4558]: E0120 17:46:04.954367 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"db06ae2f5dcfb82f3b41bda4c320f201a9c03b7b5c3d71c0c19a43661eba2751\": container with ID starting with db06ae2f5dcfb82f3b41bda4c320f201a9c03b7b5c3d71c0c19a43661eba2751 not found: ID does not exist" containerID="db06ae2f5dcfb82f3b41bda4c320f201a9c03b7b5c3d71c0c19a43661eba2751" Jan 20 17:46:04 crc kubenswrapper[4558]: I0120 17:46:04.954410 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"db06ae2f5dcfb82f3b41bda4c320f201a9c03b7b5c3d71c0c19a43661eba2751"} err="failed to get container status \"db06ae2f5dcfb82f3b41bda4c320f201a9c03b7b5c3d71c0c19a43661eba2751\": rpc error: code = NotFound desc = could not find container \"db06ae2f5dcfb82f3b41bda4c320f201a9c03b7b5c3d71c0c19a43661eba2751\": container with ID starting with db06ae2f5dcfb82f3b41bda4c320f201a9c03b7b5c3d71c0c19a43661eba2751 not found: ID does not exist" Jan 20 17:46:04 crc kubenswrapper[4558]: I0120 17:46:04.954442 4558 scope.go:117] "RemoveContainer" containerID="1776b65beebb985be1fcd79b9aa133812c590cf0eb9a13dd5bf41981563e77b5" Jan 20 17:46:04 crc kubenswrapper[4558]: E0120 17:46:04.954762 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1776b65beebb985be1fcd79b9aa133812c590cf0eb9a13dd5bf41981563e77b5\": container with ID starting with 1776b65beebb985be1fcd79b9aa133812c590cf0eb9a13dd5bf41981563e77b5 not found: ID does not exist" containerID="1776b65beebb985be1fcd79b9aa133812c590cf0eb9a13dd5bf41981563e77b5" Jan 20 17:46:04 crc kubenswrapper[4558]: I0120 17:46:04.954783 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1776b65beebb985be1fcd79b9aa133812c590cf0eb9a13dd5bf41981563e77b5"} err="failed to get container status \"1776b65beebb985be1fcd79b9aa133812c590cf0eb9a13dd5bf41981563e77b5\": rpc error: code = NotFound desc = could not find container \"1776b65beebb985be1fcd79b9aa133812c590cf0eb9a13dd5bf41981563e77b5\": container with ID starting with 1776b65beebb985be1fcd79b9aa133812c590cf0eb9a13dd5bf41981563e77b5 not found: ID does not exist" Jan 20 17:46:04 crc kubenswrapper[4558]: I0120 17:46:04.954799 4558 scope.go:117] "RemoveContainer" 
containerID="0b9af6175ca2fe405cf0a757e866a177d1e865ed2977ca637413cf1645033f14" Jan 20 17:46:04 crc kubenswrapper[4558]: E0120 17:46:04.955129 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0b9af6175ca2fe405cf0a757e866a177d1e865ed2977ca637413cf1645033f14\": container with ID starting with 0b9af6175ca2fe405cf0a757e866a177d1e865ed2977ca637413cf1645033f14 not found: ID does not exist" containerID="0b9af6175ca2fe405cf0a757e866a177d1e865ed2977ca637413cf1645033f14" Jan 20 17:46:04 crc kubenswrapper[4558]: I0120 17:46:04.955176 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0b9af6175ca2fe405cf0a757e866a177d1e865ed2977ca637413cf1645033f14"} err="failed to get container status \"0b9af6175ca2fe405cf0a757e866a177d1e865ed2977ca637413cf1645033f14\": rpc error: code = NotFound desc = could not find container \"0b9af6175ca2fe405cf0a757e866a177d1e865ed2977ca637413cf1645033f14\": container with ID starting with 0b9af6175ca2fe405cf0a757e866a177d1e865ed2977ca637413cf1645033f14 not found: ID does not exist" Jan 20 17:46:04 crc kubenswrapper[4558]: I0120 17:46:04.955196 4558 scope.go:117] "RemoveContainer" containerID="35a4222bca9bef05ef01fd815e0b4f831f1296d08622fc5e89a2a361527c22e6" Jan 20 17:46:04 crc kubenswrapper[4558]: E0120 17:46:04.955488 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"35a4222bca9bef05ef01fd815e0b4f831f1296d08622fc5e89a2a361527c22e6\": container with ID starting with 35a4222bca9bef05ef01fd815e0b4f831f1296d08622fc5e89a2a361527c22e6 not found: ID does not exist" containerID="35a4222bca9bef05ef01fd815e0b4f831f1296d08622fc5e89a2a361527c22e6" Jan 20 17:46:04 crc kubenswrapper[4558]: I0120 17:46:04.955517 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"35a4222bca9bef05ef01fd815e0b4f831f1296d08622fc5e89a2a361527c22e6"} err="failed to get container status \"35a4222bca9bef05ef01fd815e0b4f831f1296d08622fc5e89a2a361527c22e6\": rpc error: code = NotFound desc = could not find container \"35a4222bca9bef05ef01fd815e0b4f831f1296d08622fc5e89a2a361527c22e6\": container with ID starting with 35a4222bca9bef05ef01fd815e0b4f831f1296d08622fc5e89a2a361527c22e6 not found: ID does not exist" Jan 20 17:46:04 crc kubenswrapper[4558]: I0120 17:46:04.955540 4558 scope.go:117] "RemoveContainer" containerID="6a4f662ca1f9347dca843ebc69844d9f6f10b499e6d23197e88dd1c43db718b8" Jan 20 17:46:04 crc kubenswrapper[4558]: E0120 17:46:04.955788 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6a4f662ca1f9347dca843ebc69844d9f6f10b499e6d23197e88dd1c43db718b8\": container with ID starting with 6a4f662ca1f9347dca843ebc69844d9f6f10b499e6d23197e88dd1c43db718b8 not found: ID does not exist" containerID="6a4f662ca1f9347dca843ebc69844d9f6f10b499e6d23197e88dd1c43db718b8" Jan 20 17:46:04 crc kubenswrapper[4558]: I0120 17:46:04.955820 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6a4f662ca1f9347dca843ebc69844d9f6f10b499e6d23197e88dd1c43db718b8"} err="failed to get container status \"6a4f662ca1f9347dca843ebc69844d9f6f10b499e6d23197e88dd1c43db718b8\": rpc error: code = NotFound desc = could not find container \"6a4f662ca1f9347dca843ebc69844d9f6f10b499e6d23197e88dd1c43db718b8\": container with ID starting with 
6a4f662ca1f9347dca843ebc69844d9f6f10b499e6d23197e88dd1c43db718b8 not found: ID does not exist" Jan 20 17:46:04 crc kubenswrapper[4558]: I0120 17:46:04.955835 4558 scope.go:117] "RemoveContainer" containerID="13c87e4a18a86e7c3b8ddb00e81ce8b0da89d4d0c280a7e98c30f1e03817bfc3" Jan 20 17:46:04 crc kubenswrapper[4558]: E0120 17:46:04.956104 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"13c87e4a18a86e7c3b8ddb00e81ce8b0da89d4d0c280a7e98c30f1e03817bfc3\": container with ID starting with 13c87e4a18a86e7c3b8ddb00e81ce8b0da89d4d0c280a7e98c30f1e03817bfc3 not found: ID does not exist" containerID="13c87e4a18a86e7c3b8ddb00e81ce8b0da89d4d0c280a7e98c30f1e03817bfc3" Jan 20 17:46:04 crc kubenswrapper[4558]: I0120 17:46:04.956134 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"13c87e4a18a86e7c3b8ddb00e81ce8b0da89d4d0c280a7e98c30f1e03817bfc3"} err="failed to get container status \"13c87e4a18a86e7c3b8ddb00e81ce8b0da89d4d0c280a7e98c30f1e03817bfc3\": rpc error: code = NotFound desc = could not find container \"13c87e4a18a86e7c3b8ddb00e81ce8b0da89d4d0c280a7e98c30f1e03817bfc3\": container with ID starting with 13c87e4a18a86e7c3b8ddb00e81ce8b0da89d4d0c280a7e98c30f1e03817bfc3 not found: ID does not exist" Jan 20 17:46:04 crc kubenswrapper[4558]: I0120 17:46:04.956154 4558 scope.go:117] "RemoveContainer" containerID="c784e598ad58ecd5a81e3bf765a648caa06b6e4d7a98116be1483ea1a1916bd6" Jan 20 17:46:04 crc kubenswrapper[4558]: E0120 17:46:04.956457 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c784e598ad58ecd5a81e3bf765a648caa06b6e4d7a98116be1483ea1a1916bd6\": container with ID starting with c784e598ad58ecd5a81e3bf765a648caa06b6e4d7a98116be1483ea1a1916bd6 not found: ID does not exist" containerID="c784e598ad58ecd5a81e3bf765a648caa06b6e4d7a98116be1483ea1a1916bd6" Jan 20 17:46:04 crc kubenswrapper[4558]: I0120 17:46:04.956480 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c784e598ad58ecd5a81e3bf765a648caa06b6e4d7a98116be1483ea1a1916bd6"} err="failed to get container status \"c784e598ad58ecd5a81e3bf765a648caa06b6e4d7a98116be1483ea1a1916bd6\": rpc error: code = NotFound desc = could not find container \"c784e598ad58ecd5a81e3bf765a648caa06b6e4d7a98116be1483ea1a1916bd6\": container with ID starting with c784e598ad58ecd5a81e3bf765a648caa06b6e4d7a98116be1483ea1a1916bd6 not found: ID does not exist" Jan 20 17:46:04 crc kubenswrapper[4558]: I0120 17:46:04.956495 4558 scope.go:117] "RemoveContainer" containerID="cbaf44dfc0734a14fd6ecab8c4428e033aaee10d6fd6549b9c1af4209a3025c5" Jan 20 17:46:04 crc kubenswrapper[4558]: E0120 17:46:04.956788 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cbaf44dfc0734a14fd6ecab8c4428e033aaee10d6fd6549b9c1af4209a3025c5\": container with ID starting with cbaf44dfc0734a14fd6ecab8c4428e033aaee10d6fd6549b9c1af4209a3025c5 not found: ID does not exist" containerID="cbaf44dfc0734a14fd6ecab8c4428e033aaee10d6fd6549b9c1af4209a3025c5" Jan 20 17:46:04 crc kubenswrapper[4558]: I0120 17:46:04.956828 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cbaf44dfc0734a14fd6ecab8c4428e033aaee10d6fd6549b9c1af4209a3025c5"} err="failed to get container status \"cbaf44dfc0734a14fd6ecab8c4428e033aaee10d6fd6549b9c1af4209a3025c5\": rpc 
error: code = NotFound desc = could not find container \"cbaf44dfc0734a14fd6ecab8c4428e033aaee10d6fd6549b9c1af4209a3025c5\": container with ID starting with cbaf44dfc0734a14fd6ecab8c4428e033aaee10d6fd6549b9c1af4209a3025c5 not found: ID does not exist" Jan 20 17:46:04 crc kubenswrapper[4558]: I0120 17:46:04.956846 4558 scope.go:117] "RemoveContainer" containerID="967813e267c1874f848c851b80566f8d4b10ddb11ae1e902fcced650a357a740" Jan 20 17:46:04 crc kubenswrapper[4558]: E0120 17:46:04.957454 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"967813e267c1874f848c851b80566f8d4b10ddb11ae1e902fcced650a357a740\": container with ID starting with 967813e267c1874f848c851b80566f8d4b10ddb11ae1e902fcced650a357a740 not found: ID does not exist" containerID="967813e267c1874f848c851b80566f8d4b10ddb11ae1e902fcced650a357a740" Jan 20 17:46:04 crc kubenswrapper[4558]: I0120 17:46:04.957482 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"967813e267c1874f848c851b80566f8d4b10ddb11ae1e902fcced650a357a740"} err="failed to get container status \"967813e267c1874f848c851b80566f8d4b10ddb11ae1e902fcced650a357a740\": rpc error: code = NotFound desc = could not find container \"967813e267c1874f848c851b80566f8d4b10ddb11ae1e902fcced650a357a740\": container with ID starting with 967813e267c1874f848c851b80566f8d4b10ddb11ae1e902fcced650a357a740 not found: ID does not exist" Jan 20 17:46:04 crc kubenswrapper[4558]: I0120 17:46:04.957499 4558 scope.go:117] "RemoveContainer" containerID="c873af103469702cab190e42ac22b360e4b1c8addd7ea94631a91965a8840503" Jan 20 17:46:04 crc kubenswrapper[4558]: E0120 17:46:04.957754 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c873af103469702cab190e42ac22b360e4b1c8addd7ea94631a91965a8840503\": container with ID starting with c873af103469702cab190e42ac22b360e4b1c8addd7ea94631a91965a8840503 not found: ID does not exist" containerID="c873af103469702cab190e42ac22b360e4b1c8addd7ea94631a91965a8840503" Jan 20 17:46:04 crc kubenswrapper[4558]: I0120 17:46:04.957776 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c873af103469702cab190e42ac22b360e4b1c8addd7ea94631a91965a8840503"} err="failed to get container status \"c873af103469702cab190e42ac22b360e4b1c8addd7ea94631a91965a8840503\": rpc error: code = NotFound desc = could not find container \"c873af103469702cab190e42ac22b360e4b1c8addd7ea94631a91965a8840503\": container with ID starting with c873af103469702cab190e42ac22b360e4b1c8addd7ea94631a91965a8840503 not found: ID does not exist" Jan 20 17:46:04 crc kubenswrapper[4558]: I0120 17:46:04.957791 4558 scope.go:117] "RemoveContainer" containerID="7946458eae645c01b2edae8d93ef429443754ef2e0e2732538ed1fcc7aab4131" Jan 20 17:46:04 crc kubenswrapper[4558]: E0120 17:46:04.958047 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7946458eae645c01b2edae8d93ef429443754ef2e0e2732538ed1fcc7aab4131\": container with ID starting with 7946458eae645c01b2edae8d93ef429443754ef2e0e2732538ed1fcc7aab4131 not found: ID does not exist" containerID="7946458eae645c01b2edae8d93ef429443754ef2e0e2732538ed1fcc7aab4131" Jan 20 17:46:04 crc kubenswrapper[4558]: I0120 17:46:04.958083 4558 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"7946458eae645c01b2edae8d93ef429443754ef2e0e2732538ed1fcc7aab4131"} err="failed to get container status \"7946458eae645c01b2edae8d93ef429443754ef2e0e2732538ed1fcc7aab4131\": rpc error: code = NotFound desc = could not find container \"7946458eae645c01b2edae8d93ef429443754ef2e0e2732538ed1fcc7aab4131\": container with ID starting with 7946458eae645c01b2edae8d93ef429443754ef2e0e2732538ed1fcc7aab4131 not found: ID does not exist" Jan 20 17:46:06 crc kubenswrapper[4558]: I0120 17:46:06.574801 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fa551db1-9a4b-45c2-b640-4f3518b162f4" path="/var/lib/kubelet/pods/fa551db1-9a4b-45c2-b640-4f3518b162f4/volumes" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.185322 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:46:14 crc kubenswrapper[4558]: E0120 17:46:14.186372 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50804fa8-d09d-49f8-a143-d7ec24ec542a" containerName="placement-api" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.186391 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="50804fa8-d09d-49f8-a143-d7ec24ec542a" containerName="placement-api" Jan 20 17:46:14 crc kubenswrapper[4558]: E0120 17:46:14.186408 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86aff100-d474-48ed-b673-4dae7c0722cf" containerName="mysql-bootstrap" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.186416 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="86aff100-d474-48ed-b673-4dae7c0722cf" containerName="mysql-bootstrap" Jan 20 17:46:14 crc kubenswrapper[4558]: E0120 17:46:14.186424 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea72c008-3a66-4577-8042-4b1e0ed1cca6" containerName="nova-cell1-conductor-conductor" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.186430 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea72c008-3a66-4577-8042-4b1e0ed1cca6" containerName="nova-cell1-conductor-conductor" Jan 20 17:46:14 crc kubenswrapper[4558]: E0120 17:46:14.186439 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de4d8126-91cf-4149-bec4-4accaf558308" containerName="setup-container" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.186445 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="de4d8126-91cf-4149-bec4-4accaf558308" containerName="setup-container" Jan 20 17:46:14 crc kubenswrapper[4558]: E0120 17:46:14.186453 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd5cfc55-8cd9-4a76-87b8-4050b8f1030f" containerName="barbican-keystone-listener-log" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.186458 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd5cfc55-8cd9-4a76-87b8-4050b8f1030f" containerName="barbican-keystone-listener-log" Jan 20 17:46:14 crc kubenswrapper[4558]: E0120 17:46:14.186470 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d619cd37-a474-4965-b382-749ed6d55d6d" containerName="barbican-api" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.186475 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d619cd37-a474-4965-b382-749ed6d55d6d" containerName="barbican-api" Jan 20 17:46:14 crc kubenswrapper[4558]: E0120 17:46:14.186490 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa551db1-9a4b-45c2-b640-4f3518b162f4" containerName="account-reaper" Jan 20 17:46:14 crc kubenswrapper[4558]: 
I0120 17:46:14.186496 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa551db1-9a4b-45c2-b640-4f3518b162f4" containerName="account-reaper" Jan 20 17:46:14 crc kubenswrapper[4558]: E0120 17:46:14.186503 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7faabe16-9de5-49dc-bab6-44e173f4403c" containerName="cinder-scheduler" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.186509 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7faabe16-9de5-49dc-bab6-44e173f4403c" containerName="cinder-scheduler" Jan 20 17:46:14 crc kubenswrapper[4558]: E0120 17:46:14.186516 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4765e529-9729-4d27-a252-c0c9a7b67beb" containerName="setup-container" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.186522 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4765e529-9729-4d27-a252-c0c9a7b67beb" containerName="setup-container" Jan 20 17:46:14 crc kubenswrapper[4558]: E0120 17:46:14.186530 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa551db1-9a4b-45c2-b640-4f3518b162f4" containerName="object-expirer" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.186536 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa551db1-9a4b-45c2-b640-4f3518b162f4" containerName="object-expirer" Jan 20 17:46:14 crc kubenswrapper[4558]: E0120 17:46:14.186542 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c77f6fd-736b-49e4-aa1c-ddffa451f3dc" containerName="ceilometer-central-agent" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.186546 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c77f6fd-736b-49e4-aa1c-ddffa451f3dc" containerName="ceilometer-central-agent" Jan 20 17:46:14 crc kubenswrapper[4558]: E0120 17:46:14.186558 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50cacddd-ebea-477f-af64-6e96a09a242e" containerName="nova-cell0-conductor-conductor" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.186563 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="50cacddd-ebea-477f-af64-6e96a09a242e" containerName="nova-cell0-conductor-conductor" Jan 20 17:46:14 crc kubenswrapper[4558]: E0120 17:46:14.186569 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa551db1-9a4b-45c2-b640-4f3518b162f4" containerName="object-server" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.186575 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa551db1-9a4b-45c2-b640-4f3518b162f4" containerName="object-server" Jan 20 17:46:14 crc kubenswrapper[4558]: E0120 17:46:14.186588 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa551db1-9a4b-45c2-b640-4f3518b162f4" containerName="swift-recon-cron" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.186593 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa551db1-9a4b-45c2-b640-4f3518b162f4" containerName="swift-recon-cron" Jan 20 17:46:14 crc kubenswrapper[4558]: E0120 17:46:14.186601 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56173817-8246-4f37-b157-3890912004ca" containerName="glance-httpd" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.186606 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="56173817-8246-4f37-b157-3890912004ca" containerName="glance-httpd" Jan 20 17:46:14 crc kubenswrapper[4558]: E0120 17:46:14.186615 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70182fd8-a242-40f5-b20d-8ff4dd33e9b1" 
containerName="nova-api-api" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.186620 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="70182fd8-a242-40f5-b20d-8ff4dd33e9b1" containerName="nova-api-api" Jan 20 17:46:14 crc kubenswrapper[4558]: E0120 17:46:14.186631 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea72c008-3a66-4577-8042-4b1e0ed1cca6" containerName="nova-cell1-conductor-conductor" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.186636 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea72c008-3a66-4577-8042-4b1e0ed1cca6" containerName="nova-cell1-conductor-conductor" Jan 20 17:46:14 crc kubenswrapper[4558]: E0120 17:46:14.186644 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa551db1-9a4b-45c2-b640-4f3518b162f4" containerName="container-replicator" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.186650 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa551db1-9a4b-45c2-b640-4f3518b162f4" containerName="container-replicator" Jan 20 17:46:14 crc kubenswrapper[4558]: E0120 17:46:14.186656 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd5cfc55-8cd9-4a76-87b8-4050b8f1030f" containerName="barbican-keystone-listener" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.186662 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd5cfc55-8cd9-4a76-87b8-4050b8f1030f" containerName="barbican-keystone-listener" Jan 20 17:46:14 crc kubenswrapper[4558]: E0120 17:46:14.186671 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50cacddd-ebea-477f-af64-6e96a09a242e" containerName="nova-cell0-conductor-conductor" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.186677 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="50cacddd-ebea-477f-af64-6e96a09a242e" containerName="nova-cell0-conductor-conductor" Jan 20 17:46:14 crc kubenswrapper[4558]: E0120 17:46:14.186683 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff0d0703-8173-4ac0-afc0-e673feaef286" containerName="nova-metadata-log" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.186689 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff0d0703-8173-4ac0-afc0-e673feaef286" containerName="nova-metadata-log" Jan 20 17:46:14 crc kubenswrapper[4558]: E0120 17:46:14.186698 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f87490e8-557d-41e3-a07b-8fe12147b315" containerName="glance-log" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.186704 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f87490e8-557d-41e3-a07b-8fe12147b315" containerName="glance-log" Jan 20 17:46:14 crc kubenswrapper[4558]: E0120 17:46:14.186712 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa551db1-9a4b-45c2-b640-4f3518b162f4" containerName="account-server" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.186718 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa551db1-9a4b-45c2-b640-4f3518b162f4" containerName="account-server" Jan 20 17:46:14 crc kubenswrapper[4558]: E0120 17:46:14.186727 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de4d8126-91cf-4149-bec4-4accaf558308" containerName="rabbitmq" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.186734 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="de4d8126-91cf-4149-bec4-4accaf558308" containerName="rabbitmq" Jan 20 17:46:14 crc kubenswrapper[4558]: E0120 17:46:14.186741 4558 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="fa551db1-9a4b-45c2-b640-4f3518b162f4" containerName="rsync" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.186746 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa551db1-9a4b-45c2-b640-4f3518b162f4" containerName="rsync" Jan 20 17:46:14 crc kubenswrapper[4558]: E0120 17:46:14.186753 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8361ba4f-e976-4c04-82ad-81e9412ba84c" containerName="neutron-httpd" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.186760 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8361ba4f-e976-4c04-82ad-81e9412ba84c" containerName="neutron-httpd" Jan 20 17:46:14 crc kubenswrapper[4558]: E0120 17:46:14.186769 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff0d0703-8173-4ac0-afc0-e673feaef286" containerName="nova-metadata-metadata" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.186775 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff0d0703-8173-4ac0-afc0-e673feaef286" containerName="nova-metadata-metadata" Jan 20 17:46:14 crc kubenswrapper[4558]: E0120 17:46:14.186786 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa551db1-9a4b-45c2-b640-4f3518b162f4" containerName="account-auditor" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.186793 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa551db1-9a4b-45c2-b640-4f3518b162f4" containerName="account-auditor" Jan 20 17:46:14 crc kubenswrapper[4558]: E0120 17:46:14.186800 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d54110a-6349-45a3-ad58-1bf105f65293" containerName="dnsmasq-dns" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.186807 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d54110a-6349-45a3-ad58-1bf105f65293" containerName="dnsmasq-dns" Jan 20 17:46:14 crc kubenswrapper[4558]: E0120 17:46:14.186817 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4765e529-9729-4d27-a252-c0c9a7b67beb" containerName="rabbitmq" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.186823 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4765e529-9729-4d27-a252-c0c9a7b67beb" containerName="rabbitmq" Jan 20 17:46:14 crc kubenswrapper[4558]: E0120 17:46:14.186829 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7faabe16-9de5-49dc-bab6-44e173f4403c" containerName="cinder-scheduler" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.186835 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7faabe16-9de5-49dc-bab6-44e173f4403c" containerName="cinder-scheduler" Jan 20 17:46:14 crc kubenswrapper[4558]: E0120 17:46:14.186840 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c77f6fd-736b-49e4-aa1c-ddffa451f3dc" containerName="ceilometer-notification-agent" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.186847 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c77f6fd-736b-49e4-aa1c-ddffa451f3dc" containerName="ceilometer-notification-agent" Jan 20 17:46:14 crc kubenswrapper[4558]: E0120 17:46:14.186859 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f87490e8-557d-41e3-a07b-8fe12147b315" containerName="glance-httpd" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.186865 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f87490e8-557d-41e3-a07b-8fe12147b315" containerName="glance-httpd" Jan 20 17:46:14 crc kubenswrapper[4558]: E0120 17:46:14.186875 4558 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa551db1-9a4b-45c2-b640-4f3518b162f4" containerName="container-server" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.186881 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa551db1-9a4b-45c2-b640-4f3518b162f4" containerName="container-server" Jan 20 17:46:14 crc kubenswrapper[4558]: E0120 17:46:14.186889 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86aff100-d474-48ed-b673-4dae7c0722cf" containerName="galera" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.186894 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="86aff100-d474-48ed-b673-4dae7c0722cf" containerName="galera" Jan 20 17:46:14 crc kubenswrapper[4558]: E0120 17:46:14.186900 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50cacddd-ebea-477f-af64-6e96a09a242e" containerName="nova-cell0-conductor-conductor" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.186907 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="50cacddd-ebea-477f-af64-6e96a09a242e" containerName="nova-cell0-conductor-conductor" Jan 20 17:46:14 crc kubenswrapper[4558]: E0120 17:46:14.186916 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="131735a5-8043-40d7-a15d-f0024356e584" containerName="memcached" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.186942 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="131735a5-8043-40d7-a15d-f0024356e584" containerName="memcached" Jan 20 17:46:14 crc kubenswrapper[4558]: E0120 17:46:14.186950 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70182fd8-a242-40f5-b20d-8ff4dd33e9b1" containerName="nova-api-log" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.186955 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="70182fd8-a242-40f5-b20d-8ff4dd33e9b1" containerName="nova-api-log" Jan 20 17:46:14 crc kubenswrapper[4558]: E0120 17:46:14.186966 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa551db1-9a4b-45c2-b640-4f3518b162f4" containerName="object-updater" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.186972 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa551db1-9a4b-45c2-b640-4f3518b162f4" containerName="object-updater" Jan 20 17:46:14 crc kubenswrapper[4558]: E0120 17:46:14.186982 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa551db1-9a4b-45c2-b640-4f3518b162f4" containerName="container-auditor" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.186987 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa551db1-9a4b-45c2-b640-4f3518b162f4" containerName="container-auditor" Jan 20 17:46:14 crc kubenswrapper[4558]: E0120 17:46:14.186994 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5cbfd26c-728d-420c-9d40-b6f7870cff60" containerName="keystone-api" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.186999 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="5cbfd26c-728d-420c-9d40-b6f7870cff60" containerName="keystone-api" Jan 20 17:46:14 crc kubenswrapper[4558]: E0120 17:46:14.187005 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50804fa8-d09d-49f8-a143-d7ec24ec542a" containerName="placement-log" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.187011 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="50804fa8-d09d-49f8-a143-d7ec24ec542a" containerName="placement-log" Jan 20 17:46:14 crc kubenswrapper[4558]: E0120 
17:46:14.187017 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2" containerName="barbican-worker-log" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.187023 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2" containerName="barbican-worker-log" Jan 20 17:46:14 crc kubenswrapper[4558]: E0120 17:46:14.187032 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa551db1-9a4b-45c2-b640-4f3518b162f4" containerName="account-replicator" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.187038 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa551db1-9a4b-45c2-b640-4f3518b162f4" containerName="account-replicator" Jan 20 17:46:14 crc kubenswrapper[4558]: E0120 17:46:14.187047 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b752af55-9c06-469f-9353-e1042300de3c" containerName="cinder-api" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.187053 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b752af55-9c06-469f-9353-e1042300de3c" containerName="cinder-api" Jan 20 17:46:14 crc kubenswrapper[4558]: E0120 17:46:14.187062 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7faabe16-9de5-49dc-bab6-44e173f4403c" containerName="probe" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.187067 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7faabe16-9de5-49dc-bab6-44e173f4403c" containerName="probe" Jan 20 17:46:14 crc kubenswrapper[4558]: E0120 17:46:14.187072 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c77f6fd-736b-49e4-aa1c-ddffa451f3dc" containerName="sg-core" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.187078 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c77f6fd-736b-49e4-aa1c-ddffa451f3dc" containerName="sg-core" Jan 20 17:46:14 crc kubenswrapper[4558]: E0120 17:46:14.187086 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa551db1-9a4b-45c2-b640-4f3518b162f4" containerName="object-auditor" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.187091 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa551db1-9a4b-45c2-b640-4f3518b162f4" containerName="object-auditor" Jan 20 17:46:14 crc kubenswrapper[4558]: E0120 17:46:14.187097 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2" containerName="barbican-worker" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.187102 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2" containerName="barbican-worker" Jan 20 17:46:14 crc kubenswrapper[4558]: E0120 17:46:14.187109 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c77f6fd-736b-49e4-aa1c-ddffa451f3dc" containerName="proxy-httpd" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.187114 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c77f6fd-736b-49e4-aa1c-ddffa451f3dc" containerName="proxy-httpd" Jan 20 17:46:14 crc kubenswrapper[4558]: E0120 17:46:14.187119 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa551db1-9a4b-45c2-b640-4f3518b162f4" containerName="object-replicator" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.187125 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa551db1-9a4b-45c2-b640-4f3518b162f4" containerName="object-replicator" Jan 20 17:46:14 crc kubenswrapper[4558]: E0120 17:46:14.187135 
4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa551db1-9a4b-45c2-b640-4f3518b162f4" containerName="container-updater" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.187142 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa551db1-9a4b-45c2-b640-4f3518b162f4" containerName="container-updater" Jan 20 17:46:14 crc kubenswrapper[4558]: E0120 17:46:14.187151 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8361ba4f-e976-4c04-82ad-81e9412ba84c" containerName="neutron-api" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.187157 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8361ba4f-e976-4c04-82ad-81e9412ba84c" containerName="neutron-api" Jan 20 17:46:14 crc kubenswrapper[4558]: E0120 17:46:14.187183 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56173817-8246-4f37-b157-3890912004ca" containerName="glance-log" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.187189 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="56173817-8246-4f37-b157-3890912004ca" containerName="glance-log" Jan 20 17:46:14 crc kubenswrapper[4558]: E0120 17:46:14.187199 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b752af55-9c06-469f-9353-e1042300de3c" containerName="cinder-api-log" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.187205 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b752af55-9c06-469f-9353-e1042300de3c" containerName="cinder-api-log" Jan 20 17:46:14 crc kubenswrapper[4558]: E0120 17:46:14.187214 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d619cd37-a474-4965-b382-749ed6d55d6d" containerName="barbican-api-log" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.187220 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d619cd37-a474-4965-b382-749ed6d55d6d" containerName="barbican-api-log" Jan 20 17:46:14 crc kubenswrapper[4558]: E0120 17:46:14.187228 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ddd57d3b-c9de-46e3-897e-1a50ae49630e" containerName="nova-scheduler-scheduler" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.187233 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ddd57d3b-c9de-46e3-897e-1a50ae49630e" containerName="nova-scheduler-scheduler" Jan 20 17:46:14 crc kubenswrapper[4558]: E0120 17:46:14.187243 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d54110a-6349-45a3-ad58-1bf105f65293" containerName="init" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.187249 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d54110a-6349-45a3-ad58-1bf105f65293" containerName="init" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.187390 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d619cd37-a474-4965-b382-749ed6d55d6d" containerName="barbican-api" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.187404 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="70182fd8-a242-40f5-b20d-8ff4dd33e9b1" containerName="nova-api-log" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.187411 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="50cacddd-ebea-477f-af64-6e96a09a242e" containerName="nova-cell0-conductor-conductor" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.187420 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa551db1-9a4b-45c2-b640-4f3518b162f4" containerName="container-updater" Jan 20 17:46:14 crc 
kubenswrapper[4558]: I0120 17:46:14.187427 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="50cacddd-ebea-477f-af64-6e96a09a242e" containerName="nova-cell0-conductor-conductor" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.187433 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa551db1-9a4b-45c2-b640-4f3518b162f4" containerName="object-auditor" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.187439 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8361ba4f-e976-4c04-82ad-81e9412ba84c" containerName="neutron-httpd" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.187445 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="7faabe16-9de5-49dc-bab6-44e173f4403c" containerName="probe" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.187455 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa551db1-9a4b-45c2-b640-4f3518b162f4" containerName="account-auditor" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.187463 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="50804fa8-d09d-49f8-a143-d7ec24ec542a" containerName="placement-log" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.187470 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa551db1-9a4b-45c2-b640-4f3518b162f4" containerName="rsync" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.187475 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f87490e8-557d-41e3-a07b-8fe12147b315" containerName="glance-log" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.187483 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd5cfc55-8cd9-4a76-87b8-4050b8f1030f" containerName="barbican-keystone-listener-log" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.187490 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa551db1-9a4b-45c2-b640-4f3518b162f4" containerName="container-server" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.187497 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="2c77f6fd-736b-49e4-aa1c-ddffa451f3dc" containerName="ceilometer-notification-agent" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.187504 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="50cacddd-ebea-477f-af64-6e96a09a242e" containerName="nova-cell0-conductor-conductor" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.187510 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8361ba4f-e976-4c04-82ad-81e9412ba84c" containerName="neutron-api" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.187518 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa551db1-9a4b-45c2-b640-4f3518b162f4" containerName="account-replicator" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.187527 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f87490e8-557d-41e3-a07b-8fe12147b315" containerName="glance-httpd" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.187535 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ff0d0703-8173-4ac0-afc0-e673feaef286" containerName="nova-metadata-metadata" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.187541 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="2c77f6fd-736b-49e4-aa1c-ddffa451f3dc" containerName="sg-core" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.187547 4558 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="0d54110a-6349-45a3-ad58-1bf105f65293" containerName="dnsmasq-dns" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.187552 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="4765e529-9729-4d27-a252-c0c9a7b67beb" containerName="rabbitmq" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.187560 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa551db1-9a4b-45c2-b640-4f3518b162f4" containerName="account-server" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.187567 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="56173817-8246-4f37-b157-3890912004ca" containerName="glance-httpd" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.187577 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa551db1-9a4b-45c2-b640-4f3518b162f4" containerName="account-reaper" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.187583 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea72c008-3a66-4577-8042-4b1e0ed1cca6" containerName="nova-cell1-conductor-conductor" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.187589 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa551db1-9a4b-45c2-b640-4f3518b162f4" containerName="swift-recon-cron" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.187595 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa551db1-9a4b-45c2-b640-4f3518b162f4" containerName="container-auditor" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.187603 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d619cd37-a474-4965-b382-749ed6d55d6d" containerName="barbican-api-log" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.187610 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="2c77f6fd-736b-49e4-aa1c-ddffa451f3dc" containerName="proxy-httpd" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.187616 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b752af55-9c06-469f-9353-e1042300de3c" containerName="cinder-api-log" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.187622 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="50804fa8-d09d-49f8-a143-d7ec24ec542a" containerName="placement-api" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.187629 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa551db1-9a4b-45c2-b640-4f3518b162f4" containerName="object-updater" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.187637 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="56173817-8246-4f37-b157-3890912004ca" containerName="glance-log" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.187645 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa551db1-9a4b-45c2-b640-4f3518b162f4" containerName="object-replicator" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.187652 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea72c008-3a66-4577-8042-4b1e0ed1cca6" containerName="nova-cell1-conductor-conductor" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.187660 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="131735a5-8043-40d7-a15d-f0024356e584" containerName="memcached" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.187665 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="70182fd8-a242-40f5-b20d-8ff4dd33e9b1" 
containerName="nova-api-api" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.187673 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="86aff100-d474-48ed-b673-4dae7c0722cf" containerName="galera" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.187680 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2" containerName="barbican-worker-log" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.187686 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ff0d0703-8173-4ac0-afc0-e673feaef286" containerName="nova-metadata-log" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.187693 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="7faabe16-9de5-49dc-bab6-44e173f4403c" containerName="cinder-scheduler" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.187700 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="1a1aeb0f-ba20-4c24-a8d3-f3f2765328c2" containerName="barbican-worker" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.187706 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd5cfc55-8cd9-4a76-87b8-4050b8f1030f" containerName="barbican-keystone-listener" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.187714 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ddd57d3b-c9de-46e3-897e-1a50ae49630e" containerName="nova-scheduler-scheduler" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.187720 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="de4d8126-91cf-4149-bec4-4accaf558308" containerName="rabbitmq" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.187729 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="5cbfd26c-728d-420c-9d40-b6f7870cff60" containerName="keystone-api" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.187736 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa551db1-9a4b-45c2-b640-4f3518b162f4" containerName="container-replicator" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.187745 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa551db1-9a4b-45c2-b640-4f3518b162f4" containerName="object-expirer" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.187753 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea72c008-3a66-4577-8042-4b1e0ed1cca6" containerName="nova-cell1-conductor-conductor" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.187761 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="7faabe16-9de5-49dc-bab6-44e173f4403c" containerName="cinder-scheduler" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.187769 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa551db1-9a4b-45c2-b640-4f3518b162f4" containerName="object-server" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.187779 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b752af55-9c06-469f-9353-e1042300de3c" containerName="cinder-api" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.187786 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="2c77f6fd-736b-49e4-aa1c-ddffa451f3dc" containerName="ceilometer-central-agent" Jan 20 17:46:14 crc kubenswrapper[4558]: E0120 17:46:14.187947 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50cacddd-ebea-477f-af64-6e96a09a242e" containerName="nova-cell0-conductor-conductor" Jan 20 17:46:14 crc 
kubenswrapper[4558]: I0120 17:46:14.187959 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="50cacddd-ebea-477f-af64-6e96a09a242e" containerName="nova-cell0-conductor-conductor" Jan 20 17:46:14 crc kubenswrapper[4558]: E0120 17:46:14.187979 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea72c008-3a66-4577-8042-4b1e0ed1cca6" containerName="nova-cell1-conductor-conductor" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.187985 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea72c008-3a66-4577-8042-4b1e0ed1cca6" containerName="nova-cell1-conductor-conductor" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.188119 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="50cacddd-ebea-477f-af64-6e96a09a242e" containerName="nova-cell0-conductor-conductor" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.188128 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="50cacddd-ebea-477f-af64-6e96a09a242e" containerName="nova-cell0-conductor-conductor" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.188836 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.193134 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"rabbitmq-erlang-cookie" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.193403 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-rabbitmq-svc" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.193627 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"rabbitmq-server-conf" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.193803 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"rabbitmq-default-user" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.194082 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"rabbitmq-server-dockercfg-prw6v" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.194283 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"rabbitmq-config-data" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.194459 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"rabbitmq-plugins-conf" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.197386 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.342893 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/088e44cc-3515-4736-aa46-721774902209-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"088e44cc-3515-4736-aa46-721774902209\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.342960 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-server-0\" (UID: \"088e44cc-3515-4736-aa46-721774902209\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.343001 4558 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/088e44cc-3515-4736-aa46-721774902209-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"088e44cc-3515-4736-aa46-721774902209\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.343021 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/088e44cc-3515-4736-aa46-721774902209-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"088e44cc-3515-4736-aa46-721774902209\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.343196 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/088e44cc-3515-4736-aa46-721774902209-pod-info\") pod \"rabbitmq-server-0\" (UID: \"088e44cc-3515-4736-aa46-721774902209\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.343544 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/088e44cc-3515-4736-aa46-721774902209-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"088e44cc-3515-4736-aa46-721774902209\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.343619 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/088e44cc-3515-4736-aa46-721774902209-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"088e44cc-3515-4736-aa46-721774902209\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.343820 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/088e44cc-3515-4736-aa46-721774902209-config-data\") pod \"rabbitmq-server-0\" (UID: \"088e44cc-3515-4736-aa46-721774902209\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.343895 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/088e44cc-3515-4736-aa46-721774902209-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"088e44cc-3515-4736-aa46-721774902209\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.343917 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/088e44cc-3515-4736-aa46-721774902209-server-conf\") pod \"rabbitmq-server-0\" (UID: \"088e44cc-3515-4736-aa46-721774902209\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.343991 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x65sw\" (UniqueName: \"kubernetes.io/projected/088e44cc-3515-4736-aa46-721774902209-kube-api-access-x65sw\") pod \"rabbitmq-server-0\" (UID: \"088e44cc-3515-4736-aa46-721774902209\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.435953 4558 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-0"] Jan 20 17:46:14 crc kubenswrapper[4558]: E0120 17:46:14.436306 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50cacddd-ebea-477f-af64-6e96a09a242e" containerName="nova-cell0-conductor-conductor" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.436323 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="50cacddd-ebea-477f-af64-6e96a09a242e" containerName="nova-cell0-conductor-conductor" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.437189 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.438645 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"rabbitmq-cell1-config-data" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.439242 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"rabbitmq-cell1-default-user" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.439347 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"rabbitmq-cell1-server-dockercfg-tllf9" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.439407 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"rabbitmq-cell1-erlang-cookie" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.439604 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-rabbitmq-cell1-svc" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.439641 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"rabbitmq-cell1-server-conf" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.439640 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"rabbitmq-cell1-plugins-conf" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.444767 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/088e44cc-3515-4736-aa46-721774902209-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"088e44cc-3515-4736-aa46-721774902209\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.444812 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/088e44cc-3515-4736-aa46-721774902209-config-data\") pod \"rabbitmq-server-0\" (UID: \"088e44cc-3515-4736-aa46-721774902209\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.444840 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/088e44cc-3515-4736-aa46-721774902209-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"088e44cc-3515-4736-aa46-721774902209\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.444855 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/088e44cc-3515-4736-aa46-721774902209-server-conf\") pod \"rabbitmq-server-0\" (UID: \"088e44cc-3515-4736-aa46-721774902209\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:46:14 crc 
kubenswrapper[4558]: I0120 17:46:14.444883 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x65sw\" (UniqueName: \"kubernetes.io/projected/088e44cc-3515-4736-aa46-721774902209-kube-api-access-x65sw\") pod \"rabbitmq-server-0\" (UID: \"088e44cc-3515-4736-aa46-721774902209\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.444903 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/088e44cc-3515-4736-aa46-721774902209-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"088e44cc-3515-4736-aa46-721774902209\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.444935 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-server-0\" (UID: \"088e44cc-3515-4736-aa46-721774902209\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.444960 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/088e44cc-3515-4736-aa46-721774902209-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"088e44cc-3515-4736-aa46-721774902209\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.444977 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/088e44cc-3515-4736-aa46-721774902209-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"088e44cc-3515-4736-aa46-721774902209\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.444998 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/088e44cc-3515-4736-aa46-721774902209-pod-info\") pod \"rabbitmq-server-0\" (UID: \"088e44cc-3515-4736-aa46-721774902209\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.445049 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/088e44cc-3515-4736-aa46-721774902209-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"088e44cc-3515-4736-aa46-721774902209\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.445784 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/088e44cc-3515-4736-aa46-721774902209-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"088e44cc-3515-4736-aa46-721774902209\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.445866 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/088e44cc-3515-4736-aa46-721774902209-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"088e44cc-3515-4736-aa46-721774902209\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.446110 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume 
\"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-server-0\" (UID: \"088e44cc-3515-4736-aa46-721774902209\") device mount path \"/mnt/openstack/pv12\"" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.446578 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/088e44cc-3515-4736-aa46-721774902209-config-data\") pod \"rabbitmq-server-0\" (UID: \"088e44cc-3515-4736-aa46-721774902209\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.447179 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/088e44cc-3515-4736-aa46-721774902209-server-conf\") pod \"rabbitmq-server-0\" (UID: \"088e44cc-3515-4736-aa46-721774902209\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.447920 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/088e44cc-3515-4736-aa46-721774902209-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"088e44cc-3515-4736-aa46-721774902209\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.449078 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-0"] Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.451657 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/088e44cc-3515-4736-aa46-721774902209-pod-info\") pod \"rabbitmq-server-0\" (UID: \"088e44cc-3515-4736-aa46-721774902209\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.451983 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/088e44cc-3515-4736-aa46-721774902209-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"088e44cc-3515-4736-aa46-721774902209\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.453156 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/088e44cc-3515-4736-aa46-721774902209-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"088e44cc-3515-4736-aa46-721774902209\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.457824 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/088e44cc-3515-4736-aa46-721774902209-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"088e44cc-3515-4736-aa46-721774902209\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.463395 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x65sw\" (UniqueName: \"kubernetes.io/projected/088e44cc-3515-4736-aa46-721774902209-kube-api-access-x65sw\") pod \"rabbitmq-server-0\" (UID: \"088e44cc-3515-4736-aa46-721774902209\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.466312 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" 
(UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-server-0\" (UID: \"088e44cc-3515-4736-aa46-721774902209\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.518150 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.546736 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c4a11c40-a157-4a28-b1a3-60c211d1d0bf-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"c4a11c40-a157-4a28-b1a3-60c211d1d0bf\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.546780 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c4a11c40-a157-4a28-b1a3-60c211d1d0bf-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"c4a11c40-a157-4a28-b1a3-60c211d1d0bf\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.546815 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dwg9p\" (UniqueName: \"kubernetes.io/projected/c4a11c40-a157-4a28-b1a3-60c211d1d0bf-kube-api-access-dwg9p\") pod \"rabbitmq-cell1-server-0\" (UID: \"c4a11c40-a157-4a28-b1a3-60c211d1d0bf\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.546845 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c4a11c40-a157-4a28-b1a3-60c211d1d0bf-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"c4a11c40-a157-4a28-b1a3-60c211d1d0bf\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.546944 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c4a11c40-a157-4a28-b1a3-60c211d1d0bf-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"c4a11c40-a157-4a28-b1a3-60c211d1d0bf\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.546975 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c4a11c40-a157-4a28-b1a3-60c211d1d0bf-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"c4a11c40-a157-4a28-b1a3-60c211d1d0bf\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.547049 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c4a11c40-a157-4a28-b1a3-60c211d1d0bf-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"c4a11c40-a157-4a28-b1a3-60c211d1d0bf\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.547117 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c4a11c40-a157-4a28-b1a3-60c211d1d0bf-server-conf\") pod 
\"rabbitmq-cell1-server-0\" (UID: \"c4a11c40-a157-4a28-b1a3-60c211d1d0bf\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.547154 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"c4a11c40-a157-4a28-b1a3-60c211d1d0bf\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.547217 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c4a11c40-a157-4a28-b1a3-60c211d1d0bf-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"c4a11c40-a157-4a28-b1a3-60c211d1d0bf\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.547288 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c4a11c40-a157-4a28-b1a3-60c211d1d0bf-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"c4a11c40-a157-4a28-b1a3-60c211d1d0bf\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.651019 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c4a11c40-a157-4a28-b1a3-60c211d1d0bf-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"c4a11c40-a157-4a28-b1a3-60c211d1d0bf\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.651120 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c4a11c40-a157-4a28-b1a3-60c211d1d0bf-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"c4a11c40-a157-4a28-b1a3-60c211d1d0bf\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.651142 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c4a11c40-a157-4a28-b1a3-60c211d1d0bf-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"c4a11c40-a157-4a28-b1a3-60c211d1d0bf\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.651264 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dwg9p\" (UniqueName: \"kubernetes.io/projected/c4a11c40-a157-4a28-b1a3-60c211d1d0bf-kube-api-access-dwg9p\") pod \"rabbitmq-cell1-server-0\" (UID: \"c4a11c40-a157-4a28-b1a3-60c211d1d0bf\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.651297 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c4a11c40-a157-4a28-b1a3-60c211d1d0bf-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"c4a11c40-a157-4a28-b1a3-60c211d1d0bf\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.651322 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: 
\"kubernetes.io/configmap/c4a11c40-a157-4a28-b1a3-60c211d1d0bf-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"c4a11c40-a157-4a28-b1a3-60c211d1d0bf\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.651340 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c4a11c40-a157-4a28-b1a3-60c211d1d0bf-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"c4a11c40-a157-4a28-b1a3-60c211d1d0bf\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.651364 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c4a11c40-a157-4a28-b1a3-60c211d1d0bf-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"c4a11c40-a157-4a28-b1a3-60c211d1d0bf\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.651390 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c4a11c40-a157-4a28-b1a3-60c211d1d0bf-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"c4a11c40-a157-4a28-b1a3-60c211d1d0bf\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.651413 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"c4a11c40-a157-4a28-b1a3-60c211d1d0bf\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.651441 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c4a11c40-a157-4a28-b1a3-60c211d1d0bf-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"c4a11c40-a157-4a28-b1a3-60c211d1d0bf\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.651752 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"c4a11c40-a157-4a28-b1a3-60c211d1d0bf\") device mount path \"/mnt/openstack/pv06\"" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.653035 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c4a11c40-a157-4a28-b1a3-60c211d1d0bf-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"c4a11c40-a157-4a28-b1a3-60c211d1d0bf\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.653334 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c4a11c40-a157-4a28-b1a3-60c211d1d0bf-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"c4a11c40-a157-4a28-b1a3-60c211d1d0bf\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.653582 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c4a11c40-a157-4a28-b1a3-60c211d1d0bf-rabbitmq-plugins\") pod 
\"rabbitmq-cell1-server-0\" (UID: \"c4a11c40-a157-4a28-b1a3-60c211d1d0bf\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.653802 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c4a11c40-a157-4a28-b1a3-60c211d1d0bf-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"c4a11c40-a157-4a28-b1a3-60c211d1d0bf\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.653888 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c4a11c40-a157-4a28-b1a3-60c211d1d0bf-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"c4a11c40-a157-4a28-b1a3-60c211d1d0bf\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.661831 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c4a11c40-a157-4a28-b1a3-60c211d1d0bf-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"c4a11c40-a157-4a28-b1a3-60c211d1d0bf\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.661878 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c4a11c40-a157-4a28-b1a3-60c211d1d0bf-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"c4a11c40-a157-4a28-b1a3-60c211d1d0bf\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.662410 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c4a11c40-a157-4a28-b1a3-60c211d1d0bf-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"c4a11c40-a157-4a28-b1a3-60c211d1d0bf\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.665503 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c4a11c40-a157-4a28-b1a3-60c211d1d0bf-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"c4a11c40-a157-4a28-b1a3-60c211d1d0bf\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.667895 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dwg9p\" (UniqueName: \"kubernetes.io/projected/c4a11c40-a157-4a28-b1a3-60c211d1d0bf-kube-api-access-dwg9p\") pod \"rabbitmq-cell1-server-0\" (UID: \"c4a11c40-a157-4a28-b1a3-60c211d1d0bf\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.671373 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"c4a11c40-a157-4a28-b1a3-60c211d1d0bf\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.796477 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:46:14 crc kubenswrapper[4558]: I0120 17:46:14.926722 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:46:15 crc kubenswrapper[4558]: I0120 17:46:15.057039 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:46:15 crc kubenswrapper[4558]: I0120 17:46:15.058434 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:46:15 crc kubenswrapper[4558]: I0120 17:46:15.061065 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-galera-openstack-svc" Jan 20 17:46:15 crc kubenswrapper[4558]: I0120 17:46:15.061455 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-scripts" Jan 20 17:46:15 crc kubenswrapper[4558]: I0120 17:46:15.061571 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-config-data" Jan 20 17:46:15 crc kubenswrapper[4558]: I0120 17:46:15.061569 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"galera-openstack-dockercfg-7rwrt" Jan 20 17:46:15 crc kubenswrapper[4558]: I0120 17:46:15.065248 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:46:15 crc kubenswrapper[4558]: I0120 17:46:15.067158 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"combined-ca-bundle" Jan 20 17:46:15 crc kubenswrapper[4558]: I0120 17:46:15.160952 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c43f6123-71d1-4cf0-b919-00b1e9836c8b-operator-scripts\") pod \"openstack-galera-0\" (UID: \"c43f6123-71d1-4cf0-b919-00b1e9836c8b\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:46:15 crc kubenswrapper[4558]: I0120 17:46:15.161006 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/c43f6123-71d1-4cf0-b919-00b1e9836c8b-config-data-generated\") pod \"openstack-galera-0\" (UID: \"c43f6123-71d1-4cf0-b919-00b1e9836c8b\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:46:15 crc kubenswrapper[4558]: I0120 17:46:15.161048 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/c43f6123-71d1-4cf0-b919-00b1e9836c8b-config-data-default\") pod \"openstack-galera-0\" (UID: \"c43f6123-71d1-4cf0-b919-00b1e9836c8b\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:46:15 crc kubenswrapper[4558]: I0120 17:46:15.161127 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/c43f6123-71d1-4cf0-b919-00b1e9836c8b-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"c43f6123-71d1-4cf0-b919-00b1e9836c8b\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:46:15 crc kubenswrapper[4558]: I0120 17:46:15.161155 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h5x4h\" (UniqueName: 
\"kubernetes.io/projected/c43f6123-71d1-4cf0-b919-00b1e9836c8b-kube-api-access-h5x4h\") pod \"openstack-galera-0\" (UID: \"c43f6123-71d1-4cf0-b919-00b1e9836c8b\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:46:15 crc kubenswrapper[4558]: I0120 17:46:15.161189 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c43f6123-71d1-4cf0-b919-00b1e9836c8b-kolla-config\") pod \"openstack-galera-0\" (UID: \"c43f6123-71d1-4cf0-b919-00b1e9836c8b\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:46:15 crc kubenswrapper[4558]: I0120 17:46:15.161227 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"openstack-galera-0\" (UID: \"c43f6123-71d1-4cf0-b919-00b1e9836c8b\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:46:15 crc kubenswrapper[4558]: I0120 17:46:15.161266 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c43f6123-71d1-4cf0-b919-00b1e9836c8b-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"c43f6123-71d1-4cf0-b919-00b1e9836c8b\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:46:15 crc kubenswrapper[4558]: I0120 17:46:15.203778 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-0"] Jan 20 17:46:15 crc kubenswrapper[4558]: W0120 17:46:15.212401 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc4a11c40_a157_4a28_b1a3_60c211d1d0bf.slice/crio-28ac71207cfb5bffba5917702d239d24dbca21efa7bf369a7b7619c0e415376f WatchSource:0}: Error finding container 28ac71207cfb5bffba5917702d239d24dbca21efa7bf369a7b7619c0e415376f: Status 404 returned error can't find the container with id 28ac71207cfb5bffba5917702d239d24dbca21efa7bf369a7b7619c0e415376f Jan 20 17:46:15 crc kubenswrapper[4558]: I0120 17:46:15.263096 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c43f6123-71d1-4cf0-b919-00b1e9836c8b-operator-scripts\") pod \"openstack-galera-0\" (UID: \"c43f6123-71d1-4cf0-b919-00b1e9836c8b\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:46:15 crc kubenswrapper[4558]: I0120 17:46:15.263198 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/c43f6123-71d1-4cf0-b919-00b1e9836c8b-config-data-generated\") pod \"openstack-galera-0\" (UID: \"c43f6123-71d1-4cf0-b919-00b1e9836c8b\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:46:15 crc kubenswrapper[4558]: I0120 17:46:15.263248 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/c43f6123-71d1-4cf0-b919-00b1e9836c8b-config-data-default\") pod \"openstack-galera-0\" (UID: \"c43f6123-71d1-4cf0-b919-00b1e9836c8b\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:46:15 crc kubenswrapper[4558]: I0120 17:46:15.263372 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/c43f6123-71d1-4cf0-b919-00b1e9836c8b-galera-tls-certs\") pod \"openstack-galera-0\" 
(UID: \"c43f6123-71d1-4cf0-b919-00b1e9836c8b\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:46:15 crc kubenswrapper[4558]: I0120 17:46:15.263423 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h5x4h\" (UniqueName: \"kubernetes.io/projected/c43f6123-71d1-4cf0-b919-00b1e9836c8b-kube-api-access-h5x4h\") pod \"openstack-galera-0\" (UID: \"c43f6123-71d1-4cf0-b919-00b1e9836c8b\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:46:15 crc kubenswrapper[4558]: I0120 17:46:15.263470 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c43f6123-71d1-4cf0-b919-00b1e9836c8b-kolla-config\") pod \"openstack-galera-0\" (UID: \"c43f6123-71d1-4cf0-b919-00b1e9836c8b\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:46:15 crc kubenswrapper[4558]: I0120 17:46:15.263548 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"openstack-galera-0\" (UID: \"c43f6123-71d1-4cf0-b919-00b1e9836c8b\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:46:15 crc kubenswrapper[4558]: I0120 17:46:15.264046 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c43f6123-71d1-4cf0-b919-00b1e9836c8b-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"c43f6123-71d1-4cf0-b919-00b1e9836c8b\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:46:15 crc kubenswrapper[4558]: I0120 17:46:15.263975 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"openstack-galera-0\" (UID: \"c43f6123-71d1-4cf0-b919-00b1e9836c8b\") device mount path \"/mnt/openstack/pv14\"" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:46:15 crc kubenswrapper[4558]: I0120 17:46:15.264804 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/c43f6123-71d1-4cf0-b919-00b1e9836c8b-config-data-generated\") pod \"openstack-galera-0\" (UID: \"c43f6123-71d1-4cf0-b919-00b1e9836c8b\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:46:15 crc kubenswrapper[4558]: I0120 17:46:15.265090 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c43f6123-71d1-4cf0-b919-00b1e9836c8b-kolla-config\") pod \"openstack-galera-0\" (UID: \"c43f6123-71d1-4cf0-b919-00b1e9836c8b\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:46:15 crc kubenswrapper[4558]: I0120 17:46:15.265394 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c43f6123-71d1-4cf0-b919-00b1e9836c8b-operator-scripts\") pod \"openstack-galera-0\" (UID: \"c43f6123-71d1-4cf0-b919-00b1e9836c8b\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:46:15 crc kubenswrapper[4558]: I0120 17:46:15.265717 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/c43f6123-71d1-4cf0-b919-00b1e9836c8b-config-data-default\") pod \"openstack-galera-0\" (UID: \"c43f6123-71d1-4cf0-b919-00b1e9836c8b\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:46:15 crc 
kubenswrapper[4558]: I0120 17:46:15.269110 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c43f6123-71d1-4cf0-b919-00b1e9836c8b-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"c43f6123-71d1-4cf0-b919-00b1e9836c8b\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:46:15 crc kubenswrapper[4558]: I0120 17:46:15.270275 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/c43f6123-71d1-4cf0-b919-00b1e9836c8b-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"c43f6123-71d1-4cf0-b919-00b1e9836c8b\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:46:15 crc kubenswrapper[4558]: I0120 17:46:15.281109 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h5x4h\" (UniqueName: \"kubernetes.io/projected/c43f6123-71d1-4cf0-b919-00b1e9836c8b-kube-api-access-h5x4h\") pod \"openstack-galera-0\" (UID: \"c43f6123-71d1-4cf0-b919-00b1e9836c8b\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:46:15 crc kubenswrapper[4558]: I0120 17:46:15.291947 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"openstack-galera-0\" (UID: \"c43f6123-71d1-4cf0-b919-00b1e9836c8b\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:46:15 crc kubenswrapper[4558]: I0120 17:46:15.376195 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:46:15 crc kubenswrapper[4558]: I0120 17:46:15.768507 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-0" event={"ID":"088e44cc-3515-4736-aa46-721774902209","Type":"ContainerStarted","Data":"4917e58b675184a62e14b5f3708376d411550b89d2c51d2c439c824d48cbd4de"} Jan 20 17:46:15 crc kubenswrapper[4558]: I0120 17:46:15.770082 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" event={"ID":"c4a11c40-a157-4a28-b1a3-60c211d1d0bf","Type":"ContainerStarted","Data":"28ac71207cfb5bffba5917702d239d24dbca21efa7bf369a7b7619c0e415376f"} Jan 20 17:46:15 crc kubenswrapper[4558]: I0120 17:46:15.818080 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:46:16 crc kubenswrapper[4558]: I0120 17:46:16.582011 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:46:16 crc kubenswrapper[4558]: I0120 17:46:16.584227 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:46:16 crc kubenswrapper[4558]: I0120 17:46:16.586757 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-galera-openstack-cell1-svc" Jan 20 17:46:16 crc kubenswrapper[4558]: I0120 17:46:16.587292 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-cell1-config-data" Jan 20 17:46:16 crc kubenswrapper[4558]: I0120 17:46:16.587693 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"galera-openstack-cell1-dockercfg-gzrvj" Jan 20 17:46:16 crc kubenswrapper[4558]: I0120 17:46:16.597792 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:46:16 crc kubenswrapper[4558]: I0120 17:46:16.598332 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-cell1-scripts" Jan 20 17:46:16 crc kubenswrapper[4558]: I0120 17:46:16.690110 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lfjsk\" (UniqueName: \"kubernetes.io/projected/9d54b98d-10d8-4300-89b7-031984825b5a-kube-api-access-lfjsk\") pod \"openstack-cell1-galera-0\" (UID: \"9d54b98d-10d8-4300-89b7-031984825b5a\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:46:16 crc kubenswrapper[4558]: I0120 17:46:16.690273 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9d54b98d-10d8-4300-89b7-031984825b5a-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"9d54b98d-10d8-4300-89b7-031984825b5a\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:46:16 crc kubenswrapper[4558]: I0120 17:46:16.690588 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/9d54b98d-10d8-4300-89b7-031984825b5a-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"9d54b98d-10d8-4300-89b7-031984825b5a\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:46:16 crc kubenswrapper[4558]: I0120 17:46:16.690704 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/9d54b98d-10d8-4300-89b7-031984825b5a-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"9d54b98d-10d8-4300-89b7-031984825b5a\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:46:16 crc kubenswrapper[4558]: I0120 17:46:16.690733 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/9d54b98d-10d8-4300-89b7-031984825b5a-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"9d54b98d-10d8-4300-89b7-031984825b5a\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:46:16 crc kubenswrapper[4558]: I0120 17:46:16.690777 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"openstack-cell1-galera-0\" (UID: \"9d54b98d-10d8-4300-89b7-031984825b5a\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:46:16 crc kubenswrapper[4558]: I0120 17:46:16.690808 4558 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/9d54b98d-10d8-4300-89b7-031984825b5a-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"9d54b98d-10d8-4300-89b7-031984825b5a\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:46:16 crc kubenswrapper[4558]: I0120 17:46:16.690829 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d54b98d-10d8-4300-89b7-031984825b5a-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"9d54b98d-10d8-4300-89b7-031984825b5a\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:46:16 crc kubenswrapper[4558]: I0120 17:46:16.782405 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"c43f6123-71d1-4cf0-b919-00b1e9836c8b","Type":"ContainerStarted","Data":"7977976a1e26937bc2db139e21b54dea89156dad7f528ce6155109dd256adfa2"} Jan 20 17:46:16 crc kubenswrapper[4558]: I0120 17:46:16.782483 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"c43f6123-71d1-4cf0-b919-00b1e9836c8b","Type":"ContainerStarted","Data":"852ff7584d7f888bbfb6f3fea7c51e4b3aa60cd86625c06b56888fae6d3dfd89"} Jan 20 17:46:16 crc kubenswrapper[4558]: I0120 17:46:16.784320 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-0" event={"ID":"088e44cc-3515-4736-aa46-721774902209","Type":"ContainerStarted","Data":"dfbb9c8e1cee5c7cb472c9c48b441fb80ff852d27db0124a79c1d7f68c8fa720"} Jan 20 17:46:16 crc kubenswrapper[4558]: I0120 17:46:16.786870 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" event={"ID":"c4a11c40-a157-4a28-b1a3-60c211d1d0bf","Type":"ContainerStarted","Data":"a4bc9f62c0f630c628a87ed4cdeafe946a3360d7bb2a8e7f9e501c633d62f580"} Jan 20 17:46:16 crc kubenswrapper[4558]: I0120 17:46:16.792620 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"openstack-cell1-galera-0\" (UID: \"9d54b98d-10d8-4300-89b7-031984825b5a\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:46:16 crc kubenswrapper[4558]: I0120 17:46:16.792676 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/9d54b98d-10d8-4300-89b7-031984825b5a-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"9d54b98d-10d8-4300-89b7-031984825b5a\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:46:16 crc kubenswrapper[4558]: I0120 17:46:16.792709 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d54b98d-10d8-4300-89b7-031984825b5a-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"9d54b98d-10d8-4300-89b7-031984825b5a\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:46:16 crc kubenswrapper[4558]: I0120 17:46:16.792748 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lfjsk\" (UniqueName: \"kubernetes.io/projected/9d54b98d-10d8-4300-89b7-031984825b5a-kube-api-access-lfjsk\") pod \"openstack-cell1-galera-0\" (UID: 
\"9d54b98d-10d8-4300-89b7-031984825b5a\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:46:16 crc kubenswrapper[4558]: I0120 17:46:16.792775 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9d54b98d-10d8-4300-89b7-031984825b5a-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"9d54b98d-10d8-4300-89b7-031984825b5a\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:46:16 crc kubenswrapper[4558]: I0120 17:46:16.792827 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/9d54b98d-10d8-4300-89b7-031984825b5a-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"9d54b98d-10d8-4300-89b7-031984825b5a\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:46:16 crc kubenswrapper[4558]: I0120 17:46:16.792866 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/9d54b98d-10d8-4300-89b7-031984825b5a-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"9d54b98d-10d8-4300-89b7-031984825b5a\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:46:16 crc kubenswrapper[4558]: I0120 17:46:16.792885 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/9d54b98d-10d8-4300-89b7-031984825b5a-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"9d54b98d-10d8-4300-89b7-031984825b5a\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:46:16 crc kubenswrapper[4558]: I0120 17:46:16.794827 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"openstack-cell1-galera-0\" (UID: \"9d54b98d-10d8-4300-89b7-031984825b5a\") device mount path \"/mnt/openstack/pv15\"" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:46:16 crc kubenswrapper[4558]: I0120 17:46:16.795244 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/9d54b98d-10d8-4300-89b7-031984825b5a-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"9d54b98d-10d8-4300-89b7-031984825b5a\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:46:16 crc kubenswrapper[4558]: I0120 17:46:16.802552 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9d54b98d-10d8-4300-89b7-031984825b5a-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"9d54b98d-10d8-4300-89b7-031984825b5a\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:46:16 crc kubenswrapper[4558]: I0120 17:46:16.802699 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/9d54b98d-10d8-4300-89b7-031984825b5a-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"9d54b98d-10d8-4300-89b7-031984825b5a\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:46:16 crc kubenswrapper[4558]: I0120 17:46:16.803089 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/9d54b98d-10d8-4300-89b7-031984825b5a-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: 
\"9d54b98d-10d8-4300-89b7-031984825b5a\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:46:16 crc kubenswrapper[4558]: I0120 17:46:16.804105 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/9d54b98d-10d8-4300-89b7-031984825b5a-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"9d54b98d-10d8-4300-89b7-031984825b5a\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:46:16 crc kubenswrapper[4558]: I0120 17:46:16.805717 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d54b98d-10d8-4300-89b7-031984825b5a-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"9d54b98d-10d8-4300-89b7-031984825b5a\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:46:16 crc kubenswrapper[4558]: I0120 17:46:16.835250 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"openstack-cell1-galera-0\" (UID: \"9d54b98d-10d8-4300-89b7-031984825b5a\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:46:16 crc kubenswrapper[4558]: I0120 17:46:16.844986 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lfjsk\" (UniqueName: \"kubernetes.io/projected/9d54b98d-10d8-4300-89b7-031984825b5a-kube-api-access-lfjsk\") pod \"openstack-cell1-galera-0\" (UID: \"9d54b98d-10d8-4300-89b7-031984825b5a\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:46:16 crc kubenswrapper[4558]: I0120 17:46:16.902774 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:46:17 crc kubenswrapper[4558]: I0120 17:46:17.303740 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:46:17 crc kubenswrapper[4558]: W0120 17:46:17.313571 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9d54b98d_10d8_4300_89b7_031984825b5a.slice/crio-b6838d9422cbda53f12c08f38617dd4acbb6865cba57a5eaa40969f51a7a5972 WatchSource:0}: Error finding container b6838d9422cbda53f12c08f38617dd4acbb6865cba57a5eaa40969f51a7a5972: Status 404 returned error can't find the container with id b6838d9422cbda53f12c08f38617dd4acbb6865cba57a5eaa40969f51a7a5972 Jan 20 17:46:17 crc kubenswrapper[4558]: I0120 17:46:17.798139 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"9d54b98d-10d8-4300-89b7-031984825b5a","Type":"ContainerStarted","Data":"aa9b9381bbdf5cb116e31b3db67d045917bc5ba77627d5ef1e20e4e91641261b"} Jan 20 17:46:17 crc kubenswrapper[4558]: I0120 17:46:17.798445 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"9d54b98d-10d8-4300-89b7-031984825b5a","Type":"ContainerStarted","Data":"b6838d9422cbda53f12c08f38617dd4acbb6865cba57a5eaa40969f51a7a5972"} Jan 20 17:46:17 crc kubenswrapper[4558]: I0120 17:46:17.811356 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:46:17 crc kubenswrapper[4558]: I0120 17:46:17.812456 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:46:17 crc kubenswrapper[4558]: I0120 17:46:17.814069 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"memcached-memcached-dockercfg-lc49s" Jan 20 17:46:17 crc kubenswrapper[4558]: I0120 17:46:17.814069 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"memcached-config-data" Jan 20 17:46:17 crc kubenswrapper[4558]: I0120 17:46:17.815470 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-memcached-svc" Jan 20 17:46:17 crc kubenswrapper[4558]: I0120 17:46:17.839885 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:46:17 crc kubenswrapper[4558]: I0120 17:46:17.910611 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/7c3fe526-7107-4cc4-aca2-eb809f317c5c-kolla-config\") pod \"memcached-0\" (UID: \"7c3fe526-7107-4cc4-aca2-eb809f317c5c\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:46:17 crc kubenswrapper[4558]: I0120 17:46:17.911879 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7c3fe526-7107-4cc4-aca2-eb809f317c5c-config-data\") pod \"memcached-0\" (UID: \"7c3fe526-7107-4cc4-aca2-eb809f317c5c\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:46:17 crc kubenswrapper[4558]: I0120 17:46:17.912275 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dd68x\" (UniqueName: \"kubernetes.io/projected/7c3fe526-7107-4cc4-aca2-eb809f317c5c-kube-api-access-dd68x\") pod \"memcached-0\" (UID: \"7c3fe526-7107-4cc4-aca2-eb809f317c5c\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:46:17 crc kubenswrapper[4558]: I0120 17:46:17.912362 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/7c3fe526-7107-4cc4-aca2-eb809f317c5c-memcached-tls-certs\") pod \"memcached-0\" (UID: \"7c3fe526-7107-4cc4-aca2-eb809f317c5c\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:46:17 crc kubenswrapper[4558]: I0120 17:46:17.912391 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c3fe526-7107-4cc4-aca2-eb809f317c5c-combined-ca-bundle\") pod \"memcached-0\" (UID: \"7c3fe526-7107-4cc4-aca2-eb809f317c5c\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:46:18 crc kubenswrapper[4558]: I0120 17:46:18.014035 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/7c3fe526-7107-4cc4-aca2-eb809f317c5c-kolla-config\") pod \"memcached-0\" (UID: \"7c3fe526-7107-4cc4-aca2-eb809f317c5c\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:46:18 crc kubenswrapper[4558]: I0120 17:46:18.014207 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7c3fe526-7107-4cc4-aca2-eb809f317c5c-config-data\") pod \"memcached-0\" (UID: \"7c3fe526-7107-4cc4-aca2-eb809f317c5c\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:46:18 crc kubenswrapper[4558]: I0120 17:46:18.014277 4558 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-dd68x\" (UniqueName: \"kubernetes.io/projected/7c3fe526-7107-4cc4-aca2-eb809f317c5c-kube-api-access-dd68x\") pod \"memcached-0\" (UID: \"7c3fe526-7107-4cc4-aca2-eb809f317c5c\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:46:18 crc kubenswrapper[4558]: I0120 17:46:18.014324 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/7c3fe526-7107-4cc4-aca2-eb809f317c5c-memcached-tls-certs\") pod \"memcached-0\" (UID: \"7c3fe526-7107-4cc4-aca2-eb809f317c5c\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:46:18 crc kubenswrapper[4558]: I0120 17:46:18.014348 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c3fe526-7107-4cc4-aca2-eb809f317c5c-combined-ca-bundle\") pod \"memcached-0\" (UID: \"7c3fe526-7107-4cc4-aca2-eb809f317c5c\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:46:18 crc kubenswrapper[4558]: I0120 17:46:18.014877 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/7c3fe526-7107-4cc4-aca2-eb809f317c5c-kolla-config\") pod \"memcached-0\" (UID: \"7c3fe526-7107-4cc4-aca2-eb809f317c5c\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:46:18 crc kubenswrapper[4558]: I0120 17:46:18.014985 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7c3fe526-7107-4cc4-aca2-eb809f317c5c-config-data\") pod \"memcached-0\" (UID: \"7c3fe526-7107-4cc4-aca2-eb809f317c5c\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:46:18 crc kubenswrapper[4558]: I0120 17:46:18.019568 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c3fe526-7107-4cc4-aca2-eb809f317c5c-combined-ca-bundle\") pod \"memcached-0\" (UID: \"7c3fe526-7107-4cc4-aca2-eb809f317c5c\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:46:18 crc kubenswrapper[4558]: I0120 17:46:18.022566 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/7c3fe526-7107-4cc4-aca2-eb809f317c5c-memcached-tls-certs\") pod \"memcached-0\" (UID: \"7c3fe526-7107-4cc4-aca2-eb809f317c5c\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:46:18 crc kubenswrapper[4558]: I0120 17:46:18.031675 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dd68x\" (UniqueName: \"kubernetes.io/projected/7c3fe526-7107-4cc4-aca2-eb809f317c5c-kube-api-access-dd68x\") pod \"memcached-0\" (UID: \"7c3fe526-7107-4cc4-aca2-eb809f317c5c\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:46:18 crc kubenswrapper[4558]: I0120 17:46:18.139887 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:46:18 crc kubenswrapper[4558]: I0120 17:46:18.556074 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:46:18 crc kubenswrapper[4558]: I0120 17:46:18.833879 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"7c3fe526-7107-4cc4-aca2-eb809f317c5c","Type":"ContainerStarted","Data":"c8f37f1ccc0f6204e6d44935b746f6c6ede31b796dd74e70992f82e8f5ab72db"} Jan 20 17:46:18 crc kubenswrapper[4558]: I0120 17:46:18.833950 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"7c3fe526-7107-4cc4-aca2-eb809f317c5c","Type":"ContainerStarted","Data":"6a57bdb9acab7eeb022ba4c1315352137137390ddf195bb1d1e1d5c0b59c4eb6"} Jan 20 17:46:18 crc kubenswrapper[4558]: I0120 17:46:18.834007 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:46:18 crc kubenswrapper[4558]: I0120 17:46:18.838022 4558 generic.go:334] "Generic (PLEG): container finished" podID="c43f6123-71d1-4cf0-b919-00b1e9836c8b" containerID="7977976a1e26937bc2db139e21b54dea89156dad7f528ce6155109dd256adfa2" exitCode=0 Jan 20 17:46:18 crc kubenswrapper[4558]: I0120 17:46:18.838251 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"c43f6123-71d1-4cf0-b919-00b1e9836c8b","Type":"ContainerDied","Data":"7977976a1e26937bc2db139e21b54dea89156dad7f528ce6155109dd256adfa2"} Jan 20 17:46:18 crc kubenswrapper[4558]: I0120 17:46:18.858340 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/memcached-0" podStartSLOduration=1.858320472 podStartE2EDuration="1.858320472s" podCreationTimestamp="2026-01-20 17:46:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:46:18.854318818 +0000 UTC m=+3872.614656785" watchObservedRunningTime="2026-01-20 17:46:18.858320472 +0000 UTC m=+3872.618658439" Jan 20 17:46:19 crc kubenswrapper[4558]: I0120 17:46:19.833606 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:46:19 crc kubenswrapper[4558]: I0120 17:46:19.835033 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:46:19 crc kubenswrapper[4558]: I0120 17:46:19.839077 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"telemetry-ceilometer-dockercfg-xhv2v" Jan 20 17:46:19 crc kubenswrapper[4558]: I0120 17:46:19.863484 4558 generic.go:334] "Generic (PLEG): container finished" podID="9d54b98d-10d8-4300-89b7-031984825b5a" containerID="aa9b9381bbdf5cb116e31b3db67d045917bc5ba77627d5ef1e20e4e91641261b" exitCode=0 Jan 20 17:46:19 crc kubenswrapper[4558]: I0120 17:46:19.864041 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"9d54b98d-10d8-4300-89b7-031984825b5a","Type":"ContainerDied","Data":"aa9b9381bbdf5cb116e31b3db67d045917bc5ba77627d5ef1e20e4e91641261b"} Jan 20 17:46:19 crc kubenswrapper[4558]: I0120 17:46:19.869880 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"c43f6123-71d1-4cf0-b919-00b1e9836c8b","Type":"ContainerStarted","Data":"df22f4721c8d074f5b610fec573fca752b358557510a94a96c91992dbb2d68ef"} Jan 20 17:46:19 crc kubenswrapper[4558]: I0120 17:46:19.869945 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:46:19 crc kubenswrapper[4558]: I0120 17:46:19.951305 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bmt7c\" (UniqueName: \"kubernetes.io/projected/de434991-f9a6-4b49-ad42-bb25402f95b2-kube-api-access-bmt7c\") pod \"kube-state-metrics-0\" (UID: \"de434991-f9a6-4b49-ad42-bb25402f95b2\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:46:19 crc kubenswrapper[4558]: I0120 17:46:19.965388 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/openstack-galera-0" podStartSLOduration=4.965369449 podStartE2EDuration="4.965369449s" podCreationTimestamp="2026-01-20 17:46:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:46:19.944056085 +0000 UTC m=+3873.704394051" watchObservedRunningTime="2026-01-20 17:46:19.965369449 +0000 UTC m=+3873.725707416" Jan 20 17:46:20 crc kubenswrapper[4558]: I0120 17:46:20.054368 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bmt7c\" (UniqueName: \"kubernetes.io/projected/de434991-f9a6-4b49-ad42-bb25402f95b2-kube-api-access-bmt7c\") pod \"kube-state-metrics-0\" (UID: \"de434991-f9a6-4b49-ad42-bb25402f95b2\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:46:20 crc kubenswrapper[4558]: I0120 17:46:20.072158 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bmt7c\" (UniqueName: \"kubernetes.io/projected/de434991-f9a6-4b49-ad42-bb25402f95b2-kube-api-access-bmt7c\") pod \"kube-state-metrics-0\" (UID: \"de434991-f9a6-4b49-ad42-bb25402f95b2\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:46:20 crc kubenswrapper[4558]: I0120 17:46:20.162832 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:46:20 crc kubenswrapper[4558]: I0120 17:46:20.578427 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:46:20 crc kubenswrapper[4558]: W0120 17:46:20.578612 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podde434991_f9a6_4b49_ad42_bb25402f95b2.slice/crio-1468560ec49e10f61c01e72c8e1cd6619948a7dfc20c237e7cd8498400e7d49b WatchSource:0}: Error finding container 1468560ec49e10f61c01e72c8e1cd6619948a7dfc20c237e7cd8498400e7d49b: Status 404 returned error can't find the container with id 1468560ec49e10f61c01e72c8e1cd6619948a7dfc20c237e7cd8498400e7d49b Jan 20 17:46:20 crc kubenswrapper[4558]: I0120 17:46:20.884839 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"9d54b98d-10d8-4300-89b7-031984825b5a","Type":"ContainerStarted","Data":"8b80d38684bc886d3672b9eba3adc33250d75483d51261047f570be9beb3ce3c"} Jan 20 17:46:20 crc kubenswrapper[4558]: I0120 17:46:20.887081 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"de434991-f9a6-4b49-ad42-bb25402f95b2","Type":"ContainerStarted","Data":"1468560ec49e10f61c01e72c8e1cd6619948a7dfc20c237e7cd8498400e7d49b"} Jan 20 17:46:20 crc kubenswrapper[4558]: I0120 17:46:20.907354 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/openstack-cell1-galera-0" podStartSLOduration=4.907333928 podStartE2EDuration="4.907333928s" podCreationTimestamp="2026-01-20 17:46:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:46:20.904304993 +0000 UTC m=+3874.664642950" watchObservedRunningTime="2026-01-20 17:46:20.907333928 +0000 UTC m=+3874.667671896" Jan 20 17:46:21 crc kubenswrapper[4558]: I0120 17:46:21.089029 4558 scope.go:117] "RemoveContainer" containerID="b68f569a935c15b3257c38f101d44f0d8f64b1219ceeefd73b46c7e75346f66b" Jan 20 17:46:21 crc kubenswrapper[4558]: I0120 17:46:21.117458 4558 scope.go:117] "RemoveContainer" containerID="8a98a6a80e25b53e4ba0486d67f1bca724bbd186dd76d5aaf082369a41c0a1d4" Jan 20 17:46:21 crc kubenswrapper[4558]: I0120 17:46:21.138809 4558 scope.go:117] "RemoveContainer" containerID="17c497a986172b78ad18acace156e9379a29097ef6815bcb71c2e62b15079de6" Jan 20 17:46:21 crc kubenswrapper[4558]: I0120 17:46:21.176494 4558 scope.go:117] "RemoveContainer" containerID="6948f6a3a830cb0a407d66b31b655e5d16e46867673e25b1965dc7c563600f88" Jan 20 17:46:21 crc kubenswrapper[4558]: I0120 17:46:21.193200 4558 scope.go:117] "RemoveContainer" containerID="5ccb1048b6361c306cfb63ee87e682c10253e16a9c797cf219976e09c2dfdba2" Jan 20 17:46:21 crc kubenswrapper[4558]: I0120 17:46:21.211664 4558 scope.go:117] "RemoveContainer" containerID="873d559ff9be8bc43c9aa2eaec3294dcd239547229c462f8fe2d94e35ae04a73" Jan 20 17:46:21 crc kubenswrapper[4558]: I0120 17:46:21.236509 4558 scope.go:117] "RemoveContainer" containerID="4bebdee4f64ef3c3342aede8317035e0af61503dae7f2812fa875d2a9e4ae32e" Jan 20 17:46:21 crc kubenswrapper[4558]: I0120 17:46:21.262224 4558 scope.go:117] "RemoveContainer" containerID="0aab856afdd76049e42b2a1262000dd8299f32a30b1a1bde280bebad2ed6494f" Jan 20 17:46:21 crc kubenswrapper[4558]: I0120 17:46:21.294229 4558 scope.go:117] "RemoveContainer" 
containerID="64656c843094c952de334495564ba5189df632fc1d101b32c9cd7770f2e7f3be" Jan 20 17:46:21 crc kubenswrapper[4558]: I0120 17:46:21.322108 4558 scope.go:117] "RemoveContainer" containerID="850ea8d8634e47c9c83b6988812d43198cd155cda048ff84263b6c83e1b13513" Jan 20 17:46:21 crc kubenswrapper[4558]: I0120 17:46:21.338249 4558 scope.go:117] "RemoveContainer" containerID="c6211ef8501688a72f4964e04a066c8f91536f9834ec00cc06f62c4f60798f0c" Jan 20 17:46:21 crc kubenswrapper[4558]: I0120 17:46:21.355713 4558 scope.go:117] "RemoveContainer" containerID="9aec7229608a50e4ca2cab47655081015208900db81e7a5801eb3c70cf100002" Jan 20 17:46:21 crc kubenswrapper[4558]: I0120 17:46:21.373824 4558 scope.go:117] "RemoveContainer" containerID="f19974f989568d1e6a6c2323bcacffe749a6aded654469aa967eea0f4c6fc9ba" Jan 20 17:46:21 crc kubenswrapper[4558]: I0120 17:46:21.412264 4558 scope.go:117] "RemoveContainer" containerID="cf58cb958bdccb7801e6bad9e986f8b7ff2da356baccf3f2b510423139d72ab9" Jan 20 17:46:21 crc kubenswrapper[4558]: I0120 17:46:21.450376 4558 scope.go:117] "RemoveContainer" containerID="20d7e0258ccf6ed7848be8e1fccaa5e6f35c7acc697d6f6c239e54a0f6d52637" Jan 20 17:46:21 crc kubenswrapper[4558]: I0120 17:46:21.479969 4558 scope.go:117] "RemoveContainer" containerID="a1ac1631d78e025acf8dee79c56d1f3f9399f0b3799b9fac1a4445dbdb14b651" Jan 20 17:46:21 crc kubenswrapper[4558]: I0120 17:46:21.501395 4558 scope.go:117] "RemoveContainer" containerID="f79892859b62fff49932e58f16de3c5503fa6029ca060df20454e36b9e9f37db" Jan 20 17:46:21 crc kubenswrapper[4558]: I0120 17:46:21.521493 4558 scope.go:117] "RemoveContainer" containerID="9ae2d86b2d1cd3b71d7cfe893aa40d27c0a5f18e5bb59818007aa52b54a73e7d" Jan 20 17:46:21 crc kubenswrapper[4558]: I0120 17:46:21.573478 4558 scope.go:117] "RemoveContainer" containerID="ef33eca59ed0960fac7c18dbc983784852c470d4f156ce3a7db2a36e9f8dde75" Jan 20 17:46:21 crc kubenswrapper[4558]: I0120 17:46:21.621989 4558 scope.go:117] "RemoveContainer" containerID="6c282fad70062d85972c6adb482f23b17e657cdacbcb499fa61b4c6196a96f01" Jan 20 17:46:21 crc kubenswrapper[4558]: I0120 17:46:21.638906 4558 scope.go:117] "RemoveContainer" containerID="2e74159a59845803699da99c5974e7ed46127faee243f67df4a03b31c6f34d49" Jan 20 17:46:21 crc kubenswrapper[4558]: I0120 17:46:21.659240 4558 scope.go:117] "RemoveContainer" containerID="450b309d3cda95b42e643d3c9564588a64f72a1b0f850d770b4f59fc19c4efe5" Jan 20 17:46:21 crc kubenswrapper[4558]: I0120 17:46:21.677977 4558 scope.go:117] "RemoveContainer" containerID="b939e3792d1ca4dac4c7681e6b9728b9b8e64bdf4b0beba07b105d7abb383dd6" Jan 20 17:46:21 crc kubenswrapper[4558]: I0120 17:46:21.713972 4558 scope.go:117] "RemoveContainer" containerID="5dc968322d518b4efe9f4ead465577ff861955df682c068ecad0f7cd5f0e3331" Jan 20 17:46:21 crc kubenswrapper[4558]: I0120 17:46:21.741396 4558 scope.go:117] "RemoveContainer" containerID="1a8261f61c9b6fb46a3461edc080651cc99150b9dbc8b5de2458b82d4f428a9c" Jan 20 17:46:21 crc kubenswrapper[4558]: I0120 17:46:21.752737 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:46:21 crc kubenswrapper[4558]: I0120 17:46:21.754001 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:46:21 crc kubenswrapper[4558]: I0120 17:46:21.756981 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ovn-metrics" Jan 20 17:46:21 crc kubenswrapper[4558]: I0120 17:46:21.757607 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ovndbcluster-nb-ovndbs" Jan 20 17:46:21 crc kubenswrapper[4558]: I0120 17:46:21.757774 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovndbcluster-nb-scripts" Jan 20 17:46:21 crc kubenswrapper[4558]: I0120 17:46:21.758142 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ovncluster-ovndbcluster-nb-dockercfg-fcsjv" Jan 20 17:46:21 crc kubenswrapper[4558]: I0120 17:46:21.758576 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovndbcluster-nb-config" Jan 20 17:46:21 crc kubenswrapper[4558]: I0120 17:46:21.763533 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:46:21 crc kubenswrapper[4558]: I0120 17:46:21.888758 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3f090098-7927-4f68-b49d-4cea135a041c-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"3f090098-7927-4f68-b49d-4cea135a041c\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:46:21 crc kubenswrapper[4558]: I0120 17:46:21.889974 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/3f090098-7927-4f68-b49d-4cea135a041c-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"3f090098-7927-4f68-b49d-4cea135a041c\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:46:21 crc kubenswrapper[4558]: I0120 17:46:21.890109 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f090098-7927-4f68-b49d-4cea135a041c-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"3f090098-7927-4f68-b49d-4cea135a041c\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:46:21 crc kubenswrapper[4558]: I0120 17:46:21.890396 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7p5tp\" (UniqueName: \"kubernetes.io/projected/3f090098-7927-4f68-b49d-4cea135a041c-kube-api-access-7p5tp\") pod \"ovsdbserver-nb-0\" (UID: \"3f090098-7927-4f68-b49d-4cea135a041c\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:46:21 crc kubenswrapper[4558]: I0120 17:46:21.890522 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/3f090098-7927-4f68-b49d-4cea135a041c-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"3f090098-7927-4f68-b49d-4cea135a041c\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:46:21 crc kubenswrapper[4558]: I0120 17:46:21.890613 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3f090098-7927-4f68-b49d-4cea135a041c-config\") pod \"ovsdbserver-nb-0\" (UID: \"3f090098-7927-4f68-b49d-4cea135a041c\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:46:21 crc 
kubenswrapper[4558]: I0120 17:46:21.890819 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"3f090098-7927-4f68-b49d-4cea135a041c\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:46:21 crc kubenswrapper[4558]: I0120 17:46:21.891192 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/3f090098-7927-4f68-b49d-4cea135a041c-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"3f090098-7927-4f68-b49d-4cea135a041c\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:46:21 crc kubenswrapper[4558]: I0120 17:46:21.932015 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"de434991-f9a6-4b49-ad42-bb25402f95b2","Type":"ContainerStarted","Data":"03e417adbb6df387398a6d3df365127e6ebd8cf6395f94f668cf4bd1c8c08559"} Jan 20 17:46:21 crc kubenswrapper[4558]: I0120 17:46:21.932399 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:46:21 crc kubenswrapper[4558]: I0120 17:46:21.950800 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/kube-state-metrics-0" podStartSLOduration=2.684686969 podStartE2EDuration="2.950783303s" podCreationTimestamp="2026-01-20 17:46:19 +0000 UTC" firstStartedPulling="2026-01-20 17:46:20.580720056 +0000 UTC m=+3874.341058024" lastFinishedPulling="2026-01-20 17:46:20.84681639 +0000 UTC m=+3874.607154358" observedRunningTime="2026-01-20 17:46:21.942372158 +0000 UTC m=+3875.702710124" watchObservedRunningTime="2026-01-20 17:46:21.950783303 +0000 UTC m=+3875.711121270" Jan 20 17:46:21 crc kubenswrapper[4558]: I0120 17:46:21.994889 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7p5tp\" (UniqueName: \"kubernetes.io/projected/3f090098-7927-4f68-b49d-4cea135a041c-kube-api-access-7p5tp\") pod \"ovsdbserver-nb-0\" (UID: \"3f090098-7927-4f68-b49d-4cea135a041c\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:46:21 crc kubenswrapper[4558]: I0120 17:46:21.994950 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/3f090098-7927-4f68-b49d-4cea135a041c-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"3f090098-7927-4f68-b49d-4cea135a041c\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:46:21 crc kubenswrapper[4558]: I0120 17:46:21.994977 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3f090098-7927-4f68-b49d-4cea135a041c-config\") pod \"ovsdbserver-nb-0\" (UID: \"3f090098-7927-4f68-b49d-4cea135a041c\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:46:21 crc kubenswrapper[4558]: I0120 17:46:21.995083 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"3f090098-7927-4f68-b49d-4cea135a041c\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:46:21 crc kubenswrapper[4558]: I0120 17:46:21.995114 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/3f090098-7927-4f68-b49d-4cea135a041c-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"3f090098-7927-4f68-b49d-4cea135a041c\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:46:21 crc kubenswrapper[4558]: I0120 17:46:21.995173 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3f090098-7927-4f68-b49d-4cea135a041c-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"3f090098-7927-4f68-b49d-4cea135a041c\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:46:21 crc kubenswrapper[4558]: I0120 17:46:21.995208 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/3f090098-7927-4f68-b49d-4cea135a041c-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"3f090098-7927-4f68-b49d-4cea135a041c\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:46:21 crc kubenswrapper[4558]: I0120 17:46:21.995237 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f090098-7927-4f68-b49d-4cea135a041c-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"3f090098-7927-4f68-b49d-4cea135a041c\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:46:21 crc kubenswrapper[4558]: I0120 17:46:21.995553 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"3f090098-7927-4f68-b49d-4cea135a041c\") device mount path \"/mnt/openstack/pv02\"" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:46:21 crc kubenswrapper[4558]: I0120 17:46:21.995651 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/3f090098-7927-4f68-b49d-4cea135a041c-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"3f090098-7927-4f68-b49d-4cea135a041c\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:46:21 crc kubenswrapper[4558]: I0120 17:46:21.995954 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3f090098-7927-4f68-b49d-4cea135a041c-config\") pod \"ovsdbserver-nb-0\" (UID: \"3f090098-7927-4f68-b49d-4cea135a041c\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:46:21 crc kubenswrapper[4558]: I0120 17:46:21.996390 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3f090098-7927-4f68-b49d-4cea135a041c-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"3f090098-7927-4f68-b49d-4cea135a041c\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:46:22 crc kubenswrapper[4558]: I0120 17:46:22.005047 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/3f090098-7927-4f68-b49d-4cea135a041c-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"3f090098-7927-4f68-b49d-4cea135a041c\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:46:22 crc kubenswrapper[4558]: I0120 17:46:22.005288 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f090098-7927-4f68-b49d-4cea135a041c-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"3f090098-7927-4f68-b49d-4cea135a041c\") " 
pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:46:22 crc kubenswrapper[4558]: I0120 17:46:22.006943 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/3f090098-7927-4f68-b49d-4cea135a041c-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"3f090098-7927-4f68-b49d-4cea135a041c\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:46:22 crc kubenswrapper[4558]: I0120 17:46:22.010921 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7p5tp\" (UniqueName: \"kubernetes.io/projected/3f090098-7927-4f68-b49d-4cea135a041c-kube-api-access-7p5tp\") pod \"ovsdbserver-nb-0\" (UID: \"3f090098-7927-4f68-b49d-4cea135a041c\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:46:22 crc kubenswrapper[4558]: I0120 17:46:22.015015 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"3f090098-7927-4f68-b49d-4cea135a041c\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:46:22 crc kubenswrapper[4558]: I0120 17:46:22.069142 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:46:22 crc kubenswrapper[4558]: I0120 17:46:22.485371 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:46:22 crc kubenswrapper[4558]: W0120 17:46:22.486812 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3f090098_7927_4f68_b49d_4cea135a041c.slice/crio-658a5496dc4adc9a7e1bdfb91f9a14fd91e6867091f294ccb0da5966c441330c WatchSource:0}: Error finding container 658a5496dc4adc9a7e1bdfb91f9a14fd91e6867091f294ccb0da5966c441330c: Status 404 returned error can't find the container with id 658a5496dc4adc9a7e1bdfb91f9a14fd91e6867091f294ccb0da5966c441330c Jan 20 17:46:22 crc kubenswrapper[4558]: E0120 17:46:22.676989 4558 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 192.168.25.8:44822->192.168.25.8:43883: write tcp 192.168.25.8:44822->192.168.25.8:43883: write: broken pipe Jan 20 17:46:22 crc kubenswrapper[4558]: I0120 17:46:22.965535 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"3f090098-7927-4f68-b49d-4cea135a041c","Type":"ContainerStarted","Data":"dbdf111aabc4bc0df2386a3517cbb41e1a844ddfad496d4e09801f1998ac0555"} Jan 20 17:46:22 crc kubenswrapper[4558]: I0120 17:46:22.965609 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"3f090098-7927-4f68-b49d-4cea135a041c","Type":"ContainerStarted","Data":"fa3e63d15818b7accc78e0355d0414299134969d91a4885ca57ff950ae494f7e"} Jan 20 17:46:22 crc kubenswrapper[4558]: I0120 17:46:22.965623 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"3f090098-7927-4f68-b49d-4cea135a041c","Type":"ContainerStarted","Data":"658a5496dc4adc9a7e1bdfb91f9a14fd91e6867091f294ccb0da5966c441330c"} Jan 20 17:46:22 crc kubenswrapper[4558]: I0120 17:46:22.987176 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ovsdbserver-nb-0" podStartSLOduration=1.987140943 podStartE2EDuration="1.987140943s" podCreationTimestamp="2026-01-20 17:46:21 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:46:22.98313442 +0000 UTC m=+3876.743472387" watchObservedRunningTime="2026-01-20 17:46:22.987140943 +0000 UTC m=+3876.747478911" Jan 20 17:46:23 crc kubenswrapper[4558]: I0120 17:46:23.142351 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:46:24 crc kubenswrapper[4558]: I0120 17:46:24.307195 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:46:24 crc kubenswrapper[4558]: I0120 17:46:24.308782 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:46:24 crc kubenswrapper[4558]: I0120 17:46:24.310851 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovndbcluster-sb-config" Jan 20 17:46:24 crc kubenswrapper[4558]: I0120 17:46:24.311073 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ovndbcluster-sb-ovndbs" Jan 20 17:46:24 crc kubenswrapper[4558]: I0120 17:46:24.311123 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovndbcluster-sb-scripts" Jan 20 17:46:24 crc kubenswrapper[4558]: I0120 17:46:24.312007 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ovncluster-ovndbcluster-sb-dockercfg-f2cf9" Jan 20 17:46:24 crc kubenswrapper[4558]: I0120 17:46:24.319799 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:46:24 crc kubenswrapper[4558]: I0120 17:46:24.445938 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-sb-0\" (UID: \"9330efb9-cbc2-4c4a-9928-eedf93324d57\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:46:24 crc kubenswrapper[4558]: I0120 17:46:24.446100 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/9330efb9-cbc2-4c4a-9928-eedf93324d57-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"9330efb9-cbc2-4c4a-9928-eedf93324d57\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:46:24 crc kubenswrapper[4558]: I0120 17:46:24.446183 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9330efb9-cbc2-4c4a-9928-eedf93324d57-config\") pod \"ovsdbserver-sb-0\" (UID: \"9330efb9-cbc2-4c4a-9928-eedf93324d57\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:46:24 crc kubenswrapper[4558]: I0120 17:46:24.446412 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9330efb9-cbc2-4c4a-9928-eedf93324d57-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"9330efb9-cbc2-4c4a-9928-eedf93324d57\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:46:24 crc kubenswrapper[4558]: I0120 17:46:24.446484 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9330efb9-cbc2-4c4a-9928-eedf93324d57-scripts\") pod \"ovsdbserver-sb-0\" 
(UID: \"9330efb9-cbc2-4c4a-9928-eedf93324d57\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:46:24 crc kubenswrapper[4558]: I0120 17:46:24.446546 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/9330efb9-cbc2-4c4a-9928-eedf93324d57-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"9330efb9-cbc2-4c4a-9928-eedf93324d57\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:46:24 crc kubenswrapper[4558]: I0120 17:46:24.446649 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/9330efb9-cbc2-4c4a-9928-eedf93324d57-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"9330efb9-cbc2-4c4a-9928-eedf93324d57\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:46:24 crc kubenswrapper[4558]: I0120 17:46:24.446686 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2fz5h\" (UniqueName: \"kubernetes.io/projected/9330efb9-cbc2-4c4a-9928-eedf93324d57-kube-api-access-2fz5h\") pod \"ovsdbserver-sb-0\" (UID: \"9330efb9-cbc2-4c4a-9928-eedf93324d57\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:46:24 crc kubenswrapper[4558]: I0120 17:46:24.547831 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9330efb9-cbc2-4c4a-9928-eedf93324d57-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"9330efb9-cbc2-4c4a-9928-eedf93324d57\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:46:24 crc kubenswrapper[4558]: I0120 17:46:24.547894 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9330efb9-cbc2-4c4a-9928-eedf93324d57-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"9330efb9-cbc2-4c4a-9928-eedf93324d57\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:46:24 crc kubenswrapper[4558]: I0120 17:46:24.547935 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/9330efb9-cbc2-4c4a-9928-eedf93324d57-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"9330efb9-cbc2-4c4a-9928-eedf93324d57\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:46:24 crc kubenswrapper[4558]: I0120 17:46:24.547967 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/9330efb9-cbc2-4c4a-9928-eedf93324d57-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"9330efb9-cbc2-4c4a-9928-eedf93324d57\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:46:24 crc kubenswrapper[4558]: I0120 17:46:24.547989 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2fz5h\" (UniqueName: \"kubernetes.io/projected/9330efb9-cbc2-4c4a-9928-eedf93324d57-kube-api-access-2fz5h\") pod \"ovsdbserver-sb-0\" (UID: \"9330efb9-cbc2-4c4a-9928-eedf93324d57\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:46:24 crc kubenswrapper[4558]: I0120 17:46:24.548016 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-sb-0\" (UID: \"9330efb9-cbc2-4c4a-9928-eedf93324d57\") " 
pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:46:24 crc kubenswrapper[4558]: I0120 17:46:24.548059 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/9330efb9-cbc2-4c4a-9928-eedf93324d57-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"9330efb9-cbc2-4c4a-9928-eedf93324d57\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:46:24 crc kubenswrapper[4558]: I0120 17:46:24.548087 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9330efb9-cbc2-4c4a-9928-eedf93324d57-config\") pod \"ovsdbserver-sb-0\" (UID: \"9330efb9-cbc2-4c4a-9928-eedf93324d57\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:46:24 crc kubenswrapper[4558]: I0120 17:46:24.548546 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/9330efb9-cbc2-4c4a-9928-eedf93324d57-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"9330efb9-cbc2-4c4a-9928-eedf93324d57\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:46:24 crc kubenswrapper[4558]: I0120 17:46:24.548905 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-sb-0\" (UID: \"9330efb9-cbc2-4c4a-9928-eedf93324d57\") device mount path \"/mnt/openstack/pv03\"" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:46:24 crc kubenswrapper[4558]: I0120 17:46:24.549032 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9330efb9-cbc2-4c4a-9928-eedf93324d57-config\") pod \"ovsdbserver-sb-0\" (UID: \"9330efb9-cbc2-4c4a-9928-eedf93324d57\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:46:24 crc kubenswrapper[4558]: I0120 17:46:24.549097 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9330efb9-cbc2-4c4a-9928-eedf93324d57-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"9330efb9-cbc2-4c4a-9928-eedf93324d57\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:46:24 crc kubenswrapper[4558]: I0120 17:46:24.552669 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9330efb9-cbc2-4c4a-9928-eedf93324d57-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"9330efb9-cbc2-4c4a-9928-eedf93324d57\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:46:24 crc kubenswrapper[4558]: I0120 17:46:24.552680 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/9330efb9-cbc2-4c4a-9928-eedf93324d57-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"9330efb9-cbc2-4c4a-9928-eedf93324d57\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:46:24 crc kubenswrapper[4558]: I0120 17:46:24.553259 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/9330efb9-cbc2-4c4a-9928-eedf93324d57-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"9330efb9-cbc2-4c4a-9928-eedf93324d57\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:46:24 crc kubenswrapper[4558]: I0120 17:46:24.566006 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-2fz5h\" (UniqueName: \"kubernetes.io/projected/9330efb9-cbc2-4c4a-9928-eedf93324d57-kube-api-access-2fz5h\") pod \"ovsdbserver-sb-0\" (UID: \"9330efb9-cbc2-4c4a-9928-eedf93324d57\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:46:24 crc kubenswrapper[4558]: I0120 17:46:24.568669 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-sb-0\" (UID: \"9330efb9-cbc2-4c4a-9928-eedf93324d57\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:46:24 crc kubenswrapper[4558]: I0120 17:46:24.625374 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:46:25 crc kubenswrapper[4558]: I0120 17:46:25.032032 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:46:25 crc kubenswrapper[4558]: W0120 17:46:25.036197 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9330efb9_cbc2_4c4a_9928_eedf93324d57.slice/crio-fcfdfce2caa98b6180b4e9bce9d6dad2bb4cb788e727536da1ec1f9c3b2a2599 WatchSource:0}: Error finding container fcfdfce2caa98b6180b4e9bce9d6dad2bb4cb788e727536da1ec1f9c3b2a2599: Status 404 returned error can't find the container with id fcfdfce2caa98b6180b4e9bce9d6dad2bb4cb788e727536da1ec1f9c3b2a2599 Jan 20 17:46:25 crc kubenswrapper[4558]: I0120 17:46:25.069856 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:46:25 crc kubenswrapper[4558]: I0120 17:46:25.102485 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:46:25 crc kubenswrapper[4558]: I0120 17:46:25.376473 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:46:25 crc kubenswrapper[4558]: I0120 17:46:25.376529 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:46:25 crc kubenswrapper[4558]: I0120 17:46:25.444144 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:46:25 crc kubenswrapper[4558]: I0120 17:46:25.993571 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"9330efb9-cbc2-4c4a-9928-eedf93324d57","Type":"ContainerStarted","Data":"7f96a30bdd54fd7b066425edfff74600fb47af1d01a01f1f5b5a15ce2844ce91"} Jan 20 17:46:25 crc kubenswrapper[4558]: I0120 17:46:25.993971 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:46:25 crc kubenswrapper[4558]: I0120 17:46:25.993995 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"9330efb9-cbc2-4c4a-9928-eedf93324d57","Type":"ContainerStarted","Data":"d05ee219c0f8bccc7fa261cda35e2938c1b449a43b564bf9b569dafd93b0c9a7"} Jan 20 17:46:25 crc kubenswrapper[4558]: I0120 17:46:25.994006 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"9330efb9-cbc2-4c4a-9928-eedf93324d57","Type":"ContainerStarted","Data":"fcfdfce2caa98b6180b4e9bce9d6dad2bb4cb788e727536da1ec1f9c3b2a2599"} Jan 20 17:46:26 crc 
kubenswrapper[4558]: I0120 17:46:26.019071 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ovsdbserver-sb-0" podStartSLOduration=2.019052263 podStartE2EDuration="2.019052263s" podCreationTimestamp="2026-01-20 17:46:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:46:26.012829342 +0000 UTC m=+3879.773167309" watchObservedRunningTime="2026-01-20 17:46:26.019052263 +0000 UTC m=+3879.779390230" Jan 20 17:46:26 crc kubenswrapper[4558]: I0120 17:46:26.056085 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:46:26 crc kubenswrapper[4558]: I0120 17:46:26.903578 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:46:26 crc kubenswrapper[4558]: I0120 17:46:26.904143 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:46:26 crc kubenswrapper[4558]: I0120 17:46:26.969494 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:46:27 crc kubenswrapper[4558]: I0120 17:46:27.029687 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:46:27 crc kubenswrapper[4558]: I0120 17:46:27.086768 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:46:27 crc kubenswrapper[4558]: I0120 17:46:27.626300 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:46:27 crc kubenswrapper[4558]: I0120 17:46:27.690641 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-db-create-2sj6c"] Jan 20 17:46:27 crc kubenswrapper[4558]: I0120 17:46:27.691753 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-db-create-2sj6c" Jan 20 17:46:27 crc kubenswrapper[4558]: I0120 17:46:27.698447 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-db-create-2sj6c"] Jan 20 17:46:27 crc kubenswrapper[4558]: I0120 17:46:27.788299 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-db47-account-create-update-qfpns"] Jan 20 17:46:27 crc kubenswrapper[4558]: I0120 17:46:27.789356 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-db47-account-create-update-qfpns" Jan 20 17:46:27 crc kubenswrapper[4558]: I0120 17:46:27.791714 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-db-secret" Jan 20 17:46:27 crc kubenswrapper[4558]: I0120 17:46:27.805185 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-db47-account-create-update-qfpns"] Jan 20 17:46:27 crc kubenswrapper[4558]: I0120 17:46:27.815432 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1d58948f-0486-4e4a-af1f-9021c760d5cc-operator-scripts\") pod \"keystone-db-create-2sj6c\" (UID: \"1d58948f-0486-4e4a-af1f-9021c760d5cc\") " pod="openstack-kuttl-tests/keystone-db-create-2sj6c" Jan 20 17:46:27 crc kubenswrapper[4558]: I0120 17:46:27.815492 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-svhmx\" (UniqueName: \"kubernetes.io/projected/1d58948f-0486-4e4a-af1f-9021c760d5cc-kube-api-access-svhmx\") pod \"keystone-db-create-2sj6c\" (UID: \"1d58948f-0486-4e4a-af1f-9021c760d5cc\") " pod="openstack-kuttl-tests/keystone-db-create-2sj6c" Jan 20 17:46:27 crc kubenswrapper[4558]: I0120 17:46:27.917207 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-svhmx\" (UniqueName: \"kubernetes.io/projected/1d58948f-0486-4e4a-af1f-9021c760d5cc-kube-api-access-svhmx\") pod \"keystone-db-create-2sj6c\" (UID: \"1d58948f-0486-4e4a-af1f-9021c760d5cc\") " pod="openstack-kuttl-tests/keystone-db-create-2sj6c" Jan 20 17:46:27 crc kubenswrapper[4558]: I0120 17:46:27.917338 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vpgnx\" (UniqueName: \"kubernetes.io/projected/efd20cca-38bd-467d-a1e0-5cbf4f0b1a14-kube-api-access-vpgnx\") pod \"keystone-db47-account-create-update-qfpns\" (UID: \"efd20cca-38bd-467d-a1e0-5cbf4f0b1a14\") " pod="openstack-kuttl-tests/keystone-db47-account-create-update-qfpns" Jan 20 17:46:27 crc kubenswrapper[4558]: I0120 17:46:27.917373 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/efd20cca-38bd-467d-a1e0-5cbf4f0b1a14-operator-scripts\") pod \"keystone-db47-account-create-update-qfpns\" (UID: \"efd20cca-38bd-467d-a1e0-5cbf4f0b1a14\") " pod="openstack-kuttl-tests/keystone-db47-account-create-update-qfpns" Jan 20 17:46:27 crc kubenswrapper[4558]: I0120 17:46:27.917405 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1d58948f-0486-4e4a-af1f-9021c760d5cc-operator-scripts\") pod \"keystone-db-create-2sj6c\" (UID: \"1d58948f-0486-4e4a-af1f-9021c760d5cc\") " pod="openstack-kuttl-tests/keystone-db-create-2sj6c" Jan 20 17:46:27 crc kubenswrapper[4558]: I0120 17:46:27.918115 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1d58948f-0486-4e4a-af1f-9021c760d5cc-operator-scripts\") pod \"keystone-db-create-2sj6c\" (UID: \"1d58948f-0486-4e4a-af1f-9021c760d5cc\") " pod="openstack-kuttl-tests/keystone-db-create-2sj6c" Jan 20 17:46:27 crc kubenswrapper[4558]: I0120 17:46:27.940622 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-svhmx\" (UniqueName: \"kubernetes.io/projected/1d58948f-0486-4e4a-af1f-9021c760d5cc-kube-api-access-svhmx\") pod \"keystone-db-create-2sj6c\" (UID: \"1d58948f-0486-4e4a-af1f-9021c760d5cc\") " pod="openstack-kuttl-tests/keystone-db-create-2sj6c" Jan 20 17:46:28 crc kubenswrapper[4558]: I0120 17:46:28.009074 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-db-create-2sj6c" Jan 20 17:46:28 crc kubenswrapper[4558]: I0120 17:46:28.018514 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vpgnx\" (UniqueName: \"kubernetes.io/projected/efd20cca-38bd-467d-a1e0-5cbf4f0b1a14-kube-api-access-vpgnx\") pod \"keystone-db47-account-create-update-qfpns\" (UID: \"efd20cca-38bd-467d-a1e0-5cbf4f0b1a14\") " pod="openstack-kuttl-tests/keystone-db47-account-create-update-qfpns" Jan 20 17:46:28 crc kubenswrapper[4558]: I0120 17:46:28.018565 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/efd20cca-38bd-467d-a1e0-5cbf4f0b1a14-operator-scripts\") pod \"keystone-db47-account-create-update-qfpns\" (UID: \"efd20cca-38bd-467d-a1e0-5cbf4f0b1a14\") " pod="openstack-kuttl-tests/keystone-db47-account-create-update-qfpns" Jan 20 17:46:28 crc kubenswrapper[4558]: I0120 17:46:28.019287 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/efd20cca-38bd-467d-a1e0-5cbf4f0b1a14-operator-scripts\") pod \"keystone-db47-account-create-update-qfpns\" (UID: \"efd20cca-38bd-467d-a1e0-5cbf4f0b1a14\") " pod="openstack-kuttl-tests/keystone-db47-account-create-update-qfpns" Jan 20 17:46:28 crc kubenswrapper[4558]: I0120 17:46:28.055045 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vpgnx\" (UniqueName: \"kubernetes.io/projected/efd20cca-38bd-467d-a1e0-5cbf4f0b1a14-kube-api-access-vpgnx\") pod \"keystone-db47-account-create-update-qfpns\" (UID: \"efd20cca-38bd-467d-a1e0-5cbf4f0b1a14\") " pod="openstack-kuttl-tests/keystone-db47-account-create-update-qfpns" Jan 20 17:46:28 crc kubenswrapper[4558]: I0120 17:46:28.103841 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-db47-account-create-update-qfpns" Jan 20 17:46:28 crc kubenswrapper[4558]: I0120 17:46:28.119733 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/placement-db-create-lmln8"] Jan 20 17:46:28 crc kubenswrapper[4558]: I0120 17:46:28.121020 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-db-create-lmln8" Jan 20 17:46:28 crc kubenswrapper[4558]: I0120 17:46:28.138437 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/placement-f441-account-create-update-w7t8p"] Jan 20 17:46:28 crc kubenswrapper[4558]: I0120 17:46:28.139334 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-f441-account-create-update-w7t8p" Jan 20 17:46:28 crc kubenswrapper[4558]: I0120 17:46:28.142372 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"placement-db-secret" Jan 20 17:46:28 crc kubenswrapper[4558]: I0120 17:46:28.157383 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-db-create-lmln8"] Jan 20 17:46:28 crc kubenswrapper[4558]: I0120 17:46:28.176946 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-f441-account-create-update-w7t8p"] Jan 20 17:46:28 crc kubenswrapper[4558]: I0120 17:46:28.229752 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ab7f32c5-66c5-47a0-a63f-530282c4db6a-operator-scripts\") pod \"placement-f441-account-create-update-w7t8p\" (UID: \"ab7f32c5-66c5-47a0-a63f-530282c4db6a\") " pod="openstack-kuttl-tests/placement-f441-account-create-update-w7t8p" Jan 20 17:46:28 crc kubenswrapper[4558]: I0120 17:46:28.229821 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nlnrd\" (UniqueName: \"kubernetes.io/projected/0fd41882-86eb-4969-8b15-43ea14ac1558-kube-api-access-nlnrd\") pod \"placement-db-create-lmln8\" (UID: \"0fd41882-86eb-4969-8b15-43ea14ac1558\") " pod="openstack-kuttl-tests/placement-db-create-lmln8" Jan 20 17:46:28 crc kubenswrapper[4558]: I0120 17:46:28.229868 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-djf5r\" (UniqueName: \"kubernetes.io/projected/ab7f32c5-66c5-47a0-a63f-530282c4db6a-kube-api-access-djf5r\") pod \"placement-f441-account-create-update-w7t8p\" (UID: \"ab7f32c5-66c5-47a0-a63f-530282c4db6a\") " pod="openstack-kuttl-tests/placement-f441-account-create-update-w7t8p" Jan 20 17:46:28 crc kubenswrapper[4558]: I0120 17:46:28.229888 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0fd41882-86eb-4969-8b15-43ea14ac1558-operator-scripts\") pod \"placement-db-create-lmln8\" (UID: \"0fd41882-86eb-4969-8b15-43ea14ac1558\") " pod="openstack-kuttl-tests/placement-db-create-lmln8" Jan 20 17:46:28 crc kubenswrapper[4558]: I0120 17:46:28.337042 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ab7f32c5-66c5-47a0-a63f-530282c4db6a-operator-scripts\") pod \"placement-f441-account-create-update-w7t8p\" (UID: \"ab7f32c5-66c5-47a0-a63f-530282c4db6a\") " pod="openstack-kuttl-tests/placement-f441-account-create-update-w7t8p" Jan 20 17:46:28 crc kubenswrapper[4558]: I0120 17:46:28.337136 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nlnrd\" (UniqueName: \"kubernetes.io/projected/0fd41882-86eb-4969-8b15-43ea14ac1558-kube-api-access-nlnrd\") pod \"placement-db-create-lmln8\" (UID: \"0fd41882-86eb-4969-8b15-43ea14ac1558\") " pod="openstack-kuttl-tests/placement-db-create-lmln8" Jan 20 17:46:28 crc kubenswrapper[4558]: I0120 17:46:28.337273 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-djf5r\" (UniqueName: \"kubernetes.io/projected/ab7f32c5-66c5-47a0-a63f-530282c4db6a-kube-api-access-djf5r\") pod 
\"placement-f441-account-create-update-w7t8p\" (UID: \"ab7f32c5-66c5-47a0-a63f-530282c4db6a\") " pod="openstack-kuttl-tests/placement-f441-account-create-update-w7t8p" Jan 20 17:46:28 crc kubenswrapper[4558]: I0120 17:46:28.337296 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0fd41882-86eb-4969-8b15-43ea14ac1558-operator-scripts\") pod \"placement-db-create-lmln8\" (UID: \"0fd41882-86eb-4969-8b15-43ea14ac1558\") " pod="openstack-kuttl-tests/placement-db-create-lmln8" Jan 20 17:46:28 crc kubenswrapper[4558]: I0120 17:46:28.338082 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0fd41882-86eb-4969-8b15-43ea14ac1558-operator-scripts\") pod \"placement-db-create-lmln8\" (UID: \"0fd41882-86eb-4969-8b15-43ea14ac1558\") " pod="openstack-kuttl-tests/placement-db-create-lmln8" Jan 20 17:46:28 crc kubenswrapper[4558]: I0120 17:46:28.355262 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ab7f32c5-66c5-47a0-a63f-530282c4db6a-operator-scripts\") pod \"placement-f441-account-create-update-w7t8p\" (UID: \"ab7f32c5-66c5-47a0-a63f-530282c4db6a\") " pod="openstack-kuttl-tests/placement-f441-account-create-update-w7t8p" Jan 20 17:46:28 crc kubenswrapper[4558]: I0120 17:46:28.362863 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nlnrd\" (UniqueName: \"kubernetes.io/projected/0fd41882-86eb-4969-8b15-43ea14ac1558-kube-api-access-nlnrd\") pod \"placement-db-create-lmln8\" (UID: \"0fd41882-86eb-4969-8b15-43ea14ac1558\") " pod="openstack-kuttl-tests/placement-db-create-lmln8" Jan 20 17:46:28 crc kubenswrapper[4558]: I0120 17:46:28.368642 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-djf5r\" (UniqueName: \"kubernetes.io/projected/ab7f32c5-66c5-47a0-a63f-530282c4db6a-kube-api-access-djf5r\") pod \"placement-f441-account-create-update-w7t8p\" (UID: \"ab7f32c5-66c5-47a0-a63f-530282c4db6a\") " pod="openstack-kuttl-tests/placement-f441-account-create-update-w7t8p" Jan 20 17:46:28 crc kubenswrapper[4558]: I0120 17:46:28.479597 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-db-create-lmln8" Jan 20 17:46:28 crc kubenswrapper[4558]: I0120 17:46:28.509646 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-f441-account-create-update-w7t8p" Jan 20 17:46:28 crc kubenswrapper[4558]: I0120 17:46:28.651613 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-db-create-2sj6c"] Jan 20 17:46:28 crc kubenswrapper[4558]: W0120 17:46:28.696407 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1d58948f_0486_4e4a_af1f_9021c760d5cc.slice/crio-ca89ce3c7fa7ff129a4c11ba80a5d57b39c90ad2c7fb257cf553d8b341676bf7 WatchSource:0}: Error finding container ca89ce3c7fa7ff129a4c11ba80a5d57b39c90ad2c7fb257cf553d8b341676bf7: Status 404 returned error can't find the container with id ca89ce3c7fa7ff129a4c11ba80a5d57b39c90ad2c7fb257cf553d8b341676bf7 Jan 20 17:46:28 crc kubenswrapper[4558]: I0120 17:46:28.902724 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-db47-account-create-update-qfpns"] Jan 20 17:46:29 crc kubenswrapper[4558]: I0120 17:46:29.012891 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-db-create-lmln8"] Jan 20 17:46:29 crc kubenswrapper[4558]: I0120 17:46:29.017725 4558 generic.go:334] "Generic (PLEG): container finished" podID="1d58948f-0486-4e4a-af1f-9021c760d5cc" containerID="89c254b2cc20564dd33cd7756e681d06ef6e49434611f128e7c7bd18d011d719" exitCode=0 Jan 20 17:46:29 crc kubenswrapper[4558]: I0120 17:46:29.017796 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-create-2sj6c" event={"ID":"1d58948f-0486-4e4a-af1f-9021c760d5cc","Type":"ContainerDied","Data":"89c254b2cc20564dd33cd7756e681d06ef6e49434611f128e7c7bd18d011d719"} Jan 20 17:46:29 crc kubenswrapper[4558]: I0120 17:46:29.017827 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-create-2sj6c" event={"ID":"1d58948f-0486-4e4a-af1f-9021c760d5cc","Type":"ContainerStarted","Data":"ca89ce3c7fa7ff129a4c11ba80a5d57b39c90ad2c7fb257cf553d8b341676bf7"} Jan 20 17:46:29 crc kubenswrapper[4558]: I0120 17:46:29.018961 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db47-account-create-update-qfpns" event={"ID":"efd20cca-38bd-467d-a1e0-5cbf4f0b1a14","Type":"ContainerStarted","Data":"a54b5f73363ed9e7c3881dfb6b3821c4153826487c1c1aebcbab3dfec7875b26"} Jan 20 17:46:29 crc kubenswrapper[4558]: I0120 17:46:29.128810 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-f441-account-create-update-w7t8p"] Jan 20 17:46:29 crc kubenswrapper[4558]: W0120 17:46:29.134263 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podab7f32c5_66c5_47a0_a63f_530282c4db6a.slice/crio-5a32366adfd7692271e921cfbd623b99753662256abd55e258e4176cc5306926 WatchSource:0}: Error finding container 5a32366adfd7692271e921cfbd623b99753662256abd55e258e4176cc5306926: Status 404 returned error can't find the container with id 5a32366adfd7692271e921cfbd623b99753662256abd55e258e4176cc5306926 Jan 20 17:46:29 crc kubenswrapper[4558]: I0120 17:46:29.625560 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.027442 4558 generic.go:334] "Generic (PLEG): container finished" podID="ab7f32c5-66c5-47a0-a63f-530282c4db6a" 
containerID="156bb79d6f586adeacf516e5434e22447e062b6466c591681499cef3125750f9" exitCode=0 Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.027854 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-f441-account-create-update-w7t8p" event={"ID":"ab7f32c5-66c5-47a0-a63f-530282c4db6a","Type":"ContainerDied","Data":"156bb79d6f586adeacf516e5434e22447e062b6466c591681499cef3125750f9"} Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.027907 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-f441-account-create-update-w7t8p" event={"ID":"ab7f32c5-66c5-47a0-a63f-530282c4db6a","Type":"ContainerStarted","Data":"5a32366adfd7692271e921cfbd623b99753662256abd55e258e4176cc5306926"} Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.029749 4558 generic.go:334] "Generic (PLEG): container finished" podID="0fd41882-86eb-4969-8b15-43ea14ac1558" containerID="1fa8ee6a85397df8fa8a9f702077e9146daa2b4897ff59434b1a88b468ac76f7" exitCode=0 Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.029788 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-create-lmln8" event={"ID":"0fd41882-86eb-4969-8b15-43ea14ac1558","Type":"ContainerDied","Data":"1fa8ee6a85397df8fa8a9f702077e9146daa2b4897ff59434b1a88b468ac76f7"} Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.029880 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-create-lmln8" event={"ID":"0fd41882-86eb-4969-8b15-43ea14ac1558","Type":"ContainerStarted","Data":"872d1c0d900c01c66faca6bd23ca7eec7238303bace173009269cf3d659e58d6"} Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.032152 4558 generic.go:334] "Generic (PLEG): container finished" podID="efd20cca-38bd-467d-a1e0-5cbf4f0b1a14" containerID="f7bef3d7d2547ce273b172aca5d5e1a2df7c945d8bb078d5bc0c5b41f4c22b93" exitCode=0 Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.032258 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db47-account-create-update-qfpns" event={"ID":"efd20cca-38bd-467d-a1e0-5cbf4f0b1a14","Type":"ContainerDied","Data":"f7bef3d7d2547ce273b172aca5d5e1a2df7c945d8bb078d5bc0c5b41f4c22b93"} Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.175942 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.203600 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/swift-storage-0"] Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.208661 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.212581 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"swift-swift-dockercfg-kjhx2" Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.212628 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"swift-storage-config-data" Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.212769 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"swift-conf" Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.224539 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"swift-ring-files" Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.251351 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-storage-0"] Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.283865 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-ksvjk"] Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.285026 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-ring-rebalance-ksvjk" Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.287873 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"swift-proxy-config-data" Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.287914 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"swift-ring-scripts" Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.288586 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"swift-ring-config-data" Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.289280 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-ksvjk"] Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.376625 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/64d029c7-79c6-40fc-b4d2-ac5bfbc387a0-etc-swift\") pod \"swift-storage-0\" (UID: \"64d029c7-79c6-40fc-b4d2-ac5bfbc387a0\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.376818 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/64d029c7-79c6-40fc-b4d2-ac5bfbc387a0-cache\") pod \"swift-storage-0\" (UID: \"64d029c7-79c6-40fc-b4d2-ac5bfbc387a0\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.376856 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/64d029c7-79c6-40fc-b4d2-ac5bfbc387a0-lock\") pod \"swift-storage-0\" (UID: \"64d029c7-79c6-40fc-b4d2-ac5bfbc387a0\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.376911 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rw5ht\" (UniqueName: \"kubernetes.io/projected/64d029c7-79c6-40fc-b4d2-ac5bfbc387a0-kube-api-access-rw5ht\") pod \"swift-storage-0\" (UID: \"64d029c7-79c6-40fc-b4d2-ac5bfbc387a0\") " 
pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.376966 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"swift-storage-0\" (UID: \"64d029c7-79c6-40fc-b4d2-ac5bfbc387a0\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.385154 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-db-create-2sj6c" Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.479212 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/64d029c7-79c6-40fc-b4d2-ac5bfbc387a0-cache\") pod \"swift-storage-0\" (UID: \"64d029c7-79c6-40fc-b4d2-ac5bfbc387a0\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.479262 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/64d029c7-79c6-40fc-b4d2-ac5bfbc387a0-lock\") pod \"swift-storage-0\" (UID: \"64d029c7-79c6-40fc-b4d2-ac5bfbc387a0\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.479309 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rw5ht\" (UniqueName: \"kubernetes.io/projected/64d029c7-79c6-40fc-b4d2-ac5bfbc387a0-kube-api-access-rw5ht\") pod \"swift-storage-0\" (UID: \"64d029c7-79c6-40fc-b4d2-ac5bfbc387a0\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.479342 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"swift-storage-0\" (UID: \"64d029c7-79c6-40fc-b4d2-ac5bfbc387a0\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.479412 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/45770d07-3687-4b5d-b3a0-01112456b92f-ring-data-devices\") pod \"swift-ring-rebalance-ksvjk\" (UID: \"45770d07-3687-4b5d-b3a0-01112456b92f\") " pod="openstack-kuttl-tests/swift-ring-rebalance-ksvjk" Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.479451 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/45770d07-3687-4b5d-b3a0-01112456b92f-scripts\") pod \"swift-ring-rebalance-ksvjk\" (UID: \"45770d07-3687-4b5d-b3a0-01112456b92f\") " pod="openstack-kuttl-tests/swift-ring-rebalance-ksvjk" Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.479477 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dtvmb\" (UniqueName: \"kubernetes.io/projected/45770d07-3687-4b5d-b3a0-01112456b92f-kube-api-access-dtvmb\") pod \"swift-ring-rebalance-ksvjk\" (UID: \"45770d07-3687-4b5d-b3a0-01112456b92f\") " pod="openstack-kuttl-tests/swift-ring-rebalance-ksvjk" Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.479532 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: 
\"kubernetes.io/secret/45770d07-3687-4b5d-b3a0-01112456b92f-dispersionconf\") pod \"swift-ring-rebalance-ksvjk\" (UID: \"45770d07-3687-4b5d-b3a0-01112456b92f\") " pod="openstack-kuttl-tests/swift-ring-rebalance-ksvjk" Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.479556 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/45770d07-3687-4b5d-b3a0-01112456b92f-swiftconf\") pod \"swift-ring-rebalance-ksvjk\" (UID: \"45770d07-3687-4b5d-b3a0-01112456b92f\") " pod="openstack-kuttl-tests/swift-ring-rebalance-ksvjk" Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.479584 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/45770d07-3687-4b5d-b3a0-01112456b92f-etc-swift\") pod \"swift-ring-rebalance-ksvjk\" (UID: \"45770d07-3687-4b5d-b3a0-01112456b92f\") " pod="openstack-kuttl-tests/swift-ring-rebalance-ksvjk" Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.479621 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45770d07-3687-4b5d-b3a0-01112456b92f-combined-ca-bundle\") pod \"swift-ring-rebalance-ksvjk\" (UID: \"45770d07-3687-4b5d-b3a0-01112456b92f\") " pod="openstack-kuttl-tests/swift-ring-rebalance-ksvjk" Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.479676 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/64d029c7-79c6-40fc-b4d2-ac5bfbc387a0-etc-swift\") pod \"swift-storage-0\" (UID: \"64d029c7-79c6-40fc-b4d2-ac5bfbc387a0\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:46:30 crc kubenswrapper[4558]: E0120 17:46:30.479845 4558 projected.go:288] Couldn't get configMap openstack-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 20 17:46:30 crc kubenswrapper[4558]: E0120 17:46:30.479859 4558 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Jan 20 17:46:30 crc kubenswrapper[4558]: E0120 17:46:30.479904 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/64d029c7-79c6-40fc-b4d2-ac5bfbc387a0-etc-swift podName:64d029c7-79c6-40fc-b4d2-ac5bfbc387a0 nodeName:}" failed. No retries permitted until 2026-01-20 17:46:30.979889099 +0000 UTC m=+3884.740227066 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/64d029c7-79c6-40fc-b4d2-ac5bfbc387a0-etc-swift") pod "swift-storage-0" (UID: "64d029c7-79c6-40fc-b4d2-ac5bfbc387a0") : configmap "swift-ring-files" not found Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.480539 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/64d029c7-79c6-40fc-b4d2-ac5bfbc387a0-cache\") pod \"swift-storage-0\" (UID: \"64d029c7-79c6-40fc-b4d2-ac5bfbc387a0\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.480648 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/64d029c7-79c6-40fc-b4d2-ac5bfbc387a0-lock\") pod \"swift-storage-0\" (UID: \"64d029c7-79c6-40fc-b4d2-ac5bfbc387a0\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.480672 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"swift-storage-0\" (UID: \"64d029c7-79c6-40fc-b4d2-ac5bfbc387a0\") device mount path \"/mnt/openstack/pv07\"" pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.510294 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rw5ht\" (UniqueName: \"kubernetes.io/projected/64d029c7-79c6-40fc-b4d2-ac5bfbc387a0-kube-api-access-rw5ht\") pod \"swift-storage-0\" (UID: \"64d029c7-79c6-40fc-b4d2-ac5bfbc387a0\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.524300 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"swift-storage-0\" (UID: \"64d029c7-79c6-40fc-b4d2-ac5bfbc387a0\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.580534 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1d58948f-0486-4e4a-af1f-9021c760d5cc-operator-scripts\") pod \"1d58948f-0486-4e4a-af1f-9021c760d5cc\" (UID: \"1d58948f-0486-4e4a-af1f-9021c760d5cc\") " Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.580962 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1d58948f-0486-4e4a-af1f-9021c760d5cc-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "1d58948f-0486-4e4a-af1f-9021c760d5cc" (UID: "1d58948f-0486-4e4a-af1f-9021c760d5cc"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.580995 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-svhmx\" (UniqueName: \"kubernetes.io/projected/1d58948f-0486-4e4a-af1f-9021c760d5cc-kube-api-access-svhmx\") pod \"1d58948f-0486-4e4a-af1f-9021c760d5cc\" (UID: \"1d58948f-0486-4e4a-af1f-9021c760d5cc\") " Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.581329 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/45770d07-3687-4b5d-b3a0-01112456b92f-etc-swift\") pod \"swift-ring-rebalance-ksvjk\" (UID: \"45770d07-3687-4b5d-b3a0-01112456b92f\") " pod="openstack-kuttl-tests/swift-ring-rebalance-ksvjk" Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.581382 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45770d07-3687-4b5d-b3a0-01112456b92f-combined-ca-bundle\") pod \"swift-ring-rebalance-ksvjk\" (UID: \"45770d07-3687-4b5d-b3a0-01112456b92f\") " pod="openstack-kuttl-tests/swift-ring-rebalance-ksvjk" Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.581595 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/45770d07-3687-4b5d-b3a0-01112456b92f-ring-data-devices\") pod \"swift-ring-rebalance-ksvjk\" (UID: \"45770d07-3687-4b5d-b3a0-01112456b92f\") " pod="openstack-kuttl-tests/swift-ring-rebalance-ksvjk" Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.581639 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/45770d07-3687-4b5d-b3a0-01112456b92f-scripts\") pod \"swift-ring-rebalance-ksvjk\" (UID: \"45770d07-3687-4b5d-b3a0-01112456b92f\") " pod="openstack-kuttl-tests/swift-ring-rebalance-ksvjk" Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.581647 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/45770d07-3687-4b5d-b3a0-01112456b92f-etc-swift\") pod \"swift-ring-rebalance-ksvjk\" (UID: \"45770d07-3687-4b5d-b3a0-01112456b92f\") " pod="openstack-kuttl-tests/swift-ring-rebalance-ksvjk" Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.581671 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dtvmb\" (UniqueName: \"kubernetes.io/projected/45770d07-3687-4b5d-b3a0-01112456b92f-kube-api-access-dtvmb\") pod \"swift-ring-rebalance-ksvjk\" (UID: \"45770d07-3687-4b5d-b3a0-01112456b92f\") " pod="openstack-kuttl-tests/swift-ring-rebalance-ksvjk" Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.581733 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/45770d07-3687-4b5d-b3a0-01112456b92f-dispersionconf\") pod \"swift-ring-rebalance-ksvjk\" (UID: \"45770d07-3687-4b5d-b3a0-01112456b92f\") " pod="openstack-kuttl-tests/swift-ring-rebalance-ksvjk" Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.581753 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/45770d07-3687-4b5d-b3a0-01112456b92f-swiftconf\") pod \"swift-ring-rebalance-ksvjk\" (UID: \"45770d07-3687-4b5d-b3a0-01112456b92f\") " 
pod="openstack-kuttl-tests/swift-ring-rebalance-ksvjk" Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.581811 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1d58948f-0486-4e4a-af1f-9021c760d5cc-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.582242 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/45770d07-3687-4b5d-b3a0-01112456b92f-ring-data-devices\") pod \"swift-ring-rebalance-ksvjk\" (UID: \"45770d07-3687-4b5d-b3a0-01112456b92f\") " pod="openstack-kuttl-tests/swift-ring-rebalance-ksvjk" Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.582826 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/45770d07-3687-4b5d-b3a0-01112456b92f-scripts\") pod \"swift-ring-rebalance-ksvjk\" (UID: \"45770d07-3687-4b5d-b3a0-01112456b92f\") " pod="openstack-kuttl-tests/swift-ring-rebalance-ksvjk" Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.586414 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d58948f-0486-4e4a-af1f-9021c760d5cc-kube-api-access-svhmx" (OuterVolumeSpecName: "kube-api-access-svhmx") pod "1d58948f-0486-4e4a-af1f-9021c760d5cc" (UID: "1d58948f-0486-4e4a-af1f-9021c760d5cc"). InnerVolumeSpecName "kube-api-access-svhmx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.586497 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/45770d07-3687-4b5d-b3a0-01112456b92f-dispersionconf\") pod \"swift-ring-rebalance-ksvjk\" (UID: \"45770d07-3687-4b5d-b3a0-01112456b92f\") " pod="openstack-kuttl-tests/swift-ring-rebalance-ksvjk" Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.586750 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/45770d07-3687-4b5d-b3a0-01112456b92f-swiftconf\") pod \"swift-ring-rebalance-ksvjk\" (UID: \"45770d07-3687-4b5d-b3a0-01112456b92f\") " pod="openstack-kuttl-tests/swift-ring-rebalance-ksvjk" Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.587120 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45770d07-3687-4b5d-b3a0-01112456b92f-combined-ca-bundle\") pod \"swift-ring-rebalance-ksvjk\" (UID: \"45770d07-3687-4b5d-b3a0-01112456b92f\") " pod="openstack-kuttl-tests/swift-ring-rebalance-ksvjk" Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.613075 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dtvmb\" (UniqueName: \"kubernetes.io/projected/45770d07-3687-4b5d-b3a0-01112456b92f-kube-api-access-dtvmb\") pod \"swift-ring-rebalance-ksvjk\" (UID: \"45770d07-3687-4b5d-b3a0-01112456b92f\") " pod="openstack-kuttl-tests/swift-ring-rebalance-ksvjk" Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.683522 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-svhmx\" (UniqueName: \"kubernetes.io/projected/1d58948f-0486-4e4a-af1f-9021c760d5cc-kube-api-access-svhmx\") on node \"crc\" DevicePath \"\"" Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.697133 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.728604 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.909964 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-ring-rebalance-ksvjk" Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.938332 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:46:30 crc kubenswrapper[4558]: E0120 17:46:30.938730 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d58948f-0486-4e4a-af1f-9021c760d5cc" containerName="mariadb-database-create" Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.938748 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d58948f-0486-4e4a-af1f-9021c760d5cc" containerName="mariadb-database-create" Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.938963 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="1d58948f-0486-4e4a-af1f-9021c760d5cc" containerName="mariadb-database-create" Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.939813 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.943187 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovnnorthd-config" Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.943256 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovnnorthd-scripts" Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.943466 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ovnnorthd-ovnnorthd-dockercfg-lk4s2" Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.943528 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ovnnorthd-ovndbs" Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.945297 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:46:30 crc kubenswrapper[4558]: I0120 17:46:30.989870 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/64d029c7-79c6-40fc-b4d2-ac5bfbc387a0-etc-swift\") pod \"swift-storage-0\" (UID: \"64d029c7-79c6-40fc-b4d2-ac5bfbc387a0\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:46:30 crc kubenswrapper[4558]: E0120 17:46:30.990295 4558 projected.go:288] Couldn't get configMap openstack-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 20 17:46:30 crc kubenswrapper[4558]: E0120 17:46:30.990320 4558 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Jan 20 17:46:30 crc kubenswrapper[4558]: E0120 17:46:30.990362 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/64d029c7-79c6-40fc-b4d2-ac5bfbc387a0-etc-swift podName:64d029c7-79c6-40fc-b4d2-ac5bfbc387a0 nodeName:}" failed. No retries permitted until 2026-01-20 17:46:31.990346512 +0000 UTC m=+3885.750684479 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/64d029c7-79c6-40fc-b4d2-ac5bfbc387a0-etc-swift") pod "swift-storage-0" (UID: "64d029c7-79c6-40fc-b4d2-ac5bfbc387a0") : configmap "swift-ring-files" not found Jan 20 17:46:31 crc kubenswrapper[4558]: I0120 17:46:31.043567 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-create-2sj6c" event={"ID":"1d58948f-0486-4e4a-af1f-9021c760d5cc","Type":"ContainerDied","Data":"ca89ce3c7fa7ff129a4c11ba80a5d57b39c90ad2c7fb257cf553d8b341676bf7"} Jan 20 17:46:31 crc kubenswrapper[4558]: I0120 17:46:31.043858 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ca89ce3c7fa7ff129a4c11ba80a5d57b39c90ad2c7fb257cf553d8b341676bf7" Jan 20 17:46:31 crc kubenswrapper[4558]: I0120 17:46:31.043601 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-db-create-2sj6c" Jan 20 17:46:31 crc kubenswrapper[4558]: I0120 17:46:31.093737 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6dae3a04-db9e-48a2-bbe2-012c9ba55890-config\") pod \"ovn-northd-0\" (UID: \"6dae3a04-db9e-48a2-bbe2-012c9ba55890\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:46:31 crc kubenswrapper[4558]: I0120 17:46:31.093811 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6dae3a04-db9e-48a2-bbe2-012c9ba55890-scripts\") pod \"ovn-northd-0\" (UID: \"6dae3a04-db9e-48a2-bbe2-012c9ba55890\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:46:31 crc kubenswrapper[4558]: I0120 17:46:31.093940 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8h5nr\" (UniqueName: \"kubernetes.io/projected/6dae3a04-db9e-48a2-bbe2-012c9ba55890-kube-api-access-8h5nr\") pod \"ovn-northd-0\" (UID: \"6dae3a04-db9e-48a2-bbe2-012c9ba55890\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:46:31 crc kubenswrapper[4558]: I0120 17:46:31.094080 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/6dae3a04-db9e-48a2-bbe2-012c9ba55890-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"6dae3a04-db9e-48a2-bbe2-012c9ba55890\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:46:31 crc kubenswrapper[4558]: I0120 17:46:31.094100 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/6dae3a04-db9e-48a2-bbe2-012c9ba55890-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"6dae3a04-db9e-48a2-bbe2-012c9ba55890\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:46:31 crc kubenswrapper[4558]: I0120 17:46:31.094205 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6dae3a04-db9e-48a2-bbe2-012c9ba55890-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"6dae3a04-db9e-48a2-bbe2-012c9ba55890\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:46:31 crc kubenswrapper[4558]: I0120 17:46:31.094339 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: 
\"kubernetes.io/empty-dir/6dae3a04-db9e-48a2-bbe2-012c9ba55890-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"6dae3a04-db9e-48a2-bbe2-012c9ba55890\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:46:31 crc kubenswrapper[4558]: I0120 17:46:31.196795 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/6dae3a04-db9e-48a2-bbe2-012c9ba55890-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"6dae3a04-db9e-48a2-bbe2-012c9ba55890\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:46:31 crc kubenswrapper[4558]: I0120 17:46:31.196831 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/6dae3a04-db9e-48a2-bbe2-012c9ba55890-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"6dae3a04-db9e-48a2-bbe2-012c9ba55890\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:46:31 crc kubenswrapper[4558]: I0120 17:46:31.196869 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6dae3a04-db9e-48a2-bbe2-012c9ba55890-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"6dae3a04-db9e-48a2-bbe2-012c9ba55890\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:46:31 crc kubenswrapper[4558]: I0120 17:46:31.196944 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/6dae3a04-db9e-48a2-bbe2-012c9ba55890-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"6dae3a04-db9e-48a2-bbe2-012c9ba55890\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:46:31 crc kubenswrapper[4558]: I0120 17:46:31.197020 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6dae3a04-db9e-48a2-bbe2-012c9ba55890-config\") pod \"ovn-northd-0\" (UID: \"6dae3a04-db9e-48a2-bbe2-012c9ba55890\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:46:31 crc kubenswrapper[4558]: I0120 17:46:31.197083 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6dae3a04-db9e-48a2-bbe2-012c9ba55890-scripts\") pod \"ovn-northd-0\" (UID: \"6dae3a04-db9e-48a2-bbe2-012c9ba55890\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:46:31 crc kubenswrapper[4558]: I0120 17:46:31.197121 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8h5nr\" (UniqueName: \"kubernetes.io/projected/6dae3a04-db9e-48a2-bbe2-012c9ba55890-kube-api-access-8h5nr\") pod \"ovn-northd-0\" (UID: \"6dae3a04-db9e-48a2-bbe2-012c9ba55890\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:46:31 crc kubenswrapper[4558]: I0120 17:46:31.199200 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6dae3a04-db9e-48a2-bbe2-012c9ba55890-config\") pod \"ovn-northd-0\" (UID: \"6dae3a04-db9e-48a2-bbe2-012c9ba55890\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:46:31 crc kubenswrapper[4558]: I0120 17:46:31.199268 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6dae3a04-db9e-48a2-bbe2-012c9ba55890-scripts\") pod \"ovn-northd-0\" (UID: \"6dae3a04-db9e-48a2-bbe2-012c9ba55890\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:46:31 crc kubenswrapper[4558]: I0120 17:46:31.199312 4558 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/6dae3a04-db9e-48a2-bbe2-012c9ba55890-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"6dae3a04-db9e-48a2-bbe2-012c9ba55890\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:46:31 crc kubenswrapper[4558]: I0120 17:46:31.204309 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/6dae3a04-db9e-48a2-bbe2-012c9ba55890-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"6dae3a04-db9e-48a2-bbe2-012c9ba55890\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:46:31 crc kubenswrapper[4558]: I0120 17:46:31.205785 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6dae3a04-db9e-48a2-bbe2-012c9ba55890-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"6dae3a04-db9e-48a2-bbe2-012c9ba55890\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:46:31 crc kubenswrapper[4558]: I0120 17:46:31.208842 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/6dae3a04-db9e-48a2-bbe2-012c9ba55890-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"6dae3a04-db9e-48a2-bbe2-012c9ba55890\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:46:31 crc kubenswrapper[4558]: I0120 17:46:31.215068 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8h5nr\" (UniqueName: \"kubernetes.io/projected/6dae3a04-db9e-48a2-bbe2-012c9ba55890-kube-api-access-8h5nr\") pod \"ovn-northd-0\" (UID: \"6dae3a04-db9e-48a2-bbe2-012c9ba55890\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:46:31 crc kubenswrapper[4558]: I0120 17:46:31.258118 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:46:31 crc kubenswrapper[4558]: I0120 17:46:31.335056 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-ksvjk"] Jan 20 17:46:31 crc kubenswrapper[4558]: W0120 17:46:31.355594 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod45770d07_3687_4b5d_b3a0_01112456b92f.slice/crio-78227dde0e6492425aec15952b0f9b8dc3d076de788099094df08c5a4ca38087 WatchSource:0}: Error finding container 78227dde0e6492425aec15952b0f9b8dc3d076de788099094df08c5a4ca38087: Status 404 returned error can't find the container with id 78227dde0e6492425aec15952b0f9b8dc3d076de788099094df08c5a4ca38087 Jan 20 17:46:31 crc kubenswrapper[4558]: I0120 17:46:31.463524 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-f441-account-create-update-w7t8p" Jan 20 17:46:31 crc kubenswrapper[4558]: I0120 17:46:31.498062 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-db-create-lmln8" Jan 20 17:46:31 crc kubenswrapper[4558]: I0120 17:46:31.508525 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-db47-account-create-update-qfpns" Jan 20 17:46:31 crc kubenswrapper[4558]: I0120 17:46:31.611012 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nlnrd\" (UniqueName: \"kubernetes.io/projected/0fd41882-86eb-4969-8b15-43ea14ac1558-kube-api-access-nlnrd\") pod \"0fd41882-86eb-4969-8b15-43ea14ac1558\" (UID: \"0fd41882-86eb-4969-8b15-43ea14ac1558\") " Jan 20 17:46:31 crc kubenswrapper[4558]: I0120 17:46:31.611067 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/efd20cca-38bd-467d-a1e0-5cbf4f0b1a14-operator-scripts\") pod \"efd20cca-38bd-467d-a1e0-5cbf4f0b1a14\" (UID: \"efd20cca-38bd-467d-a1e0-5cbf4f0b1a14\") " Jan 20 17:46:31 crc kubenswrapper[4558]: I0120 17:46:31.611457 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ab7f32c5-66c5-47a0-a63f-530282c4db6a-operator-scripts\") pod \"ab7f32c5-66c5-47a0-a63f-530282c4db6a\" (UID: \"ab7f32c5-66c5-47a0-a63f-530282c4db6a\") " Jan 20 17:46:31 crc kubenswrapper[4558]: I0120 17:46:31.611641 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-djf5r\" (UniqueName: \"kubernetes.io/projected/ab7f32c5-66c5-47a0-a63f-530282c4db6a-kube-api-access-djf5r\") pod \"ab7f32c5-66c5-47a0-a63f-530282c4db6a\" (UID: \"ab7f32c5-66c5-47a0-a63f-530282c4db6a\") " Jan 20 17:46:31 crc kubenswrapper[4558]: I0120 17:46:31.611677 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0fd41882-86eb-4969-8b15-43ea14ac1558-operator-scripts\") pod \"0fd41882-86eb-4969-8b15-43ea14ac1558\" (UID: \"0fd41882-86eb-4969-8b15-43ea14ac1558\") " Jan 20 17:46:31 crc kubenswrapper[4558]: I0120 17:46:31.611988 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vpgnx\" (UniqueName: \"kubernetes.io/projected/efd20cca-38bd-467d-a1e0-5cbf4f0b1a14-kube-api-access-vpgnx\") pod \"efd20cca-38bd-467d-a1e0-5cbf4f0b1a14\" (UID: \"efd20cca-38bd-467d-a1e0-5cbf4f0b1a14\") " Jan 20 17:46:31 crc kubenswrapper[4558]: I0120 17:46:31.612080 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ab7f32c5-66c5-47a0-a63f-530282c4db6a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ab7f32c5-66c5-47a0-a63f-530282c4db6a" (UID: "ab7f32c5-66c5-47a0-a63f-530282c4db6a"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:46:31 crc kubenswrapper[4558]: I0120 17:46:31.612131 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0fd41882-86eb-4969-8b15-43ea14ac1558-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "0fd41882-86eb-4969-8b15-43ea14ac1558" (UID: "0fd41882-86eb-4969-8b15-43ea14ac1558"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:46:31 crc kubenswrapper[4558]: I0120 17:46:31.612271 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/efd20cca-38bd-467d-a1e0-5cbf4f0b1a14-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "efd20cca-38bd-467d-a1e0-5cbf4f0b1a14" (UID: "efd20cca-38bd-467d-a1e0-5cbf4f0b1a14"). 
InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:46:31 crc kubenswrapper[4558]: I0120 17:46:31.612801 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ab7f32c5-66c5-47a0-a63f-530282c4db6a-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:46:31 crc kubenswrapper[4558]: I0120 17:46:31.612826 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0fd41882-86eb-4969-8b15-43ea14ac1558-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:46:31 crc kubenswrapper[4558]: I0120 17:46:31.612838 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/efd20cca-38bd-467d-a1e0-5cbf4f0b1a14-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:46:31 crc kubenswrapper[4558]: I0120 17:46:31.614829 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0fd41882-86eb-4969-8b15-43ea14ac1558-kube-api-access-nlnrd" (OuterVolumeSpecName: "kube-api-access-nlnrd") pod "0fd41882-86eb-4969-8b15-43ea14ac1558" (UID: "0fd41882-86eb-4969-8b15-43ea14ac1558"). InnerVolumeSpecName "kube-api-access-nlnrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:46:31 crc kubenswrapper[4558]: I0120 17:46:31.615209 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efd20cca-38bd-467d-a1e0-5cbf4f0b1a14-kube-api-access-vpgnx" (OuterVolumeSpecName: "kube-api-access-vpgnx") pod "efd20cca-38bd-467d-a1e0-5cbf4f0b1a14" (UID: "efd20cca-38bd-467d-a1e0-5cbf4f0b1a14"). InnerVolumeSpecName "kube-api-access-vpgnx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:46:31 crc kubenswrapper[4558]: I0120 17:46:31.615263 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab7f32c5-66c5-47a0-a63f-530282c4db6a-kube-api-access-djf5r" (OuterVolumeSpecName: "kube-api-access-djf5r") pod "ab7f32c5-66c5-47a0-a63f-530282c4db6a" (UID: "ab7f32c5-66c5-47a0-a63f-530282c4db6a"). InnerVolumeSpecName "kube-api-access-djf5r". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:46:31 crc kubenswrapper[4558]: I0120 17:46:31.714581 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-djf5r\" (UniqueName: \"kubernetes.io/projected/ab7f32c5-66c5-47a0-a63f-530282c4db6a-kube-api-access-djf5r\") on node \"crc\" DevicePath \"\"" Jan 20 17:46:31 crc kubenswrapper[4558]: I0120 17:46:31.714618 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vpgnx\" (UniqueName: \"kubernetes.io/projected/efd20cca-38bd-467d-a1e0-5cbf4f0b1a14-kube-api-access-vpgnx\") on node \"crc\" DevicePath \"\"" Jan 20 17:46:31 crc kubenswrapper[4558]: I0120 17:46:31.714630 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nlnrd\" (UniqueName: \"kubernetes.io/projected/0fd41882-86eb-4969-8b15-43ea14ac1558-kube-api-access-nlnrd\") on node \"crc\" DevicePath \"\"" Jan 20 17:46:31 crc kubenswrapper[4558]: I0120 17:46:31.828214 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:46:32 crc kubenswrapper[4558]: I0120 17:46:32.032348 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/64d029c7-79c6-40fc-b4d2-ac5bfbc387a0-etc-swift\") pod \"swift-storage-0\" (UID: \"64d029c7-79c6-40fc-b4d2-ac5bfbc387a0\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:46:32 crc kubenswrapper[4558]: E0120 17:46:32.032627 4558 projected.go:288] Couldn't get configMap openstack-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 20 17:46:32 crc kubenswrapper[4558]: E0120 17:46:32.032667 4558 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Jan 20 17:46:32 crc kubenswrapper[4558]: E0120 17:46:32.032777 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/64d029c7-79c6-40fc-b4d2-ac5bfbc387a0-etc-swift podName:64d029c7-79c6-40fc-b4d2-ac5bfbc387a0 nodeName:}" failed. No retries permitted until 2026-01-20 17:46:34.032751366 +0000 UTC m=+3887.793089332 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/64d029c7-79c6-40fc-b4d2-ac5bfbc387a0-etc-swift") pod "swift-storage-0" (UID: "64d029c7-79c6-40fc-b4d2-ac5bfbc387a0") : configmap "swift-ring-files" not found Jan 20 17:46:32 crc kubenswrapper[4558]: I0120 17:46:32.057179 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-ring-rebalance-ksvjk" event={"ID":"45770d07-3687-4b5d-b3a0-01112456b92f","Type":"ContainerStarted","Data":"14c2e0d4dc0056dcda162ff4dd3917c5a10a8a300b1513c3f8b7fb9321d4a88e"} Jan 20 17:46:32 crc kubenswrapper[4558]: I0120 17:46:32.057248 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-ring-rebalance-ksvjk" event={"ID":"45770d07-3687-4b5d-b3a0-01112456b92f","Type":"ContainerStarted","Data":"78227dde0e6492425aec15952b0f9b8dc3d076de788099094df08c5a4ca38087"} Jan 20 17:46:32 crc kubenswrapper[4558]: I0120 17:46:32.062287 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-f441-account-create-update-w7t8p" event={"ID":"ab7f32c5-66c5-47a0-a63f-530282c4db6a","Type":"ContainerDied","Data":"5a32366adfd7692271e921cfbd623b99753662256abd55e258e4176cc5306926"} Jan 20 17:46:32 crc kubenswrapper[4558]: I0120 17:46:32.062347 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5a32366adfd7692271e921cfbd623b99753662256abd55e258e4176cc5306926" Jan 20 17:46:32 crc kubenswrapper[4558]: I0120 17:46:32.062357 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-f441-account-create-update-w7t8p" Jan 20 17:46:32 crc kubenswrapper[4558]: I0120 17:46:32.066535 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-create-lmln8" event={"ID":"0fd41882-86eb-4969-8b15-43ea14ac1558","Type":"ContainerDied","Data":"872d1c0d900c01c66faca6bd23ca7eec7238303bace173009269cf3d659e58d6"} Jan 20 17:46:32 crc kubenswrapper[4558]: I0120 17:46:32.066576 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="872d1c0d900c01c66faca6bd23ca7eec7238303bace173009269cf3d659e58d6" Jan 20 17:46:32 crc kubenswrapper[4558]: I0120 17:46:32.066733 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-db-create-lmln8" Jan 20 17:46:32 crc kubenswrapper[4558]: I0120 17:46:32.078633 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"6dae3a04-db9e-48a2-bbe2-012c9ba55890","Type":"ContainerStarted","Data":"9b54f5f4a254eb9083539aa947157b34dfeea7934f5c2b37060ecf2e8e7a304a"} Jan 20 17:46:32 crc kubenswrapper[4558]: I0120 17:46:32.078673 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"6dae3a04-db9e-48a2-bbe2-012c9ba55890","Type":"ContainerStarted","Data":"25c5077205531977be1269655185fb1d3e09b3b6d2d7b628d3c833bf0a9ae60b"} Jan 20 17:46:32 crc kubenswrapper[4558]: I0120 17:46:32.079796 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db47-account-create-update-qfpns" event={"ID":"efd20cca-38bd-467d-a1e0-5cbf4f0b1a14","Type":"ContainerDied","Data":"a54b5f73363ed9e7c3881dfb6b3821c4153826487c1c1aebcbab3dfec7875b26"} Jan 20 17:46:32 crc kubenswrapper[4558]: I0120 17:46:32.079822 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a54b5f73363ed9e7c3881dfb6b3821c4153826487c1c1aebcbab3dfec7875b26" Jan 20 17:46:32 crc kubenswrapper[4558]: I0120 17:46:32.080060 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-db47-account-create-update-qfpns" Jan 20 17:46:32 crc kubenswrapper[4558]: I0120 17:46:32.084015 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/swift-ring-rebalance-ksvjk" podStartSLOduration=2.08400408 podStartE2EDuration="2.08400408s" podCreationTimestamp="2026-01-20 17:46:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:46:32.075707199 +0000 UTC m=+3885.836045166" watchObservedRunningTime="2026-01-20 17:46:32.08400408 +0000 UTC m=+3885.844342047" Jan 20 17:46:33 crc kubenswrapper[4558]: I0120 17:46:33.090465 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"6dae3a04-db9e-48a2-bbe2-012c9ba55890","Type":"ContainerStarted","Data":"bfbe340de7186cba3ca94e5cb5c951be9e3563e26cd26dd763147ac393eeec4f"} Jan 20 17:46:33 crc kubenswrapper[4558]: I0120 17:46:33.108845 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ovn-northd-0" podStartSLOduration=3.108819389 podStartE2EDuration="3.108819389s" podCreationTimestamp="2026-01-20 17:46:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:46:33.106047156 +0000 UTC m=+3886.866385123" watchObservedRunningTime="2026-01-20 17:46:33.108819389 +0000 UTC m=+3886.869157357" Jan 20 17:46:33 crc kubenswrapper[4558]: I0120 17:46:33.288466 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-db-create-7jpvj"] Jan 20 17:46:33 crc kubenswrapper[4558]: E0120 17:46:33.288839 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0fd41882-86eb-4969-8b15-43ea14ac1558" containerName="mariadb-database-create" Jan 20 17:46:33 crc kubenswrapper[4558]: I0120 17:46:33.288856 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0fd41882-86eb-4969-8b15-43ea14ac1558" containerName="mariadb-database-create" Jan 20 17:46:33 crc kubenswrapper[4558]: E0120 
17:46:33.288865 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="efd20cca-38bd-467d-a1e0-5cbf4f0b1a14" containerName="mariadb-account-create-update" Jan 20 17:46:33 crc kubenswrapper[4558]: I0120 17:46:33.288872 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="efd20cca-38bd-467d-a1e0-5cbf4f0b1a14" containerName="mariadb-account-create-update" Jan 20 17:46:33 crc kubenswrapper[4558]: E0120 17:46:33.288884 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab7f32c5-66c5-47a0-a63f-530282c4db6a" containerName="mariadb-account-create-update" Jan 20 17:46:33 crc kubenswrapper[4558]: I0120 17:46:33.288891 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab7f32c5-66c5-47a0-a63f-530282c4db6a" containerName="mariadb-account-create-update" Jan 20 17:46:33 crc kubenswrapper[4558]: I0120 17:46:33.289044 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="0fd41882-86eb-4969-8b15-43ea14ac1558" containerName="mariadb-database-create" Jan 20 17:46:33 crc kubenswrapper[4558]: I0120 17:46:33.289059 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab7f32c5-66c5-47a0-a63f-530282c4db6a" containerName="mariadb-account-create-update" Jan 20 17:46:33 crc kubenswrapper[4558]: I0120 17:46:33.289067 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="efd20cca-38bd-467d-a1e0-5cbf4f0b1a14" containerName="mariadb-account-create-update" Jan 20 17:46:33 crc kubenswrapper[4558]: I0120 17:46:33.289612 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-db-create-7jpvj" Jan 20 17:46:33 crc kubenswrapper[4558]: I0120 17:46:33.301290 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-db-create-7jpvj"] Jan 20 17:46:33 crc kubenswrapper[4558]: I0120 17:46:33.354956 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/30bf2b72-7578-4d79-b11f-e7d8f34dc805-operator-scripts\") pod \"glance-db-create-7jpvj\" (UID: \"30bf2b72-7578-4d79-b11f-e7d8f34dc805\") " pod="openstack-kuttl-tests/glance-db-create-7jpvj" Jan 20 17:46:33 crc kubenswrapper[4558]: I0120 17:46:33.355003 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lc7tb\" (UniqueName: \"kubernetes.io/projected/30bf2b72-7578-4d79-b11f-e7d8f34dc805-kube-api-access-lc7tb\") pod \"glance-db-create-7jpvj\" (UID: \"30bf2b72-7578-4d79-b11f-e7d8f34dc805\") " pod="openstack-kuttl-tests/glance-db-create-7jpvj" Jan 20 17:46:33 crc kubenswrapper[4558]: I0120 17:46:33.401842 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-9ded-account-create-update-l5qrm"] Jan 20 17:46:33 crc kubenswrapper[4558]: I0120 17:46:33.403068 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-9ded-account-create-update-l5qrm" Jan 20 17:46:33 crc kubenswrapper[4558]: I0120 17:46:33.404971 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-db-secret" Jan 20 17:46:33 crc kubenswrapper[4558]: I0120 17:46:33.408942 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-9ded-account-create-update-l5qrm"] Jan 20 17:46:33 crc kubenswrapper[4558]: I0120 17:46:33.457123 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9tfdx\" (UniqueName: \"kubernetes.io/projected/b26fb19f-321d-4dd1-99dd-a706e445d49c-kube-api-access-9tfdx\") pod \"glance-9ded-account-create-update-l5qrm\" (UID: \"b26fb19f-321d-4dd1-99dd-a706e445d49c\") " pod="openstack-kuttl-tests/glance-9ded-account-create-update-l5qrm" Jan 20 17:46:33 crc kubenswrapper[4558]: I0120 17:46:33.458330 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/30bf2b72-7578-4d79-b11f-e7d8f34dc805-operator-scripts\") pod \"glance-db-create-7jpvj\" (UID: \"30bf2b72-7578-4d79-b11f-e7d8f34dc805\") " pod="openstack-kuttl-tests/glance-db-create-7jpvj" Jan 20 17:46:33 crc kubenswrapper[4558]: I0120 17:46:33.458513 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lc7tb\" (UniqueName: \"kubernetes.io/projected/30bf2b72-7578-4d79-b11f-e7d8f34dc805-kube-api-access-lc7tb\") pod \"glance-db-create-7jpvj\" (UID: \"30bf2b72-7578-4d79-b11f-e7d8f34dc805\") " pod="openstack-kuttl-tests/glance-db-create-7jpvj" Jan 20 17:46:33 crc kubenswrapper[4558]: I0120 17:46:33.458795 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b26fb19f-321d-4dd1-99dd-a706e445d49c-operator-scripts\") pod \"glance-9ded-account-create-update-l5qrm\" (UID: \"b26fb19f-321d-4dd1-99dd-a706e445d49c\") " pod="openstack-kuttl-tests/glance-9ded-account-create-update-l5qrm" Jan 20 17:46:33 crc kubenswrapper[4558]: I0120 17:46:33.459473 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/30bf2b72-7578-4d79-b11f-e7d8f34dc805-operator-scripts\") pod \"glance-db-create-7jpvj\" (UID: \"30bf2b72-7578-4d79-b11f-e7d8f34dc805\") " pod="openstack-kuttl-tests/glance-db-create-7jpvj" Jan 20 17:46:33 crc kubenswrapper[4558]: I0120 17:46:33.476087 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lc7tb\" (UniqueName: \"kubernetes.io/projected/30bf2b72-7578-4d79-b11f-e7d8f34dc805-kube-api-access-lc7tb\") pod \"glance-db-create-7jpvj\" (UID: \"30bf2b72-7578-4d79-b11f-e7d8f34dc805\") " pod="openstack-kuttl-tests/glance-db-create-7jpvj" Jan 20 17:46:33 crc kubenswrapper[4558]: I0120 17:46:33.560377 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b26fb19f-321d-4dd1-99dd-a706e445d49c-operator-scripts\") pod \"glance-9ded-account-create-update-l5qrm\" (UID: \"b26fb19f-321d-4dd1-99dd-a706e445d49c\") " pod="openstack-kuttl-tests/glance-9ded-account-create-update-l5qrm" Jan 20 17:46:33 crc kubenswrapper[4558]: I0120 17:46:33.560473 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9tfdx\" (UniqueName: 
\"kubernetes.io/projected/b26fb19f-321d-4dd1-99dd-a706e445d49c-kube-api-access-9tfdx\") pod \"glance-9ded-account-create-update-l5qrm\" (UID: \"b26fb19f-321d-4dd1-99dd-a706e445d49c\") " pod="openstack-kuttl-tests/glance-9ded-account-create-update-l5qrm" Jan 20 17:46:33 crc kubenswrapper[4558]: I0120 17:46:33.561067 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b26fb19f-321d-4dd1-99dd-a706e445d49c-operator-scripts\") pod \"glance-9ded-account-create-update-l5qrm\" (UID: \"b26fb19f-321d-4dd1-99dd-a706e445d49c\") " pod="openstack-kuttl-tests/glance-9ded-account-create-update-l5qrm" Jan 20 17:46:33 crc kubenswrapper[4558]: I0120 17:46:33.578828 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9tfdx\" (UniqueName: \"kubernetes.io/projected/b26fb19f-321d-4dd1-99dd-a706e445d49c-kube-api-access-9tfdx\") pod \"glance-9ded-account-create-update-l5qrm\" (UID: \"b26fb19f-321d-4dd1-99dd-a706e445d49c\") " pod="openstack-kuttl-tests/glance-9ded-account-create-update-l5qrm" Jan 20 17:46:33 crc kubenswrapper[4558]: I0120 17:46:33.603083 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-db-create-7jpvj" Jan 20 17:46:33 crc kubenswrapper[4558]: I0120 17:46:33.731007 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-9ded-account-create-update-l5qrm" Jan 20 17:46:34 crc kubenswrapper[4558]: I0120 17:46:34.038602 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-db-create-7jpvj"] Jan 20 17:46:34 crc kubenswrapper[4558]: I0120 17:46:34.069338 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/64d029c7-79c6-40fc-b4d2-ac5bfbc387a0-etc-swift\") pod \"swift-storage-0\" (UID: \"64d029c7-79c6-40fc-b4d2-ac5bfbc387a0\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:46:34 crc kubenswrapper[4558]: E0120 17:46:34.070412 4558 projected.go:288] Couldn't get configMap openstack-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 20 17:46:34 crc kubenswrapper[4558]: E0120 17:46:34.070438 4558 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Jan 20 17:46:34 crc kubenswrapper[4558]: E0120 17:46:34.070489 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/64d029c7-79c6-40fc-b4d2-ac5bfbc387a0-etc-swift podName:64d029c7-79c6-40fc-b4d2-ac5bfbc387a0 nodeName:}" failed. No retries permitted until 2026-01-20 17:46:38.070471106 +0000 UTC m=+3891.830809074 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/64d029c7-79c6-40fc-b4d2-ac5bfbc387a0-etc-swift") pod "swift-storage-0" (UID: "64d029c7-79c6-40fc-b4d2-ac5bfbc387a0") : configmap "swift-ring-files" not found Jan 20 17:46:34 crc kubenswrapper[4558]: I0120 17:46:34.100734 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-create-7jpvj" event={"ID":"30bf2b72-7578-4d79-b11f-e7d8f34dc805","Type":"ContainerStarted","Data":"71a11a6328b46908a9a09c2aaabed2eac151919e026804777213cb03398007cf"} Jan 20 17:46:34 crc kubenswrapper[4558]: I0120 17:46:34.100958 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:46:34 crc kubenswrapper[4558]: I0120 17:46:34.130045 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-9ded-account-create-update-l5qrm"] Jan 20 17:46:35 crc kubenswrapper[4558]: I0120 17:46:35.045628 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/root-account-create-update-dqhmm"] Jan 20 17:46:35 crc kubenswrapper[4558]: I0120 17:46:35.046856 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-dqhmm" Jan 20 17:46:35 crc kubenswrapper[4558]: I0120 17:46:35.048806 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"openstack-mariadb-root-db-secret" Jan 20 17:46:35 crc kubenswrapper[4558]: I0120 17:46:35.061185 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-dqhmm"] Jan 20 17:46:35 crc kubenswrapper[4558]: I0120 17:46:35.122904 4558 generic.go:334] "Generic (PLEG): container finished" podID="30bf2b72-7578-4d79-b11f-e7d8f34dc805" containerID="871c763a67bacf5aab84d0674fb5be5ddbcc187543e0fee9119ff7443f932ffc" exitCode=0 Jan 20 17:46:35 crc kubenswrapper[4558]: I0120 17:46:35.123013 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-create-7jpvj" event={"ID":"30bf2b72-7578-4d79-b11f-e7d8f34dc805","Type":"ContainerDied","Data":"871c763a67bacf5aab84d0674fb5be5ddbcc187543e0fee9119ff7443f932ffc"} Jan 20 17:46:35 crc kubenswrapper[4558]: I0120 17:46:35.125727 4558 generic.go:334] "Generic (PLEG): container finished" podID="b26fb19f-321d-4dd1-99dd-a706e445d49c" containerID="324d9081397c220fd77997997d3f091e61e407a5fbc6bb38370055c3f9f680f3" exitCode=0 Jan 20 17:46:35 crc kubenswrapper[4558]: I0120 17:46:35.125993 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-9ded-account-create-update-l5qrm" event={"ID":"b26fb19f-321d-4dd1-99dd-a706e445d49c","Type":"ContainerDied","Data":"324d9081397c220fd77997997d3f091e61e407a5fbc6bb38370055c3f9f680f3"} Jan 20 17:46:35 crc kubenswrapper[4558]: I0120 17:46:35.126029 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-9ded-account-create-update-l5qrm" event={"ID":"b26fb19f-321d-4dd1-99dd-a706e445d49c","Type":"ContainerStarted","Data":"55e9520780b0617cc5490cb47dc140bf6fc97ac83a36a316fd1b02cf5a61cd41"} Jan 20 17:46:35 crc kubenswrapper[4558]: I0120 17:46:35.192848 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c7d514a1-04de-4cbe-a4b8-bb57984169de-operator-scripts\") pod \"root-account-create-update-dqhmm\" (UID: \"c7d514a1-04de-4cbe-a4b8-bb57984169de\") " 
pod="openstack-kuttl-tests/root-account-create-update-dqhmm" Jan 20 17:46:35 crc kubenswrapper[4558]: I0120 17:46:35.192941 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hgrz2\" (UniqueName: \"kubernetes.io/projected/c7d514a1-04de-4cbe-a4b8-bb57984169de-kube-api-access-hgrz2\") pod \"root-account-create-update-dqhmm\" (UID: \"c7d514a1-04de-4cbe-a4b8-bb57984169de\") " pod="openstack-kuttl-tests/root-account-create-update-dqhmm" Jan 20 17:46:35 crc kubenswrapper[4558]: I0120 17:46:35.295389 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c7d514a1-04de-4cbe-a4b8-bb57984169de-operator-scripts\") pod \"root-account-create-update-dqhmm\" (UID: \"c7d514a1-04de-4cbe-a4b8-bb57984169de\") " pod="openstack-kuttl-tests/root-account-create-update-dqhmm" Jan 20 17:46:35 crc kubenswrapper[4558]: I0120 17:46:35.295778 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hgrz2\" (UniqueName: \"kubernetes.io/projected/c7d514a1-04de-4cbe-a4b8-bb57984169de-kube-api-access-hgrz2\") pod \"root-account-create-update-dqhmm\" (UID: \"c7d514a1-04de-4cbe-a4b8-bb57984169de\") " pod="openstack-kuttl-tests/root-account-create-update-dqhmm" Jan 20 17:46:35 crc kubenswrapper[4558]: I0120 17:46:35.296651 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c7d514a1-04de-4cbe-a4b8-bb57984169de-operator-scripts\") pod \"root-account-create-update-dqhmm\" (UID: \"c7d514a1-04de-4cbe-a4b8-bb57984169de\") " pod="openstack-kuttl-tests/root-account-create-update-dqhmm" Jan 20 17:46:35 crc kubenswrapper[4558]: I0120 17:46:35.314052 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hgrz2\" (UniqueName: \"kubernetes.io/projected/c7d514a1-04de-4cbe-a4b8-bb57984169de-kube-api-access-hgrz2\") pod \"root-account-create-update-dqhmm\" (UID: \"c7d514a1-04de-4cbe-a4b8-bb57984169de\") " pod="openstack-kuttl-tests/root-account-create-update-dqhmm" Jan 20 17:46:35 crc kubenswrapper[4558]: I0120 17:46:35.364513 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-dqhmm" Jan 20 17:46:35 crc kubenswrapper[4558]: I0120 17:46:35.773658 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-dqhmm"] Jan 20 17:46:35 crc kubenswrapper[4558]: W0120 17:46:35.776887 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc7d514a1_04de_4cbe_a4b8_bb57984169de.slice/crio-3ebc7428b41cc0f5bc90ca21f766dfac8e957a62ee7bf5e2f5e7d782c6b0b0bf WatchSource:0}: Error finding container 3ebc7428b41cc0f5bc90ca21f766dfac8e957a62ee7bf5e2f5e7d782c6b0b0bf: Status 404 returned error can't find the container with id 3ebc7428b41cc0f5bc90ca21f766dfac8e957a62ee7bf5e2f5e7d782c6b0b0bf Jan 20 17:46:36 crc kubenswrapper[4558]: I0120 17:46:36.140189 4558 generic.go:334] "Generic (PLEG): container finished" podID="c7d514a1-04de-4cbe-a4b8-bb57984169de" containerID="2dc44c6d293061ff251208fd3424c4f248fb871f863ac0ae98881f72b9a345ee" exitCode=0 Jan 20 17:46:36 crc kubenswrapper[4558]: I0120 17:46:36.140298 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-dqhmm" event={"ID":"c7d514a1-04de-4cbe-a4b8-bb57984169de","Type":"ContainerDied","Data":"2dc44c6d293061ff251208fd3424c4f248fb871f863ac0ae98881f72b9a345ee"} Jan 20 17:46:36 crc kubenswrapper[4558]: I0120 17:46:36.140383 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-dqhmm" event={"ID":"c7d514a1-04de-4cbe-a4b8-bb57984169de","Type":"ContainerStarted","Data":"3ebc7428b41cc0f5bc90ca21f766dfac8e957a62ee7bf5e2f5e7d782c6b0b0bf"} Jan 20 17:46:36 crc kubenswrapper[4558]: I0120 17:46:36.518869 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-db-create-7jpvj" Jan 20 17:46:36 crc kubenswrapper[4558]: I0120 17:46:36.524179 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-9ded-account-create-update-l5qrm" Jan 20 17:46:36 crc kubenswrapper[4558]: I0120 17:46:36.718006 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b26fb19f-321d-4dd1-99dd-a706e445d49c-operator-scripts\") pod \"b26fb19f-321d-4dd1-99dd-a706e445d49c\" (UID: \"b26fb19f-321d-4dd1-99dd-a706e445d49c\") " Jan 20 17:46:36 crc kubenswrapper[4558]: I0120 17:46:36.718373 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/30bf2b72-7578-4d79-b11f-e7d8f34dc805-operator-scripts\") pod \"30bf2b72-7578-4d79-b11f-e7d8f34dc805\" (UID: \"30bf2b72-7578-4d79-b11f-e7d8f34dc805\") " Jan 20 17:46:36 crc kubenswrapper[4558]: I0120 17:46:36.718419 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lc7tb\" (UniqueName: \"kubernetes.io/projected/30bf2b72-7578-4d79-b11f-e7d8f34dc805-kube-api-access-lc7tb\") pod \"30bf2b72-7578-4d79-b11f-e7d8f34dc805\" (UID: \"30bf2b72-7578-4d79-b11f-e7d8f34dc805\") " Jan 20 17:46:36 crc kubenswrapper[4558]: I0120 17:46:36.718458 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9tfdx\" (UniqueName: \"kubernetes.io/projected/b26fb19f-321d-4dd1-99dd-a706e445d49c-kube-api-access-9tfdx\") pod \"b26fb19f-321d-4dd1-99dd-a706e445d49c\" (UID: \"b26fb19f-321d-4dd1-99dd-a706e445d49c\") " Jan 20 17:46:36 crc kubenswrapper[4558]: I0120 17:46:36.720212 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/30bf2b72-7578-4d79-b11f-e7d8f34dc805-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "30bf2b72-7578-4d79-b11f-e7d8f34dc805" (UID: "30bf2b72-7578-4d79-b11f-e7d8f34dc805"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:46:36 crc kubenswrapper[4558]: I0120 17:46:36.721052 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b26fb19f-321d-4dd1-99dd-a706e445d49c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b26fb19f-321d-4dd1-99dd-a706e445d49c" (UID: "b26fb19f-321d-4dd1-99dd-a706e445d49c"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:46:36 crc kubenswrapper[4558]: I0120 17:46:36.725296 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b26fb19f-321d-4dd1-99dd-a706e445d49c-kube-api-access-9tfdx" (OuterVolumeSpecName: "kube-api-access-9tfdx") pod "b26fb19f-321d-4dd1-99dd-a706e445d49c" (UID: "b26fb19f-321d-4dd1-99dd-a706e445d49c"). InnerVolumeSpecName "kube-api-access-9tfdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:46:36 crc kubenswrapper[4558]: I0120 17:46:36.725425 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/30bf2b72-7578-4d79-b11f-e7d8f34dc805-kube-api-access-lc7tb" (OuterVolumeSpecName: "kube-api-access-lc7tb") pod "30bf2b72-7578-4d79-b11f-e7d8f34dc805" (UID: "30bf2b72-7578-4d79-b11f-e7d8f34dc805"). InnerVolumeSpecName "kube-api-access-lc7tb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:46:36 crc kubenswrapper[4558]: I0120 17:46:36.820266 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/30bf2b72-7578-4d79-b11f-e7d8f34dc805-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:46:36 crc kubenswrapper[4558]: I0120 17:46:36.820296 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lc7tb\" (UniqueName: \"kubernetes.io/projected/30bf2b72-7578-4d79-b11f-e7d8f34dc805-kube-api-access-lc7tb\") on node \"crc\" DevicePath \"\"" Jan 20 17:46:36 crc kubenswrapper[4558]: I0120 17:46:36.820310 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9tfdx\" (UniqueName: \"kubernetes.io/projected/b26fb19f-321d-4dd1-99dd-a706e445d49c-kube-api-access-9tfdx\") on node \"crc\" DevicePath \"\"" Jan 20 17:46:36 crc kubenswrapper[4558]: I0120 17:46:36.820322 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b26fb19f-321d-4dd1-99dd-a706e445d49c-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:46:37 crc kubenswrapper[4558]: I0120 17:46:37.151761 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-create-7jpvj" event={"ID":"30bf2b72-7578-4d79-b11f-e7d8f34dc805","Type":"ContainerDied","Data":"71a11a6328b46908a9a09c2aaabed2eac151919e026804777213cb03398007cf"} Jan 20 17:46:37 crc kubenswrapper[4558]: I0120 17:46:37.151802 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-db-create-7jpvj" Jan 20 17:46:37 crc kubenswrapper[4558]: I0120 17:46:37.151831 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="71a11a6328b46908a9a09c2aaabed2eac151919e026804777213cb03398007cf" Jan 20 17:46:37 crc kubenswrapper[4558]: I0120 17:46:37.153470 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-9ded-account-create-update-l5qrm" Jan 20 17:46:37 crc kubenswrapper[4558]: I0120 17:46:37.153545 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-9ded-account-create-update-l5qrm" event={"ID":"b26fb19f-321d-4dd1-99dd-a706e445d49c","Type":"ContainerDied","Data":"55e9520780b0617cc5490cb47dc140bf6fc97ac83a36a316fd1b02cf5a61cd41"} Jan 20 17:46:37 crc kubenswrapper[4558]: I0120 17:46:37.153740 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="55e9520780b0617cc5490cb47dc140bf6fc97ac83a36a316fd1b02cf5a61cd41" Jan 20 17:46:37 crc kubenswrapper[4558]: I0120 17:46:37.481523 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-dqhmm" Jan 20 17:46:37 crc kubenswrapper[4558]: I0120 17:46:37.634198 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c7d514a1-04de-4cbe-a4b8-bb57984169de-operator-scripts\") pod \"c7d514a1-04de-4cbe-a4b8-bb57984169de\" (UID: \"c7d514a1-04de-4cbe-a4b8-bb57984169de\") " Jan 20 17:46:37 crc kubenswrapper[4558]: I0120 17:46:37.634635 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hgrz2\" (UniqueName: \"kubernetes.io/projected/c7d514a1-04de-4cbe-a4b8-bb57984169de-kube-api-access-hgrz2\") pod \"c7d514a1-04de-4cbe-a4b8-bb57984169de\" (UID: \"c7d514a1-04de-4cbe-a4b8-bb57984169de\") " Jan 20 17:46:37 crc kubenswrapper[4558]: I0120 17:46:37.635041 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c7d514a1-04de-4cbe-a4b8-bb57984169de-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c7d514a1-04de-4cbe-a4b8-bb57984169de" (UID: "c7d514a1-04de-4cbe-a4b8-bb57984169de"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:46:37 crc kubenswrapper[4558]: I0120 17:46:37.639488 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c7d514a1-04de-4cbe-a4b8-bb57984169de-kube-api-access-hgrz2" (OuterVolumeSpecName: "kube-api-access-hgrz2") pod "c7d514a1-04de-4cbe-a4b8-bb57984169de" (UID: "c7d514a1-04de-4cbe-a4b8-bb57984169de"). InnerVolumeSpecName "kube-api-access-hgrz2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:46:37 crc kubenswrapper[4558]: I0120 17:46:37.741357 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hgrz2\" (UniqueName: \"kubernetes.io/projected/c7d514a1-04de-4cbe-a4b8-bb57984169de-kube-api-access-hgrz2\") on node \"crc\" DevicePath \"\"" Jan 20 17:46:37 crc kubenswrapper[4558]: I0120 17:46:37.741428 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c7d514a1-04de-4cbe-a4b8-bb57984169de-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:46:38 crc kubenswrapper[4558]: I0120 17:46:38.148943 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/64d029c7-79c6-40fc-b4d2-ac5bfbc387a0-etc-swift\") pod \"swift-storage-0\" (UID: \"64d029c7-79c6-40fc-b4d2-ac5bfbc387a0\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:46:38 crc kubenswrapper[4558]: I0120 17:46:38.153564 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/64d029c7-79c6-40fc-b4d2-ac5bfbc387a0-etc-swift\") pod \"swift-storage-0\" (UID: \"64d029c7-79c6-40fc-b4d2-ac5bfbc387a0\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:46:38 crc kubenswrapper[4558]: I0120 17:46:38.163312 4558 generic.go:334] "Generic (PLEG): container finished" podID="45770d07-3687-4b5d-b3a0-01112456b92f" containerID="14c2e0d4dc0056dcda162ff4dd3917c5a10a8a300b1513c3f8b7fb9321d4a88e" exitCode=0 Jan 20 17:46:38 crc kubenswrapper[4558]: I0120 17:46:38.163418 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-ring-rebalance-ksvjk" 
event={"ID":"45770d07-3687-4b5d-b3a0-01112456b92f","Type":"ContainerDied","Data":"14c2e0d4dc0056dcda162ff4dd3917c5a10a8a300b1513c3f8b7fb9321d4a88e"} Jan 20 17:46:38 crc kubenswrapper[4558]: I0120 17:46:38.166135 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-dqhmm" event={"ID":"c7d514a1-04de-4cbe-a4b8-bb57984169de","Type":"ContainerDied","Data":"3ebc7428b41cc0f5bc90ca21f766dfac8e957a62ee7bf5e2f5e7d782c6b0b0bf"} Jan 20 17:46:38 crc kubenswrapper[4558]: I0120 17:46:38.166201 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3ebc7428b41cc0f5bc90ca21f766dfac8e957a62ee7bf5e2f5e7d782c6b0b0bf" Jan 20 17:46:38 crc kubenswrapper[4558]: I0120 17:46:38.166249 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-dqhmm" Jan 20 17:46:38 crc kubenswrapper[4558]: I0120 17:46:38.346118 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:46:38 crc kubenswrapper[4558]: I0120 17:46:38.541442 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-db-sync-rht6q"] Jan 20 17:46:38 crc kubenswrapper[4558]: E0120 17:46:38.541816 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c7d514a1-04de-4cbe-a4b8-bb57984169de" containerName="mariadb-account-create-update" Jan 20 17:46:38 crc kubenswrapper[4558]: I0120 17:46:38.541834 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c7d514a1-04de-4cbe-a4b8-bb57984169de" containerName="mariadb-account-create-update" Jan 20 17:46:38 crc kubenswrapper[4558]: E0120 17:46:38.541850 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b26fb19f-321d-4dd1-99dd-a706e445d49c" containerName="mariadb-account-create-update" Jan 20 17:46:38 crc kubenswrapper[4558]: I0120 17:46:38.541857 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b26fb19f-321d-4dd1-99dd-a706e445d49c" containerName="mariadb-account-create-update" Jan 20 17:46:38 crc kubenswrapper[4558]: E0120 17:46:38.541887 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30bf2b72-7578-4d79-b11f-e7d8f34dc805" containerName="mariadb-database-create" Jan 20 17:46:38 crc kubenswrapper[4558]: I0120 17:46:38.541892 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="30bf2b72-7578-4d79-b11f-e7d8f34dc805" containerName="mariadb-database-create" Jan 20 17:46:38 crc kubenswrapper[4558]: I0120 17:46:38.542028 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="30bf2b72-7578-4d79-b11f-e7d8f34dc805" containerName="mariadb-database-create" Jan 20 17:46:38 crc kubenswrapper[4558]: I0120 17:46:38.542043 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c7d514a1-04de-4cbe-a4b8-bb57984169de" containerName="mariadb-account-create-update" Jan 20 17:46:38 crc kubenswrapper[4558]: I0120 17:46:38.542059 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b26fb19f-321d-4dd1-99dd-a706e445d49c" containerName="mariadb-account-create-update" Jan 20 17:46:38 crc kubenswrapper[4558]: I0120 17:46:38.542606 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-db-sync-rht6q" Jan 20 17:46:38 crc kubenswrapper[4558]: I0120 17:46:38.545615 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-config-data" Jan 20 17:46:38 crc kubenswrapper[4558]: I0120 17:46:38.546785 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-glance-dockercfg-v4jnh" Jan 20 17:46:38 crc kubenswrapper[4558]: I0120 17:46:38.551627 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-db-sync-rht6q"] Jan 20 17:46:38 crc kubenswrapper[4558]: I0120 17:46:38.556294 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f23cc4f0-d10f-43ee-8f56-d72294b6df8c-db-sync-config-data\") pod \"glance-db-sync-rht6q\" (UID: \"f23cc4f0-d10f-43ee-8f56-d72294b6df8c\") " pod="openstack-kuttl-tests/glance-db-sync-rht6q" Jan 20 17:46:38 crc kubenswrapper[4558]: I0120 17:46:38.556340 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6mfcr\" (UniqueName: \"kubernetes.io/projected/f23cc4f0-d10f-43ee-8f56-d72294b6df8c-kube-api-access-6mfcr\") pod \"glance-db-sync-rht6q\" (UID: \"f23cc4f0-d10f-43ee-8f56-d72294b6df8c\") " pod="openstack-kuttl-tests/glance-db-sync-rht6q" Jan 20 17:46:38 crc kubenswrapper[4558]: I0120 17:46:38.556391 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f23cc4f0-d10f-43ee-8f56-d72294b6df8c-config-data\") pod \"glance-db-sync-rht6q\" (UID: \"f23cc4f0-d10f-43ee-8f56-d72294b6df8c\") " pod="openstack-kuttl-tests/glance-db-sync-rht6q" Jan 20 17:46:38 crc kubenswrapper[4558]: I0120 17:46:38.556469 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f23cc4f0-d10f-43ee-8f56-d72294b6df8c-combined-ca-bundle\") pod \"glance-db-sync-rht6q\" (UID: \"f23cc4f0-d10f-43ee-8f56-d72294b6df8c\") " pod="openstack-kuttl-tests/glance-db-sync-rht6q" Jan 20 17:46:38 crc kubenswrapper[4558]: I0120 17:46:38.658270 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f23cc4f0-d10f-43ee-8f56-d72294b6df8c-combined-ca-bundle\") pod \"glance-db-sync-rht6q\" (UID: \"f23cc4f0-d10f-43ee-8f56-d72294b6df8c\") " pod="openstack-kuttl-tests/glance-db-sync-rht6q" Jan 20 17:46:38 crc kubenswrapper[4558]: I0120 17:46:38.658342 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f23cc4f0-d10f-43ee-8f56-d72294b6df8c-db-sync-config-data\") pod \"glance-db-sync-rht6q\" (UID: \"f23cc4f0-d10f-43ee-8f56-d72294b6df8c\") " pod="openstack-kuttl-tests/glance-db-sync-rht6q" Jan 20 17:46:38 crc kubenswrapper[4558]: I0120 17:46:38.658394 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6mfcr\" (UniqueName: \"kubernetes.io/projected/f23cc4f0-d10f-43ee-8f56-d72294b6df8c-kube-api-access-6mfcr\") pod \"glance-db-sync-rht6q\" (UID: \"f23cc4f0-d10f-43ee-8f56-d72294b6df8c\") " pod="openstack-kuttl-tests/glance-db-sync-rht6q" Jan 20 17:46:38 crc kubenswrapper[4558]: I0120 17:46:38.658485 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f23cc4f0-d10f-43ee-8f56-d72294b6df8c-config-data\") pod \"glance-db-sync-rht6q\" (UID: \"f23cc4f0-d10f-43ee-8f56-d72294b6df8c\") " pod="openstack-kuttl-tests/glance-db-sync-rht6q" Jan 20 17:46:38 crc kubenswrapper[4558]: I0120 17:46:38.665075 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f23cc4f0-d10f-43ee-8f56-d72294b6df8c-config-data\") pod \"glance-db-sync-rht6q\" (UID: \"f23cc4f0-d10f-43ee-8f56-d72294b6df8c\") " pod="openstack-kuttl-tests/glance-db-sync-rht6q" Jan 20 17:46:38 crc kubenswrapper[4558]: I0120 17:46:38.665418 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f23cc4f0-d10f-43ee-8f56-d72294b6df8c-combined-ca-bundle\") pod \"glance-db-sync-rht6q\" (UID: \"f23cc4f0-d10f-43ee-8f56-d72294b6df8c\") " pod="openstack-kuttl-tests/glance-db-sync-rht6q" Jan 20 17:46:38 crc kubenswrapper[4558]: I0120 17:46:38.669765 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f23cc4f0-d10f-43ee-8f56-d72294b6df8c-db-sync-config-data\") pod \"glance-db-sync-rht6q\" (UID: \"f23cc4f0-d10f-43ee-8f56-d72294b6df8c\") " pod="openstack-kuttl-tests/glance-db-sync-rht6q" Jan 20 17:46:38 crc kubenswrapper[4558]: I0120 17:46:38.685772 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6mfcr\" (UniqueName: \"kubernetes.io/projected/f23cc4f0-d10f-43ee-8f56-d72294b6df8c-kube-api-access-6mfcr\") pod \"glance-db-sync-rht6q\" (UID: \"f23cc4f0-d10f-43ee-8f56-d72294b6df8c\") " pod="openstack-kuttl-tests/glance-db-sync-rht6q" Jan 20 17:46:38 crc kubenswrapper[4558]: I0120 17:46:38.748019 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-storage-0"] Jan 20 17:46:38 crc kubenswrapper[4558]: W0120 17:46:38.750859 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod64d029c7_79c6_40fc_b4d2_ac5bfbc387a0.slice/crio-d156fc744aaa2e9ee5916bc5532b984f60da0f513c57a66b67e105fb18197a1e WatchSource:0}: Error finding container d156fc744aaa2e9ee5916bc5532b984f60da0f513c57a66b67e105fb18197a1e: Status 404 returned error can't find the container with id d156fc744aaa2e9ee5916bc5532b984f60da0f513c57a66b67e105fb18197a1e Jan 20 17:46:38 crc kubenswrapper[4558]: I0120 17:46:38.864276 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-db-sync-rht6q" Jan 20 17:46:39 crc kubenswrapper[4558]: I0120 17:46:39.175502 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"64d029c7-79c6-40fc-b4d2-ac5bfbc387a0","Type":"ContainerStarted","Data":"34b81cfd7ee5d1e0183fe2711993e5340813f109d83301b4976cc429efc8bc12"} Jan 20 17:46:39 crc kubenswrapper[4558]: I0120 17:46:39.175848 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"64d029c7-79c6-40fc-b4d2-ac5bfbc387a0","Type":"ContainerStarted","Data":"9c8048073ccb7077c6ce8072e9a03b4c7c45eacbbfa51aa61971cd31bec984b4"} Jan 20 17:46:39 crc kubenswrapper[4558]: I0120 17:46:39.175861 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"64d029c7-79c6-40fc-b4d2-ac5bfbc387a0","Type":"ContainerStarted","Data":"2d0d3c4c9084f5b47b5268a7e1d20896d7cf8d59d93e03270dfa21fcb273c377"} Jan 20 17:46:39 crc kubenswrapper[4558]: I0120 17:46:39.175873 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"64d029c7-79c6-40fc-b4d2-ac5bfbc387a0","Type":"ContainerStarted","Data":"d156fc744aaa2e9ee5916bc5532b984f60da0f513c57a66b67e105fb18197a1e"} Jan 20 17:46:39 crc kubenswrapper[4558]: W0120 17:46:39.280560 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf23cc4f0_d10f_43ee_8f56_d72294b6df8c.slice/crio-f7abf6ee5a1aea06a163435b9a7f53d2513a5dc5adbf05f5be40da62347ebd1f WatchSource:0}: Error finding container f7abf6ee5a1aea06a163435b9a7f53d2513a5dc5adbf05f5be40da62347ebd1f: Status 404 returned error can't find the container with id f7abf6ee5a1aea06a163435b9a7f53d2513a5dc5adbf05f5be40da62347ebd1f Jan 20 17:46:39 crc kubenswrapper[4558]: I0120 17:46:39.283939 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-db-sync-rht6q"] Jan 20 17:46:39 crc kubenswrapper[4558]: I0120 17:46:39.526160 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/swift-ring-rebalance-ksvjk" Jan 20 17:46:39 crc kubenswrapper[4558]: I0120 17:46:39.571372 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/45770d07-3687-4b5d-b3a0-01112456b92f-scripts\") pod \"45770d07-3687-4b5d-b3a0-01112456b92f\" (UID: \"45770d07-3687-4b5d-b3a0-01112456b92f\") " Jan 20 17:46:39 crc kubenswrapper[4558]: I0120 17:46:39.571447 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/45770d07-3687-4b5d-b3a0-01112456b92f-ring-data-devices\") pod \"45770d07-3687-4b5d-b3a0-01112456b92f\" (UID: \"45770d07-3687-4b5d-b3a0-01112456b92f\") " Jan 20 17:46:39 crc kubenswrapper[4558]: I0120 17:46:39.571655 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/45770d07-3687-4b5d-b3a0-01112456b92f-etc-swift\") pod \"45770d07-3687-4b5d-b3a0-01112456b92f\" (UID: \"45770d07-3687-4b5d-b3a0-01112456b92f\") " Jan 20 17:46:39 crc kubenswrapper[4558]: I0120 17:46:39.571678 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/45770d07-3687-4b5d-b3a0-01112456b92f-swiftconf\") pod \"45770d07-3687-4b5d-b3a0-01112456b92f\" (UID: \"45770d07-3687-4b5d-b3a0-01112456b92f\") " Jan 20 17:46:39 crc kubenswrapper[4558]: I0120 17:46:39.571781 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dtvmb\" (UniqueName: \"kubernetes.io/projected/45770d07-3687-4b5d-b3a0-01112456b92f-kube-api-access-dtvmb\") pod \"45770d07-3687-4b5d-b3a0-01112456b92f\" (UID: \"45770d07-3687-4b5d-b3a0-01112456b92f\") " Jan 20 17:46:39 crc kubenswrapper[4558]: I0120 17:46:39.571818 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/45770d07-3687-4b5d-b3a0-01112456b92f-dispersionconf\") pod \"45770d07-3687-4b5d-b3a0-01112456b92f\" (UID: \"45770d07-3687-4b5d-b3a0-01112456b92f\") " Jan 20 17:46:39 crc kubenswrapper[4558]: I0120 17:46:39.571838 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45770d07-3687-4b5d-b3a0-01112456b92f-combined-ca-bundle\") pod \"45770d07-3687-4b5d-b3a0-01112456b92f\" (UID: \"45770d07-3687-4b5d-b3a0-01112456b92f\") " Jan 20 17:46:39 crc kubenswrapper[4558]: I0120 17:46:39.574488 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/45770d07-3687-4b5d-b3a0-01112456b92f-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "45770d07-3687-4b5d-b3a0-01112456b92f" (UID: "45770d07-3687-4b5d-b3a0-01112456b92f"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:46:39 crc kubenswrapper[4558]: I0120 17:46:39.579925 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/45770d07-3687-4b5d-b3a0-01112456b92f-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "45770d07-3687-4b5d-b3a0-01112456b92f" (UID: "45770d07-3687-4b5d-b3a0-01112456b92f"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:46:39 crc kubenswrapper[4558]: I0120 17:46:39.594692 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/45770d07-3687-4b5d-b3a0-01112456b92f-kube-api-access-dtvmb" (OuterVolumeSpecName: "kube-api-access-dtvmb") pod "45770d07-3687-4b5d-b3a0-01112456b92f" (UID: "45770d07-3687-4b5d-b3a0-01112456b92f"). InnerVolumeSpecName "kube-api-access-dtvmb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:46:39 crc kubenswrapper[4558]: I0120 17:46:39.595573 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/45770d07-3687-4b5d-b3a0-01112456b92f-scripts" (OuterVolumeSpecName: "scripts") pod "45770d07-3687-4b5d-b3a0-01112456b92f" (UID: "45770d07-3687-4b5d-b3a0-01112456b92f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:46:39 crc kubenswrapper[4558]: I0120 17:46:39.613306 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/45770d07-3687-4b5d-b3a0-01112456b92f-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "45770d07-3687-4b5d-b3a0-01112456b92f" (UID: "45770d07-3687-4b5d-b3a0-01112456b92f"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:46:39 crc kubenswrapper[4558]: I0120 17:46:39.633773 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/45770d07-3687-4b5d-b3a0-01112456b92f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "45770d07-3687-4b5d-b3a0-01112456b92f" (UID: "45770d07-3687-4b5d-b3a0-01112456b92f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:46:39 crc kubenswrapper[4558]: I0120 17:46:39.673936 4558 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/45770d07-3687-4b5d-b3a0-01112456b92f-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 20 17:46:39 crc kubenswrapper[4558]: I0120 17:46:39.673977 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dtvmb\" (UniqueName: \"kubernetes.io/projected/45770d07-3687-4b5d-b3a0-01112456b92f-kube-api-access-dtvmb\") on node \"crc\" DevicePath \"\"" Jan 20 17:46:39 crc kubenswrapper[4558]: I0120 17:46:39.673988 4558 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/45770d07-3687-4b5d-b3a0-01112456b92f-dispersionconf\") on node \"crc\" DevicePath \"\"" Jan 20 17:46:39 crc kubenswrapper[4558]: I0120 17:46:39.673999 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45770d07-3687-4b5d-b3a0-01112456b92f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:46:39 crc kubenswrapper[4558]: I0120 17:46:39.674009 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/45770d07-3687-4b5d-b3a0-01112456b92f-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:46:39 crc kubenswrapper[4558]: I0120 17:46:39.674017 4558 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/45770d07-3687-4b5d-b3a0-01112456b92f-ring-data-devices\") on node \"crc\" DevicePath \"\"" Jan 20 17:46:39 crc kubenswrapper[4558]: I0120 17:46:39.676312 4558 operation_generator.go:803] UnmountVolume.TearDown 
succeeded for volume "kubernetes.io/secret/45770d07-3687-4b5d-b3a0-01112456b92f-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "45770d07-3687-4b5d-b3a0-01112456b92f" (UID: "45770d07-3687-4b5d-b3a0-01112456b92f"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:46:39 crc kubenswrapper[4558]: I0120 17:46:39.775305 4558 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/45770d07-3687-4b5d-b3a0-01112456b92f-swiftconf\") on node \"crc\" DevicePath \"\"" Jan 20 17:46:40 crc kubenswrapper[4558]: I0120 17:46:40.192343 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-sync-rht6q" event={"ID":"f23cc4f0-d10f-43ee-8f56-d72294b6df8c","Type":"ContainerStarted","Data":"5c017decda645c6d0210efcf18e6ea25bbb27e76266cb4e262a3a03a5802f34c"} Jan 20 17:46:40 crc kubenswrapper[4558]: I0120 17:46:40.192589 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-sync-rht6q" event={"ID":"f23cc4f0-d10f-43ee-8f56-d72294b6df8c","Type":"ContainerStarted","Data":"f7abf6ee5a1aea06a163435b9a7f53d2513a5dc5adbf05f5be40da62347ebd1f"} Jan 20 17:46:40 crc kubenswrapper[4558]: I0120 17:46:40.194367 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-ring-rebalance-ksvjk" event={"ID":"45770d07-3687-4b5d-b3a0-01112456b92f","Type":"ContainerDied","Data":"78227dde0e6492425aec15952b0f9b8dc3d076de788099094df08c5a4ca38087"} Jan 20 17:46:40 crc kubenswrapper[4558]: I0120 17:46:40.194411 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="78227dde0e6492425aec15952b0f9b8dc3d076de788099094df08c5a4ca38087" Jan 20 17:46:40 crc kubenswrapper[4558]: I0120 17:46:40.194449 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/swift-ring-rebalance-ksvjk" Jan 20 17:46:40 crc kubenswrapper[4558]: I0120 17:46:40.199647 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"64d029c7-79c6-40fc-b4d2-ac5bfbc387a0","Type":"ContainerStarted","Data":"5e23628ab2cc3c257412804853dd2671a2b679976e1f78c61ccacc4929dc21cd"} Jan 20 17:46:40 crc kubenswrapper[4558]: I0120 17:46:40.199696 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"64d029c7-79c6-40fc-b4d2-ac5bfbc387a0","Type":"ContainerStarted","Data":"d32a8a2e6495346715f9634ab4b1b4bc18d96e7451b5223fe9976afe7dd5d7c2"} Jan 20 17:46:40 crc kubenswrapper[4558]: I0120 17:46:40.199709 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"64d029c7-79c6-40fc-b4d2-ac5bfbc387a0","Type":"ContainerStarted","Data":"ce8b75288eb38358817832bfb419010893a3a4a082fc83e9ed669e3fe1c9a795"} Jan 20 17:46:40 crc kubenswrapper[4558]: I0120 17:46:40.199718 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"64d029c7-79c6-40fc-b4d2-ac5bfbc387a0","Type":"ContainerStarted","Data":"19a88da3e14609ec200568f698ff68961ad644b5fa7a972150ff49ec0d443ab1"} Jan 20 17:46:40 crc kubenswrapper[4558]: I0120 17:46:40.199726 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"64d029c7-79c6-40fc-b4d2-ac5bfbc387a0","Type":"ContainerStarted","Data":"4842ce500b116933451ba6ea6d30de4651175498d4768796b95de36768b44fc5"} Jan 20 17:46:40 crc kubenswrapper[4558]: I0120 17:46:40.199736 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"64d029c7-79c6-40fc-b4d2-ac5bfbc387a0","Type":"ContainerStarted","Data":"9f25ee1c308a320e8a43a0bc03a0bfd7c3d57cf22c17c10b894b0f930d53afe2"} Jan 20 17:46:40 crc kubenswrapper[4558]: I0120 17:46:40.217036 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-db-sync-rht6q" podStartSLOduration=2.217023531 podStartE2EDuration="2.217023531s" podCreationTimestamp="2026-01-20 17:46:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:46:40.213370762 +0000 UTC m=+3893.973708729" watchObservedRunningTime="2026-01-20 17:46:40.217023531 +0000 UTC m=+3893.977361499" Jan 20 17:46:41 crc kubenswrapper[4558]: I0120 17:46:41.216698 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"64d029c7-79c6-40fc-b4d2-ac5bfbc387a0","Type":"ContainerStarted","Data":"3fca1090ab44eb73ee4987d4fe77236fd5fda45c847b185be75610aec8208529"} Jan 20 17:46:41 crc kubenswrapper[4558]: I0120 17:46:41.218013 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"64d029c7-79c6-40fc-b4d2-ac5bfbc387a0","Type":"ContainerStarted","Data":"049005bceb3d9c7f92a80adfa58232b99be025be046ebfa5fdd30c2501029dd6"} Jan 20 17:46:41 crc kubenswrapper[4558]: I0120 17:46:41.218114 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"64d029c7-79c6-40fc-b4d2-ac5bfbc387a0","Type":"ContainerStarted","Data":"e38f2eedbeb8bee77176fac3f5c4ecdcf1aef7aa98e160b84718ab6bb18bb8a4"} Jan 20 17:46:41 crc kubenswrapper[4558]: I0120 17:46:41.218203 4558 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"64d029c7-79c6-40fc-b4d2-ac5bfbc387a0","Type":"ContainerStarted","Data":"a6e878f0584cff110a93e31a31995a6fbd71d401815d3d94bb3700d40da94020"} Jan 20 17:46:41 crc kubenswrapper[4558]: I0120 17:46:41.218266 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"64d029c7-79c6-40fc-b4d2-ac5bfbc387a0","Type":"ContainerStarted","Data":"0babdc19d278b5edc8070926b4a7ce96ef2d679f28771d60ffadb5ba187b8117"} Jan 20 17:46:41 crc kubenswrapper[4558]: I0120 17:46:41.218321 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"64d029c7-79c6-40fc-b4d2-ac5bfbc387a0","Type":"ContainerStarted","Data":"814558d14f3629d14a008df6a38667804146fe090de98d44f2c3c2447d69c787"} Jan 20 17:46:41 crc kubenswrapper[4558]: I0120 17:46:41.253315 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/swift-storage-0" podStartSLOduration=11.253290143 podStartE2EDuration="11.253290143s" podCreationTimestamp="2026-01-20 17:46:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:46:41.245280341 +0000 UTC m=+3895.005618298" watchObservedRunningTime="2026-01-20 17:46:41.253290143 +0000 UTC m=+3895.013628110" Jan 20 17:46:41 crc kubenswrapper[4558]: I0120 17:46:41.307797 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:46:41 crc kubenswrapper[4558]: I0120 17:46:41.369334 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-66c9456f9f-vp4hx"] Jan 20 17:46:41 crc kubenswrapper[4558]: E0120 17:46:41.369731 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45770d07-3687-4b5d-b3a0-01112456b92f" containerName="swift-ring-rebalance" Jan 20 17:46:41 crc kubenswrapper[4558]: I0120 17:46:41.369751 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="45770d07-3687-4b5d-b3a0-01112456b92f" containerName="swift-ring-rebalance" Jan 20 17:46:41 crc kubenswrapper[4558]: I0120 17:46:41.369916 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="45770d07-3687-4b5d-b3a0-01112456b92f" containerName="swift-ring-rebalance" Jan 20 17:46:41 crc kubenswrapper[4558]: I0120 17:46:41.370745 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-66c9456f9f-vp4hx" Jan 20 17:46:41 crc kubenswrapper[4558]: I0120 17:46:41.376467 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"dns-swift-storage-0" Jan 20 17:46:41 crc kubenswrapper[4558]: I0120 17:46:41.400716 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-66c9456f9f-vp4hx"] Jan 20 17:46:41 crc kubenswrapper[4558]: I0120 17:46:41.508339 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/97bee565-aafb-4ccc-956b-d94ac410e50e-dns-swift-storage-0\") pod \"dnsmasq-dnsmasq-66c9456f9f-vp4hx\" (UID: \"97bee565-aafb-4ccc-956b-d94ac410e50e\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-66c9456f9f-vp4hx" Jan 20 17:46:41 crc kubenswrapper[4558]: I0120 17:46:41.508897 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/97bee565-aafb-4ccc-956b-d94ac410e50e-config\") pod \"dnsmasq-dnsmasq-66c9456f9f-vp4hx\" (UID: \"97bee565-aafb-4ccc-956b-d94ac410e50e\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-66c9456f9f-vp4hx" Jan 20 17:46:41 crc kubenswrapper[4558]: I0120 17:46:41.509091 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/97bee565-aafb-4ccc-956b-d94ac410e50e-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-66c9456f9f-vp4hx\" (UID: \"97bee565-aafb-4ccc-956b-d94ac410e50e\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-66c9456f9f-vp4hx" Jan 20 17:46:41 crc kubenswrapper[4558]: I0120 17:46:41.509224 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xbzsz\" (UniqueName: \"kubernetes.io/projected/97bee565-aafb-4ccc-956b-d94ac410e50e-kube-api-access-xbzsz\") pod \"dnsmasq-dnsmasq-66c9456f9f-vp4hx\" (UID: \"97bee565-aafb-4ccc-956b-d94ac410e50e\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-66c9456f9f-vp4hx" Jan 20 17:46:41 crc kubenswrapper[4558]: I0120 17:46:41.596653 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-dqhmm"] Jan 20 17:46:41 crc kubenswrapper[4558]: I0120 17:46:41.604995 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-dqhmm"] Jan 20 17:46:41 crc kubenswrapper[4558]: I0120 17:46:41.610194 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/97bee565-aafb-4ccc-956b-d94ac410e50e-config\") pod \"dnsmasq-dnsmasq-66c9456f9f-vp4hx\" (UID: \"97bee565-aafb-4ccc-956b-d94ac410e50e\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-66c9456f9f-vp4hx" Jan 20 17:46:41 crc kubenswrapper[4558]: I0120 17:46:41.610657 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/97bee565-aafb-4ccc-956b-d94ac410e50e-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-66c9456f9f-vp4hx\" (UID: \"97bee565-aafb-4ccc-956b-d94ac410e50e\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-66c9456f9f-vp4hx" Jan 20 17:46:41 crc kubenswrapper[4558]: I0120 17:46:41.610719 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xbzsz\" (UniqueName: 
\"kubernetes.io/projected/97bee565-aafb-4ccc-956b-d94ac410e50e-kube-api-access-xbzsz\") pod \"dnsmasq-dnsmasq-66c9456f9f-vp4hx\" (UID: \"97bee565-aafb-4ccc-956b-d94ac410e50e\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-66c9456f9f-vp4hx" Jan 20 17:46:41 crc kubenswrapper[4558]: I0120 17:46:41.610785 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/97bee565-aafb-4ccc-956b-d94ac410e50e-dns-swift-storage-0\") pod \"dnsmasq-dnsmasq-66c9456f9f-vp4hx\" (UID: \"97bee565-aafb-4ccc-956b-d94ac410e50e\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-66c9456f9f-vp4hx" Jan 20 17:46:41 crc kubenswrapper[4558]: I0120 17:46:41.611059 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/97bee565-aafb-4ccc-956b-d94ac410e50e-config\") pod \"dnsmasq-dnsmasq-66c9456f9f-vp4hx\" (UID: \"97bee565-aafb-4ccc-956b-d94ac410e50e\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-66c9456f9f-vp4hx" Jan 20 17:46:41 crc kubenswrapper[4558]: I0120 17:46:41.611436 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/97bee565-aafb-4ccc-956b-d94ac410e50e-dns-swift-storage-0\") pod \"dnsmasq-dnsmasq-66c9456f9f-vp4hx\" (UID: \"97bee565-aafb-4ccc-956b-d94ac410e50e\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-66c9456f9f-vp4hx" Jan 20 17:46:41 crc kubenswrapper[4558]: I0120 17:46:41.612188 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/97bee565-aafb-4ccc-956b-d94ac410e50e-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-66c9456f9f-vp4hx\" (UID: \"97bee565-aafb-4ccc-956b-d94ac410e50e\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-66c9456f9f-vp4hx" Jan 20 17:46:41 crc kubenswrapper[4558]: I0120 17:46:41.626392 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xbzsz\" (UniqueName: \"kubernetes.io/projected/97bee565-aafb-4ccc-956b-d94ac410e50e-kube-api-access-xbzsz\") pod \"dnsmasq-dnsmasq-66c9456f9f-vp4hx\" (UID: \"97bee565-aafb-4ccc-956b-d94ac410e50e\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-66c9456f9f-vp4hx" Jan 20 17:46:41 crc kubenswrapper[4558]: I0120 17:46:41.707821 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-66c9456f9f-vp4hx" Jan 20 17:46:42 crc kubenswrapper[4558]: I0120 17:46:42.124286 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-66c9456f9f-vp4hx"] Jan 20 17:46:42 crc kubenswrapper[4558]: I0120 17:46:42.229329 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-66c9456f9f-vp4hx" event={"ID":"97bee565-aafb-4ccc-956b-d94ac410e50e","Type":"ContainerStarted","Data":"1ec8c43a7de023dbee48405c043ff3e97f8d251cf1e1ca8acf387367a1804874"} Jan 20 17:46:42 crc kubenswrapper[4558]: I0120 17:46:42.574938 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c7d514a1-04de-4cbe-a4b8-bb57984169de" path="/var/lib/kubelet/pods/c7d514a1-04de-4cbe-a4b8-bb57984169de/volumes" Jan 20 17:46:43 crc kubenswrapper[4558]: I0120 17:46:43.240862 4558 generic.go:334] "Generic (PLEG): container finished" podID="97bee565-aafb-4ccc-956b-d94ac410e50e" containerID="20cd4a7cd58221648d8ea58e162919bdedcfaf4fc3ac5733ce7262c92ed5e83f" exitCode=0 Jan 20 17:46:43 crc kubenswrapper[4558]: I0120 17:46:43.240986 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-66c9456f9f-vp4hx" event={"ID":"97bee565-aafb-4ccc-956b-d94ac410e50e","Type":"ContainerDied","Data":"20cd4a7cd58221648d8ea58e162919bdedcfaf4fc3ac5733ce7262c92ed5e83f"} Jan 20 17:46:43 crc kubenswrapper[4558]: I0120 17:46:43.243223 4558 generic.go:334] "Generic (PLEG): container finished" podID="f23cc4f0-d10f-43ee-8f56-d72294b6df8c" containerID="5c017decda645c6d0210efcf18e6ea25bbb27e76266cb4e262a3a03a5802f34c" exitCode=0 Jan 20 17:46:43 crc kubenswrapper[4558]: I0120 17:46:43.243347 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-sync-rht6q" event={"ID":"f23cc4f0-d10f-43ee-8f56-d72294b6df8c","Type":"ContainerDied","Data":"5c017decda645c6d0210efcf18e6ea25bbb27e76266cb4e262a3a03a5802f34c"} Jan 20 17:46:44 crc kubenswrapper[4558]: I0120 17:46:44.257876 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-66c9456f9f-vp4hx" event={"ID":"97bee565-aafb-4ccc-956b-d94ac410e50e","Type":"ContainerStarted","Data":"9884ce142e2764c93ae8f86078f47582909d78eefe96fec6dbd1907c59c8b11e"} Jan 20 17:46:44 crc kubenswrapper[4558]: I0120 17:46:44.258237 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-66c9456f9f-vp4hx" Jan 20 17:46:44 crc kubenswrapper[4558]: I0120 17:46:44.545922 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-db-sync-rht6q" Jan 20 17:46:44 crc kubenswrapper[4558]: I0120 17:46:44.567709 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-66c9456f9f-vp4hx" podStartSLOduration=3.567687067 podStartE2EDuration="3.567687067s" podCreationTimestamp="2026-01-20 17:46:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:46:44.278220381 +0000 UTC m=+3898.038558347" watchObservedRunningTime="2026-01-20 17:46:44.567687067 +0000 UTC m=+3898.328025034" Jan 20 17:46:44 crc kubenswrapper[4558]: I0120 17:46:44.656877 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6mfcr\" (UniqueName: \"kubernetes.io/projected/f23cc4f0-d10f-43ee-8f56-d72294b6df8c-kube-api-access-6mfcr\") pod \"f23cc4f0-d10f-43ee-8f56-d72294b6df8c\" (UID: \"f23cc4f0-d10f-43ee-8f56-d72294b6df8c\") " Jan 20 17:46:44 crc kubenswrapper[4558]: I0120 17:46:44.656921 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f23cc4f0-d10f-43ee-8f56-d72294b6df8c-config-data\") pod \"f23cc4f0-d10f-43ee-8f56-d72294b6df8c\" (UID: \"f23cc4f0-d10f-43ee-8f56-d72294b6df8c\") " Jan 20 17:46:44 crc kubenswrapper[4558]: I0120 17:46:44.657015 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f23cc4f0-d10f-43ee-8f56-d72294b6df8c-db-sync-config-data\") pod \"f23cc4f0-d10f-43ee-8f56-d72294b6df8c\" (UID: \"f23cc4f0-d10f-43ee-8f56-d72294b6df8c\") " Jan 20 17:46:44 crc kubenswrapper[4558]: I0120 17:46:44.657310 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f23cc4f0-d10f-43ee-8f56-d72294b6df8c-combined-ca-bundle\") pod \"f23cc4f0-d10f-43ee-8f56-d72294b6df8c\" (UID: \"f23cc4f0-d10f-43ee-8f56-d72294b6df8c\") " Jan 20 17:46:44 crc kubenswrapper[4558]: I0120 17:46:44.661924 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f23cc4f0-d10f-43ee-8f56-d72294b6df8c-kube-api-access-6mfcr" (OuterVolumeSpecName: "kube-api-access-6mfcr") pod "f23cc4f0-d10f-43ee-8f56-d72294b6df8c" (UID: "f23cc4f0-d10f-43ee-8f56-d72294b6df8c"). InnerVolumeSpecName "kube-api-access-6mfcr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:46:44 crc kubenswrapper[4558]: I0120 17:46:44.665421 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f23cc4f0-d10f-43ee-8f56-d72294b6df8c-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "f23cc4f0-d10f-43ee-8f56-d72294b6df8c" (UID: "f23cc4f0-d10f-43ee-8f56-d72294b6df8c"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:46:44 crc kubenswrapper[4558]: I0120 17:46:44.676146 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f23cc4f0-d10f-43ee-8f56-d72294b6df8c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f23cc4f0-d10f-43ee-8f56-d72294b6df8c" (UID: "f23cc4f0-d10f-43ee-8f56-d72294b6df8c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:46:44 crc kubenswrapper[4558]: I0120 17:46:44.688722 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f23cc4f0-d10f-43ee-8f56-d72294b6df8c-config-data" (OuterVolumeSpecName: "config-data") pod "f23cc4f0-d10f-43ee-8f56-d72294b6df8c" (UID: "f23cc4f0-d10f-43ee-8f56-d72294b6df8c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:46:44 crc kubenswrapper[4558]: I0120 17:46:44.759728 4558 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f23cc4f0-d10f-43ee-8f56-d72294b6df8c-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:46:44 crc kubenswrapper[4558]: I0120 17:46:44.759764 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f23cc4f0-d10f-43ee-8f56-d72294b6df8c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:46:44 crc kubenswrapper[4558]: I0120 17:46:44.759777 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6mfcr\" (UniqueName: \"kubernetes.io/projected/f23cc4f0-d10f-43ee-8f56-d72294b6df8c-kube-api-access-6mfcr\") on node \"crc\" DevicePath \"\"" Jan 20 17:46:44 crc kubenswrapper[4558]: I0120 17:46:44.759789 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f23cc4f0-d10f-43ee-8f56-d72294b6df8c-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:46:45 crc kubenswrapper[4558]: I0120 17:46:45.269647 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-sync-rht6q" event={"ID":"f23cc4f0-d10f-43ee-8f56-d72294b6df8c","Type":"ContainerDied","Data":"f7abf6ee5a1aea06a163435b9a7f53d2513a5dc5adbf05f5be40da62347ebd1f"} Jan 20 17:46:45 crc kubenswrapper[4558]: I0120 17:46:45.269709 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f7abf6ee5a1aea06a163435b9a7f53d2513a5dc5adbf05f5be40da62347ebd1f" Jan 20 17:46:45 crc kubenswrapper[4558]: I0120 17:46:45.269662 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-db-sync-rht6q" Jan 20 17:46:46 crc kubenswrapper[4558]: I0120 17:46:46.605572 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/root-account-create-update-v9ld9"] Jan 20 17:46:46 crc kubenswrapper[4558]: E0120 17:46:46.606268 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f23cc4f0-d10f-43ee-8f56-d72294b6df8c" containerName="glance-db-sync" Jan 20 17:46:46 crc kubenswrapper[4558]: I0120 17:46:46.606285 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f23cc4f0-d10f-43ee-8f56-d72294b6df8c" containerName="glance-db-sync" Jan 20 17:46:46 crc kubenswrapper[4558]: I0120 17:46:46.606529 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f23cc4f0-d10f-43ee-8f56-d72294b6df8c" containerName="glance-db-sync" Jan 20 17:46:46 crc kubenswrapper[4558]: I0120 17:46:46.607291 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-v9ld9" Jan 20 17:46:46 crc kubenswrapper[4558]: I0120 17:46:46.609092 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"openstack-cell1-mariadb-root-db-secret" Jan 20 17:46:46 crc kubenswrapper[4558]: I0120 17:46:46.612753 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-v9ld9"] Jan 20 17:46:46 crc kubenswrapper[4558]: I0120 17:46:46.799696 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7ea6811a-c54b-451e-9705-5ba4e9b5622f-operator-scripts\") pod \"root-account-create-update-v9ld9\" (UID: \"7ea6811a-c54b-451e-9705-5ba4e9b5622f\") " pod="openstack-kuttl-tests/root-account-create-update-v9ld9" Jan 20 17:46:46 crc kubenswrapper[4558]: I0120 17:46:46.799852 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4twrj\" (UniqueName: \"kubernetes.io/projected/7ea6811a-c54b-451e-9705-5ba4e9b5622f-kube-api-access-4twrj\") pod \"root-account-create-update-v9ld9\" (UID: \"7ea6811a-c54b-451e-9705-5ba4e9b5622f\") " pod="openstack-kuttl-tests/root-account-create-update-v9ld9" Jan 20 17:46:46 crc kubenswrapper[4558]: I0120 17:46:46.901963 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7ea6811a-c54b-451e-9705-5ba4e9b5622f-operator-scripts\") pod \"root-account-create-update-v9ld9\" (UID: \"7ea6811a-c54b-451e-9705-5ba4e9b5622f\") " pod="openstack-kuttl-tests/root-account-create-update-v9ld9" Jan 20 17:46:46 crc kubenswrapper[4558]: I0120 17:46:46.902041 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4twrj\" (UniqueName: \"kubernetes.io/projected/7ea6811a-c54b-451e-9705-5ba4e9b5622f-kube-api-access-4twrj\") pod \"root-account-create-update-v9ld9\" (UID: \"7ea6811a-c54b-451e-9705-5ba4e9b5622f\") " pod="openstack-kuttl-tests/root-account-create-update-v9ld9" Jan 20 17:46:46 crc kubenswrapper[4558]: I0120 17:46:46.902815 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7ea6811a-c54b-451e-9705-5ba4e9b5622f-operator-scripts\") pod \"root-account-create-update-v9ld9\" (UID: \"7ea6811a-c54b-451e-9705-5ba4e9b5622f\") " pod="openstack-kuttl-tests/root-account-create-update-v9ld9" Jan 20 17:46:46 crc kubenswrapper[4558]: I0120 17:46:46.923307 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4twrj\" (UniqueName: \"kubernetes.io/projected/7ea6811a-c54b-451e-9705-5ba4e9b5622f-kube-api-access-4twrj\") pod \"root-account-create-update-v9ld9\" (UID: \"7ea6811a-c54b-451e-9705-5ba4e9b5622f\") " pod="openstack-kuttl-tests/root-account-create-update-v9ld9" Jan 20 17:46:47 crc kubenswrapper[4558]: I0120 17:46:47.222059 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-v9ld9" Jan 20 17:46:47 crc kubenswrapper[4558]: I0120 17:46:47.628911 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-v9ld9"] Jan 20 17:46:47 crc kubenswrapper[4558]: I0120 17:46:47.638773 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"openstack-cell1-mariadb-root-db-secret" Jan 20 17:46:48 crc kubenswrapper[4558]: I0120 17:46:48.302781 4558 generic.go:334] "Generic (PLEG): container finished" podID="c4a11c40-a157-4a28-b1a3-60c211d1d0bf" containerID="a4bc9f62c0f630c628a87ed4cdeafe946a3360d7bb2a8e7f9e501c633d62f580" exitCode=0 Jan 20 17:46:48 crc kubenswrapper[4558]: I0120 17:46:48.302893 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" event={"ID":"c4a11c40-a157-4a28-b1a3-60c211d1d0bf","Type":"ContainerDied","Data":"a4bc9f62c0f630c628a87ed4cdeafe946a3360d7bb2a8e7f9e501c633d62f580"} Jan 20 17:46:48 crc kubenswrapper[4558]: I0120 17:46:48.305860 4558 generic.go:334] "Generic (PLEG): container finished" podID="088e44cc-3515-4736-aa46-721774902209" containerID="dfbb9c8e1cee5c7cb472c9c48b441fb80ff852d27db0124a79c1d7f68c8fa720" exitCode=0 Jan 20 17:46:48 crc kubenswrapper[4558]: I0120 17:46:48.305981 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-0" event={"ID":"088e44cc-3515-4736-aa46-721774902209","Type":"ContainerDied","Data":"dfbb9c8e1cee5c7cb472c9c48b441fb80ff852d27db0124a79c1d7f68c8fa720"} Jan 20 17:46:48 crc kubenswrapper[4558]: I0120 17:46:48.310456 4558 generic.go:334] "Generic (PLEG): container finished" podID="7ea6811a-c54b-451e-9705-5ba4e9b5622f" containerID="a0597f6f3ecf954ffb70d092e87b6d76c3bab5e72a8a9f28787b4ad834a49992" exitCode=0 Jan 20 17:46:48 crc kubenswrapper[4558]: I0120 17:46:48.310526 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-v9ld9" event={"ID":"7ea6811a-c54b-451e-9705-5ba4e9b5622f","Type":"ContainerDied","Data":"a0597f6f3ecf954ffb70d092e87b6d76c3bab5e72a8a9f28787b4ad834a49992"} Jan 20 17:46:48 crc kubenswrapper[4558]: I0120 17:46:48.310553 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-v9ld9" event={"ID":"7ea6811a-c54b-451e-9705-5ba4e9b5622f","Type":"ContainerStarted","Data":"c1804a5acb5936603072f9d4ed85e2dc0bf40e0487450cc5a3b3e9fc7ed62ac7"} Jan 20 17:46:49 crc kubenswrapper[4558]: I0120 17:46:49.321749 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-0" event={"ID":"088e44cc-3515-4736-aa46-721774902209","Type":"ContainerStarted","Data":"0cda7f898a6fe83eef5b3e9a295e4cc99749a10d45642f677ab0d846c1aac497"} Jan 20 17:46:49 crc kubenswrapper[4558]: I0120 17:46:49.323062 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:46:49 crc kubenswrapper[4558]: I0120 17:46:49.325190 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" event={"ID":"c4a11c40-a157-4a28-b1a3-60c211d1d0bf","Type":"ContainerStarted","Data":"ca5aa63a108196583f0f9950193b9c697a06235d9a0ce251fae6acde947d41cf"} Jan 20 17:46:49 crc kubenswrapper[4558]: I0120 17:46:49.326119 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:46:49 crc 
kubenswrapper[4558]: I0120 17:46:49.342983 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/rabbitmq-server-0" podStartSLOduration=36.342972213 podStartE2EDuration="36.342972213s" podCreationTimestamp="2026-01-20 17:46:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:46:49.339802112 +0000 UTC m=+3903.100140079" watchObservedRunningTime="2026-01-20 17:46:49.342972213 +0000 UTC m=+3903.103310180" Jan 20 17:46:49 crc kubenswrapper[4558]: I0120 17:46:49.660222 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-v9ld9" Jan 20 17:46:49 crc kubenswrapper[4558]: I0120 17:46:49.679627 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" podStartSLOduration=36.679611291 podStartE2EDuration="36.679611291s" podCreationTimestamp="2026-01-20 17:46:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:46:49.363714415 +0000 UTC m=+3903.124052382" watchObservedRunningTime="2026-01-20 17:46:49.679611291 +0000 UTC m=+3903.439949258" Jan 20 17:46:49 crc kubenswrapper[4558]: I0120 17:46:49.758245 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4twrj\" (UniqueName: \"kubernetes.io/projected/7ea6811a-c54b-451e-9705-5ba4e9b5622f-kube-api-access-4twrj\") pod \"7ea6811a-c54b-451e-9705-5ba4e9b5622f\" (UID: \"7ea6811a-c54b-451e-9705-5ba4e9b5622f\") " Jan 20 17:46:49 crc kubenswrapper[4558]: I0120 17:46:49.758374 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7ea6811a-c54b-451e-9705-5ba4e9b5622f-operator-scripts\") pod \"7ea6811a-c54b-451e-9705-5ba4e9b5622f\" (UID: \"7ea6811a-c54b-451e-9705-5ba4e9b5622f\") " Jan 20 17:46:49 crc kubenswrapper[4558]: I0120 17:46:49.758783 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7ea6811a-c54b-451e-9705-5ba4e9b5622f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7ea6811a-c54b-451e-9705-5ba4e9b5622f" (UID: "7ea6811a-c54b-451e-9705-5ba4e9b5622f"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:46:49 crc kubenswrapper[4558]: I0120 17:46:49.759346 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7ea6811a-c54b-451e-9705-5ba4e9b5622f-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:46:49 crc kubenswrapper[4558]: I0120 17:46:49.764008 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7ea6811a-c54b-451e-9705-5ba4e9b5622f-kube-api-access-4twrj" (OuterVolumeSpecName: "kube-api-access-4twrj") pod "7ea6811a-c54b-451e-9705-5ba4e9b5622f" (UID: "7ea6811a-c54b-451e-9705-5ba4e9b5622f"). InnerVolumeSpecName "kube-api-access-4twrj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:46:49 crc kubenswrapper[4558]: I0120 17:46:49.861885 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4twrj\" (UniqueName: \"kubernetes.io/projected/7ea6811a-c54b-451e-9705-5ba4e9b5622f-kube-api-access-4twrj\") on node \"crc\" DevicePath \"\"" Jan 20 17:46:50 crc kubenswrapper[4558]: I0120 17:46:50.337837 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-v9ld9" Jan 20 17:46:50 crc kubenswrapper[4558]: I0120 17:46:50.345311 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-v9ld9" event={"ID":"7ea6811a-c54b-451e-9705-5ba4e9b5622f","Type":"ContainerDied","Data":"c1804a5acb5936603072f9d4ed85e2dc0bf40e0487450cc5a3b3e9fc7ed62ac7"} Jan 20 17:46:50 crc kubenswrapper[4558]: I0120 17:46:50.345728 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c1804a5acb5936603072f9d4ed85e2dc0bf40e0487450cc5a3b3e9fc7ed62ac7" Jan 20 17:46:51 crc kubenswrapper[4558]: I0120 17:46:51.710021 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-66c9456f9f-vp4hx" Jan 20 17:46:51 crc kubenswrapper[4558]: I0120 17:46:51.763756 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-2lft9"] Jan 20 17:46:51 crc kubenswrapper[4558]: I0120 17:46:51.768461 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-2lft9" podUID="d7f1ecf4-9630-44e7-a627-cc63de361385" containerName="dnsmasq-dns" containerID="cri-o://e9587547b29543008af942c8543b23bbceae4368b19e52eef1b4d97570b7dd31" gracePeriod=10 Jan 20 17:46:52 crc kubenswrapper[4558]: I0120 17:46:52.203407 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-2lft9" Jan 20 17:46:52 crc kubenswrapper[4558]: I0120 17:46:52.307917 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d7f1ecf4-9630-44e7-a627-cc63de361385-config\") pod \"d7f1ecf4-9630-44e7-a627-cc63de361385\" (UID: \"d7f1ecf4-9630-44e7-a627-cc63de361385\") " Jan 20 17:46:52 crc kubenswrapper[4558]: I0120 17:46:52.308103 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q5625\" (UniqueName: \"kubernetes.io/projected/d7f1ecf4-9630-44e7-a627-cc63de361385-kube-api-access-q5625\") pod \"d7f1ecf4-9630-44e7-a627-cc63de361385\" (UID: \"d7f1ecf4-9630-44e7-a627-cc63de361385\") " Jan 20 17:46:52 crc kubenswrapper[4558]: I0120 17:46:52.308305 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/d7f1ecf4-9630-44e7-a627-cc63de361385-dnsmasq-svc\") pod \"d7f1ecf4-9630-44e7-a627-cc63de361385\" (UID: \"d7f1ecf4-9630-44e7-a627-cc63de361385\") " Jan 20 17:46:52 crc kubenswrapper[4558]: I0120 17:46:52.313636 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d7f1ecf4-9630-44e7-a627-cc63de361385-kube-api-access-q5625" (OuterVolumeSpecName: "kube-api-access-q5625") pod "d7f1ecf4-9630-44e7-a627-cc63de361385" (UID: "d7f1ecf4-9630-44e7-a627-cc63de361385"). InnerVolumeSpecName "kube-api-access-q5625". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:46:52 crc kubenswrapper[4558]: I0120 17:46:52.341692 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d7f1ecf4-9630-44e7-a627-cc63de361385-dnsmasq-svc" (OuterVolumeSpecName: "dnsmasq-svc") pod "d7f1ecf4-9630-44e7-a627-cc63de361385" (UID: "d7f1ecf4-9630-44e7-a627-cc63de361385"). InnerVolumeSpecName "dnsmasq-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:46:52 crc kubenswrapper[4558]: I0120 17:46:52.350748 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d7f1ecf4-9630-44e7-a627-cc63de361385-config" (OuterVolumeSpecName: "config") pod "d7f1ecf4-9630-44e7-a627-cc63de361385" (UID: "d7f1ecf4-9630-44e7-a627-cc63de361385"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:46:52 crc kubenswrapper[4558]: I0120 17:46:52.359544 4558 generic.go:334] "Generic (PLEG): container finished" podID="d7f1ecf4-9630-44e7-a627-cc63de361385" containerID="e9587547b29543008af942c8543b23bbceae4368b19e52eef1b4d97570b7dd31" exitCode=0 Jan 20 17:46:52 crc kubenswrapper[4558]: I0120 17:46:52.359587 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-2lft9" event={"ID":"d7f1ecf4-9630-44e7-a627-cc63de361385","Type":"ContainerDied","Data":"e9587547b29543008af942c8543b23bbceae4368b19e52eef1b4d97570b7dd31"} Jan 20 17:46:52 crc kubenswrapper[4558]: I0120 17:46:52.359618 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-2lft9" event={"ID":"d7f1ecf4-9630-44e7-a627-cc63de361385","Type":"ContainerDied","Data":"5fd11ab40b18250ee0480851315eaa81fc8b968068accc8994df3c21d7aa3474"} Jan 20 17:46:52 crc kubenswrapper[4558]: I0120 17:46:52.359638 4558 scope.go:117] "RemoveContainer" containerID="e9587547b29543008af942c8543b23bbceae4368b19e52eef1b4d97570b7dd31" Jan 20 17:46:52 crc kubenswrapper[4558]: I0120 17:46:52.359773 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-2lft9" Jan 20 17:46:52 crc kubenswrapper[4558]: I0120 17:46:52.401423 4558 scope.go:117] "RemoveContainer" containerID="7286e2ef42d2b070bcebf08a04bdbce2188bb19a50dbfcd899a140b4bd8fc836" Jan 20 17:46:52 crc kubenswrapper[4558]: I0120 17:46:52.401512 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-2lft9"] Jan 20 17:46:52 crc kubenswrapper[4558]: I0120 17:46:52.410359 4558 reconciler_common.go:293] "Volume detached for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/d7f1ecf4-9630-44e7-a627-cc63de361385-dnsmasq-svc\") on node \"crc\" DevicePath \"\"" Jan 20 17:46:52 crc kubenswrapper[4558]: I0120 17:46:52.410391 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d7f1ecf4-9630-44e7-a627-cc63de361385-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:46:52 crc kubenswrapper[4558]: I0120 17:46:52.410406 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q5625\" (UniqueName: \"kubernetes.io/projected/d7f1ecf4-9630-44e7-a627-cc63de361385-kube-api-access-q5625\") on node \"crc\" DevicePath \"\"" Jan 20 17:46:52 crc kubenswrapper[4558]: I0120 17:46:52.412827 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-2lft9"] Jan 20 17:46:52 crc kubenswrapper[4558]: I0120 17:46:52.419328 4558 scope.go:117] "RemoveContainer" containerID="e9587547b29543008af942c8543b23bbceae4368b19e52eef1b4d97570b7dd31" Jan 20 17:46:52 crc kubenswrapper[4558]: E0120 17:46:52.420026 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e9587547b29543008af942c8543b23bbceae4368b19e52eef1b4d97570b7dd31\": container with ID starting with e9587547b29543008af942c8543b23bbceae4368b19e52eef1b4d97570b7dd31 not found: ID does not exist" containerID="e9587547b29543008af942c8543b23bbceae4368b19e52eef1b4d97570b7dd31" Jan 20 17:46:52 crc kubenswrapper[4558]: I0120 17:46:52.420060 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e9587547b29543008af942c8543b23bbceae4368b19e52eef1b4d97570b7dd31"} err="failed to get container status \"e9587547b29543008af942c8543b23bbceae4368b19e52eef1b4d97570b7dd31\": rpc error: code = NotFound desc = could not find container \"e9587547b29543008af942c8543b23bbceae4368b19e52eef1b4d97570b7dd31\": container with ID starting with e9587547b29543008af942c8543b23bbceae4368b19e52eef1b4d97570b7dd31 not found: ID does not exist" Jan 20 17:46:52 crc kubenswrapper[4558]: I0120 17:46:52.420085 4558 scope.go:117] "RemoveContainer" containerID="7286e2ef42d2b070bcebf08a04bdbce2188bb19a50dbfcd899a140b4bd8fc836" Jan 20 17:46:52 crc kubenswrapper[4558]: E0120 17:46:52.420373 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7286e2ef42d2b070bcebf08a04bdbce2188bb19a50dbfcd899a140b4bd8fc836\": container with ID starting with 7286e2ef42d2b070bcebf08a04bdbce2188bb19a50dbfcd899a140b4bd8fc836 not found: ID does not exist" containerID="7286e2ef42d2b070bcebf08a04bdbce2188bb19a50dbfcd899a140b4bd8fc836" Jan 20 17:46:52 crc kubenswrapper[4558]: I0120 17:46:52.420404 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7286e2ef42d2b070bcebf08a04bdbce2188bb19a50dbfcd899a140b4bd8fc836"} err="failed to get 
container status \"7286e2ef42d2b070bcebf08a04bdbce2188bb19a50dbfcd899a140b4bd8fc836\": rpc error: code = NotFound desc = could not find container \"7286e2ef42d2b070bcebf08a04bdbce2188bb19a50dbfcd899a140b4bd8fc836\": container with ID starting with 7286e2ef42d2b070bcebf08a04bdbce2188bb19a50dbfcd899a140b4bd8fc836 not found: ID does not exist" Jan 20 17:46:52 crc kubenswrapper[4558]: I0120 17:46:52.577642 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d7f1ecf4-9630-44e7-a627-cc63de361385" path="/var/lib/kubelet/pods/d7f1ecf4-9630-44e7-a627-cc63de361385/volumes" Jan 20 17:47:04 crc kubenswrapper[4558]: I0120 17:47:04.523481 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:47:04 crc kubenswrapper[4558]: I0120 17:47:04.801108 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.000412 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-db-create-z66dg"] Jan 20 17:47:05 crc kubenswrapper[4558]: E0120 17:47:05.000802 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ea6811a-c54b-451e-9705-5ba4e9b5622f" containerName="mariadb-account-create-update" Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.000821 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ea6811a-c54b-451e-9705-5ba4e9b5622f" containerName="mariadb-account-create-update" Jan 20 17:47:05 crc kubenswrapper[4558]: E0120 17:47:05.000839 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7f1ecf4-9630-44e7-a627-cc63de361385" containerName="dnsmasq-dns" Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.000846 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7f1ecf4-9630-44e7-a627-cc63de361385" containerName="dnsmasq-dns" Jan 20 17:47:05 crc kubenswrapper[4558]: E0120 17:47:05.000873 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7f1ecf4-9630-44e7-a627-cc63de361385" containerName="init" Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.000878 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7f1ecf4-9630-44e7-a627-cc63de361385" containerName="init" Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.001073 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="7ea6811a-c54b-451e-9705-5ba4e9b5622f" containerName="mariadb-account-create-update" Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.001099 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d7f1ecf4-9630-44e7-a627-cc63de361385" containerName="dnsmasq-dns" Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.001697 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-db-create-z66dg" Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.024527 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-db-create-z66dg"] Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.043429 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-a8ee-account-create-update-t5zdv"] Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.044827 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-a8ee-account-create-update-t5zdv" Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.046690 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-db-secret" Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.049789 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-a8ee-account-create-update-t5zdv"] Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.082240 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-db-create-9cjqc"] Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.083326 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-db-create-9cjqc" Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.099264 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-db-create-9cjqc"] Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.113310 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-e4fd-account-create-update-zswcv"] Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.114486 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-e4fd-account-create-update-zswcv" Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.119555 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-db-secret" Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.123187 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-e4fd-account-create-update-zswcv"] Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.135446 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/206eba3c-fb3b-4779-acfe-5d63f0dfc9fa-operator-scripts\") pod \"cinder-db-create-z66dg\" (UID: \"206eba3c-fb3b-4779-acfe-5d63f0dfc9fa\") " pod="openstack-kuttl-tests/cinder-db-create-z66dg" Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.135500 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7nf28\" (UniqueName: \"kubernetes.io/projected/206eba3c-fb3b-4779-acfe-5d63f0dfc9fa-kube-api-access-7nf28\") pod \"cinder-db-create-z66dg\" (UID: \"206eba3c-fb3b-4779-acfe-5d63f0dfc9fa\") " pod="openstack-kuttl-tests/cinder-db-create-z66dg" Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.135541 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/74ff4ade-5ef8-4f1d-81c1-ca1b0a85f84c-operator-scripts\") pod \"barbican-db-create-9cjqc\" (UID: \"74ff4ade-5ef8-4f1d-81c1-ca1b0a85f84c\") " pod="openstack-kuttl-tests/barbican-db-create-9cjqc" Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.135574 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ch4xt\" (UniqueName: \"kubernetes.io/projected/74ff4ade-5ef8-4f1d-81c1-ca1b0a85f84c-kube-api-access-ch4xt\") pod \"barbican-db-create-9cjqc\" (UID: \"74ff4ade-5ef8-4f1d-81c1-ca1b0a85f84c\") " pod="openstack-kuttl-tests/barbican-db-create-9cjqc" Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.135613 4558 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2zl6c\" (UniqueName: \"kubernetes.io/projected/3b209c02-0fa1-4783-9b88-390400b5282c-kube-api-access-2zl6c\") pod \"barbican-a8ee-account-create-update-t5zdv\" (UID: \"3b209c02-0fa1-4783-9b88-390400b5282c\") " pod="openstack-kuttl-tests/barbican-a8ee-account-create-update-t5zdv" Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.135633 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3b209c02-0fa1-4783-9b88-390400b5282c-operator-scripts\") pod \"barbican-a8ee-account-create-update-t5zdv\" (UID: \"3b209c02-0fa1-4783-9b88-390400b5282c\") " pod="openstack-kuttl-tests/barbican-a8ee-account-create-update-t5zdv" Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.184774 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-db-create-wstvp"] Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.185788 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-db-create-wstvp" Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.195319 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-db-create-wstvp"] Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.238097 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/74ff4ade-5ef8-4f1d-81c1-ca1b0a85f84c-operator-scripts\") pod \"barbican-db-create-9cjqc\" (UID: \"74ff4ade-5ef8-4f1d-81c1-ca1b0a85f84c\") " pod="openstack-kuttl-tests/barbican-db-create-9cjqc" Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.238149 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0482f0bb-b64a-407b-954f-8f0f8a04572f-operator-scripts\") pod \"neutron-db-create-wstvp\" (UID: \"0482f0bb-b64a-407b-954f-8f0f8a04572f\") " pod="openstack-kuttl-tests/neutron-db-create-wstvp" Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.238313 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ch4xt\" (UniqueName: \"kubernetes.io/projected/74ff4ade-5ef8-4f1d-81c1-ca1b0a85f84c-kube-api-access-ch4xt\") pod \"barbican-db-create-9cjqc\" (UID: \"74ff4ade-5ef8-4f1d-81c1-ca1b0a85f84c\") " pod="openstack-kuttl-tests/barbican-db-create-9cjqc" Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.238400 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2zl6c\" (UniqueName: \"kubernetes.io/projected/3b209c02-0fa1-4783-9b88-390400b5282c-kube-api-access-2zl6c\") pod \"barbican-a8ee-account-create-update-t5zdv\" (UID: \"3b209c02-0fa1-4783-9b88-390400b5282c\") " pod="openstack-kuttl-tests/barbican-a8ee-account-create-update-t5zdv" Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.238435 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3b209c02-0fa1-4783-9b88-390400b5282c-operator-scripts\") pod \"barbican-a8ee-account-create-update-t5zdv\" (UID: \"3b209c02-0fa1-4783-9b88-390400b5282c\") " pod="openstack-kuttl-tests/barbican-a8ee-account-create-update-t5zdv" Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.238507 4558 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x7tlb\" (UniqueName: \"kubernetes.io/projected/b72bfcae-4aa5-47d2-8e1c-b2e6bfbac76a-kube-api-access-x7tlb\") pod \"cinder-e4fd-account-create-update-zswcv\" (UID: \"b72bfcae-4aa5-47d2-8e1c-b2e6bfbac76a\") " pod="openstack-kuttl-tests/cinder-e4fd-account-create-update-zswcv" Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.238558 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b72bfcae-4aa5-47d2-8e1c-b2e6bfbac76a-operator-scripts\") pod \"cinder-e4fd-account-create-update-zswcv\" (UID: \"b72bfcae-4aa5-47d2-8e1c-b2e6bfbac76a\") " pod="openstack-kuttl-tests/cinder-e4fd-account-create-update-zswcv" Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.238810 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/206eba3c-fb3b-4779-acfe-5d63f0dfc9fa-operator-scripts\") pod \"cinder-db-create-z66dg\" (UID: \"206eba3c-fb3b-4779-acfe-5d63f0dfc9fa\") " pod="openstack-kuttl-tests/cinder-db-create-z66dg" Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.238978 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2rdrk\" (UniqueName: \"kubernetes.io/projected/0482f0bb-b64a-407b-954f-8f0f8a04572f-kube-api-access-2rdrk\") pod \"neutron-db-create-wstvp\" (UID: \"0482f0bb-b64a-407b-954f-8f0f8a04572f\") " pod="openstack-kuttl-tests/neutron-db-create-wstvp" Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.238982 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/74ff4ade-5ef8-4f1d-81c1-ca1b0a85f84c-operator-scripts\") pod \"barbican-db-create-9cjqc\" (UID: \"74ff4ade-5ef8-4f1d-81c1-ca1b0a85f84c\") " pod="openstack-kuttl-tests/barbican-db-create-9cjqc" Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.239077 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7nf28\" (UniqueName: \"kubernetes.io/projected/206eba3c-fb3b-4779-acfe-5d63f0dfc9fa-kube-api-access-7nf28\") pod \"cinder-db-create-z66dg\" (UID: \"206eba3c-fb3b-4779-acfe-5d63f0dfc9fa\") " pod="openstack-kuttl-tests/cinder-db-create-z66dg" Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.239681 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3b209c02-0fa1-4783-9b88-390400b5282c-operator-scripts\") pod \"barbican-a8ee-account-create-update-t5zdv\" (UID: \"3b209c02-0fa1-4783-9b88-390400b5282c\") " pod="openstack-kuttl-tests/barbican-a8ee-account-create-update-t5zdv" Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.239712 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/206eba3c-fb3b-4779-acfe-5d63f0dfc9fa-operator-scripts\") pod \"cinder-db-create-z66dg\" (UID: \"206eba3c-fb3b-4779-acfe-5d63f0dfc9fa\") " pod="openstack-kuttl-tests/cinder-db-create-z66dg" Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.258679 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2zl6c\" (UniqueName: \"kubernetes.io/projected/3b209c02-0fa1-4783-9b88-390400b5282c-kube-api-access-2zl6c\") pod \"barbican-a8ee-account-create-update-t5zdv\" (UID: 
\"3b209c02-0fa1-4783-9b88-390400b5282c\") " pod="openstack-kuttl-tests/barbican-a8ee-account-create-update-t5zdv" Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.259803 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7nf28\" (UniqueName: \"kubernetes.io/projected/206eba3c-fb3b-4779-acfe-5d63f0dfc9fa-kube-api-access-7nf28\") pod \"cinder-db-create-z66dg\" (UID: \"206eba3c-fb3b-4779-acfe-5d63f0dfc9fa\") " pod="openstack-kuttl-tests/cinder-db-create-z66dg" Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.260672 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ch4xt\" (UniqueName: \"kubernetes.io/projected/74ff4ade-5ef8-4f1d-81c1-ca1b0a85f84c-kube-api-access-ch4xt\") pod \"barbican-db-create-9cjqc\" (UID: \"74ff4ade-5ef8-4f1d-81c1-ca1b0a85f84c\") " pod="openstack-kuttl-tests/barbican-db-create-9cjqc" Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.318339 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-db-create-z66dg" Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.337644 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-db-sync-nx4l8"] Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.339021 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-db-sync-nx4l8" Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.341332 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0482f0bb-b64a-407b-954f-8f0f8a04572f-operator-scripts\") pod \"neutron-db-create-wstvp\" (UID: \"0482f0bb-b64a-407b-954f-8f0f8a04572f\") " pod="openstack-kuttl-tests/neutron-db-create-wstvp" Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.341550 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x7tlb\" (UniqueName: \"kubernetes.io/projected/b72bfcae-4aa5-47d2-8e1c-b2e6bfbac76a-kube-api-access-x7tlb\") pod \"cinder-e4fd-account-create-update-zswcv\" (UID: \"b72bfcae-4aa5-47d2-8e1c-b2e6bfbac76a\") " pod="openstack-kuttl-tests/cinder-e4fd-account-create-update-zswcv" Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.341769 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b72bfcae-4aa5-47d2-8e1c-b2e6bfbac76a-operator-scripts\") pod \"cinder-e4fd-account-create-update-zswcv\" (UID: \"b72bfcae-4aa5-47d2-8e1c-b2e6bfbac76a\") " pod="openstack-kuttl-tests/cinder-e4fd-account-create-update-zswcv" Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.341917 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2rdrk\" (UniqueName: \"kubernetes.io/projected/0482f0bb-b64a-407b-954f-8f0f8a04572f-kube-api-access-2rdrk\") pod \"neutron-db-create-wstvp\" (UID: \"0482f0bb-b64a-407b-954f-8f0f8a04572f\") " pod="openstack-kuttl-tests/neutron-db-create-wstvp" Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.342276 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0482f0bb-b64a-407b-954f-8f0f8a04572f-operator-scripts\") pod \"neutron-db-create-wstvp\" (UID: \"0482f0bb-b64a-407b-954f-8f0f8a04572f\") " pod="openstack-kuttl-tests/neutron-db-create-wstvp" Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 
17:47:05.343500 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b72bfcae-4aa5-47d2-8e1c-b2e6bfbac76a-operator-scripts\") pod \"cinder-e4fd-account-create-update-zswcv\" (UID: \"b72bfcae-4aa5-47d2-8e1c-b2e6bfbac76a\") " pod="openstack-kuttl-tests/cinder-e4fd-account-create-update-zswcv" Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.344270 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone" Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.344345 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-config-data" Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.344425 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-scripts" Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.344598 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-keystone-dockercfg-8dfwc" Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.346568 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-db-sync-nx4l8"] Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.374610 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-a8ee-account-create-update-t5zdv" Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.383430 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x7tlb\" (UniqueName: \"kubernetes.io/projected/b72bfcae-4aa5-47d2-8e1c-b2e6bfbac76a-kube-api-access-x7tlb\") pod \"cinder-e4fd-account-create-update-zswcv\" (UID: \"b72bfcae-4aa5-47d2-8e1c-b2e6bfbac76a\") " pod="openstack-kuttl-tests/cinder-e4fd-account-create-update-zswcv" Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.397781 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2rdrk\" (UniqueName: \"kubernetes.io/projected/0482f0bb-b64a-407b-954f-8f0f8a04572f-kube-api-access-2rdrk\") pod \"neutron-db-create-wstvp\" (UID: \"0482f0bb-b64a-407b-954f-8f0f8a04572f\") " pod="openstack-kuttl-tests/neutron-db-create-wstvp" Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.415002 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-db-create-9cjqc" Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.429655 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-e4fd-account-create-update-zswcv" Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.445047 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/344d4281-d7b6-49a8-8a10-62cf1940fa5c-combined-ca-bundle\") pod \"keystone-db-sync-nx4l8\" (UID: \"344d4281-d7b6-49a8-8a10-62cf1940fa5c\") " pod="openstack-kuttl-tests/keystone-db-sync-nx4l8" Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.445120 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-76vn5\" (UniqueName: \"kubernetes.io/projected/344d4281-d7b6-49a8-8a10-62cf1940fa5c-kube-api-access-76vn5\") pod \"keystone-db-sync-nx4l8\" (UID: \"344d4281-d7b6-49a8-8a10-62cf1940fa5c\") " pod="openstack-kuttl-tests/keystone-db-sync-nx4l8" Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.445179 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/344d4281-d7b6-49a8-8a10-62cf1940fa5c-config-data\") pod \"keystone-db-sync-nx4l8\" (UID: \"344d4281-d7b6-49a8-8a10-62cf1940fa5c\") " pod="openstack-kuttl-tests/keystone-db-sync-nx4l8" Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.460805 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-a1e2-account-create-update-rmdlk"] Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.462201 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-a1e2-account-create-update-rmdlk" Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.467110 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-a1e2-account-create-update-rmdlk"] Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.467573 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-db-secret" Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.498108 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-db-create-wstvp" Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.551095 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-76vn5\" (UniqueName: \"kubernetes.io/projected/344d4281-d7b6-49a8-8a10-62cf1940fa5c-kube-api-access-76vn5\") pod \"keystone-db-sync-nx4l8\" (UID: \"344d4281-d7b6-49a8-8a10-62cf1940fa5c\") " pod="openstack-kuttl-tests/keystone-db-sync-nx4l8" Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.551140 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/344d4281-d7b6-49a8-8a10-62cf1940fa5c-config-data\") pod \"keystone-db-sync-nx4l8\" (UID: \"344d4281-d7b6-49a8-8a10-62cf1940fa5c\") " pod="openstack-kuttl-tests/keystone-db-sync-nx4l8" Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.551214 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k5f7l\" (UniqueName: \"kubernetes.io/projected/11851b63-4564-4f0e-b5f7-16e34e358eca-kube-api-access-k5f7l\") pod \"neutron-a1e2-account-create-update-rmdlk\" (UID: \"11851b63-4564-4f0e-b5f7-16e34e358eca\") " pod="openstack-kuttl-tests/neutron-a1e2-account-create-update-rmdlk" Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.551257 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/11851b63-4564-4f0e-b5f7-16e34e358eca-operator-scripts\") pod \"neutron-a1e2-account-create-update-rmdlk\" (UID: \"11851b63-4564-4f0e-b5f7-16e34e358eca\") " pod="openstack-kuttl-tests/neutron-a1e2-account-create-update-rmdlk" Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.551322 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/344d4281-d7b6-49a8-8a10-62cf1940fa5c-combined-ca-bundle\") pod \"keystone-db-sync-nx4l8\" (UID: \"344d4281-d7b6-49a8-8a10-62cf1940fa5c\") " pod="openstack-kuttl-tests/keystone-db-sync-nx4l8" Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.558115 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/344d4281-d7b6-49a8-8a10-62cf1940fa5c-config-data\") pod \"keystone-db-sync-nx4l8\" (UID: \"344d4281-d7b6-49a8-8a10-62cf1940fa5c\") " pod="openstack-kuttl-tests/keystone-db-sync-nx4l8" Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.569063 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/344d4281-d7b6-49a8-8a10-62cf1940fa5c-combined-ca-bundle\") pod \"keystone-db-sync-nx4l8\" (UID: \"344d4281-d7b6-49a8-8a10-62cf1940fa5c\") " pod="openstack-kuttl-tests/keystone-db-sync-nx4l8" Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.573143 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-76vn5\" (UniqueName: \"kubernetes.io/projected/344d4281-d7b6-49a8-8a10-62cf1940fa5c-kube-api-access-76vn5\") pod \"keystone-db-sync-nx4l8\" (UID: \"344d4281-d7b6-49a8-8a10-62cf1940fa5c\") " pod="openstack-kuttl-tests/keystone-db-sync-nx4l8" Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.652958 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k5f7l\" (UniqueName: 
\"kubernetes.io/projected/11851b63-4564-4f0e-b5f7-16e34e358eca-kube-api-access-k5f7l\") pod \"neutron-a1e2-account-create-update-rmdlk\" (UID: \"11851b63-4564-4f0e-b5f7-16e34e358eca\") " pod="openstack-kuttl-tests/neutron-a1e2-account-create-update-rmdlk" Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.653080 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/11851b63-4564-4f0e-b5f7-16e34e358eca-operator-scripts\") pod \"neutron-a1e2-account-create-update-rmdlk\" (UID: \"11851b63-4564-4f0e-b5f7-16e34e358eca\") " pod="openstack-kuttl-tests/neutron-a1e2-account-create-update-rmdlk" Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.653749 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/11851b63-4564-4f0e-b5f7-16e34e358eca-operator-scripts\") pod \"neutron-a1e2-account-create-update-rmdlk\" (UID: \"11851b63-4564-4f0e-b5f7-16e34e358eca\") " pod="openstack-kuttl-tests/neutron-a1e2-account-create-update-rmdlk" Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.666081 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-db-create-z66dg"] Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.666868 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k5f7l\" (UniqueName: \"kubernetes.io/projected/11851b63-4564-4f0e-b5f7-16e34e358eca-kube-api-access-k5f7l\") pod \"neutron-a1e2-account-create-update-rmdlk\" (UID: \"11851b63-4564-4f0e-b5f7-16e34e358eca\") " pod="openstack-kuttl-tests/neutron-a1e2-account-create-update-rmdlk" Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.730911 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-db-sync-nx4l8" Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.788229 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-a1e2-account-create-update-rmdlk" Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.961076 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-a8ee-account-create-update-t5zdv"] Jan 20 17:47:05 crc kubenswrapper[4558]: I0120 17:47:05.996391 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-db-sync-nx4l8"] Jan 20 17:47:06 crc kubenswrapper[4558]: W0120 17:47:06.008438 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod344d4281_d7b6_49a8_8a10_62cf1940fa5c.slice/crio-9a5c0b95f32eed4ad9bb2f8f2de0988747a8794316dcd25d9ec92dd251f45e94 WatchSource:0}: Error finding container 9a5c0b95f32eed4ad9bb2f8f2de0988747a8794316dcd25d9ec92dd251f45e94: Status 404 returned error can't find the container with id 9a5c0b95f32eed4ad9bb2f8f2de0988747a8794316dcd25d9ec92dd251f45e94 Jan 20 17:47:06 crc kubenswrapper[4558]: I0120 17:47:06.037178 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-e4fd-account-create-update-zswcv"] Jan 20 17:47:06 crc kubenswrapper[4558]: I0120 17:47:06.067734 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-db-create-9cjqc"] Jan 20 17:47:06 crc kubenswrapper[4558]: I0120 17:47:06.123808 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-db-create-wstvp"] Jan 20 17:47:06 crc kubenswrapper[4558]: I0120 17:47:06.299374 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-a1e2-account-create-update-rmdlk"] Jan 20 17:47:06 crc kubenswrapper[4558]: W0120 17:47:06.307486 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod11851b63_4564_4f0e_b5f7_16e34e358eca.slice/crio-5a406d8af97adae2b4f86fc4bd9d0568a57e0125afdb48cd86d91e9f32d1a949 WatchSource:0}: Error finding container 5a406d8af97adae2b4f86fc4bd9d0568a57e0125afdb48cd86d91e9f32d1a949: Status 404 returned error can't find the container with id 5a406d8af97adae2b4f86fc4bd9d0568a57e0125afdb48cd86d91e9f32d1a949 Jan 20 17:47:06 crc kubenswrapper[4558]: I0120 17:47:06.506152 4558 generic.go:334] "Generic (PLEG): container finished" podID="0482f0bb-b64a-407b-954f-8f0f8a04572f" containerID="89888a23e0d26b9c3288dac8f4c5d490eb219f4b8f1f24588119483c51792e56" exitCode=0 Jan 20 17:47:06 crc kubenswrapper[4558]: I0120 17:47:06.506220 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-create-wstvp" event={"ID":"0482f0bb-b64a-407b-954f-8f0f8a04572f","Type":"ContainerDied","Data":"89888a23e0d26b9c3288dac8f4c5d490eb219f4b8f1f24588119483c51792e56"} Jan 20 17:47:06 crc kubenswrapper[4558]: I0120 17:47:06.506282 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-create-wstvp" event={"ID":"0482f0bb-b64a-407b-954f-8f0f8a04572f","Type":"ContainerStarted","Data":"5b6fff58c2c1d655fbafec27b1da24de3c7d89e79a44c000bf0a644740eb7881"} Jan 20 17:47:06 crc kubenswrapper[4558]: I0120 17:47:06.508145 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-sync-nx4l8" event={"ID":"344d4281-d7b6-49a8-8a10-62cf1940fa5c","Type":"ContainerStarted","Data":"c64c1c6b089bf0563dad7d86569ad1d73d4507d9ac97c8012b53e092021479e5"} Jan 20 17:47:06 crc kubenswrapper[4558]: I0120 17:47:06.508282 4558 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-sync-nx4l8" event={"ID":"344d4281-d7b6-49a8-8a10-62cf1940fa5c","Type":"ContainerStarted","Data":"9a5c0b95f32eed4ad9bb2f8f2de0988747a8794316dcd25d9ec92dd251f45e94"} Jan 20 17:47:06 crc kubenswrapper[4558]: I0120 17:47:06.510363 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-a1e2-account-create-update-rmdlk" event={"ID":"11851b63-4564-4f0e-b5f7-16e34e358eca","Type":"ContainerStarted","Data":"99cb94cc5f3f7962fae34e8aeb4e78c1758c65b96c2f5f366e9c06f6ebfe81c9"} Jan 20 17:47:06 crc kubenswrapper[4558]: I0120 17:47:06.510405 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-a1e2-account-create-update-rmdlk" event={"ID":"11851b63-4564-4f0e-b5f7-16e34e358eca","Type":"ContainerStarted","Data":"5a406d8af97adae2b4f86fc4bd9d0568a57e0125afdb48cd86d91e9f32d1a949"} Jan 20 17:47:06 crc kubenswrapper[4558]: I0120 17:47:06.512024 4558 generic.go:334] "Generic (PLEG): container finished" podID="b72bfcae-4aa5-47d2-8e1c-b2e6bfbac76a" containerID="64972ee4dabb5ea14c0f36ff7a7b7a13891412258c4e910ce71c924bb926e69e" exitCode=0 Jan 20 17:47:06 crc kubenswrapper[4558]: I0120 17:47:06.512079 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-e4fd-account-create-update-zswcv" event={"ID":"b72bfcae-4aa5-47d2-8e1c-b2e6bfbac76a","Type":"ContainerDied","Data":"64972ee4dabb5ea14c0f36ff7a7b7a13891412258c4e910ce71c924bb926e69e"} Jan 20 17:47:06 crc kubenswrapper[4558]: I0120 17:47:06.512098 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-e4fd-account-create-update-zswcv" event={"ID":"b72bfcae-4aa5-47d2-8e1c-b2e6bfbac76a","Type":"ContainerStarted","Data":"bf8e9a97e964c5affcc53720e08661d22fbbaa2fb2897ad742750ae126a2e73c"} Jan 20 17:47:06 crc kubenswrapper[4558]: I0120 17:47:06.513767 4558 generic.go:334] "Generic (PLEG): container finished" podID="206eba3c-fb3b-4779-acfe-5d63f0dfc9fa" containerID="9bb296664d4062cc389adf791e64578806a256d24518a7353d119de85df45770" exitCode=0 Jan 20 17:47:06 crc kubenswrapper[4558]: I0120 17:47:06.513840 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-create-z66dg" event={"ID":"206eba3c-fb3b-4779-acfe-5d63f0dfc9fa","Type":"ContainerDied","Data":"9bb296664d4062cc389adf791e64578806a256d24518a7353d119de85df45770"} Jan 20 17:47:06 crc kubenswrapper[4558]: I0120 17:47:06.513869 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-create-z66dg" event={"ID":"206eba3c-fb3b-4779-acfe-5d63f0dfc9fa","Type":"ContainerStarted","Data":"23b339b35570439c761685415e27134b349ca5d6cb6957ea4570161df1c965b9"} Jan 20 17:47:06 crc kubenswrapper[4558]: I0120 17:47:06.515356 4558 generic.go:334] "Generic (PLEG): container finished" podID="3b209c02-0fa1-4783-9b88-390400b5282c" containerID="68558968f523e7ff4508593f398776870a248bc02f552e2e5df1ccbb58abf958" exitCode=0 Jan 20 17:47:06 crc kubenswrapper[4558]: I0120 17:47:06.515411 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-a8ee-account-create-update-t5zdv" event={"ID":"3b209c02-0fa1-4783-9b88-390400b5282c","Type":"ContainerDied","Data":"68558968f523e7ff4508593f398776870a248bc02f552e2e5df1ccbb58abf958"} Jan 20 17:47:06 crc kubenswrapper[4558]: I0120 17:47:06.515430 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-a8ee-account-create-update-t5zdv" 
event={"ID":"3b209c02-0fa1-4783-9b88-390400b5282c","Type":"ContainerStarted","Data":"101105bb6d9ebd72c2baa501acfa6715f6c764d4e7aadae0612de531b57338ee"} Jan 20 17:47:06 crc kubenswrapper[4558]: I0120 17:47:06.517059 4558 generic.go:334] "Generic (PLEG): container finished" podID="74ff4ade-5ef8-4f1d-81c1-ca1b0a85f84c" containerID="3e30e162193fcb14bcc9faaa352f5c790e274d276a5e03c359619caba0cc19d9" exitCode=0 Jan 20 17:47:06 crc kubenswrapper[4558]: I0120 17:47:06.517182 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-create-9cjqc" event={"ID":"74ff4ade-5ef8-4f1d-81c1-ca1b0a85f84c","Type":"ContainerDied","Data":"3e30e162193fcb14bcc9faaa352f5c790e274d276a5e03c359619caba0cc19d9"} Jan 20 17:47:06 crc kubenswrapper[4558]: I0120 17:47:06.517271 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-create-9cjqc" event={"ID":"74ff4ade-5ef8-4f1d-81c1-ca1b0a85f84c","Type":"ContainerStarted","Data":"82771e30f25744ab03480c4723fcf149f42afee9f49d843f3f3f2b287134857f"} Jan 20 17:47:06 crc kubenswrapper[4558]: I0120 17:47:06.553587 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/keystone-db-sync-nx4l8" podStartSLOduration=1.553573326 podStartE2EDuration="1.553573326s" podCreationTimestamp="2026-01-20 17:47:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:47:06.549853662 +0000 UTC m=+3920.310191629" watchObservedRunningTime="2026-01-20 17:47:06.553573326 +0000 UTC m=+3920.313911293" Jan 20 17:47:06 crc kubenswrapper[4558]: I0120 17:47:06.571296 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/neutron-a1e2-account-create-update-rmdlk" podStartSLOduration=1.571273049 podStartE2EDuration="1.571273049s" podCreationTimestamp="2026-01-20 17:47:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:47:06.563874587 +0000 UTC m=+3920.324212554" watchObservedRunningTime="2026-01-20 17:47:06.571273049 +0000 UTC m=+3920.331611016" Jan 20 17:47:07 crc kubenswrapper[4558]: I0120 17:47:07.527079 4558 generic.go:334] "Generic (PLEG): container finished" podID="11851b63-4564-4f0e-b5f7-16e34e358eca" containerID="99cb94cc5f3f7962fae34e8aeb4e78c1758c65b96c2f5f366e9c06f6ebfe81c9" exitCode=0 Jan 20 17:47:07 crc kubenswrapper[4558]: I0120 17:47:07.527191 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-a1e2-account-create-update-rmdlk" event={"ID":"11851b63-4564-4f0e-b5f7-16e34e358eca","Type":"ContainerDied","Data":"99cb94cc5f3f7962fae34e8aeb4e78c1758c65b96c2f5f366e9c06f6ebfe81c9"} Jan 20 17:47:07 crc kubenswrapper[4558]: I0120 17:47:07.918823 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-e4fd-account-create-update-zswcv" Jan 20 17:47:08 crc kubenswrapper[4558]: I0120 17:47:08.005143 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b72bfcae-4aa5-47d2-8e1c-b2e6bfbac76a-operator-scripts\") pod \"b72bfcae-4aa5-47d2-8e1c-b2e6bfbac76a\" (UID: \"b72bfcae-4aa5-47d2-8e1c-b2e6bfbac76a\") " Jan 20 17:47:08 crc kubenswrapper[4558]: I0120 17:47:08.005265 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7tlb\" (UniqueName: \"kubernetes.io/projected/b72bfcae-4aa5-47d2-8e1c-b2e6bfbac76a-kube-api-access-x7tlb\") pod \"b72bfcae-4aa5-47d2-8e1c-b2e6bfbac76a\" (UID: \"b72bfcae-4aa5-47d2-8e1c-b2e6bfbac76a\") " Jan 20 17:47:08 crc kubenswrapper[4558]: I0120 17:47:08.006374 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b72bfcae-4aa5-47d2-8e1c-b2e6bfbac76a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b72bfcae-4aa5-47d2-8e1c-b2e6bfbac76a" (UID: "b72bfcae-4aa5-47d2-8e1c-b2e6bfbac76a"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:47:08 crc kubenswrapper[4558]: I0120 17:47:08.011828 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b72bfcae-4aa5-47d2-8e1c-b2e6bfbac76a-kube-api-access-x7tlb" (OuterVolumeSpecName: "kube-api-access-x7tlb") pod "b72bfcae-4aa5-47d2-8e1c-b2e6bfbac76a" (UID: "b72bfcae-4aa5-47d2-8e1c-b2e6bfbac76a"). InnerVolumeSpecName "kube-api-access-x7tlb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:47:08 crc kubenswrapper[4558]: I0120 17:47:08.061145 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-a8ee-account-create-update-t5zdv" Jan 20 17:47:08 crc kubenswrapper[4558]: I0120 17:47:08.067413 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-db-create-z66dg" Jan 20 17:47:08 crc kubenswrapper[4558]: I0120 17:47:08.073072 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-db-create-9cjqc" Jan 20 17:47:08 crc kubenswrapper[4558]: I0120 17:47:08.079432 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-db-create-wstvp" Jan 20 17:47:08 crc kubenswrapper[4558]: I0120 17:47:08.107123 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3b209c02-0fa1-4783-9b88-390400b5282c-operator-scripts\") pod \"3b209c02-0fa1-4783-9b88-390400b5282c\" (UID: \"3b209c02-0fa1-4783-9b88-390400b5282c\") " Jan 20 17:47:08 crc kubenswrapper[4558]: I0120 17:47:08.107235 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2zl6c\" (UniqueName: \"kubernetes.io/projected/3b209c02-0fa1-4783-9b88-390400b5282c-kube-api-access-2zl6c\") pod \"3b209c02-0fa1-4783-9b88-390400b5282c\" (UID: \"3b209c02-0fa1-4783-9b88-390400b5282c\") " Jan 20 17:47:08 crc kubenswrapper[4558]: I0120 17:47:08.107448 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/206eba3c-fb3b-4779-acfe-5d63f0dfc9fa-operator-scripts\") pod \"206eba3c-fb3b-4779-acfe-5d63f0dfc9fa\" (UID: \"206eba3c-fb3b-4779-acfe-5d63f0dfc9fa\") " Jan 20 17:47:08 crc kubenswrapper[4558]: I0120 17:47:08.107514 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7nf28\" (UniqueName: \"kubernetes.io/projected/206eba3c-fb3b-4779-acfe-5d63f0dfc9fa-kube-api-access-7nf28\") pod \"206eba3c-fb3b-4779-acfe-5d63f0dfc9fa\" (UID: \"206eba3c-fb3b-4779-acfe-5d63f0dfc9fa\") " Jan 20 17:47:08 crc kubenswrapper[4558]: I0120 17:47:08.107606 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3b209c02-0fa1-4783-9b88-390400b5282c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "3b209c02-0fa1-4783-9b88-390400b5282c" (UID: "3b209c02-0fa1-4783-9b88-390400b5282c"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:47:08 crc kubenswrapper[4558]: I0120 17:47:08.107893 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/206eba3c-fb3b-4779-acfe-5d63f0dfc9fa-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "206eba3c-fb3b-4779-acfe-5d63f0dfc9fa" (UID: "206eba3c-fb3b-4779-acfe-5d63f0dfc9fa"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:47:08 crc kubenswrapper[4558]: I0120 17:47:08.108266 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b72bfcae-4aa5-47d2-8e1c-b2e6bfbac76a-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:08 crc kubenswrapper[4558]: I0120 17:47:08.108284 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/206eba3c-fb3b-4779-acfe-5d63f0dfc9fa-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:08 crc kubenswrapper[4558]: I0120 17:47:08.108296 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7tlb\" (UniqueName: \"kubernetes.io/projected/b72bfcae-4aa5-47d2-8e1c-b2e6bfbac76a-kube-api-access-x7tlb\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:08 crc kubenswrapper[4558]: I0120 17:47:08.108308 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3b209c02-0fa1-4783-9b88-390400b5282c-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:08 crc kubenswrapper[4558]: I0120 17:47:08.113202 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/206eba3c-fb3b-4779-acfe-5d63f0dfc9fa-kube-api-access-7nf28" (OuterVolumeSpecName: "kube-api-access-7nf28") pod "206eba3c-fb3b-4779-acfe-5d63f0dfc9fa" (UID: "206eba3c-fb3b-4779-acfe-5d63f0dfc9fa"). InnerVolumeSpecName "kube-api-access-7nf28". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:47:08 crc kubenswrapper[4558]: I0120 17:47:08.113316 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3b209c02-0fa1-4783-9b88-390400b5282c-kube-api-access-2zl6c" (OuterVolumeSpecName: "kube-api-access-2zl6c") pod "3b209c02-0fa1-4783-9b88-390400b5282c" (UID: "3b209c02-0fa1-4783-9b88-390400b5282c"). InnerVolumeSpecName "kube-api-access-2zl6c". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:47:08 crc kubenswrapper[4558]: I0120 17:47:08.208833 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0482f0bb-b64a-407b-954f-8f0f8a04572f-operator-scripts\") pod \"0482f0bb-b64a-407b-954f-8f0f8a04572f\" (UID: \"0482f0bb-b64a-407b-954f-8f0f8a04572f\") " Jan 20 17:47:08 crc kubenswrapper[4558]: I0120 17:47:08.208961 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/74ff4ade-5ef8-4f1d-81c1-ca1b0a85f84c-operator-scripts\") pod \"74ff4ade-5ef8-4f1d-81c1-ca1b0a85f84c\" (UID: \"74ff4ade-5ef8-4f1d-81c1-ca1b0a85f84c\") " Jan 20 17:47:08 crc kubenswrapper[4558]: I0120 17:47:08.209008 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2rdrk\" (UniqueName: \"kubernetes.io/projected/0482f0bb-b64a-407b-954f-8f0f8a04572f-kube-api-access-2rdrk\") pod \"0482f0bb-b64a-407b-954f-8f0f8a04572f\" (UID: \"0482f0bb-b64a-407b-954f-8f0f8a04572f\") " Jan 20 17:47:08 crc kubenswrapper[4558]: I0120 17:47:08.209049 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ch4xt\" (UniqueName: \"kubernetes.io/projected/74ff4ade-5ef8-4f1d-81c1-ca1b0a85f84c-kube-api-access-ch4xt\") pod \"74ff4ade-5ef8-4f1d-81c1-ca1b0a85f84c\" (UID: \"74ff4ade-5ef8-4f1d-81c1-ca1b0a85f84c\") " Jan 20 17:47:08 crc kubenswrapper[4558]: I0120 17:47:08.209229 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0482f0bb-b64a-407b-954f-8f0f8a04572f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "0482f0bb-b64a-407b-954f-8f0f8a04572f" (UID: "0482f0bb-b64a-407b-954f-8f0f8a04572f"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:47:08 crc kubenswrapper[4558]: I0120 17:47:08.209780 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7nf28\" (UniqueName: \"kubernetes.io/projected/206eba3c-fb3b-4779-acfe-5d63f0dfc9fa-kube-api-access-7nf28\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:08 crc kubenswrapper[4558]: I0120 17:47:08.209804 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0482f0bb-b64a-407b-954f-8f0f8a04572f-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:08 crc kubenswrapper[4558]: I0120 17:47:08.209813 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2zl6c\" (UniqueName: \"kubernetes.io/projected/3b209c02-0fa1-4783-9b88-390400b5282c-kube-api-access-2zl6c\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:08 crc kubenswrapper[4558]: I0120 17:47:08.209895 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/74ff4ade-5ef8-4f1d-81c1-ca1b0a85f84c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "74ff4ade-5ef8-4f1d-81c1-ca1b0a85f84c" (UID: "74ff4ade-5ef8-4f1d-81c1-ca1b0a85f84c"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:47:08 crc kubenswrapper[4558]: I0120 17:47:08.211652 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/74ff4ade-5ef8-4f1d-81c1-ca1b0a85f84c-kube-api-access-ch4xt" (OuterVolumeSpecName: "kube-api-access-ch4xt") pod "74ff4ade-5ef8-4f1d-81c1-ca1b0a85f84c" (UID: "74ff4ade-5ef8-4f1d-81c1-ca1b0a85f84c"). InnerVolumeSpecName "kube-api-access-ch4xt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:47:08 crc kubenswrapper[4558]: I0120 17:47:08.212309 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0482f0bb-b64a-407b-954f-8f0f8a04572f-kube-api-access-2rdrk" (OuterVolumeSpecName: "kube-api-access-2rdrk") pod "0482f0bb-b64a-407b-954f-8f0f8a04572f" (UID: "0482f0bb-b64a-407b-954f-8f0f8a04572f"). InnerVolumeSpecName "kube-api-access-2rdrk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:47:08 crc kubenswrapper[4558]: I0120 17:47:08.312432 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/74ff4ade-5ef8-4f1d-81c1-ca1b0a85f84c-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:08 crc kubenswrapper[4558]: I0120 17:47:08.312466 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2rdrk\" (UniqueName: \"kubernetes.io/projected/0482f0bb-b64a-407b-954f-8f0f8a04572f-kube-api-access-2rdrk\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:08 crc kubenswrapper[4558]: I0120 17:47:08.312482 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ch4xt\" (UniqueName: \"kubernetes.io/projected/74ff4ade-5ef8-4f1d-81c1-ca1b0a85f84c-kube-api-access-ch4xt\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:08 crc kubenswrapper[4558]: I0120 17:47:08.540357 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-a8ee-account-create-update-t5zdv" Jan 20 17:47:08 crc kubenswrapper[4558]: I0120 17:47:08.540339 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-a8ee-account-create-update-t5zdv" event={"ID":"3b209c02-0fa1-4783-9b88-390400b5282c","Type":"ContainerDied","Data":"101105bb6d9ebd72c2baa501acfa6715f6c764d4e7aadae0612de531b57338ee"} Jan 20 17:47:08 crc kubenswrapper[4558]: I0120 17:47:08.540528 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="101105bb6d9ebd72c2baa501acfa6715f6c764d4e7aadae0612de531b57338ee" Jan 20 17:47:08 crc kubenswrapper[4558]: I0120 17:47:08.542974 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-db-create-9cjqc" Jan 20 17:47:08 crc kubenswrapper[4558]: I0120 17:47:08.543298 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-create-9cjqc" event={"ID":"74ff4ade-5ef8-4f1d-81c1-ca1b0a85f84c","Type":"ContainerDied","Data":"82771e30f25744ab03480c4723fcf149f42afee9f49d843f3f3f2b287134857f"} Jan 20 17:47:08 crc kubenswrapper[4558]: I0120 17:47:08.543362 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="82771e30f25744ab03480c4723fcf149f42afee9f49d843f3f3f2b287134857f" Jan 20 17:47:08 crc kubenswrapper[4558]: I0120 17:47:08.545066 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-db-create-wstvp" Jan 20 17:47:08 crc kubenswrapper[4558]: I0120 17:47:08.545072 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-create-wstvp" event={"ID":"0482f0bb-b64a-407b-954f-8f0f8a04572f","Type":"ContainerDied","Data":"5b6fff58c2c1d655fbafec27b1da24de3c7d89e79a44c000bf0a644740eb7881"} Jan 20 17:47:08 crc kubenswrapper[4558]: I0120 17:47:08.545230 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5b6fff58c2c1d655fbafec27b1da24de3c7d89e79a44c000bf0a644740eb7881" Jan 20 17:47:08 crc kubenswrapper[4558]: I0120 17:47:08.547451 4558 generic.go:334] "Generic (PLEG): container finished" podID="344d4281-d7b6-49a8-8a10-62cf1940fa5c" containerID="c64c1c6b089bf0563dad7d86569ad1d73d4507d9ac97c8012b53e092021479e5" exitCode=0 Jan 20 17:47:08 crc kubenswrapper[4558]: I0120 17:47:08.547526 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-sync-nx4l8" event={"ID":"344d4281-d7b6-49a8-8a10-62cf1940fa5c","Type":"ContainerDied","Data":"c64c1c6b089bf0563dad7d86569ad1d73d4507d9ac97c8012b53e092021479e5"} Jan 20 17:47:08 crc kubenswrapper[4558]: I0120 17:47:08.549407 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-e4fd-account-create-update-zswcv" event={"ID":"b72bfcae-4aa5-47d2-8e1c-b2e6bfbac76a","Type":"ContainerDied","Data":"bf8e9a97e964c5affcc53720e08661d22fbbaa2fb2897ad742750ae126a2e73c"} Jan 20 17:47:08 crc kubenswrapper[4558]: I0120 17:47:08.549466 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bf8e9a97e964c5affcc53720e08661d22fbbaa2fb2897ad742750ae126a2e73c" Jan 20 17:47:08 crc kubenswrapper[4558]: I0120 17:47:08.549411 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-e4fd-account-create-update-zswcv" Jan 20 17:47:08 crc kubenswrapper[4558]: I0120 17:47:08.551465 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-db-create-z66dg" Jan 20 17:47:08 crc kubenswrapper[4558]: I0120 17:47:08.551471 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-create-z66dg" event={"ID":"206eba3c-fb3b-4779-acfe-5d63f0dfc9fa","Type":"ContainerDied","Data":"23b339b35570439c761685415e27134b349ca5d6cb6957ea4570161df1c965b9"} Jan 20 17:47:08 crc kubenswrapper[4558]: I0120 17:47:08.551623 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="23b339b35570439c761685415e27134b349ca5d6cb6957ea4570161df1c965b9" Jan 20 17:47:08 crc kubenswrapper[4558]: I0120 17:47:08.814441 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-a1e2-account-create-update-rmdlk" Jan 20 17:47:08 crc kubenswrapper[4558]: I0120 17:47:08.922883 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/11851b63-4564-4f0e-b5f7-16e34e358eca-operator-scripts\") pod \"11851b63-4564-4f0e-b5f7-16e34e358eca\" (UID: \"11851b63-4564-4f0e-b5f7-16e34e358eca\") " Jan 20 17:47:08 crc kubenswrapper[4558]: I0120 17:47:08.922948 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k5f7l\" (UniqueName: \"kubernetes.io/projected/11851b63-4564-4f0e-b5f7-16e34e358eca-kube-api-access-k5f7l\") pod \"11851b63-4564-4f0e-b5f7-16e34e358eca\" (UID: \"11851b63-4564-4f0e-b5f7-16e34e358eca\") " Jan 20 17:47:08 crc kubenswrapper[4558]: I0120 17:47:08.923741 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/11851b63-4564-4f0e-b5f7-16e34e358eca-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "11851b63-4564-4f0e-b5f7-16e34e358eca" (UID: "11851b63-4564-4f0e-b5f7-16e34e358eca"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:47:08 crc kubenswrapper[4558]: I0120 17:47:08.952361 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/11851b63-4564-4f0e-b5f7-16e34e358eca-kube-api-access-k5f7l" (OuterVolumeSpecName: "kube-api-access-k5f7l") pod "11851b63-4564-4f0e-b5f7-16e34e358eca" (UID: "11851b63-4564-4f0e-b5f7-16e34e358eca"). InnerVolumeSpecName "kube-api-access-k5f7l". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:47:09 crc kubenswrapper[4558]: I0120 17:47:09.024699 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k5f7l\" (UniqueName: \"kubernetes.io/projected/11851b63-4564-4f0e-b5f7-16e34e358eca-kube-api-access-k5f7l\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:09 crc kubenswrapper[4558]: I0120 17:47:09.024724 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/11851b63-4564-4f0e-b5f7-16e34e358eca-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:09 crc kubenswrapper[4558]: I0120 17:47:09.562854 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-a1e2-account-create-update-rmdlk" event={"ID":"11851b63-4564-4f0e-b5f7-16e34e358eca","Type":"ContainerDied","Data":"5a406d8af97adae2b4f86fc4bd9d0568a57e0125afdb48cd86d91e9f32d1a949"} Jan 20 17:47:09 crc kubenswrapper[4558]: I0120 17:47:09.562923 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5a406d8af97adae2b4f86fc4bd9d0568a57e0125afdb48cd86d91e9f32d1a949" Jan 20 17:47:09 crc kubenswrapper[4558]: I0120 17:47:09.563074 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-a1e2-account-create-update-rmdlk" Jan 20 17:47:09 crc kubenswrapper[4558]: I0120 17:47:09.830559 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-db-sync-nx4l8" Jan 20 17:47:09 crc kubenswrapper[4558]: I0120 17:47:09.937767 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-76vn5\" (UniqueName: \"kubernetes.io/projected/344d4281-d7b6-49a8-8a10-62cf1940fa5c-kube-api-access-76vn5\") pod \"344d4281-d7b6-49a8-8a10-62cf1940fa5c\" (UID: \"344d4281-d7b6-49a8-8a10-62cf1940fa5c\") " Jan 20 17:47:09 crc kubenswrapper[4558]: I0120 17:47:09.938227 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/344d4281-d7b6-49a8-8a10-62cf1940fa5c-combined-ca-bundle\") pod \"344d4281-d7b6-49a8-8a10-62cf1940fa5c\" (UID: \"344d4281-d7b6-49a8-8a10-62cf1940fa5c\") " Jan 20 17:47:09 crc kubenswrapper[4558]: I0120 17:47:09.938296 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/344d4281-d7b6-49a8-8a10-62cf1940fa5c-config-data\") pod \"344d4281-d7b6-49a8-8a10-62cf1940fa5c\" (UID: \"344d4281-d7b6-49a8-8a10-62cf1940fa5c\") " Jan 20 17:47:09 crc kubenswrapper[4558]: I0120 17:47:09.943914 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/344d4281-d7b6-49a8-8a10-62cf1940fa5c-kube-api-access-76vn5" (OuterVolumeSpecName: "kube-api-access-76vn5") pod "344d4281-d7b6-49a8-8a10-62cf1940fa5c" (UID: "344d4281-d7b6-49a8-8a10-62cf1940fa5c"). InnerVolumeSpecName "kube-api-access-76vn5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:47:09 crc kubenswrapper[4558]: I0120 17:47:09.961662 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/344d4281-d7b6-49a8-8a10-62cf1940fa5c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "344d4281-d7b6-49a8-8a10-62cf1940fa5c" (UID: "344d4281-d7b6-49a8-8a10-62cf1940fa5c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:47:09 crc kubenswrapper[4558]: I0120 17:47:09.971802 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/344d4281-d7b6-49a8-8a10-62cf1940fa5c-config-data" (OuterVolumeSpecName: "config-data") pod "344d4281-d7b6-49a8-8a10-62cf1940fa5c" (UID: "344d4281-d7b6-49a8-8a10-62cf1940fa5c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.040330 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/344d4281-d7b6-49a8-8a10-62cf1940fa5c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.040520 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/344d4281-d7b6-49a8-8a10-62cf1940fa5c-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.040578 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-76vn5\" (UniqueName: \"kubernetes.io/projected/344d4281-d7b6-49a8-8a10-62cf1940fa5c-kube-api-access-76vn5\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.575454 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-sync-nx4l8" event={"ID":"344d4281-d7b6-49a8-8a10-62cf1940fa5c","Type":"ContainerDied","Data":"9a5c0b95f32eed4ad9bb2f8f2de0988747a8794316dcd25d9ec92dd251f45e94"} Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.575499 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9a5c0b95f32eed4ad9bb2f8f2de0988747a8794316dcd25d9ec92dd251f45e94" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.575561 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-db-sync-nx4l8" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.692457 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-x52px"] Jan 20 17:47:10 crc kubenswrapper[4558]: E0120 17:47:10.696717 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0482f0bb-b64a-407b-954f-8f0f8a04572f" containerName="mariadb-database-create" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.696753 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0482f0bb-b64a-407b-954f-8f0f8a04572f" containerName="mariadb-database-create" Jan 20 17:47:10 crc kubenswrapper[4558]: E0120 17:47:10.696813 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b72bfcae-4aa5-47d2-8e1c-b2e6bfbac76a" containerName="mariadb-account-create-update" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.696820 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b72bfcae-4aa5-47d2-8e1c-b2e6bfbac76a" containerName="mariadb-account-create-update" Jan 20 17:47:10 crc kubenswrapper[4558]: E0120 17:47:10.696835 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b209c02-0fa1-4783-9b88-390400b5282c" containerName="mariadb-account-create-update" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.696863 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b209c02-0fa1-4783-9b88-390400b5282c" containerName="mariadb-account-create-update" Jan 20 17:47:10 crc kubenswrapper[4558]: E0120 17:47:10.696901 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11851b63-4564-4f0e-b5f7-16e34e358eca" containerName="mariadb-account-create-update" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.696908 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="11851b63-4564-4f0e-b5f7-16e34e358eca" containerName="mariadb-account-create-update" Jan 20 17:47:10 crc kubenswrapper[4558]: E0120 17:47:10.696921 4558 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="74ff4ade-5ef8-4f1d-81c1-ca1b0a85f84c" containerName="mariadb-database-create" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.696952 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="74ff4ade-5ef8-4f1d-81c1-ca1b0a85f84c" containerName="mariadb-database-create" Jan 20 17:47:10 crc kubenswrapper[4558]: E0120 17:47:10.696974 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="344d4281-d7b6-49a8-8a10-62cf1940fa5c" containerName="keystone-db-sync" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.696981 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="344d4281-d7b6-49a8-8a10-62cf1940fa5c" containerName="keystone-db-sync" Jan 20 17:47:10 crc kubenswrapper[4558]: E0120 17:47:10.697010 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="206eba3c-fb3b-4779-acfe-5d63f0dfc9fa" containerName="mariadb-database-create" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.697035 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="206eba3c-fb3b-4779-acfe-5d63f0dfc9fa" containerName="mariadb-database-create" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.697658 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="344d4281-d7b6-49a8-8a10-62cf1940fa5c" containerName="keystone-db-sync" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.697680 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b72bfcae-4aa5-47d2-8e1c-b2e6bfbac76a" containerName="mariadb-account-create-update" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.697711 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="3b209c02-0fa1-4783-9b88-390400b5282c" containerName="mariadb-account-create-update" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.697739 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="0482f0bb-b64a-407b-954f-8f0f8a04572f" containerName="mariadb-database-create" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.697745 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="11851b63-4564-4f0e-b5f7-16e34e358eca" containerName="mariadb-account-create-update" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.697757 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="74ff4ade-5ef8-4f1d-81c1-ca1b0a85f84c" containerName="mariadb-database-create" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.697793 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="206eba3c-fb3b-4779-acfe-5d63f0dfc9fa" containerName="mariadb-database-create" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.698907 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-x52px" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.706561 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"osp-secret" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.706817 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-keystone-dockercfg-8dfwc" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.706759 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-scripts" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.707261 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.730241 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-config-data" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.742354 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-x52px"] Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.754444 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.756245 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.760396 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-internal-svc" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.761110 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-glance-dockercfg-v4jnh" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.761235 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-internal-config-data" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.761155 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-scripts" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.762367 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.767518 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6-combined-ca-bundle\") pod \"keystone-bootstrap-x52px\" (UID: \"3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6\") " pod="openstack-kuttl-tests/keystone-bootstrap-x52px" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.767824 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-swvjh\" (UniqueName: \"kubernetes.io/projected/3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6-kube-api-access-swvjh\") pod \"keystone-bootstrap-x52px\" (UID: \"3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6\") " pod="openstack-kuttl-tests/keystone-bootstrap-x52px" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.767970 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6-config-data\") pod \"keystone-bootstrap-x52px\" 
(UID: \"3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6\") " pod="openstack-kuttl-tests/keystone-bootstrap-x52px" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.768083 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6-scripts\") pod \"keystone-bootstrap-x52px\" (UID: \"3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6\") " pod="openstack-kuttl-tests/keystone-bootstrap-x52px" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.768267 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6-credential-keys\") pod \"keystone-bootstrap-x52px\" (UID: \"3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6\") " pod="openstack-kuttl-tests/keystone-bootstrap-x52px" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.768463 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6-fernet-keys\") pod \"keystone-bootstrap-x52px\" (UID: \"3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6\") " pod="openstack-kuttl-tests/keystone-bootstrap-x52px" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.814953 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.816431 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.823038 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-public-svc" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.823501 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-external-config-data" Jan 20 17:47:10 crc kubenswrapper[4558]: E0120 17:47:10.833849 4558 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod344d4281_d7b6_49a8_8a10_62cf1940fa5c.slice\": RecentStats: unable to find data in memory cache]" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.838049 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.861538 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-db-sync-bdw77"] Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.862554 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-db-sync-bdw77" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.869592 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-cinder-dockercfg-cwfqv" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.869793 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-config-data" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.870149 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-scripts" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.870528 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/254186c2-67b8-4439-b79d-55f755e4afde-config-data\") pod \"glance-default-internal-api-0\" (UID: \"254186c2-67b8-4439-b79d-55f755e4afde\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.870566 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f3dc733d-d501-42c3-af4b-ef30ec59ffb4-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"f3dc733d-d501-42c3-af4b-ef30ec59ffb4\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.870605 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"f3dc733d-d501-42c3-af4b-ef30ec59ffb4\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.870643 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6-credential-keys\") pod \"keystone-bootstrap-x52px\" (UID: \"3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6\") " pod="openstack-kuttl-tests/keystone-bootstrap-x52px" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.870679 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"254186c2-67b8-4439-b79d-55f755e4afde\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.870700 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3dc733d-d501-42c3-af4b-ef30ec59ffb4-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"f3dc733d-d501-42c3-af4b-ef30ec59ffb4\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.870725 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6-fernet-keys\") pod \"keystone-bootstrap-x52px\" (UID: \"3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6\") " pod="openstack-kuttl-tests/keystone-bootstrap-x52px" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.870751 4558 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6-combined-ca-bundle\") pod \"keystone-bootstrap-x52px\" (UID: \"3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6\") " pod="openstack-kuttl-tests/keystone-bootstrap-x52px" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.870778 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nglv6\" (UniqueName: \"kubernetes.io/projected/f3dc733d-d501-42c3-af4b-ef30ec59ffb4-kube-api-access-nglv6\") pod \"glance-default-external-api-0\" (UID: \"f3dc733d-d501-42c3-af4b-ef30ec59ffb4\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.870800 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/254186c2-67b8-4439-b79d-55f755e4afde-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"254186c2-67b8-4439-b79d-55f755e4afde\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.870815 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/254186c2-67b8-4439-b79d-55f755e4afde-logs\") pod \"glance-default-internal-api-0\" (UID: \"254186c2-67b8-4439-b79d-55f755e4afde\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.870841 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f3dc733d-d501-42c3-af4b-ef30ec59ffb4-scripts\") pod \"glance-default-external-api-0\" (UID: \"f3dc733d-d501-42c3-af4b-ef30ec59ffb4\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.870859 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/254186c2-67b8-4439-b79d-55f755e4afde-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"254186c2-67b8-4439-b79d-55f755e4afde\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.870878 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f3dc733d-d501-42c3-af4b-ef30ec59ffb4-logs\") pod \"glance-default-external-api-0\" (UID: \"f3dc733d-d501-42c3-af4b-ef30ec59ffb4\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.870897 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/254186c2-67b8-4439-b79d-55f755e4afde-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"254186c2-67b8-4439-b79d-55f755e4afde\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.870923 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f3dc733d-d501-42c3-af4b-ef30ec59ffb4-httpd-run\") pod \"glance-default-external-api-0\" (UID: 
\"f3dc733d-d501-42c3-af4b-ef30ec59ffb4\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.870959 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f3dc733d-d501-42c3-af4b-ef30ec59ffb4-config-data\") pod \"glance-default-external-api-0\" (UID: \"f3dc733d-d501-42c3-af4b-ef30ec59ffb4\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.870982 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-swvjh\" (UniqueName: \"kubernetes.io/projected/3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6-kube-api-access-swvjh\") pod \"keystone-bootstrap-x52px\" (UID: \"3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6\") " pod="openstack-kuttl-tests/keystone-bootstrap-x52px" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.871001 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t9fnm\" (UniqueName: \"kubernetes.io/projected/254186c2-67b8-4439-b79d-55f755e4afde-kube-api-access-t9fnm\") pod \"glance-default-internal-api-0\" (UID: \"254186c2-67b8-4439-b79d-55f755e4afde\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.871023 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/254186c2-67b8-4439-b79d-55f755e4afde-scripts\") pod \"glance-default-internal-api-0\" (UID: \"254186c2-67b8-4439-b79d-55f755e4afde\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.871039 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6-config-data\") pod \"keystone-bootstrap-x52px\" (UID: \"3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6\") " pod="openstack-kuttl-tests/keystone-bootstrap-x52px" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.871063 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6-scripts\") pod \"keystone-bootstrap-x52px\" (UID: \"3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6\") " pod="openstack-kuttl-tests/keystone-bootstrap-x52px" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.878001 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6-scripts\") pod \"keystone-bootstrap-x52px\" (UID: \"3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6\") " pod="openstack-kuttl-tests/keystone-bootstrap-x52px" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.878628 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6-fernet-keys\") pod \"keystone-bootstrap-x52px\" (UID: \"3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6\") " pod="openstack-kuttl-tests/keystone-bootstrap-x52px" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.879422 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6-credential-keys\") pod \"keystone-bootstrap-x52px\" (UID: 
\"3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6\") " pod="openstack-kuttl-tests/keystone-bootstrap-x52px" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.880616 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6-combined-ca-bundle\") pod \"keystone-bootstrap-x52px\" (UID: \"3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6\") " pod="openstack-kuttl-tests/keystone-bootstrap-x52px" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.888020 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-db-sync-bdw77"] Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.893702 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6-config-data\") pod \"keystone-bootstrap-x52px\" (UID: \"3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6\") " pod="openstack-kuttl-tests/keystone-bootstrap-x52px" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.912735 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-swvjh\" (UniqueName: \"kubernetes.io/projected/3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6-kube-api-access-swvjh\") pod \"keystone-bootstrap-x52px\" (UID: \"3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6\") " pod="openstack-kuttl-tests/keystone-bootstrap-x52px" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.950407 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.953300 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.969713 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-db-sync-s4glp"] Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.970426 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.970542 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.971791 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-db-sync-s4glp" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.973342 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-httpd-config" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.973669 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-config" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.973930 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t9fnm\" (UniqueName: \"kubernetes.io/projected/254186c2-67b8-4439-b79d-55f755e4afde-kube-api-access-t9fnm\") pod \"glance-default-internal-api-0\" (UID: \"254186c2-67b8-4439-b79d-55f755e4afde\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.973982 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/017cf5a8-b216-4c29-8dfc-7b5b6625591c-config-data\") pod \"ceilometer-0\" (UID: \"017cf5a8-b216-4c29-8dfc-7b5b6625591c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.974007 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/254186c2-67b8-4439-b79d-55f755e4afde-scripts\") pod \"glance-default-internal-api-0\" (UID: \"254186c2-67b8-4439-b79d-55f755e4afde\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.974091 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/254186c2-67b8-4439-b79d-55f755e4afde-config-data\") pod \"glance-default-internal-api-0\" (UID: \"254186c2-67b8-4439-b79d-55f755e4afde\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.974118 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/20780c8d-f8bc-43c4-84ba-ba6f2e0fe601-scripts\") pod \"cinder-db-sync-bdw77\" (UID: \"20780c8d-f8bc-43c4-84ba-ba6f2e0fe601\") " pod="openstack-kuttl-tests/cinder-db-sync-bdw77" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.974148 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f3dc733d-d501-42c3-af4b-ef30ec59ffb4-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"f3dc733d-d501-42c3-af4b-ef30ec59ffb4\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.974179 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/017cf5a8-b216-4c29-8dfc-7b5b6625591c-scripts\") pod \"ceilometer-0\" (UID: \"017cf5a8-b216-4c29-8dfc-7b5b6625591c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.974206 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"f3dc733d-d501-42c3-af4b-ef30ec59ffb4\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 
17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.974244 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/017cf5a8-b216-4c29-8dfc-7b5b6625591c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"017cf5a8-b216-4c29-8dfc-7b5b6625591c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.974261 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9xzlt\" (UniqueName: \"kubernetes.io/projected/017cf5a8-b216-4c29-8dfc-7b5b6625591c-kube-api-access-9xzlt\") pod \"ceilometer-0\" (UID: \"017cf5a8-b216-4c29-8dfc-7b5b6625591c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.974291 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"254186c2-67b8-4439-b79d-55f755e4afde\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.974309 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3dc733d-d501-42c3-af4b-ef30ec59ffb4-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"f3dc733d-d501-42c3-af4b-ef30ec59ffb4\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.974331 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/017cf5a8-b216-4c29-8dfc-7b5b6625591c-log-httpd\") pod \"ceilometer-0\" (UID: \"017cf5a8-b216-4c29-8dfc-7b5b6625591c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.974349 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8cbcl\" (UniqueName: \"kubernetes.io/projected/20780c8d-f8bc-43c4-84ba-ba6f2e0fe601-kube-api-access-8cbcl\") pod \"cinder-db-sync-bdw77\" (UID: \"20780c8d-f8bc-43c4-84ba-ba6f2e0fe601\") " pod="openstack-kuttl-tests/cinder-db-sync-bdw77" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.974387 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/20780c8d-f8bc-43c4-84ba-ba6f2e0fe601-etc-machine-id\") pod \"cinder-db-sync-bdw77\" (UID: \"20780c8d-f8bc-43c4-84ba-ba6f2e0fe601\") " pod="openstack-kuttl-tests/cinder-db-sync-bdw77" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.974408 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/20780c8d-f8bc-43c4-84ba-ba6f2e0fe601-db-sync-config-data\") pod \"cinder-db-sync-bdw77\" (UID: \"20780c8d-f8bc-43c4-84ba-ba6f2e0fe601\") " pod="openstack-kuttl-tests/cinder-db-sync-bdw77" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.974449 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20780c8d-f8bc-43c4-84ba-ba6f2e0fe601-combined-ca-bundle\") pod \"cinder-db-sync-bdw77\" (UID: \"20780c8d-f8bc-43c4-84ba-ba6f2e0fe601\") " 
pod="openstack-kuttl-tests/cinder-db-sync-bdw77" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.974476 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nglv6\" (UniqueName: \"kubernetes.io/projected/f3dc733d-d501-42c3-af4b-ef30ec59ffb4-kube-api-access-nglv6\") pod \"glance-default-external-api-0\" (UID: \"f3dc733d-d501-42c3-af4b-ef30ec59ffb4\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.974501 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/254186c2-67b8-4439-b79d-55f755e4afde-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"254186c2-67b8-4439-b79d-55f755e4afde\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.974518 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/017cf5a8-b216-4c29-8dfc-7b5b6625591c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"017cf5a8-b216-4c29-8dfc-7b5b6625591c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.974538 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/254186c2-67b8-4439-b79d-55f755e4afde-logs\") pod \"glance-default-internal-api-0\" (UID: \"254186c2-67b8-4439-b79d-55f755e4afde\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.974585 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f3dc733d-d501-42c3-af4b-ef30ec59ffb4-scripts\") pod \"glance-default-external-api-0\" (UID: \"f3dc733d-d501-42c3-af4b-ef30ec59ffb4\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.974602 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/20780c8d-f8bc-43c4-84ba-ba6f2e0fe601-config-data\") pod \"cinder-db-sync-bdw77\" (UID: \"20780c8d-f8bc-43c4-84ba-ba6f2e0fe601\") " pod="openstack-kuttl-tests/cinder-db-sync-bdw77" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.974622 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/254186c2-67b8-4439-b79d-55f755e4afde-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"254186c2-67b8-4439-b79d-55f755e4afde\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.974649 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f3dc733d-d501-42c3-af4b-ef30ec59ffb4-logs\") pod \"glance-default-external-api-0\" (UID: \"f3dc733d-d501-42c3-af4b-ef30ec59ffb4\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.974667 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/254186c2-67b8-4439-b79d-55f755e4afde-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"254186c2-67b8-4439-b79d-55f755e4afde\") " 
pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.974714 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/017cf5a8-b216-4c29-8dfc-7b5b6625591c-run-httpd\") pod \"ceilometer-0\" (UID: \"017cf5a8-b216-4c29-8dfc-7b5b6625591c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.974732 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f3dc733d-d501-42c3-af4b-ef30ec59ffb4-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"f3dc733d-d501-42c3-af4b-ef30ec59ffb4\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.974773 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f3dc733d-d501-42c3-af4b-ef30ec59ffb4-config-data\") pod \"glance-default-external-api-0\" (UID: \"f3dc733d-d501-42c3-af4b-ef30ec59ffb4\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.982095 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-neutron-dockercfg-zs5rs" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.982334 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/254186c2-67b8-4439-b79d-55f755e4afde-scripts\") pod \"glance-default-internal-api-0\" (UID: \"254186c2-67b8-4439-b79d-55f755e4afde\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.982731 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"f3dc733d-d501-42c3-af4b-ef30ec59ffb4\") device mount path \"/mnt/openstack/pv09\"" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.987048 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"254186c2-67b8-4439-b79d-55f755e4afde\") device mount path \"/mnt/openstack/pv11\"" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.987095 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/254186c2-67b8-4439-b79d-55f755e4afde-logs\") pod \"glance-default-internal-api-0\" (UID: \"254186c2-67b8-4439-b79d-55f755e4afde\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.987660 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/254186c2-67b8-4439-b79d-55f755e4afde-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"254186c2-67b8-4439-b79d-55f755e4afde\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.987961 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: 
\"kubernetes.io/empty-dir/f3dc733d-d501-42c3-af4b-ef30ec59ffb4-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"f3dc733d-d501-42c3-af4b-ef30ec59ffb4\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.988233 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f3dc733d-d501-42c3-af4b-ef30ec59ffb4-logs\") pod \"glance-default-external-api-0\" (UID: \"f3dc733d-d501-42c3-af4b-ef30ec59ffb4\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.989882 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/254186c2-67b8-4439-b79d-55f755e4afde-config-data\") pod \"glance-default-internal-api-0\" (UID: \"254186c2-67b8-4439-b79d-55f755e4afde\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.994518 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f3dc733d-d501-42c3-af4b-ef30ec59ffb4-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"f3dc733d-d501-42c3-af4b-ef30ec59ffb4\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.996761 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3dc733d-d501-42c3-af4b-ef30ec59ffb4-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"f3dc733d-d501-42c3-af4b-ef30ec59ffb4\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.996818 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-db-sync-s4glp"] Jan 20 17:47:10 crc kubenswrapper[4558]: I0120 17:47:10.999867 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f3dc733d-d501-42c3-af4b-ef30ec59ffb4-scripts\") pod \"glance-default-external-api-0\" (UID: \"f3dc733d-d501-42c3-af4b-ef30ec59ffb4\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.000382 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f3dc733d-d501-42c3-af4b-ef30ec59ffb4-config-data\") pod \"glance-default-external-api-0\" (UID: \"f3dc733d-d501-42c3-af4b-ef30ec59ffb4\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.008248 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/254186c2-67b8-4439-b79d-55f755e4afde-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"254186c2-67b8-4439-b79d-55f755e4afde\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.017876 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/254186c2-67b8-4439-b79d-55f755e4afde-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"254186c2-67b8-4439-b79d-55f755e4afde\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:47:11 crc kubenswrapper[4558]: 
I0120 17:47:11.021959 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nglv6\" (UniqueName: \"kubernetes.io/projected/f3dc733d-d501-42c3-af4b-ef30ec59ffb4-kube-api-access-nglv6\") pod \"glance-default-external-api-0\" (UID: \"f3dc733d-d501-42c3-af4b-ef30ec59ffb4\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.035639 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.050061 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t9fnm\" (UniqueName: \"kubernetes.io/projected/254186c2-67b8-4439-b79d-55f755e4afde-kube-api-access-t9fnm\") pod \"glance-default-internal-api-0\" (UID: \"254186c2-67b8-4439-b79d-55f755e4afde\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.061424 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/placement-db-sync-mr5dl"] Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.063365 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-db-sync-mr5dl" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.064883 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-x52px" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.066595 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"placement-placement-dockercfg-qmgb8" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.066834 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"placement-scripts" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.067045 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"placement-config-data" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.073372 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-db-sync-mr5dl"] Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.078264 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/20780c8d-f8bc-43c4-84ba-ba6f2e0fe601-scripts\") pod \"cinder-db-sync-bdw77\" (UID: \"20780c8d-f8bc-43c4-84ba-ba6f2e0fe601\") " pod="openstack-kuttl-tests/cinder-db-sync-bdw77" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.078888 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/017cf5a8-b216-4c29-8dfc-7b5b6625591c-scripts\") pod \"ceilometer-0\" (UID: \"017cf5a8-b216-4c29-8dfc-7b5b6625591c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.079043 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54a8e29a-d222-49e8-ab57-1aa0afb9648b-combined-ca-bundle\") pod \"placement-db-sync-mr5dl\" (UID: \"54a8e29a-d222-49e8-ab57-1aa0afb9648b\") " pod="openstack-kuttl-tests/placement-db-sync-mr5dl" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.079182 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-79rxs\" (UniqueName: 
\"kubernetes.io/projected/d1809d86-a9c3-46e9-96eb-06433832465c-kube-api-access-79rxs\") pod \"neutron-db-sync-s4glp\" (UID: \"d1809d86-a9c3-46e9-96eb-06433832465c\") " pod="openstack-kuttl-tests/neutron-db-sync-s4glp" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.079266 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/017cf5a8-b216-4c29-8dfc-7b5b6625591c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"017cf5a8-b216-4c29-8dfc-7b5b6625591c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.079358 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9xzlt\" (UniqueName: \"kubernetes.io/projected/017cf5a8-b216-4c29-8dfc-7b5b6625591c-kube-api-access-9xzlt\") pod \"ceilometer-0\" (UID: \"017cf5a8-b216-4c29-8dfc-7b5b6625591c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.079463 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d1809d86-a9c3-46e9-96eb-06433832465c-combined-ca-bundle\") pod \"neutron-db-sync-s4glp\" (UID: \"d1809d86-a9c3-46e9-96eb-06433832465c\") " pod="openstack-kuttl-tests/neutron-db-sync-s4glp" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.079538 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/017cf5a8-b216-4c29-8dfc-7b5b6625591c-log-httpd\") pod \"ceilometer-0\" (UID: \"017cf5a8-b216-4c29-8dfc-7b5b6625591c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.079605 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8cbcl\" (UniqueName: \"kubernetes.io/projected/20780c8d-f8bc-43c4-84ba-ba6f2e0fe601-kube-api-access-8cbcl\") pod \"cinder-db-sync-bdw77\" (UID: \"20780c8d-f8bc-43c4-84ba-ba6f2e0fe601\") " pod="openstack-kuttl-tests/cinder-db-sync-bdw77" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.079695 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/20780c8d-f8bc-43c4-84ba-ba6f2e0fe601-etc-machine-id\") pod \"cinder-db-sync-bdw77\" (UID: \"20780c8d-f8bc-43c4-84ba-ba6f2e0fe601\") " pod="openstack-kuttl-tests/cinder-db-sync-bdw77" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.079770 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/20780c8d-f8bc-43c4-84ba-ba6f2e0fe601-db-sync-config-data\") pod \"cinder-db-sync-bdw77\" (UID: \"20780c8d-f8bc-43c4-84ba-ba6f2e0fe601\") " pod="openstack-kuttl-tests/cinder-db-sync-bdw77" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.079883 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20780c8d-f8bc-43c4-84ba-ba6f2e0fe601-combined-ca-bundle\") pod \"cinder-db-sync-bdw77\" (UID: \"20780c8d-f8bc-43c4-84ba-ba6f2e0fe601\") " pod="openstack-kuttl-tests/cinder-db-sync-bdw77" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.079981 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/secret/d1809d86-a9c3-46e9-96eb-06433832465c-config\") pod \"neutron-db-sync-s4glp\" (UID: \"d1809d86-a9c3-46e9-96eb-06433832465c\") " pod="openstack-kuttl-tests/neutron-db-sync-s4glp" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.080053 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/017cf5a8-b216-4c29-8dfc-7b5b6625591c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"017cf5a8-b216-4c29-8dfc-7b5b6625591c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.080143 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/54a8e29a-d222-49e8-ab57-1aa0afb9648b-scripts\") pod \"placement-db-sync-mr5dl\" (UID: \"54a8e29a-d222-49e8-ab57-1aa0afb9648b\") " pod="openstack-kuttl-tests/placement-db-sync-mr5dl" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.080238 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/20780c8d-f8bc-43c4-84ba-ba6f2e0fe601-config-data\") pod \"cinder-db-sync-bdw77\" (UID: \"20780c8d-f8bc-43c4-84ba-ba6f2e0fe601\") " pod="openstack-kuttl-tests/cinder-db-sync-bdw77" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.080365 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/017cf5a8-b216-4c29-8dfc-7b5b6625591c-run-httpd\") pod \"ceilometer-0\" (UID: \"017cf5a8-b216-4c29-8dfc-7b5b6625591c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.080440 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tb2b6\" (UniqueName: \"kubernetes.io/projected/54a8e29a-d222-49e8-ab57-1aa0afb9648b-kube-api-access-tb2b6\") pod \"placement-db-sync-mr5dl\" (UID: \"54a8e29a-d222-49e8-ab57-1aa0afb9648b\") " pod="openstack-kuttl-tests/placement-db-sync-mr5dl" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.080547 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54a8e29a-d222-49e8-ab57-1aa0afb9648b-config-data\") pod \"placement-db-sync-mr5dl\" (UID: \"54a8e29a-d222-49e8-ab57-1aa0afb9648b\") " pod="openstack-kuttl-tests/placement-db-sync-mr5dl" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.080630 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/017cf5a8-b216-4c29-8dfc-7b5b6625591c-config-data\") pod \"ceilometer-0\" (UID: \"017cf5a8-b216-4c29-8dfc-7b5b6625591c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.080749 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/54a8e29a-d222-49e8-ab57-1aa0afb9648b-logs\") pod \"placement-db-sync-mr5dl\" (UID: \"54a8e29a-d222-49e8-ab57-1aa0afb9648b\") " pod="openstack-kuttl-tests/placement-db-sync-mr5dl" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.081651 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/017cf5a8-b216-4c29-8dfc-7b5b6625591c-log-httpd\") pod \"ceilometer-0\" (UID: 
\"017cf5a8-b216-4c29-8dfc-7b5b6625591c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.081895 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/20780c8d-f8bc-43c4-84ba-ba6f2e0fe601-etc-machine-id\") pod \"cinder-db-sync-bdw77\" (UID: \"20780c8d-f8bc-43c4-84ba-ba6f2e0fe601\") " pod="openstack-kuttl-tests/cinder-db-sync-bdw77" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.083487 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/20780c8d-f8bc-43c4-84ba-ba6f2e0fe601-scripts\") pod \"cinder-db-sync-bdw77\" (UID: \"20780c8d-f8bc-43c4-84ba-ba6f2e0fe601\") " pod="openstack-kuttl-tests/cinder-db-sync-bdw77" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.083812 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/017cf5a8-b216-4c29-8dfc-7b5b6625591c-run-httpd\") pod \"ceilometer-0\" (UID: \"017cf5a8-b216-4c29-8dfc-7b5b6625591c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.085944 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/20780c8d-f8bc-43c4-84ba-ba6f2e0fe601-db-sync-config-data\") pod \"cinder-db-sync-bdw77\" (UID: \"20780c8d-f8bc-43c4-84ba-ba6f2e0fe601\") " pod="openstack-kuttl-tests/cinder-db-sync-bdw77" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.086494 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/017cf5a8-b216-4c29-8dfc-7b5b6625591c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"017cf5a8-b216-4c29-8dfc-7b5b6625591c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.086870 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20780c8d-f8bc-43c4-84ba-ba6f2e0fe601-combined-ca-bundle\") pod \"cinder-db-sync-bdw77\" (UID: \"20780c8d-f8bc-43c4-84ba-ba6f2e0fe601\") " pod="openstack-kuttl-tests/cinder-db-sync-bdw77" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.090468 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/017cf5a8-b216-4c29-8dfc-7b5b6625591c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"017cf5a8-b216-4c29-8dfc-7b5b6625591c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.090946 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/017cf5a8-b216-4c29-8dfc-7b5b6625591c-scripts\") pod \"ceilometer-0\" (UID: \"017cf5a8-b216-4c29-8dfc-7b5b6625591c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.091613 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/017cf5a8-b216-4c29-8dfc-7b5b6625591c-config-data\") pod \"ceilometer-0\" (UID: \"017cf5a8-b216-4c29-8dfc-7b5b6625591c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.091847 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"254186c2-67b8-4439-b79d-55f755e4afde\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.099062 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/20780c8d-f8bc-43c4-84ba-ba6f2e0fe601-config-data\") pod \"cinder-db-sync-bdw77\" (UID: \"20780c8d-f8bc-43c4-84ba-ba6f2e0fe601\") " pod="openstack-kuttl-tests/cinder-db-sync-bdw77" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.100246 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8cbcl\" (UniqueName: \"kubernetes.io/projected/20780c8d-f8bc-43c4-84ba-ba6f2e0fe601-kube-api-access-8cbcl\") pod \"cinder-db-sync-bdw77\" (UID: \"20780c8d-f8bc-43c4-84ba-ba6f2e0fe601\") " pod="openstack-kuttl-tests/cinder-db-sync-bdw77" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.108685 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-db-sync-qcsmp"] Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.110700 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-db-sync-qcsmp" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.113226 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9xzlt\" (UniqueName: \"kubernetes.io/projected/017cf5a8-b216-4c29-8dfc-7b5b6625591c-kube-api-access-9xzlt\") pod \"ceilometer-0\" (UID: \"017cf5a8-b216-4c29-8dfc-7b5b6625591c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.119088 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-barbican-dockercfg-bm9pv" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.119269 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-config-data" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.124193 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"f3dc733d-d501-42c3-af4b-ef30ec59ffb4\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.135447 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.136141 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-db-sync-qcsmp"] Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.151465 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.182603 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51f6fade-ce82-468f-9a2c-79c733dddeb3-combined-ca-bundle\") pod \"barbican-db-sync-qcsmp\" (UID: \"51f6fade-ce82-468f-9a2c-79c733dddeb3\") " pod="openstack-kuttl-tests/barbican-db-sync-qcsmp" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.182858 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/54a8e29a-d222-49e8-ab57-1aa0afb9648b-logs\") pod \"placement-db-sync-mr5dl\" (UID: \"54a8e29a-d222-49e8-ab57-1aa0afb9648b\") " pod="openstack-kuttl-tests/placement-db-sync-mr5dl" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.182902 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54a8e29a-d222-49e8-ab57-1aa0afb9648b-combined-ca-bundle\") pod \"placement-db-sync-mr5dl\" (UID: \"54a8e29a-d222-49e8-ab57-1aa0afb9648b\") " pod="openstack-kuttl-tests/placement-db-sync-mr5dl" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.182953 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-79rxs\" (UniqueName: \"kubernetes.io/projected/d1809d86-a9c3-46e9-96eb-06433832465c-kube-api-access-79rxs\") pod \"neutron-db-sync-s4glp\" (UID: \"d1809d86-a9c3-46e9-96eb-06433832465c\") " pod="openstack-kuttl-tests/neutron-db-sync-s4glp" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.182988 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/51f6fade-ce82-468f-9a2c-79c733dddeb3-db-sync-config-data\") pod \"barbican-db-sync-qcsmp\" (UID: \"51f6fade-ce82-468f-9a2c-79c733dddeb3\") " pod="openstack-kuttl-tests/barbican-db-sync-qcsmp" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.183029 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d1809d86-a9c3-46e9-96eb-06433832465c-combined-ca-bundle\") pod \"neutron-db-sync-s4glp\" (UID: \"d1809d86-a9c3-46e9-96eb-06433832465c\") " pod="openstack-kuttl-tests/neutron-db-sync-s4glp" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.183088 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/d1809d86-a9c3-46e9-96eb-06433832465c-config\") pod \"neutron-db-sync-s4glp\" (UID: \"d1809d86-a9c3-46e9-96eb-06433832465c\") " pod="openstack-kuttl-tests/neutron-db-sync-s4glp" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.183121 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l9zhr\" (UniqueName: \"kubernetes.io/projected/51f6fade-ce82-468f-9a2c-79c733dddeb3-kube-api-access-l9zhr\") pod \"barbican-db-sync-qcsmp\" (UID: \"51f6fade-ce82-468f-9a2c-79c733dddeb3\") " pod="openstack-kuttl-tests/barbican-db-sync-qcsmp" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.183145 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/54a8e29a-d222-49e8-ab57-1aa0afb9648b-scripts\") pod \"placement-db-sync-mr5dl\" (UID: 
\"54a8e29a-d222-49e8-ab57-1aa0afb9648b\") " pod="openstack-kuttl-tests/placement-db-sync-mr5dl" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.183210 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tb2b6\" (UniqueName: \"kubernetes.io/projected/54a8e29a-d222-49e8-ab57-1aa0afb9648b-kube-api-access-tb2b6\") pod \"placement-db-sync-mr5dl\" (UID: \"54a8e29a-d222-49e8-ab57-1aa0afb9648b\") " pod="openstack-kuttl-tests/placement-db-sync-mr5dl" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.183255 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54a8e29a-d222-49e8-ab57-1aa0afb9648b-config-data\") pod \"placement-db-sync-mr5dl\" (UID: \"54a8e29a-d222-49e8-ab57-1aa0afb9648b\") " pod="openstack-kuttl-tests/placement-db-sync-mr5dl" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.183847 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/54a8e29a-d222-49e8-ab57-1aa0afb9648b-logs\") pod \"placement-db-sync-mr5dl\" (UID: \"54a8e29a-d222-49e8-ab57-1aa0afb9648b\") " pod="openstack-kuttl-tests/placement-db-sync-mr5dl" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.187891 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d1809d86-a9c3-46e9-96eb-06433832465c-combined-ca-bundle\") pod \"neutron-db-sync-s4glp\" (UID: \"d1809d86-a9c3-46e9-96eb-06433832465c\") " pod="openstack-kuttl-tests/neutron-db-sync-s4glp" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.188780 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54a8e29a-d222-49e8-ab57-1aa0afb9648b-config-data\") pod \"placement-db-sync-mr5dl\" (UID: \"54a8e29a-d222-49e8-ab57-1aa0afb9648b\") " pod="openstack-kuttl-tests/placement-db-sync-mr5dl" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.189837 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/54a8e29a-d222-49e8-ab57-1aa0afb9648b-scripts\") pod \"placement-db-sync-mr5dl\" (UID: \"54a8e29a-d222-49e8-ab57-1aa0afb9648b\") " pod="openstack-kuttl-tests/placement-db-sync-mr5dl" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.190354 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54a8e29a-d222-49e8-ab57-1aa0afb9648b-combined-ca-bundle\") pod \"placement-db-sync-mr5dl\" (UID: \"54a8e29a-d222-49e8-ab57-1aa0afb9648b\") " pod="openstack-kuttl-tests/placement-db-sync-mr5dl" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.190796 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/d1809d86-a9c3-46e9-96eb-06433832465c-config\") pod \"neutron-db-sync-s4glp\" (UID: \"d1809d86-a9c3-46e9-96eb-06433832465c\") " pod="openstack-kuttl-tests/neutron-db-sync-s4glp" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.198794 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-79rxs\" (UniqueName: \"kubernetes.io/projected/d1809d86-a9c3-46e9-96eb-06433832465c-kube-api-access-79rxs\") pod \"neutron-db-sync-s4glp\" (UID: \"d1809d86-a9c3-46e9-96eb-06433832465c\") " pod="openstack-kuttl-tests/neutron-db-sync-s4glp" Jan 20 17:47:11 crc kubenswrapper[4558]: 
I0120 17:47:11.199841 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tb2b6\" (UniqueName: \"kubernetes.io/projected/54a8e29a-d222-49e8-ab57-1aa0afb9648b-kube-api-access-tb2b6\") pod \"placement-db-sync-mr5dl\" (UID: \"54a8e29a-d222-49e8-ab57-1aa0afb9648b\") " pod="openstack-kuttl-tests/placement-db-sync-mr5dl" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.276203 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-db-sync-bdw77" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.287292 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l9zhr\" (UniqueName: \"kubernetes.io/projected/51f6fade-ce82-468f-9a2c-79c733dddeb3-kube-api-access-l9zhr\") pod \"barbican-db-sync-qcsmp\" (UID: \"51f6fade-ce82-468f-9a2c-79c733dddeb3\") " pod="openstack-kuttl-tests/barbican-db-sync-qcsmp" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.287593 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51f6fade-ce82-468f-9a2c-79c733dddeb3-combined-ca-bundle\") pod \"barbican-db-sync-qcsmp\" (UID: \"51f6fade-ce82-468f-9a2c-79c733dddeb3\") " pod="openstack-kuttl-tests/barbican-db-sync-qcsmp" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.287699 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/51f6fade-ce82-468f-9a2c-79c733dddeb3-db-sync-config-data\") pod \"barbican-db-sync-qcsmp\" (UID: \"51f6fade-ce82-468f-9a2c-79c733dddeb3\") " pod="openstack-kuttl-tests/barbican-db-sync-qcsmp" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.293753 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/51f6fade-ce82-468f-9a2c-79c733dddeb3-db-sync-config-data\") pod \"barbican-db-sync-qcsmp\" (UID: \"51f6fade-ce82-468f-9a2c-79c733dddeb3\") " pod="openstack-kuttl-tests/barbican-db-sync-qcsmp" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.304189 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51f6fade-ce82-468f-9a2c-79c733dddeb3-combined-ca-bundle\") pod \"barbican-db-sync-qcsmp\" (UID: \"51f6fade-ce82-468f-9a2c-79c733dddeb3\") " pod="openstack-kuttl-tests/barbican-db-sync-qcsmp" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.307786 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l9zhr\" (UniqueName: \"kubernetes.io/projected/51f6fade-ce82-468f-9a2c-79c733dddeb3-kube-api-access-l9zhr\") pod \"barbican-db-sync-qcsmp\" (UID: \"51f6fade-ce82-468f-9a2c-79c733dddeb3\") " pod="openstack-kuttl-tests/barbican-db-sync-qcsmp" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.373553 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.446896 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-db-sync-s4glp" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.461431 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-db-sync-mr5dl" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.477045 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-db-sync-qcsmp" Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.549842 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-x52px"] Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.590547 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-x52px" event={"ID":"3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6","Type":"ContainerStarted","Data":"9dec1db615ab67091e1b48a1c531f2b6214c4c5eac5da1fb8adc3fe65b3ce9eb"} Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.632872 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:47:11 crc kubenswrapper[4558]: W0120 17:47:11.637315 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod017cf5a8_b216_4c29_8dfc_7b5b6625591c.slice/crio-871ec88e32c2358b816ba9ab4759ab391ed91c802a84720b11986f5aac7e306a WatchSource:0}: Error finding container 871ec88e32c2358b816ba9ab4759ab391ed91c802a84720b11986f5aac7e306a: Status 404 returned error can't find the container with id 871ec88e32c2358b816ba9ab4759ab391ed91c802a84720b11986f5aac7e306a Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.716124 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.838421 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-db-sync-bdw77"] Jan 20 17:47:11 crc kubenswrapper[4558]: W0120 17:47:11.871389 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod20780c8d_f8bc_43c4_84ba_ba6f2e0fe601.slice/crio-4efd9c5fa35355473c826ff73e13b012ccc486010a52a36b100ce509a255794b WatchSource:0}: Error finding container 4efd9c5fa35355473c826ff73e13b012ccc486010a52a36b100ce509a255794b: Status 404 returned error can't find the container with id 4efd9c5fa35355473c826ff73e13b012ccc486010a52a36b100ce509a255794b Jan 20 17:47:11 crc kubenswrapper[4558]: I0120 17:47:11.926473 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:47:11 crc kubenswrapper[4558]: W0120 17:47:11.930302 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod254186c2_67b8_4439_b79d_55f755e4afde.slice/crio-249b49d0f7956b764ff5096c1050490c78801cdcb23621c0741cbaee71788601 WatchSource:0}: Error finding container 249b49d0f7956b764ff5096c1050490c78801cdcb23621c0741cbaee71788601: Status 404 returned error can't find the container with id 249b49d0f7956b764ff5096c1050490c78801cdcb23621c0741cbaee71788601 Jan 20 17:47:12 crc kubenswrapper[4558]: I0120 17:47:12.020546 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-db-sync-qcsmp"] Jan 20 17:47:12 crc kubenswrapper[4558]: I0120 17:47:12.030547 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-db-sync-s4glp"] Jan 20 17:47:12 crc kubenswrapper[4558]: I0120 17:47:12.119218 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack-kuttl-tests/placement-db-sync-mr5dl"] Jan 20 17:47:12 crc kubenswrapper[4558]: I0120 17:47:12.619687 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"f3dc733d-d501-42c3-af4b-ef30ec59ffb4","Type":"ContainerStarted","Data":"8ab5cf79883901250b9f9fe240bedf9e000947263968b7bee03ff5d3c576be65"} Jan 20 17:47:12 crc kubenswrapper[4558]: I0120 17:47:12.620018 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"f3dc733d-d501-42c3-af4b-ef30ec59ffb4","Type":"ContainerStarted","Data":"bd44e243148ea396e1e2af3348ca76508a94082dfdf17d14f5e2045e4c0f955f"} Jan 20 17:47:12 crc kubenswrapper[4558]: I0120 17:47:12.622454 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"254186c2-67b8-4439-b79d-55f755e4afde","Type":"ContainerStarted","Data":"992a83a00edb8e0f4e1fa5fee5e2e985fcceb865ae060702e30119bd021501cf"} Jan 20 17:47:12 crc kubenswrapper[4558]: I0120 17:47:12.622479 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"254186c2-67b8-4439-b79d-55f755e4afde","Type":"ContainerStarted","Data":"249b49d0f7956b764ff5096c1050490c78801cdcb23621c0741cbaee71788601"} Jan 20 17:47:12 crc kubenswrapper[4558]: I0120 17:47:12.624875 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-sync-bdw77" event={"ID":"20780c8d-f8bc-43c4-84ba-ba6f2e0fe601","Type":"ContainerStarted","Data":"a5d3a8ef41b0fbf942dd8d188bb25b5d46afeed4c9868dfa05afacbece3f7157"} Jan 20 17:47:12 crc kubenswrapper[4558]: I0120 17:47:12.624923 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-sync-bdw77" event={"ID":"20780c8d-f8bc-43c4-84ba-ba6f2e0fe601","Type":"ContainerStarted","Data":"4efd9c5fa35355473c826ff73e13b012ccc486010a52a36b100ce509a255794b"} Jan 20 17:47:12 crc kubenswrapper[4558]: I0120 17:47:12.632674 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-sync-mr5dl" event={"ID":"54a8e29a-d222-49e8-ab57-1aa0afb9648b","Type":"ContainerStarted","Data":"074493bc02e61ac25a6b00e8358bebb521ffddbaf6ae6d939715d9c5ccbe54c7"} Jan 20 17:47:12 crc kubenswrapper[4558]: I0120 17:47:12.632725 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-sync-mr5dl" event={"ID":"54a8e29a-d222-49e8-ab57-1aa0afb9648b","Type":"ContainerStarted","Data":"86e0c3dcce877e4cd02f3d074d248b65536c236cf592afcf7e142ed5117f53c7"} Jan 20 17:47:12 crc kubenswrapper[4558]: I0120 17:47:12.634466 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-sync-s4glp" event={"ID":"d1809d86-a9c3-46e9-96eb-06433832465c","Type":"ContainerStarted","Data":"7dd5fbed1bb93ffab4c94f345e8862a47fbcd02213be363bc0764b0ebe5c1f83"} Jan 20 17:47:12 crc kubenswrapper[4558]: I0120 17:47:12.634521 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-sync-s4glp" event={"ID":"d1809d86-a9c3-46e9-96eb-06433832465c","Type":"ContainerStarted","Data":"63baaf3f91e7c83fb214e624652cd0b7a0f8266a6aeb4d11cd75c388eec52d73"} Jan 20 17:47:12 crc kubenswrapper[4558]: I0120 17:47:12.635708 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" 
event={"ID":"017cf5a8-b216-4c29-8dfc-7b5b6625591c","Type":"ContainerStarted","Data":"871ec88e32c2358b816ba9ab4759ab391ed91c802a84720b11986f5aac7e306a"} Jan 20 17:47:12 crc kubenswrapper[4558]: I0120 17:47:12.636988 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-sync-qcsmp" event={"ID":"51f6fade-ce82-468f-9a2c-79c733dddeb3","Type":"ContainerStarted","Data":"94906da2e077fe5e9b0743c97f8ed215bd1787bdb22c85dbc8e9ff7e49407f3a"} Jan 20 17:47:12 crc kubenswrapper[4558]: I0120 17:47:12.637015 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-sync-qcsmp" event={"ID":"51f6fade-ce82-468f-9a2c-79c733dddeb3","Type":"ContainerStarted","Data":"2f6862a3c8ff3c11be404b9813e4e4acee97bfc8125ccbaec1a6f5c163d5b2e6"} Jan 20 17:47:12 crc kubenswrapper[4558]: I0120 17:47:12.646485 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-x52px" event={"ID":"3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6","Type":"ContainerStarted","Data":"7064c70f51c5070bb50a04ba6121a31e6cb8e5af6fce497e461d140997c2e99c"} Jan 20 17:47:12 crc kubenswrapper[4558]: I0120 17:47:12.662413 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/cinder-db-sync-bdw77" podStartSLOduration=2.66239943 podStartE2EDuration="2.66239943s" podCreationTimestamp="2026-01-20 17:47:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:47:12.645922267 +0000 UTC m=+3926.406260234" watchObservedRunningTime="2026-01-20 17:47:12.66239943 +0000 UTC m=+3926.422737398" Jan 20 17:47:12 crc kubenswrapper[4558]: I0120 17:47:12.662495 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-db-sync-qcsmp" podStartSLOduration=1.662491194 podStartE2EDuration="1.662491194s" podCreationTimestamp="2026-01-20 17:47:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:47:12.661371438 +0000 UTC m=+3926.421709405" watchObservedRunningTime="2026-01-20 17:47:12.662491194 +0000 UTC m=+3926.422829161" Jan 20 17:47:12 crc kubenswrapper[4558]: I0120 17:47:12.699127 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/neutron-db-sync-s4glp" podStartSLOduration=2.699107365 podStartE2EDuration="2.699107365s" podCreationTimestamp="2026-01-20 17:47:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:47:12.678415877 +0000 UTC m=+3926.438753845" watchObservedRunningTime="2026-01-20 17:47:12.699107365 +0000 UTC m=+3926.459445332" Jan 20 17:47:12 crc kubenswrapper[4558]: I0120 17:47:12.721094 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/placement-db-sync-mr5dl" podStartSLOduration=2.721074712 podStartE2EDuration="2.721074712s" podCreationTimestamp="2026-01-20 17:47:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:47:12.697809485 +0000 UTC m=+3926.458147452" watchObservedRunningTime="2026-01-20 17:47:12.721074712 +0000 UTC m=+3926.481412699" Jan 20 17:47:12 crc kubenswrapper[4558]: I0120 17:47:12.725777 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack-kuttl-tests/keystone-bootstrap-x52px" podStartSLOduration=2.725768879 podStartE2EDuration="2.725768879s" podCreationTimestamp="2026-01-20 17:47:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:47:12.719896336 +0000 UTC m=+3926.480234304" watchObservedRunningTime="2026-01-20 17:47:12.725768879 +0000 UTC m=+3926.486106846" Jan 20 17:47:12 crc kubenswrapper[4558]: I0120 17:47:12.855385 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:47:12 crc kubenswrapper[4558]: I0120 17:47:12.933794 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:47:12 crc kubenswrapper[4558]: I0120 17:47:12.942288 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:47:13 crc kubenswrapper[4558]: I0120 17:47:13.657687 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"017cf5a8-b216-4c29-8dfc-7b5b6625591c","Type":"ContainerStarted","Data":"39122fe68ccaf23346e6568ce60f5ad96343708ff688ebb76e80c587fa773a90"} Jan 20 17:47:13 crc kubenswrapper[4558]: I0120 17:47:13.658024 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"017cf5a8-b216-4c29-8dfc-7b5b6625591c","Type":"ContainerStarted","Data":"7049e294116c3dfe281795be83dd1fef9c5ce6bd1d8517d4e1c57247c067830c"} Jan 20 17:47:13 crc kubenswrapper[4558]: I0120 17:47:13.659297 4558 generic.go:334] "Generic (PLEG): container finished" podID="51f6fade-ce82-468f-9a2c-79c733dddeb3" containerID="94906da2e077fe5e9b0743c97f8ed215bd1787bdb22c85dbc8e9ff7e49407f3a" exitCode=0 Jan 20 17:47:13 crc kubenswrapper[4558]: I0120 17:47:13.659381 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-sync-qcsmp" event={"ID":"51f6fade-ce82-468f-9a2c-79c733dddeb3","Type":"ContainerDied","Data":"94906da2e077fe5e9b0743c97f8ed215bd1787bdb22c85dbc8e9ff7e49407f3a"} Jan 20 17:47:13 crc kubenswrapper[4558]: I0120 17:47:13.661538 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"f3dc733d-d501-42c3-af4b-ef30ec59ffb4","Type":"ContainerStarted","Data":"4e28944ac6eadad6ab16b254cdee35248432819d2aa21231e1ca5732266ca3fa"} Jan 20 17:47:13 crc kubenswrapper[4558]: I0120 17:47:13.661695 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="f3dc733d-d501-42c3-af4b-ef30ec59ffb4" containerName="glance-log" containerID="cri-o://8ab5cf79883901250b9f9fe240bedf9e000947263968b7bee03ff5d3c576be65" gracePeriod=30 Jan 20 17:47:13 crc kubenswrapper[4558]: I0120 17:47:13.661731 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="f3dc733d-d501-42c3-af4b-ef30ec59ffb4" containerName="glance-httpd" containerID="cri-o://4e28944ac6eadad6ab16b254cdee35248432819d2aa21231e1ca5732266ca3fa" gracePeriod=30 Jan 20 17:47:13 crc kubenswrapper[4558]: I0120 17:47:13.663256 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"254186c2-67b8-4439-b79d-55f755e4afde","Type":"ContainerStarted","Data":"08931ff76f23d212e9354e6cdfcefe519ba76ff2f0f1ce831ac2073ed866628e"} 
Jan 20 17:47:13 crc kubenswrapper[4558]: I0120 17:47:13.663377 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="254186c2-67b8-4439-b79d-55f755e4afde" containerName="glance-log" containerID="cri-o://992a83a00edb8e0f4e1fa5fee5e2e985fcceb865ae060702e30119bd021501cf" gracePeriod=30 Jan 20 17:47:13 crc kubenswrapper[4558]: I0120 17:47:13.663410 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="254186c2-67b8-4439-b79d-55f755e4afde" containerName="glance-httpd" containerID="cri-o://08931ff76f23d212e9354e6cdfcefe519ba76ff2f0f1ce831ac2073ed866628e" gracePeriod=30 Jan 20 17:47:13 crc kubenswrapper[4558]: I0120 17:47:13.668680 4558 generic.go:334] "Generic (PLEG): container finished" podID="54a8e29a-d222-49e8-ab57-1aa0afb9648b" containerID="074493bc02e61ac25a6b00e8358bebb521ffddbaf6ae6d939715d9c5ccbe54c7" exitCode=0 Jan 20 17:47:13 crc kubenswrapper[4558]: I0120 17:47:13.668800 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-sync-mr5dl" event={"ID":"54a8e29a-d222-49e8-ab57-1aa0afb9648b","Type":"ContainerDied","Data":"074493bc02e61ac25a6b00e8358bebb521ffddbaf6ae6d939715d9c5ccbe54c7"} Jan 20 17:47:13 crc kubenswrapper[4558]: I0120 17:47:13.694889 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-internal-api-0" podStartSLOduration=3.694858041 podStartE2EDuration="3.694858041s" podCreationTimestamp="2026-01-20 17:47:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:47:13.692467156 +0000 UTC m=+3927.452805123" watchObservedRunningTime="2026-01-20 17:47:13.694858041 +0000 UTC m=+3927.455196018" Jan 20 17:47:13 crc kubenswrapper[4558]: I0120 17:47:13.711066 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-external-api-0" podStartSLOduration=3.711042735 podStartE2EDuration="3.711042735s" podCreationTimestamp="2026-01-20 17:47:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:47:13.708050127 +0000 UTC m=+3927.468388095" watchObservedRunningTime="2026-01-20 17:47:13.711042735 +0000 UTC m=+3927.471380702" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.405238 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.408743 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.473721 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f3dc733d-d501-42c3-af4b-ef30ec59ffb4-httpd-run\") pod \"f3dc733d-d501-42c3-af4b-ef30ec59ffb4\" (UID: \"f3dc733d-d501-42c3-af4b-ef30ec59ffb4\") " Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.473772 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"254186c2-67b8-4439-b79d-55f755e4afde\" (UID: \"254186c2-67b8-4439-b79d-55f755e4afde\") " Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.473792 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/254186c2-67b8-4439-b79d-55f755e4afde-httpd-run\") pod \"254186c2-67b8-4439-b79d-55f755e4afde\" (UID: \"254186c2-67b8-4439-b79d-55f755e4afde\") " Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.473817 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/254186c2-67b8-4439-b79d-55f755e4afde-internal-tls-certs\") pod \"254186c2-67b8-4439-b79d-55f755e4afde\" (UID: \"254186c2-67b8-4439-b79d-55f755e4afde\") " Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.473864 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"f3dc733d-d501-42c3-af4b-ef30ec59ffb4\" (UID: \"f3dc733d-d501-42c3-af4b-ef30ec59ffb4\") " Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.473882 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/254186c2-67b8-4439-b79d-55f755e4afde-scripts\") pod \"254186c2-67b8-4439-b79d-55f755e4afde\" (UID: \"254186c2-67b8-4439-b79d-55f755e4afde\") " Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.473910 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f3dc733d-d501-42c3-af4b-ef30ec59ffb4-scripts\") pod \"f3dc733d-d501-42c3-af4b-ef30ec59ffb4\" (UID: \"f3dc733d-d501-42c3-af4b-ef30ec59ffb4\") " Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.473925 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3dc733d-d501-42c3-af4b-ef30ec59ffb4-combined-ca-bundle\") pod \"f3dc733d-d501-42c3-af4b-ef30ec59ffb4\" (UID: \"f3dc733d-d501-42c3-af4b-ef30ec59ffb4\") " Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.474022 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f3dc733d-d501-42c3-af4b-ef30ec59ffb4-logs\") pod \"f3dc733d-d501-42c3-af4b-ef30ec59ffb4\" (UID: \"f3dc733d-d501-42c3-af4b-ef30ec59ffb4\") " Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.474047 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nglv6\" (UniqueName: \"kubernetes.io/projected/f3dc733d-d501-42c3-af4b-ef30ec59ffb4-kube-api-access-nglv6\") pod \"f3dc733d-d501-42c3-af4b-ef30ec59ffb4\" (UID: \"f3dc733d-d501-42c3-af4b-ef30ec59ffb4\") " Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 
17:47:14.474114 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f3dc733d-d501-42c3-af4b-ef30ec59ffb4-config-data\") pod \"f3dc733d-d501-42c3-af4b-ef30ec59ffb4\" (UID: \"f3dc733d-d501-42c3-af4b-ef30ec59ffb4\") " Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.474159 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t9fnm\" (UniqueName: \"kubernetes.io/projected/254186c2-67b8-4439-b79d-55f755e4afde-kube-api-access-t9fnm\") pod \"254186c2-67b8-4439-b79d-55f755e4afde\" (UID: \"254186c2-67b8-4439-b79d-55f755e4afde\") " Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.474201 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/254186c2-67b8-4439-b79d-55f755e4afde-combined-ca-bundle\") pod \"254186c2-67b8-4439-b79d-55f755e4afde\" (UID: \"254186c2-67b8-4439-b79d-55f755e4afde\") " Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.474228 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/254186c2-67b8-4439-b79d-55f755e4afde-logs\") pod \"254186c2-67b8-4439-b79d-55f755e4afde\" (UID: \"254186c2-67b8-4439-b79d-55f755e4afde\") " Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.474253 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f3dc733d-d501-42c3-af4b-ef30ec59ffb4-public-tls-certs\") pod \"f3dc733d-d501-42c3-af4b-ef30ec59ffb4\" (UID: \"f3dc733d-d501-42c3-af4b-ef30ec59ffb4\") " Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.474283 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/254186c2-67b8-4439-b79d-55f755e4afde-config-data\") pod \"254186c2-67b8-4439-b79d-55f755e4afde\" (UID: \"254186c2-67b8-4439-b79d-55f755e4afde\") " Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.477039 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/254186c2-67b8-4439-b79d-55f755e4afde-logs" (OuterVolumeSpecName: "logs") pod "254186c2-67b8-4439-b79d-55f755e4afde" (UID: "254186c2-67b8-4439-b79d-55f755e4afde"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.479017 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/254186c2-67b8-4439-b79d-55f755e4afde-kube-api-access-t9fnm" (OuterVolumeSpecName: "kube-api-access-t9fnm") pod "254186c2-67b8-4439-b79d-55f755e4afde" (UID: "254186c2-67b8-4439-b79d-55f755e4afde"). InnerVolumeSpecName "kube-api-access-t9fnm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.479327 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f3dc733d-d501-42c3-af4b-ef30ec59ffb4-logs" (OuterVolumeSpecName: "logs") pod "f3dc733d-d501-42c3-af4b-ef30ec59ffb4" (UID: "f3dc733d-d501-42c3-af4b-ef30ec59ffb4"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.489516 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/254186c2-67b8-4439-b79d-55f755e4afde-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "254186c2-67b8-4439-b79d-55f755e4afde" (UID: "254186c2-67b8-4439-b79d-55f755e4afde"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.489636 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f3dc733d-d501-42c3-af4b-ef30ec59ffb4-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "f3dc733d-d501-42c3-af4b-ef30ec59ffb4" (UID: "f3dc733d-d501-42c3-af4b-ef30ec59ffb4"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.494416 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "glance") pod "254186c2-67b8-4439-b79d-55f755e4afde" (UID: "254186c2-67b8-4439-b79d-55f755e4afde"). InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.494605 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f3dc733d-d501-42c3-af4b-ef30ec59ffb4-kube-api-access-nglv6" (OuterVolumeSpecName: "kube-api-access-nglv6") pod "f3dc733d-d501-42c3-af4b-ef30ec59ffb4" (UID: "f3dc733d-d501-42c3-af4b-ef30ec59ffb4"). InnerVolumeSpecName "kube-api-access-nglv6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.501402 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "glance") pod "f3dc733d-d501-42c3-af4b-ef30ec59ffb4" (UID: "f3dc733d-d501-42c3-af4b-ef30ec59ffb4"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.508648 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f3dc733d-d501-42c3-af4b-ef30ec59ffb4-scripts" (OuterVolumeSpecName: "scripts") pod "f3dc733d-d501-42c3-af4b-ef30ec59ffb4" (UID: "f3dc733d-d501-42c3-af4b-ef30ec59ffb4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.509502 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/254186c2-67b8-4439-b79d-55f755e4afde-scripts" (OuterVolumeSpecName: "scripts") pod "254186c2-67b8-4439-b79d-55f755e4afde" (UID: "254186c2-67b8-4439-b79d-55f755e4afde"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.541028 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/254186c2-67b8-4439-b79d-55f755e4afde-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "254186c2-67b8-4439-b79d-55f755e4afde" (UID: "254186c2-67b8-4439-b79d-55f755e4afde"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.576656 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.576688 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/254186c2-67b8-4439-b79d-55f755e4afde-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.576698 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f3dc733d-d501-42c3-af4b-ef30ec59ffb4-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.576708 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f3dc733d-d501-42c3-af4b-ef30ec59ffb4-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.576718 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nglv6\" (UniqueName: \"kubernetes.io/projected/f3dc733d-d501-42c3-af4b-ef30ec59ffb4-kube-api-access-nglv6\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.576728 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t9fnm\" (UniqueName: \"kubernetes.io/projected/254186c2-67b8-4439-b79d-55f755e4afde-kube-api-access-t9fnm\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.576737 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/254186c2-67b8-4439-b79d-55f755e4afde-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.576745 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/254186c2-67b8-4439-b79d-55f755e4afde-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.576755 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f3dc733d-d501-42c3-af4b-ef30ec59ffb4-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.576768 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.576777 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/254186c2-67b8-4439-b79d-55f755e4afde-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.574563 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/254186c2-67b8-4439-b79d-55f755e4afde-config-data" (OuterVolumeSpecName: "config-data") pod "254186c2-67b8-4439-b79d-55f755e4afde" (UID: "254186c2-67b8-4439-b79d-55f755e4afde"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.588646 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f3dc733d-d501-42c3-af4b-ef30ec59ffb4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f3dc733d-d501-42c3-af4b-ef30ec59ffb4" (UID: "f3dc733d-d501-42c3-af4b-ef30ec59ffb4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.595107 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.596681 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.606432 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/254186c2-67b8-4439-b79d-55f755e4afde-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "254186c2-67b8-4439-b79d-55f755e4afde" (UID: "254186c2-67b8-4439-b79d-55f755e4afde"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.610255 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f3dc733d-d501-42c3-af4b-ef30ec59ffb4-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "f3dc733d-d501-42c3-af4b-ef30ec59ffb4" (UID: "f3dc733d-d501-42c3-af4b-ef30ec59ffb4"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.612287 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f3dc733d-d501-42c3-af4b-ef30ec59ffb4-config-data" (OuterVolumeSpecName: "config-data") pod "f3dc733d-d501-42c3-af4b-ef30ec59ffb4" (UID: "f3dc733d-d501-42c3-af4b-ef30ec59ffb4"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.678597 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.678907 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/254186c2-67b8-4439-b79d-55f755e4afde-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.678920 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.678930 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3dc733d-d501-42c3-af4b-ef30ec59ffb4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.678945 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f3dc733d-d501-42c3-af4b-ef30ec59ffb4-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.678957 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f3dc733d-d501-42c3-af4b-ef30ec59ffb4-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.678966 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/254186c2-67b8-4439-b79d-55f755e4afde-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.684863 4558 generic.go:334] "Generic (PLEG): container finished" podID="254186c2-67b8-4439-b79d-55f755e4afde" containerID="08931ff76f23d212e9354e6cdfcefe519ba76ff2f0f1ce831ac2073ed866628e" exitCode=0 Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.684902 4558 generic.go:334] "Generic (PLEG): container finished" podID="254186c2-67b8-4439-b79d-55f755e4afde" containerID="992a83a00edb8e0f4e1fa5fee5e2e985fcceb865ae060702e30119bd021501cf" exitCode=143 Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.684949 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"254186c2-67b8-4439-b79d-55f755e4afde","Type":"ContainerDied","Data":"08931ff76f23d212e9354e6cdfcefe519ba76ff2f0f1ce831ac2073ed866628e"} Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.685005 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"254186c2-67b8-4439-b79d-55f755e4afde","Type":"ContainerDied","Data":"992a83a00edb8e0f4e1fa5fee5e2e985fcceb865ae060702e30119bd021501cf"} Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.685018 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"254186c2-67b8-4439-b79d-55f755e4afde","Type":"ContainerDied","Data":"249b49d0f7956b764ff5096c1050490c78801cdcb23621c0741cbaee71788601"} Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.685039 4558 scope.go:117] "RemoveContainer" containerID="08931ff76f23d212e9354e6cdfcefe519ba76ff2f0f1ce831ac2073ed866628e" Jan 20 
17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.685019 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.700432 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"017cf5a8-b216-4c29-8dfc-7b5b6625591c","Type":"ContainerStarted","Data":"c54b38a6355b94b185e84a1db7075c6f9bf63a358c709e1f336a4f64b4cea596"} Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.705562 4558 generic.go:334] "Generic (PLEG): container finished" podID="f3dc733d-d501-42c3-af4b-ef30ec59ffb4" containerID="4e28944ac6eadad6ab16b254cdee35248432819d2aa21231e1ca5732266ca3fa" exitCode=0 Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.705589 4558 generic.go:334] "Generic (PLEG): container finished" podID="f3dc733d-d501-42c3-af4b-ef30ec59ffb4" containerID="8ab5cf79883901250b9f9fe240bedf9e000947263968b7bee03ff5d3c576be65" exitCode=143 Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.705615 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"f3dc733d-d501-42c3-af4b-ef30ec59ffb4","Type":"ContainerDied","Data":"4e28944ac6eadad6ab16b254cdee35248432819d2aa21231e1ca5732266ca3fa"} Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.705668 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.705678 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"f3dc733d-d501-42c3-af4b-ef30ec59ffb4","Type":"ContainerDied","Data":"8ab5cf79883901250b9f9fe240bedf9e000947263968b7bee03ff5d3c576be65"} Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.705723 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"f3dc733d-d501-42c3-af4b-ef30ec59ffb4","Type":"ContainerDied","Data":"bd44e243148ea396e1e2af3348ca76508a94082dfdf17d14f5e2045e4c0f955f"} Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.717884 4558 scope.go:117] "RemoveContainer" containerID="992a83a00edb8e0f4e1fa5fee5e2e985fcceb865ae060702e30119bd021501cf" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.717997 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.725983 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.739564 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.754950 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.767210 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:47:14 crc kubenswrapper[4558]: E0120 17:47:14.767631 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="254186c2-67b8-4439-b79d-55f755e4afde" containerName="glance-httpd" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.767654 4558 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="254186c2-67b8-4439-b79d-55f755e4afde" containerName="glance-httpd" Jan 20 17:47:14 crc kubenswrapper[4558]: E0120 17:47:14.767679 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3dc733d-d501-42c3-af4b-ef30ec59ffb4" containerName="glance-log" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.767687 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3dc733d-d501-42c3-af4b-ef30ec59ffb4" containerName="glance-log" Jan 20 17:47:14 crc kubenswrapper[4558]: E0120 17:47:14.767708 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3dc733d-d501-42c3-af4b-ef30ec59ffb4" containerName="glance-httpd" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.767714 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3dc733d-d501-42c3-af4b-ef30ec59ffb4" containerName="glance-httpd" Jan 20 17:47:14 crc kubenswrapper[4558]: E0120 17:47:14.767726 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="254186c2-67b8-4439-b79d-55f755e4afde" containerName="glance-log" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.767732 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="254186c2-67b8-4439-b79d-55f755e4afde" containerName="glance-log" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.767894 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="254186c2-67b8-4439-b79d-55f755e4afde" containerName="glance-httpd" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.767911 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f3dc733d-d501-42c3-af4b-ef30ec59ffb4" containerName="glance-log" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.767918 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="254186c2-67b8-4439-b79d-55f755e4afde" containerName="glance-log" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.767928 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f3dc733d-d501-42c3-af4b-ef30ec59ffb4" containerName="glance-httpd" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.768766 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.773547 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-scripts" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.773581 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-internal-svc" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.773925 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-glance-dockercfg-v4jnh" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.774330 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-internal-config-data" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.781050 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.785079 4558 scope.go:117] "RemoveContainer" containerID="08931ff76f23d212e9354e6cdfcefe519ba76ff2f0f1ce831ac2073ed866628e" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.785244 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.788014 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-external-config-data" Jan 20 17:47:14 crc kubenswrapper[4558]: E0120 17:47:14.788207 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"08931ff76f23d212e9354e6cdfcefe519ba76ff2f0f1ce831ac2073ed866628e\": container with ID starting with 08931ff76f23d212e9354e6cdfcefe519ba76ff2f0f1ce831ac2073ed866628e not found: ID does not exist" containerID="08931ff76f23d212e9354e6cdfcefe519ba76ff2f0f1ce831ac2073ed866628e" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.788233 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"08931ff76f23d212e9354e6cdfcefe519ba76ff2f0f1ce831ac2073ed866628e"} err="failed to get container status \"08931ff76f23d212e9354e6cdfcefe519ba76ff2f0f1ce831ac2073ed866628e\": rpc error: code = NotFound desc = could not find container \"08931ff76f23d212e9354e6cdfcefe519ba76ff2f0f1ce831ac2073ed866628e\": container with ID starting with 08931ff76f23d212e9354e6cdfcefe519ba76ff2f0f1ce831ac2073ed866628e not found: ID does not exist" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.788253 4558 scope.go:117] "RemoveContainer" containerID="992a83a00edb8e0f4e1fa5fee5e2e985fcceb865ae060702e30119bd021501cf" Jan 20 17:47:14 crc kubenswrapper[4558]: E0120 17:47:14.794535 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"992a83a00edb8e0f4e1fa5fee5e2e985fcceb865ae060702e30119bd021501cf\": container with ID starting with 992a83a00edb8e0f4e1fa5fee5e2e985fcceb865ae060702e30119bd021501cf not found: ID does not exist" containerID="992a83a00edb8e0f4e1fa5fee5e2e985fcceb865ae060702e30119bd021501cf" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.794569 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"992a83a00edb8e0f4e1fa5fee5e2e985fcceb865ae060702e30119bd021501cf"} err="failed to get container status \"992a83a00edb8e0f4e1fa5fee5e2e985fcceb865ae060702e30119bd021501cf\": rpc error: code = NotFound desc = could not find container \"992a83a00edb8e0f4e1fa5fee5e2e985fcceb865ae060702e30119bd021501cf\": container with ID starting with 992a83a00edb8e0f4e1fa5fee5e2e985fcceb865ae060702e30119bd021501cf not found: ID does not exist" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.794594 4558 scope.go:117] "RemoveContainer" containerID="08931ff76f23d212e9354e6cdfcefe519ba76ff2f0f1ce831ac2073ed866628e" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.794608 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-public-svc" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.798727 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"08931ff76f23d212e9354e6cdfcefe519ba76ff2f0f1ce831ac2073ed866628e"} err="failed to get container status \"08931ff76f23d212e9354e6cdfcefe519ba76ff2f0f1ce831ac2073ed866628e\": rpc error: code = NotFound desc = could not find container \"08931ff76f23d212e9354e6cdfcefe519ba76ff2f0f1ce831ac2073ed866628e\": container with ID starting with 08931ff76f23d212e9354e6cdfcefe519ba76ff2f0f1ce831ac2073ed866628e not found: ID does not exist" Jan 20 17:47:14 crc 
kubenswrapper[4558]: I0120 17:47:14.798767 4558 scope.go:117] "RemoveContainer" containerID="992a83a00edb8e0f4e1fa5fee5e2e985fcceb865ae060702e30119bd021501cf" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.803530 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"992a83a00edb8e0f4e1fa5fee5e2e985fcceb865ae060702e30119bd021501cf"} err="failed to get container status \"992a83a00edb8e0f4e1fa5fee5e2e985fcceb865ae060702e30119bd021501cf\": rpc error: code = NotFound desc = could not find container \"992a83a00edb8e0f4e1fa5fee5e2e985fcceb865ae060702e30119bd021501cf\": container with ID starting with 992a83a00edb8e0f4e1fa5fee5e2e985fcceb865ae060702e30119bd021501cf not found: ID does not exist" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.803560 4558 scope.go:117] "RemoveContainer" containerID="4e28944ac6eadad6ab16b254cdee35248432819d2aa21231e1ca5732266ca3fa" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.805457 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.824044 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.879385 4558 scope.go:117] "RemoveContainer" containerID="8ab5cf79883901250b9f9fe240bedf9e000947263968b7bee03ff5d3c576be65" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.883700 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/05f4b910-ff7c-4039-8ff7-24687eea5e74-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"05f4b910-ff7c-4039-8ff7-24687eea5e74\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.883788 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d3c4b74b-cd31-4245-91f0-9718f8896f61-logs\") pod \"glance-default-internal-api-0\" (UID: \"d3c4b74b-cd31-4245-91f0-9718f8896f61\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.883899 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d3c4b74b-cd31-4245-91f0-9718f8896f61-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"d3c4b74b-cd31-4245-91f0-9718f8896f61\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.883971 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d3c4b74b-cd31-4245-91f0-9718f8896f61-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"d3c4b74b-cd31-4245-91f0-9718f8896f61\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.883997 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3c4b74b-cd31-4245-91f0-9718f8896f61-config-data\") pod \"glance-default-internal-api-0\" (UID: \"d3c4b74b-cd31-4245-91f0-9718f8896f61\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 
17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.884018 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/05f4b910-ff7c-4039-8ff7-24687eea5e74-scripts\") pod \"glance-default-external-api-0\" (UID: \"05f4b910-ff7c-4039-8ff7-24687eea5e74\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.884036 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/05f4b910-ff7c-4039-8ff7-24687eea5e74-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"05f4b910-ff7c-4039-8ff7-24687eea5e74\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.884069 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qqkgb\" (UniqueName: \"kubernetes.io/projected/d3c4b74b-cd31-4245-91f0-9718f8896f61-kube-api-access-qqkgb\") pod \"glance-default-internal-api-0\" (UID: \"d3c4b74b-cd31-4245-91f0-9718f8896f61\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.884233 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05f4b910-ff7c-4039-8ff7-24687eea5e74-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"05f4b910-ff7c-4039-8ff7-24687eea5e74\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.884300 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05f4b910-ff7c-4039-8ff7-24687eea5e74-config-data\") pod \"glance-default-external-api-0\" (UID: \"05f4b910-ff7c-4039-8ff7-24687eea5e74\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.884336 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"05f4b910-ff7c-4039-8ff7-24687eea5e74\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.884363 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f5mnj\" (UniqueName: \"kubernetes.io/projected/05f4b910-ff7c-4039-8ff7-24687eea5e74-kube-api-access-f5mnj\") pod \"glance-default-external-api-0\" (UID: \"05f4b910-ff7c-4039-8ff7-24687eea5e74\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.884396 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"d3c4b74b-cd31-4245-91f0-9718f8896f61\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.884410 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/d3c4b74b-cd31-4245-91f0-9718f8896f61-scripts\") pod \"glance-default-internal-api-0\" (UID: \"d3c4b74b-cd31-4245-91f0-9718f8896f61\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.884436 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/05f4b910-ff7c-4039-8ff7-24687eea5e74-logs\") pod \"glance-default-external-api-0\" (UID: \"05f4b910-ff7c-4039-8ff7-24687eea5e74\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.884467 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3c4b74b-cd31-4245-91f0-9718f8896f61-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"d3c4b74b-cd31-4245-91f0-9718f8896f61\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.907182 4558 scope.go:117] "RemoveContainer" containerID="4e28944ac6eadad6ab16b254cdee35248432819d2aa21231e1ca5732266ca3fa" Jan 20 17:47:14 crc kubenswrapper[4558]: E0120 17:47:14.909650 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4e28944ac6eadad6ab16b254cdee35248432819d2aa21231e1ca5732266ca3fa\": container with ID starting with 4e28944ac6eadad6ab16b254cdee35248432819d2aa21231e1ca5732266ca3fa not found: ID does not exist" containerID="4e28944ac6eadad6ab16b254cdee35248432819d2aa21231e1ca5732266ca3fa" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.909702 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4e28944ac6eadad6ab16b254cdee35248432819d2aa21231e1ca5732266ca3fa"} err="failed to get container status \"4e28944ac6eadad6ab16b254cdee35248432819d2aa21231e1ca5732266ca3fa\": rpc error: code = NotFound desc = could not find container \"4e28944ac6eadad6ab16b254cdee35248432819d2aa21231e1ca5732266ca3fa\": container with ID starting with 4e28944ac6eadad6ab16b254cdee35248432819d2aa21231e1ca5732266ca3fa not found: ID does not exist" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.909742 4558 scope.go:117] "RemoveContainer" containerID="8ab5cf79883901250b9f9fe240bedf9e000947263968b7bee03ff5d3c576be65" Jan 20 17:47:14 crc kubenswrapper[4558]: E0120 17:47:14.910249 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8ab5cf79883901250b9f9fe240bedf9e000947263968b7bee03ff5d3c576be65\": container with ID starting with 8ab5cf79883901250b9f9fe240bedf9e000947263968b7bee03ff5d3c576be65 not found: ID does not exist" containerID="8ab5cf79883901250b9f9fe240bedf9e000947263968b7bee03ff5d3c576be65" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.910280 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8ab5cf79883901250b9f9fe240bedf9e000947263968b7bee03ff5d3c576be65"} err="failed to get container status \"8ab5cf79883901250b9f9fe240bedf9e000947263968b7bee03ff5d3c576be65\": rpc error: code = NotFound desc = could not find container \"8ab5cf79883901250b9f9fe240bedf9e000947263968b7bee03ff5d3c576be65\": container with ID starting with 8ab5cf79883901250b9f9fe240bedf9e000947263968b7bee03ff5d3c576be65 not found: ID does not exist" Jan 20 17:47:14 crc 
kubenswrapper[4558]: I0120 17:47:14.910297 4558 scope.go:117] "RemoveContainer" containerID="4e28944ac6eadad6ab16b254cdee35248432819d2aa21231e1ca5732266ca3fa" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.910632 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4e28944ac6eadad6ab16b254cdee35248432819d2aa21231e1ca5732266ca3fa"} err="failed to get container status \"4e28944ac6eadad6ab16b254cdee35248432819d2aa21231e1ca5732266ca3fa\": rpc error: code = NotFound desc = could not find container \"4e28944ac6eadad6ab16b254cdee35248432819d2aa21231e1ca5732266ca3fa\": container with ID starting with 4e28944ac6eadad6ab16b254cdee35248432819d2aa21231e1ca5732266ca3fa not found: ID does not exist" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.910656 4558 scope.go:117] "RemoveContainer" containerID="8ab5cf79883901250b9f9fe240bedf9e000947263968b7bee03ff5d3c576be65" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.910954 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8ab5cf79883901250b9f9fe240bedf9e000947263968b7bee03ff5d3c576be65"} err="failed to get container status \"8ab5cf79883901250b9f9fe240bedf9e000947263968b7bee03ff5d3c576be65\": rpc error: code = NotFound desc = could not find container \"8ab5cf79883901250b9f9fe240bedf9e000947263968b7bee03ff5d3c576be65\": container with ID starting with 8ab5cf79883901250b9f9fe240bedf9e000947263968b7bee03ff5d3c576be65 not found: ID does not exist" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.987839 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05f4b910-ff7c-4039-8ff7-24687eea5e74-config-data\") pod \"glance-default-external-api-0\" (UID: \"05f4b910-ff7c-4039-8ff7-24687eea5e74\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.987898 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"05f4b910-ff7c-4039-8ff7-24687eea5e74\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.987924 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f5mnj\" (UniqueName: \"kubernetes.io/projected/05f4b910-ff7c-4039-8ff7-24687eea5e74-kube-api-access-f5mnj\") pod \"glance-default-external-api-0\" (UID: \"05f4b910-ff7c-4039-8ff7-24687eea5e74\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.987987 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"d3c4b74b-cd31-4245-91f0-9718f8896f61\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.988009 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d3c4b74b-cd31-4245-91f0-9718f8896f61-scripts\") pod \"glance-default-internal-api-0\" (UID: \"d3c4b74b-cd31-4245-91f0-9718f8896f61\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.988035 4558 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/05f4b910-ff7c-4039-8ff7-24687eea5e74-logs\") pod \"glance-default-external-api-0\" (UID: \"05f4b910-ff7c-4039-8ff7-24687eea5e74\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.988062 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3c4b74b-cd31-4245-91f0-9718f8896f61-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"d3c4b74b-cd31-4245-91f0-9718f8896f61\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.988091 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/05f4b910-ff7c-4039-8ff7-24687eea5e74-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"05f4b910-ff7c-4039-8ff7-24687eea5e74\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.988136 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d3c4b74b-cd31-4245-91f0-9718f8896f61-logs\") pod \"glance-default-internal-api-0\" (UID: \"d3c4b74b-cd31-4245-91f0-9718f8896f61\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.988185 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d3c4b74b-cd31-4245-91f0-9718f8896f61-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"d3c4b74b-cd31-4245-91f0-9718f8896f61\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.988210 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d3c4b74b-cd31-4245-91f0-9718f8896f61-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"d3c4b74b-cd31-4245-91f0-9718f8896f61\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.988229 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3c4b74b-cd31-4245-91f0-9718f8896f61-config-data\") pod \"glance-default-internal-api-0\" (UID: \"d3c4b74b-cd31-4245-91f0-9718f8896f61\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.988246 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/05f4b910-ff7c-4039-8ff7-24687eea5e74-scripts\") pod \"glance-default-external-api-0\" (UID: \"05f4b910-ff7c-4039-8ff7-24687eea5e74\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.988261 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/05f4b910-ff7c-4039-8ff7-24687eea5e74-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"05f4b910-ff7c-4039-8ff7-24687eea5e74\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.988284 4558 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qqkgb\" (UniqueName: \"kubernetes.io/projected/d3c4b74b-cd31-4245-91f0-9718f8896f61-kube-api-access-qqkgb\") pod \"glance-default-internal-api-0\" (UID: \"d3c4b74b-cd31-4245-91f0-9718f8896f61\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.988328 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05f4b910-ff7c-4039-8ff7-24687eea5e74-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"05f4b910-ff7c-4039-8ff7-24687eea5e74\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.990505 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"05f4b910-ff7c-4039-8ff7-24687eea5e74\") device mount path \"/mnt/openstack/pv09\"" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.990556 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"d3c4b74b-cd31-4245-91f0-9718f8896f61\") device mount path \"/mnt/openstack/pv11\"" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.990860 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d3c4b74b-cd31-4245-91f0-9718f8896f61-logs\") pod \"glance-default-internal-api-0\" (UID: \"d3c4b74b-cd31-4245-91f0-9718f8896f61\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.992298 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/05f4b910-ff7c-4039-8ff7-24687eea5e74-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"05f4b910-ff7c-4039-8ff7-24687eea5e74\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.992629 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d3c4b74b-cd31-4245-91f0-9718f8896f61-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"d3c4b74b-cd31-4245-91f0-9718f8896f61\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.992878 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/05f4b910-ff7c-4039-8ff7-24687eea5e74-logs\") pod \"glance-default-external-api-0\" (UID: \"05f4b910-ff7c-4039-8ff7-24687eea5e74\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.998027 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/05f4b910-ff7c-4039-8ff7-24687eea5e74-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"05f4b910-ff7c-4039-8ff7-24687eea5e74\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:47:14 crc kubenswrapper[4558]: 
I0120 17:47:14.999178 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d3c4b74b-cd31-4245-91f0-9718f8896f61-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"d3c4b74b-cd31-4245-91f0-9718f8896f61\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:47:14 crc kubenswrapper[4558]: I0120 17:47:14.999245 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3c4b74b-cd31-4245-91f0-9718f8896f61-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"d3c4b74b-cd31-4245-91f0-9718f8896f61\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:47:15 crc kubenswrapper[4558]: I0120 17:47:14.999702 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3c4b74b-cd31-4245-91f0-9718f8896f61-config-data\") pod \"glance-default-internal-api-0\" (UID: \"d3c4b74b-cd31-4245-91f0-9718f8896f61\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:47:15 crc kubenswrapper[4558]: I0120 17:47:15.005695 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/05f4b910-ff7c-4039-8ff7-24687eea5e74-scripts\") pod \"glance-default-external-api-0\" (UID: \"05f4b910-ff7c-4039-8ff7-24687eea5e74\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:47:15 crc kubenswrapper[4558]: I0120 17:47:15.006194 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05f4b910-ff7c-4039-8ff7-24687eea5e74-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"05f4b910-ff7c-4039-8ff7-24687eea5e74\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:47:15 crc kubenswrapper[4558]: I0120 17:47:15.007370 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d3c4b74b-cd31-4245-91f0-9718f8896f61-scripts\") pod \"glance-default-internal-api-0\" (UID: \"d3c4b74b-cd31-4245-91f0-9718f8896f61\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:47:15 crc kubenswrapper[4558]: I0120 17:47:15.010548 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05f4b910-ff7c-4039-8ff7-24687eea5e74-config-data\") pod \"glance-default-external-api-0\" (UID: \"05f4b910-ff7c-4039-8ff7-24687eea5e74\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:47:15 crc kubenswrapper[4558]: I0120 17:47:15.011533 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f5mnj\" (UniqueName: \"kubernetes.io/projected/05f4b910-ff7c-4039-8ff7-24687eea5e74-kube-api-access-f5mnj\") pod \"glance-default-external-api-0\" (UID: \"05f4b910-ff7c-4039-8ff7-24687eea5e74\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:47:15 crc kubenswrapper[4558]: I0120 17:47:15.016875 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qqkgb\" (UniqueName: \"kubernetes.io/projected/d3c4b74b-cd31-4245-91f0-9718f8896f61-kube-api-access-qqkgb\") pod \"glance-default-internal-api-0\" (UID: \"d3c4b74b-cd31-4245-91f0-9718f8896f61\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:47:15 crc kubenswrapper[4558]: I0120 
17:47:15.052735 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"05f4b910-ff7c-4039-8ff7-24687eea5e74\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:47:15 crc kubenswrapper[4558]: I0120 17:47:15.054908 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"d3c4b74b-cd31-4245-91f0-9718f8896f61\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:47:15 crc kubenswrapper[4558]: I0120 17:47:15.105379 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:47:15 crc kubenswrapper[4558]: I0120 17:47:15.120502 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:47:15 crc kubenswrapper[4558]: I0120 17:47:15.254948 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-db-sync-mr5dl" Jan 20 17:47:15 crc kubenswrapper[4558]: I0120 17:47:15.259267 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-db-sync-qcsmp" Jan 20 17:47:15 crc kubenswrapper[4558]: I0120 17:47:15.293417 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51f6fade-ce82-468f-9a2c-79c733dddeb3-combined-ca-bundle\") pod \"51f6fade-ce82-468f-9a2c-79c733dddeb3\" (UID: \"51f6fade-ce82-468f-9a2c-79c733dddeb3\") " Jan 20 17:47:15 crc kubenswrapper[4558]: I0120 17:47:15.293506 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54a8e29a-d222-49e8-ab57-1aa0afb9648b-combined-ca-bundle\") pod \"54a8e29a-d222-49e8-ab57-1aa0afb9648b\" (UID: \"54a8e29a-d222-49e8-ab57-1aa0afb9648b\") " Jan 20 17:47:15 crc kubenswrapper[4558]: I0120 17:47:15.293566 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/54a8e29a-d222-49e8-ab57-1aa0afb9648b-scripts\") pod \"54a8e29a-d222-49e8-ab57-1aa0afb9648b\" (UID: \"54a8e29a-d222-49e8-ab57-1aa0afb9648b\") " Jan 20 17:47:15 crc kubenswrapper[4558]: I0120 17:47:15.293681 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l9zhr\" (UniqueName: \"kubernetes.io/projected/51f6fade-ce82-468f-9a2c-79c733dddeb3-kube-api-access-l9zhr\") pod \"51f6fade-ce82-468f-9a2c-79c733dddeb3\" (UID: \"51f6fade-ce82-468f-9a2c-79c733dddeb3\") " Jan 20 17:47:15 crc kubenswrapper[4558]: I0120 17:47:15.293730 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/54a8e29a-d222-49e8-ab57-1aa0afb9648b-logs\") pod \"54a8e29a-d222-49e8-ab57-1aa0afb9648b\" (UID: \"54a8e29a-d222-49e8-ab57-1aa0afb9648b\") " Jan 20 17:47:15 crc kubenswrapper[4558]: I0120 17:47:15.293761 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54a8e29a-d222-49e8-ab57-1aa0afb9648b-config-data\") pod \"54a8e29a-d222-49e8-ab57-1aa0afb9648b\" (UID: 
\"54a8e29a-d222-49e8-ab57-1aa0afb9648b\") " Jan 20 17:47:15 crc kubenswrapper[4558]: I0120 17:47:15.293809 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/51f6fade-ce82-468f-9a2c-79c733dddeb3-db-sync-config-data\") pod \"51f6fade-ce82-468f-9a2c-79c733dddeb3\" (UID: \"51f6fade-ce82-468f-9a2c-79c733dddeb3\") " Jan 20 17:47:15 crc kubenswrapper[4558]: I0120 17:47:15.293830 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tb2b6\" (UniqueName: \"kubernetes.io/projected/54a8e29a-d222-49e8-ab57-1aa0afb9648b-kube-api-access-tb2b6\") pod \"54a8e29a-d222-49e8-ab57-1aa0afb9648b\" (UID: \"54a8e29a-d222-49e8-ab57-1aa0afb9648b\") " Jan 20 17:47:15 crc kubenswrapper[4558]: I0120 17:47:15.294637 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/54a8e29a-d222-49e8-ab57-1aa0afb9648b-logs" (OuterVolumeSpecName: "logs") pod "54a8e29a-d222-49e8-ab57-1aa0afb9648b" (UID: "54a8e29a-d222-49e8-ab57-1aa0afb9648b"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:47:15 crc kubenswrapper[4558]: I0120 17:47:15.302757 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/51f6fade-ce82-468f-9a2c-79c733dddeb3-kube-api-access-l9zhr" (OuterVolumeSpecName: "kube-api-access-l9zhr") pod "51f6fade-ce82-468f-9a2c-79c733dddeb3" (UID: "51f6fade-ce82-468f-9a2c-79c733dddeb3"). InnerVolumeSpecName "kube-api-access-l9zhr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:47:15 crc kubenswrapper[4558]: I0120 17:47:15.305300 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54a8e29a-d222-49e8-ab57-1aa0afb9648b-scripts" (OuterVolumeSpecName: "scripts") pod "54a8e29a-d222-49e8-ab57-1aa0afb9648b" (UID: "54a8e29a-d222-49e8-ab57-1aa0afb9648b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:47:15 crc kubenswrapper[4558]: I0120 17:47:15.305333 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/51f6fade-ce82-468f-9a2c-79c733dddeb3-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "51f6fade-ce82-468f-9a2c-79c733dddeb3" (UID: "51f6fade-ce82-468f-9a2c-79c733dddeb3"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:47:15 crc kubenswrapper[4558]: I0120 17:47:15.305357 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/54a8e29a-d222-49e8-ab57-1aa0afb9648b-kube-api-access-tb2b6" (OuterVolumeSpecName: "kube-api-access-tb2b6") pod "54a8e29a-d222-49e8-ab57-1aa0afb9648b" (UID: "54a8e29a-d222-49e8-ab57-1aa0afb9648b"). InnerVolumeSpecName "kube-api-access-tb2b6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:47:15 crc kubenswrapper[4558]: I0120 17:47:15.320070 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54a8e29a-d222-49e8-ab57-1aa0afb9648b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "54a8e29a-d222-49e8-ab57-1aa0afb9648b" (UID: "54a8e29a-d222-49e8-ab57-1aa0afb9648b"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:47:15 crc kubenswrapper[4558]: I0120 17:47:15.325876 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/51f6fade-ce82-468f-9a2c-79c733dddeb3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "51f6fade-ce82-468f-9a2c-79c733dddeb3" (UID: "51f6fade-ce82-468f-9a2c-79c733dddeb3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:47:15 crc kubenswrapper[4558]: I0120 17:47:15.326279 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54a8e29a-d222-49e8-ab57-1aa0afb9648b-config-data" (OuterVolumeSpecName: "config-data") pod "54a8e29a-d222-49e8-ab57-1aa0afb9648b" (UID: "54a8e29a-d222-49e8-ab57-1aa0afb9648b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:47:15 crc kubenswrapper[4558]: I0120 17:47:15.396073 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l9zhr\" (UniqueName: \"kubernetes.io/projected/51f6fade-ce82-468f-9a2c-79c733dddeb3-kube-api-access-l9zhr\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:15 crc kubenswrapper[4558]: I0120 17:47:15.396098 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/54a8e29a-d222-49e8-ab57-1aa0afb9648b-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:15 crc kubenswrapper[4558]: I0120 17:47:15.396108 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54a8e29a-d222-49e8-ab57-1aa0afb9648b-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:15 crc kubenswrapper[4558]: I0120 17:47:15.396121 4558 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/51f6fade-ce82-468f-9a2c-79c733dddeb3-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:15 crc kubenswrapper[4558]: I0120 17:47:15.396147 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tb2b6\" (UniqueName: \"kubernetes.io/projected/54a8e29a-d222-49e8-ab57-1aa0afb9648b-kube-api-access-tb2b6\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:15 crc kubenswrapper[4558]: I0120 17:47:15.396156 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51f6fade-ce82-468f-9a2c-79c733dddeb3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:15 crc kubenswrapper[4558]: I0120 17:47:15.396187 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54a8e29a-d222-49e8-ab57-1aa0afb9648b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:15 crc kubenswrapper[4558]: I0120 17:47:15.396196 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/54a8e29a-d222-49e8-ab57-1aa0afb9648b-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:15 crc kubenswrapper[4558]: I0120 17:47:15.553925 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:47:15 crc kubenswrapper[4558]: I0120 17:47:15.679174 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:47:15 crc kubenswrapper[4558]: I0120 17:47:15.718545 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"05f4b910-ff7c-4039-8ff7-24687eea5e74","Type":"ContainerStarted","Data":"caf46ce863cd2a09dadc3e1dcdc49882e5406d35f06c7fa3ca4e64eaaee261df"} Jan 20 17:47:15 crc kubenswrapper[4558]: I0120 17:47:15.721994 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-sync-mr5dl" event={"ID":"54a8e29a-d222-49e8-ab57-1aa0afb9648b","Type":"ContainerDied","Data":"86e0c3dcce877e4cd02f3d074d248b65536c236cf592afcf7e142ed5117f53c7"} Jan 20 17:47:15 crc kubenswrapper[4558]: I0120 17:47:15.722026 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="86e0c3dcce877e4cd02f3d074d248b65536c236cf592afcf7e142ed5117f53c7" Jan 20 17:47:15 crc kubenswrapper[4558]: I0120 17:47:15.722100 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-db-sync-mr5dl" Jan 20 17:47:15 crc kubenswrapper[4558]: I0120 17:47:15.729747 4558 generic.go:334] "Generic (PLEG): container finished" podID="d1809d86-a9c3-46e9-96eb-06433832465c" containerID="7dd5fbed1bb93ffab4c94f345e8862a47fbcd02213be363bc0764b0ebe5c1f83" exitCode=0 Jan 20 17:47:15 crc kubenswrapper[4558]: I0120 17:47:15.729804 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-sync-s4glp" event={"ID":"d1809d86-a9c3-46e9-96eb-06433832465c","Type":"ContainerDied","Data":"7dd5fbed1bb93ffab4c94f345e8862a47fbcd02213be363bc0764b0ebe5c1f83"} Jan 20 17:47:15 crc kubenswrapper[4558]: I0120 17:47:15.732640 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-sync-qcsmp" event={"ID":"51f6fade-ce82-468f-9a2c-79c733dddeb3","Type":"ContainerDied","Data":"2f6862a3c8ff3c11be404b9813e4e4acee97bfc8125ccbaec1a6f5c163d5b2e6"} Jan 20 17:47:15 crc kubenswrapper[4558]: I0120 17:47:15.732668 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2f6862a3c8ff3c11be404b9813e4e4acee97bfc8125ccbaec1a6f5c163d5b2e6" Jan 20 17:47:15 crc kubenswrapper[4558]: I0120 17:47:15.732724 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-db-sync-qcsmp" Jan 20 17:47:15 crc kubenswrapper[4558]: I0120 17:47:15.749335 4558 generic.go:334] "Generic (PLEG): container finished" podID="20780c8d-f8bc-43c4-84ba-ba6f2e0fe601" containerID="a5d3a8ef41b0fbf942dd8d188bb25b5d46afeed4c9868dfa05afacbece3f7157" exitCode=0 Jan 20 17:47:15 crc kubenswrapper[4558]: I0120 17:47:15.749459 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-sync-bdw77" event={"ID":"20780c8d-f8bc-43c4-84ba-ba6f2e0fe601","Type":"ContainerDied","Data":"a5d3a8ef41b0fbf942dd8d188bb25b5d46afeed4c9868dfa05afacbece3f7157"} Jan 20 17:47:15 crc kubenswrapper[4558]: I0120 17:47:15.766394 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"d3c4b74b-cd31-4245-91f0-9718f8896f61","Type":"ContainerStarted","Data":"6db9a7511d4ee8abaa86b286093d4b13d304e9f984a000068c383c1d65676f4a"} Jan 20 17:47:15 crc kubenswrapper[4558]: I0120 17:47:15.784386 4558 generic.go:334] "Generic (PLEG): container finished" podID="3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6" containerID="7064c70f51c5070bb50a04ba6121a31e6cb8e5af6fce497e461d140997c2e99c" exitCode=0 Jan 20 17:47:15 crc kubenswrapper[4558]: I0120 17:47:15.784426 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-x52px" event={"ID":"3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6","Type":"ContainerDied","Data":"7064c70f51c5070bb50a04ba6121a31e6cb8e5af6fce497e461d140997c2e99c"} Jan 20 17:47:15 crc kubenswrapper[4558]: I0120 17:47:15.859393 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-worker-97cdbfcb5-xwv8l"] Jan 20 17:47:15 crc kubenswrapper[4558]: E0120 17:47:15.859808 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="51f6fade-ce82-468f-9a2c-79c733dddeb3" containerName="barbican-db-sync" Jan 20 17:47:15 crc kubenswrapper[4558]: I0120 17:47:15.859823 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="51f6fade-ce82-468f-9a2c-79c733dddeb3" containerName="barbican-db-sync" Jan 20 17:47:15 crc kubenswrapper[4558]: E0120 17:47:15.859854 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54a8e29a-d222-49e8-ab57-1aa0afb9648b" containerName="placement-db-sync" Jan 20 17:47:15 crc kubenswrapper[4558]: I0120 17:47:15.859860 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="54a8e29a-d222-49e8-ab57-1aa0afb9648b" containerName="placement-db-sync" Jan 20 17:47:15 crc kubenswrapper[4558]: I0120 17:47:15.860061 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="54a8e29a-d222-49e8-ab57-1aa0afb9648b" containerName="placement-db-sync" Jan 20 17:47:15 crc kubenswrapper[4558]: I0120 17:47:15.860089 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="51f6fade-ce82-468f-9a2c-79c733dddeb3" containerName="barbican-db-sync" Jan 20 17:47:15 crc kubenswrapper[4558]: I0120 17:47:15.861080 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-97cdbfcb5-xwv8l" Jan 20 17:47:15 crc kubenswrapper[4558]: I0120 17:47:15.865522 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-worker-config-data" Jan 20 17:47:15 crc kubenswrapper[4558]: I0120 17:47:15.865575 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-barbican-dockercfg-bm9pv" Jan 20 17:47:15 crc kubenswrapper[4558]: I0120 17:47:15.865539 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-config-data" Jan 20 17:47:15 crc kubenswrapper[4558]: I0120 17:47:15.910291 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-worker-97cdbfcb5-xwv8l"] Jan 20 17:47:15 crc kubenswrapper[4558]: I0120 17:47:15.920456 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c5adf97d-8b90-49ca-bbeb-21dd8446a226-config-data-custom\") pod \"barbican-worker-97cdbfcb5-xwv8l\" (UID: \"c5adf97d-8b90-49ca-bbeb-21dd8446a226\") " pod="openstack-kuttl-tests/barbican-worker-97cdbfcb5-xwv8l" Jan 20 17:47:15 crc kubenswrapper[4558]: I0120 17:47:15.920825 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c5adf97d-8b90-49ca-bbeb-21dd8446a226-config-data\") pod \"barbican-worker-97cdbfcb5-xwv8l\" (UID: \"c5adf97d-8b90-49ca-bbeb-21dd8446a226\") " pod="openstack-kuttl-tests/barbican-worker-97cdbfcb5-xwv8l" Jan 20 17:47:15 crc kubenswrapper[4558]: I0120 17:47:15.920911 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c5adf97d-8b90-49ca-bbeb-21dd8446a226-logs\") pod \"barbican-worker-97cdbfcb5-xwv8l\" (UID: \"c5adf97d-8b90-49ca-bbeb-21dd8446a226\") " pod="openstack-kuttl-tests/barbican-worker-97cdbfcb5-xwv8l" Jan 20 17:47:15 crc kubenswrapper[4558]: I0120 17:47:15.921003 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5adf97d-8b90-49ca-bbeb-21dd8446a226-combined-ca-bundle\") pod \"barbican-worker-97cdbfcb5-xwv8l\" (UID: \"c5adf97d-8b90-49ca-bbeb-21dd8446a226\") " pod="openstack-kuttl-tests/barbican-worker-97cdbfcb5-xwv8l" Jan 20 17:47:15 crc kubenswrapper[4558]: I0120 17:47:15.921065 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q9xhl\" (UniqueName: \"kubernetes.io/projected/c5adf97d-8b90-49ca-bbeb-21dd8446a226-kube-api-access-q9xhl\") pod \"barbican-worker-97cdbfcb5-xwv8l\" (UID: \"c5adf97d-8b90-49ca-bbeb-21dd8446a226\") " pod="openstack-kuttl-tests/barbican-worker-97cdbfcb5-xwv8l" Jan 20 17:47:15 crc kubenswrapper[4558]: I0120 17:47:15.933248 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-7d4c95f78b-qjwvm"] Jan 20 17:47:15 crc kubenswrapper[4558]: I0120 17:47:15.934585 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-7d4c95f78b-qjwvm" Jan 20 17:47:15 crc kubenswrapper[4558]: I0120 17:47:15.947203 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-7d4c95f78b-qjwvm"] Jan 20 17:47:15 crc kubenswrapper[4558]: I0120 17:47:15.955483 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-keystone-listener-config-data" Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.023092 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c5adf97d-8b90-49ca-bbeb-21dd8446a226-config-data\") pod \"barbican-worker-97cdbfcb5-xwv8l\" (UID: \"c5adf97d-8b90-49ca-bbeb-21dd8446a226\") " pod="openstack-kuttl-tests/barbican-worker-97cdbfcb5-xwv8l" Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.023156 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2-combined-ca-bundle\") pod \"barbican-keystone-listener-7d4c95f78b-qjwvm\" (UID: \"e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2\") " pod="openstack-kuttl-tests/barbican-keystone-listener-7d4c95f78b-qjwvm" Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.023201 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2-config-data\") pod \"barbican-keystone-listener-7d4c95f78b-qjwvm\" (UID: \"e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2\") " pod="openstack-kuttl-tests/barbican-keystone-listener-7d4c95f78b-qjwvm" Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.023228 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c5adf97d-8b90-49ca-bbeb-21dd8446a226-logs\") pod \"barbican-worker-97cdbfcb5-xwv8l\" (UID: \"c5adf97d-8b90-49ca-bbeb-21dd8446a226\") " pod="openstack-kuttl-tests/barbican-worker-97cdbfcb5-xwv8l" Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.023267 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-llqc6\" (UniqueName: \"kubernetes.io/projected/e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2-kube-api-access-llqc6\") pod \"barbican-keystone-listener-7d4c95f78b-qjwvm\" (UID: \"e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2\") " pod="openstack-kuttl-tests/barbican-keystone-listener-7d4c95f78b-qjwvm" Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.023300 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5adf97d-8b90-49ca-bbeb-21dd8446a226-combined-ca-bundle\") pod \"barbican-worker-97cdbfcb5-xwv8l\" (UID: \"c5adf97d-8b90-49ca-bbeb-21dd8446a226\") " pod="openstack-kuttl-tests/barbican-worker-97cdbfcb5-xwv8l" Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.023344 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q9xhl\" (UniqueName: \"kubernetes.io/projected/c5adf97d-8b90-49ca-bbeb-21dd8446a226-kube-api-access-q9xhl\") pod \"barbican-worker-97cdbfcb5-xwv8l\" (UID: \"c5adf97d-8b90-49ca-bbeb-21dd8446a226\") " pod="openstack-kuttl-tests/barbican-worker-97cdbfcb5-xwv8l" Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.023360 4558 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2-logs\") pod \"barbican-keystone-listener-7d4c95f78b-qjwvm\" (UID: \"e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2\") " pod="openstack-kuttl-tests/barbican-keystone-listener-7d4c95f78b-qjwvm" Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.023398 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2-config-data-custom\") pod \"barbican-keystone-listener-7d4c95f78b-qjwvm\" (UID: \"e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2\") " pod="openstack-kuttl-tests/barbican-keystone-listener-7d4c95f78b-qjwvm" Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.023418 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c5adf97d-8b90-49ca-bbeb-21dd8446a226-config-data-custom\") pod \"barbican-worker-97cdbfcb5-xwv8l\" (UID: \"c5adf97d-8b90-49ca-bbeb-21dd8446a226\") " pod="openstack-kuttl-tests/barbican-worker-97cdbfcb5-xwv8l" Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.038382 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c5adf97d-8b90-49ca-bbeb-21dd8446a226-logs\") pod \"barbican-worker-97cdbfcb5-xwv8l\" (UID: \"c5adf97d-8b90-49ca-bbeb-21dd8446a226\") " pod="openstack-kuttl-tests/barbican-worker-97cdbfcb5-xwv8l" Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.042856 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c5adf97d-8b90-49ca-bbeb-21dd8446a226-config-data-custom\") pod \"barbican-worker-97cdbfcb5-xwv8l\" (UID: \"c5adf97d-8b90-49ca-bbeb-21dd8446a226\") " pod="openstack-kuttl-tests/barbican-worker-97cdbfcb5-xwv8l" Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.050491 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c5adf97d-8b90-49ca-bbeb-21dd8446a226-config-data\") pod \"barbican-worker-97cdbfcb5-xwv8l\" (UID: \"c5adf97d-8b90-49ca-bbeb-21dd8446a226\") " pod="openstack-kuttl-tests/barbican-worker-97cdbfcb5-xwv8l" Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.074652 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q9xhl\" (UniqueName: \"kubernetes.io/projected/c5adf97d-8b90-49ca-bbeb-21dd8446a226-kube-api-access-q9xhl\") pod \"barbican-worker-97cdbfcb5-xwv8l\" (UID: \"c5adf97d-8b90-49ca-bbeb-21dd8446a226\") " pod="openstack-kuttl-tests/barbican-worker-97cdbfcb5-xwv8l" Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.094272 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-api-58bb6f688-8dbng"] Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.096156 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-api-58bb6f688-8dbng" Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.112218 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-api-config-data" Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.119002 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-api-58bb6f688-8dbng"] Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.139879 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5adf97d-8b90-49ca-bbeb-21dd8446a226-combined-ca-bundle\") pod \"barbican-worker-97cdbfcb5-xwv8l\" (UID: \"c5adf97d-8b90-49ca-bbeb-21dd8446a226\") " pod="openstack-kuttl-tests/barbican-worker-97cdbfcb5-xwv8l" Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.147671 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec317104-430d-4590-85a9-257448df4212-combined-ca-bundle\") pod \"barbican-api-58bb6f688-8dbng\" (UID: \"ec317104-430d-4590-85a9-257448df4212\") " pod="openstack-kuttl-tests/barbican-api-58bb6f688-8dbng" Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.147815 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2-combined-ca-bundle\") pod \"barbican-keystone-listener-7d4c95f78b-qjwvm\" (UID: \"e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2\") " pod="openstack-kuttl-tests/barbican-keystone-listener-7d4c95f78b-qjwvm" Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.147898 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2-config-data\") pod \"barbican-keystone-listener-7d4c95f78b-qjwvm\" (UID: \"e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2\") " pod="openstack-kuttl-tests/barbican-keystone-listener-7d4c95f78b-qjwvm" Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.147962 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ec317104-430d-4590-85a9-257448df4212-config-data-custom\") pod \"barbican-api-58bb6f688-8dbng\" (UID: \"ec317104-430d-4590-85a9-257448df4212\") " pod="openstack-kuttl-tests/barbican-api-58bb6f688-8dbng" Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.148043 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-llqc6\" (UniqueName: \"kubernetes.io/projected/e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2-kube-api-access-llqc6\") pod \"barbican-keystone-listener-7d4c95f78b-qjwvm\" (UID: \"e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2\") " pod="openstack-kuttl-tests/barbican-keystone-listener-7d4c95f78b-qjwvm" Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.148184 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2-logs\") pod \"barbican-keystone-listener-7d4c95f78b-qjwvm\" (UID: \"e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2\") " pod="openstack-kuttl-tests/barbican-keystone-listener-7d4c95f78b-qjwvm" Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.148209 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"kube-api-access-rlc2z\" (UniqueName: \"kubernetes.io/projected/ec317104-430d-4590-85a9-257448df4212-kube-api-access-rlc2z\") pod \"barbican-api-58bb6f688-8dbng\" (UID: \"ec317104-430d-4590-85a9-257448df4212\") " pod="openstack-kuttl-tests/barbican-api-58bb6f688-8dbng" Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.148275 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ec317104-430d-4590-85a9-257448df4212-logs\") pod \"barbican-api-58bb6f688-8dbng\" (UID: \"ec317104-430d-4590-85a9-257448df4212\") " pod="openstack-kuttl-tests/barbican-api-58bb6f688-8dbng" Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.148305 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2-config-data-custom\") pod \"barbican-keystone-listener-7d4c95f78b-qjwvm\" (UID: \"e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2\") " pod="openstack-kuttl-tests/barbican-keystone-listener-7d4c95f78b-qjwvm" Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.148403 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec317104-430d-4590-85a9-257448df4212-config-data\") pod \"barbican-api-58bb6f688-8dbng\" (UID: \"ec317104-430d-4590-85a9-257448df4212\") " pod="openstack-kuttl-tests/barbican-api-58bb6f688-8dbng" Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.159922 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2-config-data-custom\") pod \"barbican-keystone-listener-7d4c95f78b-qjwvm\" (UID: \"e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2\") " pod="openstack-kuttl-tests/barbican-keystone-listener-7d4c95f78b-qjwvm" Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.164697 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2-combined-ca-bundle\") pod \"barbican-keystone-listener-7d4c95f78b-qjwvm\" (UID: \"e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2\") " pod="openstack-kuttl-tests/barbican-keystone-listener-7d4c95f78b-qjwvm" Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.164852 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2-logs\") pod \"barbican-keystone-listener-7d4c95f78b-qjwvm\" (UID: \"e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2\") " pod="openstack-kuttl-tests/barbican-keystone-listener-7d4c95f78b-qjwvm" Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.180426 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2-config-data\") pod \"barbican-keystone-listener-7d4c95f78b-qjwvm\" (UID: \"e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2\") " pod="openstack-kuttl-tests/barbican-keystone-listener-7d4c95f78b-qjwvm" Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.186485 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-llqc6\" (UniqueName: \"kubernetes.io/projected/e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2-kube-api-access-llqc6\") pod \"barbican-keystone-listener-7d4c95f78b-qjwvm\" (UID: 
\"e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2\") " pod="openstack-kuttl-tests/barbican-keystone-listener-7d4c95f78b-qjwvm" Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.213755 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-97cdbfcb5-xwv8l" Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.255644 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rlc2z\" (UniqueName: \"kubernetes.io/projected/ec317104-430d-4590-85a9-257448df4212-kube-api-access-rlc2z\") pod \"barbican-api-58bb6f688-8dbng\" (UID: \"ec317104-430d-4590-85a9-257448df4212\") " pod="openstack-kuttl-tests/barbican-api-58bb6f688-8dbng" Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.255698 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ec317104-430d-4590-85a9-257448df4212-logs\") pod \"barbican-api-58bb6f688-8dbng\" (UID: \"ec317104-430d-4590-85a9-257448df4212\") " pod="openstack-kuttl-tests/barbican-api-58bb6f688-8dbng" Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.255754 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec317104-430d-4590-85a9-257448df4212-config-data\") pod \"barbican-api-58bb6f688-8dbng\" (UID: \"ec317104-430d-4590-85a9-257448df4212\") " pod="openstack-kuttl-tests/barbican-api-58bb6f688-8dbng" Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.255781 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec317104-430d-4590-85a9-257448df4212-combined-ca-bundle\") pod \"barbican-api-58bb6f688-8dbng\" (UID: \"ec317104-430d-4590-85a9-257448df4212\") " pod="openstack-kuttl-tests/barbican-api-58bb6f688-8dbng" Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.255858 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ec317104-430d-4590-85a9-257448df4212-config-data-custom\") pod \"barbican-api-58bb6f688-8dbng\" (UID: \"ec317104-430d-4590-85a9-257448df4212\") " pod="openstack-kuttl-tests/barbican-api-58bb6f688-8dbng" Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.261121 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ec317104-430d-4590-85a9-257448df4212-logs\") pod \"barbican-api-58bb6f688-8dbng\" (UID: \"ec317104-430d-4590-85a9-257448df4212\") " pod="openstack-kuttl-tests/barbican-api-58bb6f688-8dbng" Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.311980 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec317104-430d-4590-85a9-257448df4212-config-data\") pod \"barbican-api-58bb6f688-8dbng\" (UID: \"ec317104-430d-4590-85a9-257448df4212\") " pod="openstack-kuttl-tests/barbican-api-58bb6f688-8dbng" Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.324375 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rlc2z\" (UniqueName: \"kubernetes.io/projected/ec317104-430d-4590-85a9-257448df4212-kube-api-access-rlc2z\") pod \"barbican-api-58bb6f688-8dbng\" (UID: \"ec317104-430d-4590-85a9-257448df4212\") " pod="openstack-kuttl-tests/barbican-api-58bb6f688-8dbng" Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.324821 4558 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec317104-430d-4590-85a9-257448df4212-combined-ca-bundle\") pod \"barbican-api-58bb6f688-8dbng\" (UID: \"ec317104-430d-4590-85a9-257448df4212\") " pod="openstack-kuttl-tests/barbican-api-58bb6f688-8dbng" Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.325731 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ec317104-430d-4590-85a9-257448df4212-config-data-custom\") pod \"barbican-api-58bb6f688-8dbng\" (UID: \"ec317104-430d-4590-85a9-257448df4212\") " pod="openstack-kuttl-tests/barbican-api-58bb6f688-8dbng" Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.404298 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/placement-7dc79cc978-clpbw"] Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.406330 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-7dc79cc978-clpbw" Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.410830 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"placement-config-data" Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.411144 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"placement-scripts" Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.411838 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"placement-placement-dockercfg-qmgb8" Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.417981 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-7dc79cc978-clpbw"] Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.469743 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76dc3695-3423-40fe-afcc-798d8a78c542-combined-ca-bundle\") pod \"placement-7dc79cc978-clpbw\" (UID: \"76dc3695-3423-40fe-afcc-798d8a78c542\") " pod="openstack-kuttl-tests/placement-7dc79cc978-clpbw" Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.469828 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/76dc3695-3423-40fe-afcc-798d8a78c542-logs\") pod \"placement-7dc79cc978-clpbw\" (UID: \"76dc3695-3423-40fe-afcc-798d8a78c542\") " pod="openstack-kuttl-tests/placement-7dc79cc978-clpbw" Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.469861 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xt9wp\" (UniqueName: \"kubernetes.io/projected/76dc3695-3423-40fe-afcc-798d8a78c542-kube-api-access-xt9wp\") pod \"placement-7dc79cc978-clpbw\" (UID: \"76dc3695-3423-40fe-afcc-798d8a78c542\") " pod="openstack-kuttl-tests/placement-7dc79cc978-clpbw" Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.469932 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/76dc3695-3423-40fe-afcc-798d8a78c542-scripts\") pod \"placement-7dc79cc978-clpbw\" (UID: \"76dc3695-3423-40fe-afcc-798d8a78c542\") " pod="openstack-kuttl-tests/placement-7dc79cc978-clpbw" Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.470005 4558 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/76dc3695-3423-40fe-afcc-798d8a78c542-config-data\") pod \"placement-7dc79cc978-clpbw\" (UID: \"76dc3695-3423-40fe-afcc-798d8a78c542\") " pod="openstack-kuttl-tests/placement-7dc79cc978-clpbw" Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.475304 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-7d4c95f78b-qjwvm" Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.556079 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-api-58bb6f688-8dbng" Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.578699 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/76dc3695-3423-40fe-afcc-798d8a78c542-config-data\") pod \"placement-7dc79cc978-clpbw\" (UID: \"76dc3695-3423-40fe-afcc-798d8a78c542\") " pod="openstack-kuttl-tests/placement-7dc79cc978-clpbw" Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.578767 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76dc3695-3423-40fe-afcc-798d8a78c542-combined-ca-bundle\") pod \"placement-7dc79cc978-clpbw\" (UID: \"76dc3695-3423-40fe-afcc-798d8a78c542\") " pod="openstack-kuttl-tests/placement-7dc79cc978-clpbw" Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.578845 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/76dc3695-3423-40fe-afcc-798d8a78c542-logs\") pod \"placement-7dc79cc978-clpbw\" (UID: \"76dc3695-3423-40fe-afcc-798d8a78c542\") " pod="openstack-kuttl-tests/placement-7dc79cc978-clpbw" Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.578883 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xt9wp\" (UniqueName: \"kubernetes.io/projected/76dc3695-3423-40fe-afcc-798d8a78c542-kube-api-access-xt9wp\") pod \"placement-7dc79cc978-clpbw\" (UID: \"76dc3695-3423-40fe-afcc-798d8a78c542\") " pod="openstack-kuttl-tests/placement-7dc79cc978-clpbw" Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.578964 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/76dc3695-3423-40fe-afcc-798d8a78c542-scripts\") pod \"placement-7dc79cc978-clpbw\" (UID: \"76dc3695-3423-40fe-afcc-798d8a78c542\") " pod="openstack-kuttl-tests/placement-7dc79cc978-clpbw" Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.580236 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/76dc3695-3423-40fe-afcc-798d8a78c542-logs\") pod \"placement-7dc79cc978-clpbw\" (UID: \"76dc3695-3423-40fe-afcc-798d8a78c542\") " pod="openstack-kuttl-tests/placement-7dc79cc978-clpbw" Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.582750 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76dc3695-3423-40fe-afcc-798d8a78c542-combined-ca-bundle\") pod \"placement-7dc79cc978-clpbw\" (UID: \"76dc3695-3423-40fe-afcc-798d8a78c542\") " pod="openstack-kuttl-tests/placement-7dc79cc978-clpbw" Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.583218 4558 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/76dc3695-3423-40fe-afcc-798d8a78c542-config-data\") pod \"placement-7dc79cc978-clpbw\" (UID: \"76dc3695-3423-40fe-afcc-798d8a78c542\") " pod="openstack-kuttl-tests/placement-7dc79cc978-clpbw" Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.591098 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/76dc3695-3423-40fe-afcc-798d8a78c542-scripts\") pod \"placement-7dc79cc978-clpbw\" (UID: \"76dc3695-3423-40fe-afcc-798d8a78c542\") " pod="openstack-kuttl-tests/placement-7dc79cc978-clpbw" Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.598687 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xt9wp\" (UniqueName: \"kubernetes.io/projected/76dc3695-3423-40fe-afcc-798d8a78c542-kube-api-access-xt9wp\") pod \"placement-7dc79cc978-clpbw\" (UID: \"76dc3695-3423-40fe-afcc-798d8a78c542\") " pod="openstack-kuttl-tests/placement-7dc79cc978-clpbw" Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.605213 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="254186c2-67b8-4439-b79d-55f755e4afde" path="/var/lib/kubelet/pods/254186c2-67b8-4439-b79d-55f755e4afde/volumes" Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.605976 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f3dc733d-d501-42c3-af4b-ef30ec59ffb4" path="/var/lib/kubelet/pods/f3dc733d-d501-42c3-af4b-ef30ec59ffb4/volumes" Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.777530 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-worker-97cdbfcb5-xwv8l"] Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.792182 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-7dc79cc978-clpbw" Jan 20 17:47:16 crc kubenswrapper[4558]: W0120 17:47:16.795257 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc5adf97d_8b90_49ca_bbeb_21dd8446a226.slice/crio-9708763ddb6cdd14575c2edb6a39f5547296c6ec28017b7ffc35f3cb63b81ad6 WatchSource:0}: Error finding container 9708763ddb6cdd14575c2edb6a39f5547296c6ec28017b7ffc35f3cb63b81ad6: Status 404 returned error can't find the container with id 9708763ddb6cdd14575c2edb6a39f5547296c6ec28017b7ffc35f3cb63b81ad6 Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.809302 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"017cf5a8-b216-4c29-8dfc-7b5b6625591c","Type":"ContainerStarted","Data":"5417f7ab774e651be1947ba57ee85fa32cb85abba514b48b86676238391f29eb"} Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.809536 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="017cf5a8-b216-4c29-8dfc-7b5b6625591c" containerName="ceilometer-central-agent" containerID="cri-o://7049e294116c3dfe281795be83dd1fef9c5ce6bd1d8517d4e1c57247c067830c" gracePeriod=30 Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.809628 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="017cf5a8-b216-4c29-8dfc-7b5b6625591c" containerName="proxy-httpd" containerID="cri-o://5417f7ab774e651be1947ba57ee85fa32cb85abba514b48b86676238391f29eb" gracePeriod=30 Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.809665 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.809684 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="017cf5a8-b216-4c29-8dfc-7b5b6625591c" containerName="ceilometer-notification-agent" containerID="cri-o://39122fe68ccaf23346e6568ce60f5ad96343708ff688ebb76e80c587fa773a90" gracePeriod=30 Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.809671 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="017cf5a8-b216-4c29-8dfc-7b5b6625591c" containerName="sg-core" containerID="cri-o://c54b38a6355b94b185e84a1db7075c6f9bf63a358c709e1f336a4f64b4cea596" gracePeriod=30 Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.819280 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"05f4b910-ff7c-4039-8ff7-24687eea5e74","Type":"ContainerStarted","Data":"54ce3695fde6a7ec6419832384135e5cf1001a4b3826a28d0554442580b148c3"} Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.825787 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"d3c4b74b-cd31-4245-91f0-9718f8896f61","Type":"ContainerStarted","Data":"73b8176cf31c48afe8b2940b6e65ec9b3de704e8761f5cbe0a0a28407ebce072"} Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.838681 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=2.493536515 podStartE2EDuration="6.838661126s" podCreationTimestamp="2026-01-20 17:47:10 +0000 UTC" firstStartedPulling="2026-01-20 
17:47:11.654128562 +0000 UTC m=+3925.414466529" lastFinishedPulling="2026-01-20 17:47:15.999253173 +0000 UTC m=+3929.759591140" observedRunningTime="2026-01-20 17:47:16.834251965 +0000 UTC m=+3930.594589933" watchObservedRunningTime="2026-01-20 17:47:16.838661126 +0000 UTC m=+3930.598999093" Jan 20 17:47:16 crc kubenswrapper[4558]: I0120 17:47:16.974399 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-7d4c95f78b-qjwvm"] Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.084130 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-api-58bb6f688-8dbng"] Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.130325 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/placement-59975f6898-zxn8m"] Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.132320 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-59975f6898-zxn8m" Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.143365 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-placement-public-svc" Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.144082 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-placement-internal-svc" Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.149416 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-59975f6898-zxn8m"] Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.204809 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mnpsv\" (UniqueName: \"kubernetes.io/projected/a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2-kube-api-access-mnpsv\") pod \"placement-59975f6898-zxn8m\" (UID: \"a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2\") " pod="openstack-kuttl-tests/placement-59975f6898-zxn8m" Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.204882 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2-logs\") pod \"placement-59975f6898-zxn8m\" (UID: \"a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2\") " pod="openstack-kuttl-tests/placement-59975f6898-zxn8m" Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.204917 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2-public-tls-certs\") pod \"placement-59975f6898-zxn8m\" (UID: \"a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2\") " pod="openstack-kuttl-tests/placement-59975f6898-zxn8m" Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.205443 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2-config-data\") pod \"placement-59975f6898-zxn8m\" (UID: \"a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2\") " pod="openstack-kuttl-tests/placement-59975f6898-zxn8m" Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.205484 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2-scripts\") pod \"placement-59975f6898-zxn8m\" (UID: 
\"a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2\") " pod="openstack-kuttl-tests/placement-59975f6898-zxn8m" Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.205521 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2-combined-ca-bundle\") pod \"placement-59975f6898-zxn8m\" (UID: \"a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2\") " pod="openstack-kuttl-tests/placement-59975f6898-zxn8m" Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.206636 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2-internal-tls-certs\") pod \"placement-59975f6898-zxn8m\" (UID: \"a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2\") " pod="openstack-kuttl-tests/placement-59975f6898-zxn8m" Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.310560 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2-logs\") pod \"placement-59975f6898-zxn8m\" (UID: \"a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2\") " pod="openstack-kuttl-tests/placement-59975f6898-zxn8m" Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.310613 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2-public-tls-certs\") pod \"placement-59975f6898-zxn8m\" (UID: \"a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2\") " pod="openstack-kuttl-tests/placement-59975f6898-zxn8m" Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.310693 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2-config-data\") pod \"placement-59975f6898-zxn8m\" (UID: \"a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2\") " pod="openstack-kuttl-tests/placement-59975f6898-zxn8m" Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.310712 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2-scripts\") pod \"placement-59975f6898-zxn8m\" (UID: \"a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2\") " pod="openstack-kuttl-tests/placement-59975f6898-zxn8m" Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.310735 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2-combined-ca-bundle\") pod \"placement-59975f6898-zxn8m\" (UID: \"a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2\") " pod="openstack-kuttl-tests/placement-59975f6898-zxn8m" Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.310775 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2-internal-tls-certs\") pod \"placement-59975f6898-zxn8m\" (UID: \"a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2\") " pod="openstack-kuttl-tests/placement-59975f6898-zxn8m" Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.310805 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mnpsv\" (UniqueName: 
\"kubernetes.io/projected/a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2-kube-api-access-mnpsv\") pod \"placement-59975f6898-zxn8m\" (UID: \"a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2\") " pod="openstack-kuttl-tests/placement-59975f6898-zxn8m" Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.311401 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2-logs\") pod \"placement-59975f6898-zxn8m\" (UID: \"a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2\") " pod="openstack-kuttl-tests/placement-59975f6898-zxn8m" Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.319009 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-db-sync-bdw77" Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.319903 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2-scripts\") pod \"placement-59975f6898-zxn8m\" (UID: \"a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2\") " pod="openstack-kuttl-tests/placement-59975f6898-zxn8m" Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.323715 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2-config-data\") pod \"placement-59975f6898-zxn8m\" (UID: \"a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2\") " pod="openstack-kuttl-tests/placement-59975f6898-zxn8m" Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.327658 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2-public-tls-certs\") pod \"placement-59975f6898-zxn8m\" (UID: \"a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2\") " pod="openstack-kuttl-tests/placement-59975f6898-zxn8m" Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.331070 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2-combined-ca-bundle\") pod \"placement-59975f6898-zxn8m\" (UID: \"a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2\") " pod="openstack-kuttl-tests/placement-59975f6898-zxn8m" Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.364602 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2-internal-tls-certs\") pod \"placement-59975f6898-zxn8m\" (UID: \"a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2\") " pod="openstack-kuttl-tests/placement-59975f6898-zxn8m" Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.364755 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mnpsv\" (UniqueName: \"kubernetes.io/projected/a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2-kube-api-access-mnpsv\") pod \"placement-59975f6898-zxn8m\" (UID: \"a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2\") " pod="openstack-kuttl-tests/placement-59975f6898-zxn8m" Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.411378 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/20780c8d-f8bc-43c4-84ba-ba6f2e0fe601-db-sync-config-data\") pod \"20780c8d-f8bc-43c4-84ba-ba6f2e0fe601\" (UID: \"20780c8d-f8bc-43c4-84ba-ba6f2e0fe601\") " Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.411414 4558 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/20780c8d-f8bc-43c4-84ba-ba6f2e0fe601-config-data\") pod \"20780c8d-f8bc-43c4-84ba-ba6f2e0fe601\" (UID: \"20780c8d-f8bc-43c4-84ba-ba6f2e0fe601\") " Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.411447 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8cbcl\" (UniqueName: \"kubernetes.io/projected/20780c8d-f8bc-43c4-84ba-ba6f2e0fe601-kube-api-access-8cbcl\") pod \"20780c8d-f8bc-43c4-84ba-ba6f2e0fe601\" (UID: \"20780c8d-f8bc-43c4-84ba-ba6f2e0fe601\") " Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.411467 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/20780c8d-f8bc-43c4-84ba-ba6f2e0fe601-scripts\") pod \"20780c8d-f8bc-43c4-84ba-ba6f2e0fe601\" (UID: \"20780c8d-f8bc-43c4-84ba-ba6f2e0fe601\") " Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.411533 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/20780c8d-f8bc-43c4-84ba-ba6f2e0fe601-etc-machine-id\") pod \"20780c8d-f8bc-43c4-84ba-ba6f2e0fe601\" (UID: \"20780c8d-f8bc-43c4-84ba-ba6f2e0fe601\") " Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.411588 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20780c8d-f8bc-43c4-84ba-ba6f2e0fe601-combined-ca-bundle\") pod \"20780c8d-f8bc-43c4-84ba-ba6f2e0fe601\" (UID: \"20780c8d-f8bc-43c4-84ba-ba6f2e0fe601\") " Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.412266 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/20780c8d-f8bc-43c4-84ba-ba6f2e0fe601-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "20780c8d-f8bc-43c4-84ba-ba6f2e0fe601" (UID: "20780c8d-f8bc-43c4-84ba-ba6f2e0fe601"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.428853 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20780c8d-f8bc-43c4-84ba-ba6f2e0fe601-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "20780c8d-f8bc-43c4-84ba-ba6f2e0fe601" (UID: "20780c8d-f8bc-43c4-84ba-ba6f2e0fe601"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.433506 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20780c8d-f8bc-43c4-84ba-ba6f2e0fe601-scripts" (OuterVolumeSpecName: "scripts") pod "20780c8d-f8bc-43c4-84ba-ba6f2e0fe601" (UID: "20780c8d-f8bc-43c4-84ba-ba6f2e0fe601"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.433739 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20780c8d-f8bc-43c4-84ba-ba6f2e0fe601-kube-api-access-8cbcl" (OuterVolumeSpecName: "kube-api-access-8cbcl") pod "20780c8d-f8bc-43c4-84ba-ba6f2e0fe601" (UID: "20780c8d-f8bc-43c4-84ba-ba6f2e0fe601"). InnerVolumeSpecName "kube-api-access-8cbcl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.440683 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-x52px" Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.451893 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-db-sync-s4glp" Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.506099 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-59975f6898-zxn8m" Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.513244 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-swvjh\" (UniqueName: \"kubernetes.io/projected/3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6-kube-api-access-swvjh\") pod \"3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6\" (UID: \"3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6\") " Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.513282 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6-combined-ca-bundle\") pod \"3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6\" (UID: \"3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6\") " Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.513301 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6-scripts\") pod \"3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6\" (UID: \"3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6\") " Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.513372 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6-config-data\") pod \"3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6\" (UID: \"3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6\") " Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.513411 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6-credential-keys\") pod \"3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6\" (UID: \"3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6\") " Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.513452 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/d1809d86-a9c3-46e9-96eb-06433832465c-config\") pod \"d1809d86-a9c3-46e9-96eb-06433832465c\" (UID: \"d1809d86-a9c3-46e9-96eb-06433832465c\") " Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.513486 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-79rxs\" (UniqueName: \"kubernetes.io/projected/d1809d86-a9c3-46e9-96eb-06433832465c-kube-api-access-79rxs\") pod \"d1809d86-a9c3-46e9-96eb-06433832465c\" (UID: \"d1809d86-a9c3-46e9-96eb-06433832465c\") " Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.513616 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6-fernet-keys\") pod \"3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6\" (UID: \"3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6\") " Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.513734 4558 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d1809d86-a9c3-46e9-96eb-06433832465c-combined-ca-bundle\") pod \"d1809d86-a9c3-46e9-96eb-06433832465c\" (UID: \"d1809d86-a9c3-46e9-96eb-06433832465c\") " Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.514412 4558 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/20780c8d-f8bc-43c4-84ba-ba6f2e0fe601-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.514427 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8cbcl\" (UniqueName: \"kubernetes.io/projected/20780c8d-f8bc-43c4-84ba-ba6f2e0fe601-kube-api-access-8cbcl\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.514438 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/20780c8d-f8bc-43c4-84ba-ba6f2e0fe601-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.514447 4558 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/20780c8d-f8bc-43c4-84ba-ba6f2e0fe601-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.534354 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-7dc79cc978-clpbw"] Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.541761 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6-kube-api-access-swvjh" (OuterVolumeSpecName: "kube-api-access-swvjh") pod "3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6" (UID: "3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6"). InnerVolumeSpecName "kube-api-access-swvjh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.548281 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d1809d86-a9c3-46e9-96eb-06433832465c-kube-api-access-79rxs" (OuterVolumeSpecName: "kube-api-access-79rxs") pod "d1809d86-a9c3-46e9-96eb-06433832465c" (UID: "d1809d86-a9c3-46e9-96eb-06433832465c"). InnerVolumeSpecName "kube-api-access-79rxs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.548953 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6" (UID: "3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.549429 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6" (UID: "3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6"). InnerVolumeSpecName "fernet-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.550098 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6-scripts" (OuterVolumeSpecName: "scripts") pod "3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6" (UID: "3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:47:17 crc kubenswrapper[4558]: W0120 17:47:17.560700 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod76dc3695_3423_40fe_afcc_798d8a78c542.slice/crio-b542b9dce795f7b75d4ec149215a51a043aa7f7c1d433348c2c50561de7be860 WatchSource:0}: Error finding container b542b9dce795f7b75d4ec149215a51a043aa7f7c1d433348c2c50561de7be860: Status 404 returned error can't find the container with id b542b9dce795f7b75d4ec149215a51a043aa7f7c1d433348c2c50561de7be860 Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.622525 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-swvjh\" (UniqueName: \"kubernetes.io/projected/3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6-kube-api-access-swvjh\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.622552 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.622563 4558 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.622572 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-79rxs\" (UniqueName: \"kubernetes.io/projected/d1809d86-a9c3-46e9-96eb-06433832465c-kube-api-access-79rxs\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.622581 4558 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.756374 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20780c8d-f8bc-43c4-84ba-ba6f2e0fe601-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "20780c8d-f8bc-43c4-84ba-ba6f2e0fe601" (UID: "20780c8d-f8bc-43c4-84ba-ba6f2e0fe601"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.761491 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.766425 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6" (UID: "3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.766935 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d1809d86-a9c3-46e9-96eb-06433832465c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d1809d86-a9c3-46e9-96eb-06433832465c" (UID: "d1809d86-a9c3-46e9-96eb-06433832465c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.767502 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d1809d86-a9c3-46e9-96eb-06433832465c-config" (OuterVolumeSpecName: "config") pod "d1809d86-a9c3-46e9-96eb-06433832465c" (UID: "d1809d86-a9c3-46e9-96eb-06433832465c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.775766 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6-config-data" (OuterVolumeSpecName: "config-data") pod "3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6" (UID: "3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.790036 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20780c8d-f8bc-43c4-84ba-ba6f2e0fe601-config-data" (OuterVolumeSpecName: "config-data") pod "20780c8d-f8bc-43c4-84ba-ba6f2e0fe601" (UID: "20780c8d-f8bc-43c4-84ba-ba6f2e0fe601"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.832362 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/017cf5a8-b216-4c29-8dfc-7b5b6625591c-scripts\") pod \"017cf5a8-b216-4c29-8dfc-7b5b6625591c\" (UID: \"017cf5a8-b216-4c29-8dfc-7b5b6625591c\") " Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.832491 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/017cf5a8-b216-4c29-8dfc-7b5b6625591c-config-data\") pod \"017cf5a8-b216-4c29-8dfc-7b5b6625591c\" (UID: \"017cf5a8-b216-4c29-8dfc-7b5b6625591c\") " Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.832535 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xzlt\" (UniqueName: \"kubernetes.io/projected/017cf5a8-b216-4c29-8dfc-7b5b6625591c-kube-api-access-9xzlt\") pod \"017cf5a8-b216-4c29-8dfc-7b5b6625591c\" (UID: \"017cf5a8-b216-4c29-8dfc-7b5b6625591c\") " Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.832600 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/017cf5a8-b216-4c29-8dfc-7b5b6625591c-combined-ca-bundle\") pod \"017cf5a8-b216-4c29-8dfc-7b5b6625591c\" (UID: \"017cf5a8-b216-4c29-8dfc-7b5b6625591c\") " Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.832627 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/017cf5a8-b216-4c29-8dfc-7b5b6625591c-sg-core-conf-yaml\") pod \"017cf5a8-b216-4c29-8dfc-7b5b6625591c\" (UID: \"017cf5a8-b216-4c29-8dfc-7b5b6625591c\") " Jan 20 17:47:17 crc 
kubenswrapper[4558]: I0120 17:47:17.832675 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/017cf5a8-b216-4c29-8dfc-7b5b6625591c-run-httpd\") pod \"017cf5a8-b216-4c29-8dfc-7b5b6625591c\" (UID: \"017cf5a8-b216-4c29-8dfc-7b5b6625591c\") " Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.832707 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/017cf5a8-b216-4c29-8dfc-7b5b6625591c-log-httpd\") pod \"017cf5a8-b216-4c29-8dfc-7b5b6625591c\" (UID: \"017cf5a8-b216-4c29-8dfc-7b5b6625591c\") " Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.833035 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20780c8d-f8bc-43c4-84ba-ba6f2e0fe601-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.833051 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.833060 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.833069 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/20780c8d-f8bc-43c4-84ba-ba6f2e0fe601-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.833076 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/d1809d86-a9c3-46e9-96eb-06433832465c-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.833084 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d1809d86-a9c3-46e9-96eb-06433832465c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.833496 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/017cf5a8-b216-4c29-8dfc-7b5b6625591c-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "017cf5a8-b216-4c29-8dfc-7b5b6625591c" (UID: "017cf5a8-b216-4c29-8dfc-7b5b6625591c"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.838345 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/017cf5a8-b216-4c29-8dfc-7b5b6625591c-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "017cf5a8-b216-4c29-8dfc-7b5b6625591c" (UID: "017cf5a8-b216-4c29-8dfc-7b5b6625591c"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.850358 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/017cf5a8-b216-4c29-8dfc-7b5b6625591c-kube-api-access-9xzlt" (OuterVolumeSpecName: "kube-api-access-9xzlt") pod "017cf5a8-b216-4c29-8dfc-7b5b6625591c" (UID: "017cf5a8-b216-4c29-8dfc-7b5b6625591c"). InnerVolumeSpecName "kube-api-access-9xzlt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.850436 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/017cf5a8-b216-4c29-8dfc-7b5b6625591c-scripts" (OuterVolumeSpecName: "scripts") pod "017cf5a8-b216-4c29-8dfc-7b5b6625591c" (UID: "017cf5a8-b216-4c29-8dfc-7b5b6625591c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.867363 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-sync-s4glp" event={"ID":"d1809d86-a9c3-46e9-96eb-06433832465c","Type":"ContainerDied","Data":"63baaf3f91e7c83fb214e624652cd0b7a0f8266a6aeb4d11cd75c388eec52d73"} Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.867410 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="63baaf3f91e7c83fb214e624652cd0b7a0f8266a6aeb4d11cd75c388eec52d73" Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.867484 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-db-sync-s4glp" Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.880657 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-x52px" event={"ID":"3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6","Type":"ContainerDied","Data":"9dec1db615ab67091e1b48a1c531f2b6214c4c5eac5da1fb8adc3fe65b3ce9eb"} Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.880702 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9dec1db615ab67091e1b48a1c531f2b6214c4c5eac5da1fb8adc3fe65b3ce9eb" Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.880773 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-x52px" Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.903179 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"05f4b910-ff7c-4039-8ff7-24687eea5e74","Type":"ContainerStarted","Data":"aa13f111b95eb70e30ae67cf411f532cb413f039d01f38c02d51d6a30252389b"} Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.906834 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-sync-bdw77" event={"ID":"20780c8d-f8bc-43c4-84ba-ba6f2e0fe601","Type":"ContainerDied","Data":"4efd9c5fa35355473c826ff73e13b012ccc486010a52a36b100ce509a255794b"} Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.906874 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4efd9c5fa35355473c826ff73e13b012ccc486010a52a36b100ce509a255794b" Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.906949 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-db-sync-bdw77" Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.932328 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"d3c4b74b-cd31-4245-91f0-9718f8896f61","Type":"ContainerStarted","Data":"c51450fc82fc389fd4479f32722f045d8117aac56951b6ea66a9b0c5dec8678c"} Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.935860 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/017cf5a8-b216-4c29-8dfc-7b5b6625591c-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.935884 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/017cf5a8-b216-4c29-8dfc-7b5b6625591c-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.935895 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/017cf5a8-b216-4c29-8dfc-7b5b6625591c-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.935907 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xzlt\" (UniqueName: \"kubernetes.io/projected/017cf5a8-b216-4c29-8dfc-7b5b6625591c-kube-api-access-9xzlt\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.947217 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-7dc79cc978-clpbw" event={"ID":"76dc3695-3423-40fe-afcc-798d8a78c542","Type":"ContainerStarted","Data":"b542b9dce795f7b75d4ec149215a51a043aa7f7c1d433348c2c50561de7be860"} Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.948332 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-58bb6f688-8dbng" event={"ID":"ec317104-430d-4590-85a9-257448df4212","Type":"ContainerStarted","Data":"4a6b4390944378517a1bce21b451f353d7142299513dc3c272bafd80bf42db8a"} Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.948368 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-58bb6f688-8dbng" event={"ID":"ec317104-430d-4590-85a9-257448df4212","Type":"ContainerStarted","Data":"d68edacd8a4cd4a1fe2b831e5cb664a262df5824816ffbad65cdaeedba898224"} Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.949308 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-external-api-0" podStartSLOduration=3.9492868100000003 podStartE2EDuration="3.94928681s" podCreationTimestamp="2026-01-20 17:47:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:47:17.93762372 +0000 UTC m=+3931.697961677" watchObservedRunningTime="2026-01-20 17:47:17.94928681 +0000 UTC m=+3931.709624777" Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.971481 4558 generic.go:334] "Generic (PLEG): container finished" podID="017cf5a8-b216-4c29-8dfc-7b5b6625591c" containerID="5417f7ab774e651be1947ba57ee85fa32cb85abba514b48b86676238391f29eb" exitCode=0 Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.971511 4558 generic.go:334] "Generic (PLEG): container finished" podID="017cf5a8-b216-4c29-8dfc-7b5b6625591c" containerID="c54b38a6355b94b185e84a1db7075c6f9bf63a358c709e1f336a4f64b4cea596" exitCode=2 Jan 20 17:47:17 crc 
kubenswrapper[4558]: I0120 17:47:17.971519 4558 generic.go:334] "Generic (PLEG): container finished" podID="017cf5a8-b216-4c29-8dfc-7b5b6625591c" containerID="39122fe68ccaf23346e6568ce60f5ad96343708ff688ebb76e80c587fa773a90" exitCode=0 Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.971527 4558 generic.go:334] "Generic (PLEG): container finished" podID="017cf5a8-b216-4c29-8dfc-7b5b6625591c" containerID="7049e294116c3dfe281795be83dd1fef9c5ce6bd1d8517d4e1c57247c067830c" exitCode=0 Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.971572 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"017cf5a8-b216-4c29-8dfc-7b5b6625591c","Type":"ContainerDied","Data":"5417f7ab774e651be1947ba57ee85fa32cb85abba514b48b86676238391f29eb"} Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.971599 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"017cf5a8-b216-4c29-8dfc-7b5b6625591c","Type":"ContainerDied","Data":"c54b38a6355b94b185e84a1db7075c6f9bf63a358c709e1f336a4f64b4cea596"} Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.971612 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"017cf5a8-b216-4c29-8dfc-7b5b6625591c","Type":"ContainerDied","Data":"39122fe68ccaf23346e6568ce60f5ad96343708ff688ebb76e80c587fa773a90"} Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.971622 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"017cf5a8-b216-4c29-8dfc-7b5b6625591c","Type":"ContainerDied","Data":"7049e294116c3dfe281795be83dd1fef9c5ce6bd1d8517d4e1c57247c067830c"} Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.971631 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"017cf5a8-b216-4c29-8dfc-7b5b6625591c","Type":"ContainerDied","Data":"871ec88e32c2358b816ba9ab4759ab391ed91c802a84720b11986f5aac7e306a"} Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.971650 4558 scope.go:117] "RemoveContainer" containerID="5417f7ab774e651be1947ba57ee85fa32cb85abba514b48b86676238391f29eb" Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.971670 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.987253 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-97cdbfcb5-xwv8l" event={"ID":"c5adf97d-8b90-49ca-bbeb-21dd8446a226","Type":"ContainerStarted","Data":"5ad5d5673753b8c1e259ace28ba37c369cd951524f24fdfa1a91a5b011bae4ef"} Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.987298 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-97cdbfcb5-xwv8l" event={"ID":"c5adf97d-8b90-49ca-bbeb-21dd8446a226","Type":"ContainerStarted","Data":"d5d1fc6f44bede9582e8d9e34323e73cfd6032c9c0e5c64c9c713f180efb4c1c"} Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.987311 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-97cdbfcb5-xwv8l" event={"ID":"c5adf97d-8b90-49ca-bbeb-21dd8446a226","Type":"ContainerStarted","Data":"9708763ddb6cdd14575c2edb6a39f5547296c6ec28017b7ffc35f3cb63b81ad6"} Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.995532 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-x52px"] Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.998410 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-7d4c95f78b-qjwvm" event={"ID":"e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2","Type":"ContainerStarted","Data":"0eea0088c1de4416bcb9198c0646ec60717050f4f084f5e0c864e31658b39fd7"} Jan 20 17:47:17 crc kubenswrapper[4558]: I0120 17:47:17.998464 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-7d4c95f78b-qjwvm" event={"ID":"e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2","Type":"ContainerStarted","Data":"c0af33357852e67e5f3a19f6b35879cfd6cbfe786658104acea30a122af047f5"} Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.001200 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-x52px"] Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.042914 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-internal-api-0" podStartSLOduration=4.04289551 podStartE2EDuration="4.04289551s" podCreationTimestamp="2026-01-20 17:47:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:47:17.980783737 +0000 UTC m=+3931.741121704" watchObservedRunningTime="2026-01-20 17:47:18.04289551 +0000 UTC m=+3931.803233477" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.047265 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-59975f6898-zxn8m"] Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.070220 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-knlxj"] Jan 20 17:47:18 crc kubenswrapper[4558]: E0120 17:47:18.070656 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="017cf5a8-b216-4c29-8dfc-7b5b6625591c" containerName="ceilometer-notification-agent" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.070672 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="017cf5a8-b216-4c29-8dfc-7b5b6625591c" containerName="ceilometer-notification-agent" Jan 20 17:47:18 crc kubenswrapper[4558]: E0120 17:47:18.070697 4558 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="017cf5a8-b216-4c29-8dfc-7b5b6625591c" containerName="sg-core" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.070704 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="017cf5a8-b216-4c29-8dfc-7b5b6625591c" containerName="sg-core" Jan 20 17:47:18 crc kubenswrapper[4558]: E0120 17:47:18.070716 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d1809d86-a9c3-46e9-96eb-06433832465c" containerName="neutron-db-sync" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.070722 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d1809d86-a9c3-46e9-96eb-06433832465c" containerName="neutron-db-sync" Jan 20 17:47:18 crc kubenswrapper[4558]: E0120 17:47:18.070730 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20780c8d-f8bc-43c4-84ba-ba6f2e0fe601" containerName="cinder-db-sync" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.070736 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="20780c8d-f8bc-43c4-84ba-ba6f2e0fe601" containerName="cinder-db-sync" Jan 20 17:47:18 crc kubenswrapper[4558]: E0120 17:47:18.070752 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="017cf5a8-b216-4c29-8dfc-7b5b6625591c" containerName="ceilometer-central-agent" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.070757 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="017cf5a8-b216-4c29-8dfc-7b5b6625591c" containerName="ceilometer-central-agent" Jan 20 17:47:18 crc kubenswrapper[4558]: E0120 17:47:18.070773 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="017cf5a8-b216-4c29-8dfc-7b5b6625591c" containerName="proxy-httpd" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.070778 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="017cf5a8-b216-4c29-8dfc-7b5b6625591c" containerName="proxy-httpd" Jan 20 17:47:18 crc kubenswrapper[4558]: E0120 17:47:18.070788 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6" containerName="keystone-bootstrap" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.070794 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6" containerName="keystone-bootstrap" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.070969 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="017cf5a8-b216-4c29-8dfc-7b5b6625591c" containerName="sg-core" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.070980 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6" containerName="keystone-bootstrap" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.070989 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="017cf5a8-b216-4c29-8dfc-7b5b6625591c" containerName="ceilometer-central-agent" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.071000 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="017cf5a8-b216-4c29-8dfc-7b5b6625591c" containerName="proxy-httpd" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.071008 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d1809d86-a9c3-46e9-96eb-06433832465c" containerName="neutron-db-sync" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.071016 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="017cf5a8-b216-4c29-8dfc-7b5b6625591c" containerName="ceilometer-notification-agent" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 
17:47:18.071025 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="20780c8d-f8bc-43c4-84ba-ba6f2e0fe601" containerName="cinder-db-sync" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.071662 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-knlxj" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.075817 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.077281 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.078956 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-scripts" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.079197 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.079438 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-config-data" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.079555 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-keystone-dockercfg-8dfwc" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.079720 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"osp-secret" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.079910 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-config-data" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.093213 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-knlxj"] Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.094776 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-cinder-dockercfg-cwfqv" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.094957 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-scripts" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.095100 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-scheduler-config-data" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.096854 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.097918 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-worker-97cdbfcb5-xwv8l" podStartSLOduration=3.097902013 podStartE2EDuration="3.097902013s" podCreationTimestamp="2026-01-20 17:47:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:47:18.04484212 +0000 UTC m=+3931.805180088" watchObservedRunningTime="2026-01-20 17:47:18.097902013 +0000 UTC m=+3931.858239981" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.133257 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-5696778669-nc8rp"] Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.174476 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-5696778669-nc8rp" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.178009 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-httpd-config" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.178337 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-neutron-dockercfg-zs5rs" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.182967 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-neutron-ovndbs" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.183253 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-config" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.185129 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-5696778669-nc8rp"] Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.208267 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.210151 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.212800 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-api-config-data" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.240102 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/017cf5a8-b216-4c29-8dfc-7b5b6625591c-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "017cf5a8-b216-4c29-8dfc-7b5b6625591c" (UID: "017cf5a8-b216-4c29-8dfc-7b5b6625591c"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.244189 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.255707 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.255770 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5-config-data\") pod \"keystone-bootstrap-knlxj\" (UID: \"83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5\") " pod="openstack-kuttl-tests/keystone-bootstrap-knlxj" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.255801 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.255826 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5-credential-keys\") pod \"keystone-bootstrap-knlxj\" (UID: \"83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5\") " pod="openstack-kuttl-tests/keystone-bootstrap-knlxj" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.256024 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5-fernet-keys\") pod \"keystone-bootstrap-knlxj\" (UID: \"83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5\") " pod="openstack-kuttl-tests/keystone-bootstrap-knlxj" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.256103 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5-combined-ca-bundle\") pod \"keystone-bootstrap-knlxj\" (UID: \"83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5\") " pod="openstack-kuttl-tests/keystone-bootstrap-knlxj" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.256226 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qjm8k\" (UniqueName: \"kubernetes.io/projected/85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c-kube-api-access-qjm8k\") pod \"cinder-scheduler-0\" (UID: \"85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.256280 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c-scripts\") pod \"cinder-scheduler-0\" (UID: \"85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.256314 4558 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.256402 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c-config-data\") pod \"cinder-scheduler-0\" (UID: \"85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.256439 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x77wj\" (UniqueName: \"kubernetes.io/projected/83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5-kube-api-access-x77wj\") pod \"keystone-bootstrap-knlxj\" (UID: \"83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5\") " pod="openstack-kuttl-tests/keystone-bootstrap-knlxj" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.256671 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5-scripts\") pod \"keystone-bootstrap-knlxj\" (UID: \"83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5\") " pod="openstack-kuttl-tests/keystone-bootstrap-knlxj" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.256761 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/017cf5a8-b216-4c29-8dfc-7b5b6625591c-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.353312 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/017cf5a8-b216-4c29-8dfc-7b5b6625591c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "017cf5a8-b216-4c29-8dfc-7b5b6625591c" (UID: "017cf5a8-b216-4c29-8dfc-7b5b6625591c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.359189 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a832b201-8940-43e4-b09b-40d410ea33ba-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"a832b201-8940-43e4-b09b-40d410ea33ba\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.359239 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/f0ce81a9-0d35-4813-b16a-74b5757bbffa-config\") pod \"neutron-5696778669-nc8rp\" (UID: \"f0ce81a9-0d35-4813-b16a-74b5757bbffa\") " pod="openstack-kuttl-tests/neutron-5696778669-nc8rp" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.360022 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-htcfz\" (UniqueName: \"kubernetes.io/projected/a832b201-8940-43e4-b09b-40d410ea33ba-kube-api-access-htcfz\") pod \"cinder-api-0\" (UID: \"a832b201-8940-43e4-b09b-40d410ea33ba\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.360078 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qjm8k\" (UniqueName: \"kubernetes.io/projected/85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c-kube-api-access-qjm8k\") pod \"cinder-scheduler-0\" (UID: \"85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.360381 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a832b201-8940-43e4-b09b-40d410ea33ba-logs\") pod \"cinder-api-0\" (UID: \"a832b201-8940-43e4-b09b-40d410ea33ba\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.360418 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c-scripts\") pod \"cinder-scheduler-0\" (UID: \"85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.360452 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.360473 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/f0ce81a9-0d35-4813-b16a-74b5757bbffa-httpd-config\") pod \"neutron-5696778669-nc8rp\" (UID: \"f0ce81a9-0d35-4813-b16a-74b5757bbffa\") " pod="openstack-kuttl-tests/neutron-5696778669-nc8rp" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.360491 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a832b201-8940-43e4-b09b-40d410ea33ba-config-data-custom\") pod \"cinder-api-0\" (UID: \"a832b201-8940-43e4-b09b-40d410ea33ba\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 
17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.360517 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a832b201-8940-43e4-b09b-40d410ea33ba-etc-machine-id\") pod \"cinder-api-0\" (UID: \"a832b201-8940-43e4-b09b-40d410ea33ba\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.360728 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6k5lb\" (UniqueName: \"kubernetes.io/projected/f0ce81a9-0d35-4813-b16a-74b5757bbffa-kube-api-access-6k5lb\") pod \"neutron-5696778669-nc8rp\" (UID: \"f0ce81a9-0d35-4813-b16a-74b5757bbffa\") " pod="openstack-kuttl-tests/neutron-5696778669-nc8rp" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.360801 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0ce81a9-0d35-4813-b16a-74b5757bbffa-combined-ca-bundle\") pod \"neutron-5696778669-nc8rp\" (UID: \"f0ce81a9-0d35-4813-b16a-74b5757bbffa\") " pod="openstack-kuttl-tests/neutron-5696778669-nc8rp" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.360844 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a832b201-8940-43e4-b09b-40d410ea33ba-scripts\") pod \"cinder-api-0\" (UID: \"a832b201-8940-43e4-b09b-40d410ea33ba\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.360875 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c-config-data\") pod \"cinder-scheduler-0\" (UID: \"85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.360932 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x77wj\" (UniqueName: \"kubernetes.io/projected/83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5-kube-api-access-x77wj\") pod \"keystone-bootstrap-knlxj\" (UID: \"83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5\") " pod="openstack-kuttl-tests/keystone-bootstrap-knlxj" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.360977 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a832b201-8940-43e4-b09b-40d410ea33ba-config-data\") pod \"cinder-api-0\" (UID: \"a832b201-8940-43e4-b09b-40d410ea33ba\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.361215 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5-scripts\") pod \"keystone-bootstrap-knlxj\" (UID: \"83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5\") " pod="openstack-kuttl-tests/keystone-bootstrap-knlxj" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.361248 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/f0ce81a9-0d35-4813-b16a-74b5757bbffa-ovndb-tls-certs\") pod \"neutron-5696778669-nc8rp\" (UID: \"f0ce81a9-0d35-4813-b16a-74b5757bbffa\") " pod="openstack-kuttl-tests/neutron-5696778669-nc8rp" Jan 20 
17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.361687 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.361747 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5-config-data\") pod \"keystone-bootstrap-knlxj\" (UID: \"83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5\") " pod="openstack-kuttl-tests/keystone-bootstrap-knlxj" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.361778 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.361808 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5-credential-keys\") pod \"keystone-bootstrap-knlxj\" (UID: \"83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5\") " pod="openstack-kuttl-tests/keystone-bootstrap-knlxj" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.361859 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5-fernet-keys\") pod \"keystone-bootstrap-knlxj\" (UID: \"83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5\") " pod="openstack-kuttl-tests/keystone-bootstrap-knlxj" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.361902 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5-combined-ca-bundle\") pod \"keystone-bootstrap-knlxj\" (UID: \"83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5\") " pod="openstack-kuttl-tests/keystone-bootstrap-knlxj" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.361997 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/017cf5a8-b216-4c29-8dfc-7b5b6625591c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.364189 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.367025 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5-combined-ca-bundle\") pod \"keystone-bootstrap-knlxj\" (UID: \"83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5\") " pod="openstack-kuttl-tests/keystone-bootstrap-knlxj" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.369883 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c-scripts\") pod \"cinder-scheduler-0\" (UID: \"85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.373382 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5-credential-keys\") pod \"keystone-bootstrap-knlxj\" (UID: \"83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5\") " pod="openstack-kuttl-tests/keystone-bootstrap-knlxj" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.373388 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qjm8k\" (UniqueName: \"kubernetes.io/projected/85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c-kube-api-access-qjm8k\") pod \"cinder-scheduler-0\" (UID: \"85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.375499 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.380848 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5-config-data\") pod \"keystone-bootstrap-knlxj\" (UID: \"83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5\") " pod="openstack-kuttl-tests/keystone-bootstrap-knlxj" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.381078 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c-config-data\") pod \"cinder-scheduler-0\" (UID: \"85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.384013 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.384611 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/017cf5a8-b216-4c29-8dfc-7b5b6625591c-config-data" (OuterVolumeSpecName: "config-data") pod "017cf5a8-b216-4c29-8dfc-7b5b6625591c" (UID: "017cf5a8-b216-4c29-8dfc-7b5b6625591c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.385838 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5-fernet-keys\") pod \"keystone-bootstrap-knlxj\" (UID: \"83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5\") " pod="openstack-kuttl-tests/keystone-bootstrap-knlxj" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.386011 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x77wj\" (UniqueName: \"kubernetes.io/projected/83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5-kube-api-access-x77wj\") pod \"keystone-bootstrap-knlxj\" (UID: \"83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5\") " pod="openstack-kuttl-tests/keystone-bootstrap-knlxj" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.386528 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5-scripts\") pod \"keystone-bootstrap-knlxj\" (UID: \"83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5\") " pod="openstack-kuttl-tests/keystone-bootstrap-knlxj" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.452259 4558 scope.go:117] "RemoveContainer" containerID="c54b38a6355b94b185e84a1db7075c6f9bf63a358c709e1f336a4f64b4cea596" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.465480 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/f0ce81a9-0d35-4813-b16a-74b5757bbffa-config\") pod \"neutron-5696778669-nc8rp\" (UID: \"f0ce81a9-0d35-4813-b16a-74b5757bbffa\") " pod="openstack-kuttl-tests/neutron-5696778669-nc8rp" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.465521 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-htcfz\" (UniqueName: \"kubernetes.io/projected/a832b201-8940-43e4-b09b-40d410ea33ba-kube-api-access-htcfz\") pod \"cinder-api-0\" (UID: \"a832b201-8940-43e4-b09b-40d410ea33ba\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.465564 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a832b201-8940-43e4-b09b-40d410ea33ba-logs\") pod \"cinder-api-0\" (UID: \"a832b201-8940-43e4-b09b-40d410ea33ba\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.465594 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/f0ce81a9-0d35-4813-b16a-74b5757bbffa-httpd-config\") pod \"neutron-5696778669-nc8rp\" (UID: \"f0ce81a9-0d35-4813-b16a-74b5757bbffa\") " pod="openstack-kuttl-tests/neutron-5696778669-nc8rp" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.465610 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a832b201-8940-43e4-b09b-40d410ea33ba-config-data-custom\") pod \"cinder-api-0\" (UID: \"a832b201-8940-43e4-b09b-40d410ea33ba\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.465633 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a832b201-8940-43e4-b09b-40d410ea33ba-etc-machine-id\") pod \"cinder-api-0\" (UID: 
\"a832b201-8940-43e4-b09b-40d410ea33ba\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.465668 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6k5lb\" (UniqueName: \"kubernetes.io/projected/f0ce81a9-0d35-4813-b16a-74b5757bbffa-kube-api-access-6k5lb\") pod \"neutron-5696778669-nc8rp\" (UID: \"f0ce81a9-0d35-4813-b16a-74b5757bbffa\") " pod="openstack-kuttl-tests/neutron-5696778669-nc8rp" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.465687 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0ce81a9-0d35-4813-b16a-74b5757bbffa-combined-ca-bundle\") pod \"neutron-5696778669-nc8rp\" (UID: \"f0ce81a9-0d35-4813-b16a-74b5757bbffa\") " pod="openstack-kuttl-tests/neutron-5696778669-nc8rp" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.465704 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a832b201-8940-43e4-b09b-40d410ea33ba-scripts\") pod \"cinder-api-0\" (UID: \"a832b201-8940-43e4-b09b-40d410ea33ba\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.465736 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a832b201-8940-43e4-b09b-40d410ea33ba-config-data\") pod \"cinder-api-0\" (UID: \"a832b201-8940-43e4-b09b-40d410ea33ba\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.465777 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/f0ce81a9-0d35-4813-b16a-74b5757bbffa-ovndb-tls-certs\") pod \"neutron-5696778669-nc8rp\" (UID: \"f0ce81a9-0d35-4813-b16a-74b5757bbffa\") " pod="openstack-kuttl-tests/neutron-5696778669-nc8rp" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.465902 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a832b201-8940-43e4-b09b-40d410ea33ba-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"a832b201-8940-43e4-b09b-40d410ea33ba\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.465968 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/017cf5a8-b216-4c29-8dfc-7b5b6625591c-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.466353 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a832b201-8940-43e4-b09b-40d410ea33ba-etc-machine-id\") pod \"cinder-api-0\" (UID: \"a832b201-8940-43e4-b09b-40d410ea33ba\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.467624 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a832b201-8940-43e4-b09b-40d410ea33ba-logs\") pod \"cinder-api-0\" (UID: \"a832b201-8940-43e4-b09b-40d410ea33ba\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.472906 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/a832b201-8940-43e4-b09b-40d410ea33ba-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"a832b201-8940-43e4-b09b-40d410ea33ba\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.473714 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/f0ce81a9-0d35-4813-b16a-74b5757bbffa-config\") pod \"neutron-5696778669-nc8rp\" (UID: \"f0ce81a9-0d35-4813-b16a-74b5757bbffa\") " pod="openstack-kuttl-tests/neutron-5696778669-nc8rp" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.481098 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0ce81a9-0d35-4813-b16a-74b5757bbffa-combined-ca-bundle\") pod \"neutron-5696778669-nc8rp\" (UID: \"f0ce81a9-0d35-4813-b16a-74b5757bbffa\") " pod="openstack-kuttl-tests/neutron-5696778669-nc8rp" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.481550 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a832b201-8940-43e4-b09b-40d410ea33ba-scripts\") pod \"cinder-api-0\" (UID: \"a832b201-8940-43e4-b09b-40d410ea33ba\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.486226 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a832b201-8940-43e4-b09b-40d410ea33ba-config-data\") pod \"cinder-api-0\" (UID: \"a832b201-8940-43e4-b09b-40d410ea33ba\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.486836 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6k5lb\" (UniqueName: \"kubernetes.io/projected/f0ce81a9-0d35-4813-b16a-74b5757bbffa-kube-api-access-6k5lb\") pod \"neutron-5696778669-nc8rp\" (UID: \"f0ce81a9-0d35-4813-b16a-74b5757bbffa\") " pod="openstack-kuttl-tests/neutron-5696778669-nc8rp" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.486860 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-htcfz\" (UniqueName: \"kubernetes.io/projected/a832b201-8940-43e4-b09b-40d410ea33ba-kube-api-access-htcfz\") pod \"cinder-api-0\" (UID: \"a832b201-8940-43e4-b09b-40d410ea33ba\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.487400 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/f0ce81a9-0d35-4813-b16a-74b5757bbffa-httpd-config\") pod \"neutron-5696778669-nc8rp\" (UID: \"f0ce81a9-0d35-4813-b16a-74b5757bbffa\") " pod="openstack-kuttl-tests/neutron-5696778669-nc8rp" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.487951 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/f0ce81a9-0d35-4813-b16a-74b5757bbffa-ovndb-tls-certs\") pod \"neutron-5696778669-nc8rp\" (UID: \"f0ce81a9-0d35-4813-b16a-74b5757bbffa\") " pod="openstack-kuttl-tests/neutron-5696778669-nc8rp" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.487966 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a832b201-8940-43e4-b09b-40d410ea33ba-config-data-custom\") pod \"cinder-api-0\" (UID: \"a832b201-8940-43e4-b09b-40d410ea33ba\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 
17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.535382 4558 scope.go:117] "RemoveContainer" containerID="39122fe68ccaf23346e6568ce60f5ad96343708ff688ebb76e80c587fa773a90" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.557767 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-knlxj" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.569682 4558 scope.go:117] "RemoveContainer" containerID="7049e294116c3dfe281795be83dd1fef9c5ce6bd1d8517d4e1c57247c067830c" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.574181 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.582275 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6" path="/var/lib/kubelet/pods/3a0e658d-6dc9-4d45-bf9a-1dd7fe5c56a6/volumes" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.674219 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-5696778669-nc8rp" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.705148 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.712395 4558 scope.go:117] "RemoveContainer" containerID="5417f7ab774e651be1947ba57ee85fa32cb85abba514b48b86676238391f29eb" Jan 20 17:47:18 crc kubenswrapper[4558]: E0120 17:47:18.715194 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5417f7ab774e651be1947ba57ee85fa32cb85abba514b48b86676238391f29eb\": container with ID starting with 5417f7ab774e651be1947ba57ee85fa32cb85abba514b48b86676238391f29eb not found: ID does not exist" containerID="5417f7ab774e651be1947ba57ee85fa32cb85abba514b48b86676238391f29eb" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.715226 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5417f7ab774e651be1947ba57ee85fa32cb85abba514b48b86676238391f29eb"} err="failed to get container status \"5417f7ab774e651be1947ba57ee85fa32cb85abba514b48b86676238391f29eb\": rpc error: code = NotFound desc = could not find container \"5417f7ab774e651be1947ba57ee85fa32cb85abba514b48b86676238391f29eb\": container with ID starting with 5417f7ab774e651be1947ba57ee85fa32cb85abba514b48b86676238391f29eb not found: ID does not exist" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.715246 4558 scope.go:117] "RemoveContainer" containerID="c54b38a6355b94b185e84a1db7075c6f9bf63a358c709e1f336a4f64b4cea596" Jan 20 17:47:18 crc kubenswrapper[4558]: E0120 17:47:18.716266 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c54b38a6355b94b185e84a1db7075c6f9bf63a358c709e1f336a4f64b4cea596\": container with ID starting with c54b38a6355b94b185e84a1db7075c6f9bf63a358c709e1f336a4f64b4cea596 not found: ID does not exist" containerID="c54b38a6355b94b185e84a1db7075c6f9bf63a358c709e1f336a4f64b4cea596" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.716291 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c54b38a6355b94b185e84a1db7075c6f9bf63a358c709e1f336a4f64b4cea596"} err="failed to get container status 
\"c54b38a6355b94b185e84a1db7075c6f9bf63a358c709e1f336a4f64b4cea596\": rpc error: code = NotFound desc = could not find container \"c54b38a6355b94b185e84a1db7075c6f9bf63a358c709e1f336a4f64b4cea596\": container with ID starting with c54b38a6355b94b185e84a1db7075c6f9bf63a358c709e1f336a4f64b4cea596 not found: ID does not exist" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.716305 4558 scope.go:117] "RemoveContainer" containerID="39122fe68ccaf23346e6568ce60f5ad96343708ff688ebb76e80c587fa773a90" Jan 20 17:47:18 crc kubenswrapper[4558]: E0120 17:47:18.723337 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"39122fe68ccaf23346e6568ce60f5ad96343708ff688ebb76e80c587fa773a90\": container with ID starting with 39122fe68ccaf23346e6568ce60f5ad96343708ff688ebb76e80c587fa773a90 not found: ID does not exist" containerID="39122fe68ccaf23346e6568ce60f5ad96343708ff688ebb76e80c587fa773a90" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.723377 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"39122fe68ccaf23346e6568ce60f5ad96343708ff688ebb76e80c587fa773a90"} err="failed to get container status \"39122fe68ccaf23346e6568ce60f5ad96343708ff688ebb76e80c587fa773a90\": rpc error: code = NotFound desc = could not find container \"39122fe68ccaf23346e6568ce60f5ad96343708ff688ebb76e80c587fa773a90\": container with ID starting with 39122fe68ccaf23346e6568ce60f5ad96343708ff688ebb76e80c587fa773a90 not found: ID does not exist" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.723403 4558 scope.go:117] "RemoveContainer" containerID="7049e294116c3dfe281795be83dd1fef9c5ce6bd1d8517d4e1c57247c067830c" Jan 20 17:47:18 crc kubenswrapper[4558]: E0120 17:47:18.725304 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7049e294116c3dfe281795be83dd1fef9c5ce6bd1d8517d4e1c57247c067830c\": container with ID starting with 7049e294116c3dfe281795be83dd1fef9c5ce6bd1d8517d4e1c57247c067830c not found: ID does not exist" containerID="7049e294116c3dfe281795be83dd1fef9c5ce6bd1d8517d4e1c57247c067830c" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.725337 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7049e294116c3dfe281795be83dd1fef9c5ce6bd1d8517d4e1c57247c067830c"} err="failed to get container status \"7049e294116c3dfe281795be83dd1fef9c5ce6bd1d8517d4e1c57247c067830c\": rpc error: code = NotFound desc = could not find container \"7049e294116c3dfe281795be83dd1fef9c5ce6bd1d8517d4e1c57247c067830c\": container with ID starting with 7049e294116c3dfe281795be83dd1fef9c5ce6bd1d8517d4e1c57247c067830c not found: ID does not exist" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.725352 4558 scope.go:117] "RemoveContainer" containerID="5417f7ab774e651be1947ba57ee85fa32cb85abba514b48b86676238391f29eb" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.728755 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5417f7ab774e651be1947ba57ee85fa32cb85abba514b48b86676238391f29eb"} err="failed to get container status \"5417f7ab774e651be1947ba57ee85fa32cb85abba514b48b86676238391f29eb\": rpc error: code = NotFound desc = could not find container \"5417f7ab774e651be1947ba57ee85fa32cb85abba514b48b86676238391f29eb\": container with ID starting with 5417f7ab774e651be1947ba57ee85fa32cb85abba514b48b86676238391f29eb not found: 
ID does not exist" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.728780 4558 scope.go:117] "RemoveContainer" containerID="c54b38a6355b94b185e84a1db7075c6f9bf63a358c709e1f336a4f64b4cea596" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.736499 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c54b38a6355b94b185e84a1db7075c6f9bf63a358c709e1f336a4f64b4cea596"} err="failed to get container status \"c54b38a6355b94b185e84a1db7075c6f9bf63a358c709e1f336a4f64b4cea596\": rpc error: code = NotFound desc = could not find container \"c54b38a6355b94b185e84a1db7075c6f9bf63a358c709e1f336a4f64b4cea596\": container with ID starting with c54b38a6355b94b185e84a1db7075c6f9bf63a358c709e1f336a4f64b4cea596 not found: ID does not exist" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.736524 4558 scope.go:117] "RemoveContainer" containerID="39122fe68ccaf23346e6568ce60f5ad96343708ff688ebb76e80c587fa773a90" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.737019 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"39122fe68ccaf23346e6568ce60f5ad96343708ff688ebb76e80c587fa773a90"} err="failed to get container status \"39122fe68ccaf23346e6568ce60f5ad96343708ff688ebb76e80c587fa773a90\": rpc error: code = NotFound desc = could not find container \"39122fe68ccaf23346e6568ce60f5ad96343708ff688ebb76e80c587fa773a90\": container with ID starting with 39122fe68ccaf23346e6568ce60f5ad96343708ff688ebb76e80c587fa773a90 not found: ID does not exist" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.737062 4558 scope.go:117] "RemoveContainer" containerID="7049e294116c3dfe281795be83dd1fef9c5ce6bd1d8517d4e1c57247c067830c" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.739220 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7049e294116c3dfe281795be83dd1fef9c5ce6bd1d8517d4e1c57247c067830c"} err="failed to get container status \"7049e294116c3dfe281795be83dd1fef9c5ce6bd1d8517d4e1c57247c067830c\": rpc error: code = NotFound desc = could not find container \"7049e294116c3dfe281795be83dd1fef9c5ce6bd1d8517d4e1c57247c067830c\": container with ID starting with 7049e294116c3dfe281795be83dd1fef9c5ce6bd1d8517d4e1c57247c067830c not found: ID does not exist" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.739259 4558 scope.go:117] "RemoveContainer" containerID="5417f7ab774e651be1947ba57ee85fa32cb85abba514b48b86676238391f29eb" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.742209 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5417f7ab774e651be1947ba57ee85fa32cb85abba514b48b86676238391f29eb"} err="failed to get container status \"5417f7ab774e651be1947ba57ee85fa32cb85abba514b48b86676238391f29eb\": rpc error: code = NotFound desc = could not find container \"5417f7ab774e651be1947ba57ee85fa32cb85abba514b48b86676238391f29eb\": container with ID starting with 5417f7ab774e651be1947ba57ee85fa32cb85abba514b48b86676238391f29eb not found: ID does not exist" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.742233 4558 scope.go:117] "RemoveContainer" containerID="c54b38a6355b94b185e84a1db7075c6f9bf63a358c709e1f336a4f64b4cea596" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.742971 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c54b38a6355b94b185e84a1db7075c6f9bf63a358c709e1f336a4f64b4cea596"} err="failed to get container status 
\"c54b38a6355b94b185e84a1db7075c6f9bf63a358c709e1f336a4f64b4cea596\": rpc error: code = NotFound desc = could not find container \"c54b38a6355b94b185e84a1db7075c6f9bf63a358c709e1f336a4f64b4cea596\": container with ID starting with c54b38a6355b94b185e84a1db7075c6f9bf63a358c709e1f336a4f64b4cea596 not found: ID does not exist" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.743046 4558 scope.go:117] "RemoveContainer" containerID="39122fe68ccaf23346e6568ce60f5ad96343708ff688ebb76e80c587fa773a90" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.749219 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.750413 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"39122fe68ccaf23346e6568ce60f5ad96343708ff688ebb76e80c587fa773a90"} err="failed to get container status \"39122fe68ccaf23346e6568ce60f5ad96343708ff688ebb76e80c587fa773a90\": rpc error: code = NotFound desc = could not find container \"39122fe68ccaf23346e6568ce60f5ad96343708ff688ebb76e80c587fa773a90\": container with ID starting with 39122fe68ccaf23346e6568ce60f5ad96343708ff688ebb76e80c587fa773a90 not found: ID does not exist" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.750443 4558 scope.go:117] "RemoveContainer" containerID="7049e294116c3dfe281795be83dd1fef9c5ce6bd1d8517d4e1c57247c067830c" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.756249 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7049e294116c3dfe281795be83dd1fef9c5ce6bd1d8517d4e1c57247c067830c"} err="failed to get container status \"7049e294116c3dfe281795be83dd1fef9c5ce6bd1d8517d4e1c57247c067830c\": rpc error: code = NotFound desc = could not find container \"7049e294116c3dfe281795be83dd1fef9c5ce6bd1d8517d4e1c57247c067830c\": container with ID starting with 7049e294116c3dfe281795be83dd1fef9c5ce6bd1d8517d4e1c57247c067830c not found: ID does not exist" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.756278 4558 scope.go:117] "RemoveContainer" containerID="5417f7ab774e651be1947ba57ee85fa32cb85abba514b48b86676238391f29eb" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.756933 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.761393 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5417f7ab774e651be1947ba57ee85fa32cb85abba514b48b86676238391f29eb"} err="failed to get container status \"5417f7ab774e651be1947ba57ee85fa32cb85abba514b48b86676238391f29eb\": rpc error: code = NotFound desc = could not find container \"5417f7ab774e651be1947ba57ee85fa32cb85abba514b48b86676238391f29eb\": container with ID starting with 5417f7ab774e651be1947ba57ee85fa32cb85abba514b48b86676238391f29eb not found: ID does not exist" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.761421 4558 scope.go:117] "RemoveContainer" containerID="c54b38a6355b94b185e84a1db7075c6f9bf63a358c709e1f336a4f64b4cea596" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.765207 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.767798 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c54b38a6355b94b185e84a1db7075c6f9bf63a358c709e1f336a4f64b4cea596"} err="failed to get 
container status \"c54b38a6355b94b185e84a1db7075c6f9bf63a358c709e1f336a4f64b4cea596\": rpc error: code = NotFound desc = could not find container \"c54b38a6355b94b185e84a1db7075c6f9bf63a358c709e1f336a4f64b4cea596\": container with ID starting with c54b38a6355b94b185e84a1db7075c6f9bf63a358c709e1f336a4f64b4cea596 not found: ID does not exist" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.767844 4558 scope.go:117] "RemoveContainer" containerID="39122fe68ccaf23346e6568ce60f5ad96343708ff688ebb76e80c587fa773a90" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.769019 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.769110 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.770497 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.770753 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.774987 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"39122fe68ccaf23346e6568ce60f5ad96343708ff688ebb76e80c587fa773a90"} err="failed to get container status \"39122fe68ccaf23346e6568ce60f5ad96343708ff688ebb76e80c587fa773a90\": rpc error: code = NotFound desc = could not find container \"39122fe68ccaf23346e6568ce60f5ad96343708ff688ebb76e80c587fa773a90\": container with ID starting with 39122fe68ccaf23346e6568ce60f5ad96343708ff688ebb76e80c587fa773a90 not found: ID does not exist" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.775016 4558 scope.go:117] "RemoveContainer" containerID="7049e294116c3dfe281795be83dd1fef9c5ce6bd1d8517d4e1c57247c067830c" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.778123 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7049e294116c3dfe281795be83dd1fef9c5ce6bd1d8517d4e1c57247c067830c"} err="failed to get container status \"7049e294116c3dfe281795be83dd1fef9c5ce6bd1d8517d4e1c57247c067830c\": rpc error: code = NotFound desc = could not find container \"7049e294116c3dfe281795be83dd1fef9c5ce6bd1d8517d4e1c57247c067830c\": container with ID starting with 7049e294116c3dfe281795be83dd1fef9c5ce6bd1d8517d4e1c57247c067830c not found: ID does not exist" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.881266 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e45db124-f220-41eb-b1d0-d82affa8be89-scripts\") pod \"ceilometer-0\" (UID: \"e45db124-f220-41eb-b1d0-d82affa8be89\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.881320 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e45db124-f220-41eb-b1d0-d82affa8be89-config-data\") pod \"ceilometer-0\" (UID: \"e45db124-f220-41eb-b1d0-d82affa8be89\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.881403 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/e45db124-f220-41eb-b1d0-d82affa8be89-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e45db124-f220-41eb-b1d0-d82affa8be89\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.881464 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e45db124-f220-41eb-b1d0-d82affa8be89-log-httpd\") pod \"ceilometer-0\" (UID: \"e45db124-f220-41eb-b1d0-d82affa8be89\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.881511 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7sksf\" (UniqueName: \"kubernetes.io/projected/e45db124-f220-41eb-b1d0-d82affa8be89-kube-api-access-7sksf\") pod \"ceilometer-0\" (UID: \"e45db124-f220-41eb-b1d0-d82affa8be89\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.881537 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e45db124-f220-41eb-b1d0-d82affa8be89-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e45db124-f220-41eb-b1d0-d82affa8be89\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.881584 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e45db124-f220-41eb-b1d0-d82affa8be89-run-httpd\") pod \"ceilometer-0\" (UID: \"e45db124-f220-41eb-b1d0-d82affa8be89\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.983815 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e45db124-f220-41eb-b1d0-d82affa8be89-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e45db124-f220-41eb-b1d0-d82affa8be89\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.985452 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e45db124-f220-41eb-b1d0-d82affa8be89-run-httpd\") pod \"ceilometer-0\" (UID: \"e45db124-f220-41eb-b1d0-d82affa8be89\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.985551 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e45db124-f220-41eb-b1d0-d82affa8be89-scripts\") pod \"ceilometer-0\" (UID: \"e45db124-f220-41eb-b1d0-d82affa8be89\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.985591 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e45db124-f220-41eb-b1d0-d82affa8be89-config-data\") pod \"ceilometer-0\" (UID: \"e45db124-f220-41eb-b1d0-d82affa8be89\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.985720 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e45db124-f220-41eb-b1d0-d82affa8be89-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e45db124-f220-41eb-b1d0-d82affa8be89\") " 
pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.985827 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e45db124-f220-41eb-b1d0-d82affa8be89-log-httpd\") pod \"ceilometer-0\" (UID: \"e45db124-f220-41eb-b1d0-d82affa8be89\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.985876 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7sksf\" (UniqueName: \"kubernetes.io/projected/e45db124-f220-41eb-b1d0-d82affa8be89-kube-api-access-7sksf\") pod \"ceilometer-0\" (UID: \"e45db124-f220-41eb-b1d0-d82affa8be89\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.986647 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e45db124-f220-41eb-b1d0-d82affa8be89-run-httpd\") pod \"ceilometer-0\" (UID: \"e45db124-f220-41eb-b1d0-d82affa8be89\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.986897 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e45db124-f220-41eb-b1d0-d82affa8be89-log-httpd\") pod \"ceilometer-0\" (UID: \"e45db124-f220-41eb-b1d0-d82affa8be89\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.987917 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e45db124-f220-41eb-b1d0-d82affa8be89-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e45db124-f220-41eb-b1d0-d82affa8be89\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.992593 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e45db124-f220-41eb-b1d0-d82affa8be89-scripts\") pod \"ceilometer-0\" (UID: \"e45db124-f220-41eb-b1d0-d82affa8be89\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.993057 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e45db124-f220-41eb-b1d0-d82affa8be89-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e45db124-f220-41eb-b1d0-d82affa8be89\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:47:18 crc kubenswrapper[4558]: I0120 17:47:18.994284 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e45db124-f220-41eb-b1d0-d82affa8be89-config-data\") pod \"ceilometer-0\" (UID: \"e45db124-f220-41eb-b1d0-d82affa8be89\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:47:19 crc kubenswrapper[4558]: I0120 17:47:19.005425 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7sksf\" (UniqueName: \"kubernetes.io/projected/e45db124-f220-41eb-b1d0-d82affa8be89-kube-api-access-7sksf\") pod \"ceilometer-0\" (UID: \"e45db124-f220-41eb-b1d0-d82affa8be89\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:47:19 crc kubenswrapper[4558]: I0120 17:47:19.013359 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-7dc79cc978-clpbw" 
event={"ID":"76dc3695-3423-40fe-afcc-798d8a78c542","Type":"ContainerStarted","Data":"dc94f746e9dc1ee4a286f07430cbbe7edd91ff78805b40d08e514556cdc4d5e2"} Jan 20 17:47:19 crc kubenswrapper[4558]: I0120 17:47:19.013404 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-7dc79cc978-clpbw" event={"ID":"76dc3695-3423-40fe-afcc-798d8a78c542","Type":"ContainerStarted","Data":"6368279e916b13bd72d42c5d932eb5a8ffcdded7a96ea231fd08ff982fc45343"} Jan 20 17:47:19 crc kubenswrapper[4558]: I0120 17:47:19.014019 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/placement-7dc79cc978-clpbw" Jan 20 17:47:19 crc kubenswrapper[4558]: I0120 17:47:19.022740 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-58bb6f688-8dbng" event={"ID":"ec317104-430d-4590-85a9-257448df4212","Type":"ContainerStarted","Data":"1fd8e500782e2308db201caa8beedba31be54584b72f7b311e8a9a029cb348ef"} Jan 20 17:47:19 crc kubenswrapper[4558]: I0120 17:47:19.023076 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/barbican-api-58bb6f688-8dbng" Jan 20 17:47:19 crc kubenswrapper[4558]: I0120 17:47:19.023100 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/barbican-api-58bb6f688-8dbng" Jan 20 17:47:19 crc kubenswrapper[4558]: I0120 17:47:19.030083 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-7d4c95f78b-qjwvm" event={"ID":"e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2","Type":"ContainerStarted","Data":"7436f50dac4bab17580af1cbb38a2ef4c0a9b64f22e3397fd9aa8eeeb133fc29"} Jan 20 17:47:19 crc kubenswrapper[4558]: I0120 17:47:19.050834 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-59975f6898-zxn8m" event={"ID":"a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2","Type":"ContainerStarted","Data":"845fa35e74757f51248608df722fce7c1028166d79229d0cc2abebe77b4abcbb"} Jan 20 17:47:19 crc kubenswrapper[4558]: I0120 17:47:19.050881 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-59975f6898-zxn8m" event={"ID":"a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2","Type":"ContainerStarted","Data":"46acf2fe99b48c1acbe706dbb538dac56d4267574506bae9dae6740549980f96"} Jan 20 17:47:19 crc kubenswrapper[4558]: I0120 17:47:19.061894 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/placement-7dc79cc978-clpbw" podStartSLOduration=3.061871659 podStartE2EDuration="3.061871659s" podCreationTimestamp="2026-01-20 17:47:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:47:19.033402036 +0000 UTC m=+3932.793740033" watchObservedRunningTime="2026-01-20 17:47:19.061871659 +0000 UTC m=+3932.822209626" Jan 20 17:47:19 crc kubenswrapper[4558]: I0120 17:47:19.065967 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-keystone-listener-7d4c95f78b-qjwvm" podStartSLOduration=4.065953966 podStartE2EDuration="4.065953966s" podCreationTimestamp="2026-01-20 17:47:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:47:19.052218878 +0000 UTC m=+3932.812556845" watchObservedRunningTime="2026-01-20 17:47:19.065953966 +0000 UTC m=+3932.826291933" Jan 20 17:47:19 
crc kubenswrapper[4558]: I0120 17:47:19.099146 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:47:19 crc kubenswrapper[4558]: I0120 17:47:19.099479 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-api-58bb6f688-8dbng" podStartSLOduration=3.099463326 podStartE2EDuration="3.099463326s" podCreationTimestamp="2026-01-20 17:47:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:47:19.077143286 +0000 UTC m=+3932.837481243" watchObservedRunningTime="2026-01-20 17:47:19.099463326 +0000 UTC m=+3932.859801293" Jan 20 17:47:19 crc kubenswrapper[4558]: I0120 17:47:19.137785 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:47:19 crc kubenswrapper[4558]: I0120 17:47:19.203107 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-knlxj"] Jan 20 17:47:19 crc kubenswrapper[4558]: W0120 17:47:19.229471 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod83dbadc1_6bc5_4c25_a541_c0c6d4bdadd5.slice/crio-d6ce6aadadfc2f8eecb60c4c67045efc2b26dd8bbbfaf20446c1b6670e279516 WatchSource:0}: Error finding container d6ce6aadadfc2f8eecb60c4c67045efc2b26dd8bbbfaf20446c1b6670e279516: Status 404 returned error can't find the container with id d6ce6aadadfc2f8eecb60c4c67045efc2b26dd8bbbfaf20446c1b6670e279516 Jan 20 17:47:19 crc kubenswrapper[4558]: I0120 17:47:19.367687 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-5696778669-nc8rp"] Jan 20 17:47:19 crc kubenswrapper[4558]: I0120 17:47:19.374564 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:47:19 crc kubenswrapper[4558]: W0120 17:47:19.376455 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf0ce81a9_0d35_4813_b16a_74b5757bbffa.slice/crio-c2c7945d7cc7ff591f208d7328412753fdddc6fe05b44155564b515f6902a9e7 WatchSource:0}: Error finding container c2c7945d7cc7ff591f208d7328412753fdddc6fe05b44155564b515f6902a9e7: Status 404 returned error can't find the container with id c2c7945d7cc7ff591f208d7328412753fdddc6fe05b44155564b515f6902a9e7 Jan 20 17:47:19 crc kubenswrapper[4558]: I0120 17:47:19.561688 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:47:20 crc kubenswrapper[4558]: I0120 17:47:20.091824 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"a832b201-8940-43e4-b09b-40d410ea33ba","Type":"ContainerStarted","Data":"fc90312ceb0df9a73011c391c468aa7e5dd472a3e279c249c6e61f63c411fd3f"} Jan 20 17:47:20 crc kubenswrapper[4558]: I0120 17:47:20.092818 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"a832b201-8940-43e4-b09b-40d410ea33ba","Type":"ContainerStarted","Data":"9e054ef500e01a30e40b05254e1cef6e638305b61e42d0b2e37fe55e14e375e1"} Jan 20 17:47:20 crc kubenswrapper[4558]: I0120 17:47:20.102248 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" 
event={"ID":"e45db124-f220-41eb-b1d0-d82affa8be89","Type":"ContainerStarted","Data":"4db4f17b363449cec174ff9cbaca83a847dd8377727ce3c4f4b2ca5af1c49fa6"} Jan 20 17:47:20 crc kubenswrapper[4558]: I0120 17:47:20.105685 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c","Type":"ContainerStarted","Data":"1a264c7c6924592f510f33ea71a48b399db20043fd3d4ea422a9b2423c9ba2a7"} Jan 20 17:47:20 crc kubenswrapper[4558]: I0120 17:47:20.105741 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c","Type":"ContainerStarted","Data":"c1b021c25050d852d7956f674101429e63ad23933af560a0f81bfa7b4c096ffb"} Jan 20 17:47:20 crc kubenswrapper[4558]: I0120 17:47:20.110952 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-5696778669-nc8rp" event={"ID":"f0ce81a9-0d35-4813-b16a-74b5757bbffa","Type":"ContainerStarted","Data":"ce80cef01c83d809bb3070c10cc026abf5f54be5422d252f6f5f31c47b21a1bb"} Jan 20 17:47:20 crc kubenswrapper[4558]: I0120 17:47:20.110989 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-5696778669-nc8rp" event={"ID":"f0ce81a9-0d35-4813-b16a-74b5757bbffa","Type":"ContainerStarted","Data":"586bbbff36fe57de7cd9f2ecae0bec8f27031f35667892a8eb2376c25bb37cdf"} Jan 20 17:47:20 crc kubenswrapper[4558]: I0120 17:47:20.111002 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-5696778669-nc8rp" event={"ID":"f0ce81a9-0d35-4813-b16a-74b5757bbffa","Type":"ContainerStarted","Data":"c2c7945d7cc7ff591f208d7328412753fdddc6fe05b44155564b515f6902a9e7"} Jan 20 17:47:20 crc kubenswrapper[4558]: I0120 17:47:20.111762 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/neutron-5696778669-nc8rp" Jan 20 17:47:20 crc kubenswrapper[4558]: I0120 17:47:20.133440 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-59975f6898-zxn8m" event={"ID":"a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2","Type":"ContainerStarted","Data":"c423e5ba761c266b37e8a9914f09c46db8c16aa75089222ed91b3723dd5c4ca5"} Jan 20 17:47:20 crc kubenswrapper[4558]: I0120 17:47:20.134380 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/placement-59975f6898-zxn8m" Jan 20 17:47:20 crc kubenswrapper[4558]: I0120 17:47:20.134414 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/placement-59975f6898-zxn8m" Jan 20 17:47:20 crc kubenswrapper[4558]: I0120 17:47:20.137640 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-knlxj" event={"ID":"83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5","Type":"ContainerStarted","Data":"d464d7020e25ad80b4a7faeb918b86aad826276b94aea66bcc2f1c25be05ceaa"} Jan 20 17:47:20 crc kubenswrapper[4558]: I0120 17:47:20.138018 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-knlxj" event={"ID":"83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5","Type":"ContainerStarted","Data":"d6ce6aadadfc2f8eecb60c4c67045efc2b26dd8bbbfaf20446c1b6670e279516"} Jan 20 17:47:20 crc kubenswrapper[4558]: I0120 17:47:20.144667 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/placement-7dc79cc978-clpbw" Jan 20 17:47:20 crc kubenswrapper[4558]: I0120 17:47:20.152202 4558 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/neutron-5696778669-nc8rp" podStartSLOduration=2.150566586 podStartE2EDuration="2.150566586s" podCreationTimestamp="2026-01-20 17:47:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:47:20.12645139 +0000 UTC m=+3933.886789357" watchObservedRunningTime="2026-01-20 17:47:20.150566586 +0000 UTC m=+3933.910904553" Jan 20 17:47:20 crc kubenswrapper[4558]: I0120 17:47:20.166558 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/placement-59975f6898-zxn8m" podStartSLOduration=3.166539431 podStartE2EDuration="3.166539431s" podCreationTimestamp="2026-01-20 17:47:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:47:20.148120526 +0000 UTC m=+3933.908458494" watchObservedRunningTime="2026-01-20 17:47:20.166539431 +0000 UTC m=+3933.926877398" Jan 20 17:47:20 crc kubenswrapper[4558]: I0120 17:47:20.179829 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/keystone-bootstrap-knlxj" podStartSLOduration=3.179811858 podStartE2EDuration="3.179811858s" podCreationTimestamp="2026-01-20 17:47:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:47:20.174551235 +0000 UTC m=+3933.934889202" watchObservedRunningTime="2026-01-20 17:47:20.179811858 +0000 UTC m=+3933.940149824" Jan 20 17:47:20 crc kubenswrapper[4558]: I0120 17:47:20.575930 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="017cf5a8-b216-4c29-8dfc-7b5b6625591c" path="/var/lib/kubelet/pods/017cf5a8-b216-4c29-8dfc-7b5b6625591c/volumes" Jan 20 17:47:21 crc kubenswrapper[4558]: I0120 17:47:21.153044 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"a832b201-8940-43e4-b09b-40d410ea33ba","Type":"ContainerStarted","Data":"f63f565eb1c52c4ac8e4d32917421d675aeab4babd8d641cb8d344e26265c527"} Jan 20 17:47:21 crc kubenswrapper[4558]: I0120 17:47:21.153488 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:47:21 crc kubenswrapper[4558]: I0120 17:47:21.157370 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"e45db124-f220-41eb-b1d0-d82affa8be89","Type":"ContainerStarted","Data":"c3f0f48e19453567cc266442915bb53d5a8b0fd16b12f32fe41cf8a3cc87bb16"} Jan 20 17:47:21 crc kubenswrapper[4558]: I0120 17:47:21.157437 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"e45db124-f220-41eb-b1d0-d82affa8be89","Type":"ContainerStarted","Data":"49f438b00e8f6bc182801aa4facd16c61d68d52ed9eb66fae006e768b82b5e54"} Jan 20 17:47:21 crc kubenswrapper[4558]: I0120 17:47:21.165863 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c","Type":"ContainerStarted","Data":"c0c05ae7cdd27acea0025a9d26ff2baa4b34e3e0ade141c45e726cf972b0eb47"} Jan 20 17:47:21 crc kubenswrapper[4558]: I0120 17:47:21.209442 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/cinder-api-0" podStartSLOduration=3.209423115 
podStartE2EDuration="3.209423115s" podCreationTimestamp="2026-01-20 17:47:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:47:21.186638661 +0000 UTC m=+3934.946976628" watchObservedRunningTime="2026-01-20 17:47:21.209423115 +0000 UTC m=+3934.969761081" Jan 20 17:47:21 crc kubenswrapper[4558]: I0120 17:47:21.213127 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/cinder-scheduler-0" podStartSLOduration=4.213115879 podStartE2EDuration="4.213115879s" podCreationTimestamp="2026-01-20 17:47:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:47:21.20459168 +0000 UTC m=+3934.964929647" watchObservedRunningTime="2026-01-20 17:47:21.213115879 +0000 UTC m=+3934.973453846" Jan 20 17:47:22 crc kubenswrapper[4558]: I0120 17:47:22.179819 4558 generic.go:334] "Generic (PLEG): container finished" podID="83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5" containerID="d464d7020e25ad80b4a7faeb918b86aad826276b94aea66bcc2f1c25be05ceaa" exitCode=0 Jan 20 17:47:22 crc kubenswrapper[4558]: I0120 17:47:22.179988 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-knlxj" event={"ID":"83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5","Type":"ContainerDied","Data":"d464d7020e25ad80b4a7faeb918b86aad826276b94aea66bcc2f1c25be05ceaa"} Jan 20 17:47:22 crc kubenswrapper[4558]: I0120 17:47:22.183744 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"e45db124-f220-41eb-b1d0-d82affa8be89","Type":"ContainerStarted","Data":"87c9036ed520e0ac775579ea9684b4e9331125999144fd57f7db606b1d4cfeee"} Jan 20 17:47:22 crc kubenswrapper[4558]: I0120 17:47:22.603672 4558 scope.go:117] "RemoveContainer" containerID="4a9490a5c5547167886edaefed1bfe4c918b053d999248c687a66c4bb4b18544" Jan 20 17:47:22 crc kubenswrapper[4558]: I0120 17:47:22.630873 4558 scope.go:117] "RemoveContainer" containerID="b8073107ad7ea7845349c804ef266cf79a76d892989926d112eb5a0d45a26d7c" Jan 20 17:47:22 crc kubenswrapper[4558]: I0120 17:47:22.666325 4558 scope.go:117] "RemoveContainer" containerID="5caa9c83e57be3928128e4ffcdc30d264c7d25b5dfa3de5488249ee33e587529" Jan 20 17:47:22 crc kubenswrapper[4558]: I0120 17:47:22.694793 4558 scope.go:117] "RemoveContainer" containerID="57768a7bbd565b0a91483b9701925edcdb3062ad3e45489a7ff8722dabc67e7e" Jan 20 17:47:22 crc kubenswrapper[4558]: I0120 17:47:22.739969 4558 scope.go:117] "RemoveContainer" containerID="1b2406ac4844c018e4c181d32e0d4b3ff0b33492aeb8d275a7fb0dd3f2e899cc" Jan 20 17:47:22 crc kubenswrapper[4558]: I0120 17:47:22.787299 4558 scope.go:117] "RemoveContainer" containerID="eb8f5e7abf7b6f66d13dcca720c4c8b9939d9a624fccffa8e1235a042aeeec14" Jan 20 17:47:22 crc kubenswrapper[4558]: I0120 17:47:22.807118 4558 scope.go:117] "RemoveContainer" containerID="ca8088e4b215e716484cafe5dc6cd2427095c82b587c9dd06837d05764131f03" Jan 20 17:47:22 crc kubenswrapper[4558]: I0120 17:47:22.837352 4558 scope.go:117] "RemoveContainer" containerID="1560e725c8768e491198e715aead8e30fd05538bb7735f6fb6fd333f434e6c3e" Jan 20 17:47:22 crc kubenswrapper[4558]: I0120 17:47:22.868196 4558 scope.go:117] "RemoveContainer" containerID="30c26a31b65684929c8aa74de9a1153ce13cdc31951b546f411c4c7b676d7e42" Jan 20 17:47:22 crc kubenswrapper[4558]: I0120 17:47:22.904352 4558 scope.go:117] "RemoveContainer" 
containerID="034d451a2e550aee8b855a4ea6bfe4909c52aa2e4a24f9e676477858cba19916" Jan 20 17:47:22 crc kubenswrapper[4558]: I0120 17:47:22.958324 4558 scope.go:117] "RemoveContainer" containerID="9232fe923f5a47f4d2c0139e8bcc5a7439fb78d669404cffd52a472a70adf94f" Jan 20 17:47:23 crc kubenswrapper[4558]: I0120 17:47:23.034859 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:47:23 crc kubenswrapper[4558]: I0120 17:47:23.257256 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:47:23 crc kubenswrapper[4558]: I0120 17:47:23.262536 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-api-0" podUID="a832b201-8940-43e4-b09b-40d410ea33ba" containerName="cinder-api-log" containerID="cri-o://fc90312ceb0df9a73011c391c468aa7e5dd472a3e279c249c6e61f63c411fd3f" gracePeriod=30 Jan 20 17:47:23 crc kubenswrapper[4558]: I0120 17:47:23.262681 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-api-0" podUID="a832b201-8940-43e4-b09b-40d410ea33ba" containerName="cinder-api" containerID="cri-o://f63f565eb1c52c4ac8e4d32917421d675aeab4babd8d641cb8d344e26265c527" gracePeriod=30 Jan 20 17:47:23 crc kubenswrapper[4558]: I0120 17:47:23.282052 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=1.869005806 podStartE2EDuration="5.282040988s" podCreationTimestamp="2026-01-20 17:47:18 +0000 UTC" firstStartedPulling="2026-01-20 17:47:19.573268277 +0000 UTC m=+3933.333606245" lastFinishedPulling="2026-01-20 17:47:22.986303459 +0000 UTC m=+3936.746641427" observedRunningTime="2026-01-20 17:47:23.275572915 +0000 UTC m=+3937.035910882" watchObservedRunningTime="2026-01-20 17:47:23.282040988 +0000 UTC m=+3937.042378955" Jan 20 17:47:23 crc kubenswrapper[4558]: I0120 17:47:23.549056 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-knlxj" Jan 20 17:47:23 crc kubenswrapper[4558]: I0120 17:47:23.576264 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:47:23 crc kubenswrapper[4558]: I0120 17:47:23.689663 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x77wj\" (UniqueName: \"kubernetes.io/projected/83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5-kube-api-access-x77wj\") pod \"83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5\" (UID: \"83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5\") " Jan 20 17:47:23 crc kubenswrapper[4558]: I0120 17:47:23.689716 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5-fernet-keys\") pod \"83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5\" (UID: \"83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5\") " Jan 20 17:47:23 crc kubenswrapper[4558]: I0120 17:47:23.689813 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5-config-data\") pod \"83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5\" (UID: \"83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5\") " Jan 20 17:47:23 crc kubenswrapper[4558]: I0120 17:47:23.689892 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5-credential-keys\") pod \"83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5\" (UID: \"83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5\") " Jan 20 17:47:23 crc kubenswrapper[4558]: I0120 17:47:23.689963 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5-scripts\") pod \"83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5\" (UID: \"83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5\") " Jan 20 17:47:23 crc kubenswrapper[4558]: I0120 17:47:23.690013 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5-combined-ca-bundle\") pod \"83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5\" (UID: \"83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5\") " Jan 20 17:47:23 crc kubenswrapper[4558]: I0120 17:47:23.696351 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5-kube-api-access-x77wj" (OuterVolumeSpecName: "kube-api-access-x77wj") pod "83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5" (UID: "83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5"). InnerVolumeSpecName "kube-api-access-x77wj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:47:23 crc kubenswrapper[4558]: I0120 17:47:23.697524 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5-scripts" (OuterVolumeSpecName: "scripts") pod "83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5" (UID: "83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:47:23 crc kubenswrapper[4558]: I0120 17:47:23.697601 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5" (UID: "83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:47:23 crc kubenswrapper[4558]: I0120 17:47:23.699077 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5" (UID: "83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:47:23 crc kubenswrapper[4558]: I0120 17:47:23.725416 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5" (UID: "83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:47:23 crc kubenswrapper[4558]: I0120 17:47:23.734053 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5-config-data" (OuterVolumeSpecName: "config-data") pod "83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5" (UID: "83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:47:23 crc kubenswrapper[4558]: I0120 17:47:23.792992 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x77wj\" (UniqueName: \"kubernetes.io/projected/83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5-kube-api-access-x77wj\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:23 crc kubenswrapper[4558]: I0120 17:47:23.793235 4558 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:23 crc kubenswrapper[4558]: I0120 17:47:23.793248 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:23 crc kubenswrapper[4558]: I0120 17:47:23.793259 4558 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:23 crc kubenswrapper[4558]: I0120 17:47:23.793269 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:23 crc kubenswrapper[4558]: I0120 17:47:23.793279 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:23 crc kubenswrapper[4558]: I0120 17:47:23.798285 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:47:23 crc kubenswrapper[4558]: I0120 17:47:23.895331 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a832b201-8940-43e4-b09b-40d410ea33ba-config-data\") pod \"a832b201-8940-43e4-b09b-40d410ea33ba\" (UID: \"a832b201-8940-43e4-b09b-40d410ea33ba\") " Jan 20 17:47:23 crc kubenswrapper[4558]: I0120 17:47:23.895558 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a832b201-8940-43e4-b09b-40d410ea33ba-scripts\") pod \"a832b201-8940-43e4-b09b-40d410ea33ba\" (UID: \"a832b201-8940-43e4-b09b-40d410ea33ba\") " Jan 20 17:47:23 crc kubenswrapper[4558]: I0120 17:47:23.895637 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a832b201-8940-43e4-b09b-40d410ea33ba-logs\") pod \"a832b201-8940-43e4-b09b-40d410ea33ba\" (UID: \"a832b201-8940-43e4-b09b-40d410ea33ba\") " Jan 20 17:47:23 crc kubenswrapper[4558]: I0120 17:47:23.895776 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a832b201-8940-43e4-b09b-40d410ea33ba-config-data-custom\") pod \"a832b201-8940-43e4-b09b-40d410ea33ba\" (UID: \"a832b201-8940-43e4-b09b-40d410ea33ba\") " Jan 20 17:47:23 crc kubenswrapper[4558]: I0120 17:47:23.895817 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htcfz\" (UniqueName: \"kubernetes.io/projected/a832b201-8940-43e4-b09b-40d410ea33ba-kube-api-access-htcfz\") pod \"a832b201-8940-43e4-b09b-40d410ea33ba\" (UID: \"a832b201-8940-43e4-b09b-40d410ea33ba\") " Jan 20 17:47:23 crc kubenswrapper[4558]: I0120 17:47:23.895853 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a832b201-8940-43e4-b09b-40d410ea33ba-combined-ca-bundle\") pod \"a832b201-8940-43e4-b09b-40d410ea33ba\" (UID: \"a832b201-8940-43e4-b09b-40d410ea33ba\") " Jan 20 17:47:23 crc kubenswrapper[4558]: I0120 17:47:23.895871 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a832b201-8940-43e4-b09b-40d410ea33ba-etc-machine-id\") pod \"a832b201-8940-43e4-b09b-40d410ea33ba\" (UID: \"a832b201-8940-43e4-b09b-40d410ea33ba\") " Jan 20 17:47:23 crc kubenswrapper[4558]: I0120 17:47:23.896222 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a832b201-8940-43e4-b09b-40d410ea33ba-logs" (OuterVolumeSpecName: "logs") pod "a832b201-8940-43e4-b09b-40d410ea33ba" (UID: "a832b201-8940-43e4-b09b-40d410ea33ba"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:47:23 crc kubenswrapper[4558]: I0120 17:47:23.896226 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a832b201-8940-43e4-b09b-40d410ea33ba-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "a832b201-8940-43e4-b09b-40d410ea33ba" (UID: "a832b201-8940-43e4-b09b-40d410ea33ba"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:47:23 crc kubenswrapper[4558]: I0120 17:47:23.896575 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a832b201-8940-43e4-b09b-40d410ea33ba-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:23 crc kubenswrapper[4558]: I0120 17:47:23.896599 4558 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a832b201-8940-43e4-b09b-40d410ea33ba-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:23 crc kubenswrapper[4558]: I0120 17:47:23.900558 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a832b201-8940-43e4-b09b-40d410ea33ba-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "a832b201-8940-43e4-b09b-40d410ea33ba" (UID: "a832b201-8940-43e4-b09b-40d410ea33ba"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:47:23 crc kubenswrapper[4558]: I0120 17:47:23.900880 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a832b201-8940-43e4-b09b-40d410ea33ba-scripts" (OuterVolumeSpecName: "scripts") pod "a832b201-8940-43e4-b09b-40d410ea33ba" (UID: "a832b201-8940-43e4-b09b-40d410ea33ba"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:47:23 crc kubenswrapper[4558]: I0120 17:47:23.902026 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a832b201-8940-43e4-b09b-40d410ea33ba-kube-api-access-htcfz" (OuterVolumeSpecName: "kube-api-access-htcfz") pod "a832b201-8940-43e4-b09b-40d410ea33ba" (UID: "a832b201-8940-43e4-b09b-40d410ea33ba"). InnerVolumeSpecName "kube-api-access-htcfz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:47:23 crc kubenswrapper[4558]: I0120 17:47:23.922626 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a832b201-8940-43e4-b09b-40d410ea33ba-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a832b201-8940-43e4-b09b-40d410ea33ba" (UID: "a832b201-8940-43e4-b09b-40d410ea33ba"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:47:23 crc kubenswrapper[4558]: I0120 17:47:23.938875 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a832b201-8940-43e4-b09b-40d410ea33ba-config-data" (OuterVolumeSpecName: "config-data") pod "a832b201-8940-43e4-b09b-40d410ea33ba" (UID: "a832b201-8940-43e4-b09b-40d410ea33ba"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:47:23 crc kubenswrapper[4558]: I0120 17:47:23.998985 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a832b201-8940-43e4-b09b-40d410ea33ba-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:23 crc kubenswrapper[4558]: I0120 17:47:23.999079 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a832b201-8940-43e4-b09b-40d410ea33ba-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:23 crc kubenswrapper[4558]: I0120 17:47:23.999153 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a832b201-8940-43e4-b09b-40d410ea33ba-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:23 crc kubenswrapper[4558]: I0120 17:47:23.999234 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htcfz\" (UniqueName: \"kubernetes.io/projected/a832b201-8940-43e4-b09b-40d410ea33ba-kube-api-access-htcfz\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:23 crc kubenswrapper[4558]: I0120 17:47:23.999287 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a832b201-8940-43e4-b09b-40d410ea33ba-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.272633 4558 generic.go:334] "Generic (PLEG): container finished" podID="a832b201-8940-43e4-b09b-40d410ea33ba" containerID="f63f565eb1c52c4ac8e4d32917421d675aeab4babd8d641cb8d344e26265c527" exitCode=0 Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.272666 4558 generic.go:334] "Generic (PLEG): container finished" podID="a832b201-8940-43e4-b09b-40d410ea33ba" containerID="fc90312ceb0df9a73011c391c468aa7e5dd472a3e279c249c6e61f63c411fd3f" exitCode=143 Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.272719 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.273740 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"a832b201-8940-43e4-b09b-40d410ea33ba","Type":"ContainerDied","Data":"f63f565eb1c52c4ac8e4d32917421d675aeab4babd8d641cb8d344e26265c527"} Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.274046 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"a832b201-8940-43e4-b09b-40d410ea33ba","Type":"ContainerDied","Data":"fc90312ceb0df9a73011c391c468aa7e5dd472a3e279c249c6e61f63c411fd3f"} Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.274406 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"a832b201-8940-43e4-b09b-40d410ea33ba","Type":"ContainerDied","Data":"9e054ef500e01a30e40b05254e1cef6e638305b61e42d0b2e37fe55e14e375e1"} Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.274086 4558 scope.go:117] "RemoveContainer" containerID="f63f565eb1c52c4ac8e4d32917421d675aeab4babd8d641cb8d344e26265c527" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.277077 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"e45db124-f220-41eb-b1d0-d82affa8be89","Type":"ContainerStarted","Data":"c119d9682719148604dfa2a2fefe97cd5eb3a8f7b6356747042f0cb8ea62b412"} Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.279341 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-knlxj" event={"ID":"83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5","Type":"ContainerDied","Data":"d6ce6aadadfc2f8eecb60c4c67045efc2b26dd8bbbfaf20446c1b6670e279516"} Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.279434 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d6ce6aadadfc2f8eecb60c4c67045efc2b26dd8bbbfaf20446c1b6670e279516" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.279519 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-knlxj" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.309134 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-7ff6469d47-k4mhb"] Jan 20 17:47:24 crc kubenswrapper[4558]: E0120 17:47:24.310264 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5" containerName="keystone-bootstrap" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.310289 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5" containerName="keystone-bootstrap" Jan 20 17:47:24 crc kubenswrapper[4558]: E0120 17:47:24.310301 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a832b201-8940-43e4-b09b-40d410ea33ba" containerName="cinder-api" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.310307 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a832b201-8940-43e4-b09b-40d410ea33ba" containerName="cinder-api" Jan 20 17:47:24 crc kubenswrapper[4558]: E0120 17:47:24.310330 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a832b201-8940-43e4-b09b-40d410ea33ba" containerName="cinder-api-log" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.310335 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a832b201-8940-43e4-b09b-40d410ea33ba" containerName="cinder-api-log" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.310750 4558 scope.go:117] "RemoveContainer" containerID="fc90312ceb0df9a73011c391c468aa7e5dd472a3e279c249c6e61f63c411fd3f" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.310996 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a832b201-8940-43e4-b09b-40d410ea33ba" containerName="cinder-api" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.311021 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a832b201-8940-43e4-b09b-40d410ea33ba" containerName="cinder-api-log" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.311041 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5" containerName="keystone-bootstrap" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.311611 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-7ff6469d47-k4mhb" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.320589 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-scripts" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.320900 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-config-data" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.321031 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.321138 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-keystone-public-svc" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.321291 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-keystone-dockercfg-8dfwc" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.321544 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-keystone-internal-svc" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.344271 4558 scope.go:117] "RemoveContainer" containerID="f63f565eb1c52c4ac8e4d32917421d675aeab4babd8d641cb8d344e26265c527" Jan 20 17:47:24 crc kubenswrapper[4558]: E0120 17:47:24.345607 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f63f565eb1c52c4ac8e4d32917421d675aeab4babd8d641cb8d344e26265c527\": container with ID starting with f63f565eb1c52c4ac8e4d32917421d675aeab4babd8d641cb8d344e26265c527 not found: ID does not exist" containerID="f63f565eb1c52c4ac8e4d32917421d675aeab4babd8d641cb8d344e26265c527" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.345642 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f63f565eb1c52c4ac8e4d32917421d675aeab4babd8d641cb8d344e26265c527"} err="failed to get container status \"f63f565eb1c52c4ac8e4d32917421d675aeab4babd8d641cb8d344e26265c527\": rpc error: code = NotFound desc = could not find container \"f63f565eb1c52c4ac8e4d32917421d675aeab4babd8d641cb8d344e26265c527\": container with ID starting with f63f565eb1c52c4ac8e4d32917421d675aeab4babd8d641cb8d344e26265c527 not found: ID does not exist" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.345662 4558 scope.go:117] "RemoveContainer" containerID="fc90312ceb0df9a73011c391c468aa7e5dd472a3e279c249c6e61f63c411fd3f" Jan 20 17:47:24 crc kubenswrapper[4558]: E0120 17:47:24.345862 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fc90312ceb0df9a73011c391c468aa7e5dd472a3e279c249c6e61f63c411fd3f\": container with ID starting with fc90312ceb0df9a73011c391c468aa7e5dd472a3e279c249c6e61f63c411fd3f not found: ID does not exist" containerID="fc90312ceb0df9a73011c391c468aa7e5dd472a3e279c249c6e61f63c411fd3f" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.345877 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fc90312ceb0df9a73011c391c468aa7e5dd472a3e279c249c6e61f63c411fd3f"} err="failed to get container status \"fc90312ceb0df9a73011c391c468aa7e5dd472a3e279c249c6e61f63c411fd3f\": rpc error: code = NotFound desc = could not find container \"fc90312ceb0df9a73011c391c468aa7e5dd472a3e279c249c6e61f63c411fd3f\": container with ID starting with 
fc90312ceb0df9a73011c391c468aa7e5dd472a3e279c249c6e61f63c411fd3f not found: ID does not exist" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.345890 4558 scope.go:117] "RemoveContainer" containerID="f63f565eb1c52c4ac8e4d32917421d675aeab4babd8d641cb8d344e26265c527" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.346056 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f63f565eb1c52c4ac8e4d32917421d675aeab4babd8d641cb8d344e26265c527"} err="failed to get container status \"f63f565eb1c52c4ac8e4d32917421d675aeab4babd8d641cb8d344e26265c527\": rpc error: code = NotFound desc = could not find container \"f63f565eb1c52c4ac8e4d32917421d675aeab4babd8d641cb8d344e26265c527\": container with ID starting with f63f565eb1c52c4ac8e4d32917421d675aeab4babd8d641cb8d344e26265c527 not found: ID does not exist" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.346069 4558 scope.go:117] "RemoveContainer" containerID="fc90312ceb0df9a73011c391c468aa7e5dd472a3e279c249c6e61f63c411fd3f" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.346282 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fc90312ceb0df9a73011c391c468aa7e5dd472a3e279c249c6e61f63c411fd3f"} err="failed to get container status \"fc90312ceb0df9a73011c391c468aa7e5dd472a3e279c249c6e61f63c411fd3f\": rpc error: code = NotFound desc = could not find container \"fc90312ceb0df9a73011c391c468aa7e5dd472a3e279c249c6e61f63c411fd3f\": container with ID starting with fc90312ceb0df9a73011c391c468aa7e5dd472a3e279c249c6e61f63c411fd3f not found: ID does not exist" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.364581 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.391286 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-7ff6469d47-k4mhb"] Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.396128 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.409309 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.411884 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.413711 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/28e95314-bdcd-4cd6-9f3b-10f29aad259d-credential-keys\") pod \"keystone-7ff6469d47-k4mhb\" (UID: \"28e95314-bdcd-4cd6-9f3b-10f29aad259d\") " pod="openstack-kuttl-tests/keystone-7ff6469d47-k4mhb" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.413822 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/28e95314-bdcd-4cd6-9f3b-10f29aad259d-fernet-keys\") pod \"keystone-7ff6469d47-k4mhb\" (UID: \"28e95314-bdcd-4cd6-9f3b-10f29aad259d\") " pod="openstack-kuttl-tests/keystone-7ff6469d47-k4mhb" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.414063 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-cinder-internal-svc" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.414083 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/28e95314-bdcd-4cd6-9f3b-10f29aad259d-internal-tls-certs\") pod \"keystone-7ff6469d47-k4mhb\" (UID: \"28e95314-bdcd-4cd6-9f3b-10f29aad259d\") " pod="openstack-kuttl-tests/keystone-7ff6469d47-k4mhb" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.414156 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-api-config-data" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.414191 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28e95314-bdcd-4cd6-9f3b-10f29aad259d-combined-ca-bundle\") pod \"keystone-7ff6469d47-k4mhb\" (UID: \"28e95314-bdcd-4cd6-9f3b-10f29aad259d\") " pod="openstack-kuttl-tests/keystone-7ff6469d47-k4mhb" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.414273 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/28e95314-bdcd-4cd6-9f3b-10f29aad259d-config-data\") pod \"keystone-7ff6469d47-k4mhb\" (UID: \"28e95314-bdcd-4cd6-9f3b-10f29aad259d\") " pod="openstack-kuttl-tests/keystone-7ff6469d47-k4mhb" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.415220 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-cinder-public-svc" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.415420 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/28e95314-bdcd-4cd6-9f3b-10f29aad259d-scripts\") pod \"keystone-7ff6469d47-k4mhb\" (UID: \"28e95314-bdcd-4cd6-9f3b-10f29aad259d\") " pod="openstack-kuttl-tests/keystone-7ff6469d47-k4mhb" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.415475 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/28e95314-bdcd-4cd6-9f3b-10f29aad259d-public-tls-certs\") pod \"keystone-7ff6469d47-k4mhb\" (UID: \"28e95314-bdcd-4cd6-9f3b-10f29aad259d\") " pod="openstack-kuttl-tests/keystone-7ff6469d47-k4mhb" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.415520 4558 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z4fr9\" (UniqueName: \"kubernetes.io/projected/28e95314-bdcd-4cd6-9f3b-10f29aad259d-kube-api-access-z4fr9\") pod \"keystone-7ff6469d47-k4mhb\" (UID: \"28e95314-bdcd-4cd6-9f3b-10f29aad259d\") " pod="openstack-kuttl-tests/keystone-7ff6469d47-k4mhb" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.437848 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.517270 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/28e95314-bdcd-4cd6-9f3b-10f29aad259d-config-data\") pod \"keystone-7ff6469d47-k4mhb\" (UID: \"28e95314-bdcd-4cd6-9f3b-10f29aad259d\") " pod="openstack-kuttl-tests/keystone-7ff6469d47-k4mhb" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.517314 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/47de199d-0dde-4082-8b6c-99f3d202608b-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"47de199d-0dde-4082-8b6c-99f3d202608b\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.517361 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47de199d-0dde-4082-8b6c-99f3d202608b-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"47de199d-0dde-4082-8b6c-99f3d202608b\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.517396 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/28e95314-bdcd-4cd6-9f3b-10f29aad259d-scripts\") pod \"keystone-7ff6469d47-k4mhb\" (UID: \"28e95314-bdcd-4cd6-9f3b-10f29aad259d\") " pod="openstack-kuttl-tests/keystone-7ff6469d47-k4mhb" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.517427 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/28e95314-bdcd-4cd6-9f3b-10f29aad259d-public-tls-certs\") pod \"keystone-7ff6469d47-k4mhb\" (UID: \"28e95314-bdcd-4cd6-9f3b-10f29aad259d\") " pod="openstack-kuttl-tests/keystone-7ff6469d47-k4mhb" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.517451 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z4fr9\" (UniqueName: \"kubernetes.io/projected/28e95314-bdcd-4cd6-9f3b-10f29aad259d-kube-api-access-z4fr9\") pod \"keystone-7ff6469d47-k4mhb\" (UID: \"28e95314-bdcd-4cd6-9f3b-10f29aad259d\") " pod="openstack-kuttl-tests/keystone-7ff6469d47-k4mhb" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.517467 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/47de199d-0dde-4082-8b6c-99f3d202608b-public-tls-certs\") pod \"cinder-api-0\" (UID: \"47de199d-0dde-4082-8b6c-99f3d202608b\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.517503 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47de199d-0dde-4082-8b6c-99f3d202608b-config-data\") pod \"cinder-api-0\" 
(UID: \"47de199d-0dde-4082-8b6c-99f3d202608b\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.517554 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/47de199d-0dde-4082-8b6c-99f3d202608b-logs\") pod \"cinder-api-0\" (UID: \"47de199d-0dde-4082-8b6c-99f3d202608b\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.517576 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/47de199d-0dde-4082-8b6c-99f3d202608b-etc-machine-id\") pod \"cinder-api-0\" (UID: \"47de199d-0dde-4082-8b6c-99f3d202608b\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.517605 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/28e95314-bdcd-4cd6-9f3b-10f29aad259d-credential-keys\") pod \"keystone-7ff6469d47-k4mhb\" (UID: \"28e95314-bdcd-4cd6-9f3b-10f29aad259d\") " pod="openstack-kuttl-tests/keystone-7ff6469d47-k4mhb" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.517642 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-thnjs\" (UniqueName: \"kubernetes.io/projected/47de199d-0dde-4082-8b6c-99f3d202608b-kube-api-access-thnjs\") pod \"cinder-api-0\" (UID: \"47de199d-0dde-4082-8b6c-99f3d202608b\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.517662 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/28e95314-bdcd-4cd6-9f3b-10f29aad259d-fernet-keys\") pod \"keystone-7ff6469d47-k4mhb\" (UID: \"28e95314-bdcd-4cd6-9f3b-10f29aad259d\") " pod="openstack-kuttl-tests/keystone-7ff6469d47-k4mhb" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.517676 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/47de199d-0dde-4082-8b6c-99f3d202608b-config-data-custom\") pod \"cinder-api-0\" (UID: \"47de199d-0dde-4082-8b6c-99f3d202608b\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.517709 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/47de199d-0dde-4082-8b6c-99f3d202608b-scripts\") pod \"cinder-api-0\" (UID: \"47de199d-0dde-4082-8b6c-99f3d202608b\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.517752 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/28e95314-bdcd-4cd6-9f3b-10f29aad259d-internal-tls-certs\") pod \"keystone-7ff6469d47-k4mhb\" (UID: \"28e95314-bdcd-4cd6-9f3b-10f29aad259d\") " pod="openstack-kuttl-tests/keystone-7ff6469d47-k4mhb" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.517779 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28e95314-bdcd-4cd6-9f3b-10f29aad259d-combined-ca-bundle\") pod \"keystone-7ff6469d47-k4mhb\" (UID: \"28e95314-bdcd-4cd6-9f3b-10f29aad259d\") " 
pod="openstack-kuttl-tests/keystone-7ff6469d47-k4mhb" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.522426 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28e95314-bdcd-4cd6-9f3b-10f29aad259d-combined-ca-bundle\") pod \"keystone-7ff6469d47-k4mhb\" (UID: \"28e95314-bdcd-4cd6-9f3b-10f29aad259d\") " pod="openstack-kuttl-tests/keystone-7ff6469d47-k4mhb" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.522441 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/28e95314-bdcd-4cd6-9f3b-10f29aad259d-config-data\") pod \"keystone-7ff6469d47-k4mhb\" (UID: \"28e95314-bdcd-4cd6-9f3b-10f29aad259d\") " pod="openstack-kuttl-tests/keystone-7ff6469d47-k4mhb" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.522619 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/28e95314-bdcd-4cd6-9f3b-10f29aad259d-scripts\") pod \"keystone-7ff6469d47-k4mhb\" (UID: \"28e95314-bdcd-4cd6-9f3b-10f29aad259d\") " pod="openstack-kuttl-tests/keystone-7ff6469d47-k4mhb" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.523190 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/28e95314-bdcd-4cd6-9f3b-10f29aad259d-credential-keys\") pod \"keystone-7ff6469d47-k4mhb\" (UID: \"28e95314-bdcd-4cd6-9f3b-10f29aad259d\") " pod="openstack-kuttl-tests/keystone-7ff6469d47-k4mhb" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.523411 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/28e95314-bdcd-4cd6-9f3b-10f29aad259d-fernet-keys\") pod \"keystone-7ff6469d47-k4mhb\" (UID: \"28e95314-bdcd-4cd6-9f3b-10f29aad259d\") " pod="openstack-kuttl-tests/keystone-7ff6469d47-k4mhb" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.523526 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/28e95314-bdcd-4cd6-9f3b-10f29aad259d-internal-tls-certs\") pod \"keystone-7ff6469d47-k4mhb\" (UID: \"28e95314-bdcd-4cd6-9f3b-10f29aad259d\") " pod="openstack-kuttl-tests/keystone-7ff6469d47-k4mhb" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.523760 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/28e95314-bdcd-4cd6-9f3b-10f29aad259d-public-tls-certs\") pod \"keystone-7ff6469d47-k4mhb\" (UID: \"28e95314-bdcd-4cd6-9f3b-10f29aad259d\") " pod="openstack-kuttl-tests/keystone-7ff6469d47-k4mhb" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.533015 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z4fr9\" (UniqueName: \"kubernetes.io/projected/28e95314-bdcd-4cd6-9f3b-10f29aad259d-kube-api-access-z4fr9\") pod \"keystone-7ff6469d47-k4mhb\" (UID: \"28e95314-bdcd-4cd6-9f3b-10f29aad259d\") " pod="openstack-kuttl-tests/keystone-7ff6469d47-k4mhb" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.578434 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a832b201-8940-43e4-b09b-40d410ea33ba" path="/var/lib/kubelet/pods/a832b201-8940-43e4-b09b-40d410ea33ba/volumes" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.628711 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/47de199d-0dde-4082-8b6c-99f3d202608b-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"47de199d-0dde-4082-8b6c-99f3d202608b\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.628808 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47de199d-0dde-4082-8b6c-99f3d202608b-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"47de199d-0dde-4082-8b6c-99f3d202608b\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.628894 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/47de199d-0dde-4082-8b6c-99f3d202608b-public-tls-certs\") pod \"cinder-api-0\" (UID: \"47de199d-0dde-4082-8b6c-99f3d202608b\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.628975 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47de199d-0dde-4082-8b6c-99f3d202608b-config-data\") pod \"cinder-api-0\" (UID: \"47de199d-0dde-4082-8b6c-99f3d202608b\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.629077 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/47de199d-0dde-4082-8b6c-99f3d202608b-logs\") pod \"cinder-api-0\" (UID: \"47de199d-0dde-4082-8b6c-99f3d202608b\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.629108 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/47de199d-0dde-4082-8b6c-99f3d202608b-etc-machine-id\") pod \"cinder-api-0\" (UID: \"47de199d-0dde-4082-8b6c-99f3d202608b\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.629261 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-thnjs\" (UniqueName: \"kubernetes.io/projected/47de199d-0dde-4082-8b6c-99f3d202608b-kube-api-access-thnjs\") pod \"cinder-api-0\" (UID: \"47de199d-0dde-4082-8b6c-99f3d202608b\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.629296 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/47de199d-0dde-4082-8b6c-99f3d202608b-config-data-custom\") pod \"cinder-api-0\" (UID: \"47de199d-0dde-4082-8b6c-99f3d202608b\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.629461 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/47de199d-0dde-4082-8b6c-99f3d202608b-etc-machine-id\") pod \"cinder-api-0\" (UID: \"47de199d-0dde-4082-8b6c-99f3d202608b\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.629470 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/47de199d-0dde-4082-8b6c-99f3d202608b-scripts\") pod \"cinder-api-0\" (UID: \"47de199d-0dde-4082-8b6c-99f3d202608b\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:47:24 crc 
kubenswrapper[4558]: I0120 17:47:24.630206 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/47de199d-0dde-4082-8b6c-99f3d202608b-logs\") pod \"cinder-api-0\" (UID: \"47de199d-0dde-4082-8b6c-99f3d202608b\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.631428 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-7ff6469d47-k4mhb" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.633646 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/47de199d-0dde-4082-8b6c-99f3d202608b-config-data-custom\") pod \"cinder-api-0\" (UID: \"47de199d-0dde-4082-8b6c-99f3d202608b\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.634630 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/47de199d-0dde-4082-8b6c-99f3d202608b-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"47de199d-0dde-4082-8b6c-99f3d202608b\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.635635 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/47de199d-0dde-4082-8b6c-99f3d202608b-public-tls-certs\") pod \"cinder-api-0\" (UID: \"47de199d-0dde-4082-8b6c-99f3d202608b\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.635710 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/47de199d-0dde-4082-8b6c-99f3d202608b-scripts\") pod \"cinder-api-0\" (UID: \"47de199d-0dde-4082-8b6c-99f3d202608b\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.635825 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47de199d-0dde-4082-8b6c-99f3d202608b-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"47de199d-0dde-4082-8b6c-99f3d202608b\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.638030 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47de199d-0dde-4082-8b6c-99f3d202608b-config-data\") pod \"cinder-api-0\" (UID: \"47de199d-0dde-4082-8b6c-99f3d202608b\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.652072 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-thnjs\" (UniqueName: \"kubernetes.io/projected/47de199d-0dde-4082-8b6c-99f3d202608b-kube-api-access-thnjs\") pod \"cinder-api-0\" (UID: \"47de199d-0dde-4082-8b6c-99f3d202608b\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:47:24 crc kubenswrapper[4558]: I0120 17:47:24.727617 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:47:25 crc kubenswrapper[4558]: I0120 17:47:25.086618 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-7ff6469d47-k4mhb"] Jan 20 17:47:25 crc kubenswrapper[4558]: I0120 17:47:25.105879 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:47:25 crc kubenswrapper[4558]: I0120 17:47:25.105934 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:47:25 crc kubenswrapper[4558]: I0120 17:47:25.121105 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:47:25 crc kubenswrapper[4558]: I0120 17:47:25.121136 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:47:25 crc kubenswrapper[4558]: I0120 17:47:25.137207 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:47:25 crc kubenswrapper[4558]: I0120 17:47:25.188385 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:47:25 crc kubenswrapper[4558]: I0120 17:47:25.213567 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:47:25 crc kubenswrapper[4558]: I0120 17:47:25.252212 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:47:25 crc kubenswrapper[4558]: I0120 17:47:25.267404 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:47:25 crc kubenswrapper[4558]: I0120 17:47:25.335573 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-7ff6469d47-k4mhb" event={"ID":"28e95314-bdcd-4cd6-9f3b-10f29aad259d","Type":"ContainerStarted","Data":"37399d5b1aab12040dcaa1e2fbd0da0499be5dc684b0eac826b6ed9e5cc88b70"} Jan 20 17:47:25 crc kubenswrapper[4558]: I0120 17:47:25.348844 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"47de199d-0dde-4082-8b6c-99f3d202608b","Type":"ContainerStarted","Data":"bf0cc528aa590a5f695fb88d2024b7b55941f695dee5334550651ad86f374722"} Jan 20 17:47:25 crc kubenswrapper[4558]: I0120 17:47:25.348887 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:47:25 crc kubenswrapper[4558]: I0120 17:47:25.352575 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:47:25 crc kubenswrapper[4558]: I0120 17:47:25.353227 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:47:25 crc kubenswrapper[4558]: I0120 17:47:25.353267 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:47:26 crc kubenswrapper[4558]: I0120 17:47:26.366514 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-7ff6469d47-k4mhb" 
event={"ID":"28e95314-bdcd-4cd6-9f3b-10f29aad259d","Type":"ContainerStarted","Data":"6a26fa3d881fdae5f126b67ecdf3cd57c4714eeb300e6b941666a0c25f2581da"} Jan 20 17:47:26 crc kubenswrapper[4558]: I0120 17:47:26.366830 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/keystone-7ff6469d47-k4mhb" Jan 20 17:47:26 crc kubenswrapper[4558]: I0120 17:47:26.370723 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"47de199d-0dde-4082-8b6c-99f3d202608b","Type":"ContainerStarted","Data":"b497e638b4894f1c81e1773af34e888736bf132c67bc13eb676c4be84dea7235"} Jan 20 17:47:26 crc kubenswrapper[4558]: I0120 17:47:26.387817 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/keystone-7ff6469d47-k4mhb" podStartSLOduration=2.387799263 podStartE2EDuration="2.387799263s" podCreationTimestamp="2026-01-20 17:47:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:47:26.385563348 +0000 UTC m=+3940.145901315" watchObservedRunningTime="2026-01-20 17:47:26.387799263 +0000 UTC m=+3940.148137230" Jan 20 17:47:27 crc kubenswrapper[4558]: I0120 17:47:27.148860 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-78bcff9576-76n9h"] Jan 20 17:47:27 crc kubenswrapper[4558]: I0120 17:47:27.150691 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-78bcff9576-76n9h" Jan 20 17:47:27 crc kubenswrapper[4558]: I0120 17:47:27.153376 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-neutron-internal-svc" Jan 20 17:47:27 crc kubenswrapper[4558]: I0120 17:47:27.153568 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-neutron-public-svc" Jan 20 17:47:27 crc kubenswrapper[4558]: I0120 17:47:27.158978 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-78bcff9576-76n9h"] Jan 20 17:47:27 crc kubenswrapper[4558]: I0120 17:47:27.287113 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6a566db2-a941-48c9-9169-8e2c16cda1ac-public-tls-certs\") pod \"neutron-78bcff9576-76n9h\" (UID: \"6a566db2-a941-48c9-9169-8e2c16cda1ac\") " pod="openstack-kuttl-tests/neutron-78bcff9576-76n9h" Jan 20 17:47:27 crc kubenswrapper[4558]: I0120 17:47:27.287442 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r8mm9\" (UniqueName: \"kubernetes.io/projected/6a566db2-a941-48c9-9169-8e2c16cda1ac-kube-api-access-r8mm9\") pod \"neutron-78bcff9576-76n9h\" (UID: \"6a566db2-a941-48c9-9169-8e2c16cda1ac\") " pod="openstack-kuttl-tests/neutron-78bcff9576-76n9h" Jan 20 17:47:27 crc kubenswrapper[4558]: I0120 17:47:27.287504 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/6a566db2-a941-48c9-9169-8e2c16cda1ac-config\") pod \"neutron-78bcff9576-76n9h\" (UID: \"6a566db2-a941-48c9-9169-8e2c16cda1ac\") " pod="openstack-kuttl-tests/neutron-78bcff9576-76n9h" Jan 20 17:47:27 crc kubenswrapper[4558]: I0120 17:47:27.287534 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/6a566db2-a941-48c9-9169-8e2c16cda1ac-ovndb-tls-certs\") pod \"neutron-78bcff9576-76n9h\" (UID: \"6a566db2-a941-48c9-9169-8e2c16cda1ac\") " pod="openstack-kuttl-tests/neutron-78bcff9576-76n9h" Jan 20 17:47:27 crc kubenswrapper[4558]: I0120 17:47:27.287656 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6a566db2-a941-48c9-9169-8e2c16cda1ac-combined-ca-bundle\") pod \"neutron-78bcff9576-76n9h\" (UID: \"6a566db2-a941-48c9-9169-8e2c16cda1ac\") " pod="openstack-kuttl-tests/neutron-78bcff9576-76n9h" Jan 20 17:47:27 crc kubenswrapper[4558]: I0120 17:47:27.287741 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/6a566db2-a941-48c9-9169-8e2c16cda1ac-httpd-config\") pod \"neutron-78bcff9576-76n9h\" (UID: \"6a566db2-a941-48c9-9169-8e2c16cda1ac\") " pod="openstack-kuttl-tests/neutron-78bcff9576-76n9h" Jan 20 17:47:27 crc kubenswrapper[4558]: I0120 17:47:27.288023 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6a566db2-a941-48c9-9169-8e2c16cda1ac-internal-tls-certs\") pod \"neutron-78bcff9576-76n9h\" (UID: \"6a566db2-a941-48c9-9169-8e2c16cda1ac\") " pod="openstack-kuttl-tests/neutron-78bcff9576-76n9h" Jan 20 17:47:27 crc kubenswrapper[4558]: I0120 17:47:27.383155 4558 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 20 17:47:27 crc kubenswrapper[4558]: I0120 17:47:27.383196 4558 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 20 17:47:27 crc kubenswrapper[4558]: I0120 17:47:27.384446 4558 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 20 17:47:27 crc kubenswrapper[4558]: I0120 17:47:27.384472 4558 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 20 17:47:27 crc kubenswrapper[4558]: I0120 17:47:27.384474 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"47de199d-0dde-4082-8b6c-99f3d202608b","Type":"ContainerStarted","Data":"1644d78283b3c3dad2b68cd9a4d16b7b8c04de0ba6bfcedb6b4c04ac6f6297de"} Jan 20 17:47:27 crc kubenswrapper[4558]: I0120 17:47:27.384850 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:47:27 crc kubenswrapper[4558]: I0120 17:47:27.389183 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/6a566db2-a941-48c9-9169-8e2c16cda1ac-config\") pod \"neutron-78bcff9576-76n9h\" (UID: \"6a566db2-a941-48c9-9169-8e2c16cda1ac\") " pod="openstack-kuttl-tests/neutron-78bcff9576-76n9h" Jan 20 17:47:27 crc kubenswrapper[4558]: I0120 17:47:27.389220 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/6a566db2-a941-48c9-9169-8e2c16cda1ac-ovndb-tls-certs\") pod \"neutron-78bcff9576-76n9h\" (UID: \"6a566db2-a941-48c9-9169-8e2c16cda1ac\") " pod="openstack-kuttl-tests/neutron-78bcff9576-76n9h" Jan 20 17:47:27 crc kubenswrapper[4558]: I0120 17:47:27.389245 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6a566db2-a941-48c9-9169-8e2c16cda1ac-combined-ca-bundle\") pod 
\"neutron-78bcff9576-76n9h\" (UID: \"6a566db2-a941-48c9-9169-8e2c16cda1ac\") " pod="openstack-kuttl-tests/neutron-78bcff9576-76n9h" Jan 20 17:47:27 crc kubenswrapper[4558]: I0120 17:47:27.389273 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/6a566db2-a941-48c9-9169-8e2c16cda1ac-httpd-config\") pod \"neutron-78bcff9576-76n9h\" (UID: \"6a566db2-a941-48c9-9169-8e2c16cda1ac\") " pod="openstack-kuttl-tests/neutron-78bcff9576-76n9h" Jan 20 17:47:27 crc kubenswrapper[4558]: I0120 17:47:27.389346 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6a566db2-a941-48c9-9169-8e2c16cda1ac-internal-tls-certs\") pod \"neutron-78bcff9576-76n9h\" (UID: \"6a566db2-a941-48c9-9169-8e2c16cda1ac\") " pod="openstack-kuttl-tests/neutron-78bcff9576-76n9h" Jan 20 17:47:27 crc kubenswrapper[4558]: I0120 17:47:27.389381 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6a566db2-a941-48c9-9169-8e2c16cda1ac-public-tls-certs\") pod \"neutron-78bcff9576-76n9h\" (UID: \"6a566db2-a941-48c9-9169-8e2c16cda1ac\") " pod="openstack-kuttl-tests/neutron-78bcff9576-76n9h" Jan 20 17:47:27 crc kubenswrapper[4558]: I0120 17:47:27.389432 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r8mm9\" (UniqueName: \"kubernetes.io/projected/6a566db2-a941-48c9-9169-8e2c16cda1ac-kube-api-access-r8mm9\") pod \"neutron-78bcff9576-76n9h\" (UID: \"6a566db2-a941-48c9-9169-8e2c16cda1ac\") " pod="openstack-kuttl-tests/neutron-78bcff9576-76n9h" Jan 20 17:47:27 crc kubenswrapper[4558]: I0120 17:47:27.393285 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:47:27 crc kubenswrapper[4558]: I0120 17:47:27.397904 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/6a566db2-a941-48c9-9169-8e2c16cda1ac-httpd-config\") pod \"neutron-78bcff9576-76n9h\" (UID: \"6a566db2-a941-48c9-9169-8e2c16cda1ac\") " pod="openstack-kuttl-tests/neutron-78bcff9576-76n9h" Jan 20 17:47:27 crc kubenswrapper[4558]: I0120 17:47:27.399672 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/6a566db2-a941-48c9-9169-8e2c16cda1ac-ovndb-tls-certs\") pod \"neutron-78bcff9576-76n9h\" (UID: \"6a566db2-a941-48c9-9169-8e2c16cda1ac\") " pod="openstack-kuttl-tests/neutron-78bcff9576-76n9h" Jan 20 17:47:27 crc kubenswrapper[4558]: I0120 17:47:27.401923 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6a566db2-a941-48c9-9169-8e2c16cda1ac-combined-ca-bundle\") pod \"neutron-78bcff9576-76n9h\" (UID: \"6a566db2-a941-48c9-9169-8e2c16cda1ac\") " pod="openstack-kuttl-tests/neutron-78bcff9576-76n9h" Jan 20 17:47:27 crc kubenswrapper[4558]: I0120 17:47:27.402084 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6a566db2-a941-48c9-9169-8e2c16cda1ac-public-tls-certs\") pod \"neutron-78bcff9576-76n9h\" (UID: \"6a566db2-a941-48c9-9169-8e2c16cda1ac\") " pod="openstack-kuttl-tests/neutron-78bcff9576-76n9h" Jan 20 17:47:27 crc kubenswrapper[4558]: I0120 17:47:27.407290 4558 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/6a566db2-a941-48c9-9169-8e2c16cda1ac-config\") pod \"neutron-78bcff9576-76n9h\" (UID: \"6a566db2-a941-48c9-9169-8e2c16cda1ac\") " pod="openstack-kuttl-tests/neutron-78bcff9576-76n9h" Jan 20 17:47:27 crc kubenswrapper[4558]: I0120 17:47:27.414460 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r8mm9\" (UniqueName: \"kubernetes.io/projected/6a566db2-a941-48c9-9169-8e2c16cda1ac-kube-api-access-r8mm9\") pod \"neutron-78bcff9576-76n9h\" (UID: \"6a566db2-a941-48c9-9169-8e2c16cda1ac\") " pod="openstack-kuttl-tests/neutron-78bcff9576-76n9h" Jan 20 17:47:27 crc kubenswrapper[4558]: I0120 17:47:27.434386 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6a566db2-a941-48c9-9169-8e2c16cda1ac-internal-tls-certs\") pod \"neutron-78bcff9576-76n9h\" (UID: \"6a566db2-a941-48c9-9169-8e2c16cda1ac\") " pod="openstack-kuttl-tests/neutron-78bcff9576-76n9h" Jan 20 17:47:27 crc kubenswrapper[4558]: I0120 17:47:27.434404 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/cinder-api-0" podStartSLOduration=3.4343895460000002 podStartE2EDuration="3.434389546s" podCreationTimestamp="2026-01-20 17:47:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:47:27.406369859 +0000 UTC m=+3941.166707826" watchObservedRunningTime="2026-01-20 17:47:27.434389546 +0000 UTC m=+3941.194727514" Jan 20 17:47:27 crc kubenswrapper[4558]: I0120 17:47:27.470996 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-78bcff9576-76n9h" Jan 20 17:47:27 crc kubenswrapper[4558]: I0120 17:47:27.563658 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:47:27 crc kubenswrapper[4558]: I0120 17:47:27.568691 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:47:27 crc kubenswrapper[4558]: I0120 17:47:27.924309 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:47:28 crc kubenswrapper[4558]: I0120 17:47:28.014863 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-78bcff9576-76n9h"] Jan 20 17:47:28 crc kubenswrapper[4558]: W0120 17:47:28.018037 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6a566db2_a941_48c9_9169_8e2c16cda1ac.slice/crio-e9c9be4776e024a33d47c27e4ba8c11de91438fa621402fd89269283b84d4e30 WatchSource:0}: Error finding container e9c9be4776e024a33d47c27e4ba8c11de91438fa621402fd89269283b84d4e30: Status 404 returned error can't find the container with id e9c9be4776e024a33d47c27e4ba8c11de91438fa621402fd89269283b84d4e30 Jan 20 17:47:28 crc kubenswrapper[4558]: I0120 17:47:28.065577 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/barbican-api-58bb6f688-8dbng" Jan 20 17:47:28 crc kubenswrapper[4558]: I0120 17:47:28.181861 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/barbican-api-58bb6f688-8dbng" Jan 20 17:47:28 crc kubenswrapper[4558]: 
I0120 17:47:28.409695 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-78bcff9576-76n9h" event={"ID":"6a566db2-a941-48c9-9169-8e2c16cda1ac","Type":"ContainerStarted","Data":"fe78548aaf34b40e51653f6f6cc0aba6de558e32992d14ccf662ce044e1157f4"} Jan 20 17:47:28 crc kubenswrapper[4558]: I0120 17:47:28.409777 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-78bcff9576-76n9h" event={"ID":"6a566db2-a941-48c9-9169-8e2c16cda1ac","Type":"ContainerStarted","Data":"d6eeb7c903fe5ccb79f100c77cfb1bc792eacae8fb9dc8447344d2655524771b"} Jan 20 17:47:28 crc kubenswrapper[4558]: I0120 17:47:28.409806 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-78bcff9576-76n9h" event={"ID":"6a566db2-a941-48c9-9169-8e2c16cda1ac","Type":"ContainerStarted","Data":"e9c9be4776e024a33d47c27e4ba8c11de91438fa621402fd89269283b84d4e30"} Jan 20 17:47:28 crc kubenswrapper[4558]: I0120 17:47:28.459193 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/neutron-78bcff9576-76n9h" podStartSLOduration=1.459155473 podStartE2EDuration="1.459155473s" podCreationTimestamp="2026-01-20 17:47:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:47:28.435377381 +0000 UTC m=+3942.195715348" watchObservedRunningTime="2026-01-20 17:47:28.459155473 +0000 UTC m=+3942.219493440" Jan 20 17:47:28 crc kubenswrapper[4558]: I0120 17:47:28.809960 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:47:28 crc kubenswrapper[4558]: I0120 17:47:28.861660 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:47:29 crc kubenswrapper[4558]: I0120 17:47:29.418437 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-scheduler-0" podUID="85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c" containerName="cinder-scheduler" containerID="cri-o://1a264c7c6924592f510f33ea71a48b399db20043fd3d4ea422a9b2423c9ba2a7" gracePeriod=30 Jan 20 17:47:29 crc kubenswrapper[4558]: I0120 17:47:29.418535 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-scheduler-0" podUID="85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c" containerName="probe" containerID="cri-o://c0c05ae7cdd27acea0025a9d26ff2baa4b34e3e0ade141c45e726cf972b0eb47" gracePeriod=30 Jan 20 17:47:29 crc kubenswrapper[4558]: I0120 17:47:29.419684 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/neutron-78bcff9576-76n9h" Jan 20 17:47:29 crc kubenswrapper[4558]: I0120 17:47:29.777257 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-api-6fcd55fbd-x56xq"] Jan 20 17:47:29 crc kubenswrapper[4558]: I0120 17:47:29.778751 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-api-6fcd55fbd-x56xq" Jan 20 17:47:29 crc kubenswrapper[4558]: I0120 17:47:29.781523 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-barbican-public-svc" Jan 20 17:47:29 crc kubenswrapper[4558]: I0120 17:47:29.785737 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-barbican-internal-svc" Jan 20 17:47:29 crc kubenswrapper[4558]: I0120 17:47:29.802201 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-api-6fcd55fbd-x56xq"] Jan 20 17:47:29 crc kubenswrapper[4558]: I0120 17:47:29.862211 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a97d228-d2ed-4b25-9746-7ee8083e562b-config-data\") pod \"barbican-api-6fcd55fbd-x56xq\" (UID: \"4a97d228-d2ed-4b25-9746-7ee8083e562b\") " pod="openstack-kuttl-tests/barbican-api-6fcd55fbd-x56xq" Jan 20 17:47:29 crc kubenswrapper[4558]: I0120 17:47:29.862342 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4a97d228-d2ed-4b25-9746-7ee8083e562b-config-data-custom\") pod \"barbican-api-6fcd55fbd-x56xq\" (UID: \"4a97d228-d2ed-4b25-9746-7ee8083e562b\") " pod="openstack-kuttl-tests/barbican-api-6fcd55fbd-x56xq" Jan 20 17:47:29 crc kubenswrapper[4558]: I0120 17:47:29.862416 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-95pff\" (UniqueName: \"kubernetes.io/projected/4a97d228-d2ed-4b25-9746-7ee8083e562b-kube-api-access-95pff\") pod \"barbican-api-6fcd55fbd-x56xq\" (UID: \"4a97d228-d2ed-4b25-9746-7ee8083e562b\") " pod="openstack-kuttl-tests/barbican-api-6fcd55fbd-x56xq" Jan 20 17:47:29 crc kubenswrapper[4558]: I0120 17:47:29.862443 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4a97d228-d2ed-4b25-9746-7ee8083e562b-public-tls-certs\") pod \"barbican-api-6fcd55fbd-x56xq\" (UID: \"4a97d228-d2ed-4b25-9746-7ee8083e562b\") " pod="openstack-kuttl-tests/barbican-api-6fcd55fbd-x56xq" Jan 20 17:47:29 crc kubenswrapper[4558]: I0120 17:47:29.862519 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a97d228-d2ed-4b25-9746-7ee8083e562b-combined-ca-bundle\") pod \"barbican-api-6fcd55fbd-x56xq\" (UID: \"4a97d228-d2ed-4b25-9746-7ee8083e562b\") " pod="openstack-kuttl-tests/barbican-api-6fcd55fbd-x56xq" Jan 20 17:47:29 crc kubenswrapper[4558]: I0120 17:47:29.862574 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4a97d228-d2ed-4b25-9746-7ee8083e562b-logs\") pod \"barbican-api-6fcd55fbd-x56xq\" (UID: \"4a97d228-d2ed-4b25-9746-7ee8083e562b\") " pod="openstack-kuttl-tests/barbican-api-6fcd55fbd-x56xq" Jan 20 17:47:29 crc kubenswrapper[4558]: I0120 17:47:29.862613 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4a97d228-d2ed-4b25-9746-7ee8083e562b-internal-tls-certs\") pod \"barbican-api-6fcd55fbd-x56xq\" (UID: \"4a97d228-d2ed-4b25-9746-7ee8083e562b\") " 
pod="openstack-kuttl-tests/barbican-api-6fcd55fbd-x56xq" Jan 20 17:47:29 crc kubenswrapper[4558]: I0120 17:47:29.965333 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a97d228-d2ed-4b25-9746-7ee8083e562b-combined-ca-bundle\") pod \"barbican-api-6fcd55fbd-x56xq\" (UID: \"4a97d228-d2ed-4b25-9746-7ee8083e562b\") " pod="openstack-kuttl-tests/barbican-api-6fcd55fbd-x56xq" Jan 20 17:47:29 crc kubenswrapper[4558]: I0120 17:47:29.965389 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4a97d228-d2ed-4b25-9746-7ee8083e562b-logs\") pod \"barbican-api-6fcd55fbd-x56xq\" (UID: \"4a97d228-d2ed-4b25-9746-7ee8083e562b\") " pod="openstack-kuttl-tests/barbican-api-6fcd55fbd-x56xq" Jan 20 17:47:29 crc kubenswrapper[4558]: I0120 17:47:29.965413 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4a97d228-d2ed-4b25-9746-7ee8083e562b-internal-tls-certs\") pod \"barbican-api-6fcd55fbd-x56xq\" (UID: \"4a97d228-d2ed-4b25-9746-7ee8083e562b\") " pod="openstack-kuttl-tests/barbican-api-6fcd55fbd-x56xq" Jan 20 17:47:29 crc kubenswrapper[4558]: I0120 17:47:29.965495 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a97d228-d2ed-4b25-9746-7ee8083e562b-config-data\") pod \"barbican-api-6fcd55fbd-x56xq\" (UID: \"4a97d228-d2ed-4b25-9746-7ee8083e562b\") " pod="openstack-kuttl-tests/barbican-api-6fcd55fbd-x56xq" Jan 20 17:47:29 crc kubenswrapper[4558]: I0120 17:47:29.965548 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4a97d228-d2ed-4b25-9746-7ee8083e562b-config-data-custom\") pod \"barbican-api-6fcd55fbd-x56xq\" (UID: \"4a97d228-d2ed-4b25-9746-7ee8083e562b\") " pod="openstack-kuttl-tests/barbican-api-6fcd55fbd-x56xq" Jan 20 17:47:29 crc kubenswrapper[4558]: I0120 17:47:29.965583 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-95pff\" (UniqueName: \"kubernetes.io/projected/4a97d228-d2ed-4b25-9746-7ee8083e562b-kube-api-access-95pff\") pod \"barbican-api-6fcd55fbd-x56xq\" (UID: \"4a97d228-d2ed-4b25-9746-7ee8083e562b\") " pod="openstack-kuttl-tests/barbican-api-6fcd55fbd-x56xq" Jan 20 17:47:29 crc kubenswrapper[4558]: I0120 17:47:29.965601 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4a97d228-d2ed-4b25-9746-7ee8083e562b-public-tls-certs\") pod \"barbican-api-6fcd55fbd-x56xq\" (UID: \"4a97d228-d2ed-4b25-9746-7ee8083e562b\") " pod="openstack-kuttl-tests/barbican-api-6fcd55fbd-x56xq" Jan 20 17:47:29 crc kubenswrapper[4558]: I0120 17:47:29.966155 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4a97d228-d2ed-4b25-9746-7ee8083e562b-logs\") pod \"barbican-api-6fcd55fbd-x56xq\" (UID: \"4a97d228-d2ed-4b25-9746-7ee8083e562b\") " pod="openstack-kuttl-tests/barbican-api-6fcd55fbd-x56xq" Jan 20 17:47:29 crc kubenswrapper[4558]: I0120 17:47:29.972498 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4a97d228-d2ed-4b25-9746-7ee8083e562b-internal-tls-certs\") pod \"barbican-api-6fcd55fbd-x56xq\" (UID: 
\"4a97d228-d2ed-4b25-9746-7ee8083e562b\") " pod="openstack-kuttl-tests/barbican-api-6fcd55fbd-x56xq" Jan 20 17:47:29 crc kubenswrapper[4558]: I0120 17:47:29.973522 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4a97d228-d2ed-4b25-9746-7ee8083e562b-config-data-custom\") pod \"barbican-api-6fcd55fbd-x56xq\" (UID: \"4a97d228-d2ed-4b25-9746-7ee8083e562b\") " pod="openstack-kuttl-tests/barbican-api-6fcd55fbd-x56xq" Jan 20 17:47:29 crc kubenswrapper[4558]: I0120 17:47:29.974888 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a97d228-d2ed-4b25-9746-7ee8083e562b-config-data\") pod \"barbican-api-6fcd55fbd-x56xq\" (UID: \"4a97d228-d2ed-4b25-9746-7ee8083e562b\") " pod="openstack-kuttl-tests/barbican-api-6fcd55fbd-x56xq" Jan 20 17:47:29 crc kubenswrapper[4558]: I0120 17:47:29.982467 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4a97d228-d2ed-4b25-9746-7ee8083e562b-public-tls-certs\") pod \"barbican-api-6fcd55fbd-x56xq\" (UID: \"4a97d228-d2ed-4b25-9746-7ee8083e562b\") " pod="openstack-kuttl-tests/barbican-api-6fcd55fbd-x56xq" Jan 20 17:47:29 crc kubenswrapper[4558]: I0120 17:47:29.982525 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a97d228-d2ed-4b25-9746-7ee8083e562b-combined-ca-bundle\") pod \"barbican-api-6fcd55fbd-x56xq\" (UID: \"4a97d228-d2ed-4b25-9746-7ee8083e562b\") " pod="openstack-kuttl-tests/barbican-api-6fcd55fbd-x56xq" Jan 20 17:47:29 crc kubenswrapper[4558]: I0120 17:47:29.983275 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-95pff\" (UniqueName: \"kubernetes.io/projected/4a97d228-d2ed-4b25-9746-7ee8083e562b-kube-api-access-95pff\") pod \"barbican-api-6fcd55fbd-x56xq\" (UID: \"4a97d228-d2ed-4b25-9746-7ee8083e562b\") " pod="openstack-kuttl-tests/barbican-api-6fcd55fbd-x56xq" Jan 20 17:47:30 crc kubenswrapper[4558]: I0120 17:47:30.096380 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-api-6fcd55fbd-x56xq" Jan 20 17:47:30 crc kubenswrapper[4558]: I0120 17:47:30.430612 4558 generic.go:334] "Generic (PLEG): container finished" podID="85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c" containerID="c0c05ae7cdd27acea0025a9d26ff2baa4b34e3e0ade141c45e726cf972b0eb47" exitCode=0 Jan 20 17:47:30 crc kubenswrapper[4558]: I0120 17:47:30.430647 4558 generic.go:334] "Generic (PLEG): container finished" podID="85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c" containerID="1a264c7c6924592f510f33ea71a48b399db20043fd3d4ea422a9b2423c9ba2a7" exitCode=0 Jan 20 17:47:30 crc kubenswrapper[4558]: I0120 17:47:30.430680 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c","Type":"ContainerDied","Data":"c0c05ae7cdd27acea0025a9d26ff2baa4b34e3e0ade141c45e726cf972b0eb47"} Jan 20 17:47:30 crc kubenswrapper[4558]: I0120 17:47:30.430737 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c","Type":"ContainerDied","Data":"1a264c7c6924592f510f33ea71a48b399db20043fd3d4ea422a9b2423c9ba2a7"} Jan 20 17:47:30 crc kubenswrapper[4558]: I0120 17:47:30.503562 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-api-6fcd55fbd-x56xq"] Jan 20 17:47:30 crc kubenswrapper[4558]: I0120 17:47:30.655493 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:47:30 crc kubenswrapper[4558]: I0120 17:47:30.798604 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c-config-data\") pod \"85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c\" (UID: \"85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c\") " Jan 20 17:47:30 crc kubenswrapper[4558]: I0120 17:47:30.798673 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c-etc-machine-id\") pod \"85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c\" (UID: \"85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c\") " Jan 20 17:47:30 crc kubenswrapper[4558]: I0120 17:47:30.798800 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c" (UID: "85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:47:30 crc kubenswrapper[4558]: I0120 17:47:30.798822 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c-scripts\") pod \"85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c\" (UID: \"85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c\") " Jan 20 17:47:30 crc kubenswrapper[4558]: I0120 17:47:30.798991 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c-config-data-custom\") pod \"85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c\" (UID: \"85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c\") " Jan 20 17:47:30 crc kubenswrapper[4558]: I0120 17:47:30.799029 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qjm8k\" (UniqueName: \"kubernetes.io/projected/85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c-kube-api-access-qjm8k\") pod \"85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c\" (UID: \"85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c\") " Jan 20 17:47:30 crc kubenswrapper[4558]: I0120 17:47:30.799071 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c-combined-ca-bundle\") pod \"85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c\" (UID: \"85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c\") " Jan 20 17:47:30 crc kubenswrapper[4558]: I0120 17:47:30.799723 4558 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:30 crc kubenswrapper[4558]: I0120 17:47:30.803693 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c-scripts" (OuterVolumeSpecName: "scripts") pod "85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c" (UID: "85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:47:30 crc kubenswrapper[4558]: I0120 17:47:30.804139 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c-kube-api-access-qjm8k" (OuterVolumeSpecName: "kube-api-access-qjm8k") pod "85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c" (UID: "85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c"). InnerVolumeSpecName "kube-api-access-qjm8k". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:47:30 crc kubenswrapper[4558]: I0120 17:47:30.804234 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c" (UID: "85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:47:30 crc kubenswrapper[4558]: I0120 17:47:30.850697 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c" (UID: "85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:47:30 crc kubenswrapper[4558]: I0120 17:47:30.877737 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c-config-data" (OuterVolumeSpecName: "config-data") pod "85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c" (UID: "85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:47:30 crc kubenswrapper[4558]: I0120 17:47:30.902138 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:30 crc kubenswrapper[4558]: I0120 17:47:30.902194 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:30 crc kubenswrapper[4558]: I0120 17:47:30.902212 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qjm8k\" (UniqueName: \"kubernetes.io/projected/85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c-kube-api-access-qjm8k\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:30 crc kubenswrapper[4558]: I0120 17:47:30.902225 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:30 crc kubenswrapper[4558]: I0120 17:47:30.902236 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:31 crc kubenswrapper[4558]: I0120 17:47:31.444418 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c","Type":"ContainerDied","Data":"c1b021c25050d852d7956f674101429e63ad23933af560a0f81bfa7b4c096ffb"} Jan 20 17:47:31 crc kubenswrapper[4558]: I0120 17:47:31.444820 4558 scope.go:117] "RemoveContainer" containerID="c0c05ae7cdd27acea0025a9d26ff2baa4b34e3e0ade141c45e726cf972b0eb47" Jan 20 17:47:31 crc kubenswrapper[4558]: I0120 17:47:31.445067 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:47:31 crc kubenswrapper[4558]: I0120 17:47:31.459149 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-6fcd55fbd-x56xq" event={"ID":"4a97d228-d2ed-4b25-9746-7ee8083e562b","Type":"ContainerStarted","Data":"9eb510d65b859ba67d1fbebbd45639a22b783fc8a8eeb68ec8c537ed76f3ce64"} Jan 20 17:47:31 crc kubenswrapper[4558]: I0120 17:47:31.459217 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-6fcd55fbd-x56xq" event={"ID":"4a97d228-d2ed-4b25-9746-7ee8083e562b","Type":"ContainerStarted","Data":"4cc83e2afecb8ccf476b628cbe76038deafd826319b077c37d311481bf29f6f1"} Jan 20 17:47:31 crc kubenswrapper[4558]: I0120 17:47:31.459232 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-6fcd55fbd-x56xq" event={"ID":"4a97d228-d2ed-4b25-9746-7ee8083e562b","Type":"ContainerStarted","Data":"6040e54c3233b1d6c02cf5d17217893873b92cc727cdc887070c0d52ea5a930c"} Jan 20 17:47:31 crc kubenswrapper[4558]: I0120 17:47:31.460215 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/barbican-api-6fcd55fbd-x56xq" Jan 20 17:47:31 crc kubenswrapper[4558]: I0120 17:47:31.460255 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/barbican-api-6fcd55fbd-x56xq" Jan 20 17:47:31 crc kubenswrapper[4558]: I0120 17:47:31.500328 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-api-6fcd55fbd-x56xq" podStartSLOduration=2.5003060919999998 podStartE2EDuration="2.500306092s" podCreationTimestamp="2026-01-20 17:47:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:47:31.488134504 +0000 UTC m=+3945.248472471" watchObservedRunningTime="2026-01-20 17:47:31.500306092 +0000 UTC m=+3945.260644049" Jan 20 17:47:31 crc kubenswrapper[4558]: I0120 17:47:31.500507 4558 scope.go:117] "RemoveContainer" containerID="1a264c7c6924592f510f33ea71a48b399db20043fd3d4ea422a9b2423c9ba2a7" Jan 20 17:47:31 crc kubenswrapper[4558]: I0120 17:47:31.518569 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:47:31 crc kubenswrapper[4558]: I0120 17:47:31.529660 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:47:31 crc kubenswrapper[4558]: I0120 17:47:31.540832 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:47:31 crc kubenswrapper[4558]: E0120 17:47:31.541320 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c" containerName="cinder-scheduler" Jan 20 17:47:31 crc kubenswrapper[4558]: I0120 17:47:31.541340 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c" containerName="cinder-scheduler" Jan 20 17:47:31 crc kubenswrapper[4558]: E0120 17:47:31.541359 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c" containerName="probe" Jan 20 17:47:31 crc kubenswrapper[4558]: I0120 17:47:31.541366 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c" containerName="probe" Jan 20 17:47:31 crc kubenswrapper[4558]: I0120 17:47:31.541590 4558 
memory_manager.go:354] "RemoveStaleState removing state" podUID="85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c" containerName="probe" Jan 20 17:47:31 crc kubenswrapper[4558]: I0120 17:47:31.541616 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c" containerName="cinder-scheduler" Jan 20 17:47:31 crc kubenswrapper[4558]: I0120 17:47:31.542692 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:47:31 crc kubenswrapper[4558]: I0120 17:47:31.545655 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-scheduler-config-data" Jan 20 17:47:31 crc kubenswrapper[4558]: I0120 17:47:31.568232 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:47:31 crc kubenswrapper[4558]: I0120 17:47:31.721861 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0f82b75-bed6-4779-ad55-6bec50d7faa6-config-data\") pod \"cinder-scheduler-0\" (UID: \"d0f82b75-bed6-4779-ad55-6bec50d7faa6\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:47:31 crc kubenswrapper[4558]: I0120 17:47:31.722260 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d0f82b75-bed6-4779-ad55-6bec50d7faa6-scripts\") pod \"cinder-scheduler-0\" (UID: \"d0f82b75-bed6-4779-ad55-6bec50d7faa6\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:47:31 crc kubenswrapper[4558]: I0120 17:47:31.722304 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d0f82b75-bed6-4779-ad55-6bec50d7faa6-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"d0f82b75-bed6-4779-ad55-6bec50d7faa6\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:47:31 crc kubenswrapper[4558]: I0120 17:47:31.722445 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0f82b75-bed6-4779-ad55-6bec50d7faa6-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"d0f82b75-bed6-4779-ad55-6bec50d7faa6\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:47:31 crc kubenswrapper[4558]: I0120 17:47:31.722502 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lx6x9\" (UniqueName: \"kubernetes.io/projected/d0f82b75-bed6-4779-ad55-6bec50d7faa6-kube-api-access-lx6x9\") pod \"cinder-scheduler-0\" (UID: \"d0f82b75-bed6-4779-ad55-6bec50d7faa6\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:47:31 crc kubenswrapper[4558]: I0120 17:47:31.722538 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d0f82b75-bed6-4779-ad55-6bec50d7faa6-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"d0f82b75-bed6-4779-ad55-6bec50d7faa6\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:47:31 crc kubenswrapper[4558]: I0120 17:47:31.824158 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0f82b75-bed6-4779-ad55-6bec50d7faa6-config-data\") pod \"cinder-scheduler-0\" (UID: 
\"d0f82b75-bed6-4779-ad55-6bec50d7faa6\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:47:31 crc kubenswrapper[4558]: I0120 17:47:31.824229 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d0f82b75-bed6-4779-ad55-6bec50d7faa6-scripts\") pod \"cinder-scheduler-0\" (UID: \"d0f82b75-bed6-4779-ad55-6bec50d7faa6\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:47:31 crc kubenswrapper[4558]: I0120 17:47:31.824256 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d0f82b75-bed6-4779-ad55-6bec50d7faa6-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"d0f82b75-bed6-4779-ad55-6bec50d7faa6\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:47:31 crc kubenswrapper[4558]: I0120 17:47:31.824321 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0f82b75-bed6-4779-ad55-6bec50d7faa6-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"d0f82b75-bed6-4779-ad55-6bec50d7faa6\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:47:31 crc kubenswrapper[4558]: I0120 17:47:31.824361 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lx6x9\" (UniqueName: \"kubernetes.io/projected/d0f82b75-bed6-4779-ad55-6bec50d7faa6-kube-api-access-lx6x9\") pod \"cinder-scheduler-0\" (UID: \"d0f82b75-bed6-4779-ad55-6bec50d7faa6\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:47:31 crc kubenswrapper[4558]: I0120 17:47:31.824384 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d0f82b75-bed6-4779-ad55-6bec50d7faa6-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"d0f82b75-bed6-4779-ad55-6bec50d7faa6\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:47:31 crc kubenswrapper[4558]: I0120 17:47:31.824476 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d0f82b75-bed6-4779-ad55-6bec50d7faa6-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"d0f82b75-bed6-4779-ad55-6bec50d7faa6\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:47:31 crc kubenswrapper[4558]: I0120 17:47:31.831813 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d0f82b75-bed6-4779-ad55-6bec50d7faa6-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"d0f82b75-bed6-4779-ad55-6bec50d7faa6\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:47:31 crc kubenswrapper[4558]: I0120 17:47:31.832234 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d0f82b75-bed6-4779-ad55-6bec50d7faa6-scripts\") pod \"cinder-scheduler-0\" (UID: \"d0f82b75-bed6-4779-ad55-6bec50d7faa6\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:47:31 crc kubenswrapper[4558]: I0120 17:47:31.832330 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0f82b75-bed6-4779-ad55-6bec50d7faa6-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"d0f82b75-bed6-4779-ad55-6bec50d7faa6\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:47:31 crc kubenswrapper[4558]: I0120 17:47:31.840989 
4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0f82b75-bed6-4779-ad55-6bec50d7faa6-config-data\") pod \"cinder-scheduler-0\" (UID: \"d0f82b75-bed6-4779-ad55-6bec50d7faa6\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:47:31 crc kubenswrapper[4558]: I0120 17:47:31.849894 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lx6x9\" (UniqueName: \"kubernetes.io/projected/d0f82b75-bed6-4779-ad55-6bec50d7faa6-kube-api-access-lx6x9\") pod \"cinder-scheduler-0\" (UID: \"d0f82b75-bed6-4779-ad55-6bec50d7faa6\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:47:31 crc kubenswrapper[4558]: I0120 17:47:31.864446 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:47:32 crc kubenswrapper[4558]: I0120 17:47:32.255902 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:47:32 crc kubenswrapper[4558]: W0120 17:47:32.257665 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd0f82b75_bed6_4779_ad55_6bec50d7faa6.slice/crio-edfc1db1a714bc2dc0ed5e4ab536880a51509fd75e0ab53f9e4968659b41aa8b WatchSource:0}: Error finding container edfc1db1a714bc2dc0ed5e4ab536880a51509fd75e0ab53f9e4968659b41aa8b: Status 404 returned error can't find the container with id edfc1db1a714bc2dc0ed5e4ab536880a51509fd75e0ab53f9e4968659b41aa8b Jan 20 17:47:32 crc kubenswrapper[4558]: I0120 17:47:32.480206 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"d0f82b75-bed6-4779-ad55-6bec50d7faa6","Type":"ContainerStarted","Data":"edfc1db1a714bc2dc0ed5e4ab536880a51509fd75e0ab53f9e4968659b41aa8b"} Jan 20 17:47:32 crc kubenswrapper[4558]: I0120 17:47:32.585739 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c" path="/var/lib/kubelet/pods/85f34e25-fd3f-47dc-9e2e-4150fb9d7a4c/volumes" Jan 20 17:47:33 crc kubenswrapper[4558]: I0120 17:47:33.500615 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"d0f82b75-bed6-4779-ad55-6bec50d7faa6","Type":"ContainerStarted","Data":"7fb1cfce15f7b21b933180ce2945a9ba95c867c596b50488373fe0546cb8f714"} Jan 20 17:47:33 crc kubenswrapper[4558]: I0120 17:47:33.501007 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"d0f82b75-bed6-4779-ad55-6bec50d7faa6","Type":"ContainerStarted","Data":"a612c466a2d2fa8c608454134a7b85c4368226b9c8b498cce072561130074559"} Jan 20 17:47:33 crc kubenswrapper[4558]: I0120 17:47:33.523249 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/cinder-scheduler-0" podStartSLOduration=2.523232956 podStartE2EDuration="2.523232956s" podCreationTimestamp="2026-01-20 17:47:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:47:33.517637475 +0000 UTC m=+3947.277975441" watchObservedRunningTime="2026-01-20 17:47:33.523232956 +0000 UTC m=+3947.283570922" Jan 20 17:47:36 crc kubenswrapper[4558]: I0120 17:47:36.325740 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:47:36 crc 
kubenswrapper[4558]: I0120 17:47:36.468208 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/barbican-api-6fcd55fbd-x56xq" Jan 20 17:47:36 crc kubenswrapper[4558]: I0120 17:47:36.502351 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/barbican-api-6fcd55fbd-x56xq" Jan 20 17:47:36 crc kubenswrapper[4558]: I0120 17:47:36.558663 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-api-58bb6f688-8dbng"] Jan 20 17:47:36 crc kubenswrapper[4558]: I0120 17:47:36.562982 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-api-58bb6f688-8dbng" podUID="ec317104-430d-4590-85a9-257448df4212" containerName="barbican-api-log" containerID="cri-o://4a6b4390944378517a1bce21b451f353d7142299513dc3c272bafd80bf42db8a" gracePeriod=30 Jan 20 17:47:36 crc kubenswrapper[4558]: I0120 17:47:36.563461 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-api-58bb6f688-8dbng" podUID="ec317104-430d-4590-85a9-257448df4212" containerName="barbican-api" containerID="cri-o://1fd8e500782e2308db201caa8beedba31be54584b72f7b311e8a9a029cb348ef" gracePeriod=30 Jan 20 17:47:36 crc kubenswrapper[4558]: I0120 17:47:36.865784 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:47:37 crc kubenswrapper[4558]: I0120 17:47:37.537033 4558 generic.go:334] "Generic (PLEG): container finished" podID="ec317104-430d-4590-85a9-257448df4212" containerID="4a6b4390944378517a1bce21b451f353d7142299513dc3c272bafd80bf42db8a" exitCode=143 Jan 20 17:47:37 crc kubenswrapper[4558]: I0120 17:47:37.537195 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-58bb6f688-8dbng" event={"ID":"ec317104-430d-4590-85a9-257448df4212","Type":"ContainerDied","Data":"4a6b4390944378517a1bce21b451f353d7142299513dc3c272bafd80bf42db8a"} Jan 20 17:47:40 crc kubenswrapper[4558]: I0120 17:47:40.435244 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-api-58bb6f688-8dbng" Jan 20 17:47:40 crc kubenswrapper[4558]: I0120 17:47:40.508616 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec317104-430d-4590-85a9-257448df4212-config-data\") pod \"ec317104-430d-4590-85a9-257448df4212\" (UID: \"ec317104-430d-4590-85a9-257448df4212\") " Jan 20 17:47:40 crc kubenswrapper[4558]: I0120 17:47:40.508787 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec317104-430d-4590-85a9-257448df4212-combined-ca-bundle\") pod \"ec317104-430d-4590-85a9-257448df4212\" (UID: \"ec317104-430d-4590-85a9-257448df4212\") " Jan 20 17:47:40 crc kubenswrapper[4558]: I0120 17:47:40.508848 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ec317104-430d-4590-85a9-257448df4212-logs\") pod \"ec317104-430d-4590-85a9-257448df4212\" (UID: \"ec317104-430d-4590-85a9-257448df4212\") " Jan 20 17:47:40 crc kubenswrapper[4558]: I0120 17:47:40.508977 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ec317104-430d-4590-85a9-257448df4212-config-data-custom\") pod \"ec317104-430d-4590-85a9-257448df4212\" (UID: \"ec317104-430d-4590-85a9-257448df4212\") " Jan 20 17:47:40 crc kubenswrapper[4558]: I0120 17:47:40.509009 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rlc2z\" (UniqueName: \"kubernetes.io/projected/ec317104-430d-4590-85a9-257448df4212-kube-api-access-rlc2z\") pod \"ec317104-430d-4590-85a9-257448df4212\" (UID: \"ec317104-430d-4590-85a9-257448df4212\") " Jan 20 17:47:40 crc kubenswrapper[4558]: I0120 17:47:40.510078 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ec317104-430d-4590-85a9-257448df4212-logs" (OuterVolumeSpecName: "logs") pod "ec317104-430d-4590-85a9-257448df4212" (UID: "ec317104-430d-4590-85a9-257448df4212"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:47:40 crc kubenswrapper[4558]: I0120 17:47:40.514635 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ec317104-430d-4590-85a9-257448df4212-kube-api-access-rlc2z" (OuterVolumeSpecName: "kube-api-access-rlc2z") pod "ec317104-430d-4590-85a9-257448df4212" (UID: "ec317104-430d-4590-85a9-257448df4212"). InnerVolumeSpecName "kube-api-access-rlc2z". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:47:40 crc kubenswrapper[4558]: I0120 17:47:40.515053 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec317104-430d-4590-85a9-257448df4212-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "ec317104-430d-4590-85a9-257448df4212" (UID: "ec317104-430d-4590-85a9-257448df4212"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:47:40 crc kubenswrapper[4558]: I0120 17:47:40.532650 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec317104-430d-4590-85a9-257448df4212-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ec317104-430d-4590-85a9-257448df4212" (UID: "ec317104-430d-4590-85a9-257448df4212"). 
InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:47:40 crc kubenswrapper[4558]: I0120 17:47:40.549013 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec317104-430d-4590-85a9-257448df4212-config-data" (OuterVolumeSpecName: "config-data") pod "ec317104-430d-4590-85a9-257448df4212" (UID: "ec317104-430d-4590-85a9-257448df4212"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:47:40 crc kubenswrapper[4558]: I0120 17:47:40.572636 4558 generic.go:334] "Generic (PLEG): container finished" podID="ec317104-430d-4590-85a9-257448df4212" containerID="1fd8e500782e2308db201caa8beedba31be54584b72f7b311e8a9a029cb348ef" exitCode=0 Jan 20 17:47:40 crc kubenswrapper[4558]: I0120 17:47:40.572744 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-api-58bb6f688-8dbng" Jan 20 17:47:40 crc kubenswrapper[4558]: I0120 17:47:40.593659 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-58bb6f688-8dbng" event={"ID":"ec317104-430d-4590-85a9-257448df4212","Type":"ContainerDied","Data":"1fd8e500782e2308db201caa8beedba31be54584b72f7b311e8a9a029cb348ef"} Jan 20 17:47:40 crc kubenswrapper[4558]: I0120 17:47:40.593713 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-58bb6f688-8dbng" event={"ID":"ec317104-430d-4590-85a9-257448df4212","Type":"ContainerDied","Data":"d68edacd8a4cd4a1fe2b831e5cb664a262df5824816ffbad65cdaeedba898224"} Jan 20 17:47:40 crc kubenswrapper[4558]: I0120 17:47:40.593735 4558 scope.go:117] "RemoveContainer" containerID="1fd8e500782e2308db201caa8beedba31be54584b72f7b311e8a9a029cb348ef" Jan 20 17:47:40 crc kubenswrapper[4558]: I0120 17:47:40.609452 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-api-58bb6f688-8dbng"] Jan 20 17:47:40 crc kubenswrapper[4558]: I0120 17:47:40.614128 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ec317104-430d-4590-85a9-257448df4212-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:40 crc kubenswrapper[4558]: I0120 17:47:40.614181 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ec317104-430d-4590-85a9-257448df4212-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:40 crc kubenswrapper[4558]: I0120 17:47:40.614197 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ec317104-430d-4590-85a9-257448df4212-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:40 crc kubenswrapper[4558]: I0120 17:47:40.614210 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rlc2z\" (UniqueName: \"kubernetes.io/projected/ec317104-430d-4590-85a9-257448df4212-kube-api-access-rlc2z\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:40 crc kubenswrapper[4558]: I0120 17:47:40.614225 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec317104-430d-4590-85a9-257448df4212-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:40 crc kubenswrapper[4558]: I0120 17:47:40.619492 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-api-58bb6f688-8dbng"] Jan 20 17:47:40 crc kubenswrapper[4558]: I0120 
17:47:40.634307 4558 scope.go:117] "RemoveContainer" containerID="4a6b4390944378517a1bce21b451f353d7142299513dc3c272bafd80bf42db8a" Jan 20 17:47:40 crc kubenswrapper[4558]: I0120 17:47:40.654817 4558 scope.go:117] "RemoveContainer" containerID="1fd8e500782e2308db201caa8beedba31be54584b72f7b311e8a9a029cb348ef" Jan 20 17:47:40 crc kubenswrapper[4558]: E0120 17:47:40.655341 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1fd8e500782e2308db201caa8beedba31be54584b72f7b311e8a9a029cb348ef\": container with ID starting with 1fd8e500782e2308db201caa8beedba31be54584b72f7b311e8a9a029cb348ef not found: ID does not exist" containerID="1fd8e500782e2308db201caa8beedba31be54584b72f7b311e8a9a029cb348ef" Jan 20 17:47:40 crc kubenswrapper[4558]: I0120 17:47:40.655378 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1fd8e500782e2308db201caa8beedba31be54584b72f7b311e8a9a029cb348ef"} err="failed to get container status \"1fd8e500782e2308db201caa8beedba31be54584b72f7b311e8a9a029cb348ef\": rpc error: code = NotFound desc = could not find container \"1fd8e500782e2308db201caa8beedba31be54584b72f7b311e8a9a029cb348ef\": container with ID starting with 1fd8e500782e2308db201caa8beedba31be54584b72f7b311e8a9a029cb348ef not found: ID does not exist" Jan 20 17:47:40 crc kubenswrapper[4558]: I0120 17:47:40.655398 4558 scope.go:117] "RemoveContainer" containerID="4a6b4390944378517a1bce21b451f353d7142299513dc3c272bafd80bf42db8a" Jan 20 17:47:40 crc kubenswrapper[4558]: E0120 17:47:40.655872 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4a6b4390944378517a1bce21b451f353d7142299513dc3c272bafd80bf42db8a\": container with ID starting with 4a6b4390944378517a1bce21b451f353d7142299513dc3c272bafd80bf42db8a not found: ID does not exist" containerID="4a6b4390944378517a1bce21b451f353d7142299513dc3c272bafd80bf42db8a" Jan 20 17:47:40 crc kubenswrapper[4558]: I0120 17:47:40.655912 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4a6b4390944378517a1bce21b451f353d7142299513dc3c272bafd80bf42db8a"} err="failed to get container status \"4a6b4390944378517a1bce21b451f353d7142299513dc3c272bafd80bf42db8a\": rpc error: code = NotFound desc = could not find container \"4a6b4390944378517a1bce21b451f353d7142299513dc3c272bafd80bf42db8a\": container with ID starting with 4a6b4390944378517a1bce21b451f353d7142299513dc3c272bafd80bf42db8a not found: ID does not exist" Jan 20 17:47:42 crc kubenswrapper[4558]: I0120 17:47:42.065055 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:47:42 crc kubenswrapper[4558]: I0120 17:47:42.577494 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ec317104-430d-4590-85a9-257448df4212" path="/var/lib/kubelet/pods/ec317104-430d-4590-85a9-257448df4212/volumes" Jan 20 17:47:47 crc kubenswrapper[4558]: I0120 17:47:47.717191 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/placement-7dc79cc978-clpbw" Jan 20 17:47:47 crc kubenswrapper[4558]: I0120 17:47:47.721525 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/placement-7dc79cc978-clpbw" Jan 20 17:47:48 crc kubenswrapper[4558]: I0120 17:47:48.445842 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="ready" pod="openstack-kuttl-tests/placement-59975f6898-zxn8m" Jan 20 17:47:48 crc kubenswrapper[4558]: I0120 17:47:48.448383 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/placement-59975f6898-zxn8m" Jan 20 17:47:48 crc kubenswrapper[4558]: I0120 17:47:48.517727 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-7dc79cc978-clpbw"] Jan 20 17:47:48 crc kubenswrapper[4558]: I0120 17:47:48.684605 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/neutron-5696778669-nc8rp" Jan 20 17:47:49 crc kubenswrapper[4558]: I0120 17:47:49.106062 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:47:49 crc kubenswrapper[4558]: I0120 17:47:49.674022 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/placement-7dc79cc978-clpbw" podUID="76dc3695-3423-40fe-afcc-798d8a78c542" containerName="placement-log" containerID="cri-o://6368279e916b13bd72d42c5d932eb5a8ffcdded7a96ea231fd08ff982fc45343" gracePeriod=30 Jan 20 17:47:49 crc kubenswrapper[4558]: I0120 17:47:49.674102 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/placement-7dc79cc978-clpbw" podUID="76dc3695-3423-40fe-afcc-798d8a78c542" containerName="placement-api" containerID="cri-o://dc94f746e9dc1ee4a286f07430cbbe7edd91ff78805b40d08e514556cdc4d5e2" gracePeriod=30 Jan 20 17:47:50 crc kubenswrapper[4558]: I0120 17:47:50.687637 4558 generic.go:334] "Generic (PLEG): container finished" podID="76dc3695-3423-40fe-afcc-798d8a78c542" containerID="6368279e916b13bd72d42c5d932eb5a8ffcdded7a96ea231fd08ff982fc45343" exitCode=143 Jan 20 17:47:50 crc kubenswrapper[4558]: I0120 17:47:50.687691 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-7dc79cc978-clpbw" event={"ID":"76dc3695-3423-40fe-afcc-798d8a78c542","Type":"ContainerDied","Data":"6368279e916b13bd72d42c5d932eb5a8ffcdded7a96ea231fd08ff982fc45343"} Jan 20 17:47:53 crc kubenswrapper[4558]: I0120 17:47:53.207336 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-7dc79cc978-clpbw" Jan 20 17:47:53 crc kubenswrapper[4558]: I0120 17:47:53.261855 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xt9wp\" (UniqueName: \"kubernetes.io/projected/76dc3695-3423-40fe-afcc-798d8a78c542-kube-api-access-xt9wp\") pod \"76dc3695-3423-40fe-afcc-798d8a78c542\" (UID: \"76dc3695-3423-40fe-afcc-798d8a78c542\") " Jan 20 17:47:53 crc kubenswrapper[4558]: I0120 17:47:53.261910 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/76dc3695-3423-40fe-afcc-798d8a78c542-config-data\") pod \"76dc3695-3423-40fe-afcc-798d8a78c542\" (UID: \"76dc3695-3423-40fe-afcc-798d8a78c542\") " Jan 20 17:47:53 crc kubenswrapper[4558]: I0120 17:47:53.261952 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/76dc3695-3423-40fe-afcc-798d8a78c542-logs\") pod \"76dc3695-3423-40fe-afcc-798d8a78c542\" (UID: \"76dc3695-3423-40fe-afcc-798d8a78c542\") " Jan 20 17:47:53 crc kubenswrapper[4558]: I0120 17:47:53.262084 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/76dc3695-3423-40fe-afcc-798d8a78c542-scripts\") pod \"76dc3695-3423-40fe-afcc-798d8a78c542\" (UID: \"76dc3695-3423-40fe-afcc-798d8a78c542\") " Jan 20 17:47:53 crc kubenswrapper[4558]: I0120 17:47:53.262159 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76dc3695-3423-40fe-afcc-798d8a78c542-combined-ca-bundle\") pod \"76dc3695-3423-40fe-afcc-798d8a78c542\" (UID: \"76dc3695-3423-40fe-afcc-798d8a78c542\") " Jan 20 17:47:53 crc kubenswrapper[4558]: I0120 17:47:53.262547 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/76dc3695-3423-40fe-afcc-798d8a78c542-logs" (OuterVolumeSpecName: "logs") pod "76dc3695-3423-40fe-afcc-798d8a78c542" (UID: "76dc3695-3423-40fe-afcc-798d8a78c542"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:47:53 crc kubenswrapper[4558]: I0120 17:47:53.262808 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/76dc3695-3423-40fe-afcc-798d8a78c542-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:53 crc kubenswrapper[4558]: I0120 17:47:53.267936 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/76dc3695-3423-40fe-afcc-798d8a78c542-scripts" (OuterVolumeSpecName: "scripts") pod "76dc3695-3423-40fe-afcc-798d8a78c542" (UID: "76dc3695-3423-40fe-afcc-798d8a78c542"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:47:53 crc kubenswrapper[4558]: I0120 17:47:53.267992 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/76dc3695-3423-40fe-afcc-798d8a78c542-kube-api-access-xt9wp" (OuterVolumeSpecName: "kube-api-access-xt9wp") pod "76dc3695-3423-40fe-afcc-798d8a78c542" (UID: "76dc3695-3423-40fe-afcc-798d8a78c542"). InnerVolumeSpecName "kube-api-access-xt9wp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:47:53 crc kubenswrapper[4558]: I0120 17:47:53.301879 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/76dc3695-3423-40fe-afcc-798d8a78c542-config-data" (OuterVolumeSpecName: "config-data") pod "76dc3695-3423-40fe-afcc-798d8a78c542" (UID: "76dc3695-3423-40fe-afcc-798d8a78c542"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:47:53 crc kubenswrapper[4558]: I0120 17:47:53.302060 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/76dc3695-3423-40fe-afcc-798d8a78c542-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "76dc3695-3423-40fe-afcc-798d8a78c542" (UID: "76dc3695-3423-40fe-afcc-798d8a78c542"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:47:53 crc kubenswrapper[4558]: I0120 17:47:53.365400 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/76dc3695-3423-40fe-afcc-798d8a78c542-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:53 crc kubenswrapper[4558]: I0120 17:47:53.365434 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76dc3695-3423-40fe-afcc-798d8a78c542-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:53 crc kubenswrapper[4558]: I0120 17:47:53.365451 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xt9wp\" (UniqueName: \"kubernetes.io/projected/76dc3695-3423-40fe-afcc-798d8a78c542-kube-api-access-xt9wp\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:53 crc kubenswrapper[4558]: I0120 17:47:53.365462 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/76dc3695-3423-40fe-afcc-798d8a78c542-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:53 crc kubenswrapper[4558]: I0120 17:47:53.731697 4558 generic.go:334] "Generic (PLEG): container finished" podID="76dc3695-3423-40fe-afcc-798d8a78c542" containerID="dc94f746e9dc1ee4a286f07430cbbe7edd91ff78805b40d08e514556cdc4d5e2" exitCode=0 Jan 20 17:47:53 crc kubenswrapper[4558]: I0120 17:47:53.731752 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-7dc79cc978-clpbw" event={"ID":"76dc3695-3423-40fe-afcc-798d8a78c542","Type":"ContainerDied","Data":"dc94f746e9dc1ee4a286f07430cbbe7edd91ff78805b40d08e514556cdc4d5e2"} Jan 20 17:47:53 crc kubenswrapper[4558]: I0120 17:47:53.731783 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-7dc79cc978-clpbw" Jan 20 17:47:53 crc kubenswrapper[4558]: I0120 17:47:53.731820 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-7dc79cc978-clpbw" event={"ID":"76dc3695-3423-40fe-afcc-798d8a78c542","Type":"ContainerDied","Data":"b542b9dce795f7b75d4ec149215a51a043aa7f7c1d433348c2c50561de7be860"} Jan 20 17:47:53 crc kubenswrapper[4558]: I0120 17:47:53.731876 4558 scope.go:117] "RemoveContainer" containerID="dc94f746e9dc1ee4a286f07430cbbe7edd91ff78805b40d08e514556cdc4d5e2" Jan 20 17:47:53 crc kubenswrapper[4558]: I0120 17:47:53.759046 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-7dc79cc978-clpbw"] Jan 20 17:47:53 crc kubenswrapper[4558]: I0120 17:47:53.759560 4558 scope.go:117] "RemoveContainer" containerID="6368279e916b13bd72d42c5d932eb5a8ffcdded7a96ea231fd08ff982fc45343" Jan 20 17:47:53 crc kubenswrapper[4558]: I0120 17:47:53.766001 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/placement-7dc79cc978-clpbw"] Jan 20 17:47:53 crc kubenswrapper[4558]: I0120 17:47:53.777703 4558 scope.go:117] "RemoveContainer" containerID="dc94f746e9dc1ee4a286f07430cbbe7edd91ff78805b40d08e514556cdc4d5e2" Jan 20 17:47:53 crc kubenswrapper[4558]: E0120 17:47:53.778027 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dc94f746e9dc1ee4a286f07430cbbe7edd91ff78805b40d08e514556cdc4d5e2\": container with ID starting with dc94f746e9dc1ee4a286f07430cbbe7edd91ff78805b40d08e514556cdc4d5e2 not found: ID does not exist" containerID="dc94f746e9dc1ee4a286f07430cbbe7edd91ff78805b40d08e514556cdc4d5e2" Jan 20 17:47:53 crc kubenswrapper[4558]: I0120 17:47:53.778071 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dc94f746e9dc1ee4a286f07430cbbe7edd91ff78805b40d08e514556cdc4d5e2"} err="failed to get container status \"dc94f746e9dc1ee4a286f07430cbbe7edd91ff78805b40d08e514556cdc4d5e2\": rpc error: code = NotFound desc = could not find container \"dc94f746e9dc1ee4a286f07430cbbe7edd91ff78805b40d08e514556cdc4d5e2\": container with ID starting with dc94f746e9dc1ee4a286f07430cbbe7edd91ff78805b40d08e514556cdc4d5e2 not found: ID does not exist" Jan 20 17:47:53 crc kubenswrapper[4558]: I0120 17:47:53.778102 4558 scope.go:117] "RemoveContainer" containerID="6368279e916b13bd72d42c5d932eb5a8ffcdded7a96ea231fd08ff982fc45343" Jan 20 17:47:53 crc kubenswrapper[4558]: E0120 17:47:53.778447 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6368279e916b13bd72d42c5d932eb5a8ffcdded7a96ea231fd08ff982fc45343\": container with ID starting with 6368279e916b13bd72d42c5d932eb5a8ffcdded7a96ea231fd08ff982fc45343 not found: ID does not exist" containerID="6368279e916b13bd72d42c5d932eb5a8ffcdded7a96ea231fd08ff982fc45343" Jan 20 17:47:53 crc kubenswrapper[4558]: I0120 17:47:53.778480 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6368279e916b13bd72d42c5d932eb5a8ffcdded7a96ea231fd08ff982fc45343"} err="failed to get container status \"6368279e916b13bd72d42c5d932eb5a8ffcdded7a96ea231fd08ff982fc45343\": rpc error: code = NotFound desc = could not find container \"6368279e916b13bd72d42c5d932eb5a8ffcdded7a96ea231fd08ff982fc45343\": container with ID starting with 6368279e916b13bd72d42c5d932eb5a8ffcdded7a96ea231fd08ff982fc45343 
not found: ID does not exist" Jan 20 17:47:54 crc kubenswrapper[4558]: I0120 17:47:54.577778 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="76dc3695-3423-40fe-afcc-798d8a78c542" path="/var/lib/kubelet/pods/76dc3695-3423-40fe-afcc-798d8a78c542/volumes" Jan 20 17:47:56 crc kubenswrapper[4558]: I0120 17:47:56.026880 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/keystone-7ff6469d47-k4mhb" Jan 20 17:47:57 crc kubenswrapper[4558]: I0120 17:47:57.329663 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 17:47:57 crc kubenswrapper[4558]: I0120 17:47:57.330040 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 17:47:57 crc kubenswrapper[4558]: I0120 17:47:57.484706 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/neutron-78bcff9576-76n9h" Jan 20 17:47:57 crc kubenswrapper[4558]: I0120 17:47:57.548842 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-5696778669-nc8rp"] Jan 20 17:47:57 crc kubenswrapper[4558]: I0120 17:47:57.549178 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-5696778669-nc8rp" podUID="f0ce81a9-0d35-4813-b16a-74b5757bbffa" containerName="neutron-api" containerID="cri-o://586bbbff36fe57de7cd9f2ecae0bec8f27031f35667892a8eb2376c25bb37cdf" gracePeriod=30 Jan 20 17:47:57 crc kubenswrapper[4558]: I0120 17:47:57.549807 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-5696778669-nc8rp" podUID="f0ce81a9-0d35-4813-b16a-74b5757bbffa" containerName="neutron-httpd" containerID="cri-o://ce80cef01c83d809bb3070c10cc026abf5f54be5422d252f6f5f31c47b21a1bb" gracePeriod=30 Jan 20 17:47:57 crc kubenswrapper[4558]: I0120 17:47:57.777982 4558 generic.go:334] "Generic (PLEG): container finished" podID="f0ce81a9-0d35-4813-b16a-74b5757bbffa" containerID="ce80cef01c83d809bb3070c10cc026abf5f54be5422d252f6f5f31c47b21a1bb" exitCode=0 Jan 20 17:47:57 crc kubenswrapper[4558]: I0120 17:47:57.778031 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-5696778669-nc8rp" event={"ID":"f0ce81a9-0d35-4813-b16a-74b5757bbffa","Type":"ContainerDied","Data":"ce80cef01c83d809bb3070c10cc026abf5f54be5422d252f6f5f31c47b21a1bb"} Jan 20 17:47:59 crc kubenswrapper[4558]: I0120 17:47:59.459841 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:47:59 crc kubenswrapper[4558]: E0120 17:47:59.461501 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec317104-430d-4590-85a9-257448df4212" containerName="barbican-api-log" Jan 20 17:47:59 crc kubenswrapper[4558]: I0120 17:47:59.461536 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec317104-430d-4590-85a9-257448df4212" containerName="barbican-api-log" Jan 20 17:47:59 crc kubenswrapper[4558]: E0120 17:47:59.461554 4558 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="76dc3695-3423-40fe-afcc-798d8a78c542" containerName="placement-api" Jan 20 17:47:59 crc kubenswrapper[4558]: I0120 17:47:59.461563 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="76dc3695-3423-40fe-afcc-798d8a78c542" containerName="placement-api" Jan 20 17:47:59 crc kubenswrapper[4558]: E0120 17:47:59.461576 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec317104-430d-4590-85a9-257448df4212" containerName="barbican-api" Jan 20 17:47:59 crc kubenswrapper[4558]: I0120 17:47:59.461582 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec317104-430d-4590-85a9-257448df4212" containerName="barbican-api" Jan 20 17:47:59 crc kubenswrapper[4558]: E0120 17:47:59.461599 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76dc3695-3423-40fe-afcc-798d8a78c542" containerName="placement-log" Jan 20 17:47:59 crc kubenswrapper[4558]: I0120 17:47:59.461605 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="76dc3695-3423-40fe-afcc-798d8a78c542" containerName="placement-log" Jan 20 17:47:59 crc kubenswrapper[4558]: I0120 17:47:59.461814 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="76dc3695-3423-40fe-afcc-798d8a78c542" containerName="placement-log" Jan 20 17:47:59 crc kubenswrapper[4558]: I0120 17:47:59.461832 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec317104-430d-4590-85a9-257448df4212" containerName="barbican-api" Jan 20 17:47:59 crc kubenswrapper[4558]: I0120 17:47:59.461840 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="76dc3695-3423-40fe-afcc-798d8a78c542" containerName="placement-api" Jan 20 17:47:59 crc kubenswrapper[4558]: I0120 17:47:59.461849 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec317104-430d-4590-85a9-257448df4212" containerName="barbican-api-log" Jan 20 17:47:59 crc kubenswrapper[4558]: I0120 17:47:59.462614 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstackclient" Jan 20 17:47:59 crc kubenswrapper[4558]: I0120 17:47:59.464473 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-config" Jan 20 17:47:59 crc kubenswrapper[4558]: I0120 17:47:59.464915 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"openstack-config-secret" Jan 20 17:47:59 crc kubenswrapper[4558]: I0120 17:47:59.465145 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"openstackclient-openstackclient-dockercfg-kb7bc" Jan 20 17:47:59 crc kubenswrapper[4558]: I0120 17:47:59.476097 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:47:59 crc kubenswrapper[4558]: I0120 17:47:59.484050 4558 status_manager.go:875] "Failed to update status for pod" pod="openstack-kuttl-tests/openstackclient" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f2ebcc58-b9e1-48a9-97c1-ed86d2ec80d0\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T17:47:59Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T17:47:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T17:47:59Z\\\",\\\"message\\\":\\\"containers with unready status: [openstackclient]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-20T17:47:59Z\\\",\\\"message\\\":\\\"containers with unready status: [openstackclient]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/podified-antelope-centos9/openstack-openstackclient@sha256:2b4f8494513a3af102066fec5868ab167ac8664aceb2f0c639d7a0b60260a944\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"openstackclient\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/home/cloud-admin/.config/openstack/clouds.yaml\\\",\\\"name\\\":\\\"openstack-config\\\"},{\\\"mountPath\\\":\\\"/home/cloud-admin/.config/openstack/secure.yaml\\\",\\\"name\\\":\\\"openstack-config-secret\\\"},{\\\"mountPath\\\":\\\"/home/cloud-admin/cloudrc\\\",\\\"name\\\":\\\"openstack-config-secret\\\"},{\\\"mountPath\\\":\\\"/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem\\\",\\\"name\\\":\\\"combined-ca-bundle\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mg2f8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-20T17:47:59Z\\\"}}\" for pod \"openstack-kuttl-tests\"/\"openstackclient\": pods \"openstackclient\" not found" Jan 20 17:47:59 crc 
kubenswrapper[4558]: I0120 17:47:59.484894 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:47:59 crc kubenswrapper[4558]: E0120 17:47:59.485938 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[combined-ca-bundle kube-api-access-mg2f8 openstack-config openstack-config-secret], unattached volumes=[], failed to process volumes=[combined-ca-bundle kube-api-access-mg2f8 openstack-config openstack-config-secret]: context canceled" pod="openstack-kuttl-tests/openstackclient" podUID="f2ebcc58-b9e1-48a9-97c1-ed86d2ec80d0" Jan 20 17:47:59 crc kubenswrapper[4558]: I0120 17:47:59.513602 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:47:59 crc kubenswrapper[4558]: I0120 17:47:59.520225 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:47:59 crc kubenswrapper[4558]: I0120 17:47:59.521567 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstackclient" Jan 20 17:47:59 crc kubenswrapper[4558]: I0120 17:47:59.541928 4558 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack-kuttl-tests/openstackclient" oldPodUID="f2ebcc58-b9e1-48a9-97c1-ed86d2ec80d0" podUID="a0ddca73-7f97-4ec0-9a04-4686f84eb2e6" Jan 20 17:47:59 crc kubenswrapper[4558]: I0120 17:47:59.542079 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:47:59 crc kubenswrapper[4558]: I0120 17:47:59.588341 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/f2ebcc58-b9e1-48a9-97c1-ed86d2ec80d0-openstack-config-secret\") pod \"openstackclient\" (UID: \"f2ebcc58-b9e1-48a9-97c1-ed86d2ec80d0\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:47:59 crc kubenswrapper[4558]: I0120 17:47:59.588459 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mg2f8\" (UniqueName: \"kubernetes.io/projected/f2ebcc58-b9e1-48a9-97c1-ed86d2ec80d0-kube-api-access-mg2f8\") pod \"openstackclient\" (UID: \"f2ebcc58-b9e1-48a9-97c1-ed86d2ec80d0\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:47:59 crc kubenswrapper[4558]: I0120 17:47:59.588518 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2ebcc58-b9e1-48a9-97c1-ed86d2ec80d0-combined-ca-bundle\") pod \"openstackclient\" (UID: \"f2ebcc58-b9e1-48a9-97c1-ed86d2ec80d0\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:47:59 crc kubenswrapper[4558]: I0120 17:47:59.589864 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/f2ebcc58-b9e1-48a9-97c1-ed86d2ec80d0-openstack-config\") pod \"openstackclient\" (UID: \"f2ebcc58-b9e1-48a9-97c1-ed86d2ec80d0\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:47:59 crc kubenswrapper[4558]: I0120 17:47:59.691196 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/f2ebcc58-b9e1-48a9-97c1-ed86d2ec80d0-openstack-config\") pod \"openstackclient\" (UID: \"f2ebcc58-b9e1-48a9-97c1-ed86d2ec80d0\") " 
pod="openstack-kuttl-tests/openstackclient" Jan 20 17:47:59 crc kubenswrapper[4558]: I0120 17:47:59.691282 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/f2ebcc58-b9e1-48a9-97c1-ed86d2ec80d0-openstack-config-secret\") pod \"openstackclient\" (UID: \"f2ebcc58-b9e1-48a9-97c1-ed86d2ec80d0\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:47:59 crc kubenswrapper[4558]: I0120 17:47:59.691329 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/a0ddca73-7f97-4ec0-9a04-4686f84eb2e6-openstack-config-secret\") pod \"openstackclient\" (UID: \"a0ddca73-7f97-4ec0-9a04-4686f84eb2e6\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:47:59 crc kubenswrapper[4558]: I0120 17:47:59.691368 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hpk48\" (UniqueName: \"kubernetes.io/projected/a0ddca73-7f97-4ec0-9a04-4686f84eb2e6-kube-api-access-hpk48\") pod \"openstackclient\" (UID: \"a0ddca73-7f97-4ec0-9a04-4686f84eb2e6\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:47:59 crc kubenswrapper[4558]: I0120 17:47:59.691417 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/a0ddca73-7f97-4ec0-9a04-4686f84eb2e6-openstack-config\") pod \"openstackclient\" (UID: \"a0ddca73-7f97-4ec0-9a04-4686f84eb2e6\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:47:59 crc kubenswrapper[4558]: I0120 17:47:59.691447 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mg2f8\" (UniqueName: \"kubernetes.io/projected/f2ebcc58-b9e1-48a9-97c1-ed86d2ec80d0-kube-api-access-mg2f8\") pod \"openstackclient\" (UID: \"f2ebcc58-b9e1-48a9-97c1-ed86d2ec80d0\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:47:59 crc kubenswrapper[4558]: I0120 17:47:59.691503 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0ddca73-7f97-4ec0-9a04-4686f84eb2e6-combined-ca-bundle\") pod \"openstackclient\" (UID: \"a0ddca73-7f97-4ec0-9a04-4686f84eb2e6\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:47:59 crc kubenswrapper[4558]: I0120 17:47:59.691535 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2ebcc58-b9e1-48a9-97c1-ed86d2ec80d0-combined-ca-bundle\") pod \"openstackclient\" (UID: \"f2ebcc58-b9e1-48a9-97c1-ed86d2ec80d0\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:47:59 crc kubenswrapper[4558]: I0120 17:47:59.692210 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/f2ebcc58-b9e1-48a9-97c1-ed86d2ec80d0-openstack-config\") pod \"openstackclient\" (UID: \"f2ebcc58-b9e1-48a9-97c1-ed86d2ec80d0\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:47:59 crc kubenswrapper[4558]: E0120 17:47:59.696488 4558 projected.go:194] Error preparing data for projected volume kube-api-access-mg2f8 for pod openstack-kuttl-tests/openstackclient: failed to fetch token: serviceaccounts "openstackclient-openstackclient" is forbidden: the UID in the bound object reference (f2ebcc58-b9e1-48a9-97c1-ed86d2ec80d0) does 
not match the UID in record. The object might have been deleted and then recreated Jan 20 17:47:59 crc kubenswrapper[4558]: E0120 17:47:59.696651 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/f2ebcc58-b9e1-48a9-97c1-ed86d2ec80d0-kube-api-access-mg2f8 podName:f2ebcc58-b9e1-48a9-97c1-ed86d2ec80d0 nodeName:}" failed. No retries permitted until 2026-01-20 17:48:00.196627337 +0000 UTC m=+3973.956965304 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-mg2f8" (UniqueName: "kubernetes.io/projected/f2ebcc58-b9e1-48a9-97c1-ed86d2ec80d0-kube-api-access-mg2f8") pod "openstackclient" (UID: "f2ebcc58-b9e1-48a9-97c1-ed86d2ec80d0") : failed to fetch token: serviceaccounts "openstackclient-openstackclient" is forbidden: the UID in the bound object reference (f2ebcc58-b9e1-48a9-97c1-ed86d2ec80d0) does not match the UID in record. The object might have been deleted and then recreated Jan 20 17:47:59 crc kubenswrapper[4558]: I0120 17:47:59.697265 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2ebcc58-b9e1-48a9-97c1-ed86d2ec80d0-combined-ca-bundle\") pod \"openstackclient\" (UID: \"f2ebcc58-b9e1-48a9-97c1-ed86d2ec80d0\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:47:59 crc kubenswrapper[4558]: I0120 17:47:59.697552 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/f2ebcc58-b9e1-48a9-97c1-ed86d2ec80d0-openstack-config-secret\") pod \"openstackclient\" (UID: \"f2ebcc58-b9e1-48a9-97c1-ed86d2ec80d0\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:47:59 crc kubenswrapper[4558]: I0120 17:47:59.793057 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/a0ddca73-7f97-4ec0-9a04-4686f84eb2e6-openstack-config-secret\") pod \"openstackclient\" (UID: \"a0ddca73-7f97-4ec0-9a04-4686f84eb2e6\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:47:59 crc kubenswrapper[4558]: I0120 17:47:59.793102 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hpk48\" (UniqueName: \"kubernetes.io/projected/a0ddca73-7f97-4ec0-9a04-4686f84eb2e6-kube-api-access-hpk48\") pod \"openstackclient\" (UID: \"a0ddca73-7f97-4ec0-9a04-4686f84eb2e6\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:47:59 crc kubenswrapper[4558]: I0120 17:47:59.793143 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/a0ddca73-7f97-4ec0-9a04-4686f84eb2e6-openstack-config\") pod \"openstackclient\" (UID: \"a0ddca73-7f97-4ec0-9a04-4686f84eb2e6\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:47:59 crc kubenswrapper[4558]: I0120 17:47:59.793206 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0ddca73-7f97-4ec0-9a04-4686f84eb2e6-combined-ca-bundle\") pod \"openstackclient\" (UID: \"a0ddca73-7f97-4ec0-9a04-4686f84eb2e6\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:47:59 crc kubenswrapper[4558]: I0120 17:47:59.794074 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/a0ddca73-7f97-4ec0-9a04-4686f84eb2e6-openstack-config\") pod \"openstackclient\" (UID: 
\"a0ddca73-7f97-4ec0-9a04-4686f84eb2e6\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:47:59 crc kubenswrapper[4558]: I0120 17:47:59.794473 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstackclient" Jan 20 17:47:59 crc kubenswrapper[4558]: I0120 17:47:59.797659 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0ddca73-7f97-4ec0-9a04-4686f84eb2e6-combined-ca-bundle\") pod \"openstackclient\" (UID: \"a0ddca73-7f97-4ec0-9a04-4686f84eb2e6\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:47:59 crc kubenswrapper[4558]: I0120 17:47:59.799306 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/a0ddca73-7f97-4ec0-9a04-4686f84eb2e6-openstack-config-secret\") pod \"openstackclient\" (UID: \"a0ddca73-7f97-4ec0-9a04-4686f84eb2e6\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:47:59 crc kubenswrapper[4558]: I0120 17:47:59.804718 4558 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack-kuttl-tests/openstackclient" oldPodUID="f2ebcc58-b9e1-48a9-97c1-ed86d2ec80d0" podUID="a0ddca73-7f97-4ec0-9a04-4686f84eb2e6" Jan 20 17:47:59 crc kubenswrapper[4558]: I0120 17:47:59.816340 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hpk48\" (UniqueName: \"kubernetes.io/projected/a0ddca73-7f97-4ec0-9a04-4686f84eb2e6-kube-api-access-hpk48\") pod \"openstackclient\" (UID: \"a0ddca73-7f97-4ec0-9a04-4686f84eb2e6\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:47:59 crc kubenswrapper[4558]: I0120 17:47:59.843502 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstackclient" Jan 20 17:47:59 crc kubenswrapper[4558]: I0120 17:47:59.868312 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstackclient" Jan 20 17:47:59 crc kubenswrapper[4558]: I0120 17:47:59.872470 4558 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack-kuttl-tests/openstackclient" oldPodUID="f2ebcc58-b9e1-48a9-97c1-ed86d2ec80d0" podUID="a0ddca73-7f97-4ec0-9a04-4686f84eb2e6" Jan 20 17:47:59 crc kubenswrapper[4558]: I0120 17:47:59.897872 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/f2ebcc58-b9e1-48a9-97c1-ed86d2ec80d0-openstack-config\") pod \"f2ebcc58-b9e1-48a9-97c1-ed86d2ec80d0\" (UID: \"f2ebcc58-b9e1-48a9-97c1-ed86d2ec80d0\") " Jan 20 17:47:59 crc kubenswrapper[4558]: I0120 17:47:59.898082 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2ebcc58-b9e1-48a9-97c1-ed86d2ec80d0-combined-ca-bundle\") pod \"f2ebcc58-b9e1-48a9-97c1-ed86d2ec80d0\" (UID: \"f2ebcc58-b9e1-48a9-97c1-ed86d2ec80d0\") " Jan 20 17:47:59 crc kubenswrapper[4558]: I0120 17:47:59.898349 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/f2ebcc58-b9e1-48a9-97c1-ed86d2ec80d0-openstack-config-secret\") pod \"f2ebcc58-b9e1-48a9-97c1-ed86d2ec80d0\" (UID: \"f2ebcc58-b9e1-48a9-97c1-ed86d2ec80d0\") " Jan 20 17:47:59 crc kubenswrapper[4558]: I0120 17:47:59.898574 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f2ebcc58-b9e1-48a9-97c1-ed86d2ec80d0-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "f2ebcc58-b9e1-48a9-97c1-ed86d2ec80d0" (UID: "f2ebcc58-b9e1-48a9-97c1-ed86d2ec80d0"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:47:59 crc kubenswrapper[4558]: I0120 17:47:59.899051 4558 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/f2ebcc58-b9e1-48a9-97c1-ed86d2ec80d0-openstack-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:59 crc kubenswrapper[4558]: I0120 17:47:59.899066 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg2f8\" (UniqueName: \"kubernetes.io/projected/f2ebcc58-b9e1-48a9-97c1-ed86d2ec80d0-kube-api-access-mg2f8\") on node \"crc\" DevicePath \"\"" Jan 20 17:47:59 crc kubenswrapper[4558]: I0120 17:47:59.901461 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f2ebcc58-b9e1-48a9-97c1-ed86d2ec80d0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f2ebcc58-b9e1-48a9-97c1-ed86d2ec80d0" (UID: "f2ebcc58-b9e1-48a9-97c1-ed86d2ec80d0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:47:59 crc kubenswrapper[4558]: I0120 17:47:59.913293 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f2ebcc58-b9e1-48a9-97c1-ed86d2ec80d0-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "f2ebcc58-b9e1-48a9-97c1-ed86d2ec80d0" (UID: "f2ebcc58-b9e1-48a9-97c1-ed86d2ec80d0"). InnerVolumeSpecName "openstack-config-secret". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:48:00 crc kubenswrapper[4558]: I0120 17:48:00.001374 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2ebcc58-b9e1-48a9-97c1-ed86d2ec80d0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:00 crc kubenswrapper[4558]: I0120 17:48:00.001431 4558 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/f2ebcc58-b9e1-48a9-97c1-ed86d2ec80d0-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:00 crc kubenswrapper[4558]: I0120 17:48:00.353199 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:48:00 crc kubenswrapper[4558]: I0120 17:48:00.580585 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f2ebcc58-b9e1-48a9-97c1-ed86d2ec80d0" path="/var/lib/kubelet/pods/f2ebcc58-b9e1-48a9-97c1-ed86d2ec80d0/volumes" Jan 20 17:48:00 crc kubenswrapper[4558]: I0120 17:48:00.806657 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstackclient" Jan 20 17:48:00 crc kubenswrapper[4558]: I0120 17:48:00.806658 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstackclient" event={"ID":"a0ddca73-7f97-4ec0-9a04-4686f84eb2e6","Type":"ContainerStarted","Data":"13f594567a5a674fbc4ad2781a948fc685f50566f533b0772fdb8535db472a29"} Jan 20 17:48:00 crc kubenswrapper[4558]: I0120 17:48:00.807003 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstackclient" event={"ID":"a0ddca73-7f97-4ec0-9a04-4686f84eb2e6","Type":"ContainerStarted","Data":"28e2fdbc7e2d77a562eea6cdf738c1eb030abfdfaa6070839f12eb6135f7d2ae"} Jan 20 17:48:00 crc kubenswrapper[4558]: I0120 17:48:00.812460 4558 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack-kuttl-tests/openstackclient" oldPodUID="f2ebcc58-b9e1-48a9-97c1-ed86d2ec80d0" podUID="a0ddca73-7f97-4ec0-9a04-4686f84eb2e6" Jan 20 17:48:00 crc kubenswrapper[4558]: I0120 17:48:00.847653 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/openstackclient" podStartSLOduration=1.8476306070000001 podStartE2EDuration="1.847630607s" podCreationTimestamp="2026-01-20 17:47:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:48:00.837866196 +0000 UTC m=+3974.598204162" watchObservedRunningTime="2026-01-20 17:48:00.847630607 +0000 UTC m=+3974.607968574" Jan 20 17:48:00 crc kubenswrapper[4558]: I0120 17:48:00.902387 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:48:00 crc kubenswrapper[4558]: I0120 17:48:00.902622 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="e45db124-f220-41eb-b1d0-d82affa8be89" containerName="ceilometer-central-agent" containerID="cri-o://49f438b00e8f6bc182801aa4facd16c61d68d52ed9eb66fae006e768b82b5e54" gracePeriod=30 Jan 20 17:48:00 crc kubenswrapper[4558]: I0120 17:48:00.903028 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="e45db124-f220-41eb-b1d0-d82affa8be89" containerName="proxy-httpd" 
containerID="cri-o://c119d9682719148604dfa2a2fefe97cd5eb3a8f7b6356747042f0cb8ea62b412" gracePeriod=30 Jan 20 17:48:00 crc kubenswrapper[4558]: I0120 17:48:00.903099 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="e45db124-f220-41eb-b1d0-d82affa8be89" containerName="sg-core" containerID="cri-o://87c9036ed520e0ac775579ea9684b4e9331125999144fd57f7db606b1d4cfeee" gracePeriod=30 Jan 20 17:48:00 crc kubenswrapper[4558]: I0120 17:48:00.903141 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="e45db124-f220-41eb-b1d0-d82affa8be89" containerName="ceilometer-notification-agent" containerID="cri-o://c3f0f48e19453567cc266442915bb53d5a8b0fd16b12f32fe41cf8a3cc87bb16" gracePeriod=30 Jan 20 17:48:01 crc kubenswrapper[4558]: I0120 17:48:01.821438 4558 generic.go:334] "Generic (PLEG): container finished" podID="e45db124-f220-41eb-b1d0-d82affa8be89" containerID="c119d9682719148604dfa2a2fefe97cd5eb3a8f7b6356747042f0cb8ea62b412" exitCode=0 Jan 20 17:48:01 crc kubenswrapper[4558]: I0120 17:48:01.821478 4558 generic.go:334] "Generic (PLEG): container finished" podID="e45db124-f220-41eb-b1d0-d82affa8be89" containerID="87c9036ed520e0ac775579ea9684b4e9331125999144fd57f7db606b1d4cfeee" exitCode=2 Jan 20 17:48:01 crc kubenswrapper[4558]: I0120 17:48:01.821488 4558 generic.go:334] "Generic (PLEG): container finished" podID="e45db124-f220-41eb-b1d0-d82affa8be89" containerID="49f438b00e8f6bc182801aa4facd16c61d68d52ed9eb66fae006e768b82b5e54" exitCode=0 Jan 20 17:48:01 crc kubenswrapper[4558]: I0120 17:48:01.822992 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"e45db124-f220-41eb-b1d0-d82affa8be89","Type":"ContainerDied","Data":"c119d9682719148604dfa2a2fefe97cd5eb3a8f7b6356747042f0cb8ea62b412"} Jan 20 17:48:01 crc kubenswrapper[4558]: I0120 17:48:01.823041 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"e45db124-f220-41eb-b1d0-d82affa8be89","Type":"ContainerDied","Data":"87c9036ed520e0ac775579ea9684b4e9331125999144fd57f7db606b1d4cfeee"} Jan 20 17:48:01 crc kubenswrapper[4558]: I0120 17:48:01.823055 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"e45db124-f220-41eb-b1d0-d82affa8be89","Type":"ContainerDied","Data":"49f438b00e8f6bc182801aa4facd16c61d68d52ed9eb66fae006e768b82b5e54"} Jan 20 17:48:02 crc kubenswrapper[4558]: I0120 17:48:02.424288 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/swift-proxy-67dccf5df6-qnbjc"] Jan 20 17:48:02 crc kubenswrapper[4558]: I0120 17:48:02.425743 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/swift-proxy-67dccf5df6-qnbjc" Jan 20 17:48:02 crc kubenswrapper[4558]: I0120 17:48:02.435036 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-swift-internal-svc" Jan 20 17:48:02 crc kubenswrapper[4558]: I0120 17:48:02.435269 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"swift-proxy-config-data" Jan 20 17:48:02 crc kubenswrapper[4558]: I0120 17:48:02.435476 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-swift-public-svc" Jan 20 17:48:02 crc kubenswrapper[4558]: I0120 17:48:02.436217 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-proxy-67dccf5df6-qnbjc"] Jan 20 17:48:02 crc kubenswrapper[4558]: I0120 17:48:02.453231 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6vbhv\" (UniqueName: \"kubernetes.io/projected/d824a74f-efe9-440c-8b03-f43303fb5923-kube-api-access-6vbhv\") pod \"swift-proxy-67dccf5df6-qnbjc\" (UID: \"d824a74f-efe9-440c-8b03-f43303fb5923\") " pod="openstack-kuttl-tests/swift-proxy-67dccf5df6-qnbjc" Jan 20 17:48:02 crc kubenswrapper[4558]: I0120 17:48:02.453409 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d824a74f-efe9-440c-8b03-f43303fb5923-run-httpd\") pod \"swift-proxy-67dccf5df6-qnbjc\" (UID: \"d824a74f-efe9-440c-8b03-f43303fb5923\") " pod="openstack-kuttl-tests/swift-proxy-67dccf5df6-qnbjc" Jan 20 17:48:02 crc kubenswrapper[4558]: I0120 17:48:02.453485 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/d824a74f-efe9-440c-8b03-f43303fb5923-etc-swift\") pod \"swift-proxy-67dccf5df6-qnbjc\" (UID: \"d824a74f-efe9-440c-8b03-f43303fb5923\") " pod="openstack-kuttl-tests/swift-proxy-67dccf5df6-qnbjc" Jan 20 17:48:02 crc kubenswrapper[4558]: I0120 17:48:02.453574 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d824a74f-efe9-440c-8b03-f43303fb5923-internal-tls-certs\") pod \"swift-proxy-67dccf5df6-qnbjc\" (UID: \"d824a74f-efe9-440c-8b03-f43303fb5923\") " pod="openstack-kuttl-tests/swift-proxy-67dccf5df6-qnbjc" Jan 20 17:48:02 crc kubenswrapper[4558]: I0120 17:48:02.453650 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d824a74f-efe9-440c-8b03-f43303fb5923-public-tls-certs\") pod \"swift-proxy-67dccf5df6-qnbjc\" (UID: \"d824a74f-efe9-440c-8b03-f43303fb5923\") " pod="openstack-kuttl-tests/swift-proxy-67dccf5df6-qnbjc" Jan 20 17:48:02 crc kubenswrapper[4558]: I0120 17:48:02.453720 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d824a74f-efe9-440c-8b03-f43303fb5923-combined-ca-bundle\") pod \"swift-proxy-67dccf5df6-qnbjc\" (UID: \"d824a74f-efe9-440c-8b03-f43303fb5923\") " pod="openstack-kuttl-tests/swift-proxy-67dccf5df6-qnbjc" Jan 20 17:48:02 crc kubenswrapper[4558]: I0120 17:48:02.453791 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/d824a74f-efe9-440c-8b03-f43303fb5923-config-data\") pod \"swift-proxy-67dccf5df6-qnbjc\" (UID: \"d824a74f-efe9-440c-8b03-f43303fb5923\") " pod="openstack-kuttl-tests/swift-proxy-67dccf5df6-qnbjc" Jan 20 17:48:02 crc kubenswrapper[4558]: I0120 17:48:02.453872 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d824a74f-efe9-440c-8b03-f43303fb5923-log-httpd\") pod \"swift-proxy-67dccf5df6-qnbjc\" (UID: \"d824a74f-efe9-440c-8b03-f43303fb5923\") " pod="openstack-kuttl-tests/swift-proxy-67dccf5df6-qnbjc" Jan 20 17:48:02 crc kubenswrapper[4558]: I0120 17:48:02.555764 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6vbhv\" (UniqueName: \"kubernetes.io/projected/d824a74f-efe9-440c-8b03-f43303fb5923-kube-api-access-6vbhv\") pod \"swift-proxy-67dccf5df6-qnbjc\" (UID: \"d824a74f-efe9-440c-8b03-f43303fb5923\") " pod="openstack-kuttl-tests/swift-proxy-67dccf5df6-qnbjc" Jan 20 17:48:02 crc kubenswrapper[4558]: I0120 17:48:02.555863 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d824a74f-efe9-440c-8b03-f43303fb5923-run-httpd\") pod \"swift-proxy-67dccf5df6-qnbjc\" (UID: \"d824a74f-efe9-440c-8b03-f43303fb5923\") " pod="openstack-kuttl-tests/swift-proxy-67dccf5df6-qnbjc" Jan 20 17:48:02 crc kubenswrapper[4558]: I0120 17:48:02.555887 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/d824a74f-efe9-440c-8b03-f43303fb5923-etc-swift\") pod \"swift-proxy-67dccf5df6-qnbjc\" (UID: \"d824a74f-efe9-440c-8b03-f43303fb5923\") " pod="openstack-kuttl-tests/swift-proxy-67dccf5df6-qnbjc" Jan 20 17:48:02 crc kubenswrapper[4558]: I0120 17:48:02.555934 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d824a74f-efe9-440c-8b03-f43303fb5923-internal-tls-certs\") pod \"swift-proxy-67dccf5df6-qnbjc\" (UID: \"d824a74f-efe9-440c-8b03-f43303fb5923\") " pod="openstack-kuttl-tests/swift-proxy-67dccf5df6-qnbjc" Jan 20 17:48:02 crc kubenswrapper[4558]: I0120 17:48:02.555972 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d824a74f-efe9-440c-8b03-f43303fb5923-public-tls-certs\") pod \"swift-proxy-67dccf5df6-qnbjc\" (UID: \"d824a74f-efe9-440c-8b03-f43303fb5923\") " pod="openstack-kuttl-tests/swift-proxy-67dccf5df6-qnbjc" Jan 20 17:48:02 crc kubenswrapper[4558]: I0120 17:48:02.555998 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d824a74f-efe9-440c-8b03-f43303fb5923-combined-ca-bundle\") pod \"swift-proxy-67dccf5df6-qnbjc\" (UID: \"d824a74f-efe9-440c-8b03-f43303fb5923\") " pod="openstack-kuttl-tests/swift-proxy-67dccf5df6-qnbjc" Jan 20 17:48:02 crc kubenswrapper[4558]: I0120 17:48:02.556030 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d824a74f-efe9-440c-8b03-f43303fb5923-config-data\") pod \"swift-proxy-67dccf5df6-qnbjc\" (UID: \"d824a74f-efe9-440c-8b03-f43303fb5923\") " pod="openstack-kuttl-tests/swift-proxy-67dccf5df6-qnbjc" Jan 20 17:48:02 crc kubenswrapper[4558]: I0120 17:48:02.556076 4558 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d824a74f-efe9-440c-8b03-f43303fb5923-log-httpd\") pod \"swift-proxy-67dccf5df6-qnbjc\" (UID: \"d824a74f-efe9-440c-8b03-f43303fb5923\") " pod="openstack-kuttl-tests/swift-proxy-67dccf5df6-qnbjc" Jan 20 17:48:02 crc kubenswrapper[4558]: I0120 17:48:02.556483 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d824a74f-efe9-440c-8b03-f43303fb5923-log-httpd\") pod \"swift-proxy-67dccf5df6-qnbjc\" (UID: \"d824a74f-efe9-440c-8b03-f43303fb5923\") " pod="openstack-kuttl-tests/swift-proxy-67dccf5df6-qnbjc" Jan 20 17:48:02 crc kubenswrapper[4558]: I0120 17:48:02.556517 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d824a74f-efe9-440c-8b03-f43303fb5923-run-httpd\") pod \"swift-proxy-67dccf5df6-qnbjc\" (UID: \"d824a74f-efe9-440c-8b03-f43303fb5923\") " pod="openstack-kuttl-tests/swift-proxy-67dccf5df6-qnbjc" Jan 20 17:48:02 crc kubenswrapper[4558]: I0120 17:48:02.562002 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d824a74f-efe9-440c-8b03-f43303fb5923-config-data\") pod \"swift-proxy-67dccf5df6-qnbjc\" (UID: \"d824a74f-efe9-440c-8b03-f43303fb5923\") " pod="openstack-kuttl-tests/swift-proxy-67dccf5df6-qnbjc" Jan 20 17:48:02 crc kubenswrapper[4558]: I0120 17:48:02.562371 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d824a74f-efe9-440c-8b03-f43303fb5923-combined-ca-bundle\") pod \"swift-proxy-67dccf5df6-qnbjc\" (UID: \"d824a74f-efe9-440c-8b03-f43303fb5923\") " pod="openstack-kuttl-tests/swift-proxy-67dccf5df6-qnbjc" Jan 20 17:48:02 crc kubenswrapper[4558]: I0120 17:48:02.563264 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d824a74f-efe9-440c-8b03-f43303fb5923-public-tls-certs\") pod \"swift-proxy-67dccf5df6-qnbjc\" (UID: \"d824a74f-efe9-440c-8b03-f43303fb5923\") " pod="openstack-kuttl-tests/swift-proxy-67dccf5df6-qnbjc" Jan 20 17:48:02 crc kubenswrapper[4558]: I0120 17:48:02.563708 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d824a74f-efe9-440c-8b03-f43303fb5923-internal-tls-certs\") pod \"swift-proxy-67dccf5df6-qnbjc\" (UID: \"d824a74f-efe9-440c-8b03-f43303fb5923\") " pod="openstack-kuttl-tests/swift-proxy-67dccf5df6-qnbjc" Jan 20 17:48:02 crc kubenswrapper[4558]: I0120 17:48:02.571599 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/d824a74f-efe9-440c-8b03-f43303fb5923-etc-swift\") pod \"swift-proxy-67dccf5df6-qnbjc\" (UID: \"d824a74f-efe9-440c-8b03-f43303fb5923\") " pod="openstack-kuttl-tests/swift-proxy-67dccf5df6-qnbjc" Jan 20 17:48:02 crc kubenswrapper[4558]: I0120 17:48:02.573110 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6vbhv\" (UniqueName: \"kubernetes.io/projected/d824a74f-efe9-440c-8b03-f43303fb5923-kube-api-access-6vbhv\") pod \"swift-proxy-67dccf5df6-qnbjc\" (UID: \"d824a74f-efe9-440c-8b03-f43303fb5923\") " pod="openstack-kuttl-tests/swift-proxy-67dccf5df6-qnbjc" Jan 20 17:48:02 crc kubenswrapper[4558]: I0120 17:48:02.754444 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/swift-proxy-67dccf5df6-qnbjc" Jan 20 17:48:02 crc kubenswrapper[4558]: I0120 17:48:02.847148 4558 generic.go:334] "Generic (PLEG): container finished" podID="e45db124-f220-41eb-b1d0-d82affa8be89" containerID="c3f0f48e19453567cc266442915bb53d5a8b0fd16b12f32fe41cf8a3cc87bb16" exitCode=0 Jan 20 17:48:02 crc kubenswrapper[4558]: I0120 17:48:02.847236 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"e45db124-f220-41eb-b1d0-d82affa8be89","Type":"ContainerDied","Data":"c3f0f48e19453567cc266442915bb53d5a8b0fd16b12f32fe41cf8a3cc87bb16"} Jan 20 17:48:02 crc kubenswrapper[4558]: I0120 17:48:02.946155 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:02 crc kubenswrapper[4558]: I0120 17:48:02.968902 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e45db124-f220-41eb-b1d0-d82affa8be89-log-httpd\") pod \"e45db124-f220-41eb-b1d0-d82affa8be89\" (UID: \"e45db124-f220-41eb-b1d0-d82affa8be89\") " Jan 20 17:48:02 crc kubenswrapper[4558]: I0120 17:48:02.968965 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e45db124-f220-41eb-b1d0-d82affa8be89-sg-core-conf-yaml\") pod \"e45db124-f220-41eb-b1d0-d82affa8be89\" (UID: \"e45db124-f220-41eb-b1d0-d82affa8be89\") " Jan 20 17:48:02 crc kubenswrapper[4558]: I0120 17:48:02.968987 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e45db124-f220-41eb-b1d0-d82affa8be89-combined-ca-bundle\") pod \"e45db124-f220-41eb-b1d0-d82affa8be89\" (UID: \"e45db124-f220-41eb-b1d0-d82affa8be89\") " Jan 20 17:48:02 crc kubenswrapper[4558]: I0120 17:48:02.969010 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e45db124-f220-41eb-b1d0-d82affa8be89-scripts\") pod \"e45db124-f220-41eb-b1d0-d82affa8be89\" (UID: \"e45db124-f220-41eb-b1d0-d82affa8be89\") " Jan 20 17:48:02 crc kubenswrapper[4558]: I0120 17:48:02.969198 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e45db124-f220-41eb-b1d0-d82affa8be89-run-httpd\") pod \"e45db124-f220-41eb-b1d0-d82affa8be89\" (UID: \"e45db124-f220-41eb-b1d0-d82affa8be89\") " Jan 20 17:48:02 crc kubenswrapper[4558]: I0120 17:48:02.969305 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7sksf\" (UniqueName: \"kubernetes.io/projected/e45db124-f220-41eb-b1d0-d82affa8be89-kube-api-access-7sksf\") pod \"e45db124-f220-41eb-b1d0-d82affa8be89\" (UID: \"e45db124-f220-41eb-b1d0-d82affa8be89\") " Jan 20 17:48:02 crc kubenswrapper[4558]: I0120 17:48:02.969329 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e45db124-f220-41eb-b1d0-d82affa8be89-config-data\") pod \"e45db124-f220-41eb-b1d0-d82affa8be89\" (UID: \"e45db124-f220-41eb-b1d0-d82affa8be89\") " Jan 20 17:48:02 crc kubenswrapper[4558]: I0120 17:48:02.971150 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e45db124-f220-41eb-b1d0-d82affa8be89-run-httpd" (OuterVolumeSpecName: "run-httpd") pod 
"e45db124-f220-41eb-b1d0-d82affa8be89" (UID: "e45db124-f220-41eb-b1d0-d82affa8be89"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:48:02 crc kubenswrapper[4558]: I0120 17:48:02.973638 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e45db124-f220-41eb-b1d0-d82affa8be89-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "e45db124-f220-41eb-b1d0-d82affa8be89" (UID: "e45db124-f220-41eb-b1d0-d82affa8be89"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:48:02 crc kubenswrapper[4558]: I0120 17:48:02.976344 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e45db124-f220-41eb-b1d0-d82affa8be89-kube-api-access-7sksf" (OuterVolumeSpecName: "kube-api-access-7sksf") pod "e45db124-f220-41eb-b1d0-d82affa8be89" (UID: "e45db124-f220-41eb-b1d0-d82affa8be89"). InnerVolumeSpecName "kube-api-access-7sksf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:48:02 crc kubenswrapper[4558]: I0120 17:48:02.976494 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e45db124-f220-41eb-b1d0-d82affa8be89-scripts" (OuterVolumeSpecName: "scripts") pod "e45db124-f220-41eb-b1d0-d82affa8be89" (UID: "e45db124-f220-41eb-b1d0-d82affa8be89"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:48:02 crc kubenswrapper[4558]: I0120 17:48:02.997440 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e45db124-f220-41eb-b1d0-d82affa8be89-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "e45db124-f220-41eb-b1d0-d82affa8be89" (UID: "e45db124-f220-41eb-b1d0-d82affa8be89"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:48:03 crc kubenswrapper[4558]: I0120 17:48:03.045633 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e45db124-f220-41eb-b1d0-d82affa8be89-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e45db124-f220-41eb-b1d0-d82affa8be89" (UID: "e45db124-f220-41eb-b1d0-d82affa8be89"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:48:03 crc kubenswrapper[4558]: I0120 17:48:03.055397 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e45db124-f220-41eb-b1d0-d82affa8be89-config-data" (OuterVolumeSpecName: "config-data") pod "e45db124-f220-41eb-b1d0-d82affa8be89" (UID: "e45db124-f220-41eb-b1d0-d82affa8be89"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:48:03 crc kubenswrapper[4558]: I0120 17:48:03.072137 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e45db124-f220-41eb-b1d0-d82affa8be89-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:03 crc kubenswrapper[4558]: I0120 17:48:03.072179 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7sksf\" (UniqueName: \"kubernetes.io/projected/e45db124-f220-41eb-b1d0-d82affa8be89-kube-api-access-7sksf\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:03 crc kubenswrapper[4558]: I0120 17:48:03.072192 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e45db124-f220-41eb-b1d0-d82affa8be89-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:03 crc kubenswrapper[4558]: I0120 17:48:03.072202 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e45db124-f220-41eb-b1d0-d82affa8be89-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:03 crc kubenswrapper[4558]: I0120 17:48:03.072210 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e45db124-f220-41eb-b1d0-d82affa8be89-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:03 crc kubenswrapper[4558]: I0120 17:48:03.072219 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e45db124-f220-41eb-b1d0-d82affa8be89-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:03 crc kubenswrapper[4558]: I0120 17:48:03.072227 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e45db124-f220-41eb-b1d0-d82affa8be89-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:03 crc kubenswrapper[4558]: I0120 17:48:03.221643 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-proxy-67dccf5df6-qnbjc"] Jan 20 17:48:03 crc kubenswrapper[4558]: I0120 17:48:03.404439 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-5696778669-nc8rp" Jan 20 17:48:03 crc kubenswrapper[4558]: I0120 17:48:03.480235 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0ce81a9-0d35-4813-b16a-74b5757bbffa-combined-ca-bundle\") pod \"f0ce81a9-0d35-4813-b16a-74b5757bbffa\" (UID: \"f0ce81a9-0d35-4813-b16a-74b5757bbffa\") " Jan 20 17:48:03 crc kubenswrapper[4558]: I0120 17:48:03.480381 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/f0ce81a9-0d35-4813-b16a-74b5757bbffa-config\") pod \"f0ce81a9-0d35-4813-b16a-74b5757bbffa\" (UID: \"f0ce81a9-0d35-4813-b16a-74b5757bbffa\") " Jan 20 17:48:03 crc kubenswrapper[4558]: I0120 17:48:03.480442 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/f0ce81a9-0d35-4813-b16a-74b5757bbffa-ovndb-tls-certs\") pod \"f0ce81a9-0d35-4813-b16a-74b5757bbffa\" (UID: \"f0ce81a9-0d35-4813-b16a-74b5757bbffa\") " Jan 20 17:48:03 crc kubenswrapper[4558]: I0120 17:48:03.480539 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6k5lb\" (UniqueName: \"kubernetes.io/projected/f0ce81a9-0d35-4813-b16a-74b5757bbffa-kube-api-access-6k5lb\") pod \"f0ce81a9-0d35-4813-b16a-74b5757bbffa\" (UID: \"f0ce81a9-0d35-4813-b16a-74b5757bbffa\") " Jan 20 17:48:03 crc kubenswrapper[4558]: I0120 17:48:03.480601 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/f0ce81a9-0d35-4813-b16a-74b5757bbffa-httpd-config\") pod \"f0ce81a9-0d35-4813-b16a-74b5757bbffa\" (UID: \"f0ce81a9-0d35-4813-b16a-74b5757bbffa\") " Jan 20 17:48:03 crc kubenswrapper[4558]: I0120 17:48:03.483971 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f0ce81a9-0d35-4813-b16a-74b5757bbffa-kube-api-access-6k5lb" (OuterVolumeSpecName: "kube-api-access-6k5lb") pod "f0ce81a9-0d35-4813-b16a-74b5757bbffa" (UID: "f0ce81a9-0d35-4813-b16a-74b5757bbffa"). InnerVolumeSpecName "kube-api-access-6k5lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:48:03 crc kubenswrapper[4558]: I0120 17:48:03.485677 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f0ce81a9-0d35-4813-b16a-74b5757bbffa-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "f0ce81a9-0d35-4813-b16a-74b5757bbffa" (UID: "f0ce81a9-0d35-4813-b16a-74b5757bbffa"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:48:03 crc kubenswrapper[4558]: I0120 17:48:03.533469 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f0ce81a9-0d35-4813-b16a-74b5757bbffa-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f0ce81a9-0d35-4813-b16a-74b5757bbffa" (UID: "f0ce81a9-0d35-4813-b16a-74b5757bbffa"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:48:03 crc kubenswrapper[4558]: I0120 17:48:03.536740 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f0ce81a9-0d35-4813-b16a-74b5757bbffa-config" (OuterVolumeSpecName: "config") pod "f0ce81a9-0d35-4813-b16a-74b5757bbffa" (UID: "f0ce81a9-0d35-4813-b16a-74b5757bbffa"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:48:03 crc kubenswrapper[4558]: I0120 17:48:03.547420 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f0ce81a9-0d35-4813-b16a-74b5757bbffa-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "f0ce81a9-0d35-4813-b16a-74b5757bbffa" (UID: "f0ce81a9-0d35-4813-b16a-74b5757bbffa"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:48:03 crc kubenswrapper[4558]: I0120 17:48:03.584743 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/f0ce81a9-0d35-4813-b16a-74b5757bbffa-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:03 crc kubenswrapper[4558]: I0120 17:48:03.584772 4558 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/f0ce81a9-0d35-4813-b16a-74b5757bbffa-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:03 crc kubenswrapper[4558]: I0120 17:48:03.584786 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6k5lb\" (UniqueName: \"kubernetes.io/projected/f0ce81a9-0d35-4813-b16a-74b5757bbffa-kube-api-access-6k5lb\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:03 crc kubenswrapper[4558]: I0120 17:48:03.584797 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/f0ce81a9-0d35-4813-b16a-74b5757bbffa-httpd-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:03 crc kubenswrapper[4558]: I0120 17:48:03.584809 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f0ce81a9-0d35-4813-b16a-74b5757bbffa-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:03 crc kubenswrapper[4558]: I0120 17:48:03.861174 4558 generic.go:334] "Generic (PLEG): container finished" podID="f0ce81a9-0d35-4813-b16a-74b5757bbffa" containerID="586bbbff36fe57de7cd9f2ecae0bec8f27031f35667892a8eb2376c25bb37cdf" exitCode=0 Jan 20 17:48:03 crc kubenswrapper[4558]: I0120 17:48:03.861217 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-5696778669-nc8rp" event={"ID":"f0ce81a9-0d35-4813-b16a-74b5757bbffa","Type":"ContainerDied","Data":"586bbbff36fe57de7cd9f2ecae0bec8f27031f35667892a8eb2376c25bb37cdf"} Jan 20 17:48:03 crc kubenswrapper[4558]: I0120 17:48:03.862529 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-5696778669-nc8rp" event={"ID":"f0ce81a9-0d35-4813-b16a-74b5757bbffa","Type":"ContainerDied","Data":"c2c7945d7cc7ff591f208d7328412753fdddc6fe05b44155564b515f6902a9e7"} Jan 20 17:48:03 crc kubenswrapper[4558]: I0120 17:48:03.861288 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-5696778669-nc8rp" Jan 20 17:48:03 crc kubenswrapper[4558]: I0120 17:48:03.862604 4558 scope.go:117] "RemoveContainer" containerID="ce80cef01c83d809bb3070c10cc026abf5f54be5422d252f6f5f31c47b21a1bb" Jan 20 17:48:03 crc kubenswrapper[4558]: I0120 17:48:03.866147 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-67dccf5df6-qnbjc" event={"ID":"d824a74f-efe9-440c-8b03-f43303fb5923","Type":"ContainerStarted","Data":"b7ac154d8dd8a84fc960440afd7af5e40789946075eb75c3e21c3d8f3ab318a4"} Jan 20 17:48:03 crc kubenswrapper[4558]: I0120 17:48:03.866257 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-67dccf5df6-qnbjc" event={"ID":"d824a74f-efe9-440c-8b03-f43303fb5923","Type":"ContainerStarted","Data":"29b09261b655df3694ff09435f242a8d50d59951e389e134a69944af2e32b744"} Jan 20 17:48:03 crc kubenswrapper[4558]: I0120 17:48:03.866275 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-67dccf5df6-qnbjc" event={"ID":"d824a74f-efe9-440c-8b03-f43303fb5923","Type":"ContainerStarted","Data":"d24aac2e275e31d037fd1782ec9d6823bbbc2085a7f02e62eb8fc6928ce4e3bf"} Jan 20 17:48:03 crc kubenswrapper[4558]: I0120 17:48:03.866436 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/swift-proxy-67dccf5df6-qnbjc" Jan 20 17:48:03 crc kubenswrapper[4558]: I0120 17:48:03.866517 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/swift-proxy-67dccf5df6-qnbjc" Jan 20 17:48:03 crc kubenswrapper[4558]: I0120 17:48:03.873655 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"e45db124-f220-41eb-b1d0-d82affa8be89","Type":"ContainerDied","Data":"4db4f17b363449cec174ff9cbaca83a847dd8377727ce3c4f4b2ca5af1c49fa6"} Jan 20 17:48:03 crc kubenswrapper[4558]: I0120 17:48:03.873730 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:03 crc kubenswrapper[4558]: I0120 17:48:03.903104 4558 scope.go:117] "RemoveContainer" containerID="586bbbff36fe57de7cd9f2ecae0bec8f27031f35667892a8eb2376c25bb37cdf" Jan 20 17:48:03 crc kubenswrapper[4558]: I0120 17:48:03.907419 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/swift-proxy-67dccf5df6-qnbjc" podStartSLOduration=1.907401418 podStartE2EDuration="1.907401418s" podCreationTimestamp="2026-01-20 17:48:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:48:03.892378801 +0000 UTC m=+3977.652716768" watchObservedRunningTime="2026-01-20 17:48:03.907401418 +0000 UTC m=+3977.667739386" Jan 20 17:48:03 crc kubenswrapper[4558]: I0120 17:48:03.924343 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-5696778669-nc8rp"] Jan 20 17:48:03 crc kubenswrapper[4558]: I0120 17:48:03.929775 4558 scope.go:117] "RemoveContainer" containerID="ce80cef01c83d809bb3070c10cc026abf5f54be5422d252f6f5f31c47b21a1bb" Jan 20 17:48:03 crc kubenswrapper[4558]: E0120 17:48:03.930048 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ce80cef01c83d809bb3070c10cc026abf5f54be5422d252f6f5f31c47b21a1bb\": container with ID starting with ce80cef01c83d809bb3070c10cc026abf5f54be5422d252f6f5f31c47b21a1bb not found: ID does not exist" containerID="ce80cef01c83d809bb3070c10cc026abf5f54be5422d252f6f5f31c47b21a1bb" Jan 20 17:48:03 crc kubenswrapper[4558]: I0120 17:48:03.930088 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ce80cef01c83d809bb3070c10cc026abf5f54be5422d252f6f5f31c47b21a1bb"} err="failed to get container status \"ce80cef01c83d809bb3070c10cc026abf5f54be5422d252f6f5f31c47b21a1bb\": rpc error: code = NotFound desc = could not find container \"ce80cef01c83d809bb3070c10cc026abf5f54be5422d252f6f5f31c47b21a1bb\": container with ID starting with ce80cef01c83d809bb3070c10cc026abf5f54be5422d252f6f5f31c47b21a1bb not found: ID does not exist" Jan 20 17:48:03 crc kubenswrapper[4558]: I0120 17:48:03.930110 4558 scope.go:117] "RemoveContainer" containerID="586bbbff36fe57de7cd9f2ecae0bec8f27031f35667892a8eb2376c25bb37cdf" Jan 20 17:48:03 crc kubenswrapper[4558]: E0120 17:48:03.930305 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"586bbbff36fe57de7cd9f2ecae0bec8f27031f35667892a8eb2376c25bb37cdf\": container with ID starting with 586bbbff36fe57de7cd9f2ecae0bec8f27031f35667892a8eb2376c25bb37cdf not found: ID does not exist" containerID="586bbbff36fe57de7cd9f2ecae0bec8f27031f35667892a8eb2376c25bb37cdf" Jan 20 17:48:03 crc kubenswrapper[4558]: I0120 17:48:03.930325 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"586bbbff36fe57de7cd9f2ecae0bec8f27031f35667892a8eb2376c25bb37cdf"} err="failed to get container status \"586bbbff36fe57de7cd9f2ecae0bec8f27031f35667892a8eb2376c25bb37cdf\": rpc error: code = NotFound desc = could not find container \"586bbbff36fe57de7cd9f2ecae0bec8f27031f35667892a8eb2376c25bb37cdf\": container with ID starting with 586bbbff36fe57de7cd9f2ecae0bec8f27031f35667892a8eb2376c25bb37cdf not found: ID does not exist" Jan 20 17:48:03 crc kubenswrapper[4558]: I0120 17:48:03.930339 4558 scope.go:117] 
"RemoveContainer" containerID="c119d9682719148604dfa2a2fefe97cd5eb3a8f7b6356747042f0cb8ea62b412" Jan 20 17:48:03 crc kubenswrapper[4558]: I0120 17:48:03.952411 4558 scope.go:117] "RemoveContainer" containerID="87c9036ed520e0ac775579ea9684b4e9331125999144fd57f7db606b1d4cfeee" Jan 20 17:48:03 crc kubenswrapper[4558]: I0120 17:48:03.954191 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-5696778669-nc8rp"] Jan 20 17:48:03 crc kubenswrapper[4558]: I0120 17:48:03.964963 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:48:03 crc kubenswrapper[4558]: I0120 17:48:03.972680 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:48:03 crc kubenswrapper[4558]: I0120 17:48:03.977978 4558 scope.go:117] "RemoveContainer" containerID="c3f0f48e19453567cc266442915bb53d5a8b0fd16b12f32fe41cf8a3cc87bb16" Jan 20 17:48:03 crc kubenswrapper[4558]: I0120 17:48:03.980506 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:48:03 crc kubenswrapper[4558]: E0120 17:48:03.980922 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e45db124-f220-41eb-b1d0-d82affa8be89" containerName="proxy-httpd" Jan 20 17:48:03 crc kubenswrapper[4558]: I0120 17:48:03.980942 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e45db124-f220-41eb-b1d0-d82affa8be89" containerName="proxy-httpd" Jan 20 17:48:03 crc kubenswrapper[4558]: E0120 17:48:03.980968 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e45db124-f220-41eb-b1d0-d82affa8be89" containerName="ceilometer-central-agent" Jan 20 17:48:03 crc kubenswrapper[4558]: I0120 17:48:03.980975 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e45db124-f220-41eb-b1d0-d82affa8be89" containerName="ceilometer-central-agent" Jan 20 17:48:03 crc kubenswrapper[4558]: E0120 17:48:03.980991 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e45db124-f220-41eb-b1d0-d82affa8be89" containerName="ceilometer-notification-agent" Jan 20 17:48:03 crc kubenswrapper[4558]: I0120 17:48:03.980996 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e45db124-f220-41eb-b1d0-d82affa8be89" containerName="ceilometer-notification-agent" Jan 20 17:48:03 crc kubenswrapper[4558]: E0120 17:48:03.981013 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e45db124-f220-41eb-b1d0-d82affa8be89" containerName="sg-core" Jan 20 17:48:03 crc kubenswrapper[4558]: I0120 17:48:03.981018 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e45db124-f220-41eb-b1d0-d82affa8be89" containerName="sg-core" Jan 20 17:48:03 crc kubenswrapper[4558]: E0120 17:48:03.981030 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0ce81a9-0d35-4813-b16a-74b5757bbffa" containerName="neutron-api" Jan 20 17:48:03 crc kubenswrapper[4558]: I0120 17:48:03.981036 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0ce81a9-0d35-4813-b16a-74b5757bbffa" containerName="neutron-api" Jan 20 17:48:03 crc kubenswrapper[4558]: E0120 17:48:03.981045 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0ce81a9-0d35-4813-b16a-74b5757bbffa" containerName="neutron-httpd" Jan 20 17:48:03 crc kubenswrapper[4558]: I0120 17:48:03.981051 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0ce81a9-0d35-4813-b16a-74b5757bbffa" containerName="neutron-httpd" Jan 20 17:48:03 crc kubenswrapper[4558]: I0120 
17:48:03.981228 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e45db124-f220-41eb-b1d0-d82affa8be89" containerName="ceilometer-central-agent" Jan 20 17:48:03 crc kubenswrapper[4558]: I0120 17:48:03.981243 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f0ce81a9-0d35-4813-b16a-74b5757bbffa" containerName="neutron-httpd" Jan 20 17:48:03 crc kubenswrapper[4558]: I0120 17:48:03.981255 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e45db124-f220-41eb-b1d0-d82affa8be89" containerName="ceilometer-notification-agent" Jan 20 17:48:03 crc kubenswrapper[4558]: I0120 17:48:03.981265 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f0ce81a9-0d35-4813-b16a-74b5757bbffa" containerName="neutron-api" Jan 20 17:48:03 crc kubenswrapper[4558]: I0120 17:48:03.981279 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e45db124-f220-41eb-b1d0-d82affa8be89" containerName="sg-core" Jan 20 17:48:03 crc kubenswrapper[4558]: I0120 17:48:03.981288 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e45db124-f220-41eb-b1d0-d82affa8be89" containerName="proxy-httpd" Jan 20 17:48:03 crc kubenswrapper[4558]: I0120 17:48:03.982791 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:03 crc kubenswrapper[4558]: I0120 17:48:03.985077 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:48:03 crc kubenswrapper[4558]: I0120 17:48:03.985243 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:48:03 crc kubenswrapper[4558]: I0120 17:48:03.988061 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:48:04 crc kubenswrapper[4558]: I0120 17:48:04.004468 4558 scope.go:117] "RemoveContainer" containerID="49f438b00e8f6bc182801aa4facd16c61d68d52ed9eb66fae006e768b82b5e54" Jan 20 17:48:04 crc kubenswrapper[4558]: I0120 17:48:04.005989 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ab79df37-6e1e-48e4-9d74-1ca46c4ca054-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ab79df37-6e1e-48e4-9d74-1ca46c4ca054\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:04 crc kubenswrapper[4558]: I0120 17:48:04.006070 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ab79df37-6e1e-48e4-9d74-1ca46c4ca054-log-httpd\") pod \"ceilometer-0\" (UID: \"ab79df37-6e1e-48e4-9d74-1ca46c4ca054\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:04 crc kubenswrapper[4558]: I0120 17:48:04.006216 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab79df37-6e1e-48e4-9d74-1ca46c4ca054-config-data\") pod \"ceilometer-0\" (UID: \"ab79df37-6e1e-48e4-9d74-1ca46c4ca054\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:04 crc kubenswrapper[4558]: I0120 17:48:04.006260 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ab79df37-6e1e-48e4-9d74-1ca46c4ca054-scripts\") pod \"ceilometer-0\" (UID: \"ab79df37-6e1e-48e4-9d74-1ca46c4ca054\") " 
pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:04 crc kubenswrapper[4558]: I0120 17:48:04.006348 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ab79df37-6e1e-48e4-9d74-1ca46c4ca054-run-httpd\") pod \"ceilometer-0\" (UID: \"ab79df37-6e1e-48e4-9d74-1ca46c4ca054\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:04 crc kubenswrapper[4558]: I0120 17:48:04.006385 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab79df37-6e1e-48e4-9d74-1ca46c4ca054-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ab79df37-6e1e-48e4-9d74-1ca46c4ca054\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:04 crc kubenswrapper[4558]: I0120 17:48:04.006444 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-288ms\" (UniqueName: \"kubernetes.io/projected/ab79df37-6e1e-48e4-9d74-1ca46c4ca054-kube-api-access-288ms\") pod \"ceilometer-0\" (UID: \"ab79df37-6e1e-48e4-9d74-1ca46c4ca054\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:04 crc kubenswrapper[4558]: I0120 17:48:04.107889 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ab79df37-6e1e-48e4-9d74-1ca46c4ca054-run-httpd\") pod \"ceilometer-0\" (UID: \"ab79df37-6e1e-48e4-9d74-1ca46c4ca054\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:04 crc kubenswrapper[4558]: I0120 17:48:04.108144 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab79df37-6e1e-48e4-9d74-1ca46c4ca054-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ab79df37-6e1e-48e4-9d74-1ca46c4ca054\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:04 crc kubenswrapper[4558]: I0120 17:48:04.108234 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-288ms\" (UniqueName: \"kubernetes.io/projected/ab79df37-6e1e-48e4-9d74-1ca46c4ca054-kube-api-access-288ms\") pod \"ceilometer-0\" (UID: \"ab79df37-6e1e-48e4-9d74-1ca46c4ca054\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:04 crc kubenswrapper[4558]: I0120 17:48:04.108279 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ab79df37-6e1e-48e4-9d74-1ca46c4ca054-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ab79df37-6e1e-48e4-9d74-1ca46c4ca054\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:04 crc kubenswrapper[4558]: I0120 17:48:04.108470 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ab79df37-6e1e-48e4-9d74-1ca46c4ca054-log-httpd\") pod \"ceilometer-0\" (UID: \"ab79df37-6e1e-48e4-9d74-1ca46c4ca054\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:04 crc kubenswrapper[4558]: I0120 17:48:04.108822 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ab79df37-6e1e-48e4-9d74-1ca46c4ca054-run-httpd\") pod \"ceilometer-0\" (UID: \"ab79df37-6e1e-48e4-9d74-1ca46c4ca054\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:04 crc kubenswrapper[4558]: I0120 17:48:04.109019 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ab79df37-6e1e-48e4-9d74-1ca46c4ca054-log-httpd\") pod \"ceilometer-0\" (UID: \"ab79df37-6e1e-48e4-9d74-1ca46c4ca054\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:04 crc kubenswrapper[4558]: I0120 17:48:04.112830 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab79df37-6e1e-48e4-9d74-1ca46c4ca054-config-data\") pod \"ceilometer-0\" (UID: \"ab79df37-6e1e-48e4-9d74-1ca46c4ca054\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:04 crc kubenswrapper[4558]: I0120 17:48:04.113412 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ab79df37-6e1e-48e4-9d74-1ca46c4ca054-scripts\") pod \"ceilometer-0\" (UID: \"ab79df37-6e1e-48e4-9d74-1ca46c4ca054\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:04 crc kubenswrapper[4558]: I0120 17:48:04.113831 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ab79df37-6e1e-48e4-9d74-1ca46c4ca054-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ab79df37-6e1e-48e4-9d74-1ca46c4ca054\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:04 crc kubenswrapper[4558]: I0120 17:48:04.118178 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ab79df37-6e1e-48e4-9d74-1ca46c4ca054-scripts\") pod \"ceilometer-0\" (UID: \"ab79df37-6e1e-48e4-9d74-1ca46c4ca054\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:04 crc kubenswrapper[4558]: I0120 17:48:04.121914 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab79df37-6e1e-48e4-9d74-1ca46c4ca054-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ab79df37-6e1e-48e4-9d74-1ca46c4ca054\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:04 crc kubenswrapper[4558]: I0120 17:48:04.123972 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab79df37-6e1e-48e4-9d74-1ca46c4ca054-config-data\") pod \"ceilometer-0\" (UID: \"ab79df37-6e1e-48e4-9d74-1ca46c4ca054\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:04 crc kubenswrapper[4558]: I0120 17:48:04.126091 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-288ms\" (UniqueName: \"kubernetes.io/projected/ab79df37-6e1e-48e4-9d74-1ca46c4ca054-kube-api-access-288ms\") pod \"ceilometer-0\" (UID: \"ab79df37-6e1e-48e4-9d74-1ca46c4ca054\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:04 crc kubenswrapper[4558]: I0120 17:48:04.306230 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:04 crc kubenswrapper[4558]: I0120 17:48:04.593616 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e45db124-f220-41eb-b1d0-d82affa8be89" path="/var/lib/kubelet/pods/e45db124-f220-41eb-b1d0-d82affa8be89/volumes" Jan 20 17:48:04 crc kubenswrapper[4558]: I0120 17:48:04.594643 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f0ce81a9-0d35-4813-b16a-74b5757bbffa" path="/var/lib/kubelet/pods/f0ce81a9-0d35-4813-b16a-74b5757bbffa/volumes" Jan 20 17:48:04 crc kubenswrapper[4558]: I0120 17:48:04.729444 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:48:04 crc kubenswrapper[4558]: W0120 17:48:04.732076 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podab79df37_6e1e_48e4_9d74_1ca46c4ca054.slice/crio-4c6772e9b1fd65161e20996f358af9f3a40d8a70c47bf1f629980c90f3723d20 WatchSource:0}: Error finding container 4c6772e9b1fd65161e20996f358af9f3a40d8a70c47bf1f629980c90f3723d20: Status 404 returned error can't find the container with id 4c6772e9b1fd65161e20996f358af9f3a40d8a70c47bf1f629980c90f3723d20 Jan 20 17:48:04 crc kubenswrapper[4558]: I0120 17:48:04.885081 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"ab79df37-6e1e-48e4-9d74-1ca46c4ca054","Type":"ContainerStarted","Data":"4c6772e9b1fd65161e20996f358af9f3a40d8a70c47bf1f629980c90f3723d20"} Jan 20 17:48:05 crc kubenswrapper[4558]: I0120 17:48:05.898228 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"ab79df37-6e1e-48e4-9d74-1ca46c4ca054","Type":"ContainerStarted","Data":"7bfe93ad26e1d9b9f12d8d60414a087f7fa034a93f5a61859ced8c253e5990e3"} Jan 20 17:48:06 crc kubenswrapper[4558]: I0120 17:48:06.643540 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-db-create-swxb7"] Jan 20 17:48:06 crc kubenswrapper[4558]: I0120 17:48:06.645076 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-db-create-swxb7" Jan 20 17:48:06 crc kubenswrapper[4558]: I0120 17:48:06.654243 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-db-create-swxb7"] Jan 20 17:48:06 crc kubenswrapper[4558]: I0120 17:48:06.673249 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/734caadf-9e91-4acd-9c76-0948d33c4c20-operator-scripts\") pod \"nova-api-db-create-swxb7\" (UID: \"734caadf-9e91-4acd-9c76-0948d33c4c20\") " pod="openstack-kuttl-tests/nova-api-db-create-swxb7" Jan 20 17:48:06 crc kubenswrapper[4558]: I0120 17:48:06.673743 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7cqcj\" (UniqueName: \"kubernetes.io/projected/734caadf-9e91-4acd-9c76-0948d33c4c20-kube-api-access-7cqcj\") pod \"nova-api-db-create-swxb7\" (UID: \"734caadf-9e91-4acd-9c76-0948d33c4c20\") " pod="openstack-kuttl-tests/nova-api-db-create-swxb7" Jan 20 17:48:06 crc kubenswrapper[4558]: I0120 17:48:06.750430 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell0-db-create-smx77"] Jan 20 17:48:06 crc kubenswrapper[4558]: I0120 17:48:06.777684 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/734caadf-9e91-4acd-9c76-0948d33c4c20-operator-scripts\") pod \"nova-api-db-create-swxb7\" (UID: \"734caadf-9e91-4acd-9c76-0948d33c4c20\") " pod="openstack-kuttl-tests/nova-api-db-create-swxb7" Jan 20 17:48:06 crc kubenswrapper[4558]: I0120 17:48:06.777809 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7cqcj\" (UniqueName: \"kubernetes.io/projected/734caadf-9e91-4acd-9c76-0948d33c4c20-kube-api-access-7cqcj\") pod \"nova-api-db-create-swxb7\" (UID: \"734caadf-9e91-4acd-9c76-0948d33c4c20\") " pod="openstack-kuttl-tests/nova-api-db-create-swxb7" Jan 20 17:48:06 crc kubenswrapper[4558]: I0120 17:48:06.779355 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/734caadf-9e91-4acd-9c76-0948d33c4c20-operator-scripts\") pod \"nova-api-db-create-swxb7\" (UID: \"734caadf-9e91-4acd-9c76-0948d33c4c20\") " pod="openstack-kuttl-tests/nova-api-db-create-swxb7" Jan 20 17:48:06 crc kubenswrapper[4558]: I0120 17:48:06.795178 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7cqcj\" (UniqueName: \"kubernetes.io/projected/734caadf-9e91-4acd-9c76-0948d33c4c20-kube-api-access-7cqcj\") pod \"nova-api-db-create-swxb7\" (UID: \"734caadf-9e91-4acd-9c76-0948d33c4c20\") " pod="openstack-kuttl-tests/nova-api-db-create-swxb7" Jan 20 17:48:06 crc kubenswrapper[4558]: I0120 17:48:06.802802 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-db-create-smx77" Jan 20 17:48:06 crc kubenswrapper[4558]: I0120 17:48:06.864786 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-f275-account-create-update-nwv6j"] Jan 20 17:48:06 crc kubenswrapper[4558]: I0120 17:48:06.867085 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-f275-account-create-update-nwv6j" Jan 20 17:48:06 crc kubenswrapper[4558]: I0120 17:48:06.869764 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-api-db-secret" Jan 20 17:48:06 crc kubenswrapper[4558]: I0120 17:48:06.875757 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-db-create-smx77"] Jan 20 17:48:06 crc kubenswrapper[4558]: I0120 17:48:06.887846 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-f275-account-create-update-nwv6j"] Jan 20 17:48:06 crc kubenswrapper[4558]: I0120 17:48:06.890450 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5269b2ba-0ce2-4c5a-bd6f-6785afcf8c1b-operator-scripts\") pod \"nova-api-f275-account-create-update-nwv6j\" (UID: \"5269b2ba-0ce2-4c5a-bd6f-6785afcf8c1b\") " pod="openstack-kuttl-tests/nova-api-f275-account-create-update-nwv6j" Jan 20 17:48:06 crc kubenswrapper[4558]: I0120 17:48:06.890581 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/100de1e9-d90a-4b00-9e80-82656bc418c1-operator-scripts\") pod \"nova-cell0-db-create-smx77\" (UID: \"100de1e9-d90a-4b00-9e80-82656bc418c1\") " pod="openstack-kuttl-tests/nova-cell0-db-create-smx77" Jan 20 17:48:06 crc kubenswrapper[4558]: I0120 17:48:06.890632 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gk9k9\" (UniqueName: \"kubernetes.io/projected/100de1e9-d90a-4b00-9e80-82656bc418c1-kube-api-access-gk9k9\") pod \"nova-cell0-db-create-smx77\" (UID: \"100de1e9-d90a-4b00-9e80-82656bc418c1\") " pod="openstack-kuttl-tests/nova-cell0-db-create-smx77" Jan 20 17:48:06 crc kubenswrapper[4558]: I0120 17:48:06.890690 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2fwtn\" (UniqueName: \"kubernetes.io/projected/5269b2ba-0ce2-4c5a-bd6f-6785afcf8c1b-kube-api-access-2fwtn\") pod \"nova-api-f275-account-create-update-nwv6j\" (UID: \"5269b2ba-0ce2-4c5a-bd6f-6785afcf8c1b\") " pod="openstack-kuttl-tests/nova-api-f275-account-create-update-nwv6j" Jan 20 17:48:06 crc kubenswrapper[4558]: I0120 17:48:06.898924 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-db-create-2qpp6"] Jan 20 17:48:06 crc kubenswrapper[4558]: I0120 17:48:06.902278 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-db-create-2qpp6" Jan 20 17:48:06 crc kubenswrapper[4558]: I0120 17:48:06.906433 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-db-create-2qpp6"] Jan 20 17:48:06 crc kubenswrapper[4558]: I0120 17:48:06.923566 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"ab79df37-6e1e-48e4-9d74-1ca46c4ca054","Type":"ContainerStarted","Data":"5c04ffd13da673e70119bf213d6bb8d4336e3c922fc83b22bd92581824cbe960"} Jan 20 17:48:06 crc kubenswrapper[4558]: I0120 17:48:06.967547 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-db-create-swxb7" Jan 20 17:48:06 crc kubenswrapper[4558]: I0120 17:48:06.975253 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell0-a0eb-account-create-update-hpcq8"] Jan 20 17:48:06 crc kubenswrapper[4558]: I0120 17:48:06.976727 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-a0eb-account-create-update-hpcq8" Jan 20 17:48:06 crc kubenswrapper[4558]: I0120 17:48:06.982468 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-db-secret" Jan 20 17:48:06 crc kubenswrapper[4558]: I0120 17:48:06.984505 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-a0eb-account-create-update-hpcq8"] Jan 20 17:48:06 crc kubenswrapper[4558]: I0120 17:48:06.997864 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/100de1e9-d90a-4b00-9e80-82656bc418c1-operator-scripts\") pod \"nova-cell0-db-create-smx77\" (UID: \"100de1e9-d90a-4b00-9e80-82656bc418c1\") " pod="openstack-kuttl-tests/nova-cell0-db-create-smx77" Jan 20 17:48:06 crc kubenswrapper[4558]: I0120 17:48:06.997925 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gk9k9\" (UniqueName: \"kubernetes.io/projected/100de1e9-d90a-4b00-9e80-82656bc418c1-kube-api-access-gk9k9\") pod \"nova-cell0-db-create-smx77\" (UID: \"100de1e9-d90a-4b00-9e80-82656bc418c1\") " pod="openstack-kuttl-tests/nova-cell0-db-create-smx77" Jan 20 17:48:06 crc kubenswrapper[4558]: I0120 17:48:06.997974 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lk2vw\" (UniqueName: \"kubernetes.io/projected/594d0c54-940e-4220-ba99-bd5311ce96d0-kube-api-access-lk2vw\") pod \"nova-cell1-db-create-2qpp6\" (UID: \"594d0c54-940e-4220-ba99-bd5311ce96d0\") " pod="openstack-kuttl-tests/nova-cell1-db-create-2qpp6" Jan 20 17:48:06 crc kubenswrapper[4558]: I0120 17:48:06.998016 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2fwtn\" (UniqueName: \"kubernetes.io/projected/5269b2ba-0ce2-4c5a-bd6f-6785afcf8c1b-kube-api-access-2fwtn\") pod \"nova-api-f275-account-create-update-nwv6j\" (UID: \"5269b2ba-0ce2-4c5a-bd6f-6785afcf8c1b\") " pod="openstack-kuttl-tests/nova-api-f275-account-create-update-nwv6j" Jan 20 17:48:06 crc kubenswrapper[4558]: I0120 17:48:06.998042 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/594d0c54-940e-4220-ba99-bd5311ce96d0-operator-scripts\") pod \"nova-cell1-db-create-2qpp6\" (UID: \"594d0c54-940e-4220-ba99-bd5311ce96d0\") " pod="openstack-kuttl-tests/nova-cell1-db-create-2qpp6" Jan 20 17:48:06 crc kubenswrapper[4558]: I0120 17:48:06.998083 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5269b2ba-0ce2-4c5a-bd6f-6785afcf8c1b-operator-scripts\") pod \"nova-api-f275-account-create-update-nwv6j\" (UID: \"5269b2ba-0ce2-4c5a-bd6f-6785afcf8c1b\") " pod="openstack-kuttl-tests/nova-api-f275-account-create-update-nwv6j" Jan 20 17:48:06 crc kubenswrapper[4558]: I0120 17:48:06.998862 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/5269b2ba-0ce2-4c5a-bd6f-6785afcf8c1b-operator-scripts\") pod \"nova-api-f275-account-create-update-nwv6j\" (UID: \"5269b2ba-0ce2-4c5a-bd6f-6785afcf8c1b\") " pod="openstack-kuttl-tests/nova-api-f275-account-create-update-nwv6j" Jan 20 17:48:07 crc kubenswrapper[4558]: I0120 17:48:07.001342 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/100de1e9-d90a-4b00-9e80-82656bc418c1-operator-scripts\") pod \"nova-cell0-db-create-smx77\" (UID: \"100de1e9-d90a-4b00-9e80-82656bc418c1\") " pod="openstack-kuttl-tests/nova-cell0-db-create-smx77" Jan 20 17:48:07 crc kubenswrapper[4558]: I0120 17:48:07.021157 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2fwtn\" (UniqueName: \"kubernetes.io/projected/5269b2ba-0ce2-4c5a-bd6f-6785afcf8c1b-kube-api-access-2fwtn\") pod \"nova-api-f275-account-create-update-nwv6j\" (UID: \"5269b2ba-0ce2-4c5a-bd6f-6785afcf8c1b\") " pod="openstack-kuttl-tests/nova-api-f275-account-create-update-nwv6j" Jan 20 17:48:07 crc kubenswrapper[4558]: I0120 17:48:07.021577 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gk9k9\" (UniqueName: \"kubernetes.io/projected/100de1e9-d90a-4b00-9e80-82656bc418c1-kube-api-access-gk9k9\") pod \"nova-cell0-db-create-smx77\" (UID: \"100de1e9-d90a-4b00-9e80-82656bc418c1\") " pod="openstack-kuttl-tests/nova-cell0-db-create-smx77" Jan 20 17:48:07 crc kubenswrapper[4558]: I0120 17:48:07.101698 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/594d0c54-940e-4220-ba99-bd5311ce96d0-operator-scripts\") pod \"nova-cell1-db-create-2qpp6\" (UID: \"594d0c54-940e-4220-ba99-bd5311ce96d0\") " pod="openstack-kuttl-tests/nova-cell1-db-create-2qpp6" Jan 20 17:48:07 crc kubenswrapper[4558]: I0120 17:48:07.101932 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-724ms\" (UniqueName: \"kubernetes.io/projected/5226a591-a9b2-48f7-82ed-f613fe64b5a9-kube-api-access-724ms\") pod \"nova-cell0-a0eb-account-create-update-hpcq8\" (UID: \"5226a591-a9b2-48f7-82ed-f613fe64b5a9\") " pod="openstack-kuttl-tests/nova-cell0-a0eb-account-create-update-hpcq8" Jan 20 17:48:07 crc kubenswrapper[4558]: I0120 17:48:07.102039 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5226a591-a9b2-48f7-82ed-f613fe64b5a9-operator-scripts\") pod \"nova-cell0-a0eb-account-create-update-hpcq8\" (UID: \"5226a591-a9b2-48f7-82ed-f613fe64b5a9\") " pod="openstack-kuttl-tests/nova-cell0-a0eb-account-create-update-hpcq8" Jan 20 17:48:07 crc kubenswrapper[4558]: I0120 17:48:07.102071 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lk2vw\" (UniqueName: \"kubernetes.io/projected/594d0c54-940e-4220-ba99-bd5311ce96d0-kube-api-access-lk2vw\") pod \"nova-cell1-db-create-2qpp6\" (UID: \"594d0c54-940e-4220-ba99-bd5311ce96d0\") " pod="openstack-kuttl-tests/nova-cell1-db-create-2qpp6" Jan 20 17:48:07 crc kubenswrapper[4558]: I0120 17:48:07.103035 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/594d0c54-940e-4220-ba99-bd5311ce96d0-operator-scripts\") pod \"nova-cell1-db-create-2qpp6\" (UID: \"594d0c54-940e-4220-ba99-bd5311ce96d0\") " 
pod="openstack-kuttl-tests/nova-cell1-db-create-2qpp6" Jan 20 17:48:07 crc kubenswrapper[4558]: I0120 17:48:07.129311 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-db-create-smx77" Jan 20 17:48:07 crc kubenswrapper[4558]: I0120 17:48:07.130789 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lk2vw\" (UniqueName: \"kubernetes.io/projected/594d0c54-940e-4220-ba99-bd5311ce96d0-kube-api-access-lk2vw\") pod \"nova-cell1-db-create-2qpp6\" (UID: \"594d0c54-940e-4220-ba99-bd5311ce96d0\") " pod="openstack-kuttl-tests/nova-cell1-db-create-2qpp6" Jan 20 17:48:07 crc kubenswrapper[4558]: I0120 17:48:07.154659 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-d1d9-account-create-update-6dzmm"] Jan 20 17:48:07 crc kubenswrapper[4558]: I0120 17:48:07.156075 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-d1d9-account-create-update-6dzmm" Jan 20 17:48:07 crc kubenswrapper[4558]: I0120 17:48:07.159006 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-db-secret" Jan 20 17:48:07 crc kubenswrapper[4558]: I0120 17:48:07.163785 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-d1d9-account-create-update-6dzmm"] Jan 20 17:48:07 crc kubenswrapper[4558]: I0120 17:48:07.205377 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-724ms\" (UniqueName: \"kubernetes.io/projected/5226a591-a9b2-48f7-82ed-f613fe64b5a9-kube-api-access-724ms\") pod \"nova-cell0-a0eb-account-create-update-hpcq8\" (UID: \"5226a591-a9b2-48f7-82ed-f613fe64b5a9\") " pod="openstack-kuttl-tests/nova-cell0-a0eb-account-create-update-hpcq8" Jan 20 17:48:07 crc kubenswrapper[4558]: I0120 17:48:07.205449 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ktlbt\" (UniqueName: \"kubernetes.io/projected/b942958c-b4fa-4db7-aa26-9db8bd06c776-kube-api-access-ktlbt\") pod \"nova-cell1-d1d9-account-create-update-6dzmm\" (UID: \"b942958c-b4fa-4db7-aa26-9db8bd06c776\") " pod="openstack-kuttl-tests/nova-cell1-d1d9-account-create-update-6dzmm" Jan 20 17:48:07 crc kubenswrapper[4558]: I0120 17:48:07.205534 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5226a591-a9b2-48f7-82ed-f613fe64b5a9-operator-scripts\") pod \"nova-cell0-a0eb-account-create-update-hpcq8\" (UID: \"5226a591-a9b2-48f7-82ed-f613fe64b5a9\") " pod="openstack-kuttl-tests/nova-cell0-a0eb-account-create-update-hpcq8" Jan 20 17:48:07 crc kubenswrapper[4558]: I0120 17:48:07.205601 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b942958c-b4fa-4db7-aa26-9db8bd06c776-operator-scripts\") pod \"nova-cell1-d1d9-account-create-update-6dzmm\" (UID: \"b942958c-b4fa-4db7-aa26-9db8bd06c776\") " pod="openstack-kuttl-tests/nova-cell1-d1d9-account-create-update-6dzmm" Jan 20 17:48:07 crc kubenswrapper[4558]: I0120 17:48:07.206354 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5226a591-a9b2-48f7-82ed-f613fe64b5a9-operator-scripts\") pod \"nova-cell0-a0eb-account-create-update-hpcq8\" (UID: 
\"5226a591-a9b2-48f7-82ed-f613fe64b5a9\") " pod="openstack-kuttl-tests/nova-cell0-a0eb-account-create-update-hpcq8" Jan 20 17:48:07 crc kubenswrapper[4558]: I0120 17:48:07.221176 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-f275-account-create-update-nwv6j" Jan 20 17:48:07 crc kubenswrapper[4558]: I0120 17:48:07.249149 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-db-create-2qpp6" Jan 20 17:48:07 crc kubenswrapper[4558]: I0120 17:48:07.253416 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-724ms\" (UniqueName: \"kubernetes.io/projected/5226a591-a9b2-48f7-82ed-f613fe64b5a9-kube-api-access-724ms\") pod \"nova-cell0-a0eb-account-create-update-hpcq8\" (UID: \"5226a591-a9b2-48f7-82ed-f613fe64b5a9\") " pod="openstack-kuttl-tests/nova-cell0-a0eb-account-create-update-hpcq8" Jan 20 17:48:07 crc kubenswrapper[4558]: I0120 17:48:07.310522 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b942958c-b4fa-4db7-aa26-9db8bd06c776-operator-scripts\") pod \"nova-cell1-d1d9-account-create-update-6dzmm\" (UID: \"b942958c-b4fa-4db7-aa26-9db8bd06c776\") " pod="openstack-kuttl-tests/nova-cell1-d1d9-account-create-update-6dzmm" Jan 20 17:48:07 crc kubenswrapper[4558]: I0120 17:48:07.310626 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ktlbt\" (UniqueName: \"kubernetes.io/projected/b942958c-b4fa-4db7-aa26-9db8bd06c776-kube-api-access-ktlbt\") pod \"nova-cell1-d1d9-account-create-update-6dzmm\" (UID: \"b942958c-b4fa-4db7-aa26-9db8bd06c776\") " pod="openstack-kuttl-tests/nova-cell1-d1d9-account-create-update-6dzmm" Jan 20 17:48:07 crc kubenswrapper[4558]: I0120 17:48:07.310875 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-a0eb-account-create-update-hpcq8" Jan 20 17:48:07 crc kubenswrapper[4558]: I0120 17:48:07.312076 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b942958c-b4fa-4db7-aa26-9db8bd06c776-operator-scripts\") pod \"nova-cell1-d1d9-account-create-update-6dzmm\" (UID: \"b942958c-b4fa-4db7-aa26-9db8bd06c776\") " pod="openstack-kuttl-tests/nova-cell1-d1d9-account-create-update-6dzmm" Jan 20 17:48:07 crc kubenswrapper[4558]: I0120 17:48:07.330866 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ktlbt\" (UniqueName: \"kubernetes.io/projected/b942958c-b4fa-4db7-aa26-9db8bd06c776-kube-api-access-ktlbt\") pod \"nova-cell1-d1d9-account-create-update-6dzmm\" (UID: \"b942958c-b4fa-4db7-aa26-9db8bd06c776\") " pod="openstack-kuttl-tests/nova-cell1-d1d9-account-create-update-6dzmm" Jan 20 17:48:07 crc kubenswrapper[4558]: I0120 17:48:07.441565 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-db-create-swxb7"] Jan 20 17:48:07 crc kubenswrapper[4558]: I0120 17:48:07.545795 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-d1d9-account-create-update-6dzmm" Jan 20 17:48:07 crc kubenswrapper[4558]: I0120 17:48:07.596850 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-db-create-smx77"] Jan 20 17:48:07 crc kubenswrapper[4558]: W0120 17:48:07.612988 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod100de1e9_d90a_4b00_9e80_82656bc418c1.slice/crio-bd92c0102f3acb2e307a205c06479b7574e072b2d039b9c921772c7ba14a15ba WatchSource:0}: Error finding container bd92c0102f3acb2e307a205c06479b7574e072b2d039b9c921772c7ba14a15ba: Status 404 returned error can't find the container with id bd92c0102f3acb2e307a205c06479b7574e072b2d039b9c921772c7ba14a15ba Jan 20 17:48:07 crc kubenswrapper[4558]: I0120 17:48:07.745337 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-f275-account-create-update-nwv6j"] Jan 20 17:48:07 crc kubenswrapper[4558]: W0120 17:48:07.745871 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5269b2ba_0ce2_4c5a_bd6f_6785afcf8c1b.slice/crio-f8ca9917317a77783d0c34def684b79c8c4c15617c712f15827fcf1e7977e908 WatchSource:0}: Error finding container f8ca9917317a77783d0c34def684b79c8c4c15617c712f15827fcf1e7977e908: Status 404 returned error can't find the container with id f8ca9917317a77783d0c34def684b79c8c4c15617c712f15827fcf1e7977e908 Jan 20 17:48:07 crc kubenswrapper[4558]: I0120 17:48:07.827068 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-db-create-2qpp6"] Jan 20 17:48:07 crc kubenswrapper[4558]: I0120 17:48:07.837657 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-a0eb-account-create-update-hpcq8"] Jan 20 17:48:07 crc kubenswrapper[4558]: I0120 17:48:07.933030 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-a0eb-account-create-update-hpcq8" event={"ID":"5226a591-a9b2-48f7-82ed-f613fe64b5a9","Type":"ContainerStarted","Data":"456b2d79212225c57f534ee524132a72f33cf156ea959d91fb74c37af78c55b8"} Jan 20 17:48:07 crc kubenswrapper[4558]: I0120 17:48:07.935997 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-db-create-smx77" event={"ID":"100de1e9-d90a-4b00-9e80-82656bc418c1","Type":"ContainerStarted","Data":"f13b8c904e10f66a85bae11d7194703136a1d193483d5222b8adc92a268e6dc0"} Jan 20 17:48:07 crc kubenswrapper[4558]: I0120 17:48:07.936048 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-db-create-smx77" event={"ID":"100de1e9-d90a-4b00-9e80-82656bc418c1","Type":"ContainerStarted","Data":"bd92c0102f3acb2e307a205c06479b7574e072b2d039b9c921772c7ba14a15ba"} Jan 20 17:48:07 crc kubenswrapper[4558]: I0120 17:48:07.939836 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-f275-account-create-update-nwv6j" event={"ID":"5269b2ba-0ce2-4c5a-bd6f-6785afcf8c1b","Type":"ContainerStarted","Data":"f8ca9917317a77783d0c34def684b79c8c4c15617c712f15827fcf1e7977e908"} Jan 20 17:48:07 crc kubenswrapper[4558]: I0120 17:48:07.946331 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"ab79df37-6e1e-48e4-9d74-1ca46c4ca054","Type":"ContainerStarted","Data":"37024b86cbc241dbb82a12fbede4737a0eed809370443a61b4d4719539dd6aaf"} Jan 
20 17:48:07 crc kubenswrapper[4558]: I0120 17:48:07.948126 4558 generic.go:334] "Generic (PLEG): container finished" podID="734caadf-9e91-4acd-9c76-0948d33c4c20" containerID="7588231569d82d21bc037c8f9bdc99164c106acd49e9d29c96f7408401d928c4" exitCode=0 Jan 20 17:48:07 crc kubenswrapper[4558]: I0120 17:48:07.948191 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-db-create-swxb7" event={"ID":"734caadf-9e91-4acd-9c76-0948d33c4c20","Type":"ContainerDied","Data":"7588231569d82d21bc037c8f9bdc99164c106acd49e9d29c96f7408401d928c4"} Jan 20 17:48:07 crc kubenswrapper[4558]: I0120 17:48:07.948216 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-db-create-swxb7" event={"ID":"734caadf-9e91-4acd-9c76-0948d33c4c20","Type":"ContainerStarted","Data":"d2b6bff91acf696400eabaae03863293178215bc9ac7ab38cebf675286bb98ee"} Jan 20 17:48:07 crc kubenswrapper[4558]: I0120 17:48:07.954182 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-db-create-2qpp6" event={"ID":"594d0c54-940e-4220-ba99-bd5311ce96d0","Type":"ContainerStarted","Data":"621094734b81f81d70d1b6b9e4e7a2ca00f6a6394f064b19c188f946db1a2ec1"} Jan 20 17:48:07 crc kubenswrapper[4558]: I0120 17:48:07.955014 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell0-db-create-smx77" podStartSLOduration=1.955000199 podStartE2EDuration="1.955000199s" podCreationTimestamp="2026-01-20 17:48:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:48:07.953600566 +0000 UTC m=+3981.713938534" watchObservedRunningTime="2026-01-20 17:48:07.955000199 +0000 UTC m=+3981.715338156" Jan 20 17:48:07 crc kubenswrapper[4558]: I0120 17:48:07.993974 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-d1d9-account-create-update-6dzmm"] Jan 20 17:48:08 crc kubenswrapper[4558]: W0120 17:48:08.016220 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb942958c_b4fa_4db7_aa26_9db8bd06c776.slice/crio-783a86bb82d2109d330d5b292ce3e6d552cf21feb5d6dae3c8e9a79746687046 WatchSource:0}: Error finding container 783a86bb82d2109d330d5b292ce3e6d552cf21feb5d6dae3c8e9a79746687046: Status 404 returned error can't find the container with id 783a86bb82d2109d330d5b292ce3e6d552cf21feb5d6dae3c8e9a79746687046 Jan 20 17:48:08 crc kubenswrapper[4558]: I0120 17:48:08.974691 4558 generic.go:334] "Generic (PLEG): container finished" podID="5269b2ba-0ce2-4c5a-bd6f-6785afcf8c1b" containerID="441fa4054ae85c1a96a72c9f2e37597c22d876e5267c2bf5c34bbba3be42499c" exitCode=0 Jan 20 17:48:08 crc kubenswrapper[4558]: I0120 17:48:08.975421 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-f275-account-create-update-nwv6j" event={"ID":"5269b2ba-0ce2-4c5a-bd6f-6785afcf8c1b","Type":"ContainerDied","Data":"441fa4054ae85c1a96a72c9f2e37597c22d876e5267c2bf5c34bbba3be42499c"} Jan 20 17:48:08 crc kubenswrapper[4558]: I0120 17:48:08.977816 4558 generic.go:334] "Generic (PLEG): container finished" podID="b942958c-b4fa-4db7-aa26-9db8bd06c776" containerID="0e79f2cae3c4c0133a0b6788fe7df950d70f340957fa52cebd1533ef87c6bf7f" exitCode=0 Jan 20 17:48:08 crc kubenswrapper[4558]: I0120 17:48:08.977943 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-kuttl-tests/nova-cell1-d1d9-account-create-update-6dzmm" event={"ID":"b942958c-b4fa-4db7-aa26-9db8bd06c776","Type":"ContainerDied","Data":"0e79f2cae3c4c0133a0b6788fe7df950d70f340957fa52cebd1533ef87c6bf7f"} Jan 20 17:48:08 crc kubenswrapper[4558]: I0120 17:48:08.977981 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-d1d9-account-create-update-6dzmm" event={"ID":"b942958c-b4fa-4db7-aa26-9db8bd06c776","Type":"ContainerStarted","Data":"783a86bb82d2109d330d5b292ce3e6d552cf21feb5d6dae3c8e9a79746687046"} Jan 20 17:48:08 crc kubenswrapper[4558]: I0120 17:48:08.981896 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"ab79df37-6e1e-48e4-9d74-1ca46c4ca054","Type":"ContainerStarted","Data":"bcb6b58b36bcd934a03592e1d510cacff275865ab16e77166996c319f2d3638e"} Jan 20 17:48:08 crc kubenswrapper[4558]: I0120 17:48:08.982206 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:08 crc kubenswrapper[4558]: I0120 17:48:08.983852 4558 generic.go:334] "Generic (PLEG): container finished" podID="594d0c54-940e-4220-ba99-bd5311ce96d0" containerID="57b3952a8f41464d4529f673a192a03c8051341064d2770a665439b22f8070fc" exitCode=0 Jan 20 17:48:08 crc kubenswrapper[4558]: I0120 17:48:08.983887 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-db-create-2qpp6" event={"ID":"594d0c54-940e-4220-ba99-bd5311ce96d0","Type":"ContainerDied","Data":"57b3952a8f41464d4529f673a192a03c8051341064d2770a665439b22f8070fc"} Jan 20 17:48:08 crc kubenswrapper[4558]: I0120 17:48:08.988592 4558 generic.go:334] "Generic (PLEG): container finished" podID="5226a591-a9b2-48f7-82ed-f613fe64b5a9" containerID="543acc9e8e6d663cae36ab5b04b6b497d854dfd494c3edf417b4d1f28c4e5338" exitCode=0 Jan 20 17:48:08 crc kubenswrapper[4558]: I0120 17:48:08.988660 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-a0eb-account-create-update-hpcq8" event={"ID":"5226a591-a9b2-48f7-82ed-f613fe64b5a9","Type":"ContainerDied","Data":"543acc9e8e6d663cae36ab5b04b6b497d854dfd494c3edf417b4d1f28c4e5338"} Jan 20 17:48:08 crc kubenswrapper[4558]: I0120 17:48:08.992748 4558 generic.go:334] "Generic (PLEG): container finished" podID="100de1e9-d90a-4b00-9e80-82656bc418c1" containerID="f13b8c904e10f66a85bae11d7194703136a1d193483d5222b8adc92a268e6dc0" exitCode=0 Jan 20 17:48:08 crc kubenswrapper[4558]: I0120 17:48:08.992934 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-db-create-smx77" event={"ID":"100de1e9-d90a-4b00-9e80-82656bc418c1","Type":"ContainerDied","Data":"f13b8c904e10f66a85bae11d7194703136a1d193483d5222b8adc92a268e6dc0"} Jan 20 17:48:09 crc kubenswrapper[4558]: I0120 17:48:09.033967 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=2.604615611 podStartE2EDuration="6.033942136s" podCreationTimestamp="2026-01-20 17:48:03 +0000 UTC" firstStartedPulling="2026-01-20 17:48:04.734638927 +0000 UTC m=+3978.494976894" lastFinishedPulling="2026-01-20 17:48:08.163965452 +0000 UTC m=+3981.924303419" observedRunningTime="2026-01-20 17:48:09.031547043 +0000 UTC m=+3982.791885009" watchObservedRunningTime="2026-01-20 17:48:09.033942136 +0000 UTC m=+3982.794280103" Jan 20 17:48:09 crc kubenswrapper[4558]: I0120 17:48:09.363762 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-db-create-swxb7" Jan 20 17:48:09 crc kubenswrapper[4558]: I0120 17:48:09.478502 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/734caadf-9e91-4acd-9c76-0948d33c4c20-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "734caadf-9e91-4acd-9c76-0948d33c4c20" (UID: "734caadf-9e91-4acd-9c76-0948d33c4c20"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:48:09 crc kubenswrapper[4558]: I0120 17:48:09.478742 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/734caadf-9e91-4acd-9c76-0948d33c4c20-operator-scripts\") pod \"734caadf-9e91-4acd-9c76-0948d33c4c20\" (UID: \"734caadf-9e91-4acd-9c76-0948d33c4c20\") " Jan 20 17:48:09 crc kubenswrapper[4558]: I0120 17:48:09.478977 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7cqcj\" (UniqueName: \"kubernetes.io/projected/734caadf-9e91-4acd-9c76-0948d33c4c20-kube-api-access-7cqcj\") pod \"734caadf-9e91-4acd-9c76-0948d33c4c20\" (UID: \"734caadf-9e91-4acd-9c76-0948d33c4c20\") " Jan 20 17:48:09 crc kubenswrapper[4558]: I0120 17:48:09.479547 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/734caadf-9e91-4acd-9c76-0948d33c4c20-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:09 crc kubenswrapper[4558]: I0120 17:48:09.485589 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/734caadf-9e91-4acd-9c76-0948d33c4c20-kube-api-access-7cqcj" (OuterVolumeSpecName: "kube-api-access-7cqcj") pod "734caadf-9e91-4acd-9c76-0948d33c4c20" (UID: "734caadf-9e91-4acd-9c76-0948d33c4c20"). InnerVolumeSpecName "kube-api-access-7cqcj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:48:09 crc kubenswrapper[4558]: I0120 17:48:09.581231 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7cqcj\" (UniqueName: \"kubernetes.io/projected/734caadf-9e91-4acd-9c76-0948d33c4c20-kube-api-access-7cqcj\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:10 crc kubenswrapper[4558]: I0120 17:48:10.004607 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-db-create-swxb7" event={"ID":"734caadf-9e91-4acd-9c76-0948d33c4c20","Type":"ContainerDied","Data":"d2b6bff91acf696400eabaae03863293178215bc9ac7ab38cebf675286bb98ee"} Jan 20 17:48:10 crc kubenswrapper[4558]: I0120 17:48:10.004677 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d2b6bff91acf696400eabaae03863293178215bc9ac7ab38cebf675286bb98ee" Jan 20 17:48:10 crc kubenswrapper[4558]: I0120 17:48:10.004859 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-db-create-swxb7" Jan 20 17:48:10 crc kubenswrapper[4558]: I0120 17:48:10.529819 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-f275-account-create-update-nwv6j" Jan 20 17:48:10 crc kubenswrapper[4558]: I0120 17:48:10.611630 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5269b2ba-0ce2-4c5a-bd6f-6785afcf8c1b-operator-scripts\") pod \"5269b2ba-0ce2-4c5a-bd6f-6785afcf8c1b\" (UID: \"5269b2ba-0ce2-4c5a-bd6f-6785afcf8c1b\") " Jan 20 17:48:10 crc kubenswrapper[4558]: I0120 17:48:10.611787 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2fwtn\" (UniqueName: \"kubernetes.io/projected/5269b2ba-0ce2-4c5a-bd6f-6785afcf8c1b-kube-api-access-2fwtn\") pod \"5269b2ba-0ce2-4c5a-bd6f-6785afcf8c1b\" (UID: \"5269b2ba-0ce2-4c5a-bd6f-6785afcf8c1b\") " Jan 20 17:48:10 crc kubenswrapper[4558]: I0120 17:48:10.612176 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5269b2ba-0ce2-4c5a-bd6f-6785afcf8c1b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "5269b2ba-0ce2-4c5a-bd6f-6785afcf8c1b" (UID: "5269b2ba-0ce2-4c5a-bd6f-6785afcf8c1b"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:48:10 crc kubenswrapper[4558]: I0120 17:48:10.612748 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5269b2ba-0ce2-4c5a-bd6f-6785afcf8c1b-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:10 crc kubenswrapper[4558]: I0120 17:48:10.620233 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5269b2ba-0ce2-4c5a-bd6f-6785afcf8c1b-kube-api-access-2fwtn" (OuterVolumeSpecName: "kube-api-access-2fwtn") pod "5269b2ba-0ce2-4c5a-bd6f-6785afcf8c1b" (UID: "5269b2ba-0ce2-4c5a-bd6f-6785afcf8c1b"). InnerVolumeSpecName "kube-api-access-2fwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:48:10 crc kubenswrapper[4558]: I0120 17:48:10.685639 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-db-create-smx77" Jan 20 17:48:10 crc kubenswrapper[4558]: I0120 17:48:10.698002 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-db-create-2qpp6" Jan 20 17:48:10 crc kubenswrapper[4558]: I0120 17:48:10.703206 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-a0eb-account-create-update-hpcq8" Jan 20 17:48:10 crc kubenswrapper[4558]: I0120 17:48:10.717895 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-d1d9-account-create-update-6dzmm" Jan 20 17:48:10 crc kubenswrapper[4558]: I0120 17:48:10.727600 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2fwtn\" (UniqueName: \"kubernetes.io/projected/5269b2ba-0ce2-4c5a-bd6f-6785afcf8c1b-kube-api-access-2fwtn\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:10 crc kubenswrapper[4558]: I0120 17:48:10.829435 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-724ms\" (UniqueName: \"kubernetes.io/projected/5226a591-a9b2-48f7-82ed-f613fe64b5a9-kube-api-access-724ms\") pod \"5226a591-a9b2-48f7-82ed-f613fe64b5a9\" (UID: \"5226a591-a9b2-48f7-82ed-f613fe64b5a9\") " Jan 20 17:48:10 crc kubenswrapper[4558]: I0120 17:48:10.829636 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gk9k9\" (UniqueName: \"kubernetes.io/projected/100de1e9-d90a-4b00-9e80-82656bc418c1-kube-api-access-gk9k9\") pod \"100de1e9-d90a-4b00-9e80-82656bc418c1\" (UID: \"100de1e9-d90a-4b00-9e80-82656bc418c1\") " Jan 20 17:48:10 crc kubenswrapper[4558]: I0120 17:48:10.829716 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5226a591-a9b2-48f7-82ed-f613fe64b5a9-operator-scripts\") pod \"5226a591-a9b2-48f7-82ed-f613fe64b5a9\" (UID: \"5226a591-a9b2-48f7-82ed-f613fe64b5a9\") " Jan 20 17:48:10 crc kubenswrapper[4558]: I0120 17:48:10.829885 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ktlbt\" (UniqueName: \"kubernetes.io/projected/b942958c-b4fa-4db7-aa26-9db8bd06c776-kube-api-access-ktlbt\") pod \"b942958c-b4fa-4db7-aa26-9db8bd06c776\" (UID: \"b942958c-b4fa-4db7-aa26-9db8bd06c776\") " Jan 20 17:48:10 crc kubenswrapper[4558]: I0120 17:48:10.829937 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/100de1e9-d90a-4b00-9e80-82656bc418c1-operator-scripts\") pod \"100de1e9-d90a-4b00-9e80-82656bc418c1\" (UID: \"100de1e9-d90a-4b00-9e80-82656bc418c1\") " Jan 20 17:48:10 crc kubenswrapper[4558]: I0120 17:48:10.830069 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/594d0c54-940e-4220-ba99-bd5311ce96d0-operator-scripts\") pod \"594d0c54-940e-4220-ba99-bd5311ce96d0\" (UID: \"594d0c54-940e-4220-ba99-bd5311ce96d0\") " Jan 20 17:48:10 crc kubenswrapper[4558]: I0120 17:48:10.830094 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lk2vw\" (UniqueName: \"kubernetes.io/projected/594d0c54-940e-4220-ba99-bd5311ce96d0-kube-api-access-lk2vw\") pod \"594d0c54-940e-4220-ba99-bd5311ce96d0\" (UID: \"594d0c54-940e-4220-ba99-bd5311ce96d0\") " Jan 20 17:48:10 crc kubenswrapper[4558]: I0120 17:48:10.830208 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b942958c-b4fa-4db7-aa26-9db8bd06c776-operator-scripts\") pod \"b942958c-b4fa-4db7-aa26-9db8bd06c776\" (UID: \"b942958c-b4fa-4db7-aa26-9db8bd06c776\") " Jan 20 17:48:10 crc kubenswrapper[4558]: I0120 17:48:10.830611 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/100de1e9-d90a-4b00-9e80-82656bc418c1-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod 
"100de1e9-d90a-4b00-9e80-82656bc418c1" (UID: "100de1e9-d90a-4b00-9e80-82656bc418c1"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:48:10 crc kubenswrapper[4558]: I0120 17:48:10.830664 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5226a591-a9b2-48f7-82ed-f613fe64b5a9-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "5226a591-a9b2-48f7-82ed-f613fe64b5a9" (UID: "5226a591-a9b2-48f7-82ed-f613fe64b5a9"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:48:10 crc kubenswrapper[4558]: I0120 17:48:10.830740 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/594d0c54-940e-4220-ba99-bd5311ce96d0-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "594d0c54-940e-4220-ba99-bd5311ce96d0" (UID: "594d0c54-940e-4220-ba99-bd5311ce96d0"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:48:10 crc kubenswrapper[4558]: I0120 17:48:10.831086 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b942958c-b4fa-4db7-aa26-9db8bd06c776-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b942958c-b4fa-4db7-aa26-9db8bd06c776" (UID: "b942958c-b4fa-4db7-aa26-9db8bd06c776"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:48:10 crc kubenswrapper[4558]: I0120 17:48:10.831765 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5226a591-a9b2-48f7-82ed-f613fe64b5a9-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:10 crc kubenswrapper[4558]: I0120 17:48:10.831800 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/100de1e9-d90a-4b00-9e80-82656bc418c1-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:10 crc kubenswrapper[4558]: I0120 17:48:10.831811 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/594d0c54-940e-4220-ba99-bd5311ce96d0-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:10 crc kubenswrapper[4558]: I0120 17:48:10.831821 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b942958c-b4fa-4db7-aa26-9db8bd06c776-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:10 crc kubenswrapper[4558]: I0120 17:48:10.837084 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/100de1e9-d90a-4b00-9e80-82656bc418c1-kube-api-access-gk9k9" (OuterVolumeSpecName: "kube-api-access-gk9k9") pod "100de1e9-d90a-4b00-9e80-82656bc418c1" (UID: "100de1e9-d90a-4b00-9e80-82656bc418c1"). InnerVolumeSpecName "kube-api-access-gk9k9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:48:10 crc kubenswrapper[4558]: I0120 17:48:10.844612 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5226a591-a9b2-48f7-82ed-f613fe64b5a9-kube-api-access-724ms" (OuterVolumeSpecName: "kube-api-access-724ms") pod "5226a591-a9b2-48f7-82ed-f613fe64b5a9" (UID: "5226a591-a9b2-48f7-82ed-f613fe64b5a9"). InnerVolumeSpecName "kube-api-access-724ms". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:48:10 crc kubenswrapper[4558]: I0120 17:48:10.845792 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/594d0c54-940e-4220-ba99-bd5311ce96d0-kube-api-access-lk2vw" (OuterVolumeSpecName: "kube-api-access-lk2vw") pod "594d0c54-940e-4220-ba99-bd5311ce96d0" (UID: "594d0c54-940e-4220-ba99-bd5311ce96d0"). InnerVolumeSpecName "kube-api-access-lk2vw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:48:10 crc kubenswrapper[4558]: I0120 17:48:10.851228 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b942958c-b4fa-4db7-aa26-9db8bd06c776-kube-api-access-ktlbt" (OuterVolumeSpecName: "kube-api-access-ktlbt") pod "b942958c-b4fa-4db7-aa26-9db8bd06c776" (UID: "b942958c-b4fa-4db7-aa26-9db8bd06c776"). InnerVolumeSpecName "kube-api-access-ktlbt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:48:10 crc kubenswrapper[4558]: I0120 17:48:10.933517 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lk2vw\" (UniqueName: \"kubernetes.io/projected/594d0c54-940e-4220-ba99-bd5311ce96d0-kube-api-access-lk2vw\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:10 crc kubenswrapper[4558]: I0120 17:48:10.933726 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-724ms\" (UniqueName: \"kubernetes.io/projected/5226a591-a9b2-48f7-82ed-f613fe64b5a9-kube-api-access-724ms\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:10 crc kubenswrapper[4558]: I0120 17:48:10.933738 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gk9k9\" (UniqueName: \"kubernetes.io/projected/100de1e9-d90a-4b00-9e80-82656bc418c1-kube-api-access-gk9k9\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:10 crc kubenswrapper[4558]: I0120 17:48:10.933750 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ktlbt\" (UniqueName: \"kubernetes.io/projected/b942958c-b4fa-4db7-aa26-9db8bd06c776-kube-api-access-ktlbt\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:11 crc kubenswrapper[4558]: I0120 17:48:11.016998 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-a0eb-account-create-update-hpcq8" event={"ID":"5226a591-a9b2-48f7-82ed-f613fe64b5a9","Type":"ContainerDied","Data":"456b2d79212225c57f534ee524132a72f33cf156ea959d91fb74c37af78c55b8"} Jan 20 17:48:11 crc kubenswrapper[4558]: I0120 17:48:11.017063 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="456b2d79212225c57f534ee524132a72f33cf156ea959d91fb74c37af78c55b8" Jan 20 17:48:11 crc kubenswrapper[4558]: I0120 17:48:11.017018 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-a0eb-account-create-update-hpcq8" Jan 20 17:48:11 crc kubenswrapper[4558]: I0120 17:48:11.018559 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-db-create-smx77" event={"ID":"100de1e9-d90a-4b00-9e80-82656bc418c1","Type":"ContainerDied","Data":"bd92c0102f3acb2e307a205c06479b7574e072b2d039b9c921772c7ba14a15ba"} Jan 20 17:48:11 crc kubenswrapper[4558]: I0120 17:48:11.018631 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bd92c0102f3acb2e307a205c06479b7574e072b2d039b9c921772c7ba14a15ba" Jan 20 17:48:11 crc kubenswrapper[4558]: I0120 17:48:11.018589 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-db-create-smx77" Jan 20 17:48:11 crc kubenswrapper[4558]: I0120 17:48:11.020011 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-f275-account-create-update-nwv6j" event={"ID":"5269b2ba-0ce2-4c5a-bd6f-6785afcf8c1b","Type":"ContainerDied","Data":"f8ca9917317a77783d0c34def684b79c8c4c15617c712f15827fcf1e7977e908"} Jan 20 17:48:11 crc kubenswrapper[4558]: I0120 17:48:11.020060 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-f275-account-create-update-nwv6j" Jan 20 17:48:11 crc kubenswrapper[4558]: I0120 17:48:11.020082 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f8ca9917317a77783d0c34def684b79c8c4c15617c712f15827fcf1e7977e908" Jan 20 17:48:11 crc kubenswrapper[4558]: I0120 17:48:11.021485 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-d1d9-account-create-update-6dzmm" Jan 20 17:48:11 crc kubenswrapper[4558]: I0120 17:48:11.021510 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-d1d9-account-create-update-6dzmm" event={"ID":"b942958c-b4fa-4db7-aa26-9db8bd06c776","Type":"ContainerDied","Data":"783a86bb82d2109d330d5b292ce3e6d552cf21feb5d6dae3c8e9a79746687046"} Jan 20 17:48:11 crc kubenswrapper[4558]: I0120 17:48:11.021579 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="783a86bb82d2109d330d5b292ce3e6d552cf21feb5d6dae3c8e9a79746687046" Jan 20 17:48:11 crc kubenswrapper[4558]: I0120 17:48:11.022911 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-db-create-2qpp6" event={"ID":"594d0c54-940e-4220-ba99-bd5311ce96d0","Type":"ContainerDied","Data":"621094734b81f81d70d1b6b9e4e7a2ca00f6a6394f064b19c188f946db1a2ec1"} Jan 20 17:48:11 crc kubenswrapper[4558]: I0120 17:48:11.022944 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="621094734b81f81d70d1b6b9e4e7a2ca00f6a6394f064b19c188f946db1a2ec1" Jan 20 17:48:11 crc kubenswrapper[4558]: I0120 17:48:11.023003 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-db-create-2qpp6" Jan 20 17:48:11 crc kubenswrapper[4558]: I0120 17:48:11.583797 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:48:11 crc kubenswrapper[4558]: I0120 17:48:11.584186 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="ab79df37-6e1e-48e4-9d74-1ca46c4ca054" containerName="proxy-httpd" containerID="cri-o://bcb6b58b36bcd934a03592e1d510cacff275865ab16e77166996c319f2d3638e" gracePeriod=30 Jan 20 17:48:11 crc kubenswrapper[4558]: I0120 17:48:11.584315 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="ab79df37-6e1e-48e4-9d74-1ca46c4ca054" containerName="sg-core" containerID="cri-o://37024b86cbc241dbb82a12fbede4737a0eed809370443a61b4d4719539dd6aaf" gracePeriod=30 Jan 20 17:48:11 crc kubenswrapper[4558]: I0120 17:48:11.584390 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="ab79df37-6e1e-48e4-9d74-1ca46c4ca054" containerName="ceilometer-notification-agent" containerID="cri-o://5c04ffd13da673e70119bf213d6bb8d4336e3c922fc83b22bd92581824cbe960" gracePeriod=30 Jan 20 17:48:11 crc kubenswrapper[4558]: I0120 17:48:11.584446 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="ab79df37-6e1e-48e4-9d74-1ca46c4ca054" containerName="ceilometer-central-agent" containerID="cri-o://7bfe93ad26e1d9b9f12d8d60414a087f7fa034a93f5a61859ced8c253e5990e3" gracePeriod=30 Jan 20 17:48:12 crc kubenswrapper[4558]: I0120 17:48:12.033639 4558 generic.go:334] "Generic (PLEG): container finished" podID="ab79df37-6e1e-48e4-9d74-1ca46c4ca054" containerID="37024b86cbc241dbb82a12fbede4737a0eed809370443a61b4d4719539dd6aaf" exitCode=2 Jan 20 17:48:12 crc kubenswrapper[4558]: I0120 17:48:12.033714 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"ab79df37-6e1e-48e4-9d74-1ca46c4ca054","Type":"ContainerDied","Data":"37024b86cbc241dbb82a12fbede4737a0eed809370443a61b4d4719539dd6aaf"} Jan 20 17:48:12 crc kubenswrapper[4558]: E0120 17:48:12.174033 4558 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podab79df37_6e1e_48e4_9d74_1ca46c4ca054.slice/crio-bcb6b58b36bcd934a03592e1d510cacff275865ab16e77166996c319f2d3638e.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podab79df37_6e1e_48e4_9d74_1ca46c4ca054.slice/crio-conmon-5c04ffd13da673e70119bf213d6bb8d4336e3c922fc83b22bd92581824cbe960.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podab79df37_6e1e_48e4_9d74_1ca46c4ca054.slice/crio-conmon-bcb6b58b36bcd934a03592e1d510cacff275865ab16e77166996c319f2d3638e.scope\": RecentStats: unable to find data in memory cache]" Jan 20 17:48:12 crc kubenswrapper[4558]: I0120 17:48:12.227641 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-db-sync-nd9l5"] Jan 20 17:48:12 crc kubenswrapper[4558]: E0120 17:48:12.228334 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5226a591-a9b2-48f7-82ed-f613fe64b5a9" 
containerName="mariadb-account-create-update" Jan 20 17:48:12 crc kubenswrapper[4558]: I0120 17:48:12.228369 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="5226a591-a9b2-48f7-82ed-f613fe64b5a9" containerName="mariadb-account-create-update" Jan 20 17:48:12 crc kubenswrapper[4558]: E0120 17:48:12.228530 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="594d0c54-940e-4220-ba99-bd5311ce96d0" containerName="mariadb-database-create" Jan 20 17:48:12 crc kubenswrapper[4558]: I0120 17:48:12.228546 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="594d0c54-940e-4220-ba99-bd5311ce96d0" containerName="mariadb-database-create" Jan 20 17:48:12 crc kubenswrapper[4558]: E0120 17:48:12.228560 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="100de1e9-d90a-4b00-9e80-82656bc418c1" containerName="mariadb-database-create" Jan 20 17:48:12 crc kubenswrapper[4558]: I0120 17:48:12.228586 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="100de1e9-d90a-4b00-9e80-82656bc418c1" containerName="mariadb-database-create" Jan 20 17:48:12 crc kubenswrapper[4558]: E0120 17:48:12.228621 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5269b2ba-0ce2-4c5a-bd6f-6785afcf8c1b" containerName="mariadb-account-create-update" Jan 20 17:48:12 crc kubenswrapper[4558]: I0120 17:48:12.228627 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="5269b2ba-0ce2-4c5a-bd6f-6785afcf8c1b" containerName="mariadb-account-create-update" Jan 20 17:48:12 crc kubenswrapper[4558]: E0120 17:48:12.228663 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b942958c-b4fa-4db7-aa26-9db8bd06c776" containerName="mariadb-account-create-update" Jan 20 17:48:12 crc kubenswrapper[4558]: I0120 17:48:12.228670 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b942958c-b4fa-4db7-aa26-9db8bd06c776" containerName="mariadb-account-create-update" Jan 20 17:48:12 crc kubenswrapper[4558]: E0120 17:48:12.228682 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="734caadf-9e91-4acd-9c76-0948d33c4c20" containerName="mariadb-database-create" Jan 20 17:48:12 crc kubenswrapper[4558]: I0120 17:48:12.228687 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="734caadf-9e91-4acd-9c76-0948d33c4c20" containerName="mariadb-database-create" Jan 20 17:48:12 crc kubenswrapper[4558]: I0120 17:48:12.228991 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="5269b2ba-0ce2-4c5a-bd6f-6785afcf8c1b" containerName="mariadb-account-create-update" Jan 20 17:48:12 crc kubenswrapper[4558]: I0120 17:48:12.229012 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="734caadf-9e91-4acd-9c76-0948d33c4c20" containerName="mariadb-database-create" Jan 20 17:48:12 crc kubenswrapper[4558]: I0120 17:48:12.229031 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="100de1e9-d90a-4b00-9e80-82656bc418c1" containerName="mariadb-database-create" Jan 20 17:48:12 crc kubenswrapper[4558]: I0120 17:48:12.229061 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="594d0c54-940e-4220-ba99-bd5311ce96d0" containerName="mariadb-database-create" Jan 20 17:48:12 crc kubenswrapper[4558]: I0120 17:48:12.229070 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="5226a591-a9b2-48f7-82ed-f613fe64b5a9" containerName="mariadb-account-create-update" Jan 20 17:48:12 crc kubenswrapper[4558]: I0120 17:48:12.229081 4558 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="b942958c-b4fa-4db7-aa26-9db8bd06c776" containerName="mariadb-account-create-update" Jan 20 17:48:12 crc kubenswrapper[4558]: I0120 17:48:12.229967 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-nd9l5" Jan 20 17:48:12 crc kubenswrapper[4558]: I0120 17:48:12.233399 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-conductor-scripts" Jan 20 17:48:12 crc kubenswrapper[4558]: I0120 17:48:12.234529 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-conductor-config-data" Jan 20 17:48:12 crc kubenswrapper[4558]: I0120 17:48:12.234663 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-nova-dockercfg-pbs2c" Jan 20 17:48:12 crc kubenswrapper[4558]: I0120 17:48:12.247302 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-db-sync-nd9l5"] Jan 20 17:48:12 crc kubenswrapper[4558]: I0120 17:48:12.373152 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vkh5b\" (UniqueName: \"kubernetes.io/projected/5447b108-6890-4614-88fa-3df24ce0d9b5-kube-api-access-vkh5b\") pod \"nova-cell0-conductor-db-sync-nd9l5\" (UID: \"5447b108-6890-4614-88fa-3df24ce0d9b5\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-nd9l5" Jan 20 17:48:12 crc kubenswrapper[4558]: I0120 17:48:12.373224 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5447b108-6890-4614-88fa-3df24ce0d9b5-scripts\") pod \"nova-cell0-conductor-db-sync-nd9l5\" (UID: \"5447b108-6890-4614-88fa-3df24ce0d9b5\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-nd9l5" Jan 20 17:48:12 crc kubenswrapper[4558]: I0120 17:48:12.373299 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5447b108-6890-4614-88fa-3df24ce0d9b5-config-data\") pod \"nova-cell0-conductor-db-sync-nd9l5\" (UID: \"5447b108-6890-4614-88fa-3df24ce0d9b5\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-nd9l5" Jan 20 17:48:12 crc kubenswrapper[4558]: I0120 17:48:12.373324 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5447b108-6890-4614-88fa-3df24ce0d9b5-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-nd9l5\" (UID: \"5447b108-6890-4614-88fa-3df24ce0d9b5\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-nd9l5" Jan 20 17:48:12 crc kubenswrapper[4558]: I0120 17:48:12.475326 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5447b108-6890-4614-88fa-3df24ce0d9b5-config-data\") pod \"nova-cell0-conductor-db-sync-nd9l5\" (UID: \"5447b108-6890-4614-88fa-3df24ce0d9b5\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-nd9l5" Jan 20 17:48:12 crc kubenswrapper[4558]: I0120 17:48:12.475548 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5447b108-6890-4614-88fa-3df24ce0d9b5-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-nd9l5\" (UID: \"5447b108-6890-4614-88fa-3df24ce0d9b5\") " 
pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-nd9l5" Jan 20 17:48:12 crc kubenswrapper[4558]: I0120 17:48:12.475842 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vkh5b\" (UniqueName: \"kubernetes.io/projected/5447b108-6890-4614-88fa-3df24ce0d9b5-kube-api-access-vkh5b\") pod \"nova-cell0-conductor-db-sync-nd9l5\" (UID: \"5447b108-6890-4614-88fa-3df24ce0d9b5\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-nd9l5" Jan 20 17:48:12 crc kubenswrapper[4558]: I0120 17:48:12.475913 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5447b108-6890-4614-88fa-3df24ce0d9b5-scripts\") pod \"nova-cell0-conductor-db-sync-nd9l5\" (UID: \"5447b108-6890-4614-88fa-3df24ce0d9b5\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-nd9l5" Jan 20 17:48:12 crc kubenswrapper[4558]: I0120 17:48:12.481505 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5447b108-6890-4614-88fa-3df24ce0d9b5-scripts\") pod \"nova-cell0-conductor-db-sync-nd9l5\" (UID: \"5447b108-6890-4614-88fa-3df24ce0d9b5\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-nd9l5" Jan 20 17:48:12 crc kubenswrapper[4558]: I0120 17:48:12.482584 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5447b108-6890-4614-88fa-3df24ce0d9b5-config-data\") pod \"nova-cell0-conductor-db-sync-nd9l5\" (UID: \"5447b108-6890-4614-88fa-3df24ce0d9b5\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-nd9l5" Jan 20 17:48:12 crc kubenswrapper[4558]: I0120 17:48:12.483634 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5447b108-6890-4614-88fa-3df24ce0d9b5-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-nd9l5\" (UID: \"5447b108-6890-4614-88fa-3df24ce0d9b5\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-nd9l5" Jan 20 17:48:12 crc kubenswrapper[4558]: I0120 17:48:12.492729 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vkh5b\" (UniqueName: \"kubernetes.io/projected/5447b108-6890-4614-88fa-3df24ce0d9b5-kube-api-access-vkh5b\") pod \"nova-cell0-conductor-db-sync-nd9l5\" (UID: \"5447b108-6890-4614-88fa-3df24ce0d9b5\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-nd9l5" Jan 20 17:48:12 crc kubenswrapper[4558]: I0120 17:48:12.551337 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-nd9l5" Jan 20 17:48:12 crc kubenswrapper[4558]: I0120 17:48:12.766924 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/swift-proxy-67dccf5df6-qnbjc" Jan 20 17:48:12 crc kubenswrapper[4558]: I0120 17:48:12.770713 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/swift-proxy-67dccf5df6-qnbjc" Jan 20 17:48:12 crc kubenswrapper[4558]: I0120 17:48:12.997960 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-db-sync-nd9l5"] Jan 20 17:48:12 crc kubenswrapper[4558]: W0120 17:48:12.998447 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5447b108_6890_4614_88fa_3df24ce0d9b5.slice/crio-b20fa1fa6725f9d7e7fbb7f49e4e9857f30680565cb6663e0a824c526960796d WatchSource:0}: Error finding container b20fa1fa6725f9d7e7fbb7f49e4e9857f30680565cb6663e0a824c526960796d: Status 404 returned error can't find the container with id b20fa1fa6725f9d7e7fbb7f49e4e9857f30680565cb6663e0a824c526960796d Jan 20 17:48:13 crc kubenswrapper[4558]: I0120 17:48:13.043674 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-nd9l5" event={"ID":"5447b108-6890-4614-88fa-3df24ce0d9b5","Type":"ContainerStarted","Data":"b20fa1fa6725f9d7e7fbb7f49e4e9857f30680565cb6663e0a824c526960796d"} Jan 20 17:48:13 crc kubenswrapper[4558]: I0120 17:48:13.047729 4558 generic.go:334] "Generic (PLEG): container finished" podID="ab79df37-6e1e-48e4-9d74-1ca46c4ca054" containerID="bcb6b58b36bcd934a03592e1d510cacff275865ab16e77166996c319f2d3638e" exitCode=0 Jan 20 17:48:13 crc kubenswrapper[4558]: I0120 17:48:13.047758 4558 generic.go:334] "Generic (PLEG): container finished" podID="ab79df37-6e1e-48e4-9d74-1ca46c4ca054" containerID="5c04ffd13da673e70119bf213d6bb8d4336e3c922fc83b22bd92581824cbe960" exitCode=0 Jan 20 17:48:13 crc kubenswrapper[4558]: I0120 17:48:13.047819 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"ab79df37-6e1e-48e4-9d74-1ca46c4ca054","Type":"ContainerDied","Data":"bcb6b58b36bcd934a03592e1d510cacff275865ab16e77166996c319f2d3638e"} Jan 20 17:48:13 crc kubenswrapper[4558]: I0120 17:48:13.047862 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"ab79df37-6e1e-48e4-9d74-1ca46c4ca054","Type":"ContainerDied","Data":"5c04ffd13da673e70119bf213d6bb8d4336e3c922fc83b22bd92581824cbe960"} Jan 20 17:48:14 crc kubenswrapper[4558]: I0120 17:48:14.057517 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-nd9l5" event={"ID":"5447b108-6890-4614-88fa-3df24ce0d9b5","Type":"ContainerStarted","Data":"cda249e7881a71ca863707e278b44e93b7b2ddc35df234ef16f4b2629fda0d1a"} Jan 20 17:48:14 crc kubenswrapper[4558]: I0120 17:48:14.072346 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-nd9l5" podStartSLOduration=2.072327358 podStartE2EDuration="2.072327358s" podCreationTimestamp="2026-01-20 17:48:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:48:14.071669812 +0000 UTC m=+3987.832007779" watchObservedRunningTime="2026-01-20 17:48:14.072327358 
+0000 UTC m=+3987.832665325" Jan 20 17:48:15 crc kubenswrapper[4558]: I0120 17:48:15.137966 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:48:15 crc kubenswrapper[4558]: I0120 17:48:15.138749 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="05f4b910-ff7c-4039-8ff7-24687eea5e74" containerName="glance-log" containerID="cri-o://54ce3695fde6a7ec6419832384135e5cf1001a4b3826a28d0554442580b148c3" gracePeriod=30 Jan 20 17:48:15 crc kubenswrapper[4558]: I0120 17:48:15.138986 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="05f4b910-ff7c-4039-8ff7-24687eea5e74" containerName="glance-httpd" containerID="cri-o://aa13f111b95eb70e30ae67cf411f532cb413f039d01f38c02d51d6a30252389b" gracePeriod=30 Jan 20 17:48:15 crc kubenswrapper[4558]: I0120 17:48:15.144441 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="05f4b910-ff7c-4039-8ff7-24687eea5e74" containerName="glance-httpd" probeResult="failure" output="Get \"https://10.217.1.121:9292/healthcheck\": EOF" Jan 20 17:48:15 crc kubenswrapper[4558]: I0120 17:48:15.147358 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="05f4b910-ff7c-4039-8ff7-24687eea5e74" containerName="glance-log" probeResult="failure" output="Get \"https://10.217.1.121:9292/healthcheck\": EOF" Jan 20 17:48:15 crc kubenswrapper[4558]: I0120 17:48:15.147390 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="05f4b910-ff7c-4039-8ff7-24687eea5e74" containerName="glance-log" probeResult="failure" output="Get \"https://10.217.1.121:9292/healthcheck\": EOF" Jan 20 17:48:15 crc kubenswrapper[4558]: I0120 17:48:15.147636 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="05f4b910-ff7c-4039-8ff7-24687eea5e74" containerName="glance-httpd" probeResult="failure" output="Get \"https://10.217.1.121:9292/healthcheck\": EOF" Jan 20 17:48:15 crc kubenswrapper[4558]: I0120 17:48:15.840590 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:48:15 crc kubenswrapper[4558]: I0120 17:48:15.841128 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="d3c4b74b-cd31-4245-91f0-9718f8896f61" containerName="glance-log" containerID="cri-o://73b8176cf31c48afe8b2940b6e65ec9b3de704e8761f5cbe0a0a28407ebce072" gracePeriod=30 Jan 20 17:48:15 crc kubenswrapper[4558]: I0120 17:48:15.841291 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="d3c4b74b-cd31-4245-91f0-9718f8896f61" containerName="glance-httpd" containerID="cri-o://c51450fc82fc389fd4479f32722f045d8117aac56951b6ea66a9b0c5dec8678c" gracePeriod=30 Jan 20 17:48:16 crc kubenswrapper[4558]: I0120 17:48:16.098035 4558 generic.go:334] "Generic (PLEG): container finished" podID="ab79df37-6e1e-48e4-9d74-1ca46c4ca054" containerID="7bfe93ad26e1d9b9f12d8d60414a087f7fa034a93f5a61859ced8c253e5990e3" exitCode=0 Jan 20 17:48:16 crc kubenswrapper[4558]: I0120 17:48:16.098437 4558 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"ab79df37-6e1e-48e4-9d74-1ca46c4ca054","Type":"ContainerDied","Data":"7bfe93ad26e1d9b9f12d8d60414a087f7fa034a93f5a61859ced8c253e5990e3"} Jan 20 17:48:16 crc kubenswrapper[4558]: I0120 17:48:16.104498 4558 generic.go:334] "Generic (PLEG): container finished" podID="05f4b910-ff7c-4039-8ff7-24687eea5e74" containerID="54ce3695fde6a7ec6419832384135e5cf1001a4b3826a28d0554442580b148c3" exitCode=143 Jan 20 17:48:16 crc kubenswrapper[4558]: I0120 17:48:16.104569 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"05f4b910-ff7c-4039-8ff7-24687eea5e74","Type":"ContainerDied","Data":"54ce3695fde6a7ec6419832384135e5cf1001a4b3826a28d0554442580b148c3"} Jan 20 17:48:16 crc kubenswrapper[4558]: I0120 17:48:16.106764 4558 generic.go:334] "Generic (PLEG): container finished" podID="d3c4b74b-cd31-4245-91f0-9718f8896f61" containerID="73b8176cf31c48afe8b2940b6e65ec9b3de704e8761f5cbe0a0a28407ebce072" exitCode=143 Jan 20 17:48:16 crc kubenswrapper[4558]: I0120 17:48:16.106860 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"d3c4b74b-cd31-4245-91f0-9718f8896f61","Type":"ContainerDied","Data":"73b8176cf31c48afe8b2940b6e65ec9b3de704e8761f5cbe0a0a28407ebce072"} Jan 20 17:48:16 crc kubenswrapper[4558]: I0120 17:48:16.162179 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:16 crc kubenswrapper[4558]: I0120 17:48:16.263123 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab79df37-6e1e-48e4-9d74-1ca46c4ca054-combined-ca-bundle\") pod \"ab79df37-6e1e-48e4-9d74-1ca46c4ca054\" (UID: \"ab79df37-6e1e-48e4-9d74-1ca46c4ca054\") " Jan 20 17:48:16 crc kubenswrapper[4558]: I0120 17:48:16.263244 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab79df37-6e1e-48e4-9d74-1ca46c4ca054-config-data\") pod \"ab79df37-6e1e-48e4-9d74-1ca46c4ca054\" (UID: \"ab79df37-6e1e-48e4-9d74-1ca46c4ca054\") " Jan 20 17:48:16 crc kubenswrapper[4558]: I0120 17:48:16.263379 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ab79df37-6e1e-48e4-9d74-1ca46c4ca054-scripts\") pod \"ab79df37-6e1e-48e4-9d74-1ca46c4ca054\" (UID: \"ab79df37-6e1e-48e4-9d74-1ca46c4ca054\") " Jan 20 17:48:16 crc kubenswrapper[4558]: I0120 17:48:16.263427 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ab79df37-6e1e-48e4-9d74-1ca46c4ca054-sg-core-conf-yaml\") pod \"ab79df37-6e1e-48e4-9d74-1ca46c4ca054\" (UID: \"ab79df37-6e1e-48e4-9d74-1ca46c4ca054\") " Jan 20 17:48:16 crc kubenswrapper[4558]: I0120 17:48:16.263471 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-288ms\" (UniqueName: \"kubernetes.io/projected/ab79df37-6e1e-48e4-9d74-1ca46c4ca054-kube-api-access-288ms\") pod \"ab79df37-6e1e-48e4-9d74-1ca46c4ca054\" (UID: \"ab79df37-6e1e-48e4-9d74-1ca46c4ca054\") " Jan 20 17:48:16 crc kubenswrapper[4558]: I0120 17:48:16.263567 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/ab79df37-6e1e-48e4-9d74-1ca46c4ca054-log-httpd\") pod \"ab79df37-6e1e-48e4-9d74-1ca46c4ca054\" (UID: \"ab79df37-6e1e-48e4-9d74-1ca46c4ca054\") " Jan 20 17:48:16 crc kubenswrapper[4558]: I0120 17:48:16.263668 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ab79df37-6e1e-48e4-9d74-1ca46c4ca054-run-httpd\") pod \"ab79df37-6e1e-48e4-9d74-1ca46c4ca054\" (UID: \"ab79df37-6e1e-48e4-9d74-1ca46c4ca054\") " Jan 20 17:48:16 crc kubenswrapper[4558]: I0120 17:48:16.264005 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ab79df37-6e1e-48e4-9d74-1ca46c4ca054-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "ab79df37-6e1e-48e4-9d74-1ca46c4ca054" (UID: "ab79df37-6e1e-48e4-9d74-1ca46c4ca054"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:48:16 crc kubenswrapper[4558]: I0120 17:48:16.264094 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ab79df37-6e1e-48e4-9d74-1ca46c4ca054-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "ab79df37-6e1e-48e4-9d74-1ca46c4ca054" (UID: "ab79df37-6e1e-48e4-9d74-1ca46c4ca054"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:48:16 crc kubenswrapper[4558]: I0120 17:48:16.264469 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ab79df37-6e1e-48e4-9d74-1ca46c4ca054-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:16 crc kubenswrapper[4558]: I0120 17:48:16.264489 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ab79df37-6e1e-48e4-9d74-1ca46c4ca054-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:16 crc kubenswrapper[4558]: I0120 17:48:16.272273 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab79df37-6e1e-48e4-9d74-1ca46c4ca054-kube-api-access-288ms" (OuterVolumeSpecName: "kube-api-access-288ms") pod "ab79df37-6e1e-48e4-9d74-1ca46c4ca054" (UID: "ab79df37-6e1e-48e4-9d74-1ca46c4ca054"). InnerVolumeSpecName "kube-api-access-288ms". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:48:16 crc kubenswrapper[4558]: I0120 17:48:16.286323 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab79df37-6e1e-48e4-9d74-1ca46c4ca054-scripts" (OuterVolumeSpecName: "scripts") pod "ab79df37-6e1e-48e4-9d74-1ca46c4ca054" (UID: "ab79df37-6e1e-48e4-9d74-1ca46c4ca054"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:48:16 crc kubenswrapper[4558]: I0120 17:48:16.291492 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab79df37-6e1e-48e4-9d74-1ca46c4ca054-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "ab79df37-6e1e-48e4-9d74-1ca46c4ca054" (UID: "ab79df37-6e1e-48e4-9d74-1ca46c4ca054"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:48:16 crc kubenswrapper[4558]: I0120 17:48:16.338266 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab79df37-6e1e-48e4-9d74-1ca46c4ca054-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ab79df37-6e1e-48e4-9d74-1ca46c4ca054" (UID: "ab79df37-6e1e-48e4-9d74-1ca46c4ca054"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:48:16 crc kubenswrapper[4558]: I0120 17:48:16.367062 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab79df37-6e1e-48e4-9d74-1ca46c4ca054-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:16 crc kubenswrapper[4558]: I0120 17:48:16.367094 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ab79df37-6e1e-48e4-9d74-1ca46c4ca054-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:16 crc kubenswrapper[4558]: I0120 17:48:16.367107 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ab79df37-6e1e-48e4-9d74-1ca46c4ca054-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:16 crc kubenswrapper[4558]: I0120 17:48:16.367121 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-288ms\" (UniqueName: \"kubernetes.io/projected/ab79df37-6e1e-48e4-9d74-1ca46c4ca054-kube-api-access-288ms\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:16 crc kubenswrapper[4558]: I0120 17:48:16.369239 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab79df37-6e1e-48e4-9d74-1ca46c4ca054-config-data" (OuterVolumeSpecName: "config-data") pod "ab79df37-6e1e-48e4-9d74-1ca46c4ca054" (UID: "ab79df37-6e1e-48e4-9d74-1ca46c4ca054"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:48:16 crc kubenswrapper[4558]: I0120 17:48:16.469137 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab79df37-6e1e-48e4-9d74-1ca46c4ca054-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:17 crc kubenswrapper[4558]: I0120 17:48:17.119129 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"ab79df37-6e1e-48e4-9d74-1ca46c4ca054","Type":"ContainerDied","Data":"4c6772e9b1fd65161e20996f358af9f3a40d8a70c47bf1f629980c90f3723d20"} Jan 20 17:48:17 crc kubenswrapper[4558]: I0120 17:48:17.119211 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:17 crc kubenswrapper[4558]: I0120 17:48:17.119512 4558 scope.go:117] "RemoveContainer" containerID="bcb6b58b36bcd934a03592e1d510cacff275865ab16e77166996c319f2d3638e" Jan 20 17:48:17 crc kubenswrapper[4558]: I0120 17:48:17.140702 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:48:17 crc kubenswrapper[4558]: I0120 17:48:17.146260 4558 scope.go:117] "RemoveContainer" containerID="37024b86cbc241dbb82a12fbede4737a0eed809370443a61b4d4719539dd6aaf" Jan 20 17:48:17 crc kubenswrapper[4558]: I0120 17:48:17.157631 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:48:17 crc kubenswrapper[4558]: I0120 17:48:17.170130 4558 scope.go:117] "RemoveContainer" containerID="5c04ffd13da673e70119bf213d6bb8d4336e3c922fc83b22bd92581824cbe960" Jan 20 17:48:17 crc kubenswrapper[4558]: I0120 17:48:17.171746 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:48:17 crc kubenswrapper[4558]: E0120 17:48:17.172305 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab79df37-6e1e-48e4-9d74-1ca46c4ca054" containerName="sg-core" Jan 20 17:48:17 crc kubenswrapper[4558]: I0120 17:48:17.172325 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab79df37-6e1e-48e4-9d74-1ca46c4ca054" containerName="sg-core" Jan 20 17:48:17 crc kubenswrapper[4558]: E0120 17:48:17.172350 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab79df37-6e1e-48e4-9d74-1ca46c4ca054" containerName="ceilometer-central-agent" Jan 20 17:48:17 crc kubenswrapper[4558]: I0120 17:48:17.172356 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab79df37-6e1e-48e4-9d74-1ca46c4ca054" containerName="ceilometer-central-agent" Jan 20 17:48:17 crc kubenswrapper[4558]: E0120 17:48:17.172379 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab79df37-6e1e-48e4-9d74-1ca46c4ca054" containerName="ceilometer-notification-agent" Jan 20 17:48:17 crc kubenswrapper[4558]: I0120 17:48:17.172384 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab79df37-6e1e-48e4-9d74-1ca46c4ca054" containerName="ceilometer-notification-agent" Jan 20 17:48:17 crc kubenswrapper[4558]: E0120 17:48:17.172398 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab79df37-6e1e-48e4-9d74-1ca46c4ca054" containerName="proxy-httpd" Jan 20 17:48:17 crc kubenswrapper[4558]: I0120 17:48:17.172403 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab79df37-6e1e-48e4-9d74-1ca46c4ca054" containerName="proxy-httpd" Jan 20 17:48:17 crc kubenswrapper[4558]: I0120 17:48:17.172582 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab79df37-6e1e-48e4-9d74-1ca46c4ca054" containerName="proxy-httpd" Jan 20 17:48:17 crc kubenswrapper[4558]: I0120 17:48:17.172608 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab79df37-6e1e-48e4-9d74-1ca46c4ca054" containerName="sg-core" Jan 20 17:48:17 crc kubenswrapper[4558]: I0120 17:48:17.172632 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab79df37-6e1e-48e4-9d74-1ca46c4ca054" containerName="ceilometer-central-agent" Jan 20 17:48:17 crc kubenswrapper[4558]: I0120 17:48:17.172644 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab79df37-6e1e-48e4-9d74-1ca46c4ca054" containerName="ceilometer-notification-agent" Jan 20 17:48:17 crc kubenswrapper[4558]: 
I0120 17:48:17.176257 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:17 crc kubenswrapper[4558]: I0120 17:48:17.185220 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:48:17 crc kubenswrapper[4558]: I0120 17:48:17.185242 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:48:17 crc kubenswrapper[4558]: I0120 17:48:17.185278 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:48:17 crc kubenswrapper[4558]: I0120 17:48:17.200557 4558 scope.go:117] "RemoveContainer" containerID="7bfe93ad26e1d9b9f12d8d60414a087f7fa034a93f5a61859ced8c253e5990e3" Jan 20 17:48:17 crc kubenswrapper[4558]: I0120 17:48:17.289083 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/86be6735-87cb-4609-9f3c-334ce0eac39b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"86be6735-87cb-4609-9f3c-334ce0eac39b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:17 crc kubenswrapper[4558]: I0120 17:48:17.289138 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/86be6735-87cb-4609-9f3c-334ce0eac39b-log-httpd\") pod \"ceilometer-0\" (UID: \"86be6735-87cb-4609-9f3c-334ce0eac39b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:17 crc kubenswrapper[4558]: I0120 17:48:17.289278 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/86be6735-87cb-4609-9f3c-334ce0eac39b-config-data\") pod \"ceilometer-0\" (UID: \"86be6735-87cb-4609-9f3c-334ce0eac39b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:17 crc kubenswrapper[4558]: I0120 17:48:17.289415 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mfh9x\" (UniqueName: \"kubernetes.io/projected/86be6735-87cb-4609-9f3c-334ce0eac39b-kube-api-access-mfh9x\") pod \"ceilometer-0\" (UID: \"86be6735-87cb-4609-9f3c-334ce0eac39b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:17 crc kubenswrapper[4558]: I0120 17:48:17.289576 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/86be6735-87cb-4609-9f3c-334ce0eac39b-run-httpd\") pod \"ceilometer-0\" (UID: \"86be6735-87cb-4609-9f3c-334ce0eac39b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:17 crc kubenswrapper[4558]: I0120 17:48:17.289670 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/86be6735-87cb-4609-9f3c-334ce0eac39b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"86be6735-87cb-4609-9f3c-334ce0eac39b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:17 crc kubenswrapper[4558]: I0120 17:48:17.289717 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/86be6735-87cb-4609-9f3c-334ce0eac39b-scripts\") pod \"ceilometer-0\" (UID: \"86be6735-87cb-4609-9f3c-334ce0eac39b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:17 crc 
kubenswrapper[4558]: I0120 17:48:17.391967 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/86be6735-87cb-4609-9f3c-334ce0eac39b-run-httpd\") pod \"ceilometer-0\" (UID: \"86be6735-87cb-4609-9f3c-334ce0eac39b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:17 crc kubenswrapper[4558]: I0120 17:48:17.392055 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/86be6735-87cb-4609-9f3c-334ce0eac39b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"86be6735-87cb-4609-9f3c-334ce0eac39b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:17 crc kubenswrapper[4558]: I0120 17:48:17.392095 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/86be6735-87cb-4609-9f3c-334ce0eac39b-scripts\") pod \"ceilometer-0\" (UID: \"86be6735-87cb-4609-9f3c-334ce0eac39b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:17 crc kubenswrapper[4558]: I0120 17:48:17.392188 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/86be6735-87cb-4609-9f3c-334ce0eac39b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"86be6735-87cb-4609-9f3c-334ce0eac39b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:17 crc kubenswrapper[4558]: I0120 17:48:17.392242 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/86be6735-87cb-4609-9f3c-334ce0eac39b-log-httpd\") pod \"ceilometer-0\" (UID: \"86be6735-87cb-4609-9f3c-334ce0eac39b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:17 crc kubenswrapper[4558]: I0120 17:48:17.392320 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/86be6735-87cb-4609-9f3c-334ce0eac39b-config-data\") pod \"ceilometer-0\" (UID: \"86be6735-87cb-4609-9f3c-334ce0eac39b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:17 crc kubenswrapper[4558]: I0120 17:48:17.392407 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mfh9x\" (UniqueName: \"kubernetes.io/projected/86be6735-87cb-4609-9f3c-334ce0eac39b-kube-api-access-mfh9x\") pod \"ceilometer-0\" (UID: \"86be6735-87cb-4609-9f3c-334ce0eac39b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:17 crc kubenswrapper[4558]: I0120 17:48:17.392638 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/86be6735-87cb-4609-9f3c-334ce0eac39b-run-httpd\") pod \"ceilometer-0\" (UID: \"86be6735-87cb-4609-9f3c-334ce0eac39b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:17 crc kubenswrapper[4558]: I0120 17:48:17.392984 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/86be6735-87cb-4609-9f3c-334ce0eac39b-log-httpd\") pod \"ceilometer-0\" (UID: \"86be6735-87cb-4609-9f3c-334ce0eac39b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:17 crc kubenswrapper[4558]: I0120 17:48:17.398014 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/86be6735-87cb-4609-9f3c-334ce0eac39b-scripts\") pod \"ceilometer-0\" (UID: \"86be6735-87cb-4609-9f3c-334ce0eac39b\") " 
pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:17 crc kubenswrapper[4558]: I0120 17:48:17.398655 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/86be6735-87cb-4609-9f3c-334ce0eac39b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"86be6735-87cb-4609-9f3c-334ce0eac39b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:17 crc kubenswrapper[4558]: I0120 17:48:17.399809 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/86be6735-87cb-4609-9f3c-334ce0eac39b-config-data\") pod \"ceilometer-0\" (UID: \"86be6735-87cb-4609-9f3c-334ce0eac39b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:17 crc kubenswrapper[4558]: I0120 17:48:17.401805 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/86be6735-87cb-4609-9f3c-334ce0eac39b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"86be6735-87cb-4609-9f3c-334ce0eac39b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:17 crc kubenswrapper[4558]: I0120 17:48:17.413335 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mfh9x\" (UniqueName: \"kubernetes.io/projected/86be6735-87cb-4609-9f3c-334ce0eac39b-kube-api-access-mfh9x\") pod \"ceilometer-0\" (UID: \"86be6735-87cb-4609-9f3c-334ce0eac39b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:17 crc kubenswrapper[4558]: I0120 17:48:17.502784 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:17 crc kubenswrapper[4558]: I0120 17:48:17.960635 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:48:18 crc kubenswrapper[4558]: I0120 17:48:18.132716 4558 generic.go:334] "Generic (PLEG): container finished" podID="5447b108-6890-4614-88fa-3df24ce0d9b5" containerID="cda249e7881a71ca863707e278b44e93b7b2ddc35df234ef16f4b2629fda0d1a" exitCode=0 Jan 20 17:48:18 crc kubenswrapper[4558]: I0120 17:48:18.132800 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-nd9l5" event={"ID":"5447b108-6890-4614-88fa-3df24ce0d9b5","Type":"ContainerDied","Data":"cda249e7881a71ca863707e278b44e93b7b2ddc35df234ef16f4b2629fda0d1a"} Jan 20 17:48:18 crc kubenswrapper[4558]: I0120 17:48:18.137430 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"86be6735-87cb-4609-9f3c-334ce0eac39b","Type":"ContainerStarted","Data":"eef1445fe3e2984ca1d60609ec107a59cfcfbea24b216432e04732ffad94c0a5"} Jan 20 17:48:18 crc kubenswrapper[4558]: I0120 17:48:18.594743 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ab79df37-6e1e-48e4-9d74-1ca46c4ca054" path="/var/lib/kubelet/pods/ab79df37-6e1e-48e4-9d74-1ca46c4ca054/volumes" Jan 20 17:48:19 crc kubenswrapper[4558]: I0120 17:48:19.155796 4558 generic.go:334] "Generic (PLEG): container finished" podID="d3c4b74b-cd31-4245-91f0-9718f8896f61" containerID="c51450fc82fc389fd4479f32722f045d8117aac56951b6ea66a9b0c5dec8678c" exitCode=0 Jan 20 17:48:19 crc kubenswrapper[4558]: I0120 17:48:19.155874 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" 
event={"ID":"d3c4b74b-cd31-4245-91f0-9718f8896f61","Type":"ContainerDied","Data":"c51450fc82fc389fd4479f32722f045d8117aac56951b6ea66a9b0c5dec8678c"} Jan 20 17:48:19 crc kubenswrapper[4558]: I0120 17:48:19.161557 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"86be6735-87cb-4609-9f3c-334ce0eac39b","Type":"ContainerStarted","Data":"b2e5ad3a07fd9605c52daa88921db45628093c3ce26795b7339404cc43ea0e8e"} Jan 20 17:48:19 crc kubenswrapper[4558]: I0120 17:48:19.560683 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:48:19 crc kubenswrapper[4558]: I0120 17:48:19.650158 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d3c4b74b-cd31-4245-91f0-9718f8896f61-internal-tls-certs\") pod \"d3c4b74b-cd31-4245-91f0-9718f8896f61\" (UID: \"d3c4b74b-cd31-4245-91f0-9718f8896f61\") " Jan 20 17:48:19 crc kubenswrapper[4558]: I0120 17:48:19.650230 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d3c4b74b-cd31-4245-91f0-9718f8896f61-httpd-run\") pod \"d3c4b74b-cd31-4245-91f0-9718f8896f61\" (UID: \"d3c4b74b-cd31-4245-91f0-9718f8896f61\") " Jan 20 17:48:19 crc kubenswrapper[4558]: I0120 17:48:19.650289 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3c4b74b-cd31-4245-91f0-9718f8896f61-config-data\") pod \"d3c4b74b-cd31-4245-91f0-9718f8896f61\" (UID: \"d3c4b74b-cd31-4245-91f0-9718f8896f61\") " Jan 20 17:48:19 crc kubenswrapper[4558]: I0120 17:48:19.650315 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"d3c4b74b-cd31-4245-91f0-9718f8896f61\" (UID: \"d3c4b74b-cd31-4245-91f0-9718f8896f61\") " Jan 20 17:48:19 crc kubenswrapper[4558]: I0120 17:48:19.650352 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d3c4b74b-cd31-4245-91f0-9718f8896f61-logs\") pod \"d3c4b74b-cd31-4245-91f0-9718f8896f61\" (UID: \"d3c4b74b-cd31-4245-91f0-9718f8896f61\") " Jan 20 17:48:19 crc kubenswrapper[4558]: I0120 17:48:19.650373 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3c4b74b-cd31-4245-91f0-9718f8896f61-combined-ca-bundle\") pod \"d3c4b74b-cd31-4245-91f0-9718f8896f61\" (UID: \"d3c4b74b-cd31-4245-91f0-9718f8896f61\") " Jan 20 17:48:19 crc kubenswrapper[4558]: I0120 17:48:19.650469 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qqkgb\" (UniqueName: \"kubernetes.io/projected/d3c4b74b-cd31-4245-91f0-9718f8896f61-kube-api-access-qqkgb\") pod \"d3c4b74b-cd31-4245-91f0-9718f8896f61\" (UID: \"d3c4b74b-cd31-4245-91f0-9718f8896f61\") " Jan 20 17:48:19 crc kubenswrapper[4558]: I0120 17:48:19.650514 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d3c4b74b-cd31-4245-91f0-9718f8896f61-scripts\") pod \"d3c4b74b-cd31-4245-91f0-9718f8896f61\" (UID: \"d3c4b74b-cd31-4245-91f0-9718f8896f61\") " Jan 20 17:48:19 crc kubenswrapper[4558]: I0120 17:48:19.650821 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded 
for volume "kubernetes.io/empty-dir/d3c4b74b-cd31-4245-91f0-9718f8896f61-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "d3c4b74b-cd31-4245-91f0-9718f8896f61" (UID: "d3c4b74b-cd31-4245-91f0-9718f8896f61"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:48:19 crc kubenswrapper[4558]: I0120 17:48:19.651656 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d3c4b74b-cd31-4245-91f0-9718f8896f61-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:19 crc kubenswrapper[4558]: I0120 17:48:19.653987 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d3c4b74b-cd31-4245-91f0-9718f8896f61-logs" (OuterVolumeSpecName: "logs") pod "d3c4b74b-cd31-4245-91f0-9718f8896f61" (UID: "d3c4b74b-cd31-4245-91f0-9718f8896f61"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:48:19 crc kubenswrapper[4558]: I0120 17:48:19.655962 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "glance") pod "d3c4b74b-cd31-4245-91f0-9718f8896f61" (UID: "d3c4b74b-cd31-4245-91f0-9718f8896f61"). InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:48:19 crc kubenswrapper[4558]: I0120 17:48:19.656452 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d3c4b74b-cd31-4245-91f0-9718f8896f61-scripts" (OuterVolumeSpecName: "scripts") pod "d3c4b74b-cd31-4245-91f0-9718f8896f61" (UID: "d3c4b74b-cd31-4245-91f0-9718f8896f61"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:48:19 crc kubenswrapper[4558]: I0120 17:48:19.662807 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d3c4b74b-cd31-4245-91f0-9718f8896f61-kube-api-access-qqkgb" (OuterVolumeSpecName: "kube-api-access-qqkgb") pod "d3c4b74b-cd31-4245-91f0-9718f8896f61" (UID: "d3c4b74b-cd31-4245-91f0-9718f8896f61"). InnerVolumeSpecName "kube-api-access-qqkgb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:48:19 crc kubenswrapper[4558]: I0120 17:48:19.710978 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d3c4b74b-cd31-4245-91f0-9718f8896f61-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d3c4b74b-cd31-4245-91f0-9718f8896f61" (UID: "d3c4b74b-cd31-4245-91f0-9718f8896f61"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:48:19 crc kubenswrapper[4558]: I0120 17:48:19.715074 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d3c4b74b-cd31-4245-91f0-9718f8896f61-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "d3c4b74b-cd31-4245-91f0-9718f8896f61" (UID: "d3c4b74b-cd31-4245-91f0-9718f8896f61"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:48:19 crc kubenswrapper[4558]: I0120 17:48:19.718024 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d3c4b74b-cd31-4245-91f0-9718f8896f61-config-data" (OuterVolumeSpecName: "config-data") pod "d3c4b74b-cd31-4245-91f0-9718f8896f61" (UID: "d3c4b74b-cd31-4245-91f0-9718f8896f61"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:48:19 crc kubenswrapper[4558]: I0120 17:48:19.753779 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qqkgb\" (UniqueName: \"kubernetes.io/projected/d3c4b74b-cd31-4245-91f0-9718f8896f61-kube-api-access-qqkgb\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:19 crc kubenswrapper[4558]: I0120 17:48:19.753814 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d3c4b74b-cd31-4245-91f0-9718f8896f61-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:19 crc kubenswrapper[4558]: I0120 17:48:19.753827 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d3c4b74b-cd31-4245-91f0-9718f8896f61-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:19 crc kubenswrapper[4558]: I0120 17:48:19.753841 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3c4b74b-cd31-4245-91f0-9718f8896f61-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:19 crc kubenswrapper[4558]: I0120 17:48:19.753877 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Jan 20 17:48:19 crc kubenswrapper[4558]: I0120 17:48:19.753887 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d3c4b74b-cd31-4245-91f0-9718f8896f61-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:19 crc kubenswrapper[4558]: I0120 17:48:19.753896 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3c4b74b-cd31-4245-91f0-9718f8896f61-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:19 crc kubenswrapper[4558]: I0120 17:48:19.764794 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-nd9l5" Jan 20 17:48:19 crc kubenswrapper[4558]: I0120 17:48:19.771469 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Jan 20 17:48:19 crc kubenswrapper[4558]: I0120 17:48:19.855249 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5447b108-6890-4614-88fa-3df24ce0d9b5-config-data\") pod \"5447b108-6890-4614-88fa-3df24ce0d9b5\" (UID: \"5447b108-6890-4614-88fa-3df24ce0d9b5\") " Jan 20 17:48:19 crc kubenswrapper[4558]: I0120 17:48:19.855379 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vkh5b\" (UniqueName: \"kubernetes.io/projected/5447b108-6890-4614-88fa-3df24ce0d9b5-kube-api-access-vkh5b\") pod \"5447b108-6890-4614-88fa-3df24ce0d9b5\" (UID: \"5447b108-6890-4614-88fa-3df24ce0d9b5\") " Jan 20 17:48:19 crc kubenswrapper[4558]: I0120 17:48:19.855513 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5447b108-6890-4614-88fa-3df24ce0d9b5-combined-ca-bundle\") pod \"5447b108-6890-4614-88fa-3df24ce0d9b5\" (UID: \"5447b108-6890-4614-88fa-3df24ce0d9b5\") " Jan 20 17:48:19 crc kubenswrapper[4558]: I0120 17:48:19.855561 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5447b108-6890-4614-88fa-3df24ce0d9b5-scripts\") pod \"5447b108-6890-4614-88fa-3df24ce0d9b5\" (UID: \"5447b108-6890-4614-88fa-3df24ce0d9b5\") " Jan 20 17:48:19 crc kubenswrapper[4558]: I0120 17:48:19.856683 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:19 crc kubenswrapper[4558]: I0120 17:48:19.861403 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5447b108-6890-4614-88fa-3df24ce0d9b5-kube-api-access-vkh5b" (OuterVolumeSpecName: "kube-api-access-vkh5b") pod "5447b108-6890-4614-88fa-3df24ce0d9b5" (UID: "5447b108-6890-4614-88fa-3df24ce0d9b5"). InnerVolumeSpecName "kube-api-access-vkh5b". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:48:19 crc kubenswrapper[4558]: I0120 17:48:19.863346 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5447b108-6890-4614-88fa-3df24ce0d9b5-scripts" (OuterVolumeSpecName: "scripts") pod "5447b108-6890-4614-88fa-3df24ce0d9b5" (UID: "5447b108-6890-4614-88fa-3df24ce0d9b5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:48:19 crc kubenswrapper[4558]: I0120 17:48:19.880447 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5447b108-6890-4614-88fa-3df24ce0d9b5-config-data" (OuterVolumeSpecName: "config-data") pod "5447b108-6890-4614-88fa-3df24ce0d9b5" (UID: "5447b108-6890-4614-88fa-3df24ce0d9b5"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:48:19 crc kubenswrapper[4558]: I0120 17:48:19.881632 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5447b108-6890-4614-88fa-3df24ce0d9b5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5447b108-6890-4614-88fa-3df24ce0d9b5" (UID: "5447b108-6890-4614-88fa-3df24ce0d9b5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:48:19 crc kubenswrapper[4558]: I0120 17:48:19.959938 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vkh5b\" (UniqueName: \"kubernetes.io/projected/5447b108-6890-4614-88fa-3df24ce0d9b5-kube-api-access-vkh5b\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:19 crc kubenswrapper[4558]: I0120 17:48:19.959989 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5447b108-6890-4614-88fa-3df24ce0d9b5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:19 crc kubenswrapper[4558]: I0120 17:48:19.960001 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5447b108-6890-4614-88fa-3df24ce0d9b5-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:19 crc kubenswrapper[4558]: I0120 17:48:19.960016 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5447b108-6890-4614-88fa-3df24ce0d9b5-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:20 crc kubenswrapper[4558]: I0120 17:48:20.173309 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-nd9l5" Jan 20 17:48:20 crc kubenswrapper[4558]: I0120 17:48:20.173340 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-nd9l5" event={"ID":"5447b108-6890-4614-88fa-3df24ce0d9b5","Type":"ContainerDied","Data":"b20fa1fa6725f9d7e7fbb7f49e4e9857f30680565cb6663e0a824c526960796d"} Jan 20 17:48:20 crc kubenswrapper[4558]: I0120 17:48:20.173403 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b20fa1fa6725f9d7e7fbb7f49e4e9857f30680565cb6663e0a824c526960796d" Jan 20 17:48:20 crc kubenswrapper[4558]: I0120 17:48:20.175247 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"d3c4b74b-cd31-4245-91f0-9718f8896f61","Type":"ContainerDied","Data":"6db9a7511d4ee8abaa86b286093d4b13d304e9f984a000068c383c1d65676f4a"} Jan 20 17:48:20 crc kubenswrapper[4558]: I0120 17:48:20.175317 4558 scope.go:117] "RemoveContainer" containerID="c51450fc82fc389fd4479f32722f045d8117aac56951b6ea66a9b0c5dec8678c" Jan 20 17:48:20 crc kubenswrapper[4558]: I0120 17:48:20.175474 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:48:20 crc kubenswrapper[4558]: I0120 17:48:20.180923 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"86be6735-87cb-4609-9f3c-334ce0eac39b","Type":"ContainerStarted","Data":"aa04cd3729a7441135f6f64838857570ad851434f8797a1f8696ad095d2814a8"} Jan 20 17:48:20 crc kubenswrapper[4558]: I0120 17:48:20.200773 4558 scope.go:117] "RemoveContainer" containerID="73b8176cf31c48afe8b2940b6e65ec9b3de704e8761f5cbe0a0a28407ebce072" Jan 20 17:48:20 crc kubenswrapper[4558]: I0120 17:48:20.218448 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:48:20 crc kubenswrapper[4558]: I0120 17:48:20.228075 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:48:20 crc kubenswrapper[4558]: I0120 17:48:20.242870 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:48:20 crc kubenswrapper[4558]: E0120 17:48:20.243417 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3c4b74b-cd31-4245-91f0-9718f8896f61" containerName="glance-httpd" Jan 20 17:48:20 crc kubenswrapper[4558]: I0120 17:48:20.243437 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3c4b74b-cd31-4245-91f0-9718f8896f61" containerName="glance-httpd" Jan 20 17:48:20 crc kubenswrapper[4558]: E0120 17:48:20.243462 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5447b108-6890-4614-88fa-3df24ce0d9b5" containerName="nova-cell0-conductor-db-sync" Jan 20 17:48:20 crc kubenswrapper[4558]: I0120 17:48:20.243471 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="5447b108-6890-4614-88fa-3df24ce0d9b5" containerName="nova-cell0-conductor-db-sync" Jan 20 17:48:20 crc kubenswrapper[4558]: E0120 17:48:20.243504 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3c4b74b-cd31-4245-91f0-9718f8896f61" containerName="glance-log" Jan 20 17:48:20 crc kubenswrapper[4558]: I0120 17:48:20.243510 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3c4b74b-cd31-4245-91f0-9718f8896f61" containerName="glance-log" Jan 20 17:48:20 crc kubenswrapper[4558]: I0120 17:48:20.243685 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="5447b108-6890-4614-88fa-3df24ce0d9b5" containerName="nova-cell0-conductor-db-sync" Jan 20 17:48:20 crc kubenswrapper[4558]: I0120 17:48:20.243700 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d3c4b74b-cd31-4245-91f0-9718f8896f61" containerName="glance-log" Jan 20 17:48:20 crc kubenswrapper[4558]: I0120 17:48:20.243712 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d3c4b74b-cd31-4245-91f0-9718f8896f61" containerName="glance-httpd" Jan 20 17:48:20 crc kubenswrapper[4558]: I0120 17:48:20.244874 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:48:20 crc kubenswrapper[4558]: I0120 17:48:20.246543 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-internal-svc" Jan 20 17:48:20 crc kubenswrapper[4558]: I0120 17:48:20.246639 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-internal-config-data" Jan 20 17:48:20 crc kubenswrapper[4558]: I0120 17:48:20.258967 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:48:20 crc kubenswrapper[4558]: I0120 17:48:20.260307 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:48:20 crc kubenswrapper[4558]: I0120 17:48:20.264301 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-nova-dockercfg-pbs2c" Jan 20 17:48:20 crc kubenswrapper[4558]: I0120 17:48:20.264535 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-conductor-config-data" Jan 20 17:48:20 crc kubenswrapper[4558]: I0120 17:48:20.307629 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:48:20 crc kubenswrapper[4558]: I0120 17:48:20.314794 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:48:20 crc kubenswrapper[4558]: I0120 17:48:20.368171 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:48:20 crc kubenswrapper[4558]: I0120 17:48:20.368421 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dkhz2\" (UniqueName: \"kubernetes.io/projected/d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd-kube-api-access-dkhz2\") pod \"glance-default-internal-api-0\" (UID: \"d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:48:20 crc kubenswrapper[4558]: I0120 17:48:20.368475 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/908a249f-13e5-4352-a038-9b9f4fc13fb2-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"908a249f-13e5-4352-a038-9b9f4fc13fb2\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:48:20 crc kubenswrapper[4558]: I0120 17:48:20.368538 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:48:20 crc kubenswrapper[4558]: I0120 17:48:20.368623 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/908a249f-13e5-4352-a038-9b9f4fc13fb2-config-data\") pod \"nova-cell0-conductor-0\" (UID: 
\"908a249f-13e5-4352-a038-9b9f4fc13fb2\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:48:20 crc kubenswrapper[4558]: I0120 17:48:20.368675 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zdnkr\" (UniqueName: \"kubernetes.io/projected/908a249f-13e5-4352-a038-9b9f4fc13fb2-kube-api-access-zdnkr\") pod \"nova-cell0-conductor-0\" (UID: \"908a249f-13e5-4352-a038-9b9f4fc13fb2\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:48:20 crc kubenswrapper[4558]: I0120 17:48:20.368711 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd-config-data\") pod \"glance-default-internal-api-0\" (UID: \"d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:48:20 crc kubenswrapper[4558]: I0120 17:48:20.368761 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:48:20 crc kubenswrapper[4558]: I0120 17:48:20.368838 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:48:20 crc kubenswrapper[4558]: I0120 17:48:20.368860 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd-logs\") pod \"glance-default-internal-api-0\" (UID: \"d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:48:20 crc kubenswrapper[4558]: I0120 17:48:20.368874 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd-scripts\") pod \"glance-default-internal-api-0\" (UID: \"d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:48:20 crc kubenswrapper[4558]: I0120 17:48:20.469869 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dkhz2\" (UniqueName: \"kubernetes.io/projected/d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd-kube-api-access-dkhz2\") pod \"glance-default-internal-api-0\" (UID: \"d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:48:20 crc kubenswrapper[4558]: I0120 17:48:20.469920 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/908a249f-13e5-4352-a038-9b9f4fc13fb2-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"908a249f-13e5-4352-a038-9b9f4fc13fb2\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:48:20 crc kubenswrapper[4558]: I0120 17:48:20.469942 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:48:20 crc kubenswrapper[4558]: I0120 17:48:20.469970 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/908a249f-13e5-4352-a038-9b9f4fc13fb2-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"908a249f-13e5-4352-a038-9b9f4fc13fb2\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:48:20 crc kubenswrapper[4558]: I0120 17:48:20.469985 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zdnkr\" (UniqueName: \"kubernetes.io/projected/908a249f-13e5-4352-a038-9b9f4fc13fb2-kube-api-access-zdnkr\") pod \"nova-cell0-conductor-0\" (UID: \"908a249f-13e5-4352-a038-9b9f4fc13fb2\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:48:20 crc kubenswrapper[4558]: I0120 17:48:20.470001 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd-config-data\") pod \"glance-default-internal-api-0\" (UID: \"d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:48:20 crc kubenswrapper[4558]: I0120 17:48:20.470023 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:48:20 crc kubenswrapper[4558]: I0120 17:48:20.470067 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:48:20 crc kubenswrapper[4558]: I0120 17:48:20.470085 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd-logs\") pod \"glance-default-internal-api-0\" (UID: \"d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:48:20 crc kubenswrapper[4558]: I0120 17:48:20.470097 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd-scripts\") pod \"glance-default-internal-api-0\" (UID: \"d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:48:20 crc kubenswrapper[4558]: I0120 17:48:20.470148 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:48:20 crc kubenswrapper[4558]: I0120 17:48:20.470566 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: 
\"kubernetes.io/empty-dir/d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:48:20 crc kubenswrapper[4558]: I0120 17:48:20.471698 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd\") device mount path \"/mnt/openstack/pv11\"" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:48:20 crc kubenswrapper[4558]: I0120 17:48:20.472040 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd-logs\") pod \"glance-default-internal-api-0\" (UID: \"d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:48:20 crc kubenswrapper[4558]: I0120 17:48:20.478778 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/908a249f-13e5-4352-a038-9b9f4fc13fb2-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"908a249f-13e5-4352-a038-9b9f4fc13fb2\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:48:20 crc kubenswrapper[4558]: I0120 17:48:20.479211 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/908a249f-13e5-4352-a038-9b9f4fc13fb2-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"908a249f-13e5-4352-a038-9b9f4fc13fb2\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:48:20 crc kubenswrapper[4558]: I0120 17:48:20.479221 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd-config-data\") pod \"glance-default-internal-api-0\" (UID: \"d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:48:20 crc kubenswrapper[4558]: I0120 17:48:20.483968 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:48:20 crc kubenswrapper[4558]: I0120 17:48:20.484191 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd-scripts\") pod \"glance-default-internal-api-0\" (UID: \"d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:48:20 crc kubenswrapper[4558]: I0120 17:48:20.484543 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:48:20 crc kubenswrapper[4558]: I0120 17:48:20.485861 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dkhz2\" (UniqueName: 
\"kubernetes.io/projected/d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd-kube-api-access-dkhz2\") pod \"glance-default-internal-api-0\" (UID: \"d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:48:20 crc kubenswrapper[4558]: I0120 17:48:20.486274 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zdnkr\" (UniqueName: \"kubernetes.io/projected/908a249f-13e5-4352-a038-9b9f4fc13fb2-kube-api-access-zdnkr\") pod \"nova-cell0-conductor-0\" (UID: \"908a249f-13e5-4352-a038-9b9f4fc13fb2\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:48:20 crc kubenswrapper[4558]: I0120 17:48:20.505255 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:48:20 crc kubenswrapper[4558]: I0120 17:48:20.561457 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:48:20 crc kubenswrapper[4558]: I0120 17:48:20.578740 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d3c4b74b-cd31-4245-91f0-9718f8896f61" path="/var/lib/kubelet/pods/d3c4b74b-cd31-4245-91f0-9718f8896f61/volumes" Jan 20 17:48:20 crc kubenswrapper[4558]: I0120 17:48:20.583568 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.096056 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.161240 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.183978 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f5mnj\" (UniqueName: \"kubernetes.io/projected/05f4b910-ff7c-4039-8ff7-24687eea5e74-kube-api-access-f5mnj\") pod \"05f4b910-ff7c-4039-8ff7-24687eea5e74\" (UID: \"05f4b910-ff7c-4039-8ff7-24687eea5e74\") " Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.184284 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05f4b910-ff7c-4039-8ff7-24687eea5e74-config-data\") pod \"05f4b910-ff7c-4039-8ff7-24687eea5e74\" (UID: \"05f4b910-ff7c-4039-8ff7-24687eea5e74\") " Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.184392 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05f4b910-ff7c-4039-8ff7-24687eea5e74-combined-ca-bundle\") pod \"05f4b910-ff7c-4039-8ff7-24687eea5e74\" (UID: \"05f4b910-ff7c-4039-8ff7-24687eea5e74\") " Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.184426 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/05f4b910-ff7c-4039-8ff7-24687eea5e74-logs\") pod \"05f4b910-ff7c-4039-8ff7-24687eea5e74\" (UID: \"05f4b910-ff7c-4039-8ff7-24687eea5e74\") " Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.184478 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/05f4b910-ff7c-4039-8ff7-24687eea5e74-httpd-run\") pod \"05f4b910-ff7c-4039-8ff7-24687eea5e74\" (UID: \"05f4b910-ff7c-4039-8ff7-24687eea5e74\") " Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.184509 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/05f4b910-ff7c-4039-8ff7-24687eea5e74-scripts\") pod \"05f4b910-ff7c-4039-8ff7-24687eea5e74\" (UID: \"05f4b910-ff7c-4039-8ff7-24687eea5e74\") " Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.184600 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/05f4b910-ff7c-4039-8ff7-24687eea5e74-public-tls-certs\") pod \"05f4b910-ff7c-4039-8ff7-24687eea5e74\" (UID: \"05f4b910-ff7c-4039-8ff7-24687eea5e74\") " Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.184654 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"05f4b910-ff7c-4039-8ff7-24687eea5e74\" (UID: \"05f4b910-ff7c-4039-8ff7-24687eea5e74\") " Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.186726 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/05f4b910-ff7c-4039-8ff7-24687eea5e74-logs" (OuterVolumeSpecName: "logs") pod "05f4b910-ff7c-4039-8ff7-24687eea5e74" (UID: "05f4b910-ff7c-4039-8ff7-24687eea5e74"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.187176 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/05f4b910-ff7c-4039-8ff7-24687eea5e74-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "05f4b910-ff7c-4039-8ff7-24687eea5e74" (UID: "05f4b910-ff7c-4039-8ff7-24687eea5e74"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.189038 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/05f4b910-ff7c-4039-8ff7-24687eea5e74-scripts" (OuterVolumeSpecName: "scripts") pod "05f4b910-ff7c-4039-8ff7-24687eea5e74" (UID: "05f4b910-ff7c-4039-8ff7-24687eea5e74"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.189708 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "glance") pod "05f4b910-ff7c-4039-8ff7-24687eea5e74" (UID: "05f4b910-ff7c-4039-8ff7-24687eea5e74"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.198360 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/05f4b910-ff7c-4039-8ff7-24687eea5e74-kube-api-access-f5mnj" (OuterVolumeSpecName: "kube-api-access-f5mnj") pod "05f4b910-ff7c-4039-8ff7-24687eea5e74" (UID: "05f4b910-ff7c-4039-8ff7-24687eea5e74"). InnerVolumeSpecName "kube-api-access-f5mnj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.220048 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.228791 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"05f4b910-ff7c-4039-8ff7-24687eea5e74","Type":"ContainerDied","Data":"aa13f111b95eb70e30ae67cf411f532cb413f039d01f38c02d51d6a30252389b"} Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.228852 4558 scope.go:117] "RemoveContainer" containerID="aa13f111b95eb70e30ae67cf411f532cb413f039d01f38c02d51d6a30252389b" Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.228961 4558 generic.go:334] "Generic (PLEG): container finished" podID="05f4b910-ff7c-4039-8ff7-24687eea5e74" containerID="aa13f111b95eb70e30ae67cf411f532cb413f039d01f38c02d51d6a30252389b" exitCode=0 Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.229026 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.229067 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"05f4b910-ff7c-4039-8ff7-24687eea5e74","Type":"ContainerDied","Data":"caf46ce863cd2a09dadc3e1dcdc49882e5406d35f06c7fa3ca4e64eaaee261df"} Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.233787 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/05f4b910-ff7c-4039-8ff7-24687eea5e74-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "05f4b910-ff7c-4039-8ff7-24687eea5e74" (UID: "05f4b910-ff7c-4039-8ff7-24687eea5e74"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.250814 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"86be6735-87cb-4609-9f3c-334ce0eac39b","Type":"ContainerStarted","Data":"758484ae32e097d503328cbf6264398243019befc00022cc989fa1163a4777e4"} Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.254092 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"908a249f-13e5-4352-a038-9b9f4fc13fb2","Type":"ContainerStarted","Data":"c16765ec841d8a528b8a592a48a7d87b70ba6cd547fdc490114374a5a197cd83"} Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.262796 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/05f4b910-ff7c-4039-8ff7-24687eea5e74-config-data" (OuterVolumeSpecName: "config-data") pod "05f4b910-ff7c-4039-8ff7-24687eea5e74" (UID: "05f4b910-ff7c-4039-8ff7-24687eea5e74"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.273463 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/05f4b910-ff7c-4039-8ff7-24687eea5e74-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "05f4b910-ff7c-4039-8ff7-24687eea5e74" (UID: "05f4b910-ff7c-4039-8ff7-24687eea5e74"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.274906 4558 scope.go:117] "RemoveContainer" containerID="54ce3695fde6a7ec6419832384135e5cf1001a4b3826a28d0554442580b148c3" Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.288604 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f5mnj\" (UniqueName: \"kubernetes.io/projected/05f4b910-ff7c-4039-8ff7-24687eea5e74-kube-api-access-f5mnj\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.288634 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05f4b910-ff7c-4039-8ff7-24687eea5e74-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.288648 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05f4b910-ff7c-4039-8ff7-24687eea5e74-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.288658 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/05f4b910-ff7c-4039-8ff7-24687eea5e74-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.288667 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/05f4b910-ff7c-4039-8ff7-24687eea5e74-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.288676 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/05f4b910-ff7c-4039-8ff7-24687eea5e74-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.288684 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/05f4b910-ff7c-4039-8ff7-24687eea5e74-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.288712 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.298774 4558 scope.go:117] "RemoveContainer" containerID="aa13f111b95eb70e30ae67cf411f532cb413f039d01f38c02d51d6a30252389b" Jan 20 17:48:21 crc kubenswrapper[4558]: E0120 17:48:21.300098 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aa13f111b95eb70e30ae67cf411f532cb413f039d01f38c02d51d6a30252389b\": container with ID starting with aa13f111b95eb70e30ae67cf411f532cb413f039d01f38c02d51d6a30252389b not found: ID does not exist" containerID="aa13f111b95eb70e30ae67cf411f532cb413f039d01f38c02d51d6a30252389b" Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.300193 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aa13f111b95eb70e30ae67cf411f532cb413f039d01f38c02d51d6a30252389b"} err="failed to get container status \"aa13f111b95eb70e30ae67cf411f532cb413f039d01f38c02d51d6a30252389b\": rpc error: code = NotFound desc = could not find container \"aa13f111b95eb70e30ae67cf411f532cb413f039d01f38c02d51d6a30252389b\": container with ID starting with aa13f111b95eb70e30ae67cf411f532cb413f039d01f38c02d51d6a30252389b not found: ID does not 
exist" Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.300218 4558 scope.go:117] "RemoveContainer" containerID="54ce3695fde6a7ec6419832384135e5cf1001a4b3826a28d0554442580b148c3" Jan 20 17:48:21 crc kubenswrapper[4558]: E0120 17:48:21.300923 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"54ce3695fde6a7ec6419832384135e5cf1001a4b3826a28d0554442580b148c3\": container with ID starting with 54ce3695fde6a7ec6419832384135e5cf1001a4b3826a28d0554442580b148c3 not found: ID does not exist" containerID="54ce3695fde6a7ec6419832384135e5cf1001a4b3826a28d0554442580b148c3" Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.300976 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"54ce3695fde6a7ec6419832384135e5cf1001a4b3826a28d0554442580b148c3"} err="failed to get container status \"54ce3695fde6a7ec6419832384135e5cf1001a4b3826a28d0554442580b148c3\": rpc error: code = NotFound desc = could not find container \"54ce3695fde6a7ec6419832384135e5cf1001a4b3826a28d0554442580b148c3\": container with ID starting with 54ce3695fde6a7ec6419832384135e5cf1001a4b3826a28d0554442580b148c3 not found: ID does not exist" Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.306515 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.390902 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.567467 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.579370 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.596222 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:48:21 crc kubenswrapper[4558]: E0120 17:48:21.596761 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05f4b910-ff7c-4039-8ff7-24687eea5e74" containerName="glance-httpd" Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.596780 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="05f4b910-ff7c-4039-8ff7-24687eea5e74" containerName="glance-httpd" Jan 20 17:48:21 crc kubenswrapper[4558]: E0120 17:48:21.596802 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05f4b910-ff7c-4039-8ff7-24687eea5e74" containerName="glance-log" Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.596808 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="05f4b910-ff7c-4039-8ff7-24687eea5e74" containerName="glance-log" Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.597013 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="05f4b910-ff7c-4039-8ff7-24687eea5e74" containerName="glance-httpd" Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.597035 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="05f4b910-ff7c-4039-8ff7-24687eea5e74" containerName="glance-log" Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.598177 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.610664 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.613361 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-public-svc" Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.613390 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-external-config-data" Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.700648 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-82gpj\" (UniqueName: \"kubernetes.io/projected/f98db5df-2f7f-490b-b4a0-7e9f27b07a60-kube-api-access-82gpj\") pod \"glance-default-external-api-0\" (UID: \"f98db5df-2f7f-490b-b4a0-7e9f27b07a60\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.700994 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f98db5df-2f7f-490b-b4a0-7e9f27b07a60-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"f98db5df-2f7f-490b-b4a0-7e9f27b07a60\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.701217 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f98db5df-2f7f-490b-b4a0-7e9f27b07a60-config-data\") pod \"glance-default-external-api-0\" (UID: \"f98db5df-2f7f-490b-b4a0-7e9f27b07a60\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.701361 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f98db5df-2f7f-490b-b4a0-7e9f27b07a60-scripts\") pod \"glance-default-external-api-0\" (UID: \"f98db5df-2f7f-490b-b4a0-7e9f27b07a60\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.701546 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f98db5df-2f7f-490b-b4a0-7e9f27b07a60-logs\") pod \"glance-default-external-api-0\" (UID: \"f98db5df-2f7f-490b-b4a0-7e9f27b07a60\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.701658 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f98db5df-2f7f-490b-b4a0-7e9f27b07a60-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"f98db5df-2f7f-490b-b4a0-7e9f27b07a60\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.701704 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"f98db5df-2f7f-490b-b4a0-7e9f27b07a60\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 
17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.701825 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f98db5df-2f7f-490b-b4a0-7e9f27b07a60-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"f98db5df-2f7f-490b-b4a0-7e9f27b07a60\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.802796 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f98db5df-2f7f-490b-b4a0-7e9f27b07a60-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"f98db5df-2f7f-490b-b4a0-7e9f27b07a60\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.802837 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"f98db5df-2f7f-490b-b4a0-7e9f27b07a60\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.802898 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f98db5df-2f7f-490b-b4a0-7e9f27b07a60-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"f98db5df-2f7f-490b-b4a0-7e9f27b07a60\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.803109 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"f98db5df-2f7f-490b-b4a0-7e9f27b07a60\") device mount path \"/mnt/openstack/pv09\"" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.803228 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-82gpj\" (UniqueName: \"kubernetes.io/projected/f98db5df-2f7f-490b-b4a0-7e9f27b07a60-kube-api-access-82gpj\") pod \"glance-default-external-api-0\" (UID: \"f98db5df-2f7f-490b-b4a0-7e9f27b07a60\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.803259 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f98db5df-2f7f-490b-b4a0-7e9f27b07a60-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"f98db5df-2f7f-490b-b4a0-7e9f27b07a60\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.803315 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f98db5df-2f7f-490b-b4a0-7e9f27b07a60-config-data\") pod \"glance-default-external-api-0\" (UID: \"f98db5df-2f7f-490b-b4a0-7e9f27b07a60\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.803347 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f98db5df-2f7f-490b-b4a0-7e9f27b07a60-scripts\") pod \"glance-default-external-api-0\" (UID: \"f98db5df-2f7f-490b-b4a0-7e9f27b07a60\") " 
pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.803398 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f98db5df-2f7f-490b-b4a0-7e9f27b07a60-logs\") pod \"glance-default-external-api-0\" (UID: \"f98db5df-2f7f-490b-b4a0-7e9f27b07a60\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.803565 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f98db5df-2f7f-490b-b4a0-7e9f27b07a60-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"f98db5df-2f7f-490b-b4a0-7e9f27b07a60\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.803761 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f98db5df-2f7f-490b-b4a0-7e9f27b07a60-logs\") pod \"glance-default-external-api-0\" (UID: \"f98db5df-2f7f-490b-b4a0-7e9f27b07a60\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.806311 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f98db5df-2f7f-490b-b4a0-7e9f27b07a60-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"f98db5df-2f7f-490b-b4a0-7e9f27b07a60\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.806504 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f98db5df-2f7f-490b-b4a0-7e9f27b07a60-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"f98db5df-2f7f-490b-b4a0-7e9f27b07a60\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.815518 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f98db5df-2f7f-490b-b4a0-7e9f27b07a60-config-data\") pod \"glance-default-external-api-0\" (UID: \"f98db5df-2f7f-490b-b4a0-7e9f27b07a60\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.818753 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f98db5df-2f7f-490b-b4a0-7e9f27b07a60-scripts\") pod \"glance-default-external-api-0\" (UID: \"f98db5df-2f7f-490b-b4a0-7e9f27b07a60\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.821808 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-82gpj\" (UniqueName: \"kubernetes.io/projected/f98db5df-2f7f-490b-b4a0-7e9f27b07a60-kube-api-access-82gpj\") pod \"glance-default-external-api-0\" (UID: \"f98db5df-2f7f-490b-b4a0-7e9f27b07a60\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.836804 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"f98db5df-2f7f-490b-b4a0-7e9f27b07a60\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 
17:48:21 crc kubenswrapper[4558]: I0120 17:48:21.976338 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:48:22 crc kubenswrapper[4558]: I0120 17:48:22.274780 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"86be6735-87cb-4609-9f3c-334ce0eac39b","Type":"ContainerStarted","Data":"9392ae88af0e42747a8b668680f78f39f61098e62260af40bf833d1ea21d1ddf"} Jan 20 17:48:22 crc kubenswrapper[4558]: I0120 17:48:22.275375 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:22 crc kubenswrapper[4558]: I0120 17:48:22.285602 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"908a249f-13e5-4352-a038-9b9f4fc13fb2","Type":"ContainerStarted","Data":"a147049f1291dd80e5fafe03c849a2ac3868f760cff0fd23bf49f363c9e00349"} Jan 20 17:48:22 crc kubenswrapper[4558]: I0120 17:48:22.287046 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:48:22 crc kubenswrapper[4558]: I0120 17:48:22.289088 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd","Type":"ContainerStarted","Data":"3216cb4a892da431a3d001352506b8205d9cde6cf6808df51a45283abdf32abc"} Jan 20 17:48:22 crc kubenswrapper[4558]: I0120 17:48:22.289109 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd","Type":"ContainerStarted","Data":"212a4fd973edb30677b4248e267dd6b57d185e117261fb43b7e48e9267a561a9"} Jan 20 17:48:22 crc kubenswrapper[4558]: I0120 17:48:22.289120 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd","Type":"ContainerStarted","Data":"dcc937b537b6caac9042738c055c3b5a3a3ca3ee317b2e42b3c67919c7685942"} Jan 20 17:48:22 crc kubenswrapper[4558]: I0120 17:48:22.329553 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=1.42501907 podStartE2EDuration="5.329533547s" podCreationTimestamp="2026-01-20 17:48:17 +0000 UTC" firstStartedPulling="2026-01-20 17:48:17.965212299 +0000 UTC m=+3991.725550266" lastFinishedPulling="2026-01-20 17:48:21.869726777 +0000 UTC m=+3995.630064743" observedRunningTime="2026-01-20 17:48:22.309472705 +0000 UTC m=+3996.069810671" watchObservedRunningTime="2026-01-20 17:48:22.329533547 +0000 UTC m=+3996.089871503" Jan 20 17:48:22 crc kubenswrapper[4558]: I0120 17:48:22.358187 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podStartSLOduration=2.358152029 podStartE2EDuration="2.358152029s" podCreationTimestamp="2026-01-20 17:48:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:48:22.331292634 +0000 UTC m=+3996.091630601" watchObservedRunningTime="2026-01-20 17:48:22.358152029 +0000 UTC m=+3996.118489987" Jan 20 17:48:22 crc kubenswrapper[4558]: I0120 17:48:22.360789 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-internal-api-0" 
podStartSLOduration=2.360776755 podStartE2EDuration="2.360776755s" podCreationTimestamp="2026-01-20 17:48:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:48:22.349023325 +0000 UTC m=+3996.109361292" watchObservedRunningTime="2026-01-20 17:48:22.360776755 +0000 UTC m=+3996.121114722" Jan 20 17:48:22 crc kubenswrapper[4558]: I0120 17:48:22.457447 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:48:22 crc kubenswrapper[4558]: I0120 17:48:22.586191 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="05f4b910-ff7c-4039-8ff7-24687eea5e74" path="/var/lib/kubelet/pods/05f4b910-ff7c-4039-8ff7-24687eea5e74/volumes" Jan 20 17:48:23 crc kubenswrapper[4558]: I0120 17:48:23.304736 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"f98db5df-2f7f-490b-b4a0-7e9f27b07a60","Type":"ContainerStarted","Data":"5f690c112c0735b8be4b3a6b80c6e2fc33c18a468c87d95a1c56b1f942029cc8"} Jan 20 17:48:23 crc kubenswrapper[4558]: I0120 17:48:23.305342 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"f98db5df-2f7f-490b-b4a0-7e9f27b07a60","Type":"ContainerStarted","Data":"94a48f514687c985e6c0de200dd5b7405ffe787b6f8eded064e73166a124cbb6"} Jan 20 17:48:23 crc kubenswrapper[4558]: I0120 17:48:23.365664 4558 scope.go:117] "RemoveContainer" containerID="42c29ec80666322d27f85a6e6798fa5fcabfb0b124569ce2fbc426f02c3bf8fe" Jan 20 17:48:24 crc kubenswrapper[4558]: I0120 17:48:24.315815 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"f98db5df-2f7f-490b-b4a0-7e9f27b07a60","Type":"ContainerStarted","Data":"37aadc76d1ea5916d8987f0d7d3856518a536ffe632acfd464eb225116abde18"} Jan 20 17:48:26 crc kubenswrapper[4558]: I0120 17:48:26.814238 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-external-api-0" podStartSLOduration=5.814212192 podStartE2EDuration="5.814212192s" podCreationTimestamp="2026-01-20 17:48:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:48:24.344802451 +0000 UTC m=+3998.105140418" watchObservedRunningTime="2026-01-20 17:48:26.814212192 +0000 UTC m=+4000.574550159" Jan 20 17:48:26 crc kubenswrapper[4558]: I0120 17:48:26.819875 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:48:26 crc kubenswrapper[4558]: I0120 17:48:26.820055 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="908a249f-13e5-4352-a038-9b9f4fc13fb2" containerName="nova-cell0-conductor-conductor" containerID="cri-o://a147049f1291dd80e5fafe03c849a2ac3868f760cff0fd23bf49f363c9e00349" gracePeriod=30 Jan 20 17:48:26 crc kubenswrapper[4558]: E0120 17:48:26.823795 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a147049f1291dd80e5fafe03c849a2ac3868f760cff0fd23bf49f363c9e00349" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:48:26 crc kubenswrapper[4558]: E0120 
17:48:26.825192 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a147049f1291dd80e5fafe03c849a2ac3868f760cff0fd23bf49f363c9e00349" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:48:26 crc kubenswrapper[4558]: E0120 17:48:26.826238 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a147049f1291dd80e5fafe03c849a2ac3868f760cff0fd23bf49f363c9e00349" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:48:26 crc kubenswrapper[4558]: E0120 17:48:26.826270 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="908a249f-13e5-4352-a038-9b9f4fc13fb2" containerName="nova-cell0-conductor-conductor" Jan 20 17:48:27 crc kubenswrapper[4558]: I0120 17:48:27.329829 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 17:48:27 crc kubenswrapper[4558]: I0120 17:48:27.329907 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 17:48:28 crc kubenswrapper[4558]: I0120 17:48:28.364231 4558 generic.go:334] "Generic (PLEG): container finished" podID="908a249f-13e5-4352-a038-9b9f4fc13fb2" containerID="a147049f1291dd80e5fafe03c849a2ac3868f760cff0fd23bf49f363c9e00349" exitCode=0 Jan 20 17:48:28 crc kubenswrapper[4558]: I0120 17:48:28.364315 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"908a249f-13e5-4352-a038-9b9f4fc13fb2","Type":"ContainerDied","Data":"a147049f1291dd80e5fafe03c849a2ac3868f760cff0fd23bf49f363c9e00349"} Jan 20 17:48:28 crc kubenswrapper[4558]: I0120 17:48:28.452885 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:48:28 crc kubenswrapper[4558]: I0120 17:48:28.453266 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="86be6735-87cb-4609-9f3c-334ce0eac39b" containerName="ceilometer-central-agent" containerID="cri-o://b2e5ad3a07fd9605c52daa88921db45628093c3ce26795b7339404cc43ea0e8e" gracePeriod=30 Jan 20 17:48:28 crc kubenswrapper[4558]: I0120 17:48:28.453382 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="86be6735-87cb-4609-9f3c-334ce0eac39b" containerName="proxy-httpd" containerID="cri-o://9392ae88af0e42747a8b668680f78f39f61098e62260af40bf833d1ea21d1ddf" gracePeriod=30 Jan 20 17:48:28 crc kubenswrapper[4558]: I0120 17:48:28.453428 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" 
podUID="86be6735-87cb-4609-9f3c-334ce0eac39b" containerName="ceilometer-notification-agent" containerID="cri-o://aa04cd3729a7441135f6f64838857570ad851434f8797a1f8696ad095d2814a8" gracePeriod=30 Jan 20 17:48:28 crc kubenswrapper[4558]: I0120 17:48:28.453578 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="86be6735-87cb-4609-9f3c-334ce0eac39b" containerName="sg-core" containerID="cri-o://758484ae32e097d503328cbf6264398243019befc00022cc989fa1163a4777e4" gracePeriod=30 Jan 20 17:48:28 crc kubenswrapper[4558]: I0120 17:48:28.745232 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:48:28 crc kubenswrapper[4558]: I0120 17:48:28.877701 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/908a249f-13e5-4352-a038-9b9f4fc13fb2-config-data\") pod \"908a249f-13e5-4352-a038-9b9f4fc13fb2\" (UID: \"908a249f-13e5-4352-a038-9b9f4fc13fb2\") " Jan 20 17:48:28 crc kubenswrapper[4558]: I0120 17:48:28.877929 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/908a249f-13e5-4352-a038-9b9f4fc13fb2-combined-ca-bundle\") pod \"908a249f-13e5-4352-a038-9b9f4fc13fb2\" (UID: \"908a249f-13e5-4352-a038-9b9f4fc13fb2\") " Jan 20 17:48:28 crc kubenswrapper[4558]: I0120 17:48:28.877992 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zdnkr\" (UniqueName: \"kubernetes.io/projected/908a249f-13e5-4352-a038-9b9f4fc13fb2-kube-api-access-zdnkr\") pod \"908a249f-13e5-4352-a038-9b9f4fc13fb2\" (UID: \"908a249f-13e5-4352-a038-9b9f4fc13fb2\") " Jan 20 17:48:28 crc kubenswrapper[4558]: I0120 17:48:28.883101 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/908a249f-13e5-4352-a038-9b9f4fc13fb2-kube-api-access-zdnkr" (OuterVolumeSpecName: "kube-api-access-zdnkr") pod "908a249f-13e5-4352-a038-9b9f4fc13fb2" (UID: "908a249f-13e5-4352-a038-9b9f4fc13fb2"). InnerVolumeSpecName "kube-api-access-zdnkr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:48:28 crc kubenswrapper[4558]: I0120 17:48:28.902923 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/908a249f-13e5-4352-a038-9b9f4fc13fb2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "908a249f-13e5-4352-a038-9b9f4fc13fb2" (UID: "908a249f-13e5-4352-a038-9b9f4fc13fb2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:48:28 crc kubenswrapper[4558]: I0120 17:48:28.903564 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/908a249f-13e5-4352-a038-9b9f4fc13fb2-config-data" (OuterVolumeSpecName: "config-data") pod "908a249f-13e5-4352-a038-9b9f4fc13fb2" (UID: "908a249f-13e5-4352-a038-9b9f4fc13fb2"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:48:28 crc kubenswrapper[4558]: I0120 17:48:28.982692 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/908a249f-13e5-4352-a038-9b9f4fc13fb2-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:28 crc kubenswrapper[4558]: I0120 17:48:28.982727 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/908a249f-13e5-4352-a038-9b9f4fc13fb2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:28 crc kubenswrapper[4558]: I0120 17:48:28.983154 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zdnkr\" (UniqueName: \"kubernetes.io/projected/908a249f-13e5-4352-a038-9b9f4fc13fb2-kube-api-access-zdnkr\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:29 crc kubenswrapper[4558]: I0120 17:48:29.377352 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"908a249f-13e5-4352-a038-9b9f4fc13fb2","Type":"ContainerDied","Data":"c16765ec841d8a528b8a592a48a7d87b70ba6cd547fdc490114374a5a197cd83"} Jan 20 17:48:29 crc kubenswrapper[4558]: I0120 17:48:29.377388 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:48:29 crc kubenswrapper[4558]: I0120 17:48:29.377438 4558 scope.go:117] "RemoveContainer" containerID="a147049f1291dd80e5fafe03c849a2ac3868f760cff0fd23bf49f363c9e00349" Jan 20 17:48:29 crc kubenswrapper[4558]: I0120 17:48:29.388682 4558 generic.go:334] "Generic (PLEG): container finished" podID="86be6735-87cb-4609-9f3c-334ce0eac39b" containerID="9392ae88af0e42747a8b668680f78f39f61098e62260af40bf833d1ea21d1ddf" exitCode=0 Jan 20 17:48:29 crc kubenswrapper[4558]: I0120 17:48:29.388716 4558 generic.go:334] "Generic (PLEG): container finished" podID="86be6735-87cb-4609-9f3c-334ce0eac39b" containerID="758484ae32e097d503328cbf6264398243019befc00022cc989fa1163a4777e4" exitCode=2 Jan 20 17:48:29 crc kubenswrapper[4558]: I0120 17:48:29.388726 4558 generic.go:334] "Generic (PLEG): container finished" podID="86be6735-87cb-4609-9f3c-334ce0eac39b" containerID="b2e5ad3a07fd9605c52daa88921db45628093c3ce26795b7339404cc43ea0e8e" exitCode=0 Jan 20 17:48:29 crc kubenswrapper[4558]: I0120 17:48:29.388743 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"86be6735-87cb-4609-9f3c-334ce0eac39b","Type":"ContainerDied","Data":"9392ae88af0e42747a8b668680f78f39f61098e62260af40bf833d1ea21d1ddf"} Jan 20 17:48:29 crc kubenswrapper[4558]: I0120 17:48:29.388774 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"86be6735-87cb-4609-9f3c-334ce0eac39b","Type":"ContainerDied","Data":"758484ae32e097d503328cbf6264398243019befc00022cc989fa1163a4777e4"} Jan 20 17:48:29 crc kubenswrapper[4558]: I0120 17:48:29.388789 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"86be6735-87cb-4609-9f3c-334ce0eac39b","Type":"ContainerDied","Data":"b2e5ad3a07fd9605c52daa88921db45628093c3ce26795b7339404cc43ea0e8e"} Jan 20 17:48:29 crc kubenswrapper[4558]: I0120 17:48:29.428731 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:48:29 crc kubenswrapper[4558]: I0120 17:48:29.437420 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:48:29 crc kubenswrapper[4558]: I0120 17:48:29.446035 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:48:29 crc kubenswrapper[4558]: E0120 17:48:29.455559 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="908a249f-13e5-4352-a038-9b9f4fc13fb2" containerName="nova-cell0-conductor-conductor" Jan 20 17:48:29 crc kubenswrapper[4558]: I0120 17:48:29.455590 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="908a249f-13e5-4352-a038-9b9f4fc13fb2" containerName="nova-cell0-conductor-conductor" Jan 20 17:48:29 crc kubenswrapper[4558]: I0120 17:48:29.455796 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="908a249f-13e5-4352-a038-9b9f4fc13fb2" containerName="nova-cell0-conductor-conductor" Jan 20 17:48:29 crc kubenswrapper[4558]: I0120 17:48:29.456501 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:48:29 crc kubenswrapper[4558]: I0120 17:48:29.462855 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-nova-dockercfg-pbs2c" Jan 20 17:48:29 crc kubenswrapper[4558]: I0120 17:48:29.463076 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-conductor-config-data" Jan 20 17:48:29 crc kubenswrapper[4558]: I0120 17:48:29.467948 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:48:29 crc kubenswrapper[4558]: I0120 17:48:29.594325 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c44870c-4ab7-436a-9862-d6a2a4487bed-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"3c44870c-4ab7-436a-9862-d6a2a4487bed\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:48:29 crc kubenswrapper[4558]: I0120 17:48:29.594387 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c44870c-4ab7-436a-9862-d6a2a4487bed-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"3c44870c-4ab7-436a-9862-d6a2a4487bed\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:48:29 crc kubenswrapper[4558]: I0120 17:48:29.594474 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nmsq5\" (UniqueName: \"kubernetes.io/projected/3c44870c-4ab7-436a-9862-d6a2a4487bed-kube-api-access-nmsq5\") pod \"nova-cell0-conductor-0\" (UID: \"3c44870c-4ab7-436a-9862-d6a2a4487bed\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:48:29 crc kubenswrapper[4558]: I0120 17:48:29.696279 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c44870c-4ab7-436a-9862-d6a2a4487bed-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"3c44870c-4ab7-436a-9862-d6a2a4487bed\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:48:29 crc kubenswrapper[4558]: I0120 17:48:29.696330 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c44870c-4ab7-436a-9862-d6a2a4487bed-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"3c44870c-4ab7-436a-9862-d6a2a4487bed\") " 
pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:48:29 crc kubenswrapper[4558]: I0120 17:48:29.696411 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nmsq5\" (UniqueName: \"kubernetes.io/projected/3c44870c-4ab7-436a-9862-d6a2a4487bed-kube-api-access-nmsq5\") pod \"nova-cell0-conductor-0\" (UID: \"3c44870c-4ab7-436a-9862-d6a2a4487bed\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:48:29 crc kubenswrapper[4558]: I0120 17:48:29.700607 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c44870c-4ab7-436a-9862-d6a2a4487bed-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"3c44870c-4ab7-436a-9862-d6a2a4487bed\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:48:29 crc kubenswrapper[4558]: I0120 17:48:29.702191 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c44870c-4ab7-436a-9862-d6a2a4487bed-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"3c44870c-4ab7-436a-9862-d6a2a4487bed\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:48:29 crc kubenswrapper[4558]: I0120 17:48:29.718793 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nmsq5\" (UniqueName: \"kubernetes.io/projected/3c44870c-4ab7-436a-9862-d6a2a4487bed-kube-api-access-nmsq5\") pod \"nova-cell0-conductor-0\" (UID: \"3c44870c-4ab7-436a-9862-d6a2a4487bed\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:48:29 crc kubenswrapper[4558]: I0120 17:48:29.776786 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:48:30 crc kubenswrapper[4558]: I0120 17:48:30.180739 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:48:30 crc kubenswrapper[4558]: I0120 17:48:30.298202 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:30 crc kubenswrapper[4558]: I0120 17:48:30.399712 4558 generic.go:334] "Generic (PLEG): container finished" podID="86be6735-87cb-4609-9f3c-334ce0eac39b" containerID="aa04cd3729a7441135f6f64838857570ad851434f8797a1f8696ad095d2814a8" exitCode=0 Jan 20 17:48:30 crc kubenswrapper[4558]: I0120 17:48:30.399785 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:30 crc kubenswrapper[4558]: I0120 17:48:30.399776 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"86be6735-87cb-4609-9f3c-334ce0eac39b","Type":"ContainerDied","Data":"aa04cd3729a7441135f6f64838857570ad851434f8797a1f8696ad095d2814a8"} Jan 20 17:48:30 crc kubenswrapper[4558]: I0120 17:48:30.399934 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"86be6735-87cb-4609-9f3c-334ce0eac39b","Type":"ContainerDied","Data":"eef1445fe3e2984ca1d60609ec107a59cfcfbea24b216432e04732ffad94c0a5"} Jan 20 17:48:30 crc kubenswrapper[4558]: I0120 17:48:30.399970 4558 scope.go:117] "RemoveContainer" containerID="9392ae88af0e42747a8b668680f78f39f61098e62260af40bf833d1ea21d1ddf" Jan 20 17:48:30 crc kubenswrapper[4558]: I0120 17:48:30.403730 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"3c44870c-4ab7-436a-9862-d6a2a4487bed","Type":"ContainerStarted","Data":"7ab0ab6375e86fd4837f32d21526ca8433f782d8e6f806a0848eab5773b75fbd"} Jan 20 17:48:30 crc kubenswrapper[4558]: I0120 17:48:30.403765 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"3c44870c-4ab7-436a-9862-d6a2a4487bed","Type":"ContainerStarted","Data":"56160cff8efb493a8c36c7bc956b5e7e7b70c0e9435e4b3f88f7f5af01e50d61"} Jan 20 17:48:30 crc kubenswrapper[4558]: I0120 17:48:30.409421 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/86be6735-87cb-4609-9f3c-334ce0eac39b-log-httpd\") pod \"86be6735-87cb-4609-9f3c-334ce0eac39b\" (UID: \"86be6735-87cb-4609-9f3c-334ce0eac39b\") " Jan 20 17:48:30 crc kubenswrapper[4558]: I0120 17:48:30.409506 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mfh9x\" (UniqueName: \"kubernetes.io/projected/86be6735-87cb-4609-9f3c-334ce0eac39b-kube-api-access-mfh9x\") pod \"86be6735-87cb-4609-9f3c-334ce0eac39b\" (UID: \"86be6735-87cb-4609-9f3c-334ce0eac39b\") " Jan 20 17:48:30 crc kubenswrapper[4558]: I0120 17:48:30.409544 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/86be6735-87cb-4609-9f3c-334ce0eac39b-run-httpd\") pod \"86be6735-87cb-4609-9f3c-334ce0eac39b\" (UID: \"86be6735-87cb-4609-9f3c-334ce0eac39b\") " Jan 20 17:48:30 crc kubenswrapper[4558]: I0120 17:48:30.409571 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/86be6735-87cb-4609-9f3c-334ce0eac39b-scripts\") pod \"86be6735-87cb-4609-9f3c-334ce0eac39b\" (UID: \"86be6735-87cb-4609-9f3c-334ce0eac39b\") " Jan 20 17:48:30 crc kubenswrapper[4558]: I0120 17:48:30.409619 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/86be6735-87cb-4609-9f3c-334ce0eac39b-sg-core-conf-yaml\") pod \"86be6735-87cb-4609-9f3c-334ce0eac39b\" (UID: \"86be6735-87cb-4609-9f3c-334ce0eac39b\") " Jan 20 17:48:30 crc kubenswrapper[4558]: I0120 17:48:30.409661 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/86be6735-87cb-4609-9f3c-334ce0eac39b-combined-ca-bundle\") pod 
\"86be6735-87cb-4609-9f3c-334ce0eac39b\" (UID: \"86be6735-87cb-4609-9f3c-334ce0eac39b\") " Jan 20 17:48:30 crc kubenswrapper[4558]: I0120 17:48:30.409786 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/86be6735-87cb-4609-9f3c-334ce0eac39b-config-data\") pod \"86be6735-87cb-4609-9f3c-334ce0eac39b\" (UID: \"86be6735-87cb-4609-9f3c-334ce0eac39b\") " Jan 20 17:48:30 crc kubenswrapper[4558]: I0120 17:48:30.409982 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/86be6735-87cb-4609-9f3c-334ce0eac39b-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "86be6735-87cb-4609-9f3c-334ce0eac39b" (UID: "86be6735-87cb-4609-9f3c-334ce0eac39b"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:48:30 crc kubenswrapper[4558]: I0120 17:48:30.410207 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/86be6735-87cb-4609-9f3c-334ce0eac39b-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "86be6735-87cb-4609-9f3c-334ce0eac39b" (UID: "86be6735-87cb-4609-9f3c-334ce0eac39b"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:48:30 crc kubenswrapper[4558]: I0120 17:48:30.410729 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/86be6735-87cb-4609-9f3c-334ce0eac39b-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:30 crc kubenswrapper[4558]: I0120 17:48:30.410751 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/86be6735-87cb-4609-9f3c-334ce0eac39b-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:30 crc kubenswrapper[4558]: I0120 17:48:30.413608 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/86be6735-87cb-4609-9f3c-334ce0eac39b-scripts" (OuterVolumeSpecName: "scripts") pod "86be6735-87cb-4609-9f3c-334ce0eac39b" (UID: "86be6735-87cb-4609-9f3c-334ce0eac39b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:48:30 crc kubenswrapper[4558]: I0120 17:48:30.425767 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/86be6735-87cb-4609-9f3c-334ce0eac39b-kube-api-access-mfh9x" (OuterVolumeSpecName: "kube-api-access-mfh9x") pod "86be6735-87cb-4609-9f3c-334ce0eac39b" (UID: "86be6735-87cb-4609-9f3c-334ce0eac39b"). InnerVolumeSpecName "kube-api-access-mfh9x". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:48:30 crc kubenswrapper[4558]: I0120 17:48:30.436991 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podStartSLOduration=1.436944493 podStartE2EDuration="1.436944493s" podCreationTimestamp="2026-01-20 17:48:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:48:30.416526269 +0000 UTC m=+4004.176864236" watchObservedRunningTime="2026-01-20 17:48:30.436944493 +0000 UTC m=+4004.197282450" Jan 20 17:48:30 crc kubenswrapper[4558]: I0120 17:48:30.437770 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/86be6735-87cb-4609-9f3c-334ce0eac39b-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "86be6735-87cb-4609-9f3c-334ce0eac39b" (UID: "86be6735-87cb-4609-9f3c-334ce0eac39b"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:48:30 crc kubenswrapper[4558]: I0120 17:48:30.443157 4558 scope.go:117] "RemoveContainer" containerID="758484ae32e097d503328cbf6264398243019befc00022cc989fa1163a4777e4" Jan 20 17:48:30 crc kubenswrapper[4558]: I0120 17:48:30.484072 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/86be6735-87cb-4609-9f3c-334ce0eac39b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "86be6735-87cb-4609-9f3c-334ce0eac39b" (UID: "86be6735-87cb-4609-9f3c-334ce0eac39b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:48:30 crc kubenswrapper[4558]: I0120 17:48:30.499808 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/86be6735-87cb-4609-9f3c-334ce0eac39b-config-data" (OuterVolumeSpecName: "config-data") pod "86be6735-87cb-4609-9f3c-334ce0eac39b" (UID: "86be6735-87cb-4609-9f3c-334ce0eac39b"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:48:30 crc kubenswrapper[4558]: I0120 17:48:30.513621 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mfh9x\" (UniqueName: \"kubernetes.io/projected/86be6735-87cb-4609-9f3c-334ce0eac39b-kube-api-access-mfh9x\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:30 crc kubenswrapper[4558]: I0120 17:48:30.513652 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/86be6735-87cb-4609-9f3c-334ce0eac39b-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:30 crc kubenswrapper[4558]: I0120 17:48:30.513666 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/86be6735-87cb-4609-9f3c-334ce0eac39b-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:30 crc kubenswrapper[4558]: I0120 17:48:30.513690 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/86be6735-87cb-4609-9f3c-334ce0eac39b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:30 crc kubenswrapper[4558]: I0120 17:48:30.513700 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/86be6735-87cb-4609-9f3c-334ce0eac39b-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:30 crc kubenswrapper[4558]: I0120 17:48:30.555727 4558 scope.go:117] "RemoveContainer" containerID="aa04cd3729a7441135f6f64838857570ad851434f8797a1f8696ad095d2814a8" Jan 20 17:48:30 crc kubenswrapper[4558]: I0120 17:48:30.563120 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:48:30 crc kubenswrapper[4558]: I0120 17:48:30.563158 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:48:30 crc kubenswrapper[4558]: I0120 17:48:30.580071 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="908a249f-13e5-4352-a038-9b9f4fc13fb2" path="/var/lib/kubelet/pods/908a249f-13e5-4352-a038-9b9f4fc13fb2/volumes" Jan 20 17:48:30 crc kubenswrapper[4558]: I0120 17:48:30.589572 4558 scope.go:117] "RemoveContainer" containerID="b2e5ad3a07fd9605c52daa88921db45628093c3ce26795b7339404cc43ea0e8e" Jan 20 17:48:30 crc kubenswrapper[4558]: I0120 17:48:30.593817 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:48:30 crc kubenswrapper[4558]: I0120 17:48:30.599779 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:48:30 crc kubenswrapper[4558]: I0120 17:48:30.612058 4558 scope.go:117] "RemoveContainer" containerID="9392ae88af0e42747a8b668680f78f39f61098e62260af40bf833d1ea21d1ddf" Jan 20 17:48:30 crc kubenswrapper[4558]: E0120 17:48:30.618489 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9392ae88af0e42747a8b668680f78f39f61098e62260af40bf833d1ea21d1ddf\": container with ID starting with 9392ae88af0e42747a8b668680f78f39f61098e62260af40bf833d1ea21d1ddf not found: ID does not exist" containerID="9392ae88af0e42747a8b668680f78f39f61098e62260af40bf833d1ea21d1ddf" Jan 20 17:48:30 crc kubenswrapper[4558]: I0120 17:48:30.618523 4558 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"9392ae88af0e42747a8b668680f78f39f61098e62260af40bf833d1ea21d1ddf"} err="failed to get container status \"9392ae88af0e42747a8b668680f78f39f61098e62260af40bf833d1ea21d1ddf\": rpc error: code = NotFound desc = could not find container \"9392ae88af0e42747a8b668680f78f39f61098e62260af40bf833d1ea21d1ddf\": container with ID starting with 9392ae88af0e42747a8b668680f78f39f61098e62260af40bf833d1ea21d1ddf not found: ID does not exist" Jan 20 17:48:30 crc kubenswrapper[4558]: I0120 17:48:30.618563 4558 scope.go:117] "RemoveContainer" containerID="758484ae32e097d503328cbf6264398243019befc00022cc989fa1163a4777e4" Jan 20 17:48:30 crc kubenswrapper[4558]: E0120 17:48:30.619199 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"758484ae32e097d503328cbf6264398243019befc00022cc989fa1163a4777e4\": container with ID starting with 758484ae32e097d503328cbf6264398243019befc00022cc989fa1163a4777e4 not found: ID does not exist" containerID="758484ae32e097d503328cbf6264398243019befc00022cc989fa1163a4777e4" Jan 20 17:48:30 crc kubenswrapper[4558]: I0120 17:48:30.619254 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"758484ae32e097d503328cbf6264398243019befc00022cc989fa1163a4777e4"} err="failed to get container status \"758484ae32e097d503328cbf6264398243019befc00022cc989fa1163a4777e4\": rpc error: code = NotFound desc = could not find container \"758484ae32e097d503328cbf6264398243019befc00022cc989fa1163a4777e4\": container with ID starting with 758484ae32e097d503328cbf6264398243019befc00022cc989fa1163a4777e4 not found: ID does not exist" Jan 20 17:48:30 crc kubenswrapper[4558]: I0120 17:48:30.619311 4558 scope.go:117] "RemoveContainer" containerID="aa04cd3729a7441135f6f64838857570ad851434f8797a1f8696ad095d2814a8" Jan 20 17:48:30 crc kubenswrapper[4558]: E0120 17:48:30.622279 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aa04cd3729a7441135f6f64838857570ad851434f8797a1f8696ad095d2814a8\": container with ID starting with aa04cd3729a7441135f6f64838857570ad851434f8797a1f8696ad095d2814a8 not found: ID does not exist" containerID="aa04cd3729a7441135f6f64838857570ad851434f8797a1f8696ad095d2814a8" Jan 20 17:48:30 crc kubenswrapper[4558]: I0120 17:48:30.622319 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aa04cd3729a7441135f6f64838857570ad851434f8797a1f8696ad095d2814a8"} err="failed to get container status \"aa04cd3729a7441135f6f64838857570ad851434f8797a1f8696ad095d2814a8\": rpc error: code = NotFound desc = could not find container \"aa04cd3729a7441135f6f64838857570ad851434f8797a1f8696ad095d2814a8\": container with ID starting with aa04cd3729a7441135f6f64838857570ad851434f8797a1f8696ad095d2814a8 not found: ID does not exist" Jan 20 17:48:30 crc kubenswrapper[4558]: I0120 17:48:30.622345 4558 scope.go:117] "RemoveContainer" containerID="b2e5ad3a07fd9605c52daa88921db45628093c3ce26795b7339404cc43ea0e8e" Jan 20 17:48:30 crc kubenswrapper[4558]: E0120 17:48:30.622636 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b2e5ad3a07fd9605c52daa88921db45628093c3ce26795b7339404cc43ea0e8e\": container with ID starting with b2e5ad3a07fd9605c52daa88921db45628093c3ce26795b7339404cc43ea0e8e not found: ID does not exist" 
containerID="b2e5ad3a07fd9605c52daa88921db45628093c3ce26795b7339404cc43ea0e8e" Jan 20 17:48:30 crc kubenswrapper[4558]: I0120 17:48:30.622658 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b2e5ad3a07fd9605c52daa88921db45628093c3ce26795b7339404cc43ea0e8e"} err="failed to get container status \"b2e5ad3a07fd9605c52daa88921db45628093c3ce26795b7339404cc43ea0e8e\": rpc error: code = NotFound desc = could not find container \"b2e5ad3a07fd9605c52daa88921db45628093c3ce26795b7339404cc43ea0e8e\": container with ID starting with b2e5ad3a07fd9605c52daa88921db45628093c3ce26795b7339404cc43ea0e8e not found: ID does not exist" Jan 20 17:48:30 crc kubenswrapper[4558]: I0120 17:48:30.721218 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:48:30 crc kubenswrapper[4558]: I0120 17:48:30.729816 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:48:30 crc kubenswrapper[4558]: I0120 17:48:30.754378 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:48:30 crc kubenswrapper[4558]: E0120 17:48:30.755012 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86be6735-87cb-4609-9f3c-334ce0eac39b" containerName="proxy-httpd" Jan 20 17:48:30 crc kubenswrapper[4558]: I0120 17:48:30.755039 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="86be6735-87cb-4609-9f3c-334ce0eac39b" containerName="proxy-httpd" Jan 20 17:48:30 crc kubenswrapper[4558]: E0120 17:48:30.755072 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86be6735-87cb-4609-9f3c-334ce0eac39b" containerName="ceilometer-central-agent" Jan 20 17:48:30 crc kubenswrapper[4558]: I0120 17:48:30.755079 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="86be6735-87cb-4609-9f3c-334ce0eac39b" containerName="ceilometer-central-agent" Jan 20 17:48:30 crc kubenswrapper[4558]: E0120 17:48:30.755105 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86be6735-87cb-4609-9f3c-334ce0eac39b" containerName="ceilometer-notification-agent" Jan 20 17:48:30 crc kubenswrapper[4558]: I0120 17:48:30.755114 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="86be6735-87cb-4609-9f3c-334ce0eac39b" containerName="ceilometer-notification-agent" Jan 20 17:48:30 crc kubenswrapper[4558]: E0120 17:48:30.755128 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86be6735-87cb-4609-9f3c-334ce0eac39b" containerName="sg-core" Jan 20 17:48:30 crc kubenswrapper[4558]: I0120 17:48:30.755133 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="86be6735-87cb-4609-9f3c-334ce0eac39b" containerName="sg-core" Jan 20 17:48:30 crc kubenswrapper[4558]: I0120 17:48:30.755454 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="86be6735-87cb-4609-9f3c-334ce0eac39b" containerName="proxy-httpd" Jan 20 17:48:30 crc kubenswrapper[4558]: I0120 17:48:30.755474 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="86be6735-87cb-4609-9f3c-334ce0eac39b" containerName="ceilometer-central-agent" Jan 20 17:48:30 crc kubenswrapper[4558]: I0120 17:48:30.755498 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="86be6735-87cb-4609-9f3c-334ce0eac39b" containerName="sg-core" Jan 20 17:48:30 crc kubenswrapper[4558]: I0120 17:48:30.755519 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="86be6735-87cb-4609-9f3c-334ce0eac39b" 
containerName="ceilometer-notification-agent" Jan 20 17:48:30 crc kubenswrapper[4558]: I0120 17:48:30.765822 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:30 crc kubenswrapper[4558]: I0120 17:48:30.772698 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:48:30 crc kubenswrapper[4558]: I0120 17:48:30.772904 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:48:30 crc kubenswrapper[4558]: I0120 17:48:30.796877 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:48:30 crc kubenswrapper[4558]: I0120 17:48:30.926608 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/846b8ff4-6a4d-4429-baf0-affe344f2443-scripts\") pod \"ceilometer-0\" (UID: \"846b8ff4-6a4d-4429-baf0-affe344f2443\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:30 crc kubenswrapper[4558]: I0120 17:48:30.926668 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/846b8ff4-6a4d-4429-baf0-affe344f2443-log-httpd\") pod \"ceilometer-0\" (UID: \"846b8ff4-6a4d-4429-baf0-affe344f2443\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:30 crc kubenswrapper[4558]: I0120 17:48:30.926839 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/846b8ff4-6a4d-4429-baf0-affe344f2443-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"846b8ff4-6a4d-4429-baf0-affe344f2443\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:30 crc kubenswrapper[4558]: I0120 17:48:30.926907 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/846b8ff4-6a4d-4429-baf0-affe344f2443-run-httpd\") pod \"ceilometer-0\" (UID: \"846b8ff4-6a4d-4429-baf0-affe344f2443\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:30 crc kubenswrapper[4558]: I0120 17:48:30.926994 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/846b8ff4-6a4d-4429-baf0-affe344f2443-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"846b8ff4-6a4d-4429-baf0-affe344f2443\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:30 crc kubenswrapper[4558]: I0120 17:48:30.927049 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/846b8ff4-6a4d-4429-baf0-affe344f2443-config-data\") pod \"ceilometer-0\" (UID: \"846b8ff4-6a4d-4429-baf0-affe344f2443\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:30 crc kubenswrapper[4558]: I0120 17:48:30.927493 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7bz5r\" (UniqueName: \"kubernetes.io/projected/846b8ff4-6a4d-4429-baf0-affe344f2443-kube-api-access-7bz5r\") pod \"ceilometer-0\" (UID: \"846b8ff4-6a4d-4429-baf0-affe344f2443\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:31 crc kubenswrapper[4558]: I0120 17:48:31.029115 4558 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-7bz5r\" (UniqueName: \"kubernetes.io/projected/846b8ff4-6a4d-4429-baf0-affe344f2443-kube-api-access-7bz5r\") pod \"ceilometer-0\" (UID: \"846b8ff4-6a4d-4429-baf0-affe344f2443\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:31 crc kubenswrapper[4558]: I0120 17:48:31.029192 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/846b8ff4-6a4d-4429-baf0-affe344f2443-scripts\") pod \"ceilometer-0\" (UID: \"846b8ff4-6a4d-4429-baf0-affe344f2443\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:31 crc kubenswrapper[4558]: I0120 17:48:31.029225 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/846b8ff4-6a4d-4429-baf0-affe344f2443-log-httpd\") pod \"ceilometer-0\" (UID: \"846b8ff4-6a4d-4429-baf0-affe344f2443\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:31 crc kubenswrapper[4558]: I0120 17:48:31.029290 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/846b8ff4-6a4d-4429-baf0-affe344f2443-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"846b8ff4-6a4d-4429-baf0-affe344f2443\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:31 crc kubenswrapper[4558]: I0120 17:48:31.029319 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/846b8ff4-6a4d-4429-baf0-affe344f2443-run-httpd\") pod \"ceilometer-0\" (UID: \"846b8ff4-6a4d-4429-baf0-affe344f2443\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:31 crc kubenswrapper[4558]: I0120 17:48:31.029345 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/846b8ff4-6a4d-4429-baf0-affe344f2443-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"846b8ff4-6a4d-4429-baf0-affe344f2443\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:31 crc kubenswrapper[4558]: I0120 17:48:31.029377 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/846b8ff4-6a4d-4429-baf0-affe344f2443-config-data\") pod \"ceilometer-0\" (UID: \"846b8ff4-6a4d-4429-baf0-affe344f2443\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:31 crc kubenswrapper[4558]: I0120 17:48:31.030029 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/846b8ff4-6a4d-4429-baf0-affe344f2443-log-httpd\") pod \"ceilometer-0\" (UID: \"846b8ff4-6a4d-4429-baf0-affe344f2443\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:31 crc kubenswrapper[4558]: I0120 17:48:31.030184 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/846b8ff4-6a4d-4429-baf0-affe344f2443-run-httpd\") pod \"ceilometer-0\" (UID: \"846b8ff4-6a4d-4429-baf0-affe344f2443\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:31 crc kubenswrapper[4558]: I0120 17:48:31.034981 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/846b8ff4-6a4d-4429-baf0-affe344f2443-config-data\") pod \"ceilometer-0\" (UID: \"846b8ff4-6a4d-4429-baf0-affe344f2443\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:31 crc 
kubenswrapper[4558]: I0120 17:48:31.035053 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/846b8ff4-6a4d-4429-baf0-affe344f2443-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"846b8ff4-6a4d-4429-baf0-affe344f2443\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:31 crc kubenswrapper[4558]: I0120 17:48:31.035069 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/846b8ff4-6a4d-4429-baf0-affe344f2443-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"846b8ff4-6a4d-4429-baf0-affe344f2443\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:31 crc kubenswrapper[4558]: I0120 17:48:31.042107 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/846b8ff4-6a4d-4429-baf0-affe344f2443-scripts\") pod \"ceilometer-0\" (UID: \"846b8ff4-6a4d-4429-baf0-affe344f2443\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:31 crc kubenswrapper[4558]: I0120 17:48:31.044746 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7bz5r\" (UniqueName: \"kubernetes.io/projected/846b8ff4-6a4d-4429-baf0-affe344f2443-kube-api-access-7bz5r\") pod \"ceilometer-0\" (UID: \"846b8ff4-6a4d-4429-baf0-affe344f2443\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:31 crc kubenswrapper[4558]: I0120 17:48:31.098104 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:31 crc kubenswrapper[4558]: I0120 17:48:31.419832 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:48:31 crc kubenswrapper[4558]: I0120 17:48:31.420195 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:48:31 crc kubenswrapper[4558]: I0120 17:48:31.420215 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:48:31 crc kubenswrapper[4558]: I0120 17:48:31.527901 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:48:31 crc kubenswrapper[4558]: W0120 17:48:31.529577 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod846b8ff4_6a4d_4429_baf0_affe344f2443.slice/crio-197285248b972a8e1cbf847911ca4cfe00f863c2018dc00c80b24115b79ca5be WatchSource:0}: Error finding container 197285248b972a8e1cbf847911ca4cfe00f863c2018dc00c80b24115b79ca5be: Status 404 returned error can't find the container with id 197285248b972a8e1cbf847911ca4cfe00f863c2018dc00c80b24115b79ca5be Jan 20 17:48:31 crc kubenswrapper[4558]: I0120 17:48:31.976730 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:48:31 crc kubenswrapper[4558]: I0120 17:48:31.977080 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:48:32 crc kubenswrapper[4558]: I0120 17:48:32.007911 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:48:32 crc kubenswrapper[4558]: I0120 17:48:32.016987 4558 kubelet.go:2542] 
"SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:48:32 crc kubenswrapper[4558]: I0120 17:48:32.432995 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"846b8ff4-6a4d-4429-baf0-affe344f2443","Type":"ContainerStarted","Data":"a0200376b46aaa5f9285cd9453a248f2b1abe657063801fbd9128f361cc1773b"} Jan 20 17:48:32 crc kubenswrapper[4558]: I0120 17:48:32.433512 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:48:32 crc kubenswrapper[4558]: I0120 17:48:32.433593 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:48:32 crc kubenswrapper[4558]: I0120 17:48:32.433607 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"846b8ff4-6a4d-4429-baf0-affe344f2443","Type":"ContainerStarted","Data":"197285248b972a8e1cbf847911ca4cfe00f863c2018dc00c80b24115b79ca5be"} Jan 20 17:48:32 crc kubenswrapper[4558]: I0120 17:48:32.583060 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="86be6735-87cb-4609-9f3c-334ce0eac39b" path="/var/lib/kubelet/pods/86be6735-87cb-4609-9f3c-334ce0eac39b/volumes" Jan 20 17:48:33 crc kubenswrapper[4558]: I0120 17:48:33.057272 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:48:33 crc kubenswrapper[4558]: I0120 17:48:33.139451 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:48:33 crc kubenswrapper[4558]: I0120 17:48:33.445068 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"846b8ff4-6a4d-4429-baf0-affe344f2443","Type":"ContainerStarted","Data":"5b002afccf52b36ca6aaeb4952c36ef3a337dc87396bc522ab4018f88bc487d0"} Jan 20 17:48:34 crc kubenswrapper[4558]: I0120 17:48:34.119219 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:48:34 crc kubenswrapper[4558]: I0120 17:48:34.157800 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:48:34 crc kubenswrapper[4558]: I0120 17:48:34.459357 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"846b8ff4-6a4d-4429-baf0-affe344f2443","Type":"ContainerStarted","Data":"2fa9e5be032d97b5b704227d73210d4f13c21adfffb77cabee05509d24e7c834"} Jan 20 17:48:35 crc kubenswrapper[4558]: I0120 17:48:35.483075 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"846b8ff4-6a4d-4429-baf0-affe344f2443","Type":"ContainerStarted","Data":"01900246a5a0ba025c5a871a5361756e9505243d35ab1589a6d16c801bc7454a"} Jan 20 17:48:35 crc kubenswrapper[4558]: I0120 17:48:35.501881 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=1.89810759 podStartE2EDuration="5.501862808s" podCreationTimestamp="2026-01-20 17:48:30 +0000 UTC" firstStartedPulling="2026-01-20 17:48:31.533033003 +0000 UTC m=+4005.293370970" lastFinishedPulling="2026-01-20 17:48:35.13678822 +0000 UTC m=+4008.897126188" 
observedRunningTime="2026-01-20 17:48:35.50009829 +0000 UTC m=+4009.260436257" watchObservedRunningTime="2026-01-20 17:48:35.501862808 +0000 UTC m=+4009.262200774" Jan 20 17:48:36 crc kubenswrapper[4558]: I0120 17:48:36.493905 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:48:39 crc kubenswrapper[4558]: I0120 17:48:39.805206 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.209712 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell0-cell-mapping-28s5c"] Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.211486 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-28s5c" Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.213124 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-manage-scripts" Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.213387 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-manage-config-data" Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.218075 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-cell-mapping-28s5c"] Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.323452 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l68bq\" (UniqueName: \"kubernetes.io/projected/fa9f3a26-1b5a-4d02-a79d-67585f099131-kube-api-access-l68bq\") pod \"nova-cell0-cell-mapping-28s5c\" (UID: \"fa9f3a26-1b5a-4d02-a79d-67585f099131\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-28s5c" Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.323768 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa9f3a26-1b5a-4d02-a79d-67585f099131-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-28s5c\" (UID: \"fa9f3a26-1b5a-4d02-a79d-67585f099131\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-28s5c" Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.323947 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa9f3a26-1b5a-4d02-a79d-67585f099131-config-data\") pod \"nova-cell0-cell-mapping-28s5c\" (UID: \"fa9f3a26-1b5a-4d02-a79d-67585f099131\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-28s5c" Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.324246 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fa9f3a26-1b5a-4d02-a79d-67585f099131-scripts\") pod \"nova-cell0-cell-mapping-28s5c\" (UID: \"fa9f3a26-1b5a-4d02-a79d-67585f099131\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-28s5c" Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.334356 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.335978 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.343400 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-api-config-data" Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.350768 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.406392 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.407980 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.416630 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-metadata-config-data" Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.426199 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa9f3a26-1b5a-4d02-a79d-67585f099131-config-data\") pod \"nova-cell0-cell-mapping-28s5c\" (UID: \"fa9f3a26-1b5a-4d02-a79d-67585f099131\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-28s5c" Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.426265 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1f9f16e6-6a68-47b5-9f90-7275cdb64dd0-logs\") pod \"nova-api-0\" (UID: \"1f9f16e6-6a68-47b5-9f90-7275cdb64dd0\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.426366 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1f9f16e6-6a68-47b5-9f90-7275cdb64dd0-config-data\") pod \"nova-api-0\" (UID: \"1f9f16e6-6a68-47b5-9f90-7275cdb64dd0\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.426432 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fa9f3a26-1b5a-4d02-a79d-67585f099131-scripts\") pod \"nova-cell0-cell-mapping-28s5c\" (UID: \"fa9f3a26-1b5a-4d02-a79d-67585f099131\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-28s5c" Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.426531 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l68bq\" (UniqueName: \"kubernetes.io/projected/fa9f3a26-1b5a-4d02-a79d-67585f099131-kube-api-access-l68bq\") pod \"nova-cell0-cell-mapping-28s5c\" (UID: \"fa9f3a26-1b5a-4d02-a79d-67585f099131\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-28s5c" Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.426639 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa9f3a26-1b5a-4d02-a79d-67585f099131-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-28s5c\" (UID: \"fa9f3a26-1b5a-4d02-a79d-67585f099131\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-28s5c" Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.426665 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f9f16e6-6a68-47b5-9f90-7275cdb64dd0-combined-ca-bundle\") pod 
\"nova-api-0\" (UID: \"1f9f16e6-6a68-47b5-9f90-7275cdb64dd0\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.426683 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fjpqc\" (UniqueName: \"kubernetes.io/projected/1f9f16e6-6a68-47b5-9f90-7275cdb64dd0-kube-api-access-fjpqc\") pod \"nova-api-0\" (UID: \"1f9f16e6-6a68-47b5-9f90-7275cdb64dd0\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.443296 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.446941 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa9f3a26-1b5a-4d02-a79d-67585f099131-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-28s5c\" (UID: \"fa9f3a26-1b5a-4d02-a79d-67585f099131\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-28s5c" Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.446954 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fa9f3a26-1b5a-4d02-a79d-67585f099131-scripts\") pod \"nova-cell0-cell-mapping-28s5c\" (UID: \"fa9f3a26-1b5a-4d02-a79d-67585f099131\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-28s5c" Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.451249 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l68bq\" (UniqueName: \"kubernetes.io/projected/fa9f3a26-1b5a-4d02-a79d-67585f099131-kube-api-access-l68bq\") pod \"nova-cell0-cell-mapping-28s5c\" (UID: \"fa9f3a26-1b5a-4d02-a79d-67585f099131\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-28s5c" Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.454658 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa9f3a26-1b5a-4d02-a79d-67585f099131-config-data\") pod \"nova-cell0-cell-mapping-28s5c\" (UID: \"fa9f3a26-1b5a-4d02-a79d-67585f099131\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-28s5c" Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.513402 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.515667 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.518875 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-scheduler-config-data" Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.521749 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.529295 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-28s5c" Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.529789 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e9fc64b-291b-4710-8518-8a88e623cbc5-config-data\") pod \"nova-metadata-0\" (UID: \"3e9fc64b-291b-4710-8518-8a88e623cbc5\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.529830 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s46kv\" (UniqueName: \"kubernetes.io/projected/3e9fc64b-291b-4710-8518-8a88e623cbc5-kube-api-access-s46kv\") pod \"nova-metadata-0\" (UID: \"3e9fc64b-291b-4710-8518-8a88e623cbc5\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.529869 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1f9f16e6-6a68-47b5-9f90-7275cdb64dd0-config-data\") pod \"nova-api-0\" (UID: \"1f9f16e6-6a68-47b5-9f90-7275cdb64dd0\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.529902 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3e9fc64b-291b-4710-8518-8a88e623cbc5-logs\") pod \"nova-metadata-0\" (UID: \"3e9fc64b-291b-4710-8518-8a88e623cbc5\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.529986 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f9f16e6-6a68-47b5-9f90-7275cdb64dd0-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"1f9f16e6-6a68-47b5-9f90-7275cdb64dd0\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.530004 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fjpqc\" (UniqueName: \"kubernetes.io/projected/1f9f16e6-6a68-47b5-9f90-7275cdb64dd0-kube-api-access-fjpqc\") pod \"nova-api-0\" (UID: \"1f9f16e6-6a68-47b5-9f90-7275cdb64dd0\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.530028 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e9fc64b-291b-4710-8518-8a88e623cbc5-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"3e9fc64b-291b-4710-8518-8a88e623cbc5\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.530064 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1f9f16e6-6a68-47b5-9f90-7275cdb64dd0-logs\") pod \"nova-api-0\" (UID: \"1f9f16e6-6a68-47b5-9f90-7275cdb64dd0\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.535698 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1f9f16e6-6a68-47b5-9f90-7275cdb64dd0-logs\") pod \"nova-api-0\" (UID: \"1f9f16e6-6a68-47b5-9f90-7275cdb64dd0\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.545374 4558 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f9f16e6-6a68-47b5-9f90-7275cdb64dd0-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"1f9f16e6-6a68-47b5-9f90-7275cdb64dd0\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.547778 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.549254 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.550717 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1f9f16e6-6a68-47b5-9f90-7275cdb64dd0-config-data\") pod \"nova-api-0\" (UID: \"1f9f16e6-6a68-47b5-9f90-7275cdb64dd0\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.554742 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fjpqc\" (UniqueName: \"kubernetes.io/projected/1f9f16e6-6a68-47b5-9f90-7275cdb64dd0-kube-api-access-fjpqc\") pod \"nova-api-0\" (UID: \"1f9f16e6-6a68-47b5-9f90-7275cdb64dd0\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.557589 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-novncproxy-config-data" Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.565406 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.631155 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/60c9785f-c5f8-408e-ac84-12756b4ba66a-config-data\") pod \"nova-scheduler-0\" (UID: \"60c9785f-c5f8-408e-ac84-12756b4ba66a\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.631324 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/718bc2e9-d6bd-44b9-bbf0-4a4be370af4b-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"718bc2e9-d6bd-44b9-bbf0-4a4be370af4b\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.631396 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p6nqh\" (UniqueName: \"kubernetes.io/projected/60c9785f-c5f8-408e-ac84-12756b4ba66a-kube-api-access-p6nqh\") pod \"nova-scheduler-0\" (UID: \"60c9785f-c5f8-408e-ac84-12756b4ba66a\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.631467 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3e9fc64b-291b-4710-8518-8a88e623cbc5-logs\") pod \"nova-metadata-0\" (UID: \"3e9fc64b-291b-4710-8518-8a88e623cbc5\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.631911 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3e9fc64b-291b-4710-8518-8a88e623cbc5-logs\") pod \"nova-metadata-0\" (UID: \"3e9fc64b-291b-4710-8518-8a88e623cbc5\") " 
pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.632904 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/60c9785f-c5f8-408e-ac84-12756b4ba66a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"60c9785f-c5f8-408e-ac84-12756b4ba66a\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.633005 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e9fc64b-291b-4710-8518-8a88e623cbc5-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"3e9fc64b-291b-4710-8518-8a88e623cbc5\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.633059 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/718bc2e9-d6bd-44b9-bbf0-4a4be370af4b-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"718bc2e9-d6bd-44b9-bbf0-4a4be370af4b\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.633104 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9bjtq\" (UniqueName: \"kubernetes.io/projected/718bc2e9-d6bd-44b9-bbf0-4a4be370af4b-kube-api-access-9bjtq\") pod \"nova-cell1-novncproxy-0\" (UID: \"718bc2e9-d6bd-44b9-bbf0-4a4be370af4b\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.633221 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e9fc64b-291b-4710-8518-8a88e623cbc5-config-data\") pod \"nova-metadata-0\" (UID: \"3e9fc64b-291b-4710-8518-8a88e623cbc5\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.633256 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s46kv\" (UniqueName: \"kubernetes.io/projected/3e9fc64b-291b-4710-8518-8a88e623cbc5-kube-api-access-s46kv\") pod \"nova-metadata-0\" (UID: \"3e9fc64b-291b-4710-8518-8a88e623cbc5\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.637753 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e9fc64b-291b-4710-8518-8a88e623cbc5-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"3e9fc64b-291b-4710-8518-8a88e623cbc5\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.637844 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e9fc64b-291b-4710-8518-8a88e623cbc5-config-data\") pod \"nova-metadata-0\" (UID: \"3e9fc64b-291b-4710-8518-8a88e623cbc5\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.648276 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s46kv\" (UniqueName: \"kubernetes.io/projected/3e9fc64b-291b-4710-8518-8a88e623cbc5-kube-api-access-s46kv\") pod \"nova-metadata-0\" (UID: \"3e9fc64b-291b-4710-8518-8a88e623cbc5\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:48:40 crc 
kubenswrapper[4558]: I0120 17:48:40.654205 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.734149 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.738443 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9bjtq\" (UniqueName: \"kubernetes.io/projected/718bc2e9-d6bd-44b9-bbf0-4a4be370af4b-kube-api-access-9bjtq\") pod \"nova-cell1-novncproxy-0\" (UID: \"718bc2e9-d6bd-44b9-bbf0-4a4be370af4b\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.739435 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/60c9785f-c5f8-408e-ac84-12756b4ba66a-config-data\") pod \"nova-scheduler-0\" (UID: \"60c9785f-c5f8-408e-ac84-12756b4ba66a\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.739463 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/718bc2e9-d6bd-44b9-bbf0-4a4be370af4b-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"718bc2e9-d6bd-44b9-bbf0-4a4be370af4b\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.739486 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p6nqh\" (UniqueName: \"kubernetes.io/projected/60c9785f-c5f8-408e-ac84-12756b4ba66a-kube-api-access-p6nqh\") pod \"nova-scheduler-0\" (UID: \"60c9785f-c5f8-408e-ac84-12756b4ba66a\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.739612 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/60c9785f-c5f8-408e-ac84-12756b4ba66a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"60c9785f-c5f8-408e-ac84-12756b4ba66a\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.739661 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/718bc2e9-d6bd-44b9-bbf0-4a4be370af4b-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"718bc2e9-d6bd-44b9-bbf0-4a4be370af4b\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.747695 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/60c9785f-c5f8-408e-ac84-12756b4ba66a-config-data\") pod \"nova-scheduler-0\" (UID: \"60c9785f-c5f8-408e-ac84-12756b4ba66a\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.747913 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/60c9785f-c5f8-408e-ac84-12756b4ba66a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"60c9785f-c5f8-408e-ac84-12756b4ba66a\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.748135 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" 
(UniqueName: \"kubernetes.io/secret/718bc2e9-d6bd-44b9-bbf0-4a4be370af4b-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"718bc2e9-d6bd-44b9-bbf0-4a4be370af4b\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.755681 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/718bc2e9-d6bd-44b9-bbf0-4a4be370af4b-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"718bc2e9-d6bd-44b9-bbf0-4a4be370af4b\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.758879 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p6nqh\" (UniqueName: \"kubernetes.io/projected/60c9785f-c5f8-408e-ac84-12756b4ba66a-kube-api-access-p6nqh\") pod \"nova-scheduler-0\" (UID: \"60c9785f-c5f8-408e-ac84-12756b4ba66a\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.759481 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9bjtq\" (UniqueName: \"kubernetes.io/projected/718bc2e9-d6bd-44b9-bbf0-4a4be370af4b-kube-api-access-9bjtq\") pod \"nova-cell1-novncproxy-0\" (UID: \"718bc2e9-d6bd-44b9-bbf0-4a4be370af4b\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.919259 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:48:40 crc kubenswrapper[4558]: I0120 17:48:40.931516 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:48:41 crc kubenswrapper[4558]: I0120 17:48:41.025740 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-cell-mapping-28s5c"] Jan 20 17:48:41 crc kubenswrapper[4558]: I0120 17:48:41.109224 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-db-sync-zjq8p"] Jan 20 17:48:41 crc kubenswrapper[4558]: I0120 17:48:41.110674 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-zjq8p" Jan 20 17:48:41 crc kubenswrapper[4558]: I0120 17:48:41.125424 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-conductor-scripts" Jan 20 17:48:41 crc kubenswrapper[4558]: I0120 17:48:41.126081 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-conductor-config-data" Jan 20 17:48:41 crc kubenswrapper[4558]: I0120 17:48:41.127656 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-db-sync-zjq8p"] Jan 20 17:48:41 crc kubenswrapper[4558]: W0120 17:48:41.184440 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1f9f16e6_6a68_47b5_9f90_7275cdb64dd0.slice/crio-a9c2089f23726006d0f38990eab302ec9552cac80149272bd8c33b3219bdf518 WatchSource:0}: Error finding container a9c2089f23726006d0f38990eab302ec9552cac80149272bd8c33b3219bdf518: Status 404 returned error can't find the container with id a9c2089f23726006d0f38990eab302ec9552cac80149272bd8c33b3219bdf518 Jan 20 17:48:41 crc kubenswrapper[4558]: I0120 17:48:41.190298 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:48:41 crc kubenswrapper[4558]: I0120 17:48:41.206180 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:48:41 crc kubenswrapper[4558]: I0120 17:48:41.277783 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bebbe6bd-5855-4553-bbb4-7d994a90d025-scripts\") pod \"nova-cell1-conductor-db-sync-zjq8p\" (UID: \"bebbe6bd-5855-4553-bbb4-7d994a90d025\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-zjq8p" Jan 20 17:48:41 crc kubenswrapper[4558]: I0120 17:48:41.288302 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sdlfp\" (UniqueName: \"kubernetes.io/projected/bebbe6bd-5855-4553-bbb4-7d994a90d025-kube-api-access-sdlfp\") pod \"nova-cell1-conductor-db-sync-zjq8p\" (UID: \"bebbe6bd-5855-4553-bbb4-7d994a90d025\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-zjq8p" Jan 20 17:48:41 crc kubenswrapper[4558]: I0120 17:48:41.288373 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bebbe6bd-5855-4553-bbb4-7d994a90d025-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-zjq8p\" (UID: \"bebbe6bd-5855-4553-bbb4-7d994a90d025\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-zjq8p" Jan 20 17:48:41 crc kubenswrapper[4558]: I0120 17:48:41.288467 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bebbe6bd-5855-4553-bbb4-7d994a90d025-config-data\") pod \"nova-cell1-conductor-db-sync-zjq8p\" (UID: \"bebbe6bd-5855-4553-bbb4-7d994a90d025\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-zjq8p" Jan 20 17:48:41 crc kubenswrapper[4558]: I0120 17:48:41.390662 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sdlfp\" (UniqueName: \"kubernetes.io/projected/bebbe6bd-5855-4553-bbb4-7d994a90d025-kube-api-access-sdlfp\") pod \"nova-cell1-conductor-db-sync-zjq8p\" (UID: 
\"bebbe6bd-5855-4553-bbb4-7d994a90d025\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-zjq8p" Jan 20 17:48:41 crc kubenswrapper[4558]: I0120 17:48:41.390713 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bebbe6bd-5855-4553-bbb4-7d994a90d025-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-zjq8p\" (UID: \"bebbe6bd-5855-4553-bbb4-7d994a90d025\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-zjq8p" Jan 20 17:48:41 crc kubenswrapper[4558]: I0120 17:48:41.390768 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bebbe6bd-5855-4553-bbb4-7d994a90d025-config-data\") pod \"nova-cell1-conductor-db-sync-zjq8p\" (UID: \"bebbe6bd-5855-4553-bbb4-7d994a90d025\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-zjq8p" Jan 20 17:48:41 crc kubenswrapper[4558]: I0120 17:48:41.390872 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bebbe6bd-5855-4553-bbb4-7d994a90d025-scripts\") pod \"nova-cell1-conductor-db-sync-zjq8p\" (UID: \"bebbe6bd-5855-4553-bbb4-7d994a90d025\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-zjq8p" Jan 20 17:48:41 crc kubenswrapper[4558]: I0120 17:48:41.395428 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bebbe6bd-5855-4553-bbb4-7d994a90d025-scripts\") pod \"nova-cell1-conductor-db-sync-zjq8p\" (UID: \"bebbe6bd-5855-4553-bbb4-7d994a90d025\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-zjq8p" Jan 20 17:48:41 crc kubenswrapper[4558]: I0120 17:48:41.395521 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bebbe6bd-5855-4553-bbb4-7d994a90d025-config-data\") pod \"nova-cell1-conductor-db-sync-zjq8p\" (UID: \"bebbe6bd-5855-4553-bbb4-7d994a90d025\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-zjq8p" Jan 20 17:48:41 crc kubenswrapper[4558]: I0120 17:48:41.396088 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bebbe6bd-5855-4553-bbb4-7d994a90d025-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-zjq8p\" (UID: \"bebbe6bd-5855-4553-bbb4-7d994a90d025\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-zjq8p" Jan 20 17:48:41 crc kubenswrapper[4558]: I0120 17:48:41.412718 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sdlfp\" (UniqueName: \"kubernetes.io/projected/bebbe6bd-5855-4553-bbb4-7d994a90d025-kube-api-access-sdlfp\") pod \"nova-cell1-conductor-db-sync-zjq8p\" (UID: \"bebbe6bd-5855-4553-bbb4-7d994a90d025\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-zjq8p" Jan 20 17:48:41 crc kubenswrapper[4558]: I0120 17:48:41.442720 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-zjq8p" Jan 20 17:48:41 crc kubenswrapper[4558]: I0120 17:48:41.450747 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:48:41 crc kubenswrapper[4558]: W0120 17:48:41.462822 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod718bc2e9_d6bd_44b9_bbf0_4a4be370af4b.slice/crio-496ef5bcef8667df4361619a15079a5079138da091c90286ef96dab3e34e54df WatchSource:0}: Error finding container 496ef5bcef8667df4361619a15079a5079138da091c90286ef96dab3e34e54df: Status 404 returned error can't find the container with id 496ef5bcef8667df4361619a15079a5079138da091c90286ef96dab3e34e54df Jan 20 17:48:41 crc kubenswrapper[4558]: I0120 17:48:41.589577 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:48:41 crc kubenswrapper[4558]: I0120 17:48:41.602428 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-28s5c" event={"ID":"fa9f3a26-1b5a-4d02-a79d-67585f099131","Type":"ContainerStarted","Data":"ca177c8c965ad48d0ae78252e2d314c13abe059f39572e5e736a0d0ce022c3b7"} Jan 20 17:48:41 crc kubenswrapper[4558]: I0120 17:48:41.602482 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-28s5c" event={"ID":"fa9f3a26-1b5a-4d02-a79d-67585f099131","Type":"ContainerStarted","Data":"f07e457463e191a052ca6fdb1b7aa09a75cc39fe1a55354e855aa1b2009daa50"} Jan 20 17:48:41 crc kubenswrapper[4558]: I0120 17:48:41.616393 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"1f9f16e6-6a68-47b5-9f90-7275cdb64dd0","Type":"ContainerStarted","Data":"f8ea5c6a5574b26aaec9cf8a017c58068094a35c5f9b409bcb9bfc0e7da7aff8"} Jan 20 17:48:41 crc kubenswrapper[4558]: I0120 17:48:41.616439 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"1f9f16e6-6a68-47b5-9f90-7275cdb64dd0","Type":"ContainerStarted","Data":"a9c2089f23726006d0f38990eab302ec9552cac80149272bd8c33b3219bdf518"} Jan 20 17:48:41 crc kubenswrapper[4558]: I0120 17:48:41.617467 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-28s5c" podStartSLOduration=1.6174543940000001 podStartE2EDuration="1.617454394s" podCreationTimestamp="2026-01-20 17:48:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:48:41.616662794 +0000 UTC m=+4015.377000762" watchObservedRunningTime="2026-01-20 17:48:41.617454394 +0000 UTC m=+4015.377792361" Jan 20 17:48:41 crc kubenswrapper[4558]: I0120 17:48:41.618249 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"718bc2e9-d6bd-44b9-bbf0-4a4be370af4b","Type":"ContainerStarted","Data":"496ef5bcef8667df4361619a15079a5079138da091c90286ef96dab3e34e54df"} Jan 20 17:48:41 crc kubenswrapper[4558]: W0120 17:48:41.618807 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod60c9785f_c5f8_408e_ac84_12756b4ba66a.slice/crio-0cfe88b8ad7d07036a7b71dec6291dcc2512e43798bdd1ca17e8b7bed048023b WatchSource:0}: Error finding container 
0cfe88b8ad7d07036a7b71dec6291dcc2512e43798bdd1ca17e8b7bed048023b: Status 404 returned error can't find the container with id 0cfe88b8ad7d07036a7b71dec6291dcc2512e43798bdd1ca17e8b7bed048023b Jan 20 17:48:41 crc kubenswrapper[4558]: I0120 17:48:41.619872 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"3e9fc64b-291b-4710-8518-8a88e623cbc5","Type":"ContainerStarted","Data":"afb57f59ca3f137658ac2a110a0cb9b7538255ec8aa20e0588c2989b3cb473ce"} Jan 20 17:48:41 crc kubenswrapper[4558]: I0120 17:48:41.619893 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"3e9fc64b-291b-4710-8518-8a88e623cbc5","Type":"ContainerStarted","Data":"08e41f9d41f751e9577f9150bfd1a1253d0efe921c6627198b5c31ed66ac575f"} Jan 20 17:48:41 crc kubenswrapper[4558]: I0120 17:48:41.940911 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-db-sync-zjq8p"] Jan 20 17:48:42 crc kubenswrapper[4558]: I0120 17:48:42.635540 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"60c9785f-c5f8-408e-ac84-12756b4ba66a","Type":"ContainerStarted","Data":"96dbff4a5c402cf5402baf41517d6be17579aac62d5269d3b0024b1e9269835d"} Jan 20 17:48:42 crc kubenswrapper[4558]: I0120 17:48:42.635615 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"60c9785f-c5f8-408e-ac84-12756b4ba66a","Type":"ContainerStarted","Data":"0cfe88b8ad7d07036a7b71dec6291dcc2512e43798bdd1ca17e8b7bed048023b"} Jan 20 17:48:42 crc kubenswrapper[4558]: I0120 17:48:42.641097 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"1f9f16e6-6a68-47b5-9f90-7275cdb64dd0","Type":"ContainerStarted","Data":"00d072144c12ac7881939acf0044e57715712d5f7941513463ed22a19efbc679"} Jan 20 17:48:42 crc kubenswrapper[4558]: I0120 17:48:42.644128 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"718bc2e9-d6bd-44b9-bbf0-4a4be370af4b","Type":"ContainerStarted","Data":"881408a631c4245196eae3ce82685cf4d0bb0ce17b0c3d4fc2b5266e8498bc58"} Jan 20 17:48:42 crc kubenswrapper[4558]: I0120 17:48:42.646051 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"3e9fc64b-291b-4710-8518-8a88e623cbc5","Type":"ContainerStarted","Data":"7cf3766f60b867469a8d20c3b2ac377161d546dedaaa6b7bef58a20770a79799"} Jan 20 17:48:42 crc kubenswrapper[4558]: I0120 17:48:42.649736 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-zjq8p" event={"ID":"bebbe6bd-5855-4553-bbb4-7d994a90d025","Type":"ContainerStarted","Data":"e9d0cdfef66d95da79eeb71c5f5d77c2c15fec36a463b30437310581ba2767bc"} Jan 20 17:48:42 crc kubenswrapper[4558]: I0120 17:48:42.649794 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-zjq8p" event={"ID":"bebbe6bd-5855-4553-bbb4-7d994a90d025","Type":"ContainerStarted","Data":"f8c130af88dbe444fbb2dfdda88b431b7a333438c4618ad842b0fe09a8967ec4"} Jan 20 17:48:42 crc kubenswrapper[4558]: I0120 17:48:42.680320 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-api-0" podStartSLOduration=2.680295185 podStartE2EDuration="2.680295185s" podCreationTimestamp="2026-01-20 17:48:40 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:48:42.679157716 +0000 UTC m=+4016.439495684" watchObservedRunningTime="2026-01-20 17:48:42.680295185 +0000 UTC m=+4016.440633152" Jan 20 17:48:42 crc kubenswrapper[4558]: I0120 17:48:42.683134 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-scheduler-0" podStartSLOduration=2.683118102 podStartE2EDuration="2.683118102s" podCreationTimestamp="2026-01-20 17:48:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:48:42.661099008 +0000 UTC m=+4016.421436976" watchObservedRunningTime="2026-01-20 17:48:42.683118102 +0000 UTC m=+4016.443456069" Jan 20 17:48:42 crc kubenswrapper[4558]: I0120 17:48:42.692375 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-zjq8p" podStartSLOduration=1.692350984 podStartE2EDuration="1.692350984s" podCreationTimestamp="2026-01-20 17:48:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:48:42.690781974 +0000 UTC m=+4016.451119941" watchObservedRunningTime="2026-01-20 17:48:42.692350984 +0000 UTC m=+4016.452688951" Jan 20 17:48:42 crc kubenswrapper[4558]: I0120 17:48:42.719917 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-metadata-0" podStartSLOduration=2.719895509 podStartE2EDuration="2.719895509s" podCreationTimestamp="2026-01-20 17:48:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:48:42.702945976 +0000 UTC m=+4016.463283944" watchObservedRunningTime="2026-01-20 17:48:42.719895509 +0000 UTC m=+4016.480233475" Jan 20 17:48:42 crc kubenswrapper[4558]: I0120 17:48:42.732005 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" podStartSLOduration=2.731980863 podStartE2EDuration="2.731980863s" podCreationTimestamp="2026-01-20 17:48:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:48:42.718358679 +0000 UTC m=+4016.478696646" watchObservedRunningTime="2026-01-20 17:48:42.731980863 +0000 UTC m=+4016.492318830" Jan 20 17:48:42 crc kubenswrapper[4558]: I0120 17:48:42.969910 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:48:42 crc kubenswrapper[4558]: I0120 17:48:42.984484 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:48:44 crc kubenswrapper[4558]: I0120 17:48:44.684452 4558 generic.go:334] "Generic (PLEG): container finished" podID="bebbe6bd-5855-4553-bbb4-7d994a90d025" containerID="e9d0cdfef66d95da79eeb71c5f5d77c2c15fec36a463b30437310581ba2767bc" exitCode=0 Jan 20 17:48:44 crc kubenswrapper[4558]: I0120 17:48:44.684538 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-zjq8p" event={"ID":"bebbe6bd-5855-4553-bbb4-7d994a90d025","Type":"ContainerDied","Data":"e9d0cdfef66d95da79eeb71c5f5d77c2c15fec36a463b30437310581ba2767bc"} Jan 20 17:48:44 crc kubenswrapper[4558]: I0120 
17:48:44.684927 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" podUID="718bc2e9-d6bd-44b9-bbf0-4a4be370af4b" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://881408a631c4245196eae3ce82685cf4d0bb0ce17b0c3d4fc2b5266e8498bc58" gracePeriod=30 Jan 20 17:48:44 crc kubenswrapper[4558]: I0120 17:48:44.685266 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="3e9fc64b-291b-4710-8518-8a88e623cbc5" containerName="nova-metadata-log" containerID="cri-o://afb57f59ca3f137658ac2a110a0cb9b7538255ec8aa20e0588c2989b3cb473ce" gracePeriod=30 Jan 20 17:48:44 crc kubenswrapper[4558]: I0120 17:48:44.685463 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="3e9fc64b-291b-4710-8518-8a88e623cbc5" containerName="nova-metadata-metadata" containerID="cri-o://7cf3766f60b867469a8d20c3b2ac377161d546dedaaa6b7bef58a20770a79799" gracePeriod=30 Jan 20 17:48:45 crc kubenswrapper[4558]: I0120 17:48:45.262968 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:48:45 crc kubenswrapper[4558]: I0120 17:48:45.374380 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s46kv\" (UniqueName: \"kubernetes.io/projected/3e9fc64b-291b-4710-8518-8a88e623cbc5-kube-api-access-s46kv\") pod \"3e9fc64b-291b-4710-8518-8a88e623cbc5\" (UID: \"3e9fc64b-291b-4710-8518-8a88e623cbc5\") " Jan 20 17:48:45 crc kubenswrapper[4558]: I0120 17:48:45.374491 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e9fc64b-291b-4710-8518-8a88e623cbc5-config-data\") pod \"3e9fc64b-291b-4710-8518-8a88e623cbc5\" (UID: \"3e9fc64b-291b-4710-8518-8a88e623cbc5\") " Jan 20 17:48:45 crc kubenswrapper[4558]: I0120 17:48:45.374528 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e9fc64b-291b-4710-8518-8a88e623cbc5-combined-ca-bundle\") pod \"3e9fc64b-291b-4710-8518-8a88e623cbc5\" (UID: \"3e9fc64b-291b-4710-8518-8a88e623cbc5\") " Jan 20 17:48:45 crc kubenswrapper[4558]: I0120 17:48:45.374580 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3e9fc64b-291b-4710-8518-8a88e623cbc5-logs\") pod \"3e9fc64b-291b-4710-8518-8a88e623cbc5\" (UID: \"3e9fc64b-291b-4710-8518-8a88e623cbc5\") " Jan 20 17:48:45 crc kubenswrapper[4558]: I0120 17:48:45.375453 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3e9fc64b-291b-4710-8518-8a88e623cbc5-logs" (OuterVolumeSpecName: "logs") pod "3e9fc64b-291b-4710-8518-8a88e623cbc5" (UID: "3e9fc64b-291b-4710-8518-8a88e623cbc5"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:48:45 crc kubenswrapper[4558]: I0120 17:48:45.379911 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e9fc64b-291b-4710-8518-8a88e623cbc5-kube-api-access-s46kv" (OuterVolumeSpecName: "kube-api-access-s46kv") pod "3e9fc64b-291b-4710-8518-8a88e623cbc5" (UID: "3e9fc64b-291b-4710-8518-8a88e623cbc5"). InnerVolumeSpecName "kube-api-access-s46kv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:48:45 crc kubenswrapper[4558]: I0120 17:48:45.402799 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e9fc64b-291b-4710-8518-8a88e623cbc5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3e9fc64b-291b-4710-8518-8a88e623cbc5" (UID: "3e9fc64b-291b-4710-8518-8a88e623cbc5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:48:45 crc kubenswrapper[4558]: I0120 17:48:45.403384 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e9fc64b-291b-4710-8518-8a88e623cbc5-config-data" (OuterVolumeSpecName: "config-data") pod "3e9fc64b-291b-4710-8518-8a88e623cbc5" (UID: "3e9fc64b-291b-4710-8518-8a88e623cbc5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:48:45 crc kubenswrapper[4558]: I0120 17:48:45.476015 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e9fc64b-291b-4710-8518-8a88e623cbc5-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:45 crc kubenswrapper[4558]: I0120 17:48:45.476039 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e9fc64b-291b-4710-8518-8a88e623cbc5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:45 crc kubenswrapper[4558]: I0120 17:48:45.476054 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3e9fc64b-291b-4710-8518-8a88e623cbc5-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:45 crc kubenswrapper[4558]: I0120 17:48:45.476067 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s46kv\" (UniqueName: \"kubernetes.io/projected/3e9fc64b-291b-4710-8518-8a88e623cbc5-kube-api-access-s46kv\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:45 crc kubenswrapper[4558]: I0120 17:48:45.698752 4558 generic.go:334] "Generic (PLEG): container finished" podID="718bc2e9-d6bd-44b9-bbf0-4a4be370af4b" containerID="881408a631c4245196eae3ce82685cf4d0bb0ce17b0c3d4fc2b5266e8498bc58" exitCode=0 Jan 20 17:48:45 crc kubenswrapper[4558]: I0120 17:48:45.698823 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"718bc2e9-d6bd-44b9-bbf0-4a4be370af4b","Type":"ContainerDied","Data":"881408a631c4245196eae3ce82685cf4d0bb0ce17b0c3d4fc2b5266e8498bc58"} Jan 20 17:48:45 crc kubenswrapper[4558]: I0120 17:48:45.701119 4558 generic.go:334] "Generic (PLEG): container finished" podID="3e9fc64b-291b-4710-8518-8a88e623cbc5" containerID="7cf3766f60b867469a8d20c3b2ac377161d546dedaaa6b7bef58a20770a79799" exitCode=0 Jan 20 17:48:45 crc kubenswrapper[4558]: I0120 17:48:45.701155 4558 generic.go:334] "Generic (PLEG): container finished" podID="3e9fc64b-291b-4710-8518-8a88e623cbc5" containerID="afb57f59ca3f137658ac2a110a0cb9b7538255ec8aa20e0588c2989b3cb473ce" exitCode=143 Jan 20 17:48:45 crc kubenswrapper[4558]: I0120 17:48:45.701195 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"3e9fc64b-291b-4710-8518-8a88e623cbc5","Type":"ContainerDied","Data":"7cf3766f60b867469a8d20c3b2ac377161d546dedaaa6b7bef58a20770a79799"} Jan 20 17:48:45 crc kubenswrapper[4558]: I0120 17:48:45.701270 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" 
event={"ID":"3e9fc64b-291b-4710-8518-8a88e623cbc5","Type":"ContainerDied","Data":"afb57f59ca3f137658ac2a110a0cb9b7538255ec8aa20e0588c2989b3cb473ce"} Jan 20 17:48:45 crc kubenswrapper[4558]: I0120 17:48:45.701286 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"3e9fc64b-291b-4710-8518-8a88e623cbc5","Type":"ContainerDied","Data":"08e41f9d41f751e9577f9150bfd1a1253d0efe921c6627198b5c31ed66ac575f"} Jan 20 17:48:45 crc kubenswrapper[4558]: I0120 17:48:45.701204 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:48:45 crc kubenswrapper[4558]: I0120 17:48:45.701324 4558 scope.go:117] "RemoveContainer" containerID="7cf3766f60b867469a8d20c3b2ac377161d546dedaaa6b7bef58a20770a79799" Jan 20 17:48:45 crc kubenswrapper[4558]: I0120 17:48:45.702757 4558 generic.go:334] "Generic (PLEG): container finished" podID="fa9f3a26-1b5a-4d02-a79d-67585f099131" containerID="ca177c8c965ad48d0ae78252e2d314c13abe059f39572e5e736a0d0ce022c3b7" exitCode=0 Jan 20 17:48:45 crc kubenswrapper[4558]: I0120 17:48:45.702828 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-28s5c" event={"ID":"fa9f3a26-1b5a-4d02-a79d-67585f099131","Type":"ContainerDied","Data":"ca177c8c965ad48d0ae78252e2d314c13abe059f39572e5e736a0d0ce022c3b7"} Jan 20 17:48:45 crc kubenswrapper[4558]: I0120 17:48:45.727122 4558 scope.go:117] "RemoveContainer" containerID="afb57f59ca3f137658ac2a110a0cb9b7538255ec8aa20e0588c2989b3cb473ce" Jan 20 17:48:45 crc kubenswrapper[4558]: I0120 17:48:45.745904 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:48:45 crc kubenswrapper[4558]: I0120 17:48:45.755642 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:48:45 crc kubenswrapper[4558]: I0120 17:48:45.764348 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:48:45 crc kubenswrapper[4558]: E0120 17:48:45.764850 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e9fc64b-291b-4710-8518-8a88e623cbc5" containerName="nova-metadata-log" Jan 20 17:48:45 crc kubenswrapper[4558]: I0120 17:48:45.764873 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e9fc64b-291b-4710-8518-8a88e623cbc5" containerName="nova-metadata-log" Jan 20 17:48:45 crc kubenswrapper[4558]: E0120 17:48:45.764888 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e9fc64b-291b-4710-8518-8a88e623cbc5" containerName="nova-metadata-metadata" Jan 20 17:48:45 crc kubenswrapper[4558]: I0120 17:48:45.764896 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e9fc64b-291b-4710-8518-8a88e623cbc5" containerName="nova-metadata-metadata" Jan 20 17:48:45 crc kubenswrapper[4558]: I0120 17:48:45.765096 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e9fc64b-291b-4710-8518-8a88e623cbc5" containerName="nova-metadata-log" Jan 20 17:48:45 crc kubenswrapper[4558]: I0120 17:48:45.765130 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e9fc64b-291b-4710-8518-8a88e623cbc5" containerName="nova-metadata-metadata" Jan 20 17:48:45 crc kubenswrapper[4558]: I0120 17:48:45.766174 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:48:45 crc kubenswrapper[4558]: I0120 17:48:45.768260 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-metadata-config-data" Jan 20 17:48:45 crc kubenswrapper[4558]: I0120 17:48:45.768532 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-metadata-internal-svc" Jan 20 17:48:45 crc kubenswrapper[4558]: I0120 17:48:45.770820 4558 scope.go:117] "RemoveContainer" containerID="7cf3766f60b867469a8d20c3b2ac377161d546dedaaa6b7bef58a20770a79799" Jan 20 17:48:45 crc kubenswrapper[4558]: I0120 17:48:45.770851 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:48:45 crc kubenswrapper[4558]: E0120 17:48:45.771527 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7cf3766f60b867469a8d20c3b2ac377161d546dedaaa6b7bef58a20770a79799\": container with ID starting with 7cf3766f60b867469a8d20c3b2ac377161d546dedaaa6b7bef58a20770a79799 not found: ID does not exist" containerID="7cf3766f60b867469a8d20c3b2ac377161d546dedaaa6b7bef58a20770a79799" Jan 20 17:48:45 crc kubenswrapper[4558]: I0120 17:48:45.771565 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7cf3766f60b867469a8d20c3b2ac377161d546dedaaa6b7bef58a20770a79799"} err="failed to get container status \"7cf3766f60b867469a8d20c3b2ac377161d546dedaaa6b7bef58a20770a79799\": rpc error: code = NotFound desc = could not find container \"7cf3766f60b867469a8d20c3b2ac377161d546dedaaa6b7bef58a20770a79799\": container with ID starting with 7cf3766f60b867469a8d20c3b2ac377161d546dedaaa6b7bef58a20770a79799 not found: ID does not exist" Jan 20 17:48:45 crc kubenswrapper[4558]: I0120 17:48:45.771589 4558 scope.go:117] "RemoveContainer" containerID="afb57f59ca3f137658ac2a110a0cb9b7538255ec8aa20e0588c2989b3cb473ce" Jan 20 17:48:45 crc kubenswrapper[4558]: E0120 17:48:45.771889 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"afb57f59ca3f137658ac2a110a0cb9b7538255ec8aa20e0588c2989b3cb473ce\": container with ID starting with afb57f59ca3f137658ac2a110a0cb9b7538255ec8aa20e0588c2989b3cb473ce not found: ID does not exist" containerID="afb57f59ca3f137658ac2a110a0cb9b7538255ec8aa20e0588c2989b3cb473ce" Jan 20 17:48:45 crc kubenswrapper[4558]: I0120 17:48:45.771915 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"afb57f59ca3f137658ac2a110a0cb9b7538255ec8aa20e0588c2989b3cb473ce"} err="failed to get container status \"afb57f59ca3f137658ac2a110a0cb9b7538255ec8aa20e0588c2989b3cb473ce\": rpc error: code = NotFound desc = could not find container \"afb57f59ca3f137658ac2a110a0cb9b7538255ec8aa20e0588c2989b3cb473ce\": container with ID starting with afb57f59ca3f137658ac2a110a0cb9b7538255ec8aa20e0588c2989b3cb473ce not found: ID does not exist" Jan 20 17:48:45 crc kubenswrapper[4558]: I0120 17:48:45.771929 4558 scope.go:117] "RemoveContainer" containerID="7cf3766f60b867469a8d20c3b2ac377161d546dedaaa6b7bef58a20770a79799" Jan 20 17:48:45 crc kubenswrapper[4558]: I0120 17:48:45.772788 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7cf3766f60b867469a8d20c3b2ac377161d546dedaaa6b7bef58a20770a79799"} err="failed to get container status 
\"7cf3766f60b867469a8d20c3b2ac377161d546dedaaa6b7bef58a20770a79799\": rpc error: code = NotFound desc = could not find container \"7cf3766f60b867469a8d20c3b2ac377161d546dedaaa6b7bef58a20770a79799\": container with ID starting with 7cf3766f60b867469a8d20c3b2ac377161d546dedaaa6b7bef58a20770a79799 not found: ID does not exist" Jan 20 17:48:45 crc kubenswrapper[4558]: I0120 17:48:45.772810 4558 scope.go:117] "RemoveContainer" containerID="afb57f59ca3f137658ac2a110a0cb9b7538255ec8aa20e0588c2989b3cb473ce" Jan 20 17:48:45 crc kubenswrapper[4558]: I0120 17:48:45.773112 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"afb57f59ca3f137658ac2a110a0cb9b7538255ec8aa20e0588c2989b3cb473ce"} err="failed to get container status \"afb57f59ca3f137658ac2a110a0cb9b7538255ec8aa20e0588c2989b3cb473ce\": rpc error: code = NotFound desc = could not find container \"afb57f59ca3f137658ac2a110a0cb9b7538255ec8aa20e0588c2989b3cb473ce\": container with ID starting with afb57f59ca3f137658ac2a110a0cb9b7538255ec8aa20e0588c2989b3cb473ce not found: ID does not exist" Jan 20 17:48:45 crc kubenswrapper[4558]: I0120 17:48:45.785330 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/bafae32a-2cec-4706-a85a-13650bc1e409-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"bafae32a-2cec-4706-a85a-13650bc1e409\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:48:45 crc kubenswrapper[4558]: I0120 17:48:45.785387 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bafae32a-2cec-4706-a85a-13650bc1e409-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"bafae32a-2cec-4706-a85a-13650bc1e409\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:48:45 crc kubenswrapper[4558]: I0120 17:48:45.785496 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bafae32a-2cec-4706-a85a-13650bc1e409-logs\") pod \"nova-metadata-0\" (UID: \"bafae32a-2cec-4706-a85a-13650bc1e409\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:48:45 crc kubenswrapper[4558]: I0120 17:48:45.785620 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bafae32a-2cec-4706-a85a-13650bc1e409-config-data\") pod \"nova-metadata-0\" (UID: \"bafae32a-2cec-4706-a85a-13650bc1e409\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:48:45 crc kubenswrapper[4558]: I0120 17:48:45.785770 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pvr5p\" (UniqueName: \"kubernetes.io/projected/bafae32a-2cec-4706-a85a-13650bc1e409-kube-api-access-pvr5p\") pod \"nova-metadata-0\" (UID: \"bafae32a-2cec-4706-a85a-13650bc1e409\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:48:45 crc kubenswrapper[4558]: I0120 17:48:45.879413 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:48:45 crc kubenswrapper[4558]: I0120 17:48:45.889139 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/718bc2e9-d6bd-44b9-bbf0-4a4be370af4b-combined-ca-bundle\") pod \"718bc2e9-d6bd-44b9-bbf0-4a4be370af4b\" (UID: \"718bc2e9-d6bd-44b9-bbf0-4a4be370af4b\") " Jan 20 17:48:45 crc kubenswrapper[4558]: I0120 17:48:45.889721 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/bafae32a-2cec-4706-a85a-13650bc1e409-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"bafae32a-2cec-4706-a85a-13650bc1e409\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:48:45 crc kubenswrapper[4558]: I0120 17:48:45.889820 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bafae32a-2cec-4706-a85a-13650bc1e409-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"bafae32a-2cec-4706-a85a-13650bc1e409\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:48:45 crc kubenswrapper[4558]: I0120 17:48:45.890048 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bafae32a-2cec-4706-a85a-13650bc1e409-logs\") pod \"nova-metadata-0\" (UID: \"bafae32a-2cec-4706-a85a-13650bc1e409\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:48:45 crc kubenswrapper[4558]: I0120 17:48:45.890296 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bafae32a-2cec-4706-a85a-13650bc1e409-config-data\") pod \"nova-metadata-0\" (UID: \"bafae32a-2cec-4706-a85a-13650bc1e409\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:48:45 crc kubenswrapper[4558]: I0120 17:48:45.890607 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pvr5p\" (UniqueName: \"kubernetes.io/projected/bafae32a-2cec-4706-a85a-13650bc1e409-kube-api-access-pvr5p\") pod \"nova-metadata-0\" (UID: \"bafae32a-2cec-4706-a85a-13650bc1e409\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:48:45 crc kubenswrapper[4558]: I0120 17:48:45.891071 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bafae32a-2cec-4706-a85a-13650bc1e409-logs\") pod \"nova-metadata-0\" (UID: \"bafae32a-2cec-4706-a85a-13650bc1e409\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:48:45 crc kubenswrapper[4558]: I0120 17:48:45.900124 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/bafae32a-2cec-4706-a85a-13650bc1e409-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"bafae32a-2cec-4706-a85a-13650bc1e409\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:48:45 crc kubenswrapper[4558]: I0120 17:48:45.900361 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bafae32a-2cec-4706-a85a-13650bc1e409-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"bafae32a-2cec-4706-a85a-13650bc1e409\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:48:45 crc kubenswrapper[4558]: I0120 17:48:45.900645 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/bafae32a-2cec-4706-a85a-13650bc1e409-config-data\") pod \"nova-metadata-0\" (UID: \"bafae32a-2cec-4706-a85a-13650bc1e409\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:48:45 crc kubenswrapper[4558]: I0120 17:48:45.920274 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:48:45 crc kubenswrapper[4558]: I0120 17:48:45.921002 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/718bc2e9-d6bd-44b9-bbf0-4a4be370af4b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "718bc2e9-d6bd-44b9-bbf0-4a4be370af4b" (UID: "718bc2e9-d6bd-44b9-bbf0-4a4be370af4b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:48:45 crc kubenswrapper[4558]: I0120 17:48:45.928944 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pvr5p\" (UniqueName: \"kubernetes.io/projected/bafae32a-2cec-4706-a85a-13650bc1e409-kube-api-access-pvr5p\") pod \"nova-metadata-0\" (UID: \"bafae32a-2cec-4706-a85a-13650bc1e409\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:48:45 crc kubenswrapper[4558]: I0120 17:48:45.992234 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/718bc2e9-d6bd-44b9-bbf0-4a4be370af4b-config-data\") pod \"718bc2e9-d6bd-44b9-bbf0-4a4be370af4b\" (UID: \"718bc2e9-d6bd-44b9-bbf0-4a4be370af4b\") " Jan 20 17:48:45 crc kubenswrapper[4558]: I0120 17:48:45.992494 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9bjtq\" (UniqueName: \"kubernetes.io/projected/718bc2e9-d6bd-44b9-bbf0-4a4be370af4b-kube-api-access-9bjtq\") pod \"718bc2e9-d6bd-44b9-bbf0-4a4be370af4b\" (UID: \"718bc2e9-d6bd-44b9-bbf0-4a4be370af4b\") " Jan 20 17:48:45 crc kubenswrapper[4558]: I0120 17:48:45.993311 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/718bc2e9-d6bd-44b9-bbf0-4a4be370af4b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:45 crc kubenswrapper[4558]: I0120 17:48:45.996369 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/718bc2e9-d6bd-44b9-bbf0-4a4be370af4b-kube-api-access-9bjtq" (OuterVolumeSpecName: "kube-api-access-9bjtq") pod "718bc2e9-d6bd-44b9-bbf0-4a4be370af4b" (UID: "718bc2e9-d6bd-44b9-bbf0-4a4be370af4b"). InnerVolumeSpecName "kube-api-access-9bjtq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:48:46 crc kubenswrapper[4558]: I0120 17:48:46.018992 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/718bc2e9-d6bd-44b9-bbf0-4a4be370af4b-config-data" (OuterVolumeSpecName: "config-data") pod "718bc2e9-d6bd-44b9-bbf0-4a4be370af4b" (UID: "718bc2e9-d6bd-44b9-bbf0-4a4be370af4b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:48:46 crc kubenswrapper[4558]: I0120 17:48:46.040877 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-zjq8p" Jan 20 17:48:46 crc kubenswrapper[4558]: I0120 17:48:46.087247 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:48:46 crc kubenswrapper[4558]: I0120 17:48:46.093764 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bebbe6bd-5855-4553-bbb4-7d994a90d025-config-data\") pod \"bebbe6bd-5855-4553-bbb4-7d994a90d025\" (UID: \"bebbe6bd-5855-4553-bbb4-7d994a90d025\") " Jan 20 17:48:46 crc kubenswrapper[4558]: I0120 17:48:46.093816 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bebbe6bd-5855-4553-bbb4-7d994a90d025-combined-ca-bundle\") pod \"bebbe6bd-5855-4553-bbb4-7d994a90d025\" (UID: \"bebbe6bd-5855-4553-bbb4-7d994a90d025\") " Jan 20 17:48:46 crc kubenswrapper[4558]: I0120 17:48:46.093884 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bebbe6bd-5855-4553-bbb4-7d994a90d025-scripts\") pod \"bebbe6bd-5855-4553-bbb4-7d994a90d025\" (UID: \"bebbe6bd-5855-4553-bbb4-7d994a90d025\") " Jan 20 17:48:46 crc kubenswrapper[4558]: I0120 17:48:46.093941 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sdlfp\" (UniqueName: \"kubernetes.io/projected/bebbe6bd-5855-4553-bbb4-7d994a90d025-kube-api-access-sdlfp\") pod \"bebbe6bd-5855-4553-bbb4-7d994a90d025\" (UID: \"bebbe6bd-5855-4553-bbb4-7d994a90d025\") " Jan 20 17:48:46 crc kubenswrapper[4558]: I0120 17:48:46.094329 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9bjtq\" (UniqueName: \"kubernetes.io/projected/718bc2e9-d6bd-44b9-bbf0-4a4be370af4b-kube-api-access-9bjtq\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:46 crc kubenswrapper[4558]: I0120 17:48:46.094353 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/718bc2e9-d6bd-44b9-bbf0-4a4be370af4b-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:46 crc kubenswrapper[4558]: I0120 17:48:46.098250 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bebbe6bd-5855-4553-bbb4-7d994a90d025-kube-api-access-sdlfp" (OuterVolumeSpecName: "kube-api-access-sdlfp") pod "bebbe6bd-5855-4553-bbb4-7d994a90d025" (UID: "bebbe6bd-5855-4553-bbb4-7d994a90d025"). InnerVolumeSpecName "kube-api-access-sdlfp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:48:46 crc kubenswrapper[4558]: I0120 17:48:46.099183 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bebbe6bd-5855-4553-bbb4-7d994a90d025-scripts" (OuterVolumeSpecName: "scripts") pod "bebbe6bd-5855-4553-bbb4-7d994a90d025" (UID: "bebbe6bd-5855-4553-bbb4-7d994a90d025"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:48:46 crc kubenswrapper[4558]: I0120 17:48:46.118719 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bebbe6bd-5855-4553-bbb4-7d994a90d025-config-data" (OuterVolumeSpecName: "config-data") pod "bebbe6bd-5855-4553-bbb4-7d994a90d025" (UID: "bebbe6bd-5855-4553-bbb4-7d994a90d025"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:48:46 crc kubenswrapper[4558]: I0120 17:48:46.119241 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bebbe6bd-5855-4553-bbb4-7d994a90d025-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bebbe6bd-5855-4553-bbb4-7d994a90d025" (UID: "bebbe6bd-5855-4553-bbb4-7d994a90d025"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:48:46 crc kubenswrapper[4558]: I0120 17:48:46.197360 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bebbe6bd-5855-4553-bbb4-7d994a90d025-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:46 crc kubenswrapper[4558]: I0120 17:48:46.197587 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bebbe6bd-5855-4553-bbb4-7d994a90d025-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:46 crc kubenswrapper[4558]: I0120 17:48:46.197600 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bebbe6bd-5855-4553-bbb4-7d994a90d025-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:46 crc kubenswrapper[4558]: I0120 17:48:46.197614 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sdlfp\" (UniqueName: \"kubernetes.io/projected/bebbe6bd-5855-4553-bbb4-7d994a90d025-kube-api-access-sdlfp\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:46 crc kubenswrapper[4558]: I0120 17:48:46.496214 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:48:46 crc kubenswrapper[4558]: W0120 17:48:46.496450 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbafae32a_2cec_4706_a85a_13650bc1e409.slice/crio-79b53f5df30c157f89aabebb7d3b03f2cf95e003d973a914606e80ac68c26434 WatchSource:0}: Error finding container 79b53f5df30c157f89aabebb7d3b03f2cf95e003d973a914606e80ac68c26434: Status 404 returned error can't find the container with id 79b53f5df30c157f89aabebb7d3b03f2cf95e003d973a914606e80ac68c26434 Jan 20 17:48:46 crc kubenswrapper[4558]: I0120 17:48:46.581941 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3e9fc64b-291b-4710-8518-8a88e623cbc5" path="/var/lib/kubelet/pods/3e9fc64b-291b-4710-8518-8a88e623cbc5/volumes" Jan 20 17:48:46 crc kubenswrapper[4558]: I0120 17:48:46.724780 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"718bc2e9-d6bd-44b9-bbf0-4a4be370af4b","Type":"ContainerDied","Data":"496ef5bcef8667df4361619a15079a5079138da091c90286ef96dab3e34e54df"} Jan 20 17:48:46 crc kubenswrapper[4558]: I0120 17:48:46.725150 4558 scope.go:117] "RemoveContainer" containerID="881408a631c4245196eae3ce82685cf4d0bb0ce17b0c3d4fc2b5266e8498bc58" Jan 20 17:48:46 crc kubenswrapper[4558]: I0120 17:48:46.725378 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:48:46 crc kubenswrapper[4558]: I0120 17:48:46.741464 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-zjq8p" Jan 20 17:48:46 crc kubenswrapper[4558]: I0120 17:48:46.741388 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-zjq8p" event={"ID":"bebbe6bd-5855-4553-bbb4-7d994a90d025","Type":"ContainerDied","Data":"f8c130af88dbe444fbb2dfdda88b431b7a333438c4618ad842b0fe09a8967ec4"} Jan 20 17:48:46 crc kubenswrapper[4558]: I0120 17:48:46.741703 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f8c130af88dbe444fbb2dfdda88b431b7a333438c4618ad842b0fe09a8967ec4" Jan 20 17:48:46 crc kubenswrapper[4558]: I0120 17:48:46.746613 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"bafae32a-2cec-4706-a85a-13650bc1e409","Type":"ContainerStarted","Data":"ce3d83d8c57279cf05bed707292a08c5e4c228ec188200bd2d06da880c6b6f98"} Jan 20 17:48:46 crc kubenswrapper[4558]: I0120 17:48:46.746662 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"bafae32a-2cec-4706-a85a-13650bc1e409","Type":"ContainerStarted","Data":"79b53f5df30c157f89aabebb7d3b03f2cf95e003d973a914606e80ac68c26434"} Jan 20 17:48:46 crc kubenswrapper[4558]: I0120 17:48:46.778857 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:48:46 crc kubenswrapper[4558]: I0120 17:48:46.796605 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:48:46 crc kubenswrapper[4558]: I0120 17:48:46.809267 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:48:46 crc kubenswrapper[4558]: E0120 17:48:46.809835 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bebbe6bd-5855-4553-bbb4-7d994a90d025" containerName="nova-cell1-conductor-db-sync" Jan 20 17:48:46 crc kubenswrapper[4558]: I0120 17:48:46.809856 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="bebbe6bd-5855-4553-bbb4-7d994a90d025" containerName="nova-cell1-conductor-db-sync" Jan 20 17:48:46 crc kubenswrapper[4558]: E0120 17:48:46.809870 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="718bc2e9-d6bd-44b9-bbf0-4a4be370af4b" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:48:46 crc kubenswrapper[4558]: I0120 17:48:46.809877 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="718bc2e9-d6bd-44b9-bbf0-4a4be370af4b" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:48:46 crc kubenswrapper[4558]: I0120 17:48:46.810117 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="718bc2e9-d6bd-44b9-bbf0-4a4be370af4b" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:48:46 crc kubenswrapper[4558]: I0120 17:48:46.810139 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="bebbe6bd-5855-4553-bbb4-7d994a90d025" containerName="nova-cell1-conductor-db-sync" Jan 20 17:48:46 crc kubenswrapper[4558]: I0120 17:48:46.810928 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:48:46 crc kubenswrapper[4558]: I0120 17:48:46.814905 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-novncproxy-cell1-public-svc" Jan 20 17:48:46 crc kubenswrapper[4558]: I0120 17:48:46.815048 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-novncproxy-cell1-vencrypt" Jan 20 17:48:46 crc kubenswrapper[4558]: I0120 17:48:46.815363 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-novncproxy-config-data" Jan 20 17:48:46 crc kubenswrapper[4558]: I0120 17:48:46.822471 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:48:46 crc kubenswrapper[4558]: I0120 17:48:46.824970 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:48:46 crc kubenswrapper[4558]: I0120 17:48:46.829854 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-conductor-config-data" Jan 20 17:48:46 crc kubenswrapper[4558]: I0120 17:48:46.836028 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:48:46 crc kubenswrapper[4558]: I0120 17:48:46.850218 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:48:46 crc kubenswrapper[4558]: I0120 17:48:46.915698 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ns75v\" (UniqueName: \"kubernetes.io/projected/6254cefa-8c4a-472e-9faf-4633c6d1618e-kube-api-access-ns75v\") pod \"nova-cell1-novncproxy-0\" (UID: \"6254cefa-8c4a-472e-9faf-4633c6d1618e\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:48:46 crc kubenswrapper[4558]: I0120 17:48:46.915867 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/6254cefa-8c4a-472e-9faf-4633c6d1618e-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"6254cefa-8c4a-472e-9faf-4633c6d1618e\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:48:46 crc kubenswrapper[4558]: I0120 17:48:46.915974 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/6254cefa-8c4a-472e-9faf-4633c6d1618e-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"6254cefa-8c4a-472e-9faf-4633c6d1618e\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:48:46 crc kubenswrapper[4558]: I0120 17:48:46.916009 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6254cefa-8c4a-472e-9faf-4633c6d1618e-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"6254cefa-8c4a-472e-9faf-4633c6d1618e\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:48:46 crc kubenswrapper[4558]: I0120 17:48:46.916150 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6254cefa-8c4a-472e-9faf-4633c6d1618e-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: 
\"6254cefa-8c4a-472e-9faf-4633c6d1618e\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:48:47 crc kubenswrapper[4558]: I0120 17:48:47.020569 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2ecae06-8e2e-4e1a-b788-606e519c8ff5-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"b2ecae06-8e2e-4e1a-b788-606e519c8ff5\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:48:47 crc kubenswrapper[4558]: I0120 17:48:47.020670 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sb8d9\" (UniqueName: \"kubernetes.io/projected/b2ecae06-8e2e-4e1a-b788-606e519c8ff5-kube-api-access-sb8d9\") pod \"nova-cell1-conductor-0\" (UID: \"b2ecae06-8e2e-4e1a-b788-606e519c8ff5\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:48:47 crc kubenswrapper[4558]: I0120 17:48:47.020759 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/6254cefa-8c4a-472e-9faf-4633c6d1618e-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"6254cefa-8c4a-472e-9faf-4633c6d1618e\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:48:47 crc kubenswrapper[4558]: I0120 17:48:47.020797 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b2ecae06-8e2e-4e1a-b788-606e519c8ff5-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"b2ecae06-8e2e-4e1a-b788-606e519c8ff5\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:48:47 crc kubenswrapper[4558]: I0120 17:48:47.020824 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/6254cefa-8c4a-472e-9faf-4633c6d1618e-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"6254cefa-8c4a-472e-9faf-4633c6d1618e\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:48:47 crc kubenswrapper[4558]: I0120 17:48:47.020853 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6254cefa-8c4a-472e-9faf-4633c6d1618e-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"6254cefa-8c4a-472e-9faf-4633c6d1618e\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:48:47 crc kubenswrapper[4558]: I0120 17:48:47.020987 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6254cefa-8c4a-472e-9faf-4633c6d1618e-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"6254cefa-8c4a-472e-9faf-4633c6d1618e\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:48:47 crc kubenswrapper[4558]: I0120 17:48:47.021336 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ns75v\" (UniqueName: \"kubernetes.io/projected/6254cefa-8c4a-472e-9faf-4633c6d1618e-kube-api-access-ns75v\") pod \"nova-cell1-novncproxy-0\" (UID: \"6254cefa-8c4a-472e-9faf-4633c6d1618e\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:48:47 crc kubenswrapper[4558]: I0120 17:48:47.026748 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/6254cefa-8c4a-472e-9faf-4633c6d1618e-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"6254cefa-8c4a-472e-9faf-4633c6d1618e\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:48:47 crc kubenswrapper[4558]: I0120 17:48:47.028658 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/6254cefa-8c4a-472e-9faf-4633c6d1618e-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"6254cefa-8c4a-472e-9faf-4633c6d1618e\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:48:47 crc kubenswrapper[4558]: I0120 17:48:47.036323 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ns75v\" (UniqueName: \"kubernetes.io/projected/6254cefa-8c4a-472e-9faf-4633c6d1618e-kube-api-access-ns75v\") pod \"nova-cell1-novncproxy-0\" (UID: \"6254cefa-8c4a-472e-9faf-4633c6d1618e\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:48:47 crc kubenswrapper[4558]: I0120 17:48:47.036996 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6254cefa-8c4a-472e-9faf-4633c6d1618e-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"6254cefa-8c4a-472e-9faf-4633c6d1618e\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:48:47 crc kubenswrapper[4558]: I0120 17:48:47.040015 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6254cefa-8c4a-472e-9faf-4633c6d1618e-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"6254cefa-8c4a-472e-9faf-4633c6d1618e\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:48:47 crc kubenswrapper[4558]: I0120 17:48:47.123072 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2ecae06-8e2e-4e1a-b788-606e519c8ff5-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"b2ecae06-8e2e-4e1a-b788-606e519c8ff5\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:48:47 crc kubenswrapper[4558]: I0120 17:48:47.123152 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sb8d9\" (UniqueName: \"kubernetes.io/projected/b2ecae06-8e2e-4e1a-b788-606e519c8ff5-kube-api-access-sb8d9\") pod \"nova-cell1-conductor-0\" (UID: \"b2ecae06-8e2e-4e1a-b788-606e519c8ff5\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:48:47 crc kubenswrapper[4558]: I0120 17:48:47.123259 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b2ecae06-8e2e-4e1a-b788-606e519c8ff5-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"b2ecae06-8e2e-4e1a-b788-606e519c8ff5\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:48:47 crc kubenswrapper[4558]: I0120 17:48:47.127237 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2ecae06-8e2e-4e1a-b788-606e519c8ff5-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"b2ecae06-8e2e-4e1a-b788-606e519c8ff5\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:48:47 crc kubenswrapper[4558]: I0120 17:48:47.127491 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/b2ecae06-8e2e-4e1a-b788-606e519c8ff5-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"b2ecae06-8e2e-4e1a-b788-606e519c8ff5\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:48:47 crc kubenswrapper[4558]: I0120 17:48:47.139411 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sb8d9\" (UniqueName: \"kubernetes.io/projected/b2ecae06-8e2e-4e1a-b788-606e519c8ff5-kube-api-access-sb8d9\") pod \"nova-cell1-conductor-0\" (UID: \"b2ecae06-8e2e-4e1a-b788-606e519c8ff5\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:48:47 crc kubenswrapper[4558]: I0120 17:48:47.167719 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-28s5c" Jan 20 17:48:47 crc kubenswrapper[4558]: I0120 17:48:47.176840 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:48:47 crc kubenswrapper[4558]: I0120 17:48:47.183986 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:48:47 crc kubenswrapper[4558]: I0120 17:48:47.327216 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa9f3a26-1b5a-4d02-a79d-67585f099131-config-data\") pod \"fa9f3a26-1b5a-4d02-a79d-67585f099131\" (UID: \"fa9f3a26-1b5a-4d02-a79d-67585f099131\") " Jan 20 17:48:47 crc kubenswrapper[4558]: I0120 17:48:47.327317 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fa9f3a26-1b5a-4d02-a79d-67585f099131-scripts\") pod \"fa9f3a26-1b5a-4d02-a79d-67585f099131\" (UID: \"fa9f3a26-1b5a-4d02-a79d-67585f099131\") " Jan 20 17:48:47 crc kubenswrapper[4558]: I0120 17:48:47.327357 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l68bq\" (UniqueName: \"kubernetes.io/projected/fa9f3a26-1b5a-4d02-a79d-67585f099131-kube-api-access-l68bq\") pod \"fa9f3a26-1b5a-4d02-a79d-67585f099131\" (UID: \"fa9f3a26-1b5a-4d02-a79d-67585f099131\") " Jan 20 17:48:47 crc kubenswrapper[4558]: I0120 17:48:47.327412 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa9f3a26-1b5a-4d02-a79d-67585f099131-combined-ca-bundle\") pod \"fa9f3a26-1b5a-4d02-a79d-67585f099131\" (UID: \"fa9f3a26-1b5a-4d02-a79d-67585f099131\") " Jan 20 17:48:47 crc kubenswrapper[4558]: I0120 17:48:47.333492 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa9f3a26-1b5a-4d02-a79d-67585f099131-scripts" (OuterVolumeSpecName: "scripts") pod "fa9f3a26-1b5a-4d02-a79d-67585f099131" (UID: "fa9f3a26-1b5a-4d02-a79d-67585f099131"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:48:47 crc kubenswrapper[4558]: I0120 17:48:47.335591 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fa9f3a26-1b5a-4d02-a79d-67585f099131-kube-api-access-l68bq" (OuterVolumeSpecName: "kube-api-access-l68bq") pod "fa9f3a26-1b5a-4d02-a79d-67585f099131" (UID: "fa9f3a26-1b5a-4d02-a79d-67585f099131"). InnerVolumeSpecName "kube-api-access-l68bq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:48:47 crc kubenswrapper[4558]: I0120 17:48:47.356067 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa9f3a26-1b5a-4d02-a79d-67585f099131-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fa9f3a26-1b5a-4d02-a79d-67585f099131" (UID: "fa9f3a26-1b5a-4d02-a79d-67585f099131"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:48:47 crc kubenswrapper[4558]: I0120 17:48:47.357137 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa9f3a26-1b5a-4d02-a79d-67585f099131-config-data" (OuterVolumeSpecName: "config-data") pod "fa9f3a26-1b5a-4d02-a79d-67585f099131" (UID: "fa9f3a26-1b5a-4d02-a79d-67585f099131"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:48:47 crc kubenswrapper[4558]: I0120 17:48:47.430593 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fa9f3a26-1b5a-4d02-a79d-67585f099131-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:47 crc kubenswrapper[4558]: I0120 17:48:47.430628 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fa9f3a26-1b5a-4d02-a79d-67585f099131-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:47 crc kubenswrapper[4558]: I0120 17:48:47.430639 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fa9f3a26-1b5a-4d02-a79d-67585f099131-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:47 crc kubenswrapper[4558]: I0120 17:48:47.430654 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l68bq\" (UniqueName: \"kubernetes.io/projected/fa9f3a26-1b5a-4d02-a79d-67585f099131-kube-api-access-l68bq\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:47 crc kubenswrapper[4558]: W0120 17:48:47.613283 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6254cefa_8c4a_472e_9faf_4633c6d1618e.slice/crio-b5a8b138f82bb2dcace9ca9aae5f18f97b2ddb2df351144b20183c3ff5086641 WatchSource:0}: Error finding container b5a8b138f82bb2dcace9ca9aae5f18f97b2ddb2df351144b20183c3ff5086641: Status 404 returned error can't find the container with id b5a8b138f82bb2dcace9ca9aae5f18f97b2ddb2df351144b20183c3ff5086641 Jan 20 17:48:47 crc kubenswrapper[4558]: I0120 17:48:47.615030 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:48:47 crc kubenswrapper[4558]: I0120 17:48:47.689929 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:48:47 crc kubenswrapper[4558]: I0120 17:48:47.775358 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"6254cefa-8c4a-472e-9faf-4633c6d1618e","Type":"ContainerStarted","Data":"b5a8b138f82bb2dcace9ca9aae5f18f97b2ddb2df351144b20183c3ff5086641"} Jan 20 17:48:47 crc kubenswrapper[4558]: I0120 17:48:47.777191 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"b2ecae06-8e2e-4e1a-b788-606e519c8ff5","Type":"ContainerStarted","Data":"31d194bd3c2c2a5cf54ff5a9562e3ae185573cca820cb09d293007b50bf8a7c1"} Jan 20 17:48:47 crc 
kubenswrapper[4558]: I0120 17:48:47.780521 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"bafae32a-2cec-4706-a85a-13650bc1e409","Type":"ContainerStarted","Data":"26235d1bf080c38fe8ddf3322b1b8e9e6501c1558566e900d254b216a80b1bd8"} Jan 20 17:48:47 crc kubenswrapper[4558]: I0120 17:48:47.786710 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-28s5c" event={"ID":"fa9f3a26-1b5a-4d02-a79d-67585f099131","Type":"ContainerDied","Data":"f07e457463e191a052ca6fdb1b7aa09a75cc39fe1a55354e855aa1b2009daa50"} Jan 20 17:48:47 crc kubenswrapper[4558]: I0120 17:48:47.786745 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f07e457463e191a052ca6fdb1b7aa09a75cc39fe1a55354e855aa1b2009daa50" Jan 20 17:48:47 crc kubenswrapper[4558]: I0120 17:48:47.786794 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-28s5c" Jan 20 17:48:47 crc kubenswrapper[4558]: I0120 17:48:47.811510 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-metadata-0" podStartSLOduration=2.8114848439999998 podStartE2EDuration="2.811484844s" podCreationTimestamp="2026-01-20 17:48:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:48:47.801864294 +0000 UTC m=+4021.562202261" watchObservedRunningTime="2026-01-20 17:48:47.811484844 +0000 UTC m=+4021.571822811" Jan 20 17:48:47 crc kubenswrapper[4558]: I0120 17:48:47.913863 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:48:47 crc kubenswrapper[4558]: I0120 17:48:47.914315 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="60c9785f-c5f8-408e-ac84-12756b4ba66a" containerName="nova-scheduler-scheduler" containerID="cri-o://96dbff4a5c402cf5402baf41517d6be17579aac62d5269d3b0024b1e9269835d" gracePeriod=30 Jan 20 17:48:47 crc kubenswrapper[4558]: I0120 17:48:47.922760 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:48:47 crc kubenswrapper[4558]: I0120 17:48:47.922913 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="1f9f16e6-6a68-47b5-9f90-7275cdb64dd0" containerName="nova-api-log" containerID="cri-o://f8ea5c6a5574b26aaec9cf8a017c58068094a35c5f9b409bcb9bfc0e7da7aff8" gracePeriod=30 Jan 20 17:48:47 crc kubenswrapper[4558]: I0120 17:48:47.923299 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="1f9f16e6-6a68-47b5-9f90-7275cdb64dd0" containerName="nova-api-api" containerID="cri-o://00d072144c12ac7881939acf0044e57715712d5f7941513463ed22a19efbc679" gracePeriod=30 Jan 20 17:48:47 crc kubenswrapper[4558]: I0120 17:48:47.945211 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:48:48 crc kubenswrapper[4558]: I0120 17:48:48.559322 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:48:48 crc kubenswrapper[4558]: I0120 17:48:48.617040 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="718bc2e9-d6bd-44b9-bbf0-4a4be370af4b" path="/var/lib/kubelet/pods/718bc2e9-d6bd-44b9-bbf0-4a4be370af4b/volumes" Jan 20 17:48:48 crc kubenswrapper[4558]: I0120 17:48:48.693531 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:48:48 crc kubenswrapper[4558]: I0120 17:48:48.761411 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1f9f16e6-6a68-47b5-9f90-7275cdb64dd0-logs\") pod \"1f9f16e6-6a68-47b5-9f90-7275cdb64dd0\" (UID: \"1f9f16e6-6a68-47b5-9f90-7275cdb64dd0\") " Jan 20 17:48:48 crc kubenswrapper[4558]: I0120 17:48:48.761524 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f9f16e6-6a68-47b5-9f90-7275cdb64dd0-combined-ca-bundle\") pod \"1f9f16e6-6a68-47b5-9f90-7275cdb64dd0\" (UID: \"1f9f16e6-6a68-47b5-9f90-7275cdb64dd0\") " Jan 20 17:48:48 crc kubenswrapper[4558]: I0120 17:48:48.761780 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fjpqc\" (UniqueName: \"kubernetes.io/projected/1f9f16e6-6a68-47b5-9f90-7275cdb64dd0-kube-api-access-fjpqc\") pod \"1f9f16e6-6a68-47b5-9f90-7275cdb64dd0\" (UID: \"1f9f16e6-6a68-47b5-9f90-7275cdb64dd0\") " Jan 20 17:48:48 crc kubenswrapper[4558]: I0120 17:48:48.761818 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1f9f16e6-6a68-47b5-9f90-7275cdb64dd0-config-data\") pod \"1f9f16e6-6a68-47b5-9f90-7275cdb64dd0\" (UID: \"1f9f16e6-6a68-47b5-9f90-7275cdb64dd0\") " Jan 20 17:48:48 crc kubenswrapper[4558]: I0120 17:48:48.761855 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1f9f16e6-6a68-47b5-9f90-7275cdb64dd0-logs" (OuterVolumeSpecName: "logs") pod "1f9f16e6-6a68-47b5-9f90-7275cdb64dd0" (UID: "1f9f16e6-6a68-47b5-9f90-7275cdb64dd0"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:48:48 crc kubenswrapper[4558]: I0120 17:48:48.762933 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1f9f16e6-6a68-47b5-9f90-7275cdb64dd0-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:48 crc kubenswrapper[4558]: I0120 17:48:48.769449 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1f9f16e6-6a68-47b5-9f90-7275cdb64dd0-kube-api-access-fjpqc" (OuterVolumeSpecName: "kube-api-access-fjpqc") pod "1f9f16e6-6a68-47b5-9f90-7275cdb64dd0" (UID: "1f9f16e6-6a68-47b5-9f90-7275cdb64dd0"). InnerVolumeSpecName "kube-api-access-fjpqc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:48:48 crc kubenswrapper[4558]: I0120 17:48:48.793518 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1f9f16e6-6a68-47b5-9f90-7275cdb64dd0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1f9f16e6-6a68-47b5-9f90-7275cdb64dd0" (UID: "1f9f16e6-6a68-47b5-9f90-7275cdb64dd0"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:48:48 crc kubenswrapper[4558]: I0120 17:48:48.796929 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1f9f16e6-6a68-47b5-9f90-7275cdb64dd0-config-data" (OuterVolumeSpecName: "config-data") pod "1f9f16e6-6a68-47b5-9f90-7275cdb64dd0" (UID: "1f9f16e6-6a68-47b5-9f90-7275cdb64dd0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:48:48 crc kubenswrapper[4558]: I0120 17:48:48.816703 4558 generic.go:334] "Generic (PLEG): container finished" podID="1f9f16e6-6a68-47b5-9f90-7275cdb64dd0" containerID="00d072144c12ac7881939acf0044e57715712d5f7941513463ed22a19efbc679" exitCode=0 Jan 20 17:48:48 crc kubenswrapper[4558]: I0120 17:48:48.817159 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:48:48 crc kubenswrapper[4558]: I0120 17:48:48.817186 4558 generic.go:334] "Generic (PLEG): container finished" podID="1f9f16e6-6a68-47b5-9f90-7275cdb64dd0" containerID="f8ea5c6a5574b26aaec9cf8a017c58068094a35c5f9b409bcb9bfc0e7da7aff8" exitCode=143 Jan 20 17:48:48 crc kubenswrapper[4558]: I0120 17:48:48.817048 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"1f9f16e6-6a68-47b5-9f90-7275cdb64dd0","Type":"ContainerDied","Data":"00d072144c12ac7881939acf0044e57715712d5f7941513463ed22a19efbc679"} Jan 20 17:48:48 crc kubenswrapper[4558]: I0120 17:48:48.817375 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"1f9f16e6-6a68-47b5-9f90-7275cdb64dd0","Type":"ContainerDied","Data":"f8ea5c6a5574b26aaec9cf8a017c58068094a35c5f9b409bcb9bfc0e7da7aff8"} Jan 20 17:48:48 crc kubenswrapper[4558]: I0120 17:48:48.817392 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"1f9f16e6-6a68-47b5-9f90-7275cdb64dd0","Type":"ContainerDied","Data":"a9c2089f23726006d0f38990eab302ec9552cac80149272bd8c33b3219bdf518"} Jan 20 17:48:48 crc kubenswrapper[4558]: I0120 17:48:48.817477 4558 scope.go:117] "RemoveContainer" containerID="00d072144c12ac7881939acf0044e57715712d5f7941513463ed22a19efbc679" Jan 20 17:48:48 crc kubenswrapper[4558]: I0120 17:48:48.819829 4558 generic.go:334] "Generic (PLEG): container finished" podID="60c9785f-c5f8-408e-ac84-12756b4ba66a" containerID="96dbff4a5c402cf5402baf41517d6be17579aac62d5269d3b0024b1e9269835d" exitCode=0 Jan 20 17:48:48 crc kubenswrapper[4558]: I0120 17:48:48.819869 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"60c9785f-c5f8-408e-ac84-12756b4ba66a","Type":"ContainerDied","Data":"96dbff4a5c402cf5402baf41517d6be17579aac62d5269d3b0024b1e9269835d"} Jan 20 17:48:48 crc kubenswrapper[4558]: I0120 17:48:48.819892 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"60c9785f-c5f8-408e-ac84-12756b4ba66a","Type":"ContainerDied","Data":"0cfe88b8ad7d07036a7b71dec6291dcc2512e43798bdd1ca17e8b7bed048023b"} Jan 20 17:48:48 crc kubenswrapper[4558]: I0120 17:48:48.819928 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:48:48 crc kubenswrapper[4558]: I0120 17:48:48.821791 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"6254cefa-8c4a-472e-9faf-4633c6d1618e","Type":"ContainerStarted","Data":"5c8b0e075d6b80a58c44316d67ccf05a22f59a72ae3bca7872921f2e03c311bf"} Jan 20 17:48:48 crc kubenswrapper[4558]: I0120 17:48:48.827529 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"b2ecae06-8e2e-4e1a-b788-606e519c8ff5","Type":"ContainerStarted","Data":"193468c685fa9ab1f363ecb0e7f771a966526b372a315288f277c9d2d2a6d417"} Jan 20 17:48:48 crc kubenswrapper[4558]: I0120 17:48:48.847560 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" podStartSLOduration=2.847549786 podStartE2EDuration="2.847549786s" podCreationTimestamp="2026-01-20 17:48:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:48:48.839735262 +0000 UTC m=+4022.600073229" watchObservedRunningTime="2026-01-20 17:48:48.847549786 +0000 UTC m=+4022.607887753" Jan 20 17:48:48 crc kubenswrapper[4558]: I0120 17:48:48.867133 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p6nqh\" (UniqueName: \"kubernetes.io/projected/60c9785f-c5f8-408e-ac84-12756b4ba66a-kube-api-access-p6nqh\") pod \"60c9785f-c5f8-408e-ac84-12756b4ba66a\" (UID: \"60c9785f-c5f8-408e-ac84-12756b4ba66a\") " Jan 20 17:48:48 crc kubenswrapper[4558]: I0120 17:48:48.867677 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/60c9785f-c5f8-408e-ac84-12756b4ba66a-combined-ca-bundle\") pod \"60c9785f-c5f8-408e-ac84-12756b4ba66a\" (UID: \"60c9785f-c5f8-408e-ac84-12756b4ba66a\") " Jan 20 17:48:48 crc kubenswrapper[4558]: I0120 17:48:48.867862 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/60c9785f-c5f8-408e-ac84-12756b4ba66a-config-data\") pod \"60c9785f-c5f8-408e-ac84-12756b4ba66a\" (UID: \"60c9785f-c5f8-408e-ac84-12756b4ba66a\") " Jan 20 17:48:48 crc kubenswrapper[4558]: I0120 17:48:48.867430 4558 scope.go:117] "RemoveContainer" containerID="f8ea5c6a5574b26aaec9cf8a017c58068094a35c5f9b409bcb9bfc0e7da7aff8" Jan 20 17:48:48 crc kubenswrapper[4558]: I0120 17:48:48.868750 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fjpqc\" (UniqueName: \"kubernetes.io/projected/1f9f16e6-6a68-47b5-9f90-7275cdb64dd0-kube-api-access-fjpqc\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:48 crc kubenswrapper[4558]: I0120 17:48:48.869315 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1f9f16e6-6a68-47b5-9f90-7275cdb64dd0-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:48 crc kubenswrapper[4558]: I0120 17:48:48.869389 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f9f16e6-6a68-47b5-9f90-7275cdb64dd0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:48 crc kubenswrapper[4558]: I0120 17:48:48.870231 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-conductor-0" 
podStartSLOduration=2.8702195550000003 podStartE2EDuration="2.870219555s" podCreationTimestamp="2026-01-20 17:48:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:48:48.858499255 +0000 UTC m=+4022.618837223" watchObservedRunningTime="2026-01-20 17:48:48.870219555 +0000 UTC m=+4022.630557521" Jan 20 17:48:48 crc kubenswrapper[4558]: I0120 17:48:48.872395 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/60c9785f-c5f8-408e-ac84-12756b4ba66a-kube-api-access-p6nqh" (OuterVolumeSpecName: "kube-api-access-p6nqh") pod "60c9785f-c5f8-408e-ac84-12756b4ba66a" (UID: "60c9785f-c5f8-408e-ac84-12756b4ba66a"). InnerVolumeSpecName "kube-api-access-p6nqh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:48:48 crc kubenswrapper[4558]: I0120 17:48:48.882029 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:48:48 crc kubenswrapper[4558]: I0120 17:48:48.902429 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:48:48 crc kubenswrapper[4558]: I0120 17:48:48.906482 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/60c9785f-c5f8-408e-ac84-12756b4ba66a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "60c9785f-c5f8-408e-ac84-12756b4ba66a" (UID: "60c9785f-c5f8-408e-ac84-12756b4ba66a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:48:48 crc kubenswrapper[4558]: I0120 17:48:48.932085 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/60c9785f-c5f8-408e-ac84-12756b4ba66a-config-data" (OuterVolumeSpecName: "config-data") pod "60c9785f-c5f8-408e-ac84-12756b4ba66a" (UID: "60c9785f-c5f8-408e-ac84-12756b4ba66a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:48:48 crc kubenswrapper[4558]: I0120 17:48:48.945335 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:48:48 crc kubenswrapper[4558]: E0120 17:48:48.945866 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f9f16e6-6a68-47b5-9f90-7275cdb64dd0" containerName="nova-api-log" Jan 20 17:48:48 crc kubenswrapper[4558]: I0120 17:48:48.945892 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f9f16e6-6a68-47b5-9f90-7275cdb64dd0" containerName="nova-api-log" Jan 20 17:48:48 crc kubenswrapper[4558]: E0120 17:48:48.945921 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60c9785f-c5f8-408e-ac84-12756b4ba66a" containerName="nova-scheduler-scheduler" Jan 20 17:48:48 crc kubenswrapper[4558]: I0120 17:48:48.945930 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="60c9785f-c5f8-408e-ac84-12756b4ba66a" containerName="nova-scheduler-scheduler" Jan 20 17:48:48 crc kubenswrapper[4558]: E0120 17:48:48.945995 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa9f3a26-1b5a-4d02-a79d-67585f099131" containerName="nova-manage" Jan 20 17:48:48 crc kubenswrapper[4558]: I0120 17:48:48.946002 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa9f3a26-1b5a-4d02-a79d-67585f099131" containerName="nova-manage" Jan 20 17:48:48 crc kubenswrapper[4558]: E0120 17:48:48.946012 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f9f16e6-6a68-47b5-9f90-7275cdb64dd0" containerName="nova-api-api" Jan 20 17:48:48 crc kubenswrapper[4558]: I0120 17:48:48.946020 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f9f16e6-6a68-47b5-9f90-7275cdb64dd0" containerName="nova-api-api" Jan 20 17:48:48 crc kubenswrapper[4558]: I0120 17:48:48.946261 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="1f9f16e6-6a68-47b5-9f90-7275cdb64dd0" containerName="nova-api-log" Jan 20 17:48:48 crc kubenswrapper[4558]: I0120 17:48:48.946278 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="60c9785f-c5f8-408e-ac84-12756b4ba66a" containerName="nova-scheduler-scheduler" Jan 20 17:48:48 crc kubenswrapper[4558]: I0120 17:48:48.946291 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="1f9f16e6-6a68-47b5-9f90-7275cdb64dd0" containerName="nova-api-api" Jan 20 17:48:48 crc kubenswrapper[4558]: I0120 17:48:48.946302 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa9f3a26-1b5a-4d02-a79d-67585f099131" containerName="nova-manage" Jan 20 17:48:48 crc kubenswrapper[4558]: I0120 17:48:48.947585 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:48:48 crc kubenswrapper[4558]: I0120 17:48:48.949900 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-api-config-data" Jan 20 17:48:48 crc kubenswrapper[4558]: I0120 17:48:48.967547 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:48:48 crc kubenswrapper[4558]: I0120 17:48:48.975267 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p6nqh\" (UniqueName: \"kubernetes.io/projected/60c9785f-c5f8-408e-ac84-12756b4ba66a-kube-api-access-p6nqh\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:48 crc kubenswrapper[4558]: I0120 17:48:48.975304 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/60c9785f-c5f8-408e-ac84-12756b4ba66a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:48 crc kubenswrapper[4558]: I0120 17:48:48.975317 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/60c9785f-c5f8-408e-ac84-12756b4ba66a-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:49 crc kubenswrapper[4558]: I0120 17:48:49.008372 4558 scope.go:117] "RemoveContainer" containerID="00d072144c12ac7881939acf0044e57715712d5f7941513463ed22a19efbc679" Jan 20 17:48:49 crc kubenswrapper[4558]: E0120 17:48:49.008660 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"00d072144c12ac7881939acf0044e57715712d5f7941513463ed22a19efbc679\": container with ID starting with 00d072144c12ac7881939acf0044e57715712d5f7941513463ed22a19efbc679 not found: ID does not exist" containerID="00d072144c12ac7881939acf0044e57715712d5f7941513463ed22a19efbc679" Jan 20 17:48:49 crc kubenswrapper[4558]: I0120 17:48:49.008701 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"00d072144c12ac7881939acf0044e57715712d5f7941513463ed22a19efbc679"} err="failed to get container status \"00d072144c12ac7881939acf0044e57715712d5f7941513463ed22a19efbc679\": rpc error: code = NotFound desc = could not find container \"00d072144c12ac7881939acf0044e57715712d5f7941513463ed22a19efbc679\": container with ID starting with 00d072144c12ac7881939acf0044e57715712d5f7941513463ed22a19efbc679 not found: ID does not exist" Jan 20 17:48:49 crc kubenswrapper[4558]: I0120 17:48:49.008731 4558 scope.go:117] "RemoveContainer" containerID="f8ea5c6a5574b26aaec9cf8a017c58068094a35c5f9b409bcb9bfc0e7da7aff8" Jan 20 17:48:49 crc kubenswrapper[4558]: E0120 17:48:49.009011 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f8ea5c6a5574b26aaec9cf8a017c58068094a35c5f9b409bcb9bfc0e7da7aff8\": container with ID starting with f8ea5c6a5574b26aaec9cf8a017c58068094a35c5f9b409bcb9bfc0e7da7aff8 not found: ID does not exist" containerID="f8ea5c6a5574b26aaec9cf8a017c58068094a35c5f9b409bcb9bfc0e7da7aff8" Jan 20 17:48:49 crc kubenswrapper[4558]: I0120 17:48:49.009037 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f8ea5c6a5574b26aaec9cf8a017c58068094a35c5f9b409bcb9bfc0e7da7aff8"} err="failed to get container status \"f8ea5c6a5574b26aaec9cf8a017c58068094a35c5f9b409bcb9bfc0e7da7aff8\": rpc error: code = NotFound desc = could not find container 
\"f8ea5c6a5574b26aaec9cf8a017c58068094a35c5f9b409bcb9bfc0e7da7aff8\": container with ID starting with f8ea5c6a5574b26aaec9cf8a017c58068094a35c5f9b409bcb9bfc0e7da7aff8 not found: ID does not exist" Jan 20 17:48:49 crc kubenswrapper[4558]: I0120 17:48:49.009052 4558 scope.go:117] "RemoveContainer" containerID="00d072144c12ac7881939acf0044e57715712d5f7941513463ed22a19efbc679" Jan 20 17:48:49 crc kubenswrapper[4558]: I0120 17:48:49.009357 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"00d072144c12ac7881939acf0044e57715712d5f7941513463ed22a19efbc679"} err="failed to get container status \"00d072144c12ac7881939acf0044e57715712d5f7941513463ed22a19efbc679\": rpc error: code = NotFound desc = could not find container \"00d072144c12ac7881939acf0044e57715712d5f7941513463ed22a19efbc679\": container with ID starting with 00d072144c12ac7881939acf0044e57715712d5f7941513463ed22a19efbc679 not found: ID does not exist" Jan 20 17:48:49 crc kubenswrapper[4558]: I0120 17:48:49.009380 4558 scope.go:117] "RemoveContainer" containerID="f8ea5c6a5574b26aaec9cf8a017c58068094a35c5f9b409bcb9bfc0e7da7aff8" Jan 20 17:48:49 crc kubenswrapper[4558]: I0120 17:48:49.009632 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f8ea5c6a5574b26aaec9cf8a017c58068094a35c5f9b409bcb9bfc0e7da7aff8"} err="failed to get container status \"f8ea5c6a5574b26aaec9cf8a017c58068094a35c5f9b409bcb9bfc0e7da7aff8\": rpc error: code = NotFound desc = could not find container \"f8ea5c6a5574b26aaec9cf8a017c58068094a35c5f9b409bcb9bfc0e7da7aff8\": container with ID starting with f8ea5c6a5574b26aaec9cf8a017c58068094a35c5f9b409bcb9bfc0e7da7aff8 not found: ID does not exist" Jan 20 17:48:49 crc kubenswrapper[4558]: I0120 17:48:49.009654 4558 scope.go:117] "RemoveContainer" containerID="96dbff4a5c402cf5402baf41517d6be17579aac62d5269d3b0024b1e9269835d" Jan 20 17:48:49 crc kubenswrapper[4558]: I0120 17:48:49.028493 4558 scope.go:117] "RemoveContainer" containerID="96dbff4a5c402cf5402baf41517d6be17579aac62d5269d3b0024b1e9269835d" Jan 20 17:48:49 crc kubenswrapper[4558]: E0120 17:48:49.028835 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"96dbff4a5c402cf5402baf41517d6be17579aac62d5269d3b0024b1e9269835d\": container with ID starting with 96dbff4a5c402cf5402baf41517d6be17579aac62d5269d3b0024b1e9269835d not found: ID does not exist" containerID="96dbff4a5c402cf5402baf41517d6be17579aac62d5269d3b0024b1e9269835d" Jan 20 17:48:49 crc kubenswrapper[4558]: I0120 17:48:49.028867 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"96dbff4a5c402cf5402baf41517d6be17579aac62d5269d3b0024b1e9269835d"} err="failed to get container status \"96dbff4a5c402cf5402baf41517d6be17579aac62d5269d3b0024b1e9269835d\": rpc error: code = NotFound desc = could not find container \"96dbff4a5c402cf5402baf41517d6be17579aac62d5269d3b0024b1e9269835d\": container with ID starting with 96dbff4a5c402cf5402baf41517d6be17579aac62d5269d3b0024b1e9269835d not found: ID does not exist" Jan 20 17:48:49 crc kubenswrapper[4558]: I0120 17:48:49.077027 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/05f17eb0-d7a8-4f7d-bf24-34e75e0e6a24-logs\") pod \"nova-api-0\" (UID: \"05f17eb0-d7a8-4f7d-bf24-34e75e0e6a24\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:48:49 crc 
kubenswrapper[4558]: I0120 17:48:49.077070 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05f17eb0-d7a8-4f7d-bf24-34e75e0e6a24-config-data\") pod \"nova-api-0\" (UID: \"05f17eb0-d7a8-4f7d-bf24-34e75e0e6a24\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:48:49 crc kubenswrapper[4558]: I0120 17:48:49.077124 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r5d98\" (UniqueName: \"kubernetes.io/projected/05f17eb0-d7a8-4f7d-bf24-34e75e0e6a24-kube-api-access-r5d98\") pod \"nova-api-0\" (UID: \"05f17eb0-d7a8-4f7d-bf24-34e75e0e6a24\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:48:49 crc kubenswrapper[4558]: I0120 17:48:49.077190 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05f17eb0-d7a8-4f7d-bf24-34e75e0e6a24-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"05f17eb0-d7a8-4f7d-bf24-34e75e0e6a24\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:48:49 crc kubenswrapper[4558]: I0120 17:48:49.157110 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:48:49 crc kubenswrapper[4558]: I0120 17:48:49.167919 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:48:49 crc kubenswrapper[4558]: I0120 17:48:49.183350 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:48:49 crc kubenswrapper[4558]: I0120 17:48:49.184885 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:48:49 crc kubenswrapper[4558]: I0120 17:48:49.185274 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05f17eb0-d7a8-4f7d-bf24-34e75e0e6a24-config-data\") pod \"nova-api-0\" (UID: \"05f17eb0-d7a8-4f7d-bf24-34e75e0e6a24\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:48:49 crc kubenswrapper[4558]: I0120 17:48:49.185426 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r5d98\" (UniqueName: \"kubernetes.io/projected/05f17eb0-d7a8-4f7d-bf24-34e75e0e6a24-kube-api-access-r5d98\") pod \"nova-api-0\" (UID: \"05f17eb0-d7a8-4f7d-bf24-34e75e0e6a24\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:48:49 crc kubenswrapper[4558]: I0120 17:48:49.185513 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05f17eb0-d7a8-4f7d-bf24-34e75e0e6a24-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"05f17eb0-d7a8-4f7d-bf24-34e75e0e6a24\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:48:49 crc kubenswrapper[4558]: I0120 17:48:49.185838 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/05f17eb0-d7a8-4f7d-bf24-34e75e0e6a24-logs\") pod \"nova-api-0\" (UID: \"05f17eb0-d7a8-4f7d-bf24-34e75e0e6a24\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:48:49 crc kubenswrapper[4558]: I0120 17:48:49.186304 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/05f17eb0-d7a8-4f7d-bf24-34e75e0e6a24-logs\") pod \"nova-api-0\" (UID: 
\"05f17eb0-d7a8-4f7d-bf24-34e75e0e6a24\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:48:49 crc kubenswrapper[4558]: I0120 17:48:49.189707 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05f17eb0-d7a8-4f7d-bf24-34e75e0e6a24-config-data\") pod \"nova-api-0\" (UID: \"05f17eb0-d7a8-4f7d-bf24-34e75e0e6a24\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:48:49 crc kubenswrapper[4558]: I0120 17:48:49.190802 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-scheduler-config-data" Jan 20 17:48:49 crc kubenswrapper[4558]: I0120 17:48:49.193672 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:48:49 crc kubenswrapper[4558]: I0120 17:48:49.195324 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05f17eb0-d7a8-4f7d-bf24-34e75e0e6a24-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"05f17eb0-d7a8-4f7d-bf24-34e75e0e6a24\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:48:49 crc kubenswrapper[4558]: I0120 17:48:49.210551 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r5d98\" (UniqueName: \"kubernetes.io/projected/05f17eb0-d7a8-4f7d-bf24-34e75e0e6a24-kube-api-access-r5d98\") pod \"nova-api-0\" (UID: \"05f17eb0-d7a8-4f7d-bf24-34e75e0e6a24\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:48:49 crc kubenswrapper[4558]: I0120 17:48:49.288915 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f65j4\" (UniqueName: \"kubernetes.io/projected/e9c779bd-e0e1-4c5d-b250-19ea3af0ebd7-kube-api-access-f65j4\") pod \"nova-scheduler-0\" (UID: \"e9c779bd-e0e1-4c5d-b250-19ea3af0ebd7\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:48:49 crc kubenswrapper[4558]: I0120 17:48:49.289022 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e9c779bd-e0e1-4c5d-b250-19ea3af0ebd7-config-data\") pod \"nova-scheduler-0\" (UID: \"e9c779bd-e0e1-4c5d-b250-19ea3af0ebd7\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:48:49 crc kubenswrapper[4558]: I0120 17:48:49.289068 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9c779bd-e0e1-4c5d-b250-19ea3af0ebd7-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"e9c779bd-e0e1-4c5d-b250-19ea3af0ebd7\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:48:49 crc kubenswrapper[4558]: I0120 17:48:49.299643 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:48:49 crc kubenswrapper[4558]: I0120 17:48:49.391784 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e9c779bd-e0e1-4c5d-b250-19ea3af0ebd7-config-data\") pod \"nova-scheduler-0\" (UID: \"e9c779bd-e0e1-4c5d-b250-19ea3af0ebd7\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:48:49 crc kubenswrapper[4558]: I0120 17:48:49.391917 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9c779bd-e0e1-4c5d-b250-19ea3af0ebd7-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"e9c779bd-e0e1-4c5d-b250-19ea3af0ebd7\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:48:49 crc kubenswrapper[4558]: I0120 17:48:49.392290 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f65j4\" (UniqueName: \"kubernetes.io/projected/e9c779bd-e0e1-4c5d-b250-19ea3af0ebd7-kube-api-access-f65j4\") pod \"nova-scheduler-0\" (UID: \"e9c779bd-e0e1-4c5d-b250-19ea3af0ebd7\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:48:49 crc kubenswrapper[4558]: I0120 17:48:49.401158 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9c779bd-e0e1-4c5d-b250-19ea3af0ebd7-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"e9c779bd-e0e1-4c5d-b250-19ea3af0ebd7\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:48:49 crc kubenswrapper[4558]: I0120 17:48:49.403533 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e9c779bd-e0e1-4c5d-b250-19ea3af0ebd7-config-data\") pod \"nova-scheduler-0\" (UID: \"e9c779bd-e0e1-4c5d-b250-19ea3af0ebd7\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:48:49 crc kubenswrapper[4558]: I0120 17:48:49.412057 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f65j4\" (UniqueName: \"kubernetes.io/projected/e9c779bd-e0e1-4c5d-b250-19ea3af0ebd7-kube-api-access-f65j4\") pod \"nova-scheduler-0\" (UID: \"e9c779bd-e0e1-4c5d-b250-19ea3af0ebd7\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:48:49 crc kubenswrapper[4558]: I0120 17:48:49.535614 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:48:49 crc kubenswrapper[4558]: I0120 17:48:49.713238 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:48:49 crc kubenswrapper[4558]: W0120 17:48:49.717772 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod05f17eb0_d7a8_4f7d_bf24_34e75e0e6a24.slice/crio-8aa4abab9aff5b5103ac7c39422239fd86921320c88794c5462d6ea1b6bf36eb WatchSource:0}: Error finding container 8aa4abab9aff5b5103ac7c39422239fd86921320c88794c5462d6ea1b6bf36eb: Status 404 returned error can't find the container with id 8aa4abab9aff5b5103ac7c39422239fd86921320c88794c5462d6ea1b6bf36eb Jan 20 17:48:49 crc kubenswrapper[4558]: I0120 17:48:49.846329 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"05f17eb0-d7a8-4f7d-bf24-34e75e0e6a24","Type":"ContainerStarted","Data":"8aa4abab9aff5b5103ac7c39422239fd86921320c88794c5462d6ea1b6bf36eb"} Jan 20 17:48:49 crc kubenswrapper[4558]: I0120 17:48:49.849405 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:48:49 crc kubenswrapper[4558]: I0120 17:48:49.849623 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="bafae32a-2cec-4706-a85a-13650bc1e409" containerName="nova-metadata-log" containerID="cri-o://ce3d83d8c57279cf05bed707292a08c5e4c228ec188200bd2d06da880c6b6f98" gracePeriod=30 Jan 20 17:48:49 crc kubenswrapper[4558]: I0120 17:48:49.849890 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="bafae32a-2cec-4706-a85a-13650bc1e409" containerName="nova-metadata-metadata" containerID="cri-o://26235d1bf080c38fe8ddf3322b1b8e9e6501c1558566e900d254b216a80b1bd8" gracePeriod=30 Jan 20 17:48:49 crc kubenswrapper[4558]: I0120 17:48:49.957891 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:48:50 crc kubenswrapper[4558]: I0120 17:48:50.284151 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:48:50 crc kubenswrapper[4558]: I0120 17:48:50.416662 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bafae32a-2cec-4706-a85a-13650bc1e409-config-data\") pod \"bafae32a-2cec-4706-a85a-13650bc1e409\" (UID: \"bafae32a-2cec-4706-a85a-13650bc1e409\") " Jan 20 17:48:50 crc kubenswrapper[4558]: I0120 17:48:50.417394 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bafae32a-2cec-4706-a85a-13650bc1e409-logs\") pod \"bafae32a-2cec-4706-a85a-13650bc1e409\" (UID: \"bafae32a-2cec-4706-a85a-13650bc1e409\") " Jan 20 17:48:50 crc kubenswrapper[4558]: I0120 17:48:50.417609 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bafae32a-2cec-4706-a85a-13650bc1e409-combined-ca-bundle\") pod \"bafae32a-2cec-4706-a85a-13650bc1e409\" (UID: \"bafae32a-2cec-4706-a85a-13650bc1e409\") " Jan 20 17:48:50 crc kubenswrapper[4558]: I0120 17:48:50.417710 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pvr5p\" (UniqueName: \"kubernetes.io/projected/bafae32a-2cec-4706-a85a-13650bc1e409-kube-api-access-pvr5p\") pod \"bafae32a-2cec-4706-a85a-13650bc1e409\" (UID: \"bafae32a-2cec-4706-a85a-13650bc1e409\") " Jan 20 17:48:50 crc kubenswrapper[4558]: I0120 17:48:50.417738 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/bafae32a-2cec-4706-a85a-13650bc1e409-nova-metadata-tls-certs\") pod \"bafae32a-2cec-4706-a85a-13650bc1e409\" (UID: \"bafae32a-2cec-4706-a85a-13650bc1e409\") " Jan 20 17:48:50 crc kubenswrapper[4558]: I0120 17:48:50.417855 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bafae32a-2cec-4706-a85a-13650bc1e409-logs" (OuterVolumeSpecName: "logs") pod "bafae32a-2cec-4706-a85a-13650bc1e409" (UID: "bafae32a-2cec-4706-a85a-13650bc1e409"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:48:50 crc kubenswrapper[4558]: I0120 17:48:50.418524 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/bafae32a-2cec-4706-a85a-13650bc1e409-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:50 crc kubenswrapper[4558]: I0120 17:48:50.421787 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bafae32a-2cec-4706-a85a-13650bc1e409-kube-api-access-pvr5p" (OuterVolumeSpecName: "kube-api-access-pvr5p") pod "bafae32a-2cec-4706-a85a-13650bc1e409" (UID: "bafae32a-2cec-4706-a85a-13650bc1e409"). InnerVolumeSpecName "kube-api-access-pvr5p". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:48:50 crc kubenswrapper[4558]: I0120 17:48:50.442701 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bafae32a-2cec-4706-a85a-13650bc1e409-config-data" (OuterVolumeSpecName: "config-data") pod "bafae32a-2cec-4706-a85a-13650bc1e409" (UID: "bafae32a-2cec-4706-a85a-13650bc1e409"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:48:50 crc kubenswrapper[4558]: I0120 17:48:50.443136 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bafae32a-2cec-4706-a85a-13650bc1e409-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bafae32a-2cec-4706-a85a-13650bc1e409" (UID: "bafae32a-2cec-4706-a85a-13650bc1e409"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:48:50 crc kubenswrapper[4558]: I0120 17:48:50.459048 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bafae32a-2cec-4706-a85a-13650bc1e409-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "bafae32a-2cec-4706-a85a-13650bc1e409" (UID: "bafae32a-2cec-4706-a85a-13650bc1e409"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:48:50 crc kubenswrapper[4558]: I0120 17:48:50.521012 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bafae32a-2cec-4706-a85a-13650bc1e409-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:50 crc kubenswrapper[4558]: I0120 17:48:50.521040 4558 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/bafae32a-2cec-4706-a85a-13650bc1e409-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:50 crc kubenswrapper[4558]: I0120 17:48:50.521056 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pvr5p\" (UniqueName: \"kubernetes.io/projected/bafae32a-2cec-4706-a85a-13650bc1e409-kube-api-access-pvr5p\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:50 crc kubenswrapper[4558]: I0120 17:48:50.521068 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bafae32a-2cec-4706-a85a-13650bc1e409-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:48:50 crc kubenswrapper[4558]: I0120 17:48:50.578899 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1f9f16e6-6a68-47b5-9f90-7275cdb64dd0" path="/var/lib/kubelet/pods/1f9f16e6-6a68-47b5-9f90-7275cdb64dd0/volumes" Jan 20 17:48:50 crc kubenswrapper[4558]: I0120 17:48:50.579530 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="60c9785f-c5f8-408e-ac84-12756b4ba66a" path="/var/lib/kubelet/pods/60c9785f-c5f8-408e-ac84-12756b4ba66a/volumes" Jan 20 17:48:50 crc kubenswrapper[4558]: I0120 17:48:50.870540 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"05f17eb0-d7a8-4f7d-bf24-34e75e0e6a24","Type":"ContainerStarted","Data":"c4ec83a5a4ed7f6b69650c3b13959b7f6df29650bd42d4526c4c3f55f297e5df"} Jan 20 17:48:50 crc kubenswrapper[4558]: I0120 17:48:50.870623 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"05f17eb0-d7a8-4f7d-bf24-34e75e0e6a24","Type":"ContainerStarted","Data":"3026297984a29af622bb8012206ae6219c884afff329a8fb7fc9b3b4a900461d"} Jan 20 17:48:50 crc kubenswrapper[4558]: I0120 17:48:50.873512 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"e9c779bd-e0e1-4c5d-b250-19ea3af0ebd7","Type":"ContainerStarted","Data":"5d8e949742027317779fbfe71db4f90347e84b2949237120e6fcab0a71145dc1"} Jan 20 17:48:50 crc kubenswrapper[4558]: I0120 
17:48:50.873568 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"e9c779bd-e0e1-4c5d-b250-19ea3af0ebd7","Type":"ContainerStarted","Data":"8ade77e71b9094f950417081856fe47794aa7e99565a23d44f2b34c0a9056080"} Jan 20 17:48:50 crc kubenswrapper[4558]: I0120 17:48:50.875819 4558 generic.go:334] "Generic (PLEG): container finished" podID="bafae32a-2cec-4706-a85a-13650bc1e409" containerID="26235d1bf080c38fe8ddf3322b1b8e9e6501c1558566e900d254b216a80b1bd8" exitCode=0 Jan 20 17:48:50 crc kubenswrapper[4558]: I0120 17:48:50.875847 4558 generic.go:334] "Generic (PLEG): container finished" podID="bafae32a-2cec-4706-a85a-13650bc1e409" containerID="ce3d83d8c57279cf05bed707292a08c5e4c228ec188200bd2d06da880c6b6f98" exitCode=143 Jan 20 17:48:50 crc kubenswrapper[4558]: I0120 17:48:50.876479 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:48:50 crc kubenswrapper[4558]: I0120 17:48:50.876999 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"bafae32a-2cec-4706-a85a-13650bc1e409","Type":"ContainerDied","Data":"26235d1bf080c38fe8ddf3322b1b8e9e6501c1558566e900d254b216a80b1bd8"} Jan 20 17:48:50 crc kubenswrapper[4558]: I0120 17:48:50.877027 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"bafae32a-2cec-4706-a85a-13650bc1e409","Type":"ContainerDied","Data":"ce3d83d8c57279cf05bed707292a08c5e4c228ec188200bd2d06da880c6b6f98"} Jan 20 17:48:50 crc kubenswrapper[4558]: I0120 17:48:50.877040 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"bafae32a-2cec-4706-a85a-13650bc1e409","Type":"ContainerDied","Data":"79b53f5df30c157f89aabebb7d3b03f2cf95e003d973a914606e80ac68c26434"} Jan 20 17:48:50 crc kubenswrapper[4558]: I0120 17:48:50.877058 4558 scope.go:117] "RemoveContainer" containerID="26235d1bf080c38fe8ddf3322b1b8e9e6501c1558566e900d254b216a80b1bd8" Jan 20 17:48:50 crc kubenswrapper[4558]: I0120 17:48:50.900486 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-api-0" podStartSLOduration=2.9004667140000002 podStartE2EDuration="2.900466714s" podCreationTimestamp="2026-01-20 17:48:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:48:50.899886192 +0000 UTC m=+4024.660224159" watchObservedRunningTime="2026-01-20 17:48:50.900466714 +0000 UTC m=+4024.660804682" Jan 20 17:48:50 crc kubenswrapper[4558]: I0120 17:48:50.914676 4558 scope.go:117] "RemoveContainer" containerID="ce3d83d8c57279cf05bed707292a08c5e4c228ec188200bd2d06da880c6b6f98" Jan 20 17:48:50 crc kubenswrapper[4558]: I0120 17:48:50.931540 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-scheduler-0" podStartSLOduration=1.931518133 podStartE2EDuration="1.931518133s" podCreationTimestamp="2026-01-20 17:48:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:48:50.917350051 +0000 UTC m=+4024.677688018" watchObservedRunningTime="2026-01-20 17:48:50.931518133 +0000 UTC m=+4024.691856099" Jan 20 17:48:50 crc kubenswrapper[4558]: I0120 17:48:50.942078 4558 scope.go:117] "RemoveContainer" 
containerID="26235d1bf080c38fe8ddf3322b1b8e9e6501c1558566e900d254b216a80b1bd8" Jan 20 17:48:50 crc kubenswrapper[4558]: E0120 17:48:50.942489 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"26235d1bf080c38fe8ddf3322b1b8e9e6501c1558566e900d254b216a80b1bd8\": container with ID starting with 26235d1bf080c38fe8ddf3322b1b8e9e6501c1558566e900d254b216a80b1bd8 not found: ID does not exist" containerID="26235d1bf080c38fe8ddf3322b1b8e9e6501c1558566e900d254b216a80b1bd8" Jan 20 17:48:50 crc kubenswrapper[4558]: I0120 17:48:50.942538 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"26235d1bf080c38fe8ddf3322b1b8e9e6501c1558566e900d254b216a80b1bd8"} err="failed to get container status \"26235d1bf080c38fe8ddf3322b1b8e9e6501c1558566e900d254b216a80b1bd8\": rpc error: code = NotFound desc = could not find container \"26235d1bf080c38fe8ddf3322b1b8e9e6501c1558566e900d254b216a80b1bd8\": container with ID starting with 26235d1bf080c38fe8ddf3322b1b8e9e6501c1558566e900d254b216a80b1bd8 not found: ID does not exist" Jan 20 17:48:50 crc kubenswrapper[4558]: I0120 17:48:50.942581 4558 scope.go:117] "RemoveContainer" containerID="ce3d83d8c57279cf05bed707292a08c5e4c228ec188200bd2d06da880c6b6f98" Jan 20 17:48:50 crc kubenswrapper[4558]: E0120 17:48:50.942801 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ce3d83d8c57279cf05bed707292a08c5e4c228ec188200bd2d06da880c6b6f98\": container with ID starting with ce3d83d8c57279cf05bed707292a08c5e4c228ec188200bd2d06da880c6b6f98 not found: ID does not exist" containerID="ce3d83d8c57279cf05bed707292a08c5e4c228ec188200bd2d06da880c6b6f98" Jan 20 17:48:50 crc kubenswrapper[4558]: I0120 17:48:50.942826 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ce3d83d8c57279cf05bed707292a08c5e4c228ec188200bd2d06da880c6b6f98"} err="failed to get container status \"ce3d83d8c57279cf05bed707292a08c5e4c228ec188200bd2d06da880c6b6f98\": rpc error: code = NotFound desc = could not find container \"ce3d83d8c57279cf05bed707292a08c5e4c228ec188200bd2d06da880c6b6f98\": container with ID starting with ce3d83d8c57279cf05bed707292a08c5e4c228ec188200bd2d06da880c6b6f98 not found: ID does not exist" Jan 20 17:48:50 crc kubenswrapper[4558]: I0120 17:48:50.942839 4558 scope.go:117] "RemoveContainer" containerID="26235d1bf080c38fe8ddf3322b1b8e9e6501c1558566e900d254b216a80b1bd8" Jan 20 17:48:50 crc kubenswrapper[4558]: I0120 17:48:50.943594 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"26235d1bf080c38fe8ddf3322b1b8e9e6501c1558566e900d254b216a80b1bd8"} err="failed to get container status \"26235d1bf080c38fe8ddf3322b1b8e9e6501c1558566e900d254b216a80b1bd8\": rpc error: code = NotFound desc = could not find container \"26235d1bf080c38fe8ddf3322b1b8e9e6501c1558566e900d254b216a80b1bd8\": container with ID starting with 26235d1bf080c38fe8ddf3322b1b8e9e6501c1558566e900d254b216a80b1bd8 not found: ID does not exist" Jan 20 17:48:50 crc kubenswrapper[4558]: I0120 17:48:50.943647 4558 scope.go:117] "RemoveContainer" containerID="ce3d83d8c57279cf05bed707292a08c5e4c228ec188200bd2d06da880c6b6f98" Jan 20 17:48:50 crc kubenswrapper[4558]: I0120 17:48:50.943921 4558 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"ce3d83d8c57279cf05bed707292a08c5e4c228ec188200bd2d06da880c6b6f98"} err="failed to get container status \"ce3d83d8c57279cf05bed707292a08c5e4c228ec188200bd2d06da880c6b6f98\": rpc error: code = NotFound desc = could not find container \"ce3d83d8c57279cf05bed707292a08c5e4c228ec188200bd2d06da880c6b6f98\": container with ID starting with ce3d83d8c57279cf05bed707292a08c5e4c228ec188200bd2d06da880c6b6f98 not found: ID does not exist" Jan 20 17:48:50 crc kubenswrapper[4558]: I0120 17:48:50.944480 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:48:50 crc kubenswrapper[4558]: I0120 17:48:50.949826 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:48:50 crc kubenswrapper[4558]: I0120 17:48:50.961193 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:48:50 crc kubenswrapper[4558]: E0120 17:48:50.970225 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bafae32a-2cec-4706-a85a-13650bc1e409" containerName="nova-metadata-metadata" Jan 20 17:48:50 crc kubenswrapper[4558]: I0120 17:48:50.970253 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="bafae32a-2cec-4706-a85a-13650bc1e409" containerName="nova-metadata-metadata" Jan 20 17:48:50 crc kubenswrapper[4558]: E0120 17:48:50.970297 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bafae32a-2cec-4706-a85a-13650bc1e409" containerName="nova-metadata-log" Jan 20 17:48:50 crc kubenswrapper[4558]: I0120 17:48:50.970304 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="bafae32a-2cec-4706-a85a-13650bc1e409" containerName="nova-metadata-log" Jan 20 17:48:50 crc kubenswrapper[4558]: I0120 17:48:50.970671 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="bafae32a-2cec-4706-a85a-13650bc1e409" containerName="nova-metadata-log" Jan 20 17:48:50 crc kubenswrapper[4558]: I0120 17:48:50.970687 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="bafae32a-2cec-4706-a85a-13650bc1e409" containerName="nova-metadata-metadata" Jan 20 17:48:50 crc kubenswrapper[4558]: I0120 17:48:50.971782 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:48:50 crc kubenswrapper[4558]: I0120 17:48:50.971812 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:48:50 crc kubenswrapper[4558]: I0120 17:48:50.983595 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-metadata-internal-svc" Jan 20 17:48:50 crc kubenswrapper[4558]: I0120 17:48:50.983836 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-metadata-config-data" Jan 20 17:48:51 crc kubenswrapper[4558]: I0120 17:48:51.136526 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vdnqt\" (UniqueName: \"kubernetes.io/projected/a508c4d0-5049-47c6-a7f9-1ba399ad7fc2-kube-api-access-vdnqt\") pod \"nova-metadata-0\" (UID: \"a508c4d0-5049-47c6-a7f9-1ba399ad7fc2\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:48:51 crc kubenswrapper[4558]: I0120 17:48:51.136595 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a508c4d0-5049-47c6-a7f9-1ba399ad7fc2-config-data\") pod \"nova-metadata-0\" (UID: \"a508c4d0-5049-47c6-a7f9-1ba399ad7fc2\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:48:51 crc kubenswrapper[4558]: I0120 17:48:51.136723 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/a508c4d0-5049-47c6-a7f9-1ba399ad7fc2-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"a508c4d0-5049-47c6-a7f9-1ba399ad7fc2\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:48:51 crc kubenswrapper[4558]: I0120 17:48:51.136985 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a508c4d0-5049-47c6-a7f9-1ba399ad7fc2-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"a508c4d0-5049-47c6-a7f9-1ba399ad7fc2\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:48:51 crc kubenswrapper[4558]: I0120 17:48:51.137008 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a508c4d0-5049-47c6-a7f9-1ba399ad7fc2-logs\") pod \"nova-metadata-0\" (UID: \"a508c4d0-5049-47c6-a7f9-1ba399ad7fc2\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:48:51 crc kubenswrapper[4558]: I0120 17:48:51.238298 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a508c4d0-5049-47c6-a7f9-1ba399ad7fc2-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"a508c4d0-5049-47c6-a7f9-1ba399ad7fc2\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:48:51 crc kubenswrapper[4558]: I0120 17:48:51.238382 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a508c4d0-5049-47c6-a7f9-1ba399ad7fc2-logs\") pod \"nova-metadata-0\" (UID: \"a508c4d0-5049-47c6-a7f9-1ba399ad7fc2\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:48:51 crc kubenswrapper[4558]: I0120 17:48:51.238515 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vdnqt\" (UniqueName: \"kubernetes.io/projected/a508c4d0-5049-47c6-a7f9-1ba399ad7fc2-kube-api-access-vdnqt\") pod \"nova-metadata-0\" (UID: \"a508c4d0-5049-47c6-a7f9-1ba399ad7fc2\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:48:51 
crc kubenswrapper[4558]: I0120 17:48:51.238554 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a508c4d0-5049-47c6-a7f9-1ba399ad7fc2-config-data\") pod \"nova-metadata-0\" (UID: \"a508c4d0-5049-47c6-a7f9-1ba399ad7fc2\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:48:51 crc kubenswrapper[4558]: I0120 17:48:51.238624 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/a508c4d0-5049-47c6-a7f9-1ba399ad7fc2-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"a508c4d0-5049-47c6-a7f9-1ba399ad7fc2\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:48:51 crc kubenswrapper[4558]: I0120 17:48:51.239080 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a508c4d0-5049-47c6-a7f9-1ba399ad7fc2-logs\") pod \"nova-metadata-0\" (UID: \"a508c4d0-5049-47c6-a7f9-1ba399ad7fc2\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:48:51 crc kubenswrapper[4558]: I0120 17:48:51.251853 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a508c4d0-5049-47c6-a7f9-1ba399ad7fc2-config-data\") pod \"nova-metadata-0\" (UID: \"a508c4d0-5049-47c6-a7f9-1ba399ad7fc2\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:48:51 crc kubenswrapper[4558]: I0120 17:48:51.256003 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vdnqt\" (UniqueName: \"kubernetes.io/projected/a508c4d0-5049-47c6-a7f9-1ba399ad7fc2-kube-api-access-vdnqt\") pod \"nova-metadata-0\" (UID: \"a508c4d0-5049-47c6-a7f9-1ba399ad7fc2\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:48:51 crc kubenswrapper[4558]: I0120 17:48:51.259153 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/a508c4d0-5049-47c6-a7f9-1ba399ad7fc2-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"a508c4d0-5049-47c6-a7f9-1ba399ad7fc2\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:48:51 crc kubenswrapper[4558]: I0120 17:48:51.264745 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a508c4d0-5049-47c6-a7f9-1ba399ad7fc2-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"a508c4d0-5049-47c6-a7f9-1ba399ad7fc2\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:48:51 crc kubenswrapper[4558]: I0120 17:48:51.296977 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:48:51 crc kubenswrapper[4558]: W0120 17:48:51.709543 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda508c4d0_5049_47c6_a7f9_1ba399ad7fc2.slice/crio-e45aef9a56d62f909be6316bb31e4d042b0ce25f307a1e61e248d2179b0ef229 WatchSource:0}: Error finding container e45aef9a56d62f909be6316bb31e4d042b0ce25f307a1e61e248d2179b0ef229: Status 404 returned error can't find the container with id e45aef9a56d62f909be6316bb31e4d042b0ce25f307a1e61e248d2179b0ef229 Jan 20 17:48:51 crc kubenswrapper[4558]: I0120 17:48:51.711716 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:48:51 crc kubenswrapper[4558]: I0120 17:48:51.888364 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"a508c4d0-5049-47c6-a7f9-1ba399ad7fc2","Type":"ContainerStarted","Data":"10350104f9f524bee4eacbb5f2a0be968b6dcc91c34a07012b9937e75daadec2"} Jan 20 17:48:51 crc kubenswrapper[4558]: I0120 17:48:51.888571 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"a508c4d0-5049-47c6-a7f9-1ba399ad7fc2","Type":"ContainerStarted","Data":"e45aef9a56d62f909be6316bb31e4d042b0ce25f307a1e61e248d2179b0ef229"} Jan 20 17:48:52 crc kubenswrapper[4558]: I0120 17:48:52.184231 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:48:52 crc kubenswrapper[4558]: I0120 17:48:52.209349 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:48:52 crc kubenswrapper[4558]: I0120 17:48:52.584301 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bafae32a-2cec-4706-a85a-13650bc1e409" path="/var/lib/kubelet/pods/bafae32a-2cec-4706-a85a-13650bc1e409/volumes" Jan 20 17:48:52 crc kubenswrapper[4558]: I0120 17:48:52.903986 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"a508c4d0-5049-47c6-a7f9-1ba399ad7fc2","Type":"ContainerStarted","Data":"b9aa397b914a20fcf309884840c2ce5fe7964e54da091fb0c04e24c43ff8302c"} Jan 20 17:48:52 crc kubenswrapper[4558]: I0120 17:48:52.923796 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-metadata-0" podStartSLOduration=2.923764586 podStartE2EDuration="2.923764586s" podCreationTimestamp="2026-01-20 17:48:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:48:52.920290614 +0000 UTC m=+4026.680628580" watchObservedRunningTime="2026-01-20 17:48:52.923764586 +0000 UTC m=+4026.684102553" Jan 20 17:48:54 crc kubenswrapper[4558]: I0120 17:48:54.536440 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:48:56 crc kubenswrapper[4558]: I0120 17:48:56.298201 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:48:56 crc kubenswrapper[4558]: I0120 17:48:56.298305 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:48:57 crc kubenswrapper[4558]: I0120 17:48:57.177906 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:48:57 crc kubenswrapper[4558]: I0120 17:48:57.196807 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:48:57 crc kubenswrapper[4558]: I0120 17:48:57.330442 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 17:48:57 crc kubenswrapper[4558]: I0120 17:48:57.330546 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 17:48:57 crc kubenswrapper[4558]: I0120 17:48:57.330628 4558 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" Jan 20 17:48:57 crc kubenswrapper[4558]: I0120 17:48:57.332073 4558 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"cf4cc5411787db46440d23f9870d3296fdb39888c50ddb23f460003bc8a70072"} pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 20 17:48:57 crc kubenswrapper[4558]: I0120 17:48:57.332207 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" containerID="cri-o://cf4cc5411787db46440d23f9870d3296fdb39888c50ddb23f460003bc8a70072" gracePeriod=600 Jan 20 17:48:57 crc kubenswrapper[4558]: I0120 17:48:57.955986 4558 generic.go:334] "Generic (PLEG): container finished" podID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerID="cf4cc5411787db46440d23f9870d3296fdb39888c50ddb23f460003bc8a70072" exitCode=0 Jan 20 17:48:57 crc kubenswrapper[4558]: I0120 17:48:57.956062 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerDied","Data":"cf4cc5411787db46440d23f9870d3296fdb39888c50ddb23f460003bc8a70072"} Jan 20 17:48:57 crc kubenswrapper[4558]: I0120 17:48:57.956832 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerStarted","Data":"4ea7fa61d8b21007a044345acec89fcebe56c2921cf0f76ff2002f44bdc88ede"} Jan 20 17:48:57 crc kubenswrapper[4558]: I0120 17:48:57.956858 4558 scope.go:117] "RemoveContainer" containerID="2d2a8989ca5a5af98b2c9dc8f801ea0675339ec14800f437c058c5a43c20146b" Jan 20 17:48:57 crc kubenswrapper[4558]: I0120 17:48:57.981905 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:48:58 crc kubenswrapper[4558]: I0120 17:48:58.121691 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-cell-mapping-pt6lr"] Jan 20 17:48:58 crc kubenswrapper[4558]: 
I0120 17:48:58.123064 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-pt6lr" Jan 20 17:48:58 crc kubenswrapper[4558]: I0120 17:48:58.129045 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-manage-config-data" Jan 20 17:48:58 crc kubenswrapper[4558]: I0120 17:48:58.129048 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-manage-scripts" Jan 20 17:48:58 crc kubenswrapper[4558]: I0120 17:48:58.142429 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-cell-mapping-pt6lr"] Jan 20 17:48:58 crc kubenswrapper[4558]: I0120 17:48:58.290094 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ea2267f-dec5-48ce-8f86-275077b68e57-config-data\") pod \"nova-cell1-cell-mapping-pt6lr\" (UID: \"8ea2267f-dec5-48ce-8f86-275077b68e57\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-pt6lr" Jan 20 17:48:58 crc kubenswrapper[4558]: I0120 17:48:58.291671 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bmld4\" (UniqueName: \"kubernetes.io/projected/8ea2267f-dec5-48ce-8f86-275077b68e57-kube-api-access-bmld4\") pod \"nova-cell1-cell-mapping-pt6lr\" (UID: \"8ea2267f-dec5-48ce-8f86-275077b68e57\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-pt6lr" Jan 20 17:48:58 crc kubenswrapper[4558]: I0120 17:48:58.291802 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ea2267f-dec5-48ce-8f86-275077b68e57-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-pt6lr\" (UID: \"8ea2267f-dec5-48ce-8f86-275077b68e57\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-pt6lr" Jan 20 17:48:58 crc kubenswrapper[4558]: I0120 17:48:58.292051 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8ea2267f-dec5-48ce-8f86-275077b68e57-scripts\") pod \"nova-cell1-cell-mapping-pt6lr\" (UID: \"8ea2267f-dec5-48ce-8f86-275077b68e57\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-pt6lr" Jan 20 17:48:58 crc kubenswrapper[4558]: I0120 17:48:58.395266 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ea2267f-dec5-48ce-8f86-275077b68e57-config-data\") pod \"nova-cell1-cell-mapping-pt6lr\" (UID: \"8ea2267f-dec5-48ce-8f86-275077b68e57\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-pt6lr" Jan 20 17:48:58 crc kubenswrapper[4558]: I0120 17:48:58.395342 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bmld4\" (UniqueName: \"kubernetes.io/projected/8ea2267f-dec5-48ce-8f86-275077b68e57-kube-api-access-bmld4\") pod \"nova-cell1-cell-mapping-pt6lr\" (UID: \"8ea2267f-dec5-48ce-8f86-275077b68e57\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-pt6lr" Jan 20 17:48:58 crc kubenswrapper[4558]: I0120 17:48:58.395379 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ea2267f-dec5-48ce-8f86-275077b68e57-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-pt6lr\" (UID: \"8ea2267f-dec5-48ce-8f86-275077b68e57\") " 
pod="openstack-kuttl-tests/nova-cell1-cell-mapping-pt6lr" Jan 20 17:48:58 crc kubenswrapper[4558]: I0120 17:48:58.395444 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8ea2267f-dec5-48ce-8f86-275077b68e57-scripts\") pod \"nova-cell1-cell-mapping-pt6lr\" (UID: \"8ea2267f-dec5-48ce-8f86-275077b68e57\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-pt6lr" Jan 20 17:48:58 crc kubenswrapper[4558]: I0120 17:48:58.403055 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8ea2267f-dec5-48ce-8f86-275077b68e57-scripts\") pod \"nova-cell1-cell-mapping-pt6lr\" (UID: \"8ea2267f-dec5-48ce-8f86-275077b68e57\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-pt6lr" Jan 20 17:48:58 crc kubenswrapper[4558]: I0120 17:48:58.403287 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ea2267f-dec5-48ce-8f86-275077b68e57-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-pt6lr\" (UID: \"8ea2267f-dec5-48ce-8f86-275077b68e57\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-pt6lr" Jan 20 17:48:58 crc kubenswrapper[4558]: I0120 17:48:58.403528 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ea2267f-dec5-48ce-8f86-275077b68e57-config-data\") pod \"nova-cell1-cell-mapping-pt6lr\" (UID: \"8ea2267f-dec5-48ce-8f86-275077b68e57\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-pt6lr" Jan 20 17:48:58 crc kubenswrapper[4558]: I0120 17:48:58.412383 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bmld4\" (UniqueName: \"kubernetes.io/projected/8ea2267f-dec5-48ce-8f86-275077b68e57-kube-api-access-bmld4\") pod \"nova-cell1-cell-mapping-pt6lr\" (UID: \"8ea2267f-dec5-48ce-8f86-275077b68e57\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-pt6lr" Jan 20 17:48:58 crc kubenswrapper[4558]: I0120 17:48:58.438159 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-pt6lr" Jan 20 17:48:58 crc kubenswrapper[4558]: I0120 17:48:58.869420 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-cell-mapping-pt6lr"] Jan 20 17:48:58 crc kubenswrapper[4558]: I0120 17:48:58.973618 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-pt6lr" event={"ID":"8ea2267f-dec5-48ce-8f86-275077b68e57","Type":"ContainerStarted","Data":"a22e158bdb6615e3e654354294e09d8193b94ae943c85a1e6aa7442fede904cf"} Jan 20 17:48:59 crc kubenswrapper[4558]: I0120 17:48:59.300495 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:48:59 crc kubenswrapper[4558]: I0120 17:48:59.300840 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:48:59 crc kubenswrapper[4558]: I0120 17:48:59.536241 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:48:59 crc kubenswrapper[4558]: I0120 17:48:59.572371 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:48:59 crc kubenswrapper[4558]: I0120 17:48:59.991065 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-pt6lr" event={"ID":"8ea2267f-dec5-48ce-8f86-275077b68e57","Type":"ContainerStarted","Data":"d9bc6595c7c4e08347d4624266d02240749799eeb7a5dd344e6f490af50f47f5"} Jan 20 17:49:00 crc kubenswrapper[4558]: I0120 17:49:00.017142 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-pt6lr" podStartSLOduration=2.017121381 podStartE2EDuration="2.017121381s" podCreationTimestamp="2026-01-20 17:48:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:49:00.010212911 +0000 UTC m=+4033.770550867" watchObservedRunningTime="2026-01-20 17:49:00.017121381 +0000 UTC m=+4033.777459348" Jan 20 17:49:00 crc kubenswrapper[4558]: I0120 17:49:00.019544 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:49:00 crc kubenswrapper[4558]: I0120 17:49:00.383333 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" podUID="05f17eb0-d7a8-4f7d-bf24-34e75e0e6a24" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.1.163:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 20 17:49:00 crc kubenswrapper[4558]: I0120 17:49:00.383386 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" podUID="05f17eb0-d7a8-4f7d-bf24-34e75e0e6a24" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.1.163:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 20 17:49:01 crc kubenswrapper[4558]: I0120 17:49:01.104357 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:49:01 crc kubenswrapper[4558]: I0120 17:49:01.297659 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:49:01 crc kubenswrapper[4558]: 
I0120 17:49:01.297850 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:49:02 crc kubenswrapper[4558]: I0120 17:49:02.309363 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-metadata-0" podUID="a508c4d0-5049-47c6-a7f9-1ba399ad7fc2" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.1.170:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:49:02 crc kubenswrapper[4558]: I0120 17:49:02.309380 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-metadata-0" podUID="a508c4d0-5049-47c6-a7f9-1ba399ad7fc2" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.1.170:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:49:03 crc kubenswrapper[4558]: I0120 17:49:03.020388 4558 generic.go:334] "Generic (PLEG): container finished" podID="8ea2267f-dec5-48ce-8f86-275077b68e57" containerID="d9bc6595c7c4e08347d4624266d02240749799eeb7a5dd344e6f490af50f47f5" exitCode=0 Jan 20 17:49:03 crc kubenswrapper[4558]: I0120 17:49:03.020452 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-pt6lr" event={"ID":"8ea2267f-dec5-48ce-8f86-275077b68e57","Type":"ContainerDied","Data":"d9bc6595c7c4e08347d4624266d02240749799eeb7a5dd344e6f490af50f47f5"} Jan 20 17:49:04 crc kubenswrapper[4558]: I0120 17:49:04.373821 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-pt6lr" Jan 20 17:49:04 crc kubenswrapper[4558]: I0120 17:49:04.435120 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ea2267f-dec5-48ce-8f86-275077b68e57-combined-ca-bundle\") pod \"8ea2267f-dec5-48ce-8f86-275077b68e57\" (UID: \"8ea2267f-dec5-48ce-8f86-275077b68e57\") " Jan 20 17:49:04 crc kubenswrapper[4558]: I0120 17:49:04.463681 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ea2267f-dec5-48ce-8f86-275077b68e57-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8ea2267f-dec5-48ce-8f86-275077b68e57" (UID: "8ea2267f-dec5-48ce-8f86-275077b68e57"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:49:04 crc kubenswrapper[4558]: I0120 17:49:04.537260 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ea2267f-dec5-48ce-8f86-275077b68e57-config-data\") pod \"8ea2267f-dec5-48ce-8f86-275077b68e57\" (UID: \"8ea2267f-dec5-48ce-8f86-275077b68e57\") " Jan 20 17:49:04 crc kubenswrapper[4558]: I0120 17:49:04.537456 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bmld4\" (UniqueName: \"kubernetes.io/projected/8ea2267f-dec5-48ce-8f86-275077b68e57-kube-api-access-bmld4\") pod \"8ea2267f-dec5-48ce-8f86-275077b68e57\" (UID: \"8ea2267f-dec5-48ce-8f86-275077b68e57\") " Jan 20 17:49:04 crc kubenswrapper[4558]: I0120 17:49:04.537680 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8ea2267f-dec5-48ce-8f86-275077b68e57-scripts\") pod \"8ea2267f-dec5-48ce-8f86-275077b68e57\" (UID: \"8ea2267f-dec5-48ce-8f86-275077b68e57\") " Jan 20 17:49:04 crc kubenswrapper[4558]: I0120 17:49:04.538525 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ea2267f-dec5-48ce-8f86-275077b68e57-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:49:04 crc kubenswrapper[4558]: I0120 17:49:04.544279 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8ea2267f-dec5-48ce-8f86-275077b68e57-kube-api-access-bmld4" (OuterVolumeSpecName: "kube-api-access-bmld4") pod "8ea2267f-dec5-48ce-8f86-275077b68e57" (UID: "8ea2267f-dec5-48ce-8f86-275077b68e57"). InnerVolumeSpecName "kube-api-access-bmld4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:49:04 crc kubenswrapper[4558]: I0120 17:49:04.544804 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ea2267f-dec5-48ce-8f86-275077b68e57-scripts" (OuterVolumeSpecName: "scripts") pod "8ea2267f-dec5-48ce-8f86-275077b68e57" (UID: "8ea2267f-dec5-48ce-8f86-275077b68e57"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:49:04 crc kubenswrapper[4558]: I0120 17:49:04.558342 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ea2267f-dec5-48ce-8f86-275077b68e57-config-data" (OuterVolumeSpecName: "config-data") pod "8ea2267f-dec5-48ce-8f86-275077b68e57" (UID: "8ea2267f-dec5-48ce-8f86-275077b68e57"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:49:04 crc kubenswrapper[4558]: I0120 17:49:04.641292 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8ea2267f-dec5-48ce-8f86-275077b68e57-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:49:04 crc kubenswrapper[4558]: I0120 17:49:04.641326 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ea2267f-dec5-48ce-8f86-275077b68e57-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:49:04 crc kubenswrapper[4558]: I0120 17:49:04.641339 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bmld4\" (UniqueName: \"kubernetes.io/projected/8ea2267f-dec5-48ce-8f86-275077b68e57-kube-api-access-bmld4\") on node \"crc\" DevicePath \"\"" Jan 20 17:49:05 crc kubenswrapper[4558]: I0120 17:49:05.044055 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-pt6lr" event={"ID":"8ea2267f-dec5-48ce-8f86-275077b68e57","Type":"ContainerDied","Data":"a22e158bdb6615e3e654354294e09d8193b94ae943c85a1e6aa7442fede904cf"} Jan 20 17:49:05 crc kubenswrapper[4558]: I0120 17:49:05.044421 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a22e158bdb6615e3e654354294e09d8193b94ae943c85a1e6aa7442fede904cf" Jan 20 17:49:05 crc kubenswrapper[4558]: I0120 17:49:05.044373 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-pt6lr" Jan 20 17:49:05 crc kubenswrapper[4558]: I0120 17:49:05.223764 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:49:05 crc kubenswrapper[4558]: I0120 17:49:05.224052 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="05f17eb0-d7a8-4f7d-bf24-34e75e0e6a24" containerName="nova-api-log" containerID="cri-o://3026297984a29af622bb8012206ae6219c884afff329a8fb7fc9b3b4a900461d" gracePeriod=30 Jan 20 17:49:05 crc kubenswrapper[4558]: I0120 17:49:05.224125 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="05f17eb0-d7a8-4f7d-bf24-34e75e0e6a24" containerName="nova-api-api" containerID="cri-o://c4ec83a5a4ed7f6b69650c3b13959b7f6df29650bd42d4526c4c3f55f297e5df" gracePeriod=30 Jan 20 17:49:05 crc kubenswrapper[4558]: I0120 17:49:05.234929 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:49:05 crc kubenswrapper[4558]: I0120 17:49:05.235253 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="e9c779bd-e0e1-4c5d-b250-19ea3af0ebd7" containerName="nova-scheduler-scheduler" containerID="cri-o://5d8e949742027317779fbfe71db4f90347e84b2949237120e6fcab0a71145dc1" gracePeriod=30 Jan 20 17:49:05 crc kubenswrapper[4558]: I0120 17:49:05.243021 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:49:05 crc kubenswrapper[4558]: I0120 17:49:05.243276 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="a508c4d0-5049-47c6-a7f9-1ba399ad7fc2" containerName="nova-metadata-log" containerID="cri-o://10350104f9f524bee4eacbb5f2a0be968b6dcc91c34a07012b9937e75daadec2" gracePeriod=30 Jan 20 17:49:05 crc 
kubenswrapper[4558]: I0120 17:49:05.243358 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="a508c4d0-5049-47c6-a7f9-1ba399ad7fc2" containerName="nova-metadata-metadata" containerID="cri-o://b9aa397b914a20fcf309884840c2ce5fe7964e54da091fb0c04e24c43ff8302c" gracePeriod=30 Jan 20 17:49:05 crc kubenswrapper[4558]: I0120 17:49:05.322904 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:49:05 crc kubenswrapper[4558]: I0120 17:49:05.323488 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/kube-state-metrics-0" podUID="de434991-f9a6-4b49-ad42-bb25402f95b2" containerName="kube-state-metrics" containerID="cri-o://03e417adbb6df387398a6d3df365127e6ebd8cf6395f94f668cf4bd1c8c08559" gracePeriod=30 Jan 20 17:49:05 crc kubenswrapper[4558]: I0120 17:49:05.843720 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:49:05 crc kubenswrapper[4558]: I0120 17:49:05.863071 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bmt7c\" (UniqueName: \"kubernetes.io/projected/de434991-f9a6-4b49-ad42-bb25402f95b2-kube-api-access-bmt7c\") pod \"de434991-f9a6-4b49-ad42-bb25402f95b2\" (UID: \"de434991-f9a6-4b49-ad42-bb25402f95b2\") " Jan 20 17:49:05 crc kubenswrapper[4558]: I0120 17:49:05.868308 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/de434991-f9a6-4b49-ad42-bb25402f95b2-kube-api-access-bmt7c" (OuterVolumeSpecName: "kube-api-access-bmt7c") pod "de434991-f9a6-4b49-ad42-bb25402f95b2" (UID: "de434991-f9a6-4b49-ad42-bb25402f95b2"). InnerVolumeSpecName "kube-api-access-bmt7c". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:49:05 crc kubenswrapper[4558]: I0120 17:49:05.965155 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bmt7c\" (UniqueName: \"kubernetes.io/projected/de434991-f9a6-4b49-ad42-bb25402f95b2-kube-api-access-bmt7c\") on node \"crc\" DevicePath \"\"" Jan 20 17:49:06 crc kubenswrapper[4558]: I0120 17:49:06.055105 4558 generic.go:334] "Generic (PLEG): container finished" podID="a508c4d0-5049-47c6-a7f9-1ba399ad7fc2" containerID="10350104f9f524bee4eacbb5f2a0be968b6dcc91c34a07012b9937e75daadec2" exitCode=143 Jan 20 17:49:06 crc kubenswrapper[4558]: I0120 17:49:06.055192 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"a508c4d0-5049-47c6-a7f9-1ba399ad7fc2","Type":"ContainerDied","Data":"10350104f9f524bee4eacbb5f2a0be968b6dcc91c34a07012b9937e75daadec2"} Jan 20 17:49:06 crc kubenswrapper[4558]: I0120 17:49:06.057132 4558 generic.go:334] "Generic (PLEG): container finished" podID="de434991-f9a6-4b49-ad42-bb25402f95b2" containerID="03e417adbb6df387398a6d3df365127e6ebd8cf6395f94f668cf4bd1c8c08559" exitCode=2 Jan 20 17:49:06 crc kubenswrapper[4558]: I0120 17:49:06.057195 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"de434991-f9a6-4b49-ad42-bb25402f95b2","Type":"ContainerDied","Data":"03e417adbb6df387398a6d3df365127e6ebd8cf6395f94f668cf4bd1c8c08559"} Jan 20 17:49:06 crc kubenswrapper[4558]: I0120 17:49:06.057214 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:49:06 crc kubenswrapper[4558]: I0120 17:49:06.057241 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"de434991-f9a6-4b49-ad42-bb25402f95b2","Type":"ContainerDied","Data":"1468560ec49e10f61c01e72c8e1cd6619948a7dfc20c237e7cd8498400e7d49b"} Jan 20 17:49:06 crc kubenswrapper[4558]: I0120 17:49:06.057264 4558 scope.go:117] "RemoveContainer" containerID="03e417adbb6df387398a6d3df365127e6ebd8cf6395f94f668cf4bd1c8c08559" Jan 20 17:49:06 crc kubenswrapper[4558]: I0120 17:49:06.060105 4558 generic.go:334] "Generic (PLEG): container finished" podID="05f17eb0-d7a8-4f7d-bf24-34e75e0e6a24" containerID="3026297984a29af622bb8012206ae6219c884afff329a8fb7fc9b3b4a900461d" exitCode=143 Jan 20 17:49:06 crc kubenswrapper[4558]: I0120 17:49:06.060158 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"05f17eb0-d7a8-4f7d-bf24-34e75e0e6a24","Type":"ContainerDied","Data":"3026297984a29af622bb8012206ae6219c884afff329a8fb7fc9b3b4a900461d"} Jan 20 17:49:06 crc kubenswrapper[4558]: I0120 17:49:06.066214 4558 generic.go:334] "Generic (PLEG): container finished" podID="e9c779bd-e0e1-4c5d-b250-19ea3af0ebd7" containerID="5d8e949742027317779fbfe71db4f90347e84b2949237120e6fcab0a71145dc1" exitCode=0 Jan 20 17:49:06 crc kubenswrapper[4558]: I0120 17:49:06.066255 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"e9c779bd-e0e1-4c5d-b250-19ea3af0ebd7","Type":"ContainerDied","Data":"5d8e949742027317779fbfe71db4f90347e84b2949237120e6fcab0a71145dc1"} Jan 20 17:49:06 crc kubenswrapper[4558]: I0120 17:49:06.115145 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:49:06 crc kubenswrapper[4558]: I0120 17:49:06.122341 4558 scope.go:117] "RemoveContainer" containerID="03e417adbb6df387398a6d3df365127e6ebd8cf6395f94f668cf4bd1c8c08559" Jan 20 17:49:06 crc kubenswrapper[4558]: E0120 17:49:06.123799 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"03e417adbb6df387398a6d3df365127e6ebd8cf6395f94f668cf4bd1c8c08559\": container with ID starting with 03e417adbb6df387398a6d3df365127e6ebd8cf6395f94f668cf4bd1c8c08559 not found: ID does not exist" containerID="03e417adbb6df387398a6d3df365127e6ebd8cf6395f94f668cf4bd1c8c08559" Jan 20 17:49:06 crc kubenswrapper[4558]: I0120 17:49:06.123842 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"03e417adbb6df387398a6d3df365127e6ebd8cf6395f94f668cf4bd1c8c08559"} err="failed to get container status \"03e417adbb6df387398a6d3df365127e6ebd8cf6395f94f668cf4bd1c8c08559\": rpc error: code = NotFound desc = could not find container \"03e417adbb6df387398a6d3df365127e6ebd8cf6395f94f668cf4bd1c8c08559\": container with ID starting with 03e417adbb6df387398a6d3df365127e6ebd8cf6395f94f668cf4bd1c8c08559 not found: ID does not exist" Jan 20 17:49:06 crc kubenswrapper[4558]: I0120 17:49:06.132729 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:49:06 crc kubenswrapper[4558]: I0120 17:49:06.144860 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:49:06 crc kubenswrapper[4558]: E0120 17:49:06.145424 4558 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="de434991-f9a6-4b49-ad42-bb25402f95b2" containerName="kube-state-metrics" Jan 20 17:49:06 crc kubenswrapper[4558]: I0120 17:49:06.145457 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="de434991-f9a6-4b49-ad42-bb25402f95b2" containerName="kube-state-metrics" Jan 20 17:49:06 crc kubenswrapper[4558]: E0120 17:49:06.145485 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ea2267f-dec5-48ce-8f86-275077b68e57" containerName="nova-manage" Jan 20 17:49:06 crc kubenswrapper[4558]: I0120 17:49:06.145492 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ea2267f-dec5-48ce-8f86-275077b68e57" containerName="nova-manage" Jan 20 17:49:06 crc kubenswrapper[4558]: I0120 17:49:06.145693 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ea2267f-dec5-48ce-8f86-275077b68e57" containerName="nova-manage" Jan 20 17:49:06 crc kubenswrapper[4558]: I0120 17:49:06.145715 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="de434991-f9a6-4b49-ad42-bb25402f95b2" containerName="kube-state-metrics" Jan 20 17:49:06 crc kubenswrapper[4558]: I0120 17:49:06.146445 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:49:06 crc kubenswrapper[4558]: I0120 17:49:06.154366 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-kube-state-metrics-svc" Jan 20 17:49:06 crc kubenswrapper[4558]: I0120 17:49:06.154538 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"kube-state-metrics-tls-config" Jan 20 17:49:06 crc kubenswrapper[4558]: I0120 17:49:06.157677 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:49:06 crc kubenswrapper[4558]: I0120 17:49:06.270590 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/49822b01-63d0-40d3-b604-7980891b0683-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"49822b01-63d0-40d3-b604-7980891b0683\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:49:06 crc kubenswrapper[4558]: I0120 17:49:06.270692 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/49822b01-63d0-40d3-b604-7980891b0683-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"49822b01-63d0-40d3-b604-7980891b0683\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:49:06 crc kubenswrapper[4558]: I0120 17:49:06.270726 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-brxgv\" (UniqueName: \"kubernetes.io/projected/49822b01-63d0-40d3-b604-7980891b0683-kube-api-access-brxgv\") pod \"kube-state-metrics-0\" (UID: \"49822b01-63d0-40d3-b604-7980891b0683\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:49:06 crc kubenswrapper[4558]: I0120 17:49:06.270767 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/49822b01-63d0-40d3-b604-7980891b0683-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"49822b01-63d0-40d3-b604-7980891b0683\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:49:06 crc kubenswrapper[4558]: I0120 
17:49:06.364801 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:49:06 crc kubenswrapper[4558]: I0120 17:49:06.372983 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/49822b01-63d0-40d3-b604-7980891b0683-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"49822b01-63d0-40d3-b604-7980891b0683\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:49:06 crc kubenswrapper[4558]: I0120 17:49:06.373860 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/49822b01-63d0-40d3-b604-7980891b0683-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"49822b01-63d0-40d3-b604-7980891b0683\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:49:06 crc kubenswrapper[4558]: I0120 17:49:06.374019 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-brxgv\" (UniqueName: \"kubernetes.io/projected/49822b01-63d0-40d3-b604-7980891b0683-kube-api-access-brxgv\") pod \"kube-state-metrics-0\" (UID: \"49822b01-63d0-40d3-b604-7980891b0683\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:49:06 crc kubenswrapper[4558]: I0120 17:49:06.374182 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/49822b01-63d0-40d3-b604-7980891b0683-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"49822b01-63d0-40d3-b604-7980891b0683\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:49:06 crc kubenswrapper[4558]: I0120 17:49:06.378972 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/49822b01-63d0-40d3-b604-7980891b0683-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"49822b01-63d0-40d3-b604-7980891b0683\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:49:06 crc kubenswrapper[4558]: I0120 17:49:06.382828 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/49822b01-63d0-40d3-b604-7980891b0683-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"49822b01-63d0-40d3-b604-7980891b0683\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:49:06 crc kubenswrapper[4558]: I0120 17:49:06.383445 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/49822b01-63d0-40d3-b604-7980891b0683-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"49822b01-63d0-40d3-b604-7980891b0683\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:49:06 crc kubenswrapper[4558]: I0120 17:49:06.397622 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-brxgv\" (UniqueName: \"kubernetes.io/projected/49822b01-63d0-40d3-b604-7980891b0683-kube-api-access-brxgv\") pod \"kube-state-metrics-0\" (UID: \"49822b01-63d0-40d3-b604-7980891b0683\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:49:06 crc kubenswrapper[4558]: I0120 17:49:06.464848 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:49:06 crc kubenswrapper[4558]: I0120 17:49:06.474927 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f65j4\" (UniqueName: \"kubernetes.io/projected/e9c779bd-e0e1-4c5d-b250-19ea3af0ebd7-kube-api-access-f65j4\") pod \"e9c779bd-e0e1-4c5d-b250-19ea3af0ebd7\" (UID: \"e9c779bd-e0e1-4c5d-b250-19ea3af0ebd7\") " Jan 20 17:49:06 crc kubenswrapper[4558]: I0120 17:49:06.475010 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9c779bd-e0e1-4c5d-b250-19ea3af0ebd7-combined-ca-bundle\") pod \"e9c779bd-e0e1-4c5d-b250-19ea3af0ebd7\" (UID: \"e9c779bd-e0e1-4c5d-b250-19ea3af0ebd7\") " Jan 20 17:49:06 crc kubenswrapper[4558]: I0120 17:49:06.475131 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e9c779bd-e0e1-4c5d-b250-19ea3af0ebd7-config-data\") pod \"e9c779bd-e0e1-4c5d-b250-19ea3af0ebd7\" (UID: \"e9c779bd-e0e1-4c5d-b250-19ea3af0ebd7\") " Jan 20 17:49:06 crc kubenswrapper[4558]: I0120 17:49:06.480202 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e9c779bd-e0e1-4c5d-b250-19ea3af0ebd7-kube-api-access-f65j4" (OuterVolumeSpecName: "kube-api-access-f65j4") pod "e9c779bd-e0e1-4c5d-b250-19ea3af0ebd7" (UID: "e9c779bd-e0e1-4c5d-b250-19ea3af0ebd7"). InnerVolumeSpecName "kube-api-access-f65j4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:49:06 crc kubenswrapper[4558]: I0120 17:49:06.497349 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9c779bd-e0e1-4c5d-b250-19ea3af0ebd7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e9c779bd-e0e1-4c5d-b250-19ea3af0ebd7" (UID: "e9c779bd-e0e1-4c5d-b250-19ea3af0ebd7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:49:06 crc kubenswrapper[4558]: I0120 17:49:06.498192 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9c779bd-e0e1-4c5d-b250-19ea3af0ebd7-config-data" (OuterVolumeSpecName: "config-data") pod "e9c779bd-e0e1-4c5d-b250-19ea3af0ebd7" (UID: "e9c779bd-e0e1-4c5d-b250-19ea3af0ebd7"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:49:06 crc kubenswrapper[4558]: I0120 17:49:06.581289 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f65j4\" (UniqueName: \"kubernetes.io/projected/e9c779bd-e0e1-4c5d-b250-19ea3af0ebd7-kube-api-access-f65j4\") on node \"crc\" DevicePath \"\"" Jan 20 17:49:06 crc kubenswrapper[4558]: I0120 17:49:06.581523 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e9c779bd-e0e1-4c5d-b250-19ea3af0ebd7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:49:06 crc kubenswrapper[4558]: I0120 17:49:06.581534 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e9c779bd-e0e1-4c5d-b250-19ea3af0ebd7-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:49:06 crc kubenswrapper[4558]: I0120 17:49:06.584931 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="de434991-f9a6-4b49-ad42-bb25402f95b2" path="/var/lib/kubelet/pods/de434991-f9a6-4b49-ad42-bb25402f95b2/volumes" Jan 20 17:49:06 crc kubenswrapper[4558]: W0120 17:49:06.874083 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod49822b01_63d0_40d3_b604_7980891b0683.slice/crio-590789da94b63822bbe31a06415d2201489b900e17908f80eb85919ce57a4b12 WatchSource:0}: Error finding container 590789da94b63822bbe31a06415d2201489b900e17908f80eb85919ce57a4b12: Status 404 returned error can't find the container with id 590789da94b63822bbe31a06415d2201489b900e17908f80eb85919ce57a4b12 Jan 20 17:49:06 crc kubenswrapper[4558]: I0120 17:49:06.874552 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:49:07 crc kubenswrapper[4558]: I0120 17:49:07.080631 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"e9c779bd-e0e1-4c5d-b250-19ea3af0ebd7","Type":"ContainerDied","Data":"8ade77e71b9094f950417081856fe47794aa7e99565a23d44f2b34c0a9056080"} Jan 20 17:49:07 crc kubenswrapper[4558]: I0120 17:49:07.080938 4558 scope.go:117] "RemoveContainer" containerID="5d8e949742027317779fbfe71db4f90347e84b2949237120e6fcab0a71145dc1" Jan 20 17:49:07 crc kubenswrapper[4558]: I0120 17:49:07.080681 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:49:07 crc kubenswrapper[4558]: I0120 17:49:07.084547 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"49822b01-63d0-40d3-b604-7980891b0683","Type":"ContainerStarted","Data":"590789da94b63822bbe31a06415d2201489b900e17908f80eb85919ce57a4b12"} Jan 20 17:49:07 crc kubenswrapper[4558]: I0120 17:49:07.104256 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:49:07 crc kubenswrapper[4558]: I0120 17:49:07.113385 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:49:07 crc kubenswrapper[4558]: I0120 17:49:07.128510 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:49:07 crc kubenswrapper[4558]: E0120 17:49:07.129069 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9c779bd-e0e1-4c5d-b250-19ea3af0ebd7" containerName="nova-scheduler-scheduler" Jan 20 17:49:07 crc kubenswrapper[4558]: I0120 17:49:07.129101 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9c779bd-e0e1-4c5d-b250-19ea3af0ebd7" containerName="nova-scheduler-scheduler" Jan 20 17:49:07 crc kubenswrapper[4558]: I0120 17:49:07.129387 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e9c779bd-e0e1-4c5d-b250-19ea3af0ebd7" containerName="nova-scheduler-scheduler" Jan 20 17:49:07 crc kubenswrapper[4558]: I0120 17:49:07.130185 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:49:07 crc kubenswrapper[4558]: I0120 17:49:07.134673 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-scheduler-config-data" Jan 20 17:49:07 crc kubenswrapper[4558]: I0120 17:49:07.137664 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:49:07 crc kubenswrapper[4558]: I0120 17:49:07.196527 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b13b98ea-20eb-452f-9316-a7acdeb18406-config-data\") pod \"nova-scheduler-0\" (UID: \"b13b98ea-20eb-452f-9316-a7acdeb18406\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:49:07 crc kubenswrapper[4558]: I0120 17:49:07.196736 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fjmng\" (UniqueName: \"kubernetes.io/projected/b13b98ea-20eb-452f-9316-a7acdeb18406-kube-api-access-fjmng\") pod \"nova-scheduler-0\" (UID: \"b13b98ea-20eb-452f-9316-a7acdeb18406\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:49:07 crc kubenswrapper[4558]: I0120 17:49:07.196846 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b13b98ea-20eb-452f-9316-a7acdeb18406-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"b13b98ea-20eb-452f-9316-a7acdeb18406\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:49:07 crc kubenswrapper[4558]: I0120 17:49:07.284819 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:49:07 crc kubenswrapper[4558]: I0120 17:49:07.285101 4558 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack-kuttl-tests/ceilometer-0" podUID="846b8ff4-6a4d-4429-baf0-affe344f2443" containerName="ceilometer-central-agent" containerID="cri-o://a0200376b46aaa5f9285cd9453a248f2b1abe657063801fbd9128f361cc1773b" gracePeriod=30 Jan 20 17:49:07 crc kubenswrapper[4558]: I0120 17:49:07.285183 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="846b8ff4-6a4d-4429-baf0-affe344f2443" containerName="proxy-httpd" containerID="cri-o://01900246a5a0ba025c5a871a5361756e9505243d35ab1589a6d16c801bc7454a" gracePeriod=30 Jan 20 17:49:07 crc kubenswrapper[4558]: I0120 17:49:07.285386 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="846b8ff4-6a4d-4429-baf0-affe344f2443" containerName="sg-core" containerID="cri-o://2fa9e5be032d97b5b704227d73210d4f13c21adfffb77cabee05509d24e7c834" gracePeriod=30 Jan 20 17:49:07 crc kubenswrapper[4558]: I0120 17:49:07.285461 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="846b8ff4-6a4d-4429-baf0-affe344f2443" containerName="ceilometer-notification-agent" containerID="cri-o://5b002afccf52b36ca6aaeb4952c36ef3a337dc87396bc522ab4018f88bc487d0" gracePeriod=30 Jan 20 17:49:07 crc kubenswrapper[4558]: I0120 17:49:07.299730 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fjmng\" (UniqueName: \"kubernetes.io/projected/b13b98ea-20eb-452f-9316-a7acdeb18406-kube-api-access-fjmng\") pod \"nova-scheduler-0\" (UID: \"b13b98ea-20eb-452f-9316-a7acdeb18406\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:49:07 crc kubenswrapper[4558]: I0120 17:49:07.299813 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b13b98ea-20eb-452f-9316-a7acdeb18406-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"b13b98ea-20eb-452f-9316-a7acdeb18406\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:49:07 crc kubenswrapper[4558]: I0120 17:49:07.299952 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b13b98ea-20eb-452f-9316-a7acdeb18406-config-data\") pod \"nova-scheduler-0\" (UID: \"b13b98ea-20eb-452f-9316-a7acdeb18406\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:49:07 crc kubenswrapper[4558]: I0120 17:49:07.303832 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b13b98ea-20eb-452f-9316-a7acdeb18406-config-data\") pod \"nova-scheduler-0\" (UID: \"b13b98ea-20eb-452f-9316-a7acdeb18406\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:49:07 crc kubenswrapper[4558]: I0120 17:49:07.304551 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b13b98ea-20eb-452f-9316-a7acdeb18406-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"b13b98ea-20eb-452f-9316-a7acdeb18406\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:49:07 crc kubenswrapper[4558]: I0120 17:49:07.322820 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fjmng\" (UniqueName: \"kubernetes.io/projected/b13b98ea-20eb-452f-9316-a7acdeb18406-kube-api-access-fjmng\") pod \"nova-scheduler-0\" (UID: \"b13b98ea-20eb-452f-9316-a7acdeb18406\") " 
pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:49:07 crc kubenswrapper[4558]: I0120 17:49:07.451030 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:49:07 crc kubenswrapper[4558]: I0120 17:49:07.903852 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:49:07 crc kubenswrapper[4558]: W0120 17:49:07.904705 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb13b98ea_20eb_452f_9316_a7acdeb18406.slice/crio-b20692e4087358e9fee4d19b58456d0f8695163aa66850f3c0633fd5582f37b8 WatchSource:0}: Error finding container b20692e4087358e9fee4d19b58456d0f8695163aa66850f3c0633fd5582f37b8: Status 404 returned error can't find the container with id b20692e4087358e9fee4d19b58456d0f8695163aa66850f3c0633fd5582f37b8 Jan 20 17:49:08 crc kubenswrapper[4558]: I0120 17:49:08.098011 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"b13b98ea-20eb-452f-9316-a7acdeb18406","Type":"ContainerStarted","Data":"bccb9b380d5d7d133e1f9deb550275ef3efade0d9df8c3d33db8b87c1a6fd80b"} Jan 20 17:49:08 crc kubenswrapper[4558]: I0120 17:49:08.098303 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"b13b98ea-20eb-452f-9316-a7acdeb18406","Type":"ContainerStarted","Data":"b20692e4087358e9fee4d19b58456d0f8695163aa66850f3c0633fd5582f37b8"} Jan 20 17:49:08 crc kubenswrapper[4558]: I0120 17:49:08.100510 4558 generic.go:334] "Generic (PLEG): container finished" podID="846b8ff4-6a4d-4429-baf0-affe344f2443" containerID="01900246a5a0ba025c5a871a5361756e9505243d35ab1589a6d16c801bc7454a" exitCode=0 Jan 20 17:49:08 crc kubenswrapper[4558]: I0120 17:49:08.100544 4558 generic.go:334] "Generic (PLEG): container finished" podID="846b8ff4-6a4d-4429-baf0-affe344f2443" containerID="2fa9e5be032d97b5b704227d73210d4f13c21adfffb77cabee05509d24e7c834" exitCode=2 Jan 20 17:49:08 crc kubenswrapper[4558]: I0120 17:49:08.100553 4558 generic.go:334] "Generic (PLEG): container finished" podID="846b8ff4-6a4d-4429-baf0-affe344f2443" containerID="a0200376b46aaa5f9285cd9453a248f2b1abe657063801fbd9128f361cc1773b" exitCode=0 Jan 20 17:49:08 crc kubenswrapper[4558]: I0120 17:49:08.100597 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"846b8ff4-6a4d-4429-baf0-affe344f2443","Type":"ContainerDied","Data":"01900246a5a0ba025c5a871a5361756e9505243d35ab1589a6d16c801bc7454a"} Jan 20 17:49:08 crc kubenswrapper[4558]: I0120 17:49:08.100651 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"846b8ff4-6a4d-4429-baf0-affe344f2443","Type":"ContainerDied","Data":"2fa9e5be032d97b5b704227d73210d4f13c21adfffb77cabee05509d24e7c834"} Jan 20 17:49:08 crc kubenswrapper[4558]: I0120 17:49:08.100665 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"846b8ff4-6a4d-4429-baf0-affe344f2443","Type":"ContainerDied","Data":"a0200376b46aaa5f9285cd9453a248f2b1abe657063801fbd9128f361cc1773b"} Jan 20 17:49:08 crc kubenswrapper[4558]: I0120 17:49:08.103569 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" 
event={"ID":"49822b01-63d0-40d3-b604-7980891b0683","Type":"ContainerStarted","Data":"7b033a817f9b0c0a5a28b3bb09ca9cc6417d0b599821e6bdbdcf0121964efdf3"} Jan 20 17:49:08 crc kubenswrapper[4558]: I0120 17:49:08.103691 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:49:08 crc kubenswrapper[4558]: I0120 17:49:08.119346 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-scheduler-0" podStartSLOduration=1.119335335 podStartE2EDuration="1.119335335s" podCreationTimestamp="2026-01-20 17:49:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:49:08.1113177 +0000 UTC m=+4041.871655667" watchObservedRunningTime="2026-01-20 17:49:08.119335335 +0000 UTC m=+4041.879673293" Jan 20 17:49:08 crc kubenswrapper[4558]: I0120 17:49:08.131254 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/kube-state-metrics-0" podStartSLOduration=1.8102950230000001 podStartE2EDuration="2.131242165s" podCreationTimestamp="2026-01-20 17:49:06 +0000 UTC" firstStartedPulling="2026-01-20 17:49:06.877005178 +0000 UTC m=+4040.637343144" lastFinishedPulling="2026-01-20 17:49:07.197952319 +0000 UTC m=+4040.958290286" observedRunningTime="2026-01-20 17:49:08.126411853 +0000 UTC m=+4041.886749820" watchObservedRunningTime="2026-01-20 17:49:08.131242165 +0000 UTC m=+4041.891580132" Jan 20 17:49:08 crc kubenswrapper[4558]: I0120 17:49:08.575414 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e9c779bd-e0e1-4c5d-b250-19ea3af0ebd7" path="/var/lib/kubelet/pods/e9c779bd-e0e1-4c5d-b250-19ea3af0ebd7/volumes" Jan 20 17:49:08 crc kubenswrapper[4558]: I0120 17:49:08.853703 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:49:08 crc kubenswrapper[4558]: I0120 17:49:08.859011 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:49:08 crc kubenswrapper[4558]: I0120 17:49:08.941250 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r5d98\" (UniqueName: \"kubernetes.io/projected/05f17eb0-d7a8-4f7d-bf24-34e75e0e6a24-kube-api-access-r5d98\") pod \"05f17eb0-d7a8-4f7d-bf24-34e75e0e6a24\" (UID: \"05f17eb0-d7a8-4f7d-bf24-34e75e0e6a24\") " Jan 20 17:49:08 crc kubenswrapper[4558]: I0120 17:49:08.941303 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05f17eb0-d7a8-4f7d-bf24-34e75e0e6a24-combined-ca-bundle\") pod \"05f17eb0-d7a8-4f7d-bf24-34e75e0e6a24\" (UID: \"05f17eb0-d7a8-4f7d-bf24-34e75e0e6a24\") " Jan 20 17:49:08 crc kubenswrapper[4558]: I0120 17:49:08.941350 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a508c4d0-5049-47c6-a7f9-1ba399ad7fc2-logs\") pod \"a508c4d0-5049-47c6-a7f9-1ba399ad7fc2\" (UID: \"a508c4d0-5049-47c6-a7f9-1ba399ad7fc2\") " Jan 20 17:49:08 crc kubenswrapper[4558]: I0120 17:49:08.941442 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a508c4d0-5049-47c6-a7f9-1ba399ad7fc2-config-data\") pod \"a508c4d0-5049-47c6-a7f9-1ba399ad7fc2\" (UID: \"a508c4d0-5049-47c6-a7f9-1ba399ad7fc2\") " Jan 20 17:49:08 crc kubenswrapper[4558]: I0120 17:49:08.941494 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a508c4d0-5049-47c6-a7f9-1ba399ad7fc2-combined-ca-bundle\") pod \"a508c4d0-5049-47c6-a7f9-1ba399ad7fc2\" (UID: \"a508c4d0-5049-47c6-a7f9-1ba399ad7fc2\") " Jan 20 17:49:08 crc kubenswrapper[4558]: I0120 17:49:08.941532 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05f17eb0-d7a8-4f7d-bf24-34e75e0e6a24-config-data\") pod \"05f17eb0-d7a8-4f7d-bf24-34e75e0e6a24\" (UID: \"05f17eb0-d7a8-4f7d-bf24-34e75e0e6a24\") " Jan 20 17:49:08 crc kubenswrapper[4558]: I0120 17:49:08.941584 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/05f17eb0-d7a8-4f7d-bf24-34e75e0e6a24-logs\") pod \"05f17eb0-d7a8-4f7d-bf24-34e75e0e6a24\" (UID: \"05f17eb0-d7a8-4f7d-bf24-34e75e0e6a24\") " Jan 20 17:49:08 crc kubenswrapper[4558]: I0120 17:49:08.941607 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/a508c4d0-5049-47c6-a7f9-1ba399ad7fc2-nova-metadata-tls-certs\") pod \"a508c4d0-5049-47c6-a7f9-1ba399ad7fc2\" (UID: \"a508c4d0-5049-47c6-a7f9-1ba399ad7fc2\") " Jan 20 17:49:08 crc kubenswrapper[4558]: I0120 17:49:08.941712 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vdnqt\" (UniqueName: \"kubernetes.io/projected/a508c4d0-5049-47c6-a7f9-1ba399ad7fc2-kube-api-access-vdnqt\") pod \"a508c4d0-5049-47c6-a7f9-1ba399ad7fc2\" (UID: \"a508c4d0-5049-47c6-a7f9-1ba399ad7fc2\") " Jan 20 17:49:08 crc kubenswrapper[4558]: I0120 17:49:08.942817 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a508c4d0-5049-47c6-a7f9-1ba399ad7fc2-logs" (OuterVolumeSpecName: "logs") pod "a508c4d0-5049-47c6-a7f9-1ba399ad7fc2" (UID: 
"a508c4d0-5049-47c6-a7f9-1ba399ad7fc2"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:49:08 crc kubenswrapper[4558]: I0120 17:49:08.943270 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/05f17eb0-d7a8-4f7d-bf24-34e75e0e6a24-logs" (OuterVolumeSpecName: "logs") pod "05f17eb0-d7a8-4f7d-bf24-34e75e0e6a24" (UID: "05f17eb0-d7a8-4f7d-bf24-34e75e0e6a24"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:49:08 crc kubenswrapper[4558]: I0120 17:49:08.949890 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/05f17eb0-d7a8-4f7d-bf24-34e75e0e6a24-kube-api-access-r5d98" (OuterVolumeSpecName: "kube-api-access-r5d98") pod "05f17eb0-d7a8-4f7d-bf24-34e75e0e6a24" (UID: "05f17eb0-d7a8-4f7d-bf24-34e75e0e6a24"). InnerVolumeSpecName "kube-api-access-r5d98". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:49:08 crc kubenswrapper[4558]: I0120 17:49:08.950266 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a508c4d0-5049-47c6-a7f9-1ba399ad7fc2-kube-api-access-vdnqt" (OuterVolumeSpecName: "kube-api-access-vdnqt") pod "a508c4d0-5049-47c6-a7f9-1ba399ad7fc2" (UID: "a508c4d0-5049-47c6-a7f9-1ba399ad7fc2"). InnerVolumeSpecName "kube-api-access-vdnqt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:49:08 crc kubenswrapper[4558]: I0120 17:49:08.970536 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/05f17eb0-d7a8-4f7d-bf24-34e75e0e6a24-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "05f17eb0-d7a8-4f7d-bf24-34e75e0e6a24" (UID: "05f17eb0-d7a8-4f7d-bf24-34e75e0e6a24"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:49:08 crc kubenswrapper[4558]: I0120 17:49:08.978270 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/05f17eb0-d7a8-4f7d-bf24-34e75e0e6a24-config-data" (OuterVolumeSpecName: "config-data") pod "05f17eb0-d7a8-4f7d-bf24-34e75e0e6a24" (UID: "05f17eb0-d7a8-4f7d-bf24-34e75e0e6a24"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:49:08 crc kubenswrapper[4558]: I0120 17:49:08.980436 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a508c4d0-5049-47c6-a7f9-1ba399ad7fc2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a508c4d0-5049-47c6-a7f9-1ba399ad7fc2" (UID: "a508c4d0-5049-47c6-a7f9-1ba399ad7fc2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:49:08 crc kubenswrapper[4558]: I0120 17:49:08.980661 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a508c4d0-5049-47c6-a7f9-1ba399ad7fc2-config-data" (OuterVolumeSpecName: "config-data") pod "a508c4d0-5049-47c6-a7f9-1ba399ad7fc2" (UID: "a508c4d0-5049-47c6-a7f9-1ba399ad7fc2"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:49:08 crc kubenswrapper[4558]: I0120 17:49:08.997727 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a508c4d0-5049-47c6-a7f9-1ba399ad7fc2-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "a508c4d0-5049-47c6-a7f9-1ba399ad7fc2" (UID: "a508c4d0-5049-47c6-a7f9-1ba399ad7fc2"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.044758 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r5d98\" (UniqueName: \"kubernetes.io/projected/05f17eb0-d7a8-4f7d-bf24-34e75e0e6a24-kube-api-access-r5d98\") on node \"crc\" DevicePath \"\"" Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.044789 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05f17eb0-d7a8-4f7d-bf24-34e75e0e6a24-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.044799 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a508c4d0-5049-47c6-a7f9-1ba399ad7fc2-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.044808 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a508c4d0-5049-47c6-a7f9-1ba399ad7fc2-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.044816 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a508c4d0-5049-47c6-a7f9-1ba399ad7fc2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.044824 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05f17eb0-d7a8-4f7d-bf24-34e75e0e6a24-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.044832 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/05f17eb0-d7a8-4f7d-bf24-34e75e0e6a24-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.044840 4558 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/a508c4d0-5049-47c6-a7f9-1ba399ad7fc2-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.044848 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vdnqt\" (UniqueName: \"kubernetes.io/projected/a508c4d0-5049-47c6-a7f9-1ba399ad7fc2-kube-api-access-vdnqt\") on node \"crc\" DevicePath \"\"" Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.113436 4558 generic.go:334] "Generic (PLEG): container finished" podID="05f17eb0-d7a8-4f7d-bf24-34e75e0e6a24" containerID="c4ec83a5a4ed7f6b69650c3b13959b7f6df29650bd42d4526c4c3f55f297e5df" exitCode=0 Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.113502 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.113533 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"05f17eb0-d7a8-4f7d-bf24-34e75e0e6a24","Type":"ContainerDied","Data":"c4ec83a5a4ed7f6b69650c3b13959b7f6df29650bd42d4526c4c3f55f297e5df"} Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.113858 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"05f17eb0-d7a8-4f7d-bf24-34e75e0e6a24","Type":"ContainerDied","Data":"8aa4abab9aff5b5103ac7c39422239fd86921320c88794c5462d6ea1b6bf36eb"} Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.113898 4558 scope.go:117] "RemoveContainer" containerID="c4ec83a5a4ed7f6b69650c3b13959b7f6df29650bd42d4526c4c3f55f297e5df" Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.118197 4558 generic.go:334] "Generic (PLEG): container finished" podID="a508c4d0-5049-47c6-a7f9-1ba399ad7fc2" containerID="b9aa397b914a20fcf309884840c2ce5fe7964e54da091fb0c04e24c43ff8302c" exitCode=0 Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.118301 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.118365 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"a508c4d0-5049-47c6-a7f9-1ba399ad7fc2","Type":"ContainerDied","Data":"b9aa397b914a20fcf309884840c2ce5fe7964e54da091fb0c04e24c43ff8302c"} Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.118402 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"a508c4d0-5049-47c6-a7f9-1ba399ad7fc2","Type":"ContainerDied","Data":"e45aef9a56d62f909be6316bb31e4d042b0ce25f307a1e61e248d2179b0ef229"} Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.136427 4558 scope.go:117] "RemoveContainer" containerID="3026297984a29af622bb8012206ae6219c884afff329a8fb7fc9b3b4a900461d" Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.162576 4558 scope.go:117] "RemoveContainer" containerID="c4ec83a5a4ed7f6b69650c3b13959b7f6df29650bd42d4526c4c3f55f297e5df" Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.165385 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:49:09 crc kubenswrapper[4558]: E0120 17:49:09.170259 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c4ec83a5a4ed7f6b69650c3b13959b7f6df29650bd42d4526c4c3f55f297e5df\": container with ID starting with c4ec83a5a4ed7f6b69650c3b13959b7f6df29650bd42d4526c4c3f55f297e5df not found: ID does not exist" containerID="c4ec83a5a4ed7f6b69650c3b13959b7f6df29650bd42d4526c4c3f55f297e5df" Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.170317 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c4ec83a5a4ed7f6b69650c3b13959b7f6df29650bd42d4526c4c3f55f297e5df"} err="failed to get container status \"c4ec83a5a4ed7f6b69650c3b13959b7f6df29650bd42d4526c4c3f55f297e5df\": rpc error: code = NotFound desc = could not find container \"c4ec83a5a4ed7f6b69650c3b13959b7f6df29650bd42d4526c4c3f55f297e5df\": container with ID starting with c4ec83a5a4ed7f6b69650c3b13959b7f6df29650bd42d4526c4c3f55f297e5df not found: ID does not exist" Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 
17:49:09.170338 4558 scope.go:117] "RemoveContainer" containerID="3026297984a29af622bb8012206ae6219c884afff329a8fb7fc9b3b4a900461d" Jan 20 17:49:09 crc kubenswrapper[4558]: E0120 17:49:09.170644 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3026297984a29af622bb8012206ae6219c884afff329a8fb7fc9b3b4a900461d\": container with ID starting with 3026297984a29af622bb8012206ae6219c884afff329a8fb7fc9b3b4a900461d not found: ID does not exist" containerID="3026297984a29af622bb8012206ae6219c884afff329a8fb7fc9b3b4a900461d" Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.170706 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3026297984a29af622bb8012206ae6219c884afff329a8fb7fc9b3b4a900461d"} err="failed to get container status \"3026297984a29af622bb8012206ae6219c884afff329a8fb7fc9b3b4a900461d\": rpc error: code = NotFound desc = could not find container \"3026297984a29af622bb8012206ae6219c884afff329a8fb7fc9b3b4a900461d\": container with ID starting with 3026297984a29af622bb8012206ae6219c884afff329a8fb7fc9b3b4a900461d not found: ID does not exist" Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.170736 4558 scope.go:117] "RemoveContainer" containerID="b9aa397b914a20fcf309884840c2ce5fe7964e54da091fb0c04e24c43ff8302c" Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.223917 4558 scope.go:117] "RemoveContainer" containerID="10350104f9f524bee4eacbb5f2a0be968b6dcc91c34a07012b9937e75daadec2" Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.228177 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.242642 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.267210 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.276492 4558 scope.go:117] "RemoveContainer" containerID="b9aa397b914a20fcf309884840c2ce5fe7964e54da091fb0c04e24c43ff8302c" Jan 20 17:49:09 crc kubenswrapper[4558]: E0120 17:49:09.277647 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b9aa397b914a20fcf309884840c2ce5fe7964e54da091fb0c04e24c43ff8302c\": container with ID starting with b9aa397b914a20fcf309884840c2ce5fe7964e54da091fb0c04e24c43ff8302c not found: ID does not exist" containerID="b9aa397b914a20fcf309884840c2ce5fe7964e54da091fb0c04e24c43ff8302c" Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.277730 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b9aa397b914a20fcf309884840c2ce5fe7964e54da091fb0c04e24c43ff8302c"} err="failed to get container status \"b9aa397b914a20fcf309884840c2ce5fe7964e54da091fb0c04e24c43ff8302c\": rpc error: code = NotFound desc = could not find container \"b9aa397b914a20fcf309884840c2ce5fe7964e54da091fb0c04e24c43ff8302c\": container with ID starting with b9aa397b914a20fcf309884840c2ce5fe7964e54da091fb0c04e24c43ff8302c not found: ID does not exist" Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.277786 4558 scope.go:117] "RemoveContainer" containerID="10350104f9f524bee4eacbb5f2a0be968b6dcc91c34a07012b9937e75daadec2" Jan 20 17:49:09 crc kubenswrapper[4558]: E0120 17:49:09.278595 4558 log.go:32] "ContainerStatus from runtime 
service failed" err="rpc error: code = NotFound desc = could not find container \"10350104f9f524bee4eacbb5f2a0be968b6dcc91c34a07012b9937e75daadec2\": container with ID starting with 10350104f9f524bee4eacbb5f2a0be968b6dcc91c34a07012b9937e75daadec2 not found: ID does not exist" containerID="10350104f9f524bee4eacbb5f2a0be968b6dcc91c34a07012b9937e75daadec2" Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.278641 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"10350104f9f524bee4eacbb5f2a0be968b6dcc91c34a07012b9937e75daadec2"} err="failed to get container status \"10350104f9f524bee4eacbb5f2a0be968b6dcc91c34a07012b9937e75daadec2\": rpc error: code = NotFound desc = could not find container \"10350104f9f524bee4eacbb5f2a0be968b6dcc91c34a07012b9937e75daadec2\": container with ID starting with 10350104f9f524bee4eacbb5f2a0be968b6dcc91c34a07012b9937e75daadec2 not found: ID does not exist" Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.282490 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:49:09 crc kubenswrapper[4558]: E0120 17:49:09.283358 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a508c4d0-5049-47c6-a7f9-1ba399ad7fc2" containerName="nova-metadata-metadata" Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.283380 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a508c4d0-5049-47c6-a7f9-1ba399ad7fc2" containerName="nova-metadata-metadata" Jan 20 17:49:09 crc kubenswrapper[4558]: E0120 17:49:09.283401 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05f17eb0-d7a8-4f7d-bf24-34e75e0e6a24" containerName="nova-api-api" Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.283408 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="05f17eb0-d7a8-4f7d-bf24-34e75e0e6a24" containerName="nova-api-api" Jan 20 17:49:09 crc kubenswrapper[4558]: E0120 17:49:09.283448 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05f17eb0-d7a8-4f7d-bf24-34e75e0e6a24" containerName="nova-api-log" Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.283454 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="05f17eb0-d7a8-4f7d-bf24-34e75e0e6a24" containerName="nova-api-log" Jan 20 17:49:09 crc kubenswrapper[4558]: E0120 17:49:09.283466 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a508c4d0-5049-47c6-a7f9-1ba399ad7fc2" containerName="nova-metadata-log" Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.283472 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a508c4d0-5049-47c6-a7f9-1ba399ad7fc2" containerName="nova-metadata-log" Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.283687 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a508c4d0-5049-47c6-a7f9-1ba399ad7fc2" containerName="nova-metadata-metadata" Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.283718 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="05f17eb0-d7a8-4f7d-bf24-34e75e0e6a24" containerName="nova-api-api" Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.283732 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a508c4d0-5049-47c6-a7f9-1ba399ad7fc2" containerName="nova-metadata-log" Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.283746 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="05f17eb0-d7a8-4f7d-bf24-34e75e0e6a24" containerName="nova-api-log" Jan 20 17:49:09 crc 
kubenswrapper[4558]: I0120 17:49:09.284931 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.287193 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-metadata-config-data" Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.287259 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-metadata-internal-svc" Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.290898 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.298767 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.300549 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.304381 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-api-config-data" Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.304838 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.353279 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/7d2f1782-aa1c-4382-b682-27ce8f37d139-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"7d2f1782-aa1c-4382-b682-27ce8f37d139\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.353486 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/49faf171-8f46-4aed-add3-d40e6a82fc9c-config-data\") pod \"nova-api-0\" (UID: \"49faf171-8f46-4aed-add3-d40e6a82fc9c\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.353636 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d2f1782-aa1c-4382-b682-27ce8f37d139-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"7d2f1782-aa1c-4382-b682-27ce8f37d139\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.353741 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d2f1782-aa1c-4382-b682-27ce8f37d139-config-data\") pod \"nova-metadata-0\" (UID: \"7d2f1782-aa1c-4382-b682-27ce8f37d139\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.353823 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-65v4s\" (UniqueName: \"kubernetes.io/projected/49faf171-8f46-4aed-add3-d40e6a82fc9c-kube-api-access-65v4s\") pod \"nova-api-0\" (UID: \"49faf171-8f46-4aed-add3-d40e6a82fc9c\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.354021 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/49faf171-8f46-4aed-add3-d40e6a82fc9c-logs\") pod \"nova-api-0\" (UID: \"49faf171-8f46-4aed-add3-d40e6a82fc9c\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.354216 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7d2f1782-aa1c-4382-b682-27ce8f37d139-logs\") pod \"nova-metadata-0\" (UID: \"7d2f1782-aa1c-4382-b682-27ce8f37d139\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.354320 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/49faf171-8f46-4aed-add3-d40e6a82fc9c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"49faf171-8f46-4aed-add3-d40e6a82fc9c\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.354481 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ds4zt\" (UniqueName: \"kubernetes.io/projected/7d2f1782-aa1c-4382-b682-27ce8f37d139-kube-api-access-ds4zt\") pod \"nova-metadata-0\" (UID: \"7d2f1782-aa1c-4382-b682-27ce8f37d139\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.455316 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d2f1782-aa1c-4382-b682-27ce8f37d139-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"7d2f1782-aa1c-4382-b682-27ce8f37d139\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.455467 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d2f1782-aa1c-4382-b682-27ce8f37d139-config-data\") pod \"nova-metadata-0\" (UID: \"7d2f1782-aa1c-4382-b682-27ce8f37d139\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.455573 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-65v4s\" (UniqueName: \"kubernetes.io/projected/49faf171-8f46-4aed-add3-d40e6a82fc9c-kube-api-access-65v4s\") pod \"nova-api-0\" (UID: \"49faf171-8f46-4aed-add3-d40e6a82fc9c\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.455699 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/49faf171-8f46-4aed-add3-d40e6a82fc9c-logs\") pod \"nova-api-0\" (UID: \"49faf171-8f46-4aed-add3-d40e6a82fc9c\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.455796 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7d2f1782-aa1c-4382-b682-27ce8f37d139-logs\") pod \"nova-metadata-0\" (UID: \"7d2f1782-aa1c-4382-b682-27ce8f37d139\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.455871 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/49faf171-8f46-4aed-add3-d40e6a82fc9c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"49faf171-8f46-4aed-add3-d40e6a82fc9c\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 
17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.455992 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ds4zt\" (UniqueName: \"kubernetes.io/projected/7d2f1782-aa1c-4382-b682-27ce8f37d139-kube-api-access-ds4zt\") pod \"nova-metadata-0\" (UID: \"7d2f1782-aa1c-4382-b682-27ce8f37d139\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.456087 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/7d2f1782-aa1c-4382-b682-27ce8f37d139-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"7d2f1782-aa1c-4382-b682-27ce8f37d139\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.456213 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/49faf171-8f46-4aed-add3-d40e6a82fc9c-config-data\") pod \"nova-api-0\" (UID: \"49faf171-8f46-4aed-add3-d40e6a82fc9c\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.456096 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/49faf171-8f46-4aed-add3-d40e6a82fc9c-logs\") pod \"nova-api-0\" (UID: \"49faf171-8f46-4aed-add3-d40e6a82fc9c\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.456339 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7d2f1782-aa1c-4382-b682-27ce8f37d139-logs\") pod \"nova-metadata-0\" (UID: \"7d2f1782-aa1c-4382-b682-27ce8f37d139\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.459118 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d2f1782-aa1c-4382-b682-27ce8f37d139-config-data\") pod \"nova-metadata-0\" (UID: \"7d2f1782-aa1c-4382-b682-27ce8f37d139\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.459725 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/49faf171-8f46-4aed-add3-d40e6a82fc9c-config-data\") pod \"nova-api-0\" (UID: \"49faf171-8f46-4aed-add3-d40e6a82fc9c\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.459970 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/49faf171-8f46-4aed-add3-d40e6a82fc9c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"49faf171-8f46-4aed-add3-d40e6a82fc9c\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.468987 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/7d2f1782-aa1c-4382-b682-27ce8f37d139-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"7d2f1782-aa1c-4382-b682-27ce8f37d139\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.469053 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d2f1782-aa1c-4382-b682-27ce8f37d139-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: 
\"7d2f1782-aa1c-4382-b682-27ce8f37d139\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.469680 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ds4zt\" (UniqueName: \"kubernetes.io/projected/7d2f1782-aa1c-4382-b682-27ce8f37d139-kube-api-access-ds4zt\") pod \"nova-metadata-0\" (UID: \"7d2f1782-aa1c-4382-b682-27ce8f37d139\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.469868 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-65v4s\" (UniqueName: \"kubernetes.io/projected/49faf171-8f46-4aed-add3-d40e6a82fc9c-kube-api-access-65v4s\") pod \"nova-api-0\" (UID: \"49faf171-8f46-4aed-add3-d40e6a82fc9c\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.656462 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:49:09 crc kubenswrapper[4558]: I0120 17:49:09.663113 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:49:10 crc kubenswrapper[4558]: I0120 17:49:10.143346 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:49:10 crc kubenswrapper[4558]: W0120 17:49:10.144128 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod49faf171_8f46_4aed_add3_d40e6a82fc9c.slice/crio-1eeed8c9576d22b8adbd03f8229ef2b3f7917ad17415f6931cf15e1ee1f4f405 WatchSource:0}: Error finding container 1eeed8c9576d22b8adbd03f8229ef2b3f7917ad17415f6931cf15e1ee1f4f405: Status 404 returned error can't find the container with id 1eeed8c9576d22b8adbd03f8229ef2b3f7917ad17415f6931cf15e1ee1f4f405 Jan 20 17:49:10 crc kubenswrapper[4558]: I0120 17:49:10.154439 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:49:10 crc kubenswrapper[4558]: W0120 17:49:10.167832 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7d2f1782_aa1c_4382_b682_27ce8f37d139.slice/crio-6142d5296dca7952e213a3cf92926d0aa17b375a3a069f0621dadfed64ec3b49 WatchSource:0}: Error finding container 6142d5296dca7952e213a3cf92926d0aa17b375a3a069f0621dadfed64ec3b49: Status 404 returned error can't find the container with id 6142d5296dca7952e213a3cf92926d0aa17b375a3a069f0621dadfed64ec3b49 Jan 20 17:49:10 crc kubenswrapper[4558]: I0120 17:49:10.580051 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="05f17eb0-d7a8-4f7d-bf24-34e75e0e6a24" path="/var/lib/kubelet/pods/05f17eb0-d7a8-4f7d-bf24-34e75e0e6a24/volumes" Jan 20 17:49:10 crc kubenswrapper[4558]: I0120 17:49:10.581114 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a508c4d0-5049-47c6-a7f9-1ba399ad7fc2" path="/var/lib/kubelet/pods/a508c4d0-5049-47c6-a7f9-1ba399ad7fc2/volumes" Jan 20 17:49:11 crc kubenswrapper[4558]: I0120 17:49:11.142316 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"7d2f1782-aa1c-4382-b682-27ce8f37d139","Type":"ContainerStarted","Data":"fdc46115e0c01656230d526da5789da8cef6a6ad4afd973eaf326c530afd94ab"} Jan 20 17:49:11 crc kubenswrapper[4558]: I0120 17:49:11.142373 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"7d2f1782-aa1c-4382-b682-27ce8f37d139","Type":"ContainerStarted","Data":"c66c2bcafd61422cbe12474d972cdfbe2b8064d7acd5baa2defb02dbaf1a697b"} Jan 20 17:49:11 crc kubenswrapper[4558]: I0120 17:49:11.142386 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"7d2f1782-aa1c-4382-b682-27ce8f37d139","Type":"ContainerStarted","Data":"6142d5296dca7952e213a3cf92926d0aa17b375a3a069f0621dadfed64ec3b49"} Jan 20 17:49:11 crc kubenswrapper[4558]: I0120 17:49:11.145005 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"49faf171-8f46-4aed-add3-d40e6a82fc9c","Type":"ContainerStarted","Data":"dff43300fab1207f3da7e3ddc3226bbde1bbe928933897986eeb0f64e830e380"} Jan 20 17:49:11 crc kubenswrapper[4558]: I0120 17:49:11.145077 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"49faf171-8f46-4aed-add3-d40e6a82fc9c","Type":"ContainerStarted","Data":"6bbfbb092115e67f03abc95423320945a7a59cbc83d760dcdade93b68b67f6f5"} Jan 20 17:49:11 crc kubenswrapper[4558]: I0120 17:49:11.145093 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"49faf171-8f46-4aed-add3-d40e6a82fc9c","Type":"ContainerStarted","Data":"1eeed8c9576d22b8adbd03f8229ef2b3f7917ad17415f6931cf15e1ee1f4f405"} Jan 20 17:49:11 crc kubenswrapper[4558]: I0120 17:49:11.162354 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-metadata-0" podStartSLOduration=2.162331085 podStartE2EDuration="2.162331085s" podCreationTimestamp="2026-01-20 17:49:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:49:11.15768067 +0000 UTC m=+4044.918018626" watchObservedRunningTime="2026-01-20 17:49:11.162331085 +0000 UTC m=+4044.922669051" Jan 20 17:49:11 crc kubenswrapper[4558]: I0120 17:49:11.178032 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-api-0" podStartSLOduration=2.178015748 podStartE2EDuration="2.178015748s" podCreationTimestamp="2026-01-20 17:49:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:49:11.175709391 +0000 UTC m=+4044.936047358" watchObservedRunningTime="2026-01-20 17:49:11.178015748 +0000 UTC m=+4044.938353715" Jan 20 17:49:12 crc kubenswrapper[4558]: I0120 17:49:12.452311 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:49:14 crc kubenswrapper[4558]: I0120 17:49:14.180441 4558 generic.go:334] "Generic (PLEG): container finished" podID="846b8ff4-6a4d-4429-baf0-affe344f2443" containerID="5b002afccf52b36ca6aaeb4952c36ef3a337dc87396bc522ab4018f88bc487d0" exitCode=0 Jan 20 17:49:14 crc kubenswrapper[4558]: I0120 17:49:14.180691 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"846b8ff4-6a4d-4429-baf0-affe344f2443","Type":"ContainerDied","Data":"5b002afccf52b36ca6aaeb4952c36ef3a337dc87396bc522ab4018f88bc487d0"} Jan 20 17:49:14 crc kubenswrapper[4558]: I0120 17:49:14.337628 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:49:14 crc kubenswrapper[4558]: I0120 17:49:14.362037 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/846b8ff4-6a4d-4429-baf0-affe344f2443-run-httpd\") pod \"846b8ff4-6a4d-4429-baf0-affe344f2443\" (UID: \"846b8ff4-6a4d-4429-baf0-affe344f2443\") " Jan 20 17:49:14 crc kubenswrapper[4558]: I0120 17:49:14.362102 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/846b8ff4-6a4d-4429-baf0-affe344f2443-sg-core-conf-yaml\") pod \"846b8ff4-6a4d-4429-baf0-affe344f2443\" (UID: \"846b8ff4-6a4d-4429-baf0-affe344f2443\") " Jan 20 17:49:14 crc kubenswrapper[4558]: I0120 17:49:14.362135 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7bz5r\" (UniqueName: \"kubernetes.io/projected/846b8ff4-6a4d-4429-baf0-affe344f2443-kube-api-access-7bz5r\") pod \"846b8ff4-6a4d-4429-baf0-affe344f2443\" (UID: \"846b8ff4-6a4d-4429-baf0-affe344f2443\") " Jan 20 17:49:14 crc kubenswrapper[4558]: I0120 17:49:14.362204 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/846b8ff4-6a4d-4429-baf0-affe344f2443-log-httpd\") pod \"846b8ff4-6a4d-4429-baf0-affe344f2443\" (UID: \"846b8ff4-6a4d-4429-baf0-affe344f2443\") " Jan 20 17:49:14 crc kubenswrapper[4558]: I0120 17:49:14.362247 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/846b8ff4-6a4d-4429-baf0-affe344f2443-combined-ca-bundle\") pod \"846b8ff4-6a4d-4429-baf0-affe344f2443\" (UID: \"846b8ff4-6a4d-4429-baf0-affe344f2443\") " Jan 20 17:49:14 crc kubenswrapper[4558]: I0120 17:49:14.362642 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/846b8ff4-6a4d-4429-baf0-affe344f2443-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "846b8ff4-6a4d-4429-baf0-affe344f2443" (UID: "846b8ff4-6a4d-4429-baf0-affe344f2443"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:49:14 crc kubenswrapper[4558]: I0120 17:49:14.362736 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/846b8ff4-6a4d-4429-baf0-affe344f2443-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "846b8ff4-6a4d-4429-baf0-affe344f2443" (UID: "846b8ff4-6a4d-4429-baf0-affe344f2443"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:49:14 crc kubenswrapper[4558]: I0120 17:49:14.362289 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/846b8ff4-6a4d-4429-baf0-affe344f2443-scripts\") pod \"846b8ff4-6a4d-4429-baf0-affe344f2443\" (UID: \"846b8ff4-6a4d-4429-baf0-affe344f2443\") " Jan 20 17:49:14 crc kubenswrapper[4558]: I0120 17:49:14.362944 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/846b8ff4-6a4d-4429-baf0-affe344f2443-config-data\") pod \"846b8ff4-6a4d-4429-baf0-affe344f2443\" (UID: \"846b8ff4-6a4d-4429-baf0-affe344f2443\") " Jan 20 17:49:14 crc kubenswrapper[4558]: I0120 17:49:14.363251 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/846b8ff4-6a4d-4429-baf0-affe344f2443-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:49:14 crc kubenswrapper[4558]: I0120 17:49:14.363269 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/846b8ff4-6a4d-4429-baf0-affe344f2443-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:49:14 crc kubenswrapper[4558]: I0120 17:49:14.369319 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/846b8ff4-6a4d-4429-baf0-affe344f2443-scripts" (OuterVolumeSpecName: "scripts") pod "846b8ff4-6a4d-4429-baf0-affe344f2443" (UID: "846b8ff4-6a4d-4429-baf0-affe344f2443"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:49:14 crc kubenswrapper[4558]: I0120 17:49:14.369357 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/846b8ff4-6a4d-4429-baf0-affe344f2443-kube-api-access-7bz5r" (OuterVolumeSpecName: "kube-api-access-7bz5r") pod "846b8ff4-6a4d-4429-baf0-affe344f2443" (UID: "846b8ff4-6a4d-4429-baf0-affe344f2443"). InnerVolumeSpecName "kube-api-access-7bz5r". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:49:14 crc kubenswrapper[4558]: I0120 17:49:14.395119 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/846b8ff4-6a4d-4429-baf0-affe344f2443-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "846b8ff4-6a4d-4429-baf0-affe344f2443" (UID: "846b8ff4-6a4d-4429-baf0-affe344f2443"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:49:14 crc kubenswrapper[4558]: I0120 17:49:14.426662 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/846b8ff4-6a4d-4429-baf0-affe344f2443-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "846b8ff4-6a4d-4429-baf0-affe344f2443" (UID: "846b8ff4-6a4d-4429-baf0-affe344f2443"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:49:14 crc kubenswrapper[4558]: I0120 17:49:14.435360 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/846b8ff4-6a4d-4429-baf0-affe344f2443-config-data" (OuterVolumeSpecName: "config-data") pod "846b8ff4-6a4d-4429-baf0-affe344f2443" (UID: "846b8ff4-6a4d-4429-baf0-affe344f2443"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:49:14 crc kubenswrapper[4558]: I0120 17:49:14.467586 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/846b8ff4-6a4d-4429-baf0-affe344f2443-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:49:14 crc kubenswrapper[4558]: I0120 17:49:14.467620 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7bz5r\" (UniqueName: \"kubernetes.io/projected/846b8ff4-6a4d-4429-baf0-affe344f2443-kube-api-access-7bz5r\") on node \"crc\" DevicePath \"\"" Jan 20 17:49:14 crc kubenswrapper[4558]: I0120 17:49:14.467633 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/846b8ff4-6a4d-4429-baf0-affe344f2443-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:49:14 crc kubenswrapper[4558]: I0120 17:49:14.467646 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/846b8ff4-6a4d-4429-baf0-affe344f2443-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:49:14 crc kubenswrapper[4558]: I0120 17:49:14.467654 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/846b8ff4-6a4d-4429-baf0-affe344f2443-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:49:14 crc kubenswrapper[4558]: I0120 17:49:14.657157 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:49:14 crc kubenswrapper[4558]: I0120 17:49:14.657221 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:49:15 crc kubenswrapper[4558]: I0120 17:49:15.191724 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"846b8ff4-6a4d-4429-baf0-affe344f2443","Type":"ContainerDied","Data":"197285248b972a8e1cbf847911ca4cfe00f863c2018dc00c80b24115b79ca5be"} Jan 20 17:49:15 crc kubenswrapper[4558]: I0120 17:49:15.192039 4558 scope.go:117] "RemoveContainer" containerID="01900246a5a0ba025c5a871a5361756e9505243d35ab1589a6d16c801bc7454a" Jan 20 17:49:15 crc kubenswrapper[4558]: I0120 17:49:15.191806 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:49:15 crc kubenswrapper[4558]: I0120 17:49:15.217424 4558 scope.go:117] "RemoveContainer" containerID="2fa9e5be032d97b5b704227d73210d4f13c21adfffb77cabee05509d24e7c834" Jan 20 17:49:15 crc kubenswrapper[4558]: I0120 17:49:15.223443 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:49:15 crc kubenswrapper[4558]: I0120 17:49:15.244327 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:49:15 crc kubenswrapper[4558]: I0120 17:49:15.259210 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:49:15 crc kubenswrapper[4558]: E0120 17:49:15.259642 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="846b8ff4-6a4d-4429-baf0-affe344f2443" containerName="proxy-httpd" Jan 20 17:49:15 crc kubenswrapper[4558]: I0120 17:49:15.259660 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="846b8ff4-6a4d-4429-baf0-affe344f2443" containerName="proxy-httpd" Jan 20 17:49:15 crc kubenswrapper[4558]: E0120 17:49:15.259700 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="846b8ff4-6a4d-4429-baf0-affe344f2443" containerName="ceilometer-notification-agent" Jan 20 17:49:15 crc kubenswrapper[4558]: I0120 17:49:15.259706 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="846b8ff4-6a4d-4429-baf0-affe344f2443" containerName="ceilometer-notification-agent" Jan 20 17:49:15 crc kubenswrapper[4558]: E0120 17:49:15.259717 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="846b8ff4-6a4d-4429-baf0-affe344f2443" containerName="ceilometer-central-agent" Jan 20 17:49:15 crc kubenswrapper[4558]: I0120 17:49:15.259723 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="846b8ff4-6a4d-4429-baf0-affe344f2443" containerName="ceilometer-central-agent" Jan 20 17:49:15 crc kubenswrapper[4558]: E0120 17:49:15.259744 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="846b8ff4-6a4d-4429-baf0-affe344f2443" containerName="sg-core" Jan 20 17:49:15 crc kubenswrapper[4558]: I0120 17:49:15.259750 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="846b8ff4-6a4d-4429-baf0-affe344f2443" containerName="sg-core" Jan 20 17:49:15 crc kubenswrapper[4558]: I0120 17:49:15.259934 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="846b8ff4-6a4d-4429-baf0-affe344f2443" containerName="proxy-httpd" Jan 20 17:49:15 crc kubenswrapper[4558]: I0120 17:49:15.259943 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="846b8ff4-6a4d-4429-baf0-affe344f2443" containerName="sg-core" Jan 20 17:49:15 crc kubenswrapper[4558]: I0120 17:49:15.259953 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="846b8ff4-6a4d-4429-baf0-affe344f2443" containerName="ceilometer-notification-agent" Jan 20 17:49:15 crc kubenswrapper[4558]: I0120 17:49:15.259962 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="846b8ff4-6a4d-4429-baf0-affe344f2443" containerName="ceilometer-central-agent" Jan 20 17:49:15 crc kubenswrapper[4558]: I0120 17:49:15.261502 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:49:15 crc kubenswrapper[4558]: I0120 17:49:15.265630 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ceilometer-internal-svc" Jan 20 17:49:15 crc kubenswrapper[4558]: I0120 17:49:15.266263 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:49:15 crc kubenswrapper[4558]: I0120 17:49:15.266509 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:49:15 crc kubenswrapper[4558]: I0120 17:49:15.277414 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:49:15 crc kubenswrapper[4558]: I0120 17:49:15.289311 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/268d5f08-af8d-499f-876d-30ba10fc3ca4-scripts\") pod \"ceilometer-0\" (UID: \"268d5f08-af8d-499f-876d-30ba10fc3ca4\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:49:15 crc kubenswrapper[4558]: I0120 17:49:15.289359 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/268d5f08-af8d-499f-876d-30ba10fc3ca4-log-httpd\") pod \"ceilometer-0\" (UID: \"268d5f08-af8d-499f-876d-30ba10fc3ca4\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:49:15 crc kubenswrapper[4558]: I0120 17:49:15.289384 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/268d5f08-af8d-499f-876d-30ba10fc3ca4-config-data\") pod \"ceilometer-0\" (UID: \"268d5f08-af8d-499f-876d-30ba10fc3ca4\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:49:15 crc kubenswrapper[4558]: I0120 17:49:15.289408 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/268d5f08-af8d-499f-876d-30ba10fc3ca4-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"268d5f08-af8d-499f-876d-30ba10fc3ca4\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:49:15 crc kubenswrapper[4558]: I0120 17:49:15.289439 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gk7ts\" (UniqueName: \"kubernetes.io/projected/268d5f08-af8d-499f-876d-30ba10fc3ca4-kube-api-access-gk7ts\") pod \"ceilometer-0\" (UID: \"268d5f08-af8d-499f-876d-30ba10fc3ca4\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:49:15 crc kubenswrapper[4558]: I0120 17:49:15.289458 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/268d5f08-af8d-499f-876d-30ba10fc3ca4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"268d5f08-af8d-499f-876d-30ba10fc3ca4\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:49:15 crc kubenswrapper[4558]: I0120 17:49:15.289523 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/268d5f08-af8d-499f-876d-30ba10fc3ca4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"268d5f08-af8d-499f-876d-30ba10fc3ca4\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:49:15 crc kubenswrapper[4558]: I0120 17:49:15.289545 4558 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/268d5f08-af8d-499f-876d-30ba10fc3ca4-run-httpd\") pod \"ceilometer-0\" (UID: \"268d5f08-af8d-499f-876d-30ba10fc3ca4\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:49:15 crc kubenswrapper[4558]: I0120 17:49:15.294776 4558 scope.go:117] "RemoveContainer" containerID="5b002afccf52b36ca6aaeb4952c36ef3a337dc87396bc522ab4018f88bc487d0" Jan 20 17:49:15 crc kubenswrapper[4558]: I0120 17:49:15.366378 4558 scope.go:117] "RemoveContainer" containerID="a0200376b46aaa5f9285cd9453a248f2b1abe657063801fbd9128f361cc1773b" Jan 20 17:49:15 crc kubenswrapper[4558]: I0120 17:49:15.400350 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/268d5f08-af8d-499f-876d-30ba10fc3ca4-scripts\") pod \"ceilometer-0\" (UID: \"268d5f08-af8d-499f-876d-30ba10fc3ca4\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:49:15 crc kubenswrapper[4558]: I0120 17:49:15.400395 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/268d5f08-af8d-499f-876d-30ba10fc3ca4-log-httpd\") pod \"ceilometer-0\" (UID: \"268d5f08-af8d-499f-876d-30ba10fc3ca4\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:49:15 crc kubenswrapper[4558]: I0120 17:49:15.400426 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/268d5f08-af8d-499f-876d-30ba10fc3ca4-config-data\") pod \"ceilometer-0\" (UID: \"268d5f08-af8d-499f-876d-30ba10fc3ca4\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:49:15 crc kubenswrapper[4558]: I0120 17:49:15.400454 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/268d5f08-af8d-499f-876d-30ba10fc3ca4-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"268d5f08-af8d-499f-876d-30ba10fc3ca4\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:49:15 crc kubenswrapper[4558]: I0120 17:49:15.400485 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gk7ts\" (UniqueName: \"kubernetes.io/projected/268d5f08-af8d-499f-876d-30ba10fc3ca4-kube-api-access-gk7ts\") pod \"ceilometer-0\" (UID: \"268d5f08-af8d-499f-876d-30ba10fc3ca4\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:49:15 crc kubenswrapper[4558]: I0120 17:49:15.400506 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/268d5f08-af8d-499f-876d-30ba10fc3ca4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"268d5f08-af8d-499f-876d-30ba10fc3ca4\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:49:15 crc kubenswrapper[4558]: I0120 17:49:15.400572 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/268d5f08-af8d-499f-876d-30ba10fc3ca4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"268d5f08-af8d-499f-876d-30ba10fc3ca4\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:49:15 crc kubenswrapper[4558]: I0120 17:49:15.400597 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/268d5f08-af8d-499f-876d-30ba10fc3ca4-run-httpd\") pod \"ceilometer-0\" (UID: 
\"268d5f08-af8d-499f-876d-30ba10fc3ca4\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:49:15 crc kubenswrapper[4558]: I0120 17:49:15.401002 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/268d5f08-af8d-499f-876d-30ba10fc3ca4-log-httpd\") pod \"ceilometer-0\" (UID: \"268d5f08-af8d-499f-876d-30ba10fc3ca4\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:49:15 crc kubenswrapper[4558]: I0120 17:49:15.401240 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/268d5f08-af8d-499f-876d-30ba10fc3ca4-run-httpd\") pod \"ceilometer-0\" (UID: \"268d5f08-af8d-499f-876d-30ba10fc3ca4\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:49:15 crc kubenswrapper[4558]: I0120 17:49:15.407924 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/268d5f08-af8d-499f-876d-30ba10fc3ca4-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"268d5f08-af8d-499f-876d-30ba10fc3ca4\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:49:15 crc kubenswrapper[4558]: I0120 17:49:15.407937 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/268d5f08-af8d-499f-876d-30ba10fc3ca4-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"268d5f08-af8d-499f-876d-30ba10fc3ca4\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:49:15 crc kubenswrapper[4558]: I0120 17:49:15.408010 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/268d5f08-af8d-499f-876d-30ba10fc3ca4-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"268d5f08-af8d-499f-876d-30ba10fc3ca4\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:49:15 crc kubenswrapper[4558]: I0120 17:49:15.408235 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/268d5f08-af8d-499f-876d-30ba10fc3ca4-config-data\") pod \"ceilometer-0\" (UID: \"268d5f08-af8d-499f-876d-30ba10fc3ca4\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:49:15 crc kubenswrapper[4558]: I0120 17:49:15.408485 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/268d5f08-af8d-499f-876d-30ba10fc3ca4-scripts\") pod \"ceilometer-0\" (UID: \"268d5f08-af8d-499f-876d-30ba10fc3ca4\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:49:15 crc kubenswrapper[4558]: I0120 17:49:15.417634 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gk7ts\" (UniqueName: \"kubernetes.io/projected/268d5f08-af8d-499f-876d-30ba10fc3ca4-kube-api-access-gk7ts\") pod \"ceilometer-0\" (UID: \"268d5f08-af8d-499f-876d-30ba10fc3ca4\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:49:15 crc kubenswrapper[4558]: I0120 17:49:15.615412 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:49:16 crc kubenswrapper[4558]: I0120 17:49:16.060565 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:49:16 crc kubenswrapper[4558]: I0120 17:49:16.204357 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"268d5f08-af8d-499f-876d-30ba10fc3ca4","Type":"ContainerStarted","Data":"81859de4cdee33ed40af81e32910bd93dd0bc705939f1f4a3b38546910d2dfc0"} Jan 20 17:49:16 crc kubenswrapper[4558]: I0120 17:49:16.478861 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:49:16 crc kubenswrapper[4558]: I0120 17:49:16.576082 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="846b8ff4-6a4d-4429-baf0-affe344f2443" path="/var/lib/kubelet/pods/846b8ff4-6a4d-4429-baf0-affe344f2443/volumes" Jan 20 17:49:17 crc kubenswrapper[4558]: I0120 17:49:17.219606 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"268d5f08-af8d-499f-876d-30ba10fc3ca4","Type":"ContainerStarted","Data":"02dbf529236fac41f9a81e907bc058ec8f3295e7c1432a48daef15c61d725167"} Jan 20 17:49:17 crc kubenswrapper[4558]: I0120 17:49:17.453116 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:49:17 crc kubenswrapper[4558]: I0120 17:49:17.490004 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:49:18 crc kubenswrapper[4558]: I0120 17:49:18.229719 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"268d5f08-af8d-499f-876d-30ba10fc3ca4","Type":"ContainerStarted","Data":"e43bc8076015c3b514dfab6161a7d478e55d2f4a95ee199cf00a65a264d3857a"} Jan 20 17:49:18 crc kubenswrapper[4558]: I0120 17:49:18.253845 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:49:19 crc kubenswrapper[4558]: I0120 17:49:19.244261 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"268d5f08-af8d-499f-876d-30ba10fc3ca4","Type":"ContainerStarted","Data":"24a881ca3e01d2f799588052249d88bd52f8a9f62c4ccf2c919e3f0f4b2313e9"} Jan 20 17:49:19 crc kubenswrapper[4558]: I0120 17:49:19.656666 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:49:19 crc kubenswrapper[4558]: I0120 17:49:19.656706 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:49:19 crc kubenswrapper[4558]: I0120 17:49:19.665255 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:49:19 crc kubenswrapper[4558]: I0120 17:49:19.665301 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:49:20 crc kubenswrapper[4558]: I0120 17:49:20.669390 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-metadata-0" podUID="7d2f1782-aa1c-4382-b682-27ce8f37d139" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.1.174:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting 
headers)" Jan 20 17:49:20 crc kubenswrapper[4558]: I0120 17:49:20.669405 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-metadata-0" podUID="7d2f1782-aa1c-4382-b682-27ce8f37d139" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.1.174:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:49:20 crc kubenswrapper[4558]: I0120 17:49:20.752331 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" podUID="49faf171-8f46-4aed-add3-d40e6a82fc9c" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.1.175:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 20 17:49:20 crc kubenswrapper[4558]: I0120 17:49:20.752341 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" podUID="49faf171-8f46-4aed-add3-d40e6a82fc9c" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.1.175:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 20 17:49:21 crc kubenswrapper[4558]: I0120 17:49:21.280109 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"268d5f08-af8d-499f-876d-30ba10fc3ca4","Type":"ContainerStarted","Data":"640505093f304a18cd4f0f3e84a25ae60ed3656a2f35790accd480eca705cff6"} Jan 20 17:49:21 crc kubenswrapper[4558]: I0120 17:49:21.280981 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:49:21 crc kubenswrapper[4558]: I0120 17:49:21.314477 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=2.298727131 podStartE2EDuration="6.314462776s" podCreationTimestamp="2026-01-20 17:49:15 +0000 UTC" firstStartedPulling="2026-01-20 17:49:16.060099895 +0000 UTC m=+4049.820437862" lastFinishedPulling="2026-01-20 17:49:20.07583554 +0000 UTC m=+4053.836173507" observedRunningTime="2026-01-20 17:49:21.31188053 +0000 UTC m=+4055.072218497" watchObservedRunningTime="2026-01-20 17:49:21.314462776 +0000 UTC m=+4055.074800744" Jan 20 17:49:23 crc kubenswrapper[4558]: I0120 17:49:23.571932 4558 scope.go:117] "RemoveContainer" containerID="f073a0f7710dadab5f69a3b61d534dd16273339ca1f705b805b71959affdaedb" Jan 20 17:49:23 crc kubenswrapper[4558]: I0120 17:49:23.594374 4558 scope.go:117] "RemoveContainer" containerID="5c3fbeabe3ae0665acc4caf4f15bfaffd0a0fffe254192c44ae1df936c59f917" Jan 20 17:49:23 crc kubenswrapper[4558]: I0120 17:49:23.611307 4558 scope.go:117] "RemoveContainer" containerID="3bca3c440d169db0b20179eec25960ffaa1be1f116d15347d51dd6ac49e4557f" Jan 20 17:49:23 crc kubenswrapper[4558]: I0120 17:49:23.635114 4558 scope.go:117] "RemoveContainer" containerID="1abb2f52a1084fb284622dfe078fa9cc2613944319be12588b9b290276f35a83" Jan 20 17:49:23 crc kubenswrapper[4558]: I0120 17:49:23.649587 4558 scope.go:117] "RemoveContainer" containerID="dcf9b9124b71d8258a72a33535b49292f6fb9585d02e200aa73d63e0ff9b6a33" Jan 20 17:49:23 crc kubenswrapper[4558]: I0120 17:49:23.666533 4558 scope.go:117] "RemoveContainer" containerID="2238e5e4c9121a6f35ac99b7eefaed26a7b72dd8f5d91d2775ab73bb36849ee2" Jan 20 17:49:23 crc kubenswrapper[4558]: I0120 17:49:23.707123 4558 scope.go:117] "RemoveContainer" containerID="d856400d868c5a18aeb6d59839565030a9d6d928c1563362781a0530b884b4e7" Jan 20 17:49:23 crc kubenswrapper[4558]: I0120 
17:49:23.741121 4558 scope.go:117] "RemoveContainer" containerID="b225f9e53aaf9806843cb0212349c3e9e4362df912776534664c2dac9cdadb6b" Jan 20 17:49:23 crc kubenswrapper[4558]: I0120 17:49:23.771079 4558 scope.go:117] "RemoveContainer" containerID="49eac06a5bbf59d10a57d3ba182a98399ea7f8d86e783046aa38fe5883748c5e" Jan 20 17:49:23 crc kubenswrapper[4558]: I0120 17:49:23.804744 4558 scope.go:117] "RemoveContainer" containerID="335e25f3fbe4b65eb8180e118c21059bde36ef7ba7c2a18579f83e6691932e37" Jan 20 17:49:23 crc kubenswrapper[4558]: I0120 17:49:23.823963 4558 scope.go:117] "RemoveContainer" containerID="45124db692168b507b34896779f420550426a3011e2b00b8912ca2208f58799e" Jan 20 17:49:23 crc kubenswrapper[4558]: I0120 17:49:23.840677 4558 scope.go:117] "RemoveContainer" containerID="4a06fdd89fd78aff5dd0753c417b307f7a680d4b1887883dc3eccee2ed84c9ec" Jan 20 17:49:23 crc kubenswrapper[4558]: I0120 17:49:23.860713 4558 scope.go:117] "RemoveContainer" containerID="a03dfb5ac0209a3bc1e4b1831999713e3583bebd07521f3e1596b419cc04ea21" Jan 20 17:49:23 crc kubenswrapper[4558]: I0120 17:49:23.887240 4558 scope.go:117] "RemoveContainer" containerID="4dd20571883b253e3f573e46ad0ecb50522ba2e589eece64ceebece4a1df813e" Jan 20 17:49:23 crc kubenswrapper[4558]: I0120 17:49:23.907780 4558 scope.go:117] "RemoveContainer" containerID="baba51653eb144a4334b8651e3e9970d529abaea40101e2e03249e068d4b3b9b" Jan 20 17:49:23 crc kubenswrapper[4558]: I0120 17:49:23.927474 4558 scope.go:117] "RemoveContainer" containerID="a50b4edbabca8fd422d0afd87910c0ef1f309e8061f08ee60f8118612b4fe104" Jan 20 17:49:23 crc kubenswrapper[4558]: I0120 17:49:23.946554 4558 scope.go:117] "RemoveContainer" containerID="25d54c191dc33c7fed19acdcd580fbd43bb913261f66f192926427656b325ef2" Jan 20 17:49:23 crc kubenswrapper[4558]: I0120 17:49:23.968419 4558 scope.go:117] "RemoveContainer" containerID="dceb063becafd3ab16d08d70734f823d57aed693a7d29459757378db7e430997" Jan 20 17:49:24 crc kubenswrapper[4558]: I0120 17:49:24.000391 4558 scope.go:117] "RemoveContainer" containerID="d6a76d69efdb8d9de2921accc3eae11c4dd079ceb576dcf301178d78b9a86d25" Jan 20 17:49:24 crc kubenswrapper[4558]: I0120 17:49:24.018909 4558 scope.go:117] "RemoveContainer" containerID="a154b825c6001b834fad5ef3b1acf5a64fc98844395614a8310c4485a3d01a62" Jan 20 17:49:24 crc kubenswrapper[4558]: I0120 17:49:24.036378 4558 scope.go:117] "RemoveContainer" containerID="1ef3a181c7498e05935b57f64f9e58a17d10768de064a519dd385d5add3525db" Jan 20 17:49:29 crc kubenswrapper[4558]: I0120 17:49:29.668331 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:49:29 crc kubenswrapper[4558]: I0120 17:49:29.669071 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:49:29 crc kubenswrapper[4558]: I0120 17:49:29.669114 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:49:29 crc kubenswrapper[4558]: I0120 17:49:29.670588 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:49:29 crc kubenswrapper[4558]: I0120 17:49:29.671892 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:49:29 crc kubenswrapper[4558]: I0120 17:49:29.675050 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:49:29 crc 
kubenswrapper[4558]: I0120 17:49:29.676136 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:49:29 crc kubenswrapper[4558]: I0120 17:49:29.678717 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:49:30 crc kubenswrapper[4558]: I0120 17:49:30.374760 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:49:30 crc kubenswrapper[4558]: I0120 17:49:30.379265 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:49:32 crc kubenswrapper[4558]: I0120 17:49:32.245369 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:49:32 crc kubenswrapper[4558]: I0120 17:49:32.246126 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="268d5f08-af8d-499f-876d-30ba10fc3ca4" containerName="ceilometer-central-agent" containerID="cri-o://02dbf529236fac41f9a81e907bc058ec8f3295e7c1432a48daef15c61d725167" gracePeriod=30 Jan 20 17:49:32 crc kubenswrapper[4558]: I0120 17:49:32.246475 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="268d5f08-af8d-499f-876d-30ba10fc3ca4" containerName="proxy-httpd" containerID="cri-o://640505093f304a18cd4f0f3e84a25ae60ed3656a2f35790accd480eca705cff6" gracePeriod=30 Jan 20 17:49:32 crc kubenswrapper[4558]: I0120 17:49:32.246546 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="268d5f08-af8d-499f-876d-30ba10fc3ca4" containerName="ceilometer-notification-agent" containerID="cri-o://e43bc8076015c3b514dfab6161a7d478e55d2f4a95ee199cf00a65a264d3857a" gracePeriod=30 Jan 20 17:49:32 crc kubenswrapper[4558]: I0120 17:49:32.246620 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="268d5f08-af8d-499f-876d-30ba10fc3ca4" containerName="sg-core" containerID="cri-o://24a881ca3e01d2f799588052249d88bd52f8a9f62c4ccf2c919e3f0f4b2313e9" gracePeriod=30 Jan 20 17:49:32 crc kubenswrapper[4558]: I0120 17:49:32.348186 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/ceilometer-0" podUID="268d5f08-af8d-499f-876d-30ba10fc3ca4" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.1.176:3000/\": read tcp 10.217.0.2:50214->10.217.1.176:3000: read: connection reset by peer" Jan 20 17:49:32 crc kubenswrapper[4558]: I0120 17:49:32.398330 4558 generic.go:334] "Generic (PLEG): container finished" podID="268d5f08-af8d-499f-876d-30ba10fc3ca4" containerID="640505093f304a18cd4f0f3e84a25ae60ed3656a2f35790accd480eca705cff6" exitCode=0 Jan 20 17:49:32 crc kubenswrapper[4558]: I0120 17:49:32.398364 4558 generic.go:334] "Generic (PLEG): container finished" podID="268d5f08-af8d-499f-876d-30ba10fc3ca4" containerID="24a881ca3e01d2f799588052249d88bd52f8a9f62c4ccf2c919e3f0f4b2313e9" exitCode=2 Jan 20 17:49:32 crc kubenswrapper[4558]: I0120 17:49:32.398410 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"268d5f08-af8d-499f-876d-30ba10fc3ca4","Type":"ContainerDied","Data":"640505093f304a18cd4f0f3e84a25ae60ed3656a2f35790accd480eca705cff6"} Jan 20 17:49:32 crc kubenswrapper[4558]: I0120 
17:49:32.398460 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"268d5f08-af8d-499f-876d-30ba10fc3ca4","Type":"ContainerDied","Data":"24a881ca3e01d2f799588052249d88bd52f8a9f62c4ccf2c919e3f0f4b2313e9"} Jan 20 17:49:33 crc kubenswrapper[4558]: I0120 17:49:33.103364 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:49:33 crc kubenswrapper[4558]: I0120 17:49:33.407938 4558 generic.go:334] "Generic (PLEG): container finished" podID="268d5f08-af8d-499f-876d-30ba10fc3ca4" containerID="02dbf529236fac41f9a81e907bc058ec8f3295e7c1432a48daef15c61d725167" exitCode=0 Jan 20 17:49:33 crc kubenswrapper[4558]: I0120 17:49:33.408001 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"268d5f08-af8d-499f-876d-30ba10fc3ca4","Type":"ContainerDied","Data":"02dbf529236fac41f9a81e907bc058ec8f3295e7c1432a48daef15c61d725167"} Jan 20 17:49:33 crc kubenswrapper[4558]: I0120 17:49:33.408863 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="49faf171-8f46-4aed-add3-d40e6a82fc9c" containerName="nova-api-log" containerID="cri-o://6bbfbb092115e67f03abc95423320945a7a59cbc83d760dcdade93b68b67f6f5" gracePeriod=30 Jan 20 17:49:33 crc kubenswrapper[4558]: I0120 17:49:33.408936 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="49faf171-8f46-4aed-add3-d40e6a82fc9c" containerName="nova-api-api" containerID="cri-o://dff43300fab1207f3da7e3ddc3226bbde1bbe928933897986eeb0f64e830e380" gracePeriod=30 Jan 20 17:49:34 crc kubenswrapper[4558]: I0120 17:49:34.421642 4558 generic.go:334] "Generic (PLEG): container finished" podID="49faf171-8f46-4aed-add3-d40e6a82fc9c" containerID="6bbfbb092115e67f03abc95423320945a7a59cbc83d760dcdade93b68b67f6f5" exitCode=143 Jan 20 17:49:34 crc kubenswrapper[4558]: I0120 17:49:34.422069 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"49faf171-8f46-4aed-add3-d40e6a82fc9c","Type":"ContainerDied","Data":"6bbfbb092115e67f03abc95423320945a7a59cbc83d760dcdade93b68b67f6f5"} Jan 20 17:49:36 crc kubenswrapper[4558]: I0120 17:49:36.835726 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:49:36 crc kubenswrapper[4558]: I0120 17:49:36.954864 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/49faf171-8f46-4aed-add3-d40e6a82fc9c-logs\") pod \"49faf171-8f46-4aed-add3-d40e6a82fc9c\" (UID: \"49faf171-8f46-4aed-add3-d40e6a82fc9c\") " Jan 20 17:49:36 crc kubenswrapper[4558]: I0120 17:49:36.955048 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/49faf171-8f46-4aed-add3-d40e6a82fc9c-config-data\") pod \"49faf171-8f46-4aed-add3-d40e6a82fc9c\" (UID: \"49faf171-8f46-4aed-add3-d40e6a82fc9c\") " Jan 20 17:49:36 crc kubenswrapper[4558]: I0120 17:49:36.955081 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-65v4s\" (UniqueName: \"kubernetes.io/projected/49faf171-8f46-4aed-add3-d40e6a82fc9c-kube-api-access-65v4s\") pod \"49faf171-8f46-4aed-add3-d40e6a82fc9c\" (UID: \"49faf171-8f46-4aed-add3-d40e6a82fc9c\") " Jan 20 17:49:36 crc kubenswrapper[4558]: I0120 17:49:36.955245 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/49faf171-8f46-4aed-add3-d40e6a82fc9c-combined-ca-bundle\") pod \"49faf171-8f46-4aed-add3-d40e6a82fc9c\" (UID: \"49faf171-8f46-4aed-add3-d40e6a82fc9c\") " Jan 20 17:49:36 crc kubenswrapper[4558]: I0120 17:49:36.955399 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/49faf171-8f46-4aed-add3-d40e6a82fc9c-logs" (OuterVolumeSpecName: "logs") pod "49faf171-8f46-4aed-add3-d40e6a82fc9c" (UID: "49faf171-8f46-4aed-add3-d40e6a82fc9c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:49:36 crc kubenswrapper[4558]: I0120 17:49:36.955733 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/49faf171-8f46-4aed-add3-d40e6a82fc9c-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:49:36 crc kubenswrapper[4558]: I0120 17:49:36.965284 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49faf171-8f46-4aed-add3-d40e6a82fc9c-kube-api-access-65v4s" (OuterVolumeSpecName: "kube-api-access-65v4s") pod "49faf171-8f46-4aed-add3-d40e6a82fc9c" (UID: "49faf171-8f46-4aed-add3-d40e6a82fc9c"). InnerVolumeSpecName "kube-api-access-65v4s". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:49:36 crc kubenswrapper[4558]: I0120 17:49:36.980339 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49faf171-8f46-4aed-add3-d40e6a82fc9c-config-data" (OuterVolumeSpecName: "config-data") pod "49faf171-8f46-4aed-add3-d40e6a82fc9c" (UID: "49faf171-8f46-4aed-add3-d40e6a82fc9c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:49:36 crc kubenswrapper[4558]: I0120 17:49:36.982112 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49faf171-8f46-4aed-add3-d40e6a82fc9c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "49faf171-8f46-4aed-add3-d40e6a82fc9c" (UID: "49faf171-8f46-4aed-add3-d40e6a82fc9c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:49:37 crc kubenswrapper[4558]: I0120 17:49:37.057689 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/49faf171-8f46-4aed-add3-d40e6a82fc9c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:49:37 crc kubenswrapper[4558]: I0120 17:49:37.057722 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/49faf171-8f46-4aed-add3-d40e6a82fc9c-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:49:37 crc kubenswrapper[4558]: I0120 17:49:37.057736 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-65v4s\" (UniqueName: \"kubernetes.io/projected/49faf171-8f46-4aed-add3-d40e6a82fc9c-kube-api-access-65v4s\") on node \"crc\" DevicePath \"\"" Jan 20 17:49:37 crc kubenswrapper[4558]: I0120 17:49:37.457458 4558 generic.go:334] "Generic (PLEG): container finished" podID="268d5f08-af8d-499f-876d-30ba10fc3ca4" containerID="e43bc8076015c3b514dfab6161a7d478e55d2f4a95ee199cf00a65a264d3857a" exitCode=0 Jan 20 17:49:37 crc kubenswrapper[4558]: I0120 17:49:37.457673 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"268d5f08-af8d-499f-876d-30ba10fc3ca4","Type":"ContainerDied","Data":"e43bc8076015c3b514dfab6161a7d478e55d2f4a95ee199cf00a65a264d3857a"} Jan 20 17:49:37 crc kubenswrapper[4558]: I0120 17:49:37.460002 4558 generic.go:334] "Generic (PLEG): container finished" podID="49faf171-8f46-4aed-add3-d40e6a82fc9c" containerID="dff43300fab1207f3da7e3ddc3226bbde1bbe928933897986eeb0f64e830e380" exitCode=0 Jan 20 17:49:37 crc kubenswrapper[4558]: I0120 17:49:37.460065 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"49faf171-8f46-4aed-add3-d40e6a82fc9c","Type":"ContainerDied","Data":"dff43300fab1207f3da7e3ddc3226bbde1bbe928933897986eeb0f64e830e380"} Jan 20 17:49:37 crc kubenswrapper[4558]: I0120 17:49:37.460079 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:49:37 crc kubenswrapper[4558]: I0120 17:49:37.460123 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"49faf171-8f46-4aed-add3-d40e6a82fc9c","Type":"ContainerDied","Data":"1eeed8c9576d22b8adbd03f8229ef2b3f7917ad17415f6931cf15e1ee1f4f405"} Jan 20 17:49:37 crc kubenswrapper[4558]: I0120 17:49:37.460151 4558 scope.go:117] "RemoveContainer" containerID="dff43300fab1207f3da7e3ddc3226bbde1bbe928933897986eeb0f64e830e380" Jan 20 17:49:37 crc kubenswrapper[4558]: I0120 17:49:37.490400 4558 scope.go:117] "RemoveContainer" containerID="6bbfbb092115e67f03abc95423320945a7a59cbc83d760dcdade93b68b67f6f5" Jan 20 17:49:37 crc kubenswrapper[4558]: I0120 17:49:37.496475 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:49:37 crc kubenswrapper[4558]: I0120 17:49:37.535662 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:49:37 crc kubenswrapper[4558]: I0120 17:49:37.543316 4558 scope.go:117] "RemoveContainer" containerID="dff43300fab1207f3da7e3ddc3226bbde1bbe928933897986eeb0f64e830e380" Jan 20 17:49:37 crc kubenswrapper[4558]: E0120 17:49:37.543949 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dff43300fab1207f3da7e3ddc3226bbde1bbe928933897986eeb0f64e830e380\": container with ID starting with dff43300fab1207f3da7e3ddc3226bbde1bbe928933897986eeb0f64e830e380 not found: ID does not exist" containerID="dff43300fab1207f3da7e3ddc3226bbde1bbe928933897986eeb0f64e830e380" Jan 20 17:49:37 crc kubenswrapper[4558]: I0120 17:49:37.544019 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dff43300fab1207f3da7e3ddc3226bbde1bbe928933897986eeb0f64e830e380"} err="failed to get container status \"dff43300fab1207f3da7e3ddc3226bbde1bbe928933897986eeb0f64e830e380\": rpc error: code = NotFound desc = could not find container \"dff43300fab1207f3da7e3ddc3226bbde1bbe928933897986eeb0f64e830e380\": container with ID starting with dff43300fab1207f3da7e3ddc3226bbde1bbe928933897986eeb0f64e830e380 not found: ID does not exist" Jan 20 17:49:37 crc kubenswrapper[4558]: I0120 17:49:37.544050 4558 scope.go:117] "RemoveContainer" containerID="6bbfbb092115e67f03abc95423320945a7a59cbc83d760dcdade93b68b67f6f5" Jan 20 17:49:37 crc kubenswrapper[4558]: E0120 17:49:37.544734 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6bbfbb092115e67f03abc95423320945a7a59cbc83d760dcdade93b68b67f6f5\": container with ID starting with 6bbfbb092115e67f03abc95423320945a7a59cbc83d760dcdade93b68b67f6f5 not found: ID does not exist" containerID="6bbfbb092115e67f03abc95423320945a7a59cbc83d760dcdade93b68b67f6f5" Jan 20 17:49:37 crc kubenswrapper[4558]: I0120 17:49:37.544791 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6bbfbb092115e67f03abc95423320945a7a59cbc83d760dcdade93b68b67f6f5"} err="failed to get container status \"6bbfbb092115e67f03abc95423320945a7a59cbc83d760dcdade93b68b67f6f5\": rpc error: code = NotFound desc = could not find container \"6bbfbb092115e67f03abc95423320945a7a59cbc83d760dcdade93b68b67f6f5\": container with ID starting with 6bbfbb092115e67f03abc95423320945a7a59cbc83d760dcdade93b68b67f6f5 not found: ID does not exist" Jan 20 17:49:37 crc 
kubenswrapper[4558]: I0120 17:49:37.548329 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:49:37 crc kubenswrapper[4558]: E0120 17:49:37.548989 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="49faf171-8f46-4aed-add3-d40e6a82fc9c" containerName="nova-api-log" Jan 20 17:49:37 crc kubenswrapper[4558]: I0120 17:49:37.549010 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="49faf171-8f46-4aed-add3-d40e6a82fc9c" containerName="nova-api-log" Jan 20 17:49:37 crc kubenswrapper[4558]: E0120 17:49:37.549037 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="49faf171-8f46-4aed-add3-d40e6a82fc9c" containerName="nova-api-api" Jan 20 17:49:37 crc kubenswrapper[4558]: I0120 17:49:37.549044 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="49faf171-8f46-4aed-add3-d40e6a82fc9c" containerName="nova-api-api" Jan 20 17:49:37 crc kubenswrapper[4558]: I0120 17:49:37.549396 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="49faf171-8f46-4aed-add3-d40e6a82fc9c" containerName="nova-api-log" Jan 20 17:49:37 crc kubenswrapper[4558]: I0120 17:49:37.549420 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="49faf171-8f46-4aed-add3-d40e6a82fc9c" containerName="nova-api-api" Jan 20 17:49:37 crc kubenswrapper[4558]: I0120 17:49:37.551497 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:49:37 crc kubenswrapper[4558]: I0120 17:49:37.554918 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-internal-svc" Jan 20 17:49:37 crc kubenswrapper[4558]: I0120 17:49:37.556337 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-api-config-data" Jan 20 17:49:37 crc kubenswrapper[4558]: I0120 17:49:37.559279 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-public-svc" Jan 20 17:49:37 crc kubenswrapper[4558]: I0120 17:49:37.560697 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:49:37 crc kubenswrapper[4558]: I0120 17:49:37.675996 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/28febf86-c729-498e-b3af-0321468d80f3-public-tls-certs\") pod \"nova-api-0\" (UID: \"28febf86-c729-498e-b3af-0321468d80f3\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:49:37 crc kubenswrapper[4558]: I0120 17:49:37.676176 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/28febf86-c729-498e-b3af-0321468d80f3-config-data\") pod \"nova-api-0\" (UID: \"28febf86-c729-498e-b3af-0321468d80f3\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:49:37 crc kubenswrapper[4558]: I0120 17:49:37.676290 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-28gph\" (UniqueName: \"kubernetes.io/projected/28febf86-c729-498e-b3af-0321468d80f3-kube-api-access-28gph\") pod \"nova-api-0\" (UID: \"28febf86-c729-498e-b3af-0321468d80f3\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:49:37 crc kubenswrapper[4558]: I0120 17:49:37.676318 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/28febf86-c729-498e-b3af-0321468d80f3-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"28febf86-c729-498e-b3af-0321468d80f3\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:49:37 crc kubenswrapper[4558]: I0120 17:49:37.676379 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/28febf86-c729-498e-b3af-0321468d80f3-logs\") pod \"nova-api-0\" (UID: \"28febf86-c729-498e-b3af-0321468d80f3\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:49:37 crc kubenswrapper[4558]: I0120 17:49:37.676560 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/28febf86-c729-498e-b3af-0321468d80f3-internal-tls-certs\") pod \"nova-api-0\" (UID: \"28febf86-c729-498e-b3af-0321468d80f3\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:49:37 crc kubenswrapper[4558]: I0120 17:49:37.778348 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/28febf86-c729-498e-b3af-0321468d80f3-internal-tls-certs\") pod \"nova-api-0\" (UID: \"28febf86-c729-498e-b3af-0321468d80f3\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:49:37 crc kubenswrapper[4558]: I0120 17:49:37.778406 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/28febf86-c729-498e-b3af-0321468d80f3-public-tls-certs\") pod \"nova-api-0\" (UID: \"28febf86-c729-498e-b3af-0321468d80f3\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:49:37 crc kubenswrapper[4558]: I0120 17:49:37.778495 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/28febf86-c729-498e-b3af-0321468d80f3-config-data\") pod \"nova-api-0\" (UID: \"28febf86-c729-498e-b3af-0321468d80f3\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:49:37 crc kubenswrapper[4558]: I0120 17:49:37.778560 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-28gph\" (UniqueName: \"kubernetes.io/projected/28febf86-c729-498e-b3af-0321468d80f3-kube-api-access-28gph\") pod \"nova-api-0\" (UID: \"28febf86-c729-498e-b3af-0321468d80f3\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:49:37 crc kubenswrapper[4558]: I0120 17:49:37.778581 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28febf86-c729-498e-b3af-0321468d80f3-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"28febf86-c729-498e-b3af-0321468d80f3\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:49:37 crc kubenswrapper[4558]: I0120 17:49:37.778615 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/28febf86-c729-498e-b3af-0321468d80f3-logs\") pod \"nova-api-0\" (UID: \"28febf86-c729-498e-b3af-0321468d80f3\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:49:37 crc kubenswrapper[4558]: I0120 17:49:37.779079 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/28febf86-c729-498e-b3af-0321468d80f3-logs\") pod \"nova-api-0\" (UID: \"28febf86-c729-498e-b3af-0321468d80f3\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:49:37 crc kubenswrapper[4558]: I0120 17:49:37.779153 4558 util.go:48] "No 
ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:49:37 crc kubenswrapper[4558]: I0120 17:49:37.784422 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/28febf86-c729-498e-b3af-0321468d80f3-internal-tls-certs\") pod \"nova-api-0\" (UID: \"28febf86-c729-498e-b3af-0321468d80f3\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:49:37 crc kubenswrapper[4558]: I0120 17:49:37.784447 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/28febf86-c729-498e-b3af-0321468d80f3-public-tls-certs\") pod \"nova-api-0\" (UID: \"28febf86-c729-498e-b3af-0321468d80f3\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:49:37 crc kubenswrapper[4558]: I0120 17:49:37.785680 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/28febf86-c729-498e-b3af-0321468d80f3-config-data\") pod \"nova-api-0\" (UID: \"28febf86-c729-498e-b3af-0321468d80f3\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:49:37 crc kubenswrapper[4558]: I0120 17:49:37.785991 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28febf86-c729-498e-b3af-0321468d80f3-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"28febf86-c729-498e-b3af-0321468d80f3\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:49:37 crc kubenswrapper[4558]: I0120 17:49:37.793661 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-28gph\" (UniqueName: \"kubernetes.io/projected/28febf86-c729-498e-b3af-0321468d80f3-kube-api-access-28gph\") pod \"nova-api-0\" (UID: \"28febf86-c729-498e-b3af-0321468d80f3\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:49:37 crc kubenswrapper[4558]: I0120 17:49:37.879573 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/268d5f08-af8d-499f-876d-30ba10fc3ca4-scripts\") pod \"268d5f08-af8d-499f-876d-30ba10fc3ca4\" (UID: \"268d5f08-af8d-499f-876d-30ba10fc3ca4\") " Jan 20 17:49:37 crc kubenswrapper[4558]: I0120 17:49:37.879957 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/268d5f08-af8d-499f-876d-30ba10fc3ca4-config-data\") pod \"268d5f08-af8d-499f-876d-30ba10fc3ca4\" (UID: \"268d5f08-af8d-499f-876d-30ba10fc3ca4\") " Jan 20 17:49:37 crc kubenswrapper[4558]: I0120 17:49:37.879993 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/268d5f08-af8d-499f-876d-30ba10fc3ca4-sg-core-conf-yaml\") pod \"268d5f08-af8d-499f-876d-30ba10fc3ca4\" (UID: \"268d5f08-af8d-499f-876d-30ba10fc3ca4\") " Jan 20 17:49:37 crc kubenswrapper[4558]: I0120 17:49:37.880083 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/268d5f08-af8d-499f-876d-30ba10fc3ca4-ceilometer-tls-certs\") pod \"268d5f08-af8d-499f-876d-30ba10fc3ca4\" (UID: \"268d5f08-af8d-499f-876d-30ba10fc3ca4\") " Jan 20 17:49:37 crc kubenswrapper[4558]: I0120 17:49:37.880121 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/268d5f08-af8d-499f-876d-30ba10fc3ca4-run-httpd\") 
pod \"268d5f08-af8d-499f-876d-30ba10fc3ca4\" (UID: \"268d5f08-af8d-499f-876d-30ba10fc3ca4\") " Jan 20 17:49:37 crc kubenswrapper[4558]: I0120 17:49:37.880197 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/268d5f08-af8d-499f-876d-30ba10fc3ca4-log-httpd\") pod \"268d5f08-af8d-499f-876d-30ba10fc3ca4\" (UID: \"268d5f08-af8d-499f-876d-30ba10fc3ca4\") " Jan 20 17:49:37 crc kubenswrapper[4558]: I0120 17:49:37.880271 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gk7ts\" (UniqueName: \"kubernetes.io/projected/268d5f08-af8d-499f-876d-30ba10fc3ca4-kube-api-access-gk7ts\") pod \"268d5f08-af8d-499f-876d-30ba10fc3ca4\" (UID: \"268d5f08-af8d-499f-876d-30ba10fc3ca4\") " Jan 20 17:49:37 crc kubenswrapper[4558]: I0120 17:49:37.880321 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/268d5f08-af8d-499f-876d-30ba10fc3ca4-combined-ca-bundle\") pod \"268d5f08-af8d-499f-876d-30ba10fc3ca4\" (UID: \"268d5f08-af8d-499f-876d-30ba10fc3ca4\") " Jan 20 17:49:37 crc kubenswrapper[4558]: I0120 17:49:37.880650 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/268d5f08-af8d-499f-876d-30ba10fc3ca4-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "268d5f08-af8d-499f-876d-30ba10fc3ca4" (UID: "268d5f08-af8d-499f-876d-30ba10fc3ca4"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:49:37 crc kubenswrapper[4558]: I0120 17:49:37.880776 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/268d5f08-af8d-499f-876d-30ba10fc3ca4-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "268d5f08-af8d-499f-876d-30ba10fc3ca4" (UID: "268d5f08-af8d-499f-876d-30ba10fc3ca4"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:49:37 crc kubenswrapper[4558]: I0120 17:49:37.884639 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/268d5f08-af8d-499f-876d-30ba10fc3ca4-scripts" (OuterVolumeSpecName: "scripts") pod "268d5f08-af8d-499f-876d-30ba10fc3ca4" (UID: "268d5f08-af8d-499f-876d-30ba10fc3ca4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:49:37 crc kubenswrapper[4558]: I0120 17:49:37.885442 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/268d5f08-af8d-499f-876d-30ba10fc3ca4-kube-api-access-gk7ts" (OuterVolumeSpecName: "kube-api-access-gk7ts") pod "268d5f08-af8d-499f-876d-30ba10fc3ca4" (UID: "268d5f08-af8d-499f-876d-30ba10fc3ca4"). InnerVolumeSpecName "kube-api-access-gk7ts". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:49:37 crc kubenswrapper[4558]: I0120 17:49:37.891261 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:49:37 crc kubenswrapper[4558]: I0120 17:49:37.906301 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/268d5f08-af8d-499f-876d-30ba10fc3ca4-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "268d5f08-af8d-499f-876d-30ba10fc3ca4" (UID: "268d5f08-af8d-499f-876d-30ba10fc3ca4"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:49:37 crc kubenswrapper[4558]: I0120 17:49:37.932636 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/268d5f08-af8d-499f-876d-30ba10fc3ca4-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "268d5f08-af8d-499f-876d-30ba10fc3ca4" (UID: "268d5f08-af8d-499f-876d-30ba10fc3ca4"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:49:37 crc kubenswrapper[4558]: I0120 17:49:37.939985 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/268d5f08-af8d-499f-876d-30ba10fc3ca4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "268d5f08-af8d-499f-876d-30ba10fc3ca4" (UID: "268d5f08-af8d-499f-876d-30ba10fc3ca4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:49:37 crc kubenswrapper[4558]: I0120 17:49:37.969533 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/268d5f08-af8d-499f-876d-30ba10fc3ca4-config-data" (OuterVolumeSpecName: "config-data") pod "268d5f08-af8d-499f-876d-30ba10fc3ca4" (UID: "268d5f08-af8d-499f-876d-30ba10fc3ca4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:49:37 crc kubenswrapper[4558]: I0120 17:49:37.983075 4558 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/268d5f08-af8d-499f-876d-30ba10fc3ca4-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:49:37 crc kubenswrapper[4558]: I0120 17:49:37.983113 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/268d5f08-af8d-499f-876d-30ba10fc3ca4-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:49:37 crc kubenswrapper[4558]: I0120 17:49:37.983125 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/268d5f08-af8d-499f-876d-30ba10fc3ca4-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:49:37 crc kubenswrapper[4558]: I0120 17:49:37.983142 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gk7ts\" (UniqueName: \"kubernetes.io/projected/268d5f08-af8d-499f-876d-30ba10fc3ca4-kube-api-access-gk7ts\") on node \"crc\" DevicePath \"\"" Jan 20 17:49:37 crc kubenswrapper[4558]: I0120 17:49:37.983157 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/268d5f08-af8d-499f-876d-30ba10fc3ca4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:49:37 crc kubenswrapper[4558]: I0120 17:49:37.983180 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/268d5f08-af8d-499f-876d-30ba10fc3ca4-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:49:37 crc kubenswrapper[4558]: I0120 17:49:37.983196 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/268d5f08-af8d-499f-876d-30ba10fc3ca4-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:49:37 crc kubenswrapper[4558]: I0120 17:49:37.983206 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/268d5f08-af8d-499f-876d-30ba10fc3ca4-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:49:38 crc kubenswrapper[4558]: 
I0120 17:49:38.317634 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:49:38 crc kubenswrapper[4558]: W0120 17:49:38.318538 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod28febf86_c729_498e_b3af_0321468d80f3.slice/crio-4ab94929b6db42289fce351b21eb7a8fab35f0e4c9c164866cbbbd10ba889df6 WatchSource:0}: Error finding container 4ab94929b6db42289fce351b21eb7a8fab35f0e4c9c164866cbbbd10ba889df6: Status 404 returned error can't find the container with id 4ab94929b6db42289fce351b21eb7a8fab35f0e4c9c164866cbbbd10ba889df6 Jan 20 17:49:38 crc kubenswrapper[4558]: I0120 17:49:38.472427 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"28febf86-c729-498e-b3af-0321468d80f3","Type":"ContainerStarted","Data":"8e218874514f86011edef030a16090a4d678fd4bb3ef20d8a141af1af291d2c9"} Jan 20 17:49:38 crc kubenswrapper[4558]: I0120 17:49:38.472688 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"28febf86-c729-498e-b3af-0321468d80f3","Type":"ContainerStarted","Data":"4ab94929b6db42289fce351b21eb7a8fab35f0e4c9c164866cbbbd10ba889df6"} Jan 20 17:49:38 crc kubenswrapper[4558]: I0120 17:49:38.476272 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"268d5f08-af8d-499f-876d-30ba10fc3ca4","Type":"ContainerDied","Data":"81859de4cdee33ed40af81e32910bd93dd0bc705939f1f4a3b38546910d2dfc0"} Jan 20 17:49:38 crc kubenswrapper[4558]: I0120 17:49:38.476330 4558 scope.go:117] "RemoveContainer" containerID="640505093f304a18cd4f0f3e84a25ae60ed3656a2f35790accd480eca705cff6" Jan 20 17:49:38 crc kubenswrapper[4558]: I0120 17:49:38.476373 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:49:38 crc kubenswrapper[4558]: I0120 17:49:38.517487 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:49:38 crc kubenswrapper[4558]: I0120 17:49:38.523835 4558 scope.go:117] "RemoveContainer" containerID="24a881ca3e01d2f799588052249d88bd52f8a9f62c4ccf2c919e3f0f4b2313e9" Jan 20 17:49:38 crc kubenswrapper[4558]: I0120 17:49:38.524990 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:49:38 crc kubenswrapper[4558]: I0120 17:49:38.551888 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:49:38 crc kubenswrapper[4558]: E0120 17:49:38.552367 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="268d5f08-af8d-499f-876d-30ba10fc3ca4" containerName="proxy-httpd" Jan 20 17:49:38 crc kubenswrapper[4558]: I0120 17:49:38.552388 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="268d5f08-af8d-499f-876d-30ba10fc3ca4" containerName="proxy-httpd" Jan 20 17:49:38 crc kubenswrapper[4558]: E0120 17:49:38.552425 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="268d5f08-af8d-499f-876d-30ba10fc3ca4" containerName="ceilometer-central-agent" Jan 20 17:49:38 crc kubenswrapper[4558]: I0120 17:49:38.552433 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="268d5f08-af8d-499f-876d-30ba10fc3ca4" containerName="ceilometer-central-agent" Jan 20 17:49:38 crc kubenswrapper[4558]: E0120 17:49:38.552454 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="268d5f08-af8d-499f-876d-30ba10fc3ca4" containerName="ceilometer-notification-agent" Jan 20 17:49:38 crc kubenswrapper[4558]: I0120 17:49:38.552461 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="268d5f08-af8d-499f-876d-30ba10fc3ca4" containerName="ceilometer-notification-agent" Jan 20 17:49:38 crc kubenswrapper[4558]: E0120 17:49:38.552470 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="268d5f08-af8d-499f-876d-30ba10fc3ca4" containerName="sg-core" Jan 20 17:49:38 crc kubenswrapper[4558]: I0120 17:49:38.552477 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="268d5f08-af8d-499f-876d-30ba10fc3ca4" containerName="sg-core" Jan 20 17:49:38 crc kubenswrapper[4558]: I0120 17:49:38.552667 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="268d5f08-af8d-499f-876d-30ba10fc3ca4" containerName="ceilometer-notification-agent" Jan 20 17:49:38 crc kubenswrapper[4558]: I0120 17:49:38.552680 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="268d5f08-af8d-499f-876d-30ba10fc3ca4" containerName="sg-core" Jan 20 17:49:38 crc kubenswrapper[4558]: I0120 17:49:38.552691 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="268d5f08-af8d-499f-876d-30ba10fc3ca4" containerName="ceilometer-central-agent" Jan 20 17:49:38 crc kubenswrapper[4558]: I0120 17:49:38.552706 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="268d5f08-af8d-499f-876d-30ba10fc3ca4" containerName="proxy-httpd" Jan 20 17:49:38 crc kubenswrapper[4558]: I0120 17:49:38.554323 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:49:38 crc kubenswrapper[4558]: I0120 17:49:38.557796 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:49:38 crc kubenswrapper[4558]: I0120 17:49:38.558087 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ceilometer-internal-svc" Jan 20 17:49:38 crc kubenswrapper[4558]: I0120 17:49:38.558283 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:49:38 crc kubenswrapper[4558]: I0120 17:49:38.559636 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:49:38 crc kubenswrapper[4558]: I0120 17:49:38.566551 4558 scope.go:117] "RemoveContainer" containerID="e43bc8076015c3b514dfab6161a7d478e55d2f4a95ee199cf00a65a264d3857a" Jan 20 17:49:38 crc kubenswrapper[4558]: I0120 17:49:38.581728 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="268d5f08-af8d-499f-876d-30ba10fc3ca4" path="/var/lib/kubelet/pods/268d5f08-af8d-499f-876d-30ba10fc3ca4/volumes" Jan 20 17:49:38 crc kubenswrapper[4558]: I0120 17:49:38.582476 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49faf171-8f46-4aed-add3-d40e6a82fc9c" path="/var/lib/kubelet/pods/49faf171-8f46-4aed-add3-d40e6a82fc9c/volumes" Jan 20 17:49:38 crc kubenswrapper[4558]: I0120 17:49:38.594489 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a3e03093-9a03-4aac-bcde-475dda6c3dcc-scripts\") pod \"ceilometer-0\" (UID: \"a3e03093-9a03-4aac-bcde-475dda6c3dcc\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:49:38 crc kubenswrapper[4558]: I0120 17:49:38.594538 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3e03093-9a03-4aac-bcde-475dda6c3dcc-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a3e03093-9a03-4aac-bcde-475dda6c3dcc\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:49:38 crc kubenswrapper[4558]: I0120 17:49:38.594591 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r7zcm\" (UniqueName: \"kubernetes.io/projected/a3e03093-9a03-4aac-bcde-475dda6c3dcc-kube-api-access-r7zcm\") pod \"ceilometer-0\" (UID: \"a3e03093-9a03-4aac-bcde-475dda6c3dcc\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:49:38 crc kubenswrapper[4558]: I0120 17:49:38.594666 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a3e03093-9a03-4aac-bcde-475dda6c3dcc-run-httpd\") pod \"ceilometer-0\" (UID: \"a3e03093-9a03-4aac-bcde-475dda6c3dcc\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:49:38 crc kubenswrapper[4558]: I0120 17:49:38.594700 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a3e03093-9a03-4aac-bcde-475dda6c3dcc-log-httpd\") pod \"ceilometer-0\" (UID: \"a3e03093-9a03-4aac-bcde-475dda6c3dcc\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:49:38 crc kubenswrapper[4558]: I0120 17:49:38.594759 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" 
(UniqueName: \"kubernetes.io/secret/a3e03093-9a03-4aac-bcde-475dda6c3dcc-config-data\") pod \"ceilometer-0\" (UID: \"a3e03093-9a03-4aac-bcde-475dda6c3dcc\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:49:38 crc kubenswrapper[4558]: I0120 17:49:38.594790 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/a3e03093-9a03-4aac-bcde-475dda6c3dcc-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"a3e03093-9a03-4aac-bcde-475dda6c3dcc\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:49:38 crc kubenswrapper[4558]: I0120 17:49:38.594852 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a3e03093-9a03-4aac-bcde-475dda6c3dcc-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a3e03093-9a03-4aac-bcde-475dda6c3dcc\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:49:38 crc kubenswrapper[4558]: I0120 17:49:38.608078 4558 scope.go:117] "RemoveContainer" containerID="02dbf529236fac41f9a81e907bc058ec8f3295e7c1432a48daef15c61d725167" Jan 20 17:49:38 crc kubenswrapper[4558]: I0120 17:49:38.696586 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a3e03093-9a03-4aac-bcde-475dda6c3dcc-config-data\") pod \"ceilometer-0\" (UID: \"a3e03093-9a03-4aac-bcde-475dda6c3dcc\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:49:38 crc kubenswrapper[4558]: I0120 17:49:38.696652 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/a3e03093-9a03-4aac-bcde-475dda6c3dcc-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"a3e03093-9a03-4aac-bcde-475dda6c3dcc\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:49:38 crc kubenswrapper[4558]: I0120 17:49:38.696696 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a3e03093-9a03-4aac-bcde-475dda6c3dcc-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a3e03093-9a03-4aac-bcde-475dda6c3dcc\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:49:38 crc kubenswrapper[4558]: I0120 17:49:38.696732 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a3e03093-9a03-4aac-bcde-475dda6c3dcc-scripts\") pod \"ceilometer-0\" (UID: \"a3e03093-9a03-4aac-bcde-475dda6c3dcc\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:49:38 crc kubenswrapper[4558]: I0120 17:49:38.696758 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3e03093-9a03-4aac-bcde-475dda6c3dcc-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a3e03093-9a03-4aac-bcde-475dda6c3dcc\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:49:38 crc kubenswrapper[4558]: I0120 17:49:38.696820 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r7zcm\" (UniqueName: \"kubernetes.io/projected/a3e03093-9a03-4aac-bcde-475dda6c3dcc-kube-api-access-r7zcm\") pod \"ceilometer-0\" (UID: \"a3e03093-9a03-4aac-bcde-475dda6c3dcc\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:49:38 crc kubenswrapper[4558]: I0120 17:49:38.696873 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" 
(UniqueName: \"kubernetes.io/empty-dir/a3e03093-9a03-4aac-bcde-475dda6c3dcc-run-httpd\") pod \"ceilometer-0\" (UID: \"a3e03093-9a03-4aac-bcde-475dda6c3dcc\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:49:38 crc kubenswrapper[4558]: I0120 17:49:38.696904 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a3e03093-9a03-4aac-bcde-475dda6c3dcc-log-httpd\") pod \"ceilometer-0\" (UID: \"a3e03093-9a03-4aac-bcde-475dda6c3dcc\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:49:38 crc kubenswrapper[4558]: I0120 17:49:38.698372 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a3e03093-9a03-4aac-bcde-475dda6c3dcc-log-httpd\") pod \"ceilometer-0\" (UID: \"a3e03093-9a03-4aac-bcde-475dda6c3dcc\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:49:38 crc kubenswrapper[4558]: I0120 17:49:38.698424 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a3e03093-9a03-4aac-bcde-475dda6c3dcc-run-httpd\") pod \"ceilometer-0\" (UID: \"a3e03093-9a03-4aac-bcde-475dda6c3dcc\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:49:38 crc kubenswrapper[4558]: I0120 17:49:38.700934 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/a3e03093-9a03-4aac-bcde-475dda6c3dcc-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"a3e03093-9a03-4aac-bcde-475dda6c3dcc\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:49:38 crc kubenswrapper[4558]: I0120 17:49:38.702914 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a3e03093-9a03-4aac-bcde-475dda6c3dcc-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a3e03093-9a03-4aac-bcde-475dda6c3dcc\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:49:38 crc kubenswrapper[4558]: I0120 17:49:38.703355 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a3e03093-9a03-4aac-bcde-475dda6c3dcc-config-data\") pod \"ceilometer-0\" (UID: \"a3e03093-9a03-4aac-bcde-475dda6c3dcc\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:49:38 crc kubenswrapper[4558]: I0120 17:49:38.703736 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a3e03093-9a03-4aac-bcde-475dda6c3dcc-scripts\") pod \"ceilometer-0\" (UID: \"a3e03093-9a03-4aac-bcde-475dda6c3dcc\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:49:38 crc kubenswrapper[4558]: I0120 17:49:38.704007 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3e03093-9a03-4aac-bcde-475dda6c3dcc-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a3e03093-9a03-4aac-bcde-475dda6c3dcc\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:49:38 crc kubenswrapper[4558]: I0120 17:49:38.712342 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r7zcm\" (UniqueName: \"kubernetes.io/projected/a3e03093-9a03-4aac-bcde-475dda6c3dcc-kube-api-access-r7zcm\") pod \"ceilometer-0\" (UID: \"a3e03093-9a03-4aac-bcde-475dda6c3dcc\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:49:38 crc kubenswrapper[4558]: I0120 17:49:38.924045 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:49:39 crc kubenswrapper[4558]: I0120 17:49:39.356390 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:49:39 crc kubenswrapper[4558]: W0120 17:49:39.358963 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda3e03093_9a03_4aac_bcde_475dda6c3dcc.slice/crio-74e3d636e75e3cc0823c216010f7120c37ee7e3634f3be3d44dc1b2543c86161 WatchSource:0}: Error finding container 74e3d636e75e3cc0823c216010f7120c37ee7e3634f3be3d44dc1b2543c86161: Status 404 returned error can't find the container with id 74e3d636e75e3cc0823c216010f7120c37ee7e3634f3be3d44dc1b2543c86161 Jan 20 17:49:39 crc kubenswrapper[4558]: I0120 17:49:39.361675 4558 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 20 17:49:39 crc kubenswrapper[4558]: I0120 17:49:39.492706 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"28febf86-c729-498e-b3af-0321468d80f3","Type":"ContainerStarted","Data":"d57bd37afc98044774c1f3fa1bd0ea83a8cca9c7814e87365a229f5ac1765f05"} Jan 20 17:49:39 crc kubenswrapper[4558]: I0120 17:49:39.494690 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"a3e03093-9a03-4aac-bcde-475dda6c3dcc","Type":"ContainerStarted","Data":"74e3d636e75e3cc0823c216010f7120c37ee7e3634f3be3d44dc1b2543c86161"} Jan 20 17:49:39 crc kubenswrapper[4558]: I0120 17:49:39.515686 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-api-0" podStartSLOduration=2.5156722780000003 podStartE2EDuration="2.515672278s" podCreationTimestamp="2026-01-20 17:49:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:49:39.506776511 +0000 UTC m=+4073.267114478" watchObservedRunningTime="2026-01-20 17:49:39.515672278 +0000 UTC m=+4073.276010245" Jan 20 17:49:40 crc kubenswrapper[4558]: I0120 17:49:40.505817 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"a3e03093-9a03-4aac-bcde-475dda6c3dcc","Type":"ContainerStarted","Data":"518dffb0d9b0f0146e59f64982b965f84538ad2716a9c5d37e992ce46cfdadbb"} Jan 20 17:49:41 crc kubenswrapper[4558]: I0120 17:49:41.519404 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"a3e03093-9a03-4aac-bcde-475dda6c3dcc","Type":"ContainerStarted","Data":"0ef28b39a3024f10f373d8df6dbcc0e604d25efcb4ff0cd7b08a64f50ee2a001"} Jan 20 17:49:42 crc kubenswrapper[4558]: I0120 17:49:42.533375 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"a3e03093-9a03-4aac-bcde-475dda6c3dcc","Type":"ContainerStarted","Data":"dddc0d84255205f19bccd3e01061af9fd0d1e2f5dc72e122ea02fa340a0ca32c"} Jan 20 17:49:43 crc kubenswrapper[4558]: I0120 17:49:43.543041 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"a3e03093-9a03-4aac-bcde-475dda6c3dcc","Type":"ContainerStarted","Data":"8317dbc00d30b8e256eff4e4e631c97112ea996e280ba9576ca0f29a56a05240"} Jan 20 17:49:43 crc kubenswrapper[4558]: I0120 17:49:43.543673 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:49:43 
crc kubenswrapper[4558]: I0120 17:49:43.563095 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=1.737767751 podStartE2EDuration="5.563084388s" podCreationTimestamp="2026-01-20 17:49:38 +0000 UTC" firstStartedPulling="2026-01-20 17:49:39.361429023 +0000 UTC m=+4073.121766990" lastFinishedPulling="2026-01-20 17:49:43.186745659 +0000 UTC m=+4076.947083627" observedRunningTime="2026-01-20 17:49:43.557513061 +0000 UTC m=+4077.317851028" watchObservedRunningTime="2026-01-20 17:49:43.563084388 +0000 UTC m=+4077.323422355" Jan 20 17:49:47 crc kubenswrapper[4558]: I0120 17:49:47.892505 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:49:47 crc kubenswrapper[4558]: I0120 17:49:47.892847 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:49:48 crc kubenswrapper[4558]: I0120 17:49:48.907326 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" podUID="28febf86-c729-498e-b3af-0321468d80f3" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.1.177:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:49:48 crc kubenswrapper[4558]: I0120 17:49:48.907481 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" podUID="28febf86-c729-498e-b3af-0321468d80f3" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.1.177:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:49:57 crc kubenswrapper[4558]: I0120 17:49:57.900227 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:49:57 crc kubenswrapper[4558]: I0120 17:49:57.900991 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:49:57 crc kubenswrapper[4558]: I0120 17:49:57.901556 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:49:57 crc kubenswrapper[4558]: I0120 17:49:57.902001 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:49:57 crc kubenswrapper[4558]: I0120 17:49:57.906122 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:49:57 crc kubenswrapper[4558]: I0120 17:49:57.907856 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:50:08 crc kubenswrapper[4558]: I0120 17:50:08.932539 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:50:16 crc kubenswrapper[4558]: E0120 17:50:16.596511 4558 upgradeaware.go:441] Error proxying data from backend to client: writeto tcp 192.168.25.8:36458->192.168.25.8:43883: read tcp 192.168.25.8:36458->192.168.25.8:43883: read: connection reset by peer Jan 20 17:50:24 crc kubenswrapper[4558]: I0120 17:50:24.400294 4558 scope.go:117] "RemoveContainer" containerID="a66c79daa262ed9158df917bb1b14c58d89db5c7e59a38fe08277a4bfae9eb16" Jan 20 17:50:24 crc kubenswrapper[4558]: I0120 17:50:24.428627 4558 scope.go:117] "RemoveContainer" 
containerID="5b7ae61c3b22dd588e6b8c80eec1302b76e65100b06918983ae9e73032d1d9d3" Jan 20 17:50:24 crc kubenswrapper[4558]: I0120 17:50:24.450998 4558 scope.go:117] "RemoveContainer" containerID="6142a118577f4983494f4e5bded80a60eb86bde1b0e2aabfeb8afaeda6b2b894" Jan 20 17:50:25 crc kubenswrapper[4558]: I0120 17:50:25.696938 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:50:25 crc kubenswrapper[4558]: I0120 17:50:25.697506 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="28febf86-c729-498e-b3af-0321468d80f3" containerName="nova-api-log" containerID="cri-o://8e218874514f86011edef030a16090a4d678fd4bb3ef20d8a141af1af291d2c9" gracePeriod=30 Jan 20 17:50:25 crc kubenswrapper[4558]: I0120 17:50:25.697595 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="28febf86-c729-498e-b3af-0321468d80f3" containerName="nova-api-api" containerID="cri-o://d57bd37afc98044774c1f3fa1bd0ea83a8cca9c7814e87365a229f5ac1765f05" gracePeriod=30 Jan 20 17:50:25 crc kubenswrapper[4558]: I0120 17:50:25.704847 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:50:25 crc kubenswrapper[4558]: I0120 17:50:25.705136 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovn-northd-0" podUID="6dae3a04-db9e-48a2-bbe2-012c9ba55890" containerName="ovn-northd" containerID="cri-o://9b54f5f4a254eb9083539aa947157b34dfeea7934f5c2b37060ecf2e8e7a304a" gracePeriod=30 Jan 20 17:50:25 crc kubenswrapper[4558]: I0120 17:50:25.705146 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovn-northd-0" podUID="6dae3a04-db9e-48a2-bbe2-012c9ba55890" containerName="openstack-network-exporter" containerID="cri-o://bfbe340de7186cba3ca94e5cb5c951be9e3563e26cd26dd763147ac393eeec4f" gracePeriod=30 Jan 20 17:50:25 crc kubenswrapper[4558]: I0120 17:50:25.719039 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:50:25 crc kubenswrapper[4558]: I0120 17:50:25.719430 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovsdbserver-nb-0" podUID="3f090098-7927-4f68-b49d-4cea135a041c" containerName="openstack-network-exporter" containerID="cri-o://dbdf111aabc4bc0df2386a3517cbb41e1a844ddfad496d4e09801f1998ac0555" gracePeriod=300 Jan 20 17:50:25 crc kubenswrapper[4558]: I0120 17:50:25.740223 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-worker-6c48cb9cdf-9688c"] Jan 20 17:50:25 crc kubenswrapper[4558]: I0120 17:50:25.742199 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-6c48cb9cdf-9688c" Jan 20 17:50:25 crc kubenswrapper[4558]: I0120 17:50:25.757086 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/769afcb7-df8b-4d95-b662-9f5032227822-config-data-custom\") pod \"barbican-worker-6c48cb9cdf-9688c\" (UID: \"769afcb7-df8b-4d95-b662-9f5032227822\") " pod="openstack-kuttl-tests/barbican-worker-6c48cb9cdf-9688c" Jan 20 17:50:25 crc kubenswrapper[4558]: I0120 17:50:25.757135 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/769afcb7-df8b-4d95-b662-9f5032227822-combined-ca-bundle\") pod \"barbican-worker-6c48cb9cdf-9688c\" (UID: \"769afcb7-df8b-4d95-b662-9f5032227822\") " pod="openstack-kuttl-tests/barbican-worker-6c48cb9cdf-9688c" Jan 20 17:50:25 crc kubenswrapper[4558]: I0120 17:50:25.757185 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zkpbk\" (UniqueName: \"kubernetes.io/projected/769afcb7-df8b-4d95-b662-9f5032227822-kube-api-access-zkpbk\") pod \"barbican-worker-6c48cb9cdf-9688c\" (UID: \"769afcb7-df8b-4d95-b662-9f5032227822\") " pod="openstack-kuttl-tests/barbican-worker-6c48cb9cdf-9688c" Jan 20 17:50:25 crc kubenswrapper[4558]: I0120 17:50:25.757329 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/769afcb7-df8b-4d95-b662-9f5032227822-config-data\") pod \"barbican-worker-6c48cb9cdf-9688c\" (UID: \"769afcb7-df8b-4d95-b662-9f5032227822\") " pod="openstack-kuttl-tests/barbican-worker-6c48cb9cdf-9688c" Jan 20 17:50:25 crc kubenswrapper[4558]: I0120 17:50:25.757399 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/769afcb7-df8b-4d95-b662-9f5032227822-logs\") pod \"barbican-worker-6c48cb9cdf-9688c\" (UID: \"769afcb7-df8b-4d95-b662-9f5032227822\") " pod="openstack-kuttl-tests/barbican-worker-6c48cb9cdf-9688c" Jan 20 17:50:25 crc kubenswrapper[4558]: I0120 17:50:25.760004 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-5bb5dd7cd4-w4jnj"] Jan 20 17:50:25 crc kubenswrapper[4558]: I0120 17:50:25.761700 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-5bb5dd7cd4-w4jnj" Jan 20 17:50:25 crc kubenswrapper[4558]: I0120 17:50:25.771059 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-worker-6c48cb9cdf-9688c"] Jan 20 17:50:25 crc kubenswrapper[4558]: I0120 17:50:25.802321 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:50:25 crc kubenswrapper[4558]: I0120 17:50:25.802563 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="f98db5df-2f7f-490b-b4a0-7e9f27b07a60" containerName="glance-log" containerID="cri-o://5f690c112c0735b8be4b3a6b80c6e2fc33c18a468c87d95a1c56b1f942029cc8" gracePeriod=30 Jan 20 17:50:25 crc kubenswrapper[4558]: I0120 17:50:25.802719 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="f98db5df-2f7f-490b-b4a0-7e9f27b07a60" containerName="glance-httpd" containerID="cri-o://37aadc76d1ea5916d8987f0d7d3856518a536ffe632acfd464eb225116abde18" gracePeriod=30 Jan 20 17:50:25 crc kubenswrapper[4558]: I0120 17:50:25.843260 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:50:25 crc kubenswrapper[4558]: I0120 17:50:25.843432 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/openstackclient" podUID="a0ddca73-7f97-4ec0-9a04-4686f84eb2e6" containerName="openstackclient" containerID="cri-o://13f594567a5a674fbc4ad2781a948fc685f50566f533b0772fdb8535db472a29" gracePeriod=2 Jan 20 17:50:25 crc kubenswrapper[4558]: I0120 17:50:25.867528 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/769afcb7-df8b-4d95-b662-9f5032227822-config-data\") pod \"barbican-worker-6c48cb9cdf-9688c\" (UID: \"769afcb7-df8b-4d95-b662-9f5032227822\") " pod="openstack-kuttl-tests/barbican-worker-6c48cb9cdf-9688c" Jan 20 17:50:25 crc kubenswrapper[4558]: I0120 17:50:25.867612 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/769afcb7-df8b-4d95-b662-9f5032227822-logs\") pod \"barbican-worker-6c48cb9cdf-9688c\" (UID: \"769afcb7-df8b-4d95-b662-9f5032227822\") " pod="openstack-kuttl-tests/barbican-worker-6c48cb9cdf-9688c" Jan 20 17:50:25 crc kubenswrapper[4558]: I0120 17:50:25.867642 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51c79956-4212-4d46-b3e9-e9d5e7a33c31-combined-ca-bundle\") pod \"barbican-keystone-listener-5bb5dd7cd4-w4jnj\" (UID: \"51c79956-4212-4d46-b3e9-e9d5e7a33c31\") " pod="openstack-kuttl-tests/barbican-keystone-listener-5bb5dd7cd4-w4jnj" Jan 20 17:50:25 crc kubenswrapper[4558]: I0120 17:50:25.867683 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-5bb5dd7cd4-w4jnj"] Jan 20 17:50:25 crc kubenswrapper[4558]: I0120 17:50:25.867718 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/51c79956-4212-4d46-b3e9-e9d5e7a33c31-logs\") pod \"barbican-keystone-listener-5bb5dd7cd4-w4jnj\" (UID: \"51c79956-4212-4d46-b3e9-e9d5e7a33c31\") " 
pod="openstack-kuttl-tests/barbican-keystone-listener-5bb5dd7cd4-w4jnj" Jan 20 17:50:25 crc kubenswrapper[4558]: I0120 17:50:25.867798 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/51c79956-4212-4d46-b3e9-e9d5e7a33c31-config-data\") pod \"barbican-keystone-listener-5bb5dd7cd4-w4jnj\" (UID: \"51c79956-4212-4d46-b3e9-e9d5e7a33c31\") " pod="openstack-kuttl-tests/barbican-keystone-listener-5bb5dd7cd4-w4jnj" Jan 20 17:50:25 crc kubenswrapper[4558]: I0120 17:50:25.867876 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bkszk\" (UniqueName: \"kubernetes.io/projected/51c79956-4212-4d46-b3e9-e9d5e7a33c31-kube-api-access-bkszk\") pod \"barbican-keystone-listener-5bb5dd7cd4-w4jnj\" (UID: \"51c79956-4212-4d46-b3e9-e9d5e7a33c31\") " pod="openstack-kuttl-tests/barbican-keystone-listener-5bb5dd7cd4-w4jnj" Jan 20 17:50:25 crc kubenswrapper[4558]: I0120 17:50:25.867912 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/51c79956-4212-4d46-b3e9-e9d5e7a33c31-config-data-custom\") pod \"barbican-keystone-listener-5bb5dd7cd4-w4jnj\" (UID: \"51c79956-4212-4d46-b3e9-e9d5e7a33c31\") " pod="openstack-kuttl-tests/barbican-keystone-listener-5bb5dd7cd4-w4jnj" Jan 20 17:50:25 crc kubenswrapper[4558]: I0120 17:50:25.867964 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/769afcb7-df8b-4d95-b662-9f5032227822-config-data-custom\") pod \"barbican-worker-6c48cb9cdf-9688c\" (UID: \"769afcb7-df8b-4d95-b662-9f5032227822\") " pod="openstack-kuttl-tests/barbican-worker-6c48cb9cdf-9688c" Jan 20 17:50:25 crc kubenswrapper[4558]: I0120 17:50:25.867991 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/769afcb7-df8b-4d95-b662-9f5032227822-combined-ca-bundle\") pod \"barbican-worker-6c48cb9cdf-9688c\" (UID: \"769afcb7-df8b-4d95-b662-9f5032227822\") " pod="openstack-kuttl-tests/barbican-worker-6c48cb9cdf-9688c" Jan 20 17:50:25 crc kubenswrapper[4558]: I0120 17:50:25.868013 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zkpbk\" (UniqueName: \"kubernetes.io/projected/769afcb7-df8b-4d95-b662-9f5032227822-kube-api-access-zkpbk\") pod \"barbican-worker-6c48cb9cdf-9688c\" (UID: \"769afcb7-df8b-4d95-b662-9f5032227822\") " pod="openstack-kuttl-tests/barbican-worker-6c48cb9cdf-9688c" Jan 20 17:50:25 crc kubenswrapper[4558]: I0120 17:50:25.868111 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/769afcb7-df8b-4d95-b662-9f5032227822-logs\") pod \"barbican-worker-6c48cb9cdf-9688c\" (UID: \"769afcb7-df8b-4d95-b662-9f5032227822\") " pod="openstack-kuttl-tests/barbican-worker-6c48cb9cdf-9688c" Jan 20 17:50:25 crc kubenswrapper[4558]: I0120 17:50:25.875239 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/769afcb7-df8b-4d95-b662-9f5032227822-config-data\") pod \"barbican-worker-6c48cb9cdf-9688c\" (UID: \"769afcb7-df8b-4d95-b662-9f5032227822\") " pod="openstack-kuttl-tests/barbican-worker-6c48cb9cdf-9688c" Jan 20 17:50:25 crc kubenswrapper[4558]: I0120 17:50:25.884866 4558 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/769afcb7-df8b-4d95-b662-9f5032227822-config-data-custom\") pod \"barbican-worker-6c48cb9cdf-9688c\" (UID: \"769afcb7-df8b-4d95-b662-9f5032227822\") " pod="openstack-kuttl-tests/barbican-worker-6c48cb9cdf-9688c" Jan 20 17:50:25 crc kubenswrapper[4558]: I0120 17:50:25.897400 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/769afcb7-df8b-4d95-b662-9f5032227822-combined-ca-bundle\") pod \"barbican-worker-6c48cb9cdf-9688c\" (UID: \"769afcb7-df8b-4d95-b662-9f5032227822\") " pod="openstack-kuttl-tests/barbican-worker-6c48cb9cdf-9688c" Jan 20 17:50:25 crc kubenswrapper[4558]: I0120 17:50:25.932667 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zkpbk\" (UniqueName: \"kubernetes.io/projected/769afcb7-df8b-4d95-b662-9f5032227822-kube-api-access-zkpbk\") pod \"barbican-worker-6c48cb9cdf-9688c\" (UID: \"769afcb7-df8b-4d95-b662-9f5032227822\") " pod="openstack-kuttl-tests/barbican-worker-6c48cb9cdf-9688c" Jan 20 17:50:25 crc kubenswrapper[4558]: I0120 17:50:25.940431 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:50:25 crc kubenswrapper[4558]: I0120 17:50:25.940692 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/memcached-0" podUID="7c3fe526-7107-4cc4-aca2-eb809f317c5c" containerName="memcached" containerID="cri-o://c8f37f1ccc0f6204e6d44935b746f6c6ede31b796dd74e70992f82e8f5ab72db" gracePeriod=30 Jan 20 17:50:25 crc kubenswrapper[4558]: I0120 17:50:25.971460 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51c79956-4212-4d46-b3e9-e9d5e7a33c31-combined-ca-bundle\") pod \"barbican-keystone-listener-5bb5dd7cd4-w4jnj\" (UID: \"51c79956-4212-4d46-b3e9-e9d5e7a33c31\") " pod="openstack-kuttl-tests/barbican-keystone-listener-5bb5dd7cd4-w4jnj" Jan 20 17:50:25 crc kubenswrapper[4558]: I0120 17:50:25.971544 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/51c79956-4212-4d46-b3e9-e9d5e7a33c31-logs\") pod \"barbican-keystone-listener-5bb5dd7cd4-w4jnj\" (UID: \"51c79956-4212-4d46-b3e9-e9d5e7a33c31\") " pod="openstack-kuttl-tests/barbican-keystone-listener-5bb5dd7cd4-w4jnj" Jan 20 17:50:25 crc kubenswrapper[4558]: I0120 17:50:25.971609 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/51c79956-4212-4d46-b3e9-e9d5e7a33c31-config-data\") pod \"barbican-keystone-listener-5bb5dd7cd4-w4jnj\" (UID: \"51c79956-4212-4d46-b3e9-e9d5e7a33c31\") " pod="openstack-kuttl-tests/barbican-keystone-listener-5bb5dd7cd4-w4jnj" Jan 20 17:50:25 crc kubenswrapper[4558]: I0120 17:50:25.971672 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bkszk\" (UniqueName: \"kubernetes.io/projected/51c79956-4212-4d46-b3e9-e9d5e7a33c31-kube-api-access-bkszk\") pod \"barbican-keystone-listener-5bb5dd7cd4-w4jnj\" (UID: \"51c79956-4212-4d46-b3e9-e9d5e7a33c31\") " pod="openstack-kuttl-tests/barbican-keystone-listener-5bb5dd7cd4-w4jnj" Jan 20 17:50:25 crc kubenswrapper[4558]: I0120 17:50:25.971701 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/51c79956-4212-4d46-b3e9-e9d5e7a33c31-config-data-custom\") pod \"barbican-keystone-listener-5bb5dd7cd4-w4jnj\" (UID: \"51c79956-4212-4d46-b3e9-e9d5e7a33c31\") " pod="openstack-kuttl-tests/barbican-keystone-listener-5bb5dd7cd4-w4jnj" Jan 20 17:50:25 crc kubenswrapper[4558]: I0120 17:50:25.972411 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/51c79956-4212-4d46-b3e9-e9d5e7a33c31-logs\") pod \"barbican-keystone-listener-5bb5dd7cd4-w4jnj\" (UID: \"51c79956-4212-4d46-b3e9-e9d5e7a33c31\") " pod="openstack-kuttl-tests/barbican-keystone-listener-5bb5dd7cd4-w4jnj" Jan 20 17:50:25 crc kubenswrapper[4558]: I0120 17:50:25.984709 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51c79956-4212-4d46-b3e9-e9d5e7a33c31-combined-ca-bundle\") pod \"barbican-keystone-listener-5bb5dd7cd4-w4jnj\" (UID: \"51c79956-4212-4d46-b3e9-e9d5e7a33c31\") " pod="openstack-kuttl-tests/barbican-keystone-listener-5bb5dd7cd4-w4jnj" Jan 20 17:50:25 crc kubenswrapper[4558]: I0120 17:50:25.985954 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/51c79956-4212-4d46-b3e9-e9d5e7a33c31-config-data-custom\") pod \"barbican-keystone-listener-5bb5dd7cd4-w4jnj\" (UID: \"51c79956-4212-4d46-b3e9-e9d5e7a33c31\") " pod="openstack-kuttl-tests/barbican-keystone-listener-5bb5dd7cd4-w4jnj" Jan 20 17:50:25 crc kubenswrapper[4558]: I0120 17:50:25.996236 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.000041 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/51c79956-4212-4d46-b3e9-e9d5e7a33c31-config-data\") pod \"barbican-keystone-listener-5bb5dd7cd4-w4jnj\" (UID: \"51c79956-4212-4d46-b3e9-e9d5e7a33c31\") " pod="openstack-kuttl-tests/barbican-keystone-listener-5bb5dd7cd4-w4jnj" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.070140 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.070342 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-6c48cb9cdf-9688c" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.070395 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="3c44870c-4ab7-436a-9862-d6a2a4487bed" containerName="nova-cell0-conductor-conductor" containerID="cri-o://7ab0ab6375e86fd4837f32d21526ca8433f782d8e6f806a0848eab5773b75fbd" gracePeriod=30 Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.078999 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovsdbserver-nb-0" podUID="3f090098-7927-4f68-b49d-4cea135a041c" containerName="ovsdbserver-nb" containerID="cri-o://fa3e63d15818b7accc78e0355d0414299134969d91a4885ca57ff950ae494f7e" gracePeriod=300 Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.086634 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bkszk\" (UniqueName: \"kubernetes.io/projected/51c79956-4212-4d46-b3e9-e9d5e7a33c31-kube-api-access-bkszk\") pod \"barbican-keystone-listener-5bb5dd7cd4-w4jnj\" (UID: \"51c79956-4212-4d46-b3e9-e9d5e7a33c31\") " pod="openstack-kuttl-tests/barbican-keystone-listener-5bb5dd7cd4-w4jnj" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.114196 4558 generic.go:334] "Generic (PLEG): container finished" podID="28febf86-c729-498e-b3af-0321468d80f3" containerID="8e218874514f86011edef030a16090a4d678fd4bb3ef20d8a141af1af291d2c9" exitCode=143 Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.114268 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"28febf86-c729-498e-b3af-0321468d80f3","Type":"ContainerDied","Data":"8e218874514f86011edef030a16090a4d678fd4bb3ef20d8a141af1af291d2c9"} Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.116121 4558 generic.go:334] "Generic (PLEG): container finished" podID="f98db5df-2f7f-490b-b4a0-7e9f27b07a60" containerID="5f690c112c0735b8be4b3a6b80c6e2fc33c18a468c87d95a1c56b1f942029cc8" exitCode=143 Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.116150 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"f98db5df-2f7f-490b-b4a0-7e9f27b07a60","Type":"ContainerDied","Data":"5f690c112c0735b8be4b3a6b80c6e2fc33c18a468c87d95a1c56b1f942029cc8"} Jan 20 17:50:26 crc kubenswrapper[4558]: E0120 17:50:26.305199 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="9b54f5f4a254eb9083539aa947157b34dfeea7934f5c2b37060ecf2e8e7a304a" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Jan 20 17:50:26 crc kubenswrapper[4558]: E0120 17:50:26.310607 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="9b54f5f4a254eb9083539aa947157b34dfeea7934f5c2b37060ecf2e8e7a304a" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.311655 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.311922 4558 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack-kuttl-tests/nova-metadata-0" podUID="7d2f1782-aa1c-4382-b682-27ce8f37d139" containerName="nova-metadata-log" containerID="cri-o://c66c2bcafd61422cbe12474d972cdfbe2b8064d7acd5baa2defb02dbaf1a697b" gracePeriod=30 Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.312658 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="7d2f1782-aa1c-4382-b682-27ce8f37d139" containerName="nova-metadata-metadata" containerID="cri-o://fdc46115e0c01656230d526da5789da8cef6a6ad4afd973eaf326c530afd94ab" gracePeriod=30 Jan 20 17:50:26 crc kubenswrapper[4558]: E0120 17:50:26.360538 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="9b54f5f4a254eb9083539aa947157b34dfeea7934f5c2b37060ecf2e8e7a304a" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Jan 20 17:50:26 crc kubenswrapper[4558]: E0120 17:50:26.360665 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/ovn-northd-0" podUID="6dae3a04-db9e-48a2-bbe2-012c9ba55890" containerName="ovn-northd" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.389207 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.389412 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-scheduler-0" podUID="d0f82b75-bed6-4779-ad55-6bec50d7faa6" containerName="cinder-scheduler" containerID="cri-o://a612c466a2d2fa8c608454134a7b85c4368226b9c8b498cce072561130074559" gracePeriod=30 Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.389662 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-scheduler-0" podUID="d0f82b75-bed6-4779-ad55-6bec50d7faa6" containerName="probe" containerID="cri-o://7fb1cfce15f7b21b933180ce2945a9ba95c867c596b50488373fe0546cb8f714" gracePeriod=30 Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.389845 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-5bb5dd7cd4-w4jnj" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.412683 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.412929 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="b13b98ea-20eb-452f-9316-a7acdeb18406" containerName="nova-scheduler-scheduler" containerID="cri-o://bccb9b380d5d7d133e1f9deb550275ef3efade0d9df8c3d33db8b87c1a6fd80b" gracePeriod=30 Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.422628 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.422967 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-api-0" podUID="47de199d-0dde-4082-8b6c-99f3d202608b" containerName="cinder-api-log" containerID="cri-o://b497e638b4894f1c81e1773af34e888736bf132c67bc13eb676c4be84dea7235" gracePeriod=30 Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.423152 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-api-0" podUID="47de199d-0dde-4082-8b6c-99f3d202608b" containerName="cinder-api" containerID="cri-o://1644d78283b3c3dad2b68cd9a4d16b7b8c04de0ba6bfcedb6b4c04ac6f6297de" gracePeriod=30 Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.438712 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/placement-9cbb9c58b-jcwzc"] Jan 20 17:50:26 crc kubenswrapper[4558]: E0120 17:50:26.439197 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0ddca73-7f97-4ec0-9a04-4686f84eb2e6" containerName="openstackclient" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.439216 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0ddca73-7f97-4ec0-9a04-4686f84eb2e6" containerName="openstackclient" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.439702 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a0ddca73-7f97-4ec0-9a04-4686f84eb2e6" containerName="openstackclient" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.440848 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-9cbb9c58b-jcwzc" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.470724 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.471011 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd" containerName="glance-log" containerID="cri-o://212a4fd973edb30677b4248e267dd6b57d185e117261fb43b7e48e9267a561a9" gracePeriod=30 Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.471080 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd" containerName="glance-httpd" containerID="cri-o://3216cb4a892da431a3d001352506b8205d9cde6cf6808df51a45283abdf32abc" gracePeriod=30 Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.492250 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-9cbb9c58b-jcwzc"] Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.509330 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/38ce22a3-0b53-417f-ab78-77a3be2da04f-config-data\") pod \"placement-9cbb9c58b-jcwzc\" (UID: \"38ce22a3-0b53-417f-ab78-77a3be2da04f\") " pod="openstack-kuttl-tests/placement-9cbb9c58b-jcwzc" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.509390 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/38ce22a3-0b53-417f-ab78-77a3be2da04f-scripts\") pod \"placement-9cbb9c58b-jcwzc\" (UID: \"38ce22a3-0b53-417f-ab78-77a3be2da04f\") " pod="openstack-kuttl-tests/placement-9cbb9c58b-jcwzc" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.509421 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ztk6g\" (UniqueName: \"kubernetes.io/projected/38ce22a3-0b53-417f-ab78-77a3be2da04f-kube-api-access-ztk6g\") pod \"placement-9cbb9c58b-jcwzc\" (UID: \"38ce22a3-0b53-417f-ab78-77a3be2da04f\") " pod="openstack-kuttl-tests/placement-9cbb9c58b-jcwzc" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.509450 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/38ce22a3-0b53-417f-ab78-77a3be2da04f-public-tls-certs\") pod \"placement-9cbb9c58b-jcwzc\" (UID: \"38ce22a3-0b53-417f-ab78-77a3be2da04f\") " pod="openstack-kuttl-tests/placement-9cbb9c58b-jcwzc" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.509483 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38ce22a3-0b53-417f-ab78-77a3be2da04f-combined-ca-bundle\") pod \"placement-9cbb9c58b-jcwzc\" (UID: \"38ce22a3-0b53-417f-ab78-77a3be2da04f\") " pod="openstack-kuttl-tests/placement-9cbb9c58b-jcwzc" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.509635 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/38ce22a3-0b53-417f-ab78-77a3be2da04f-internal-tls-certs\") pod \"placement-9cbb9c58b-jcwzc\" 
(UID: \"38ce22a3-0b53-417f-ab78-77a3be2da04f\") " pod="openstack-kuttl-tests/placement-9cbb9c58b-jcwzc" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.509710 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/38ce22a3-0b53-417f-ab78-77a3be2da04f-logs\") pod \"placement-9cbb9c58b-jcwzc\" (UID: \"38ce22a3-0b53-417f-ab78-77a3be2da04f\") " pod="openstack-kuttl-tests/placement-9cbb9c58b-jcwzc" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.510585 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.530863 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.530912 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-api-5968cb86f6-ctfg5"] Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.532571 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-api-5968cb86f6-ctfg5" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.533115 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" podUID="6254cefa-8c4a-472e-9faf-4633c6d1618e" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://5c8b0e075d6b80a58c44316d67ccf05a22f59a72ae3bca7872921f2e03c311bf" gracePeriod=30 Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.556564 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-9795586d8-4dv95"] Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.561657 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-9795586d8-4dv95" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.588604 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-7b9876b66d-znq49"] Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.590327 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-7b9876b66d-znq49" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.600373 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-api-5968cb86f6-ctfg5"] Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.615731 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/38ce22a3-0b53-417f-ab78-77a3be2da04f-public-tls-certs\") pod \"placement-9cbb9c58b-jcwzc\" (UID: \"38ce22a3-0b53-417f-ab78-77a3be2da04f\") " pod="openstack-kuttl-tests/placement-9cbb9c58b-jcwzc" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.615775 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-config\") pod \"neutron-7b9876b66d-znq49\" (UID: \"7458929c-bc4e-4f17-b199-4963b298f4e8\") " pod="openstack-kuttl-tests/neutron-7b9876b66d-znq49" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.615825 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38ce22a3-0b53-417f-ab78-77a3be2da04f-combined-ca-bundle\") pod \"placement-9cbb9c58b-jcwzc\" (UID: \"38ce22a3-0b53-417f-ab78-77a3be2da04f\") " pod="openstack-kuttl-tests/placement-9cbb9c58b-jcwzc" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.615842 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-public-tls-certs\") pod \"neutron-7b9876b66d-znq49\" (UID: \"7458929c-bc4e-4f17-b199-4963b298f4e8\") " pod="openstack-kuttl-tests/neutron-7b9876b66d-znq49" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.615863 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-ovndb-tls-certs\") pod \"neutron-7b9876b66d-znq49\" (UID: \"7458929c-bc4e-4f17-b199-4963b298f4e8\") " pod="openstack-kuttl-tests/neutron-7b9876b66d-znq49" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.615942 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-internal-tls-certs\") pod \"neutron-7b9876b66d-znq49\" (UID: \"7458929c-bc4e-4f17-b199-4963b298f4e8\") " pod="openstack-kuttl-tests/neutron-7b9876b66d-znq49" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.615972 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d127fb40-6eff-4ccd-922c-b209ac4edbda-logs\") pod \"barbican-api-5968cb86f6-ctfg5\" (UID: \"d127fb40-6eff-4ccd-922c-b209ac4edbda\") " pod="openstack-kuttl-tests/barbican-api-5968cb86f6-ctfg5" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.615999 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-combined-ca-bundle\") pod \"neutron-7b9876b66d-znq49\" (UID: \"7458929c-bc4e-4f17-b199-4963b298f4e8\") " pod="openstack-kuttl-tests/neutron-7b9876b66d-znq49" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 
17:50:26.616020 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d127fb40-6eff-4ccd-922c-b209ac4edbda-config-data-custom\") pod \"barbican-api-5968cb86f6-ctfg5\" (UID: \"d127fb40-6eff-4ccd-922c-b209ac4edbda\") " pod="openstack-kuttl-tests/barbican-api-5968cb86f6-ctfg5" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.616047 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/0a3695d2-9733-430d-a489-694e9e97b586-credential-keys\") pod \"keystone-9795586d8-4dv95\" (UID: \"0a3695d2-9733-430d-a489-694e9e97b586\") " pod="openstack-kuttl-tests/keystone-9795586d8-4dv95" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.616065 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d127fb40-6eff-4ccd-922c-b209ac4edbda-internal-tls-certs\") pod \"barbican-api-5968cb86f6-ctfg5\" (UID: \"d127fb40-6eff-4ccd-922c-b209ac4edbda\") " pod="openstack-kuttl-tests/barbican-api-5968cb86f6-ctfg5" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.616093 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d127fb40-6eff-4ccd-922c-b209ac4edbda-combined-ca-bundle\") pod \"barbican-api-5968cb86f6-ctfg5\" (UID: \"d127fb40-6eff-4ccd-922c-b209ac4edbda\") " pod="openstack-kuttl-tests/barbican-api-5968cb86f6-ctfg5" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.616113 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a3695d2-9733-430d-a489-694e9e97b586-combined-ca-bundle\") pod \"keystone-9795586d8-4dv95\" (UID: \"0a3695d2-9733-430d-a489-694e9e97b586\") " pod="openstack-kuttl-tests/keystone-9795586d8-4dv95" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.616138 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kbnl5\" (UniqueName: \"kubernetes.io/projected/d127fb40-6eff-4ccd-922c-b209ac4edbda-kube-api-access-kbnl5\") pod \"barbican-api-5968cb86f6-ctfg5\" (UID: \"d127fb40-6eff-4ccd-922c-b209ac4edbda\") " pod="openstack-kuttl-tests/barbican-api-5968cb86f6-ctfg5" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.616156 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0a3695d2-9733-430d-a489-694e9e97b586-config-data\") pod \"keystone-9795586d8-4dv95\" (UID: \"0a3695d2-9733-430d-a489-694e9e97b586\") " pod="openstack-kuttl-tests/keystone-9795586d8-4dv95" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.616231 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/38ce22a3-0b53-417f-ab78-77a3be2da04f-internal-tls-certs\") pod \"placement-9cbb9c58b-jcwzc\" (UID: \"38ce22a3-0b53-417f-ab78-77a3be2da04f\") " pod="openstack-kuttl-tests/placement-9cbb9c58b-jcwzc" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.616275 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: 
\"kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-httpd-config\") pod \"neutron-7b9876b66d-znq49\" (UID: \"7458929c-bc4e-4f17-b199-4963b298f4e8\") " pod="openstack-kuttl-tests/neutron-7b9876b66d-znq49" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.616302 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x78ng\" (UniqueName: \"kubernetes.io/projected/7458929c-bc4e-4f17-b199-4963b298f4e8-kube-api-access-x78ng\") pod \"neutron-7b9876b66d-znq49\" (UID: \"7458929c-bc4e-4f17-b199-4963b298f4e8\") " pod="openstack-kuttl-tests/neutron-7b9876b66d-znq49" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.616368 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/38ce22a3-0b53-417f-ab78-77a3be2da04f-logs\") pod \"placement-9cbb9c58b-jcwzc\" (UID: \"38ce22a3-0b53-417f-ab78-77a3be2da04f\") " pod="openstack-kuttl-tests/placement-9cbb9c58b-jcwzc" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.616398 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jspp2\" (UniqueName: \"kubernetes.io/projected/0a3695d2-9733-430d-a489-694e9e97b586-kube-api-access-jspp2\") pod \"keystone-9795586d8-4dv95\" (UID: \"0a3695d2-9733-430d-a489-694e9e97b586\") " pod="openstack-kuttl-tests/keystone-9795586d8-4dv95" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.616423 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/38ce22a3-0b53-417f-ab78-77a3be2da04f-config-data\") pod \"placement-9cbb9c58b-jcwzc\" (UID: \"38ce22a3-0b53-417f-ab78-77a3be2da04f\") " pod="openstack-kuttl-tests/placement-9cbb9c58b-jcwzc" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.616436 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0a3695d2-9733-430d-a489-694e9e97b586-public-tls-certs\") pod \"keystone-9795586d8-4dv95\" (UID: \"0a3695d2-9733-430d-a489-694e9e97b586\") " pod="openstack-kuttl-tests/keystone-9795586d8-4dv95" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.616456 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d127fb40-6eff-4ccd-922c-b209ac4edbda-public-tls-certs\") pod \"barbican-api-5968cb86f6-ctfg5\" (UID: \"d127fb40-6eff-4ccd-922c-b209ac4edbda\") " pod="openstack-kuttl-tests/barbican-api-5968cb86f6-ctfg5" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.616475 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0a3695d2-9733-430d-a489-694e9e97b586-scripts\") pod \"keystone-9795586d8-4dv95\" (UID: \"0a3695d2-9733-430d-a489-694e9e97b586\") " pod="openstack-kuttl-tests/keystone-9795586d8-4dv95" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.616492 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d127fb40-6eff-4ccd-922c-b209ac4edbda-config-data\") pod \"barbican-api-5968cb86f6-ctfg5\" (UID: \"d127fb40-6eff-4ccd-922c-b209ac4edbda\") " pod="openstack-kuttl-tests/barbican-api-5968cb86f6-ctfg5" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.616515 4558 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0a3695d2-9733-430d-a489-694e9e97b586-fernet-keys\") pod \"keystone-9795586d8-4dv95\" (UID: \"0a3695d2-9733-430d-a489-694e9e97b586\") " pod="openstack-kuttl-tests/keystone-9795586d8-4dv95" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.616534 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0a3695d2-9733-430d-a489-694e9e97b586-internal-tls-certs\") pod \"keystone-9795586d8-4dv95\" (UID: \"0a3695d2-9733-430d-a489-694e9e97b586\") " pod="openstack-kuttl-tests/keystone-9795586d8-4dv95" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.616552 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/38ce22a3-0b53-417f-ab78-77a3be2da04f-scripts\") pod \"placement-9cbb9c58b-jcwzc\" (UID: \"38ce22a3-0b53-417f-ab78-77a3be2da04f\") " pod="openstack-kuttl-tests/placement-9cbb9c58b-jcwzc" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.616583 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ztk6g\" (UniqueName: \"kubernetes.io/projected/38ce22a3-0b53-417f-ab78-77a3be2da04f-kube-api-access-ztk6g\") pod \"placement-9cbb9c58b-jcwzc\" (UID: \"38ce22a3-0b53-417f-ab78-77a3be2da04f\") " pod="openstack-kuttl-tests/placement-9cbb9c58b-jcwzc" Jan 20 17:50:26 crc kubenswrapper[4558]: E0120 17:50:26.617264 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-placement-public-svc: secret "cert-placement-public-svc" not found Jan 20 17:50:26 crc kubenswrapper[4558]: E0120 17:50:26.617314 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/38ce22a3-0b53-417f-ab78-77a3be2da04f-public-tls-certs podName:38ce22a3-0b53-417f-ab78-77a3be2da04f nodeName:}" failed. No retries permitted until 2026-01-20 17:50:27.117298092 +0000 UTC m=+4120.877636059 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/38ce22a3-0b53-417f-ab78-77a3be2da04f-public-tls-certs") pod "placement-9cbb9c58b-jcwzc" (UID: "38ce22a3-0b53-417f-ab78-77a3be2da04f") : secret "cert-placement-public-svc" not found Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.619341 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-7b9876b66d-znq49"] Jan 20 17:50:26 crc kubenswrapper[4558]: E0120 17:50:26.620138 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-placement-internal-svc: secret "cert-placement-internal-svc" not found Jan 20 17:50:26 crc kubenswrapper[4558]: E0120 17:50:26.620206 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/38ce22a3-0b53-417f-ab78-77a3be2da04f-internal-tls-certs podName:38ce22a3-0b53-417f-ab78-77a3be2da04f nodeName:}" failed. No retries permitted until 2026-01-20 17:50:27.120188938 +0000 UTC m=+4120.880526905 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/38ce22a3-0b53-417f-ab78-77a3be2da04f-internal-tls-certs") pod "placement-9cbb9c58b-jcwzc" (UID: "38ce22a3-0b53-417f-ab78-77a3be2da04f") : secret "cert-placement-internal-svc" not found Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.620949 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/38ce22a3-0b53-417f-ab78-77a3be2da04f-logs\") pod \"placement-9cbb9c58b-jcwzc\" (UID: \"38ce22a3-0b53-417f-ab78-77a3be2da04f\") " pod="openstack-kuttl-tests/placement-9cbb9c58b-jcwzc" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.624796 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38ce22a3-0b53-417f-ab78-77a3be2da04f-combined-ca-bundle\") pod \"placement-9cbb9c58b-jcwzc\" (UID: \"38ce22a3-0b53-417f-ab78-77a3be2da04f\") " pod="openstack-kuttl-tests/placement-9cbb9c58b-jcwzc" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.632598 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/38ce22a3-0b53-417f-ab78-77a3be2da04f-config-data\") pod \"placement-9cbb9c58b-jcwzc\" (UID: \"38ce22a3-0b53-417f-ab78-77a3be2da04f\") " pod="openstack-kuttl-tests/placement-9cbb9c58b-jcwzc" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.632684 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/38ce22a3-0b53-417f-ab78-77a3be2da04f-scripts\") pod \"placement-9cbb9c58b-jcwzc\" (UID: \"38ce22a3-0b53-417f-ab78-77a3be2da04f\") " pod="openstack-kuttl-tests/placement-9cbb9c58b-jcwzc" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.638624 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ztk6g\" (UniqueName: \"kubernetes.io/projected/38ce22a3-0b53-417f-ab78-77a3be2da04f-kube-api-access-ztk6g\") pod \"placement-9cbb9c58b-jcwzc\" (UID: \"38ce22a3-0b53-417f-ab78-77a3be2da04f\") " pod="openstack-kuttl-tests/placement-9cbb9c58b-jcwzc" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.641130 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-9795586d8-4dv95"] Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.657881 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.658687 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovsdbserver-sb-0" podUID="9330efb9-cbc2-4c4a-9928-eedf93324d57" containerName="openstack-network-exporter" containerID="cri-o://7f96a30bdd54fd7b066425edfff74600fb47af1d01a01f1f5b5a15ce2844ce91" gracePeriod=300 Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.705787 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/openstack-galera-0" podUID="c43f6123-71d1-4cf0-b919-00b1e9836c8b" containerName="galera" containerID="cri-o://df22f4721c8d074f5b610fec573fca752b358557510a94a96c91992dbb2d68ef" gracePeriod=30 Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.718680 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jspp2\" (UniqueName: \"kubernetes.io/projected/0a3695d2-9733-430d-a489-694e9e97b586-kube-api-access-jspp2\") pod 
\"keystone-9795586d8-4dv95\" (UID: \"0a3695d2-9733-430d-a489-694e9e97b586\") " pod="openstack-kuttl-tests/keystone-9795586d8-4dv95" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.718740 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0a3695d2-9733-430d-a489-694e9e97b586-public-tls-certs\") pod \"keystone-9795586d8-4dv95\" (UID: \"0a3695d2-9733-430d-a489-694e9e97b586\") " pod="openstack-kuttl-tests/keystone-9795586d8-4dv95" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.718766 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d127fb40-6eff-4ccd-922c-b209ac4edbda-public-tls-certs\") pod \"barbican-api-5968cb86f6-ctfg5\" (UID: \"d127fb40-6eff-4ccd-922c-b209ac4edbda\") " pod="openstack-kuttl-tests/barbican-api-5968cb86f6-ctfg5" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.718792 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0a3695d2-9733-430d-a489-694e9e97b586-scripts\") pod \"keystone-9795586d8-4dv95\" (UID: \"0a3695d2-9733-430d-a489-694e9e97b586\") " pod="openstack-kuttl-tests/keystone-9795586d8-4dv95" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.718816 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d127fb40-6eff-4ccd-922c-b209ac4edbda-config-data\") pod \"barbican-api-5968cb86f6-ctfg5\" (UID: \"d127fb40-6eff-4ccd-922c-b209ac4edbda\") " pod="openstack-kuttl-tests/barbican-api-5968cb86f6-ctfg5" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.718841 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0a3695d2-9733-430d-a489-694e9e97b586-fernet-keys\") pod \"keystone-9795586d8-4dv95\" (UID: \"0a3695d2-9733-430d-a489-694e9e97b586\") " pod="openstack-kuttl-tests/keystone-9795586d8-4dv95" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.718864 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0a3695d2-9733-430d-a489-694e9e97b586-internal-tls-certs\") pod \"keystone-9795586d8-4dv95\" (UID: \"0a3695d2-9733-430d-a489-694e9e97b586\") " pod="openstack-kuttl-tests/keystone-9795586d8-4dv95" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.718932 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-config\") pod \"neutron-7b9876b66d-znq49\" (UID: \"7458929c-bc4e-4f17-b199-4963b298f4e8\") " pod="openstack-kuttl-tests/neutron-7b9876b66d-znq49" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.718991 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-public-tls-certs\") pod \"neutron-7b9876b66d-znq49\" (UID: \"7458929c-bc4e-4f17-b199-4963b298f4e8\") " pod="openstack-kuttl-tests/neutron-7b9876b66d-znq49" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.719020 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-ovndb-tls-certs\") pod \"neutron-7b9876b66d-znq49\" (UID: 
\"7458929c-bc4e-4f17-b199-4963b298f4e8\") " pod="openstack-kuttl-tests/neutron-7b9876b66d-znq49" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.719109 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-internal-tls-certs\") pod \"neutron-7b9876b66d-znq49\" (UID: \"7458929c-bc4e-4f17-b199-4963b298f4e8\") " pod="openstack-kuttl-tests/neutron-7b9876b66d-znq49" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.719143 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d127fb40-6eff-4ccd-922c-b209ac4edbda-logs\") pod \"barbican-api-5968cb86f6-ctfg5\" (UID: \"d127fb40-6eff-4ccd-922c-b209ac4edbda\") " pod="openstack-kuttl-tests/barbican-api-5968cb86f6-ctfg5" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.719185 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-combined-ca-bundle\") pod \"neutron-7b9876b66d-znq49\" (UID: \"7458929c-bc4e-4f17-b199-4963b298f4e8\") " pod="openstack-kuttl-tests/neutron-7b9876b66d-znq49" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.719209 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d127fb40-6eff-4ccd-922c-b209ac4edbda-config-data-custom\") pod \"barbican-api-5968cb86f6-ctfg5\" (UID: \"d127fb40-6eff-4ccd-922c-b209ac4edbda\") " pod="openstack-kuttl-tests/barbican-api-5968cb86f6-ctfg5" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.719240 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/0a3695d2-9733-430d-a489-694e9e97b586-credential-keys\") pod \"keystone-9795586d8-4dv95\" (UID: \"0a3695d2-9733-430d-a489-694e9e97b586\") " pod="openstack-kuttl-tests/keystone-9795586d8-4dv95" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.719258 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d127fb40-6eff-4ccd-922c-b209ac4edbda-internal-tls-certs\") pod \"barbican-api-5968cb86f6-ctfg5\" (UID: \"d127fb40-6eff-4ccd-922c-b209ac4edbda\") " pod="openstack-kuttl-tests/barbican-api-5968cb86f6-ctfg5" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.719279 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d127fb40-6eff-4ccd-922c-b209ac4edbda-combined-ca-bundle\") pod \"barbican-api-5968cb86f6-ctfg5\" (UID: \"d127fb40-6eff-4ccd-922c-b209ac4edbda\") " pod="openstack-kuttl-tests/barbican-api-5968cb86f6-ctfg5" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.719303 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a3695d2-9733-430d-a489-694e9e97b586-combined-ca-bundle\") pod \"keystone-9795586d8-4dv95\" (UID: \"0a3695d2-9733-430d-a489-694e9e97b586\") " pod="openstack-kuttl-tests/keystone-9795586d8-4dv95" Jan 20 17:50:26 crc kubenswrapper[4558]: E0120 17:50:26.719306 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-neutron-ovndbs: secret "cert-neutron-ovndbs" not found Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.719330 4558 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kbnl5\" (UniqueName: \"kubernetes.io/projected/d127fb40-6eff-4ccd-922c-b209ac4edbda-kube-api-access-kbnl5\") pod \"barbican-api-5968cb86f6-ctfg5\" (UID: \"d127fb40-6eff-4ccd-922c-b209ac4edbda\") " pod="openstack-kuttl-tests/barbican-api-5968cb86f6-ctfg5" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.719352 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0a3695d2-9733-430d-a489-694e9e97b586-config-data\") pod \"keystone-9795586d8-4dv95\" (UID: \"0a3695d2-9733-430d-a489-694e9e97b586\") " pod="openstack-kuttl-tests/keystone-9795586d8-4dv95" Jan 20 17:50:26 crc kubenswrapper[4558]: E0120 17:50:26.719371 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-ovndb-tls-certs podName:7458929c-bc4e-4f17-b199-4963b298f4e8 nodeName:}" failed. No retries permitted until 2026-01-20 17:50:27.219351601 +0000 UTC m=+4120.979689558 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "ovndb-tls-certs" (UniqueName: "kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-ovndb-tls-certs") pod "neutron-7b9876b66d-znq49" (UID: "7458929c-bc4e-4f17-b199-4963b298f4e8") : secret "cert-neutron-ovndbs" not found Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.719446 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-httpd-config\") pod \"neutron-7b9876b66d-znq49\" (UID: \"7458929c-bc4e-4f17-b199-4963b298f4e8\") " pod="openstack-kuttl-tests/neutron-7b9876b66d-znq49" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.719477 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x78ng\" (UniqueName: \"kubernetes.io/projected/7458929c-bc4e-4f17-b199-4963b298f4e8-kube-api-access-x78ng\") pod \"neutron-7b9876b66d-znq49\" (UID: \"7458929c-bc4e-4f17-b199-4963b298f4e8\") " pod="openstack-kuttl-tests/neutron-7b9876b66d-znq49" Jan 20 17:50:26 crc kubenswrapper[4558]: E0120 17:50:26.719613 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-keystone-internal-svc: secret "cert-keystone-internal-svc" not found Jan 20 17:50:26 crc kubenswrapper[4558]: E0120 17:50:26.719668 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0a3695d2-9733-430d-a489-694e9e97b586-internal-tls-certs podName:0a3695d2-9733-430d-a489-694e9e97b586 nodeName:}" failed. No retries permitted until 2026-01-20 17:50:27.219639793 +0000 UTC m=+4120.979977760 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/0a3695d2-9733-430d-a489-694e9e97b586-internal-tls-certs") pod "keystone-9795586d8-4dv95" (UID: "0a3695d2-9733-430d-a489-694e9e97b586") : secret "cert-keystone-internal-svc" not found Jan 20 17:50:26 crc kubenswrapper[4558]: E0120 17:50:26.719900 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-neutron-internal-svc: secret "cert-neutron-internal-svc" not found Jan 20 17:50:26 crc kubenswrapper[4558]: E0120 17:50:26.719945 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-internal-tls-certs podName:7458929c-bc4e-4f17-b199-4963b298f4e8 nodeName:}" failed. 
No retries permitted until 2026-01-20 17:50:27.219931381 +0000 UTC m=+4120.980269348 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-internal-tls-certs") pod "neutron-7b9876b66d-znq49" (UID: "7458929c-bc4e-4f17-b199-4963b298f4e8") : secret "cert-neutron-internal-svc" not found Jan 20 17:50:26 crc kubenswrapper[4558]: E0120 17:50:26.720131 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-barbican-public-svc: secret "cert-barbican-public-svc" not found Jan 20 17:50:26 crc kubenswrapper[4558]: E0120 17:50:26.720201 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d127fb40-6eff-4ccd-922c-b209ac4edbda-public-tls-certs podName:d127fb40-6eff-4ccd-922c-b209ac4edbda nodeName:}" failed. No retries permitted until 2026-01-20 17:50:27.220190509 +0000 UTC m=+4120.980528476 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/d127fb40-6eff-4ccd-922c-b209ac4edbda-public-tls-certs") pod "barbican-api-5968cb86f6-ctfg5" (UID: "d127fb40-6eff-4ccd-922c-b209ac4edbda") : secret "cert-barbican-public-svc" not found Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.720266 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d127fb40-6eff-4ccd-922c-b209ac4edbda-logs\") pod \"barbican-api-5968cb86f6-ctfg5\" (UID: \"d127fb40-6eff-4ccd-922c-b209ac4edbda\") " pod="openstack-kuttl-tests/barbican-api-5968cb86f6-ctfg5" Jan 20 17:50:26 crc kubenswrapper[4558]: E0120 17:50:26.720601 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-keystone-public-svc: secret "cert-keystone-public-svc" not found Jan 20 17:50:26 crc kubenswrapper[4558]: E0120 17:50:26.720649 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0a3695d2-9733-430d-a489-694e9e97b586-public-tls-certs podName:0a3695d2-9733-430d-a489-694e9e97b586 nodeName:}" failed. No retries permitted until 2026-01-20 17:50:27.220641507 +0000 UTC m=+4120.980979474 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/0a3695d2-9733-430d-a489-694e9e97b586-public-tls-certs") pod "keystone-9795586d8-4dv95" (UID: "0a3695d2-9733-430d-a489-694e9e97b586") : secret "cert-keystone-public-svc" not found Jan 20 17:50:26 crc kubenswrapper[4558]: E0120 17:50:26.721578 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-neutron-public-svc: secret "cert-neutron-public-svc" not found Jan 20 17:50:26 crc kubenswrapper[4558]: E0120 17:50:26.721634 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-public-tls-certs podName:7458929c-bc4e-4f17-b199-4963b298f4e8 nodeName:}" failed. No retries permitted until 2026-01-20 17:50:27.221626238 +0000 UTC m=+4120.981964205 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-public-tls-certs") pod "neutron-7b9876b66d-znq49" (UID: "7458929c-bc4e-4f17-b199-4963b298f4e8") : secret "cert-neutron-public-svc" not found Jan 20 17:50:26 crc kubenswrapper[4558]: E0120 17:50:26.725815 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-barbican-internal-svc: secret "cert-barbican-internal-svc" not found Jan 20 17:50:26 crc kubenswrapper[4558]: E0120 17:50:26.725858 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d127fb40-6eff-4ccd-922c-b209ac4edbda-internal-tls-certs podName:d127fb40-6eff-4ccd-922c-b209ac4edbda nodeName:}" failed. No retries permitted until 2026-01-20 17:50:27.225848277 +0000 UTC m=+4120.986186245 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/d127fb40-6eff-4ccd-922c-b209ac4edbda-internal-tls-certs") pod "barbican-api-5968cb86f6-ctfg5" (UID: "d127fb40-6eff-4ccd-922c-b209ac4edbda") : secret "cert-barbican-internal-svc" not found Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.726281 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-config\") pod \"neutron-7b9876b66d-znq49\" (UID: \"7458929c-bc4e-4f17-b199-4963b298f4e8\") " pod="openstack-kuttl-tests/neutron-7b9876b66d-znq49" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.726540 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/0a3695d2-9733-430d-a489-694e9e97b586-credential-keys\") pod \"keystone-9795586d8-4dv95\" (UID: \"0a3695d2-9733-430d-a489-694e9e97b586\") " pod="openstack-kuttl-tests/keystone-9795586d8-4dv95" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.728497 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d127fb40-6eff-4ccd-922c-b209ac4edbda-config-data\") pod \"barbican-api-5968cb86f6-ctfg5\" (UID: \"d127fb40-6eff-4ccd-922c-b209ac4edbda\") " pod="openstack-kuttl-tests/barbican-api-5968cb86f6-ctfg5" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.733070 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0a3695d2-9733-430d-a489-694e9e97b586-scripts\") pod \"keystone-9795586d8-4dv95\" (UID: \"0a3695d2-9733-430d-a489-694e9e97b586\") " pod="openstack-kuttl-tests/keystone-9795586d8-4dv95" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.733892 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d127fb40-6eff-4ccd-922c-b209ac4edbda-config-data-custom\") pod \"barbican-api-5968cb86f6-ctfg5\" (UID: \"d127fb40-6eff-4ccd-922c-b209ac4edbda\") " pod="openstack-kuttl-tests/barbican-api-5968cb86f6-ctfg5" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.735341 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-combined-ca-bundle\") pod \"neutron-7b9876b66d-znq49\" (UID: \"7458929c-bc4e-4f17-b199-4963b298f4e8\") " pod="openstack-kuttl-tests/neutron-7b9876b66d-znq49" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.735973 4558 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-x78ng\" (UniqueName: \"kubernetes.io/projected/7458929c-bc4e-4f17-b199-4963b298f4e8-kube-api-access-x78ng\") pod \"neutron-7b9876b66d-znq49\" (UID: \"7458929c-bc4e-4f17-b199-4963b298f4e8\") " pod="openstack-kuttl-tests/neutron-7b9876b66d-znq49" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.736250 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a3695d2-9733-430d-a489-694e9e97b586-combined-ca-bundle\") pod \"keystone-9795586d8-4dv95\" (UID: \"0a3695d2-9733-430d-a489-694e9e97b586\") " pod="openstack-kuttl-tests/keystone-9795586d8-4dv95" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.736869 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovsdbserver-sb-0" podUID="9330efb9-cbc2-4c4a-9928-eedf93324d57" containerName="ovsdbserver-sb" containerID="cri-o://d05ee219c0f8bccc7fa261cda35e2938c1b449a43b564bf9b569dafd93b0c9a7" gracePeriod=300 Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.737105 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0a3695d2-9733-430d-a489-694e9e97b586-fernet-keys\") pod \"keystone-9795586d8-4dv95\" (UID: \"0a3695d2-9733-430d-a489-694e9e97b586\") " pod="openstack-kuttl-tests/keystone-9795586d8-4dv95" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.737216 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-httpd-config\") pod \"neutron-7b9876b66d-znq49\" (UID: \"7458929c-bc4e-4f17-b199-4963b298f4e8\") " pod="openstack-kuttl-tests/neutron-7b9876b66d-znq49" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.737535 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jspp2\" (UniqueName: \"kubernetes.io/projected/0a3695d2-9733-430d-a489-694e9e97b586-kube-api-access-jspp2\") pod \"keystone-9795586d8-4dv95\" (UID: \"0a3695d2-9733-430d-a489-694e9e97b586\") " pod="openstack-kuttl-tests/keystone-9795586d8-4dv95" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.744378 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d127fb40-6eff-4ccd-922c-b209ac4edbda-combined-ca-bundle\") pod \"barbican-api-5968cb86f6-ctfg5\" (UID: \"d127fb40-6eff-4ccd-922c-b209ac4edbda\") " pod="openstack-kuttl-tests/barbican-api-5968cb86f6-ctfg5" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.744538 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0a3695d2-9733-430d-a489-694e9e97b586-config-data\") pod \"keystone-9795586d8-4dv95\" (UID: \"0a3695d2-9733-430d-a489-694e9e97b586\") " pod="openstack-kuttl-tests/keystone-9795586d8-4dv95" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.752137 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kbnl5\" (UniqueName: \"kubernetes.io/projected/d127fb40-6eff-4ccd-922c-b209ac4edbda-kube-api-access-kbnl5\") pod \"barbican-api-5968cb86f6-ctfg5\" (UID: \"d127fb40-6eff-4ccd-922c-b209ac4edbda\") " pod="openstack-kuttl-tests/barbican-api-5968cb86f6-ctfg5" Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.754081 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-worker-6c48cb9cdf-9688c"] 
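
Note on the "MountVolume.SetUp failed ... secret not found" errors in the surrounding entries: each of the newly added pods (placement, keystone, barbican-api, neutron) declares volumes sourced from TLS Secrets (cert-placement-public-svc, cert-keystone-internal-svc, cert-barbican-public-svc, and so on) that do not exist yet, so the kubelet fails the mount and schedules a retry with exponential backoff, visible here as durationBeforeRetry 500ms on the first attempt and 1s on the next. A minimal, illustrative pod-spec fragment for one such volume is sketched below; the volume name and secretName are taken from the log, while the container name and mountPath are assumptions, not read from the actual manifests:

    volumes:
      - name: public-tls-certs
        secret:
          secretName: cert-placement-public-svc   # mounts keep failing until this Secret exists
    containers:
      - name: placement-api                       # illustrative container name (assumption)
        volumeMounts:
          - name: public-tls-certs
            mountPath: /etc/pki/tls/certs         # assumed mount path
            readOnly: true

Once the referenced Secret is created, the next retry of MountVolume.SetUp succeeds and pod startup proceeds, as the later "MountVolume.SetUp succeeded" entries for the other secret-backed volumes show.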
Jan 20 17:50:26 crc kubenswrapper[4558]: I0120 17:50:26.927133 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-5bb5dd7cd4-w4jnj"] Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.025047 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-nb-0_3f090098-7927-4f68-b49d-4cea135a041c/ovsdbserver-nb/0.log" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.025124 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.135318 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7p5tp\" (UniqueName: \"kubernetes.io/projected/3f090098-7927-4f68-b49d-4cea135a041c-kube-api-access-7p5tp\") pod \"3f090098-7927-4f68-b49d-4cea135a041c\" (UID: \"3f090098-7927-4f68-b49d-4cea135a041c\") " Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.135374 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/3f090098-7927-4f68-b49d-4cea135a041c-ovsdbserver-nb-tls-certs\") pod \"3f090098-7927-4f68-b49d-4cea135a041c\" (UID: \"3f090098-7927-4f68-b49d-4cea135a041c\") " Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.135644 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-nb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"3f090098-7927-4f68-b49d-4cea135a041c\" (UID: \"3f090098-7927-4f68-b49d-4cea135a041c\") " Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.135717 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3f090098-7927-4f68-b49d-4cea135a041c-scripts\") pod \"3f090098-7927-4f68-b49d-4cea135a041c\" (UID: \"3f090098-7927-4f68-b49d-4cea135a041c\") " Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.135743 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/3f090098-7927-4f68-b49d-4cea135a041c-ovsdb-rundir\") pod \"3f090098-7927-4f68-b49d-4cea135a041c\" (UID: \"3f090098-7927-4f68-b49d-4cea135a041c\") " Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.135813 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f090098-7927-4f68-b49d-4cea135a041c-combined-ca-bundle\") pod \"3f090098-7927-4f68-b49d-4cea135a041c\" (UID: \"3f090098-7927-4f68-b49d-4cea135a041c\") " Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.136887 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3f090098-7927-4f68-b49d-4cea135a041c-config\") pod \"3f090098-7927-4f68-b49d-4cea135a041c\" (UID: \"3f090098-7927-4f68-b49d-4cea135a041c\") " Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.136970 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/3f090098-7927-4f68-b49d-4cea135a041c-metrics-certs-tls-certs\") pod \"3f090098-7927-4f68-b49d-4cea135a041c\" (UID: \"3f090098-7927-4f68-b49d-4cea135a041c\") " Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.137245 4558 operation_generator.go:803] UnmountVolume.TearDown 
succeeded for volume "kubernetes.io/configmap/3f090098-7927-4f68-b49d-4cea135a041c-scripts" (OuterVolumeSpecName: "scripts") pod "3f090098-7927-4f68-b49d-4cea135a041c" (UID: "3f090098-7927-4f68-b49d-4cea135a041c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.137556 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/38ce22a3-0b53-417f-ab78-77a3be2da04f-public-tls-certs\") pod \"placement-9cbb9c58b-jcwzc\" (UID: \"38ce22a3-0b53-417f-ab78-77a3be2da04f\") " pod="openstack-kuttl-tests/placement-9cbb9c58b-jcwzc" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.137732 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3f090098-7927-4f68-b49d-4cea135a041c-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "3f090098-7927-4f68-b49d-4cea135a041c" (UID: "3f090098-7927-4f68-b49d-4cea135a041c"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.137939 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/38ce22a3-0b53-417f-ab78-77a3be2da04f-internal-tls-certs\") pod \"placement-9cbb9c58b-jcwzc\" (UID: \"38ce22a3-0b53-417f-ab78-77a3be2da04f\") " pod="openstack-kuttl-tests/placement-9cbb9c58b-jcwzc" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.138058 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3f090098-7927-4f68-b49d-4cea135a041c-config" (OuterVolumeSpecName: "config") pod "3f090098-7927-4f68-b49d-4cea135a041c" (UID: "3f090098-7927-4f68-b49d-4cea135a041c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:50:27 crc kubenswrapper[4558]: E0120 17:50:27.138197 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-placement-internal-svc: secret "cert-placement-internal-svc" not found Jan 20 17:50:27 crc kubenswrapper[4558]: E0120 17:50:27.138260 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/38ce22a3-0b53-417f-ab78-77a3be2da04f-internal-tls-certs podName:38ce22a3-0b53-417f-ab78-77a3be2da04f nodeName:}" failed. No retries permitted until 2026-01-20 17:50:28.1382296 +0000 UTC m=+4121.898567566 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/38ce22a3-0b53-417f-ab78-77a3be2da04f-internal-tls-certs") pod "placement-9cbb9c58b-jcwzc" (UID: "38ce22a3-0b53-417f-ab78-77a3be2da04f") : secret "cert-placement-internal-svc" not found Jan 20 17:50:27 crc kubenswrapper[4558]: E0120 17:50:27.138644 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-placement-public-svc: secret "cert-placement-public-svc" not found Jan 20 17:50:27 crc kubenswrapper[4558]: E0120 17:50:27.138699 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/38ce22a3-0b53-417f-ab78-77a3be2da04f-public-tls-certs podName:38ce22a3-0b53-417f-ab78-77a3be2da04f nodeName:}" failed. No retries permitted until 2026-01-20 17:50:28.138689974 +0000 UTC m=+4121.899027942 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/38ce22a3-0b53-417f-ab78-77a3be2da04f-public-tls-certs") pod "placement-9cbb9c58b-jcwzc" (UID: "38ce22a3-0b53-417f-ab78-77a3be2da04f") : secret "cert-placement-public-svc" not found Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.138852 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3f090098-7927-4f68-b49d-4cea135a041c-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.138871 4558 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/3f090098-7927-4f68-b49d-4cea135a041c-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.138883 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3f090098-7927-4f68-b49d-4cea135a041c-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.139121 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-5bb5dd7cd4-w4jnj" event={"ID":"51c79956-4212-4d46-b3e9-e9d5e7a33c31","Type":"ContainerStarted","Data":"7a78a29587685c6b5db24fc2b2debe25c62d3bcff8bd1002751a66695746d8df"} Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.143608 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage02-crc" (OuterVolumeSpecName: "ovndbcluster-nb-etc-ovn") pod "3f090098-7927-4f68-b49d-4cea135a041c" (UID: "3f090098-7927-4f68-b49d-4cea135a041c"). InnerVolumeSpecName "local-storage02-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.149592 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3f090098-7927-4f68-b49d-4cea135a041c-kube-api-access-7p5tp" (OuterVolumeSpecName: "kube-api-access-7p5tp") pod "3f090098-7927-4f68-b49d-4cea135a041c" (UID: "3f090098-7927-4f68-b49d-4cea135a041c"). InnerVolumeSpecName "kube-api-access-7p5tp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.149994 4558 generic.go:334] "Generic (PLEG): container finished" podID="47de199d-0dde-4082-8b6c-99f3d202608b" containerID="b497e638b4894f1c81e1773af34e888736bf132c67bc13eb676c4be84dea7235" exitCode=143 Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.150050 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"47de199d-0dde-4082-8b6c-99f3d202608b","Type":"ContainerDied","Data":"b497e638b4894f1c81e1773af34e888736bf132c67bc13eb676c4be84dea7235"} Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.160473 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-6c48cb9cdf-9688c" event={"ID":"769afcb7-df8b-4d95-b662-9f5032227822","Type":"ContainerStarted","Data":"5416068b976c483e0b787504b470fa498322dc028d84dc1f963156c425dba675"} Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.177139 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-nb-0_3f090098-7927-4f68-b49d-4cea135a041c/ovsdbserver-nb/0.log" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.177334 4558 generic.go:334] "Generic (PLEG): container finished" podID="3f090098-7927-4f68-b49d-4cea135a041c" containerID="dbdf111aabc4bc0df2386a3517cbb41e1a844ddfad496d4e09801f1998ac0555" exitCode=2 Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.177409 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.177412 4558 generic.go:334] "Generic (PLEG): container finished" podID="3f090098-7927-4f68-b49d-4cea135a041c" containerID="fa3e63d15818b7accc78e0355d0414299134969d91a4885ca57ff950ae494f7e" exitCode=143 Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.177373 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"3f090098-7927-4f68-b49d-4cea135a041c","Type":"ContainerDied","Data":"dbdf111aabc4bc0df2386a3517cbb41e1a844ddfad496d4e09801f1998ac0555"} Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.177651 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"3f090098-7927-4f68-b49d-4cea135a041c","Type":"ContainerDied","Data":"fa3e63d15818b7accc78e0355d0414299134969d91a4885ca57ff950ae494f7e"} Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.177710 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"3f090098-7927-4f68-b49d-4cea135a041c","Type":"ContainerDied","Data":"658a5496dc4adc9a7e1bdfb91f9a14fd91e6867091f294ccb0da5966c441330c"} Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.177736 4558 scope.go:117] "RemoveContainer" containerID="dbdf111aabc4bc0df2386a3517cbb41e1a844ddfad496d4e09801f1998ac0555" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.186391 4558 generic.go:334] "Generic (PLEG): container finished" podID="6dae3a04-db9e-48a2-bbe2-012c9ba55890" containerID="bfbe340de7186cba3ca94e5cb5c951be9e3563e26cd26dd763147ac393eeec4f" exitCode=2 Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.186458 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" 
event={"ID":"6dae3a04-db9e-48a2-bbe2-012c9ba55890","Type":"ContainerDied","Data":"bfbe340de7186cba3ca94e5cb5c951be9e3563e26cd26dd763147ac393eeec4f"} Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.192739 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-sb-0_9330efb9-cbc2-4c4a-9928-eedf93324d57/ovsdbserver-sb/0.log" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.192786 4558 generic.go:334] "Generic (PLEG): container finished" podID="9330efb9-cbc2-4c4a-9928-eedf93324d57" containerID="7f96a30bdd54fd7b066425edfff74600fb47af1d01a01f1f5b5a15ce2844ce91" exitCode=2 Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.192802 4558 generic.go:334] "Generic (PLEG): container finished" podID="9330efb9-cbc2-4c4a-9928-eedf93324d57" containerID="d05ee219c0f8bccc7fa261cda35e2938c1b449a43b564bf9b569dafd93b0c9a7" exitCode=143 Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.192854 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"9330efb9-cbc2-4c4a-9928-eedf93324d57","Type":"ContainerDied","Data":"7f96a30bdd54fd7b066425edfff74600fb47af1d01a01f1f5b5a15ce2844ce91"} Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.192885 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"9330efb9-cbc2-4c4a-9928-eedf93324d57","Type":"ContainerDied","Data":"d05ee219c0f8bccc7fa261cda35e2938c1b449a43b564bf9b569dafd93b0c9a7"} Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.195462 4558 generic.go:334] "Generic (PLEG): container finished" podID="c43f6123-71d1-4cf0-b919-00b1e9836c8b" containerID="df22f4721c8d074f5b610fec573fca752b358557510a94a96c91992dbb2d68ef" exitCode=0 Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.195503 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"c43f6123-71d1-4cf0-b919-00b1e9836c8b","Type":"ContainerDied","Data":"df22f4721c8d074f5b610fec573fca752b358557510a94a96c91992dbb2d68ef"} Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.204511 4558 generic.go:334] "Generic (PLEG): container finished" podID="7d2f1782-aa1c-4382-b682-27ce8f37d139" containerID="c66c2bcafd61422cbe12474d972cdfbe2b8064d7acd5baa2defb02dbaf1a697b" exitCode=143 Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.204563 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"7d2f1782-aa1c-4382-b682-27ce8f37d139","Type":"ContainerDied","Data":"c66c2bcafd61422cbe12474d972cdfbe2b8064d7acd5baa2defb02dbaf1a697b"} Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.206141 4558 generic.go:334] "Generic (PLEG): container finished" podID="6254cefa-8c4a-472e-9faf-4633c6d1618e" containerID="5c8b0e075d6b80a58c44316d67ccf05a22f59a72ae3bca7872921f2e03c311bf" exitCode=0 Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.206216 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"6254cefa-8c4a-472e-9faf-4633c6d1618e","Type":"ContainerDied","Data":"5c8b0e075d6b80a58c44316d67ccf05a22f59a72ae3bca7872921f2e03c311bf"} Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.207815 4558 generic.go:334] "Generic (PLEG): container finished" podID="d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd" containerID="212a4fd973edb30677b4248e267dd6b57d185e117261fb43b7e48e9267a561a9" exitCode=143 Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.207857 4558 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd","Type":"ContainerDied","Data":"212a4fd973edb30677b4248e267dd6b57d185e117261fb43b7e48e9267a561a9"} Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.210717 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f090098-7927-4f68-b49d-4cea135a041c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3f090098-7927-4f68-b49d-4cea135a041c" (UID: "3f090098-7927-4f68-b49d-4cea135a041c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.222567 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f090098-7927-4f68-b49d-4cea135a041c-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "3f090098-7927-4f68-b49d-4cea135a041c" (UID: "3f090098-7927-4f68-b49d-4cea135a041c"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.227525 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f090098-7927-4f68-b49d-4cea135a041c-ovsdbserver-nb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-nb-tls-certs") pod "3f090098-7927-4f68-b49d-4cea135a041c" (UID: "3f090098-7927-4f68-b49d-4cea135a041c"). InnerVolumeSpecName "ovsdbserver-nb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.241911 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-internal-tls-certs\") pod \"neutron-7b9876b66d-znq49\" (UID: \"7458929c-bc4e-4f17-b199-4963b298f4e8\") " pod="openstack-kuttl-tests/neutron-7b9876b66d-znq49" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.241979 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d127fb40-6eff-4ccd-922c-b209ac4edbda-internal-tls-certs\") pod \"barbican-api-5968cb86f6-ctfg5\" (UID: \"d127fb40-6eff-4ccd-922c-b209ac4edbda\") " pod="openstack-kuttl-tests/barbican-api-5968cb86f6-ctfg5" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.242100 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0a3695d2-9733-430d-a489-694e9e97b586-public-tls-certs\") pod \"keystone-9795586d8-4dv95\" (UID: \"0a3695d2-9733-430d-a489-694e9e97b586\") " pod="openstack-kuttl-tests/keystone-9795586d8-4dv95" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.242120 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d127fb40-6eff-4ccd-922c-b209ac4edbda-public-tls-certs\") pod \"barbican-api-5968cb86f6-ctfg5\" (UID: \"d127fb40-6eff-4ccd-922c-b209ac4edbda\") " pod="openstack-kuttl-tests/barbican-api-5968cb86f6-ctfg5" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.242146 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0a3695d2-9733-430d-a489-694e9e97b586-internal-tls-certs\") pod \"keystone-9795586d8-4dv95\" (UID: 
\"0a3695d2-9733-430d-a489-694e9e97b586\") " pod="openstack-kuttl-tests/keystone-9795586d8-4dv95" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.242222 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-public-tls-certs\") pod \"neutron-7b9876b66d-znq49\" (UID: \"7458929c-bc4e-4f17-b199-4963b298f4e8\") " pod="openstack-kuttl-tests/neutron-7b9876b66d-znq49" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.242259 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-ovndb-tls-certs\") pod \"neutron-7b9876b66d-znq49\" (UID: \"7458929c-bc4e-4f17-b199-4963b298f4e8\") " pod="openstack-kuttl-tests/neutron-7b9876b66d-znq49" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.242335 4558 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/3f090098-7927-4f68-b49d-4cea135a041c-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.242346 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7p5tp\" (UniqueName: \"kubernetes.io/projected/3f090098-7927-4f68-b49d-4cea135a041c-kube-api-access-7p5tp\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.242357 4558 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/3f090098-7927-4f68-b49d-4cea135a041c-ovsdbserver-nb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.242378 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" " Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.242388 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f090098-7927-4f68-b49d-4cea135a041c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:27 crc kubenswrapper[4558]: E0120 17:50:27.242699 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-neutron-internal-svc: secret "cert-neutron-internal-svc" not found Jan 20 17:50:27 crc kubenswrapper[4558]: E0120 17:50:27.242763 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-internal-tls-certs podName:7458929c-bc4e-4f17-b199-4963b298f4e8 nodeName:}" failed. No retries permitted until 2026-01-20 17:50:28.242742693 +0000 UTC m=+4122.003080661 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-internal-tls-certs") pod "neutron-7b9876b66d-znq49" (UID: "7458929c-bc4e-4f17-b199-4963b298f4e8") : secret "cert-neutron-internal-svc" not found Jan 20 17:50:27 crc kubenswrapper[4558]: E0120 17:50:27.242863 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-barbican-public-svc: secret "cert-barbican-public-svc" not found Jan 20 17:50:27 crc kubenswrapper[4558]: E0120 17:50:27.242930 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d127fb40-6eff-4ccd-922c-b209ac4edbda-public-tls-certs podName:d127fb40-6eff-4ccd-922c-b209ac4edbda nodeName:}" failed. No retries permitted until 2026-01-20 17:50:28.242911941 +0000 UTC m=+4122.003249909 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/d127fb40-6eff-4ccd-922c-b209ac4edbda-public-tls-certs") pod "barbican-api-5968cb86f6-ctfg5" (UID: "d127fb40-6eff-4ccd-922c-b209ac4edbda") : secret "cert-barbican-public-svc" not found Jan 20 17:50:27 crc kubenswrapper[4558]: E0120 17:50:27.242969 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-barbican-internal-svc: secret "cert-barbican-internal-svc" not found Jan 20 17:50:27 crc kubenswrapper[4558]: E0120 17:50:27.243001 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d127fb40-6eff-4ccd-922c-b209ac4edbda-internal-tls-certs podName:d127fb40-6eff-4ccd-922c-b209ac4edbda nodeName:}" failed. No retries permitted until 2026-01-20 17:50:28.242995219 +0000 UTC m=+4122.003333186 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/d127fb40-6eff-4ccd-922c-b209ac4edbda-internal-tls-certs") pod "barbican-api-5968cb86f6-ctfg5" (UID: "d127fb40-6eff-4ccd-922c-b209ac4edbda") : secret "cert-barbican-internal-svc" not found Jan 20 17:50:27 crc kubenswrapper[4558]: E0120 17:50:27.243034 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-keystone-public-svc: secret "cert-keystone-public-svc" not found Jan 20 17:50:27 crc kubenswrapper[4558]: E0120 17:50:27.243051 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0a3695d2-9733-430d-a489-694e9e97b586-public-tls-certs podName:0a3695d2-9733-430d-a489-694e9e97b586 nodeName:}" failed. No retries permitted until 2026-01-20 17:50:28.243046404 +0000 UTC m=+4122.003384372 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/0a3695d2-9733-430d-a489-694e9e97b586-public-tls-certs") pod "keystone-9795586d8-4dv95" (UID: "0a3695d2-9733-430d-a489-694e9e97b586") : secret "cert-keystone-public-svc" not found Jan 20 17:50:27 crc kubenswrapper[4558]: E0120 17:50:27.243082 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-neutron-public-svc: secret "cert-neutron-public-svc" not found Jan 20 17:50:27 crc kubenswrapper[4558]: E0120 17:50:27.243101 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-public-tls-certs podName:7458929c-bc4e-4f17-b199-4963b298f4e8 nodeName:}" failed. No retries permitted until 2026-01-20 17:50:28.243096268 +0000 UTC m=+4122.003434235 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-public-tls-certs") pod "neutron-7b9876b66d-znq49" (UID: "7458929c-bc4e-4f17-b199-4963b298f4e8") : secret "cert-neutron-public-svc" not found Jan 20 17:50:27 crc kubenswrapper[4558]: E0120 17:50:27.243130 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-keystone-internal-svc: secret "cert-keystone-internal-svc" not found Jan 20 17:50:27 crc kubenswrapper[4558]: E0120 17:50:27.243151 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0a3695d2-9733-430d-a489-694e9e97b586-internal-tls-certs podName:0a3695d2-9733-430d-a489-694e9e97b586 nodeName:}" failed. No retries permitted until 2026-01-20 17:50:28.243144519 +0000 UTC m=+4122.003482485 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/0a3695d2-9733-430d-a489-694e9e97b586-internal-tls-certs") pod "keystone-9795586d8-4dv95" (UID: "0a3695d2-9733-430d-a489-694e9e97b586") : secret "cert-keystone-internal-svc" not found Jan 20 17:50:27 crc kubenswrapper[4558]: E0120 17:50:27.243130 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-neutron-ovndbs: secret "cert-neutron-ovndbs" not found Jan 20 17:50:27 crc kubenswrapper[4558]: E0120 17:50:27.243200 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-ovndb-tls-certs podName:7458929c-bc4e-4f17-b199-4963b298f4e8 nodeName:}" failed. No retries permitted until 2026-01-20 17:50:28.243194714 +0000 UTC m=+4122.003532681 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "ovndb-tls-certs" (UniqueName: "kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-ovndb-tls-certs") pod "neutron-7b9876b66d-znq49" (UID: "7458929c-bc4e-4f17-b199-4963b298f4e8") : secret "cert-neutron-ovndbs" not found Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.271040 4558 scope.go:117] "RemoveContainer" containerID="fa3e63d15818b7accc78e0355d0414299134969d91a4885ca57ff950ae494f7e" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.278342 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-sb-0_9330efb9-cbc2-4c4a-9928-eedf93324d57/ovsdbserver-sb/0.log" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.278428 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.284330 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.292550 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage02-crc" (UniqueName: "kubernetes.io/local-volume/local-storage02-crc") on node "crc" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.310294 4558 scope.go:117] "RemoveContainer" containerID="dbdf111aabc4bc0df2386a3517cbb41e1a844ddfad496d4e09801f1998ac0555" Jan 20 17:50:27 crc kubenswrapper[4558]: E0120 17:50:27.332406 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dbdf111aabc4bc0df2386a3517cbb41e1a844ddfad496d4e09801f1998ac0555\": container with ID starting with dbdf111aabc4bc0df2386a3517cbb41e1a844ddfad496d4e09801f1998ac0555 not found: ID does not exist" containerID="dbdf111aabc4bc0df2386a3517cbb41e1a844ddfad496d4e09801f1998ac0555" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.332467 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dbdf111aabc4bc0df2386a3517cbb41e1a844ddfad496d4e09801f1998ac0555"} err="failed to get container status \"dbdf111aabc4bc0df2386a3517cbb41e1a844ddfad496d4e09801f1998ac0555\": rpc error: code = NotFound desc = could not find container \"dbdf111aabc4bc0df2386a3517cbb41e1a844ddfad496d4e09801f1998ac0555\": container with ID starting with dbdf111aabc4bc0df2386a3517cbb41e1a844ddfad496d4e09801f1998ac0555 not found: ID does not exist" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.332493 4558 scope.go:117] "RemoveContainer" containerID="fa3e63d15818b7accc78e0355d0414299134969d91a4885ca57ff950ae494f7e" Jan 20 17:50:27 crc kubenswrapper[4558]: E0120 17:50:27.333642 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fa3e63d15818b7accc78e0355d0414299134969d91a4885ca57ff950ae494f7e\": container with ID starting with fa3e63d15818b7accc78e0355d0414299134969d91a4885ca57ff950ae494f7e not found: ID does not exist" containerID="fa3e63d15818b7accc78e0355d0414299134969d91a4885ca57ff950ae494f7e" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.333679 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fa3e63d15818b7accc78e0355d0414299134969d91a4885ca57ff950ae494f7e"} err="failed to get container status \"fa3e63d15818b7accc78e0355d0414299134969d91a4885ca57ff950ae494f7e\": rpc error: code = NotFound desc = could not find container \"fa3e63d15818b7accc78e0355d0414299134969d91a4885ca57ff950ae494f7e\": container with ID starting with fa3e63d15818b7accc78e0355d0414299134969d91a4885ca57ff950ae494f7e not found: ID does not exist" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.333711 4558 scope.go:117] "RemoveContainer" containerID="dbdf111aabc4bc0df2386a3517cbb41e1a844ddfad496d4e09801f1998ac0555" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.333959 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dbdf111aabc4bc0df2386a3517cbb41e1a844ddfad496d4e09801f1998ac0555"} err="failed to get container status \"dbdf111aabc4bc0df2386a3517cbb41e1a844ddfad496d4e09801f1998ac0555\": rpc error: code = NotFound desc = could not find container \"dbdf111aabc4bc0df2386a3517cbb41e1a844ddfad496d4e09801f1998ac0555\": container with ID starting with dbdf111aabc4bc0df2386a3517cbb41e1a844ddfad496d4e09801f1998ac0555 not found: ID does not exist" 
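[editor's note] The DeleteContainer records immediately above show the kubelet asking CRI-O to remove container IDs the runtime has already forgotten and getting gRPC NotFound back, which it logs and then treats as effectively already-removed. A minimal, illustrative Go sketch of how such a caller can distinguish that benign case is below; the helper name isContainerGone and the sample error text are assumptions for illustration only, not taken from the kubelet source.

    package main

    import (
    	"fmt"

    	"google.golang.org/grpc/codes"
    	"google.golang.org/grpc/status"
    )

    // isContainerGone reports whether a CRI RemoveContainer/ContainerStatus call
    // failed only because the runtime no longer knows the container ID, so the
    // caller can treat the removal as already done. Illustrative helper only,
    // not the kubelet's actual implementation.
    func isContainerGone(err error) bool {
    	return status.Code(err) == codes.NotFound
    }

    func main() {
    	// Simulate the error shape seen in the log records above.
    	err := status.Error(codes.NotFound, "could not find container: ID does not exist")
    	if isContainerGone(err) {
    		fmt.Println("container already removed; nothing left to delete")
    	} else if err != nil {
    		fmt.Println("removal failed:", err)
    	}
    }

[end editor's note]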
Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.333992 4558 scope.go:117] "RemoveContainer" containerID="fa3e63d15818b7accc78e0355d0414299134969d91a4885ca57ff950ae494f7e" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.334237 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fa3e63d15818b7accc78e0355d0414299134969d91a4885ca57ff950ae494f7e"} err="failed to get container status \"fa3e63d15818b7accc78e0355d0414299134969d91a4885ca57ff950ae494f7e\": rpc error: code = NotFound desc = could not find container \"fa3e63d15818b7accc78e0355d0414299134969d91a4885ca57ff950ae494f7e\": container with ID starting with fa3e63d15818b7accc78e0355d0414299134969d91a4885ca57ff950ae494f7e not found: ID does not exist" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.345082 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/9330efb9-cbc2-4c4a-9928-eedf93324d57-metrics-certs-tls-certs\") pod \"9330efb9-cbc2-4c4a-9928-eedf93324d57\" (UID: \"9330efb9-cbc2-4c4a-9928-eedf93324d57\") " Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.346422 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6254cefa-8c4a-472e-9faf-4633c6d1618e-combined-ca-bundle\") pod \"6254cefa-8c4a-472e-9faf-4633c6d1618e\" (UID: \"6254cefa-8c4a-472e-9faf-4633c6d1618e\") " Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.346480 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/9330efb9-cbc2-4c4a-9928-eedf93324d57-ovsdbserver-sb-tls-certs\") pod \"9330efb9-cbc2-4c4a-9928-eedf93324d57\" (UID: \"9330efb9-cbc2-4c4a-9928-eedf93324d57\") " Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.346511 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9330efb9-cbc2-4c4a-9928-eedf93324d57-config\") pod \"9330efb9-cbc2-4c4a-9928-eedf93324d57\" (UID: \"9330efb9-cbc2-4c4a-9928-eedf93324d57\") " Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.346546 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/9330efb9-cbc2-4c4a-9928-eedf93324d57-ovsdb-rundir\") pod \"9330efb9-cbc2-4c4a-9928-eedf93324d57\" (UID: \"9330efb9-cbc2-4c4a-9928-eedf93324d57\") " Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.346580 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2fz5h\" (UniqueName: \"kubernetes.io/projected/9330efb9-cbc2-4c4a-9928-eedf93324d57-kube-api-access-2fz5h\") pod \"9330efb9-cbc2-4c4a-9928-eedf93324d57\" (UID: \"9330efb9-cbc2-4c4a-9928-eedf93324d57\") " Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.346607 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9330efb9-cbc2-4c4a-9928-eedf93324d57-combined-ca-bundle\") pod \"9330efb9-cbc2-4c4a-9928-eedf93324d57\" (UID: \"9330efb9-cbc2-4c4a-9928-eedf93324d57\") " Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.346680 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-sb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod 
\"9330efb9-cbc2-4c4a-9928-eedf93324d57\" (UID: \"9330efb9-cbc2-4c4a-9928-eedf93324d57\") " Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.346706 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ns75v\" (UniqueName: \"kubernetes.io/projected/6254cefa-8c4a-472e-9faf-4633c6d1618e-kube-api-access-ns75v\") pod \"6254cefa-8c4a-472e-9faf-4633c6d1618e\" (UID: \"6254cefa-8c4a-472e-9faf-4633c6d1618e\") " Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.346731 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/6254cefa-8c4a-472e-9faf-4633c6d1618e-nova-novncproxy-tls-certs\") pod \"6254cefa-8c4a-472e-9faf-4633c6d1618e\" (UID: \"6254cefa-8c4a-472e-9faf-4633c6d1618e\") " Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.346755 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6254cefa-8c4a-472e-9faf-4633c6d1618e-config-data\") pod \"6254cefa-8c4a-472e-9faf-4633c6d1618e\" (UID: \"6254cefa-8c4a-472e-9faf-4633c6d1618e\") " Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.346779 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/6254cefa-8c4a-472e-9faf-4633c6d1618e-vencrypt-tls-certs\") pod \"6254cefa-8c4a-472e-9faf-4633c6d1618e\" (UID: \"6254cefa-8c4a-472e-9faf-4633c6d1618e\") " Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.346803 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9330efb9-cbc2-4c4a-9928-eedf93324d57-scripts\") pod \"9330efb9-cbc2-4c4a-9928-eedf93324d57\" (UID: \"9330efb9-cbc2-4c4a-9928-eedf93324d57\") " Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.347354 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.348072 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9330efb9-cbc2-4c4a-9928-eedf93324d57-scripts" (OuterVolumeSpecName: "scripts") pod "9330efb9-cbc2-4c4a-9928-eedf93324d57" (UID: "9330efb9-cbc2-4c4a-9928-eedf93324d57"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.356902 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9330efb9-cbc2-4c4a-9928-eedf93324d57-config" (OuterVolumeSpecName: "config") pod "9330efb9-cbc2-4c4a-9928-eedf93324d57" (UID: "9330efb9-cbc2-4c4a-9928-eedf93324d57"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.357497 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9330efb9-cbc2-4c4a-9928-eedf93324d57-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "9330efb9-cbc2-4c4a-9928-eedf93324d57" (UID: "9330efb9-cbc2-4c4a-9928-eedf93324d57"). InnerVolumeSpecName "ovsdb-rundir". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.381921 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "ovndbcluster-sb-etc-ovn") pod "9330efb9-cbc2-4c4a-9928-eedf93324d57" (UID: "9330efb9-cbc2-4c4a-9928-eedf93324d57"). InnerVolumeSpecName "local-storage03-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.395349 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9330efb9-cbc2-4c4a-9928-eedf93324d57-kube-api-access-2fz5h" (OuterVolumeSpecName: "kube-api-access-2fz5h") pod "9330efb9-cbc2-4c4a-9928-eedf93324d57" (UID: "9330efb9-cbc2-4c4a-9928-eedf93324d57"). InnerVolumeSpecName "kube-api-access-2fz5h". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.409539 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6254cefa-8c4a-472e-9faf-4633c6d1618e-kube-api-access-ns75v" (OuterVolumeSpecName: "kube-api-access-ns75v") pod "6254cefa-8c4a-472e-9faf-4633c6d1618e" (UID: "6254cefa-8c4a-472e-9faf-4633c6d1618e"). InnerVolumeSpecName "kube-api-access-ns75v". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.440531 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6254cefa-8c4a-472e-9faf-4633c6d1618e-config-data" (OuterVolumeSpecName: "config-data") pod "6254cefa-8c4a-472e-9faf-4633c6d1618e" (UID: "6254cefa-8c4a-472e-9faf-4633c6d1618e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.448906 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9330efb9-cbc2-4c4a-9928-eedf93324d57-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.448931 4558 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/9330efb9-cbc2-4c4a-9928-eedf93324d57-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.448944 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2fz5h\" (UniqueName: \"kubernetes.io/projected/9330efb9-cbc2-4c4a-9928-eedf93324d57-kube-api-access-2fz5h\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.448967 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" " Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.448979 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ns75v\" (UniqueName: \"kubernetes.io/projected/6254cefa-8c4a-472e-9faf-4633c6d1618e-kube-api-access-ns75v\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.448999 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6254cefa-8c4a-472e-9faf-4633c6d1618e-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.449009 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9330efb9-cbc2-4c4a-9928-eedf93324d57-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.457056 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6254cefa-8c4a-472e-9faf-4633c6d1618e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6254cefa-8c4a-472e-9faf-4633c6d1618e" (UID: "6254cefa-8c4a-472e-9faf-4633c6d1618e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:27 crc kubenswrapper[4558]: E0120 17:50:27.457263 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="bccb9b380d5d7d133e1f9deb550275ef3efade0d9df8c3d33db8b87c1a6fd80b" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.458476 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:50:27 crc kubenswrapper[4558]: E0120 17:50:27.470528 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="bccb9b380d5d7d133e1f9deb550275ef3efade0d9df8c3d33db8b87c1a6fd80b" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.474011 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc" Jan 20 17:50:27 crc kubenswrapper[4558]: E0120 17:50:27.477236 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="bccb9b380d5d7d133e1f9deb550275ef3efade0d9df8c3d33db8b87c1a6fd80b" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:50:27 crc kubenswrapper[4558]: E0120 17:50:27.477368 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="b13b98ea-20eb-452f-9316-a7acdeb18406" containerName="nova-scheduler-scheduler" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.517061 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6254cefa-8c4a-472e-9faf-4633c6d1618e-nova-novncproxy-tls-certs" (OuterVolumeSpecName: "nova-novncproxy-tls-certs") pod "6254cefa-8c4a-472e-9faf-4633c6d1618e" (UID: "6254cefa-8c4a-472e-9faf-4633c6d1618e"). InnerVolumeSpecName "nova-novncproxy-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.545880 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6254cefa-8c4a-472e-9faf-4633c6d1618e-vencrypt-tls-certs" (OuterVolumeSpecName: "vencrypt-tls-certs") pod "6254cefa-8c4a-472e-9faf-4633c6d1618e" (UID: "6254cefa-8c4a-472e-9faf-4633c6d1618e"). InnerVolumeSpecName "vencrypt-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.551076 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"c43f6123-71d1-4cf0-b919-00b1e9836c8b\" (UID: \"c43f6123-71d1-4cf0-b919-00b1e9836c8b\") " Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.551213 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c43f6123-71d1-4cf0-b919-00b1e9836c8b-operator-scripts\") pod \"c43f6123-71d1-4cf0-b919-00b1e9836c8b\" (UID: \"c43f6123-71d1-4cf0-b919-00b1e9836c8b\") " Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.551651 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/c43f6123-71d1-4cf0-b919-00b1e9836c8b-config-data-default\") pod \"c43f6123-71d1-4cf0-b919-00b1e9836c8b\" (UID: \"c43f6123-71d1-4cf0-b919-00b1e9836c8b\") " Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.551720 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h5x4h\" (UniqueName: \"kubernetes.io/projected/c43f6123-71d1-4cf0-b919-00b1e9836c8b-kube-api-access-h5x4h\") pod \"c43f6123-71d1-4cf0-b919-00b1e9836c8b\" (UID: \"c43f6123-71d1-4cf0-b919-00b1e9836c8b\") " Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.551800 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c43f6123-71d1-4cf0-b919-00b1e9836c8b-kolla-config\") pod \"c43f6123-71d1-4cf0-b919-00b1e9836c8b\" (UID: \"c43f6123-71d1-4cf0-b919-00b1e9836c8b\") " Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.551916 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/c43f6123-71d1-4cf0-b919-00b1e9836c8b-config-data-generated\") pod \"c43f6123-71d1-4cf0-b919-00b1e9836c8b\" (UID: \"c43f6123-71d1-4cf0-b919-00b1e9836c8b\") " Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.552015 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c43f6123-71d1-4cf0-b919-00b1e9836c8b-combined-ca-bundle\") pod \"c43f6123-71d1-4cf0-b919-00b1e9836c8b\" (UID: \"c43f6123-71d1-4cf0-b919-00b1e9836c8b\") " Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.552048 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/c43f6123-71d1-4cf0-b919-00b1e9836c8b-galera-tls-certs\") pod \"c43f6123-71d1-4cf0-b919-00b1e9836c8b\" (UID: \"c43f6123-71d1-4cf0-b919-00b1e9836c8b\") " Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.553720 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c43f6123-71d1-4cf0-b919-00b1e9836c8b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c43f6123-71d1-4cf0-b919-00b1e9836c8b" (UID: "c43f6123-71d1-4cf0-b919-00b1e9836c8b"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.553803 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9330efb9-cbc2-4c4a-9928-eedf93324d57-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9330efb9-cbc2-4c4a-9928-eedf93324d57" (UID: "9330efb9-cbc2-4c4a-9928-eedf93324d57"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.553942 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.553970 4558 reconciler_common.go:293] "Volume detached for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/6254cefa-8c4a-472e-9faf-4633c6d1618e-nova-novncproxy-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.554005 4558 reconciler_common.go:293] "Volume detached for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/6254cefa-8c4a-472e-9faf-4633c6d1618e-vencrypt-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.554018 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6254cefa-8c4a-472e-9faf-4633c6d1618e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.555124 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c43f6123-71d1-4cf0-b919-00b1e9836c8b-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "c43f6123-71d1-4cf0-b919-00b1e9836c8b" (UID: "c43f6123-71d1-4cf0-b919-00b1e9836c8b"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.562143 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c43f6123-71d1-4cf0-b919-00b1e9836c8b-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "c43f6123-71d1-4cf0-b919-00b1e9836c8b" (UID: "c43f6123-71d1-4cf0-b919-00b1e9836c8b"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.562569 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c43f6123-71d1-4cf0-b919-00b1e9836c8b-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "c43f6123-71d1-4cf0-b919-00b1e9836c8b" (UID: "c43f6123-71d1-4cf0-b919-00b1e9836c8b"). InnerVolumeSpecName "kolla-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.564584 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.575202 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.580695 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c43f6123-71d1-4cf0-b919-00b1e9836c8b-kube-api-access-h5x4h" (OuterVolumeSpecName: "kube-api-access-h5x4h") pod "c43f6123-71d1-4cf0-b919-00b1e9836c8b" (UID: "c43f6123-71d1-4cf0-b919-00b1e9836c8b"). InnerVolumeSpecName "kube-api-access-h5x4h". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.589229 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:50:27 crc kubenswrapper[4558]: E0120 17:50:27.589765 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c43f6123-71d1-4cf0-b919-00b1e9836c8b" containerName="galera" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.589781 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c43f6123-71d1-4cf0-b919-00b1e9836c8b" containerName="galera" Jan 20 17:50:27 crc kubenswrapper[4558]: E0120 17:50:27.589816 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9330efb9-cbc2-4c4a-9928-eedf93324d57" containerName="openstack-network-exporter" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.589824 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9330efb9-cbc2-4c4a-9928-eedf93324d57" containerName="openstack-network-exporter" Jan 20 17:50:27 crc kubenswrapper[4558]: E0120 17:50:27.589839 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c43f6123-71d1-4cf0-b919-00b1e9836c8b" containerName="mysql-bootstrap" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.589845 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c43f6123-71d1-4cf0-b919-00b1e9836c8b" containerName="mysql-bootstrap" Jan 20 17:50:27 crc kubenswrapper[4558]: E0120 17:50:27.589854 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f090098-7927-4f68-b49d-4cea135a041c" containerName="openstack-network-exporter" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.589861 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f090098-7927-4f68-b49d-4cea135a041c" containerName="openstack-network-exporter" Jan 20 17:50:27 crc kubenswrapper[4558]: E0120 17:50:27.589877 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f090098-7927-4f68-b49d-4cea135a041c" containerName="ovsdbserver-nb" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.589883 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f090098-7927-4f68-b49d-4cea135a041c" containerName="ovsdbserver-nb" Jan 20 17:50:27 crc kubenswrapper[4558]: E0120 17:50:27.589895 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9330efb9-cbc2-4c4a-9928-eedf93324d57" containerName="ovsdbserver-sb" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.589902 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9330efb9-cbc2-4c4a-9928-eedf93324d57" containerName="ovsdbserver-sb" Jan 20 17:50:27 crc kubenswrapper[4558]: E0120 17:50:27.589914 4558 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="6254cefa-8c4a-472e-9faf-4633c6d1618e" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.589920 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6254cefa-8c4a-472e-9faf-4633c6d1618e" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.590137 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="9330efb9-cbc2-4c4a-9928-eedf93324d57" containerName="ovsdbserver-sb" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.590148 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="6254cefa-8c4a-472e-9faf-4633c6d1618e" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.590173 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="9330efb9-cbc2-4c4a-9928-eedf93324d57" containerName="openstack-network-exporter" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.590184 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="3f090098-7927-4f68-b49d-4cea135a041c" containerName="ovsdbserver-nb" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.590197 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="3f090098-7927-4f68-b49d-4cea135a041c" containerName="openstack-network-exporter" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.590209 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c43f6123-71d1-4cf0-b919-00b1e9836c8b" containerName="galera" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.591332 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.592578 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.599437 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ovncluster-ovndbcluster-nb-dockercfg-fcsjv" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.599620 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ovndbcluster-nb-ovndbs" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.600258 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovndbcluster-nb-scripts" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.600481 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovndbcluster-nb-config" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.609139 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage14-crc" (OuterVolumeSpecName: "mysql-db") pod "c43f6123-71d1-4cf0-b919-00b1e9836c8b" (UID: "c43f6123-71d1-4cf0-b919-00b1e9836c8b"). InnerVolumeSpecName "local-storage14-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.637516 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9330efb9-cbc2-4c4a-9928-eedf93324d57-ovsdbserver-sb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-sb-tls-certs") pod "9330efb9-cbc2-4c4a-9928-eedf93324d57" (UID: "9330efb9-cbc2-4c4a-9928-eedf93324d57"). InnerVolumeSpecName "ovsdbserver-sb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.639958 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9330efb9-cbc2-4c4a-9928-eedf93324d57-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "9330efb9-cbc2-4c4a-9928-eedf93324d57" (UID: "9330efb9-cbc2-4c4a-9928-eedf93324d57"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.649069 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c43f6123-71d1-4cf0-b919-00b1e9836c8b-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "c43f6123-71d1-4cf0-b919-00b1e9836c8b" (UID: "c43f6123-71d1-4cf0-b919-00b1e9836c8b"). InnerVolumeSpecName "galera-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.655382 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.655460 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-config\") pod \"ovsdbserver-nb-0\" (UID: \"bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.655560 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.655602 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lfb28\" (UniqueName: \"kubernetes.io/projected/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-kube-api-access-lfb28\") pod \"ovsdbserver-nb-0\" (UID: \"bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.655635 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.655719 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.655862 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.655931 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.656009 4558 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/c43f6123-71d1-4cf0-b919-00b1e9836c8b-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.656038 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") on node \"crc\" " Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.656050 4558 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/9330efb9-cbc2-4c4a-9928-eedf93324d57-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.656063 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c43f6123-71d1-4cf0-b919-00b1e9836c8b-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.656073 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/c43f6123-71d1-4cf0-b919-00b1e9836c8b-config-data-default\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.656083 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h5x4h\" (UniqueName: \"kubernetes.io/projected/c43f6123-71d1-4cf0-b919-00b1e9836c8b-kube-api-access-h5x4h\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.656091 4558 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/9330efb9-cbc2-4c4a-9928-eedf93324d57-ovsdbserver-sb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.656100 4558 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c43f6123-71d1-4cf0-b919-00b1e9836c8b-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.656111 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9330efb9-cbc2-4c4a-9928-eedf93324d57-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.656121 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/c43f6123-71d1-4cf0-b919-00b1e9836c8b-config-data-generated\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.673849 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c43f6123-71d1-4cf0-b919-00b1e9836c8b-combined-ca-bundle" 
(OuterVolumeSpecName: "combined-ca-bundle") pod "c43f6123-71d1-4cf0-b919-00b1e9836c8b" (UID: "c43f6123-71d1-4cf0-b919-00b1e9836c8b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.677139 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage14-crc" (UniqueName: "kubernetes.io/local-volume/local-storage14-crc") on node "crc" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.713790 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.759895 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7c3fe526-7107-4cc4-aca2-eb809f317c5c-config-data\") pod \"7c3fe526-7107-4cc4-aca2-eb809f317c5c\" (UID: \"7c3fe526-7107-4cc4-aca2-eb809f317c5c\") " Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.760633 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7c3fe526-7107-4cc4-aca2-eb809f317c5c-config-data" (OuterVolumeSpecName: "config-data") pod "7c3fe526-7107-4cc4-aca2-eb809f317c5c" (UID: "7c3fe526-7107-4cc4-aca2-eb809f317c5c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.761144 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c3fe526-7107-4cc4-aca2-eb809f317c5c-combined-ca-bundle\") pod \"7c3fe526-7107-4cc4-aca2-eb809f317c5c\" (UID: \"7c3fe526-7107-4cc4-aca2-eb809f317c5c\") " Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.761204 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dd68x\" (UniqueName: \"kubernetes.io/projected/7c3fe526-7107-4cc4-aca2-eb809f317c5c-kube-api-access-dd68x\") pod \"7c3fe526-7107-4cc4-aca2-eb809f317c5c\" (UID: \"7c3fe526-7107-4cc4-aca2-eb809f317c5c\") " Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.761265 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/7c3fe526-7107-4cc4-aca2-eb809f317c5c-memcached-tls-certs\") pod \"7c3fe526-7107-4cc4-aca2-eb809f317c5c\" (UID: \"7c3fe526-7107-4cc4-aca2-eb809f317c5c\") " Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.761378 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/7c3fe526-7107-4cc4-aca2-eb809f317c5c-kolla-config\") pod \"7c3fe526-7107-4cc4-aca2-eb809f317c5c\" (UID: \"7c3fe526-7107-4cc4-aca2-eb809f317c5c\") " Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.761730 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-config\") pod \"ovsdbserver-nb-0\" (UID: \"bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.761841 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: 
\"bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.761879 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lfb28\" (UniqueName: \"kubernetes.io/projected/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-kube-api-access-lfb28\") pod \"ovsdbserver-nb-0\" (UID: \"bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.761917 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.762000 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.762140 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.762223 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.762310 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.762390 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c43f6123-71d1-4cf0-b919-00b1e9836c8b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.762402 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7c3fe526-7107-4cc4-aca2-eb809f317c5c-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.762412 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:27 crc kubenswrapper[4558]: E0120 17:50:27.762515 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-ovndbcluster-nb-ovndbs: secret "cert-ovndbcluster-nb-ovndbs" not found Jan 20 17:50:27 crc kubenswrapper[4558]: E0120 17:50:27.762573 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-ovsdbserver-nb-tls-certs 
podName:bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162 nodeName:}" failed. No retries permitted until 2026-01-20 17:50:28.262556088 +0000 UTC m=+4122.022894055 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "ovsdbserver-nb-tls-certs" (UniqueName: "kubernetes.io/secret/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-ovsdbserver-nb-tls-certs") pod "ovsdbserver-nb-0" (UID: "bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162") : secret "cert-ovndbcluster-nb-ovndbs" not found Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.763340 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162\") device mount path \"/mnt/openstack/pv02\"" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:50:27 crc kubenswrapper[4558]: E0120 17:50:27.763552 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-ovn-metrics: secret "cert-ovn-metrics" not found Jan 20 17:50:27 crc kubenswrapper[4558]: E0120 17:50:27.763587 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-metrics-certs-tls-certs podName:bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162 nodeName:}" failed. No retries permitted until 2026-01-20 17:50:28.263576557 +0000 UTC m=+4122.023914525 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs-tls-certs" (UniqueName: "kubernetes.io/secret/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-metrics-certs-tls-certs") pod "ovsdbserver-nb-0" (UID: "bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162") : secret "cert-ovn-metrics" not found Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.763904 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.764632 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-config\") pod \"ovsdbserver-nb-0\" (UID: \"bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.764880 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7c3fe526-7107-4cc4-aca2-eb809f317c5c-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "7c3fe526-7107-4cc4-aca2-eb809f317c5c" (UID: "7c3fe526-7107-4cc4-aca2-eb809f317c5c"). InnerVolumeSpecName "kolla-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.765140 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.774906 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7c3fe526-7107-4cc4-aca2-eb809f317c5c-kube-api-access-dd68x" (OuterVolumeSpecName: "kube-api-access-dd68x") pod "7c3fe526-7107-4cc4-aca2-eb809f317c5c" (UID: "7c3fe526-7107-4cc4-aca2-eb809f317c5c"). InnerVolumeSpecName "kube-api-access-dd68x". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.781512 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.786048 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lfb28\" (UniqueName: \"kubernetes.io/projected/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-kube-api-access-lfb28\") pod \"ovsdbserver-nb-0\" (UID: \"bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.790264 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.796286 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7c3fe526-7107-4cc4-aca2-eb809f317c5c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7c3fe526-7107-4cc4-aca2-eb809f317c5c" (UID: "7c3fe526-7107-4cc4-aca2-eb809f317c5c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.802096 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7c3fe526-7107-4cc4-aca2-eb809f317c5c-memcached-tls-certs" (OuterVolumeSpecName: "memcached-tls-certs") pod "7c3fe526-7107-4cc4-aca2-eb809f317c5c" (UID: "7c3fe526-7107-4cc4-aca2-eb809f317c5c"). InnerVolumeSpecName "memcached-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.864176 4558 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/7c3fe526-7107-4cc4-aca2-eb809f317c5c-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.864277 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c3fe526-7107-4cc4-aca2-eb809f317c5c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.864299 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dd68x\" (UniqueName: \"kubernetes.io/projected/7c3fe526-7107-4cc4-aca2-eb809f317c5c-kube-api-access-dd68x\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:27 crc kubenswrapper[4558]: I0120 17:50:27.864314 4558 reconciler_common.go:293] "Volume detached for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/7c3fe526-7107-4cc4-aca2-eb809f317c5c-memcached-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.170513 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/38ce22a3-0b53-417f-ab78-77a3be2da04f-internal-tls-certs\") pod \"placement-9cbb9c58b-jcwzc\" (UID: \"38ce22a3-0b53-417f-ab78-77a3be2da04f\") " pod="openstack-kuttl-tests/placement-9cbb9c58b-jcwzc" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.170689 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/38ce22a3-0b53-417f-ab78-77a3be2da04f-public-tls-certs\") pod \"placement-9cbb9c58b-jcwzc\" (UID: \"38ce22a3-0b53-417f-ab78-77a3be2da04f\") " pod="openstack-kuttl-tests/placement-9cbb9c58b-jcwzc" Jan 20 17:50:28 crc kubenswrapper[4558]: E0120 17:50:28.170959 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-placement-public-svc: secret "cert-placement-public-svc" not found Jan 20 17:50:28 crc kubenswrapper[4558]: E0120 17:50:28.171049 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/38ce22a3-0b53-417f-ab78-77a3be2da04f-public-tls-certs podName:38ce22a3-0b53-417f-ab78-77a3be2da04f nodeName:}" failed. No retries permitted until 2026-01-20 17:50:30.171028671 +0000 UTC m=+4123.931366638 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/38ce22a3-0b53-417f-ab78-77a3be2da04f-public-tls-certs") pod "placement-9cbb9c58b-jcwzc" (UID: "38ce22a3-0b53-417f-ab78-77a3be2da04f") : secret "cert-placement-public-svc" not found Jan 20 17:50:28 crc kubenswrapper[4558]: E0120 17:50:28.171613 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-placement-internal-svc: secret "cert-placement-internal-svc" not found Jan 20 17:50:28 crc kubenswrapper[4558]: E0120 17:50:28.171660 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/38ce22a3-0b53-417f-ab78-77a3be2da04f-internal-tls-certs podName:38ce22a3-0b53-417f-ab78-77a3be2da04f nodeName:}" failed. No retries permitted until 2026-01-20 17:50:30.171652694 +0000 UTC m=+4123.931990661 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/38ce22a3-0b53-417f-ab78-77a3be2da04f-internal-tls-certs") pod "placement-9cbb9c58b-jcwzc" (UID: "38ce22a3-0b53-417f-ab78-77a3be2da04f") : secret "cert-placement-internal-svc" not found Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.258870 4558 generic.go:334] "Generic (PLEG): container finished" podID="7c3fe526-7107-4cc4-aca2-eb809f317c5c" containerID="c8f37f1ccc0f6204e6d44935b746f6c6ede31b796dd74e70992f82e8f5ab72db" exitCode=0 Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.258981 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.259212 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstackclient" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.259339 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"7c3fe526-7107-4cc4-aca2-eb809f317c5c","Type":"ContainerDied","Data":"c8f37f1ccc0f6204e6d44935b746f6c6ede31b796dd74e70992f82e8f5ab72db"} Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.259375 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"7c3fe526-7107-4cc4-aca2-eb809f317c5c","Type":"ContainerDied","Data":"6a57bdb9acab7eeb022ba4c1315352137137390ddf195bb1d1e1d5c0b59c4eb6"} Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.259393 4558 scope.go:117] "RemoveContainer" containerID="c8f37f1ccc0f6204e6d44935b746f6c6ede31b796dd74e70992f82e8f5ab72db" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.261872 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-5bb5dd7cd4-w4jnj" event={"ID":"51c79956-4212-4d46-b3e9-e9d5e7a33c31","Type":"ContainerStarted","Data":"b9ac5c3d167c83d97bcf4206fa59450cb566eb19a154b51f5884efb725cdab16"} Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.261925 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-5bb5dd7cd4-w4jnj" event={"ID":"51c79956-4212-4d46-b3e9-e9d5e7a33c31","Type":"ContainerStarted","Data":"fbd3990f851ba4c232be9e8e9146ae26662c46c76d2e455f20e0dcc444b77578"} Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.270417 4558 generic.go:334] "Generic (PLEG): container finished" podID="d0f82b75-bed6-4779-ad55-6bec50d7faa6" containerID="7fb1cfce15f7b21b933180ce2945a9ba95c867c596b50488373fe0546cb8f714" exitCode=0 Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.270508 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"d0f82b75-bed6-4779-ad55-6bec50d7faa6","Type":"ContainerDied","Data":"7fb1cfce15f7b21b933180ce2945a9ba95c867c596b50488373fe0546cb8f714"} Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.273015 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-public-tls-certs\") pod \"neutron-7b9876b66d-znq49\" (UID: \"7458929c-bc4e-4f17-b199-4963b298f4e8\") " pod="openstack-kuttl-tests/neutron-7b9876b66d-znq49" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.273076 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-ovndb-tls-certs\") pod \"neutron-7b9876b66d-znq49\" (UID: \"7458929c-bc4e-4f17-b199-4963b298f4e8\") " pod="openstack-kuttl-tests/neutron-7b9876b66d-znq49" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.273137 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-internal-tls-certs\") pod \"neutron-7b9876b66d-znq49\" (UID: \"7458929c-bc4e-4f17-b199-4963b298f4e8\") " pod="openstack-kuttl-tests/neutron-7b9876b66d-znq49" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.273193 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d127fb40-6eff-4ccd-922c-b209ac4edbda-internal-tls-certs\") pod \"barbican-api-5968cb86f6-ctfg5\" (UID: \"d127fb40-6eff-4ccd-922c-b209ac4edbda\") " pod="openstack-kuttl-tests/barbican-api-5968cb86f6-ctfg5" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.273267 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.273317 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0a3695d2-9733-430d-a489-694e9e97b586-public-tls-certs\") pod \"keystone-9795586d8-4dv95\" (UID: \"0a3695d2-9733-430d-a489-694e9e97b586\") " pod="openstack-kuttl-tests/keystone-9795586d8-4dv95" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.273351 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d127fb40-6eff-4ccd-922c-b209ac4edbda-public-tls-certs\") pod \"barbican-api-5968cb86f6-ctfg5\" (UID: \"d127fb40-6eff-4ccd-922c-b209ac4edbda\") " pod="openstack-kuttl-tests/barbican-api-5968cb86f6-ctfg5" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.273374 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.273394 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0a3695d2-9733-430d-a489-694e9e97b586-internal-tls-certs\") pod \"keystone-9795586d8-4dv95\" (UID: \"0a3695d2-9733-430d-a489-694e9e97b586\") " pod="openstack-kuttl-tests/keystone-9795586d8-4dv95" Jan 20 17:50:28 crc kubenswrapper[4558]: E0120 17:50:28.273570 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-keystone-internal-svc: secret "cert-keystone-internal-svc" not found Jan 20 17:50:28 crc kubenswrapper[4558]: E0120 17:50:28.273626 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0a3695d2-9733-430d-a489-694e9e97b586-internal-tls-certs podName:0a3695d2-9733-430d-a489-694e9e97b586 nodeName:}" failed. 
No retries permitted until 2026-01-20 17:50:30.273609 +0000 UTC m=+4124.033946967 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/0a3695d2-9733-430d-a489-694e9e97b586-internal-tls-certs") pod "keystone-9795586d8-4dv95" (UID: "0a3695d2-9733-430d-a489-694e9e97b586") : secret "cert-keystone-internal-svc" not found Jan 20 17:50:28 crc kubenswrapper[4558]: E0120 17:50:28.273953 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-neutron-public-svc: secret "cert-neutron-public-svc" not found Jan 20 17:50:28 crc kubenswrapper[4558]: E0120 17:50:28.274011 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-public-tls-certs podName:7458929c-bc4e-4f17-b199-4963b298f4e8 nodeName:}" failed. No retries permitted until 2026-01-20 17:50:30.274004353 +0000 UTC m=+4124.034342321 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-public-tls-certs") pod "neutron-7b9876b66d-znq49" (UID: "7458929c-bc4e-4f17-b199-4963b298f4e8") : secret "cert-neutron-public-svc" not found Jan 20 17:50:28 crc kubenswrapper[4558]: E0120 17:50:28.274045 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-neutron-ovndbs: secret "cert-neutron-ovndbs" not found Jan 20 17:50:28 crc kubenswrapper[4558]: E0120 17:50:28.274084 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-ovndb-tls-certs podName:7458929c-bc4e-4f17-b199-4963b298f4e8 nodeName:}" failed. No retries permitted until 2026-01-20 17:50:30.274058065 +0000 UTC m=+4124.034396031 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "ovndb-tls-certs" (UniqueName: "kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-ovndb-tls-certs") pod "neutron-7b9876b66d-znq49" (UID: "7458929c-bc4e-4f17-b199-4963b298f4e8") : secret "cert-neutron-ovndbs" not found Jan 20 17:50:28 crc kubenswrapper[4558]: E0120 17:50:28.274115 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-neutron-internal-svc: secret "cert-neutron-internal-svc" not found Jan 20 17:50:28 crc kubenswrapper[4558]: E0120 17:50:28.274134 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-internal-tls-certs podName:7458929c-bc4e-4f17-b199-4963b298f4e8 nodeName:}" failed. No retries permitted until 2026-01-20 17:50:30.274128688 +0000 UTC m=+4124.034466655 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-internal-tls-certs") pod "neutron-7b9876b66d-znq49" (UID: "7458929c-bc4e-4f17-b199-4963b298f4e8") : secret "cert-neutron-internal-svc" not found Jan 20 17:50:28 crc kubenswrapper[4558]: E0120 17:50:28.274192 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-barbican-internal-svc: secret "cert-barbican-internal-svc" not found Jan 20 17:50:28 crc kubenswrapper[4558]: E0120 17:50:28.274212 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d127fb40-6eff-4ccd-922c-b209ac4edbda-internal-tls-certs podName:d127fb40-6eff-4ccd-922c-b209ac4edbda nodeName:}" failed. No retries permitted until 2026-01-20 17:50:30.274206874 +0000 UTC m=+4124.034544842 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/d127fb40-6eff-4ccd-922c-b209ac4edbda-internal-tls-certs") pod "barbican-api-5968cb86f6-ctfg5" (UID: "d127fb40-6eff-4ccd-922c-b209ac4edbda") : secret "cert-barbican-internal-svc" not found Jan 20 17:50:28 crc kubenswrapper[4558]: E0120 17:50:28.274261 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-ovn-metrics: secret "cert-ovn-metrics" not found Jan 20 17:50:28 crc kubenswrapper[4558]: E0120 17:50:28.274280 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-metrics-certs-tls-certs podName:bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162 nodeName:}" failed. No retries permitted until 2026-01-20 17:50:29.274274553 +0000 UTC m=+4123.034612519 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs-tls-certs" (UniqueName: "kubernetes.io/secret/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-metrics-certs-tls-certs") pod "ovsdbserver-nb-0" (UID: "bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162") : secret "cert-ovn-metrics" not found Jan 20 17:50:28 crc kubenswrapper[4558]: E0120 17:50:28.274325 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-keystone-public-svc: secret "cert-keystone-public-svc" not found Jan 20 17:50:28 crc kubenswrapper[4558]: E0120 17:50:28.274343 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0a3695d2-9733-430d-a489-694e9e97b586-public-tls-certs podName:0a3695d2-9733-430d-a489-694e9e97b586 nodeName:}" failed. No retries permitted until 2026-01-20 17:50:30.27433699 +0000 UTC m=+4124.034674956 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/0a3695d2-9733-430d-a489-694e9e97b586-public-tls-certs") pod "keystone-9795586d8-4dv95" (UID: "0a3695d2-9733-430d-a489-694e9e97b586") : secret "cert-keystone-public-svc" not found Jan 20 17:50:28 crc kubenswrapper[4558]: E0120 17:50:28.274372 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-barbican-public-svc: secret "cert-barbican-public-svc" not found Jan 20 17:50:28 crc kubenswrapper[4558]: E0120 17:50:28.274413 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d127fb40-6eff-4ccd-922c-b209ac4edbda-public-tls-certs podName:d127fb40-6eff-4ccd-922c-b209ac4edbda nodeName:}" failed. No retries permitted until 2026-01-20 17:50:30.27440095 +0000 UTC m=+4124.034738916 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/d127fb40-6eff-4ccd-922c-b209ac4edbda-public-tls-certs") pod "barbican-api-5968cb86f6-ctfg5" (UID: "d127fb40-6eff-4ccd-922c-b209ac4edbda") : secret "cert-barbican-public-svc" not found Jan 20 17:50:28 crc kubenswrapper[4558]: E0120 17:50:28.274442 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-ovndbcluster-nb-ovndbs: secret "cert-ovndbcluster-nb-ovndbs" not found Jan 20 17:50:28 crc kubenswrapper[4558]: E0120 17:50:28.274459 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-ovsdbserver-nb-tls-certs podName:bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162 nodeName:}" failed. No retries permitted until 2026-01-20 17:50:29.274453709 +0000 UTC m=+4123.034791676 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "ovsdbserver-nb-tls-certs" (UniqueName: "kubernetes.io/secret/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-ovsdbserver-nb-tls-certs") pod "ovsdbserver-nb-0" (UID: "bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162") : secret "cert-ovndbcluster-nb-ovndbs" not found Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.277979 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.278309 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-sb-0_9330efb9-cbc2-4c4a-9928-eedf93324d57/ovsdbserver-sb/0.log" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.278386 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"9330efb9-cbc2-4c4a-9928-eedf93324d57","Type":"ContainerDied","Data":"fcfdfce2caa98b6180b4e9bce9d6dad2bb4cb788e727536da1ec1f9c3b2a2599"} Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.278478 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.283513 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"c43f6123-71d1-4cf0-b919-00b1e9836c8b","Type":"ContainerDied","Data":"852ff7584d7f888bbfb6f3fea7c51e4b3aa60cd86625c06b56888fae6d3dfd89"} Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.283578 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.285178 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-6c48cb9cdf-9688c" event={"ID":"769afcb7-df8b-4d95-b662-9f5032227822","Type":"ContainerStarted","Data":"a5d920e84b45309d975c5a55142f856928971b658393631084941d3b078f522a"} Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.285221 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-6c48cb9cdf-9688c" event={"ID":"769afcb7-df8b-4d95-b662-9f5032227822","Type":"ContainerStarted","Data":"90b19c0fa576e6455d0cc3c15cd90c006b0ab179a1017800a7266892fa8e203e"} Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.287090 4558 generic.go:334] "Generic (PLEG): container finished" podID="3c44870c-4ab7-436a-9862-d6a2a4487bed" containerID="7ab0ab6375e86fd4837f32d21526ca8433f782d8e6f806a0848eab5773b75fbd" exitCode=0 Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.287204 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"3c44870c-4ab7-436a-9862-d6a2a4487bed","Type":"ContainerDied","Data":"7ab0ab6375e86fd4837f32d21526ca8433f782d8e6f806a0848eab5773b75fbd"} Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.288247 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.289131 4558 generic.go:334] "Generic (PLEG): container finished" podID="a0ddca73-7f97-4ec0-9a04-4686f84eb2e6" containerID="13f594567a5a674fbc4ad2781a948fc685f50566f533b0772fdb8535db472a29" exitCode=137 Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.289224 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstackclient" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.290749 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"6254cefa-8c4a-472e-9faf-4633c6d1618e","Type":"ContainerDied","Data":"b5a8b138f82bb2dcace9ca9aae5f18f97b2ddb2df351144b20183c3ff5086641"} Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.290813 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.305817 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-keystone-listener-5bb5dd7cd4-w4jnj" podStartSLOduration=3.305803849 podStartE2EDuration="3.305803849s" podCreationTimestamp="2026-01-20 17:50:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:50:28.30307649 +0000 UTC m=+4122.063414458" watchObservedRunningTime="2026-01-20 17:50:28.305803849 +0000 UTC m=+4122.066141816" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.322762 4558 scope.go:117] "RemoveContainer" containerID="c8f37f1ccc0f6204e6d44935b746f6c6ede31b796dd74e70992f82e8f5ab72db" Jan 20 17:50:28 crc kubenswrapper[4558]: E0120 17:50:28.323257 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c8f37f1ccc0f6204e6d44935b746f6c6ede31b796dd74e70992f82e8f5ab72db\": container with ID starting with c8f37f1ccc0f6204e6d44935b746f6c6ede31b796dd74e70992f82e8f5ab72db not found: ID does not exist" containerID="c8f37f1ccc0f6204e6d44935b746f6c6ede31b796dd74e70992f82e8f5ab72db" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.323287 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c8f37f1ccc0f6204e6d44935b746f6c6ede31b796dd74e70992f82e8f5ab72db"} err="failed to get container status \"c8f37f1ccc0f6204e6d44935b746f6c6ede31b796dd74e70992f82e8f5ab72db\": rpc error: code = NotFound desc = could not find container \"c8f37f1ccc0f6204e6d44935b746f6c6ede31b796dd74e70992f82e8f5ab72db\": container with ID starting with c8f37f1ccc0f6204e6d44935b746f6c6ede31b796dd74e70992f82e8f5ab72db not found: ID does not exist" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.323308 4558 scope.go:117] "RemoveContainer" containerID="7f96a30bdd54fd7b066425edfff74600fb47af1d01a01f1f5b5a15ce2844ce91" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.359594 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.361549 4558 scope.go:117] "RemoveContainer" containerID="d05ee219c0f8bccc7fa261cda35e2938c1b449a43b564bf9b569dafd93b0c9a7" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.374396 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/a0ddca73-7f97-4ec0-9a04-4686f84eb2e6-openstack-config\") pod \"a0ddca73-7f97-4ec0-9a04-4686f84eb2e6\" (UID: \"a0ddca73-7f97-4ec0-9a04-4686f84eb2e6\") " Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.374469 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/3c44870c-4ab7-436a-9862-d6a2a4487bed-combined-ca-bundle\") pod \"3c44870c-4ab7-436a-9862-d6a2a4487bed\" (UID: \"3c44870c-4ab7-436a-9862-d6a2a4487bed\") " Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.374516 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nmsq5\" (UniqueName: \"kubernetes.io/projected/3c44870c-4ab7-436a-9862-d6a2a4487bed-kube-api-access-nmsq5\") pod \"3c44870c-4ab7-436a-9862-d6a2a4487bed\" (UID: \"3c44870c-4ab7-436a-9862-d6a2a4487bed\") " Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.374677 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hpk48\" (UniqueName: \"kubernetes.io/projected/a0ddca73-7f97-4ec0-9a04-4686f84eb2e6-kube-api-access-hpk48\") pod \"a0ddca73-7f97-4ec0-9a04-4686f84eb2e6\" (UID: \"a0ddca73-7f97-4ec0-9a04-4686f84eb2e6\") " Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.374721 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c44870c-4ab7-436a-9862-d6a2a4487bed-config-data\") pod \"3c44870c-4ab7-436a-9862-d6a2a4487bed\" (UID: \"3c44870c-4ab7-436a-9862-d6a2a4487bed\") " Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.374764 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0ddca73-7f97-4ec0-9a04-4686f84eb2e6-combined-ca-bundle\") pod \"a0ddca73-7f97-4ec0-9a04-4686f84eb2e6\" (UID: \"a0ddca73-7f97-4ec0-9a04-4686f84eb2e6\") " Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.374871 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/a0ddca73-7f97-4ec0-9a04-4686f84eb2e6-openstack-config-secret\") pod \"a0ddca73-7f97-4ec0-9a04-4686f84eb2e6\" (UID: \"a0ddca73-7f97-4ec0-9a04-4686f84eb2e6\") " Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.377240 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.412737 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-7d4c95f78b-qjwvm"] Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.414044 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-keystone-listener-7d4c95f78b-qjwvm" podUID="e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2" containerName="barbican-keystone-listener-log" containerID="cri-o://0eea0088c1de4416bcb9198c0646ec60717050f4f084f5e0c864e31658b39fd7" gracePeriod=30 Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.414192 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-keystone-listener-7d4c95f78b-qjwvm" podUID="e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2" containerName="barbican-keystone-listener" containerID="cri-o://7436f50dac4bab17580af1cbb38a2ef4c0a9b64f22e3397fd9aa8eeeb133fc29" gracePeriod=30 Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.415329 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0ddca73-7f97-4ec0-9a04-4686f84eb2e6-kube-api-access-hpk48" (OuterVolumeSpecName: "kube-api-access-hpk48") pod "a0ddca73-7f97-4ec0-9a04-4686f84eb2e6" (UID: "a0ddca73-7f97-4ec0-9a04-4686f84eb2e6"). 
InnerVolumeSpecName "kube-api-access-hpk48". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.436027 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:50:28 crc kubenswrapper[4558]: E0120 17:50:28.436677 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c3fe526-7107-4cc4-aca2-eb809f317c5c" containerName="memcached" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.436744 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c3fe526-7107-4cc4-aca2-eb809f317c5c" containerName="memcached" Jan 20 17:50:28 crc kubenswrapper[4558]: E0120 17:50:28.436804 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c44870c-4ab7-436a-9862-d6a2a4487bed" containerName="nova-cell0-conductor-conductor" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.436857 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c44870c-4ab7-436a-9862-d6a2a4487bed" containerName="nova-cell0-conductor-conductor" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.437149 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="7c3fe526-7107-4cc4-aca2-eb809f317c5c" containerName="memcached" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.437257 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="3c44870c-4ab7-436a-9862-d6a2a4487bed" containerName="nova-cell0-conductor-conductor" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.438838 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.441407 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3c44870c-4ab7-436a-9862-d6a2a4487bed-kube-api-access-nmsq5" (OuterVolumeSpecName: "kube-api-access-nmsq5") pod "3c44870c-4ab7-436a-9862-d6a2a4487bed" (UID: "3c44870c-4ab7-436a-9862-d6a2a4487bed"). InnerVolumeSpecName "kube-api-access-nmsq5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.445502 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-galera-openstack-svc" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.445738 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-config-data" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.445863 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-scripts" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.446033 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"galera-openstack-dockercfg-7rwrt" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.446641 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a0ddca73-7f97-4ec0-9a04-4686f84eb2e6-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "a0ddca73-7f97-4ec0-9a04-4686f84eb2e6" (UID: "a0ddca73-7f97-4ec0-9a04-4686f84eb2e6"). InnerVolumeSpecName "openstack-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.460968 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-worker-6c48cb9cdf-9688c" podStartSLOduration=3.4609418659999998 podStartE2EDuration="3.460941866s" podCreationTimestamp="2026-01-20 17:50:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:50:28.377644286 +0000 UTC m=+4122.137982253" watchObservedRunningTime="2026-01-20 17:50:28.460941866 +0000 UTC m=+4122.221279824" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.467765 4558 scope.go:117] "RemoveContainer" containerID="df22f4721c8d074f5b610fec573fca752b358557510a94a96c91992dbb2d68ef" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.468341 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0ddca73-7f97-4ec0-9a04-4686f84eb2e6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a0ddca73-7f97-4ec0-9a04-4686f84eb2e6" (UID: "a0ddca73-7f97-4ec0-9a04-4686f84eb2e6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.469694 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3c44870c-4ab7-436a-9862-d6a2a4487bed-config-data" (OuterVolumeSpecName: "config-data") pod "3c44870c-4ab7-436a-9862-d6a2a4487bed" (UID: "3c44870c-4ab7-436a-9862-d6a2a4487bed"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.474036 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3c44870c-4ab7-436a-9862-d6a2a4487bed-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3c44870c-4ab7-436a-9862-d6a2a4487bed" (UID: "3c44870c-4ab7-436a-9862-d6a2a4487bed"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.492098 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c44870c-4ab7-436a-9862-d6a2a4487bed-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.492215 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nmsq5\" (UniqueName: \"kubernetes.io/projected/3c44870c-4ab7-436a-9862-d6a2a4487bed-kube-api-access-nmsq5\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.492280 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hpk48\" (UniqueName: \"kubernetes.io/projected/a0ddca73-7f97-4ec0-9a04-4686f84eb2e6-kube-api-access-hpk48\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.492346 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c44870c-4ab7-436a-9862-d6a2a4487bed-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.492399 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0ddca73-7f97-4ec0-9a04-4686f84eb2e6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.492457 4558 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/a0ddca73-7f97-4ec0-9a04-4686f84eb2e6-openstack-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.510213 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.513113 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-worker-97cdbfcb5-xwv8l"] Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.513451 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-worker-97cdbfcb5-xwv8l" podUID="c5adf97d-8b90-49ca-bbeb-21dd8446a226" containerName="barbican-worker-log" containerID="cri-o://d5d1fc6f44bede9582e8d9e34323e73cfd6032c9c0e5c64c9c713f180efb4c1c" gracePeriod=30 Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.513661 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-worker-97cdbfcb5-xwv8l" podUID="c5adf97d-8b90-49ca-bbeb-21dd8446a226" containerName="barbican-worker" containerID="cri-o://5ad5d5673753b8c1e259ace28ba37c369cd951524f24fdfa1a91a5b011bae4ef" gracePeriod=30 Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.522763 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.530525 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.530778 4558 scope.go:117] "RemoveContainer" containerID="7977976a1e26937bc2db139e21b54dea89156dad7f528ce6155109dd256adfa2" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.538511 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.542803 4558 util.go:30] "No sandbox for pod 
can be found. Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.547149 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-memcached-svc" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.547369 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"memcached-config-data" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.547399 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"memcached-memcached-dockercfg-lc49s" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.555721 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.563380 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.564301 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0ddca73-7f97-4ec0-9a04-4686f84eb2e6-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "a0ddca73-7f97-4ec0-9a04-4686f84eb2e6" (UID: "a0ddca73-7f97-4ec0-9a04-4686f84eb2e6"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.608083 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3f090098-7927-4f68-b49d-4cea135a041c" path="/var/lib/kubelet/pods/3f090098-7927-4f68-b49d-4cea135a041c/volumes" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.608993 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7c3fe526-7107-4cc4-aca2-eb809f317c5c" path="/var/lib/kubelet/pods/7c3fe526-7107-4cc4-aca2-eb809f317c5c/volumes" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.609375 4558 scope.go:117] "RemoveContainer" containerID="7ab0ab6375e86fd4837f32d21526ca8433f782d8e6f806a0848eab5773b75fbd" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.609743 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0ddca73-7f97-4ec0-9a04-4686f84eb2e6" path="/var/lib/kubelet/pods/a0ddca73-7f97-4ec0-9a04-4686f84eb2e6/volumes" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.609841 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/603501da-057d-4d3b-a682-e55e7e2916f4-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"603501da-057d-4d3b-a682-e55e7e2916f4\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.609906 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/603501da-057d-4d3b-a682-e55e7e2916f4-kolla-config\") pod \"openstack-galera-0\" (UID: \"603501da-057d-4d3b-a682-e55e7e2916f4\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.609939 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/603501da-057d-4d3b-a682-e55e7e2916f4-config-data-generated\") pod \"openstack-galera-0\" (UID: \"603501da-057d-4d3b-a682-e55e7e2916f4\") " pod="openstack-kuttl-tests/openstack-galera-0" 
Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.610026 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/603501da-057d-4d3b-a682-e55e7e2916f4-operator-scripts\") pod \"openstack-galera-0\" (UID: \"603501da-057d-4d3b-a682-e55e7e2916f4\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.610061 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"openstack-galera-0\" (UID: \"603501da-057d-4d3b-a682-e55e7e2916f4\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.610100 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/603501da-057d-4d3b-a682-e55e7e2916f4-config-data-default\") pod \"openstack-galera-0\" (UID: \"603501da-057d-4d3b-a682-e55e7e2916f4\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.610145 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p4qbw\" (UniqueName: \"kubernetes.io/projected/603501da-057d-4d3b-a682-e55e7e2916f4-kube-api-access-p4qbw\") pod \"openstack-galera-0\" (UID: \"603501da-057d-4d3b-a682-e55e7e2916f4\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.610304 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/603501da-057d-4d3b-a682-e55e7e2916f4-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"603501da-057d-4d3b-a682-e55e7e2916f4\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.614780 4558 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/a0ddca73-7f97-4ec0-9a04-4686f84eb2e6-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.619315 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c43f6123-71d1-4cf0-b919-00b1e9836c8b" path="/var/lib/kubelet/pods/c43f6123-71d1-4cf0-b919-00b1e9836c8b/volumes" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.621554 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.621595 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.624322 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.624341 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.624578 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.628139 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ovndbcluster-sb-ovndbs" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.629355 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovndbcluster-sb-scripts" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.631173 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ovncluster-ovndbcluster-sb-dockercfg-f2cf9" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.631239 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovndbcluster-sb-config" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.655933 4558 scope.go:117] "RemoveContainer" containerID="13f594567a5a674fbc4ad2781a948fc685f50566f533b0772fdb8535db472a29" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.663207 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.710700 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.715135 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.715303 4558 scope.go:117] "RemoveContainer" containerID="13f594567a5a674fbc4ad2781a948fc685f50566f533b0772fdb8535db472a29" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.716873 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-novncproxy-cell1-public-svc" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.717070 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-novncproxy-cell1-vencrypt" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.718624 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5239f3e2-7f54-4042-b0da-02f7608224df-kolla-config\") pod \"memcached-0\" (UID: \"5239f3e2-7f54-4042-b0da-02f7608224df\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.718689 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5239f3e2-7f54-4042-b0da-02f7608224df-config-data\") pod \"memcached-0\" (UID: \"5239f3e2-7f54-4042-b0da-02f7608224df\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.718718 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/603501da-057d-4d3b-a682-e55e7e2916f4-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"603501da-057d-4d3b-a682-e55e7e2916f4\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.718753 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/5239f3e2-7f54-4042-b0da-02f7608224df-memcached-tls-certs\") pod \"memcached-0\" (UID: 
\"5239f3e2-7f54-4042-b0da-02f7608224df\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.718775 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/603501da-057d-4d3b-a682-e55e7e2916f4-kolla-config\") pod \"openstack-galera-0\" (UID: \"603501da-057d-4d3b-a682-e55e7e2916f4\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.718797 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/603501da-057d-4d3b-a682-e55e7e2916f4-config-data-generated\") pod \"openstack-galera-0\" (UID: \"603501da-057d-4d3b-a682-e55e7e2916f4\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.718816 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s7f6m\" (UniqueName: \"kubernetes.io/projected/5239f3e2-7f54-4042-b0da-02f7608224df-kube-api-access-s7f6m\") pod \"memcached-0\" (UID: \"5239f3e2-7f54-4042-b0da-02f7608224df\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.718859 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/fc60cc21-f159-40bf-beca-c62718f57b9c-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"fc60cc21-f159-40bf-beca-c62718f57b9c\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.718888 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n29rj\" (UniqueName: \"kubernetes.io/projected/fc60cc21-f159-40bf-beca-c62718f57b9c-kube-api-access-n29rj\") pod \"ovsdbserver-sb-0\" (UID: \"fc60cc21-f159-40bf-beca-c62718f57b9c\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.718914 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/fc60cc21-f159-40bf-beca-c62718f57b9c-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"fc60cc21-f159-40bf-beca-c62718f57b9c\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.718946 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc60cc21-f159-40bf-beca-c62718f57b9c-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"fc60cc21-f159-40bf-beca-c62718f57b9c\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.718996 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-sb-0\" (UID: \"fc60cc21-f159-40bf-beca-c62718f57b9c\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.719020 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/603501da-057d-4d3b-a682-e55e7e2916f4-operator-scripts\") pod \"openstack-galera-0\" (UID: 
\"603501da-057d-4d3b-a682-e55e7e2916f4\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.719047 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"openstack-galera-0\" (UID: \"603501da-057d-4d3b-a682-e55e7e2916f4\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.719089 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/603501da-057d-4d3b-a682-e55e7e2916f4-config-data-default\") pod \"openstack-galera-0\" (UID: \"603501da-057d-4d3b-a682-e55e7e2916f4\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.719124 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p4qbw\" (UniqueName: \"kubernetes.io/projected/603501da-057d-4d3b-a682-e55e7e2916f4-kube-api-access-p4qbw\") pod \"openstack-galera-0\" (UID: \"603501da-057d-4d3b-a682-e55e7e2916f4\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.719143 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/fc60cc21-f159-40bf-beca-c62718f57b9c-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"fc60cc21-f159-40bf-beca-c62718f57b9c\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.719191 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5239f3e2-7f54-4042-b0da-02f7608224df-combined-ca-bundle\") pod \"memcached-0\" (UID: \"5239f3e2-7f54-4042-b0da-02f7608224df\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.719215 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fc60cc21-f159-40bf-beca-c62718f57b9c-config\") pod \"ovsdbserver-sb-0\" (UID: \"fc60cc21-f159-40bf-beca-c62718f57b9c\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.719252 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fc60cc21-f159-40bf-beca-c62718f57b9c-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"fc60cc21-f159-40bf-beca-c62718f57b9c\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.719309 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/603501da-057d-4d3b-a682-e55e7e2916f4-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"603501da-057d-4d3b-a682-e55e7e2916f4\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:50:28 crc kubenswrapper[4558]: E0120 17:50:28.719756 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"13f594567a5a674fbc4ad2781a948fc685f50566f533b0772fdb8535db472a29\": container with ID starting with 13f594567a5a674fbc4ad2781a948fc685f50566f533b0772fdb8535db472a29 not found: 
ID does not exist" containerID="13f594567a5a674fbc4ad2781a948fc685f50566f533b0772fdb8535db472a29" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.719791 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"13f594567a5a674fbc4ad2781a948fc685f50566f533b0772fdb8535db472a29"} err="failed to get container status \"13f594567a5a674fbc4ad2781a948fc685f50566f533b0772fdb8535db472a29\": rpc error: code = NotFound desc = could not find container \"13f594567a5a674fbc4ad2781a948fc685f50566f533b0772fdb8535db472a29\": container with ID starting with 13f594567a5a674fbc4ad2781a948fc685f50566f533b0772fdb8535db472a29 not found: ID does not exist" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.719820 4558 scope.go:117] "RemoveContainer" containerID="5c8b0e075d6b80a58c44316d67ccf05a22f59a72ae3bca7872921f2e03c311bf" Jan 20 17:50:28 crc kubenswrapper[4558]: E0120 17:50:28.720358 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-galera-openstack-svc: secret "cert-galera-openstack-svc" not found Jan 20 17:50:28 crc kubenswrapper[4558]: E0120 17:50:28.720404 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/603501da-057d-4d3b-a682-e55e7e2916f4-galera-tls-certs podName:603501da-057d-4d3b-a682-e55e7e2916f4 nodeName:}" failed. No retries permitted until 2026-01-20 17:50:29.220389005 +0000 UTC m=+4122.980726972 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "galera-tls-certs" (UniqueName: "kubernetes.io/secret/603501da-057d-4d3b-a682-e55e7e2916f4-galera-tls-certs") pod "openstack-galera-0" (UID: "603501da-057d-4d3b-a682-e55e7e2916f4") : secret "cert-galera-openstack-svc" not found Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.720707 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-novncproxy-config-data" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.721340 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/603501da-057d-4d3b-a682-e55e7e2916f4-config-data-generated\") pod \"openstack-galera-0\" (UID: \"603501da-057d-4d3b-a682-e55e7e2916f4\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.721560 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/603501da-057d-4d3b-a682-e55e7e2916f4-kolla-config\") pod \"openstack-galera-0\" (UID: \"603501da-057d-4d3b-a682-e55e7e2916f4\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.722517 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"openstack-galera-0\" (UID: \"603501da-057d-4d3b-a682-e55e7e2916f4\") device mount path \"/mnt/openstack/pv14\"" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.727921 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/603501da-057d-4d3b-a682-e55e7e2916f4-config-data-default\") pod \"openstack-galera-0\" (UID: \"603501da-057d-4d3b-a682-e55e7e2916f4\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.727933 4558 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/603501da-057d-4d3b-a682-e55e7e2916f4-operator-scripts\") pod \"openstack-galera-0\" (UID: \"603501da-057d-4d3b-a682-e55e7e2916f4\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.731217 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/603501da-057d-4d3b-a682-e55e7e2916f4-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"603501da-057d-4d3b-a682-e55e7e2916f4\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.732699 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.746960 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p4qbw\" (UniqueName: \"kubernetes.io/projected/603501da-057d-4d3b-a682-e55e7e2916f4-kube-api-access-p4qbw\") pod \"openstack-galera-0\" (UID: \"603501da-057d-4d3b-a682-e55e7e2916f4\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.765833 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"openstack-galera-0\" (UID: \"603501da-057d-4d3b-a682-e55e7e2916f4\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.801030 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.811324 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.817713 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.819413 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.821158 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fc60cc21-f159-40bf-beca-c62718f57b9c-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"fc60cc21-f159-40bf-beca-c62718f57b9c\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.821236 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/581c2419-48d9-4b7d-b71b-ce5cceb0326d-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"581c2419-48d9-4b7d-b71b-ce5cceb0326d\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.821385 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kcptj\" (UniqueName: \"kubernetes.io/projected/581c2419-48d9-4b7d-b71b-ce5cceb0326d-kube-api-access-kcptj\") pod \"nova-cell1-novncproxy-0\" (UID: \"581c2419-48d9-4b7d-b71b-ce5cceb0326d\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.821421 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5239f3e2-7f54-4042-b0da-02f7608224df-kolla-config\") pod \"memcached-0\" (UID: \"5239f3e2-7f54-4042-b0da-02f7608224df\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.821466 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5239f3e2-7f54-4042-b0da-02f7608224df-config-data\") pod \"memcached-0\" (UID: \"5239f3e2-7f54-4042-b0da-02f7608224df\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.821503 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/581c2419-48d9-4b7d-b71b-ce5cceb0326d-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"581c2419-48d9-4b7d-b71b-ce5cceb0326d\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.821534 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/5239f3e2-7f54-4042-b0da-02f7608224df-memcached-tls-certs\") pod \"memcached-0\" (UID: \"5239f3e2-7f54-4042-b0da-02f7608224df\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.821569 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s7f6m\" (UniqueName: \"kubernetes.io/projected/5239f3e2-7f54-4042-b0da-02f7608224df-kube-api-access-s7f6m\") pod \"memcached-0\" (UID: \"5239f3e2-7f54-4042-b0da-02f7608224df\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.821600 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/fc60cc21-f159-40bf-beca-c62718f57b9c-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"fc60cc21-f159-40bf-beca-c62718f57b9c\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 
17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.821631 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n29rj\" (UniqueName: \"kubernetes.io/projected/fc60cc21-f159-40bf-beca-c62718f57b9c-kube-api-access-n29rj\") pod \"ovsdbserver-sb-0\" (UID: \"fc60cc21-f159-40bf-beca-c62718f57b9c\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.821665 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/fc60cc21-f159-40bf-beca-c62718f57b9c-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"fc60cc21-f159-40bf-beca-c62718f57b9c\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.821691 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/581c2419-48d9-4b7d-b71b-ce5cceb0326d-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"581c2419-48d9-4b7d-b71b-ce5cceb0326d\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.821725 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc60cc21-f159-40bf-beca-c62718f57b9c-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"fc60cc21-f159-40bf-beca-c62718f57b9c\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.821750 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/581c2419-48d9-4b7d-b71b-ce5cceb0326d-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"581c2419-48d9-4b7d-b71b-ce5cceb0326d\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.821783 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-sb-0\" (UID: \"fc60cc21-f159-40bf-beca-c62718f57b9c\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.821867 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/fc60cc21-f159-40bf-beca-c62718f57b9c-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"fc60cc21-f159-40bf-beca-c62718f57b9c\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.821897 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5239f3e2-7f54-4042-b0da-02f7608224df-combined-ca-bundle\") pod \"memcached-0\" (UID: \"5239f3e2-7f54-4042-b0da-02f7608224df\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.821919 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fc60cc21-f159-40bf-beca-c62718f57b9c-config\") pod \"ovsdbserver-sb-0\" (UID: \"fc60cc21-f159-40bf-beca-c62718f57b9c\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.822209 4558 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fc60cc21-f159-40bf-beca-c62718f57b9c-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"fc60cc21-f159-40bf-beca-c62718f57b9c\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.822475 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/fc60cc21-f159-40bf-beca-c62718f57b9c-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"fc60cc21-f159-40bf-beca-c62718f57b9c\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.823002 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5239f3e2-7f54-4042-b0da-02f7608224df-kolla-config\") pod \"memcached-0\" (UID: \"5239f3e2-7f54-4042-b0da-02f7608224df\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.823519 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-conductor-config-data" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.823519 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5239f3e2-7f54-4042-b0da-02f7608224df-config-data\") pod \"memcached-0\" (UID: \"5239f3e2-7f54-4042-b0da-02f7608224df\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:50:28 crc kubenswrapper[4558]: E0120 17:50:28.823620 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-memcached-svc: secret "cert-memcached-svc" not found Jan 20 17:50:28 crc kubenswrapper[4558]: E0120 17:50:28.823671 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5239f3e2-7f54-4042-b0da-02f7608224df-memcached-tls-certs podName:5239f3e2-7f54-4042-b0da-02f7608224df nodeName:}" failed. No retries permitted until 2026-01-20 17:50:29.323657218 +0000 UTC m=+4123.083995185 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memcached-tls-certs" (UniqueName: "kubernetes.io/secret/5239f3e2-7f54-4042-b0da-02f7608224df-memcached-tls-certs") pod "memcached-0" (UID: "5239f3e2-7f54-4042-b0da-02f7608224df") : secret "cert-memcached-svc" not found Jan 20 17:50:28 crc kubenswrapper[4558]: E0120 17:50:28.825121 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-ovn-metrics: secret "cert-ovn-metrics" not found Jan 20 17:50:28 crc kubenswrapper[4558]: E0120 17:50:28.825182 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fc60cc21-f159-40bf-beca-c62718f57b9c-metrics-certs-tls-certs podName:fc60cc21-f159-40bf-beca-c62718f57b9c nodeName:}" failed. No retries permitted until 2026-01-20 17:50:29.325153572 +0000 UTC m=+4123.085491539 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs-tls-certs" (UniqueName: "kubernetes.io/secret/fc60cc21-f159-40bf-beca-c62718f57b9c-metrics-certs-tls-certs") pod "ovsdbserver-sb-0" (UID: "fc60cc21-f159-40bf-beca-c62718f57b9c") : secret "cert-ovn-metrics" not found Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.825376 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-sb-0\" (UID: \"fc60cc21-f159-40bf-beca-c62718f57b9c\") device mount path \"/mnt/openstack/pv03\"" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.825468 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fc60cc21-f159-40bf-beca-c62718f57b9c-config\") pod \"ovsdbserver-sb-0\" (UID: \"fc60cc21-f159-40bf-beca-c62718f57b9c\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:50:28 crc kubenswrapper[4558]: E0120 17:50:28.825570 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-ovndbcluster-sb-ovndbs: secret "cert-ovndbcluster-sb-ovndbs" not found Jan 20 17:50:28 crc kubenswrapper[4558]: E0120 17:50:28.825616 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fc60cc21-f159-40bf-beca-c62718f57b9c-ovsdbserver-sb-tls-certs podName:fc60cc21-f159-40bf-beca-c62718f57b9c nodeName:}" failed. No retries permitted until 2026-01-20 17:50:29.325603958 +0000 UTC m=+4123.085941926 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "ovsdbserver-sb-tls-certs" (UniqueName: "kubernetes.io/secret/fc60cc21-f159-40bf-beca-c62718f57b9c-ovsdbserver-sb-tls-certs") pod "ovsdbserver-sb-0" (UID: "fc60cc21-f159-40bf-beca-c62718f57b9c") : secret "cert-ovndbcluster-sb-ovndbs" not found Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.826515 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.828858 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5239f3e2-7f54-4042-b0da-02f7608224df-combined-ca-bundle\") pod \"memcached-0\" (UID: \"5239f3e2-7f54-4042-b0da-02f7608224df\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.834214 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.834511 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="a3e03093-9a03-4aac-bcde-475dda6c3dcc" containerName="ceilometer-central-agent" containerID="cri-o://518dffb0d9b0f0146e59f64982b965f84538ad2716a9c5d37e992ce46cfdadbb" gracePeriod=30 Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.834681 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="a3e03093-9a03-4aac-bcde-475dda6c3dcc" containerName="proxy-httpd" containerID="cri-o://8317dbc00d30b8e256eff4e4e631c97112ea996e280ba9576ca0f29a56a05240" gracePeriod=30 Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.834721 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="a3e03093-9a03-4aac-bcde-475dda6c3dcc" 
containerName="sg-core" containerID="cri-o://dddc0d84255205f19bccd3e01061af9fd0d1e2f5dc72e122ea02fa340a0ca32c" gracePeriod=30 Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.834758 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="a3e03093-9a03-4aac-bcde-475dda6c3dcc" containerName="ceilometer-notification-agent" containerID="cri-o://0ef28b39a3024f10f373d8df6dbcc0e604d25efcb4ff0cd7b08a64f50ee2a001" gracePeriod=30 Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.837202 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc60cc21-f159-40bf-beca-c62718f57b9c-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"fc60cc21-f159-40bf-beca-c62718f57b9c\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.843834 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s7f6m\" (UniqueName: \"kubernetes.io/projected/5239f3e2-7f54-4042-b0da-02f7608224df-kube-api-access-s7f6m\") pod \"memcached-0\" (UID: \"5239f3e2-7f54-4042-b0da-02f7608224df\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.849816 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n29rj\" (UniqueName: \"kubernetes.io/projected/fc60cc21-f159-40bf-beca-c62718f57b9c-kube-api-access-n29rj\") pod \"ovsdbserver-sb-0\" (UID: \"fc60cc21-f159-40bf-beca-c62718f57b9c\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.885508 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-sb-0\" (UID: \"fc60cc21-f159-40bf-beca-c62718f57b9c\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.926900 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/924fa7ce-8d60-4b2f-b62b-d5e146474f71-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"924fa7ce-8d60-4b2f-b62b-d5e146474f71\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.926981 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/581c2419-48d9-4b7d-b71b-ce5cceb0326d-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"581c2419-48d9-4b7d-b71b-ce5cceb0326d\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.927155 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kcptj\" (UniqueName: \"kubernetes.io/projected/581c2419-48d9-4b7d-b71b-ce5cceb0326d-kube-api-access-kcptj\") pod \"nova-cell1-novncproxy-0\" (UID: \"581c2419-48d9-4b7d-b71b-ce5cceb0326d\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.927282 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/581c2419-48d9-4b7d-b71b-ce5cceb0326d-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"581c2419-48d9-4b7d-b71b-ce5cceb0326d\") " 
pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.927401 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/581c2419-48d9-4b7d-b71b-ce5cceb0326d-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"581c2419-48d9-4b7d-b71b-ce5cceb0326d\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:50:28 crc kubenswrapper[4558]: E0120 17:50:28.927412 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-nova-novncproxy-cell1-vencrypt: secret "cert-nova-novncproxy-cell1-vencrypt" not found Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.927440 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/924fa7ce-8d60-4b2f-b62b-d5e146474f71-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"924fa7ce-8d60-4b2f-b62b-d5e146474f71\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:50:28 crc kubenswrapper[4558]: E0120 17:50:28.927493 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/581c2419-48d9-4b7d-b71b-ce5cceb0326d-vencrypt-tls-certs podName:581c2419-48d9-4b7d-b71b-ce5cceb0326d nodeName:}" failed. No retries permitted until 2026-01-20 17:50:29.427467732 +0000 UTC m=+4123.187805699 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "vencrypt-tls-certs" (UniqueName: "kubernetes.io/secret/581c2419-48d9-4b7d-b71b-ce5cceb0326d-vencrypt-tls-certs") pod "nova-cell1-novncproxy-0" (UID: "581c2419-48d9-4b7d-b71b-ce5cceb0326d") : secret "cert-nova-novncproxy-cell1-vencrypt" not found Jan 20 17:50:28 crc kubenswrapper[4558]: E0120 17:50:28.927505 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-nova-novncproxy-cell1-public-svc: secret "cert-nova-novncproxy-cell1-public-svc" not found Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.927529 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/581c2419-48d9-4b7d-b71b-ce5cceb0326d-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"581c2419-48d9-4b7d-b71b-ce5cceb0326d\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:50:28 crc kubenswrapper[4558]: E0120 17:50:28.927562 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/581c2419-48d9-4b7d-b71b-ce5cceb0326d-nova-novncproxy-tls-certs podName:581c2419-48d9-4b7d-b71b-ce5cceb0326d nodeName:}" failed. No retries permitted until 2026-01-20 17:50:29.427546258 +0000 UTC m=+4123.187884226 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "nova-novncproxy-tls-certs" (UniqueName: "kubernetes.io/secret/581c2419-48d9-4b7d-b71b-ce5cceb0326d-nova-novncproxy-tls-certs") pod "nova-cell1-novncproxy-0" (UID: "581c2419-48d9-4b7d-b71b-ce5cceb0326d") : secret "cert-nova-novncproxy-cell1-public-svc" not found Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.927603 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-skmt5\" (UniqueName: \"kubernetes.io/projected/924fa7ce-8d60-4b2f-b62b-d5e146474f71-kube-api-access-skmt5\") pod \"nova-cell0-conductor-0\" (UID: \"924fa7ce-8d60-4b2f-b62b-d5e146474f71\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.932127 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/581c2419-48d9-4b7d-b71b-ce5cceb0326d-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"581c2419-48d9-4b7d-b71b-ce5cceb0326d\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.934638 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/581c2419-48d9-4b7d-b71b-ce5cceb0326d-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"581c2419-48d9-4b7d-b71b-ce5cceb0326d\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:50:28 crc kubenswrapper[4558]: I0120 17:50:28.943972 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kcptj\" (UniqueName: \"kubernetes.io/projected/581c2419-48d9-4b7d-b71b-ce5cceb0326d-kube-api-access-kcptj\") pod \"nova-cell1-novncproxy-0\" (UID: \"581c2419-48d9-4b7d-b71b-ce5cceb0326d\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:50:29 crc kubenswrapper[4558]: I0120 17:50:29.030330 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/924fa7ce-8d60-4b2f-b62b-d5e146474f71-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"924fa7ce-8d60-4b2f-b62b-d5e146474f71\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:50:29 crc kubenswrapper[4558]: I0120 17:50:29.030412 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-skmt5\" (UniqueName: \"kubernetes.io/projected/924fa7ce-8d60-4b2f-b62b-d5e146474f71-kube-api-access-skmt5\") pod \"nova-cell0-conductor-0\" (UID: \"924fa7ce-8d60-4b2f-b62b-d5e146474f71\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:50:29 crc kubenswrapper[4558]: I0120 17:50:29.030554 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/924fa7ce-8d60-4b2f-b62b-d5e146474f71-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"924fa7ce-8d60-4b2f-b62b-d5e146474f71\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:50:29 crc kubenswrapper[4558]: I0120 17:50:29.035733 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/924fa7ce-8d60-4b2f-b62b-d5e146474f71-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"924fa7ce-8d60-4b2f-b62b-d5e146474f71\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:50:29 crc kubenswrapper[4558]: I0120 17:50:29.036801 4558 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/924fa7ce-8d60-4b2f-b62b-d5e146474f71-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"924fa7ce-8d60-4b2f-b62b-d5e146474f71\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:50:29 crc kubenswrapper[4558]: I0120 17:50:29.046623 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-skmt5\" (UniqueName: \"kubernetes.io/projected/924fa7ce-8d60-4b2f-b62b-d5e146474f71-kube-api-access-skmt5\") pod \"nova-cell0-conductor-0\" (UID: \"924fa7ce-8d60-4b2f-b62b-d5e146474f71\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:50:29 crc kubenswrapper[4558]: I0120 17:50:29.236761 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/603501da-057d-4d3b-a682-e55e7e2916f4-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"603501da-057d-4d3b-a682-e55e7e2916f4\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:50:29 crc kubenswrapper[4558]: E0120 17:50:29.236943 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-galera-openstack-svc: secret "cert-galera-openstack-svc" not found Jan 20 17:50:29 crc kubenswrapper[4558]: E0120 17:50:29.237033 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/603501da-057d-4d3b-a682-e55e7e2916f4-galera-tls-certs podName:603501da-057d-4d3b-a682-e55e7e2916f4 nodeName:}" failed. No retries permitted until 2026-01-20 17:50:30.237014146 +0000 UTC m=+4123.997352112 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "galera-tls-certs" (UniqueName: "kubernetes.io/secret/603501da-057d-4d3b-a682-e55e7e2916f4-galera-tls-certs") pod "openstack-galera-0" (UID: "603501da-057d-4d3b-a682-e55e7e2916f4") : secret "cert-galera-openstack-svc" not found Jan 20 17:50:29 crc kubenswrapper[4558]: I0120 17:50:29.303454 4558 generic.go:334] "Generic (PLEG): container finished" podID="e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2" containerID="0eea0088c1de4416bcb9198c0646ec60717050f4f084f5e0c864e31658b39fd7" exitCode=143 Jan 20 17:50:29 crc kubenswrapper[4558]: I0120 17:50:29.303509 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-7d4c95f78b-qjwvm" event={"ID":"e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2","Type":"ContainerDied","Data":"0eea0088c1de4416bcb9198c0646ec60717050f4f084f5e0c864e31658b39fd7"} Jan 20 17:50:29 crc kubenswrapper[4558]: I0120 17:50:29.310633 4558 generic.go:334] "Generic (PLEG): container finished" podID="a3e03093-9a03-4aac-bcde-475dda6c3dcc" containerID="8317dbc00d30b8e256eff4e4e631c97112ea996e280ba9576ca0f29a56a05240" exitCode=0 Jan 20 17:50:29 crc kubenswrapper[4558]: I0120 17:50:29.310658 4558 generic.go:334] "Generic (PLEG): container finished" podID="a3e03093-9a03-4aac-bcde-475dda6c3dcc" containerID="dddc0d84255205f19bccd3e01061af9fd0d1e2f5dc72e122ea02fa340a0ca32c" exitCode=2 Jan 20 17:50:29 crc kubenswrapper[4558]: I0120 17:50:29.310666 4558 generic.go:334] "Generic (PLEG): container finished" podID="a3e03093-9a03-4aac-bcde-475dda6c3dcc" containerID="518dffb0d9b0f0146e59f64982b965f84538ad2716a9c5d37e992ce46cfdadbb" exitCode=0 Jan 20 17:50:29 crc kubenswrapper[4558]: I0120 17:50:29.310698 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" 
event={"ID":"a3e03093-9a03-4aac-bcde-475dda6c3dcc","Type":"ContainerDied","Data":"8317dbc00d30b8e256eff4e4e631c97112ea996e280ba9576ca0f29a56a05240"} Jan 20 17:50:29 crc kubenswrapper[4558]: I0120 17:50:29.310716 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"a3e03093-9a03-4aac-bcde-475dda6c3dcc","Type":"ContainerDied","Data":"dddc0d84255205f19bccd3e01061af9fd0d1e2f5dc72e122ea02fa340a0ca32c"} Jan 20 17:50:29 crc kubenswrapper[4558]: I0120 17:50:29.310726 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"a3e03093-9a03-4aac-bcde-475dda6c3dcc","Type":"ContainerDied","Data":"518dffb0d9b0f0146e59f64982b965f84538ad2716a9c5d37e992ce46cfdadbb"} Jan 20 17:50:29 crc kubenswrapper[4558]: I0120 17:50:29.316272 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:50:29 crc kubenswrapper[4558]: I0120 17:50:29.317327 4558 generic.go:334] "Generic (PLEG): container finished" podID="c5adf97d-8b90-49ca-bbeb-21dd8446a226" containerID="d5d1fc6f44bede9582e8d9e34323e73cfd6032c9c0e5c64c9c713f180efb4c1c" exitCode=143 Jan 20 17:50:29 crc kubenswrapper[4558]: I0120 17:50:29.317397 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-97cdbfcb5-xwv8l" event={"ID":"c5adf97d-8b90-49ca-bbeb-21dd8446a226","Type":"ContainerDied","Data":"d5d1fc6f44bede9582e8d9e34323e73cfd6032c9c0e5c64c9c713f180efb4c1c"} Jan 20 17:50:29 crc kubenswrapper[4558]: I0120 17:50:29.338149 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/fc60cc21-f159-40bf-beca-c62718f57b9c-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"fc60cc21-f159-40bf-beca-c62718f57b9c\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:50:29 crc kubenswrapper[4558]: I0120 17:50:29.338263 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:50:29 crc kubenswrapper[4558]: I0120 17:50:29.338298 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/fc60cc21-f159-40bf-beca-c62718f57b9c-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"fc60cc21-f159-40bf-beca-c62718f57b9c\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:50:29 crc kubenswrapper[4558]: I0120 17:50:29.338364 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:50:29 crc kubenswrapper[4558]: I0120 17:50:29.338477 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/5239f3e2-7f54-4042-b0da-02f7608224df-memcached-tls-certs\") pod \"memcached-0\" (UID: \"5239f3e2-7f54-4042-b0da-02f7608224df\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:50:29 crc kubenswrapper[4558]: E0120 17:50:29.338637 4558 
secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-memcached-svc: secret "cert-memcached-svc" not found Jan 20 17:50:29 crc kubenswrapper[4558]: E0120 17:50:29.338698 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5239f3e2-7f54-4042-b0da-02f7608224df-memcached-tls-certs podName:5239f3e2-7f54-4042-b0da-02f7608224df nodeName:}" failed. No retries permitted until 2026-01-20 17:50:30.338681608 +0000 UTC m=+4124.099019576 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "memcached-tls-certs" (UniqueName: "kubernetes.io/secret/5239f3e2-7f54-4042-b0da-02f7608224df-memcached-tls-certs") pod "memcached-0" (UID: "5239f3e2-7f54-4042-b0da-02f7608224df") : secret "cert-memcached-svc" not found Jan 20 17:50:29 crc kubenswrapper[4558]: E0120 17:50:29.339085 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-ovn-metrics: secret "cert-ovn-metrics" not found Jan 20 17:50:29 crc kubenswrapper[4558]: E0120 17:50:29.339118 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fc60cc21-f159-40bf-beca-c62718f57b9c-metrics-certs-tls-certs podName:fc60cc21-f159-40bf-beca-c62718f57b9c nodeName:}" failed. No retries permitted until 2026-01-20 17:50:30.339111096 +0000 UTC m=+4124.099449063 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs-tls-certs" (UniqueName: "kubernetes.io/secret/fc60cc21-f159-40bf-beca-c62718f57b9c-metrics-certs-tls-certs") pod "ovsdbserver-sb-0" (UID: "fc60cc21-f159-40bf-beca-c62718f57b9c") : secret "cert-ovn-metrics" not found Jan 20 17:50:29 crc kubenswrapper[4558]: E0120 17:50:29.339156 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-ovn-metrics: secret "cert-ovn-metrics" not found Jan 20 17:50:29 crc kubenswrapper[4558]: E0120 17:50:29.339196 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-metrics-certs-tls-certs podName:bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162 nodeName:}" failed. No retries permitted until 2026-01-20 17:50:31.339189423 +0000 UTC m=+4125.099527390 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs-tls-certs" (UniqueName: "kubernetes.io/secret/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-metrics-certs-tls-certs") pod "ovsdbserver-nb-0" (UID: "bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162") : secret "cert-ovn-metrics" not found Jan 20 17:50:29 crc kubenswrapper[4558]: E0120 17:50:29.339236 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-ovndbcluster-sb-ovndbs: secret "cert-ovndbcluster-sb-ovndbs" not found Jan 20 17:50:29 crc kubenswrapper[4558]: E0120 17:50:29.339259 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fc60cc21-f159-40bf-beca-c62718f57b9c-ovsdbserver-sb-tls-certs podName:fc60cc21-f159-40bf-beca-c62718f57b9c nodeName:}" failed. No retries permitted until 2026-01-20 17:50:30.339253053 +0000 UTC m=+4124.099591020 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "ovsdbserver-sb-tls-certs" (UniqueName: "kubernetes.io/secret/fc60cc21-f159-40bf-beca-c62718f57b9c-ovsdbserver-sb-tls-certs") pod "ovsdbserver-sb-0" (UID: "fc60cc21-f159-40bf-beca-c62718f57b9c") : secret "cert-ovndbcluster-sb-ovndbs" not found Jan 20 17:50:29 crc kubenswrapper[4558]: E0120 17:50:29.339296 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-ovndbcluster-nb-ovndbs: secret "cert-ovndbcluster-nb-ovndbs" not found Jan 20 17:50:29 crc kubenswrapper[4558]: E0120 17:50:29.339316 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-ovsdbserver-nb-tls-certs podName:bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162 nodeName:}" failed. No retries permitted until 2026-01-20 17:50:31.339310381 +0000 UTC m=+4125.099648347 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "ovsdbserver-nb-tls-certs" (UniqueName: "kubernetes.io/secret/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-ovsdbserver-nb-tls-certs") pod "ovsdbserver-nb-0" (UID: "bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162") : secret "cert-ovndbcluster-nb-ovndbs" not found Jan 20 17:50:29 crc kubenswrapper[4558]: I0120 17:50:29.443701 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/581c2419-48d9-4b7d-b71b-ce5cceb0326d-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"581c2419-48d9-4b7d-b71b-ce5cceb0326d\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:50:29 crc kubenswrapper[4558]: E0120 17:50:29.443848 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-nova-novncproxy-cell1-vencrypt: secret "cert-nova-novncproxy-cell1-vencrypt" not found Jan 20 17:50:29 crc kubenswrapper[4558]: E0120 17:50:29.444293 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/581c2419-48d9-4b7d-b71b-ce5cceb0326d-vencrypt-tls-certs podName:581c2419-48d9-4b7d-b71b-ce5cceb0326d nodeName:}" failed. No retries permitted until 2026-01-20 17:50:30.444277208 +0000 UTC m=+4124.204615175 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "vencrypt-tls-certs" (UniqueName: "kubernetes.io/secret/581c2419-48d9-4b7d-b71b-ce5cceb0326d-vencrypt-tls-certs") pod "nova-cell1-novncproxy-0" (UID: "581c2419-48d9-4b7d-b71b-ce5cceb0326d") : secret "cert-nova-novncproxy-cell1-vencrypt" not found Jan 20 17:50:29 crc kubenswrapper[4558]: I0120 17:50:29.444468 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/581c2419-48d9-4b7d-b71b-ce5cceb0326d-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"581c2419-48d9-4b7d-b71b-ce5cceb0326d\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:50:29 crc kubenswrapper[4558]: E0120 17:50:29.444590 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-nova-novncproxy-cell1-public-svc: secret "cert-nova-novncproxy-cell1-public-svc" not found Jan 20 17:50:29 crc kubenswrapper[4558]: E0120 17:50:29.444629 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/581c2419-48d9-4b7d-b71b-ce5cceb0326d-nova-novncproxy-tls-certs podName:581c2419-48d9-4b7d-b71b-ce5cceb0326d nodeName:}" failed. No retries permitted until 2026-01-20 17:50:30.444619952 +0000 UTC m=+4124.204957919 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "nova-novncproxy-tls-certs" (UniqueName: "kubernetes.io/secret/581c2419-48d9-4b7d-b71b-ce5cceb0326d-nova-novncproxy-tls-certs") pod "nova-cell1-novncproxy-0" (UID: "581c2419-48d9-4b7d-b71b-ce5cceb0326d") : secret "cert-nova-novncproxy-cell1-public-svc" not found Jan 20 17:50:29 crc kubenswrapper[4558]: I0120 17:50:29.660925 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/nova-metadata-0" podUID="7d2f1782-aa1c-4382-b682-27ce8f37d139" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.1.174:8775/\": dial tcp 10.217.1.174:8775: connect: connection refused" Jan 20 17:50:29 crc kubenswrapper[4558]: I0120 17:50:29.660931 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/nova-metadata-0" podUID="7d2f1782-aa1c-4382-b682-27ce8f37d139" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.1.174:8775/\": dial tcp 10.217.1.174:8775: connect: connection refused" Jan 20 17:50:29 crc kubenswrapper[4558]: I0120 17:50:29.728647 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/cinder-api-0" podUID="47de199d-0dde-4082-8b6c-99f3d202608b" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.1.133:8776/healthcheck\": dial tcp 10.217.1.133:8776: connect: connection refused" Jan 20 17:50:29 crc kubenswrapper[4558]: E0120 17:50:29.753606 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-kube-state-metrics-svc: secret "cert-kube-state-metrics-svc" not found Jan 20 17:50:29 crc kubenswrapper[4558]: E0120 17:50:29.753670 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/49822b01-63d0-40d3-b604-7980891b0683-kube-state-metrics-tls-certs podName:49822b01-63d0-40d3-b604-7980891b0683 nodeName:}" failed. No retries permitted until 2026-01-20 17:50:30.253650075 +0000 UTC m=+4124.013988042 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-state-metrics-tls-certs" (UniqueName: "kubernetes.io/secret/49822b01-63d0-40d3-b604-7980891b0683-kube-state-metrics-tls-certs") pod "kube-state-metrics-0" (UID: "49822b01-63d0-40d3-b604-7980891b0683") : secret "cert-kube-state-metrics-svc" not found Jan 20 17:50:29 crc kubenswrapper[4558]: I0120 17:50:29.772881 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:50:29 crc kubenswrapper[4558]: W0120 17:50:29.835826 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod924fa7ce_8d60_4b2f_b62b_d5e146474f71.slice/crio-4ecb84690e1b7faec0f2af080daf12cc500aff5c5becfc57730c4fde59e298fb WatchSource:0}: Error finding container 4ecb84690e1b7faec0f2af080daf12cc500aff5c5becfc57730c4fde59e298fb: Status 404 returned error can't find the container with id 4ecb84690e1b7faec0f2af080daf12cc500aff5c5becfc57730c4fde59e298fb Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.137121 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.152573 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.152702 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.176710 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovn-northd-0_6dae3a04-db9e-48a2-bbe2-012c9ba55890/ovn-northd/0.log" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.176780 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.180391 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.198400 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-97cdbfcb5-xwv8l" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.238071 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-7d4c95f78b-qjwvm" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.280599 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d2f1782-aa1c-4382-b682-27ce8f37d139-combined-ca-bundle\") pod \"7d2f1782-aa1c-4382-b682-27ce8f37d139\" (UID: \"7d2f1782-aa1c-4382-b682-27ce8f37d139\") " Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.280643 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f98db5df-2f7f-490b-b4a0-7e9f27b07a60-logs\") pod \"f98db5df-2f7f-490b-b4a0-7e9f27b07a60\" (UID: \"f98db5df-2f7f-490b-b4a0-7e9f27b07a60\") " Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.280670 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8h5nr\" (UniqueName: \"kubernetes.io/projected/6dae3a04-db9e-48a2-bbe2-012c9ba55890-kube-api-access-8h5nr\") pod \"6dae3a04-db9e-48a2-bbe2-012c9ba55890\" (UID: \"6dae3a04-db9e-48a2-bbe2-012c9ba55890\") " Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.280689 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd\" (UID: \"d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd\") " Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.280707 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ds4zt\" (UniqueName: \"kubernetes.io/projected/7d2f1782-aa1c-4382-b682-27ce8f37d139-kube-api-access-ds4zt\") pod \"7d2f1782-aa1c-4382-b682-27ce8f37d139\" (UID: \"7d2f1782-aa1c-4382-b682-27ce8f37d139\") " Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.280726 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47de199d-0dde-4082-8b6c-99f3d202608b-combined-ca-bundle\") pod \"47de199d-0dde-4082-8b6c-99f3d202608b\" (UID: \"47de199d-0dde-4082-8b6c-99f3d202608b\") " Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.280745 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d2f1782-aa1c-4382-b682-27ce8f37d139-config-data\") pod \"7d2f1782-aa1c-4382-b682-27ce8f37d139\" (UID: \"7d2f1782-aa1c-4382-b682-27ce8f37d139\") " Jan 20 
17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.280785 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6dae3a04-db9e-48a2-bbe2-012c9ba55890-combined-ca-bundle\") pod \"6dae3a04-db9e-48a2-bbe2-012c9ba55890\" (UID: \"6dae3a04-db9e-48a2-bbe2-012c9ba55890\") " Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.280802 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/6dae3a04-db9e-48a2-bbe2-012c9ba55890-ovn-rundir\") pod \"6dae3a04-db9e-48a2-bbe2-012c9ba55890\" (UID: \"6dae3a04-db9e-48a2-bbe2-012c9ba55890\") " Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.280826 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/47de199d-0dde-4082-8b6c-99f3d202608b-logs\") pod \"47de199d-0dde-4082-8b6c-99f3d202608b\" (UID: \"47de199d-0dde-4082-8b6c-99f3d202608b\") " Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.280844 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/47de199d-0dde-4082-8b6c-99f3d202608b-public-tls-certs\") pod \"47de199d-0dde-4082-8b6c-99f3d202608b\" (UID: \"47de199d-0dde-4082-8b6c-99f3d202608b\") " Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.280871 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f98db5df-2f7f-490b-b4a0-7e9f27b07a60-scripts\") pod \"f98db5df-2f7f-490b-b4a0-7e9f27b07a60\" (UID: \"f98db5df-2f7f-490b-b4a0-7e9f27b07a60\") " Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.280886 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"f98db5df-2f7f-490b-b4a0-7e9f27b07a60\" (UID: \"f98db5df-2f7f-490b-b4a0-7e9f27b07a60\") " Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.280903 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7d2f1782-aa1c-4382-b682-27ce8f37d139-logs\") pod \"7d2f1782-aa1c-4382-b682-27ce8f37d139\" (UID: \"7d2f1782-aa1c-4382-b682-27ce8f37d139\") " Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.280936 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6dae3a04-db9e-48a2-bbe2-012c9ba55890-config\") pod \"6dae3a04-db9e-48a2-bbe2-012c9ba55890\" (UID: \"6dae3a04-db9e-48a2-bbe2-012c9ba55890\") " Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.280956 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f98db5df-2f7f-490b-b4a0-7e9f27b07a60-httpd-run\") pod \"f98db5df-2f7f-490b-b4a0-7e9f27b07a60\" (UID: \"f98db5df-2f7f-490b-b4a0-7e9f27b07a60\") " Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.281000 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd-combined-ca-bundle\") pod \"d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd\" (UID: \"d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd\") " Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.281020 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6dae3a04-db9e-48a2-bbe2-012c9ba55890-scripts\") pod \"6dae3a04-db9e-48a2-bbe2-012c9ba55890\" (UID: \"6dae3a04-db9e-48a2-bbe2-012c9ba55890\") " Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.281045 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/47de199d-0dde-4082-8b6c-99f3d202608b-config-data-custom\") pod \"47de199d-0dde-4082-8b6c-99f3d202608b\" (UID: \"47de199d-0dde-4082-8b6c-99f3d202608b\") " Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.281080 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/6dae3a04-db9e-48a2-bbe2-012c9ba55890-metrics-certs-tls-certs\") pod \"6dae3a04-db9e-48a2-bbe2-012c9ba55890\" (UID: \"6dae3a04-db9e-48a2-bbe2-012c9ba55890\") " Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.281119 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f98db5df-2f7f-490b-b4a0-7e9f27b07a60-public-tls-certs\") pod \"f98db5df-2f7f-490b-b4a0-7e9f27b07a60\" (UID: \"f98db5df-2f7f-490b-b4a0-7e9f27b07a60\") " Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.281139 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd-config-data\") pod \"d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd\" (UID: \"d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd\") " Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.281155 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f98db5df-2f7f-490b-b4a0-7e9f27b07a60-config-data\") pod \"f98db5df-2f7f-490b-b4a0-7e9f27b07a60\" (UID: \"f98db5df-2f7f-490b-b4a0-7e9f27b07a60\") " Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.281208 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd-logs\") pod \"d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd\" (UID: \"d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd\") " Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.281246 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dkhz2\" (UniqueName: \"kubernetes.io/projected/d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd-kube-api-access-dkhz2\") pod \"d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd\" (UID: \"d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd\") " Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.281274 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-82gpj\" (UniqueName: \"kubernetes.io/projected/f98db5df-2f7f-490b-b4a0-7e9f27b07a60-kube-api-access-82gpj\") pod \"f98db5df-2f7f-490b-b4a0-7e9f27b07a60\" (UID: \"f98db5df-2f7f-490b-b4a0-7e9f27b07a60\") " Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.281310 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/47de199d-0dde-4082-8b6c-99f3d202608b-scripts\") pod \"47de199d-0dde-4082-8b6c-99f3d202608b\" (UID: \"47de199d-0dde-4082-8b6c-99f3d202608b\") " Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.281340 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" 
(UniqueName: \"kubernetes.io/host-path/47de199d-0dde-4082-8b6c-99f3d202608b-etc-machine-id\") pod \"47de199d-0dde-4082-8b6c-99f3d202608b\" (UID: \"47de199d-0dde-4082-8b6c-99f3d202608b\") " Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.281365 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd-internal-tls-certs\") pod \"d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd\" (UID: \"d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd\") " Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.281401 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-thnjs\" (UniqueName: \"kubernetes.io/projected/47de199d-0dde-4082-8b6c-99f3d202608b-kube-api-access-thnjs\") pod \"47de199d-0dde-4082-8b6c-99f3d202608b\" (UID: \"47de199d-0dde-4082-8b6c-99f3d202608b\") " Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.281423 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd-scripts\") pod \"d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd\" (UID: \"d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd\") " Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.281457 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/6dae3a04-db9e-48a2-bbe2-012c9ba55890-ovn-northd-tls-certs\") pod \"6dae3a04-db9e-48a2-bbe2-012c9ba55890\" (UID: \"6dae3a04-db9e-48a2-bbe2-012c9ba55890\") " Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.281484 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f98db5df-2f7f-490b-b4a0-7e9f27b07a60-combined-ca-bundle\") pod \"f98db5df-2f7f-490b-b4a0-7e9f27b07a60\" (UID: \"f98db5df-2f7f-490b-b4a0-7e9f27b07a60\") " Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.281506 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/7d2f1782-aa1c-4382-b682-27ce8f37d139-nova-metadata-tls-certs\") pod \"7d2f1782-aa1c-4382-b682-27ce8f37d139\" (UID: \"7d2f1782-aa1c-4382-b682-27ce8f37d139\") " Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.281544 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47de199d-0dde-4082-8b6c-99f3d202608b-config-data\") pod \"47de199d-0dde-4082-8b6c-99f3d202608b\" (UID: \"47de199d-0dde-4082-8b6c-99f3d202608b\") " Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.281600 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd-httpd-run\") pod \"d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd\" (UID: \"d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd\") " Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.281628 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/47de199d-0dde-4082-8b6c-99f3d202608b-internal-tls-certs\") pod \"47de199d-0dde-4082-8b6c-99f3d202608b\" (UID: \"47de199d-0dde-4082-8b6c-99f3d202608b\") " Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.281972 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/38ce22a3-0b53-417f-ab78-77a3be2da04f-internal-tls-certs\") pod \"placement-9cbb9c58b-jcwzc\" (UID: \"38ce22a3-0b53-417f-ab78-77a3be2da04f\") " pod="openstack-kuttl-tests/placement-9cbb9c58b-jcwzc" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.282084 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0a3695d2-9733-430d-a489-694e9e97b586-public-tls-certs\") pod \"keystone-9795586d8-4dv95\" (UID: \"0a3695d2-9733-430d-a489-694e9e97b586\") " pod="openstack-kuttl-tests/keystone-9795586d8-4dv95" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.282125 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d127fb40-6eff-4ccd-922c-b209ac4edbda-public-tls-certs\") pod \"barbican-api-5968cb86f6-ctfg5\" (UID: \"d127fb40-6eff-4ccd-922c-b209ac4edbda\") " pod="openstack-kuttl-tests/barbican-api-5968cb86f6-ctfg5" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.282159 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0a3695d2-9733-430d-a489-694e9e97b586-internal-tls-certs\") pod \"keystone-9795586d8-4dv95\" (UID: \"0a3695d2-9733-430d-a489-694e9e97b586\") " pod="openstack-kuttl-tests/keystone-9795586d8-4dv95" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.282208 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/38ce22a3-0b53-417f-ab78-77a3be2da04f-public-tls-certs\") pod \"placement-9cbb9c58b-jcwzc\" (UID: \"38ce22a3-0b53-417f-ab78-77a3be2da04f\") " pod="openstack-kuttl-tests/placement-9cbb9c58b-jcwzc" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.282243 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-public-tls-certs\") pod \"neutron-7b9876b66d-znq49\" (UID: \"7458929c-bc4e-4f17-b199-4963b298f4e8\") " pod="openstack-kuttl-tests/neutron-7b9876b66d-znq49" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.282260 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-ovndb-tls-certs\") pod \"neutron-7b9876b66d-znq49\" (UID: \"7458929c-bc4e-4f17-b199-4963b298f4e8\") " pod="openstack-kuttl-tests/neutron-7b9876b66d-znq49" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.282286 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/603501da-057d-4d3b-a682-e55e7e2916f4-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"603501da-057d-4d3b-a682-e55e7e2916f4\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.282343 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-internal-tls-certs\") pod \"neutron-7b9876b66d-znq49\" (UID: \"7458929c-bc4e-4f17-b199-4963b298f4e8\") " pod="openstack-kuttl-tests/neutron-7b9876b66d-znq49" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.282397 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d127fb40-6eff-4ccd-922c-b209ac4edbda-internal-tls-certs\") pod \"barbican-api-5968cb86f6-ctfg5\" (UID: \"d127fb40-6eff-4ccd-922c-b209ac4edbda\") " pod="openstack-kuttl-tests/barbican-api-5968cb86f6-ctfg5" Jan 20 17:50:30 crc kubenswrapper[4558]: E0120 17:50:30.282532 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-barbican-internal-svc: secret "cert-barbican-internal-svc" not found Jan 20 17:50:30 crc kubenswrapper[4558]: E0120 17:50:30.282585 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d127fb40-6eff-4ccd-922c-b209ac4edbda-internal-tls-certs podName:d127fb40-6eff-4ccd-922c-b209ac4edbda nodeName:}" failed. No retries permitted until 2026-01-20 17:50:34.282570465 +0000 UTC m=+4128.042908422 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/d127fb40-6eff-4ccd-922c-b209ac4edbda-internal-tls-certs") pod "barbican-api-5968cb86f6-ctfg5" (UID: "d127fb40-6eff-4ccd-922c-b209ac4edbda") : secret "cert-barbican-internal-svc" not found Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.287630 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f98db5df-2f7f-490b-b4a0-7e9f27b07a60-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "f98db5df-2f7f-490b-b4a0-7e9f27b07a60" (UID: "f98db5df-2f7f-490b-b4a0-7e9f27b07a60"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:50:30 crc kubenswrapper[4558]: E0120 17:50:30.287770 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-placement-public-svc: secret "cert-placement-public-svc" not found Jan 20 17:50:30 crc kubenswrapper[4558]: E0120 17:50:30.287835 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/38ce22a3-0b53-417f-ab78-77a3be2da04f-public-tls-certs podName:38ce22a3-0b53-417f-ab78-77a3be2da04f nodeName:}" failed. No retries permitted until 2026-01-20 17:50:34.287816429 +0000 UTC m=+4128.048154397 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/38ce22a3-0b53-417f-ab78-77a3be2da04f-public-tls-certs") pod "placement-9cbb9c58b-jcwzc" (UID: "38ce22a3-0b53-417f-ab78-77a3be2da04f") : secret "cert-placement-public-svc" not found Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.288112 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7d2f1782-aa1c-4382-b682-27ce8f37d139-logs" (OuterVolumeSpecName: "logs") pod "7d2f1782-aa1c-4382-b682-27ce8f37d139" (UID: "7d2f1782-aa1c-4382-b682-27ce8f37d139"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:50:30 crc kubenswrapper[4558]: E0120 17:50:30.288289 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-placement-internal-svc: secret "cert-placement-internal-svc" not found Jan 20 17:50:30 crc kubenswrapper[4558]: E0120 17:50:30.288351 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/38ce22a3-0b53-417f-ab78-77a3be2da04f-internal-tls-certs podName:38ce22a3-0b53-417f-ab78-77a3be2da04f nodeName:}" failed. No retries permitted until 2026-01-20 17:50:34.288341216 +0000 UTC m=+4128.048679184 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/38ce22a3-0b53-417f-ab78-77a3be2da04f-internal-tls-certs") pod "placement-9cbb9c58b-jcwzc" (UID: "38ce22a3-0b53-417f-ab78-77a3be2da04f") : secret "cert-placement-internal-svc" not found Jan 20 17:50:30 crc kubenswrapper[4558]: E0120 17:50:30.288389 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-keystone-public-svc: secret "cert-keystone-public-svc" not found Jan 20 17:50:30 crc kubenswrapper[4558]: E0120 17:50:30.288415 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0a3695d2-9733-430d-a489-694e9e97b586-public-tls-certs podName:0a3695d2-9733-430d-a489-694e9e97b586 nodeName:}" failed. No retries permitted until 2026-01-20 17:50:34.288410386 +0000 UTC m=+4128.048748353 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/0a3695d2-9733-430d-a489-694e9e97b586-public-tls-certs") pod "keystone-9795586d8-4dv95" (UID: "0a3695d2-9733-430d-a489-694e9e97b586") : secret "cert-keystone-public-svc" not found Jan 20 17:50:30 crc kubenswrapper[4558]: E0120 17:50:30.288448 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-kube-state-metrics-svc: secret "cert-kube-state-metrics-svc" not found Jan 20 17:50:30 crc kubenswrapper[4558]: E0120 17:50:30.288468 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/49822b01-63d0-40d3-b604-7980891b0683-kube-state-metrics-tls-certs podName:49822b01-63d0-40d3-b604-7980891b0683 nodeName:}" failed. No retries permitted until 2026-01-20 17:50:31.288462013 +0000 UTC m=+4125.048799980 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-state-metrics-tls-certs" (UniqueName: "kubernetes.io/secret/49822b01-63d0-40d3-b604-7980891b0683-kube-state-metrics-tls-certs") pod "kube-state-metrics-0" (UID: "49822b01-63d0-40d3-b604-7980891b0683") : secret "cert-kube-state-metrics-svc" not found Jan 20 17:50:30 crc kubenswrapper[4558]: E0120 17:50:30.288499 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-barbican-public-svc: secret "cert-barbican-public-svc" not found Jan 20 17:50:30 crc kubenswrapper[4558]: E0120 17:50:30.288516 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d127fb40-6eff-4ccd-922c-b209ac4edbda-public-tls-certs podName:d127fb40-6eff-4ccd-922c-b209ac4edbda nodeName:}" failed. No retries permitted until 2026-01-20 17:50:34.288511175 +0000 UTC m=+4128.048849142 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/d127fb40-6eff-4ccd-922c-b209ac4edbda-public-tls-certs") pod "barbican-api-5968cb86f6-ctfg5" (UID: "d127fb40-6eff-4ccd-922c-b209ac4edbda") : secret "cert-barbican-public-svc" not found Jan 20 17:50:30 crc kubenswrapper[4558]: E0120 17:50:30.288541 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-keystone-internal-svc: secret "cert-keystone-internal-svc" not found Jan 20 17:50:30 crc kubenswrapper[4558]: E0120 17:50:30.288558 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0a3695d2-9733-430d-a489-694e9e97b586-internal-tls-certs podName:0a3695d2-9733-430d-a489-694e9e97b586 nodeName:}" failed. No retries permitted until 2026-01-20 17:50:34.288554417 +0000 UTC m=+4128.048892384 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/0a3695d2-9733-430d-a489-694e9e97b586-internal-tls-certs") pod "keystone-9795586d8-4dv95" (UID: "0a3695d2-9733-430d-a489-694e9e97b586") : secret "cert-keystone-internal-svc" not found Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.290049 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6dae3a04-db9e-48a2-bbe2-012c9ba55890-config" (OuterVolumeSpecName: "config") pod "6dae3a04-db9e-48a2-bbe2-012c9ba55890" (UID: "6dae3a04-db9e-48a2-bbe2-012c9ba55890"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:50:30 crc kubenswrapper[4558]: E0120 17:50:30.290216 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-galera-openstack-svc: secret "cert-galera-openstack-svc" not found Jan 20 17:50:30 crc kubenswrapper[4558]: E0120 17:50:30.290267 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/603501da-057d-4d3b-a682-e55e7e2916f4-galera-tls-certs podName:603501da-057d-4d3b-a682-e55e7e2916f4 nodeName:}" failed. No retries permitted until 2026-01-20 17:50:32.290250396 +0000 UTC m=+4126.050588364 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "galera-tls-certs" (UniqueName: "kubernetes.io/secret/603501da-057d-4d3b-a682-e55e7e2916f4-galera-tls-certs") pod "openstack-galera-0" (UID: "603501da-057d-4d3b-a682-e55e7e2916f4") : secret "cert-galera-openstack-svc" not found Jan 20 17:50:30 crc kubenswrapper[4558]: E0120 17:50:30.290322 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-neutron-public-svc: secret "cert-neutron-public-svc" not found Jan 20 17:50:30 crc kubenswrapper[4558]: E0120 17:50:30.290344 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-public-tls-certs podName:7458929c-bc4e-4f17-b199-4963b298f4e8 nodeName:}" failed. No retries permitted until 2026-01-20 17:50:34.290337419 +0000 UTC m=+4128.050675386 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-public-tls-certs") pod "neutron-7b9876b66d-znq49" (UID: "7458929c-bc4e-4f17-b199-4963b298f4e8") : secret "cert-neutron-public-svc" not found Jan 20 17:50:30 crc kubenswrapper[4558]: E0120 17:50:30.290380 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-neutron-ovndbs: secret "cert-neutron-ovndbs" not found Jan 20 17:50:30 crc kubenswrapper[4558]: E0120 17:50:30.290397 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-ovndb-tls-certs podName:7458929c-bc4e-4f17-b199-4963b298f4e8 nodeName:}" failed. No retries permitted until 2026-01-20 17:50:34.290392253 +0000 UTC m=+4128.050730219 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "ovndb-tls-certs" (UniqueName: "kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-ovndb-tls-certs") pod "neutron-7b9876b66d-znq49" (UID: "7458929c-bc4e-4f17-b199-4963b298f4e8") : secret "cert-neutron-ovndbs" not found Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.290896 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "glance") pod "f98db5df-2f7f-490b-b4a0-7e9f27b07a60" (UID: "f98db5df-2f7f-490b-b4a0-7e9f27b07a60"). 
InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.291284 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f98db5df-2f7f-490b-b4a0-7e9f27b07a60-logs" (OuterVolumeSpecName: "logs") pod "f98db5df-2f7f-490b-b4a0-7e9f27b07a60" (UID: "f98db5df-2f7f-490b-b4a0-7e9f27b07a60"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.297620 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd-scripts" (OuterVolumeSpecName: "scripts") pod "d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd" (UID: "d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.297699 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/47de199d-0dde-4082-8b6c-99f3d202608b-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "47de199d-0dde-4082-8b6c-99f3d202608b" (UID: "47de199d-0dde-4082-8b6c-99f3d202608b"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.298471 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd-logs" (OuterVolumeSpecName: "logs") pod "d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd" (UID: "d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.298820 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/47de199d-0dde-4082-8b6c-99f3d202608b-logs" (OuterVolumeSpecName: "logs") pod "47de199d-0dde-4082-8b6c-99f3d202608b" (UID: "47de199d-0dde-4082-8b6c-99f3d202608b"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.298904 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6dae3a04-db9e-48a2-bbe2-012c9ba55890-scripts" (OuterVolumeSpecName: "scripts") pod "6dae3a04-db9e-48a2-bbe2-012c9ba55890" (UID: "6dae3a04-db9e-48a2-bbe2-012c9ba55890"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.299419 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6dae3a04-db9e-48a2-bbe2-012c9ba55890-ovn-rundir" (OuterVolumeSpecName: "ovn-rundir") pod "6dae3a04-db9e-48a2-bbe2-012c9ba55890" (UID: "6dae3a04-db9e-48a2-bbe2-012c9ba55890"). InnerVolumeSpecName "ovn-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:50:30 crc kubenswrapper[4558]: E0120 17:50:30.303527 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-neutron-internal-svc: secret "cert-neutron-internal-svc" not found Jan 20 17:50:30 crc kubenswrapper[4558]: E0120 17:50:30.303590 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-internal-tls-certs podName:7458929c-bc4e-4f17-b199-4963b298f4e8 nodeName:}" failed. 
No retries permitted until 2026-01-20 17:50:34.303573408 +0000 UTC m=+4128.063911365 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-internal-tls-certs") pod "neutron-7b9876b66d-znq49" (UID: "7458929c-bc4e-4f17-b199-4963b298f4e8") : secret "cert-neutron-internal-svc" not found Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.303889 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6dae3a04-db9e-48a2-bbe2-012c9ba55890-kube-api-access-8h5nr" (OuterVolumeSpecName: "kube-api-access-8h5nr") pod "6dae3a04-db9e-48a2-bbe2-012c9ba55890" (UID: "6dae3a04-db9e-48a2-bbe2-012c9ba55890"). InnerVolumeSpecName "kube-api-access-8h5nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.315074 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd-kube-api-access-dkhz2" (OuterVolumeSpecName: "kube-api-access-dkhz2") pod "d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd" (UID: "d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd"). InnerVolumeSpecName "kube-api-access-dkhz2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.316568 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd" (UID: "d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.316664 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/47de199d-0dde-4082-8b6c-99f3d202608b-scripts" (OuterVolumeSpecName: "scripts") pod "47de199d-0dde-4082-8b6c-99f3d202608b" (UID: "47de199d-0dde-4082-8b6c-99f3d202608b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.325830 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/47de199d-0dde-4082-8b6c-99f3d202608b-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "47de199d-0dde-4082-8b6c-99f3d202608b" (UID: "47de199d-0dde-4082-8b6c-99f3d202608b"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.331756 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f98db5df-2f7f-490b-b4a0-7e9f27b07a60-kube-api-access-82gpj" (OuterVolumeSpecName: "kube-api-access-82gpj") pod "f98db5df-2f7f-490b-b4a0-7e9f27b07a60" (UID: "f98db5df-2f7f-490b-b4a0-7e9f27b07a60"). InnerVolumeSpecName "kube-api-access-82gpj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.352602 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "glance") pod "d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd" (UID: "d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd"). InnerVolumeSpecName "local-storage11-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.353780 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/47de199d-0dde-4082-8b6c-99f3d202608b-kube-api-access-thnjs" (OuterVolumeSpecName: "kube-api-access-thnjs") pod "47de199d-0dde-4082-8b6c-99f3d202608b" (UID: "47de199d-0dde-4082-8b6c-99f3d202608b"). InnerVolumeSpecName "kube-api-access-thnjs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.353978 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f98db5df-2f7f-490b-b4a0-7e9f27b07a60-scripts" (OuterVolumeSpecName: "scripts") pod "f98db5df-2f7f-490b-b4a0-7e9f27b07a60" (UID: "f98db5df-2f7f-490b-b4a0-7e9f27b07a60"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.368329 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7d2f1782-aa1c-4382-b682-27ce8f37d139-kube-api-access-ds4zt" (OuterVolumeSpecName: "kube-api-access-ds4zt") pod "7d2f1782-aa1c-4382-b682-27ce8f37d139" (UID: "7d2f1782-aa1c-4382-b682-27ce8f37d139"). InnerVolumeSpecName "kube-api-access-ds4zt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.376995 4558 generic.go:334] "Generic (PLEG): container finished" podID="d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd" containerID="3216cb4a892da431a3d001352506b8205d9cde6cf6808df51a45283abdf32abc" exitCode=0 Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.377072 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd","Type":"ContainerDied","Data":"3216cb4a892da431a3d001352506b8205d9cde6cf6808df51a45283abdf32abc"} Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.377106 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd","Type":"ContainerDied","Data":"dcc937b537b6caac9042738c055c3b5a3a3ca3ee317b2e42b3c67919c7685942"} Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.377124 4558 scope.go:117] "RemoveContainer" containerID="3216cb4a892da431a3d001352506b8205d9cde6cf6808df51a45283abdf32abc" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.377289 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.381212 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"924fa7ce-8d60-4b2f-b62b-d5e146474f71","Type":"ContainerStarted","Data":"7e2524778a402821cdb0aaf4be80d7e2f9282c3873490c09d8e4b929dc8c5e6e"} Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.381243 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"924fa7ce-8d60-4b2f-b62b-d5e146474f71","Type":"ContainerStarted","Data":"4ecb84690e1b7faec0f2af080daf12cc500aff5c5becfc57730c4fde59e298fb"} Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.381837 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.383021 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c5adf97d-8b90-49ca-bbeb-21dd8446a226-config-data\") pod \"c5adf97d-8b90-49ca-bbeb-21dd8446a226\" (UID: \"c5adf97d-8b90-49ca-bbeb-21dd8446a226\") " Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.383067 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2-config-data\") pod \"e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2\" (UID: \"e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2\") " Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.383697 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c5adf97d-8b90-49ca-bbeb-21dd8446a226-config-data-custom\") pod \"c5adf97d-8b90-49ca-bbeb-21dd8446a226\" (UID: \"c5adf97d-8b90-49ca-bbeb-21dd8446a226\") " Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.383811 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q9xhl\" (UniqueName: \"kubernetes.io/projected/c5adf97d-8b90-49ca-bbeb-21dd8446a226-kube-api-access-q9xhl\") pod \"c5adf97d-8b90-49ca-bbeb-21dd8446a226\" (UID: \"c5adf97d-8b90-49ca-bbeb-21dd8446a226\") " Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.383890 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5adf97d-8b90-49ca-bbeb-21dd8446a226-combined-ca-bundle\") pod \"c5adf97d-8b90-49ca-bbeb-21dd8446a226\" (UID: \"c5adf97d-8b90-49ca-bbeb-21dd8446a226\") " Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.383950 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-llqc6\" (UniqueName: \"kubernetes.io/projected/e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2-kube-api-access-llqc6\") pod \"e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2\" (UID: \"e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2\") " Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.384003 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2-logs\") pod \"e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2\" (UID: \"e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2\") " Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.384040 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/c5adf97d-8b90-49ca-bbeb-21dd8446a226-logs\") pod \"c5adf97d-8b90-49ca-bbeb-21dd8446a226\" (UID: \"c5adf97d-8b90-49ca-bbeb-21dd8446a226\") " Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.384086 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2-config-data-custom\") pod \"e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2\" (UID: \"e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2\") " Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.384185 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2-combined-ca-bundle\") pod \"e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2\" (UID: \"e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2\") " Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.384572 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/5239f3e2-7f54-4042-b0da-02f7608224df-memcached-tls-certs\") pod \"memcached-0\" (UID: \"5239f3e2-7f54-4042-b0da-02f7608224df\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.384627 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/fc60cc21-f159-40bf-beca-c62718f57b9c-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"fc60cc21-f159-40bf-beca-c62718f57b9c\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.384711 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/fc60cc21-f159-40bf-beca-c62718f57b9c-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"fc60cc21-f159-40bf-beca-c62718f57b9c\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.384915 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.384933 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f98db5df-2f7f-490b-b4a0-7e9f27b07a60-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.384943 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8h5nr\" (UniqueName: \"kubernetes.io/projected/6dae3a04-db9e-48a2-bbe2-012c9ba55890-kube-api-access-8h5nr\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.384963 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.384973 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ds4zt\" (UniqueName: \"kubernetes.io/projected/7d2f1782-aa1c-4382-b682-27ce8f37d139-kube-api-access-ds4zt\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.384982 4558 reconciler_common.go:293] "Volume detached for volume \"ovn-rundir\" (UniqueName: 
\"kubernetes.io/empty-dir/6dae3a04-db9e-48a2-bbe2-012c9ba55890-ovn-rundir\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.384999 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/47de199d-0dde-4082-8b6c-99f3d202608b-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.385008 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f98db5df-2f7f-490b-b4a0-7e9f27b07a60-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.385022 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.385030 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7d2f1782-aa1c-4382-b682-27ce8f37d139-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.385040 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6dae3a04-db9e-48a2-bbe2-012c9ba55890-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.385048 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f98db5df-2f7f-490b-b4a0-7e9f27b07a60-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.385057 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6dae3a04-db9e-48a2-bbe2-012c9ba55890-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.385067 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/47de199d-0dde-4082-8b6c-99f3d202608b-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.385077 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.385090 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dkhz2\" (UniqueName: \"kubernetes.io/projected/d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd-kube-api-access-dkhz2\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.385099 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-82gpj\" (UniqueName: \"kubernetes.io/projected/f98db5df-2f7f-490b-b4a0-7e9f27b07a60-kube-api-access-82gpj\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.385108 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/47de199d-0dde-4082-8b6c-99f3d202608b-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.385116 4558 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/47de199d-0dde-4082-8b6c-99f3d202608b-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.385125 4558 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-thnjs\" (UniqueName: \"kubernetes.io/projected/47de199d-0dde-4082-8b6c-99f3d202608b-kube-api-access-thnjs\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.385133 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.385649 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c5adf97d-8b90-49ca-bbeb-21dd8446a226-logs" (OuterVolumeSpecName: "logs") pod "c5adf97d-8b90-49ca-bbeb-21dd8446a226" (UID: "c5adf97d-8b90-49ca-bbeb-21dd8446a226"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:50:30 crc kubenswrapper[4558]: E0120 17:50:30.387261 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-memcached-svc: secret "cert-memcached-svc" not found Jan 20 17:50:30 crc kubenswrapper[4558]: E0120 17:50:30.387334 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5239f3e2-7f54-4042-b0da-02f7608224df-memcached-tls-certs podName:5239f3e2-7f54-4042-b0da-02f7608224df nodeName:}" failed. No retries permitted until 2026-01-20 17:50:32.387316908 +0000 UTC m=+4126.147654875 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "memcached-tls-certs" (UniqueName: "kubernetes.io/secret/5239f3e2-7f54-4042-b0da-02f7608224df-memcached-tls-certs") pod "memcached-0" (UID: "5239f3e2-7f54-4042-b0da-02f7608224df") : secret "cert-memcached-svc" not found Jan 20 17:50:30 crc kubenswrapper[4558]: E0120 17:50:30.388384 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-ovndbcluster-sb-ovndbs: secret "cert-ovndbcluster-sb-ovndbs" not found Jan 20 17:50:30 crc kubenswrapper[4558]: E0120 17:50:30.388441 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fc60cc21-f159-40bf-beca-c62718f57b9c-ovsdbserver-sb-tls-certs podName:fc60cc21-f159-40bf-beca-c62718f57b9c nodeName:}" failed. No retries permitted until 2026-01-20 17:50:32.388425783 +0000 UTC m=+4126.148763750 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "ovsdbserver-sb-tls-certs" (UniqueName: "kubernetes.io/secret/fc60cc21-f159-40bf-beca-c62718f57b9c-ovsdbserver-sb-tls-certs") pod "ovsdbserver-sb-0" (UID: "fc60cc21-f159-40bf-beca-c62718f57b9c") : secret "cert-ovndbcluster-sb-ovndbs" not found Jan 20 17:50:30 crc kubenswrapper[4558]: E0120 17:50:30.388481 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-ovn-metrics: secret "cert-ovn-metrics" not found Jan 20 17:50:30 crc kubenswrapper[4558]: E0120 17:50:30.388501 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fc60cc21-f159-40bf-beca-c62718f57b9c-metrics-certs-tls-certs podName:fc60cc21-f159-40bf-beca-c62718f57b9c nodeName:}" failed. No retries permitted until 2026-01-20 17:50:32.388495023 +0000 UTC m=+4126.148832990 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs-tls-certs" (UniqueName: "kubernetes.io/secret/fc60cc21-f159-40bf-beca-c62718f57b9c-metrics-certs-tls-certs") pod "ovsdbserver-sb-0" (UID: "fc60cc21-f159-40bf-beca-c62718f57b9c") : secret "cert-ovn-metrics" not found Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.397354 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2-logs" (OuterVolumeSpecName: "logs") pod "e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2" (UID: "e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.403506 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podStartSLOduration=2.403489177 podStartE2EDuration="2.403489177s" podCreationTimestamp="2026-01-20 17:50:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:50:30.397777317 +0000 UTC m=+4124.158115284" watchObservedRunningTime="2026-01-20 17:50:30.403489177 +0000 UTC m=+4124.163827145" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.404060 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2" (UID: "e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.413094 4558 scope.go:117] "RemoveContainer" containerID="212a4fd973edb30677b4248e267dd6b57d185e117261fb43b7e48e9267a561a9" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.416726 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.420456 4558 generic.go:334] "Generic (PLEG): container finished" podID="28febf86-c729-498e-b3af-0321468d80f3" containerID="d57bd37afc98044774c1f3fa1bd0ea83a8cca9c7814e87365a229f5ac1765f05" exitCode=0 Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.420539 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"28febf86-c729-498e-b3af-0321468d80f3","Type":"ContainerDied","Data":"d57bd37afc98044774c1f3fa1bd0ea83a8cca9c7814e87365a229f5ac1765f05"} Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.421237 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d2f1782-aa1c-4382-b682-27ce8f37d139-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7d2f1782-aa1c-4382-b682-27ce8f37d139" (UID: "7d2f1782-aa1c-4382-b682-27ce8f37d139"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.423561 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c5adf97d-8b90-49ca-bbeb-21dd8446a226-kube-api-access-q9xhl" (OuterVolumeSpecName: "kube-api-access-q9xhl") pod "c5adf97d-8b90-49ca-bbeb-21dd8446a226" (UID: "c5adf97d-8b90-49ca-bbeb-21dd8446a226"). InnerVolumeSpecName "kube-api-access-q9xhl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.426261 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c5adf97d-8b90-49ca-bbeb-21dd8446a226-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "c5adf97d-8b90-49ca-bbeb-21dd8446a226" (UID: "c5adf97d-8b90-49ca-bbeb-21dd8446a226"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.430412 4558 generic.go:334] "Generic (PLEG): container finished" podID="7d2f1782-aa1c-4382-b682-27ce8f37d139" containerID="fdc46115e0c01656230d526da5789da8cef6a6ad4afd973eaf326c530afd94ab" exitCode=0 Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.430468 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"7d2f1782-aa1c-4382-b682-27ce8f37d139","Type":"ContainerDied","Data":"fdc46115e0c01656230d526da5789da8cef6a6ad4afd973eaf326c530afd94ab"} Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.430488 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"7d2f1782-aa1c-4382-b682-27ce8f37d139","Type":"ContainerDied","Data":"6142d5296dca7952e213a3cf92926d0aa17b375a3a069f0621dadfed64ec3b49"} Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.430552 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.430867 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2-kube-api-access-llqc6" (OuterVolumeSpecName: "kube-api-access-llqc6") pod "e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2" (UID: "e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2"). InnerVolumeSpecName "kube-api-access-llqc6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.435419 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/47de199d-0dde-4082-8b6c-99f3d202608b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "47de199d-0dde-4082-8b6c-99f3d202608b" (UID: "47de199d-0dde-4082-8b6c-99f3d202608b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.445218 4558 generic.go:334] "Generic (PLEG): container finished" podID="f98db5df-2f7f-490b-b4a0-7e9f27b07a60" containerID="37aadc76d1ea5916d8987f0d7d3856518a536ffe632acfd464eb225116abde18" exitCode=0 Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.445267 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"f98db5df-2f7f-490b-b4a0-7e9f27b07a60","Type":"ContainerDied","Data":"37aadc76d1ea5916d8987f0d7d3856518a536ffe632acfd464eb225116abde18"} Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.445287 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"f98db5df-2f7f-490b-b4a0-7e9f27b07a60","Type":"ContainerDied","Data":"94a48f514687c985e6c0de200dd5b7405ffe787b6f8eded064e73166a124cbb6"} Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.445349 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.482876 4558 generic.go:334] "Generic (PLEG): container finished" podID="47de199d-0dde-4082-8b6c-99f3d202608b" containerID="1644d78283b3c3dad2b68cd9a4d16b7b8c04de0ba6bfcedb6b4c04ac6f6297de" exitCode=0 Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.483354 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"47de199d-0dde-4082-8b6c-99f3d202608b","Type":"ContainerDied","Data":"1644d78283b3c3dad2b68cd9a4d16b7b8c04de0ba6bfcedb6b4c04ac6f6297de"} Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.483514 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.484603 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"47de199d-0dde-4082-8b6c-99f3d202608b","Type":"ContainerDied","Data":"bf0cc528aa590a5f695fb88d2024b7b55941f695dee5334550651ad86f374722"} Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.486258 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d2f1782-aa1c-4382-b682-27ce8f37d139-config-data" (OuterVolumeSpecName: "config-data") pod "7d2f1782-aa1c-4382-b682-27ce8f37d139" (UID: "7d2f1782-aa1c-4382-b682-27ce8f37d139"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.486742 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/581c2419-48d9-4b7d-b71b-ce5cceb0326d-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"581c2419-48d9-4b7d-b71b-ce5cceb0326d\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.487479 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/581c2419-48d9-4b7d-b71b-ce5cceb0326d-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"581c2419-48d9-4b7d-b71b-ce5cceb0326d\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.487710 4558 generic.go:334] "Generic (PLEG): container finished" podID="c5adf97d-8b90-49ca-bbeb-21dd8446a226" containerID="5ad5d5673753b8c1e259ace28ba37c369cd951524f24fdfa1a91a5b011bae4ef" exitCode=0 Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.487742 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c5adf97d-8b90-49ca-bbeb-21dd8446a226-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.487759 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d2f1782-aa1c-4382-b682-27ce8f37d139-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.487770 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47de199d-0dde-4082-8b6c-99f3d202608b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.487781 4558 reconciler_common.go:293] "Volume 
detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d2f1782-aa1c-4382-b682-27ce8f37d139-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.487792 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q9xhl\" (UniqueName: \"kubernetes.io/projected/c5adf97d-8b90-49ca-bbeb-21dd8446a226-kube-api-access-q9xhl\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.487803 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-llqc6\" (UniqueName: \"kubernetes.io/projected/e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2-kube-api-access-llqc6\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.487811 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.487820 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c5adf97d-8b90-49ca-bbeb-21dd8446a226-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.487830 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.487820 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-97cdbfcb5-xwv8l" Jan 20 17:50:30 crc kubenswrapper[4558]: E0120 17:50:30.488274 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-nova-novncproxy-cell1-vencrypt: secret "cert-nova-novncproxy-cell1-vencrypt" not found Jan 20 17:50:30 crc kubenswrapper[4558]: E0120 17:50:30.488326 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/581c2419-48d9-4b7d-b71b-ce5cceb0326d-vencrypt-tls-certs podName:581c2419-48d9-4b7d-b71b-ce5cceb0326d nodeName:}" failed. No retries permitted until 2026-01-20 17:50:32.488311605 +0000 UTC m=+4126.248649572 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "vencrypt-tls-certs" (UniqueName: "kubernetes.io/secret/581c2419-48d9-4b7d-b71b-ce5cceb0326d-vencrypt-tls-certs") pod "nova-cell1-novncproxy-0" (UID: "581c2419-48d9-4b7d-b71b-ce5cceb0326d") : secret "cert-nova-novncproxy-cell1-vencrypt" not found Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.487768 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-97cdbfcb5-xwv8l" event={"ID":"c5adf97d-8b90-49ca-bbeb-21dd8446a226","Type":"ContainerDied","Data":"5ad5d5673753b8c1e259ace28ba37c369cd951524f24fdfa1a91a5b011bae4ef"} Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.488405 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-97cdbfcb5-xwv8l" event={"ID":"c5adf97d-8b90-49ca-bbeb-21dd8446a226","Type":"ContainerDied","Data":"9708763ddb6cdd14575c2edb6a39f5547296c6ec28017b7ffc35f3cb63b81ad6"} Jan 20 17:50:30 crc kubenswrapper[4558]: E0120 17:50:30.488571 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-nova-novncproxy-cell1-public-svc: secret "cert-nova-novncproxy-cell1-public-svc" not found Jan 20 17:50:30 crc kubenswrapper[4558]: E0120 17:50:30.488605 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/581c2419-48d9-4b7d-b71b-ce5cceb0326d-nova-novncproxy-tls-certs podName:581c2419-48d9-4b7d-b71b-ce5cceb0326d nodeName:}" failed. No retries permitted until 2026-01-20 17:50:32.488598756 +0000 UTC m=+4126.248936722 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nova-novncproxy-tls-certs" (UniqueName: "kubernetes.io/secret/581c2419-48d9-4b7d-b71b-ce5cceb0326d-nova-novncproxy-tls-certs") pod "nova-cell1-novncproxy-0" (UID: "581c2419-48d9-4b7d-b71b-ce5cceb0326d") : secret "cert-nova-novncproxy-cell1-public-svc" not found Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.494815 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6dae3a04-db9e-48a2-bbe2-012c9ba55890-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6dae3a04-db9e-48a2-bbe2-012c9ba55890" (UID: "6dae3a04-db9e-48a2-bbe2-012c9ba55890"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.495587 4558 scope.go:117] "RemoveContainer" containerID="3216cb4a892da431a3d001352506b8205d9cde6cf6808df51a45283abdf32abc" Jan 20 17:50:30 crc kubenswrapper[4558]: E0120 17:50:30.495893 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3216cb4a892da431a3d001352506b8205d9cde6cf6808df51a45283abdf32abc\": container with ID starting with 3216cb4a892da431a3d001352506b8205d9cde6cf6808df51a45283abdf32abc not found: ID does not exist" containerID="3216cb4a892da431a3d001352506b8205d9cde6cf6808df51a45283abdf32abc" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.495940 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3216cb4a892da431a3d001352506b8205d9cde6cf6808df51a45283abdf32abc"} err="failed to get container status \"3216cb4a892da431a3d001352506b8205d9cde6cf6808df51a45283abdf32abc\": rpc error: code = NotFound desc = could not find container \"3216cb4a892da431a3d001352506b8205d9cde6cf6808df51a45283abdf32abc\": container with ID starting with 3216cb4a892da431a3d001352506b8205d9cde6cf6808df51a45283abdf32abc not found: ID does not exist" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.495996 4558 scope.go:117] "RemoveContainer" containerID="212a4fd973edb30677b4248e267dd6b57d185e117261fb43b7e48e9267a561a9" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.496257 4558 generic.go:334] "Generic (PLEG): container finished" podID="d0f82b75-bed6-4779-ad55-6bec50d7faa6" containerID="a612c466a2d2fa8c608454134a7b85c4368226b9c8b498cce072561130074559" exitCode=0 Jan 20 17:50:30 crc kubenswrapper[4558]: E0120 17:50:30.496317 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"212a4fd973edb30677b4248e267dd6b57d185e117261fb43b7e48e9267a561a9\": container with ID starting with 212a4fd973edb30677b4248e267dd6b57d185e117261fb43b7e48e9267a561a9 not found: ID does not exist" containerID="212a4fd973edb30677b4248e267dd6b57d185e117261fb43b7e48e9267a561a9" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.496348 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"212a4fd973edb30677b4248e267dd6b57d185e117261fb43b7e48e9267a561a9"} err="failed to get container status \"212a4fd973edb30677b4248e267dd6b57d185e117261fb43b7e48e9267a561a9\": rpc error: code = NotFound desc = could not find container \"212a4fd973edb30677b4248e267dd6b57d185e117261fb43b7e48e9267a561a9\": container with ID starting with 212a4fd973edb30677b4248e267dd6b57d185e117261fb43b7e48e9267a561a9 not found: ID does not exist" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.496368 4558 scope.go:117] "RemoveContainer" containerID="fdc46115e0c01656230d526da5789da8cef6a6ad4afd973eaf326c530afd94ab" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.496351 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"d0f82b75-bed6-4779-ad55-6bec50d7faa6","Type":"ContainerDied","Data":"a612c466a2d2fa8c608454134a7b85c4368226b9c8b498cce072561130074559"} Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.496414 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.512751 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd-config-data" (OuterVolumeSpecName: "config-data") pod "d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd" (UID: "d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.514916 4558 generic.go:334] "Generic (PLEG): container finished" podID="e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2" containerID="7436f50dac4bab17580af1cbb38a2ef4c0a9b64f22e3397fd9aa8eeeb133fc29" exitCode=0 Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.515054 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-7d4c95f78b-qjwvm" event={"ID":"e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2","Type":"ContainerDied","Data":"7436f50dac4bab17580af1cbb38a2ef4c0a9b64f22e3397fd9aa8eeeb133fc29"} Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.515138 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-7d4c95f78b-qjwvm" event={"ID":"e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2","Type":"ContainerDied","Data":"c0af33357852e67e5f3a19f6b35879cfd6cbfe786658104acea30a122af047f5"} Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.515395 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-7d4c95f78b-qjwvm" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.518344 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovn-northd-0_6dae3a04-db9e-48a2-bbe2-012c9ba55890/ovn-northd/0.log" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.518389 4558 generic.go:334] "Generic (PLEG): container finished" podID="6dae3a04-db9e-48a2-bbe2-012c9ba55890" containerID="9b54f5f4a254eb9083539aa947157b34dfeea7934f5c2b37060ecf2e8e7a304a" exitCode=139 Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.518424 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"6dae3a04-db9e-48a2-bbe2-012c9ba55890","Type":"ContainerDied","Data":"9b54f5f4a254eb9083539aa947157b34dfeea7934f5c2b37060ecf2e8e7a304a"} Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.518455 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"6dae3a04-db9e-48a2-bbe2-012c9ba55890","Type":"ContainerDied","Data":"25c5077205531977be1269655185fb1d3e09b3b6d2d7b628d3c833bf0a9ae60b"} Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.518515 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.535105 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd" (UID: "d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.537512 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.538995 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2-config-data" (OuterVolumeSpecName: "config-data") pod "e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2" (UID: "e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.539463 4558 scope.go:117] "RemoveContainer" containerID="c66c2bcafd61422cbe12474d972cdfbe2b8064d7acd5baa2defb02dbaf1a697b" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.555033 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.556238 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f98db5df-2f7f-490b-b4a0-7e9f27b07a60-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "f98db5df-2f7f-490b-b4a0-7e9f27b07a60" (UID: "f98db5df-2f7f-490b-b4a0-7e9f27b07a60"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.563328 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c5adf97d-8b90-49ca-bbeb-21dd8446a226-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c5adf97d-8b90-49ca-bbeb-21dd8446a226" (UID: "c5adf97d-8b90-49ca-bbeb-21dd8446a226"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.565290 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f98db5df-2f7f-490b-b4a0-7e9f27b07a60-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f98db5df-2f7f-490b-b4a0-7e9f27b07a60" (UID: "f98db5df-2f7f-490b-b4a0-7e9f27b07a60"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.566952 4558 scope.go:117] "RemoveContainer" containerID="fdc46115e0c01656230d526da5789da8cef6a6ad4afd973eaf326c530afd94ab" Jan 20 17:50:30 crc kubenswrapper[4558]: E0120 17:50:30.567452 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fdc46115e0c01656230d526da5789da8cef6a6ad4afd973eaf326c530afd94ab\": container with ID starting with fdc46115e0c01656230d526da5789da8cef6a6ad4afd973eaf326c530afd94ab not found: ID does not exist" containerID="fdc46115e0c01656230d526da5789da8cef6a6ad4afd973eaf326c530afd94ab" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.567491 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fdc46115e0c01656230d526da5789da8cef6a6ad4afd973eaf326c530afd94ab"} err="failed to get container status \"fdc46115e0c01656230d526da5789da8cef6a6ad4afd973eaf326c530afd94ab\": rpc error: code = NotFound desc = could not find container \"fdc46115e0c01656230d526da5789da8cef6a6ad4afd973eaf326c530afd94ab\": container with ID starting with fdc46115e0c01656230d526da5789da8cef6a6ad4afd973eaf326c530afd94ab not found: ID does not exist" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.567522 4558 scope.go:117] "RemoveContainer" containerID="c66c2bcafd61422cbe12474d972cdfbe2b8064d7acd5baa2defb02dbaf1a697b" Jan 20 17:50:30 crc kubenswrapper[4558]: E0120 17:50:30.567763 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c66c2bcafd61422cbe12474d972cdfbe2b8064d7acd5baa2defb02dbaf1a697b\": container with ID starting with c66c2bcafd61422cbe12474d972cdfbe2b8064d7acd5baa2defb02dbaf1a697b not found: ID does not exist" containerID="c66c2bcafd61422cbe12474d972cdfbe2b8064d7acd5baa2defb02dbaf1a697b" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.567795 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c66c2bcafd61422cbe12474d972cdfbe2b8064d7acd5baa2defb02dbaf1a697b"} err="failed to get container status \"c66c2bcafd61422cbe12474d972cdfbe2b8064d7acd5baa2defb02dbaf1a697b\": rpc error: code = NotFound desc = could not find container \"c66c2bcafd61422cbe12474d972cdfbe2b8064d7acd5baa2defb02dbaf1a697b\": container with ID starting with c66c2bcafd61422cbe12474d972cdfbe2b8064d7acd5baa2defb02dbaf1a697b not found: ID does not exist" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.567808 4558 scope.go:117] "RemoveContainer" containerID="37aadc76d1ea5916d8987f0d7d3856518a536ffe632acfd464eb225116abde18" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.580571 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d2f1782-aa1c-4382-b682-27ce8f37d139-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "7d2f1782-aa1c-4382-b682-27ce8f37d139" (UID: "7d2f1782-aa1c-4382-b682-27ce8f37d139"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.580612 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/47de199d-0dde-4082-8b6c-99f3d202608b-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "47de199d-0dde-4082-8b6c-99f3d202608b" (UID: "47de199d-0dde-4082-8b6c-99f3d202608b"). 
InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.590315 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3c44870c-4ab7-436a-9862-d6a2a4487bed" path="/var/lib/kubelet/pods/3c44870c-4ab7-436a-9862-d6a2a4487bed/volumes" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.590823 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6254cefa-8c4a-472e-9faf-4633c6d1618e" path="/var/lib/kubelet/pods/6254cefa-8c4a-472e-9faf-4633c6d1618e/volumes" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.591453 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9330efb9-cbc2-4c4a-9928-eedf93324d57" path="/var/lib/kubelet/pods/9330efb9-cbc2-4c4a-9928-eedf93324d57/volumes" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.591549 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lx6x9\" (UniqueName: \"kubernetes.io/projected/d0f82b75-bed6-4779-ad55-6bec50d7faa6-kube-api-access-lx6x9\") pod \"d0f82b75-bed6-4779-ad55-6bec50d7faa6\" (UID: \"d0f82b75-bed6-4779-ad55-6bec50d7faa6\") " Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.595801 4558 scope.go:117] "RemoveContainer" containerID="5f690c112c0735b8be4b3a6b80c6e2fc33c18a468c87d95a1c56b1f942029cc8" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.604604 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0f82b75-bed6-4779-ad55-6bec50d7faa6-config-data\") pod \"d0f82b75-bed6-4779-ad55-6bec50d7faa6\" (UID: \"d0f82b75-bed6-4779-ad55-6bec50d7faa6\") " Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.604783 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d0f82b75-bed6-4779-ad55-6bec50d7faa6-scripts\") pod \"d0f82b75-bed6-4779-ad55-6bec50d7faa6\" (UID: \"d0f82b75-bed6-4779-ad55-6bec50d7faa6\") " Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.604859 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0f82b75-bed6-4779-ad55-6bec50d7faa6-combined-ca-bundle\") pod \"d0f82b75-bed6-4779-ad55-6bec50d7faa6\" (UID: \"d0f82b75-bed6-4779-ad55-6bec50d7faa6\") " Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.604939 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d0f82b75-bed6-4779-ad55-6bec50d7faa6-etc-machine-id\") pod \"d0f82b75-bed6-4779-ad55-6bec50d7faa6\" (UID: \"d0f82b75-bed6-4779-ad55-6bec50d7faa6\") " Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.605034 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d0f82b75-bed6-4779-ad55-6bec50d7faa6-config-data-custom\") pod \"d0f82b75-bed6-4779-ad55-6bec50d7faa6\" (UID: \"d0f82b75-bed6-4779-ad55-6bec50d7faa6\") " Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.605833 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6dae3a04-db9e-48a2-bbe2-012c9ba55890-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.609118 4558 reconciler_common.go:293] "Volume detached for volume 
\"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/47de199d-0dde-4082-8b6c-99f3d202608b-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.609194 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.609276 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.609339 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5adf97d-8b90-49ca-bbeb-21dd8446a226-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.609392 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f98db5df-2f7f-490b-b4a0-7e9f27b07a60-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.609443 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.609499 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f98db5df-2f7f-490b-b4a0-7e9f27b07a60-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.609555 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.609611 4558 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/7d2f1782-aa1c-4382-b682-27ce8f37d139-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.609663 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.609066 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d0f82b75-bed6-4779-ad55-6bec50d7faa6-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "d0f82b75-bed6-4779-ad55-6bec50d7faa6" (UID: "d0f82b75-bed6-4779-ad55-6bec50d7faa6"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.617404 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2" (UID: "e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.617429 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d0f82b75-bed6-4779-ad55-6bec50d7faa6-kube-api-access-lx6x9" (OuterVolumeSpecName: "kube-api-access-lx6x9") pod "d0f82b75-bed6-4779-ad55-6bec50d7faa6" (UID: "d0f82b75-bed6-4779-ad55-6bec50d7faa6"). InnerVolumeSpecName "kube-api-access-lx6x9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.617436 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d0f82b75-bed6-4779-ad55-6bec50d7faa6-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "d0f82b75-bed6-4779-ad55-6bec50d7faa6" (UID: "d0f82b75-bed6-4779-ad55-6bec50d7faa6"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.617467 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/47de199d-0dde-4082-8b6c-99f3d202608b-config-data" (OuterVolumeSpecName: "config-data") pod "47de199d-0dde-4082-8b6c-99f3d202608b" (UID: "47de199d-0dde-4082-8b6c-99f3d202608b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.617485 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d0f82b75-bed6-4779-ad55-6bec50d7faa6-scripts" (OuterVolumeSpecName: "scripts") pod "d0f82b75-bed6-4779-ad55-6bec50d7faa6" (UID: "d0f82b75-bed6-4779-ad55-6bec50d7faa6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.620593 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c5adf97d-8b90-49ca-bbeb-21dd8446a226-config-data" (OuterVolumeSpecName: "config-data") pod "c5adf97d-8b90-49ca-bbeb-21dd8446a226" (UID: "c5adf97d-8b90-49ca-bbeb-21dd8446a226"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.621240 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/47de199d-0dde-4082-8b6c-99f3d202608b-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "47de199d-0dde-4082-8b6c-99f3d202608b" (UID: "47de199d-0dde-4082-8b6c-99f3d202608b"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.627580 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd" (UID: "d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.628388 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f98db5df-2f7f-490b-b4a0-7e9f27b07a60-config-data" (OuterVolumeSpecName: "config-data") pod "f98db5df-2f7f-490b-b4a0-7e9f27b07a60" (UID: "f98db5df-2f7f-490b-b4a0-7e9f27b07a60"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.645339 4558 scope.go:117] "RemoveContainer" containerID="37aadc76d1ea5916d8987f0d7d3856518a536ffe632acfd464eb225116abde18" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.646509 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6dae3a04-db9e-48a2-bbe2-012c9ba55890-ovn-northd-tls-certs" (OuterVolumeSpecName: "ovn-northd-tls-certs") pod "6dae3a04-db9e-48a2-bbe2-012c9ba55890" (UID: "6dae3a04-db9e-48a2-bbe2-012c9ba55890"). InnerVolumeSpecName "ovn-northd-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:30 crc kubenswrapper[4558]: E0120 17:50:30.648069 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"37aadc76d1ea5916d8987f0d7d3856518a536ffe632acfd464eb225116abde18\": container with ID starting with 37aadc76d1ea5916d8987f0d7d3856518a536ffe632acfd464eb225116abde18 not found: ID does not exist" containerID="37aadc76d1ea5916d8987f0d7d3856518a536ffe632acfd464eb225116abde18" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.648126 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"37aadc76d1ea5916d8987f0d7d3856518a536ffe632acfd464eb225116abde18"} err="failed to get container status \"37aadc76d1ea5916d8987f0d7d3856518a536ffe632acfd464eb225116abde18\": rpc error: code = NotFound desc = could not find container \"37aadc76d1ea5916d8987f0d7d3856518a536ffe632acfd464eb225116abde18\": container with ID starting with 37aadc76d1ea5916d8987f0d7d3856518a536ffe632acfd464eb225116abde18 not found: ID does not exist" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.648154 4558 scope.go:117] "RemoveContainer" containerID="5f690c112c0735b8be4b3a6b80c6e2fc33c18a468c87d95a1c56b1f942029cc8" Jan 20 17:50:30 crc kubenswrapper[4558]: E0120 17:50:30.648514 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5f690c112c0735b8be4b3a6b80c6e2fc33c18a468c87d95a1c56b1f942029cc8\": container with ID starting with 5f690c112c0735b8be4b3a6b80c6e2fc33c18a468c87d95a1c56b1f942029cc8 not found: ID does not exist" containerID="5f690c112c0735b8be4b3a6b80c6e2fc33c18a468c87d95a1c56b1f942029cc8" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.648576 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5f690c112c0735b8be4b3a6b80c6e2fc33c18a468c87d95a1c56b1f942029cc8"} err="failed to get container status \"5f690c112c0735b8be4b3a6b80c6e2fc33c18a468c87d95a1c56b1f942029cc8\": rpc error: code = NotFound desc = could not find container \"5f690c112c0735b8be4b3a6b80c6e2fc33c18a468c87d95a1c56b1f942029cc8\": container with ID starting with 5f690c112c0735b8be4b3a6b80c6e2fc33c18a468c87d95a1c56b1f942029cc8 not found: ID does not exist" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.648597 4558 scope.go:117] "RemoveContainer" containerID="1644d78283b3c3dad2b68cd9a4d16b7b8c04de0ba6bfcedb6b4c04ac6f6297de" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.649094 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6dae3a04-db9e-48a2-bbe2-012c9ba55890-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "6dae3a04-db9e-48a2-bbe2-012c9ba55890" (UID: "6dae3a04-db9e-48a2-bbe2-012c9ba55890"). 
InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.672283 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d0f82b75-bed6-4779-ad55-6bec50d7faa6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d0f82b75-bed6-4779-ad55-6bec50d7faa6" (UID: "d0f82b75-bed6-4779-ad55-6bec50d7faa6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.673780 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.675082 4558 scope.go:117] "RemoveContainer" containerID="b497e638b4894f1c81e1773af34e888736bf132c67bc13eb676c4be84dea7235" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.716337 4558 scope.go:117] "RemoveContainer" containerID="1644d78283b3c3dad2b68cd9a4d16b7b8c04de0ba6bfcedb6b4c04ac6f6297de" Jan 20 17:50:30 crc kubenswrapper[4558]: E0120 17:50:30.717056 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1644d78283b3c3dad2b68cd9a4d16b7b8c04de0ba6bfcedb6b4c04ac6f6297de\": container with ID starting with 1644d78283b3c3dad2b68cd9a4d16b7b8c04de0ba6bfcedb6b4c04ac6f6297de not found: ID does not exist" containerID="1644d78283b3c3dad2b68cd9a4d16b7b8c04de0ba6bfcedb6b4c04ac6f6297de" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.717218 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1644d78283b3c3dad2b68cd9a4d16b7b8c04de0ba6bfcedb6b4c04ac6f6297de"} err="failed to get container status \"1644d78283b3c3dad2b68cd9a4d16b7b8c04de0ba6bfcedb6b4c04ac6f6297de\": rpc error: code = NotFound desc = could not find container \"1644d78283b3c3dad2b68cd9a4d16b7b8c04de0ba6bfcedb6b4c04ac6f6297de\": container with ID starting with 1644d78283b3c3dad2b68cd9a4d16b7b8c04de0ba6bfcedb6b4c04ac6f6297de not found: ID does not exist" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.717367 4558 scope.go:117] "RemoveContainer" containerID="b497e638b4894f1c81e1773af34e888736bf132c67bc13eb676c4be84dea7235" Jan 20 17:50:30 crc kubenswrapper[4558]: E0120 17:50:30.719227 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b497e638b4894f1c81e1773af34e888736bf132c67bc13eb676c4be84dea7235\": container with ID starting with b497e638b4894f1c81e1773af34e888736bf132c67bc13eb676c4be84dea7235 not found: ID does not exist" containerID="b497e638b4894f1c81e1773af34e888736bf132c67bc13eb676c4be84dea7235" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.719259 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b497e638b4894f1c81e1773af34e888736bf132c67bc13eb676c4be84dea7235"} err="failed to get container status \"b497e638b4894f1c81e1773af34e888736bf132c67bc13eb676c4be84dea7235\": rpc error: code = NotFound desc = could not find container \"b497e638b4894f1c81e1773af34e888736bf132c67bc13eb676c4be84dea7235\": container with ID starting with b497e638b4894f1c81e1773af34e888736bf132c67bc13eb676c4be84dea7235 not found: ID does not exist" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.719276 4558 scope.go:117] "RemoveContainer" 
containerID="5ad5d5673753b8c1e259ace28ba37c369cd951524f24fdfa1a91a5b011bae4ef" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.720613 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lx6x9\" (UniqueName: \"kubernetes.io/projected/d0f82b75-bed6-4779-ad55-6bec50d7faa6-kube-api-access-lx6x9\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.720635 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/47de199d-0dde-4082-8b6c-99f3d202608b-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.720648 4558 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/6dae3a04-db9e-48a2-bbe2-012c9ba55890-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.720657 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d0f82b75-bed6-4779-ad55-6bec50d7faa6-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.720667 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0f82b75-bed6-4779-ad55-6bec50d7faa6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.720677 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f98db5df-2f7f-490b-b4a0-7e9f27b07a60-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.720686 4558 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/d0f82b75-bed6-4779-ad55-6bec50d7faa6-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.720696 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d0f82b75-bed6-4779-ad55-6bec50d7faa6-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.720705 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.720715 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.720726 4558 reconciler_common.go:293] "Volume detached for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/6dae3a04-db9e-48a2-bbe2-012c9ba55890-ovn-northd-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.720734 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c5adf97d-8b90-49ca-bbeb-21dd8446a226-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.720746 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47de199d-0dde-4082-8b6c-99f3d202608b-config-data\") on 
node \"crc\" DevicePath \"\"" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.732324 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.739732 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d0f82b75-bed6-4779-ad55-6bec50d7faa6-config-data" (OuterVolumeSpecName: "config-data") pod "d0f82b75-bed6-4779-ad55-6bec50d7faa6" (UID: "d0f82b75-bed6-4779-ad55-6bec50d7faa6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.740628 4558 scope.go:117] "RemoveContainer" containerID="d5d1fc6f44bede9582e8d9e34323e73cfd6032c9c0e5c64c9c713f180efb4c1c" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.741263 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.751341 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:50:30 crc kubenswrapper[4558]: E0120 17:50:30.754559 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2" containerName="barbican-keystone-listener" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.754649 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2" containerName="barbican-keystone-listener" Jan 20 17:50:30 crc kubenswrapper[4558]: E0120 17:50:30.754703 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47de199d-0dde-4082-8b6c-99f3d202608b" containerName="cinder-api-log" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.754748 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="47de199d-0dde-4082-8b6c-99f3d202608b" containerName="cinder-api-log" Jan 20 17:50:30 crc kubenswrapper[4558]: E0120 17:50:30.755610 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd" containerName="glance-log" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.755694 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd" containerName="glance-log" Jan 20 17:50:30 crc kubenswrapper[4558]: E0120 17:50:30.755757 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5adf97d-8b90-49ca-bbeb-21dd8446a226" containerName="barbican-worker-log" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.755811 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5adf97d-8b90-49ca-bbeb-21dd8446a226" containerName="barbican-worker-log" Jan 20 17:50:30 crc kubenswrapper[4558]: E0120 17:50:30.755867 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5adf97d-8b90-49ca-bbeb-21dd8446a226" containerName="barbican-worker" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.755912 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5adf97d-8b90-49ca-bbeb-21dd8446a226" containerName="barbican-worker" Jan 20 17:50:30 crc kubenswrapper[4558]: E0120 17:50:30.755957 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d0f82b75-bed6-4779-ad55-6bec50d7faa6" containerName="cinder-scheduler" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.756010 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0f82b75-bed6-4779-ad55-6bec50d7faa6" 
containerName="cinder-scheduler" Jan 20 17:50:30 crc kubenswrapper[4558]: E0120 17:50:30.756064 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f98db5df-2f7f-490b-b4a0-7e9f27b07a60" containerName="glance-httpd" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.756115 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f98db5df-2f7f-490b-b4a0-7e9f27b07a60" containerName="glance-httpd" Jan 20 17:50:30 crc kubenswrapper[4558]: E0120 17:50:30.756261 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28febf86-c729-498e-b3af-0321468d80f3" containerName="nova-api-log" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.756315 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="28febf86-c729-498e-b3af-0321468d80f3" containerName="nova-api-log" Jan 20 17:50:30 crc kubenswrapper[4558]: E0120 17:50:30.756369 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47de199d-0dde-4082-8b6c-99f3d202608b" containerName="cinder-api" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.756411 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="47de199d-0dde-4082-8b6c-99f3d202608b" containerName="cinder-api" Jan 20 17:50:30 crc kubenswrapper[4558]: E0120 17:50:30.756466 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d2f1782-aa1c-4382-b682-27ce8f37d139" containerName="nova-metadata-metadata" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.756515 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d2f1782-aa1c-4382-b682-27ce8f37d139" containerName="nova-metadata-metadata" Jan 20 17:50:30 crc kubenswrapper[4558]: E0120 17:50:30.756566 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6dae3a04-db9e-48a2-bbe2-012c9ba55890" containerName="openstack-network-exporter" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.756610 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6dae3a04-db9e-48a2-bbe2-012c9ba55890" containerName="openstack-network-exporter" Jan 20 17:50:30 crc kubenswrapper[4558]: E0120 17:50:30.756655 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6dae3a04-db9e-48a2-bbe2-012c9ba55890" containerName="ovn-northd" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.756697 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6dae3a04-db9e-48a2-bbe2-012c9ba55890" containerName="ovn-northd" Jan 20 17:50:30 crc kubenswrapper[4558]: E0120 17:50:30.756749 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d0f82b75-bed6-4779-ad55-6bec50d7faa6" containerName="probe" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.756797 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0f82b75-bed6-4779-ad55-6bec50d7faa6" containerName="probe" Jan 20 17:50:30 crc kubenswrapper[4558]: E0120 17:50:30.756843 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d2f1782-aa1c-4382-b682-27ce8f37d139" containerName="nova-metadata-log" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.756893 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d2f1782-aa1c-4382-b682-27ce8f37d139" containerName="nova-metadata-log" Jan 20 17:50:30 crc kubenswrapper[4558]: E0120 17:50:30.756947 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f98db5df-2f7f-490b-b4a0-7e9f27b07a60" containerName="glance-log" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.757000 4558 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="f98db5df-2f7f-490b-b4a0-7e9f27b07a60" containerName="glance-log" Jan 20 17:50:30 crc kubenswrapper[4558]: E0120 17:50:30.759232 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28febf86-c729-498e-b3af-0321468d80f3" containerName="nova-api-api" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.759299 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="28febf86-c729-498e-b3af-0321468d80f3" containerName="nova-api-api" Jan 20 17:50:30 crc kubenswrapper[4558]: E0120 17:50:30.759363 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2" containerName="barbican-keystone-listener-log" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.759414 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2" containerName="barbican-keystone-listener-log" Jan 20 17:50:30 crc kubenswrapper[4558]: E0120 17:50:30.759464 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd" containerName="glance-httpd" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.759517 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd" containerName="glance-httpd" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.759729 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2" containerName="barbican-keystone-listener-log" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.759785 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f98db5df-2f7f-490b-b4a0-7e9f27b07a60" containerName="glance-log" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.759843 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="28febf86-c729-498e-b3af-0321468d80f3" containerName="nova-api-log" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.759968 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f98db5df-2f7f-490b-b4a0-7e9f27b07a60" containerName="glance-httpd" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.760033 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="47de199d-0dde-4082-8b6c-99f3d202608b" containerName="cinder-api-log" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.760088 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="6dae3a04-db9e-48a2-bbe2-012c9ba55890" containerName="ovn-northd" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.760132 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="7d2f1782-aa1c-4382-b682-27ce8f37d139" containerName="nova-metadata-metadata" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.760201 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd" containerName="glance-log" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.760261 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="28febf86-c729-498e-b3af-0321468d80f3" containerName="nova-api-api" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.760318 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="47de199d-0dde-4082-8b6c-99f3d202608b" containerName="cinder-api" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.760368 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d0f82b75-bed6-4779-ad55-6bec50d7faa6" containerName="cinder-scheduler" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 
17:50:30.760415 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="7d2f1782-aa1c-4382-b682-27ce8f37d139" containerName="nova-metadata-log" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.760458 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c5adf97d-8b90-49ca-bbeb-21dd8446a226" containerName="barbican-worker" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.760510 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d0f82b75-bed6-4779-ad55-6bec50d7faa6" containerName="probe" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.760652 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c5adf97d-8b90-49ca-bbeb-21dd8446a226" containerName="barbican-worker-log" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.760706 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="6dae3a04-db9e-48a2-bbe2-012c9ba55890" containerName="openstack-network-exporter" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.760755 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd" containerName="glance-httpd" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.760799 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2" containerName="barbican-keystone-listener" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.761934 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.773575 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.775727 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-internal-config-data" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.776498 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-internal-svc" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.786643 4558 scope.go:117] "RemoveContainer" containerID="5ad5d5673753b8c1e259ace28ba37c369cd951524f24fdfa1a91a5b011bae4ef" Jan 20 17:50:30 crc kubenswrapper[4558]: E0120 17:50:30.792491 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5ad5d5673753b8c1e259ace28ba37c369cd951524f24fdfa1a91a5b011bae4ef\": container with ID starting with 5ad5d5673753b8c1e259ace28ba37c369cd951524f24fdfa1a91a5b011bae4ef not found: ID does not exist" containerID="5ad5d5673753b8c1e259ace28ba37c369cd951524f24fdfa1a91a5b011bae4ef" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.792542 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5ad5d5673753b8c1e259ace28ba37c369cd951524f24fdfa1a91a5b011bae4ef"} err="failed to get container status \"5ad5d5673753b8c1e259ace28ba37c369cd951524f24fdfa1a91a5b011bae4ef\": rpc error: code = NotFound desc = could not find container \"5ad5d5673753b8c1e259ace28ba37c369cd951524f24fdfa1a91a5b011bae4ef\": container with ID starting with 5ad5d5673753b8c1e259ace28ba37c369cd951524f24fdfa1a91a5b011bae4ef not found: ID does not exist" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.792569 4558 scope.go:117] "RemoveContainer" 
containerID="d5d1fc6f44bede9582e8d9e34323e73cfd6032c9c0e5c64c9c713f180efb4c1c" Jan 20 17:50:30 crc kubenswrapper[4558]: E0120 17:50:30.796048 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d5d1fc6f44bede9582e8d9e34323e73cfd6032c9c0e5c64c9c713f180efb4c1c\": container with ID starting with d5d1fc6f44bede9582e8d9e34323e73cfd6032c9c0e5c64c9c713f180efb4c1c not found: ID does not exist" containerID="d5d1fc6f44bede9582e8d9e34323e73cfd6032c9c0e5c64c9c713f180efb4c1c" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.796094 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d5d1fc6f44bede9582e8d9e34323e73cfd6032c9c0e5c64c9c713f180efb4c1c"} err="failed to get container status \"d5d1fc6f44bede9582e8d9e34323e73cfd6032c9c0e5c64c9c713f180efb4c1c\": rpc error: code = NotFound desc = could not find container \"d5d1fc6f44bede9582e8d9e34323e73cfd6032c9c0e5c64c9c713f180efb4c1c\": container with ID starting with d5d1fc6f44bede9582e8d9e34323e73cfd6032c9c0e5c64c9c713f180efb4c1c not found: ID does not exist" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.796125 4558 scope.go:117] "RemoveContainer" containerID="7fb1cfce15f7b21b933180ce2945a9ba95c867c596b50488373fe0546cb8f714" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.801385 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.824729 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/28febf86-c729-498e-b3af-0321468d80f3-config-data\") pod \"28febf86-c729-498e-b3af-0321468d80f3\" (UID: \"28febf86-c729-498e-b3af-0321468d80f3\") " Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.824966 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/28febf86-c729-498e-b3af-0321468d80f3-logs\") pod \"28febf86-c729-498e-b3af-0321468d80f3\" (UID: \"28febf86-c729-498e-b3af-0321468d80f3\") " Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.825128 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-28gph\" (UniqueName: \"kubernetes.io/projected/28febf86-c729-498e-b3af-0321468d80f3-kube-api-access-28gph\") pod \"28febf86-c729-498e-b3af-0321468d80f3\" (UID: \"28febf86-c729-498e-b3af-0321468d80f3\") " Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.825232 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/28febf86-c729-498e-b3af-0321468d80f3-public-tls-certs\") pod \"28febf86-c729-498e-b3af-0321468d80f3\" (UID: \"28febf86-c729-498e-b3af-0321468d80f3\") " Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.825320 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28febf86-c729-498e-b3af-0321468d80f3-combined-ca-bundle\") pod \"28febf86-c729-498e-b3af-0321468d80f3\" (UID: \"28febf86-c729-498e-b3af-0321468d80f3\") " Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.826018 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/28febf86-c729-498e-b3af-0321468d80f3-internal-tls-certs\") pod \"28febf86-c729-498e-b3af-0321468d80f3\" (UID: 
\"28febf86-c729-498e-b3af-0321468d80f3\") " Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.826713 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0f82b75-bed6-4779-ad55-6bec50d7faa6-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.837296 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/28febf86-c729-498e-b3af-0321468d80f3-logs" (OuterVolumeSpecName: "logs") pod "28febf86-c729-498e-b3af-0321468d80f3" (UID: "28febf86-c729-498e-b3af-0321468d80f3"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.841619 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/28febf86-c729-498e-b3af-0321468d80f3-kube-api-access-28gph" (OuterVolumeSpecName: "kube-api-access-28gph") pod "28febf86-c729-498e-b3af-0321468d80f3" (UID: "28febf86-c729-498e-b3af-0321468d80f3"). InnerVolumeSpecName "kube-api-access-28gph". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.856487 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.858342 4558 scope.go:117] "RemoveContainer" containerID="a612c466a2d2fa8c608454134a7b85c4368226b9c8b498cce072561130074559" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.882256 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/28febf86-c729-498e-b3af-0321468d80f3-config-data" (OuterVolumeSpecName: "config-data") pod "28febf86-c729-498e-b3af-0321468d80f3" (UID: "28febf86-c729-498e-b3af-0321468d80f3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.885031 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.887342 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.890804 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-metadata-internal-svc" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.891049 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-metadata-config-data" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.903678 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.919757 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/28febf86-c729-498e-b3af-0321468d80f3-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "28febf86-c729-498e-b3af-0321468d80f3" (UID: "28febf86-c729-498e-b3af-0321468d80f3"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.927703 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/28febf86-c729-498e-b3af-0321468d80f3-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "28febf86-c729-498e-b3af-0321468d80f3" (UID: "28febf86-c729-498e-b3af-0321468d80f3"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.928915 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/28febf86-c729-498e-b3af-0321468d80f3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "28febf86-c729-498e-b3af-0321468d80f3" (UID: "28febf86-c729-498e-b3af-0321468d80f3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.932058 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18c09849-702b-4939-9cf1-0e06c6adc889-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"18c09849-702b-4939-9cf1-0e06c6adc889\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.932106 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/18c09849-702b-4939-9cf1-0e06c6adc889-logs\") pod \"glance-default-internal-api-0\" (UID: \"18c09849-702b-4939-9cf1-0e06c6adc889\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.932484 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/18c09849-702b-4939-9cf1-0e06c6adc889-scripts\") pod \"glance-default-internal-api-0\" (UID: \"18c09849-702b-4939-9cf1-0e06c6adc889\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.932634 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/18c09849-702b-4939-9cf1-0e06c6adc889-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"18c09849-702b-4939-9cf1-0e06c6adc889\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.932704 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/18c09849-702b-4939-9cf1-0e06c6adc889-config-data\") pod \"glance-default-internal-api-0\" (UID: \"18c09849-702b-4939-9cf1-0e06c6adc889\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.932747 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/18c09849-702b-4939-9cf1-0e06c6adc889-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"18c09849-702b-4939-9cf1-0e06c6adc889\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.932851 4558 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"18c09849-702b-4939-9cf1-0e06c6adc889\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.932916 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fg8wx\" (UniqueName: \"kubernetes.io/projected/18c09849-702b-4939-9cf1-0e06c6adc889-kube-api-access-fg8wx\") pod \"glance-default-internal-api-0\" (UID: \"18c09849-702b-4939-9cf1-0e06c6adc889\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.933153 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/28febf86-c729-498e-b3af-0321468d80f3-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.933198 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/28febf86-c729-498e-b3af-0321468d80f3-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.933236 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-28gph\" (UniqueName: \"kubernetes.io/projected/28febf86-c729-498e-b3af-0321468d80f3-kube-api-access-28gph\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.933268 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/28febf86-c729-498e-b3af-0321468d80f3-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.933280 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28febf86-c729-498e-b3af-0321468d80f3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.933290 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/28febf86-c729-498e-b3af-0321468d80f3-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.936284 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.947622 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.956624 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.966964 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.975384 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.977557 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.980084 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-public-svc" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.980382 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-external-config-data" Jan 20 17:50:30 crc kubenswrapper[4558]: I0120 17:50:30.993568 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:30.999101 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.003236 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-cinder-internal-svc" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.003853 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-api-config-data" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.006516 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.007751 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-cinder-public-svc" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.008710 4558 scope.go:117] "RemoveContainer" containerID="7436f50dac4bab17580af1cbb38a2ef4c0a9b64f22e3397fd9aa8eeeb133fc29" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.008927 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-config-data" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.009058 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-cinder-dockercfg-cwfqv" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.010637 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-scripts" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.038285 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.051500 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/18c09849-702b-4939-9cf1-0e06c6adc889-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"18c09849-702b-4939-9cf1-0e06c6adc889\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.051614 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/18c09849-702b-4939-9cf1-0e06c6adc889-config-data\") pod \"glance-default-internal-api-0\" (UID: \"18c09849-702b-4939-9cf1-0e06c6adc889\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.051682 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/18c09849-702b-4939-9cf1-0e06c6adc889-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"18c09849-702b-4939-9cf1-0e06c6adc889\") " 
pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.051844 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"18c09849-702b-4939-9cf1-0e06c6adc889\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.051939 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fg8wx\" (UniqueName: \"kubernetes.io/projected/18c09849-702b-4939-9cf1-0e06c6adc889-kube-api-access-fg8wx\") pod \"glance-default-internal-api-0\" (UID: \"18c09849-702b-4939-9cf1-0e06c6adc889\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.052054 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acb2f7f3-f6d0-4b4f-936f-baee3dfa3938-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"acb2f7f3-f6d0-4b4f-936f-baee3dfa3938\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.052081 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/acb2f7f3-f6d0-4b4f-936f-baee3dfa3938-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"acb2f7f3-f6d0-4b4f-936f-baee3dfa3938\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.052238 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18c09849-702b-4939-9cf1-0e06c6adc889-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"18c09849-702b-4939-9cf1-0e06c6adc889\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.052347 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/18c09849-702b-4939-9cf1-0e06c6adc889-logs\") pod \"glance-default-internal-api-0\" (UID: \"18c09849-702b-4939-9cf1-0e06c6adc889\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.052516 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/acb2f7f3-f6d0-4b4f-936f-baee3dfa3938-config-data\") pod \"nova-metadata-0\" (UID: \"acb2f7f3-f6d0-4b4f-936f-baee3dfa3938\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:50:31 crc kubenswrapper[4558]: E0120 17:50:31.052627 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-glance-default-internal-svc: secret "cert-glance-default-internal-svc" not found Jan 20 17:50:31 crc kubenswrapper[4558]: E0120 17:50:31.052692 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/18c09849-702b-4939-9cf1-0e06c6adc889-internal-tls-certs podName:18c09849-702b-4939-9cf1-0e06c6adc889 nodeName:}" failed. No retries permitted until 2026-01-20 17:50:31.552669401 +0000 UTC m=+4125.313007368 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/18c09849-702b-4939-9cf1-0e06c6adc889-internal-tls-certs") pod "glance-default-internal-api-0" (UID: "18c09849-702b-4939-9cf1-0e06c6adc889") : secret "cert-glance-default-internal-svc" not found Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.052790 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/18c09849-702b-4939-9cf1-0e06c6adc889-scripts\") pod \"glance-default-internal-api-0\" (UID: \"18c09849-702b-4939-9cf1-0e06c6adc889\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.052936 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xpdkk\" (UniqueName: \"kubernetes.io/projected/acb2f7f3-f6d0-4b4f-936f-baee3dfa3938-kube-api-access-xpdkk\") pod \"nova-metadata-0\" (UID: \"acb2f7f3-f6d0-4b4f-936f-baee3dfa3938\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.053074 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/acb2f7f3-f6d0-4b4f-936f-baee3dfa3938-logs\") pod \"nova-metadata-0\" (UID: \"acb2f7f3-f6d0-4b4f-936f-baee3dfa3938\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.057041 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"18c09849-702b-4939-9cf1-0e06c6adc889\") device mount path \"/mnt/openstack/pv11\"" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.057648 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/18c09849-702b-4939-9cf1-0e06c6adc889-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"18c09849-702b-4939-9cf1-0e06c6adc889\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.074922 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18c09849-702b-4939-9cf1-0e06c6adc889-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"18c09849-702b-4939-9cf1-0e06c6adc889\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.078123 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/18c09849-702b-4939-9cf1-0e06c6adc889-logs\") pod \"glance-default-internal-api-0\" (UID: \"18c09849-702b-4939-9cf1-0e06c6adc889\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.078545 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/18c09849-702b-4939-9cf1-0e06c6adc889-scripts\") pod \"glance-default-internal-api-0\" (UID: \"18c09849-702b-4939-9cf1-0e06c6adc889\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.079118 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-fg8wx\" (UniqueName: \"kubernetes.io/projected/18c09849-702b-4939-9cf1-0e06c6adc889-kube-api-access-fg8wx\") pod \"glance-default-internal-api-0\" (UID: \"18c09849-702b-4939-9cf1-0e06c6adc889\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.097463 4558 scope.go:117] "RemoveContainer" containerID="0eea0088c1de4416bcb9198c0646ec60717050f4f084f5e0c864e31658b39fd7" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.108584 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/18c09849-702b-4939-9cf1-0e06c6adc889-config-data\") pod \"glance-default-internal-api-0\" (UID: \"18c09849-702b-4939-9cf1-0e06c6adc889\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.113348 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"18c09849-702b-4939-9cf1-0e06c6adc889\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.131356 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.155155 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xpdkk\" (UniqueName: \"kubernetes.io/projected/acb2f7f3-f6d0-4b4f-936f-baee3dfa3938-kube-api-access-xpdkk\") pod \"nova-metadata-0\" (UID: \"acb2f7f3-f6d0-4b4f-936f-baee3dfa3938\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.155210 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/acb2f7f3-f6d0-4b4f-936f-baee3dfa3938-logs\") pod \"nova-metadata-0\" (UID: \"acb2f7f3-f6d0-4b4f-936f-baee3dfa3938\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.155232 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-config-data\") pod \"cinder-api-0\" (UID: \"3ec0f915-5676-4d2a-b743-1f8ee11c01ac\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.155269 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pq2q4\" (UniqueName: \"kubernetes.io/projected/07e11378-90b7-4994-99fa-6e2d78b61f63-kube-api-access-pq2q4\") pod \"glance-default-external-api-0\" (UID: \"07e11378-90b7-4994-99fa-6e2d78b61f63\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.155299 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/07e11378-90b7-4994-99fa-6e2d78b61f63-logs\") pod \"glance-default-external-api-0\" (UID: \"07e11378-90b7-4994-99fa-6e2d78b61f63\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.155355 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: 
\"kubernetes.io/empty-dir/07e11378-90b7-4994-99fa-6e2d78b61f63-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"07e11378-90b7-4994-99fa-6e2d78b61f63\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.155371 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"3ec0f915-5676-4d2a-b743-1f8ee11c01ac\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.155407 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"07e11378-90b7-4994-99fa-6e2d78b61f63\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.155431 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-config-data-custom\") pod \"cinder-api-0\" (UID: \"3ec0f915-5676-4d2a-b743-1f8ee11c01ac\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.155479 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-scripts\") pod \"cinder-api-0\" (UID: \"3ec0f915-5676-4d2a-b743-1f8ee11c01ac\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.155495 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-logs\") pod \"cinder-api-0\" (UID: \"3ec0f915-5676-4d2a-b743-1f8ee11c01ac\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.155519 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"3ec0f915-5676-4d2a-b743-1f8ee11c01ac\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.155558 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/07e11378-90b7-4994-99fa-6e2d78b61f63-config-data\") pod \"glance-default-external-api-0\" (UID: \"07e11378-90b7-4994-99fa-6e2d78b61f63\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.155576 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acb2f7f3-f6d0-4b4f-936f-baee3dfa3938-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"acb2f7f3-f6d0-4b4f-936f-baee3dfa3938\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.156106 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/acb2f7f3-f6d0-4b4f-936f-baee3dfa3938-logs\") pod \"nova-metadata-0\" (UID: \"acb2f7f3-f6d0-4b4f-936f-baee3dfa3938\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.156455 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/acb2f7f3-f6d0-4b4f-936f-baee3dfa3938-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"acb2f7f3-f6d0-4b4f-936f-baee3dfa3938\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.156494 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/07e11378-90b7-4994-99fa-6e2d78b61f63-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"07e11378-90b7-4994-99fa-6e2d78b61f63\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.156531 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/07e11378-90b7-4994-99fa-6e2d78b61f63-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"07e11378-90b7-4994-99fa-6e2d78b61f63\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.156560 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-etc-machine-id\") pod \"cinder-api-0\" (UID: \"3ec0f915-5676-4d2a-b743-1f8ee11c01ac\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.156579 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-public-tls-certs\") pod \"cinder-api-0\" (UID: \"3ec0f915-5676-4d2a-b743-1f8ee11c01ac\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.156597 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/07e11378-90b7-4994-99fa-6e2d78b61f63-scripts\") pod \"glance-default-external-api-0\" (UID: \"07e11378-90b7-4994-99fa-6e2d78b61f63\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.156616 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/acb2f7f3-f6d0-4b4f-936f-baee3dfa3938-config-data\") pod \"nova-metadata-0\" (UID: \"acb2f7f3-f6d0-4b4f-936f-baee3dfa3938\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.156660 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-drjj7\" (UniqueName: \"kubernetes.io/projected/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-kube-api-access-drjj7\") pod \"cinder-api-0\" (UID: \"3ec0f915-5676-4d2a-b743-1f8ee11c01ac\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.160225 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" 
(UniqueName: \"kubernetes.io/secret/acb2f7f3-f6d0-4b4f-936f-baee3dfa3938-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"acb2f7f3-f6d0-4b4f-936f-baee3dfa3938\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.160657 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/acb2f7f3-f6d0-4b4f-936f-baee3dfa3938-config-data\") pod \"nova-metadata-0\" (UID: \"acb2f7f3-f6d0-4b4f-936f-baee3dfa3938\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.161052 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acb2f7f3-f6d0-4b4f-936f-baee3dfa3938-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"acb2f7f3-f6d0-4b4f-936f-baee3dfa3938\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.166322 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.168425 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xpdkk\" (UniqueName: \"kubernetes.io/projected/acb2f7f3-f6d0-4b4f-936f-baee3dfa3938-kube-api-access-xpdkk\") pod \"nova-metadata-0\" (UID: \"acb2f7f3-f6d0-4b4f-936f-baee3dfa3938\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.176661 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.178430 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.183590 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovnnorthd-config" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.183704 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovnnorthd-scripts" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.183794 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ovnnorthd-ovnnorthd-dockercfg-lk4s2" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.184160 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ovnnorthd-ovndbs" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.196919 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.203058 4558 scope.go:117] "RemoveContainer" containerID="7436f50dac4bab17580af1cbb38a2ef4c0a9b64f22e3397fd9aa8eeeb133fc29" Jan 20 17:50:31 crc kubenswrapper[4558]: E0120 17:50:31.203592 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7436f50dac4bab17580af1cbb38a2ef4c0a9b64f22e3397fd9aa8eeeb133fc29\": container with ID starting with 7436f50dac4bab17580af1cbb38a2ef4c0a9b64f22e3397fd9aa8eeeb133fc29 not found: ID does not exist" containerID="7436f50dac4bab17580af1cbb38a2ef4c0a9b64f22e3397fd9aa8eeeb133fc29" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.203688 4558 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"7436f50dac4bab17580af1cbb38a2ef4c0a9b64f22e3397fd9aa8eeeb133fc29"} err="failed to get container status \"7436f50dac4bab17580af1cbb38a2ef4c0a9b64f22e3397fd9aa8eeeb133fc29\": rpc error: code = NotFound desc = could not find container \"7436f50dac4bab17580af1cbb38a2ef4c0a9b64f22e3397fd9aa8eeeb133fc29\": container with ID starting with 7436f50dac4bab17580af1cbb38a2ef4c0a9b64f22e3397fd9aa8eeeb133fc29 not found: ID does not exist" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.203780 4558 scope.go:117] "RemoveContainer" containerID="0eea0088c1de4416bcb9198c0646ec60717050f4f084f5e0c864e31658b39fd7" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.203972 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:50:31 crc kubenswrapper[4558]: E0120 17:50:31.204315 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0eea0088c1de4416bcb9198c0646ec60717050f4f084f5e0c864e31658b39fd7\": container with ID starting with 0eea0088c1de4416bcb9198c0646ec60717050f4f084f5e0c864e31658b39fd7 not found: ID does not exist" containerID="0eea0088c1de4416bcb9198c0646ec60717050f4f084f5e0c864e31658b39fd7" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.204394 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0eea0088c1de4416bcb9198c0646ec60717050f4f084f5e0c864e31658b39fd7"} err="failed to get container status \"0eea0088c1de4416bcb9198c0646ec60717050f4f084f5e0c864e31658b39fd7\": rpc error: code = NotFound desc = could not find container \"0eea0088c1de4416bcb9198c0646ec60717050f4f084f5e0c864e31658b39fd7\": container with ID starting with 0eea0088c1de4416bcb9198c0646ec60717050f4f084f5e0c864e31658b39fd7 not found: ID does not exist" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.204456 4558 scope.go:117] "RemoveContainer" containerID="bfbe340de7186cba3ca94e5cb5c951be9e3563e26cd26dd763147ac393eeec4f" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.217503 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.222876 4558 scope.go:117] "RemoveContainer" containerID="9b54f5f4a254eb9083539aa947157b34dfeea7934f5c2b37060ecf2e8e7a304a" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.231564 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-7d4c95f78b-qjwvm"] Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.238352 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-7d4c95f78b-qjwvm"] Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.244105 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.247221 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.250197 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.251847 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-scheduler-config-data" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.256571 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-worker-97cdbfcb5-xwv8l"] Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.257894 4558 scope.go:117] "RemoveContainer" containerID="bfbe340de7186cba3ca94e5cb5c951be9e3563e26cd26dd763147ac393eeec4f" Jan 20 17:50:31 crc kubenswrapper[4558]: E0120 17:50:31.259589 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bfbe340de7186cba3ca94e5cb5c951be9e3563e26cd26dd763147ac393eeec4f\": container with ID starting with bfbe340de7186cba3ca94e5cb5c951be9e3563e26cd26dd763147ac393eeec4f not found: ID does not exist" containerID="bfbe340de7186cba3ca94e5cb5c951be9e3563e26cd26dd763147ac393eeec4f" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.259641 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bfbe340de7186cba3ca94e5cb5c951be9e3563e26cd26dd763147ac393eeec4f"} err="failed to get container status \"bfbe340de7186cba3ca94e5cb5c951be9e3563e26cd26dd763147ac393eeec4f\": rpc error: code = NotFound desc = could not find container \"bfbe340de7186cba3ca94e5cb5c951be9e3563e26cd26dd763147ac393eeec4f\": container with ID starting with bfbe340de7186cba3ca94e5cb5c951be9e3563e26cd26dd763147ac393eeec4f not found: ID does not exist" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.259681 4558 scope.go:117] "RemoveContainer" containerID="9b54f5f4a254eb9083539aa947157b34dfeea7934f5c2b37060ecf2e8e7a304a" Jan 20 17:50:31 crc kubenswrapper[4558]: E0120 17:50:31.260636 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9b54f5f4a254eb9083539aa947157b34dfeea7934f5c2b37060ecf2e8e7a304a\": container with ID starting with 9b54f5f4a254eb9083539aa947157b34dfeea7934f5c2b37060ecf2e8e7a304a not found: ID does not exist" containerID="9b54f5f4a254eb9083539aa947157b34dfeea7934f5c2b37060ecf2e8e7a304a" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.260731 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9b54f5f4a254eb9083539aa947157b34dfeea7934f5c2b37060ecf2e8e7a304a"} err="failed to get container status \"9b54f5f4a254eb9083539aa947157b34dfeea7934f5c2b37060ecf2e8e7a304a\": rpc error: code = NotFound desc = could not find container \"9b54f5f4a254eb9083539aa947157b34dfeea7934f5c2b37060ecf2e8e7a304a\": container with ID starting with 9b54f5f4a254eb9083539aa947157b34dfeea7934f5c2b37060ecf2e8e7a304a not found: ID does not exist" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.263147 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-worker-97cdbfcb5-xwv8l"] Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.264140 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-scripts\") pod \"cinder-api-0\" (UID: 
\"3ec0f915-5676-4d2a-b743-1f8ee11c01ac\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.264208 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-logs\") pod \"cinder-api-0\" (UID: \"3ec0f915-5676-4d2a-b743-1f8ee11c01ac\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.264291 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"3ec0f915-5676-4d2a-b743-1f8ee11c01ac\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.264390 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/07e11378-90b7-4994-99fa-6e2d78b61f63-config-data\") pod \"glance-default-external-api-0\" (UID: \"07e11378-90b7-4994-99fa-6e2d78b61f63\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.264452 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/07e11378-90b7-4994-99fa-6e2d78b61f63-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"07e11378-90b7-4994-99fa-6e2d78b61f63\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.264545 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/07e11378-90b7-4994-99fa-6e2d78b61f63-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"07e11378-90b7-4994-99fa-6e2d78b61f63\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.264608 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-etc-machine-id\") pod \"cinder-api-0\" (UID: \"3ec0f915-5676-4d2a-b743-1f8ee11c01ac\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.264637 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-public-tls-certs\") pod \"cinder-api-0\" (UID: \"3ec0f915-5676-4d2a-b743-1f8ee11c01ac\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.264659 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/07e11378-90b7-4994-99fa-6e2d78b61f63-scripts\") pod \"glance-default-external-api-0\" (UID: \"07e11378-90b7-4994-99fa-6e2d78b61f63\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.264776 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-drjj7\" (UniqueName: \"kubernetes.io/projected/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-kube-api-access-drjj7\") pod \"cinder-api-0\" (UID: \"3ec0f915-5676-4d2a-b743-1f8ee11c01ac\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:50:31 crc 
kubenswrapper[4558]: I0120 17:50:31.264876 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-config-data\") pod \"cinder-api-0\" (UID: \"3ec0f915-5676-4d2a-b743-1f8ee11c01ac\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.264949 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pq2q4\" (UniqueName: \"kubernetes.io/projected/07e11378-90b7-4994-99fa-6e2d78b61f63-kube-api-access-pq2q4\") pod \"glance-default-external-api-0\" (UID: \"07e11378-90b7-4994-99fa-6e2d78b61f63\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.265033 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/07e11378-90b7-4994-99fa-6e2d78b61f63-logs\") pod \"glance-default-external-api-0\" (UID: \"07e11378-90b7-4994-99fa-6e2d78b61f63\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.265247 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-etc-machine-id\") pod \"cinder-api-0\" (UID: \"3ec0f915-5676-4d2a-b743-1f8ee11c01ac\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: E0120 17:50:31.265391 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-cinder-public-svc: secret "cert-cinder-public-svc" not found Jan 20 17:50:31 crc kubenswrapper[4558]: E0120 17:50:31.265455 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-public-tls-certs podName:3ec0f915-5676-4d2a-b743-1f8ee11c01ac nodeName:}" failed. No retries permitted until 2026-01-20 17:50:31.765434539 +0000 UTC m=+4125.525772506 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-public-tls-certs") pod "cinder-api-0" (UID: "3ec0f915-5676-4d2a-b743-1f8ee11c01ac") : secret "cert-cinder-public-svc" not found Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.265998 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/07e11378-90b7-4994-99fa-6e2d78b61f63-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"07e11378-90b7-4994-99fa-6e2d78b61f63\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.266043 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"3ec0f915-5676-4d2a-b743-1f8ee11c01ac\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.266222 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"07e11378-90b7-4994-99fa-6e2d78b61f63\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.266264 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-config-data-custom\") pod \"cinder-api-0\" (UID: \"3ec0f915-5676-4d2a-b743-1f8ee11c01ac\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.266039 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/07e11378-90b7-4994-99fa-6e2d78b61f63-logs\") pod \"glance-default-external-api-0\" (UID: \"07e11378-90b7-4994-99fa-6e2d78b61f63\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.267596 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"07e11378-90b7-4994-99fa-6e2d78b61f63\") device mount path \"/mnt/openstack/pv09\"" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: E0120 17:50:31.268250 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-glance-default-public-svc: secret "cert-glance-default-public-svc" not found Jan 20 17:50:31 crc kubenswrapper[4558]: E0120 17:50:31.268382 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/07e11378-90b7-4994-99fa-6e2d78b61f63-public-tls-certs podName:07e11378-90b7-4994-99fa-6e2d78b61f63 nodeName:}" failed. No retries permitted until 2026-01-20 17:50:31.768334022 +0000 UTC m=+4125.528671989 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/07e11378-90b7-4994-99fa-6e2d78b61f63-public-tls-certs") pod "glance-default-external-api-0" (UID: "07e11378-90b7-4994-99fa-6e2d78b61f63") : secret "cert-glance-default-public-svc" not found Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.269214 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-logs\") pod \"cinder-api-0\" (UID: \"3ec0f915-5676-4d2a-b743-1f8ee11c01ac\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: E0120 17:50:31.269355 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-cinder-internal-svc: secret "cert-cinder-internal-svc" not found Jan 20 17:50:31 crc kubenswrapper[4558]: E0120 17:50:31.269393 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-internal-tls-certs podName:3ec0f915-5676-4d2a-b743-1f8ee11c01ac nodeName:}" failed. No retries permitted until 2026-01-20 17:50:31.769381871 +0000 UTC m=+4125.529719839 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-internal-tls-certs") pod "cinder-api-0" (UID: "3ec0f915-5676-4d2a-b743-1f8ee11c01ac") : secret "cert-cinder-internal-svc" not found Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.270922 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-scripts\") pod \"cinder-api-0\" (UID: \"3ec0f915-5676-4d2a-b743-1f8ee11c01ac\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.272692 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/07e11378-90b7-4994-99fa-6e2d78b61f63-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"07e11378-90b7-4994-99fa-6e2d78b61f63\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.278292 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-config-data-custom\") pod \"cinder-api-0\" (UID: \"3ec0f915-5676-4d2a-b743-1f8ee11c01ac\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.278813 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-config-data\") pod \"cinder-api-0\" (UID: \"3ec0f915-5676-4d2a-b743-1f8ee11c01ac\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.280357 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/07e11378-90b7-4994-99fa-6e2d78b61f63-scripts\") pod \"glance-default-external-api-0\" (UID: \"07e11378-90b7-4994-99fa-6e2d78b61f63\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.280837 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-drjj7\" (UniqueName: 
\"kubernetes.io/projected/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-kube-api-access-drjj7\") pod \"cinder-api-0\" (UID: \"3ec0f915-5676-4d2a-b743-1f8ee11c01ac\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.280850 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/07e11378-90b7-4994-99fa-6e2d78b61f63-config-data\") pod \"glance-default-external-api-0\" (UID: \"07e11378-90b7-4994-99fa-6e2d78b61f63\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.283579 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/07e11378-90b7-4994-99fa-6e2d78b61f63-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"07e11378-90b7-4994-99fa-6e2d78b61f63\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.284178 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pq2q4\" (UniqueName: \"kubernetes.io/projected/07e11378-90b7-4994-99fa-6e2d78b61f63-kube-api-access-pq2q4\") pod \"glance-default-external-api-0\" (UID: \"07e11378-90b7-4994-99fa-6e2d78b61f63\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.284432 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"3ec0f915-5676-4d2a-b743-1f8ee11c01ac\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.298145 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"07e11378-90b7-4994-99fa-6e2d78b61f63\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.312688 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.368459 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/51d19369-14ac-4d62-ab05-9c5830856622-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"51d19369-14ac-4d62-ab05-9c5830856622\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.368513 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.368538 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/80a7bd73-c63f-4a27-9d3d-3fef760c025f-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"80a7bd73-c63f-4a27-9d3d-3fef760c025f\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.368626 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/80a7bd73-c63f-4a27-9d3d-3fef760c025f-config\") pod \"ovn-northd-0\" (UID: \"80a7bd73-c63f-4a27-9d3d-3fef760c025f\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:50:31 crc kubenswrapper[4558]: E0120 17:50:31.368742 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-ovndbcluster-nb-ovndbs: secret "cert-ovndbcluster-nb-ovndbs" not found Jan 20 17:50:31 crc kubenswrapper[4558]: E0120 17:50:31.368795 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-ovsdbserver-nb-tls-certs podName:bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162 nodeName:}" failed. No retries permitted until 2026-01-20 17:50:35.3687817 +0000 UTC m=+4129.129119658 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "ovsdbserver-nb-tls-certs" (UniqueName: "kubernetes.io/secret/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-ovsdbserver-nb-tls-certs") pod "ovsdbserver-nb-0" (UID: "bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162") : secret "cert-ovndbcluster-nb-ovndbs" not found Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.368900 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51d19369-14ac-4d62-ab05-9c5830856622-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"51d19369-14ac-4d62-ab05-9c5830856622\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.368961 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rvzsg\" (UniqueName: \"kubernetes.io/projected/51d19369-14ac-4d62-ab05-9c5830856622-kube-api-access-rvzsg\") pod \"cinder-scheduler-0\" (UID: \"51d19369-14ac-4d62-ab05-9c5830856622\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.369006 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/80a7bd73-c63f-4a27-9d3d-3fef760c025f-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"80a7bd73-c63f-4a27-9d3d-3fef760c025f\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.369082 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/80a7bd73-c63f-4a27-9d3d-3fef760c025f-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"80a7bd73-c63f-4a27-9d3d-3fef760c025f\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.369453 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/80a7bd73-c63f-4a27-9d3d-3fef760c025f-scripts\") pod \"ovn-northd-0\" (UID: \"80a7bd73-c63f-4a27-9d3d-3fef760c025f\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.369518 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80a7bd73-c63f-4a27-9d3d-3fef760c025f-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"80a7bd73-c63f-4a27-9d3d-3fef760c025f\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.369545 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.369568 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/51d19369-14ac-4d62-ab05-9c5830856622-config-data\") pod \"cinder-scheduler-0\" (UID: \"51d19369-14ac-4d62-ab05-9c5830856622\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.369595 4558 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/51d19369-14ac-4d62-ab05-9c5830856622-scripts\") pod \"cinder-scheduler-0\" (UID: \"51d19369-14ac-4d62-ab05-9c5830856622\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.369666 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/51d19369-14ac-4d62-ab05-9c5830856622-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"51d19369-14ac-4d62-ab05-9c5830856622\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:50:31 crc kubenswrapper[4558]: E0120 17:50:31.369796 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-ovn-metrics: secret "cert-ovn-metrics" not found Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.369803 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-klmg7\" (UniqueName: \"kubernetes.io/projected/80a7bd73-c63f-4a27-9d3d-3fef760c025f-kube-api-access-klmg7\") pod \"ovn-northd-0\" (UID: \"80a7bd73-c63f-4a27-9d3d-3fef760c025f\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:50:31 crc kubenswrapper[4558]: E0120 17:50:31.369829 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-metrics-certs-tls-certs podName:bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162 nodeName:}" failed. No retries permitted until 2026-01-20 17:50:35.369820605 +0000 UTC m=+4129.130158572 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs-tls-certs" (UniqueName: "kubernetes.io/secret/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-metrics-certs-tls-certs") pod "ovsdbserver-nb-0" (UID: "bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162") : secret "cert-ovn-metrics" not found Jan 20 17:50:31 crc kubenswrapper[4558]: E0120 17:50:31.369884 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-kube-state-metrics-svc: secret "cert-kube-state-metrics-svc" not found Jan 20 17:50:31 crc kubenswrapper[4558]: E0120 17:50:31.369907 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/49822b01-63d0-40d3-b604-7980891b0683-kube-state-metrics-tls-certs podName:49822b01-63d0-40d3-b604-7980891b0683 nodeName:}" failed. No retries permitted until 2026-01-20 17:50:33.369899032 +0000 UTC m=+4127.130236999 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-state-metrics-tls-certs" (UniqueName: "kubernetes.io/secret/49822b01-63d0-40d3-b604-7980891b0683-kube-state-metrics-tls-certs") pod "kube-state-metrics-0" (UID: "49822b01-63d0-40d3-b604-7980891b0683") : secret "cert-kube-state-metrics-svc" not found Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.433944 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.471298 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/80a7bd73-c63f-4a27-9d3d-3fef760c025f-scripts\") pod \"ovn-northd-0\" (UID: \"80a7bd73-c63f-4a27-9d3d-3fef760c025f\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.471360 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80a7bd73-c63f-4a27-9d3d-3fef760c025f-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"80a7bd73-c63f-4a27-9d3d-3fef760c025f\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.471391 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/51d19369-14ac-4d62-ab05-9c5830856622-config-data\") pod \"cinder-scheduler-0\" (UID: \"51d19369-14ac-4d62-ab05-9c5830856622\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.471409 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/51d19369-14ac-4d62-ab05-9c5830856622-scripts\") pod \"cinder-scheduler-0\" (UID: \"51d19369-14ac-4d62-ab05-9c5830856622\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.471444 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/51d19369-14ac-4d62-ab05-9c5830856622-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"51d19369-14ac-4d62-ab05-9c5830856622\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.471501 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-klmg7\" (UniqueName: \"kubernetes.io/projected/80a7bd73-c63f-4a27-9d3d-3fef760c025f-kube-api-access-klmg7\") pod \"ovn-northd-0\" (UID: \"80a7bd73-c63f-4a27-9d3d-3fef760c025f\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.471529 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/51d19369-14ac-4d62-ab05-9c5830856622-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"51d19369-14ac-4d62-ab05-9c5830856622\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.471555 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/80a7bd73-c63f-4a27-9d3d-3fef760c025f-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"80a7bd73-c63f-4a27-9d3d-3fef760c025f\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.471580 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/80a7bd73-c63f-4a27-9d3d-3fef760c025f-config\") pod \"ovn-northd-0\" (UID: \"80a7bd73-c63f-4a27-9d3d-3fef760c025f\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.471625 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51d19369-14ac-4d62-ab05-9c5830856622-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"51d19369-14ac-4d62-ab05-9c5830856622\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.471651 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rvzsg\" (UniqueName: \"kubernetes.io/projected/51d19369-14ac-4d62-ab05-9c5830856622-kube-api-access-rvzsg\") pod \"cinder-scheduler-0\" (UID: \"51d19369-14ac-4d62-ab05-9c5830856622\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.471666 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/80a7bd73-c63f-4a27-9d3d-3fef760c025f-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"80a7bd73-c63f-4a27-9d3d-3fef760c025f\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.471700 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/80a7bd73-c63f-4a27-9d3d-3fef760c025f-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"80a7bd73-c63f-4a27-9d3d-3fef760c025f\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:50:31 crc kubenswrapper[4558]: E0120 17:50:31.471885 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-ovn-metrics: secret "cert-ovn-metrics" not found Jan 20 17:50:31 crc kubenswrapper[4558]: E0120 17:50:31.471921 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/80a7bd73-c63f-4a27-9d3d-3fef760c025f-metrics-certs-tls-certs podName:80a7bd73-c63f-4a27-9d3d-3fef760c025f nodeName:}" failed. No retries permitted until 2026-01-20 17:50:31.971912136 +0000 UTC m=+4125.732250093 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs-tls-certs" (UniqueName: "kubernetes.io/secret/80a7bd73-c63f-4a27-9d3d-3fef760c025f-metrics-certs-tls-certs") pod "ovn-northd-0" (UID: "80a7bd73-c63f-4a27-9d3d-3fef760c025f") : secret "cert-ovn-metrics" not found Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.475750 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/80a7bd73-c63f-4a27-9d3d-3fef760c025f-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"80a7bd73-c63f-4a27-9d3d-3fef760c025f\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.477780 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/80a7bd73-c63f-4a27-9d3d-3fef760c025f-scripts\") pod \"ovn-northd-0\" (UID: \"80a7bd73-c63f-4a27-9d3d-3fef760c025f\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.477930 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/80a7bd73-c63f-4a27-9d3d-3fef760c025f-config\") pod \"ovn-northd-0\" (UID: \"80a7bd73-c63f-4a27-9d3d-3fef760c025f\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:50:31 crc kubenswrapper[4558]: E0120 17:50:31.478026 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-ovnnorthd-ovndbs: secret "cert-ovnnorthd-ovndbs" not found Jan 20 17:50:31 crc kubenswrapper[4558]: E0120 17:50:31.478058 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/80a7bd73-c63f-4a27-9d3d-3fef760c025f-ovn-northd-tls-certs podName:80a7bd73-c63f-4a27-9d3d-3fef760c025f nodeName:}" failed. No retries permitted until 2026-01-20 17:50:31.978047873 +0000 UTC m=+4125.738385840 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "ovn-northd-tls-certs" (UniqueName: "kubernetes.io/secret/80a7bd73-c63f-4a27-9d3d-3fef760c025f-ovn-northd-tls-certs") pod "ovn-northd-0" (UID: "80a7bd73-c63f-4a27-9d3d-3fef760c025f") : secret "cert-ovnnorthd-ovndbs" not found Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.478288 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/51d19369-14ac-4d62-ab05-9c5830856622-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"51d19369-14ac-4d62-ab05-9c5830856622\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.479542 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/51d19369-14ac-4d62-ab05-9c5830856622-config-data\") pod \"cinder-scheduler-0\" (UID: \"51d19369-14ac-4d62-ab05-9c5830856622\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.485611 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51d19369-14ac-4d62-ab05-9c5830856622-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"51d19369-14ac-4d62-ab05-9c5830856622\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.485704 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/51d19369-14ac-4d62-ab05-9c5830856622-scripts\") pod \"cinder-scheduler-0\" (UID: \"51d19369-14ac-4d62-ab05-9c5830856622\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.485922 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80a7bd73-c63f-4a27-9d3d-3fef760c025f-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"80a7bd73-c63f-4a27-9d3d-3fef760c025f\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.497644 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/51d19369-14ac-4d62-ab05-9c5830856622-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"51d19369-14ac-4d62-ab05-9c5830856622\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.502066 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rvzsg\" (UniqueName: \"kubernetes.io/projected/51d19369-14ac-4d62-ab05-9c5830856622-kube-api-access-rvzsg\") pod \"cinder-scheduler-0\" (UID: \"51d19369-14ac-4d62-ab05-9c5830856622\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.503109 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-klmg7\" (UniqueName: \"kubernetes.io/projected/80a7bd73-c63f-4a27-9d3d-3fef760c025f-kube-api-access-klmg7\") pod \"ovn-northd-0\" (UID: \"80a7bd73-c63f-4a27-9d3d-3fef760c025f\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:50:31 crc kubenswrapper[4558]: W0120 17:50:31.519459 4558 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podacb2f7f3_f6d0_4b4f_936f_baee3dfa3938.slice/crio-58484e90e24ffc4180dd8312c466efe73ea03a93aeb8a64c6d07d2f682b11f04 WatchSource:0}: Error finding container 58484e90e24ffc4180dd8312c466efe73ea03a93aeb8a64c6d07d2f682b11f04: Status 404 returned error can't find the container with id 58484e90e24ffc4180dd8312c466efe73ea03a93aeb8a64c6d07d2f682b11f04 Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.521604 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.529484 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"28febf86-c729-498e-b3af-0321468d80f3","Type":"ContainerDied","Data":"4ab94929b6db42289fce351b21eb7a8fab35f0e4c9c164866cbbbd10ba889df6"} Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.529539 4558 scope.go:117] "RemoveContainer" containerID="d57bd37afc98044774c1f3fa1bd0ea83a8cca9c7814e87365a229f5ac1765f05" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.529675 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.539466 4558 generic.go:334] "Generic (PLEG): container finished" podID="b13b98ea-20eb-452f-9316-a7acdeb18406" containerID="bccb9b380d5d7d133e1f9deb550275ef3efade0d9df8c3d33db8b87c1a6fd80b" exitCode=0 Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.539764 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"b13b98ea-20eb-452f-9316-a7acdeb18406","Type":"ContainerDied","Data":"bccb9b380d5d7d133e1f9deb550275ef3efade0d9df8c3d33db8b87c1a6fd80b"} Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.539788 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"b13b98ea-20eb-452f-9316-a7acdeb18406","Type":"ContainerDied","Data":"b20692e4087358e9fee4d19b58456d0f8695163aa66850f3c0633fd5582f37b8"} Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.539839 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.554823 4558 scope.go:117] "RemoveContainer" containerID="8e218874514f86011edef030a16090a4d678fd4bb3ef20d8a141af1af291d2c9" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.562675 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.567263 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.572423 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fjmng\" (UniqueName: \"kubernetes.io/projected/b13b98ea-20eb-452f-9316-a7acdeb18406-kube-api-access-fjmng\") pod \"b13b98ea-20eb-452f-9316-a7acdeb18406\" (UID: \"b13b98ea-20eb-452f-9316-a7acdeb18406\") " Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.572563 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b13b98ea-20eb-452f-9316-a7acdeb18406-config-data\") pod \"b13b98ea-20eb-452f-9316-a7acdeb18406\" (UID: \"b13b98ea-20eb-452f-9316-a7acdeb18406\") " Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.572863 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b13b98ea-20eb-452f-9316-a7acdeb18406-combined-ca-bundle\") pod \"b13b98ea-20eb-452f-9316-a7acdeb18406\" (UID: \"b13b98ea-20eb-452f-9316-a7acdeb18406\") " Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.573338 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/18c09849-702b-4939-9cf1-0e06c6adc889-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"18c09849-702b-4939-9cf1-0e06c6adc889\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: E0120 17:50:31.573712 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-glance-default-internal-svc: secret "cert-glance-default-internal-svc" not found Jan 20 17:50:31 crc kubenswrapper[4558]: E0120 17:50:31.573764 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/18c09849-702b-4939-9cf1-0e06c6adc889-internal-tls-certs podName:18c09849-702b-4939-9cf1-0e06c6adc889 nodeName:}" failed. No retries permitted until 2026-01-20 17:50:32.573748827 +0000 UTC m=+4126.334086794 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/18c09849-702b-4939-9cf1-0e06c6adc889-internal-tls-certs") pod "glance-default-internal-api-0" (UID: "18c09849-702b-4939-9cf1-0e06c6adc889") : secret "cert-glance-default-internal-svc" not found Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.580154 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.580216 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b13b98ea-20eb-452f-9316-a7acdeb18406-kube-api-access-fjmng" (OuterVolumeSpecName: "kube-api-access-fjmng") pod "b13b98ea-20eb-452f-9316-a7acdeb18406" (UID: "b13b98ea-20eb-452f-9316-a7acdeb18406"). InnerVolumeSpecName "kube-api-access-fjmng". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.590582 4558 scope.go:117] "RemoveContainer" containerID="bccb9b380d5d7d133e1f9deb550275ef3efade0d9df8c3d33db8b87c1a6fd80b" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.598035 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:50:31 crc kubenswrapper[4558]: E0120 17:50:31.598618 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b13b98ea-20eb-452f-9316-a7acdeb18406" containerName="nova-scheduler-scheduler" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.598663 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b13b98ea-20eb-452f-9316-a7acdeb18406" containerName="nova-scheduler-scheduler" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.599051 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b13b98ea-20eb-452f-9316-a7acdeb18406" containerName="nova-scheduler-scheduler" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.600318 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.600752 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b13b98ea-20eb-452f-9316-a7acdeb18406-config-data" (OuterVolumeSpecName: "config-data") pod "b13b98ea-20eb-452f-9316-a7acdeb18406" (UID: "b13b98ea-20eb-452f-9316-a7acdeb18406"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.603841 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-public-svc" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.603981 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-api-config-data" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.604016 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-internal-svc" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.608472 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b13b98ea-20eb-452f-9316-a7acdeb18406-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b13b98ea-20eb-452f-9316-a7acdeb18406" (UID: "b13b98ea-20eb-452f-9316-a7acdeb18406"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.608993 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.630433 4558 scope.go:117] "RemoveContainer" containerID="bccb9b380d5d7d133e1f9deb550275ef3efade0d9df8c3d33db8b87c1a6fd80b" Jan 20 17:50:31 crc kubenswrapper[4558]: E0120 17:50:31.630909 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bccb9b380d5d7d133e1f9deb550275ef3efade0d9df8c3d33db8b87c1a6fd80b\": container with ID starting with bccb9b380d5d7d133e1f9deb550275ef3efade0d9df8c3d33db8b87c1a6fd80b not found: ID does not exist" containerID="bccb9b380d5d7d133e1f9deb550275ef3efade0d9df8c3d33db8b87c1a6fd80b" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.630976 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bccb9b380d5d7d133e1f9deb550275ef3efade0d9df8c3d33db8b87c1a6fd80b"} err="failed to get container status \"bccb9b380d5d7d133e1f9deb550275ef3efade0d9df8c3d33db8b87c1a6fd80b\": rpc error: code = NotFound desc = could not find container \"bccb9b380d5d7d133e1f9deb550275ef3efade0d9df8c3d33db8b87c1a6fd80b\": container with ID starting with bccb9b380d5d7d133e1f9deb550275ef3efade0d9df8c3d33db8b87c1a6fd80b not found: ID does not exist" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.677038 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b13b98ea-20eb-452f-9316-a7acdeb18406-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.677076 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b13b98ea-20eb-452f-9316-a7acdeb18406-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.677089 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fjmng\" (UniqueName: \"kubernetes.io/projected/b13b98ea-20eb-452f-9316-a7acdeb18406-kube-api-access-fjmng\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.778775 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/733a1ae1-a917-447a-afa6-67cf7d688f86-internal-tls-certs\") pod \"nova-api-0\" (UID: \"733a1ae1-a917-447a-afa6-67cf7d688f86\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.778900 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/07e11378-90b7-4994-99fa-6e2d78b61f63-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"07e11378-90b7-4994-99fa-6e2d78b61f63\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.778920 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5m8j8\" (UniqueName: \"kubernetes.io/projected/733a1ae1-a917-447a-afa6-67cf7d688f86-kube-api-access-5m8j8\") pod \"nova-api-0\" (UID: \"733a1ae1-a917-447a-afa6-67cf7d688f86\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.779021 4558 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-public-tls-certs\") pod \"cinder-api-0\" (UID: \"3ec0f915-5676-4d2a-b743-1f8ee11c01ac\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: E0120 17:50:31.779145 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-cinder-public-svc: secret "cert-cinder-public-svc" not found Jan 20 17:50:31 crc kubenswrapper[4558]: E0120 17:50:31.779295 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-public-tls-certs podName:3ec0f915-5676-4d2a-b743-1f8ee11c01ac nodeName:}" failed. No retries permitted until 2026-01-20 17:50:32.779278339 +0000 UTC m=+4126.539616307 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-public-tls-certs") pod "cinder-api-0" (UID: "3ec0f915-5676-4d2a-b743-1f8ee11c01ac") : secret "cert-cinder-public-svc" not found Jan 20 17:50:31 crc kubenswrapper[4558]: E0120 17:50:31.779199 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-glance-default-public-svc: secret "cert-glance-default-public-svc" not found Jan 20 17:50:31 crc kubenswrapper[4558]: E0120 17:50:31.779547 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/07e11378-90b7-4994-99fa-6e2d78b61f63-public-tls-certs podName:07e11378-90b7-4994-99fa-6e2d78b61f63 nodeName:}" failed. No retries permitted until 2026-01-20 17:50:32.779518671 +0000 UTC m=+4126.539856638 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/07e11378-90b7-4994-99fa-6e2d78b61f63-public-tls-certs") pod "glance-default-external-api-0" (UID: "07e11378-90b7-4994-99fa-6e2d78b61f63") : secret "cert-glance-default-public-svc" not found Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.779621 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/733a1ae1-a917-447a-afa6-67cf7d688f86-logs\") pod \"nova-api-0\" (UID: \"733a1ae1-a917-447a-afa6-67cf7d688f86\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.779650 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/733a1ae1-a917-447a-afa6-67cf7d688f86-config-data\") pod \"nova-api-0\" (UID: \"733a1ae1-a917-447a-afa6-67cf7d688f86\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.779943 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/733a1ae1-a917-447a-afa6-67cf7d688f86-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"733a1ae1-a917-447a-afa6-67cf7d688f86\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.780321 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"3ec0f915-5676-4d2a-b743-1f8ee11c01ac\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.780390 4558 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/733a1ae1-a917-447a-afa6-67cf7d688f86-public-tls-certs\") pod \"nova-api-0\" (UID: \"733a1ae1-a917-447a-afa6-67cf7d688f86\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: E0120 17:50:31.780489 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-cinder-internal-svc: secret "cert-cinder-internal-svc" not found Jan 20 17:50:31 crc kubenswrapper[4558]: E0120 17:50:31.780522 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-internal-tls-certs podName:3ec0f915-5676-4d2a-b743-1f8ee11c01ac nodeName:}" failed. No retries permitted until 2026-01-20 17:50:32.780514223 +0000 UTC m=+4126.540852190 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-internal-tls-certs") pod "cinder-api-0" (UID: "3ec0f915-5676-4d2a-b743-1f8ee11c01ac") : secret "cert-cinder-internal-svc" not found Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.836422 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:50:31 crc kubenswrapper[4558]: W0120 17:50:31.880078 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod51d19369_14ac_4d62_ab05_9c5830856622.slice/crio-c9ec0af9db1de5295d13166fc1e3cb91462f2ab0061206e844f1c3db14529e3e WatchSource:0}: Error finding container c9ec0af9db1de5295d13166fc1e3cb91462f2ab0061206e844f1c3db14529e3e: Status 404 returned error can't find the container with id c9ec0af9db1de5295d13166fc1e3cb91462f2ab0061206e844f1c3db14529e3e Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.884190 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/733a1ae1-a917-447a-afa6-67cf7d688f86-logs\") pod \"nova-api-0\" (UID: \"733a1ae1-a917-447a-afa6-67cf7d688f86\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.884257 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/733a1ae1-a917-447a-afa6-67cf7d688f86-config-data\") pod \"nova-api-0\" (UID: \"733a1ae1-a917-447a-afa6-67cf7d688f86\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.884394 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/733a1ae1-a917-447a-afa6-67cf7d688f86-logs\") pod \"nova-api-0\" (UID: \"733a1ae1-a917-447a-afa6-67cf7d688f86\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.884692 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/733a1ae1-a917-447a-afa6-67cf7d688f86-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"733a1ae1-a917-447a-afa6-67cf7d688f86\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.885058 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/733a1ae1-a917-447a-afa6-67cf7d688f86-public-tls-certs\") pod \"nova-api-0\" 
(UID: \"733a1ae1-a917-447a-afa6-67cf7d688f86\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.885095 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/733a1ae1-a917-447a-afa6-67cf7d688f86-internal-tls-certs\") pod \"nova-api-0\" (UID: \"733a1ae1-a917-447a-afa6-67cf7d688f86\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.885152 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5m8j8\" (UniqueName: \"kubernetes.io/projected/733a1ae1-a917-447a-afa6-67cf7d688f86-kube-api-access-5m8j8\") pod \"nova-api-0\" (UID: \"733a1ae1-a917-447a-afa6-67cf7d688f86\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: E0120 17:50:31.886422 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-nova-public-svc: secret "cert-nova-public-svc" not found Jan 20 17:50:31 crc kubenswrapper[4558]: E0120 17:50:31.886509 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/733a1ae1-a917-447a-afa6-67cf7d688f86-public-tls-certs podName:733a1ae1-a917-447a-afa6-67cf7d688f86 nodeName:}" failed. No retries permitted until 2026-01-20 17:50:32.386486491 +0000 UTC m=+4126.146824458 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/733a1ae1-a917-447a-afa6-67cf7d688f86-public-tls-certs") pod "nova-api-0" (UID: "733a1ae1-a917-447a-afa6-67cf7d688f86") : secret "cert-nova-public-svc" not found Jan 20 17:50:31 crc kubenswrapper[4558]: E0120 17:50:31.886560 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-nova-internal-svc: secret "cert-nova-internal-svc" not found Jan 20 17:50:31 crc kubenswrapper[4558]: E0120 17:50:31.886587 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/733a1ae1-a917-447a-afa6-67cf7d688f86-internal-tls-certs podName:733a1ae1-a917-447a-afa6-67cf7d688f86 nodeName:}" failed. No retries permitted until 2026-01-20 17:50:32.386579706 +0000 UTC m=+4126.146917674 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/733a1ae1-a917-447a-afa6-67cf7d688f86-internal-tls-certs") pod "nova-api-0" (UID: "733a1ae1-a917-447a-afa6-67cf7d688f86") : secret "cert-nova-internal-svc" not found Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.904710 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/733a1ae1-a917-447a-afa6-67cf7d688f86-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"733a1ae1-a917-447a-afa6-67cf7d688f86\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.906503 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/733a1ae1-a917-447a-afa6-67cf7d688f86-config-data\") pod \"nova-api-0\" (UID: \"733a1ae1-a917-447a-afa6-67cf7d688f86\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.907293 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5m8j8\" (UniqueName: \"kubernetes.io/projected/733a1ae1-a917-447a-afa6-67cf7d688f86-kube-api-access-5m8j8\") pod \"nova-api-0\" (UID: \"733a1ae1-a917-447a-afa6-67cf7d688f86\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.988385 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/80a7bd73-c63f-4a27-9d3d-3fef760c025f-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"80a7bd73-c63f-4a27-9d3d-3fef760c025f\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:50:31 crc kubenswrapper[4558]: I0120 17:50:31.988466 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/80a7bd73-c63f-4a27-9d3d-3fef760c025f-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"80a7bd73-c63f-4a27-9d3d-3fef760c025f\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:50:31 crc kubenswrapper[4558]: E0120 17:50:31.990919 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-ovn-metrics: secret "cert-ovn-metrics" not found Jan 20 17:50:31 crc kubenswrapper[4558]: E0120 17:50:31.990999 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/80a7bd73-c63f-4a27-9d3d-3fef760c025f-metrics-certs-tls-certs podName:80a7bd73-c63f-4a27-9d3d-3fef760c025f nodeName:}" failed. No retries permitted until 2026-01-20 17:50:32.990971162 +0000 UTC m=+4126.751309129 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs-tls-certs" (UniqueName: "kubernetes.io/secret/80a7bd73-c63f-4a27-9d3d-3fef760c025f-metrics-certs-tls-certs") pod "ovn-northd-0" (UID: "80a7bd73-c63f-4a27-9d3d-3fef760c025f") : secret "cert-ovn-metrics" not found Jan 20 17:50:31 crc kubenswrapper[4558]: E0120 17:50:31.991571 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-ovnnorthd-ovndbs: secret "cert-ovnnorthd-ovndbs" not found Jan 20 17:50:31 crc kubenswrapper[4558]: E0120 17:50:31.991662 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/80a7bd73-c63f-4a27-9d3d-3fef760c025f-ovn-northd-tls-certs podName:80a7bd73-c63f-4a27-9d3d-3fef760c025f nodeName:}" failed. No retries permitted until 2026-01-20 17:50:32.991638917 +0000 UTC m=+4126.751976884 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "ovn-northd-tls-certs" (UniqueName: "kubernetes.io/secret/80a7bd73-c63f-4a27-9d3d-3fef760c025f-ovn-northd-tls-certs") pod "ovn-northd-0" (UID: "80a7bd73-c63f-4a27-9d3d-3fef760c025f") : secret "cert-ovnnorthd-ovndbs" not found Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.059731 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.080217 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.087341 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.088653 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.092609 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-scheduler-config-data" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.095051 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.168540 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.177146 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" podUID="6254cefa-8c4a-472e-9faf-4633c6d1618e" containerName="nova-cell1-novncproxy-novncproxy" probeResult="failure" output="Get \"https://10.217.1.161:6080/vnc_lite.html\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.194417 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xzv9p\" (UniqueName: \"kubernetes.io/projected/d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2-kube-api-access-xzv9p\") pod \"nova-scheduler-0\" (UID: \"d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.194625 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.194875 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2-config-data\") pod \"nova-scheduler-0\" (UID: \"d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.295955 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a3e03093-9a03-4aac-bcde-475dda6c3dcc-config-data\") pod \"a3e03093-9a03-4aac-bcde-475dda6c3dcc\" (UID: \"a3e03093-9a03-4aac-bcde-475dda6c3dcc\") " Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.296009 4558 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r7zcm\" (UniqueName: \"kubernetes.io/projected/a3e03093-9a03-4aac-bcde-475dda6c3dcc-kube-api-access-r7zcm\") pod \"a3e03093-9a03-4aac-bcde-475dda6c3dcc\" (UID: \"a3e03093-9a03-4aac-bcde-475dda6c3dcc\") " Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.296081 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a3e03093-9a03-4aac-bcde-475dda6c3dcc-sg-core-conf-yaml\") pod \"a3e03093-9a03-4aac-bcde-475dda6c3dcc\" (UID: \"a3e03093-9a03-4aac-bcde-475dda6c3dcc\") " Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.296230 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a3e03093-9a03-4aac-bcde-475dda6c3dcc-scripts\") pod \"a3e03093-9a03-4aac-bcde-475dda6c3dcc\" (UID: \"a3e03093-9a03-4aac-bcde-475dda6c3dcc\") " Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.296271 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/a3e03093-9a03-4aac-bcde-475dda6c3dcc-ceilometer-tls-certs\") pod \"a3e03093-9a03-4aac-bcde-475dda6c3dcc\" (UID: \"a3e03093-9a03-4aac-bcde-475dda6c3dcc\") " Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.296308 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a3e03093-9a03-4aac-bcde-475dda6c3dcc-run-httpd\") pod \"a3e03093-9a03-4aac-bcde-475dda6c3dcc\" (UID: \"a3e03093-9a03-4aac-bcde-475dda6c3dcc\") " Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.296336 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3e03093-9a03-4aac-bcde-475dda6c3dcc-combined-ca-bundle\") pod \"a3e03093-9a03-4aac-bcde-475dda6c3dcc\" (UID: \"a3e03093-9a03-4aac-bcde-475dda6c3dcc\") " Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.296364 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a3e03093-9a03-4aac-bcde-475dda6c3dcc-log-httpd\") pod \"a3e03093-9a03-4aac-bcde-475dda6c3dcc\" (UID: \"a3e03093-9a03-4aac-bcde-475dda6c3dcc\") " Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.296776 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2-config-data\") pod \"nova-scheduler-0\" (UID: \"d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.296869 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/603501da-057d-4d3b-a682-e55e7e2916f4-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"603501da-057d-4d3b-a682-e55e7e2916f4\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.296953 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xzv9p\" (UniqueName: \"kubernetes.io/projected/d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2-kube-api-access-xzv9p\") pod \"nova-scheduler-0\" (UID: \"d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 
17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.297053 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.298192 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a3e03093-9a03-4aac-bcde-475dda6c3dcc-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "a3e03093-9a03-4aac-bcde-475dda6c3dcc" (UID: "a3e03093-9a03-4aac-bcde-475dda6c3dcc"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:50:32 crc kubenswrapper[4558]: E0120 17:50:32.298396 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-galera-openstack-svc: secret "cert-galera-openstack-svc" not found Jan 20 17:50:32 crc kubenswrapper[4558]: E0120 17:50:32.298463 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/603501da-057d-4d3b-a682-e55e7e2916f4-galera-tls-certs podName:603501da-057d-4d3b-a682-e55e7e2916f4 nodeName:}" failed. No retries permitted until 2026-01-20 17:50:36.298441812 +0000 UTC m=+4130.058779780 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "galera-tls-certs" (UniqueName: "kubernetes.io/secret/603501da-057d-4d3b-a682-e55e7e2916f4-galera-tls-certs") pod "openstack-galera-0" (UID: "603501da-057d-4d3b-a682-e55e7e2916f4") : secret "cert-galera-openstack-svc" not found Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.298764 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a3e03093-9a03-4aac-bcde-475dda6c3dcc-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "a3e03093-9a03-4aac-bcde-475dda6c3dcc" (UID: "a3e03093-9a03-4aac-bcde-475dda6c3dcc"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.303529 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a3e03093-9a03-4aac-bcde-475dda6c3dcc-kube-api-access-r7zcm" (OuterVolumeSpecName: "kube-api-access-r7zcm") pod "a3e03093-9a03-4aac-bcde-475dda6c3dcc" (UID: "a3e03093-9a03-4aac-bcde-475dda6c3dcc"). InnerVolumeSpecName "kube-api-access-r7zcm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.304301 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a3e03093-9a03-4aac-bcde-475dda6c3dcc-scripts" (OuterVolumeSpecName: "scripts") pod "a3e03093-9a03-4aac-bcde-475dda6c3dcc" (UID: "a3e03093-9a03-4aac-bcde-475dda6c3dcc"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.304679 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.309521 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2-config-data\") pod \"nova-scheduler-0\" (UID: \"d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.311532 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xzv9p\" (UniqueName: \"kubernetes.io/projected/d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2-kube-api-access-xzv9p\") pod \"nova-scheduler-0\" (UID: \"d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.317954 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a3e03093-9a03-4aac-bcde-475dda6c3dcc-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "a3e03093-9a03-4aac-bcde-475dda6c3dcc" (UID: "a3e03093-9a03-4aac-bcde-475dda6c3dcc"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.336338 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a3e03093-9a03-4aac-bcde-475dda6c3dcc-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "a3e03093-9a03-4aac-bcde-475dda6c3dcc" (UID: "a3e03093-9a03-4aac-bcde-475dda6c3dcc"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.354083 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a3e03093-9a03-4aac-bcde-475dda6c3dcc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a3e03093-9a03-4aac-bcde-475dda6c3dcc" (UID: "a3e03093-9a03-4aac-bcde-475dda6c3dcc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.367265 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a3e03093-9a03-4aac-bcde-475dda6c3dcc-config-data" (OuterVolumeSpecName: "config-data") pod "a3e03093-9a03-4aac-bcde-475dda6c3dcc" (UID: "a3e03093-9a03-4aac-bcde-475dda6c3dcc"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.399461 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/fc60cc21-f159-40bf-beca-c62718f57b9c-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"fc60cc21-f159-40bf-beca-c62718f57b9c\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.399602 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/733a1ae1-a917-447a-afa6-67cf7d688f86-public-tls-certs\") pod \"nova-api-0\" (UID: \"733a1ae1-a917-447a-afa6-67cf7d688f86\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.399634 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/733a1ae1-a917-447a-afa6-67cf7d688f86-internal-tls-certs\") pod \"nova-api-0\" (UID: \"733a1ae1-a917-447a-afa6-67cf7d688f86\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:50:32 crc kubenswrapper[4558]: E0120 17:50:32.399688 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-ovndbcluster-sb-ovndbs: secret "cert-ovndbcluster-sb-ovndbs" not found Jan 20 17:50:32 crc kubenswrapper[4558]: E0120 17:50:32.399757 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fc60cc21-f159-40bf-beca-c62718f57b9c-ovsdbserver-sb-tls-certs podName:fc60cc21-f159-40bf-beca-c62718f57b9c nodeName:}" failed. No retries permitted until 2026-01-20 17:50:36.399737106 +0000 UTC m=+4130.160075073 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "ovsdbserver-sb-tls-certs" (UniqueName: "kubernetes.io/secret/fc60cc21-f159-40bf-beca-c62718f57b9c-ovsdbserver-sb-tls-certs") pod "ovsdbserver-sb-0" (UID: "fc60cc21-f159-40bf-beca-c62718f57b9c") : secret "cert-ovndbcluster-sb-ovndbs" not found Jan 20 17:50:32 crc kubenswrapper[4558]: E0120 17:50:32.399802 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-nova-internal-svc: secret "cert-nova-internal-svc" not found Jan 20 17:50:32 crc kubenswrapper[4558]: E0120 17:50:32.399834 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/733a1ae1-a917-447a-afa6-67cf7d688f86-internal-tls-certs podName:733a1ae1-a917-447a-afa6-67cf7d688f86 nodeName:}" failed. No retries permitted until 2026-01-20 17:50:33.399823478 +0000 UTC m=+4127.160161445 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/733a1ae1-a917-447a-afa6-67cf7d688f86-internal-tls-certs") pod "nova-api-0" (UID: "733a1ae1-a917-447a-afa6-67cf7d688f86") : secret "cert-nova-internal-svc" not found Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.399910 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/5239f3e2-7f54-4042-b0da-02f7608224df-memcached-tls-certs\") pod \"memcached-0\" (UID: \"5239f3e2-7f54-4042-b0da-02f7608224df\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.400003 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/fc60cc21-f159-40bf-beca-c62718f57b9c-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"fc60cc21-f159-40bf-beca-c62718f57b9c\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.401205 4558 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/a3e03093-9a03-4aac-bcde-475dda6c3dcc-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:32 crc kubenswrapper[4558]: E0120 17:50:32.401309 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-memcached-svc: secret "cert-memcached-svc" not found Jan 20 17:50:32 crc kubenswrapper[4558]: E0120 17:50:32.401367 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5239f3e2-7f54-4042-b0da-02f7608224df-memcached-tls-certs podName:5239f3e2-7f54-4042-b0da-02f7608224df nodeName:}" failed. No retries permitted until 2026-01-20 17:50:36.401350599 +0000 UTC m=+4130.161688566 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "memcached-tls-certs" (UniqueName: "kubernetes.io/secret/5239f3e2-7f54-4042-b0da-02f7608224df-memcached-tls-certs") pod "memcached-0" (UID: "5239f3e2-7f54-4042-b0da-02f7608224df") : secret "cert-memcached-svc" not found Jan 20 17:50:32 crc kubenswrapper[4558]: E0120 17:50:32.401412 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-ovn-metrics: secret "cert-ovn-metrics" not found Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.401431 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a3e03093-9a03-4aac-bcde-475dda6c3dcc-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:32 crc kubenswrapper[4558]: E0120 17:50:32.401457 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fc60cc21-f159-40bf-beca-c62718f57b9c-metrics-certs-tls-certs podName:fc60cc21-f159-40bf-beca-c62718f57b9c nodeName:}" failed. No retries permitted until 2026-01-20 17:50:36.401451159 +0000 UTC m=+4130.161789125 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs-tls-certs" (UniqueName: "kubernetes.io/secret/fc60cc21-f159-40bf-beca-c62718f57b9c-metrics-certs-tls-certs") pod "ovsdbserver-sb-0" (UID: "fc60cc21-f159-40bf-beca-c62718f57b9c") : secret "cert-ovn-metrics" not found Jan 20 17:50:32 crc kubenswrapper[4558]: E0120 17:50:32.401491 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-nova-public-svc: secret "cert-nova-public-svc" not found Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.401501 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3e03093-9a03-4aac-bcde-475dda6c3dcc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:32 crc kubenswrapper[4558]: E0120 17:50:32.401522 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/733a1ae1-a917-447a-afa6-67cf7d688f86-public-tls-certs podName:733a1ae1-a917-447a-afa6-67cf7d688f86 nodeName:}" failed. No retries permitted until 2026-01-20 17:50:33.401516711 +0000 UTC m=+4127.161854679 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/733a1ae1-a917-447a-afa6-67cf7d688f86-public-tls-certs") pod "nova-api-0" (UID: "733a1ae1-a917-447a-afa6-67cf7d688f86") : secret "cert-nova-public-svc" not found Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.401532 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a3e03093-9a03-4aac-bcde-475dda6c3dcc-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.401542 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a3e03093-9a03-4aac-bcde-475dda6c3dcc-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.401554 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r7zcm\" (UniqueName: \"kubernetes.io/projected/a3e03093-9a03-4aac-bcde-475dda6c3dcc-kube-api-access-r7zcm\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.401568 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a3e03093-9a03-4aac-bcde-475dda6c3dcc-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.401581 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a3e03093-9a03-4aac-bcde-475dda6c3dcc-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.461872 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.503836 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/581c2419-48d9-4b7d-b71b-ce5cceb0326d-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"581c2419-48d9-4b7d-b71b-ce5cceb0326d\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:50:32 crc kubenswrapper[4558]: E0120 17:50:32.504014 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-nova-novncproxy-cell1-vencrypt: secret "cert-nova-novncproxy-cell1-vencrypt" not found Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.504036 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/581c2419-48d9-4b7d-b71b-ce5cceb0326d-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"581c2419-48d9-4b7d-b71b-ce5cceb0326d\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:50:32 crc kubenswrapper[4558]: E0120 17:50:32.504155 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-nova-novncproxy-cell1-public-svc: secret "cert-nova-novncproxy-cell1-public-svc" not found Jan 20 17:50:32 crc kubenswrapper[4558]: E0120 17:50:32.504206 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/581c2419-48d9-4b7d-b71b-ce5cceb0326d-vencrypt-tls-certs podName:581c2419-48d9-4b7d-b71b-ce5cceb0326d nodeName:}" failed. No retries permitted until 2026-01-20 17:50:36.504179617 +0000 UTC m=+4130.264517584 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "vencrypt-tls-certs" (UniqueName: "kubernetes.io/secret/581c2419-48d9-4b7d-b71b-ce5cceb0326d-vencrypt-tls-certs") pod "nova-cell1-novncproxy-0" (UID: "581c2419-48d9-4b7d-b71b-ce5cceb0326d") : secret "cert-nova-novncproxy-cell1-vencrypt" not found Jan 20 17:50:32 crc kubenswrapper[4558]: E0120 17:50:32.504274 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/581c2419-48d9-4b7d-b71b-ce5cceb0326d-nova-novncproxy-tls-certs podName:581c2419-48d9-4b7d-b71b-ce5cceb0326d nodeName:}" failed. No retries permitted until 2026-01-20 17:50:36.504263544 +0000 UTC m=+4130.264601512 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "nova-novncproxy-tls-certs" (UniqueName: "kubernetes.io/secret/581c2419-48d9-4b7d-b71b-ce5cceb0326d-nova-novncproxy-tls-certs") pod "nova-cell1-novncproxy-0" (UID: "581c2419-48d9-4b7d-b71b-ce5cceb0326d") : secret "cert-nova-novncproxy-cell1-public-svc" not found Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.556482 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"51d19369-14ac-4d62-ab05-9c5830856622","Type":"ContainerStarted","Data":"bb89698c06a3d8f0dc6700388a8558678523660d9134634c1c9e01a8b703cf50"} Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.556752 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"51d19369-14ac-4d62-ab05-9c5830856622","Type":"ContainerStarted","Data":"c9ec0af9db1de5295d13166fc1e3cb91462f2ab0061206e844f1c3db14529e3e"} Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.562029 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"acb2f7f3-f6d0-4b4f-936f-baee3dfa3938","Type":"ContainerStarted","Data":"06efd4d354098bbe1c72d647b10d6aecbc2f356d80570ca39ea3a76e7949f322"} Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.562086 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"acb2f7f3-f6d0-4b4f-936f-baee3dfa3938","Type":"ContainerStarted","Data":"e67785bf94379651bfb64558a92f8a11e520c0f771013a70d50dbc2e73a7d2fb"} Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.562097 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"acb2f7f3-f6d0-4b4f-936f-baee3dfa3938","Type":"ContainerStarted","Data":"58484e90e24ffc4180dd8312c466efe73ea03a93aeb8a64c6d07d2f682b11f04"} Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.574092 4558 generic.go:334] "Generic (PLEG): container finished" podID="924fa7ce-8d60-4b2f-b62b-d5e146474f71" containerID="7e2524778a402821cdb0aaf4be80d7e2f9282c3873490c09d8e4b929dc8c5e6e" exitCode=1 Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.574522 4558 scope.go:117] "RemoveContainer" containerID="7e2524778a402821cdb0aaf4be80d7e2f9282c3873490c09d8e4b929dc8c5e6e" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.588949 4558 generic.go:334] "Generic (PLEG): container finished" podID="a3e03093-9a03-4aac-bcde-475dda6c3dcc" containerID="0ef28b39a3024f10f373d8df6dbcc0e604d25efcb4ff0cd7b08a64f50ee2a001" exitCode=0 Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.589028 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.589748 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="28febf86-c729-498e-b3af-0321468d80f3" path="/var/lib/kubelet/pods/28febf86-c729-498e-b3af-0321468d80f3/volumes" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.590427 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="47de199d-0dde-4082-8b6c-99f3d202608b" path="/var/lib/kubelet/pods/47de199d-0dde-4082-8b6c-99f3d202608b/volumes" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.591208 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6dae3a04-db9e-48a2-bbe2-012c9ba55890" path="/var/lib/kubelet/pods/6dae3a04-db9e-48a2-bbe2-012c9ba55890/volumes" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.592646 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7d2f1782-aa1c-4382-b682-27ce8f37d139" path="/var/lib/kubelet/pods/7d2f1782-aa1c-4382-b682-27ce8f37d139/volumes" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.593212 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b13b98ea-20eb-452f-9316-a7acdeb18406" path="/var/lib/kubelet/pods/b13b98ea-20eb-452f-9316-a7acdeb18406/volumes" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.594224 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c5adf97d-8b90-49ca-bbeb-21dd8446a226" path="/var/lib/kubelet/pods/c5adf97d-8b90-49ca-bbeb-21dd8446a226/volumes" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.595677 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd" path="/var/lib/kubelet/pods/d0e7ef26-ab3e-4c3d-b8b2-6b08007022fd/volumes" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.596500 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d0f82b75-bed6-4779-ad55-6bec50d7faa6" path="/var/lib/kubelet/pods/d0f82b75-bed6-4779-ad55-6bec50d7faa6/volumes" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.597316 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-metadata-0" podStartSLOduration=2.597301852 podStartE2EDuration="2.597301852s" podCreationTimestamp="2026-01-20 17:50:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:50:32.586753537 +0000 UTC m=+4126.347091504" watchObservedRunningTime="2026-01-20 17:50:32.597301852 +0000 UTC m=+4126.357639819" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.597841 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2" path="/var/lib/kubelet/pods/e2e49d05-bd1d-49a2-a67f-17bdeb5c39c2/volumes" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.602374 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f98db5df-2f7f-490b-b4a0-7e9f27b07a60" path="/var/lib/kubelet/pods/f98db5df-2f7f-490b-b4a0-7e9f27b07a60/volumes" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.603194 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"924fa7ce-8d60-4b2f-b62b-d5e146474f71","Type":"ContainerDied","Data":"7e2524778a402821cdb0aaf4be80d7e2f9282c3873490c09d8e4b929dc8c5e6e"} Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.603219 4558 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"a3e03093-9a03-4aac-bcde-475dda6c3dcc","Type":"ContainerDied","Data":"0ef28b39a3024f10f373d8df6dbcc0e604d25efcb4ff0cd7b08a64f50ee2a001"} Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.603234 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"a3e03093-9a03-4aac-bcde-475dda6c3dcc","Type":"ContainerDied","Data":"74e3d636e75e3cc0823c216010f7120c37ee7e3634f3be3d44dc1b2543c86161"} Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.603253 4558 scope.go:117] "RemoveContainer" containerID="8317dbc00d30b8e256eff4e4e631c97112ea996e280ba9576ca0f29a56a05240" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.607865 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/18c09849-702b-4939-9cf1-0e06c6adc889-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"18c09849-702b-4939-9cf1-0e06c6adc889\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:50:32 crc kubenswrapper[4558]: E0120 17:50:32.608077 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-glance-default-internal-svc: secret "cert-glance-default-internal-svc" not found Jan 20 17:50:32 crc kubenswrapper[4558]: E0120 17:50:32.608118 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/18c09849-702b-4939-9cf1-0e06c6adc889-internal-tls-certs podName:18c09849-702b-4939-9cf1-0e06c6adc889 nodeName:}" failed. No retries permitted until 2026-01-20 17:50:34.608105737 +0000 UTC m=+4128.368443704 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/18c09849-702b-4939-9cf1-0e06c6adc889-internal-tls-certs") pod "glance-default-internal-api-0" (UID: "18c09849-702b-4939-9cf1-0e06c6adc889") : secret "cert-glance-default-internal-svc" not found Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.627974 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.638626 4558 scope.go:117] "RemoveContainer" containerID="dddc0d84255205f19bccd3e01061af9fd0d1e2f5dc72e122ea02fa340a0ca32c" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.643716 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.658382 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:50:32 crc kubenswrapper[4558]: E0120 17:50:32.658867 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3e03093-9a03-4aac-bcde-475dda6c3dcc" containerName="sg-core" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.658881 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3e03093-9a03-4aac-bcde-475dda6c3dcc" containerName="sg-core" Jan 20 17:50:32 crc kubenswrapper[4558]: E0120 17:50:32.658903 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3e03093-9a03-4aac-bcde-475dda6c3dcc" containerName="ceilometer-central-agent" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.658908 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3e03093-9a03-4aac-bcde-475dda6c3dcc" containerName="ceilometer-central-agent" Jan 20 17:50:32 crc kubenswrapper[4558]: E0120 17:50:32.658920 4558 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="a3e03093-9a03-4aac-bcde-475dda6c3dcc" containerName="proxy-httpd" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.658925 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3e03093-9a03-4aac-bcde-475dda6c3dcc" containerName="proxy-httpd" Jan 20 17:50:32 crc kubenswrapper[4558]: E0120 17:50:32.658945 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3e03093-9a03-4aac-bcde-475dda6c3dcc" containerName="ceilometer-notification-agent" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.658951 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3e03093-9a03-4aac-bcde-475dda6c3dcc" containerName="ceilometer-notification-agent" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.659177 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a3e03093-9a03-4aac-bcde-475dda6c3dcc" containerName="proxy-httpd" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.659200 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a3e03093-9a03-4aac-bcde-475dda6c3dcc" containerName="ceilometer-notification-agent" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.659209 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a3e03093-9a03-4aac-bcde-475dda6c3dcc" containerName="sg-core" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.659217 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a3e03093-9a03-4aac-bcde-475dda6c3dcc" containerName="ceilometer-central-agent" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.661113 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.663382 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ceilometer-internal-svc" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.664012 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.664040 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.667244 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.701062 4558 scope.go:117] "RemoveContainer" containerID="0ef28b39a3024f10f373d8df6dbcc0e604d25efcb4ff0cd7b08a64f50ee2a001" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.726812 4558 scope.go:117] "RemoveContainer" containerID="518dffb0d9b0f0146e59f64982b965f84538ad2716a9c5d37e992ce46cfdadbb" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.750920 4558 scope.go:117] "RemoveContainer" containerID="8317dbc00d30b8e256eff4e4e631c97112ea996e280ba9576ca0f29a56a05240" Jan 20 17:50:32 crc kubenswrapper[4558]: E0120 17:50:32.751302 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8317dbc00d30b8e256eff4e4e631c97112ea996e280ba9576ca0f29a56a05240\": container with ID starting with 8317dbc00d30b8e256eff4e4e631c97112ea996e280ba9576ca0f29a56a05240 not found: ID does not exist" containerID="8317dbc00d30b8e256eff4e4e631c97112ea996e280ba9576ca0f29a56a05240" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.751336 4558 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"8317dbc00d30b8e256eff4e4e631c97112ea996e280ba9576ca0f29a56a05240"} err="failed to get container status \"8317dbc00d30b8e256eff4e4e631c97112ea996e280ba9576ca0f29a56a05240\": rpc error: code = NotFound desc = could not find container \"8317dbc00d30b8e256eff4e4e631c97112ea996e280ba9576ca0f29a56a05240\": container with ID starting with 8317dbc00d30b8e256eff4e4e631c97112ea996e280ba9576ca0f29a56a05240 not found: ID does not exist" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.751355 4558 scope.go:117] "RemoveContainer" containerID="dddc0d84255205f19bccd3e01061af9fd0d1e2f5dc72e122ea02fa340a0ca32c" Jan 20 17:50:32 crc kubenswrapper[4558]: E0120 17:50:32.751643 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dddc0d84255205f19bccd3e01061af9fd0d1e2f5dc72e122ea02fa340a0ca32c\": container with ID starting with dddc0d84255205f19bccd3e01061af9fd0d1e2f5dc72e122ea02fa340a0ca32c not found: ID does not exist" containerID="dddc0d84255205f19bccd3e01061af9fd0d1e2f5dc72e122ea02fa340a0ca32c" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.751706 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dddc0d84255205f19bccd3e01061af9fd0d1e2f5dc72e122ea02fa340a0ca32c"} err="failed to get container status \"dddc0d84255205f19bccd3e01061af9fd0d1e2f5dc72e122ea02fa340a0ca32c\": rpc error: code = NotFound desc = could not find container \"dddc0d84255205f19bccd3e01061af9fd0d1e2f5dc72e122ea02fa340a0ca32c\": container with ID starting with dddc0d84255205f19bccd3e01061af9fd0d1e2f5dc72e122ea02fa340a0ca32c not found: ID does not exist" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.751745 4558 scope.go:117] "RemoveContainer" containerID="0ef28b39a3024f10f373d8df6dbcc0e604d25efcb4ff0cd7b08a64f50ee2a001" Jan 20 17:50:32 crc kubenswrapper[4558]: E0120 17:50:32.752158 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0ef28b39a3024f10f373d8df6dbcc0e604d25efcb4ff0cd7b08a64f50ee2a001\": container with ID starting with 0ef28b39a3024f10f373d8df6dbcc0e604d25efcb4ff0cd7b08a64f50ee2a001 not found: ID does not exist" containerID="0ef28b39a3024f10f373d8df6dbcc0e604d25efcb4ff0cd7b08a64f50ee2a001" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.752199 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0ef28b39a3024f10f373d8df6dbcc0e604d25efcb4ff0cd7b08a64f50ee2a001"} err="failed to get container status \"0ef28b39a3024f10f373d8df6dbcc0e604d25efcb4ff0cd7b08a64f50ee2a001\": rpc error: code = NotFound desc = could not find container \"0ef28b39a3024f10f373d8df6dbcc0e604d25efcb4ff0cd7b08a64f50ee2a001\": container with ID starting with 0ef28b39a3024f10f373d8df6dbcc0e604d25efcb4ff0cd7b08a64f50ee2a001 not found: ID does not exist" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.752216 4558 scope.go:117] "RemoveContainer" containerID="518dffb0d9b0f0146e59f64982b965f84538ad2716a9c5d37e992ce46cfdadbb" Jan 20 17:50:32 crc kubenswrapper[4558]: E0120 17:50:32.752586 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"518dffb0d9b0f0146e59f64982b965f84538ad2716a9c5d37e992ce46cfdadbb\": container with ID starting with 518dffb0d9b0f0146e59f64982b965f84538ad2716a9c5d37e992ce46cfdadbb not found: ID does not exist" 
containerID="518dffb0d9b0f0146e59f64982b965f84538ad2716a9c5d37e992ce46cfdadbb" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.752620 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"518dffb0d9b0f0146e59f64982b965f84538ad2716a9c5d37e992ce46cfdadbb"} err="failed to get container status \"518dffb0d9b0f0146e59f64982b965f84538ad2716a9c5d37e992ce46cfdadbb\": rpc error: code = NotFound desc = could not find container \"518dffb0d9b0f0146e59f64982b965f84538ad2716a9c5d37e992ce46cfdadbb\": container with ID starting with 518dffb0d9b0f0146e59f64982b965f84538ad2716a9c5d37e992ce46cfdadbb not found: ID does not exist" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.813857 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/06db8474-04b0-4525-90f0-c066ecbac0ae-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"06db8474-04b0-4525-90f0-c066ecbac0ae\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.813914 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"3ec0f915-5676-4d2a-b743-1f8ee11c01ac\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.813944 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06db8474-04b0-4525-90f0-c066ecbac0ae-config-data\") pod \"ceilometer-0\" (UID: \"06db8474-04b0-4525-90f0-c066ecbac0ae\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.814034 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/07e11378-90b7-4994-99fa-6e2d78b61f63-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"07e11378-90b7-4994-99fa-6e2d78b61f63\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.814057 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/06db8474-04b0-4525-90f0-c066ecbac0ae-log-httpd\") pod \"ceilometer-0\" (UID: \"06db8474-04b0-4525-90f0-c066ecbac0ae\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:50:32 crc kubenswrapper[4558]: E0120 17:50:32.814275 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-cinder-internal-svc: secret "cert-cinder-internal-svc" not found Jan 20 17:50:32 crc kubenswrapper[4558]: E0120 17:50:32.814285 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-glance-default-public-svc: secret "cert-glance-default-public-svc" not found Jan 20 17:50:32 crc kubenswrapper[4558]: E0120 17:50:32.814336 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-internal-tls-certs podName:3ec0f915-5676-4d2a-b743-1f8ee11c01ac nodeName:}" failed. No retries permitted until 2026-01-20 17:50:34.814316021 +0000 UTC m=+4128.574653987 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-internal-tls-certs") pod "cinder-api-0" (UID: "3ec0f915-5676-4d2a-b743-1f8ee11c01ac") : secret "cert-cinder-internal-svc" not found Jan 20 17:50:32 crc kubenswrapper[4558]: E0120 17:50:32.814393 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/07e11378-90b7-4994-99fa-6e2d78b61f63-public-tls-certs podName:07e11378-90b7-4994-99fa-6e2d78b61f63 nodeName:}" failed. No retries permitted until 2026-01-20 17:50:34.814368989 +0000 UTC m=+4128.574706957 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/07e11378-90b7-4994-99fa-6e2d78b61f63-public-tls-certs") pod "glance-default-external-api-0" (UID: "07e11378-90b7-4994-99fa-6e2d78b61f63") : secret "cert-glance-default-public-svc" not found Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.814631 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-public-tls-certs\") pod \"cinder-api-0\" (UID: \"3ec0f915-5676-4d2a-b743-1f8ee11c01ac\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:50:32 crc kubenswrapper[4558]: E0120 17:50:32.814716 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-cinder-public-svc: secret "cert-cinder-public-svc" not found Jan 20 17:50:32 crc kubenswrapper[4558]: E0120 17:50:32.814754 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-public-tls-certs podName:3ec0f915-5676-4d2a-b743-1f8ee11c01ac nodeName:}" failed. No retries permitted until 2026-01-20 17:50:34.814744325 +0000 UTC m=+4128.575082293 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-public-tls-certs") pod "cinder-api-0" (UID: "3ec0f915-5676-4d2a-b743-1f8ee11c01ac") : secret "cert-cinder-public-svc" not found Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.814787 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/06db8474-04b0-4525-90f0-c066ecbac0ae-scripts\") pod \"ceilometer-0\" (UID: \"06db8474-04b0-4525-90f0-c066ecbac0ae\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.814885 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/06db8474-04b0-4525-90f0-c066ecbac0ae-run-httpd\") pod \"ceilometer-0\" (UID: \"06db8474-04b0-4525-90f0-c066ecbac0ae\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.814903 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06db8474-04b0-4525-90f0-c066ecbac0ae-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"06db8474-04b0-4525-90f0-c066ecbac0ae\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.814948 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tf9wc\" (UniqueName: \"kubernetes.io/projected/06db8474-04b0-4525-90f0-c066ecbac0ae-kube-api-access-tf9wc\") pod \"ceilometer-0\" (UID: \"06db8474-04b0-4525-90f0-c066ecbac0ae\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.814968 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/06db8474-04b0-4525-90f0-c066ecbac0ae-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"06db8474-04b0-4525-90f0-c066ecbac0ae\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.916362 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/06db8474-04b0-4525-90f0-c066ecbac0ae-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"06db8474-04b0-4525-90f0-c066ecbac0ae\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.916417 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06db8474-04b0-4525-90f0-c066ecbac0ae-config-data\") pod \"ceilometer-0\" (UID: \"06db8474-04b0-4525-90f0-c066ecbac0ae\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.916516 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/06db8474-04b0-4525-90f0-c066ecbac0ae-log-httpd\") pod \"ceilometer-0\" (UID: \"06db8474-04b0-4525-90f0-c066ecbac0ae\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.916942 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/06db8474-04b0-4525-90f0-c066ecbac0ae-scripts\") pod \"ceilometer-0\" 
(UID: \"06db8474-04b0-4525-90f0-c066ecbac0ae\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.917062 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/06db8474-04b0-4525-90f0-c066ecbac0ae-run-httpd\") pod \"ceilometer-0\" (UID: \"06db8474-04b0-4525-90f0-c066ecbac0ae\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.917091 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06db8474-04b0-4525-90f0-c066ecbac0ae-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"06db8474-04b0-4525-90f0-c066ecbac0ae\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.917142 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tf9wc\" (UniqueName: \"kubernetes.io/projected/06db8474-04b0-4525-90f0-c066ecbac0ae-kube-api-access-tf9wc\") pod \"ceilometer-0\" (UID: \"06db8474-04b0-4525-90f0-c066ecbac0ae\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.917193 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/06db8474-04b0-4525-90f0-c066ecbac0ae-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"06db8474-04b0-4525-90f0-c066ecbac0ae\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:50:32 crc kubenswrapper[4558]: E0120 17:50:32.917636 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-ceilometer-internal-svc: secret "cert-ceilometer-internal-svc" not found Jan 20 17:50:32 crc kubenswrapper[4558]: E0120 17:50:32.917744 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/06db8474-04b0-4525-90f0-c066ecbac0ae-ceilometer-tls-certs podName:06db8474-04b0-4525-90f0-c066ecbac0ae nodeName:}" failed. No retries permitted until 2026-01-20 17:50:33.417717433 +0000 UTC m=+4127.178055401 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "ceilometer-tls-certs" (UniqueName: "kubernetes.io/secret/06db8474-04b0-4525-90f0-c066ecbac0ae-ceilometer-tls-certs") pod "ceilometer-0" (UID: "06db8474-04b0-4525-90f0-c066ecbac0ae") : secret "cert-ceilometer-internal-svc" not found Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.918560 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/06db8474-04b0-4525-90f0-c066ecbac0ae-run-httpd\") pod \"ceilometer-0\" (UID: \"06db8474-04b0-4525-90f0-c066ecbac0ae\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.918831 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/06db8474-04b0-4525-90f0-c066ecbac0ae-log-httpd\") pod \"ceilometer-0\" (UID: \"06db8474-04b0-4525-90f0-c066ecbac0ae\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.923549 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/06db8474-04b0-4525-90f0-c066ecbac0ae-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"06db8474-04b0-4525-90f0-c066ecbac0ae\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.926601 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06db8474-04b0-4525-90f0-c066ecbac0ae-config-data\") pod \"ceilometer-0\" (UID: \"06db8474-04b0-4525-90f0-c066ecbac0ae\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.928571 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06db8474-04b0-4525-90f0-c066ecbac0ae-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"06db8474-04b0-4525-90f0-c066ecbac0ae\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.930768 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/06db8474-04b0-4525-90f0-c066ecbac0ae-scripts\") pod \"ceilometer-0\" (UID: \"06db8474-04b0-4525-90f0-c066ecbac0ae\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.940115 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tf9wc\" (UniqueName: \"kubernetes.io/projected/06db8474-04b0-4525-90f0-c066ecbac0ae-kube-api-access-tf9wc\") pod \"ceilometer-0\" (UID: \"06db8474-04b0-4525-90f0-c066ecbac0ae\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:50:32 crc kubenswrapper[4558]: I0120 17:50:32.946789 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:50:32 crc kubenswrapper[4558]: W0120 17:50:32.947558 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd9321ea5_ab3b_4f00_b4d0_f2c2f8c2deb2.slice/crio-0240486ebec76254c8e91fdf408a294730073f9a2f2c48ced46cd50130092549 WatchSource:0}: Error finding container 0240486ebec76254c8e91fdf408a294730073f9a2f2c48ced46cd50130092549: Status 404 returned error can't find the container with id 0240486ebec76254c8e91fdf408a294730073f9a2f2c48ced46cd50130092549 Jan 20 17:50:33 crc kubenswrapper[4558]: I0120 17:50:33.019330 4558 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/80a7bd73-c63f-4a27-9d3d-3fef760c025f-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"80a7bd73-c63f-4a27-9d3d-3fef760c025f\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:50:33 crc kubenswrapper[4558]: I0120 17:50:33.019412 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/80a7bd73-c63f-4a27-9d3d-3fef760c025f-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"80a7bd73-c63f-4a27-9d3d-3fef760c025f\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:50:33 crc kubenswrapper[4558]: E0120 17:50:33.019524 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-ovn-metrics: secret "cert-ovn-metrics" not found Jan 20 17:50:33 crc kubenswrapper[4558]: E0120 17:50:33.019577 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/80a7bd73-c63f-4a27-9d3d-3fef760c025f-metrics-certs-tls-certs podName:80a7bd73-c63f-4a27-9d3d-3fef760c025f nodeName:}" failed. No retries permitted until 2026-01-20 17:50:35.019564024 +0000 UTC m=+4128.779901991 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs-tls-certs" (UniqueName: "kubernetes.io/secret/80a7bd73-c63f-4a27-9d3d-3fef760c025f-metrics-certs-tls-certs") pod "ovn-northd-0" (UID: "80a7bd73-c63f-4a27-9d3d-3fef760c025f") : secret "cert-ovn-metrics" not found Jan 20 17:50:33 crc kubenswrapper[4558]: E0120 17:50:33.019848 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-ovnnorthd-ovndbs: secret "cert-ovnnorthd-ovndbs" not found Jan 20 17:50:33 crc kubenswrapper[4558]: E0120 17:50:33.019898 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/80a7bd73-c63f-4a27-9d3d-3fef760c025f-ovn-northd-tls-certs podName:80a7bd73-c63f-4a27-9d3d-3fef760c025f nodeName:}" failed. No retries permitted until 2026-01-20 17:50:35.019888614 +0000 UTC m=+4128.780226581 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "ovn-northd-tls-certs" (UniqueName: "kubernetes.io/secret/80a7bd73-c63f-4a27-9d3d-3fef760c025f-ovn-northd-tls-certs") pod "ovn-northd-0" (UID: "80a7bd73-c63f-4a27-9d3d-3fef760c025f") : secret "cert-ovnnorthd-ovndbs" not found Jan 20 17:50:33 crc kubenswrapper[4558]: I0120 17:50:33.429111 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/06db8474-04b0-4525-90f0-c066ecbac0ae-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"06db8474-04b0-4525-90f0-c066ecbac0ae\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:50:33 crc kubenswrapper[4558]: I0120 17:50:33.429640 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/733a1ae1-a917-447a-afa6-67cf7d688f86-public-tls-certs\") pod \"nova-api-0\" (UID: \"733a1ae1-a917-447a-afa6-67cf7d688f86\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:50:33 crc kubenswrapper[4558]: I0120 17:50:33.429674 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/733a1ae1-a917-447a-afa6-67cf7d688f86-internal-tls-certs\") pod \"nova-api-0\" (UID: \"733a1ae1-a917-447a-afa6-67cf7d688f86\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:50:33 crc kubenswrapper[4558]: E0120 17:50:33.429372 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-ceilometer-internal-svc: secret "cert-ceilometer-internal-svc" not found Jan 20 17:50:33 crc kubenswrapper[4558]: E0120 17:50:33.430041 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-nova-public-svc: secret "cert-nova-public-svc" not found Jan 20 17:50:33 crc kubenswrapper[4558]: E0120 17:50:33.429936 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-kube-state-metrics-svc: secret "cert-kube-state-metrics-svc" not found Jan 20 17:50:33 crc kubenswrapper[4558]: E0120 17:50:33.430128 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/06db8474-04b0-4525-90f0-c066ecbac0ae-ceilometer-tls-certs podName:06db8474-04b0-4525-90f0-c066ecbac0ae nodeName:}" failed. No retries permitted until 2026-01-20 17:50:34.430082843 +0000 UTC m=+4128.190420811 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "ceilometer-tls-certs" (UniqueName: "kubernetes.io/secret/06db8474-04b0-4525-90f0-c066ecbac0ae-ceilometer-tls-certs") pod "ceilometer-0" (UID: "06db8474-04b0-4525-90f0-c066ecbac0ae") : secret "cert-ceilometer-internal-svc" not found Jan 20 17:50:33 crc kubenswrapper[4558]: E0120 17:50:33.430137 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-nova-internal-svc: secret "cert-nova-internal-svc" not found Jan 20 17:50:33 crc kubenswrapper[4558]: E0120 17:50:33.430196 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/49822b01-63d0-40d3-b604-7980891b0683-kube-state-metrics-tls-certs podName:49822b01-63d0-40d3-b604-7980891b0683 nodeName:}" failed. No retries permitted until 2026-01-20 17:50:37.430151282 +0000 UTC m=+4131.190489249 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-state-metrics-tls-certs" (UniqueName: "kubernetes.io/secret/49822b01-63d0-40d3-b604-7980891b0683-kube-state-metrics-tls-certs") pod "kube-state-metrics-0" (UID: "49822b01-63d0-40d3-b604-7980891b0683") : secret "cert-kube-state-metrics-svc" not found Jan 20 17:50:33 crc kubenswrapper[4558]: E0120 17:50:33.430232 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/733a1ae1-a917-447a-afa6-67cf7d688f86-internal-tls-certs podName:733a1ae1-a917-447a-afa6-67cf7d688f86 nodeName:}" failed. No retries permitted until 2026-01-20 17:50:35.430210464 +0000 UTC m=+4129.190548431 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/733a1ae1-a917-447a-afa6-67cf7d688f86-internal-tls-certs") pod "nova-api-0" (UID: "733a1ae1-a917-447a-afa6-67cf7d688f86") : secret "cert-nova-internal-svc" not found Jan 20 17:50:33 crc kubenswrapper[4558]: E0120 17:50:33.430260 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/733a1ae1-a917-447a-afa6-67cf7d688f86-public-tls-certs podName:733a1ae1-a917-447a-afa6-67cf7d688f86 nodeName:}" failed. No retries permitted until 2026-01-20 17:50:35.430248555 +0000 UTC m=+4129.190586522 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/733a1ae1-a917-447a-afa6-67cf7d688f86-public-tls-certs") pod "nova-api-0" (UID: "733a1ae1-a917-447a-afa6-67cf7d688f86") : secret "cert-nova-public-svc" not found Jan 20 17:50:33 crc kubenswrapper[4558]: I0120 17:50:33.605299 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2","Type":"ContainerStarted","Data":"6d47d3d41183f226f75449e13968a59a7a2d75eca49f1c5ad158b78e20b4ff2a"} Jan 20 17:50:33 crc kubenswrapper[4558]: I0120 17:50:33.605365 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2","Type":"ContainerStarted","Data":"0240486ebec76254c8e91fdf408a294730073f9a2f2c48ced46cd50130092549"} Jan 20 17:50:33 crc kubenswrapper[4558]: I0120 17:50:33.610781 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"51d19369-14ac-4d62-ab05-9c5830856622","Type":"ContainerStarted","Data":"dc3c169cf981fe359f960f5ef7f1e452664aacea8447f6ecb0fcdf2aa8023e7a"} Jan 20 17:50:33 crc kubenswrapper[4558]: I0120 17:50:33.613214 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"924fa7ce-8d60-4b2f-b62b-d5e146474f71","Type":"ContainerStarted","Data":"51b725953cbe989b3d1801020e679e346a8ac5643918b80a84ec2b9a8ed5efcb"} Jan 20 17:50:33 crc kubenswrapper[4558]: I0120 17:50:33.613395 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:50:33 crc kubenswrapper[4558]: I0120 17:50:33.625459 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-scheduler-0" podStartSLOduration=1.6254409399999998 podStartE2EDuration="1.62544094s" podCreationTimestamp="2026-01-20 17:50:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:50:33.622807449 +0000 UTC m=+4127.383145416" 
watchObservedRunningTime="2026-01-20 17:50:33.62544094 +0000 UTC m=+4127.385778907" Jan 20 17:50:33 crc kubenswrapper[4558]: I0120 17:50:33.655840 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-stgv6"] Jan 20 17:50:33 crc kubenswrapper[4558]: I0120 17:50:33.658246 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-stgv6" Jan 20 17:50:33 crc kubenswrapper[4558]: I0120 17:50:33.664473 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/cinder-scheduler-0" podStartSLOduration=2.664451906 podStartE2EDuration="2.664451906s" podCreationTimestamp="2026-01-20 17:50:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:50:33.642921901 +0000 UTC m=+4127.403259868" watchObservedRunningTime="2026-01-20 17:50:33.664451906 +0000 UTC m=+4127.424789873" Jan 20 17:50:33 crc kubenswrapper[4558]: I0120 17:50:33.690456 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-stgv6"] Jan 20 17:50:33 crc kubenswrapper[4558]: I0120 17:50:33.838633 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-44zs5\" (UniqueName: \"kubernetes.io/projected/242c4cdd-2c26-40bd-acaf-782a26f5f6eb-kube-api-access-44zs5\") pod \"community-operators-stgv6\" (UID: \"242c4cdd-2c26-40bd-acaf-782a26f5f6eb\") " pod="openshift-marketplace/community-operators-stgv6" Jan 20 17:50:33 crc kubenswrapper[4558]: I0120 17:50:33.838764 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/242c4cdd-2c26-40bd-acaf-782a26f5f6eb-catalog-content\") pod \"community-operators-stgv6\" (UID: \"242c4cdd-2c26-40bd-acaf-782a26f5f6eb\") " pod="openshift-marketplace/community-operators-stgv6" Jan 20 17:50:33 crc kubenswrapper[4558]: I0120 17:50:33.839048 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/242c4cdd-2c26-40bd-acaf-782a26f5f6eb-utilities\") pod \"community-operators-stgv6\" (UID: \"242c4cdd-2c26-40bd-acaf-782a26f5f6eb\") " pod="openshift-marketplace/community-operators-stgv6" Jan 20 17:50:33 crc kubenswrapper[4558]: I0120 17:50:33.940585 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-44zs5\" (UniqueName: \"kubernetes.io/projected/242c4cdd-2c26-40bd-acaf-782a26f5f6eb-kube-api-access-44zs5\") pod \"community-operators-stgv6\" (UID: \"242c4cdd-2c26-40bd-acaf-782a26f5f6eb\") " pod="openshift-marketplace/community-operators-stgv6" Jan 20 17:50:33 crc kubenswrapper[4558]: I0120 17:50:33.940666 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/242c4cdd-2c26-40bd-acaf-782a26f5f6eb-catalog-content\") pod \"community-operators-stgv6\" (UID: \"242c4cdd-2c26-40bd-acaf-782a26f5f6eb\") " pod="openshift-marketplace/community-operators-stgv6" Jan 20 17:50:33 crc kubenswrapper[4558]: I0120 17:50:33.940859 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/242c4cdd-2c26-40bd-acaf-782a26f5f6eb-utilities\") pod \"community-operators-stgv6\" (UID: 
\"242c4cdd-2c26-40bd-acaf-782a26f5f6eb\") " pod="openshift-marketplace/community-operators-stgv6" Jan 20 17:50:33 crc kubenswrapper[4558]: I0120 17:50:33.941339 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/242c4cdd-2c26-40bd-acaf-782a26f5f6eb-utilities\") pod \"community-operators-stgv6\" (UID: \"242c4cdd-2c26-40bd-acaf-782a26f5f6eb\") " pod="openshift-marketplace/community-operators-stgv6" Jan 20 17:50:33 crc kubenswrapper[4558]: I0120 17:50:33.941832 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/242c4cdd-2c26-40bd-acaf-782a26f5f6eb-catalog-content\") pod \"community-operators-stgv6\" (UID: \"242c4cdd-2c26-40bd-acaf-782a26f5f6eb\") " pod="openshift-marketplace/community-operators-stgv6" Jan 20 17:50:33 crc kubenswrapper[4558]: I0120 17:50:33.961859 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-44zs5\" (UniqueName: \"kubernetes.io/projected/242c4cdd-2c26-40bd-acaf-782a26f5f6eb-kube-api-access-44zs5\") pod \"community-operators-stgv6\" (UID: \"242c4cdd-2c26-40bd-acaf-782a26f5f6eb\") " pod="openshift-marketplace/community-operators-stgv6" Jan 20 17:50:33 crc kubenswrapper[4558]: I0120 17:50:33.984865 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-stgv6" Jan 20 17:50:34 crc kubenswrapper[4558]: I0120 17:50:34.350592 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d127fb40-6eff-4ccd-922c-b209ac4edbda-internal-tls-certs\") pod \"barbican-api-5968cb86f6-ctfg5\" (UID: \"d127fb40-6eff-4ccd-922c-b209ac4edbda\") " pod="openstack-kuttl-tests/barbican-api-5968cb86f6-ctfg5" Jan 20 17:50:34 crc kubenswrapper[4558]: I0120 17:50:34.350915 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/38ce22a3-0b53-417f-ab78-77a3be2da04f-internal-tls-certs\") pod \"placement-9cbb9c58b-jcwzc\" (UID: \"38ce22a3-0b53-417f-ab78-77a3be2da04f\") " pod="openstack-kuttl-tests/placement-9cbb9c58b-jcwzc" Jan 20 17:50:34 crc kubenswrapper[4558]: I0120 17:50:34.351004 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0a3695d2-9733-430d-a489-694e9e97b586-public-tls-certs\") pod \"keystone-9795586d8-4dv95\" (UID: \"0a3695d2-9733-430d-a489-694e9e97b586\") " pod="openstack-kuttl-tests/keystone-9795586d8-4dv95" Jan 20 17:50:34 crc kubenswrapper[4558]: I0120 17:50:34.351044 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d127fb40-6eff-4ccd-922c-b209ac4edbda-public-tls-certs\") pod \"barbican-api-5968cb86f6-ctfg5\" (UID: \"d127fb40-6eff-4ccd-922c-b209ac4edbda\") " pod="openstack-kuttl-tests/barbican-api-5968cb86f6-ctfg5" Jan 20 17:50:34 crc kubenswrapper[4558]: I0120 17:50:34.351128 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0a3695d2-9733-430d-a489-694e9e97b586-internal-tls-certs\") pod \"keystone-9795586d8-4dv95\" (UID: \"0a3695d2-9733-430d-a489-694e9e97b586\") " pod="openstack-kuttl-tests/keystone-9795586d8-4dv95" Jan 20 17:50:34 crc kubenswrapper[4558]: I0120 17:50:34.351193 4558 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/38ce22a3-0b53-417f-ab78-77a3be2da04f-public-tls-certs\") pod \"placement-9cbb9c58b-jcwzc\" (UID: \"38ce22a3-0b53-417f-ab78-77a3be2da04f\") " pod="openstack-kuttl-tests/placement-9cbb9c58b-jcwzc" Jan 20 17:50:34 crc kubenswrapper[4558]: I0120 17:50:34.351261 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-public-tls-certs\") pod \"neutron-7b9876b66d-znq49\" (UID: \"7458929c-bc4e-4f17-b199-4963b298f4e8\") " pod="openstack-kuttl-tests/neutron-7b9876b66d-znq49" Jan 20 17:50:34 crc kubenswrapper[4558]: I0120 17:50:34.351293 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-ovndb-tls-certs\") pod \"neutron-7b9876b66d-znq49\" (UID: \"7458929c-bc4e-4f17-b199-4963b298f4e8\") " pod="openstack-kuttl-tests/neutron-7b9876b66d-znq49" Jan 20 17:50:34 crc kubenswrapper[4558]: I0120 17:50:34.351392 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-internal-tls-certs\") pod \"neutron-7b9876b66d-znq49\" (UID: \"7458929c-bc4e-4f17-b199-4963b298f4e8\") " pod="openstack-kuttl-tests/neutron-7b9876b66d-znq49" Jan 20 17:50:34 crc kubenswrapper[4558]: E0120 17:50:34.352406 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-neutron-internal-svc: secret "cert-neutron-internal-svc" not found Jan 20 17:50:34 crc kubenswrapper[4558]: E0120 17:50:34.352436 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-barbican-internal-svc: secret "cert-barbican-internal-svc" not found Jan 20 17:50:34 crc kubenswrapper[4558]: E0120 17:50:34.352458 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-internal-tls-certs podName:7458929c-bc4e-4f17-b199-4963b298f4e8 nodeName:}" failed. No retries permitted until 2026-01-20 17:50:42.352444591 +0000 UTC m=+4136.112782549 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-internal-tls-certs") pod "neutron-7b9876b66d-znq49" (UID: "7458929c-bc4e-4f17-b199-4963b298f4e8") : secret "cert-neutron-internal-svc" not found Jan 20 17:50:34 crc kubenswrapper[4558]: E0120 17:50:34.352481 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d127fb40-6eff-4ccd-922c-b209ac4edbda-internal-tls-certs podName:d127fb40-6eff-4ccd-922c-b209ac4edbda nodeName:}" failed. No retries permitted until 2026-01-20 17:50:42.352474848 +0000 UTC m=+4136.112812815 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/d127fb40-6eff-4ccd-922c-b209ac4edbda-internal-tls-certs") pod "barbican-api-5968cb86f6-ctfg5" (UID: "d127fb40-6eff-4ccd-922c-b209ac4edbda") : secret "cert-barbican-internal-svc" not found Jan 20 17:50:34 crc kubenswrapper[4558]: E0120 17:50:34.352487 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-placement-internal-svc: secret "cert-placement-internal-svc" not found Jan 20 17:50:34 crc kubenswrapper[4558]: E0120 17:50:34.352511 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/38ce22a3-0b53-417f-ab78-77a3be2da04f-internal-tls-certs podName:38ce22a3-0b53-417f-ab78-77a3be2da04f nodeName:}" failed. No retries permitted until 2026-01-20 17:50:42.352500657 +0000 UTC m=+4136.112838624 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/38ce22a3-0b53-417f-ab78-77a3be2da04f-internal-tls-certs") pod "placement-9cbb9c58b-jcwzc" (UID: "38ce22a3-0b53-417f-ab78-77a3be2da04f") : secret "cert-placement-internal-svc" not found Jan 20 17:50:34 crc kubenswrapper[4558]: E0120 17:50:34.352514 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-keystone-internal-svc: secret "cert-keystone-internal-svc" not found Jan 20 17:50:34 crc kubenswrapper[4558]: E0120 17:50:34.352534 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0a3695d2-9733-430d-a489-694e9e97b586-internal-tls-certs podName:0a3695d2-9733-430d-a489-694e9e97b586 nodeName:}" failed. No retries permitted until 2026-01-20 17:50:42.352528108 +0000 UTC m=+4136.112866075 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/0a3695d2-9733-430d-a489-694e9e97b586-internal-tls-certs") pod "keystone-9795586d8-4dv95" (UID: "0a3695d2-9733-430d-a489-694e9e97b586") : secret "cert-keystone-internal-svc" not found Jan 20 17:50:34 crc kubenswrapper[4558]: E0120 17:50:34.352541 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-keystone-public-svc: secret "cert-keystone-public-svc" not found Jan 20 17:50:34 crc kubenswrapper[4558]: E0120 17:50:34.352557 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0a3695d2-9733-430d-a489-694e9e97b586-public-tls-certs podName:0a3695d2-9733-430d-a489-694e9e97b586 nodeName:}" failed. No retries permitted until 2026-01-20 17:50:42.352551883 +0000 UTC m=+4136.112889850 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/0a3695d2-9733-430d-a489-694e9e97b586-public-tls-certs") pod "keystone-9795586d8-4dv95" (UID: "0a3695d2-9733-430d-a489-694e9e97b586") : secret "cert-keystone-public-svc" not found Jan 20 17:50:34 crc kubenswrapper[4558]: E0120 17:50:34.352560 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-placement-public-svc: secret "cert-placement-public-svc" not found Jan 20 17:50:34 crc kubenswrapper[4558]: E0120 17:50:34.352577 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/38ce22a3-0b53-417f-ab78-77a3be2da04f-public-tls-certs podName:38ce22a3-0b53-417f-ab78-77a3be2da04f nodeName:}" failed. No retries permitted until 2026-01-20 17:50:42.352572522 +0000 UTC m=+4136.112910488 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/38ce22a3-0b53-417f-ab78-77a3be2da04f-public-tls-certs") pod "placement-9cbb9c58b-jcwzc" (UID: "38ce22a3-0b53-417f-ab78-77a3be2da04f") : secret "cert-placement-public-svc" not found Jan 20 17:50:34 crc kubenswrapper[4558]: E0120 17:50:34.352407 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-barbican-public-svc: secret "cert-barbican-public-svc" not found Jan 20 17:50:34 crc kubenswrapper[4558]: E0120 17:50:34.352584 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-neutron-ovndbs: secret "cert-neutron-ovndbs" not found Jan 20 17:50:34 crc kubenswrapper[4558]: E0120 17:50:34.352595 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d127fb40-6eff-4ccd-922c-b209ac4edbda-public-tls-certs podName:d127fb40-6eff-4ccd-922c-b209ac4edbda nodeName:}" failed. No retries permitted until 2026-01-20 17:50:42.352590615 +0000 UTC m=+4136.112928583 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/d127fb40-6eff-4ccd-922c-b209ac4edbda-public-tls-certs") pod "barbican-api-5968cb86f6-ctfg5" (UID: "d127fb40-6eff-4ccd-922c-b209ac4edbda") : secret "cert-barbican-public-svc" not found Jan 20 17:50:34 crc kubenswrapper[4558]: E0120 17:50:34.352604 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-ovndb-tls-certs podName:7458929c-bc4e-4f17-b199-4963b298f4e8 nodeName:}" failed. No retries permitted until 2026-01-20 17:50:42.352599913 +0000 UTC m=+4136.112937880 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "ovndb-tls-certs" (UniqueName: "kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-ovndb-tls-certs") pod "neutron-7b9876b66d-znq49" (UID: "7458929c-bc4e-4f17-b199-4963b298f4e8") : secret "cert-neutron-ovndbs" not found Jan 20 17:50:34 crc kubenswrapper[4558]: E0120 17:50:34.352619 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-neutron-public-svc: secret "cert-neutron-public-svc" not found Jan 20 17:50:34 crc kubenswrapper[4558]: E0120 17:50:34.352635 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-public-tls-certs podName:7458929c-bc4e-4f17-b199-4963b298f4e8 nodeName:}" failed. No retries permitted until 2026-01-20 17:50:42.35262997 +0000 UTC m=+4136.112967938 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-public-tls-certs") pod "neutron-7b9876b66d-znq49" (UID: "7458929c-bc4e-4f17-b199-4963b298f4e8") : secret "cert-neutron-public-svc" not found Jan 20 17:50:34 crc kubenswrapper[4558]: I0120 17:50:34.433398 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-stgv6"] Jan 20 17:50:34 crc kubenswrapper[4558]: I0120 17:50:34.455236 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/06db8474-04b0-4525-90f0-c066ecbac0ae-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"06db8474-04b0-4525-90f0-c066ecbac0ae\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:50:34 crc kubenswrapper[4558]: E0120 17:50:34.456040 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-ceilometer-internal-svc: secret "cert-ceilometer-internal-svc" not found Jan 20 17:50:34 crc kubenswrapper[4558]: E0120 17:50:34.456116 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/06db8474-04b0-4525-90f0-c066ecbac0ae-ceilometer-tls-certs podName:06db8474-04b0-4525-90f0-c066ecbac0ae nodeName:}" failed. No retries permitted until 2026-01-20 17:50:36.456098379 +0000 UTC m=+4130.216436346 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "ceilometer-tls-certs" (UniqueName: "kubernetes.io/secret/06db8474-04b0-4525-90f0-c066ecbac0ae-ceilometer-tls-certs") pod "ceilometer-0" (UID: "06db8474-04b0-4525-90f0-c066ecbac0ae") : secret "cert-ceilometer-internal-svc" not found Jan 20 17:50:34 crc kubenswrapper[4558]: I0120 17:50:34.577443 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a3e03093-9a03-4aac-bcde-475dda6c3dcc" path="/var/lib/kubelet/pods/a3e03093-9a03-4aac-bcde-475dda6c3dcc/volumes" Jan 20 17:50:34 crc kubenswrapper[4558]: I0120 17:50:34.627613 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-stgv6" event={"ID":"242c4cdd-2c26-40bd-acaf-782a26f5f6eb","Type":"ContainerStarted","Data":"1cc253a74acddde0e725640e1376aa131de2d972e7b7f20d27b7188e3d47c379"} Jan 20 17:50:34 crc kubenswrapper[4558]: I0120 17:50:34.627666 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-stgv6" event={"ID":"242c4cdd-2c26-40bd-acaf-782a26f5f6eb","Type":"ContainerStarted","Data":"3810dfe81460292ca94ce3f75a6bd3a2a39d3b975c0e42e821b93ed1a50245c3"} Jan 20 17:50:34 crc kubenswrapper[4558]: I0120 17:50:34.665095 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/18c09849-702b-4939-9cf1-0e06c6adc889-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"18c09849-702b-4939-9cf1-0e06c6adc889\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:50:34 crc kubenswrapper[4558]: E0120 17:50:34.665302 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-glance-default-internal-svc: secret "cert-glance-default-internal-svc" not found Jan 20 17:50:34 crc kubenswrapper[4558]: E0120 17:50:34.665377 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/18c09849-702b-4939-9cf1-0e06c6adc889-internal-tls-certs podName:18c09849-702b-4939-9cf1-0e06c6adc889 nodeName:}" failed. 
No retries permitted until 2026-01-20 17:50:38.66535489 +0000 UTC m=+4132.425692857 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/18c09849-702b-4939-9cf1-0e06c6adc889-internal-tls-certs") pod "glance-default-internal-api-0" (UID: "18c09849-702b-4939-9cf1-0e06c6adc889") : secret "cert-glance-default-internal-svc" not found Jan 20 17:50:34 crc kubenswrapper[4558]: I0120 17:50:34.871800 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"3ec0f915-5676-4d2a-b743-1f8ee11c01ac\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:50:34 crc kubenswrapper[4558]: I0120 17:50:34.871922 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/07e11378-90b7-4994-99fa-6e2d78b61f63-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"07e11378-90b7-4994-99fa-6e2d78b61f63\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:50:34 crc kubenswrapper[4558]: I0120 17:50:34.872007 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-public-tls-certs\") pod \"cinder-api-0\" (UID: \"3ec0f915-5676-4d2a-b743-1f8ee11c01ac\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:50:34 crc kubenswrapper[4558]: E0120 17:50:34.872383 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-cinder-public-svc: secret "cert-cinder-public-svc" not found Jan 20 17:50:34 crc kubenswrapper[4558]: E0120 17:50:34.872438 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-public-tls-certs podName:3ec0f915-5676-4d2a-b743-1f8ee11c01ac nodeName:}" failed. No retries permitted until 2026-01-20 17:50:38.872422236 +0000 UTC m=+4132.632760202 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-public-tls-certs") pod "cinder-api-0" (UID: "3ec0f915-5676-4d2a-b743-1f8ee11c01ac") : secret "cert-cinder-public-svc" not found Jan 20 17:50:34 crc kubenswrapper[4558]: E0120 17:50:34.872865 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-cinder-internal-svc: secret "cert-cinder-internal-svc" not found Jan 20 17:50:34 crc kubenswrapper[4558]: E0120 17:50:34.872899 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-internal-tls-certs podName:3ec0f915-5676-4d2a-b743-1f8ee11c01ac nodeName:}" failed. No retries permitted until 2026-01-20 17:50:38.872889343 +0000 UTC m=+4132.633227310 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-internal-tls-certs") pod "cinder-api-0" (UID: "3ec0f915-5676-4d2a-b743-1f8ee11c01ac") : secret "cert-cinder-internal-svc" not found Jan 20 17:50:34 crc kubenswrapper[4558]: I0120 17:50:34.880248 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/07e11378-90b7-4994-99fa-6e2d78b61f63-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"07e11378-90b7-4994-99fa-6e2d78b61f63\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:50:34 crc kubenswrapper[4558]: I0120 17:50:34.940545 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:50:35 crc kubenswrapper[4558]: I0120 17:50:35.076157 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/80a7bd73-c63f-4a27-9d3d-3fef760c025f-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"80a7bd73-c63f-4a27-9d3d-3fef760c025f\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:50:35 crc kubenswrapper[4558]: I0120 17:50:35.076244 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/80a7bd73-c63f-4a27-9d3d-3fef760c025f-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"80a7bd73-c63f-4a27-9d3d-3fef760c025f\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:50:35 crc kubenswrapper[4558]: E0120 17:50:35.076300 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-ovnnorthd-ovndbs: secret "cert-ovnnorthd-ovndbs" not found Jan 20 17:50:35 crc kubenswrapper[4558]: E0120 17:50:35.076388 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/80a7bd73-c63f-4a27-9d3d-3fef760c025f-ovn-northd-tls-certs podName:80a7bd73-c63f-4a27-9d3d-3fef760c025f nodeName:}" failed. No retries permitted until 2026-01-20 17:50:39.076368551 +0000 UTC m=+4132.836706518 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "ovn-northd-tls-certs" (UniqueName: "kubernetes.io/secret/80a7bd73-c63f-4a27-9d3d-3fef760c025f-ovn-northd-tls-certs") pod "ovn-northd-0" (UID: "80a7bd73-c63f-4a27-9d3d-3fef760c025f") : secret "cert-ovnnorthd-ovndbs" not found Jan 20 17:50:35 crc kubenswrapper[4558]: E0120 17:50:35.076477 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-ovn-metrics: secret "cert-ovn-metrics" not found Jan 20 17:50:35 crc kubenswrapper[4558]: E0120 17:50:35.076564 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/80a7bd73-c63f-4a27-9d3d-3fef760c025f-metrics-certs-tls-certs podName:80a7bd73-c63f-4a27-9d3d-3fef760c025f nodeName:}" failed. No retries permitted until 2026-01-20 17:50:39.076544472 +0000 UTC m=+4132.836882439 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs-tls-certs" (UniqueName: "kubernetes.io/secret/80a7bd73-c63f-4a27-9d3d-3fef760c025f-metrics-certs-tls-certs") pod "ovn-northd-0" (UID: "80a7bd73-c63f-4a27-9d3d-3fef760c025f") : secret "cert-ovn-metrics" not found Jan 20 17:50:35 crc kubenswrapper[4558]: I0120 17:50:35.315626 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:50:35 crc kubenswrapper[4558]: E0120 17:50:35.332027 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 51b725953cbe989b3d1801020e679e346a8ac5643918b80a84ec2b9a8ed5efcb is running failed: container process not found" containerID="51b725953cbe989b3d1801020e679e346a8ac5643918b80a84ec2b9a8ed5efcb" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:50:35 crc kubenswrapper[4558]: E0120 17:50:35.347704 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 51b725953cbe989b3d1801020e679e346a8ac5643918b80a84ec2b9a8ed5efcb is running failed: container process not found" containerID="51b725953cbe989b3d1801020e679e346a8ac5643918b80a84ec2b9a8ed5efcb" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:50:35 crc kubenswrapper[4558]: E0120 17:50:35.348305 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 51b725953cbe989b3d1801020e679e346a8ac5643918b80a84ec2b9a8ed5efcb is running failed: container process not found" containerID="51b725953cbe989b3d1801020e679e346a8ac5643918b80a84ec2b9a8ed5efcb" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:50:35 crc kubenswrapper[4558]: E0120 17:50:35.348467 4558 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 51b725953cbe989b3d1801020e679e346a8ac5643918b80a84ec2b9a8ed5efcb is running failed: container process not found" probeType="Liveness" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="924fa7ce-8d60-4b2f-b62b-d5e146474f71" containerName="nova-cell0-conductor-conductor" Jan 20 17:50:35 crc kubenswrapper[4558]: I0120 17:50:35.384653 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:50:35 crc kubenswrapper[4558]: I0120 17:50:35.385048 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:50:35 crc kubenswrapper[4558]: E0120 17:50:35.385228 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-ovn-metrics: secret "cert-ovn-metrics" not found Jan 20 17:50:35 crc kubenswrapper[4558]: E0120 17:50:35.385288 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-metrics-certs-tls-certs podName:bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162 nodeName:}" failed. 
No retries permitted until 2026-01-20 17:50:43.385273218 +0000 UTC m=+4137.145611185 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs-tls-certs" (UniqueName: "kubernetes.io/secret/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-metrics-certs-tls-certs") pod "ovsdbserver-nb-0" (UID: "bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162") : secret "cert-ovn-metrics" not found Jan 20 17:50:35 crc kubenswrapper[4558]: E0120 17:50:35.386004 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-ovndbcluster-nb-ovndbs: secret "cert-ovndbcluster-nb-ovndbs" not found Jan 20 17:50:35 crc kubenswrapper[4558]: E0120 17:50:35.386087 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-ovsdbserver-nb-tls-certs podName:bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162 nodeName:}" failed. No retries permitted until 2026-01-20 17:50:43.386067682 +0000 UTC m=+4137.146405649 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "ovsdbserver-nb-tls-certs" (UniqueName: "kubernetes.io/secret/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-ovsdbserver-nb-tls-certs") pod "ovsdbserver-nb-0" (UID: "bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162") : secret "cert-ovndbcluster-nb-ovndbs" not found Jan 20 17:50:35 crc kubenswrapper[4558]: I0120 17:50:35.486831 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/733a1ae1-a917-447a-afa6-67cf7d688f86-public-tls-certs\") pod \"nova-api-0\" (UID: \"733a1ae1-a917-447a-afa6-67cf7d688f86\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:50:35 crc kubenswrapper[4558]: I0120 17:50:35.486881 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/733a1ae1-a917-447a-afa6-67cf7d688f86-internal-tls-certs\") pod \"nova-api-0\" (UID: \"733a1ae1-a917-447a-afa6-67cf7d688f86\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:50:35 crc kubenswrapper[4558]: E0120 17:50:35.487101 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-nova-internal-svc: secret "cert-nova-internal-svc" not found Jan 20 17:50:35 crc kubenswrapper[4558]: E0120 17:50:35.487119 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-nova-public-svc: secret "cert-nova-public-svc" not found Jan 20 17:50:35 crc kubenswrapper[4558]: E0120 17:50:35.487830 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/733a1ae1-a917-447a-afa6-67cf7d688f86-public-tls-certs podName:733a1ae1-a917-447a-afa6-67cf7d688f86 nodeName:}" failed. No retries permitted until 2026-01-20 17:50:39.48775374 +0000 UTC m=+4133.248091707 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/733a1ae1-a917-447a-afa6-67cf7d688f86-public-tls-certs") pod "nova-api-0" (UID: "733a1ae1-a917-447a-afa6-67cf7d688f86") : secret "cert-nova-public-svc" not found Jan 20 17:50:35 crc kubenswrapper[4558]: E0120 17:50:35.487878 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/733a1ae1-a917-447a-afa6-67cf7d688f86-internal-tls-certs podName:733a1ae1-a917-447a-afa6-67cf7d688f86 nodeName:}" failed. No retries permitted until 2026-01-20 17:50:39.4878653 +0000 UTC m=+4133.248203267 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/733a1ae1-a917-447a-afa6-67cf7d688f86-internal-tls-certs") pod "nova-api-0" (UID: "733a1ae1-a917-447a-afa6-67cf7d688f86") : secret "cert-nova-internal-svc" not found Jan 20 17:50:35 crc kubenswrapper[4558]: I0120 17:50:35.644815 4558 generic.go:334] "Generic (PLEG): container finished" podID="242c4cdd-2c26-40bd-acaf-782a26f5f6eb" containerID="1cc253a74acddde0e725640e1376aa131de2d972e7b7f20d27b7188e3d47c379" exitCode=0 Jan 20 17:50:35 crc kubenswrapper[4558]: I0120 17:50:35.644886 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-stgv6" event={"ID":"242c4cdd-2c26-40bd-acaf-782a26f5f6eb","Type":"ContainerDied","Data":"1cc253a74acddde0e725640e1376aa131de2d972e7b7f20d27b7188e3d47c379"} Jan 20 17:50:35 crc kubenswrapper[4558]: I0120 17:50:35.647244 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"07e11378-90b7-4994-99fa-6e2d78b61f63","Type":"ContainerStarted","Data":"79d48f60aa75df8826b507a34aa9de691798da80e280b206bcc79a7c617d08df"} Jan 20 17:50:35 crc kubenswrapper[4558]: I0120 17:50:35.651476 4558 generic.go:334] "Generic (PLEG): container finished" podID="924fa7ce-8d60-4b2f-b62b-d5e146474f71" containerID="51b725953cbe989b3d1801020e679e346a8ac5643918b80a84ec2b9a8ed5efcb" exitCode=1 Jan 20 17:50:35 crc kubenswrapper[4558]: I0120 17:50:35.651520 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"924fa7ce-8d60-4b2f-b62b-d5e146474f71","Type":"ContainerDied","Data":"51b725953cbe989b3d1801020e679e346a8ac5643918b80a84ec2b9a8ed5efcb"} Jan 20 17:50:35 crc kubenswrapper[4558]: I0120 17:50:35.651549 4558 scope.go:117] "RemoveContainer" containerID="7e2524778a402821cdb0aaf4be80d7e2f9282c3873490c09d8e4b929dc8c5e6e" Jan 20 17:50:35 crc kubenswrapper[4558]: I0120 17:50:35.653082 4558 scope.go:117] "RemoveContainer" containerID="51b725953cbe989b3d1801020e679e346a8ac5643918b80a84ec2b9a8ed5efcb" Jan 20 17:50:35 crc kubenswrapper[4558]: E0120 17:50:35.653706 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-cell0-conductor-conductor\" with CrashLoopBackOff: \"back-off 10s restarting failed container=nova-cell0-conductor-conductor pod=nova-cell0-conductor-0_openstack-kuttl-tests(924fa7ce-8d60-4b2f-b62b-d5e146474f71)\"" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="924fa7ce-8d60-4b2f-b62b-d5e146474f71" Jan 20 17:50:35 crc kubenswrapper[4558]: I0120 17:50:35.852348 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-8c8mp"] Jan 20 17:50:35 crc kubenswrapper[4558]: I0120 17:50:35.854720 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-8c8mp" Jan 20 17:50:35 crc kubenswrapper[4558]: I0120 17:50:35.872949 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-8c8mp"] Jan 20 17:50:35 crc kubenswrapper[4558]: I0120 17:50:35.899850 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nfwrc\" (UniqueName: \"kubernetes.io/projected/2bf2d1ac-2610-4caf-a3dc-eb6ef3d260a4-kube-api-access-nfwrc\") pod \"redhat-operators-8c8mp\" (UID: \"2bf2d1ac-2610-4caf-a3dc-eb6ef3d260a4\") " pod="openshift-marketplace/redhat-operators-8c8mp" Jan 20 17:50:35 crc kubenswrapper[4558]: I0120 17:50:35.900043 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2bf2d1ac-2610-4caf-a3dc-eb6ef3d260a4-utilities\") pod \"redhat-operators-8c8mp\" (UID: \"2bf2d1ac-2610-4caf-a3dc-eb6ef3d260a4\") " pod="openshift-marketplace/redhat-operators-8c8mp" Jan 20 17:50:35 crc kubenswrapper[4558]: I0120 17:50:35.900306 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2bf2d1ac-2610-4caf-a3dc-eb6ef3d260a4-catalog-content\") pod \"redhat-operators-8c8mp\" (UID: \"2bf2d1ac-2610-4caf-a3dc-eb6ef3d260a4\") " pod="openshift-marketplace/redhat-operators-8c8mp" Jan 20 17:50:36 crc kubenswrapper[4558]: I0120 17:50:36.001419 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2bf2d1ac-2610-4caf-a3dc-eb6ef3d260a4-catalog-content\") pod \"redhat-operators-8c8mp\" (UID: \"2bf2d1ac-2610-4caf-a3dc-eb6ef3d260a4\") " pod="openshift-marketplace/redhat-operators-8c8mp" Jan 20 17:50:36 crc kubenswrapper[4558]: I0120 17:50:36.001638 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nfwrc\" (UniqueName: \"kubernetes.io/projected/2bf2d1ac-2610-4caf-a3dc-eb6ef3d260a4-kube-api-access-nfwrc\") pod \"redhat-operators-8c8mp\" (UID: \"2bf2d1ac-2610-4caf-a3dc-eb6ef3d260a4\") " pod="openshift-marketplace/redhat-operators-8c8mp" Jan 20 17:50:36 crc kubenswrapper[4558]: I0120 17:50:36.001703 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2bf2d1ac-2610-4caf-a3dc-eb6ef3d260a4-utilities\") pod \"redhat-operators-8c8mp\" (UID: \"2bf2d1ac-2610-4caf-a3dc-eb6ef3d260a4\") " pod="openshift-marketplace/redhat-operators-8c8mp" Jan 20 17:50:36 crc kubenswrapper[4558]: I0120 17:50:36.001877 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2bf2d1ac-2610-4caf-a3dc-eb6ef3d260a4-catalog-content\") pod \"redhat-operators-8c8mp\" (UID: \"2bf2d1ac-2610-4caf-a3dc-eb6ef3d260a4\") " pod="openshift-marketplace/redhat-operators-8c8mp" Jan 20 17:50:36 crc kubenswrapper[4558]: I0120 17:50:36.002158 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2bf2d1ac-2610-4caf-a3dc-eb6ef3d260a4-utilities\") pod \"redhat-operators-8c8mp\" (UID: \"2bf2d1ac-2610-4caf-a3dc-eb6ef3d260a4\") " pod="openshift-marketplace/redhat-operators-8c8mp" Jan 20 17:50:36 crc kubenswrapper[4558]: I0120 17:50:36.018742 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-nfwrc\" (UniqueName: \"kubernetes.io/projected/2bf2d1ac-2610-4caf-a3dc-eb6ef3d260a4-kube-api-access-nfwrc\") pod \"redhat-operators-8c8mp\" (UID: \"2bf2d1ac-2610-4caf-a3dc-eb6ef3d260a4\") " pod="openshift-marketplace/redhat-operators-8c8mp" Jan 20 17:50:36 crc kubenswrapper[4558]: I0120 17:50:36.044553 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-42qnz"] Jan 20 17:50:36 crc kubenswrapper[4558]: I0120 17:50:36.046596 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-42qnz" Jan 20 17:50:36 crc kubenswrapper[4558]: I0120 17:50:36.070195 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-42qnz"] Jan 20 17:50:36 crc kubenswrapper[4558]: I0120 17:50:36.173307 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-8c8mp" Jan 20 17:50:36 crc kubenswrapper[4558]: I0120 17:50:36.206460 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/317faea5-3184-4d0c-a9e5-7f038e779e08-utilities\") pod \"certified-operators-42qnz\" (UID: \"317faea5-3184-4d0c-a9e5-7f038e779e08\") " pod="openshift-marketplace/certified-operators-42qnz" Jan 20 17:50:36 crc kubenswrapper[4558]: I0120 17:50:36.206576 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4rsvj\" (UniqueName: \"kubernetes.io/projected/317faea5-3184-4d0c-a9e5-7f038e779e08-kube-api-access-4rsvj\") pod \"certified-operators-42qnz\" (UID: \"317faea5-3184-4d0c-a9e5-7f038e779e08\") " pod="openshift-marketplace/certified-operators-42qnz" Jan 20 17:50:36 crc kubenswrapper[4558]: I0120 17:50:36.206714 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/317faea5-3184-4d0c-a9e5-7f038e779e08-catalog-content\") pod \"certified-operators-42qnz\" (UID: \"317faea5-3184-4d0c-a9e5-7f038e779e08\") " pod="openshift-marketplace/certified-operators-42qnz" Jan 20 17:50:36 crc kubenswrapper[4558]: I0120 17:50:36.308423 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4rsvj\" (UniqueName: \"kubernetes.io/projected/317faea5-3184-4d0c-a9e5-7f038e779e08-kube-api-access-4rsvj\") pod \"certified-operators-42qnz\" (UID: \"317faea5-3184-4d0c-a9e5-7f038e779e08\") " pod="openshift-marketplace/certified-operators-42qnz" Jan 20 17:50:36 crc kubenswrapper[4558]: I0120 17:50:36.308590 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/317faea5-3184-4d0c-a9e5-7f038e779e08-catalog-content\") pod \"certified-operators-42qnz\" (UID: \"317faea5-3184-4d0c-a9e5-7f038e779e08\") " pod="openshift-marketplace/certified-operators-42qnz" Jan 20 17:50:36 crc kubenswrapper[4558]: I0120 17:50:36.308695 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/603501da-057d-4d3b-a682-e55e7e2916f4-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"603501da-057d-4d3b-a682-e55e7e2916f4\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:50:36 crc kubenswrapper[4558]: I0120 17:50:36.308831 4558 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/317faea5-3184-4d0c-a9e5-7f038e779e08-utilities\") pod \"certified-operators-42qnz\" (UID: \"317faea5-3184-4d0c-a9e5-7f038e779e08\") " pod="openshift-marketplace/certified-operators-42qnz" Jan 20 17:50:36 crc kubenswrapper[4558]: I0120 17:50:36.309309 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/317faea5-3184-4d0c-a9e5-7f038e779e08-utilities\") pod \"certified-operators-42qnz\" (UID: \"317faea5-3184-4d0c-a9e5-7f038e779e08\") " pod="openshift-marketplace/certified-operators-42qnz" Jan 20 17:50:36 crc kubenswrapper[4558]: E0120 17:50:36.309412 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-galera-openstack-svc: secret "cert-galera-openstack-svc" not found Jan 20 17:50:36 crc kubenswrapper[4558]: E0120 17:50:36.309459 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/603501da-057d-4d3b-a682-e55e7e2916f4-galera-tls-certs podName:603501da-057d-4d3b-a682-e55e7e2916f4 nodeName:}" failed. No retries permitted until 2026-01-20 17:50:44.309444428 +0000 UTC m=+4138.069782396 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "galera-tls-certs" (UniqueName: "kubernetes.io/secret/603501da-057d-4d3b-a682-e55e7e2916f4-galera-tls-certs") pod "openstack-galera-0" (UID: "603501da-057d-4d3b-a682-e55e7e2916f4") : secret "cert-galera-openstack-svc" not found Jan 20 17:50:36 crc kubenswrapper[4558]: I0120 17:50:36.309802 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/317faea5-3184-4d0c-a9e5-7f038e779e08-catalog-content\") pod \"certified-operators-42qnz\" (UID: \"317faea5-3184-4d0c-a9e5-7f038e779e08\") " pod="openshift-marketplace/certified-operators-42qnz" Jan 20 17:50:36 crc kubenswrapper[4558]: I0120 17:50:36.313493 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:50:36 crc kubenswrapper[4558]: I0120 17:50:36.313963 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:50:36 crc kubenswrapper[4558]: I0120 17:50:36.328190 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4rsvj\" (UniqueName: \"kubernetes.io/projected/317faea5-3184-4d0c-a9e5-7f038e779e08-kube-api-access-4rsvj\") pod \"certified-operators-42qnz\" (UID: \"317faea5-3184-4d0c-a9e5-7f038e779e08\") " pod="openshift-marketplace/certified-operators-42qnz" Jan 20 17:50:36 crc kubenswrapper[4558]: I0120 17:50:36.373302 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-42qnz" Jan 20 17:50:36 crc kubenswrapper[4558]: I0120 17:50:36.414141 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/fc60cc21-f159-40bf-beca-c62718f57b9c-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"fc60cc21-f159-40bf-beca-c62718f57b9c\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:50:36 crc kubenswrapper[4558]: E0120 17:50:36.414325 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-ovndbcluster-sb-ovndbs: secret "cert-ovndbcluster-sb-ovndbs" not found Jan 20 17:50:36 crc kubenswrapper[4558]: E0120 17:50:36.414438 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fc60cc21-f159-40bf-beca-c62718f57b9c-ovsdbserver-sb-tls-certs podName:fc60cc21-f159-40bf-beca-c62718f57b9c nodeName:}" failed. No retries permitted until 2026-01-20 17:50:44.41440838 +0000 UTC m=+4138.174746347 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "ovsdbserver-sb-tls-certs" (UniqueName: "kubernetes.io/secret/fc60cc21-f159-40bf-beca-c62718f57b9c-ovsdbserver-sb-tls-certs") pod "ovsdbserver-sb-0" (UID: "fc60cc21-f159-40bf-beca-c62718f57b9c") : secret "cert-ovndbcluster-sb-ovndbs" not found Jan 20 17:50:36 crc kubenswrapper[4558]: I0120 17:50:36.414717 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/5239f3e2-7f54-4042-b0da-02f7608224df-memcached-tls-certs\") pod \"memcached-0\" (UID: \"5239f3e2-7f54-4042-b0da-02f7608224df\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:50:36 crc kubenswrapper[4558]: I0120 17:50:36.414796 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/fc60cc21-f159-40bf-beca-c62718f57b9c-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"fc60cc21-f159-40bf-beca-c62718f57b9c\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:50:36 crc kubenswrapper[4558]: E0120 17:50:36.414909 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-ovn-metrics: secret "cert-ovn-metrics" not found Jan 20 17:50:36 crc kubenswrapper[4558]: E0120 17:50:36.414955 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fc60cc21-f159-40bf-beca-c62718f57b9c-metrics-certs-tls-certs podName:fc60cc21-f159-40bf-beca-c62718f57b9c nodeName:}" failed. No retries permitted until 2026-01-20 17:50:44.414946451 +0000 UTC m=+4138.175284419 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs-tls-certs" (UniqueName: "kubernetes.io/secret/fc60cc21-f159-40bf-beca-c62718f57b9c-metrics-certs-tls-certs") pod "ovsdbserver-sb-0" (UID: "fc60cc21-f159-40bf-beca-c62718f57b9c") : secret "cert-ovn-metrics" not found Jan 20 17:50:36 crc kubenswrapper[4558]: E0120 17:50:36.415011 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-memcached-svc: secret "cert-memcached-svc" not found Jan 20 17:50:36 crc kubenswrapper[4558]: E0120 17:50:36.415037 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5239f3e2-7f54-4042-b0da-02f7608224df-memcached-tls-certs podName:5239f3e2-7f54-4042-b0da-02f7608224df nodeName:}" failed. No retries permitted until 2026-01-20 17:50:44.415030209 +0000 UTC m=+4138.175368177 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "memcached-tls-certs" (UniqueName: "kubernetes.io/secret/5239f3e2-7f54-4042-b0da-02f7608224df-memcached-tls-certs") pod "memcached-0" (UID: "5239f3e2-7f54-4042-b0da-02f7608224df") : secret "cert-memcached-svc" not found Jan 20 17:50:36 crc kubenswrapper[4558]: I0120 17:50:36.516927 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/581c2419-48d9-4b7d-b71b-ce5cceb0326d-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"581c2419-48d9-4b7d-b71b-ce5cceb0326d\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:50:36 crc kubenswrapper[4558]: E0120 17:50:36.517079 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-nova-novncproxy-cell1-public-svc: secret "cert-nova-novncproxy-cell1-public-svc" not found Jan 20 17:50:36 crc kubenswrapper[4558]: E0120 17:50:36.517177 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/581c2419-48d9-4b7d-b71b-ce5cceb0326d-nova-novncproxy-tls-certs podName:581c2419-48d9-4b7d-b71b-ce5cceb0326d nodeName:}" failed. No retries permitted until 2026-01-20 17:50:44.51714338 +0000 UTC m=+4138.277481347 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nova-novncproxy-tls-certs" (UniqueName: "kubernetes.io/secret/581c2419-48d9-4b7d-b71b-ce5cceb0326d-nova-novncproxy-tls-certs") pod "nova-cell1-novncproxy-0" (UID: "581c2419-48d9-4b7d-b71b-ce5cceb0326d") : secret "cert-nova-novncproxy-cell1-public-svc" not found Jan 20 17:50:36 crc kubenswrapper[4558]: I0120 17:50:36.517217 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/06db8474-04b0-4525-90f0-c066ecbac0ae-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"06db8474-04b0-4525-90f0-c066ecbac0ae\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:50:36 crc kubenswrapper[4558]: I0120 17:50:36.517546 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/581c2419-48d9-4b7d-b71b-ce5cceb0326d-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"581c2419-48d9-4b7d-b71b-ce5cceb0326d\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:50:36 crc kubenswrapper[4558]: E0120 17:50:36.517670 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-ceilometer-internal-svc: secret "cert-ceilometer-internal-svc" not found Jan 20 17:50:36 crc kubenswrapper[4558]: E0120 17:50:36.517739 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/06db8474-04b0-4525-90f0-c066ecbac0ae-ceilometer-tls-certs podName:06db8474-04b0-4525-90f0-c066ecbac0ae nodeName:}" failed. No retries permitted until 2026-01-20 17:50:40.517721648 +0000 UTC m=+4134.278059605 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "ceilometer-tls-certs" (UniqueName: "kubernetes.io/secret/06db8474-04b0-4525-90f0-c066ecbac0ae-ceilometer-tls-certs") pod "ceilometer-0" (UID: "06db8474-04b0-4525-90f0-c066ecbac0ae") : secret "cert-ceilometer-internal-svc" not found Jan 20 17:50:36 crc kubenswrapper[4558]: I0120 17:50:36.522616 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/581c2419-48d9-4b7d-b71b-ce5cceb0326d-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"581c2419-48d9-4b7d-b71b-ce5cceb0326d\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:50:36 crc kubenswrapper[4558]: I0120 17:50:36.585306 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:50:36 crc kubenswrapper[4558]: I0120 17:50:36.623417 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-8c8mp"] Jan 20 17:50:36 crc kubenswrapper[4558]: W0120 17:50:36.644308 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2bf2d1ac_2610_4caf_a3dc_eb6ef3d260a4.slice/crio-a70f5fae70bdaf3b5839e9ac09365830fe9a90304ac75300d7f3d2432e655ffe WatchSource:0}: Error finding container a70f5fae70bdaf3b5839e9ac09365830fe9a90304ac75300d7f3d2432e655ffe: Status 404 returned error can't find the container with id a70f5fae70bdaf3b5839e9ac09365830fe9a90304ac75300d7f3d2432e655ffe Jan 20 17:50:36 crc kubenswrapper[4558]: I0120 17:50:36.665138 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-stgv6" event={"ID":"242c4cdd-2c26-40bd-acaf-782a26f5f6eb","Type":"ContainerStarted","Data":"b4447fbaa951e6b88adafc2a305cce7fed9efe1179b0c8d07311bd910bb0f3d5"} Jan 20 17:50:36 crc kubenswrapper[4558]: I0120 17:50:36.674783 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"07e11378-90b7-4994-99fa-6e2d78b61f63","Type":"ContainerStarted","Data":"9ee9796db29af4c22a51a195683b27835e60fea772e6cf6721ee2e162cc239b4"} Jan 20 17:50:36 crc kubenswrapper[4558]: I0120 17:50:36.674813 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"07e11378-90b7-4994-99fa-6e2d78b61f63","Type":"ContainerStarted","Data":"7eefed10cf0d89cd2ed97e7df9a22ee9e031b097685b34b4e3323811ac699db8"} Jan 20 17:50:36 crc kubenswrapper[4558]: I0120 17:50:36.686304 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8c8mp" event={"ID":"2bf2d1ac-2610-4caf-a3dc-eb6ef3d260a4","Type":"ContainerStarted","Data":"a70f5fae70bdaf3b5839e9ac09365830fe9a90304ac75300d7f3d2432e655ffe"} Jan 20 17:50:36 crc kubenswrapper[4558]: I0120 17:50:36.734860 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-external-api-0" podStartSLOduration=6.734835944 podStartE2EDuration="6.734835944s" podCreationTimestamp="2026-01-20 17:50:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:50:36.716569015 +0000 UTC m=+4130.476906983" watchObservedRunningTime="2026-01-20 17:50:36.734835944 +0000 UTC m=+4130.495173911" Jan 20 17:50:36 crc kubenswrapper[4558]: I0120 17:50:36.907101 4558 kubelet.go:2428] "SyncLoop UPDATE" 
source="api" pods=["openshift-marketplace/certified-operators-42qnz"] Jan 20 17:50:36 crc kubenswrapper[4558]: W0120 17:50:36.943600 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod317faea5_3184_4d0c_a9e5_7f038e779e08.slice/crio-7bc4eaca3d4a1ba2f865568e57a76a7c9ca19d4330d05bdc0844cab6ad5fe695 WatchSource:0}: Error finding container 7bc4eaca3d4a1ba2f865568e57a76a7c9ca19d4330d05bdc0844cab6ad5fe695: Status 404 returned error can't find the container with id 7bc4eaca3d4a1ba2f865568e57a76a7c9ca19d4330d05bdc0844cab6ad5fe695 Jan 20 17:50:37 crc kubenswrapper[4558]: E0120 17:50:37.443311 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-kube-state-metrics-svc: secret "cert-kube-state-metrics-svc" not found Jan 20 17:50:37 crc kubenswrapper[4558]: E0120 17:50:37.443663 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/49822b01-63d0-40d3-b604-7980891b0683-kube-state-metrics-tls-certs podName:49822b01-63d0-40d3-b604-7980891b0683 nodeName:}" failed. No retries permitted until 2026-01-20 17:50:45.443642047 +0000 UTC m=+4139.203980004 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-state-metrics-tls-certs" (UniqueName: "kubernetes.io/secret/49822b01-63d0-40d3-b604-7980891b0683-kube-state-metrics-tls-certs") pod "kube-state-metrics-0" (UID: "49822b01-63d0-40d3-b604-7980891b0683") : secret "cert-kube-state-metrics-svc" not found Jan 20 17:50:37 crc kubenswrapper[4558]: I0120 17:50:37.462221 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:50:37 crc kubenswrapper[4558]: I0120 17:50:37.703183 4558 generic.go:334] "Generic (PLEG): container finished" podID="317faea5-3184-4d0c-a9e5-7f038e779e08" containerID="c03312061f7266dcd57196ddf4d0814f1c93b4dd0503b2de614de31491810448" exitCode=0 Jan 20 17:50:37 crc kubenswrapper[4558]: I0120 17:50:37.703313 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-42qnz" event={"ID":"317faea5-3184-4d0c-a9e5-7f038e779e08","Type":"ContainerDied","Data":"c03312061f7266dcd57196ddf4d0814f1c93b4dd0503b2de614de31491810448"} Jan 20 17:50:37 crc kubenswrapper[4558]: I0120 17:50:37.703409 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-42qnz" event={"ID":"317faea5-3184-4d0c-a9e5-7f038e779e08","Type":"ContainerStarted","Data":"7bc4eaca3d4a1ba2f865568e57a76a7c9ca19d4330d05bdc0844cab6ad5fe695"} Jan 20 17:50:37 crc kubenswrapper[4558]: I0120 17:50:37.706114 4558 generic.go:334] "Generic (PLEG): container finished" podID="2bf2d1ac-2610-4caf-a3dc-eb6ef3d260a4" containerID="f4a01de2c1d753787c210f38e884d45f84511cbc851253d826b2f3a47fb108ad" exitCode=0 Jan 20 17:50:37 crc kubenswrapper[4558]: I0120 17:50:37.706212 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8c8mp" event={"ID":"2bf2d1ac-2610-4caf-a3dc-eb6ef3d260a4","Type":"ContainerDied","Data":"f4a01de2c1d753787c210f38e884d45f84511cbc851253d826b2f3a47fb108ad"} Jan 20 17:50:37 crc kubenswrapper[4558]: I0120 17:50:37.709821 4558 generic.go:334] "Generic (PLEG): container finished" podID="242c4cdd-2c26-40bd-acaf-782a26f5f6eb" containerID="b4447fbaa951e6b88adafc2a305cce7fed9efe1179b0c8d07311bd910bb0f3d5" exitCode=0 Jan 20 17:50:37 crc kubenswrapper[4558]: I0120 17:50:37.710082 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/community-operators-stgv6" event={"ID":"242c4cdd-2c26-40bd-acaf-782a26f5f6eb","Type":"ContainerDied","Data":"b4447fbaa951e6b88adafc2a305cce7fed9efe1179b0c8d07311bd910bb0f3d5"} Jan 20 17:50:38 crc kubenswrapper[4558]: I0120 17:50:38.316972 4558 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:50:38 crc kubenswrapper[4558]: I0120 17:50:38.317851 4558 scope.go:117] "RemoveContainer" containerID="51b725953cbe989b3d1801020e679e346a8ac5643918b80a84ec2b9a8ed5efcb" Jan 20 17:50:38 crc kubenswrapper[4558]: E0120 17:50:38.318362 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-cell0-conductor-conductor\" with CrashLoopBackOff: \"back-off 10s restarting failed container=nova-cell0-conductor-conductor pod=nova-cell0-conductor-0_openstack-kuttl-tests(924fa7ce-8d60-4b2f-b62b-d5e146474f71)\"" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="924fa7ce-8d60-4b2f-b62b-d5e146474f71" Jan 20 17:50:38 crc kubenswrapper[4558]: I0120 17:50:38.672806 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/18c09849-702b-4939-9cf1-0e06c6adc889-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"18c09849-702b-4939-9cf1-0e06c6adc889\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:50:38 crc kubenswrapper[4558]: E0120 17:50:38.672943 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-glance-default-internal-svc: secret "cert-glance-default-internal-svc" not found Jan 20 17:50:38 crc kubenswrapper[4558]: E0120 17:50:38.673030 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/18c09849-702b-4939-9cf1-0e06c6adc889-internal-tls-certs podName:18c09849-702b-4939-9cf1-0e06c6adc889 nodeName:}" failed. No retries permitted until 2026-01-20 17:50:46.673010373 +0000 UTC m=+4140.433348339 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/18c09849-702b-4939-9cf1-0e06c6adc889-internal-tls-certs") pod "glance-default-internal-api-0" (UID: "18c09849-702b-4939-9cf1-0e06c6adc889") : secret "cert-glance-default-internal-svc" not found Jan 20 17:50:38 crc kubenswrapper[4558]: I0120 17:50:38.719983 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-stgv6" event={"ID":"242c4cdd-2c26-40bd-acaf-782a26f5f6eb","Type":"ContainerStarted","Data":"a88a2d6d3e971adede38d8e407afe9af7d898a947f168c5b689a31bd59a6d537"} Jan 20 17:50:38 crc kubenswrapper[4558]: I0120 17:50:38.722316 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-42qnz" event={"ID":"317faea5-3184-4d0c-a9e5-7f038e779e08","Type":"ContainerStarted","Data":"108f827cc83a5bd00deb8be0ea3d91e1817061559a969d618b8baa66e1908eb5"} Jan 20 17:50:38 crc kubenswrapper[4558]: I0120 17:50:38.724375 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8c8mp" event={"ID":"2bf2d1ac-2610-4caf-a3dc-eb6ef3d260a4","Type":"ContainerStarted","Data":"1b8de3585535a4e7fb57dcc46de5f5105fc40133c5f4639db15d5125b4f810e1"} Jan 20 17:50:38 crc kubenswrapper[4558]: I0120 17:50:38.738714 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-stgv6" podStartSLOduration=3.051974999 podStartE2EDuration="5.738697449s" podCreationTimestamp="2026-01-20 17:50:33 +0000 UTC" firstStartedPulling="2026-01-20 17:50:35.647627422 +0000 UTC m=+4129.407965390" lastFinishedPulling="2026-01-20 17:50:38.334349873 +0000 UTC m=+4132.094687840" observedRunningTime="2026-01-20 17:50:38.734224498 +0000 UTC m=+4132.494562465" watchObservedRunningTime="2026-01-20 17:50:38.738697449 +0000 UTC m=+4132.499035416" Jan 20 17:50:38 crc kubenswrapper[4558]: I0120 17:50:38.877751 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"3ec0f915-5676-4d2a-b743-1f8ee11c01ac\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:50:38 crc kubenswrapper[4558]: E0120 17:50:38.877934 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-cinder-internal-svc: secret "cert-cinder-internal-svc" not found Jan 20 17:50:38 crc kubenswrapper[4558]: I0120 17:50:38.878112 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-public-tls-certs\") pod \"cinder-api-0\" (UID: \"3ec0f915-5676-4d2a-b743-1f8ee11c01ac\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:50:38 crc kubenswrapper[4558]: E0120 17:50:38.878140 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-internal-tls-certs podName:3ec0f915-5676-4d2a-b743-1f8ee11c01ac nodeName:}" failed. No retries permitted until 2026-01-20 17:50:46.878114836 +0000 UTC m=+4140.638452803 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-internal-tls-certs") pod "cinder-api-0" (UID: "3ec0f915-5676-4d2a-b743-1f8ee11c01ac") : secret "cert-cinder-internal-svc" not found Jan 20 17:50:38 crc kubenswrapper[4558]: E0120 17:50:38.878330 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-cinder-public-svc: secret "cert-cinder-public-svc" not found Jan 20 17:50:38 crc kubenswrapper[4558]: E0120 17:50:38.878435 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-public-tls-certs podName:3ec0f915-5676-4d2a-b743-1f8ee11c01ac nodeName:}" failed. No retries permitted until 2026-01-20 17:50:46.878407687 +0000 UTC m=+4140.638745654 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-public-tls-certs") pod "cinder-api-0" (UID: "3ec0f915-5676-4d2a-b743-1f8ee11c01ac") : secret "cert-cinder-public-svc" not found Jan 20 17:50:39 crc kubenswrapper[4558]: I0120 17:50:39.081655 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/80a7bd73-c63f-4a27-9d3d-3fef760c025f-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"80a7bd73-c63f-4a27-9d3d-3fef760c025f\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:50:39 crc kubenswrapper[4558]: I0120 17:50:39.081719 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/80a7bd73-c63f-4a27-9d3d-3fef760c025f-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"80a7bd73-c63f-4a27-9d3d-3fef760c025f\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:50:39 crc kubenswrapper[4558]: E0120 17:50:39.081914 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-ovn-metrics: secret "cert-ovn-metrics" not found Jan 20 17:50:39 crc kubenswrapper[4558]: E0120 17:50:39.081959 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/80a7bd73-c63f-4a27-9d3d-3fef760c025f-metrics-certs-tls-certs podName:80a7bd73-c63f-4a27-9d3d-3fef760c025f nodeName:}" failed. No retries permitted until 2026-01-20 17:50:47.081945214 +0000 UTC m=+4140.842283182 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs-tls-certs" (UniqueName: "kubernetes.io/secret/80a7bd73-c63f-4a27-9d3d-3fef760c025f-metrics-certs-tls-certs") pod "ovn-northd-0" (UID: "80a7bd73-c63f-4a27-9d3d-3fef760c025f") : secret "cert-ovn-metrics" not found Jan 20 17:50:39 crc kubenswrapper[4558]: I0120 17:50:39.087613 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/80a7bd73-c63f-4a27-9d3d-3fef760c025f-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"80a7bd73-c63f-4a27-9d3d-3fef760c025f\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:50:39 crc kubenswrapper[4558]: I0120 17:50:39.491894 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/733a1ae1-a917-447a-afa6-67cf7d688f86-public-tls-certs\") pod \"nova-api-0\" (UID: \"733a1ae1-a917-447a-afa6-67cf7d688f86\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:50:39 crc kubenswrapper[4558]: I0120 17:50:39.491958 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/733a1ae1-a917-447a-afa6-67cf7d688f86-internal-tls-certs\") pod \"nova-api-0\" (UID: \"733a1ae1-a917-447a-afa6-67cf7d688f86\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:50:39 crc kubenswrapper[4558]: E0120 17:50:39.492276 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-nova-internal-svc: secret "cert-nova-internal-svc" not found Jan 20 17:50:39 crc kubenswrapper[4558]: E0120 17:50:39.492391 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/733a1ae1-a917-447a-afa6-67cf7d688f86-internal-tls-certs podName:733a1ae1-a917-447a-afa6-67cf7d688f86 nodeName:}" failed. No retries permitted until 2026-01-20 17:50:47.492368525 +0000 UTC m=+4141.252706492 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/733a1ae1-a917-447a-afa6-67cf7d688f86-internal-tls-certs") pod "nova-api-0" (UID: "733a1ae1-a917-447a-afa6-67cf7d688f86") : secret "cert-nova-internal-svc" not found Jan 20 17:50:39 crc kubenswrapper[4558]: I0120 17:50:39.496113 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/733a1ae1-a917-447a-afa6-67cf7d688f86-public-tls-certs\") pod \"nova-api-0\" (UID: \"733a1ae1-a917-447a-afa6-67cf7d688f86\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:50:39 crc kubenswrapper[4558]: E0120 17:50:39.696180 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-neutron-public-svc: secret "cert-neutron-public-svc" not found Jan 20 17:50:39 crc kubenswrapper[4558]: E0120 17:50:39.696279 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6a566db2-a941-48c9-9169-8e2c16cda1ac-public-tls-certs podName:6a566db2-a941-48c9-9169-8e2c16cda1ac nodeName:}" failed. No retries permitted until 2026-01-20 17:50:40.196258033 +0000 UTC m=+4133.956596001 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/6a566db2-a941-48c9-9169-8e2c16cda1ac-public-tls-certs") pod "neutron-78bcff9576-76n9h" (UID: "6a566db2-a941-48c9-9169-8e2c16cda1ac") : secret "cert-neutron-public-svc" not found Jan 20 17:50:39 crc kubenswrapper[4558]: E0120 17:50:39.696379 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-neutron-ovndbs: secret "cert-neutron-ovndbs" not found Jan 20 17:50:39 crc kubenswrapper[4558]: E0120 17:50:39.696471 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6a566db2-a941-48c9-9169-8e2c16cda1ac-ovndb-tls-certs podName:6a566db2-a941-48c9-9169-8e2c16cda1ac nodeName:}" failed. No retries permitted until 2026-01-20 17:50:40.196449264 +0000 UTC m=+4133.956787230 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "ovndb-tls-certs" (UniqueName: "kubernetes.io/secret/6a566db2-a941-48c9-9169-8e2c16cda1ac-ovndb-tls-certs") pod "neutron-78bcff9576-76n9h" (UID: "6a566db2-a941-48c9-9169-8e2c16cda1ac") : secret "cert-neutron-ovndbs" not found Jan 20 17:50:39 crc kubenswrapper[4558]: E0120 17:50:39.696603 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-neutron-internal-svc: secret "cert-neutron-internal-svc" not found Jan 20 17:50:39 crc kubenswrapper[4558]: E0120 17:50:39.696653 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6a566db2-a941-48c9-9169-8e2c16cda1ac-internal-tls-certs podName:6a566db2-a941-48c9-9169-8e2c16cda1ac nodeName:}" failed. No retries permitted until 2026-01-20 17:50:40.196641164 +0000 UTC m=+4133.956979132 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/6a566db2-a941-48c9-9169-8e2c16cda1ac-internal-tls-certs") pod "neutron-78bcff9576-76n9h" (UID: "6a566db2-a941-48c9-9169-8e2c16cda1ac") : secret "cert-neutron-internal-svc" not found Jan 20 17:50:39 crc kubenswrapper[4558]: I0120 17:50:39.734778 4558 generic.go:334] "Generic (PLEG): container finished" podID="317faea5-3184-4d0c-a9e5-7f038e779e08" containerID="108f827cc83a5bd00deb8be0ea3d91e1817061559a969d618b8baa66e1908eb5" exitCode=0 Jan 20 17:50:39 crc kubenswrapper[4558]: I0120 17:50:39.734853 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-42qnz" event={"ID":"317faea5-3184-4d0c-a9e5-7f038e779e08","Type":"ContainerDied","Data":"108f827cc83a5bd00deb8be0ea3d91e1817061559a969d618b8baa66e1908eb5"} Jan 20 17:50:40 crc kubenswrapper[4558]: E0120 17:50:40.208699 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-neutron-internal-svc: secret "cert-neutron-internal-svc" not found Jan 20 17:50:40 crc kubenswrapper[4558]: E0120 17:50:40.209221 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6a566db2-a941-48c9-9169-8e2c16cda1ac-internal-tls-certs podName:6a566db2-a941-48c9-9169-8e2c16cda1ac nodeName:}" failed. No retries permitted until 2026-01-20 17:50:41.20917993 +0000 UTC m=+4134.969517897 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/6a566db2-a941-48c9-9169-8e2c16cda1ac-internal-tls-certs") pod "neutron-78bcff9576-76n9h" (UID: "6a566db2-a941-48c9-9169-8e2c16cda1ac") : secret "cert-neutron-internal-svc" not found Jan 20 17:50:40 crc kubenswrapper[4558]: E0120 17:50:40.209293 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-neutron-public-svc: secret "cert-neutron-public-svc" not found Jan 20 17:50:40 crc kubenswrapper[4558]: E0120 17:50:40.209346 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6a566db2-a941-48c9-9169-8e2c16cda1ac-public-tls-certs podName:6a566db2-a941-48c9-9169-8e2c16cda1ac nodeName:}" failed. No retries permitted until 2026-01-20 17:50:41.209328329 +0000 UTC m=+4134.969666296 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/6a566db2-a941-48c9-9169-8e2c16cda1ac-public-tls-certs") pod "neutron-78bcff9576-76n9h" (UID: "6a566db2-a941-48c9-9169-8e2c16cda1ac") : secret "cert-neutron-public-svc" not found Jan 20 17:50:40 crc kubenswrapper[4558]: E0120 17:50:40.209542 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-neutron-ovndbs: secret "cert-neutron-ovndbs" not found Jan 20 17:50:40 crc kubenswrapper[4558]: E0120 17:50:40.209587 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6a566db2-a941-48c9-9169-8e2c16cda1ac-ovndb-tls-certs podName:6a566db2-a941-48c9-9169-8e2c16cda1ac nodeName:}" failed. No retries permitted until 2026-01-20 17:50:41.209571386 +0000 UTC m=+4134.969909353 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "ovndb-tls-certs" (UniqueName: "kubernetes.io/secret/6a566db2-a941-48c9-9169-8e2c16cda1ac-ovndb-tls-certs") pod "neutron-78bcff9576-76n9h" (UID: "6a566db2-a941-48c9-9169-8e2c16cda1ac") : secret "cert-neutron-ovndbs" not found Jan 20 17:50:40 crc kubenswrapper[4558]: I0120 17:50:40.618777 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/06db8474-04b0-4525-90f0-c066ecbac0ae-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"06db8474-04b0-4525-90f0-c066ecbac0ae\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:50:40 crc kubenswrapper[4558]: E0120 17:50:40.618985 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-ceilometer-internal-svc: secret "cert-ceilometer-internal-svc" not found Jan 20 17:50:40 crc kubenswrapper[4558]: E0120 17:50:40.619296 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/06db8474-04b0-4525-90f0-c066ecbac0ae-ceilometer-tls-certs podName:06db8474-04b0-4525-90f0-c066ecbac0ae nodeName:}" failed. No retries permitted until 2026-01-20 17:50:48.619276506 +0000 UTC m=+4142.379614473 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "ceilometer-tls-certs" (UniqueName: "kubernetes.io/secret/06db8474-04b0-4525-90f0-c066ecbac0ae-ceilometer-tls-certs") pod "ceilometer-0" (UID: "06db8474-04b0-4525-90f0-c066ecbac0ae") : secret "cert-ceilometer-internal-svc" not found Jan 20 17:50:40 crc kubenswrapper[4558]: I0120 17:50:40.748467 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-42qnz" event={"ID":"317faea5-3184-4d0c-a9e5-7f038e779e08","Type":"ContainerStarted","Data":"2baf4f2136d18e4952d5247b3830cdd80252007fcbbfd6cab54cf44c3bb61971"} Jan 20 17:50:40 crc kubenswrapper[4558]: I0120 17:50:40.750277 4558 generic.go:334] "Generic (PLEG): container finished" podID="2bf2d1ac-2610-4caf-a3dc-eb6ef3d260a4" containerID="1b8de3585535a4e7fb57dcc46de5f5105fc40133c5f4639db15d5125b4f810e1" exitCode=0 Jan 20 17:50:40 crc kubenswrapper[4558]: I0120 17:50:40.750334 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8c8mp" event={"ID":"2bf2d1ac-2610-4caf-a3dc-eb6ef3d260a4","Type":"ContainerDied","Data":"1b8de3585535a4e7fb57dcc46de5f5105fc40133c5f4639db15d5125b4f810e1"} Jan 20 17:50:40 crc kubenswrapper[4558]: I0120 17:50:40.774131 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-42qnz" podStartSLOduration=2.153686037 podStartE2EDuration="4.774107817s" podCreationTimestamp="2026-01-20 17:50:36 +0000 UTC" firstStartedPulling="2026-01-20 17:50:37.707269265 +0000 UTC m=+4131.467607222" lastFinishedPulling="2026-01-20 17:50:40.327691035 +0000 UTC m=+4134.088029002" observedRunningTime="2026-01-20 17:50:40.764960576 +0000 UTC m=+4134.525298534" watchObservedRunningTime="2026-01-20 17:50:40.774107817 +0000 UTC m=+4134.534445785" Jan 20 17:50:41 crc kubenswrapper[4558]: E0120 17:50:41.232831 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-neutron-internal-svc: secret "cert-neutron-internal-svc" not found Jan 20 17:50:41 crc kubenswrapper[4558]: E0120 17:50:41.232948 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6a566db2-a941-48c9-9169-8e2c16cda1ac-internal-tls-certs podName:6a566db2-a941-48c9-9169-8e2c16cda1ac nodeName:}" failed. No retries permitted until 2026-01-20 17:50:43.232924986 +0000 UTC m=+4136.993262953 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/6a566db2-a941-48c9-9169-8e2c16cda1ac-internal-tls-certs") pod "neutron-78bcff9576-76n9h" (UID: "6a566db2-a941-48c9-9169-8e2c16cda1ac") : secret "cert-neutron-internal-svc" not found Jan 20 17:50:41 crc kubenswrapper[4558]: E0120 17:50:41.233010 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-neutron-ovndbs: secret "cert-neutron-ovndbs" not found Jan 20 17:50:41 crc kubenswrapper[4558]: E0120 17:50:41.233058 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6a566db2-a941-48c9-9169-8e2c16cda1ac-ovndb-tls-certs podName:6a566db2-a941-48c9-9169-8e2c16cda1ac nodeName:}" failed. No retries permitted until 2026-01-20 17:50:43.233047857 +0000 UTC m=+4136.993385825 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "ovndb-tls-certs" (UniqueName: "kubernetes.io/secret/6a566db2-a941-48c9-9169-8e2c16cda1ac-ovndb-tls-certs") pod "neutron-78bcff9576-76n9h" (UID: "6a566db2-a941-48c9-9169-8e2c16cda1ac") : secret "cert-neutron-ovndbs" not found Jan 20 17:50:41 crc kubenswrapper[4558]: E0120 17:50:41.233371 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-neutron-public-svc: secret "cert-neutron-public-svc" not found Jan 20 17:50:41 crc kubenswrapper[4558]: E0120 17:50:41.233511 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6a566db2-a941-48c9-9169-8e2c16cda1ac-public-tls-certs podName:6a566db2-a941-48c9-9169-8e2c16cda1ac nodeName:}" failed. No retries permitted until 2026-01-20 17:50:43.23349101 +0000 UTC m=+4136.993828978 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/6a566db2-a941-48c9-9169-8e2c16cda1ac-public-tls-certs") pod "neutron-78bcff9576-76n9h" (UID: "6a566db2-a941-48c9-9169-8e2c16cda1ac") : secret "cert-neutron-public-svc" not found Jan 20 17:50:41 crc kubenswrapper[4558]: I0120 17:50:41.313231 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:50:41 crc kubenswrapper[4558]: I0120 17:50:41.313291 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:50:41 crc kubenswrapper[4558]: I0120 17:50:41.766185 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8c8mp" event={"ID":"2bf2d1ac-2610-4caf-a3dc-eb6ef3d260a4","Type":"ContainerStarted","Data":"19be01bcba053b0236e2ef0ae72c04da3fbabf3eacd020419e6616c5fca9ace1"} Jan 20 17:50:41 crc kubenswrapper[4558]: I0120 17:50:41.789419 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-8c8mp" podStartSLOduration=3.231473597 podStartE2EDuration="6.789398957s" podCreationTimestamp="2026-01-20 17:50:35 +0000 UTC" firstStartedPulling="2026-01-20 17:50:37.70796331 +0000 UTC m=+4131.468301278" lastFinishedPulling="2026-01-20 17:50:41.265888671 +0000 UTC m=+4135.026226638" observedRunningTime="2026-01-20 17:50:41.78402927 +0000 UTC m=+4135.544367237" watchObservedRunningTime="2026-01-20 17:50:41.789398957 +0000 UTC m=+4135.549736924" Jan 20 17:50:42 crc kubenswrapper[4558]: I0120 17:50:42.327321 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-metadata-0" podUID="acb2f7f3-f6d0-4b4f-936f-baee3dfa3938" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.1.192:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:50:42 crc kubenswrapper[4558]: I0120 17:50:42.327344 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-metadata-0" podUID="acb2f7f3-f6d0-4b4f-936f-baee3dfa3938" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.1.192:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:50:42 crc kubenswrapper[4558]: I0120 17:50:42.365125 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/38ce22a3-0b53-417f-ab78-77a3be2da04f-internal-tls-certs\") pod \"placement-9cbb9c58b-jcwzc\" (UID: \"38ce22a3-0b53-417f-ab78-77a3be2da04f\") " 
pod="openstack-kuttl-tests/placement-9cbb9c58b-jcwzc" Jan 20 17:50:42 crc kubenswrapper[4558]: I0120 17:50:42.365370 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0a3695d2-9733-430d-a489-694e9e97b586-public-tls-certs\") pod \"keystone-9795586d8-4dv95\" (UID: \"0a3695d2-9733-430d-a489-694e9e97b586\") " pod="openstack-kuttl-tests/keystone-9795586d8-4dv95" Jan 20 17:50:42 crc kubenswrapper[4558]: I0120 17:50:42.365405 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d127fb40-6eff-4ccd-922c-b209ac4edbda-public-tls-certs\") pod \"barbican-api-5968cb86f6-ctfg5\" (UID: \"d127fb40-6eff-4ccd-922c-b209ac4edbda\") " pod="openstack-kuttl-tests/barbican-api-5968cb86f6-ctfg5" Jan 20 17:50:42 crc kubenswrapper[4558]: I0120 17:50:42.365465 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0a3695d2-9733-430d-a489-694e9e97b586-internal-tls-certs\") pod \"keystone-9795586d8-4dv95\" (UID: \"0a3695d2-9733-430d-a489-694e9e97b586\") " pod="openstack-kuttl-tests/keystone-9795586d8-4dv95" Jan 20 17:50:42 crc kubenswrapper[4558]: I0120 17:50:42.365508 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/38ce22a3-0b53-417f-ab78-77a3be2da04f-public-tls-certs\") pod \"placement-9cbb9c58b-jcwzc\" (UID: \"38ce22a3-0b53-417f-ab78-77a3be2da04f\") " pod="openstack-kuttl-tests/placement-9cbb9c58b-jcwzc" Jan 20 17:50:42 crc kubenswrapper[4558]: I0120 17:50:42.365562 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-public-tls-certs\") pod \"neutron-7b9876b66d-znq49\" (UID: \"7458929c-bc4e-4f17-b199-4963b298f4e8\") " pod="openstack-kuttl-tests/neutron-7b9876b66d-znq49" Jan 20 17:50:42 crc kubenswrapper[4558]: I0120 17:50:42.365586 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-ovndb-tls-certs\") pod \"neutron-7b9876b66d-znq49\" (UID: \"7458929c-bc4e-4f17-b199-4963b298f4e8\") " pod="openstack-kuttl-tests/neutron-7b9876b66d-znq49" Jan 20 17:50:42 crc kubenswrapper[4558]: I0120 17:50:42.365671 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-internal-tls-certs\") pod \"neutron-7b9876b66d-znq49\" (UID: \"7458929c-bc4e-4f17-b199-4963b298f4e8\") " pod="openstack-kuttl-tests/neutron-7b9876b66d-znq49" Jan 20 17:50:42 crc kubenswrapper[4558]: I0120 17:50:42.365749 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d127fb40-6eff-4ccd-922c-b209ac4edbda-internal-tls-certs\") pod \"barbican-api-5968cb86f6-ctfg5\" (UID: \"d127fb40-6eff-4ccd-922c-b209ac4edbda\") " pod="openstack-kuttl-tests/barbican-api-5968cb86f6-ctfg5" Jan 20 17:50:42 crc kubenswrapper[4558]: E0120 17:50:42.365672 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-barbican-public-svc: secret "cert-barbican-public-svc" not found Jan 20 17:50:42 crc kubenswrapper[4558]: E0120 17:50:42.365951 4558 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/secret/d127fb40-6eff-4ccd-922c-b209ac4edbda-public-tls-certs podName:d127fb40-6eff-4ccd-922c-b209ac4edbda nodeName:}" failed. No retries permitted until 2026-01-20 17:50:58.365932196 +0000 UTC m=+4152.126270164 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/d127fb40-6eff-4ccd-922c-b209ac4edbda-public-tls-certs") pod "barbican-api-5968cb86f6-ctfg5" (UID: "d127fb40-6eff-4ccd-922c-b209ac4edbda") : secret "cert-barbican-public-svc" not found Jan 20 17:50:42 crc kubenswrapper[4558]: E0120 17:50:42.365741 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-placement-public-svc: secret "cert-placement-public-svc" not found Jan 20 17:50:42 crc kubenswrapper[4558]: E0120 17:50:42.366008 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/38ce22a3-0b53-417f-ab78-77a3be2da04f-public-tls-certs podName:38ce22a3-0b53-417f-ab78-77a3be2da04f nodeName:}" failed. No retries permitted until 2026-01-20 17:50:58.36599792 +0000 UTC m=+4152.126335887 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/38ce22a3-0b53-417f-ab78-77a3be2da04f-public-tls-certs") pod "placement-9cbb9c58b-jcwzc" (UID: "38ce22a3-0b53-417f-ab78-77a3be2da04f") : secret "cert-placement-public-svc" not found Jan 20 17:50:42 crc kubenswrapper[4558]: E0120 17:50:42.365951 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-barbican-internal-svc: secret "cert-barbican-internal-svc" not found Jan 20 17:50:42 crc kubenswrapper[4558]: E0120 17:50:42.366048 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d127fb40-6eff-4ccd-922c-b209ac4edbda-internal-tls-certs podName:d127fb40-6eff-4ccd-922c-b209ac4edbda nodeName:}" failed. No retries permitted until 2026-01-20 17:50:58.366041402 +0000 UTC m=+4152.126379370 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/d127fb40-6eff-4ccd-922c-b209ac4edbda-internal-tls-certs") pod "barbican-api-5968cb86f6-ctfg5" (UID: "d127fb40-6eff-4ccd-922c-b209ac4edbda") : secret "cert-barbican-internal-svc" not found Jan 20 17:50:42 crc kubenswrapper[4558]: E0120 17:50:42.365792 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-neutron-ovndbs: secret "cert-neutron-ovndbs" not found Jan 20 17:50:42 crc kubenswrapper[4558]: E0120 17:50:42.366082 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-ovndb-tls-certs podName:7458929c-bc4e-4f17-b199-4963b298f4e8 nodeName:}" failed. No retries permitted until 2026-01-20 17:50:58.366072882 +0000 UTC m=+4152.126410848 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "ovndb-tls-certs" (UniqueName: "kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-ovndb-tls-certs") pod "neutron-7b9876b66d-znq49" (UID: "7458929c-bc4e-4f17-b199-4963b298f4e8") : secret "cert-neutron-ovndbs" not found Jan 20 17:50:42 crc kubenswrapper[4558]: E0120 17:50:42.365829 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-neutron-public-svc: secret "cert-neutron-public-svc" not found Jan 20 17:50:42 crc kubenswrapper[4558]: E0120 17:50:42.366112 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-public-tls-certs podName:7458929c-bc4e-4f17-b199-4963b298f4e8 nodeName:}" failed. No retries permitted until 2026-01-20 17:50:58.366103208 +0000 UTC m=+4152.126441175 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-public-tls-certs") pod "neutron-7b9876b66d-znq49" (UID: "7458929c-bc4e-4f17-b199-4963b298f4e8") : secret "cert-neutron-public-svc" not found Jan 20 17:50:42 crc kubenswrapper[4558]: E0120 17:50:42.365870 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-neutron-internal-svc: secret "cert-neutron-internal-svc" not found Jan 20 17:50:42 crc kubenswrapper[4558]: E0120 17:50:42.366137 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-internal-tls-certs podName:7458929c-bc4e-4f17-b199-4963b298f4e8 nodeName:}" failed. No retries permitted until 2026-01-20 17:50:58.366132884 +0000 UTC m=+4152.126470851 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-internal-tls-certs") pod "neutron-7b9876b66d-znq49" (UID: "7458929c-bc4e-4f17-b199-4963b298f4e8") : secret "cert-neutron-internal-svc" not found Jan 20 17:50:42 crc kubenswrapper[4558]: E0120 17:50:42.366291 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-keystone-internal-svc: secret "cert-keystone-internal-svc" not found Jan 20 17:50:42 crc kubenswrapper[4558]: E0120 17:50:42.367218 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0a3695d2-9733-430d-a489-694e9e97b586-internal-tls-certs podName:0a3695d2-9733-430d-a489-694e9e97b586 nodeName:}" failed. No retries permitted until 2026-01-20 17:50:58.366373767 +0000 UTC m=+4152.126711733 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/0a3695d2-9733-430d-a489-694e9e97b586-internal-tls-certs") pod "keystone-9795586d8-4dv95" (UID: "0a3695d2-9733-430d-a489-694e9e97b586") : secret "cert-keystone-internal-svc" not found Jan 20 17:50:42 crc kubenswrapper[4558]: I0120 17:50:42.373523 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/38ce22a3-0b53-417f-ab78-77a3be2da04f-internal-tls-certs\") pod \"placement-9cbb9c58b-jcwzc\" (UID: \"38ce22a3-0b53-417f-ab78-77a3be2da04f\") " pod="openstack-kuttl-tests/placement-9cbb9c58b-jcwzc" Jan 20 17:50:42 crc kubenswrapper[4558]: I0120 17:50:42.375433 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0a3695d2-9733-430d-a489-694e9e97b586-public-tls-certs\") pod \"keystone-9795586d8-4dv95\" (UID: \"0a3695d2-9733-430d-a489-694e9e97b586\") " pod="openstack-kuttl-tests/keystone-9795586d8-4dv95" Jan 20 17:50:42 crc kubenswrapper[4558]: I0120 17:50:42.462574 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:50:42 crc kubenswrapper[4558]: I0120 17:50:42.496947 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:50:42 crc kubenswrapper[4558]: I0120 17:50:42.804735 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:50:43 crc kubenswrapper[4558]: I0120 17:50:43.051999 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-9cbb9c58b-jcwzc"] Jan 20 17:50:43 crc kubenswrapper[4558]: E0120 17:50:43.053004 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[public-tls-certs], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack-kuttl-tests/placement-9cbb9c58b-jcwzc" podUID="38ce22a3-0b53-417f-ab78-77a3be2da04f" Jan 20 17:50:43 crc kubenswrapper[4558]: I0120 17:50:43.077555 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/placement-764f5f95dd-qwm2f"] Jan 20 17:50:43 crc kubenswrapper[4558]: I0120 17:50:43.079060 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-764f5f95dd-qwm2f" Jan 20 17:50:43 crc kubenswrapper[4558]: I0120 17:50:43.090817 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-764f5f95dd-qwm2f"] Jan 20 17:50:43 crc kubenswrapper[4558]: I0120 17:50:43.188647 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tggtq\" (UniqueName: \"kubernetes.io/projected/f955e7e5-de96-4598-96b9-76573c42d8e5-kube-api-access-tggtq\") pod \"placement-764f5f95dd-qwm2f\" (UID: \"f955e7e5-de96-4598-96b9-76573c42d8e5\") " pod="openstack-kuttl-tests/placement-764f5f95dd-qwm2f" Jan 20 17:50:43 crc kubenswrapper[4558]: I0120 17:50:43.188783 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f955e7e5-de96-4598-96b9-76573c42d8e5-public-tls-certs\") pod \"placement-764f5f95dd-qwm2f\" (UID: \"f955e7e5-de96-4598-96b9-76573c42d8e5\") " pod="openstack-kuttl-tests/placement-764f5f95dd-qwm2f" Jan 20 17:50:43 crc kubenswrapper[4558]: I0120 17:50:43.188878 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f955e7e5-de96-4598-96b9-76573c42d8e5-scripts\") pod \"placement-764f5f95dd-qwm2f\" (UID: \"f955e7e5-de96-4598-96b9-76573c42d8e5\") " pod="openstack-kuttl-tests/placement-764f5f95dd-qwm2f" Jan 20 17:50:43 crc kubenswrapper[4558]: I0120 17:50:43.188967 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f955e7e5-de96-4598-96b9-76573c42d8e5-config-data\") pod \"placement-764f5f95dd-qwm2f\" (UID: \"f955e7e5-de96-4598-96b9-76573c42d8e5\") " pod="openstack-kuttl-tests/placement-764f5f95dd-qwm2f" Jan 20 17:50:43 crc kubenswrapper[4558]: I0120 17:50:43.189039 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f955e7e5-de96-4598-96b9-76573c42d8e5-combined-ca-bundle\") pod \"placement-764f5f95dd-qwm2f\" (UID: \"f955e7e5-de96-4598-96b9-76573c42d8e5\") " pod="openstack-kuttl-tests/placement-764f5f95dd-qwm2f" Jan 20 17:50:43 crc kubenswrapper[4558]: I0120 17:50:43.189087 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f955e7e5-de96-4598-96b9-76573c42d8e5-internal-tls-certs\") pod \"placement-764f5f95dd-qwm2f\" (UID: \"f955e7e5-de96-4598-96b9-76573c42d8e5\") " pod="openstack-kuttl-tests/placement-764f5f95dd-qwm2f" Jan 20 17:50:43 crc kubenswrapper[4558]: I0120 17:50:43.189199 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f955e7e5-de96-4598-96b9-76573c42d8e5-logs\") pod \"placement-764f5f95dd-qwm2f\" (UID: \"f955e7e5-de96-4598-96b9-76573c42d8e5\") " pod="openstack-kuttl-tests/placement-764f5f95dd-qwm2f" Jan 20 17:50:43 crc kubenswrapper[4558]: I0120 17:50:43.290700 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f955e7e5-de96-4598-96b9-76573c42d8e5-public-tls-certs\") pod \"placement-764f5f95dd-qwm2f\" (UID: \"f955e7e5-de96-4598-96b9-76573c42d8e5\") " pod="openstack-kuttl-tests/placement-764f5f95dd-qwm2f" Jan 20 
17:50:43 crc kubenswrapper[4558]: I0120 17:50:43.290805 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f955e7e5-de96-4598-96b9-76573c42d8e5-scripts\") pod \"placement-764f5f95dd-qwm2f\" (UID: \"f955e7e5-de96-4598-96b9-76573c42d8e5\") " pod="openstack-kuttl-tests/placement-764f5f95dd-qwm2f" Jan 20 17:50:43 crc kubenswrapper[4558]: I0120 17:50:43.290914 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f955e7e5-de96-4598-96b9-76573c42d8e5-config-data\") pod \"placement-764f5f95dd-qwm2f\" (UID: \"f955e7e5-de96-4598-96b9-76573c42d8e5\") " pod="openstack-kuttl-tests/placement-764f5f95dd-qwm2f" Jan 20 17:50:43 crc kubenswrapper[4558]: I0120 17:50:43.291014 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f955e7e5-de96-4598-96b9-76573c42d8e5-combined-ca-bundle\") pod \"placement-764f5f95dd-qwm2f\" (UID: \"f955e7e5-de96-4598-96b9-76573c42d8e5\") " pod="openstack-kuttl-tests/placement-764f5f95dd-qwm2f" Jan 20 17:50:43 crc kubenswrapper[4558]: E0120 17:50:43.291025 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-neutron-internal-svc: secret "cert-neutron-internal-svc" not found Jan 20 17:50:43 crc kubenswrapper[4558]: I0120 17:50:43.291062 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f955e7e5-de96-4598-96b9-76573c42d8e5-internal-tls-certs\") pod \"placement-764f5f95dd-qwm2f\" (UID: \"f955e7e5-de96-4598-96b9-76573c42d8e5\") " pod="openstack-kuttl-tests/placement-764f5f95dd-qwm2f" Jan 20 17:50:43 crc kubenswrapper[4558]: E0120 17:50:43.291131 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6a566db2-a941-48c9-9169-8e2c16cda1ac-internal-tls-certs podName:6a566db2-a941-48c9-9169-8e2c16cda1ac nodeName:}" failed. No retries permitted until 2026-01-20 17:50:47.29111027 +0000 UTC m=+4141.051448237 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/6a566db2-a941-48c9-9169-8e2c16cda1ac-internal-tls-certs") pod "neutron-78bcff9576-76n9h" (UID: "6a566db2-a941-48c9-9169-8e2c16cda1ac") : secret "cert-neutron-internal-svc" not found Jan 20 17:50:43 crc kubenswrapper[4558]: E0120 17:50:43.291181 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-neutron-public-svc: secret "cert-neutron-public-svc" not found Jan 20 17:50:43 crc kubenswrapper[4558]: E0120 17:50:43.291261 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-neutron-ovndbs: secret "cert-neutron-ovndbs" not found Jan 20 17:50:43 crc kubenswrapper[4558]: E0120 17:50:43.291290 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6a566db2-a941-48c9-9169-8e2c16cda1ac-public-tls-certs podName:6a566db2-a941-48c9-9169-8e2c16cda1ac nodeName:}" failed. No retries permitted until 2026-01-20 17:50:47.291262967 +0000 UTC m=+4141.051600934 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/6a566db2-a941-48c9-9169-8e2c16cda1ac-public-tls-certs") pod "neutron-78bcff9576-76n9h" (UID: "6a566db2-a941-48c9-9169-8e2c16cda1ac") : secret "cert-neutron-public-svc" not found Jan 20 17:50:43 crc kubenswrapper[4558]: I0120 17:50:43.291393 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f955e7e5-de96-4598-96b9-76573c42d8e5-logs\") pod \"placement-764f5f95dd-qwm2f\" (UID: \"f955e7e5-de96-4598-96b9-76573c42d8e5\") " pod="openstack-kuttl-tests/placement-764f5f95dd-qwm2f" Jan 20 17:50:43 crc kubenswrapper[4558]: E0120 17:50:43.291565 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6a566db2-a941-48c9-9169-8e2c16cda1ac-ovndb-tls-certs podName:6a566db2-a941-48c9-9169-8e2c16cda1ac nodeName:}" failed. No retries permitted until 2026-01-20 17:50:47.291501486 +0000 UTC m=+4141.051839453 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "ovndb-tls-certs" (UniqueName: "kubernetes.io/secret/6a566db2-a941-48c9-9169-8e2c16cda1ac-ovndb-tls-certs") pod "neutron-78bcff9576-76n9h" (UID: "6a566db2-a941-48c9-9169-8e2c16cda1ac") : secret "cert-neutron-ovndbs" not found Jan 20 17:50:43 crc kubenswrapper[4558]: I0120 17:50:43.291957 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tggtq\" (UniqueName: \"kubernetes.io/projected/f955e7e5-de96-4598-96b9-76573c42d8e5-kube-api-access-tggtq\") pod \"placement-764f5f95dd-qwm2f\" (UID: \"f955e7e5-de96-4598-96b9-76573c42d8e5\") " pod="openstack-kuttl-tests/placement-764f5f95dd-qwm2f" Jan 20 17:50:43 crc kubenswrapper[4558]: I0120 17:50:43.292019 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f955e7e5-de96-4598-96b9-76573c42d8e5-logs\") pod \"placement-764f5f95dd-qwm2f\" (UID: \"f955e7e5-de96-4598-96b9-76573c42d8e5\") " pod="openstack-kuttl-tests/placement-764f5f95dd-qwm2f" Jan 20 17:50:43 crc kubenswrapper[4558]: I0120 17:50:43.297877 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f955e7e5-de96-4598-96b9-76573c42d8e5-internal-tls-certs\") pod \"placement-764f5f95dd-qwm2f\" (UID: \"f955e7e5-de96-4598-96b9-76573c42d8e5\") " pod="openstack-kuttl-tests/placement-764f5f95dd-qwm2f" Jan 20 17:50:43 crc kubenswrapper[4558]: I0120 17:50:43.299049 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f955e7e5-de96-4598-96b9-76573c42d8e5-config-data\") pod \"placement-764f5f95dd-qwm2f\" (UID: \"f955e7e5-de96-4598-96b9-76573c42d8e5\") " pod="openstack-kuttl-tests/placement-764f5f95dd-qwm2f" Jan 20 17:50:43 crc kubenswrapper[4558]: I0120 17:50:43.299057 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f955e7e5-de96-4598-96b9-76573c42d8e5-scripts\") pod \"placement-764f5f95dd-qwm2f\" (UID: \"f955e7e5-de96-4598-96b9-76573c42d8e5\") " pod="openstack-kuttl-tests/placement-764f5f95dd-qwm2f" Jan 20 17:50:43 crc kubenswrapper[4558]: I0120 17:50:43.305632 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f955e7e5-de96-4598-96b9-76573c42d8e5-combined-ca-bundle\") pod \"placement-764f5f95dd-qwm2f\" (UID: 
\"f955e7e5-de96-4598-96b9-76573c42d8e5\") " pod="openstack-kuttl-tests/placement-764f5f95dd-qwm2f" Jan 20 17:50:43 crc kubenswrapper[4558]: I0120 17:50:43.307938 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f955e7e5-de96-4598-96b9-76573c42d8e5-public-tls-certs\") pod \"placement-764f5f95dd-qwm2f\" (UID: \"f955e7e5-de96-4598-96b9-76573c42d8e5\") " pod="openstack-kuttl-tests/placement-764f5f95dd-qwm2f" Jan 20 17:50:43 crc kubenswrapper[4558]: I0120 17:50:43.310351 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tggtq\" (UniqueName: \"kubernetes.io/projected/f955e7e5-de96-4598-96b9-76573c42d8e5-kube-api-access-tggtq\") pod \"placement-764f5f95dd-qwm2f\" (UID: \"f955e7e5-de96-4598-96b9-76573c42d8e5\") " pod="openstack-kuttl-tests/placement-764f5f95dd-qwm2f" Jan 20 17:50:43 crc kubenswrapper[4558]: I0120 17:50:43.394063 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:50:43 crc kubenswrapper[4558]: I0120 17:50:43.394206 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:50:43 crc kubenswrapper[4558]: E0120 17:50:43.394337 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-ovn-metrics: secret "cert-ovn-metrics" not found Jan 20 17:50:43 crc kubenswrapper[4558]: E0120 17:50:43.394452 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-metrics-certs-tls-certs podName:bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162 nodeName:}" failed. No retries permitted until 2026-01-20 17:50:59.394436572 +0000 UTC m=+4153.154774539 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "metrics-certs-tls-certs" (UniqueName: "kubernetes.io/secret/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-metrics-certs-tls-certs") pod "ovsdbserver-nb-0" (UID: "bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162") : secret "cert-ovn-metrics" not found Jan 20 17:50:43 crc kubenswrapper[4558]: E0120 17:50:43.394344 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-ovndbcluster-nb-ovndbs: secret "cert-ovndbcluster-nb-ovndbs" not found Jan 20 17:50:43 crc kubenswrapper[4558]: E0120 17:50:43.394609 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-ovsdbserver-nb-tls-certs podName:bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162 nodeName:}" failed. No retries permitted until 2026-01-20 17:50:59.394600961 +0000 UTC m=+4153.154938928 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "ovsdbserver-nb-tls-certs" (UniqueName: "kubernetes.io/secret/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-ovsdbserver-nb-tls-certs") pod "ovsdbserver-nb-0" (UID: "bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162") : secret "cert-ovndbcluster-nb-ovndbs" not found Jan 20 17:50:43 crc kubenswrapper[4558]: I0120 17:50:43.399098 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-764f5f95dd-qwm2f" Jan 20 17:50:43 crc kubenswrapper[4558]: I0120 17:50:43.793050 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-9cbb9c58b-jcwzc" Jan 20 17:50:43 crc kubenswrapper[4558]: I0120 17:50:43.808492 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-9cbb9c58b-jcwzc" Jan 20 17:50:43 crc kubenswrapper[4558]: I0120 17:50:43.844712 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-764f5f95dd-qwm2f"] Jan 20 17:50:43 crc kubenswrapper[4558]: W0120 17:50:43.848785 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf955e7e5_de96_4598_96b9_76573c42d8e5.slice/crio-d886adc55790c399dc46c571a58d19fce02aa9106ba0742781f5a78e49c66465 WatchSource:0}: Error finding container d886adc55790c399dc46c571a58d19fce02aa9106ba0742781f5a78e49c66465: Status 404 returned error can't find the container with id d886adc55790c399dc46c571a58d19fce02aa9106ba0742781f5a78e49c66465 Jan 20 17:50:43 crc kubenswrapper[4558]: I0120 17:50:43.907030 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/38ce22a3-0b53-417f-ab78-77a3be2da04f-scripts\") pod \"38ce22a3-0b53-417f-ab78-77a3be2da04f\" (UID: \"38ce22a3-0b53-417f-ab78-77a3be2da04f\") " Jan 20 17:50:43 crc kubenswrapper[4558]: I0120 17:50:43.907523 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ztk6g\" (UniqueName: \"kubernetes.io/projected/38ce22a3-0b53-417f-ab78-77a3be2da04f-kube-api-access-ztk6g\") pod \"38ce22a3-0b53-417f-ab78-77a3be2da04f\" (UID: \"38ce22a3-0b53-417f-ab78-77a3be2da04f\") " Jan 20 17:50:43 crc kubenswrapper[4558]: I0120 17:50:43.907592 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/38ce22a3-0b53-417f-ab78-77a3be2da04f-internal-tls-certs\") pod \"38ce22a3-0b53-417f-ab78-77a3be2da04f\" (UID: \"38ce22a3-0b53-417f-ab78-77a3be2da04f\") " Jan 20 17:50:43 crc kubenswrapper[4558]: I0120 17:50:43.907615 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/38ce22a3-0b53-417f-ab78-77a3be2da04f-logs\") pod \"38ce22a3-0b53-417f-ab78-77a3be2da04f\" (UID: \"38ce22a3-0b53-417f-ab78-77a3be2da04f\") " Jan 20 17:50:43 crc kubenswrapper[4558]: I0120 17:50:43.907654 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38ce22a3-0b53-417f-ab78-77a3be2da04f-combined-ca-bundle\") pod \"38ce22a3-0b53-417f-ab78-77a3be2da04f\" (UID: \"38ce22a3-0b53-417f-ab78-77a3be2da04f\") " Jan 20 17:50:43 crc kubenswrapper[4558]: I0120 17:50:43.907681 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/38ce22a3-0b53-417f-ab78-77a3be2da04f-config-data\") pod \"38ce22a3-0b53-417f-ab78-77a3be2da04f\" (UID: \"38ce22a3-0b53-417f-ab78-77a3be2da04f\") " Jan 20 17:50:43 crc kubenswrapper[4558]: I0120 17:50:43.908008 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/38ce22a3-0b53-417f-ab78-77a3be2da04f-logs" (OuterVolumeSpecName: "logs") pod 
"38ce22a3-0b53-417f-ab78-77a3be2da04f" (UID: "38ce22a3-0b53-417f-ab78-77a3be2da04f"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:50:43 crc kubenswrapper[4558]: I0120 17:50:43.909136 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/38ce22a3-0b53-417f-ab78-77a3be2da04f-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:43 crc kubenswrapper[4558]: I0120 17:50:43.912231 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/38ce22a3-0b53-417f-ab78-77a3be2da04f-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "38ce22a3-0b53-417f-ab78-77a3be2da04f" (UID: "38ce22a3-0b53-417f-ab78-77a3be2da04f"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:43 crc kubenswrapper[4558]: I0120 17:50:43.913335 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/38ce22a3-0b53-417f-ab78-77a3be2da04f-scripts" (OuterVolumeSpecName: "scripts") pod "38ce22a3-0b53-417f-ab78-77a3be2da04f" (UID: "38ce22a3-0b53-417f-ab78-77a3be2da04f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:43 crc kubenswrapper[4558]: I0120 17:50:43.913381 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/38ce22a3-0b53-417f-ab78-77a3be2da04f-kube-api-access-ztk6g" (OuterVolumeSpecName: "kube-api-access-ztk6g") pod "38ce22a3-0b53-417f-ab78-77a3be2da04f" (UID: "38ce22a3-0b53-417f-ab78-77a3be2da04f"). InnerVolumeSpecName "kube-api-access-ztk6g". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:50:43 crc kubenswrapper[4558]: I0120 17:50:43.913781 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/38ce22a3-0b53-417f-ab78-77a3be2da04f-config-data" (OuterVolumeSpecName: "config-data") pod "38ce22a3-0b53-417f-ab78-77a3be2da04f" (UID: "38ce22a3-0b53-417f-ab78-77a3be2da04f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:43 crc kubenswrapper[4558]: I0120 17:50:43.913856 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/38ce22a3-0b53-417f-ab78-77a3be2da04f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "38ce22a3-0b53-417f-ab78-77a3be2da04f" (UID: "38ce22a3-0b53-417f-ab78-77a3be2da04f"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:43 crc kubenswrapper[4558]: I0120 17:50:43.985771 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-stgv6" Jan 20 17:50:43 crc kubenswrapper[4558]: I0120 17:50:43.985818 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-stgv6" Jan 20 17:50:44 crc kubenswrapper[4558]: I0120 17:50:44.011950 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/38ce22a3-0b53-417f-ab78-77a3be2da04f-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:44 crc kubenswrapper[4558]: I0120 17:50:44.012001 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ztk6g\" (UniqueName: \"kubernetes.io/projected/38ce22a3-0b53-417f-ab78-77a3be2da04f-kube-api-access-ztk6g\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:44 crc kubenswrapper[4558]: I0120 17:50:44.012015 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/38ce22a3-0b53-417f-ab78-77a3be2da04f-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:44 crc kubenswrapper[4558]: I0120 17:50:44.012028 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38ce22a3-0b53-417f-ab78-77a3be2da04f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:44 crc kubenswrapper[4558]: I0120 17:50:44.012039 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/38ce22a3-0b53-417f-ab78-77a3be2da04f-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:44 crc kubenswrapper[4558]: I0120 17:50:44.024693 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-stgv6" Jan 20 17:50:44 crc kubenswrapper[4558]: I0120 17:50:44.166771 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:50:44 crc kubenswrapper[4558]: I0120 17:50:44.272523 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/openstack-cell1-galera-0" podUID="9d54b98d-10d8-4300-89b7-031984825b5a" containerName="galera" containerID="cri-o://8b80d38684bc886d3672b9eba3adc33250d75483d51261047f570be9beb3ce3c" gracePeriod=30 Jan 20 17:50:44 crc kubenswrapper[4558]: I0120 17:50:44.319138 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/603501da-057d-4d3b-a682-e55e7e2916f4-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"603501da-057d-4d3b-a682-e55e7e2916f4\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:50:44 crc kubenswrapper[4558]: E0120 17:50:44.319337 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-galera-openstack-svc: secret "cert-galera-openstack-svc" not found Jan 20 17:50:44 crc kubenswrapper[4558]: E0120 17:50:44.319424 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/603501da-057d-4d3b-a682-e55e7e2916f4-galera-tls-certs podName:603501da-057d-4d3b-a682-e55e7e2916f4 nodeName:}" failed. No retries permitted until 2026-01-20 17:51:00.31940425 +0000 UTC m=+4154.079742218 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "galera-tls-certs" (UniqueName: "kubernetes.io/secret/603501da-057d-4d3b-a682-e55e7e2916f4-galera-tls-certs") pod "openstack-galera-0" (UID: "603501da-057d-4d3b-a682-e55e7e2916f4") : secret "cert-galera-openstack-svc" not found Jan 20 17:50:44 crc kubenswrapper[4558]: I0120 17:50:44.421880 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/5239f3e2-7f54-4042-b0da-02f7608224df-memcached-tls-certs\") pod \"memcached-0\" (UID: \"5239f3e2-7f54-4042-b0da-02f7608224df\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:50:44 crc kubenswrapper[4558]: I0120 17:50:44.422428 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/fc60cc21-f159-40bf-beca-c62718f57b9c-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"fc60cc21-f159-40bf-beca-c62718f57b9c\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:50:44 crc kubenswrapper[4558]: I0120 17:50:44.422557 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/fc60cc21-f159-40bf-beca-c62718f57b9c-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"fc60cc21-f159-40bf-beca-c62718f57b9c\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:50:44 crc kubenswrapper[4558]: E0120 17:50:44.422609 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-ovn-metrics: secret "cert-ovn-metrics" not found Jan 20 17:50:44 crc kubenswrapper[4558]: E0120 17:50:44.422713 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fc60cc21-f159-40bf-beca-c62718f57b9c-metrics-certs-tls-certs podName:fc60cc21-f159-40bf-beca-c62718f57b9c nodeName:}" failed. No retries permitted until 2026-01-20 17:51:00.422689816 +0000 UTC m=+4154.183027783 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "metrics-certs-tls-certs" (UniqueName: "kubernetes.io/secret/fc60cc21-f159-40bf-beca-c62718f57b9c-metrics-certs-tls-certs") pod "ovsdbserver-sb-0" (UID: "fc60cc21-f159-40bf-beca-c62718f57b9c") : secret "cert-ovn-metrics" not found Jan 20 17:50:44 crc kubenswrapper[4558]: I0120 17:50:44.427656 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/5239f3e2-7f54-4042-b0da-02f7608224df-memcached-tls-certs\") pod \"memcached-0\" (UID: \"5239f3e2-7f54-4042-b0da-02f7608224df\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:50:44 crc kubenswrapper[4558]: I0120 17:50:44.430862 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/fc60cc21-f159-40bf-beca-c62718f57b9c-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"fc60cc21-f159-40bf-beca-c62718f57b9c\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:50:44 crc kubenswrapper[4558]: I0120 17:50:44.511323 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:50:44 crc kubenswrapper[4558]: I0120 17:50:44.524841 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/581c2419-48d9-4b7d-b71b-ce5cceb0326d-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"581c2419-48d9-4b7d-b71b-ce5cceb0326d\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:50:44 crc kubenswrapper[4558]: E0120 17:50:44.525360 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-nova-novncproxy-cell1-public-svc: secret "cert-nova-novncproxy-cell1-public-svc" not found Jan 20 17:50:44 crc kubenswrapper[4558]: E0120 17:50:44.525436 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/581c2419-48d9-4b7d-b71b-ce5cceb0326d-nova-novncproxy-tls-certs podName:581c2419-48d9-4b7d-b71b-ce5cceb0326d nodeName:}" failed. No retries permitted until 2026-01-20 17:51:00.525411561 +0000 UTC m=+4154.285749518 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nova-novncproxy-tls-certs" (UniqueName: "kubernetes.io/secret/581c2419-48d9-4b7d-b71b-ce5cceb0326d-nova-novncproxy-tls-certs") pod "nova-cell1-novncproxy-0" (UID: "581c2419-48d9-4b7d-b71b-ce5cceb0326d") : secret "cert-nova-novncproxy-cell1-public-svc" not found Jan 20 17:50:44 crc kubenswrapper[4558]: I0120 17:50:44.830140 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-764f5f95dd-qwm2f" event={"ID":"f955e7e5-de96-4598-96b9-76573c42d8e5","Type":"ContainerStarted","Data":"aaac3048ac7c85ff990616bebe2afbceb777be9814adb36b9554932f80d8482e"} Jan 20 17:50:44 crc kubenswrapper[4558]: I0120 17:50:44.830982 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-764f5f95dd-qwm2f" event={"ID":"f955e7e5-de96-4598-96b9-76573c42d8e5","Type":"ContainerStarted","Data":"3567a40831ca68954b26040c1b8fd81909b950423881e4378d24290b2d5b462b"} Jan 20 17:50:44 crc kubenswrapper[4558]: I0120 17:50:44.831087 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/placement-764f5f95dd-qwm2f" Jan 20 17:50:44 crc kubenswrapper[4558]: I0120 17:50:44.831151 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/placement-764f5f95dd-qwm2f" Jan 20 17:50:44 crc kubenswrapper[4558]: I0120 17:50:44.831257 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-764f5f95dd-qwm2f" event={"ID":"f955e7e5-de96-4598-96b9-76573c42d8e5","Type":"ContainerStarted","Data":"d886adc55790c399dc46c571a58d19fce02aa9106ba0742781f5a78e49c66465"} Jan 20 17:50:44 crc kubenswrapper[4558]: I0120 17:50:44.831091 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-9cbb9c58b-jcwzc" Jan 20 17:50:44 crc kubenswrapper[4558]: I0120 17:50:44.863550 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/placement-764f5f95dd-qwm2f" podStartSLOduration=1.863528699 podStartE2EDuration="1.863528699s" podCreationTimestamp="2026-01-20 17:50:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:50:44.848215164 +0000 UTC m=+4138.608553130" watchObservedRunningTime="2026-01-20 17:50:44.863528699 +0000 UTC m=+4138.623866666" Jan 20 17:50:44 crc kubenswrapper[4558]: I0120 17:50:44.884626 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-stgv6" Jan 20 17:50:44 crc kubenswrapper[4558]: I0120 17:50:44.899945 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-9cbb9c58b-jcwzc"] Jan 20 17:50:44 crc kubenswrapper[4558]: I0120 17:50:44.908763 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/placement-9cbb9c58b-jcwzc"] Jan 20 17:50:44 crc kubenswrapper[4558]: I0120 17:50:44.932814 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:50:44 crc kubenswrapper[4558]: I0120 17:50:44.943158 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:50:44 crc kubenswrapper[4558]: I0120 17:50:44.944476 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:50:44 crc kubenswrapper[4558]: I0120 17:50:44.977333 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:50:44 crc kubenswrapper[4558]: I0120 17:50:44.983878 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:50:45 crc kubenswrapper[4558]: I0120 17:50:45.038477 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/38ce22a3-0b53-417f-ab78-77a3be2da04f-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:45 crc kubenswrapper[4558]: I0120 17:50:45.838917 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"5239f3e2-7f54-4042-b0da-02f7608224df","Type":"ContainerStarted","Data":"920c89eb591e28b1762567ed35128bdcddc70641d214ce72662434c0af29d721"} Jan 20 17:50:45 crc kubenswrapper[4558]: I0120 17:50:45.839474 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:50:45 crc kubenswrapper[4558]: I0120 17:50:45.839527 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"5239f3e2-7f54-4042-b0da-02f7608224df","Type":"ContainerStarted","Data":"60dddbb52bd9b2712fc9a84c8ab6cfb34a321ccf6b9369800400b23c8b94a06e"} Jan 20 17:50:45 crc kubenswrapper[4558]: I0120 17:50:45.839621 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:50:45 crc kubenswrapper[4558]: I0120 17:50:45.855949 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack-kuttl-tests/memcached-0" podStartSLOduration=17.855908377 podStartE2EDuration="17.855908377s" podCreationTimestamp="2026-01-20 17:50:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:50:45.854303568 +0000 UTC m=+4139.614641536" watchObservedRunningTime="2026-01-20 17:50:45.855908377 +0000 UTC m=+4139.616246344" Jan 20 17:50:46 crc kubenswrapper[4558]: I0120 17:50:46.174591 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-8c8mp" Jan 20 17:50:46 crc kubenswrapper[4558]: I0120 17:50:46.174967 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-8c8mp" Jan 20 17:50:46 crc kubenswrapper[4558]: I0120 17:50:46.373778 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-42qnz" Jan 20 17:50:46 crc kubenswrapper[4558]: I0120 17:50:46.373830 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-42qnz" Jan 20 17:50:46 crc kubenswrapper[4558]: I0120 17:50:46.410533 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-42qnz" Jan 20 17:50:46 crc kubenswrapper[4558]: I0120 17:50:46.577363 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="38ce22a3-0b53-417f-ab78-77a3be2da04f" path="/var/lib/kubelet/pods/38ce22a3-0b53-417f-ab78-77a3be2da04f/volumes" Jan 20 17:50:46 crc kubenswrapper[4558]: I0120 17:50:46.604642 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/cinder-scheduler-0" podUID="51d19369-14ac-4d62-ab05-9c5830856622" containerName="cinder-scheduler" probeResult="failure" output="Get \"http://10.217.1.196:8080/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 20 17:50:46 crc kubenswrapper[4558]: I0120 17:50:46.640897 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-stgv6"] Jan 20 17:50:46 crc kubenswrapper[4558]: I0120 17:50:46.676135 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/18c09849-702b-4939-9cf1-0e06c6adc889-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"18c09849-702b-4939-9cf1-0e06c6adc889\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:50:46 crc kubenswrapper[4558]: E0120 17:50:46.676485 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-glance-default-internal-svc: secret "cert-glance-default-internal-svc" not found Jan 20 17:50:46 crc kubenswrapper[4558]: E0120 17:50:46.676656 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/18c09849-702b-4939-9cf1-0e06c6adc889-internal-tls-certs podName:18c09849-702b-4939-9cf1-0e06c6adc889 nodeName:}" failed. No retries permitted until 2026-01-20 17:51:02.676625905 +0000 UTC m=+4156.436963871 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/18c09849-702b-4939-9cf1-0e06c6adc889-internal-tls-certs") pod "glance-default-internal-api-0" (UID: "18c09849-702b-4939-9cf1-0e06c6adc889") : secret "cert-glance-default-internal-svc" not found Jan 20 17:50:46 crc kubenswrapper[4558]: I0120 17:50:46.848273 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:50:46 crc kubenswrapper[4558]: I0120 17:50:46.849120 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-stgv6" podUID="242c4cdd-2c26-40bd-acaf-782a26f5f6eb" containerName="registry-server" containerID="cri-o://a88a2d6d3e971adede38d8e407afe9af7d898a947f168c5b689a31bd59a6d537" gracePeriod=2 Jan 20 17:50:46 crc kubenswrapper[4558]: I0120 17:50:46.880798 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"3ec0f915-5676-4d2a-b743-1f8ee11c01ac\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:50:46 crc kubenswrapper[4558]: I0120 17:50:46.883454 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-public-tls-certs\") pod \"cinder-api-0\" (UID: \"3ec0f915-5676-4d2a-b743-1f8ee11c01ac\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:50:46 crc kubenswrapper[4558]: E0120 17:50:46.880971 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-cinder-internal-svc: secret "cert-cinder-internal-svc" not found Jan 20 17:50:46 crc kubenswrapper[4558]: E0120 17:50:46.884366 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-internal-tls-certs podName:3ec0f915-5676-4d2a-b743-1f8ee11c01ac nodeName:}" failed. No retries permitted until 2026-01-20 17:51:02.88434755 +0000 UTC m=+4156.644685517 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-internal-tls-certs") pod "cinder-api-0" (UID: "3ec0f915-5676-4d2a-b743-1f8ee11c01ac") : secret "cert-cinder-internal-svc" not found Jan 20 17:50:46 crc kubenswrapper[4558]: E0120 17:50:46.884226 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-cinder-public-svc: secret "cert-cinder-public-svc" not found Jan 20 17:50:46 crc kubenswrapper[4558]: E0120 17:50:46.884713 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-public-tls-certs podName:3ec0f915-5676-4d2a-b743-1f8ee11c01ac nodeName:}" failed. No retries permitted until 2026-01-20 17:51:02.88470378 +0000 UTC m=+4156.645041747 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-public-tls-certs") pod "cinder-api-0" (UID: "3ec0f915-5676-4d2a-b743-1f8ee11c01ac") : secret "cert-cinder-public-svc" not found Jan 20 17:50:46 crc kubenswrapper[4558]: E0120 17:50:46.904525 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8b80d38684bc886d3672b9eba3adc33250d75483d51261047f570be9beb3ce3c is running failed: container process not found" containerID="8b80d38684bc886d3672b9eba3adc33250d75483d51261047f570be9beb3ce3c" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Jan 20 17:50:46 crc kubenswrapper[4558]: E0120 17:50:46.905669 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8b80d38684bc886d3672b9eba3adc33250d75483d51261047f570be9beb3ce3c is running failed: container process not found" containerID="8b80d38684bc886d3672b9eba3adc33250d75483d51261047f570be9beb3ce3c" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Jan 20 17:50:46 crc kubenswrapper[4558]: E0120 17:50:46.906193 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8b80d38684bc886d3672b9eba3adc33250d75483d51261047f570be9beb3ce3c is running failed: container process not found" containerID="8b80d38684bc886d3672b9eba3adc33250d75483d51261047f570be9beb3ce3c" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Jan 20 17:50:46 crc kubenswrapper[4558]: E0120 17:50:46.906242 4558 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 8b80d38684bc886d3672b9eba3adc33250d75483d51261047f570be9beb3ce3c is running failed: container process not found" probeType="Readiness" pod="openstack-kuttl-tests/openstack-cell1-galera-0" podUID="9d54b98d-10d8-4300-89b7-031984825b5a" containerName="galera" Jan 20 17:50:46 crc kubenswrapper[4558]: I0120 17:50:46.909372 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-42qnz" Jan 20 17:50:47 crc kubenswrapper[4558]: I0120 17:50:47.089036 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/80a7bd73-c63f-4a27-9d3d-3fef760c025f-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"80a7bd73-c63f-4a27-9d3d-3fef760c025f\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:50:47 crc kubenswrapper[4558]: E0120 17:50:47.089210 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-ovn-metrics: secret "cert-ovn-metrics" not found Jan 20 17:50:47 crc kubenswrapper[4558]: E0120 17:50:47.089289 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/80a7bd73-c63f-4a27-9d3d-3fef760c025f-metrics-certs-tls-certs podName:80a7bd73-c63f-4a27-9d3d-3fef760c025f nodeName:}" failed. No retries permitted until 2026-01-20 17:51:03.089270422 +0000 UTC m=+4156.849608389 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs-tls-certs" (UniqueName: "kubernetes.io/secret/80a7bd73-c63f-4a27-9d3d-3fef760c025f-metrics-certs-tls-certs") pod "ovn-northd-0" (UID: "80a7bd73-c63f-4a27-9d3d-3fef760c025f") : secret "cert-ovn-metrics" not found Jan 20 17:50:47 crc kubenswrapper[4558]: I0120 17:50:47.213441 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-8c8mp" podUID="2bf2d1ac-2610-4caf-a3dc-eb6ef3d260a4" containerName="registry-server" probeResult="failure" output=< Jan 20 17:50:47 crc kubenswrapper[4558]: timeout: failed to connect service ":50051" within 1s Jan 20 17:50:47 crc kubenswrapper[4558]: > Jan 20 17:50:47 crc kubenswrapper[4558]: I0120 17:50:47.255917 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:50:47 crc kubenswrapper[4558]: I0120 17:50:47.260510 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-stgv6" Jan 20 17:50:47 crc kubenswrapper[4558]: E0120 17:50:47.295377 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-neutron-public-svc: secret "cert-neutron-public-svc" not found Jan 20 17:50:47 crc kubenswrapper[4558]: E0120 17:50:47.295474 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6a566db2-a941-48c9-9169-8e2c16cda1ac-public-tls-certs podName:6a566db2-a941-48c9-9169-8e2c16cda1ac nodeName:}" failed. No retries permitted until 2026-01-20 17:50:55.295454576 +0000 UTC m=+4149.055792543 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/6a566db2-a941-48c9-9169-8e2c16cda1ac-public-tls-certs") pod "neutron-78bcff9576-76n9h" (UID: "6a566db2-a941-48c9-9169-8e2c16cda1ac") : secret "cert-neutron-public-svc" not found Jan 20 17:50:47 crc kubenswrapper[4558]: E0120 17:50:47.295399 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-neutron-ovndbs: secret "cert-neutron-ovndbs" not found Jan 20 17:50:47 crc kubenswrapper[4558]: E0120 17:50:47.295700 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6a566db2-a941-48c9-9169-8e2c16cda1ac-ovndb-tls-certs podName:6a566db2-a941-48c9-9169-8e2c16cda1ac nodeName:}" failed. No retries permitted until 2026-01-20 17:50:55.295679749 +0000 UTC m=+4149.056017716 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "ovndb-tls-certs" (UniqueName: "kubernetes.io/secret/6a566db2-a941-48c9-9169-8e2c16cda1ac-ovndb-tls-certs") pod "neutron-78bcff9576-76n9h" (UID: "6a566db2-a941-48c9-9169-8e2c16cda1ac") : secret "cert-neutron-ovndbs" not found Jan 20 17:50:47 crc kubenswrapper[4558]: E0120 17:50:47.295848 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-neutron-internal-svc: secret "cert-neutron-internal-svc" not found Jan 20 17:50:47 crc kubenswrapper[4558]: E0120 17:50:47.295936 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6a566db2-a941-48c9-9169-8e2c16cda1ac-internal-tls-certs podName:6a566db2-a941-48c9-9169-8e2c16cda1ac nodeName:}" failed. No retries permitted until 2026-01-20 17:50:55.295927265 +0000 UTC m=+4149.056265231 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/6a566db2-a941-48c9-9169-8e2c16cda1ac-internal-tls-certs") pod "neutron-78bcff9576-76n9h" (UID: "6a566db2-a941-48c9-9169-8e2c16cda1ac") : secret "cert-neutron-internal-svc" not found Jan 20 17:50:47 crc kubenswrapper[4558]: I0120 17:50:47.395598 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/242c4cdd-2c26-40bd-acaf-782a26f5f6eb-utilities\") pod \"242c4cdd-2c26-40bd-acaf-782a26f5f6eb\" (UID: \"242c4cdd-2c26-40bd-acaf-782a26f5f6eb\") " Jan 20 17:50:47 crc kubenswrapper[4558]: I0120 17:50:47.395700 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/9d54b98d-10d8-4300-89b7-031984825b5a-config-data-default\") pod \"9d54b98d-10d8-4300-89b7-031984825b5a\" (UID: \"9d54b98d-10d8-4300-89b7-031984825b5a\") " Jan 20 17:50:47 crc kubenswrapper[4558]: I0120 17:50:47.395726 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"9d54b98d-10d8-4300-89b7-031984825b5a\" (UID: \"9d54b98d-10d8-4300-89b7-031984825b5a\") " Jan 20 17:50:47 crc kubenswrapper[4558]: I0120 17:50:47.395761 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9d54b98d-10d8-4300-89b7-031984825b5a-operator-scripts\") pod \"9d54b98d-10d8-4300-89b7-031984825b5a\" (UID: \"9d54b98d-10d8-4300-89b7-031984825b5a\") " Jan 20 17:50:47 crc kubenswrapper[4558]: I0120 17:50:47.395803 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/9d54b98d-10d8-4300-89b7-031984825b5a-config-data-generated\") pod \"9d54b98d-10d8-4300-89b7-031984825b5a\" (UID: \"9d54b98d-10d8-4300-89b7-031984825b5a\") " Jan 20 17:50:47 crc kubenswrapper[4558]: I0120 17:50:47.395919 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/242c4cdd-2c26-40bd-acaf-782a26f5f6eb-catalog-content\") pod \"242c4cdd-2c26-40bd-acaf-782a26f5f6eb\" (UID: \"242c4cdd-2c26-40bd-acaf-782a26f5f6eb\") " Jan 20 17:50:47 crc kubenswrapper[4558]: I0120 17:50:47.395960 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/9d54b98d-10d8-4300-89b7-031984825b5a-kolla-config\") pod \"9d54b98d-10d8-4300-89b7-031984825b5a\" (UID: \"9d54b98d-10d8-4300-89b7-031984825b5a\") " Jan 20 17:50:47 crc kubenswrapper[4558]: I0120 17:50:47.396004 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-44zs5\" (UniqueName: \"kubernetes.io/projected/242c4cdd-2c26-40bd-acaf-782a26f5f6eb-kube-api-access-44zs5\") pod \"242c4cdd-2c26-40bd-acaf-782a26f5f6eb\" (UID: \"242c4cdd-2c26-40bd-acaf-782a26f5f6eb\") " Jan 20 17:50:47 crc kubenswrapper[4558]: I0120 17:50:47.396135 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lfjsk\" (UniqueName: \"kubernetes.io/projected/9d54b98d-10d8-4300-89b7-031984825b5a-kube-api-access-lfjsk\") pod \"9d54b98d-10d8-4300-89b7-031984825b5a\" (UID: \"9d54b98d-10d8-4300-89b7-031984825b5a\") " Jan 20 17:50:47 crc kubenswrapper[4558]: I0120 17:50:47.396155 4558 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/9d54b98d-10d8-4300-89b7-031984825b5a-galera-tls-certs\") pod \"9d54b98d-10d8-4300-89b7-031984825b5a\" (UID: \"9d54b98d-10d8-4300-89b7-031984825b5a\") " Jan 20 17:50:47 crc kubenswrapper[4558]: I0120 17:50:47.396200 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d54b98d-10d8-4300-89b7-031984825b5a-combined-ca-bundle\") pod \"9d54b98d-10d8-4300-89b7-031984825b5a\" (UID: \"9d54b98d-10d8-4300-89b7-031984825b5a\") " Jan 20 17:50:47 crc kubenswrapper[4558]: I0120 17:50:47.396575 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d54b98d-10d8-4300-89b7-031984825b5a-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "9d54b98d-10d8-4300-89b7-031984825b5a" (UID: "9d54b98d-10d8-4300-89b7-031984825b5a"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:50:47 crc kubenswrapper[4558]: I0120 17:50:47.396906 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/9d54b98d-10d8-4300-89b7-031984825b5a-config-data-default\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:47 crc kubenswrapper[4558]: I0120 17:50:47.397588 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/242c4cdd-2c26-40bd-acaf-782a26f5f6eb-utilities" (OuterVolumeSpecName: "utilities") pod "242c4cdd-2c26-40bd-acaf-782a26f5f6eb" (UID: "242c4cdd-2c26-40bd-acaf-782a26f5f6eb"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:50:47 crc kubenswrapper[4558]: I0120 17:50:47.398122 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d54b98d-10d8-4300-89b7-031984825b5a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "9d54b98d-10d8-4300-89b7-031984825b5a" (UID: "9d54b98d-10d8-4300-89b7-031984825b5a"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:50:47 crc kubenswrapper[4558]: I0120 17:50:47.399894 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d54b98d-10d8-4300-89b7-031984825b5a-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "9d54b98d-10d8-4300-89b7-031984825b5a" (UID: "9d54b98d-10d8-4300-89b7-031984825b5a"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:50:47 crc kubenswrapper[4558]: I0120 17:50:47.400121 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9d54b98d-10d8-4300-89b7-031984825b5a-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "9d54b98d-10d8-4300-89b7-031984825b5a" (UID: "9d54b98d-10d8-4300-89b7-031984825b5a"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:50:47 crc kubenswrapper[4558]: I0120 17:50:47.403063 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d54b98d-10d8-4300-89b7-031984825b5a-kube-api-access-lfjsk" (OuterVolumeSpecName: "kube-api-access-lfjsk") pod "9d54b98d-10d8-4300-89b7-031984825b5a" (UID: "9d54b98d-10d8-4300-89b7-031984825b5a"). 
InnerVolumeSpecName "kube-api-access-lfjsk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:50:47 crc kubenswrapper[4558]: I0120 17:50:47.407193 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/242c4cdd-2c26-40bd-acaf-782a26f5f6eb-kube-api-access-44zs5" (OuterVolumeSpecName: "kube-api-access-44zs5") pod "242c4cdd-2c26-40bd-acaf-782a26f5f6eb" (UID: "242c4cdd-2c26-40bd-acaf-782a26f5f6eb"). InnerVolumeSpecName "kube-api-access-44zs5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:50:47 crc kubenswrapper[4558]: I0120 17:50:47.412888 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage15-crc" (OuterVolumeSpecName: "mysql-db") pod "9d54b98d-10d8-4300-89b7-031984825b5a" (UID: "9d54b98d-10d8-4300-89b7-031984825b5a"). InnerVolumeSpecName "local-storage15-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:50:47 crc kubenswrapper[4558]: I0120 17:50:47.428005 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d54b98d-10d8-4300-89b7-031984825b5a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9d54b98d-10d8-4300-89b7-031984825b5a" (UID: "9d54b98d-10d8-4300-89b7-031984825b5a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:47 crc kubenswrapper[4558]: I0120 17:50:47.446681 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/242c4cdd-2c26-40bd-acaf-782a26f5f6eb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "242c4cdd-2c26-40bd-acaf-782a26f5f6eb" (UID: "242c4cdd-2c26-40bd-acaf-782a26f5f6eb"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:50:47 crc kubenswrapper[4558]: I0120 17:50:47.454407 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d54b98d-10d8-4300-89b7-031984825b5a-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "9d54b98d-10d8-4300-89b7-031984825b5a" (UID: "9d54b98d-10d8-4300-89b7-031984825b5a"). InnerVolumeSpecName "galera-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:47 crc kubenswrapper[4558]: I0120 17:50:47.464354 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:50:47 crc kubenswrapper[4558]: E0120 17:50:47.465571 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[galera-tls-certs], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack-kuttl-tests/openstack-galera-0" podUID="603501da-057d-4d3b-a682-e55e7e2916f4" Jan 20 17:50:47 crc kubenswrapper[4558]: I0120 17:50:47.499069 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/733a1ae1-a917-447a-afa6-67cf7d688f86-internal-tls-certs\") pod \"nova-api-0\" (UID: \"733a1ae1-a917-447a-afa6-67cf7d688f86\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:50:47 crc kubenswrapper[4558]: E0120 17:50:47.499264 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-nova-internal-svc: secret "cert-nova-internal-svc" not found Jan 20 17:50:47 crc kubenswrapper[4558]: E0120 17:50:47.499359 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/733a1ae1-a917-447a-afa6-67cf7d688f86-internal-tls-certs podName:733a1ae1-a917-447a-afa6-67cf7d688f86 nodeName:}" failed. No retries permitted until 2026-01-20 17:51:03.499334456 +0000 UTC m=+4157.259672424 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/733a1ae1-a917-447a-afa6-67cf7d688f86-internal-tls-certs") pod "nova-api-0" (UID: "733a1ae1-a917-447a-afa6-67cf7d688f86") : secret "cert-nova-internal-svc" not found Jan 20 17:50:47 crc kubenswrapper[4558]: I0120 17:50:47.499818 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") on node \"crc\" " Jan 20 17:50:47 crc kubenswrapper[4558]: I0120 17:50:47.499840 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9d54b98d-10d8-4300-89b7-031984825b5a-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:47 crc kubenswrapper[4558]: I0120 17:50:47.499854 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/9d54b98d-10d8-4300-89b7-031984825b5a-config-data-generated\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:47 crc kubenswrapper[4558]: I0120 17:50:47.499867 4558 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/242c4cdd-2c26-40bd-acaf-782a26f5f6eb-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:47 crc kubenswrapper[4558]: I0120 17:50:47.499877 4558 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/9d54b98d-10d8-4300-89b7-031984825b5a-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:47 crc kubenswrapper[4558]: I0120 17:50:47.499890 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-44zs5\" (UniqueName: \"kubernetes.io/projected/242c4cdd-2c26-40bd-acaf-782a26f5f6eb-kube-api-access-44zs5\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:47 crc kubenswrapper[4558]: I0120 17:50:47.499903 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lfjsk\" (UniqueName: 
\"kubernetes.io/projected/9d54b98d-10d8-4300-89b7-031984825b5a-kube-api-access-lfjsk\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:47 crc kubenswrapper[4558]: I0120 17:50:47.499914 4558 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/9d54b98d-10d8-4300-89b7-031984825b5a-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:47 crc kubenswrapper[4558]: I0120 17:50:47.499924 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d54b98d-10d8-4300-89b7-031984825b5a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:47 crc kubenswrapper[4558]: I0120 17:50:47.499933 4558 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/242c4cdd-2c26-40bd-acaf-782a26f5f6eb-utilities\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:47 crc kubenswrapper[4558]: I0120 17:50:47.512079 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:50:47 crc kubenswrapper[4558]: I0120 17:50:47.517900 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage15-crc" (UniqueName: "kubernetes.io/local-volume/local-storage15-crc") on node "crc" Jan 20 17:50:47 crc kubenswrapper[4558]: I0120 17:50:47.549685 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:50:47 crc kubenswrapper[4558]: I0120 17:50:47.601723 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:47 crc kubenswrapper[4558]: E0120 17:50:47.705774 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-swift-public-svc: secret "cert-swift-public-svc" not found Jan 20 17:50:47 crc kubenswrapper[4558]: E0120 17:50:47.705850 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d824a74f-efe9-440c-8b03-f43303fb5923-public-tls-certs podName:d824a74f-efe9-440c-8b03-f43303fb5923 nodeName:}" failed. No retries permitted until 2026-01-20 17:50:48.205832661 +0000 UTC m=+4141.966170628 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/d824a74f-efe9-440c-8b03-f43303fb5923-public-tls-certs") pod "swift-proxy-67dccf5df6-qnbjc" (UID: "d824a74f-efe9-440c-8b03-f43303fb5923") : secret "cert-swift-public-svc" not found Jan 20 17:50:47 crc kubenswrapper[4558]: I0120 17:50:47.862129 4558 generic.go:334] "Generic (PLEG): container finished" podID="242c4cdd-2c26-40bd-acaf-782a26f5f6eb" containerID="a88a2d6d3e971adede38d8e407afe9af7d898a947f168c5b689a31bd59a6d537" exitCode=0 Jan 20 17:50:47 crc kubenswrapper[4558]: I0120 17:50:47.862205 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-stgv6" Jan 20 17:50:47 crc kubenswrapper[4558]: I0120 17:50:47.862242 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-stgv6" event={"ID":"242c4cdd-2c26-40bd-acaf-782a26f5f6eb","Type":"ContainerDied","Data":"a88a2d6d3e971adede38d8e407afe9af7d898a947f168c5b689a31bd59a6d537"} Jan 20 17:50:47 crc kubenswrapper[4558]: I0120 17:50:47.864044 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-stgv6" event={"ID":"242c4cdd-2c26-40bd-acaf-782a26f5f6eb","Type":"ContainerDied","Data":"3810dfe81460292ca94ce3f75a6bd3a2a39d3b975c0e42e821b93ed1a50245c3"} Jan 20 17:50:47 crc kubenswrapper[4558]: I0120 17:50:47.864108 4558 scope.go:117] "RemoveContainer" containerID="a88a2d6d3e971adede38d8e407afe9af7d898a947f168c5b689a31bd59a6d537" Jan 20 17:50:47 crc kubenswrapper[4558]: I0120 17:50:47.867326 4558 generic.go:334] "Generic (PLEG): container finished" podID="9d54b98d-10d8-4300-89b7-031984825b5a" containerID="8b80d38684bc886d3672b9eba3adc33250d75483d51261047f570be9beb3ce3c" exitCode=0 Jan 20 17:50:47 crc kubenswrapper[4558]: I0120 17:50:47.868114 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:50:47 crc kubenswrapper[4558]: I0120 17:50:47.868857 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"9d54b98d-10d8-4300-89b7-031984825b5a","Type":"ContainerDied","Data":"8b80d38684bc886d3672b9eba3adc33250d75483d51261047f570be9beb3ce3c"} Jan 20 17:50:47 crc kubenswrapper[4558]: I0120 17:50:47.868885 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"9d54b98d-10d8-4300-89b7-031984825b5a","Type":"ContainerDied","Data":"b6838d9422cbda53f12c08f38617dd4acbb6865cba57a5eaa40969f51a7a5972"} Jan 20 17:50:47 crc kubenswrapper[4558]: I0120 17:50:47.876044 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:50:47 crc kubenswrapper[4558]: I0120 17:50:47.890705 4558 scope.go:117] "RemoveContainer" containerID="b4447fbaa951e6b88adafc2a305cce7fed9efe1179b0c8d07311bd910bb0f3d5" Jan 20 17:50:48 crc kubenswrapper[4558]: E0120 17:50:48.217416 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-swift-public-svc: secret "cert-swift-public-svc" not found Jan 20 17:50:48 crc kubenswrapper[4558]: E0120 17:50:48.217943 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d824a74f-efe9-440c-8b03-f43303fb5923-public-tls-certs podName:d824a74f-efe9-440c-8b03-f43303fb5923 nodeName:}" failed. No retries permitted until 2026-01-20 17:50:49.2179262 +0000 UTC m=+4142.978264157 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/d824a74f-efe9-440c-8b03-f43303fb5923-public-tls-certs") pod "swift-proxy-67dccf5df6-qnbjc" (UID: "d824a74f-efe9-440c-8b03-f43303fb5923") : secret "cert-swift-public-svc" not found Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.446550 4558 scope.go:117] "RemoveContainer" containerID="1cc253a74acddde0e725640e1376aa131de2d972e7b7f20d27b7188e3d47c379" Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.466456 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.479640 4558 scope.go:117] "RemoveContainer" containerID="a88a2d6d3e971adede38d8e407afe9af7d898a947f168c5b689a31bd59a6d537" Jan 20 17:50:48 crc kubenswrapper[4558]: E0120 17:50:48.480201 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a88a2d6d3e971adede38d8e407afe9af7d898a947f168c5b689a31bd59a6d537\": container with ID starting with a88a2d6d3e971adede38d8e407afe9af7d898a947f168c5b689a31bd59a6d537 not found: ID does not exist" containerID="a88a2d6d3e971adede38d8e407afe9af7d898a947f168c5b689a31bd59a6d537" Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.480237 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a88a2d6d3e971adede38d8e407afe9af7d898a947f168c5b689a31bd59a6d537"} err="failed to get container status \"a88a2d6d3e971adede38d8e407afe9af7d898a947f168c5b689a31bd59a6d537\": rpc error: code = NotFound desc = could not find container \"a88a2d6d3e971adede38d8e407afe9af7d898a947f168c5b689a31bd59a6d537\": container with ID starting with a88a2d6d3e971adede38d8e407afe9af7d898a947f168c5b689a31bd59a6d537 not found: ID does not exist" Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.480259 4558 scope.go:117] "RemoveContainer" containerID="b4447fbaa951e6b88adafc2a305cce7fed9efe1179b0c8d07311bd910bb0f3d5" Jan 20 17:50:48 crc kubenswrapper[4558]: E0120 17:50:48.480598 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b4447fbaa951e6b88adafc2a305cce7fed9efe1179b0c8d07311bd910bb0f3d5\": container with ID starting with b4447fbaa951e6b88adafc2a305cce7fed9efe1179b0c8d07311bd910bb0f3d5 not found: ID does not exist" containerID="b4447fbaa951e6b88adafc2a305cce7fed9efe1179b0c8d07311bd910bb0f3d5" Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.480619 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b4447fbaa951e6b88adafc2a305cce7fed9efe1179b0c8d07311bd910bb0f3d5"} err="failed to get container status \"b4447fbaa951e6b88adafc2a305cce7fed9efe1179b0c8d07311bd910bb0f3d5\": rpc error: code = NotFound desc = could not find container \"b4447fbaa951e6b88adafc2a305cce7fed9efe1179b0c8d07311bd910bb0f3d5\": container with ID starting with b4447fbaa951e6b88adafc2a305cce7fed9efe1179b0c8d07311bd910bb0f3d5 not found: ID does not exist" Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.480633 4558 scope.go:117] "RemoveContainer" containerID="1cc253a74acddde0e725640e1376aa131de2d972e7b7f20d27b7188e3d47c379" Jan 20 17:50:48 crc kubenswrapper[4558]: E0120 17:50:48.481133 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1cc253a74acddde0e725640e1376aa131de2d972e7b7f20d27b7188e3d47c379\": container with ID starting with 1cc253a74acddde0e725640e1376aa131de2d972e7b7f20d27b7188e3d47c379 not found: ID does not exist" containerID="1cc253a74acddde0e725640e1376aa131de2d972e7b7f20d27b7188e3d47c379" Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.481155 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1cc253a74acddde0e725640e1376aa131de2d972e7b7f20d27b7188e3d47c379"} err="failed to get container status \"1cc253a74acddde0e725640e1376aa131de2d972e7b7f20d27b7188e3d47c379\": rpc error: code = NotFound 
desc = could not find container \"1cc253a74acddde0e725640e1376aa131de2d972e7b7f20d27b7188e3d47c379\": container with ID starting with 1cc253a74acddde0e725640e1376aa131de2d972e7b7f20d27b7188e3d47c379 not found: ID does not exist" Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.481187 4558 scope.go:117] "RemoveContainer" containerID="8b80d38684bc886d3672b9eba3adc33250d75483d51261047f570be9beb3ce3c" Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.522655 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.524190 4558 scope.go:117] "RemoveContainer" containerID="aa9b9381bbdf5cb116e31b3db67d045917bc5ba77627d5ef1e20e4e91641261b" Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.536642 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.544042 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-stgv6"] Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.557900 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:50:48 crc kubenswrapper[4558]: E0120 17:50:48.558369 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="242c4cdd-2c26-40bd-acaf-782a26f5f6eb" containerName="registry-server" Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.558389 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="242c4cdd-2c26-40bd-acaf-782a26f5f6eb" containerName="registry-server" Jan 20 17:50:48 crc kubenswrapper[4558]: E0120 17:50:48.558422 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="242c4cdd-2c26-40bd-acaf-782a26f5f6eb" containerName="extract-content" Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.558429 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="242c4cdd-2c26-40bd-acaf-782a26f5f6eb" containerName="extract-content" Jan 20 17:50:48 crc kubenswrapper[4558]: E0120 17:50:48.558440 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d54b98d-10d8-4300-89b7-031984825b5a" containerName="galera" Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.558446 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d54b98d-10d8-4300-89b7-031984825b5a" containerName="galera" Jan 20 17:50:48 crc kubenswrapper[4558]: E0120 17:50:48.558453 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="242c4cdd-2c26-40bd-acaf-782a26f5f6eb" containerName="extract-utilities" Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.558460 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="242c4cdd-2c26-40bd-acaf-782a26f5f6eb" containerName="extract-utilities" Jan 20 17:50:48 crc kubenswrapper[4558]: E0120 17:50:48.558480 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d54b98d-10d8-4300-89b7-031984825b5a" containerName="mysql-bootstrap" Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.558486 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d54b98d-10d8-4300-89b7-031984825b5a" containerName="mysql-bootstrap" Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.558678 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="242c4cdd-2c26-40bd-acaf-782a26f5f6eb" containerName="registry-server" Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.558733 4558 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="9d54b98d-10d8-4300-89b7-031984825b5a" containerName="galera" Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.559888 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.571641 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-cell1-config-data" Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.571789 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-galera-openstack-cell1-svc" Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.572139 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-cell1-scripts" Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.609959 4558 scope.go:117] "RemoveContainer" containerID="8b80d38684bc886d3672b9eba3adc33250d75483d51261047f570be9beb3ce3c" Jan 20 17:50:48 crc kubenswrapper[4558]: E0120 17:50:48.612611 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8b80d38684bc886d3672b9eba3adc33250d75483d51261047f570be9beb3ce3c\": container with ID starting with 8b80d38684bc886d3672b9eba3adc33250d75483d51261047f570be9beb3ce3c not found: ID does not exist" containerID="8b80d38684bc886d3672b9eba3adc33250d75483d51261047f570be9beb3ce3c" Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.612657 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8b80d38684bc886d3672b9eba3adc33250d75483d51261047f570be9beb3ce3c"} err="failed to get container status \"8b80d38684bc886d3672b9eba3adc33250d75483d51261047f570be9beb3ce3c\": rpc error: code = NotFound desc = could not find container \"8b80d38684bc886d3672b9eba3adc33250d75483d51261047f570be9beb3ce3c\": container with ID starting with 8b80d38684bc886d3672b9eba3adc33250d75483d51261047f570be9beb3ce3c not found: ID does not exist" Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.612690 4558 scope.go:117] "RemoveContainer" containerID="aa9b9381bbdf5cb116e31b3db67d045917bc5ba77627d5ef1e20e4e91641261b" Jan 20 17:50:48 crc kubenswrapper[4558]: E0120 17:50:48.613688 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aa9b9381bbdf5cb116e31b3db67d045917bc5ba77627d5ef1e20e4e91641261b\": container with ID starting with aa9b9381bbdf5cb116e31b3db67d045917bc5ba77627d5ef1e20e4e91641261b not found: ID does not exist" containerID="aa9b9381bbdf5cb116e31b3db67d045917bc5ba77627d5ef1e20e4e91641261b" Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.613807 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aa9b9381bbdf5cb116e31b3db67d045917bc5ba77627d5ef1e20e4e91641261b"} err="failed to get container status \"aa9b9381bbdf5cb116e31b3db67d045917bc5ba77627d5ef1e20e4e91641261b\": rpc error: code = NotFound desc = could not find container \"aa9b9381bbdf5cb116e31b3db67d045917bc5ba77627d5ef1e20e4e91641261b\": container with ID starting with aa9b9381bbdf5cb116e31b3db67d045917bc5ba77627d5ef1e20e4e91641261b not found: ID does not exist" Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.617363 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d54b98d-10d8-4300-89b7-031984825b5a" 
path="/var/lib/kubelet/pods/9d54b98d-10d8-4300-89b7-031984825b5a/volumes" Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.620333 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-stgv6"] Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.613959 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"galera-openstack-cell1-dockercfg-gzrvj" Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.630524 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/603501da-057d-4d3b-a682-e55e7e2916f4-operator-scripts\") pod \"603501da-057d-4d3b-a682-e55e7e2916f4\" (UID: \"603501da-057d-4d3b-a682-e55e7e2916f4\") " Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.630663 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/603501da-057d-4d3b-a682-e55e7e2916f4-combined-ca-bundle\") pod \"603501da-057d-4d3b-a682-e55e7e2916f4\" (UID: \"603501da-057d-4d3b-a682-e55e7e2916f4\") " Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.630832 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/603501da-057d-4d3b-a682-e55e7e2916f4-config-data-generated\") pod \"603501da-057d-4d3b-a682-e55e7e2916f4\" (UID: \"603501da-057d-4d3b-a682-e55e7e2916f4\") " Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.630940 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p4qbw\" (UniqueName: \"kubernetes.io/projected/603501da-057d-4d3b-a682-e55e7e2916f4-kube-api-access-p4qbw\") pod \"603501da-057d-4d3b-a682-e55e7e2916f4\" (UID: \"603501da-057d-4d3b-a682-e55e7e2916f4\") " Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.631118 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"603501da-057d-4d3b-a682-e55e7e2916f4\" (UID: \"603501da-057d-4d3b-a682-e55e7e2916f4\") " Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.631367 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/603501da-057d-4d3b-a682-e55e7e2916f4-config-data-default\") pod \"603501da-057d-4d3b-a682-e55e7e2916f4\" (UID: \"603501da-057d-4d3b-a682-e55e7e2916f4\") " Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.631461 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/603501da-057d-4d3b-a682-e55e7e2916f4-kolla-config\") pod \"603501da-057d-4d3b-a682-e55e7e2916f4\" (UID: \"603501da-057d-4d3b-a682-e55e7e2916f4\") " Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.631675 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/603501da-057d-4d3b-a682-e55e7e2916f4-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "603501da-057d-4d3b-a682-e55e7e2916f4" (UID: "603501da-057d-4d3b-a682-e55e7e2916f4"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.632411 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/603501da-057d-4d3b-a682-e55e7e2916f4-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "603501da-057d-4d3b-a682-e55e7e2916f4" (UID: "603501da-057d-4d3b-a682-e55e7e2916f4"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.632605 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/603501da-057d-4d3b-a682-e55e7e2916f4-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "603501da-057d-4d3b-a682-e55e7e2916f4" (UID: "603501da-057d-4d3b-a682-e55e7e2916f4"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.632837 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/603501da-057d-4d3b-a682-e55e7e2916f4-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "603501da-057d-4d3b-a682-e55e7e2916f4" (UID: "603501da-057d-4d3b-a682-e55e7e2916f4"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.632866 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/06db8474-04b0-4525-90f0-c066ecbac0ae-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"06db8474-04b0-4525-90f0-c066ecbac0ae\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:50:48 crc kubenswrapper[4558]: E0120 17:50:48.632981 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-ceilometer-internal-svc: secret "cert-ceilometer-internal-svc" not found Jan 20 17:50:48 crc kubenswrapper[4558]: E0120 17:50:48.633078 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/06db8474-04b0-4525-90f0-c066ecbac0ae-ceilometer-tls-certs podName:06db8474-04b0-4525-90f0-c066ecbac0ae nodeName:}" failed. No retries permitted until 2026-01-20 17:51:04.633055238 +0000 UTC m=+4158.393393205 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "ceilometer-tls-certs" (UniqueName: "kubernetes.io/secret/06db8474-04b0-4525-90f0-c066ecbac0ae-ceilometer-tls-certs") pod "ceilometer-0" (UID: "06db8474-04b0-4525-90f0-c066ecbac0ae") : secret "cert-ceilometer-internal-svc" not found Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.633896 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/603501da-057d-4d3b-a682-e55e7e2916f4-config-data-generated\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.633924 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/603501da-057d-4d3b-a682-e55e7e2916f4-config-data-default\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.633934 4558 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/603501da-057d-4d3b-a682-e55e7e2916f4-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.633949 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/603501da-057d-4d3b-a682-e55e7e2916f4-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.638116 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage14-crc" (OuterVolumeSpecName: "mysql-db") pod "603501da-057d-4d3b-a682-e55e7e2916f4" (UID: "603501da-057d-4d3b-a682-e55e7e2916f4"). InnerVolumeSpecName "local-storage14-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.638198 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/603501da-057d-4d3b-a682-e55e7e2916f4-kube-api-access-p4qbw" (OuterVolumeSpecName: "kube-api-access-p4qbw") pod "603501da-057d-4d3b-a682-e55e7e2916f4" (UID: "603501da-057d-4d3b-a682-e55e7e2916f4"). InnerVolumeSpecName "kube-api-access-p4qbw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.638446 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/603501da-057d-4d3b-a682-e55e7e2916f4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "603501da-057d-4d3b-a682-e55e7e2916f4" (UID: "603501da-057d-4d3b-a682-e55e7e2916f4"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.652895 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.736730 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05b3b028-51e2-49a9-9fb8-a10c096f3b27-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"05b3b028-51e2-49a9-9fb8-a10c096f3b27\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.736813 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8fd2c\" (UniqueName: \"kubernetes.io/projected/05b3b028-51e2-49a9-9fb8-a10c096f3b27-kube-api-access-8fd2c\") pod \"openstack-cell1-galera-0\" (UID: \"05b3b028-51e2-49a9-9fb8-a10c096f3b27\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.736910 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/05b3b028-51e2-49a9-9fb8-a10c096f3b27-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"05b3b028-51e2-49a9-9fb8-a10c096f3b27\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.737062 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/05b3b028-51e2-49a9-9fb8-a10c096f3b27-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"05b3b028-51e2-49a9-9fb8-a10c096f3b27\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.737110 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/05b3b028-51e2-49a9-9fb8-a10c096f3b27-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"05b3b028-51e2-49a9-9fb8-a10c096f3b27\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.737476 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/05b3b028-51e2-49a9-9fb8-a10c096f3b27-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"05b3b028-51e2-49a9-9fb8-a10c096f3b27\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.737543 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"openstack-cell1-galera-0\" (UID: \"05b3b028-51e2-49a9-9fb8-a10c096f3b27\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.738111 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/05b3b028-51e2-49a9-9fb8-a10c096f3b27-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"05b3b028-51e2-49a9-9fb8-a10c096f3b27\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 
17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.738433 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/603501da-057d-4d3b-a682-e55e7e2916f4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.738675 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p4qbw\" (UniqueName: \"kubernetes.io/projected/603501da-057d-4d3b-a682-e55e7e2916f4-kube-api-access-p4qbw\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.738732 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") on node \"crc\" " Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.758054 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage14-crc" (UniqueName: "kubernetes.io/local-volume/local-storage14-crc") on node "crc" Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.841784 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05b3b028-51e2-49a9-9fb8-a10c096f3b27-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"05b3b028-51e2-49a9-9fb8-a10c096f3b27\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.841862 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8fd2c\" (UniqueName: \"kubernetes.io/projected/05b3b028-51e2-49a9-9fb8-a10c096f3b27-kube-api-access-8fd2c\") pod \"openstack-cell1-galera-0\" (UID: \"05b3b028-51e2-49a9-9fb8-a10c096f3b27\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.841940 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/05b3b028-51e2-49a9-9fb8-a10c096f3b27-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"05b3b028-51e2-49a9-9fb8-a10c096f3b27\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.842020 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/05b3b028-51e2-49a9-9fb8-a10c096f3b27-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"05b3b028-51e2-49a9-9fb8-a10c096f3b27\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.842052 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/05b3b028-51e2-49a9-9fb8-a10c096f3b27-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"05b3b028-51e2-49a9-9fb8-a10c096f3b27\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.842236 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/05b3b028-51e2-49a9-9fb8-a10c096f3b27-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"05b3b028-51e2-49a9-9fb8-a10c096f3b27\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.842278 4558 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"openstack-cell1-galera-0\" (UID: \"05b3b028-51e2-49a9-9fb8-a10c096f3b27\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.842598 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/05b3b028-51e2-49a9-9fb8-a10c096f3b27-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"05b3b028-51e2-49a9-9fb8-a10c096f3b27\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.842758 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.843219 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"openstack-cell1-galera-0\" (UID: \"05b3b028-51e2-49a9-9fb8-a10c096f3b27\") device mount path \"/mnt/openstack/pv15\"" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.843392 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/05b3b028-51e2-49a9-9fb8-a10c096f3b27-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"05b3b028-51e2-49a9-9fb8-a10c096f3b27\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.843532 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/05b3b028-51e2-49a9-9fb8-a10c096f3b27-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"05b3b028-51e2-49a9-9fb8-a10c096f3b27\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.843680 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/05b3b028-51e2-49a9-9fb8-a10c096f3b27-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"05b3b028-51e2-49a9-9fb8-a10c096f3b27\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.844253 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/05b3b028-51e2-49a9-9fb8-a10c096f3b27-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"05b3b028-51e2-49a9-9fb8-a10c096f3b27\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.845872 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-42qnz"] Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.847318 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/05b3b028-51e2-49a9-9fb8-a10c096f3b27-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"05b3b028-51e2-49a9-9fb8-a10c096f3b27\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.847394 4558 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05b3b028-51e2-49a9-9fb8-a10c096f3b27-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"05b3b028-51e2-49a9-9fb8-a10c096f3b27\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.862236 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8fd2c\" (UniqueName: \"kubernetes.io/projected/05b3b028-51e2-49a9-9fb8-a10c096f3b27-kube-api-access-8fd2c\") pod \"openstack-cell1-galera-0\" (UID: \"05b3b028-51e2-49a9-9fb8-a10c096f3b27\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.867726 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"openstack-cell1-galera-0\" (UID: \"05b3b028-51e2-49a9-9fb8-a10c096f3b27\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.879913 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.880242 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-42qnz" podUID="317faea5-3184-4d0c-a9e5-7f038e779e08" containerName="registry-server" containerID="cri-o://2baf4f2136d18e4952d5247b3830cdd80252007fcbbfd6cab54cf44c3bb61971" gracePeriod=2 Jan 20 17:50:48 crc kubenswrapper[4558]: I0120 17:50:48.916377 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:50:49 crc kubenswrapper[4558]: I0120 17:50:49.074672 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:50:49 crc kubenswrapper[4558]: I0120 17:50:49.080294 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:50:49 crc kubenswrapper[4558]: I0120 17:50:49.107471 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:50:49 crc kubenswrapper[4558]: I0120 17:50:49.109145 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:50:49 crc kubenswrapper[4558]: I0120 17:50:49.111770 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"galera-openstack-dockercfg-7rwrt" Jan 20 17:50:49 crc kubenswrapper[4558]: I0120 17:50:49.112095 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-config-data" Jan 20 17:50:49 crc kubenswrapper[4558]: I0120 17:50:49.112317 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-scripts" Jan 20 17:50:49 crc kubenswrapper[4558]: I0120 17:50:49.112334 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-galera-openstack-svc" Jan 20 17:50:49 crc kubenswrapper[4558]: I0120 17:50:49.123028 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:50:49 crc kubenswrapper[4558]: I0120 17:50:49.263525 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/a18e9b68-4683-415e-833e-5b363e31d461-kolla-config\") pod \"openstack-galera-0\" (UID: \"a18e9b68-4683-415e-833e-5b363e31d461\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:50:49 crc kubenswrapper[4558]: I0120 17:50:49.263644 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a18e9b68-4683-415e-833e-5b363e31d461-operator-scripts\") pod \"openstack-galera-0\" (UID: \"a18e9b68-4683-415e-833e-5b363e31d461\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:50:49 crc kubenswrapper[4558]: I0120 17:50:49.263674 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/a18e9b68-4683-415e-833e-5b363e31d461-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"a18e9b68-4683-415e-833e-5b363e31d461\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:50:49 crc kubenswrapper[4558]: I0120 17:50:49.263997 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a18e9b68-4683-415e-833e-5b363e31d461-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"a18e9b68-4683-415e-833e-5b363e31d461\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:50:49 crc kubenswrapper[4558]: I0120 17:50:49.264270 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/a18e9b68-4683-415e-833e-5b363e31d461-config-data-default\") pod \"openstack-galera-0\" (UID: \"a18e9b68-4683-415e-833e-5b363e31d461\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:50:49 crc kubenswrapper[4558]: I0120 17:50:49.264476 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mxfc8\" (UniqueName: \"kubernetes.io/projected/a18e9b68-4683-415e-833e-5b363e31d461-kube-api-access-mxfc8\") pod \"openstack-galera-0\" (UID: \"a18e9b68-4683-415e-833e-5b363e31d461\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:50:49 crc kubenswrapper[4558]: I0120 17:50:49.264603 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/a18e9b68-4683-415e-833e-5b363e31d461-config-data-generated\") pod \"openstack-galera-0\" (UID: \"a18e9b68-4683-415e-833e-5b363e31d461\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:50:49 crc kubenswrapper[4558]: I0120 17:50:49.264695 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"openstack-galera-0\" (UID: \"a18e9b68-4683-415e-833e-5b363e31d461\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:50:49 crc kubenswrapper[4558]: I0120 17:50:49.264985 4558 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/603501da-057d-4d3b-a682-e55e7e2916f4-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:49 crc kubenswrapper[4558]: I0120 17:50:49.368528 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a18e9b68-4683-415e-833e-5b363e31d461-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"a18e9b68-4683-415e-833e-5b363e31d461\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:50:49 crc kubenswrapper[4558]: I0120 17:50:49.368750 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/a18e9b68-4683-415e-833e-5b363e31d461-config-data-default\") pod \"openstack-galera-0\" (UID: \"a18e9b68-4683-415e-833e-5b363e31d461\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:50:49 crc kubenswrapper[4558]: I0120 17:50:49.368948 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mxfc8\" (UniqueName: \"kubernetes.io/projected/a18e9b68-4683-415e-833e-5b363e31d461-kube-api-access-mxfc8\") pod \"openstack-galera-0\" (UID: \"a18e9b68-4683-415e-833e-5b363e31d461\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:50:49 crc kubenswrapper[4558]: I0120 17:50:49.369063 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/a18e9b68-4683-415e-833e-5b363e31d461-config-data-generated\") pod \"openstack-galera-0\" (UID: \"a18e9b68-4683-415e-833e-5b363e31d461\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:50:49 crc kubenswrapper[4558]: I0120 17:50:49.369127 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"openstack-galera-0\" (UID: \"a18e9b68-4683-415e-833e-5b363e31d461\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:50:49 crc kubenswrapper[4558]: I0120 17:50:49.369209 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/a18e9b68-4683-415e-833e-5b363e31d461-kolla-config\") pod \"openstack-galera-0\" (UID: \"a18e9b68-4683-415e-833e-5b363e31d461\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:50:49 crc kubenswrapper[4558]: I0120 17:50:49.369322 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a18e9b68-4683-415e-833e-5b363e31d461-operator-scripts\") pod \"openstack-galera-0\" (UID: \"a18e9b68-4683-415e-833e-5b363e31d461\") " 
pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:50:49 crc kubenswrapper[4558]: I0120 17:50:49.369350 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/a18e9b68-4683-415e-833e-5b363e31d461-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"a18e9b68-4683-415e-833e-5b363e31d461\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:50:49 crc kubenswrapper[4558]: I0120 17:50:49.369553 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"openstack-galera-0\" (UID: \"a18e9b68-4683-415e-833e-5b363e31d461\") device mount path \"/mnt/openstack/pv14\"" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:50:49 crc kubenswrapper[4558]: I0120 17:50:49.370112 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/a18e9b68-4683-415e-833e-5b363e31d461-config-data-generated\") pod \"openstack-galera-0\" (UID: \"a18e9b68-4683-415e-833e-5b363e31d461\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:50:49 crc kubenswrapper[4558]: I0120 17:50:49.370809 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/a18e9b68-4683-415e-833e-5b363e31d461-kolla-config\") pod \"openstack-galera-0\" (UID: \"a18e9b68-4683-415e-833e-5b363e31d461\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:50:49 crc kubenswrapper[4558]: I0120 17:50:49.371675 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a18e9b68-4683-415e-833e-5b363e31d461-operator-scripts\") pod \"openstack-galera-0\" (UID: \"a18e9b68-4683-415e-833e-5b363e31d461\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:50:49 crc kubenswrapper[4558]: I0120 17:50:49.371985 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/a18e9b68-4683-415e-833e-5b363e31d461-config-data-default\") pod \"openstack-galera-0\" (UID: \"a18e9b68-4683-415e-833e-5b363e31d461\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:50:49 crc kubenswrapper[4558]: I0120 17:50:49.375486 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-42qnz" Jan 20 17:50:49 crc kubenswrapper[4558]: I0120 17:50:49.376494 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/a18e9b68-4683-415e-833e-5b363e31d461-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"a18e9b68-4683-415e-833e-5b363e31d461\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:50:49 crc kubenswrapper[4558]: I0120 17:50:49.377186 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a18e9b68-4683-415e-833e-5b363e31d461-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"a18e9b68-4683-415e-833e-5b363e31d461\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:50:49 crc kubenswrapper[4558]: I0120 17:50:49.389350 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mxfc8\" (UniqueName: \"kubernetes.io/projected/a18e9b68-4683-415e-833e-5b363e31d461-kube-api-access-mxfc8\") pod \"openstack-galera-0\" (UID: \"a18e9b68-4683-415e-833e-5b363e31d461\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:50:49 crc kubenswrapper[4558]: I0120 17:50:49.399998 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"openstack-galera-0\" (UID: \"a18e9b68-4683-415e-833e-5b363e31d461\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:50:49 crc kubenswrapper[4558]: I0120 17:50:49.406231 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:50:49 crc kubenswrapper[4558]: I0120 17:50:49.433314 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:50:49 crc kubenswrapper[4558]: I0120 17:50:49.472636 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/317faea5-3184-4d0c-a9e5-7f038e779e08-catalog-content\") pod \"317faea5-3184-4d0c-a9e5-7f038e779e08\" (UID: \"317faea5-3184-4d0c-a9e5-7f038e779e08\") " Jan 20 17:50:49 crc kubenswrapper[4558]: I0120 17:50:49.472847 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4rsvj\" (UniqueName: \"kubernetes.io/projected/317faea5-3184-4d0c-a9e5-7f038e779e08-kube-api-access-4rsvj\") pod \"317faea5-3184-4d0c-a9e5-7f038e779e08\" (UID: \"317faea5-3184-4d0c-a9e5-7f038e779e08\") " Jan 20 17:50:49 crc kubenswrapper[4558]: I0120 17:50:49.473025 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/317faea5-3184-4d0c-a9e5-7f038e779e08-utilities\") pod \"317faea5-3184-4d0c-a9e5-7f038e779e08\" (UID: \"317faea5-3184-4d0c-a9e5-7f038e779e08\") " Jan 20 17:50:49 crc kubenswrapper[4558]: I0120 17:50:49.473826 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/317faea5-3184-4d0c-a9e5-7f038e779e08-utilities" (OuterVolumeSpecName: "utilities") pod "317faea5-3184-4d0c-a9e5-7f038e779e08" (UID: "317faea5-3184-4d0c-a9e5-7f038e779e08"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:50:49 crc kubenswrapper[4558]: I0120 17:50:49.476030 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/317faea5-3184-4d0c-a9e5-7f038e779e08-kube-api-access-4rsvj" (OuterVolumeSpecName: "kube-api-access-4rsvj") pod "317faea5-3184-4d0c-a9e5-7f038e779e08" (UID: "317faea5-3184-4d0c-a9e5-7f038e779e08"). InnerVolumeSpecName "kube-api-access-4rsvj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:50:49 crc kubenswrapper[4558]: I0120 17:50:49.477111 4558 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/317faea5-3184-4d0c-a9e5-7f038e779e08-utilities\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:49 crc kubenswrapper[4558]: I0120 17:50:49.477146 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4rsvj\" (UniqueName: \"kubernetes.io/projected/317faea5-3184-4d0c-a9e5-7f038e779e08-kube-api-access-4rsvj\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:49 crc kubenswrapper[4558]: I0120 17:50:49.509959 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/317faea5-3184-4d0c-a9e5-7f038e779e08-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "317faea5-3184-4d0c-a9e5-7f038e779e08" (UID: "317faea5-3184-4d0c-a9e5-7f038e779e08"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:50:49 crc kubenswrapper[4558]: I0120 17:50:49.566530 4558 scope.go:117] "RemoveContainer" containerID="51b725953cbe989b3d1801020e679e346a8ac5643918b80a84ec2b9a8ed5efcb" Jan 20 17:50:49 crc kubenswrapper[4558]: I0120 17:50:49.579577 4558 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/317faea5-3184-4d0c-a9e5-7f038e779e08-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:49 crc kubenswrapper[4558]: I0120 17:50:49.856221 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:50:49 crc kubenswrapper[4558]: I0120 17:50:49.890030 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"05b3b028-51e2-49a9-9fb8-a10c096f3b27","Type":"ContainerStarted","Data":"a25982768f5cf2c38ae4bca2414ca592ef39c07924ab9b34e0d46ea3478509b7"} Jan 20 17:50:49 crc kubenswrapper[4558]: I0120 17:50:49.890083 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"05b3b028-51e2-49a9-9fb8-a10c096f3b27","Type":"ContainerStarted","Data":"7d089bb9ade270dd7bdd47aa19ebd6d991a0c4fc863b92f48fc4d9613fb8100f"} Jan 20 17:50:49 crc kubenswrapper[4558]: I0120 17:50:49.892851 4558 generic.go:334] "Generic (PLEG): container finished" podID="317faea5-3184-4d0c-a9e5-7f038e779e08" containerID="2baf4f2136d18e4952d5247b3830cdd80252007fcbbfd6cab54cf44c3bb61971" exitCode=0 Jan 20 17:50:49 crc kubenswrapper[4558]: I0120 17:50:49.892901 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-42qnz" Jan 20 17:50:49 crc kubenswrapper[4558]: I0120 17:50:49.892903 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-42qnz" event={"ID":"317faea5-3184-4d0c-a9e5-7f038e779e08","Type":"ContainerDied","Data":"2baf4f2136d18e4952d5247b3830cdd80252007fcbbfd6cab54cf44c3bb61971"} Jan 20 17:50:49 crc kubenswrapper[4558]: I0120 17:50:49.893051 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-42qnz" event={"ID":"317faea5-3184-4d0c-a9e5-7f038e779e08","Type":"ContainerDied","Data":"7bc4eaca3d4a1ba2f865568e57a76a7c9ca19d4330d05bdc0844cab6ad5fe695"} Jan 20 17:50:49 crc kubenswrapper[4558]: I0120 17:50:49.893077 4558 scope.go:117] "RemoveContainer" containerID="2baf4f2136d18e4952d5247b3830cdd80252007fcbbfd6cab54cf44c3bb61971" Jan 20 17:50:50 crc kubenswrapper[4558]: W0120 17:50:50.199150 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda18e9b68_4683_415e_833e_5b363e31d461.slice/crio-01578d9045580c439c227d018be1cb6d842df504a938d3e2e9739bff0eb9b11e WatchSource:0}: Error finding container 01578d9045580c439c227d018be1cb6d842df504a938d3e2e9739bff0eb9b11e: Status 404 returned error can't find the container with id 01578d9045580c439c227d018be1cb6d842df504a938d3e2e9739bff0eb9b11e Jan 20 17:50:50 crc kubenswrapper[4558]: I0120 17:50:50.473764 4558 scope.go:117] "RemoveContainer" containerID="108f827cc83a5bd00deb8be0ea3d91e1817061559a969d618b8baa66e1908eb5" Jan 20 17:50:50 crc kubenswrapper[4558]: I0120 17:50:50.501195 4558 scope.go:117] "RemoveContainer" containerID="c03312061f7266dcd57196ddf4d0814f1c93b4dd0503b2de614de31491810448" Jan 20 17:50:50 crc kubenswrapper[4558]: I0120 17:50:50.504207 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-42qnz"] Jan 20 17:50:50 crc kubenswrapper[4558]: I0120 17:50:50.517779 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-42qnz"] Jan 20 17:50:50 crc kubenswrapper[4558]: I0120 17:50:50.563425 4558 scope.go:117] "RemoveContainer" containerID="2baf4f2136d18e4952d5247b3830cdd80252007fcbbfd6cab54cf44c3bb61971" Jan 20 17:50:50 crc kubenswrapper[4558]: E0120 17:50:50.564300 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2baf4f2136d18e4952d5247b3830cdd80252007fcbbfd6cab54cf44c3bb61971\": container with ID starting with 2baf4f2136d18e4952d5247b3830cdd80252007fcbbfd6cab54cf44c3bb61971 not found: ID does not exist" containerID="2baf4f2136d18e4952d5247b3830cdd80252007fcbbfd6cab54cf44c3bb61971" Jan 20 17:50:50 crc kubenswrapper[4558]: I0120 17:50:50.564406 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2baf4f2136d18e4952d5247b3830cdd80252007fcbbfd6cab54cf44c3bb61971"} err="failed to get container status \"2baf4f2136d18e4952d5247b3830cdd80252007fcbbfd6cab54cf44c3bb61971\": rpc error: code = NotFound desc = could not find container \"2baf4f2136d18e4952d5247b3830cdd80252007fcbbfd6cab54cf44c3bb61971\": container with ID starting with 2baf4f2136d18e4952d5247b3830cdd80252007fcbbfd6cab54cf44c3bb61971 not found: ID does not exist" Jan 20 17:50:50 crc kubenswrapper[4558]: I0120 17:50:50.564626 4558 scope.go:117] "RemoveContainer" 
containerID="108f827cc83a5bd00deb8be0ea3d91e1817061559a969d618b8baa66e1908eb5" Jan 20 17:50:50 crc kubenswrapper[4558]: E0120 17:50:50.565537 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"108f827cc83a5bd00deb8be0ea3d91e1817061559a969d618b8baa66e1908eb5\": container with ID starting with 108f827cc83a5bd00deb8be0ea3d91e1817061559a969d618b8baa66e1908eb5 not found: ID does not exist" containerID="108f827cc83a5bd00deb8be0ea3d91e1817061559a969d618b8baa66e1908eb5" Jan 20 17:50:50 crc kubenswrapper[4558]: I0120 17:50:50.565590 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"108f827cc83a5bd00deb8be0ea3d91e1817061559a969d618b8baa66e1908eb5"} err="failed to get container status \"108f827cc83a5bd00deb8be0ea3d91e1817061559a969d618b8baa66e1908eb5\": rpc error: code = NotFound desc = could not find container \"108f827cc83a5bd00deb8be0ea3d91e1817061559a969d618b8baa66e1908eb5\": container with ID starting with 108f827cc83a5bd00deb8be0ea3d91e1817061559a969d618b8baa66e1908eb5 not found: ID does not exist" Jan 20 17:50:50 crc kubenswrapper[4558]: I0120 17:50:50.565619 4558 scope.go:117] "RemoveContainer" containerID="c03312061f7266dcd57196ddf4d0814f1c93b4dd0503b2de614de31491810448" Jan 20 17:50:50 crc kubenswrapper[4558]: E0120 17:50:50.566480 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c03312061f7266dcd57196ddf4d0814f1c93b4dd0503b2de614de31491810448\": container with ID starting with c03312061f7266dcd57196ddf4d0814f1c93b4dd0503b2de614de31491810448 not found: ID does not exist" containerID="c03312061f7266dcd57196ddf4d0814f1c93b4dd0503b2de614de31491810448" Jan 20 17:50:50 crc kubenswrapper[4558]: I0120 17:50:50.566524 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c03312061f7266dcd57196ddf4d0814f1c93b4dd0503b2de614de31491810448"} err="failed to get container status \"c03312061f7266dcd57196ddf4d0814f1c93b4dd0503b2de614de31491810448\": rpc error: code = NotFound desc = could not find container \"c03312061f7266dcd57196ddf4d0814f1c93b4dd0503b2de614de31491810448\": container with ID starting with c03312061f7266dcd57196ddf4d0814f1c93b4dd0503b2de614de31491810448 not found: ID does not exist" Jan 20 17:50:50 crc kubenswrapper[4558]: I0120 17:50:50.576133 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="242c4cdd-2c26-40bd-acaf-782a26f5f6eb" path="/var/lib/kubelet/pods/242c4cdd-2c26-40bd-acaf-782a26f5f6eb/volumes" Jan 20 17:50:50 crc kubenswrapper[4558]: I0120 17:50:50.576863 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="317faea5-3184-4d0c-a9e5-7f038e779e08" path="/var/lib/kubelet/pods/317faea5-3184-4d0c-a9e5-7f038e779e08/volumes" Jan 20 17:50:50 crc kubenswrapper[4558]: I0120 17:50:50.578055 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="603501da-057d-4d3b-a682-e55e7e2916f4" path="/var/lib/kubelet/pods/603501da-057d-4d3b-a682-e55e7e2916f4/volumes" Jan 20 17:50:50 crc kubenswrapper[4558]: I0120 17:50:50.903985 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"a18e9b68-4683-415e-833e-5b363e31d461","Type":"ContainerStarted","Data":"eedccb8254c4f03083611f17fbaf5f47532ace4c2ec8b61ac20041dbf695b528"} Jan 20 17:50:50 crc kubenswrapper[4558]: I0120 17:50:50.904060 4558 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"a18e9b68-4683-415e-833e-5b363e31d461","Type":"ContainerStarted","Data":"01578d9045580c439c227d018be1cb6d842df504a938d3e2e9739bff0eb9b11e"} Jan 20 17:50:50 crc kubenswrapper[4558]: I0120 17:50:50.911972 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"924fa7ce-8d60-4b2f-b62b-d5e146474f71","Type":"ContainerStarted","Data":"a9c70afb4065cfa5e1226d107bf8a809ffc6655bc0ed256d32ec2a30414f56e5"} Jan 20 17:50:50 crc kubenswrapper[4558]: I0120 17:50:50.912498 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:50:51 crc kubenswrapper[4558]: I0120 17:50:51.645344 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/cinder-scheduler-0" podUID="51d19369-14ac-4d62-ab05-9c5830856622" containerName="cinder-scheduler" probeResult="failure" output="Get \"http://10.217.1.196:8080/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 20 17:50:52 crc kubenswrapper[4558]: I0120 17:50:52.324303 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-metadata-0" podUID="acb2f7f3-f6d0-4b4f-936f-baee3dfa3938" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.1.192:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:50:52 crc kubenswrapper[4558]: I0120 17:50:52.324316 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-metadata-0" podUID="acb2f7f3-f6d0-4b4f-936f-baee3dfa3938" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.1.192:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:50:52 crc kubenswrapper[4558]: I0120 17:50:52.934238 4558 generic.go:334] "Generic (PLEG): container finished" podID="a18e9b68-4683-415e-833e-5b363e31d461" containerID="eedccb8254c4f03083611f17fbaf5f47532ace4c2ec8b61ac20041dbf695b528" exitCode=0 Jan 20 17:50:52 crc kubenswrapper[4558]: I0120 17:50:52.934329 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"a18e9b68-4683-415e-833e-5b363e31d461","Type":"ContainerDied","Data":"eedccb8254c4f03083611f17fbaf5f47532ace4c2ec8b61ac20041dbf695b528"} Jan 20 17:50:52 crc kubenswrapper[4558]: I0120 17:50:52.936601 4558 generic.go:334] "Generic (PLEG): container finished" podID="05b3b028-51e2-49a9-9fb8-a10c096f3b27" containerID="a25982768f5cf2c38ae4bca2414ca592ef39c07924ab9b34e0d46ea3478509b7" exitCode=0 Jan 20 17:50:52 crc kubenswrapper[4558]: I0120 17:50:52.936666 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"05b3b028-51e2-49a9-9fb8-a10c096f3b27","Type":"ContainerDied","Data":"a25982768f5cf2c38ae4bca2414ca592ef39c07924ab9b34e0d46ea3478509b7"} Jan 20 17:50:52 crc kubenswrapper[4558]: I0120 17:50:52.940215 4558 generic.go:334] "Generic (PLEG): container finished" podID="924fa7ce-8d60-4b2f-b62b-d5e146474f71" containerID="a9c70afb4065cfa5e1226d107bf8a809ffc6655bc0ed256d32ec2a30414f56e5" exitCode=1 Jan 20 17:50:52 crc kubenswrapper[4558]: I0120 17:50:52.940261 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" 
event={"ID":"924fa7ce-8d60-4b2f-b62b-d5e146474f71","Type":"ContainerDied","Data":"a9c70afb4065cfa5e1226d107bf8a809ffc6655bc0ed256d32ec2a30414f56e5"} Jan 20 17:50:52 crc kubenswrapper[4558]: I0120 17:50:52.940307 4558 scope.go:117] "RemoveContainer" containerID="51b725953cbe989b3d1801020e679e346a8ac5643918b80a84ec2b9a8ed5efcb" Jan 20 17:50:52 crc kubenswrapper[4558]: I0120 17:50:52.940958 4558 scope.go:117] "RemoveContainer" containerID="a9c70afb4065cfa5e1226d107bf8a809ffc6655bc0ed256d32ec2a30414f56e5" Jan 20 17:50:52 crc kubenswrapper[4558]: E0120 17:50:52.941292 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-cell0-conductor-conductor\" with CrashLoopBackOff: \"back-off 20s restarting failed container=nova-cell0-conductor-conductor pod=nova-cell0-conductor-0_openstack-kuttl-tests(924fa7ce-8d60-4b2f-b62b-d5e146474f71)\"" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="924fa7ce-8d60-4b2f-b62b-d5e146474f71" Jan 20 17:50:53 crc kubenswrapper[4558]: I0120 17:50:53.316619 4558 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:50:53 crc kubenswrapper[4558]: I0120 17:50:53.956499 4558 scope.go:117] "RemoveContainer" containerID="a9c70afb4065cfa5e1226d107bf8a809ffc6655bc0ed256d32ec2a30414f56e5" Jan 20 17:50:53 crc kubenswrapper[4558]: E0120 17:50:53.957094 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-cell0-conductor-conductor\" with CrashLoopBackOff: \"back-off 20s restarting failed container=nova-cell0-conductor-conductor pod=nova-cell0-conductor-0_openstack-kuttl-tests(924fa7ce-8d60-4b2f-b62b-d5e146474f71)\"" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="924fa7ce-8d60-4b2f-b62b-d5e146474f71" Jan 20 17:50:53 crc kubenswrapper[4558]: I0120 17:50:53.959084 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"a18e9b68-4683-415e-833e-5b363e31d461","Type":"ContainerStarted","Data":"142baadd97db60a56cd4c8eefd272ce6d8e94faf94a19e872d0adb86b7b6376a"} Jan 20 17:50:53 crc kubenswrapper[4558]: I0120 17:50:53.961427 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"05b3b028-51e2-49a9-9fb8-a10c096f3b27","Type":"ContainerStarted","Data":"5c52a309d4b859bba6422d3ae6fa7e9422d3b5d97dc982533c7cb8e876b2f184"} Jan 20 17:50:53 crc kubenswrapper[4558]: I0120 17:50:53.998257 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/openstack-cell1-galera-0" podStartSLOduration=5.998229277 podStartE2EDuration="5.998229277s" podCreationTimestamp="2026-01-20 17:50:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:50:53.991794698 +0000 UTC m=+4147.752132664" watchObservedRunningTime="2026-01-20 17:50:53.998229277 +0000 UTC m=+4147.758567244" Jan 20 17:50:54 crc kubenswrapper[4558]: I0120 17:50:54.027338 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/openstack-galera-0" podStartSLOduration=5.027319899 podStartE2EDuration="5.027319899s" podCreationTimestamp="2026-01-20 17:50:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:50:54.013753108 +0000 UTC m=+4147.774091064" 
watchObservedRunningTime="2026-01-20 17:50:54.027319899 +0000 UTC m=+4147.787657865" Jan 20 17:50:54 crc kubenswrapper[4558]: I0120 17:50:54.513216 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:50:54 crc kubenswrapper[4558]: I0120 17:50:54.550521 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:50:54 crc kubenswrapper[4558]: I0120 17:50:54.661284 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:50:54 crc kubenswrapper[4558]: I0120 17:50:54.661499 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podUID="b2ecae06-8e2e-4e1a-b788-606e519c8ff5" containerName="nova-cell1-conductor-conductor" containerID="cri-o://193468c685fa9ab1f363ecb0e7f771a966526b372a315288f277c9d2d2a6d417" gracePeriod=30 Jan 20 17:50:54 crc kubenswrapper[4558]: I0120 17:50:54.755394 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-9795586d8-4dv95"] Jan 20 17:50:54 crc kubenswrapper[4558]: E0120 17:50:54.758008 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[internal-tls-certs], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack-kuttl-tests/keystone-9795586d8-4dv95" podUID="0a3695d2-9733-430d-a489-694e9e97b586" Jan 20 17:50:54 crc kubenswrapper[4558]: I0120 17:50:54.784398 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-64bbc794f7-mwswp"] Jan 20 17:50:54 crc kubenswrapper[4558]: E0120 17:50:54.785091 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="317faea5-3184-4d0c-a9e5-7f038e779e08" containerName="registry-server" Jan 20 17:50:54 crc kubenswrapper[4558]: I0120 17:50:54.785176 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="317faea5-3184-4d0c-a9e5-7f038e779e08" containerName="registry-server" Jan 20 17:50:54 crc kubenswrapper[4558]: E0120 17:50:54.785260 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="317faea5-3184-4d0c-a9e5-7f038e779e08" containerName="extract-content" Jan 20 17:50:54 crc kubenswrapper[4558]: I0120 17:50:54.785330 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="317faea5-3184-4d0c-a9e5-7f038e779e08" containerName="extract-content" Jan 20 17:50:54 crc kubenswrapper[4558]: E0120 17:50:54.785400 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="317faea5-3184-4d0c-a9e5-7f038e779e08" containerName="extract-utilities" Jan 20 17:50:54 crc kubenswrapper[4558]: I0120 17:50:54.785445 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="317faea5-3184-4d0c-a9e5-7f038e779e08" containerName="extract-utilities" Jan 20 17:50:54 crc kubenswrapper[4558]: I0120 17:50:54.785667 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="317faea5-3184-4d0c-a9e5-7f038e779e08" containerName="registry-server" Jan 20 17:50:54 crc kubenswrapper[4558]: I0120 17:50:54.786485 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-64bbc794f7-mwswp" Jan 20 17:50:54 crc kubenswrapper[4558]: I0120 17:50:54.797073 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-64bbc794f7-mwswp"] Jan 20 17:50:54 crc kubenswrapper[4558]: I0120 17:50:54.869373 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-7b9876b66d-znq49"] Jan 20 17:50:54 crc kubenswrapper[4558]: E0120 17:50:54.870190 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[internal-tls-certs ovndb-tls-certs public-tls-certs], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack-kuttl-tests/neutron-7b9876b66d-znq49" podUID="7458929c-bc4e-4f17-b199-4963b298f4e8" Jan 20 17:50:54 crc kubenswrapper[4558]: I0120 17:50:54.894293 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-6bd94d8c7d-2mkc6"] Jan 20 17:50:54 crc kubenswrapper[4558]: I0120 17:50:54.896198 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-6bd94d8c7d-2mkc6" Jan 20 17:50:54 crc kubenswrapper[4558]: I0120 17:50:54.906790 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc15c42c-dd9c-4f60-b92a-7140713f32fb-combined-ca-bundle\") pod \"keystone-64bbc794f7-mwswp\" (UID: \"cc15c42c-dd9c-4f60-b92a-7140713f32fb\") " pod="openstack-kuttl-tests/keystone-64bbc794f7-mwswp" Jan 20 17:50:54 crc kubenswrapper[4558]: I0120 17:50:54.906997 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc15c42c-dd9c-4f60-b92a-7140713f32fb-internal-tls-certs\") pod \"keystone-64bbc794f7-mwswp\" (UID: \"cc15c42c-dd9c-4f60-b92a-7140713f32fb\") " pod="openstack-kuttl-tests/keystone-64bbc794f7-mwswp" Jan 20 17:50:54 crc kubenswrapper[4558]: I0120 17:50:54.907125 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dqbrf\" (UniqueName: \"kubernetes.io/projected/cc15c42c-dd9c-4f60-b92a-7140713f32fb-kube-api-access-dqbrf\") pod \"keystone-64bbc794f7-mwswp\" (UID: \"cc15c42c-dd9c-4f60-b92a-7140713f32fb\") " pod="openstack-kuttl-tests/keystone-64bbc794f7-mwswp" Jan 20 17:50:54 crc kubenswrapper[4558]: I0120 17:50:54.907144 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-6bd94d8c7d-2mkc6"] Jan 20 17:50:54 crc kubenswrapper[4558]: I0120 17:50:54.907256 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc15c42c-dd9c-4f60-b92a-7140713f32fb-public-tls-certs\") pod \"keystone-64bbc794f7-mwswp\" (UID: \"cc15c42c-dd9c-4f60-b92a-7140713f32fb\") " pod="openstack-kuttl-tests/keystone-64bbc794f7-mwswp" Jan 20 17:50:54 crc kubenswrapper[4558]: I0120 17:50:54.907315 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cc15c42c-dd9c-4f60-b92a-7140713f32fb-scripts\") pod \"keystone-64bbc794f7-mwswp\" (UID: \"cc15c42c-dd9c-4f60-b92a-7140713f32fb\") " pod="openstack-kuttl-tests/keystone-64bbc794f7-mwswp" Jan 20 17:50:54 crc kubenswrapper[4558]: I0120 17:50:54.907452 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/cc15c42c-dd9c-4f60-b92a-7140713f32fb-credential-keys\") pod \"keystone-64bbc794f7-mwswp\" (UID: \"cc15c42c-dd9c-4f60-b92a-7140713f32fb\") " pod="openstack-kuttl-tests/keystone-64bbc794f7-mwswp" Jan 20 17:50:54 crc kubenswrapper[4558]: I0120 17:50:54.907562 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cc15c42c-dd9c-4f60-b92a-7140713f32fb-config-data\") pod \"keystone-64bbc794f7-mwswp\" (UID: \"cc15c42c-dd9c-4f60-b92a-7140713f32fb\") " pod="openstack-kuttl-tests/keystone-64bbc794f7-mwswp" Jan 20 17:50:54 crc kubenswrapper[4558]: I0120 17:50:54.907639 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/cc15c42c-dd9c-4f60-b92a-7140713f32fb-fernet-keys\") pod \"keystone-64bbc794f7-mwswp\" (UID: \"cc15c42c-dd9c-4f60-b92a-7140713f32fb\") " pod="openstack-kuttl-tests/keystone-64bbc794f7-mwswp" Jan 20 17:50:54 crc kubenswrapper[4558]: I0120 17:50:54.978415 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-9795586d8-4dv95" Jan 20 17:50:54 crc kubenswrapper[4558]: I0120 17:50:54.978445 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-7b9876b66d-znq49" Jan 20 17:50:54 crc kubenswrapper[4558]: I0120 17:50:54.978645 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/memcached-0" podUID="5239f3e2-7f54-4042-b0da-02f7608224df" containerName="memcached" containerID="cri-o://920c89eb591e28b1762567ed35128bdcddc70641d214ce72662434c0af29d721" gracePeriod=30 Jan 20 17:50:54 crc kubenswrapper[4558]: I0120 17:50:54.998366 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-9795586d8-4dv95" Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.001193 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-7b9876b66d-znq49" Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.011623 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9-ovndb-tls-certs\") pod \"neutron-6bd94d8c7d-2mkc6\" (UID: \"2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9\") " pod="openstack-kuttl-tests/neutron-6bd94d8c7d-2mkc6" Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.011680 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc15c42c-dd9c-4f60-b92a-7140713f32fb-internal-tls-certs\") pod \"keystone-64bbc794f7-mwswp\" (UID: \"cc15c42c-dd9c-4f60-b92a-7140713f32fb\") " pod="openstack-kuttl-tests/keystone-64bbc794f7-mwswp" Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.011740 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dqbrf\" (UniqueName: \"kubernetes.io/projected/cc15c42c-dd9c-4f60-b92a-7140713f32fb-kube-api-access-dqbrf\") pod \"keystone-64bbc794f7-mwswp\" (UID: \"cc15c42c-dd9c-4f60-b92a-7140713f32fb\") " pod="openstack-kuttl-tests/keystone-64bbc794f7-mwswp" Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.011808 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q8mbz\" (UniqueName: \"kubernetes.io/projected/2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9-kube-api-access-q8mbz\") pod \"neutron-6bd94d8c7d-2mkc6\" (UID: \"2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9\") " pod="openstack-kuttl-tests/neutron-6bd94d8c7d-2mkc6" Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.011837 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc15c42c-dd9c-4f60-b92a-7140713f32fb-public-tls-certs\") pod \"keystone-64bbc794f7-mwswp\" (UID: \"cc15c42c-dd9c-4f60-b92a-7140713f32fb\") " pod="openstack-kuttl-tests/keystone-64bbc794f7-mwswp" Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.011863 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9-httpd-config\") pod \"neutron-6bd94d8c7d-2mkc6\" (UID: \"2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9\") " pod="openstack-kuttl-tests/neutron-6bd94d8c7d-2mkc6" Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.011881 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cc15c42c-dd9c-4f60-b92a-7140713f32fb-scripts\") pod \"keystone-64bbc794f7-mwswp\" (UID: \"cc15c42c-dd9c-4f60-b92a-7140713f32fb\") " pod="openstack-kuttl-tests/keystone-64bbc794f7-mwswp" Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.011929 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/cc15c42c-dd9c-4f60-b92a-7140713f32fb-credential-keys\") pod \"keystone-64bbc794f7-mwswp\" (UID: \"cc15c42c-dd9c-4f60-b92a-7140713f32fb\") " pod="openstack-kuttl-tests/keystone-64bbc794f7-mwswp" Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.011984 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9-combined-ca-bundle\") pod \"neutron-6bd94d8c7d-2mkc6\" (UID: \"2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9\") " pod="openstack-kuttl-tests/neutron-6bd94d8c7d-2mkc6" Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.012013 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cc15c42c-dd9c-4f60-b92a-7140713f32fb-config-data\") pod \"keystone-64bbc794f7-mwswp\" (UID: \"cc15c42c-dd9c-4f60-b92a-7140713f32fb\") " pod="openstack-kuttl-tests/keystone-64bbc794f7-mwswp" Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.012045 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9-config\") pod \"neutron-6bd94d8c7d-2mkc6\" (UID: \"2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9\") " pod="openstack-kuttl-tests/neutron-6bd94d8c7d-2mkc6" Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.012077 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/cc15c42c-dd9c-4f60-b92a-7140713f32fb-fernet-keys\") pod \"keystone-64bbc794f7-mwswp\" (UID: \"cc15c42c-dd9c-4f60-b92a-7140713f32fb\") " pod="openstack-kuttl-tests/keystone-64bbc794f7-mwswp" Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.012100 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9-internal-tls-certs\") pod \"neutron-6bd94d8c7d-2mkc6\" (UID: \"2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9\") " pod="openstack-kuttl-tests/neutron-6bd94d8c7d-2mkc6" Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.012123 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9-public-tls-certs\") pod \"neutron-6bd94d8c7d-2mkc6\" (UID: \"2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9\") " pod="openstack-kuttl-tests/neutron-6bd94d8c7d-2mkc6" Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.012142 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc15c42c-dd9c-4f60-b92a-7140713f32fb-combined-ca-bundle\") pod \"keystone-64bbc794f7-mwswp\" (UID: \"cc15c42c-dd9c-4f60-b92a-7140713f32fb\") " pod="openstack-kuttl-tests/keystone-64bbc794f7-mwswp" Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.020313 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/cc15c42c-dd9c-4f60-b92a-7140713f32fb-fernet-keys\") pod \"keystone-64bbc794f7-mwswp\" (UID: \"cc15c42c-dd9c-4f60-b92a-7140713f32fb\") " pod="openstack-kuttl-tests/keystone-64bbc794f7-mwswp" Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.020577 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc15c42c-dd9c-4f60-b92a-7140713f32fb-internal-tls-certs\") pod \"keystone-64bbc794f7-mwswp\" (UID: \"cc15c42c-dd9c-4f60-b92a-7140713f32fb\") " pod="openstack-kuttl-tests/keystone-64bbc794f7-mwswp" Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.023543 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/cc15c42c-dd9c-4f60-b92a-7140713f32fb-scripts\") pod \"keystone-64bbc794f7-mwswp\" (UID: \"cc15c42c-dd9c-4f60-b92a-7140713f32fb\") " pod="openstack-kuttl-tests/keystone-64bbc794f7-mwswp" Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.029191 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc15c42c-dd9c-4f60-b92a-7140713f32fb-public-tls-certs\") pod \"keystone-64bbc794f7-mwswp\" (UID: \"cc15c42c-dd9c-4f60-b92a-7140713f32fb\") " pod="openstack-kuttl-tests/keystone-64bbc794f7-mwswp" Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.029536 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/cc15c42c-dd9c-4f60-b92a-7140713f32fb-credential-keys\") pod \"keystone-64bbc794f7-mwswp\" (UID: \"cc15c42c-dd9c-4f60-b92a-7140713f32fb\") " pod="openstack-kuttl-tests/keystone-64bbc794f7-mwswp" Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.030116 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cc15c42c-dd9c-4f60-b92a-7140713f32fb-config-data\") pod \"keystone-64bbc794f7-mwswp\" (UID: \"cc15c42c-dd9c-4f60-b92a-7140713f32fb\") " pod="openstack-kuttl-tests/keystone-64bbc794f7-mwswp" Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.032768 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc15c42c-dd9c-4f60-b92a-7140713f32fb-combined-ca-bundle\") pod \"keystone-64bbc794f7-mwswp\" (UID: \"cc15c42c-dd9c-4f60-b92a-7140713f32fb\") " pod="openstack-kuttl-tests/keystone-64bbc794f7-mwswp" Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.037583 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dqbrf\" (UniqueName: \"kubernetes.io/projected/cc15c42c-dd9c-4f60-b92a-7140713f32fb-kube-api-access-dqbrf\") pod \"keystone-64bbc794f7-mwswp\" (UID: \"cc15c42c-dd9c-4f60-b92a-7140713f32fb\") " pod="openstack-kuttl-tests/keystone-64bbc794f7-mwswp" Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.116094 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0a3695d2-9733-430d-a489-694e9e97b586-public-tls-certs\") pod \"0a3695d2-9733-430d-a489-694e9e97b586\" (UID: \"0a3695d2-9733-430d-a489-694e9e97b586\") " Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.116178 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-config\") pod \"7458929c-bc4e-4f17-b199-4963b298f4e8\" (UID: \"7458929c-bc4e-4f17-b199-4963b298f4e8\") " Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.116221 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jspp2\" (UniqueName: \"kubernetes.io/projected/0a3695d2-9733-430d-a489-694e9e97b586-kube-api-access-jspp2\") pod \"0a3695d2-9733-430d-a489-694e9e97b586\" (UID: \"0a3695d2-9733-430d-a489-694e9e97b586\") " Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.116358 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0a3695d2-9733-430d-a489-694e9e97b586-scripts\") pod \"0a3695d2-9733-430d-a489-694e9e97b586\" (UID: \"0a3695d2-9733-430d-a489-694e9e97b586\") " Jan 
20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.116395 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-combined-ca-bundle\") pod \"7458929c-bc4e-4f17-b199-4963b298f4e8\" (UID: \"7458929c-bc4e-4f17-b199-4963b298f4e8\") " Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.116450 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x78ng\" (UniqueName: \"kubernetes.io/projected/7458929c-bc4e-4f17-b199-4963b298f4e8-kube-api-access-x78ng\") pod \"7458929c-bc4e-4f17-b199-4963b298f4e8\" (UID: \"7458929c-bc4e-4f17-b199-4963b298f4e8\") " Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.116523 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0a3695d2-9733-430d-a489-694e9e97b586-fernet-keys\") pod \"0a3695d2-9733-430d-a489-694e9e97b586\" (UID: \"0a3695d2-9733-430d-a489-694e9e97b586\") " Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.116570 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a3695d2-9733-430d-a489-694e9e97b586-combined-ca-bundle\") pod \"0a3695d2-9733-430d-a489-694e9e97b586\" (UID: \"0a3695d2-9733-430d-a489-694e9e97b586\") " Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.116607 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0a3695d2-9733-430d-a489-694e9e97b586-config-data\") pod \"0a3695d2-9733-430d-a489-694e9e97b586\" (UID: \"0a3695d2-9733-430d-a489-694e9e97b586\") " Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.116669 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-httpd-config\") pod \"7458929c-bc4e-4f17-b199-4963b298f4e8\" (UID: \"7458929c-bc4e-4f17-b199-4963b298f4e8\") " Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.116739 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/0a3695d2-9733-430d-a489-694e9e97b586-credential-keys\") pod \"0a3695d2-9733-430d-a489-694e9e97b586\" (UID: \"0a3695d2-9733-430d-a489-694e9e97b586\") " Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.117142 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9-ovndb-tls-certs\") pod \"neutron-6bd94d8c7d-2mkc6\" (UID: \"2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9\") " pod="openstack-kuttl-tests/neutron-6bd94d8c7d-2mkc6" Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.117296 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q8mbz\" (UniqueName: \"kubernetes.io/projected/2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9-kube-api-access-q8mbz\") pod \"neutron-6bd94d8c7d-2mkc6\" (UID: \"2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9\") " pod="openstack-kuttl-tests/neutron-6bd94d8c7d-2mkc6" Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.117336 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9-httpd-config\") pod \"neutron-6bd94d8c7d-2mkc6\" 
(UID: \"2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9\") " pod="openstack-kuttl-tests/neutron-6bd94d8c7d-2mkc6" Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.117427 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9-combined-ca-bundle\") pod \"neutron-6bd94d8c7d-2mkc6\" (UID: \"2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9\") " pod="openstack-kuttl-tests/neutron-6bd94d8c7d-2mkc6" Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.117471 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9-config\") pod \"neutron-6bd94d8c7d-2mkc6\" (UID: \"2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9\") " pod="openstack-kuttl-tests/neutron-6bd94d8c7d-2mkc6" Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.117513 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9-internal-tls-certs\") pod \"neutron-6bd94d8c7d-2mkc6\" (UID: \"2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9\") " pod="openstack-kuttl-tests/neutron-6bd94d8c7d-2mkc6" Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.117537 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9-public-tls-certs\") pod \"neutron-6bd94d8c7d-2mkc6\" (UID: \"2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9\") " pod="openstack-kuttl-tests/neutron-6bd94d8c7d-2mkc6" Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.120828 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-64bbc794f7-mwswp" Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.130327 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0a3695d2-9733-430d-a489-694e9e97b586-kube-api-access-jspp2" (OuterVolumeSpecName: "kube-api-access-jspp2") pod "0a3695d2-9733-430d-a489-694e9e97b586" (UID: "0a3695d2-9733-430d-a489-694e9e97b586"). InnerVolumeSpecName "kube-api-access-jspp2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.130523 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-config" (OuterVolumeSpecName: "config") pod "7458929c-bc4e-4f17-b199-4963b298f4e8" (UID: "7458929c-bc4e-4f17-b199-4963b298f4e8"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.134100 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9-combined-ca-bundle\") pod \"neutron-6bd94d8c7d-2mkc6\" (UID: \"2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9\") " pod="openstack-kuttl-tests/neutron-6bd94d8c7d-2mkc6" Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.136227 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9-httpd-config\") pod \"neutron-6bd94d8c7d-2mkc6\" (UID: \"2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9\") " pod="openstack-kuttl-tests/neutron-6bd94d8c7d-2mkc6" Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.139407 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7458929c-bc4e-4f17-b199-4963b298f4e8" (UID: "7458929c-bc4e-4f17-b199-4963b298f4e8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.140607 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9-public-tls-certs\") pod \"neutron-6bd94d8c7d-2mkc6\" (UID: \"2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9\") " pod="openstack-kuttl-tests/neutron-6bd94d8c7d-2mkc6" Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.141548 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0a3695d2-9733-430d-a489-694e9e97b586-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "0a3695d2-9733-430d-a489-694e9e97b586" (UID: "0a3695d2-9733-430d-a489-694e9e97b586"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.142999 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9-config\") pod \"neutron-6bd94d8c7d-2mkc6\" (UID: \"2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9\") " pod="openstack-kuttl-tests/neutron-6bd94d8c7d-2mkc6" Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.146013 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9-ovndb-tls-certs\") pod \"neutron-6bd94d8c7d-2mkc6\" (UID: \"2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9\") " pod="openstack-kuttl-tests/neutron-6bd94d8c7d-2mkc6" Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.147867 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0a3695d2-9733-430d-a489-694e9e97b586-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0a3695d2-9733-430d-a489-694e9e97b586" (UID: "0a3695d2-9733-430d-a489-694e9e97b586"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.148094 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7458929c-bc4e-4f17-b199-4963b298f4e8-kube-api-access-x78ng" (OuterVolumeSpecName: "kube-api-access-x78ng") pod "7458929c-bc4e-4f17-b199-4963b298f4e8" (UID: "7458929c-bc4e-4f17-b199-4963b298f4e8"). InnerVolumeSpecName "kube-api-access-x78ng". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.148246 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0a3695d2-9733-430d-a489-694e9e97b586-config-data" (OuterVolumeSpecName: "config-data") pod "0a3695d2-9733-430d-a489-694e9e97b586" (UID: "0a3695d2-9733-430d-a489-694e9e97b586"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.158767 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9-internal-tls-certs\") pod \"neutron-6bd94d8c7d-2mkc6\" (UID: \"2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9\") " pod="openstack-kuttl-tests/neutron-6bd94d8c7d-2mkc6" Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.158927 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0a3695d2-9733-430d-a489-694e9e97b586-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "0a3695d2-9733-430d-a489-694e9e97b586" (UID: "0a3695d2-9733-430d-a489-694e9e97b586"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.163796 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "7458929c-bc4e-4f17-b199-4963b298f4e8" (UID: "7458929c-bc4e-4f17-b199-4963b298f4e8"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.163883 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0a3695d2-9733-430d-a489-694e9e97b586-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "0a3695d2-9733-430d-a489-694e9e97b586" (UID: "0a3695d2-9733-430d-a489-694e9e97b586"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.163945 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0a3695d2-9733-430d-a489-694e9e97b586-scripts" (OuterVolumeSpecName: "scripts") pod "0a3695d2-9733-430d-a489-694e9e97b586" (UID: "0a3695d2-9733-430d-a489-694e9e97b586"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.175175 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q8mbz\" (UniqueName: \"kubernetes.io/projected/2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9-kube-api-access-q8mbz\") pod \"neutron-6bd94d8c7d-2mkc6\" (UID: \"2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9\") " pod="openstack-kuttl-tests/neutron-6bd94d8c7d-2mkc6" Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.221317 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0a3695d2-9733-430d-a489-694e9e97b586-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.221476 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.221598 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x78ng\" (UniqueName: \"kubernetes.io/projected/7458929c-bc4e-4f17-b199-4963b298f4e8-kube-api-access-x78ng\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.221697 4558 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0a3695d2-9733-430d-a489-694e9e97b586-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.221802 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a3695d2-9733-430d-a489-694e9e97b586-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.221869 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0a3695d2-9733-430d-a489-694e9e97b586-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.222001 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-httpd-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.222574 4558 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/0a3695d2-9733-430d-a489-694e9e97b586-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.222645 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0a3695d2-9733-430d-a489-694e9e97b586-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.222702 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.222763 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jspp2\" (UniqueName: \"kubernetes.io/projected/0a3695d2-9733-430d-a489-694e9e97b586-kube-api-access-jspp2\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.227599 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-6bd94d8c7d-2mkc6" Jan 20 17:50:55 crc kubenswrapper[4558]: W0120 17:50:55.649143 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcc15c42c_dd9c_4f60_b92a_7140713f32fb.slice/crio-ae37826d9118dcb89eddae7c9d8d01e17adb4ef2da527aa137848316cfdb54b7 WatchSource:0}: Error finding container ae37826d9118dcb89eddae7c9d8d01e17adb4ef2da527aa137848316cfdb54b7: Status 404 returned error can't find the container with id ae37826d9118dcb89eddae7c9d8d01e17adb4ef2da527aa137848316cfdb54b7 Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.650693 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-64bbc794f7-mwswp"] Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.713896 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-6bd94d8c7d-2mkc6"] Jan 20 17:50:55 crc kubenswrapper[4558]: W0120 17:50:55.729943 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2ef9d4ef_ec42_4464_8744_4b3aea7b6eb9.slice/crio-962b122f0c0dd826ce08dff87611b9153906227cfee3b82537d770cb585af29a WatchSource:0}: Error finding container 962b122f0c0dd826ce08dff87611b9153906227cfee3b82537d770cb585af29a: Status 404 returned error can't find the container with id 962b122f0c0dd826ce08dff87611b9153906227cfee3b82537d770cb585af29a Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.834130 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.938906 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb8d9\" (UniqueName: \"kubernetes.io/projected/b2ecae06-8e2e-4e1a-b788-606e519c8ff5-kube-api-access-sb8d9\") pod \"b2ecae06-8e2e-4e1a-b788-606e519c8ff5\" (UID: \"b2ecae06-8e2e-4e1a-b788-606e519c8ff5\") " Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.939237 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b2ecae06-8e2e-4e1a-b788-606e519c8ff5-config-data\") pod \"b2ecae06-8e2e-4e1a-b788-606e519c8ff5\" (UID: \"b2ecae06-8e2e-4e1a-b788-606e519c8ff5\") " Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.939328 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2ecae06-8e2e-4e1a-b788-606e519c8ff5-combined-ca-bundle\") pod \"b2ecae06-8e2e-4e1a-b788-606e519c8ff5\" (UID: \"b2ecae06-8e2e-4e1a-b788-606e519c8ff5\") " Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.943454 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b2ecae06-8e2e-4e1a-b788-606e519c8ff5-kube-api-access-sb8d9" (OuterVolumeSpecName: "kube-api-access-sb8d9") pod "b2ecae06-8e2e-4e1a-b788-606e519c8ff5" (UID: "b2ecae06-8e2e-4e1a-b788-606e519c8ff5"). InnerVolumeSpecName "kube-api-access-sb8d9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.969792 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b2ecae06-8e2e-4e1a-b788-606e519c8ff5-config-data" (OuterVolumeSpecName: "config-data") pod "b2ecae06-8e2e-4e1a-b788-606e519c8ff5" (UID: "b2ecae06-8e2e-4e1a-b788-606e519c8ff5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.974372 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b2ecae06-8e2e-4e1a-b788-606e519c8ff5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b2ecae06-8e2e-4e1a-b788-606e519c8ff5" (UID: "b2ecae06-8e2e-4e1a-b788-606e519c8ff5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.987571 4558 generic.go:334] "Generic (PLEG): container finished" podID="b2ecae06-8e2e-4e1a-b788-606e519c8ff5" containerID="193468c685fa9ab1f363ecb0e7f771a966526b372a315288f277c9d2d2a6d417" exitCode=0 Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.987682 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.987682 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"b2ecae06-8e2e-4e1a-b788-606e519c8ff5","Type":"ContainerDied","Data":"193468c685fa9ab1f363ecb0e7f771a966526b372a315288f277c9d2d2a6d417"} Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.987968 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"b2ecae06-8e2e-4e1a-b788-606e519c8ff5","Type":"ContainerDied","Data":"31d194bd3c2c2a5cf54ff5a9562e3ae185573cca820cb09d293007b50bf8a7c1"} Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.988014 4558 scope.go:117] "RemoveContainer" containerID="193468c685fa9ab1f363ecb0e7f771a966526b372a315288f277c9d2d2a6d417" Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.989895 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-6bd94d8c7d-2mkc6" event={"ID":"2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9","Type":"ContainerStarted","Data":"94aea94556475245fb3ed99b92c774a78bc94fc47ab42792ec2c8ac17ee6298a"} Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.989923 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-6bd94d8c7d-2mkc6" event={"ID":"2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9","Type":"ContainerStarted","Data":"962b122f0c0dd826ce08dff87611b9153906227cfee3b82537d770cb585af29a"} Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.994439 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-9795586d8-4dv95" Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.998209 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-64bbc794f7-mwswp" event={"ID":"cc15c42c-dd9c-4f60-b92a-7140713f32fb","Type":"ContainerStarted","Data":"b395c952bedfc004b11ffa76313700fbc18b300f2d6b43029c49f65465fa385b"} Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.998235 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-64bbc794f7-mwswp" event={"ID":"cc15c42c-dd9c-4f60-b92a-7140713f32fb","Type":"ContainerStarted","Data":"ae37826d9118dcb89eddae7c9d8d01e17adb4ef2da527aa137848316cfdb54b7"} Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.998306 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-7b9876b66d-znq49" Jan 20 17:50:55 crc kubenswrapper[4558]: I0120 17:50:55.998755 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/keystone-64bbc794f7-mwswp" Jan 20 17:50:56 crc kubenswrapper[4558]: I0120 17:50:56.012585 4558 scope.go:117] "RemoveContainer" containerID="193468c685fa9ab1f363ecb0e7f771a966526b372a315288f277c9d2d2a6d417" Jan 20 17:50:56 crc kubenswrapper[4558]: E0120 17:50:56.012952 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"193468c685fa9ab1f363ecb0e7f771a966526b372a315288f277c9d2d2a6d417\": container with ID starting with 193468c685fa9ab1f363ecb0e7f771a966526b372a315288f277c9d2d2a6d417 not found: ID does not exist" containerID="193468c685fa9ab1f363ecb0e7f771a966526b372a315288f277c9d2d2a6d417" Jan 20 17:50:56 crc kubenswrapper[4558]: I0120 17:50:56.012988 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"193468c685fa9ab1f363ecb0e7f771a966526b372a315288f277c9d2d2a6d417"} err="failed to get container status \"193468c685fa9ab1f363ecb0e7f771a966526b372a315288f277c9d2d2a6d417\": rpc error: code = NotFound desc = could not find container \"193468c685fa9ab1f363ecb0e7f771a966526b372a315288f277c9d2d2a6d417\": container with ID starting with 193468c685fa9ab1f363ecb0e7f771a966526b372a315288f277c9d2d2a6d417 not found: ID does not exist" Jan 20 17:50:56 crc kubenswrapper[4558]: I0120 17:50:56.016225 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/keystone-64bbc794f7-mwswp" podStartSLOduration=2.016213698 podStartE2EDuration="2.016213698s" podCreationTimestamp="2026-01-20 17:50:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:50:56.013632905 +0000 UTC m=+4149.773970872" watchObservedRunningTime="2026-01-20 17:50:56.016213698 +0000 UTC m=+4149.776551665" Jan 20 17:50:56 crc kubenswrapper[4558]: I0120 17:50:56.044332 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb8d9\" (UniqueName: \"kubernetes.io/projected/b2ecae06-8e2e-4e1a-b788-606e519c8ff5-kube-api-access-sb8d9\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:56 crc kubenswrapper[4558]: I0120 17:50:56.044360 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b2ecae06-8e2e-4e1a-b788-606e519c8ff5-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:56 crc kubenswrapper[4558]: I0120 17:50:56.044371 4558 reconciler_common.go:293] 
"Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2ecae06-8e2e-4e1a-b788-606e519c8ff5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:56 crc kubenswrapper[4558]: I0120 17:50:56.047450 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:50:56 crc kubenswrapper[4558]: I0120 17:50:56.065836 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:50:56 crc kubenswrapper[4558]: I0120 17:50:56.080203 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:50:56 crc kubenswrapper[4558]: E0120 17:50:56.080657 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2ecae06-8e2e-4e1a-b788-606e519c8ff5" containerName="nova-cell1-conductor-conductor" Jan 20 17:50:56 crc kubenswrapper[4558]: I0120 17:50:56.080679 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2ecae06-8e2e-4e1a-b788-606e519c8ff5" containerName="nova-cell1-conductor-conductor" Jan 20 17:50:56 crc kubenswrapper[4558]: I0120 17:50:56.080916 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b2ecae06-8e2e-4e1a-b788-606e519c8ff5" containerName="nova-cell1-conductor-conductor" Jan 20 17:50:56 crc kubenswrapper[4558]: I0120 17:50:56.081662 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:50:56 crc kubenswrapper[4558]: I0120 17:50:56.084612 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-conductor-config-data" Jan 20 17:50:56 crc kubenswrapper[4558]: I0120 17:50:56.115469 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:50:56 crc kubenswrapper[4558]: I0120 17:50:56.128931 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-9795586d8-4dv95"] Jan 20 17:50:56 crc kubenswrapper[4558]: I0120 17:50:56.136222 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-9795586d8-4dv95"] Jan 20 17:50:56 crc kubenswrapper[4558]: I0120 17:50:56.145977 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-7b9876b66d-znq49"] Jan 20 17:50:56 crc kubenswrapper[4558]: I0120 17:50:56.151230 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-7b9876b66d-znq49"] Jan 20 17:50:56 crc kubenswrapper[4558]: I0120 17:50:56.213381 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-8c8mp" Jan 20 17:50:56 crc kubenswrapper[4558]: I0120 17:50:56.252360 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c16f0c4-1462-4817-9f74-9e3d93193867-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"7c16f0c4-1462-4817-9f74-9e3d93193867\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:50:56 crc kubenswrapper[4558]: I0120 17:50:56.252490 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z8nz2\" (UniqueName: \"kubernetes.io/projected/7c16f0c4-1462-4817-9f74-9e3d93193867-kube-api-access-z8nz2\") pod \"nova-cell1-conductor-0\" (UID: \"7c16f0c4-1462-4817-9f74-9e3d93193867\") " 
pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:50:56 crc kubenswrapper[4558]: I0120 17:50:56.252654 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c16f0c4-1462-4817-9f74-9e3d93193867-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"7c16f0c4-1462-4817-9f74-9e3d93193867\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:50:56 crc kubenswrapper[4558]: I0120 17:50:56.252747 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0a3695d2-9733-430d-a489-694e9e97b586-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:56 crc kubenswrapper[4558]: I0120 17:50:56.252768 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:56 crc kubenswrapper[4558]: I0120 17:50:56.252782 4558 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:56 crc kubenswrapper[4558]: I0120 17:50:56.252792 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7458929c-bc4e-4f17-b199-4963b298f4e8-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:56 crc kubenswrapper[4558]: I0120 17:50:56.258027 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-8c8mp" Jan 20 17:50:56 crc kubenswrapper[4558]: I0120 17:50:56.355225 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z8nz2\" (UniqueName: \"kubernetes.io/projected/7c16f0c4-1462-4817-9f74-9e3d93193867-kube-api-access-z8nz2\") pod \"nova-cell1-conductor-0\" (UID: \"7c16f0c4-1462-4817-9f74-9e3d93193867\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:50:56 crc kubenswrapper[4558]: I0120 17:50:56.355396 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c16f0c4-1462-4817-9f74-9e3d93193867-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"7c16f0c4-1462-4817-9f74-9e3d93193867\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:50:56 crc kubenswrapper[4558]: I0120 17:50:56.355637 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c16f0c4-1462-4817-9f74-9e3d93193867-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"7c16f0c4-1462-4817-9f74-9e3d93193867\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:50:56 crc kubenswrapper[4558]: I0120 17:50:56.360843 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c16f0c4-1462-4817-9f74-9e3d93193867-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"7c16f0c4-1462-4817-9f74-9e3d93193867\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:50:56 crc kubenswrapper[4558]: I0120 17:50:56.361095 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c16f0c4-1462-4817-9f74-9e3d93193867-combined-ca-bundle\") pod 
\"nova-cell1-conductor-0\" (UID: \"7c16f0c4-1462-4817-9f74-9e3d93193867\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:50:56 crc kubenswrapper[4558]: I0120 17:50:56.376686 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z8nz2\" (UniqueName: \"kubernetes.io/projected/7c16f0c4-1462-4817-9f74-9e3d93193867-kube-api-access-z8nz2\") pod \"nova-cell1-conductor-0\" (UID: \"7c16f0c4-1462-4817-9f74-9e3d93193867\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:50:56 crc kubenswrapper[4558]: I0120 17:50:56.438756 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:50:56 crc kubenswrapper[4558]: I0120 17:50:56.446430 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:50:56 crc kubenswrapper[4558]: E0120 17:50:56.447459 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[internal-tls-certs], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack-kuttl-tests/nova-api-0" podUID="733a1ae1-a917-447a-afa6-67cf7d688f86" Jan 20 17:50:56 crc kubenswrapper[4558]: I0120 17:50:56.454248 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-8c8mp"] Jan 20 17:50:56 crc kubenswrapper[4558]: I0120 17:50:56.579201 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0a3695d2-9733-430d-a489-694e9e97b586" path="/var/lib/kubelet/pods/0a3695d2-9733-430d-a489-694e9e97b586/volumes" Jan 20 17:50:56 crc kubenswrapper[4558]: I0120 17:50:56.579639 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7458929c-bc4e-4f17-b199-4963b298f4e8" path="/var/lib/kubelet/pods/7458929c-bc4e-4f17-b199-4963b298f4e8/volumes" Jan 20 17:50:56 crc kubenswrapper[4558]: I0120 17:50:56.580035 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b2ecae06-8e2e-4e1a-b788-606e519c8ff5" path="/var/lib/kubelet/pods/b2ecae06-8e2e-4e1a-b788-606e519c8ff5/volumes" Jan 20 17:50:56 crc kubenswrapper[4558]: I0120 17:50:56.686315 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/cinder-scheduler-0" podUID="51d19369-14ac-4d62-ab05-9c5830856622" containerName="cinder-scheduler" probeResult="failure" output="Get \"http://10.217.1.196:8080/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 20 17:50:56 crc kubenswrapper[4558]: I0120 17:50:56.882678 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:50:56 crc kubenswrapper[4558]: I0120 17:50:56.983107 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:50:57 crc kubenswrapper[4558]: I0120 17:50:57.013972 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-6bd94d8c7d-2mkc6" event={"ID":"2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9","Type":"ContainerStarted","Data":"241ee82c0eec1687f073958ac6fb291483d97abeacad8f032be37d7ad893499a"} Jan 20 17:50:57 crc kubenswrapper[4558]: I0120 17:50:57.015373 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/neutron-6bd94d8c7d-2mkc6" Jan 20 17:50:57 crc kubenswrapper[4558]: I0120 17:50:57.017545 4558 generic.go:334] "Generic (PLEG): container finished" podID="5239f3e2-7f54-4042-b0da-02f7608224df" containerID="920c89eb591e28b1762567ed35128bdcddc70641d214ce72662434c0af29d721" exitCode=0 Jan 20 17:50:57 crc kubenswrapper[4558]: I0120 17:50:57.017600 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"5239f3e2-7f54-4042-b0da-02f7608224df","Type":"ContainerDied","Data":"920c89eb591e28b1762567ed35128bdcddc70641d214ce72662434c0af29d721"} Jan 20 17:50:57 crc kubenswrapper[4558]: I0120 17:50:57.017624 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"5239f3e2-7f54-4042-b0da-02f7608224df","Type":"ContainerDied","Data":"60dddbb52bd9b2712fc9a84c8ab6cfb34a321ccf6b9369800400b23c8b94a06e"} Jan 20 17:50:57 crc kubenswrapper[4558]: I0120 17:50:57.017644 4558 scope.go:117] "RemoveContainer" containerID="920c89eb591e28b1762567ed35128bdcddc70641d214ce72662434c0af29d721" Jan 20 17:50:57 crc kubenswrapper[4558]: I0120 17:50:57.017761 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:50:57 crc kubenswrapper[4558]: I0120 17:50:57.027726 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"7c16f0c4-1462-4817-9f74-9e3d93193867","Type":"ContainerStarted","Data":"9de1eff2e0c32d007cbfdafaced6282266cc5b1f6b59c6f9b84efec9b076c0a5"} Jan 20 17:50:57 crc kubenswrapper[4558]: I0120 17:50:57.042304 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:50:57 crc kubenswrapper[4558]: I0120 17:50:57.066482 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/neutron-6bd94d8c7d-2mkc6" podStartSLOduration=3.066460117 podStartE2EDuration="3.066460117s" podCreationTimestamp="2026-01-20 17:50:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:50:57.033098003 +0000 UTC m=+4150.793435971" watchObservedRunningTime="2026-01-20 17:50:57.066460117 +0000 UTC m=+4150.826798084" Jan 20 17:50:57 crc kubenswrapper[4558]: I0120 17:50:57.078877 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5239f3e2-7f54-4042-b0da-02f7608224df-config-data\") pod \"5239f3e2-7f54-4042-b0da-02f7608224df\" (UID: \"5239f3e2-7f54-4042-b0da-02f7608224df\") " Jan 20 17:50:57 crc kubenswrapper[4558]: I0120 17:50:57.078928 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/5239f3e2-7f54-4042-b0da-02f7608224df-memcached-tls-certs\") pod \"5239f3e2-7f54-4042-b0da-02f7608224df\" (UID: \"5239f3e2-7f54-4042-b0da-02f7608224df\") " Jan 20 17:50:57 crc kubenswrapper[4558]: I0120 17:50:57.079048 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s7f6m\" (UniqueName: \"kubernetes.io/projected/5239f3e2-7f54-4042-b0da-02f7608224df-kube-api-access-s7f6m\") pod \"5239f3e2-7f54-4042-b0da-02f7608224df\" (UID: \"5239f3e2-7f54-4042-b0da-02f7608224df\") " Jan 20 17:50:57 crc kubenswrapper[4558]: I0120 17:50:57.079102 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5239f3e2-7f54-4042-b0da-02f7608224df-combined-ca-bundle\") pod \"5239f3e2-7f54-4042-b0da-02f7608224df\" (UID: \"5239f3e2-7f54-4042-b0da-02f7608224df\") " Jan 20 17:50:57 crc kubenswrapper[4558]: I0120 17:50:57.079138 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5239f3e2-7f54-4042-b0da-02f7608224df-kolla-config\") pod \"5239f3e2-7f54-4042-b0da-02f7608224df\" (UID: \"5239f3e2-7f54-4042-b0da-02f7608224df\") " Jan 20 17:50:57 crc kubenswrapper[4558]: I0120 17:50:57.080668 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5239f3e2-7f54-4042-b0da-02f7608224df-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "5239f3e2-7f54-4042-b0da-02f7608224df" (UID: "5239f3e2-7f54-4042-b0da-02f7608224df"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:50:57 crc kubenswrapper[4558]: I0120 17:50:57.081080 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5239f3e2-7f54-4042-b0da-02f7608224df-config-data" (OuterVolumeSpecName: "config-data") pod "5239f3e2-7f54-4042-b0da-02f7608224df" (UID: "5239f3e2-7f54-4042-b0da-02f7608224df"). InnerVolumeSpecName "config-data". 
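On the startup bookkeeping above: the "Observed pod startup duration" entries print podStartSLOduration and podStartE2EDuration alongside the raw timestamps, and for neutron-6bd94d8c7d-2mkc6 the 3.066460117s figure is exactly watchObservedRunningTime minus podCreationTimestamp (with firstStartedPulling/lastFinishedPulling at their zero values, the SLO and E2E figures coincide here). The tiny Go program below only re-does that subtraction on the logged values; it is not kubelet code.

package main

import (
    "fmt"
    "time"
)

func main() {
    // Values copied from the "Observed pod startup duration" entry above.
    // time.Parse accepts an input fractional second even when the layout omits it.
    const layout = "2006-01-02 15:04:05 -0700 MST"
    created, err := time.Parse(layout, "2026-01-20 17:50:54 +0000 UTC") // podCreationTimestamp
    if err != nil {
        panic(err)
    }
    watched, err := time.Parse(layout, "2026-01-20 17:50:57.066460117 +0000 UTC") // watchObservedRunningTime
    if err != nil {
        panic(err)
    }
    fmt.Println(watched.Sub(created)) // 3.066460117s, the logged podStartE2EDuration
}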
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:50:57 crc kubenswrapper[4558]: I0120 17:50:57.087179 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5239f3e2-7f54-4042-b0da-02f7608224df-kube-api-access-s7f6m" (OuterVolumeSpecName: "kube-api-access-s7f6m") pod "5239f3e2-7f54-4042-b0da-02f7608224df" (UID: "5239f3e2-7f54-4042-b0da-02f7608224df"). InnerVolumeSpecName "kube-api-access-s7f6m". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:50:57 crc kubenswrapper[4558]: I0120 17:50:57.096720 4558 scope.go:117] "RemoveContainer" containerID="920c89eb591e28b1762567ed35128bdcddc70641d214ce72662434c0af29d721" Jan 20 17:50:57 crc kubenswrapper[4558]: E0120 17:50:57.097103 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"920c89eb591e28b1762567ed35128bdcddc70641d214ce72662434c0af29d721\": container with ID starting with 920c89eb591e28b1762567ed35128bdcddc70641d214ce72662434c0af29d721 not found: ID does not exist" containerID="920c89eb591e28b1762567ed35128bdcddc70641d214ce72662434c0af29d721" Jan 20 17:50:57 crc kubenswrapper[4558]: I0120 17:50:57.097137 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"920c89eb591e28b1762567ed35128bdcddc70641d214ce72662434c0af29d721"} err="failed to get container status \"920c89eb591e28b1762567ed35128bdcddc70641d214ce72662434c0af29d721\": rpc error: code = NotFound desc = could not find container \"920c89eb591e28b1762567ed35128bdcddc70641d214ce72662434c0af29d721\": container with ID starting with 920c89eb591e28b1762567ed35128bdcddc70641d214ce72662434c0af29d721 not found: ID does not exist" Jan 20 17:50:57 crc kubenswrapper[4558]: I0120 17:50:57.107299 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5239f3e2-7f54-4042-b0da-02f7608224df-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5239f3e2-7f54-4042-b0da-02f7608224df" (UID: "5239f3e2-7f54-4042-b0da-02f7608224df"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:57 crc kubenswrapper[4558]: I0120 17:50:57.126918 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5239f3e2-7f54-4042-b0da-02f7608224df-memcached-tls-certs" (OuterVolumeSpecName: "memcached-tls-certs") pod "5239f3e2-7f54-4042-b0da-02f7608224df" (UID: "5239f3e2-7f54-4042-b0da-02f7608224df"). InnerVolumeSpecName "memcached-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:57 crc kubenswrapper[4558]: I0120 17:50:57.169534 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:50:57 crc kubenswrapper[4558]: I0120 17:50:57.182681 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5239f3e2-7f54-4042-b0da-02f7608224df-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:57 crc kubenswrapper[4558]: I0120 17:50:57.182719 4558 reconciler_common.go:293] "Volume detached for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/5239f3e2-7f54-4042-b0da-02f7608224df-memcached-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:57 crc kubenswrapper[4558]: I0120 17:50:57.182736 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s7f6m\" (UniqueName: \"kubernetes.io/projected/5239f3e2-7f54-4042-b0da-02f7608224df-kube-api-access-s7f6m\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:57 crc kubenswrapper[4558]: I0120 17:50:57.182801 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5239f3e2-7f54-4042-b0da-02f7608224df-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:57 crc kubenswrapper[4558]: I0120 17:50:57.182833 4558 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5239f3e2-7f54-4042-b0da-02f7608224df-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:57 crc kubenswrapper[4558]: I0120 17:50:57.283759 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/733a1ae1-a917-447a-afa6-67cf7d688f86-logs\") pod \"733a1ae1-a917-447a-afa6-67cf7d688f86\" (UID: \"733a1ae1-a917-447a-afa6-67cf7d688f86\") " Jan 20 17:50:57 crc kubenswrapper[4558]: I0120 17:50:57.283918 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/733a1ae1-a917-447a-afa6-67cf7d688f86-config-data\") pod \"733a1ae1-a917-447a-afa6-67cf7d688f86\" (UID: \"733a1ae1-a917-447a-afa6-67cf7d688f86\") " Jan 20 17:50:57 crc kubenswrapper[4558]: I0120 17:50:57.284134 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/733a1ae1-a917-447a-afa6-67cf7d688f86-logs" (OuterVolumeSpecName: "logs") pod "733a1ae1-a917-447a-afa6-67cf7d688f86" (UID: "733a1ae1-a917-447a-afa6-67cf7d688f86"). InnerVolumeSpecName "logs". 
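Twice in this excerpt (container 193468c6... for nova-cell1-conductor-0 and 920c89eb... for memcached-0), RemoveContainer is followed by a ContainerStatus call that comes back with rpc error: code = NotFound; the kubelet just records "DeleteContainer returned error" and carries on, since a NotFound reply from the runtime effectively means the container is already gone. A minimal sketch of that tolerate-NotFound pattern with the gRPC status package follows; removeIfPresent is a hypothetical helper, not a kubelet function.

package main

import (
    "errors"
    "fmt"

    "google.golang.org/grpc/codes"
    "google.golang.org/grpc/status"
)

// removeIfPresent treats a NotFound reply from the runtime as success,
// on the grounds that the container is already gone.
func removeIfPresent(remove func(id string) error, id string) error {
    if err := remove(id); err != nil && status.Code(err) != codes.NotFound {
        return fmt.Errorf("removing container %s: %w", id, err)
    }
    return nil
}

func main() {
    notFound := status.Error(codes.NotFound, `could not find container "193468c6..."`)
    fmt.Println(removeIfPresent(func(string) error { return notFound }, "193468c6..."))            // <nil>
    fmt.Println(removeIfPresent(func(string) error { return errors.New("rpc error") }, "920c89eb...")) // a real failure
}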
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:50:57 crc kubenswrapper[4558]: I0120 17:50:57.284496 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/733a1ae1-a917-447a-afa6-67cf7d688f86-combined-ca-bundle\") pod \"733a1ae1-a917-447a-afa6-67cf7d688f86\" (UID: \"733a1ae1-a917-447a-afa6-67cf7d688f86\") " Jan 20 17:50:57 crc kubenswrapper[4558]: I0120 17:50:57.284603 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5m8j8\" (UniqueName: \"kubernetes.io/projected/733a1ae1-a917-447a-afa6-67cf7d688f86-kube-api-access-5m8j8\") pod \"733a1ae1-a917-447a-afa6-67cf7d688f86\" (UID: \"733a1ae1-a917-447a-afa6-67cf7d688f86\") " Jan 20 17:50:57 crc kubenswrapper[4558]: I0120 17:50:57.284765 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/733a1ae1-a917-447a-afa6-67cf7d688f86-public-tls-certs\") pod \"733a1ae1-a917-447a-afa6-67cf7d688f86\" (UID: \"733a1ae1-a917-447a-afa6-67cf7d688f86\") " Jan 20 17:50:57 crc kubenswrapper[4558]: I0120 17:50:57.286011 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/733a1ae1-a917-447a-afa6-67cf7d688f86-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:57 crc kubenswrapper[4558]: I0120 17:50:57.288394 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/733a1ae1-a917-447a-afa6-67cf7d688f86-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "733a1ae1-a917-447a-afa6-67cf7d688f86" (UID: "733a1ae1-a917-447a-afa6-67cf7d688f86"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:57 crc kubenswrapper[4558]: I0120 17:50:57.288418 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/733a1ae1-a917-447a-afa6-67cf7d688f86-config-data" (OuterVolumeSpecName: "config-data") pod "733a1ae1-a917-447a-afa6-67cf7d688f86" (UID: "733a1ae1-a917-447a-afa6-67cf7d688f86"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:57 crc kubenswrapper[4558]: I0120 17:50:57.289724 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/733a1ae1-a917-447a-afa6-67cf7d688f86-kube-api-access-5m8j8" (OuterVolumeSpecName: "kube-api-access-5m8j8") pod "733a1ae1-a917-447a-afa6-67cf7d688f86" (UID: "733a1ae1-a917-447a-afa6-67cf7d688f86"). InnerVolumeSpecName "kube-api-access-5m8j8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:50:57 crc kubenswrapper[4558]: I0120 17:50:57.297313 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/733a1ae1-a917-447a-afa6-67cf7d688f86-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "733a1ae1-a917-447a-afa6-67cf7d688f86" (UID: "733a1ae1-a917-447a-afa6-67cf7d688f86"). InnerVolumeSpecName "public-tls-certs". 
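Both probe failures in this stretch are ordinary HTTP GETs the kubelet gave up on: the cinder-scheduler startup probe against http://10.217.1.196:8080/ a few records earlier hit the client timeout ("context deadline exceeded (Client.Timeout exceeded while awaiting headers)"), and the machine-config-daemon liveness probe just below gets connection refused on 127.0.0.1:8798. Below is a rough stand-alone reproduction of those two error shapes; the URL comes from the log, while the one-second timeout and the probe function itself are assumptions made for the illustration.

package main

import (
    "fmt"
    "net/http"
    "os"
    "time"
)

// probe issues a single GET with a hard client deadline, roughly the shape of
// an HTTP probe. Kubernetes counts 2xx/3xx responses as success.
func probe(url string, timeout time.Duration) error {
    client := &http.Client{Timeout: timeout}
    resp, err := client.Get(url)
    if err != nil {
        // Timeouts surface as "context deadline exceeded (Client.Timeout exceeded
        // while awaiting headers)"; a closed port as "connect: connection refused".
        return err
    }
    defer resp.Body.Close()
    if resp.StatusCode < 200 || resp.StatusCode >= 400 {
        return fmt.Errorf("unexpected status %d", resp.StatusCode)
    }
    return nil
}

func main() {
    if err := probe("http://10.217.1.196:8080/", 1*time.Second); err != nil {
        fmt.Fprintln(os.Stderr, "probe failed:", err)
        os.Exit(1)
    }
    fmt.Println("probe succeeded")
}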
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:57 crc kubenswrapper[4558]: I0120 17:50:57.329882 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 17:50:57 crc kubenswrapper[4558]: I0120 17:50:57.329960 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 17:50:57 crc kubenswrapper[4558]: I0120 17:50:57.357253 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:50:57 crc kubenswrapper[4558]: I0120 17:50:57.372314 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:50:57 crc kubenswrapper[4558]: I0120 17:50:57.381146 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:50:57 crc kubenswrapper[4558]: E0120 17:50:57.381715 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5239f3e2-7f54-4042-b0da-02f7608224df" containerName="memcached" Jan 20 17:50:57 crc kubenswrapper[4558]: I0120 17:50:57.381734 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="5239f3e2-7f54-4042-b0da-02f7608224df" containerName="memcached" Jan 20 17:50:57 crc kubenswrapper[4558]: I0120 17:50:57.381954 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="5239f3e2-7f54-4042-b0da-02f7608224df" containerName="memcached" Jan 20 17:50:57 crc kubenswrapper[4558]: I0120 17:50:57.382715 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:50:57 crc kubenswrapper[4558]: I0120 17:50:57.384241 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"memcached-memcached-dockercfg-lc49s" Jan 20 17:50:57 crc kubenswrapper[4558]: I0120 17:50:57.384981 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"memcached-config-data" Jan 20 17:50:57 crc kubenswrapper[4558]: I0120 17:50:57.385054 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-memcached-svc" Jan 20 17:50:57 crc kubenswrapper[4558]: I0120 17:50:57.388350 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/733a1ae1-a917-447a-afa6-67cf7d688f86-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:57 crc kubenswrapper[4558]: I0120 17:50:57.388440 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/733a1ae1-a917-447a-afa6-67cf7d688f86-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:57 crc kubenswrapper[4558]: I0120 17:50:57.388515 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5m8j8\" (UniqueName: \"kubernetes.io/projected/733a1ae1-a917-447a-afa6-67cf7d688f86-kube-api-access-5m8j8\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:57 crc kubenswrapper[4558]: I0120 17:50:57.388584 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/733a1ae1-a917-447a-afa6-67cf7d688f86-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:57 crc kubenswrapper[4558]: I0120 17:50:57.393349 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:50:57 crc kubenswrapper[4558]: I0120 17:50:57.461414 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:50:57 crc kubenswrapper[4558]: E0120 17:50:57.462469 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[internal-tls-certs public-tls-certs], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack-kuttl-tests/cinder-api-0" podUID="3ec0f915-5676-4d2a-b743-1f8ee11c01ac" Jan 20 17:50:57 crc kubenswrapper[4558]: I0120 17:50:57.490820 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f95ad818-8ba9-4839-a613-2282bbe69ddc-config-data\") pod \"memcached-0\" (UID: \"f95ad818-8ba9-4839-a613-2282bbe69ddc\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:50:57 crc kubenswrapper[4558]: I0120 17:50:57.490875 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/f95ad818-8ba9-4839-a613-2282bbe69ddc-memcached-tls-certs\") pod \"memcached-0\" (UID: \"f95ad818-8ba9-4839-a613-2282bbe69ddc\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:50:57 crc kubenswrapper[4558]: I0120 17:50:57.491107 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f5h92\" (UniqueName: \"kubernetes.io/projected/f95ad818-8ba9-4839-a613-2282bbe69ddc-kube-api-access-f5h92\") pod \"memcached-0\" (UID: \"f95ad818-8ba9-4839-a613-2282bbe69ddc\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:50:57 crc 
kubenswrapper[4558]: I0120 17:50:57.491145 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f95ad818-8ba9-4839-a613-2282bbe69ddc-combined-ca-bundle\") pod \"memcached-0\" (UID: \"f95ad818-8ba9-4839-a613-2282bbe69ddc\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:50:57 crc kubenswrapper[4558]: I0120 17:50:57.491339 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/f95ad818-8ba9-4839-a613-2282bbe69ddc-kolla-config\") pod \"memcached-0\" (UID: \"f95ad818-8ba9-4839-a613-2282bbe69ddc\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:50:57 crc kubenswrapper[4558]: I0120 17:50:57.592946 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f5h92\" (UniqueName: \"kubernetes.io/projected/f95ad818-8ba9-4839-a613-2282bbe69ddc-kube-api-access-f5h92\") pod \"memcached-0\" (UID: \"f95ad818-8ba9-4839-a613-2282bbe69ddc\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:50:57 crc kubenswrapper[4558]: I0120 17:50:57.593001 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f95ad818-8ba9-4839-a613-2282bbe69ddc-combined-ca-bundle\") pod \"memcached-0\" (UID: \"f95ad818-8ba9-4839-a613-2282bbe69ddc\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:50:57 crc kubenswrapper[4558]: I0120 17:50:57.593068 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/f95ad818-8ba9-4839-a613-2282bbe69ddc-kolla-config\") pod \"memcached-0\" (UID: \"f95ad818-8ba9-4839-a613-2282bbe69ddc\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:50:57 crc kubenswrapper[4558]: I0120 17:50:57.593194 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f95ad818-8ba9-4839-a613-2282bbe69ddc-config-data\") pod \"memcached-0\" (UID: \"f95ad818-8ba9-4839-a613-2282bbe69ddc\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:50:57 crc kubenswrapper[4558]: I0120 17:50:57.593222 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/f95ad818-8ba9-4839-a613-2282bbe69ddc-memcached-tls-certs\") pod \"memcached-0\" (UID: \"f95ad818-8ba9-4839-a613-2282bbe69ddc\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:50:57 crc kubenswrapper[4558]: I0120 17:50:57.594068 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/f95ad818-8ba9-4839-a613-2282bbe69ddc-kolla-config\") pod \"memcached-0\" (UID: \"f95ad818-8ba9-4839-a613-2282bbe69ddc\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:50:57 crc kubenswrapper[4558]: I0120 17:50:57.594099 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f95ad818-8ba9-4839-a613-2282bbe69ddc-config-data\") pod \"memcached-0\" (UID: \"f95ad818-8ba9-4839-a613-2282bbe69ddc\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:50:57 crc kubenswrapper[4558]: I0120 17:50:57.599612 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/f95ad818-8ba9-4839-a613-2282bbe69ddc-memcached-tls-certs\") pod \"memcached-0\" (UID: \"f95ad818-8ba9-4839-a613-2282bbe69ddc\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:50:57 crc kubenswrapper[4558]: I0120 17:50:57.599721 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f95ad818-8ba9-4839-a613-2282bbe69ddc-combined-ca-bundle\") pod \"memcached-0\" (UID: \"f95ad818-8ba9-4839-a613-2282bbe69ddc\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:50:57 crc kubenswrapper[4558]: I0120 17:50:57.611788 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f5h92\" (UniqueName: \"kubernetes.io/projected/f95ad818-8ba9-4839-a613-2282bbe69ddc-kube-api-access-f5h92\") pod \"memcached-0\" (UID: \"f95ad818-8ba9-4839-a613-2282bbe69ddc\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:50:57 crc kubenswrapper[4558]: I0120 17:50:57.710345 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:50:57 crc kubenswrapper[4558]: I0120 17:50:57.718695 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:50:57 crc kubenswrapper[4558]: E0120 17:50:57.719611 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[ceilometer-tls-certs], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack-kuttl-tests/ceilometer-0" podUID="06db8474-04b0-4525-90f0-c066ecbac0ae" Jan 20 17:50:58 crc kubenswrapper[4558]: I0120 17:50:58.058338 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:50:58 crc kubenswrapper[4558]: I0120 17:50:58.058343 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:50:58 crc kubenswrapper[4558]: I0120 17:50:58.058371 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"7c16f0c4-1462-4817-9f74-9e3d93193867","Type":"ContainerStarted","Data":"5c4ded7cac27e0c92726d8a6f3ee7686e2622233d98b3911837d546b85d42273"} Jan 20 17:50:58 crc kubenswrapper[4558]: I0120 17:50:58.058409 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:50:58 crc kubenswrapper[4558]: I0120 17:50:58.059300 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-8c8mp" podUID="2bf2d1ac-2610-4caf-a3dc-eb6ef3d260a4" containerName="registry-server" containerID="cri-o://19be01bcba053b0236e2ef0ae72c04da3fbabf3eacd020419e6616c5fca9ace1" gracePeriod=2 Jan 20 17:50:58 crc kubenswrapper[4558]: I0120 17:50:58.098793 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podStartSLOduration=2.098770179 podStartE2EDuration="2.098770179s" podCreationTimestamp="2026-01-20 17:50:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:50:58.081875641 +0000 UTC m=+4151.842213608" watchObservedRunningTime="2026-01-20 17:50:58.098770179 +0000 UTC m=+4151.859108146" Jan 20 17:50:58 crc kubenswrapper[4558]: I0120 17:50:58.166766 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:50:58 crc kubenswrapper[4558]: I0120 17:50:58.424002 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d127fb40-6eff-4ccd-922c-b209ac4edbda-internal-tls-certs\") pod \"barbican-api-5968cb86f6-ctfg5\" (UID: \"d127fb40-6eff-4ccd-922c-b209ac4edbda\") " pod="openstack-kuttl-tests/barbican-api-5968cb86f6-ctfg5" Jan 20 17:50:58 crc kubenswrapper[4558]: I0120 17:50:58.424457 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d127fb40-6eff-4ccd-922c-b209ac4edbda-public-tls-certs\") pod \"barbican-api-5968cb86f6-ctfg5\" (UID: \"d127fb40-6eff-4ccd-922c-b209ac4edbda\") " pod="openstack-kuttl-tests/barbican-api-5968cb86f6-ctfg5" Jan 20 17:50:58 crc kubenswrapper[4558]: E0120 17:50:58.424659 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-barbican-public-svc: secret "cert-barbican-public-svc" not found Jan 20 17:50:58 crc kubenswrapper[4558]: E0120 17:50:58.424723 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d127fb40-6eff-4ccd-922c-b209ac4edbda-public-tls-certs podName:d127fb40-6eff-4ccd-922c-b209ac4edbda nodeName:}" failed. No retries permitted until 2026-01-20 17:51:30.424704328 +0000 UTC m=+4184.185042295 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/d127fb40-6eff-4ccd-922c-b209ac4edbda-public-tls-certs") pod "barbican-api-5968cb86f6-ctfg5" (UID: "d127fb40-6eff-4ccd-922c-b209ac4edbda") : secret "cert-barbican-public-svc" not found Jan 20 17:50:58 crc kubenswrapper[4558]: I0120 17:50:58.495003 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d127fb40-6eff-4ccd-922c-b209ac4edbda-internal-tls-certs\") pod \"barbican-api-5968cb86f6-ctfg5\" (UID: \"d127fb40-6eff-4ccd-922c-b209ac4edbda\") " pod="openstack-kuttl-tests/barbican-api-5968cb86f6-ctfg5" Jan 20 17:50:58 crc kubenswrapper[4558]: W0120 17:50:58.500256 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf95ad818_8ba9_4839_a613_2282bbe69ddc.slice/crio-5d5ed2dc391448a9107ff0fe485cf3b3a71a7a56f115a34498bed712729c9662 WatchSource:0}: Error finding container 5d5ed2dc391448a9107ff0fe485cf3b3a71a7a56f115a34498bed712729c9662: Status 404 returned error can't find the container with id 5d5ed2dc391448a9107ff0fe485cf3b3a71a7a56f115a34498bed712729c9662 Jan 20 17:50:58 crc kubenswrapper[4558]: I0120 17:50:58.575514 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5239f3e2-7f54-4042-b0da-02f7608224df" path="/var/lib/kubelet/pods/5239f3e2-7f54-4042-b0da-02f7608224df/volumes" Jan 20 17:50:58 crc kubenswrapper[4558]: I0120 17:50:58.721493 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:50:58 crc kubenswrapper[4558]: I0120 17:50:58.738546 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-drjj7\" (UniqueName: \"kubernetes.io/projected/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-kube-api-access-drjj7\") pod \"3ec0f915-5676-4d2a-b743-1f8ee11c01ac\" (UID: \"3ec0f915-5676-4d2a-b743-1f8ee11c01ac\") " Jan 20 17:50:58 crc kubenswrapper[4558]: I0120 17:50:58.738703 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-config-data-custom\") pod \"3ec0f915-5676-4d2a-b743-1f8ee11c01ac\" (UID: \"3ec0f915-5676-4d2a-b743-1f8ee11c01ac\") " Jan 20 17:50:58 crc kubenswrapper[4558]: I0120 17:50:58.738736 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-config-data\") pod \"3ec0f915-5676-4d2a-b743-1f8ee11c01ac\" (UID: \"3ec0f915-5676-4d2a-b743-1f8ee11c01ac\") " Jan 20 17:50:58 crc kubenswrapper[4558]: I0120 17:50:58.738804 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-combined-ca-bundle\") pod \"3ec0f915-5676-4d2a-b743-1f8ee11c01ac\" (UID: \"3ec0f915-5676-4d2a-b743-1f8ee11c01ac\") " Jan 20 17:50:58 crc kubenswrapper[4558]: I0120 17:50:58.738861 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-logs\") pod \"3ec0f915-5676-4d2a-b743-1f8ee11c01ac\" (UID: \"3ec0f915-5676-4d2a-b743-1f8ee11c01ac\") " Jan 20 17:50:58 crc kubenswrapper[4558]: I0120 17:50:58.739123 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started 
for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-scripts\") pod \"3ec0f915-5676-4d2a-b743-1f8ee11c01ac\" (UID: \"3ec0f915-5676-4d2a-b743-1f8ee11c01ac\") " Jan 20 17:50:58 crc kubenswrapper[4558]: I0120 17:50:58.739148 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-etc-machine-id\") pod \"3ec0f915-5676-4d2a-b743-1f8ee11c01ac\" (UID: \"3ec0f915-5676-4d2a-b743-1f8ee11c01ac\") " Jan 20 17:50:58 crc kubenswrapper[4558]: I0120 17:50:58.740132 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "3ec0f915-5676-4d2a-b743-1f8ee11c01ac" (UID: "3ec0f915-5676-4d2a-b743-1f8ee11c01ac"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:50:58 crc kubenswrapper[4558]: I0120 17:50:58.742114 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-logs" (OuterVolumeSpecName: "logs") pod "3ec0f915-5676-4d2a-b743-1f8ee11c01ac" (UID: "3ec0f915-5676-4d2a-b743-1f8ee11c01ac"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:50:58 crc kubenswrapper[4558]: I0120 17:50:58.744205 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:50:58 crc kubenswrapper[4558]: I0120 17:50:58.747587 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-scripts" (OuterVolumeSpecName: "scripts") pod "3ec0f915-5676-4d2a-b743-1f8ee11c01ac" (UID: "3ec0f915-5676-4d2a-b743-1f8ee11c01ac"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:58 crc kubenswrapper[4558]: I0120 17:50:58.747630 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-kube-api-access-drjj7" (OuterVolumeSpecName: "kube-api-access-drjj7") pod "3ec0f915-5676-4d2a-b743-1f8ee11c01ac" (UID: "3ec0f915-5676-4d2a-b743-1f8ee11c01ac"). InnerVolumeSpecName "kube-api-access-drjj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:50:58 crc kubenswrapper[4558]: I0120 17:50:58.750686 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-config-data" (OuterVolumeSpecName: "config-data") pod "3ec0f915-5676-4d2a-b743-1f8ee11c01ac" (UID: "3ec0f915-5676-4d2a-b743-1f8ee11c01ac"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:58 crc kubenswrapper[4558]: I0120 17:50:58.750784 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3ec0f915-5676-4d2a-b743-1f8ee11c01ac" (UID: "3ec0f915-5676-4d2a-b743-1f8ee11c01ac"). InnerVolumeSpecName "combined-ca-bundle". 
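Worth noting about the barbican-api mount failure a few records back: when the public-tls-certs secret could not be found, the kubelet did not retry immediately; nestedpendingoperations reported "No retries permitted until ... (durationBeforeRetry 32s)", i.e. the delay between attempts for that volume has grown across repeated failures. The sketch below shows only the general doubling-with-a-cap idea; the starting delay, the cap and the backoff type are assumptions, since the log only tells us this particular attempt backed off 32s.

package main

import (
    "fmt"
    "time"
)

// backoff is a hypothetical doubling backoff with a cap, illustrating why a
// recorded durationBeforeRetry grows across repeated failures of the same operation.
type backoff struct {
    current, max time.Duration
}

func (b *backoff) next() time.Duration {
    d := b.current
    b.current *= 2
    if b.current > b.max {
        b.current = b.max
    }
    return d
}

func main() {
    b := backoff{current: 500 * time.Millisecond, max: 2 * time.Minute}
    for i := 0; i < 8; i++ {
        fmt.Println(b.next()) // 500ms 1s 2s 4s 8s 16s 32s 1m4s
    }
}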
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:58 crc kubenswrapper[4558]: I0120 17:50:58.754097 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "3ec0f915-5676-4d2a-b743-1f8ee11c01ac" (UID: "3ec0f915-5676-4d2a-b743-1f8ee11c01ac"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:58 crc kubenswrapper[4558]: I0120 17:50:58.841532 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/06db8474-04b0-4525-90f0-c066ecbac0ae-log-httpd\") pod \"06db8474-04b0-4525-90f0-c066ecbac0ae\" (UID: \"06db8474-04b0-4525-90f0-c066ecbac0ae\") " Jan 20 17:50:58 crc kubenswrapper[4558]: I0120 17:50:58.841770 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/06db8474-04b0-4525-90f0-c066ecbac0ae-sg-core-conf-yaml\") pod \"06db8474-04b0-4525-90f0-c066ecbac0ae\" (UID: \"06db8474-04b0-4525-90f0-c066ecbac0ae\") " Jan 20 17:50:58 crc kubenswrapper[4558]: I0120 17:50:58.841925 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/06db8474-04b0-4525-90f0-c066ecbac0ae-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "06db8474-04b0-4525-90f0-c066ecbac0ae" (UID: "06db8474-04b0-4525-90f0-c066ecbac0ae"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:50:58 crc kubenswrapper[4558]: I0120 17:50:58.842007 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06db8474-04b0-4525-90f0-c066ecbac0ae-config-data\") pod \"06db8474-04b0-4525-90f0-c066ecbac0ae\" (UID: \"06db8474-04b0-4525-90f0-c066ecbac0ae\") " Jan 20 17:50:58 crc kubenswrapper[4558]: I0120 17:50:58.842034 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06db8474-04b0-4525-90f0-c066ecbac0ae-combined-ca-bundle\") pod \"06db8474-04b0-4525-90f0-c066ecbac0ae\" (UID: \"06db8474-04b0-4525-90f0-c066ecbac0ae\") " Jan 20 17:50:58 crc kubenswrapper[4558]: I0120 17:50:58.842111 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/06db8474-04b0-4525-90f0-c066ecbac0ae-run-httpd\") pod \"06db8474-04b0-4525-90f0-c066ecbac0ae\" (UID: \"06db8474-04b0-4525-90f0-c066ecbac0ae\") " Jan 20 17:50:58 crc kubenswrapper[4558]: I0120 17:50:58.842238 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/06db8474-04b0-4525-90f0-c066ecbac0ae-scripts\") pod \"06db8474-04b0-4525-90f0-c066ecbac0ae\" (UID: \"06db8474-04b0-4525-90f0-c066ecbac0ae\") " Jan 20 17:50:58 crc kubenswrapper[4558]: I0120 17:50:58.842471 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tf9wc\" (UniqueName: \"kubernetes.io/projected/06db8474-04b0-4525-90f0-c066ecbac0ae-kube-api-access-tf9wc\") pod \"06db8474-04b0-4525-90f0-c066ecbac0ae\" (UID: \"06db8474-04b0-4525-90f0-c066ecbac0ae\") " Jan 20 17:50:58 crc kubenswrapper[4558]: I0120 17:50:58.843066 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/empty-dir/06db8474-04b0-4525-90f0-c066ecbac0ae-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "06db8474-04b0-4525-90f0-c066ecbac0ae" (UID: "06db8474-04b0-4525-90f0-c066ecbac0ae"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:50:58 crc kubenswrapper[4558]: I0120 17:50:58.844512 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:58 crc kubenswrapper[4558]: I0120 17:50:58.844540 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/06db8474-04b0-4525-90f0-c066ecbac0ae-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:58 crc kubenswrapper[4558]: I0120 17:50:58.844556 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:58 crc kubenswrapper[4558]: I0120 17:50:58.844567 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:58 crc kubenswrapper[4558]: I0120 17:50:58.844594 4558 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:58 crc kubenswrapper[4558]: I0120 17:50:58.844605 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-drjj7\" (UniqueName: \"kubernetes.io/projected/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-kube-api-access-drjj7\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:58 crc kubenswrapper[4558]: I0120 17:50:58.844615 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/06db8474-04b0-4525-90f0-c066ecbac0ae-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:58 crc kubenswrapper[4558]: I0120 17:50:58.844626 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:58 crc kubenswrapper[4558]: I0120 17:50:58.844637 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:58 crc kubenswrapper[4558]: I0120 17:50:58.846878 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06db8474-04b0-4525-90f0-c066ecbac0ae-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "06db8474-04b0-4525-90f0-c066ecbac0ae" (UID: "06db8474-04b0-4525-90f0-c066ecbac0ae"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:58 crc kubenswrapper[4558]: I0120 17:50:58.847629 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06db8474-04b0-4525-90f0-c066ecbac0ae-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "06db8474-04b0-4525-90f0-c066ecbac0ae" (UID: "06db8474-04b0-4525-90f0-c066ecbac0ae"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:58 crc kubenswrapper[4558]: I0120 17:50:58.848011 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/06db8474-04b0-4525-90f0-c066ecbac0ae-kube-api-access-tf9wc" (OuterVolumeSpecName: "kube-api-access-tf9wc") pod "06db8474-04b0-4525-90f0-c066ecbac0ae" (UID: "06db8474-04b0-4525-90f0-c066ecbac0ae"). InnerVolumeSpecName "kube-api-access-tf9wc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:50:58 crc kubenswrapper[4558]: I0120 17:50:58.848209 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06db8474-04b0-4525-90f0-c066ecbac0ae-config-data" (OuterVolumeSpecName: "config-data") pod "06db8474-04b0-4525-90f0-c066ecbac0ae" (UID: "06db8474-04b0-4525-90f0-c066ecbac0ae"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:58 crc kubenswrapper[4558]: I0120 17:50:58.848876 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06db8474-04b0-4525-90f0-c066ecbac0ae-scripts" (OuterVolumeSpecName: "scripts") pod "06db8474-04b0-4525-90f0-c066ecbac0ae" (UID: "06db8474-04b0-4525-90f0-c066ecbac0ae"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:50:58 crc kubenswrapper[4558]: I0120 17:50:58.856469 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-8c8mp" Jan 20 17:50:58 crc kubenswrapper[4558]: I0120 17:50:58.918095 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:50:58 crc kubenswrapper[4558]: I0120 17:50:58.919346 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:50:58 crc kubenswrapper[4558]: I0120 17:50:58.945966 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nfwrc\" (UniqueName: \"kubernetes.io/projected/2bf2d1ac-2610-4caf-a3dc-eb6ef3d260a4-kube-api-access-nfwrc\") pod \"2bf2d1ac-2610-4caf-a3dc-eb6ef3d260a4\" (UID: \"2bf2d1ac-2610-4caf-a3dc-eb6ef3d260a4\") " Jan 20 17:50:58 crc kubenswrapper[4558]: I0120 17:50:58.946148 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2bf2d1ac-2610-4caf-a3dc-eb6ef3d260a4-utilities\") pod \"2bf2d1ac-2610-4caf-a3dc-eb6ef3d260a4\" (UID: \"2bf2d1ac-2610-4caf-a3dc-eb6ef3d260a4\") " Jan 20 17:50:58 crc kubenswrapper[4558]: I0120 17:50:58.946327 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2bf2d1ac-2610-4caf-a3dc-eb6ef3d260a4-catalog-content\") pod \"2bf2d1ac-2610-4caf-a3dc-eb6ef3d260a4\" (UID: \"2bf2d1ac-2610-4caf-a3dc-eb6ef3d260a4\") " Jan 20 17:50:58 crc kubenswrapper[4558]: I0120 17:50:58.946822 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2bf2d1ac-2610-4caf-a3dc-eb6ef3d260a4-utilities" (OuterVolumeSpecName: "utilities") pod "2bf2d1ac-2610-4caf-a3dc-eb6ef3d260a4" (UID: "2bf2d1ac-2610-4caf-a3dc-eb6ef3d260a4"). InnerVolumeSpecName "utilities". 
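One more pattern visible above: the registry-server container of redhat-operators-8c8mp is stopped with "Killing container with a grace period ... gracePeriod=2", meaning the container is first asked to stop and is only forced after two seconds. The snippet below shows that term-then-kill shape for an ordinary Unix process; stopWithGrace is a made-up helper for illustration and says nothing about how CRI-O itself implements container stop.

package main

import (
    "fmt"
    "os/exec"
    "syscall"
    "time"
)

// stopWithGrace asks the process to stop (SIGTERM) and, only if it is still
// running once the grace period elapses, kills it (SIGKILL).
func stopWithGrace(cmd *exec.Cmd, grace time.Duration) error {
    if err := cmd.Process.Signal(syscall.SIGTERM); err != nil {
        return err
    }
    done := make(chan error, 1)
    go func() { done <- cmd.Wait() }()
    select {
    case err := <-done:
        return err // exited within the grace period
    case <-time.After(grace):
        _ = cmd.Process.Kill()
        return <-done
    }
}

func main() {
    cmd := exec.Command("sleep", "30")
    if err := cmd.Start(); err != nil {
        panic(err)
    }
    // gracePeriod=2, as in the registry-server entry above.
    fmt.Println(stopWithGrace(cmd, 2*time.Second))
}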
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:50:58 crc kubenswrapper[4558]: I0120 17:50:58.947017 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/06db8474-04b0-4525-90f0-c066ecbac0ae-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:58 crc kubenswrapper[4558]: I0120 17:50:58.947032 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06db8474-04b0-4525-90f0-c066ecbac0ae-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:58 crc kubenswrapper[4558]: I0120 17:50:58.947042 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06db8474-04b0-4525-90f0-c066ecbac0ae-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:58 crc kubenswrapper[4558]: I0120 17:50:58.947050 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/06db8474-04b0-4525-90f0-c066ecbac0ae-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:58 crc kubenswrapper[4558]: I0120 17:50:58.947061 4558 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2bf2d1ac-2610-4caf-a3dc-eb6ef3d260a4-utilities\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:58 crc kubenswrapper[4558]: I0120 17:50:58.947070 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tf9wc\" (UniqueName: \"kubernetes.io/projected/06db8474-04b0-4525-90f0-c066ecbac0ae-kube-api-access-tf9wc\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:58 crc kubenswrapper[4558]: I0120 17:50:58.950317 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2bf2d1ac-2610-4caf-a3dc-eb6ef3d260a4-kube-api-access-nfwrc" (OuterVolumeSpecName: "kube-api-access-nfwrc") pod "2bf2d1ac-2610-4caf-a3dc-eb6ef3d260a4" (UID: "2bf2d1ac-2610-4caf-a3dc-eb6ef3d260a4"). InnerVolumeSpecName "kube-api-access-nfwrc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.012587 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.050695 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2bf2d1ac-2610-4caf-a3dc-eb6ef3d260a4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2bf2d1ac-2610-4caf-a3dc-eb6ef3d260a4" (UID: "2bf2d1ac-2610-4caf-a3dc-eb6ef3d260a4"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.050842 4558 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2bf2d1ac-2610-4caf-a3dc-eb6ef3d260a4-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.050869 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nfwrc\" (UniqueName: \"kubernetes.io/projected/2bf2d1ac-2610-4caf-a3dc-eb6ef3d260a4-kube-api-access-nfwrc\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.068144 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"f95ad818-8ba9-4839-a613-2282bbe69ddc","Type":"ContainerStarted","Data":"292c875950d826dfa041a9a0a8854a29181d17ff2dfd691ec2ca8359bf5cf9e7"} Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.068230 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"f95ad818-8ba9-4839-a613-2282bbe69ddc","Type":"ContainerStarted","Data":"5d5ed2dc391448a9107ff0fe485cf3b3a71a7a56f115a34498bed712729c9662"} Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.068313 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.071786 4558 generic.go:334] "Generic (PLEG): container finished" podID="2bf2d1ac-2610-4caf-a3dc-eb6ef3d260a4" containerID="19be01bcba053b0236e2ef0ae72c04da3fbabf3eacd020419e6616c5fca9ace1" exitCode=0 Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.071863 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-8c8mp" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.071910 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.071963 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8c8mp" event={"ID":"2bf2d1ac-2610-4caf-a3dc-eb6ef3d260a4","Type":"ContainerDied","Data":"19be01bcba053b0236e2ef0ae72c04da3fbabf3eacd020419e6616c5fca9ace1"} Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.072004 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8c8mp" event={"ID":"2bf2d1ac-2610-4caf-a3dc-eb6ef3d260a4","Type":"ContainerDied","Data":"a70f5fae70bdaf3b5839e9ac09365830fe9a90304ac75300d7f3d2432e655ffe"} Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.072041 4558 scope.go:117] "RemoveContainer" containerID="19be01bcba053b0236e2ef0ae72c04da3fbabf3eacd020419e6616c5fca9ace1" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.072223 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.073131 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.093015 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/memcached-0" podStartSLOduration=2.092990187 podStartE2EDuration="2.092990187s" podCreationTimestamp="2026-01-20 17:50:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:50:59.086932376 +0000 UTC m=+4152.847270343" watchObservedRunningTime="2026-01-20 17:50:59.092990187 +0000 UTC m=+4152.853328154" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.110298 4558 scope.go:117] "RemoveContainer" containerID="1b8de3585535a4e7fb57dcc46de5f5105fc40133c5f4639db15d5125b4f810e1" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.160723 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.174506 4558 scope.go:117] "RemoveContainer" containerID="f4a01de2c1d753787c210f38e884d45f84511cbc851253d826b2f3a47fb108ad" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.174669 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.181195 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:50:59 crc kubenswrapper[4558]: E0120 17:50:59.181650 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2bf2d1ac-2610-4caf-a3dc-eb6ef3d260a4" containerName="extract-utilities" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.181671 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="2bf2d1ac-2610-4caf-a3dc-eb6ef3d260a4" containerName="extract-utilities" Jan 20 17:50:59 crc kubenswrapper[4558]: E0120 17:50:59.181680 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2bf2d1ac-2610-4caf-a3dc-eb6ef3d260a4" containerName="extract-content" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.181686 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="2bf2d1ac-2610-4caf-a3dc-eb6ef3d260a4" containerName="extract-content" Jan 20 17:50:59 crc kubenswrapper[4558]: E0120 17:50:59.181728 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2bf2d1ac-2610-4caf-a3dc-eb6ef3d260a4" containerName="registry-server" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.181734 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="2bf2d1ac-2610-4caf-a3dc-eb6ef3d260a4" containerName="registry-server" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.181946 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="2bf2d1ac-2610-4caf-a3dc-eb6ef3d260a4" containerName="registry-server" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.183304 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.185915 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-api-config-data" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.186084 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-cinder-internal-svc" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.188063 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.191486 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-cinder-public-svc" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.202153 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.208706 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.215284 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-8c8mp"] Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.219356 4558 scope.go:117] "RemoveContainer" containerID="19be01bcba053b0236e2ef0ae72c04da3fbabf3eacd020419e6616c5fca9ace1" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.223145 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:50:59 crc kubenswrapper[4558]: E0120 17:50:59.223189 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"19be01bcba053b0236e2ef0ae72c04da3fbabf3eacd020419e6616c5fca9ace1\": container with ID starting with 19be01bcba053b0236e2ef0ae72c04da3fbabf3eacd020419e6616c5fca9ace1 not found: ID does not exist" containerID="19be01bcba053b0236e2ef0ae72c04da3fbabf3eacd020419e6616c5fca9ace1" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.223245 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"19be01bcba053b0236e2ef0ae72c04da3fbabf3eacd020419e6616c5fca9ace1"} err="failed to get container status \"19be01bcba053b0236e2ef0ae72c04da3fbabf3eacd020419e6616c5fca9ace1\": rpc error: code = NotFound desc = could not find container \"19be01bcba053b0236e2ef0ae72c04da3fbabf3eacd020419e6616c5fca9ace1\": container with ID starting with 19be01bcba053b0236e2ef0ae72c04da3fbabf3eacd020419e6616c5fca9ace1 not found: ID does not exist" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.223277 4558 scope.go:117] "RemoveContainer" containerID="1b8de3585535a4e7fb57dcc46de5f5105fc40133c5f4639db15d5125b4f810e1" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.223416 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-8c8mp"] Jan 20 17:50:59 crc kubenswrapper[4558]: E0120 17:50:59.223870 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1b8de3585535a4e7fb57dcc46de5f5105fc40133c5f4639db15d5125b4f810e1\": container with ID starting with 1b8de3585535a4e7fb57dcc46de5f5105fc40133c5f4639db15d5125b4f810e1 not found: ID does not exist" containerID="1b8de3585535a4e7fb57dcc46de5f5105fc40133c5f4639db15d5125b4f810e1" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 
17:50:59.223913 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1b8de3585535a4e7fb57dcc46de5f5105fc40133c5f4639db15d5125b4f810e1"} err="failed to get container status \"1b8de3585535a4e7fb57dcc46de5f5105fc40133c5f4639db15d5125b4f810e1\": rpc error: code = NotFound desc = could not find container \"1b8de3585535a4e7fb57dcc46de5f5105fc40133c5f4639db15d5125b4f810e1\": container with ID starting with 1b8de3585535a4e7fb57dcc46de5f5105fc40133c5f4639db15d5125b4f810e1 not found: ID does not exist" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.223933 4558 scope.go:117] "RemoveContainer" containerID="f4a01de2c1d753787c210f38e884d45f84511cbc851253d826b2f3a47fb108ad" Jan 20 17:50:59 crc kubenswrapper[4558]: E0120 17:50:59.224207 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f4a01de2c1d753787c210f38e884d45f84511cbc851253d826b2f3a47fb108ad\": container with ID starting with f4a01de2c1d753787c210f38e884d45f84511cbc851253d826b2f3a47fb108ad not found: ID does not exist" containerID="f4a01de2c1d753787c210f38e884d45f84511cbc851253d826b2f3a47fb108ad" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.224237 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f4a01de2c1d753787c210f38e884d45f84511cbc851253d826b2f3a47fb108ad"} err="failed to get container status \"f4a01de2c1d753787c210f38e884d45f84511cbc851253d826b2f3a47fb108ad\": rpc error: code = NotFound desc = could not find container \"f4a01de2c1d753787c210f38e884d45f84511cbc851253d826b2f3a47fb108ad\": container with ID starting with f4a01de2c1d753787c210f38e884d45f84511cbc851253d826b2f3a47fb108ad not found: ID does not exist" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.232755 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.235792 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.240897 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.243880 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ceilometer-internal-svc" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.244039 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.244087 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.361805 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q6cnv\" (UniqueName: \"kubernetes.io/projected/01835185-5ca5-467a-9362-b8de3e8665ed-kube-api-access-q6cnv\") pod \"cinder-api-0\" (UID: \"01835185-5ca5-467a-9362-b8de3e8665ed\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.361916 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/01835185-5ca5-467a-9362-b8de3e8665ed-etc-machine-id\") pod \"cinder-api-0\" (UID: \"01835185-5ca5-467a-9362-b8de3e8665ed\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.361955 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/01835185-5ca5-467a-9362-b8de3e8665ed-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"01835185-5ca5-467a-9362-b8de3e8665ed\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.361979 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01835185-5ca5-467a-9362-b8de3e8665ed-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"01835185-5ca5-467a-9362-b8de3e8665ed\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.362276 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/01835185-5ca5-467a-9362-b8de3e8665ed-config-data\") pod \"cinder-api-0\" (UID: \"01835185-5ca5-467a-9362-b8de3e8665ed\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.362364 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x5gqs\" (UniqueName: \"kubernetes.io/projected/0b958dc9-cb4f-4494-aa93-73ace6dcdc61-kube-api-access-x5gqs\") pod \"ceilometer-0\" (UID: \"0b958dc9-cb4f-4494-aa93-73ace6dcdc61\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.362448 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b958dc9-cb4f-4494-aa93-73ace6dcdc61-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0b958dc9-cb4f-4494-aa93-73ace6dcdc61\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:50:59 crc 
kubenswrapper[4558]: I0120 17:50:59.362506 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/0b958dc9-cb4f-4494-aa93-73ace6dcdc61-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"0b958dc9-cb4f-4494-aa93-73ace6dcdc61\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.362601 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/01835185-5ca5-467a-9362-b8de3e8665ed-public-tls-certs\") pod \"cinder-api-0\" (UID: \"01835185-5ca5-467a-9362-b8de3e8665ed\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.362805 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0b958dc9-cb4f-4494-aa93-73ace6dcdc61-scripts\") pod \"ceilometer-0\" (UID: \"0b958dc9-cb4f-4494-aa93-73ace6dcdc61\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.362854 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0b958dc9-cb4f-4494-aa93-73ace6dcdc61-run-httpd\") pod \"ceilometer-0\" (UID: \"0b958dc9-cb4f-4494-aa93-73ace6dcdc61\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.362883 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/01835185-5ca5-467a-9362-b8de3e8665ed-scripts\") pod \"cinder-api-0\" (UID: \"01835185-5ca5-467a-9362-b8de3e8665ed\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.362903 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0b958dc9-cb4f-4494-aa93-73ace6dcdc61-config-data\") pod \"ceilometer-0\" (UID: \"0b958dc9-cb4f-4494-aa93-73ace6dcdc61\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.362929 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0b958dc9-cb4f-4494-aa93-73ace6dcdc61-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0b958dc9-cb4f-4494-aa93-73ace6dcdc61\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.362963 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/01835185-5ca5-467a-9362-b8de3e8665ed-logs\") pod \"cinder-api-0\" (UID: \"01835185-5ca5-467a-9362-b8de3e8665ed\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.363009 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0b958dc9-cb4f-4494-aa93-73ace6dcdc61-log-httpd\") pod \"ceilometer-0\" (UID: \"0b958dc9-cb4f-4494-aa93-73ace6dcdc61\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.363117 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/01835185-5ca5-467a-9362-b8de3e8665ed-config-data-custom\") pod \"cinder-api-0\" (UID: \"01835185-5ca5-467a-9362-b8de3e8665ed\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.363276 4558 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/06db8474-04b0-4525-90f0-c066ecbac0ae-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.363297 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.363308 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3ec0f915-5676-4d2a-b743-1f8ee11c01ac-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.415052 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-api-5968cb86f6-ctfg5"] Jan 20 17:50:59 crc kubenswrapper[4558]: E0120 17:50:59.415975 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[public-tls-certs], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack-kuttl-tests/barbican-api-5968cb86f6-ctfg5" podUID="d127fb40-6eff-4ccd-922c-b209ac4edbda" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.433801 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.434114 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.439257 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-api-6fb4445c46-pmjxt"] Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.440717 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-api-6fb4445c46-pmjxt" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.449380 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-api-6fb4445c46-pmjxt"] Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.474390 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b2ed54c7-3249-4a62-8d98-21618cb6dab6-logs\") pod \"barbican-api-6fb4445c46-pmjxt\" (UID: \"b2ed54c7-3249-4a62-8d98-21618cb6dab6\") " pod="openstack-kuttl-tests/barbican-api-6fb4445c46-pmjxt" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.474420 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b2ed54c7-3249-4a62-8d98-21618cb6dab6-config-data\") pod \"barbican-api-6fb4445c46-pmjxt\" (UID: \"b2ed54c7-3249-4a62-8d98-21618cb6dab6\") " pod="openstack-kuttl-tests/barbican-api-6fb4445c46-pmjxt" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.474455 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/01835185-5ca5-467a-9362-b8de3e8665ed-config-data\") pod \"cinder-api-0\" (UID: \"01835185-5ca5-467a-9362-b8de3e8665ed\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.474483 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x5gqs\" (UniqueName: \"kubernetes.io/projected/0b958dc9-cb4f-4494-aa93-73ace6dcdc61-kube-api-access-x5gqs\") pod \"ceilometer-0\" (UID: \"0b958dc9-cb4f-4494-aa93-73ace6dcdc61\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.474511 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.474536 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b958dc9-cb4f-4494-aa93-73ace6dcdc61-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0b958dc9-cb4f-4494-aa93-73ace6dcdc61\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.474563 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/0b958dc9-cb4f-4494-aa93-73ace6dcdc61-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"0b958dc9-cb4f-4494-aa93-73ace6dcdc61\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.474598 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/01835185-5ca5-467a-9362-b8de3e8665ed-public-tls-certs\") pod \"cinder-api-0\" (UID: \"01835185-5ca5-467a-9362-b8de3e8665ed\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.474646 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.474671 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b2ed54c7-3249-4a62-8d98-21618cb6dab6-public-tls-certs\") pod \"barbican-api-6fb4445c46-pmjxt\" (UID: \"b2ed54c7-3249-4a62-8d98-21618cb6dab6\") " pod="openstack-kuttl-tests/barbican-api-6fb4445c46-pmjxt" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.474701 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l95vj\" (UniqueName: \"kubernetes.io/projected/b2ed54c7-3249-4a62-8d98-21618cb6dab6-kube-api-access-l95vj\") pod \"barbican-api-6fb4445c46-pmjxt\" (UID: \"b2ed54c7-3249-4a62-8d98-21618cb6dab6\") " pod="openstack-kuttl-tests/barbican-api-6fb4445c46-pmjxt" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.474733 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0b958dc9-cb4f-4494-aa93-73ace6dcdc61-scripts\") pod \"ceilometer-0\" (UID: \"0b958dc9-cb4f-4494-aa93-73ace6dcdc61\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.474754 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0b958dc9-cb4f-4494-aa93-73ace6dcdc61-run-httpd\") pod \"ceilometer-0\" (UID: \"0b958dc9-cb4f-4494-aa93-73ace6dcdc61\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.474770 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/01835185-5ca5-467a-9362-b8de3e8665ed-scripts\") pod \"cinder-api-0\" (UID: \"01835185-5ca5-467a-9362-b8de3e8665ed\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.474785 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0b958dc9-cb4f-4494-aa93-73ace6dcdc61-config-data\") pod \"ceilometer-0\" (UID: \"0b958dc9-cb4f-4494-aa93-73ace6dcdc61\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.474801 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0b958dc9-cb4f-4494-aa93-73ace6dcdc61-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0b958dc9-cb4f-4494-aa93-73ace6dcdc61\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.474825 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/01835185-5ca5-467a-9362-b8de3e8665ed-logs\") pod \"cinder-api-0\" (UID: \"01835185-5ca5-467a-9362-b8de3e8665ed\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.474844 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0b958dc9-cb4f-4494-aa93-73ace6dcdc61-log-httpd\") pod \"ceilometer-0\" (UID: \"0b958dc9-cb4f-4494-aa93-73ace6dcdc61\") " 
pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.474876 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b2ed54c7-3249-4a62-8d98-21618cb6dab6-config-data-custom\") pod \"barbican-api-6fb4445c46-pmjxt\" (UID: \"b2ed54c7-3249-4a62-8d98-21618cb6dab6\") " pod="openstack-kuttl-tests/barbican-api-6fb4445c46-pmjxt" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.474901 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b2ed54c7-3249-4a62-8d98-21618cb6dab6-internal-tls-certs\") pod \"barbican-api-6fb4445c46-pmjxt\" (UID: \"b2ed54c7-3249-4a62-8d98-21618cb6dab6\") " pod="openstack-kuttl-tests/barbican-api-6fb4445c46-pmjxt" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.474921 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/01835185-5ca5-467a-9362-b8de3e8665ed-config-data-custom\") pod \"cinder-api-0\" (UID: \"01835185-5ca5-467a-9362-b8de3e8665ed\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.474940 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2ed54c7-3249-4a62-8d98-21618cb6dab6-combined-ca-bundle\") pod \"barbican-api-6fb4445c46-pmjxt\" (UID: \"b2ed54c7-3249-4a62-8d98-21618cb6dab6\") " pod="openstack-kuttl-tests/barbican-api-6fb4445c46-pmjxt" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.474964 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q6cnv\" (UniqueName: \"kubernetes.io/projected/01835185-5ca5-467a-9362-b8de3e8665ed-kube-api-access-q6cnv\") pod \"cinder-api-0\" (UID: \"01835185-5ca5-467a-9362-b8de3e8665ed\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.475013 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/01835185-5ca5-467a-9362-b8de3e8665ed-etc-machine-id\") pod \"cinder-api-0\" (UID: \"01835185-5ca5-467a-9362-b8de3e8665ed\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.475036 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/01835185-5ca5-467a-9362-b8de3e8665ed-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"01835185-5ca5-467a-9362-b8de3e8665ed\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.475053 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01835185-5ca5-467a-9362-b8de3e8665ed-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"01835185-5ca5-467a-9362-b8de3e8665ed\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.475688 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0b958dc9-cb4f-4494-aa93-73ace6dcdc61-run-httpd\") pod \"ceilometer-0\" (UID: \"0b958dc9-cb4f-4494-aa93-73ace6dcdc61\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 
17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.477660 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/01835185-5ca5-467a-9362-b8de3e8665ed-etc-machine-id\") pod \"cinder-api-0\" (UID: \"01835185-5ca5-467a-9362-b8de3e8665ed\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:50:59 crc kubenswrapper[4558]: E0120 17:50:59.478712 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-ovndbcluster-nb-ovndbs: secret "cert-ovndbcluster-nb-ovndbs" not found Jan 20 17:50:59 crc kubenswrapper[4558]: E0120 17:50:59.478768 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-ovsdbserver-nb-tls-certs podName:bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162 nodeName:}" failed. No retries permitted until 2026-01-20 17:51:31.478750843 +0000 UTC m=+4185.239088809 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "ovsdbserver-nb-tls-certs" (UniqueName: "kubernetes.io/secret/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-ovsdbserver-nb-tls-certs") pod "ovsdbserver-nb-0" (UID: "bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162") : secret "cert-ovndbcluster-nb-ovndbs" not found Jan 20 17:50:59 crc kubenswrapper[4558]: E0120 17:50:59.481044 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-ovn-metrics: secret "cert-ovn-metrics" not found Jan 20 17:50:59 crc kubenswrapper[4558]: E0120 17:50:59.481082 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-metrics-certs-tls-certs podName:bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162 nodeName:}" failed. No retries permitted until 2026-01-20 17:51:31.481072298 +0000 UTC m=+4185.241410265 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "metrics-certs-tls-certs" (UniqueName: "kubernetes.io/secret/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-metrics-certs-tls-certs") pod "ovsdbserver-nb-0" (UID: "bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162") : secret "cert-ovn-metrics" not found Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.482613 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/01835185-5ca5-467a-9362-b8de3e8665ed-logs\") pod \"cinder-api-0\" (UID: \"01835185-5ca5-467a-9362-b8de3e8665ed\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.484218 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0b958dc9-cb4f-4494-aa93-73ace6dcdc61-log-httpd\") pod \"ceilometer-0\" (UID: \"0b958dc9-cb4f-4494-aa93-73ace6dcdc61\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.488369 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/01835185-5ca5-467a-9362-b8de3e8665ed-config-data-custom\") pod \"cinder-api-0\" (UID: \"01835185-5ca5-467a-9362-b8de3e8665ed\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.490803 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b958dc9-cb4f-4494-aa93-73ace6dcdc61-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0b958dc9-cb4f-4494-aa93-73ace6dcdc61\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.491924 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0b958dc9-cb4f-4494-aa93-73ace6dcdc61-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0b958dc9-cb4f-4494-aa93-73ace6dcdc61\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.492445 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/01835185-5ca5-467a-9362-b8de3e8665ed-public-tls-certs\") pod \"cinder-api-0\" (UID: \"01835185-5ca5-467a-9362-b8de3e8665ed\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.494206 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/01835185-5ca5-467a-9362-b8de3e8665ed-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"01835185-5ca5-467a-9362-b8de3e8665ed\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.496314 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0b958dc9-cb4f-4494-aa93-73ace6dcdc61-config-data\") pod \"ceilometer-0\" (UID: \"0b958dc9-cb4f-4494-aa93-73ace6dcdc61\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.496542 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/01835185-5ca5-467a-9362-b8de3e8665ed-scripts\") pod \"cinder-api-0\" (UID: \"01835185-5ca5-467a-9362-b8de3e8665ed\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.499861 4558 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/01835185-5ca5-467a-9362-b8de3e8665ed-config-data\") pod \"cinder-api-0\" (UID: \"01835185-5ca5-467a-9362-b8de3e8665ed\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.500286 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q6cnv\" (UniqueName: \"kubernetes.io/projected/01835185-5ca5-467a-9362-b8de3e8665ed-kube-api-access-q6cnv\") pod \"cinder-api-0\" (UID: \"01835185-5ca5-467a-9362-b8de3e8665ed\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.500571 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01835185-5ca5-467a-9362-b8de3e8665ed-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"01835185-5ca5-467a-9362-b8de3e8665ed\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.504114 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/0b958dc9-cb4f-4494-aa93-73ace6dcdc61-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"0b958dc9-cb4f-4494-aa93-73ace6dcdc61\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.504728 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x5gqs\" (UniqueName: \"kubernetes.io/projected/0b958dc9-cb4f-4494-aa93-73ace6dcdc61-kube-api-access-x5gqs\") pod \"ceilometer-0\" (UID: \"0b958dc9-cb4f-4494-aa93-73ace6dcdc61\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.513761 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0b958dc9-cb4f-4494-aa93-73ace6dcdc61-scripts\") pod \"ceilometer-0\" (UID: \"0b958dc9-cb4f-4494-aa93-73ace6dcdc61\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.537768 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.561356 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.578454 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b2ed54c7-3249-4a62-8d98-21618cb6dab6-logs\") pod \"barbican-api-6fb4445c46-pmjxt\" (UID: \"b2ed54c7-3249-4a62-8d98-21618cb6dab6\") " pod="openstack-kuttl-tests/barbican-api-6fb4445c46-pmjxt" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.578551 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b2ed54c7-3249-4a62-8d98-21618cb6dab6-config-data\") pod \"barbican-api-6fb4445c46-pmjxt\" (UID: \"b2ed54c7-3249-4a62-8d98-21618cb6dab6\") " pod="openstack-kuttl-tests/barbican-api-6fb4445c46-pmjxt" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.578830 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b2ed54c7-3249-4a62-8d98-21618cb6dab6-public-tls-certs\") pod \"barbican-api-6fb4445c46-pmjxt\" (UID: \"b2ed54c7-3249-4a62-8d98-21618cb6dab6\") " pod="openstack-kuttl-tests/barbican-api-6fb4445c46-pmjxt" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.578921 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l95vj\" (UniqueName: \"kubernetes.io/projected/b2ed54c7-3249-4a62-8d98-21618cb6dab6-kube-api-access-l95vj\") pod \"barbican-api-6fb4445c46-pmjxt\" (UID: \"b2ed54c7-3249-4a62-8d98-21618cb6dab6\") " pod="openstack-kuttl-tests/barbican-api-6fb4445c46-pmjxt" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.579110 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b2ed54c7-3249-4a62-8d98-21618cb6dab6-config-data-custom\") pod \"barbican-api-6fb4445c46-pmjxt\" (UID: \"b2ed54c7-3249-4a62-8d98-21618cb6dab6\") " pod="openstack-kuttl-tests/barbican-api-6fb4445c46-pmjxt" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.579210 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b2ed54c7-3249-4a62-8d98-21618cb6dab6-internal-tls-certs\") pod \"barbican-api-6fb4445c46-pmjxt\" (UID: \"b2ed54c7-3249-4a62-8d98-21618cb6dab6\") " pod="openstack-kuttl-tests/barbican-api-6fb4445c46-pmjxt" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.579295 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2ed54c7-3249-4a62-8d98-21618cb6dab6-combined-ca-bundle\") pod \"barbican-api-6fb4445c46-pmjxt\" (UID: \"b2ed54c7-3249-4a62-8d98-21618cb6dab6\") " pod="openstack-kuttl-tests/barbican-api-6fb4445c46-pmjxt" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.580066 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b2ed54c7-3249-4a62-8d98-21618cb6dab6-logs\") pod \"barbican-api-6fb4445c46-pmjxt\" (UID: \"b2ed54c7-3249-4a62-8d98-21618cb6dab6\") " pod="openstack-kuttl-tests/barbican-api-6fb4445c46-pmjxt" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.589295 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2ed54c7-3249-4a62-8d98-21618cb6dab6-combined-ca-bundle\") pod \"barbican-api-6fb4445c46-pmjxt\" 
(UID: \"b2ed54c7-3249-4a62-8d98-21618cb6dab6\") " pod="openstack-kuttl-tests/barbican-api-6fb4445c46-pmjxt" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.594629 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b2ed54c7-3249-4a62-8d98-21618cb6dab6-internal-tls-certs\") pod \"barbican-api-6fb4445c46-pmjxt\" (UID: \"b2ed54c7-3249-4a62-8d98-21618cb6dab6\") " pod="openstack-kuttl-tests/barbican-api-6fb4445c46-pmjxt" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.595690 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b2ed54c7-3249-4a62-8d98-21618cb6dab6-config-data\") pod \"barbican-api-6fb4445c46-pmjxt\" (UID: \"b2ed54c7-3249-4a62-8d98-21618cb6dab6\") " pod="openstack-kuttl-tests/barbican-api-6fb4445c46-pmjxt" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.613621 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b2ed54c7-3249-4a62-8d98-21618cb6dab6-public-tls-certs\") pod \"barbican-api-6fb4445c46-pmjxt\" (UID: \"b2ed54c7-3249-4a62-8d98-21618cb6dab6\") " pod="openstack-kuttl-tests/barbican-api-6fb4445c46-pmjxt" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.626676 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b2ed54c7-3249-4a62-8d98-21618cb6dab6-config-data-custom\") pod \"barbican-api-6fb4445c46-pmjxt\" (UID: \"b2ed54c7-3249-4a62-8d98-21618cb6dab6\") " pod="openstack-kuttl-tests/barbican-api-6fb4445c46-pmjxt" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.627757 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l95vj\" (UniqueName: \"kubernetes.io/projected/b2ed54c7-3249-4a62-8d98-21618cb6dab6-kube-api-access-l95vj\") pod \"barbican-api-6fb4445c46-pmjxt\" (UID: \"b2ed54c7-3249-4a62-8d98-21618cb6dab6\") " pod="openstack-kuttl-tests/barbican-api-6fb4445c46-pmjxt" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.819760 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:50:59 crc kubenswrapper[4558]: I0120 17:50:59.864946 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-api-6fb4445c46-pmjxt" Jan 20 17:51:00 crc kubenswrapper[4558]: I0120 17:51:00.088223 4558 generic.go:334] "Generic (PLEG): container finished" podID="7c16f0c4-1462-4817-9f74-9e3d93193867" containerID="5c4ded7cac27e0c92726d8a6f3ee7686e2622233d98b3911837d546b85d42273" exitCode=1 Jan 20 17:51:00 crc kubenswrapper[4558]: I0120 17:51:00.088336 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-api-5968cb86f6-ctfg5" Jan 20 17:51:00 crc kubenswrapper[4558]: I0120 17:51:00.088419 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"7c16f0c4-1462-4817-9f74-9e3d93193867","Type":"ContainerDied","Data":"5c4ded7cac27e0c92726d8a6f3ee7686e2622233d98b3911837d546b85d42273"} Jan 20 17:51:00 crc kubenswrapper[4558]: I0120 17:51:00.090609 4558 scope.go:117] "RemoveContainer" containerID="5c4ded7cac27e0c92726d8a6f3ee7686e2622233d98b3911837d546b85d42273" Jan 20 17:51:00 crc kubenswrapper[4558]: I0120 17:51:00.137582 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:51:00 crc kubenswrapper[4558]: I0120 17:51:00.147884 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:51:00 crc kubenswrapper[4558]: I0120 17:51:00.267750 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-api-6fb4445c46-pmjxt"] Jan 20 17:51:00 crc kubenswrapper[4558]: W0120 17:51:00.404449 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0b958dc9_cb4f_4494_aa93_73ace6dcdc61.slice/crio-06b10739b6519aa32e9883e5de2135cfd7587729ac30dc7d54920de4c316ee81 WatchSource:0}: Error finding container 06b10739b6519aa32e9883e5de2135cfd7587729ac30dc7d54920de4c316ee81: Status 404 returned error can't find the container with id 06b10739b6519aa32e9883e5de2135cfd7587729ac30dc7d54920de4c316ee81 Jan 20 17:51:00 crc kubenswrapper[4558]: I0120 17:51:00.490531 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:51:00 crc kubenswrapper[4558]: I0120 17:51:00.501634 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/fc60cc21-f159-40bf-beca-c62718f57b9c-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"fc60cc21-f159-40bf-beca-c62718f57b9c\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:51:00 crc kubenswrapper[4558]: E0120 17:51:00.501931 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-ovn-metrics: secret "cert-ovn-metrics" not found Jan 20 17:51:00 crc kubenswrapper[4558]: E0120 17:51:00.502009 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fc60cc21-f159-40bf-beca-c62718f57b9c-metrics-certs-tls-certs podName:fc60cc21-f159-40bf-beca-c62718f57b9c nodeName:}" failed. No retries permitted until 2026-01-20 17:51:32.501984927 +0000 UTC m=+4186.262322895 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "metrics-certs-tls-certs" (UniqueName: "kubernetes.io/secret/fc60cc21-f159-40bf-beca-c62718f57b9c-metrics-certs-tls-certs") pod "ovsdbserver-sb-0" (UID: "fc60cc21-f159-40bf-beca-c62718f57b9c") : secret "cert-ovn-metrics" not found Jan 20 17:51:00 crc kubenswrapper[4558]: I0120 17:51:00.583094 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="06db8474-04b0-4525-90f0-c066ecbac0ae" path="/var/lib/kubelet/pods/06db8474-04b0-4525-90f0-c066ecbac0ae/volumes" Jan 20 17:51:00 crc kubenswrapper[4558]: I0120 17:51:00.584129 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2bf2d1ac-2610-4caf-a3dc-eb6ef3d260a4" path="/var/lib/kubelet/pods/2bf2d1ac-2610-4caf-a3dc-eb6ef3d260a4/volumes" Jan 20 17:51:00 crc kubenswrapper[4558]: I0120 17:51:00.585091 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ec0f915-5676-4d2a-b743-1f8ee11c01ac" path="/var/lib/kubelet/pods/3ec0f915-5676-4d2a-b743-1f8ee11c01ac/volumes" Jan 20 17:51:00 crc kubenswrapper[4558]: I0120 17:51:00.589145 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-api-5968cb86f6-ctfg5" Jan 20 17:51:00 crc kubenswrapper[4558]: I0120 17:51:00.604351 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/581c2419-48d9-4b7d-b71b-ce5cceb0326d-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"581c2419-48d9-4b7d-b71b-ce5cceb0326d\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:51:00 crc kubenswrapper[4558]: I0120 17:51:00.611431 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/581c2419-48d9-4b7d-b71b-ce5cceb0326d-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"581c2419-48d9-4b7d-b71b-ce5cceb0326d\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:51:00 crc kubenswrapper[4558]: I0120 17:51:00.699126 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:51:00 crc kubenswrapper[4558]: E0120 17:51:00.700627 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[metrics-certs-tls-certs], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack-kuttl-tests/ovn-northd-0" podUID="80a7bd73-c63f-4a27-9d3d-3fef760c025f" Jan 20 17:51:00 crc kubenswrapper[4558]: I0120 17:51:00.708524 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d127fb40-6eff-4ccd-922c-b209ac4edbda-internal-tls-certs\") pod \"d127fb40-6eff-4ccd-922c-b209ac4edbda\" (UID: \"d127fb40-6eff-4ccd-922c-b209ac4edbda\") " Jan 20 17:51:00 crc kubenswrapper[4558]: I0120 17:51:00.708593 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d127fb40-6eff-4ccd-922c-b209ac4edbda-combined-ca-bundle\") pod \"d127fb40-6eff-4ccd-922c-b209ac4edbda\" (UID: \"d127fb40-6eff-4ccd-922c-b209ac4edbda\") " Jan 20 17:51:00 crc kubenswrapper[4558]: I0120 17:51:00.708662 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d127fb40-6eff-4ccd-922c-b209ac4edbda-config-data\") pod \"d127fb40-6eff-4ccd-922c-b209ac4edbda\" (UID: 
\"d127fb40-6eff-4ccd-922c-b209ac4edbda\") " Jan 20 17:51:00 crc kubenswrapper[4558]: I0120 17:51:00.708707 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d127fb40-6eff-4ccd-922c-b209ac4edbda-config-data-custom\") pod \"d127fb40-6eff-4ccd-922c-b209ac4edbda\" (UID: \"d127fb40-6eff-4ccd-922c-b209ac4edbda\") " Jan 20 17:51:00 crc kubenswrapper[4558]: I0120 17:51:00.708750 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d127fb40-6eff-4ccd-922c-b209ac4edbda-logs\") pod \"d127fb40-6eff-4ccd-922c-b209ac4edbda\" (UID: \"d127fb40-6eff-4ccd-922c-b209ac4edbda\") " Jan 20 17:51:00 crc kubenswrapper[4558]: I0120 17:51:00.708799 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kbnl5\" (UniqueName: \"kubernetes.io/projected/d127fb40-6eff-4ccd-922c-b209ac4edbda-kube-api-access-kbnl5\") pod \"d127fb40-6eff-4ccd-922c-b209ac4edbda\" (UID: \"d127fb40-6eff-4ccd-922c-b209ac4edbda\") " Jan 20 17:51:00 crc kubenswrapper[4558]: I0120 17:51:00.710891 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d127fb40-6eff-4ccd-922c-b209ac4edbda-logs" (OuterVolumeSpecName: "logs") pod "d127fb40-6eff-4ccd-922c-b209ac4edbda" (UID: "d127fb40-6eff-4ccd-922c-b209ac4edbda"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:51:00 crc kubenswrapper[4558]: I0120 17:51:00.713340 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d127fb40-6eff-4ccd-922c-b209ac4edbda-kube-api-access-kbnl5" (OuterVolumeSpecName: "kube-api-access-kbnl5") pod "d127fb40-6eff-4ccd-922c-b209ac4edbda" (UID: "d127fb40-6eff-4ccd-922c-b209ac4edbda"). InnerVolumeSpecName "kube-api-access-kbnl5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:51:00 crc kubenswrapper[4558]: I0120 17:51:00.714296 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d127fb40-6eff-4ccd-922c-b209ac4edbda-config-data" (OuterVolumeSpecName: "config-data") pod "d127fb40-6eff-4ccd-922c-b209ac4edbda" (UID: "d127fb40-6eff-4ccd-922c-b209ac4edbda"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:00 crc kubenswrapper[4558]: I0120 17:51:00.714804 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d127fb40-6eff-4ccd-922c-b209ac4edbda-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "d127fb40-6eff-4ccd-922c-b209ac4edbda" (UID: "d127fb40-6eff-4ccd-922c-b209ac4edbda"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:00 crc kubenswrapper[4558]: I0120 17:51:00.715612 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d127fb40-6eff-4ccd-922c-b209ac4edbda-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d127fb40-6eff-4ccd-922c-b209ac4edbda" (UID: "d127fb40-6eff-4ccd-922c-b209ac4edbda"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:00 crc kubenswrapper[4558]: I0120 17:51:00.716466 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:51:00 crc kubenswrapper[4558]: E0120 17:51:00.717865 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[metrics-certs-tls-certs], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack-kuttl-tests/ovsdbserver-sb-0" podUID="fc60cc21-f159-40bf-beca-c62718f57b9c" Jan 20 17:51:00 crc kubenswrapper[4558]: I0120 17:51:00.717877 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d127fb40-6eff-4ccd-922c-b209ac4edbda-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "d127fb40-6eff-4ccd-922c-b209ac4edbda" (UID: "d127fb40-6eff-4ccd-922c-b209ac4edbda"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:00 crc kubenswrapper[4558]: I0120 17:51:00.811826 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d127fb40-6eff-4ccd-922c-b209ac4edbda-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:00 crc kubenswrapper[4558]: I0120 17:51:00.811859 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d127fb40-6eff-4ccd-922c-b209ac4edbda-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:00 crc kubenswrapper[4558]: I0120 17:51:00.811871 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d127fb40-6eff-4ccd-922c-b209ac4edbda-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:00 crc kubenswrapper[4558]: I0120 17:51:00.811881 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kbnl5\" (UniqueName: \"kubernetes.io/projected/d127fb40-6eff-4ccd-922c-b209ac4edbda-kube-api-access-kbnl5\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:00 crc kubenswrapper[4558]: I0120 17:51:00.811894 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d127fb40-6eff-4ccd-922c-b209ac4edbda-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:00 crc kubenswrapper[4558]: I0120 17:51:00.811903 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d127fb40-6eff-4ccd-922c-b209ac4edbda-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:00 crc kubenswrapper[4558]: I0120 17:51:00.846254 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.001763 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:51:01 crc kubenswrapper[4558]: E0120 17:51:01.003360 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[metrics-certs-tls-certs ovsdbserver-nb-tls-certs], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack-kuttl-tests/ovsdbserver-nb-0" podUID="bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162" Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.138318 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"0b958dc9-cb4f-4494-aa93-73ace6dcdc61","Type":"ContainerStarted","Data":"06b10739b6519aa32e9883e5de2135cfd7587729ac30dc7d54920de4c316ee81"} Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.156027 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"01835185-5ca5-467a-9362-b8de3e8665ed","Type":"ContainerStarted","Data":"d6739fd95d667740a4669ec34550d5b750a50b385b78c2f22b17ab41a028c741"} Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.156092 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"01835185-5ca5-467a-9362-b8de3e8665ed","Type":"ContainerStarted","Data":"231ea0e429a19476119c68f59681f7d882c14f24b98fab5408725cdb9668b576"} Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.175328 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"7c16f0c4-1462-4817-9f74-9e3d93193867","Type":"ContainerStarted","Data":"e689e24841d6a2881535a371383cc0c5b2611638b0371340716b92a4ebfc8207"} Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.176263 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.194334 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.194370 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-api-5968cb86f6-ctfg5" Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.194424 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-6fb4445c46-pmjxt" event={"ID":"b2ed54c7-3249-4a62-8d98-21618cb6dab6","Type":"ContainerStarted","Data":"913497c86e1fd7122fec98b093c66895e1a8361995489d6c8fda3deef690c7ef"} Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.194451 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-6fb4445c46-pmjxt" event={"ID":"b2ed54c7-3249-4a62-8d98-21618cb6dab6","Type":"ContainerStarted","Data":"b9249d1892527138cc235d58186150d42903080a9231b4c9631a998629089336"} Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.194463 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-6fb4445c46-pmjxt" event={"ID":"b2ed54c7-3249-4a62-8d98-21618cb6dab6","Type":"ContainerStarted","Data":"584e3effde7d8b0dcf01a557c94e94e490024c54d4addf894cbea42208822cb1"} Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.194485 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.194514 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.210307 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.217783 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.226590 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-api-6fb4445c46-pmjxt" podStartSLOduration=2.226573498 podStartE2EDuration="2.226573498s" podCreationTimestamp="2026-01-20 17:50:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:51:01.217764613 +0000 UTC m=+4154.978102581" watchObservedRunningTime="2026-01-20 17:51:01.226573498 +0000 UTC m=+4154.986911465" Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.226869 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.271757 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-api-5968cb86f6-ctfg5"] Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.277805 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-api-5968cb86f6-ctfg5"] Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.318064 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:51:01 crc kubenswrapper[4558]: W0120 17:51:01.322928 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod581c2419_48d9_4b7d_b71b_ce5cceb0326d.slice/crio-f4adebd8014d94828341921446ab5b98374cb2ca14e4d4d3889259bba2f533fe WatchSource:0}: Error finding container f4adebd8014d94828341921446ab5b98374cb2ca14e4d4d3889259bba2f533fe: Status 404 returned error can't find the container with id f4adebd8014d94828341921446ab5b98374cb2ca14e4d4d3889259bba2f533fe Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.328843 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-klmg7\" (UniqueName: \"kubernetes.io/projected/80a7bd73-c63f-4a27-9d3d-3fef760c025f-kube-api-access-klmg7\") pod \"80a7bd73-c63f-4a27-9d3d-3fef760c025f\" (UID: \"80a7bd73-c63f-4a27-9d3d-3fef760c025f\") " Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.329006 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-sb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"fc60cc21-f159-40bf-beca-c62718f57b9c\" (UID: \"fc60cc21-f159-40bf-beca-c62718f57b9c\") " Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.329114 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80a7bd73-c63f-4a27-9d3d-3fef760c025f-combined-ca-bundle\") pod \"80a7bd73-c63f-4a27-9d3d-3fef760c025f\" (UID: \"80a7bd73-c63f-4a27-9d3d-3fef760c025f\") " Jan 20 17:51:01 crc kubenswrapper[4558]: 
I0120 17:51:01.329232 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n29rj\" (UniqueName: \"kubernetes.io/projected/fc60cc21-f159-40bf-beca-c62718f57b9c-kube-api-access-n29rj\") pod \"fc60cc21-f159-40bf-beca-c62718f57b9c\" (UID: \"fc60cc21-f159-40bf-beca-c62718f57b9c\") " Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.329341 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/80a7bd73-c63f-4a27-9d3d-3fef760c025f-config\") pod \"80a7bd73-c63f-4a27-9d3d-3fef760c025f\" (UID: \"80a7bd73-c63f-4a27-9d3d-3fef760c025f\") " Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.329430 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lfb28\" (UniqueName: \"kubernetes.io/projected/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-kube-api-access-lfb28\") pod \"bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162\" (UID: \"bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162\") " Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.329525 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc60cc21-f159-40bf-beca-c62718f57b9c-combined-ca-bundle\") pod \"fc60cc21-f159-40bf-beca-c62718f57b9c\" (UID: \"fc60cc21-f159-40bf-beca-c62718f57b9c\") " Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.329599 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-combined-ca-bundle\") pod \"bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162\" (UID: \"bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162\") " Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.329670 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fc60cc21-f159-40bf-beca-c62718f57b9c-scripts\") pod \"fc60cc21-f159-40bf-beca-c62718f57b9c\" (UID: \"fc60cc21-f159-40bf-beca-c62718f57b9c\") " Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.329770 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-scripts\") pod \"bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162\" (UID: \"bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162\") " Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.331064 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-ovsdb-rundir\") pod \"bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162\" (UID: \"bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162\") " Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.331245 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/80a7bd73-c63f-4a27-9d3d-3fef760c025f-scripts\") pod \"80a7bd73-c63f-4a27-9d3d-3fef760c025f\" (UID: \"80a7bd73-c63f-4a27-9d3d-3fef760c025f\") " Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.331421 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/fc60cc21-f159-40bf-beca-c62718f57b9c-ovsdb-rundir\") pod \"fc60cc21-f159-40bf-beca-c62718f57b9c\" (UID: \"fc60cc21-f159-40bf-beca-c62718f57b9c\") " Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.331700 4558 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fc60cc21-f159-40bf-beca-c62718f57b9c-config\") pod \"fc60cc21-f159-40bf-beca-c62718f57b9c\" (UID: \"fc60cc21-f159-40bf-beca-c62718f57b9c\") " Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.331980 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-config\") pod \"bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162\" (UID: \"bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162\") " Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.332095 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/80a7bd73-c63f-4a27-9d3d-3fef760c025f-ovn-northd-tls-certs\") pod \"80a7bd73-c63f-4a27-9d3d-3fef760c025f\" (UID: \"80a7bd73-c63f-4a27-9d3d-3fef760c025f\") " Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.332185 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/fc60cc21-f159-40bf-beca-c62718f57b9c-ovsdbserver-sb-tls-certs\") pod \"fc60cc21-f159-40bf-beca-c62718f57b9c\" (UID: \"fc60cc21-f159-40bf-beca-c62718f57b9c\") " Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.332262 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/80a7bd73-c63f-4a27-9d3d-3fef760c025f-ovn-rundir\") pod \"80a7bd73-c63f-4a27-9d3d-3fef760c025f\" (UID: \"80a7bd73-c63f-4a27-9d3d-3fef760c025f\") " Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.332514 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-nb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162\" (UID: \"bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162\") " Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.329828 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/80a7bd73-c63f-4a27-9d3d-3fef760c025f-config" (OuterVolumeSpecName: "config") pod "80a7bd73-c63f-4a27-9d3d-3fef760c025f" (UID: "80a7bd73-c63f-4a27-9d3d-3fef760c025f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.330266 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fc60cc21-f159-40bf-beca-c62718f57b9c-scripts" (OuterVolumeSpecName: "scripts") pod "fc60cc21-f159-40bf-beca-c62718f57b9c" (UID: "fc60cc21-f159-40bf-beca-c62718f57b9c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.330358 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-scripts" (OuterVolumeSpecName: "scripts") pod "bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162" (UID: "bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.332940 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-config" (OuterVolumeSpecName: "config") pod "bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162" (UID: "bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.331352 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162" (UID: "bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.332103 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fc60cc21-f159-40bf-beca-c62718f57b9c-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "fc60cc21-f159-40bf-beca-c62718f57b9c" (UID: "fc60cc21-f159-40bf-beca-c62718f57b9c"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.332133 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/80a7bd73-c63f-4a27-9d3d-3fef760c025f-scripts" (OuterVolumeSpecName: "scripts") pod "80a7bd73-c63f-4a27-9d3d-3fef760c025f" (UID: "80a7bd73-c63f-4a27-9d3d-3fef760c025f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.332884 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fc60cc21-f159-40bf-beca-c62718f57b9c-config" (OuterVolumeSpecName: "config") pod "fc60cc21-f159-40bf-beca-c62718f57b9c" (UID: "fc60cc21-f159-40bf-beca-c62718f57b9c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.332872 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc60cc21-f159-40bf-beca-c62718f57b9c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fc60cc21-f159-40bf-beca-c62718f57b9c" (UID: "fc60cc21-f159-40bf-beca-c62718f57b9c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.333259 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80a7bd73-c63f-4a27-9d3d-3fef760c025f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "80a7bd73-c63f-4a27-9d3d-3fef760c025f" (UID: "80a7bd73-c63f-4a27-9d3d-3fef760c025f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.333368 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/80a7bd73-c63f-4a27-9d3d-3fef760c025f-ovn-rundir" (OuterVolumeSpecName: "ovn-rundir") pod "80a7bd73-c63f-4a27-9d3d-3fef760c025f" (UID: "80a7bd73-c63f-4a27-9d3d-3fef760c025f"). InnerVolumeSpecName "ovn-rundir". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.333389 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162" (UID: "bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.334263 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "ovndbcluster-sb-etc-ovn") pod "fc60cc21-f159-40bf-beca-c62718f57b9c" (UID: "fc60cc21-f159-40bf-beca-c62718f57b9c"). InnerVolumeSpecName "local-storage03-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.334629 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/80a7bd73-c63f-4a27-9d3d-3fef760c025f-kube-api-access-klmg7" (OuterVolumeSpecName: "kube-api-access-klmg7") pod "80a7bd73-c63f-4a27-9d3d-3fef760c025f" (UID: "80a7bd73-c63f-4a27-9d3d-3fef760c025f"). InnerVolumeSpecName "kube-api-access-klmg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.334850 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-kube-api-access-lfb28" (OuterVolumeSpecName: "kube-api-access-lfb28") pod "bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162" (UID: "bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162"). InnerVolumeSpecName "kube-api-access-lfb28". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.335366 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc60cc21-f159-40bf-beca-c62718f57b9c-ovsdbserver-sb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-sb-tls-certs") pod "fc60cc21-f159-40bf-beca-c62718f57b9c" (UID: "fc60cc21-f159-40bf-beca-c62718f57b9c"). InnerVolumeSpecName "ovsdbserver-sb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.335721 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.335744 4558 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/fc60cc21-f159-40bf-beca-c62718f57b9c-ovsdbserver-sb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.335758 4558 reconciler_common.go:293] "Volume detached for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/80a7bd73-c63f-4a27-9d3d-3fef760c025f-ovn-rundir\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.335769 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-klmg7\" (UniqueName: \"kubernetes.io/projected/80a7bd73-c63f-4a27-9d3d-3fef760c025f-kube-api-access-klmg7\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.335788 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" " Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.335801 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/80a7bd73-c63f-4a27-9d3d-3fef760c025f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.335812 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/80a7bd73-c63f-4a27-9d3d-3fef760c025f-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.335822 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lfb28\" (UniqueName: \"kubernetes.io/projected/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-kube-api-access-lfb28\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.335832 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc60cc21-f159-40bf-beca-c62718f57b9c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.335841 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.335849 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fc60cc21-f159-40bf-beca-c62718f57b9c-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.335859 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.335867 4558 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:01 crc kubenswrapper[4558]: 
I0120 17:51:01.335876 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/80a7bd73-c63f-4a27-9d3d-3fef760c025f-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.335885 4558 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/fc60cc21-f159-40bf-beca-c62718f57b9c-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.335894 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fc60cc21-f159-40bf-beca-c62718f57b9c-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.336988 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80a7bd73-c63f-4a27-9d3d-3fef760c025f-ovn-northd-tls-certs" (OuterVolumeSpecName: "ovn-northd-tls-certs") pod "80a7bd73-c63f-4a27-9d3d-3fef760c025f" (UID: "80a7bd73-c63f-4a27-9d3d-3fef760c025f"). InnerVolumeSpecName "ovn-northd-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.337338 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fc60cc21-f159-40bf-beca-c62718f57b9c-kube-api-access-n29rj" (OuterVolumeSpecName: "kube-api-access-n29rj") pod "fc60cc21-f159-40bf-beca-c62718f57b9c" (UID: "fc60cc21-f159-40bf-beca-c62718f57b9c"). InnerVolumeSpecName "kube-api-access-n29rj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.339200 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage02-crc" (OuterVolumeSpecName: "ovndbcluster-nb-etc-ovn") pod "bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162" (UID: "bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162"). InnerVolumeSpecName "local-storage02-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.363665 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc" Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.438932 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n29rj\" (UniqueName: \"kubernetes.io/projected/fc60cc21-f159-40bf-beca-c62718f57b9c-kube-api-access-n29rj\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.438963 4558 reconciler_common.go:293] "Volume detached for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/80a7bd73-c63f-4a27-9d3d-3fef760c025f-ovn-northd-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.439004 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" " Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.439015 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d127fb40-6eff-4ccd-922c-b209ac4edbda-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.439025 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.464783 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage02-crc" (UniqueName: "kubernetes.io/local-volume/local-storage02-crc") on node "crc" Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.541276 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.728337 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/cinder-scheduler-0" podUID="51d19369-14ac-4d62-ab05-9c5830856622" containerName="cinder-scheduler" probeResult="failure" output="Get \"http://10.217.1.196:8080/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 20 17:51:01 crc kubenswrapper[4558]: I0120 17:51:01.864187 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.206135 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"0b958dc9-cb4f-4494-aa93-73ace6dcdc61","Type":"ContainerStarted","Data":"8a78fc71a2e62ed8e4693ae28f1a90dfa8b0fa3f8be4e9a47bb03259d43a4b68"} Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.208787 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"01835185-5ca5-467a-9362-b8de3e8665ed","Type":"ContainerStarted","Data":"a8d84750152ad0dd7ec1ee98d8c4f4d1566801adb750bbf52fcbf29bfac3fcd6"} Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.208931 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.213255 4558 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"581c2419-48d9-4b7d-b71b-ce5cceb0326d","Type":"ContainerStarted","Data":"cbf05244e311e00d3dfc678229a149f12f8b205c75c63c8b93a994123c6583c1"} Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.213293 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"581c2419-48d9-4b7d-b71b-ce5cceb0326d","Type":"ContainerStarted","Data":"f4adebd8014d94828341921446ab5b98374cb2ca14e4d4d3889259bba2f533fe"} Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.213343 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.213417 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.213551 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.213695 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/barbican-api-6fb4445c46-pmjxt" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.213721 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/barbican-api-6fb4445c46-pmjxt" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.241079 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/cinder-api-0" podStartSLOduration=3.241059523 podStartE2EDuration="3.241059523s" podCreationTimestamp="2026-01-20 17:50:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:51:02.22573121 +0000 UTC m=+4155.986069178" watchObservedRunningTime="2026-01-20 17:51:02.241059523 +0000 UTC m=+4156.001397490" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.255044 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" podStartSLOduration=34.255028961 podStartE2EDuration="34.255028961s" podCreationTimestamp="2026-01-20 17:50:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:51:02.253444351 +0000 UTC m=+4156.013782318" watchObservedRunningTime="2026-01-20 17:51:02.255028961 +0000 UTC m=+4156.015366928" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.317197 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.325322 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.329113 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-metadata-0" podUID="acb2f7f3-f6d0-4b4f-936f-baee3dfa3938" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.1.192:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.329451 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-metadata-0" podUID="acb2f7f3-f6d0-4b4f-936f-baee3dfa3938" 
containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.1.192:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.415265 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.417560 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.421762 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovndbcluster-sb-scripts" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.422023 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ovndbcluster-sb-ovndbs" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.422139 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ovncluster-ovndbcluster-sb-dockercfg-f2cf9" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.422032 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ovn-metrics" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.427313 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovndbcluster-sb-config" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.483670 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.485006 4558 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/fc60cc21-f159-40bf-beca-c62718f57b9c-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.491307 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.499852 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.511184 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.512796 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.514257 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ovnnorthd-ovnnorthd-dockercfg-lk4s2" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.516548 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovnnorthd-scripts" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.516615 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.516632 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovnnorthd-config" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.516717 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ovnnorthd-ovndbs" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.532227 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.542447 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.551827 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.554294 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.556115 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ovncluster-ovndbcluster-nb-dockercfg-fcsjv" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.556353 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovndbcluster-nb-scripts" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.557355 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovndbcluster-nb-config" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.557647 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ovndbcluster-nb-ovndbs" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.558507 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.580145 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="80a7bd73-c63f-4a27-9d3d-3fef760c025f" path="/var/lib/kubelet/pods/80a7bd73-c63f-4a27-9d3d-3fef760c025f/volumes" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.580718 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162" path="/var/lib/kubelet/pods/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162/volumes" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.581156 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d127fb40-6eff-4ccd-922c-b209ac4edbda" path="/var/lib/kubelet/pods/d127fb40-6eff-4ccd-922c-b209ac4edbda/volumes" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.581653 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fc60cc21-f159-40bf-beca-c62718f57b9c" path="/var/lib/kubelet/pods/fc60cc21-f159-40bf-beca-c62718f57b9c/volumes" Jan 20 17:51:02 crc 
kubenswrapper[4558]: I0120 17:51:02.587220 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/634825c8-6d4e-42a0-83c6-c83d105a2a7f-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"634825c8-6d4e-42a0-83c6-c83d105a2a7f\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.587286 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dmp6v\" (UniqueName: \"kubernetes.io/projected/634825c8-6d4e-42a0-83c6-c83d105a2a7f-kube-api-access-dmp6v\") pod \"ovsdbserver-sb-0\" (UID: \"634825c8-6d4e-42a0-83c6-c83d105a2a7f\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.587617 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/634825c8-6d4e-42a0-83c6-c83d105a2a7f-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"634825c8-6d4e-42a0-83c6-c83d105a2a7f\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.587740 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/634825c8-6d4e-42a0-83c6-c83d105a2a7f-config\") pod \"ovsdbserver-sb-0\" (UID: \"634825c8-6d4e-42a0-83c6-c83d105a2a7f\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.587938 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/634825c8-6d4e-42a0-83c6-c83d105a2a7f-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"634825c8-6d4e-42a0-83c6-c83d105a2a7f\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.588014 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/634825c8-6d4e-42a0-83c6-c83d105a2a7f-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"634825c8-6d4e-42a0-83c6-c83d105a2a7f\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.588103 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/634825c8-6d4e-42a0-83c6-c83d105a2a7f-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"634825c8-6d4e-42a0-83c6-c83d105a2a7f\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.588156 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-sb-0\" (UID: \"634825c8-6d4e-42a0-83c6-c83d105a2a7f\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.588243 4558 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/80a7bd73-c63f-4a27-9d3d-3fef760c025f-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.689608 4558 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/18c09849-702b-4939-9cf1-0e06c6adc889-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"18c09849-702b-4939-9cf1-0e06c6adc889\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.689655 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.689687 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/fc65f7eb-e162-44ae-8136-bdcb6baa7c0f-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"fc65f7eb-e162-44ae-8136-bdcb6baa7c0f\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.689718 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/634825c8-6d4e-42a0-83c6-c83d105a2a7f-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"634825c8-6d4e-42a0-83c6-c83d105a2a7f\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.689744 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fc65f7eb-e162-44ae-8136-bdcb6baa7c0f-config\") pod \"ovsdbserver-nb-0\" (UID: \"fc65f7eb-e162-44ae-8136-bdcb6baa7c0f\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.689765 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-sb-0\" (UID: \"634825c8-6d4e-42a0-83c6-c83d105a2a7f\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.689785 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/fc65f7eb-e162-44ae-8136-bdcb6baa7c0f-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"fc65f7eb-e162-44ae-8136-bdcb6baa7c0f\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.689802 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef-scripts\") pod \"ovn-northd-0\" (UID: \"04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.689840 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef-config\") pod \"ovn-northd-0\" (UID: \"04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.689861 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: 
\"kubernetes.io/empty-dir/634825c8-6d4e-42a0-83c6-c83d105a2a7f-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"634825c8-6d4e-42a0-83c6-c83d105a2a7f\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.689884 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/fc65f7eb-e162-44ae-8136-bdcb6baa7c0f-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"fc65f7eb-e162-44ae-8136-bdcb6baa7c0f\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.689913 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7szz6\" (UniqueName: \"kubernetes.io/projected/04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef-kube-api-access-7szz6\") pod \"ovn-northd-0\" (UID: \"04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.689965 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dmp6v\" (UniqueName: \"kubernetes.io/projected/634825c8-6d4e-42a0-83c6-c83d105a2a7f-kube-api-access-dmp6v\") pod \"ovsdbserver-sb-0\" (UID: \"634825c8-6d4e-42a0-83c6-c83d105a2a7f\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.690025 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/634825c8-6d4e-42a0-83c6-c83d105a2a7f-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"634825c8-6d4e-42a0-83c6-c83d105a2a7f\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.690052 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc65f7eb-e162-44ae-8136-bdcb6baa7c0f-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"fc65f7eb-e162-44ae-8136-bdcb6baa7c0f\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.690079 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/634825c8-6d4e-42a0-83c6-c83d105a2a7f-config\") pod \"ovsdbserver-sb-0\" (UID: \"634825c8-6d4e-42a0-83c6-c83d105a2a7f\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.690103 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fc65f7eb-e162-44ae-8136-bdcb6baa7c0f-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"fc65f7eb-e162-44ae-8136-bdcb6baa7c0f\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.690122 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.690153 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"fc65f7eb-e162-44ae-8136-bdcb6baa7c0f\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.690190 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7hzv8\" (UniqueName: \"kubernetes.io/projected/fc65f7eb-e162-44ae-8136-bdcb6baa7c0f-kube-api-access-7hzv8\") pod \"ovsdbserver-nb-0\" (UID: \"fc65f7eb-e162-44ae-8136-bdcb6baa7c0f\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.690213 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.690236 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/634825c8-6d4e-42a0-83c6-c83d105a2a7f-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"634825c8-6d4e-42a0-83c6-c83d105a2a7f\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.690280 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.690303 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/634825c8-6d4e-42a0-83c6-c83d105a2a7f-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"634825c8-6d4e-42a0-83c6-c83d105a2a7f\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.690368 4558 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.690380 4558 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/bfaf0a8e-63cd-4c1d-81c2-fa6ea233a162-ovsdbserver-nb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.693449 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/634825c8-6d4e-42a0-83c6-c83d105a2a7f-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"634825c8-6d4e-42a0-83c6-c83d105a2a7f\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.694096 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/634825c8-6d4e-42a0-83c6-c83d105a2a7f-config\") pod \"ovsdbserver-sb-0\" (UID: \"634825c8-6d4e-42a0-83c6-c83d105a2a7f\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.694209 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for 
volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-sb-0\" (UID: \"634825c8-6d4e-42a0-83c6-c83d105a2a7f\") device mount path \"/mnt/openstack/pv03\"" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.694524 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/634825c8-6d4e-42a0-83c6-c83d105a2a7f-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"634825c8-6d4e-42a0-83c6-c83d105a2a7f\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.699851 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/634825c8-6d4e-42a0-83c6-c83d105a2a7f-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"634825c8-6d4e-42a0-83c6-c83d105a2a7f\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.699956 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/634825c8-6d4e-42a0-83c6-c83d105a2a7f-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"634825c8-6d4e-42a0-83c6-c83d105a2a7f\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.699648 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/18c09849-702b-4939-9cf1-0e06c6adc889-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"18c09849-702b-4939-9cf1-0e06c6adc889\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.703247 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/634825c8-6d4e-42a0-83c6-c83d105a2a7f-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"634825c8-6d4e-42a0-83c6-c83d105a2a7f\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.714988 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dmp6v\" (UniqueName: \"kubernetes.io/projected/634825c8-6d4e-42a0-83c6-c83d105a2a7f-kube-api-access-dmp6v\") pod \"ovsdbserver-sb-0\" (UID: \"634825c8-6d4e-42a0-83c6-c83d105a2a7f\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.719923 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-sb-0\" (UID: \"634825c8-6d4e-42a0-83c6-c83d105a2a7f\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.743086 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.794309 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fc65f7eb-e162-44ae-8136-bdcb6baa7c0f-config\") pod \"ovsdbserver-nb-0\" (UID: \"fc65f7eb-e162-44ae-8136-bdcb6baa7c0f\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.794366 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/fc65f7eb-e162-44ae-8136-bdcb6baa7c0f-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"fc65f7eb-e162-44ae-8136-bdcb6baa7c0f\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.794393 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef-scripts\") pod \"ovn-northd-0\" (UID: \"04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.794519 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef-config\") pod \"ovn-northd-0\" (UID: \"04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.794594 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/fc65f7eb-e162-44ae-8136-bdcb6baa7c0f-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"fc65f7eb-e162-44ae-8136-bdcb6baa7c0f\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.794660 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7szz6\" (UniqueName: \"kubernetes.io/projected/04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef-kube-api-access-7szz6\") pod \"ovn-northd-0\" (UID: \"04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.794842 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc65f7eb-e162-44ae-8136-bdcb6baa7c0f-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"fc65f7eb-e162-44ae-8136-bdcb6baa7c0f\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.794901 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fc65f7eb-e162-44ae-8136-bdcb6baa7c0f-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"fc65f7eb-e162-44ae-8136-bdcb6baa7c0f\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.794924 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.794974 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"fc65f7eb-e162-44ae-8136-bdcb6baa7c0f\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.795026 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7hzv8\" (UniqueName: \"kubernetes.io/projected/fc65f7eb-e162-44ae-8136-bdcb6baa7c0f-kube-api-access-7hzv8\") pod \"ovsdbserver-nb-0\" (UID: \"fc65f7eb-e162-44ae-8136-bdcb6baa7c0f\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.795052 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.795089 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.795137 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.795185 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/fc65f7eb-e162-44ae-8136-bdcb6baa7c0f-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"fc65f7eb-e162-44ae-8136-bdcb6baa7c0f\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.796656 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef-scripts\") pod \"ovn-northd-0\" (UID: \"04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.796762 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fc65f7eb-e162-44ae-8136-bdcb6baa7c0f-config\") pod \"ovsdbserver-nb-0\" (UID: \"fc65f7eb-e162-44ae-8136-bdcb6baa7c0f\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.797095 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef-config\") pod \"ovn-northd-0\" (UID: \"04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.797227 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:51:02 crc kubenswrapper[4558]: 
I0120 17:51:02.796778 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/fc65f7eb-e162-44ae-8136-bdcb6baa7c0f-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"fc65f7eb-e162-44ae-8136-bdcb6baa7c0f\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.797740 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fc65f7eb-e162-44ae-8136-bdcb6baa7c0f-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"fc65f7eb-e162-44ae-8136-bdcb6baa7c0f\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.798031 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"fc65f7eb-e162-44ae-8136-bdcb6baa7c0f\") device mount path \"/mnt/openstack/pv02\"" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.800959 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/fc65f7eb-e162-44ae-8136-bdcb6baa7c0f-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"fc65f7eb-e162-44ae-8136-bdcb6baa7c0f\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.801462 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.804552 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.804907 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc65f7eb-e162-44ae-8136-bdcb6baa7c0f-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"fc65f7eb-e162-44ae-8136-bdcb6baa7c0f\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.806393 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/fc65f7eb-e162-44ae-8136-bdcb6baa7c0f-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"fc65f7eb-e162-44ae-8136-bdcb6baa7c0f\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.811373 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.815585 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7hzv8\" (UniqueName: 
\"kubernetes.io/projected/fc65f7eb-e162-44ae-8136-bdcb6baa7c0f-kube-api-access-7hzv8\") pod \"ovsdbserver-nb-0\" (UID: \"fc65f7eb-e162-44ae-8136-bdcb6baa7c0f\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.817348 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7szz6\" (UniqueName: \"kubernetes.io/projected/04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef-kube-api-access-7szz6\") pod \"ovn-northd-0\" (UID: \"04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.824707 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"fc65f7eb-e162-44ae-8136-bdcb6baa7c0f\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.831375 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.875519 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:51:02 crc kubenswrapper[4558]: I0120 17:51:02.893915 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:51:03 crc kubenswrapper[4558]: I0120 17:51:03.224666 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"0b958dc9-cb4f-4494-aa93-73ace6dcdc61","Type":"ContainerStarted","Data":"66fa06375ac661894141d34db8dc370e508df42a50db850135c8f080e8692b6d"} Jan 20 17:51:03 crc kubenswrapper[4558]: I0120 17:51:03.224976 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"0b958dc9-cb4f-4494-aa93-73ace6dcdc61","Type":"ContainerStarted","Data":"4af2d6d8bcacf242d89251f17f26c244c27d416082e3b9078fac945c4afb9920"} Jan 20 17:51:03 crc kubenswrapper[4558]: I0120 17:51:03.228564 4558 generic.go:334] "Generic (PLEG): container finished" podID="7c16f0c4-1462-4817-9f74-9e3d93193867" containerID="e689e24841d6a2881535a371383cc0c5b2611638b0371340716b92a4ebfc8207" exitCode=1 Jan 20 17:51:03 crc kubenswrapper[4558]: I0120 17:51:03.229839 4558 scope.go:117] "RemoveContainer" containerID="e689e24841d6a2881535a371383cc0c5b2611638b0371340716b92a4ebfc8207" Jan 20 17:51:03 crc kubenswrapper[4558]: E0120 17:51:03.230045 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-cell1-conductor-conductor\" with CrashLoopBackOff: \"back-off 10s restarting failed container=nova-cell1-conductor-conductor pod=nova-cell1-conductor-0_openstack-kuttl-tests(7c16f0c4-1462-4817-9f74-9e3d93193867)\"" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podUID="7c16f0c4-1462-4817-9f74-9e3d93193867" Jan 20 17:51:03 crc kubenswrapper[4558]: I0120 17:51:03.230791 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"7c16f0c4-1462-4817-9f74-9e3d93193867","Type":"ContainerDied","Data":"e689e24841d6a2881535a371383cc0c5b2611638b0371340716b92a4ebfc8207"} Jan 20 17:51:03 crc kubenswrapper[4558]: I0120 17:51:03.230875 4558 scope.go:117] "RemoveContainer" containerID="5c4ded7cac27e0c92726d8a6f3ee7686e2622233d98b3911837d546b85d42273" Jan 20 17:51:03 crc 
kubenswrapper[4558]: I0120 17:51:03.240348 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:51:03 crc kubenswrapper[4558]: W0120 17:51:03.267435 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod634825c8_6d4e_42a0_83c6_c83d105a2a7f.slice/crio-8ff36f9bef16f31e4c38cc4e8ef793a35d902f20f56292b5daa79459709bed27 WatchSource:0}: Error finding container 8ff36f9bef16f31e4c38cc4e8ef793a35d902f20f56292b5daa79459709bed27: Status 404 returned error can't find the container with id 8ff36f9bef16f31e4c38cc4e8ef793a35d902f20f56292b5daa79459709bed27 Jan 20 17:51:03 crc kubenswrapper[4558]: I0120 17:51:03.338338 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:51:03 crc kubenswrapper[4558]: W0120 17:51:03.340337 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod04c2bb7a_e2e2_4f8f_bf20_e6c381d117ef.slice/crio-faa5df1b3cebb05d0c7b3e1f9f523cf1c2ef4a55dc9f1334a82b76cdc35e20de WatchSource:0}: Error finding container faa5df1b3cebb05d0c7b3e1f9f523cf1c2ef4a55dc9f1334a82b76cdc35e20de: Status 404 returned error can't find the container with id faa5df1b3cebb05d0c7b3e1f9f523cf1c2ef4a55dc9f1334a82b76cdc35e20de Jan 20 17:51:03 crc kubenswrapper[4558]: I0120 17:51:03.425388 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:51:03 crc kubenswrapper[4558]: I0120 17:51:03.433346 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:51:03 crc kubenswrapper[4558]: I0120 17:51:03.514008 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/733a1ae1-a917-447a-afa6-67cf7d688f86-internal-tls-certs\") pod \"nova-api-0\" (UID: \"733a1ae1-a917-447a-afa6-67cf7d688f86\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:51:03 crc kubenswrapper[4558]: E0120 17:51:03.514205 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-nova-internal-svc: object "openstack-kuttl-tests"/"cert-nova-internal-svc" not registered Jan 20 17:51:03 crc kubenswrapper[4558]: E0120 17:51:03.514259 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/733a1ae1-a917-447a-afa6-67cf7d688f86-internal-tls-certs podName:733a1ae1-a917-447a-afa6-67cf7d688f86 nodeName:}" failed. No retries permitted until 2026-01-20 17:51:35.514243759 +0000 UTC m=+4189.274581727 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/733a1ae1-a917-447a-afa6-67cf7d688f86-internal-tls-certs") pod "nova-api-0" (UID: "733a1ae1-a917-447a-afa6-67cf7d688f86") : object "openstack-kuttl-tests"/"cert-nova-internal-svc" not registered Jan 20 17:51:04 crc kubenswrapper[4558]: I0120 17:51:04.246635 4558 scope.go:117] "RemoveContainer" containerID="e689e24841d6a2881535a371383cc0c5b2611638b0371340716b92a4ebfc8207" Jan 20 17:51:04 crc kubenswrapper[4558]: E0120 17:51:04.247357 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-cell1-conductor-conductor\" with CrashLoopBackOff: \"back-off 10s restarting failed container=nova-cell1-conductor-conductor pod=nova-cell1-conductor-0_openstack-kuttl-tests(7c16f0c4-1462-4817-9f74-9e3d93193867)\"" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podUID="7c16f0c4-1462-4817-9f74-9e3d93193867" Jan 20 17:51:04 crc kubenswrapper[4558]: I0120 17:51:04.249923 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef","Type":"ContainerStarted","Data":"4b9890fbbd9a3bfc034db961ae88fefa150c5ec801cba28d028682bd258b5ebd"} Jan 20 17:51:04 crc kubenswrapper[4558]: I0120 17:51:04.249967 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef","Type":"ContainerStarted","Data":"1035f6c74f13c042848a0da43ccb550222db3c6c766f2b87733c2701b6ec1a2e"} Jan 20 17:51:04 crc kubenswrapper[4558]: I0120 17:51:04.249978 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef","Type":"ContainerStarted","Data":"faa5df1b3cebb05d0c7b3e1f9f523cf1c2ef4a55dc9f1334a82b76cdc35e20de"} Jan 20 17:51:04 crc kubenswrapper[4558]: I0120 17:51:04.250155 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:51:04 crc kubenswrapper[4558]: I0120 17:51:04.254569 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"634825c8-6d4e-42a0-83c6-c83d105a2a7f","Type":"ContainerStarted","Data":"b80ad06d7261b9a0d3933816a91a930f6b5ef47fe5065a1e8d945d221fa487f7"} Jan 20 17:51:04 crc kubenswrapper[4558]: I0120 17:51:04.254611 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"634825c8-6d4e-42a0-83c6-c83d105a2a7f","Type":"ContainerStarted","Data":"2f7bc8bf8674f0da29404ad2947529694e0eaec86862f2864ca592a29b1fe552"} Jan 20 17:51:04 crc kubenswrapper[4558]: I0120 17:51:04.254623 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"634825c8-6d4e-42a0-83c6-c83d105a2a7f","Type":"ContainerStarted","Data":"8ff36f9bef16f31e4c38cc4e8ef793a35d902f20f56292b5daa79459709bed27"} Jan 20 17:51:04 crc kubenswrapper[4558]: I0120 17:51:04.258377 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"18c09849-702b-4939-9cf1-0e06c6adc889","Type":"ContainerStarted","Data":"e77fe97286d1a350d048ece8ea776baa3b6bb50555cead4bb9310050977c990c"} Jan 20 17:51:04 crc kubenswrapper[4558]: I0120 17:51:04.258396 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" 
event={"ID":"18c09849-702b-4939-9cf1-0e06c6adc889","Type":"ContainerStarted","Data":"6a981162c7819e3aa3291e5b10f94a92729fd9ddc78ee4e6cabaa569e4490108"} Jan 20 17:51:04 crc kubenswrapper[4558]: I0120 17:51:04.259560 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"fc65f7eb-e162-44ae-8136-bdcb6baa7c0f","Type":"ContainerStarted","Data":"c90c4608d07fcc026b1da8048f2ccc5e2af6fd92d05975c8c1521388f7341569"} Jan 20 17:51:04 crc kubenswrapper[4558]: I0120 17:51:04.259577 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"fc65f7eb-e162-44ae-8136-bdcb6baa7c0f","Type":"ContainerStarted","Data":"5f7c09a23fc3d9fe92c0898786808da9cd4326e33ccea9ba8df24f4b5c6ab067"} Jan 20 17:51:04 crc kubenswrapper[4558]: I0120 17:51:04.259586 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"fc65f7eb-e162-44ae-8136-bdcb6baa7c0f","Type":"ContainerStarted","Data":"d1bff363e27d1138210b36c720191fdd963603ecb4d82dd076279062976f3c5f"} Jan 20 17:51:04 crc kubenswrapper[4558]: I0120 17:51:04.311826 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ovn-northd-0" podStartSLOduration=2.311811788 podStartE2EDuration="2.311811788s" podCreationTimestamp="2026-01-20 17:51:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:51:04.304963049 +0000 UTC m=+4158.065301016" watchObservedRunningTime="2026-01-20 17:51:04.311811788 +0000 UTC m=+4158.072149755" Jan 20 17:51:04 crc kubenswrapper[4558]: I0120 17:51:04.347144 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ovsdbserver-sb-0" podStartSLOduration=2.347131041 podStartE2EDuration="2.347131041s" podCreationTimestamp="2026-01-20 17:51:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:51:04.338555797 +0000 UTC m=+4158.098893764" watchObservedRunningTime="2026-01-20 17:51:04.347131041 +0000 UTC m=+4158.107469008" Jan 20 17:51:04 crc kubenswrapper[4558]: I0120 17:51:04.366725 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ovsdbserver-nb-0" podStartSLOduration=2.36670572 podStartE2EDuration="2.36670572s" podCreationTimestamp="2026-01-20 17:51:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:51:04.36358466 +0000 UTC m=+4158.123922627" watchObservedRunningTime="2026-01-20 17:51:04.36670572 +0000 UTC m=+4158.127043686" Jan 20 17:51:04 crc kubenswrapper[4558]: I0120 17:51:04.566635 4558 scope.go:117] "RemoveContainer" containerID="a9c70afb4065cfa5e1226d107bf8a809ffc6655bc0ed256d32ec2a30414f56e5" Jan 20 17:51:04 crc kubenswrapper[4558]: E0120 17:51:04.567271 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-cell0-conductor-conductor\" with CrashLoopBackOff: \"back-off 20s restarting failed container=nova-cell0-conductor-conductor pod=nova-cell0-conductor-0_openstack-kuttl-tests(924fa7ce-8d60-4b2f-b62b-d5e146474f71)\"" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="924fa7ce-8d60-4b2f-b62b-d5e146474f71" Jan 20 17:51:05 crc kubenswrapper[4558]: I0120 17:51:05.273518 4558 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"0b958dc9-cb4f-4494-aa93-73ace6dcdc61","Type":"ContainerStarted","Data":"a269df80b321b46782542c7267ea65a9ec0f5ce92626e5b0afab4be6fd049ba6"} Jan 20 17:51:05 crc kubenswrapper[4558]: I0120 17:51:05.273830 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:51:05 crc kubenswrapper[4558]: I0120 17:51:05.276743 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"18c09849-702b-4939-9cf1-0e06c6adc889","Type":"ContainerStarted","Data":"bf79c6fdc0dfbb952c1fe2ab45e02ef4b273e3d1165a858079afaf36ab6764cd"} Jan 20 17:51:05 crc kubenswrapper[4558]: I0120 17:51:05.323331 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=2.428462544 podStartE2EDuration="6.323313432s" podCreationTimestamp="2026-01-20 17:50:59 +0000 UTC" firstStartedPulling="2026-01-20 17:51:00.407710517 +0000 UTC m=+4154.168048483" lastFinishedPulling="2026-01-20 17:51:04.302561413 +0000 UTC m=+4158.062899371" observedRunningTime="2026-01-20 17:51:05.315148469 +0000 UTC m=+4159.075486435" watchObservedRunningTime="2026-01-20 17:51:05.323313432 +0000 UTC m=+4159.083651399" Jan 20 17:51:05 crc kubenswrapper[4558]: I0120 17:51:05.364878 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-internal-api-0" podStartSLOduration=35.364859915 podStartE2EDuration="35.364859915s" podCreationTimestamp="2026-01-20 17:50:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:51:05.35315776 +0000 UTC m=+4159.113495728" watchObservedRunningTime="2026-01-20 17:51:05.364859915 +0000 UTC m=+4159.125197882" Jan 20 17:51:05 crc kubenswrapper[4558]: I0120 17:51:05.439100 4558 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:51:05 crc kubenswrapper[4558]: I0120 17:51:05.439659 4558 scope.go:117] "RemoveContainer" containerID="e689e24841d6a2881535a371383cc0c5b2611638b0371340716b92a4ebfc8207" Jan 20 17:51:05 crc kubenswrapper[4558]: E0120 17:51:05.439900 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-cell1-conductor-conductor\" with CrashLoopBackOff: \"back-off 10s restarting failed container=nova-cell1-conductor-conductor pod=nova-cell1-conductor-0_openstack-kuttl-tests(7c16f0c4-1462-4817-9f74-9e3d93193867)\"" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podUID="7c16f0c4-1462-4817-9f74-9e3d93193867" Jan 20 17:51:05 crc kubenswrapper[4558]: I0120 17:51:05.743573 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:51:05 crc kubenswrapper[4558]: I0120 17:51:05.847457 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:51:05 crc kubenswrapper[4558]: I0120 17:51:05.876594 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:51:06 crc kubenswrapper[4558]: I0120 17:51:06.288104 4558 generic.go:334] "Generic (PLEG): container finished" podID="d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2" 
containerID="6d47d3d41183f226f75449e13968a59a7a2d75eca49f1c5ad158b78e20b4ff2a" exitCode=1 Jan 20 17:51:06 crc kubenswrapper[4558]: I0120 17:51:06.288890 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2","Type":"ContainerDied","Data":"6d47d3d41183f226f75449e13968a59a7a2d75eca49f1c5ad158b78e20b4ff2a"} Jan 20 17:51:06 crc kubenswrapper[4558]: I0120 17:51:06.289476 4558 scope.go:117] "RemoveContainer" containerID="6d47d3d41183f226f75449e13968a59a7a2d75eca49f1c5ad158b78e20b4ff2a" Jan 20 17:51:07 crc kubenswrapper[4558]: I0120 17:51:07.308264 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2","Type":"ContainerStarted","Data":"1070e671e60499543eff1520064c8d7b1151ff81f9de7c5dd18ebcbdbdbbe487"} Jan 20 17:51:07 crc kubenswrapper[4558]: I0120 17:51:07.462418 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:51:07 crc kubenswrapper[4558]: I0120 17:51:07.711736 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:51:07 crc kubenswrapper[4558]: I0120 17:51:07.743883 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:51:07 crc kubenswrapper[4558]: I0120 17:51:07.858351 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:51:07 crc kubenswrapper[4558]: I0120 17:51:07.858554 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" podUID="581c2419-48d9-4b7d-b71b-ce5cceb0326d" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://cbf05244e311e00d3dfc678229a149f12f8b205c75c63c8b93a994123c6583c1" gracePeriod=30 Jan 20 17:51:07 crc kubenswrapper[4558]: I0120 17:51:07.869261 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:51:07 crc kubenswrapper[4558]: I0120 17:51:07.869459 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="07e11378-90b7-4994-99fa-6e2d78b61f63" containerName="glance-log" containerID="cri-o://7eefed10cf0d89cd2ed97e7df9a22ee9e031b097685b34b4e3323811ac699db8" gracePeriod=30 Jan 20 17:51:07 crc kubenswrapper[4558]: I0120 17:51:07.869509 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="07e11378-90b7-4994-99fa-6e2d78b61f63" containerName="glance-httpd" containerID="cri-o://9ee9796db29af4c22a51a195683b27835e60fea772e6cf6721ee2e162cc239b4" gracePeriod=30 Jan 20 17:51:07 crc kubenswrapper[4558]: I0120 17:51:07.876252 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:51:07 crc kubenswrapper[4558]: I0120 17:51:07.921255 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:51:07 crc kubenswrapper[4558]: I0120 17:51:07.921450 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="18c09849-702b-4939-9cf1-0e06c6adc889" containerName="glance-log" 
containerID="cri-o://e77fe97286d1a350d048ece8ea776baa3b6bb50555cead4bb9310050977c990c" gracePeriod=30 Jan 20 17:51:07 crc kubenswrapper[4558]: I0120 17:51:07.921558 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="18c09849-702b-4939-9cf1-0e06c6adc889" containerName="glance-httpd" containerID="cri-o://bf79c6fdc0dfbb952c1fe2ab45e02ef4b273e3d1165a858079afaf36ab6764cd" gracePeriod=30 Jan 20 17:51:08 crc kubenswrapper[4558]: I0120 17:51:08.320353 4558 generic.go:334] "Generic (PLEG): container finished" podID="07e11378-90b7-4994-99fa-6e2d78b61f63" containerID="7eefed10cf0d89cd2ed97e7df9a22ee9e031b097685b34b4e3323811ac699db8" exitCode=143 Jan 20 17:51:08 crc kubenswrapper[4558]: I0120 17:51:08.320434 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"07e11378-90b7-4994-99fa-6e2d78b61f63","Type":"ContainerDied","Data":"7eefed10cf0d89cd2ed97e7df9a22ee9e031b097685b34b4e3323811ac699db8"} Jan 20 17:51:08 crc kubenswrapper[4558]: I0120 17:51:08.322925 4558 generic.go:334] "Generic (PLEG): container finished" podID="18c09849-702b-4939-9cf1-0e06c6adc889" containerID="e77fe97286d1a350d048ece8ea776baa3b6bb50555cead4bb9310050977c990c" exitCode=143 Jan 20 17:51:08 crc kubenswrapper[4558]: I0120 17:51:08.323052 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"18c09849-702b-4939-9cf1-0e06c6adc889","Type":"ContainerDied","Data":"e77fe97286d1a350d048ece8ea776baa3b6bb50555cead4bb9310050977c990c"} Jan 20 17:51:08 crc kubenswrapper[4558]: I0120 17:51:08.817597 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:51:08 crc kubenswrapper[4558]: I0120 17:51:08.839450 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:51:08 crc kubenswrapper[4558]: I0120 17:51:08.859717 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:51:08 crc kubenswrapper[4558]: I0120 17:51:08.874857 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:51:08 crc kubenswrapper[4558]: I0120 17:51:08.893443 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:51:08 crc kubenswrapper[4558]: I0120 17:51:08.893637 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-api-0" podUID="01835185-5ca5-467a-9362-b8de3e8665ed" containerName="cinder-api-log" containerID="cri-o://d6739fd95d667740a4669ec34550d5b750a50b385b78c2f22b17ab41a028c741" gracePeriod=30 Jan 20 17:51:08 crc kubenswrapper[4558]: I0120 17:51:08.893950 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-api-0" podUID="01835185-5ca5-467a-9362-b8de3e8665ed" containerName="cinder-api" containerID="cri-o://a8d84750152ad0dd7ec1ee98d8c4f4d1566801adb750bbf52fcbf29bfac3fcd6" gracePeriod=30 Jan 20 17:51:08 crc kubenswrapper[4558]: I0120 17:51:08.904263 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack-kuttl-tests/cinder-api-0" podUID="01835185-5ca5-467a-9362-b8de3e8665ed" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.1.210:8776/healthcheck\": EOF" Jan 20 17:51:08 crc kubenswrapper[4558]: I0120 17:51:08.904353 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/cinder-api-0" podUID="01835185-5ca5-467a-9362-b8de3e8665ed" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.1.210:8776/healthcheck\": EOF" Jan 20 17:51:08 crc kubenswrapper[4558]: I0120 17:51:08.945248 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18c09849-702b-4939-9cf1-0e06c6adc889-combined-ca-bundle\") pod \"18c09849-702b-4939-9cf1-0e06c6adc889\" (UID: \"18c09849-702b-4939-9cf1-0e06c6adc889\") " Jan 20 17:51:08 crc kubenswrapper[4558]: I0120 17:51:08.945297 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kcptj\" (UniqueName: \"kubernetes.io/projected/581c2419-48d9-4b7d-b71b-ce5cceb0326d-kube-api-access-kcptj\") pod \"581c2419-48d9-4b7d-b71b-ce5cceb0326d\" (UID: \"581c2419-48d9-4b7d-b71b-ce5cceb0326d\") " Jan 20 17:51:08 crc kubenswrapper[4558]: I0120 17:51:08.945461 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/18c09849-702b-4939-9cf1-0e06c6adc889-config-data\") pod \"18c09849-702b-4939-9cf1-0e06c6adc889\" (UID: \"18c09849-702b-4939-9cf1-0e06c6adc889\") " Jan 20 17:51:08 crc kubenswrapper[4558]: I0120 17:51:08.945495 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/581c2419-48d9-4b7d-b71b-ce5cceb0326d-nova-novncproxy-tls-certs\") pod \"581c2419-48d9-4b7d-b71b-ce5cceb0326d\" (UID: \"581c2419-48d9-4b7d-b71b-ce5cceb0326d\") " Jan 20 17:51:08 crc kubenswrapper[4558]: I0120 17:51:08.945528 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/18c09849-702b-4939-9cf1-0e06c6adc889-scripts\") pod \"18c09849-702b-4939-9cf1-0e06c6adc889\" (UID: \"18c09849-702b-4939-9cf1-0e06c6adc889\") " Jan 20 17:51:08 crc kubenswrapper[4558]: I0120 17:51:08.945612 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage11-crc\") pod \"18c09849-702b-4939-9cf1-0e06c6adc889\" (UID: \"18c09849-702b-4939-9cf1-0e06c6adc889\") " Jan 20 17:51:08 crc kubenswrapper[4558]: I0120 17:51:08.945638 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/581c2419-48d9-4b7d-b71b-ce5cceb0326d-vencrypt-tls-certs\") pod \"581c2419-48d9-4b7d-b71b-ce5cceb0326d\" (UID: \"581c2419-48d9-4b7d-b71b-ce5cceb0326d\") " Jan 20 17:51:08 crc kubenswrapper[4558]: I0120 17:51:08.945674 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/18c09849-702b-4939-9cf1-0e06c6adc889-httpd-run\") pod \"18c09849-702b-4939-9cf1-0e06c6adc889\" (UID: \"18c09849-702b-4939-9cf1-0e06c6adc889\") " Jan 20 17:51:08 crc kubenswrapper[4558]: I0120 17:51:08.945729 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fg8wx\" (UniqueName: \"kubernetes.io/projected/18c09849-702b-4939-9cf1-0e06c6adc889-kube-api-access-fg8wx\") pod \"18c09849-702b-4939-9cf1-0e06c6adc889\" (UID: \"18c09849-702b-4939-9cf1-0e06c6adc889\") " Jan 20 17:51:08 crc kubenswrapper[4558]: I0120 17:51:08.945774 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/18c09849-702b-4939-9cf1-0e06c6adc889-internal-tls-certs\") pod \"18c09849-702b-4939-9cf1-0e06c6adc889\" (UID: \"18c09849-702b-4939-9cf1-0e06c6adc889\") " Jan 20 17:51:08 crc kubenswrapper[4558]: I0120 17:51:08.945797 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/581c2419-48d9-4b7d-b71b-ce5cceb0326d-combined-ca-bundle\") pod \"581c2419-48d9-4b7d-b71b-ce5cceb0326d\" (UID: \"581c2419-48d9-4b7d-b71b-ce5cceb0326d\") " Jan 20 17:51:08 crc kubenswrapper[4558]: I0120 17:51:08.945824 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/18c09849-702b-4939-9cf1-0e06c6adc889-logs\") pod \"18c09849-702b-4939-9cf1-0e06c6adc889\" (UID: \"18c09849-702b-4939-9cf1-0e06c6adc889\") " Jan 20 17:51:08 crc kubenswrapper[4558]: I0120 17:51:08.945824 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:51:08 crc kubenswrapper[4558]: I0120 17:51:08.945860 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/581c2419-48d9-4b7d-b71b-ce5cceb0326d-config-data\") pod \"581c2419-48d9-4b7d-b71b-ce5cceb0326d\" (UID: \"581c2419-48d9-4b7d-b71b-ce5cceb0326d\") " Jan 20 17:51:08 crc kubenswrapper[4558]: I0120 17:51:08.951905 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/18c09849-702b-4939-9cf1-0e06c6adc889-logs" (OuterVolumeSpecName: "logs") pod "18c09849-702b-4939-9cf1-0e06c6adc889" (UID: "18c09849-702b-4939-9cf1-0e06c6adc889"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:51:08 crc kubenswrapper[4558]: I0120 17:51:08.952070 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/18c09849-702b-4939-9cf1-0e06c6adc889-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "18c09849-702b-4939-9cf1-0e06c6adc889" (UID: "18c09849-702b-4939-9cf1-0e06c6adc889"). 
InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:51:08 crc kubenswrapper[4558]: I0120 17:51:08.981598 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/18c09849-702b-4939-9cf1-0e06c6adc889-scripts" (OuterVolumeSpecName: "scripts") pod "18c09849-702b-4939-9cf1-0e06c6adc889" (UID: "18c09849-702b-4939-9cf1-0e06c6adc889"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:08 crc kubenswrapper[4558]: I0120 17:51:08.995949 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "glance") pod "18c09849-702b-4939-9cf1-0e06c6adc889" (UID: "18c09849-702b-4939-9cf1-0e06c6adc889"). InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.006481 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/18c09849-702b-4939-9cf1-0e06c6adc889-kube-api-access-fg8wx" (OuterVolumeSpecName: "kube-api-access-fg8wx") pod "18c09849-702b-4939-9cf1-0e06c6adc889" (UID: "18c09849-702b-4939-9cf1-0e06c6adc889"). InnerVolumeSpecName "kube-api-access-fg8wx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.012412 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/581c2419-48d9-4b7d-b71b-ce5cceb0326d-kube-api-access-kcptj" (OuterVolumeSpecName: "kube-api-access-kcptj") pod "581c2419-48d9-4b7d-b71b-ce5cceb0326d" (UID: "581c2419-48d9-4b7d-b71b-ce5cceb0326d"). InnerVolumeSpecName "kube-api-access-kcptj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.029310 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/18c09849-702b-4939-9cf1-0e06c6adc889-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "18c09849-702b-4939-9cf1-0e06c6adc889" (UID: "18c09849-702b-4939-9cf1-0e06c6adc889"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.050244 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.050272 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/18c09849-702b-4939-9cf1-0e06c6adc889-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.050285 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fg8wx\" (UniqueName: \"kubernetes.io/projected/18c09849-702b-4939-9cf1-0e06c6adc889-kube-api-access-fg8wx\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.050297 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/18c09849-702b-4939-9cf1-0e06c6adc889-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.050306 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18c09849-702b-4939-9cf1-0e06c6adc889-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.050316 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kcptj\" (UniqueName: \"kubernetes.io/projected/581c2419-48d9-4b7d-b71b-ce5cceb0326d-kube-api-access-kcptj\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.050325 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/18c09849-702b-4939-9cf1-0e06c6adc889-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.175275 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/581c2419-48d9-4b7d-b71b-ce5cceb0326d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "581c2419-48d9-4b7d-b71b-ce5cceb0326d" (UID: "581c2419-48d9-4b7d-b71b-ce5cceb0326d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.175612 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/581c2419-48d9-4b7d-b71b-ce5cceb0326d-config-data" (OuterVolumeSpecName: "config-data") pod "581c2419-48d9-4b7d-b71b-ce5cceb0326d" (UID: "581c2419-48d9-4b7d-b71b-ce5cceb0326d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.192608 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/581c2419-48d9-4b7d-b71b-ce5cceb0326d-vencrypt-tls-certs" (OuterVolumeSpecName: "vencrypt-tls-certs") pod "581c2419-48d9-4b7d-b71b-ce5cceb0326d" (UID: "581c2419-48d9-4b7d-b71b-ce5cceb0326d"). InnerVolumeSpecName "vencrypt-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.198304 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.217289 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/18c09849-702b-4939-9cf1-0e06c6adc889-config-data" (OuterVolumeSpecName: "config-data") pod "18c09849-702b-4939-9cf1-0e06c6adc889" (UID: "18c09849-702b-4939-9cf1-0e06c6adc889"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.245299 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/18c09849-702b-4939-9cf1-0e06c6adc889-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "18c09849-702b-4939-9cf1-0e06c6adc889" (UID: "18c09849-702b-4939-9cf1-0e06c6adc889"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.257311 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/581c2419-48d9-4b7d-b71b-ce5cceb0326d-nova-novncproxy-tls-certs" (OuterVolumeSpecName: "nova-novncproxy-tls-certs") pod "581c2419-48d9-4b7d-b71b-ce5cceb0326d" (UID: "581c2419-48d9-4b7d-b71b-ce5cceb0326d"). InnerVolumeSpecName "nova-novncproxy-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.260907 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.260940 4558 reconciler_common.go:293] "Volume detached for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/581c2419-48d9-4b7d-b71b-ce5cceb0326d-vencrypt-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.260953 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/18c09849-702b-4939-9cf1-0e06c6adc889-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.260965 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/581c2419-48d9-4b7d-b71b-ce5cceb0326d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.260975 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/581c2419-48d9-4b7d-b71b-ce5cceb0326d-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.260983 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/18c09849-702b-4939-9cf1-0e06c6adc889-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.261002 4558 reconciler_common.go:293] "Volume detached for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/581c2419-48d9-4b7d-b71b-ce5cceb0326d-nova-novncproxy-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 
17:51:09.306393 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.362240 4558 generic.go:334] "Generic (PLEG): container finished" podID="18c09849-702b-4939-9cf1-0e06c6adc889" containerID="bf79c6fdc0dfbb952c1fe2ab45e02ef4b273e3d1165a858079afaf36ab6764cd" exitCode=0 Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.362322 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"18c09849-702b-4939-9cf1-0e06c6adc889","Type":"ContainerDied","Data":"bf79c6fdc0dfbb952c1fe2ab45e02ef4b273e3d1165a858079afaf36ab6764cd"} Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.362358 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"18c09849-702b-4939-9cf1-0e06c6adc889","Type":"ContainerDied","Data":"6a981162c7819e3aa3291e5b10f94a92729fd9ddc78ee4e6cabaa569e4490108"} Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.362377 4558 scope.go:117] "RemoveContainer" containerID="bf79c6fdc0dfbb952c1fe2ab45e02ef4b273e3d1165a858079afaf36ab6764cd" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.362648 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.390684 4558 generic.go:334] "Generic (PLEG): container finished" podID="d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2" containerID="1070e671e60499543eff1520064c8d7b1151ff81f9de7c5dd18ebcbdbdbbe487" exitCode=1 Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.390763 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2","Type":"ContainerDied","Data":"1070e671e60499543eff1520064c8d7b1151ff81f9de7c5dd18ebcbdbdbbe487"} Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.391764 4558 scope.go:117] "RemoveContainer" containerID="1070e671e60499543eff1520064c8d7b1151ff81f9de7c5dd18ebcbdbdbbe487" Jan 20 17:51:09 crc kubenswrapper[4558]: E0120 17:51:09.392125 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-scheduler-scheduler\" with CrashLoopBackOff: \"back-off 10s restarting failed container=nova-scheduler-scheduler pod=nova-scheduler-0_openstack-kuttl-tests(d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2)\"" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.420404 4558 generic.go:334] "Generic (PLEG): container finished" podID="01835185-5ca5-467a-9362-b8de3e8665ed" containerID="d6739fd95d667740a4669ec34550d5b750a50b385b78c2f22b17ab41a028c741" exitCode=143 Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.420494 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"01835185-5ca5-467a-9362-b8de3e8665ed","Type":"ContainerDied","Data":"d6739fd95d667740a4669ec34550d5b750a50b385b78c2f22b17ab41a028c741"} Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.458804 4558 generic.go:334] "Generic (PLEG): container finished" podID="581c2419-48d9-4b7d-b71b-ce5cceb0326d" containerID="cbf05244e311e00d3dfc678229a149f12f8b205c75c63c8b93a994123c6583c1" exitCode=0 Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.463707 4558 util.go:48] "No ready sandbox for pod can 
be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.478192 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"581c2419-48d9-4b7d-b71b-ce5cceb0326d","Type":"ContainerDied","Data":"cbf05244e311e00d3dfc678229a149f12f8b205c75c63c8b93a994123c6583c1"} Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.482754 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"581c2419-48d9-4b7d-b71b-ce5cceb0326d","Type":"ContainerDied","Data":"f4adebd8014d94828341921446ab5b98374cb2ca14e4d4d3889259bba2f533fe"} Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.492375 4558 scope.go:117] "RemoveContainer" containerID="e77fe97286d1a350d048ece8ea776baa3b6bb50555cead4bb9310050977c990c" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.515252 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.530559 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.534474 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:51:09 crc kubenswrapper[4558]: E0120 17:51:09.535703 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="581c2419-48d9-4b7d-b71b-ce5cceb0326d" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.535729 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="581c2419-48d9-4b7d-b71b-ce5cceb0326d" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:51:09 crc kubenswrapper[4558]: E0120 17:51:09.536030 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="18c09849-702b-4939-9cf1-0e06c6adc889" containerName="glance-log" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.536047 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="18c09849-702b-4939-9cf1-0e06c6adc889" containerName="glance-log" Jan 20 17:51:09 crc kubenswrapper[4558]: E0120 17:51:09.536077 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="18c09849-702b-4939-9cf1-0e06c6adc889" containerName="glance-httpd" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.536084 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="18c09849-702b-4939-9cf1-0e06c6adc889" containerName="glance-httpd" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.536258 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="581c2419-48d9-4b7d-b71b-ce5cceb0326d" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.536294 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="18c09849-702b-4939-9cf1-0e06c6adc889" containerName="glance-httpd" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.536308 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="18c09849-702b-4939-9cf1-0e06c6adc889" containerName="glance-log" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.537330 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.556624 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-internal-config-data" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.556819 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-internal-svc" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.565140 4558 scope.go:117] "RemoveContainer" containerID="bf79c6fdc0dfbb952c1fe2ab45e02ef4b273e3d1165a858079afaf36ab6764cd" Jan 20 17:51:09 crc kubenswrapper[4558]: E0120 17:51:09.568606 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bf79c6fdc0dfbb952c1fe2ab45e02ef4b273e3d1165a858079afaf36ab6764cd\": container with ID starting with bf79c6fdc0dfbb952c1fe2ab45e02ef4b273e3d1165a858079afaf36ab6764cd not found: ID does not exist" containerID="bf79c6fdc0dfbb952c1fe2ab45e02ef4b273e3d1165a858079afaf36ab6764cd" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.568660 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bf79c6fdc0dfbb952c1fe2ab45e02ef4b273e3d1165a858079afaf36ab6764cd"} err="failed to get container status \"bf79c6fdc0dfbb952c1fe2ab45e02ef4b273e3d1165a858079afaf36ab6764cd\": rpc error: code = NotFound desc = could not find container \"bf79c6fdc0dfbb952c1fe2ab45e02ef4b273e3d1165a858079afaf36ab6764cd\": container with ID starting with bf79c6fdc0dfbb952c1fe2ab45e02ef4b273e3d1165a858079afaf36ab6764cd not found: ID does not exist" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.568682 4558 scope.go:117] "RemoveContainer" containerID="e77fe97286d1a350d048ece8ea776baa3b6bb50555cead4bb9310050977c990c" Jan 20 17:51:09 crc kubenswrapper[4558]: E0120 17:51:09.575596 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e77fe97286d1a350d048ece8ea776baa3b6bb50555cead4bb9310050977c990c\": container with ID starting with e77fe97286d1a350d048ece8ea776baa3b6bb50555cead4bb9310050977c990c not found: ID does not exist" containerID="e77fe97286d1a350d048ece8ea776baa3b6bb50555cead4bb9310050977c990c" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.575640 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e77fe97286d1a350d048ece8ea776baa3b6bb50555cead4bb9310050977c990c"} err="failed to get container status \"e77fe97286d1a350d048ece8ea776baa3b6bb50555cead4bb9310050977c990c\": rpc error: code = NotFound desc = could not find container \"e77fe97286d1a350d048ece8ea776baa3b6bb50555cead4bb9310050977c990c\": container with ID starting with e77fe97286d1a350d048ece8ea776baa3b6bb50555cead4bb9310050977c990c not found: ID does not exist" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.575665 4558 scope.go:117] "RemoveContainer" containerID="6d47d3d41183f226f75449e13968a59a7a2d75eca49f1c5ad158b78e20b4ff2a" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.612439 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.640227 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.647057 4558 kubelet.go:2431] 
"SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.650316 4558 scope.go:117] "RemoveContainer" containerID="cbf05244e311e00d3dfc678229a149f12f8b205c75c63c8b93a994123c6583c1" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.651574 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.652983 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.655401 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-novncproxy-cell1-public-svc" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.655954 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-novncproxy-cell1-vencrypt" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.656121 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-novncproxy-config-data" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.662897 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.678138 4558 scope.go:117] "RemoveContainer" containerID="cbf05244e311e00d3dfc678229a149f12f8b205c75c63c8b93a994123c6583c1" Jan 20 17:51:09 crc kubenswrapper[4558]: E0120 17:51:09.678592 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cbf05244e311e00d3dfc678229a149f12f8b205c75c63c8b93a994123c6583c1\": container with ID starting with cbf05244e311e00d3dfc678229a149f12f8b205c75c63c8b93a994123c6583c1 not found: ID does not exist" containerID="cbf05244e311e00d3dfc678229a149f12f8b205c75c63c8b93a994123c6583c1" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.678629 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cbf05244e311e00d3dfc678229a149f12f8b205c75c63c8b93a994123c6583c1"} err="failed to get container status \"cbf05244e311e00d3dfc678229a149f12f8b205c75c63c8b93a994123c6583c1\": rpc error: code = NotFound desc = could not find container \"cbf05244e311e00d3dfc678229a149f12f8b205c75c63c8b93a994123c6583c1\": container with ID starting with cbf05244e311e00d3dfc678229a149f12f8b205c75c63c8b93a994123c6583c1 not found: ID does not exist" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.694491 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"d73e13db-6a2c-4205-aabc-1c7671e07895\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.694572 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d73e13db-6a2c-4205-aabc-1c7671e07895-scripts\") pod \"glance-default-internal-api-0\" (UID: \"d73e13db-6a2c-4205-aabc-1c7671e07895\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.694636 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d73e13db-6a2c-4205-aabc-1c7671e07895-logs\") pod \"glance-default-internal-api-0\" (UID: \"d73e13db-6a2c-4205-aabc-1c7671e07895\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.694735 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d73e13db-6a2c-4205-aabc-1c7671e07895-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"d73e13db-6a2c-4205-aabc-1c7671e07895\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.695138 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d73e13db-6a2c-4205-aabc-1c7671e07895-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"d73e13db-6a2c-4205-aabc-1c7671e07895\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.695223 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4tnnc\" (UniqueName: \"kubernetes.io/projected/d73e13db-6a2c-4205-aabc-1c7671e07895-kube-api-access-4tnnc\") pod \"glance-default-internal-api-0\" (UID: \"d73e13db-6a2c-4205-aabc-1c7671e07895\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.695282 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d73e13db-6a2c-4205-aabc-1c7671e07895-config-data\") pod \"glance-default-internal-api-0\" (UID: \"d73e13db-6a2c-4205-aabc-1c7671e07895\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.695435 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d73e13db-6a2c-4205-aabc-1c7671e07895-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"d73e13db-6a2c-4205-aabc-1c7671e07895\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.760381 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:51:09 crc kubenswrapper[4558]: E0120 17:51:09.762101 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[combined-ca-bundle config-data kube-api-access-cqslf nova-novncproxy-tls-certs vencrypt-tls-certs], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" podUID="891838f3-4f66-4fe0-85c0-6a538dfbe364" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.797290 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d73e13db-6a2c-4205-aabc-1c7671e07895-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"d73e13db-6a2c-4205-aabc-1c7671e07895\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.797692 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: 
\"kubernetes.io/empty-dir/d73e13db-6a2c-4205-aabc-1c7671e07895-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"d73e13db-6a2c-4205-aabc-1c7671e07895\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.797782 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/891838f3-4f66-4fe0-85c0-6a538dfbe364-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"891838f3-4f66-4fe0-85c0-6a538dfbe364\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.797851 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4tnnc\" (UniqueName: \"kubernetes.io/projected/d73e13db-6a2c-4205-aabc-1c7671e07895-kube-api-access-4tnnc\") pod \"glance-default-internal-api-0\" (UID: \"d73e13db-6a2c-4205-aabc-1c7671e07895\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.797876 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cqslf\" (UniqueName: \"kubernetes.io/projected/891838f3-4f66-4fe0-85c0-6a538dfbe364-kube-api-access-cqslf\") pod \"nova-cell1-novncproxy-0\" (UID: \"891838f3-4f66-4fe0-85c0-6a538dfbe364\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.797910 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d73e13db-6a2c-4205-aabc-1c7671e07895-config-data\") pod \"glance-default-internal-api-0\" (UID: \"d73e13db-6a2c-4205-aabc-1c7671e07895\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.797981 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/891838f3-4f66-4fe0-85c0-6a538dfbe364-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"891838f3-4f66-4fe0-85c0-6a538dfbe364\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.798012 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/891838f3-4f66-4fe0-85c0-6a538dfbe364-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"891838f3-4f66-4fe0-85c0-6a538dfbe364\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.798957 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d73e13db-6a2c-4205-aabc-1c7671e07895-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"d73e13db-6a2c-4205-aabc-1c7671e07895\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.799009 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"d73e13db-6a2c-4205-aabc-1c7671e07895\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.799039 4558 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d73e13db-6a2c-4205-aabc-1c7671e07895-scripts\") pod \"glance-default-internal-api-0\" (UID: \"d73e13db-6a2c-4205-aabc-1c7671e07895\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.799066 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/891838f3-4f66-4fe0-85c0-6a538dfbe364-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"891838f3-4f66-4fe0-85c0-6a538dfbe364\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.799087 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d73e13db-6a2c-4205-aabc-1c7671e07895-logs\") pod \"glance-default-internal-api-0\" (UID: \"d73e13db-6a2c-4205-aabc-1c7671e07895\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.799110 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d73e13db-6a2c-4205-aabc-1c7671e07895-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"d73e13db-6a2c-4205-aabc-1c7671e07895\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.799238 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"d73e13db-6a2c-4205-aabc-1c7671e07895\") device mount path \"/mnt/openstack/pv11\"" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.799744 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d73e13db-6a2c-4205-aabc-1c7671e07895-logs\") pod \"glance-default-internal-api-0\" (UID: \"d73e13db-6a2c-4205-aabc-1c7671e07895\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.804279 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d73e13db-6a2c-4205-aabc-1c7671e07895-config-data\") pod \"glance-default-internal-api-0\" (UID: \"d73e13db-6a2c-4205-aabc-1c7671e07895\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.804552 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d73e13db-6a2c-4205-aabc-1c7671e07895-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"d73e13db-6a2c-4205-aabc-1c7671e07895\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.805581 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d73e13db-6a2c-4205-aabc-1c7671e07895-scripts\") pod \"glance-default-internal-api-0\" (UID: \"d73e13db-6a2c-4205-aabc-1c7671e07895\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 
17:51:09.807964 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d73e13db-6a2c-4205-aabc-1c7671e07895-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"d73e13db-6a2c-4205-aabc-1c7671e07895\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.815865 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4tnnc\" (UniqueName: \"kubernetes.io/projected/d73e13db-6a2c-4205-aabc-1c7671e07895-kube-api-access-4tnnc\") pod \"glance-default-internal-api-0\" (UID: \"d73e13db-6a2c-4205-aabc-1c7671e07895\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.825553 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"d73e13db-6a2c-4205-aabc-1c7671e07895\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.877209 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.902739 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/891838f3-4f66-4fe0-85c0-6a538dfbe364-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"891838f3-4f66-4fe0-85c0-6a538dfbe364\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.903210 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqslf\" (UniqueName: \"kubernetes.io/projected/891838f3-4f66-4fe0-85c0-6a538dfbe364-kube-api-access-cqslf\") pod \"nova-cell1-novncproxy-0\" (UID: \"891838f3-4f66-4fe0-85c0-6a538dfbe364\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.903302 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/891838f3-4f66-4fe0-85c0-6a538dfbe364-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"891838f3-4f66-4fe0-85c0-6a538dfbe364\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.903325 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/891838f3-4f66-4fe0-85c0-6a538dfbe364-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"891838f3-4f66-4fe0-85c0-6a538dfbe364\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:51:09 crc kubenswrapper[4558]: I0120 17:51:09.903520 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/891838f3-4f66-4fe0-85c0-6a538dfbe364-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"891838f3-4f66-4fe0-85c0-6a538dfbe364\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:51:10 crc kubenswrapper[4558]: I0120 17:51:10.296076 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/891838f3-4f66-4fe0-85c0-6a538dfbe364-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"891838f3-4f66-4fe0-85c0-6a538dfbe364\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:51:10 crc kubenswrapper[4558]: I0120 17:51:10.296302 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/891838f3-4f66-4fe0-85c0-6a538dfbe364-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"891838f3-4f66-4fe0-85c0-6a538dfbe364\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:51:10 crc kubenswrapper[4558]: I0120 17:51:10.296420 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/891838f3-4f66-4fe0-85c0-6a538dfbe364-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"891838f3-4f66-4fe0-85c0-6a538dfbe364\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:51:10 crc kubenswrapper[4558]: I0120 17:51:10.297147 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/891838f3-4f66-4fe0-85c0-6a538dfbe364-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"891838f3-4f66-4fe0-85c0-6a538dfbe364\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:51:10 crc kubenswrapper[4558]: I0120 17:51:10.297682 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqslf\" (UniqueName: \"kubernetes.io/projected/891838f3-4f66-4fe0-85c0-6a538dfbe364-kube-api-access-cqslf\") pod \"nova-cell1-novncproxy-0\" (UID: \"891838f3-4f66-4fe0-85c0-6a538dfbe364\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:51:10 crc kubenswrapper[4558]: I0120 17:51:10.486062 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:51:10 crc kubenswrapper[4558]: I0120 17:51:10.506458 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-api-6fb4445c46-pmjxt"] Jan 20 17:51:10 crc kubenswrapper[4558]: I0120 17:51:10.506717 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-api-6fb4445c46-pmjxt" podUID="b2ed54c7-3249-4a62-8d98-21618cb6dab6" containerName="barbican-api-log" containerID="cri-o://b9249d1892527138cc235d58186150d42903080a9231b4c9631a998629089336" gracePeriod=30 Jan 20 17:51:10 crc kubenswrapper[4558]: I0120 17:51:10.507194 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-api-6fb4445c46-pmjxt" podUID="b2ed54c7-3249-4a62-8d98-21618cb6dab6" containerName="barbican-api" containerID="cri-o://913497c86e1fd7122fec98b093c66895e1a8361995489d6c8fda3deef690c7ef" gracePeriod=30 Jan 20 17:51:10 crc kubenswrapper[4558]: I0120 17:51:10.508512 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:51:10 crc kubenswrapper[4558]: I0120 17:51:10.528449 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack-kuttl-tests/barbican-api-6fb4445c46-pmjxt" podUID="b2ed54c7-3249-4a62-8d98-21618cb6dab6" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.1.212:9311/healthcheck\": EOF" Jan 20 17:51:10 crc kubenswrapper[4558]: I0120 17:51:10.528473 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/barbican-api-6fb4445c46-pmjxt" podUID="b2ed54c7-3249-4a62-8d98-21618cb6dab6" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.1.212:9311/healthcheck\": EOF" Jan 20 17:51:10 crc kubenswrapper[4558]: I0120 17:51:10.528891 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/barbican-api-6fb4445c46-pmjxt" podUID="b2ed54c7-3249-4a62-8d98-21618cb6dab6" containerName="barbican-api" probeResult="failure" output="Get \"https://10.217.1.212:9311/healthcheck\": EOF" Jan 20 17:51:10 crc kubenswrapper[4558]: I0120 17:51:10.529308 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack-kuttl-tests/barbican-api-6fb4445c46-pmjxt" podUID="b2ed54c7-3249-4a62-8d98-21618cb6dab6" containerName="barbican-api" probeResult="failure" output="Get \"https://10.217.1.212:9311/healthcheck\": EOF" Jan 20 17:51:10 crc kubenswrapper[4558]: I0120 17:51:10.538493 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-api-7cd9889bc6-72r2h"] Jan 20 17:51:10 crc kubenswrapper[4558]: I0120 17:51:10.543048 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-api-7cd9889bc6-72r2h" Jan 20 17:51:10 crc kubenswrapper[4558]: I0120 17:51:10.566974 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-api-7cd9889bc6-72r2h"] Jan 20 17:51:10 crc kubenswrapper[4558]: I0120 17:51:10.601493 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="18c09849-702b-4939-9cf1-0e06c6adc889" path="/var/lib/kubelet/pods/18c09849-702b-4939-9cf1-0e06c6adc889/volumes" Jan 20 17:51:10 crc kubenswrapper[4558]: I0120 17:51:10.602354 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="581c2419-48d9-4b7d-b71b-ce5cceb0326d" path="/var/lib/kubelet/pods/581c2419-48d9-4b7d-b71b-ce5cceb0326d/volumes" Jan 20 17:51:10 crc kubenswrapper[4558]: I0120 17:51:10.622850 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/891838f3-4f66-4fe0-85c0-6a538dfbe364-vencrypt-tls-certs\") pod \"891838f3-4f66-4fe0-85c0-6a538dfbe364\" (UID: \"891838f3-4f66-4fe0-85c0-6a538dfbe364\") " Jan 20 17:51:10 crc kubenswrapper[4558]: I0120 17:51:10.623058 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/891838f3-4f66-4fe0-85c0-6a538dfbe364-combined-ca-bundle\") pod \"891838f3-4f66-4fe0-85c0-6a538dfbe364\" (UID: \"891838f3-4f66-4fe0-85c0-6a538dfbe364\") " Jan 20 17:51:10 crc kubenswrapper[4558]: I0120 17:51:10.623691 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/891838f3-4f66-4fe0-85c0-6a538dfbe364-nova-novncproxy-tls-certs\") pod \"891838f3-4f66-4fe0-85c0-6a538dfbe364\" (UID: 
\"891838f3-4f66-4fe0-85c0-6a538dfbe364\") " Jan 20 17:51:10 crc kubenswrapper[4558]: I0120 17:51:10.623758 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cqslf\" (UniqueName: \"kubernetes.io/projected/891838f3-4f66-4fe0-85c0-6a538dfbe364-kube-api-access-cqslf\") pod \"891838f3-4f66-4fe0-85c0-6a538dfbe364\" (UID: \"891838f3-4f66-4fe0-85c0-6a538dfbe364\") " Jan 20 17:51:10 crc kubenswrapper[4558]: I0120 17:51:10.623874 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/891838f3-4f66-4fe0-85c0-6a538dfbe364-config-data\") pod \"891838f3-4f66-4fe0-85c0-6a538dfbe364\" (UID: \"891838f3-4f66-4fe0-85c0-6a538dfbe364\") " Jan 20 17:51:10 crc kubenswrapper[4558]: I0120 17:51:10.624292 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c1e7ea27-3179-4c51-a9d9-2422702f08fa-logs\") pod \"barbican-api-7cd9889bc6-72r2h\" (UID: \"c1e7ea27-3179-4c51-a9d9-2422702f08fa\") " pod="openstack-kuttl-tests/barbican-api-7cd9889bc6-72r2h" Jan 20 17:51:10 crc kubenswrapper[4558]: I0120 17:51:10.624342 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1e7ea27-3179-4c51-a9d9-2422702f08fa-combined-ca-bundle\") pod \"barbican-api-7cd9889bc6-72r2h\" (UID: \"c1e7ea27-3179-4c51-a9d9-2422702f08fa\") " pod="openstack-kuttl-tests/barbican-api-7cd9889bc6-72r2h" Jan 20 17:51:10 crc kubenswrapper[4558]: I0120 17:51:10.624451 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c1e7ea27-3179-4c51-a9d9-2422702f08fa-config-data\") pod \"barbican-api-7cd9889bc6-72r2h\" (UID: \"c1e7ea27-3179-4c51-a9d9-2422702f08fa\") " pod="openstack-kuttl-tests/barbican-api-7cd9889bc6-72r2h" Jan 20 17:51:10 crc kubenswrapper[4558]: I0120 17:51:10.624540 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c1e7ea27-3179-4c51-a9d9-2422702f08fa-public-tls-certs\") pod \"barbican-api-7cd9889bc6-72r2h\" (UID: \"c1e7ea27-3179-4c51-a9d9-2422702f08fa\") " pod="openstack-kuttl-tests/barbican-api-7cd9889bc6-72r2h" Jan 20 17:51:10 crc kubenswrapper[4558]: I0120 17:51:10.624599 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4dtkf\" (UniqueName: \"kubernetes.io/projected/c1e7ea27-3179-4c51-a9d9-2422702f08fa-kube-api-access-4dtkf\") pod \"barbican-api-7cd9889bc6-72r2h\" (UID: \"c1e7ea27-3179-4c51-a9d9-2422702f08fa\") " pod="openstack-kuttl-tests/barbican-api-7cd9889bc6-72r2h" Jan 20 17:51:10 crc kubenswrapper[4558]: I0120 17:51:10.624754 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c1e7ea27-3179-4c51-a9d9-2422702f08fa-config-data-custom\") pod \"barbican-api-7cd9889bc6-72r2h\" (UID: \"c1e7ea27-3179-4c51-a9d9-2422702f08fa\") " pod="openstack-kuttl-tests/barbican-api-7cd9889bc6-72r2h" Jan 20 17:51:10 crc kubenswrapper[4558]: I0120 17:51:10.624887 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/c1e7ea27-3179-4c51-a9d9-2422702f08fa-internal-tls-certs\") pod \"barbican-api-7cd9889bc6-72r2h\" (UID: \"c1e7ea27-3179-4c51-a9d9-2422702f08fa\") " pod="openstack-kuttl-tests/barbican-api-7cd9889bc6-72r2h" Jan 20 17:51:10 crc kubenswrapper[4558]: I0120 17:51:10.630293 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/891838f3-4f66-4fe0-85c0-6a538dfbe364-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "891838f3-4f66-4fe0-85c0-6a538dfbe364" (UID: "891838f3-4f66-4fe0-85c0-6a538dfbe364"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:10 crc kubenswrapper[4558]: I0120 17:51:10.640318 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/891838f3-4f66-4fe0-85c0-6a538dfbe364-config-data" (OuterVolumeSpecName: "config-data") pod "891838f3-4f66-4fe0-85c0-6a538dfbe364" (UID: "891838f3-4f66-4fe0-85c0-6a538dfbe364"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:10 crc kubenswrapper[4558]: I0120 17:51:10.640343 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/891838f3-4f66-4fe0-85c0-6a538dfbe364-vencrypt-tls-certs" (OuterVolumeSpecName: "vencrypt-tls-certs") pod "891838f3-4f66-4fe0-85c0-6a538dfbe364" (UID: "891838f3-4f66-4fe0-85c0-6a538dfbe364"). InnerVolumeSpecName "vencrypt-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:10 crc kubenswrapper[4558]: I0120 17:51:10.640289 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/891838f3-4f66-4fe0-85c0-6a538dfbe364-nova-novncproxy-tls-certs" (OuterVolumeSpecName: "nova-novncproxy-tls-certs") pod "891838f3-4f66-4fe0-85c0-6a538dfbe364" (UID: "891838f3-4f66-4fe0-85c0-6a538dfbe364"). InnerVolumeSpecName "nova-novncproxy-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:10 crc kubenswrapper[4558]: I0120 17:51:10.658068 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/891838f3-4f66-4fe0-85c0-6a538dfbe364-kube-api-access-cqslf" (OuterVolumeSpecName: "kube-api-access-cqslf") pod "891838f3-4f66-4fe0-85c0-6a538dfbe364" (UID: "891838f3-4f66-4fe0-85c0-6a538dfbe364"). InnerVolumeSpecName "kube-api-access-cqslf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:51:10 crc kubenswrapper[4558]: I0120 17:51:10.726981 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c1e7ea27-3179-4c51-a9d9-2422702f08fa-internal-tls-certs\") pod \"barbican-api-7cd9889bc6-72r2h\" (UID: \"c1e7ea27-3179-4c51-a9d9-2422702f08fa\") " pod="openstack-kuttl-tests/barbican-api-7cd9889bc6-72r2h" Jan 20 17:51:10 crc kubenswrapper[4558]: I0120 17:51:10.727096 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c1e7ea27-3179-4c51-a9d9-2422702f08fa-logs\") pod \"barbican-api-7cd9889bc6-72r2h\" (UID: \"c1e7ea27-3179-4c51-a9d9-2422702f08fa\") " pod="openstack-kuttl-tests/barbican-api-7cd9889bc6-72r2h" Jan 20 17:51:10 crc kubenswrapper[4558]: I0120 17:51:10.727139 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1e7ea27-3179-4c51-a9d9-2422702f08fa-combined-ca-bundle\") pod \"barbican-api-7cd9889bc6-72r2h\" (UID: \"c1e7ea27-3179-4c51-a9d9-2422702f08fa\") " pod="openstack-kuttl-tests/barbican-api-7cd9889bc6-72r2h" Jan 20 17:51:10 crc kubenswrapper[4558]: I0120 17:51:10.727197 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c1e7ea27-3179-4c51-a9d9-2422702f08fa-config-data\") pod \"barbican-api-7cd9889bc6-72r2h\" (UID: \"c1e7ea27-3179-4c51-a9d9-2422702f08fa\") " pod="openstack-kuttl-tests/barbican-api-7cd9889bc6-72r2h" Jan 20 17:51:10 crc kubenswrapper[4558]: I0120 17:51:10.727247 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c1e7ea27-3179-4c51-a9d9-2422702f08fa-public-tls-certs\") pod \"barbican-api-7cd9889bc6-72r2h\" (UID: \"c1e7ea27-3179-4c51-a9d9-2422702f08fa\") " pod="openstack-kuttl-tests/barbican-api-7cd9889bc6-72r2h" Jan 20 17:51:10 crc kubenswrapper[4558]: I0120 17:51:10.727324 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4dtkf\" (UniqueName: \"kubernetes.io/projected/c1e7ea27-3179-4c51-a9d9-2422702f08fa-kube-api-access-4dtkf\") pod \"barbican-api-7cd9889bc6-72r2h\" (UID: \"c1e7ea27-3179-4c51-a9d9-2422702f08fa\") " pod="openstack-kuttl-tests/barbican-api-7cd9889bc6-72r2h" Jan 20 17:51:10 crc kubenswrapper[4558]: I0120 17:51:10.727439 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c1e7ea27-3179-4c51-a9d9-2422702f08fa-config-data-custom\") pod \"barbican-api-7cd9889bc6-72r2h\" (UID: \"c1e7ea27-3179-4c51-a9d9-2422702f08fa\") " pod="openstack-kuttl-tests/barbican-api-7cd9889bc6-72r2h" Jan 20 17:51:10 crc kubenswrapper[4558]: I0120 17:51:10.727510 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/891838f3-4f66-4fe0-85c0-6a538dfbe364-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:10 crc kubenswrapper[4558]: I0120 17:51:10.727526 4558 reconciler_common.go:293] "Volume detached for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/891838f3-4f66-4fe0-85c0-6a538dfbe364-vencrypt-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:10 crc kubenswrapper[4558]: I0120 17:51:10.727537 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/891838f3-4f66-4fe0-85c0-6a538dfbe364-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:10 crc kubenswrapper[4558]: I0120 17:51:10.727547 4558 reconciler_common.go:293] "Volume detached for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/891838f3-4f66-4fe0-85c0-6a538dfbe364-nova-novncproxy-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:10 crc kubenswrapper[4558]: I0120 17:51:10.727556 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cqslf\" (UniqueName: \"kubernetes.io/projected/891838f3-4f66-4fe0-85c0-6a538dfbe364-kube-api-access-cqslf\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:10 crc kubenswrapper[4558]: I0120 17:51:10.731857 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c1e7ea27-3179-4c51-a9d9-2422702f08fa-logs\") pod \"barbican-api-7cd9889bc6-72r2h\" (UID: \"c1e7ea27-3179-4c51-a9d9-2422702f08fa\") " pod="openstack-kuttl-tests/barbican-api-7cd9889bc6-72r2h" Jan 20 17:51:10 crc kubenswrapper[4558]: I0120 17:51:10.732428 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c1e7ea27-3179-4c51-a9d9-2422702f08fa-internal-tls-certs\") pod \"barbican-api-7cd9889bc6-72r2h\" (UID: \"c1e7ea27-3179-4c51-a9d9-2422702f08fa\") " pod="openstack-kuttl-tests/barbican-api-7cd9889bc6-72r2h" Jan 20 17:51:10 crc kubenswrapper[4558]: I0120 17:51:10.732652 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c1e7ea27-3179-4c51-a9d9-2422702f08fa-config-data-custom\") pod \"barbican-api-7cd9889bc6-72r2h\" (UID: \"c1e7ea27-3179-4c51-a9d9-2422702f08fa\") " pod="openstack-kuttl-tests/barbican-api-7cd9889bc6-72r2h" Jan 20 17:51:10 crc kubenswrapper[4558]: I0120 17:51:10.735100 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c1e7ea27-3179-4c51-a9d9-2422702f08fa-config-data\") pod \"barbican-api-7cd9889bc6-72r2h\" (UID: \"c1e7ea27-3179-4c51-a9d9-2422702f08fa\") " pod="openstack-kuttl-tests/barbican-api-7cd9889bc6-72r2h" Jan 20 17:51:10 crc kubenswrapper[4558]: I0120 17:51:10.735536 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1e7ea27-3179-4c51-a9d9-2422702f08fa-combined-ca-bundle\") pod \"barbican-api-7cd9889bc6-72r2h\" (UID: \"c1e7ea27-3179-4c51-a9d9-2422702f08fa\") " pod="openstack-kuttl-tests/barbican-api-7cd9889bc6-72r2h" Jan 20 17:51:10 crc kubenswrapper[4558]: I0120 17:51:10.738842 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c1e7ea27-3179-4c51-a9d9-2422702f08fa-public-tls-certs\") pod \"barbican-api-7cd9889bc6-72r2h\" (UID: \"c1e7ea27-3179-4c51-a9d9-2422702f08fa\") " pod="openstack-kuttl-tests/barbican-api-7cd9889bc6-72r2h" Jan 20 17:51:10 crc kubenswrapper[4558]: I0120 17:51:10.749437 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4dtkf\" (UniqueName: \"kubernetes.io/projected/c1e7ea27-3179-4c51-a9d9-2422702f08fa-kube-api-access-4dtkf\") pod \"barbican-api-7cd9889bc6-72r2h\" (UID: \"c1e7ea27-3179-4c51-a9d9-2422702f08fa\") " pod="openstack-kuttl-tests/barbican-api-7cd9889bc6-72r2h" Jan 20 17:51:10 crc kubenswrapper[4558]: I0120 17:51:10.767775 4558 kubelet.go:2428] 
"SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:51:10 crc kubenswrapper[4558]: I0120 17:51:10.867257 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-api-7cd9889bc6-72r2h" Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.308880 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-api-7cd9889bc6-72r2h"] Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.326511 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.338879 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.346950 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.506345 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.522241 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-7cd9889bc6-72r2h" event={"ID":"c1e7ea27-3179-4c51-a9d9-2422702f08fa","Type":"ContainerStarted","Data":"2f2abebcba696cfeaa6578be95ccfa2ecbfa3b1fa674fbde022073a9ed91eb41"} Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.530451 4558 generic.go:334] "Generic (PLEG): container finished" podID="b2ed54c7-3249-4a62-8d98-21618cb6dab6" containerID="b9249d1892527138cc235d58186150d42903080a9231b4c9631a998629089336" exitCode=143 Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.532902 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-6fb4445c46-pmjxt" event={"ID":"b2ed54c7-3249-4a62-8d98-21618cb6dab6","Type":"ContainerDied","Data":"b9249d1892527138cc235d58186150d42903080a9231b4c9631a998629089336"} Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.547367 4558 generic.go:334] "Generic (PLEG): container finished" podID="07e11378-90b7-4994-99fa-6e2d78b61f63" containerID="9ee9796db29af4c22a51a195683b27835e60fea772e6cf6721ee2e162cc239b4" exitCode=0 Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.547518 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.547553 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"07e11378-90b7-4994-99fa-6e2d78b61f63","Type":"ContainerDied","Data":"9ee9796db29af4c22a51a195683b27835e60fea772e6cf6721ee2e162cc239b4"} Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.547610 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"07e11378-90b7-4994-99fa-6e2d78b61f63","Type":"ContainerDied","Data":"79d48f60aa75df8826b507a34aa9de691798da80e280b206bcc79a7c617d08df"} Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.547628 4558 scope.go:117] "RemoveContainer" containerID="9ee9796db29af4c22a51a195683b27835e60fea772e6cf6721ee2e162cc239b4" Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.554158 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"d73e13db-6a2c-4205-aabc-1c7671e07895","Type":"ContainerStarted","Data":"38a50cd3fde0de52b7453bb0226b6b2af8717868a6f1905514fcfb820cdea7f5"} Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.554287 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"d73e13db-6a2c-4205-aabc-1c7671e07895","Type":"ContainerStarted","Data":"b2a7315c5227f9b741a26482cd0fe954f372a0ca69f99d2c3c40d61748d42885"} Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.554207 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.560352 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.602193 4558 scope.go:117] "RemoveContainer" containerID="7eefed10cf0d89cd2ed97e7df9a22ee9e031b097685b34b4e3323811ac699db8" Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.629986 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.651009 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/07e11378-90b7-4994-99fa-6e2d78b61f63-httpd-run\") pod \"07e11378-90b7-4994-99fa-6e2d78b61f63\" (UID: \"07e11378-90b7-4994-99fa-6e2d78b61f63\") " Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.651069 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/07e11378-90b7-4994-99fa-6e2d78b61f63-scripts\") pod \"07e11378-90b7-4994-99fa-6e2d78b61f63\" (UID: \"07e11378-90b7-4994-99fa-6e2d78b61f63\") " Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.651102 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/07e11378-90b7-4994-99fa-6e2d78b61f63-config-data\") pod \"07e11378-90b7-4994-99fa-6e2d78b61f63\" (UID: \"07e11378-90b7-4994-99fa-6e2d78b61f63\") " Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.651155 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/07e11378-90b7-4994-99fa-6e2d78b61f63-logs\") 
pod \"07e11378-90b7-4994-99fa-6e2d78b61f63\" (UID: \"07e11378-90b7-4994-99fa-6e2d78b61f63\") " Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.651217 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/07e11378-90b7-4994-99fa-6e2d78b61f63-combined-ca-bundle\") pod \"07e11378-90b7-4994-99fa-6e2d78b61f63\" (UID: \"07e11378-90b7-4994-99fa-6e2d78b61f63\") " Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.651267 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"07e11378-90b7-4994-99fa-6e2d78b61f63\" (UID: \"07e11378-90b7-4994-99fa-6e2d78b61f63\") " Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.651401 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/07e11378-90b7-4994-99fa-6e2d78b61f63-public-tls-certs\") pod \"07e11378-90b7-4994-99fa-6e2d78b61f63\" (UID: \"07e11378-90b7-4994-99fa-6e2d78b61f63\") " Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.651448 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pq2q4\" (UniqueName: \"kubernetes.io/projected/07e11378-90b7-4994-99fa-6e2d78b61f63-kube-api-access-pq2q4\") pod \"07e11378-90b7-4994-99fa-6e2d78b61f63\" (UID: \"07e11378-90b7-4994-99fa-6e2d78b61f63\") " Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.654053 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/07e11378-90b7-4994-99fa-6e2d78b61f63-logs" (OuterVolumeSpecName: "logs") pod "07e11378-90b7-4994-99fa-6e2d78b61f63" (UID: "07e11378-90b7-4994-99fa-6e2d78b61f63"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.654591 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/07e11378-90b7-4994-99fa-6e2d78b61f63-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "07e11378-90b7-4994-99fa-6e2d78b61f63" (UID: "07e11378-90b7-4994-99fa-6e2d78b61f63"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.656949 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/07e11378-90b7-4994-99fa-6e2d78b61f63-kube-api-access-pq2q4" (OuterVolumeSpecName: "kube-api-access-pq2q4") pod "07e11378-90b7-4994-99fa-6e2d78b61f63" (UID: "07e11378-90b7-4994-99fa-6e2d78b61f63"). InnerVolumeSpecName "kube-api-access-pq2q4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.659066 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.670566 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:51:11 crc kubenswrapper[4558]: E0120 17:51:11.671284 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="07e11378-90b7-4994-99fa-6e2d78b61f63" containerName="glance-httpd" Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.671326 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="07e11378-90b7-4994-99fa-6e2d78b61f63" containerName="glance-httpd" Jan 20 17:51:11 crc kubenswrapper[4558]: E0120 17:51:11.671356 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="07e11378-90b7-4994-99fa-6e2d78b61f63" containerName="glance-log" Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.671364 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="07e11378-90b7-4994-99fa-6e2d78b61f63" containerName="glance-log" Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.671665 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="07e11378-90b7-4994-99fa-6e2d78b61f63" containerName="glance-httpd" Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.671689 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="07e11378-90b7-4994-99fa-6e2d78b61f63" containerName="glance-log" Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.671712 4558 scope.go:117] "RemoveContainer" containerID="9ee9796db29af4c22a51a195683b27835e60fea772e6cf6721ee2e162cc239b4" Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.672480 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:51:11 crc kubenswrapper[4558]: E0120 17:51:11.672923 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9ee9796db29af4c22a51a195683b27835e60fea772e6cf6721ee2e162cc239b4\": container with ID starting with 9ee9796db29af4c22a51a195683b27835e60fea772e6cf6721ee2e162cc239b4 not found: ID does not exist" containerID="9ee9796db29af4c22a51a195683b27835e60fea772e6cf6721ee2e162cc239b4" Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.672964 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9ee9796db29af4c22a51a195683b27835e60fea772e6cf6721ee2e162cc239b4"} err="failed to get container status \"9ee9796db29af4c22a51a195683b27835e60fea772e6cf6721ee2e162cc239b4\": rpc error: code = NotFound desc = could not find container \"9ee9796db29af4c22a51a195683b27835e60fea772e6cf6721ee2e162cc239b4\": container with ID starting with 9ee9796db29af4c22a51a195683b27835e60fea772e6cf6721ee2e162cc239b4 not found: ID does not exist" Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.673007 4558 scope.go:117] "RemoveContainer" containerID="7eefed10cf0d89cd2ed97e7df9a22ee9e031b097685b34b4e3323811ac699db8" Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.672928 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/07e11378-90b7-4994-99fa-6e2d78b61f63-scripts" (OuterVolumeSpecName: "scripts") pod "07e11378-90b7-4994-99fa-6e2d78b61f63" (UID: "07e11378-90b7-4994-99fa-6e2d78b61f63"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:11 crc kubenswrapper[4558]: E0120 17:51:11.673502 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7eefed10cf0d89cd2ed97e7df9a22ee9e031b097685b34b4e3323811ac699db8\": container with ID starting with 7eefed10cf0d89cd2ed97e7df9a22ee9e031b097685b34b4e3323811ac699db8 not found: ID does not exist" containerID="7eefed10cf0d89cd2ed97e7df9a22ee9e031b097685b34b4e3323811ac699db8" Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.673545 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7eefed10cf0d89cd2ed97e7df9a22ee9e031b097685b34b4e3323811ac699db8"} err="failed to get container status \"7eefed10cf0d89cd2ed97e7df9a22ee9e031b097685b34b4e3323811ac699db8\": rpc error: code = NotFound desc = could not find container \"7eefed10cf0d89cd2ed97e7df9a22ee9e031b097685b34b4e3323811ac699db8\": container with ID starting with 7eefed10cf0d89cd2ed97e7df9a22ee9e031b097685b34b4e3323811ac699db8 not found: ID does not exist" Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.674687 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-novncproxy-config-data" Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.674752 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-novncproxy-cell1-public-svc" Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.675891 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "glance") pod "07e11378-90b7-4994-99fa-6e2d78b61f63" (UID: "07e11378-90b7-4994-99fa-6e2d78b61f63"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.675939 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-novncproxy-cell1-vencrypt" Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.676809 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.725736 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/07e11378-90b7-4994-99fa-6e2d78b61f63-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "07e11378-90b7-4994-99fa-6e2d78b61f63" (UID: "07e11378-90b7-4994-99fa-6e2d78b61f63"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.727590 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/07e11378-90b7-4994-99fa-6e2d78b61f63-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "07e11378-90b7-4994-99fa-6e2d78b61f63" (UID: "07e11378-90b7-4994-99fa-6e2d78b61f63"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.729001 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/07e11378-90b7-4994-99fa-6e2d78b61f63-config-data" (OuterVolumeSpecName: "config-data") pod "07e11378-90b7-4994-99fa-6e2d78b61f63" (UID: "07e11378-90b7-4994-99fa-6e2d78b61f63"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.754369 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e029b16a-f02b-40b4-82b9-5fa08fe62bc1-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"e029b16a-f02b-40b4-82b9-5fa08fe62bc1\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.754475 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/e029b16a-f02b-40b4-82b9-5fa08fe62bc1-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"e029b16a-f02b-40b4-82b9-5fa08fe62bc1\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.754753 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d6q8k\" (UniqueName: \"kubernetes.io/projected/e029b16a-f02b-40b4-82b9-5fa08fe62bc1-kube-api-access-d6q8k\") pod \"nova-cell1-novncproxy-0\" (UID: \"e029b16a-f02b-40b4-82b9-5fa08fe62bc1\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.754937 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e029b16a-f02b-40b4-82b9-5fa08fe62bc1-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"e029b16a-f02b-40b4-82b9-5fa08fe62bc1\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.755370 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/e029b16a-f02b-40b4-82b9-5fa08fe62bc1-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"e029b16a-f02b-40b4-82b9-5fa08fe62bc1\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.755516 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/07e11378-90b7-4994-99fa-6e2d78b61f63-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.755545 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pq2q4\" (UniqueName: \"kubernetes.io/projected/07e11378-90b7-4994-99fa-6e2d78b61f63-kube-api-access-pq2q4\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.755559 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/07e11378-90b7-4994-99fa-6e2d78b61f63-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.755570 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/07e11378-90b7-4994-99fa-6e2d78b61f63-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.755579 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/07e11378-90b7-4994-99fa-6e2d78b61f63-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.755588 4558 reconciler_common.go:293] 
"Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/07e11378-90b7-4994-99fa-6e2d78b61f63-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.755597 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/07e11378-90b7-4994-99fa-6e2d78b61f63-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.755626 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.772135 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.857480 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/e029b16a-f02b-40b4-82b9-5fa08fe62bc1-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"e029b16a-f02b-40b4-82b9-5fa08fe62bc1\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.857551 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e029b16a-f02b-40b4-82b9-5fa08fe62bc1-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"e029b16a-f02b-40b4-82b9-5fa08fe62bc1\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.857593 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/e029b16a-f02b-40b4-82b9-5fa08fe62bc1-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"e029b16a-f02b-40b4-82b9-5fa08fe62bc1\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.857657 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d6q8k\" (UniqueName: \"kubernetes.io/projected/e029b16a-f02b-40b4-82b9-5fa08fe62bc1-kube-api-access-d6q8k\") pod \"nova-cell1-novncproxy-0\" (UID: \"e029b16a-f02b-40b4-82b9-5fa08fe62bc1\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.857718 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e029b16a-f02b-40b4-82b9-5fa08fe62bc1-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"e029b16a-f02b-40b4-82b9-5fa08fe62bc1\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.857872 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.861814 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/e029b16a-f02b-40b4-82b9-5fa08fe62bc1-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"e029b16a-f02b-40b4-82b9-5fa08fe62bc1\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 
17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.862664 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e029b16a-f02b-40b4-82b9-5fa08fe62bc1-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"e029b16a-f02b-40b4-82b9-5fa08fe62bc1\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.863078 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/e029b16a-f02b-40b4-82b9-5fa08fe62bc1-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"e029b16a-f02b-40b4-82b9-5fa08fe62bc1\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.863526 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e029b16a-f02b-40b4-82b9-5fa08fe62bc1-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"e029b16a-f02b-40b4-82b9-5fa08fe62bc1\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.881785 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d6q8k\" (UniqueName: \"kubernetes.io/projected/e029b16a-f02b-40b4-82b9-5fa08fe62bc1-kube-api-access-d6q8k\") pod \"nova-cell1-novncproxy-0\" (UID: \"e029b16a-f02b-40b4-82b9-5fa08fe62bc1\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.894254 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.914061 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.921212 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.922811 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.930688 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-public-svc" Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.931047 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-external-config-data" Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.933251 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:51:11 crc kubenswrapper[4558]: I0120 17:51:11.991912 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:51:12 crc kubenswrapper[4558]: I0120 17:51:12.064655 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/82d257ef-8ea2-4bb0-9472-22d7531eea07-config-data\") pod \"glance-default-external-api-0\" (UID: \"82d257ef-8ea2-4bb0-9472-22d7531eea07\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:51:12 crc kubenswrapper[4558]: I0120 17:51:12.064722 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/82d257ef-8ea2-4bb0-9472-22d7531eea07-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"82d257ef-8ea2-4bb0-9472-22d7531eea07\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:51:12 crc kubenswrapper[4558]: I0120 17:51:12.064771 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/82d257ef-8ea2-4bb0-9472-22d7531eea07-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"82d257ef-8ea2-4bb0-9472-22d7531eea07\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:51:12 crc kubenswrapper[4558]: I0120 17:51:12.064881 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/82d257ef-8ea2-4bb0-9472-22d7531eea07-logs\") pod \"glance-default-external-api-0\" (UID: \"82d257ef-8ea2-4bb0-9472-22d7531eea07\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:51:12 crc kubenswrapper[4558]: I0120 17:51:12.064919 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"82d257ef-8ea2-4bb0-9472-22d7531eea07\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:51:12 crc kubenswrapper[4558]: I0120 17:51:12.065137 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7lflm\" (UniqueName: \"kubernetes.io/projected/82d257ef-8ea2-4bb0-9472-22d7531eea07-kube-api-access-7lflm\") pod \"glance-default-external-api-0\" (UID: \"82d257ef-8ea2-4bb0-9472-22d7531eea07\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:51:12 crc kubenswrapper[4558]: I0120 17:51:12.065214 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/82d257ef-8ea2-4bb0-9472-22d7531eea07-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"82d257ef-8ea2-4bb0-9472-22d7531eea07\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:51:12 crc kubenswrapper[4558]: I0120 17:51:12.065264 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/82d257ef-8ea2-4bb0-9472-22d7531eea07-scripts\") pod \"glance-default-external-api-0\" (UID: \"82d257ef-8ea2-4bb0-9472-22d7531eea07\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:51:12 crc kubenswrapper[4558]: I0120 17:51:12.167055 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" 
(UniqueName: \"kubernetes.io/empty-dir/82d257ef-8ea2-4bb0-9472-22d7531eea07-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"82d257ef-8ea2-4bb0-9472-22d7531eea07\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:51:12 crc kubenswrapper[4558]: I0120 17:51:12.167106 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/82d257ef-8ea2-4bb0-9472-22d7531eea07-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"82d257ef-8ea2-4bb0-9472-22d7531eea07\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:51:12 crc kubenswrapper[4558]: I0120 17:51:12.167158 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/82d257ef-8ea2-4bb0-9472-22d7531eea07-logs\") pod \"glance-default-external-api-0\" (UID: \"82d257ef-8ea2-4bb0-9472-22d7531eea07\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:51:12 crc kubenswrapper[4558]: I0120 17:51:12.167218 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"82d257ef-8ea2-4bb0-9472-22d7531eea07\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:51:12 crc kubenswrapper[4558]: I0120 17:51:12.167239 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7lflm\" (UniqueName: \"kubernetes.io/projected/82d257ef-8ea2-4bb0-9472-22d7531eea07-kube-api-access-7lflm\") pod \"glance-default-external-api-0\" (UID: \"82d257ef-8ea2-4bb0-9472-22d7531eea07\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:51:12 crc kubenswrapper[4558]: I0120 17:51:12.167264 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/82d257ef-8ea2-4bb0-9472-22d7531eea07-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"82d257ef-8ea2-4bb0-9472-22d7531eea07\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:51:12 crc kubenswrapper[4558]: I0120 17:51:12.167310 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/82d257ef-8ea2-4bb0-9472-22d7531eea07-scripts\") pod \"glance-default-external-api-0\" (UID: \"82d257ef-8ea2-4bb0-9472-22d7531eea07\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:51:12 crc kubenswrapper[4558]: I0120 17:51:12.167362 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/82d257ef-8ea2-4bb0-9472-22d7531eea07-config-data\") pod \"glance-default-external-api-0\" (UID: \"82d257ef-8ea2-4bb0-9472-22d7531eea07\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:51:12 crc kubenswrapper[4558]: I0120 17:51:12.167642 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"82d257ef-8ea2-4bb0-9472-22d7531eea07\") device mount path \"/mnt/openstack/pv09\"" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:51:12 crc kubenswrapper[4558]: I0120 17:51:12.167857 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/82d257ef-8ea2-4bb0-9472-22d7531eea07-logs\") pod \"glance-default-external-api-0\" (UID: \"82d257ef-8ea2-4bb0-9472-22d7531eea07\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:51:12 crc kubenswrapper[4558]: I0120 17:51:12.169085 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/82d257ef-8ea2-4bb0-9472-22d7531eea07-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"82d257ef-8ea2-4bb0-9472-22d7531eea07\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:51:12 crc kubenswrapper[4558]: I0120 17:51:12.173102 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/82d257ef-8ea2-4bb0-9472-22d7531eea07-scripts\") pod \"glance-default-external-api-0\" (UID: \"82d257ef-8ea2-4bb0-9472-22d7531eea07\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:51:12 crc kubenswrapper[4558]: I0120 17:51:12.179873 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/82d257ef-8ea2-4bb0-9472-22d7531eea07-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"82d257ef-8ea2-4bb0-9472-22d7531eea07\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:51:12 crc kubenswrapper[4558]: I0120 17:51:12.182051 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/82d257ef-8ea2-4bb0-9472-22d7531eea07-config-data\") pod \"glance-default-external-api-0\" (UID: \"82d257ef-8ea2-4bb0-9472-22d7531eea07\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:51:12 crc kubenswrapper[4558]: I0120 17:51:12.182588 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/82d257ef-8ea2-4bb0-9472-22d7531eea07-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"82d257ef-8ea2-4bb0-9472-22d7531eea07\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:51:12 crc kubenswrapper[4558]: I0120 17:51:12.192866 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7lflm\" (UniqueName: \"kubernetes.io/projected/82d257ef-8ea2-4bb0-9472-22d7531eea07-kube-api-access-7lflm\") pod \"glance-default-external-api-0\" (UID: \"82d257ef-8ea2-4bb0-9472-22d7531eea07\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:51:12 crc kubenswrapper[4558]: I0120 17:51:12.231278 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"82d257ef-8ea2-4bb0-9472-22d7531eea07\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:51:12 crc kubenswrapper[4558]: I0120 17:51:12.256482 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:51:12 crc kubenswrapper[4558]: I0120 17:51:12.262040 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:51:12 crc kubenswrapper[4558]: I0120 17:51:12.462388 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:51:12 crc kubenswrapper[4558]: I0120 17:51:12.462686 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:51:12 crc kubenswrapper[4558]: I0120 17:51:12.462698 4558 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:51:12 crc kubenswrapper[4558]: I0120 17:51:12.463632 4558 scope.go:117] "RemoveContainer" containerID="1070e671e60499543eff1520064c8d7b1151ff81f9de7c5dd18ebcbdbdbbe487" Jan 20 17:51:12 crc kubenswrapper[4558]: E0120 17:51:12.463984 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-scheduler-scheduler\" with CrashLoopBackOff: \"back-off 10s restarting failed container=nova-scheduler-scheduler pod=nova-scheduler-0_openstack-kuttl-tests(d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2)\"" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2" Jan 20 17:51:12 crc kubenswrapper[4558]: I0120 17:51:12.583076 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="07e11378-90b7-4994-99fa-6e2d78b61f63" path="/var/lib/kubelet/pods/07e11378-90b7-4994-99fa-6e2d78b61f63/volumes" Jan 20 17:51:12 crc kubenswrapper[4558]: I0120 17:51:12.584069 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="891838f3-4f66-4fe0-85c0-6a538dfbe364" path="/var/lib/kubelet/pods/891838f3-4f66-4fe0-85c0-6a538dfbe364/volumes" Jan 20 17:51:12 crc kubenswrapper[4558]: I0120 17:51:12.584985 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"d73e13db-6a2c-4205-aabc-1c7671e07895","Type":"ContainerStarted","Data":"74481199f7317052e5bb7b801f2c439c05786dab675a519ef2eba820484705a5"} Jan 20 17:51:12 crc kubenswrapper[4558]: I0120 17:51:12.585042 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-7cd9889bc6-72r2h" event={"ID":"c1e7ea27-3179-4c51-a9d9-2422702f08fa","Type":"ContainerStarted","Data":"db761370e2918d26be14f649374ac37f0e12f0edc29e9d9fbdd1676c2fcd0724"} Jan 20 17:51:12 crc kubenswrapper[4558]: I0120 17:51:12.585064 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/barbican-api-7cd9889bc6-72r2h" Jan 20 17:51:12 crc kubenswrapper[4558]: I0120 17:51:12.585078 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/barbican-api-7cd9889bc6-72r2h" Jan 20 17:51:12 crc kubenswrapper[4558]: I0120 17:51:12.585088 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-7cd9889bc6-72r2h" event={"ID":"c1e7ea27-3179-4c51-a9d9-2422702f08fa","Type":"ContainerStarted","Data":"9c6a604544c830e2edb70b9f7c0f4ad6688b94a6a3eaabf65c2198c2e9f86440"} Jan 20 17:51:12 crc kubenswrapper[4558]: I0120 17:51:12.585099 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" 
event={"ID":"e029b16a-f02b-40b4-82b9-5fa08fe62bc1","Type":"ContainerStarted","Data":"71ba9af23a1c5424b7ecc46753bf8d593f4b1658ef2cbde792f66ad414383b91"} Jan 20 17:51:12 crc kubenswrapper[4558]: I0120 17:51:12.585113 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"e029b16a-f02b-40b4-82b9-5fa08fe62bc1","Type":"ContainerStarted","Data":"4f2b23c2454036de8491db24c5d242ec8e092c442a06ff5758ab7788c4eae2a3"} Jan 20 17:51:12 crc kubenswrapper[4558]: I0120 17:51:12.605790 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-internal-api-0" podStartSLOduration=3.6057586820000003 podStartE2EDuration="3.605758682s" podCreationTimestamp="2026-01-20 17:51:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:51:12.595963705 +0000 UTC m=+4166.356301671" watchObservedRunningTime="2026-01-20 17:51:12.605758682 +0000 UTC m=+4166.366096649" Jan 20 17:51:12 crc kubenswrapper[4558]: I0120 17:51:12.615579 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" podStartSLOduration=1.615561836 podStartE2EDuration="1.615561836s" podCreationTimestamp="2026-01-20 17:51:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:51:12.613912865 +0000 UTC m=+4166.374250832" watchObservedRunningTime="2026-01-20 17:51:12.615561836 +0000 UTC m=+4166.375899802" Jan 20 17:51:12 crc kubenswrapper[4558]: I0120 17:51:12.636687 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-api-7cd9889bc6-72r2h" podStartSLOduration=2.636666762 podStartE2EDuration="2.636666762s" podCreationTimestamp="2026-01-20 17:51:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:51:12.629814396 +0000 UTC m=+4166.390152352" watchObservedRunningTime="2026-01-20 17:51:12.636666762 +0000 UTC m=+4166.397004728" Jan 20 17:51:12 crc kubenswrapper[4558]: I0120 17:51:12.710700 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:51:13 crc kubenswrapper[4558]: I0120 17:51:13.604869 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"82d257ef-8ea2-4bb0-9472-22d7531eea07","Type":"ContainerStarted","Data":"bef3d722731abd0d7f07da2d2d4e14517361d6bdfc566ecc88b996f262b48dc4"} Jan 20 17:51:13 crc kubenswrapper[4558]: I0120 17:51:13.605441 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"82d257ef-8ea2-4bb0-9472-22d7531eea07","Type":"ContainerStarted","Data":"c1099e7e0b56f6957d2dba5ba731274d5416bf0b20483c8ef7ee43b382df1d66"} Jan 20 17:51:14 crc kubenswrapper[4558]: I0120 17:51:14.392684 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/placement-764f5f95dd-qwm2f" Jan 20 17:51:14 crc kubenswrapper[4558]: I0120 17:51:14.543594 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/cinder-api-0" podUID="01835185-5ca5-467a-9362-b8de3e8665ed" containerName="cinder-api" probeResult="failure" output="Get 
\"https://10.217.1.210:8776/healthcheck\": context deadline exceeded" Jan 20 17:51:14 crc kubenswrapper[4558]: I0120 17:51:14.546988 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/placement-764f5f95dd-qwm2f" Jan 20 17:51:14 crc kubenswrapper[4558]: I0120 17:51:14.606330 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-59975f6898-zxn8m"] Jan 20 17:51:14 crc kubenswrapper[4558]: I0120 17:51:14.606561 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/placement-59975f6898-zxn8m" podUID="a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2" containerName="placement-log" containerID="cri-o://845fa35e74757f51248608df722fce7c1028166d79229d0cc2abebe77b4abcbb" gracePeriod=30 Jan 20 17:51:14 crc kubenswrapper[4558]: I0120 17:51:14.606950 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/placement-59975f6898-zxn8m" podUID="a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2" containerName="placement-api" containerID="cri-o://c423e5ba761c266b37e8a9914f09c46db8c16aa75089222ed91b3723dd5c4ca5" gracePeriod=30 Jan 20 17:51:14 crc kubenswrapper[4558]: I0120 17:51:14.629329 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"82d257ef-8ea2-4bb0-9472-22d7531eea07","Type":"ContainerStarted","Data":"3c6f9efef9d7e4583e21b7502bc31ea663e343c528df9cc67e2e58c3e8ff34fb"} Jan 20 17:51:14 crc kubenswrapper[4558]: I0120 17:51:14.922238 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/barbican-api-6fb4445c46-pmjxt" podUID="b2ed54c7-3249-4a62-8d98-21618cb6dab6" containerName="barbican-api" probeResult="failure" output="Get \"https://10.217.1.212:9311/healthcheck\": read tcp 10.217.0.2:42618->10.217.1.212:9311: read: connection reset by peer" Jan 20 17:51:14 crc kubenswrapper[4558]: I0120 17:51:14.922243 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/barbican-api-6fb4445c46-pmjxt" podUID="b2ed54c7-3249-4a62-8d98-21618cb6dab6" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.1.212:9311/healthcheck\": read tcp 10.217.0.2:42612->10.217.1.212:9311: read: connection reset by peer" Jan 20 17:51:14 crc kubenswrapper[4558]: I0120 17:51:14.922664 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/barbican-api-6fb4445c46-pmjxt" podUID="b2ed54c7-3249-4a62-8d98-21618cb6dab6" containerName="barbican-api" probeResult="failure" output="Get \"https://10.217.1.212:9311/healthcheck\": dial tcp 10.217.1.212:9311: connect: connection refused" Jan 20 17:51:14 crc kubenswrapper[4558]: I0120 17:51:14.922788 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/barbican-api-6fb4445c46-pmjxt" podUID="b2ed54c7-3249-4a62-8d98-21618cb6dab6" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.1.212:9311/healthcheck\": dial tcp 10.217.1.212:9311: connect: connection refused" Jan 20 17:51:15 crc kubenswrapper[4558]: I0120 17:51:15.345900 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-api-6fb4445c46-pmjxt" Jan 20 17:51:15 crc kubenswrapper[4558]: I0120 17:51:15.363074 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-external-api-0" podStartSLOduration=4.363055549 podStartE2EDuration="4.363055549s" podCreationTimestamp="2026-01-20 17:51:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:51:14.648355083 +0000 UTC m=+4168.408693060" watchObservedRunningTime="2026-01-20 17:51:15.363055549 +0000 UTC m=+4169.123393516" Jan 20 17:51:15 crc kubenswrapper[4558]: I0120 17:51:15.365099 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/cinder-api-0" podUID="01835185-5ca5-467a-9362-b8de3e8665ed" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.1.210:8776/healthcheck\": read tcp 10.217.0.2:37664->10.217.1.210:8776: read: connection reset by peer" Jan 20 17:51:15 crc kubenswrapper[4558]: I0120 17:51:15.461104 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b2ed54c7-3249-4a62-8d98-21618cb6dab6-public-tls-certs\") pod \"b2ed54c7-3249-4a62-8d98-21618cb6dab6\" (UID: \"b2ed54c7-3249-4a62-8d98-21618cb6dab6\") " Jan 20 17:51:15 crc kubenswrapper[4558]: I0120 17:51:15.461279 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b2ed54c7-3249-4a62-8d98-21618cb6dab6-internal-tls-certs\") pod \"b2ed54c7-3249-4a62-8d98-21618cb6dab6\" (UID: \"b2ed54c7-3249-4a62-8d98-21618cb6dab6\") " Jan 20 17:51:15 crc kubenswrapper[4558]: I0120 17:51:15.461323 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2ed54c7-3249-4a62-8d98-21618cb6dab6-combined-ca-bundle\") pod \"b2ed54c7-3249-4a62-8d98-21618cb6dab6\" (UID: \"b2ed54c7-3249-4a62-8d98-21618cb6dab6\") " Jan 20 17:51:15 crc kubenswrapper[4558]: I0120 17:51:15.461361 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b2ed54c7-3249-4a62-8d98-21618cb6dab6-config-data\") pod \"b2ed54c7-3249-4a62-8d98-21618cb6dab6\" (UID: \"b2ed54c7-3249-4a62-8d98-21618cb6dab6\") " Jan 20 17:51:15 crc kubenswrapper[4558]: I0120 17:51:15.461431 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b2ed54c7-3249-4a62-8d98-21618cb6dab6-config-data-custom\") pod \"b2ed54c7-3249-4a62-8d98-21618cb6dab6\" (UID: \"b2ed54c7-3249-4a62-8d98-21618cb6dab6\") " Jan 20 17:51:15 crc kubenswrapper[4558]: I0120 17:51:15.461477 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b2ed54c7-3249-4a62-8d98-21618cb6dab6-logs\") pod \"b2ed54c7-3249-4a62-8d98-21618cb6dab6\" (UID: \"b2ed54c7-3249-4a62-8d98-21618cb6dab6\") " Jan 20 17:51:15 crc kubenswrapper[4558]: I0120 17:51:15.461592 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l95vj\" (UniqueName: \"kubernetes.io/projected/b2ed54c7-3249-4a62-8d98-21618cb6dab6-kube-api-access-l95vj\") pod \"b2ed54c7-3249-4a62-8d98-21618cb6dab6\" (UID: \"b2ed54c7-3249-4a62-8d98-21618cb6dab6\") " Jan 20 17:51:15 crc 
kubenswrapper[4558]: I0120 17:51:15.465543 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b2ed54c7-3249-4a62-8d98-21618cb6dab6-logs" (OuterVolumeSpecName: "logs") pod "b2ed54c7-3249-4a62-8d98-21618cb6dab6" (UID: "b2ed54c7-3249-4a62-8d98-21618cb6dab6"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:51:15 crc kubenswrapper[4558]: I0120 17:51:15.467439 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b2ed54c7-3249-4a62-8d98-21618cb6dab6-kube-api-access-l95vj" (OuterVolumeSpecName: "kube-api-access-l95vj") pod "b2ed54c7-3249-4a62-8d98-21618cb6dab6" (UID: "b2ed54c7-3249-4a62-8d98-21618cb6dab6"). InnerVolumeSpecName "kube-api-access-l95vj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:51:15 crc kubenswrapper[4558]: I0120 17:51:15.468304 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b2ed54c7-3249-4a62-8d98-21618cb6dab6-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "b2ed54c7-3249-4a62-8d98-21618cb6dab6" (UID: "b2ed54c7-3249-4a62-8d98-21618cb6dab6"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:15 crc kubenswrapper[4558]: I0120 17:51:15.487974 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b2ed54c7-3249-4a62-8d98-21618cb6dab6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b2ed54c7-3249-4a62-8d98-21618cb6dab6" (UID: "b2ed54c7-3249-4a62-8d98-21618cb6dab6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:15 crc kubenswrapper[4558]: I0120 17:51:15.514001 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b2ed54c7-3249-4a62-8d98-21618cb6dab6-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "b2ed54c7-3249-4a62-8d98-21618cb6dab6" (UID: "b2ed54c7-3249-4a62-8d98-21618cb6dab6"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:15 crc kubenswrapper[4558]: I0120 17:51:15.520473 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b2ed54c7-3249-4a62-8d98-21618cb6dab6-config-data" (OuterVolumeSpecName: "config-data") pod "b2ed54c7-3249-4a62-8d98-21618cb6dab6" (UID: "b2ed54c7-3249-4a62-8d98-21618cb6dab6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:15 crc kubenswrapper[4558]: I0120 17:51:15.528521 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b2ed54c7-3249-4a62-8d98-21618cb6dab6-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "b2ed54c7-3249-4a62-8d98-21618cb6dab6" (UID: "b2ed54c7-3249-4a62-8d98-21618cb6dab6"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:15 crc kubenswrapper[4558]: I0120 17:51:15.567729 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l95vj\" (UniqueName: \"kubernetes.io/projected/b2ed54c7-3249-4a62-8d98-21618cb6dab6-kube-api-access-l95vj\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:15 crc kubenswrapper[4558]: I0120 17:51:15.568506 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b2ed54c7-3249-4a62-8d98-21618cb6dab6-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:15 crc kubenswrapper[4558]: I0120 17:51:15.568549 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b2ed54c7-3249-4a62-8d98-21618cb6dab6-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:15 crc kubenswrapper[4558]: I0120 17:51:15.568565 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b2ed54c7-3249-4a62-8d98-21618cb6dab6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:15 crc kubenswrapper[4558]: I0120 17:51:15.568579 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b2ed54c7-3249-4a62-8d98-21618cb6dab6-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:15 crc kubenswrapper[4558]: I0120 17:51:15.568590 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b2ed54c7-3249-4a62-8d98-21618cb6dab6-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:15 crc kubenswrapper[4558]: I0120 17:51:15.568602 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b2ed54c7-3249-4a62-8d98-21618cb6dab6-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:15 crc kubenswrapper[4558]: I0120 17:51:15.646445 4558 generic.go:334] "Generic (PLEG): container finished" podID="b2ed54c7-3249-4a62-8d98-21618cb6dab6" containerID="913497c86e1fd7122fec98b093c66895e1a8361995489d6c8fda3deef690c7ef" exitCode=0 Jan 20 17:51:15 crc kubenswrapper[4558]: I0120 17:51:15.646505 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-api-6fb4445c46-pmjxt" Jan 20 17:51:15 crc kubenswrapper[4558]: I0120 17:51:15.646497 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-6fb4445c46-pmjxt" event={"ID":"b2ed54c7-3249-4a62-8d98-21618cb6dab6","Type":"ContainerDied","Data":"913497c86e1fd7122fec98b093c66895e1a8361995489d6c8fda3deef690c7ef"} Jan 20 17:51:15 crc kubenswrapper[4558]: I0120 17:51:15.646661 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-6fb4445c46-pmjxt" event={"ID":"b2ed54c7-3249-4a62-8d98-21618cb6dab6","Type":"ContainerDied","Data":"584e3effde7d8b0dcf01a557c94e94e490024c54d4addf894cbea42208822cb1"} Jan 20 17:51:15 crc kubenswrapper[4558]: I0120 17:51:15.646700 4558 scope.go:117] "RemoveContainer" containerID="913497c86e1fd7122fec98b093c66895e1a8361995489d6c8fda3deef690c7ef" Jan 20 17:51:15 crc kubenswrapper[4558]: I0120 17:51:15.649466 4558 generic.go:334] "Generic (PLEG): container finished" podID="a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2" containerID="845fa35e74757f51248608df722fce7c1028166d79229d0cc2abebe77b4abcbb" exitCode=143 Jan 20 17:51:15 crc kubenswrapper[4558]: I0120 17:51:15.649532 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-59975f6898-zxn8m" event={"ID":"a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2","Type":"ContainerDied","Data":"845fa35e74757f51248608df722fce7c1028166d79229d0cc2abebe77b4abcbb"} Jan 20 17:51:15 crc kubenswrapper[4558]: I0120 17:51:15.652370 4558 generic.go:334] "Generic (PLEG): container finished" podID="01835185-5ca5-467a-9362-b8de3e8665ed" containerID="a8d84750152ad0dd7ec1ee98d8c4f4d1566801adb750bbf52fcbf29bfac3fcd6" exitCode=0 Jan 20 17:51:15 crc kubenswrapper[4558]: I0120 17:51:15.652566 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"01835185-5ca5-467a-9362-b8de3e8665ed","Type":"ContainerDied","Data":"a8d84750152ad0dd7ec1ee98d8c4f4d1566801adb750bbf52fcbf29bfac3fcd6"} Jan 20 17:51:15 crc kubenswrapper[4558]: I0120 17:51:15.676833 4558 scope.go:117] "RemoveContainer" containerID="b9249d1892527138cc235d58186150d42903080a9231b4c9631a998629089336" Jan 20 17:51:15 crc kubenswrapper[4558]: I0120 17:51:15.679210 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-api-6fb4445c46-pmjxt"] Jan 20 17:51:15 crc kubenswrapper[4558]: I0120 17:51:15.690505 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-api-6fb4445c46-pmjxt"] Jan 20 17:51:15 crc kubenswrapper[4558]: I0120 17:51:15.690965 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:51:15 crc kubenswrapper[4558]: I0120 17:51:15.702198 4558 scope.go:117] "RemoveContainer" containerID="913497c86e1fd7122fec98b093c66895e1a8361995489d6c8fda3deef690c7ef" Jan 20 17:51:15 crc kubenswrapper[4558]: E0120 17:51:15.704384 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"913497c86e1fd7122fec98b093c66895e1a8361995489d6c8fda3deef690c7ef\": container with ID starting with 913497c86e1fd7122fec98b093c66895e1a8361995489d6c8fda3deef690c7ef not found: ID does not exist" containerID="913497c86e1fd7122fec98b093c66895e1a8361995489d6c8fda3deef690c7ef" Jan 20 17:51:15 crc kubenswrapper[4558]: I0120 17:51:15.704437 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"913497c86e1fd7122fec98b093c66895e1a8361995489d6c8fda3deef690c7ef"} err="failed to get container status \"913497c86e1fd7122fec98b093c66895e1a8361995489d6c8fda3deef690c7ef\": rpc error: code = NotFound desc = could not find container \"913497c86e1fd7122fec98b093c66895e1a8361995489d6c8fda3deef690c7ef\": container with ID starting with 913497c86e1fd7122fec98b093c66895e1a8361995489d6c8fda3deef690c7ef not found: ID does not exist" Jan 20 17:51:15 crc kubenswrapper[4558]: I0120 17:51:15.704470 4558 scope.go:117] "RemoveContainer" containerID="b9249d1892527138cc235d58186150d42903080a9231b4c9631a998629089336" Jan 20 17:51:15 crc kubenswrapper[4558]: E0120 17:51:15.704978 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b9249d1892527138cc235d58186150d42903080a9231b4c9631a998629089336\": container with ID starting with b9249d1892527138cc235d58186150d42903080a9231b4c9631a998629089336 not found: ID does not exist" containerID="b9249d1892527138cc235d58186150d42903080a9231b4c9631a998629089336" Jan 20 17:51:15 crc kubenswrapper[4558]: I0120 17:51:15.705045 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b9249d1892527138cc235d58186150d42903080a9231b4c9631a998629089336"} err="failed to get container status \"b9249d1892527138cc235d58186150d42903080a9231b4c9631a998629089336\": rpc error: code = NotFound desc = could not find container \"b9249d1892527138cc235d58186150d42903080a9231b4c9631a998629089336\": container with ID starting with b9249d1892527138cc235d58186150d42903080a9231b4c9631a998629089336 not found: ID does not exist" Jan 20 17:51:15 crc kubenswrapper[4558]: I0120 17:51:15.774143 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/01835185-5ca5-467a-9362-b8de3e8665ed-public-tls-certs\") pod \"01835185-5ca5-467a-9362-b8de3e8665ed\" (UID: \"01835185-5ca5-467a-9362-b8de3e8665ed\") " Jan 20 17:51:15 crc kubenswrapper[4558]: I0120 17:51:15.774191 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/01835185-5ca5-467a-9362-b8de3e8665ed-scripts\") pod \"01835185-5ca5-467a-9362-b8de3e8665ed\" (UID: \"01835185-5ca5-467a-9362-b8de3e8665ed\") " Jan 20 17:51:15 crc kubenswrapper[4558]: I0120 17:51:15.774329 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01835185-5ca5-467a-9362-b8de3e8665ed-combined-ca-bundle\") pod \"01835185-5ca5-467a-9362-b8de3e8665ed\" (UID: 
\"01835185-5ca5-467a-9362-b8de3e8665ed\") " Jan 20 17:51:15 crc kubenswrapper[4558]: I0120 17:51:15.774372 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/01835185-5ca5-467a-9362-b8de3e8665ed-internal-tls-certs\") pod \"01835185-5ca5-467a-9362-b8de3e8665ed\" (UID: \"01835185-5ca5-467a-9362-b8de3e8665ed\") " Jan 20 17:51:15 crc kubenswrapper[4558]: I0120 17:51:15.774425 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/01835185-5ca5-467a-9362-b8de3e8665ed-etc-machine-id\") pod \"01835185-5ca5-467a-9362-b8de3e8665ed\" (UID: \"01835185-5ca5-467a-9362-b8de3e8665ed\") " Jan 20 17:51:15 crc kubenswrapper[4558]: I0120 17:51:15.774453 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/01835185-5ca5-467a-9362-b8de3e8665ed-logs\") pod \"01835185-5ca5-467a-9362-b8de3e8665ed\" (UID: \"01835185-5ca5-467a-9362-b8de3e8665ed\") " Jan 20 17:51:15 crc kubenswrapper[4558]: I0120 17:51:15.774468 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/01835185-5ca5-467a-9362-b8de3e8665ed-config-data-custom\") pod \"01835185-5ca5-467a-9362-b8de3e8665ed\" (UID: \"01835185-5ca5-467a-9362-b8de3e8665ed\") " Jan 20 17:51:15 crc kubenswrapper[4558]: I0120 17:51:15.774487 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/01835185-5ca5-467a-9362-b8de3e8665ed-config-data\") pod \"01835185-5ca5-467a-9362-b8de3e8665ed\" (UID: \"01835185-5ca5-467a-9362-b8de3e8665ed\") " Jan 20 17:51:15 crc kubenswrapper[4558]: I0120 17:51:15.774540 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q6cnv\" (UniqueName: \"kubernetes.io/projected/01835185-5ca5-467a-9362-b8de3e8665ed-kube-api-access-q6cnv\") pod \"01835185-5ca5-467a-9362-b8de3e8665ed\" (UID: \"01835185-5ca5-467a-9362-b8de3e8665ed\") " Jan 20 17:51:15 crc kubenswrapper[4558]: I0120 17:51:15.775535 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/01835185-5ca5-467a-9362-b8de3e8665ed-logs" (OuterVolumeSpecName: "logs") pod "01835185-5ca5-467a-9362-b8de3e8665ed" (UID: "01835185-5ca5-467a-9362-b8de3e8665ed"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:51:15 crc kubenswrapper[4558]: I0120 17:51:15.775603 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/01835185-5ca5-467a-9362-b8de3e8665ed-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "01835185-5ca5-467a-9362-b8de3e8665ed" (UID: "01835185-5ca5-467a-9362-b8de3e8665ed"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:51:15 crc kubenswrapper[4558]: I0120 17:51:15.780421 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01835185-5ca5-467a-9362-b8de3e8665ed-scripts" (OuterVolumeSpecName: "scripts") pod "01835185-5ca5-467a-9362-b8de3e8665ed" (UID: "01835185-5ca5-467a-9362-b8de3e8665ed"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:15 crc kubenswrapper[4558]: I0120 17:51:15.781693 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01835185-5ca5-467a-9362-b8de3e8665ed-kube-api-access-q6cnv" (OuterVolumeSpecName: "kube-api-access-q6cnv") pod "01835185-5ca5-467a-9362-b8de3e8665ed" (UID: "01835185-5ca5-467a-9362-b8de3e8665ed"). InnerVolumeSpecName "kube-api-access-q6cnv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:51:15 crc kubenswrapper[4558]: I0120 17:51:15.782243 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01835185-5ca5-467a-9362-b8de3e8665ed-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "01835185-5ca5-467a-9362-b8de3e8665ed" (UID: "01835185-5ca5-467a-9362-b8de3e8665ed"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:15 crc kubenswrapper[4558]: I0120 17:51:15.805892 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01835185-5ca5-467a-9362-b8de3e8665ed-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "01835185-5ca5-467a-9362-b8de3e8665ed" (UID: "01835185-5ca5-467a-9362-b8de3e8665ed"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:15 crc kubenswrapper[4558]: I0120 17:51:15.834862 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01835185-5ca5-467a-9362-b8de3e8665ed-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "01835185-5ca5-467a-9362-b8de3e8665ed" (UID: "01835185-5ca5-467a-9362-b8de3e8665ed"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:15 crc kubenswrapper[4558]: I0120 17:51:15.835681 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01835185-5ca5-467a-9362-b8de3e8665ed-config-data" (OuterVolumeSpecName: "config-data") pod "01835185-5ca5-467a-9362-b8de3e8665ed" (UID: "01835185-5ca5-467a-9362-b8de3e8665ed"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:15 crc kubenswrapper[4558]: I0120 17:51:15.844644 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01835185-5ca5-467a-9362-b8de3e8665ed-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "01835185-5ca5-467a-9362-b8de3e8665ed" (UID: "01835185-5ca5-467a-9362-b8de3e8665ed"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:15 crc kubenswrapper[4558]: I0120 17:51:15.877418 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/01835185-5ca5-467a-9362-b8de3e8665ed-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:15 crc kubenswrapper[4558]: I0120 17:51:15.877458 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/01835185-5ca5-467a-9362-b8de3e8665ed-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:15 crc kubenswrapper[4558]: I0120 17:51:15.877472 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01835185-5ca5-467a-9362-b8de3e8665ed-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:15 crc kubenswrapper[4558]: I0120 17:51:15.877485 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/01835185-5ca5-467a-9362-b8de3e8665ed-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:15 crc kubenswrapper[4558]: I0120 17:51:15.877496 4558 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/01835185-5ca5-467a-9362-b8de3e8665ed-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:15 crc kubenswrapper[4558]: I0120 17:51:15.877507 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/01835185-5ca5-467a-9362-b8de3e8665ed-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:15 crc kubenswrapper[4558]: I0120 17:51:15.877516 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/01835185-5ca5-467a-9362-b8de3e8665ed-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:15 crc kubenswrapper[4558]: I0120 17:51:15.877525 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/01835185-5ca5-467a-9362-b8de3e8665ed-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:15 crc kubenswrapper[4558]: I0120 17:51:15.877538 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q6cnv\" (UniqueName: \"kubernetes.io/projected/01835185-5ca5-467a-9362-b8de3e8665ed-kube-api-access-q6cnv\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:16 crc kubenswrapper[4558]: I0120 17:51:16.208714 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:51:16 crc kubenswrapper[4558]: I0120 17:51:16.258667 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:51:16 crc kubenswrapper[4558]: I0120 17:51:16.259012 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="d73e13db-6a2c-4205-aabc-1c7671e07895" containerName="glance-log" containerID="cri-o://38a50cd3fde0de52b7453bb0226b6b2af8717868a6f1905514fcfb820cdea7f5" gracePeriod=30 Jan 20 17:51:16 crc kubenswrapper[4558]: I0120 17:51:16.259437 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="d73e13db-6a2c-4205-aabc-1c7671e07895" containerName="glance-httpd" containerID="cri-o://74481199f7317052e5bb7b801f2c439c05786dab675a519ef2eba820484705a5" gracePeriod=30 Jan 20 
17:51:16 crc kubenswrapper[4558]: I0120 17:51:16.576762 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b2ed54c7-3249-4a62-8d98-21618cb6dab6" path="/var/lib/kubelet/pods/b2ed54c7-3249-4a62-8d98-21618cb6dab6/volumes" Jan 20 17:51:16 crc kubenswrapper[4558]: I0120 17:51:16.667729 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"01835185-5ca5-467a-9362-b8de3e8665ed","Type":"ContainerDied","Data":"231ea0e429a19476119c68f59681f7d882c14f24b98fab5408725cdb9668b576"} Jan 20 17:51:16 crc kubenswrapper[4558]: I0120 17:51:16.667815 4558 scope.go:117] "RemoveContainer" containerID="a8d84750152ad0dd7ec1ee98d8c4f4d1566801adb750bbf52fcbf29bfac3fcd6" Jan 20 17:51:16 crc kubenswrapper[4558]: I0120 17:51:16.667837 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:51:16 crc kubenswrapper[4558]: I0120 17:51:16.675380 4558 generic.go:334] "Generic (PLEG): container finished" podID="d73e13db-6a2c-4205-aabc-1c7671e07895" containerID="74481199f7317052e5bb7b801f2c439c05786dab675a519ef2eba820484705a5" exitCode=0 Jan 20 17:51:16 crc kubenswrapper[4558]: I0120 17:51:16.675438 4558 generic.go:334] "Generic (PLEG): container finished" podID="d73e13db-6a2c-4205-aabc-1c7671e07895" containerID="38a50cd3fde0de52b7453bb0226b6b2af8717868a6f1905514fcfb820cdea7f5" exitCode=143 Jan 20 17:51:16 crc kubenswrapper[4558]: I0120 17:51:16.675474 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"d73e13db-6a2c-4205-aabc-1c7671e07895","Type":"ContainerDied","Data":"74481199f7317052e5bb7b801f2c439c05786dab675a519ef2eba820484705a5"} Jan 20 17:51:16 crc kubenswrapper[4558]: I0120 17:51:16.675647 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"d73e13db-6a2c-4205-aabc-1c7671e07895","Type":"ContainerDied","Data":"38a50cd3fde0de52b7453bb0226b6b2af8717868a6f1905514fcfb820cdea7f5"} Jan 20 17:51:16 crc kubenswrapper[4558]: I0120 17:51:16.675750 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="82d257ef-8ea2-4bb0-9472-22d7531eea07" containerName="glance-log" containerID="cri-o://bef3d722731abd0d7f07da2d2d4e14517361d6bdfc566ecc88b996f262b48dc4" gracePeriod=30 Jan 20 17:51:16 crc kubenswrapper[4558]: I0120 17:51:16.675807 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="82d257ef-8ea2-4bb0-9472-22d7531eea07" containerName="glance-httpd" containerID="cri-o://3c6f9efef9d7e4583e21b7502bc31ea663e343c528df9cc67e2e58c3e8ff34fb" gracePeriod=30 Jan 20 17:51:16 crc kubenswrapper[4558]: I0120 17:51:16.698863 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:51:16 crc kubenswrapper[4558]: I0120 17:51:16.714065 4558 scope.go:117] "RemoveContainer" containerID="d6739fd95d667740a4669ec34550d5b750a50b385b78c2f22b17ab41a028c741" Jan 20 17:51:16 crc kubenswrapper[4558]: I0120 17:51:16.715211 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:51:16 crc kubenswrapper[4558]: I0120 17:51:16.722835 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:51:16 crc kubenswrapper[4558]: E0120 17:51:16.723314 4558 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2ed54c7-3249-4a62-8d98-21618cb6dab6" containerName="barbican-api" Jan 20 17:51:16 crc kubenswrapper[4558]: I0120 17:51:16.723328 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2ed54c7-3249-4a62-8d98-21618cb6dab6" containerName="barbican-api" Jan 20 17:51:16 crc kubenswrapper[4558]: E0120 17:51:16.723375 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01835185-5ca5-467a-9362-b8de3e8665ed" containerName="cinder-api" Jan 20 17:51:16 crc kubenswrapper[4558]: I0120 17:51:16.723380 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="01835185-5ca5-467a-9362-b8de3e8665ed" containerName="cinder-api" Jan 20 17:51:16 crc kubenswrapper[4558]: E0120 17:51:16.723395 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b2ed54c7-3249-4a62-8d98-21618cb6dab6" containerName="barbican-api-log" Jan 20 17:51:16 crc kubenswrapper[4558]: I0120 17:51:16.723401 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b2ed54c7-3249-4a62-8d98-21618cb6dab6" containerName="barbican-api-log" Jan 20 17:51:16 crc kubenswrapper[4558]: E0120 17:51:16.723417 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01835185-5ca5-467a-9362-b8de3e8665ed" containerName="cinder-api-log" Jan 20 17:51:16 crc kubenswrapper[4558]: I0120 17:51:16.723423 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="01835185-5ca5-467a-9362-b8de3e8665ed" containerName="cinder-api-log" Jan 20 17:51:16 crc kubenswrapper[4558]: I0120 17:51:16.723595 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b2ed54c7-3249-4a62-8d98-21618cb6dab6" containerName="barbican-api-log" Jan 20 17:51:16 crc kubenswrapper[4558]: I0120 17:51:16.723608 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="01835185-5ca5-467a-9362-b8de3e8665ed" containerName="cinder-api" Jan 20 17:51:16 crc kubenswrapper[4558]: I0120 17:51:16.723624 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="01835185-5ca5-467a-9362-b8de3e8665ed" containerName="cinder-api-log" Jan 20 17:51:16 crc kubenswrapper[4558]: I0120 17:51:16.723636 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b2ed54c7-3249-4a62-8d98-21618cb6dab6" containerName="barbican-api" Jan 20 17:51:16 crc kubenswrapper[4558]: I0120 17:51:16.724662 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:51:16 crc kubenswrapper[4558]: I0120 17:51:16.728035 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:51:16 crc kubenswrapper[4558]: I0120 17:51:16.730523 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-api-config-data" Jan 20 17:51:16 crc kubenswrapper[4558]: I0120 17:51:16.730703 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-cinder-public-svc" Jan 20 17:51:16 crc kubenswrapper[4558]: I0120 17:51:16.732011 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-cinder-internal-svc" Jan 20 17:51:16 crc kubenswrapper[4558]: I0120 17:51:16.814215 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dd518f67-fd09-4102-9934-594b833bf8c8-logs\") pod \"cinder-api-0\" (UID: \"dd518f67-fd09-4102-9934-594b833bf8c8\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:51:16 crc kubenswrapper[4558]: I0120 17:51:16.814541 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/dd518f67-fd09-4102-9934-594b833bf8c8-config-data-custom\") pod \"cinder-api-0\" (UID: \"dd518f67-fd09-4102-9934-594b833bf8c8\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:51:16 crc kubenswrapper[4558]: I0120 17:51:16.814595 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bblbr\" (UniqueName: \"kubernetes.io/projected/dd518f67-fd09-4102-9934-594b833bf8c8-kube-api-access-bblbr\") pod \"cinder-api-0\" (UID: \"dd518f67-fd09-4102-9934-594b833bf8c8\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:51:16 crc kubenswrapper[4558]: I0120 17:51:16.814629 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd518f67-fd09-4102-9934-594b833bf8c8-config-data\") pod \"cinder-api-0\" (UID: \"dd518f67-fd09-4102-9934-594b833bf8c8\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:51:16 crc kubenswrapper[4558]: I0120 17:51:16.814656 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd518f67-fd09-4102-9934-594b833bf8c8-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"dd518f67-fd09-4102-9934-594b833bf8c8\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:51:16 crc kubenswrapper[4558]: I0120 17:51:16.814675 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd518f67-fd09-4102-9934-594b833bf8c8-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"dd518f67-fd09-4102-9934-594b833bf8c8\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:51:16 crc kubenswrapper[4558]: I0120 17:51:16.814722 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dd518f67-fd09-4102-9934-594b833bf8c8-scripts\") pod \"cinder-api-0\" (UID: \"dd518f67-fd09-4102-9934-594b833bf8c8\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:51:16 crc kubenswrapper[4558]: I0120 17:51:16.814894 4558 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd518f67-fd09-4102-9934-594b833bf8c8-public-tls-certs\") pod \"cinder-api-0\" (UID: \"dd518f67-fd09-4102-9934-594b833bf8c8\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:51:16 crc kubenswrapper[4558]: I0120 17:51:16.815037 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/dd518f67-fd09-4102-9934-594b833bf8c8-etc-machine-id\") pod \"cinder-api-0\" (UID: \"dd518f67-fd09-4102-9934-594b833bf8c8\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:51:16 crc kubenswrapper[4558]: I0120 17:51:16.917006 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dd518f67-fd09-4102-9934-594b833bf8c8-logs\") pod \"cinder-api-0\" (UID: \"dd518f67-fd09-4102-9934-594b833bf8c8\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:51:16 crc kubenswrapper[4558]: I0120 17:51:16.917121 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/dd518f67-fd09-4102-9934-594b833bf8c8-config-data-custom\") pod \"cinder-api-0\" (UID: \"dd518f67-fd09-4102-9934-594b833bf8c8\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:51:16 crc kubenswrapper[4558]: I0120 17:51:16.917194 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bblbr\" (UniqueName: \"kubernetes.io/projected/dd518f67-fd09-4102-9934-594b833bf8c8-kube-api-access-bblbr\") pod \"cinder-api-0\" (UID: \"dd518f67-fd09-4102-9934-594b833bf8c8\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:51:16 crc kubenswrapper[4558]: I0120 17:51:16.917265 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd518f67-fd09-4102-9934-594b833bf8c8-config-data\") pod \"cinder-api-0\" (UID: \"dd518f67-fd09-4102-9934-594b833bf8c8\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:51:16 crc kubenswrapper[4558]: I0120 17:51:16.917303 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd518f67-fd09-4102-9934-594b833bf8c8-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"dd518f67-fd09-4102-9934-594b833bf8c8\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:51:16 crc kubenswrapper[4558]: I0120 17:51:16.917324 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd518f67-fd09-4102-9934-594b833bf8c8-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"dd518f67-fd09-4102-9934-594b833bf8c8\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:51:16 crc kubenswrapper[4558]: I0120 17:51:16.917377 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dd518f67-fd09-4102-9934-594b833bf8c8-scripts\") pod \"cinder-api-0\" (UID: \"dd518f67-fd09-4102-9934-594b833bf8c8\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:51:16 crc kubenswrapper[4558]: I0120 17:51:16.917560 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dd518f67-fd09-4102-9934-594b833bf8c8-logs\") pod \"cinder-api-0\" (UID: 
\"dd518f67-fd09-4102-9934-594b833bf8c8\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:51:16 crc kubenswrapper[4558]: I0120 17:51:16.917588 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd518f67-fd09-4102-9934-594b833bf8c8-public-tls-certs\") pod \"cinder-api-0\" (UID: \"dd518f67-fd09-4102-9934-594b833bf8c8\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:51:16 crc kubenswrapper[4558]: I0120 17:51:16.917769 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/dd518f67-fd09-4102-9934-594b833bf8c8-etc-machine-id\") pod \"cinder-api-0\" (UID: \"dd518f67-fd09-4102-9934-594b833bf8c8\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:51:16 crc kubenswrapper[4558]: I0120 17:51:16.917913 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/dd518f67-fd09-4102-9934-594b833bf8c8-etc-machine-id\") pod \"cinder-api-0\" (UID: \"dd518f67-fd09-4102-9934-594b833bf8c8\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:51:16 crc kubenswrapper[4558]: I0120 17:51:16.923530 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:51:16 crc kubenswrapper[4558]: I0120 17:51:16.945061 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dd518f67-fd09-4102-9934-594b833bf8c8-scripts\") pod \"cinder-api-0\" (UID: \"dd518f67-fd09-4102-9934-594b833bf8c8\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:51:16 crc kubenswrapper[4558]: I0120 17:51:16.945948 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd518f67-fd09-4102-9934-594b833bf8c8-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"dd518f67-fd09-4102-9934-594b833bf8c8\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:51:16 crc kubenswrapper[4558]: I0120 17:51:16.946240 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/dd518f67-fd09-4102-9934-594b833bf8c8-config-data-custom\") pod \"cinder-api-0\" (UID: \"dd518f67-fd09-4102-9934-594b833bf8c8\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:51:16 crc kubenswrapper[4558]: I0120 17:51:16.947681 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd518f67-fd09-4102-9934-594b833bf8c8-public-tls-certs\") pod \"cinder-api-0\" (UID: \"dd518f67-fd09-4102-9934-594b833bf8c8\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:51:16 crc kubenswrapper[4558]: I0120 17:51:16.950719 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd518f67-fd09-4102-9934-594b833bf8c8-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"dd518f67-fd09-4102-9934-594b833bf8c8\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:51:16 crc kubenswrapper[4558]: I0120 17:51:16.961685 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bblbr\" (UniqueName: \"kubernetes.io/projected/dd518f67-fd09-4102-9934-594b833bf8c8-kube-api-access-bblbr\") pod \"cinder-api-0\" (UID: \"dd518f67-fd09-4102-9934-594b833bf8c8\") " 
pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:51:16 crc kubenswrapper[4558]: I0120 17:51:16.968033 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd518f67-fd09-4102-9934-594b833bf8c8-config-data\") pod \"cinder-api-0\" (UID: \"dd518f67-fd09-4102-9934-594b833bf8c8\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:51:16 crc kubenswrapper[4558]: I0120 17:51:16.998717 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 17:51:17.018847 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"d73e13db-6a2c-4205-aabc-1c7671e07895\" (UID: \"d73e13db-6a2c-4205-aabc-1c7671e07895\") " Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 17:51:17.018901 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d73e13db-6a2c-4205-aabc-1c7671e07895-internal-tls-certs\") pod \"d73e13db-6a2c-4205-aabc-1c7671e07895\" (UID: \"d73e13db-6a2c-4205-aabc-1c7671e07895\") " Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 17:51:17.018937 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4tnnc\" (UniqueName: \"kubernetes.io/projected/d73e13db-6a2c-4205-aabc-1c7671e07895-kube-api-access-4tnnc\") pod \"d73e13db-6a2c-4205-aabc-1c7671e07895\" (UID: \"d73e13db-6a2c-4205-aabc-1c7671e07895\") " Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 17:51:17.018975 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d73e13db-6a2c-4205-aabc-1c7671e07895-scripts\") pod \"d73e13db-6a2c-4205-aabc-1c7671e07895\" (UID: \"d73e13db-6a2c-4205-aabc-1c7671e07895\") " Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 17:51:17.019195 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d73e13db-6a2c-4205-aabc-1c7671e07895-combined-ca-bundle\") pod \"d73e13db-6a2c-4205-aabc-1c7671e07895\" (UID: \"d73e13db-6a2c-4205-aabc-1c7671e07895\") " Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 17:51:17.019276 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d73e13db-6a2c-4205-aabc-1c7671e07895-httpd-run\") pod \"d73e13db-6a2c-4205-aabc-1c7671e07895\" (UID: \"d73e13db-6a2c-4205-aabc-1c7671e07895\") " Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 17:51:17.019334 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d73e13db-6a2c-4205-aabc-1c7671e07895-config-data\") pod \"d73e13db-6a2c-4205-aabc-1c7671e07895\" (UID: \"d73e13db-6a2c-4205-aabc-1c7671e07895\") " Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 17:51:17.019411 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d73e13db-6a2c-4205-aabc-1c7671e07895-logs\") pod \"d73e13db-6a2c-4205-aabc-1c7671e07895\" (UID: \"d73e13db-6a2c-4205-aabc-1c7671e07895\") " Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 17:51:17.021480 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d73e13db-6a2c-4205-aabc-1c7671e07895-logs" 
(OuterVolumeSpecName: "logs") pod "d73e13db-6a2c-4205-aabc-1c7671e07895" (UID: "d73e13db-6a2c-4205-aabc-1c7671e07895"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 17:51:17.024420 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d73e13db-6a2c-4205-aabc-1c7671e07895-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "d73e13db-6a2c-4205-aabc-1c7671e07895" (UID: "d73e13db-6a2c-4205-aabc-1c7671e07895"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 17:51:17.052296 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d73e13db-6a2c-4205-aabc-1c7671e07895-scripts" (OuterVolumeSpecName: "scripts") pod "d73e13db-6a2c-4205-aabc-1c7671e07895" (UID: "d73e13db-6a2c-4205-aabc-1c7671e07895"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 17:51:17.053112 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "glance") pod "d73e13db-6a2c-4205-aabc-1c7671e07895" (UID: "d73e13db-6a2c-4205-aabc-1c7671e07895"). InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 17:51:17.054320 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d73e13db-6a2c-4205-aabc-1c7671e07895-kube-api-access-4tnnc" (OuterVolumeSpecName: "kube-api-access-4tnnc") pod "d73e13db-6a2c-4205-aabc-1c7671e07895" (UID: "d73e13db-6a2c-4205-aabc-1c7671e07895"). InnerVolumeSpecName "kube-api-access-4tnnc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 17:51:17.088408 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d73e13db-6a2c-4205-aabc-1c7671e07895-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d73e13db-6a2c-4205-aabc-1c7671e07895" (UID: "d73e13db-6a2c-4205-aabc-1c7671e07895"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 17:51:17.107365 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d73e13db-6a2c-4205-aabc-1c7671e07895-config-data" (OuterVolumeSpecName: "config-data") pod "d73e13db-6a2c-4205-aabc-1c7671e07895" (UID: "d73e13db-6a2c-4205-aabc-1c7671e07895"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 17:51:17.112005 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d73e13db-6a2c-4205-aabc-1c7671e07895-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "d73e13db-6a2c-4205-aabc-1c7671e07895" (UID: "d73e13db-6a2c-4205-aabc-1c7671e07895"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 17:51:17.123677 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d73e13db-6a2c-4205-aabc-1c7671e07895-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 17:51:17.123710 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d73e13db-6a2c-4205-aabc-1c7671e07895-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 17:51:17.123722 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d73e13db-6a2c-4205-aabc-1c7671e07895-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 17:51:17.123733 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d73e13db-6a2c-4205-aabc-1c7671e07895-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 17:51:17.123756 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 17:51:17.123769 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d73e13db-6a2c-4205-aabc-1c7671e07895-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 17:51:17.123780 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4tnnc\" (UniqueName: \"kubernetes.io/projected/d73e13db-6a2c-4205-aabc-1c7671e07895-kube-api-access-4tnnc\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 17:51:17.123790 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d73e13db-6a2c-4205-aabc-1c7671e07895-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 17:51:17.139685 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 17:51:17.198495 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 17:51:17.225614 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 17:51:17.565823 4558 scope.go:117] "RemoveContainer" containerID="a9c70afb4065cfa5e1226d107bf8a809ffc6655bc0ed256d32ec2a30414f56e5" Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 17:51:17.603830 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 17:51:17.687406 4558 generic.go:334] "Generic (PLEG): container finished" podID="82d257ef-8ea2-4bb0-9472-22d7531eea07" containerID="3c6f9efef9d7e4583e21b7502bc31ea663e343c528df9cc67e2e58c3e8ff34fb" exitCode=0 Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 17:51:17.687440 4558 generic.go:334] "Generic (PLEG): container finished" podID="82d257ef-8ea2-4bb0-9472-22d7531eea07" containerID="bef3d722731abd0d7f07da2d2d4e14517361d6bdfc566ecc88b996f262b48dc4" exitCode=143 Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 17:51:17.687485 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"82d257ef-8ea2-4bb0-9472-22d7531eea07","Type":"ContainerDied","Data":"3c6f9efef9d7e4583e21b7502bc31ea663e343c528df9cc67e2e58c3e8ff34fb"} Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 17:51:17.687517 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"82d257ef-8ea2-4bb0-9472-22d7531eea07","Type":"ContainerDied","Data":"bef3d722731abd0d7f07da2d2d4e14517361d6bdfc566ecc88b996f262b48dc4"} Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 17:51:17.690074 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"d73e13db-6a2c-4205-aabc-1c7671e07895","Type":"ContainerDied","Data":"b2a7315c5227f9b741a26482cd0fe954f372a0ca69f99d2c3c40d61748d42885"} Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 17:51:17.690108 4558 scope.go:117] "RemoveContainer" containerID="74481199f7317052e5bb7b801f2c439c05786dab675a519ef2eba820484705a5" Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 17:51:17.690231 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 17:51:17.721496 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 17:51:17.732701 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 17:51:17.759719 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:51:17 crc kubenswrapper[4558]: E0120 17:51:17.760253 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d73e13db-6a2c-4205-aabc-1c7671e07895" containerName="glance-httpd" Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 17:51:17.760271 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d73e13db-6a2c-4205-aabc-1c7671e07895" containerName="glance-httpd" Jan 20 17:51:17 crc kubenswrapper[4558]: E0120 17:51:17.760312 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d73e13db-6a2c-4205-aabc-1c7671e07895" containerName="glance-log" Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 17:51:17.760320 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d73e13db-6a2c-4205-aabc-1c7671e07895" containerName="glance-log" Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 17:51:17.760529 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d73e13db-6a2c-4205-aabc-1c7671e07895" containerName="glance-log" Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 17:51:17.760551 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d73e13db-6a2c-4205-aabc-1c7671e07895" containerName="glance-httpd" Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 17:51:17.762042 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 17:51:17.765055 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-internal-config-data" Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 17:51:17.765329 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-internal-svc" Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 17:51:17.794484 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 17:51:17.809724 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/placement-59975f6898-zxn8m" podUID="a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2" containerName="placement-log" probeResult="failure" output="Get \"https://10.217.1.126:8778/\": read tcp 10.217.0.2:41804->10.217.1.126:8778: read: connection reset by peer" Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 17:51:17.809950 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/placement-59975f6898-zxn8m" podUID="a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2" containerName="placement-api" probeResult="failure" output="Get \"https://10.217.1.126:8778/\": read tcp 10.217.0.2:41788->10.217.1.126:8778: read: connection reset by peer" Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 17:51:17.839315 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f30aa605-d974-4d0d-aa96-5cb0d96897d6-logs\") pod \"glance-default-internal-api-0\" (UID: \"f30aa605-d974-4d0d-aa96-5cb0d96897d6\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 17:51:17.839365 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f30aa605-d974-4d0d-aa96-5cb0d96897d6-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"f30aa605-d974-4d0d-aa96-5cb0d96897d6\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 17:51:17.839632 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xhmrl\" (UniqueName: \"kubernetes.io/projected/f30aa605-d974-4d0d-aa96-5cb0d96897d6-kube-api-access-xhmrl\") pod \"glance-default-internal-api-0\" (UID: \"f30aa605-d974-4d0d-aa96-5cb0d96897d6\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 17:51:17.839663 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f30aa605-d974-4d0d-aa96-5cb0d96897d6-scripts\") pod \"glance-default-internal-api-0\" (UID: \"f30aa605-d974-4d0d-aa96-5cb0d96897d6\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 17:51:17.839731 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f30aa605-d974-4d0d-aa96-5cb0d96897d6-config-data\") pod \"glance-default-internal-api-0\" (UID: \"f30aa605-d974-4d0d-aa96-5cb0d96897d6\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 
17:51:17.839797 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f30aa605-d974-4d0d-aa96-5cb0d96897d6-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"f30aa605-d974-4d0d-aa96-5cb0d96897d6\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 17:51:17.839823 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f30aa605-d974-4d0d-aa96-5cb0d96897d6-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"f30aa605-d974-4d0d-aa96-5cb0d96897d6\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 17:51:17.839973 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"f30aa605-d974-4d0d-aa96-5cb0d96897d6\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 17:51:17.841378 4558 scope.go:117] "RemoveContainer" containerID="38a50cd3fde0de52b7453bb0226b6b2af8717868a6f1905514fcfb820cdea7f5" Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 17:51:17.910266 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 17:51:17.941825 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xhmrl\" (UniqueName: \"kubernetes.io/projected/f30aa605-d974-4d0d-aa96-5cb0d96897d6-kube-api-access-xhmrl\") pod \"glance-default-internal-api-0\" (UID: \"f30aa605-d974-4d0d-aa96-5cb0d96897d6\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 17:51:17.942577 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f30aa605-d974-4d0d-aa96-5cb0d96897d6-scripts\") pod \"glance-default-internal-api-0\" (UID: \"f30aa605-d974-4d0d-aa96-5cb0d96897d6\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 17:51:17.943494 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f30aa605-d974-4d0d-aa96-5cb0d96897d6-config-data\") pod \"glance-default-internal-api-0\" (UID: \"f30aa605-d974-4d0d-aa96-5cb0d96897d6\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 17:51:17.943617 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f30aa605-d974-4d0d-aa96-5cb0d96897d6-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"f30aa605-d974-4d0d-aa96-5cb0d96897d6\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 17:51:17.943702 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f30aa605-d974-4d0d-aa96-5cb0d96897d6-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"f30aa605-d974-4d0d-aa96-5cb0d96897d6\") " 
pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 17:51:17.944105 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"f30aa605-d974-4d0d-aa96-5cb0d96897d6\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 17:51:17.944267 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f30aa605-d974-4d0d-aa96-5cb0d96897d6-logs\") pod \"glance-default-internal-api-0\" (UID: \"f30aa605-d974-4d0d-aa96-5cb0d96897d6\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 17:51:17.944373 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f30aa605-d974-4d0d-aa96-5cb0d96897d6-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"f30aa605-d974-4d0d-aa96-5cb0d96897d6\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 17:51:17.944904 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f30aa605-d974-4d0d-aa96-5cb0d96897d6-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"f30aa605-d974-4d0d-aa96-5cb0d96897d6\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 17:51:17.950459 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f30aa605-d974-4d0d-aa96-5cb0d96897d6-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"f30aa605-d974-4d0d-aa96-5cb0d96897d6\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 17:51:17.950749 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f30aa605-d974-4d0d-aa96-5cb0d96897d6-logs\") pod \"glance-default-internal-api-0\" (UID: \"f30aa605-d974-4d0d-aa96-5cb0d96897d6\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 17:51:17.951870 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"f30aa605-d974-4d0d-aa96-5cb0d96897d6\") device mount path \"/mnt/openstack/pv11\"" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 17:51:17.952891 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f30aa605-d974-4d0d-aa96-5cb0d96897d6-scripts\") pod \"glance-default-internal-api-0\" (UID: \"f30aa605-d974-4d0d-aa96-5cb0d96897d6\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 17:51:17.964853 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f30aa605-d974-4d0d-aa96-5cb0d96897d6-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"f30aa605-d974-4d0d-aa96-5cb0d96897d6\") " 
pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 17:51:17.965457 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xhmrl\" (UniqueName: \"kubernetes.io/projected/f30aa605-d974-4d0d-aa96-5cb0d96897d6-kube-api-access-xhmrl\") pod \"glance-default-internal-api-0\" (UID: \"f30aa605-d974-4d0d-aa96-5cb0d96897d6\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 17:51:17.970182 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f30aa605-d974-4d0d-aa96-5cb0d96897d6-config-data\") pod \"glance-default-internal-api-0\" (UID: \"f30aa605-d974-4d0d-aa96-5cb0d96897d6\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:51:17 crc kubenswrapper[4558]: I0120 17:51:17.994484 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"glance-default-internal-api-0\" (UID: \"f30aa605-d974-4d0d-aa96-5cb0d96897d6\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.106321 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.140212 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.253274 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/82d257ef-8ea2-4bb0-9472-22d7531eea07-httpd-run\") pod \"82d257ef-8ea2-4bb0-9472-22d7531eea07\" (UID: \"82d257ef-8ea2-4bb0-9472-22d7531eea07\") " Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.253373 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/82d257ef-8ea2-4bb0-9472-22d7531eea07-config-data\") pod \"82d257ef-8ea2-4bb0-9472-22d7531eea07\" (UID: \"82d257ef-8ea2-4bb0-9472-22d7531eea07\") " Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.253445 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/82d257ef-8ea2-4bb0-9472-22d7531eea07-public-tls-certs\") pod \"82d257ef-8ea2-4bb0-9472-22d7531eea07\" (UID: \"82d257ef-8ea2-4bb0-9472-22d7531eea07\") " Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.253476 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/82d257ef-8ea2-4bb0-9472-22d7531eea07-combined-ca-bundle\") pod \"82d257ef-8ea2-4bb0-9472-22d7531eea07\" (UID: \"82d257ef-8ea2-4bb0-9472-22d7531eea07\") " Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.253530 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"82d257ef-8ea2-4bb0-9472-22d7531eea07\" (UID: \"82d257ef-8ea2-4bb0-9472-22d7531eea07\") " Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.253709 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7lflm\" (UniqueName: 
\"kubernetes.io/projected/82d257ef-8ea2-4bb0-9472-22d7531eea07-kube-api-access-7lflm\") pod \"82d257ef-8ea2-4bb0-9472-22d7531eea07\" (UID: \"82d257ef-8ea2-4bb0-9472-22d7531eea07\") " Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.253757 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/82d257ef-8ea2-4bb0-9472-22d7531eea07-scripts\") pod \"82d257ef-8ea2-4bb0-9472-22d7531eea07\" (UID: \"82d257ef-8ea2-4bb0-9472-22d7531eea07\") " Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.253850 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/82d257ef-8ea2-4bb0-9472-22d7531eea07-logs\") pod \"82d257ef-8ea2-4bb0-9472-22d7531eea07\" (UID: \"82d257ef-8ea2-4bb0-9472-22d7531eea07\") " Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.254059 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/82d257ef-8ea2-4bb0-9472-22d7531eea07-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "82d257ef-8ea2-4bb0-9472-22d7531eea07" (UID: "82d257ef-8ea2-4bb0-9472-22d7531eea07"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.254402 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/82d257ef-8ea2-4bb0-9472-22d7531eea07-logs" (OuterVolumeSpecName: "logs") pod "82d257ef-8ea2-4bb0-9472-22d7531eea07" (UID: "82d257ef-8ea2-4bb0-9472-22d7531eea07"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.255321 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/82d257ef-8ea2-4bb0-9472-22d7531eea07-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.255346 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/82d257ef-8ea2-4bb0-9472-22d7531eea07-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.294645 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "glance") pod "82d257ef-8ea2-4bb0-9472-22d7531eea07" (UID: "82d257ef-8ea2-4bb0-9472-22d7531eea07"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.295487 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/82d257ef-8ea2-4bb0-9472-22d7531eea07-kube-api-access-7lflm" (OuterVolumeSpecName: "kube-api-access-7lflm") pod "82d257ef-8ea2-4bb0-9472-22d7531eea07" (UID: "82d257ef-8ea2-4bb0-9472-22d7531eea07"). InnerVolumeSpecName "kube-api-access-7lflm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.303689 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/82d257ef-8ea2-4bb0-9472-22d7531eea07-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "82d257ef-8ea2-4bb0-9472-22d7531eea07" (UID: "82d257ef-8ea2-4bb0-9472-22d7531eea07"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.308033 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/82d257ef-8ea2-4bb0-9472-22d7531eea07-scripts" (OuterVolumeSpecName: "scripts") pod "82d257ef-8ea2-4bb0-9472-22d7531eea07" (UID: "82d257ef-8ea2-4bb0-9472-22d7531eea07"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.336134 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-59975f6898-zxn8m" Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.342828 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/82d257ef-8ea2-4bb0-9472-22d7531eea07-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "82d257ef-8ea2-4bb0-9472-22d7531eea07" (UID: "82d257ef-8ea2-4bb0-9472-22d7531eea07"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.359123 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2-internal-tls-certs\") pod \"a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2\" (UID: \"a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2\") " Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.359206 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2-combined-ca-bundle\") pod \"a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2\" (UID: \"a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2\") " Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.359263 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnpsv\" (UniqueName: \"kubernetes.io/projected/a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2-kube-api-access-mnpsv\") pod \"a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2\" (UID: \"a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2\") " Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.359310 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2-public-tls-certs\") pod \"a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2\" (UID: \"a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2\") " Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.359368 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2-config-data\") pod \"a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2\" (UID: \"a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2\") " Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.359394 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2-scripts\") pod \"a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2\" (UID: \"a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2\") " Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.359506 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2-logs\") pod \"a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2\" (UID: 
\"a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2\") " Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.360193 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/82d257ef-8ea2-4bb0-9472-22d7531eea07-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.360209 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/82d257ef-8ea2-4bb0-9472-22d7531eea07-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.360230 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.360239 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7lflm\" (UniqueName: \"kubernetes.io/projected/82d257ef-8ea2-4bb0-9472-22d7531eea07-kube-api-access-7lflm\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.360250 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/82d257ef-8ea2-4bb0-9472-22d7531eea07-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.369926 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2-scripts" (OuterVolumeSpecName: "scripts") pod "a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2" (UID: "a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.371892 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2-logs" (OuterVolumeSpecName: "logs") pod "a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2" (UID: "a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.373106 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2-kube-api-access-mnpsv" (OuterVolumeSpecName: "kube-api-access-mnpsv") pod "a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2" (UID: "a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2"). InnerVolumeSpecName "kube-api-access-mnpsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.374451 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/82d257ef-8ea2-4bb0-9472-22d7531eea07-config-data" (OuterVolumeSpecName: "config-data") pod "82d257ef-8ea2-4bb0-9472-22d7531eea07" (UID: "82d257ef-8ea2-4bb0-9472-22d7531eea07"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.398477 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.445267 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2-config-data" (OuterVolumeSpecName: "config-data") pod "a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2" (UID: "a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.455270 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2" (UID: "a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.462090 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnpsv\" (UniqueName: \"kubernetes.io/projected/a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2-kube-api-access-mnpsv\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.462123 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.462136 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.462146 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.462156 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/82d257ef-8ea2-4bb0-9472-22d7531eea07-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.462180 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.462189 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.510585 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2" (UID: "a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.529358 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2" (UID: "a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.564114 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.564144 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.567654 4558 scope.go:117] "RemoveContainer" containerID="e689e24841d6a2881535a371383cc0c5b2611638b0371340716b92a4ebfc8207" Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.578896 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01835185-5ca5-467a-9362-b8de3e8665ed" path="/var/lib/kubelet/pods/01835185-5ca5-467a-9362-b8de3e8665ed/volumes" Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.579711 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d73e13db-6a2c-4205-aabc-1c7671e07895" path="/var/lib/kubelet/pods/d73e13db-6a2c-4205-aabc-1c7671e07895/volumes" Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.712516 4558 generic.go:334] "Generic (PLEG): container finished" podID="a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2" containerID="c423e5ba761c266b37e8a9914f09c46db8c16aa75089222ed91b3723dd5c4ca5" exitCode=0 Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.712687 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-59975f6898-zxn8m" Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.713894 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-59975f6898-zxn8m" event={"ID":"a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2","Type":"ContainerDied","Data":"c423e5ba761c266b37e8a9914f09c46db8c16aa75089222ed91b3723dd5c4ca5"} Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.713932 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-59975f6898-zxn8m" event={"ID":"a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2","Type":"ContainerDied","Data":"46acf2fe99b48c1acbe706dbb538dac56d4267574506bae9dae6740549980f96"} Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.713951 4558 scope.go:117] "RemoveContainer" containerID="c423e5ba761c266b37e8a9914f09c46db8c16aa75089222ed91b3723dd5c4ca5" Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.719765 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.719758 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"82d257ef-8ea2-4bb0-9472-22d7531eea07","Type":"ContainerDied","Data":"c1099e7e0b56f6957d2dba5ba731274d5416bf0b20483c8ef7ee43b382df1d66"} Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.724121 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"dd518f67-fd09-4102-9934-594b833bf8c8","Type":"ContainerStarted","Data":"c1f8d8bf07ca82f670329868a579e9d926a00b6090b23758e7d9b8f7b4cae4d3"} Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.724150 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"dd518f67-fd09-4102-9934-594b833bf8c8","Type":"ContainerStarted","Data":"37c7c64075849d1c723fe292d572371554c228e6e6e06063a5f3fe9f7008213e"} Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.746359 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"924fa7ce-8d60-4b2f-b62b-d5e146474f71","Type":"ContainerStarted","Data":"70539f03bf1b2ace01f24a9072ce8275fb4f5d92a181a991e07e1469387f4f02"} Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.747140 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.748474 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.927209 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-59975f6898-zxn8m"] Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.937282 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/placement-59975f6898-zxn8m"] Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.944387 4558 scope.go:117] "RemoveContainer" containerID="845fa35e74757f51248608df722fce7c1028166d79229d0cc2abebe77b4abcbb" Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.947769 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.952726 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.970962 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:51:18 crc kubenswrapper[4558]: E0120 17:51:18.971516 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="82d257ef-8ea2-4bb0-9472-22d7531eea07" containerName="glance-httpd" Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.971531 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="82d257ef-8ea2-4bb0-9472-22d7531eea07" containerName="glance-httpd" Jan 20 17:51:18 crc kubenswrapper[4558]: E0120 17:51:18.971546 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2" containerName="placement-log" Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.971552 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2" containerName="placement-log" Jan 
20 17:51:18 crc kubenswrapper[4558]: E0120 17:51:18.971581 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="82d257ef-8ea2-4bb0-9472-22d7531eea07" containerName="glance-log" Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.971587 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="82d257ef-8ea2-4bb0-9472-22d7531eea07" containerName="glance-log" Jan 20 17:51:18 crc kubenswrapper[4558]: E0120 17:51:18.971602 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2" containerName="placement-api" Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.971609 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2" containerName="placement-api" Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.971799 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2" containerName="placement-api" Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.971808 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="82d257ef-8ea2-4bb0-9472-22d7531eea07" containerName="glance-httpd" Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.971825 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2" containerName="placement-log" Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.971836 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="82d257ef-8ea2-4bb0-9472-22d7531eea07" containerName="glance-log" Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.972947 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.976114 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-public-svc" Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.976277 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-external-config-data" Jan 20 17:51:18 crc kubenswrapper[4558]: I0120 17:51:18.982681 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:51:19 crc kubenswrapper[4558]: I0120 17:51:19.026113 4558 scope.go:117] "RemoveContainer" containerID="c423e5ba761c266b37e8a9914f09c46db8c16aa75089222ed91b3723dd5c4ca5" Jan 20 17:51:19 crc kubenswrapper[4558]: E0120 17:51:19.026718 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c423e5ba761c266b37e8a9914f09c46db8c16aa75089222ed91b3723dd5c4ca5\": container with ID starting with c423e5ba761c266b37e8a9914f09c46db8c16aa75089222ed91b3723dd5c4ca5 not found: ID does not exist" containerID="c423e5ba761c266b37e8a9914f09c46db8c16aa75089222ed91b3723dd5c4ca5" Jan 20 17:51:19 crc kubenswrapper[4558]: I0120 17:51:19.026750 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c423e5ba761c266b37e8a9914f09c46db8c16aa75089222ed91b3723dd5c4ca5"} err="failed to get container status \"c423e5ba761c266b37e8a9914f09c46db8c16aa75089222ed91b3723dd5c4ca5\": rpc error: code = NotFound desc = could not find container \"c423e5ba761c266b37e8a9914f09c46db8c16aa75089222ed91b3723dd5c4ca5\": container with ID starting with c423e5ba761c266b37e8a9914f09c46db8c16aa75089222ed91b3723dd5c4ca5 not found: ID does 
not exist" Jan 20 17:51:19 crc kubenswrapper[4558]: I0120 17:51:19.026791 4558 scope.go:117] "RemoveContainer" containerID="845fa35e74757f51248608df722fce7c1028166d79229d0cc2abebe77b4abcbb" Jan 20 17:51:19 crc kubenswrapper[4558]: E0120 17:51:19.027479 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"845fa35e74757f51248608df722fce7c1028166d79229d0cc2abebe77b4abcbb\": container with ID starting with 845fa35e74757f51248608df722fce7c1028166d79229d0cc2abebe77b4abcbb not found: ID does not exist" containerID="845fa35e74757f51248608df722fce7c1028166d79229d0cc2abebe77b4abcbb" Jan 20 17:51:19 crc kubenswrapper[4558]: I0120 17:51:19.027520 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"845fa35e74757f51248608df722fce7c1028166d79229d0cc2abebe77b4abcbb"} err="failed to get container status \"845fa35e74757f51248608df722fce7c1028166d79229d0cc2abebe77b4abcbb\": rpc error: code = NotFound desc = could not find container \"845fa35e74757f51248608df722fce7c1028166d79229d0cc2abebe77b4abcbb\": container with ID starting with 845fa35e74757f51248608df722fce7c1028166d79229d0cc2abebe77b4abcbb not found: ID does not exist" Jan 20 17:51:19 crc kubenswrapper[4558]: I0120 17:51:19.027535 4558 scope.go:117] "RemoveContainer" containerID="3c6f9efef9d7e4583e21b7502bc31ea663e343c528df9cc67e2e58c3e8ff34fb" Jan 20 17:51:19 crc kubenswrapper[4558]: I0120 17:51:19.051411 4558 scope.go:117] "RemoveContainer" containerID="bef3d722731abd0d7f07da2d2d4e14517361d6bdfc566ecc88b996f262b48dc4" Jan 20 17:51:19 crc kubenswrapper[4558]: I0120 17:51:19.074064 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"7d535532-403f-4d55-8138-7e09287d0108\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:51:19 crc kubenswrapper[4558]: I0120 17:51:19.074114 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7d535532-403f-4d55-8138-7e09287d0108-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"7d535532-403f-4d55-8138-7e09287d0108\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:51:19 crc kubenswrapper[4558]: I0120 17:51:19.074192 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d535532-403f-4d55-8138-7e09287d0108-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"7d535532-403f-4d55-8138-7e09287d0108\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:51:19 crc kubenswrapper[4558]: I0120 17:51:19.074280 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7d535532-403f-4d55-8138-7e09287d0108-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"7d535532-403f-4d55-8138-7e09287d0108\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:51:19 crc kubenswrapper[4558]: I0120 17:51:19.074308 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k7t8d\" (UniqueName: 
\"kubernetes.io/projected/7d535532-403f-4d55-8138-7e09287d0108-kube-api-access-k7t8d\") pod \"glance-default-external-api-0\" (UID: \"7d535532-403f-4d55-8138-7e09287d0108\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:51:19 crc kubenswrapper[4558]: I0120 17:51:19.074336 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7d535532-403f-4d55-8138-7e09287d0108-scripts\") pod \"glance-default-external-api-0\" (UID: \"7d535532-403f-4d55-8138-7e09287d0108\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:51:19 crc kubenswrapper[4558]: I0120 17:51:19.074445 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7d535532-403f-4d55-8138-7e09287d0108-logs\") pod \"glance-default-external-api-0\" (UID: \"7d535532-403f-4d55-8138-7e09287d0108\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:51:19 crc kubenswrapper[4558]: I0120 17:51:19.074467 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d535532-403f-4d55-8138-7e09287d0108-config-data\") pod \"glance-default-external-api-0\" (UID: \"7d535532-403f-4d55-8138-7e09287d0108\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:51:19 crc kubenswrapper[4558]: I0120 17:51:19.179133 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7d535532-403f-4d55-8138-7e09287d0108-logs\") pod \"glance-default-external-api-0\" (UID: \"7d535532-403f-4d55-8138-7e09287d0108\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:51:19 crc kubenswrapper[4558]: I0120 17:51:19.179188 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d535532-403f-4d55-8138-7e09287d0108-config-data\") pod \"glance-default-external-api-0\" (UID: \"7d535532-403f-4d55-8138-7e09287d0108\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:51:19 crc kubenswrapper[4558]: I0120 17:51:19.179227 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"7d535532-403f-4d55-8138-7e09287d0108\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:51:19 crc kubenswrapper[4558]: I0120 17:51:19.179288 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7d535532-403f-4d55-8138-7e09287d0108-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"7d535532-403f-4d55-8138-7e09287d0108\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:51:19 crc kubenswrapper[4558]: I0120 17:51:19.179350 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d535532-403f-4d55-8138-7e09287d0108-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"7d535532-403f-4d55-8138-7e09287d0108\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:51:19 crc kubenswrapper[4558]: I0120 17:51:19.179490 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7d535532-403f-4d55-8138-7e09287d0108-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"7d535532-403f-4d55-8138-7e09287d0108\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:51:19 crc kubenswrapper[4558]: I0120 17:51:19.179513 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k7t8d\" (UniqueName: \"kubernetes.io/projected/7d535532-403f-4d55-8138-7e09287d0108-kube-api-access-k7t8d\") pod \"glance-default-external-api-0\" (UID: \"7d535532-403f-4d55-8138-7e09287d0108\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:51:19 crc kubenswrapper[4558]: I0120 17:51:19.179569 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7d535532-403f-4d55-8138-7e09287d0108-scripts\") pod \"glance-default-external-api-0\" (UID: \"7d535532-403f-4d55-8138-7e09287d0108\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:51:19 crc kubenswrapper[4558]: I0120 17:51:19.179738 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7d535532-403f-4d55-8138-7e09287d0108-logs\") pod \"glance-default-external-api-0\" (UID: \"7d535532-403f-4d55-8138-7e09287d0108\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:51:19 crc kubenswrapper[4558]: I0120 17:51:19.179768 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7d535532-403f-4d55-8138-7e09287d0108-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"7d535532-403f-4d55-8138-7e09287d0108\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:51:19 crc kubenswrapper[4558]: I0120 17:51:19.180100 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"7d535532-403f-4d55-8138-7e09287d0108\") device mount path \"/mnt/openstack/pv09\"" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:51:19 crc kubenswrapper[4558]: I0120 17:51:19.507468 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7d535532-403f-4d55-8138-7e09287d0108-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"7d535532-403f-4d55-8138-7e09287d0108\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:51:19 crc kubenswrapper[4558]: I0120 17:51:19.508253 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d535532-403f-4d55-8138-7e09287d0108-config-data\") pod \"glance-default-external-api-0\" (UID: \"7d535532-403f-4d55-8138-7e09287d0108\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:51:19 crc kubenswrapper[4558]: I0120 17:51:19.508542 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d535532-403f-4d55-8138-7e09287d0108-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"7d535532-403f-4d55-8138-7e09287d0108\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:51:19 crc kubenswrapper[4558]: I0120 17:51:19.509864 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7d535532-403f-4d55-8138-7e09287d0108-scripts\") pod \"glance-default-external-api-0\" (UID: \"7d535532-403f-4d55-8138-7e09287d0108\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:51:19 crc kubenswrapper[4558]: I0120 17:51:19.510606 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k7t8d\" (UniqueName: \"kubernetes.io/projected/7d535532-403f-4d55-8138-7e09287d0108-kube-api-access-k7t8d\") pod \"glance-default-external-api-0\" (UID: \"7d535532-403f-4d55-8138-7e09287d0108\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:51:19 crc kubenswrapper[4558]: I0120 17:51:19.548516 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"glance-default-external-api-0\" (UID: \"7d535532-403f-4d55-8138-7e09287d0108\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:51:19 crc kubenswrapper[4558]: I0120 17:51:19.598697 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:51:19 crc kubenswrapper[4558]: I0120 17:51:19.791157 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"7c16f0c4-1462-4817-9f74-9e3d93193867","Type":"ContainerStarted","Data":"a83e386e23548bf8e99d8a3f739f604866a2baad6493f364fe0412f39f677f52"} Jan 20 17:51:19 crc kubenswrapper[4558]: I0120 17:51:19.792788 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:51:19 crc kubenswrapper[4558]: I0120 17:51:19.816233 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"dd518f67-fd09-4102-9934-594b833bf8c8","Type":"ContainerStarted","Data":"e7e61e40b3ceb7bcd0050ea1fa8a9bb9f8c5958158a076d7e7954bdc11cae1ef"} Jan 20 17:51:19 crc kubenswrapper[4558]: I0120 17:51:19.816383 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:51:19 crc kubenswrapper[4558]: I0120 17:51:19.826511 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"f30aa605-d974-4d0d-aa96-5cb0d96897d6","Type":"ContainerStarted","Data":"519c148165b8e5d8031cd18cf91e1dcfa134e4235207cac41f7dc13725bac816"} Jan 20 17:51:19 crc kubenswrapper[4558]: I0120 17:51:19.826544 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"f30aa605-d974-4d0d-aa96-5cb0d96897d6","Type":"ContainerStarted","Data":"742e919ef5796dd179f11bc4cd5cd77a5cafb4a774f5c6bb526160835ec72764"} Jan 20 17:51:19 crc kubenswrapper[4558]: I0120 17:51:19.856457 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/cinder-api-0" podStartSLOduration=3.8564329109999997 podStartE2EDuration="3.856432911s" podCreationTimestamp="2026-01-20 17:51:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:51:19.849797684 +0000 UTC m=+4173.610135651" watchObservedRunningTime="2026-01-20 17:51:19.856432911 +0000 UTC m=+4173.616770878" Jan 20 17:51:20 crc kubenswrapper[4558]: I0120 17:51:20.052154 4558 kubelet.go:2428] "SyncLoop UPDATE" 
source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:51:20 crc kubenswrapper[4558]: I0120 17:51:20.575597 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="82d257ef-8ea2-4bb0-9472-22d7531eea07" path="/var/lib/kubelet/pods/82d257ef-8ea2-4bb0-9472-22d7531eea07/volumes" Jan 20 17:51:20 crc kubenswrapper[4558]: I0120 17:51:20.577415 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2" path="/var/lib/kubelet/pods/a4f41b33-4de3-4a87-96ac-f7ea7bb1aac2/volumes" Jan 20 17:51:20 crc kubenswrapper[4558]: I0120 17:51:20.670693 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-64bbc794f7-mwswp"] Jan 20 17:51:20 crc kubenswrapper[4558]: I0120 17:51:20.670933 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/keystone-64bbc794f7-mwswp" podUID="cc15c42c-dd9c-4f60-b92a-7140713f32fb" containerName="keystone-api" containerID="cri-o://b395c952bedfc004b11ffa76313700fbc18b300f2d6b43029c49f65465fa385b" gracePeriod=30 Jan 20 17:51:20 crc kubenswrapper[4558]: I0120 17:51:20.689746 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/keystone-64bbc794f7-mwswp" podUID="cc15c42c-dd9c-4f60-b92a-7140713f32fb" containerName="keystone-api" probeResult="failure" output="Get \"https://10.217.1.206:5000/v3\": EOF" Jan 20 17:51:20 crc kubenswrapper[4558]: I0120 17:51:20.703079 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-9c6c687fd-44ldp"] Jan 20 17:51:20 crc kubenswrapper[4558]: I0120 17:51:20.704614 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-9c6c687fd-44ldp" Jan 20 17:51:20 crc kubenswrapper[4558]: I0120 17:51:20.717713 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3922e044-abc1-42e8-a5a0-dfd1fe256e14-fernet-keys\") pod \"keystone-9c6c687fd-44ldp\" (UID: \"3922e044-abc1-42e8-a5a0-dfd1fe256e14\") " pod="openstack-kuttl-tests/keystone-9c6c687fd-44ldp" Jan 20 17:51:20 crc kubenswrapper[4558]: I0120 17:51:20.717846 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3922e044-abc1-42e8-a5a0-dfd1fe256e14-combined-ca-bundle\") pod \"keystone-9c6c687fd-44ldp\" (UID: \"3922e044-abc1-42e8-a5a0-dfd1fe256e14\") " pod="openstack-kuttl-tests/keystone-9c6c687fd-44ldp" Jan 20 17:51:20 crc kubenswrapper[4558]: I0120 17:51:20.717945 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3922e044-abc1-42e8-a5a0-dfd1fe256e14-credential-keys\") pod \"keystone-9c6c687fd-44ldp\" (UID: \"3922e044-abc1-42e8-a5a0-dfd1fe256e14\") " pod="openstack-kuttl-tests/keystone-9c6c687fd-44ldp" Jan 20 17:51:20 crc kubenswrapper[4558]: I0120 17:51:20.718106 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3922e044-abc1-42e8-a5a0-dfd1fe256e14-config-data\") pod \"keystone-9c6c687fd-44ldp\" (UID: \"3922e044-abc1-42e8-a5a0-dfd1fe256e14\") " pod="openstack-kuttl-tests/keystone-9c6c687fd-44ldp" Jan 20 17:51:20 crc kubenswrapper[4558]: I0120 17:51:20.718227 4558 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3922e044-abc1-42e8-a5a0-dfd1fe256e14-scripts\") pod \"keystone-9c6c687fd-44ldp\" (UID: \"3922e044-abc1-42e8-a5a0-dfd1fe256e14\") " pod="openstack-kuttl-tests/keystone-9c6c687fd-44ldp" Jan 20 17:51:20 crc kubenswrapper[4558]: I0120 17:51:20.718346 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3922e044-abc1-42e8-a5a0-dfd1fe256e14-internal-tls-certs\") pod \"keystone-9c6c687fd-44ldp\" (UID: \"3922e044-abc1-42e8-a5a0-dfd1fe256e14\") " pod="openstack-kuttl-tests/keystone-9c6c687fd-44ldp" Jan 20 17:51:20 crc kubenswrapper[4558]: I0120 17:51:20.718410 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-thgvp\" (UniqueName: \"kubernetes.io/projected/3922e044-abc1-42e8-a5a0-dfd1fe256e14-kube-api-access-thgvp\") pod \"keystone-9c6c687fd-44ldp\" (UID: \"3922e044-abc1-42e8-a5a0-dfd1fe256e14\") " pod="openstack-kuttl-tests/keystone-9c6c687fd-44ldp" Jan 20 17:51:20 crc kubenswrapper[4558]: I0120 17:51:20.718500 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3922e044-abc1-42e8-a5a0-dfd1fe256e14-public-tls-certs\") pod \"keystone-9c6c687fd-44ldp\" (UID: \"3922e044-abc1-42e8-a5a0-dfd1fe256e14\") " pod="openstack-kuttl-tests/keystone-9c6c687fd-44ldp" Jan 20 17:51:20 crc kubenswrapper[4558]: I0120 17:51:20.722874 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-9c6c687fd-44ldp"] Jan 20 17:51:20 crc kubenswrapper[4558]: I0120 17:51:20.820696 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3922e044-abc1-42e8-a5a0-dfd1fe256e14-config-data\") pod \"keystone-9c6c687fd-44ldp\" (UID: \"3922e044-abc1-42e8-a5a0-dfd1fe256e14\") " pod="openstack-kuttl-tests/keystone-9c6c687fd-44ldp" Jan 20 17:51:20 crc kubenswrapper[4558]: I0120 17:51:20.820772 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3922e044-abc1-42e8-a5a0-dfd1fe256e14-scripts\") pod \"keystone-9c6c687fd-44ldp\" (UID: \"3922e044-abc1-42e8-a5a0-dfd1fe256e14\") " pod="openstack-kuttl-tests/keystone-9c6c687fd-44ldp" Jan 20 17:51:20 crc kubenswrapper[4558]: I0120 17:51:20.820828 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3922e044-abc1-42e8-a5a0-dfd1fe256e14-internal-tls-certs\") pod \"keystone-9c6c687fd-44ldp\" (UID: \"3922e044-abc1-42e8-a5a0-dfd1fe256e14\") " pod="openstack-kuttl-tests/keystone-9c6c687fd-44ldp" Jan 20 17:51:20 crc kubenswrapper[4558]: I0120 17:51:20.820851 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-thgvp\" (UniqueName: \"kubernetes.io/projected/3922e044-abc1-42e8-a5a0-dfd1fe256e14-kube-api-access-thgvp\") pod \"keystone-9c6c687fd-44ldp\" (UID: \"3922e044-abc1-42e8-a5a0-dfd1fe256e14\") " pod="openstack-kuttl-tests/keystone-9c6c687fd-44ldp" Jan 20 17:51:20 crc kubenswrapper[4558]: I0120 17:51:20.820874 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3922e044-abc1-42e8-a5a0-dfd1fe256e14-public-tls-certs\") 
pod \"keystone-9c6c687fd-44ldp\" (UID: \"3922e044-abc1-42e8-a5a0-dfd1fe256e14\") " pod="openstack-kuttl-tests/keystone-9c6c687fd-44ldp" Jan 20 17:51:20 crc kubenswrapper[4558]: I0120 17:51:20.820921 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3922e044-abc1-42e8-a5a0-dfd1fe256e14-fernet-keys\") pod \"keystone-9c6c687fd-44ldp\" (UID: \"3922e044-abc1-42e8-a5a0-dfd1fe256e14\") " pod="openstack-kuttl-tests/keystone-9c6c687fd-44ldp" Jan 20 17:51:20 crc kubenswrapper[4558]: I0120 17:51:20.820952 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3922e044-abc1-42e8-a5a0-dfd1fe256e14-combined-ca-bundle\") pod \"keystone-9c6c687fd-44ldp\" (UID: \"3922e044-abc1-42e8-a5a0-dfd1fe256e14\") " pod="openstack-kuttl-tests/keystone-9c6c687fd-44ldp" Jan 20 17:51:20 crc kubenswrapper[4558]: I0120 17:51:20.820972 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3922e044-abc1-42e8-a5a0-dfd1fe256e14-credential-keys\") pod \"keystone-9c6c687fd-44ldp\" (UID: \"3922e044-abc1-42e8-a5a0-dfd1fe256e14\") " pod="openstack-kuttl-tests/keystone-9c6c687fd-44ldp" Jan 20 17:51:20 crc kubenswrapper[4558]: I0120 17:51:20.829500 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3922e044-abc1-42e8-a5a0-dfd1fe256e14-fernet-keys\") pod \"keystone-9c6c687fd-44ldp\" (UID: \"3922e044-abc1-42e8-a5a0-dfd1fe256e14\") " pod="openstack-kuttl-tests/keystone-9c6c687fd-44ldp" Jan 20 17:51:20 crc kubenswrapper[4558]: I0120 17:51:20.830646 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3922e044-abc1-42e8-a5a0-dfd1fe256e14-credential-keys\") pod \"keystone-9c6c687fd-44ldp\" (UID: \"3922e044-abc1-42e8-a5a0-dfd1fe256e14\") " pod="openstack-kuttl-tests/keystone-9c6c687fd-44ldp" Jan 20 17:51:20 crc kubenswrapper[4558]: I0120 17:51:20.834600 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3922e044-abc1-42e8-a5a0-dfd1fe256e14-public-tls-certs\") pod \"keystone-9c6c687fd-44ldp\" (UID: \"3922e044-abc1-42e8-a5a0-dfd1fe256e14\") " pod="openstack-kuttl-tests/keystone-9c6c687fd-44ldp" Jan 20 17:51:20 crc kubenswrapper[4558]: I0120 17:51:20.837623 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3922e044-abc1-42e8-a5a0-dfd1fe256e14-combined-ca-bundle\") pod \"keystone-9c6c687fd-44ldp\" (UID: \"3922e044-abc1-42e8-a5a0-dfd1fe256e14\") " pod="openstack-kuttl-tests/keystone-9c6c687fd-44ldp" Jan 20 17:51:20 crc kubenswrapper[4558]: I0120 17:51:20.841729 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3922e044-abc1-42e8-a5a0-dfd1fe256e14-config-data\") pod \"keystone-9c6c687fd-44ldp\" (UID: \"3922e044-abc1-42e8-a5a0-dfd1fe256e14\") " pod="openstack-kuttl-tests/keystone-9c6c687fd-44ldp" Jan 20 17:51:20 crc kubenswrapper[4558]: I0120 17:51:20.846457 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3922e044-abc1-42e8-a5a0-dfd1fe256e14-internal-tls-certs\") pod \"keystone-9c6c687fd-44ldp\" (UID: 
\"3922e044-abc1-42e8-a5a0-dfd1fe256e14\") " pod="openstack-kuttl-tests/keystone-9c6c687fd-44ldp" Jan 20 17:51:20 crc kubenswrapper[4558]: I0120 17:51:20.850563 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3922e044-abc1-42e8-a5a0-dfd1fe256e14-scripts\") pod \"keystone-9c6c687fd-44ldp\" (UID: \"3922e044-abc1-42e8-a5a0-dfd1fe256e14\") " pod="openstack-kuttl-tests/keystone-9c6c687fd-44ldp" Jan 20 17:51:20 crc kubenswrapper[4558]: I0120 17:51:20.865735 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-thgvp\" (UniqueName: \"kubernetes.io/projected/3922e044-abc1-42e8-a5a0-dfd1fe256e14-kube-api-access-thgvp\") pod \"keystone-9c6c687fd-44ldp\" (UID: \"3922e044-abc1-42e8-a5a0-dfd1fe256e14\") " pod="openstack-kuttl-tests/keystone-9c6c687fd-44ldp" Jan 20 17:51:20 crc kubenswrapper[4558]: I0120 17:51:20.913552 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"7d535532-403f-4d55-8138-7e09287d0108","Type":"ContainerStarted","Data":"f6b470a99751a2192a7c08c8ea1e3240681879e8b22e9a2638b977811313762b"} Jan 20 17:51:20 crc kubenswrapper[4558]: I0120 17:51:20.913610 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"7d535532-403f-4d55-8138-7e09287d0108","Type":"ContainerStarted","Data":"8d04c26962083a326120f59b8588c7e99446de909dbf820fe5f993ff533b1b74"} Jan 20 17:51:20 crc kubenswrapper[4558]: I0120 17:51:20.942319 4558 generic.go:334] "Generic (PLEG): container finished" podID="924fa7ce-8d60-4b2f-b62b-d5e146474f71" containerID="70539f03bf1b2ace01f24a9072ce8275fb4f5d92a181a991e07e1469387f4f02" exitCode=1 Jan 20 17:51:20 crc kubenswrapper[4558]: I0120 17:51:20.942642 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"924fa7ce-8d60-4b2f-b62b-d5e146474f71","Type":"ContainerDied","Data":"70539f03bf1b2ace01f24a9072ce8275fb4f5d92a181a991e07e1469387f4f02"} Jan 20 17:51:20 crc kubenswrapper[4558]: I0120 17:51:20.942675 4558 scope.go:117] "RemoveContainer" containerID="a9c70afb4065cfa5e1226d107bf8a809ffc6655bc0ed256d32ec2a30414f56e5" Jan 20 17:51:20 crc kubenswrapper[4558]: I0120 17:51:20.943125 4558 scope.go:117] "RemoveContainer" containerID="70539f03bf1b2ace01f24a9072ce8275fb4f5d92a181a991e07e1469387f4f02" Jan 20 17:51:20 crc kubenswrapper[4558]: E0120 17:51:20.943461 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-cell0-conductor-conductor\" with CrashLoopBackOff: \"back-off 40s restarting failed container=nova-cell0-conductor-conductor pod=nova-cell0-conductor-0_openstack-kuttl-tests(924fa7ce-8d60-4b2f-b62b-d5e146474f71)\"" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="924fa7ce-8d60-4b2f-b62b-d5e146474f71" Jan 20 17:51:20 crc kubenswrapper[4558]: I0120 17:51:20.946982 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:51:20 crc kubenswrapper[4558]: I0120 17:51:20.956283 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/memcached-0" podUID="f95ad818-8ba9-4839-a613-2282bbe69ddc" containerName="memcached" containerID="cri-o://292c875950d826dfa041a9a0a8854a29181d17ff2dfd691ec2ca8359bf5cf9e7" gracePeriod=30 Jan 20 17:51:20 crc kubenswrapper[4558]: I0120 17:51:20.988107 4558 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"f30aa605-d974-4d0d-aa96-5cb0d96897d6","Type":"ContainerStarted","Data":"d68e5d5426e93fd2a2fa4b8833690231b1a3151adb135d0a227b1d81dba55d3c"} Jan 20 17:51:21 crc kubenswrapper[4558]: I0120 17:51:21.035510 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-internal-api-0" podStartSLOduration=4.035489582 podStartE2EDuration="4.035489582s" podCreationTimestamp="2026-01-20 17:51:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:51:21.02441134 +0000 UTC m=+4174.784749308" watchObservedRunningTime="2026-01-20 17:51:21.035489582 +0000 UTC m=+4174.795827549" Jan 20 17:51:21 crc kubenswrapper[4558]: I0120 17:51:21.074545 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-9c6c687fd-44ldp" Jan 20 17:51:21 crc kubenswrapper[4558]: I0120 17:51:21.398678 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/placement-7f6b9c747b-m4nwt"] Jan 20 17:51:21 crc kubenswrapper[4558]: I0120 17:51:21.400345 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-7f6b9c747b-m4nwt" Jan 20 17:51:21 crc kubenswrapper[4558]: I0120 17:51:21.412055 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-7f6b9c747b-m4nwt"] Jan 20 17:51:21 crc kubenswrapper[4558]: I0120 17:51:21.570360 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qdq2c\" (UniqueName: \"kubernetes.io/projected/f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f-kube-api-access-qdq2c\") pod \"placement-7f6b9c747b-m4nwt\" (UID: \"f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f\") " pod="openstack-kuttl-tests/placement-7f6b9c747b-m4nwt" Jan 20 17:51:21 crc kubenswrapper[4558]: I0120 17:51:21.571339 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f-logs\") pod \"placement-7f6b9c747b-m4nwt\" (UID: \"f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f\") " pod="openstack-kuttl-tests/placement-7f6b9c747b-m4nwt" Jan 20 17:51:21 crc kubenswrapper[4558]: I0120 17:51:21.572969 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f-config-data\") pod \"placement-7f6b9c747b-m4nwt\" (UID: \"f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f\") " pod="openstack-kuttl-tests/placement-7f6b9c747b-m4nwt" Jan 20 17:51:21 crc kubenswrapper[4558]: I0120 17:51:21.573516 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f-scripts\") pod \"placement-7f6b9c747b-m4nwt\" (UID: \"f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f\") " pod="openstack-kuttl-tests/placement-7f6b9c747b-m4nwt" Jan 20 17:51:21 crc kubenswrapper[4558]: I0120 17:51:21.573539 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f-combined-ca-bundle\") pod \"placement-7f6b9c747b-m4nwt\" (UID: \"f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f\") " 
pod="openstack-kuttl-tests/placement-7f6b9c747b-m4nwt" Jan 20 17:51:21 crc kubenswrapper[4558]: I0120 17:51:21.573575 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f-public-tls-certs\") pod \"placement-7f6b9c747b-m4nwt\" (UID: \"f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f\") " pod="openstack-kuttl-tests/placement-7f6b9c747b-m4nwt" Jan 20 17:51:21 crc kubenswrapper[4558]: I0120 17:51:21.573611 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f-internal-tls-certs\") pod \"placement-7f6b9c747b-m4nwt\" (UID: \"f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f\") " pod="openstack-kuttl-tests/placement-7f6b9c747b-m4nwt" Jan 20 17:51:21 crc kubenswrapper[4558]: I0120 17:51:21.676016 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f-config-data\") pod \"placement-7f6b9c747b-m4nwt\" (UID: \"f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f\") " pod="openstack-kuttl-tests/placement-7f6b9c747b-m4nwt" Jan 20 17:51:21 crc kubenswrapper[4558]: I0120 17:51:21.676260 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f-scripts\") pod \"placement-7f6b9c747b-m4nwt\" (UID: \"f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f\") " pod="openstack-kuttl-tests/placement-7f6b9c747b-m4nwt" Jan 20 17:51:21 crc kubenswrapper[4558]: I0120 17:51:21.676364 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f-combined-ca-bundle\") pod \"placement-7f6b9c747b-m4nwt\" (UID: \"f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f\") " pod="openstack-kuttl-tests/placement-7f6b9c747b-m4nwt" Jan 20 17:51:21 crc kubenswrapper[4558]: I0120 17:51:21.676511 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f-public-tls-certs\") pod \"placement-7f6b9c747b-m4nwt\" (UID: \"f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f\") " pod="openstack-kuttl-tests/placement-7f6b9c747b-m4nwt" Jan 20 17:51:21 crc kubenswrapper[4558]: I0120 17:51:21.676640 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f-internal-tls-certs\") pod \"placement-7f6b9c747b-m4nwt\" (UID: \"f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f\") " pod="openstack-kuttl-tests/placement-7f6b9c747b-m4nwt" Jan 20 17:51:21 crc kubenswrapper[4558]: I0120 17:51:21.676901 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qdq2c\" (UniqueName: \"kubernetes.io/projected/f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f-kube-api-access-qdq2c\") pod \"placement-7f6b9c747b-m4nwt\" (UID: \"f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f\") " pod="openstack-kuttl-tests/placement-7f6b9c747b-m4nwt" Jan 20 17:51:21 crc kubenswrapper[4558]: I0120 17:51:21.677126 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f-logs\") pod \"placement-7f6b9c747b-m4nwt\" (UID: 
\"f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f\") " pod="openstack-kuttl-tests/placement-7f6b9c747b-m4nwt" Jan 20 17:51:21 crc kubenswrapper[4558]: I0120 17:51:21.677661 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f-logs\") pod \"placement-7f6b9c747b-m4nwt\" (UID: \"f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f\") " pod="openstack-kuttl-tests/placement-7f6b9c747b-m4nwt" Jan 20 17:51:21 crc kubenswrapper[4558]: I0120 17:51:21.682191 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f-config-data\") pod \"placement-7f6b9c747b-m4nwt\" (UID: \"f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f\") " pod="openstack-kuttl-tests/placement-7f6b9c747b-m4nwt" Jan 20 17:51:21 crc kubenswrapper[4558]: I0120 17:51:21.684525 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f-scripts\") pod \"placement-7f6b9c747b-m4nwt\" (UID: \"f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f\") " pod="openstack-kuttl-tests/placement-7f6b9c747b-m4nwt" Jan 20 17:51:21 crc kubenswrapper[4558]: I0120 17:51:21.684630 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f-public-tls-certs\") pod \"placement-7f6b9c747b-m4nwt\" (UID: \"f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f\") " pod="openstack-kuttl-tests/placement-7f6b9c747b-m4nwt" Jan 20 17:51:21 crc kubenswrapper[4558]: I0120 17:51:21.684875 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f-combined-ca-bundle\") pod \"placement-7f6b9c747b-m4nwt\" (UID: \"f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f\") " pod="openstack-kuttl-tests/placement-7f6b9c747b-m4nwt" Jan 20 17:51:21 crc kubenswrapper[4558]: I0120 17:51:21.684963 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f-internal-tls-certs\") pod \"placement-7f6b9c747b-m4nwt\" (UID: \"f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f\") " pod="openstack-kuttl-tests/placement-7f6b9c747b-m4nwt" Jan 20 17:51:21 crc kubenswrapper[4558]: I0120 17:51:21.692700 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qdq2c\" (UniqueName: \"kubernetes.io/projected/f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f-kube-api-access-qdq2c\") pod \"placement-7f6b9c747b-m4nwt\" (UID: \"f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f\") " pod="openstack-kuttl-tests/placement-7f6b9c747b-m4nwt" Jan 20 17:51:21 crc kubenswrapper[4558]: I0120 17:51:21.749234 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-7f6b9c747b-m4nwt" Jan 20 17:51:21 crc kubenswrapper[4558]: I0120 17:51:21.845241 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-9c6c687fd-44ldp"] Jan 20 17:51:21 crc kubenswrapper[4558]: I0120 17:51:21.994204 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:51:22 crc kubenswrapper[4558]: I0120 17:51:22.000282 4558 generic.go:334] "Generic (PLEG): container finished" podID="7c16f0c4-1462-4817-9f74-9e3d93193867" containerID="a83e386e23548bf8e99d8a3f739f604866a2baad6493f364fe0412f39f677f52" exitCode=1 Jan 20 17:51:22 crc kubenswrapper[4558]: I0120 17:51:22.000347 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"7c16f0c4-1462-4817-9f74-9e3d93193867","Type":"ContainerDied","Data":"a83e386e23548bf8e99d8a3f739f604866a2baad6493f364fe0412f39f677f52"} Jan 20 17:51:22 crc kubenswrapper[4558]: I0120 17:51:22.000389 4558 scope.go:117] "RemoveContainer" containerID="e689e24841d6a2881535a371383cc0c5b2611638b0371340716b92a4ebfc8207" Jan 20 17:51:22 crc kubenswrapper[4558]: I0120 17:51:22.000763 4558 scope.go:117] "RemoveContainer" containerID="a83e386e23548bf8e99d8a3f739f604866a2baad6493f364fe0412f39f677f52" Jan 20 17:51:22 crc kubenswrapper[4558]: E0120 17:51:22.000988 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-cell1-conductor-conductor\" with CrashLoopBackOff: \"back-off 20s restarting failed container=nova-cell1-conductor-conductor pod=nova-cell1-conductor-0_openstack-kuttl-tests(7c16f0c4-1462-4817-9f74-9e3d93193867)\"" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podUID="7c16f0c4-1462-4817-9f74-9e3d93193867" Jan 20 17:51:22 crc kubenswrapper[4558]: I0120 17:51:22.002369 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-9c6c687fd-44ldp" event={"ID":"3922e044-abc1-42e8-a5a0-dfd1fe256e14","Type":"ContainerStarted","Data":"b220fed2dedac92807c8ade374027d759a1706b2330aaeca6872fadde547fffb"} Jan 20 17:51:22 crc kubenswrapper[4558]: I0120 17:51:22.023558 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:51:22 crc kubenswrapper[4558]: I0120 17:51:22.023605 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"7d535532-403f-4d55-8138-7e09287d0108","Type":"ContainerStarted","Data":"6d020a191147c34f2712820f7dd8475cc0193d6eeb83f724303b3f67c115e13e"} Jan 20 17:51:22 crc kubenswrapper[4558]: I0120 17:51:22.055654 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-external-api-0" podStartSLOduration=4.055633667 podStartE2EDuration="4.055633667s" podCreationTimestamp="2026-01-20 17:51:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:51:22.052722733 +0000 UTC m=+4175.813060699" watchObservedRunningTime="2026-01-20 17:51:22.055633667 +0000 UTC m=+4175.815971634" Jan 20 17:51:22 crc kubenswrapper[4558]: I0120 17:51:22.194576 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-9c6c687fd-44ldp"] Jan 20 17:51:22 crc kubenswrapper[4558]: I0120 17:51:22.216221 4558 kubelet.go:2421] "SyncLoop ADD" 
source="api" pods=["openstack-kuttl-tests/keystone-667f6b846-wn5fk"] Jan 20 17:51:22 crc kubenswrapper[4558]: I0120 17:51:22.217443 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-667f6b846-wn5fk" Jan 20 17:51:22 crc kubenswrapper[4558]: I0120 17:51:22.236337 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-667f6b846-wn5fk"] Jan 20 17:51:22 crc kubenswrapper[4558]: W0120 17:51:22.282656 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf9bdec5b_a12e_4c73_9cb6_3b50c6bf3b0f.slice/crio-29d181e3de66fefecb7164a13f30217176cb217ded0bf92e9c42f2b23abd7dfc WatchSource:0}: Error finding container 29d181e3de66fefecb7164a13f30217176cb217ded0bf92e9c42f2b23abd7dfc: Status 404 returned error can't find the container with id 29d181e3de66fefecb7164a13f30217176cb217ded0bf92e9c42f2b23abd7dfc Jan 20 17:51:22 crc kubenswrapper[4558]: I0120 17:51:22.284752 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-7f6b9c747b-m4nwt"] Jan 20 17:51:22 crc kubenswrapper[4558]: I0120 17:51:22.403644 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/7db8891a-284a-47f5-b883-5aac563fc839-credential-keys\") pod \"keystone-667f6b846-wn5fk\" (UID: \"7db8891a-284a-47f5-b883-5aac563fc839\") " pod="openstack-kuttl-tests/keystone-667f6b846-wn5fk" Jan 20 17:51:22 crc kubenswrapper[4558]: I0120 17:51:22.404004 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7db8891a-284a-47f5-b883-5aac563fc839-combined-ca-bundle\") pod \"keystone-667f6b846-wn5fk\" (UID: \"7db8891a-284a-47f5-b883-5aac563fc839\") " pod="openstack-kuttl-tests/keystone-667f6b846-wn5fk" Jan 20 17:51:22 crc kubenswrapper[4558]: I0120 17:51:22.404042 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7db8891a-284a-47f5-b883-5aac563fc839-config-data\") pod \"keystone-667f6b846-wn5fk\" (UID: \"7db8891a-284a-47f5-b883-5aac563fc839\") " pod="openstack-kuttl-tests/keystone-667f6b846-wn5fk" Jan 20 17:51:22 crc kubenswrapper[4558]: I0120 17:51:22.404206 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7db8891a-284a-47f5-b883-5aac563fc839-public-tls-certs\") pod \"keystone-667f6b846-wn5fk\" (UID: \"7db8891a-284a-47f5-b883-5aac563fc839\") " pod="openstack-kuttl-tests/keystone-667f6b846-wn5fk" Jan 20 17:51:22 crc kubenswrapper[4558]: I0120 17:51:22.404268 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7db8891a-284a-47f5-b883-5aac563fc839-scripts\") pod \"keystone-667f6b846-wn5fk\" (UID: \"7db8891a-284a-47f5-b883-5aac563fc839\") " pod="openstack-kuttl-tests/keystone-667f6b846-wn5fk" Jan 20 17:51:22 crc kubenswrapper[4558]: I0120 17:51:22.404350 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7db8891a-284a-47f5-b883-5aac563fc839-fernet-keys\") pod \"keystone-667f6b846-wn5fk\" (UID: \"7db8891a-284a-47f5-b883-5aac563fc839\") " 
pod="openstack-kuttl-tests/keystone-667f6b846-wn5fk" Jan 20 17:51:22 crc kubenswrapper[4558]: I0120 17:51:22.404432 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7db8891a-284a-47f5-b883-5aac563fc839-internal-tls-certs\") pod \"keystone-667f6b846-wn5fk\" (UID: \"7db8891a-284a-47f5-b883-5aac563fc839\") " pod="openstack-kuttl-tests/keystone-667f6b846-wn5fk" Jan 20 17:51:22 crc kubenswrapper[4558]: I0120 17:51:22.404463 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pp8gx\" (UniqueName: \"kubernetes.io/projected/7db8891a-284a-47f5-b883-5aac563fc839-kube-api-access-pp8gx\") pod \"keystone-667f6b846-wn5fk\" (UID: \"7db8891a-284a-47f5-b883-5aac563fc839\") " pod="openstack-kuttl-tests/keystone-667f6b846-wn5fk" Jan 20 17:51:22 crc kubenswrapper[4558]: I0120 17:51:22.507375 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7db8891a-284a-47f5-b883-5aac563fc839-combined-ca-bundle\") pod \"keystone-667f6b846-wn5fk\" (UID: \"7db8891a-284a-47f5-b883-5aac563fc839\") " pod="openstack-kuttl-tests/keystone-667f6b846-wn5fk" Jan 20 17:51:22 crc kubenswrapper[4558]: I0120 17:51:22.507424 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7db8891a-284a-47f5-b883-5aac563fc839-config-data\") pod \"keystone-667f6b846-wn5fk\" (UID: \"7db8891a-284a-47f5-b883-5aac563fc839\") " pod="openstack-kuttl-tests/keystone-667f6b846-wn5fk" Jan 20 17:51:22 crc kubenswrapper[4558]: I0120 17:51:22.507530 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7db8891a-284a-47f5-b883-5aac563fc839-public-tls-certs\") pod \"keystone-667f6b846-wn5fk\" (UID: \"7db8891a-284a-47f5-b883-5aac563fc839\") " pod="openstack-kuttl-tests/keystone-667f6b846-wn5fk" Jan 20 17:51:22 crc kubenswrapper[4558]: I0120 17:51:22.507580 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7db8891a-284a-47f5-b883-5aac563fc839-scripts\") pod \"keystone-667f6b846-wn5fk\" (UID: \"7db8891a-284a-47f5-b883-5aac563fc839\") " pod="openstack-kuttl-tests/keystone-667f6b846-wn5fk" Jan 20 17:51:22 crc kubenswrapper[4558]: I0120 17:51:22.507653 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7db8891a-284a-47f5-b883-5aac563fc839-fernet-keys\") pod \"keystone-667f6b846-wn5fk\" (UID: \"7db8891a-284a-47f5-b883-5aac563fc839\") " pod="openstack-kuttl-tests/keystone-667f6b846-wn5fk" Jan 20 17:51:22 crc kubenswrapper[4558]: I0120 17:51:22.507776 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7db8891a-284a-47f5-b883-5aac563fc839-internal-tls-certs\") pod \"keystone-667f6b846-wn5fk\" (UID: \"7db8891a-284a-47f5-b883-5aac563fc839\") " pod="openstack-kuttl-tests/keystone-667f6b846-wn5fk" Jan 20 17:51:22 crc kubenswrapper[4558]: I0120 17:51:22.507826 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pp8gx\" (UniqueName: \"kubernetes.io/projected/7db8891a-284a-47f5-b883-5aac563fc839-kube-api-access-pp8gx\") pod \"keystone-667f6b846-wn5fk\" (UID: 
\"7db8891a-284a-47f5-b883-5aac563fc839\") " pod="openstack-kuttl-tests/keystone-667f6b846-wn5fk" Jan 20 17:51:22 crc kubenswrapper[4558]: I0120 17:51:22.508003 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/7db8891a-284a-47f5-b883-5aac563fc839-credential-keys\") pod \"keystone-667f6b846-wn5fk\" (UID: \"7db8891a-284a-47f5-b883-5aac563fc839\") " pod="openstack-kuttl-tests/keystone-667f6b846-wn5fk" Jan 20 17:51:22 crc kubenswrapper[4558]: I0120 17:51:22.515050 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7db8891a-284a-47f5-b883-5aac563fc839-fernet-keys\") pod \"keystone-667f6b846-wn5fk\" (UID: \"7db8891a-284a-47f5-b883-5aac563fc839\") " pod="openstack-kuttl-tests/keystone-667f6b846-wn5fk" Jan 20 17:51:22 crc kubenswrapper[4558]: I0120 17:51:22.516604 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7db8891a-284a-47f5-b883-5aac563fc839-public-tls-certs\") pod \"keystone-667f6b846-wn5fk\" (UID: \"7db8891a-284a-47f5-b883-5aac563fc839\") " pod="openstack-kuttl-tests/keystone-667f6b846-wn5fk" Jan 20 17:51:22 crc kubenswrapper[4558]: I0120 17:51:22.517188 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7db8891a-284a-47f5-b883-5aac563fc839-combined-ca-bundle\") pod \"keystone-667f6b846-wn5fk\" (UID: \"7db8891a-284a-47f5-b883-5aac563fc839\") " pod="openstack-kuttl-tests/keystone-667f6b846-wn5fk" Jan 20 17:51:22 crc kubenswrapper[4558]: I0120 17:51:22.522387 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7db8891a-284a-47f5-b883-5aac563fc839-scripts\") pod \"keystone-667f6b846-wn5fk\" (UID: \"7db8891a-284a-47f5-b883-5aac563fc839\") " pod="openstack-kuttl-tests/keystone-667f6b846-wn5fk" Jan 20 17:51:22 crc kubenswrapper[4558]: I0120 17:51:22.522670 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/7db8891a-284a-47f5-b883-5aac563fc839-credential-keys\") pod \"keystone-667f6b846-wn5fk\" (UID: \"7db8891a-284a-47f5-b883-5aac563fc839\") " pod="openstack-kuttl-tests/keystone-667f6b846-wn5fk" Jan 20 17:51:22 crc kubenswrapper[4558]: I0120 17:51:22.524111 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7db8891a-284a-47f5-b883-5aac563fc839-internal-tls-certs\") pod \"keystone-667f6b846-wn5fk\" (UID: \"7db8891a-284a-47f5-b883-5aac563fc839\") " pod="openstack-kuttl-tests/keystone-667f6b846-wn5fk" Jan 20 17:51:22 crc kubenswrapper[4558]: I0120 17:51:22.524535 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7db8891a-284a-47f5-b883-5aac563fc839-config-data\") pod \"keystone-667f6b846-wn5fk\" (UID: \"7db8891a-284a-47f5-b883-5aac563fc839\") " pod="openstack-kuttl-tests/keystone-667f6b846-wn5fk" Jan 20 17:51:22 crc kubenswrapper[4558]: I0120 17:51:22.530992 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pp8gx\" (UniqueName: \"kubernetes.io/projected/7db8891a-284a-47f5-b883-5aac563fc839-kube-api-access-pp8gx\") pod \"keystone-667f6b846-wn5fk\" (UID: \"7db8891a-284a-47f5-b883-5aac563fc839\") " 
pod="openstack-kuttl-tests/keystone-667f6b846-wn5fk" Jan 20 17:51:22 crc kubenswrapper[4558]: I0120 17:51:22.532344 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-667f6b846-wn5fk" Jan 20 17:51:22 crc kubenswrapper[4558]: I0120 17:51:22.788560 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/barbican-api-7cd9889bc6-72r2h" Jan 20 17:51:22 crc kubenswrapper[4558]: I0120 17:51:22.828581 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:51:22 crc kubenswrapper[4558]: I0120 17:51:22.919055 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/f95ad818-8ba9-4839-a613-2282bbe69ddc-memcached-tls-certs\") pod \"f95ad818-8ba9-4839-a613-2282bbe69ddc\" (UID: \"f95ad818-8ba9-4839-a613-2282bbe69ddc\") " Jan 20 17:51:22 crc kubenswrapper[4558]: I0120 17:51:22.920147 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f5h92\" (UniqueName: \"kubernetes.io/projected/f95ad818-8ba9-4839-a613-2282bbe69ddc-kube-api-access-f5h92\") pod \"f95ad818-8ba9-4839-a613-2282bbe69ddc\" (UID: \"f95ad818-8ba9-4839-a613-2282bbe69ddc\") " Jan 20 17:51:22 crc kubenswrapper[4558]: I0120 17:51:22.920326 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f95ad818-8ba9-4839-a613-2282bbe69ddc-combined-ca-bundle\") pod \"f95ad818-8ba9-4839-a613-2282bbe69ddc\" (UID: \"f95ad818-8ba9-4839-a613-2282bbe69ddc\") " Jan 20 17:51:22 crc kubenswrapper[4558]: I0120 17:51:22.920415 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/f95ad818-8ba9-4839-a613-2282bbe69ddc-kolla-config\") pod \"f95ad818-8ba9-4839-a613-2282bbe69ddc\" (UID: \"f95ad818-8ba9-4839-a613-2282bbe69ddc\") " Jan 20 17:51:22 crc kubenswrapper[4558]: I0120 17:51:22.921145 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f95ad818-8ba9-4839-a613-2282bbe69ddc-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "f95ad818-8ba9-4839-a613-2282bbe69ddc" (UID: "f95ad818-8ba9-4839-a613-2282bbe69ddc"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:51:22 crc kubenswrapper[4558]: I0120 17:51:22.921493 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f95ad818-8ba9-4839-a613-2282bbe69ddc-config-data\") pod \"f95ad818-8ba9-4839-a613-2282bbe69ddc\" (UID: \"f95ad818-8ba9-4839-a613-2282bbe69ddc\") " Jan 20 17:51:22 crc kubenswrapper[4558]: I0120 17:51:22.922837 4558 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/f95ad818-8ba9-4839-a613-2282bbe69ddc-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:22 crc kubenswrapper[4558]: I0120 17:51:22.927475 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f95ad818-8ba9-4839-a613-2282bbe69ddc-kube-api-access-f5h92" (OuterVolumeSpecName: "kube-api-access-f5h92") pod "f95ad818-8ba9-4839-a613-2282bbe69ddc" (UID: "f95ad818-8ba9-4839-a613-2282bbe69ddc"). InnerVolumeSpecName "kube-api-access-f5h92". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:51:22 crc kubenswrapper[4558]: I0120 17:51:22.929655 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f95ad818-8ba9-4839-a613-2282bbe69ddc-config-data" (OuterVolumeSpecName: "config-data") pod "f95ad818-8ba9-4839-a613-2282bbe69ddc" (UID: "f95ad818-8ba9-4839-a613-2282bbe69ddc"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:51:22 crc kubenswrapper[4558]: I0120 17:51:22.958264 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f95ad818-8ba9-4839-a613-2282bbe69ddc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f95ad818-8ba9-4839-a613-2282bbe69ddc" (UID: "f95ad818-8ba9-4839-a613-2282bbe69ddc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:23 crc kubenswrapper[4558]: I0120 17:51:23.002619 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f95ad818-8ba9-4839-a613-2282bbe69ddc-memcached-tls-certs" (OuterVolumeSpecName: "memcached-tls-certs") pod "f95ad818-8ba9-4839-a613-2282bbe69ddc" (UID: "f95ad818-8ba9-4839-a613-2282bbe69ddc"). InnerVolumeSpecName "memcached-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:23 crc kubenswrapper[4558]: I0120 17:51:23.009370 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-667f6b846-wn5fk"] Jan 20 17:51:23 crc kubenswrapper[4558]: W0120 17:51:23.012117 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7db8891a_284a_47f5_b883_5aac563fc839.slice/crio-96cf3ec354af83911559cda738591158baa9fb6d63952c3f99c67539a15a1ea7 WatchSource:0}: Error finding container 96cf3ec354af83911559cda738591158baa9fb6d63952c3f99c67539a15a1ea7: Status 404 returned error can't find the container with id 96cf3ec354af83911559cda738591158baa9fb6d63952c3f99c67539a15a1ea7 Jan 20 17:51:23 crc kubenswrapper[4558]: I0120 17:51:23.025089 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f5h92\" (UniqueName: \"kubernetes.io/projected/f95ad818-8ba9-4839-a613-2282bbe69ddc-kube-api-access-f5h92\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:23 crc kubenswrapper[4558]: I0120 17:51:23.025116 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f95ad818-8ba9-4839-a613-2282bbe69ddc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:23 crc kubenswrapper[4558]: I0120 17:51:23.025129 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f95ad818-8ba9-4839-a613-2282bbe69ddc-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:23 crc kubenswrapper[4558]: I0120 17:51:23.025140 4558 reconciler_common.go:293] "Volume detached for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/f95ad818-8ba9-4839-a613-2282bbe69ddc-memcached-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:23 crc kubenswrapper[4558]: I0120 17:51:23.035041 4558 generic.go:334] "Generic (PLEG): container finished" podID="f95ad818-8ba9-4839-a613-2282bbe69ddc" containerID="292c875950d826dfa041a9a0a8854a29181d17ff2dfd691ec2ca8359bf5cf9e7" exitCode=0 Jan 20 17:51:23 crc kubenswrapper[4558]: I0120 17:51:23.035098 4558 util.go:48] "No ready sandbox for pod 
can be found. Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:51:23 crc kubenswrapper[4558]: I0120 17:51:23.035122 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"f95ad818-8ba9-4839-a613-2282bbe69ddc","Type":"ContainerDied","Data":"292c875950d826dfa041a9a0a8854a29181d17ff2dfd691ec2ca8359bf5cf9e7"} Jan 20 17:51:23 crc kubenswrapper[4558]: I0120 17:51:23.035154 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"f95ad818-8ba9-4839-a613-2282bbe69ddc","Type":"ContainerDied","Data":"5d5ed2dc391448a9107ff0fe485cf3b3a71a7a56f115a34498bed712729c9662"} Jan 20 17:51:23 crc kubenswrapper[4558]: I0120 17:51:23.035186 4558 scope.go:117] "RemoveContainer" containerID="292c875950d826dfa041a9a0a8854a29181d17ff2dfd691ec2ca8359bf5cf9e7" Jan 20 17:51:23 crc kubenswrapper[4558]: I0120 17:51:23.040333 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-9c6c687fd-44ldp" event={"ID":"3922e044-abc1-42e8-a5a0-dfd1fe256e14","Type":"ContainerStarted","Data":"c5147fd6cc62ef26b5eb7f80623ec144f99572e648edbc9fc439a486b6a29d02"} Jan 20 17:51:23 crc kubenswrapper[4558]: I0120 17:51:23.042020 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/keystone-9c6c687fd-44ldp" Jan 20 17:51:23 crc kubenswrapper[4558]: I0120 17:51:23.043491 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-667f6b846-wn5fk" event={"ID":"7db8891a-284a-47f5-b883-5aac563fc839","Type":"ContainerStarted","Data":"96cf3ec354af83911559cda738591158baa9fb6d63952c3f99c67539a15a1ea7"} Jan 20 17:51:23 crc kubenswrapper[4558]: I0120 17:51:23.048496 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-7f6b9c747b-m4nwt" event={"ID":"f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f","Type":"ContainerStarted","Data":"aababf132d5446979839e0c577985c22fd9f60a631cd19ebf40fb35cac8af51c"} Jan 20 17:51:23 crc kubenswrapper[4558]: I0120 17:51:23.048531 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-7f6b9c747b-m4nwt" event={"ID":"f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f","Type":"ContainerStarted","Data":"f05207f6b368474a1803d5134030c085eaccc7f1d38de03e11ae6754937b6bfd"} Jan 20 17:51:23 crc kubenswrapper[4558]: I0120 17:51:23.048548 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/placement-7f6b9c747b-m4nwt" Jan 20 17:51:23 crc kubenswrapper[4558]: I0120 17:51:23.048576 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/placement-7f6b9c747b-m4nwt" Jan 20 17:51:23 crc kubenswrapper[4558]: I0120 17:51:23.048586 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-7f6b9c747b-m4nwt" event={"ID":"f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f","Type":"ContainerStarted","Data":"29d181e3de66fefecb7164a13f30217176cb217ded0bf92e9c42f2b23abd7dfc"} Jan 20 17:51:23 crc kubenswrapper[4558]: I0120 17:51:23.064964 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/keystone-9c6c687fd-44ldp" podStartSLOduration=3.064942007 podStartE2EDuration="3.064942007s" podCreationTimestamp="2026-01-20 17:51:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:51:23.063437267 +0000 UTC m=+4176.823775235" 
watchObservedRunningTime="2026-01-20 17:51:23.064942007 +0000 UTC m=+4176.825279974" Jan 20 17:51:23 crc kubenswrapper[4558]: I0120 17:51:23.067597 4558 scope.go:117] "RemoveContainer" containerID="292c875950d826dfa041a9a0a8854a29181d17ff2dfd691ec2ca8359bf5cf9e7" Jan 20 17:51:23 crc kubenswrapper[4558]: E0120 17:51:23.067952 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"292c875950d826dfa041a9a0a8854a29181d17ff2dfd691ec2ca8359bf5cf9e7\": container with ID starting with 292c875950d826dfa041a9a0a8854a29181d17ff2dfd691ec2ca8359bf5cf9e7 not found: ID does not exist" containerID="292c875950d826dfa041a9a0a8854a29181d17ff2dfd691ec2ca8359bf5cf9e7" Jan 20 17:51:23 crc kubenswrapper[4558]: I0120 17:51:23.068053 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"292c875950d826dfa041a9a0a8854a29181d17ff2dfd691ec2ca8359bf5cf9e7"} err="failed to get container status \"292c875950d826dfa041a9a0a8854a29181d17ff2dfd691ec2ca8359bf5cf9e7\": rpc error: code = NotFound desc = could not find container \"292c875950d826dfa041a9a0a8854a29181d17ff2dfd691ec2ca8359bf5cf9e7\": container with ID starting with 292c875950d826dfa041a9a0a8854a29181d17ff2dfd691ec2ca8359bf5cf9e7 not found: ID does not exist" Jan 20 17:51:23 crc kubenswrapper[4558]: I0120 17:51:23.080662 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:51:23 crc kubenswrapper[4558]: I0120 17:51:23.081042 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/barbican-api-7cd9889bc6-72r2h" Jan 20 17:51:23 crc kubenswrapper[4558]: I0120 17:51:23.102453 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:51:23 crc kubenswrapper[4558]: I0120 17:51:23.104037 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:51:23 crc kubenswrapper[4558]: I0120 17:51:23.111096 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:51:23 crc kubenswrapper[4558]: I0120 17:51:23.111312 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/placement-7f6b9c747b-m4nwt" podStartSLOduration=2.111298124 podStartE2EDuration="2.111298124s" podCreationTimestamp="2026-01-20 17:51:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:51:23.111051659 +0000 UTC m=+4176.871389627" watchObservedRunningTime="2026-01-20 17:51:23.111298124 +0000 UTC m=+4176.871636091" Jan 20 17:51:23 crc kubenswrapper[4558]: E0120 17:51:23.111852 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f95ad818-8ba9-4839-a613-2282bbe69ddc" containerName="memcached" Jan 20 17:51:23 crc kubenswrapper[4558]: I0120 17:51:23.111883 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f95ad818-8ba9-4839-a613-2282bbe69ddc" containerName="memcached" Jan 20 17:51:23 crc kubenswrapper[4558]: I0120 17:51:23.112153 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f95ad818-8ba9-4839-a613-2282bbe69ddc" containerName="memcached" Jan 20 17:51:23 crc kubenswrapper[4558]: I0120 17:51:23.112946 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:51:23 crc kubenswrapper[4558]: I0120 17:51:23.117484 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"memcached-config-data" Jan 20 17:51:23 crc kubenswrapper[4558]: I0120 17:51:23.117696 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"memcached-memcached-dockercfg-lc49s" Jan 20 17:51:23 crc kubenswrapper[4558]: I0120 17:51:23.122309 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-memcached-svc" Jan 20 17:51:23 crc kubenswrapper[4558]: I0120 17:51:23.124938 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:51:23 crc kubenswrapper[4558]: I0120 17:51:23.193798 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-api-6fcd55fbd-x56xq"] Jan 20 17:51:23 crc kubenswrapper[4558]: I0120 17:51:23.194118 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-api-6fcd55fbd-x56xq" podUID="4a97d228-d2ed-4b25-9746-7ee8083e562b" containerName="barbican-api-log" containerID="cri-o://4cc83e2afecb8ccf476b628cbe76038deafd826319b077c37d311481bf29f6f1" gracePeriod=30 Jan 20 17:51:23 crc kubenswrapper[4558]: I0120 17:51:23.194636 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-api-6fcd55fbd-x56xq" podUID="4a97d228-d2ed-4b25-9746-7ee8083e562b" containerName="barbican-api" containerID="cri-o://9eb510d65b859ba67d1fbebbd45639a22b783fc8a8eeb68ec8c537ed76f3ce64" gracePeriod=30 Jan 20 17:51:23 crc kubenswrapper[4558]: I0120 17:51:23.230156 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a905b3ac-e1b2-49f8-8034-912138028aca-combined-ca-bundle\") pod \"memcached-0\" (UID: \"a905b3ac-e1b2-49f8-8034-912138028aca\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:51:23 crc kubenswrapper[4558]: I0120 17:51:23.230518 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a905b3ac-e1b2-49f8-8034-912138028aca-config-data\") pod \"memcached-0\" (UID: \"a905b3ac-e1b2-49f8-8034-912138028aca\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:51:23 crc kubenswrapper[4558]: I0120 17:51:23.230638 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/a905b3ac-e1b2-49f8-8034-912138028aca-memcached-tls-certs\") pod \"memcached-0\" (UID: \"a905b3ac-e1b2-49f8-8034-912138028aca\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:51:23 crc kubenswrapper[4558]: I0120 17:51:23.230679 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zhpjg\" (UniqueName: \"kubernetes.io/projected/a905b3ac-e1b2-49f8-8034-912138028aca-kube-api-access-zhpjg\") pod \"memcached-0\" (UID: \"a905b3ac-e1b2-49f8-8034-912138028aca\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:51:23 crc kubenswrapper[4558]: I0120 17:51:23.231080 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/a905b3ac-e1b2-49f8-8034-912138028aca-kolla-config\") pod \"memcached-0\" 
(UID: \"a905b3ac-e1b2-49f8-8034-912138028aca\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:51:23 crc kubenswrapper[4558]: I0120 17:51:23.316943 4558 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:51:23 crc kubenswrapper[4558]: I0120 17:51:23.317892 4558 scope.go:117] "RemoveContainer" containerID="70539f03bf1b2ace01f24a9072ce8275fb4f5d92a181a991e07e1469387f4f02" Jan 20 17:51:23 crc kubenswrapper[4558]: E0120 17:51:23.318145 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-cell0-conductor-conductor\" with CrashLoopBackOff: \"back-off 40s restarting failed container=nova-cell0-conductor-conductor pod=nova-cell0-conductor-0_openstack-kuttl-tests(924fa7ce-8d60-4b2f-b62b-d5e146474f71)\"" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="924fa7ce-8d60-4b2f-b62b-d5e146474f71" Jan 20 17:51:23 crc kubenswrapper[4558]: I0120 17:51:23.332293 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zhpjg\" (UniqueName: \"kubernetes.io/projected/a905b3ac-e1b2-49f8-8034-912138028aca-kube-api-access-zhpjg\") pod \"memcached-0\" (UID: \"a905b3ac-e1b2-49f8-8034-912138028aca\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:51:23 crc kubenswrapper[4558]: I0120 17:51:23.332410 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/a905b3ac-e1b2-49f8-8034-912138028aca-kolla-config\") pod \"memcached-0\" (UID: \"a905b3ac-e1b2-49f8-8034-912138028aca\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:51:23 crc kubenswrapper[4558]: I0120 17:51:23.332451 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a905b3ac-e1b2-49f8-8034-912138028aca-combined-ca-bundle\") pod \"memcached-0\" (UID: \"a905b3ac-e1b2-49f8-8034-912138028aca\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:51:23 crc kubenswrapper[4558]: I0120 17:51:23.332488 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a905b3ac-e1b2-49f8-8034-912138028aca-config-data\") pod \"memcached-0\" (UID: \"a905b3ac-e1b2-49f8-8034-912138028aca\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:51:23 crc kubenswrapper[4558]: I0120 17:51:23.332527 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/a905b3ac-e1b2-49f8-8034-912138028aca-memcached-tls-certs\") pod \"memcached-0\" (UID: \"a905b3ac-e1b2-49f8-8034-912138028aca\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:51:23 crc kubenswrapper[4558]: I0120 17:51:23.333394 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/a905b3ac-e1b2-49f8-8034-912138028aca-kolla-config\") pod \"memcached-0\" (UID: \"a905b3ac-e1b2-49f8-8034-912138028aca\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:51:23 crc kubenswrapper[4558]: I0120 17:51:23.333593 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a905b3ac-e1b2-49f8-8034-912138028aca-config-data\") pod \"memcached-0\" (UID: \"a905b3ac-e1b2-49f8-8034-912138028aca\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:51:23 crc kubenswrapper[4558]: I0120 
17:51:23.336794 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/a905b3ac-e1b2-49f8-8034-912138028aca-memcached-tls-certs\") pod \"memcached-0\" (UID: \"a905b3ac-e1b2-49f8-8034-912138028aca\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:51:23 crc kubenswrapper[4558]: I0120 17:51:23.337179 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a905b3ac-e1b2-49f8-8034-912138028aca-combined-ca-bundle\") pod \"memcached-0\" (UID: \"a905b3ac-e1b2-49f8-8034-912138028aca\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:51:23 crc kubenswrapper[4558]: I0120 17:51:23.350741 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zhpjg\" (UniqueName: \"kubernetes.io/projected/a905b3ac-e1b2-49f8-8034-912138028aca-kube-api-access-zhpjg\") pod \"memcached-0\" (UID: \"a905b3ac-e1b2-49f8-8034-912138028aca\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:51:23 crc kubenswrapper[4558]: I0120 17:51:23.434444 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:51:23 crc kubenswrapper[4558]: I0120 17:51:23.439680 4558 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:51:23 crc kubenswrapper[4558]: I0120 17:51:23.440824 4558 scope.go:117] "RemoveContainer" containerID="a83e386e23548bf8e99d8a3f739f604866a2baad6493f364fe0412f39f677f52" Jan 20 17:51:23 crc kubenswrapper[4558]: E0120 17:51:23.441119 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-cell1-conductor-conductor\" with CrashLoopBackOff: \"back-off 20s restarting failed container=nova-cell1-conductor-conductor pod=nova-cell1-conductor-0_openstack-kuttl-tests(7c16f0c4-1462-4817-9f74-9e3d93193867)\"" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podUID="7c16f0c4-1462-4817-9f74-9e3d93193867" Jan 20 17:51:23 crc kubenswrapper[4558]: I0120 17:51:23.566885 4558 scope.go:117] "RemoveContainer" containerID="1070e671e60499543eff1520064c8d7b1151ff81f9de7c5dd18ebcbdbdbbe487" Jan 20 17:51:23 crc kubenswrapper[4558]: I0120 17:51:23.701376 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:51:24 crc kubenswrapper[4558]: I0120 17:51:24.057093 4558 generic.go:334] "Generic (PLEG): container finished" podID="4a97d228-d2ed-4b25-9746-7ee8083e562b" containerID="4cc83e2afecb8ccf476b628cbe76038deafd826319b077c37d311481bf29f6f1" exitCode=143 Jan 20 17:51:24 crc kubenswrapper[4558]: I0120 17:51:24.057179 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-6fcd55fbd-x56xq" event={"ID":"4a97d228-d2ed-4b25-9746-7ee8083e562b","Type":"ContainerDied","Data":"4cc83e2afecb8ccf476b628cbe76038deafd826319b077c37d311481bf29f6f1"} Jan 20 17:51:24 crc kubenswrapper[4558]: I0120 17:51:24.059011 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2","Type":"ContainerStarted","Data":"4e744e9f021c29bfddbe31a0a527bc6946c7e7d3595c756668ddc79180474c63"} Jan 20 17:51:24 crc kubenswrapper[4558]: I0120 17:51:24.061259 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-667f6b846-wn5fk" 
event={"ID":"7db8891a-284a-47f5-b883-5aac563fc839","Type":"ContainerStarted","Data":"d6f9a9e764e6371a2dc8fd22991e16caacd06318d66716c401edaedf85f6c0d7"} Jan 20 17:51:24 crc kubenswrapper[4558]: I0120 17:51:24.061384 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/keystone-667f6b846-wn5fk" Jan 20 17:51:24 crc kubenswrapper[4558]: I0120 17:51:24.064139 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"a905b3ac-e1b2-49f8-8034-912138028aca","Type":"ContainerStarted","Data":"516d63566e73522c69fe8fb7503a5f9b4f3300f2691369edc89aa5a577b600e5"} Jan 20 17:51:24 crc kubenswrapper[4558]: I0120 17:51:24.064197 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:51:24 crc kubenswrapper[4558]: I0120 17:51:24.064212 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"a905b3ac-e1b2-49f8-8034-912138028aca","Type":"ContainerStarted","Data":"929204fe34e13dda9ed94b10d3f6026f38c68ea4979e1462ef3988f90fcf3c31"} Jan 20 17:51:24 crc kubenswrapper[4558]: I0120 17:51:24.064325 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/keystone-9c6c687fd-44ldp" podUID="3922e044-abc1-42e8-a5a0-dfd1fe256e14" containerName="keystone-api" containerID="cri-o://c5147fd6cc62ef26b5eb7f80623ec144f99572e648edbc9fc439a486b6a29d02" gracePeriod=30 Jan 20 17:51:24 crc kubenswrapper[4558]: I0120 17:51:24.098054 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/keystone-667f6b846-wn5fk" podStartSLOduration=2.098027326 podStartE2EDuration="2.098027326s" podCreationTimestamp="2026-01-20 17:51:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:51:24.09304634 +0000 UTC m=+4177.853384297" watchObservedRunningTime="2026-01-20 17:51:24.098027326 +0000 UTC m=+4177.858365293" Jan 20 17:51:24 crc kubenswrapper[4558]: I0120 17:51:24.121621 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/memcached-0" podStartSLOduration=1.121600432 podStartE2EDuration="1.121600432s" podCreationTimestamp="2026-01-20 17:51:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:51:24.116702994 +0000 UTC m=+4177.877040960" watchObservedRunningTime="2026-01-20 17:51:24.121600432 +0000 UTC m=+4177.881938400" Jan 20 17:51:24 crc kubenswrapper[4558]: I0120 17:51:24.588305 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f95ad818-8ba9-4839-a613-2282bbe69ddc" path="/var/lib/kubelet/pods/f95ad818-8ba9-4839-a613-2282bbe69ddc/volumes" Jan 20 17:51:24 crc kubenswrapper[4558]: I0120 17:51:24.671684 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-9c6c687fd-44ldp" Jan 20 17:51:24 crc kubenswrapper[4558]: I0120 17:51:24.764268 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3922e044-abc1-42e8-a5a0-dfd1fe256e14-config-data\") pod \"3922e044-abc1-42e8-a5a0-dfd1fe256e14\" (UID: \"3922e044-abc1-42e8-a5a0-dfd1fe256e14\") " Jan 20 17:51:24 crc kubenswrapper[4558]: I0120 17:51:24.764301 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3922e044-abc1-42e8-a5a0-dfd1fe256e14-scripts\") pod \"3922e044-abc1-42e8-a5a0-dfd1fe256e14\" (UID: \"3922e044-abc1-42e8-a5a0-dfd1fe256e14\") " Jan 20 17:51:24 crc kubenswrapper[4558]: I0120 17:51:24.764352 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3922e044-abc1-42e8-a5a0-dfd1fe256e14-internal-tls-certs\") pod \"3922e044-abc1-42e8-a5a0-dfd1fe256e14\" (UID: \"3922e044-abc1-42e8-a5a0-dfd1fe256e14\") " Jan 20 17:51:24 crc kubenswrapper[4558]: I0120 17:51:24.764380 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3922e044-abc1-42e8-a5a0-dfd1fe256e14-public-tls-certs\") pod \"3922e044-abc1-42e8-a5a0-dfd1fe256e14\" (UID: \"3922e044-abc1-42e8-a5a0-dfd1fe256e14\") " Jan 20 17:51:24 crc kubenswrapper[4558]: I0120 17:51:24.764586 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3922e044-abc1-42e8-a5a0-dfd1fe256e14-fernet-keys\") pod \"3922e044-abc1-42e8-a5a0-dfd1fe256e14\" (UID: \"3922e044-abc1-42e8-a5a0-dfd1fe256e14\") " Jan 20 17:51:24 crc kubenswrapper[4558]: I0120 17:51:24.764632 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-thgvp\" (UniqueName: \"kubernetes.io/projected/3922e044-abc1-42e8-a5a0-dfd1fe256e14-kube-api-access-thgvp\") pod \"3922e044-abc1-42e8-a5a0-dfd1fe256e14\" (UID: \"3922e044-abc1-42e8-a5a0-dfd1fe256e14\") " Jan 20 17:51:24 crc kubenswrapper[4558]: I0120 17:51:24.764664 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3922e044-abc1-42e8-a5a0-dfd1fe256e14-credential-keys\") pod \"3922e044-abc1-42e8-a5a0-dfd1fe256e14\" (UID: \"3922e044-abc1-42e8-a5a0-dfd1fe256e14\") " Jan 20 17:51:24 crc kubenswrapper[4558]: I0120 17:51:24.764710 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3922e044-abc1-42e8-a5a0-dfd1fe256e14-combined-ca-bundle\") pod \"3922e044-abc1-42e8-a5a0-dfd1fe256e14\" (UID: \"3922e044-abc1-42e8-a5a0-dfd1fe256e14\") " Jan 20 17:51:24 crc kubenswrapper[4558]: I0120 17:51:24.770845 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3922e044-abc1-42e8-a5a0-dfd1fe256e14-scripts" (OuterVolumeSpecName: "scripts") pod "3922e044-abc1-42e8-a5a0-dfd1fe256e14" (UID: "3922e044-abc1-42e8-a5a0-dfd1fe256e14"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:24 crc kubenswrapper[4558]: I0120 17:51:24.772934 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3922e044-abc1-42e8-a5a0-dfd1fe256e14-kube-api-access-thgvp" (OuterVolumeSpecName: "kube-api-access-thgvp") pod "3922e044-abc1-42e8-a5a0-dfd1fe256e14" (UID: "3922e044-abc1-42e8-a5a0-dfd1fe256e14"). InnerVolumeSpecName "kube-api-access-thgvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:51:24 crc kubenswrapper[4558]: I0120 17:51:24.773483 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3922e044-abc1-42e8-a5a0-dfd1fe256e14-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "3922e044-abc1-42e8-a5a0-dfd1fe256e14" (UID: "3922e044-abc1-42e8-a5a0-dfd1fe256e14"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:24 crc kubenswrapper[4558]: I0120 17:51:24.774301 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3922e044-abc1-42e8-a5a0-dfd1fe256e14-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "3922e044-abc1-42e8-a5a0-dfd1fe256e14" (UID: "3922e044-abc1-42e8-a5a0-dfd1fe256e14"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:24 crc kubenswrapper[4558]: I0120 17:51:24.793368 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3922e044-abc1-42e8-a5a0-dfd1fe256e14-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3922e044-abc1-42e8-a5a0-dfd1fe256e14" (UID: "3922e044-abc1-42e8-a5a0-dfd1fe256e14"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:24 crc kubenswrapper[4558]: I0120 17:51:24.795651 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3922e044-abc1-42e8-a5a0-dfd1fe256e14-config-data" (OuterVolumeSpecName: "config-data") pod "3922e044-abc1-42e8-a5a0-dfd1fe256e14" (UID: "3922e044-abc1-42e8-a5a0-dfd1fe256e14"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:24 crc kubenswrapper[4558]: I0120 17:51:24.815308 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3922e044-abc1-42e8-a5a0-dfd1fe256e14-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "3922e044-abc1-42e8-a5a0-dfd1fe256e14" (UID: "3922e044-abc1-42e8-a5a0-dfd1fe256e14"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:24 crc kubenswrapper[4558]: I0120 17:51:24.822532 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3922e044-abc1-42e8-a5a0-dfd1fe256e14-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "3922e044-abc1-42e8-a5a0-dfd1fe256e14" (UID: "3922e044-abc1-42e8-a5a0-dfd1fe256e14"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:24 crc kubenswrapper[4558]: I0120 17:51:24.871831 4558 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3922e044-abc1-42e8-a5a0-dfd1fe256e14-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:24 crc kubenswrapper[4558]: I0120 17:51:24.871858 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-thgvp\" (UniqueName: \"kubernetes.io/projected/3922e044-abc1-42e8-a5a0-dfd1fe256e14-kube-api-access-thgvp\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:24 crc kubenswrapper[4558]: I0120 17:51:24.871872 4558 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3922e044-abc1-42e8-a5a0-dfd1fe256e14-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:24 crc kubenswrapper[4558]: I0120 17:51:24.871884 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3922e044-abc1-42e8-a5a0-dfd1fe256e14-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:24 crc kubenswrapper[4558]: I0120 17:51:24.871893 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3922e044-abc1-42e8-a5a0-dfd1fe256e14-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:24 crc kubenswrapper[4558]: I0120 17:51:24.871901 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3922e044-abc1-42e8-a5a0-dfd1fe256e14-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:24 crc kubenswrapper[4558]: I0120 17:51:24.871912 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3922e044-abc1-42e8-a5a0-dfd1fe256e14-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:24 crc kubenswrapper[4558]: I0120 17:51:24.871922 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3922e044-abc1-42e8-a5a0-dfd1fe256e14-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:25 crc kubenswrapper[4558]: I0120 17:51:25.099276 4558 generic.go:334] "Generic (PLEG): container finished" podID="3922e044-abc1-42e8-a5a0-dfd1fe256e14" containerID="c5147fd6cc62ef26b5eb7f80623ec144f99572e648edbc9fc439a486b6a29d02" exitCode=0 Jan 20 17:51:25 crc kubenswrapper[4558]: I0120 17:51:25.099711 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-9c6c687fd-44ldp" event={"ID":"3922e044-abc1-42e8-a5a0-dfd1fe256e14","Type":"ContainerDied","Data":"c5147fd6cc62ef26b5eb7f80623ec144f99572e648edbc9fc439a486b6a29d02"} Jan 20 17:51:25 crc kubenswrapper[4558]: I0120 17:51:25.099780 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-9c6c687fd-44ldp" event={"ID":"3922e044-abc1-42e8-a5a0-dfd1fe256e14","Type":"ContainerDied","Data":"b220fed2dedac92807c8ade374027d759a1706b2330aaeca6872fadde547fffb"} Jan 20 17:51:25 crc kubenswrapper[4558]: I0120 17:51:25.099830 4558 scope.go:117] "RemoveContainer" containerID="c5147fd6cc62ef26b5eb7f80623ec144f99572e648edbc9fc439a486b6a29d02" Jan 20 17:51:25 crc kubenswrapper[4558]: I0120 17:51:25.100077 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-9c6c687fd-44ldp" Jan 20 17:51:25 crc kubenswrapper[4558]: I0120 17:51:25.174223 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-9c6c687fd-44ldp"] Jan 20 17:51:25 crc kubenswrapper[4558]: I0120 17:51:25.191968 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-9c6c687fd-44ldp"] Jan 20 17:51:25 crc kubenswrapper[4558]: I0120 17:51:25.212473 4558 scope.go:117] "RemoveContainer" containerID="c5147fd6cc62ef26b5eb7f80623ec144f99572e648edbc9fc439a486b6a29d02" Jan 20 17:51:25 crc kubenswrapper[4558]: E0120 17:51:25.215294 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c5147fd6cc62ef26b5eb7f80623ec144f99572e648edbc9fc439a486b6a29d02\": container with ID starting with c5147fd6cc62ef26b5eb7f80623ec144f99572e648edbc9fc439a486b6a29d02 not found: ID does not exist" containerID="c5147fd6cc62ef26b5eb7f80623ec144f99572e648edbc9fc439a486b6a29d02" Jan 20 17:51:25 crc kubenswrapper[4558]: I0120 17:51:25.215330 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c5147fd6cc62ef26b5eb7f80623ec144f99572e648edbc9fc439a486b6a29d02"} err="failed to get container status \"c5147fd6cc62ef26b5eb7f80623ec144f99572e648edbc9fc439a486b6a29d02\": rpc error: code = NotFound desc = could not find container \"c5147fd6cc62ef26b5eb7f80623ec144f99572e648edbc9fc439a486b6a29d02\": container with ID starting with c5147fd6cc62ef26b5eb7f80623ec144f99572e648edbc9fc439a486b6a29d02 not found: ID does not exist" Jan 20 17:51:25 crc kubenswrapper[4558]: I0120 17:51:25.430284 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/neutron-6bd94d8c7d-2mkc6" Jan 20 17:51:25 crc kubenswrapper[4558]: I0120 17:51:25.482697 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-78bcff9576-76n9h"] Jan 20 17:51:25 crc kubenswrapper[4558]: I0120 17:51:25.482932 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-78bcff9576-76n9h" podUID="6a566db2-a941-48c9-9169-8e2c16cda1ac" containerName="neutron-api" containerID="cri-o://d6eeb7c903fe5ccb79f100c77cfb1bc792eacae8fb9dc8447344d2655524771b" gracePeriod=30 Jan 20 17:51:25 crc kubenswrapper[4558]: I0120 17:51:25.483375 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-78bcff9576-76n9h" podUID="6a566db2-a941-48c9-9169-8e2c16cda1ac" containerName="neutron-httpd" containerID="cri-o://fe78548aaf34b40e51653f6f6cc0aba6de558e32992d14ccf662ce044e1157f4" gracePeriod=30 Jan 20 17:51:26 crc kubenswrapper[4558]: I0120 17:51:26.111628 4558 generic.go:334] "Generic (PLEG): container finished" podID="d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2" containerID="4e744e9f021c29bfddbe31a0a527bc6946c7e7d3595c756668ddc79180474c63" exitCode=1 Jan 20 17:51:26 crc kubenswrapper[4558]: I0120 17:51:26.111747 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2","Type":"ContainerDied","Data":"4e744e9f021c29bfddbe31a0a527bc6946c7e7d3595c756668ddc79180474c63"} Jan 20 17:51:26 crc kubenswrapper[4558]: I0120 17:51:26.111843 4558 scope.go:117] "RemoveContainer" containerID="1070e671e60499543eff1520064c8d7b1151ff81f9de7c5dd18ebcbdbdbbe487" Jan 20 17:51:26 crc kubenswrapper[4558]: I0120 
17:51:26.112571 4558 scope.go:117] "RemoveContainer" containerID="4e744e9f021c29bfddbe31a0a527bc6946c7e7d3595c756668ddc79180474c63" Jan 20 17:51:26 crc kubenswrapper[4558]: E0120 17:51:26.113293 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-scheduler-scheduler\" with CrashLoopBackOff: \"back-off 20s restarting failed container=nova-scheduler-scheduler pod=nova-scheduler-0_openstack-kuttl-tests(d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2)\"" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2" Jan 20 17:51:26 crc kubenswrapper[4558]: I0120 17:51:26.113749 4558 generic.go:334] "Generic (PLEG): container finished" podID="6a566db2-a941-48c9-9169-8e2c16cda1ac" containerID="fe78548aaf34b40e51653f6f6cc0aba6de558e32992d14ccf662ce044e1157f4" exitCode=0 Jan 20 17:51:26 crc kubenswrapper[4558]: I0120 17:51:26.113798 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-78bcff9576-76n9h" event={"ID":"6a566db2-a941-48c9-9169-8e2c16cda1ac","Type":"ContainerDied","Data":"fe78548aaf34b40e51653f6f6cc0aba6de558e32992d14ccf662ce044e1157f4"} Jan 20 17:51:26 crc kubenswrapper[4558]: I0120 17:51:26.114193 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/keystone-64bbc794f7-mwswp" podUID="cc15c42c-dd9c-4f60-b92a-7140713f32fb" containerName="keystone-api" probeResult="failure" output="Get \"https://10.217.1.206:5000/v3\": read tcp 10.217.0.2:56602->10.217.1.206:5000: read: connection reset by peer" Jan 20 17:51:26 crc kubenswrapper[4558]: I0120 17:51:26.354947 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/barbican-api-6fcd55fbd-x56xq" podUID="4a97d228-d2ed-4b25-9746-7ee8083e562b" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.1.135:9311/healthcheck\": read tcp 10.217.0.2:55476->10.217.1.135:9311: read: connection reset by peer" Jan 20 17:51:26 crc kubenswrapper[4558]: I0120 17:51:26.354974 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/barbican-api-6fcd55fbd-x56xq" podUID="4a97d228-d2ed-4b25-9746-7ee8083e562b" containerName="barbican-api" probeResult="failure" output="Get \"https://10.217.1.135:9311/healthcheck\": read tcp 10.217.0.2:55480->10.217.1.135:9311: read: connection reset by peer" Jan 20 17:51:26 crc kubenswrapper[4558]: I0120 17:51:26.511300 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-64bbc794f7-mwswp" Jan 20 17:51:26 crc kubenswrapper[4558]: I0120 17:51:26.581615 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3922e044-abc1-42e8-a5a0-dfd1fe256e14" path="/var/lib/kubelet/pods/3922e044-abc1-42e8-a5a0-dfd1fe256e14/volumes" Jan 20 17:51:26 crc kubenswrapper[4558]: I0120 17:51:26.616849 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc15c42c-dd9c-4f60-b92a-7140713f32fb-internal-tls-certs\") pod \"cc15c42c-dd9c-4f60-b92a-7140713f32fb\" (UID: \"cc15c42c-dd9c-4f60-b92a-7140713f32fb\") " Jan 20 17:51:26 crc kubenswrapper[4558]: I0120 17:51:26.616927 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/cc15c42c-dd9c-4f60-b92a-7140713f32fb-fernet-keys\") pod \"cc15c42c-dd9c-4f60-b92a-7140713f32fb\" (UID: \"cc15c42c-dd9c-4f60-b92a-7140713f32fb\") " Jan 20 17:51:26 crc kubenswrapper[4558]: I0120 17:51:26.616964 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dqbrf\" (UniqueName: \"kubernetes.io/projected/cc15c42c-dd9c-4f60-b92a-7140713f32fb-kube-api-access-dqbrf\") pod \"cc15c42c-dd9c-4f60-b92a-7140713f32fb\" (UID: \"cc15c42c-dd9c-4f60-b92a-7140713f32fb\") " Jan 20 17:51:26 crc kubenswrapper[4558]: I0120 17:51:26.617040 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc15c42c-dd9c-4f60-b92a-7140713f32fb-combined-ca-bundle\") pod \"cc15c42c-dd9c-4f60-b92a-7140713f32fb\" (UID: \"cc15c42c-dd9c-4f60-b92a-7140713f32fb\") " Jan 20 17:51:26 crc kubenswrapper[4558]: I0120 17:51:26.617218 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cc15c42c-dd9c-4f60-b92a-7140713f32fb-config-data\") pod \"cc15c42c-dd9c-4f60-b92a-7140713f32fb\" (UID: \"cc15c42c-dd9c-4f60-b92a-7140713f32fb\") " Jan 20 17:51:26 crc kubenswrapper[4558]: I0120 17:51:26.617263 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc15c42c-dd9c-4f60-b92a-7140713f32fb-public-tls-certs\") pod \"cc15c42c-dd9c-4f60-b92a-7140713f32fb\" (UID: \"cc15c42c-dd9c-4f60-b92a-7140713f32fb\") " Jan 20 17:51:26 crc kubenswrapper[4558]: I0120 17:51:26.617349 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cc15c42c-dd9c-4f60-b92a-7140713f32fb-scripts\") pod \"cc15c42c-dd9c-4f60-b92a-7140713f32fb\" (UID: \"cc15c42c-dd9c-4f60-b92a-7140713f32fb\") " Jan 20 17:51:26 crc kubenswrapper[4558]: I0120 17:51:26.617395 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/cc15c42c-dd9c-4f60-b92a-7140713f32fb-credential-keys\") pod \"cc15c42c-dd9c-4f60-b92a-7140713f32fb\" (UID: \"cc15c42c-dd9c-4f60-b92a-7140713f32fb\") " Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.095291 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc15c42c-dd9c-4f60-b92a-7140713f32fb-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "cc15c42c-dd9c-4f60-b92a-7140713f32fb" (UID: "cc15c42c-dd9c-4f60-b92a-7140713f32fb"). InnerVolumeSpecName "credential-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.097440 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc15c42c-dd9c-4f60-b92a-7140713f32fb-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "cc15c42c-dd9c-4f60-b92a-7140713f32fb" (UID: "cc15c42c-dd9c-4f60-b92a-7140713f32fb"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.099351 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cc15c42c-dd9c-4f60-b92a-7140713f32fb-kube-api-access-dqbrf" (OuterVolumeSpecName: "kube-api-access-dqbrf") pod "cc15c42c-dd9c-4f60-b92a-7140713f32fb" (UID: "cc15c42c-dd9c-4f60-b92a-7140713f32fb"). InnerVolumeSpecName "kube-api-access-dqbrf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.101056 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc15c42c-dd9c-4f60-b92a-7140713f32fb-scripts" (OuterVolumeSpecName: "scripts") pod "cc15c42c-dd9c-4f60-b92a-7140713f32fb" (UID: "cc15c42c-dd9c-4f60-b92a-7140713f32fb"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.125464 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc15c42c-dd9c-4f60-b92a-7140713f32fb-config-data" (OuterVolumeSpecName: "config-data") pod "cc15c42c-dd9c-4f60-b92a-7140713f32fb" (UID: "cc15c42c-dd9c-4f60-b92a-7140713f32fb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.129342 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cc15c42c-dd9c-4f60-b92a-7140713f32fb-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.129362 4558 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/cc15c42c-dd9c-4f60-b92a-7140713f32fb-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.129372 4558 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/cc15c42c-dd9c-4f60-b92a-7140713f32fb-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.129383 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dqbrf\" (UniqueName: \"kubernetes.io/projected/cc15c42c-dd9c-4f60-b92a-7140713f32fb-kube-api-access-dqbrf\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.129392 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cc15c42c-dd9c-4f60-b92a-7140713f32fb-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.132324 4558 generic.go:334] "Generic (PLEG): container finished" podID="cc15c42c-dd9c-4f60-b92a-7140713f32fb" containerID="b395c952bedfc004b11ffa76313700fbc18b300f2d6b43029c49f65465fa385b" exitCode=0 Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.132380 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-64bbc794f7-mwswp" 
event={"ID":"cc15c42c-dd9c-4f60-b92a-7140713f32fb","Type":"ContainerDied","Data":"b395c952bedfc004b11ffa76313700fbc18b300f2d6b43029c49f65465fa385b"} Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.132406 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-64bbc794f7-mwswp" event={"ID":"cc15c42c-dd9c-4f60-b92a-7140713f32fb","Type":"ContainerDied","Data":"ae37826d9118dcb89eddae7c9d8d01e17adb4ef2da527aa137848316cfdb54b7"} Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.132424 4558 scope.go:117] "RemoveContainer" containerID="b395c952bedfc004b11ffa76313700fbc18b300f2d6b43029c49f65465fa385b" Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.132564 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-64bbc794f7-mwswp" Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.140073 4558 generic.go:334] "Generic (PLEG): container finished" podID="4a97d228-d2ed-4b25-9746-7ee8083e562b" containerID="9eb510d65b859ba67d1fbebbd45639a22b783fc8a8eeb68ec8c537ed76f3ce64" exitCode=0 Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.140118 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-6fcd55fbd-x56xq" event={"ID":"4a97d228-d2ed-4b25-9746-7ee8083e562b","Type":"ContainerDied","Data":"9eb510d65b859ba67d1fbebbd45639a22b783fc8a8eeb68ec8c537ed76f3ce64"} Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.140146 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-6fcd55fbd-x56xq" event={"ID":"4a97d228-d2ed-4b25-9746-7ee8083e562b","Type":"ContainerDied","Data":"6040e54c3233b1d6c02cf5d17217893873b92cc727cdc887070c0d52ea5a930c"} Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.140157 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6040e54c3233b1d6c02cf5d17217893873b92cc727cdc887070c0d52ea5a930c" Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.148591 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc15c42c-dd9c-4f60-b92a-7140713f32fb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cc15c42c-dd9c-4f60-b92a-7140713f32fb" (UID: "cc15c42c-dd9c-4f60-b92a-7140713f32fb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.155227 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc15c42c-dd9c-4f60-b92a-7140713f32fb-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "cc15c42c-dd9c-4f60-b92a-7140713f32fb" (UID: "cc15c42c-dd9c-4f60-b92a-7140713f32fb"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.159560 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cc15c42c-dd9c-4f60-b92a-7140713f32fb-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "cc15c42c-dd9c-4f60-b92a-7140713f32fb" (UID: "cc15c42c-dd9c-4f60-b92a-7140713f32fb"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.220975 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-api-6fcd55fbd-x56xq" Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.227844 4558 scope.go:117] "RemoveContainer" containerID="b395c952bedfc004b11ffa76313700fbc18b300f2d6b43029c49f65465fa385b" Jan 20 17:51:27 crc kubenswrapper[4558]: E0120 17:51:27.228244 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b395c952bedfc004b11ffa76313700fbc18b300f2d6b43029c49f65465fa385b\": container with ID starting with b395c952bedfc004b11ffa76313700fbc18b300f2d6b43029c49f65465fa385b not found: ID does not exist" containerID="b395c952bedfc004b11ffa76313700fbc18b300f2d6b43029c49f65465fa385b" Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.228296 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b395c952bedfc004b11ffa76313700fbc18b300f2d6b43029c49f65465fa385b"} err="failed to get container status \"b395c952bedfc004b11ffa76313700fbc18b300f2d6b43029c49f65465fa385b\": rpc error: code = NotFound desc = could not find container \"b395c952bedfc004b11ffa76313700fbc18b300f2d6b43029c49f65465fa385b\": container with ID starting with b395c952bedfc004b11ffa76313700fbc18b300f2d6b43029c49f65465fa385b not found: ID does not exist" Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.232610 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc15c42c-dd9c-4f60-b92a-7140713f32fb-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.232630 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc15c42c-dd9c-4f60-b92a-7140713f32fb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.232643 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cc15c42c-dd9c-4f60-b92a-7140713f32fb-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.329908 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.329993 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.334599 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a97d228-d2ed-4b25-9746-7ee8083e562b-config-data\") pod \"4a97d228-d2ed-4b25-9746-7ee8083e562b\" (UID: \"4a97d228-d2ed-4b25-9746-7ee8083e562b\") " Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.334764 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4a97d228-d2ed-4b25-9746-7ee8083e562b-logs\") pod \"4a97d228-d2ed-4b25-9746-7ee8083e562b\" (UID: 
\"4a97d228-d2ed-4b25-9746-7ee8083e562b\") " Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.334852 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-95pff\" (UniqueName: \"kubernetes.io/projected/4a97d228-d2ed-4b25-9746-7ee8083e562b-kube-api-access-95pff\") pod \"4a97d228-d2ed-4b25-9746-7ee8083e562b\" (UID: \"4a97d228-d2ed-4b25-9746-7ee8083e562b\") " Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.334962 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4a97d228-d2ed-4b25-9746-7ee8083e562b-public-tls-certs\") pod \"4a97d228-d2ed-4b25-9746-7ee8083e562b\" (UID: \"4a97d228-d2ed-4b25-9746-7ee8083e562b\") " Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.335093 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a97d228-d2ed-4b25-9746-7ee8083e562b-combined-ca-bundle\") pod \"4a97d228-d2ed-4b25-9746-7ee8083e562b\" (UID: \"4a97d228-d2ed-4b25-9746-7ee8083e562b\") " Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.335269 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4a97d228-d2ed-4b25-9746-7ee8083e562b-internal-tls-certs\") pod \"4a97d228-d2ed-4b25-9746-7ee8083e562b\" (UID: \"4a97d228-d2ed-4b25-9746-7ee8083e562b\") " Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.335355 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4a97d228-d2ed-4b25-9746-7ee8083e562b-config-data-custom\") pod \"4a97d228-d2ed-4b25-9746-7ee8083e562b\" (UID: \"4a97d228-d2ed-4b25-9746-7ee8083e562b\") " Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.335417 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4a97d228-d2ed-4b25-9746-7ee8083e562b-logs" (OuterVolumeSpecName: "logs") pod "4a97d228-d2ed-4b25-9746-7ee8083e562b" (UID: "4a97d228-d2ed-4b25-9746-7ee8083e562b"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.336712 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4a97d228-d2ed-4b25-9746-7ee8083e562b-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.342909 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4a97d228-d2ed-4b25-9746-7ee8083e562b-kube-api-access-95pff" (OuterVolumeSpecName: "kube-api-access-95pff") pod "4a97d228-d2ed-4b25-9746-7ee8083e562b" (UID: "4a97d228-d2ed-4b25-9746-7ee8083e562b"). InnerVolumeSpecName "kube-api-access-95pff". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.348892 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4a97d228-d2ed-4b25-9746-7ee8083e562b-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "4a97d228-d2ed-4b25-9746-7ee8083e562b" (UID: "4a97d228-d2ed-4b25-9746-7ee8083e562b"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.370273 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4a97d228-d2ed-4b25-9746-7ee8083e562b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4a97d228-d2ed-4b25-9746-7ee8083e562b" (UID: "4a97d228-d2ed-4b25-9746-7ee8083e562b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.377080 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.377339 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-api-0" podUID="dd518f67-fd09-4102-9934-594b833bf8c8" containerName="cinder-api-log" containerID="cri-o://c1f8d8bf07ca82f670329868a579e9d926a00b6090b23758e7d9b8f7b4cae4d3" gracePeriod=30 Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.377726 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-api-0" podUID="dd518f67-fd09-4102-9934-594b833bf8c8" containerName="cinder-api" containerID="cri-o://e7e61e40b3ceb7bcd0050ea1fa8a9bb9f8c5958158a076d7e7954bdc11cae1ef" gracePeriod=30 Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.388705 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack-kuttl-tests/cinder-api-0" podUID="dd518f67-fd09-4102-9934-594b833bf8c8" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.1.221:8776/healthcheck\": EOF" Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.388953 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/cinder-api-0" podUID="dd518f67-fd09-4102-9934-594b833bf8c8" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.1.221:8776/healthcheck\": EOF" Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.394908 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4a97d228-d2ed-4b25-9746-7ee8083e562b-config-data" (OuterVolumeSpecName: "config-data") pod "4a97d228-d2ed-4b25-9746-7ee8083e562b" (UID: "4a97d228-d2ed-4b25-9746-7ee8083e562b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.420264 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4a97d228-d2ed-4b25-9746-7ee8083e562b-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "4a97d228-d2ed-4b25-9746-7ee8083e562b" (UID: "4a97d228-d2ed-4b25-9746-7ee8083e562b"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.438694 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4a97d228-d2ed-4b25-9746-7ee8083e562b-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.438732 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-95pff\" (UniqueName: \"kubernetes.io/projected/4a97d228-d2ed-4b25-9746-7ee8083e562b-kube-api-access-95pff\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.438746 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a97d228-d2ed-4b25-9746-7ee8083e562b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.438758 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4a97d228-d2ed-4b25-9746-7ee8083e562b-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.438767 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4a97d228-d2ed-4b25-9746-7ee8083e562b-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.446275 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4a97d228-d2ed-4b25-9746-7ee8083e562b-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "4a97d228-d2ed-4b25-9746-7ee8083e562b" (UID: "4a97d228-d2ed-4b25-9746-7ee8083e562b"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.462643 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.463144 4558 scope.go:117] "RemoveContainer" containerID="4e744e9f021c29bfddbe31a0a527bc6946c7e7d3595c756668ddc79180474c63" Jan 20 17:51:27 crc kubenswrapper[4558]: E0120 17:51:27.463580 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-scheduler-scheduler\" with CrashLoopBackOff: \"back-off 20s restarting failed container=nova-scheduler-scheduler pod=nova-scheduler-0_openstack-kuttl-tests(d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2)\"" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2" Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.470857 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-64bbc794f7-mwswp"] Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.474027 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/neutron-78bcff9576-76n9h" podUID="6a566db2-a941-48c9-9169-8e2c16cda1ac" containerName="neutron-httpd" probeResult="failure" output="Get \"https://10.217.1.134:9696/\": dial tcp 10.217.1.134:9696: connect: connection refused" Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.477014 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-64bbc794f7-mwswp"] Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.541633 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4a97d228-d2ed-4b25-9746-7ee8083e562b-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.653049 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-api-5bfc7dd8b8-pxpm9"] Jan 20 17:51:27 crc kubenswrapper[4558]: E0120 17:51:27.653719 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a97d228-d2ed-4b25-9746-7ee8083e562b" containerName="barbican-api-log" Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.653744 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a97d228-d2ed-4b25-9746-7ee8083e562b" containerName="barbican-api-log" Jan 20 17:51:27 crc kubenswrapper[4558]: E0120 17:51:27.653777 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3922e044-abc1-42e8-a5a0-dfd1fe256e14" containerName="keystone-api" Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.653784 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="3922e044-abc1-42e8-a5a0-dfd1fe256e14" containerName="keystone-api" Jan 20 17:51:27 crc kubenswrapper[4558]: E0120 17:51:27.653798 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc15c42c-dd9c-4f60-b92a-7140713f32fb" containerName="keystone-api" Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.653807 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc15c42c-dd9c-4f60-b92a-7140713f32fb" containerName="keystone-api" Jan 20 17:51:27 crc kubenswrapper[4558]: E0120 17:51:27.653823 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a97d228-d2ed-4b25-9746-7ee8083e562b" containerName="barbican-api" Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.653829 4558 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="4a97d228-d2ed-4b25-9746-7ee8083e562b" containerName="barbican-api" Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.654042 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="3922e044-abc1-42e8-a5a0-dfd1fe256e14" containerName="keystone-api" Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.654067 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a97d228-d2ed-4b25-9746-7ee8083e562b" containerName="barbican-api-log" Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.654080 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="cc15c42c-dd9c-4f60-b92a-7140713f32fb" containerName="keystone-api" Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.654093 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a97d228-d2ed-4b25-9746-7ee8083e562b" containerName="barbican-api" Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.655158 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-api-5bfc7dd8b8-pxpm9" Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.665190 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-api-5bfc7dd8b8-pxpm9"] Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.715462 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/memcached-0" podUID="f95ad818-8ba9-4839-a613-2282bbe69ddc" containerName="memcached" probeResult="failure" output="dial tcp 10.217.1.209:11211: i/o timeout" Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.747938 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e638a157-4234-4339-80ef-65d818d39b73-public-tls-certs\") pod \"barbican-api-5bfc7dd8b8-pxpm9\" (UID: \"e638a157-4234-4339-80ef-65d818d39b73\") " pod="openstack-kuttl-tests/barbican-api-5bfc7dd8b8-pxpm9" Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.748087 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e638a157-4234-4339-80ef-65d818d39b73-internal-tls-certs\") pod \"barbican-api-5bfc7dd8b8-pxpm9\" (UID: \"e638a157-4234-4339-80ef-65d818d39b73\") " pod="openstack-kuttl-tests/barbican-api-5bfc7dd8b8-pxpm9" Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.748120 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e638a157-4234-4339-80ef-65d818d39b73-config-data\") pod \"barbican-api-5bfc7dd8b8-pxpm9\" (UID: \"e638a157-4234-4339-80ef-65d818d39b73\") " pod="openstack-kuttl-tests/barbican-api-5bfc7dd8b8-pxpm9" Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.748179 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e638a157-4234-4339-80ef-65d818d39b73-config-data-custom\") pod \"barbican-api-5bfc7dd8b8-pxpm9\" (UID: \"e638a157-4234-4339-80ef-65d818d39b73\") " pod="openstack-kuttl-tests/barbican-api-5bfc7dd8b8-pxpm9" Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.748426 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e638a157-4234-4339-80ef-65d818d39b73-logs\") pod \"barbican-api-5bfc7dd8b8-pxpm9\" (UID: 
\"e638a157-4234-4339-80ef-65d818d39b73\") " pod="openstack-kuttl-tests/barbican-api-5bfc7dd8b8-pxpm9" Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.748595 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e638a157-4234-4339-80ef-65d818d39b73-combined-ca-bundle\") pod \"barbican-api-5bfc7dd8b8-pxpm9\" (UID: \"e638a157-4234-4339-80ef-65d818d39b73\") " pod="openstack-kuttl-tests/barbican-api-5bfc7dd8b8-pxpm9" Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.748782 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vf97d\" (UniqueName: \"kubernetes.io/projected/e638a157-4234-4339-80ef-65d818d39b73-kube-api-access-vf97d\") pod \"barbican-api-5bfc7dd8b8-pxpm9\" (UID: \"e638a157-4234-4339-80ef-65d818d39b73\") " pod="openstack-kuttl-tests/barbican-api-5bfc7dd8b8-pxpm9" Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.860766 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vf97d\" (UniqueName: \"kubernetes.io/projected/e638a157-4234-4339-80ef-65d818d39b73-kube-api-access-vf97d\") pod \"barbican-api-5bfc7dd8b8-pxpm9\" (UID: \"e638a157-4234-4339-80ef-65d818d39b73\") " pod="openstack-kuttl-tests/barbican-api-5bfc7dd8b8-pxpm9" Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.861421 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e638a157-4234-4339-80ef-65d818d39b73-public-tls-certs\") pod \"barbican-api-5bfc7dd8b8-pxpm9\" (UID: \"e638a157-4234-4339-80ef-65d818d39b73\") " pod="openstack-kuttl-tests/barbican-api-5bfc7dd8b8-pxpm9" Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.861495 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e638a157-4234-4339-80ef-65d818d39b73-internal-tls-certs\") pod \"barbican-api-5bfc7dd8b8-pxpm9\" (UID: \"e638a157-4234-4339-80ef-65d818d39b73\") " pod="openstack-kuttl-tests/barbican-api-5bfc7dd8b8-pxpm9" Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.861527 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e638a157-4234-4339-80ef-65d818d39b73-config-data\") pod \"barbican-api-5bfc7dd8b8-pxpm9\" (UID: \"e638a157-4234-4339-80ef-65d818d39b73\") " pod="openstack-kuttl-tests/barbican-api-5bfc7dd8b8-pxpm9" Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.861556 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e638a157-4234-4339-80ef-65d818d39b73-config-data-custom\") pod \"barbican-api-5bfc7dd8b8-pxpm9\" (UID: \"e638a157-4234-4339-80ef-65d818d39b73\") " pod="openstack-kuttl-tests/barbican-api-5bfc7dd8b8-pxpm9" Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.861659 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e638a157-4234-4339-80ef-65d818d39b73-logs\") pod \"barbican-api-5bfc7dd8b8-pxpm9\" (UID: \"e638a157-4234-4339-80ef-65d818d39b73\") " pod="openstack-kuttl-tests/barbican-api-5bfc7dd8b8-pxpm9" Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.861684 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/e638a157-4234-4339-80ef-65d818d39b73-combined-ca-bundle\") pod \"barbican-api-5bfc7dd8b8-pxpm9\" (UID: \"e638a157-4234-4339-80ef-65d818d39b73\") " pod="openstack-kuttl-tests/barbican-api-5bfc7dd8b8-pxpm9" Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.862919 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e638a157-4234-4339-80ef-65d818d39b73-logs\") pod \"barbican-api-5bfc7dd8b8-pxpm9\" (UID: \"e638a157-4234-4339-80ef-65d818d39b73\") " pod="openstack-kuttl-tests/barbican-api-5bfc7dd8b8-pxpm9" Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.867230 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e638a157-4234-4339-80ef-65d818d39b73-config-data-custom\") pod \"barbican-api-5bfc7dd8b8-pxpm9\" (UID: \"e638a157-4234-4339-80ef-65d818d39b73\") " pod="openstack-kuttl-tests/barbican-api-5bfc7dd8b8-pxpm9" Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.868204 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e638a157-4234-4339-80ef-65d818d39b73-public-tls-certs\") pod \"barbican-api-5bfc7dd8b8-pxpm9\" (UID: \"e638a157-4234-4339-80ef-65d818d39b73\") " pod="openstack-kuttl-tests/barbican-api-5bfc7dd8b8-pxpm9" Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.868570 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e638a157-4234-4339-80ef-65d818d39b73-combined-ca-bundle\") pod \"barbican-api-5bfc7dd8b8-pxpm9\" (UID: \"e638a157-4234-4339-80ef-65d818d39b73\") " pod="openstack-kuttl-tests/barbican-api-5bfc7dd8b8-pxpm9" Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.869246 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e638a157-4234-4339-80ef-65d818d39b73-internal-tls-certs\") pod \"barbican-api-5bfc7dd8b8-pxpm9\" (UID: \"e638a157-4234-4339-80ef-65d818d39b73\") " pod="openstack-kuttl-tests/barbican-api-5bfc7dd8b8-pxpm9" Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.870723 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e638a157-4234-4339-80ef-65d818d39b73-config-data\") pod \"barbican-api-5bfc7dd8b8-pxpm9\" (UID: \"e638a157-4234-4339-80ef-65d818d39b73\") " pod="openstack-kuttl-tests/barbican-api-5bfc7dd8b8-pxpm9" Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.878920 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vf97d\" (UniqueName: \"kubernetes.io/projected/e638a157-4234-4339-80ef-65d818d39b73-kube-api-access-vf97d\") pod \"barbican-api-5bfc7dd8b8-pxpm9\" (UID: \"e638a157-4234-4339-80ef-65d818d39b73\") " pod="openstack-kuttl-tests/barbican-api-5bfc7dd8b8-pxpm9" Jan 20 17:51:27 crc kubenswrapper[4558]: I0120 17:51:27.970495 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-api-5bfc7dd8b8-pxpm9" Jan 20 17:51:28 crc kubenswrapper[4558]: I0120 17:51:28.118640 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:51:28 crc kubenswrapper[4558]: I0120 17:51:28.118678 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:51:28 crc kubenswrapper[4558]: I0120 17:51:28.165893 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:51:28 crc kubenswrapper[4558]: I0120 17:51:28.167725 4558 generic.go:334] "Generic (PLEG): container finished" podID="dd518f67-fd09-4102-9934-594b833bf8c8" containerID="c1f8d8bf07ca82f670329868a579e9d926a00b6090b23758e7d9b8f7b4cae4d3" exitCode=143 Jan 20 17:51:28 crc kubenswrapper[4558]: I0120 17:51:28.167915 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"dd518f67-fd09-4102-9934-594b833bf8c8","Type":"ContainerDied","Data":"c1f8d8bf07ca82f670329868a579e9d926a00b6090b23758e7d9b8f7b4cae4d3"} Jan 20 17:51:28 crc kubenswrapper[4558]: I0120 17:51:28.168017 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-api-6fcd55fbd-x56xq" Jan 20 17:51:28 crc kubenswrapper[4558]: I0120 17:51:28.168380 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:51:28 crc kubenswrapper[4558]: I0120 17:51:28.175198 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:51:28 crc kubenswrapper[4558]: I0120 17:51:28.235079 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-api-6fcd55fbd-x56xq"] Jan 20 17:51:28 crc kubenswrapper[4558]: I0120 17:51:28.248869 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-api-6fcd55fbd-x56xq"] Jan 20 17:51:28 crc kubenswrapper[4558]: I0120 17:51:28.428531 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-api-5bfc7dd8b8-pxpm9"] Jan 20 17:51:28 crc kubenswrapper[4558]: I0120 17:51:28.436317 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:51:28 crc kubenswrapper[4558]: I0120 17:51:28.575769 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4a97d228-d2ed-4b25-9746-7ee8083e562b" path="/var/lib/kubelet/pods/4a97d228-d2ed-4b25-9746-7ee8083e562b/volumes" Jan 20 17:51:28 crc kubenswrapper[4558]: I0120 17:51:28.576516 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cc15c42c-dd9c-4f60-b92a-7140713f32fb" path="/var/lib/kubelet/pods/cc15c42c-dd9c-4f60-b92a-7140713f32fb/volumes" Jan 20 17:51:28 crc kubenswrapper[4558]: I0120 17:51:28.604917 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:51:28 crc kubenswrapper[4558]: I0120 17:51:28.605474 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="acb2f7f3-f6d0-4b4f-936f-baee3dfa3938" containerName="nova-metadata-log" containerID="cri-o://e67785bf94379651bfb64558a92f8a11e520c0f771013a70d50dbc2e73a7d2fb" 
gracePeriod=30 Jan 20 17:51:28 crc kubenswrapper[4558]: I0120 17:51:28.605570 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="acb2f7f3-f6d0-4b4f-936f-baee3dfa3938" containerName="nova-metadata-metadata" containerID="cri-o://06efd4d354098bbe1c72d647b10d6aecbc2f356d80570ca39ea3a76e7949f322" gracePeriod=30 Jan 20 17:51:28 crc kubenswrapper[4558]: I0120 17:51:28.662754 4558 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","pod733a1ae1-a917-447a-afa6-67cf7d688f86"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort pod733a1ae1-a917-447a-afa6-67cf7d688f86] : Timed out while waiting for systemd to remove kubepods-besteffort-pod733a1ae1_a917_447a_afa6_67cf7d688f86.slice" Jan 20 17:51:28 crc kubenswrapper[4558]: E0120 17:51:28.662814 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to delete cgroup paths for [kubepods besteffort pod733a1ae1-a917-447a-afa6-67cf7d688f86] : unable to destroy cgroup paths for cgroup [kubepods besteffort pod733a1ae1-a917-447a-afa6-67cf7d688f86] : Timed out while waiting for systemd to remove kubepods-besteffort-pod733a1ae1_a917_447a_afa6_67cf7d688f86.slice" pod="openstack-kuttl-tests/nova-api-0" podUID="733a1ae1-a917-447a-afa6-67cf7d688f86" Jan 20 17:51:28 crc kubenswrapper[4558]: I0120 17:51:28.722576 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-7578dc9646-4xs8n"] Jan 20 17:51:28 crc kubenswrapper[4558]: I0120 17:51:28.724332 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-7578dc9646-4xs8n" Jan 20 17:51:28 crc kubenswrapper[4558]: I0120 17:51:28.814489 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-7578dc9646-4xs8n"] Jan 20 17:51:28 crc kubenswrapper[4558]: I0120 17:51:28.893819 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf356652-86c3-4fff-acda-805974e723a8-public-tls-certs\") pod \"neutron-7578dc9646-4xs8n\" (UID: \"cf356652-86c3-4fff-acda-805974e723a8\") " pod="openstack-kuttl-tests/neutron-7578dc9646-4xs8n" Jan 20 17:51:28 crc kubenswrapper[4558]: I0120 17:51:28.894197 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf356652-86c3-4fff-acda-805974e723a8-combined-ca-bundle\") pod \"neutron-7578dc9646-4xs8n\" (UID: \"cf356652-86c3-4fff-acda-805974e723a8\") " pod="openstack-kuttl-tests/neutron-7578dc9646-4xs8n" Jan 20 17:51:28 crc kubenswrapper[4558]: I0120 17:51:28.894347 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-htcqf\" (UniqueName: \"kubernetes.io/projected/cf356652-86c3-4fff-acda-805974e723a8-kube-api-access-htcqf\") pod \"neutron-7578dc9646-4xs8n\" (UID: \"cf356652-86c3-4fff-acda-805974e723a8\") " pod="openstack-kuttl-tests/neutron-7578dc9646-4xs8n" Jan 20 17:51:28 crc kubenswrapper[4558]: I0120 17:51:28.894535 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/cf356652-86c3-4fff-acda-805974e723a8-config\") pod \"neutron-7578dc9646-4xs8n\" (UID: \"cf356652-86c3-4fff-acda-805974e723a8\") " pod="openstack-kuttl-tests/neutron-7578dc9646-4xs8n" Jan 20 17:51:28 crc 
kubenswrapper[4558]: I0120 17:51:28.894730 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/cf356652-86c3-4fff-acda-805974e723a8-httpd-config\") pod \"neutron-7578dc9646-4xs8n\" (UID: \"cf356652-86c3-4fff-acda-805974e723a8\") " pod="openstack-kuttl-tests/neutron-7578dc9646-4xs8n" Jan 20 17:51:28 crc kubenswrapper[4558]: I0120 17:51:28.894783 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf356652-86c3-4fff-acda-805974e723a8-internal-tls-certs\") pod \"neutron-7578dc9646-4xs8n\" (UID: \"cf356652-86c3-4fff-acda-805974e723a8\") " pod="openstack-kuttl-tests/neutron-7578dc9646-4xs8n" Jan 20 17:51:28 crc kubenswrapper[4558]: I0120 17:51:28.895011 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf356652-86c3-4fff-acda-805974e723a8-ovndb-tls-certs\") pod \"neutron-7578dc9646-4xs8n\" (UID: \"cf356652-86c3-4fff-acda-805974e723a8\") " pod="openstack-kuttl-tests/neutron-7578dc9646-4xs8n" Jan 20 17:51:28 crc kubenswrapper[4558]: I0120 17:51:28.996794 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf356652-86c3-4fff-acda-805974e723a8-combined-ca-bundle\") pod \"neutron-7578dc9646-4xs8n\" (UID: \"cf356652-86c3-4fff-acda-805974e723a8\") " pod="openstack-kuttl-tests/neutron-7578dc9646-4xs8n" Jan 20 17:51:28 crc kubenswrapper[4558]: I0120 17:51:28.996893 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-htcqf\" (UniqueName: \"kubernetes.io/projected/cf356652-86c3-4fff-acda-805974e723a8-kube-api-access-htcqf\") pod \"neutron-7578dc9646-4xs8n\" (UID: \"cf356652-86c3-4fff-acda-805974e723a8\") " pod="openstack-kuttl-tests/neutron-7578dc9646-4xs8n" Jan 20 17:51:28 crc kubenswrapper[4558]: I0120 17:51:28.996975 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/cf356652-86c3-4fff-acda-805974e723a8-config\") pod \"neutron-7578dc9646-4xs8n\" (UID: \"cf356652-86c3-4fff-acda-805974e723a8\") " pod="openstack-kuttl-tests/neutron-7578dc9646-4xs8n" Jan 20 17:51:28 crc kubenswrapper[4558]: I0120 17:51:28.997140 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/cf356652-86c3-4fff-acda-805974e723a8-httpd-config\") pod \"neutron-7578dc9646-4xs8n\" (UID: \"cf356652-86c3-4fff-acda-805974e723a8\") " pod="openstack-kuttl-tests/neutron-7578dc9646-4xs8n" Jan 20 17:51:28 crc kubenswrapper[4558]: I0120 17:51:28.997750 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf356652-86c3-4fff-acda-805974e723a8-internal-tls-certs\") pod \"neutron-7578dc9646-4xs8n\" (UID: \"cf356652-86c3-4fff-acda-805974e723a8\") " pod="openstack-kuttl-tests/neutron-7578dc9646-4xs8n" Jan 20 17:51:28 crc kubenswrapper[4558]: I0120 17:51:28.997994 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf356652-86c3-4fff-acda-805974e723a8-ovndb-tls-certs\") pod \"neutron-7578dc9646-4xs8n\" (UID: \"cf356652-86c3-4fff-acda-805974e723a8\") " 
pod="openstack-kuttl-tests/neutron-7578dc9646-4xs8n" Jan 20 17:51:28 crc kubenswrapper[4558]: I0120 17:51:28.998035 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf356652-86c3-4fff-acda-805974e723a8-public-tls-certs\") pod \"neutron-7578dc9646-4xs8n\" (UID: \"cf356652-86c3-4fff-acda-805974e723a8\") " pod="openstack-kuttl-tests/neutron-7578dc9646-4xs8n" Jan 20 17:51:29 crc kubenswrapper[4558]: I0120 17:51:29.098697 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/cf356652-86c3-4fff-acda-805974e723a8-config\") pod \"neutron-7578dc9646-4xs8n\" (UID: \"cf356652-86c3-4fff-acda-805974e723a8\") " pod="openstack-kuttl-tests/neutron-7578dc9646-4xs8n" Jan 20 17:51:29 crc kubenswrapper[4558]: I0120 17:51:29.099118 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/cf356652-86c3-4fff-acda-805974e723a8-httpd-config\") pod \"neutron-7578dc9646-4xs8n\" (UID: \"cf356652-86c3-4fff-acda-805974e723a8\") " pod="openstack-kuttl-tests/neutron-7578dc9646-4xs8n" Jan 20 17:51:29 crc kubenswrapper[4558]: I0120 17:51:29.099738 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf356652-86c3-4fff-acda-805974e723a8-combined-ca-bundle\") pod \"neutron-7578dc9646-4xs8n\" (UID: \"cf356652-86c3-4fff-acda-805974e723a8\") " pod="openstack-kuttl-tests/neutron-7578dc9646-4xs8n" Jan 20 17:51:29 crc kubenswrapper[4558]: I0120 17:51:29.101590 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf356652-86c3-4fff-acda-805974e723a8-public-tls-certs\") pod \"neutron-7578dc9646-4xs8n\" (UID: \"cf356652-86c3-4fff-acda-805974e723a8\") " pod="openstack-kuttl-tests/neutron-7578dc9646-4xs8n" Jan 20 17:51:29 crc kubenswrapper[4558]: I0120 17:51:29.101963 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf356652-86c3-4fff-acda-805974e723a8-ovndb-tls-certs\") pod \"neutron-7578dc9646-4xs8n\" (UID: \"cf356652-86c3-4fff-acda-805974e723a8\") " pod="openstack-kuttl-tests/neutron-7578dc9646-4xs8n" Jan 20 17:51:29 crc kubenswrapper[4558]: I0120 17:51:29.102369 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf356652-86c3-4fff-acda-805974e723a8-internal-tls-certs\") pod \"neutron-7578dc9646-4xs8n\" (UID: \"cf356652-86c3-4fff-acda-805974e723a8\") " pod="openstack-kuttl-tests/neutron-7578dc9646-4xs8n" Jan 20 17:51:29 crc kubenswrapper[4558]: I0120 17:51:29.109381 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-htcqf\" (UniqueName: \"kubernetes.io/projected/cf356652-86c3-4fff-acda-805974e723a8-kube-api-access-htcqf\") pod \"neutron-7578dc9646-4xs8n\" (UID: \"cf356652-86c3-4fff-acda-805974e723a8\") " pod="openstack-kuttl-tests/neutron-7578dc9646-4xs8n" Jan 20 17:51:29 crc kubenswrapper[4558]: I0120 17:51:29.179341 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-5bfc7dd8b8-pxpm9" event={"ID":"e638a157-4234-4339-80ef-65d818d39b73","Type":"ContainerStarted","Data":"5ffeb03ff85f67c07d33f3f3725dfcbef564cf6c716c687b03dc2f0eef33238d"} Jan 20 17:51:29 crc kubenswrapper[4558]: I0120 17:51:29.179393 4558 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-5bfc7dd8b8-pxpm9" event={"ID":"e638a157-4234-4339-80ef-65d818d39b73","Type":"ContainerStarted","Data":"6b1b879f8ac6761b57d07842ada4680b78926bc506d8e1ce295b8e90a5aeebbc"} Jan 20 17:51:29 crc kubenswrapper[4558]: I0120 17:51:29.181455 4558 generic.go:334] "Generic (PLEG): container finished" podID="acb2f7f3-f6d0-4b4f-936f-baee3dfa3938" containerID="e67785bf94379651bfb64558a92f8a11e520c0f771013a70d50dbc2e73a7d2fb" exitCode=143 Jan 20 17:51:29 crc kubenswrapper[4558]: I0120 17:51:29.182932 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"acb2f7f3-f6d0-4b4f-936f-baee3dfa3938","Type":"ContainerDied","Data":"e67785bf94379651bfb64558a92f8a11e520c0f771013a70d50dbc2e73a7d2fb"} Jan 20 17:51:29 crc kubenswrapper[4558]: I0120 17:51:29.182963 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:51:29 crc kubenswrapper[4558]: I0120 17:51:29.183009 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:51:29 crc kubenswrapper[4558]: I0120 17:51:29.306513 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:51:29 crc kubenswrapper[4558]: I0120 17:51:29.338383 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:51:29 crc kubenswrapper[4558]: I0120 17:51:29.344506 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-7578dc9646-4xs8n" Jan 20 17:51:29 crc kubenswrapper[4558]: I0120 17:51:29.347419 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:51:29 crc kubenswrapper[4558]: I0120 17:51:29.350236 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:51:29 crc kubenswrapper[4558]: I0120 17:51:29.355975 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-internal-svc" Jan 20 17:51:29 crc kubenswrapper[4558]: I0120 17:51:29.356202 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-api-config-data" Jan 20 17:51:29 crc kubenswrapper[4558]: I0120 17:51:29.356805 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:51:29 crc kubenswrapper[4558]: I0120 17:51:29.358353 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-public-svc" Jan 20 17:51:29 crc kubenswrapper[4558]: I0120 17:51:29.514395 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/64e30e89-cc14-4c89-bd1b-5702bfba717c-public-tls-certs\") pod \"nova-api-0\" (UID: \"64e30e89-cc14-4c89-bd1b-5702bfba717c\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:51:29 crc kubenswrapper[4558]: I0120 17:51:29.514679 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-994rd\" (UniqueName: \"kubernetes.io/projected/64e30e89-cc14-4c89-bd1b-5702bfba717c-kube-api-access-994rd\") pod \"nova-api-0\" (UID: \"64e30e89-cc14-4c89-bd1b-5702bfba717c\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:51:29 crc kubenswrapper[4558]: I0120 17:51:29.514767 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/64e30e89-cc14-4c89-bd1b-5702bfba717c-logs\") pod \"nova-api-0\" (UID: \"64e30e89-cc14-4c89-bd1b-5702bfba717c\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:51:29 crc kubenswrapper[4558]: I0120 17:51:29.514829 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64e30e89-cc14-4c89-bd1b-5702bfba717c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"64e30e89-cc14-4c89-bd1b-5702bfba717c\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:51:29 crc kubenswrapper[4558]: I0120 17:51:29.514850 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/64e30e89-cc14-4c89-bd1b-5702bfba717c-internal-tls-certs\") pod \"nova-api-0\" (UID: \"64e30e89-cc14-4c89-bd1b-5702bfba717c\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:51:29 crc kubenswrapper[4558]: I0120 17:51:29.514875 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64e30e89-cc14-4c89-bd1b-5702bfba717c-config-data\") pod \"nova-api-0\" (UID: \"64e30e89-cc14-4c89-bd1b-5702bfba717c\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:51:29 crc kubenswrapper[4558]: I0120 17:51:29.514981 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/733a1ae1-a917-447a-afa6-67cf7d688f86-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:29 crc kubenswrapper[4558]: I0120 17:51:29.569955 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:51:29 crc kubenswrapper[4558]: I0120 
17:51:29.599700 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:51:29 crc kubenswrapper[4558]: I0120 17:51:29.599749 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:51:29 crc kubenswrapper[4558]: I0120 17:51:29.616915 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/64e30e89-cc14-4c89-bd1b-5702bfba717c-public-tls-certs\") pod \"nova-api-0\" (UID: \"64e30e89-cc14-4c89-bd1b-5702bfba717c\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:51:29 crc kubenswrapper[4558]: I0120 17:51:29.616971 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-994rd\" (UniqueName: \"kubernetes.io/projected/64e30e89-cc14-4c89-bd1b-5702bfba717c-kube-api-access-994rd\") pod \"nova-api-0\" (UID: \"64e30e89-cc14-4c89-bd1b-5702bfba717c\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:51:29 crc kubenswrapper[4558]: I0120 17:51:29.617084 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/64e30e89-cc14-4c89-bd1b-5702bfba717c-logs\") pod \"nova-api-0\" (UID: \"64e30e89-cc14-4c89-bd1b-5702bfba717c\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:51:29 crc kubenswrapper[4558]: I0120 17:51:29.617154 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64e30e89-cc14-4c89-bd1b-5702bfba717c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"64e30e89-cc14-4c89-bd1b-5702bfba717c\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:51:29 crc kubenswrapper[4558]: I0120 17:51:29.617196 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/64e30e89-cc14-4c89-bd1b-5702bfba717c-internal-tls-certs\") pod \"nova-api-0\" (UID: \"64e30e89-cc14-4c89-bd1b-5702bfba717c\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:51:29 crc kubenswrapper[4558]: I0120 17:51:29.617225 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64e30e89-cc14-4c89-bd1b-5702bfba717c-config-data\") pod \"nova-api-0\" (UID: \"64e30e89-cc14-4c89-bd1b-5702bfba717c\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:51:29 crc kubenswrapper[4558]: I0120 17:51:29.617838 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/64e30e89-cc14-4c89-bd1b-5702bfba717c-logs\") pod \"nova-api-0\" (UID: \"64e30e89-cc14-4c89-bd1b-5702bfba717c\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:51:29 crc kubenswrapper[4558]: I0120 17:51:29.622456 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/64e30e89-cc14-4c89-bd1b-5702bfba717c-public-tls-certs\") pod \"nova-api-0\" (UID: \"64e30e89-cc14-4c89-bd1b-5702bfba717c\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:51:29 crc kubenswrapper[4558]: I0120 17:51:29.622657 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64e30e89-cc14-4c89-bd1b-5702bfba717c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"64e30e89-cc14-4c89-bd1b-5702bfba717c\") " 
pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:51:29 crc kubenswrapper[4558]: I0120 17:51:29.631830 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/64e30e89-cc14-4c89-bd1b-5702bfba717c-internal-tls-certs\") pod \"nova-api-0\" (UID: \"64e30e89-cc14-4c89-bd1b-5702bfba717c\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:51:29 crc kubenswrapper[4558]: I0120 17:51:29.631982 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64e30e89-cc14-4c89-bd1b-5702bfba717c-config-data\") pod \"nova-api-0\" (UID: \"64e30e89-cc14-4c89-bd1b-5702bfba717c\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:51:29 crc kubenswrapper[4558]: I0120 17:51:29.635663 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-994rd\" (UniqueName: \"kubernetes.io/projected/64e30e89-cc14-4c89-bd1b-5702bfba717c-kube-api-access-994rd\") pod \"nova-api-0\" (UID: \"64e30e89-cc14-4c89-bd1b-5702bfba717c\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:51:29 crc kubenswrapper[4558]: I0120 17:51:29.645617 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:51:29 crc kubenswrapper[4558]: I0120 17:51:29.669116 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:51:29 crc kubenswrapper[4558]: I0120 17:51:29.753903 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:51:29 crc kubenswrapper[4558]: I0120 17:51:29.788300 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-7578dc9646-4xs8n"] Jan 20 17:51:29 crc kubenswrapper[4558]: I0120 17:51:29.819696 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-7578dc9646-4xs8n"] Jan 20 17:51:29 crc kubenswrapper[4558]: I0120 17:51:29.843694 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-75676d645b-l2sp7"] Jan 20 17:51:29 crc kubenswrapper[4558]: I0120 17:51:29.845431 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-75676d645b-l2sp7" Jan 20 17:51:29 crc kubenswrapper[4558]: I0120 17:51:29.858066 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-75676d645b-l2sp7"] Jan 20 17:51:29 crc kubenswrapper[4558]: I0120 17:51:29.924832 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/6ea6b41c-c5ed-49c9-84f5-a932a1325aba-ovndb-tls-certs\") pod \"neutron-75676d645b-l2sp7\" (UID: \"6ea6b41c-c5ed-49c9-84f5-a932a1325aba\") " pod="openstack-kuttl-tests/neutron-75676d645b-l2sp7" Jan 20 17:51:29 crc kubenswrapper[4558]: I0120 17:51:29.925281 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6ea6b41c-c5ed-49c9-84f5-a932a1325aba-public-tls-certs\") pod \"neutron-75676d645b-l2sp7\" (UID: \"6ea6b41c-c5ed-49c9-84f5-a932a1325aba\") " pod="openstack-kuttl-tests/neutron-75676d645b-l2sp7" Jan 20 17:51:29 crc kubenswrapper[4558]: I0120 17:51:29.925316 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/6ea6b41c-c5ed-49c9-84f5-a932a1325aba-config\") pod \"neutron-75676d645b-l2sp7\" (UID: \"6ea6b41c-c5ed-49c9-84f5-a932a1325aba\") " pod="openstack-kuttl-tests/neutron-75676d645b-l2sp7" Jan 20 17:51:29 crc kubenswrapper[4558]: I0120 17:51:29.925383 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gd76d\" (UniqueName: \"kubernetes.io/projected/6ea6b41c-c5ed-49c9-84f5-a932a1325aba-kube-api-access-gd76d\") pod \"neutron-75676d645b-l2sp7\" (UID: \"6ea6b41c-c5ed-49c9-84f5-a932a1325aba\") " pod="openstack-kuttl-tests/neutron-75676d645b-l2sp7" Jan 20 17:51:29 crc kubenswrapper[4558]: I0120 17:51:29.925463 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/6ea6b41c-c5ed-49c9-84f5-a932a1325aba-httpd-config\") pod \"neutron-75676d645b-l2sp7\" (UID: \"6ea6b41c-c5ed-49c9-84f5-a932a1325aba\") " pod="openstack-kuttl-tests/neutron-75676d645b-l2sp7" Jan 20 17:51:29 crc kubenswrapper[4558]: I0120 17:51:29.925481 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ea6b41c-c5ed-49c9-84f5-a932a1325aba-combined-ca-bundle\") pod \"neutron-75676d645b-l2sp7\" (UID: \"6ea6b41c-c5ed-49c9-84f5-a932a1325aba\") " pod="openstack-kuttl-tests/neutron-75676d645b-l2sp7" Jan 20 17:51:29 crc kubenswrapper[4558]: I0120 17:51:29.925499 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6ea6b41c-c5ed-49c9-84f5-a932a1325aba-internal-tls-certs\") pod \"neutron-75676d645b-l2sp7\" (UID: \"6ea6b41c-c5ed-49c9-84f5-a932a1325aba\") " pod="openstack-kuttl-tests/neutron-75676d645b-l2sp7" Jan 20 17:51:30 crc kubenswrapper[4558]: I0120 17:51:30.030566 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6ea6b41c-c5ed-49c9-84f5-a932a1325aba-public-tls-certs\") pod \"neutron-75676d645b-l2sp7\" (UID: \"6ea6b41c-c5ed-49c9-84f5-a932a1325aba\") " pod="openstack-kuttl-tests/neutron-75676d645b-l2sp7" Jan 20 17:51:30 crc 
kubenswrapper[4558]: I0120 17:51:30.030727 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/6ea6b41c-c5ed-49c9-84f5-a932a1325aba-config\") pod \"neutron-75676d645b-l2sp7\" (UID: \"6ea6b41c-c5ed-49c9-84f5-a932a1325aba\") " pod="openstack-kuttl-tests/neutron-75676d645b-l2sp7" Jan 20 17:51:30 crc kubenswrapper[4558]: I0120 17:51:30.030935 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gd76d\" (UniqueName: \"kubernetes.io/projected/6ea6b41c-c5ed-49c9-84f5-a932a1325aba-kube-api-access-gd76d\") pod \"neutron-75676d645b-l2sp7\" (UID: \"6ea6b41c-c5ed-49c9-84f5-a932a1325aba\") " pod="openstack-kuttl-tests/neutron-75676d645b-l2sp7" Jan 20 17:51:30 crc kubenswrapper[4558]: I0120 17:51:30.031336 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/6ea6b41c-c5ed-49c9-84f5-a932a1325aba-httpd-config\") pod \"neutron-75676d645b-l2sp7\" (UID: \"6ea6b41c-c5ed-49c9-84f5-a932a1325aba\") " pod="openstack-kuttl-tests/neutron-75676d645b-l2sp7" Jan 20 17:51:30 crc kubenswrapper[4558]: I0120 17:51:30.031429 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ea6b41c-c5ed-49c9-84f5-a932a1325aba-combined-ca-bundle\") pod \"neutron-75676d645b-l2sp7\" (UID: \"6ea6b41c-c5ed-49c9-84f5-a932a1325aba\") " pod="openstack-kuttl-tests/neutron-75676d645b-l2sp7" Jan 20 17:51:30 crc kubenswrapper[4558]: I0120 17:51:30.031460 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6ea6b41c-c5ed-49c9-84f5-a932a1325aba-internal-tls-certs\") pod \"neutron-75676d645b-l2sp7\" (UID: \"6ea6b41c-c5ed-49c9-84f5-a932a1325aba\") " pod="openstack-kuttl-tests/neutron-75676d645b-l2sp7" Jan 20 17:51:30 crc kubenswrapper[4558]: I0120 17:51:30.031523 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/6ea6b41c-c5ed-49c9-84f5-a932a1325aba-ovndb-tls-certs\") pod \"neutron-75676d645b-l2sp7\" (UID: \"6ea6b41c-c5ed-49c9-84f5-a932a1325aba\") " pod="openstack-kuttl-tests/neutron-75676d645b-l2sp7" Jan 20 17:51:30 crc kubenswrapper[4558]: I0120 17:51:30.034938 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/6ea6b41c-c5ed-49c9-84f5-a932a1325aba-ovndb-tls-certs\") pod \"neutron-75676d645b-l2sp7\" (UID: \"6ea6b41c-c5ed-49c9-84f5-a932a1325aba\") " pod="openstack-kuttl-tests/neutron-75676d645b-l2sp7" Jan 20 17:51:30 crc kubenswrapper[4558]: I0120 17:51:30.035467 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6ea6b41c-c5ed-49c9-84f5-a932a1325aba-internal-tls-certs\") pod \"neutron-75676d645b-l2sp7\" (UID: \"6ea6b41c-c5ed-49c9-84f5-a932a1325aba\") " pod="openstack-kuttl-tests/neutron-75676d645b-l2sp7" Jan 20 17:51:30 crc kubenswrapper[4558]: I0120 17:51:30.035911 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/6ea6b41c-c5ed-49c9-84f5-a932a1325aba-config\") pod \"neutron-75676d645b-l2sp7\" (UID: \"6ea6b41c-c5ed-49c9-84f5-a932a1325aba\") " pod="openstack-kuttl-tests/neutron-75676d645b-l2sp7" Jan 20 17:51:30 crc kubenswrapper[4558]: I0120 17:51:30.038127 4558 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6ea6b41c-c5ed-49c9-84f5-a932a1325aba-public-tls-certs\") pod \"neutron-75676d645b-l2sp7\" (UID: \"6ea6b41c-c5ed-49c9-84f5-a932a1325aba\") " pod="openstack-kuttl-tests/neutron-75676d645b-l2sp7" Jan 20 17:51:30 crc kubenswrapper[4558]: I0120 17:51:30.038832 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ea6b41c-c5ed-49c9-84f5-a932a1325aba-combined-ca-bundle\") pod \"neutron-75676d645b-l2sp7\" (UID: \"6ea6b41c-c5ed-49c9-84f5-a932a1325aba\") " pod="openstack-kuttl-tests/neutron-75676d645b-l2sp7" Jan 20 17:51:30 crc kubenswrapper[4558]: I0120 17:51:30.040119 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/6ea6b41c-c5ed-49c9-84f5-a932a1325aba-httpd-config\") pod \"neutron-75676d645b-l2sp7\" (UID: \"6ea6b41c-c5ed-49c9-84f5-a932a1325aba\") " pod="openstack-kuttl-tests/neutron-75676d645b-l2sp7" Jan 20 17:51:30 crc kubenswrapper[4558]: I0120 17:51:30.044488 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gd76d\" (UniqueName: \"kubernetes.io/projected/6ea6b41c-c5ed-49c9-84f5-a932a1325aba-kube-api-access-gd76d\") pod \"neutron-75676d645b-l2sp7\" (UID: \"6ea6b41c-c5ed-49c9-84f5-a932a1325aba\") " pod="openstack-kuttl-tests/neutron-75676d645b-l2sp7" Jan 20 17:51:30 crc kubenswrapper[4558]: I0120 17:51:30.197367 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-75676d645b-l2sp7" Jan 20 17:51:30 crc kubenswrapper[4558]: I0120 17:51:30.200084 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-7578dc9646-4xs8n" event={"ID":"cf356652-86c3-4fff-acda-805974e723a8","Type":"ContainerStarted","Data":"28d40e4f867e20e57ed2e72e419502ac3f2707b00ad65322f68276cf23847472"} Jan 20 17:51:30 crc kubenswrapper[4558]: I0120 17:51:30.200132 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-7578dc9646-4xs8n" event={"ID":"cf356652-86c3-4fff-acda-805974e723a8","Type":"ContainerStarted","Data":"c55b7311fb58f910d443688df31dff82c4295610e48fdd174af9e08351e45cff"} Jan 20 17:51:30 crc kubenswrapper[4558]: I0120 17:51:30.217233 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:51:30 crc kubenswrapper[4558]: I0120 17:51:30.220014 4558 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 20 17:51:30 crc kubenswrapper[4558]: I0120 17:51:30.220991 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-5bfc7dd8b8-pxpm9" event={"ID":"e638a157-4234-4339-80ef-65d818d39b73","Type":"ContainerStarted","Data":"c2b848f0a44c31229d716190ee834f4766c179c31ef6380b2a6319ec16dbf83b"} Jan 20 17:51:30 crc kubenswrapper[4558]: I0120 17:51:30.221255 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/barbican-api-5bfc7dd8b8-pxpm9" Jan 20 17:51:30 crc kubenswrapper[4558]: I0120 17:51:30.222254 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/barbican-api-5bfc7dd8b8-pxpm9" Jan 20 17:51:30 crc kubenswrapper[4558]: I0120 17:51:30.222373 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:51:30 crc 
kubenswrapper[4558]: I0120 17:51:30.222443 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:51:30 crc kubenswrapper[4558]: I0120 17:51:30.237331 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-api-5bfc7dd8b8-pxpm9" podStartSLOduration=3.237315681 podStartE2EDuration="3.237315681s" podCreationTimestamp="2026-01-20 17:51:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:51:30.234375943 +0000 UTC m=+4183.994713911" watchObservedRunningTime="2026-01-20 17:51:30.237315681 +0000 UTC m=+4183.997653648" Jan 20 17:51:30 crc kubenswrapper[4558]: W0120 17:51:30.242654 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod64e30e89_cc14_4c89_bd1b_5702bfba717c.slice/crio-6cf31e786d84d421b05166a4f94e958e8db45bdaf08a5455f8433627e82c0c35 WatchSource:0}: Error finding container 6cf31e786d84d421b05166a4f94e958e8db45bdaf08a5455f8433627e82c0c35: Status 404 returned error can't find the container with id 6cf31e786d84d421b05166a4f94e958e8db45bdaf08a5455f8433627e82c0c35 Jan 20 17:51:30 crc kubenswrapper[4558]: I0120 17:51:30.366151 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:51:30 crc kubenswrapper[4558]: I0120 17:51:30.368123 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:51:30 crc kubenswrapper[4558]: W0120 17:51:30.519635 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6ea6b41c_c5ed_49c9_84f5_a932a1325aba.slice/crio-bf495499deb1c53f2fdace9640958cbca06cba68a6394bf86a0febaaab0a0a8d WatchSource:0}: Error finding container bf495499deb1c53f2fdace9640958cbca06cba68a6394bf86a0febaaab0a0a8d: Status 404 returned error can't find the container with id bf495499deb1c53f2fdace9640958cbca06cba68a6394bf86a0febaaab0a0a8d Jan 20 17:51:30 crc kubenswrapper[4558]: I0120 17:51:30.535580 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-75676d645b-l2sp7"] Jan 20 17:51:30 crc kubenswrapper[4558]: I0120 17:51:30.576205 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="733a1ae1-a917-447a-afa6-67cf7d688f86" path="/var/lib/kubelet/pods/733a1ae1-a917-447a-afa6-67cf7d688f86/volumes" Jan 20 17:51:31 crc kubenswrapper[4558]: I0120 17:51:31.229675 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-75676d645b-l2sp7" event={"ID":"6ea6b41c-c5ed-49c9-84f5-a932a1325aba","Type":"ContainerStarted","Data":"a10e865cdae4227bcd9b239722ad2d2d6bc501887291e7983d1cf31b29c7b76e"} Jan 20 17:51:31 crc kubenswrapper[4558]: I0120 17:51:31.229992 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/neutron-75676d645b-l2sp7" Jan 20 17:51:31 crc kubenswrapper[4558]: I0120 17:51:31.230020 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-75676d645b-l2sp7" event={"ID":"6ea6b41c-c5ed-49c9-84f5-a932a1325aba","Type":"ContainerStarted","Data":"d26f7b1dae3611d65adf8538389e6547e7f472172ad5ffe9456136d1cfd69e59"} Jan 20 17:51:31 crc kubenswrapper[4558]: I0120 17:51:31.230036 4558 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-75676d645b-l2sp7" event={"ID":"6ea6b41c-c5ed-49c9-84f5-a932a1325aba","Type":"ContainerStarted","Data":"bf495499deb1c53f2fdace9640958cbca06cba68a6394bf86a0febaaab0a0a8d"} Jan 20 17:51:31 crc kubenswrapper[4558]: I0120 17:51:31.232088 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"64e30e89-cc14-4c89-bd1b-5702bfba717c","Type":"ContainerStarted","Data":"5a3ae52e8d382a0ba3e16ca78acf47ccaf862a7eab92320268ff87ae7158b25c"} Jan 20 17:51:31 crc kubenswrapper[4558]: I0120 17:51:31.232133 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"64e30e89-cc14-4c89-bd1b-5702bfba717c","Type":"ContainerStarted","Data":"e0d25530c2349a51795cf3ce00e8ae91ba3476254b412bf92d6e0c208e2c53dc"} Jan 20 17:51:31 crc kubenswrapper[4558]: I0120 17:51:31.232144 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"64e30e89-cc14-4c89-bd1b-5702bfba717c","Type":"ContainerStarted","Data":"6cf31e786d84d421b05166a4f94e958e8db45bdaf08a5455f8433627e82c0c35"} Jan 20 17:51:31 crc kubenswrapper[4558]: I0120 17:51:31.234612 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-7578dc9646-4xs8n" event={"ID":"cf356652-86c3-4fff-acda-805974e723a8","Type":"ContainerStarted","Data":"49b03c8d23d852acad3d7b174f8c3b40f90b13f7e611e5c0e9fc8ab0ad8edda2"} Jan 20 17:51:31 crc kubenswrapper[4558]: I0120 17:51:31.234676 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-7578dc9646-4xs8n" podUID="cf356652-86c3-4fff-acda-805974e723a8" containerName="neutron-api" containerID="cri-o://28d40e4f867e20e57ed2e72e419502ac3f2707b00ad65322f68276cf23847472" gracePeriod=30 Jan 20 17:51:31 crc kubenswrapper[4558]: I0120 17:51:31.234776 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-7578dc9646-4xs8n" podUID="cf356652-86c3-4fff-acda-805974e723a8" containerName="neutron-httpd" containerID="cri-o://49b03c8d23d852acad3d7b174f8c3b40f90b13f7e611e5c0e9fc8ab0ad8edda2" gracePeriod=30 Jan 20 17:51:31 crc kubenswrapper[4558]: I0120 17:51:31.235708 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/neutron-7578dc9646-4xs8n" Jan 20 17:51:31 crc kubenswrapper[4558]: I0120 17:51:31.248493 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/neutron-75676d645b-l2sp7" podStartSLOduration=2.24847962 podStartE2EDuration="2.24847962s" podCreationTimestamp="2026-01-20 17:51:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:51:31.246087131 +0000 UTC m=+4185.006425098" watchObservedRunningTime="2026-01-20 17:51:31.24847962 +0000 UTC m=+4185.008817587" Jan 20 17:51:31 crc kubenswrapper[4558]: I0120 17:51:31.273786 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-api-0" podStartSLOduration=2.27377191 podStartE2EDuration="2.27377191s" podCreationTimestamp="2026-01-20 17:51:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:51:31.26398155 +0000 UTC m=+4185.024319517" watchObservedRunningTime="2026-01-20 17:51:31.27377191 +0000 UTC 
m=+4185.034109877" Jan 20 17:51:31 crc kubenswrapper[4558]: I0120 17:51:31.312859 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/neutron-7578dc9646-4xs8n" podStartSLOduration=3.312843018 podStartE2EDuration="3.312843018s" podCreationTimestamp="2026-01-20 17:51:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:51:31.284130637 +0000 UTC m=+4185.044468604" watchObservedRunningTime="2026-01-20 17:51:31.312843018 +0000 UTC m=+4185.073180985" Jan 20 17:51:31 crc kubenswrapper[4558]: I0120 17:51:31.900608 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/nova-metadata-0" podUID="acb2f7f3-f6d0-4b4f-936f-baee3dfa3938" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.1.192:8775/\": read tcp 10.217.0.2:54838->10.217.1.192:8775: read: connection reset by peer" Jan 20 17:51:31 crc kubenswrapper[4558]: I0120 17:51:31.901030 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/nova-metadata-0" podUID="acb2f7f3-f6d0-4b4f-936f-baee3dfa3938" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.1.192:8775/\": read tcp 10.217.0.2:54850->10.217.1.192:8775: read: connection reset by peer" Jan 20 17:51:32 crc kubenswrapper[4558]: I0120 17:51:32.006587 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:51:32 crc kubenswrapper[4558]: I0120 17:51:32.040265 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:51:32 crc kubenswrapper[4558]: I0120 17:51:32.258899 4558 generic.go:334] "Generic (PLEG): container finished" podID="acb2f7f3-f6d0-4b4f-936f-baee3dfa3938" containerID="06efd4d354098bbe1c72d647b10d6aecbc2f356d80570ca39ea3a76e7949f322" exitCode=0 Jan 20 17:51:32 crc kubenswrapper[4558]: I0120 17:51:32.258992 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"acb2f7f3-f6d0-4b4f-936f-baee3dfa3938","Type":"ContainerDied","Data":"06efd4d354098bbe1c72d647b10d6aecbc2f356d80570ca39ea3a76e7949f322"} Jan 20 17:51:32 crc kubenswrapper[4558]: I0120 17:51:32.259079 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"acb2f7f3-f6d0-4b4f-936f-baee3dfa3938","Type":"ContainerDied","Data":"58484e90e24ffc4180dd8312c466efe73ea03a93aeb8a64c6d07d2f682b11f04"} Jan 20 17:51:32 crc kubenswrapper[4558]: I0120 17:51:32.259097 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="58484e90e24ffc4180dd8312c466efe73ea03a93aeb8a64c6d07d2f682b11f04" Jan 20 17:51:32 crc kubenswrapper[4558]: I0120 17:51:32.267423 4558 generic.go:334] "Generic (PLEG): container finished" podID="cf356652-86c3-4fff-acda-805974e723a8" containerID="49b03c8d23d852acad3d7b174f8c3b40f90b13f7e611e5c0e9fc8ab0ad8edda2" exitCode=0 Jan 20 17:51:32 crc kubenswrapper[4558]: I0120 17:51:32.268560 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-7578dc9646-4xs8n" event={"ID":"cf356652-86c3-4fff-acda-805974e723a8","Type":"ContainerDied","Data":"49b03c8d23d852acad3d7b174f8c3b40f90b13f7e611e5c0e9fc8ab0ad8edda2"} Jan 20 17:51:32 crc kubenswrapper[4558]: I0120 17:51:32.268696 4558 util.go:48] "No ready sandbox for pod can be 
found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:51:32 crc kubenswrapper[4558]: I0120 17:51:32.389852 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/cinder-api-0" podUID="dd518f67-fd09-4102-9934-594b833bf8c8" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.1.221:8776/healthcheck\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Jan 20 17:51:32 crc kubenswrapper[4558]: I0120 17:51:32.394180 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/acb2f7f3-f6d0-4b4f-936f-baee3dfa3938-config-data\") pod \"acb2f7f3-f6d0-4b4f-936f-baee3dfa3938\" (UID: \"acb2f7f3-f6d0-4b4f-936f-baee3dfa3938\") " Jan 20 17:51:32 crc kubenswrapper[4558]: I0120 17:51:32.394352 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acb2f7f3-f6d0-4b4f-936f-baee3dfa3938-combined-ca-bundle\") pod \"acb2f7f3-f6d0-4b4f-936f-baee3dfa3938\" (UID: \"acb2f7f3-f6d0-4b4f-936f-baee3dfa3938\") " Jan 20 17:51:32 crc kubenswrapper[4558]: I0120 17:51:32.394381 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/acb2f7f3-f6d0-4b4f-936f-baee3dfa3938-logs\") pod \"acb2f7f3-f6d0-4b4f-936f-baee3dfa3938\" (UID: \"acb2f7f3-f6d0-4b4f-936f-baee3dfa3938\") " Jan 20 17:51:32 crc kubenswrapper[4558]: I0120 17:51:32.394406 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xpdkk\" (UniqueName: \"kubernetes.io/projected/acb2f7f3-f6d0-4b4f-936f-baee3dfa3938-kube-api-access-xpdkk\") pod \"acb2f7f3-f6d0-4b4f-936f-baee3dfa3938\" (UID: \"acb2f7f3-f6d0-4b4f-936f-baee3dfa3938\") " Jan 20 17:51:32 crc kubenswrapper[4558]: I0120 17:51:32.394806 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/acb2f7f3-f6d0-4b4f-936f-baee3dfa3938-logs" (OuterVolumeSpecName: "logs") pod "acb2f7f3-f6d0-4b4f-936f-baee3dfa3938" (UID: "acb2f7f3-f6d0-4b4f-936f-baee3dfa3938"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:51:32 crc kubenswrapper[4558]: I0120 17:51:32.394869 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/acb2f7f3-f6d0-4b4f-936f-baee3dfa3938-nova-metadata-tls-certs\") pod \"acb2f7f3-f6d0-4b4f-936f-baee3dfa3938\" (UID: \"acb2f7f3-f6d0-4b4f-936f-baee3dfa3938\") " Jan 20 17:51:32 crc kubenswrapper[4558]: I0120 17:51:32.395870 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/acb2f7f3-f6d0-4b4f-936f-baee3dfa3938-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:32 crc kubenswrapper[4558]: I0120 17:51:32.415839 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/acb2f7f3-f6d0-4b4f-936f-baee3dfa3938-kube-api-access-xpdkk" (OuterVolumeSpecName: "kube-api-access-xpdkk") pod "acb2f7f3-f6d0-4b4f-936f-baee3dfa3938" (UID: "acb2f7f3-f6d0-4b4f-936f-baee3dfa3938"). InnerVolumeSpecName "kube-api-access-xpdkk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:51:32 crc kubenswrapper[4558]: I0120 17:51:32.438352 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/acb2f7f3-f6d0-4b4f-936f-baee3dfa3938-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "acb2f7f3-f6d0-4b4f-936f-baee3dfa3938" (UID: "acb2f7f3-f6d0-4b4f-936f-baee3dfa3938"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:32 crc kubenswrapper[4558]: I0120 17:51:32.465570 4558 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:51:32 crc kubenswrapper[4558]: I0120 17:51:32.465916 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:51:32 crc kubenswrapper[4558]: I0120 17:51:32.465973 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:51:32 crc kubenswrapper[4558]: I0120 17:51:32.466073 4558 scope.go:117] "RemoveContainer" containerID="4e744e9f021c29bfddbe31a0a527bc6946c7e7d3595c756668ddc79180474c63" Jan 20 17:51:32 crc kubenswrapper[4558]: E0120 17:51:32.466335 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-scheduler-scheduler\" with CrashLoopBackOff: \"back-off 20s restarting failed container=nova-scheduler-scheduler pod=nova-scheduler-0_openstack-kuttl-tests(d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2)\"" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2" Jan 20 17:51:32 crc kubenswrapper[4558]: I0120 17:51:32.499158 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acb2f7f3-f6d0-4b4f-936f-baee3dfa3938-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:32 crc kubenswrapper[4558]: I0120 17:51:32.499207 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xpdkk\" (UniqueName: \"kubernetes.io/projected/acb2f7f3-f6d0-4b4f-936f-baee3dfa3938-kube-api-access-xpdkk\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:32 crc kubenswrapper[4558]: I0120 17:51:32.501251 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/acb2f7f3-f6d0-4b4f-936f-baee3dfa3938-config-data" (OuterVolumeSpecName: "config-data") pod "acb2f7f3-f6d0-4b4f-936f-baee3dfa3938" (UID: "acb2f7f3-f6d0-4b4f-936f-baee3dfa3938"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:32 crc kubenswrapper[4558]: I0120 17:51:32.541403 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/acb2f7f3-f6d0-4b4f-936f-baee3dfa3938-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "acb2f7f3-f6d0-4b4f-936f-baee3dfa3938" (UID: "acb2f7f3-f6d0-4b4f-936f-baee3dfa3938"). InnerVolumeSpecName "nova-metadata-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:32 crc kubenswrapper[4558]: I0120 17:51:32.601306 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/acb2f7f3-f6d0-4b4f-936f-baee3dfa3938-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:32 crc kubenswrapper[4558]: I0120 17:51:32.601338 4558 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/acb2f7f3-f6d0-4b4f-936f-baee3dfa3938-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:32 crc kubenswrapper[4558]: I0120 17:51:32.874221 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/cinder-api-0" podUID="dd518f67-fd09-4102-9934-594b833bf8c8" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.1.221:8776/healthcheck\": read tcp 10.217.0.2:56688->10.217.1.221:8776: read: connection reset by peer" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.167432 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.278612 4558 generic.go:334] "Generic (PLEG): container finished" podID="dd518f67-fd09-4102-9934-594b833bf8c8" containerID="e7e61e40b3ceb7bcd0050ea1fa8a9bb9f8c5958158a076d7e7954bdc11cae1ef" exitCode=0 Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.278682 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.278737 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"dd518f67-fd09-4102-9934-594b833bf8c8","Type":"ContainerDied","Data":"e7e61e40b3ceb7bcd0050ea1fa8a9bb9f8c5958158a076d7e7954bdc11cae1ef"} Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.278802 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"dd518f67-fd09-4102-9934-594b833bf8c8","Type":"ContainerDied","Data":"37c7c64075849d1c723fe292d572371554c228e6e6e06063a5f3fe9f7008213e"} Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.278835 4558 scope.go:117] "RemoveContainer" containerID="e7e61e40b3ceb7bcd0050ea1fa8a9bb9f8c5958158a076d7e7954bdc11cae1ef" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.279267 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.280569 4558 scope.go:117] "RemoveContainer" containerID="4e744e9f021c29bfddbe31a0a527bc6946c7e7d3595c756668ddc79180474c63" Jan 20 17:51:33 crc kubenswrapper[4558]: E0120 17:51:33.280775 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-scheduler-scheduler\" with CrashLoopBackOff: \"back-off 20s restarting failed container=nova-scheduler-scheduler pod=nova-scheduler-0_openstack-kuttl-tests(d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2)\"" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.302025 4558 scope.go:117] "RemoveContainer" containerID="c1f8d8bf07ca82f670329868a579e9d926a00b6090b23758e7d9b8f7b4cae4d3" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.303356 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.311414 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.322648 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/dd518f67-fd09-4102-9934-594b833bf8c8-etc-machine-id\") pod \"dd518f67-fd09-4102-9934-594b833bf8c8\" (UID: \"dd518f67-fd09-4102-9934-594b833bf8c8\") " Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.322728 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/dd518f67-fd09-4102-9934-594b833bf8c8-config-data-custom\") pod \"dd518f67-fd09-4102-9934-594b833bf8c8\" (UID: \"dd518f67-fd09-4102-9934-594b833bf8c8\") " Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.322793 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd518f67-fd09-4102-9934-594b833bf8c8-config-data\") pod \"dd518f67-fd09-4102-9934-594b833bf8c8\" (UID: \"dd518f67-fd09-4102-9934-594b833bf8c8\") " Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.322843 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dd518f67-fd09-4102-9934-594b833bf8c8-logs\") pod \"dd518f67-fd09-4102-9934-594b833bf8c8\" (UID: \"dd518f67-fd09-4102-9934-594b833bf8c8\") " Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.322916 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd518f67-fd09-4102-9934-594b833bf8c8-internal-tls-certs\") pod \"dd518f67-fd09-4102-9934-594b833bf8c8\" (UID: \"dd518f67-fd09-4102-9934-594b833bf8c8\") " Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.322965 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd518f67-fd09-4102-9934-594b833bf8c8-combined-ca-bundle\") pod \"dd518f67-fd09-4102-9934-594b833bf8c8\" (UID: \"dd518f67-fd09-4102-9934-594b833bf8c8\") " Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.323026 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bblbr\" (UniqueName: 
\"kubernetes.io/projected/dd518f67-fd09-4102-9934-594b833bf8c8-kube-api-access-bblbr\") pod \"dd518f67-fd09-4102-9934-594b833bf8c8\" (UID: \"dd518f67-fd09-4102-9934-594b833bf8c8\") " Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.323130 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dd518f67-fd09-4102-9934-594b833bf8c8-scripts\") pod \"dd518f67-fd09-4102-9934-594b833bf8c8\" (UID: \"dd518f67-fd09-4102-9934-594b833bf8c8\") " Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.323156 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd518f67-fd09-4102-9934-594b833bf8c8-public-tls-certs\") pod \"dd518f67-fd09-4102-9934-594b833bf8c8\" (UID: \"dd518f67-fd09-4102-9934-594b833bf8c8\") " Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.324234 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dd518f67-fd09-4102-9934-594b833bf8c8-logs" (OuterVolumeSpecName: "logs") pod "dd518f67-fd09-4102-9934-594b833bf8c8" (UID: "dd518f67-fd09-4102-9934-594b833bf8c8"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.324287 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/dd518f67-fd09-4102-9934-594b833bf8c8-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "dd518f67-fd09-4102-9934-594b833bf8c8" (UID: "dd518f67-fd09-4102-9934-594b833bf8c8"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.326977 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:51:33 crc kubenswrapper[4558]: E0120 17:51:33.327503 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="acb2f7f3-f6d0-4b4f-936f-baee3dfa3938" containerName="nova-metadata-metadata" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.327517 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="acb2f7f3-f6d0-4b4f-936f-baee3dfa3938" containerName="nova-metadata-metadata" Jan 20 17:51:33 crc kubenswrapper[4558]: E0120 17:51:33.327542 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd518f67-fd09-4102-9934-594b833bf8c8" containerName="cinder-api" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.327549 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd518f67-fd09-4102-9934-594b833bf8c8" containerName="cinder-api" Jan 20 17:51:33 crc kubenswrapper[4558]: E0120 17:51:33.327572 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd518f67-fd09-4102-9934-594b833bf8c8" containerName="cinder-api-log" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.327579 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd518f67-fd09-4102-9934-594b833bf8c8" containerName="cinder-api-log" Jan 20 17:51:33 crc kubenswrapper[4558]: E0120 17:51:33.327595 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="acb2f7f3-f6d0-4b4f-936f-baee3dfa3938" containerName="nova-metadata-log" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.327601 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="acb2f7f3-f6d0-4b4f-936f-baee3dfa3938" containerName="nova-metadata-log" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.327809 4558 
memory_manager.go:354] "RemoveStaleState removing state" podUID="acb2f7f3-f6d0-4b4f-936f-baee3dfa3938" containerName="nova-metadata-metadata" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.327821 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd518f67-fd09-4102-9934-594b833bf8c8" containerName="cinder-api-log" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.327840 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="acb2f7f3-f6d0-4b4f-936f-baee3dfa3938" containerName="nova-metadata-log" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.327853 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd518f67-fd09-4102-9934-594b833bf8c8" containerName="cinder-api" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.328965 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.333110 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-metadata-internal-svc" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.334785 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dd518f67-fd09-4102-9934-594b833bf8c8-kube-api-access-bblbr" (OuterVolumeSpecName: "kube-api-access-bblbr") pod "dd518f67-fd09-4102-9934-594b833bf8c8" (UID: "dd518f67-fd09-4102-9934-594b833bf8c8"). InnerVolumeSpecName "kube-api-access-bblbr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.340260 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-metadata-config-data" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.352372 4558 scope.go:117] "RemoveContainer" containerID="e7e61e40b3ceb7bcd0050ea1fa8a9bb9f8c5958158a076d7e7954bdc11cae1ef" Jan 20 17:51:33 crc kubenswrapper[4558]: E0120 17:51:33.355399 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e7e61e40b3ceb7bcd0050ea1fa8a9bb9f8c5958158a076d7e7954bdc11cae1ef\": container with ID starting with e7e61e40b3ceb7bcd0050ea1fa8a9bb9f8c5958158a076d7e7954bdc11cae1ef not found: ID does not exist" containerID="e7e61e40b3ceb7bcd0050ea1fa8a9bb9f8c5958158a076d7e7954bdc11cae1ef" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.355462 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e7e61e40b3ceb7bcd0050ea1fa8a9bb9f8c5958158a076d7e7954bdc11cae1ef"} err="failed to get container status \"e7e61e40b3ceb7bcd0050ea1fa8a9bb9f8c5958158a076d7e7954bdc11cae1ef\": rpc error: code = NotFound desc = could not find container \"e7e61e40b3ceb7bcd0050ea1fa8a9bb9f8c5958158a076d7e7954bdc11cae1ef\": container with ID starting with e7e61e40b3ceb7bcd0050ea1fa8a9bb9f8c5958158a076d7e7954bdc11cae1ef not found: ID does not exist" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.355494 4558 scope.go:117] "RemoveContainer" containerID="c1f8d8bf07ca82f670329868a579e9d926a00b6090b23758e7d9b8f7b4cae4d3" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.357412 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd518f67-fd09-4102-9934-594b833bf8c8-scripts" (OuterVolumeSpecName: "scripts") pod "dd518f67-fd09-4102-9934-594b833bf8c8" (UID: "dd518f67-fd09-4102-9934-594b833bf8c8"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:33 crc kubenswrapper[4558]: E0120 17:51:33.358059 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c1f8d8bf07ca82f670329868a579e9d926a00b6090b23758e7d9b8f7b4cae4d3\": container with ID starting with c1f8d8bf07ca82f670329868a579e9d926a00b6090b23758e7d9b8f7b4cae4d3 not found: ID does not exist" containerID="c1f8d8bf07ca82f670329868a579e9d926a00b6090b23758e7d9b8f7b4cae4d3" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.358118 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c1f8d8bf07ca82f670329868a579e9d926a00b6090b23758e7d9b8f7b4cae4d3"} err="failed to get container status \"c1f8d8bf07ca82f670329868a579e9d926a00b6090b23758e7d9b8f7b4cae4d3\": rpc error: code = NotFound desc = could not find container \"c1f8d8bf07ca82f670329868a579e9d926a00b6090b23758e7d9b8f7b4cae4d3\": container with ID starting with c1f8d8bf07ca82f670329868a579e9d926a00b6090b23758e7d9b8f7b4cae4d3 not found: ID does not exist" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.362113 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-7f6b9c747b-m4nwt"] Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.362448 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/placement-7f6b9c747b-m4nwt" podUID="f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f" containerName="placement-log" containerID="cri-o://f05207f6b368474a1803d5134030c085eaccc7f1d38de03e11ae6754937b6bfd" gracePeriod=30 Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.362930 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/placement-7f6b9c747b-m4nwt" podUID="f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f" containerName="placement-api" containerID="cri-o://aababf132d5446979839e0c577985c22fd9f60a631cd19ebf40fb35cac8af51c" gracePeriod=30 Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.373386 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd518f67-fd09-4102-9934-594b833bf8c8-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "dd518f67-fd09-4102-9934-594b833bf8c8" (UID: "dd518f67-fd09-4102-9934-594b833bf8c8"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.379063 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.388945 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/placement-7f6b9c747b-m4nwt" podUID="f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f" containerName="placement-api" probeResult="failure" output="Get \"https://10.217.1.225:8778/\": read tcp 10.217.0.2:51100->10.217.1.225:8778: read: connection reset by peer" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.389022 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/placement-7f6b9c747b-m4nwt" podUID="f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f" containerName="placement-log" probeResult="failure" output="Get \"https://10.217.1.225:8778/\": read tcp 10.217.0.2:51112->10.217.1.225:8778: read: connection reset by peer" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.407078 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/placement-658f45b9f4-tx296"] Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.417213 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-658f45b9f4-tx296"] Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.417329 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-658f45b9f4-tx296" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.426829 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e7de5b3-4426-4816-9d45-9b4226333dbc-config-data\") pod \"nova-metadata-0\" (UID: \"6e7de5b3-4426-4816-9d45-9b4226333dbc\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.426941 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/6e7de5b3-4426-4816-9d45-9b4226333dbc-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"6e7de5b3-4426-4816-9d45-9b4226333dbc\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.427094 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e7de5b3-4426-4816-9d45-9b4226333dbc-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"6e7de5b3-4426-4816-9d45-9b4226333dbc\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.427241 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6e7de5b3-4426-4816-9d45-9b4226333dbc-logs\") pod \"nova-metadata-0\" (UID: \"6e7de5b3-4426-4816-9d45-9b4226333dbc\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.427302 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hnsfg\" (UniqueName: \"kubernetes.io/projected/6e7de5b3-4426-4816-9d45-9b4226333dbc-kube-api-access-hnsfg\") pod \"nova-metadata-0\" (UID: \"6e7de5b3-4426-4816-9d45-9b4226333dbc\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:51:33 crc 
kubenswrapper[4558]: I0120 17:51:33.427375 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bblbr\" (UniqueName: \"kubernetes.io/projected/dd518f67-fd09-4102-9934-594b833bf8c8-kube-api-access-bblbr\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.427394 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dd518f67-fd09-4102-9934-594b833bf8c8-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.427405 4558 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/dd518f67-fd09-4102-9934-594b833bf8c8-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.427416 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/dd518f67-fd09-4102-9934-594b833bf8c8-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.427424 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/dd518f67-fd09-4102-9934-594b833bf8c8-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.448628 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd518f67-fd09-4102-9934-594b833bf8c8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dd518f67-fd09-4102-9934-594b833bf8c8" (UID: "dd518f67-fd09-4102-9934-594b833bf8c8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.448666 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd518f67-fd09-4102-9934-594b833bf8c8-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "dd518f67-fd09-4102-9934-594b833bf8c8" (UID: "dd518f67-fd09-4102-9934-594b833bf8c8"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.460347 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd518f67-fd09-4102-9934-594b833bf8c8-config-data" (OuterVolumeSpecName: "config-data") pod "dd518f67-fd09-4102-9934-594b833bf8c8" (UID: "dd518f67-fd09-4102-9934-594b833bf8c8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.465033 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd518f67-fd09-4102-9934-594b833bf8c8-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "dd518f67-fd09-4102-9934-594b833bf8c8" (UID: "dd518f67-fd09-4102-9934-594b833bf8c8"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.529562 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e7de5b3-4426-4816-9d45-9b4226333dbc-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"6e7de5b3-4426-4816-9d45-9b4226333dbc\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.529649 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n9d5p\" (UniqueName: \"kubernetes.io/projected/0c9fc067-cb23-4f72-b928-5447fb5182c1-kube-api-access-n9d5p\") pod \"placement-658f45b9f4-tx296\" (UID: \"0c9fc067-cb23-4f72-b928-5447fb5182c1\") " pod="openstack-kuttl-tests/placement-658f45b9f4-tx296" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.529699 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0c9fc067-cb23-4f72-b928-5447fb5182c1-config-data\") pod \"placement-658f45b9f4-tx296\" (UID: \"0c9fc067-cb23-4f72-b928-5447fb5182c1\") " pod="openstack-kuttl-tests/placement-658f45b9f4-tx296" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.529728 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6e7de5b3-4426-4816-9d45-9b4226333dbc-logs\") pod \"nova-metadata-0\" (UID: \"6e7de5b3-4426-4816-9d45-9b4226333dbc\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.529753 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0c9fc067-cb23-4f72-b928-5447fb5182c1-internal-tls-certs\") pod \"placement-658f45b9f4-tx296\" (UID: \"0c9fc067-cb23-4f72-b928-5447fb5182c1\") " pod="openstack-kuttl-tests/placement-658f45b9f4-tx296" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.529857 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0c9fc067-cb23-4f72-b928-5447fb5182c1-logs\") pod \"placement-658f45b9f4-tx296\" (UID: \"0c9fc067-cb23-4f72-b928-5447fb5182c1\") " pod="openstack-kuttl-tests/placement-658f45b9f4-tx296" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.530224 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6e7de5b3-4426-4816-9d45-9b4226333dbc-logs\") pod \"nova-metadata-0\" (UID: \"6e7de5b3-4426-4816-9d45-9b4226333dbc\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.530355 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hnsfg\" (UniqueName: \"kubernetes.io/projected/6e7de5b3-4426-4816-9d45-9b4226333dbc-kube-api-access-hnsfg\") pod \"nova-metadata-0\" (UID: \"6e7de5b3-4426-4816-9d45-9b4226333dbc\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.530388 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0c9fc067-cb23-4f72-b928-5447fb5182c1-public-tls-certs\") pod \"placement-658f45b9f4-tx296\" (UID: \"0c9fc067-cb23-4f72-b928-5447fb5182c1\") " 
pod="openstack-kuttl-tests/placement-658f45b9f4-tx296" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.530736 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e7de5b3-4426-4816-9d45-9b4226333dbc-config-data\") pod \"nova-metadata-0\" (UID: \"6e7de5b3-4426-4816-9d45-9b4226333dbc\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.531444 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/6e7de5b3-4426-4816-9d45-9b4226333dbc-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"6e7de5b3-4426-4816-9d45-9b4226333dbc\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.531525 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0c9fc067-cb23-4f72-b928-5447fb5182c1-combined-ca-bundle\") pod \"placement-658f45b9f4-tx296\" (UID: \"0c9fc067-cb23-4f72-b928-5447fb5182c1\") " pod="openstack-kuttl-tests/placement-658f45b9f4-tx296" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.531574 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0c9fc067-cb23-4f72-b928-5447fb5182c1-scripts\") pod \"placement-658f45b9f4-tx296\" (UID: \"0c9fc067-cb23-4f72-b928-5447fb5182c1\") " pod="openstack-kuttl-tests/placement-658f45b9f4-tx296" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.531715 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd518f67-fd09-4102-9934-594b833bf8c8-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.531735 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd518f67-fd09-4102-9934-594b833bf8c8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.531746 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/dd518f67-fd09-4102-9934-594b833bf8c8-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.531757 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd518f67-fd09-4102-9934-594b833bf8c8-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.533276 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e7de5b3-4426-4816-9d45-9b4226333dbc-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"6e7de5b3-4426-4816-9d45-9b4226333dbc\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.533485 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/6e7de5b3-4426-4816-9d45-9b4226333dbc-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"6e7de5b3-4426-4816-9d45-9b4226333dbc\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.533985 4558 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e7de5b3-4426-4816-9d45-9b4226333dbc-config-data\") pod \"nova-metadata-0\" (UID: \"6e7de5b3-4426-4816-9d45-9b4226333dbc\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.543474 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hnsfg\" (UniqueName: \"kubernetes.io/projected/6e7de5b3-4426-4816-9d45-9b4226333dbc-kube-api-access-hnsfg\") pod \"nova-metadata-0\" (UID: \"6e7de5b3-4426-4816-9d45-9b4226333dbc\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.634431 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0c9fc067-cb23-4f72-b928-5447fb5182c1-combined-ca-bundle\") pod \"placement-658f45b9f4-tx296\" (UID: \"0c9fc067-cb23-4f72-b928-5447fb5182c1\") " pod="openstack-kuttl-tests/placement-658f45b9f4-tx296" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.634487 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0c9fc067-cb23-4f72-b928-5447fb5182c1-scripts\") pod \"placement-658f45b9f4-tx296\" (UID: \"0c9fc067-cb23-4f72-b928-5447fb5182c1\") " pod="openstack-kuttl-tests/placement-658f45b9f4-tx296" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.634647 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n9d5p\" (UniqueName: \"kubernetes.io/projected/0c9fc067-cb23-4f72-b928-5447fb5182c1-kube-api-access-n9d5p\") pod \"placement-658f45b9f4-tx296\" (UID: \"0c9fc067-cb23-4f72-b928-5447fb5182c1\") " pod="openstack-kuttl-tests/placement-658f45b9f4-tx296" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.634712 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0c9fc067-cb23-4f72-b928-5447fb5182c1-config-data\") pod \"placement-658f45b9f4-tx296\" (UID: \"0c9fc067-cb23-4f72-b928-5447fb5182c1\") " pod="openstack-kuttl-tests/placement-658f45b9f4-tx296" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.634755 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0c9fc067-cb23-4f72-b928-5447fb5182c1-internal-tls-certs\") pod \"placement-658f45b9f4-tx296\" (UID: \"0c9fc067-cb23-4f72-b928-5447fb5182c1\") " pod="openstack-kuttl-tests/placement-658f45b9f4-tx296" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.634781 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0c9fc067-cb23-4f72-b928-5447fb5182c1-logs\") pod \"placement-658f45b9f4-tx296\" (UID: \"0c9fc067-cb23-4f72-b928-5447fb5182c1\") " pod="openstack-kuttl-tests/placement-658f45b9f4-tx296" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.634846 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0c9fc067-cb23-4f72-b928-5447fb5182c1-public-tls-certs\") pod \"placement-658f45b9f4-tx296\" (UID: \"0c9fc067-cb23-4f72-b928-5447fb5182c1\") " pod="openstack-kuttl-tests/placement-658f45b9f4-tx296" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.644374 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" 
(UniqueName: \"kubernetes.io/secret/0c9fc067-cb23-4f72-b928-5447fb5182c1-scripts\") pod \"placement-658f45b9f4-tx296\" (UID: \"0c9fc067-cb23-4f72-b928-5447fb5182c1\") " pod="openstack-kuttl-tests/placement-658f45b9f4-tx296" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.644702 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0c9fc067-cb23-4f72-b928-5447fb5182c1-logs\") pod \"placement-658f45b9f4-tx296\" (UID: \"0c9fc067-cb23-4f72-b928-5447fb5182c1\") " pod="openstack-kuttl-tests/placement-658f45b9f4-tx296" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.646741 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0c9fc067-cb23-4f72-b928-5447fb5182c1-public-tls-certs\") pod \"placement-658f45b9f4-tx296\" (UID: \"0c9fc067-cb23-4f72-b928-5447fb5182c1\") " pod="openstack-kuttl-tests/placement-658f45b9f4-tx296" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.646800 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0c9fc067-cb23-4f72-b928-5447fb5182c1-config-data\") pod \"placement-658f45b9f4-tx296\" (UID: \"0c9fc067-cb23-4f72-b928-5447fb5182c1\") " pod="openstack-kuttl-tests/placement-658f45b9f4-tx296" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.647602 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0c9fc067-cb23-4f72-b928-5447fb5182c1-internal-tls-certs\") pod \"placement-658f45b9f4-tx296\" (UID: \"0c9fc067-cb23-4f72-b928-5447fb5182c1\") " pod="openstack-kuttl-tests/placement-658f45b9f4-tx296" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.656563 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0c9fc067-cb23-4f72-b928-5447fb5182c1-combined-ca-bundle\") pod \"placement-658f45b9f4-tx296\" (UID: \"0c9fc067-cb23-4f72-b928-5447fb5182c1\") " pod="openstack-kuttl-tests/placement-658f45b9f4-tx296" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.664545 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n9d5p\" (UniqueName: \"kubernetes.io/projected/0c9fc067-cb23-4f72-b928-5447fb5182c1-kube-api-access-n9d5p\") pod \"placement-658f45b9f4-tx296\" (UID: \"0c9fc067-cb23-4f72-b928-5447fb5182c1\") " pod="openstack-kuttl-tests/placement-658f45b9f4-tx296" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.682284 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.710846 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.730380 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.732438 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.734116 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-cinder-internal-svc" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.734673 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-cinder-public-svc" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.735009 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-api-config-data" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.748060 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.844862 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5eac3a8d-ee82-4e6f-bdb9-fccb82e09496-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"5eac3a8d-ee82-4e6f-bdb9-fccb82e09496\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.844922 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5eac3a8d-ee82-4e6f-bdb9-fccb82e09496-scripts\") pod \"cinder-api-0\" (UID: \"5eac3a8d-ee82-4e6f-bdb9-fccb82e09496\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.844947 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hjrqb\" (UniqueName: \"kubernetes.io/projected/5eac3a8d-ee82-4e6f-bdb9-fccb82e09496-kube-api-access-hjrqb\") pod \"cinder-api-0\" (UID: \"5eac3a8d-ee82-4e6f-bdb9-fccb82e09496\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.844966 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/5eac3a8d-ee82-4e6f-bdb9-fccb82e09496-etc-machine-id\") pod \"cinder-api-0\" (UID: \"5eac3a8d-ee82-4e6f-bdb9-fccb82e09496\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.845015 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5eac3a8d-ee82-4e6f-bdb9-fccb82e09496-logs\") pod \"cinder-api-0\" (UID: \"5eac3a8d-ee82-4e6f-bdb9-fccb82e09496\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.845046 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5eac3a8d-ee82-4e6f-bdb9-fccb82e09496-public-tls-certs\") pod \"cinder-api-0\" (UID: \"5eac3a8d-ee82-4e6f-bdb9-fccb82e09496\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.845735 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5eac3a8d-ee82-4e6f-bdb9-fccb82e09496-config-data\") pod \"cinder-api-0\" (UID: \"5eac3a8d-ee82-4e6f-bdb9-fccb82e09496\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.845961 4558 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5eac3a8d-ee82-4e6f-bdb9-fccb82e09496-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"5eac3a8d-ee82-4e6f-bdb9-fccb82e09496\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.846054 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5eac3a8d-ee82-4e6f-bdb9-fccb82e09496-config-data-custom\") pod \"cinder-api-0\" (UID: \"5eac3a8d-ee82-4e6f-bdb9-fccb82e09496\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.846849 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.853822 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-658f45b9f4-tx296" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.906767 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-7f6b9c747b-m4nwt" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.947880 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f-config-data\") pod \"f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f\" (UID: \"f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f\") " Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.948010 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qdq2c\" (UniqueName: \"kubernetes.io/projected/f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f-kube-api-access-qdq2c\") pod \"f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f\" (UID: \"f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f\") " Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.948055 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f-public-tls-certs\") pod \"f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f\" (UID: \"f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f\") " Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.948287 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f-internal-tls-certs\") pod \"f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f\" (UID: \"f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f\") " Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.948311 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f-combined-ca-bundle\") pod \"f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f\" (UID: \"f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f\") " Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.948375 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f-logs\") pod \"f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f\" (UID: \"f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f\") " Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.948408 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" 
(UniqueName: \"kubernetes.io/secret/f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f-scripts\") pod \"f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f\" (UID: \"f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f\") " Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.948837 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f-logs" (OuterVolumeSpecName: "logs") pod "f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f" (UID: "f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.948855 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5eac3a8d-ee82-4e6f-bdb9-fccb82e09496-logs\") pod \"cinder-api-0\" (UID: \"5eac3a8d-ee82-4e6f-bdb9-fccb82e09496\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.949697 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5eac3a8d-ee82-4e6f-bdb9-fccb82e09496-logs\") pod \"cinder-api-0\" (UID: \"5eac3a8d-ee82-4e6f-bdb9-fccb82e09496\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.949714 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5eac3a8d-ee82-4e6f-bdb9-fccb82e09496-public-tls-certs\") pod \"cinder-api-0\" (UID: \"5eac3a8d-ee82-4e6f-bdb9-fccb82e09496\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.949745 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5eac3a8d-ee82-4e6f-bdb9-fccb82e09496-config-data\") pod \"cinder-api-0\" (UID: \"5eac3a8d-ee82-4e6f-bdb9-fccb82e09496\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.949798 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5eac3a8d-ee82-4e6f-bdb9-fccb82e09496-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"5eac3a8d-ee82-4e6f-bdb9-fccb82e09496\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.949875 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5eac3a8d-ee82-4e6f-bdb9-fccb82e09496-config-data-custom\") pod \"cinder-api-0\" (UID: \"5eac3a8d-ee82-4e6f-bdb9-fccb82e09496\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.949951 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5eac3a8d-ee82-4e6f-bdb9-fccb82e09496-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"5eac3a8d-ee82-4e6f-bdb9-fccb82e09496\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.949982 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hjrqb\" (UniqueName: \"kubernetes.io/projected/5eac3a8d-ee82-4e6f-bdb9-fccb82e09496-kube-api-access-hjrqb\") pod \"cinder-api-0\" (UID: \"5eac3a8d-ee82-4e6f-bdb9-fccb82e09496\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:51:33 crc 
kubenswrapper[4558]: I0120 17:51:33.949998 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5eac3a8d-ee82-4e6f-bdb9-fccb82e09496-scripts\") pod \"cinder-api-0\" (UID: \"5eac3a8d-ee82-4e6f-bdb9-fccb82e09496\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.950028 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/5eac3a8d-ee82-4e6f-bdb9-fccb82e09496-etc-machine-id\") pod \"cinder-api-0\" (UID: \"5eac3a8d-ee82-4e6f-bdb9-fccb82e09496\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.950103 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.956881 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/5eac3a8d-ee82-4e6f-bdb9-fccb82e09496-etc-machine-id\") pod \"cinder-api-0\" (UID: \"5eac3a8d-ee82-4e6f-bdb9-fccb82e09496\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.959874 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5eac3a8d-ee82-4e6f-bdb9-fccb82e09496-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"5eac3a8d-ee82-4e6f-bdb9-fccb82e09496\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.960852 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5eac3a8d-ee82-4e6f-bdb9-fccb82e09496-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"5eac3a8d-ee82-4e6f-bdb9-fccb82e09496\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.961616 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5eac3a8d-ee82-4e6f-bdb9-fccb82e09496-config-data-custom\") pod \"cinder-api-0\" (UID: \"5eac3a8d-ee82-4e6f-bdb9-fccb82e09496\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.962748 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5eac3a8d-ee82-4e6f-bdb9-fccb82e09496-config-data\") pod \"cinder-api-0\" (UID: \"5eac3a8d-ee82-4e6f-bdb9-fccb82e09496\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.963148 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f-kube-api-access-qdq2c" (OuterVolumeSpecName: "kube-api-access-qdq2c") pod "f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f" (UID: "f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f"). InnerVolumeSpecName "kube-api-access-qdq2c". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.963916 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5eac3a8d-ee82-4e6f-bdb9-fccb82e09496-scripts\") pod \"cinder-api-0\" (UID: \"5eac3a8d-ee82-4e6f-bdb9-fccb82e09496\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.967400 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5eac3a8d-ee82-4e6f-bdb9-fccb82e09496-public-tls-certs\") pod \"cinder-api-0\" (UID: \"5eac3a8d-ee82-4e6f-bdb9-fccb82e09496\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.973967 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hjrqb\" (UniqueName: \"kubernetes.io/projected/5eac3a8d-ee82-4e6f-bdb9-fccb82e09496-kube-api-access-hjrqb\") pod \"cinder-api-0\" (UID: \"5eac3a8d-ee82-4e6f-bdb9-fccb82e09496\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:51:33 crc kubenswrapper[4558]: I0120 17:51:33.975714 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f-scripts" (OuterVolumeSpecName: "scripts") pod "f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f" (UID: "f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:34 crc kubenswrapper[4558]: I0120 17:51:34.037636 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f-config-data" (OuterVolumeSpecName: "config-data") pod "f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f" (UID: "f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:34 crc kubenswrapper[4558]: I0120 17:51:34.054301 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:34 crc kubenswrapper[4558]: I0120 17:51:34.054339 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:34 crc kubenswrapper[4558]: I0120 17:51:34.054352 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qdq2c\" (UniqueName: \"kubernetes.io/projected/f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f-kube-api-access-qdq2c\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:34 crc kubenswrapper[4558]: I0120 17:51:34.062898 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:51:34 crc kubenswrapper[4558]: I0120 17:51:34.078323 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f" (UID: "f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:34 crc kubenswrapper[4558]: I0120 17:51:34.103332 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f" (UID: "f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:34 crc kubenswrapper[4558]: I0120 17:51:34.128209 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f" (UID: "f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:34 crc kubenswrapper[4558]: I0120 17:51:34.156964 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:34 crc kubenswrapper[4558]: I0120 17:51:34.156991 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:34 crc kubenswrapper[4558]: I0120 17:51:34.157011 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:34 crc kubenswrapper[4558]: I0120 17:51:34.303629 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-658f45b9f4-tx296"] Jan 20 17:51:34 crc kubenswrapper[4558]: I0120 17:51:34.308395 4558 generic.go:334] "Generic (PLEG): container finished" podID="f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f" containerID="aababf132d5446979839e0c577985c22fd9f60a631cd19ebf40fb35cac8af51c" exitCode=0 Jan 20 17:51:34 crc kubenswrapper[4558]: I0120 17:51:34.308429 4558 generic.go:334] "Generic (PLEG): container finished" podID="f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f" containerID="f05207f6b368474a1803d5134030c085eaccc7f1d38de03e11ae6754937b6bfd" exitCode=143 Jan 20 17:51:34 crc kubenswrapper[4558]: I0120 17:51:34.308453 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-7f6b9c747b-m4nwt" event={"ID":"f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f","Type":"ContainerDied","Data":"aababf132d5446979839e0c577985c22fd9f60a631cd19ebf40fb35cac8af51c"} Jan 20 17:51:34 crc kubenswrapper[4558]: I0120 17:51:34.308458 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-7f6b9c747b-m4nwt" Jan 20 17:51:34 crc kubenswrapper[4558]: I0120 17:51:34.308480 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-7f6b9c747b-m4nwt" event={"ID":"f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f","Type":"ContainerDied","Data":"f05207f6b368474a1803d5134030c085eaccc7f1d38de03e11ae6754937b6bfd"} Jan 20 17:51:34 crc kubenswrapper[4558]: I0120 17:51:34.308491 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-7f6b9c747b-m4nwt" event={"ID":"f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f","Type":"ContainerDied","Data":"29d181e3de66fefecb7164a13f30217176cb217ded0bf92e9c42f2b23abd7dfc"} Jan 20 17:51:34 crc kubenswrapper[4558]: I0120 17:51:34.308509 4558 scope.go:117] "RemoveContainer" containerID="aababf132d5446979839e0c577985c22fd9f60a631cd19ebf40fb35cac8af51c" Jan 20 17:51:34 crc kubenswrapper[4558]: I0120 17:51:34.399305 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:51:34 crc kubenswrapper[4558]: I0120 17:51:34.460368 4558 scope.go:117] "RemoveContainer" containerID="f05207f6b368474a1803d5134030c085eaccc7f1d38de03e11ae6754937b6bfd" Jan 20 17:51:34 crc kubenswrapper[4558]: I0120 17:51:34.467940 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-7f6b9c747b-m4nwt"] Jan 20 17:51:34 crc kubenswrapper[4558]: I0120 17:51:34.475114 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/placement-7f6b9c747b-m4nwt"] Jan 20 17:51:34 crc kubenswrapper[4558]: I0120 17:51:34.487979 4558 scope.go:117] "RemoveContainer" containerID="aababf132d5446979839e0c577985c22fd9f60a631cd19ebf40fb35cac8af51c" Jan 20 17:51:34 crc kubenswrapper[4558]: E0120 17:51:34.489258 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aababf132d5446979839e0c577985c22fd9f60a631cd19ebf40fb35cac8af51c\": container with ID starting with aababf132d5446979839e0c577985c22fd9f60a631cd19ebf40fb35cac8af51c not found: ID does not exist" containerID="aababf132d5446979839e0c577985c22fd9f60a631cd19ebf40fb35cac8af51c" Jan 20 17:51:34 crc kubenswrapper[4558]: I0120 17:51:34.489292 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aababf132d5446979839e0c577985c22fd9f60a631cd19ebf40fb35cac8af51c"} err="failed to get container status \"aababf132d5446979839e0c577985c22fd9f60a631cd19ebf40fb35cac8af51c\": rpc error: code = NotFound desc = could not find container \"aababf132d5446979839e0c577985c22fd9f60a631cd19ebf40fb35cac8af51c\": container with ID starting with aababf132d5446979839e0c577985c22fd9f60a631cd19ebf40fb35cac8af51c not found: ID does not exist" Jan 20 17:51:34 crc kubenswrapper[4558]: I0120 17:51:34.489316 4558 scope.go:117] "RemoveContainer" containerID="f05207f6b368474a1803d5134030c085eaccc7f1d38de03e11ae6754937b6bfd" Jan 20 17:51:34 crc kubenswrapper[4558]: E0120 17:51:34.495280 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f05207f6b368474a1803d5134030c085eaccc7f1d38de03e11ae6754937b6bfd\": container with ID starting with f05207f6b368474a1803d5134030c085eaccc7f1d38de03e11ae6754937b6bfd not found: ID does not exist" containerID="f05207f6b368474a1803d5134030c085eaccc7f1d38de03e11ae6754937b6bfd" Jan 20 17:51:34 crc kubenswrapper[4558]: I0120 17:51:34.495338 4558 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f05207f6b368474a1803d5134030c085eaccc7f1d38de03e11ae6754937b6bfd"} err="failed to get container status \"f05207f6b368474a1803d5134030c085eaccc7f1d38de03e11ae6754937b6bfd\": rpc error: code = NotFound desc = could not find container \"f05207f6b368474a1803d5134030c085eaccc7f1d38de03e11ae6754937b6bfd\": container with ID starting with f05207f6b368474a1803d5134030c085eaccc7f1d38de03e11ae6754937b6bfd not found: ID does not exist" Jan 20 17:51:34 crc kubenswrapper[4558]: I0120 17:51:34.495368 4558 scope.go:117] "RemoveContainer" containerID="aababf132d5446979839e0c577985c22fd9f60a631cd19ebf40fb35cac8af51c" Jan 20 17:51:34 crc kubenswrapper[4558]: I0120 17:51:34.496124 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aababf132d5446979839e0c577985c22fd9f60a631cd19ebf40fb35cac8af51c"} err="failed to get container status \"aababf132d5446979839e0c577985c22fd9f60a631cd19ebf40fb35cac8af51c\": rpc error: code = NotFound desc = could not find container \"aababf132d5446979839e0c577985c22fd9f60a631cd19ebf40fb35cac8af51c\": container with ID starting with aababf132d5446979839e0c577985c22fd9f60a631cd19ebf40fb35cac8af51c not found: ID does not exist" Jan 20 17:51:34 crc kubenswrapper[4558]: I0120 17:51:34.496150 4558 scope.go:117] "RemoveContainer" containerID="f05207f6b368474a1803d5134030c085eaccc7f1d38de03e11ae6754937b6bfd" Jan 20 17:51:34 crc kubenswrapper[4558]: I0120 17:51:34.496429 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f05207f6b368474a1803d5134030c085eaccc7f1d38de03e11ae6754937b6bfd"} err="failed to get container status \"f05207f6b368474a1803d5134030c085eaccc7f1d38de03e11ae6754937b6bfd\": rpc error: code = NotFound desc = could not find container \"f05207f6b368474a1803d5134030c085eaccc7f1d38de03e11ae6754937b6bfd\": container with ID starting with f05207f6b368474a1803d5134030c085eaccc7f1d38de03e11ae6754937b6bfd not found: ID does not exist" Jan 20 17:51:34 crc kubenswrapper[4558]: I0120 17:51:34.515888 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:51:34 crc kubenswrapper[4558]: W0120 17:51:34.520721 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5eac3a8d_ee82_4e6f_bdb9_fccb82e09496.slice/crio-0ab6ab06db756b88a51ad0a2e83ca10bfccc0258078b5272a270d4c644f81c49 WatchSource:0}: Error finding container 0ab6ab06db756b88a51ad0a2e83ca10bfccc0258078b5272a270d4c644f81c49: Status 404 returned error can't find the container with id 0ab6ab06db756b88a51ad0a2e83ca10bfccc0258078b5272a270d4c644f81c49 Jan 20 17:51:34 crc kubenswrapper[4558]: I0120 17:51:34.580830 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="acb2f7f3-f6d0-4b4f-936f-baee3dfa3938" path="/var/lib/kubelet/pods/acb2f7f3-f6d0-4b4f-936f-baee3dfa3938/volumes" Jan 20 17:51:34 crc kubenswrapper[4558]: I0120 17:51:34.581687 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dd518f67-fd09-4102-9934-594b833bf8c8" path="/var/lib/kubelet/pods/dd518f67-fd09-4102-9934-594b833bf8c8/volumes" Jan 20 17:51:34 crc kubenswrapper[4558]: I0120 17:51:34.582350 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f" path="/var/lib/kubelet/pods/f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f/volumes" Jan 20 17:51:35 crc 
kubenswrapper[4558]: I0120 17:51:35.327123 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-658f45b9f4-tx296" event={"ID":"0c9fc067-cb23-4f72-b928-5447fb5182c1","Type":"ContainerStarted","Data":"e300c235b0338f59c68e0edd429de09fcac767478cd41a3614d3e5af5e2760e6"} Jan 20 17:51:35 crc kubenswrapper[4558]: I0120 17:51:35.327192 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-658f45b9f4-tx296" event={"ID":"0c9fc067-cb23-4f72-b928-5447fb5182c1","Type":"ContainerStarted","Data":"a277d8fbf0b081fa88cc6f698d169256895a7e996cb99dec573f1f32b2c3f74c"} Jan 20 17:51:35 crc kubenswrapper[4558]: I0120 17:51:35.327208 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-658f45b9f4-tx296" event={"ID":"0c9fc067-cb23-4f72-b928-5447fb5182c1","Type":"ContainerStarted","Data":"beb7d131a353afccd1fcb9c2ad166867d42235008327a737d77ef0cc6f8b7011"} Jan 20 17:51:35 crc kubenswrapper[4558]: I0120 17:51:35.327846 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/placement-658f45b9f4-tx296" Jan 20 17:51:35 crc kubenswrapper[4558]: I0120 17:51:35.327878 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/placement-658f45b9f4-tx296" Jan 20 17:51:35 crc kubenswrapper[4558]: I0120 17:51:35.330334 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"6e7de5b3-4426-4816-9d45-9b4226333dbc","Type":"ContainerStarted","Data":"49e55db6bbf8e6b2aba631a3aec4df47c74f2b76c8c583fcb5c80461300cdc1b"} Jan 20 17:51:35 crc kubenswrapper[4558]: I0120 17:51:35.330375 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"6e7de5b3-4426-4816-9d45-9b4226333dbc","Type":"ContainerStarted","Data":"5e481f5bc5fbafdbeaf23fc31d695f949c1cb9231a43da689c3c8047fcf0e3bf"} Jan 20 17:51:35 crc kubenswrapper[4558]: I0120 17:51:35.330386 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"6e7de5b3-4426-4816-9d45-9b4226333dbc","Type":"ContainerStarted","Data":"dc50c110f28d874ec9bb3cd35699a7de945a2543c44d84de595c418644ca4a10"} Jan 20 17:51:35 crc kubenswrapper[4558]: I0120 17:51:35.332947 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"5eac3a8d-ee82-4e6f-bdb9-fccb82e09496","Type":"ContainerStarted","Data":"931107b5dd291893e36bfee944ab2879ec15efdaafdf4d8d3e976539e9c14eab"} Jan 20 17:51:35 crc kubenswrapper[4558]: I0120 17:51:35.332976 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"5eac3a8d-ee82-4e6f-bdb9-fccb82e09496","Type":"ContainerStarted","Data":"0ab6ab06db756b88a51ad0a2e83ca10bfccc0258078b5272a270d4c644f81c49"} Jan 20 17:51:35 crc kubenswrapper[4558]: I0120 17:51:35.346414 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/placement-658f45b9f4-tx296" podStartSLOduration=2.346401367 podStartE2EDuration="2.346401367s" podCreationTimestamp="2026-01-20 17:51:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:51:35.346214185 +0000 UTC m=+4189.106552152" watchObservedRunningTime="2026-01-20 17:51:35.346401367 +0000 UTC m=+4189.106739334" Jan 20 17:51:35 crc kubenswrapper[4558]: I0120 17:51:35.364757 4558 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-metadata-0" podStartSLOduration=2.364742105 podStartE2EDuration="2.364742105s" podCreationTimestamp="2026-01-20 17:51:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:51:35.359669616 +0000 UTC m=+4189.120007584" watchObservedRunningTime="2026-01-20 17:51:35.364742105 +0000 UTC m=+4189.125080071" Jan 20 17:51:36 crc kubenswrapper[4558]: I0120 17:51:36.345557 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"5eac3a8d-ee82-4e6f-bdb9-fccb82e09496","Type":"ContainerStarted","Data":"c20582ab3d42dc2c2fc8c4708a740fdb26d1d7abea4ac13a5a595f35117f0d0b"} Jan 20 17:51:36 crc kubenswrapper[4558]: I0120 17:51:36.377574 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/cinder-api-0" podStartSLOduration=3.377555535 podStartE2EDuration="3.377555535s" podCreationTimestamp="2026-01-20 17:51:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:51:36.362045149 +0000 UTC m=+4190.122383117" watchObservedRunningTime="2026-01-20 17:51:36.377555535 +0000 UTC m=+4190.137893492" Jan 20 17:51:36 crc kubenswrapper[4558]: I0120 17:51:36.567948 4558 scope.go:117] "RemoveContainer" containerID="a83e386e23548bf8e99d8a3f739f604866a2baad6493f364fe0412f39f677f52" Jan 20 17:51:36 crc kubenswrapper[4558]: E0120 17:51:36.568684 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-cell1-conductor-conductor\" with CrashLoopBackOff: \"back-off 20s restarting failed container=nova-cell1-conductor-conductor pod=nova-cell1-conductor-0_openstack-kuttl-tests(7c16f0c4-1462-4817-9f74-9e3d93193867)\"" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podUID="7c16f0c4-1462-4817-9f74-9e3d93193867" Jan 20 17:51:37 crc kubenswrapper[4558]: I0120 17:51:37.355411 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:51:37 crc kubenswrapper[4558]: I0120 17:51:37.566370 4558 scope.go:117] "RemoveContainer" containerID="70539f03bf1b2ace01f24a9072ce8275fb4f5d92a181a991e07e1469387f4f02" Jan 20 17:51:37 crc kubenswrapper[4558]: E0120 17:51:37.566633 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-cell0-conductor-conductor\" with CrashLoopBackOff: \"back-off 40s restarting failed container=nova-cell0-conductor-conductor pod=nova-cell0-conductor-0_openstack-kuttl-tests(924fa7ce-8d60-4b2f-b62b-d5e146474f71)\"" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="924fa7ce-8d60-4b2f-b62b-d5e146474f71" Jan 20 17:51:38 crc kubenswrapper[4558]: I0120 17:51:38.847872 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:51:38 crc kubenswrapper[4558]: I0120 17:51:38.848207 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:51:38 crc kubenswrapper[4558]: I0120 17:51:38.862446 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:51:38 crc kubenswrapper[4558]: I0120 17:51:38.862669 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" 
podUID="0b958dc9-cb4f-4494-aa93-73ace6dcdc61" containerName="ceilometer-central-agent" containerID="cri-o://8a78fc71a2e62ed8e4693ae28f1a90dfa8b0fa3f8be4e9a47bb03259d43a4b68" gracePeriod=30 Jan 20 17:51:38 crc kubenswrapper[4558]: I0120 17:51:38.862760 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="0b958dc9-cb4f-4494-aa93-73ace6dcdc61" containerName="sg-core" containerID="cri-o://4af2d6d8bcacf242d89251f17f26c244c27d416082e3b9078fac945c4afb9920" gracePeriod=30 Jan 20 17:51:38 crc kubenswrapper[4558]: I0120 17:51:38.862706 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="0b958dc9-cb4f-4494-aa93-73ace6dcdc61" containerName="proxy-httpd" containerID="cri-o://a269df80b321b46782542c7267ea65a9ec0f5ce92626e5b0afab4be6fd049ba6" gracePeriod=30 Jan 20 17:51:38 crc kubenswrapper[4558]: I0120 17:51:38.863286 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="0b958dc9-cb4f-4494-aa93-73ace6dcdc61" containerName="ceilometer-notification-agent" containerID="cri-o://66fa06375ac661894141d34db8dc370e508df42a50db850135c8f080e8692b6d" gracePeriod=30 Jan 20 17:51:39 crc kubenswrapper[4558]: I0120 17:51:39.206955 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/barbican-api-5bfc7dd8b8-pxpm9" Jan 20 17:51:39 crc kubenswrapper[4558]: I0120 17:51:39.245380 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/barbican-api-5bfc7dd8b8-pxpm9" Jan 20 17:51:39 crc kubenswrapper[4558]: I0120 17:51:39.327150 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-api-7cd9889bc6-72r2h"] Jan 20 17:51:39 crc kubenswrapper[4558]: I0120 17:51:39.327446 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-api-7cd9889bc6-72r2h" podUID="c1e7ea27-3179-4c51-a9d9-2422702f08fa" containerName="barbican-api-log" containerID="cri-o://9c6a604544c830e2edb70b9f7c0f4ad6688b94a6a3eaabf65c2198c2e9f86440" gracePeriod=30 Jan 20 17:51:39 crc kubenswrapper[4558]: I0120 17:51:39.327780 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-api-7cd9889bc6-72r2h" podUID="c1e7ea27-3179-4c51-a9d9-2422702f08fa" containerName="barbican-api" containerID="cri-o://db761370e2918d26be14f649374ac37f0e12f0edc29e9d9fbdd1676c2fcd0724" gracePeriod=30 Jan 20 17:51:39 crc kubenswrapper[4558]: I0120 17:51:39.383872 4558 generic.go:334] "Generic (PLEG): container finished" podID="0b958dc9-cb4f-4494-aa93-73ace6dcdc61" containerID="a269df80b321b46782542c7267ea65a9ec0f5ce92626e5b0afab4be6fd049ba6" exitCode=0 Jan 20 17:51:39 crc kubenswrapper[4558]: I0120 17:51:39.383906 4558 generic.go:334] "Generic (PLEG): container finished" podID="0b958dc9-cb4f-4494-aa93-73ace6dcdc61" containerID="4af2d6d8bcacf242d89251f17f26c244c27d416082e3b9078fac945c4afb9920" exitCode=2 Jan 20 17:51:39 crc kubenswrapper[4558]: I0120 17:51:39.383915 4558 generic.go:334] "Generic (PLEG): container finished" podID="0b958dc9-cb4f-4494-aa93-73ace6dcdc61" containerID="66fa06375ac661894141d34db8dc370e508df42a50db850135c8f080e8692b6d" exitCode=0 Jan 20 17:51:39 crc kubenswrapper[4558]: I0120 17:51:39.383923 4558 generic.go:334] "Generic (PLEG): container finished" podID="0b958dc9-cb4f-4494-aa93-73ace6dcdc61" 
containerID="8a78fc71a2e62ed8e4693ae28f1a90dfa8b0fa3f8be4e9a47bb03259d43a4b68" exitCode=0 Jan 20 17:51:39 crc kubenswrapper[4558]: I0120 17:51:39.385192 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"0b958dc9-cb4f-4494-aa93-73ace6dcdc61","Type":"ContainerDied","Data":"a269df80b321b46782542c7267ea65a9ec0f5ce92626e5b0afab4be6fd049ba6"} Jan 20 17:51:39 crc kubenswrapper[4558]: I0120 17:51:39.385235 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"0b958dc9-cb4f-4494-aa93-73ace6dcdc61","Type":"ContainerDied","Data":"4af2d6d8bcacf242d89251f17f26c244c27d416082e3b9078fac945c4afb9920"} Jan 20 17:51:39 crc kubenswrapper[4558]: I0120 17:51:39.385248 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"0b958dc9-cb4f-4494-aa93-73ace6dcdc61","Type":"ContainerDied","Data":"66fa06375ac661894141d34db8dc370e508df42a50db850135c8f080e8692b6d"} Jan 20 17:51:39 crc kubenswrapper[4558]: I0120 17:51:39.385258 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"0b958dc9-cb4f-4494-aa93-73ace6dcdc61","Type":"ContainerDied","Data":"8a78fc71a2e62ed8e4693ae28f1a90dfa8b0fa3f8be4e9a47bb03259d43a4b68"} Jan 20 17:51:39 crc kubenswrapper[4558]: I0120 17:51:39.605794 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:51:39 crc kubenswrapper[4558]: I0120 17:51:39.706353 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0b958dc9-cb4f-4494-aa93-73ace6dcdc61-log-httpd\") pod \"0b958dc9-cb4f-4494-aa93-73ace6dcdc61\" (UID: \"0b958dc9-cb4f-4494-aa93-73ace6dcdc61\") " Jan 20 17:51:39 crc kubenswrapper[4558]: I0120 17:51:39.706454 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x5gqs\" (UniqueName: \"kubernetes.io/projected/0b958dc9-cb4f-4494-aa93-73ace6dcdc61-kube-api-access-x5gqs\") pod \"0b958dc9-cb4f-4494-aa93-73ace6dcdc61\" (UID: \"0b958dc9-cb4f-4494-aa93-73ace6dcdc61\") " Jan 20 17:51:39 crc kubenswrapper[4558]: I0120 17:51:39.706632 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b958dc9-cb4f-4494-aa93-73ace6dcdc61-combined-ca-bundle\") pod \"0b958dc9-cb4f-4494-aa93-73ace6dcdc61\" (UID: \"0b958dc9-cb4f-4494-aa93-73ace6dcdc61\") " Jan 20 17:51:39 crc kubenswrapper[4558]: I0120 17:51:39.706666 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/0b958dc9-cb4f-4494-aa93-73ace6dcdc61-ceilometer-tls-certs\") pod \"0b958dc9-cb4f-4494-aa93-73ace6dcdc61\" (UID: \"0b958dc9-cb4f-4494-aa93-73ace6dcdc61\") " Jan 20 17:51:39 crc kubenswrapper[4558]: I0120 17:51:39.706828 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0b958dc9-cb4f-4494-aa93-73ace6dcdc61-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "0b958dc9-cb4f-4494-aa93-73ace6dcdc61" (UID: "0b958dc9-cb4f-4494-aa93-73ace6dcdc61"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:51:39 crc kubenswrapper[4558]: I0120 17:51:39.707459 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0b958dc9-cb4f-4494-aa93-73ace6dcdc61-scripts\") pod \"0b958dc9-cb4f-4494-aa93-73ace6dcdc61\" (UID: \"0b958dc9-cb4f-4494-aa93-73ace6dcdc61\") " Jan 20 17:51:39 crc kubenswrapper[4558]: I0120 17:51:39.707651 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0b958dc9-cb4f-4494-aa93-73ace6dcdc61-config-data\") pod \"0b958dc9-cb4f-4494-aa93-73ace6dcdc61\" (UID: \"0b958dc9-cb4f-4494-aa93-73ace6dcdc61\") " Jan 20 17:51:39 crc kubenswrapper[4558]: I0120 17:51:39.707805 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0b958dc9-cb4f-4494-aa93-73ace6dcdc61-sg-core-conf-yaml\") pod \"0b958dc9-cb4f-4494-aa93-73ace6dcdc61\" (UID: \"0b958dc9-cb4f-4494-aa93-73ace6dcdc61\") " Jan 20 17:51:39 crc kubenswrapper[4558]: I0120 17:51:39.708028 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0b958dc9-cb4f-4494-aa93-73ace6dcdc61-run-httpd\") pod \"0b958dc9-cb4f-4494-aa93-73ace6dcdc61\" (UID: \"0b958dc9-cb4f-4494-aa93-73ace6dcdc61\") " Jan 20 17:51:39 crc kubenswrapper[4558]: I0120 17:51:39.709503 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0b958dc9-cb4f-4494-aa93-73ace6dcdc61-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:39 crc kubenswrapper[4558]: I0120 17:51:39.710446 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0b958dc9-cb4f-4494-aa93-73ace6dcdc61-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "0b958dc9-cb4f-4494-aa93-73ace6dcdc61" (UID: "0b958dc9-cb4f-4494-aa93-73ace6dcdc61"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:51:39 crc kubenswrapper[4558]: I0120 17:51:39.714056 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b958dc9-cb4f-4494-aa93-73ace6dcdc61-kube-api-access-x5gqs" (OuterVolumeSpecName: "kube-api-access-x5gqs") pod "0b958dc9-cb4f-4494-aa93-73ace6dcdc61" (UID: "0b958dc9-cb4f-4494-aa93-73ace6dcdc61"). InnerVolumeSpecName "kube-api-access-x5gqs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:51:39 crc kubenswrapper[4558]: I0120 17:51:39.714350 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b958dc9-cb4f-4494-aa93-73ace6dcdc61-scripts" (OuterVolumeSpecName: "scripts") pod "0b958dc9-cb4f-4494-aa93-73ace6dcdc61" (UID: "0b958dc9-cb4f-4494-aa93-73ace6dcdc61"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:39 crc kubenswrapper[4558]: I0120 17:51:39.743995 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b958dc9-cb4f-4494-aa93-73ace6dcdc61-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "0b958dc9-cb4f-4494-aa93-73ace6dcdc61" (UID: "0b958dc9-cb4f-4494-aa93-73ace6dcdc61"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:39 crc kubenswrapper[4558]: I0120 17:51:39.756732 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:51:39 crc kubenswrapper[4558]: I0120 17:51:39.756791 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:51:39 crc kubenswrapper[4558]: I0120 17:51:39.769489 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b958dc9-cb4f-4494-aa93-73ace6dcdc61-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "0b958dc9-cb4f-4494-aa93-73ace6dcdc61" (UID: "0b958dc9-cb4f-4494-aa93-73ace6dcdc61"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:39 crc kubenswrapper[4558]: I0120 17:51:39.789040 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b958dc9-cb4f-4494-aa93-73ace6dcdc61-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0b958dc9-cb4f-4494-aa93-73ace6dcdc61" (UID: "0b958dc9-cb4f-4494-aa93-73ace6dcdc61"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:39 crc kubenswrapper[4558]: I0120 17:51:39.810455 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b958dc9-cb4f-4494-aa93-73ace6dcdc61-config-data" (OuterVolumeSpecName: "config-data") pod "0b958dc9-cb4f-4494-aa93-73ace6dcdc61" (UID: "0b958dc9-cb4f-4494-aa93-73ace6dcdc61"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:39 crc kubenswrapper[4558]: I0120 17:51:39.812851 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x5gqs\" (UniqueName: \"kubernetes.io/projected/0b958dc9-cb4f-4494-aa93-73ace6dcdc61-kube-api-access-x5gqs\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:39 crc kubenswrapper[4558]: I0120 17:51:39.812881 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b958dc9-cb4f-4494-aa93-73ace6dcdc61-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:39 crc kubenswrapper[4558]: I0120 17:51:39.812893 4558 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/0b958dc9-cb4f-4494-aa93-73ace6dcdc61-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:39 crc kubenswrapper[4558]: I0120 17:51:39.812904 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0b958dc9-cb4f-4494-aa93-73ace6dcdc61-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:39 crc kubenswrapper[4558]: I0120 17:51:39.812914 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0b958dc9-cb4f-4494-aa93-73ace6dcdc61-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:39 crc kubenswrapper[4558]: I0120 17:51:39.812924 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0b958dc9-cb4f-4494-aa93-73ace6dcdc61-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:39 crc kubenswrapper[4558]: I0120 17:51:39.812934 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/0b958dc9-cb4f-4494-aa93-73ace6dcdc61-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:40 crc kubenswrapper[4558]: I0120 17:51:40.410973 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"0b958dc9-cb4f-4494-aa93-73ace6dcdc61","Type":"ContainerDied","Data":"06b10739b6519aa32e9883e5de2135cfd7587729ac30dc7d54920de4c316ee81"} Jan 20 17:51:40 crc kubenswrapper[4558]: I0120 17:51:40.411047 4558 scope.go:117] "RemoveContainer" containerID="a269df80b321b46782542c7267ea65a9ec0f5ce92626e5b0afab4be6fd049ba6" Jan 20 17:51:40 crc kubenswrapper[4558]: I0120 17:51:40.411241 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:51:40 crc kubenswrapper[4558]: I0120 17:51:40.430914 4558 generic.go:334] "Generic (PLEG): container finished" podID="c1e7ea27-3179-4c51-a9d9-2422702f08fa" containerID="9c6a604544c830e2edb70b9f7c0f4ad6688b94a6a3eaabf65c2198c2e9f86440" exitCode=143 Jan 20 17:51:40 crc kubenswrapper[4558]: I0120 17:51:40.430995 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-7cd9889bc6-72r2h" event={"ID":"c1e7ea27-3179-4c51-a9d9-2422702f08fa","Type":"ContainerDied","Data":"9c6a604544c830e2edb70b9f7c0f4ad6688b94a6a3eaabf65c2198c2e9f86440"} Jan 20 17:51:40 crc kubenswrapper[4558]: I0120 17:51:40.452252 4558 scope.go:117] "RemoveContainer" containerID="4af2d6d8bcacf242d89251f17f26c244c27d416082e3b9078fac945c4afb9920" Jan 20 17:51:40 crc kubenswrapper[4558]: I0120 17:51:40.461420 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:51:40 crc kubenswrapper[4558]: I0120 17:51:40.473871 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:51:40 crc kubenswrapper[4558]: I0120 17:51:40.480748 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:51:40 crc kubenswrapper[4558]: E0120 17:51:40.481319 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b958dc9-cb4f-4494-aa93-73ace6dcdc61" containerName="sg-core" Jan 20 17:51:40 crc kubenswrapper[4558]: I0120 17:51:40.481343 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b958dc9-cb4f-4494-aa93-73ace6dcdc61" containerName="sg-core" Jan 20 17:51:40 crc kubenswrapper[4558]: E0120 17:51:40.481363 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b958dc9-cb4f-4494-aa93-73ace6dcdc61" containerName="proxy-httpd" Jan 20 17:51:40 crc kubenswrapper[4558]: I0120 17:51:40.481371 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b958dc9-cb4f-4494-aa93-73ace6dcdc61" containerName="proxy-httpd" Jan 20 17:51:40 crc kubenswrapper[4558]: E0120 17:51:40.481394 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b958dc9-cb4f-4494-aa93-73ace6dcdc61" containerName="ceilometer-central-agent" Jan 20 17:51:40 crc kubenswrapper[4558]: I0120 17:51:40.481400 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b958dc9-cb4f-4494-aa93-73ace6dcdc61" containerName="ceilometer-central-agent" Jan 20 17:51:40 crc kubenswrapper[4558]: E0120 17:51:40.481425 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f" containerName="placement-api" Jan 20 17:51:40 crc kubenswrapper[4558]: I0120 17:51:40.481431 4558 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f" containerName="placement-api" Jan 20 17:51:40 crc kubenswrapper[4558]: E0120 17:51:40.481448 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b958dc9-cb4f-4494-aa93-73ace6dcdc61" containerName="ceilometer-notification-agent" Jan 20 17:51:40 crc kubenswrapper[4558]: I0120 17:51:40.481453 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b958dc9-cb4f-4494-aa93-73ace6dcdc61" containerName="ceilometer-notification-agent" Jan 20 17:51:40 crc kubenswrapper[4558]: E0120 17:51:40.481465 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f" containerName="placement-log" Jan 20 17:51:40 crc kubenswrapper[4558]: I0120 17:51:40.481473 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f" containerName="placement-log" Jan 20 17:51:40 crc kubenswrapper[4558]: I0120 17:51:40.481711 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="0b958dc9-cb4f-4494-aa93-73ace6dcdc61" containerName="proxy-httpd" Jan 20 17:51:40 crc kubenswrapper[4558]: I0120 17:51:40.481727 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f" containerName="placement-api" Jan 20 17:51:40 crc kubenswrapper[4558]: I0120 17:51:40.481738 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="0b958dc9-cb4f-4494-aa93-73ace6dcdc61" containerName="ceilometer-central-agent" Jan 20 17:51:40 crc kubenswrapper[4558]: I0120 17:51:40.481751 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9bdec5b-a12e-4c73-9cb6-3b50c6bf3b0f" containerName="placement-log" Jan 20 17:51:40 crc kubenswrapper[4558]: I0120 17:51:40.481764 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="0b958dc9-cb4f-4494-aa93-73ace6dcdc61" containerName="sg-core" Jan 20 17:51:40 crc kubenswrapper[4558]: I0120 17:51:40.481775 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="0b958dc9-cb4f-4494-aa93-73ace6dcdc61" containerName="ceilometer-notification-agent" Jan 20 17:51:40 crc kubenswrapper[4558]: I0120 17:51:40.490553 4558 scope.go:117] "RemoveContainer" containerID="66fa06375ac661894141d34db8dc370e508df42a50db850135c8f080e8692b6d" Jan 20 17:51:40 crc kubenswrapper[4558]: I0120 17:51:40.493881 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:51:40 crc kubenswrapper[4558]: I0120 17:51:40.493984 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:51:40 crc kubenswrapper[4558]: I0120 17:51:40.498935 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:51:40 crc kubenswrapper[4558]: I0120 17:51:40.499209 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ceilometer-internal-svc" Jan 20 17:51:40 crc kubenswrapper[4558]: I0120 17:51:40.507103 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:51:40 crc kubenswrapper[4558]: I0120 17:51:40.535445 4558 scope.go:117] "RemoveContainer" containerID="8a78fc71a2e62ed8e4693ae28f1a90dfa8b0fa3f8be4e9a47bb03259d43a4b68" Jan 20 17:51:40 crc kubenswrapper[4558]: I0120 17:51:40.578064 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b958dc9-cb4f-4494-aa93-73ace6dcdc61" path="/var/lib/kubelet/pods/0b958dc9-cb4f-4494-aa93-73ace6dcdc61/volumes" Jan 20 17:51:40 crc kubenswrapper[4558]: I0120 17:51:40.634602 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1e9bc275-8603-4dc8-ab3f-9d63bea731a0-config-data\") pod \"ceilometer-0\" (UID: \"1e9bc275-8603-4dc8-ab3f-9d63bea731a0\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:51:40 crc kubenswrapper[4558]: I0120 17:51:40.634687 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1e9bc275-8603-4dc8-ab3f-9d63bea731a0-log-httpd\") pod \"ceilometer-0\" (UID: \"1e9bc275-8603-4dc8-ab3f-9d63bea731a0\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:51:40 crc kubenswrapper[4558]: I0120 17:51:40.634720 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e9bc275-8603-4dc8-ab3f-9d63bea731a0-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"1e9bc275-8603-4dc8-ab3f-9d63bea731a0\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:51:40 crc kubenswrapper[4558]: I0120 17:51:40.634769 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1e9bc275-8603-4dc8-ab3f-9d63bea731a0-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"1e9bc275-8603-4dc8-ab3f-9d63bea731a0\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:51:40 crc kubenswrapper[4558]: I0120 17:51:40.634793 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rql4j\" (UniqueName: \"kubernetes.io/projected/1e9bc275-8603-4dc8-ab3f-9d63bea731a0-kube-api-access-rql4j\") pod \"ceilometer-0\" (UID: \"1e9bc275-8603-4dc8-ab3f-9d63bea731a0\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:51:40 crc kubenswrapper[4558]: I0120 17:51:40.634826 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1e9bc275-8603-4dc8-ab3f-9d63bea731a0-run-httpd\") pod \"ceilometer-0\" (UID: \"1e9bc275-8603-4dc8-ab3f-9d63bea731a0\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:51:40 crc kubenswrapper[4558]: I0120 17:51:40.634883 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/1e9bc275-8603-4dc8-ab3f-9d63bea731a0-scripts\") pod \"ceilometer-0\" (UID: \"1e9bc275-8603-4dc8-ab3f-9d63bea731a0\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:51:40 crc kubenswrapper[4558]: I0120 17:51:40.634922 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e9bc275-8603-4dc8-ab3f-9d63bea731a0-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"1e9bc275-8603-4dc8-ab3f-9d63bea731a0\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:51:40 crc kubenswrapper[4558]: I0120 17:51:40.737477 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1e9bc275-8603-4dc8-ab3f-9d63bea731a0-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"1e9bc275-8603-4dc8-ab3f-9d63bea731a0\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:51:40 crc kubenswrapper[4558]: I0120 17:51:40.737552 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rql4j\" (UniqueName: \"kubernetes.io/projected/1e9bc275-8603-4dc8-ab3f-9d63bea731a0-kube-api-access-rql4j\") pod \"ceilometer-0\" (UID: \"1e9bc275-8603-4dc8-ab3f-9d63bea731a0\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:51:40 crc kubenswrapper[4558]: I0120 17:51:40.737627 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1e9bc275-8603-4dc8-ab3f-9d63bea731a0-run-httpd\") pod \"ceilometer-0\" (UID: \"1e9bc275-8603-4dc8-ab3f-9d63bea731a0\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:51:40 crc kubenswrapper[4558]: I0120 17:51:40.737689 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1e9bc275-8603-4dc8-ab3f-9d63bea731a0-scripts\") pod \"ceilometer-0\" (UID: \"1e9bc275-8603-4dc8-ab3f-9d63bea731a0\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:51:40 crc kubenswrapper[4558]: I0120 17:51:40.737767 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e9bc275-8603-4dc8-ab3f-9d63bea731a0-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"1e9bc275-8603-4dc8-ab3f-9d63bea731a0\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:51:40 crc kubenswrapper[4558]: I0120 17:51:40.737935 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1e9bc275-8603-4dc8-ab3f-9d63bea731a0-config-data\") pod \"ceilometer-0\" (UID: \"1e9bc275-8603-4dc8-ab3f-9d63bea731a0\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:51:40 crc kubenswrapper[4558]: I0120 17:51:40.738067 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1e9bc275-8603-4dc8-ab3f-9d63bea731a0-log-httpd\") pod \"ceilometer-0\" (UID: \"1e9bc275-8603-4dc8-ab3f-9d63bea731a0\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:51:40 crc kubenswrapper[4558]: I0120 17:51:40.738104 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e9bc275-8603-4dc8-ab3f-9d63bea731a0-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"1e9bc275-8603-4dc8-ab3f-9d63bea731a0\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:51:40 crc kubenswrapper[4558]: 
I0120 17:51:40.739933 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1e9bc275-8603-4dc8-ab3f-9d63bea731a0-run-httpd\") pod \"ceilometer-0\" (UID: \"1e9bc275-8603-4dc8-ab3f-9d63bea731a0\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:51:40 crc kubenswrapper[4558]: I0120 17:51:40.740102 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1e9bc275-8603-4dc8-ab3f-9d63bea731a0-log-httpd\") pod \"ceilometer-0\" (UID: \"1e9bc275-8603-4dc8-ab3f-9d63bea731a0\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:51:40 crc kubenswrapper[4558]: I0120 17:51:40.746802 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1e9bc275-8603-4dc8-ab3f-9d63bea731a0-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"1e9bc275-8603-4dc8-ab3f-9d63bea731a0\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:51:40 crc kubenswrapper[4558]: I0120 17:51:40.747636 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1e9bc275-8603-4dc8-ab3f-9d63bea731a0-scripts\") pod \"ceilometer-0\" (UID: \"1e9bc275-8603-4dc8-ab3f-9d63bea731a0\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:51:40 crc kubenswrapper[4558]: I0120 17:51:40.747717 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1e9bc275-8603-4dc8-ab3f-9d63bea731a0-config-data\") pod \"ceilometer-0\" (UID: \"1e9bc275-8603-4dc8-ab3f-9d63bea731a0\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:51:40 crc kubenswrapper[4558]: I0120 17:51:40.748129 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e9bc275-8603-4dc8-ab3f-9d63bea731a0-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"1e9bc275-8603-4dc8-ab3f-9d63bea731a0\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:51:40 crc kubenswrapper[4558]: I0120 17:51:40.750830 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e9bc275-8603-4dc8-ab3f-9d63bea731a0-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"1e9bc275-8603-4dc8-ab3f-9d63bea731a0\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:51:40 crc kubenswrapper[4558]: I0120 17:51:40.767878 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rql4j\" (UniqueName: \"kubernetes.io/projected/1e9bc275-8603-4dc8-ab3f-9d63bea731a0-kube-api-access-rql4j\") pod \"ceilometer-0\" (UID: \"1e9bc275-8603-4dc8-ab3f-9d63bea731a0\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:51:40 crc kubenswrapper[4558]: I0120 17:51:40.783341 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" podUID="64e30e89-cc14-4c89-bd1b-5702bfba717c" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.1.230:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:51:40 crc kubenswrapper[4558]: I0120 17:51:40.783375 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" podUID="64e30e89-cc14-4c89-bd1b-5702bfba717c" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.1.230:8774/\": net/http: request canceled (Client.Timeout 
exceeded while awaiting headers)" Jan 20 17:51:40 crc kubenswrapper[4558]: I0120 17:51:40.838840 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:51:41 crc kubenswrapper[4558]: I0120 17:51:41.541974 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:51:41 crc kubenswrapper[4558]: W0120 17:51:41.551552 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1e9bc275_8603_4dc8_ab3f_9d63bea731a0.slice/crio-ade7f4eaebd3a69f30d55e10eadbddfc46e10a7178045bbae8facf16d27ffd69 WatchSource:0}: Error finding container ade7f4eaebd3a69f30d55e10eadbddfc46e10a7178045bbae8facf16d27ffd69: Status 404 returned error can't find the container with id ade7f4eaebd3a69f30d55e10eadbddfc46e10a7178045bbae8facf16d27ffd69 Jan 20 17:51:42 crc kubenswrapper[4558]: I0120 17:51:42.457656 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"1e9bc275-8603-4dc8-ab3f-9d63bea731a0","Type":"ContainerStarted","Data":"4389e7ca5201cf3921b6868ade9268aebc56ca43ff8d253051310b83c33e7766"} Jan 20 17:51:42 crc kubenswrapper[4558]: I0120 17:51:42.458157 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"1e9bc275-8603-4dc8-ab3f-9d63bea731a0","Type":"ContainerStarted","Data":"ade7f4eaebd3a69f30d55e10eadbddfc46e10a7178045bbae8facf16d27ffd69"} Jan 20 17:51:42 crc kubenswrapper[4558]: I0120 17:51:42.487011 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/barbican-api-7cd9889bc6-72r2h" podUID="c1e7ea27-3179-4c51-a9d9-2422702f08fa" containerName="barbican-api" probeResult="failure" output="Get \"https://10.217.1.218:9311/healthcheck\": read tcp 10.217.0.2:59644->10.217.1.218:9311: read: connection reset by peer" Jan 20 17:51:42 crc kubenswrapper[4558]: I0120 17:51:42.487019 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/barbican-api-7cd9889bc6-72r2h" podUID="c1e7ea27-3179-4c51-a9d9-2422702f08fa" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.1.218:9311/healthcheck\": read tcp 10.217.0.2:59630->10.217.1.218:9311: read: connection reset by peer" Jan 20 17:51:43 crc kubenswrapper[4558]: I0120 17:51:43.003645 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-api-7cd9889bc6-72r2h" Jan 20 17:51:43 crc kubenswrapper[4558]: I0120 17:51:43.101288 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1e7ea27-3179-4c51-a9d9-2422702f08fa-combined-ca-bundle\") pod \"c1e7ea27-3179-4c51-a9d9-2422702f08fa\" (UID: \"c1e7ea27-3179-4c51-a9d9-2422702f08fa\") " Jan 20 17:51:43 crc kubenswrapper[4558]: I0120 17:51:43.101355 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c1e7ea27-3179-4c51-a9d9-2422702f08fa-internal-tls-certs\") pod \"c1e7ea27-3179-4c51-a9d9-2422702f08fa\" (UID: \"c1e7ea27-3179-4c51-a9d9-2422702f08fa\") " Jan 20 17:51:43 crc kubenswrapper[4558]: I0120 17:51:43.101382 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c1e7ea27-3179-4c51-a9d9-2422702f08fa-public-tls-certs\") pod \"c1e7ea27-3179-4c51-a9d9-2422702f08fa\" (UID: \"c1e7ea27-3179-4c51-a9d9-2422702f08fa\") " Jan 20 17:51:43 crc kubenswrapper[4558]: I0120 17:51:43.101415 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c1e7ea27-3179-4c51-a9d9-2422702f08fa-logs\") pod \"c1e7ea27-3179-4c51-a9d9-2422702f08fa\" (UID: \"c1e7ea27-3179-4c51-a9d9-2422702f08fa\") " Jan 20 17:51:43 crc kubenswrapper[4558]: I0120 17:51:43.101586 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c1e7ea27-3179-4c51-a9d9-2422702f08fa-config-data\") pod \"c1e7ea27-3179-4c51-a9d9-2422702f08fa\" (UID: \"c1e7ea27-3179-4c51-a9d9-2422702f08fa\") " Jan 20 17:51:43 crc kubenswrapper[4558]: I0120 17:51:43.101623 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4dtkf\" (UniqueName: \"kubernetes.io/projected/c1e7ea27-3179-4c51-a9d9-2422702f08fa-kube-api-access-4dtkf\") pod \"c1e7ea27-3179-4c51-a9d9-2422702f08fa\" (UID: \"c1e7ea27-3179-4c51-a9d9-2422702f08fa\") " Jan 20 17:51:43 crc kubenswrapper[4558]: I0120 17:51:43.101682 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c1e7ea27-3179-4c51-a9d9-2422702f08fa-config-data-custom\") pod \"c1e7ea27-3179-4c51-a9d9-2422702f08fa\" (UID: \"c1e7ea27-3179-4c51-a9d9-2422702f08fa\") " Jan 20 17:51:43 crc kubenswrapper[4558]: I0120 17:51:43.104076 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c1e7ea27-3179-4c51-a9d9-2422702f08fa-logs" (OuterVolumeSpecName: "logs") pod "c1e7ea27-3179-4c51-a9d9-2422702f08fa" (UID: "c1e7ea27-3179-4c51-a9d9-2422702f08fa"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:51:43 crc kubenswrapper[4558]: I0120 17:51:43.107152 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c1e7ea27-3179-4c51-a9d9-2422702f08fa-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "c1e7ea27-3179-4c51-a9d9-2422702f08fa" (UID: "c1e7ea27-3179-4c51-a9d9-2422702f08fa"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:43 crc kubenswrapper[4558]: I0120 17:51:43.108201 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c1e7ea27-3179-4c51-a9d9-2422702f08fa-kube-api-access-4dtkf" (OuterVolumeSpecName: "kube-api-access-4dtkf") pod "c1e7ea27-3179-4c51-a9d9-2422702f08fa" (UID: "c1e7ea27-3179-4c51-a9d9-2422702f08fa"). InnerVolumeSpecName "kube-api-access-4dtkf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:51:43 crc kubenswrapper[4558]: I0120 17:51:43.131118 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c1e7ea27-3179-4c51-a9d9-2422702f08fa-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c1e7ea27-3179-4c51-a9d9-2422702f08fa" (UID: "c1e7ea27-3179-4c51-a9d9-2422702f08fa"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:43 crc kubenswrapper[4558]: I0120 17:51:43.147034 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c1e7ea27-3179-4c51-a9d9-2422702f08fa-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "c1e7ea27-3179-4c51-a9d9-2422702f08fa" (UID: "c1e7ea27-3179-4c51-a9d9-2422702f08fa"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:43 crc kubenswrapper[4558]: I0120 17:51:43.156578 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c1e7ea27-3179-4c51-a9d9-2422702f08fa-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "c1e7ea27-3179-4c51-a9d9-2422702f08fa" (UID: "c1e7ea27-3179-4c51-a9d9-2422702f08fa"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:43 crc kubenswrapper[4558]: I0120 17:51:43.161180 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c1e7ea27-3179-4c51-a9d9-2422702f08fa-config-data" (OuterVolumeSpecName: "config-data") pod "c1e7ea27-3179-4c51-a9d9-2422702f08fa" (UID: "c1e7ea27-3179-4c51-a9d9-2422702f08fa"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:43 crc kubenswrapper[4558]: I0120 17:51:43.205428 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c1e7ea27-3179-4c51-a9d9-2422702f08fa-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:43 crc kubenswrapper[4558]: I0120 17:51:43.205461 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c1e7ea27-3179-4c51-a9d9-2422702f08fa-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:43 crc kubenswrapper[4558]: I0120 17:51:43.205473 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c1e7ea27-3179-4c51-a9d9-2422702f08fa-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:43 crc kubenswrapper[4558]: I0120 17:51:43.205486 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c1e7ea27-3179-4c51-a9d9-2422702f08fa-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:43 crc kubenswrapper[4558]: I0120 17:51:43.205501 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4dtkf\" (UniqueName: \"kubernetes.io/projected/c1e7ea27-3179-4c51-a9d9-2422702f08fa-kube-api-access-4dtkf\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:43 crc kubenswrapper[4558]: I0120 17:51:43.205511 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c1e7ea27-3179-4c51-a9d9-2422702f08fa-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:43 crc kubenswrapper[4558]: I0120 17:51:43.205520 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c1e7ea27-3179-4c51-a9d9-2422702f08fa-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:43 crc kubenswrapper[4558]: I0120 17:51:43.468219 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"1e9bc275-8603-4dc8-ab3f-9d63bea731a0","Type":"ContainerStarted","Data":"d22d7692b054b8eebf218d2f206edb4ca1bee71cbfe4240fe94349103e4bffe2"} Jan 20 17:51:43 crc kubenswrapper[4558]: I0120 17:51:43.470032 4558 generic.go:334] "Generic (PLEG): container finished" podID="c1e7ea27-3179-4c51-a9d9-2422702f08fa" containerID="db761370e2918d26be14f649374ac37f0e12f0edc29e9d9fbdd1676c2fcd0724" exitCode=0 Jan 20 17:51:43 crc kubenswrapper[4558]: I0120 17:51:43.470082 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-7cd9889bc6-72r2h" event={"ID":"c1e7ea27-3179-4c51-a9d9-2422702f08fa","Type":"ContainerDied","Data":"db761370e2918d26be14f649374ac37f0e12f0edc29e9d9fbdd1676c2fcd0724"} Jan 20 17:51:43 crc kubenswrapper[4558]: I0120 17:51:43.470141 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-7cd9889bc6-72r2h" event={"ID":"c1e7ea27-3179-4c51-a9d9-2422702f08fa","Type":"ContainerDied","Data":"2f2abebcba696cfeaa6578be95ccfa2ecbfa3b1fa674fbde022073a9ed91eb41"} Jan 20 17:51:43 crc kubenswrapper[4558]: I0120 17:51:43.470146 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-api-7cd9889bc6-72r2h" Jan 20 17:51:43 crc kubenswrapper[4558]: I0120 17:51:43.470184 4558 scope.go:117] "RemoveContainer" containerID="db761370e2918d26be14f649374ac37f0e12f0edc29e9d9fbdd1676c2fcd0724" Jan 20 17:51:43 crc kubenswrapper[4558]: I0120 17:51:43.496651 4558 scope.go:117] "RemoveContainer" containerID="9c6a604544c830e2edb70b9f7c0f4ad6688b94a6a3eaabf65c2198c2e9f86440" Jan 20 17:51:43 crc kubenswrapper[4558]: I0120 17:51:43.507852 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-api-7cd9889bc6-72r2h"] Jan 20 17:51:43 crc kubenswrapper[4558]: I0120 17:51:43.524702 4558 scope.go:117] "RemoveContainer" containerID="db761370e2918d26be14f649374ac37f0e12f0edc29e9d9fbdd1676c2fcd0724" Jan 20 17:51:43 crc kubenswrapper[4558]: E0120 17:51:43.525125 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"db761370e2918d26be14f649374ac37f0e12f0edc29e9d9fbdd1676c2fcd0724\": container with ID starting with db761370e2918d26be14f649374ac37f0e12f0edc29e9d9fbdd1676c2fcd0724 not found: ID does not exist" containerID="db761370e2918d26be14f649374ac37f0e12f0edc29e9d9fbdd1676c2fcd0724" Jan 20 17:51:43 crc kubenswrapper[4558]: I0120 17:51:43.525291 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"db761370e2918d26be14f649374ac37f0e12f0edc29e9d9fbdd1676c2fcd0724"} err="failed to get container status \"db761370e2918d26be14f649374ac37f0e12f0edc29e9d9fbdd1676c2fcd0724\": rpc error: code = NotFound desc = could not find container \"db761370e2918d26be14f649374ac37f0e12f0edc29e9d9fbdd1676c2fcd0724\": container with ID starting with db761370e2918d26be14f649374ac37f0e12f0edc29e9d9fbdd1676c2fcd0724 not found: ID does not exist" Jan 20 17:51:43 crc kubenswrapper[4558]: I0120 17:51:43.525367 4558 scope.go:117] "RemoveContainer" containerID="9c6a604544c830e2edb70b9f7c0f4ad6688b94a6a3eaabf65c2198c2e9f86440" Jan 20 17:51:43 crc kubenswrapper[4558]: E0120 17:51:43.525622 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9c6a604544c830e2edb70b9f7c0f4ad6688b94a6a3eaabf65c2198c2e9f86440\": container with ID starting with 9c6a604544c830e2edb70b9f7c0f4ad6688b94a6a3eaabf65c2198c2e9f86440 not found: ID does not exist" containerID="9c6a604544c830e2edb70b9f7c0f4ad6688b94a6a3eaabf65c2198c2e9f86440" Jan 20 17:51:43 crc kubenswrapper[4558]: I0120 17:51:43.525805 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9c6a604544c830e2edb70b9f7c0f4ad6688b94a6a3eaabf65c2198c2e9f86440"} err="failed to get container status \"9c6a604544c830e2edb70b9f7c0f4ad6688b94a6a3eaabf65c2198c2e9f86440\": rpc error: code = NotFound desc = could not find container \"9c6a604544c830e2edb70b9f7c0f4ad6688b94a6a3eaabf65c2198c2e9f86440\": container with ID starting with 9c6a604544c830e2edb70b9f7c0f4ad6688b94a6a3eaabf65c2198c2e9f86440 not found: ID does not exist" Jan 20 17:51:43 crc kubenswrapper[4558]: I0120 17:51:43.530630 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-api-7cd9889bc6-72r2h"] Jan 20 17:51:43 crc kubenswrapper[4558]: I0120 17:51:43.849442 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:51:43 crc kubenswrapper[4558]: I0120 17:51:43.849527 4558 kubelet.go:2542] "SyncLoop 
(probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:51:44 crc kubenswrapper[4558]: I0120 17:51:44.485714 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"1e9bc275-8603-4dc8-ab3f-9d63bea731a0","Type":"ContainerStarted","Data":"1c5e6f1d1d51002b9cb986abdde79e37e78e644a6a60f676870d827de94f940e"} Jan 20 17:51:44 crc kubenswrapper[4558]: I0120 17:51:44.575920 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c1e7ea27-3179-4c51-a9d9-2422702f08fa" path="/var/lib/kubelet/pods/c1e7ea27-3179-4c51-a9d9-2422702f08fa/volumes" Jan 20 17:51:44 crc kubenswrapper[4558]: I0120 17:51:44.865332 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-metadata-0" podUID="6e7de5b3-4426-4816-9d45-9b4226333dbc" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.1.232:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:51:44 crc kubenswrapper[4558]: I0120 17:51:44.865347 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-metadata-0" podUID="6e7de5b3-4426-4816-9d45-9b4226333dbc" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.1.232:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:51:45 crc kubenswrapper[4558]: I0120 17:51:45.495861 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"1e9bc275-8603-4dc8-ab3f-9d63bea731a0","Type":"ContainerStarted","Data":"71b48e753eec9c326ad87e9648f3d2c2e185d09a418df5033c425c4192b92af4"} Jan 20 17:51:45 crc kubenswrapper[4558]: I0120 17:51:45.496372 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:51:45 crc kubenswrapper[4558]: I0120 17:51:45.520681 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=2.245096121 podStartE2EDuration="5.520653243s" podCreationTimestamp="2026-01-20 17:51:40 +0000 UTC" firstStartedPulling="2026-01-20 17:51:41.554142909 +0000 UTC m=+4195.314480876" lastFinishedPulling="2026-01-20 17:51:44.82970002 +0000 UTC m=+4198.590037998" observedRunningTime="2026-01-20 17:51:45.512579371 +0000 UTC m=+4199.272917338" watchObservedRunningTime="2026-01-20 17:51:45.520653243 +0000 UTC m=+4199.280991209" Jan 20 17:51:45 crc kubenswrapper[4558]: I0120 17:51:45.566372 4558 scope.go:117] "RemoveContainer" containerID="4e744e9f021c29bfddbe31a0a527bc6946c7e7d3595c756668ddc79180474c63" Jan 20 17:51:45 crc kubenswrapper[4558]: E0120 17:51:45.566834 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-scheduler-scheduler\" with CrashLoopBackOff: \"back-off 20s restarting failed container=nova-scheduler-scheduler pod=nova-scheduler-0_openstack-kuttl-tests(d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2)\"" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2" Jan 20 17:51:45 crc kubenswrapper[4558]: I0120 17:51:45.690145 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:51:48 crc kubenswrapper[4558]: I0120 17:51:48.566757 4558 scope.go:117] "RemoveContainer" containerID="a83e386e23548bf8e99d8a3f739f604866a2baad6493f364fe0412f39f677f52" Jan 20 17:51:49 crc 
kubenswrapper[4558]: I0120 17:51:49.536541 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"7c16f0c4-1462-4817-9f74-9e3d93193867","Type":"ContainerStarted","Data":"b5d4742f71c4c1a0e14ccc61fbcd3c7bb3f43da7229787caafda16955ac74e7d"} Jan 20 17:51:49 crc kubenswrapper[4558]: I0120 17:51:49.537181 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:51:49 crc kubenswrapper[4558]: I0120 17:51:49.763928 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:51:49 crc kubenswrapper[4558]: I0120 17:51:49.765213 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:51:49 crc kubenswrapper[4558]: I0120 17:51:49.765377 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:51:49 crc kubenswrapper[4558]: I0120 17:51:49.771873 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:51:50 crc kubenswrapper[4558]: I0120 17:51:50.545225 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:51:50 crc kubenswrapper[4558]: I0120 17:51:50.551244 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:51:51 crc kubenswrapper[4558]: I0120 17:51:51.553786 4558 generic.go:334] "Generic (PLEG): container finished" podID="7c16f0c4-1462-4817-9f74-9e3d93193867" containerID="b5d4742f71c4c1a0e14ccc61fbcd3c7bb3f43da7229787caafda16955ac74e7d" exitCode=1 Jan 20 17:51:51 crc kubenswrapper[4558]: I0120 17:51:51.554695 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"7c16f0c4-1462-4817-9f74-9e3d93193867","Type":"ContainerDied","Data":"b5d4742f71c4c1a0e14ccc61fbcd3c7bb3f43da7229787caafda16955ac74e7d"} Jan 20 17:51:51 crc kubenswrapper[4558]: I0120 17:51:51.554731 4558 scope.go:117] "RemoveContainer" containerID="a83e386e23548bf8e99d8a3f739f604866a2baad6493f364fe0412f39f677f52" Jan 20 17:51:51 crc kubenswrapper[4558]: I0120 17:51:51.555037 4558 scope.go:117] "RemoveContainer" containerID="b5d4742f71c4c1a0e14ccc61fbcd3c7bb3f43da7229787caafda16955ac74e7d" Jan 20 17:51:51 crc kubenswrapper[4558]: E0120 17:51:51.555247 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-cell1-conductor-conductor\" with CrashLoopBackOff: \"back-off 40s restarting failed container=nova-cell1-conductor-conductor pod=nova-cell1-conductor-0_openstack-kuttl-tests(7c16f0c4-1462-4817-9f74-9e3d93193867)\"" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podUID="7c16f0c4-1462-4817-9f74-9e3d93193867" Jan 20 17:51:51 crc kubenswrapper[4558]: I0120 17:51:51.572192 4558 scope.go:117] "RemoveContainer" containerID="70539f03bf1b2ace01f24a9072ce8275fb4f5d92a181a991e07e1469387f4f02" Jan 20 17:51:51 crc kubenswrapper[4558]: E0120 17:51:51.572373 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-cell0-conductor-conductor\" with CrashLoopBackOff: \"back-off 40s restarting failed container=nova-cell0-conductor-conductor pod=nova-cell0-conductor-0_openstack-kuttl-tests(924fa7ce-8d60-4b2f-b62b-d5e146474f71)\"" pod="openstack-kuttl-tests/nova-cell0-conductor-0" 
podUID="924fa7ce-8d60-4b2f-b62b-d5e146474f71" Jan 20 17:51:53 crc kubenswrapper[4558]: I0120 17:51:53.440071 4558 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:51:53 crc kubenswrapper[4558]: I0120 17:51:53.441930 4558 scope.go:117] "RemoveContainer" containerID="b5d4742f71c4c1a0e14ccc61fbcd3c7bb3f43da7229787caafda16955ac74e7d" Jan 20 17:51:53 crc kubenswrapper[4558]: E0120 17:51:53.442549 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-cell1-conductor-conductor\" with CrashLoopBackOff: \"back-off 40s restarting failed container=nova-cell1-conductor-conductor pod=nova-cell1-conductor-0_openstack-kuttl-tests(7c16f0c4-1462-4817-9f74-9e3d93193867)\"" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podUID="7c16f0c4-1462-4817-9f74-9e3d93193867" Jan 20 17:51:53 crc kubenswrapper[4558]: I0120 17:51:53.852256 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:51:53 crc kubenswrapper[4558]: I0120 17:51:53.852659 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:51:53 crc kubenswrapper[4558]: I0120 17:51:53.863935 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:51:53 crc kubenswrapper[4558]: I0120 17:51:53.864117 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/keystone-667f6b846-wn5fk" Jan 20 17:51:53 crc kubenswrapper[4558]: I0120 17:51:53.866444 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:51:53 crc kubenswrapper[4558]: I0120 17:51:53.957158 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-7ff6469d47-k4mhb"] Jan 20 17:51:54 crc kubenswrapper[4558]: I0120 17:51:54.586219 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/keystone-7ff6469d47-k4mhb" podUID="28e95314-bdcd-4cd6-9f3b-10f29aad259d" containerName="keystone-api" containerID="cri-o://6a26fa3d881fdae5f126b67ecdf3cd57c4714eeb300e6b941666a0c25f2581da" gracePeriod=30 Jan 20 17:51:55 crc kubenswrapper[4558]: I0120 17:51:55.926044 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_neutron-78bcff9576-76n9h_6a566db2-a941-48c9-9169-8e2c16cda1ac/neutron-api/0.log" Jan 20 17:51:55 crc kubenswrapper[4558]: I0120 17:51:55.926490 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-78bcff9576-76n9h" Jan 20 17:51:55 crc kubenswrapper[4558]: I0120 17:51:55.967328 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:51:55 crc kubenswrapper[4558]: E0120 17:51:55.967795 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1e7ea27-3179-4c51-a9d9-2422702f08fa" containerName="barbican-api" Jan 20 17:51:55 crc kubenswrapper[4558]: I0120 17:51:55.967819 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1e7ea27-3179-4c51-a9d9-2422702f08fa" containerName="barbican-api" Jan 20 17:51:55 crc kubenswrapper[4558]: E0120 17:51:55.967847 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a566db2-a941-48c9-9169-8e2c16cda1ac" containerName="neutron-httpd" Jan 20 17:51:55 crc kubenswrapper[4558]: I0120 17:51:55.967854 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a566db2-a941-48c9-9169-8e2c16cda1ac" containerName="neutron-httpd" Jan 20 17:51:55 crc kubenswrapper[4558]: E0120 17:51:55.967861 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1e7ea27-3179-4c51-a9d9-2422702f08fa" containerName="barbican-api-log" Jan 20 17:51:55 crc kubenswrapper[4558]: I0120 17:51:55.967867 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1e7ea27-3179-4c51-a9d9-2422702f08fa" containerName="barbican-api-log" Jan 20 17:51:55 crc kubenswrapper[4558]: E0120 17:51:55.967893 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a566db2-a941-48c9-9169-8e2c16cda1ac" containerName="neutron-api" Jan 20 17:51:55 crc kubenswrapper[4558]: I0120 17:51:55.967899 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a566db2-a941-48c9-9169-8e2c16cda1ac" containerName="neutron-api" Jan 20 17:51:55 crc kubenswrapper[4558]: I0120 17:51:55.968132 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="6a566db2-a941-48c9-9169-8e2c16cda1ac" containerName="neutron-api" Jan 20 17:51:55 crc kubenswrapper[4558]: I0120 17:51:55.968159 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c1e7ea27-3179-4c51-a9d9-2422702f08fa" containerName="barbican-api-log" Jan 20 17:51:55 crc kubenswrapper[4558]: I0120 17:51:55.968196 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="6a566db2-a941-48c9-9169-8e2c16cda1ac" containerName="neutron-httpd" Jan 20 17:51:55 crc kubenswrapper[4558]: I0120 17:51:55.968206 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c1e7ea27-3179-4c51-a9d9-2422702f08fa" containerName="barbican-api" Jan 20 17:51:55 crc kubenswrapper[4558]: I0120 17:51:55.968866 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstackclient" Jan 20 17:51:55 crc kubenswrapper[4558]: I0120 17:51:55.972186 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"openstack-config-secret" Jan 20 17:51:55 crc kubenswrapper[4558]: I0120 17:51:55.972357 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"openstackclient-openstackclient-dockercfg-kb7bc" Jan 20 17:51:55 crc kubenswrapper[4558]: I0120 17:51:55.972532 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-config" Jan 20 17:51:55 crc kubenswrapper[4558]: I0120 17:51:55.978945 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:51:56 crc kubenswrapper[4558]: I0120 17:51:56.002253 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:51:56 crc kubenswrapper[4558]: E0120 17:51:56.003313 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[combined-ca-bundle kube-api-access-h9mtx openstack-config openstack-config-secret], unattached volumes=[], failed to process volumes=[combined-ca-bundle kube-api-access-h9mtx openstack-config openstack-config-secret]: context canceled" pod="openstack-kuttl-tests/openstackclient" podUID="04094e60-03f7-4573-8aa3-2cff996529ae" Jan 20 17:51:56 crc kubenswrapper[4558]: I0120 17:51:56.007185 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r8mm9\" (UniqueName: \"kubernetes.io/projected/6a566db2-a941-48c9-9169-8e2c16cda1ac-kube-api-access-r8mm9\") pod \"6a566db2-a941-48c9-9169-8e2c16cda1ac\" (UID: \"6a566db2-a941-48c9-9169-8e2c16cda1ac\") " Jan 20 17:51:56 crc kubenswrapper[4558]: I0120 17:51:56.007222 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/6a566db2-a941-48c9-9169-8e2c16cda1ac-ovndb-tls-certs\") pod \"6a566db2-a941-48c9-9169-8e2c16cda1ac\" (UID: \"6a566db2-a941-48c9-9169-8e2c16cda1ac\") " Jan 20 17:51:56 crc kubenswrapper[4558]: I0120 17:51:56.007246 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6a566db2-a941-48c9-9169-8e2c16cda1ac-public-tls-certs\") pod \"6a566db2-a941-48c9-9169-8e2c16cda1ac\" (UID: \"6a566db2-a941-48c9-9169-8e2c16cda1ac\") " Jan 20 17:51:56 crc kubenswrapper[4558]: I0120 17:51:56.007286 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/6a566db2-a941-48c9-9169-8e2c16cda1ac-httpd-config\") pod \"6a566db2-a941-48c9-9169-8e2c16cda1ac\" (UID: \"6a566db2-a941-48c9-9169-8e2c16cda1ac\") " Jan 20 17:51:56 crc kubenswrapper[4558]: I0120 17:51:56.007321 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/6a566db2-a941-48c9-9169-8e2c16cda1ac-config\") pod \"6a566db2-a941-48c9-9169-8e2c16cda1ac\" (UID: \"6a566db2-a941-48c9-9169-8e2c16cda1ac\") " Jan 20 17:51:56 crc kubenswrapper[4558]: I0120 17:51:56.007372 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6a566db2-a941-48c9-9169-8e2c16cda1ac-combined-ca-bundle\") pod \"6a566db2-a941-48c9-9169-8e2c16cda1ac\" (UID: \"6a566db2-a941-48c9-9169-8e2c16cda1ac\") " Jan 20 
17:51:56 crc kubenswrapper[4558]: I0120 17:51:56.007422 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6a566db2-a941-48c9-9169-8e2c16cda1ac-internal-tls-certs\") pod \"6a566db2-a941-48c9-9169-8e2c16cda1ac\" (UID: \"6a566db2-a941-48c9-9169-8e2c16cda1ac\") " Jan 20 17:51:56 crc kubenswrapper[4558]: I0120 17:51:56.012264 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:51:56 crc kubenswrapper[4558]: I0120 17:51:56.015349 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6a566db2-a941-48c9-9169-8e2c16cda1ac-kube-api-access-r8mm9" (OuterVolumeSpecName: "kube-api-access-r8mm9") pod "6a566db2-a941-48c9-9169-8e2c16cda1ac" (UID: "6a566db2-a941-48c9-9169-8e2c16cda1ac"). InnerVolumeSpecName "kube-api-access-r8mm9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:51:56 crc kubenswrapper[4558]: I0120 17:51:56.017977 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6a566db2-a941-48c9-9169-8e2c16cda1ac-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "6a566db2-a941-48c9-9169-8e2c16cda1ac" (UID: "6a566db2-a941-48c9-9169-8e2c16cda1ac"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:56 crc kubenswrapper[4558]: I0120 17:51:56.033328 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:51:56 crc kubenswrapper[4558]: I0120 17:51:56.034887 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstackclient" Jan 20 17:51:56 crc kubenswrapper[4558]: I0120 17:51:56.052502 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:51:56 crc kubenswrapper[4558]: I0120 17:51:56.084402 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6a566db2-a941-48c9-9169-8e2c16cda1ac-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6a566db2-a941-48c9-9169-8e2c16cda1ac" (UID: "6a566db2-a941-48c9-9169-8e2c16cda1ac"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:56 crc kubenswrapper[4558]: I0120 17:51:56.085565 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6a566db2-a941-48c9-9169-8e2c16cda1ac-config" (OuterVolumeSpecName: "config") pod "6a566db2-a941-48c9-9169-8e2c16cda1ac" (UID: "6a566db2-a941-48c9-9169-8e2c16cda1ac"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:56 crc kubenswrapper[4558]: I0120 17:51:56.086259 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6a566db2-a941-48c9-9169-8e2c16cda1ac-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "6a566db2-a941-48c9-9169-8e2c16cda1ac" (UID: "6a566db2-a941-48c9-9169-8e2c16cda1ac"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:56 crc kubenswrapper[4558]: I0120 17:51:56.086427 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6a566db2-a941-48c9-9169-8e2c16cda1ac-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "6a566db2-a941-48c9-9169-8e2c16cda1ac" (UID: "6a566db2-a941-48c9-9169-8e2c16cda1ac"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:56 crc kubenswrapper[4558]: I0120 17:51:56.103894 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6a566db2-a941-48c9-9169-8e2c16cda1ac-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "6a566db2-a941-48c9-9169-8e2c16cda1ac" (UID: "6a566db2-a941-48c9-9169-8e2c16cda1ac"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:56 crc kubenswrapper[4558]: I0120 17:51:56.110469 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10d0164b-faa3-408c-9b22-25d1cff2c4e3-combined-ca-bundle\") pod \"openstackclient\" (UID: \"10d0164b-faa3-408c-9b22-25d1cff2c4e3\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:51:56 crc kubenswrapper[4558]: I0120 17:51:56.110658 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/10d0164b-faa3-408c-9b22-25d1cff2c4e3-openstack-config\") pod \"openstackclient\" (UID: \"10d0164b-faa3-408c-9b22-25d1cff2c4e3\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:51:56 crc kubenswrapper[4558]: I0120 17:51:56.110866 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qm2b9\" (UniqueName: \"kubernetes.io/projected/10d0164b-faa3-408c-9b22-25d1cff2c4e3-kube-api-access-qm2b9\") pod \"openstackclient\" (UID: \"10d0164b-faa3-408c-9b22-25d1cff2c4e3\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:51:56 crc kubenswrapper[4558]: I0120 17:51:56.110978 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/10d0164b-faa3-408c-9b22-25d1cff2c4e3-openstack-config-secret\") pod \"openstackclient\" (UID: \"10d0164b-faa3-408c-9b22-25d1cff2c4e3\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:51:56 crc kubenswrapper[4558]: I0120 17:51:56.111187 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r8mm9\" (UniqueName: \"kubernetes.io/projected/6a566db2-a941-48c9-9169-8e2c16cda1ac-kube-api-access-r8mm9\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:56 crc kubenswrapper[4558]: I0120 17:51:56.111208 4558 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/6a566db2-a941-48c9-9169-8e2c16cda1ac-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:56 crc kubenswrapper[4558]: I0120 17:51:56.111220 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6a566db2-a941-48c9-9169-8e2c16cda1ac-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:56 crc kubenswrapper[4558]: I0120 17:51:56.111232 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: 
\"kubernetes.io/secret/6a566db2-a941-48c9-9169-8e2c16cda1ac-httpd-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:56 crc kubenswrapper[4558]: I0120 17:51:56.111243 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/6a566db2-a941-48c9-9169-8e2c16cda1ac-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:56 crc kubenswrapper[4558]: I0120 17:51:56.111254 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6a566db2-a941-48c9-9169-8e2c16cda1ac-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:56 crc kubenswrapper[4558]: I0120 17:51:56.111264 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6a566db2-a941-48c9-9169-8e2c16cda1ac-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:56 crc kubenswrapper[4558]: I0120 17:51:56.214271 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qm2b9\" (UniqueName: \"kubernetes.io/projected/10d0164b-faa3-408c-9b22-25d1cff2c4e3-kube-api-access-qm2b9\") pod \"openstackclient\" (UID: \"10d0164b-faa3-408c-9b22-25d1cff2c4e3\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:51:56 crc kubenswrapper[4558]: I0120 17:51:56.214390 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/10d0164b-faa3-408c-9b22-25d1cff2c4e3-openstack-config-secret\") pod \"openstackclient\" (UID: \"10d0164b-faa3-408c-9b22-25d1cff2c4e3\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:51:56 crc kubenswrapper[4558]: I0120 17:51:56.214608 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10d0164b-faa3-408c-9b22-25d1cff2c4e3-combined-ca-bundle\") pod \"openstackclient\" (UID: \"10d0164b-faa3-408c-9b22-25d1cff2c4e3\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:51:56 crc kubenswrapper[4558]: I0120 17:51:56.214740 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/10d0164b-faa3-408c-9b22-25d1cff2c4e3-openstack-config\") pod \"openstackclient\" (UID: \"10d0164b-faa3-408c-9b22-25d1cff2c4e3\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:51:56 crc kubenswrapper[4558]: I0120 17:51:56.215691 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/10d0164b-faa3-408c-9b22-25d1cff2c4e3-openstack-config\") pod \"openstackclient\" (UID: \"10d0164b-faa3-408c-9b22-25d1cff2c4e3\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:51:56 crc kubenswrapper[4558]: I0120 17:51:56.221722 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/10d0164b-faa3-408c-9b22-25d1cff2c4e3-openstack-config-secret\") pod \"openstackclient\" (UID: \"10d0164b-faa3-408c-9b22-25d1cff2c4e3\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:51:56 crc kubenswrapper[4558]: I0120 17:51:56.221761 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10d0164b-faa3-408c-9b22-25d1cff2c4e3-combined-ca-bundle\") pod \"openstackclient\" (UID: \"10d0164b-faa3-408c-9b22-25d1cff2c4e3\") " 
pod="openstack-kuttl-tests/openstackclient" Jan 20 17:51:56 crc kubenswrapper[4558]: I0120 17:51:56.233516 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qm2b9\" (UniqueName: \"kubernetes.io/projected/10d0164b-faa3-408c-9b22-25d1cff2c4e3-kube-api-access-qm2b9\") pod \"openstackclient\" (UID: \"10d0164b-faa3-408c-9b22-25d1cff2c4e3\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:51:56 crc kubenswrapper[4558]: I0120 17:51:56.403410 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstackclient" Jan 20 17:51:56 crc kubenswrapper[4558]: I0120 17:51:56.587484 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="04094e60-03f7-4573-8aa3-2cff996529ae" path="/var/lib/kubelet/pods/04094e60-03f7-4573-8aa3-2cff996529ae/volumes" Jan 20 17:51:56 crc kubenswrapper[4558]: I0120 17:51:56.623350 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_neutron-78bcff9576-76n9h_6a566db2-a941-48c9-9169-8e2c16cda1ac/neutron-api/0.log" Jan 20 17:51:56 crc kubenswrapper[4558]: I0120 17:51:56.623402 4558 generic.go:334] "Generic (PLEG): container finished" podID="6a566db2-a941-48c9-9169-8e2c16cda1ac" containerID="d6eeb7c903fe5ccb79f100c77cfb1bc792eacae8fb9dc8447344d2655524771b" exitCode=137 Jan 20 17:51:56 crc kubenswrapper[4558]: I0120 17:51:56.623460 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstackclient" Jan 20 17:51:56 crc kubenswrapper[4558]: I0120 17:51:56.624187 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-78bcff9576-76n9h" Jan 20 17:51:56 crc kubenswrapper[4558]: I0120 17:51:56.624918 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-78bcff9576-76n9h" event={"ID":"6a566db2-a941-48c9-9169-8e2c16cda1ac","Type":"ContainerDied","Data":"d6eeb7c903fe5ccb79f100c77cfb1bc792eacae8fb9dc8447344d2655524771b"} Jan 20 17:51:56 crc kubenswrapper[4558]: I0120 17:51:56.624952 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-78bcff9576-76n9h" event={"ID":"6a566db2-a941-48c9-9169-8e2c16cda1ac","Type":"ContainerDied","Data":"e9c9be4776e024a33d47c27e4ba8c11de91438fa621402fd89269283b84d4e30"} Jan 20 17:51:56 crc kubenswrapper[4558]: I0120 17:51:56.624974 4558 scope.go:117] "RemoveContainer" containerID="fe78548aaf34b40e51653f6f6cc0aba6de558e32992d14ccf662ce044e1157f4" Jan 20 17:51:56 crc kubenswrapper[4558]: I0120 17:51:56.679880 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstackclient" Jan 20 17:51:56 crc kubenswrapper[4558]: I0120 17:51:56.688910 4558 scope.go:117] "RemoveContainer" containerID="d6eeb7c903fe5ccb79f100c77cfb1bc792eacae8fb9dc8447344d2655524771b" Jan 20 17:51:56 crc kubenswrapper[4558]: I0120 17:51:56.696338 4558 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack-kuttl-tests/openstackclient" oldPodUID="04094e60-03f7-4573-8aa3-2cff996529ae" podUID="10d0164b-faa3-408c-9b22-25d1cff2c4e3" Jan 20 17:51:56 crc kubenswrapper[4558]: I0120 17:51:56.700153 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-78bcff9576-76n9h"] Jan 20 17:51:56 crc kubenswrapper[4558]: I0120 17:51:56.712982 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-78bcff9576-76n9h"] Jan 20 17:51:56 crc kubenswrapper[4558]: I0120 17:51:56.725178 4558 scope.go:117] "RemoveContainer" containerID="fe78548aaf34b40e51653f6f6cc0aba6de558e32992d14ccf662ce044e1157f4" Jan 20 17:51:56 crc kubenswrapper[4558]: E0120 17:51:56.725905 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fe78548aaf34b40e51653f6f6cc0aba6de558e32992d14ccf662ce044e1157f4\": container with ID starting with fe78548aaf34b40e51653f6f6cc0aba6de558e32992d14ccf662ce044e1157f4 not found: ID does not exist" containerID="fe78548aaf34b40e51653f6f6cc0aba6de558e32992d14ccf662ce044e1157f4" Jan 20 17:51:56 crc kubenswrapper[4558]: I0120 17:51:56.725952 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fe78548aaf34b40e51653f6f6cc0aba6de558e32992d14ccf662ce044e1157f4"} err="failed to get container status \"fe78548aaf34b40e51653f6f6cc0aba6de558e32992d14ccf662ce044e1157f4\": rpc error: code = NotFound desc = could not find container \"fe78548aaf34b40e51653f6f6cc0aba6de558e32992d14ccf662ce044e1157f4\": container with ID starting with fe78548aaf34b40e51653f6f6cc0aba6de558e32992d14ccf662ce044e1157f4 not found: ID does not exist" Jan 20 17:51:56 crc kubenswrapper[4558]: I0120 17:51:56.725991 4558 scope.go:117] "RemoveContainer" containerID="d6eeb7c903fe5ccb79f100c77cfb1bc792eacae8fb9dc8447344d2655524771b" Jan 20 17:51:56 crc kubenswrapper[4558]: E0120 17:51:56.726558 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d6eeb7c903fe5ccb79f100c77cfb1bc792eacae8fb9dc8447344d2655524771b\": container with ID starting with d6eeb7c903fe5ccb79f100c77cfb1bc792eacae8fb9dc8447344d2655524771b not found: ID does not exist" containerID="d6eeb7c903fe5ccb79f100c77cfb1bc792eacae8fb9dc8447344d2655524771b" Jan 20 17:51:56 crc kubenswrapper[4558]: I0120 17:51:56.726583 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d6eeb7c903fe5ccb79f100c77cfb1bc792eacae8fb9dc8447344d2655524771b"} err="failed to get container status \"d6eeb7c903fe5ccb79f100c77cfb1bc792eacae8fb9dc8447344d2655524771b\": rpc error: code = NotFound desc = could not find container \"d6eeb7c903fe5ccb79f100c77cfb1bc792eacae8fb9dc8447344d2655524771b\": container with ID starting with d6eeb7c903fe5ccb79f100c77cfb1bc792eacae8fb9dc8447344d2655524771b not found: ID does not exist" Jan 20 17:51:56 crc kubenswrapper[4558]: W0120 17:51:56.854023 4558 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod10d0164b_faa3_408c_9b22_25d1cff2c4e3.slice/crio-75348b227a74bbf20d908d85b7e090babca26a50caf38c264db1247663d20fa0 WatchSource:0}: Error finding container 75348b227a74bbf20d908d85b7e090babca26a50caf38c264db1247663d20fa0: Status 404 returned error can't find the container with id 75348b227a74bbf20d908d85b7e090babca26a50caf38c264db1247663d20fa0 Jan 20 17:51:56 crc kubenswrapper[4558]: I0120 17:51:56.858983 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:51:57 crc kubenswrapper[4558]: I0120 17:51:57.329597 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 17:51:57 crc kubenswrapper[4558]: I0120 17:51:57.329685 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 17:51:57 crc kubenswrapper[4558]: I0120 17:51:57.329759 4558 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" Jan 20 17:51:57 crc kubenswrapper[4558]: I0120 17:51:57.331019 4558 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"4ea7fa61d8b21007a044345acec89fcebe56c2921cf0f76ff2002f44bdc88ede"} pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 20 17:51:57 crc kubenswrapper[4558]: I0120 17:51:57.331097 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" containerID="cri-o://4ea7fa61d8b21007a044345acec89fcebe56c2921cf0f76ff2002f44bdc88ede" gracePeriod=600 Jan 20 17:51:57 crc kubenswrapper[4558]: E0120 17:51:57.451203 4558 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod68337d27_3fa6_4a29_88b0_82e60c3739eb.slice/crio-conmon-4ea7fa61d8b21007a044345acec89fcebe56c2921cf0f76ff2002f44bdc88ede.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod68337d27_3fa6_4a29_88b0_82e60c3739eb.slice/crio-4ea7fa61d8b21007a044345acec89fcebe56c2921cf0f76ff2002f44bdc88ede.scope\": RecentStats: unable to find data in memory cache]" Jan 20 17:51:57 crc kubenswrapper[4558]: E0120 17:51:57.452194 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:51:57 crc kubenswrapper[4558]: 
I0120 17:51:57.640699 4558 generic.go:334] "Generic (PLEG): container finished" podID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerID="4ea7fa61d8b21007a044345acec89fcebe56c2921cf0f76ff2002f44bdc88ede" exitCode=0 Jan 20 17:51:57 crc kubenswrapper[4558]: I0120 17:51:57.640754 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerDied","Data":"4ea7fa61d8b21007a044345acec89fcebe56c2921cf0f76ff2002f44bdc88ede"} Jan 20 17:51:57 crc kubenswrapper[4558]: I0120 17:51:57.640858 4558 scope.go:117] "RemoveContainer" containerID="cf4cc5411787db46440d23f9870d3296fdb39888c50ddb23f460003bc8a70072" Jan 20 17:51:57 crc kubenswrapper[4558]: I0120 17:51:57.642859 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstackclient" event={"ID":"10d0164b-faa3-408c-9b22-25d1cff2c4e3","Type":"ContainerStarted","Data":"ff027b2e7e7f3a64a246665fc1fbace52470e2c0c26cabfe23274befd6dfbcd9"} Jan 20 17:51:57 crc kubenswrapper[4558]: I0120 17:51:57.642916 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstackclient" event={"ID":"10d0164b-faa3-408c-9b22-25d1cff2c4e3","Type":"ContainerStarted","Data":"75348b227a74bbf20d908d85b7e090babca26a50caf38c264db1247663d20fa0"} Jan 20 17:51:57 crc kubenswrapper[4558]: I0120 17:51:57.642884 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstackclient" Jan 20 17:51:57 crc kubenswrapper[4558]: I0120 17:51:57.643560 4558 scope.go:117] "RemoveContainer" containerID="4ea7fa61d8b21007a044345acec89fcebe56c2921cf0f76ff2002f44bdc88ede" Jan 20 17:51:57 crc kubenswrapper[4558]: E0120 17:51:57.644214 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:51:57 crc kubenswrapper[4558]: I0120 17:51:57.684371 4558 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack-kuttl-tests/openstackclient" oldPodUID="04094e60-03f7-4573-8aa3-2cff996529ae" podUID="10d0164b-faa3-408c-9b22-25d1cff2c4e3" Jan 20 17:51:57 crc kubenswrapper[4558]: I0120 17:51:57.685428 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/openstackclient" podStartSLOduration=1.685412173 podStartE2EDuration="1.685412173s" podCreationTimestamp="2026-01-20 17:51:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:51:57.681468407 +0000 UTC m=+4211.441806374" watchObservedRunningTime="2026-01-20 17:51:57.685412173 +0000 UTC m=+4211.445750139" Jan 20 17:51:57 crc kubenswrapper[4558]: I0120 17:51:57.742363 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/keystone-7ff6469d47-k4mhb" podUID="28e95314-bdcd-4cd6-9f3b-10f29aad259d" containerName="keystone-api" probeResult="failure" output="Get \"https://10.217.1.132:5000/v3\": read tcp 10.217.0.2:44424->10.217.1.132:5000: read: connection reset by peer" Jan 20 17:51:58 crc kubenswrapper[4558]: I0120 17:51:58.577555 4558 
kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6a566db2-a941-48c9-9169-8e2c16cda1ac" path="/var/lib/kubelet/pods/6a566db2-a941-48c9-9169-8e2c16cda1ac/volumes" Jan 20 17:51:58 crc kubenswrapper[4558]: I0120 17:51:58.659182 4558 generic.go:334] "Generic (PLEG): container finished" podID="28e95314-bdcd-4cd6-9f3b-10f29aad259d" containerID="6a26fa3d881fdae5f126b67ecdf3cd57c4714eeb300e6b941666a0c25f2581da" exitCode=0 Jan 20 17:51:58 crc kubenswrapper[4558]: I0120 17:51:58.659209 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-7ff6469d47-k4mhb" event={"ID":"28e95314-bdcd-4cd6-9f3b-10f29aad259d","Type":"ContainerDied","Data":"6a26fa3d881fdae5f126b67ecdf3cd57c4714eeb300e6b941666a0c25f2581da"} Jan 20 17:51:58 crc kubenswrapper[4558]: I0120 17:51:58.659265 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-7ff6469d47-k4mhb" event={"ID":"28e95314-bdcd-4cd6-9f3b-10f29aad259d","Type":"ContainerDied","Data":"37399d5b1aab12040dcaa1e2fbd0da0499be5dc684b0eac826b6ed9e5cc88b70"} Jan 20 17:51:58 crc kubenswrapper[4558]: I0120 17:51:58.659285 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="37399d5b1aab12040dcaa1e2fbd0da0499be5dc684b0eac826b6ed9e5cc88b70" Jan 20 17:51:58 crc kubenswrapper[4558]: I0120 17:51:58.708138 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-7ff6469d47-k4mhb" Jan 20 17:51:58 crc kubenswrapper[4558]: I0120 17:51:58.791791 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/28e95314-bdcd-4cd6-9f3b-10f29aad259d-internal-tls-certs\") pod \"28e95314-bdcd-4cd6-9f3b-10f29aad259d\" (UID: \"28e95314-bdcd-4cd6-9f3b-10f29aad259d\") " Jan 20 17:51:58 crc kubenswrapper[4558]: I0120 17:51:58.791832 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/28e95314-bdcd-4cd6-9f3b-10f29aad259d-scripts\") pod \"28e95314-bdcd-4cd6-9f3b-10f29aad259d\" (UID: \"28e95314-bdcd-4cd6-9f3b-10f29aad259d\") " Jan 20 17:51:58 crc kubenswrapper[4558]: I0120 17:51:58.791898 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/28e95314-bdcd-4cd6-9f3b-10f29aad259d-public-tls-certs\") pod \"28e95314-bdcd-4cd6-9f3b-10f29aad259d\" (UID: \"28e95314-bdcd-4cd6-9f3b-10f29aad259d\") " Jan 20 17:51:58 crc kubenswrapper[4558]: I0120 17:51:58.792028 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/28e95314-bdcd-4cd6-9f3b-10f29aad259d-fernet-keys\") pod \"28e95314-bdcd-4cd6-9f3b-10f29aad259d\" (UID: \"28e95314-bdcd-4cd6-9f3b-10f29aad259d\") " Jan 20 17:51:58 crc kubenswrapper[4558]: I0120 17:51:58.792102 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z4fr9\" (UniqueName: \"kubernetes.io/projected/28e95314-bdcd-4cd6-9f3b-10f29aad259d-kube-api-access-z4fr9\") pod \"28e95314-bdcd-4cd6-9f3b-10f29aad259d\" (UID: \"28e95314-bdcd-4cd6-9f3b-10f29aad259d\") " Jan 20 17:51:58 crc kubenswrapper[4558]: I0120 17:51:58.792139 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/28e95314-bdcd-4cd6-9f3b-10f29aad259d-config-data\") pod 
\"28e95314-bdcd-4cd6-9f3b-10f29aad259d\" (UID: \"28e95314-bdcd-4cd6-9f3b-10f29aad259d\") " Jan 20 17:51:58 crc kubenswrapper[4558]: I0120 17:51:58.792267 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/28e95314-bdcd-4cd6-9f3b-10f29aad259d-credential-keys\") pod \"28e95314-bdcd-4cd6-9f3b-10f29aad259d\" (UID: \"28e95314-bdcd-4cd6-9f3b-10f29aad259d\") " Jan 20 17:51:58 crc kubenswrapper[4558]: I0120 17:51:58.792350 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28e95314-bdcd-4cd6-9f3b-10f29aad259d-combined-ca-bundle\") pod \"28e95314-bdcd-4cd6-9f3b-10f29aad259d\" (UID: \"28e95314-bdcd-4cd6-9f3b-10f29aad259d\") " Jan 20 17:51:58 crc kubenswrapper[4558]: I0120 17:51:58.798943 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/28e95314-bdcd-4cd6-9f3b-10f29aad259d-scripts" (OuterVolumeSpecName: "scripts") pod "28e95314-bdcd-4cd6-9f3b-10f29aad259d" (UID: "28e95314-bdcd-4cd6-9f3b-10f29aad259d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:58 crc kubenswrapper[4558]: I0120 17:51:58.799859 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/28e95314-bdcd-4cd6-9f3b-10f29aad259d-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "28e95314-bdcd-4cd6-9f3b-10f29aad259d" (UID: "28e95314-bdcd-4cd6-9f3b-10f29aad259d"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:58 crc kubenswrapper[4558]: I0120 17:51:58.804528 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/28e95314-bdcd-4cd6-9f3b-10f29aad259d-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "28e95314-bdcd-4cd6-9f3b-10f29aad259d" (UID: "28e95314-bdcd-4cd6-9f3b-10f29aad259d"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:58 crc kubenswrapper[4558]: I0120 17:51:58.807362 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/28e95314-bdcd-4cd6-9f3b-10f29aad259d-kube-api-access-z4fr9" (OuterVolumeSpecName: "kube-api-access-z4fr9") pod "28e95314-bdcd-4cd6-9f3b-10f29aad259d" (UID: "28e95314-bdcd-4cd6-9f3b-10f29aad259d"). InnerVolumeSpecName "kube-api-access-z4fr9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:51:58 crc kubenswrapper[4558]: I0120 17:51:58.820469 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/28e95314-bdcd-4cd6-9f3b-10f29aad259d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "28e95314-bdcd-4cd6-9f3b-10f29aad259d" (UID: "28e95314-bdcd-4cd6-9f3b-10f29aad259d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:58 crc kubenswrapper[4558]: I0120 17:51:58.822520 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/28e95314-bdcd-4cd6-9f3b-10f29aad259d-config-data" (OuterVolumeSpecName: "config-data") pod "28e95314-bdcd-4cd6-9f3b-10f29aad259d" (UID: "28e95314-bdcd-4cd6-9f3b-10f29aad259d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:58 crc kubenswrapper[4558]: I0120 17:51:58.837423 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/28e95314-bdcd-4cd6-9f3b-10f29aad259d-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "28e95314-bdcd-4cd6-9f3b-10f29aad259d" (UID: "28e95314-bdcd-4cd6-9f3b-10f29aad259d"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:58 crc kubenswrapper[4558]: I0120 17:51:58.843345 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/28e95314-bdcd-4cd6-9f3b-10f29aad259d-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "28e95314-bdcd-4cd6-9f3b-10f29aad259d" (UID: "28e95314-bdcd-4cd6-9f3b-10f29aad259d"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:51:58 crc kubenswrapper[4558]: I0120 17:51:58.895615 4558 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/28e95314-bdcd-4cd6-9f3b-10f29aad259d-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:58 crc kubenswrapper[4558]: I0120 17:51:58.895660 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28e95314-bdcd-4cd6-9f3b-10f29aad259d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:58 crc kubenswrapper[4558]: I0120 17:51:58.895671 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/28e95314-bdcd-4cd6-9f3b-10f29aad259d-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:58 crc kubenswrapper[4558]: I0120 17:51:58.895685 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/28e95314-bdcd-4cd6-9f3b-10f29aad259d-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:58 crc kubenswrapper[4558]: I0120 17:51:58.895696 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/28e95314-bdcd-4cd6-9f3b-10f29aad259d-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:58 crc kubenswrapper[4558]: I0120 17:51:58.895705 4558 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/28e95314-bdcd-4cd6-9f3b-10f29aad259d-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:58 crc kubenswrapper[4558]: I0120 17:51:58.895717 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z4fr9\" (UniqueName: \"kubernetes.io/projected/28e95314-bdcd-4cd6-9f3b-10f29aad259d-kube-api-access-z4fr9\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:58 crc kubenswrapper[4558]: I0120 17:51:58.895729 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/28e95314-bdcd-4cd6-9f3b-10f29aad259d-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:51:59 crc kubenswrapper[4558]: I0120 17:51:59.347598 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/neutron-7578dc9646-4xs8n" podUID="cf356652-86c3-4fff-acda-805974e723a8" containerName="neutron-httpd" probeResult="failure" output="Get \"https://10.217.1.229:9696/\": dial tcp 10.217.1.229:9696: connect: connection refused" Jan 20 17:51:59 crc kubenswrapper[4558]: I0120 17:51:59.566471 4558 scope.go:117] "RemoveContainer" 
containerID="4e744e9f021c29bfddbe31a0a527bc6946c7e7d3595c756668ddc79180474c63" Jan 20 17:51:59 crc kubenswrapper[4558]: I0120 17:51:59.673264 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-7ff6469d47-k4mhb" Jan 20 17:51:59 crc kubenswrapper[4558]: I0120 17:51:59.713286 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-7ff6469d47-k4mhb"] Jan 20 17:51:59 crc kubenswrapper[4558]: I0120 17:51:59.737778 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-7ff6469d47-k4mhb"] Jan 20 17:52:00 crc kubenswrapper[4558]: I0120 17:52:00.212155 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/neutron-75676d645b-l2sp7" Jan 20 17:52:00 crc kubenswrapper[4558]: I0120 17:52:00.274094 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-6bd94d8c7d-2mkc6"] Jan 20 17:52:00 crc kubenswrapper[4558]: I0120 17:52:00.274616 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-6bd94d8c7d-2mkc6" podUID="2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9" containerName="neutron-api" containerID="cri-o://94aea94556475245fb3ed99b92c774a78bc94fc47ab42792ec2c8ac17ee6298a" gracePeriod=30 Jan 20 17:52:00 crc kubenswrapper[4558]: I0120 17:52:00.277350 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-6bd94d8c7d-2mkc6" podUID="2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9" containerName="neutron-httpd" containerID="cri-o://241ee82c0eec1687f073958ac6fb291483d97abeacad8f032be37d7ad893499a" gracePeriod=30 Jan 20 17:52:00 crc kubenswrapper[4558]: I0120 17:52:00.575026 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="28e95314-bdcd-4cd6-9f3b-10f29aad259d" path="/var/lib/kubelet/pods/28e95314-bdcd-4cd6-9f3b-10f29aad259d/volumes" Jan 20 17:52:00 crc kubenswrapper[4558]: I0120 17:52:00.688258 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2","Type":"ContainerStarted","Data":"ce0da192b65bf7c5e90dd832e0212db51fd4f3e04276e89c0f7b9256d88b752a"} Jan 20 17:52:00 crc kubenswrapper[4558]: I0120 17:52:00.691321 4558 generic.go:334] "Generic (PLEG): container finished" podID="2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9" containerID="241ee82c0eec1687f073958ac6fb291483d97abeacad8f032be37d7ad893499a" exitCode=0 Jan 20 17:52:00 crc kubenswrapper[4558]: I0120 17:52:00.691385 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-6bd94d8c7d-2mkc6" event={"ID":"2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9","Type":"ContainerDied","Data":"241ee82c0eec1687f073958ac6fb291483d97abeacad8f032be37d7ad893499a"} Jan 20 17:52:01 crc kubenswrapper[4558]: I0120 17:52:01.665679 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_neutron-7578dc9646-4xs8n_cf356652-86c3-4fff-acda-805974e723a8/neutron-api/0.log" Jan 20 17:52:01 crc kubenswrapper[4558]: I0120 17:52:01.666090 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-7578dc9646-4xs8n" Jan 20 17:52:01 crc kubenswrapper[4558]: I0120 17:52:01.701941 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_neutron-7578dc9646-4xs8n_cf356652-86c3-4fff-acda-805974e723a8/neutron-api/0.log" Jan 20 17:52:01 crc kubenswrapper[4558]: I0120 17:52:01.701994 4558 generic.go:334] "Generic (PLEG): container finished" podID="cf356652-86c3-4fff-acda-805974e723a8" containerID="28d40e4f867e20e57ed2e72e419502ac3f2707b00ad65322f68276cf23847472" exitCode=137 Jan 20 17:52:01 crc kubenswrapper[4558]: I0120 17:52:01.702035 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-7578dc9646-4xs8n" event={"ID":"cf356652-86c3-4fff-acda-805974e723a8","Type":"ContainerDied","Data":"28d40e4f867e20e57ed2e72e419502ac3f2707b00ad65322f68276cf23847472"} Jan 20 17:52:01 crc kubenswrapper[4558]: I0120 17:52:01.702075 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-7578dc9646-4xs8n" event={"ID":"cf356652-86c3-4fff-acda-805974e723a8","Type":"ContainerDied","Data":"c55b7311fb58f910d443688df31dff82c4295610e48fdd174af9e08351e45cff"} Jan 20 17:52:01 crc kubenswrapper[4558]: I0120 17:52:01.702095 4558 scope.go:117] "RemoveContainer" containerID="49b03c8d23d852acad3d7b174f8c3b40f90b13f7e611e5c0e9fc8ab0ad8edda2" Jan 20 17:52:01 crc kubenswrapper[4558]: I0120 17:52:01.702099 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-7578dc9646-4xs8n" Jan 20 17:52:01 crc kubenswrapper[4558]: I0120 17:52:01.729952 4558 scope.go:117] "RemoveContainer" containerID="28d40e4f867e20e57ed2e72e419502ac3f2707b00ad65322f68276cf23847472" Jan 20 17:52:01 crc kubenswrapper[4558]: I0120 17:52:01.764264 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf356652-86c3-4fff-acda-805974e723a8-public-tls-certs\") pod \"cf356652-86c3-4fff-acda-805974e723a8\" (UID: \"cf356652-86c3-4fff-acda-805974e723a8\") " Jan 20 17:52:01 crc kubenswrapper[4558]: I0120 17:52:01.764428 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf356652-86c3-4fff-acda-805974e723a8-ovndb-tls-certs\") pod \"cf356652-86c3-4fff-acda-805974e723a8\" (UID: \"cf356652-86c3-4fff-acda-805974e723a8\") " Jan 20 17:52:01 crc kubenswrapper[4558]: I0120 17:52:01.764583 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/cf356652-86c3-4fff-acda-805974e723a8-httpd-config\") pod \"cf356652-86c3-4fff-acda-805974e723a8\" (UID: \"cf356652-86c3-4fff-acda-805974e723a8\") " Jan 20 17:52:01 crc kubenswrapper[4558]: I0120 17:52:01.764731 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf356652-86c3-4fff-acda-805974e723a8-internal-tls-certs\") pod \"cf356652-86c3-4fff-acda-805974e723a8\" (UID: \"cf356652-86c3-4fff-acda-805974e723a8\") " Jan 20 17:52:01 crc kubenswrapper[4558]: I0120 17:52:01.765383 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htcqf\" (UniqueName: \"kubernetes.io/projected/cf356652-86c3-4fff-acda-805974e723a8-kube-api-access-htcqf\") pod \"cf356652-86c3-4fff-acda-805974e723a8\" (UID: \"cf356652-86c3-4fff-acda-805974e723a8\") " Jan 20 
17:52:01 crc kubenswrapper[4558]: I0120 17:52:01.765440 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf356652-86c3-4fff-acda-805974e723a8-combined-ca-bundle\") pod \"cf356652-86c3-4fff-acda-805974e723a8\" (UID: \"cf356652-86c3-4fff-acda-805974e723a8\") " Jan 20 17:52:01 crc kubenswrapper[4558]: I0120 17:52:01.765486 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/cf356652-86c3-4fff-acda-805974e723a8-config\") pod \"cf356652-86c3-4fff-acda-805974e723a8\" (UID: \"cf356652-86c3-4fff-acda-805974e723a8\") " Jan 20 17:52:01 crc kubenswrapper[4558]: I0120 17:52:01.770664 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf356652-86c3-4fff-acda-805974e723a8-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "cf356652-86c3-4fff-acda-805974e723a8" (UID: "cf356652-86c3-4fff-acda-805974e723a8"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:52:01 crc kubenswrapper[4558]: I0120 17:52:01.782370 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cf356652-86c3-4fff-acda-805974e723a8-kube-api-access-htcqf" (OuterVolumeSpecName: "kube-api-access-htcqf") pod "cf356652-86c3-4fff-acda-805974e723a8" (UID: "cf356652-86c3-4fff-acda-805974e723a8"). InnerVolumeSpecName "kube-api-access-htcqf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:52:01 crc kubenswrapper[4558]: I0120 17:52:01.821341 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf356652-86c3-4fff-acda-805974e723a8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cf356652-86c3-4fff-acda-805974e723a8" (UID: "cf356652-86c3-4fff-acda-805974e723a8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:52:01 crc kubenswrapper[4558]: I0120 17:52:01.823643 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf356652-86c3-4fff-acda-805974e723a8-config" (OuterVolumeSpecName: "config") pod "cf356652-86c3-4fff-acda-805974e723a8" (UID: "cf356652-86c3-4fff-acda-805974e723a8"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:52:01 crc kubenswrapper[4558]: I0120 17:52:01.827563 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf356652-86c3-4fff-acda-805974e723a8-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "cf356652-86c3-4fff-acda-805974e723a8" (UID: "cf356652-86c3-4fff-acda-805974e723a8"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:52:01 crc kubenswrapper[4558]: I0120 17:52:01.836743 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf356652-86c3-4fff-acda-805974e723a8-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "cf356652-86c3-4fff-acda-805974e723a8" (UID: "cf356652-86c3-4fff-acda-805974e723a8"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:52:01 crc kubenswrapper[4558]: I0120 17:52:01.849360 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf356652-86c3-4fff-acda-805974e723a8-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "cf356652-86c3-4fff-acda-805974e723a8" (UID: "cf356652-86c3-4fff-acda-805974e723a8"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:52:01 crc kubenswrapper[4558]: I0120 17:52:01.869615 4558 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf356652-86c3-4fff-acda-805974e723a8-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:52:01 crc kubenswrapper[4558]: I0120 17:52:01.869652 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/cf356652-86c3-4fff-acda-805974e723a8-httpd-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:52:01 crc kubenswrapper[4558]: I0120 17:52:01.869671 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf356652-86c3-4fff-acda-805974e723a8-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:52:01 crc kubenswrapper[4558]: I0120 17:52:01.869687 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htcqf\" (UniqueName: \"kubernetes.io/projected/cf356652-86c3-4fff-acda-805974e723a8-kube-api-access-htcqf\") on node \"crc\" DevicePath \"\"" Jan 20 17:52:01 crc kubenswrapper[4558]: I0120 17:52:01.869701 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf356652-86c3-4fff-acda-805974e723a8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:52:01 crc kubenswrapper[4558]: I0120 17:52:01.869714 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/cf356652-86c3-4fff-acda-805974e723a8-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:52:01 crc kubenswrapper[4558]: I0120 17:52:01.869728 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cf356652-86c3-4fff-acda-805974e723a8-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:52:01 crc kubenswrapper[4558]: I0120 17:52:01.887147 4558 scope.go:117] "RemoveContainer" containerID="49b03c8d23d852acad3d7b174f8c3b40f90b13f7e611e5c0e9fc8ab0ad8edda2" Jan 20 17:52:01 crc kubenswrapper[4558]: E0120 17:52:01.887705 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"49b03c8d23d852acad3d7b174f8c3b40f90b13f7e611e5c0e9fc8ab0ad8edda2\": container with ID starting with 49b03c8d23d852acad3d7b174f8c3b40f90b13f7e611e5c0e9fc8ab0ad8edda2 not found: ID does not exist" containerID="49b03c8d23d852acad3d7b174f8c3b40f90b13f7e611e5c0e9fc8ab0ad8edda2" Jan 20 17:52:01 crc kubenswrapper[4558]: I0120 17:52:01.887757 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"49b03c8d23d852acad3d7b174f8c3b40f90b13f7e611e5c0e9fc8ab0ad8edda2"} err="failed to get container status \"49b03c8d23d852acad3d7b174f8c3b40f90b13f7e611e5c0e9fc8ab0ad8edda2\": rpc error: code = NotFound desc = could not find container \"49b03c8d23d852acad3d7b174f8c3b40f90b13f7e611e5c0e9fc8ab0ad8edda2\": container with ID starting with 
49b03c8d23d852acad3d7b174f8c3b40f90b13f7e611e5c0e9fc8ab0ad8edda2 not found: ID does not exist" Jan 20 17:52:01 crc kubenswrapper[4558]: I0120 17:52:01.887785 4558 scope.go:117] "RemoveContainer" containerID="28d40e4f867e20e57ed2e72e419502ac3f2707b00ad65322f68276cf23847472" Jan 20 17:52:01 crc kubenswrapper[4558]: E0120 17:52:01.888211 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"28d40e4f867e20e57ed2e72e419502ac3f2707b00ad65322f68276cf23847472\": container with ID starting with 28d40e4f867e20e57ed2e72e419502ac3f2707b00ad65322f68276cf23847472 not found: ID does not exist" containerID="28d40e4f867e20e57ed2e72e419502ac3f2707b00ad65322f68276cf23847472" Jan 20 17:52:01 crc kubenswrapper[4558]: I0120 17:52:01.888236 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"28d40e4f867e20e57ed2e72e419502ac3f2707b00ad65322f68276cf23847472"} err="failed to get container status \"28d40e4f867e20e57ed2e72e419502ac3f2707b00ad65322f68276cf23847472\": rpc error: code = NotFound desc = could not find container \"28d40e4f867e20e57ed2e72e419502ac3f2707b00ad65322f68276cf23847472\": container with ID starting with 28d40e4f867e20e57ed2e72e419502ac3f2707b00ad65322f68276cf23847472 not found: ID does not exist" Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.044836 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-6bd94d8c7d-2mkc6" Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.048340 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-7578dc9646-4xs8n"] Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.070562 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-7578dc9646-4xs8n"] Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.131323 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/swift-proxy-76d6dddffd-4qcpv"] Jan 20 17:52:02 crc kubenswrapper[4558]: E0120 17:52:02.131707 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9" containerName="neutron-api" Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.131724 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9" containerName="neutron-api" Jan 20 17:52:02 crc kubenswrapper[4558]: E0120 17:52:02.131735 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf356652-86c3-4fff-acda-805974e723a8" containerName="neutron-httpd" Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.131741 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf356652-86c3-4fff-acda-805974e723a8" containerName="neutron-httpd" Jan 20 17:52:02 crc kubenswrapper[4558]: E0120 17:52:02.131765 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9" containerName="neutron-httpd" Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.131772 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9" containerName="neutron-httpd" Jan 20 17:52:02 crc kubenswrapper[4558]: E0120 17:52:02.131785 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28e95314-bdcd-4cd6-9f3b-10f29aad259d" containerName="keystone-api" Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.131790 4558 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="28e95314-bdcd-4cd6-9f3b-10f29aad259d" containerName="keystone-api" Jan 20 17:52:02 crc kubenswrapper[4558]: E0120 17:52:02.131806 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf356652-86c3-4fff-acda-805974e723a8" containerName="neutron-api" Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.131812 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf356652-86c3-4fff-acda-805974e723a8" containerName="neutron-api" Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.131952 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf356652-86c3-4fff-acda-805974e723a8" containerName="neutron-api" Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.131974 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9" containerName="neutron-api" Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.131987 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="28e95314-bdcd-4cd6-9f3b-10f29aad259d" containerName="keystone-api" Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.131996 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf356652-86c3-4fff-acda-805974e723a8" containerName="neutron-httpd" Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.132015 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9" containerName="neutron-httpd" Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.133072 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-proxy-76d6dddffd-4qcpv" Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.150230 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-proxy-76d6dddffd-4qcpv"] Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.176296 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9-config\") pod \"2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9\" (UID: \"2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9\") " Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.176496 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9-public-tls-certs\") pod \"2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9\" (UID: \"2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9\") " Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.176563 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q8mbz\" (UniqueName: \"kubernetes.io/projected/2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9-kube-api-access-q8mbz\") pod \"2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9\" (UID: \"2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9\") " Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.176651 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9-ovndb-tls-certs\") pod \"2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9\" (UID: \"2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9\") " Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.176690 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9-combined-ca-bundle\") pod 
\"2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9\" (UID: \"2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9\") " Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.176727 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9-httpd-config\") pod \"2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9\" (UID: \"2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9\") " Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.176814 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9-internal-tls-certs\") pod \"2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9\" (UID: \"2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9\") " Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.180323 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9-kube-api-access-q8mbz" (OuterVolumeSpecName: "kube-api-access-q8mbz") pod "2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9" (UID: "2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9"). InnerVolumeSpecName "kube-api-access-q8mbz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.187241 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9" (UID: "2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.221585 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9" (UID: "2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.222586 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9" (UID: "2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.222954 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9" (UID: "2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.229945 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9-config" (OuterVolumeSpecName: "config") pod "2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9" (UID: "2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.239503 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9" (UID: "2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.280310 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/433afe0d-c979-4c31-a8dc-8a2d93fbe3df-public-tls-certs\") pod \"swift-proxy-76d6dddffd-4qcpv\" (UID: \"433afe0d-c979-4c31-a8dc-8a2d93fbe3df\") " pod="openstack-kuttl-tests/swift-proxy-76d6dddffd-4qcpv" Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.280385 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/433afe0d-c979-4c31-a8dc-8a2d93fbe3df-internal-tls-certs\") pod \"swift-proxy-76d6dddffd-4qcpv\" (UID: \"433afe0d-c979-4c31-a8dc-8a2d93fbe3df\") " pod="openstack-kuttl-tests/swift-proxy-76d6dddffd-4qcpv" Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.280517 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/433afe0d-c979-4c31-a8dc-8a2d93fbe3df-run-httpd\") pod \"swift-proxy-76d6dddffd-4qcpv\" (UID: \"433afe0d-c979-4c31-a8dc-8a2d93fbe3df\") " pod="openstack-kuttl-tests/swift-proxy-76d6dddffd-4qcpv" Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.280644 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/433afe0d-c979-4c31-a8dc-8a2d93fbe3df-log-httpd\") pod \"swift-proxy-76d6dddffd-4qcpv\" (UID: \"433afe0d-c979-4c31-a8dc-8a2d93fbe3df\") " pod="openstack-kuttl-tests/swift-proxy-76d6dddffd-4qcpv" Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.280705 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/433afe0d-c979-4c31-a8dc-8a2d93fbe3df-config-data\") pod \"swift-proxy-76d6dddffd-4qcpv\" (UID: \"433afe0d-c979-4c31-a8dc-8a2d93fbe3df\") " pod="openstack-kuttl-tests/swift-proxy-76d6dddffd-4qcpv" Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.280845 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/433afe0d-c979-4c31-a8dc-8a2d93fbe3df-etc-swift\") pod \"swift-proxy-76d6dddffd-4qcpv\" (UID: \"433afe0d-c979-4c31-a8dc-8a2d93fbe3df\") " pod="openstack-kuttl-tests/swift-proxy-76d6dddffd-4qcpv" Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.280888 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/433afe0d-c979-4c31-a8dc-8a2d93fbe3df-combined-ca-bundle\") pod \"swift-proxy-76d6dddffd-4qcpv\" (UID: \"433afe0d-c979-4c31-a8dc-8a2d93fbe3df\") " pod="openstack-kuttl-tests/swift-proxy-76d6dddffd-4qcpv" Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.281177 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"kube-api-access-m8zd4\" (UniqueName: \"kubernetes.io/projected/433afe0d-c979-4c31-a8dc-8a2d93fbe3df-kube-api-access-m8zd4\") pod \"swift-proxy-76d6dddffd-4qcpv\" (UID: \"433afe0d-c979-4c31-a8dc-8a2d93fbe3df\") " pod="openstack-kuttl-tests/swift-proxy-76d6dddffd-4qcpv" Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.281471 4558 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.281496 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.281507 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9-httpd-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.281521 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.281533 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.281545 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.281558 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q8mbz\" (UniqueName: \"kubernetes.io/projected/2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9-kube-api-access-q8mbz\") on node \"crc\" DevicePath \"\"" Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.382899 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/433afe0d-c979-4c31-a8dc-8a2d93fbe3df-etc-swift\") pod \"swift-proxy-76d6dddffd-4qcpv\" (UID: \"433afe0d-c979-4c31-a8dc-8a2d93fbe3df\") " pod="openstack-kuttl-tests/swift-proxy-76d6dddffd-4qcpv" Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.382944 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/433afe0d-c979-4c31-a8dc-8a2d93fbe3df-combined-ca-bundle\") pod \"swift-proxy-76d6dddffd-4qcpv\" (UID: \"433afe0d-c979-4c31-a8dc-8a2d93fbe3df\") " pod="openstack-kuttl-tests/swift-proxy-76d6dddffd-4qcpv" Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.382999 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m8zd4\" (UniqueName: \"kubernetes.io/projected/433afe0d-c979-4c31-a8dc-8a2d93fbe3df-kube-api-access-m8zd4\") pod \"swift-proxy-76d6dddffd-4qcpv\" (UID: \"433afe0d-c979-4c31-a8dc-8a2d93fbe3df\") " pod="openstack-kuttl-tests/swift-proxy-76d6dddffd-4qcpv" Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.383041 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/433afe0d-c979-4c31-a8dc-8a2d93fbe3df-public-tls-certs\") pod \"swift-proxy-76d6dddffd-4qcpv\" (UID: \"433afe0d-c979-4c31-a8dc-8a2d93fbe3df\") " pod="openstack-kuttl-tests/swift-proxy-76d6dddffd-4qcpv" Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.383063 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/433afe0d-c979-4c31-a8dc-8a2d93fbe3df-internal-tls-certs\") pod \"swift-proxy-76d6dddffd-4qcpv\" (UID: \"433afe0d-c979-4c31-a8dc-8a2d93fbe3df\") " pod="openstack-kuttl-tests/swift-proxy-76d6dddffd-4qcpv" Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.383097 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/433afe0d-c979-4c31-a8dc-8a2d93fbe3df-run-httpd\") pod \"swift-proxy-76d6dddffd-4qcpv\" (UID: \"433afe0d-c979-4c31-a8dc-8a2d93fbe3df\") " pod="openstack-kuttl-tests/swift-proxy-76d6dddffd-4qcpv" Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.383138 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/433afe0d-c979-4c31-a8dc-8a2d93fbe3df-log-httpd\") pod \"swift-proxy-76d6dddffd-4qcpv\" (UID: \"433afe0d-c979-4c31-a8dc-8a2d93fbe3df\") " pod="openstack-kuttl-tests/swift-proxy-76d6dddffd-4qcpv" Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.383178 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/433afe0d-c979-4c31-a8dc-8a2d93fbe3df-config-data\") pod \"swift-proxy-76d6dddffd-4qcpv\" (UID: \"433afe0d-c979-4c31-a8dc-8a2d93fbe3df\") " pod="openstack-kuttl-tests/swift-proxy-76d6dddffd-4qcpv" Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.384070 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/433afe0d-c979-4c31-a8dc-8a2d93fbe3df-run-httpd\") pod \"swift-proxy-76d6dddffd-4qcpv\" (UID: \"433afe0d-c979-4c31-a8dc-8a2d93fbe3df\") " pod="openstack-kuttl-tests/swift-proxy-76d6dddffd-4qcpv" Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.384249 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/433afe0d-c979-4c31-a8dc-8a2d93fbe3df-log-httpd\") pod \"swift-proxy-76d6dddffd-4qcpv\" (UID: \"433afe0d-c979-4c31-a8dc-8a2d93fbe3df\") " pod="openstack-kuttl-tests/swift-proxy-76d6dddffd-4qcpv" Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.386693 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/433afe0d-c979-4c31-a8dc-8a2d93fbe3df-public-tls-certs\") pod \"swift-proxy-76d6dddffd-4qcpv\" (UID: \"433afe0d-c979-4c31-a8dc-8a2d93fbe3df\") " pod="openstack-kuttl-tests/swift-proxy-76d6dddffd-4qcpv" Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.386901 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/433afe0d-c979-4c31-a8dc-8a2d93fbe3df-internal-tls-certs\") pod \"swift-proxy-76d6dddffd-4qcpv\" (UID: \"433afe0d-c979-4c31-a8dc-8a2d93fbe3df\") " pod="openstack-kuttl-tests/swift-proxy-76d6dddffd-4qcpv" Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.387460 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/433afe0d-c979-4c31-a8dc-8a2d93fbe3df-combined-ca-bundle\") pod \"swift-proxy-76d6dddffd-4qcpv\" (UID: \"433afe0d-c979-4c31-a8dc-8a2d93fbe3df\") " pod="openstack-kuttl-tests/swift-proxy-76d6dddffd-4qcpv" Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.387757 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/433afe0d-c979-4c31-a8dc-8a2d93fbe3df-etc-swift\") pod \"swift-proxy-76d6dddffd-4qcpv\" (UID: \"433afe0d-c979-4c31-a8dc-8a2d93fbe3df\") " pod="openstack-kuttl-tests/swift-proxy-76d6dddffd-4qcpv" Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.388911 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/433afe0d-c979-4c31-a8dc-8a2d93fbe3df-config-data\") pod \"swift-proxy-76d6dddffd-4qcpv\" (UID: \"433afe0d-c979-4c31-a8dc-8a2d93fbe3df\") " pod="openstack-kuttl-tests/swift-proxy-76d6dddffd-4qcpv" Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.401124 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m8zd4\" (UniqueName: \"kubernetes.io/projected/433afe0d-c979-4c31-a8dc-8a2d93fbe3df-kube-api-access-m8zd4\") pod \"swift-proxy-76d6dddffd-4qcpv\" (UID: \"433afe0d-c979-4c31-a8dc-8a2d93fbe3df\") " pod="openstack-kuttl-tests/swift-proxy-76d6dddffd-4qcpv" Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.454444 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-proxy-76d6dddffd-4qcpv" Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.463089 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.463154 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.496507 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.572983 4558 scope.go:117] "RemoveContainer" containerID="70539f03bf1b2ace01f24a9072ce8275fb4f5d92a181a991e07e1469387f4f02" Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.582843 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cf356652-86c3-4fff-acda-805974e723a8" path="/var/lib/kubelet/pods/cf356652-86c3-4fff-acda-805974e723a8/volumes" Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.720636 4558 generic.go:334] "Generic (PLEG): container finished" podID="2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9" containerID="94aea94556475245fb3ed99b92c774a78bc94fc47ab42792ec2c8ac17ee6298a" exitCode=0 Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.720720 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-6bd94d8c7d-2mkc6" Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.720733 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-6bd94d8c7d-2mkc6" event={"ID":"2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9","Type":"ContainerDied","Data":"94aea94556475245fb3ed99b92c774a78bc94fc47ab42792ec2c8ac17ee6298a"} Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.720785 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-6bd94d8c7d-2mkc6" event={"ID":"2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9","Type":"ContainerDied","Data":"962b122f0c0dd826ce08dff87611b9153906227cfee3b82537d770cb585af29a"} Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.720806 4558 scope.go:117] "RemoveContainer" containerID="241ee82c0eec1687f073958ac6fb291483d97abeacad8f032be37d7ad893499a" Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.746502 4558 scope.go:117] "RemoveContainer" containerID="94aea94556475245fb3ed99b92c774a78bc94fc47ab42792ec2c8ac17ee6298a" Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.747933 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-6bd94d8c7d-2mkc6"] Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.756511 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.757538 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-6bd94d8c7d-2mkc6"] Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.779972 4558 scope.go:117] "RemoveContainer" containerID="241ee82c0eec1687f073958ac6fb291483d97abeacad8f032be37d7ad893499a" Jan 20 17:52:02 crc kubenswrapper[4558]: E0120 17:52:02.780442 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"241ee82c0eec1687f073958ac6fb291483d97abeacad8f032be37d7ad893499a\": container with ID starting with 241ee82c0eec1687f073958ac6fb291483d97abeacad8f032be37d7ad893499a not found: ID does not exist" containerID="241ee82c0eec1687f073958ac6fb291483d97abeacad8f032be37d7ad893499a" Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.780476 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"241ee82c0eec1687f073958ac6fb291483d97abeacad8f032be37d7ad893499a"} err="failed to get container status \"241ee82c0eec1687f073958ac6fb291483d97abeacad8f032be37d7ad893499a\": rpc error: code = NotFound desc = could not find container \"241ee82c0eec1687f073958ac6fb291483d97abeacad8f032be37d7ad893499a\": container with ID starting with 241ee82c0eec1687f073958ac6fb291483d97abeacad8f032be37d7ad893499a not found: ID does not exist" Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.780499 4558 scope.go:117] "RemoveContainer" containerID="94aea94556475245fb3ed99b92c774a78bc94fc47ab42792ec2c8ac17ee6298a" Jan 20 17:52:02 crc kubenswrapper[4558]: E0120 17:52:02.781573 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"94aea94556475245fb3ed99b92c774a78bc94fc47ab42792ec2c8ac17ee6298a\": container with ID starting with 94aea94556475245fb3ed99b92c774a78bc94fc47ab42792ec2c8ac17ee6298a not found: ID does not exist" containerID="94aea94556475245fb3ed99b92c774a78bc94fc47ab42792ec2c8ac17ee6298a" Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.781596 4558 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"94aea94556475245fb3ed99b92c774a78bc94fc47ab42792ec2c8ac17ee6298a"} err="failed to get container status \"94aea94556475245fb3ed99b92c774a78bc94fc47ab42792ec2c8ac17ee6298a\": rpc error: code = NotFound desc = could not find container \"94aea94556475245fb3ed99b92c774a78bc94fc47ab42792ec2c8ac17ee6298a\": container with ID starting with 94aea94556475245fb3ed99b92c774a78bc94fc47ab42792ec2c8ac17ee6298a not found: ID does not exist" Jan 20 17:52:02 crc kubenswrapper[4558]: I0120 17:52:02.931583 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-proxy-76d6dddffd-4qcpv"] Jan 20 17:52:03 crc kubenswrapper[4558]: I0120 17:52:03.734184 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-76d6dddffd-4qcpv" event={"ID":"433afe0d-c979-4c31-a8dc-8a2d93fbe3df","Type":"ContainerStarted","Data":"201a91126279426ab94e20fd8f5beddaa054584c71b596e84fb7b72c3f13b29f"} Jan 20 17:52:03 crc kubenswrapper[4558]: I0120 17:52:03.734615 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-76d6dddffd-4qcpv" event={"ID":"433afe0d-c979-4c31-a8dc-8a2d93fbe3df","Type":"ContainerStarted","Data":"48a32835b74390f16be559ac97cec4eea57eb34d5852a0e5d9f37f38184cdf7a"} Jan 20 17:52:03 crc kubenswrapper[4558]: I0120 17:52:03.735809 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/swift-proxy-76d6dddffd-4qcpv" Jan 20 17:52:03 crc kubenswrapper[4558]: I0120 17:52:03.735866 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-76d6dddffd-4qcpv" event={"ID":"433afe0d-c979-4c31-a8dc-8a2d93fbe3df","Type":"ContainerStarted","Data":"c1b4e327c581b4775ab7540e7af4a06ae88758cab258a2422129ac0f967db364"} Jan 20 17:52:03 crc kubenswrapper[4558]: I0120 17:52:03.737552 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"924fa7ce-8d60-4b2f-b62b-d5e146474f71","Type":"ContainerStarted","Data":"d99379340565779627596cecb15d625f9bcb83d751ea4f5f14fb7cb8bcb8a6b6"} Jan 20 17:52:03 crc kubenswrapper[4558]: I0120 17:52:03.738322 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:52:03 crc kubenswrapper[4558]: I0120 17:52:03.771477 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/swift-proxy-76d6dddffd-4qcpv" podStartSLOduration=1.771459073 podStartE2EDuration="1.771459073s" podCreationTimestamp="2026-01-20 17:52:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:52:03.761058145 +0000 UTC m=+4217.521396112" watchObservedRunningTime="2026-01-20 17:52:03.771459073 +0000 UTC m=+4217.531797040" Jan 20 17:52:04 crc kubenswrapper[4558]: I0120 17:52:04.574911 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9" path="/var/lib/kubelet/pods/2ef9d4ef-ec42-4464-8744-4b3aea7b6eb9/volumes" Jan 20 17:52:04 crc kubenswrapper[4558]: I0120 17:52:04.746828 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/swift-proxy-76d6dddffd-4qcpv" Jan 20 17:52:04 crc kubenswrapper[4558]: I0120 17:52:04.844600 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack-kuttl-tests/placement-658f45b9f4-tx296" Jan 20 17:52:04 crc kubenswrapper[4558]: I0120 17:52:04.904654 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/placement-658f45b9f4-tx296" Jan 20 17:52:04 crc kubenswrapper[4558]: I0120 17:52:04.955961 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-764f5f95dd-qwm2f"] Jan 20 17:52:04 crc kubenswrapper[4558]: I0120 17:52:04.956290 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/placement-764f5f95dd-qwm2f" podUID="f955e7e5-de96-4598-96b9-76573c42d8e5" containerName="placement-log" containerID="cri-o://3567a40831ca68954b26040c1b8fd81909b950423881e4378d24290b2d5b462b" gracePeriod=30 Jan 20 17:52:04 crc kubenswrapper[4558]: I0120 17:52:04.956614 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/placement-764f5f95dd-qwm2f" podUID="f955e7e5-de96-4598-96b9-76573c42d8e5" containerName="placement-api" containerID="cri-o://aaac3048ac7c85ff990616bebe2afbceb777be9814adb36b9554932f80d8482e" gracePeriod=30 Jan 20 17:52:05 crc kubenswrapper[4558]: I0120 17:52:05.764820 4558 generic.go:334] "Generic (PLEG): container finished" podID="f955e7e5-de96-4598-96b9-76573c42d8e5" containerID="3567a40831ca68954b26040c1b8fd81909b950423881e4378d24290b2d5b462b" exitCode=143 Jan 20 17:52:05 crc kubenswrapper[4558]: I0120 17:52:05.764898 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-764f5f95dd-qwm2f" event={"ID":"f955e7e5-de96-4598-96b9-76573c42d8e5","Type":"ContainerDied","Data":"3567a40831ca68954b26040c1b8fd81909b950423881e4378d24290b2d5b462b"} Jan 20 17:52:06 crc kubenswrapper[4558]: I0120 17:52:06.572145 4558 scope.go:117] "RemoveContainer" containerID="b5d4742f71c4c1a0e14ccc61fbcd3c7bb3f43da7229787caafda16955ac74e7d" Jan 20 17:52:06 crc kubenswrapper[4558]: E0120 17:52:06.572654 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-cell1-conductor-conductor\" with CrashLoopBackOff: \"back-off 40s restarting failed container=nova-cell1-conductor-conductor pod=nova-cell1-conductor-0_openstack-kuttl-tests(7c16f0c4-1462-4817-9f74-9e3d93193867)\"" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podUID="7c16f0c4-1462-4817-9f74-9e3d93193867" Jan 20 17:52:08 crc kubenswrapper[4558]: I0120 17:52:08.800552 4558 generic.go:334] "Generic (PLEG): container finished" podID="f955e7e5-de96-4598-96b9-76573c42d8e5" containerID="aaac3048ac7c85ff990616bebe2afbceb777be9814adb36b9554932f80d8482e" exitCode=0 Jan 20 17:52:08 crc kubenswrapper[4558]: I0120 17:52:08.800644 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-764f5f95dd-qwm2f" event={"ID":"f955e7e5-de96-4598-96b9-76573c42d8e5","Type":"ContainerDied","Data":"aaac3048ac7c85ff990616bebe2afbceb777be9814adb36b9554932f80d8482e"} Jan 20 17:52:09 crc kubenswrapper[4558]: I0120 17:52:09.170033 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-764f5f95dd-qwm2f" Jan 20 17:52:09 crc kubenswrapper[4558]: I0120 17:52:09.255016 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f955e7e5-de96-4598-96b9-76573c42d8e5-config-data\") pod \"f955e7e5-de96-4598-96b9-76573c42d8e5\" (UID: \"f955e7e5-de96-4598-96b9-76573c42d8e5\") " Jan 20 17:52:09 crc kubenswrapper[4558]: I0120 17:52:09.255152 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f955e7e5-de96-4598-96b9-76573c42d8e5-scripts\") pod \"f955e7e5-de96-4598-96b9-76573c42d8e5\" (UID: \"f955e7e5-de96-4598-96b9-76573c42d8e5\") " Jan 20 17:52:09 crc kubenswrapper[4558]: I0120 17:52:09.255296 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f955e7e5-de96-4598-96b9-76573c42d8e5-public-tls-certs\") pod \"f955e7e5-de96-4598-96b9-76573c42d8e5\" (UID: \"f955e7e5-de96-4598-96b9-76573c42d8e5\") " Jan 20 17:52:09 crc kubenswrapper[4558]: I0120 17:52:09.255421 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f955e7e5-de96-4598-96b9-76573c42d8e5-logs\") pod \"f955e7e5-de96-4598-96b9-76573c42d8e5\" (UID: \"f955e7e5-de96-4598-96b9-76573c42d8e5\") " Jan 20 17:52:09 crc kubenswrapper[4558]: I0120 17:52:09.255476 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f955e7e5-de96-4598-96b9-76573c42d8e5-internal-tls-certs\") pod \"f955e7e5-de96-4598-96b9-76573c42d8e5\" (UID: \"f955e7e5-de96-4598-96b9-76573c42d8e5\") " Jan 20 17:52:09 crc kubenswrapper[4558]: I0120 17:52:09.255528 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tggtq\" (UniqueName: \"kubernetes.io/projected/f955e7e5-de96-4598-96b9-76573c42d8e5-kube-api-access-tggtq\") pod \"f955e7e5-de96-4598-96b9-76573c42d8e5\" (UID: \"f955e7e5-de96-4598-96b9-76573c42d8e5\") " Jan 20 17:52:09 crc kubenswrapper[4558]: I0120 17:52:09.255588 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f955e7e5-de96-4598-96b9-76573c42d8e5-combined-ca-bundle\") pod \"f955e7e5-de96-4598-96b9-76573c42d8e5\" (UID: \"f955e7e5-de96-4598-96b9-76573c42d8e5\") " Jan 20 17:52:09 crc kubenswrapper[4558]: I0120 17:52:09.256023 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f955e7e5-de96-4598-96b9-76573c42d8e5-logs" (OuterVolumeSpecName: "logs") pod "f955e7e5-de96-4598-96b9-76573c42d8e5" (UID: "f955e7e5-de96-4598-96b9-76573c42d8e5"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:52:09 crc kubenswrapper[4558]: I0120 17:52:09.256317 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f955e7e5-de96-4598-96b9-76573c42d8e5-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:52:09 crc kubenswrapper[4558]: I0120 17:52:09.260902 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f955e7e5-de96-4598-96b9-76573c42d8e5-kube-api-access-tggtq" (OuterVolumeSpecName: "kube-api-access-tggtq") pod "f955e7e5-de96-4598-96b9-76573c42d8e5" (UID: "f955e7e5-de96-4598-96b9-76573c42d8e5"). InnerVolumeSpecName "kube-api-access-tggtq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:52:09 crc kubenswrapper[4558]: I0120 17:52:09.273261 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f955e7e5-de96-4598-96b9-76573c42d8e5-scripts" (OuterVolumeSpecName: "scripts") pod "f955e7e5-de96-4598-96b9-76573c42d8e5" (UID: "f955e7e5-de96-4598-96b9-76573c42d8e5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:52:09 crc kubenswrapper[4558]: I0120 17:52:09.298317 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f955e7e5-de96-4598-96b9-76573c42d8e5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f955e7e5-de96-4598-96b9-76573c42d8e5" (UID: "f955e7e5-de96-4598-96b9-76573c42d8e5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:52:09 crc kubenswrapper[4558]: I0120 17:52:09.299046 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f955e7e5-de96-4598-96b9-76573c42d8e5-config-data" (OuterVolumeSpecName: "config-data") pod "f955e7e5-de96-4598-96b9-76573c42d8e5" (UID: "f955e7e5-de96-4598-96b9-76573c42d8e5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:52:09 crc kubenswrapper[4558]: I0120 17:52:09.332003 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f955e7e5-de96-4598-96b9-76573c42d8e5-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "f955e7e5-de96-4598-96b9-76573c42d8e5" (UID: "f955e7e5-de96-4598-96b9-76573c42d8e5"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:52:09 crc kubenswrapper[4558]: I0120 17:52:09.335307 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f955e7e5-de96-4598-96b9-76573c42d8e5-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "f955e7e5-de96-4598-96b9-76573c42d8e5" (UID: "f955e7e5-de96-4598-96b9-76573c42d8e5"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:52:09 crc kubenswrapper[4558]: I0120 17:52:09.345066 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:52:09 crc kubenswrapper[4558]: I0120 17:52:09.357691 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f955e7e5-de96-4598-96b9-76573c42d8e5-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:52:09 crc kubenswrapper[4558]: I0120 17:52:09.358375 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f955e7e5-de96-4598-96b9-76573c42d8e5-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:52:09 crc kubenswrapper[4558]: I0120 17:52:09.358531 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f955e7e5-de96-4598-96b9-76573c42d8e5-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:52:09 crc kubenswrapper[4558]: I0120 17:52:09.358559 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tggtq\" (UniqueName: \"kubernetes.io/projected/f955e7e5-de96-4598-96b9-76573c42d8e5-kube-api-access-tggtq\") on node \"crc\" DevicePath \"\"" Jan 20 17:52:09 crc kubenswrapper[4558]: I0120 17:52:09.358574 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f955e7e5-de96-4598-96b9-76573c42d8e5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:52:09 crc kubenswrapper[4558]: I0120 17:52:09.358599 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f955e7e5-de96-4598-96b9-76573c42d8e5-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:52:09 crc kubenswrapper[4558]: I0120 17:52:09.813513 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-764f5f95dd-qwm2f" event={"ID":"f955e7e5-de96-4598-96b9-76573c42d8e5","Type":"ContainerDied","Data":"d886adc55790c399dc46c571a58d19fce02aa9106ba0742781f5a78e49c66465"} Jan 20 17:52:09 crc kubenswrapper[4558]: I0120 17:52:09.813588 4558 scope.go:117] "RemoveContainer" containerID="aaac3048ac7c85ff990616bebe2afbceb777be9814adb36b9554932f80d8482e" Jan 20 17:52:09 crc kubenswrapper[4558]: I0120 17:52:09.813585 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-764f5f95dd-qwm2f" Jan 20 17:52:09 crc kubenswrapper[4558]: I0120 17:52:09.838550 4558 scope.go:117] "RemoveContainer" containerID="3567a40831ca68954b26040c1b8fd81909b950423881e4378d24290b2d5b462b" Jan 20 17:52:09 crc kubenswrapper[4558]: I0120 17:52:09.851456 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-764f5f95dd-qwm2f"] Jan 20 17:52:09 crc kubenswrapper[4558]: I0120 17:52:09.857285 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/placement-764f5f95dd-qwm2f"] Jan 20 17:52:10 crc kubenswrapper[4558]: I0120 17:52:10.565965 4558 scope.go:117] "RemoveContainer" containerID="4ea7fa61d8b21007a044345acec89fcebe56c2921cf0f76ff2002f44bdc88ede" Jan 20 17:52:10 crc kubenswrapper[4558]: E0120 17:52:10.566414 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:52:10 crc kubenswrapper[4558]: I0120 17:52:10.576299 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f955e7e5-de96-4598-96b9-76573c42d8e5" path="/var/lib/kubelet/pods/f955e7e5-de96-4598-96b9-76573c42d8e5/volumes" Jan 20 17:52:10 crc kubenswrapper[4558]: I0120 17:52:10.848082 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:52:12 crc kubenswrapper[4558]: I0120 17:52:12.315052 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:52:12 crc kubenswrapper[4558]: I0120 17:52:12.315327 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="1e9bc275-8603-4dc8-ab3f-9d63bea731a0" containerName="ceilometer-central-agent" containerID="cri-o://4389e7ca5201cf3921b6868ade9268aebc56ca43ff8d253051310b83c33e7766" gracePeriod=30 Jan 20 17:52:12 crc kubenswrapper[4558]: I0120 17:52:12.315380 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="1e9bc275-8603-4dc8-ab3f-9d63bea731a0" containerName="proxy-httpd" containerID="cri-o://71b48e753eec9c326ad87e9648f3d2c2e185d09a418df5033c425c4192b92af4" gracePeriod=30 Jan 20 17:52:12 crc kubenswrapper[4558]: I0120 17:52:12.315443 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="1e9bc275-8603-4dc8-ab3f-9d63bea731a0" containerName="ceilometer-notification-agent" containerID="cri-o://d22d7692b054b8eebf218d2f206edb4ca1bee71cbfe4240fe94349103e4bffe2" gracePeriod=30 Jan 20 17:52:12 crc kubenswrapper[4558]: I0120 17:52:12.315443 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="1e9bc275-8603-4dc8-ab3f-9d63bea731a0" containerName="sg-core" containerID="cri-o://1c5e6f1d1d51002b9cb986abdde79e37e78e644a6a60f676870d827de94f940e" gracePeriod=30 Jan 20 17:52:12 crc kubenswrapper[4558]: I0120 17:52:12.462068 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/swift-proxy-76d6dddffd-4qcpv" Jan 20 17:52:12 crc kubenswrapper[4558]: I0120 
17:52:12.465442 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/swift-proxy-76d6dddffd-4qcpv" Jan 20 17:52:12 crc kubenswrapper[4558]: I0120 17:52:12.545590 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-proxy-67dccf5df6-qnbjc"] Jan 20 17:52:12 crc kubenswrapper[4558]: I0120 17:52:12.545802 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-proxy-67dccf5df6-qnbjc" podUID="d824a74f-efe9-440c-8b03-f43303fb5923" containerName="proxy-httpd" containerID="cri-o://29b09261b655df3694ff09435f242a8d50d59951e389e134a69944af2e32b744" gracePeriod=30 Jan 20 17:52:12 crc kubenswrapper[4558]: I0120 17:52:12.546214 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-proxy-67dccf5df6-qnbjc" podUID="d824a74f-efe9-440c-8b03-f43303fb5923" containerName="proxy-server" containerID="cri-o://b7ac154d8dd8a84fc960440afd7af5e40789946075eb75c3e21c3d8f3ab318a4" gracePeriod=30 Jan 20 17:52:12 crc kubenswrapper[4558]: I0120 17:52:12.758975 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/swift-proxy-67dccf5df6-qnbjc" podUID="d824a74f-efe9-440c-8b03-f43303fb5923" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.1.139:8080/healthcheck\": dial tcp 10.217.1.139:8080: connect: connection refused" Jan 20 17:52:12 crc kubenswrapper[4558]: I0120 17:52:12.759101 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/swift-proxy-67dccf5df6-qnbjc" podUID="d824a74f-efe9-440c-8b03-f43303fb5923" containerName="proxy-server" probeResult="failure" output="Get \"https://10.217.1.139:8080/healthcheck\": dial tcp 10.217.1.139:8080: connect: connection refused" Jan 20 17:52:12 crc kubenswrapper[4558]: I0120 17:52:12.852521 4558 generic.go:334] "Generic (PLEG): container finished" podID="d824a74f-efe9-440c-8b03-f43303fb5923" containerID="29b09261b655df3694ff09435f242a8d50d59951e389e134a69944af2e32b744" exitCode=0 Jan 20 17:52:12 crc kubenswrapper[4558]: I0120 17:52:12.852590 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-67dccf5df6-qnbjc" event={"ID":"d824a74f-efe9-440c-8b03-f43303fb5923","Type":"ContainerDied","Data":"29b09261b655df3694ff09435f242a8d50d59951e389e134a69944af2e32b744"} Jan 20 17:52:12 crc kubenswrapper[4558]: I0120 17:52:12.856299 4558 generic.go:334] "Generic (PLEG): container finished" podID="1e9bc275-8603-4dc8-ab3f-9d63bea731a0" containerID="71b48e753eec9c326ad87e9648f3d2c2e185d09a418df5033c425c4192b92af4" exitCode=0 Jan 20 17:52:12 crc kubenswrapper[4558]: I0120 17:52:12.856342 4558 generic.go:334] "Generic (PLEG): container finished" podID="1e9bc275-8603-4dc8-ab3f-9d63bea731a0" containerID="1c5e6f1d1d51002b9cb986abdde79e37e78e644a6a60f676870d827de94f940e" exitCode=2 Jan 20 17:52:12 crc kubenswrapper[4558]: I0120 17:52:12.856352 4558 generic.go:334] "Generic (PLEG): container finished" podID="1e9bc275-8603-4dc8-ab3f-9d63bea731a0" containerID="4389e7ca5201cf3921b6868ade9268aebc56ca43ff8d253051310b83c33e7766" exitCode=0 Jan 20 17:52:12 crc kubenswrapper[4558]: I0120 17:52:12.856367 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"1e9bc275-8603-4dc8-ab3f-9d63bea731a0","Type":"ContainerDied","Data":"71b48e753eec9c326ad87e9648f3d2c2e185d09a418df5033c425c4192b92af4"} Jan 20 17:52:12 crc kubenswrapper[4558]: I0120 
17:52:12.856394 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"1e9bc275-8603-4dc8-ab3f-9d63bea731a0","Type":"ContainerDied","Data":"1c5e6f1d1d51002b9cb986abdde79e37e78e644a6a60f676870d827de94f940e"} Jan 20 17:52:12 crc kubenswrapper[4558]: I0120 17:52:12.856407 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"1e9bc275-8603-4dc8-ab3f-9d63bea731a0","Type":"ContainerDied","Data":"4389e7ca5201cf3921b6868ade9268aebc56ca43ff8d253051310b83c33e7766"} Jan 20 17:52:13 crc kubenswrapper[4558]: I0120 17:52:13.423134 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-proxy-67dccf5df6-qnbjc" Jan 20 17:52:13 crc kubenswrapper[4558]: I0120 17:52:13.567642 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d824a74f-efe9-440c-8b03-f43303fb5923-run-httpd\") pod \"d824a74f-efe9-440c-8b03-f43303fb5923\" (UID: \"d824a74f-efe9-440c-8b03-f43303fb5923\") " Jan 20 17:52:13 crc kubenswrapper[4558]: I0120 17:52:13.567728 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6vbhv\" (UniqueName: \"kubernetes.io/projected/d824a74f-efe9-440c-8b03-f43303fb5923-kube-api-access-6vbhv\") pod \"d824a74f-efe9-440c-8b03-f43303fb5923\" (UID: \"d824a74f-efe9-440c-8b03-f43303fb5923\") " Jan 20 17:52:13 crc kubenswrapper[4558]: I0120 17:52:13.567820 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d824a74f-efe9-440c-8b03-f43303fb5923-config-data\") pod \"d824a74f-efe9-440c-8b03-f43303fb5923\" (UID: \"d824a74f-efe9-440c-8b03-f43303fb5923\") " Jan 20 17:52:13 crc kubenswrapper[4558]: I0120 17:52:13.567906 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d824a74f-efe9-440c-8b03-f43303fb5923-internal-tls-certs\") pod \"d824a74f-efe9-440c-8b03-f43303fb5923\" (UID: \"d824a74f-efe9-440c-8b03-f43303fb5923\") " Jan 20 17:52:13 crc kubenswrapper[4558]: I0120 17:52:13.568027 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d824a74f-efe9-440c-8b03-f43303fb5923-public-tls-certs\") pod \"d824a74f-efe9-440c-8b03-f43303fb5923\" (UID: \"d824a74f-efe9-440c-8b03-f43303fb5923\") " Jan 20 17:52:13 crc kubenswrapper[4558]: I0120 17:52:13.568300 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d824a74f-efe9-440c-8b03-f43303fb5923-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "d824a74f-efe9-440c-8b03-f43303fb5923" (UID: "d824a74f-efe9-440c-8b03-f43303fb5923"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:52:13 crc kubenswrapper[4558]: I0120 17:52:13.568680 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d824a74f-efe9-440c-8b03-f43303fb5923-combined-ca-bundle\") pod \"d824a74f-efe9-440c-8b03-f43303fb5923\" (UID: \"d824a74f-efe9-440c-8b03-f43303fb5923\") " Jan 20 17:52:13 crc kubenswrapper[4558]: I0120 17:52:13.568789 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/d824a74f-efe9-440c-8b03-f43303fb5923-etc-swift\") pod \"d824a74f-efe9-440c-8b03-f43303fb5923\" (UID: \"d824a74f-efe9-440c-8b03-f43303fb5923\") " Jan 20 17:52:13 crc kubenswrapper[4558]: I0120 17:52:13.568882 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d824a74f-efe9-440c-8b03-f43303fb5923-log-httpd\") pod \"d824a74f-efe9-440c-8b03-f43303fb5923\" (UID: \"d824a74f-efe9-440c-8b03-f43303fb5923\") " Jan 20 17:52:13 crc kubenswrapper[4558]: I0120 17:52:13.569996 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d824a74f-efe9-440c-8b03-f43303fb5923-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "d824a74f-efe9-440c-8b03-f43303fb5923" (UID: "d824a74f-efe9-440c-8b03-f43303fb5923"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:52:13 crc kubenswrapper[4558]: I0120 17:52:13.571289 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d824a74f-efe9-440c-8b03-f43303fb5923-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:52:13 crc kubenswrapper[4558]: I0120 17:52:13.571314 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d824a74f-efe9-440c-8b03-f43303fb5923-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:52:13 crc kubenswrapper[4558]: I0120 17:52:13.575777 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d824a74f-efe9-440c-8b03-f43303fb5923-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "d824a74f-efe9-440c-8b03-f43303fb5923" (UID: "d824a74f-efe9-440c-8b03-f43303fb5923"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:52:13 crc kubenswrapper[4558]: I0120 17:52:13.579222 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d824a74f-efe9-440c-8b03-f43303fb5923-kube-api-access-6vbhv" (OuterVolumeSpecName: "kube-api-access-6vbhv") pod "d824a74f-efe9-440c-8b03-f43303fb5923" (UID: "d824a74f-efe9-440c-8b03-f43303fb5923"). InnerVolumeSpecName "kube-api-access-6vbhv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:52:13 crc kubenswrapper[4558]: I0120 17:52:13.615967 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d824a74f-efe9-440c-8b03-f43303fb5923-config-data" (OuterVolumeSpecName: "config-data") pod "d824a74f-efe9-440c-8b03-f43303fb5923" (UID: "d824a74f-efe9-440c-8b03-f43303fb5923"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:52:13 crc kubenswrapper[4558]: I0120 17:52:13.616494 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d824a74f-efe9-440c-8b03-f43303fb5923-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d824a74f-efe9-440c-8b03-f43303fb5923" (UID: "d824a74f-efe9-440c-8b03-f43303fb5923"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:52:13 crc kubenswrapper[4558]: I0120 17:52:13.619230 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d824a74f-efe9-440c-8b03-f43303fb5923-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "d824a74f-efe9-440c-8b03-f43303fb5923" (UID: "d824a74f-efe9-440c-8b03-f43303fb5923"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:52:13 crc kubenswrapper[4558]: I0120 17:52:13.622682 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d824a74f-efe9-440c-8b03-f43303fb5923-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "d824a74f-efe9-440c-8b03-f43303fb5923" (UID: "d824a74f-efe9-440c-8b03-f43303fb5923"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:52:13 crc kubenswrapper[4558]: I0120 17:52:13.674401 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6vbhv\" (UniqueName: \"kubernetes.io/projected/d824a74f-efe9-440c-8b03-f43303fb5923-kube-api-access-6vbhv\") on node \"crc\" DevicePath \"\"" Jan 20 17:52:13 crc kubenswrapper[4558]: I0120 17:52:13.674549 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d824a74f-efe9-440c-8b03-f43303fb5923-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:52:13 crc kubenswrapper[4558]: I0120 17:52:13.674621 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d824a74f-efe9-440c-8b03-f43303fb5923-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:52:13 crc kubenswrapper[4558]: I0120 17:52:13.674674 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d824a74f-efe9-440c-8b03-f43303fb5923-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:52:13 crc kubenswrapper[4558]: I0120 17:52:13.674727 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d824a74f-efe9-440c-8b03-f43303fb5923-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:52:13 crc kubenswrapper[4558]: I0120 17:52:13.674782 4558 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/d824a74f-efe9-440c-8b03-f43303fb5923-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 20 17:52:13 crc kubenswrapper[4558]: I0120 17:52:13.868923 4558 generic.go:334] "Generic (PLEG): container finished" podID="d824a74f-efe9-440c-8b03-f43303fb5923" containerID="b7ac154d8dd8a84fc960440afd7af5e40789946075eb75c3e21c3d8f3ab318a4" exitCode=0 Jan 20 17:52:13 crc kubenswrapper[4558]: I0120 17:52:13.868995 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-67dccf5df6-qnbjc" 
event={"ID":"d824a74f-efe9-440c-8b03-f43303fb5923","Type":"ContainerDied","Data":"b7ac154d8dd8a84fc960440afd7af5e40789946075eb75c3e21c3d8f3ab318a4"} Jan 20 17:52:13 crc kubenswrapper[4558]: I0120 17:52:13.869053 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-67dccf5df6-qnbjc" event={"ID":"d824a74f-efe9-440c-8b03-f43303fb5923","Type":"ContainerDied","Data":"d24aac2e275e31d037fd1782ec9d6823bbbc2085a7f02e62eb8fc6928ce4e3bf"} Jan 20 17:52:13 crc kubenswrapper[4558]: I0120 17:52:13.869079 4558 scope.go:117] "RemoveContainer" containerID="b7ac154d8dd8a84fc960440afd7af5e40789946075eb75c3e21c3d8f3ab318a4" Jan 20 17:52:13 crc kubenswrapper[4558]: I0120 17:52:13.869288 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-proxy-67dccf5df6-qnbjc" Jan 20 17:52:13 crc kubenswrapper[4558]: I0120 17:52:13.901260 4558 scope.go:117] "RemoveContainer" containerID="29b09261b655df3694ff09435f242a8d50d59951e389e134a69944af2e32b744" Jan 20 17:52:13 crc kubenswrapper[4558]: I0120 17:52:13.906781 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-proxy-67dccf5df6-qnbjc"] Jan 20 17:52:13 crc kubenswrapper[4558]: I0120 17:52:13.913417 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/swift-proxy-67dccf5df6-qnbjc"] Jan 20 17:52:13 crc kubenswrapper[4558]: I0120 17:52:13.921985 4558 scope.go:117] "RemoveContainer" containerID="b7ac154d8dd8a84fc960440afd7af5e40789946075eb75c3e21c3d8f3ab318a4" Jan 20 17:52:13 crc kubenswrapper[4558]: E0120 17:52:13.922559 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b7ac154d8dd8a84fc960440afd7af5e40789946075eb75c3e21c3d8f3ab318a4\": container with ID starting with b7ac154d8dd8a84fc960440afd7af5e40789946075eb75c3e21c3d8f3ab318a4 not found: ID does not exist" containerID="b7ac154d8dd8a84fc960440afd7af5e40789946075eb75c3e21c3d8f3ab318a4" Jan 20 17:52:13 crc kubenswrapper[4558]: I0120 17:52:13.922596 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b7ac154d8dd8a84fc960440afd7af5e40789946075eb75c3e21c3d8f3ab318a4"} err="failed to get container status \"b7ac154d8dd8a84fc960440afd7af5e40789946075eb75c3e21c3d8f3ab318a4\": rpc error: code = NotFound desc = could not find container \"b7ac154d8dd8a84fc960440afd7af5e40789946075eb75c3e21c3d8f3ab318a4\": container with ID starting with b7ac154d8dd8a84fc960440afd7af5e40789946075eb75c3e21c3d8f3ab318a4 not found: ID does not exist" Jan 20 17:52:13 crc kubenswrapper[4558]: I0120 17:52:13.922628 4558 scope.go:117] "RemoveContainer" containerID="29b09261b655df3694ff09435f242a8d50d59951e389e134a69944af2e32b744" Jan 20 17:52:13 crc kubenswrapper[4558]: E0120 17:52:13.923032 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"29b09261b655df3694ff09435f242a8d50d59951e389e134a69944af2e32b744\": container with ID starting with 29b09261b655df3694ff09435f242a8d50d59951e389e134a69944af2e32b744 not found: ID does not exist" containerID="29b09261b655df3694ff09435f242a8d50d59951e389e134a69944af2e32b744" Jan 20 17:52:13 crc kubenswrapper[4558]: I0120 17:52:13.923086 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"29b09261b655df3694ff09435f242a8d50d59951e389e134a69944af2e32b744"} err="failed to get container status 
\"29b09261b655df3694ff09435f242a8d50d59951e389e134a69944af2e32b744\": rpc error: code = NotFound desc = could not find container \"29b09261b655df3694ff09435f242a8d50d59951e389e134a69944af2e32b744\": container with ID starting with 29b09261b655df3694ff09435f242a8d50d59951e389e134a69944af2e32b744 not found: ID does not exist" Jan 20 17:52:14 crc kubenswrapper[4558]: I0120 17:52:14.282593 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:52:14 crc kubenswrapper[4558]: I0120 17:52:14.394345 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1e9bc275-8603-4dc8-ab3f-9d63bea731a0-sg-core-conf-yaml\") pod \"1e9bc275-8603-4dc8-ab3f-9d63bea731a0\" (UID: \"1e9bc275-8603-4dc8-ab3f-9d63bea731a0\") " Jan 20 17:52:14 crc kubenswrapper[4558]: I0120 17:52:14.394415 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e9bc275-8603-4dc8-ab3f-9d63bea731a0-ceilometer-tls-certs\") pod \"1e9bc275-8603-4dc8-ab3f-9d63bea731a0\" (UID: \"1e9bc275-8603-4dc8-ab3f-9d63bea731a0\") " Jan 20 17:52:14 crc kubenswrapper[4558]: I0120 17:52:14.394475 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1e9bc275-8603-4dc8-ab3f-9d63bea731a0-config-data\") pod \"1e9bc275-8603-4dc8-ab3f-9d63bea731a0\" (UID: \"1e9bc275-8603-4dc8-ab3f-9d63bea731a0\") " Jan 20 17:52:14 crc kubenswrapper[4558]: I0120 17:52:14.394523 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1e9bc275-8603-4dc8-ab3f-9d63bea731a0-log-httpd\") pod \"1e9bc275-8603-4dc8-ab3f-9d63bea731a0\" (UID: \"1e9bc275-8603-4dc8-ab3f-9d63bea731a0\") " Jan 20 17:52:14 crc kubenswrapper[4558]: I0120 17:52:14.394580 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1e9bc275-8603-4dc8-ab3f-9d63bea731a0-run-httpd\") pod \"1e9bc275-8603-4dc8-ab3f-9d63bea731a0\" (UID: \"1e9bc275-8603-4dc8-ab3f-9d63bea731a0\") " Jan 20 17:52:14 crc kubenswrapper[4558]: I0120 17:52:14.394615 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e9bc275-8603-4dc8-ab3f-9d63bea731a0-combined-ca-bundle\") pod \"1e9bc275-8603-4dc8-ab3f-9d63bea731a0\" (UID: \"1e9bc275-8603-4dc8-ab3f-9d63bea731a0\") " Jan 20 17:52:14 crc kubenswrapper[4558]: I0120 17:52:14.394685 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rql4j\" (UniqueName: \"kubernetes.io/projected/1e9bc275-8603-4dc8-ab3f-9d63bea731a0-kube-api-access-rql4j\") pod \"1e9bc275-8603-4dc8-ab3f-9d63bea731a0\" (UID: \"1e9bc275-8603-4dc8-ab3f-9d63bea731a0\") " Jan 20 17:52:14 crc kubenswrapper[4558]: I0120 17:52:14.394738 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1e9bc275-8603-4dc8-ab3f-9d63bea731a0-scripts\") pod \"1e9bc275-8603-4dc8-ab3f-9d63bea731a0\" (UID: \"1e9bc275-8603-4dc8-ab3f-9d63bea731a0\") " Jan 20 17:52:14 crc kubenswrapper[4558]: I0120 17:52:14.395082 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1e9bc275-8603-4dc8-ab3f-9d63bea731a0-run-httpd" 
(OuterVolumeSpecName: "run-httpd") pod "1e9bc275-8603-4dc8-ab3f-9d63bea731a0" (UID: "1e9bc275-8603-4dc8-ab3f-9d63bea731a0"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:52:14 crc kubenswrapper[4558]: I0120 17:52:14.395302 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1e9bc275-8603-4dc8-ab3f-9d63bea731a0-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:52:14 crc kubenswrapper[4558]: I0120 17:52:14.395359 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1e9bc275-8603-4dc8-ab3f-9d63bea731a0-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "1e9bc275-8603-4dc8-ab3f-9d63bea731a0" (UID: "1e9bc275-8603-4dc8-ab3f-9d63bea731a0"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:52:14 crc kubenswrapper[4558]: I0120 17:52:14.398020 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1e9bc275-8603-4dc8-ab3f-9d63bea731a0-kube-api-access-rql4j" (OuterVolumeSpecName: "kube-api-access-rql4j") pod "1e9bc275-8603-4dc8-ab3f-9d63bea731a0" (UID: "1e9bc275-8603-4dc8-ab3f-9d63bea731a0"). InnerVolumeSpecName "kube-api-access-rql4j". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:52:14 crc kubenswrapper[4558]: I0120 17:52:14.398478 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1e9bc275-8603-4dc8-ab3f-9d63bea731a0-scripts" (OuterVolumeSpecName: "scripts") pod "1e9bc275-8603-4dc8-ab3f-9d63bea731a0" (UID: "1e9bc275-8603-4dc8-ab3f-9d63bea731a0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:52:14 crc kubenswrapper[4558]: I0120 17:52:14.420519 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1e9bc275-8603-4dc8-ab3f-9d63bea731a0-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "1e9bc275-8603-4dc8-ab3f-9d63bea731a0" (UID: "1e9bc275-8603-4dc8-ab3f-9d63bea731a0"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:52:14 crc kubenswrapper[4558]: I0120 17:52:14.442378 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1e9bc275-8603-4dc8-ab3f-9d63bea731a0-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "1e9bc275-8603-4dc8-ab3f-9d63bea731a0" (UID: "1e9bc275-8603-4dc8-ab3f-9d63bea731a0"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:52:14 crc kubenswrapper[4558]: I0120 17:52:14.469448 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1e9bc275-8603-4dc8-ab3f-9d63bea731a0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1e9bc275-8603-4dc8-ab3f-9d63bea731a0" (UID: "1e9bc275-8603-4dc8-ab3f-9d63bea731a0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:52:14 crc kubenswrapper[4558]: I0120 17:52:14.484113 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1e9bc275-8603-4dc8-ab3f-9d63bea731a0-config-data" (OuterVolumeSpecName: "config-data") pod "1e9bc275-8603-4dc8-ab3f-9d63bea731a0" (UID: "1e9bc275-8603-4dc8-ab3f-9d63bea731a0"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:52:14 crc kubenswrapper[4558]: I0120 17:52:14.498440 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1e9bc275-8603-4dc8-ab3f-9d63bea731a0-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:52:14 crc kubenswrapper[4558]: I0120 17:52:14.498473 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1e9bc275-8603-4dc8-ab3f-9d63bea731a0-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:52:14 crc kubenswrapper[4558]: I0120 17:52:14.498490 4558 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e9bc275-8603-4dc8-ab3f-9d63bea731a0-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:52:14 crc kubenswrapper[4558]: I0120 17:52:14.498502 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1e9bc275-8603-4dc8-ab3f-9d63bea731a0-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:52:14 crc kubenswrapper[4558]: I0120 17:52:14.498513 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1e9bc275-8603-4dc8-ab3f-9d63bea731a0-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:52:14 crc kubenswrapper[4558]: I0120 17:52:14.498525 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e9bc275-8603-4dc8-ab3f-9d63bea731a0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:52:14 crc kubenswrapper[4558]: I0120 17:52:14.498538 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rql4j\" (UniqueName: \"kubernetes.io/projected/1e9bc275-8603-4dc8-ab3f-9d63bea731a0-kube-api-access-rql4j\") on node \"crc\" DevicePath \"\"" Jan 20 17:52:14 crc kubenswrapper[4558]: I0120 17:52:14.577435 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d824a74f-efe9-440c-8b03-f43303fb5923" path="/var/lib/kubelet/pods/d824a74f-efe9-440c-8b03-f43303fb5923/volumes" Jan 20 17:52:14 crc kubenswrapper[4558]: I0120 17:52:14.885615 4558 generic.go:334] "Generic (PLEG): container finished" podID="1e9bc275-8603-4dc8-ab3f-9d63bea731a0" containerID="d22d7692b054b8eebf218d2f206edb4ca1bee71cbfe4240fe94349103e4bffe2" exitCode=0 Jan 20 17:52:14 crc kubenswrapper[4558]: I0120 17:52:14.885711 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:52:14 crc kubenswrapper[4558]: I0120 17:52:14.885710 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"1e9bc275-8603-4dc8-ab3f-9d63bea731a0","Type":"ContainerDied","Data":"d22d7692b054b8eebf218d2f206edb4ca1bee71cbfe4240fe94349103e4bffe2"} Jan 20 17:52:14 crc kubenswrapper[4558]: I0120 17:52:14.885859 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"1e9bc275-8603-4dc8-ab3f-9d63bea731a0","Type":"ContainerDied","Data":"ade7f4eaebd3a69f30d55e10eadbddfc46e10a7178045bbae8facf16d27ffd69"} Jan 20 17:52:14 crc kubenswrapper[4558]: I0120 17:52:14.885924 4558 scope.go:117] "RemoveContainer" containerID="71b48e753eec9c326ad87e9648f3d2c2e185d09a418df5033c425c4192b92af4" Jan 20 17:52:14 crc kubenswrapper[4558]: I0120 17:52:14.920952 4558 scope.go:117] "RemoveContainer" containerID="1c5e6f1d1d51002b9cb986abdde79e37e78e644a6a60f676870d827de94f940e" Jan 20 17:52:14 crc kubenswrapper[4558]: I0120 17:52:14.921342 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:52:14 crc kubenswrapper[4558]: I0120 17:52:14.931262 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:52:14 crc kubenswrapper[4558]: I0120 17:52:14.941575 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:52:14 crc kubenswrapper[4558]: E0120 17:52:14.942047 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d824a74f-efe9-440c-8b03-f43303fb5923" containerName="proxy-server" Jan 20 17:52:14 crc kubenswrapper[4558]: I0120 17:52:14.942067 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d824a74f-efe9-440c-8b03-f43303fb5923" containerName="proxy-server" Jan 20 17:52:14 crc kubenswrapper[4558]: E0120 17:52:14.942086 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f955e7e5-de96-4598-96b9-76573c42d8e5" containerName="placement-api" Jan 20 17:52:14 crc kubenswrapper[4558]: I0120 17:52:14.942094 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f955e7e5-de96-4598-96b9-76573c42d8e5" containerName="placement-api" Jan 20 17:52:14 crc kubenswrapper[4558]: E0120 17:52:14.942108 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f955e7e5-de96-4598-96b9-76573c42d8e5" containerName="placement-log" Jan 20 17:52:14 crc kubenswrapper[4558]: I0120 17:52:14.942114 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f955e7e5-de96-4598-96b9-76573c42d8e5" containerName="placement-log" Jan 20 17:52:14 crc kubenswrapper[4558]: E0120 17:52:14.942137 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1e9bc275-8603-4dc8-ab3f-9d63bea731a0" containerName="proxy-httpd" Jan 20 17:52:14 crc kubenswrapper[4558]: I0120 17:52:14.942145 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1e9bc275-8603-4dc8-ab3f-9d63bea731a0" containerName="proxy-httpd" Jan 20 17:52:14 crc kubenswrapper[4558]: E0120 17:52:14.942157 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d824a74f-efe9-440c-8b03-f43303fb5923" containerName="proxy-httpd" Jan 20 17:52:14 crc kubenswrapper[4558]: I0120 17:52:14.942208 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d824a74f-efe9-440c-8b03-f43303fb5923" containerName="proxy-httpd" Jan 20 17:52:14 crc kubenswrapper[4558]: E0120 17:52:14.942225 4558 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1e9bc275-8603-4dc8-ab3f-9d63bea731a0" containerName="ceilometer-central-agent" Jan 20 17:52:14 crc kubenswrapper[4558]: I0120 17:52:14.942231 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1e9bc275-8603-4dc8-ab3f-9d63bea731a0" containerName="ceilometer-central-agent" Jan 20 17:52:14 crc kubenswrapper[4558]: E0120 17:52:14.942246 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1e9bc275-8603-4dc8-ab3f-9d63bea731a0" containerName="sg-core" Jan 20 17:52:14 crc kubenswrapper[4558]: I0120 17:52:14.942252 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1e9bc275-8603-4dc8-ab3f-9d63bea731a0" containerName="sg-core" Jan 20 17:52:14 crc kubenswrapper[4558]: E0120 17:52:14.942270 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1e9bc275-8603-4dc8-ab3f-9d63bea731a0" containerName="ceilometer-notification-agent" Jan 20 17:52:14 crc kubenswrapper[4558]: I0120 17:52:14.942275 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1e9bc275-8603-4dc8-ab3f-9d63bea731a0" containerName="ceilometer-notification-agent" Jan 20 17:52:14 crc kubenswrapper[4558]: I0120 17:52:14.942437 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="1e9bc275-8603-4dc8-ab3f-9d63bea731a0" containerName="ceilometer-central-agent" Jan 20 17:52:14 crc kubenswrapper[4558]: I0120 17:52:14.942453 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d824a74f-efe9-440c-8b03-f43303fb5923" containerName="proxy-httpd" Jan 20 17:52:14 crc kubenswrapper[4558]: I0120 17:52:14.942465 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f955e7e5-de96-4598-96b9-76573c42d8e5" containerName="placement-api" Jan 20 17:52:14 crc kubenswrapper[4558]: I0120 17:52:14.942471 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="1e9bc275-8603-4dc8-ab3f-9d63bea731a0" containerName="ceilometer-notification-agent" Jan 20 17:52:14 crc kubenswrapper[4558]: I0120 17:52:14.942479 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f955e7e5-de96-4598-96b9-76573c42d8e5" containerName="placement-log" Jan 20 17:52:14 crc kubenswrapper[4558]: I0120 17:52:14.942490 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d824a74f-efe9-440c-8b03-f43303fb5923" containerName="proxy-server" Jan 20 17:52:14 crc kubenswrapper[4558]: I0120 17:52:14.942498 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="1e9bc275-8603-4dc8-ab3f-9d63bea731a0" containerName="proxy-httpd" Jan 20 17:52:14 crc kubenswrapper[4558]: I0120 17:52:14.942507 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="1e9bc275-8603-4dc8-ab3f-9d63bea731a0" containerName="sg-core" Jan 20 17:52:14 crc kubenswrapper[4558]: I0120 17:52:14.944128 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:52:14 crc kubenswrapper[4558]: I0120 17:52:14.946931 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ceilometer-internal-svc" Jan 20 17:52:14 crc kubenswrapper[4558]: I0120 17:52:14.947114 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:52:14 crc kubenswrapper[4558]: I0120 17:52:14.947373 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:52:14 crc kubenswrapper[4558]: I0120 17:52:14.948939 4558 scope.go:117] "RemoveContainer" containerID="d22d7692b054b8eebf218d2f206edb4ca1bee71cbfe4240fe94349103e4bffe2" Jan 20 17:52:14 crc kubenswrapper[4558]: I0120 17:52:14.955730 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:52:14 crc kubenswrapper[4558]: I0120 17:52:14.977825 4558 scope.go:117] "RemoveContainer" containerID="4389e7ca5201cf3921b6868ade9268aebc56ca43ff8d253051310b83c33e7766" Jan 20 17:52:15 crc kubenswrapper[4558]: I0120 17:52:15.008525 4558 scope.go:117] "RemoveContainer" containerID="71b48e753eec9c326ad87e9648f3d2c2e185d09a418df5033c425c4192b92af4" Jan 20 17:52:15 crc kubenswrapper[4558]: E0120 17:52:15.008921 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"71b48e753eec9c326ad87e9648f3d2c2e185d09a418df5033c425c4192b92af4\": container with ID starting with 71b48e753eec9c326ad87e9648f3d2c2e185d09a418df5033c425c4192b92af4 not found: ID does not exist" containerID="71b48e753eec9c326ad87e9648f3d2c2e185d09a418df5033c425c4192b92af4" Jan 20 17:52:15 crc kubenswrapper[4558]: I0120 17:52:15.008967 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"71b48e753eec9c326ad87e9648f3d2c2e185d09a418df5033c425c4192b92af4"} err="failed to get container status \"71b48e753eec9c326ad87e9648f3d2c2e185d09a418df5033c425c4192b92af4\": rpc error: code = NotFound desc = could not find container \"71b48e753eec9c326ad87e9648f3d2c2e185d09a418df5033c425c4192b92af4\": container with ID starting with 71b48e753eec9c326ad87e9648f3d2c2e185d09a418df5033c425c4192b92af4 not found: ID does not exist" Jan 20 17:52:15 crc kubenswrapper[4558]: I0120 17:52:15.008997 4558 scope.go:117] "RemoveContainer" containerID="1c5e6f1d1d51002b9cb986abdde79e37e78e644a6a60f676870d827de94f940e" Jan 20 17:52:15 crc kubenswrapper[4558]: E0120 17:52:15.009457 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1c5e6f1d1d51002b9cb986abdde79e37e78e644a6a60f676870d827de94f940e\": container with ID starting with 1c5e6f1d1d51002b9cb986abdde79e37e78e644a6a60f676870d827de94f940e not found: ID does not exist" containerID="1c5e6f1d1d51002b9cb986abdde79e37e78e644a6a60f676870d827de94f940e" Jan 20 17:52:15 crc kubenswrapper[4558]: I0120 17:52:15.009483 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1c5e6f1d1d51002b9cb986abdde79e37e78e644a6a60f676870d827de94f940e"} err="failed to get container status \"1c5e6f1d1d51002b9cb986abdde79e37e78e644a6a60f676870d827de94f940e\": rpc error: code = NotFound desc = could not find container \"1c5e6f1d1d51002b9cb986abdde79e37e78e644a6a60f676870d827de94f940e\": container with ID starting with 
1c5e6f1d1d51002b9cb986abdde79e37e78e644a6a60f676870d827de94f940e not found: ID does not exist" Jan 20 17:52:15 crc kubenswrapper[4558]: I0120 17:52:15.009504 4558 scope.go:117] "RemoveContainer" containerID="d22d7692b054b8eebf218d2f206edb4ca1bee71cbfe4240fe94349103e4bffe2" Jan 20 17:52:15 crc kubenswrapper[4558]: E0120 17:52:15.010349 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d22d7692b054b8eebf218d2f206edb4ca1bee71cbfe4240fe94349103e4bffe2\": container with ID starting with d22d7692b054b8eebf218d2f206edb4ca1bee71cbfe4240fe94349103e4bffe2 not found: ID does not exist" containerID="d22d7692b054b8eebf218d2f206edb4ca1bee71cbfe4240fe94349103e4bffe2" Jan 20 17:52:15 crc kubenswrapper[4558]: I0120 17:52:15.010377 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d22d7692b054b8eebf218d2f206edb4ca1bee71cbfe4240fe94349103e4bffe2"} err="failed to get container status \"d22d7692b054b8eebf218d2f206edb4ca1bee71cbfe4240fe94349103e4bffe2\": rpc error: code = NotFound desc = could not find container \"d22d7692b054b8eebf218d2f206edb4ca1bee71cbfe4240fe94349103e4bffe2\": container with ID starting with d22d7692b054b8eebf218d2f206edb4ca1bee71cbfe4240fe94349103e4bffe2 not found: ID does not exist" Jan 20 17:52:15 crc kubenswrapper[4558]: I0120 17:52:15.010397 4558 scope.go:117] "RemoveContainer" containerID="4389e7ca5201cf3921b6868ade9268aebc56ca43ff8d253051310b83c33e7766" Jan 20 17:52:15 crc kubenswrapper[4558]: E0120 17:52:15.010836 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4389e7ca5201cf3921b6868ade9268aebc56ca43ff8d253051310b83c33e7766\": container with ID starting with 4389e7ca5201cf3921b6868ade9268aebc56ca43ff8d253051310b83c33e7766 not found: ID does not exist" containerID="4389e7ca5201cf3921b6868ade9268aebc56ca43ff8d253051310b83c33e7766" Jan 20 17:52:15 crc kubenswrapper[4558]: I0120 17:52:15.010897 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4389e7ca5201cf3921b6868ade9268aebc56ca43ff8d253051310b83c33e7766"} err="failed to get container status \"4389e7ca5201cf3921b6868ade9268aebc56ca43ff8d253051310b83c33e7766\": rpc error: code = NotFound desc = could not find container \"4389e7ca5201cf3921b6868ade9268aebc56ca43ff8d253051310b83c33e7766\": container with ID starting with 4389e7ca5201cf3921b6868ade9268aebc56ca43ff8d253051310b83c33e7766 not found: ID does not exist" Jan 20 17:52:15 crc kubenswrapper[4558]: I0120 17:52:15.121681 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cd2275b9-323a-4981-aadd-791b68003e3d-log-httpd\") pod \"ceilometer-0\" (UID: \"cd2275b9-323a-4981-aadd-791b68003e3d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:52:15 crc kubenswrapper[4558]: I0120 17:52:15.122156 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cd2275b9-323a-4981-aadd-791b68003e3d-run-httpd\") pod \"ceilometer-0\" (UID: \"cd2275b9-323a-4981-aadd-791b68003e3d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:52:15 crc kubenswrapper[4558]: I0120 17:52:15.122231 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/cd2275b9-323a-4981-aadd-791b68003e3d-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"cd2275b9-323a-4981-aadd-791b68003e3d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:52:15 crc kubenswrapper[4558]: I0120 17:52:15.122310 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q28qx\" (UniqueName: \"kubernetes.io/projected/cd2275b9-323a-4981-aadd-791b68003e3d-kube-api-access-q28qx\") pod \"ceilometer-0\" (UID: \"cd2275b9-323a-4981-aadd-791b68003e3d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:52:15 crc kubenswrapper[4558]: I0120 17:52:15.123870 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cd2275b9-323a-4981-aadd-791b68003e3d-scripts\") pod \"ceilometer-0\" (UID: \"cd2275b9-323a-4981-aadd-791b68003e3d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:52:15 crc kubenswrapper[4558]: I0120 17:52:15.123957 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd2275b9-323a-4981-aadd-791b68003e3d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"cd2275b9-323a-4981-aadd-791b68003e3d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:52:15 crc kubenswrapper[4558]: I0120 17:52:15.124211 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd2275b9-323a-4981-aadd-791b68003e3d-config-data\") pod \"ceilometer-0\" (UID: \"cd2275b9-323a-4981-aadd-791b68003e3d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:52:15 crc kubenswrapper[4558]: I0120 17:52:15.124339 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cd2275b9-323a-4981-aadd-791b68003e3d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"cd2275b9-323a-4981-aadd-791b68003e3d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:52:15 crc kubenswrapper[4558]: I0120 17:52:15.226443 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cd2275b9-323a-4981-aadd-791b68003e3d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"cd2275b9-323a-4981-aadd-791b68003e3d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:52:15 crc kubenswrapper[4558]: I0120 17:52:15.226525 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cd2275b9-323a-4981-aadd-791b68003e3d-log-httpd\") pod \"ceilometer-0\" (UID: \"cd2275b9-323a-4981-aadd-791b68003e3d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:52:15 crc kubenswrapper[4558]: I0120 17:52:15.226589 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cd2275b9-323a-4981-aadd-791b68003e3d-run-httpd\") pod \"ceilometer-0\" (UID: \"cd2275b9-323a-4981-aadd-791b68003e3d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:52:15 crc kubenswrapper[4558]: I0120 17:52:15.226619 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/cd2275b9-323a-4981-aadd-791b68003e3d-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: 
\"cd2275b9-323a-4981-aadd-791b68003e3d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:52:15 crc kubenswrapper[4558]: I0120 17:52:15.226660 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q28qx\" (UniqueName: \"kubernetes.io/projected/cd2275b9-323a-4981-aadd-791b68003e3d-kube-api-access-q28qx\") pod \"ceilometer-0\" (UID: \"cd2275b9-323a-4981-aadd-791b68003e3d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:52:15 crc kubenswrapper[4558]: I0120 17:52:15.226689 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cd2275b9-323a-4981-aadd-791b68003e3d-scripts\") pod \"ceilometer-0\" (UID: \"cd2275b9-323a-4981-aadd-791b68003e3d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:52:15 crc kubenswrapper[4558]: I0120 17:52:15.226712 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd2275b9-323a-4981-aadd-791b68003e3d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"cd2275b9-323a-4981-aadd-791b68003e3d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:52:15 crc kubenswrapper[4558]: I0120 17:52:15.226761 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd2275b9-323a-4981-aadd-791b68003e3d-config-data\") pod \"ceilometer-0\" (UID: \"cd2275b9-323a-4981-aadd-791b68003e3d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:52:15 crc kubenswrapper[4558]: I0120 17:52:15.227233 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cd2275b9-323a-4981-aadd-791b68003e3d-run-httpd\") pod \"ceilometer-0\" (UID: \"cd2275b9-323a-4981-aadd-791b68003e3d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:52:15 crc kubenswrapper[4558]: I0120 17:52:15.227327 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cd2275b9-323a-4981-aadd-791b68003e3d-log-httpd\") pod \"ceilometer-0\" (UID: \"cd2275b9-323a-4981-aadd-791b68003e3d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:52:15 crc kubenswrapper[4558]: I0120 17:52:15.233588 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cd2275b9-323a-4981-aadd-791b68003e3d-scripts\") pod \"ceilometer-0\" (UID: \"cd2275b9-323a-4981-aadd-791b68003e3d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:52:15 crc kubenswrapper[4558]: I0120 17:52:15.233623 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/cd2275b9-323a-4981-aadd-791b68003e3d-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"cd2275b9-323a-4981-aadd-791b68003e3d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:52:15 crc kubenswrapper[4558]: I0120 17:52:15.233678 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cd2275b9-323a-4981-aadd-791b68003e3d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"cd2275b9-323a-4981-aadd-791b68003e3d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:52:15 crc kubenswrapper[4558]: I0120 17:52:15.233768 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/cd2275b9-323a-4981-aadd-791b68003e3d-config-data\") pod \"ceilometer-0\" (UID: \"cd2275b9-323a-4981-aadd-791b68003e3d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:52:15 crc kubenswrapper[4558]: I0120 17:52:15.244227 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd2275b9-323a-4981-aadd-791b68003e3d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"cd2275b9-323a-4981-aadd-791b68003e3d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:52:15 crc kubenswrapper[4558]: I0120 17:52:15.244357 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q28qx\" (UniqueName: \"kubernetes.io/projected/cd2275b9-323a-4981-aadd-791b68003e3d-kube-api-access-q28qx\") pod \"ceilometer-0\" (UID: \"cd2275b9-323a-4981-aadd-791b68003e3d\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:52:15 crc kubenswrapper[4558]: I0120 17:52:15.271538 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:52:15 crc kubenswrapper[4558]: I0120 17:52:15.685791 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:52:15 crc kubenswrapper[4558]: I0120 17:52:15.904278 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"cd2275b9-323a-4981-aadd-791b68003e3d","Type":"ContainerStarted","Data":"08fb0cc064b0e71b60496981ed405bce8b4eb865b8fb44017bbdaeda99812fe7"} Jan 20 17:52:16 crc kubenswrapper[4558]: I0120 17:52:16.578929 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1e9bc275-8603-4dc8-ab3f-9d63bea731a0" path="/var/lib/kubelet/pods/1e9bc275-8603-4dc8-ab3f-9d63bea731a0/volumes" Jan 20 17:52:16 crc kubenswrapper[4558]: I0120 17:52:16.930116 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"cd2275b9-323a-4981-aadd-791b68003e3d","Type":"ContainerStarted","Data":"0da9214e9e0f3d0fbf917b83ce287f9004e70a70097ee2a38aa219e649adaa78"} Jan 20 17:52:17 crc kubenswrapper[4558]: I0120 17:52:17.565748 4558 scope.go:117] "RemoveContainer" containerID="b5d4742f71c4c1a0e14ccc61fbcd3c7bb3f43da7229787caafda16955ac74e7d" Jan 20 17:52:17 crc kubenswrapper[4558]: E0120 17:52:17.566296 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"nova-cell1-conductor-conductor\" with CrashLoopBackOff: \"back-off 40s restarting failed container=nova-cell1-conductor-conductor pod=nova-cell1-conductor-0_openstack-kuttl-tests(7c16f0c4-1462-4817-9f74-9e3d93193867)\"" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podUID="7c16f0c4-1462-4817-9f74-9e3d93193867" Jan 20 17:52:17 crc kubenswrapper[4558]: I0120 17:52:17.978789 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"cd2275b9-323a-4981-aadd-791b68003e3d","Type":"ContainerStarted","Data":"0833d5463f07e5954d4e2d0a3a43a4419ed8daf27634dc726b1fc4ce59dfc2f9"} Jan 20 17:52:18 crc kubenswrapper[4558]: I0120 17:52:18.988814 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"cd2275b9-323a-4981-aadd-791b68003e3d","Type":"ContainerStarted","Data":"71e0601a2bf0479fed8749492b6488623cab0139872e72819a3a90baa127e93a"} Jan 20 17:52:21 crc kubenswrapper[4558]: I0120 17:52:21.027253 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"cd2275b9-323a-4981-aadd-791b68003e3d","Type":"ContainerStarted","Data":"7c70db500708c288ba013d8ce9dfe939d968f0b17f97ccf964db23badf9b4e95"} Jan 20 17:52:21 crc kubenswrapper[4558]: I0120 17:52:21.027920 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:52:21 crc kubenswrapper[4558]: I0120 17:52:21.055819 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=3.134038548 podStartE2EDuration="7.055796952s" podCreationTimestamp="2026-01-20 17:52:14 +0000 UTC" firstStartedPulling="2026-01-20 17:52:15.688552341 +0000 UTC m=+4229.448890297" lastFinishedPulling="2026-01-20 17:52:19.610310734 +0000 UTC m=+4233.370648701" observedRunningTime="2026-01-20 17:52:21.046108254 +0000 UTC m=+4234.806446211" watchObservedRunningTime="2026-01-20 17:52:21.055796952 +0000 UTC m=+4234.816134909" Jan 20 17:52:23 crc kubenswrapper[4558]: I0120 17:52:23.566266 4558 scope.go:117] "RemoveContainer" containerID="4ea7fa61d8b21007a044345acec89fcebe56c2921cf0f76ff2002f44bdc88ede" Jan 20 17:52:23 crc kubenswrapper[4558]: E0120 17:52:23.566849 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:52:31 crc kubenswrapper[4558]: I0120 17:52:31.565798 4558 scope.go:117] "RemoveContainer" containerID="b5d4742f71c4c1a0e14ccc61fbcd3c7bb3f43da7229787caafda16955ac74e7d" Jan 20 17:52:32 crc kubenswrapper[4558]: I0120 17:52:32.143962 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"7c16f0c4-1462-4817-9f74-9e3d93193867","Type":"ContainerStarted","Data":"b4a7a63ae5b4336f1c55496b20962afe4a27040b0e22d712c664548644815e60"} Jan 20 17:52:32 crc kubenswrapper[4558]: I0120 17:52:32.144570 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:52:34 crc kubenswrapper[4558]: I0120 17:52:34.566433 4558 scope.go:117] "RemoveContainer" containerID="4ea7fa61d8b21007a044345acec89fcebe56c2921cf0f76ff2002f44bdc88ede" Jan 20 17:52:34 crc kubenswrapper[4558]: E0120 17:52:34.567091 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:52:36 crc kubenswrapper[4558]: I0120 17:52:36.466434 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:52:45 crc kubenswrapper[4558]: I0120 17:52:45.280917 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:52:45 crc kubenswrapper[4558]: I0120 17:52:45.566920 4558 scope.go:117] "RemoveContainer" 
containerID="4ea7fa61d8b21007a044345acec89fcebe56c2921cf0f76ff2002f44bdc88ede" Jan 20 17:52:45 crc kubenswrapper[4558]: E0120 17:52:45.567335 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:52:57 crc kubenswrapper[4558]: I0120 17:52:57.566729 4558 scope.go:117] "RemoveContainer" containerID="4ea7fa61d8b21007a044345acec89fcebe56c2921cf0f76ff2002f44bdc88ede" Jan 20 17:52:57 crc kubenswrapper[4558]: E0120 17:52:57.567734 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:53:00 crc kubenswrapper[4558]: I0120 17:53:00.739710 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-0"] Jan 20 17:53:00 crc kubenswrapper[4558]: I0120 17:53:00.766329 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:53:00 crc kubenswrapper[4558]: I0120 17:53:00.779253 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:53:00 crc kubenswrapper[4558]: E0120 17:53:00.857310 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Jan 20 17:53:00 crc kubenswrapper[4558]: E0120 17:53:00.857381 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/c4a11c40-a157-4a28-b1a3-60c211d1d0bf-config-data podName:c4a11c40-a157-4a28-b1a3-60c211d1d0bf nodeName:}" failed. No retries permitted until 2026-01-20 17:53:01.357363168 +0000 UTC m=+4275.117701134 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/c4a11c40-a157-4a28-b1a3-60c211d1d0bf-config-data") pod "rabbitmq-cell1-server-0" (UID: "c4a11c40-a157-4a28-b1a3-60c211d1d0bf") : configmap "rabbitmq-cell1-config-data" not found Jan 20 17:53:00 crc kubenswrapper[4558]: I0120 17:53:00.926947 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/root-account-create-update-bxrps"] Jan 20 17:53:00 crc kubenswrapper[4558]: E0120 17:53:00.927461 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10d0164b-faa3-408c-9b22-25d1cff2c4e3" containerName="openstackclient" Jan 20 17:53:00 crc kubenswrapper[4558]: I0120 17:53:00.927481 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="10d0164b-faa3-408c-9b22-25d1cff2c4e3" containerName="openstackclient" Jan 20 17:53:00 crc kubenswrapper[4558]: I0120 17:53:00.927675 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="10d0164b-faa3-408c-9b22-25d1cff2c4e3" containerName="openstackclient" Jan 20 17:53:00 crc kubenswrapper[4558]: I0120 17:53:00.928345 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-bxrps" Jan 20 17:53:00 crc kubenswrapper[4558]: I0120 17:53:00.934762 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"openstack-mariadb-root-db-secret" Jan 20 17:53:00 crc kubenswrapper[4558]: I0120 17:53:00.959141 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8101ebb9-ace1-4242-8c8b-698307c6be29-operator-scripts\") pod \"root-account-create-update-bxrps\" (UID: \"8101ebb9-ace1-4242-8c8b-698307c6be29\") " pod="openstack-kuttl-tests/root-account-create-update-bxrps" Jan 20 17:53:00 crc kubenswrapper[4558]: I0120 17:53:00.959584 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9jwrh\" (UniqueName: \"kubernetes.io/projected/8101ebb9-ace1-4242-8c8b-698307c6be29-kube-api-access-9jwrh\") pod \"root-account-create-update-bxrps\" (UID: \"8101ebb9-ace1-4242-8c8b-698307c6be29\") " pod="openstack-kuttl-tests/root-account-create-update-bxrps" Jan 20 17:53:00 crc kubenswrapper[4558]: I0120 17:53:00.959751 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-bxrps"] Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.004784 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-v9ld9"] Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.028242 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.028588 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovn-northd-0" podUID="04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef" containerName="ovn-northd" containerID="cri-o://1035f6c74f13c042848a0da43ccb550222db3c6c766f2b87733c2701b6ec1a2e" gracePeriod=30 Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.029092 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovn-northd-0" podUID="04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef" containerName="openstack-network-exporter" containerID="cri-o://4b9890fbbd9a3bfc034db961ae88fefa150c5ec801cba28d028682bd258b5ebd" gracePeriod=30 Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.037088 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-v9ld9"] Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.050554 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.051353 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovsdbserver-nb-0" podUID="fc65f7eb-e162-44ae-8136-bdcb6baa7c0f" containerName="openstack-network-exporter" containerID="cri-o://c90c4608d07fcc026b1da8048f2ccc5e2af6fd92d05975c8c1521388f7341569" gracePeriod=300 Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.061877 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8101ebb9-ace1-4242-8c8b-698307c6be29-operator-scripts\") pod \"root-account-create-update-bxrps\" (UID: \"8101ebb9-ace1-4242-8c8b-698307c6be29\") " pod="openstack-kuttl-tests/root-account-create-update-bxrps" Jan 20 17:53:01 crc 
kubenswrapper[4558]: I0120 17:53:01.061944 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9jwrh\" (UniqueName: \"kubernetes.io/projected/8101ebb9-ace1-4242-8c8b-698307c6be29-kube-api-access-9jwrh\") pod \"root-account-create-update-bxrps\" (UID: \"8101ebb9-ace1-4242-8c8b-698307c6be29\") " pod="openstack-kuttl-tests/root-account-create-update-bxrps" Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.062629 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8101ebb9-ace1-4242-8c8b-698307c6be29-operator-scripts\") pod \"root-account-create-update-bxrps\" (UID: \"8101ebb9-ace1-4242-8c8b-698307c6be29\") " pod="openstack-kuttl-tests/root-account-create-update-bxrps" Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.097674 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9jwrh\" (UniqueName: \"kubernetes.io/projected/8101ebb9-ace1-4242-8c8b-698307c6be29-kube-api-access-9jwrh\") pod \"root-account-create-update-bxrps\" (UID: \"8101ebb9-ace1-4242-8c8b-698307c6be29\") " pod="openstack-kuttl-tests/root-account-create-update-bxrps" Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.138264 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-db-sync-mr5dl"] Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.149723 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/placement-db-sync-mr5dl"] Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.202351 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovsdbserver-nb-0" podUID="fc65f7eb-e162-44ae-8136-bdcb6baa7c0f" containerName="ovsdbserver-nb" containerID="cri-o://5f7c09a23fc3d9fe92c0898786808da9cd4326e33ccea9ba8df24f4b5c6ab067" gracePeriod=300 Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.234514 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-a1e2-account-create-update-hfpfh"] Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.236030 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-a1e2-account-create-update-hfpfh" Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.261409 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-db-secret" Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.283531 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-bxrps" Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.329696 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-a1e2-account-create-update-hfpfh"] Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.360785 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-f275-account-create-update-5pb7k"] Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.362784 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-f275-account-create-update-5pb7k" Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.369660 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-api-db-secret" Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.370280 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-f275-account-create-update-5pb7k"] Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.386133 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell0-a0eb-account-create-update-75hkz"] Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.397108 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-a0eb-account-create-update-75hkz" Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.399517 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-db-secret" Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.407425 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-d1d9-account-create-update-g54ff"] Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.409556 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-d1d9-account-create-update-g54ff" Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.414682 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-db-secret" Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.420932 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mf9k4\" (UniqueName: \"kubernetes.io/projected/3eabae3c-0014-4d8e-9393-7cb934ba9fa4-kube-api-access-mf9k4\") pod \"nova-cell1-d1d9-account-create-update-g54ff\" (UID: \"3eabae3c-0014-4d8e-9393-7cb934ba9fa4\") " pod="openstack-kuttl-tests/nova-cell1-d1d9-account-create-update-g54ff" Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.420980 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hlsqg\" (UniqueName: \"kubernetes.io/projected/01f39889-c29f-4e28-98fa-bc99322f761c-kube-api-access-hlsqg\") pod \"nova-api-f275-account-create-update-5pb7k\" (UID: \"01f39889-c29f-4e28-98fa-bc99322f761c\") " pod="openstack-kuttl-tests/nova-api-f275-account-create-update-5pb7k" Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.421010 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wgzn6\" (UniqueName: \"kubernetes.io/projected/6d80c5b5-880f-4d59-9287-334023ec6ce9-kube-api-access-wgzn6\") pod \"nova-cell0-a0eb-account-create-update-75hkz\" (UID: \"6d80c5b5-880f-4d59-9287-334023ec6ce9\") " pod="openstack-kuttl-tests/nova-cell0-a0eb-account-create-update-75hkz" Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.421810 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6d80c5b5-880f-4d59-9287-334023ec6ce9-operator-scripts\") pod \"nova-cell0-a0eb-account-create-update-75hkz\" (UID: \"6d80c5b5-880f-4d59-9287-334023ec6ce9\") " pod="openstack-kuttl-tests/nova-cell0-a0eb-account-create-update-75hkz" Jan 20 17:53:01 crc kubenswrapper[4558]: E0120 17:53:01.422365 4558 configmap.go:193] Couldn't get 
configMap openstack-kuttl-tests/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Jan 20 17:53:01 crc kubenswrapper[4558]: E0120 17:53:01.422450 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/c4a11c40-a157-4a28-b1a3-60c211d1d0bf-config-data podName:c4a11c40-a157-4a28-b1a3-60c211d1d0bf nodeName:}" failed. No retries permitted until 2026-01-20 17:53:02.422411952 +0000 UTC m=+4276.182749909 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/c4a11c40-a157-4a28-b1a3-60c211d1d0bf-config-data") pod "rabbitmq-cell1-server-0" (UID: "c4a11c40-a157-4a28-b1a3-60c211d1d0bf") : configmap "rabbitmq-cell1-config-data" not found Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.422483 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tmjgj\" (UniqueName: \"kubernetes.io/projected/799d2689-2fcd-4bc5-ae70-1d0ae1194c76-kube-api-access-tmjgj\") pod \"neutron-a1e2-account-create-update-hfpfh\" (UID: \"799d2689-2fcd-4bc5-ae70-1d0ae1194c76\") " pod="openstack-kuttl-tests/neutron-a1e2-account-create-update-hfpfh" Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.422549 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3eabae3c-0014-4d8e-9393-7cb934ba9fa4-operator-scripts\") pod \"nova-cell1-d1d9-account-create-update-g54ff\" (UID: \"3eabae3c-0014-4d8e-9393-7cb934ba9fa4\") " pod="openstack-kuttl-tests/nova-cell1-d1d9-account-create-update-g54ff" Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.422630 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/799d2689-2fcd-4bc5-ae70-1d0ae1194c76-operator-scripts\") pod \"neutron-a1e2-account-create-update-hfpfh\" (UID: \"799d2689-2fcd-4bc5-ae70-1d0ae1194c76\") " pod="openstack-kuttl-tests/neutron-a1e2-account-create-update-hfpfh" Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.428041 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/01f39889-c29f-4e28-98fa-bc99322f761c-operator-scripts\") pod \"nova-api-f275-account-create-update-5pb7k\" (UID: \"01f39889-c29f-4e28-98fa-bc99322f761c\") " pod="openstack-kuttl-tests/nova-api-f275-account-create-update-5pb7k" Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.442873 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-a0eb-account-create-update-75hkz"] Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.479840 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovn-northd-0_04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef/ovn-northd/0.log" Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.479890 4558 generic.go:334] "Generic (PLEG): container finished" podID="04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef" containerID="4b9890fbbd9a3bfc034db961ae88fefa150c5ec801cba28d028682bd258b5ebd" exitCode=2 Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.479910 4558 generic.go:334] "Generic (PLEG): container finished" podID="04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef" containerID="1035f6c74f13c042848a0da43ccb550222db3c6c766f2b87733c2701b6ec1a2e" exitCode=143 Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.480245 
4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef","Type":"ContainerDied","Data":"4b9890fbbd9a3bfc034db961ae88fefa150c5ec801cba28d028682bd258b5ebd"} Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.480289 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef","Type":"ContainerDied","Data":"1035f6c74f13c042848a0da43ccb550222db3c6c766f2b87733c2701b6ec1a2e"} Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.488216 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-d1d9-account-create-update-g54ff"] Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.497377 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-nb-0_fc65f7eb-e162-44ae-8136-bdcb6baa7c0f/ovsdbserver-nb/0.log" Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.497426 4558 generic.go:334] "Generic (PLEG): container finished" podID="fc65f7eb-e162-44ae-8136-bdcb6baa7c0f" containerID="c90c4608d07fcc026b1da8048f2ccc5e2af6fd92d05975c8c1521388f7341569" exitCode=2 Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.497442 4558 generic.go:334] "Generic (PLEG): container finished" podID="fc65f7eb-e162-44ae-8136-bdcb6baa7c0f" containerID="5f7c09a23fc3d9fe92c0898786808da9cd4326e33ccea9ba8df24f4b5c6ab067" exitCode=143 Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.497614 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/openstackclient" podUID="10d0164b-faa3-408c-9b22-25d1cff2c4e3" containerName="openstackclient" containerID="cri-o://ff027b2e7e7f3a64a246665fc1fbace52470e2c0c26cabfe23274befd6dfbcd9" gracePeriod=2 Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.497701 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"fc65f7eb-e162-44ae-8136-bdcb6baa7c0f","Type":"ContainerDied","Data":"c90c4608d07fcc026b1da8048f2ccc5e2af6fd92d05975c8c1521388f7341569"} Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.497737 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"fc65f7eb-e162-44ae-8136-bdcb6baa7c0f","Type":"ContainerDied","Data":"5f7c09a23fc3d9fe92c0898786808da9cd4326e33ccea9ba8df24f4b5c6ab067"} Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.533507 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mf9k4\" (UniqueName: \"kubernetes.io/projected/3eabae3c-0014-4d8e-9393-7cb934ba9fa4-kube-api-access-mf9k4\") pod \"nova-cell1-d1d9-account-create-update-g54ff\" (UID: \"3eabae3c-0014-4d8e-9393-7cb934ba9fa4\") " pod="openstack-kuttl-tests/nova-cell1-d1d9-account-create-update-g54ff" Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.543335 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hlsqg\" (UniqueName: \"kubernetes.io/projected/01f39889-c29f-4e28-98fa-bc99322f761c-kube-api-access-hlsqg\") pod \"nova-api-f275-account-create-update-5pb7k\" (UID: \"01f39889-c29f-4e28-98fa-bc99322f761c\") " pod="openstack-kuttl-tests/nova-api-f275-account-create-update-5pb7k" Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.543395 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wgzn6\" (UniqueName: 
\"kubernetes.io/projected/6d80c5b5-880f-4d59-9287-334023ec6ce9-kube-api-access-wgzn6\") pod \"nova-cell0-a0eb-account-create-update-75hkz\" (UID: \"6d80c5b5-880f-4d59-9287-334023ec6ce9\") " pod="openstack-kuttl-tests/nova-cell0-a0eb-account-create-update-75hkz" Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.543591 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6d80c5b5-880f-4d59-9287-334023ec6ce9-operator-scripts\") pod \"nova-cell0-a0eb-account-create-update-75hkz\" (UID: \"6d80c5b5-880f-4d59-9287-334023ec6ce9\") " pod="openstack-kuttl-tests/nova-cell0-a0eb-account-create-update-75hkz" Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.543729 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tmjgj\" (UniqueName: \"kubernetes.io/projected/799d2689-2fcd-4bc5-ae70-1d0ae1194c76-kube-api-access-tmjgj\") pod \"neutron-a1e2-account-create-update-hfpfh\" (UID: \"799d2689-2fcd-4bc5-ae70-1d0ae1194c76\") " pod="openstack-kuttl-tests/neutron-a1e2-account-create-update-hfpfh" Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.543762 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3eabae3c-0014-4d8e-9393-7cb934ba9fa4-operator-scripts\") pod \"nova-cell1-d1d9-account-create-update-g54ff\" (UID: \"3eabae3c-0014-4d8e-9393-7cb934ba9fa4\") " pod="openstack-kuttl-tests/nova-cell1-d1d9-account-create-update-g54ff" Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.543823 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/799d2689-2fcd-4bc5-ae70-1d0ae1194c76-operator-scripts\") pod \"neutron-a1e2-account-create-update-hfpfh\" (UID: \"799d2689-2fcd-4bc5-ae70-1d0ae1194c76\") " pod="openstack-kuttl-tests/neutron-a1e2-account-create-update-hfpfh" Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.543971 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/01f39889-c29f-4e28-98fa-bc99322f761c-operator-scripts\") pod \"nova-api-f275-account-create-update-5pb7k\" (UID: \"01f39889-c29f-4e28-98fa-bc99322f761c\") " pod="openstack-kuttl-tests/nova-api-f275-account-create-update-5pb7k" Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.544769 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/01f39889-c29f-4e28-98fa-bc99322f761c-operator-scripts\") pod \"nova-api-f275-account-create-update-5pb7k\" (UID: \"01f39889-c29f-4e28-98fa-bc99322f761c\") " pod="openstack-kuttl-tests/nova-api-f275-account-create-update-5pb7k" Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.545704 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6d80c5b5-880f-4d59-9287-334023ec6ce9-operator-scripts\") pod \"nova-cell0-a0eb-account-create-update-75hkz\" (UID: \"6d80c5b5-880f-4d59-9287-334023ec6ce9\") " pod="openstack-kuttl-tests/nova-cell0-a0eb-account-create-update-75hkz" Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.546641 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3eabae3c-0014-4d8e-9393-7cb934ba9fa4-operator-scripts\") pod 
\"nova-cell1-d1d9-account-create-update-g54ff\" (UID: \"3eabae3c-0014-4d8e-9393-7cb934ba9fa4\") " pod="openstack-kuttl-tests/nova-cell1-d1d9-account-create-update-g54ff" Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.547314 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/799d2689-2fcd-4bc5-ae70-1d0ae1194c76-operator-scripts\") pod \"neutron-a1e2-account-create-update-hfpfh\" (UID: \"799d2689-2fcd-4bc5-ae70-1d0ae1194c76\") " pod="openstack-kuttl-tests/neutron-a1e2-account-create-update-hfpfh" Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.557054 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-e4fd-account-create-update-zswcv"] Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.560988 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mf9k4\" (UniqueName: \"kubernetes.io/projected/3eabae3c-0014-4d8e-9393-7cb934ba9fa4-kube-api-access-mf9k4\") pod \"nova-cell1-d1d9-account-create-update-g54ff\" (UID: \"3eabae3c-0014-4d8e-9393-7cb934ba9fa4\") " pod="openstack-kuttl-tests/nova-cell1-d1d9-account-create-update-g54ff" Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.567633 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-e4fd-account-create-update-zswcv"] Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.569924 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-db-sync-rht6q"] Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.576369 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-db-sync-rht6q"] Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.586006 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tmjgj\" (UniqueName: \"kubernetes.io/projected/799d2689-2fcd-4bc5-ae70-1d0ae1194c76-kube-api-access-tmjgj\") pod \"neutron-a1e2-account-create-update-hfpfh\" (UID: \"799d2689-2fcd-4bc5-ae70-1d0ae1194c76\") " pod="openstack-kuttl-tests/neutron-a1e2-account-create-update-hfpfh" Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.587892 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.588770 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovsdbserver-sb-0" podUID="634825c8-6d4e-42a0-83c6-c83d105a2a7f" containerName="openstack-network-exporter" containerID="cri-o://b80ad06d7261b9a0d3933816a91a930f6b5ef47fe5065a1e8d945d221fa487f7" gracePeriod=300 Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.596712 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.596819 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wgzn6\" (UniqueName: \"kubernetes.io/projected/6d80c5b5-880f-4d59-9287-334023ec6ce9-kube-api-access-wgzn6\") pod \"nova-cell0-a0eb-account-create-update-75hkz\" (UID: \"6d80c5b5-880f-4d59-9287-334023ec6ce9\") " pod="openstack-kuttl-tests/nova-cell0-a0eb-account-create-update-75hkz" Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.604628 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hlsqg\" (UniqueName: 
\"kubernetes.io/projected/01f39889-c29f-4e28-98fa-bc99322f761c-kube-api-access-hlsqg\") pod \"nova-api-f275-account-create-update-5pb7k\" (UID: \"01f39889-c29f-4e28-98fa-bc99322f761c\") " pod="openstack-kuttl-tests/nova-api-f275-account-create-update-5pb7k" Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.620305 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-db-sync-qcsmp"] Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.620526 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-a1e2-account-create-update-hfpfh" Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.643514 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-db-sync-qcsmp"] Jan 20 17:53:01 crc kubenswrapper[4558]: E0120 17:53:01.657395 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:53:01 crc kubenswrapper[4558]: E0120 17:53:01.657468 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/49822b01-63d0-40d3-b604-7980891b0683-combined-ca-bundle podName:49822b01-63d0-40d3-b604-7980891b0683 nodeName:}" failed. No retries permitted until 2026-01-20 17:53:02.157448671 +0000 UTC m=+4275.917786637 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/49822b01-63d0-40d3-b604-7980891b0683-combined-ca-bundle") pod "kube-state-metrics-0" (UID: "49822b01-63d0-40d3-b604-7980891b0683") : secret "combined-ca-bundle" not found Jan 20 17:53:01 crc kubenswrapper[4558]: E0120 17:53:01.658098 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-kube-state-metrics-svc: secret "cert-kube-state-metrics-svc" not found Jan 20 17:53:01 crc kubenswrapper[4558]: E0120 17:53:01.658152 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/49822b01-63d0-40d3-b604-7980891b0683-kube-state-metrics-tls-certs podName:49822b01-63d0-40d3-b604-7980891b0683 nodeName:}" failed. No retries permitted until 2026-01-20 17:53:02.158137465 +0000 UTC m=+4275.918475432 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-state-metrics-tls-certs" (UniqueName: "kubernetes.io/secret/49822b01-63d0-40d3-b604-7980891b0683-kube-state-metrics-tls-certs") pod "kube-state-metrics-0" (UID: "49822b01-63d0-40d3-b604-7980891b0683") : secret "cert-kube-state-metrics-svc" not found Jan 20 17:53:01 crc kubenswrapper[4558]: E0120 17:53:01.658428 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 20 17:53:01 crc kubenswrapper[4558]: E0120 17:53:01.658461 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/088e44cc-3515-4736-aa46-721774902209-config-data podName:088e44cc-3515-4736-aa46-721774902209 nodeName:}" failed. No retries permitted until 2026-01-20 17:53:02.158452778 +0000 UTC m=+4275.918790745 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/088e44cc-3515-4736-aa46-721774902209-config-data") pod "rabbitmq-server-0" (UID: "088e44cc-3515-4736-aa46-721774902209") : configmap "rabbitmq-config-data" not found Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.690615 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-a1e2-account-create-update-rmdlk"] Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.695698 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-a1e2-account-create-update-rmdlk"] Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.699374 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovsdbserver-sb-0" podUID="634825c8-6d4e-42a0-83c6-c83d105a2a7f" containerName="ovsdbserver-sb" containerID="cri-o://2f7bc8bf8674f0da29404ad2947529694e0eaec86862f2864ca592a29b1fe552" gracePeriod=300 Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.813298 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovn-northd-0_04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef/ovn-northd/0.log" Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.813916 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.833738 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-f275-account-create-update-5pb7k" Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.845329 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-a0eb-account-create-update-75hkz" Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.862662 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-d1d9-account-create-update-g54ff" Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.929602 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-f275-account-create-update-nwv6j"] Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.963587 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-f275-account-create-update-nwv6j"] Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.966013 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef-ovn-rundir\") pod \"04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef\" (UID: \"04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef\") " Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.966074 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7szz6\" (UniqueName: \"kubernetes.io/projected/04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef-kube-api-access-7szz6\") pod \"04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef\" (UID: \"04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef\") " Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.966183 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef-combined-ca-bundle\") pod \"04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef\" (UID: \"04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef\") " Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.966314 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef-metrics-certs-tls-certs\") pod \"04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef\" (UID: \"04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef\") " Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.966373 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef-config\") pod \"04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef\" (UID: \"04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef\") " Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.966495 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef-ovn-northd-tls-certs\") pod \"04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef\" (UID: \"04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef\") " Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.966550 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef-scripts\") pod \"04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef\" (UID: \"04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef\") " Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.966666 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef-ovn-rundir" (OuterVolumeSpecName: "ovn-rundir") pod "04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef" (UID: "04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef"). InnerVolumeSpecName "ovn-rundir". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.967294 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef-scripts" (OuterVolumeSpecName: "scripts") pod "04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef" (UID: "04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.967364 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef-config" (OuterVolumeSpecName: "config") pod "04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef" (UID: "04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.967386 4558 reconciler_common.go:293] "Volume detached for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef-ovn-rundir\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.980671 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef-kube-api-access-7szz6" (OuterVolumeSpecName: "kube-api-access-7szz6") pod "04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef" (UID: "04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef"). InnerVolumeSpecName "kube-api-access-7szz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.994158 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-db-sync-bdw77"] Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.996910 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-nb-0_fc65f7eb-e162-44ae-8136-bdcb6baa7c0f/ovsdbserver-nb/0.log" Jan 20 17:53:01 crc kubenswrapper[4558]: I0120 17:53:01.996992 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.003280 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-db-sync-bdw77"] Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.020104 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-a0eb-account-create-update-hpcq8"] Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.043685 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell0-a0eb-account-create-update-hpcq8"] Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.053987 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef" (UID: "04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.063231 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-658f45b9f4-tx296"] Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.066264 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/placement-658f45b9f4-tx296" podUID="0c9fc067-cb23-4f72-b928-5447fb5182c1" containerName="placement-log" containerID="cri-o://a277d8fbf0b081fa88cc6f698d169256895a7e996cb99dec573f1f32b2c3f74c" gracePeriod=30 Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.066742 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/placement-658f45b9f4-tx296" podUID="0c9fc067-cb23-4f72-b928-5447fb5182c1" containerName="placement-api" containerID="cri-o://e300c235b0338f59c68e0edd429de09fcac767478cd41a3614d3e5af5e2760e6" gracePeriod=30 Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.069376 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.069394 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7szz6\" (UniqueName: \"kubernetes.io/projected/04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef-kube-api-access-7szz6\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.069405 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.069417 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.083678 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-d1d9-account-create-update-6dzmm"] Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.090800 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-d1d9-account-create-update-6dzmm"] Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.105306 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-db-sync-s4glp"] Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.113069 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-db-sync-s4glp"] Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.117015 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef-ovn-northd-tls-certs" (OuterVolumeSpecName: "ovn-northd-tls-certs") pod "04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef" (UID: "04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef"). InnerVolumeSpecName "ovn-northd-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.162011 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef" (UID: "04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.168947 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-storage-0"] Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.170238 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" containerName="account-server" containerID="cri-o://2d0d3c4c9084f5b47b5268a7e1d20896d7cf8d59d93e03270dfa21fcb273c377" gracePeriod=30 Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.170500 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fc65f7eb-e162-44ae-8136-bdcb6baa7c0f-config\") pod \"fc65f7eb-e162-44ae-8136-bdcb6baa7c0f\" (UID: \"fc65f7eb-e162-44ae-8136-bdcb6baa7c0f\") " Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.170595 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-nb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"fc65f7eb-e162-44ae-8136-bdcb6baa7c0f\" (UID: \"fc65f7eb-e162-44ae-8136-bdcb6baa7c0f\") " Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.170690 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/fc65f7eb-e162-44ae-8136-bdcb6baa7c0f-metrics-certs-tls-certs\") pod \"fc65f7eb-e162-44ae-8136-bdcb6baa7c0f\" (UID: \"fc65f7eb-e162-44ae-8136-bdcb6baa7c0f\") " Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.170760 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fc65f7eb-e162-44ae-8136-bdcb6baa7c0f-scripts\") pod \"fc65f7eb-e162-44ae-8136-bdcb6baa7c0f\" (UID: \"fc65f7eb-e162-44ae-8136-bdcb6baa7c0f\") " Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.171323 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" containerName="container-updater" containerID="cri-o://d32a8a2e6495346715f9634ab4b1b4bc18d96e7451b5223fe9976afe7dd5d7c2" gracePeriod=30 Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.171500 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" containerName="swift-recon-cron" containerID="cri-o://3fca1090ab44eb73ee4987d4fe77236fd5fda45c847b185be75610aec8208529" gracePeriod=30 Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.171559 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" containerName="rsync" containerID="cri-o://049005bceb3d9c7f92a80adfa58232b99be025be046ebfa5fdd30c2501029dd6" gracePeriod=30 Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 
17:53:02.171598 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" containerName="object-expirer" containerID="cri-o://e38f2eedbeb8bee77176fac3f5c4ecdcf1aef7aa98e160b84718ab6bb18bb8a4" gracePeriod=30 Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.171634 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" containerName="object-updater" containerID="cri-o://a6e878f0584cff110a93e31a31995a6fbd71d401815d3d94bb3700d40da94020" gracePeriod=30 Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.171669 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" containerName="object-auditor" containerID="cri-o://0babdc19d278b5edc8070926b4a7ce96ef2d679f28771d60ffadb5ba187b8117" gracePeriod=30 Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.171700 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" containerName="object-replicator" containerID="cri-o://814558d14f3629d14a008df6a38667804146fe090de98d44f2c3c2447d69c787" gracePeriod=30 Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.171732 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" containerName="object-server" containerID="cri-o://5e23628ab2cc3c257412804853dd2671a2b679976e1f78c61ccacc4929dc21cd" gracePeriod=30 Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.171805 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" containerName="account-reaper" containerID="cri-o://9f25ee1c308a320e8a43a0bc03a0bfd7c3d57cf22c17c10b894b0f930d53afe2" gracePeriod=30 Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.171841 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" containerName="container-auditor" containerID="cri-o://ce8b75288eb38358817832bfb419010893a3a4a082fc83e9ed669e3fe1c9a795" gracePeriod=30 Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.171880 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" containerName="container-replicator" containerID="cri-o://19a88da3e14609ec200568f698ff68961ad644b5fa7a972150ff49ec0d443ab1" gracePeriod=30 Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.171906 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/fc65f7eb-e162-44ae-8136-bdcb6baa7c0f-ovsdbserver-nb-tls-certs\") pod \"fc65f7eb-e162-44ae-8136-bdcb6baa7c0f\" (UID: \"fc65f7eb-e162-44ae-8136-bdcb6baa7c0f\") " Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.171921 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" containerName="container-server" 
containerID="cri-o://4842ce500b116933451ba6ea6d30de4651175498d4768796b95de36768b44fc5" gracePeriod=30 Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.171952 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/fc65f7eb-e162-44ae-8136-bdcb6baa7c0f-ovsdb-rundir\") pod \"fc65f7eb-e162-44ae-8136-bdcb6baa7c0f\" (UID: \"fc65f7eb-e162-44ae-8136-bdcb6baa7c0f\") " Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.171976 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc65f7eb-e162-44ae-8136-bdcb6baa7c0f-combined-ca-bundle\") pod \"fc65f7eb-e162-44ae-8136-bdcb6baa7c0f\" (UID: \"fc65f7eb-e162-44ae-8136-bdcb6baa7c0f\") " Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.172056 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7hzv8\" (UniqueName: \"kubernetes.io/projected/fc65f7eb-e162-44ae-8136-bdcb6baa7c0f-kube-api-access-7hzv8\") pod \"fc65f7eb-e162-44ae-8136-bdcb6baa7c0f\" (UID: \"fc65f7eb-e162-44ae-8136-bdcb6baa7c0f\") " Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.173324 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fc65f7eb-e162-44ae-8136-bdcb6baa7c0f-scripts" (OuterVolumeSpecName: "scripts") pod "fc65f7eb-e162-44ae-8136-bdcb6baa7c0f" (UID: "fc65f7eb-e162-44ae-8136-bdcb6baa7c0f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.174001 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" containerName="account-replicator" containerID="cri-o://9c8048073ccb7077c6ce8072e9a03b4c7c45eacbbfa51aa61971cd31bec984b4" gracePeriod=30 Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.174187 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" containerName="account-auditor" containerID="cri-o://34b81cfd7ee5d1e0183fe2711993e5340813f109d83301b4976cc429efc8bc12" gracePeriod=30 Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.176398 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fc65f7eb-e162-44ae-8136-bdcb6baa7c0f-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "fc65f7eb-e162-44ae-8136-bdcb6baa7c0f" (UID: "fc65f7eb-e162-44ae-8136-bdcb6baa7c0f"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.181375 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fc65f7eb-e162-44ae-8136-bdcb6baa7c0f-config" (OuterVolumeSpecName: "config") pod "fc65f7eb-e162-44ae-8136-bdcb6baa7c0f" (UID: "fc65f7eb-e162-44ae-8136-bdcb6baa7c0f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.185618 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fc65f7eb-e162-44ae-8136-bdcb6baa7c0f-kube-api-access-7hzv8" (OuterVolumeSpecName: "kube-api-access-7hzv8") pod "fc65f7eb-e162-44ae-8136-bdcb6baa7c0f" (UID: "fc65f7eb-e162-44ae-8136-bdcb6baa7c0f"). 
InnerVolumeSpecName "kube-api-access-7hzv8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.186757 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage02-crc" (OuterVolumeSpecName: "ovndbcluster-nb-etc-ovn") pod "fc65f7eb-e162-44ae-8136-bdcb6baa7c0f" (UID: "fc65f7eb-e162-44ae-8136-bdcb6baa7c0f"). InnerVolumeSpecName "local-storage02-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.192281 4558 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:02 crc kubenswrapper[4558]: E0120 17:53:02.192402 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:53:02 crc kubenswrapper[4558]: E0120 17:53:02.192465 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/49822b01-63d0-40d3-b604-7980891b0683-combined-ca-bundle podName:49822b01-63d0-40d3-b604-7980891b0683 nodeName:}" failed. No retries permitted until 2026-01-20 17:53:03.192445546 +0000 UTC m=+4276.952783512 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/49822b01-63d0-40d3-b604-7980891b0683-combined-ca-bundle") pod "kube-state-metrics-0" (UID: "49822b01-63d0-40d3-b604-7980891b0683") : secret "combined-ca-bundle" not found Jan 20 17:53:02 crc kubenswrapper[4558]: E0120 17:53:02.192794 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-kube-state-metrics-svc: secret "cert-kube-state-metrics-svc" not found Jan 20 17:53:02 crc kubenswrapper[4558]: E0120 17:53:02.192830 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 20 17:53:02 crc kubenswrapper[4558]: E0120 17:53:02.192855 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/088e44cc-3515-4736-aa46-721774902209-config-data podName:088e44cc-3515-4736-aa46-721774902209 nodeName:}" failed. No retries permitted until 2026-01-20 17:53:03.192847752 +0000 UTC m=+4276.953185719 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/088e44cc-3515-4736-aa46-721774902209-config-data") pod "rabbitmq-server-0" (UID: "088e44cc-3515-4736-aa46-721774902209") : configmap "rabbitmq-config-data" not found Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.192870 4558 reconciler_common.go:293] "Volume detached for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef-ovn-northd-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:02 crc kubenswrapper[4558]: E0120 17:53:02.192896 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/49822b01-63d0-40d3-b604-7980891b0683-kube-state-metrics-tls-certs podName:49822b01-63d0-40d3-b604-7980891b0683 nodeName:}" failed. No retries permitted until 2026-01-20 17:53:03.192890562 +0000 UTC m=+4276.953228529 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-state-metrics-tls-certs" (UniqueName: "kubernetes.io/secret/49822b01-63d0-40d3-b604-7980891b0683-kube-state-metrics-tls-certs") pod "kube-state-metrics-0" (UID: "49822b01-63d0-40d3-b604-7980891b0683") : secret "cert-kube-state-metrics-svc" not found Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.217650 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-bxrps"] Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.319377 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-ksvjk"] Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.337125 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc65f7eb-e162-44ae-8136-bdcb6baa7c0f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fc65f7eb-e162-44ae-8136-bdcb6baa7c0f" (UID: "fc65f7eb-e162-44ae-8136-bdcb6baa7c0f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.361823 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/fc65f7eb-e162-44ae-8136-bdcb6baa7c0f-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.365059 4558 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/fc65f7eb-e162-44ae-8136-bdcb6baa7c0f-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.365089 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7hzv8\" (UniqueName: \"kubernetes.io/projected/fc65f7eb-e162-44ae-8136-bdcb6baa7c0f-kube-api-access-7hzv8\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.365100 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fc65f7eb-e162-44ae-8136-bdcb6baa7c0f-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.365130 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" " Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.375903 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-ksvjk"] Jan 20 17:53:02 crc kubenswrapper[4558]: E0120 17:53:02.397809 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:53:02 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:53:02 crc kubenswrapper[4558]: Jan 20 17:53:02 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:53:02 crc kubenswrapper[4558]: Jan 20 17:53:02 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:53:02 crc kubenswrapper[4558]: Jan 20 17:53:02 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:53:02 crc kubenswrapper[4558]: Jan 20 17:53:02 crc kubenswrapper[4558]: if [ -n "neutron" ]; then Jan 20 17:53:02 crc 
kubenswrapper[4558]: GRANT_DATABASE="neutron" Jan 20 17:53:02 crc kubenswrapper[4558]: else Jan 20 17:53:02 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:53:02 crc kubenswrapper[4558]: fi Jan 20 17:53:02 crc kubenswrapper[4558]: Jan 20 17:53:02 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:53:02 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:53:02 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:53:02 crc kubenswrapper[4558]: # 3. create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:53:02 crc kubenswrapper[4558]: # support updates Jan 20 17:53:02 crc kubenswrapper[4558]: Jan 20 17:53:02 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:53:02 crc kubenswrapper[4558]: E0120 17:53:02.400096 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"neutron-db-secret\\\" not found\"" pod="openstack-kuttl-tests/neutron-a1e2-account-create-update-hfpfh" podUID="799d2689-2fcd-4bc5-ae70-1d0ae1194c76" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.413768 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.414599 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="7d535532-403f-4d55-8138-7e09287d0108" containerName="glance-httpd" containerID="cri-o://6d020a191147c34f2712820f7dd8475cc0193d6eeb83f724303b3f67c115e13e" gracePeriod=30 Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.414842 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="7d535532-403f-4d55-8138-7e09287d0108" containerName="glance-log" containerID="cri-o://f6b470a99751a2192a7c08c8ea1e3240681879e8b22e9a2638b977811313762b" gracePeriod=30 Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.532667 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc65f7eb-e162-44ae-8136-bdcb6baa7c0f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:02 crc kubenswrapper[4558]: E0120 17:53:02.532931 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Jan 20 17:53:02 crc kubenswrapper[4558]: E0120 17:53:02.532976 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/c4a11c40-a157-4a28-b1a3-60c211d1d0bf-config-data podName:c4a11c40-a157-4a28-b1a3-60c211d1d0bf nodeName:}" failed. No retries permitted until 2026-01-20 17:53:04.532958648 +0000 UTC m=+4278.293296615 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/c4a11c40-a157-4a28-b1a3-60c211d1d0bf-config-data") pod "rabbitmq-cell1-server-0" (UID: "c4a11c40-a157-4a28-b1a3-60c211d1d0bf") : configmap "rabbitmq-cell1-config-data" not found Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.536879 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage02-crc" (UniqueName: "kubernetes.io/local-volume/local-storage02-crc") on node "crc" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.572080 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc65f7eb-e162-44ae-8136-bdcb6baa7c0f-ovsdbserver-nb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-nb-tls-certs") pod "fc65f7eb-e162-44ae-8136-bdcb6baa7c0f" (UID: "fc65f7eb-e162-44ae-8136-bdcb6baa7c0f"). InnerVolumeSpecName "ovsdbserver-nb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.572252 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-zmctf"] Jan 20 17:53:02 crc kubenswrapper[4558]: E0120 17:53:02.572784 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef" containerName="ovn-northd" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.572805 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef" containerName="ovn-northd" Jan 20 17:53:02 crc kubenswrapper[4558]: E0120 17:53:02.572815 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef" containerName="openstack-network-exporter" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.572822 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef" containerName="openstack-network-exporter" Jan 20 17:53:02 crc kubenswrapper[4558]: E0120 17:53:02.572840 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc65f7eb-e162-44ae-8136-bdcb6baa7c0f" containerName="openstack-network-exporter" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.572846 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc65f7eb-e162-44ae-8136-bdcb6baa7c0f" containerName="openstack-network-exporter" Jan 20 17:53:02 crc kubenswrapper[4558]: E0120 17:53:02.572872 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc65f7eb-e162-44ae-8136-bdcb6baa7c0f" containerName="ovsdbserver-nb" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.572878 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc65f7eb-e162-44ae-8136-bdcb6baa7c0f" containerName="ovsdbserver-nb" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.573123 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc65f7eb-e162-44ae-8136-bdcb6baa7c0f" containerName="ovsdbserver-nb" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.573138 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef" containerName="ovn-northd" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.573152 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef" containerName="openstack-network-exporter" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.573181 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc65f7eb-e162-44ae-8136-bdcb6baa7c0f" 
containerName="openstack-network-exporter" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.574688 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-zmctf" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.621874 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc65f7eb-e162-44ae-8136-bdcb6baa7c0f-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "fc65f7eb-e162-44ae-8136-bdcb6baa7c0f" (UID: "fc65f7eb-e162-44ae-8136-bdcb6baa7c0f"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.622137 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="11851b63-4564-4f0e-b5f7-16e34e358eca" path="/var/lib/kubelet/pods/11851b63-4564-4f0e-b5f7-16e34e358eca/volumes" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.622386 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-sb-0_634825c8-6d4e-42a0-83c6-c83d105a2a7f/ovsdbserver-sb/0.log" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.622433 4558 generic.go:334] "Generic (PLEG): container finished" podID="634825c8-6d4e-42a0-83c6-c83d105a2a7f" containerID="b80ad06d7261b9a0d3933816a91a930f6b5ef47fe5065a1e8d945d221fa487f7" exitCode=2 Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.622452 4558 generic.go:334] "Generic (PLEG): container finished" podID="634825c8-6d4e-42a0-83c6-c83d105a2a7f" containerID="2f7bc8bf8674f0da29404ad2947529694e0eaec86862f2864ca592a29b1fe552" exitCode=143 Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.627891 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20780c8d-f8bc-43c4-84ba-ba6f2e0fe601" path="/var/lib/kubelet/pods/20780c8d-f8bc-43c4-84ba-ba6f2e0fe601/volumes" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.628695 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="45770d07-3687-4b5d-b3a0-01112456b92f" path="/var/lib/kubelet/pods/45770d07-3687-4b5d-b3a0-01112456b92f/volumes" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.629270 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="51f6fade-ce82-468f-9a2c-79c733dddeb3" path="/var/lib/kubelet/pods/51f6fade-ce82-468f-9a2c-79c733dddeb3/volumes" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.634095 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5226a591-a9b2-48f7-82ed-f613fe64b5a9" path="/var/lib/kubelet/pods/5226a591-a9b2-48f7-82ed-f613fe64b5a9/volumes" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.634981 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5269b2ba-0ce2-4c5a-bd6f-6785afcf8c1b" path="/var/lib/kubelet/pods/5269b2ba-0ce2-4c5a-bd6f-6785afcf8c1b/volumes" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.635647 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="54a8e29a-d222-49e8-ab57-1aa0afb9648b" path="/var/lib/kubelet/pods/54a8e29a-d222-49e8-ab57-1aa0afb9648b/volumes" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.637261 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7ea6811a-c54b-451e-9705-5ba4e9b5622f" path="/var/lib/kubelet/pods/7ea6811a-c54b-451e-9705-5ba4e9b5622f/volumes" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.637920 4558 kubelet_volumes.go:163] "Cleaned up 
orphaned pod volumes dir" podUID="b72bfcae-4aa5-47d2-8e1c-b2e6bfbac76a" path="/var/lib/kubelet/pods/b72bfcae-4aa5-47d2-8e1c-b2e6bfbac76a/volumes" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.638590 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b942958c-b4fa-4db7-aa26-9db8bd06c776" path="/var/lib/kubelet/pods/b942958c-b4fa-4db7-aa26-9db8bd06c776/volumes" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.639927 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d1809d86-a9c3-46e9-96eb-06433832465c" path="/var/lib/kubelet/pods/d1809d86-a9c3-46e9-96eb-06433832465c/volumes" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.641174 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f23cc4f0-d10f-43ee-8f56-d72294b6df8c" path="/var/lib/kubelet/pods/f23cc4f0-d10f-43ee-8f56-d72294b6df8c/volumes" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.643106 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-sb-0_634825c8-6d4e-42a0-83c6-c83d105a2a7f/ovsdbserver-sb/0.log" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.643196 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.643837 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-cell-mapping-pt6lr"] Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.643925 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-cell-mapping-pt6lr"] Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.644015 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-zmctf"] Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.644099 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"634825c8-6d4e-42a0-83c6-c83d105a2a7f","Type":"ContainerDied","Data":"b80ad06d7261b9a0d3933816a91a930f6b5ef47fe5065a1e8d945d221fa487f7"} Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.644195 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"634825c8-6d4e-42a0-83c6-c83d105a2a7f","Type":"ContainerDied","Data":"2f7bc8bf8674f0da29404ad2947529694e0eaec86862f2864ca592a29b1fe552"} Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.644270 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"634825c8-6d4e-42a0-83c6-c83d105a2a7f","Type":"ContainerDied","Data":"8ff36f9bef16f31e4c38cc4e8ef793a35d902f20f56292b5daa79459709bed27"} Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.644341 4558 scope.go:117] "RemoveContainer" containerID="b80ad06d7261b9a0d3933816a91a930f6b5ef47fe5065a1e8d945d221fa487f7" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.643961 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/c33a9cb9-2eec-4c2e-847d-9b06fcce5fef-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-84b9f45d47-zmctf\" (UID: \"c33a9cb9-2eec-4c2e-847d-9b06fcce5fef\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-zmctf" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.648568 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"config\" (UniqueName: \"kubernetes.io/configmap/c33a9cb9-2eec-4c2e-847d-9b06fcce5fef-config\") pod \"dnsmasq-dnsmasq-84b9f45d47-zmctf\" (UID: \"c33a9cb9-2eec-4c2e-847d-9b06fcce5fef\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-zmctf" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.648622 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w9hp6\" (UniqueName: \"kubernetes.io/projected/c33a9cb9-2eec-4c2e-847d-9b06fcce5fef-kube-api-access-w9hp6\") pod \"dnsmasq-dnsmasq-84b9f45d47-zmctf\" (UID: \"c33a9cb9-2eec-4c2e-847d-9b06fcce5fef\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-zmctf" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.648760 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.648794 4558 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/fc65f7eb-e162-44ae-8136-bdcb6baa7c0f-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.648809 4558 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/fc65f7eb-e162-44ae-8136-bdcb6baa7c0f-ovsdbserver-nb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.649431 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-bxrps" event={"ID":"8101ebb9-ace1-4242-8c8b-698307c6be29","Type":"ContainerStarted","Data":"ace592302f09e2dd806f0b4ee4d154b4f2ff126b8edd992d3100f207347780a3"} Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.662145 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-a1e2-account-create-update-hfpfh"] Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.675299 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-cell-mapping-28s5c"] Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.693636 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell0-cell-mapping-28s5c"] Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.696360 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-a1e2-account-create-update-hfpfh" event={"ID":"799d2689-2fcd-4bc5-ae70-1d0ae1194c76","Type":"ContainerStarted","Data":"da762432cfe7c11ef497d104d891e650727cbbfa6ae73da3a2e331cd3870382d"} Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.736445 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-nb-0_fc65f7eb-e162-44ae-8136-bdcb6baa7c0f/ovsdbserver-nb/0.log" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.736493 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.736527 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"fc65f7eb-e162-44ae-8136-bdcb6baa7c0f","Type":"ContainerDied","Data":"d1bff363e27d1138210b36c720191fdd963603ecb4d82dd076279062976f3c5f"} Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.736592 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.737634 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="f30aa605-d974-4d0d-aa96-5cb0d96897d6" containerName="glance-log" containerID="cri-o://519c148165b8e5d8031cd18cf91e1dcfa134e4235207cac41f7dc13725bac816" gracePeriod=30 Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.737905 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="f30aa605-d974-4d0d-aa96-5cb0d96897d6" containerName="glance-httpd" containerID="cri-o://d68e5d5426e93fd2a2fa4b8833690231b1a3151adb135d0a227b1d81dba55d3c" gracePeriod=30 Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.750393 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/634825c8-6d4e-42a0-83c6-c83d105a2a7f-metrics-certs-tls-certs\") pod \"634825c8-6d4e-42a0-83c6-c83d105a2a7f\" (UID: \"634825c8-6d4e-42a0-83c6-c83d105a2a7f\") " Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.750481 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dmp6v\" (UniqueName: \"kubernetes.io/projected/634825c8-6d4e-42a0-83c6-c83d105a2a7f-kube-api-access-dmp6v\") pod \"634825c8-6d4e-42a0-83c6-c83d105a2a7f\" (UID: \"634825c8-6d4e-42a0-83c6-c83d105a2a7f\") " Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.750515 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/634825c8-6d4e-42a0-83c6-c83d105a2a7f-config\") pod \"634825c8-6d4e-42a0-83c6-c83d105a2a7f\" (UID: \"634825c8-6d4e-42a0-83c6-c83d105a2a7f\") " Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.750542 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/634825c8-6d4e-42a0-83c6-c83d105a2a7f-scripts\") pod \"634825c8-6d4e-42a0-83c6-c83d105a2a7f\" (UID: \"634825c8-6d4e-42a0-83c6-c83d105a2a7f\") " Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.750582 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-sb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"634825c8-6d4e-42a0-83c6-c83d105a2a7f\" (UID: \"634825c8-6d4e-42a0-83c6-c83d105a2a7f\") " Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.750604 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/634825c8-6d4e-42a0-83c6-c83d105a2a7f-ovsdbserver-sb-tls-certs\") pod \"634825c8-6d4e-42a0-83c6-c83d105a2a7f\" (UID: \"634825c8-6d4e-42a0-83c6-c83d105a2a7f\") " Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.750690 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/634825c8-6d4e-42a0-83c6-c83d105a2a7f-combined-ca-bundle\") pod \"634825c8-6d4e-42a0-83c6-c83d105a2a7f\" (UID: \"634825c8-6d4e-42a0-83c6-c83d105a2a7f\") " Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.750778 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/634825c8-6d4e-42a0-83c6-c83d105a2a7f-ovsdb-rundir\") pod 
\"634825c8-6d4e-42a0-83c6-c83d105a2a7f\" (UID: \"634825c8-6d4e-42a0-83c6-c83d105a2a7f\") " Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.751078 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c33a9cb9-2eec-4c2e-847d-9b06fcce5fef-config\") pod \"dnsmasq-dnsmasq-84b9f45d47-zmctf\" (UID: \"c33a9cb9-2eec-4c2e-847d-9b06fcce5fef\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-zmctf" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.751105 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w9hp6\" (UniqueName: \"kubernetes.io/projected/c33a9cb9-2eec-4c2e-847d-9b06fcce5fef-kube-api-access-w9hp6\") pod \"dnsmasq-dnsmasq-84b9f45d47-zmctf\" (UID: \"c33a9cb9-2eec-4c2e-847d-9b06fcce5fef\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-zmctf" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.751233 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/c33a9cb9-2eec-4c2e-847d-9b06fcce5fef-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-84b9f45d47-zmctf\" (UID: \"c33a9cb9-2eec-4c2e-847d-9b06fcce5fef\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-zmctf" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.751979 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/634825c8-6d4e-42a0-83c6-c83d105a2a7f-config" (OuterVolumeSpecName: "config") pod "634825c8-6d4e-42a0-83c6-c83d105a2a7f" (UID: "634825c8-6d4e-42a0-83c6-c83d105a2a7f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.752136 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/c33a9cb9-2eec-4c2e-847d-9b06fcce5fef-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-84b9f45d47-zmctf\" (UID: \"c33a9cb9-2eec-4c2e-847d-9b06fcce5fef\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-zmctf" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.752692 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/634825c8-6d4e-42a0-83c6-c83d105a2a7f-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "634825c8-6d4e-42a0-83c6-c83d105a2a7f" (UID: "634825c8-6d4e-42a0-83c6-c83d105a2a7f"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.752863 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c33a9cb9-2eec-4c2e-847d-9b06fcce5fef-config\") pod \"dnsmasq-dnsmasq-84b9f45d47-zmctf\" (UID: \"c33a9cb9-2eec-4c2e-847d-9b06fcce5fef\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-zmctf" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.753216 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/634825c8-6d4e-42a0-83c6-c83d105a2a7f-scripts" (OuterVolumeSpecName: "scripts") pod "634825c8-6d4e-42a0-83c6-c83d105a2a7f" (UID: "634825c8-6d4e-42a0-83c6-c83d105a2a7f"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.773495 4558 generic.go:334] "Generic (PLEG): container finished" podID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" containerID="e38f2eedbeb8bee77176fac3f5c4ecdcf1aef7aa98e160b84718ab6bb18bb8a4" exitCode=0 Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.773528 4558 generic.go:334] "Generic (PLEG): container finished" podID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" containerID="a6e878f0584cff110a93e31a31995a6fbd71d401815d3d94bb3700d40da94020" exitCode=0 Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.773538 4558 generic.go:334] "Generic (PLEG): container finished" podID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" containerID="0babdc19d278b5edc8070926b4a7ce96ef2d679f28771d60ffadb5ba187b8117" exitCode=0 Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.773545 4558 generic.go:334] "Generic (PLEG): container finished" podID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" containerID="814558d14f3629d14a008df6a38667804146fe090de98d44f2c3c2447d69c787" exitCode=0 Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.773552 4558 generic.go:334] "Generic (PLEG): container finished" podID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" containerID="d32a8a2e6495346715f9634ab4b1b4bc18d96e7451b5223fe9976afe7dd5d7c2" exitCode=0 Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.773559 4558 generic.go:334] "Generic (PLEG): container finished" podID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" containerID="ce8b75288eb38358817832bfb419010893a3a4a082fc83e9ed669e3fe1c9a795" exitCode=0 Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.777680 4558 generic.go:334] "Generic (PLEG): container finished" podID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" containerID="19a88da3e14609ec200568f698ff68961ad644b5fa7a972150ff49ec0d443ab1" exitCode=0 Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.777690 4558 generic.go:334] "Generic (PLEG): container finished" podID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" containerID="9f25ee1c308a320e8a43a0bc03a0bfd7c3d57cf22c17c10b894b0f930d53afe2" exitCode=0 Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.777700 4558 generic.go:334] "Generic (PLEG): container finished" podID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" containerID="34b81cfd7ee5d1e0183fe2711993e5340813f109d83301b4976cc429efc8bc12" exitCode=0 Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.777707 4558 generic.go:334] "Generic (PLEG): container finished" podID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" containerID="9c8048073ccb7077c6ce8072e9a03b4c7c45eacbbfa51aa61971cd31bec984b4" exitCode=0 Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.773504 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-f275-account-create-update-5pb7k"] Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.777868 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"64d029c7-79c6-40fc-b4d2-ac5bfbc387a0","Type":"ContainerDied","Data":"e38f2eedbeb8bee77176fac3f5c4ecdcf1aef7aa98e160b84718ab6bb18bb8a4"} Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.777891 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"64d029c7-79c6-40fc-b4d2-ac5bfbc387a0","Type":"ContainerDied","Data":"a6e878f0584cff110a93e31a31995a6fbd71d401815d3d94bb3700d40da94020"} Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.777928 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"64d029c7-79c6-40fc-b4d2-ac5bfbc387a0","Type":"ContainerDied","Data":"0babdc19d278b5edc8070926b4a7ce96ef2d679f28771d60ffadb5ba187b8117"} Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.777940 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"64d029c7-79c6-40fc-b4d2-ac5bfbc387a0","Type":"ContainerDied","Data":"814558d14f3629d14a008df6a38667804146fe090de98d44f2c3c2447d69c787"} Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.777949 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"64d029c7-79c6-40fc-b4d2-ac5bfbc387a0","Type":"ContainerDied","Data":"d32a8a2e6495346715f9634ab4b1b4bc18d96e7451b5223fe9976afe7dd5d7c2"} Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.777959 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"64d029c7-79c6-40fc-b4d2-ac5bfbc387a0","Type":"ContainerDied","Data":"ce8b75288eb38358817832bfb419010893a3a4a082fc83e9ed669e3fe1c9a795"} Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.777967 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"64d029c7-79c6-40fc-b4d2-ac5bfbc387a0","Type":"ContainerDied","Data":"19a88da3e14609ec200568f698ff68961ad644b5fa7a972150ff49ec0d443ab1"} Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.777977 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"64d029c7-79c6-40fc-b4d2-ac5bfbc387a0","Type":"ContainerDied","Data":"9f25ee1c308a320e8a43a0bc03a0bfd7c3d57cf22c17c10b894b0f930d53afe2"} Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.778002 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"64d029c7-79c6-40fc-b4d2-ac5bfbc387a0","Type":"ContainerDied","Data":"34b81cfd7ee5d1e0183fe2711993e5340813f109d83301b4976cc429efc8bc12"} Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.778012 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"64d029c7-79c6-40fc-b4d2-ac5bfbc387a0","Type":"ContainerDied","Data":"9c8048073ccb7077c6ce8072e9a03b4c7c45eacbbfa51aa61971cd31bec984b4"} Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.787260 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/634825c8-6d4e-42a0-83c6-c83d105a2a7f-kube-api-access-dmp6v" (OuterVolumeSpecName: "kube-api-access-dmp6v") pod "634825c8-6d4e-42a0-83c6-c83d105a2a7f" (UID: "634825c8-6d4e-42a0-83c6-c83d105a2a7f"). InnerVolumeSpecName "kube-api-access-dmp6v". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.808480 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-db-create-lmln8"] Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.813499 4558 generic.go:334] "Generic (PLEG): container finished" podID="0c9fc067-cb23-4f72-b928-5447fb5182c1" containerID="a277d8fbf0b081fa88cc6f698d169256895a7e996cb99dec573f1f32b2c3f74c" exitCode=143 Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.813568 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-658f45b9f4-tx296" event={"ID":"0c9fc067-cb23-4f72-b928-5447fb5182c1","Type":"ContainerDied","Data":"a277d8fbf0b081fa88cc6f698d169256895a7e996cb99dec573f1f32b2c3f74c"} Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.815859 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "ovndbcluster-sb-etc-ovn") pod "634825c8-6d4e-42a0-83c6-c83d105a2a7f" (UID: "634825c8-6d4e-42a0-83c6-c83d105a2a7f"). InnerVolumeSpecName "local-storage03-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.821700 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w9hp6\" (UniqueName: \"kubernetes.io/projected/c33a9cb9-2eec-4c2e-847d-9b06fcce5fef-kube-api-access-w9hp6\") pod \"dnsmasq-dnsmasq-84b9f45d47-zmctf\" (UID: \"c33a9cb9-2eec-4c2e-847d-9b06fcce5fef\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-zmctf" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.822646 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovn-northd-0_04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef/ovn-northd/0.log" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.822758 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef","Type":"ContainerDied","Data":"faa5df1b3cebb05d0c7b3e1f9f523cf1c2ef4a55dc9f1334a82b76cdc35e20de"} Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.822898 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.831719 4558 scope.go:117] "RemoveContainer" containerID="2f7bc8bf8674f0da29404ad2947529694e0eaec86862f2864ca592a29b1fe552" Jan 20 17:53:02 crc kubenswrapper[4558]: E0120 17:53:02.832037 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:53:02 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:53:02 crc kubenswrapper[4558]: Jan 20 17:53:02 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:53:02 crc kubenswrapper[4558]: Jan 20 17:53:02 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:53:02 crc kubenswrapper[4558]: Jan 20 17:53:02 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:53:02 crc kubenswrapper[4558]: Jan 20 17:53:02 crc kubenswrapper[4558]: if [ -n "neutron" ]; then Jan 20 17:53:02 crc kubenswrapper[4558]: GRANT_DATABASE="neutron" Jan 20 17:53:02 crc kubenswrapper[4558]: else Jan 20 17:53:02 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:53:02 crc kubenswrapper[4558]: fi Jan 20 17:53:02 crc kubenswrapper[4558]: Jan 20 17:53:02 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:53:02 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:53:02 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:53:02 crc kubenswrapper[4558]: # 3. create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:53:02 crc kubenswrapper[4558]: # support updates Jan 20 17:53:02 crc kubenswrapper[4558]: Jan 20 17:53:02 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:53:02 crc kubenswrapper[4558]: E0120 17:53:02.832509 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:53:02 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:53:02 crc kubenswrapper[4558]: Jan 20 17:53:02 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:53:02 crc kubenswrapper[4558]: Jan 20 17:53:02 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:53:02 crc kubenswrapper[4558]: Jan 20 17:53:02 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:53:02 crc kubenswrapper[4558]: Jan 20 17:53:02 crc kubenswrapper[4558]: if [ -n "nova_api" ]; then Jan 20 17:53:02 crc kubenswrapper[4558]: GRANT_DATABASE="nova_api" Jan 20 17:53:02 crc kubenswrapper[4558]: else Jan 20 17:53:02 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:53:02 crc kubenswrapper[4558]: fi Jan 20 17:53:02 crc kubenswrapper[4558]: Jan 20 17:53:02 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:53:02 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:53:02 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:53:02 crc kubenswrapper[4558]: # 3. 
create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:53:02 crc kubenswrapper[4558]: # support updates Jan 20 17:53:02 crc kubenswrapper[4558]: Jan 20 17:53:02 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:53:02 crc kubenswrapper[4558]: E0120 17:53:02.834213 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"neutron-db-secret\\\" not found\"" pod="openstack-kuttl-tests/neutron-a1e2-account-create-update-hfpfh" podUID="799d2689-2fcd-4bc5-ae70-1d0ae1194c76" Jan 20 17:53:02 crc kubenswrapper[4558]: E0120 17:53:02.835468 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"nova-api-db-secret\\\" not found\"" pod="openstack-kuttl-tests/nova-api-f275-account-create-update-5pb7k" podUID="01f39889-c29f-4e28-98fa-bc99322f761c" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.849455 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/placement-db-create-lmln8"] Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.854760 4558 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/634825c8-6d4e-42a0-83c6-c83d105a2a7f-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.854843 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dmp6v\" (UniqueName: \"kubernetes.io/projected/634825c8-6d4e-42a0-83c6-c83d105a2a7f-kube-api-access-dmp6v\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.854932 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/634825c8-6d4e-42a0-83c6-c83d105a2a7f-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.855005 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/634825c8-6d4e-42a0-83c6-c83d105a2a7f-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.855084 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" " Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.898805 4558 scope.go:117] "RemoveContainer" containerID="b80ad06d7261b9a0d3933816a91a930f6b5ef47fe5065a1e8d945d221fa487f7" Jan 20 17:53:02 crc kubenswrapper[4558]: E0120 17:53:02.900137 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b80ad06d7261b9a0d3933816a91a930f6b5ef47fe5065a1e8d945d221fa487f7\": container with ID starting with b80ad06d7261b9a0d3933816a91a930f6b5ef47fe5065a1e8d945d221fa487f7 not found: ID does not exist" containerID="b80ad06d7261b9a0d3933816a91a930f6b5ef47fe5065a1e8d945d221fa487f7" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.900201 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b80ad06d7261b9a0d3933816a91a930f6b5ef47fe5065a1e8d945d221fa487f7"} err="failed to get container status \"b80ad06d7261b9a0d3933816a91a930f6b5ef47fe5065a1e8d945d221fa487f7\": rpc error: code = NotFound desc = could not find container 
\"b80ad06d7261b9a0d3933816a91a930f6b5ef47fe5065a1e8d945d221fa487f7\": container with ID starting with b80ad06d7261b9a0d3933816a91a930f6b5ef47fe5065a1e8d945d221fa487f7 not found: ID does not exist" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.900233 4558 scope.go:117] "RemoveContainer" containerID="2f7bc8bf8674f0da29404ad2947529694e0eaec86862f2864ca592a29b1fe552" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.901209 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-f441-account-create-update-w7t8p"] Jan 20 17:53:02 crc kubenswrapper[4558]: E0120 17:53:02.901337 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2f7bc8bf8674f0da29404ad2947529694e0eaec86862f2864ca592a29b1fe552\": container with ID starting with 2f7bc8bf8674f0da29404ad2947529694e0eaec86862f2864ca592a29b1fe552 not found: ID does not exist" containerID="2f7bc8bf8674f0da29404ad2947529694e0eaec86862f2864ca592a29b1fe552" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.901380 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2f7bc8bf8674f0da29404ad2947529694e0eaec86862f2864ca592a29b1fe552"} err="failed to get container status \"2f7bc8bf8674f0da29404ad2947529694e0eaec86862f2864ca592a29b1fe552\": rpc error: code = NotFound desc = could not find container \"2f7bc8bf8674f0da29404ad2947529694e0eaec86862f2864ca592a29b1fe552\": container with ID starting with 2f7bc8bf8674f0da29404ad2947529694e0eaec86862f2864ca592a29b1fe552 not found: ID does not exist" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.901409 4558 scope.go:117] "RemoveContainer" containerID="b80ad06d7261b9a0d3933816a91a930f6b5ef47fe5065a1e8d945d221fa487f7" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.902212 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b80ad06d7261b9a0d3933816a91a930f6b5ef47fe5065a1e8d945d221fa487f7"} err="failed to get container status \"b80ad06d7261b9a0d3933816a91a930f6b5ef47fe5065a1e8d945d221fa487f7\": rpc error: code = NotFound desc = could not find container \"b80ad06d7261b9a0d3933816a91a930f6b5ef47fe5065a1e8d945d221fa487f7\": container with ID starting with b80ad06d7261b9a0d3933816a91a930f6b5ef47fe5065a1e8d945d221fa487f7 not found: ID does not exist" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.902240 4558 scope.go:117] "RemoveContainer" containerID="2f7bc8bf8674f0da29404ad2947529694e0eaec86862f2864ca592a29b1fe552" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.905083 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2f7bc8bf8674f0da29404ad2947529694e0eaec86862f2864ca592a29b1fe552"} err="failed to get container status \"2f7bc8bf8674f0da29404ad2947529694e0eaec86862f2864ca592a29b1fe552\": rpc error: code = NotFound desc = could not find container \"2f7bc8bf8674f0da29404ad2947529694e0eaec86862f2864ca592a29b1fe552\": container with ID starting with 2f7bc8bf8674f0da29404ad2947529694e0eaec86862f2864ca592a29b1fe552 not found: ID does not exist" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.905106 4558 scope.go:117] "RemoveContainer" containerID="c90c4608d07fcc026b1da8048f2ccc5e2af6fd92d05975c8c1521388f7341569" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.916322 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: 
"kubernetes.io/local-volume/local-storage03-crc") on node "crc" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.916447 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/placement-f441-account-create-update-w7t8p"] Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.952093 4558 scope.go:117] "RemoveContainer" containerID="5f7c09a23fc3d9fe92c0898786808da9cd4326e33ccea9ba8df24f4b5c6ab067" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.959669 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.968542 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/634825c8-6d4e-42a0-83c6-c83d105a2a7f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "634825c8-6d4e-42a0-83c6-c83d105a2a7f" (UID: "634825c8-6d4e-42a0-83c6-c83d105a2a7f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.968609 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.968831 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-api-0" podUID="5eac3a8d-ee82-4e6f-bdb9-fccb82e09496" containerName="cinder-api-log" containerID="cri-o://931107b5dd291893e36bfee944ab2879ec15efdaafdf4d8d3e976539e9c14eab" gracePeriod=30 Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.969291 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-api-0" podUID="5eac3a8d-ee82-4e6f-bdb9-fccb82e09496" containerName="cinder-api" containerID="cri-o://c20582ab3d42dc2c2fc8c4708a740fdb26d1d7abea4ac13a5a595f35117f0d0b" gracePeriod=30 Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.997228 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.997613 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-scheduler-0" podUID="51d19369-14ac-4d62-ab05-9c5830856622" containerName="probe" containerID="cri-o://dc3c169cf981fe359f960f5ef7f1e452664aacea8447f6ecb0fcdf2aa8023e7a" gracePeriod=30 Jan 20 17:53:02 crc kubenswrapper[4558]: I0120 17:53:02.997863 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-scheduler-0" podUID="51d19369-14ac-4d62-ab05-9c5830856622" containerName="cinder-scheduler" containerID="cri-o://bb89698c06a3d8f0dc6700388a8558678523660d9134634c1c9e01a8b703cf50" gracePeriod=30 Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.015786 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-zmctf" Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.045483 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-db-create-7jpvj"] Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.046188 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/634825c8-6d4e-42a0-83c6-c83d105a2a7f-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "634825c8-6d4e-42a0-83c6-c83d105a2a7f" (UID: "634825c8-6d4e-42a0-83c6-c83d105a2a7f"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.052364 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-db-create-7jpvj"] Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.054574 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/634825c8-6d4e-42a0-83c6-c83d105a2a7f-ovsdbserver-sb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-sb-tls-certs") pod "634825c8-6d4e-42a0-83c6-c83d105a2a7f" (UID: "634825c8-6d4e-42a0-83c6-c83d105a2a7f"). InnerVolumeSpecName "ovsdbserver-sb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.062328 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/634825c8-6d4e-42a0-83c6-c83d105a2a7f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.062350 4558 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/634825c8-6d4e-42a0-83c6-c83d105a2a7f-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.062361 4558 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/634825c8-6d4e-42a0-83c6-c83d105a2a7f-ovsdbserver-sb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.063220 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-9ded-account-create-update-l5qrm"] Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.078502 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-9ded-account-create-update-l5qrm"] Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.084049 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-db-create-z66dg"] Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.090320 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-db-create-z66dg"] Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.095085 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.101099 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.106732 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-db-create-9cjqc"] Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.116261 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack-kuttl-tests/barbican-db-create-9cjqc"] Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.125388 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-db-create-wstvp"] Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.130861 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-a8ee-account-create-update-t5zdv"] Jan 20 17:53:03 crc kubenswrapper[4558]: E0120 17:53:03.134589 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:53:03 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:53:03 crc kubenswrapper[4558]: Jan 20 17:53:03 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:53:03 crc kubenswrapper[4558]: Jan 20 17:53:03 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:53:03 crc kubenswrapper[4558]: Jan 20 17:53:03 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:53:03 crc kubenswrapper[4558]: Jan 20 17:53:03 crc kubenswrapper[4558]: if [ -n "nova_cell0" ]; then Jan 20 17:53:03 crc kubenswrapper[4558]: GRANT_DATABASE="nova_cell0" Jan 20 17:53:03 crc kubenswrapper[4558]: else Jan 20 17:53:03 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:53:03 crc kubenswrapper[4558]: fi Jan 20 17:53:03 crc kubenswrapper[4558]: Jan 20 17:53:03 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:53:03 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:53:03 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:53:03 crc kubenswrapper[4558]: # 3. create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:53:03 crc kubenswrapper[4558]: # support updates Jan 20 17:53:03 crc kubenswrapper[4558]: Jan 20 17:53:03 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:53:03 crc kubenswrapper[4558]: E0120 17:53:03.135315 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:53:03 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:53:03 crc kubenswrapper[4558]: Jan 20 17:53:03 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:53:03 crc kubenswrapper[4558]: Jan 20 17:53:03 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:53:03 crc kubenswrapper[4558]: Jan 20 17:53:03 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:53:03 crc kubenswrapper[4558]: Jan 20 17:53:03 crc kubenswrapper[4558]: if [ -n "nova_cell1" ]; then Jan 20 17:53:03 crc kubenswrapper[4558]: GRANT_DATABASE="nova_cell1" Jan 20 17:53:03 crc kubenswrapper[4558]: else Jan 20 17:53:03 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:53:03 crc kubenswrapper[4558]: fi Jan 20 17:53:03 crc kubenswrapper[4558]: Jan 20 17:53:03 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:53:03 crc kubenswrapper[4558]: # 1. 
MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:53:03 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:53:03 crc kubenswrapper[4558]: # 3. create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:53:03 crc kubenswrapper[4558]: # support updates Jan 20 17:53:03 crc kubenswrapper[4558]: Jan 20 17:53:03 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.135393 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:53:03 crc kubenswrapper[4558]: E0120 17:53:03.136427 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"nova-cell1-db-secret\\\" not found\"" pod="openstack-kuttl-tests/nova-cell1-d1d9-account-create-update-g54ff" podUID="3eabae3c-0014-4d8e-9393-7cb934ba9fa4" Jan 20 17:53:03 crc kubenswrapper[4558]: E0120 17:53:03.136459 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"nova-cell0-db-secret\\\" not found\"" pod="openstack-kuttl-tests/nova-cell0-a0eb-account-create-update-75hkz" podUID="6d80c5b5-880f-4d59-9287-334023ec6ce9" Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.208696 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-db-create-wstvp"] Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.242269 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-a8ee-account-create-update-t5zdv"] Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.256363 4558 scope.go:117] "RemoveContainer" containerID="4b9890fbbd9a3bfc034db961ae88fefa150c5ec801cba28d028682bd258b5ebd" Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.263294 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.280893 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-75676d645b-l2sp7"] Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.281255 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-75676d645b-l2sp7" podUID="6ea6b41c-c5ed-49c9-84f5-a932a1325aba" containerName="neutron-api" containerID="cri-o://d26f7b1dae3611d65adf8538389e6547e7f472172ad5ffe9456136d1cfd69e59" gracePeriod=30 Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.282348 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-75676d645b-l2sp7" podUID="6ea6b41c-c5ed-49c9-84f5-a932a1325aba" containerName="neutron-httpd" containerID="cri-o://a10e865cdae4227bcd9b239722ad2d2d6bc501887291e7983d1cf31b29c7b76e" gracePeriod=30 Jan 20 17:53:03 crc kubenswrapper[4558]: E0120 17:53:03.288745 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-kube-state-metrics-svc: secret "cert-kube-state-metrics-svc" not found Jan 20 17:53:03 crc kubenswrapper[4558]: E0120 17:53:03.288800 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:53:03 crc kubenswrapper[4558]: E0120 17:53:03.288812 4558 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/secret/49822b01-63d0-40d3-b604-7980891b0683-kube-state-metrics-tls-certs podName:49822b01-63d0-40d3-b604-7980891b0683 nodeName:}" failed. No retries permitted until 2026-01-20 17:53:05.288794254 +0000 UTC m=+4279.049132221 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-state-metrics-tls-certs" (UniqueName: "kubernetes.io/secret/49822b01-63d0-40d3-b604-7980891b0683-kube-state-metrics-tls-certs") pod "kube-state-metrics-0" (UID: "49822b01-63d0-40d3-b604-7980891b0683") : secret "cert-kube-state-metrics-svc" not found Jan 20 17:53:03 crc kubenswrapper[4558]: E0120 17:53:03.288894 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/49822b01-63d0-40d3-b604-7980891b0683-combined-ca-bundle podName:49822b01-63d0-40d3-b604-7980891b0683 nodeName:}" failed. No retries permitted until 2026-01-20 17:53:05.288875466 +0000 UTC m=+4279.049213434 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/49822b01-63d0-40d3-b604-7980891b0683-combined-ca-bundle") pod "kube-state-metrics-0" (UID: "49822b01-63d0-40d3-b604-7980891b0683") : secret "combined-ca-bundle" not found Jan 20 17:53:03 crc kubenswrapper[4558]: E0120 17:53:03.288919 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 20 17:53:03 crc kubenswrapper[4558]: E0120 17:53:03.288951 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/088e44cc-3515-4736-aa46-721774902209-config-data podName:088e44cc-3515-4736-aa46-721774902209 nodeName:}" failed. No retries permitted until 2026-01-20 17:53:05.288942783 +0000 UTC m=+4279.049280750 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/088e44cc-3515-4736-aa46-721774902209-config-data") pod "rabbitmq-server-0" (UID: "088e44cc-3515-4736-aa46-721774902209") : configmap "rabbitmq-config-data" not found Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.293047 4558 scope.go:117] "RemoveContainer" containerID="1035f6c74f13c042848a0da43ccb550222db3c6c766f2b87733c2701b6ec1a2e" Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.324446 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-a1e2-account-create-update-hfpfh"] Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.336207 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-0"] Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.350083 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-5bb5dd7cd4-w4jnj"] Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.350392 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-keystone-listener-5bb5dd7cd4-w4jnj" podUID="51c79956-4212-4d46-b3e9-e9d5e7a33c31" containerName="barbican-keystone-listener-log" containerID="cri-o://fbd3990f851ba4c232be9e8e9146ae26662c46c76d2e455f20e0dcc444b77578" gracePeriod=30 Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.350846 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-keystone-listener-5bb5dd7cd4-w4jnj" podUID="51c79956-4212-4d46-b3e9-e9d5e7a33c31" containerName="barbican-keystone-listener" containerID="cri-o://b9ac5c3d167c83d97bcf4206fa59450cb566eb19a154b51f5884efb725cdab16" gracePeriod=30 Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.354252 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-d1d9-account-create-update-g54ff"] Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.361322 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-a0eb-account-create-update-75hkz"] Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.377404 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-db-create-2qpp6"] Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.390985 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-db-create-2qpp6"] Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.395993 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" podUID="c4a11c40-a157-4a28-b1a3-60c211d1d0bf" containerName="rabbitmq" containerID="cri-o://ca5aa63a108196583f0f9950193b9c697a06235d9a0ce251fae6acde947d41cf" gracePeriod=604800 Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.402445 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.408093 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-d1d9-account-create-update-g54ff"] Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.413179 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.413436 4558 kuberuntime_container.go:808] "Killing container with a grace 
period" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2" containerName="nova-scheduler-scheduler" containerID="cri-o://ce0da192b65bf7c5e90dd832e0212db51fd4f3e04276e89c0f7b9256d88b752a" gracePeriod=30 Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.418916 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.419138 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="6e7de5b3-4426-4816-9d45-9b4226333dbc" containerName="nova-metadata-log" containerID="cri-o://5e481f5bc5fbafdbeaf23fc31d695f949c1cb9231a43da689c3c8047fcf0e3bf" gracePeriod=30 Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.419474 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="6e7de5b3-4426-4816-9d45-9b4226333dbc" containerName="nova-metadata-metadata" containerID="cri-o://49e55db6bbf8e6b2aba631a3aec4df47c74f2b76c8c583fcb5c80461300cdc1b" gracePeriod=30 Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.422706 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-db-create-smx77"] Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.430580 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell0-db-create-smx77"] Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.435411 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-a0eb-account-create-update-75hkz"] Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.443724 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-db-create-swxb7"] Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.449019 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-db-create-swxb7"] Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.454999 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-f275-account-create-update-5pb7k"] Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.462069 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-api-5bfc7dd8b8-pxpm9"] Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.462397 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-api-5bfc7dd8b8-pxpm9" podUID="e638a157-4234-4339-80ef-65d818d39b73" containerName="barbican-api-log" containerID="cri-o://5ffeb03ff85f67c07d33f3f3725dfcbef564cf6c716c687b03dc2f0eef33238d" gracePeriod=30 Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.462527 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-api-5bfc7dd8b8-pxpm9" podUID="e638a157-4234-4339-80ef-65d818d39b73" containerName="barbican-api" containerID="cri-o://c2b848f0a44c31229d716190ee834f4766c179c31ef6380b2a6319ec16dbf83b" gracePeriod=30 Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.474693 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.474900 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="64e30e89-cc14-4c89-bd1b-5702bfba717c" containerName="nova-api-log" 
containerID="cri-o://e0d25530c2349a51795cf3ce00e8ae91ba3476254b412bf92d6e0c208e2c53dc" gracePeriod=30 Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.475319 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="64e30e89-cc14-4c89-bd1b-5702bfba717c" containerName="nova-api-api" containerID="cri-o://5a3ae52e8d382a0ba3e16ca78acf47ccaf862a7eab92320268ff87ae7158b25c" gracePeriod=30 Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.497200 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-worker-6c48cb9cdf-9688c"] Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.497457 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-worker-6c48cb9cdf-9688c" podUID="769afcb7-df8b-4d95-b662-9f5032227822" containerName="barbican-worker-log" containerID="cri-o://90b19c0fa576e6455d0cc3c15cd90c006b0ab179a1017800a7266892fa8e203e" gracePeriod=30 Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.499031 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-worker-6c48cb9cdf-9688c" podUID="769afcb7-df8b-4d95-b662-9f5032227822" containerName="barbican-worker" containerID="cri-o://a5d920e84b45309d975c5a55142f856928971b658393631084941d3b078f522a" gracePeriod=30 Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.508367 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.541227 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.541529 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podUID="7c16f0c4-1462-4817-9f74-9e3d93193867" containerName="nova-cell1-conductor-conductor" containerID="cri-o://b4a7a63ae5b4336f1c55496b20962afe4a27040b0e22d712c664548644815e60" gracePeriod=30 Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.555250 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-db-sync-zjq8p"] Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.560965 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-db-sync-zjq8p"] Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.579594 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-db-sync-nd9l5"] Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.587068 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-db-sync-nd9l5"] Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.592017 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.592246 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="924fa7ce-8d60-4b2f-b62b-d5e146474f71" containerName="nova-cell0-conductor-conductor" containerID="cri-o://d99379340565779627596cecb15d625f9bcb83d751ea4f5f14fb7cb8bcb8a6b6" gracePeriod=30 Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.595156 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.595321 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" podUID="e029b16a-f02b-40b4-82b9-5fa08fe62bc1" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://71ba9af23a1c5424b7ecc46753bf8d593f4b1658ef2cbde792f66ad414383b91" gracePeriod=30 Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.600373 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-zmctf"] Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.606197 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.606338 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/kube-state-metrics-0" podUID="49822b01-63d0-40d3-b604-7980891b0683" containerName="kube-state-metrics" containerID="cri-o://7b033a817f9b0c0a5a28b3bb09ca9cc6417d0b599821e6bdbdcf0121964efdf3" gracePeriod=30 Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.607638 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.607881 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="cd2275b9-323a-4981-aadd-791b68003e3d" containerName="ceilometer-central-agent" containerID="cri-o://0da9214e9e0f3d0fbf917b83ce287f9004e70a70097ee2a38aa219e649adaa78" gracePeriod=30 Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.608031 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="cd2275b9-323a-4981-aadd-791b68003e3d" containerName="proxy-httpd" containerID="cri-o://7c70db500708c288ba013d8ce9dfe939d968f0b17f97ccf964db23badf9b4e95" gracePeriod=30 Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.608072 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="cd2275b9-323a-4981-aadd-791b68003e3d" containerName="sg-core" containerID="cri-o://71e0601a2bf0479fed8749492b6488623cab0139872e72819a3a90baa127e93a" gracePeriod=30 Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.608105 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="cd2275b9-323a-4981-aadd-791b68003e3d" containerName="ceilometer-notification-agent" containerID="cri-o://0833d5463f07e5954d4e2d0a3a43a4419ed8daf27634dc726b1fc4ce59dfc2f9" gracePeriod=30 Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.777555 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-proxy-76d6dddffd-4qcpv"] Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.777998 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-proxy-76d6dddffd-4qcpv" podUID="433afe0d-c979-4c31-a8dc-8a2d93fbe3df" containerName="proxy-httpd" containerID="cri-o://48a32835b74390f16be559ac97cec4eea57eb34d5852a0e5d9f37f38184cdf7a" gracePeriod=30 Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.778483 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-proxy-76d6dddffd-4qcpv" podUID="433afe0d-c979-4c31-a8dc-8a2d93fbe3df" 
containerName="proxy-server" containerID="cri-o://201a91126279426ab94e20fd8f5beddaa054584c71b596e84fb7b72c3f13b29f" gracePeriod=30 Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.836794 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.839104 4558 generic.go:334] "Generic (PLEG): container finished" podID="5eac3a8d-ee82-4e6f-bdb9-fccb82e09496" containerID="931107b5dd291893e36bfee944ab2879ec15efdaafdf4d8d3e976539e9c14eab" exitCode=143 Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.839189 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"5eac3a8d-ee82-4e6f-bdb9-fccb82e09496","Type":"ContainerDied","Data":"931107b5dd291893e36bfee944ab2879ec15efdaafdf4d8d3e976539e9c14eab"} Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.847246 4558 generic.go:334] "Generic (PLEG): container finished" podID="6ea6b41c-c5ed-49c9-84f5-a932a1325aba" containerID="a10e865cdae4227bcd9b239722ad2d2d6bc501887291e7983d1cf31b29c7b76e" exitCode=0 Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.847294 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-75676d645b-l2sp7" event={"ID":"6ea6b41c-c5ed-49c9-84f5-a932a1325aba","Type":"ContainerDied","Data":"a10e865cdae4227bcd9b239722ad2d2d6bc501887291e7983d1cf31b29c7b76e"} Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.851013 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-zmctf" event={"ID":"c33a9cb9-2eec-4c2e-847d-9b06fcce5fef","Type":"ContainerStarted","Data":"6bbf17d0b514204638f96f7976faacb7a5932888ba4c9555efd9243c159f4174"} Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.853675 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-a0eb-account-create-update-75hkz" event={"ID":"6d80c5b5-880f-4d59-9287-334023ec6ce9","Type":"ContainerStarted","Data":"17731a7facba62835b8bba083e3ada7aa2f3c60273012bf054a7ed401b083d69"} Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.856301 4558 generic.go:334] "Generic (PLEG): container finished" podID="6e7de5b3-4426-4816-9d45-9b4226333dbc" containerID="5e481f5bc5fbafdbeaf23fc31d695f949c1cb9231a43da689c3c8047fcf0e3bf" exitCode=143 Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.856368 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"6e7de5b3-4426-4816-9d45-9b4226333dbc","Type":"ContainerDied","Data":"5e481f5bc5fbafdbeaf23fc31d695f949c1cb9231a43da689c3c8047fcf0e3bf"} Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.872030 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.878477 4558 generic.go:334] "Generic (PLEG): container finished" podID="f30aa605-d974-4d0d-aa96-5cb0d96897d6" containerID="519c148165b8e5d8031cd18cf91e1dcfa134e4235207cac41f7dc13725bac816" exitCode=143 Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.878541 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"f30aa605-d974-4d0d-aa96-5cb0d96897d6","Type":"ContainerDied","Data":"519c148165b8e5d8031cd18cf91e1dcfa134e4235207cac41f7dc13725bac816"} Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.879179 4558 kubelet.go:2431] "SyncLoop REMOVE" 
source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.883675 4558 generic.go:334] "Generic (PLEG): container finished" podID="7d535532-403f-4d55-8138-7e09287d0108" containerID="f6b470a99751a2192a7c08c8ea1e3240681879e8b22e9a2638b977811313762b" exitCode=143 Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.883716 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"7d535532-403f-4d55-8138-7e09287d0108","Type":"ContainerDied","Data":"f6b470a99751a2192a7c08c8ea1e3240681879e8b22e9a2638b977811313762b"} Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.893868 4558 generic.go:334] "Generic (PLEG): container finished" podID="51c79956-4212-4d46-b3e9-e9d5e7a33c31" containerID="fbd3990f851ba4c232be9e8e9146ae26662c46c76d2e455f20e0dcc444b77578" exitCode=143 Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.893925 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-5bb5dd7cd4-w4jnj" event={"ID":"51c79956-4212-4d46-b3e9-e9d5e7a33c31","Type":"ContainerDied","Data":"fbd3990f851ba4c232be9e8e9146ae26662c46c76d2e455f20e0dcc444b77578"} Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.898327 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-f275-account-create-update-5pb7k" event={"ID":"01f39889-c29f-4e28-98fa-bc99322f761c","Type":"ContainerStarted","Data":"b3b94a9f34d9c411968d37f74a75c99e51ebd8f16f5d8c41ee0aaf3c1af32b39"} Jan 20 17:53:03 crc kubenswrapper[4558]: E0120 17:53:03.901396 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:53:03 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:53:03 crc kubenswrapper[4558]: Jan 20 17:53:03 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:53:03 crc kubenswrapper[4558]: Jan 20 17:53:03 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:53:03 crc kubenswrapper[4558]: Jan 20 17:53:03 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:53:03 crc kubenswrapper[4558]: Jan 20 17:53:03 crc kubenswrapper[4558]: if [ -n "nova_cell0" ]; then Jan 20 17:53:03 crc kubenswrapper[4558]: GRANT_DATABASE="nova_cell0" Jan 20 17:53:03 crc kubenswrapper[4558]: else Jan 20 17:53:03 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:53:03 crc kubenswrapper[4558]: fi Jan 20 17:53:03 crc kubenswrapper[4558]: Jan 20 17:53:03 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:53:03 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:53:03 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:53:03 crc kubenswrapper[4558]: # 3. 
create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:53:03 crc kubenswrapper[4558]: # support updates Jan 20 17:53:03 crc kubenswrapper[4558]: Jan 20 17:53:03 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:53:03 crc kubenswrapper[4558]: E0120 17:53:03.902522 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"nova-cell0-db-secret\\\" not found\"" pod="openstack-kuttl-tests/nova-cell0-a0eb-account-create-update-75hkz" podUID="6d80c5b5-880f-4d59-9287-334023ec6ce9" Jan 20 17:53:03 crc kubenswrapper[4558]: E0120 17:53:03.903901 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:53:03 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:53:03 crc kubenswrapper[4558]: Jan 20 17:53:03 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:53:03 crc kubenswrapper[4558]: Jan 20 17:53:03 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:53:03 crc kubenswrapper[4558]: Jan 20 17:53:03 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:53:03 crc kubenswrapper[4558]: Jan 20 17:53:03 crc kubenswrapper[4558]: if [ -n "nova_api" ]; then Jan 20 17:53:03 crc kubenswrapper[4558]: GRANT_DATABASE="nova_api" Jan 20 17:53:03 crc kubenswrapper[4558]: else Jan 20 17:53:03 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:53:03 crc kubenswrapper[4558]: fi Jan 20 17:53:03 crc kubenswrapper[4558]: Jan 20 17:53:03 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:53:03 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:53:03 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:53:03 crc kubenswrapper[4558]: # 3. 
create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:53:03 crc kubenswrapper[4558]: # support updates Jan 20 17:53:03 crc kubenswrapper[4558]: Jan 20 17:53:03 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:53:03 crc kubenswrapper[4558]: E0120 17:53:03.904969 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"nova-api-db-secret\\\" not found\"" pod="openstack-kuttl-tests/nova-api-f275-account-create-update-5pb7k" podUID="01f39889-c29f-4e28-98fa-bc99322f761c" Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.924738 4558 generic.go:334] "Generic (PLEG): container finished" podID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" containerID="049005bceb3d9c7f92a80adfa58232b99be025be046ebfa5fdd30c2501029dd6" exitCode=0 Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.924774 4558 generic.go:334] "Generic (PLEG): container finished" podID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" containerID="5e23628ab2cc3c257412804853dd2671a2b679976e1f78c61ccacc4929dc21cd" exitCode=0 Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.924783 4558 generic.go:334] "Generic (PLEG): container finished" podID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" containerID="4842ce500b116933451ba6ea6d30de4651175498d4768796b95de36768b44fc5" exitCode=0 Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.924793 4558 generic.go:334] "Generic (PLEG): container finished" podID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" containerID="2d0d3c4c9084f5b47b5268a7e1d20896d7cf8d59d93e03270dfa21fcb273c377" exitCode=0 Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.924867 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"64d029c7-79c6-40fc-b4d2-ac5bfbc387a0","Type":"ContainerDied","Data":"049005bceb3d9c7f92a80adfa58232b99be025be046ebfa5fdd30c2501029dd6"} Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.924886 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"64d029c7-79c6-40fc-b4d2-ac5bfbc387a0","Type":"ContainerDied","Data":"5e23628ab2cc3c257412804853dd2671a2b679976e1f78c61ccacc4929dc21cd"} Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.924899 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"64d029c7-79c6-40fc-b4d2-ac5bfbc387a0","Type":"ContainerDied","Data":"4842ce500b116933451ba6ea6d30de4651175498d4768796b95de36768b44fc5"} Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.924924 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"64d029c7-79c6-40fc-b4d2-ac5bfbc387a0","Type":"ContainerDied","Data":"2d0d3c4c9084f5b47b5268a7e1d20896d7cf8d59d93e03270dfa21fcb273c377"} Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.937305 4558 generic.go:334] "Generic (PLEG): container finished" podID="8101ebb9-ace1-4242-8c8b-698307c6be29" containerID="95dc200c1f4d2871200f75f66fd687fce8cf419611a0ac26406a639578f912ef" exitCode=1 Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.937483 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-bxrps" event={"ID":"8101ebb9-ace1-4242-8c8b-698307c6be29","Type":"ContainerDied","Data":"95dc200c1f4d2871200f75f66fd687fce8cf419611a0ac26406a639578f912ef"} Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.938666 
4558 scope.go:117] "RemoveContainer" containerID="95dc200c1f4d2871200f75f66fd687fce8cf419611a0ac26406a639578f912ef" Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.952930 4558 generic.go:334] "Generic (PLEG): container finished" podID="e638a157-4234-4339-80ef-65d818d39b73" containerID="5ffeb03ff85f67c07d33f3f3725dfcbef564cf6c716c687b03dc2f0eef33238d" exitCode=143 Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.953805 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-5bfc7dd8b8-pxpm9" event={"ID":"e638a157-4234-4339-80ef-65d818d39b73","Type":"ContainerDied","Data":"5ffeb03ff85f67c07d33f3f3725dfcbef564cf6c716c687b03dc2f0eef33238d"} Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.965978 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-d1d9-account-create-update-g54ff" event={"ID":"3eabae3c-0014-4d8e-9393-7cb934ba9fa4","Type":"ContainerStarted","Data":"3282ae1da7ed4b5685a8d00992339b13bdeb88b9fb00b1690d6e296c0fb8f1f9"} Jan 20 17:53:03 crc kubenswrapper[4558]: I0120 17:53:03.967596 4558 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="openstack-kuttl-tests/nova-cell1-d1d9-account-create-update-g54ff" secret="" err="secret \"galera-openstack-cell1-dockercfg-gzrvj\" not found" Jan 20 17:53:03 crc kubenswrapper[4558]: E0120 17:53:03.992630 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:53:03 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:53:03 crc kubenswrapper[4558]: Jan 20 17:53:03 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:53:03 crc kubenswrapper[4558]: Jan 20 17:53:03 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:53:03 crc kubenswrapper[4558]: Jan 20 17:53:03 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:53:03 crc kubenswrapper[4558]: Jan 20 17:53:03 crc kubenswrapper[4558]: if [ -n "nova_cell1" ]; then Jan 20 17:53:03 crc kubenswrapper[4558]: GRANT_DATABASE="nova_cell1" Jan 20 17:53:03 crc kubenswrapper[4558]: else Jan 20 17:53:03 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:53:03 crc kubenswrapper[4558]: fi Jan 20 17:53:03 crc kubenswrapper[4558]: Jan 20 17:53:03 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:53:03 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:53:03 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:53:03 crc kubenswrapper[4558]: # 3. 
create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:53:03 crc kubenswrapper[4558]: # support updates Jan 20 17:53:03 crc kubenswrapper[4558]: Jan 20 17:53:03 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:53:03 crc kubenswrapper[4558]: E0120 17:53:03.993464 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:53:03 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:53:03 crc kubenswrapper[4558]: Jan 20 17:53:03 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:53:03 crc kubenswrapper[4558]: Jan 20 17:53:03 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:53:03 crc kubenswrapper[4558]: Jan 20 17:53:03 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:53:03 crc kubenswrapper[4558]: Jan 20 17:53:03 crc kubenswrapper[4558]: if [ -n "neutron" ]; then Jan 20 17:53:03 crc kubenswrapper[4558]: GRANT_DATABASE="neutron" Jan 20 17:53:03 crc kubenswrapper[4558]: else Jan 20 17:53:03 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:53:03 crc kubenswrapper[4558]: fi Jan 20 17:53:03 crc kubenswrapper[4558]: Jan 20 17:53:03 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:53:03 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:53:03 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:53:03 crc kubenswrapper[4558]: # 3. create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:53:03 crc kubenswrapper[4558]: # support updates Jan 20 17:53:03 crc kubenswrapper[4558]: Jan 20 17:53:03 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:53:04 crc kubenswrapper[4558]: E0120 17:53:04.006900 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"nova-cell1-db-secret\\\" not found\"" pod="openstack-kuttl-tests/nova-cell1-d1d9-account-create-update-g54ff" podUID="3eabae3c-0014-4d8e-9393-7cb934ba9fa4" Jan 20 17:53:04 crc kubenswrapper[4558]: E0120 17:53:04.006946 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"neutron-db-secret\\\" not found\"" pod="openstack-kuttl-tests/neutron-a1e2-account-create-update-hfpfh" podUID="799d2689-2fcd-4bc5-ae70-1d0ae1194c76" Jan 20 17:53:04 crc kubenswrapper[4558]: E0120 17:53:04.009057 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-cell1-scripts: configmap "openstack-cell1-scripts" not found Jan 20 17:53:04 crc kubenswrapper[4558]: E0120 17:53:04.009189 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/3eabae3c-0014-4d8e-9393-7cb934ba9fa4-operator-scripts podName:3eabae3c-0014-4d8e-9393-7cb934ba9fa4 nodeName:}" failed. No retries permitted until 2026-01-20 17:53:04.509154903 +0000 UTC m=+4278.269492871 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/3eabae3c-0014-4d8e-9393-7cb934ba9fa4-operator-scripts") pod "nova-cell1-d1d9-account-create-update-g54ff" (UID: "3eabae3c-0014-4d8e-9393-7cb934ba9fa4") : configmap "openstack-cell1-scripts" not found Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.056247 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/rabbitmq-server-0" podUID="088e44cc-3515-4736-aa46-721774902209" containerName="rabbitmq" containerID="cri-o://0cda7f898a6fe83eef5b3e9a295e4cc99749a10d45642f677ab0d846c1aac497" gracePeriod=604800 Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.191615 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/openstack-cell1-galera-0" podUID="05b3b028-51e2-49a9-9fb8-a10c096f3b27" containerName="galera" containerID="cri-o://5c52a309d4b859bba6422d3ae6fa7e9422d3b5d97dc982533c7cb8e876b2f184" gracePeriod=30 Jan 20 17:53:04 crc kubenswrapper[4558]: E0120 17:53:04.319964 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d99379340565779627596cecb15d625f9bcb83d751ea4f5f14fb7cb8bcb8a6b6" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:53:04 crc kubenswrapper[4558]: E0120 17:53:04.321316 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d99379340565779627596cecb15d625f9bcb83d751ea4f5f14fb7cb8bcb8a6b6" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:53:04 crc kubenswrapper[4558]: E0120 17:53:04.322742 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d99379340565779627596cecb15d625f9bcb83d751ea4f5f14fb7cb8bcb8a6b6" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:53:04 crc kubenswrapper[4558]: E0120 17:53:04.322842 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="924fa7ce-8d60-4b2f-b62b-d5e146474f71" containerName="nova-cell0-conductor-conductor" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.470561 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.480246 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-5bb5dd7cd4-w4jnj" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.519410 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-brxgv\" (UniqueName: \"kubernetes.io/projected/49822b01-63d0-40d3-b604-7980891b0683-kube-api-access-brxgv\") pod \"49822b01-63d0-40d3-b604-7980891b0683\" (UID: \"49822b01-63d0-40d3-b604-7980891b0683\") " Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.519479 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/49822b01-63d0-40d3-b604-7980891b0683-combined-ca-bundle\") pod \"49822b01-63d0-40d3-b604-7980891b0683\" (UID: \"49822b01-63d0-40d3-b604-7980891b0683\") " Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.519503 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51c79956-4212-4d46-b3e9-e9d5e7a33c31-combined-ca-bundle\") pod \"51c79956-4212-4d46-b3e9-e9d5e7a33c31\" (UID: \"51c79956-4212-4d46-b3e9-e9d5e7a33c31\") " Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.519545 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bkszk\" (UniqueName: \"kubernetes.io/projected/51c79956-4212-4d46-b3e9-e9d5e7a33c31-kube-api-access-bkszk\") pod \"51c79956-4212-4d46-b3e9-e9d5e7a33c31\" (UID: \"51c79956-4212-4d46-b3e9-e9d5e7a33c31\") " Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.519620 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/51c79956-4212-4d46-b3e9-e9d5e7a33c31-config-data\") pod \"51c79956-4212-4d46-b3e9-e9d5e7a33c31\" (UID: \"51c79956-4212-4d46-b3e9-e9d5e7a33c31\") " Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.519642 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/49822b01-63d0-40d3-b604-7980891b0683-kube-state-metrics-tls-config\") pod \"49822b01-63d0-40d3-b604-7980891b0683\" (UID: \"49822b01-63d0-40d3-b604-7980891b0683\") " Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.519673 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/49822b01-63d0-40d3-b604-7980891b0683-kube-state-metrics-tls-certs\") pod \"49822b01-63d0-40d3-b604-7980891b0683\" (UID: \"49822b01-63d0-40d3-b604-7980891b0683\") " Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.519691 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/51c79956-4212-4d46-b3e9-e9d5e7a33c31-config-data-custom\") pod \"51c79956-4212-4d46-b3e9-e9d5e7a33c31\" (UID: \"51c79956-4212-4d46-b3e9-e9d5e7a33c31\") " Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.519744 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/51c79956-4212-4d46-b3e9-e9d5e7a33c31-logs\") pod \"51c79956-4212-4d46-b3e9-e9d5e7a33c31\" (UID: \"51c79956-4212-4d46-b3e9-e9d5e7a33c31\") " Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.522179 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/rabbitmq-server-0" 
podUID="088e44cc-3515-4736-aa46-721774902209" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.1.84:5671: connect: connection refused" Jan 20 17:53:04 crc kubenswrapper[4558]: E0120 17:53:04.522593 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-cell1-scripts: configmap "openstack-cell1-scripts" not found Jan 20 17:53:04 crc kubenswrapper[4558]: E0120 17:53:04.522661 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/3eabae3c-0014-4d8e-9393-7cb934ba9fa4-operator-scripts podName:3eabae3c-0014-4d8e-9393-7cb934ba9fa4 nodeName:}" failed. No retries permitted until 2026-01-20 17:53:05.522637774 +0000 UTC m=+4279.282975741 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/3eabae3c-0014-4d8e-9393-7cb934ba9fa4-operator-scripts") pod "nova-cell1-d1d9-account-create-update-g54ff" (UID: "3eabae3c-0014-4d8e-9393-7cb934ba9fa4") : configmap "openstack-cell1-scripts" not found Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.523379 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/51c79956-4212-4d46-b3e9-e9d5e7a33c31-logs" (OuterVolumeSpecName: "logs") pod "51c79956-4212-4d46-b3e9-e9d5e7a33c31" (UID: "51c79956-4212-4d46-b3e9-e9d5e7a33c31"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.531186 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49822b01-63d0-40d3-b604-7980891b0683-kube-api-access-brxgv" (OuterVolumeSpecName: "kube-api-access-brxgv") pod "49822b01-63d0-40d3-b604-7980891b0683" (UID: "49822b01-63d0-40d3-b604-7980891b0683"). InnerVolumeSpecName "kube-api-access-brxgv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.555062 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/51c79956-4212-4d46-b3e9-e9d5e7a33c31-kube-api-access-bkszk" (OuterVolumeSpecName: "kube-api-access-bkszk") pod "51c79956-4212-4d46-b3e9-e9d5e7a33c31" (UID: "51c79956-4212-4d46-b3e9-e9d5e7a33c31"). InnerVolumeSpecName "kube-api-access-bkszk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.561499 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/51c79956-4212-4d46-b3e9-e9d5e7a33c31-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "51c79956-4212-4d46-b3e9-e9d5e7a33c31" (UID: "51c79956-4212-4d46-b3e9-e9d5e7a33c31"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.581720 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstackclient" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.584538 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0482f0bb-b64a-407b-954f-8f0f8a04572f" path="/var/lib/kubelet/pods/0482f0bb-b64a-407b-954f-8f0f8a04572f/volumes" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.585344 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef" path="/var/lib/kubelet/pods/04c2bb7a-e2e2-4f8f-bf20-e6c381d117ef/volumes" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.586433 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0fd41882-86eb-4969-8b15-43ea14ac1558" path="/var/lib/kubelet/pods/0fd41882-86eb-4969-8b15-43ea14ac1558/volumes" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.587862 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="100de1e9-d90a-4b00-9e80-82656bc418c1" path="/var/lib/kubelet/pods/100de1e9-d90a-4b00-9e80-82656bc418c1/volumes" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.589901 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="206eba3c-fb3b-4779-acfe-5d63f0dfc9fa" path="/var/lib/kubelet/pods/206eba3c-fb3b-4779-acfe-5d63f0dfc9fa/volumes" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.590731 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="30bf2b72-7578-4d79-b11f-e7d8f34dc805" path="/var/lib/kubelet/pods/30bf2b72-7578-4d79-b11f-e7d8f34dc805/volumes" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.591275 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3b209c02-0fa1-4783-9b88-390400b5282c" path="/var/lib/kubelet/pods/3b209c02-0fa1-4783-9b88-390400b5282c/volumes" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.591830 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5447b108-6890-4614-88fa-3df24ce0d9b5" path="/var/lib/kubelet/pods/5447b108-6890-4614-88fa-3df24ce0d9b5/volumes" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.593537 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="594d0c54-940e-4220-ba99-bd5311ce96d0" path="/var/lib/kubelet/pods/594d0c54-940e-4220-ba99-bd5311ce96d0/volumes" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.594244 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="634825c8-6d4e-42a0-83c6-c83d105a2a7f" path="/var/lib/kubelet/pods/634825c8-6d4e-42a0-83c6-c83d105a2a7f/volumes" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.595270 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="734caadf-9e91-4acd-9c76-0948d33c4c20" path="/var/lib/kubelet/pods/734caadf-9e91-4acd-9c76-0948d33c4c20/volumes" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.597905 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="74ff4ade-5ef8-4f1d-81c1-ca1b0a85f84c" path="/var/lib/kubelet/pods/74ff4ade-5ef8-4f1d-81c1-ca1b0a85f84c/volumes" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.601300 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8ea2267f-dec5-48ce-8f86-275077b68e57" path="/var/lib/kubelet/pods/8ea2267f-dec5-48ce-8f86-275077b68e57/volumes" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.602716 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ab7f32c5-66c5-47a0-a63f-530282c4db6a" 
path="/var/lib/kubelet/pods/ab7f32c5-66c5-47a0-a63f-530282c4db6a/volumes" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.603399 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b26fb19f-321d-4dd1-99dd-a706e445d49c" path="/var/lib/kubelet/pods/b26fb19f-321d-4dd1-99dd-a706e445d49c/volumes" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.604332 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bebbe6bd-5855-4553-bbb4-7d994a90d025" path="/var/lib/kubelet/pods/bebbe6bd-5855-4553-bbb4-7d994a90d025/volumes" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.609353 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fa9f3a26-1b5a-4d02-a79d-67585f099131" path="/var/lib/kubelet/pods/fa9f3a26-1b5a-4d02-a79d-67585f099131/volumes" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.610719 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fc65f7eb-e162-44ae-8136-bdcb6baa7c0f" path="/var/lib/kubelet/pods/fc65f7eb-e162-44ae-8136-bdcb6baa7c0f/volumes" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.619348 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49822b01-63d0-40d3-b604-7980891b0683-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "49822b01-63d0-40d3-b604-7980891b0683" (UID: "49822b01-63d0-40d3-b604-7980891b0683"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.624182 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qm2b9\" (UniqueName: \"kubernetes.io/projected/10d0164b-faa3-408c-9b22-25d1cff2c4e3-kube-api-access-qm2b9\") pod \"10d0164b-faa3-408c-9b22-25d1cff2c4e3\" (UID: \"10d0164b-faa3-408c-9b22-25d1cff2c4e3\") " Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.624268 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/10d0164b-faa3-408c-9b22-25d1cff2c4e3-openstack-config-secret\") pod \"10d0164b-faa3-408c-9b22-25d1cff2c4e3\" (UID: \"10d0164b-faa3-408c-9b22-25d1cff2c4e3\") " Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.624489 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10d0164b-faa3-408c-9b22-25d1cff2c4e3-combined-ca-bundle\") pod \"10d0164b-faa3-408c-9b22-25d1cff2c4e3\" (UID: \"10d0164b-faa3-408c-9b22-25d1cff2c4e3\") " Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.624636 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/10d0164b-faa3-408c-9b22-25d1cff2c4e3-openstack-config\") pod \"10d0164b-faa3-408c-9b22-25d1cff2c4e3\" (UID: \"10d0164b-faa3-408c-9b22-25d1cff2c4e3\") " Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.625718 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/49822b01-63d0-40d3-b604-7980891b0683-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.625747 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bkszk\" (UniqueName: \"kubernetes.io/projected/51c79956-4212-4d46-b3e9-e9d5e7a33c31-kube-api-access-bkszk\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:04 crc 
kubenswrapper[4558]: I0120 17:53:04.625759 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/51c79956-4212-4d46-b3e9-e9d5e7a33c31-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.625771 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/51c79956-4212-4d46-b3e9-e9d5e7a33c31-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.625784 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-brxgv\" (UniqueName: \"kubernetes.io/projected/49822b01-63d0-40d3-b604-7980891b0683-kube-api-access-brxgv\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:04 crc kubenswrapper[4558]: E0120 17:53:04.626541 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Jan 20 17:53:04 crc kubenswrapper[4558]: E0120 17:53:04.626796 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/c4a11c40-a157-4a28-b1a3-60c211d1d0bf-config-data podName:c4a11c40-a157-4a28-b1a3-60c211d1d0bf nodeName:}" failed. No retries permitted until 2026-01-20 17:53:08.626771606 +0000 UTC m=+4282.387109573 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/c4a11c40-a157-4a28-b1a3-60c211d1d0bf-config-data") pod "rabbitmq-cell1-server-0" (UID: "c4a11c40-a157-4a28-b1a3-60c211d1d0bf") : configmap "rabbitmq-cell1-config-data" not found Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.631269 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/10d0164b-faa3-408c-9b22-25d1cff2c4e3-kube-api-access-qm2b9" (OuterVolumeSpecName: "kube-api-access-qm2b9") pod "10d0164b-faa3-408c-9b22-25d1cff2c4e3" (UID: "10d0164b-faa3-408c-9b22-25d1cff2c4e3"). InnerVolumeSpecName "kube-api-access-qm2b9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.643476 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/51c79956-4212-4d46-b3e9-e9d5e7a33c31-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "51c79956-4212-4d46-b3e9-e9d5e7a33c31" (UID: "51c79956-4212-4d46-b3e9-e9d5e7a33c31"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.645661 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49822b01-63d0-40d3-b604-7980891b0683-kube-state-metrics-tls-config" (OuterVolumeSpecName: "kube-state-metrics-tls-config") pod "49822b01-63d0-40d3-b604-7980891b0683" (UID: "49822b01-63d0-40d3-b604-7980891b0683"). InnerVolumeSpecName "kube-state-metrics-tls-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.674388 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/10d0164b-faa3-408c-9b22-25d1cff2c4e3-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "10d0164b-faa3-408c-9b22-25d1cff2c4e3" (UID: "10d0164b-faa3-408c-9b22-25d1cff2c4e3"). InnerVolumeSpecName "openstack-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.676766 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/51c79956-4212-4d46-b3e9-e9d5e7a33c31-config-data" (OuterVolumeSpecName: "config-data") pod "51c79956-4212-4d46-b3e9-e9d5e7a33c31" (UID: "51c79956-4212-4d46-b3e9-e9d5e7a33c31"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.684221 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49822b01-63d0-40d3-b604-7980891b0683-kube-state-metrics-tls-certs" (OuterVolumeSpecName: "kube-state-metrics-tls-certs") pod "49822b01-63d0-40d3-b604-7980891b0683" (UID: "49822b01-63d0-40d3-b604-7980891b0683"). InnerVolumeSpecName "kube-state-metrics-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.686274 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/10d0164b-faa3-408c-9b22-25d1cff2c4e3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "10d0164b-faa3-408c-9b22-25d1cff2c4e3" (UID: "10d0164b-faa3-408c-9b22-25d1cff2c4e3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.692529 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.716896 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-6c48cb9cdf-9688c" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.727740 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e029b16a-f02b-40b4-82b9-5fa08fe62bc1-combined-ca-bundle\") pod \"e029b16a-f02b-40b4-82b9-5fa08fe62bc1\" (UID: \"e029b16a-f02b-40b4-82b9-5fa08fe62bc1\") " Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.727822 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/e029b16a-f02b-40b4-82b9-5fa08fe62bc1-nova-novncproxy-tls-certs\") pod \"e029b16a-f02b-40b4-82b9-5fa08fe62bc1\" (UID: \"e029b16a-f02b-40b4-82b9-5fa08fe62bc1\") " Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.727956 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/e029b16a-f02b-40b4-82b9-5fa08fe62bc1-vencrypt-tls-certs\") pod \"e029b16a-f02b-40b4-82b9-5fa08fe62bc1\" (UID: \"e029b16a-f02b-40b4-82b9-5fa08fe62bc1\") " Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.727995 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e029b16a-f02b-40b4-82b9-5fa08fe62bc1-config-data\") pod \"e029b16a-f02b-40b4-82b9-5fa08fe62bc1\" (UID: \"e029b16a-f02b-40b4-82b9-5fa08fe62bc1\") " Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.728049 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6q8k\" (UniqueName: \"kubernetes.io/projected/e029b16a-f02b-40b4-82b9-5fa08fe62bc1-kube-api-access-d6q8k\") pod 
\"e029b16a-f02b-40b4-82b9-5fa08fe62bc1\" (UID: \"e029b16a-f02b-40b4-82b9-5fa08fe62bc1\") " Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.728886 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51c79956-4212-4d46-b3e9-e9d5e7a33c31-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.728907 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qm2b9\" (UniqueName: \"kubernetes.io/projected/10d0164b-faa3-408c-9b22-25d1cff2c4e3-kube-api-access-qm2b9\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.728919 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/51c79956-4212-4d46-b3e9-e9d5e7a33c31-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.728930 4558 reconciler_common.go:293] "Volume detached for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/49822b01-63d0-40d3-b604-7980891b0683-kube-state-metrics-tls-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.728940 4558 reconciler_common.go:293] "Volume detached for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/49822b01-63d0-40d3-b604-7980891b0683-kube-state-metrics-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.728951 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/10d0164b-faa3-408c-9b22-25d1cff2c4e3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.728962 4558 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/10d0164b-faa3-408c-9b22-25d1cff2c4e3-openstack-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.731729 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e029b16a-f02b-40b4-82b9-5fa08fe62bc1-kube-api-access-d6q8k" (OuterVolumeSpecName: "kube-api-access-d6q8k") pod "e029b16a-f02b-40b4-82b9-5fa08fe62bc1" (UID: "e029b16a-f02b-40b4-82b9-5fa08fe62bc1"). InnerVolumeSpecName "kube-api-access-d6q8k". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.735065 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-proxy-76d6dddffd-4qcpv" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.764825 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e029b16a-f02b-40b4-82b9-5fa08fe62bc1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e029b16a-f02b-40b4-82b9-5fa08fe62bc1" (UID: "e029b16a-f02b-40b4-82b9-5fa08fe62bc1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.769249 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/10d0164b-faa3-408c-9b22-25d1cff2c4e3-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "10d0164b-faa3-408c-9b22-25d1cff2c4e3" (UID: "10d0164b-faa3-408c-9b22-25d1cff2c4e3"). 
InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.784524 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e029b16a-f02b-40b4-82b9-5fa08fe62bc1-config-data" (OuterVolumeSpecName: "config-data") pod "e029b16a-f02b-40b4-82b9-5fa08fe62bc1" (UID: "e029b16a-f02b-40b4-82b9-5fa08fe62bc1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.802202 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" podUID="c4a11c40-a157-4a28-b1a3-60c211d1d0bf" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.1.85:5671: connect: connection refused" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.821276 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e029b16a-f02b-40b4-82b9-5fa08fe62bc1-vencrypt-tls-certs" (OuterVolumeSpecName: "vencrypt-tls-certs") pod "e029b16a-f02b-40b4-82b9-5fa08fe62bc1" (UID: "e029b16a-f02b-40b4-82b9-5fa08fe62bc1"). InnerVolumeSpecName "vencrypt-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.829751 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/769afcb7-df8b-4d95-b662-9f5032227822-config-data-custom\") pod \"769afcb7-df8b-4d95-b662-9f5032227822\" (UID: \"769afcb7-df8b-4d95-b662-9f5032227822\") " Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.829813 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/433afe0d-c979-4c31-a8dc-8a2d93fbe3df-log-httpd\") pod \"433afe0d-c979-4c31-a8dc-8a2d93fbe3df\" (UID: \"433afe0d-c979-4c31-a8dc-8a2d93fbe3df\") " Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.829836 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/769afcb7-df8b-4d95-b662-9f5032227822-config-data\") pod \"769afcb7-df8b-4d95-b662-9f5032227822\" (UID: \"769afcb7-df8b-4d95-b662-9f5032227822\") " Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.829980 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkpbk\" (UniqueName: \"kubernetes.io/projected/769afcb7-df8b-4d95-b662-9f5032227822-kube-api-access-zkpbk\") pod \"769afcb7-df8b-4d95-b662-9f5032227822\" (UID: \"769afcb7-df8b-4d95-b662-9f5032227822\") " Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.830045 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/433afe0d-c979-4c31-a8dc-8a2d93fbe3df-combined-ca-bundle\") pod \"433afe0d-c979-4c31-a8dc-8a2d93fbe3df\" (UID: \"433afe0d-c979-4c31-a8dc-8a2d93fbe3df\") " Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.830063 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/769afcb7-df8b-4d95-b662-9f5032227822-logs\") pod \"769afcb7-df8b-4d95-b662-9f5032227822\" (UID: \"769afcb7-df8b-4d95-b662-9f5032227822\") " Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.830085 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/433afe0d-c979-4c31-a8dc-8a2d93fbe3df-public-tls-certs\") pod \"433afe0d-c979-4c31-a8dc-8a2d93fbe3df\" (UID: \"433afe0d-c979-4c31-a8dc-8a2d93fbe3df\") " Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.830140 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/433afe0d-c979-4c31-a8dc-8a2d93fbe3df-internal-tls-certs\") pod \"433afe0d-c979-4c31-a8dc-8a2d93fbe3df\" (UID: \"433afe0d-c979-4c31-a8dc-8a2d93fbe3df\") " Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.830194 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m8zd4\" (UniqueName: \"kubernetes.io/projected/433afe0d-c979-4c31-a8dc-8a2d93fbe3df-kube-api-access-m8zd4\") pod \"433afe0d-c979-4c31-a8dc-8a2d93fbe3df\" (UID: \"433afe0d-c979-4c31-a8dc-8a2d93fbe3df\") " Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.830255 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/433afe0d-c979-4c31-a8dc-8a2d93fbe3df-run-httpd\") pod \"433afe0d-c979-4c31-a8dc-8a2d93fbe3df\" (UID: \"433afe0d-c979-4c31-a8dc-8a2d93fbe3df\") " Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.831558 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/433afe0d-c979-4c31-a8dc-8a2d93fbe3df-etc-swift\") pod \"433afe0d-c979-4c31-a8dc-8a2d93fbe3df\" (UID: \"433afe0d-c979-4c31-a8dc-8a2d93fbe3df\") " Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.831654 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/769afcb7-df8b-4d95-b662-9f5032227822-combined-ca-bundle\") pod \"769afcb7-df8b-4d95-b662-9f5032227822\" (UID: \"769afcb7-df8b-4d95-b662-9f5032227822\") " Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.831682 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/433afe0d-c979-4c31-a8dc-8a2d93fbe3df-config-data\") pod \"433afe0d-c979-4c31-a8dc-8a2d93fbe3df\" (UID: \"433afe0d-c979-4c31-a8dc-8a2d93fbe3df\") " Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.831862 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/433afe0d-c979-4c31-a8dc-8a2d93fbe3df-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "433afe0d-c979-4c31-a8dc-8a2d93fbe3df" (UID: "433afe0d-c979-4c31-a8dc-8a2d93fbe3df"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.832685 4558 reconciler_common.go:293] "Volume detached for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/e029b16a-f02b-40b4-82b9-5fa08fe62bc1-vencrypt-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.832709 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e029b16a-f02b-40b4-82b9-5fa08fe62bc1-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.832722 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6q8k\" (UniqueName: \"kubernetes.io/projected/e029b16a-f02b-40b4-82b9-5fa08fe62bc1-kube-api-access-d6q8k\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.832734 4558 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/10d0164b-faa3-408c-9b22-25d1cff2c4e3-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.832743 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e029b16a-f02b-40b4-82b9-5fa08fe62bc1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.832752 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/433afe0d-c979-4c31-a8dc-8a2d93fbe3df-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.838301 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e029b16a-f02b-40b4-82b9-5fa08fe62bc1-nova-novncproxy-tls-certs" (OuterVolumeSpecName: "nova-novncproxy-tls-certs") pod "e029b16a-f02b-40b4-82b9-5fa08fe62bc1" (UID: "e029b16a-f02b-40b4-82b9-5fa08fe62bc1"). InnerVolumeSpecName "nova-novncproxy-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.838337 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/769afcb7-df8b-4d95-b662-9f5032227822-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "769afcb7-df8b-4d95-b662-9f5032227822" (UID: "769afcb7-df8b-4d95-b662-9f5032227822"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.839376 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/769afcb7-df8b-4d95-b662-9f5032227822-logs" (OuterVolumeSpecName: "logs") pod "769afcb7-df8b-4d95-b662-9f5032227822" (UID: "769afcb7-df8b-4d95-b662-9f5032227822"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.839531 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/433afe0d-c979-4c31-a8dc-8a2d93fbe3df-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "433afe0d-c979-4c31-a8dc-8a2d93fbe3df" (UID: "433afe0d-c979-4c31-a8dc-8a2d93fbe3df"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.839929 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/433afe0d-c979-4c31-a8dc-8a2d93fbe3df-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "433afe0d-c979-4c31-a8dc-8a2d93fbe3df" (UID: "433afe0d-c979-4c31-a8dc-8a2d93fbe3df"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.863833 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/769afcb7-df8b-4d95-b662-9f5032227822-kube-api-access-zkpbk" (OuterVolumeSpecName: "kube-api-access-zkpbk") pod "769afcb7-df8b-4d95-b662-9f5032227822" (UID: "769afcb7-df8b-4d95-b662-9f5032227822"). InnerVolumeSpecName "kube-api-access-zkpbk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.866394 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.866632 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/memcached-0" podUID="a905b3ac-e1b2-49f8-8034-912138028aca" containerName="memcached" containerID="cri-o://516d63566e73522c69fe8fb7503a5f9b4f3300f2691369edc89aa5a577b600e5" gracePeriod=30 Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.869298 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/433afe0d-c979-4c31-a8dc-8a2d93fbe3df-kube-api-access-m8zd4" (OuterVolumeSpecName: "kube-api-access-m8zd4") pod "433afe0d-c979-4c31-a8dc-8a2d93fbe3df" (UID: "433afe0d-c979-4c31-a8dc-8a2d93fbe3df"). InnerVolumeSpecName "kube-api-access-m8zd4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.941543 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m8zd4\" (UniqueName: \"kubernetes.io/projected/433afe0d-c979-4c31-a8dc-8a2d93fbe3df-kube-api-access-m8zd4\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.941571 4558 reconciler_common.go:293] "Volume detached for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/e029b16a-f02b-40b4-82b9-5fa08fe62bc1-nova-novncproxy-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.941582 4558 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/433afe0d-c979-4c31-a8dc-8a2d93fbe3df-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.941595 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/769afcb7-df8b-4d95-b662-9f5032227822-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.941604 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/433afe0d-c979-4c31-a8dc-8a2d93fbe3df-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.941614 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkpbk\" (UniqueName: \"kubernetes.io/projected/769afcb7-df8b-4d95-b662-9f5032227822-kube-api-access-zkpbk\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.941622 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/769afcb7-df8b-4d95-b662-9f5032227822-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.952362 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-db47-account-create-update-qfpns"] Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.976273 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-db47-account-create-update-qfpns"] Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.985463 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-db47-account-create-update-wfpfn"] Jan 20 17:53:04 crc kubenswrapper[4558]: E0120 17:53:04.985895 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="49822b01-63d0-40d3-b604-7980891b0683" containerName="kube-state-metrics" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.985911 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="49822b01-63d0-40d3-b604-7980891b0683" containerName="kube-state-metrics" Jan 20 17:53:04 crc kubenswrapper[4558]: E0120 17:53:04.985919 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="433afe0d-c979-4c31-a8dc-8a2d93fbe3df" containerName="proxy-server" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.985924 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="433afe0d-c979-4c31-a8dc-8a2d93fbe3df" containerName="proxy-server" Jan 20 17:53:04 crc kubenswrapper[4558]: E0120 17:53:04.985935 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e029b16a-f02b-40b4-82b9-5fa08fe62bc1" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:53:04 crc 
kubenswrapper[4558]: I0120 17:53:04.985941 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e029b16a-f02b-40b4-82b9-5fa08fe62bc1" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:53:04 crc kubenswrapper[4558]: E0120 17:53:04.985960 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="433afe0d-c979-4c31-a8dc-8a2d93fbe3df" containerName="proxy-httpd" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.985965 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="433afe0d-c979-4c31-a8dc-8a2d93fbe3df" containerName="proxy-httpd" Jan 20 17:53:04 crc kubenswrapper[4558]: E0120 17:53:04.985974 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="769afcb7-df8b-4d95-b662-9f5032227822" containerName="barbican-worker-log" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.985979 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="769afcb7-df8b-4d95-b662-9f5032227822" containerName="barbican-worker-log" Jan 20 17:53:04 crc kubenswrapper[4558]: E0120 17:53:04.985997 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="634825c8-6d4e-42a0-83c6-c83d105a2a7f" containerName="openstack-network-exporter" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.986002 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="634825c8-6d4e-42a0-83c6-c83d105a2a7f" containerName="openstack-network-exporter" Jan 20 17:53:04 crc kubenswrapper[4558]: E0120 17:53:04.986011 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="51c79956-4212-4d46-b3e9-e9d5e7a33c31" containerName="barbican-keystone-listener" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.986016 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="51c79956-4212-4d46-b3e9-e9d5e7a33c31" containerName="barbican-keystone-listener" Jan 20 17:53:04 crc kubenswrapper[4558]: E0120 17:53:04.986033 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="634825c8-6d4e-42a0-83c6-c83d105a2a7f" containerName="ovsdbserver-sb" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.986038 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="634825c8-6d4e-42a0-83c6-c83d105a2a7f" containerName="ovsdbserver-sb" Jan 20 17:53:04 crc kubenswrapper[4558]: E0120 17:53:04.986049 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="769afcb7-df8b-4d95-b662-9f5032227822" containerName="barbican-worker" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.986054 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="769afcb7-df8b-4d95-b662-9f5032227822" containerName="barbican-worker" Jan 20 17:53:04 crc kubenswrapper[4558]: E0120 17:53:04.986071 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="51c79956-4212-4d46-b3e9-e9d5e7a33c31" containerName="barbican-keystone-listener-log" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.986076 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="51c79956-4212-4d46-b3e9-e9d5e7a33c31" containerName="barbican-keystone-listener-log" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.986251 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="51c79956-4212-4d46-b3e9-e9d5e7a33c31" containerName="barbican-keystone-listener" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.986269 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="51c79956-4212-4d46-b3e9-e9d5e7a33c31" containerName="barbican-keystone-listener-log" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.986280 4558 
memory_manager.go:354] "RemoveStaleState removing state" podUID="769afcb7-df8b-4d95-b662-9f5032227822" containerName="barbican-worker-log" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.986287 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="433afe0d-c979-4c31-a8dc-8a2d93fbe3df" containerName="proxy-server" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.986299 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e029b16a-f02b-40b4-82b9-5fa08fe62bc1" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.986310 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="634825c8-6d4e-42a0-83c6-c83d105a2a7f" containerName="ovsdbserver-sb" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.986321 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="49822b01-63d0-40d3-b604-7980891b0683" containerName="kube-state-metrics" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.986331 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="634825c8-6d4e-42a0-83c6-c83d105a2a7f" containerName="openstack-network-exporter" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.986340 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="769afcb7-df8b-4d95-b662-9f5032227822" containerName="barbican-worker" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.986348 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="433afe0d-c979-4c31-a8dc-8a2d93fbe3df" containerName="proxy-httpd" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.986939 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-db47-account-create-update-wfpfn" Jan 20 17:53:04 crc kubenswrapper[4558]: I0120 17:53:04.994555 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-db-secret" Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.004928 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-db-sync-nx4l8"] Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.014311 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-db47-account-create-update-wfpfn"] Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.016489 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/433afe0d-c979-4c31-a8dc-8a2d93fbe3df-config-data" (OuterVolumeSpecName: "config-data") pod "433afe0d-c979-4c31-a8dc-8a2d93fbe3df" (UID: "433afe0d-c979-4c31-a8dc-8a2d93fbe3df"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.017364 4558 generic.go:334] "Generic (PLEG): container finished" podID="10d0164b-faa3-408c-9b22-25d1cff2c4e3" containerID="ff027b2e7e7f3a64a246665fc1fbace52470e2c0c26cabfe23274befd6dfbcd9" exitCode=137 Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.017439 4558 scope.go:117] "RemoveContainer" containerID="ff027b2e7e7f3a64a246665fc1fbace52470e2c0c26cabfe23274befd6dfbcd9" Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.017565 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstackclient" Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.018371 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/769afcb7-df8b-4d95-b662-9f5032227822-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "769afcb7-df8b-4d95-b662-9f5032227822" (UID: "769afcb7-df8b-4d95-b662-9f5032227822"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.022311 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-knlxj"] Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.022735 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/433afe0d-c979-4c31-a8dc-8a2d93fbe3df-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "433afe0d-c979-4c31-a8dc-8a2d93fbe3df" (UID: "433afe0d-c979-4c31-a8dc-8a2d93fbe3df"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.026987 4558 generic.go:334] "Generic (PLEG): container finished" podID="64e30e89-cc14-4c89-bd1b-5702bfba717c" containerID="e0d25530c2349a51795cf3ce00e8ae91ba3476254b412bf92d6e0c208e2c53dc" exitCode=143 Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.027091 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"64e30e89-cc14-4c89-bd1b-5702bfba717c","Type":"ContainerDied","Data":"e0d25530c2349a51795cf3ce00e8ae91ba3476254b412bf92d6e0c208e2c53dc"} Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.027607 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/433afe0d-c979-4c31-a8dc-8a2d93fbe3df-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "433afe0d-c979-4c31-a8dc-8a2d93fbe3df" (UID: "433afe0d-c979-4c31-a8dc-8a2d93fbe3df"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.028675 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-db-sync-nx4l8"] Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.036034 4558 generic.go:334] "Generic (PLEG): container finished" podID="433afe0d-c979-4c31-a8dc-8a2d93fbe3df" containerID="201a91126279426ab94e20fd8f5beddaa054584c71b596e84fb7b72c3f13b29f" exitCode=0 Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.036062 4558 generic.go:334] "Generic (PLEG): container finished" podID="433afe0d-c979-4c31-a8dc-8a2d93fbe3df" containerID="48a32835b74390f16be559ac97cec4eea57eb34d5852a0e5d9f37f38184cdf7a" exitCode=0 Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.036124 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-76d6dddffd-4qcpv" event={"ID":"433afe0d-c979-4c31-a8dc-8a2d93fbe3df","Type":"ContainerDied","Data":"201a91126279426ab94e20fd8f5beddaa054584c71b596e84fb7b72c3f13b29f"} Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.036148 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-76d6dddffd-4qcpv" event={"ID":"433afe0d-c979-4c31-a8dc-8a2d93fbe3df","Type":"ContainerDied","Data":"48a32835b74390f16be559ac97cec4eea57eb34d5852a0e5d9f37f38184cdf7a"} Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.036172 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-76d6dddffd-4qcpv" event={"ID":"433afe0d-c979-4c31-a8dc-8a2d93fbe3df","Type":"ContainerDied","Data":"c1b4e327c581b4775ab7540e7af4a06ae88758cab258a2422129ac0f967db364"} Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.036227 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/swift-proxy-76d6dddffd-4qcpv" Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.040258 4558 generic.go:334] "Generic (PLEG): container finished" podID="cd2275b9-323a-4981-aadd-791b68003e3d" containerID="7c70db500708c288ba013d8ce9dfe939d968f0b17f97ccf964db23badf9b4e95" exitCode=0 Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.040284 4558 generic.go:334] "Generic (PLEG): container finished" podID="cd2275b9-323a-4981-aadd-791b68003e3d" containerID="71e0601a2bf0479fed8749492b6488623cab0139872e72819a3a90baa127e93a" exitCode=2 Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.040294 4558 generic.go:334] "Generic (PLEG): container finished" podID="cd2275b9-323a-4981-aadd-791b68003e3d" containerID="0da9214e9e0f3d0fbf917b83ce287f9004e70a70097ee2a38aa219e649adaa78" exitCode=0 Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.040336 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"cd2275b9-323a-4981-aadd-791b68003e3d","Type":"ContainerDied","Data":"7c70db500708c288ba013d8ce9dfe939d968f0b17f97ccf964db23badf9b4e95"} Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.040359 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"cd2275b9-323a-4981-aadd-791b68003e3d","Type":"ContainerDied","Data":"71e0601a2bf0479fed8749492b6488623cab0139872e72819a3a90baa127e93a"} Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.040370 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"cd2275b9-323a-4981-aadd-791b68003e3d","Type":"ContainerDied","Data":"0da9214e9e0f3d0fbf917b83ce287f9004e70a70097ee2a38aa219e649adaa78"} Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.041635 4558 generic.go:334] "Generic (PLEG): container finished" podID="c33a9cb9-2eec-4c2e-847d-9b06fcce5fef" containerID="c5ebce8532452e1fc28d5c965af2fb1ecd6916ab9ffef4442cb3e2b72c16f2e5" exitCode=0 Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.041675 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-zmctf" event={"ID":"c33a9cb9-2eec-4c2e-847d-9b06fcce5fef","Type":"ContainerDied","Data":"c5ebce8532452e1fc28d5c965af2fb1ecd6916ab9ffef4442cb3e2b72c16f2e5"} Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.043897 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fxcqr\" (UniqueName: \"kubernetes.io/projected/a335ddc5-7bf4-4c3e-83a8-175921bbecf4-kube-api-access-fxcqr\") pod \"keystone-db47-account-create-update-wfpfn\" (UID: \"a335ddc5-7bf4-4c3e-83a8-175921bbecf4\") " pod="openstack-kuttl-tests/keystone-db47-account-create-update-wfpfn" Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.044000 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a335ddc5-7bf4-4c3e-83a8-175921bbecf4-operator-scripts\") pod \"keystone-db47-account-create-update-wfpfn\" (UID: \"a335ddc5-7bf4-4c3e-83a8-175921bbecf4\") " pod="openstack-kuttl-tests/keystone-db47-account-create-update-wfpfn" Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.044060 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/433afe0d-c979-4c31-a8dc-8a2d93fbe3df-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:05 
crc kubenswrapper[4558]: I0120 17:53:05.044072 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/433afe0d-c979-4c31-a8dc-8a2d93fbe3df-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.044083 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/769afcb7-df8b-4d95-b662-9f5032227822-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.044093 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/433afe0d-c979-4c31-a8dc-8a2d93fbe3df-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.044248 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/433afe0d-c979-4c31-a8dc-8a2d93fbe3df-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "433afe0d-c979-4c31-a8dc-8a2d93fbe3df" (UID: "433afe0d-c979-4c31-a8dc-8a2d93fbe3df"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.045813 4558 generic.go:334] "Generic (PLEG): container finished" podID="05b3b028-51e2-49a9-9fb8-a10c096f3b27" containerID="5c52a309d4b859bba6422d3ae6fa7e9422d3b5d97dc982533c7cb8e876b2f184" exitCode=0 Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.045854 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"05b3b028-51e2-49a9-9fb8-a10c096f3b27","Type":"ContainerDied","Data":"5c52a309d4b859bba6422d3ae6fa7e9422d3b5d97dc982533c7cb8e876b2f184"} Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.045864 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/769afcb7-df8b-4d95-b662-9f5032227822-config-data" (OuterVolumeSpecName: "config-data") pod "769afcb7-df8b-4d95-b662-9f5032227822" (UID: "769afcb7-df8b-4d95-b662-9f5032227822"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.047617 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-knlxj"] Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.048651 4558 generic.go:334] "Generic (PLEG): container finished" podID="769afcb7-df8b-4d95-b662-9f5032227822" containerID="a5d920e84b45309d975c5a55142f856928971b658393631084941d3b078f522a" exitCode=0 Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.048699 4558 generic.go:334] "Generic (PLEG): container finished" podID="769afcb7-df8b-4d95-b662-9f5032227822" containerID="90b19c0fa576e6455d0cc3c15cd90c006b0ab179a1017800a7266892fa8e203e" exitCode=143 Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.048709 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-6c48cb9cdf-9688c" Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.048838 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-6c48cb9cdf-9688c" event={"ID":"769afcb7-df8b-4d95-b662-9f5032227822","Type":"ContainerDied","Data":"a5d920e84b45309d975c5a55142f856928971b658393631084941d3b078f522a"} Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.048878 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-6c48cb9cdf-9688c" event={"ID":"769afcb7-df8b-4d95-b662-9f5032227822","Type":"ContainerDied","Data":"90b19c0fa576e6455d0cc3c15cd90c006b0ab179a1017800a7266892fa8e203e"} Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.048892 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-6c48cb9cdf-9688c" event={"ID":"769afcb7-df8b-4d95-b662-9f5032227822","Type":"ContainerDied","Data":"5416068b976c483e0b787504b470fa498322dc028d84dc1f963156c425dba675"} Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.051512 4558 scope.go:117] "RemoveContainer" containerID="ff027b2e7e7f3a64a246665fc1fbace52470e2c0c26cabfe23274befd6dfbcd9" Jan 20 17:53:05 crc kubenswrapper[4558]: E0120 17:53:05.052356 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ff027b2e7e7f3a64a246665fc1fbace52470e2c0c26cabfe23274befd6dfbcd9\": container with ID starting with ff027b2e7e7f3a64a246665fc1fbace52470e2c0c26cabfe23274befd6dfbcd9 not found: ID does not exist" containerID="ff027b2e7e7f3a64a246665fc1fbace52470e2c0c26cabfe23274befd6dfbcd9" Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.052405 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ff027b2e7e7f3a64a246665fc1fbace52470e2c0c26cabfe23274befd6dfbcd9"} err="failed to get container status \"ff027b2e7e7f3a64a246665fc1fbace52470e2c0c26cabfe23274befd6dfbcd9\": rpc error: code = NotFound desc = could not find container \"ff027b2e7e7f3a64a246665fc1fbace52470e2c0c26cabfe23274befd6dfbcd9\": container with ID starting with ff027b2e7e7f3a64a246665fc1fbace52470e2c0c26cabfe23274befd6dfbcd9 not found: ID does not exist" Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.052449 4558 scope.go:117] "RemoveContainer" containerID="201a91126279426ab94e20fd8f5beddaa054584c71b596e84fb7b72c3f13b29f" Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.059774 4558 generic.go:334] "Generic (PLEG): container finished" podID="51c79956-4212-4d46-b3e9-e9d5e7a33c31" containerID="b9ac5c3d167c83d97bcf4206fa59450cb566eb19a154b51f5884efb725cdab16" exitCode=0 Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.059837 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-5bb5dd7cd4-w4jnj" event={"ID":"51c79956-4212-4d46-b3e9-e9d5e7a33c31","Type":"ContainerDied","Data":"b9ac5c3d167c83d97bcf4206fa59450cb566eb19a154b51f5884efb725cdab16"} Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.059868 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-5bb5dd7cd4-w4jnj" event={"ID":"51c79956-4212-4d46-b3e9-e9d5e7a33c31","Type":"ContainerDied","Data":"7a78a29587685c6b5db24fc2b2debe25c62d3bcff8bd1002751a66695746d8df"} Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.059932 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-5bb5dd7cd4-w4jnj" Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.063505 4558 generic.go:334] "Generic (PLEG): container finished" podID="49822b01-63d0-40d3-b604-7980891b0683" containerID="7b033a817f9b0c0a5a28b3bb09ca9cc6417d0b599821e6bdbdcf0121964efdf3" exitCode=2 Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.063574 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.064353 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"49822b01-63d0-40d3-b604-7980891b0683","Type":"ContainerDied","Data":"7b033a817f9b0c0a5a28b3bb09ca9cc6417d0b599821e6bdbdcf0121964efdf3"} Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.064384 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"49822b01-63d0-40d3-b604-7980891b0683","Type":"ContainerDied","Data":"590789da94b63822bbe31a06415d2201489b900e17908f80eb85919ce57a4b12"} Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.069492 4558 generic.go:334] "Generic (PLEG): container finished" podID="e029b16a-f02b-40b4-82b9-5fa08fe62bc1" containerID="71ba9af23a1c5424b7ecc46753bf8d593f4b1658ef2cbde792f66ad414383b91" exitCode=0 Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.069538 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"e029b16a-f02b-40b4-82b9-5fa08fe62bc1","Type":"ContainerDied","Data":"71ba9af23a1c5424b7ecc46753bf8d593f4b1658ef2cbde792f66ad414383b91"} Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.069556 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"e029b16a-f02b-40b4-82b9-5fa08fe62bc1","Type":"ContainerDied","Data":"4f2b23c2454036de8491db24c5d242ec8e092c442a06ff5758ab7788c4eae2a3"} Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.069600 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.073037 4558 generic.go:334] "Generic (PLEG): container finished" podID="51d19369-14ac-4d62-ab05-9c5830856622" containerID="dc3c169cf981fe359f960f5ef7f1e452664aacea8447f6ecb0fcdf2aa8023e7a" exitCode=0 Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.073133 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"51d19369-14ac-4d62-ab05-9c5830856622","Type":"ContainerDied","Data":"dc3c169cf981fe359f960f5ef7f1e452664aacea8447f6ecb0fcdf2aa8023e7a"} Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.078403 4558 generic.go:334] "Generic (PLEG): container finished" podID="8101ebb9-ace1-4242-8c8b-698307c6be29" containerID="f74190a5462e78d2ad25c03116d8718fb4477617af58b9c4c6c750b5d4e94b80" exitCode=1 Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.078892 4558 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." 
pod="openstack-kuttl-tests/root-account-create-update-bxrps" secret="" err="secret \"galera-openstack-dockercfg-7rwrt\" not found" Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.078926 4558 scope.go:117] "RemoveContainer" containerID="f74190a5462e78d2ad25c03116d8718fb4477617af58b9c4c6c750b5d4e94b80" Jan 20 17:53:05 crc kubenswrapper[4558]: E0120 17:53:05.079123 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CrashLoopBackOff: \"back-off 10s restarting failed container=mariadb-account-create-update pod=root-account-create-update-bxrps_openstack-kuttl-tests(8101ebb9-ace1-4242-8c8b-698307c6be29)\"" pod="openstack-kuttl-tests/root-account-create-update-bxrps" podUID="8101ebb9-ace1-4242-8c8b-698307c6be29" Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.079340 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-bxrps" event={"ID":"8101ebb9-ace1-4242-8c8b-698307c6be29","Type":"ContainerDied","Data":"f74190a5462e78d2ad25c03116d8718fb4477617af58b9c4c6c750b5d4e94b80"} Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.084504 4558 scope.go:117] "RemoveContainer" containerID="48a32835b74390f16be559ac97cec4eea57eb34d5852a0e5d9f37f38184cdf7a" Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.091193 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-667f6b846-wn5fk"] Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.091518 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/keystone-667f6b846-wn5fk" podUID="7db8891a-284a-47f5-b883-5aac563fc839" containerName="keystone-api" containerID="cri-o://d6f9a9e764e6371a2dc8fd22991e16caacd06318d66716c401edaedf85f6c0d7" gracePeriod=30 Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.101056 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-db47-account-create-update-wfpfn"] Jan 20 17:53:05 crc kubenswrapper[4558]: E0120 17:53:05.102867 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[kube-api-access-fxcqr operator-scripts], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack-kuttl-tests/keystone-db47-account-create-update-wfpfn" podUID="a335ddc5-7bf4-4c3e-83a8-175921bbecf4" Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.109219 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.121446 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-db-create-2sj6c"] Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.127091 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-db-create-2sj6c"] Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.146016 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a335ddc5-7bf4-4c3e-83a8-175921bbecf4-operator-scripts\") pod \"keystone-db47-account-create-update-wfpfn\" (UID: \"a335ddc5-7bf4-4c3e-83a8-175921bbecf4\") " pod="openstack-kuttl-tests/keystone-db47-account-create-update-wfpfn" Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.146186 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fxcqr\" (UniqueName: 
\"kubernetes.io/projected/a335ddc5-7bf4-4c3e-83a8-175921bbecf4-kube-api-access-fxcqr\") pod \"keystone-db47-account-create-update-wfpfn\" (UID: \"a335ddc5-7bf4-4c3e-83a8-175921bbecf4\") " pod="openstack-kuttl-tests/keystone-db47-account-create-update-wfpfn" Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.146259 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/433afe0d-c979-4c31-a8dc-8a2d93fbe3df-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.146294 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/769afcb7-df8b-4d95-b662-9f5032227822-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:05 crc kubenswrapper[4558]: E0120 17:53:05.146491 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 17:53:05 crc kubenswrapper[4558]: E0120 17:53:05.146535 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/a335ddc5-7bf4-4c3e-83a8-175921bbecf4-operator-scripts podName:a335ddc5-7bf4-4c3e-83a8-175921bbecf4 nodeName:}" failed. No retries permitted until 2026-01-20 17:53:05.646518525 +0000 UTC m=+4279.406856492 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/a335ddc5-7bf4-4c3e-83a8-175921bbecf4-operator-scripts") pod "keystone-db47-account-create-update-wfpfn" (UID: "a335ddc5-7bf4-4c3e-83a8-175921bbecf4") : configmap "openstack-scripts" not found Jan 20 17:53:05 crc kubenswrapper[4558]: E0120 17:53:05.146620 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 17:53:05 crc kubenswrapper[4558]: E0120 17:53:05.146668 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/8101ebb9-ace1-4242-8c8b-698307c6be29-operator-scripts podName:8101ebb9-ace1-4242-8c8b-698307c6be29 nodeName:}" failed. No retries permitted until 2026-01-20 17:53:05.646653309 +0000 UTC m=+4279.406991276 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/8101ebb9-ace1-4242-8c8b-698307c6be29-operator-scripts") pod "root-account-create-update-bxrps" (UID: "8101ebb9-ace1-4242-8c8b-698307c6be29") : configmap "openstack-scripts" not found Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.150387 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-bxrps"] Jan 20 17:53:05 crc kubenswrapper[4558]: E0120 17:53:05.151700 4558 projected.go:194] Error preparing data for projected volume kube-api-access-fxcqr for pod openstack-kuttl-tests/keystone-db47-account-create-update-wfpfn: failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 17:53:05 crc kubenswrapper[4558]: E0120 17:53:05.151758 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/a335ddc5-7bf4-4c3e-83a8-175921bbecf4-kube-api-access-fxcqr podName:a335ddc5-7bf4-4c3e-83a8-175921bbecf4 nodeName:}" failed. No retries permitted until 2026-01-20 17:53:05.651741256 +0000 UTC m=+4279.412079223 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-fxcqr" (UniqueName: "kubernetes.io/projected/a335ddc5-7bf4-4c3e-83a8-175921bbecf4-kube-api-access-fxcqr") pod "keystone-db47-account-create-update-wfpfn" (UID: "a335ddc5-7bf4-4c3e-83a8-175921bbecf4") : failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.158799 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-5bb5dd7cd4-w4jnj"] Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.184941 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-5bb5dd7cd4-w4jnj"] Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.185005 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-worker-6c48cb9cdf-9688c"] Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.213927 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-worker-6c48cb9cdf-9688c"] Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.231633 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.238476 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.273135 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.277448 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.309980 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/placement-658f45b9f4-tx296" podUID="0c9fc067-cb23-4f72-b928-5447fb5182c1" containerName="placement-log" probeResult="failure" output="Get \"https://10.217.1.233:8778/\": read tcp 10.217.0.2:53022->10.217.1.233:8778: read: connection reset by peer" Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.310053 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/placement-658f45b9f4-tx296" podUID="0c9fc067-cb23-4f72-b928-5447fb5182c1" containerName="placement-api" probeResult="failure" output="Get \"https://10.217.1.233:8778/\": read tcp 10.217.0.2:53038->10.217.1.233:8778: read: connection reset by peer" Jan 20 17:53:05 crc kubenswrapper[4558]: E0120 17:53:05.350560 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 20 17:53:05 crc kubenswrapper[4558]: E0120 17:53:05.350671 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/088e44cc-3515-4736-aa46-721774902209-config-data podName:088e44cc-3515-4736-aa46-721774902209 nodeName:}" failed. No retries permitted until 2026-01-20 17:53:09.350650029 +0000 UTC m=+4283.110987986 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/088e44cc-3515-4736-aa46-721774902209-config-data") pod "rabbitmq-server-0" (UID: "088e44cc-3515-4736-aa46-721774902209") : configmap "rabbitmq-config-data" not found Jan 20 17:53:05 crc kubenswrapper[4558]: E0120 17:53:05.554567 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-cell1-scripts: configmap "openstack-cell1-scripts" not found Jan 20 17:53:05 crc kubenswrapper[4558]: E0120 17:53:05.554678 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/3eabae3c-0014-4d8e-9393-7cb934ba9fa4-operator-scripts podName:3eabae3c-0014-4d8e-9393-7cb934ba9fa4 nodeName:}" failed. No retries permitted until 2026-01-20 17:53:07.554652671 +0000 UTC m=+4281.314990638 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/3eabae3c-0014-4d8e-9393-7cb934ba9fa4-operator-scripts") pod "nova-cell1-d1d9-account-create-update-g54ff" (UID: "3eabae3c-0014-4d8e-9393-7cb934ba9fa4") : configmap "openstack-cell1-scripts" not found Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.656576 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a335ddc5-7bf4-4c3e-83a8-175921bbecf4-operator-scripts\") pod \"keystone-db47-account-create-update-wfpfn\" (UID: \"a335ddc5-7bf4-4c3e-83a8-175921bbecf4\") " pod="openstack-kuttl-tests/keystone-db47-account-create-update-wfpfn" Jan 20 17:53:05 crc kubenswrapper[4558]: E0120 17:53:05.656758 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 17:53:05 crc kubenswrapper[4558]: E0120 17:53:05.656851 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/a335ddc5-7bf4-4c3e-83a8-175921bbecf4-operator-scripts podName:a335ddc5-7bf4-4c3e-83a8-175921bbecf4 nodeName:}" failed. No retries permitted until 2026-01-20 17:53:06.656831556 +0000 UTC m=+4280.417169513 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/a335ddc5-7bf4-4c3e-83a8-175921bbecf4-operator-scripts") pod "keystone-db47-account-create-update-wfpfn" (UID: "a335ddc5-7bf4-4c3e-83a8-175921bbecf4") : configmap "openstack-scripts" not found Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.656904 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fxcqr\" (UniqueName: \"kubernetes.io/projected/a335ddc5-7bf4-4c3e-83a8-175921bbecf4-kube-api-access-fxcqr\") pod \"keystone-db47-account-create-update-wfpfn\" (UID: \"a335ddc5-7bf4-4c3e-83a8-175921bbecf4\") " pod="openstack-kuttl-tests/keystone-db47-account-create-update-wfpfn" Jan 20 17:53:05 crc kubenswrapper[4558]: E0120 17:53:05.656924 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 17:53:05 crc kubenswrapper[4558]: E0120 17:53:05.657001 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/8101ebb9-ace1-4242-8c8b-698307c6be29-operator-scripts podName:8101ebb9-ace1-4242-8c8b-698307c6be29 nodeName:}" failed. No retries permitted until 2026-01-20 17:53:06.656979835 +0000 UTC m=+4280.417317802 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/8101ebb9-ace1-4242-8c8b-698307c6be29-operator-scripts") pod "root-account-create-update-bxrps" (UID: "8101ebb9-ace1-4242-8c8b-698307c6be29") : configmap "openstack-scripts" not found Jan 20 17:53:05 crc kubenswrapper[4558]: E0120 17:53:05.659550 4558 projected.go:194] Error preparing data for projected volume kube-api-access-fxcqr for pod openstack-kuttl-tests/keystone-db47-account-create-update-wfpfn: failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 17:53:05 crc kubenswrapper[4558]: E0120 17:53:05.659643 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/a335ddc5-7bf4-4c3e-83a8-175921bbecf4-kube-api-access-fxcqr podName:a335ddc5-7bf4-4c3e-83a8-175921bbecf4 nodeName:}" failed. No retries permitted until 2026-01-20 17:53:06.659618617 +0000 UTC m=+4280.419956574 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-fxcqr" (UniqueName: "kubernetes.io/projected/a335ddc5-7bf4-4c3e-83a8-175921bbecf4-kube-api-access-fxcqr") pod "keystone-db47-account-create-update-wfpfn" (UID: "a335ddc5-7bf4-4c3e-83a8-175921bbecf4") : failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 17:53:05 crc kubenswrapper[4558]: I0120 17:53:05.962317 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/openstack-galera-0" podUID="a18e9b68-4683-415e-833e-5b363e31d461" containerName="galera" containerID="cri-o://142baadd97db60a56cd4c8eefd272ce6d8e94faf94a19e872d0adb86b7b6376a" gracePeriod=30 Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.089576 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-f275-account-create-update-5pb7k" event={"ID":"01f39889-c29f-4e28-98fa-bc99322f761c","Type":"ContainerDied","Data":"b3b94a9f34d9c411968d37f74a75c99e51ebd8f16f5d8c41ee0aaf3c1af32b39"} Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.089633 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b3b94a9f34d9c411968d37f74a75c99e51ebd8f16f5d8c41ee0aaf3c1af32b39" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.091336 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-a0eb-account-create-update-75hkz" event={"ID":"6d80c5b5-880f-4d59-9287-334023ec6ce9","Type":"ContainerDied","Data":"17731a7facba62835b8bba083e3ada7aa2f3c60273012bf054a7ed401b083d69"} Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.091366 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="17731a7facba62835b8bba083e3ada7aa2f3c60273012bf054a7ed401b083d69" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.095862 4558 generic.go:334] "Generic (PLEG): container finished" podID="a905b3ac-e1b2-49f8-8034-912138028aca" containerID="516d63566e73522c69fe8fb7503a5f9b4f3300f2691369edc89aa5a577b600e5" exitCode=0 Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.095916 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"a905b3ac-e1b2-49f8-8034-912138028aca","Type":"ContainerDied","Data":"516d63566e73522c69fe8fb7503a5f9b4f3300f2691369edc89aa5a577b600e5"} Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.095936 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" 
event={"ID":"a905b3ac-e1b2-49f8-8034-912138028aca","Type":"ContainerDied","Data":"929204fe34e13dda9ed94b10d3f6026f38c68ea4979e1462ef3988f90fcf3c31"} Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.095948 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="929204fe34e13dda9ed94b10d3f6026f38c68ea4979e1462ef3988f90fcf3c31" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.097112 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-a1e2-account-create-update-hfpfh" event={"ID":"799d2689-2fcd-4bc5-ae70-1d0ae1194c76","Type":"ContainerDied","Data":"da762432cfe7c11ef497d104d891e650727cbbfa6ae73da3a2e331cd3870382d"} Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.097137 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="da762432cfe7c11ef497d104d891e650727cbbfa6ae73da3a2e331cd3870382d" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.098996 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"05b3b028-51e2-49a9-9fb8-a10c096f3b27","Type":"ContainerDied","Data":"7d089bb9ade270dd7bdd47aa19ebd6d991a0c4fc863b92f48fc4d9613fb8100f"} Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.099015 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7d089bb9ade270dd7bdd47aa19ebd6d991a0c4fc863b92f48fc4d9613fb8100f" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.101224 4558 generic.go:334] "Generic (PLEG): container finished" podID="0c9fc067-cb23-4f72-b928-5447fb5182c1" containerID="e300c235b0338f59c68e0edd429de09fcac767478cd41a3614d3e5af5e2760e6" exitCode=0 Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.101271 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-658f45b9f4-tx296" event={"ID":"0c9fc067-cb23-4f72-b928-5447fb5182c1","Type":"ContainerDied","Data":"e300c235b0338f59c68e0edd429de09fcac767478cd41a3614d3e5af5e2760e6"} Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.101289 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-658f45b9f4-tx296" event={"ID":"0c9fc067-cb23-4f72-b928-5447fb5182c1","Type":"ContainerDied","Data":"beb7d131a353afccd1fcb9c2ad166867d42235008327a737d77ef0cc6f8b7011"} Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.101300 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="beb7d131a353afccd1fcb9c2ad166867d42235008327a737d77ef0cc6f8b7011" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.103323 4558 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." 
pod="openstack-kuttl-tests/root-account-create-update-bxrps" secret="" err="secret \"galera-openstack-dockercfg-7rwrt\" not found" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.103370 4558 scope.go:117] "RemoveContainer" containerID="f74190a5462e78d2ad25c03116d8718fb4477617af58b9c4c6c750b5d4e94b80" Jan 20 17:53:06 crc kubenswrapper[4558]: E0120 17:53:06.103745 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CrashLoopBackOff: \"back-off 10s restarting failed container=mariadb-account-create-update pod=root-account-create-update-bxrps_openstack-kuttl-tests(8101ebb9-ace1-4242-8c8b-698307c6be29)\"" pod="openstack-kuttl-tests/root-account-create-update-bxrps" podUID="8101ebb9-ace1-4242-8c8b-698307c6be29" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.104119 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-d1d9-account-create-update-g54ff" event={"ID":"3eabae3c-0014-4d8e-9393-7cb934ba9fa4","Type":"ContainerDied","Data":"3282ae1da7ed4b5685a8d00992339b13bdeb88b9fb00b1690d6e296c0fb8f1f9"} Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.104144 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3282ae1da7ed4b5685a8d00992339b13bdeb88b9fb00b1690d6e296c0fb8f1f9" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.104213 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-db47-account-create-update-wfpfn" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.183406 4558 scope.go:117] "RemoveContainer" containerID="201a91126279426ab94e20fd8f5beddaa054584c71b596e84fb7b72c3f13b29f" Jan 20 17:53:06 crc kubenswrapper[4558]: E0120 17:53:06.183926 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"201a91126279426ab94e20fd8f5beddaa054584c71b596e84fb7b72c3f13b29f\": container with ID starting with 201a91126279426ab94e20fd8f5beddaa054584c71b596e84fb7b72c3f13b29f not found: ID does not exist" containerID="201a91126279426ab94e20fd8f5beddaa054584c71b596e84fb7b72c3f13b29f" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.183969 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"201a91126279426ab94e20fd8f5beddaa054584c71b596e84fb7b72c3f13b29f"} err="failed to get container status \"201a91126279426ab94e20fd8f5beddaa054584c71b596e84fb7b72c3f13b29f\": rpc error: code = NotFound desc = could not find container \"201a91126279426ab94e20fd8f5beddaa054584c71b596e84fb7b72c3f13b29f\": container with ID starting with 201a91126279426ab94e20fd8f5beddaa054584c71b596e84fb7b72c3f13b29f not found: ID does not exist" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.183996 4558 scope.go:117] "RemoveContainer" containerID="48a32835b74390f16be559ac97cec4eea57eb34d5852a0e5d9f37f38184cdf7a" Jan 20 17:53:06 crc kubenswrapper[4558]: E0120 17:53:06.184387 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"48a32835b74390f16be559ac97cec4eea57eb34d5852a0e5d9f37f38184cdf7a\": container with ID starting with 48a32835b74390f16be559ac97cec4eea57eb34d5852a0e5d9f37f38184cdf7a not found: ID does not exist" containerID="48a32835b74390f16be559ac97cec4eea57eb34d5852a0e5d9f37f38184cdf7a" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.184404 4558 pod_container_deletor.go:53] 
"DeleteContainer returned error" containerID={"Type":"cri-o","ID":"48a32835b74390f16be559ac97cec4eea57eb34d5852a0e5d9f37f38184cdf7a"} err="failed to get container status \"48a32835b74390f16be559ac97cec4eea57eb34d5852a0e5d9f37f38184cdf7a\": rpc error: code = NotFound desc = could not find container \"48a32835b74390f16be559ac97cec4eea57eb34d5852a0e5d9f37f38184cdf7a\": container with ID starting with 48a32835b74390f16be559ac97cec4eea57eb34d5852a0e5d9f37f38184cdf7a not found: ID does not exist" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.184418 4558 scope.go:117] "RemoveContainer" containerID="201a91126279426ab94e20fd8f5beddaa054584c71b596e84fb7b72c3f13b29f" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.184928 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"201a91126279426ab94e20fd8f5beddaa054584c71b596e84fb7b72c3f13b29f"} err="failed to get container status \"201a91126279426ab94e20fd8f5beddaa054584c71b596e84fb7b72c3f13b29f\": rpc error: code = NotFound desc = could not find container \"201a91126279426ab94e20fd8f5beddaa054584c71b596e84fb7b72c3f13b29f\": container with ID starting with 201a91126279426ab94e20fd8f5beddaa054584c71b596e84fb7b72c3f13b29f not found: ID does not exist" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.184968 4558 scope.go:117] "RemoveContainer" containerID="48a32835b74390f16be559ac97cec4eea57eb34d5852a0e5d9f37f38184cdf7a" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.185410 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"48a32835b74390f16be559ac97cec4eea57eb34d5852a0e5d9f37f38184cdf7a"} err="failed to get container status \"48a32835b74390f16be559ac97cec4eea57eb34d5852a0e5d9f37f38184cdf7a\": rpc error: code = NotFound desc = could not find container \"48a32835b74390f16be559ac97cec4eea57eb34d5852a0e5d9f37f38184cdf7a\": container with ID starting with 48a32835b74390f16be559ac97cec4eea57eb34d5852a0e5d9f37f38184cdf7a not found: ID does not exist" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.185430 4558 scope.go:117] "RemoveContainer" containerID="a5d920e84b45309d975c5a55142f856928971b658393631084941d3b078f522a" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.306760 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.379240 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05b3b028-51e2-49a9-9fb8-a10c096f3b27-combined-ca-bundle\") pod \"05b3b028-51e2-49a9-9fb8-a10c096f3b27\" (UID: \"05b3b028-51e2-49a9-9fb8-a10c096f3b27\") " Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.379324 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8fd2c\" (UniqueName: \"kubernetes.io/projected/05b3b028-51e2-49a9-9fb8-a10c096f3b27-kube-api-access-8fd2c\") pod \"05b3b028-51e2-49a9-9fb8-a10c096f3b27\" (UID: \"05b3b028-51e2-49a9-9fb8-a10c096f3b27\") " Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.379368 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"05b3b028-51e2-49a9-9fb8-a10c096f3b27\" (UID: \"05b3b028-51e2-49a9-9fb8-a10c096f3b27\") " Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.379517 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/05b3b028-51e2-49a9-9fb8-a10c096f3b27-config-data-default\") pod \"05b3b028-51e2-49a9-9fb8-a10c096f3b27\" (UID: \"05b3b028-51e2-49a9-9fb8-a10c096f3b27\") " Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.379554 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/05b3b028-51e2-49a9-9fb8-a10c096f3b27-galera-tls-certs\") pod \"05b3b028-51e2-49a9-9fb8-a10c096f3b27\" (UID: \"05b3b028-51e2-49a9-9fb8-a10c096f3b27\") " Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.379581 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/05b3b028-51e2-49a9-9fb8-a10c096f3b27-kolla-config\") pod \"05b3b028-51e2-49a9-9fb8-a10c096f3b27\" (UID: \"05b3b028-51e2-49a9-9fb8-a10c096f3b27\") " Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.379617 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/05b3b028-51e2-49a9-9fb8-a10c096f3b27-operator-scripts\") pod \"05b3b028-51e2-49a9-9fb8-a10c096f3b27\" (UID: \"05b3b028-51e2-49a9-9fb8-a10c096f3b27\") " Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.379703 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/05b3b028-51e2-49a9-9fb8-a10c096f3b27-config-data-generated\") pod \"05b3b028-51e2-49a9-9fb8-a10c096f3b27\" (UID: \"05b3b028-51e2-49a9-9fb8-a10c096f3b27\") " Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.380222 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/05b3b028-51e2-49a9-9fb8-a10c096f3b27-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "05b3b028-51e2-49a9-9fb8-a10c096f3b27" (UID: "05b3b028-51e2-49a9-9fb8-a10c096f3b27"). InnerVolumeSpecName "kolla-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.380584 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/05b3b028-51e2-49a9-9fb8-a10c096f3b27-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "05b3b028-51e2-49a9-9fb8-a10c096f3b27" (UID: "05b3b028-51e2-49a9-9fb8-a10c096f3b27"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.380866 4558 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/05b3b028-51e2-49a9-9fb8-a10c096f3b27-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.380889 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/05b3b028-51e2-49a9-9fb8-a10c096f3b27-config-data-generated\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.381140 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/05b3b028-51e2-49a9-9fb8-a10c096f3b27-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "05b3b028-51e2-49a9-9fb8-a10c096f3b27" (UID: "05b3b028-51e2-49a9-9fb8-a10c096f3b27"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.381364 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/05b3b028-51e2-49a9-9fb8-a10c096f3b27-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "05b3b028-51e2-49a9-9fb8-a10c096f3b27" (UID: "05b3b028-51e2-49a9-9fb8-a10c096f3b27"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.384726 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-proxy-76d6dddffd-4qcpv"] Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.386645 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/05b3b028-51e2-49a9-9fb8-a10c096f3b27-kube-api-access-8fd2c" (OuterVolumeSpecName: "kube-api-access-8fd2c") pod "05b3b028-51e2-49a9-9fb8-a10c096f3b27" (UID: "05b3b028-51e2-49a9-9fb8-a10c096f3b27"). InnerVolumeSpecName "kube-api-access-8fd2c". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.386695 4558 scope.go:117] "RemoveContainer" containerID="90b19c0fa576e6455d0cc3c15cd90c006b0ab179a1017800a7266892fa8e203e" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.393908 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/swift-proxy-76d6dddffd-4qcpv"] Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.399184 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage15-crc" (OuterVolumeSpecName: "mysql-db") pod "05b3b028-51e2-49a9-9fb8-a10c096f3b27" (UID: "05b3b028-51e2-49a9-9fb8-a10c096f3b27"). InnerVolumeSpecName "local-storage15-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.420530 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/05b3b028-51e2-49a9-9fb8-a10c096f3b27-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "05b3b028-51e2-49a9-9fb8-a10c096f3b27" (UID: "05b3b028-51e2-49a9-9fb8-a10c096f3b27"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.444002 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/cinder-api-0" podUID="5eac3a8d-ee82-4e6f-bdb9-fccb82e09496" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.1.234:8776/healthcheck\": read tcp 10.217.0.2:45120->10.217.1.234:8776: read: connection reset by peer" Jan 20 17:53:06 crc kubenswrapper[4558]: E0120 17:53:06.445204 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b4a7a63ae5b4336f1c55496b20962afe4a27040b0e22d712c664548644815e60" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:53:06 crc kubenswrapper[4558]: E0120 17:53:06.447115 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b4a7a63ae5b4336f1c55496b20962afe4a27040b0e22d712c664548644815e60" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.447547 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/05b3b028-51e2-49a9-9fb8-a10c096f3b27-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "05b3b028-51e2-49a9-9fb8-a10c096f3b27" (UID: "05b3b028-51e2-49a9-9fb8-a10c096f3b27"). InnerVolumeSpecName "galera-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:06 crc kubenswrapper[4558]: E0120 17:53:06.449270 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b4a7a63ae5b4336f1c55496b20962afe4a27040b0e22d712c664548644815e60" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:53:06 crc kubenswrapper[4558]: E0120 17:53:06.449308 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podUID="7c16f0c4-1462-4817-9f74-9e3d93193867" containerName="nova-cell1-conductor-conductor" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.483836 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/05b3b028-51e2-49a9-9fb8-a10c096f3b27-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.483859 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05b3b028-51e2-49a9-9fb8-a10c096f3b27-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.483871 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8fd2c\" (UniqueName: \"kubernetes.io/projected/05b3b028-51e2-49a9-9fb8-a10c096f3b27-kube-api-access-8fd2c\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.483894 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") on node \"crc\" " Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.483904 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/05b3b028-51e2-49a9-9fb8-a10c096f3b27-config-data-default\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.483914 4558 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/05b3b028-51e2-49a9-9fb8-a10c096f3b27-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.502451 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage15-crc" (UniqueName: "kubernetes.io/local-volume/local-storage15-crc") on node "crc" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.551384 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/nova-metadata-0" podUID="6e7de5b3-4426-4816-9d45-9b4226333dbc" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.1.232:8775/\": read tcp 10.217.0.2:39200->10.217.1.232:8775: read: connection reset by peer" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.551846 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/nova-metadata-0" podUID="6e7de5b3-4426-4816-9d45-9b4226333dbc" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.1.232:8775/\": read tcp 10.217.0.2:39194->10.217.1.232:8775: read: connection reset by peer" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.584982 4558 
kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="10d0164b-faa3-408c-9b22-25d1cff2c4e3" path="/var/lib/kubelet/pods/10d0164b-faa3-408c-9b22-25d1cff2c4e3/volumes" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.585656 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d58948f-0486-4e4a-af1f-9021c760d5cc" path="/var/lib/kubelet/pods/1d58948f-0486-4e4a-af1f-9021c760d5cc/volumes" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.586159 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.586266 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="344d4281-d7b6-49a8-8a10-62cf1940fa5c" path="/var/lib/kubelet/pods/344d4281-d7b6-49a8-8a10-62cf1940fa5c/volumes" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.592018 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="433afe0d-c979-4c31-a8dc-8a2d93fbe3df" path="/var/lib/kubelet/pods/433afe0d-c979-4c31-a8dc-8a2d93fbe3df/volumes" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.592702 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49822b01-63d0-40d3-b604-7980891b0683" path="/var/lib/kubelet/pods/49822b01-63d0-40d3-b604-7980891b0683/volumes" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.594508 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="51c79956-4212-4d46-b3e9-e9d5e7a33c31" path="/var/lib/kubelet/pods/51c79956-4212-4d46-b3e9-e9d5e7a33c31/volumes" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.595908 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="769afcb7-df8b-4d95-b662-9f5032227822" path="/var/lib/kubelet/pods/769afcb7-df8b-4d95-b662-9f5032227822/volumes" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.596530 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5" path="/var/lib/kubelet/pods/83dbadc1-6bc5-4c25-a541-c0c6d4bdadd5/volumes" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.597320 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e029b16a-f02b-40b4-82b9-5fa08fe62bc1" path="/var/lib/kubelet/pods/e029b16a-f02b-40b4-82b9-5fa08fe62bc1/volumes" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.598386 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efd20cca-38bd-467d-a1e0-5cbf4f0b1a14" path="/var/lib/kubelet/pods/efd20cca-38bd-467d-a1e0-5cbf4f0b1a14/volumes" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.664419 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-f275-account-create-update-5pb7k" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.677445 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-a0eb-account-create-update-75hkz" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.690308 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fxcqr\" (UniqueName: \"kubernetes.io/projected/a335ddc5-7bf4-4c3e-83a8-175921bbecf4-kube-api-access-fxcqr\") pod \"keystone-db47-account-create-update-wfpfn\" (UID: \"a335ddc5-7bf4-4c3e-83a8-175921bbecf4\") " pod="openstack-kuttl-tests/keystone-db47-account-create-update-wfpfn" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.690477 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a335ddc5-7bf4-4c3e-83a8-175921bbecf4-operator-scripts\") pod \"keystone-db47-account-create-update-wfpfn\" (UID: \"a335ddc5-7bf4-4c3e-83a8-175921bbecf4\") " pod="openstack-kuttl-tests/keystone-db47-account-create-update-wfpfn" Jan 20 17:53:06 crc kubenswrapper[4558]: E0120 17:53:06.690764 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.690832 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-d1d9-account-create-update-g54ff" Jan 20 17:53:06 crc kubenswrapper[4558]: E0120 17:53:06.691688 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 17:53:06 crc kubenswrapper[4558]: E0120 17:53:06.694517 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/a335ddc5-7bf4-4c3e-83a8-175921bbecf4-operator-scripts podName:a335ddc5-7bf4-4c3e-83a8-175921bbecf4 nodeName:}" failed. No retries permitted until 2026-01-20 17:53:08.690857865 +0000 UTC m=+4282.451195832 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/a335ddc5-7bf4-4c3e-83a8-175921bbecf4-operator-scripts") pod "keystone-db47-account-create-update-wfpfn" (UID: "a335ddc5-7bf4-4c3e-83a8-175921bbecf4") : configmap "openstack-scripts" not found Jan 20 17:53:06 crc kubenswrapper[4558]: E0120 17:53:06.694564 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/8101ebb9-ace1-4242-8c8b-698307c6be29-operator-scripts podName:8101ebb9-ace1-4242-8c8b-698307c6be29 nodeName:}" failed. No retries permitted until 2026-01-20 17:53:08.694552442 +0000 UTC m=+4282.454890409 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/8101ebb9-ace1-4242-8c8b-698307c6be29-operator-scripts") pod "root-account-create-update-bxrps" (UID: "8101ebb9-ace1-4242-8c8b-698307c6be29") : configmap "openstack-scripts" not found Jan 20 17:53:06 crc kubenswrapper[4558]: E0120 17:53:06.695128 4558 projected.go:194] Error preparing data for projected volume kube-api-access-fxcqr for pod openstack-kuttl-tests/keystone-db47-account-create-update-wfpfn: failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 17:53:06 crc kubenswrapper[4558]: E0120 17:53:06.695210 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/a335ddc5-7bf4-4c3e-83a8-175921bbecf4-kube-api-access-fxcqr podName:a335ddc5-7bf4-4c3e-83a8-175921bbecf4 nodeName:}" failed. 
No retries permitted until 2026-01-20 17:53:08.695188248 +0000 UTC m=+4282.455526215 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-fxcqr" (UniqueName: "kubernetes.io/projected/a335ddc5-7bf4-4c3e-83a8-175921bbecf4-kube-api-access-fxcqr") pod "keystone-db47-account-create-update-wfpfn" (UID: "a335ddc5-7bf4-4c3e-83a8-175921bbecf4") : failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.699252 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-a1e2-account-create-update-hfpfh" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.706565 4558 scope.go:117] "RemoveContainer" containerID="a5d920e84b45309d975c5a55142f856928971b658393631084941d3b078f522a" Jan 20 17:53:06 crc kubenswrapper[4558]: E0120 17:53:06.707480 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a5d920e84b45309d975c5a55142f856928971b658393631084941d3b078f522a\": container with ID starting with a5d920e84b45309d975c5a55142f856928971b658393631084941d3b078f522a not found: ID does not exist" containerID="a5d920e84b45309d975c5a55142f856928971b658393631084941d3b078f522a" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.707529 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a5d920e84b45309d975c5a55142f856928971b658393631084941d3b078f522a"} err="failed to get container status \"a5d920e84b45309d975c5a55142f856928971b658393631084941d3b078f522a\": rpc error: code = NotFound desc = could not find container \"a5d920e84b45309d975c5a55142f856928971b658393631084941d3b078f522a\": container with ID starting with a5d920e84b45309d975c5a55142f856928971b658393631084941d3b078f522a not found: ID does not exist" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.707548 4558 scope.go:117] "RemoveContainer" containerID="90b19c0fa576e6455d0cc3c15cd90c006b0ab179a1017800a7266892fa8e203e" Jan 20 17:53:06 crc kubenswrapper[4558]: E0120 17:53:06.709556 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"90b19c0fa576e6455d0cc3c15cd90c006b0ab179a1017800a7266892fa8e203e\": container with ID starting with 90b19c0fa576e6455d0cc3c15cd90c006b0ab179a1017800a7266892fa8e203e not found: ID does not exist" containerID="90b19c0fa576e6455d0cc3c15cd90c006b0ab179a1017800a7266892fa8e203e" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.709584 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"90b19c0fa576e6455d0cc3c15cd90c006b0ab179a1017800a7266892fa8e203e"} err="failed to get container status \"90b19c0fa576e6455d0cc3c15cd90c006b0ab179a1017800a7266892fa8e203e\": rpc error: code = NotFound desc = could not find container \"90b19c0fa576e6455d0cc3c15cd90c006b0ab179a1017800a7266892fa8e203e\": container with ID starting with 90b19c0fa576e6455d0cc3c15cd90c006b0ab179a1017800a7266892fa8e203e not found: ID does not exist" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.709602 4558 scope.go:117] "RemoveContainer" containerID="a5d920e84b45309d975c5a55142f856928971b658393631084941d3b078f522a" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.710397 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-658f45b9f4-tx296" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.710496 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a5d920e84b45309d975c5a55142f856928971b658393631084941d3b078f522a"} err="failed to get container status \"a5d920e84b45309d975c5a55142f856928971b658393631084941d3b078f522a\": rpc error: code = NotFound desc = could not find container \"a5d920e84b45309d975c5a55142f856928971b658393631084941d3b078f522a\": container with ID starting with a5d920e84b45309d975c5a55142f856928971b658393631084941d3b078f522a not found: ID does not exist" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.710523 4558 scope.go:117] "RemoveContainer" containerID="90b19c0fa576e6455d0cc3c15cd90c006b0ab179a1017800a7266892fa8e203e" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.713790 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"90b19c0fa576e6455d0cc3c15cd90c006b0ab179a1017800a7266892fa8e203e"} err="failed to get container status \"90b19c0fa576e6455d0cc3c15cd90c006b0ab179a1017800a7266892fa8e203e\": rpc error: code = NotFound desc = could not find container \"90b19c0fa576e6455d0cc3c15cd90c006b0ab179a1017800a7266892fa8e203e\": container with ID starting with 90b19c0fa576e6455d0cc3c15cd90c006b0ab179a1017800a7266892fa8e203e not found: ID does not exist" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.713830 4558 scope.go:117] "RemoveContainer" containerID="b9ac5c3d167c83d97bcf4206fa59450cb566eb19a154b51f5884efb725cdab16" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.759487 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.761690 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-db47-account-create-update-wfpfn" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.777543 4558 scope.go:117] "RemoveContainer" containerID="fbd3990f851ba4c232be9e8e9146ae26662c46c76d2e455f20e0dcc444b77578" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.790837 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.791539 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6d80c5b5-880f-4d59-9287-334023ec6ce9-operator-scripts\") pod \"6d80c5b5-880f-4d59-9287-334023ec6ce9\" (UID: \"6d80c5b5-880f-4d59-9287-334023ec6ce9\") " Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.791615 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3eabae3c-0014-4d8e-9393-7cb934ba9fa4-operator-scripts\") pod \"3eabae3c-0014-4d8e-9393-7cb934ba9fa4\" (UID: \"3eabae3c-0014-4d8e-9393-7cb934ba9fa4\") " Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.791679 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zhpjg\" (UniqueName: \"kubernetes.io/projected/a905b3ac-e1b2-49f8-8034-912138028aca-kube-api-access-zhpjg\") pod \"a905b3ac-e1b2-49f8-8034-912138028aca\" (UID: \"a905b3ac-e1b2-49f8-8034-912138028aca\") " Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.791760 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0c9fc067-cb23-4f72-b928-5447fb5182c1-scripts\") pod \"0c9fc067-cb23-4f72-b928-5447fb5182c1\" (UID: \"0c9fc067-cb23-4f72-b928-5447fb5182c1\") " Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.791793 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0c9fc067-cb23-4f72-b928-5447fb5182c1-config-data\") pod \"0c9fc067-cb23-4f72-b928-5447fb5182c1\" (UID: \"0c9fc067-cb23-4f72-b928-5447fb5182c1\") " Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.791831 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hlsqg\" (UniqueName: \"kubernetes.io/projected/01f39889-c29f-4e28-98fa-bc99322f761c-kube-api-access-hlsqg\") pod \"01f39889-c29f-4e28-98fa-bc99322f761c\" (UID: \"01f39889-c29f-4e28-98fa-bc99322f761c\") " Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.791870 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0c9fc067-cb23-4f72-b928-5447fb5182c1-public-tls-certs\") pod \"0c9fc067-cb23-4f72-b928-5447fb5182c1\" (UID: \"0c9fc067-cb23-4f72-b928-5447fb5182c1\") " Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.791897 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n9d5p\" (UniqueName: \"kubernetes.io/projected/0c9fc067-cb23-4f72-b928-5447fb5182c1-kube-api-access-n9d5p\") pod \"0c9fc067-cb23-4f72-b928-5447fb5182c1\" (UID: \"0c9fc067-cb23-4f72-b928-5447fb5182c1\") " Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.791940 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0c9fc067-cb23-4f72-b928-5447fb5182c1-combined-ca-bundle\") pod \"0c9fc067-cb23-4f72-b928-5447fb5182c1\" (UID: \"0c9fc067-cb23-4f72-b928-5447fb5182c1\") " Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.791987 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/0c9fc067-cb23-4f72-b928-5447fb5182c1-internal-tls-certs\") pod \"0c9fc067-cb23-4f72-b928-5447fb5182c1\" (UID: \"0c9fc067-cb23-4f72-b928-5447fb5182c1\") " Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.792049 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tmjgj\" (UniqueName: \"kubernetes.io/projected/799d2689-2fcd-4bc5-ae70-1d0ae1194c76-kube-api-access-tmjgj\") pod \"799d2689-2fcd-4bc5-ae70-1d0ae1194c76\" (UID: \"799d2689-2fcd-4bc5-ae70-1d0ae1194c76\") " Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.792081 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a905b3ac-e1b2-49f8-8034-912138028aca-combined-ca-bundle\") pod \"a905b3ac-e1b2-49f8-8034-912138028aca\" (UID: \"a905b3ac-e1b2-49f8-8034-912138028aca\") " Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.792107 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wgzn6\" (UniqueName: \"kubernetes.io/projected/6d80c5b5-880f-4d59-9287-334023ec6ce9-kube-api-access-wgzn6\") pod \"6d80c5b5-880f-4d59-9287-334023ec6ce9\" (UID: \"6d80c5b5-880f-4d59-9287-334023ec6ce9\") " Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.792126 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/799d2689-2fcd-4bc5-ae70-1d0ae1194c76-operator-scripts\") pod \"799d2689-2fcd-4bc5-ae70-1d0ae1194c76\" (UID: \"799d2689-2fcd-4bc5-ae70-1d0ae1194c76\") " Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.792146 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a905b3ac-e1b2-49f8-8034-912138028aca-config-data\") pod \"a905b3ac-e1b2-49f8-8034-912138028aca\" (UID: \"a905b3ac-e1b2-49f8-8034-912138028aca\") " Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.792571 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0c9fc067-cb23-4f72-b928-5447fb5182c1-logs\") pod \"0c9fc067-cb23-4f72-b928-5447fb5182c1\" (UID: \"0c9fc067-cb23-4f72-b928-5447fb5182c1\") " Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.792673 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/01f39889-c29f-4e28-98fa-bc99322f761c-operator-scripts\") pod \"01f39889-c29f-4e28-98fa-bc99322f761c\" (UID: \"01f39889-c29f-4e28-98fa-bc99322f761c\") " Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.792701 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/a905b3ac-e1b2-49f8-8034-912138028aca-kolla-config\") pod \"a905b3ac-e1b2-49f8-8034-912138028aca\" (UID: \"a905b3ac-e1b2-49f8-8034-912138028aca\") " Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.792727 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/a905b3ac-e1b2-49f8-8034-912138028aca-memcached-tls-certs\") pod \"a905b3ac-e1b2-49f8-8034-912138028aca\" (UID: \"a905b3ac-e1b2-49f8-8034-912138028aca\") " Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.792744 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mf9k4\" 
(UniqueName: \"kubernetes.io/projected/3eabae3c-0014-4d8e-9393-7cb934ba9fa4-kube-api-access-mf9k4\") pod \"3eabae3c-0014-4d8e-9393-7cb934ba9fa4\" (UID: \"3eabae3c-0014-4d8e-9393-7cb934ba9fa4\") " Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.794008 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.794913 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6d80c5b5-880f-4d59-9287-334023ec6ce9-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "6d80c5b5-880f-4d59-9287-334023ec6ce9" (UID: "6d80c5b5-880f-4d59-9287-334023ec6ce9"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.795141 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3eabae3c-0014-4d8e-9393-7cb934ba9fa4-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "3eabae3c-0014-4d8e-9393-7cb934ba9fa4" (UID: "3eabae3c-0014-4d8e-9393-7cb934ba9fa4"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.795980 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/799d2689-2fcd-4bc5-ae70-1d0ae1194c76-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "799d2689-2fcd-4bc5-ae70-1d0ae1194c76" (UID: "799d2689-2fcd-4bc5-ae70-1d0ae1194c76"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.797104 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a905b3ac-e1b2-49f8-8034-912138028aca-config-data" (OuterVolumeSpecName: "config-data") pod "a905b3ac-e1b2-49f8-8034-912138028aca" (UID: "a905b3ac-e1b2-49f8-8034-912138028aca"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.797703 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a905b3ac-e1b2-49f8-8034-912138028aca-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "a905b3ac-e1b2-49f8-8034-912138028aca" (UID: "a905b3ac-e1b2-49f8-8034-912138028aca"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.798208 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01f39889-c29f-4e28-98fa-bc99322f761c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "01f39889-c29f-4e28-98fa-bc99322f761c" (UID: "01f39889-c29f-4e28-98fa-bc99322f761c"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.799238 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0c9fc067-cb23-4f72-b928-5447fb5182c1-scripts" (OuterVolumeSpecName: "scripts") pod "0c9fc067-cb23-4f72-b928-5447fb5182c1" (UID: "0c9fc067-cb23-4f72-b928-5447fb5182c1"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.799245 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0c9fc067-cb23-4f72-b928-5447fb5182c1-logs" (OuterVolumeSpecName: "logs") pod "0c9fc067-cb23-4f72-b928-5447fb5182c1" (UID: "0c9fc067-cb23-4f72-b928-5447fb5182c1"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.800738 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a905b3ac-e1b2-49f8-8034-912138028aca-kube-api-access-zhpjg" (OuterVolumeSpecName: "kube-api-access-zhpjg") pod "a905b3ac-e1b2-49f8-8034-912138028aca" (UID: "a905b3ac-e1b2-49f8-8034-912138028aca"). InnerVolumeSpecName "kube-api-access-zhpjg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.803735 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6d80c5b5-880f-4d59-9287-334023ec6ce9-kube-api-access-wgzn6" (OuterVolumeSpecName: "kube-api-access-wgzn6") pod "6d80c5b5-880f-4d59-9287-334023ec6ce9" (UID: "6d80c5b5-880f-4d59-9287-334023ec6ce9"). InnerVolumeSpecName "kube-api-access-wgzn6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.806809 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0c9fc067-cb23-4f72-b928-5447fb5182c1-kube-api-access-n9d5p" (OuterVolumeSpecName: "kube-api-access-n9d5p") pod "0c9fc067-cb23-4f72-b928-5447fb5182c1" (UID: "0c9fc067-cb23-4f72-b928-5447fb5182c1"). InnerVolumeSpecName "kube-api-access-n9d5p". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.809122 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/799d2689-2fcd-4bc5-ae70-1d0ae1194c76-kube-api-access-tmjgj" (OuterVolumeSpecName: "kube-api-access-tmjgj") pod "799d2689-2fcd-4bc5-ae70-1d0ae1194c76" (UID: "799d2689-2fcd-4bc5-ae70-1d0ae1194c76"). InnerVolumeSpecName "kube-api-access-tmjgj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.813173 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01f39889-c29f-4e28-98fa-bc99322f761c-kube-api-access-hlsqg" (OuterVolumeSpecName: "kube-api-access-hlsqg") pod "01f39889-c29f-4e28-98fa-bc99322f761c" (UID: "01f39889-c29f-4e28-98fa-bc99322f761c"). InnerVolumeSpecName "kube-api-access-hlsqg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.814599 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3eabae3c-0014-4d8e-9393-7cb934ba9fa4-kube-api-access-mf9k4" (OuterVolumeSpecName: "kube-api-access-mf9k4") pod "3eabae3c-0014-4d8e-9393-7cb934ba9fa4" (UID: "3eabae3c-0014-4d8e-9393-7cb934ba9fa4"). InnerVolumeSpecName "kube-api-access-mf9k4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.830177 4558 scope.go:117] "RemoveContainer" containerID="b9ac5c3d167c83d97bcf4206fa59450cb566eb19a154b51f5884efb725cdab16" Jan 20 17:53:06 crc kubenswrapper[4558]: E0120 17:53:06.830731 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b9ac5c3d167c83d97bcf4206fa59450cb566eb19a154b51f5884efb725cdab16\": container with ID starting with b9ac5c3d167c83d97bcf4206fa59450cb566eb19a154b51f5884efb725cdab16 not found: ID does not exist" containerID="b9ac5c3d167c83d97bcf4206fa59450cb566eb19a154b51f5884efb725cdab16" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.830762 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b9ac5c3d167c83d97bcf4206fa59450cb566eb19a154b51f5884efb725cdab16"} err="failed to get container status \"b9ac5c3d167c83d97bcf4206fa59450cb566eb19a154b51f5884efb725cdab16\": rpc error: code = NotFound desc = could not find container \"b9ac5c3d167c83d97bcf4206fa59450cb566eb19a154b51f5884efb725cdab16\": container with ID starting with b9ac5c3d167c83d97bcf4206fa59450cb566eb19a154b51f5884efb725cdab16 not found: ID does not exist" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.830782 4558 scope.go:117] "RemoveContainer" containerID="fbd3990f851ba4c232be9e8e9146ae26662c46c76d2e455f20e0dcc444b77578" Jan 20 17:53:06 crc kubenswrapper[4558]: E0120 17:53:06.831066 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fbd3990f851ba4c232be9e8e9146ae26662c46c76d2e455f20e0dcc444b77578\": container with ID starting with fbd3990f851ba4c232be9e8e9146ae26662c46c76d2e455f20e0dcc444b77578 not found: ID does not exist" containerID="fbd3990f851ba4c232be9e8e9146ae26662c46c76d2e455f20e0dcc444b77578" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.831097 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fbd3990f851ba4c232be9e8e9146ae26662c46c76d2e455f20e0dcc444b77578"} err="failed to get container status \"fbd3990f851ba4c232be9e8e9146ae26662c46c76d2e455f20e0dcc444b77578\": rpc error: code = NotFound desc = could not find container \"fbd3990f851ba4c232be9e8e9146ae26662c46c76d2e455f20e0dcc444b77578\": container with ID starting with fbd3990f851ba4c232be9e8e9146ae26662c46c76d2e455f20e0dcc444b77578 not found: ID does not exist" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.831112 4558 scope.go:117] "RemoveContainer" containerID="7b033a817f9b0c0a5a28b3bb09ca9cc6417d0b599821e6bdbdcf0121964efdf3" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.831407 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.883751 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a905b3ac-e1b2-49f8-8034-912138028aca-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a905b3ac-e1b2-49f8-8034-912138028aca" (UID: "a905b3ac-e1b2-49f8-8034-912138028aca"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.886626 4558 scope.go:117] "RemoveContainer" containerID="7b033a817f9b0c0a5a28b3bb09ca9cc6417d0b599821e6bdbdcf0121964efdf3" Jan 20 17:53:06 crc kubenswrapper[4558]: E0120 17:53:06.888406 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7b033a817f9b0c0a5a28b3bb09ca9cc6417d0b599821e6bdbdcf0121964efdf3\": container with ID starting with 7b033a817f9b0c0a5a28b3bb09ca9cc6417d0b599821e6bdbdcf0121964efdf3 not found: ID does not exist" containerID="7b033a817f9b0c0a5a28b3bb09ca9cc6417d0b599821e6bdbdcf0121964efdf3" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.888441 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7b033a817f9b0c0a5a28b3bb09ca9cc6417d0b599821e6bdbdcf0121964efdf3"} err="failed to get container status \"7b033a817f9b0c0a5a28b3bb09ca9cc6417d0b599821e6bdbdcf0121964efdf3\": rpc error: code = NotFound desc = could not find container \"7b033a817f9b0c0a5a28b3bb09ca9cc6417d0b599821e6bdbdcf0121964efdf3\": container with ID starting with 7b033a817f9b0c0a5a28b3bb09ca9cc6417d0b599821e6bdbdcf0121964efdf3 not found: ID does not exist" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.888467 4558 scope.go:117] "RemoveContainer" containerID="71ba9af23a1c5424b7ecc46753bf8d593f4b1658ef2cbde792f66ad414383b91" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.894817 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"a18e9b68-4683-415e-833e-5b363e31d461\" (UID: \"a18e9b68-4683-415e-833e-5b363e31d461\") " Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.894880 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5eac3a8d-ee82-4e6f-bdb9-fccb82e09496-config-data\") pod \"5eac3a8d-ee82-4e6f-bdb9-fccb82e09496\" (UID: \"5eac3a8d-ee82-4e6f-bdb9-fccb82e09496\") " Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.894918 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xhmrl\" (UniqueName: \"kubernetes.io/projected/f30aa605-d974-4d0d-aa96-5cb0d96897d6-kube-api-access-xhmrl\") pod \"f30aa605-d974-4d0d-aa96-5cb0d96897d6\" (UID: \"f30aa605-d974-4d0d-aa96-5cb0d96897d6\") " Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.894950 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a18e9b68-4683-415e-833e-5b363e31d461-combined-ca-bundle\") pod \"a18e9b68-4683-415e-833e-5b363e31d461\" (UID: \"a18e9b68-4683-415e-833e-5b363e31d461\") " Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.894991 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a18e9b68-4683-415e-833e-5b363e31d461-operator-scripts\") pod \"a18e9b68-4683-415e-833e-5b363e31d461\" (UID: \"a18e9b68-4683-415e-833e-5b363e31d461\") " Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.895039 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f30aa605-d974-4d0d-aa96-5cb0d96897d6-logs\") pod \"f30aa605-d974-4d0d-aa96-5cb0d96897d6\" (UID: \"f30aa605-d974-4d0d-aa96-5cb0d96897d6\") " 
Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.895070 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f30aa605-d974-4d0d-aa96-5cb0d96897d6-scripts\") pod \"f30aa605-d974-4d0d-aa96-5cb0d96897d6\" (UID: \"f30aa605-d974-4d0d-aa96-5cb0d96897d6\") " Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.895092 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"f30aa605-d974-4d0d-aa96-5cb0d96897d6\" (UID: \"f30aa605-d974-4d0d-aa96-5cb0d96897d6\") " Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.895115 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5eac3a8d-ee82-4e6f-bdb9-fccb82e09496-public-tls-certs\") pod \"5eac3a8d-ee82-4e6f-bdb9-fccb82e09496\" (UID: \"5eac3a8d-ee82-4e6f-bdb9-fccb82e09496\") " Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.895136 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f30aa605-d974-4d0d-aa96-5cb0d96897d6-combined-ca-bundle\") pod \"f30aa605-d974-4d0d-aa96-5cb0d96897d6\" (UID: \"f30aa605-d974-4d0d-aa96-5cb0d96897d6\") " Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.895180 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hjrqb\" (UniqueName: \"kubernetes.io/projected/5eac3a8d-ee82-4e6f-bdb9-fccb82e09496-kube-api-access-hjrqb\") pod \"5eac3a8d-ee82-4e6f-bdb9-fccb82e09496\" (UID: \"5eac3a8d-ee82-4e6f-bdb9-fccb82e09496\") " Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.895230 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/a18e9b68-4683-415e-833e-5b363e31d461-config-data-generated\") pod \"a18e9b68-4683-415e-833e-5b363e31d461\" (UID: \"a18e9b68-4683-415e-833e-5b363e31d461\") " Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.895252 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mxfc8\" (UniqueName: \"kubernetes.io/projected/a18e9b68-4683-415e-833e-5b363e31d461-kube-api-access-mxfc8\") pod \"a18e9b68-4683-415e-833e-5b363e31d461\" (UID: \"a18e9b68-4683-415e-833e-5b363e31d461\") " Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.895324 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/a18e9b68-4683-415e-833e-5b363e31d461-kolla-config\") pod \"a18e9b68-4683-415e-833e-5b363e31d461\" (UID: \"a18e9b68-4683-415e-833e-5b363e31d461\") " Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.895350 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5eac3a8d-ee82-4e6f-bdb9-fccb82e09496-config-data-custom\") pod \"5eac3a8d-ee82-4e6f-bdb9-fccb82e09496\" (UID: \"5eac3a8d-ee82-4e6f-bdb9-fccb82e09496\") " Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.895371 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f30aa605-d974-4d0d-aa96-5cb0d96897d6-internal-tls-certs\") pod \"f30aa605-d974-4d0d-aa96-5cb0d96897d6\" (UID: \"f30aa605-d974-4d0d-aa96-5cb0d96897d6\") " Jan 20 
17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.895391 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5eac3a8d-ee82-4e6f-bdb9-fccb82e09496-logs\") pod \"5eac3a8d-ee82-4e6f-bdb9-fccb82e09496\" (UID: \"5eac3a8d-ee82-4e6f-bdb9-fccb82e09496\") " Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.895414 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f30aa605-d974-4d0d-aa96-5cb0d96897d6-httpd-run\") pod \"f30aa605-d974-4d0d-aa96-5cb0d96897d6\" (UID: \"f30aa605-d974-4d0d-aa96-5cb0d96897d6\") " Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.895430 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/a18e9b68-4683-415e-833e-5b363e31d461-config-data-default\") pod \"a18e9b68-4683-415e-833e-5b363e31d461\" (UID: \"a18e9b68-4683-415e-833e-5b363e31d461\") " Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.895448 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5eac3a8d-ee82-4e6f-bdb9-fccb82e09496-scripts\") pod \"5eac3a8d-ee82-4e6f-bdb9-fccb82e09496\" (UID: \"5eac3a8d-ee82-4e6f-bdb9-fccb82e09496\") " Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.895475 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/5eac3a8d-ee82-4e6f-bdb9-fccb82e09496-etc-machine-id\") pod \"5eac3a8d-ee82-4e6f-bdb9-fccb82e09496\" (UID: \"5eac3a8d-ee82-4e6f-bdb9-fccb82e09496\") " Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.895494 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f30aa605-d974-4d0d-aa96-5cb0d96897d6-config-data\") pod \"f30aa605-d974-4d0d-aa96-5cb0d96897d6\" (UID: \"f30aa605-d974-4d0d-aa96-5cb0d96897d6\") " Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.895523 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5eac3a8d-ee82-4e6f-bdb9-fccb82e09496-combined-ca-bundle\") pod \"5eac3a8d-ee82-4e6f-bdb9-fccb82e09496\" (UID: \"5eac3a8d-ee82-4e6f-bdb9-fccb82e09496\") " Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.895574 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/a18e9b68-4683-415e-833e-5b363e31d461-galera-tls-certs\") pod \"a18e9b68-4683-415e-833e-5b363e31d461\" (UID: \"a18e9b68-4683-415e-833e-5b363e31d461\") " Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.895622 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5eac3a8d-ee82-4e6f-bdb9-fccb82e09496-internal-tls-certs\") pod \"5eac3a8d-ee82-4e6f-bdb9-fccb82e09496\" (UID: \"5eac3a8d-ee82-4e6f-bdb9-fccb82e09496\") " Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.896136 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3eabae3c-0014-4d8e-9393-7cb934ba9fa4-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.896156 4558 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-zhpjg\" (UniqueName: \"kubernetes.io/projected/a905b3ac-e1b2-49f8-8034-912138028aca-kube-api-access-zhpjg\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.896184 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0c9fc067-cb23-4f72-b928-5447fb5182c1-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.896193 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hlsqg\" (UniqueName: \"kubernetes.io/projected/01f39889-c29f-4e28-98fa-bc99322f761c-kube-api-access-hlsqg\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.896203 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n9d5p\" (UniqueName: \"kubernetes.io/projected/0c9fc067-cb23-4f72-b928-5447fb5182c1-kube-api-access-n9d5p\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.896213 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tmjgj\" (UniqueName: \"kubernetes.io/projected/799d2689-2fcd-4bc5-ae70-1d0ae1194c76-kube-api-access-tmjgj\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.896222 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a905b3ac-e1b2-49f8-8034-912138028aca-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.896230 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/799d2689-2fcd-4bc5-ae70-1d0ae1194c76-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.896240 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wgzn6\" (UniqueName: \"kubernetes.io/projected/6d80c5b5-880f-4d59-9287-334023ec6ce9-kube-api-access-wgzn6\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.896250 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a905b3ac-e1b2-49f8-8034-912138028aca-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.896258 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/01f39889-c29f-4e28-98fa-bc99322f761c-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.896267 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0c9fc067-cb23-4f72-b928-5447fb5182c1-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.896275 4558 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/a905b3ac-e1b2-49f8-8034-912138028aca-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.896285 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mf9k4\" (UniqueName: \"kubernetes.io/projected/3eabae3c-0014-4d8e-9393-7cb934ba9fa4-kube-api-access-mf9k4\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.896294 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" 
(UniqueName: \"kubernetes.io/configmap/6d80c5b5-880f-4d59-9287-334023ec6ce9-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.896554 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f30aa605-d974-4d0d-aa96-5cb0d96897d6-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "f30aa605-d974-4d0d-aa96-5cb0d96897d6" (UID: "f30aa605-d974-4d0d-aa96-5cb0d96897d6"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.896887 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a18e9b68-4683-415e-833e-5b363e31d461-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "a18e9b68-4683-415e-833e-5b363e31d461" (UID: "a18e9b68-4683-415e-833e-5b363e31d461"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.897955 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a18e9b68-4683-415e-833e-5b363e31d461-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "a18e9b68-4683-415e-833e-5b363e31d461" (UID: "a18e9b68-4683-415e-833e-5b363e31d461"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.898297 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f30aa605-d974-4d0d-aa96-5cb0d96897d6-logs" (OuterVolumeSpecName: "logs") pod "f30aa605-d974-4d0d-aa96-5cb0d96897d6" (UID: "f30aa605-d974-4d0d-aa96-5cb0d96897d6"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.899573 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a18e9b68-4683-415e-833e-5b363e31d461-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a18e9b68-4683-415e-833e-5b363e31d461" (UID: "a18e9b68-4683-415e-833e-5b363e31d461"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.899956 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5eac3a8d-ee82-4e6f-bdb9-fccb82e09496-logs" (OuterVolumeSpecName: "logs") pod "5eac3a8d-ee82-4e6f-bdb9-fccb82e09496" (UID: "5eac3a8d-ee82-4e6f-bdb9-fccb82e09496"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.899971 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a18e9b68-4683-415e-833e-5b363e31d461-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "a18e9b68-4683-415e-833e-5b363e31d461" (UID: "a18e9b68-4683-415e-833e-5b363e31d461"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.900307 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a18e9b68-4683-415e-833e-5b363e31d461-kube-api-access-mxfc8" (OuterVolumeSpecName: "kube-api-access-mxfc8") pod "a18e9b68-4683-415e-833e-5b363e31d461" (UID: "a18e9b68-4683-415e-833e-5b363e31d461"). 
InnerVolumeSpecName "kube-api-access-mxfc8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.900358 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5eac3a8d-ee82-4e6f-bdb9-fccb82e09496-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "5eac3a8d-ee82-4e6f-bdb9-fccb82e09496" (UID: "5eac3a8d-ee82-4e6f-bdb9-fccb82e09496"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.905445 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f30aa605-d974-4d0d-aa96-5cb0d96897d6-scripts" (OuterVolumeSpecName: "scripts") pod "f30aa605-d974-4d0d-aa96-5cb0d96897d6" (UID: "f30aa605-d974-4d0d-aa96-5cb0d96897d6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.906389 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5eac3a8d-ee82-4e6f-bdb9-fccb82e09496-scripts" (OuterVolumeSpecName: "scripts") pod "5eac3a8d-ee82-4e6f-bdb9-fccb82e09496" (UID: "5eac3a8d-ee82-4e6f-bdb9-fccb82e09496"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.906787 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5eac3a8d-ee82-4e6f-bdb9-fccb82e09496-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "5eac3a8d-ee82-4e6f-bdb9-fccb82e09496" (UID: "5eac3a8d-ee82-4e6f-bdb9-fccb82e09496"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.910989 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5eac3a8d-ee82-4e6f-bdb9-fccb82e09496-kube-api-access-hjrqb" (OuterVolumeSpecName: "kube-api-access-hjrqb") pod "5eac3a8d-ee82-4e6f-bdb9-fccb82e09496" (UID: "5eac3a8d-ee82-4e6f-bdb9-fccb82e09496"). InnerVolumeSpecName "kube-api-access-hjrqb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.918241 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "glance") pod "f30aa605-d974-4d0d-aa96-5cb0d96897d6" (UID: "f30aa605-d974-4d0d-aa96-5cb0d96897d6"). InnerVolumeSpecName "local-storage11-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.920037 4558 scope.go:117] "RemoveContainer" containerID="71ba9af23a1c5424b7ecc46753bf8d593f4b1658ef2cbde792f66ad414383b91" Jan 20 17:53:06 crc kubenswrapper[4558]: E0120 17:53:06.920567 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"71ba9af23a1c5424b7ecc46753bf8d593f4b1658ef2cbde792f66ad414383b91\": container with ID starting with 71ba9af23a1c5424b7ecc46753bf8d593f4b1658ef2cbde792f66ad414383b91 not found: ID does not exist" containerID="71ba9af23a1c5424b7ecc46753bf8d593f4b1658ef2cbde792f66ad414383b91" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.920605 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"71ba9af23a1c5424b7ecc46753bf8d593f4b1658ef2cbde792f66ad414383b91"} err="failed to get container status \"71ba9af23a1c5424b7ecc46753bf8d593f4b1658ef2cbde792f66ad414383b91\": rpc error: code = NotFound desc = could not find container \"71ba9af23a1c5424b7ecc46753bf8d593f4b1658ef2cbde792f66ad414383b91\": container with ID starting with 71ba9af23a1c5424b7ecc46753bf8d593f4b1658ef2cbde792f66ad414383b91 not found: ID does not exist" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.920629 4558 scope.go:117] "RemoveContainer" containerID="95dc200c1f4d2871200f75f66fd687fce8cf419611a0ac26406a639578f912ef" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.945565 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0c9fc067-cb23-4f72-b928-5447fb5182c1-config-data" (OuterVolumeSpecName: "config-data") pod "0c9fc067-cb23-4f72-b928-5447fb5182c1" (UID: "0c9fc067-cb23-4f72-b928-5447fb5182c1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.947740 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage14-crc" (OuterVolumeSpecName: "mysql-db") pod "a18e9b68-4683-415e-833e-5b363e31d461" (UID: "a18e9b68-4683-415e-833e-5b363e31d461"). InnerVolumeSpecName "local-storage14-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.954378 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f30aa605-d974-4d0d-aa96-5cb0d96897d6-kube-api-access-xhmrl" (OuterVolumeSpecName: "kube-api-access-xhmrl") pod "f30aa605-d974-4d0d-aa96-5cb0d96897d6" (UID: "f30aa605-d974-4d0d-aa96-5cb0d96897d6"). InnerVolumeSpecName "kube-api-access-xhmrl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.955893 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0c9fc067-cb23-4f72-b928-5447fb5182c1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0c9fc067-cb23-4f72-b928-5447fb5182c1" (UID: "0c9fc067-cb23-4f72-b928-5447fb5182c1"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.975194 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a905b3ac-e1b2-49f8-8034-912138028aca-memcached-tls-certs" (OuterVolumeSpecName: "memcached-tls-certs") pod "a905b3ac-e1b2-49f8-8034-912138028aca" (UID: "a905b3ac-e1b2-49f8-8034-912138028aca"). InnerVolumeSpecName "memcached-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.976080 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f30aa605-d974-4d0d-aa96-5cb0d96897d6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f30aa605-d974-4d0d-aa96-5cb0d96897d6" (UID: "f30aa605-d974-4d0d-aa96-5cb0d96897d6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.980752 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a18e9b68-4683-415e-833e-5b363e31d461-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a18e9b68-4683-415e-833e-5b363e31d461" (UID: "a18e9b68-4683-415e-833e-5b363e31d461"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.981729 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0c9fc067-cb23-4f72-b928-5447fb5182c1-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "0c9fc067-cb23-4f72-b928-5447fb5182c1" (UID: "0c9fc067-cb23-4f72-b928-5447fb5182c1"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.994202 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5eac3a8d-ee82-4e6f-bdb9-fccb82e09496-config-data" (OuterVolumeSpecName: "config-data") pod "5eac3a8d-ee82-4e6f-bdb9-fccb82e09496" (UID: "5eac3a8d-ee82-4e6f-bdb9-fccb82e09496"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.998894 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0c9fc067-cb23-4f72-b928-5447fb5182c1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.998921 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0c9fc067-cb23-4f72-b928-5447fb5182c1-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.998943 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") on node \"crc\" " Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.998953 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5eac3a8d-ee82-4e6f-bdb9-fccb82e09496-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.998963 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xhmrl\" (UniqueName: \"kubernetes.io/projected/f30aa605-d974-4d0d-aa96-5cb0d96897d6-kube-api-access-xhmrl\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.998990 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a18e9b68-4683-415e-833e-5b363e31d461-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.999000 4558 reconciler_common.go:293] "Volume detached for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/a905b3ac-e1b2-49f8-8034-912138028aca-memcached-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.999009 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a18e9b68-4683-415e-833e-5b363e31d461-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.999020 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f30aa605-d974-4d0d-aa96-5cb0d96897d6-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.999036 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f30aa605-d974-4d0d-aa96-5cb0d96897d6-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.999050 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.999059 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f30aa605-d974-4d0d-aa96-5cb0d96897d6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.999069 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hjrqb\" (UniqueName: \"kubernetes.io/projected/5eac3a8d-ee82-4e6f-bdb9-fccb82e09496-kube-api-access-hjrqb\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:06 crc 
kubenswrapper[4558]: I0120 17:53:06.999079 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/a18e9b68-4683-415e-833e-5b363e31d461-config-data-generated\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.999089 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mxfc8\" (UniqueName: \"kubernetes.io/projected/a18e9b68-4683-415e-833e-5b363e31d461-kube-api-access-mxfc8\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.999099 4558 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/a18e9b68-4683-415e-833e-5b363e31d461-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.999108 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5eac3a8d-ee82-4e6f-bdb9-fccb82e09496-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.999116 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5eac3a8d-ee82-4e6f-bdb9-fccb82e09496-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.999124 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f30aa605-d974-4d0d-aa96-5cb0d96897d6-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.999133 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/a18e9b68-4683-415e-833e-5b363e31d461-config-data-default\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.999141 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5eac3a8d-ee82-4e6f-bdb9-fccb82e09496-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.999149 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0c9fc067-cb23-4f72-b928-5447fb5182c1-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:06 crc kubenswrapper[4558]: I0120 17:53:06.999172 4558 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/5eac3a8d-ee82-4e6f-bdb9-fccb82e09496-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.001156 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f30aa605-d974-4d0d-aa96-5cb0d96897d6-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "f30aa605-d974-4d0d-aa96-5cb0d96897d6" (UID: "f30aa605-d974-4d0d-aa96-5cb0d96897d6"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.014334 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5eac3a8d-ee82-4e6f-bdb9-fccb82e09496-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5eac3a8d-ee82-4e6f-bdb9-fccb82e09496" (UID: "5eac3a8d-ee82-4e6f-bdb9-fccb82e09496"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.026111 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.055763 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage14-crc" (UniqueName: "kubernetes.io/local-volume/local-storage14-crc") on node "crc" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.070552 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.071778 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5eac3a8d-ee82-4e6f-bdb9-fccb82e09496-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "5eac3a8d-ee82-4e6f-bdb9-fccb82e09496" (UID: "5eac3a8d-ee82-4e6f-bdb9-fccb82e09496"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.072941 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0c9fc067-cb23-4f72-b928-5447fb5182c1-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "0c9fc067-cb23-4f72-b928-5447fb5182c1" (UID: "0c9fc067-cb23-4f72-b928-5447fb5182c1"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.075664 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f30aa605-d974-4d0d-aa96-5cb0d96897d6-config-data" (OuterVolumeSpecName: "config-data") pod "f30aa605-d974-4d0d-aa96-5cb0d96897d6" (UID: "f30aa605-d974-4d0d-aa96-5cb0d96897d6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.075951 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5eac3a8d-ee82-4e6f-bdb9-fccb82e09496-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "5eac3a8d-ee82-4e6f-bdb9-fccb82e09496" (UID: "5eac3a8d-ee82-4e6f-bdb9-fccb82e09496"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.090619 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a18e9b68-4683-415e-833e-5b363e31d461-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "a18e9b68-4683-415e-833e-5b363e31d461" (UID: "a18e9b68-4683-415e-833e-5b363e31d461"). InnerVolumeSpecName "galera-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.100459 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/6e7de5b3-4426-4816-9d45-9b4226333dbc-nova-metadata-tls-certs\") pod \"6e7de5b3-4426-4816-9d45-9b4226333dbc\" (UID: \"6e7de5b3-4426-4816-9d45-9b4226333dbc\") " Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.100828 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e7de5b3-4426-4816-9d45-9b4226333dbc-combined-ca-bundle\") pod \"6e7de5b3-4426-4816-9d45-9b4226333dbc\" (UID: \"6e7de5b3-4426-4816-9d45-9b4226333dbc\") " Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.101208 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hnsfg\" (UniqueName: \"kubernetes.io/projected/6e7de5b3-4426-4816-9d45-9b4226333dbc-kube-api-access-hnsfg\") pod \"6e7de5b3-4426-4816-9d45-9b4226333dbc\" (UID: \"6e7de5b3-4426-4816-9d45-9b4226333dbc\") " Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.101351 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e7de5b3-4426-4816-9d45-9b4226333dbc-config-data\") pod \"6e7de5b3-4426-4816-9d45-9b4226333dbc\" (UID: \"6e7de5b3-4426-4816-9d45-9b4226333dbc\") " Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.101382 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6e7de5b3-4426-4816-9d45-9b4226333dbc-logs\") pod \"6e7de5b3-4426-4816-9d45-9b4226333dbc\" (UID: \"6e7de5b3-4426-4816-9d45-9b4226333dbc\") " Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.102112 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0c9fc067-cb23-4f72-b928-5447fb5182c1-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.102132 4558 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/a18e9b68-4683-415e-833e-5b363e31d461-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.102144 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5eac3a8d-ee82-4e6f-bdb9-fccb82e09496-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.102153 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.102175 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.102186 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5eac3a8d-ee82-4e6f-bdb9-fccb82e09496-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.102195 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/f30aa605-d974-4d0d-aa96-5cb0d96897d6-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.102204 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f30aa605-d974-4d0d-aa96-5cb0d96897d6-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.102213 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5eac3a8d-ee82-4e6f-bdb9-fccb82e09496-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.102750 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6e7de5b3-4426-4816-9d45-9b4226333dbc-logs" (OuterVolumeSpecName: "logs") pod "6e7de5b3-4426-4816-9d45-9b4226333dbc" (UID: "6e7de5b3-4426-4816-9d45-9b4226333dbc"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.103407 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6e7de5b3-4426-4816-9d45-9b4226333dbc-kube-api-access-hnsfg" (OuterVolumeSpecName: "kube-api-access-hnsfg") pod "6e7de5b3-4426-4816-9d45-9b4226333dbc" (UID: "6e7de5b3-4426-4816-9d45-9b4226333dbc"). InnerVolumeSpecName "kube-api-access-hnsfg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.116716 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-api-5bfc7dd8b8-pxpm9" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.118047 4558 generic.go:334] "Generic (PLEG): container finished" podID="e638a157-4234-4339-80ef-65d818d39b73" containerID="c2b848f0a44c31229d716190ee834f4766c179c31ef6380b2a6319ec16dbf83b" exitCode=0 Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.118096 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-5bfc7dd8b8-pxpm9" event={"ID":"e638a157-4234-4339-80ef-65d818d39b73","Type":"ContainerDied","Data":"c2b848f0a44c31229d716190ee834f4766c179c31ef6380b2a6319ec16dbf83b"} Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.118150 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-5bfc7dd8b8-pxpm9" event={"ID":"e638a157-4234-4339-80ef-65d818d39b73","Type":"ContainerDied","Data":"6b1b879f8ac6761b57d07842ada4680b78926bc506d8e1ce295b8e90a5aeebbc"} Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.118194 4558 scope.go:117] "RemoveContainer" containerID="c2b848f0a44c31229d716190ee834f4766c179c31ef6380b2a6319ec16dbf83b" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.120856 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6e7de5b3-4426-4816-9d45-9b4226333dbc-config-data" (OuterVolumeSpecName: "config-data") pod "6e7de5b3-4426-4816-9d45-9b4226333dbc" (UID: "6e7de5b3-4426-4816-9d45-9b4226333dbc"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.127556 4558 generic.go:334] "Generic (PLEG): container finished" podID="5eac3a8d-ee82-4e6f-bdb9-fccb82e09496" containerID="c20582ab3d42dc2c2fc8c4708a740fdb26d1d7abea4ac13a5a595f35117f0d0b" exitCode=0 Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.127602 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"5eac3a8d-ee82-4e6f-bdb9-fccb82e09496","Type":"ContainerDied","Data":"c20582ab3d42dc2c2fc8c4708a740fdb26d1d7abea4ac13a5a595f35117f0d0b"} Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.127626 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"5eac3a8d-ee82-4e6f-bdb9-fccb82e09496","Type":"ContainerDied","Data":"0ab6ab06db756b88a51ad0a2e83ca10bfccc0258078b5272a270d4c644f81c49"} Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.127700 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.137362 4558 generic.go:334] "Generic (PLEG): container finished" podID="7d535532-403f-4d55-8138-7e09287d0108" containerID="6d020a191147c34f2712820f7dd8475cc0193d6eeb83f724303b3f67c115e13e" exitCode=0 Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.137442 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"7d535532-403f-4d55-8138-7e09287d0108","Type":"ContainerDied","Data":"6d020a191147c34f2712820f7dd8475cc0193d6eeb83f724303b3f67c115e13e"} Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.146274 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6e7de5b3-4426-4816-9d45-9b4226333dbc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6e7de5b3-4426-4816-9d45-9b4226333dbc" (UID: "6e7de5b3-4426-4816-9d45-9b4226333dbc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.146320 4558 generic.go:334] "Generic (PLEG): container finished" podID="6e7de5b3-4426-4816-9d45-9b4226333dbc" containerID="49e55db6bbf8e6b2aba631a3aec4df47c74f2b76c8c583fcb5c80461300cdc1b" exitCode=0 Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.146399 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"6e7de5b3-4426-4816-9d45-9b4226333dbc","Type":"ContainerDied","Data":"49e55db6bbf8e6b2aba631a3aec4df47c74f2b76c8c583fcb5c80461300cdc1b"} Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.146459 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.146477 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"6e7de5b3-4426-4816-9d45-9b4226333dbc","Type":"ContainerDied","Data":"dc50c110f28d874ec9bb3cd35699a7de945a2543c44d84de595c418644ca4a10"} Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.151554 4558 generic.go:334] "Generic (PLEG): container finished" podID="f30aa605-d974-4d0d-aa96-5cb0d96897d6" containerID="d68e5d5426e93fd2a2fa4b8833690231b1a3151adb135d0a227b1d81dba55d3c" exitCode=0 Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.151603 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"f30aa605-d974-4d0d-aa96-5cb0d96897d6","Type":"ContainerDied","Data":"d68e5d5426e93fd2a2fa4b8833690231b1a3151adb135d0a227b1d81dba55d3c"} Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.151620 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"f30aa605-d974-4d0d-aa96-5cb0d96897d6","Type":"ContainerDied","Data":"742e919ef5796dd179f11bc4cd5cd77a5cafb4a774f5c6bb526160835ec72764"} Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.151709 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.162388 4558 scope.go:117] "RemoveContainer" containerID="5ffeb03ff85f67c07d33f3f3725dfcbef564cf6c716c687b03dc2f0eef33238d" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.163579 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6e7de5b3-4426-4816-9d45-9b4226333dbc-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "6e7de5b3-4426-4816-9d45-9b4226333dbc" (UID: "6e7de5b3-4426-4816-9d45-9b4226333dbc"). InnerVolumeSpecName "nova-metadata-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.166390 4558 generic.go:334] "Generic (PLEG): container finished" podID="64e30e89-cc14-4c89-bd1b-5702bfba717c" containerID="5a3ae52e8d382a0ba3e16ca78acf47ccaf862a7eab92320268ff87ae7158b25c" exitCode=0 Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.166435 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"64e30e89-cc14-4c89-bd1b-5702bfba717c","Type":"ContainerDied","Data":"5a3ae52e8d382a0ba3e16ca78acf47ccaf862a7eab92320268ff87ae7158b25c"} Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.170879 4558 generic.go:334] "Generic (PLEG): container finished" podID="a18e9b68-4683-415e-833e-5b363e31d461" containerID="142baadd97db60a56cd4c8eefd272ce6d8e94faf94a19e872d0adb86b7b6376a" exitCode=0 Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.170955 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"a18e9b68-4683-415e-833e-5b363e31d461","Type":"ContainerDied","Data":"142baadd97db60a56cd4c8eefd272ce6d8e94faf94a19e872d0adb86b7b6376a"} Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.171009 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"a18e9b68-4683-415e-833e-5b363e31d461","Type":"ContainerDied","Data":"01578d9045580c439c227d018be1cb6d842df504a938d3e2e9739bff0eb9b11e"} Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.171120 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.172582 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-zmctf" event={"ID":"c33a9cb9-2eec-4c2e-847d-9b06fcce5fef","Type":"ContainerStarted","Data":"2fe565e8e76d528286b242f2370ba6b33966f2dac706f93bf52b30e596d353a1"} Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.173260 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-zmctf" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.178853 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-db47-account-create-update-wfpfn" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.178900 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-658f45b9f4-tx296" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.178925 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.179216 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.179264 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-d1d9-account-create-update-g54ff" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.179301 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-a1e2-account-create-update-hfpfh" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.179342 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-a0eb-account-create-update-75hkz" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.179408 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-f275-account-create-update-5pb7k" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.201205 4558 scope.go:117] "RemoveContainer" containerID="c2b848f0a44c31229d716190ee834f4766c179c31ef6380b2a6319ec16dbf83b" Jan 20 17:53:07 crc kubenswrapper[4558]: E0120 17:53:07.201603 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c2b848f0a44c31229d716190ee834f4766c179c31ef6380b2a6319ec16dbf83b\": container with ID starting with c2b848f0a44c31229d716190ee834f4766c179c31ef6380b2a6319ec16dbf83b not found: ID does not exist" containerID="c2b848f0a44c31229d716190ee834f4766c179c31ef6380b2a6319ec16dbf83b" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.201631 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c2b848f0a44c31229d716190ee834f4766c179c31ef6380b2a6319ec16dbf83b"} err="failed to get container status \"c2b848f0a44c31229d716190ee834f4766c179c31ef6380b2a6319ec16dbf83b\": rpc error: code = NotFound desc = could not find container \"c2b848f0a44c31229d716190ee834f4766c179c31ef6380b2a6319ec16dbf83b\": container with ID starting with c2b848f0a44c31229d716190ee834f4766c179c31ef6380b2a6319ec16dbf83b not found: ID does not exist" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.201652 4558 scope.go:117] "RemoveContainer" containerID="5ffeb03ff85f67c07d33f3f3725dfcbef564cf6c716c687b03dc2f0eef33238d" Jan 20 17:53:07 crc kubenswrapper[4558]: E0120 17:53:07.201859 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5ffeb03ff85f67c07d33f3f3725dfcbef564cf6c716c687b03dc2f0eef33238d\": container with ID starting with 5ffeb03ff85f67c07d33f3f3725dfcbef564cf6c716c687b03dc2f0eef33238d not found: ID does not exist" containerID="5ffeb03ff85f67c07d33f3f3725dfcbef564cf6c716c687b03dc2f0eef33238d" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.201873 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5ffeb03ff85f67c07d33f3f3725dfcbef564cf6c716c687b03dc2f0eef33238d"} err="failed to get container status \"5ffeb03ff85f67c07d33f3f3725dfcbef564cf6c716c687b03dc2f0eef33238d\": rpc error: code = NotFound desc = could not find container \"5ffeb03ff85f67c07d33f3f3725dfcbef564cf6c716c687b03dc2f0eef33238d\": container with ID starting with 5ffeb03ff85f67c07d33f3f3725dfcbef564cf6c716c687b03dc2f0eef33238d not found: ID does not exist" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.201888 4558 scope.go:117] "RemoveContainer" containerID="c20582ab3d42dc2c2fc8c4708a740fdb26d1d7abea4ac13a5a595f35117f0d0b" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.202753 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e638a157-4234-4339-80ef-65d818d39b73-config-data-custom\") pod \"e638a157-4234-4339-80ef-65d818d39b73\" (UID: \"e638a157-4234-4339-80ef-65d818d39b73\") " Jan 20 17:53:07 
crc kubenswrapper[4558]: I0120 17:53:07.202833 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e638a157-4234-4339-80ef-65d818d39b73-logs\") pod \"e638a157-4234-4339-80ef-65d818d39b73\" (UID: \"e638a157-4234-4339-80ef-65d818d39b73\") " Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.202863 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e638a157-4234-4339-80ef-65d818d39b73-config-data\") pod \"e638a157-4234-4339-80ef-65d818d39b73\" (UID: \"e638a157-4234-4339-80ef-65d818d39b73\") " Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.202894 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vf97d\" (UniqueName: \"kubernetes.io/projected/e638a157-4234-4339-80ef-65d818d39b73-kube-api-access-vf97d\") pod \"e638a157-4234-4339-80ef-65d818d39b73\" (UID: \"e638a157-4234-4339-80ef-65d818d39b73\") " Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.202991 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e638a157-4234-4339-80ef-65d818d39b73-combined-ca-bundle\") pod \"e638a157-4234-4339-80ef-65d818d39b73\" (UID: \"e638a157-4234-4339-80ef-65d818d39b73\") " Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.203102 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e638a157-4234-4339-80ef-65d818d39b73-internal-tls-certs\") pod \"e638a157-4234-4339-80ef-65d818d39b73\" (UID: \"e638a157-4234-4339-80ef-65d818d39b73\") " Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.203131 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e638a157-4234-4339-80ef-65d818d39b73-public-tls-certs\") pod \"e638a157-4234-4339-80ef-65d818d39b73\" (UID: \"e638a157-4234-4339-80ef-65d818d39b73\") " Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.203733 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6e7de5b3-4426-4816-9d45-9b4226333dbc-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.203746 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6e7de5b3-4426-4816-9d45-9b4226333dbc-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.203755 4558 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/6e7de5b3-4426-4816-9d45-9b4226333dbc-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.203765 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e7de5b3-4426-4816-9d45-9b4226333dbc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.203776 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hnsfg\" (UniqueName: \"kubernetes.io/projected/6e7de5b3-4426-4816-9d45-9b4226333dbc-kube-api-access-hnsfg\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.204634 4558 operation_generator.go:803] 
UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e638a157-4234-4339-80ef-65d818d39b73-logs" (OuterVolumeSpecName: "logs") pod "e638a157-4234-4339-80ef-65d818d39b73" (UID: "e638a157-4234-4339-80ef-65d818d39b73"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.211789 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.215872 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e638a157-4234-4339-80ef-65d818d39b73-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "e638a157-4234-4339-80ef-65d818d39b73" (UID: "e638a157-4234-4339-80ef-65d818d39b73"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.220394 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e638a157-4234-4339-80ef-65d818d39b73-kube-api-access-vf97d" (OuterVolumeSpecName: "kube-api-access-vf97d") pod "e638a157-4234-4339-80ef-65d818d39b73" (UID: "e638a157-4234-4339-80ef-65d818d39b73"). InnerVolumeSpecName "kube-api-access-vf97d". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.222253 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.225355 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-zmctf" podStartSLOduration=5.225343589 podStartE2EDuration="5.225343589s" podCreationTimestamp="2026-01-20 17:53:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:53:07.209738436 +0000 UTC m=+4280.970076403" watchObservedRunningTime="2026-01-20 17:53:07.225343589 +0000 UTC m=+4280.985681556" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.254232 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e638a157-4234-4339-80ef-65d818d39b73-config-data" (OuterVolumeSpecName: "config-data") pod "e638a157-4234-4339-80ef-65d818d39b73" (UID: "e638a157-4234-4339-80ef-65d818d39b73"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.261761 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e638a157-4234-4339-80ef-65d818d39b73-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e638a157-4234-4339-80ef-65d818d39b73" (UID: "e638a157-4234-4339-80ef-65d818d39b73"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.306189 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e638a157-4234-4339-80ef-65d818d39b73-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.306219 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e638a157-4234-4339-80ef-65d818d39b73-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.306230 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e638a157-4234-4339-80ef-65d818d39b73-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.306243 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vf97d\" (UniqueName: \"kubernetes.io/projected/e638a157-4234-4339-80ef-65d818d39b73-kube-api-access-vf97d\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.306257 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e638a157-4234-4339-80ef-65d818d39b73-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.319257 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e638a157-4234-4339-80ef-65d818d39b73-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "e638a157-4234-4339-80ef-65d818d39b73" (UID: "e638a157-4234-4339-80ef-65d818d39b73"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.326755 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e638a157-4234-4339-80ef-65d818d39b73-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "e638a157-4234-4339-80ef-65d818d39b73" (UID: "e638a157-4234-4339-80ef-65d818d39b73"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.408543 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e638a157-4234-4339-80ef-65d818d39b73-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.408570 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e638a157-4234-4339-80ef-65d818d39b73-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:07 crc kubenswrapper[4558]: E0120 17:53:07.464750 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ce0da192b65bf7c5e90dd832e0212db51fd4f3e04276e89c0f7b9256d88b752a" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:53:07 crc kubenswrapper[4558]: E0120 17:53:07.466548 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ce0da192b65bf7c5e90dd832e0212db51fd4f3e04276e89c0f7b9256d88b752a" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:53:07 crc kubenswrapper[4558]: E0120 17:53:07.468073 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ce0da192b65bf7c5e90dd832e0212db51fd4f3e04276e89c0f7b9256d88b752a" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:53:07 crc kubenswrapper[4558]: E0120 17:53:07.468126 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2" containerName="nova-scheduler-scheduler" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.482527 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.489457 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.490067 4558 scope.go:117] "RemoveContainer" containerID="931107b5dd291893e36bfee944ab2879ec15efdaafdf4d8d3e976539e9c14eab" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.495430 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.509549 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d535532-403f-4d55-8138-7e09287d0108-config-data\") pod \"7d535532-403f-4d55-8138-7e09287d0108\" (UID: \"7d535532-403f-4d55-8138-7e09287d0108\") " Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.509590 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7d535532-403f-4d55-8138-7e09287d0108-scripts\") pod \"7d535532-403f-4d55-8138-7e09287d0108\" (UID: \"7d535532-403f-4d55-8138-7e09287d0108\") " Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.509616 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7d535532-403f-4d55-8138-7e09287d0108-httpd-run\") pod \"7d535532-403f-4d55-8138-7e09287d0108\" (UID: \"7d535532-403f-4d55-8138-7e09287d0108\") " Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.509669 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7d535532-403f-4d55-8138-7e09287d0108-public-tls-certs\") pod \"7d535532-403f-4d55-8138-7e09287d0108\" (UID: \"7d535532-403f-4d55-8138-7e09287d0108\") " Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.509707 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"7d535532-403f-4d55-8138-7e09287d0108\" (UID: \"7d535532-403f-4d55-8138-7e09287d0108\") " Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.509770 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d535532-403f-4d55-8138-7e09287d0108-combined-ca-bundle\") pod \"7d535532-403f-4d55-8138-7e09287d0108\" (UID: \"7d535532-403f-4d55-8138-7e09287d0108\") " Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.509846 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7d535532-403f-4d55-8138-7e09287d0108-logs\") pod \"7d535532-403f-4d55-8138-7e09287d0108\" (UID: \"7d535532-403f-4d55-8138-7e09287d0108\") " Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.510587 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k7t8d\" (UniqueName: \"kubernetes.io/projected/7d535532-403f-4d55-8138-7e09287d0108-kube-api-access-k7t8d\") pod \"7d535532-403f-4d55-8138-7e09287d0108\" (UID: \"7d535532-403f-4d55-8138-7e09287d0108\") " Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.510238 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/empty-dir/7d535532-403f-4d55-8138-7e09287d0108-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "7d535532-403f-4d55-8138-7e09287d0108" (UID: "7d535532-403f-4d55-8138-7e09287d0108"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.510523 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7d535532-403f-4d55-8138-7e09287d0108-logs" (OuterVolumeSpecName: "logs") pod "7d535532-403f-4d55-8138-7e09287d0108" (UID: "7d535532-403f-4d55-8138-7e09287d0108"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.511729 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7d535532-403f-4d55-8138-7e09287d0108-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.511749 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7d535532-403f-4d55-8138-7e09287d0108-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.513717 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "glance") pod "7d535532-403f-4d55-8138-7e09287d0108" (UID: "7d535532-403f-4d55-8138-7e09287d0108"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.514954 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7d535532-403f-4d55-8138-7e09287d0108-kube-api-access-k7t8d" (OuterVolumeSpecName: "kube-api-access-k7t8d") pod "7d535532-403f-4d55-8138-7e09287d0108" (UID: "7d535532-403f-4d55-8138-7e09287d0108"). InnerVolumeSpecName "kube-api-access-k7t8d". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.519759 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.520760 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d535532-403f-4d55-8138-7e09287d0108-scripts" (OuterVolumeSpecName: "scripts") pod "7d535532-403f-4d55-8138-7e09287d0108" (UID: "7d535532-403f-4d55-8138-7e09287d0108"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.531316 4558 scope.go:117] "RemoveContainer" containerID="c20582ab3d42dc2c2fc8c4708a740fdb26d1d7abea4ac13a5a595f35117f0d0b" Jan 20 17:53:07 crc kubenswrapper[4558]: E0120 17:53:07.531771 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c20582ab3d42dc2c2fc8c4708a740fdb26d1d7abea4ac13a5a595f35117f0d0b\": container with ID starting with c20582ab3d42dc2c2fc8c4708a740fdb26d1d7abea4ac13a5a595f35117f0d0b not found: ID does not exist" containerID="c20582ab3d42dc2c2fc8c4708a740fdb26d1d7abea4ac13a5a595f35117f0d0b" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.531806 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c20582ab3d42dc2c2fc8c4708a740fdb26d1d7abea4ac13a5a595f35117f0d0b"} err="failed to get container status \"c20582ab3d42dc2c2fc8c4708a740fdb26d1d7abea4ac13a5a595f35117f0d0b\": rpc error: code = NotFound desc = could not find container \"c20582ab3d42dc2c2fc8c4708a740fdb26d1d7abea4ac13a5a595f35117f0d0b\": container with ID starting with c20582ab3d42dc2c2fc8c4708a740fdb26d1d7abea4ac13a5a595f35117f0d0b not found: ID does not exist" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.531859 4558 scope.go:117] "RemoveContainer" containerID="931107b5dd291893e36bfee944ab2879ec15efdaafdf4d8d3e976539e9c14eab" Jan 20 17:53:07 crc kubenswrapper[4558]: E0120 17:53:07.532188 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"931107b5dd291893e36bfee944ab2879ec15efdaafdf4d8d3e976539e9c14eab\": container with ID starting with 931107b5dd291893e36bfee944ab2879ec15efdaafdf4d8d3e976539e9c14eab not found: ID does not exist" containerID="931107b5dd291893e36bfee944ab2879ec15efdaafdf4d8d3e976539e9c14eab" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.532210 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"931107b5dd291893e36bfee944ab2879ec15efdaafdf4d8d3e976539e9c14eab"} err="failed to get container status \"931107b5dd291893e36bfee944ab2879ec15efdaafdf4d8d3e976539e9c14eab\": rpc error: code = NotFound desc = could not find container \"931107b5dd291893e36bfee944ab2879ec15efdaafdf4d8d3e976539e9c14eab\": container with ID starting with 931107b5dd291893e36bfee944ab2879ec15efdaafdf4d8d3e976539e9c14eab not found: ID does not exist" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.532227 4558 scope.go:117] "RemoveContainer" containerID="49e55db6bbf8e6b2aba631a3aec4df47c74f2b76c8c583fcb5c80461300cdc1b" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.539669 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d535532-403f-4d55-8138-7e09287d0108-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7d535532-403f-4d55-8138-7e09287d0108" (UID: "7d535532-403f-4d55-8138-7e09287d0108"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.545378 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d535532-403f-4d55-8138-7e09287d0108-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "7d535532-403f-4d55-8138-7e09287d0108" (UID: "7d535532-403f-4d55-8138-7e09287d0108"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.562296 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d535532-403f-4d55-8138-7e09287d0108-config-data" (OuterVolumeSpecName: "config-data") pod "7d535532-403f-4d55-8138-7e09287d0108" (UID: "7d535532-403f-4d55-8138-7e09287d0108"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.612706 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-994rd\" (UniqueName: \"kubernetes.io/projected/64e30e89-cc14-4c89-bd1b-5702bfba717c-kube-api-access-994rd\") pod \"64e30e89-cc14-4c89-bd1b-5702bfba717c\" (UID: \"64e30e89-cc14-4c89-bd1b-5702bfba717c\") " Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.612752 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64e30e89-cc14-4c89-bd1b-5702bfba717c-combined-ca-bundle\") pod \"64e30e89-cc14-4c89-bd1b-5702bfba717c\" (UID: \"64e30e89-cc14-4c89-bd1b-5702bfba717c\") " Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.612815 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/64e30e89-cc14-4c89-bd1b-5702bfba717c-public-tls-certs\") pod \"64e30e89-cc14-4c89-bd1b-5702bfba717c\" (UID: \"64e30e89-cc14-4c89-bd1b-5702bfba717c\") " Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.612848 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/64e30e89-cc14-4c89-bd1b-5702bfba717c-logs\") pod \"64e30e89-cc14-4c89-bd1b-5702bfba717c\" (UID: \"64e30e89-cc14-4c89-bd1b-5702bfba717c\") " Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.612919 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64e30e89-cc14-4c89-bd1b-5702bfba717c-config-data\") pod \"64e30e89-cc14-4c89-bd1b-5702bfba717c\" (UID: \"64e30e89-cc14-4c89-bd1b-5702bfba717c\") " Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.612982 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/64e30e89-cc14-4c89-bd1b-5702bfba717c-internal-tls-certs\") pod \"64e30e89-cc14-4c89-bd1b-5702bfba717c\" (UID: \"64e30e89-cc14-4c89-bd1b-5702bfba717c\") " Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.613598 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/64e30e89-cc14-4c89-bd1b-5702bfba717c-logs" (OuterVolumeSpecName: "logs") pod "64e30e89-cc14-4c89-bd1b-5702bfba717c" (UID: "64e30e89-cc14-4c89-bd1b-5702bfba717c"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.613647 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k7t8d\" (UniqueName: \"kubernetes.io/projected/7d535532-403f-4d55-8138-7e09287d0108-kube-api-access-k7t8d\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.613775 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d535532-403f-4d55-8138-7e09287d0108-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.613831 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7d535532-403f-4d55-8138-7e09287d0108-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.613901 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7d535532-403f-4d55-8138-7e09287d0108-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.613991 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.614195 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d535532-403f-4d55-8138-7e09287d0108-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.616890 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/64e30e89-cc14-4c89-bd1b-5702bfba717c-kube-api-access-994rd" (OuterVolumeSpecName: "kube-api-access-994rd") pod "64e30e89-cc14-4c89-bd1b-5702bfba717c" (UID: "64e30e89-cc14-4c89-bd1b-5702bfba717c"). InnerVolumeSpecName "kube-api-access-994rd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.625159 4558 scope.go:117] "RemoveContainer" containerID="5e481f5bc5fbafdbeaf23fc31d695f949c1cb9231a43da689c3c8047fcf0e3bf" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.629251 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-bxrps" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.641189 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.645145 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.646611 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64e30e89-cc14-4c89-bd1b-5702bfba717c-config-data" (OuterVolumeSpecName: "config-data") pod "64e30e89-cc14-4c89-bd1b-5702bfba717c" (UID: "64e30e89-cc14-4c89-bd1b-5702bfba717c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.655814 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.657248 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64e30e89-cc14-4c89-bd1b-5702bfba717c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "64e30e89-cc14-4c89-bd1b-5702bfba717c" (UID: "64e30e89-cc14-4c89-bd1b-5702bfba717c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.663141 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.667994 4558 scope.go:117] "RemoveContainer" containerID="49e55db6bbf8e6b2aba631a3aec4df47c74f2b76c8c583fcb5c80461300cdc1b" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.670262 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:53:07 crc kubenswrapper[4558]: E0120 17:53:07.671726 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"49e55db6bbf8e6b2aba631a3aec4df47c74f2b76c8c583fcb5c80461300cdc1b\": container with ID starting with 49e55db6bbf8e6b2aba631a3aec4df47c74f2b76c8c583fcb5c80461300cdc1b not found: ID does not exist" containerID="49e55db6bbf8e6b2aba631a3aec4df47c74f2b76c8c583fcb5c80461300cdc1b" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.671772 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"49e55db6bbf8e6b2aba631a3aec4df47c74f2b76c8c583fcb5c80461300cdc1b"} err="failed to get container status \"49e55db6bbf8e6b2aba631a3aec4df47c74f2b76c8c583fcb5c80461300cdc1b\": rpc error: code = NotFound desc = could not find container \"49e55db6bbf8e6b2aba631a3aec4df47c74f2b76c8c583fcb5c80461300cdc1b\": container with ID starting with 49e55db6bbf8e6b2aba631a3aec4df47c74f2b76c8c583fcb5c80461300cdc1b not found: ID does not exist" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.671801 4558 scope.go:117] "RemoveContainer" containerID="5e481f5bc5fbafdbeaf23fc31d695f949c1cb9231a43da689c3c8047fcf0e3bf" Jan 20 17:53:07 crc kubenswrapper[4558]: E0120 17:53:07.672115 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5e481f5bc5fbafdbeaf23fc31d695f949c1cb9231a43da689c3c8047fcf0e3bf\": container with ID starting with 5e481f5bc5fbafdbeaf23fc31d695f949c1cb9231a43da689c3c8047fcf0e3bf not found: ID does not exist" containerID="5e481f5bc5fbafdbeaf23fc31d695f949c1cb9231a43da689c3c8047fcf0e3bf" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.672151 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5e481f5bc5fbafdbeaf23fc31d695f949c1cb9231a43da689c3c8047fcf0e3bf"} err="failed to get container status \"5e481f5bc5fbafdbeaf23fc31d695f949c1cb9231a43da689c3c8047fcf0e3bf\": rpc error: code = NotFound desc = could not find container \"5e481f5bc5fbafdbeaf23fc31d695f949c1cb9231a43da689c3c8047fcf0e3bf\": container with ID starting with 5e481f5bc5fbafdbeaf23fc31d695f949c1cb9231a43da689c3c8047fcf0e3bf not found: ID does not exist" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.672193 4558 scope.go:117] 
"RemoveContainer" containerID="d68e5d5426e93fd2a2fa4b8833690231b1a3151adb135d0a227b1d81dba55d3c" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.673367 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64e30e89-cc14-4c89-bd1b-5702bfba717c-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "64e30e89-cc14-4c89-bd1b-5702bfba717c" (UID: "64e30e89-cc14-4c89-bd1b-5702bfba717c"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.677889 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64e30e89-cc14-4c89-bd1b-5702bfba717c-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "64e30e89-cc14-4c89-bd1b-5702bfba717c" (UID: "64e30e89-cc14-4c89-bd1b-5702bfba717c"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.678682 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-f275-account-create-update-5pb7k"] Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.683298 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-f275-account-create-update-5pb7k"] Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.692868 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-a1e2-account-create-update-hfpfh"] Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.696249 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-a1e2-account-create-update-hfpfh"] Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.708779 4558 scope.go:117] "RemoveContainer" containerID="519c148165b8e5d8031cd18cf91e1dcfa134e4235207cac41f7dc13725bac816" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.712360 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-db47-account-create-update-wfpfn"] Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.715355 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8101ebb9-ace1-4242-8c8b-698307c6be29-operator-scripts\") pod \"8101ebb9-ace1-4242-8c8b-698307c6be29\" (UID: \"8101ebb9-ace1-4242-8c8b-698307c6be29\") " Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.715396 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9jwrh\" (UniqueName: \"kubernetes.io/projected/8101ebb9-ace1-4242-8c8b-698307c6be29-kube-api-access-9jwrh\") pod \"8101ebb9-ace1-4242-8c8b-698307c6be29\" (UID: \"8101ebb9-ace1-4242-8c8b-698307c6be29\") " Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.717347 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8101ebb9-ace1-4242-8c8b-698307c6be29-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "8101ebb9-ace1-4242-8c8b-698307c6be29" (UID: "8101ebb9-ace1-4242-8c8b-698307c6be29"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.718960 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.718986 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/64e30e89-cc14-4c89-bd1b-5702bfba717c-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.719001 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/64e30e89-cc14-4c89-bd1b-5702bfba717c-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.719011 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8101ebb9-ace1-4242-8c8b-698307c6be29-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.719020 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-994rd\" (UniqueName: \"kubernetes.io/projected/64e30e89-cc14-4c89-bd1b-5702bfba717c-kube-api-access-994rd\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.719040 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64e30e89-cc14-4c89-bd1b-5702bfba717c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.719049 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/64e30e89-cc14-4c89-bd1b-5702bfba717c-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.719059 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/64e30e89-cc14-4c89-bd1b-5702bfba717c-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.723543 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8101ebb9-ace1-4242-8c8b-698307c6be29-kube-api-access-9jwrh" (OuterVolumeSpecName: "kube-api-access-9jwrh") pod "8101ebb9-ace1-4242-8c8b-698307c6be29" (UID: "8101ebb9-ace1-4242-8c8b-698307c6be29"). InnerVolumeSpecName "kube-api-access-9jwrh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.733468 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-db47-account-create-update-wfpfn"] Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.747994 4558 scope.go:117] "RemoveContainer" containerID="d68e5d5426e93fd2a2fa4b8833690231b1a3151adb135d0a227b1d81dba55d3c" Jan 20 17:53:07 crc kubenswrapper[4558]: E0120 17:53:07.749281 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d68e5d5426e93fd2a2fa4b8833690231b1a3151adb135d0a227b1d81dba55d3c\": container with ID starting with d68e5d5426e93fd2a2fa4b8833690231b1a3151adb135d0a227b1d81dba55d3c not found: ID does not exist" containerID="d68e5d5426e93fd2a2fa4b8833690231b1a3151adb135d0a227b1d81dba55d3c" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.749329 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d68e5d5426e93fd2a2fa4b8833690231b1a3151adb135d0a227b1d81dba55d3c"} err="failed to get container status \"d68e5d5426e93fd2a2fa4b8833690231b1a3151adb135d0a227b1d81dba55d3c\": rpc error: code = NotFound desc = could not find container \"d68e5d5426e93fd2a2fa4b8833690231b1a3151adb135d0a227b1d81dba55d3c\": container with ID starting with d68e5d5426e93fd2a2fa4b8833690231b1a3151adb135d0a227b1d81dba55d3c not found: ID does not exist" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.749362 4558 scope.go:117] "RemoveContainer" containerID="519c148165b8e5d8031cd18cf91e1dcfa134e4235207cac41f7dc13725bac816" Jan 20 17:53:07 crc kubenswrapper[4558]: E0120 17:53:07.753699 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"519c148165b8e5d8031cd18cf91e1dcfa134e4235207cac41f7dc13725bac816\": container with ID starting with 519c148165b8e5d8031cd18cf91e1dcfa134e4235207cac41f7dc13725bac816 not found: ID does not exist" containerID="519c148165b8e5d8031cd18cf91e1dcfa134e4235207cac41f7dc13725bac816" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.753753 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"519c148165b8e5d8031cd18cf91e1dcfa134e4235207cac41f7dc13725bac816"} err="failed to get container status \"519c148165b8e5d8031cd18cf91e1dcfa134e4235207cac41f7dc13725bac816\": rpc error: code = NotFound desc = could not find container \"519c148165b8e5d8031cd18cf91e1dcfa134e4235207cac41f7dc13725bac816\": container with ID starting with 519c148165b8e5d8031cd18cf91e1dcfa134e4235207cac41f7dc13725bac816 not found: ID does not exist" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.753781 4558 scope.go:117] "RemoveContainer" containerID="142baadd97db60a56cd4c8eefd272ce6d8e94faf94a19e872d0adb86b7b6376a" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.780837 4558 scope.go:117] "RemoveContainer" containerID="eedccb8254c4f03083611f17fbaf5f47532ace4c2ec8b61ac20041dbf695b528" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.781002 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-a0eb-account-create-update-75hkz"] Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.799384 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell0-a0eb-account-create-update-75hkz"] Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.803698 4558 scope.go:117] 
"RemoveContainer" containerID="142baadd97db60a56cd4c8eefd272ce6d8e94faf94a19e872d0adb86b7b6376a" Jan 20 17:53:07 crc kubenswrapper[4558]: E0120 17:53:07.804130 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"142baadd97db60a56cd4c8eefd272ce6d8e94faf94a19e872d0adb86b7b6376a\": container with ID starting with 142baadd97db60a56cd4c8eefd272ce6d8e94faf94a19e872d0adb86b7b6376a not found: ID does not exist" containerID="142baadd97db60a56cd4c8eefd272ce6d8e94faf94a19e872d0adb86b7b6376a" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.804181 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"142baadd97db60a56cd4c8eefd272ce6d8e94faf94a19e872d0adb86b7b6376a"} err="failed to get container status \"142baadd97db60a56cd4c8eefd272ce6d8e94faf94a19e872d0adb86b7b6376a\": rpc error: code = NotFound desc = could not find container \"142baadd97db60a56cd4c8eefd272ce6d8e94faf94a19e872d0adb86b7b6376a\": container with ID starting with 142baadd97db60a56cd4c8eefd272ce6d8e94faf94a19e872d0adb86b7b6376a not found: ID does not exist" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.804210 4558 scope.go:117] "RemoveContainer" containerID="eedccb8254c4f03083611f17fbaf5f47532ace4c2ec8b61ac20041dbf695b528" Jan 20 17:53:07 crc kubenswrapper[4558]: E0120 17:53:07.804979 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eedccb8254c4f03083611f17fbaf5f47532ace4c2ec8b61ac20041dbf695b528\": container with ID starting with eedccb8254c4f03083611f17fbaf5f47532ace4c2ec8b61ac20041dbf695b528 not found: ID does not exist" containerID="eedccb8254c4f03083611f17fbaf5f47532ace4c2ec8b61ac20041dbf695b528" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.805002 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eedccb8254c4f03083611f17fbaf5f47532ace4c2ec8b61ac20041dbf695b528"} err="failed to get container status \"eedccb8254c4f03083611f17fbaf5f47532ace4c2ec8b61ac20041dbf695b528\": rpc error: code = NotFound desc = could not find container \"eedccb8254c4f03083611f17fbaf5f47532ace4c2ec8b61ac20041dbf695b528\": container with ID starting with eedccb8254c4f03083611f17fbaf5f47532ace4c2ec8b61ac20041dbf695b528 not found: ID does not exist" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.808275 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-d1d9-account-create-update-g54ff"] Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.826672 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-d1d9-account-create-update-g54ff"] Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.827724 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.828290 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a335ddc5-7bf4-4c3e-83a8-175921bbecf4-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.828315 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fxcqr\" (UniqueName: \"kubernetes.io/projected/a335ddc5-7bf4-4c3e-83a8-175921bbecf4-kube-api-access-fxcqr\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 
17:53:07.828337 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9jwrh\" (UniqueName: \"kubernetes.io/projected/8101ebb9-ace1-4242-8c8b-698307c6be29-kube-api-access-9jwrh\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.835859 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.841059 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-658f45b9f4-tx296"] Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.845298 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/placement-658f45b9f4-tx296"] Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.851091 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:53:07 crc kubenswrapper[4558]: I0120 17:53:07.854721 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:53:08 crc kubenswrapper[4558]: I0120 17:53:08.196497 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-api-5bfc7dd8b8-pxpm9" Jan 20 17:53:08 crc kubenswrapper[4558]: I0120 17:53:08.199697 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"7d535532-403f-4d55-8138-7e09287d0108","Type":"ContainerDied","Data":"8d04c26962083a326120f59b8588c7e99446de909dbf820fe5f993ff533b1b74"} Jan 20 17:53:08 crc kubenswrapper[4558]: I0120 17:53:08.199766 4558 scope.go:117] "RemoveContainer" containerID="6d020a191147c34f2712820f7dd8475cc0193d6eeb83f724303b3f67c115e13e" Jan 20 17:53:08 crc kubenswrapper[4558]: I0120 17:53:08.199782 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:53:08 crc kubenswrapper[4558]: I0120 17:53:08.202695 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-bxrps" event={"ID":"8101ebb9-ace1-4242-8c8b-698307c6be29","Type":"ContainerDied","Data":"ace592302f09e2dd806f0b4ee4d154b4f2ff126b8edd992d3100f207347780a3"} Jan 20 17:53:08 crc kubenswrapper[4558]: I0120 17:53:08.202871 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-bxrps" Jan 20 17:53:08 crc kubenswrapper[4558]: I0120 17:53:08.212335 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:53:08 crc kubenswrapper[4558]: I0120 17:53:08.213214 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"64e30e89-cc14-4c89-bd1b-5702bfba717c","Type":"ContainerDied","Data":"6cf31e786d84d421b05166a4f94e958e8db45bdaf08a5455f8433627e82c0c35"} Jan 20 17:53:08 crc kubenswrapper[4558]: I0120 17:53:08.247643 4558 scope.go:117] "RemoveContainer" containerID="f6b470a99751a2192a7c08c8ea1e3240681879e8b22e9a2638b977811313762b" Jan 20 17:53:08 crc kubenswrapper[4558]: I0120 17:53:08.260290 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:53:08 crc kubenswrapper[4558]: I0120 17:53:08.271155 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:53:08 crc kubenswrapper[4558]: I0120 17:53:08.278836 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-api-5bfc7dd8b8-pxpm9"] Jan 20 17:53:08 crc kubenswrapper[4558]: I0120 17:53:08.282626 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-api-5bfc7dd8b8-pxpm9"] Jan 20 17:53:08 crc kubenswrapper[4558]: I0120 17:53:08.282782 4558 scope.go:117] "RemoveContainer" containerID="f74190a5462e78d2ad25c03116d8718fb4477617af58b9c4c6c750b5d4e94b80" Jan 20 17:53:08 crc kubenswrapper[4558]: I0120 17:53:08.288585 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:53:08 crc kubenswrapper[4558]: I0120 17:53:08.292257 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:53:08 crc kubenswrapper[4558]: I0120 17:53:08.296659 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-bxrps"] Jan 20 17:53:08 crc kubenswrapper[4558]: I0120 17:53:08.300039 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-bxrps"] Jan 20 17:53:08 crc kubenswrapper[4558]: I0120 17:53:08.411489 4558 scope.go:117] "RemoveContainer" containerID="5a3ae52e8d382a0ba3e16ca78acf47ccaf862a7eab92320268ff87ae7158b25c" Jan 20 17:53:08 crc kubenswrapper[4558]: I0120 17:53:08.440223 4558 scope.go:117] "RemoveContainer" containerID="e0d25530c2349a51795cf3ce00e8ae91ba3476254b412bf92d6e0c208e2c53dc" Jan 20 17:53:08 crc kubenswrapper[4558]: I0120 17:53:08.576436 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01f39889-c29f-4e28-98fa-bc99322f761c" path="/var/lib/kubelet/pods/01f39889-c29f-4e28-98fa-bc99322f761c/volumes" Jan 20 17:53:08 crc kubenswrapper[4558]: I0120 17:53:08.577153 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="05b3b028-51e2-49a9-9fb8-a10c096f3b27" path="/var/lib/kubelet/pods/05b3b028-51e2-49a9-9fb8-a10c096f3b27/volumes" Jan 20 17:53:08 crc kubenswrapper[4558]: I0120 17:53:08.577787 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0c9fc067-cb23-4f72-b928-5447fb5182c1" path="/var/lib/kubelet/pods/0c9fc067-cb23-4f72-b928-5447fb5182c1/volumes" Jan 20 17:53:08 crc kubenswrapper[4558]: I0120 17:53:08.579178 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3eabae3c-0014-4d8e-9393-7cb934ba9fa4" path="/var/lib/kubelet/pods/3eabae3c-0014-4d8e-9393-7cb934ba9fa4/volumes" Jan 20 17:53:08 crc kubenswrapper[4558]: I0120 17:53:08.579808 4558 
kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5eac3a8d-ee82-4e6f-bdb9-fccb82e09496" path="/var/lib/kubelet/pods/5eac3a8d-ee82-4e6f-bdb9-fccb82e09496/volumes" Jan 20 17:53:08 crc kubenswrapper[4558]: I0120 17:53:08.580538 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="64e30e89-cc14-4c89-bd1b-5702bfba717c" path="/var/lib/kubelet/pods/64e30e89-cc14-4c89-bd1b-5702bfba717c/volumes" Jan 20 17:53:08 crc kubenswrapper[4558]: I0120 17:53:08.581585 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6d80c5b5-880f-4d59-9287-334023ec6ce9" path="/var/lib/kubelet/pods/6d80c5b5-880f-4d59-9287-334023ec6ce9/volumes" Jan 20 17:53:08 crc kubenswrapper[4558]: I0120 17:53:08.581985 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6e7de5b3-4426-4816-9d45-9b4226333dbc" path="/var/lib/kubelet/pods/6e7de5b3-4426-4816-9d45-9b4226333dbc/volumes" Jan 20 17:53:08 crc kubenswrapper[4558]: I0120 17:53:08.582549 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="799d2689-2fcd-4bc5-ae70-1d0ae1194c76" path="/var/lib/kubelet/pods/799d2689-2fcd-4bc5-ae70-1d0ae1194c76/volumes" Jan 20 17:53:08 crc kubenswrapper[4558]: I0120 17:53:08.582964 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7d535532-403f-4d55-8138-7e09287d0108" path="/var/lib/kubelet/pods/7d535532-403f-4d55-8138-7e09287d0108/volumes" Jan 20 17:53:08 crc kubenswrapper[4558]: I0120 17:53:08.583965 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8101ebb9-ace1-4242-8c8b-698307c6be29" path="/var/lib/kubelet/pods/8101ebb9-ace1-4242-8c8b-698307c6be29/volumes" Jan 20 17:53:08 crc kubenswrapper[4558]: I0120 17:53:08.584576 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a18e9b68-4683-415e-833e-5b363e31d461" path="/var/lib/kubelet/pods/a18e9b68-4683-415e-833e-5b363e31d461/volumes" Jan 20 17:53:08 crc kubenswrapper[4558]: I0120 17:53:08.585008 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a335ddc5-7bf4-4c3e-83a8-175921bbecf4" path="/var/lib/kubelet/pods/a335ddc5-7bf4-4c3e-83a8-175921bbecf4/volumes" Jan 20 17:53:08 crc kubenswrapper[4558]: I0120 17:53:08.585790 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a905b3ac-e1b2-49f8-8034-912138028aca" path="/var/lib/kubelet/pods/a905b3ac-e1b2-49f8-8034-912138028aca/volumes" Jan 20 17:53:08 crc kubenswrapper[4558]: I0120 17:53:08.586330 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e638a157-4234-4339-80ef-65d818d39b73" path="/var/lib/kubelet/pods/e638a157-4234-4339-80ef-65d818d39b73/volumes" Jan 20 17:53:08 crc kubenswrapper[4558]: I0120 17:53:08.586986 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f30aa605-d974-4d0d-aa96-5cb0d96897d6" path="/var/lib/kubelet/pods/f30aa605-d974-4d0d-aa96-5cb0d96897d6/volumes" Jan 20 17:53:08 crc kubenswrapper[4558]: I0120 17:53:08.622617 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-667f6b846-wn5fk" Jan 20 17:53:08 crc kubenswrapper[4558]: E0120 17:53:08.642215 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Jan 20 17:53:08 crc kubenswrapper[4558]: E0120 17:53:08.642315 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/c4a11c40-a157-4a28-b1a3-60c211d1d0bf-config-data podName:c4a11c40-a157-4a28-b1a3-60c211d1d0bf nodeName:}" failed. No retries permitted until 2026-01-20 17:53:16.642293439 +0000 UTC m=+4290.402631396 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/c4a11c40-a157-4a28-b1a3-60c211d1d0bf-config-data") pod "rabbitmq-cell1-server-0" (UID: "c4a11c40-a157-4a28-b1a3-60c211d1d0bf") : configmap "rabbitmq-cell1-config-data" not found Jan 20 17:53:08 crc kubenswrapper[4558]: I0120 17:53:08.743356 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/7db8891a-284a-47f5-b883-5aac563fc839-credential-keys\") pod \"7db8891a-284a-47f5-b883-5aac563fc839\" (UID: \"7db8891a-284a-47f5-b883-5aac563fc839\") " Jan 20 17:53:08 crc kubenswrapper[4558]: I0120 17:53:08.743471 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pp8gx\" (UniqueName: \"kubernetes.io/projected/7db8891a-284a-47f5-b883-5aac563fc839-kube-api-access-pp8gx\") pod \"7db8891a-284a-47f5-b883-5aac563fc839\" (UID: \"7db8891a-284a-47f5-b883-5aac563fc839\") " Jan 20 17:53:08 crc kubenswrapper[4558]: I0120 17:53:08.743514 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7db8891a-284a-47f5-b883-5aac563fc839-scripts\") pod \"7db8891a-284a-47f5-b883-5aac563fc839\" (UID: \"7db8891a-284a-47f5-b883-5aac563fc839\") " Jan 20 17:53:08 crc kubenswrapper[4558]: I0120 17:53:08.743541 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7db8891a-284a-47f5-b883-5aac563fc839-config-data\") pod \"7db8891a-284a-47f5-b883-5aac563fc839\" (UID: \"7db8891a-284a-47f5-b883-5aac563fc839\") " Jan 20 17:53:08 crc kubenswrapper[4558]: I0120 17:53:08.743562 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7db8891a-284a-47f5-b883-5aac563fc839-fernet-keys\") pod \"7db8891a-284a-47f5-b883-5aac563fc839\" (UID: \"7db8891a-284a-47f5-b883-5aac563fc839\") " Jan 20 17:53:08 crc kubenswrapper[4558]: I0120 17:53:08.743588 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7db8891a-284a-47f5-b883-5aac563fc839-public-tls-certs\") pod \"7db8891a-284a-47f5-b883-5aac563fc839\" (UID: \"7db8891a-284a-47f5-b883-5aac563fc839\") " Jan 20 17:53:08 crc kubenswrapper[4558]: I0120 17:53:08.743610 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7db8891a-284a-47f5-b883-5aac563fc839-combined-ca-bundle\") pod \"7db8891a-284a-47f5-b883-5aac563fc839\" (UID: \"7db8891a-284a-47f5-b883-5aac563fc839\") " Jan 20 17:53:08 crc kubenswrapper[4558]: I0120 17:53:08.744266 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7db8891a-284a-47f5-b883-5aac563fc839-internal-tls-certs\") pod \"7db8891a-284a-47f5-b883-5aac563fc839\" (UID: \"7db8891a-284a-47f5-b883-5aac563fc839\") " Jan 20 17:53:08 crc kubenswrapper[4558]: I0120 17:53:08.750058 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7db8891a-284a-47f5-b883-5aac563fc839-scripts" (OuterVolumeSpecName: "scripts") pod "7db8891a-284a-47f5-b883-5aac563fc839" (UID: "7db8891a-284a-47f5-b883-5aac563fc839"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:08 crc kubenswrapper[4558]: I0120 17:53:08.750113 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7db8891a-284a-47f5-b883-5aac563fc839-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "7db8891a-284a-47f5-b883-5aac563fc839" (UID: "7db8891a-284a-47f5-b883-5aac563fc839"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:08 crc kubenswrapper[4558]: I0120 17:53:08.750517 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7db8891a-284a-47f5-b883-5aac563fc839-kube-api-access-pp8gx" (OuterVolumeSpecName: "kube-api-access-pp8gx") pod "7db8891a-284a-47f5-b883-5aac563fc839" (UID: "7db8891a-284a-47f5-b883-5aac563fc839"). InnerVolumeSpecName "kube-api-access-pp8gx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:53:08 crc kubenswrapper[4558]: I0120 17:53:08.751250 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7db8891a-284a-47f5-b883-5aac563fc839-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "7db8891a-284a-47f5-b883-5aac563fc839" (UID: "7db8891a-284a-47f5-b883-5aac563fc839"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:08 crc kubenswrapper[4558]: I0120 17:53:08.775227 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7db8891a-284a-47f5-b883-5aac563fc839-config-data" (OuterVolumeSpecName: "config-data") pod "7db8891a-284a-47f5-b883-5aac563fc839" (UID: "7db8891a-284a-47f5-b883-5aac563fc839"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:08 crc kubenswrapper[4558]: I0120 17:53:08.777794 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7db8891a-284a-47f5-b883-5aac563fc839-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7db8891a-284a-47f5-b883-5aac563fc839" (UID: "7db8891a-284a-47f5-b883-5aac563fc839"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:08 crc kubenswrapper[4558]: I0120 17:53:08.783003 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7db8891a-284a-47f5-b883-5aac563fc839-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "7db8891a-284a-47f5-b883-5aac563fc839" (UID: "7db8891a-284a-47f5-b883-5aac563fc839"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:08 crc kubenswrapper[4558]: I0120 17:53:08.785586 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7db8891a-284a-47f5-b883-5aac563fc839-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "7db8891a-284a-47f5-b883-5aac563fc839" (UID: "7db8891a-284a-47f5-b883-5aac563fc839"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:08 crc kubenswrapper[4558]: I0120 17:53:08.845452 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pp8gx\" (UniqueName: \"kubernetes.io/projected/7db8891a-284a-47f5-b883-5aac563fc839-kube-api-access-pp8gx\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:08 crc kubenswrapper[4558]: I0120 17:53:08.845545 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7db8891a-284a-47f5-b883-5aac563fc839-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:08 crc kubenswrapper[4558]: I0120 17:53:08.845604 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7db8891a-284a-47f5-b883-5aac563fc839-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:08 crc kubenswrapper[4558]: I0120 17:53:08.845670 4558 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7db8891a-284a-47f5-b883-5aac563fc839-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:08 crc kubenswrapper[4558]: I0120 17:53:08.845732 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7db8891a-284a-47f5-b883-5aac563fc839-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:08 crc kubenswrapper[4558]: I0120 17:53:08.845784 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7db8891a-284a-47f5-b883-5aac563fc839-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:08 crc kubenswrapper[4558]: I0120 17:53:08.845831 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7db8891a-284a-47f5-b883-5aac563fc839-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:08 crc kubenswrapper[4558]: I0120 17:53:08.845880 4558 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/7db8891a-284a-47f5-b883-5aac563fc839-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:09 crc kubenswrapper[4558]: I0120 17:53:09.226299 4558 generic.go:334] "Generic (PLEG): container finished" podID="7db8891a-284a-47f5-b883-5aac563fc839" containerID="d6f9a9e764e6371a2dc8fd22991e16caacd06318d66716c401edaedf85f6c0d7" exitCode=0 Jan 20 17:53:09 crc kubenswrapper[4558]: I0120 17:53:09.227098 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-667f6b846-wn5fk" Jan 20 17:53:09 crc kubenswrapper[4558]: I0120 17:53:09.234341 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-667f6b846-wn5fk" event={"ID":"7db8891a-284a-47f5-b883-5aac563fc839","Type":"ContainerDied","Data":"d6f9a9e764e6371a2dc8fd22991e16caacd06318d66716c401edaedf85f6c0d7"} Jan 20 17:53:09 crc kubenswrapper[4558]: I0120 17:53:09.234376 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-667f6b846-wn5fk" event={"ID":"7db8891a-284a-47f5-b883-5aac563fc839","Type":"ContainerDied","Data":"96cf3ec354af83911559cda738591158baa9fb6d63952c3f99c67539a15a1ea7"} Jan 20 17:53:09 crc kubenswrapper[4558]: I0120 17:53:09.234399 4558 scope.go:117] "RemoveContainer" containerID="d6f9a9e764e6371a2dc8fd22991e16caacd06318d66716c401edaedf85f6c0d7" Jan 20 17:53:09 crc kubenswrapper[4558]: I0120 17:53:09.264359 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-667f6b846-wn5fk"] Jan 20 17:53:09 crc kubenswrapper[4558]: I0120 17:53:09.267339 4558 scope.go:117] "RemoveContainer" containerID="d6f9a9e764e6371a2dc8fd22991e16caacd06318d66716c401edaedf85f6c0d7" Jan 20 17:53:09 crc kubenswrapper[4558]: E0120 17:53:09.267692 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d6f9a9e764e6371a2dc8fd22991e16caacd06318d66716c401edaedf85f6c0d7\": container with ID starting with d6f9a9e764e6371a2dc8fd22991e16caacd06318d66716c401edaedf85f6c0d7 not found: ID does not exist" containerID="d6f9a9e764e6371a2dc8fd22991e16caacd06318d66716c401edaedf85f6c0d7" Jan 20 17:53:09 crc kubenswrapper[4558]: I0120 17:53:09.267742 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d6f9a9e764e6371a2dc8fd22991e16caacd06318d66716c401edaedf85f6c0d7"} err="failed to get container status \"d6f9a9e764e6371a2dc8fd22991e16caacd06318d66716c401edaedf85f6c0d7\": rpc error: code = NotFound desc = could not find container \"d6f9a9e764e6371a2dc8fd22991e16caacd06318d66716c401edaedf85f6c0d7\": container with ID starting with d6f9a9e764e6371a2dc8fd22991e16caacd06318d66716c401edaedf85f6c0d7 not found: ID does not exist" Jan 20 17:53:09 crc kubenswrapper[4558]: I0120 17:53:09.271114 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-667f6b846-wn5fk"] Jan 20 17:53:09 crc kubenswrapper[4558]: E0120 17:53:09.318941 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d99379340565779627596cecb15d625f9bcb83d751ea4f5f14fb7cb8bcb8a6b6" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:53:09 crc kubenswrapper[4558]: E0120 17:53:09.320294 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d99379340565779627596cecb15d625f9bcb83d751ea4f5f14fb7cb8bcb8a6b6" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:53:09 crc kubenswrapper[4558]: E0120 17:53:09.321639 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" 
containerID="d99379340565779627596cecb15d625f9bcb83d751ea4f5f14fb7cb8bcb8a6b6" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:53:09 crc kubenswrapper[4558]: E0120 17:53:09.321674 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="924fa7ce-8d60-4b2f-b62b-d5e146474f71" containerName="nova-cell0-conductor-conductor" Jan 20 17:53:09 crc kubenswrapper[4558]: E0120 17:53:09.357669 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 20 17:53:09 crc kubenswrapper[4558]: E0120 17:53:09.357743 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/088e44cc-3515-4736-aa46-721774902209-config-data podName:088e44cc-3515-4736-aa46-721774902209 nodeName:}" failed. No retries permitted until 2026-01-20 17:53:17.357720642 +0000 UTC m=+4291.118058609 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/088e44cc-3515-4736-aa46-721774902209-config-data") pod "rabbitmq-server-0" (UID: "088e44cc-3515-4736-aa46-721774902209") : configmap "rabbitmq-config-data" not found Jan 20 17:53:09 crc kubenswrapper[4558]: I0120 17:53:09.566511 4558 scope.go:117] "RemoveContainer" containerID="4ea7fa61d8b21007a044345acec89fcebe56c2921cf0f76ff2002f44bdc88ede" Jan 20 17:53:09 crc kubenswrapper[4558]: E0120 17:53:09.567282 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:53:09 crc kubenswrapper[4558]: I0120 17:53:09.853394 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:53:09 crc kubenswrapper[4558]: I0120 17:53:09.966195 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c4a11c40-a157-4a28-b1a3-60c211d1d0bf-plugins-conf\") pod \"c4a11c40-a157-4a28-b1a3-60c211d1d0bf\" (UID: \"c4a11c40-a157-4a28-b1a3-60c211d1d0bf\") " Jan 20 17:53:09 crc kubenswrapper[4558]: I0120 17:53:09.966317 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"c4a11c40-a157-4a28-b1a3-60c211d1d0bf\" (UID: \"c4a11c40-a157-4a28-b1a3-60c211d1d0bf\") " Jan 20 17:53:09 crc kubenswrapper[4558]: I0120 17:53:09.966398 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c4a11c40-a157-4a28-b1a3-60c211d1d0bf-rabbitmq-confd\") pod \"c4a11c40-a157-4a28-b1a3-60c211d1d0bf\" (UID: \"c4a11c40-a157-4a28-b1a3-60c211d1d0bf\") " Jan 20 17:53:09 crc kubenswrapper[4558]: I0120 17:53:09.966509 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c4a11c40-a157-4a28-b1a3-60c211d1d0bf-config-data\") pod \"c4a11c40-a157-4a28-b1a3-60c211d1d0bf\" (UID: \"c4a11c40-a157-4a28-b1a3-60c211d1d0bf\") " Jan 20 17:53:09 crc kubenswrapper[4558]: I0120 17:53:09.966835 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c4a11c40-a157-4a28-b1a3-60c211d1d0bf-rabbitmq-tls\") pod \"c4a11c40-a157-4a28-b1a3-60c211d1d0bf\" (UID: \"c4a11c40-a157-4a28-b1a3-60c211d1d0bf\") " Jan 20 17:53:09 crc kubenswrapper[4558]: I0120 17:53:09.966960 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c4a11c40-a157-4a28-b1a3-60c211d1d0bf-server-conf\") pod \"c4a11c40-a157-4a28-b1a3-60c211d1d0bf\" (UID: \"c4a11c40-a157-4a28-b1a3-60c211d1d0bf\") " Jan 20 17:53:09 crc kubenswrapper[4558]: I0120 17:53:09.967041 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c4a11c40-a157-4a28-b1a3-60c211d1d0bf-rabbitmq-plugins\") pod \"c4a11c40-a157-4a28-b1a3-60c211d1d0bf\" (UID: \"c4a11c40-a157-4a28-b1a3-60c211d1d0bf\") " Jan 20 17:53:09 crc kubenswrapper[4558]: I0120 17:53:09.967106 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c4a11c40-a157-4a28-b1a3-60c211d1d0bf-rabbitmq-erlang-cookie\") pod \"c4a11c40-a157-4a28-b1a3-60c211d1d0bf\" (UID: \"c4a11c40-a157-4a28-b1a3-60c211d1d0bf\") " Jan 20 17:53:09 crc kubenswrapper[4558]: I0120 17:53:09.967222 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c4a11c40-a157-4a28-b1a3-60c211d1d0bf-pod-info\") pod \"c4a11c40-a157-4a28-b1a3-60c211d1d0bf\" (UID: \"c4a11c40-a157-4a28-b1a3-60c211d1d0bf\") " Jan 20 17:53:09 crc kubenswrapper[4558]: I0120 17:53:09.967309 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c4a11c40-a157-4a28-b1a3-60c211d1d0bf-erlang-cookie-secret\") pod \"c4a11c40-a157-4a28-b1a3-60c211d1d0bf\" (UID: 
\"c4a11c40-a157-4a28-b1a3-60c211d1d0bf\") " Jan 20 17:53:09 crc kubenswrapper[4558]: I0120 17:53:09.967381 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dwg9p\" (UniqueName: \"kubernetes.io/projected/c4a11c40-a157-4a28-b1a3-60c211d1d0bf-kube-api-access-dwg9p\") pod \"c4a11c40-a157-4a28-b1a3-60c211d1d0bf\" (UID: \"c4a11c40-a157-4a28-b1a3-60c211d1d0bf\") " Jan 20 17:53:09 crc kubenswrapper[4558]: I0120 17:53:09.966951 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c4a11c40-a157-4a28-b1a3-60c211d1d0bf-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "c4a11c40-a157-4a28-b1a3-60c211d1d0bf" (UID: "c4a11c40-a157-4a28-b1a3-60c211d1d0bf"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:53:09 crc kubenswrapper[4558]: I0120 17:53:09.967630 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c4a11c40-a157-4a28-b1a3-60c211d1d0bf-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "c4a11c40-a157-4a28-b1a3-60c211d1d0bf" (UID: "c4a11c40-a157-4a28-b1a3-60c211d1d0bf"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:53:09 crc kubenswrapper[4558]: I0120 17:53:09.967812 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c4a11c40-a157-4a28-b1a3-60c211d1d0bf-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "c4a11c40-a157-4a28-b1a3-60c211d1d0bf" (UID: "c4a11c40-a157-4a28-b1a3-60c211d1d0bf"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:53:09 crc kubenswrapper[4558]: I0120 17:53:09.972039 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c4a11c40-a157-4a28-b1a3-60c211d1d0bf-kube-api-access-dwg9p" (OuterVolumeSpecName: "kube-api-access-dwg9p") pod "c4a11c40-a157-4a28-b1a3-60c211d1d0bf" (UID: "c4a11c40-a157-4a28-b1a3-60c211d1d0bf"). InnerVolumeSpecName "kube-api-access-dwg9p". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:53:09 crc kubenswrapper[4558]: I0120 17:53:09.972071 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/c4a11c40-a157-4a28-b1a3-60c211d1d0bf-pod-info" (OuterVolumeSpecName: "pod-info") pod "c4a11c40-a157-4a28-b1a3-60c211d1d0bf" (UID: "c4a11c40-a157-4a28-b1a3-60c211d1d0bf"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Jan 20 17:53:09 crc kubenswrapper[4558]: I0120 17:53:09.972569 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c4a11c40-a157-4a28-b1a3-60c211d1d0bf-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "c4a11c40-a157-4a28-b1a3-60c211d1d0bf" (UID: "c4a11c40-a157-4a28-b1a3-60c211d1d0bf"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:53:09 crc kubenswrapper[4558]: I0120 17:53:09.972622 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c4a11c40-a157-4a28-b1a3-60c211d1d0bf-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "c4a11c40-a157-4a28-b1a3-60c211d1d0bf" (UID: "c4a11c40-a157-4a28-b1a3-60c211d1d0bf"). InnerVolumeSpecName "erlang-cookie-secret". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:09 crc kubenswrapper[4558]: I0120 17:53:09.978279 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage06-crc" (OuterVolumeSpecName: "persistence") pod "c4a11c40-a157-4a28-b1a3-60c211d1d0bf" (UID: "c4a11c40-a157-4a28-b1a3-60c211d1d0bf"). InnerVolumeSpecName "local-storage06-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:53:09 crc kubenswrapper[4558]: I0120 17:53:09.986310 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c4a11c40-a157-4a28-b1a3-60c211d1d0bf-config-data" (OuterVolumeSpecName: "config-data") pod "c4a11c40-a157-4a28-b1a3-60c211d1d0bf" (UID: "c4a11c40-a157-4a28-b1a3-60c211d1d0bf"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:53:10 crc kubenswrapper[4558]: I0120 17:53:10.001730 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c4a11c40-a157-4a28-b1a3-60c211d1d0bf-server-conf" (OuterVolumeSpecName: "server-conf") pod "c4a11c40-a157-4a28-b1a3-60c211d1d0bf" (UID: "c4a11c40-a157-4a28-b1a3-60c211d1d0bf"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:53:10 crc kubenswrapper[4558]: I0120 17:53:10.023878 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c4a11c40-a157-4a28-b1a3-60c211d1d0bf-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "c4a11c40-a157-4a28-b1a3-60c211d1d0bf" (UID: "c4a11c40-a157-4a28-b1a3-60c211d1d0bf"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:53:10 crc kubenswrapper[4558]: I0120 17:53:10.069522 4558 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c4a11c40-a157-4a28-b1a3-60c211d1d0bf-server-conf\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:10 crc kubenswrapper[4558]: I0120 17:53:10.069554 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c4a11c40-a157-4a28-b1a3-60c211d1d0bf-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:10 crc kubenswrapper[4558]: I0120 17:53:10.069567 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c4a11c40-a157-4a28-b1a3-60c211d1d0bf-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:10 crc kubenswrapper[4558]: I0120 17:53:10.069578 4558 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c4a11c40-a157-4a28-b1a3-60c211d1d0bf-pod-info\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:10 crc kubenswrapper[4558]: I0120 17:53:10.069590 4558 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c4a11c40-a157-4a28-b1a3-60c211d1d0bf-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:10 crc kubenswrapper[4558]: I0120 17:53:10.069604 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dwg9p\" (UniqueName: \"kubernetes.io/projected/c4a11c40-a157-4a28-b1a3-60c211d1d0bf-kube-api-access-dwg9p\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:10 crc kubenswrapper[4558]: I0120 17:53:10.069613 4558 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" 
(UniqueName: \"kubernetes.io/configmap/c4a11c40-a157-4a28-b1a3-60c211d1d0bf-plugins-conf\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:10 crc kubenswrapper[4558]: I0120 17:53:10.069651 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" " Jan 20 17:53:10 crc kubenswrapper[4558]: I0120 17:53:10.069662 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c4a11c40-a157-4a28-b1a3-60c211d1d0bf-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:10 crc kubenswrapper[4558]: I0120 17:53:10.069671 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c4a11c40-a157-4a28-b1a3-60c211d1d0bf-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:10 crc kubenswrapper[4558]: I0120 17:53:10.069680 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c4a11c40-a157-4a28-b1a3-60c211d1d0bf-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:10 crc kubenswrapper[4558]: I0120 17:53:10.081583 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage06-crc" (UniqueName: "kubernetes.io/local-volume/local-storage06-crc") on node "crc" Jan 20 17:53:10 crc kubenswrapper[4558]: I0120 17:53:10.174637 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:10 crc kubenswrapper[4558]: I0120 17:53:10.240641 4558 generic.go:334] "Generic (PLEG): container finished" podID="c4a11c40-a157-4a28-b1a3-60c211d1d0bf" containerID="ca5aa63a108196583f0f9950193b9c697a06235d9a0ce251fae6acde947d41cf" exitCode=0 Jan 20 17:53:10 crc kubenswrapper[4558]: I0120 17:53:10.240740 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" event={"ID":"c4a11c40-a157-4a28-b1a3-60c211d1d0bf","Type":"ContainerDied","Data":"ca5aa63a108196583f0f9950193b9c697a06235d9a0ce251fae6acde947d41cf"} Jan 20 17:53:10 crc kubenswrapper[4558]: I0120 17:53:10.240762 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:53:10 crc kubenswrapper[4558]: I0120 17:53:10.240826 4558 scope.go:117] "RemoveContainer" containerID="ca5aa63a108196583f0f9950193b9c697a06235d9a0ce251fae6acde947d41cf" Jan 20 17:53:10 crc kubenswrapper[4558]: I0120 17:53:10.240808 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" event={"ID":"c4a11c40-a157-4a28-b1a3-60c211d1d0bf","Type":"ContainerDied","Data":"28ac71207cfb5bffba5917702d239d24dbca21efa7bf369a7b7619c0e415376f"} Jan 20 17:53:10 crc kubenswrapper[4558]: I0120 17:53:10.305990 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-0"] Jan 20 17:53:10 crc kubenswrapper[4558]: I0120 17:53:10.312746 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-0"] Jan 20 17:53:10 crc kubenswrapper[4558]: I0120 17:53:10.324875 4558 scope.go:117] "RemoveContainer" containerID="a4bc9f62c0f630c628a87ed4cdeafe946a3360d7bb2a8e7f9e501c633d62f580" Jan 20 17:53:10 crc kubenswrapper[4558]: I0120 17:53:10.346724 4558 scope.go:117] "RemoveContainer" containerID="ca5aa63a108196583f0f9950193b9c697a06235d9a0ce251fae6acde947d41cf" Jan 20 17:53:10 crc kubenswrapper[4558]: E0120 17:53:10.347102 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ca5aa63a108196583f0f9950193b9c697a06235d9a0ce251fae6acde947d41cf\": container with ID starting with ca5aa63a108196583f0f9950193b9c697a06235d9a0ce251fae6acde947d41cf not found: ID does not exist" containerID="ca5aa63a108196583f0f9950193b9c697a06235d9a0ce251fae6acde947d41cf" Jan 20 17:53:10 crc kubenswrapper[4558]: I0120 17:53:10.347229 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ca5aa63a108196583f0f9950193b9c697a06235d9a0ce251fae6acde947d41cf"} err="failed to get container status \"ca5aa63a108196583f0f9950193b9c697a06235d9a0ce251fae6acde947d41cf\": rpc error: code = NotFound desc = could not find container \"ca5aa63a108196583f0f9950193b9c697a06235d9a0ce251fae6acde947d41cf\": container with ID starting with ca5aa63a108196583f0f9950193b9c697a06235d9a0ce251fae6acde947d41cf not found: ID does not exist" Jan 20 17:53:10 crc kubenswrapper[4558]: I0120 17:53:10.347273 4558 scope.go:117] "RemoveContainer" containerID="a4bc9f62c0f630c628a87ed4cdeafe946a3360d7bb2a8e7f9e501c633d62f580" Jan 20 17:53:10 crc kubenswrapper[4558]: E0120 17:53:10.347661 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a4bc9f62c0f630c628a87ed4cdeafe946a3360d7bb2a8e7f9e501c633d62f580\": container with ID starting with a4bc9f62c0f630c628a87ed4cdeafe946a3360d7bb2a8e7f9e501c633d62f580 not found: ID does not exist" containerID="a4bc9f62c0f630c628a87ed4cdeafe946a3360d7bb2a8e7f9e501c633d62f580" Jan 20 17:53:10 crc kubenswrapper[4558]: I0120 17:53:10.347688 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a4bc9f62c0f630c628a87ed4cdeafe946a3360d7bb2a8e7f9e501c633d62f580"} err="failed to get container status \"a4bc9f62c0f630c628a87ed4cdeafe946a3360d7bb2a8e7f9e501c633d62f580\": rpc error: code = NotFound desc = could not find container \"a4bc9f62c0f630c628a87ed4cdeafe946a3360d7bb2a8e7f9e501c633d62f580\": container with ID starting with a4bc9f62c0f630c628a87ed4cdeafe946a3360d7bb2a8e7f9e501c633d62f580 not found: ID 
does not exist" Jan 20 17:53:10 crc kubenswrapper[4558]: I0120 17:53:10.578632 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7db8891a-284a-47f5-b883-5aac563fc839" path="/var/lib/kubelet/pods/7db8891a-284a-47f5-b883-5aac563fc839/volumes" Jan 20 17:53:10 crc kubenswrapper[4558]: I0120 17:53:10.579860 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c4a11c40-a157-4a28-b1a3-60c211d1d0bf" path="/var/lib/kubelet/pods/c4a11c40-a157-4a28-b1a3-60c211d1d0bf/volumes" Jan 20 17:53:10 crc kubenswrapper[4558]: I0120 17:53:10.586519 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:53:10 crc kubenswrapper[4558]: I0120 17:53:10.684078 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/088e44cc-3515-4736-aa46-721774902209-rabbitmq-plugins\") pod \"088e44cc-3515-4736-aa46-721774902209\" (UID: \"088e44cc-3515-4736-aa46-721774902209\") " Jan 20 17:53:10 crc kubenswrapper[4558]: I0120 17:53:10.684127 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"088e44cc-3515-4736-aa46-721774902209\" (UID: \"088e44cc-3515-4736-aa46-721774902209\") " Jan 20 17:53:10 crc kubenswrapper[4558]: I0120 17:53:10.684179 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/088e44cc-3515-4736-aa46-721774902209-plugins-conf\") pod \"088e44cc-3515-4736-aa46-721774902209\" (UID: \"088e44cc-3515-4736-aa46-721774902209\") " Jan 20 17:53:10 crc kubenswrapper[4558]: I0120 17:53:10.684231 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x65sw\" (UniqueName: \"kubernetes.io/projected/088e44cc-3515-4736-aa46-721774902209-kube-api-access-x65sw\") pod \"088e44cc-3515-4736-aa46-721774902209\" (UID: \"088e44cc-3515-4736-aa46-721774902209\") " Jan 20 17:53:10 crc kubenswrapper[4558]: I0120 17:53:10.684704 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/088e44cc-3515-4736-aa46-721774902209-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "088e44cc-3515-4736-aa46-721774902209" (UID: "088e44cc-3515-4736-aa46-721774902209"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:53:10 crc kubenswrapper[4558]: I0120 17:53:10.684745 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/088e44cc-3515-4736-aa46-721774902209-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "088e44cc-3515-4736-aa46-721774902209" (UID: "088e44cc-3515-4736-aa46-721774902209"). InnerVolumeSpecName "plugins-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:53:10 crc kubenswrapper[4558]: I0120 17:53:10.684893 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/088e44cc-3515-4736-aa46-721774902209-erlang-cookie-secret\") pod \"088e44cc-3515-4736-aa46-721774902209\" (UID: \"088e44cc-3515-4736-aa46-721774902209\") " Jan 20 17:53:10 crc kubenswrapper[4558]: I0120 17:53:10.685435 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/088e44cc-3515-4736-aa46-721774902209-config-data\") pod \"088e44cc-3515-4736-aa46-721774902209\" (UID: \"088e44cc-3515-4736-aa46-721774902209\") " Jan 20 17:53:10 crc kubenswrapper[4558]: I0120 17:53:10.685487 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/088e44cc-3515-4736-aa46-721774902209-rabbitmq-tls\") pod \"088e44cc-3515-4736-aa46-721774902209\" (UID: \"088e44cc-3515-4736-aa46-721774902209\") " Jan 20 17:53:10 crc kubenswrapper[4558]: I0120 17:53:10.685947 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/088e44cc-3515-4736-aa46-721774902209-pod-info\") pod \"088e44cc-3515-4736-aa46-721774902209\" (UID: \"088e44cc-3515-4736-aa46-721774902209\") " Jan 20 17:53:10 crc kubenswrapper[4558]: I0120 17:53:10.686346 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/088e44cc-3515-4736-aa46-721774902209-rabbitmq-erlang-cookie\") pod \"088e44cc-3515-4736-aa46-721774902209\" (UID: \"088e44cc-3515-4736-aa46-721774902209\") " Jan 20 17:53:10 crc kubenswrapper[4558]: I0120 17:53:10.686372 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/088e44cc-3515-4736-aa46-721774902209-rabbitmq-confd\") pod \"088e44cc-3515-4736-aa46-721774902209\" (UID: \"088e44cc-3515-4736-aa46-721774902209\") " Jan 20 17:53:10 crc kubenswrapper[4558]: I0120 17:53:10.686401 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/088e44cc-3515-4736-aa46-721774902209-server-conf\") pod \"088e44cc-3515-4736-aa46-721774902209\" (UID: \"088e44cc-3515-4736-aa46-721774902209\") " Jan 20 17:53:10 crc kubenswrapper[4558]: I0120 17:53:10.687126 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/088e44cc-3515-4736-aa46-721774902209-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "088e44cc-3515-4736-aa46-721774902209" (UID: "088e44cc-3515-4736-aa46-721774902209"). InnerVolumeSpecName "rabbitmq-erlang-cookie". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:53:10 crc kubenswrapper[4558]: I0120 17:53:10.687915 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/088e44cc-3515-4736-aa46-721774902209-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:10 crc kubenswrapper[4558]: I0120 17:53:10.687948 4558 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/088e44cc-3515-4736-aa46-721774902209-plugins-conf\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:10 crc kubenswrapper[4558]: I0120 17:53:10.687959 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/088e44cc-3515-4736-aa46-721774902209-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:10 crc kubenswrapper[4558]: I0120 17:53:10.688846 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/088e44cc-3515-4736-aa46-721774902209-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "088e44cc-3515-4736-aa46-721774902209" (UID: "088e44cc-3515-4736-aa46-721774902209"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:53:10 crc kubenswrapper[4558]: I0120 17:53:10.689432 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/088e44cc-3515-4736-aa46-721774902209-kube-api-access-x65sw" (OuterVolumeSpecName: "kube-api-access-x65sw") pod "088e44cc-3515-4736-aa46-721774902209" (UID: "088e44cc-3515-4736-aa46-721774902209"). InnerVolumeSpecName "kube-api-access-x65sw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:53:10 crc kubenswrapper[4558]: I0120 17:53:10.689862 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/088e44cc-3515-4736-aa46-721774902209-pod-info" (OuterVolumeSpecName: "pod-info") pod "088e44cc-3515-4736-aa46-721774902209" (UID: "088e44cc-3515-4736-aa46-721774902209"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Jan 20 17:53:10 crc kubenswrapper[4558]: I0120 17:53:10.689981 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/088e44cc-3515-4736-aa46-721774902209-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "088e44cc-3515-4736-aa46-721774902209" (UID: "088e44cc-3515-4736-aa46-721774902209"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:10 crc kubenswrapper[4558]: I0120 17:53:10.690982 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage12-crc" (OuterVolumeSpecName: "persistence") pod "088e44cc-3515-4736-aa46-721774902209" (UID: "088e44cc-3515-4736-aa46-721774902209"). InnerVolumeSpecName "local-storage12-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:53:10 crc kubenswrapper[4558]: I0120 17:53:10.704992 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/088e44cc-3515-4736-aa46-721774902209-config-data" (OuterVolumeSpecName: "config-data") pod "088e44cc-3515-4736-aa46-721774902209" (UID: "088e44cc-3515-4736-aa46-721774902209"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:53:10 crc kubenswrapper[4558]: I0120 17:53:10.715313 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/088e44cc-3515-4736-aa46-721774902209-server-conf" (OuterVolumeSpecName: "server-conf") pod "088e44cc-3515-4736-aa46-721774902209" (UID: "088e44cc-3515-4736-aa46-721774902209"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:53:10 crc kubenswrapper[4558]: I0120 17:53:10.743385 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/088e44cc-3515-4736-aa46-721774902209-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "088e44cc-3515-4736-aa46-721774902209" (UID: "088e44cc-3515-4736-aa46-721774902209"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:53:10 crc kubenswrapper[4558]: I0120 17:53:10.789186 4558 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/088e44cc-3515-4736-aa46-721774902209-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:10 crc kubenswrapper[4558]: I0120 17:53:10.789218 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/088e44cc-3515-4736-aa46-721774902209-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:10 crc kubenswrapper[4558]: I0120 17:53:10.789229 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/088e44cc-3515-4736-aa46-721774902209-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:10 crc kubenswrapper[4558]: I0120 17:53:10.789239 4558 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/088e44cc-3515-4736-aa46-721774902209-pod-info\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:10 crc kubenswrapper[4558]: I0120 17:53:10.789250 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/088e44cc-3515-4736-aa46-721774902209-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:10 crc kubenswrapper[4558]: I0120 17:53:10.789260 4558 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/088e44cc-3515-4736-aa46-721774902209-server-conf\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:10 crc kubenswrapper[4558]: I0120 17:53:10.789293 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" " Jan 20 17:53:10 crc kubenswrapper[4558]: I0120 17:53:10.789307 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x65sw\" (UniqueName: \"kubernetes.io/projected/088e44cc-3515-4736-aa46-721774902209-kube-api-access-x65sw\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:10 crc kubenswrapper[4558]: I0120 17:53:10.803178 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage12-crc" (UniqueName: "kubernetes.io/local-volume/local-storage12-crc") on node "crc" Jan 20 17:53:10 crc kubenswrapper[4558]: I0120 17:53:10.890860 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:11 crc 
kubenswrapper[4558]: I0120 17:53:11.257760 4558 generic.go:334] "Generic (PLEG): container finished" podID="088e44cc-3515-4736-aa46-721774902209" containerID="0cda7f898a6fe83eef5b3e9a295e4cc99749a10d45642f677ab0d846c1aac497" exitCode=0 Jan 20 17:53:11 crc kubenswrapper[4558]: I0120 17:53:11.257835 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-0" event={"ID":"088e44cc-3515-4736-aa46-721774902209","Type":"ContainerDied","Data":"0cda7f898a6fe83eef5b3e9a295e4cc99749a10d45642f677ab0d846c1aac497"} Jan 20 17:53:11 crc kubenswrapper[4558]: I0120 17:53:11.257874 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-0" event={"ID":"088e44cc-3515-4736-aa46-721774902209","Type":"ContainerDied","Data":"4917e58b675184a62e14b5f3708376d411550b89d2c51d2c439c824d48cbd4de"} Jan 20 17:53:11 crc kubenswrapper[4558]: I0120 17:53:11.257900 4558 scope.go:117] "RemoveContainer" containerID="0cda7f898a6fe83eef5b3e9a295e4cc99749a10d45642f677ab0d846c1aac497" Jan 20 17:53:11 crc kubenswrapper[4558]: I0120 17:53:11.258075 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:53:11 crc kubenswrapper[4558]: I0120 17:53:11.306123 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:53:11 crc kubenswrapper[4558]: I0120 17:53:11.312902 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:53:11 crc kubenswrapper[4558]: I0120 17:53:11.330958 4558 scope.go:117] "RemoveContainer" containerID="dfbb9c8e1cee5c7cb472c9c48b441fb80ff852d27db0124a79c1d7f68c8fa720" Jan 20 17:53:11 crc kubenswrapper[4558]: I0120 17:53:11.346916 4558 scope.go:117] "RemoveContainer" containerID="0cda7f898a6fe83eef5b3e9a295e4cc99749a10d45642f677ab0d846c1aac497" Jan 20 17:53:11 crc kubenswrapper[4558]: E0120 17:53:11.347641 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0cda7f898a6fe83eef5b3e9a295e4cc99749a10d45642f677ab0d846c1aac497\": container with ID starting with 0cda7f898a6fe83eef5b3e9a295e4cc99749a10d45642f677ab0d846c1aac497 not found: ID does not exist" containerID="0cda7f898a6fe83eef5b3e9a295e4cc99749a10d45642f677ab0d846c1aac497" Jan 20 17:53:11 crc kubenswrapper[4558]: I0120 17:53:11.347677 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0cda7f898a6fe83eef5b3e9a295e4cc99749a10d45642f677ab0d846c1aac497"} err="failed to get container status \"0cda7f898a6fe83eef5b3e9a295e4cc99749a10d45642f677ab0d846c1aac497\": rpc error: code = NotFound desc = could not find container \"0cda7f898a6fe83eef5b3e9a295e4cc99749a10d45642f677ab0d846c1aac497\": container with ID starting with 0cda7f898a6fe83eef5b3e9a295e4cc99749a10d45642f677ab0d846c1aac497 not found: ID does not exist" Jan 20 17:53:11 crc kubenswrapper[4558]: I0120 17:53:11.347704 4558 scope.go:117] "RemoveContainer" containerID="dfbb9c8e1cee5c7cb472c9c48b441fb80ff852d27db0124a79c1d7f68c8fa720" Jan 20 17:53:11 crc kubenswrapper[4558]: E0120 17:53:11.348023 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dfbb9c8e1cee5c7cb472c9c48b441fb80ff852d27db0124a79c1d7f68c8fa720\": container with ID starting with dfbb9c8e1cee5c7cb472c9c48b441fb80ff852d27db0124a79c1d7f68c8fa720 not found: ID does not exist" 
containerID="dfbb9c8e1cee5c7cb472c9c48b441fb80ff852d27db0124a79c1d7f68c8fa720" Jan 20 17:53:11 crc kubenswrapper[4558]: I0120 17:53:11.348152 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dfbb9c8e1cee5c7cb472c9c48b441fb80ff852d27db0124a79c1d7f68c8fa720"} err="failed to get container status \"dfbb9c8e1cee5c7cb472c9c48b441fb80ff852d27db0124a79c1d7f68c8fa720\": rpc error: code = NotFound desc = could not find container \"dfbb9c8e1cee5c7cb472c9c48b441fb80ff852d27db0124a79c1d7f68c8fa720\": container with ID starting with dfbb9c8e1cee5c7cb472c9c48b441fb80ff852d27db0124a79c1d7f68c8fa720 not found: ID does not exist" Jan 20 17:53:11 crc kubenswrapper[4558]: E0120 17:53:11.441047 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b4a7a63ae5b4336f1c55496b20962afe4a27040b0e22d712c664548644815e60" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:53:11 crc kubenswrapper[4558]: E0120 17:53:11.443053 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b4a7a63ae5b4336f1c55496b20962afe4a27040b0e22d712c664548644815e60" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:53:11 crc kubenswrapper[4558]: E0120 17:53:11.444031 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b4a7a63ae5b4336f1c55496b20962afe4a27040b0e22d712c664548644815e60" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:53:11 crc kubenswrapper[4558]: E0120 17:53:11.444056 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podUID="7c16f0c4-1462-4817-9f74-9e3d93193867" containerName="nova-cell1-conductor-conductor" Jan 20 17:53:12 crc kubenswrapper[4558]: E0120 17:53:12.465454 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ce0da192b65bf7c5e90dd832e0212db51fd4f3e04276e89c0f7b9256d88b752a" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:53:12 crc kubenswrapper[4558]: E0120 17:53:12.468070 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ce0da192b65bf7c5e90dd832e0212db51fd4f3e04276e89c0f7b9256d88b752a" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:53:12 crc kubenswrapper[4558]: E0120 17:53:12.469578 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ce0da192b65bf7c5e90dd832e0212db51fd4f3e04276e89c0f7b9256d88b752a" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:53:12 crc kubenswrapper[4558]: E0120 17:53:12.469669 4558 prober.go:104] "Probe errored" err="rpc error: code = 
Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2" containerName="nova-scheduler-scheduler" Jan 20 17:53:12 crc kubenswrapper[4558]: I0120 17:53:12.590048 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="088e44cc-3515-4736-aa46-721774902209" path="/var/lib/kubelet/pods/088e44cc-3515-4736-aa46-721774902209/volumes" Jan 20 17:53:13 crc kubenswrapper[4558]: I0120 17:53:13.017328 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-zmctf" Jan 20 17:53:13 crc kubenswrapper[4558]: I0120 17:53:13.067432 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-66c9456f9f-vp4hx"] Jan 20 17:53:13 crc kubenswrapper[4558]: I0120 17:53:13.068808 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-66c9456f9f-vp4hx" podUID="97bee565-aafb-4ccc-956b-d94ac410e50e" containerName="dnsmasq-dns" containerID="cri-o://9884ce142e2764c93ae8f86078f47582909d78eefe96fec6dbd1907c59c8b11e" gracePeriod=10 Jan 20 17:53:13 crc kubenswrapper[4558]: I0120 17:53:13.304953 4558 generic.go:334] "Generic (PLEG): container finished" podID="97bee565-aafb-4ccc-956b-d94ac410e50e" containerID="9884ce142e2764c93ae8f86078f47582909d78eefe96fec6dbd1907c59c8b11e" exitCode=0 Jan 20 17:53:13 crc kubenswrapper[4558]: I0120 17:53:13.305013 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-66c9456f9f-vp4hx" event={"ID":"97bee565-aafb-4ccc-956b-d94ac410e50e","Type":"ContainerDied","Data":"9884ce142e2764c93ae8f86078f47582909d78eefe96fec6dbd1907c59c8b11e"} Jan 20 17:53:13 crc kubenswrapper[4558]: I0120 17:53:13.517761 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-66c9456f9f-vp4hx" Jan 20 17:53:13 crc kubenswrapper[4558]: I0120 17:53:13.634825 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/97bee565-aafb-4ccc-956b-d94ac410e50e-config\") pod \"97bee565-aafb-4ccc-956b-d94ac410e50e\" (UID: \"97bee565-aafb-4ccc-956b-d94ac410e50e\") " Jan 20 17:53:13 crc kubenswrapper[4558]: I0120 17:53:13.634915 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xbzsz\" (UniqueName: \"kubernetes.io/projected/97bee565-aafb-4ccc-956b-d94ac410e50e-kube-api-access-xbzsz\") pod \"97bee565-aafb-4ccc-956b-d94ac410e50e\" (UID: \"97bee565-aafb-4ccc-956b-d94ac410e50e\") " Jan 20 17:53:13 crc kubenswrapper[4558]: I0120 17:53:13.634947 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/97bee565-aafb-4ccc-956b-d94ac410e50e-dnsmasq-svc\") pod \"97bee565-aafb-4ccc-956b-d94ac410e50e\" (UID: \"97bee565-aafb-4ccc-956b-d94ac410e50e\") " Jan 20 17:53:13 crc kubenswrapper[4558]: I0120 17:53:13.635145 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/97bee565-aafb-4ccc-956b-d94ac410e50e-dns-swift-storage-0\") pod \"97bee565-aafb-4ccc-956b-d94ac410e50e\" (UID: \"97bee565-aafb-4ccc-956b-d94ac410e50e\") " Jan 20 17:53:13 crc kubenswrapper[4558]: I0120 17:53:13.653414 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/97bee565-aafb-4ccc-956b-d94ac410e50e-kube-api-access-xbzsz" (OuterVolumeSpecName: "kube-api-access-xbzsz") pod "97bee565-aafb-4ccc-956b-d94ac410e50e" (UID: "97bee565-aafb-4ccc-956b-d94ac410e50e"). InnerVolumeSpecName "kube-api-access-xbzsz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:53:13 crc kubenswrapper[4558]: I0120 17:53:13.667725 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/97bee565-aafb-4ccc-956b-d94ac410e50e-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "97bee565-aafb-4ccc-956b-d94ac410e50e" (UID: "97bee565-aafb-4ccc-956b-d94ac410e50e"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:53:13 crc kubenswrapper[4558]: I0120 17:53:13.668297 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/97bee565-aafb-4ccc-956b-d94ac410e50e-config" (OuterVolumeSpecName: "config") pod "97bee565-aafb-4ccc-956b-d94ac410e50e" (UID: "97bee565-aafb-4ccc-956b-d94ac410e50e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:53:13 crc kubenswrapper[4558]: I0120 17:53:13.677335 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/97bee565-aafb-4ccc-956b-d94ac410e50e-dnsmasq-svc" (OuterVolumeSpecName: "dnsmasq-svc") pod "97bee565-aafb-4ccc-956b-d94ac410e50e" (UID: "97bee565-aafb-4ccc-956b-d94ac410e50e"). InnerVolumeSpecName "dnsmasq-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:53:13 crc kubenswrapper[4558]: I0120 17:53:13.737385 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xbzsz\" (UniqueName: \"kubernetes.io/projected/97bee565-aafb-4ccc-956b-d94ac410e50e-kube-api-access-xbzsz\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:13 crc kubenswrapper[4558]: I0120 17:53:13.737416 4558 reconciler_common.go:293] "Volume detached for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/97bee565-aafb-4ccc-956b-d94ac410e50e-dnsmasq-svc\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:13 crc kubenswrapper[4558]: I0120 17:53:13.737429 4558 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/97bee565-aafb-4ccc-956b-d94ac410e50e-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:13 crc kubenswrapper[4558]: I0120 17:53:13.737440 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/97bee565-aafb-4ccc-956b-d94ac410e50e-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:14 crc kubenswrapper[4558]: I0120 17:53:14.317800 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-66c9456f9f-vp4hx" Jan 20 17:53:14 crc kubenswrapper[4558]: I0120 17:53:14.317793 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-66c9456f9f-vp4hx" event={"ID":"97bee565-aafb-4ccc-956b-d94ac410e50e","Type":"ContainerDied","Data":"1ec8c43a7de023dbee48405c043ff3e97f8d251cf1e1ca8acf387367a1804874"} Jan 20 17:53:14 crc kubenswrapper[4558]: I0120 17:53:14.319416 4558 scope.go:117] "RemoveContainer" containerID="9884ce142e2764c93ae8f86078f47582909d78eefe96fec6dbd1907c59c8b11e" Jan 20 17:53:14 crc kubenswrapper[4558]: E0120 17:53:14.319010 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d99379340565779627596cecb15d625f9bcb83d751ea4f5f14fb7cb8bcb8a6b6" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:53:14 crc kubenswrapper[4558]: E0120 17:53:14.323003 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d99379340565779627596cecb15d625f9bcb83d751ea4f5f14fb7cb8bcb8a6b6" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:53:14 crc kubenswrapper[4558]: E0120 17:53:14.324729 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d99379340565779627596cecb15d625f9bcb83d751ea4f5f14fb7cb8bcb8a6b6" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:53:14 crc kubenswrapper[4558]: E0120 17:53:14.324771 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="924fa7ce-8d60-4b2f-b62b-d5e146474f71" containerName="nova-cell0-conductor-conductor" Jan 20 17:53:14 crc kubenswrapper[4558]: I0120 17:53:14.353083 4558 scope.go:117] "RemoveContainer" 
containerID="20cd4a7cd58221648d8ea58e162919bdedcfaf4fc3ac5733ce7262c92ed5e83f" Jan 20 17:53:14 crc kubenswrapper[4558]: I0120 17:53:14.363104 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-66c9456f9f-vp4hx"] Jan 20 17:53:14 crc kubenswrapper[4558]: I0120 17:53:14.363153 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-66c9456f9f-vp4hx"] Jan 20 17:53:14 crc kubenswrapper[4558]: I0120 17:53:14.579572 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="97bee565-aafb-4ccc-956b-d94ac410e50e" path="/var/lib/kubelet/pods/97bee565-aafb-4ccc-956b-d94ac410e50e/volumes" Jan 20 17:53:15 crc kubenswrapper[4558]: I0120 17:53:15.274996 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/ceilometer-0" podUID="cd2275b9-323a-4981-aadd-791b68003e3d" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.1.239:3000/\": dial tcp 10.217.1.239:3000: connect: connection refused" Jan 20 17:53:16 crc kubenswrapper[4558]: E0120 17:53:16.441784 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b4a7a63ae5b4336f1c55496b20962afe4a27040b0e22d712c664548644815e60" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:53:16 crc kubenswrapper[4558]: E0120 17:53:16.443828 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b4a7a63ae5b4336f1c55496b20962afe4a27040b0e22d712c664548644815e60" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:53:16 crc kubenswrapper[4558]: E0120 17:53:16.445099 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b4a7a63ae5b4336f1c55496b20962afe4a27040b0e22d712c664548644815e60" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:53:16 crc kubenswrapper[4558]: E0120 17:53:16.445134 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podUID="7c16f0c4-1462-4817-9f74-9e3d93193867" containerName="nova-cell1-conductor-conductor" Jan 20 17:53:17 crc kubenswrapper[4558]: E0120 17:53:17.464115 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ce0da192b65bf7c5e90dd832e0212db51fd4f3e04276e89c0f7b9256d88b752a" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:53:17 crc kubenswrapper[4558]: E0120 17:53:17.465643 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ce0da192b65bf7c5e90dd832e0212db51fd4f3e04276e89c0f7b9256d88b752a" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:53:17 crc kubenswrapper[4558]: E0120 17:53:17.466940 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code 
= Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ce0da192b65bf7c5e90dd832e0212db51fd4f3e04276e89c0f7b9256d88b752a" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:53:17 crc kubenswrapper[4558]: E0120 17:53:17.466977 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2" containerName="nova-scheduler-scheduler" Jan 20 17:53:19 crc kubenswrapper[4558]: E0120 17:53:19.318729 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d99379340565779627596cecb15d625f9bcb83d751ea4f5f14fb7cb8bcb8a6b6" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:53:19 crc kubenswrapper[4558]: E0120 17:53:19.319887 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d99379340565779627596cecb15d625f9bcb83d751ea4f5f14fb7cb8bcb8a6b6" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:53:19 crc kubenswrapper[4558]: E0120 17:53:19.320976 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d99379340565779627596cecb15d625f9bcb83d751ea4f5f14fb7cb8bcb8a6b6" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:53:19 crc kubenswrapper[4558]: E0120 17:53:19.321025 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="924fa7ce-8d60-4b2f-b62b-d5e146474f71" containerName="nova-cell0-conductor-conductor" Jan 20 17:53:21 crc kubenswrapper[4558]: E0120 17:53:21.441058 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b4a7a63ae5b4336f1c55496b20962afe4a27040b0e22d712c664548644815e60" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:53:21 crc kubenswrapper[4558]: E0120 17:53:21.442336 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b4a7a63ae5b4336f1c55496b20962afe4a27040b0e22d712c664548644815e60" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:53:21 crc kubenswrapper[4558]: E0120 17:53:21.443373 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b4a7a63ae5b4336f1c55496b20962afe4a27040b0e22d712c664548644815e60" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:53:21 crc kubenswrapper[4558]: E0120 17:53:21.443429 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: 
cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podUID="7c16f0c4-1462-4817-9f74-9e3d93193867" containerName="nova-cell1-conductor-conductor" Jan 20 17:53:22 crc kubenswrapper[4558]: E0120 17:53:22.464543 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ce0da192b65bf7c5e90dd832e0212db51fd4f3e04276e89c0f7b9256d88b752a" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:53:22 crc kubenswrapper[4558]: E0120 17:53:22.466038 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ce0da192b65bf7c5e90dd832e0212db51fd4f3e04276e89c0f7b9256d88b752a" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:53:22 crc kubenswrapper[4558]: E0120 17:53:22.467838 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ce0da192b65bf7c5e90dd832e0212db51fd4f3e04276e89c0f7b9256d88b752a" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:53:22 crc kubenswrapper[4558]: E0120 17:53:22.467950 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2" containerName="nova-scheduler-scheduler" Jan 20 17:53:22 crc kubenswrapper[4558]: I0120 17:53:22.566370 4558 scope.go:117] "RemoveContainer" containerID="4ea7fa61d8b21007a044345acec89fcebe56c2921cf0f76ff2002f44bdc88ede" Jan 20 17:53:22 crc kubenswrapper[4558]: E0120 17:53:22.566617 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:53:24 crc kubenswrapper[4558]: E0120 17:53:24.318372 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d99379340565779627596cecb15d625f9bcb83d751ea4f5f14fb7cb8bcb8a6b6" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:53:24 crc kubenswrapper[4558]: E0120 17:53:24.319765 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d99379340565779627596cecb15d625f9bcb83d751ea4f5f14fb7cb8bcb8a6b6" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:53:24 crc kubenswrapper[4558]: E0120 17:53:24.321233 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" 
containerID="d99379340565779627596cecb15d625f9bcb83d751ea4f5f14fb7cb8bcb8a6b6" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:53:24 crc kubenswrapper[4558]: E0120 17:53:24.321283 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="924fa7ce-8d60-4b2f-b62b-d5e146474f71" containerName="nova-cell0-conductor-conductor" Jan 20 17:53:25 crc kubenswrapper[4558]: I0120 17:53:25.452271 4558 scope.go:117] "RemoveContainer" containerID="7dd5fbed1bb93ffab4c94f345e8862a47fbcd02213be363bc0764b0ebe5c1f83" Jan 20 17:53:25 crc kubenswrapper[4558]: I0120 17:53:25.501455 4558 scope.go:117] "RemoveContainer" containerID="c64c1c6b089bf0563dad7d86569ad1d73d4507d9ac97c8012b53e092021479e5" Jan 20 17:53:26 crc kubenswrapper[4558]: I0120 17:53:26.014841 4558 scope.go:117] "RemoveContainer" containerID="f7bef3d7d2547ce273b172aca5d5e1a2df7c945d8bb078d5bc0c5b41f4c22b93" Jan 20 17:53:26 crc kubenswrapper[4558]: I0120 17:53:26.060958 4558 scope.go:117] "RemoveContainer" containerID="89c254b2cc20564dd33cd7756e681d06ef6e49434611f128e7c7bd18d011d719" Jan 20 17:53:26 crc kubenswrapper[4558]: I0120 17:53:26.082524 4558 scope.go:117] "RemoveContainer" containerID="074493bc02e61ac25a6b00e8358bebb521ffddbaf6ae6d939715d9c5ccbe54c7" Jan 20 17:53:26 crc kubenswrapper[4558]: I0120 17:53:26.112353 4558 scope.go:117] "RemoveContainer" containerID="94906da2e077fe5e9b0743c97f8ed215bd1787bdb22c85dbc8e9ff7e49407f3a" Jan 20 17:53:26 crc kubenswrapper[4558]: I0120 17:53:26.135091 4558 scope.go:117] "RemoveContainer" containerID="99cb94cc5f3f7962fae34e8aeb4e78c1758c65b96c2f5f366e9c06f6ebfe81c9" Jan 20 17:53:26 crc kubenswrapper[4558]: I0120 17:53:26.149372 4558 scope.go:117] "RemoveContainer" containerID="64972ee4dabb5ea14c0f36ff7a7b7a13891412258c4e910ce71c924bb926e69e" Jan 20 17:53:26 crc kubenswrapper[4558]: I0120 17:53:26.172566 4558 scope.go:117] "RemoveContainer" containerID="d464d7020e25ad80b4a7faeb918b86aad826276b94aea66bcc2f1c25be05ceaa" Jan 20 17:53:26 crc kubenswrapper[4558]: I0120 17:53:26.205220 4558 scope.go:117] "RemoveContainer" containerID="a0597f6f3ecf954ffb70d092e87b6d76c3bab5e72a8a9f28787b4ad834a49992" Jan 20 17:53:26 crc kubenswrapper[4558]: I0120 17:53:26.223380 4558 scope.go:117] "RemoveContainer" containerID="2dc44c6d293061ff251208fd3424c4f248fb871f863ac0ae98881f72b9a345ee" Jan 20 17:53:26 crc kubenswrapper[4558]: I0120 17:53:26.240792 4558 scope.go:117] "RemoveContainer" containerID="14c2e0d4dc0056dcda162ff4dd3917c5a10a8a300b1513c3f8b7fb9321d4a88e" Jan 20 17:53:26 crc kubenswrapper[4558]: I0120 17:53:26.261945 4558 scope.go:117] "RemoveContainer" containerID="7064c70f51c5070bb50a04ba6121a31e6cb8e5af6fce497e461d140997c2e99c" Jan 20 17:53:26 crc kubenswrapper[4558]: I0120 17:53:26.299928 4558 scope.go:117] "RemoveContainer" containerID="89888a23e0d26b9c3288dac8f4c5d490eb219f4b8f1f24588119483c51792e56" Jan 20 17:53:26 crc kubenswrapper[4558]: I0120 17:53:26.411332 4558 scope.go:117] "RemoveContainer" containerID="3e30e162193fcb14bcc9faaa352f5c790e274d276a5e03c359619caba0cc19d9" Jan 20 17:53:26 crc kubenswrapper[4558]: I0120 17:53:26.427712 4558 scope.go:117] "RemoveContainer" containerID="68558968f523e7ff4508593f398776870a248bc02f552e2e5df1ccbb58abf958" Jan 20 17:53:26 crc kubenswrapper[4558]: E0120 17:53:26.440796 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown 
desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b4a7a63ae5b4336f1c55496b20962afe4a27040b0e22d712c664548644815e60" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:53:26 crc kubenswrapper[4558]: E0120 17:53:26.442139 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b4a7a63ae5b4336f1c55496b20962afe4a27040b0e22d712c664548644815e60" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:53:26 crc kubenswrapper[4558]: I0120 17:53:26.443320 4558 scope.go:117] "RemoveContainer" containerID="1fa8ee6a85397df8fa8a9f702077e9146daa2b4897ff59434b1a88b468ac76f7" Jan 20 17:53:26 crc kubenswrapper[4558]: E0120 17:53:26.443512 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b4a7a63ae5b4336f1c55496b20962afe4a27040b0e22d712c664548644815e60" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:53:26 crc kubenswrapper[4558]: E0120 17:53:26.443548 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podUID="7c16f0c4-1462-4817-9f74-9e3d93193867" containerName="nova-cell1-conductor-conductor" Jan 20 17:53:26 crc kubenswrapper[4558]: I0120 17:53:26.459762 4558 scope.go:117] "RemoveContainer" containerID="9bb296664d4062cc389adf791e64578806a256d24518a7353d119de85df45770" Jan 20 17:53:26 crc kubenswrapper[4558]: I0120 17:53:26.473906 4558 scope.go:117] "RemoveContainer" containerID="324d9081397c220fd77997997d3f091e61e407a5fbc6bb38370055c3f9f680f3" Jan 20 17:53:26 crc kubenswrapper[4558]: I0120 17:53:26.608785 4558 scope.go:117] "RemoveContainer" containerID="156bb79d6f586adeacf516e5434e22447e062b6466c591681499cef3125750f9" Jan 20 17:53:26 crc kubenswrapper[4558]: I0120 17:53:26.711523 4558 scope.go:117] "RemoveContainer" containerID="a5d3a8ef41b0fbf942dd8d188bb25b5d46afeed4c9868dfa05afacbece3f7157" Jan 20 17:53:26 crc kubenswrapper[4558]: I0120 17:53:26.829472 4558 scope.go:117] "RemoveContainer" containerID="5c017decda645c6d0210efcf18e6ea25bbb27e76266cb4e262a3a03a5802f34c" Jan 20 17:53:26 crc kubenswrapper[4558]: I0120 17:53:26.896817 4558 scope.go:117] "RemoveContainer" containerID="6a26fa3d881fdae5f126b67ecdf3cd57c4714eeb300e6b941666a0c25f2581da" Jan 20 17:53:26 crc kubenswrapper[4558]: I0120 17:53:26.918382 4558 scope.go:117] "RemoveContainer" containerID="871c763a67bacf5aab84d0674fb5be5ddbcc187543e0fee9119ff7443f932ffc" Jan 20 17:53:27 crc kubenswrapper[4558]: E0120 17:53:27.464789 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ce0da192b65bf7c5e90dd832e0212db51fd4f3e04276e89c0f7b9256d88b752a" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:53:27 crc kubenswrapper[4558]: E0120 17:53:27.466215 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" 
containerID="ce0da192b65bf7c5e90dd832e0212db51fd4f3e04276e89c0f7b9256d88b752a" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:53:27 crc kubenswrapper[4558]: E0120 17:53:27.477150 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ce0da192b65bf7c5e90dd832e0212db51fd4f3e04276e89c0f7b9256d88b752a" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:53:27 crc kubenswrapper[4558]: E0120 17:53:27.477239 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2" containerName="nova-scheduler-scheduler" Jan 20 17:53:29 crc kubenswrapper[4558]: E0120 17:53:29.319042 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d99379340565779627596cecb15d625f9bcb83d751ea4f5f14fb7cb8bcb8a6b6" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:53:29 crc kubenswrapper[4558]: E0120 17:53:29.320854 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d99379340565779627596cecb15d625f9bcb83d751ea4f5f14fb7cb8bcb8a6b6" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:53:29 crc kubenswrapper[4558]: E0120 17:53:29.322174 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="d99379340565779627596cecb15d625f9bcb83d751ea4f5f14fb7cb8bcb8a6b6" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:53:29 crc kubenswrapper[4558]: E0120 17:53:29.322231 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="924fa7ce-8d60-4b2f-b62b-d5e146474f71" containerName="nova-cell0-conductor-conductor" Jan 20 17:53:30 crc kubenswrapper[4558]: I0120 17:53:30.200672 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/neutron-75676d645b-l2sp7" podUID="6ea6b41c-c5ed-49c9-84f5-a932a1325aba" containerName="neutron-httpd" probeResult="failure" output="Get \"https://10.217.1.231:9696/\": dial tcp 10.217.1.231:9696: connect: connection refused" Jan 20 17:53:31 crc kubenswrapper[4558]: E0120 17:53:31.442347 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b4a7a63ae5b4336f1c55496b20962afe4a27040b0e22d712c664548644815e60" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:53:31 crc kubenswrapper[4558]: E0120 17:53:31.444827 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" 
containerID="b4a7a63ae5b4336f1c55496b20962afe4a27040b0e22d712c664548644815e60" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:53:31 crc kubenswrapper[4558]: E0120 17:53:31.446394 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b4a7a63ae5b4336f1c55496b20962afe4a27040b0e22d712c664548644815e60" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:53:31 crc kubenswrapper[4558]: E0120 17:53:31.446436 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podUID="7c16f0c4-1462-4817-9f74-9e3d93193867" containerName="nova-cell1-conductor-conductor" Jan 20 17:53:32 crc kubenswrapper[4558]: E0120 17:53:32.465021 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ce0da192b65bf7c5e90dd832e0212db51fd4f3e04276e89c0f7b9256d88b752a" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:53:32 crc kubenswrapper[4558]: E0120 17:53:32.466564 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ce0da192b65bf7c5e90dd832e0212db51fd4f3e04276e89c0f7b9256d88b752a" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:53:32 crc kubenswrapper[4558]: E0120 17:53:32.467693 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="ce0da192b65bf7c5e90dd832e0212db51fd4f3e04276e89c0f7b9256d88b752a" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:53:32 crc kubenswrapper[4558]: E0120 17:53:32.467726 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2" containerName="nova-scheduler-scheduler" Jan 20 17:53:32 crc kubenswrapper[4558]: I0120 17:53:32.568315 4558 generic.go:334] "Generic (PLEG): container finished" podID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" containerID="3fca1090ab44eb73ee4987d4fe77236fd5fda45c847b185be75610aec8208529" exitCode=137 Jan 20 17:53:32 crc kubenswrapper[4558]: I0120 17:53:32.569604 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:53:32 crc kubenswrapper[4558]: I0120 17:53:32.585944 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"64d029c7-79c6-40fc-b4d2-ac5bfbc387a0","Type":"ContainerDied","Data":"3fca1090ab44eb73ee4987d4fe77236fd5fda45c847b185be75610aec8208529"} Jan 20 17:53:32 crc kubenswrapper[4558]: I0120 17:53:32.586019 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"64d029c7-79c6-40fc-b4d2-ac5bfbc387a0","Type":"ContainerDied","Data":"d156fc744aaa2e9ee5916bc5532b984f60da0f513c57a66b67e105fb18197a1e"} Jan 20 17:53:32 crc kubenswrapper[4558]: I0120 17:53:32.586071 4558 scope.go:117] "RemoveContainer" containerID="3fca1090ab44eb73ee4987d4fe77236fd5fda45c847b185be75610aec8208529" Jan 20 17:53:32 crc kubenswrapper[4558]: I0120 17:53:32.607439 4558 scope.go:117] "RemoveContainer" containerID="049005bceb3d9c7f92a80adfa58232b99be025be046ebfa5fdd30c2501029dd6" Jan 20 17:53:32 crc kubenswrapper[4558]: I0120 17:53:32.621729 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/64d029c7-79c6-40fc-b4d2-ac5bfbc387a0-cache\") pod \"64d029c7-79c6-40fc-b4d2-ac5bfbc387a0\" (UID: \"64d029c7-79c6-40fc-b4d2-ac5bfbc387a0\") " Jan 20 17:53:32 crc kubenswrapper[4558]: I0120 17:53:32.621792 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/64d029c7-79c6-40fc-b4d2-ac5bfbc387a0-etc-swift\") pod \"64d029c7-79c6-40fc-b4d2-ac5bfbc387a0\" (UID: \"64d029c7-79c6-40fc-b4d2-ac5bfbc387a0\") " Jan 20 17:53:32 crc kubenswrapper[4558]: I0120 17:53:32.622388 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/64d029c7-79c6-40fc-b4d2-ac5bfbc387a0-cache" (OuterVolumeSpecName: "cache") pod "64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" (UID: "64d029c7-79c6-40fc-b4d2-ac5bfbc387a0"). InnerVolumeSpecName "cache". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:53:32 crc kubenswrapper[4558]: I0120 17:53:32.622743 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/64d029c7-79c6-40fc-b4d2-ac5bfbc387a0-lock\") pod \"64d029c7-79c6-40fc-b4d2-ac5bfbc387a0\" (UID: \"64d029c7-79c6-40fc-b4d2-ac5bfbc387a0\") " Jan 20 17:53:32 crc kubenswrapper[4558]: I0120 17:53:32.622825 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rw5ht\" (UniqueName: \"kubernetes.io/projected/64d029c7-79c6-40fc-b4d2-ac5bfbc387a0-kube-api-access-rw5ht\") pod \"64d029c7-79c6-40fc-b4d2-ac5bfbc387a0\" (UID: \"64d029c7-79c6-40fc-b4d2-ac5bfbc387a0\") " Jan 20 17:53:32 crc kubenswrapper[4558]: I0120 17:53:32.623237 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swift\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"64d029c7-79c6-40fc-b4d2-ac5bfbc387a0\" (UID: \"64d029c7-79c6-40fc-b4d2-ac5bfbc387a0\") " Jan 20 17:53:32 crc kubenswrapper[4558]: I0120 17:53:32.623594 4558 reconciler_common.go:293] "Volume detached for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/64d029c7-79c6-40fc-b4d2-ac5bfbc387a0-cache\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:32 crc kubenswrapper[4558]: I0120 17:53:32.623800 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/64d029c7-79c6-40fc-b4d2-ac5bfbc387a0-lock" (OuterVolumeSpecName: "lock") pod "64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" (UID: "64d029c7-79c6-40fc-b4d2-ac5bfbc387a0"). InnerVolumeSpecName "lock". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:53:32 crc kubenswrapper[4558]: I0120 17:53:32.631016 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "swift") pod "64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" (UID: "64d029c7-79c6-40fc-b4d2-ac5bfbc387a0"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:53:32 crc kubenswrapper[4558]: I0120 17:53:32.631043 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/64d029c7-79c6-40fc-b4d2-ac5bfbc387a0-kube-api-access-rw5ht" (OuterVolumeSpecName: "kube-api-access-rw5ht") pod "64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" (UID: "64d029c7-79c6-40fc-b4d2-ac5bfbc387a0"). InnerVolumeSpecName "kube-api-access-rw5ht". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:53:32 crc kubenswrapper[4558]: I0120 17:53:32.634539 4558 scope.go:117] "RemoveContainer" containerID="e38f2eedbeb8bee77176fac3f5c4ecdcf1aef7aa98e160b84718ab6bb18bb8a4" Jan 20 17:53:32 crc kubenswrapper[4558]: I0120 17:53:32.636360 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/64d029c7-79c6-40fc-b4d2-ac5bfbc387a0-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" (UID: "64d029c7-79c6-40fc-b4d2-ac5bfbc387a0"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:53:32 crc kubenswrapper[4558]: I0120 17:53:32.680379 4558 scope.go:117] "RemoveContainer" containerID="a6e878f0584cff110a93e31a31995a6fbd71d401815d3d94bb3700d40da94020" Jan 20 17:53:32 crc kubenswrapper[4558]: I0120 17:53:32.695489 4558 scope.go:117] "RemoveContainer" containerID="0babdc19d278b5edc8070926b4a7ce96ef2d679f28771d60ffadb5ba187b8117" Jan 20 17:53:32 crc kubenswrapper[4558]: I0120 17:53:32.709807 4558 scope.go:117] "RemoveContainer" containerID="814558d14f3629d14a008df6a38667804146fe090de98d44f2c3c2447d69c787" Jan 20 17:53:32 crc kubenswrapper[4558]: I0120 17:53:32.726459 4558 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/64d029c7-79c6-40fc-b4d2-ac5bfbc387a0-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:32 crc kubenswrapper[4558]: I0120 17:53:32.726495 4558 reconciler_common.go:293] "Volume detached for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/64d029c7-79c6-40fc-b4d2-ac5bfbc387a0-lock\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:32 crc kubenswrapper[4558]: I0120 17:53:32.726510 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rw5ht\" (UniqueName: \"kubernetes.io/projected/64d029c7-79c6-40fc-b4d2-ac5bfbc387a0-kube-api-access-rw5ht\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:32 crc kubenswrapper[4558]: I0120 17:53:32.726560 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Jan 20 17:53:32 crc kubenswrapper[4558]: I0120 17:53:32.743839 4558 scope.go:117] "RemoveContainer" containerID="5e23628ab2cc3c257412804853dd2671a2b679976e1f78c61ccacc4929dc21cd" Jan 20 17:53:32 crc kubenswrapper[4558]: I0120 17:53:32.744363 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Jan 20 17:53:32 crc kubenswrapper[4558]: I0120 17:53:32.757783 4558 scope.go:117] "RemoveContainer" containerID="d32a8a2e6495346715f9634ab4b1b4bc18d96e7451b5223fe9976afe7dd5d7c2" Jan 20 17:53:32 crc kubenswrapper[4558]: I0120 17:53:32.770770 4558 scope.go:117] "RemoveContainer" containerID="ce8b75288eb38358817832bfb419010893a3a4a082fc83e9ed669e3fe1c9a795" Jan 20 17:53:32 crc kubenswrapper[4558]: I0120 17:53:32.783699 4558 scope.go:117] "RemoveContainer" containerID="19a88da3e14609ec200568f698ff68961ad644b5fa7a972150ff49ec0d443ab1" Jan 20 17:53:32 crc kubenswrapper[4558]: I0120 17:53:32.797676 4558 scope.go:117] "RemoveContainer" containerID="4842ce500b116933451ba6ea6d30de4651175498d4768796b95de36768b44fc5" Jan 20 17:53:32 crc kubenswrapper[4558]: I0120 17:53:32.814184 4558 scope.go:117] "RemoveContainer" containerID="9f25ee1c308a320e8a43a0bc03a0bfd7c3d57cf22c17c10b894b0f930d53afe2" Jan 20 17:53:32 crc kubenswrapper[4558]: I0120 17:53:32.828703 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:32 crc kubenswrapper[4558]: I0120 17:53:32.829713 4558 scope.go:117] "RemoveContainer" containerID="34b81cfd7ee5d1e0183fe2711993e5340813f109d83301b4976cc429efc8bc12" Jan 20 17:53:32 crc kubenswrapper[4558]: I0120 17:53:32.846064 4558 scope.go:117] "RemoveContainer" containerID="9c8048073ccb7077c6ce8072e9a03b4c7c45eacbbfa51aa61971cd31bec984b4" Jan 20 
17:53:32 crc kubenswrapper[4558]: I0120 17:53:32.863471 4558 scope.go:117] "RemoveContainer" containerID="2d0d3c4c9084f5b47b5268a7e1d20896d7cf8d59d93e03270dfa21fcb273c377" Jan 20 17:53:32 crc kubenswrapper[4558]: I0120 17:53:32.891253 4558 scope.go:117] "RemoveContainer" containerID="3fca1090ab44eb73ee4987d4fe77236fd5fda45c847b185be75610aec8208529" Jan 20 17:53:32 crc kubenswrapper[4558]: E0120 17:53:32.893197 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3fca1090ab44eb73ee4987d4fe77236fd5fda45c847b185be75610aec8208529\": container with ID starting with 3fca1090ab44eb73ee4987d4fe77236fd5fda45c847b185be75610aec8208529 not found: ID does not exist" containerID="3fca1090ab44eb73ee4987d4fe77236fd5fda45c847b185be75610aec8208529" Jan 20 17:53:32 crc kubenswrapper[4558]: I0120 17:53:32.893226 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3fca1090ab44eb73ee4987d4fe77236fd5fda45c847b185be75610aec8208529"} err="failed to get container status \"3fca1090ab44eb73ee4987d4fe77236fd5fda45c847b185be75610aec8208529\": rpc error: code = NotFound desc = could not find container \"3fca1090ab44eb73ee4987d4fe77236fd5fda45c847b185be75610aec8208529\": container with ID starting with 3fca1090ab44eb73ee4987d4fe77236fd5fda45c847b185be75610aec8208529 not found: ID does not exist" Jan 20 17:53:32 crc kubenswrapper[4558]: I0120 17:53:32.893264 4558 scope.go:117] "RemoveContainer" containerID="049005bceb3d9c7f92a80adfa58232b99be025be046ebfa5fdd30c2501029dd6" Jan 20 17:53:32 crc kubenswrapper[4558]: E0120 17:53:32.894327 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"049005bceb3d9c7f92a80adfa58232b99be025be046ebfa5fdd30c2501029dd6\": container with ID starting with 049005bceb3d9c7f92a80adfa58232b99be025be046ebfa5fdd30c2501029dd6 not found: ID does not exist" containerID="049005bceb3d9c7f92a80adfa58232b99be025be046ebfa5fdd30c2501029dd6" Jan 20 17:53:32 crc kubenswrapper[4558]: I0120 17:53:32.894369 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"049005bceb3d9c7f92a80adfa58232b99be025be046ebfa5fdd30c2501029dd6"} err="failed to get container status \"049005bceb3d9c7f92a80adfa58232b99be025be046ebfa5fdd30c2501029dd6\": rpc error: code = NotFound desc = could not find container \"049005bceb3d9c7f92a80adfa58232b99be025be046ebfa5fdd30c2501029dd6\": container with ID starting with 049005bceb3d9c7f92a80adfa58232b99be025be046ebfa5fdd30c2501029dd6 not found: ID does not exist" Jan 20 17:53:32 crc kubenswrapper[4558]: I0120 17:53:32.894401 4558 scope.go:117] "RemoveContainer" containerID="e38f2eedbeb8bee77176fac3f5c4ecdcf1aef7aa98e160b84718ab6bb18bb8a4" Jan 20 17:53:32 crc kubenswrapper[4558]: E0120 17:53:32.894912 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e38f2eedbeb8bee77176fac3f5c4ecdcf1aef7aa98e160b84718ab6bb18bb8a4\": container with ID starting with e38f2eedbeb8bee77176fac3f5c4ecdcf1aef7aa98e160b84718ab6bb18bb8a4 not found: ID does not exist" containerID="e38f2eedbeb8bee77176fac3f5c4ecdcf1aef7aa98e160b84718ab6bb18bb8a4" Jan 20 17:53:32 crc kubenswrapper[4558]: I0120 17:53:32.894985 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e38f2eedbeb8bee77176fac3f5c4ecdcf1aef7aa98e160b84718ab6bb18bb8a4"} err="failed to get container 
status \"e38f2eedbeb8bee77176fac3f5c4ecdcf1aef7aa98e160b84718ab6bb18bb8a4\": rpc error: code = NotFound desc = could not find container \"e38f2eedbeb8bee77176fac3f5c4ecdcf1aef7aa98e160b84718ab6bb18bb8a4\": container with ID starting with e38f2eedbeb8bee77176fac3f5c4ecdcf1aef7aa98e160b84718ab6bb18bb8a4 not found: ID does not exist" Jan 20 17:53:32 crc kubenswrapper[4558]: I0120 17:53:32.895059 4558 scope.go:117] "RemoveContainer" containerID="a6e878f0584cff110a93e31a31995a6fbd71d401815d3d94bb3700d40da94020" Jan 20 17:53:32 crc kubenswrapper[4558]: E0120 17:53:32.895708 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a6e878f0584cff110a93e31a31995a6fbd71d401815d3d94bb3700d40da94020\": container with ID starting with a6e878f0584cff110a93e31a31995a6fbd71d401815d3d94bb3700d40da94020 not found: ID does not exist" containerID="a6e878f0584cff110a93e31a31995a6fbd71d401815d3d94bb3700d40da94020" Jan 20 17:53:32 crc kubenswrapper[4558]: I0120 17:53:32.895738 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a6e878f0584cff110a93e31a31995a6fbd71d401815d3d94bb3700d40da94020"} err="failed to get container status \"a6e878f0584cff110a93e31a31995a6fbd71d401815d3d94bb3700d40da94020\": rpc error: code = NotFound desc = could not find container \"a6e878f0584cff110a93e31a31995a6fbd71d401815d3d94bb3700d40da94020\": container with ID starting with a6e878f0584cff110a93e31a31995a6fbd71d401815d3d94bb3700d40da94020 not found: ID does not exist" Jan 20 17:53:32 crc kubenswrapper[4558]: I0120 17:53:32.895756 4558 scope.go:117] "RemoveContainer" containerID="0babdc19d278b5edc8070926b4a7ce96ef2d679f28771d60ffadb5ba187b8117" Jan 20 17:53:32 crc kubenswrapper[4558]: E0120 17:53:32.896353 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0babdc19d278b5edc8070926b4a7ce96ef2d679f28771d60ffadb5ba187b8117\": container with ID starting with 0babdc19d278b5edc8070926b4a7ce96ef2d679f28771d60ffadb5ba187b8117 not found: ID does not exist" containerID="0babdc19d278b5edc8070926b4a7ce96ef2d679f28771d60ffadb5ba187b8117" Jan 20 17:53:32 crc kubenswrapper[4558]: I0120 17:53:32.896406 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0babdc19d278b5edc8070926b4a7ce96ef2d679f28771d60ffadb5ba187b8117"} err="failed to get container status \"0babdc19d278b5edc8070926b4a7ce96ef2d679f28771d60ffadb5ba187b8117\": rpc error: code = NotFound desc = could not find container \"0babdc19d278b5edc8070926b4a7ce96ef2d679f28771d60ffadb5ba187b8117\": container with ID starting with 0babdc19d278b5edc8070926b4a7ce96ef2d679f28771d60ffadb5ba187b8117 not found: ID does not exist" Jan 20 17:53:32 crc kubenswrapper[4558]: I0120 17:53:32.896432 4558 scope.go:117] "RemoveContainer" containerID="814558d14f3629d14a008df6a38667804146fe090de98d44f2c3c2447d69c787" Jan 20 17:53:32 crc kubenswrapper[4558]: E0120 17:53:32.896806 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"814558d14f3629d14a008df6a38667804146fe090de98d44f2c3c2447d69c787\": container with ID starting with 814558d14f3629d14a008df6a38667804146fe090de98d44f2c3c2447d69c787 not found: ID does not exist" containerID="814558d14f3629d14a008df6a38667804146fe090de98d44f2c3c2447d69c787" Jan 20 17:53:32 crc kubenswrapper[4558]: I0120 17:53:32.896836 4558 pod_container_deletor.go:53] 
"DeleteContainer returned error" containerID={"Type":"cri-o","ID":"814558d14f3629d14a008df6a38667804146fe090de98d44f2c3c2447d69c787"} err="failed to get container status \"814558d14f3629d14a008df6a38667804146fe090de98d44f2c3c2447d69c787\": rpc error: code = NotFound desc = could not find container \"814558d14f3629d14a008df6a38667804146fe090de98d44f2c3c2447d69c787\": container with ID starting with 814558d14f3629d14a008df6a38667804146fe090de98d44f2c3c2447d69c787 not found: ID does not exist" Jan 20 17:53:32 crc kubenswrapper[4558]: I0120 17:53:32.896863 4558 scope.go:117] "RemoveContainer" containerID="5e23628ab2cc3c257412804853dd2671a2b679976e1f78c61ccacc4929dc21cd" Jan 20 17:53:32 crc kubenswrapper[4558]: E0120 17:53:32.897259 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5e23628ab2cc3c257412804853dd2671a2b679976e1f78c61ccacc4929dc21cd\": container with ID starting with 5e23628ab2cc3c257412804853dd2671a2b679976e1f78c61ccacc4929dc21cd not found: ID does not exist" containerID="5e23628ab2cc3c257412804853dd2671a2b679976e1f78c61ccacc4929dc21cd" Jan 20 17:53:32 crc kubenswrapper[4558]: I0120 17:53:32.897284 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5e23628ab2cc3c257412804853dd2671a2b679976e1f78c61ccacc4929dc21cd"} err="failed to get container status \"5e23628ab2cc3c257412804853dd2671a2b679976e1f78c61ccacc4929dc21cd\": rpc error: code = NotFound desc = could not find container \"5e23628ab2cc3c257412804853dd2671a2b679976e1f78c61ccacc4929dc21cd\": container with ID starting with 5e23628ab2cc3c257412804853dd2671a2b679976e1f78c61ccacc4929dc21cd not found: ID does not exist" Jan 20 17:53:32 crc kubenswrapper[4558]: I0120 17:53:32.897299 4558 scope.go:117] "RemoveContainer" containerID="d32a8a2e6495346715f9634ab4b1b4bc18d96e7451b5223fe9976afe7dd5d7c2" Jan 20 17:53:32 crc kubenswrapper[4558]: E0120 17:53:32.897938 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d32a8a2e6495346715f9634ab4b1b4bc18d96e7451b5223fe9976afe7dd5d7c2\": container with ID starting with d32a8a2e6495346715f9634ab4b1b4bc18d96e7451b5223fe9976afe7dd5d7c2 not found: ID does not exist" containerID="d32a8a2e6495346715f9634ab4b1b4bc18d96e7451b5223fe9976afe7dd5d7c2" Jan 20 17:53:32 crc kubenswrapper[4558]: I0120 17:53:32.897970 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d32a8a2e6495346715f9634ab4b1b4bc18d96e7451b5223fe9976afe7dd5d7c2"} err="failed to get container status \"d32a8a2e6495346715f9634ab4b1b4bc18d96e7451b5223fe9976afe7dd5d7c2\": rpc error: code = NotFound desc = could not find container \"d32a8a2e6495346715f9634ab4b1b4bc18d96e7451b5223fe9976afe7dd5d7c2\": container with ID starting with d32a8a2e6495346715f9634ab4b1b4bc18d96e7451b5223fe9976afe7dd5d7c2 not found: ID does not exist" Jan 20 17:53:32 crc kubenswrapper[4558]: I0120 17:53:32.897986 4558 scope.go:117] "RemoveContainer" containerID="ce8b75288eb38358817832bfb419010893a3a4a082fc83e9ed669e3fe1c9a795" Jan 20 17:53:32 crc kubenswrapper[4558]: E0120 17:53:32.898392 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ce8b75288eb38358817832bfb419010893a3a4a082fc83e9ed669e3fe1c9a795\": container with ID starting with ce8b75288eb38358817832bfb419010893a3a4a082fc83e9ed669e3fe1c9a795 not found: ID does not exist" 
containerID="ce8b75288eb38358817832bfb419010893a3a4a082fc83e9ed669e3fe1c9a795" Jan 20 17:53:32 crc kubenswrapper[4558]: I0120 17:53:32.898439 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ce8b75288eb38358817832bfb419010893a3a4a082fc83e9ed669e3fe1c9a795"} err="failed to get container status \"ce8b75288eb38358817832bfb419010893a3a4a082fc83e9ed669e3fe1c9a795\": rpc error: code = NotFound desc = could not find container \"ce8b75288eb38358817832bfb419010893a3a4a082fc83e9ed669e3fe1c9a795\": container with ID starting with ce8b75288eb38358817832bfb419010893a3a4a082fc83e9ed669e3fe1c9a795 not found: ID does not exist" Jan 20 17:53:32 crc kubenswrapper[4558]: I0120 17:53:32.898457 4558 scope.go:117] "RemoveContainer" containerID="19a88da3e14609ec200568f698ff68961ad644b5fa7a972150ff49ec0d443ab1" Jan 20 17:53:32 crc kubenswrapper[4558]: E0120 17:53:32.898986 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"19a88da3e14609ec200568f698ff68961ad644b5fa7a972150ff49ec0d443ab1\": container with ID starting with 19a88da3e14609ec200568f698ff68961ad644b5fa7a972150ff49ec0d443ab1 not found: ID does not exist" containerID="19a88da3e14609ec200568f698ff68961ad644b5fa7a972150ff49ec0d443ab1" Jan 20 17:53:32 crc kubenswrapper[4558]: I0120 17:53:32.899024 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"19a88da3e14609ec200568f698ff68961ad644b5fa7a972150ff49ec0d443ab1"} err="failed to get container status \"19a88da3e14609ec200568f698ff68961ad644b5fa7a972150ff49ec0d443ab1\": rpc error: code = NotFound desc = could not find container \"19a88da3e14609ec200568f698ff68961ad644b5fa7a972150ff49ec0d443ab1\": container with ID starting with 19a88da3e14609ec200568f698ff68961ad644b5fa7a972150ff49ec0d443ab1 not found: ID does not exist" Jan 20 17:53:32 crc kubenswrapper[4558]: I0120 17:53:32.899049 4558 scope.go:117] "RemoveContainer" containerID="4842ce500b116933451ba6ea6d30de4651175498d4768796b95de36768b44fc5" Jan 20 17:53:32 crc kubenswrapper[4558]: E0120 17:53:32.899402 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4842ce500b116933451ba6ea6d30de4651175498d4768796b95de36768b44fc5\": container with ID starting with 4842ce500b116933451ba6ea6d30de4651175498d4768796b95de36768b44fc5 not found: ID does not exist" containerID="4842ce500b116933451ba6ea6d30de4651175498d4768796b95de36768b44fc5" Jan 20 17:53:32 crc kubenswrapper[4558]: I0120 17:53:32.899435 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4842ce500b116933451ba6ea6d30de4651175498d4768796b95de36768b44fc5"} err="failed to get container status \"4842ce500b116933451ba6ea6d30de4651175498d4768796b95de36768b44fc5\": rpc error: code = NotFound desc = could not find container \"4842ce500b116933451ba6ea6d30de4651175498d4768796b95de36768b44fc5\": container with ID starting with 4842ce500b116933451ba6ea6d30de4651175498d4768796b95de36768b44fc5 not found: ID does not exist" Jan 20 17:53:32 crc kubenswrapper[4558]: I0120 17:53:32.899452 4558 scope.go:117] "RemoveContainer" containerID="9f25ee1c308a320e8a43a0bc03a0bfd7c3d57cf22c17c10b894b0f930d53afe2" Jan 20 17:53:32 crc kubenswrapper[4558]: E0120 17:53:32.899757 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"9f25ee1c308a320e8a43a0bc03a0bfd7c3d57cf22c17c10b894b0f930d53afe2\": container with ID starting with 9f25ee1c308a320e8a43a0bc03a0bfd7c3d57cf22c17c10b894b0f930d53afe2 not found: ID does not exist" containerID="9f25ee1c308a320e8a43a0bc03a0bfd7c3d57cf22c17c10b894b0f930d53afe2" Jan 20 17:53:32 crc kubenswrapper[4558]: I0120 17:53:32.899779 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9f25ee1c308a320e8a43a0bc03a0bfd7c3d57cf22c17c10b894b0f930d53afe2"} err="failed to get container status \"9f25ee1c308a320e8a43a0bc03a0bfd7c3d57cf22c17c10b894b0f930d53afe2\": rpc error: code = NotFound desc = could not find container \"9f25ee1c308a320e8a43a0bc03a0bfd7c3d57cf22c17c10b894b0f930d53afe2\": container with ID starting with 9f25ee1c308a320e8a43a0bc03a0bfd7c3d57cf22c17c10b894b0f930d53afe2 not found: ID does not exist" Jan 20 17:53:32 crc kubenswrapper[4558]: I0120 17:53:32.899793 4558 scope.go:117] "RemoveContainer" containerID="34b81cfd7ee5d1e0183fe2711993e5340813f109d83301b4976cc429efc8bc12" Jan 20 17:53:32 crc kubenswrapper[4558]: E0120 17:53:32.900302 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"34b81cfd7ee5d1e0183fe2711993e5340813f109d83301b4976cc429efc8bc12\": container with ID starting with 34b81cfd7ee5d1e0183fe2711993e5340813f109d83301b4976cc429efc8bc12 not found: ID does not exist" containerID="34b81cfd7ee5d1e0183fe2711993e5340813f109d83301b4976cc429efc8bc12" Jan 20 17:53:32 crc kubenswrapper[4558]: I0120 17:53:32.900324 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"34b81cfd7ee5d1e0183fe2711993e5340813f109d83301b4976cc429efc8bc12"} err="failed to get container status \"34b81cfd7ee5d1e0183fe2711993e5340813f109d83301b4976cc429efc8bc12\": rpc error: code = NotFound desc = could not find container \"34b81cfd7ee5d1e0183fe2711993e5340813f109d83301b4976cc429efc8bc12\": container with ID starting with 34b81cfd7ee5d1e0183fe2711993e5340813f109d83301b4976cc429efc8bc12 not found: ID does not exist" Jan 20 17:53:32 crc kubenswrapper[4558]: I0120 17:53:32.900338 4558 scope.go:117] "RemoveContainer" containerID="9c8048073ccb7077c6ce8072e9a03b4c7c45eacbbfa51aa61971cd31bec984b4" Jan 20 17:53:32 crc kubenswrapper[4558]: E0120 17:53:32.900671 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9c8048073ccb7077c6ce8072e9a03b4c7c45eacbbfa51aa61971cd31bec984b4\": container with ID starting with 9c8048073ccb7077c6ce8072e9a03b4c7c45eacbbfa51aa61971cd31bec984b4 not found: ID does not exist" containerID="9c8048073ccb7077c6ce8072e9a03b4c7c45eacbbfa51aa61971cd31bec984b4" Jan 20 17:53:32 crc kubenswrapper[4558]: I0120 17:53:32.900700 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9c8048073ccb7077c6ce8072e9a03b4c7c45eacbbfa51aa61971cd31bec984b4"} err="failed to get container status \"9c8048073ccb7077c6ce8072e9a03b4c7c45eacbbfa51aa61971cd31bec984b4\": rpc error: code = NotFound desc = could not find container \"9c8048073ccb7077c6ce8072e9a03b4c7c45eacbbfa51aa61971cd31bec984b4\": container with ID starting with 9c8048073ccb7077c6ce8072e9a03b4c7c45eacbbfa51aa61971cd31bec984b4 not found: ID does not exist" Jan 20 17:53:32 crc kubenswrapper[4558]: I0120 17:53:32.900734 4558 scope.go:117] "RemoveContainer" containerID="2d0d3c4c9084f5b47b5268a7e1d20896d7cf8d59d93e03270dfa21fcb273c377" Jan 20 17:53:32 crc 
kubenswrapper[4558]: E0120 17:53:32.901037 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2d0d3c4c9084f5b47b5268a7e1d20896d7cf8d59d93e03270dfa21fcb273c377\": container with ID starting with 2d0d3c4c9084f5b47b5268a7e1d20896d7cf8d59d93e03270dfa21fcb273c377 not found: ID does not exist" containerID="2d0d3c4c9084f5b47b5268a7e1d20896d7cf8d59d93e03270dfa21fcb273c377" Jan 20 17:53:32 crc kubenswrapper[4558]: I0120 17:53:32.901067 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2d0d3c4c9084f5b47b5268a7e1d20896d7cf8d59d93e03270dfa21fcb273c377"} err="failed to get container status \"2d0d3c4c9084f5b47b5268a7e1d20896d7cf8d59d93e03270dfa21fcb273c377\": rpc error: code = NotFound desc = could not find container \"2d0d3c4c9084f5b47b5268a7e1d20896d7cf8d59d93e03270dfa21fcb273c377\": container with ID starting with 2d0d3c4c9084f5b47b5268a7e1d20896d7cf8d59d93e03270dfa21fcb273c377 not found: ID does not exist" Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.352525 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.435295 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51d19369-14ac-4d62-ab05-9c5830856622-combined-ca-bundle\") pod \"51d19369-14ac-4d62-ab05-9c5830856622\" (UID: \"51d19369-14ac-4d62-ab05-9c5830856622\") " Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.435370 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rvzsg\" (UniqueName: \"kubernetes.io/projected/51d19369-14ac-4d62-ab05-9c5830856622-kube-api-access-rvzsg\") pod \"51d19369-14ac-4d62-ab05-9c5830856622\" (UID: \"51d19369-14ac-4d62-ab05-9c5830856622\") " Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.435415 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/51d19369-14ac-4d62-ab05-9c5830856622-config-data\") pod \"51d19369-14ac-4d62-ab05-9c5830856622\" (UID: \"51d19369-14ac-4d62-ab05-9c5830856622\") " Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.435450 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/51d19369-14ac-4d62-ab05-9c5830856622-config-data-custom\") pod \"51d19369-14ac-4d62-ab05-9c5830856622\" (UID: \"51d19369-14ac-4d62-ab05-9c5830856622\") " Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.435528 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/51d19369-14ac-4d62-ab05-9c5830856622-etc-machine-id\") pod \"51d19369-14ac-4d62-ab05-9c5830856622\" (UID: \"51d19369-14ac-4d62-ab05-9c5830856622\") " Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.435603 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/51d19369-14ac-4d62-ab05-9c5830856622-scripts\") pod \"51d19369-14ac-4d62-ab05-9c5830856622\" (UID: \"51d19369-14ac-4d62-ab05-9c5830856622\") " Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.436541 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/host-path/51d19369-14ac-4d62-ab05-9c5830856622-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "51d19369-14ac-4d62-ab05-9c5830856622" (UID: "51d19369-14ac-4d62-ab05-9c5830856622"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.440092 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/51d19369-14ac-4d62-ab05-9c5830856622-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "51d19369-14ac-4d62-ab05-9c5830856622" (UID: "51d19369-14ac-4d62-ab05-9c5830856622"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.440700 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/51d19369-14ac-4d62-ab05-9c5830856622-kube-api-access-rvzsg" (OuterVolumeSpecName: "kube-api-access-rvzsg") pod "51d19369-14ac-4d62-ab05-9c5830856622" (UID: "51d19369-14ac-4d62-ab05-9c5830856622"). InnerVolumeSpecName "kube-api-access-rvzsg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.440859 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/51d19369-14ac-4d62-ab05-9c5830856622-scripts" (OuterVolumeSpecName: "scripts") pod "51d19369-14ac-4d62-ab05-9c5830856622" (UID: "51d19369-14ac-4d62-ab05-9c5830856622"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.472428 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/51d19369-14ac-4d62-ab05-9c5830856622-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "51d19369-14ac-4d62-ab05-9c5830856622" (UID: "51d19369-14ac-4d62-ab05-9c5830856622"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.499964 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/51d19369-14ac-4d62-ab05-9c5830856622-config-data" (OuterVolumeSpecName: "config-data") pod "51d19369-14ac-4d62-ab05-9c5830856622" (UID: "51d19369-14ac-4d62-ab05-9c5830856622"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.538735 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/51d19369-14ac-4d62-ab05-9c5830856622-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.538767 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rvzsg\" (UniqueName: \"kubernetes.io/projected/51d19369-14ac-4d62-ab05-9c5830856622-kube-api-access-rvzsg\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.538782 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/51d19369-14ac-4d62-ab05-9c5830856622-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.538794 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/51d19369-14ac-4d62-ab05-9c5830856622-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.538804 4558 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/51d19369-14ac-4d62-ab05-9c5830856622-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.538814 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/51d19369-14ac-4d62-ab05-9c5830856622-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.582998 4558 generic.go:334] "Generic (PLEG): container finished" podID="d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2" containerID="ce0da192b65bf7c5e90dd832e0212db51fd4f3e04276e89c0f7b9256d88b752a" exitCode=137 Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.583093 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2","Type":"ContainerDied","Data":"ce0da192b65bf7c5e90dd832e0212db51fd4f3e04276e89c0f7b9256d88b752a"} Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.583204 4558 scope.go:117] "RemoveContainer" containerID="4e744e9f021c29bfddbe31a0a527bc6946c7e7d3595c756668ddc79180474c63" Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.587048 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_neutron-75676d645b-l2sp7_6ea6b41c-c5ed-49c9-84f5-a932a1325aba/neutron-api/0.log" Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.587108 4558 generic.go:334] "Generic (PLEG): container finished" podID="6ea6b41c-c5ed-49c9-84f5-a932a1325aba" containerID="d26f7b1dae3611d65adf8538389e6547e7f472172ad5ffe9456136d1cfd69e59" exitCode=137 Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.587233 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-75676d645b-l2sp7" event={"ID":"6ea6b41c-c5ed-49c9-84f5-a932a1325aba","Type":"ContainerDied","Data":"d26f7b1dae3611d65adf8538389e6547e7f472172ad5ffe9456136d1cfd69e59"} Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.591957 4558 generic.go:334] "Generic (PLEG): container finished" podID="51d19369-14ac-4d62-ab05-9c5830856622" containerID="bb89698c06a3d8f0dc6700388a8558678523660d9134634c1c9e01a8b703cf50" exitCode=137 Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.591989 4558 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"51d19369-14ac-4d62-ab05-9c5830856622","Type":"ContainerDied","Data":"bb89698c06a3d8f0dc6700388a8558678523660d9134634c1c9e01a8b703cf50"} Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.592047 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.592068 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"51d19369-14ac-4d62-ab05-9c5830856622","Type":"ContainerDied","Data":"c9ec0af9db1de5295d13166fc1e3cb91462f2ab0061206e844f1c3db14529e3e"} Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.593144 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.775483 4558 scope.go:117] "RemoveContainer" containerID="dc3c169cf981fe359f960f5ef7f1e452664aacea8447f6ecb0fcdf2aa8023e7a" Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.793885 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_neutron-75676d645b-l2sp7_6ea6b41c-c5ed-49c9-84f5-a932a1325aba/neutron-api/0.log" Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.793957 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-75676d645b-l2sp7" Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.795452 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.804450 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.807274 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.815186 4558 scope.go:117] "RemoveContainer" containerID="bb89698c06a3d8f0dc6700388a8558678523660d9134634c1c9e01a8b703cf50" Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.819853 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-storage-0"] Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.840846 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/swift-storage-0"] Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.841745 4558 scope.go:117] "RemoveContainer" containerID="dc3c169cf981fe359f960f5ef7f1e452664aacea8447f6ecb0fcdf2aa8023e7a" Jan 20 17:53:33 crc kubenswrapper[4558]: E0120 17:53:33.842156 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dc3c169cf981fe359f960f5ef7f1e452664aacea8447f6ecb0fcdf2aa8023e7a\": container with ID starting with dc3c169cf981fe359f960f5ef7f1e452664aacea8447f6ecb0fcdf2aa8023e7a not found: ID does not exist" containerID="dc3c169cf981fe359f960f5ef7f1e452664aacea8447f6ecb0fcdf2aa8023e7a" Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.842200 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dc3c169cf981fe359f960f5ef7f1e452664aacea8447f6ecb0fcdf2aa8023e7a"} err="failed to get container status \"dc3c169cf981fe359f960f5ef7f1e452664aacea8447f6ecb0fcdf2aa8023e7a\": rpc error: code = NotFound desc = could not find container \"dc3c169cf981fe359f960f5ef7f1e452664aacea8447f6ecb0fcdf2aa8023e7a\": container with ID starting with dc3c169cf981fe359f960f5ef7f1e452664aacea8447f6ecb0fcdf2aa8023e7a not found: ID does not exist" Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.842225 4558 scope.go:117] "RemoveContainer" containerID="bb89698c06a3d8f0dc6700388a8558678523660d9134634c1c9e01a8b703cf50" Jan 20 17:53:33 crc kubenswrapper[4558]: E0120 17:53:33.842452 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bb89698c06a3d8f0dc6700388a8558678523660d9134634c1c9e01a8b703cf50\": container with ID starting with bb89698c06a3d8f0dc6700388a8558678523660d9134634c1c9e01a8b703cf50 not found: ID does not exist" containerID="bb89698c06a3d8f0dc6700388a8558678523660d9134634c1c9e01a8b703cf50" Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.842473 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bb89698c06a3d8f0dc6700388a8558678523660d9134634c1c9e01a8b703cf50"} err="failed to get container status \"bb89698c06a3d8f0dc6700388a8558678523660d9134634c1c9e01a8b703cf50\": rpc error: code = NotFound desc = could not find container \"bb89698c06a3d8f0dc6700388a8558678523660d9134634c1c9e01a8b703cf50\": container with ID starting with bb89698c06a3d8f0dc6700388a8558678523660d9134634c1c9e01a8b703cf50 not found: ID does not exist" Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.848595 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6ea6b41c-c5ed-49c9-84f5-a932a1325aba-internal-tls-certs\") pod \"6ea6b41c-c5ed-49c9-84f5-a932a1325aba\" (UID: \"6ea6b41c-c5ed-49c9-84f5-a932a1325aba\") " Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.848681 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"kube-api-access-xzv9p\" (UniqueName: \"kubernetes.io/projected/d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2-kube-api-access-xzv9p\") pod \"d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2\" (UID: \"d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2\") " Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.848718 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2-combined-ca-bundle\") pod \"d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2\" (UID: \"d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2\") " Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.848819 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/6ea6b41c-c5ed-49c9-84f5-a932a1325aba-config\") pod \"6ea6b41c-c5ed-49c9-84f5-a932a1325aba\" (UID: \"6ea6b41c-c5ed-49c9-84f5-a932a1325aba\") " Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.848851 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2-config-data\") pod \"d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2\" (UID: \"d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2\") " Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.848886 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ea6b41c-c5ed-49c9-84f5-a932a1325aba-combined-ca-bundle\") pod \"6ea6b41c-c5ed-49c9-84f5-a932a1325aba\" (UID: \"6ea6b41c-c5ed-49c9-84f5-a932a1325aba\") " Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.848927 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6ea6b41c-c5ed-49c9-84f5-a932a1325aba-public-tls-certs\") pod \"6ea6b41c-c5ed-49c9-84f5-a932a1325aba\" (UID: \"6ea6b41c-c5ed-49c9-84f5-a932a1325aba\") " Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.848956 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/6ea6b41c-c5ed-49c9-84f5-a932a1325aba-ovndb-tls-certs\") pod \"6ea6b41c-c5ed-49c9-84f5-a932a1325aba\" (UID: \"6ea6b41c-c5ed-49c9-84f5-a932a1325aba\") " Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.849027 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gd76d\" (UniqueName: \"kubernetes.io/projected/6ea6b41c-c5ed-49c9-84f5-a932a1325aba-kube-api-access-gd76d\") pod \"6ea6b41c-c5ed-49c9-84f5-a932a1325aba\" (UID: \"6ea6b41c-c5ed-49c9-84f5-a932a1325aba\") " Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.849068 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/6ea6b41c-c5ed-49c9-84f5-a932a1325aba-httpd-config\") pod \"6ea6b41c-c5ed-49c9-84f5-a932a1325aba\" (UID: \"6ea6b41c-c5ed-49c9-84f5-a932a1325aba\") " Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.856341 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea6b41c-c5ed-49c9-84f5-a932a1325aba-kube-api-access-gd76d" (OuterVolumeSpecName: "kube-api-access-gd76d") pod "6ea6b41c-c5ed-49c9-84f5-a932a1325aba" (UID: "6ea6b41c-c5ed-49c9-84f5-a932a1325aba"). InnerVolumeSpecName "kube-api-access-gd76d". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.859486 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea6b41c-c5ed-49c9-84f5-a932a1325aba-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "6ea6b41c-c5ed-49c9-84f5-a932a1325aba" (UID: "6ea6b41c-c5ed-49c9-84f5-a932a1325aba"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.871604 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2-kube-api-access-xzv9p" (OuterVolumeSpecName: "kube-api-access-xzv9p") pod "d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2" (UID: "d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2"). InnerVolumeSpecName "kube-api-access-xzv9p". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.874669 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2-config-data" (OuterVolumeSpecName: "config-data") pod "d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2" (UID: "d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.887428 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.891907 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2" (UID: "d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.906307 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea6b41c-c5ed-49c9-84f5-a932a1325aba-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "6ea6b41c-c5ed-49c9-84f5-a932a1325aba" (UID: "6ea6b41c-c5ed-49c9-84f5-a932a1325aba"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.909556 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea6b41c-c5ed-49c9-84f5-a932a1325aba-config" (OuterVolumeSpecName: "config") pod "6ea6b41c-c5ed-49c9-84f5-a932a1325aba" (UID: "6ea6b41c-c5ed-49c9-84f5-a932a1325aba"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.911662 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea6b41c-c5ed-49c9-84f5-a932a1325aba-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6ea6b41c-c5ed-49c9-84f5-a932a1325aba" (UID: "6ea6b41c-c5ed-49c9-84f5-a932a1325aba"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.916828 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea6b41c-c5ed-49c9-84f5-a932a1325aba-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "6ea6b41c-c5ed-49c9-84f5-a932a1325aba" (UID: "6ea6b41c-c5ed-49c9-84f5-a932a1325aba"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.923791 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea6b41c-c5ed-49c9-84f5-a932a1325aba-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "6ea6b41c-c5ed-49c9-84f5-a932a1325aba" (UID: "6ea6b41c-c5ed-49c9-84f5-a932a1325aba"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.942228 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.950647 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c16f0c4-1462-4817-9f74-9e3d93193867-config-data\") pod \"7c16f0c4-1462-4817-9f74-9e3d93193867\" (UID: \"7c16f0c4-1462-4817-9f74-9e3d93193867\") " Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.950824 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/924fa7ce-8d60-4b2f-b62b-d5e146474f71-combined-ca-bundle\") pod \"924fa7ce-8d60-4b2f-b62b-d5e146474f71\" (UID: \"924fa7ce-8d60-4b2f-b62b-d5e146474f71\") " Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.950913 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/924fa7ce-8d60-4b2f-b62b-d5e146474f71-config-data\") pod \"924fa7ce-8d60-4b2f-b62b-d5e146474f71\" (UID: \"924fa7ce-8d60-4b2f-b62b-d5e146474f71\") " Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.951067 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-skmt5\" (UniqueName: \"kubernetes.io/projected/924fa7ce-8d60-4b2f-b62b-d5e146474f71-kube-api-access-skmt5\") pod \"924fa7ce-8d60-4b2f-b62b-d5e146474f71\" (UID: \"924fa7ce-8d60-4b2f-b62b-d5e146474f71\") " Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.951144 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z8nz2\" (UniqueName: \"kubernetes.io/projected/7c16f0c4-1462-4817-9f74-9e3d93193867-kube-api-access-z8nz2\") pod \"7c16f0c4-1462-4817-9f74-9e3d93193867\" (UID: \"7c16f0c4-1462-4817-9f74-9e3d93193867\") " Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.951258 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c16f0c4-1462-4817-9f74-9e3d93193867-combined-ca-bundle\") pod \"7c16f0c4-1462-4817-9f74-9e3d93193867\" (UID: \"7c16f0c4-1462-4817-9f74-9e3d93193867\") " Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.951695 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6ea6b41c-c5ed-49c9-84f5-a932a1325aba-internal-tls-certs\") on node \"crc\" DevicePath \"\"" 
Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.951791 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xzv9p\" (UniqueName: \"kubernetes.io/projected/d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2-kube-api-access-xzv9p\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.951872 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.951939 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/6ea6b41c-c5ed-49c9-84f5-a932a1325aba-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.952048 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.952331 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ea6b41c-c5ed-49c9-84f5-a932a1325aba-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.952402 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6ea6b41c-c5ed-49c9-84f5-a932a1325aba-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.952488 4558 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/6ea6b41c-c5ed-49c9-84f5-a932a1325aba-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.952589 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gd76d\" (UniqueName: \"kubernetes.io/projected/6ea6b41c-c5ed-49c9-84f5-a932a1325aba-kube-api-access-gd76d\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.952673 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/6ea6b41c-c5ed-49c9-84f5-a932a1325aba-httpd-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.968993 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/924fa7ce-8d60-4b2f-b62b-d5e146474f71-kube-api-access-skmt5" (OuterVolumeSpecName: "kube-api-access-skmt5") pod "924fa7ce-8d60-4b2f-b62b-d5e146474f71" (UID: "924fa7ce-8d60-4b2f-b62b-d5e146474f71"). InnerVolumeSpecName "kube-api-access-skmt5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.969831 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7c16f0c4-1462-4817-9f74-9e3d93193867-kube-api-access-z8nz2" (OuterVolumeSpecName: "kube-api-access-z8nz2") pod "7c16f0c4-1462-4817-9f74-9e3d93193867" (UID: "7c16f0c4-1462-4817-9f74-9e3d93193867"). InnerVolumeSpecName "kube-api-access-z8nz2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.977347 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7c16f0c4-1462-4817-9f74-9e3d93193867-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7c16f0c4-1462-4817-9f74-9e3d93193867" (UID: "7c16f0c4-1462-4817-9f74-9e3d93193867"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.977468 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7c16f0c4-1462-4817-9f74-9e3d93193867-config-data" (OuterVolumeSpecName: "config-data") pod "7c16f0c4-1462-4817-9f74-9e3d93193867" (UID: "7c16f0c4-1462-4817-9f74-9e3d93193867"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.978711 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/924fa7ce-8d60-4b2f-b62b-d5e146474f71-config-data" (OuterVolumeSpecName: "config-data") pod "924fa7ce-8d60-4b2f-b62b-d5e146474f71" (UID: "924fa7ce-8d60-4b2f-b62b-d5e146474f71"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:33 crc kubenswrapper[4558]: I0120 17:53:33.986254 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/924fa7ce-8d60-4b2f-b62b-d5e146474f71-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "924fa7ce-8d60-4b2f-b62b-d5e146474f71" (UID: "924fa7ce-8d60-4b2f-b62b-d5e146474f71"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.037686 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.053758 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd2275b9-323a-4981-aadd-791b68003e3d-combined-ca-bundle\") pod \"cd2275b9-323a-4981-aadd-791b68003e3d\" (UID: \"cd2275b9-323a-4981-aadd-791b68003e3d\") " Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.053843 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cd2275b9-323a-4981-aadd-791b68003e3d-log-httpd\") pod \"cd2275b9-323a-4981-aadd-791b68003e3d\" (UID: \"cd2275b9-323a-4981-aadd-791b68003e3d\") " Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.053879 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cd2275b9-323a-4981-aadd-791b68003e3d-sg-core-conf-yaml\") pod \"cd2275b9-323a-4981-aadd-791b68003e3d\" (UID: \"cd2275b9-323a-4981-aadd-791b68003e3d\") " Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.053921 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q28qx\" (UniqueName: \"kubernetes.io/projected/cd2275b9-323a-4981-aadd-791b68003e3d-kube-api-access-q28qx\") pod \"cd2275b9-323a-4981-aadd-791b68003e3d\" (UID: \"cd2275b9-323a-4981-aadd-791b68003e3d\") " Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.053954 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cd2275b9-323a-4981-aadd-791b68003e3d-scripts\") pod \"cd2275b9-323a-4981-aadd-791b68003e3d\" (UID: \"cd2275b9-323a-4981-aadd-791b68003e3d\") " Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.053988 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cd2275b9-323a-4981-aadd-791b68003e3d-run-httpd\") pod \"cd2275b9-323a-4981-aadd-791b68003e3d\" (UID: \"cd2275b9-323a-4981-aadd-791b68003e3d\") " Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.054021 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/cd2275b9-323a-4981-aadd-791b68003e3d-ceilometer-tls-certs\") pod \"cd2275b9-323a-4981-aadd-791b68003e3d\" (UID: \"cd2275b9-323a-4981-aadd-791b68003e3d\") " Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.054065 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd2275b9-323a-4981-aadd-791b68003e3d-config-data\") pod \"cd2275b9-323a-4981-aadd-791b68003e3d\" (UID: \"cd2275b9-323a-4981-aadd-791b68003e3d\") " Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.054908 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cd2275b9-323a-4981-aadd-791b68003e3d-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "cd2275b9-323a-4981-aadd-791b68003e3d" (UID: "cd2275b9-323a-4981-aadd-791b68003e3d"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.055902 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c16f0c4-1462-4817-9f74-9e3d93193867-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.055942 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c16f0c4-1462-4817-9f74-9e3d93193867-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.055975 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/924fa7ce-8d60-4b2f-b62b-d5e146474f71-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.055990 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/924fa7ce-8d60-4b2f-b62b-d5e146474f71-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.056008 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-skmt5\" (UniqueName: \"kubernetes.io/projected/924fa7ce-8d60-4b2f-b62b-d5e146474f71-kube-api-access-skmt5\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.056021 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z8nz2\" (UniqueName: \"kubernetes.io/projected/7c16f0c4-1462-4817-9f74-9e3d93193867-kube-api-access-z8nz2\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.058669 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd2275b9-323a-4981-aadd-791b68003e3d-kube-api-access-q28qx" (OuterVolumeSpecName: "kube-api-access-q28qx") pod "cd2275b9-323a-4981-aadd-791b68003e3d" (UID: "cd2275b9-323a-4981-aadd-791b68003e3d"). InnerVolumeSpecName "kube-api-access-q28qx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.061471 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cd2275b9-323a-4981-aadd-791b68003e3d-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "cd2275b9-323a-4981-aadd-791b68003e3d" (UID: "cd2275b9-323a-4981-aadd-791b68003e3d"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.063838 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd2275b9-323a-4981-aadd-791b68003e3d-scripts" (OuterVolumeSpecName: "scripts") pod "cd2275b9-323a-4981-aadd-791b68003e3d" (UID: "cd2275b9-323a-4981-aadd-791b68003e3d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.098363 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd2275b9-323a-4981-aadd-791b68003e3d-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "cd2275b9-323a-4981-aadd-791b68003e3d" (UID: "cd2275b9-323a-4981-aadd-791b68003e3d"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.099367 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd2275b9-323a-4981-aadd-791b68003e3d-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "cd2275b9-323a-4981-aadd-791b68003e3d" (UID: "cd2275b9-323a-4981-aadd-791b68003e3d"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.117867 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd2275b9-323a-4981-aadd-791b68003e3d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cd2275b9-323a-4981-aadd-791b68003e3d" (UID: "cd2275b9-323a-4981-aadd-791b68003e3d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.125278 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd2275b9-323a-4981-aadd-791b68003e3d-config-data" (OuterVolumeSpecName: "config-data") pod "cd2275b9-323a-4981-aadd-791b68003e3d" (UID: "cd2275b9-323a-4981-aadd-791b68003e3d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.157903 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q28qx\" (UniqueName: \"kubernetes.io/projected/cd2275b9-323a-4981-aadd-791b68003e3d-kube-api-access-q28qx\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.157930 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cd2275b9-323a-4981-aadd-791b68003e3d-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.157943 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cd2275b9-323a-4981-aadd-791b68003e3d-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.157956 4558 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/cd2275b9-323a-4981-aadd-791b68003e3d-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.157965 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd2275b9-323a-4981-aadd-791b68003e3d-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.157976 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd2275b9-323a-4981-aadd-791b68003e3d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.157986 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cd2275b9-323a-4981-aadd-791b68003e3d-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.157996 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cd2275b9-323a-4981-aadd-791b68003e3d-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:34 crc kubenswrapper[4558]: 
I0120 17:53:34.583576 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="51d19369-14ac-4d62-ab05-9c5830856622" path="/var/lib/kubelet/pods/51d19369-14ac-4d62-ab05-9c5830856622/volumes" Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.584640 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" path="/var/lib/kubelet/pods/64d029c7-79c6-40fc-b4d2-ac5bfbc387a0/volumes" Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.612314 4558 generic.go:334] "Generic (PLEG): container finished" podID="7c16f0c4-1462-4817-9f74-9e3d93193867" containerID="b4a7a63ae5b4336f1c55496b20962afe4a27040b0e22d712c664548644815e60" exitCode=137 Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.612416 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.612418 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"7c16f0c4-1462-4817-9f74-9e3d93193867","Type":"ContainerDied","Data":"b4a7a63ae5b4336f1c55496b20962afe4a27040b0e22d712c664548644815e60"} Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.612828 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"7c16f0c4-1462-4817-9f74-9e3d93193867","Type":"ContainerDied","Data":"9de1eff2e0c32d007cbfdafaced6282266cc5b1f6b59c6f9b84efec9b076c0a5"} Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.612891 4558 scope.go:117] "RemoveContainer" containerID="b4a7a63ae5b4336f1c55496b20962afe4a27040b0e22d712c664548644815e60" Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.616538 4558 generic.go:334] "Generic (PLEG): container finished" podID="924fa7ce-8d60-4b2f-b62b-d5e146474f71" containerID="d99379340565779627596cecb15d625f9bcb83d751ea4f5f14fb7cb8bcb8a6b6" exitCode=137 Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.616627 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"924fa7ce-8d60-4b2f-b62b-d5e146474f71","Type":"ContainerDied","Data":"d99379340565779627596cecb15d625f9bcb83d751ea4f5f14fb7cb8bcb8a6b6"} Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.616664 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"924fa7ce-8d60-4b2f-b62b-d5e146474f71","Type":"ContainerDied","Data":"4ecb84690e1b7faec0f2af080daf12cc500aff5c5becfc57730c4fde59e298fb"} Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.618559 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.623121 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.623767 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2","Type":"ContainerDied","Data":"0240486ebec76254c8e91fdf408a294730073f9a2f2c48ced46cd50130092549"} Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.627990 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_neutron-75676d645b-l2sp7_6ea6b41c-c5ed-49c9-84f5-a932a1325aba/neutron-api/0.log" Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.628108 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-75676d645b-l2sp7" event={"ID":"6ea6b41c-c5ed-49c9-84f5-a932a1325aba","Type":"ContainerDied","Data":"bf495499deb1c53f2fdace9640958cbca06cba68a6394bf86a0febaaab0a0a8d"} Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.628150 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-75676d645b-l2sp7" Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.631957 4558 generic.go:334] "Generic (PLEG): container finished" podID="cd2275b9-323a-4981-aadd-791b68003e3d" containerID="0833d5463f07e5954d4e2d0a3a43a4419ed8daf27634dc726b1fc4ce59dfc2f9" exitCode=137 Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.631994 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"cd2275b9-323a-4981-aadd-791b68003e3d","Type":"ContainerDied","Data":"0833d5463f07e5954d4e2d0a3a43a4419ed8daf27634dc726b1fc4ce59dfc2f9"} Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.632040 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"cd2275b9-323a-4981-aadd-791b68003e3d","Type":"ContainerDied","Data":"08fb0cc064b0e71b60496981ed405bce8b4eb865b8fb44017bbdaeda99812fe7"} Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.632332 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.653058 4558 scope.go:117] "RemoveContainer" containerID="b5d4742f71c4c1a0e14ccc61fbcd3c7bb3f43da7229787caafda16955ac74e7d" Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.665053 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.671536 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.676494 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-75676d645b-l2sp7"] Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.684669 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-75676d645b-l2sp7"] Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.690062 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.694242 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.695663 4558 scope.go:117] "RemoveContainer" containerID="b4a7a63ae5b4336f1c55496b20962afe4a27040b0e22d712c664548644815e60" Jan 20 17:53:34 crc kubenswrapper[4558]: E0120 17:53:34.696158 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b4a7a63ae5b4336f1c55496b20962afe4a27040b0e22d712c664548644815e60\": container with ID starting with b4a7a63ae5b4336f1c55496b20962afe4a27040b0e22d712c664548644815e60 not found: ID does not exist" containerID="b4a7a63ae5b4336f1c55496b20962afe4a27040b0e22d712c664548644815e60" Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.696222 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b4a7a63ae5b4336f1c55496b20962afe4a27040b0e22d712c664548644815e60"} err="failed to get container status \"b4a7a63ae5b4336f1c55496b20962afe4a27040b0e22d712c664548644815e60\": rpc error: code = NotFound desc = could not find container \"b4a7a63ae5b4336f1c55496b20962afe4a27040b0e22d712c664548644815e60\": container with ID starting with b4a7a63ae5b4336f1c55496b20962afe4a27040b0e22d712c664548644815e60 not found: ID does not exist" Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.696270 4558 scope.go:117] "RemoveContainer" containerID="b5d4742f71c4c1a0e14ccc61fbcd3c7bb3f43da7229787caafda16955ac74e7d" Jan 20 17:53:34 crc kubenswrapper[4558]: E0120 17:53:34.696613 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b5d4742f71c4c1a0e14ccc61fbcd3c7bb3f43da7229787caafda16955ac74e7d\": container with ID starting with b5d4742f71c4c1a0e14ccc61fbcd3c7bb3f43da7229787caafda16955ac74e7d not found: ID does not exist" containerID="b5d4742f71c4c1a0e14ccc61fbcd3c7bb3f43da7229787caafda16955ac74e7d" Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.696640 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b5d4742f71c4c1a0e14ccc61fbcd3c7bb3f43da7229787caafda16955ac74e7d"} err="failed to get container status \"b5d4742f71c4c1a0e14ccc61fbcd3c7bb3f43da7229787caafda16955ac74e7d\": rpc error: code = NotFound desc = could not find container 
\"b5d4742f71c4c1a0e14ccc61fbcd3c7bb3f43da7229787caafda16955ac74e7d\": container with ID starting with b5d4742f71c4c1a0e14ccc61fbcd3c7bb3f43da7229787caafda16955ac74e7d not found: ID does not exist" Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.696665 4558 scope.go:117] "RemoveContainer" containerID="d99379340565779627596cecb15d625f9bcb83d751ea4f5f14fb7cb8bcb8a6b6" Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.699661 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.704183 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.709222 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.710693 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.717728 4558 scope.go:117] "RemoveContainer" containerID="70539f03bf1b2ace01f24a9072ce8275fb4f5d92a181a991e07e1469387f4f02" Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.746447 4558 scope.go:117] "RemoveContainer" containerID="d99379340565779627596cecb15d625f9bcb83d751ea4f5f14fb7cb8bcb8a6b6" Jan 20 17:53:34 crc kubenswrapper[4558]: E0120 17:53:34.746761 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d99379340565779627596cecb15d625f9bcb83d751ea4f5f14fb7cb8bcb8a6b6\": container with ID starting with d99379340565779627596cecb15d625f9bcb83d751ea4f5f14fb7cb8bcb8a6b6 not found: ID does not exist" containerID="d99379340565779627596cecb15d625f9bcb83d751ea4f5f14fb7cb8bcb8a6b6" Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.746791 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d99379340565779627596cecb15d625f9bcb83d751ea4f5f14fb7cb8bcb8a6b6"} err="failed to get container status \"d99379340565779627596cecb15d625f9bcb83d751ea4f5f14fb7cb8bcb8a6b6\": rpc error: code = NotFound desc = could not find container \"d99379340565779627596cecb15d625f9bcb83d751ea4f5f14fb7cb8bcb8a6b6\": container with ID starting with d99379340565779627596cecb15d625f9bcb83d751ea4f5f14fb7cb8bcb8a6b6 not found: ID does not exist" Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.746814 4558 scope.go:117] "RemoveContainer" containerID="70539f03bf1b2ace01f24a9072ce8275fb4f5d92a181a991e07e1469387f4f02" Jan 20 17:53:34 crc kubenswrapper[4558]: E0120 17:53:34.747071 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"70539f03bf1b2ace01f24a9072ce8275fb4f5d92a181a991e07e1469387f4f02\": container with ID starting with 70539f03bf1b2ace01f24a9072ce8275fb4f5d92a181a991e07e1469387f4f02 not found: ID does not exist" containerID="70539f03bf1b2ace01f24a9072ce8275fb4f5d92a181a991e07e1469387f4f02" Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.747092 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"70539f03bf1b2ace01f24a9072ce8275fb4f5d92a181a991e07e1469387f4f02"} err="failed to get container status \"70539f03bf1b2ace01f24a9072ce8275fb4f5d92a181a991e07e1469387f4f02\": rpc error: code = NotFound desc = could not find container 
\"70539f03bf1b2ace01f24a9072ce8275fb4f5d92a181a991e07e1469387f4f02\": container with ID starting with 70539f03bf1b2ace01f24a9072ce8275fb4f5d92a181a991e07e1469387f4f02 not found: ID does not exist" Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.747105 4558 scope.go:117] "RemoveContainer" containerID="ce0da192b65bf7c5e90dd832e0212db51fd4f3e04276e89c0f7b9256d88b752a" Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.759933 4558 scope.go:117] "RemoveContainer" containerID="a10e865cdae4227bcd9b239722ad2d2d6bc501887291e7983d1cf31b29c7b76e" Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.780157 4558 scope.go:117] "RemoveContainer" containerID="d26f7b1dae3611d65adf8538389e6547e7f472172ad5ffe9456136d1cfd69e59" Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.796924 4558 scope.go:117] "RemoveContainer" containerID="7c70db500708c288ba013d8ce9dfe939d968f0b17f97ccf964db23badf9b4e95" Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.811776 4558 scope.go:117] "RemoveContainer" containerID="71e0601a2bf0479fed8749492b6488623cab0139872e72819a3a90baa127e93a" Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.827252 4558 scope.go:117] "RemoveContainer" containerID="0833d5463f07e5954d4e2d0a3a43a4419ed8daf27634dc726b1fc4ce59dfc2f9" Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.842669 4558 scope.go:117] "RemoveContainer" containerID="0da9214e9e0f3d0fbf917b83ce287f9004e70a70097ee2a38aa219e649adaa78" Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.858124 4558 scope.go:117] "RemoveContainer" containerID="7c70db500708c288ba013d8ce9dfe939d968f0b17f97ccf964db23badf9b4e95" Jan 20 17:53:34 crc kubenswrapper[4558]: E0120 17:53:34.858525 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7c70db500708c288ba013d8ce9dfe939d968f0b17f97ccf964db23badf9b4e95\": container with ID starting with 7c70db500708c288ba013d8ce9dfe939d968f0b17f97ccf964db23badf9b4e95 not found: ID does not exist" containerID="7c70db500708c288ba013d8ce9dfe939d968f0b17f97ccf964db23badf9b4e95" Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.858558 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7c70db500708c288ba013d8ce9dfe939d968f0b17f97ccf964db23badf9b4e95"} err="failed to get container status \"7c70db500708c288ba013d8ce9dfe939d968f0b17f97ccf964db23badf9b4e95\": rpc error: code = NotFound desc = could not find container \"7c70db500708c288ba013d8ce9dfe939d968f0b17f97ccf964db23badf9b4e95\": container with ID starting with 7c70db500708c288ba013d8ce9dfe939d968f0b17f97ccf964db23badf9b4e95 not found: ID does not exist" Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.858579 4558 scope.go:117] "RemoveContainer" containerID="71e0601a2bf0479fed8749492b6488623cab0139872e72819a3a90baa127e93a" Jan 20 17:53:34 crc kubenswrapper[4558]: E0120 17:53:34.858876 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"71e0601a2bf0479fed8749492b6488623cab0139872e72819a3a90baa127e93a\": container with ID starting with 71e0601a2bf0479fed8749492b6488623cab0139872e72819a3a90baa127e93a not found: ID does not exist" containerID="71e0601a2bf0479fed8749492b6488623cab0139872e72819a3a90baa127e93a" Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.858911 4558 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"71e0601a2bf0479fed8749492b6488623cab0139872e72819a3a90baa127e93a"} err="failed to get container status \"71e0601a2bf0479fed8749492b6488623cab0139872e72819a3a90baa127e93a\": rpc error: code = NotFound desc = could not find container \"71e0601a2bf0479fed8749492b6488623cab0139872e72819a3a90baa127e93a\": container with ID starting with 71e0601a2bf0479fed8749492b6488623cab0139872e72819a3a90baa127e93a not found: ID does not exist" Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.858935 4558 scope.go:117] "RemoveContainer" containerID="0833d5463f07e5954d4e2d0a3a43a4419ed8daf27634dc726b1fc4ce59dfc2f9" Jan 20 17:53:34 crc kubenswrapper[4558]: E0120 17:53:34.859216 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0833d5463f07e5954d4e2d0a3a43a4419ed8daf27634dc726b1fc4ce59dfc2f9\": container with ID starting with 0833d5463f07e5954d4e2d0a3a43a4419ed8daf27634dc726b1fc4ce59dfc2f9 not found: ID does not exist" containerID="0833d5463f07e5954d4e2d0a3a43a4419ed8daf27634dc726b1fc4ce59dfc2f9" Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.859247 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0833d5463f07e5954d4e2d0a3a43a4419ed8daf27634dc726b1fc4ce59dfc2f9"} err="failed to get container status \"0833d5463f07e5954d4e2d0a3a43a4419ed8daf27634dc726b1fc4ce59dfc2f9\": rpc error: code = NotFound desc = could not find container \"0833d5463f07e5954d4e2d0a3a43a4419ed8daf27634dc726b1fc4ce59dfc2f9\": container with ID starting with 0833d5463f07e5954d4e2d0a3a43a4419ed8daf27634dc726b1fc4ce59dfc2f9 not found: ID does not exist" Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.859265 4558 scope.go:117] "RemoveContainer" containerID="0da9214e9e0f3d0fbf917b83ce287f9004e70a70097ee2a38aa219e649adaa78" Jan 20 17:53:34 crc kubenswrapper[4558]: E0120 17:53:34.859634 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0da9214e9e0f3d0fbf917b83ce287f9004e70a70097ee2a38aa219e649adaa78\": container with ID starting with 0da9214e9e0f3d0fbf917b83ce287f9004e70a70097ee2a38aa219e649adaa78 not found: ID does not exist" containerID="0da9214e9e0f3d0fbf917b83ce287f9004e70a70097ee2a38aa219e649adaa78" Jan 20 17:53:34 crc kubenswrapper[4558]: I0120 17:53:34.859722 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0da9214e9e0f3d0fbf917b83ce287f9004e70a70097ee2a38aa219e649adaa78"} err="failed to get container status \"0da9214e9e0f3d0fbf917b83ce287f9004e70a70097ee2a38aa219e649adaa78\": rpc error: code = NotFound desc = could not find container \"0da9214e9e0f3d0fbf917b83ce287f9004e70a70097ee2a38aa219e649adaa78\": container with ID starting with 0da9214e9e0f3d0fbf917b83ce287f9004e70a70097ee2a38aa219e649adaa78 not found: ID does not exist" Jan 20 17:53:35 crc kubenswrapper[4558]: I0120 17:53:35.566048 4558 scope.go:117] "RemoveContainer" containerID="4ea7fa61d8b21007a044345acec89fcebe56c2921cf0f76ff2002f44bdc88ede" Jan 20 17:53:35 crc kubenswrapper[4558]: E0120 17:53:35.566380 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:53:36 crc kubenswrapper[4558]: I0120 17:53:36.577606 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea6b41c-c5ed-49c9-84f5-a932a1325aba" path="/var/lib/kubelet/pods/6ea6b41c-c5ed-49c9-84f5-a932a1325aba/volumes" Jan 20 17:53:36 crc kubenswrapper[4558]: I0120 17:53:36.578535 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7c16f0c4-1462-4817-9f74-9e3d93193867" path="/var/lib/kubelet/pods/7c16f0c4-1462-4817-9f74-9e3d93193867/volumes" Jan 20 17:53:36 crc kubenswrapper[4558]: I0120 17:53:36.579087 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="924fa7ce-8d60-4b2f-b62b-d5e146474f71" path="/var/lib/kubelet/pods/924fa7ce-8d60-4b2f-b62b-d5e146474f71/volumes" Jan 20 17:53:36 crc kubenswrapper[4558]: I0120 17:53:36.580332 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd2275b9-323a-4981-aadd-791b68003e3d" path="/var/lib/kubelet/pods/cd2275b9-323a-4981-aadd-791b68003e3d/volumes" Jan 20 17:53:36 crc kubenswrapper[4558]: I0120 17:53:36.580996 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2" path="/var/lib/kubelet/pods/d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2/volumes" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.648772 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-5p9zr"] Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.658679 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-5p9zr"] Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.744992 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-zbmjx"] Jan 20 17:53:43 crc kubenswrapper[4558]: E0120 17:53:43.745415 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" containerName="account-replicator" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.745436 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" containerName="account-replicator" Jan 20 17:53:43 crc kubenswrapper[4558]: E0120 17:53:43.745450 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97bee565-aafb-4ccc-956b-d94ac410e50e" containerName="init" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.745457 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="97bee565-aafb-4ccc-956b-d94ac410e50e" containerName="init" Jan 20 17:53:43 crc kubenswrapper[4558]: E0120 17:53:43.745465 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c9fc067-cb23-4f72-b928-5447fb5182c1" containerName="placement-api" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.745470 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c9fc067-cb23-4f72-b928-5447fb5182c1" containerName="placement-api" Jan 20 17:53:43 crc kubenswrapper[4558]: E0120 17:53:43.745484 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4a11c40-a157-4a28-b1a3-60c211d1d0bf" containerName="setup-container" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.745490 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4a11c40-a157-4a28-b1a3-60c211d1d0bf" containerName="setup-container" Jan 20 17:53:43 crc kubenswrapper[4558]: E0120 17:53:43.745501 4558 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="088e44cc-3515-4736-aa46-721774902209" containerName="setup-container" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.745507 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="088e44cc-3515-4736-aa46-721774902209" containerName="setup-container" Jan 20 17:53:43 crc kubenswrapper[4558]: E0120 17:53:43.745516 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" containerName="object-updater" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.745522 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" containerName="object-updater" Jan 20 17:53:43 crc kubenswrapper[4558]: E0120 17:53:43.745531 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" containerName="account-server" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.745537 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" containerName="account-server" Jan 20 17:53:43 crc kubenswrapper[4558]: E0120 17:53:43.745546 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c16f0c4-1462-4817-9f74-9e3d93193867" containerName="nova-cell1-conductor-conductor" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.745552 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c16f0c4-1462-4817-9f74-9e3d93193867" containerName="nova-cell1-conductor-conductor" Jan 20 17:53:43 crc kubenswrapper[4558]: E0120 17:53:43.745560 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2" containerName="nova-scheduler-scheduler" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.745566 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2" containerName="nova-scheduler-scheduler" Jan 20 17:53:43 crc kubenswrapper[4558]: E0120 17:53:43.745573 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a905b3ac-e1b2-49f8-8034-912138028aca" containerName="memcached" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.745579 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a905b3ac-e1b2-49f8-8034-912138028aca" containerName="memcached" Jan 20 17:53:43 crc kubenswrapper[4558]: E0120 17:53:43.745586 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" containerName="object-auditor" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.745592 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" containerName="object-auditor" Jan 20 17:53:43 crc kubenswrapper[4558]: E0120 17:53:43.745602 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="924fa7ce-8d60-4b2f-b62b-d5e146474f71" containerName="nova-cell0-conductor-conductor" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.745607 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="924fa7ce-8d60-4b2f-b62b-d5e146474f71" containerName="nova-cell0-conductor-conductor" Jan 20 17:53:43 crc kubenswrapper[4558]: E0120 17:53:43.745616 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2" containerName="nova-scheduler-scheduler" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.745621 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2" containerName="nova-scheduler-scheduler" Jan 20 17:53:43 crc 
kubenswrapper[4558]: E0120 17:53:43.745629 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64e30e89-cc14-4c89-bd1b-5702bfba717c" containerName="nova-api-log" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.745633 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="64e30e89-cc14-4c89-bd1b-5702bfba717c" containerName="nova-api-log" Jan 20 17:53:43 crc kubenswrapper[4558]: E0120 17:53:43.745640 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" containerName="container-auditor" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.745645 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" containerName="container-auditor" Jan 20 17:53:43 crc kubenswrapper[4558]: E0120 17:53:43.745655 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="924fa7ce-8d60-4b2f-b62b-d5e146474f71" containerName="nova-cell0-conductor-conductor" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.745660 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="924fa7ce-8d60-4b2f-b62b-d5e146474f71" containerName="nova-cell0-conductor-conductor" Jan 20 17:53:43 crc kubenswrapper[4558]: E0120 17:53:43.745665 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" containerName="container-updater" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.745670 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" containerName="container-updater" Jan 20 17:53:43 crc kubenswrapper[4558]: E0120 17:53:43.745677 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a18e9b68-4683-415e-833e-5b363e31d461" containerName="mysql-bootstrap" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.745682 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a18e9b68-4683-415e-833e-5b363e31d461" containerName="mysql-bootstrap" Jan 20 17:53:43 crc kubenswrapper[4558]: E0120 17:53:43.745691 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" containerName="container-replicator" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.745696 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" containerName="container-replicator" Jan 20 17:53:43 crc kubenswrapper[4558]: E0120 17:53:43.745706 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="924fa7ce-8d60-4b2f-b62b-d5e146474f71" containerName="nova-cell0-conductor-conductor" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.745711 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="924fa7ce-8d60-4b2f-b62b-d5e146474f71" containerName="nova-cell0-conductor-conductor" Jan 20 17:53:43 crc kubenswrapper[4558]: E0120 17:53:43.745718 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" containerName="account-auditor" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.745724 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" containerName="account-auditor" Jan 20 17:53:43 crc kubenswrapper[4558]: E0120 17:53:43.745733 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" containerName="object-server" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.745740 4558 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" containerName="object-server" Jan 20 17:53:43 crc kubenswrapper[4558]: E0120 17:53:43.745746 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" containerName="object-expirer" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.745752 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" containerName="object-expirer" Jan 20 17:53:43 crc kubenswrapper[4558]: E0120 17:53:43.745760 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="924fa7ce-8d60-4b2f-b62b-d5e146474f71" containerName="nova-cell0-conductor-conductor" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.745765 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="924fa7ce-8d60-4b2f-b62b-d5e146474f71" containerName="nova-cell0-conductor-conductor" Jan 20 17:53:43 crc kubenswrapper[4558]: E0120 17:53:43.745777 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" containerName="account-reaper" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.745782 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" containerName="account-reaper" Jan 20 17:53:43 crc kubenswrapper[4558]: E0120 17:53:43.745791 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e638a157-4234-4339-80ef-65d818d39b73" containerName="barbican-api-log" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.745796 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e638a157-4234-4339-80ef-65d818d39b73" containerName="barbican-api-log" Jan 20 17:53:43 crc kubenswrapper[4558]: E0120 17:53:43.745804 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8101ebb9-ace1-4242-8c8b-698307c6be29" containerName="mariadb-account-create-update" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.745809 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8101ebb9-ace1-4242-8c8b-698307c6be29" containerName="mariadb-account-create-update" Jan 20 17:53:43 crc kubenswrapper[4558]: E0120 17:53:43.745817 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5eac3a8d-ee82-4e6f-bdb9-fccb82e09496" containerName="cinder-api-log" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.745822 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="5eac3a8d-ee82-4e6f-bdb9-fccb82e09496" containerName="cinder-api-log" Jan 20 17:53:43 crc kubenswrapper[4558]: E0120 17:53:43.745829 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05b3b028-51e2-49a9-9fb8-a10c096f3b27" containerName="mysql-bootstrap" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.745834 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="05b3b028-51e2-49a9-9fb8-a10c096f3b27" containerName="mysql-bootstrap" Jan 20 17:53:43 crc kubenswrapper[4558]: E0120 17:53:43.745844 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="088e44cc-3515-4736-aa46-721774902209" containerName="rabbitmq" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.745849 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="088e44cc-3515-4736-aa46-721774902209" containerName="rabbitmq" Jan 20 17:53:43 crc kubenswrapper[4558]: E0120 17:53:43.745861 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7db8891a-284a-47f5-b883-5aac563fc839" containerName="keystone-api" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 
17:53:43.745868 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7db8891a-284a-47f5-b883-5aac563fc839" containerName="keystone-api" Jan 20 17:53:43 crc kubenswrapper[4558]: E0120 17:53:43.745876 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ea6b41c-c5ed-49c9-84f5-a932a1325aba" containerName="neutron-api" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.745881 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ea6b41c-c5ed-49c9-84f5-a932a1325aba" containerName="neutron-api" Jan 20 17:53:43 crc kubenswrapper[4558]: E0120 17:53:43.745891 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d535532-403f-4d55-8138-7e09287d0108" containerName="glance-log" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.745896 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d535532-403f-4d55-8138-7e09287d0108" containerName="glance-log" Jan 20 17:53:43 crc kubenswrapper[4558]: E0120 17:53:43.745908 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05b3b028-51e2-49a9-9fb8-a10c096f3b27" containerName="galera" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.745913 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="05b3b028-51e2-49a9-9fb8-a10c096f3b27" containerName="galera" Jan 20 17:53:43 crc kubenswrapper[4558]: E0120 17:53:43.745922 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a18e9b68-4683-415e-833e-5b363e31d461" containerName="galera" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.745927 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a18e9b68-4683-415e-833e-5b363e31d461" containerName="galera" Jan 20 17:53:43 crc kubenswrapper[4558]: E0120 17:53:43.745957 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="51d19369-14ac-4d62-ab05-9c5830856622" containerName="probe" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.745963 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="51d19369-14ac-4d62-ab05-9c5830856622" containerName="probe" Jan 20 17:53:43 crc kubenswrapper[4558]: E0120 17:53:43.745973 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd2275b9-323a-4981-aadd-791b68003e3d" containerName="ceilometer-central-agent" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.745978 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd2275b9-323a-4981-aadd-791b68003e3d" containerName="ceilometer-central-agent" Jan 20 17:53:43 crc kubenswrapper[4558]: E0120 17:53:43.745985 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64e30e89-cc14-4c89-bd1b-5702bfba717c" containerName="nova-api-api" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.745990 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="64e30e89-cc14-4c89-bd1b-5702bfba717c" containerName="nova-api-api" Jan 20 17:53:43 crc kubenswrapper[4558]: E0120 17:53:43.745998 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ea6b41c-c5ed-49c9-84f5-a932a1325aba" containerName="neutron-httpd" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.746003 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ea6b41c-c5ed-49c9-84f5-a932a1325aba" containerName="neutron-httpd" Jan 20 17:53:43 crc kubenswrapper[4558]: E0120 17:53:43.746011 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd2275b9-323a-4981-aadd-791b68003e3d" containerName="ceilometer-notification-agent" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.746015 4558 
state_mem.go:107] "Deleted CPUSet assignment" podUID="cd2275b9-323a-4981-aadd-791b68003e3d" containerName="ceilometer-notification-agent" Jan 20 17:53:43 crc kubenswrapper[4558]: E0120 17:53:43.746022 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97bee565-aafb-4ccc-956b-d94ac410e50e" containerName="dnsmasq-dns" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.746027 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="97bee565-aafb-4ccc-956b-d94ac410e50e" containerName="dnsmasq-dns" Jan 20 17:53:43 crc kubenswrapper[4558]: E0120 17:53:43.746043 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" containerName="container-server" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.746048 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" containerName="container-server" Jan 20 17:53:43 crc kubenswrapper[4558]: E0120 17:53:43.746055 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f30aa605-d974-4d0d-aa96-5cb0d96897d6" containerName="glance-httpd" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.746060 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f30aa605-d974-4d0d-aa96-5cb0d96897d6" containerName="glance-httpd" Jan 20 17:53:43 crc kubenswrapper[4558]: E0120 17:53:43.746068 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8101ebb9-ace1-4242-8c8b-698307c6be29" containerName="mariadb-account-create-update" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.746074 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8101ebb9-ace1-4242-8c8b-698307c6be29" containerName="mariadb-account-create-update" Jan 20 17:53:43 crc kubenswrapper[4558]: E0120 17:53:43.746083 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" containerName="swift-recon-cron" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.746087 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" containerName="swift-recon-cron" Jan 20 17:53:43 crc kubenswrapper[4558]: E0120 17:53:43.746095 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="51d19369-14ac-4d62-ab05-9c5830856622" containerName="cinder-scheduler" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.746101 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="51d19369-14ac-4d62-ab05-9c5830856622" containerName="cinder-scheduler" Jan 20 17:53:43 crc kubenswrapper[4558]: E0120 17:53:43.746107 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5eac3a8d-ee82-4e6f-bdb9-fccb82e09496" containerName="cinder-api" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.746112 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="5eac3a8d-ee82-4e6f-bdb9-fccb82e09496" containerName="cinder-api" Jan 20 17:53:43 crc kubenswrapper[4558]: E0120 17:53:43.746119 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" containerName="object-replicator" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.746125 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" containerName="object-replicator" Jan 20 17:53:43 crc kubenswrapper[4558]: E0120 17:53:43.746134 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f30aa605-d974-4d0d-aa96-5cb0d96897d6" containerName="glance-log" Jan 20 17:53:43 crc 
kubenswrapper[4558]: I0120 17:53:43.746139 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f30aa605-d974-4d0d-aa96-5cb0d96897d6" containerName="glance-log" Jan 20 17:53:43 crc kubenswrapper[4558]: E0120 17:53:43.746146 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c9fc067-cb23-4f72-b928-5447fb5182c1" containerName="placement-log" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.746152 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c9fc067-cb23-4f72-b928-5447fb5182c1" containerName="placement-log" Jan 20 17:53:43 crc kubenswrapper[4558]: E0120 17:53:43.746171 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4a11c40-a157-4a28-b1a3-60c211d1d0bf" containerName="rabbitmq" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.746177 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4a11c40-a157-4a28-b1a3-60c211d1d0bf" containerName="rabbitmq" Jan 20 17:53:43 crc kubenswrapper[4558]: E0120 17:53:43.746185 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd2275b9-323a-4981-aadd-791b68003e3d" containerName="sg-core" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.746190 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd2275b9-323a-4981-aadd-791b68003e3d" containerName="sg-core" Jan 20 17:53:43 crc kubenswrapper[4558]: E0120 17:53:43.746200 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c16f0c4-1462-4817-9f74-9e3d93193867" containerName="nova-cell1-conductor-conductor" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.746205 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c16f0c4-1462-4817-9f74-9e3d93193867" containerName="nova-cell1-conductor-conductor" Jan 20 17:53:43 crc kubenswrapper[4558]: E0120 17:53:43.746211 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e638a157-4234-4339-80ef-65d818d39b73" containerName="barbican-api" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.746217 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e638a157-4234-4339-80ef-65d818d39b73" containerName="barbican-api" Jan 20 17:53:43 crc kubenswrapper[4558]: E0120 17:53:43.746225 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e7de5b3-4426-4816-9d45-9b4226333dbc" containerName="nova-metadata-metadata" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.746231 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e7de5b3-4426-4816-9d45-9b4226333dbc" containerName="nova-metadata-metadata" Jan 20 17:53:43 crc kubenswrapper[4558]: E0120 17:53:43.746240 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" containerName="rsync" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.746245 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" containerName="rsync" Jan 20 17:53:43 crc kubenswrapper[4558]: E0120 17:53:43.746255 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c16f0c4-1462-4817-9f74-9e3d93193867" containerName="nova-cell1-conductor-conductor" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.746261 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c16f0c4-1462-4817-9f74-9e3d93193867" containerName="nova-cell1-conductor-conductor" Jan 20 17:53:43 crc kubenswrapper[4558]: E0120 17:53:43.746266 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd2275b9-323a-4981-aadd-791b68003e3d" 
containerName="proxy-httpd" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.746272 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd2275b9-323a-4981-aadd-791b68003e3d" containerName="proxy-httpd" Jan 20 17:53:43 crc kubenswrapper[4558]: E0120 17:53:43.746280 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d535532-403f-4d55-8138-7e09287d0108" containerName="glance-httpd" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.746287 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d535532-403f-4d55-8138-7e09287d0108" containerName="glance-httpd" Jan 20 17:53:43 crc kubenswrapper[4558]: E0120 17:53:43.746295 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e7de5b3-4426-4816-9d45-9b4226333dbc" containerName="nova-metadata-log" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.746301 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e7de5b3-4426-4816-9d45-9b4226333dbc" containerName="nova-metadata-log" Jan 20 17:53:43 crc kubenswrapper[4558]: E0120 17:53:43.746306 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c16f0c4-1462-4817-9f74-9e3d93193867" containerName="nova-cell1-conductor-conductor" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.746311 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c16f0c4-1462-4817-9f74-9e3d93193867" containerName="nova-cell1-conductor-conductor" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.746677 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e7de5b3-4426-4816-9d45-9b4226333dbc" containerName="nova-metadata-metadata" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.746686 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="51d19369-14ac-4d62-ab05-9c5830856622" containerName="probe" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.746702 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" containerName="object-expirer" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.746713 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd2275b9-323a-4981-aadd-791b68003e3d" containerName="proxy-httpd" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.746724 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" containerName="object-updater" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.746731 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="924fa7ce-8d60-4b2f-b62b-d5e146474f71" containerName="nova-cell0-conductor-conductor" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.746736 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c4a11c40-a157-4a28-b1a3-60c211d1d0bf" containerName="rabbitmq" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.746743 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="7c16f0c4-1462-4817-9f74-9e3d93193867" containerName="nova-cell1-conductor-conductor" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.746749 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" containerName="swift-recon-cron" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.746756 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" containerName="container-auditor" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.746764 4558 
memory_manager.go:354] "RemoveStaleState removing state" podUID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" containerName="account-auditor" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.746770 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="97bee565-aafb-4ccc-956b-d94ac410e50e" containerName="dnsmasq-dns" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.746778 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="05b3b028-51e2-49a9-9fb8-a10c096f3b27" containerName="galera" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.746786 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="7d535532-403f-4d55-8138-7e09287d0108" containerName="glance-httpd" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.746792 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2" containerName="nova-scheduler-scheduler" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.746800 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e638a157-4234-4339-80ef-65d818d39b73" containerName="barbican-api" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.746806 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd2275b9-323a-4981-aadd-791b68003e3d" containerName="ceilometer-central-agent" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.746816 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd2275b9-323a-4981-aadd-791b68003e3d" containerName="ceilometer-notification-agent" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.746824 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" containerName="object-server" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.746831 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" containerName="account-server" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.746837 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="924fa7ce-8d60-4b2f-b62b-d5e146474f71" containerName="nova-cell0-conductor-conductor" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.746845 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="64e30e89-cc14-4c89-bd1b-5702bfba717c" containerName="nova-api-api" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.746854 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2" containerName="nova-scheduler-scheduler" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.746861 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="5eac3a8d-ee82-4e6f-bdb9-fccb82e09496" containerName="cinder-api-log" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.746868 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8101ebb9-ace1-4242-8c8b-698307c6be29" containerName="mariadb-account-create-update" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.746875 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" containerName="rsync" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.746882 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e638a157-4234-4339-80ef-65d818d39b73" containerName="barbican-api-log" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.746888 4558 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" containerName="account-reaper" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.746899 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="7c16f0c4-1462-4817-9f74-9e3d93193867" containerName="nova-cell1-conductor-conductor" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.746906 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="7c16f0c4-1462-4817-9f74-9e3d93193867" containerName="nova-cell1-conductor-conductor" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.746915 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a905b3ac-e1b2-49f8-8034-912138028aca" containerName="memcached" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.746924 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="7d535532-403f-4d55-8138-7e09287d0108" containerName="glance-log" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.746932 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="6ea6b41c-c5ed-49c9-84f5-a932a1325aba" containerName="neutron-httpd" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.746942 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" containerName="container-replicator" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.746951 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="0c9fc067-cb23-4f72-b928-5447fb5182c1" containerName="placement-api" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.746961 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" containerName="container-server" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.746969 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="088e44cc-3515-4736-aa46-721774902209" containerName="rabbitmq" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.746975 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="0c9fc067-cb23-4f72-b928-5447fb5182c1" containerName="placement-log" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.746982 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="7db8891a-284a-47f5-b883-5aac563fc839" containerName="keystone-api" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.746989 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" containerName="account-replicator" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.746998 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd2275b9-323a-4981-aadd-791b68003e3d" containerName="sg-core" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.747008 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a18e9b68-4683-415e-833e-5b363e31d461" containerName="galera" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.747016 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f30aa605-d974-4d0d-aa96-5cb0d96897d6" containerName="glance-httpd" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.747026 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" containerName="container-updater" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.747046 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f30aa605-d974-4d0d-aa96-5cb0d96897d6" containerName="glance-log" Jan 20 17:53:43 crc 
kubenswrapper[4558]: I0120 17:53:43.747054 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="924fa7ce-8d60-4b2f-b62b-d5e146474f71" containerName="nova-cell0-conductor-conductor" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.747063 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e7de5b3-4426-4816-9d45-9b4226333dbc" containerName="nova-metadata-log" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.747071 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="924fa7ce-8d60-4b2f-b62b-d5e146474f71" containerName="nova-cell0-conductor-conductor" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.747077 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" containerName="object-replicator" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.747083 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="6ea6b41c-c5ed-49c9-84f5-a932a1325aba" containerName="neutron-api" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.747092 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="5eac3a8d-ee82-4e6f-bdb9-fccb82e09496" containerName="cinder-api" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.747100 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="64e30e89-cc14-4c89-bd1b-5702bfba717c" containerName="nova-api-log" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.747110 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="64d029c7-79c6-40fc-b4d2-ac5bfbc387a0" containerName="object-auditor" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.747116 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="51d19369-14ac-4d62-ab05-9c5830856622" containerName="cinder-scheduler" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.748090 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-zbmjx" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.751445 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.751753 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.751917 4558 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-k9ht8" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.752101 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.754082 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-zbmjx"] Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.905280 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/d3caee60-c645-4533-86f2-972110dfdd1e-crc-storage\") pod \"crc-storage-crc-zbmjx\" (UID: \"d3caee60-c645-4533-86f2-972110dfdd1e\") " pod="crc-storage/crc-storage-crc-zbmjx" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.905340 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/d3caee60-c645-4533-86f2-972110dfdd1e-node-mnt\") pod \"crc-storage-crc-zbmjx\" (UID: \"d3caee60-c645-4533-86f2-972110dfdd1e\") " pod="crc-storage/crc-storage-crc-zbmjx" Jan 20 17:53:43 crc kubenswrapper[4558]: I0120 17:53:43.905394 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9zxmw\" (UniqueName: \"kubernetes.io/projected/d3caee60-c645-4533-86f2-972110dfdd1e-kube-api-access-9zxmw\") pod \"crc-storage-crc-zbmjx\" (UID: \"d3caee60-c645-4533-86f2-972110dfdd1e\") " pod="crc-storage/crc-storage-crc-zbmjx" Jan 20 17:53:44 crc kubenswrapper[4558]: I0120 17:53:44.006854 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/d3caee60-c645-4533-86f2-972110dfdd1e-crc-storage\") pod \"crc-storage-crc-zbmjx\" (UID: \"d3caee60-c645-4533-86f2-972110dfdd1e\") " pod="crc-storage/crc-storage-crc-zbmjx" Jan 20 17:53:44 crc kubenswrapper[4558]: I0120 17:53:44.006912 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/d3caee60-c645-4533-86f2-972110dfdd1e-node-mnt\") pod \"crc-storage-crc-zbmjx\" (UID: \"d3caee60-c645-4533-86f2-972110dfdd1e\") " pod="crc-storage/crc-storage-crc-zbmjx" Jan 20 17:53:44 crc kubenswrapper[4558]: I0120 17:53:44.006972 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9zxmw\" (UniqueName: \"kubernetes.io/projected/d3caee60-c645-4533-86f2-972110dfdd1e-kube-api-access-9zxmw\") pod \"crc-storage-crc-zbmjx\" (UID: \"d3caee60-c645-4533-86f2-972110dfdd1e\") " pod="crc-storage/crc-storage-crc-zbmjx" Jan 20 17:53:44 crc kubenswrapper[4558]: I0120 17:53:44.007247 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/d3caee60-c645-4533-86f2-972110dfdd1e-node-mnt\") pod \"crc-storage-crc-zbmjx\" (UID: \"d3caee60-c645-4533-86f2-972110dfdd1e\") " 
pod="crc-storage/crc-storage-crc-zbmjx" Jan 20 17:53:44 crc kubenswrapper[4558]: I0120 17:53:44.008611 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/d3caee60-c645-4533-86f2-972110dfdd1e-crc-storage\") pod \"crc-storage-crc-zbmjx\" (UID: \"d3caee60-c645-4533-86f2-972110dfdd1e\") " pod="crc-storage/crc-storage-crc-zbmjx" Jan 20 17:53:44 crc kubenswrapper[4558]: I0120 17:53:44.026689 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9zxmw\" (UniqueName: \"kubernetes.io/projected/d3caee60-c645-4533-86f2-972110dfdd1e-kube-api-access-9zxmw\") pod \"crc-storage-crc-zbmjx\" (UID: \"d3caee60-c645-4533-86f2-972110dfdd1e\") " pod="crc-storage/crc-storage-crc-zbmjx" Jan 20 17:53:44 crc kubenswrapper[4558]: I0120 17:53:44.073650 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-zbmjx" Jan 20 17:53:44 crc kubenswrapper[4558]: I0120 17:53:44.462520 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-zbmjx"] Jan 20 17:53:44 crc kubenswrapper[4558]: I0120 17:53:44.576684 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="28001944-650c-4394-a080-dce76faf35bb" path="/var/lib/kubelet/pods/28001944-650c-4394-a080-dce76faf35bb/volumes" Jan 20 17:53:44 crc kubenswrapper[4558]: I0120 17:53:44.737502 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-zbmjx" event={"ID":"d3caee60-c645-4533-86f2-972110dfdd1e","Type":"ContainerStarted","Data":"897b6315f41ac1d9e9f05a6b70bd8c1ea9b08aab47f2e8733cda3fd9c203f65e"} Jan 20 17:53:45 crc kubenswrapper[4558]: I0120 17:53:45.749575 4558 generic.go:334] "Generic (PLEG): container finished" podID="d3caee60-c645-4533-86f2-972110dfdd1e" containerID="c7bc5822daf31c31196384db3c359c6efebf789e0fc60bebb6e38c4e4db45622" exitCode=0 Jan 20 17:53:45 crc kubenswrapper[4558]: I0120 17:53:45.749697 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-zbmjx" event={"ID":"d3caee60-c645-4533-86f2-972110dfdd1e","Type":"ContainerDied","Data":"c7bc5822daf31c31196384db3c359c6efebf789e0fc60bebb6e38c4e4db45622"} Jan 20 17:53:46 crc kubenswrapper[4558]: I0120 17:53:46.995044 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-zbmjx" Jan 20 17:53:47 crc kubenswrapper[4558]: I0120 17:53:47.152783 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/d3caee60-c645-4533-86f2-972110dfdd1e-crc-storage\") pod \"d3caee60-c645-4533-86f2-972110dfdd1e\" (UID: \"d3caee60-c645-4533-86f2-972110dfdd1e\") " Jan 20 17:53:47 crc kubenswrapper[4558]: I0120 17:53:47.152872 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9zxmw\" (UniqueName: \"kubernetes.io/projected/d3caee60-c645-4533-86f2-972110dfdd1e-kube-api-access-9zxmw\") pod \"d3caee60-c645-4533-86f2-972110dfdd1e\" (UID: \"d3caee60-c645-4533-86f2-972110dfdd1e\") " Jan 20 17:53:47 crc kubenswrapper[4558]: I0120 17:53:47.153088 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/d3caee60-c645-4533-86f2-972110dfdd1e-node-mnt\") pod \"d3caee60-c645-4533-86f2-972110dfdd1e\" (UID: \"d3caee60-c645-4533-86f2-972110dfdd1e\") " Jan 20 17:53:47 crc kubenswrapper[4558]: I0120 17:53:47.153386 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d3caee60-c645-4533-86f2-972110dfdd1e-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "d3caee60-c645-4533-86f2-972110dfdd1e" (UID: "d3caee60-c645-4533-86f2-972110dfdd1e"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:53:47 crc kubenswrapper[4558]: I0120 17:53:47.153749 4558 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/d3caee60-c645-4533-86f2-972110dfdd1e-node-mnt\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:47 crc kubenswrapper[4558]: I0120 17:53:47.158888 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d3caee60-c645-4533-86f2-972110dfdd1e-kube-api-access-9zxmw" (OuterVolumeSpecName: "kube-api-access-9zxmw") pod "d3caee60-c645-4533-86f2-972110dfdd1e" (UID: "d3caee60-c645-4533-86f2-972110dfdd1e"). InnerVolumeSpecName "kube-api-access-9zxmw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:53:47 crc kubenswrapper[4558]: I0120 17:53:47.172084 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d3caee60-c645-4533-86f2-972110dfdd1e-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "d3caee60-c645-4533-86f2-972110dfdd1e" (UID: "d3caee60-c645-4533-86f2-972110dfdd1e"). InnerVolumeSpecName "crc-storage". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:53:47 crc kubenswrapper[4558]: I0120 17:53:47.255818 4558 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/d3caee60-c645-4533-86f2-972110dfdd1e-crc-storage\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:47 crc kubenswrapper[4558]: I0120 17:53:47.255847 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9zxmw\" (UniqueName: \"kubernetes.io/projected/d3caee60-c645-4533-86f2-972110dfdd1e-kube-api-access-9zxmw\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:47 crc kubenswrapper[4558]: I0120 17:53:47.771599 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-zbmjx" event={"ID":"d3caee60-c645-4533-86f2-972110dfdd1e","Type":"ContainerDied","Data":"897b6315f41ac1d9e9f05a6b70bd8c1ea9b08aab47f2e8733cda3fd9c203f65e"} Jan 20 17:53:47 crc kubenswrapper[4558]: I0120 17:53:47.771650 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="897b6315f41ac1d9e9f05a6b70bd8c1ea9b08aab47f2e8733cda3fd9c203f65e" Jan 20 17:53:47 crc kubenswrapper[4558]: I0120 17:53:47.771660 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-zbmjx" Jan 20 17:53:48 crc kubenswrapper[4558]: I0120 17:53:48.566567 4558 scope.go:117] "RemoveContainer" containerID="4ea7fa61d8b21007a044345acec89fcebe56c2921cf0f76ff2002f44bdc88ede" Jan 20 17:53:48 crc kubenswrapper[4558]: E0120 17:53:48.567145 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:53:49 crc kubenswrapper[4558]: I0120 17:53:49.912523 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-zbmjx"] Jan 20 17:53:49 crc kubenswrapper[4558]: I0120 17:53:49.916822 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-zbmjx"] Jan 20 17:53:50 crc kubenswrapper[4558]: I0120 17:53:50.010585 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-sc7wz"] Jan 20 17:53:50 crc kubenswrapper[4558]: E0120 17:53:50.011237 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c16f0c4-1462-4817-9f74-9e3d93193867" containerName="nova-cell1-conductor-conductor" Jan 20 17:53:50 crc kubenswrapper[4558]: I0120 17:53:50.011268 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c16f0c4-1462-4817-9f74-9e3d93193867" containerName="nova-cell1-conductor-conductor" Jan 20 17:53:50 crc kubenswrapper[4558]: E0120 17:53:50.011282 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2" containerName="nova-scheduler-scheduler" Jan 20 17:53:50 crc kubenswrapper[4558]: I0120 17:53:50.011289 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2" containerName="nova-scheduler-scheduler" Jan 20 17:53:50 crc kubenswrapper[4558]: E0120 17:53:50.011299 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2" containerName="nova-scheduler-scheduler" Jan 20 17:53:50 crc kubenswrapper[4558]: 
I0120 17:53:50.011307 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2" containerName="nova-scheduler-scheduler" Jan 20 17:53:50 crc kubenswrapper[4558]: E0120 17:53:50.011331 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3caee60-c645-4533-86f2-972110dfdd1e" containerName="storage" Jan 20 17:53:50 crc kubenswrapper[4558]: I0120 17:53:50.011337 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3caee60-c645-4533-86f2-972110dfdd1e" containerName="storage" Jan 20 17:53:50 crc kubenswrapper[4558]: E0120 17:53:50.011346 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="924fa7ce-8d60-4b2f-b62b-d5e146474f71" containerName="nova-cell0-conductor-conductor" Jan 20 17:53:50 crc kubenswrapper[4558]: I0120 17:53:50.011352 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="924fa7ce-8d60-4b2f-b62b-d5e146474f71" containerName="nova-cell0-conductor-conductor" Jan 20 17:53:50 crc kubenswrapper[4558]: I0120 17:53:50.011537 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="924fa7ce-8d60-4b2f-b62b-d5e146474f71" containerName="nova-cell0-conductor-conductor" Jan 20 17:53:50 crc kubenswrapper[4558]: I0120 17:53:50.011552 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="7c16f0c4-1462-4817-9f74-9e3d93193867" containerName="nova-cell1-conductor-conductor" Jan 20 17:53:50 crc kubenswrapper[4558]: I0120 17:53:50.011561 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2" containerName="nova-scheduler-scheduler" Jan 20 17:53:50 crc kubenswrapper[4558]: I0120 17:53:50.011577 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d9321ea5-ab3b-4f00-b4d0-f2c2f8c2deb2" containerName="nova-scheduler-scheduler" Jan 20 17:53:50 crc kubenswrapper[4558]: I0120 17:53:50.011592 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8101ebb9-ace1-4242-8c8b-698307c6be29" containerName="mariadb-account-create-update" Jan 20 17:53:50 crc kubenswrapper[4558]: I0120 17:53:50.011602 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d3caee60-c645-4533-86f2-972110dfdd1e" containerName="storage" Jan 20 17:53:50 crc kubenswrapper[4558]: I0120 17:53:50.011610 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="7c16f0c4-1462-4817-9f74-9e3d93193867" containerName="nova-cell1-conductor-conductor" Jan 20 17:53:50 crc kubenswrapper[4558]: I0120 17:53:50.012401 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-sc7wz" Jan 20 17:53:50 crc kubenswrapper[4558]: I0120 17:53:50.015220 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Jan 20 17:53:50 crc kubenswrapper[4558]: I0120 17:53:50.015467 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Jan 20 17:53:50 crc kubenswrapper[4558]: I0120 17:53:50.015568 4558 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-k9ht8" Jan 20 17:53:50 crc kubenswrapper[4558]: I0120 17:53:50.017787 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-sc7wz"] Jan 20 17:53:50 crc kubenswrapper[4558]: I0120 17:53:50.020332 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Jan 20 17:53:50 crc kubenswrapper[4558]: I0120 17:53:50.103273 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/5ae32146-24d1-41db-bc15-fdac91abf9e0-node-mnt\") pod \"crc-storage-crc-sc7wz\" (UID: \"5ae32146-24d1-41db-bc15-fdac91abf9e0\") " pod="crc-storage/crc-storage-crc-sc7wz" Jan 20 17:53:50 crc kubenswrapper[4558]: I0120 17:53:50.103321 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/5ae32146-24d1-41db-bc15-fdac91abf9e0-crc-storage\") pod \"crc-storage-crc-sc7wz\" (UID: \"5ae32146-24d1-41db-bc15-fdac91abf9e0\") " pod="crc-storage/crc-storage-crc-sc7wz" Jan 20 17:53:50 crc kubenswrapper[4558]: I0120 17:53:50.103348 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6fnqn\" (UniqueName: \"kubernetes.io/projected/5ae32146-24d1-41db-bc15-fdac91abf9e0-kube-api-access-6fnqn\") pod \"crc-storage-crc-sc7wz\" (UID: \"5ae32146-24d1-41db-bc15-fdac91abf9e0\") " pod="crc-storage/crc-storage-crc-sc7wz" Jan 20 17:53:50 crc kubenswrapper[4558]: I0120 17:53:50.204138 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/5ae32146-24d1-41db-bc15-fdac91abf9e0-node-mnt\") pod \"crc-storage-crc-sc7wz\" (UID: \"5ae32146-24d1-41db-bc15-fdac91abf9e0\") " pod="crc-storage/crc-storage-crc-sc7wz" Jan 20 17:53:50 crc kubenswrapper[4558]: I0120 17:53:50.204218 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/5ae32146-24d1-41db-bc15-fdac91abf9e0-crc-storage\") pod \"crc-storage-crc-sc7wz\" (UID: \"5ae32146-24d1-41db-bc15-fdac91abf9e0\") " pod="crc-storage/crc-storage-crc-sc7wz" Jan 20 17:53:50 crc kubenswrapper[4558]: I0120 17:53:50.204250 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6fnqn\" (UniqueName: \"kubernetes.io/projected/5ae32146-24d1-41db-bc15-fdac91abf9e0-kube-api-access-6fnqn\") pod \"crc-storage-crc-sc7wz\" (UID: \"5ae32146-24d1-41db-bc15-fdac91abf9e0\") " pod="crc-storage/crc-storage-crc-sc7wz" Jan 20 17:53:50 crc kubenswrapper[4558]: I0120 17:53:50.204535 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/5ae32146-24d1-41db-bc15-fdac91abf9e0-node-mnt\") pod \"crc-storage-crc-sc7wz\" (UID: \"5ae32146-24d1-41db-bc15-fdac91abf9e0\") " 
pod="crc-storage/crc-storage-crc-sc7wz" Jan 20 17:53:50 crc kubenswrapper[4558]: I0120 17:53:50.205014 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/5ae32146-24d1-41db-bc15-fdac91abf9e0-crc-storage\") pod \"crc-storage-crc-sc7wz\" (UID: \"5ae32146-24d1-41db-bc15-fdac91abf9e0\") " pod="crc-storage/crc-storage-crc-sc7wz" Jan 20 17:53:50 crc kubenswrapper[4558]: I0120 17:53:50.222442 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6fnqn\" (UniqueName: \"kubernetes.io/projected/5ae32146-24d1-41db-bc15-fdac91abf9e0-kube-api-access-6fnqn\") pod \"crc-storage-crc-sc7wz\" (UID: \"5ae32146-24d1-41db-bc15-fdac91abf9e0\") " pod="crc-storage/crc-storage-crc-sc7wz" Jan 20 17:53:50 crc kubenswrapper[4558]: I0120 17:53:50.330688 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-sc7wz" Jan 20 17:53:50 crc kubenswrapper[4558]: I0120 17:53:50.575288 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d3caee60-c645-4533-86f2-972110dfdd1e" path="/var/lib/kubelet/pods/d3caee60-c645-4533-86f2-972110dfdd1e/volumes" Jan 20 17:53:50 crc kubenswrapper[4558]: I0120 17:53:50.731500 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-sc7wz"] Jan 20 17:53:50 crc kubenswrapper[4558]: I0120 17:53:50.797394 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-sc7wz" event={"ID":"5ae32146-24d1-41db-bc15-fdac91abf9e0","Type":"ContainerStarted","Data":"f91acf0ce31ab4341018f9d559405bfdc4c55140647a2808105492d7d9c532e4"} Jan 20 17:53:51 crc kubenswrapper[4558]: I0120 17:53:51.808669 4558 generic.go:334] "Generic (PLEG): container finished" podID="5ae32146-24d1-41db-bc15-fdac91abf9e0" containerID="9fcb6151d92eb87c7938a28ca1addce7759bcc22899dd7f45adfd46c5466f653" exitCode=0 Jan 20 17:53:51 crc kubenswrapper[4558]: I0120 17:53:51.808788 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-sc7wz" event={"ID":"5ae32146-24d1-41db-bc15-fdac91abf9e0","Type":"ContainerDied","Data":"9fcb6151d92eb87c7938a28ca1addce7759bcc22899dd7f45adfd46c5466f653"} Jan 20 17:53:53 crc kubenswrapper[4558]: I0120 17:53:53.060713 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-sc7wz" Jan 20 17:53:53 crc kubenswrapper[4558]: I0120 17:53:53.248267 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/5ae32146-24d1-41db-bc15-fdac91abf9e0-crc-storage\") pod \"5ae32146-24d1-41db-bc15-fdac91abf9e0\" (UID: \"5ae32146-24d1-41db-bc15-fdac91abf9e0\") " Jan 20 17:53:53 crc kubenswrapper[4558]: I0120 17:53:53.248363 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6fnqn\" (UniqueName: \"kubernetes.io/projected/5ae32146-24d1-41db-bc15-fdac91abf9e0-kube-api-access-6fnqn\") pod \"5ae32146-24d1-41db-bc15-fdac91abf9e0\" (UID: \"5ae32146-24d1-41db-bc15-fdac91abf9e0\") " Jan 20 17:53:53 crc kubenswrapper[4558]: I0120 17:53:53.248404 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/5ae32146-24d1-41db-bc15-fdac91abf9e0-node-mnt\") pod \"5ae32146-24d1-41db-bc15-fdac91abf9e0\" (UID: \"5ae32146-24d1-41db-bc15-fdac91abf9e0\") " Jan 20 17:53:53 crc kubenswrapper[4558]: I0120 17:53:53.248770 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5ae32146-24d1-41db-bc15-fdac91abf9e0-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "5ae32146-24d1-41db-bc15-fdac91abf9e0" (UID: "5ae32146-24d1-41db-bc15-fdac91abf9e0"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:53:53 crc kubenswrapper[4558]: I0120 17:53:53.255970 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5ae32146-24d1-41db-bc15-fdac91abf9e0-kube-api-access-6fnqn" (OuterVolumeSpecName: "kube-api-access-6fnqn") pod "5ae32146-24d1-41db-bc15-fdac91abf9e0" (UID: "5ae32146-24d1-41db-bc15-fdac91abf9e0"). InnerVolumeSpecName "kube-api-access-6fnqn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:53:53 crc kubenswrapper[4558]: I0120 17:53:53.266631 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5ae32146-24d1-41db-bc15-fdac91abf9e0-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "5ae32146-24d1-41db-bc15-fdac91abf9e0" (UID: "5ae32146-24d1-41db-bc15-fdac91abf9e0"). InnerVolumeSpecName "crc-storage". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:53:53 crc kubenswrapper[4558]: I0120 17:53:53.350341 4558 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/5ae32146-24d1-41db-bc15-fdac91abf9e0-crc-storage\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:53 crc kubenswrapper[4558]: I0120 17:53:53.350390 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6fnqn\" (UniqueName: \"kubernetes.io/projected/5ae32146-24d1-41db-bc15-fdac91abf9e0-kube-api-access-6fnqn\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:53 crc kubenswrapper[4558]: I0120 17:53:53.350403 4558 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/5ae32146-24d1-41db-bc15-fdac91abf9e0-node-mnt\") on node \"crc\" DevicePath \"\"" Jan 20 17:53:53 crc kubenswrapper[4558]: I0120 17:53:53.831766 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-sc7wz" event={"ID":"5ae32146-24d1-41db-bc15-fdac91abf9e0","Type":"ContainerDied","Data":"f91acf0ce31ab4341018f9d559405bfdc4c55140647a2808105492d7d9c532e4"} Jan 20 17:53:53 crc kubenswrapper[4558]: I0120 17:53:53.831851 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f91acf0ce31ab4341018f9d559405bfdc4c55140647a2808105492d7d9c532e4" Jan 20 17:53:53 crc kubenswrapper[4558]: I0120 17:53:53.831954 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-sc7wz" Jan 20 17:54:03 crc kubenswrapper[4558]: I0120 17:54:03.566683 4558 scope.go:117] "RemoveContainer" containerID="4ea7fa61d8b21007a044345acec89fcebe56c2921cf0f76ff2002f44bdc88ede" Jan 20 17:54:03 crc kubenswrapper[4558]: E0120 17:54:03.567847 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:54:04 crc kubenswrapper[4558]: I0120 17:54:04.688930 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:54:04 crc kubenswrapper[4558]: E0120 17:54:04.689746 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ae32146-24d1-41db-bc15-fdac91abf9e0" containerName="storage" Jan 20 17:54:04 crc kubenswrapper[4558]: I0120 17:54:04.689765 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ae32146-24d1-41db-bc15-fdac91abf9e0" containerName="storage" Jan 20 17:54:04 crc kubenswrapper[4558]: I0120 17:54:04.689927 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ae32146-24d1-41db-bc15-fdac91abf9e0" containerName="storage" Jan 20 17:54:04 crc kubenswrapper[4558]: I0120 17:54:04.690763 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:54:04 crc kubenswrapper[4558]: I0120 17:54:04.692389 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"rabbitmq-erlang-cookie" Jan 20 17:54:04 crc kubenswrapper[4558]: I0120 17:54:04.692712 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"rabbitmq-server-conf" Jan 20 17:54:04 crc kubenswrapper[4558]: I0120 17:54:04.692916 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"rabbitmq-server-dockercfg-7t7lz" Jan 20 17:54:04 crc kubenswrapper[4558]: I0120 17:54:04.693072 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"rabbitmq-plugins-conf" Jan 20 17:54:04 crc kubenswrapper[4558]: I0120 17:54:04.693262 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"rabbitmq-config-data" Jan 20 17:54:04 crc kubenswrapper[4558]: I0120 17:54:04.694267 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-rabbitmq-svc" Jan 20 17:54:04 crc kubenswrapper[4558]: I0120 17:54:04.694796 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"rabbitmq-default-user" Jan 20 17:54:04 crc kubenswrapper[4558]: I0120 17:54:04.704728 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:54:04 crc kubenswrapper[4558]: I0120 17:54:04.814633 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/d512bec4-7ed0-43bb-b8fe-0f235f7698e5-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"d512bec4-7ed0-43bb-b8fe-0f235f7698e5\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:54:04 crc kubenswrapper[4558]: I0120 17:54:04.814783 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d512bec4-7ed0-43bb-b8fe-0f235f7698e5-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"d512bec4-7ed0-43bb-b8fe-0f235f7698e5\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:54:04 crc kubenswrapper[4558]: I0120 17:54:04.814828 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mv99z\" (UniqueName: \"kubernetes.io/projected/d512bec4-7ed0-43bb-b8fe-0f235f7698e5-kube-api-access-mv99z\") pod \"rabbitmq-server-0\" (UID: \"d512bec4-7ed0-43bb-b8fe-0f235f7698e5\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:54:04 crc kubenswrapper[4558]: I0120 17:54:04.814902 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d512bec4-7ed0-43bb-b8fe-0f235f7698e5-server-conf\") pod \"rabbitmq-server-0\" (UID: \"d512bec4-7ed0-43bb-b8fe-0f235f7698e5\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:54:04 crc kubenswrapper[4558]: I0120 17:54:04.814997 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d512bec4-7ed0-43bb-b8fe-0f235f7698e5-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"d512bec4-7ed0-43bb-b8fe-0f235f7698e5\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:54:04 crc kubenswrapper[4558]: I0120 
17:54:04.815054 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage18-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage18-crc\") pod \"rabbitmq-server-0\" (UID: \"d512bec4-7ed0-43bb-b8fe-0f235f7698e5\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:54:04 crc kubenswrapper[4558]: I0120 17:54:04.815080 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d512bec4-7ed0-43bb-b8fe-0f235f7698e5-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"d512bec4-7ed0-43bb-b8fe-0f235f7698e5\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:54:04 crc kubenswrapper[4558]: I0120 17:54:04.815125 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d512bec4-7ed0-43bb-b8fe-0f235f7698e5-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"d512bec4-7ed0-43bb-b8fe-0f235f7698e5\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:54:04 crc kubenswrapper[4558]: I0120 17:54:04.815155 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d512bec4-7ed0-43bb-b8fe-0f235f7698e5-config-data\") pod \"rabbitmq-server-0\" (UID: \"d512bec4-7ed0-43bb-b8fe-0f235f7698e5\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:54:04 crc kubenswrapper[4558]: I0120 17:54:04.815250 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d512bec4-7ed0-43bb-b8fe-0f235f7698e5-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"d512bec4-7ed0-43bb-b8fe-0f235f7698e5\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:54:04 crc kubenswrapper[4558]: I0120 17:54:04.815392 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d512bec4-7ed0-43bb-b8fe-0f235f7698e5-pod-info\") pod \"rabbitmq-server-0\" (UID: \"d512bec4-7ed0-43bb-b8fe-0f235f7698e5\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:54:04 crc kubenswrapper[4558]: I0120 17:54:04.917734 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d512bec4-7ed0-43bb-b8fe-0f235f7698e5-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"d512bec4-7ed0-43bb-b8fe-0f235f7698e5\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:54:04 crc kubenswrapper[4558]: I0120 17:54:04.917788 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d512bec4-7ed0-43bb-b8fe-0f235f7698e5-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"d512bec4-7ed0-43bb-b8fe-0f235f7698e5\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:54:04 crc kubenswrapper[4558]: I0120 17:54:04.917822 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d512bec4-7ed0-43bb-b8fe-0f235f7698e5-config-data\") pod \"rabbitmq-server-0\" (UID: \"d512bec4-7ed0-43bb-b8fe-0f235f7698e5\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:54:04 crc kubenswrapper[4558]: I0120 17:54:04.917848 4558 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d512bec4-7ed0-43bb-b8fe-0f235f7698e5-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"d512bec4-7ed0-43bb-b8fe-0f235f7698e5\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:54:04 crc kubenswrapper[4558]: I0120 17:54:04.917904 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d512bec4-7ed0-43bb-b8fe-0f235f7698e5-pod-info\") pod \"rabbitmq-server-0\" (UID: \"d512bec4-7ed0-43bb-b8fe-0f235f7698e5\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:54:04 crc kubenswrapper[4558]: I0120 17:54:04.917931 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/d512bec4-7ed0-43bb-b8fe-0f235f7698e5-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"d512bec4-7ed0-43bb-b8fe-0f235f7698e5\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:54:04 crc kubenswrapper[4558]: I0120 17:54:04.917962 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d512bec4-7ed0-43bb-b8fe-0f235f7698e5-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"d512bec4-7ed0-43bb-b8fe-0f235f7698e5\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:54:04 crc kubenswrapper[4558]: I0120 17:54:04.917986 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mv99z\" (UniqueName: \"kubernetes.io/projected/d512bec4-7ed0-43bb-b8fe-0f235f7698e5-kube-api-access-mv99z\") pod \"rabbitmq-server-0\" (UID: \"d512bec4-7ed0-43bb-b8fe-0f235f7698e5\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:54:04 crc kubenswrapper[4558]: I0120 17:54:04.918017 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d512bec4-7ed0-43bb-b8fe-0f235f7698e5-server-conf\") pod \"rabbitmq-server-0\" (UID: \"d512bec4-7ed0-43bb-b8fe-0f235f7698e5\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:54:04 crc kubenswrapper[4558]: I0120 17:54:04.918045 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d512bec4-7ed0-43bb-b8fe-0f235f7698e5-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"d512bec4-7ed0-43bb-b8fe-0f235f7698e5\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:54:04 crc kubenswrapper[4558]: I0120 17:54:04.918064 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage18-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage18-crc\") pod \"rabbitmq-server-0\" (UID: \"d512bec4-7ed0-43bb-b8fe-0f235f7698e5\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:54:04 crc kubenswrapper[4558]: I0120 17:54:04.918418 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage18-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage18-crc\") pod \"rabbitmq-server-0\" (UID: \"d512bec4-7ed0-43bb-b8fe-0f235f7698e5\") device mount path \"/mnt/openstack/pv18\"" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:54:04 crc kubenswrapper[4558]: I0120 17:54:04.929584 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: 
\"kubernetes.io/downward-api/d512bec4-7ed0-43bb-b8fe-0f235f7698e5-pod-info\") pod \"rabbitmq-server-0\" (UID: \"d512bec4-7ed0-43bb-b8fe-0f235f7698e5\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:54:04 crc kubenswrapper[4558]: I0120 17:54:04.930231 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d512bec4-7ed0-43bb-b8fe-0f235f7698e5-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"d512bec4-7ed0-43bb-b8fe-0f235f7698e5\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:54:04 crc kubenswrapper[4558]: I0120 17:54:04.930495 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d512bec4-7ed0-43bb-b8fe-0f235f7698e5-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"d512bec4-7ed0-43bb-b8fe-0f235f7698e5\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:54:04 crc kubenswrapper[4558]: I0120 17:54:04.931352 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d512bec4-7ed0-43bb-b8fe-0f235f7698e5-server-conf\") pod \"rabbitmq-server-0\" (UID: \"d512bec4-7ed0-43bb-b8fe-0f235f7698e5\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:54:04 crc kubenswrapper[4558]: I0120 17:54:04.935462 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d512bec4-7ed0-43bb-b8fe-0f235f7698e5-config-data\") pod \"rabbitmq-server-0\" (UID: \"d512bec4-7ed0-43bb-b8fe-0f235f7698e5\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:54:04 crc kubenswrapper[4558]: I0120 17:54:04.937334 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d512bec4-7ed0-43bb-b8fe-0f235f7698e5-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"d512bec4-7ed0-43bb-b8fe-0f235f7698e5\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:54:04 crc kubenswrapper[4558]: I0120 17:54:04.937956 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/d512bec4-7ed0-43bb-b8fe-0f235f7698e5-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"d512bec4-7ed0-43bb-b8fe-0f235f7698e5\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:54:04 crc kubenswrapper[4558]: I0120 17:54:04.938421 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d512bec4-7ed0-43bb-b8fe-0f235f7698e5-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"d512bec4-7ed0-43bb-b8fe-0f235f7698e5\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:54:04 crc kubenswrapper[4558]: I0120 17:54:04.941475 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d512bec4-7ed0-43bb-b8fe-0f235f7698e5-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"d512bec4-7ed0-43bb-b8fe-0f235f7698e5\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:54:04 crc kubenswrapper[4558]: I0120 17:54:04.942656 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage18-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage18-crc\") pod \"rabbitmq-server-0\" (UID: \"d512bec4-7ed0-43bb-b8fe-0f235f7698e5\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:54:04 crc 
kubenswrapper[4558]: I0120 17:54:04.949212 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-0"] Jan 20 17:54:04 crc kubenswrapper[4558]: I0120 17:54:04.951206 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:54:04 crc kubenswrapper[4558]: I0120 17:54:04.953100 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mv99z\" (UniqueName: \"kubernetes.io/projected/d512bec4-7ed0-43bb-b8fe-0f235f7698e5-kube-api-access-mv99z\") pod \"rabbitmq-server-0\" (UID: \"d512bec4-7ed0-43bb-b8fe-0f235f7698e5\") " pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:54:04 crc kubenswrapper[4558]: I0120 17:54:04.953675 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"rabbitmq-cell1-erlang-cookie" Jan 20 17:54:04 crc kubenswrapper[4558]: I0120 17:54:04.955464 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-rabbitmq-cell1-svc" Jan 20 17:54:04 crc kubenswrapper[4558]: I0120 17:54:04.956116 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"rabbitmq-cell1-default-user" Jan 20 17:54:04 crc kubenswrapper[4558]: I0120 17:54:04.956279 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"rabbitmq-cell1-server-conf" Jan 20 17:54:04 crc kubenswrapper[4558]: I0120 17:54:04.956682 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"rabbitmq-cell1-server-dockercfg-rxjtg" Jan 20 17:54:04 crc kubenswrapper[4558]: I0120 17:54:04.956799 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"rabbitmq-cell1-plugins-conf" Jan 20 17:54:04 crc kubenswrapper[4558]: I0120 17:54:04.956913 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"rabbitmq-cell1-config-data" Jan 20 17:54:04 crc kubenswrapper[4558]: I0120 17:54:04.960768 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-0"] Jan 20 17:54:05 crc kubenswrapper[4558]: I0120 17:54:05.008870 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:54:05 crc kubenswrapper[4558]: I0120 17:54:05.122539 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/948fb15d-07f1-4b25-b8d5-7d582024ef28-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"948fb15d-07f1-4b25-b8d5-7d582024ef28\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:54:05 crc kubenswrapper[4558]: I0120 17:54:05.122586 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/948fb15d-07f1-4b25-b8d5-7d582024ef28-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"948fb15d-07f1-4b25-b8d5-7d582024ef28\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:54:05 crc kubenswrapper[4558]: I0120 17:54:05.122621 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mp9f4\" (UniqueName: \"kubernetes.io/projected/948fb15d-07f1-4b25-b8d5-7d582024ef28-kube-api-access-mp9f4\") pod \"rabbitmq-cell1-server-0\" (UID: \"948fb15d-07f1-4b25-b8d5-7d582024ef28\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:54:05 crc kubenswrapper[4558]: I0120 17:54:05.122670 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/948fb15d-07f1-4b25-b8d5-7d582024ef28-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"948fb15d-07f1-4b25-b8d5-7d582024ef28\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:54:05 crc kubenswrapper[4558]: I0120 17:54:05.122692 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/948fb15d-07f1-4b25-b8d5-7d582024ef28-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"948fb15d-07f1-4b25-b8d5-7d582024ef28\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:54:05 crc kubenswrapper[4558]: I0120 17:54:05.122718 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/948fb15d-07f1-4b25-b8d5-7d582024ef28-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"948fb15d-07f1-4b25-b8d5-7d582024ef28\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:54:05 crc kubenswrapper[4558]: I0120 17:54:05.122739 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/948fb15d-07f1-4b25-b8d5-7d582024ef28-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"948fb15d-07f1-4b25-b8d5-7d582024ef28\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:54:05 crc kubenswrapper[4558]: I0120 17:54:05.122763 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/948fb15d-07f1-4b25-b8d5-7d582024ef28-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"948fb15d-07f1-4b25-b8d5-7d582024ef28\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:54:05 crc kubenswrapper[4558]: I0120 17:54:05.122803 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" 
(UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"948fb15d-07f1-4b25-b8d5-7d582024ef28\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:54:05 crc kubenswrapper[4558]: I0120 17:54:05.122835 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/948fb15d-07f1-4b25-b8d5-7d582024ef28-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"948fb15d-07f1-4b25-b8d5-7d582024ef28\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:54:05 crc kubenswrapper[4558]: I0120 17:54:05.122853 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/948fb15d-07f1-4b25-b8d5-7d582024ef28-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"948fb15d-07f1-4b25-b8d5-7d582024ef28\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:54:05 crc kubenswrapper[4558]: I0120 17:54:05.224655 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"948fb15d-07f1-4b25-b8d5-7d582024ef28\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:54:05 crc kubenswrapper[4558]: I0120 17:54:05.224940 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/948fb15d-07f1-4b25-b8d5-7d582024ef28-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"948fb15d-07f1-4b25-b8d5-7d582024ef28\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:54:05 crc kubenswrapper[4558]: I0120 17:54:05.224963 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/948fb15d-07f1-4b25-b8d5-7d582024ef28-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"948fb15d-07f1-4b25-b8d5-7d582024ef28\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:54:05 crc kubenswrapper[4558]: I0120 17:54:05.225003 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/948fb15d-07f1-4b25-b8d5-7d582024ef28-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"948fb15d-07f1-4b25-b8d5-7d582024ef28\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:54:05 crc kubenswrapper[4558]: I0120 17:54:05.225028 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/948fb15d-07f1-4b25-b8d5-7d582024ef28-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"948fb15d-07f1-4b25-b8d5-7d582024ef28\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:54:05 crc kubenswrapper[4558]: I0120 17:54:05.225063 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mp9f4\" (UniqueName: \"kubernetes.io/projected/948fb15d-07f1-4b25-b8d5-7d582024ef28-kube-api-access-mp9f4\") pod \"rabbitmq-cell1-server-0\" (UID: \"948fb15d-07f1-4b25-b8d5-7d582024ef28\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:54:05 crc kubenswrapper[4558]: I0120 17:54:05.225099 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"948fb15d-07f1-4b25-b8d5-7d582024ef28\") device mount path \"/mnt/openstack/pv02\"" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:54:05 crc kubenswrapper[4558]: I0120 17:54:05.225484 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/948fb15d-07f1-4b25-b8d5-7d582024ef28-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"948fb15d-07f1-4b25-b8d5-7d582024ef28\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:54:05 crc kubenswrapper[4558]: I0120 17:54:05.225107 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/948fb15d-07f1-4b25-b8d5-7d582024ef28-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"948fb15d-07f1-4b25-b8d5-7d582024ef28\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:54:05 crc kubenswrapper[4558]: I0120 17:54:05.225847 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/948fb15d-07f1-4b25-b8d5-7d582024ef28-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"948fb15d-07f1-4b25-b8d5-7d582024ef28\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:54:05 crc kubenswrapper[4558]: I0120 17:54:05.225911 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/948fb15d-07f1-4b25-b8d5-7d582024ef28-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"948fb15d-07f1-4b25-b8d5-7d582024ef28\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:54:05 crc kubenswrapper[4558]: I0120 17:54:05.225936 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/948fb15d-07f1-4b25-b8d5-7d582024ef28-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"948fb15d-07f1-4b25-b8d5-7d582024ef28\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:54:05 crc kubenswrapper[4558]: I0120 17:54:05.225961 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/948fb15d-07f1-4b25-b8d5-7d582024ef28-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"948fb15d-07f1-4b25-b8d5-7d582024ef28\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:54:05 crc kubenswrapper[4558]: I0120 17:54:05.226066 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/948fb15d-07f1-4b25-b8d5-7d582024ef28-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"948fb15d-07f1-4b25-b8d5-7d582024ef28\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:54:05 crc kubenswrapper[4558]: I0120 17:54:05.226421 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/948fb15d-07f1-4b25-b8d5-7d582024ef28-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"948fb15d-07f1-4b25-b8d5-7d582024ef28\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:54:05 crc kubenswrapper[4558]: I0120 17:54:05.226671 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: 
\"kubernetes.io/configmap/948fb15d-07f1-4b25-b8d5-7d582024ef28-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"948fb15d-07f1-4b25-b8d5-7d582024ef28\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:54:05 crc kubenswrapper[4558]: I0120 17:54:05.226915 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/948fb15d-07f1-4b25-b8d5-7d582024ef28-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"948fb15d-07f1-4b25-b8d5-7d582024ef28\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:54:05 crc kubenswrapper[4558]: I0120 17:54:05.235806 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/948fb15d-07f1-4b25-b8d5-7d582024ef28-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"948fb15d-07f1-4b25-b8d5-7d582024ef28\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:54:05 crc kubenswrapper[4558]: I0120 17:54:05.236397 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/948fb15d-07f1-4b25-b8d5-7d582024ef28-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"948fb15d-07f1-4b25-b8d5-7d582024ef28\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:54:05 crc kubenswrapper[4558]: I0120 17:54:05.238208 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/948fb15d-07f1-4b25-b8d5-7d582024ef28-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"948fb15d-07f1-4b25-b8d5-7d582024ef28\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:54:05 crc kubenswrapper[4558]: I0120 17:54:05.238518 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/948fb15d-07f1-4b25-b8d5-7d582024ef28-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"948fb15d-07f1-4b25-b8d5-7d582024ef28\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:54:05 crc kubenswrapper[4558]: I0120 17:54:05.243126 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mp9f4\" (UniqueName: \"kubernetes.io/projected/948fb15d-07f1-4b25-b8d5-7d582024ef28-kube-api-access-mp9f4\") pod \"rabbitmq-cell1-server-0\" (UID: \"948fb15d-07f1-4b25-b8d5-7d582024ef28\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:54:05 crc kubenswrapper[4558]: I0120 17:54:05.248235 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"948fb15d-07f1-4b25-b8d5-7d582024ef28\") " pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:54:05 crc kubenswrapper[4558]: I0120 17:54:05.286735 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:54:05 crc kubenswrapper[4558]: I0120 17:54:05.424383 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:54:05 crc kubenswrapper[4558]: I0120 17:54:05.522614 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-0"] Jan 20 17:54:05 crc kubenswrapper[4558]: I0120 17:54:05.929704 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" event={"ID":"948fb15d-07f1-4b25-b8d5-7d582024ef28","Type":"ContainerStarted","Data":"d344a258744d468aa298be757d9c716bec6943b9294fde18b017ca73f2a1c7d3"} Jan 20 17:54:05 crc kubenswrapper[4558]: I0120 17:54:05.930664 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-0" event={"ID":"d512bec4-7ed0-43bb-b8fe-0f235f7698e5","Type":"ContainerStarted","Data":"3f7922c9eee61eec77ed6df6e70ea27b3330dae72576bbf24d7bf62d888bdf0f"} Jan 20 17:54:06 crc kubenswrapper[4558]: I0120 17:54:06.599416 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:54:06 crc kubenswrapper[4558]: I0120 17:54:06.601104 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:54:06 crc kubenswrapper[4558]: I0120 17:54:06.603891 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-galera-openstack-svc" Jan 20 17:54:06 crc kubenswrapper[4558]: I0120 17:54:06.603953 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-config-data" Jan 20 17:54:06 crc kubenswrapper[4558]: I0120 17:54:06.604204 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"galera-openstack-dockercfg-nqwzz" Jan 20 17:54:06 crc kubenswrapper[4558]: I0120 17:54:06.604457 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-scripts" Jan 20 17:54:06 crc kubenswrapper[4558]: I0120 17:54:06.610201 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"combined-ca-bundle" Jan 20 17:54:06 crc kubenswrapper[4558]: I0120 17:54:06.611669 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:54:06 crc kubenswrapper[4558]: I0120 17:54:06.752650 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fc22v\" (UniqueName: \"kubernetes.io/projected/94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6-kube-api-access-fc22v\") pod \"openstack-galera-0\" (UID: \"94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:54:06 crc kubenswrapper[4558]: I0120 17:54:06.752729 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6-kolla-config\") pod \"openstack-galera-0\" (UID: \"94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:54:06 crc kubenswrapper[4558]: I0120 17:54:06.752773 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:54:06 crc kubenswrapper[4558]: I0120 17:54:06.753073 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6-config-data-generated\") pod \"openstack-galera-0\" (UID: \"94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:54:06 crc kubenswrapper[4558]: I0120 17:54:06.753256 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6-config-data-default\") pod \"openstack-galera-0\" (UID: \"94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:54:06 crc kubenswrapper[4558]: I0120 17:54:06.753315 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"openstack-galera-0\" (UID: \"94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:54:06 crc kubenswrapper[4558]: I0120 17:54:06.753546 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6-operator-scripts\") pod \"openstack-galera-0\" (UID: \"94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:54:06 crc kubenswrapper[4558]: I0120 17:54:06.753730 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:54:06 crc kubenswrapper[4558]: I0120 17:54:06.855960 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fc22v\" (UniqueName: \"kubernetes.io/projected/94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6-kube-api-access-fc22v\") pod \"openstack-galera-0\" (UID: \"94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:54:06 crc kubenswrapper[4558]: I0120 17:54:06.856021 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6-kolla-config\") pod \"openstack-galera-0\" (UID: \"94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:54:06 crc kubenswrapper[4558]: I0120 17:54:06.856057 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:54:06 crc kubenswrapper[4558]: I0120 17:54:06.856135 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: 
\"kubernetes.io/empty-dir/94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6-config-data-generated\") pod \"openstack-galera-0\" (UID: \"94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:54:06 crc kubenswrapper[4558]: I0120 17:54:06.856203 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6-config-data-default\") pod \"openstack-galera-0\" (UID: \"94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:54:06 crc kubenswrapper[4558]: I0120 17:54:06.856233 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"openstack-galera-0\" (UID: \"94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:54:06 crc kubenswrapper[4558]: I0120 17:54:06.856312 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6-operator-scripts\") pod \"openstack-galera-0\" (UID: \"94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:54:06 crc kubenswrapper[4558]: I0120 17:54:06.856579 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"openstack-galera-0\" (UID: \"94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6\") device mount path \"/mnt/openstack/pv13\"" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:54:06 crc kubenswrapper[4558]: I0120 17:54:06.856770 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6-config-data-generated\") pod \"openstack-galera-0\" (UID: \"94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:54:06 crc kubenswrapper[4558]: I0120 17:54:06.856959 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6-kolla-config\") pod \"openstack-galera-0\" (UID: \"94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:54:06 crc kubenswrapper[4558]: I0120 17:54:06.857225 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6-config-data-default\") pod \"openstack-galera-0\" (UID: \"94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:54:06 crc kubenswrapper[4558]: I0120 17:54:06.857869 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6-operator-scripts\") pod \"openstack-galera-0\" (UID: \"94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:54:06 crc kubenswrapper[4558]: I0120 17:54:06.858210 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6-galera-tls-certs\") pod \"openstack-galera-0\" (UID: 
\"94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:54:06 crc kubenswrapper[4558]: I0120 17:54:06.861315 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:54:06 crc kubenswrapper[4558]: I0120 17:54:06.862262 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:54:06 crc kubenswrapper[4558]: I0120 17:54:06.871081 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fc22v\" (UniqueName: \"kubernetes.io/projected/94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6-kube-api-access-fc22v\") pod \"openstack-galera-0\" (UID: \"94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:54:06 crc kubenswrapper[4558]: I0120 17:54:06.875692 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"openstack-galera-0\" (UID: \"94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6\") " pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:54:06 crc kubenswrapper[4558]: I0120 17:54:06.917831 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:54:07 crc kubenswrapper[4558]: I0120 17:54:07.403555 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:54:07 crc kubenswrapper[4558]: I0120 17:54:07.949843 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6","Type":"ContainerStarted","Data":"67e74c2c2f7872eecd88ff144c26d1ab77df0c8b151f063b843778fa52c1bd62"} Jan 20 17:54:07 crc kubenswrapper[4558]: I0120 17:54:07.950223 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6","Type":"ContainerStarted","Data":"33dc9a9bd3762a283e45de54452c87ac3997c2cf3329c7ef08bc3bd13dbb9b65"} Jan 20 17:54:07 crc kubenswrapper[4558]: I0120 17:54:07.951637 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" event={"ID":"948fb15d-07f1-4b25-b8d5-7d582024ef28","Type":"ContainerStarted","Data":"d5c6ace4ceeb24008b1063ad1082708520feefaaabe670144960a772ad86c13a"} Jan 20 17:54:07 crc kubenswrapper[4558]: I0120 17:54:07.953431 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-0" event={"ID":"d512bec4-7ed0-43bb-b8fe-0f235f7698e5","Type":"ContainerStarted","Data":"415bf18726a76e61bd79b39ff76bbb5fe83ed4680a0f55bfbff9af2eed5f49a4"} Jan 20 17:54:08 crc kubenswrapper[4558]: I0120 17:54:08.063674 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:54:08 crc kubenswrapper[4558]: I0120 17:54:08.065062 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:54:08 crc kubenswrapper[4558]: I0120 17:54:08.066641 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-galera-openstack-cell1-svc" Jan 20 17:54:08 crc kubenswrapper[4558]: I0120 17:54:08.067134 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-cell1-scripts" Jan 20 17:54:08 crc kubenswrapper[4558]: I0120 17:54:08.067380 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-cell1-config-data" Jan 20 17:54:08 crc kubenswrapper[4558]: I0120 17:54:08.080470 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"galera-openstack-cell1-dockercfg-d5kzn" Jan 20 17:54:08 crc kubenswrapper[4558]: I0120 17:54:08.091427 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:54:08 crc kubenswrapper[4558]: I0120 17:54:08.184052 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vdnzt\" (UniqueName: \"kubernetes.io/projected/abd1fde8-57bd-4248-a061-6ddb436501c2-kube-api-access-vdnzt\") pod \"openstack-cell1-galera-0\" (UID: \"abd1fde8-57bd-4248-a061-6ddb436501c2\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:54:08 crc kubenswrapper[4558]: I0120 17:54:08.184122 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/abd1fde8-57bd-4248-a061-6ddb436501c2-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"abd1fde8-57bd-4248-a061-6ddb436501c2\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:54:08 crc kubenswrapper[4558]: I0120 17:54:08.184146 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/abd1fde8-57bd-4248-a061-6ddb436501c2-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"abd1fde8-57bd-4248-a061-6ddb436501c2\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:54:08 crc kubenswrapper[4558]: I0120 17:54:08.184394 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage20-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage20-crc\") pod \"openstack-cell1-galera-0\" (UID: \"abd1fde8-57bd-4248-a061-6ddb436501c2\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:54:08 crc kubenswrapper[4558]: I0120 17:54:08.184466 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/abd1fde8-57bd-4248-a061-6ddb436501c2-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"abd1fde8-57bd-4248-a061-6ddb436501c2\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:54:08 crc kubenswrapper[4558]: I0120 17:54:08.184503 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/abd1fde8-57bd-4248-a061-6ddb436501c2-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"abd1fde8-57bd-4248-a061-6ddb436501c2\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:54:08 crc kubenswrapper[4558]: I0120 
17:54:08.184668 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/abd1fde8-57bd-4248-a061-6ddb436501c2-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"abd1fde8-57bd-4248-a061-6ddb436501c2\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:54:08 crc kubenswrapper[4558]: I0120 17:54:08.184700 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/abd1fde8-57bd-4248-a061-6ddb436501c2-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"abd1fde8-57bd-4248-a061-6ddb436501c2\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:54:08 crc kubenswrapper[4558]: I0120 17:54:08.258593 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:54:08 crc kubenswrapper[4558]: I0120 17:54:08.259637 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:54:08 crc kubenswrapper[4558]: I0120 17:54:08.262390 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-memcached-svc" Jan 20 17:54:08 crc kubenswrapper[4558]: I0120 17:54:08.262667 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"memcached-config-data" Jan 20 17:54:08 crc kubenswrapper[4558]: I0120 17:54:08.272939 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:54:08 crc kubenswrapper[4558]: I0120 17:54:08.273358 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"memcached-memcached-dockercfg-6xwbn" Jan 20 17:54:08 crc kubenswrapper[4558]: I0120 17:54:08.286293 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage20-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage20-crc\") pod \"openstack-cell1-galera-0\" (UID: \"abd1fde8-57bd-4248-a061-6ddb436501c2\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:54:08 crc kubenswrapper[4558]: I0120 17:54:08.286355 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/abd1fde8-57bd-4248-a061-6ddb436501c2-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"abd1fde8-57bd-4248-a061-6ddb436501c2\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:54:08 crc kubenswrapper[4558]: I0120 17:54:08.286397 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/abd1fde8-57bd-4248-a061-6ddb436501c2-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"abd1fde8-57bd-4248-a061-6ddb436501c2\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:54:08 crc kubenswrapper[4558]: I0120 17:54:08.286477 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/abd1fde8-57bd-4248-a061-6ddb436501c2-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"abd1fde8-57bd-4248-a061-6ddb436501c2\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:54:08 crc kubenswrapper[4558]: I0120 17:54:08.286511 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" 
(UniqueName: \"kubernetes.io/configmap/abd1fde8-57bd-4248-a061-6ddb436501c2-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"abd1fde8-57bd-4248-a061-6ddb436501c2\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:54:08 crc kubenswrapper[4558]: I0120 17:54:08.286596 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vdnzt\" (UniqueName: \"kubernetes.io/projected/abd1fde8-57bd-4248-a061-6ddb436501c2-kube-api-access-vdnzt\") pod \"openstack-cell1-galera-0\" (UID: \"abd1fde8-57bd-4248-a061-6ddb436501c2\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:54:08 crc kubenswrapper[4558]: I0120 17:54:08.286650 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage20-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage20-crc\") pod \"openstack-cell1-galera-0\" (UID: \"abd1fde8-57bd-4248-a061-6ddb436501c2\") device mount path \"/mnt/openstack/pv20\"" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:54:08 crc kubenswrapper[4558]: I0120 17:54:08.286663 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/abd1fde8-57bd-4248-a061-6ddb436501c2-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"abd1fde8-57bd-4248-a061-6ddb436501c2\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:54:08 crc kubenswrapper[4558]: I0120 17:54:08.286693 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/abd1fde8-57bd-4248-a061-6ddb436501c2-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"abd1fde8-57bd-4248-a061-6ddb436501c2\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:54:08 crc kubenswrapper[4558]: I0120 17:54:08.288667 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/abd1fde8-57bd-4248-a061-6ddb436501c2-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"abd1fde8-57bd-4248-a061-6ddb436501c2\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:54:08 crc kubenswrapper[4558]: I0120 17:54:08.289052 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/abd1fde8-57bd-4248-a061-6ddb436501c2-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"abd1fde8-57bd-4248-a061-6ddb436501c2\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:54:08 crc kubenswrapper[4558]: I0120 17:54:08.289268 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/abd1fde8-57bd-4248-a061-6ddb436501c2-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"abd1fde8-57bd-4248-a061-6ddb436501c2\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:54:08 crc kubenswrapper[4558]: I0120 17:54:08.289292 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/abd1fde8-57bd-4248-a061-6ddb436501c2-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"abd1fde8-57bd-4248-a061-6ddb436501c2\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:54:08 crc kubenswrapper[4558]: I0120 17:54:08.295196 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/abd1fde8-57bd-4248-a061-6ddb436501c2-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"abd1fde8-57bd-4248-a061-6ddb436501c2\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:54:08 crc kubenswrapper[4558]: I0120 17:54:08.297759 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/abd1fde8-57bd-4248-a061-6ddb436501c2-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"abd1fde8-57bd-4248-a061-6ddb436501c2\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:54:08 crc kubenswrapper[4558]: I0120 17:54:08.312187 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage20-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage20-crc\") pod \"openstack-cell1-galera-0\" (UID: \"abd1fde8-57bd-4248-a061-6ddb436501c2\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:54:08 crc kubenswrapper[4558]: I0120 17:54:08.327700 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vdnzt\" (UniqueName: \"kubernetes.io/projected/abd1fde8-57bd-4248-a061-6ddb436501c2-kube-api-access-vdnzt\") pod \"openstack-cell1-galera-0\" (UID: \"abd1fde8-57bd-4248-a061-6ddb436501c2\") " pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:54:08 crc kubenswrapper[4558]: I0120 17:54:08.381669 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:54:08 crc kubenswrapper[4558]: I0120 17:54:08.388024 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ae90d4e7-e4c5-47e4-a320-f9f6069ed69d-config-data\") pod \"memcached-0\" (UID: \"ae90d4e7-e4c5-47e4-a320-f9f6069ed69d\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:54:08 crc kubenswrapper[4558]: I0120 17:54:08.388073 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/ae90d4e7-e4c5-47e4-a320-f9f6069ed69d-kolla-config\") pod \"memcached-0\" (UID: \"ae90d4e7-e4c5-47e4-a320-f9f6069ed69d\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:54:08 crc kubenswrapper[4558]: I0120 17:54:08.388099 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/ae90d4e7-e4c5-47e4-a320-f9f6069ed69d-memcached-tls-certs\") pod \"memcached-0\" (UID: \"ae90d4e7-e4c5-47e4-a320-f9f6069ed69d\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:54:08 crc kubenswrapper[4558]: I0120 17:54:08.388138 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae90d4e7-e4c5-47e4-a320-f9f6069ed69d-combined-ca-bundle\") pod \"memcached-0\" (UID: \"ae90d4e7-e4c5-47e4-a320-f9f6069ed69d\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:54:08 crc kubenswrapper[4558]: I0120 17:54:08.388175 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9k7kh\" (UniqueName: \"kubernetes.io/projected/ae90d4e7-e4c5-47e4-a320-f9f6069ed69d-kube-api-access-9k7kh\") pod \"memcached-0\" (UID: \"ae90d4e7-e4c5-47e4-a320-f9f6069ed69d\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:54:08 
crc kubenswrapper[4558]: I0120 17:54:08.490150 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ae90d4e7-e4c5-47e4-a320-f9f6069ed69d-config-data\") pod \"memcached-0\" (UID: \"ae90d4e7-e4c5-47e4-a320-f9f6069ed69d\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:54:08 crc kubenswrapper[4558]: I0120 17:54:08.490259 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/ae90d4e7-e4c5-47e4-a320-f9f6069ed69d-kolla-config\") pod \"memcached-0\" (UID: \"ae90d4e7-e4c5-47e4-a320-f9f6069ed69d\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:54:08 crc kubenswrapper[4558]: I0120 17:54:08.490292 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/ae90d4e7-e4c5-47e4-a320-f9f6069ed69d-memcached-tls-certs\") pod \"memcached-0\" (UID: \"ae90d4e7-e4c5-47e4-a320-f9f6069ed69d\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:54:08 crc kubenswrapper[4558]: I0120 17:54:08.490390 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae90d4e7-e4c5-47e4-a320-f9f6069ed69d-combined-ca-bundle\") pod \"memcached-0\" (UID: \"ae90d4e7-e4c5-47e4-a320-f9f6069ed69d\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:54:08 crc kubenswrapper[4558]: I0120 17:54:08.490418 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9k7kh\" (UniqueName: \"kubernetes.io/projected/ae90d4e7-e4c5-47e4-a320-f9f6069ed69d-kube-api-access-9k7kh\") pod \"memcached-0\" (UID: \"ae90d4e7-e4c5-47e4-a320-f9f6069ed69d\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:54:08 crc kubenswrapper[4558]: I0120 17:54:08.491187 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/ae90d4e7-e4c5-47e4-a320-f9f6069ed69d-kolla-config\") pod \"memcached-0\" (UID: \"ae90d4e7-e4c5-47e4-a320-f9f6069ed69d\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:54:08 crc kubenswrapper[4558]: I0120 17:54:08.491206 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ae90d4e7-e4c5-47e4-a320-f9f6069ed69d-config-data\") pod \"memcached-0\" (UID: \"ae90d4e7-e4c5-47e4-a320-f9f6069ed69d\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:54:08 crc kubenswrapper[4558]: I0120 17:54:08.494763 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/ae90d4e7-e4c5-47e4-a320-f9f6069ed69d-memcached-tls-certs\") pod \"memcached-0\" (UID: \"ae90d4e7-e4c5-47e4-a320-f9f6069ed69d\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:54:08 crc kubenswrapper[4558]: I0120 17:54:08.495672 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae90d4e7-e4c5-47e4-a320-f9f6069ed69d-combined-ca-bundle\") pod \"memcached-0\" (UID: \"ae90d4e7-e4c5-47e4-a320-f9f6069ed69d\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:54:08 crc kubenswrapper[4558]: I0120 17:54:08.516478 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9k7kh\" (UniqueName: \"kubernetes.io/projected/ae90d4e7-e4c5-47e4-a320-f9f6069ed69d-kube-api-access-9k7kh\") pod 
\"memcached-0\" (UID: \"ae90d4e7-e4c5-47e4-a320-f9f6069ed69d\") " pod="openstack-kuttl-tests/memcached-0" Jan 20 17:54:08 crc kubenswrapper[4558]: I0120 17:54:08.581738 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:54:08 crc kubenswrapper[4558]: I0120 17:54:08.891212 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:54:09 crc kubenswrapper[4558]: I0120 17:54:09.002474 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"abd1fde8-57bd-4248-a061-6ddb436501c2","Type":"ContainerStarted","Data":"cb022a179e18d716044cdd929cdaba01faac24010f54b70eb0d7659569223da4"} Jan 20 17:54:09 crc kubenswrapper[4558]: I0120 17:54:09.077944 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:54:09 crc kubenswrapper[4558]: W0120 17:54:09.079394 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podae90d4e7_e4c5_47e4_a320_f9f6069ed69d.slice/crio-680237fe0cd701eb1005e70da84f58e12468bf6b7f7436a8a226c30101b73278 WatchSource:0}: Error finding container 680237fe0cd701eb1005e70da84f58e12468bf6b7f7436a8a226c30101b73278: Status 404 returned error can't find the container with id 680237fe0cd701eb1005e70da84f58e12468bf6b7f7436a8a226c30101b73278 Jan 20 17:54:09 crc kubenswrapper[4558]: I0120 17:54:09.886611 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:54:09 crc kubenswrapper[4558]: I0120 17:54:09.887600 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:54:09 crc kubenswrapper[4558]: I0120 17:54:09.889299 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"telemetry-ceilometer-dockercfg-6m4ns" Jan 20 17:54:09 crc kubenswrapper[4558]: I0120 17:54:09.902854 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:54:10 crc kubenswrapper[4558]: I0120 17:54:10.014674 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"abd1fde8-57bd-4248-a061-6ddb436501c2","Type":"ContainerStarted","Data":"243bfc24492680f9bb1aba6fa6319099535e5f0e5e0b4e27d224151dc38f4ee3"} Jan 20 17:54:10 crc kubenswrapper[4558]: I0120 17:54:10.016310 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"ae90d4e7-e4c5-47e4-a320-f9f6069ed69d","Type":"ContainerStarted","Data":"2f47c5b6e676c58c7ac4bb4634e7603f0a0fe15c650b10488e055bd1458da4c9"} Jan 20 17:54:10 crc kubenswrapper[4558]: I0120 17:54:10.016337 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"ae90d4e7-e4c5-47e4-a320-f9f6069ed69d","Type":"ContainerStarted","Data":"680237fe0cd701eb1005e70da84f58e12468bf6b7f7436a8a226c30101b73278"} Jan 20 17:54:10 crc kubenswrapper[4558]: I0120 17:54:10.016610 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:54:10 crc kubenswrapper[4558]: I0120 17:54:10.017532 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zptcv\" (UniqueName: 
\"kubernetes.io/projected/7f609e06-c77b-4219-9143-8994fac93c0e-kube-api-access-zptcv\") pod \"kube-state-metrics-0\" (UID: \"7f609e06-c77b-4219-9143-8994fac93c0e\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:54:10 crc kubenswrapper[4558]: I0120 17:54:10.047054 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/memcached-0" podStartSLOduration=2.047030296 podStartE2EDuration="2.047030296s" podCreationTimestamp="2026-01-20 17:54:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:54:10.045064769 +0000 UTC m=+4343.805402737" watchObservedRunningTime="2026-01-20 17:54:10.047030296 +0000 UTC m=+4343.807368253" Jan 20 17:54:10 crc kubenswrapper[4558]: I0120 17:54:10.119806 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zptcv\" (UniqueName: \"kubernetes.io/projected/7f609e06-c77b-4219-9143-8994fac93c0e-kube-api-access-zptcv\") pod \"kube-state-metrics-0\" (UID: \"7f609e06-c77b-4219-9143-8994fac93c0e\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:54:10 crc kubenswrapper[4558]: I0120 17:54:10.137873 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zptcv\" (UniqueName: \"kubernetes.io/projected/7f609e06-c77b-4219-9143-8994fac93c0e-kube-api-access-zptcv\") pod \"kube-state-metrics-0\" (UID: \"7f609e06-c77b-4219-9143-8994fac93c0e\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:54:10 crc kubenswrapper[4558]: I0120 17:54:10.208892 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:54:10 crc kubenswrapper[4558]: I0120 17:54:10.629396 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:54:11 crc kubenswrapper[4558]: I0120 17:54:11.028653 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"7f609e06-c77b-4219-9143-8994fac93c0e","Type":"ContainerStarted","Data":"299d0f00fce348d45c1bc2778654996557a1a58e3dbb25417ce3a33d746235f8"} Jan 20 17:54:12 crc kubenswrapper[4558]: I0120 17:54:12.038126 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"7f609e06-c77b-4219-9143-8994fac93c0e","Type":"ContainerStarted","Data":"afb05b80e47beabb7af62918a7ee87a317b8c87a34ce9be021af37bd27b5cf9e"} Jan 20 17:54:12 crc kubenswrapper[4558]: I0120 17:54:12.038497 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:54:12 crc kubenswrapper[4558]: I0120 17:54:12.039905 4558 generic.go:334] "Generic (PLEG): container finished" podID="94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6" containerID="67e74c2c2f7872eecd88ff144c26d1ab77df0c8b151f063b843778fa52c1bd62" exitCode=0 Jan 20 17:54:12 crc kubenswrapper[4558]: I0120 17:54:12.039949 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6","Type":"ContainerDied","Data":"67e74c2c2f7872eecd88ff144c26d1ab77df0c8b151f063b843778fa52c1bd62"} Jan 20 17:54:12 crc kubenswrapper[4558]: I0120 17:54:12.062526 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/kube-state-metrics-0" podStartSLOduration=2.723631316 
podStartE2EDuration="3.062512541s" podCreationTimestamp="2026-01-20 17:54:09 +0000 UTC" firstStartedPulling="2026-01-20 17:54:10.636539595 +0000 UTC m=+4344.396877562" lastFinishedPulling="2026-01-20 17:54:10.97542082 +0000 UTC m=+4344.735758787" observedRunningTime="2026-01-20 17:54:12.06149602 +0000 UTC m=+4345.821833987" watchObservedRunningTime="2026-01-20 17:54:12.062512541 +0000 UTC m=+4345.822850508" Jan 20 17:54:13 crc kubenswrapper[4558]: I0120 17:54:13.052086 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6","Type":"ContainerStarted","Data":"7444755b1efeb70ce9df8e374dcb007a995d0c3ace0bc1c4539e47db0dbb9408"} Jan 20 17:54:13 crc kubenswrapper[4558]: I0120 17:54:13.053845 4558 generic.go:334] "Generic (PLEG): container finished" podID="abd1fde8-57bd-4248-a061-6ddb436501c2" containerID="243bfc24492680f9bb1aba6fa6319099535e5f0e5e0b4e27d224151dc38f4ee3" exitCode=0 Jan 20 17:54:13 crc kubenswrapper[4558]: I0120 17:54:13.054278 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"abd1fde8-57bd-4248-a061-6ddb436501c2","Type":"ContainerDied","Data":"243bfc24492680f9bb1aba6fa6319099535e5f0e5e0b4e27d224151dc38f4ee3"} Jan 20 17:54:13 crc kubenswrapper[4558]: I0120 17:54:13.070621 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/openstack-galera-0" podStartSLOduration=8.070610195 podStartE2EDuration="8.070610195s" podCreationTimestamp="2026-01-20 17:54:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:54:13.07003293 +0000 UTC m=+4346.830370897" watchObservedRunningTime="2026-01-20 17:54:13.070610195 +0000 UTC m=+4346.830948152" Jan 20 17:54:13 crc kubenswrapper[4558]: I0120 17:54:13.788888 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:54:13 crc kubenswrapper[4558]: I0120 17:54:13.791586 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:54:13 crc kubenswrapper[4558]: I0120 17:54:13.793423 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovndbcluster-nb-config" Jan 20 17:54:13 crc kubenswrapper[4558]: I0120 17:54:13.793517 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ovncluster-ovndbcluster-nb-dockercfg-q25m2" Jan 20 17:54:13 crc kubenswrapper[4558]: I0120 17:54:13.794475 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ovndbcluster-nb-ovndbs" Jan 20 17:54:13 crc kubenswrapper[4558]: I0120 17:54:13.794668 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovndbcluster-nb-scripts" Jan 20 17:54:13 crc kubenswrapper[4558]: I0120 17:54:13.797433 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ovn-metrics" Jan 20 17:54:13 crc kubenswrapper[4558]: I0120 17:54:13.799866 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:54:13 crc kubenswrapper[4558]: I0120 17:54:13.890612 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57d5060e-37c3-45fb-8eb3-14303daa1751-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"57d5060e-37c3-45fb-8eb3-14303daa1751\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:54:13 crc kubenswrapper[4558]: I0120 17:54:13.890665 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5l7m8\" (UniqueName: \"kubernetes.io/projected/57d5060e-37c3-45fb-8eb3-14303daa1751-kube-api-access-5l7m8\") pod \"ovsdbserver-nb-0\" (UID: \"57d5060e-37c3-45fb-8eb3-14303daa1751\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:54:13 crc kubenswrapper[4558]: I0120 17:54:13.890889 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/57d5060e-37c3-45fb-8eb3-14303daa1751-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"57d5060e-37c3-45fb-8eb3-14303daa1751\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:54:13 crc kubenswrapper[4558]: I0120 17:54:13.890962 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/57d5060e-37c3-45fb-8eb3-14303daa1751-config\") pod \"ovsdbserver-nb-0\" (UID: \"57d5060e-37c3-45fb-8eb3-14303daa1751\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:54:13 crc kubenswrapper[4558]: I0120 17:54:13.891158 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"ovsdbserver-nb-0\" (UID: \"57d5060e-37c3-45fb-8eb3-14303daa1751\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:54:13 crc kubenswrapper[4558]: I0120 17:54:13.891275 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/57d5060e-37c3-45fb-8eb3-14303daa1751-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"57d5060e-37c3-45fb-8eb3-14303daa1751\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:54:13 crc kubenswrapper[4558]: 
I0120 17:54:13.891338 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/57d5060e-37c3-45fb-8eb3-14303daa1751-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"57d5060e-37c3-45fb-8eb3-14303daa1751\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:54:13 crc kubenswrapper[4558]: I0120 17:54:13.891423 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/57d5060e-37c3-45fb-8eb3-14303daa1751-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"57d5060e-37c3-45fb-8eb3-14303daa1751\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:54:13 crc kubenswrapper[4558]: I0120 17:54:13.992983 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/57d5060e-37c3-45fb-8eb3-14303daa1751-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"57d5060e-37c3-45fb-8eb3-14303daa1751\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:54:13 crc kubenswrapper[4558]: I0120 17:54:13.993062 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/57d5060e-37c3-45fb-8eb3-14303daa1751-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"57d5060e-37c3-45fb-8eb3-14303daa1751\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:54:13 crc kubenswrapper[4558]: I0120 17:54:13.993117 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57d5060e-37c3-45fb-8eb3-14303daa1751-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"57d5060e-37c3-45fb-8eb3-14303daa1751\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:54:13 crc kubenswrapper[4558]: I0120 17:54:13.993155 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5l7m8\" (UniqueName: \"kubernetes.io/projected/57d5060e-37c3-45fb-8eb3-14303daa1751-kube-api-access-5l7m8\") pod \"ovsdbserver-nb-0\" (UID: \"57d5060e-37c3-45fb-8eb3-14303daa1751\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:54:13 crc kubenswrapper[4558]: I0120 17:54:13.993221 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/57d5060e-37c3-45fb-8eb3-14303daa1751-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"57d5060e-37c3-45fb-8eb3-14303daa1751\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:54:13 crc kubenswrapper[4558]: I0120 17:54:13.993249 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/57d5060e-37c3-45fb-8eb3-14303daa1751-config\") pod \"ovsdbserver-nb-0\" (UID: \"57d5060e-37c3-45fb-8eb3-14303daa1751\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:54:13 crc kubenswrapper[4558]: I0120 17:54:13.993331 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"ovsdbserver-nb-0\" (UID: \"57d5060e-37c3-45fb-8eb3-14303daa1751\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:54:13 crc kubenswrapper[4558]: I0120 17:54:13.993365 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"scripts\" (UniqueName: \"kubernetes.io/configmap/57d5060e-37c3-45fb-8eb3-14303daa1751-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"57d5060e-37c3-45fb-8eb3-14303daa1751\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:54:13 crc kubenswrapper[4558]: I0120 17:54:13.993664 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/57d5060e-37c3-45fb-8eb3-14303daa1751-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"57d5060e-37c3-45fb-8eb3-14303daa1751\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:54:13 crc kubenswrapper[4558]: I0120 17:54:13.993707 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"ovsdbserver-nb-0\" (UID: \"57d5060e-37c3-45fb-8eb3-14303daa1751\") device mount path \"/mnt/openstack/pv01\"" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:54:13 crc kubenswrapper[4558]: I0120 17:54:13.994351 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/57d5060e-37c3-45fb-8eb3-14303daa1751-config\") pod \"ovsdbserver-nb-0\" (UID: \"57d5060e-37c3-45fb-8eb3-14303daa1751\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:54:13 crc kubenswrapper[4558]: I0120 17:54:13.994419 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/57d5060e-37c3-45fb-8eb3-14303daa1751-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"57d5060e-37c3-45fb-8eb3-14303daa1751\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:54:14 crc kubenswrapper[4558]: I0120 17:54:14.004728 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/57d5060e-37c3-45fb-8eb3-14303daa1751-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"57d5060e-37c3-45fb-8eb3-14303daa1751\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:54:14 crc kubenswrapper[4558]: I0120 17:54:14.004970 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57d5060e-37c3-45fb-8eb3-14303daa1751-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"57d5060e-37c3-45fb-8eb3-14303daa1751\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:54:14 crc kubenswrapper[4558]: I0120 17:54:14.007474 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/57d5060e-37c3-45fb-8eb3-14303daa1751-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"57d5060e-37c3-45fb-8eb3-14303daa1751\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:54:14 crc kubenswrapper[4558]: I0120 17:54:14.010731 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"ovsdbserver-nb-0\" (UID: \"57d5060e-37c3-45fb-8eb3-14303daa1751\") " pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:54:14 crc kubenswrapper[4558]: I0120 17:54:14.011262 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5l7m8\" (UniqueName: \"kubernetes.io/projected/57d5060e-37c3-45fb-8eb3-14303daa1751-kube-api-access-5l7m8\") pod \"ovsdbserver-nb-0\" (UID: \"57d5060e-37c3-45fb-8eb3-14303daa1751\") " 
pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:54:14 crc kubenswrapper[4558]: I0120 17:54:14.070436 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"abd1fde8-57bd-4248-a061-6ddb436501c2","Type":"ContainerStarted","Data":"b7311156a63404e1bd2f45a67f9cea82ec849d5cf981a05cf6aee748ad8c02bd"} Jan 20 17:54:14 crc kubenswrapper[4558]: I0120 17:54:14.093432 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/openstack-cell1-galera-0" podStartSLOduration=7.093415323 podStartE2EDuration="7.093415323s" podCreationTimestamp="2026-01-20 17:54:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:54:14.086104277 +0000 UTC m=+4347.846442244" watchObservedRunningTime="2026-01-20 17:54:14.093415323 +0000 UTC m=+4347.853753291" Jan 20 17:54:14 crc kubenswrapper[4558]: I0120 17:54:14.111999 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:54:14 crc kubenswrapper[4558]: I0120 17:54:14.523428 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:54:15 crc kubenswrapper[4558]: I0120 17:54:15.089880 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"57d5060e-37c3-45fb-8eb3-14303daa1751","Type":"ContainerStarted","Data":"f8def63bbe6d6310ffc3a7c86e33982e2c9a2be82a8a0b0727ca59a14180a4d8"} Jan 20 17:54:15 crc kubenswrapper[4558]: I0120 17:54:15.090224 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"57d5060e-37c3-45fb-8eb3-14303daa1751","Type":"ContainerStarted","Data":"e272403361b0e6ff3e6fc8379ff4809e4f17620cf357e7e67ffbb7d8edea5b98"} Jan 20 17:54:15 crc kubenswrapper[4558]: I0120 17:54:15.090239 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"57d5060e-37c3-45fb-8eb3-14303daa1751","Type":"ContainerStarted","Data":"ce0bc3206601b94f426f90ded30d58f9ccdbe1f621513ee780119ad109066911"} Jan 20 17:54:15 crc kubenswrapper[4558]: I0120 17:54:15.109438 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ovsdbserver-nb-0" podStartSLOduration=3.109418132 podStartE2EDuration="3.109418132s" podCreationTimestamp="2026-01-20 17:54:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:54:15.107674203 +0000 UTC m=+4348.868012169" watchObservedRunningTime="2026-01-20 17:54:15.109418132 +0000 UTC m=+4348.869756099" Jan 20 17:54:16 crc kubenswrapper[4558]: I0120 17:54:16.569940 4558 scope.go:117] "RemoveContainer" containerID="4ea7fa61d8b21007a044345acec89fcebe56c2921cf0f76ff2002f44bdc88ede" Jan 20 17:54:16 crc kubenswrapper[4558]: E0120 17:54:16.570951 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:54:16 crc kubenswrapper[4558]: I0120 17:54:16.872767 4558 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:54:16 crc kubenswrapper[4558]: I0120 17:54:16.874229 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:54:16 crc kubenswrapper[4558]: I0120 17:54:16.875848 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ovncluster-ovndbcluster-sb-dockercfg-mcsdk" Jan 20 17:54:16 crc kubenswrapper[4558]: I0120 17:54:16.876126 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovndbcluster-sb-config" Jan 20 17:54:16 crc kubenswrapper[4558]: I0120 17:54:16.876312 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovndbcluster-sb-scripts" Jan 20 17:54:16 crc kubenswrapper[4558]: I0120 17:54:16.877921 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ovndbcluster-sb-ovndbs" Jan 20 17:54:16 crc kubenswrapper[4558]: I0120 17:54:16.885219 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:54:16 crc kubenswrapper[4558]: I0120 17:54:16.917979 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:54:16 crc kubenswrapper[4558]: I0120 17:54:16.918188 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:54:16 crc kubenswrapper[4558]: I0120 17:54:16.953590 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/17f6b80e-1961-4c44-a979-9c23bdd59837-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"17f6b80e-1961-4c44-a979-9c23bdd59837\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:54:16 crc kubenswrapper[4558]: I0120 17:54:16.953663 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17f6b80e-1961-4c44-a979-9c23bdd59837-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"17f6b80e-1961-4c44-a979-9c23bdd59837\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:54:16 crc kubenswrapper[4558]: I0120 17:54:16.953701 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/17f6b80e-1961-4c44-a979-9c23bdd59837-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"17f6b80e-1961-4c44-a979-9c23bdd59837\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:54:16 crc kubenswrapper[4558]: I0120 17:54:16.953738 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/17f6b80e-1961-4c44-a979-9c23bdd59837-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"17f6b80e-1961-4c44-a979-9c23bdd59837\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:54:16 crc kubenswrapper[4558]: I0120 17:54:16.953788 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/17f6b80e-1961-4c44-a979-9c23bdd59837-config\") pod \"ovsdbserver-sb-0\" (UID: \"17f6b80e-1961-4c44-a979-9c23bdd59837\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:54:16 crc 
kubenswrapper[4558]: I0120 17:54:16.953811 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/17f6b80e-1961-4c44-a979-9c23bdd59837-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"17f6b80e-1961-4c44-a979-9c23bdd59837\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:54:16 crc kubenswrapper[4558]: I0120 17:54:16.953859 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jhjdr\" (UniqueName: \"kubernetes.io/projected/17f6b80e-1961-4c44-a979-9c23bdd59837-kube-api-access-jhjdr\") pod \"ovsdbserver-sb-0\" (UID: \"17f6b80e-1961-4c44-a979-9c23bdd59837\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:54:16 crc kubenswrapper[4558]: I0120 17:54:16.954096 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-sb-0\" (UID: \"17f6b80e-1961-4c44-a979-9c23bdd59837\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:54:17 crc kubenswrapper[4558]: I0120 17:54:17.055530 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/17f6b80e-1961-4c44-a979-9c23bdd59837-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"17f6b80e-1961-4c44-a979-9c23bdd59837\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:54:17 crc kubenswrapper[4558]: I0120 17:54:17.055589 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17f6b80e-1961-4c44-a979-9c23bdd59837-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"17f6b80e-1961-4c44-a979-9c23bdd59837\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:54:17 crc kubenswrapper[4558]: I0120 17:54:17.055630 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/17f6b80e-1961-4c44-a979-9c23bdd59837-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"17f6b80e-1961-4c44-a979-9c23bdd59837\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:54:17 crc kubenswrapper[4558]: I0120 17:54:17.055684 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/17f6b80e-1961-4c44-a979-9c23bdd59837-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"17f6b80e-1961-4c44-a979-9c23bdd59837\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:54:17 crc kubenswrapper[4558]: I0120 17:54:17.055776 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/17f6b80e-1961-4c44-a979-9c23bdd59837-config\") pod \"ovsdbserver-sb-0\" (UID: \"17f6b80e-1961-4c44-a979-9c23bdd59837\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:54:17 crc kubenswrapper[4558]: I0120 17:54:17.055806 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/17f6b80e-1961-4c44-a979-9c23bdd59837-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"17f6b80e-1961-4c44-a979-9c23bdd59837\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:54:17 crc kubenswrapper[4558]: I0120 17:54:17.055854 4558 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-jhjdr\" (UniqueName: \"kubernetes.io/projected/17f6b80e-1961-4c44-a979-9c23bdd59837-kube-api-access-jhjdr\") pod \"ovsdbserver-sb-0\" (UID: \"17f6b80e-1961-4c44-a979-9c23bdd59837\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:54:17 crc kubenswrapper[4558]: I0120 17:54:17.055981 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-sb-0\" (UID: \"17f6b80e-1961-4c44-a979-9c23bdd59837\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:54:17 crc kubenswrapper[4558]: I0120 17:54:17.056344 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-sb-0\" (UID: \"17f6b80e-1961-4c44-a979-9c23bdd59837\") device mount path \"/mnt/openstack/pv09\"" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:54:17 crc kubenswrapper[4558]: I0120 17:54:17.057293 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/17f6b80e-1961-4c44-a979-9c23bdd59837-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"17f6b80e-1961-4c44-a979-9c23bdd59837\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:54:17 crc kubenswrapper[4558]: I0120 17:54:17.058736 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/17f6b80e-1961-4c44-a979-9c23bdd59837-config\") pod \"ovsdbserver-sb-0\" (UID: \"17f6b80e-1961-4c44-a979-9c23bdd59837\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:54:17 crc kubenswrapper[4558]: I0120 17:54:17.058915 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/17f6b80e-1961-4c44-a979-9c23bdd59837-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"17f6b80e-1961-4c44-a979-9c23bdd59837\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:54:17 crc kubenswrapper[4558]: I0120 17:54:17.096648 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/17f6b80e-1961-4c44-a979-9c23bdd59837-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"17f6b80e-1961-4c44-a979-9c23bdd59837\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:54:17 crc kubenswrapper[4558]: I0120 17:54:17.098234 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17f6b80e-1961-4c44-a979-9c23bdd59837-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"17f6b80e-1961-4c44-a979-9c23bdd59837\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:54:17 crc kubenswrapper[4558]: I0120 17:54:17.098801 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/17f6b80e-1961-4c44-a979-9c23bdd59837-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"17f6b80e-1961-4c44-a979-9c23bdd59837\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:54:17 crc kubenswrapper[4558]: I0120 17:54:17.099038 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jhjdr\" (UniqueName: \"kubernetes.io/projected/17f6b80e-1961-4c44-a979-9c23bdd59837-kube-api-access-jhjdr\") pod \"ovsdbserver-sb-0\" (UID: 
\"17f6b80e-1961-4c44-a979-9c23bdd59837\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:54:17 crc kubenswrapper[4558]: I0120 17:54:17.113085 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:54:17 crc kubenswrapper[4558]: I0120 17:54:17.121692 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-sb-0\" (UID: \"17f6b80e-1961-4c44-a979-9c23bdd59837\") " pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:54:17 crc kubenswrapper[4558]: I0120 17:54:17.141819 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:54:17 crc kubenswrapper[4558]: I0120 17:54:17.202296 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:54:17 crc kubenswrapper[4558]: I0120 17:54:17.636842 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:54:17 crc kubenswrapper[4558]: W0120 17:54:17.643240 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod17f6b80e_1961_4c44_a979_9c23bdd59837.slice/crio-24eaf09c2072b8675c71caeeb0d504227fa39a4e6881c3b1b181ef743a83e953 WatchSource:0}: Error finding container 24eaf09c2072b8675c71caeeb0d504227fa39a4e6881c3b1b181ef743a83e953: Status 404 returned error can't find the container with id 24eaf09c2072b8675c71caeeb0d504227fa39a4e6881c3b1b181ef743a83e953 Jan 20 17:54:18 crc kubenswrapper[4558]: I0120 17:54:18.127462 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"17f6b80e-1961-4c44-a979-9c23bdd59837","Type":"ContainerStarted","Data":"538bcf70ba0df02b3c9616008ced0cabfe97d3b97ecd1cf7f96d63b4177c5ffa"} Jan 20 17:54:18 crc kubenswrapper[4558]: I0120 17:54:18.127539 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"17f6b80e-1961-4c44-a979-9c23bdd59837","Type":"ContainerStarted","Data":"76d0924a191838385c1332ce3ba83da3d2823bf9b2729355de22ccab98022411"} Jan 20 17:54:18 crc kubenswrapper[4558]: I0120 17:54:18.127555 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"17f6b80e-1961-4c44-a979-9c23bdd59837","Type":"ContainerStarted","Data":"24eaf09c2072b8675c71caeeb0d504227fa39a4e6881c3b1b181ef743a83e953"} Jan 20 17:54:18 crc kubenswrapper[4558]: I0120 17:54:18.128283 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:54:18 crc kubenswrapper[4558]: I0120 17:54:18.154616 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ovsdbserver-sb-0" podStartSLOduration=3.154597938 podStartE2EDuration="3.154597938s" podCreationTimestamp="2026-01-20 17:54:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:54:18.149708013 +0000 UTC m=+4351.910045979" watchObservedRunningTime="2026-01-20 17:54:18.154597938 +0000 UTC m=+4351.914935905" Jan 20 17:54:18 crc kubenswrapper[4558]: I0120 17:54:18.383060 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:54:18 crc kubenswrapper[4558]: I0120 17:54:18.383539 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:54:18 crc kubenswrapper[4558]: I0120 17:54:18.584421 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:54:19 crc kubenswrapper[4558]: I0120 17:54:19.149469 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:54:19 crc kubenswrapper[4558]: I0120 17:54:19.421959 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:54:19 crc kubenswrapper[4558]: I0120 17:54:19.709193 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:54:20 crc kubenswrapper[4558]: I0120 17:54:20.203904 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:54:20 crc kubenswrapper[4558]: I0120 17:54:20.216858 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:54:20 crc kubenswrapper[4558]: I0120 17:54:20.248769 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:54:20 crc kubenswrapper[4558]: I0120 17:54:20.667060 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:54:20 crc kubenswrapper[4558]: I0120 17:54:20.755429 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:54:21 crc kubenswrapper[4558]: I0120 17:54:21.166943 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:54:21 crc kubenswrapper[4558]: I0120 17:54:21.283433 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/swift-storage-0"] Jan 20 17:54:21 crc kubenswrapper[4558]: I0120 17:54:21.290034 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:54:21 crc kubenswrapper[4558]: I0120 17:54:21.292681 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"swift-ring-files" Jan 20 17:54:21 crc kubenswrapper[4558]: I0120 17:54:21.294228 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"swift-conf" Jan 20 17:54:21 crc kubenswrapper[4558]: I0120 17:54:21.294432 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"swift-swift-dockercfg-pxbrd" Jan 20 17:54:21 crc kubenswrapper[4558]: I0120 17:54:21.295264 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"swift-storage-config-data" Jan 20 17:54:21 crc kubenswrapper[4558]: I0120 17:54:21.302276 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-storage-0"] Jan 20 17:54:21 crc kubenswrapper[4558]: I0120 17:54:21.350311 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/cd300665-4f75-4ed7-9166-4b48e1aeffc4-etc-swift\") pod \"swift-storage-0\" (UID: \"cd300665-4f75-4ed7-9166-4b48e1aeffc4\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:54:21 crc kubenswrapper[4558]: I0120 17:54:21.350376 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bfv8c\" (UniqueName: \"kubernetes.io/projected/cd300665-4f75-4ed7-9166-4b48e1aeffc4-kube-api-access-bfv8c\") pod \"swift-storage-0\" (UID: \"cd300665-4f75-4ed7-9166-4b48e1aeffc4\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:54:21 crc kubenswrapper[4558]: I0120 17:54:21.350405 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/cd300665-4f75-4ed7-9166-4b48e1aeffc4-lock\") pod \"swift-storage-0\" (UID: \"cd300665-4f75-4ed7-9166-4b48e1aeffc4\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:54:21 crc kubenswrapper[4558]: I0120 17:54:21.350428 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/cd300665-4f75-4ed7-9166-4b48e1aeffc4-cache\") pod \"swift-storage-0\" (UID: \"cd300665-4f75-4ed7-9166-4b48e1aeffc4\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:54:21 crc kubenswrapper[4558]: I0120 17:54:21.350482 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"swift-storage-0\" (UID: \"cd300665-4f75-4ed7-9166-4b48e1aeffc4\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:54:21 crc kubenswrapper[4558]: I0120 17:54:21.452765 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"swift-storage-0\" (UID: \"cd300665-4f75-4ed7-9166-4b48e1aeffc4\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:54:21 crc kubenswrapper[4558]: I0120 17:54:21.452996 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/cd300665-4f75-4ed7-9166-4b48e1aeffc4-etc-swift\") pod \"swift-storage-0\" (UID: \"cd300665-4f75-4ed7-9166-4b48e1aeffc4\") " 
pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:54:21 crc kubenswrapper[4558]: I0120 17:54:21.453071 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bfv8c\" (UniqueName: \"kubernetes.io/projected/cd300665-4f75-4ed7-9166-4b48e1aeffc4-kube-api-access-bfv8c\") pod \"swift-storage-0\" (UID: \"cd300665-4f75-4ed7-9166-4b48e1aeffc4\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:54:21 crc kubenswrapper[4558]: I0120 17:54:21.453102 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/cd300665-4f75-4ed7-9166-4b48e1aeffc4-lock\") pod \"swift-storage-0\" (UID: \"cd300665-4f75-4ed7-9166-4b48e1aeffc4\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:54:21 crc kubenswrapper[4558]: I0120 17:54:21.453131 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/cd300665-4f75-4ed7-9166-4b48e1aeffc4-cache\") pod \"swift-storage-0\" (UID: \"cd300665-4f75-4ed7-9166-4b48e1aeffc4\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:54:21 crc kubenswrapper[4558]: I0120 17:54:21.453294 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"swift-storage-0\" (UID: \"cd300665-4f75-4ed7-9166-4b48e1aeffc4\") device mount path \"/mnt/openstack/pv11\"" pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:54:21 crc kubenswrapper[4558]: E0120 17:54:21.453324 4558 projected.go:288] Couldn't get configMap openstack-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 20 17:54:21 crc kubenswrapper[4558]: E0120 17:54:21.453370 4558 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Jan 20 17:54:21 crc kubenswrapper[4558]: E0120 17:54:21.453461 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/cd300665-4f75-4ed7-9166-4b48e1aeffc4-etc-swift podName:cd300665-4f75-4ed7-9166-4b48e1aeffc4 nodeName:}" failed. No retries permitted until 2026-01-20 17:54:21.953431677 +0000 UTC m=+4355.713769654 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/cd300665-4f75-4ed7-9166-4b48e1aeffc4-etc-swift") pod "swift-storage-0" (UID: "cd300665-4f75-4ed7-9166-4b48e1aeffc4") : configmap "swift-ring-files" not found Jan 20 17:54:21 crc kubenswrapper[4558]: I0120 17:54:21.453698 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/cd300665-4f75-4ed7-9166-4b48e1aeffc4-lock\") pod \"swift-storage-0\" (UID: \"cd300665-4f75-4ed7-9166-4b48e1aeffc4\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:54:21 crc kubenswrapper[4558]: I0120 17:54:21.453727 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/cd300665-4f75-4ed7-9166-4b48e1aeffc4-cache\") pod \"swift-storage-0\" (UID: \"cd300665-4f75-4ed7-9166-4b48e1aeffc4\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:54:21 crc kubenswrapper[4558]: I0120 17:54:21.476101 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"swift-storage-0\" (UID: \"cd300665-4f75-4ed7-9166-4b48e1aeffc4\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:54:21 crc kubenswrapper[4558]: I0120 17:54:21.477506 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bfv8c\" (UniqueName: \"kubernetes.io/projected/cd300665-4f75-4ed7-9166-4b48e1aeffc4-kube-api-access-bfv8c\") pod \"swift-storage-0\" (UID: \"cd300665-4f75-4ed7-9166-4b48e1aeffc4\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:54:21 crc kubenswrapper[4558]: I0120 17:54:21.631877 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-xznwn"] Jan 20 17:54:21 crc kubenswrapper[4558]: I0120 17:54:21.633210 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/swift-ring-rebalance-xznwn" Jan 20 17:54:21 crc kubenswrapper[4558]: I0120 17:54:21.634881 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"swift-proxy-config-data" Jan 20 17:54:21 crc kubenswrapper[4558]: I0120 17:54:21.634921 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"swift-ring-config-data" Jan 20 17:54:21 crc kubenswrapper[4558]: I0120 17:54:21.635217 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"swift-ring-scripts" Jan 20 17:54:21 crc kubenswrapper[4558]: I0120 17:54:21.639558 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-xznwn"] Jan 20 17:54:21 crc kubenswrapper[4558]: I0120 17:54:21.758642 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/0dc1e7fb-21d3-4077-87c9-f2ede71a561a-swiftconf\") pod \"swift-ring-rebalance-xznwn\" (UID: \"0dc1e7fb-21d3-4077-87c9-f2ede71a561a\") " pod="openstack-kuttl-tests/swift-ring-rebalance-xznwn" Jan 20 17:54:21 crc kubenswrapper[4558]: I0120 17:54:21.758731 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/0dc1e7fb-21d3-4077-87c9-f2ede71a561a-etc-swift\") pod \"swift-ring-rebalance-xznwn\" (UID: \"0dc1e7fb-21d3-4077-87c9-f2ede71a561a\") " pod="openstack-kuttl-tests/swift-ring-rebalance-xznwn" Jan 20 17:54:21 crc kubenswrapper[4558]: I0120 17:54:21.758766 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/0dc1e7fb-21d3-4077-87c9-f2ede71a561a-dispersionconf\") pod \"swift-ring-rebalance-xznwn\" (UID: \"0dc1e7fb-21d3-4077-87c9-f2ede71a561a\") " pod="openstack-kuttl-tests/swift-ring-rebalance-xznwn" Jan 20 17:54:21 crc kubenswrapper[4558]: I0120 17:54:21.758830 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/0dc1e7fb-21d3-4077-87c9-f2ede71a561a-ring-data-devices\") pod \"swift-ring-rebalance-xznwn\" (UID: \"0dc1e7fb-21d3-4077-87c9-f2ede71a561a\") " pod="openstack-kuttl-tests/swift-ring-rebalance-xznwn" Jan 20 17:54:21 crc kubenswrapper[4558]: I0120 17:54:21.758959 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0dc1e7fb-21d3-4077-87c9-f2ede71a561a-scripts\") pod \"swift-ring-rebalance-xznwn\" (UID: \"0dc1e7fb-21d3-4077-87c9-f2ede71a561a\") " pod="openstack-kuttl-tests/swift-ring-rebalance-xznwn" Jan 20 17:54:21 crc kubenswrapper[4558]: I0120 17:54:21.759012 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vl6hr\" (UniqueName: \"kubernetes.io/projected/0dc1e7fb-21d3-4077-87c9-f2ede71a561a-kube-api-access-vl6hr\") pod \"swift-ring-rebalance-xznwn\" (UID: \"0dc1e7fb-21d3-4077-87c9-f2ede71a561a\") " pod="openstack-kuttl-tests/swift-ring-rebalance-xznwn" Jan 20 17:54:21 crc kubenswrapper[4558]: I0120 17:54:21.759060 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0dc1e7fb-21d3-4077-87c9-f2ede71a561a-combined-ca-bundle\") pod 
\"swift-ring-rebalance-xznwn\" (UID: \"0dc1e7fb-21d3-4077-87c9-f2ede71a561a\") " pod="openstack-kuttl-tests/swift-ring-rebalance-xznwn" Jan 20 17:54:21 crc kubenswrapper[4558]: I0120 17:54:21.860385 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/0dc1e7fb-21d3-4077-87c9-f2ede71a561a-dispersionconf\") pod \"swift-ring-rebalance-xznwn\" (UID: \"0dc1e7fb-21d3-4077-87c9-f2ede71a561a\") " pod="openstack-kuttl-tests/swift-ring-rebalance-xznwn" Jan 20 17:54:21 crc kubenswrapper[4558]: I0120 17:54:21.860503 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/0dc1e7fb-21d3-4077-87c9-f2ede71a561a-ring-data-devices\") pod \"swift-ring-rebalance-xznwn\" (UID: \"0dc1e7fb-21d3-4077-87c9-f2ede71a561a\") " pod="openstack-kuttl-tests/swift-ring-rebalance-xznwn" Jan 20 17:54:21 crc kubenswrapper[4558]: I0120 17:54:21.860699 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0dc1e7fb-21d3-4077-87c9-f2ede71a561a-scripts\") pod \"swift-ring-rebalance-xznwn\" (UID: \"0dc1e7fb-21d3-4077-87c9-f2ede71a561a\") " pod="openstack-kuttl-tests/swift-ring-rebalance-xznwn" Jan 20 17:54:21 crc kubenswrapper[4558]: I0120 17:54:21.860780 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vl6hr\" (UniqueName: \"kubernetes.io/projected/0dc1e7fb-21d3-4077-87c9-f2ede71a561a-kube-api-access-vl6hr\") pod \"swift-ring-rebalance-xznwn\" (UID: \"0dc1e7fb-21d3-4077-87c9-f2ede71a561a\") " pod="openstack-kuttl-tests/swift-ring-rebalance-xznwn" Jan 20 17:54:21 crc kubenswrapper[4558]: I0120 17:54:21.860831 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0dc1e7fb-21d3-4077-87c9-f2ede71a561a-combined-ca-bundle\") pod \"swift-ring-rebalance-xznwn\" (UID: \"0dc1e7fb-21d3-4077-87c9-f2ede71a561a\") " pod="openstack-kuttl-tests/swift-ring-rebalance-xznwn" Jan 20 17:54:21 crc kubenswrapper[4558]: I0120 17:54:21.860904 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/0dc1e7fb-21d3-4077-87c9-f2ede71a561a-swiftconf\") pod \"swift-ring-rebalance-xznwn\" (UID: \"0dc1e7fb-21d3-4077-87c9-f2ede71a561a\") " pod="openstack-kuttl-tests/swift-ring-rebalance-xznwn" Jan 20 17:54:21 crc kubenswrapper[4558]: I0120 17:54:21.860986 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/0dc1e7fb-21d3-4077-87c9-f2ede71a561a-etc-swift\") pod \"swift-ring-rebalance-xznwn\" (UID: \"0dc1e7fb-21d3-4077-87c9-f2ede71a561a\") " pod="openstack-kuttl-tests/swift-ring-rebalance-xznwn" Jan 20 17:54:21 crc kubenswrapper[4558]: I0120 17:54:21.861414 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/0dc1e7fb-21d3-4077-87c9-f2ede71a561a-etc-swift\") pod \"swift-ring-rebalance-xznwn\" (UID: \"0dc1e7fb-21d3-4077-87c9-f2ede71a561a\") " pod="openstack-kuttl-tests/swift-ring-rebalance-xznwn" Jan 20 17:54:21 crc kubenswrapper[4558]: I0120 17:54:21.861417 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/0dc1e7fb-21d3-4077-87c9-f2ede71a561a-ring-data-devices\") pod 
\"swift-ring-rebalance-xznwn\" (UID: \"0dc1e7fb-21d3-4077-87c9-f2ede71a561a\") " pod="openstack-kuttl-tests/swift-ring-rebalance-xznwn" Jan 20 17:54:21 crc kubenswrapper[4558]: I0120 17:54:21.861546 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0dc1e7fb-21d3-4077-87c9-f2ede71a561a-scripts\") pod \"swift-ring-rebalance-xznwn\" (UID: \"0dc1e7fb-21d3-4077-87c9-f2ede71a561a\") " pod="openstack-kuttl-tests/swift-ring-rebalance-xznwn" Jan 20 17:54:21 crc kubenswrapper[4558]: I0120 17:54:21.864339 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0dc1e7fb-21d3-4077-87c9-f2ede71a561a-combined-ca-bundle\") pod \"swift-ring-rebalance-xznwn\" (UID: \"0dc1e7fb-21d3-4077-87c9-f2ede71a561a\") " pod="openstack-kuttl-tests/swift-ring-rebalance-xznwn" Jan 20 17:54:21 crc kubenswrapper[4558]: I0120 17:54:21.864363 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/0dc1e7fb-21d3-4077-87c9-f2ede71a561a-swiftconf\") pod \"swift-ring-rebalance-xznwn\" (UID: \"0dc1e7fb-21d3-4077-87c9-f2ede71a561a\") " pod="openstack-kuttl-tests/swift-ring-rebalance-xznwn" Jan 20 17:54:21 crc kubenswrapper[4558]: I0120 17:54:21.864579 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/0dc1e7fb-21d3-4077-87c9-f2ede71a561a-dispersionconf\") pod \"swift-ring-rebalance-xznwn\" (UID: \"0dc1e7fb-21d3-4077-87c9-f2ede71a561a\") " pod="openstack-kuttl-tests/swift-ring-rebalance-xznwn" Jan 20 17:54:21 crc kubenswrapper[4558]: I0120 17:54:21.875511 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vl6hr\" (UniqueName: \"kubernetes.io/projected/0dc1e7fb-21d3-4077-87c9-f2ede71a561a-kube-api-access-vl6hr\") pod \"swift-ring-rebalance-xznwn\" (UID: \"0dc1e7fb-21d3-4077-87c9-f2ede71a561a\") " pod="openstack-kuttl-tests/swift-ring-rebalance-xznwn" Jan 20 17:54:21 crc kubenswrapper[4558]: I0120 17:54:21.951607 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-ring-rebalance-xznwn" Jan 20 17:54:21 crc kubenswrapper[4558]: I0120 17:54:21.962540 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/cd300665-4f75-4ed7-9166-4b48e1aeffc4-etc-swift\") pod \"swift-storage-0\" (UID: \"cd300665-4f75-4ed7-9166-4b48e1aeffc4\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:54:21 crc kubenswrapper[4558]: E0120 17:54:21.962737 4558 projected.go:288] Couldn't get configMap openstack-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 20 17:54:21 crc kubenswrapper[4558]: E0120 17:54:21.962773 4558 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Jan 20 17:54:21 crc kubenswrapper[4558]: E0120 17:54:21.962844 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/cd300665-4f75-4ed7-9166-4b48e1aeffc4-etc-swift podName:cd300665-4f75-4ed7-9166-4b48e1aeffc4 nodeName:}" failed. No retries permitted until 2026-01-20 17:54:22.962823225 +0000 UTC m=+4356.723161192 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/cd300665-4f75-4ed7-9166-4b48e1aeffc4-etc-swift") pod "swift-storage-0" (UID: "cd300665-4f75-4ed7-9166-4b48e1aeffc4") : configmap "swift-ring-files" not found Jan 20 17:54:22 crc kubenswrapper[4558]: I0120 17:54:22.205143 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:54:22 crc kubenswrapper[4558]: I0120 17:54:22.355301 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:54:22 crc kubenswrapper[4558]: I0120 17:54:22.357251 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:54:22 crc kubenswrapper[4558]: I0120 17:54:22.360750 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovnnorthd-scripts" Jan 20 17:54:22 crc kubenswrapper[4558]: I0120 17:54:22.360928 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ovnnorthd-ovndbs" Jan 20 17:54:22 crc kubenswrapper[4558]: I0120 17:54:22.361146 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovnnorthd-config" Jan 20 17:54:22 crc kubenswrapper[4558]: I0120 17:54:22.361357 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ovnnorthd-ovnnorthd-dockercfg-p6xnb" Jan 20 17:54:22 crc kubenswrapper[4558]: I0120 17:54:22.361543 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-xznwn"] Jan 20 17:54:22 crc kubenswrapper[4558]: I0120 17:54:22.364830 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:54:22 crc kubenswrapper[4558]: I0120 17:54:22.369414 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/dbdd2687-1110-4dce-a521-19c9337df3a2-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"dbdd2687-1110-4dce-a521-19c9337df3a2\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:54:22 crc kubenswrapper[4558]: I0120 17:54:22.369458 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dbdd2687-1110-4dce-a521-19c9337df3a2-config\") pod \"ovn-northd-0\" (UID: \"dbdd2687-1110-4dce-a521-19c9337df3a2\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:54:22 crc kubenswrapper[4558]: I0120 17:54:22.369476 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/dbdd2687-1110-4dce-a521-19c9337df3a2-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"dbdd2687-1110-4dce-a521-19c9337df3a2\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:54:22 crc kubenswrapper[4558]: I0120 17:54:22.369500 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dbdd2687-1110-4dce-a521-19c9337df3a2-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"dbdd2687-1110-4dce-a521-19c9337df3a2\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:54:22 crc kubenswrapper[4558]: I0120 17:54:22.369533 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" 
(UniqueName: \"kubernetes.io/configmap/dbdd2687-1110-4dce-a521-19c9337df3a2-scripts\") pod \"ovn-northd-0\" (UID: \"dbdd2687-1110-4dce-a521-19c9337df3a2\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:54:22 crc kubenswrapper[4558]: I0120 17:54:22.369593 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8ntm2\" (UniqueName: \"kubernetes.io/projected/dbdd2687-1110-4dce-a521-19c9337df3a2-kube-api-access-8ntm2\") pod \"ovn-northd-0\" (UID: \"dbdd2687-1110-4dce-a521-19c9337df3a2\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:54:22 crc kubenswrapper[4558]: I0120 17:54:22.369630 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/dbdd2687-1110-4dce-a521-19c9337df3a2-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"dbdd2687-1110-4dce-a521-19c9337df3a2\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:54:22 crc kubenswrapper[4558]: I0120 17:54:22.471036 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/dbdd2687-1110-4dce-a521-19c9337df3a2-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"dbdd2687-1110-4dce-a521-19c9337df3a2\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:54:22 crc kubenswrapper[4558]: I0120 17:54:22.471111 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dbdd2687-1110-4dce-a521-19c9337df3a2-config\") pod \"ovn-northd-0\" (UID: \"dbdd2687-1110-4dce-a521-19c9337df3a2\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:54:22 crc kubenswrapper[4558]: I0120 17:54:22.471137 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/dbdd2687-1110-4dce-a521-19c9337df3a2-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"dbdd2687-1110-4dce-a521-19c9337df3a2\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:54:22 crc kubenswrapper[4558]: I0120 17:54:22.471177 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dbdd2687-1110-4dce-a521-19c9337df3a2-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"dbdd2687-1110-4dce-a521-19c9337df3a2\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:54:22 crc kubenswrapper[4558]: I0120 17:54:22.471225 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/dbdd2687-1110-4dce-a521-19c9337df3a2-scripts\") pod \"ovn-northd-0\" (UID: \"dbdd2687-1110-4dce-a521-19c9337df3a2\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:54:22 crc kubenswrapper[4558]: I0120 17:54:22.471294 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8ntm2\" (UniqueName: \"kubernetes.io/projected/dbdd2687-1110-4dce-a521-19c9337df3a2-kube-api-access-8ntm2\") pod \"ovn-northd-0\" (UID: \"dbdd2687-1110-4dce-a521-19c9337df3a2\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:54:22 crc kubenswrapper[4558]: I0120 17:54:22.471337 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/dbdd2687-1110-4dce-a521-19c9337df3a2-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"dbdd2687-1110-4dce-a521-19c9337df3a2\") " 
pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:54:22 crc kubenswrapper[4558]: I0120 17:54:22.472082 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dbdd2687-1110-4dce-a521-19c9337df3a2-config\") pod \"ovn-northd-0\" (UID: \"dbdd2687-1110-4dce-a521-19c9337df3a2\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:54:22 crc kubenswrapper[4558]: I0120 17:54:22.475699 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/dbdd2687-1110-4dce-a521-19c9337df3a2-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"dbdd2687-1110-4dce-a521-19c9337df3a2\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:54:22 crc kubenswrapper[4558]: I0120 17:54:22.475867 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dbdd2687-1110-4dce-a521-19c9337df3a2-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"dbdd2687-1110-4dce-a521-19c9337df3a2\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:54:22 crc kubenswrapper[4558]: I0120 17:54:22.479666 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/dbdd2687-1110-4dce-a521-19c9337df3a2-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"dbdd2687-1110-4dce-a521-19c9337df3a2\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:54:22 crc kubenswrapper[4558]: I0120 17:54:22.487595 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/dbdd2687-1110-4dce-a521-19c9337df3a2-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"dbdd2687-1110-4dce-a521-19c9337df3a2\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:54:22 crc kubenswrapper[4558]: I0120 17:54:22.487813 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/dbdd2687-1110-4dce-a521-19c9337df3a2-scripts\") pod \"ovn-northd-0\" (UID: \"dbdd2687-1110-4dce-a521-19c9337df3a2\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:54:22 crc kubenswrapper[4558]: I0120 17:54:22.491798 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8ntm2\" (UniqueName: \"kubernetes.io/projected/dbdd2687-1110-4dce-a521-19c9337df3a2-kube-api-access-8ntm2\") pod \"ovn-northd-0\" (UID: \"dbdd2687-1110-4dce-a521-19c9337df3a2\") " pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:54:22 crc kubenswrapper[4558]: I0120 17:54:22.676996 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:54:22 crc kubenswrapper[4558]: I0120 17:54:22.982690 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/cd300665-4f75-4ed7-9166-4b48e1aeffc4-etc-swift\") pod \"swift-storage-0\" (UID: \"cd300665-4f75-4ed7-9166-4b48e1aeffc4\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:54:22 crc kubenswrapper[4558]: E0120 17:54:22.982924 4558 projected.go:288] Couldn't get configMap openstack-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 20 17:54:22 crc kubenswrapper[4558]: E0120 17:54:22.983148 4558 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Jan 20 17:54:22 crc kubenswrapper[4558]: E0120 17:54:22.983266 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/cd300665-4f75-4ed7-9166-4b48e1aeffc4-etc-swift podName:cd300665-4f75-4ed7-9166-4b48e1aeffc4 nodeName:}" failed. No retries permitted until 2026-01-20 17:54:24.983218222 +0000 UTC m=+4358.743556189 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/cd300665-4f75-4ed7-9166-4b48e1aeffc4-etc-swift") pod "swift-storage-0" (UID: "cd300665-4f75-4ed7-9166-4b48e1aeffc4") : configmap "swift-ring-files" not found Jan 20 17:54:23 crc kubenswrapper[4558]: I0120 17:54:23.101487 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:54:23 crc kubenswrapper[4558]: I0120 17:54:23.187540 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-ring-rebalance-xznwn" event={"ID":"0dc1e7fb-21d3-4077-87c9-f2ede71a561a","Type":"ContainerStarted","Data":"5c9f046a891a29b5a5cbad4772933fbe8340e6d505155496bf492f03afe1f344"} Jan 20 17:54:23 crc kubenswrapper[4558]: I0120 17:54:23.187611 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-ring-rebalance-xznwn" event={"ID":"0dc1e7fb-21d3-4077-87c9-f2ede71a561a","Type":"ContainerStarted","Data":"e9f199ff4b378cb04728396fd1a1b0db2bc693086e84ffcc259aef5136cb0dc0"} Jan 20 17:54:23 crc kubenswrapper[4558]: I0120 17:54:23.193375 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"dbdd2687-1110-4dce-a521-19c9337df3a2","Type":"ContainerStarted","Data":"a2a7a2a4918691aaf41b49fcc06de017842d09f24418e64b2e673e16ba0845b2"} Jan 20 17:54:23 crc kubenswrapper[4558]: I0120 17:54:23.208225 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/swift-ring-rebalance-xznwn" podStartSLOduration=2.208205563 podStartE2EDuration="2.208205563s" podCreationTimestamp="2026-01-20 17:54:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:54:23.207088933 +0000 UTC m=+4356.967426901" watchObservedRunningTime="2026-01-20 17:54:23.208205563 +0000 UTC m=+4356.968543530" Jan 20 17:54:24 crc kubenswrapper[4558]: I0120 17:54:24.205649 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"dbdd2687-1110-4dce-a521-19c9337df3a2","Type":"ContainerStarted","Data":"1c2ffc84d31610214ed641d4c67df414b1a44d6e48fb78c1e2211304b04f696f"} Jan 20 17:54:24 crc kubenswrapper[4558]: I0120 17:54:24.205732 4558 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"dbdd2687-1110-4dce-a521-19c9337df3a2","Type":"ContainerStarted","Data":"96d47ec89318825738be6686bad9869d642e9bc40f34628060459ade0aa4b177"} Jan 20 17:54:24 crc kubenswrapper[4558]: I0120 17:54:24.225771 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ovn-northd-0" podStartSLOduration=2.22575061 podStartE2EDuration="2.22575061s" podCreationTimestamp="2026-01-20 17:54:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:54:24.223544323 +0000 UTC m=+4357.983882290" watchObservedRunningTime="2026-01-20 17:54:24.22575061 +0000 UTC m=+4357.986088578" Jan 20 17:54:25 crc kubenswrapper[4558]: I0120 17:54:25.025889 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/cd300665-4f75-4ed7-9166-4b48e1aeffc4-etc-swift\") pod \"swift-storage-0\" (UID: \"cd300665-4f75-4ed7-9166-4b48e1aeffc4\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:54:25 crc kubenswrapper[4558]: E0120 17:54:25.026207 4558 projected.go:288] Couldn't get configMap openstack-kuttl-tests/swift-ring-files: configmap "swift-ring-files" not found Jan 20 17:54:25 crc kubenswrapper[4558]: E0120 17:54:25.026226 4558 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack-kuttl-tests/swift-storage-0: configmap "swift-ring-files" not found Jan 20 17:54:25 crc kubenswrapper[4558]: E0120 17:54:25.026277 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/cd300665-4f75-4ed7-9166-4b48e1aeffc4-etc-swift podName:cd300665-4f75-4ed7-9166-4b48e1aeffc4 nodeName:}" failed. No retries permitted until 2026-01-20 17:54:29.026261553 +0000 UTC m=+4362.786599520 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/cd300665-4f75-4ed7-9166-4b48e1aeffc4-etc-swift") pod "swift-storage-0" (UID: "cd300665-4f75-4ed7-9166-4b48e1aeffc4") : configmap "swift-ring-files" not found Jan 20 17:54:25 crc kubenswrapper[4558]: I0120 17:54:25.212564 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:54:25 crc kubenswrapper[4558]: I0120 17:54:25.571102 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/root-account-create-update-j22zm"] Jan 20 17:54:25 crc kubenswrapper[4558]: I0120 17:54:25.572218 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-j22zm" Jan 20 17:54:25 crc kubenswrapper[4558]: I0120 17:54:25.575588 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"openstack-mariadb-root-db-secret" Jan 20 17:54:25 crc kubenswrapper[4558]: I0120 17:54:25.580650 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-j22zm"] Jan 20 17:54:25 crc kubenswrapper[4558]: I0120 17:54:25.637356 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a92fe2a3-64bf-43a1-9b3f-b1d3184835fb-operator-scripts\") pod \"root-account-create-update-j22zm\" (UID: \"a92fe2a3-64bf-43a1-9b3f-b1d3184835fb\") " pod="openstack-kuttl-tests/root-account-create-update-j22zm" Jan 20 17:54:25 crc kubenswrapper[4558]: I0120 17:54:25.637787 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pcjms\" (UniqueName: \"kubernetes.io/projected/a92fe2a3-64bf-43a1-9b3f-b1d3184835fb-kube-api-access-pcjms\") pod \"root-account-create-update-j22zm\" (UID: \"a92fe2a3-64bf-43a1-9b3f-b1d3184835fb\") " pod="openstack-kuttl-tests/root-account-create-update-j22zm" Jan 20 17:54:25 crc kubenswrapper[4558]: I0120 17:54:25.740146 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pcjms\" (UniqueName: \"kubernetes.io/projected/a92fe2a3-64bf-43a1-9b3f-b1d3184835fb-kube-api-access-pcjms\") pod \"root-account-create-update-j22zm\" (UID: \"a92fe2a3-64bf-43a1-9b3f-b1d3184835fb\") " pod="openstack-kuttl-tests/root-account-create-update-j22zm" Jan 20 17:54:25 crc kubenswrapper[4558]: I0120 17:54:25.740303 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a92fe2a3-64bf-43a1-9b3f-b1d3184835fb-operator-scripts\") pod \"root-account-create-update-j22zm\" (UID: \"a92fe2a3-64bf-43a1-9b3f-b1d3184835fb\") " pod="openstack-kuttl-tests/root-account-create-update-j22zm" Jan 20 17:54:25 crc kubenswrapper[4558]: I0120 17:54:25.741354 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a92fe2a3-64bf-43a1-9b3f-b1d3184835fb-operator-scripts\") pod \"root-account-create-update-j22zm\" (UID: \"a92fe2a3-64bf-43a1-9b3f-b1d3184835fb\") " pod="openstack-kuttl-tests/root-account-create-update-j22zm" Jan 20 17:54:25 crc kubenswrapper[4558]: I0120 17:54:25.894415 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pcjms\" (UniqueName: \"kubernetes.io/projected/a92fe2a3-64bf-43a1-9b3f-b1d3184835fb-kube-api-access-pcjms\") pod \"root-account-create-update-j22zm\" (UID: \"a92fe2a3-64bf-43a1-9b3f-b1d3184835fb\") " pod="openstack-kuttl-tests/root-account-create-update-j22zm" Jan 20 17:54:25 crc kubenswrapper[4558]: I0120 17:54:25.894941 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-j22zm" Jan 20 17:54:26 crc kubenswrapper[4558]: I0120 17:54:26.290995 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-j22zm"] Jan 20 17:54:27 crc kubenswrapper[4558]: I0120 17:54:27.232741 4558 generic.go:334] "Generic (PLEG): container finished" podID="a92fe2a3-64bf-43a1-9b3f-b1d3184835fb" containerID="4ab8c441eeeb6b4f69bfa355c2d0780124143e8277f4f2d7413b6e677705320c" exitCode=0 Jan 20 17:54:27 crc kubenswrapper[4558]: I0120 17:54:27.232803 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-j22zm" event={"ID":"a92fe2a3-64bf-43a1-9b3f-b1d3184835fb","Type":"ContainerDied","Data":"4ab8c441eeeb6b4f69bfa355c2d0780124143e8277f4f2d7413b6e677705320c"} Jan 20 17:54:27 crc kubenswrapper[4558]: I0120 17:54:27.232840 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-j22zm" event={"ID":"a92fe2a3-64bf-43a1-9b3f-b1d3184835fb","Type":"ContainerStarted","Data":"7eba7bc3cc2ddb258528d9928e0234d29277945a1e9e056144507f98a6b70228"} Jan 20 17:54:27 crc kubenswrapper[4558]: I0120 17:54:27.562285 4558 scope.go:117] "RemoveContainer" containerID="cda249e7881a71ca863707e278b44e93b7b2ddc35df234ef16f4b2629fda0d1a" Jan 20 17:54:27 crc kubenswrapper[4558]: I0120 17:54:27.608857 4558 scope.go:117] "RemoveContainer" containerID="f13b8c904e10f66a85bae11d7194703136a1d193483d5222b8adc92a268e6dc0" Jan 20 17:54:27 crc kubenswrapper[4558]: I0120 17:54:27.630040 4558 scope.go:117] "RemoveContainer" containerID="7588231569d82d21bc037c8f9bdc99164c106acd49e9d29c96f7408401d928c4" Jan 20 17:54:27 crc kubenswrapper[4558]: I0120 17:54:27.671638 4558 scope.go:117] "RemoveContainer" containerID="528cd7b9e6027bbc3a400adab21b7705e6cfe2f8368f79f3e85bef1cda4dd1e6" Jan 20 17:54:27 crc kubenswrapper[4558]: I0120 17:54:27.690672 4558 scope.go:117] "RemoveContainer" containerID="0e79f2cae3c4c0133a0b6788fe7df950d70f340957fa52cebd1533ef87c6bf7f" Jan 20 17:54:27 crc kubenswrapper[4558]: I0120 17:54:27.711427 4558 scope.go:117] "RemoveContainer" containerID="543acc9e8e6d663cae36ab5b04b6b497d854dfd494c3edf417b4d1f28c4e5338" Jan 20 17:54:27 crc kubenswrapper[4558]: I0120 17:54:27.733034 4558 scope.go:117] "RemoveContainer" containerID="441fa4054ae85c1a96a72c9f2e37597c22d876e5267c2bf5c34bbba3be42499c" Jan 20 17:54:27 crc kubenswrapper[4558]: I0120 17:54:27.755081 4558 scope.go:117] "RemoveContainer" containerID="4cc83e2afecb8ccf476b628cbe76038deafd826319b077c37d311481bf29f6f1" Jan 20 17:54:27 crc kubenswrapper[4558]: I0120 17:54:27.775209 4558 scope.go:117] "RemoveContainer" containerID="57b3952a8f41464d4529f673a192a03c8051341064d2770a665439b22f8070fc" Jan 20 17:54:27 crc kubenswrapper[4558]: I0120 17:54:27.795312 4558 scope.go:117] "RemoveContainer" containerID="9eb510d65b859ba67d1fbebbd45639a22b783fc8a8eeb68ec8c537ed76f3ce64" Jan 20 17:54:28 crc kubenswrapper[4558]: I0120 17:54:28.180633 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-db-create-vfppk"] Jan 20 17:54:28 crc kubenswrapper[4558]: I0120 17:54:28.181681 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-db-create-vfppk" Jan 20 17:54:28 crc kubenswrapper[4558]: I0120 17:54:28.185668 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/41d6ff7a-4cdf-4add-834d-eb19a40181b9-operator-scripts\") pod \"keystone-db-create-vfppk\" (UID: \"41d6ff7a-4cdf-4add-834d-eb19a40181b9\") " pod="openstack-kuttl-tests/keystone-db-create-vfppk" Jan 20 17:54:28 crc kubenswrapper[4558]: I0120 17:54:28.185719 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zp2fw\" (UniqueName: \"kubernetes.io/projected/41d6ff7a-4cdf-4add-834d-eb19a40181b9-kube-api-access-zp2fw\") pod \"keystone-db-create-vfppk\" (UID: \"41d6ff7a-4cdf-4add-834d-eb19a40181b9\") " pod="openstack-kuttl-tests/keystone-db-create-vfppk" Jan 20 17:54:28 crc kubenswrapper[4558]: I0120 17:54:28.188064 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-db-create-vfppk"] Jan 20 17:54:28 crc kubenswrapper[4558]: I0120 17:54:28.287067 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/41d6ff7a-4cdf-4add-834d-eb19a40181b9-operator-scripts\") pod \"keystone-db-create-vfppk\" (UID: \"41d6ff7a-4cdf-4add-834d-eb19a40181b9\") " pod="openstack-kuttl-tests/keystone-db-create-vfppk" Jan 20 17:54:28 crc kubenswrapper[4558]: I0120 17:54:28.287113 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zp2fw\" (UniqueName: \"kubernetes.io/projected/41d6ff7a-4cdf-4add-834d-eb19a40181b9-kube-api-access-zp2fw\") pod \"keystone-db-create-vfppk\" (UID: \"41d6ff7a-4cdf-4add-834d-eb19a40181b9\") " pod="openstack-kuttl-tests/keystone-db-create-vfppk" Jan 20 17:54:28 crc kubenswrapper[4558]: I0120 17:54:28.287922 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/41d6ff7a-4cdf-4add-834d-eb19a40181b9-operator-scripts\") pod \"keystone-db-create-vfppk\" (UID: \"41d6ff7a-4cdf-4add-834d-eb19a40181b9\") " pod="openstack-kuttl-tests/keystone-db-create-vfppk" Jan 20 17:54:28 crc kubenswrapper[4558]: I0120 17:54:28.289605 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-1afa-account-create-update-9wfwp"] Jan 20 17:54:28 crc kubenswrapper[4558]: I0120 17:54:28.290835 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-1afa-account-create-update-9wfwp" Jan 20 17:54:28 crc kubenswrapper[4558]: I0120 17:54:28.293226 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-db-secret" Jan 20 17:54:28 crc kubenswrapper[4558]: I0120 17:54:28.299216 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-1afa-account-create-update-9wfwp"] Jan 20 17:54:28 crc kubenswrapper[4558]: I0120 17:54:28.310627 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zp2fw\" (UniqueName: \"kubernetes.io/projected/41d6ff7a-4cdf-4add-834d-eb19a40181b9-kube-api-access-zp2fw\") pod \"keystone-db-create-vfppk\" (UID: \"41d6ff7a-4cdf-4add-834d-eb19a40181b9\") " pod="openstack-kuttl-tests/keystone-db-create-vfppk" Jan 20 17:54:28 crc kubenswrapper[4558]: I0120 17:54:28.492308 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/30ba9eb2-c009-491e-9b16-a1ce65ca3d98-operator-scripts\") pod \"keystone-1afa-account-create-update-9wfwp\" (UID: \"30ba9eb2-c009-491e-9b16-a1ce65ca3d98\") " pod="openstack-kuttl-tests/keystone-1afa-account-create-update-9wfwp" Jan 20 17:54:28 crc kubenswrapper[4558]: I0120 17:54:28.492851 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h2729\" (UniqueName: \"kubernetes.io/projected/30ba9eb2-c009-491e-9b16-a1ce65ca3d98-kube-api-access-h2729\") pod \"keystone-1afa-account-create-update-9wfwp\" (UID: \"30ba9eb2-c009-491e-9b16-a1ce65ca3d98\") " pod="openstack-kuttl-tests/keystone-1afa-account-create-update-9wfwp" Jan 20 17:54:28 crc kubenswrapper[4558]: I0120 17:54:28.506687 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-db-create-vfppk" Jan 20 17:54:28 crc kubenswrapper[4558]: I0120 17:54:28.528992 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-j22zm" Jan 20 17:54:28 crc kubenswrapper[4558]: I0120 17:54:28.547093 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/placement-db-create-2hfmj"] Jan 20 17:54:28 crc kubenswrapper[4558]: E0120 17:54:28.547491 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a92fe2a3-64bf-43a1-9b3f-b1d3184835fb" containerName="mariadb-account-create-update" Jan 20 17:54:28 crc kubenswrapper[4558]: I0120 17:54:28.547512 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a92fe2a3-64bf-43a1-9b3f-b1d3184835fb" containerName="mariadb-account-create-update" Jan 20 17:54:28 crc kubenswrapper[4558]: I0120 17:54:28.547678 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a92fe2a3-64bf-43a1-9b3f-b1d3184835fb" containerName="mariadb-account-create-update" Jan 20 17:54:28 crc kubenswrapper[4558]: I0120 17:54:28.548315 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-db-create-2hfmj" Jan 20 17:54:28 crc kubenswrapper[4558]: I0120 17:54:28.554135 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-db-create-2hfmj"] Jan 20 17:54:28 crc kubenswrapper[4558]: I0120 17:54:28.597610 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a92fe2a3-64bf-43a1-9b3f-b1d3184835fb-operator-scripts\") pod \"a92fe2a3-64bf-43a1-9b3f-b1d3184835fb\" (UID: \"a92fe2a3-64bf-43a1-9b3f-b1d3184835fb\") " Jan 20 17:54:28 crc kubenswrapper[4558]: I0120 17:54:28.597831 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f0abf094-6fa0-45a0-b6f0-eaa4b4b03b55-operator-scripts\") pod \"placement-db-create-2hfmj\" (UID: \"f0abf094-6fa0-45a0-b6f0-eaa4b4b03b55\") " pod="openstack-kuttl-tests/placement-db-create-2hfmj" Jan 20 17:54:28 crc kubenswrapper[4558]: I0120 17:54:28.597922 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/30ba9eb2-c009-491e-9b16-a1ce65ca3d98-operator-scripts\") pod \"keystone-1afa-account-create-update-9wfwp\" (UID: \"30ba9eb2-c009-491e-9b16-a1ce65ca3d98\") " pod="openstack-kuttl-tests/keystone-1afa-account-create-update-9wfwp" Jan 20 17:54:28 crc kubenswrapper[4558]: I0120 17:54:28.597942 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vtssd\" (UniqueName: \"kubernetes.io/projected/f0abf094-6fa0-45a0-b6f0-eaa4b4b03b55-kube-api-access-vtssd\") pod \"placement-db-create-2hfmj\" (UID: \"f0abf094-6fa0-45a0-b6f0-eaa4b4b03b55\") " pod="openstack-kuttl-tests/placement-db-create-2hfmj" Jan 20 17:54:28 crc kubenswrapper[4558]: I0120 17:54:28.598018 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h2729\" (UniqueName: \"kubernetes.io/projected/30ba9eb2-c009-491e-9b16-a1ce65ca3d98-kube-api-access-h2729\") pod \"keystone-1afa-account-create-update-9wfwp\" (UID: \"30ba9eb2-c009-491e-9b16-a1ce65ca3d98\") " pod="openstack-kuttl-tests/keystone-1afa-account-create-update-9wfwp" Jan 20 17:54:28 crc kubenswrapper[4558]: I0120 17:54:28.598423 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a92fe2a3-64bf-43a1-9b3f-b1d3184835fb-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a92fe2a3-64bf-43a1-9b3f-b1d3184835fb" (UID: "a92fe2a3-64bf-43a1-9b3f-b1d3184835fb"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:54:28 crc kubenswrapper[4558]: I0120 17:54:28.598871 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/30ba9eb2-c009-491e-9b16-a1ce65ca3d98-operator-scripts\") pod \"keystone-1afa-account-create-update-9wfwp\" (UID: \"30ba9eb2-c009-491e-9b16-a1ce65ca3d98\") " pod="openstack-kuttl-tests/keystone-1afa-account-create-update-9wfwp" Jan 20 17:54:28 crc kubenswrapper[4558]: I0120 17:54:28.615011 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h2729\" (UniqueName: \"kubernetes.io/projected/30ba9eb2-c009-491e-9b16-a1ce65ca3d98-kube-api-access-h2729\") pod \"keystone-1afa-account-create-update-9wfwp\" (UID: \"30ba9eb2-c009-491e-9b16-a1ce65ca3d98\") " pod="openstack-kuttl-tests/keystone-1afa-account-create-update-9wfwp" Jan 20 17:54:28 crc kubenswrapper[4558]: I0120 17:54:28.665613 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/placement-c24c-account-create-update-vlgct"] Jan 20 17:54:28 crc kubenswrapper[4558]: I0120 17:54:28.666798 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-c24c-account-create-update-vlgct" Jan 20 17:54:28 crc kubenswrapper[4558]: I0120 17:54:28.669443 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"placement-db-secret" Jan 20 17:54:28 crc kubenswrapper[4558]: I0120 17:54:28.674064 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-c24c-account-create-update-vlgct"] Jan 20 17:54:28 crc kubenswrapper[4558]: I0120 17:54:28.699687 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcjms\" (UniqueName: \"kubernetes.io/projected/a92fe2a3-64bf-43a1-9b3f-b1d3184835fb-kube-api-access-pcjms\") pod \"a92fe2a3-64bf-43a1-9b3f-b1d3184835fb\" (UID: \"a92fe2a3-64bf-43a1-9b3f-b1d3184835fb\") " Jan 20 17:54:28 crc kubenswrapper[4558]: I0120 17:54:28.700526 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f0abf094-6fa0-45a0-b6f0-eaa4b4b03b55-operator-scripts\") pod \"placement-db-create-2hfmj\" (UID: \"f0abf094-6fa0-45a0-b6f0-eaa4b4b03b55\") " pod="openstack-kuttl-tests/placement-db-create-2hfmj" Jan 20 17:54:28 crc kubenswrapper[4558]: I0120 17:54:28.700576 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-clqgr\" (UniqueName: \"kubernetes.io/projected/673d11aa-a3d9-47e7-a64a-d261d52a8d1b-kube-api-access-clqgr\") pod \"placement-c24c-account-create-update-vlgct\" (UID: \"673d11aa-a3d9-47e7-a64a-d261d52a8d1b\") " pod="openstack-kuttl-tests/placement-c24c-account-create-update-vlgct" Jan 20 17:54:28 crc kubenswrapper[4558]: I0120 17:54:28.700641 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vtssd\" (UniqueName: \"kubernetes.io/projected/f0abf094-6fa0-45a0-b6f0-eaa4b4b03b55-kube-api-access-vtssd\") pod \"placement-db-create-2hfmj\" (UID: \"f0abf094-6fa0-45a0-b6f0-eaa4b4b03b55\") " pod="openstack-kuttl-tests/placement-db-create-2hfmj" Jan 20 17:54:28 crc kubenswrapper[4558]: I0120 17:54:28.700706 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/673d11aa-a3d9-47e7-a64a-d261d52a8d1b-operator-scripts\") pod \"placement-c24c-account-create-update-vlgct\" (UID: \"673d11aa-a3d9-47e7-a64a-d261d52a8d1b\") " pod="openstack-kuttl-tests/placement-c24c-account-create-update-vlgct" Jan 20 17:54:28 crc kubenswrapper[4558]: I0120 17:54:28.700780 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a92fe2a3-64bf-43a1-9b3f-b1d3184835fb-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:54:28 crc kubenswrapper[4558]: I0120 17:54:28.701890 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f0abf094-6fa0-45a0-b6f0-eaa4b4b03b55-operator-scripts\") pod \"placement-db-create-2hfmj\" (UID: \"f0abf094-6fa0-45a0-b6f0-eaa4b4b03b55\") " pod="openstack-kuttl-tests/placement-db-create-2hfmj" Jan 20 17:54:28 crc kubenswrapper[4558]: I0120 17:54:28.710134 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a92fe2a3-64bf-43a1-9b3f-b1d3184835fb-kube-api-access-pcjms" (OuterVolumeSpecName: "kube-api-access-pcjms") pod "a92fe2a3-64bf-43a1-9b3f-b1d3184835fb" (UID: "a92fe2a3-64bf-43a1-9b3f-b1d3184835fb"). InnerVolumeSpecName "kube-api-access-pcjms". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:54:28 crc kubenswrapper[4558]: I0120 17:54:28.714857 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vtssd\" (UniqueName: \"kubernetes.io/projected/f0abf094-6fa0-45a0-b6f0-eaa4b4b03b55-kube-api-access-vtssd\") pod \"placement-db-create-2hfmj\" (UID: \"f0abf094-6fa0-45a0-b6f0-eaa4b4b03b55\") " pod="openstack-kuttl-tests/placement-db-create-2hfmj" Jan 20 17:54:28 crc kubenswrapper[4558]: I0120 17:54:28.761559 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-db-create-k6q4m"] Jan 20 17:54:28 crc kubenswrapper[4558]: I0120 17:54:28.762843 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-db-create-k6q4m" Jan 20 17:54:28 crc kubenswrapper[4558]: I0120 17:54:28.769209 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-db-create-k6q4m"] Jan 20 17:54:28 crc kubenswrapper[4558]: I0120 17:54:28.802609 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-clqgr\" (UniqueName: \"kubernetes.io/projected/673d11aa-a3d9-47e7-a64a-d261d52a8d1b-kube-api-access-clqgr\") pod \"placement-c24c-account-create-update-vlgct\" (UID: \"673d11aa-a3d9-47e7-a64a-d261d52a8d1b\") " pod="openstack-kuttl-tests/placement-c24c-account-create-update-vlgct" Jan 20 17:54:28 crc kubenswrapper[4558]: I0120 17:54:28.802697 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hdcvc\" (UniqueName: \"kubernetes.io/projected/89488e78-3aae-4840-9ae3-2c09ca0013be-kube-api-access-hdcvc\") pod \"glance-db-create-k6q4m\" (UID: \"89488e78-3aae-4840-9ae3-2c09ca0013be\") " pod="openstack-kuttl-tests/glance-db-create-k6q4m" Jan 20 17:54:28 crc kubenswrapper[4558]: I0120 17:54:28.802764 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/89488e78-3aae-4840-9ae3-2c09ca0013be-operator-scripts\") pod \"glance-db-create-k6q4m\" (UID: \"89488e78-3aae-4840-9ae3-2c09ca0013be\") " pod="openstack-kuttl-tests/glance-db-create-k6q4m" Jan 20 17:54:28 crc kubenswrapper[4558]: I0120 17:54:28.802918 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/673d11aa-a3d9-47e7-a64a-d261d52a8d1b-operator-scripts\") pod \"placement-c24c-account-create-update-vlgct\" (UID: \"673d11aa-a3d9-47e7-a64a-d261d52a8d1b\") " pod="openstack-kuttl-tests/placement-c24c-account-create-update-vlgct" Jan 20 17:54:28 crc kubenswrapper[4558]: I0120 17:54:28.803155 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcjms\" (UniqueName: \"kubernetes.io/projected/a92fe2a3-64bf-43a1-9b3f-b1d3184835fb-kube-api-access-pcjms\") on node \"crc\" DevicePath \"\"" Jan 20 17:54:28 crc kubenswrapper[4558]: I0120 17:54:28.804121 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/673d11aa-a3d9-47e7-a64a-d261d52a8d1b-operator-scripts\") pod \"placement-c24c-account-create-update-vlgct\" (UID: \"673d11aa-a3d9-47e7-a64a-d261d52a8d1b\") " pod="openstack-kuttl-tests/placement-c24c-account-create-update-vlgct" Jan 20 17:54:28 crc kubenswrapper[4558]: I0120 17:54:28.817236 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-clqgr\" (UniqueName: \"kubernetes.io/projected/673d11aa-a3d9-47e7-a64a-d261d52a8d1b-kube-api-access-clqgr\") pod \"placement-c24c-account-create-update-vlgct\" (UID: \"673d11aa-a3d9-47e7-a64a-d261d52a8d1b\") " pod="openstack-kuttl-tests/placement-c24c-account-create-update-vlgct" Jan 20 17:54:28 crc kubenswrapper[4558]: I0120 17:54:28.855951 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-94fd-account-create-update-l2nkn"] Jan 20 17:54:28 crc kubenswrapper[4558]: I0120 17:54:28.857028 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-94fd-account-create-update-l2nkn" Jan 20 17:54:28 crc kubenswrapper[4558]: I0120 17:54:28.859868 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-db-secret" Jan 20 17:54:28 crc kubenswrapper[4558]: I0120 17:54:28.869513 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-db-create-2hfmj" Jan 20 17:54:28 crc kubenswrapper[4558]: I0120 17:54:28.889338 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-94fd-account-create-update-l2nkn"] Jan 20 17:54:28 crc kubenswrapper[4558]: I0120 17:54:28.904417 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ed73bb12-40f2-4d0c-913a-07f22e33b1b4-operator-scripts\") pod \"glance-94fd-account-create-update-l2nkn\" (UID: \"ed73bb12-40f2-4d0c-913a-07f22e33b1b4\") " pod="openstack-kuttl-tests/glance-94fd-account-create-update-l2nkn" Jan 20 17:54:28 crc kubenswrapper[4558]: I0120 17:54:28.904511 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hdcvc\" (UniqueName: \"kubernetes.io/projected/89488e78-3aae-4840-9ae3-2c09ca0013be-kube-api-access-hdcvc\") pod \"glance-db-create-k6q4m\" (UID: \"89488e78-3aae-4840-9ae3-2c09ca0013be\") " pod="openstack-kuttl-tests/glance-db-create-k6q4m" Jan 20 17:54:28 crc kubenswrapper[4558]: I0120 17:54:28.904570 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/89488e78-3aae-4840-9ae3-2c09ca0013be-operator-scripts\") pod \"glance-db-create-k6q4m\" (UID: \"89488e78-3aae-4840-9ae3-2c09ca0013be\") " pod="openstack-kuttl-tests/glance-db-create-k6q4m" Jan 20 17:54:28 crc kubenswrapper[4558]: I0120 17:54:28.904899 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kxv57\" (UniqueName: \"kubernetes.io/projected/ed73bb12-40f2-4d0c-913a-07f22e33b1b4-kube-api-access-kxv57\") pod \"glance-94fd-account-create-update-l2nkn\" (UID: \"ed73bb12-40f2-4d0c-913a-07f22e33b1b4\") " pod="openstack-kuttl-tests/glance-94fd-account-create-update-l2nkn" Jan 20 17:54:28 crc kubenswrapper[4558]: I0120 17:54:28.908153 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/89488e78-3aae-4840-9ae3-2c09ca0013be-operator-scripts\") pod \"glance-db-create-k6q4m\" (UID: \"89488e78-3aae-4840-9ae3-2c09ca0013be\") " pod="openstack-kuttl-tests/glance-db-create-k6q4m" Jan 20 17:54:28 crc kubenswrapper[4558]: I0120 17:54:28.910971 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-1afa-account-create-update-9wfwp" Jan 20 17:54:28 crc kubenswrapper[4558]: I0120 17:54:28.921638 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hdcvc\" (UniqueName: \"kubernetes.io/projected/89488e78-3aae-4840-9ae3-2c09ca0013be-kube-api-access-hdcvc\") pod \"glance-db-create-k6q4m\" (UID: \"89488e78-3aae-4840-9ae3-2c09ca0013be\") " pod="openstack-kuttl-tests/glance-db-create-k6q4m" Jan 20 17:54:28 crc kubenswrapper[4558]: I0120 17:54:28.982635 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-c24c-account-create-update-vlgct" Jan 20 17:54:29 crc kubenswrapper[4558]: I0120 17:54:29.007904 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kxv57\" (UniqueName: \"kubernetes.io/projected/ed73bb12-40f2-4d0c-913a-07f22e33b1b4-kube-api-access-kxv57\") pod \"glance-94fd-account-create-update-l2nkn\" (UID: \"ed73bb12-40f2-4d0c-913a-07f22e33b1b4\") " pod="openstack-kuttl-tests/glance-94fd-account-create-update-l2nkn" Jan 20 17:54:29 crc kubenswrapper[4558]: I0120 17:54:29.007991 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ed73bb12-40f2-4d0c-913a-07f22e33b1b4-operator-scripts\") pod \"glance-94fd-account-create-update-l2nkn\" (UID: \"ed73bb12-40f2-4d0c-913a-07f22e33b1b4\") " pod="openstack-kuttl-tests/glance-94fd-account-create-update-l2nkn" Jan 20 17:54:29 crc kubenswrapper[4558]: I0120 17:54:29.008662 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ed73bb12-40f2-4d0c-913a-07f22e33b1b4-operator-scripts\") pod \"glance-94fd-account-create-update-l2nkn\" (UID: \"ed73bb12-40f2-4d0c-913a-07f22e33b1b4\") " pod="openstack-kuttl-tests/glance-94fd-account-create-update-l2nkn" Jan 20 17:54:29 crc kubenswrapper[4558]: I0120 17:54:29.015632 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-db-create-vfppk"] Jan 20 17:54:29 crc kubenswrapper[4558]: I0120 17:54:29.025012 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kxv57\" (UniqueName: \"kubernetes.io/projected/ed73bb12-40f2-4d0c-913a-07f22e33b1b4-kube-api-access-kxv57\") pod \"glance-94fd-account-create-update-l2nkn\" (UID: \"ed73bb12-40f2-4d0c-913a-07f22e33b1b4\") " pod="openstack-kuttl-tests/glance-94fd-account-create-update-l2nkn" Jan 20 17:54:29 crc kubenswrapper[4558]: I0120 17:54:29.109579 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/cd300665-4f75-4ed7-9166-4b48e1aeffc4-etc-swift\") pod \"swift-storage-0\" (UID: \"cd300665-4f75-4ed7-9166-4b48e1aeffc4\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:54:29 crc kubenswrapper[4558]: I0120 17:54:29.114946 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/cd300665-4f75-4ed7-9166-4b48e1aeffc4-etc-swift\") pod \"swift-storage-0\" (UID: \"cd300665-4f75-4ed7-9166-4b48e1aeffc4\") " pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:54:29 crc kubenswrapper[4558]: I0120 17:54:29.119060 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:54:29 crc kubenswrapper[4558]: I0120 17:54:29.159661 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-db-create-k6q4m" Jan 20 17:54:29 crc kubenswrapper[4558]: I0120 17:54:29.175605 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-94fd-account-create-update-l2nkn" Jan 20 17:54:29 crc kubenswrapper[4558]: I0120 17:54:29.258748 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-create-vfppk" event={"ID":"41d6ff7a-4cdf-4add-834d-eb19a40181b9","Type":"ContainerStarted","Data":"d1d7d13f1610163a6123d101b3e473bb3892de0996d741588af150949c4555f7"} Jan 20 17:54:29 crc kubenswrapper[4558]: I0120 17:54:29.258815 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-create-vfppk" event={"ID":"41d6ff7a-4cdf-4add-834d-eb19a40181b9","Type":"ContainerStarted","Data":"68f4eb6e11c8535347790d498bfec02465f00c466a200aa78161983d9b75d55f"} Jan 20 17:54:29 crc kubenswrapper[4558]: I0120 17:54:29.264547 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-j22zm" event={"ID":"a92fe2a3-64bf-43a1-9b3f-b1d3184835fb","Type":"ContainerDied","Data":"7eba7bc3cc2ddb258528d9928e0234d29277945a1e9e056144507f98a6b70228"} Jan 20 17:54:29 crc kubenswrapper[4558]: I0120 17:54:29.264590 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7eba7bc3cc2ddb258528d9928e0234d29277945a1e9e056144507f98a6b70228" Jan 20 17:54:29 crc kubenswrapper[4558]: I0120 17:54:29.264644 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-j22zm" Jan 20 17:54:29 crc kubenswrapper[4558]: I0120 17:54:29.282284 4558 generic.go:334] "Generic (PLEG): container finished" podID="0dc1e7fb-21d3-4077-87c9-f2ede71a561a" containerID="5c9f046a891a29b5a5cbad4772933fbe8340e6d505155496bf492f03afe1f344" exitCode=0 Jan 20 17:54:29 crc kubenswrapper[4558]: I0120 17:54:29.282514 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-ring-rebalance-xznwn" event={"ID":"0dc1e7fb-21d3-4077-87c9-f2ede71a561a","Type":"ContainerDied","Data":"5c9f046a891a29b5a5cbad4772933fbe8340e6d505155496bf492f03afe1f344"} Jan 20 17:54:29 crc kubenswrapper[4558]: I0120 17:54:29.305568 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/keystone-db-create-vfppk" podStartSLOduration=1.30554083 podStartE2EDuration="1.30554083s" podCreationTimestamp="2026-01-20 17:54:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:54:29.277476099 +0000 UTC m=+4363.037814066" watchObservedRunningTime="2026-01-20 17:54:29.30554083 +0000 UTC m=+4363.065878798" Jan 20 17:54:29 crc kubenswrapper[4558]: I0120 17:54:29.313472 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-db-create-2hfmj"] Jan 20 17:54:29 crc kubenswrapper[4558]: W0120 17:54:29.320401 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf0abf094_6fa0_45a0_b6f0_eaa4b4b03b55.slice/crio-6fd520a91ee06f8f4b5d2f4f0b90aa6ca8514afaa75de77808efe44f5b06fdb2 WatchSource:0}: Error finding container 6fd520a91ee06f8f4b5d2f4f0b90aa6ca8514afaa75de77808efe44f5b06fdb2: Status 404 returned error can't find the container with id 6fd520a91ee06f8f4b5d2f4f0b90aa6ca8514afaa75de77808efe44f5b06fdb2 Jan 20 17:54:29 crc kubenswrapper[4558]: I0120 17:54:29.379868 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-1afa-account-create-update-9wfwp"] Jan 20 
17:54:29 crc kubenswrapper[4558]: I0120 17:54:29.461222 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-c24c-account-create-update-vlgct"] Jan 20 17:54:29 crc kubenswrapper[4558]: W0120 17:54:29.496353 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod673d11aa_a3d9_47e7_a64a_d261d52a8d1b.slice/crio-a4e4844602c499d4332f1bece9dd1eefcd25fa9289507c422c0f4c899ec9216f WatchSource:0}: Error finding container a4e4844602c499d4332f1bece9dd1eefcd25fa9289507c422c0f4c899ec9216f: Status 404 returned error can't find the container with id a4e4844602c499d4332f1bece9dd1eefcd25fa9289507c422c0f4c899ec9216f Jan 20 17:54:29 crc kubenswrapper[4558]: I0120 17:54:29.568324 4558 scope.go:117] "RemoveContainer" containerID="4ea7fa61d8b21007a044345acec89fcebe56c2921cf0f76ff2002f44bdc88ede" Jan 20 17:54:29 crc kubenswrapper[4558]: E0120 17:54:29.568549 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:54:29 crc kubenswrapper[4558]: I0120 17:54:29.600555 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-storage-0"] Jan 20 17:54:29 crc kubenswrapper[4558]: W0120 17:54:29.619350 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcd300665_4f75_4ed7_9166_4b48e1aeffc4.slice/crio-db889e52f3b7f1dce867711e2b49e1d766e923e4984660f1244adda03007d81e WatchSource:0}: Error finding container db889e52f3b7f1dce867711e2b49e1d766e923e4984660f1244adda03007d81e: Status 404 returned error can't find the container with id db889e52f3b7f1dce867711e2b49e1d766e923e4984660f1244adda03007d81e Jan 20 17:54:29 crc kubenswrapper[4558]: I0120 17:54:29.707786 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-db-create-k6q4m"] Jan 20 17:54:29 crc kubenswrapper[4558]: I0120 17:54:29.719738 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-94fd-account-create-update-l2nkn"] Jan 20 17:54:30 crc kubenswrapper[4558]: I0120 17:54:30.291194 4558 generic.go:334] "Generic (PLEG): container finished" podID="89488e78-3aae-4840-9ae3-2c09ca0013be" containerID="420b7880db84675aa4940091f0c98f07078b76d3e87eb3eb2eb3be3c8526f0cb" exitCode=0 Jan 20 17:54:30 crc kubenswrapper[4558]: I0120 17:54:30.291259 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-create-k6q4m" event={"ID":"89488e78-3aae-4840-9ae3-2c09ca0013be","Type":"ContainerDied","Data":"420b7880db84675aa4940091f0c98f07078b76d3e87eb3eb2eb3be3c8526f0cb"} Jan 20 17:54:30 crc kubenswrapper[4558]: I0120 17:54:30.292536 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-create-k6q4m" event={"ID":"89488e78-3aae-4840-9ae3-2c09ca0013be","Type":"ContainerStarted","Data":"a8dca917a843ee025b53a5d593d8efb4bd838c8a699e611343429cb2b9da3146"} Jan 20 17:54:30 crc kubenswrapper[4558]: I0120 17:54:30.293883 4558 generic.go:334] "Generic (PLEG): container finished" podID="f0abf094-6fa0-45a0-b6f0-eaa4b4b03b55" 
containerID="e08ded0dee2ee278a2c6a546bbe6145915173951c4cab2b15c0dd0b32eee2d30" exitCode=0 Jan 20 17:54:30 crc kubenswrapper[4558]: I0120 17:54:30.293943 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-create-2hfmj" event={"ID":"f0abf094-6fa0-45a0-b6f0-eaa4b4b03b55","Type":"ContainerDied","Data":"e08ded0dee2ee278a2c6a546bbe6145915173951c4cab2b15c0dd0b32eee2d30"} Jan 20 17:54:30 crc kubenswrapper[4558]: I0120 17:54:30.293971 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-create-2hfmj" event={"ID":"f0abf094-6fa0-45a0-b6f0-eaa4b4b03b55","Type":"ContainerStarted","Data":"6fd520a91ee06f8f4b5d2f4f0b90aa6ca8514afaa75de77808efe44f5b06fdb2"} Jan 20 17:54:30 crc kubenswrapper[4558]: I0120 17:54:30.297034 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"cd300665-4f75-4ed7-9166-4b48e1aeffc4","Type":"ContainerStarted","Data":"08c5c660504f209c3b527a903bc2ca2f4a2088161c85be3401c90c3cddbca989"} Jan 20 17:54:30 crc kubenswrapper[4558]: I0120 17:54:30.297090 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"cd300665-4f75-4ed7-9166-4b48e1aeffc4","Type":"ContainerStarted","Data":"640b8b31652d22d5361c56898f4108c3cc9d07fa0b6e09405e8ee47586a8b2ee"} Jan 20 17:54:30 crc kubenswrapper[4558]: I0120 17:54:30.297102 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"cd300665-4f75-4ed7-9166-4b48e1aeffc4","Type":"ContainerStarted","Data":"3d23b588456f64012fb1649e6aa289cb457c88d5ad66f1354c313644bf1a8a97"} Jan 20 17:54:30 crc kubenswrapper[4558]: I0120 17:54:30.297112 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"cd300665-4f75-4ed7-9166-4b48e1aeffc4","Type":"ContainerStarted","Data":"db889e52f3b7f1dce867711e2b49e1d766e923e4984660f1244adda03007d81e"} Jan 20 17:54:30 crc kubenswrapper[4558]: I0120 17:54:30.298360 4558 generic.go:334] "Generic (PLEG): container finished" podID="41d6ff7a-4cdf-4add-834d-eb19a40181b9" containerID="d1d7d13f1610163a6123d101b3e473bb3892de0996d741588af150949c4555f7" exitCode=0 Jan 20 17:54:30 crc kubenswrapper[4558]: I0120 17:54:30.298418 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-create-vfppk" event={"ID":"41d6ff7a-4cdf-4add-834d-eb19a40181b9","Type":"ContainerDied","Data":"d1d7d13f1610163a6123d101b3e473bb3892de0996d741588af150949c4555f7"} Jan 20 17:54:30 crc kubenswrapper[4558]: I0120 17:54:30.300555 4558 generic.go:334] "Generic (PLEG): container finished" podID="ed73bb12-40f2-4d0c-913a-07f22e33b1b4" containerID="411b4aa95f6e968ca6e019afae9f0a8a0c363318723d95b4e9ee96441a29221b" exitCode=0 Jan 20 17:54:30 crc kubenswrapper[4558]: I0120 17:54:30.300610 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-94fd-account-create-update-l2nkn" event={"ID":"ed73bb12-40f2-4d0c-913a-07f22e33b1b4","Type":"ContainerDied","Data":"411b4aa95f6e968ca6e019afae9f0a8a0c363318723d95b4e9ee96441a29221b"} Jan 20 17:54:30 crc kubenswrapper[4558]: I0120 17:54:30.300631 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-94fd-account-create-update-l2nkn" event={"ID":"ed73bb12-40f2-4d0c-913a-07f22e33b1b4","Type":"ContainerStarted","Data":"d874992f063b4f8def007867efc8d20cf2f365936a8561633f277485f1cc8131"} Jan 20 17:54:30 crc kubenswrapper[4558]: I0120 
17:54:30.301923 4558 generic.go:334] "Generic (PLEG): container finished" podID="30ba9eb2-c009-491e-9b16-a1ce65ca3d98" containerID="87f3a4508304b705d3874ba8981a4056c534e97560a2e9d54101a3c112b26f50" exitCode=0 Jan 20 17:54:30 crc kubenswrapper[4558]: I0120 17:54:30.301977 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-1afa-account-create-update-9wfwp" event={"ID":"30ba9eb2-c009-491e-9b16-a1ce65ca3d98","Type":"ContainerDied","Data":"87f3a4508304b705d3874ba8981a4056c534e97560a2e9d54101a3c112b26f50"} Jan 20 17:54:30 crc kubenswrapper[4558]: I0120 17:54:30.301997 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-1afa-account-create-update-9wfwp" event={"ID":"30ba9eb2-c009-491e-9b16-a1ce65ca3d98","Type":"ContainerStarted","Data":"80c82a186e3d63c1d029ca89ecdb46ea63a09869af086b8df17ee53836b756d5"} Jan 20 17:54:30 crc kubenswrapper[4558]: I0120 17:54:30.303414 4558 generic.go:334] "Generic (PLEG): container finished" podID="673d11aa-a3d9-47e7-a64a-d261d52a8d1b" containerID="5168f32fec815124d42587fac8ad3488678817be8cf8058893ebb4def1511256" exitCode=0 Jan 20 17:54:30 crc kubenswrapper[4558]: I0120 17:54:30.303563 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-c24c-account-create-update-vlgct" event={"ID":"673d11aa-a3d9-47e7-a64a-d261d52a8d1b","Type":"ContainerDied","Data":"5168f32fec815124d42587fac8ad3488678817be8cf8058893ebb4def1511256"} Jan 20 17:54:30 crc kubenswrapper[4558]: I0120 17:54:30.303601 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-c24c-account-create-update-vlgct" event={"ID":"673d11aa-a3d9-47e7-a64a-d261d52a8d1b","Type":"ContainerStarted","Data":"a4e4844602c499d4332f1bece9dd1eefcd25fa9289507c422c0f4c899ec9216f"} Jan 20 17:54:30 crc kubenswrapper[4558]: I0120 17:54:30.685922 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/swift-ring-rebalance-xznwn" Jan 20 17:54:30 crc kubenswrapper[4558]: I0120 17:54:30.851182 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vl6hr\" (UniqueName: \"kubernetes.io/projected/0dc1e7fb-21d3-4077-87c9-f2ede71a561a-kube-api-access-vl6hr\") pod \"0dc1e7fb-21d3-4077-87c9-f2ede71a561a\" (UID: \"0dc1e7fb-21d3-4077-87c9-f2ede71a561a\") " Jan 20 17:54:30 crc kubenswrapper[4558]: I0120 17:54:30.851396 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/0dc1e7fb-21d3-4077-87c9-f2ede71a561a-ring-data-devices\") pod \"0dc1e7fb-21d3-4077-87c9-f2ede71a561a\" (UID: \"0dc1e7fb-21d3-4077-87c9-f2ede71a561a\") " Jan 20 17:54:30 crc kubenswrapper[4558]: I0120 17:54:30.851571 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/0dc1e7fb-21d3-4077-87c9-f2ede71a561a-swiftconf\") pod \"0dc1e7fb-21d3-4077-87c9-f2ede71a561a\" (UID: \"0dc1e7fb-21d3-4077-87c9-f2ede71a561a\") " Jan 20 17:54:30 crc kubenswrapper[4558]: I0120 17:54:30.851654 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0dc1e7fb-21d3-4077-87c9-f2ede71a561a-combined-ca-bundle\") pod \"0dc1e7fb-21d3-4077-87c9-f2ede71a561a\" (UID: \"0dc1e7fb-21d3-4077-87c9-f2ede71a561a\") " Jan 20 17:54:30 crc kubenswrapper[4558]: I0120 17:54:30.851761 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/0dc1e7fb-21d3-4077-87c9-f2ede71a561a-dispersionconf\") pod \"0dc1e7fb-21d3-4077-87c9-f2ede71a561a\" (UID: \"0dc1e7fb-21d3-4077-87c9-f2ede71a561a\") " Jan 20 17:54:30 crc kubenswrapper[4558]: I0120 17:54:30.851838 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/0dc1e7fb-21d3-4077-87c9-f2ede71a561a-etc-swift\") pod \"0dc1e7fb-21d3-4077-87c9-f2ede71a561a\" (UID: \"0dc1e7fb-21d3-4077-87c9-f2ede71a561a\") " Jan 20 17:54:30 crc kubenswrapper[4558]: I0120 17:54:30.851939 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0dc1e7fb-21d3-4077-87c9-f2ede71a561a-scripts\") pod \"0dc1e7fb-21d3-4077-87c9-f2ede71a561a\" (UID: \"0dc1e7fb-21d3-4077-87c9-f2ede71a561a\") " Jan 20 17:54:30 crc kubenswrapper[4558]: I0120 17:54:30.856611 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0dc1e7fb-21d3-4077-87c9-f2ede71a561a-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "0dc1e7fb-21d3-4077-87c9-f2ede71a561a" (UID: "0dc1e7fb-21d3-4077-87c9-f2ede71a561a"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:54:30 crc kubenswrapper[4558]: I0120 17:54:30.856814 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0dc1e7fb-21d3-4077-87c9-f2ede71a561a-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "0dc1e7fb-21d3-4077-87c9-f2ede71a561a" (UID: "0dc1e7fb-21d3-4077-87c9-f2ede71a561a"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:54:30 crc kubenswrapper[4558]: I0120 17:54:30.860213 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0dc1e7fb-21d3-4077-87c9-f2ede71a561a-kube-api-access-vl6hr" (OuterVolumeSpecName: "kube-api-access-vl6hr") pod "0dc1e7fb-21d3-4077-87c9-f2ede71a561a" (UID: "0dc1e7fb-21d3-4077-87c9-f2ede71a561a"). InnerVolumeSpecName "kube-api-access-vl6hr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:54:30 crc kubenswrapper[4558]: I0120 17:54:30.862219 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0dc1e7fb-21d3-4077-87c9-f2ede71a561a-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "0dc1e7fb-21d3-4077-87c9-f2ede71a561a" (UID: "0dc1e7fb-21d3-4077-87c9-f2ede71a561a"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:54:30 crc kubenswrapper[4558]: I0120 17:54:30.877888 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0dc1e7fb-21d3-4077-87c9-f2ede71a561a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0dc1e7fb-21d3-4077-87c9-f2ede71a561a" (UID: "0dc1e7fb-21d3-4077-87c9-f2ede71a561a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:54:30 crc kubenswrapper[4558]: I0120 17:54:30.879434 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0dc1e7fb-21d3-4077-87c9-f2ede71a561a-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "0dc1e7fb-21d3-4077-87c9-f2ede71a561a" (UID: "0dc1e7fb-21d3-4077-87c9-f2ede71a561a"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:54:30 crc kubenswrapper[4558]: I0120 17:54:30.879786 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0dc1e7fb-21d3-4077-87c9-f2ede71a561a-scripts" (OuterVolumeSpecName: "scripts") pod "0dc1e7fb-21d3-4077-87c9-f2ede71a561a" (UID: "0dc1e7fb-21d3-4077-87c9-f2ede71a561a"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:54:30 crc kubenswrapper[4558]: I0120 17:54:30.953525 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0dc1e7fb-21d3-4077-87c9-f2ede71a561a-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:54:30 crc kubenswrapper[4558]: I0120 17:54:30.953810 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vl6hr\" (UniqueName: \"kubernetes.io/projected/0dc1e7fb-21d3-4077-87c9-f2ede71a561a-kube-api-access-vl6hr\") on node \"crc\" DevicePath \"\"" Jan 20 17:54:30 crc kubenswrapper[4558]: I0120 17:54:30.953885 4558 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/0dc1e7fb-21d3-4077-87c9-f2ede71a561a-ring-data-devices\") on node \"crc\" DevicePath \"\"" Jan 20 17:54:30 crc kubenswrapper[4558]: I0120 17:54:30.953939 4558 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/0dc1e7fb-21d3-4077-87c9-f2ede71a561a-swiftconf\") on node \"crc\" DevicePath \"\"" Jan 20 17:54:30 crc kubenswrapper[4558]: I0120 17:54:30.953989 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0dc1e7fb-21d3-4077-87c9-f2ede71a561a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:54:30 crc kubenswrapper[4558]: I0120 17:54:30.954038 4558 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/0dc1e7fb-21d3-4077-87c9-f2ede71a561a-dispersionconf\") on node \"crc\" DevicePath \"\"" Jan 20 17:54:30 crc kubenswrapper[4558]: I0120 17:54:30.954099 4558 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/0dc1e7fb-21d3-4077-87c9-f2ede71a561a-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 20 17:54:31 crc kubenswrapper[4558]: I0120 17:54:31.334106 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"cd300665-4f75-4ed7-9166-4b48e1aeffc4","Type":"ContainerStarted","Data":"92a5f58dc2bcdf82ee4c76417da6d37919bddddc7444ff849574003ec7f86cee"} Jan 20 17:54:31 crc kubenswrapper[4558]: I0120 17:54:31.334202 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"cd300665-4f75-4ed7-9166-4b48e1aeffc4","Type":"ContainerStarted","Data":"678ea6abd080fbce2550848f03b87a5c11341574e2839169896c148295bf7600"} Jan 20 17:54:31 crc kubenswrapper[4558]: I0120 17:54:31.334214 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"cd300665-4f75-4ed7-9166-4b48e1aeffc4","Type":"ContainerStarted","Data":"91605227d295d861286db724135f07f88f4d737cec1e406e2f078ef2ee97ac12"} Jan 20 17:54:31 crc kubenswrapper[4558]: I0120 17:54:31.334224 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"cd300665-4f75-4ed7-9166-4b48e1aeffc4","Type":"ContainerStarted","Data":"8a87e25ff9ae7de7c19892601f394f9d10fa27cee39a8831c6bd12dcb8c3655b"} Jan 20 17:54:31 crc kubenswrapper[4558]: I0120 17:54:31.334233 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"cd300665-4f75-4ed7-9166-4b48e1aeffc4","Type":"ContainerStarted","Data":"75c122801e2470bd8e501bddfa6a0f20a2d2dbaa3393df3997b6e399b869bab6"} Jan 20 17:54:31 crc kubenswrapper[4558]: I0120 17:54:31.334241 4558 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"cd300665-4f75-4ed7-9166-4b48e1aeffc4","Type":"ContainerStarted","Data":"593ca97090e4c8e828391df9bb4f851893cd4f22b0b607f43e4655293c6f5980"} Jan 20 17:54:31 crc kubenswrapper[4558]: I0120 17:54:31.334249 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"cd300665-4f75-4ed7-9166-4b48e1aeffc4","Type":"ContainerStarted","Data":"649d6dfbc7da772af01bf0565e0467026756c4ec2fe3aed3c80157258c3eea09"} Jan 20 17:54:31 crc kubenswrapper[4558]: I0120 17:54:31.337458 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-ring-rebalance-xznwn" Jan 20 17:54:31 crc kubenswrapper[4558]: I0120 17:54:31.345187 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-ring-rebalance-xznwn" event={"ID":"0dc1e7fb-21d3-4077-87c9-f2ede71a561a","Type":"ContainerDied","Data":"e9f199ff4b378cb04728396fd1a1b0db2bc693086e84ffcc259aef5136cb0dc0"} Jan 20 17:54:31 crc kubenswrapper[4558]: I0120 17:54:31.345211 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e9f199ff4b378cb04728396fd1a1b0db2bc693086e84ffcc259aef5136cb0dc0" Jan 20 17:54:31 crc kubenswrapper[4558]: I0120 17:54:31.738402 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-db-create-k6q4m" Jan 20 17:54:31 crc kubenswrapper[4558]: I0120 17:54:31.872874 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/89488e78-3aae-4840-9ae3-2c09ca0013be-operator-scripts\") pod \"89488e78-3aae-4840-9ae3-2c09ca0013be\" (UID: \"89488e78-3aae-4840-9ae3-2c09ca0013be\") " Jan 20 17:54:31 crc kubenswrapper[4558]: I0120 17:54:31.872926 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hdcvc\" (UniqueName: \"kubernetes.io/projected/89488e78-3aae-4840-9ae3-2c09ca0013be-kube-api-access-hdcvc\") pod \"89488e78-3aae-4840-9ae3-2c09ca0013be\" (UID: \"89488e78-3aae-4840-9ae3-2c09ca0013be\") " Jan 20 17:54:31 crc kubenswrapper[4558]: I0120 17:54:31.875646 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/89488e78-3aae-4840-9ae3-2c09ca0013be-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "89488e78-3aae-4840-9ae3-2c09ca0013be" (UID: "89488e78-3aae-4840-9ae3-2c09ca0013be"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:54:31 crc kubenswrapper[4558]: I0120 17:54:31.890151 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/89488e78-3aae-4840-9ae3-2c09ca0013be-kube-api-access-hdcvc" (OuterVolumeSpecName: "kube-api-access-hdcvc") pod "89488e78-3aae-4840-9ae3-2c09ca0013be" (UID: "89488e78-3aae-4840-9ae3-2c09ca0013be"). InnerVolumeSpecName "kube-api-access-hdcvc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:54:31 crc kubenswrapper[4558]: I0120 17:54:31.975551 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/89488e78-3aae-4840-9ae3-2c09ca0013be-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:54:31 crc kubenswrapper[4558]: I0120 17:54:31.975591 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hdcvc\" (UniqueName: \"kubernetes.io/projected/89488e78-3aae-4840-9ae3-2c09ca0013be-kube-api-access-hdcvc\") on node \"crc\" DevicePath \"\"" Jan 20 17:54:32 crc kubenswrapper[4558]: I0120 17:54:32.000510 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-94fd-account-create-update-l2nkn" Jan 20 17:54:32 crc kubenswrapper[4558]: I0120 17:54:32.013502 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-c24c-account-create-update-vlgct" Jan 20 17:54:32 crc kubenswrapper[4558]: I0120 17:54:32.013998 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-db-create-2hfmj" Jan 20 17:54:32 crc kubenswrapper[4558]: I0120 17:54:32.059824 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-j22zm"] Jan 20 17:54:32 crc kubenswrapper[4558]: I0120 17:54:32.073142 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-j22zm"] Jan 20 17:54:32 crc kubenswrapper[4558]: I0120 17:54:32.179885 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vtssd\" (UniqueName: \"kubernetes.io/projected/f0abf094-6fa0-45a0-b6f0-eaa4b4b03b55-kube-api-access-vtssd\") pod \"f0abf094-6fa0-45a0-b6f0-eaa4b4b03b55\" (UID: \"f0abf094-6fa0-45a0-b6f0-eaa4b4b03b55\") " Jan 20 17:54:32 crc kubenswrapper[4558]: I0120 17:54:32.179981 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-clqgr\" (UniqueName: \"kubernetes.io/projected/673d11aa-a3d9-47e7-a64a-d261d52a8d1b-kube-api-access-clqgr\") pod \"673d11aa-a3d9-47e7-a64a-d261d52a8d1b\" (UID: \"673d11aa-a3d9-47e7-a64a-d261d52a8d1b\") " Jan 20 17:54:32 crc kubenswrapper[4558]: I0120 17:54:32.180072 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kxv57\" (UniqueName: \"kubernetes.io/projected/ed73bb12-40f2-4d0c-913a-07f22e33b1b4-kube-api-access-kxv57\") pod \"ed73bb12-40f2-4d0c-913a-07f22e33b1b4\" (UID: \"ed73bb12-40f2-4d0c-913a-07f22e33b1b4\") " Jan 20 17:54:32 crc kubenswrapper[4558]: I0120 17:54:32.180126 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/673d11aa-a3d9-47e7-a64a-d261d52a8d1b-operator-scripts\") pod \"673d11aa-a3d9-47e7-a64a-d261d52a8d1b\" (UID: \"673d11aa-a3d9-47e7-a64a-d261d52a8d1b\") " Jan 20 17:54:32 crc kubenswrapper[4558]: I0120 17:54:32.180194 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f0abf094-6fa0-45a0-b6f0-eaa4b4b03b55-operator-scripts\") pod \"f0abf094-6fa0-45a0-b6f0-eaa4b4b03b55\" (UID: \"f0abf094-6fa0-45a0-b6f0-eaa4b4b03b55\") " Jan 20 17:54:32 crc kubenswrapper[4558]: I0120 17:54:32.180240 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started 
for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ed73bb12-40f2-4d0c-913a-07f22e33b1b4-operator-scripts\") pod \"ed73bb12-40f2-4d0c-913a-07f22e33b1b4\" (UID: \"ed73bb12-40f2-4d0c-913a-07f22e33b1b4\") " Jan 20 17:54:32 crc kubenswrapper[4558]: I0120 17:54:32.181293 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ed73bb12-40f2-4d0c-913a-07f22e33b1b4-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ed73bb12-40f2-4d0c-913a-07f22e33b1b4" (UID: "ed73bb12-40f2-4d0c-913a-07f22e33b1b4"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:54:32 crc kubenswrapper[4558]: I0120 17:54:32.181872 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/673d11aa-a3d9-47e7-a64a-d261d52a8d1b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "673d11aa-a3d9-47e7-a64a-d261d52a8d1b" (UID: "673d11aa-a3d9-47e7-a64a-d261d52a8d1b"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:54:32 crc kubenswrapper[4558]: I0120 17:54:32.182333 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f0abf094-6fa0-45a0-b6f0-eaa4b4b03b55-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f0abf094-6fa0-45a0-b6f0-eaa4b4b03b55" (UID: "f0abf094-6fa0-45a0-b6f0-eaa4b4b03b55"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:54:32 crc kubenswrapper[4558]: I0120 17:54:32.214752 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/673d11aa-a3d9-47e7-a64a-d261d52a8d1b-kube-api-access-clqgr" (OuterVolumeSpecName: "kube-api-access-clqgr") pod "673d11aa-a3d9-47e7-a64a-d261d52a8d1b" (UID: "673d11aa-a3d9-47e7-a64a-d261d52a8d1b"). InnerVolumeSpecName "kube-api-access-clqgr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:54:32 crc kubenswrapper[4558]: I0120 17:54:32.219365 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ed73bb12-40f2-4d0c-913a-07f22e33b1b4-kube-api-access-kxv57" (OuterVolumeSpecName: "kube-api-access-kxv57") pod "ed73bb12-40f2-4d0c-913a-07f22e33b1b4" (UID: "ed73bb12-40f2-4d0c-913a-07f22e33b1b4"). InnerVolumeSpecName "kube-api-access-kxv57". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:54:32 crc kubenswrapper[4558]: I0120 17:54:32.221813 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f0abf094-6fa0-45a0-b6f0-eaa4b4b03b55-kube-api-access-vtssd" (OuterVolumeSpecName: "kube-api-access-vtssd") pod "f0abf094-6fa0-45a0-b6f0-eaa4b4b03b55" (UID: "f0abf094-6fa0-45a0-b6f0-eaa4b4b03b55"). InnerVolumeSpecName "kube-api-access-vtssd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:54:32 crc kubenswrapper[4558]: I0120 17:54:32.243639 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-db-create-vfppk" Jan 20 17:54:32 crc kubenswrapper[4558]: I0120 17:54:32.247536 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-1afa-account-create-update-9wfwp" Jan 20 17:54:32 crc kubenswrapper[4558]: I0120 17:54:32.288138 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vtssd\" (UniqueName: \"kubernetes.io/projected/f0abf094-6fa0-45a0-b6f0-eaa4b4b03b55-kube-api-access-vtssd\") on node \"crc\" DevicePath \"\"" Jan 20 17:54:32 crc kubenswrapper[4558]: I0120 17:54:32.288271 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-clqgr\" (UniqueName: \"kubernetes.io/projected/673d11aa-a3d9-47e7-a64a-d261d52a8d1b-kube-api-access-clqgr\") on node \"crc\" DevicePath \"\"" Jan 20 17:54:32 crc kubenswrapper[4558]: I0120 17:54:32.288327 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kxv57\" (UniqueName: \"kubernetes.io/projected/ed73bb12-40f2-4d0c-913a-07f22e33b1b4-kube-api-access-kxv57\") on node \"crc\" DevicePath \"\"" Jan 20 17:54:32 crc kubenswrapper[4558]: I0120 17:54:32.288376 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/673d11aa-a3d9-47e7-a64a-d261d52a8d1b-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:54:32 crc kubenswrapper[4558]: I0120 17:54:32.288422 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f0abf094-6fa0-45a0-b6f0-eaa4b4b03b55-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:54:32 crc kubenswrapper[4558]: I0120 17:54:32.288507 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ed73bb12-40f2-4d0c-913a-07f22e33b1b4-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:54:32 crc kubenswrapper[4558]: I0120 17:54:32.346341 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-94fd-account-create-update-l2nkn" event={"ID":"ed73bb12-40f2-4d0c-913a-07f22e33b1b4","Type":"ContainerDied","Data":"d874992f063b4f8def007867efc8d20cf2f365936a8561633f277485f1cc8131"} Jan 20 17:54:32 crc kubenswrapper[4558]: I0120 17:54:32.346406 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d874992f063b4f8def007867efc8d20cf2f365936a8561633f277485f1cc8131" Jan 20 17:54:32 crc kubenswrapper[4558]: I0120 17:54:32.346413 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-94fd-account-create-update-l2nkn" Jan 20 17:54:32 crc kubenswrapper[4558]: I0120 17:54:32.347904 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-1afa-account-create-update-9wfwp" Jan 20 17:54:32 crc kubenswrapper[4558]: I0120 17:54:32.347924 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-1afa-account-create-update-9wfwp" event={"ID":"30ba9eb2-c009-491e-9b16-a1ce65ca3d98","Type":"ContainerDied","Data":"80c82a186e3d63c1d029ca89ecdb46ea63a09869af086b8df17ee53836b756d5"} Jan 20 17:54:32 crc kubenswrapper[4558]: I0120 17:54:32.347971 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="80c82a186e3d63c1d029ca89ecdb46ea63a09869af086b8df17ee53836b756d5" Jan 20 17:54:32 crc kubenswrapper[4558]: I0120 17:54:32.353602 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-c24c-account-create-update-vlgct" Jan 20 17:54:32 crc kubenswrapper[4558]: I0120 17:54:32.353608 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-c24c-account-create-update-vlgct" event={"ID":"673d11aa-a3d9-47e7-a64a-d261d52a8d1b","Type":"ContainerDied","Data":"a4e4844602c499d4332f1bece9dd1eefcd25fa9289507c422c0f4c899ec9216f"} Jan 20 17:54:32 crc kubenswrapper[4558]: I0120 17:54:32.353646 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a4e4844602c499d4332f1bece9dd1eefcd25fa9289507c422c0f4c899ec9216f" Jan 20 17:54:32 crc kubenswrapper[4558]: I0120 17:54:32.358001 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-create-k6q4m" event={"ID":"89488e78-3aae-4840-9ae3-2c09ca0013be","Type":"ContainerDied","Data":"a8dca917a843ee025b53a5d593d8efb4bd838c8a699e611343429cb2b9da3146"} Jan 20 17:54:32 crc kubenswrapper[4558]: I0120 17:54:32.358033 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-db-create-k6q4m" Jan 20 17:54:32 crc kubenswrapper[4558]: I0120 17:54:32.358043 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a8dca917a843ee025b53a5d593d8efb4bd838c8a699e611343429cb2b9da3146" Jan 20 17:54:32 crc kubenswrapper[4558]: I0120 17:54:32.359959 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-db-create-2hfmj" Jan 20 17:54:32 crc kubenswrapper[4558]: I0120 17:54:32.359975 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-create-2hfmj" event={"ID":"f0abf094-6fa0-45a0-b6f0-eaa4b4b03b55","Type":"ContainerDied","Data":"6fd520a91ee06f8f4b5d2f4f0b90aa6ca8514afaa75de77808efe44f5b06fdb2"} Jan 20 17:54:32 crc kubenswrapper[4558]: I0120 17:54:32.360023 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6fd520a91ee06f8f4b5d2f4f0b90aa6ca8514afaa75de77808efe44f5b06fdb2" Jan 20 17:54:32 crc kubenswrapper[4558]: I0120 17:54:32.365150 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"cd300665-4f75-4ed7-9166-4b48e1aeffc4","Type":"ContainerStarted","Data":"49dc4ba201bbec0d9ff9d16fd2d2b2c98c88e6f20020f744fca0291f9f36c748"} Jan 20 17:54:32 crc kubenswrapper[4558]: I0120 17:54:32.365206 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"cd300665-4f75-4ed7-9166-4b48e1aeffc4","Type":"ContainerStarted","Data":"63b1e89156b08be7c97cc6cd6b0955544bb87d107fd929728f51e6460408e3ae"} Jan 20 17:54:32 crc kubenswrapper[4558]: I0120 17:54:32.365218 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"cd300665-4f75-4ed7-9166-4b48e1aeffc4","Type":"ContainerStarted","Data":"f88ad017f2098e35ed1d0c224b9e2095984e92f4d2ea3271b416a0d3a18de1f0"} Jan 20 17:54:32 crc kubenswrapper[4558]: I0120 17:54:32.365226 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"cd300665-4f75-4ed7-9166-4b48e1aeffc4","Type":"ContainerStarted","Data":"6d7ee733551241798b4cb21ccf1011d8596e7dc990e0e63f7573c7daa5faee56"} Jan 20 17:54:32 crc kubenswrapper[4558]: I0120 17:54:32.366475 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-create-vfppk" 
event={"ID":"41d6ff7a-4cdf-4add-834d-eb19a40181b9","Type":"ContainerDied","Data":"68f4eb6e11c8535347790d498bfec02465f00c466a200aa78161983d9b75d55f"} Jan 20 17:54:32 crc kubenswrapper[4558]: I0120 17:54:32.366499 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="68f4eb6e11c8535347790d498bfec02465f00c466a200aa78161983d9b75d55f" Jan 20 17:54:32 crc kubenswrapper[4558]: I0120 17:54:32.366548 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-db-create-vfppk" Jan 20 17:54:32 crc kubenswrapper[4558]: I0120 17:54:32.392805 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zp2fw\" (UniqueName: \"kubernetes.io/projected/41d6ff7a-4cdf-4add-834d-eb19a40181b9-kube-api-access-zp2fw\") pod \"41d6ff7a-4cdf-4add-834d-eb19a40181b9\" (UID: \"41d6ff7a-4cdf-4add-834d-eb19a40181b9\") " Jan 20 17:54:32 crc kubenswrapper[4558]: I0120 17:54:32.392920 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/30ba9eb2-c009-491e-9b16-a1ce65ca3d98-operator-scripts\") pod \"30ba9eb2-c009-491e-9b16-a1ce65ca3d98\" (UID: \"30ba9eb2-c009-491e-9b16-a1ce65ca3d98\") " Jan 20 17:54:32 crc kubenswrapper[4558]: I0120 17:54:32.393003 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/41d6ff7a-4cdf-4add-834d-eb19a40181b9-operator-scripts\") pod \"41d6ff7a-4cdf-4add-834d-eb19a40181b9\" (UID: \"41d6ff7a-4cdf-4add-834d-eb19a40181b9\") " Jan 20 17:54:32 crc kubenswrapper[4558]: I0120 17:54:32.393028 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h2729\" (UniqueName: \"kubernetes.io/projected/30ba9eb2-c009-491e-9b16-a1ce65ca3d98-kube-api-access-h2729\") pod \"30ba9eb2-c009-491e-9b16-a1ce65ca3d98\" (UID: \"30ba9eb2-c009-491e-9b16-a1ce65ca3d98\") " Jan 20 17:54:32 crc kubenswrapper[4558]: I0120 17:54:32.393687 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/30ba9eb2-c009-491e-9b16-a1ce65ca3d98-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "30ba9eb2-c009-491e-9b16-a1ce65ca3d98" (UID: "30ba9eb2-c009-491e-9b16-a1ce65ca3d98"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:54:32 crc kubenswrapper[4558]: I0120 17:54:32.394129 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/41d6ff7a-4cdf-4add-834d-eb19a40181b9-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "41d6ff7a-4cdf-4add-834d-eb19a40181b9" (UID: "41d6ff7a-4cdf-4add-834d-eb19a40181b9"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:54:32 crc kubenswrapper[4558]: I0120 17:54:32.397363 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/30ba9eb2-c009-491e-9b16-a1ce65ca3d98-kube-api-access-h2729" (OuterVolumeSpecName: "kube-api-access-h2729") pod "30ba9eb2-c009-491e-9b16-a1ce65ca3d98" (UID: "30ba9eb2-c009-491e-9b16-a1ce65ca3d98"). InnerVolumeSpecName "kube-api-access-h2729". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:54:32 crc kubenswrapper[4558]: I0120 17:54:32.398152 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/41d6ff7a-4cdf-4add-834d-eb19a40181b9-kube-api-access-zp2fw" (OuterVolumeSpecName: "kube-api-access-zp2fw") pod "41d6ff7a-4cdf-4add-834d-eb19a40181b9" (UID: "41d6ff7a-4cdf-4add-834d-eb19a40181b9"). InnerVolumeSpecName "kube-api-access-zp2fw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:54:32 crc kubenswrapper[4558]: I0120 17:54:32.495636 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zp2fw\" (UniqueName: \"kubernetes.io/projected/41d6ff7a-4cdf-4add-834d-eb19a40181b9-kube-api-access-zp2fw\") on node \"crc\" DevicePath \"\"" Jan 20 17:54:32 crc kubenswrapper[4558]: I0120 17:54:32.495883 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/30ba9eb2-c009-491e-9b16-a1ce65ca3d98-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:54:32 crc kubenswrapper[4558]: I0120 17:54:32.495944 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/41d6ff7a-4cdf-4add-834d-eb19a40181b9-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:54:32 crc kubenswrapper[4558]: I0120 17:54:32.495994 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h2729\" (UniqueName: \"kubernetes.io/projected/30ba9eb2-c009-491e-9b16-a1ce65ca3d98-kube-api-access-h2729\") on node \"crc\" DevicePath \"\"" Jan 20 17:54:32 crc kubenswrapper[4558]: I0120 17:54:32.579010 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a92fe2a3-64bf-43a1-9b3f-b1d3184835fb" path="/var/lib/kubelet/pods/a92fe2a3-64bf-43a1-9b3f-b1d3184835fb/volumes" Jan 20 17:54:33 crc kubenswrapper[4558]: I0120 17:54:33.381270 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"cd300665-4f75-4ed7-9166-4b48e1aeffc4","Type":"ContainerStarted","Data":"a7e5224c23551340586e780af89b39957adde43c70c7ae7717575e2e550d32e6"} Jan 20 17:54:33 crc kubenswrapper[4558]: I0120 17:54:33.417293 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/swift-storage-0" podStartSLOduration=13.41727512 podStartE2EDuration="13.41727512s" podCreationTimestamp="2026-01-20 17:54:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:54:33.409825792 +0000 UTC m=+4367.170163759" watchObservedRunningTime="2026-01-20 17:54:33.41727512 +0000 UTC m=+4367.177613087" Jan 20 17:54:33 crc kubenswrapper[4558]: I0120 17:54:33.545124 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-5dc494d4bf-wrrjj"] Jan 20 17:54:33 crc kubenswrapper[4558]: E0120 17:54:33.545800 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41d6ff7a-4cdf-4add-834d-eb19a40181b9" containerName="mariadb-database-create" Jan 20 17:54:33 crc kubenswrapper[4558]: I0120 17:54:33.545822 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="41d6ff7a-4cdf-4add-834d-eb19a40181b9" containerName="mariadb-database-create" Jan 20 17:54:33 crc kubenswrapper[4558]: E0120 17:54:33.545846 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89488e78-3aae-4840-9ae3-2c09ca0013be" 
containerName="mariadb-database-create" Jan 20 17:54:33 crc kubenswrapper[4558]: I0120 17:54:33.545854 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="89488e78-3aae-4840-9ae3-2c09ca0013be" containerName="mariadb-database-create" Jan 20 17:54:33 crc kubenswrapper[4558]: E0120 17:54:33.545868 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30ba9eb2-c009-491e-9b16-a1ce65ca3d98" containerName="mariadb-account-create-update" Jan 20 17:54:33 crc kubenswrapper[4558]: I0120 17:54:33.545875 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="30ba9eb2-c009-491e-9b16-a1ce65ca3d98" containerName="mariadb-account-create-update" Jan 20 17:54:33 crc kubenswrapper[4558]: E0120 17:54:33.545887 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed73bb12-40f2-4d0c-913a-07f22e33b1b4" containerName="mariadb-account-create-update" Jan 20 17:54:33 crc kubenswrapper[4558]: I0120 17:54:33.545893 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed73bb12-40f2-4d0c-913a-07f22e33b1b4" containerName="mariadb-account-create-update" Jan 20 17:54:33 crc kubenswrapper[4558]: E0120 17:54:33.545902 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="673d11aa-a3d9-47e7-a64a-d261d52a8d1b" containerName="mariadb-account-create-update" Jan 20 17:54:33 crc kubenswrapper[4558]: I0120 17:54:33.545908 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="673d11aa-a3d9-47e7-a64a-d261d52a8d1b" containerName="mariadb-account-create-update" Jan 20 17:54:33 crc kubenswrapper[4558]: E0120 17:54:33.545916 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0dc1e7fb-21d3-4077-87c9-f2ede71a561a" containerName="swift-ring-rebalance" Jan 20 17:54:33 crc kubenswrapper[4558]: I0120 17:54:33.545923 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0dc1e7fb-21d3-4077-87c9-f2ede71a561a" containerName="swift-ring-rebalance" Jan 20 17:54:33 crc kubenswrapper[4558]: E0120 17:54:33.545941 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0abf094-6fa0-45a0-b6f0-eaa4b4b03b55" containerName="mariadb-database-create" Jan 20 17:54:33 crc kubenswrapper[4558]: I0120 17:54:33.545948 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0abf094-6fa0-45a0-b6f0-eaa4b4b03b55" containerName="mariadb-database-create" Jan 20 17:54:33 crc kubenswrapper[4558]: I0120 17:54:33.546091 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="673d11aa-a3d9-47e7-a64a-d261d52a8d1b" containerName="mariadb-account-create-update" Jan 20 17:54:33 crc kubenswrapper[4558]: I0120 17:54:33.546101 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="41d6ff7a-4cdf-4add-834d-eb19a40181b9" containerName="mariadb-database-create" Jan 20 17:54:33 crc kubenswrapper[4558]: I0120 17:54:33.546114 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed73bb12-40f2-4d0c-913a-07f22e33b1b4" containerName="mariadb-account-create-update" Jan 20 17:54:33 crc kubenswrapper[4558]: I0120 17:54:33.546122 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f0abf094-6fa0-45a0-b6f0-eaa4b4b03b55" containerName="mariadb-database-create" Jan 20 17:54:33 crc kubenswrapper[4558]: I0120 17:54:33.546133 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="30ba9eb2-c009-491e-9b16-a1ce65ca3d98" containerName="mariadb-account-create-update" Jan 20 17:54:33 crc kubenswrapper[4558]: I0120 17:54:33.546145 4558 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="89488e78-3aae-4840-9ae3-2c09ca0013be" containerName="mariadb-database-create" Jan 20 17:54:33 crc kubenswrapper[4558]: I0120 17:54:33.546154 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="0dc1e7fb-21d3-4077-87c9-f2ede71a561a" containerName="swift-ring-rebalance" Jan 20 17:54:33 crc kubenswrapper[4558]: I0120 17:54:33.563961 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-5dc494d4bf-wrrjj" Jan 20 17:54:33 crc kubenswrapper[4558]: I0120 17:54:33.563960 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-5dc494d4bf-wrrjj"] Jan 20 17:54:33 crc kubenswrapper[4558]: I0120 17:54:33.568016 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"dns-swift-storage-0" Jan 20 17:54:33 crc kubenswrapper[4558]: I0120 17:54:33.718569 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/20fbac90-6e69-4c04-8e92-0b191157f185-dns-swift-storage-0\") pod \"dnsmasq-dnsmasq-5dc494d4bf-wrrjj\" (UID: \"20fbac90-6e69-4c04-8e92-0b191157f185\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-5dc494d4bf-wrrjj" Jan 20 17:54:33 crc kubenswrapper[4558]: I0120 17:54:33.718709 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/20fbac90-6e69-4c04-8e92-0b191157f185-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-5dc494d4bf-wrrjj\" (UID: \"20fbac90-6e69-4c04-8e92-0b191157f185\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-5dc494d4bf-wrrjj" Jan 20 17:54:33 crc kubenswrapper[4558]: I0120 17:54:33.718800 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kcrwm\" (UniqueName: \"kubernetes.io/projected/20fbac90-6e69-4c04-8e92-0b191157f185-kube-api-access-kcrwm\") pod \"dnsmasq-dnsmasq-5dc494d4bf-wrrjj\" (UID: \"20fbac90-6e69-4c04-8e92-0b191157f185\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-5dc494d4bf-wrrjj" Jan 20 17:54:33 crc kubenswrapper[4558]: I0120 17:54:33.719447 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/20fbac90-6e69-4c04-8e92-0b191157f185-config\") pod \"dnsmasq-dnsmasq-5dc494d4bf-wrrjj\" (UID: \"20fbac90-6e69-4c04-8e92-0b191157f185\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-5dc494d4bf-wrrjj" Jan 20 17:54:33 crc kubenswrapper[4558]: I0120 17:54:33.821074 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/20fbac90-6e69-4c04-8e92-0b191157f185-config\") pod \"dnsmasq-dnsmasq-5dc494d4bf-wrrjj\" (UID: \"20fbac90-6e69-4c04-8e92-0b191157f185\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-5dc494d4bf-wrrjj" Jan 20 17:54:33 crc kubenswrapper[4558]: I0120 17:54:33.821201 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/20fbac90-6e69-4c04-8e92-0b191157f185-dns-swift-storage-0\") pod \"dnsmasq-dnsmasq-5dc494d4bf-wrrjj\" (UID: \"20fbac90-6e69-4c04-8e92-0b191157f185\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-5dc494d4bf-wrrjj" Jan 20 17:54:33 crc kubenswrapper[4558]: I0120 17:54:33.821244 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dnsmasq-svc\" (UniqueName: 
\"kubernetes.io/configmap/20fbac90-6e69-4c04-8e92-0b191157f185-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-5dc494d4bf-wrrjj\" (UID: \"20fbac90-6e69-4c04-8e92-0b191157f185\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-5dc494d4bf-wrrjj" Jan 20 17:54:33 crc kubenswrapper[4558]: I0120 17:54:33.821306 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kcrwm\" (UniqueName: \"kubernetes.io/projected/20fbac90-6e69-4c04-8e92-0b191157f185-kube-api-access-kcrwm\") pod \"dnsmasq-dnsmasq-5dc494d4bf-wrrjj\" (UID: \"20fbac90-6e69-4c04-8e92-0b191157f185\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-5dc494d4bf-wrrjj" Jan 20 17:54:33 crc kubenswrapper[4558]: I0120 17:54:33.822484 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/20fbac90-6e69-4c04-8e92-0b191157f185-config\") pod \"dnsmasq-dnsmasq-5dc494d4bf-wrrjj\" (UID: \"20fbac90-6e69-4c04-8e92-0b191157f185\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-5dc494d4bf-wrrjj" Jan 20 17:54:33 crc kubenswrapper[4558]: I0120 17:54:33.822500 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/20fbac90-6e69-4c04-8e92-0b191157f185-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-5dc494d4bf-wrrjj\" (UID: \"20fbac90-6e69-4c04-8e92-0b191157f185\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-5dc494d4bf-wrrjj" Jan 20 17:54:33 crc kubenswrapper[4558]: I0120 17:54:33.822962 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/20fbac90-6e69-4c04-8e92-0b191157f185-dns-swift-storage-0\") pod \"dnsmasq-dnsmasq-5dc494d4bf-wrrjj\" (UID: \"20fbac90-6e69-4c04-8e92-0b191157f185\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-5dc494d4bf-wrrjj" Jan 20 17:54:33 crc kubenswrapper[4558]: I0120 17:54:33.984838 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-db-sync-4h4wr"] Jan 20 17:54:33 crc kubenswrapper[4558]: I0120 17:54:33.986042 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-db-sync-4h4wr" Jan 20 17:54:33 crc kubenswrapper[4558]: I0120 17:54:33.989698 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-glance-dockercfg-g64r2" Jan 20 17:54:33 crc kubenswrapper[4558]: I0120 17:54:33.989708 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-config-data" Jan 20 17:54:33 crc kubenswrapper[4558]: I0120 17:54:33.992935 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-db-sync-4h4wr"] Jan 20 17:54:33 crc kubenswrapper[4558]: I0120 17:54:33.997506 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kcrwm\" (UniqueName: \"kubernetes.io/projected/20fbac90-6e69-4c04-8e92-0b191157f185-kube-api-access-kcrwm\") pod \"dnsmasq-dnsmasq-5dc494d4bf-wrrjj\" (UID: \"20fbac90-6e69-4c04-8e92-0b191157f185\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-5dc494d4bf-wrrjj" Jan 20 17:54:34 crc kubenswrapper[4558]: I0120 17:54:34.126364 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3da90c4c-591e-4d99-84ed-3e720b2b853a-combined-ca-bundle\") pod \"glance-db-sync-4h4wr\" (UID: \"3da90c4c-591e-4d99-84ed-3e720b2b853a\") " pod="openstack-kuttl-tests/glance-db-sync-4h4wr" Jan 20 17:54:34 crc kubenswrapper[4558]: I0120 17:54:34.126552 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3da90c4c-591e-4d99-84ed-3e720b2b853a-config-data\") pod \"glance-db-sync-4h4wr\" (UID: \"3da90c4c-591e-4d99-84ed-3e720b2b853a\") " pod="openstack-kuttl-tests/glance-db-sync-4h4wr" Jan 20 17:54:34 crc kubenswrapper[4558]: I0120 17:54:34.126661 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/3da90c4c-591e-4d99-84ed-3e720b2b853a-db-sync-config-data\") pod \"glance-db-sync-4h4wr\" (UID: \"3da90c4c-591e-4d99-84ed-3e720b2b853a\") " pod="openstack-kuttl-tests/glance-db-sync-4h4wr" Jan 20 17:54:34 crc kubenswrapper[4558]: I0120 17:54:34.126844 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7l7gq\" (UniqueName: \"kubernetes.io/projected/3da90c4c-591e-4d99-84ed-3e720b2b853a-kube-api-access-7l7gq\") pod \"glance-db-sync-4h4wr\" (UID: \"3da90c4c-591e-4d99-84ed-3e720b2b853a\") " pod="openstack-kuttl-tests/glance-db-sync-4h4wr" Jan 20 17:54:34 crc kubenswrapper[4558]: I0120 17:54:34.189289 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-5dc494d4bf-wrrjj" Jan 20 17:54:34 crc kubenswrapper[4558]: I0120 17:54:34.228603 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7l7gq\" (UniqueName: \"kubernetes.io/projected/3da90c4c-591e-4d99-84ed-3e720b2b853a-kube-api-access-7l7gq\") pod \"glance-db-sync-4h4wr\" (UID: \"3da90c4c-591e-4d99-84ed-3e720b2b853a\") " pod="openstack-kuttl-tests/glance-db-sync-4h4wr" Jan 20 17:54:34 crc kubenswrapper[4558]: I0120 17:54:34.229124 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3da90c4c-591e-4d99-84ed-3e720b2b853a-combined-ca-bundle\") pod \"glance-db-sync-4h4wr\" (UID: \"3da90c4c-591e-4d99-84ed-3e720b2b853a\") " pod="openstack-kuttl-tests/glance-db-sync-4h4wr" Jan 20 17:54:34 crc kubenswrapper[4558]: I0120 17:54:34.229331 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3da90c4c-591e-4d99-84ed-3e720b2b853a-config-data\") pod \"glance-db-sync-4h4wr\" (UID: \"3da90c4c-591e-4d99-84ed-3e720b2b853a\") " pod="openstack-kuttl-tests/glance-db-sync-4h4wr" Jan 20 17:54:34 crc kubenswrapper[4558]: I0120 17:54:34.229456 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/3da90c4c-591e-4d99-84ed-3e720b2b853a-db-sync-config-data\") pod \"glance-db-sync-4h4wr\" (UID: \"3da90c4c-591e-4d99-84ed-3e720b2b853a\") " pod="openstack-kuttl-tests/glance-db-sync-4h4wr" Jan 20 17:54:34 crc kubenswrapper[4558]: I0120 17:54:34.234066 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/3da90c4c-591e-4d99-84ed-3e720b2b853a-db-sync-config-data\") pod \"glance-db-sync-4h4wr\" (UID: \"3da90c4c-591e-4d99-84ed-3e720b2b853a\") " pod="openstack-kuttl-tests/glance-db-sync-4h4wr" Jan 20 17:54:34 crc kubenswrapper[4558]: I0120 17:54:34.234297 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3da90c4c-591e-4d99-84ed-3e720b2b853a-combined-ca-bundle\") pod \"glance-db-sync-4h4wr\" (UID: \"3da90c4c-591e-4d99-84ed-3e720b2b853a\") " pod="openstack-kuttl-tests/glance-db-sync-4h4wr" Jan 20 17:54:34 crc kubenswrapper[4558]: I0120 17:54:34.234318 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3da90c4c-591e-4d99-84ed-3e720b2b853a-config-data\") pod \"glance-db-sync-4h4wr\" (UID: \"3da90c4c-591e-4d99-84ed-3e720b2b853a\") " pod="openstack-kuttl-tests/glance-db-sync-4h4wr" Jan 20 17:54:34 crc kubenswrapper[4558]: I0120 17:54:34.249975 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7l7gq\" (UniqueName: \"kubernetes.io/projected/3da90c4c-591e-4d99-84ed-3e720b2b853a-kube-api-access-7l7gq\") pod \"glance-db-sync-4h4wr\" (UID: \"3da90c4c-591e-4d99-84ed-3e720b2b853a\") " pod="openstack-kuttl-tests/glance-db-sync-4h4wr" Jan 20 17:54:34 crc kubenswrapper[4558]: I0120 17:54:34.327987 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-db-sync-4h4wr" Jan 20 17:54:34 crc kubenswrapper[4558]: I0120 17:54:34.603621 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-5dc494d4bf-wrrjj"] Jan 20 17:54:34 crc kubenswrapper[4558]: W0120 17:54:34.609890 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod20fbac90_6e69_4c04_8e92_0b191157f185.slice/crio-255d5113d91a699b2a0f958889f9b904277d1c4b3ec03be2a39726629473c38b WatchSource:0}: Error finding container 255d5113d91a699b2a0f958889f9b904277d1c4b3ec03be2a39726629473c38b: Status 404 returned error can't find the container with id 255d5113d91a699b2a0f958889f9b904277d1c4b3ec03be2a39726629473c38b Jan 20 17:54:34 crc kubenswrapper[4558]: I0120 17:54:34.769135 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-db-sync-4h4wr"] Jan 20 17:54:34 crc kubenswrapper[4558]: W0120 17:54:34.777545 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3da90c4c_591e_4d99_84ed_3e720b2b853a.slice/crio-968f13aaebbb1284ce10543f4398d2637ce159677e5ebfd03a8ab16c57feed4b WatchSource:0}: Error finding container 968f13aaebbb1284ce10543f4398d2637ce159677e5ebfd03a8ab16c57feed4b: Status 404 returned error can't find the container with id 968f13aaebbb1284ce10543f4398d2637ce159677e5ebfd03a8ab16c57feed4b Jan 20 17:54:35 crc kubenswrapper[4558]: I0120 17:54:35.405772 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-sync-4h4wr" event={"ID":"3da90c4c-591e-4d99-84ed-3e720b2b853a","Type":"ContainerStarted","Data":"ac6033078eae9001c5e05c030c44ecfad51e5afe870494274353a4d660aecfbd"} Jan 20 17:54:35 crc kubenswrapper[4558]: I0120 17:54:35.406279 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-sync-4h4wr" event={"ID":"3da90c4c-591e-4d99-84ed-3e720b2b853a","Type":"ContainerStarted","Data":"968f13aaebbb1284ce10543f4398d2637ce159677e5ebfd03a8ab16c57feed4b"} Jan 20 17:54:35 crc kubenswrapper[4558]: I0120 17:54:35.408753 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-5dc494d4bf-wrrjj" event={"ID":"20fbac90-6e69-4c04-8e92-0b191157f185","Type":"ContainerDied","Data":"f08f1e5fb6f16eae2ddfa53be261fac5dbb2be9bcf923f9f4f3313ef2f5b6098"} Jan 20 17:54:35 crc kubenswrapper[4558]: I0120 17:54:35.408700 4558 generic.go:334] "Generic (PLEG): container finished" podID="20fbac90-6e69-4c04-8e92-0b191157f185" containerID="f08f1e5fb6f16eae2ddfa53be261fac5dbb2be9bcf923f9f4f3313ef2f5b6098" exitCode=0 Jan 20 17:54:35 crc kubenswrapper[4558]: I0120 17:54:35.408856 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-5dc494d4bf-wrrjj" event={"ID":"20fbac90-6e69-4c04-8e92-0b191157f185","Type":"ContainerStarted","Data":"255d5113d91a699b2a0f958889f9b904277d1c4b3ec03be2a39726629473c38b"} Jan 20 17:54:35 crc kubenswrapper[4558]: I0120 17:54:35.426823 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-db-sync-4h4wr" podStartSLOduration=2.426802478 podStartE2EDuration="2.426802478s" podCreationTimestamp="2026-01-20 17:54:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:54:35.423196627 +0000 UTC m=+4369.183534594" 
watchObservedRunningTime="2026-01-20 17:54:35.426802478 +0000 UTC m=+4369.187140446" Jan 20 17:54:36 crc kubenswrapper[4558]: I0120 17:54:36.421281 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-5dc494d4bf-wrrjj" event={"ID":"20fbac90-6e69-4c04-8e92-0b191157f185","Type":"ContainerStarted","Data":"7c8ffe31f73b84ce399c31f05147d9b9d3a1fbf8a93f4e5cb3cdd6110894c5ad"} Jan 20 17:54:36 crc kubenswrapper[4558]: I0120 17:54:36.440741 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-5dc494d4bf-wrrjj" podStartSLOduration=3.440715306 podStartE2EDuration="3.440715306s" podCreationTimestamp="2026-01-20 17:54:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:54:36.438474873 +0000 UTC m=+4370.198812841" watchObservedRunningTime="2026-01-20 17:54:36.440715306 +0000 UTC m=+4370.201053273" Jan 20 17:54:37 crc kubenswrapper[4558]: I0120 17:54:37.055373 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/root-account-create-update-l5vtd"] Jan 20 17:54:37 crc kubenswrapper[4558]: I0120 17:54:37.056588 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-l5vtd" Jan 20 17:54:37 crc kubenswrapper[4558]: I0120 17:54:37.059014 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"openstack-cell1-mariadb-root-db-secret" Jan 20 17:54:37 crc kubenswrapper[4558]: I0120 17:54:37.067964 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-l5vtd"] Jan 20 17:54:37 crc kubenswrapper[4558]: I0120 17:54:37.184394 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rzrll\" (UniqueName: \"kubernetes.io/projected/85e14934-0ea8-4bb8-9ff6-7f06f89f2ae8-kube-api-access-rzrll\") pod \"root-account-create-update-l5vtd\" (UID: \"85e14934-0ea8-4bb8-9ff6-7f06f89f2ae8\") " pod="openstack-kuttl-tests/root-account-create-update-l5vtd" Jan 20 17:54:37 crc kubenswrapper[4558]: I0120 17:54:37.184526 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/85e14934-0ea8-4bb8-9ff6-7f06f89f2ae8-operator-scripts\") pod \"root-account-create-update-l5vtd\" (UID: \"85e14934-0ea8-4bb8-9ff6-7f06f89f2ae8\") " pod="openstack-kuttl-tests/root-account-create-update-l5vtd" Jan 20 17:54:37 crc kubenswrapper[4558]: I0120 17:54:37.285909 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rzrll\" (UniqueName: \"kubernetes.io/projected/85e14934-0ea8-4bb8-9ff6-7f06f89f2ae8-kube-api-access-rzrll\") pod \"root-account-create-update-l5vtd\" (UID: \"85e14934-0ea8-4bb8-9ff6-7f06f89f2ae8\") " pod="openstack-kuttl-tests/root-account-create-update-l5vtd" Jan 20 17:54:37 crc kubenswrapper[4558]: I0120 17:54:37.286024 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/85e14934-0ea8-4bb8-9ff6-7f06f89f2ae8-operator-scripts\") pod \"root-account-create-update-l5vtd\" (UID: \"85e14934-0ea8-4bb8-9ff6-7f06f89f2ae8\") " pod="openstack-kuttl-tests/root-account-create-update-l5vtd" Jan 20 17:54:37 crc kubenswrapper[4558]: I0120 17:54:37.286678 4558 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/85e14934-0ea8-4bb8-9ff6-7f06f89f2ae8-operator-scripts\") pod \"root-account-create-update-l5vtd\" (UID: \"85e14934-0ea8-4bb8-9ff6-7f06f89f2ae8\") " pod="openstack-kuttl-tests/root-account-create-update-l5vtd" Jan 20 17:54:37 crc kubenswrapper[4558]: I0120 17:54:37.304483 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rzrll\" (UniqueName: \"kubernetes.io/projected/85e14934-0ea8-4bb8-9ff6-7f06f89f2ae8-kube-api-access-rzrll\") pod \"root-account-create-update-l5vtd\" (UID: \"85e14934-0ea8-4bb8-9ff6-7f06f89f2ae8\") " pod="openstack-kuttl-tests/root-account-create-update-l5vtd" Jan 20 17:54:37 crc kubenswrapper[4558]: I0120 17:54:37.373203 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-l5vtd" Jan 20 17:54:37 crc kubenswrapper[4558]: I0120 17:54:37.429375 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-5dc494d4bf-wrrjj" Jan 20 17:54:37 crc kubenswrapper[4558]: I0120 17:54:37.736556 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:54:37 crc kubenswrapper[4558]: I0120 17:54:37.775706 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-l5vtd"] Jan 20 17:54:38 crc kubenswrapper[4558]: I0120 17:54:38.442806 4558 generic.go:334] "Generic (PLEG): container finished" podID="85e14934-0ea8-4bb8-9ff6-7f06f89f2ae8" containerID="6951040f5e0eb5e16deb6671f1539f1de80f180d1d0d5cf02b5584ddc8280caf" exitCode=0 Jan 20 17:54:38 crc kubenswrapper[4558]: I0120 17:54:38.442907 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-l5vtd" event={"ID":"85e14934-0ea8-4bb8-9ff6-7f06f89f2ae8","Type":"ContainerDied","Data":"6951040f5e0eb5e16deb6671f1539f1de80f180d1d0d5cf02b5584ddc8280caf"} Jan 20 17:54:38 crc kubenswrapper[4558]: I0120 17:54:38.443331 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-l5vtd" event={"ID":"85e14934-0ea8-4bb8-9ff6-7f06f89f2ae8","Type":"ContainerStarted","Data":"f735f9a3b040366ee6ddc324a94572d2833c42deccad094c03a220f7c17932df"} Jan 20 17:54:39 crc kubenswrapper[4558]: I0120 17:54:39.454873 4558 generic.go:334] "Generic (PLEG): container finished" podID="948fb15d-07f1-4b25-b8d5-7d582024ef28" containerID="d5c6ace4ceeb24008b1063ad1082708520feefaaabe670144960a772ad86c13a" exitCode=0 Jan 20 17:54:39 crc kubenswrapper[4558]: I0120 17:54:39.454972 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" event={"ID":"948fb15d-07f1-4b25-b8d5-7d582024ef28","Type":"ContainerDied","Data":"d5c6ace4ceeb24008b1063ad1082708520feefaaabe670144960a772ad86c13a"} Jan 20 17:54:39 crc kubenswrapper[4558]: I0120 17:54:39.457885 4558 generic.go:334] "Generic (PLEG): container finished" podID="3da90c4c-591e-4d99-84ed-3e720b2b853a" containerID="ac6033078eae9001c5e05c030c44ecfad51e5afe870494274353a4d660aecfbd" exitCode=0 Jan 20 17:54:39 crc kubenswrapper[4558]: I0120 17:54:39.457957 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-sync-4h4wr" 
event={"ID":"3da90c4c-591e-4d99-84ed-3e720b2b853a","Type":"ContainerDied","Data":"ac6033078eae9001c5e05c030c44ecfad51e5afe870494274353a4d660aecfbd"} Jan 20 17:54:39 crc kubenswrapper[4558]: I0120 17:54:39.460242 4558 generic.go:334] "Generic (PLEG): container finished" podID="d512bec4-7ed0-43bb-b8fe-0f235f7698e5" containerID="415bf18726a76e61bd79b39ff76bbb5fe83ed4680a0f55bfbff9af2eed5f49a4" exitCode=0 Jan 20 17:54:39 crc kubenswrapper[4558]: I0120 17:54:39.460342 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-0" event={"ID":"d512bec4-7ed0-43bb-b8fe-0f235f7698e5","Type":"ContainerDied","Data":"415bf18726a76e61bd79b39ff76bbb5fe83ed4680a0f55bfbff9af2eed5f49a4"} Jan 20 17:54:39 crc kubenswrapper[4558]: I0120 17:54:39.782530 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-l5vtd" Jan 20 17:54:39 crc kubenswrapper[4558]: I0120 17:54:39.947007 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rzrll\" (UniqueName: \"kubernetes.io/projected/85e14934-0ea8-4bb8-9ff6-7f06f89f2ae8-kube-api-access-rzrll\") pod \"85e14934-0ea8-4bb8-9ff6-7f06f89f2ae8\" (UID: \"85e14934-0ea8-4bb8-9ff6-7f06f89f2ae8\") " Jan 20 17:54:39 crc kubenswrapper[4558]: I0120 17:54:39.947295 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/85e14934-0ea8-4bb8-9ff6-7f06f89f2ae8-operator-scripts\") pod \"85e14934-0ea8-4bb8-9ff6-7f06f89f2ae8\" (UID: \"85e14934-0ea8-4bb8-9ff6-7f06f89f2ae8\") " Jan 20 17:54:39 crc kubenswrapper[4558]: I0120 17:54:39.947768 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/85e14934-0ea8-4bb8-9ff6-7f06f89f2ae8-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "85e14934-0ea8-4bb8-9ff6-7f06f89f2ae8" (UID: "85e14934-0ea8-4bb8-9ff6-7f06f89f2ae8"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:54:39 crc kubenswrapper[4558]: I0120 17:54:39.948343 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/85e14934-0ea8-4bb8-9ff6-7f06f89f2ae8-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:54:39 crc kubenswrapper[4558]: I0120 17:54:39.953469 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/85e14934-0ea8-4bb8-9ff6-7f06f89f2ae8-kube-api-access-rzrll" (OuterVolumeSpecName: "kube-api-access-rzrll") pod "85e14934-0ea8-4bb8-9ff6-7f06f89f2ae8" (UID: "85e14934-0ea8-4bb8-9ff6-7f06f89f2ae8"). InnerVolumeSpecName "kube-api-access-rzrll". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:54:40 crc kubenswrapper[4558]: I0120 17:54:40.050154 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rzrll\" (UniqueName: \"kubernetes.io/projected/85e14934-0ea8-4bb8-9ff6-7f06f89f2ae8-kube-api-access-rzrll\") on node \"crc\" DevicePath \"\"" Jan 20 17:54:40 crc kubenswrapper[4558]: I0120 17:54:40.472533 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-0" event={"ID":"d512bec4-7ed0-43bb-b8fe-0f235f7698e5","Type":"ContainerStarted","Data":"9f92bbadf6b5978b2ec804e4ad254a0133f5ddc1c7a0c908a984ae2727969174"} Jan 20 17:54:40 crc kubenswrapper[4558]: I0120 17:54:40.473863 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:54:40 crc kubenswrapper[4558]: I0120 17:54:40.474715 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-l5vtd" event={"ID":"85e14934-0ea8-4bb8-9ff6-7f06f89f2ae8","Type":"ContainerDied","Data":"f735f9a3b040366ee6ddc324a94572d2833c42deccad094c03a220f7c17932df"} Jan 20 17:54:40 crc kubenswrapper[4558]: I0120 17:54:40.474870 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f735f9a3b040366ee6ddc324a94572d2833c42deccad094c03a220f7c17932df" Jan 20 17:54:40 crc kubenswrapper[4558]: I0120 17:54:40.474754 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-l5vtd" Jan 20 17:54:40 crc kubenswrapper[4558]: I0120 17:54:40.477064 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" event={"ID":"948fb15d-07f1-4b25-b8d5-7d582024ef28","Type":"ContainerStarted","Data":"04ae5aafbf49cda71c8764575c7291ff0e2e0ca0bcd2151b6b2cce95d4ba6b14"} Jan 20 17:54:40 crc kubenswrapper[4558]: I0120 17:54:40.477380 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:54:40 crc kubenswrapper[4558]: I0120 17:54:40.511798 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/rabbitmq-server-0" podStartSLOduration=37.511777145 podStartE2EDuration="37.511777145s" podCreationTimestamp="2026-01-20 17:54:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:54:40.501893511 +0000 UTC m=+4374.262231478" watchObservedRunningTime="2026-01-20 17:54:40.511777145 +0000 UTC m=+4374.272115113" Jan 20 17:54:40 crc kubenswrapper[4558]: I0120 17:54:40.523925 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" podStartSLOduration=37.523908859 podStartE2EDuration="37.523908859s" podCreationTimestamp="2026-01-20 17:54:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:54:40.52201116 +0000 UTC m=+4374.282349127" watchObservedRunningTime="2026-01-20 17:54:40.523908859 +0000 UTC m=+4374.284246825" Jan 20 17:54:40 crc kubenswrapper[4558]: I0120 17:54:40.750010 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-db-sync-4h4wr" Jan 20 17:54:40 crc kubenswrapper[4558]: I0120 17:54:40.869587 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3da90c4c-591e-4d99-84ed-3e720b2b853a-combined-ca-bundle\") pod \"3da90c4c-591e-4d99-84ed-3e720b2b853a\" (UID: \"3da90c4c-591e-4d99-84ed-3e720b2b853a\") " Jan 20 17:54:40 crc kubenswrapper[4558]: I0120 17:54:40.869766 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/3da90c4c-591e-4d99-84ed-3e720b2b853a-db-sync-config-data\") pod \"3da90c4c-591e-4d99-84ed-3e720b2b853a\" (UID: \"3da90c4c-591e-4d99-84ed-3e720b2b853a\") " Jan 20 17:54:40 crc kubenswrapper[4558]: I0120 17:54:40.869821 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3da90c4c-591e-4d99-84ed-3e720b2b853a-config-data\") pod \"3da90c4c-591e-4d99-84ed-3e720b2b853a\" (UID: \"3da90c4c-591e-4d99-84ed-3e720b2b853a\") " Jan 20 17:54:40 crc kubenswrapper[4558]: I0120 17:54:40.869874 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7l7gq\" (UniqueName: \"kubernetes.io/projected/3da90c4c-591e-4d99-84ed-3e720b2b853a-kube-api-access-7l7gq\") pod \"3da90c4c-591e-4d99-84ed-3e720b2b853a\" (UID: \"3da90c4c-591e-4d99-84ed-3e720b2b853a\") " Jan 20 17:54:40 crc kubenswrapper[4558]: I0120 17:54:40.875454 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3da90c4c-591e-4d99-84ed-3e720b2b853a-kube-api-access-7l7gq" (OuterVolumeSpecName: "kube-api-access-7l7gq") pod "3da90c4c-591e-4d99-84ed-3e720b2b853a" (UID: "3da90c4c-591e-4d99-84ed-3e720b2b853a"). InnerVolumeSpecName "kube-api-access-7l7gq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:54:40 crc kubenswrapper[4558]: I0120 17:54:40.875735 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3da90c4c-591e-4d99-84ed-3e720b2b853a-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "3da90c4c-591e-4d99-84ed-3e720b2b853a" (UID: "3da90c4c-591e-4d99-84ed-3e720b2b853a"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:54:40 crc kubenswrapper[4558]: I0120 17:54:40.893209 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3da90c4c-591e-4d99-84ed-3e720b2b853a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3da90c4c-591e-4d99-84ed-3e720b2b853a" (UID: "3da90c4c-591e-4d99-84ed-3e720b2b853a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:54:40 crc kubenswrapper[4558]: I0120 17:54:40.913964 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3da90c4c-591e-4d99-84ed-3e720b2b853a-config-data" (OuterVolumeSpecName: "config-data") pod "3da90c4c-591e-4d99-84ed-3e720b2b853a" (UID: "3da90c4c-591e-4d99-84ed-3e720b2b853a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:54:40 crc kubenswrapper[4558]: I0120 17:54:40.972454 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3da90c4c-591e-4d99-84ed-3e720b2b853a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:54:40 crc kubenswrapper[4558]: I0120 17:54:40.972588 4558 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/3da90c4c-591e-4d99-84ed-3e720b2b853a-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:54:40 crc kubenswrapper[4558]: I0120 17:54:40.972653 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3da90c4c-591e-4d99-84ed-3e720b2b853a-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:54:40 crc kubenswrapper[4558]: I0120 17:54:40.972754 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7l7gq\" (UniqueName: \"kubernetes.io/projected/3da90c4c-591e-4d99-84ed-3e720b2b853a-kube-api-access-7l7gq\") on node \"crc\" DevicePath \"\"" Jan 20 17:54:41 crc kubenswrapper[4558]: I0120 17:54:41.490512 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-db-sync-4h4wr" event={"ID":"3da90c4c-591e-4d99-84ed-3e720b2b853a","Type":"ContainerDied","Data":"968f13aaebbb1284ce10543f4398d2637ce159677e5ebfd03a8ab16c57feed4b"} Jan 20 17:54:41 crc kubenswrapper[4558]: I0120 17:54:41.490551 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-db-sync-4h4wr" Jan 20 17:54:41 crc kubenswrapper[4558]: I0120 17:54:41.490567 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="968f13aaebbb1284ce10543f4398d2637ce159677e5ebfd03a8ab16c57feed4b" Jan 20 17:54:42 crc kubenswrapper[4558]: I0120 17:54:42.566582 4558 scope.go:117] "RemoveContainer" containerID="4ea7fa61d8b21007a044345acec89fcebe56c2921cf0f76ff2002f44bdc88ede" Jan 20 17:54:42 crc kubenswrapper[4558]: E0120 17:54:42.566854 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:54:44 crc kubenswrapper[4558]: I0120 17:54:44.190358 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-5dc494d4bf-wrrjj" Jan 20 17:54:44 crc kubenswrapper[4558]: I0120 17:54:44.251784 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-zmctf"] Jan 20 17:54:44 crc kubenswrapper[4558]: I0120 17:54:44.252043 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-zmctf" podUID="c33a9cb9-2eec-4c2e-847d-9b06fcce5fef" containerName="dnsmasq-dns" containerID="cri-o://2fe565e8e76d528286b242f2370ba6b33966f2dac706f93bf52b30e596d353a1" gracePeriod=10 Jan 20 17:54:44 crc kubenswrapper[4558]: I0120 17:54:44.521953 4558 generic.go:334] "Generic (PLEG): container finished" podID="c33a9cb9-2eec-4c2e-847d-9b06fcce5fef" containerID="2fe565e8e76d528286b242f2370ba6b33966f2dac706f93bf52b30e596d353a1" exitCode=0 Jan 
20 17:54:44 crc kubenswrapper[4558]: I0120 17:54:44.522008 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-zmctf" event={"ID":"c33a9cb9-2eec-4c2e-847d-9b06fcce5fef","Type":"ContainerDied","Data":"2fe565e8e76d528286b242f2370ba6b33966f2dac706f93bf52b30e596d353a1"} Jan 20 17:54:44 crc kubenswrapper[4558]: I0120 17:54:44.667753 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-zmctf" Jan 20 17:54:44 crc kubenswrapper[4558]: I0120 17:54:44.845629 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/c33a9cb9-2eec-4c2e-847d-9b06fcce5fef-dnsmasq-svc\") pod \"c33a9cb9-2eec-4c2e-847d-9b06fcce5fef\" (UID: \"c33a9cb9-2eec-4c2e-847d-9b06fcce5fef\") " Jan 20 17:54:44 crc kubenswrapper[4558]: I0120 17:54:44.845924 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c33a9cb9-2eec-4c2e-847d-9b06fcce5fef-config\") pod \"c33a9cb9-2eec-4c2e-847d-9b06fcce5fef\" (UID: \"c33a9cb9-2eec-4c2e-847d-9b06fcce5fef\") " Jan 20 17:54:44 crc kubenswrapper[4558]: I0120 17:54:44.846834 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9hp6\" (UniqueName: \"kubernetes.io/projected/c33a9cb9-2eec-4c2e-847d-9b06fcce5fef-kube-api-access-w9hp6\") pod \"c33a9cb9-2eec-4c2e-847d-9b06fcce5fef\" (UID: \"c33a9cb9-2eec-4c2e-847d-9b06fcce5fef\") " Jan 20 17:54:45 crc kubenswrapper[4558]: I0120 17:54:45.094305 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c33a9cb9-2eec-4c2e-847d-9b06fcce5fef-kube-api-access-w9hp6" (OuterVolumeSpecName: "kube-api-access-w9hp6") pod "c33a9cb9-2eec-4c2e-847d-9b06fcce5fef" (UID: "c33a9cb9-2eec-4c2e-847d-9b06fcce5fef"). InnerVolumeSpecName "kube-api-access-w9hp6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:54:45 crc kubenswrapper[4558]: I0120 17:54:45.125895 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c33a9cb9-2eec-4c2e-847d-9b06fcce5fef-dnsmasq-svc" (OuterVolumeSpecName: "dnsmasq-svc") pod "c33a9cb9-2eec-4c2e-847d-9b06fcce5fef" (UID: "c33a9cb9-2eec-4c2e-847d-9b06fcce5fef"). InnerVolumeSpecName "dnsmasq-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:54:45 crc kubenswrapper[4558]: I0120 17:54:45.130834 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c33a9cb9-2eec-4c2e-847d-9b06fcce5fef-config" (OuterVolumeSpecName: "config") pod "c33a9cb9-2eec-4c2e-847d-9b06fcce5fef" (UID: "c33a9cb9-2eec-4c2e-847d-9b06fcce5fef"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:54:45 crc kubenswrapper[4558]: I0120 17:54:45.153689 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9hp6\" (UniqueName: \"kubernetes.io/projected/c33a9cb9-2eec-4c2e-847d-9b06fcce5fef-kube-api-access-w9hp6\") on node \"crc\" DevicePath \"\"" Jan 20 17:54:45 crc kubenswrapper[4558]: I0120 17:54:45.153819 4558 reconciler_common.go:293] "Volume detached for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/c33a9cb9-2eec-4c2e-847d-9b06fcce5fef-dnsmasq-svc\") on node \"crc\" DevicePath \"\"" Jan 20 17:54:45 crc kubenswrapper[4558]: I0120 17:54:45.153896 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c33a9cb9-2eec-4c2e-847d-9b06fcce5fef-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:54:45 crc kubenswrapper[4558]: I0120 17:54:45.533748 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-zmctf" event={"ID":"c33a9cb9-2eec-4c2e-847d-9b06fcce5fef","Type":"ContainerDied","Data":"6bbf17d0b514204638f96f7976faacb7a5932888ba4c9555efd9243c159f4174"} Jan 20 17:54:45 crc kubenswrapper[4558]: I0120 17:54:45.534210 4558 scope.go:117] "RemoveContainer" containerID="2fe565e8e76d528286b242f2370ba6b33966f2dac706f93bf52b30e596d353a1" Jan 20 17:54:45 crc kubenswrapper[4558]: I0120 17:54:45.533849 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-zmctf" Jan 20 17:54:45 crc kubenswrapper[4558]: I0120 17:54:45.561971 4558 scope.go:117] "RemoveContainer" containerID="c5ebce8532452e1fc28d5c965af2fb1ecd6916ab9ffef4442cb3e2b72c16f2e5" Jan 20 17:54:45 crc kubenswrapper[4558]: I0120 17:54:45.571978 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-zmctf"] Jan 20 17:54:45 crc kubenswrapper[4558]: I0120 17:54:45.581405 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-zmctf"] Jan 20 17:54:46 crc kubenswrapper[4558]: I0120 17:54:46.577012 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c33a9cb9-2eec-4c2e-847d-9b06fcce5fef" path="/var/lib/kubelet/pods/c33a9cb9-2eec-4c2e-847d-9b06fcce5fef/volumes" Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.011370 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.289337 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.343554 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-db-create-rzf9r"] Jan 20 17:54:55 crc kubenswrapper[4558]: E0120 17:54:55.344700 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c33a9cb9-2eec-4c2e-847d-9b06fcce5fef" containerName="init" Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.344790 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c33a9cb9-2eec-4c2e-847d-9b06fcce5fef" containerName="init" Jan 20 17:54:55 crc kubenswrapper[4558]: E0120 17:54:55.344877 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3da90c4c-591e-4d99-84ed-3e720b2b853a" containerName="glance-db-sync" Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.344941 4558 
state_mem.go:107] "Deleted CPUSet assignment" podUID="3da90c4c-591e-4d99-84ed-3e720b2b853a" containerName="glance-db-sync" Jan 20 17:54:55 crc kubenswrapper[4558]: E0120 17:54:55.345023 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c33a9cb9-2eec-4c2e-847d-9b06fcce5fef" containerName="dnsmasq-dns" Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.345098 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c33a9cb9-2eec-4c2e-847d-9b06fcce5fef" containerName="dnsmasq-dns" Jan 20 17:54:55 crc kubenswrapper[4558]: E0120 17:54:55.345202 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85e14934-0ea8-4bb8-9ff6-7f06f89f2ae8" containerName="mariadb-account-create-update" Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.345265 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="85e14934-0ea8-4bb8-9ff6-7f06f89f2ae8" containerName="mariadb-account-create-update" Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.345521 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="85e14934-0ea8-4bb8-9ff6-7f06f89f2ae8" containerName="mariadb-account-create-update" Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.345603 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="3da90c4c-591e-4d99-84ed-3e720b2b853a" containerName="glance-db-sync" Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.345675 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c33a9cb9-2eec-4c2e-847d-9b06fcce5fef" containerName="dnsmasq-dns" Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.346558 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-db-create-rzf9r" Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.375631 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-db-create-rzf9r"] Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.384268 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-e4c5-account-create-update-hnffc"] Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.385554 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-e4c5-account-create-update-hnffc" Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.387395 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-db-secret" Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.391503 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-e4c5-account-create-update-hnffc"] Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.439057 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-db-create-966g7"] Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.440078 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-db-create-966g7" Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.450331 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-c1b8-account-create-update-5scts"] Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.451665 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-c1b8-account-create-update-5scts" Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.453978 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-db-secret" Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.464646 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-db-create-966g7"] Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.489405 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-c1b8-account-create-update-5scts"] Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.541232 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-db-create-qxgzw"] Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.542512 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-db-create-qxgzw" Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.550643 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-db-create-qxgzw"] Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.552839 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r9fls\" (UniqueName: \"kubernetes.io/projected/d8a20e0f-af9e-422e-b8a6-fd37ffc19335-kube-api-access-r9fls\") pod \"neutron-db-create-966g7\" (UID: \"d8a20e0f-af9e-422e-b8a6-fd37ffc19335\") " pod="openstack-kuttl-tests/neutron-db-create-966g7" Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.552919 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/962b2c8c-b0ef-4429-8aba-0fb24998839e-operator-scripts\") pod \"neutron-c1b8-account-create-update-5scts\" (UID: \"962b2c8c-b0ef-4429-8aba-0fb24998839e\") " pod="openstack-kuttl-tests/neutron-c1b8-account-create-update-5scts" Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.553102 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d8a20e0f-af9e-422e-b8a6-fd37ffc19335-operator-scripts\") pod \"neutron-db-create-966g7\" (UID: \"d8a20e0f-af9e-422e-b8a6-fd37ffc19335\") " pod="openstack-kuttl-tests/neutron-db-create-966g7" Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.553145 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zdtvr\" (UniqueName: \"kubernetes.io/projected/19001f5f-e2af-482f-b182-0cf5d3297979-kube-api-access-zdtvr\") pod \"cinder-db-create-rzf9r\" (UID: \"19001f5f-e2af-482f-b182-0cf5d3297979\") " pod="openstack-kuttl-tests/cinder-db-create-rzf9r" Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.553284 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bqbph\" (UniqueName: \"kubernetes.io/projected/81fc72e8-7c47-45cf-8cbd-b77b7b0e9ed5-kube-api-access-bqbph\") pod \"barbican-e4c5-account-create-update-hnffc\" (UID: \"81fc72e8-7c47-45cf-8cbd-b77b7b0e9ed5\") " pod="openstack-kuttl-tests/barbican-e4c5-account-create-update-hnffc" Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.553339 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/81fc72e8-7c47-45cf-8cbd-b77b7b0e9ed5-operator-scripts\") pod \"barbican-e4c5-account-create-update-hnffc\" (UID: \"81fc72e8-7c47-45cf-8cbd-b77b7b0e9ed5\") " pod="openstack-kuttl-tests/barbican-e4c5-account-create-update-hnffc" Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.553361 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/19001f5f-e2af-482f-b182-0cf5d3297979-operator-scripts\") pod \"cinder-db-create-rzf9r\" (UID: \"19001f5f-e2af-482f-b182-0cf5d3297979\") " pod="openstack-kuttl-tests/cinder-db-create-rzf9r" Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.553452 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-55dk9\" (UniqueName: \"kubernetes.io/projected/962b2c8c-b0ef-4429-8aba-0fb24998839e-kube-api-access-55dk9\") pod \"neutron-c1b8-account-create-update-5scts\" (UID: \"962b2c8c-b0ef-4429-8aba-0fb24998839e\") " pod="openstack-kuttl-tests/neutron-c1b8-account-create-update-5scts" Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.560574 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-fde5-account-create-update-dmprz"] Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.561699 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-fde5-account-create-update-dmprz" Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.563564 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-db-secret" Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.566093 4558 scope.go:117] "RemoveContainer" containerID="4ea7fa61d8b21007a044345acec89fcebe56c2921cf0f76ff2002f44bdc88ede" Jan 20 17:54:55 crc kubenswrapper[4558]: E0120 17:54:55.566309 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.571673 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-fde5-account-create-update-dmprz"] Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.601847 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-db-sync-ktt4t"] Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.602938 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-db-sync-ktt4t" Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.605130 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-keystone-dockercfg-bqxt2" Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.607489 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-scripts" Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.607515 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-config-data" Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.607805 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone" Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.621921 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-db-sync-ktt4t"] Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.654795 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8bd2r\" (UniqueName: \"kubernetes.io/projected/b467edba-4957-4d06-b48f-c88a2a580e82-kube-api-access-8bd2r\") pod \"barbican-db-create-qxgzw\" (UID: \"b467edba-4957-4d06-b48f-c88a2a580e82\") " pod="openstack-kuttl-tests/barbican-db-create-qxgzw" Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.655194 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/962b2c8c-b0ef-4429-8aba-0fb24998839e-operator-scripts\") pod \"neutron-c1b8-account-create-update-5scts\" (UID: \"962b2c8c-b0ef-4429-8aba-0fb24998839e\") " pod="openstack-kuttl-tests/neutron-c1b8-account-create-update-5scts" Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.655241 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zdtvr\" (UniqueName: \"kubernetes.io/projected/19001f5f-e2af-482f-b182-0cf5d3297979-kube-api-access-zdtvr\") pod \"cinder-db-create-rzf9r\" (UID: \"19001f5f-e2af-482f-b182-0cf5d3297979\") " pod="openstack-kuttl-tests/cinder-db-create-rzf9r" Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.655265 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d8a20e0f-af9e-422e-b8a6-fd37ffc19335-operator-scripts\") pod \"neutron-db-create-966g7\" (UID: \"d8a20e0f-af9e-422e-b8a6-fd37ffc19335\") " pod="openstack-kuttl-tests/neutron-db-create-966g7" Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.655299 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bqbph\" (UniqueName: \"kubernetes.io/projected/81fc72e8-7c47-45cf-8cbd-b77b7b0e9ed5-kube-api-access-bqbph\") pod \"barbican-e4c5-account-create-update-hnffc\" (UID: \"81fc72e8-7c47-45cf-8cbd-b77b7b0e9ed5\") " pod="openstack-kuttl-tests/barbican-e4c5-account-create-update-hnffc" Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.655325 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/81fc72e8-7c47-45cf-8cbd-b77b7b0e9ed5-operator-scripts\") pod \"barbican-e4c5-account-create-update-hnffc\" (UID: \"81fc72e8-7c47-45cf-8cbd-b77b7b0e9ed5\") " pod="openstack-kuttl-tests/barbican-e4c5-account-create-update-hnffc" Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.655341 4558 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/19001f5f-e2af-482f-b182-0cf5d3297979-operator-scripts\") pod \"cinder-db-create-rzf9r\" (UID: \"19001f5f-e2af-482f-b182-0cf5d3297979\") " pod="openstack-kuttl-tests/cinder-db-create-rzf9r" Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.655370 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-55dk9\" (UniqueName: \"kubernetes.io/projected/962b2c8c-b0ef-4429-8aba-0fb24998839e-kube-api-access-55dk9\") pod \"neutron-c1b8-account-create-update-5scts\" (UID: \"962b2c8c-b0ef-4429-8aba-0fb24998839e\") " pod="openstack-kuttl-tests/neutron-c1b8-account-create-update-5scts" Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.655391 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b467edba-4957-4d06-b48f-c88a2a580e82-operator-scripts\") pod \"barbican-db-create-qxgzw\" (UID: \"b467edba-4957-4d06-b48f-c88a2a580e82\") " pod="openstack-kuttl-tests/barbican-db-create-qxgzw" Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.655431 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r9fls\" (UniqueName: \"kubernetes.io/projected/d8a20e0f-af9e-422e-b8a6-fd37ffc19335-kube-api-access-r9fls\") pod \"neutron-db-create-966g7\" (UID: \"d8a20e0f-af9e-422e-b8a6-fd37ffc19335\") " pod="openstack-kuttl-tests/neutron-db-create-966g7" Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.655977 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/962b2c8c-b0ef-4429-8aba-0fb24998839e-operator-scripts\") pod \"neutron-c1b8-account-create-update-5scts\" (UID: \"962b2c8c-b0ef-4429-8aba-0fb24998839e\") " pod="openstack-kuttl-tests/neutron-c1b8-account-create-update-5scts" Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.656676 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/19001f5f-e2af-482f-b182-0cf5d3297979-operator-scripts\") pod \"cinder-db-create-rzf9r\" (UID: \"19001f5f-e2af-482f-b182-0cf5d3297979\") " pod="openstack-kuttl-tests/cinder-db-create-rzf9r" Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.657192 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/81fc72e8-7c47-45cf-8cbd-b77b7b0e9ed5-operator-scripts\") pod \"barbican-e4c5-account-create-update-hnffc\" (UID: \"81fc72e8-7c47-45cf-8cbd-b77b7b0e9ed5\") " pod="openstack-kuttl-tests/barbican-e4c5-account-create-update-hnffc" Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.657238 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d8a20e0f-af9e-422e-b8a6-fd37ffc19335-operator-scripts\") pod \"neutron-db-create-966g7\" (UID: \"d8a20e0f-af9e-422e-b8a6-fd37ffc19335\") " pod="openstack-kuttl-tests/neutron-db-create-966g7" Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.757982 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nj7r4\" (UniqueName: \"kubernetes.io/projected/8a9fc705-3a10-4e6d-8e0a-c1c8dfcb8bc5-kube-api-access-nj7r4\") pod \"keystone-db-sync-ktt4t\" (UID: 
\"8a9fc705-3a10-4e6d-8e0a-c1c8dfcb8bc5\") " pod="openstack-kuttl-tests/keystone-db-sync-ktt4t" Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.758196 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b467edba-4957-4d06-b48f-c88a2a580e82-operator-scripts\") pod \"barbican-db-create-qxgzw\" (UID: \"b467edba-4957-4d06-b48f-c88a2a580e82\") " pod="openstack-kuttl-tests/barbican-db-create-qxgzw" Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.758243 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a9fc705-3a10-4e6d-8e0a-c1c8dfcb8bc5-config-data\") pod \"keystone-db-sync-ktt4t\" (UID: \"8a9fc705-3a10-4e6d-8e0a-c1c8dfcb8bc5\") " pod="openstack-kuttl-tests/keystone-db-sync-ktt4t" Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.758274 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zwppc\" (UniqueName: \"kubernetes.io/projected/39abd72a-05b8-4df0-bd89-3c8f6f566b38-kube-api-access-zwppc\") pod \"cinder-fde5-account-create-update-dmprz\" (UID: \"39abd72a-05b8-4df0-bd89-3c8f6f566b38\") " pod="openstack-kuttl-tests/cinder-fde5-account-create-update-dmprz" Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.758461 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8bd2r\" (UniqueName: \"kubernetes.io/projected/b467edba-4957-4d06-b48f-c88a2a580e82-kube-api-access-8bd2r\") pod \"barbican-db-create-qxgzw\" (UID: \"b467edba-4957-4d06-b48f-c88a2a580e82\") " pod="openstack-kuttl-tests/barbican-db-create-qxgzw" Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.759177 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b467edba-4957-4d06-b48f-c88a2a580e82-operator-scripts\") pod \"barbican-db-create-qxgzw\" (UID: \"b467edba-4957-4d06-b48f-c88a2a580e82\") " pod="openstack-kuttl-tests/barbican-db-create-qxgzw" Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.759643 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/39abd72a-05b8-4df0-bd89-3c8f6f566b38-operator-scripts\") pod \"cinder-fde5-account-create-update-dmprz\" (UID: \"39abd72a-05b8-4df0-bd89-3c8f6f566b38\") " pod="openstack-kuttl-tests/cinder-fde5-account-create-update-dmprz" Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.759785 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a9fc705-3a10-4e6d-8e0a-c1c8dfcb8bc5-combined-ca-bundle\") pod \"keystone-db-sync-ktt4t\" (UID: \"8a9fc705-3a10-4e6d-8e0a-c1c8dfcb8bc5\") " pod="openstack-kuttl-tests/keystone-db-sync-ktt4t" Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.861991 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/39abd72a-05b8-4df0-bd89-3c8f6f566b38-operator-scripts\") pod \"cinder-fde5-account-create-update-dmprz\" (UID: \"39abd72a-05b8-4df0-bd89-3c8f6f566b38\") " pod="openstack-kuttl-tests/cinder-fde5-account-create-update-dmprz" Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.862105 4558 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a9fc705-3a10-4e6d-8e0a-c1c8dfcb8bc5-combined-ca-bundle\") pod \"keystone-db-sync-ktt4t\" (UID: \"8a9fc705-3a10-4e6d-8e0a-c1c8dfcb8bc5\") " pod="openstack-kuttl-tests/keystone-db-sync-ktt4t" Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.862243 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nj7r4\" (UniqueName: \"kubernetes.io/projected/8a9fc705-3a10-4e6d-8e0a-c1c8dfcb8bc5-kube-api-access-nj7r4\") pod \"keystone-db-sync-ktt4t\" (UID: \"8a9fc705-3a10-4e6d-8e0a-c1c8dfcb8bc5\") " pod="openstack-kuttl-tests/keystone-db-sync-ktt4t" Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.862321 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a9fc705-3a10-4e6d-8e0a-c1c8dfcb8bc5-config-data\") pod \"keystone-db-sync-ktt4t\" (UID: \"8a9fc705-3a10-4e6d-8e0a-c1c8dfcb8bc5\") " pod="openstack-kuttl-tests/keystone-db-sync-ktt4t" Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.862356 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zwppc\" (UniqueName: \"kubernetes.io/projected/39abd72a-05b8-4df0-bd89-3c8f6f566b38-kube-api-access-zwppc\") pod \"cinder-fde5-account-create-update-dmprz\" (UID: \"39abd72a-05b8-4df0-bd89-3c8f6f566b38\") " pod="openstack-kuttl-tests/cinder-fde5-account-create-update-dmprz" Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.863313 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/39abd72a-05b8-4df0-bd89-3c8f6f566b38-operator-scripts\") pod \"cinder-fde5-account-create-update-dmprz\" (UID: \"39abd72a-05b8-4df0-bd89-3c8f6f566b38\") " pod="openstack-kuttl-tests/cinder-fde5-account-create-update-dmprz" Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.893544 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r9fls\" (UniqueName: \"kubernetes.io/projected/d8a20e0f-af9e-422e-b8a6-fd37ffc19335-kube-api-access-r9fls\") pod \"neutron-db-create-966g7\" (UID: \"d8a20e0f-af9e-422e-b8a6-fd37ffc19335\") " pod="openstack-kuttl-tests/neutron-db-create-966g7" Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.894621 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-55dk9\" (UniqueName: \"kubernetes.io/projected/962b2c8c-b0ef-4429-8aba-0fb24998839e-kube-api-access-55dk9\") pod \"neutron-c1b8-account-create-update-5scts\" (UID: \"962b2c8c-b0ef-4429-8aba-0fb24998839e\") " pod="openstack-kuttl-tests/neutron-c1b8-account-create-update-5scts" Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.896871 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zdtvr\" (UniqueName: \"kubernetes.io/projected/19001f5f-e2af-482f-b182-0cf5d3297979-kube-api-access-zdtvr\") pod \"cinder-db-create-rzf9r\" (UID: \"19001f5f-e2af-482f-b182-0cf5d3297979\") " pod="openstack-kuttl-tests/cinder-db-create-rzf9r" Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.897134 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8bd2r\" (UniqueName: \"kubernetes.io/projected/b467edba-4957-4d06-b48f-c88a2a580e82-kube-api-access-8bd2r\") pod \"barbican-db-create-qxgzw\" (UID: \"b467edba-4957-4d06-b48f-c88a2a580e82\") " pod="openstack-kuttl-tests/barbican-db-create-qxgzw" Jan 20 17:54:55 crc 
kubenswrapper[4558]: I0120 17:54:55.897140 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bqbph\" (UniqueName: \"kubernetes.io/projected/81fc72e8-7c47-45cf-8cbd-b77b7b0e9ed5-kube-api-access-bqbph\") pod \"barbican-e4c5-account-create-update-hnffc\" (UID: \"81fc72e8-7c47-45cf-8cbd-b77b7b0e9ed5\") " pod="openstack-kuttl-tests/barbican-e4c5-account-create-update-hnffc" Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.897374 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a9fc705-3a10-4e6d-8e0a-c1c8dfcb8bc5-combined-ca-bundle\") pod \"keystone-db-sync-ktt4t\" (UID: \"8a9fc705-3a10-4e6d-8e0a-c1c8dfcb8bc5\") " pod="openstack-kuttl-tests/keystone-db-sync-ktt4t" Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.897777 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a9fc705-3a10-4e6d-8e0a-c1c8dfcb8bc5-config-data\") pod \"keystone-db-sync-ktt4t\" (UID: \"8a9fc705-3a10-4e6d-8e0a-c1c8dfcb8bc5\") " pod="openstack-kuttl-tests/keystone-db-sync-ktt4t" Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.898927 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zwppc\" (UniqueName: \"kubernetes.io/projected/39abd72a-05b8-4df0-bd89-3c8f6f566b38-kube-api-access-zwppc\") pod \"cinder-fde5-account-create-update-dmprz\" (UID: \"39abd72a-05b8-4df0-bd89-3c8f6f566b38\") " pod="openstack-kuttl-tests/cinder-fde5-account-create-update-dmprz" Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.907723 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nj7r4\" (UniqueName: \"kubernetes.io/projected/8a9fc705-3a10-4e6d-8e0a-c1c8dfcb8bc5-kube-api-access-nj7r4\") pod \"keystone-db-sync-ktt4t\" (UID: \"8a9fc705-3a10-4e6d-8e0a-c1c8dfcb8bc5\") " pod="openstack-kuttl-tests/keystone-db-sync-ktt4t" Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.917620 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-db-sync-ktt4t" Jan 20 17:54:55 crc kubenswrapper[4558]: I0120 17:54:55.969745 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-db-create-rzf9r" Jan 20 17:54:56 crc kubenswrapper[4558]: I0120 17:54:56.022692 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-e4c5-account-create-update-hnffc" Jan 20 17:54:56 crc kubenswrapper[4558]: I0120 17:54:56.059742 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-db-create-966g7" Jan 20 17:54:56 crc kubenswrapper[4558]: I0120 17:54:56.082426 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-c1b8-account-create-update-5scts" Jan 20 17:54:56 crc kubenswrapper[4558]: I0120 17:54:56.162061 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-db-create-qxgzw" Jan 20 17:54:56 crc kubenswrapper[4558]: I0120 17:54:56.174974 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-fde5-account-create-update-dmprz" Jan 20 17:54:56 crc kubenswrapper[4558]: I0120 17:54:56.248460 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-db-create-rzf9r"] Jan 20 17:54:56 crc kubenswrapper[4558]: I0120 17:54:56.379576 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-db-sync-ktt4t"] Jan 20 17:54:56 crc kubenswrapper[4558]: I0120 17:54:56.602941 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-e4c5-account-create-update-hnffc"] Jan 20 17:54:56 crc kubenswrapper[4558]: I0120 17:54:56.638904 4558 generic.go:334] "Generic (PLEG): container finished" podID="19001f5f-e2af-482f-b182-0cf5d3297979" containerID="ce08a2498230a0eb01bce0d7e2ee1409b7ef0c15376d57223ae2e67fa27da942" exitCode=0 Jan 20 17:54:56 crc kubenswrapper[4558]: I0120 17:54:56.638960 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-create-rzf9r" event={"ID":"19001f5f-e2af-482f-b182-0cf5d3297979","Type":"ContainerDied","Data":"ce08a2498230a0eb01bce0d7e2ee1409b7ef0c15376d57223ae2e67fa27da942"} Jan 20 17:54:56 crc kubenswrapper[4558]: I0120 17:54:56.639439 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-create-rzf9r" event={"ID":"19001f5f-e2af-482f-b182-0cf5d3297979","Type":"ContainerStarted","Data":"c345c8505e971f0e72fa8a56790742baa8bd0d77b17079f7fc46e740227784ee"} Jan 20 17:54:56 crc kubenswrapper[4558]: I0120 17:54:56.641851 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-sync-ktt4t" event={"ID":"8a9fc705-3a10-4e6d-8e0a-c1c8dfcb8bc5","Type":"ContainerStarted","Data":"2b4a0c193260cb0e104fe9e95d28f9d5b353c0fbea727e60067b79e8342e911c"} Jan 20 17:54:56 crc kubenswrapper[4558]: I0120 17:54:56.642191 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-sync-ktt4t" event={"ID":"8a9fc705-3a10-4e6d-8e0a-c1c8dfcb8bc5","Type":"ContainerStarted","Data":"403caeae85e92d9aadbdf6ecb92c03f6772ed5be7dcc7352ec754bdb4de86beb"} Jan 20 17:54:56 crc kubenswrapper[4558]: I0120 17:54:56.646375 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-c1b8-account-create-update-5scts"] Jan 20 17:54:56 crc kubenswrapper[4558]: I0120 17:54:56.646684 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-e4c5-account-create-update-hnffc" event={"ID":"81fc72e8-7c47-45cf-8cbd-b77b7b0e9ed5","Type":"ContainerStarted","Data":"0098cf349b0ec0540fa6f2f4ba2721d1ae680743237ee8bbcf47f370e47bb674"} Jan 20 17:54:56 crc kubenswrapper[4558]: W0120 17:54:56.662024 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod962b2c8c_b0ef_4429_8aba_0fb24998839e.slice/crio-f0d05e3970ce9037f17c4ae0cea30d5649d62066e543314723b258484681e50f WatchSource:0}: Error finding container f0d05e3970ce9037f17c4ae0cea30d5649d62066e543314723b258484681e50f: Status 404 returned error can't find the container with id f0d05e3970ce9037f17c4ae0cea30d5649d62066e543314723b258484681e50f Jan 20 17:54:56 crc kubenswrapper[4558]: I0120 17:54:56.662607 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-db-create-966g7"] Jan 20 17:54:56 crc kubenswrapper[4558]: W0120 17:54:56.663121 4558 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd8a20e0f_af9e_422e_b8a6_fd37ffc19335.slice/crio-0a091e4e9d00855f7aaab02c35a5305376275a031883fd1a4f78d3258303c6f0 WatchSource:0}: Error finding container 0a091e4e9d00855f7aaab02c35a5305376275a031883fd1a4f78d3258303c6f0: Status 404 returned error can't find the container with id 0a091e4e9d00855f7aaab02c35a5305376275a031883fd1a4f78d3258303c6f0 Jan 20 17:54:56 crc kubenswrapper[4558]: I0120 17:54:56.682548 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/keystone-db-sync-ktt4t" podStartSLOduration=1.682528339 podStartE2EDuration="1.682528339s" podCreationTimestamp="2026-01-20 17:54:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:54:56.675366733 +0000 UTC m=+4390.435704700" watchObservedRunningTime="2026-01-20 17:54:56.682528339 +0000 UTC m=+4390.442866306" Jan 20 17:54:56 crc kubenswrapper[4558]: I0120 17:54:56.713258 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-db-create-qxgzw"] Jan 20 17:54:56 crc kubenswrapper[4558]: W0120 17:54:56.719557 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb467edba_4957_4d06_b48f_c88a2a580e82.slice/crio-1be7221c87ebb2233251fd88eecf54d176cc49cb2d66fae2565ee0c3f85b2eec WatchSource:0}: Error finding container 1be7221c87ebb2233251fd88eecf54d176cc49cb2d66fae2565ee0c3f85b2eec: Status 404 returned error can't find the container with id 1be7221c87ebb2233251fd88eecf54d176cc49cb2d66fae2565ee0c3f85b2eec Jan 20 17:54:56 crc kubenswrapper[4558]: I0120 17:54:56.784437 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-fde5-account-create-update-dmprz"] Jan 20 17:54:57 crc kubenswrapper[4558]: I0120 17:54:57.663618 4558 generic.go:334] "Generic (PLEG): container finished" podID="b467edba-4957-4d06-b48f-c88a2a580e82" containerID="4b7b35525f0c3029f485e041b6343e7110c7464da4bbf0f7e3ce658808702802" exitCode=0 Jan 20 17:54:57 crc kubenswrapper[4558]: I0120 17:54:57.664205 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-create-qxgzw" event={"ID":"b467edba-4957-4d06-b48f-c88a2a580e82","Type":"ContainerDied","Data":"4b7b35525f0c3029f485e041b6343e7110c7464da4bbf0f7e3ce658808702802"} Jan 20 17:54:57 crc kubenswrapper[4558]: I0120 17:54:57.664242 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-create-qxgzw" event={"ID":"b467edba-4957-4d06-b48f-c88a2a580e82","Type":"ContainerStarted","Data":"1be7221c87ebb2233251fd88eecf54d176cc49cb2d66fae2565ee0c3f85b2eec"} Jan 20 17:54:57 crc kubenswrapper[4558]: I0120 17:54:57.667219 4558 generic.go:334] "Generic (PLEG): container finished" podID="d8a20e0f-af9e-422e-b8a6-fd37ffc19335" containerID="a67a173b20f463bc4ddc6c13a0332946f983d41b27131157848d410f84fe841b" exitCode=0 Jan 20 17:54:57 crc kubenswrapper[4558]: I0120 17:54:57.667315 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-create-966g7" event={"ID":"d8a20e0f-af9e-422e-b8a6-fd37ffc19335","Type":"ContainerDied","Data":"a67a173b20f463bc4ddc6c13a0332946f983d41b27131157848d410f84fe841b"} Jan 20 17:54:57 crc kubenswrapper[4558]: I0120 17:54:57.667350 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-create-966g7" 
event={"ID":"d8a20e0f-af9e-422e-b8a6-fd37ffc19335","Type":"ContainerStarted","Data":"0a091e4e9d00855f7aaab02c35a5305376275a031883fd1a4f78d3258303c6f0"} Jan 20 17:54:57 crc kubenswrapper[4558]: I0120 17:54:57.669437 4558 generic.go:334] "Generic (PLEG): container finished" podID="962b2c8c-b0ef-4429-8aba-0fb24998839e" containerID="1b366f4687deb8288f3ee86256c3df9164bf2bb6ef6e21181b282edad8963350" exitCode=0 Jan 20 17:54:57 crc kubenswrapper[4558]: I0120 17:54:57.669518 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-c1b8-account-create-update-5scts" event={"ID":"962b2c8c-b0ef-4429-8aba-0fb24998839e","Type":"ContainerDied","Data":"1b366f4687deb8288f3ee86256c3df9164bf2bb6ef6e21181b282edad8963350"} Jan 20 17:54:57 crc kubenswrapper[4558]: I0120 17:54:57.669585 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-c1b8-account-create-update-5scts" event={"ID":"962b2c8c-b0ef-4429-8aba-0fb24998839e","Type":"ContainerStarted","Data":"f0d05e3970ce9037f17c4ae0cea30d5649d62066e543314723b258484681e50f"} Jan 20 17:54:57 crc kubenswrapper[4558]: I0120 17:54:57.671701 4558 generic.go:334] "Generic (PLEG): container finished" podID="81fc72e8-7c47-45cf-8cbd-b77b7b0e9ed5" containerID="fa759d890ffd5088e470c2dfd8e7b87cb410350e5d1a45277f73bb96721a0561" exitCode=0 Jan 20 17:54:57 crc kubenswrapper[4558]: I0120 17:54:57.671889 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-e4c5-account-create-update-hnffc" event={"ID":"81fc72e8-7c47-45cf-8cbd-b77b7b0e9ed5","Type":"ContainerDied","Data":"fa759d890ffd5088e470c2dfd8e7b87cb410350e5d1a45277f73bb96721a0561"} Jan 20 17:54:57 crc kubenswrapper[4558]: I0120 17:54:57.675214 4558 generic.go:334] "Generic (PLEG): container finished" podID="39abd72a-05b8-4df0-bd89-3c8f6f566b38" containerID="d153a6bcec9439654a1df7946e305afa4bba3742b0516f4131dfe8375eb4499e" exitCode=0 Jan 20 17:54:57 crc kubenswrapper[4558]: I0120 17:54:57.675726 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-fde5-account-create-update-dmprz" event={"ID":"39abd72a-05b8-4df0-bd89-3c8f6f566b38","Type":"ContainerDied","Data":"d153a6bcec9439654a1df7946e305afa4bba3742b0516f4131dfe8375eb4499e"} Jan 20 17:54:57 crc kubenswrapper[4558]: I0120 17:54:57.675808 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-fde5-account-create-update-dmprz" event={"ID":"39abd72a-05b8-4df0-bd89-3c8f6f566b38","Type":"ContainerStarted","Data":"23469c28303afdd0f962f259b5b544ed39918ec2f408ec24cdba7cb25a0aaf3e"} Jan 20 17:54:58 crc kubenswrapper[4558]: I0120 17:54:58.017953 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-db-create-rzf9r" Jan 20 17:54:58 crc kubenswrapper[4558]: I0120 17:54:58.209469 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/19001f5f-e2af-482f-b182-0cf5d3297979-operator-scripts\") pod \"19001f5f-e2af-482f-b182-0cf5d3297979\" (UID: \"19001f5f-e2af-482f-b182-0cf5d3297979\") " Jan 20 17:54:58 crc kubenswrapper[4558]: I0120 17:54:58.209549 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zdtvr\" (UniqueName: \"kubernetes.io/projected/19001f5f-e2af-482f-b182-0cf5d3297979-kube-api-access-zdtvr\") pod \"19001f5f-e2af-482f-b182-0cf5d3297979\" (UID: \"19001f5f-e2af-482f-b182-0cf5d3297979\") " Jan 20 17:54:58 crc kubenswrapper[4558]: I0120 17:54:58.209851 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/19001f5f-e2af-482f-b182-0cf5d3297979-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "19001f5f-e2af-482f-b182-0cf5d3297979" (UID: "19001f5f-e2af-482f-b182-0cf5d3297979"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:54:58 crc kubenswrapper[4558]: I0120 17:54:58.218967 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/19001f5f-e2af-482f-b182-0cf5d3297979-kube-api-access-zdtvr" (OuterVolumeSpecName: "kube-api-access-zdtvr") pod "19001f5f-e2af-482f-b182-0cf5d3297979" (UID: "19001f5f-e2af-482f-b182-0cf5d3297979"). InnerVolumeSpecName "kube-api-access-zdtvr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:54:58 crc kubenswrapper[4558]: I0120 17:54:58.312580 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/19001f5f-e2af-482f-b182-0cf5d3297979-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:54:58 crc kubenswrapper[4558]: I0120 17:54:58.312632 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zdtvr\" (UniqueName: \"kubernetes.io/projected/19001f5f-e2af-482f-b182-0cf5d3297979-kube-api-access-zdtvr\") on node \"crc\" DevicePath \"\"" Jan 20 17:54:58 crc kubenswrapper[4558]: I0120 17:54:58.687628 4558 generic.go:334] "Generic (PLEG): container finished" podID="8a9fc705-3a10-4e6d-8e0a-c1c8dfcb8bc5" containerID="2b4a0c193260cb0e104fe9e95d28f9d5b353c0fbea727e60067b79e8342e911c" exitCode=0 Jan 20 17:54:58 crc kubenswrapper[4558]: I0120 17:54:58.687778 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-sync-ktt4t" event={"ID":"8a9fc705-3a10-4e6d-8e0a-c1c8dfcb8bc5","Type":"ContainerDied","Data":"2b4a0c193260cb0e104fe9e95d28f9d5b353c0fbea727e60067b79e8342e911c"} Jan 20 17:54:58 crc kubenswrapper[4558]: I0120 17:54:58.691323 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-db-create-rzf9r" Jan 20 17:54:58 crc kubenswrapper[4558]: I0120 17:54:58.692406 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-create-rzf9r" event={"ID":"19001f5f-e2af-482f-b182-0cf5d3297979","Type":"ContainerDied","Data":"c345c8505e971f0e72fa8a56790742baa8bd0d77b17079f7fc46e740227784ee"} Jan 20 17:54:58 crc kubenswrapper[4558]: I0120 17:54:58.692437 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c345c8505e971f0e72fa8a56790742baa8bd0d77b17079f7fc46e740227784ee" Jan 20 17:54:59 crc kubenswrapper[4558]: I0120 17:54:59.095008 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-db-create-966g7" Jan 20 17:54:59 crc kubenswrapper[4558]: I0120 17:54:59.163641 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-fde5-account-create-update-dmprz" Jan 20 17:54:59 crc kubenswrapper[4558]: I0120 17:54:59.167822 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-e4c5-account-create-update-hnffc" Jan 20 17:54:59 crc kubenswrapper[4558]: I0120 17:54:59.173910 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-c1b8-account-create-update-5scts" Jan 20 17:54:59 crc kubenswrapper[4558]: I0120 17:54:59.182367 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-db-create-qxgzw" Jan 20 17:54:59 crc kubenswrapper[4558]: I0120 17:54:59.227927 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r9fls\" (UniqueName: \"kubernetes.io/projected/d8a20e0f-af9e-422e-b8a6-fd37ffc19335-kube-api-access-r9fls\") pod \"d8a20e0f-af9e-422e-b8a6-fd37ffc19335\" (UID: \"d8a20e0f-af9e-422e-b8a6-fd37ffc19335\") " Jan 20 17:54:59 crc kubenswrapper[4558]: I0120 17:54:59.228298 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d8a20e0f-af9e-422e-b8a6-fd37ffc19335-operator-scripts\") pod \"d8a20e0f-af9e-422e-b8a6-fd37ffc19335\" (UID: \"d8a20e0f-af9e-422e-b8a6-fd37ffc19335\") " Jan 20 17:54:59 crc kubenswrapper[4558]: I0120 17:54:59.228647 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-55dk9\" (UniqueName: \"kubernetes.io/projected/962b2c8c-b0ef-4429-8aba-0fb24998839e-kube-api-access-55dk9\") pod \"962b2c8c-b0ef-4429-8aba-0fb24998839e\" (UID: \"962b2c8c-b0ef-4429-8aba-0fb24998839e\") " Jan 20 17:54:59 crc kubenswrapper[4558]: I0120 17:54:59.228727 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/39abd72a-05b8-4df0-bd89-3c8f6f566b38-operator-scripts\") pod \"39abd72a-05b8-4df0-bd89-3c8f6f566b38\" (UID: \"39abd72a-05b8-4df0-bd89-3c8f6f566b38\") " Jan 20 17:54:59 crc kubenswrapper[4558]: I0120 17:54:59.228751 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zwppc\" (UniqueName: \"kubernetes.io/projected/39abd72a-05b8-4df0-bd89-3c8f6f566b38-kube-api-access-zwppc\") pod \"39abd72a-05b8-4df0-bd89-3c8f6f566b38\" (UID: \"39abd72a-05b8-4df0-bd89-3c8f6f566b38\") " Jan 20 17:54:59 crc kubenswrapper[4558]: I0120 17:54:59.229124 4558 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d8a20e0f-af9e-422e-b8a6-fd37ffc19335-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d8a20e0f-af9e-422e-b8a6-fd37ffc19335" (UID: "d8a20e0f-af9e-422e-b8a6-fd37ffc19335"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:54:59 crc kubenswrapper[4558]: I0120 17:54:59.229246 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/39abd72a-05b8-4df0-bd89-3c8f6f566b38-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "39abd72a-05b8-4df0-bd89-3c8f6f566b38" (UID: "39abd72a-05b8-4df0-bd89-3c8f6f566b38"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:54:59 crc kubenswrapper[4558]: I0120 17:54:59.231341 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d8a20e0f-af9e-422e-b8a6-fd37ffc19335-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:54:59 crc kubenswrapper[4558]: I0120 17:54:59.231370 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/39abd72a-05b8-4df0-bd89-3c8f6f566b38-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:54:59 crc kubenswrapper[4558]: I0120 17:54:59.237504 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/962b2c8c-b0ef-4429-8aba-0fb24998839e-kube-api-access-55dk9" (OuterVolumeSpecName: "kube-api-access-55dk9") pod "962b2c8c-b0ef-4429-8aba-0fb24998839e" (UID: "962b2c8c-b0ef-4429-8aba-0fb24998839e"). InnerVolumeSpecName "kube-api-access-55dk9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:54:59 crc kubenswrapper[4558]: I0120 17:54:59.237563 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/39abd72a-05b8-4df0-bd89-3c8f6f566b38-kube-api-access-zwppc" (OuterVolumeSpecName: "kube-api-access-zwppc") pod "39abd72a-05b8-4df0-bd89-3c8f6f566b38" (UID: "39abd72a-05b8-4df0-bd89-3c8f6f566b38"). InnerVolumeSpecName "kube-api-access-zwppc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:54:59 crc kubenswrapper[4558]: I0120 17:54:59.242304 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d8a20e0f-af9e-422e-b8a6-fd37ffc19335-kube-api-access-r9fls" (OuterVolumeSpecName: "kube-api-access-r9fls") pod "d8a20e0f-af9e-422e-b8a6-fd37ffc19335" (UID: "d8a20e0f-af9e-422e-b8a6-fd37ffc19335"). InnerVolumeSpecName "kube-api-access-r9fls". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:54:59 crc kubenswrapper[4558]: I0120 17:54:59.332145 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bqbph\" (UniqueName: \"kubernetes.io/projected/81fc72e8-7c47-45cf-8cbd-b77b7b0e9ed5-kube-api-access-bqbph\") pod \"81fc72e8-7c47-45cf-8cbd-b77b7b0e9ed5\" (UID: \"81fc72e8-7c47-45cf-8cbd-b77b7b0e9ed5\") " Jan 20 17:54:59 crc kubenswrapper[4558]: I0120 17:54:59.332242 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/81fc72e8-7c47-45cf-8cbd-b77b7b0e9ed5-operator-scripts\") pod \"81fc72e8-7c47-45cf-8cbd-b77b7b0e9ed5\" (UID: \"81fc72e8-7c47-45cf-8cbd-b77b7b0e9ed5\") " Jan 20 17:54:59 crc kubenswrapper[4558]: I0120 17:54:59.332298 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b467edba-4957-4d06-b48f-c88a2a580e82-operator-scripts\") pod \"b467edba-4957-4d06-b48f-c88a2a580e82\" (UID: \"b467edba-4957-4d06-b48f-c88a2a580e82\") " Jan 20 17:54:59 crc kubenswrapper[4558]: I0120 17:54:59.332497 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/962b2c8c-b0ef-4429-8aba-0fb24998839e-operator-scripts\") pod \"962b2c8c-b0ef-4429-8aba-0fb24998839e\" (UID: \"962b2c8c-b0ef-4429-8aba-0fb24998839e\") " Jan 20 17:54:59 crc kubenswrapper[4558]: I0120 17:54:59.332563 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8bd2r\" (UniqueName: \"kubernetes.io/projected/b467edba-4957-4d06-b48f-c88a2a580e82-kube-api-access-8bd2r\") pod \"b467edba-4957-4d06-b48f-c88a2a580e82\" (UID: \"b467edba-4957-4d06-b48f-c88a2a580e82\") " Jan 20 17:54:59 crc kubenswrapper[4558]: I0120 17:54:59.332932 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r9fls\" (UniqueName: \"kubernetes.io/projected/d8a20e0f-af9e-422e-b8a6-fd37ffc19335-kube-api-access-r9fls\") on node \"crc\" DevicePath \"\"" Jan 20 17:54:59 crc kubenswrapper[4558]: I0120 17:54:59.332953 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-55dk9\" (UniqueName: \"kubernetes.io/projected/962b2c8c-b0ef-4429-8aba-0fb24998839e-kube-api-access-55dk9\") on node \"crc\" DevicePath \"\"" Jan 20 17:54:59 crc kubenswrapper[4558]: I0120 17:54:59.332964 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zwppc\" (UniqueName: \"kubernetes.io/projected/39abd72a-05b8-4df0-bd89-3c8f6f566b38-kube-api-access-zwppc\") on node \"crc\" DevicePath \"\"" Jan 20 17:54:59 crc kubenswrapper[4558]: I0120 17:54:59.333690 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/81fc72e8-7c47-45cf-8cbd-b77b7b0e9ed5-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "81fc72e8-7c47-45cf-8cbd-b77b7b0e9ed5" (UID: "81fc72e8-7c47-45cf-8cbd-b77b7b0e9ed5"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:54:59 crc kubenswrapper[4558]: I0120 17:54:59.333871 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b467edba-4957-4d06-b48f-c88a2a580e82-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b467edba-4957-4d06-b48f-c88a2a580e82" (UID: "b467edba-4957-4d06-b48f-c88a2a580e82"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:54:59 crc kubenswrapper[4558]: I0120 17:54:59.333982 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/962b2c8c-b0ef-4429-8aba-0fb24998839e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "962b2c8c-b0ef-4429-8aba-0fb24998839e" (UID: "962b2c8c-b0ef-4429-8aba-0fb24998839e"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:54:59 crc kubenswrapper[4558]: I0120 17:54:59.337370 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b467edba-4957-4d06-b48f-c88a2a580e82-kube-api-access-8bd2r" (OuterVolumeSpecName: "kube-api-access-8bd2r") pod "b467edba-4957-4d06-b48f-c88a2a580e82" (UID: "b467edba-4957-4d06-b48f-c88a2a580e82"). InnerVolumeSpecName "kube-api-access-8bd2r". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:54:59 crc kubenswrapper[4558]: I0120 17:54:59.337487 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/81fc72e8-7c47-45cf-8cbd-b77b7b0e9ed5-kube-api-access-bqbph" (OuterVolumeSpecName: "kube-api-access-bqbph") pod "81fc72e8-7c47-45cf-8cbd-b77b7b0e9ed5" (UID: "81fc72e8-7c47-45cf-8cbd-b77b7b0e9ed5"). InnerVolumeSpecName "kube-api-access-bqbph". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:54:59 crc kubenswrapper[4558]: I0120 17:54:59.435884 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/962b2c8c-b0ef-4429-8aba-0fb24998839e-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:54:59 crc kubenswrapper[4558]: I0120 17:54:59.436148 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8bd2r\" (UniqueName: \"kubernetes.io/projected/b467edba-4957-4d06-b48f-c88a2a580e82-kube-api-access-8bd2r\") on node \"crc\" DevicePath \"\"" Jan 20 17:54:59 crc kubenswrapper[4558]: I0120 17:54:59.436263 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bqbph\" (UniqueName: \"kubernetes.io/projected/81fc72e8-7c47-45cf-8cbd-b77b7b0e9ed5-kube-api-access-bqbph\") on node \"crc\" DevicePath \"\"" Jan 20 17:54:59 crc kubenswrapper[4558]: I0120 17:54:59.436334 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/81fc72e8-7c47-45cf-8cbd-b77b7b0e9ed5-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:54:59 crc kubenswrapper[4558]: I0120 17:54:59.436398 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b467edba-4957-4d06-b48f-c88a2a580e82-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:54:59 crc kubenswrapper[4558]: I0120 17:54:59.704555 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-create-qxgzw" event={"ID":"b467edba-4957-4d06-b48f-c88a2a580e82","Type":"ContainerDied","Data":"1be7221c87ebb2233251fd88eecf54d176cc49cb2d66fae2565ee0c3f85b2eec"} Jan 20 17:54:59 crc kubenswrapper[4558]: I0120 17:54:59.704590 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-db-create-qxgzw" Jan 20 17:54:59 crc kubenswrapper[4558]: I0120 17:54:59.704616 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1be7221c87ebb2233251fd88eecf54d176cc49cb2d66fae2565ee0c3f85b2eec" Jan 20 17:54:59 crc kubenswrapper[4558]: I0120 17:54:59.706983 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-create-966g7" event={"ID":"d8a20e0f-af9e-422e-b8a6-fd37ffc19335","Type":"ContainerDied","Data":"0a091e4e9d00855f7aaab02c35a5305376275a031883fd1a4f78d3258303c6f0"} Jan 20 17:54:59 crc kubenswrapper[4558]: I0120 17:54:59.707005 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-db-create-966g7" Jan 20 17:54:59 crc kubenswrapper[4558]: I0120 17:54:59.707012 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0a091e4e9d00855f7aaab02c35a5305376275a031883fd1a4f78d3258303c6f0" Jan 20 17:54:59 crc kubenswrapper[4558]: I0120 17:54:59.709638 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-c1b8-account-create-update-5scts" Jan 20 17:54:59 crc kubenswrapper[4558]: I0120 17:54:59.709688 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-c1b8-account-create-update-5scts" event={"ID":"962b2c8c-b0ef-4429-8aba-0fb24998839e","Type":"ContainerDied","Data":"f0d05e3970ce9037f17c4ae0cea30d5649d62066e543314723b258484681e50f"} Jan 20 17:54:59 crc kubenswrapper[4558]: I0120 17:54:59.709745 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f0d05e3970ce9037f17c4ae0cea30d5649d62066e543314723b258484681e50f" Jan 20 17:54:59 crc kubenswrapper[4558]: I0120 17:54:59.714110 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-e4c5-account-create-update-hnffc" event={"ID":"81fc72e8-7c47-45cf-8cbd-b77b7b0e9ed5","Type":"ContainerDied","Data":"0098cf349b0ec0540fa6f2f4ba2721d1ae680743237ee8bbcf47f370e47bb674"} Jan 20 17:54:59 crc kubenswrapper[4558]: I0120 17:54:59.714186 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0098cf349b0ec0540fa6f2f4ba2721d1ae680743237ee8bbcf47f370e47bb674" Jan 20 17:54:59 crc kubenswrapper[4558]: I0120 17:54:59.714297 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-e4c5-account-create-update-hnffc" Jan 20 17:54:59 crc kubenswrapper[4558]: I0120 17:54:59.719550 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-fde5-account-create-update-dmprz" Jan 20 17:54:59 crc kubenswrapper[4558]: I0120 17:54:59.719835 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-fde5-account-create-update-dmprz" event={"ID":"39abd72a-05b8-4df0-bd89-3c8f6f566b38","Type":"ContainerDied","Data":"23469c28303afdd0f962f259b5b544ed39918ec2f408ec24cdba7cb25a0aaf3e"} Jan 20 17:54:59 crc kubenswrapper[4558]: I0120 17:54:59.720362 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="23469c28303afdd0f962f259b5b544ed39918ec2f408ec24cdba7cb25a0aaf3e" Jan 20 17:54:59 crc kubenswrapper[4558]: I0120 17:54:59.920936 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-db-sync-ktt4t" Jan 20 17:55:00 crc kubenswrapper[4558]: I0120 17:55:00.046407 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nj7r4\" (UniqueName: \"kubernetes.io/projected/8a9fc705-3a10-4e6d-8e0a-c1c8dfcb8bc5-kube-api-access-nj7r4\") pod \"8a9fc705-3a10-4e6d-8e0a-c1c8dfcb8bc5\" (UID: \"8a9fc705-3a10-4e6d-8e0a-c1c8dfcb8bc5\") " Jan 20 17:55:00 crc kubenswrapper[4558]: I0120 17:55:00.046730 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a9fc705-3a10-4e6d-8e0a-c1c8dfcb8bc5-combined-ca-bundle\") pod \"8a9fc705-3a10-4e6d-8e0a-c1c8dfcb8bc5\" (UID: \"8a9fc705-3a10-4e6d-8e0a-c1c8dfcb8bc5\") " Jan 20 17:55:00 crc kubenswrapper[4558]: I0120 17:55:00.046790 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a9fc705-3a10-4e6d-8e0a-c1c8dfcb8bc5-config-data\") pod \"8a9fc705-3a10-4e6d-8e0a-c1c8dfcb8bc5\" (UID: \"8a9fc705-3a10-4e6d-8e0a-c1c8dfcb8bc5\") " Jan 20 17:55:00 crc kubenswrapper[4558]: I0120 17:55:00.051466 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8a9fc705-3a10-4e6d-8e0a-c1c8dfcb8bc5-kube-api-access-nj7r4" (OuterVolumeSpecName: "kube-api-access-nj7r4") pod "8a9fc705-3a10-4e6d-8e0a-c1c8dfcb8bc5" (UID: "8a9fc705-3a10-4e6d-8e0a-c1c8dfcb8bc5"). InnerVolumeSpecName "kube-api-access-nj7r4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:55:00 crc kubenswrapper[4558]: I0120 17:55:00.069327 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a9fc705-3a10-4e6d-8e0a-c1c8dfcb8bc5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8a9fc705-3a10-4e6d-8e0a-c1c8dfcb8bc5" (UID: "8a9fc705-3a10-4e6d-8e0a-c1c8dfcb8bc5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:55:00 crc kubenswrapper[4558]: I0120 17:55:00.083371 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a9fc705-3a10-4e6d-8e0a-c1c8dfcb8bc5-config-data" (OuterVolumeSpecName: "config-data") pod "8a9fc705-3a10-4e6d-8e0a-c1c8dfcb8bc5" (UID: "8a9fc705-3a10-4e6d-8e0a-c1c8dfcb8bc5"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:55:00 crc kubenswrapper[4558]: I0120 17:55:00.148957 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a9fc705-3a10-4e6d-8e0a-c1c8dfcb8bc5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:00 crc kubenswrapper[4558]: I0120 17:55:00.149206 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a9fc705-3a10-4e6d-8e0a-c1c8dfcb8bc5-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:00 crc kubenswrapper[4558]: I0120 17:55:00.149269 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nj7r4\" (UniqueName: \"kubernetes.io/projected/8a9fc705-3a10-4e6d-8e0a-c1c8dfcb8bc5-kube-api-access-nj7r4\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:00 crc kubenswrapper[4558]: I0120 17:55:00.730966 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-db-sync-ktt4t" event={"ID":"8a9fc705-3a10-4e6d-8e0a-c1c8dfcb8bc5","Type":"ContainerDied","Data":"403caeae85e92d9aadbdf6ecb92c03f6772ed5be7dcc7352ec754bdb4de86beb"} Jan 20 17:55:00 crc kubenswrapper[4558]: I0120 17:55:00.731018 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="403caeae85e92d9aadbdf6ecb92c03f6772ed5be7dcc7352ec754bdb4de86beb" Jan 20 17:55:00 crc kubenswrapper[4558]: I0120 17:55:00.731064 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-db-sync-ktt4t" Jan 20 17:55:00 crc kubenswrapper[4558]: I0120 17:55:00.830122 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-k66gc"] Jan 20 17:55:00 crc kubenswrapper[4558]: E0120 17:55:00.830776 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b467edba-4957-4d06-b48f-c88a2a580e82" containerName="mariadb-database-create" Jan 20 17:55:00 crc kubenswrapper[4558]: I0120 17:55:00.830795 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b467edba-4957-4d06-b48f-c88a2a580e82" containerName="mariadb-database-create" Jan 20 17:55:00 crc kubenswrapper[4558]: E0120 17:55:00.830809 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19001f5f-e2af-482f-b182-0cf5d3297979" containerName="mariadb-database-create" Jan 20 17:55:00 crc kubenswrapper[4558]: I0120 17:55:00.830816 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="19001f5f-e2af-482f-b182-0cf5d3297979" containerName="mariadb-database-create" Jan 20 17:55:00 crc kubenswrapper[4558]: E0120 17:55:00.830830 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81fc72e8-7c47-45cf-8cbd-b77b7b0e9ed5" containerName="mariadb-account-create-update" Jan 20 17:55:00 crc kubenswrapper[4558]: I0120 17:55:00.830837 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="81fc72e8-7c47-45cf-8cbd-b77b7b0e9ed5" containerName="mariadb-account-create-update" Jan 20 17:55:00 crc kubenswrapper[4558]: E0120 17:55:00.830852 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="962b2c8c-b0ef-4429-8aba-0fb24998839e" containerName="mariadb-account-create-update" Jan 20 17:55:00 crc kubenswrapper[4558]: I0120 17:55:00.830858 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="962b2c8c-b0ef-4429-8aba-0fb24998839e" containerName="mariadb-account-create-update" Jan 20 17:55:00 crc kubenswrapper[4558]: E0120 17:55:00.830870 4558 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="8a9fc705-3a10-4e6d-8e0a-c1c8dfcb8bc5" containerName="keystone-db-sync" Jan 20 17:55:00 crc kubenswrapper[4558]: I0120 17:55:00.830875 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a9fc705-3a10-4e6d-8e0a-c1c8dfcb8bc5" containerName="keystone-db-sync" Jan 20 17:55:00 crc kubenswrapper[4558]: E0120 17:55:00.830886 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8a20e0f-af9e-422e-b8a6-fd37ffc19335" containerName="mariadb-database-create" Jan 20 17:55:00 crc kubenswrapper[4558]: I0120 17:55:00.830893 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8a20e0f-af9e-422e-b8a6-fd37ffc19335" containerName="mariadb-database-create" Jan 20 17:55:00 crc kubenswrapper[4558]: E0120 17:55:00.830902 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39abd72a-05b8-4df0-bd89-3c8f6f566b38" containerName="mariadb-account-create-update" Jan 20 17:55:00 crc kubenswrapper[4558]: I0120 17:55:00.830908 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="39abd72a-05b8-4df0-bd89-3c8f6f566b38" containerName="mariadb-account-create-update" Jan 20 17:55:00 crc kubenswrapper[4558]: I0120 17:55:00.831064 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8a9fc705-3a10-4e6d-8e0a-c1c8dfcb8bc5" containerName="keystone-db-sync" Jan 20 17:55:00 crc kubenswrapper[4558]: I0120 17:55:00.831087 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="81fc72e8-7c47-45cf-8cbd-b77b7b0e9ed5" containerName="mariadb-account-create-update" Jan 20 17:55:00 crc kubenswrapper[4558]: I0120 17:55:00.831096 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="962b2c8c-b0ef-4429-8aba-0fb24998839e" containerName="mariadb-account-create-update" Jan 20 17:55:00 crc kubenswrapper[4558]: I0120 17:55:00.831108 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="39abd72a-05b8-4df0-bd89-3c8f6f566b38" containerName="mariadb-account-create-update" Jan 20 17:55:00 crc kubenswrapper[4558]: I0120 17:55:00.831116 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b467edba-4957-4d06-b48f-c88a2a580e82" containerName="mariadb-database-create" Jan 20 17:55:00 crc kubenswrapper[4558]: I0120 17:55:00.831126 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d8a20e0f-af9e-422e-b8a6-fd37ffc19335" containerName="mariadb-database-create" Jan 20 17:55:00 crc kubenswrapper[4558]: I0120 17:55:00.831138 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="19001f5f-e2af-482f-b182-0cf5d3297979" containerName="mariadb-database-create" Jan 20 17:55:00 crc kubenswrapper[4558]: I0120 17:55:00.831673 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-k66gc" Jan 20 17:55:00 crc kubenswrapper[4558]: I0120 17:55:00.834396 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-scripts" Jan 20 17:55:00 crc kubenswrapper[4558]: I0120 17:55:00.834908 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-config-data" Jan 20 17:55:00 crc kubenswrapper[4558]: I0120 17:55:00.835211 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone" Jan 20 17:55:00 crc kubenswrapper[4558]: I0120 17:55:00.835388 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"osp-secret" Jan 20 17:55:00 crc kubenswrapper[4558]: I0120 17:55:00.835523 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-keystone-dockercfg-bqxt2" Jan 20 17:55:00 crc kubenswrapper[4558]: I0120 17:55:00.855005 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-k66gc"] Jan 20 17:55:00 crc kubenswrapper[4558]: I0120 17:55:00.963147 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0353f1c5-452b-413c-981b-84e774d099eb-config-data\") pod \"keystone-bootstrap-k66gc\" (UID: \"0353f1c5-452b-413c-981b-84e774d099eb\") " pod="openstack-kuttl-tests/keystone-bootstrap-k66gc" Jan 20 17:55:00 crc kubenswrapper[4558]: I0120 17:55:00.963308 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-49cn5\" (UniqueName: \"kubernetes.io/projected/0353f1c5-452b-413c-981b-84e774d099eb-kube-api-access-49cn5\") pod \"keystone-bootstrap-k66gc\" (UID: \"0353f1c5-452b-413c-981b-84e774d099eb\") " pod="openstack-kuttl-tests/keystone-bootstrap-k66gc" Jan 20 17:55:00 crc kubenswrapper[4558]: I0120 17:55:00.963364 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0353f1c5-452b-413c-981b-84e774d099eb-scripts\") pod \"keystone-bootstrap-k66gc\" (UID: \"0353f1c5-452b-413c-981b-84e774d099eb\") " pod="openstack-kuttl-tests/keystone-bootstrap-k66gc" Jan 20 17:55:00 crc kubenswrapper[4558]: I0120 17:55:00.963389 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/0353f1c5-452b-413c-981b-84e774d099eb-credential-keys\") pod \"keystone-bootstrap-k66gc\" (UID: \"0353f1c5-452b-413c-981b-84e774d099eb\") " pod="openstack-kuttl-tests/keystone-bootstrap-k66gc" Jan 20 17:55:00 crc kubenswrapper[4558]: I0120 17:55:00.963411 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0353f1c5-452b-413c-981b-84e774d099eb-combined-ca-bundle\") pod \"keystone-bootstrap-k66gc\" (UID: \"0353f1c5-452b-413c-981b-84e774d099eb\") " pod="openstack-kuttl-tests/keystone-bootstrap-k66gc" Jan 20 17:55:00 crc kubenswrapper[4558]: I0120 17:55:00.964156 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0353f1c5-452b-413c-981b-84e774d099eb-fernet-keys\") pod \"keystone-bootstrap-k66gc\" (UID: \"0353f1c5-452b-413c-981b-84e774d099eb\") " 
pod="openstack-kuttl-tests/keystone-bootstrap-k66gc" Jan 20 17:55:00 crc kubenswrapper[4558]: I0120 17:55:00.994214 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-db-sync-5b76z"] Jan 20 17:55:00 crc kubenswrapper[4558]: I0120 17:55:00.995392 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-db-sync-5b76z" Jan 20 17:55:00 crc kubenswrapper[4558]: I0120 17:55:00.997486 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-httpd-config" Jan 20 17:55:00 crc kubenswrapper[4558]: I0120 17:55:00.997578 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-neutron-dockercfg-j6g6q" Jan 20 17:55:00 crc kubenswrapper[4558]: I0120 17:55:00.997730 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-config" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.001757 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-db-sync-5b76z"] Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.024359 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.026493 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.033395 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.035858 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.036227 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.066779 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-49cn5\" (UniqueName: \"kubernetes.io/projected/0353f1c5-452b-413c-981b-84e774d099eb-kube-api-access-49cn5\") pod \"keystone-bootstrap-k66gc\" (UID: \"0353f1c5-452b-413c-981b-84e774d099eb\") " pod="openstack-kuttl-tests/keystone-bootstrap-k66gc" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.066838 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0353f1c5-452b-413c-981b-84e774d099eb-scripts\") pod \"keystone-bootstrap-k66gc\" (UID: \"0353f1c5-452b-413c-981b-84e774d099eb\") " pod="openstack-kuttl-tests/keystone-bootstrap-k66gc" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.066867 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/0353f1c5-452b-413c-981b-84e774d099eb-credential-keys\") pod \"keystone-bootstrap-k66gc\" (UID: \"0353f1c5-452b-413c-981b-84e774d099eb\") " pod="openstack-kuttl-tests/keystone-bootstrap-k66gc" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.066887 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0353f1c5-452b-413c-981b-84e774d099eb-combined-ca-bundle\") pod \"keystone-bootstrap-k66gc\" (UID: \"0353f1c5-452b-413c-981b-84e774d099eb\") " pod="openstack-kuttl-tests/keystone-bootstrap-k66gc" Jan 20 17:55:01 
crc kubenswrapper[4558]: I0120 17:55:01.066925 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0353f1c5-452b-413c-981b-84e774d099eb-fernet-keys\") pod \"keystone-bootstrap-k66gc\" (UID: \"0353f1c5-452b-413c-981b-84e774d099eb\") " pod="openstack-kuttl-tests/keystone-bootstrap-k66gc" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.066969 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0353f1c5-452b-413c-981b-84e774d099eb-config-data\") pod \"keystone-bootstrap-k66gc\" (UID: \"0353f1c5-452b-413c-981b-84e774d099eb\") " pod="openstack-kuttl-tests/keystone-bootstrap-k66gc" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.071815 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0353f1c5-452b-413c-981b-84e774d099eb-config-data\") pod \"keystone-bootstrap-k66gc\" (UID: \"0353f1c5-452b-413c-981b-84e774d099eb\") " pod="openstack-kuttl-tests/keystone-bootstrap-k66gc" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.076028 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/0353f1c5-452b-413c-981b-84e774d099eb-credential-keys\") pod \"keystone-bootstrap-k66gc\" (UID: \"0353f1c5-452b-413c-981b-84e774d099eb\") " pod="openstack-kuttl-tests/keystone-bootstrap-k66gc" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.092428 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0353f1c5-452b-413c-981b-84e774d099eb-combined-ca-bundle\") pod \"keystone-bootstrap-k66gc\" (UID: \"0353f1c5-452b-413c-981b-84e774d099eb\") " pod="openstack-kuttl-tests/keystone-bootstrap-k66gc" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.102349 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-49cn5\" (UniqueName: \"kubernetes.io/projected/0353f1c5-452b-413c-981b-84e774d099eb-kube-api-access-49cn5\") pod \"keystone-bootstrap-k66gc\" (UID: \"0353f1c5-452b-413c-981b-84e774d099eb\") " pod="openstack-kuttl-tests/keystone-bootstrap-k66gc" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.107693 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0353f1c5-452b-413c-981b-84e774d099eb-fernet-keys\") pod \"keystone-bootstrap-k66gc\" (UID: \"0353f1c5-452b-413c-981b-84e774d099eb\") " pod="openstack-kuttl-tests/keystone-bootstrap-k66gc" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.118264 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0353f1c5-452b-413c-981b-84e774d099eb-scripts\") pod \"keystone-bootstrap-k66gc\" (UID: \"0353f1c5-452b-413c-981b-84e774d099eb\") " pod="openstack-kuttl-tests/keystone-bootstrap-k66gc" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.124761 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-db-sync-7l4r5"] Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.126005 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-db-sync-7l4r5" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.134827 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-cinder-dockercfg-xbrdb" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.135284 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-scripts" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.135857 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-config-data" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.147575 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-k66gc" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.162139 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-db-sync-hsc9c"] Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.167465 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-db-sync-hsc9c" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.169686 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-config-data" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.171717 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-barbican-dockercfg-wxw9v" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.186269 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-db-sync-hsc9c"] Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.195563 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-db-sync-7l4r5"] Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.200727 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f963d040-98b2-46b4-a118-072a80ed0d53-combined-ca-bundle\") pod \"neutron-db-sync-5b76z\" (UID: \"f963d040-98b2-46b4-a118-072a80ed0d53\") " pod="openstack-kuttl-tests/neutron-db-sync-5b76z" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.200769 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/63d78039-84d1-41df-b0dc-3d0b01cc2a61-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"63d78039-84d1-41df-b0dc-3d0b01cc2a61\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.200813 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a8e183b4-694a-4ce9-90e9-1ff11d52ff3c-etc-machine-id\") pod \"cinder-db-sync-7l4r5\" (UID: \"a8e183b4-694a-4ce9-90e9-1ff11d52ff3c\") " pod="openstack-kuttl-tests/cinder-db-sync-7l4r5" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.200892 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/63d78039-84d1-41df-b0dc-3d0b01cc2a61-scripts\") pod \"ceilometer-0\" (UID: \"63d78039-84d1-41df-b0dc-3d0b01cc2a61\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.200921 4558 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/63d78039-84d1-41df-b0dc-3d0b01cc2a61-config-data\") pod \"ceilometer-0\" (UID: \"63d78039-84d1-41df-b0dc-3d0b01cc2a61\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.200960 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hx5rb\" (UniqueName: \"kubernetes.io/projected/a8e183b4-694a-4ce9-90e9-1ff11d52ff3c-kube-api-access-hx5rb\") pod \"cinder-db-sync-7l4r5\" (UID: \"a8e183b4-694a-4ce9-90e9-1ff11d52ff3c\") " pod="openstack-kuttl-tests/cinder-db-sync-7l4r5" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.201012 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/63d78039-84d1-41df-b0dc-3d0b01cc2a61-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"63d78039-84d1-41df-b0dc-3d0b01cc2a61\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.201047 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/63d78039-84d1-41df-b0dc-3d0b01cc2a61-run-httpd\") pod \"ceilometer-0\" (UID: \"63d78039-84d1-41df-b0dc-3d0b01cc2a61\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.201103 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8e183b4-694a-4ce9-90e9-1ff11d52ff3c-combined-ca-bundle\") pod \"cinder-db-sync-7l4r5\" (UID: \"a8e183b4-694a-4ce9-90e9-1ff11d52ff3c\") " pod="openstack-kuttl-tests/cinder-db-sync-7l4r5" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.201127 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a8e183b4-694a-4ce9-90e9-1ff11d52ff3c-db-sync-config-data\") pod \"cinder-db-sync-7l4r5\" (UID: \"a8e183b4-694a-4ce9-90e9-1ff11d52ff3c\") " pod="openstack-kuttl-tests/cinder-db-sync-7l4r5" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.201178 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/f963d040-98b2-46b4-a118-072a80ed0d53-config\") pod \"neutron-db-sync-5b76z\" (UID: \"f963d040-98b2-46b4-a118-072a80ed0d53\") " pod="openstack-kuttl-tests/neutron-db-sync-5b76z" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.201207 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a8e183b4-694a-4ce9-90e9-1ff11d52ff3c-scripts\") pod \"cinder-db-sync-7l4r5\" (UID: \"a8e183b4-694a-4ce9-90e9-1ff11d52ff3c\") " pod="openstack-kuttl-tests/cinder-db-sync-7l4r5" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.201379 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8e183b4-694a-4ce9-90e9-1ff11d52ff3c-config-data\") pod \"cinder-db-sync-7l4r5\" (UID: \"a8e183b4-694a-4ce9-90e9-1ff11d52ff3c\") " pod="openstack-kuttl-tests/cinder-db-sync-7l4r5" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.202130 4558 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/63d78039-84d1-41df-b0dc-3d0b01cc2a61-log-httpd\") pod \"ceilometer-0\" (UID: \"63d78039-84d1-41df-b0dc-3d0b01cc2a61\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.202229 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7hwxm\" (UniqueName: \"kubernetes.io/projected/63d78039-84d1-41df-b0dc-3d0b01cc2a61-kube-api-access-7hwxm\") pod \"ceilometer-0\" (UID: \"63d78039-84d1-41df-b0dc-3d0b01cc2a61\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.202258 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tpm4g\" (UniqueName: \"kubernetes.io/projected/f963d040-98b2-46b4-a118-072a80ed0d53-kube-api-access-tpm4g\") pod \"neutron-db-sync-5b76z\" (UID: \"f963d040-98b2-46b4-a118-072a80ed0d53\") " pod="openstack-kuttl-tests/neutron-db-sync-5b76z" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.219759 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/placement-db-sync-b22j5"] Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.220833 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-db-sync-b22j5" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.223504 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"placement-scripts" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.224902 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"placement-config-data" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.226689 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"placement-placement-dockercfg-whsds" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.234223 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-db-sync-b22j5"] Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.308316 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/d89e519c-e68a-48fc-8a7d-09d4d094d251-db-sync-config-data\") pod \"barbican-db-sync-hsc9c\" (UID: \"d89e519c-e68a-48fc-8a7d-09d4d094d251\") " pod="openstack-kuttl-tests/barbican-db-sync-hsc9c" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.308383 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7hwxm\" (UniqueName: \"kubernetes.io/projected/63d78039-84d1-41df-b0dc-3d0b01cc2a61-kube-api-access-7hwxm\") pod \"ceilometer-0\" (UID: \"63d78039-84d1-41df-b0dc-3d0b01cc2a61\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.308414 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tpm4g\" (UniqueName: \"kubernetes.io/projected/f963d040-98b2-46b4-a118-072a80ed0d53-kube-api-access-tpm4g\") pod \"neutron-db-sync-5b76z\" (UID: \"f963d040-98b2-46b4-a118-072a80ed0d53\") " pod="openstack-kuttl-tests/neutron-db-sync-5b76z" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.308481 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-d68lr\" (UniqueName: \"kubernetes.io/projected/d89e519c-e68a-48fc-8a7d-09d4d094d251-kube-api-access-d68lr\") pod \"barbican-db-sync-hsc9c\" (UID: \"d89e519c-e68a-48fc-8a7d-09d4d094d251\") " pod="openstack-kuttl-tests/barbican-db-sync-hsc9c" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.308512 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f963d040-98b2-46b4-a118-072a80ed0d53-combined-ca-bundle\") pod \"neutron-db-sync-5b76z\" (UID: \"f963d040-98b2-46b4-a118-072a80ed0d53\") " pod="openstack-kuttl-tests/neutron-db-sync-5b76z" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.308531 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/63d78039-84d1-41df-b0dc-3d0b01cc2a61-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"63d78039-84d1-41df-b0dc-3d0b01cc2a61\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.308565 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a8e183b4-694a-4ce9-90e9-1ff11d52ff3c-etc-machine-id\") pod \"cinder-db-sync-7l4r5\" (UID: \"a8e183b4-694a-4ce9-90e9-1ff11d52ff3c\") " pod="openstack-kuttl-tests/cinder-db-sync-7l4r5" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.308606 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4571f178-ad12-4a67-a9a4-0e2cca1e2e6f-combined-ca-bundle\") pod \"placement-db-sync-b22j5\" (UID: \"4571f178-ad12-4a67-a9a4-0e2cca1e2e6f\") " pod="openstack-kuttl-tests/placement-db-sync-b22j5" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.308637 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/63d78039-84d1-41df-b0dc-3d0b01cc2a61-scripts\") pod \"ceilometer-0\" (UID: \"63d78039-84d1-41df-b0dc-3d0b01cc2a61\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.308662 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/63d78039-84d1-41df-b0dc-3d0b01cc2a61-config-data\") pod \"ceilometer-0\" (UID: \"63d78039-84d1-41df-b0dc-3d0b01cc2a61\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.308682 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4571f178-ad12-4a67-a9a4-0e2cca1e2e6f-scripts\") pod \"placement-db-sync-b22j5\" (UID: \"4571f178-ad12-4a67-a9a4-0e2cca1e2e6f\") " pod="openstack-kuttl-tests/placement-db-sync-b22j5" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.308708 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t46kl\" (UniqueName: \"kubernetes.io/projected/4571f178-ad12-4a67-a9a4-0e2cca1e2e6f-kube-api-access-t46kl\") pod \"placement-db-sync-b22j5\" (UID: \"4571f178-ad12-4a67-a9a4-0e2cca1e2e6f\") " pod="openstack-kuttl-tests/placement-db-sync-b22j5" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.308725 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hx5rb\" (UniqueName: 
\"kubernetes.io/projected/a8e183b4-694a-4ce9-90e9-1ff11d52ff3c-kube-api-access-hx5rb\") pod \"cinder-db-sync-7l4r5\" (UID: \"a8e183b4-694a-4ce9-90e9-1ff11d52ff3c\") " pod="openstack-kuttl-tests/cinder-db-sync-7l4r5" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.308768 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/63d78039-84d1-41df-b0dc-3d0b01cc2a61-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"63d78039-84d1-41df-b0dc-3d0b01cc2a61\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.308791 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4571f178-ad12-4a67-a9a4-0e2cca1e2e6f-config-data\") pod \"placement-db-sync-b22j5\" (UID: \"4571f178-ad12-4a67-a9a4-0e2cca1e2e6f\") " pod="openstack-kuttl-tests/placement-db-sync-b22j5" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.308814 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/63d78039-84d1-41df-b0dc-3d0b01cc2a61-run-httpd\") pod \"ceilometer-0\" (UID: \"63d78039-84d1-41df-b0dc-3d0b01cc2a61\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.308852 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8e183b4-694a-4ce9-90e9-1ff11d52ff3c-combined-ca-bundle\") pod \"cinder-db-sync-7l4r5\" (UID: \"a8e183b4-694a-4ce9-90e9-1ff11d52ff3c\") " pod="openstack-kuttl-tests/cinder-db-sync-7l4r5" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.308871 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a8e183b4-694a-4ce9-90e9-1ff11d52ff3c-db-sync-config-data\") pod \"cinder-db-sync-7l4r5\" (UID: \"a8e183b4-694a-4ce9-90e9-1ff11d52ff3c\") " pod="openstack-kuttl-tests/cinder-db-sync-7l4r5" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.308895 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d89e519c-e68a-48fc-8a7d-09d4d094d251-combined-ca-bundle\") pod \"barbican-db-sync-hsc9c\" (UID: \"d89e519c-e68a-48fc-8a7d-09d4d094d251\") " pod="openstack-kuttl-tests/barbican-db-sync-hsc9c" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.308920 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/f963d040-98b2-46b4-a118-072a80ed0d53-config\") pod \"neutron-db-sync-5b76z\" (UID: \"f963d040-98b2-46b4-a118-072a80ed0d53\") " pod="openstack-kuttl-tests/neutron-db-sync-5b76z" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.308948 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a8e183b4-694a-4ce9-90e9-1ff11d52ff3c-scripts\") pod \"cinder-db-sync-7l4r5\" (UID: \"a8e183b4-694a-4ce9-90e9-1ff11d52ff3c\") " pod="openstack-kuttl-tests/cinder-db-sync-7l4r5" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.308982 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8e183b4-694a-4ce9-90e9-1ff11d52ff3c-config-data\") pod 
\"cinder-db-sync-7l4r5\" (UID: \"a8e183b4-694a-4ce9-90e9-1ff11d52ff3c\") " pod="openstack-kuttl-tests/cinder-db-sync-7l4r5" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.309005 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4571f178-ad12-4a67-a9a4-0e2cca1e2e6f-logs\") pod \"placement-db-sync-b22j5\" (UID: \"4571f178-ad12-4a67-a9a4-0e2cca1e2e6f\") " pod="openstack-kuttl-tests/placement-db-sync-b22j5" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.309031 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/63d78039-84d1-41df-b0dc-3d0b01cc2a61-log-httpd\") pod \"ceilometer-0\" (UID: \"63d78039-84d1-41df-b0dc-3d0b01cc2a61\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.309473 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/63d78039-84d1-41df-b0dc-3d0b01cc2a61-log-httpd\") pod \"ceilometer-0\" (UID: \"63d78039-84d1-41df-b0dc-3d0b01cc2a61\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.310735 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/63d78039-84d1-41df-b0dc-3d0b01cc2a61-run-httpd\") pod \"ceilometer-0\" (UID: \"63d78039-84d1-41df-b0dc-3d0b01cc2a61\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.311083 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a8e183b4-694a-4ce9-90e9-1ff11d52ff3c-etc-machine-id\") pod \"cinder-db-sync-7l4r5\" (UID: \"a8e183b4-694a-4ce9-90e9-1ff11d52ff3c\") " pod="openstack-kuttl-tests/cinder-db-sync-7l4r5" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.316849 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/63d78039-84d1-41df-b0dc-3d0b01cc2a61-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"63d78039-84d1-41df-b0dc-3d0b01cc2a61\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.316941 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/f963d040-98b2-46b4-a118-072a80ed0d53-config\") pod \"neutron-db-sync-5b76z\" (UID: \"f963d040-98b2-46b4-a118-072a80ed0d53\") " pod="openstack-kuttl-tests/neutron-db-sync-5b76z" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.319770 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/63d78039-84d1-41df-b0dc-3d0b01cc2a61-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"63d78039-84d1-41df-b0dc-3d0b01cc2a61\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.320712 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a8e183b4-694a-4ce9-90e9-1ff11d52ff3c-scripts\") pod \"cinder-db-sync-7l4r5\" (UID: \"a8e183b4-694a-4ce9-90e9-1ff11d52ff3c\") " pod="openstack-kuttl-tests/cinder-db-sync-7l4r5" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.320829 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f963d040-98b2-46b4-a118-072a80ed0d53-combined-ca-bundle\") pod \"neutron-db-sync-5b76z\" (UID: \"f963d040-98b2-46b4-a118-072a80ed0d53\") " pod="openstack-kuttl-tests/neutron-db-sync-5b76z" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.321021 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8e183b4-694a-4ce9-90e9-1ff11d52ff3c-combined-ca-bundle\") pod \"cinder-db-sync-7l4r5\" (UID: \"a8e183b4-694a-4ce9-90e9-1ff11d52ff3c\") " pod="openstack-kuttl-tests/cinder-db-sync-7l4r5" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.321333 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8e183b4-694a-4ce9-90e9-1ff11d52ff3c-config-data\") pod \"cinder-db-sync-7l4r5\" (UID: \"a8e183b4-694a-4ce9-90e9-1ff11d52ff3c\") " pod="openstack-kuttl-tests/cinder-db-sync-7l4r5" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.322873 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/63d78039-84d1-41df-b0dc-3d0b01cc2a61-scripts\") pod \"ceilometer-0\" (UID: \"63d78039-84d1-41df-b0dc-3d0b01cc2a61\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.323748 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/63d78039-84d1-41df-b0dc-3d0b01cc2a61-config-data\") pod \"ceilometer-0\" (UID: \"63d78039-84d1-41df-b0dc-3d0b01cc2a61\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.324129 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a8e183b4-694a-4ce9-90e9-1ff11d52ff3c-db-sync-config-data\") pod \"cinder-db-sync-7l4r5\" (UID: \"a8e183b4-694a-4ce9-90e9-1ff11d52ff3c\") " pod="openstack-kuttl-tests/cinder-db-sync-7l4r5" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.324424 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7hwxm\" (UniqueName: \"kubernetes.io/projected/63d78039-84d1-41df-b0dc-3d0b01cc2a61-kube-api-access-7hwxm\") pod \"ceilometer-0\" (UID: \"63d78039-84d1-41df-b0dc-3d0b01cc2a61\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.325451 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tpm4g\" (UniqueName: \"kubernetes.io/projected/f963d040-98b2-46b4-a118-072a80ed0d53-kube-api-access-tpm4g\") pod \"neutron-db-sync-5b76z\" (UID: \"f963d040-98b2-46b4-a118-072a80ed0d53\") " pod="openstack-kuttl-tests/neutron-db-sync-5b76z" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.325523 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hx5rb\" (UniqueName: \"kubernetes.io/projected/a8e183b4-694a-4ce9-90e9-1ff11d52ff3c-kube-api-access-hx5rb\") pod \"cinder-db-sync-7l4r5\" (UID: \"a8e183b4-694a-4ce9-90e9-1ff11d52ff3c\") " pod="openstack-kuttl-tests/cinder-db-sync-7l4r5" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.359004 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.410987 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/d89e519c-e68a-48fc-8a7d-09d4d094d251-db-sync-config-data\") pod \"barbican-db-sync-hsc9c\" (UID: \"d89e519c-e68a-48fc-8a7d-09d4d094d251\") " pod="openstack-kuttl-tests/barbican-db-sync-hsc9c" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.411102 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d68lr\" (UniqueName: \"kubernetes.io/projected/d89e519c-e68a-48fc-8a7d-09d4d094d251-kube-api-access-d68lr\") pod \"barbican-db-sync-hsc9c\" (UID: \"d89e519c-e68a-48fc-8a7d-09d4d094d251\") " pod="openstack-kuttl-tests/barbican-db-sync-hsc9c" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.411231 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4571f178-ad12-4a67-a9a4-0e2cca1e2e6f-combined-ca-bundle\") pod \"placement-db-sync-b22j5\" (UID: \"4571f178-ad12-4a67-a9a4-0e2cca1e2e6f\") " pod="openstack-kuttl-tests/placement-db-sync-b22j5" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.412028 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4571f178-ad12-4a67-a9a4-0e2cca1e2e6f-scripts\") pod \"placement-db-sync-b22j5\" (UID: \"4571f178-ad12-4a67-a9a4-0e2cca1e2e6f\") " pod="openstack-kuttl-tests/placement-db-sync-b22j5" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.412115 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t46kl\" (UniqueName: \"kubernetes.io/projected/4571f178-ad12-4a67-a9a4-0e2cca1e2e6f-kube-api-access-t46kl\") pod \"placement-db-sync-b22j5\" (UID: \"4571f178-ad12-4a67-a9a4-0e2cca1e2e6f\") " pod="openstack-kuttl-tests/placement-db-sync-b22j5" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.412200 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4571f178-ad12-4a67-a9a4-0e2cca1e2e6f-config-data\") pod \"placement-db-sync-b22j5\" (UID: \"4571f178-ad12-4a67-a9a4-0e2cca1e2e6f\") " pod="openstack-kuttl-tests/placement-db-sync-b22j5" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.412296 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d89e519c-e68a-48fc-8a7d-09d4d094d251-combined-ca-bundle\") pod \"barbican-db-sync-hsc9c\" (UID: \"d89e519c-e68a-48fc-8a7d-09d4d094d251\") " pod="openstack-kuttl-tests/barbican-db-sync-hsc9c" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.412364 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4571f178-ad12-4a67-a9a4-0e2cca1e2e6f-logs\") pod \"placement-db-sync-b22j5\" (UID: \"4571f178-ad12-4a67-a9a4-0e2cca1e2e6f\") " pod="openstack-kuttl-tests/placement-db-sync-b22j5" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.413614 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4571f178-ad12-4a67-a9a4-0e2cca1e2e6f-logs\") pod \"placement-db-sync-b22j5\" (UID: \"4571f178-ad12-4a67-a9a4-0e2cca1e2e6f\") " pod="openstack-kuttl-tests/placement-db-sync-b22j5" Jan 
20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.414913 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/d89e519c-e68a-48fc-8a7d-09d4d094d251-db-sync-config-data\") pod \"barbican-db-sync-hsc9c\" (UID: \"d89e519c-e68a-48fc-8a7d-09d4d094d251\") " pod="openstack-kuttl-tests/barbican-db-sync-hsc9c" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.415283 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4571f178-ad12-4a67-a9a4-0e2cca1e2e6f-scripts\") pod \"placement-db-sync-b22j5\" (UID: \"4571f178-ad12-4a67-a9a4-0e2cca1e2e6f\") " pod="openstack-kuttl-tests/placement-db-sync-b22j5" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.420937 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d89e519c-e68a-48fc-8a7d-09d4d094d251-combined-ca-bundle\") pod \"barbican-db-sync-hsc9c\" (UID: \"d89e519c-e68a-48fc-8a7d-09d4d094d251\") " pod="openstack-kuttl-tests/barbican-db-sync-hsc9c" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.421209 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4571f178-ad12-4a67-a9a4-0e2cca1e2e6f-config-data\") pod \"placement-db-sync-b22j5\" (UID: \"4571f178-ad12-4a67-a9a4-0e2cca1e2e6f\") " pod="openstack-kuttl-tests/placement-db-sync-b22j5" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.421217 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4571f178-ad12-4a67-a9a4-0e2cca1e2e6f-combined-ca-bundle\") pod \"placement-db-sync-b22j5\" (UID: \"4571f178-ad12-4a67-a9a4-0e2cca1e2e6f\") " pod="openstack-kuttl-tests/placement-db-sync-b22j5" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.429962 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d68lr\" (UniqueName: \"kubernetes.io/projected/d89e519c-e68a-48fc-8a7d-09d4d094d251-kube-api-access-d68lr\") pod \"barbican-db-sync-hsc9c\" (UID: \"d89e519c-e68a-48fc-8a7d-09d4d094d251\") " pod="openstack-kuttl-tests/barbican-db-sync-hsc9c" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.432893 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t46kl\" (UniqueName: \"kubernetes.io/projected/4571f178-ad12-4a67-a9a4-0e2cca1e2e6f-kube-api-access-t46kl\") pod \"placement-db-sync-b22j5\" (UID: \"4571f178-ad12-4a67-a9a4-0e2cca1e2e6f\") " pod="openstack-kuttl-tests/placement-db-sync-b22j5" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.517678 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-db-sync-7l4r5" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.541570 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-db-sync-hsc9c" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.569204 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-db-sync-b22j5" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.581050 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-k66gc"] Jan 20 17:55:01 crc kubenswrapper[4558]: W0120 17:55:01.595354 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0353f1c5_452b_413c_981b_84e774d099eb.slice/crio-3290922e6842e4b722e2f0a0474bd66c94db36b62b9fa48501a785c8fe30e3cd WatchSource:0}: Error finding container 3290922e6842e4b722e2f0a0474bd66c94db36b62b9fa48501a785c8fe30e3cd: Status 404 returned error can't find the container with id 3290922e6842e4b722e2f0a0474bd66c94db36b62b9fa48501a785c8fe30e3cd Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.624025 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-db-sync-5b76z" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.741653 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-k66gc" event={"ID":"0353f1c5-452b-413c-981b-84e774d099eb","Type":"ContainerStarted","Data":"3290922e6842e4b722e2f0a0474bd66c94db36b62b9fa48501a785c8fe30e3cd"} Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.790372 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:55:01 crc kubenswrapper[4558]: W0120 17:55:01.810335 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod63d78039_84d1_41df_b0dc_3d0b01cc2a61.slice/crio-574ebb66e35d9d21611416ae8633182d5a5fa517914c62911726888394f50a1d WatchSource:0}: Error finding container 574ebb66e35d9d21611416ae8633182d5a5fa517914c62911726888394f50a1d: Status 404 returned error can't find the container with id 574ebb66e35d9d21611416ae8633182d5a5fa517914c62911726888394f50a1d Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.813712 4558 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.904177 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.910773 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.914369 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-public-svc" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.914656 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-glance-dockercfg-g64r2" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.915263 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-external-config-data" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.915449 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-scripts" Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.917410 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:55:01 crc kubenswrapper[4558]: I0120 17:55:01.977201 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-db-sync-7l4r5"] Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.009125 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.010742 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.024679 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6879da7c-dbb2-4843-a94b-a24dc6aa6cc9-scripts\") pod \"glance-default-external-api-0\" (UID: \"6879da7c-dbb2-4843-a94b-a24dc6aa6cc9\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.024756 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"6879da7c-dbb2-4843-a94b-a24dc6aa6cc9\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.024807 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6879da7c-dbb2-4843-a94b-a24dc6aa6cc9-logs\") pod \"glance-default-external-api-0\" (UID: \"6879da7c-dbb2-4843-a94b-a24dc6aa6cc9\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.024880 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vqgj7\" (UniqueName: \"kubernetes.io/projected/6879da7c-dbb2-4843-a94b-a24dc6aa6cc9-kube-api-access-vqgj7\") pod \"glance-default-external-api-0\" (UID: \"6879da7c-dbb2-4843-a94b-a24dc6aa6cc9\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.024906 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6879da7c-dbb2-4843-a94b-a24dc6aa6cc9-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"6879da7c-dbb2-4843-a94b-a24dc6aa6cc9\") " 
pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.024929 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6879da7c-dbb2-4843-a94b-a24dc6aa6cc9-config-data\") pod \"glance-default-external-api-0\" (UID: \"6879da7c-dbb2-4843-a94b-a24dc6aa6cc9\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.024957 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6879da7c-dbb2-4843-a94b-a24dc6aa6cc9-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"6879da7c-dbb2-4843-a94b-a24dc6aa6cc9\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.024998 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6879da7c-dbb2-4843-a94b-a24dc6aa6cc9-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"6879da7c-dbb2-4843-a94b-a24dc6aa6cc9\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.026644 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-internal-config-data" Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.026825 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-internal-svc" Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.048867 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.054066 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-db-sync-b22j5"] Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.060618 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-db-sync-hsc9c"] Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.126673 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"6879da7c-dbb2-4843-a94b-a24dc6aa6cc9\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.126751 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6879da7c-dbb2-4843-a94b-a24dc6aa6cc9-logs\") pod \"glance-default-external-api-0\" (UID: \"6879da7c-dbb2-4843-a94b-a24dc6aa6cc9\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.126801 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/02846c59-73e4-4876-b95c-a939a3b914bc-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"02846c59-73e4-4876-b95c-a939a3b914bc\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.126840 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-4zxwd\" (UniqueName: \"kubernetes.io/projected/02846c59-73e4-4876-b95c-a939a3b914bc-kube-api-access-4zxwd\") pod \"glance-default-internal-api-0\" (UID: \"02846c59-73e4-4876-b95c-a939a3b914bc\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.126903 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/02846c59-73e4-4876-b95c-a939a3b914bc-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"02846c59-73e4-4876-b95c-a939a3b914bc\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.126926 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vqgj7\" (UniqueName: \"kubernetes.io/projected/6879da7c-dbb2-4843-a94b-a24dc6aa6cc9-kube-api-access-vqgj7\") pod \"glance-default-external-api-0\" (UID: \"6879da7c-dbb2-4843-a94b-a24dc6aa6cc9\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.126943 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/02846c59-73e4-4876-b95c-a939a3b914bc-scripts\") pod \"glance-default-internal-api-0\" (UID: \"02846c59-73e4-4876-b95c-a939a3b914bc\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.126982 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6879da7c-dbb2-4843-a94b-a24dc6aa6cc9-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"6879da7c-dbb2-4843-a94b-a24dc6aa6cc9\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.127002 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6879da7c-dbb2-4843-a94b-a24dc6aa6cc9-config-data\") pod \"glance-default-external-api-0\" (UID: \"6879da7c-dbb2-4843-a94b-a24dc6aa6cc9\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.127018 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6879da7c-dbb2-4843-a94b-a24dc6aa6cc9-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"6879da7c-dbb2-4843-a94b-a24dc6aa6cc9\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.127059 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02846c59-73e4-4876-b95c-a939a3b914bc-config-data\") pod \"glance-default-internal-api-0\" (UID: \"02846c59-73e4-4876-b95c-a939a3b914bc\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.127083 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/02846c59-73e4-4876-b95c-a939a3b914bc-logs\") pod \"glance-default-internal-api-0\" (UID: \"02846c59-73e4-4876-b95c-a939a3b914bc\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:55:02 crc 
kubenswrapper[4558]: I0120 17:55:02.127127 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02846c59-73e4-4876-b95c-a939a3b914bc-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"02846c59-73e4-4876-b95c-a939a3b914bc\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.127146 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6879da7c-dbb2-4843-a94b-a24dc6aa6cc9-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"6879da7c-dbb2-4843-a94b-a24dc6aa6cc9\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.127208 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"02846c59-73e4-4876-b95c-a939a3b914bc\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.127237 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6879da7c-dbb2-4843-a94b-a24dc6aa6cc9-scripts\") pod \"glance-default-external-api-0\" (UID: \"6879da7c-dbb2-4843-a94b-a24dc6aa6cc9\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.128189 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6879da7c-dbb2-4843-a94b-a24dc6aa6cc9-logs\") pod \"glance-default-external-api-0\" (UID: \"6879da7c-dbb2-4843-a94b-a24dc6aa6cc9\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.128501 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6879da7c-dbb2-4843-a94b-a24dc6aa6cc9-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"6879da7c-dbb2-4843-a94b-a24dc6aa6cc9\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.130128 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"6879da7c-dbb2-4843-a94b-a24dc6aa6cc9\") device mount path \"/mnt/openstack/pv05\"" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.134547 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6879da7c-dbb2-4843-a94b-a24dc6aa6cc9-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"6879da7c-dbb2-4843-a94b-a24dc6aa6cc9\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.136310 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6879da7c-dbb2-4843-a94b-a24dc6aa6cc9-scripts\") pod \"glance-default-external-api-0\" (UID: \"6879da7c-dbb2-4843-a94b-a24dc6aa6cc9\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 
20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.139374 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6879da7c-dbb2-4843-a94b-a24dc6aa6cc9-config-data\") pod \"glance-default-external-api-0\" (UID: \"6879da7c-dbb2-4843-a94b-a24dc6aa6cc9\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.146972 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6879da7c-dbb2-4843-a94b-a24dc6aa6cc9-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"6879da7c-dbb2-4843-a94b-a24dc6aa6cc9\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.148811 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-db-sync-5b76z"] Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.156438 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vqgj7\" (UniqueName: \"kubernetes.io/projected/6879da7c-dbb2-4843-a94b-a24dc6aa6cc9-kube-api-access-vqgj7\") pod \"glance-default-external-api-0\" (UID: \"6879da7c-dbb2-4843-a94b-a24dc6aa6cc9\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.168818 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"6879da7c-dbb2-4843-a94b-a24dc6aa6cc9\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.229478 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/02846c59-73e4-4876-b95c-a939a3b914bc-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"02846c59-73e4-4876-b95c-a939a3b914bc\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.229967 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/02846c59-73e4-4876-b95c-a939a3b914bc-scripts\") pod \"glance-default-internal-api-0\" (UID: \"02846c59-73e4-4876-b95c-a939a3b914bc\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.230065 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02846c59-73e4-4876-b95c-a939a3b914bc-config-data\") pod \"glance-default-internal-api-0\" (UID: \"02846c59-73e4-4876-b95c-a939a3b914bc\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.230145 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/02846c59-73e4-4876-b95c-a939a3b914bc-logs\") pod \"glance-default-internal-api-0\" (UID: \"02846c59-73e4-4876-b95c-a939a3b914bc\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.230221 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/02846c59-73e4-4876-b95c-a939a3b914bc-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"02846c59-73e4-4876-b95c-a939a3b914bc\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.230304 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"02846c59-73e4-4876-b95c-a939a3b914bc\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.230438 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/02846c59-73e4-4876-b95c-a939a3b914bc-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"02846c59-73e4-4876-b95c-a939a3b914bc\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.230505 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4zxwd\" (UniqueName: \"kubernetes.io/projected/02846c59-73e4-4876-b95c-a939a3b914bc-kube-api-access-4zxwd\") pod \"glance-default-internal-api-0\" (UID: \"02846c59-73e4-4876-b95c-a939a3b914bc\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.230636 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"02846c59-73e4-4876-b95c-a939a3b914bc\") device mount path \"/mnt/openstack/pv07\"" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.230776 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/02846c59-73e4-4876-b95c-a939a3b914bc-logs\") pod \"glance-default-internal-api-0\" (UID: \"02846c59-73e4-4876-b95c-a939a3b914bc\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.231060 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/02846c59-73e4-4876-b95c-a939a3b914bc-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"02846c59-73e4-4876-b95c-a939a3b914bc\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.234405 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.236722 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/02846c59-73e4-4876-b95c-a939a3b914bc-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"02846c59-73e4-4876-b95c-a939a3b914bc\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.239674 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/02846c59-73e4-4876-b95c-a939a3b914bc-scripts\") pod \"glance-default-internal-api-0\" (UID: \"02846c59-73e4-4876-b95c-a939a3b914bc\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.241196 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02846c59-73e4-4876-b95c-a939a3b914bc-config-data\") pod \"glance-default-internal-api-0\" (UID: \"02846c59-73e4-4876-b95c-a939a3b914bc\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.242843 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02846c59-73e4-4876-b95c-a939a3b914bc-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"02846c59-73e4-4876-b95c-a939a3b914bc\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.244780 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4zxwd\" (UniqueName: \"kubernetes.io/projected/02846c59-73e4-4876-b95c-a939a3b914bc-kube-api-access-4zxwd\") pod \"glance-default-internal-api-0\" (UID: \"02846c59-73e4-4876-b95c-a939a3b914bc\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.251445 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"02846c59-73e4-4876-b95c-a939a3b914bc\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.343966 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.664708 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.756972 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-sync-hsc9c" event={"ID":"d89e519c-e68a-48fc-8a7d-09d4d094d251","Type":"ContainerStarted","Data":"8ad2ad62dfdac1505d231618bd110cbb2e4eac1446e02a350103d8e8e6967718"} Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.757049 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-sync-hsc9c" event={"ID":"d89e519c-e68a-48fc-8a7d-09d4d094d251","Type":"ContainerStarted","Data":"3769fd0c40c51219f834b481d530c1a75ed9a4a049437ce7ffc7841261b16064"} Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.762157 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-k66gc" event={"ID":"0353f1c5-452b-413c-981b-84e774d099eb","Type":"ContainerStarted","Data":"1efa0468b0518cd84740b23884a4369481e10910db84d1b937611c7aed424709"} Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.769624 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-sync-b22j5" event={"ID":"4571f178-ad12-4a67-a9a4-0e2cca1e2e6f","Type":"ContainerStarted","Data":"8d1066d3fad6d8ee52adbc7179c0737528978fefd362036c2b152fa8b47efd48"} Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.769674 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-sync-b22j5" event={"ID":"4571f178-ad12-4a67-a9a4-0e2cca1e2e6f","Type":"ContainerStarted","Data":"7cc48b009c312940d54d312faa4f62c78c13d8b28dd2acfec2a6e8362186fd98"} Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.782292 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-sync-7l4r5" event={"ID":"a8e183b4-694a-4ce9-90e9-1ff11d52ff3c","Type":"ContainerStarted","Data":"2e07911f819a9e30826bfd851f1984c5d19e9e8f68cb8a36d54515996b7e96d1"} Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.784423 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-db-sync-hsc9c" podStartSLOduration=1.7844017189999999 podStartE2EDuration="1.784401719s" podCreationTimestamp="2026-01-20 17:55:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:55:02.777524448 +0000 UTC m=+4396.537862416" watchObservedRunningTime="2026-01-20 17:55:02.784401719 +0000 UTC m=+4396.544739687" Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.798378 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"63d78039-84d1-41df-b0dc-3d0b01cc2a61","Type":"ContainerStarted","Data":"574ebb66e35d9d21611416ae8633182d5a5fa517914c62911726888394f50a1d"} Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.802618 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-sync-5b76z" event={"ID":"f963d040-98b2-46b4-a118-072a80ed0d53","Type":"ContainerStarted","Data":"7f55f22bc9fb124084f5b5d83ec240d39b440361ab6e1fb735a7109baba3a09c"} Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.802673 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-kuttl-tests/neutron-db-sync-5b76z" event={"ID":"f963d040-98b2-46b4-a118-072a80ed0d53","Type":"ContainerStarted","Data":"63753a69a848c4e6b0d50b394008b514dfebeb076a3d237f7444817dcee6df5c"} Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.828516 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/placement-db-sync-b22j5" podStartSLOduration=1.8284939489999998 podStartE2EDuration="1.828493949s" podCreationTimestamp="2026-01-20 17:55:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:55:02.796741063 +0000 UTC m=+4396.557079031" watchObservedRunningTime="2026-01-20 17:55:02.828493949 +0000 UTC m=+4396.588831917" Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.849241 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/keystone-bootstrap-k66gc" podStartSLOduration=2.849217288 podStartE2EDuration="2.849217288s" podCreationTimestamp="2026-01-20 17:55:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:55:02.827007134 +0000 UTC m=+4396.587345101" watchObservedRunningTime="2026-01-20 17:55:02.849217288 +0000 UTC m=+4396.609555255" Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.860760 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.863923 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/neutron-db-sync-5b76z" podStartSLOduration=2.863908622 podStartE2EDuration="2.863908622s" podCreationTimestamp="2026-01-20 17:55:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:55:02.859985486 +0000 UTC m=+4396.620323453" watchObservedRunningTime="2026-01-20 17:55:02.863908622 +0000 UTC m=+4396.624246590" Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.888665 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.941397 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:55:02 crc kubenswrapper[4558]: I0120 17:55:02.951550 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:55:03 crc kubenswrapper[4558]: I0120 17:55:03.823641 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"02846c59-73e4-4876-b95c-a939a3b914bc","Type":"ContainerStarted","Data":"4f2585cf795dca34bc90b44b894237bf0157b72edc2ea4bad3e1ac25bd6e027e"} Jan 20 17:55:03 crc kubenswrapper[4558]: I0120 17:55:03.823904 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"02846c59-73e4-4876-b95c-a939a3b914bc","Type":"ContainerStarted","Data":"3f654ad3901dd001dd27c36236efc6e8cb54ba3f4d81fcb20ed6903669e33c77"} Jan 20 17:55:03 crc kubenswrapper[4558]: I0120 17:55:03.826445 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-sync-7l4r5" 
event={"ID":"a8e183b4-694a-4ce9-90e9-1ff11d52ff3c","Type":"ContainerStarted","Data":"72db31604b9af667b1c4e5942704410ff8f1e3af14c2a6fabe1fefc79f363d83"} Jan 20 17:55:03 crc kubenswrapper[4558]: I0120 17:55:03.852634 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/cinder-db-sync-7l4r5" podStartSLOduration=2.852611637 podStartE2EDuration="2.852611637s" podCreationTimestamp="2026-01-20 17:55:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:55:03.847518971 +0000 UTC m=+4397.607856929" watchObservedRunningTime="2026-01-20 17:55:03.852611637 +0000 UTC m=+4397.612949605" Jan 20 17:55:03 crc kubenswrapper[4558]: I0120 17:55:03.897936 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"63d78039-84d1-41df-b0dc-3d0b01cc2a61","Type":"ContainerStarted","Data":"266031d8114437a6db868dffef46777ee73c1807d22ff263ddc75880bb58a4d8"} Jan 20 17:55:03 crc kubenswrapper[4558]: I0120 17:55:03.911020 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"6879da7c-dbb2-4843-a94b-a24dc6aa6cc9","Type":"ContainerStarted","Data":"d5ca530a3c43a142a18bd309977df04c40397e2c25fe036b9a97d34a2c68290a"} Jan 20 17:55:03 crc kubenswrapper[4558]: I0120 17:55:03.911086 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"6879da7c-dbb2-4843-a94b-a24dc6aa6cc9","Type":"ContainerStarted","Data":"1f723867be1eeef599ab53f9e7a82964432f007850ecb1e8efa74bc834b02cf0"} Jan 20 17:55:04 crc kubenswrapper[4558]: I0120 17:55:04.927682 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"02846c59-73e4-4876-b95c-a939a3b914bc","Type":"ContainerStarted","Data":"b1d6e564deae94a6b542d6d0198eecb759f69a96dfbc747cc0f0a1a1dde288c5"} Jan 20 17:55:04 crc kubenswrapper[4558]: I0120 17:55:04.928043 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="02846c59-73e4-4876-b95c-a939a3b914bc" containerName="glance-log" containerID="cri-o://4f2585cf795dca34bc90b44b894237bf0157b72edc2ea4bad3e1ac25bd6e027e" gracePeriod=30 Jan 20 17:55:04 crc kubenswrapper[4558]: I0120 17:55:04.928362 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="02846c59-73e4-4876-b95c-a939a3b914bc" containerName="glance-httpd" containerID="cri-o://b1d6e564deae94a6b542d6d0198eecb759f69a96dfbc747cc0f0a1a1dde288c5" gracePeriod=30 Jan 20 17:55:04 crc kubenswrapper[4558]: I0120 17:55:04.933646 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"63d78039-84d1-41df-b0dc-3d0b01cc2a61","Type":"ContainerStarted","Data":"ebf1429a28db4257c55ad73cea3715ef4bb80d2a7a864c1cb0908cfbe3578248"} Jan 20 17:55:04 crc kubenswrapper[4558]: I0120 17:55:04.937137 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"6879da7c-dbb2-4843-a94b-a24dc6aa6cc9","Type":"ContainerStarted","Data":"ffc3f2ff8b01056bc8842d4f59a0616cd4bbbfc1cfd39fa551c21785a6ec6e10"} Jan 20 17:55:04 crc kubenswrapper[4558]: I0120 17:55:04.937265 4558 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="6879da7c-dbb2-4843-a94b-a24dc6aa6cc9" containerName="glance-log" containerID="cri-o://d5ca530a3c43a142a18bd309977df04c40397e2c25fe036b9a97d34a2c68290a" gracePeriod=30 Jan 20 17:55:04 crc kubenswrapper[4558]: I0120 17:55:04.937303 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="6879da7c-dbb2-4843-a94b-a24dc6aa6cc9" containerName="glance-httpd" containerID="cri-o://ffc3f2ff8b01056bc8842d4f59a0616cd4bbbfc1cfd39fa551c21785a6ec6e10" gracePeriod=30 Jan 20 17:55:04 crc kubenswrapper[4558]: I0120 17:55:04.939808 4558 generic.go:334] "Generic (PLEG): container finished" podID="d89e519c-e68a-48fc-8a7d-09d4d094d251" containerID="8ad2ad62dfdac1505d231618bd110cbb2e4eac1446e02a350103d8e8e6967718" exitCode=0 Jan 20 17:55:04 crc kubenswrapper[4558]: I0120 17:55:04.940089 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-sync-hsc9c" event={"ID":"d89e519c-e68a-48fc-8a7d-09d4d094d251","Type":"ContainerDied","Data":"8ad2ad62dfdac1505d231618bd110cbb2e4eac1446e02a350103d8e8e6967718"} Jan 20 17:55:04 crc kubenswrapper[4558]: I0120 17:55:04.942691 4558 generic.go:334] "Generic (PLEG): container finished" podID="0353f1c5-452b-413c-981b-84e774d099eb" containerID="1efa0468b0518cd84740b23884a4369481e10910db84d1b937611c7aed424709" exitCode=0 Jan 20 17:55:04 crc kubenswrapper[4558]: I0120 17:55:04.942751 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-k66gc" event={"ID":"0353f1c5-452b-413c-981b-84e774d099eb","Type":"ContainerDied","Data":"1efa0468b0518cd84740b23884a4369481e10910db84d1b937611c7aed424709"} Jan 20 17:55:04 crc kubenswrapper[4558]: I0120 17:55:04.951230 4558 generic.go:334] "Generic (PLEG): container finished" podID="4571f178-ad12-4a67-a9a4-0e2cca1e2e6f" containerID="8d1066d3fad6d8ee52adbc7179c0737528978fefd362036c2b152fa8b47efd48" exitCode=0 Jan 20 17:55:04 crc kubenswrapper[4558]: I0120 17:55:04.951351 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-sync-b22j5" event={"ID":"4571f178-ad12-4a67-a9a4-0e2cca1e2e6f","Type":"ContainerDied","Data":"8d1066d3fad6d8ee52adbc7179c0737528978fefd362036c2b152fa8b47efd48"} Jan 20 17:55:04 crc kubenswrapper[4558]: I0120 17:55:04.953653 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-internal-api-0" podStartSLOduration=4.953640476 podStartE2EDuration="4.953640476s" podCreationTimestamp="2026-01-20 17:55:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:55:04.950702993 +0000 UTC m=+4398.711040960" watchObservedRunningTime="2026-01-20 17:55:04.953640476 +0000 UTC m=+4398.713978443" Jan 20 17:55:05 crc kubenswrapper[4558]: I0120 17:55:05.001269 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-external-api-0" podStartSLOduration=5.001248547 podStartE2EDuration="5.001248547s" podCreationTimestamp="2026-01-20 17:55:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:55:04.983385868 +0000 UTC m=+4398.743723835" watchObservedRunningTime="2026-01-20 17:55:05.001248547 +0000 UTC m=+4398.761586514" Jan 20 17:55:05 crc kubenswrapper[4558]: I0120 
17:55:05.671152 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:55:05 crc kubenswrapper[4558]: I0120 17:55:05.676336 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:55:05 crc kubenswrapper[4558]: I0120 17:55:05.829237 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6879da7c-dbb2-4843-a94b-a24dc6aa6cc9-logs\") pod \"6879da7c-dbb2-4843-a94b-a24dc6aa6cc9\" (UID: \"6879da7c-dbb2-4843-a94b-a24dc6aa6cc9\") " Jan 20 17:55:05 crc kubenswrapper[4558]: I0120 17:55:05.829291 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/02846c59-73e4-4876-b95c-a939a3b914bc-httpd-run\") pod \"02846c59-73e4-4876-b95c-a939a3b914bc\" (UID: \"02846c59-73e4-4876-b95c-a939a3b914bc\") " Jan 20 17:55:05 crc kubenswrapper[4558]: I0120 17:55:05.829363 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6879da7c-dbb2-4843-a94b-a24dc6aa6cc9-httpd-run\") pod \"6879da7c-dbb2-4843-a94b-a24dc6aa6cc9\" (UID: \"6879da7c-dbb2-4843-a94b-a24dc6aa6cc9\") " Jan 20 17:55:05 crc kubenswrapper[4558]: I0120 17:55:05.829401 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6879da7c-dbb2-4843-a94b-a24dc6aa6cc9-scripts\") pod \"6879da7c-dbb2-4843-a94b-a24dc6aa6cc9\" (UID: \"6879da7c-dbb2-4843-a94b-a24dc6aa6cc9\") " Jan 20 17:55:05 crc kubenswrapper[4558]: I0120 17:55:05.829433 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/02846c59-73e4-4876-b95c-a939a3b914bc-internal-tls-certs\") pod \"02846c59-73e4-4876-b95c-a939a3b914bc\" (UID: \"02846c59-73e4-4876-b95c-a939a3b914bc\") " Jan 20 17:55:05 crc kubenswrapper[4558]: I0120 17:55:05.829464 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"02846c59-73e4-4876-b95c-a939a3b914bc\" (UID: \"02846c59-73e4-4876-b95c-a939a3b914bc\") " Jan 20 17:55:05 crc kubenswrapper[4558]: I0120 17:55:05.829495 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vqgj7\" (UniqueName: \"kubernetes.io/projected/6879da7c-dbb2-4843-a94b-a24dc6aa6cc9-kube-api-access-vqgj7\") pod \"6879da7c-dbb2-4843-a94b-a24dc6aa6cc9\" (UID: \"6879da7c-dbb2-4843-a94b-a24dc6aa6cc9\") " Jan 20 17:55:05 crc kubenswrapper[4558]: I0120 17:55:05.829588 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6879da7c-dbb2-4843-a94b-a24dc6aa6cc9-combined-ca-bundle\") pod \"6879da7c-dbb2-4843-a94b-a24dc6aa6cc9\" (UID: \"6879da7c-dbb2-4843-a94b-a24dc6aa6cc9\") " Jan 20 17:55:05 crc kubenswrapper[4558]: I0120 17:55:05.829606 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02846c59-73e4-4876-b95c-a939a3b914bc-combined-ca-bundle\") pod \"02846c59-73e4-4876-b95c-a939a3b914bc\" (UID: \"02846c59-73e4-4876-b95c-a939a3b914bc\") " Jan 20 17:55:05 crc kubenswrapper[4558]: I0120 17:55:05.829654 4558 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02846c59-73e4-4876-b95c-a939a3b914bc-config-data\") pod \"02846c59-73e4-4876-b95c-a939a3b914bc\" (UID: \"02846c59-73e4-4876-b95c-a939a3b914bc\") " Jan 20 17:55:05 crc kubenswrapper[4558]: I0120 17:55:05.829672 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/02846c59-73e4-4876-b95c-a939a3b914bc-logs\") pod \"02846c59-73e4-4876-b95c-a939a3b914bc\" (UID: \"02846c59-73e4-4876-b95c-a939a3b914bc\") " Jan 20 17:55:05 crc kubenswrapper[4558]: I0120 17:55:05.829689 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6879da7c-dbb2-4843-a94b-a24dc6aa6cc9-public-tls-certs\") pod \"6879da7c-dbb2-4843-a94b-a24dc6aa6cc9\" (UID: \"6879da7c-dbb2-4843-a94b-a24dc6aa6cc9\") " Jan 20 17:55:05 crc kubenswrapper[4558]: I0120 17:55:05.829710 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4zxwd\" (UniqueName: \"kubernetes.io/projected/02846c59-73e4-4876-b95c-a939a3b914bc-kube-api-access-4zxwd\") pod \"02846c59-73e4-4876-b95c-a939a3b914bc\" (UID: \"02846c59-73e4-4876-b95c-a939a3b914bc\") " Jan 20 17:55:05 crc kubenswrapper[4558]: I0120 17:55:05.829727 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"6879da7c-dbb2-4843-a94b-a24dc6aa6cc9\" (UID: \"6879da7c-dbb2-4843-a94b-a24dc6aa6cc9\") " Jan 20 17:55:05 crc kubenswrapper[4558]: I0120 17:55:05.829759 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6879da7c-dbb2-4843-a94b-a24dc6aa6cc9-config-data\") pod \"6879da7c-dbb2-4843-a94b-a24dc6aa6cc9\" (UID: \"6879da7c-dbb2-4843-a94b-a24dc6aa6cc9\") " Jan 20 17:55:05 crc kubenswrapper[4558]: I0120 17:55:05.829746 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6879da7c-dbb2-4843-a94b-a24dc6aa6cc9-logs" (OuterVolumeSpecName: "logs") pod "6879da7c-dbb2-4843-a94b-a24dc6aa6cc9" (UID: "6879da7c-dbb2-4843-a94b-a24dc6aa6cc9"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:55:05 crc kubenswrapper[4558]: I0120 17:55:05.829775 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/02846c59-73e4-4876-b95c-a939a3b914bc-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "02846c59-73e4-4876-b95c-a939a3b914bc" (UID: "02846c59-73e4-4876-b95c-a939a3b914bc"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:55:05 crc kubenswrapper[4558]: I0120 17:55:05.829791 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/02846c59-73e4-4876-b95c-a939a3b914bc-scripts\") pod \"02846c59-73e4-4876-b95c-a939a3b914bc\" (UID: \"02846c59-73e4-4876-b95c-a939a3b914bc\") " Jan 20 17:55:05 crc kubenswrapper[4558]: I0120 17:55:05.829824 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6879da7c-dbb2-4843-a94b-a24dc6aa6cc9-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "6879da7c-dbb2-4843-a94b-a24dc6aa6cc9" (UID: "6879da7c-dbb2-4843-a94b-a24dc6aa6cc9"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:55:05 crc kubenswrapper[4558]: I0120 17:55:05.830868 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6879da7c-dbb2-4843-a94b-a24dc6aa6cc9-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:05 crc kubenswrapper[4558]: I0120 17:55:05.830892 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/02846c59-73e4-4876-b95c-a939a3b914bc-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:05 crc kubenswrapper[4558]: I0120 17:55:05.830907 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/6879da7c-dbb2-4843-a94b-a24dc6aa6cc9-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:05 crc kubenswrapper[4558]: I0120 17:55:05.832911 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/02846c59-73e4-4876-b95c-a939a3b914bc-logs" (OuterVolumeSpecName: "logs") pod "02846c59-73e4-4876-b95c-a939a3b914bc" (UID: "02846c59-73e4-4876-b95c-a939a3b914bc"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:55:05 crc kubenswrapper[4558]: I0120 17:55:05.837471 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "glance") pod "02846c59-73e4-4876-b95c-a939a3b914bc" (UID: "02846c59-73e4-4876-b95c-a939a3b914bc"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:55:05 crc kubenswrapper[4558]: I0120 17:55:05.837483 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02846c59-73e4-4876-b95c-a939a3b914bc-scripts" (OuterVolumeSpecName: "scripts") pod "02846c59-73e4-4876-b95c-a939a3b914bc" (UID: "02846c59-73e4-4876-b95c-a939a3b914bc"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:55:05 crc kubenswrapper[4558]: I0120 17:55:05.838975 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "glance") pod "6879da7c-dbb2-4843-a94b-a24dc6aa6cc9" (UID: "6879da7c-dbb2-4843-a94b-a24dc6aa6cc9"). InnerVolumeSpecName "local-storage05-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:55:05 crc kubenswrapper[4558]: I0120 17:55:05.839697 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6879da7c-dbb2-4843-a94b-a24dc6aa6cc9-scripts" (OuterVolumeSpecName: "scripts") pod "6879da7c-dbb2-4843-a94b-a24dc6aa6cc9" (UID: "6879da7c-dbb2-4843-a94b-a24dc6aa6cc9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:55:05 crc kubenswrapper[4558]: I0120 17:55:05.840241 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/02846c59-73e4-4876-b95c-a939a3b914bc-kube-api-access-4zxwd" (OuterVolumeSpecName: "kube-api-access-4zxwd") pod "02846c59-73e4-4876-b95c-a939a3b914bc" (UID: "02846c59-73e4-4876-b95c-a939a3b914bc"). InnerVolumeSpecName "kube-api-access-4zxwd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:55:05 crc kubenswrapper[4558]: I0120 17:55:05.842085 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6879da7c-dbb2-4843-a94b-a24dc6aa6cc9-kube-api-access-vqgj7" (OuterVolumeSpecName: "kube-api-access-vqgj7") pod "6879da7c-dbb2-4843-a94b-a24dc6aa6cc9" (UID: "6879da7c-dbb2-4843-a94b-a24dc6aa6cc9"). InnerVolumeSpecName "kube-api-access-vqgj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:55:05 crc kubenswrapper[4558]: I0120 17:55:05.859428 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02846c59-73e4-4876-b95c-a939a3b914bc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "02846c59-73e4-4876-b95c-a939a3b914bc" (UID: "02846c59-73e4-4876-b95c-a939a3b914bc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:55:05 crc kubenswrapper[4558]: I0120 17:55:05.861626 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6879da7c-dbb2-4843-a94b-a24dc6aa6cc9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6879da7c-dbb2-4843-a94b-a24dc6aa6cc9" (UID: "6879da7c-dbb2-4843-a94b-a24dc6aa6cc9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:55:05 crc kubenswrapper[4558]: I0120 17:55:05.872746 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02846c59-73e4-4876-b95c-a939a3b914bc-config-data" (OuterVolumeSpecName: "config-data") pod "02846c59-73e4-4876-b95c-a939a3b914bc" (UID: "02846c59-73e4-4876-b95c-a939a3b914bc"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:55:05 crc kubenswrapper[4558]: I0120 17:55:05.875877 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6879da7c-dbb2-4843-a94b-a24dc6aa6cc9-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "6879da7c-dbb2-4843-a94b-a24dc6aa6cc9" (UID: "6879da7c-dbb2-4843-a94b-a24dc6aa6cc9"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:55:05 crc kubenswrapper[4558]: I0120 17:55:05.880152 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6879da7c-dbb2-4843-a94b-a24dc6aa6cc9-config-data" (OuterVolumeSpecName: "config-data") pod "6879da7c-dbb2-4843-a94b-a24dc6aa6cc9" (UID: "6879da7c-dbb2-4843-a94b-a24dc6aa6cc9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:55:05 crc kubenswrapper[4558]: I0120 17:55:05.883356 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02846c59-73e4-4876-b95c-a939a3b914bc-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "02846c59-73e4-4876-b95c-a939a3b914bc" (UID: "02846c59-73e4-4876-b95c-a939a3b914bc"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:55:05 crc kubenswrapper[4558]: I0120 17:55:05.933289 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6879da7c-dbb2-4843-a94b-a24dc6aa6cc9-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:05 crc kubenswrapper[4558]: I0120 17:55:05.933336 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/02846c59-73e4-4876-b95c-a939a3b914bc-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:05 crc kubenswrapper[4558]: I0120 17:55:05.933382 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Jan 20 17:55:05 crc kubenswrapper[4558]: I0120 17:55:05.933401 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vqgj7\" (UniqueName: \"kubernetes.io/projected/6879da7c-dbb2-4843-a94b-a24dc6aa6cc9-kube-api-access-vqgj7\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:05 crc kubenswrapper[4558]: I0120 17:55:05.933414 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02846c59-73e4-4876-b95c-a939a3b914bc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:05 crc kubenswrapper[4558]: I0120 17:55:05.933425 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6879da7c-dbb2-4843-a94b-a24dc6aa6cc9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:05 crc kubenswrapper[4558]: I0120 17:55:05.933436 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02846c59-73e4-4876-b95c-a939a3b914bc-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:05 crc kubenswrapper[4558]: I0120 17:55:05.933447 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/02846c59-73e4-4876-b95c-a939a3b914bc-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:05 crc kubenswrapper[4558]: I0120 17:55:05.933458 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6879da7c-dbb2-4843-a94b-a24dc6aa6cc9-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:05 crc kubenswrapper[4558]: I0120 17:55:05.933468 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4zxwd\" (UniqueName: \"kubernetes.io/projected/02846c59-73e4-4876-b95c-a939a3b914bc-kube-api-access-4zxwd\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:05 crc kubenswrapper[4558]: I0120 17:55:05.933494 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" " Jan 20 17:55:05 crc kubenswrapper[4558]: I0120 17:55:05.933517 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6879da7c-dbb2-4843-a94b-a24dc6aa6cc9-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:05 crc kubenswrapper[4558]: I0120 17:55:05.933527 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/02846c59-73e4-4876-b95c-a939a3b914bc-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:05 crc kubenswrapper[4558]: I0120 17:55:05.949832 4558 
operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Jan 20 17:55:05 crc kubenswrapper[4558]: I0120 17:55:05.951785 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc" Jan 20 17:55:05 crc kubenswrapper[4558]: I0120 17:55:05.966612 4558 generic.go:334] "Generic (PLEG): container finished" podID="02846c59-73e4-4876-b95c-a939a3b914bc" containerID="b1d6e564deae94a6b542d6d0198eecb759f69a96dfbc747cc0f0a1a1dde288c5" exitCode=143 Jan 20 17:55:05 crc kubenswrapper[4558]: I0120 17:55:05.966656 4558 generic.go:334] "Generic (PLEG): container finished" podID="02846c59-73e4-4876-b95c-a939a3b914bc" containerID="4f2585cf795dca34bc90b44b894237bf0157b72edc2ea4bad3e1ac25bd6e027e" exitCode=143 Jan 20 17:55:05 crc kubenswrapper[4558]: I0120 17:55:05.966670 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:55:05 crc kubenswrapper[4558]: I0120 17:55:05.966670 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"02846c59-73e4-4876-b95c-a939a3b914bc","Type":"ContainerDied","Data":"b1d6e564deae94a6b542d6d0198eecb759f69a96dfbc747cc0f0a1a1dde288c5"} Jan 20 17:55:05 crc kubenswrapper[4558]: I0120 17:55:05.966753 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"02846c59-73e4-4876-b95c-a939a3b914bc","Type":"ContainerDied","Data":"4f2585cf795dca34bc90b44b894237bf0157b72edc2ea4bad3e1ac25bd6e027e"} Jan 20 17:55:05 crc kubenswrapper[4558]: I0120 17:55:05.966768 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"02846c59-73e4-4876-b95c-a939a3b914bc","Type":"ContainerDied","Data":"3f654ad3901dd001dd27c36236efc6e8cb54ba3f4d81fcb20ed6903669e33c77"} Jan 20 17:55:05 crc kubenswrapper[4558]: I0120 17:55:05.966814 4558 scope.go:117] "RemoveContainer" containerID="b1d6e564deae94a6b542d6d0198eecb759f69a96dfbc747cc0f0a1a1dde288c5" Jan 20 17:55:05 crc kubenswrapper[4558]: I0120 17:55:05.975954 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"63d78039-84d1-41df-b0dc-3d0b01cc2a61","Type":"ContainerStarted","Data":"fb0c76f7efa112c9f84f7b18a860585f8a74dd8aa04b177a266ffe056c7f8929"} Jan 20 17:55:05 crc kubenswrapper[4558]: I0120 17:55:05.989501 4558 generic.go:334] "Generic (PLEG): container finished" podID="6879da7c-dbb2-4843-a94b-a24dc6aa6cc9" containerID="ffc3f2ff8b01056bc8842d4f59a0616cd4bbbfc1cfd39fa551c21785a6ec6e10" exitCode=143 Jan 20 17:55:05 crc kubenswrapper[4558]: I0120 17:55:05.989533 4558 generic.go:334] "Generic (PLEG): container finished" podID="6879da7c-dbb2-4843-a94b-a24dc6aa6cc9" containerID="d5ca530a3c43a142a18bd309977df04c40397e2c25fe036b9a97d34a2c68290a" exitCode=143 Jan 20 17:55:05 crc kubenswrapper[4558]: I0120 17:55:05.989626 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:55:05 crc kubenswrapper[4558]: I0120 17:55:05.989680 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"6879da7c-dbb2-4843-a94b-a24dc6aa6cc9","Type":"ContainerDied","Data":"ffc3f2ff8b01056bc8842d4f59a0616cd4bbbfc1cfd39fa551c21785a6ec6e10"} Jan 20 17:55:05 crc kubenswrapper[4558]: I0120 17:55:05.989712 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"6879da7c-dbb2-4843-a94b-a24dc6aa6cc9","Type":"ContainerDied","Data":"d5ca530a3c43a142a18bd309977df04c40397e2c25fe036b9a97d34a2c68290a"} Jan 20 17:55:05 crc kubenswrapper[4558]: I0120 17:55:05.989729 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"6879da7c-dbb2-4843-a94b-a24dc6aa6cc9","Type":"ContainerDied","Data":"1f723867be1eeef599ab53f9e7a82964432f007850ecb1e8efa74bc834b02cf0"} Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.016450 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.031665 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.035376 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.035403 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.037615 4558 scope.go:117] "RemoveContainer" containerID="4f2585cf795dca34bc90b44b894237bf0157b72edc2ea4bad3e1ac25bd6e027e" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.056527 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:55:06 crc kubenswrapper[4558]: E0120 17:55:06.057007 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6879da7c-dbb2-4843-a94b-a24dc6aa6cc9" containerName="glance-log" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.057021 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6879da7c-dbb2-4843-a94b-a24dc6aa6cc9" containerName="glance-log" Jan 20 17:55:06 crc kubenswrapper[4558]: E0120 17:55:06.057033 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02846c59-73e4-4876-b95c-a939a3b914bc" containerName="glance-log" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.057039 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="02846c59-73e4-4876-b95c-a939a3b914bc" containerName="glance-log" Jan 20 17:55:06 crc kubenswrapper[4558]: E0120 17:55:06.057048 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02846c59-73e4-4876-b95c-a939a3b914bc" containerName="glance-httpd" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.057065 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="02846c59-73e4-4876-b95c-a939a3b914bc" containerName="glance-httpd" Jan 20 17:55:06 crc kubenswrapper[4558]: E0120 17:55:06.057084 4558 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="6879da7c-dbb2-4843-a94b-a24dc6aa6cc9" containerName="glance-httpd" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.057089 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6879da7c-dbb2-4843-a94b-a24dc6aa6cc9" containerName="glance-httpd" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.057264 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="02846c59-73e4-4876-b95c-a939a3b914bc" containerName="glance-log" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.057276 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="02846c59-73e4-4876-b95c-a939a3b914bc" containerName="glance-httpd" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.057282 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="6879da7c-dbb2-4843-a94b-a24dc6aa6cc9" containerName="glance-log" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.057292 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="6879da7c-dbb2-4843-a94b-a24dc6aa6cc9" containerName="glance-httpd" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.058218 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.060108 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-scripts" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.060382 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-internal-svc" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.060481 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-internal-config-data" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.060585 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-glance-dockercfg-g64r2" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.069577 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.076443 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.083600 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.090767 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.092551 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.097315 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.099847 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-external-config-data" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.100150 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-public-svc" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.109546 4558 scope.go:117] "RemoveContainer" containerID="b1d6e564deae94a6b542d6d0198eecb759f69a96dfbc747cc0f0a1a1dde288c5" Jan 20 17:55:06 crc kubenswrapper[4558]: E0120 17:55:06.110130 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b1d6e564deae94a6b542d6d0198eecb759f69a96dfbc747cc0f0a1a1dde288c5\": container with ID starting with b1d6e564deae94a6b542d6d0198eecb759f69a96dfbc747cc0f0a1a1dde288c5 not found: ID does not exist" containerID="b1d6e564deae94a6b542d6d0198eecb759f69a96dfbc747cc0f0a1a1dde288c5" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.110242 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b1d6e564deae94a6b542d6d0198eecb759f69a96dfbc747cc0f0a1a1dde288c5"} err="failed to get container status \"b1d6e564deae94a6b542d6d0198eecb759f69a96dfbc747cc0f0a1a1dde288c5\": rpc error: code = NotFound desc = could not find container \"b1d6e564deae94a6b542d6d0198eecb759f69a96dfbc747cc0f0a1a1dde288c5\": container with ID starting with b1d6e564deae94a6b542d6d0198eecb759f69a96dfbc747cc0f0a1a1dde288c5 not found: ID does not exist" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.110277 4558 scope.go:117] "RemoveContainer" containerID="4f2585cf795dca34bc90b44b894237bf0157b72edc2ea4bad3e1ac25bd6e027e" Jan 20 17:55:06 crc kubenswrapper[4558]: E0120 17:55:06.110687 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4f2585cf795dca34bc90b44b894237bf0157b72edc2ea4bad3e1ac25bd6e027e\": container with ID starting with 4f2585cf795dca34bc90b44b894237bf0157b72edc2ea4bad3e1ac25bd6e027e not found: ID does not exist" containerID="4f2585cf795dca34bc90b44b894237bf0157b72edc2ea4bad3e1ac25bd6e027e" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.110718 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4f2585cf795dca34bc90b44b894237bf0157b72edc2ea4bad3e1ac25bd6e027e"} err="failed to get container status \"4f2585cf795dca34bc90b44b894237bf0157b72edc2ea4bad3e1ac25bd6e027e\": rpc error: code = NotFound desc = could not find container \"4f2585cf795dca34bc90b44b894237bf0157b72edc2ea4bad3e1ac25bd6e027e\": container with ID starting with 4f2585cf795dca34bc90b44b894237bf0157b72edc2ea4bad3e1ac25bd6e027e not found: ID does not exist" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.110731 4558 scope.go:117] "RemoveContainer" containerID="b1d6e564deae94a6b542d6d0198eecb759f69a96dfbc747cc0f0a1a1dde288c5" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.112346 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b1d6e564deae94a6b542d6d0198eecb759f69a96dfbc747cc0f0a1a1dde288c5"} err="failed to get container 
status \"b1d6e564deae94a6b542d6d0198eecb759f69a96dfbc747cc0f0a1a1dde288c5\": rpc error: code = NotFound desc = could not find container \"b1d6e564deae94a6b542d6d0198eecb759f69a96dfbc747cc0f0a1a1dde288c5\": container with ID starting with b1d6e564deae94a6b542d6d0198eecb759f69a96dfbc747cc0f0a1a1dde288c5 not found: ID does not exist" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.112371 4558 scope.go:117] "RemoveContainer" containerID="4f2585cf795dca34bc90b44b894237bf0157b72edc2ea4bad3e1ac25bd6e027e" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.112565 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4f2585cf795dca34bc90b44b894237bf0157b72edc2ea4bad3e1ac25bd6e027e"} err="failed to get container status \"4f2585cf795dca34bc90b44b894237bf0157b72edc2ea4bad3e1ac25bd6e027e\": rpc error: code = NotFound desc = could not find container \"4f2585cf795dca34bc90b44b894237bf0157b72edc2ea4bad3e1ac25bd6e027e\": container with ID starting with 4f2585cf795dca34bc90b44b894237bf0157b72edc2ea4bad3e1ac25bd6e027e not found: ID does not exist" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.112579 4558 scope.go:117] "RemoveContainer" containerID="ffc3f2ff8b01056bc8842d4f59a0616cd4bbbfc1cfd39fa551c21785a6ec6e10" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.160221 4558 scope.go:117] "RemoveContainer" containerID="d5ca530a3c43a142a18bd309977df04c40397e2c25fe036b9a97d34a2c68290a" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.214849 4558 scope.go:117] "RemoveContainer" containerID="ffc3f2ff8b01056bc8842d4f59a0616cd4bbbfc1cfd39fa551c21785a6ec6e10" Jan 20 17:55:06 crc kubenswrapper[4558]: E0120 17:55:06.215367 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ffc3f2ff8b01056bc8842d4f59a0616cd4bbbfc1cfd39fa551c21785a6ec6e10\": container with ID starting with ffc3f2ff8b01056bc8842d4f59a0616cd4bbbfc1cfd39fa551c21785a6ec6e10 not found: ID does not exist" containerID="ffc3f2ff8b01056bc8842d4f59a0616cd4bbbfc1cfd39fa551c21785a6ec6e10" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.215415 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ffc3f2ff8b01056bc8842d4f59a0616cd4bbbfc1cfd39fa551c21785a6ec6e10"} err="failed to get container status \"ffc3f2ff8b01056bc8842d4f59a0616cd4bbbfc1cfd39fa551c21785a6ec6e10\": rpc error: code = NotFound desc = could not find container \"ffc3f2ff8b01056bc8842d4f59a0616cd4bbbfc1cfd39fa551c21785a6ec6e10\": container with ID starting with ffc3f2ff8b01056bc8842d4f59a0616cd4bbbfc1cfd39fa551c21785a6ec6e10 not found: ID does not exist" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.215441 4558 scope.go:117] "RemoveContainer" containerID="d5ca530a3c43a142a18bd309977df04c40397e2c25fe036b9a97d34a2c68290a" Jan 20 17:55:06 crc kubenswrapper[4558]: E0120 17:55:06.215924 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d5ca530a3c43a142a18bd309977df04c40397e2c25fe036b9a97d34a2c68290a\": container with ID starting with d5ca530a3c43a142a18bd309977df04c40397e2c25fe036b9a97d34a2c68290a not found: ID does not exist" containerID="d5ca530a3c43a142a18bd309977df04c40397e2c25fe036b9a97d34a2c68290a" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.215970 4558 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"d5ca530a3c43a142a18bd309977df04c40397e2c25fe036b9a97d34a2c68290a"} err="failed to get container status \"d5ca530a3c43a142a18bd309977df04c40397e2c25fe036b9a97d34a2c68290a\": rpc error: code = NotFound desc = could not find container \"d5ca530a3c43a142a18bd309977df04c40397e2c25fe036b9a97d34a2c68290a\": container with ID starting with d5ca530a3c43a142a18bd309977df04c40397e2c25fe036b9a97d34a2c68290a not found: ID does not exist" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.216000 4558 scope.go:117] "RemoveContainer" containerID="ffc3f2ff8b01056bc8842d4f59a0616cd4bbbfc1cfd39fa551c21785a6ec6e10" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.216421 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ffc3f2ff8b01056bc8842d4f59a0616cd4bbbfc1cfd39fa551c21785a6ec6e10"} err="failed to get container status \"ffc3f2ff8b01056bc8842d4f59a0616cd4bbbfc1cfd39fa551c21785a6ec6e10\": rpc error: code = NotFound desc = could not find container \"ffc3f2ff8b01056bc8842d4f59a0616cd4bbbfc1cfd39fa551c21785a6ec6e10\": container with ID starting with ffc3f2ff8b01056bc8842d4f59a0616cd4bbbfc1cfd39fa551c21785a6ec6e10 not found: ID does not exist" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.216455 4558 scope.go:117] "RemoveContainer" containerID="d5ca530a3c43a142a18bd309977df04c40397e2c25fe036b9a97d34a2c68290a" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.216707 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d5ca530a3c43a142a18bd309977df04c40397e2c25fe036b9a97d34a2c68290a"} err="failed to get container status \"d5ca530a3c43a142a18bd309977df04c40397e2c25fe036b9a97d34a2c68290a\": rpc error: code = NotFound desc = could not find container \"d5ca530a3c43a142a18bd309977df04c40397e2c25fe036b9a97d34a2c68290a\": container with ID starting with d5ca530a3c43a142a18bd309977df04c40397e2c25fe036b9a97d34a2c68290a not found: ID does not exist" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.238788 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b419ccfb-cd20-4b72-8f3a-48ce9ea2c991-config-data\") pod \"glance-default-internal-api-0\" (UID: \"b419ccfb-cd20-4b72-8f3a-48ce9ea2c991\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.238870 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"b419ccfb-cd20-4b72-8f3a-48ce9ea2c991\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.238903 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jwlv9\" (UniqueName: \"kubernetes.io/projected/5881b3da-d3ac-4168-a33c-0f29ad342f60-kube-api-access-jwlv9\") pod \"glance-default-external-api-0\" (UID: \"5881b3da-d3ac-4168-a33c-0f29ad342f60\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.238936 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b419ccfb-cd20-4b72-8f3a-48ce9ea2c991-internal-tls-certs\") pod 
\"glance-default-internal-api-0\" (UID: \"b419ccfb-cd20-4b72-8f3a-48ce9ea2c991\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.238958 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b419ccfb-cd20-4b72-8f3a-48ce9ea2c991-scripts\") pod \"glance-default-internal-api-0\" (UID: \"b419ccfb-cd20-4b72-8f3a-48ce9ea2c991\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.238983 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5881b3da-d3ac-4168-a33c-0f29ad342f60-logs\") pod \"glance-default-external-api-0\" (UID: \"5881b3da-d3ac-4168-a33c-0f29ad342f60\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.239006 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"5881b3da-d3ac-4168-a33c-0f29ad342f60\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.239024 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gdg4c\" (UniqueName: \"kubernetes.io/projected/b419ccfb-cd20-4b72-8f3a-48ce9ea2c991-kube-api-access-gdg4c\") pod \"glance-default-internal-api-0\" (UID: \"b419ccfb-cd20-4b72-8f3a-48ce9ea2c991\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.239061 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5881b3da-d3ac-4168-a33c-0f29ad342f60-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"5881b3da-d3ac-4168-a33c-0f29ad342f60\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.239081 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5881b3da-d3ac-4168-a33c-0f29ad342f60-config-data\") pod \"glance-default-external-api-0\" (UID: \"5881b3da-d3ac-4168-a33c-0f29ad342f60\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.239099 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b419ccfb-cd20-4b72-8f3a-48ce9ea2c991-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"b419ccfb-cd20-4b72-8f3a-48ce9ea2c991\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.239122 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b419ccfb-cd20-4b72-8f3a-48ce9ea2c991-logs\") pod \"glance-default-internal-api-0\" (UID: \"b419ccfb-cd20-4b72-8f3a-48ce9ea2c991\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.239139 4558 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5881b3da-d3ac-4168-a33c-0f29ad342f60-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"5881b3da-d3ac-4168-a33c-0f29ad342f60\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.239174 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5881b3da-d3ac-4168-a33c-0f29ad342f60-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"5881b3da-d3ac-4168-a33c-0f29ad342f60\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.239194 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b419ccfb-cd20-4b72-8f3a-48ce9ea2c991-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"b419ccfb-cd20-4b72-8f3a-48ce9ea2c991\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.239228 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5881b3da-d3ac-4168-a33c-0f29ad342f60-scripts\") pod \"glance-default-external-api-0\" (UID: \"5881b3da-d3ac-4168-a33c-0f29ad342f60\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.340699 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"b419ccfb-cd20-4b72-8f3a-48ce9ea2c991\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.340772 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jwlv9\" (UniqueName: \"kubernetes.io/projected/5881b3da-d3ac-4168-a33c-0f29ad342f60-kube-api-access-jwlv9\") pod \"glance-default-external-api-0\" (UID: \"5881b3da-d3ac-4168-a33c-0f29ad342f60\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.340835 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b419ccfb-cd20-4b72-8f3a-48ce9ea2c991-scripts\") pod \"glance-default-internal-api-0\" (UID: \"b419ccfb-cd20-4b72-8f3a-48ce9ea2c991\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.340860 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b419ccfb-cd20-4b72-8f3a-48ce9ea2c991-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"b419ccfb-cd20-4b72-8f3a-48ce9ea2c991\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.340902 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5881b3da-d3ac-4168-a33c-0f29ad342f60-logs\") pod \"glance-default-external-api-0\" (UID: \"5881b3da-d3ac-4168-a33c-0f29ad342f60\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 
17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.340933 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"5881b3da-d3ac-4168-a33c-0f29ad342f60\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.340956 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gdg4c\" (UniqueName: \"kubernetes.io/projected/b419ccfb-cd20-4b72-8f3a-48ce9ea2c991-kube-api-access-gdg4c\") pod \"glance-default-internal-api-0\" (UID: \"b419ccfb-cd20-4b72-8f3a-48ce9ea2c991\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.341009 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5881b3da-d3ac-4168-a33c-0f29ad342f60-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"5881b3da-d3ac-4168-a33c-0f29ad342f60\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.341033 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5881b3da-d3ac-4168-a33c-0f29ad342f60-config-data\") pod \"glance-default-external-api-0\" (UID: \"5881b3da-d3ac-4168-a33c-0f29ad342f60\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.341060 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b419ccfb-cd20-4b72-8f3a-48ce9ea2c991-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"b419ccfb-cd20-4b72-8f3a-48ce9ea2c991\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.341080 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b419ccfb-cd20-4b72-8f3a-48ce9ea2c991-logs\") pod \"glance-default-internal-api-0\" (UID: \"b419ccfb-cd20-4b72-8f3a-48ce9ea2c991\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.341104 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5881b3da-d3ac-4168-a33c-0f29ad342f60-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"5881b3da-d3ac-4168-a33c-0f29ad342f60\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.341115 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"b419ccfb-cd20-4b72-8f3a-48ce9ea2c991\") device mount path \"/mnt/openstack/pv07\"" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.341633 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"5881b3da-d3ac-4168-a33c-0f29ad342f60\") device mount path \"/mnt/openstack/pv05\"" 
pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.341749 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5881b3da-d3ac-4168-a33c-0f29ad342f60-logs\") pod \"glance-default-external-api-0\" (UID: \"5881b3da-d3ac-4168-a33c-0f29ad342f60\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.342347 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b419ccfb-cd20-4b72-8f3a-48ce9ea2c991-logs\") pod \"glance-default-internal-api-0\" (UID: \"b419ccfb-cd20-4b72-8f3a-48ce9ea2c991\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.341128 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5881b3da-d3ac-4168-a33c-0f29ad342f60-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"5881b3da-d3ac-4168-a33c-0f29ad342f60\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.342424 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b419ccfb-cd20-4b72-8f3a-48ce9ea2c991-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"b419ccfb-cd20-4b72-8f3a-48ce9ea2c991\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.342534 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5881b3da-d3ac-4168-a33c-0f29ad342f60-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"5881b3da-d3ac-4168-a33c-0f29ad342f60\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.342575 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5881b3da-d3ac-4168-a33c-0f29ad342f60-scripts\") pod \"glance-default-external-api-0\" (UID: \"5881b3da-d3ac-4168-a33c-0f29ad342f60\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.342659 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b419ccfb-cd20-4b72-8f3a-48ce9ea2c991-config-data\") pod \"glance-default-internal-api-0\" (UID: \"b419ccfb-cd20-4b72-8f3a-48ce9ea2c991\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.342779 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b419ccfb-cd20-4b72-8f3a-48ce9ea2c991-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"b419ccfb-cd20-4b72-8f3a-48ce9ea2c991\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.347801 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5881b3da-d3ac-4168-a33c-0f29ad342f60-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"5881b3da-d3ac-4168-a33c-0f29ad342f60\") " pod="openstack-kuttl-tests/glance-default-external-api-0" 
Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.349048 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b419ccfb-cd20-4b72-8f3a-48ce9ea2c991-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"b419ccfb-cd20-4b72-8f3a-48ce9ea2c991\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.349673 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b419ccfb-cd20-4b72-8f3a-48ce9ea2c991-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"b419ccfb-cd20-4b72-8f3a-48ce9ea2c991\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.351041 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5881b3da-d3ac-4168-a33c-0f29ad342f60-config-data\") pod \"glance-default-external-api-0\" (UID: \"5881b3da-d3ac-4168-a33c-0f29ad342f60\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.351257 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b419ccfb-cd20-4b72-8f3a-48ce9ea2c991-scripts\") pod \"glance-default-internal-api-0\" (UID: \"b419ccfb-cd20-4b72-8f3a-48ce9ea2c991\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.357963 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5881b3da-d3ac-4168-a33c-0f29ad342f60-scripts\") pod \"glance-default-external-api-0\" (UID: \"5881b3da-d3ac-4168-a33c-0f29ad342f60\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.358671 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gdg4c\" (UniqueName: \"kubernetes.io/projected/b419ccfb-cd20-4b72-8f3a-48ce9ea2c991-kube-api-access-gdg4c\") pod \"glance-default-internal-api-0\" (UID: \"b419ccfb-cd20-4b72-8f3a-48ce9ea2c991\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.360637 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b419ccfb-cd20-4b72-8f3a-48ce9ea2c991-config-data\") pod \"glance-default-internal-api-0\" (UID: \"b419ccfb-cd20-4b72-8f3a-48ce9ea2c991\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.364731 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5881b3da-d3ac-4168-a33c-0f29ad342f60-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"5881b3da-d3ac-4168-a33c-0f29ad342f60\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.368499 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jwlv9\" (UniqueName: \"kubernetes.io/projected/5881b3da-d3ac-4168-a33c-0f29ad342f60-kube-api-access-jwlv9\") pod \"glance-default-external-api-0\" (UID: \"5881b3da-d3ac-4168-a33c-0f29ad342f60\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 
17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.376734 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"5881b3da-d3ac-4168-a33c-0f29ad342f60\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.382711 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"b419ccfb-cd20-4b72-8f3a-48ce9ea2c991\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.386436 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.417050 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.470117 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-db-sync-b22j5" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.529592 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-db-sync-hsc9c" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.544209 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-k66gc" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.547876 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t46kl\" (UniqueName: \"kubernetes.io/projected/4571f178-ad12-4a67-a9a4-0e2cca1e2e6f-kube-api-access-t46kl\") pod \"4571f178-ad12-4a67-a9a4-0e2cca1e2e6f\" (UID: \"4571f178-ad12-4a67-a9a4-0e2cca1e2e6f\") " Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.547956 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4571f178-ad12-4a67-a9a4-0e2cca1e2e6f-combined-ca-bundle\") pod \"4571f178-ad12-4a67-a9a4-0e2cca1e2e6f\" (UID: \"4571f178-ad12-4a67-a9a4-0e2cca1e2e6f\") " Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.548119 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4571f178-ad12-4a67-a9a4-0e2cca1e2e6f-config-data\") pod \"4571f178-ad12-4a67-a9a4-0e2cca1e2e6f\" (UID: \"4571f178-ad12-4a67-a9a4-0e2cca1e2e6f\") " Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.548218 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4571f178-ad12-4a67-a9a4-0e2cca1e2e6f-logs\") pod \"4571f178-ad12-4a67-a9a4-0e2cca1e2e6f\" (UID: \"4571f178-ad12-4a67-a9a4-0e2cca1e2e6f\") " Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.548318 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4571f178-ad12-4a67-a9a4-0e2cca1e2e6f-scripts\") pod \"4571f178-ad12-4a67-a9a4-0e2cca1e2e6f\" (UID: \"4571f178-ad12-4a67-a9a4-0e2cca1e2e6f\") " Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.551040 4558 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4571f178-ad12-4a67-a9a4-0e2cca1e2e6f-logs" (OuterVolumeSpecName: "logs") pod "4571f178-ad12-4a67-a9a4-0e2cca1e2e6f" (UID: "4571f178-ad12-4a67-a9a4-0e2cca1e2e6f"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.556851 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4571f178-ad12-4a67-a9a4-0e2cca1e2e6f-kube-api-access-t46kl" (OuterVolumeSpecName: "kube-api-access-t46kl") pod "4571f178-ad12-4a67-a9a4-0e2cca1e2e6f" (UID: "4571f178-ad12-4a67-a9a4-0e2cca1e2e6f"). InnerVolumeSpecName "kube-api-access-t46kl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.564217 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4571f178-ad12-4a67-a9a4-0e2cca1e2e6f-scripts" (OuterVolumeSpecName: "scripts") pod "4571f178-ad12-4a67-a9a4-0e2cca1e2e6f" (UID: "4571f178-ad12-4a67-a9a4-0e2cca1e2e6f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.587651 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4571f178-ad12-4a67-a9a4-0e2cca1e2e6f-config-data" (OuterVolumeSpecName: "config-data") pod "4571f178-ad12-4a67-a9a4-0e2cca1e2e6f" (UID: "4571f178-ad12-4a67-a9a4-0e2cca1e2e6f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.587752 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4571f178-ad12-4a67-a9a4-0e2cca1e2e6f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4571f178-ad12-4a67-a9a4-0e2cca1e2e6f" (UID: "4571f178-ad12-4a67-a9a4-0e2cca1e2e6f"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.614833 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="02846c59-73e4-4876-b95c-a939a3b914bc" path="/var/lib/kubelet/pods/02846c59-73e4-4876-b95c-a939a3b914bc/volumes" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.616007 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6879da7c-dbb2-4843-a94b-a24dc6aa6cc9" path="/var/lib/kubelet/pods/6879da7c-dbb2-4843-a94b-a24dc6aa6cc9/volumes" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.656693 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d89e519c-e68a-48fc-8a7d-09d4d094d251-combined-ca-bundle\") pod \"d89e519c-e68a-48fc-8a7d-09d4d094d251\" (UID: \"d89e519c-e68a-48fc-8a7d-09d4d094d251\") " Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.656758 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0353f1c5-452b-413c-981b-84e774d099eb-scripts\") pod \"0353f1c5-452b-413c-981b-84e774d099eb\" (UID: \"0353f1c5-452b-413c-981b-84e774d099eb\") " Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.656801 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d68lr\" (UniqueName: \"kubernetes.io/projected/d89e519c-e68a-48fc-8a7d-09d4d094d251-kube-api-access-d68lr\") pod \"d89e519c-e68a-48fc-8a7d-09d4d094d251\" (UID: \"d89e519c-e68a-48fc-8a7d-09d4d094d251\") " Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.656881 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/d89e519c-e68a-48fc-8a7d-09d4d094d251-db-sync-config-data\") pod \"d89e519c-e68a-48fc-8a7d-09d4d094d251\" (UID: \"d89e519c-e68a-48fc-8a7d-09d4d094d251\") " Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.656934 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-49cn5\" (UniqueName: \"kubernetes.io/projected/0353f1c5-452b-413c-981b-84e774d099eb-kube-api-access-49cn5\") pod \"0353f1c5-452b-413c-981b-84e774d099eb\" (UID: \"0353f1c5-452b-413c-981b-84e774d099eb\") " Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.656976 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0353f1c5-452b-413c-981b-84e774d099eb-fernet-keys\") pod \"0353f1c5-452b-413c-981b-84e774d099eb\" (UID: \"0353f1c5-452b-413c-981b-84e774d099eb\") " Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.657010 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/0353f1c5-452b-413c-981b-84e774d099eb-credential-keys\") pod \"0353f1c5-452b-413c-981b-84e774d099eb\" (UID: \"0353f1c5-452b-413c-981b-84e774d099eb\") " Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.657038 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0353f1c5-452b-413c-981b-84e774d099eb-combined-ca-bundle\") pod \"0353f1c5-452b-413c-981b-84e774d099eb\" (UID: \"0353f1c5-452b-413c-981b-84e774d099eb\") " Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.657078 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/0353f1c5-452b-413c-981b-84e774d099eb-config-data\") pod \"0353f1c5-452b-413c-981b-84e774d099eb\" (UID: \"0353f1c5-452b-413c-981b-84e774d099eb\") " Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.657432 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t46kl\" (UniqueName: \"kubernetes.io/projected/4571f178-ad12-4a67-a9a4-0e2cca1e2e6f-kube-api-access-t46kl\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.657450 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4571f178-ad12-4a67-a9a4-0e2cca1e2e6f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.657461 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4571f178-ad12-4a67-a9a4-0e2cca1e2e6f-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.657470 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4571f178-ad12-4a67-a9a4-0e2cca1e2e6f-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.657479 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4571f178-ad12-4a67-a9a4-0e2cca1e2e6f-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.666496 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d89e519c-e68a-48fc-8a7d-09d4d094d251-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "d89e519c-e68a-48fc-8a7d-09d4d094d251" (UID: "d89e519c-e68a-48fc-8a7d-09d4d094d251"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.666495 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0353f1c5-452b-413c-981b-84e774d099eb-scripts" (OuterVolumeSpecName: "scripts") pod "0353f1c5-452b-413c-981b-84e774d099eb" (UID: "0353f1c5-452b-413c-981b-84e774d099eb"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.666628 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0353f1c5-452b-413c-981b-84e774d099eb-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "0353f1c5-452b-413c-981b-84e774d099eb" (UID: "0353f1c5-452b-413c-981b-84e774d099eb"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.667115 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d89e519c-e68a-48fc-8a7d-09d4d094d251-kube-api-access-d68lr" (OuterVolumeSpecName: "kube-api-access-d68lr") pod "d89e519c-e68a-48fc-8a7d-09d4d094d251" (UID: "d89e519c-e68a-48fc-8a7d-09d4d094d251"). InnerVolumeSpecName "kube-api-access-d68lr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.667220 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0353f1c5-452b-413c-981b-84e774d099eb-kube-api-access-49cn5" (OuterVolumeSpecName: "kube-api-access-49cn5") pod "0353f1c5-452b-413c-981b-84e774d099eb" (UID: "0353f1c5-452b-413c-981b-84e774d099eb"). InnerVolumeSpecName "kube-api-access-49cn5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.667946 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0353f1c5-452b-413c-981b-84e774d099eb-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "0353f1c5-452b-413c-981b-84e774d099eb" (UID: "0353f1c5-452b-413c-981b-84e774d099eb"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.684773 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d89e519c-e68a-48fc-8a7d-09d4d094d251-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d89e519c-e68a-48fc-8a7d-09d4d094d251" (UID: "d89e519c-e68a-48fc-8a7d-09d4d094d251"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.685415 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0353f1c5-452b-413c-981b-84e774d099eb-config-data" (OuterVolumeSpecName: "config-data") pod "0353f1c5-452b-413c-981b-84e774d099eb" (UID: "0353f1c5-452b-413c-981b-84e774d099eb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.690351 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0353f1c5-452b-413c-981b-84e774d099eb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0353f1c5-452b-413c-981b-84e774d099eb" (UID: "0353f1c5-452b-413c-981b-84e774d099eb"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.759159 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d89e519c-e68a-48fc-8a7d-09d4d094d251-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.759307 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0353f1c5-452b-413c-981b-84e774d099eb-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.759368 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d68lr\" (UniqueName: \"kubernetes.io/projected/d89e519c-e68a-48fc-8a7d-09d4d094d251-kube-api-access-d68lr\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.759443 4558 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/d89e519c-e68a-48fc-8a7d-09d4d094d251-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.759499 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-49cn5\" (UniqueName: \"kubernetes.io/projected/0353f1c5-452b-413c-981b-84e774d099eb-kube-api-access-49cn5\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.759548 4558 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/0353f1c5-452b-413c-981b-84e774d099eb-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.759609 4558 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/0353f1c5-452b-413c-981b-84e774d099eb-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.759683 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0353f1c5-452b-413c-981b-84e774d099eb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.759736 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0353f1c5-452b-413c-981b-84e774d099eb-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.936822 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:55:06 crc kubenswrapper[4558]: I0120 17:55:06.994240 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.019401 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-db-sync-hsc9c" event={"ID":"d89e519c-e68a-48fc-8a7d-09d4d094d251","Type":"ContainerDied","Data":"3769fd0c40c51219f834b481d530c1a75ed9a4a049437ce7ffc7841261b16064"} Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.019446 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3769fd0c40c51219f834b481d530c1a75ed9a4a049437ce7ffc7841261b16064" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.019530 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-db-sync-hsc9c" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.021900 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-k66gc" event={"ID":"0353f1c5-452b-413c-981b-84e774d099eb","Type":"ContainerDied","Data":"3290922e6842e4b722e2f0a0474bd66c94db36b62b9fa48501a785c8fe30e3cd"} Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.021941 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3290922e6842e4b722e2f0a0474bd66c94db36b62b9fa48501a785c8fe30e3cd" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.022005 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-k66gc" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.026541 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-db-sync-b22j5" event={"ID":"4571f178-ad12-4a67-a9a4-0e2cca1e2e6f","Type":"ContainerDied","Data":"7cc48b009c312940d54d312faa4f62c78c13d8b28dd2acfec2a6e8362186fd98"} Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.026603 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7cc48b009c312940d54d312faa4f62c78c13d8b28dd2acfec2a6e8362186fd98" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.026687 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-db-sync-b22j5" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.027885 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"b419ccfb-cd20-4b72-8f3a-48ce9ea2c991","Type":"ContainerStarted","Data":"1598d0d352e09b5cbd5494defb385710e095d95cf101858e6ee7ee8d1487d114"} Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.121990 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-k66gc"] Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.132474 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-k66gc"] Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.177142 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-worker-6c8cb46bb9-k2j62"] Jan 20 17:55:07 crc kubenswrapper[4558]: E0120 17:55:07.177519 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4571f178-ad12-4a67-a9a4-0e2cca1e2e6f" containerName="placement-db-sync" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.177534 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4571f178-ad12-4a67-a9a4-0e2cca1e2e6f" containerName="placement-db-sync" Jan 20 17:55:07 crc kubenswrapper[4558]: E0120 17:55:07.177553 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d89e519c-e68a-48fc-8a7d-09d4d094d251" containerName="barbican-db-sync" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.177560 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d89e519c-e68a-48fc-8a7d-09d4d094d251" containerName="barbican-db-sync" Jan 20 17:55:07 crc kubenswrapper[4558]: E0120 17:55:07.177573 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0353f1c5-452b-413c-981b-84e774d099eb" containerName="keystone-bootstrap" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.177580 4558 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="0353f1c5-452b-413c-981b-84e774d099eb" containerName="keystone-bootstrap" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.177764 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="4571f178-ad12-4a67-a9a4-0e2cca1e2e6f" containerName="placement-db-sync" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.177782 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="0353f1c5-452b-413c-981b-84e774d099eb" containerName="keystone-bootstrap" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.177797 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d89e519c-e68a-48fc-8a7d-09d4d094d251" containerName="barbican-db-sync" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.178654 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-6c8cb46bb9-k2j62" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.181268 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-config-data" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.181491 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-worker-config-data" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.181684 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-barbican-dockercfg-wxw9v" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.227581 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-worker-6c8cb46bb9-k2j62"] Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.227630 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-glvsk"] Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.228872 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-glvsk" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.233519 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-keystone-dockercfg-bqxt2" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.238766 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"osp-secret" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.239045 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.239562 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-config-data" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.239719 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-scripts" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.262089 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-glvsk"] Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.289660 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8a5251c1-60bc-45d7-8524-fd654c09505b-config-data-custom\") pod \"barbican-worker-6c8cb46bb9-k2j62\" (UID: \"8a5251c1-60bc-45d7-8524-fd654c09505b\") " pod="openstack-kuttl-tests/barbican-worker-6c8cb46bb9-k2j62" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.289802 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a5251c1-60bc-45d7-8524-fd654c09505b-config-data\") pod \"barbican-worker-6c8cb46bb9-k2j62\" (UID: \"8a5251c1-60bc-45d7-8524-fd654c09505b\") " pod="openstack-kuttl-tests/barbican-worker-6c8cb46bb9-k2j62" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.289847 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a5251c1-60bc-45d7-8524-fd654c09505b-combined-ca-bundle\") pod \"barbican-worker-6c8cb46bb9-k2j62\" (UID: \"8a5251c1-60bc-45d7-8524-fd654c09505b\") " pod="openstack-kuttl-tests/barbican-worker-6c8cb46bb9-k2j62" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.289896 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8a5251c1-60bc-45d7-8524-fd654c09505b-logs\") pod \"barbican-worker-6c8cb46bb9-k2j62\" (UID: \"8a5251c1-60bc-45d7-8524-fd654c09505b\") " pod="openstack-kuttl-tests/barbican-worker-6c8cb46bb9-k2j62" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.289995 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wrpg6\" (UniqueName: \"kubernetes.io/projected/8a5251c1-60bc-45d7-8524-fd654c09505b-kube-api-access-wrpg6\") pod \"barbican-worker-6c8cb46bb9-k2j62\" (UID: \"8a5251c1-60bc-45d7-8524-fd654c09505b\") " pod="openstack-kuttl-tests/barbican-worker-6c8cb46bb9-k2j62" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.311288 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-58949d94f4-gxkkc"] Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.314357 4558 util.go:30] "No sandbox for pod can be 
found. Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-58949d94f4-gxkkc" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.319544 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-keystone-listener-config-data" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.342904 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/placement-7c7cc56d94-qptzl"] Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.344553 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-7c7cc56d94-qptzl" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.347736 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"placement-scripts" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.353211 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"placement-placement-dockercfg-whsds" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.353397 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"placement-config-data" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.353585 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-placement-public-svc" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.353696 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-placement-internal-svc" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.391767 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-7c7cc56d94-qptzl"] Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.392930 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d739bf90-bc99-4f8a-870e-98026ba6a53c-config-data\") pod \"keystone-bootstrap-glvsk\" (UID: \"d739bf90-bc99-4f8a-870e-98026ba6a53c\") " pod="openstack-kuttl-tests/keystone-bootstrap-glvsk" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.393035 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a5251c1-60bc-45d7-8524-fd654c09505b-config-data\") pod \"barbican-worker-6c8cb46bb9-k2j62\" (UID: \"8a5251c1-60bc-45d7-8524-fd654c09505b\") " pod="openstack-kuttl-tests/barbican-worker-6c8cb46bb9-k2j62" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.393074 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-krcnj\" (UniqueName: \"kubernetes.io/projected/d739bf90-bc99-4f8a-870e-98026ba6a53c-kube-api-access-krcnj\") pod \"keystone-bootstrap-glvsk\" (UID: \"d739bf90-bc99-4f8a-870e-98026ba6a53c\") " pod="openstack-kuttl-tests/keystone-bootstrap-glvsk" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.393115 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a5251c1-60bc-45d7-8524-fd654c09505b-combined-ca-bundle\") pod \"barbican-worker-6c8cb46bb9-k2j62\" (UID: \"8a5251c1-60bc-45d7-8524-fd654c09505b\") " pod="openstack-kuttl-tests/barbican-worker-6c8cb46bb9-k2j62" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.393145 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d739bf90-bc99-4f8a-870e-98026ba6a53c-fernet-keys\") pod \"keystone-bootstrap-glvsk\" (UID: \"d739bf90-bc99-4f8a-870e-98026ba6a53c\") " pod="openstack-kuttl-tests/keystone-bootstrap-glvsk" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.393199 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8a5251c1-60bc-45d7-8524-fd654c09505b-logs\") pod \"barbican-worker-6c8cb46bb9-k2j62\" (UID: \"8a5251c1-60bc-45d7-8524-fd654c09505b\") " pod="openstack-kuttl-tests/barbican-worker-6c8cb46bb9-k2j62" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.393280 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d739bf90-bc99-4f8a-870e-98026ba6a53c-combined-ca-bundle\") pod \"keystone-bootstrap-glvsk\" (UID: \"d739bf90-bc99-4f8a-870e-98026ba6a53c\") " pod="openstack-kuttl-tests/keystone-bootstrap-glvsk" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.393303 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wrpg6\" (UniqueName: \"kubernetes.io/projected/8a5251c1-60bc-45d7-8524-fd654c09505b-kube-api-access-wrpg6\") pod \"barbican-worker-6c8cb46bb9-k2j62\" (UID: \"8a5251c1-60bc-45d7-8524-fd654c09505b\") " pod="openstack-kuttl-tests/barbican-worker-6c8cb46bb9-k2j62" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.393358 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d739bf90-bc99-4f8a-870e-98026ba6a53c-scripts\") pod \"keystone-bootstrap-glvsk\" (UID: \"d739bf90-bc99-4f8a-870e-98026ba6a53c\") " pod="openstack-kuttl-tests/keystone-bootstrap-glvsk" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.393421 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d739bf90-bc99-4f8a-870e-98026ba6a53c-credential-keys\") pod \"keystone-bootstrap-glvsk\" (UID: \"d739bf90-bc99-4f8a-870e-98026ba6a53c\") " pod="openstack-kuttl-tests/keystone-bootstrap-glvsk" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.393469 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8a5251c1-60bc-45d7-8524-fd654c09505b-config-data-custom\") pod \"barbican-worker-6c8cb46bb9-k2j62\" (UID: \"8a5251c1-60bc-45d7-8524-fd654c09505b\") " pod="openstack-kuttl-tests/barbican-worker-6c8cb46bb9-k2j62" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.396067 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8a5251c1-60bc-45d7-8524-fd654c09505b-logs\") pod \"barbican-worker-6c8cb46bb9-k2j62\" (UID: \"8a5251c1-60bc-45d7-8524-fd654c09505b\") " pod="openstack-kuttl-tests/barbican-worker-6c8cb46bb9-k2j62" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.415399 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-58949d94f4-gxkkc"] Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.415414 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a5251c1-60bc-45d7-8524-fd654c09505b-combined-ca-bundle\") pod 
\"barbican-worker-6c8cb46bb9-k2j62\" (UID: \"8a5251c1-60bc-45d7-8524-fd654c09505b\") " pod="openstack-kuttl-tests/barbican-worker-6c8cb46bb9-k2j62" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.418779 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a5251c1-60bc-45d7-8524-fd654c09505b-config-data\") pod \"barbican-worker-6c8cb46bb9-k2j62\" (UID: \"8a5251c1-60bc-45d7-8524-fd654c09505b\") " pod="openstack-kuttl-tests/barbican-worker-6c8cb46bb9-k2j62" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.424141 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8a5251c1-60bc-45d7-8524-fd654c09505b-config-data-custom\") pod \"barbican-worker-6c8cb46bb9-k2j62\" (UID: \"8a5251c1-60bc-45d7-8524-fd654c09505b\") " pod="openstack-kuttl-tests/barbican-worker-6c8cb46bb9-k2j62" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.427609 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wrpg6\" (UniqueName: \"kubernetes.io/projected/8a5251c1-60bc-45d7-8524-fd654c09505b-kube-api-access-wrpg6\") pod \"barbican-worker-6c8cb46bb9-k2j62\" (UID: \"8a5251c1-60bc-45d7-8524-fd654c09505b\") " pod="openstack-kuttl-tests/barbican-worker-6c8cb46bb9-k2j62" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.433724 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-api-7ddd949596-fq6mn"] Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.435277 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-api-7ddd949596-fq6mn" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.440208 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-api-7ddd949596-fq6mn"] Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.441421 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-api-config-data" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.496999 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d739bf90-bc99-4f8a-870e-98026ba6a53c-combined-ca-bundle\") pod \"keystone-bootstrap-glvsk\" (UID: \"d739bf90-bc99-4f8a-870e-98026ba6a53c\") " pod="openstack-kuttl-tests/keystone-bootstrap-glvsk" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.497047 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50829e14-401d-4958-9ef7-e2a2ef1f4b32-combined-ca-bundle\") pod \"placement-7c7cc56d94-qptzl\" (UID: \"50829e14-401d-4958-9ef7-e2a2ef1f4b32\") " pod="openstack-kuttl-tests/placement-7c7cc56d94-qptzl" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.497102 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d739bf90-bc99-4f8a-870e-98026ba6a53c-scripts\") pod \"keystone-bootstrap-glvsk\" (UID: \"d739bf90-bc99-4f8a-870e-98026ba6a53c\") " pod="openstack-kuttl-tests/keystone-bootstrap-glvsk" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.497137 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2bqq5\" (UniqueName: 
\"kubernetes.io/projected/a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91-kube-api-access-2bqq5\") pod \"barbican-keystone-listener-58949d94f4-gxkkc\" (UID: \"a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91\") " pod="openstack-kuttl-tests/barbican-keystone-listener-58949d94f4-gxkkc" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.497157 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91-logs\") pod \"barbican-keystone-listener-58949d94f4-gxkkc\" (UID: \"a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91\") " pod="openstack-kuttl-tests/barbican-keystone-listener-58949d94f4-gxkkc" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.497200 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d739bf90-bc99-4f8a-870e-98026ba6a53c-credential-keys\") pod \"keystone-bootstrap-glvsk\" (UID: \"d739bf90-bc99-4f8a-870e-98026ba6a53c\") " pod="openstack-kuttl-tests/keystone-bootstrap-glvsk" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.497228 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/50829e14-401d-4958-9ef7-e2a2ef1f4b32-internal-tls-certs\") pod \"placement-7c7cc56d94-qptzl\" (UID: \"50829e14-401d-4958-9ef7-e2a2ef1f4b32\") " pod="openstack-kuttl-tests/placement-7c7cc56d94-qptzl" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.497246 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50829e14-401d-4958-9ef7-e2a2ef1f4b32-config-data\") pod \"placement-7c7cc56d94-qptzl\" (UID: \"50829e14-401d-4958-9ef7-e2a2ef1f4b32\") " pod="openstack-kuttl-tests/placement-7c7cc56d94-qptzl" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.497279 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d739bf90-bc99-4f8a-870e-98026ba6a53c-config-data\") pod \"keystone-bootstrap-glvsk\" (UID: \"d739bf90-bc99-4f8a-870e-98026ba6a53c\") " pod="openstack-kuttl-tests/keystone-bootstrap-glvsk" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.497300 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91-combined-ca-bundle\") pod \"barbican-keystone-listener-58949d94f4-gxkkc\" (UID: \"a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91\") " pod="openstack-kuttl-tests/barbican-keystone-listener-58949d94f4-gxkkc" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.497344 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-krcnj\" (UniqueName: \"kubernetes.io/projected/d739bf90-bc99-4f8a-870e-98026ba6a53c-kube-api-access-krcnj\") pod \"keystone-bootstrap-glvsk\" (UID: \"d739bf90-bc99-4f8a-870e-98026ba6a53c\") " pod="openstack-kuttl-tests/keystone-bootstrap-glvsk" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.497371 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wtcfn\" (UniqueName: \"kubernetes.io/projected/50829e14-401d-4958-9ef7-e2a2ef1f4b32-kube-api-access-wtcfn\") pod \"placement-7c7cc56d94-qptzl\" (UID: \"50829e14-401d-4958-9ef7-e2a2ef1f4b32\") " 
pod="openstack-kuttl-tests/placement-7c7cc56d94-qptzl" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.497390 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d739bf90-bc99-4f8a-870e-98026ba6a53c-fernet-keys\") pod \"keystone-bootstrap-glvsk\" (UID: \"d739bf90-bc99-4f8a-870e-98026ba6a53c\") " pod="openstack-kuttl-tests/keystone-bootstrap-glvsk" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.497414 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/50829e14-401d-4958-9ef7-e2a2ef1f4b32-logs\") pod \"placement-7c7cc56d94-qptzl\" (UID: \"50829e14-401d-4958-9ef7-e2a2ef1f4b32\") " pod="openstack-kuttl-tests/placement-7c7cc56d94-qptzl" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.497438 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91-config-data\") pod \"barbican-keystone-listener-58949d94f4-gxkkc\" (UID: \"a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91\") " pod="openstack-kuttl-tests/barbican-keystone-listener-58949d94f4-gxkkc" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.497455 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/50829e14-401d-4958-9ef7-e2a2ef1f4b32-public-tls-certs\") pod \"placement-7c7cc56d94-qptzl\" (UID: \"50829e14-401d-4958-9ef7-e2a2ef1f4b32\") " pod="openstack-kuttl-tests/placement-7c7cc56d94-qptzl" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.497472 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91-config-data-custom\") pod \"barbican-keystone-listener-58949d94f4-gxkkc\" (UID: \"a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91\") " pod="openstack-kuttl-tests/barbican-keystone-listener-58949d94f4-gxkkc" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.497490 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/50829e14-401d-4958-9ef7-e2a2ef1f4b32-scripts\") pod \"placement-7c7cc56d94-qptzl\" (UID: \"50829e14-401d-4958-9ef7-e2a2ef1f4b32\") " pod="openstack-kuttl-tests/placement-7c7cc56d94-qptzl" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.501589 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d739bf90-bc99-4f8a-870e-98026ba6a53c-combined-ca-bundle\") pod \"keystone-bootstrap-glvsk\" (UID: \"d739bf90-bc99-4f8a-870e-98026ba6a53c\") " pod="openstack-kuttl-tests/keystone-bootstrap-glvsk" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.502656 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d739bf90-bc99-4f8a-870e-98026ba6a53c-fernet-keys\") pod \"keystone-bootstrap-glvsk\" (UID: \"d739bf90-bc99-4f8a-870e-98026ba6a53c\") " pod="openstack-kuttl-tests/keystone-bootstrap-glvsk" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.502957 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: 
\"kubernetes.io/secret/d739bf90-bc99-4f8a-870e-98026ba6a53c-credential-keys\") pod \"keystone-bootstrap-glvsk\" (UID: \"d739bf90-bc99-4f8a-870e-98026ba6a53c\") " pod="openstack-kuttl-tests/keystone-bootstrap-glvsk" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.503264 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d739bf90-bc99-4f8a-870e-98026ba6a53c-config-data\") pod \"keystone-bootstrap-glvsk\" (UID: \"d739bf90-bc99-4f8a-870e-98026ba6a53c\") " pod="openstack-kuttl-tests/keystone-bootstrap-glvsk" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.509606 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d739bf90-bc99-4f8a-870e-98026ba6a53c-scripts\") pod \"keystone-bootstrap-glvsk\" (UID: \"d739bf90-bc99-4f8a-870e-98026ba6a53c\") " pod="openstack-kuttl-tests/keystone-bootstrap-glvsk" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.513204 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-krcnj\" (UniqueName: \"kubernetes.io/projected/d739bf90-bc99-4f8a-870e-98026ba6a53c-kube-api-access-krcnj\") pod \"keystone-bootstrap-glvsk\" (UID: \"d739bf90-bc99-4f8a-870e-98026ba6a53c\") " pod="openstack-kuttl-tests/keystone-bootstrap-glvsk" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.593363 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-6c8cb46bb9-k2j62" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.599655 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2bqq5\" (UniqueName: \"kubernetes.io/projected/a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91-kube-api-access-2bqq5\") pod \"barbican-keystone-listener-58949d94f4-gxkkc\" (UID: \"a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91\") " pod="openstack-kuttl-tests/barbican-keystone-listener-58949d94f4-gxkkc" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.599700 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91-logs\") pod \"barbican-keystone-listener-58949d94f4-gxkkc\" (UID: \"a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91\") " pod="openstack-kuttl-tests/barbican-keystone-listener-58949d94f4-gxkkc" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.599742 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/50829e14-401d-4958-9ef7-e2a2ef1f4b32-internal-tls-certs\") pod \"placement-7c7cc56d94-qptzl\" (UID: \"50829e14-401d-4958-9ef7-e2a2ef1f4b32\") " pod="openstack-kuttl-tests/placement-7c7cc56d94-qptzl" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.599763 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50829e14-401d-4958-9ef7-e2a2ef1f4b32-config-data\") pod \"placement-7c7cc56d94-qptzl\" (UID: \"50829e14-401d-4958-9ef7-e2a2ef1f4b32\") " pod="openstack-kuttl-tests/placement-7c7cc56d94-qptzl" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.599808 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91-combined-ca-bundle\") pod \"barbican-keystone-listener-58949d94f4-gxkkc\" (UID: 
\"a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91\") " pod="openstack-kuttl-tests/barbican-keystone-listener-58949d94f4-gxkkc" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.599835 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cb5bdda0-3564-422d-8b42-24546cf568b1-config-data\") pod \"barbican-api-7ddd949596-fq6mn\" (UID: \"cb5bdda0-3564-422d-8b42-24546cf568b1\") " pod="openstack-kuttl-tests/barbican-api-7ddd949596-fq6mn" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.599883 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb5bdda0-3564-422d-8b42-24546cf568b1-combined-ca-bundle\") pod \"barbican-api-7ddd949596-fq6mn\" (UID: \"cb5bdda0-3564-422d-8b42-24546cf568b1\") " pod="openstack-kuttl-tests/barbican-api-7ddd949596-fq6mn" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.599905 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cb5bdda0-3564-422d-8b42-24546cf568b1-config-data-custom\") pod \"barbican-api-7ddd949596-fq6mn\" (UID: \"cb5bdda0-3564-422d-8b42-24546cf568b1\") " pod="openstack-kuttl-tests/barbican-api-7ddd949596-fq6mn" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.599941 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wtcfn\" (UniqueName: \"kubernetes.io/projected/50829e14-401d-4958-9ef7-e2a2ef1f4b32-kube-api-access-wtcfn\") pod \"placement-7c7cc56d94-qptzl\" (UID: \"50829e14-401d-4958-9ef7-e2a2ef1f4b32\") " pod="openstack-kuttl-tests/placement-7c7cc56d94-qptzl" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.599978 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/50829e14-401d-4958-9ef7-e2a2ef1f4b32-logs\") pod \"placement-7c7cc56d94-qptzl\" (UID: \"50829e14-401d-4958-9ef7-e2a2ef1f4b32\") " pod="openstack-kuttl-tests/placement-7c7cc56d94-qptzl" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.600033 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9sgqw\" (UniqueName: \"kubernetes.io/projected/cb5bdda0-3564-422d-8b42-24546cf568b1-kube-api-access-9sgqw\") pod \"barbican-api-7ddd949596-fq6mn\" (UID: \"cb5bdda0-3564-422d-8b42-24546cf568b1\") " pod="openstack-kuttl-tests/barbican-api-7ddd949596-fq6mn" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.600101 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91-config-data\") pod \"barbican-keystone-listener-58949d94f4-gxkkc\" (UID: \"a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91\") " pod="openstack-kuttl-tests/barbican-keystone-listener-58949d94f4-gxkkc" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.600118 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91-logs\") pod \"barbican-keystone-listener-58949d94f4-gxkkc\" (UID: \"a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91\") " pod="openstack-kuttl-tests/barbican-keystone-listener-58949d94f4-gxkkc" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.600125 4558 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/50829e14-401d-4958-9ef7-e2a2ef1f4b32-public-tls-certs\") pod \"placement-7c7cc56d94-qptzl\" (UID: \"50829e14-401d-4958-9ef7-e2a2ef1f4b32\") " pod="openstack-kuttl-tests/placement-7c7cc56d94-qptzl" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.600219 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91-config-data-custom\") pod \"barbican-keystone-listener-58949d94f4-gxkkc\" (UID: \"a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91\") " pod="openstack-kuttl-tests/barbican-keystone-listener-58949d94f4-gxkkc" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.600251 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/50829e14-401d-4958-9ef7-e2a2ef1f4b32-scripts\") pod \"placement-7c7cc56d94-qptzl\" (UID: \"50829e14-401d-4958-9ef7-e2a2ef1f4b32\") " pod="openstack-kuttl-tests/placement-7c7cc56d94-qptzl" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.600307 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50829e14-401d-4958-9ef7-e2a2ef1f4b32-combined-ca-bundle\") pod \"placement-7c7cc56d94-qptzl\" (UID: \"50829e14-401d-4958-9ef7-e2a2ef1f4b32\") " pod="openstack-kuttl-tests/placement-7c7cc56d94-qptzl" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.600343 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/50829e14-401d-4958-9ef7-e2a2ef1f4b32-logs\") pod \"placement-7c7cc56d94-qptzl\" (UID: \"50829e14-401d-4958-9ef7-e2a2ef1f4b32\") " pod="openstack-kuttl-tests/placement-7c7cc56d94-qptzl" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.600364 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cb5bdda0-3564-422d-8b42-24546cf568b1-logs\") pod \"barbican-api-7ddd949596-fq6mn\" (UID: \"cb5bdda0-3564-422d-8b42-24546cf568b1\") " pod="openstack-kuttl-tests/barbican-api-7ddd949596-fq6mn" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.603886 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50829e14-401d-4958-9ef7-e2a2ef1f4b32-config-data\") pod \"placement-7c7cc56d94-qptzl\" (UID: \"50829e14-401d-4958-9ef7-e2a2ef1f4b32\") " pod="openstack-kuttl-tests/placement-7c7cc56d94-qptzl" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.603924 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/50829e14-401d-4958-9ef7-e2a2ef1f4b32-public-tls-certs\") pod \"placement-7c7cc56d94-qptzl\" (UID: \"50829e14-401d-4958-9ef7-e2a2ef1f4b32\") " pod="openstack-kuttl-tests/placement-7c7cc56d94-qptzl" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.604483 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91-config-data\") pod \"barbican-keystone-listener-58949d94f4-gxkkc\" (UID: \"a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91\") " pod="openstack-kuttl-tests/barbican-keystone-listener-58949d94f4-gxkkc" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.606665 4558 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91-combined-ca-bundle\") pod \"barbican-keystone-listener-58949d94f4-gxkkc\" (UID: \"a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91\") " pod="openstack-kuttl-tests/barbican-keystone-listener-58949d94f4-gxkkc" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.606791 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-glvsk" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.607370 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91-config-data-custom\") pod \"barbican-keystone-listener-58949d94f4-gxkkc\" (UID: \"a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91\") " pod="openstack-kuttl-tests/barbican-keystone-listener-58949d94f4-gxkkc" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.612397 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50829e14-401d-4958-9ef7-e2a2ef1f4b32-combined-ca-bundle\") pod \"placement-7c7cc56d94-qptzl\" (UID: \"50829e14-401d-4958-9ef7-e2a2ef1f4b32\") " pod="openstack-kuttl-tests/placement-7c7cc56d94-qptzl" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.615202 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2bqq5\" (UniqueName: \"kubernetes.io/projected/a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91-kube-api-access-2bqq5\") pod \"barbican-keystone-listener-58949d94f4-gxkkc\" (UID: \"a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91\") " pod="openstack-kuttl-tests/barbican-keystone-listener-58949d94f4-gxkkc" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.616254 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/50829e14-401d-4958-9ef7-e2a2ef1f4b32-scripts\") pod \"placement-7c7cc56d94-qptzl\" (UID: \"50829e14-401d-4958-9ef7-e2a2ef1f4b32\") " pod="openstack-kuttl-tests/placement-7c7cc56d94-qptzl" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.616389 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/50829e14-401d-4958-9ef7-e2a2ef1f4b32-internal-tls-certs\") pod \"placement-7c7cc56d94-qptzl\" (UID: \"50829e14-401d-4958-9ef7-e2a2ef1f4b32\") " pod="openstack-kuttl-tests/placement-7c7cc56d94-qptzl" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.616521 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wtcfn\" (UniqueName: \"kubernetes.io/projected/50829e14-401d-4958-9ef7-e2a2ef1f4b32-kube-api-access-wtcfn\") pod \"placement-7c7cc56d94-qptzl\" (UID: \"50829e14-401d-4958-9ef7-e2a2ef1f4b32\") " pod="openstack-kuttl-tests/placement-7c7cc56d94-qptzl" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.643002 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-58949d94f4-gxkkc" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.692930 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-7c7cc56d94-qptzl" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.701857 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cb5bdda0-3564-422d-8b42-24546cf568b1-logs\") pod \"barbican-api-7ddd949596-fq6mn\" (UID: \"cb5bdda0-3564-422d-8b42-24546cf568b1\") " pod="openstack-kuttl-tests/barbican-api-7ddd949596-fq6mn" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.702322 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cb5bdda0-3564-422d-8b42-24546cf568b1-config-data\") pod \"barbican-api-7ddd949596-fq6mn\" (UID: \"cb5bdda0-3564-422d-8b42-24546cf568b1\") " pod="openstack-kuttl-tests/barbican-api-7ddd949596-fq6mn" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.702433 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb5bdda0-3564-422d-8b42-24546cf568b1-combined-ca-bundle\") pod \"barbican-api-7ddd949596-fq6mn\" (UID: \"cb5bdda0-3564-422d-8b42-24546cf568b1\") " pod="openstack-kuttl-tests/barbican-api-7ddd949596-fq6mn" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.702460 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cb5bdda0-3564-422d-8b42-24546cf568b1-config-data-custom\") pod \"barbican-api-7ddd949596-fq6mn\" (UID: \"cb5bdda0-3564-422d-8b42-24546cf568b1\") " pod="openstack-kuttl-tests/barbican-api-7ddd949596-fq6mn" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.702551 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9sgqw\" (UniqueName: \"kubernetes.io/projected/cb5bdda0-3564-422d-8b42-24546cf568b1-kube-api-access-9sgqw\") pod \"barbican-api-7ddd949596-fq6mn\" (UID: \"cb5bdda0-3564-422d-8b42-24546cf568b1\") " pod="openstack-kuttl-tests/barbican-api-7ddd949596-fq6mn" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.709368 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cb5bdda0-3564-422d-8b42-24546cf568b1-config-data\") pod \"barbican-api-7ddd949596-fq6mn\" (UID: \"cb5bdda0-3564-422d-8b42-24546cf568b1\") " pod="openstack-kuttl-tests/barbican-api-7ddd949596-fq6mn" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.710394 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cb5bdda0-3564-422d-8b42-24546cf568b1-logs\") pod \"barbican-api-7ddd949596-fq6mn\" (UID: \"cb5bdda0-3564-422d-8b42-24546cf568b1\") " pod="openstack-kuttl-tests/barbican-api-7ddd949596-fq6mn" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.712714 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb5bdda0-3564-422d-8b42-24546cf568b1-combined-ca-bundle\") pod \"barbican-api-7ddd949596-fq6mn\" (UID: \"cb5bdda0-3564-422d-8b42-24546cf568b1\") " pod="openstack-kuttl-tests/barbican-api-7ddd949596-fq6mn" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.716510 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cb5bdda0-3564-422d-8b42-24546cf568b1-config-data-custom\") pod \"barbican-api-7ddd949596-fq6mn\" (UID: 
\"cb5bdda0-3564-422d-8b42-24546cf568b1\") " pod="openstack-kuttl-tests/barbican-api-7ddd949596-fq6mn" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.724049 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9sgqw\" (UniqueName: \"kubernetes.io/projected/cb5bdda0-3564-422d-8b42-24546cf568b1-kube-api-access-9sgqw\") pod \"barbican-api-7ddd949596-fq6mn\" (UID: \"cb5bdda0-3564-422d-8b42-24546cf568b1\") " pod="openstack-kuttl-tests/barbican-api-7ddd949596-fq6mn" Jan 20 17:55:07 crc kubenswrapper[4558]: I0120 17:55:07.871255 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-api-7ddd949596-fq6mn" Jan 20 17:55:08 crc kubenswrapper[4558]: I0120 17:55:08.054238 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"63d78039-84d1-41df-b0dc-3d0b01cc2a61","Type":"ContainerStarted","Data":"e0e7136ff1fc7c45a47afc77af3f0db467ed3608217c443eca310e92d6574564"} Jan 20 17:55:08 crc kubenswrapper[4558]: I0120 17:55:08.054466 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="63d78039-84d1-41df-b0dc-3d0b01cc2a61" containerName="ceilometer-central-agent" containerID="cri-o://266031d8114437a6db868dffef46777ee73c1807d22ff263ddc75880bb58a4d8" gracePeriod=30 Jan 20 17:55:08 crc kubenswrapper[4558]: I0120 17:55:08.055119 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="63d78039-84d1-41df-b0dc-3d0b01cc2a61" containerName="proxy-httpd" containerID="cri-o://e0e7136ff1fc7c45a47afc77af3f0db467ed3608217c443eca310e92d6574564" gracePeriod=30 Jan 20 17:55:08 crc kubenswrapper[4558]: I0120 17:55:08.055196 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="63d78039-84d1-41df-b0dc-3d0b01cc2a61" containerName="sg-core" containerID="cri-o://fb0c76f7efa112c9f84f7b18a860585f8a74dd8aa04b177a266ffe056c7f8929" gracePeriod=30 Jan 20 17:55:08 crc kubenswrapper[4558]: I0120 17:55:08.055243 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="63d78039-84d1-41df-b0dc-3d0b01cc2a61" containerName="ceilometer-notification-agent" containerID="cri-o://ebf1429a28db4257c55ad73cea3715ef4bb80d2a7a864c1cb0908cfbe3578248" gracePeriod=30 Jan 20 17:55:08 crc kubenswrapper[4558]: I0120 17:55:08.055130 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:55:08 crc kubenswrapper[4558]: I0120 17:55:08.061348 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"5881b3da-d3ac-4168-a33c-0f29ad342f60","Type":"ContainerStarted","Data":"2f9349dc89699a650a3a26d019021f394568684a51da796cb0343c7a21d1d6e0"} Jan 20 17:55:08 crc kubenswrapper[4558]: I0120 17:55:08.061384 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"5881b3da-d3ac-4168-a33c-0f29ad342f60","Type":"ContainerStarted","Data":"4f79020f7ad820f4e9c48646626f310cff93a734ca7c416d945fbaa77994559c"} Jan 20 17:55:08 crc kubenswrapper[4558]: I0120 17:55:08.061399 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" 
event={"ID":"b419ccfb-cd20-4b72-8f3a-48ce9ea2c991","Type":"ContainerStarted","Data":"c46f352f9c1ee65a4e358f6c0f82857abddecd43650369939a56f8dd36174cbc"} Jan 20 17:55:08 crc kubenswrapper[4558]: I0120 17:55:08.061400 4558 generic.go:334] "Generic (PLEG): container finished" podID="a8e183b4-694a-4ce9-90e9-1ff11d52ff3c" containerID="72db31604b9af667b1c4e5942704410ff8f1e3af14c2a6fabe1fefc79f363d83" exitCode=0 Jan 20 17:55:08 crc kubenswrapper[4558]: I0120 17:55:08.061413 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-sync-7l4r5" event={"ID":"a8e183b4-694a-4ce9-90e9-1ff11d52ff3c","Type":"ContainerDied","Data":"72db31604b9af667b1c4e5942704410ff8f1e3af14c2a6fabe1fefc79f363d83"} Jan 20 17:55:08 crc kubenswrapper[4558]: I0120 17:55:08.070626 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-worker-6c8cb46bb9-k2j62"] Jan 20 17:55:08 crc kubenswrapper[4558]: I0120 17:55:08.076591 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=2.987647348 podStartE2EDuration="8.076568293s" podCreationTimestamp="2026-01-20 17:55:00 +0000 UTC" firstStartedPulling="2026-01-20 17:55:01.813215134 +0000 UTC m=+4395.573553100" lastFinishedPulling="2026-01-20 17:55:06.902136078 +0000 UTC m=+4400.662474045" observedRunningTime="2026-01-20 17:55:08.074549698 +0000 UTC m=+4401.834887666" watchObservedRunningTime="2026-01-20 17:55:08.076568293 +0000 UTC m=+4401.836906260" Jan 20 17:55:08 crc kubenswrapper[4558]: W0120 17:55:08.084979 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8a5251c1_60bc_45d7_8524_fd654c09505b.slice/crio-27e45eccea6c6cd3142e139b3a5989021d2f7124d2038226b4cf11882553f944 WatchSource:0}: Error finding container 27e45eccea6c6cd3142e139b3a5989021d2f7124d2038226b4cf11882553f944: Status 404 returned error can't find the container with id 27e45eccea6c6cd3142e139b3a5989021d2f7124d2038226b4cf11882553f944 Jan 20 17:55:08 crc kubenswrapper[4558]: I0120 17:55:08.191445 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-7c7cc56d94-qptzl"] Jan 20 17:55:08 crc kubenswrapper[4558]: W0120 17:55:08.213261 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod50829e14_401d_4958_9ef7_e2a2ef1f4b32.slice/crio-87051b354e76a109fd178692cdde020d8f75467d48ca7b6c336ecdc09ab3ed0b WatchSource:0}: Error finding container 87051b354e76a109fd178692cdde020d8f75467d48ca7b6c336ecdc09ab3ed0b: Status 404 returned error can't find the container with id 87051b354e76a109fd178692cdde020d8f75467d48ca7b6c336ecdc09ab3ed0b Jan 20 17:55:08 crc kubenswrapper[4558]: I0120 17:55:08.280820 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-api-7ddd949596-fq6mn"] Jan 20 17:55:08 crc kubenswrapper[4558]: I0120 17:55:08.293848 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-glvsk"] Jan 20 17:55:08 crc kubenswrapper[4558]: I0120 17:55:08.392914 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-58949d94f4-gxkkc"] Jan 20 17:55:08 crc kubenswrapper[4558]: W0120 17:55:08.436190 4558 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda66c352e_8b7e_4f9e_9447_a2bf8a7a5c91.slice/crio-50a54226f35b4acc596685311008253d6eb55cfd5e9b3ba67a818581a389d9a2 WatchSource:0}: Error finding container 50a54226f35b4acc596685311008253d6eb55cfd5e9b3ba67a818581a389d9a2: Status 404 returned error can't find the container with id 50a54226f35b4acc596685311008253d6eb55cfd5e9b3ba67a818581a389d9a2 Jan 20 17:55:08 crc kubenswrapper[4558]: I0120 17:55:08.591641 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0353f1c5-452b-413c-981b-84e774d099eb" path="/var/lib/kubelet/pods/0353f1c5-452b-413c-981b-84e774d099eb/volumes" Jan 20 17:55:08 crc kubenswrapper[4558]: I0120 17:55:08.873809 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.037151 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/63d78039-84d1-41df-b0dc-3d0b01cc2a61-sg-core-conf-yaml\") pod \"63d78039-84d1-41df-b0dc-3d0b01cc2a61\" (UID: \"63d78039-84d1-41df-b0dc-3d0b01cc2a61\") " Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.037536 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7hwxm\" (UniqueName: \"kubernetes.io/projected/63d78039-84d1-41df-b0dc-3d0b01cc2a61-kube-api-access-7hwxm\") pod \"63d78039-84d1-41df-b0dc-3d0b01cc2a61\" (UID: \"63d78039-84d1-41df-b0dc-3d0b01cc2a61\") " Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.037568 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/63d78039-84d1-41df-b0dc-3d0b01cc2a61-scripts\") pod \"63d78039-84d1-41df-b0dc-3d0b01cc2a61\" (UID: \"63d78039-84d1-41df-b0dc-3d0b01cc2a61\") " Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.037589 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/63d78039-84d1-41df-b0dc-3d0b01cc2a61-log-httpd\") pod \"63d78039-84d1-41df-b0dc-3d0b01cc2a61\" (UID: \"63d78039-84d1-41df-b0dc-3d0b01cc2a61\") " Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.037655 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/63d78039-84d1-41df-b0dc-3d0b01cc2a61-run-httpd\") pod \"63d78039-84d1-41df-b0dc-3d0b01cc2a61\" (UID: \"63d78039-84d1-41df-b0dc-3d0b01cc2a61\") " Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.037740 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/63d78039-84d1-41df-b0dc-3d0b01cc2a61-config-data\") pod \"63d78039-84d1-41df-b0dc-3d0b01cc2a61\" (UID: \"63d78039-84d1-41df-b0dc-3d0b01cc2a61\") " Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.038076 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/63d78039-84d1-41df-b0dc-3d0b01cc2a61-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "63d78039-84d1-41df-b0dc-3d0b01cc2a61" (UID: "63d78039-84d1-41df-b0dc-3d0b01cc2a61"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.038190 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/63d78039-84d1-41df-b0dc-3d0b01cc2a61-combined-ca-bundle\") pod \"63d78039-84d1-41df-b0dc-3d0b01cc2a61\" (UID: \"63d78039-84d1-41df-b0dc-3d0b01cc2a61\") " Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.038203 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/63d78039-84d1-41df-b0dc-3d0b01cc2a61-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "63d78039-84d1-41df-b0dc-3d0b01cc2a61" (UID: "63d78039-84d1-41df-b0dc-3d0b01cc2a61"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.038687 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/63d78039-84d1-41df-b0dc-3d0b01cc2a61-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.038705 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/63d78039-84d1-41df-b0dc-3d0b01cc2a61-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.054507 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/63d78039-84d1-41df-b0dc-3d0b01cc2a61-scripts" (OuterVolumeSpecName: "scripts") pod "63d78039-84d1-41df-b0dc-3d0b01cc2a61" (UID: "63d78039-84d1-41df-b0dc-3d0b01cc2a61"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.054525 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/63d78039-84d1-41df-b0dc-3d0b01cc2a61-kube-api-access-7hwxm" (OuterVolumeSpecName: "kube-api-access-7hwxm") pod "63d78039-84d1-41df-b0dc-3d0b01cc2a61" (UID: "63d78039-84d1-41df-b0dc-3d0b01cc2a61"). InnerVolumeSpecName "kube-api-access-7hwxm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.076962 4558 generic.go:334] "Generic (PLEG): container finished" podID="63d78039-84d1-41df-b0dc-3d0b01cc2a61" containerID="e0e7136ff1fc7c45a47afc77af3f0db467ed3608217c443eca310e92d6574564" exitCode=0 Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.076989 4558 generic.go:334] "Generic (PLEG): container finished" podID="63d78039-84d1-41df-b0dc-3d0b01cc2a61" containerID="fb0c76f7efa112c9f84f7b18a860585f8a74dd8aa04b177a266ffe056c7f8929" exitCode=2 Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.076999 4558 generic.go:334] "Generic (PLEG): container finished" podID="63d78039-84d1-41df-b0dc-3d0b01cc2a61" containerID="ebf1429a28db4257c55ad73cea3715ef4bb80d2a7a864c1cb0908cfbe3578248" exitCode=0 Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.077008 4558 generic.go:334] "Generic (PLEG): container finished" podID="63d78039-84d1-41df-b0dc-3d0b01cc2a61" containerID="266031d8114437a6db868dffef46777ee73c1807d22ff263ddc75880bb58a4d8" exitCode=0 Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.077062 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"63d78039-84d1-41df-b0dc-3d0b01cc2a61","Type":"ContainerDied","Data":"e0e7136ff1fc7c45a47afc77af3f0db467ed3608217c443eca310e92d6574564"} Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.077115 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"63d78039-84d1-41df-b0dc-3d0b01cc2a61","Type":"ContainerDied","Data":"fb0c76f7efa112c9f84f7b18a860585f8a74dd8aa04b177a266ffe056c7f8929"} Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.077129 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"63d78039-84d1-41df-b0dc-3d0b01cc2a61","Type":"ContainerDied","Data":"ebf1429a28db4257c55ad73cea3715ef4bb80d2a7a864c1cb0908cfbe3578248"} Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.077139 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"63d78039-84d1-41df-b0dc-3d0b01cc2a61","Type":"ContainerDied","Data":"266031d8114437a6db868dffef46777ee73c1807d22ff263ddc75880bb58a4d8"} Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.077147 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"63d78039-84d1-41df-b0dc-3d0b01cc2a61","Type":"ContainerDied","Data":"574ebb66e35d9d21611416ae8633182d5a5fa517914c62911726888394f50a1d"} Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.077189 4558 scope.go:117] "RemoveContainer" containerID="e0e7136ff1fc7c45a47afc77af3f0db467ed3608217c443eca310e92d6574564" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.077313 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.086494 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/63d78039-84d1-41df-b0dc-3d0b01cc2a61-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "63d78039-84d1-41df-b0dc-3d0b01cc2a61" (UID: "63d78039-84d1-41df-b0dc-3d0b01cc2a61"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.090858 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-7ddd949596-fq6mn" event={"ID":"cb5bdda0-3564-422d-8b42-24546cf568b1","Type":"ContainerStarted","Data":"7b2580732885c3c0febe92be2cfd2d2372725698e2a898596f877ba9fc9cbf70"} Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.090913 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-7ddd949596-fq6mn" event={"ID":"cb5bdda0-3564-422d-8b42-24546cf568b1","Type":"ContainerStarted","Data":"78be98eafdcaaf1e558140085e832f02cb6b33728b1024c81ec11221d71d43b2"} Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.090925 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-7ddd949596-fq6mn" event={"ID":"cb5bdda0-3564-422d-8b42-24546cf568b1","Type":"ContainerStarted","Data":"5ea5c6826bcbd301234c722d32b8e7a855f4b650ff2661daa79292125ca35d32"} Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.090991 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/barbican-api-7ddd949596-fq6mn" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.091012 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/barbican-api-7ddd949596-fq6mn" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.094485 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-6c8cb46bb9-k2j62" event={"ID":"8a5251c1-60bc-45d7-8524-fd654c09505b","Type":"ContainerStarted","Data":"9f47eb91549a4c39117393c154adc8a527a01fad6a90f46a70181a157bb9033a"} Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.094528 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-6c8cb46bb9-k2j62" event={"ID":"8a5251c1-60bc-45d7-8524-fd654c09505b","Type":"ContainerStarted","Data":"cc8c4f5e82e163a1cfed41c73a53986199d859e052ef15e033504df9f64fa7a9"} Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.094542 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-6c8cb46bb9-k2j62" event={"ID":"8a5251c1-60bc-45d7-8524-fd654c09505b","Type":"ContainerStarted","Data":"27e45eccea6c6cd3142e139b3a5989021d2f7124d2038226b4cf11882553f944"} Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.101333 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"5881b3da-d3ac-4168-a33c-0f29ad342f60","Type":"ContainerStarted","Data":"426c5470720e7ae7d002df3aeb9aab834c73d17509f061f38d2d4533eef638aa"} Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.118646 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-glvsk" event={"ID":"d739bf90-bc99-4f8a-870e-98026ba6a53c","Type":"ContainerStarted","Data":"513cf7b607f4c5f76a98e28f72ed3a3e4460bfb0341cd77104ff3e1a715e39f3"} Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.118692 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-glvsk" event={"ID":"d739bf90-bc99-4f8a-870e-98026ba6a53c","Type":"ContainerStarted","Data":"f72f0830340dc1a2eb38cfa3fadc0b0ca0b77a7404b74932cdc2b1f647d97e1e"} Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.120751 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack-kuttl-tests/barbican-api-7ddd949596-fq6mn" podStartSLOduration=2.120735875 podStartE2EDuration="2.120735875s" podCreationTimestamp="2026-01-20 17:55:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:55:09.113201923 +0000 UTC m=+4402.873539891" watchObservedRunningTime="2026-01-20 17:55:09.120735875 +0000 UTC m=+4402.881073842" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.137368 4558 scope.go:117] "RemoveContainer" containerID="fb0c76f7efa112c9f84f7b18a860585f8a74dd8aa04b177a266ffe056c7f8929" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.154514 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/63d78039-84d1-41df-b0dc-3d0b01cc2a61-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.154541 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7hwxm\" (UniqueName: \"kubernetes.io/projected/63d78039-84d1-41df-b0dc-3d0b01cc2a61-kube-api-access-7hwxm\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.154553 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/63d78039-84d1-41df-b0dc-3d0b01cc2a61-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.166727 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-external-api-0" podStartSLOduration=3.166705756 podStartE2EDuration="3.166705756s" podCreationTimestamp="2026-01-20 17:55:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:55:09.145429111 +0000 UTC m=+4402.905767078" watchObservedRunningTime="2026-01-20 17:55:09.166705756 +0000 UTC m=+4402.927043713" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.169140 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-worker-6c8cb46bb9-k2j62" podStartSLOduration=2.169134131 podStartE2EDuration="2.169134131s" podCreationTimestamp="2026-01-20 17:55:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:55:09.16404776 +0000 UTC m=+4402.924385727" watchObservedRunningTime="2026-01-20 17:55:09.169134131 +0000 UTC m=+4402.929472097" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.169464 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-58949d94f4-gxkkc" event={"ID":"a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91","Type":"ContainerStarted","Data":"210a0812578a1d65ec60d4f022b3ab7e40e130626b3b2c29f678571f3bef403d"} Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.169514 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-58949d94f4-gxkkc" event={"ID":"a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91","Type":"ContainerStarted","Data":"ca4ecb7fa4f5c23eb30d668f5754a6906c2e344a7fe9c1284b2323148a7db0a4"} Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.169528 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-58949d94f4-gxkkc" 
event={"ID":"a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91","Type":"ContainerStarted","Data":"50a54226f35b4acc596685311008253d6eb55cfd5e9b3ba67a818581a389d9a2"} Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.178249 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"b419ccfb-cd20-4b72-8f3a-48ce9ea2c991","Type":"ContainerStarted","Data":"06b7bcf43cb16007690126d14e7263d4b301518776b44ac910ada8799e3568d9"} Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.181294 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/63d78039-84d1-41df-b0dc-3d0b01cc2a61-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "63d78039-84d1-41df-b0dc-3d0b01cc2a61" (UID: "63d78039-84d1-41df-b0dc-3d0b01cc2a61"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.184818 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/keystone-bootstrap-glvsk" podStartSLOduration=2.184801491 podStartE2EDuration="2.184801491s" podCreationTimestamp="2026-01-20 17:55:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:55:09.1829656 +0000 UTC m=+4402.943303568" watchObservedRunningTime="2026-01-20 17:55:09.184801491 +0000 UTC m=+4402.945139458" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.188904 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-7c7cc56d94-qptzl" event={"ID":"50829e14-401d-4958-9ef7-e2a2ef1f4b32","Type":"ContainerStarted","Data":"f43ceb14bd50b1f492df9772660a94cd37c5a26e2941454f5b5fa401d0851c00"} Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.188945 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-7c7cc56d94-qptzl" event={"ID":"50829e14-401d-4958-9ef7-e2a2ef1f4b32","Type":"ContainerStarted","Data":"fe2d2dd5529693a11eb03a9bb6bf05aecb6c57bbe0a7f33f65058fc71e14d418"} Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.188959 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-7c7cc56d94-qptzl" event={"ID":"50829e14-401d-4958-9ef7-e2a2ef1f4b32","Type":"ContainerStarted","Data":"87051b354e76a109fd178692cdde020d8f75467d48ca7b6c336ecdc09ab3ed0b"} Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.189039 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/placement-7c7cc56d94-qptzl" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.209012 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/63d78039-84d1-41df-b0dc-3d0b01cc2a61-config-data" (OuterVolumeSpecName: "config-data") pod "63d78039-84d1-41df-b0dc-3d0b01cc2a61" (UID: "63d78039-84d1-41df-b0dc-3d0b01cc2a61"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.212688 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-internal-api-0" podStartSLOduration=3.212662553 podStartE2EDuration="3.212662553s" podCreationTimestamp="2026-01-20 17:55:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:55:09.203314301 +0000 UTC m=+4402.963652267" watchObservedRunningTime="2026-01-20 17:55:09.212662553 +0000 UTC m=+4402.973000520" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.226666 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-keystone-listener-58949d94f4-gxkkc" podStartSLOduration=2.226649495 podStartE2EDuration="2.226649495s" podCreationTimestamp="2026-01-20 17:55:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:55:09.219633747 +0000 UTC m=+4402.979971714" watchObservedRunningTime="2026-01-20 17:55:09.226649495 +0000 UTC m=+4402.986987462" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.254771 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/placement-7c7cc56d94-qptzl" podStartSLOduration=2.2547556970000002 podStartE2EDuration="2.254755697s" podCreationTimestamp="2026-01-20 17:55:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:55:09.242494979 +0000 UTC m=+4403.002832947" watchObservedRunningTime="2026-01-20 17:55:09.254755697 +0000 UTC m=+4403.015093664" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.257591 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/63d78039-84d1-41df-b0dc-3d0b01cc2a61-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.257621 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/63d78039-84d1-41df-b0dc-3d0b01cc2a61-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.262435 4558 scope.go:117] "RemoveContainer" containerID="ebf1429a28db4257c55ad73cea3715ef4bb80d2a7a864c1cb0908cfbe3578248" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.319144 4558 scope.go:117] "RemoveContainer" containerID="266031d8114437a6db868dffef46777ee73c1807d22ff263ddc75880bb58a4d8" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.375322 4558 scope.go:117] "RemoveContainer" containerID="e0e7136ff1fc7c45a47afc77af3f0db467ed3608217c443eca310e92d6574564" Jan 20 17:55:09 crc kubenswrapper[4558]: E0120 17:55:09.375548 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e0e7136ff1fc7c45a47afc77af3f0db467ed3608217c443eca310e92d6574564\": container with ID starting with e0e7136ff1fc7c45a47afc77af3f0db467ed3608217c443eca310e92d6574564 not found: ID does not exist" containerID="e0e7136ff1fc7c45a47afc77af3f0db467ed3608217c443eca310e92d6574564" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.375577 4558 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"e0e7136ff1fc7c45a47afc77af3f0db467ed3608217c443eca310e92d6574564"} err="failed to get container status \"e0e7136ff1fc7c45a47afc77af3f0db467ed3608217c443eca310e92d6574564\": rpc error: code = NotFound desc = could not find container \"e0e7136ff1fc7c45a47afc77af3f0db467ed3608217c443eca310e92d6574564\": container with ID starting with e0e7136ff1fc7c45a47afc77af3f0db467ed3608217c443eca310e92d6574564 not found: ID does not exist" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.375602 4558 scope.go:117] "RemoveContainer" containerID="fb0c76f7efa112c9f84f7b18a860585f8a74dd8aa04b177a266ffe056c7f8929" Jan 20 17:55:09 crc kubenswrapper[4558]: E0120 17:55:09.382867 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fb0c76f7efa112c9f84f7b18a860585f8a74dd8aa04b177a266ffe056c7f8929\": container with ID starting with fb0c76f7efa112c9f84f7b18a860585f8a74dd8aa04b177a266ffe056c7f8929 not found: ID does not exist" containerID="fb0c76f7efa112c9f84f7b18a860585f8a74dd8aa04b177a266ffe056c7f8929" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.382972 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fb0c76f7efa112c9f84f7b18a860585f8a74dd8aa04b177a266ffe056c7f8929"} err="failed to get container status \"fb0c76f7efa112c9f84f7b18a860585f8a74dd8aa04b177a266ffe056c7f8929\": rpc error: code = NotFound desc = could not find container \"fb0c76f7efa112c9f84f7b18a860585f8a74dd8aa04b177a266ffe056c7f8929\": container with ID starting with fb0c76f7efa112c9f84f7b18a860585f8a74dd8aa04b177a266ffe056c7f8929 not found: ID does not exist" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.383047 4558 scope.go:117] "RemoveContainer" containerID="ebf1429a28db4257c55ad73cea3715ef4bb80d2a7a864c1cb0908cfbe3578248" Jan 20 17:55:09 crc kubenswrapper[4558]: E0120 17:55:09.387311 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ebf1429a28db4257c55ad73cea3715ef4bb80d2a7a864c1cb0908cfbe3578248\": container with ID starting with ebf1429a28db4257c55ad73cea3715ef4bb80d2a7a864c1cb0908cfbe3578248 not found: ID does not exist" containerID="ebf1429a28db4257c55ad73cea3715ef4bb80d2a7a864c1cb0908cfbe3578248" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.387411 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ebf1429a28db4257c55ad73cea3715ef4bb80d2a7a864c1cb0908cfbe3578248"} err="failed to get container status \"ebf1429a28db4257c55ad73cea3715ef4bb80d2a7a864c1cb0908cfbe3578248\": rpc error: code = NotFound desc = could not find container \"ebf1429a28db4257c55ad73cea3715ef4bb80d2a7a864c1cb0908cfbe3578248\": container with ID starting with ebf1429a28db4257c55ad73cea3715ef4bb80d2a7a864c1cb0908cfbe3578248 not found: ID does not exist" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.387485 4558 scope.go:117] "RemoveContainer" containerID="266031d8114437a6db868dffef46777ee73c1807d22ff263ddc75880bb58a4d8" Jan 20 17:55:09 crc kubenswrapper[4558]: E0120 17:55:09.389547 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"266031d8114437a6db868dffef46777ee73c1807d22ff263ddc75880bb58a4d8\": container with ID starting with 266031d8114437a6db868dffef46777ee73c1807d22ff263ddc75880bb58a4d8 not found: ID does not exist" 
containerID="266031d8114437a6db868dffef46777ee73c1807d22ff263ddc75880bb58a4d8" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.389646 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"266031d8114437a6db868dffef46777ee73c1807d22ff263ddc75880bb58a4d8"} err="failed to get container status \"266031d8114437a6db868dffef46777ee73c1807d22ff263ddc75880bb58a4d8\": rpc error: code = NotFound desc = could not find container \"266031d8114437a6db868dffef46777ee73c1807d22ff263ddc75880bb58a4d8\": container with ID starting with 266031d8114437a6db868dffef46777ee73c1807d22ff263ddc75880bb58a4d8 not found: ID does not exist" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.389717 4558 scope.go:117] "RemoveContainer" containerID="e0e7136ff1fc7c45a47afc77af3f0db467ed3608217c443eca310e92d6574564" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.389949 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e0e7136ff1fc7c45a47afc77af3f0db467ed3608217c443eca310e92d6574564"} err="failed to get container status \"e0e7136ff1fc7c45a47afc77af3f0db467ed3608217c443eca310e92d6574564\": rpc error: code = NotFound desc = could not find container \"e0e7136ff1fc7c45a47afc77af3f0db467ed3608217c443eca310e92d6574564\": container with ID starting with e0e7136ff1fc7c45a47afc77af3f0db467ed3608217c443eca310e92d6574564 not found: ID does not exist" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.390041 4558 scope.go:117] "RemoveContainer" containerID="fb0c76f7efa112c9f84f7b18a860585f8a74dd8aa04b177a266ffe056c7f8929" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.392178 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fb0c76f7efa112c9f84f7b18a860585f8a74dd8aa04b177a266ffe056c7f8929"} err="failed to get container status \"fb0c76f7efa112c9f84f7b18a860585f8a74dd8aa04b177a266ffe056c7f8929\": rpc error: code = NotFound desc = could not find container \"fb0c76f7efa112c9f84f7b18a860585f8a74dd8aa04b177a266ffe056c7f8929\": container with ID starting with fb0c76f7efa112c9f84f7b18a860585f8a74dd8aa04b177a266ffe056c7f8929 not found: ID does not exist" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.392221 4558 scope.go:117] "RemoveContainer" containerID="ebf1429a28db4257c55ad73cea3715ef4bb80d2a7a864c1cb0908cfbe3578248" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.392686 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ebf1429a28db4257c55ad73cea3715ef4bb80d2a7a864c1cb0908cfbe3578248"} err="failed to get container status \"ebf1429a28db4257c55ad73cea3715ef4bb80d2a7a864c1cb0908cfbe3578248\": rpc error: code = NotFound desc = could not find container \"ebf1429a28db4257c55ad73cea3715ef4bb80d2a7a864c1cb0908cfbe3578248\": container with ID starting with ebf1429a28db4257c55ad73cea3715ef4bb80d2a7a864c1cb0908cfbe3578248 not found: ID does not exist" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.392712 4558 scope.go:117] "RemoveContainer" containerID="266031d8114437a6db868dffef46777ee73c1807d22ff263ddc75880bb58a4d8" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.399819 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"266031d8114437a6db868dffef46777ee73c1807d22ff263ddc75880bb58a4d8"} err="failed to get container status \"266031d8114437a6db868dffef46777ee73c1807d22ff263ddc75880bb58a4d8\": rpc error: code = NotFound desc = could not find 
container \"266031d8114437a6db868dffef46777ee73c1807d22ff263ddc75880bb58a4d8\": container with ID starting with 266031d8114437a6db868dffef46777ee73c1807d22ff263ddc75880bb58a4d8 not found: ID does not exist" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.399919 4558 scope.go:117] "RemoveContainer" containerID="e0e7136ff1fc7c45a47afc77af3f0db467ed3608217c443eca310e92d6574564" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.405996 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e0e7136ff1fc7c45a47afc77af3f0db467ed3608217c443eca310e92d6574564"} err="failed to get container status \"e0e7136ff1fc7c45a47afc77af3f0db467ed3608217c443eca310e92d6574564\": rpc error: code = NotFound desc = could not find container \"e0e7136ff1fc7c45a47afc77af3f0db467ed3608217c443eca310e92d6574564\": container with ID starting with e0e7136ff1fc7c45a47afc77af3f0db467ed3608217c443eca310e92d6574564 not found: ID does not exist" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.406105 4558 scope.go:117] "RemoveContainer" containerID="fb0c76f7efa112c9f84f7b18a860585f8a74dd8aa04b177a266ffe056c7f8929" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.406628 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fb0c76f7efa112c9f84f7b18a860585f8a74dd8aa04b177a266ffe056c7f8929"} err="failed to get container status \"fb0c76f7efa112c9f84f7b18a860585f8a74dd8aa04b177a266ffe056c7f8929\": rpc error: code = NotFound desc = could not find container \"fb0c76f7efa112c9f84f7b18a860585f8a74dd8aa04b177a266ffe056c7f8929\": container with ID starting with fb0c76f7efa112c9f84f7b18a860585f8a74dd8aa04b177a266ffe056c7f8929 not found: ID does not exist" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.406670 4558 scope.go:117] "RemoveContainer" containerID="ebf1429a28db4257c55ad73cea3715ef4bb80d2a7a864c1cb0908cfbe3578248" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.407001 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ebf1429a28db4257c55ad73cea3715ef4bb80d2a7a864c1cb0908cfbe3578248"} err="failed to get container status \"ebf1429a28db4257c55ad73cea3715ef4bb80d2a7a864c1cb0908cfbe3578248\": rpc error: code = NotFound desc = could not find container \"ebf1429a28db4257c55ad73cea3715ef4bb80d2a7a864c1cb0908cfbe3578248\": container with ID starting with ebf1429a28db4257c55ad73cea3715ef4bb80d2a7a864c1cb0908cfbe3578248 not found: ID does not exist" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.407025 4558 scope.go:117] "RemoveContainer" containerID="266031d8114437a6db868dffef46777ee73c1807d22ff263ddc75880bb58a4d8" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.407359 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"266031d8114437a6db868dffef46777ee73c1807d22ff263ddc75880bb58a4d8"} err="failed to get container status \"266031d8114437a6db868dffef46777ee73c1807d22ff263ddc75880bb58a4d8\": rpc error: code = NotFound desc = could not find container \"266031d8114437a6db868dffef46777ee73c1807d22ff263ddc75880bb58a4d8\": container with ID starting with 266031d8114437a6db868dffef46777ee73c1807d22ff263ddc75880bb58a4d8 not found: ID does not exist" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.407382 4558 scope.go:117] "RemoveContainer" containerID="e0e7136ff1fc7c45a47afc77af3f0db467ed3608217c443eca310e92d6574564" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.407669 4558 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e0e7136ff1fc7c45a47afc77af3f0db467ed3608217c443eca310e92d6574564"} err="failed to get container status \"e0e7136ff1fc7c45a47afc77af3f0db467ed3608217c443eca310e92d6574564\": rpc error: code = NotFound desc = could not find container \"e0e7136ff1fc7c45a47afc77af3f0db467ed3608217c443eca310e92d6574564\": container with ID starting with e0e7136ff1fc7c45a47afc77af3f0db467ed3608217c443eca310e92d6574564 not found: ID does not exist" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.407685 4558 scope.go:117] "RemoveContainer" containerID="fb0c76f7efa112c9f84f7b18a860585f8a74dd8aa04b177a266ffe056c7f8929" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.411556 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fb0c76f7efa112c9f84f7b18a860585f8a74dd8aa04b177a266ffe056c7f8929"} err="failed to get container status \"fb0c76f7efa112c9f84f7b18a860585f8a74dd8aa04b177a266ffe056c7f8929\": rpc error: code = NotFound desc = could not find container \"fb0c76f7efa112c9f84f7b18a860585f8a74dd8aa04b177a266ffe056c7f8929\": container with ID starting with fb0c76f7efa112c9f84f7b18a860585f8a74dd8aa04b177a266ffe056c7f8929 not found: ID does not exist" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.411600 4558 scope.go:117] "RemoveContainer" containerID="ebf1429a28db4257c55ad73cea3715ef4bb80d2a7a864c1cb0908cfbe3578248" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.411900 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ebf1429a28db4257c55ad73cea3715ef4bb80d2a7a864c1cb0908cfbe3578248"} err="failed to get container status \"ebf1429a28db4257c55ad73cea3715ef4bb80d2a7a864c1cb0908cfbe3578248\": rpc error: code = NotFound desc = could not find container \"ebf1429a28db4257c55ad73cea3715ef4bb80d2a7a864c1cb0908cfbe3578248\": container with ID starting with ebf1429a28db4257c55ad73cea3715ef4bb80d2a7a864c1cb0908cfbe3578248 not found: ID does not exist" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.411924 4558 scope.go:117] "RemoveContainer" containerID="266031d8114437a6db868dffef46777ee73c1807d22ff263ddc75880bb58a4d8" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.412251 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"266031d8114437a6db868dffef46777ee73c1807d22ff263ddc75880bb58a4d8"} err="failed to get container status \"266031d8114437a6db868dffef46777ee73c1807d22ff263ddc75880bb58a4d8\": rpc error: code = NotFound desc = could not find container \"266031d8114437a6db868dffef46777ee73c1807d22ff263ddc75880bb58a4d8\": container with ID starting with 266031d8114437a6db868dffef46777ee73c1807d22ff263ddc75880bb58a4d8 not found: ID does not exist" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.441806 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.453669 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.459217 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:55:09 crc kubenswrapper[4558]: E0120 17:55:09.459551 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="63d78039-84d1-41df-b0dc-3d0b01cc2a61" containerName="proxy-httpd" Jan 20 17:55:09 crc 
kubenswrapper[4558]: I0120 17:55:09.459572 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="63d78039-84d1-41df-b0dc-3d0b01cc2a61" containerName="proxy-httpd" Jan 20 17:55:09 crc kubenswrapper[4558]: E0120 17:55:09.459591 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="63d78039-84d1-41df-b0dc-3d0b01cc2a61" containerName="ceilometer-notification-agent" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.459597 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="63d78039-84d1-41df-b0dc-3d0b01cc2a61" containerName="ceilometer-notification-agent" Jan 20 17:55:09 crc kubenswrapper[4558]: E0120 17:55:09.459619 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="63d78039-84d1-41df-b0dc-3d0b01cc2a61" containerName="ceilometer-central-agent" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.459625 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="63d78039-84d1-41df-b0dc-3d0b01cc2a61" containerName="ceilometer-central-agent" Jan 20 17:55:09 crc kubenswrapper[4558]: E0120 17:55:09.459641 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="63d78039-84d1-41df-b0dc-3d0b01cc2a61" containerName="sg-core" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.459647 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="63d78039-84d1-41df-b0dc-3d0b01cc2a61" containerName="sg-core" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.459781 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="63d78039-84d1-41df-b0dc-3d0b01cc2a61" containerName="proxy-httpd" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.459794 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="63d78039-84d1-41df-b0dc-3d0b01cc2a61" containerName="ceilometer-notification-agent" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.459807 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="63d78039-84d1-41df-b0dc-3d0b01cc2a61" containerName="ceilometer-central-agent" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.459815 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="63d78039-84d1-41df-b0dc-3d0b01cc2a61" containerName="sg-core" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.463251 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.466674 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.466994 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.469902 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.565329 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ba8f8c1-7947-4326-b7d2-49f89d248c21-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7ba8f8c1-7947-4326-b7d2-49f89d248c21\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.565425 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9d865\" (UniqueName: \"kubernetes.io/projected/7ba8f8c1-7947-4326-b7d2-49f89d248c21-kube-api-access-9d865\") pod \"ceilometer-0\" (UID: \"7ba8f8c1-7947-4326-b7d2-49f89d248c21\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.565462 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7ba8f8c1-7947-4326-b7d2-49f89d248c21-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7ba8f8c1-7947-4326-b7d2-49f89d248c21\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.565481 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7ba8f8c1-7947-4326-b7d2-49f89d248c21-log-httpd\") pod \"ceilometer-0\" (UID: \"7ba8f8c1-7947-4326-b7d2-49f89d248c21\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.565500 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ba8f8c1-7947-4326-b7d2-49f89d248c21-config-data\") pod \"ceilometer-0\" (UID: \"7ba8f8c1-7947-4326-b7d2-49f89d248c21\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.565536 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7ba8f8c1-7947-4326-b7d2-49f89d248c21-run-httpd\") pod \"ceilometer-0\" (UID: \"7ba8f8c1-7947-4326-b7d2-49f89d248c21\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.565560 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7ba8f8c1-7947-4326-b7d2-49f89d248c21-scripts\") pod \"ceilometer-0\" (UID: \"7ba8f8c1-7947-4326-b7d2-49f89d248c21\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.593082 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-db-sync-7l4r5" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.667180 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ba8f8c1-7947-4326-b7d2-49f89d248c21-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7ba8f8c1-7947-4326-b7d2-49f89d248c21\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.667332 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9d865\" (UniqueName: \"kubernetes.io/projected/7ba8f8c1-7947-4326-b7d2-49f89d248c21-kube-api-access-9d865\") pod \"ceilometer-0\" (UID: \"7ba8f8c1-7947-4326-b7d2-49f89d248c21\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.667383 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7ba8f8c1-7947-4326-b7d2-49f89d248c21-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7ba8f8c1-7947-4326-b7d2-49f89d248c21\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.667413 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7ba8f8c1-7947-4326-b7d2-49f89d248c21-log-httpd\") pod \"ceilometer-0\" (UID: \"7ba8f8c1-7947-4326-b7d2-49f89d248c21\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.667436 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ba8f8c1-7947-4326-b7d2-49f89d248c21-config-data\") pod \"ceilometer-0\" (UID: \"7ba8f8c1-7947-4326-b7d2-49f89d248c21\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.667495 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7ba8f8c1-7947-4326-b7d2-49f89d248c21-run-httpd\") pod \"ceilometer-0\" (UID: \"7ba8f8c1-7947-4326-b7d2-49f89d248c21\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.667528 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7ba8f8c1-7947-4326-b7d2-49f89d248c21-scripts\") pod \"ceilometer-0\" (UID: \"7ba8f8c1-7947-4326-b7d2-49f89d248c21\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.668983 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7ba8f8c1-7947-4326-b7d2-49f89d248c21-log-httpd\") pod \"ceilometer-0\" (UID: \"7ba8f8c1-7947-4326-b7d2-49f89d248c21\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.669641 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7ba8f8c1-7947-4326-b7d2-49f89d248c21-run-httpd\") pod \"ceilometer-0\" (UID: \"7ba8f8c1-7947-4326-b7d2-49f89d248c21\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.671680 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/7ba8f8c1-7947-4326-b7d2-49f89d248c21-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7ba8f8c1-7947-4326-b7d2-49f89d248c21\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.675725 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7ba8f8c1-7947-4326-b7d2-49f89d248c21-scripts\") pod \"ceilometer-0\" (UID: \"7ba8f8c1-7947-4326-b7d2-49f89d248c21\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.676315 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ba8f8c1-7947-4326-b7d2-49f89d248c21-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7ba8f8c1-7947-4326-b7d2-49f89d248c21\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.676880 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ba8f8c1-7947-4326-b7d2-49f89d248c21-config-data\") pod \"ceilometer-0\" (UID: \"7ba8f8c1-7947-4326-b7d2-49f89d248c21\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.685662 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9d865\" (UniqueName: \"kubernetes.io/projected/7ba8f8c1-7947-4326-b7d2-49f89d248c21-kube-api-access-9d865\") pod \"ceilometer-0\" (UID: \"7ba8f8c1-7947-4326-b7d2-49f89d248c21\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.769654 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hx5rb\" (UniqueName: \"kubernetes.io/projected/a8e183b4-694a-4ce9-90e9-1ff11d52ff3c-kube-api-access-hx5rb\") pod \"a8e183b4-694a-4ce9-90e9-1ff11d52ff3c\" (UID: \"a8e183b4-694a-4ce9-90e9-1ff11d52ff3c\") " Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.769817 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a8e183b4-694a-4ce9-90e9-1ff11d52ff3c-scripts\") pod \"a8e183b4-694a-4ce9-90e9-1ff11d52ff3c\" (UID: \"a8e183b4-694a-4ce9-90e9-1ff11d52ff3c\") " Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.769898 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a8e183b4-694a-4ce9-90e9-1ff11d52ff3c-etc-machine-id\") pod \"a8e183b4-694a-4ce9-90e9-1ff11d52ff3c\" (UID: \"a8e183b4-694a-4ce9-90e9-1ff11d52ff3c\") " Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.770057 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8e183b4-694a-4ce9-90e9-1ff11d52ff3c-config-data\") pod \"a8e183b4-694a-4ce9-90e9-1ff11d52ff3c\" (UID: \"a8e183b4-694a-4ce9-90e9-1ff11d52ff3c\") " Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.770144 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a8e183b4-694a-4ce9-90e9-1ff11d52ff3c-db-sync-config-data\") pod \"a8e183b4-694a-4ce9-90e9-1ff11d52ff3c\" (UID: \"a8e183b4-694a-4ce9-90e9-1ff11d52ff3c\") " Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.770194 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8e183b4-694a-4ce9-90e9-1ff11d52ff3c-combined-ca-bundle\") pod \"a8e183b4-694a-4ce9-90e9-1ff11d52ff3c\" (UID: \"a8e183b4-694a-4ce9-90e9-1ff11d52ff3c\") " Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.770646 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a8e183b4-694a-4ce9-90e9-1ff11d52ff3c-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "a8e183b4-694a-4ce9-90e9-1ff11d52ff3c" (UID: "a8e183b4-694a-4ce9-90e9-1ff11d52ff3c"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.771492 4558 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a8e183b4-694a-4ce9-90e9-1ff11d52ff3c-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.773873 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a8e183b4-694a-4ce9-90e9-1ff11d52ff3c-kube-api-access-hx5rb" (OuterVolumeSpecName: "kube-api-access-hx5rb") pod "a8e183b4-694a-4ce9-90e9-1ff11d52ff3c" (UID: "a8e183b4-694a-4ce9-90e9-1ff11d52ff3c"). InnerVolumeSpecName "kube-api-access-hx5rb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.775335 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8e183b4-694a-4ce9-90e9-1ff11d52ff3c-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "a8e183b4-694a-4ce9-90e9-1ff11d52ff3c" (UID: "a8e183b4-694a-4ce9-90e9-1ff11d52ff3c"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.775738 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8e183b4-694a-4ce9-90e9-1ff11d52ff3c-scripts" (OuterVolumeSpecName: "scripts") pod "a8e183b4-694a-4ce9-90e9-1ff11d52ff3c" (UID: "a8e183b4-694a-4ce9-90e9-1ff11d52ff3c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.785465 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.796678 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8e183b4-694a-4ce9-90e9-1ff11d52ff3c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a8e183b4-694a-4ce9-90e9-1ff11d52ff3c" (UID: "a8e183b4-694a-4ce9-90e9-1ff11d52ff3c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.832552 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8e183b4-694a-4ce9-90e9-1ff11d52ff3c-config-data" (OuterVolumeSpecName: "config-data") pod "a8e183b4-694a-4ce9-90e9-1ff11d52ff3c" (UID: "a8e183b4-694a-4ce9-90e9-1ff11d52ff3c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.874455 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8e183b4-694a-4ce9-90e9-1ff11d52ff3c-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.874869 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8e183b4-694a-4ce9-90e9-1ff11d52ff3c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.874886 4558 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a8e183b4-694a-4ce9-90e9-1ff11d52ff3c-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.874902 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hx5rb\" (UniqueName: \"kubernetes.io/projected/a8e183b4-694a-4ce9-90e9-1ff11d52ff3c-kube-api-access-hx5rb\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:09 crc kubenswrapper[4558]: I0120 17:55:09.874917 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a8e183b4-694a-4ce9-90e9-1ff11d52ff3c-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:10 crc kubenswrapper[4558]: I0120 17:55:10.199726 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-db-sync-7l4r5" event={"ID":"a8e183b4-694a-4ce9-90e9-1ff11d52ff3c","Type":"ContainerDied","Data":"2e07911f819a9e30826bfd851f1984c5d19e9e8f68cb8a36d54515996b7e96d1"} Jan 20 17:55:10 crc kubenswrapper[4558]: I0120 17:55:10.199765 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2e07911f819a9e30826bfd851f1984c5d19e9e8f68cb8a36d54515996b7e96d1" Jan 20 17:55:10 crc kubenswrapper[4558]: I0120 17:55:10.199909 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-db-sync-7l4r5" Jan 20 17:55:10 crc kubenswrapper[4558]: I0120 17:55:10.201191 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/placement-7c7cc56d94-qptzl" Jan 20 17:55:10 crc kubenswrapper[4558]: I0120 17:55:10.221305 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:55:10 crc kubenswrapper[4558]: W0120 17:55:10.228453 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7ba8f8c1_7947_4326_b7d2_49f89d248c21.slice/crio-72c054ef318a21775605c91ef2410cce83d929dfd0cf052af173da245eedd676 WatchSource:0}: Error finding container 72c054ef318a21775605c91ef2410cce83d929dfd0cf052af173da245eedd676: Status 404 returned error can't find the container with id 72c054ef318a21775605c91ef2410cce83d929dfd0cf052af173da245eedd676 Jan 20 17:55:10 crc kubenswrapper[4558]: I0120 17:55:10.319033 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:55:10 crc kubenswrapper[4558]: E0120 17:55:10.324989 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8e183b4-694a-4ce9-90e9-1ff11d52ff3c" containerName="cinder-db-sync" Jan 20 17:55:10 crc kubenswrapper[4558]: I0120 17:55:10.325017 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8e183b4-694a-4ce9-90e9-1ff11d52ff3c" containerName="cinder-db-sync" Jan 20 17:55:10 crc kubenswrapper[4558]: I0120 17:55:10.325228 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a8e183b4-694a-4ce9-90e9-1ff11d52ff3c" containerName="cinder-db-sync" Jan 20 17:55:10 crc kubenswrapper[4558]: I0120 17:55:10.326076 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:55:10 crc kubenswrapper[4558]: I0120 17:55:10.328694 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-scheduler-config-data" Jan 20 17:55:10 crc kubenswrapper[4558]: I0120 17:55:10.328890 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-cinder-dockercfg-xbrdb" Jan 20 17:55:10 crc kubenswrapper[4558]: I0120 17:55:10.329016 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-scripts" Jan 20 17:55:10 crc kubenswrapper[4558]: I0120 17:55:10.329193 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-config-data" Jan 20 17:55:10 crc kubenswrapper[4558]: I0120 17:55:10.339844 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:55:10 crc kubenswrapper[4558]: I0120 17:55:10.435332 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:55:10 crc kubenswrapper[4558]: I0120 17:55:10.436716 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:55:10 crc kubenswrapper[4558]: I0120 17:55:10.438726 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-api-config-data" Jan 20 17:55:10 crc kubenswrapper[4558]: I0120 17:55:10.455936 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:55:10 crc kubenswrapper[4558]: I0120 17:55:10.490458 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/36ea0e6e-00c5-41a9-9ebc-0960214e6a65-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"36ea0e6e-00c5-41a9-9ebc-0960214e6a65\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:55:10 crc kubenswrapper[4558]: I0120 17:55:10.490738 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/36ea0e6e-00c5-41a9-9ebc-0960214e6a65-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"36ea0e6e-00c5-41a9-9ebc-0960214e6a65\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:55:10 crc kubenswrapper[4558]: I0120 17:55:10.490795 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36ea0e6e-00c5-41a9-9ebc-0960214e6a65-config-data\") pod \"cinder-scheduler-0\" (UID: \"36ea0e6e-00c5-41a9-9ebc-0960214e6a65\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:55:10 crc kubenswrapper[4558]: I0120 17:55:10.490880 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zlk7t\" (UniqueName: \"kubernetes.io/projected/36ea0e6e-00c5-41a9-9ebc-0960214e6a65-kube-api-access-zlk7t\") pod \"cinder-scheduler-0\" (UID: \"36ea0e6e-00c5-41a9-9ebc-0960214e6a65\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:55:10 crc kubenswrapper[4558]: I0120 17:55:10.491456 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36ea0e6e-00c5-41a9-9ebc-0960214e6a65-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"36ea0e6e-00c5-41a9-9ebc-0960214e6a65\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:55:10 crc kubenswrapper[4558]: I0120 17:55:10.491546 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36ea0e6e-00c5-41a9-9ebc-0960214e6a65-scripts\") pod \"cinder-scheduler-0\" (UID: \"36ea0e6e-00c5-41a9-9ebc-0960214e6a65\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:55:10 crc kubenswrapper[4558]: I0120 17:55:10.566678 4558 scope.go:117] "RemoveContainer" containerID="4ea7fa61d8b21007a044345acec89fcebe56c2921cf0f76ff2002f44bdc88ede" Jan 20 17:55:10 crc kubenswrapper[4558]: E0120 17:55:10.567008 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:55:10 crc kubenswrapper[4558]: I0120 17:55:10.579797 4558 
kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="63d78039-84d1-41df-b0dc-3d0b01cc2a61" path="/var/lib/kubelet/pods/63d78039-84d1-41df-b0dc-3d0b01cc2a61/volumes" Jan 20 17:55:10 crc kubenswrapper[4558]: I0120 17:55:10.593643 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q7bkp\" (UniqueName: \"kubernetes.io/projected/c39fdbce-24c2-4a02-99a4-c671d09a4067-kube-api-access-q7bkp\") pod \"cinder-api-0\" (UID: \"c39fdbce-24c2-4a02-99a4-c671d09a4067\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:55:10 crc kubenswrapper[4558]: I0120 17:55:10.593822 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c39fdbce-24c2-4a02-99a4-c671d09a4067-config-data-custom\") pod \"cinder-api-0\" (UID: \"c39fdbce-24c2-4a02-99a4-c671d09a4067\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:55:10 crc kubenswrapper[4558]: I0120 17:55:10.593900 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c39fdbce-24c2-4a02-99a4-c671d09a4067-logs\") pod \"cinder-api-0\" (UID: \"c39fdbce-24c2-4a02-99a4-c671d09a4067\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:55:10 crc kubenswrapper[4558]: I0120 17:55:10.594014 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/36ea0e6e-00c5-41a9-9ebc-0960214e6a65-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"36ea0e6e-00c5-41a9-9ebc-0960214e6a65\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:55:10 crc kubenswrapper[4558]: I0120 17:55:10.594089 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c39fdbce-24c2-4a02-99a4-c671d09a4067-etc-machine-id\") pod \"cinder-api-0\" (UID: \"c39fdbce-24c2-4a02-99a4-c671d09a4067\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:55:10 crc kubenswrapper[4558]: I0120 17:55:10.594122 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36ea0e6e-00c5-41a9-9ebc-0960214e6a65-config-data\") pod \"cinder-scheduler-0\" (UID: \"36ea0e6e-00c5-41a9-9ebc-0960214e6a65\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:55:10 crc kubenswrapper[4558]: I0120 17:55:10.594237 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zlk7t\" (UniqueName: \"kubernetes.io/projected/36ea0e6e-00c5-41a9-9ebc-0960214e6a65-kube-api-access-zlk7t\") pod \"cinder-scheduler-0\" (UID: \"36ea0e6e-00c5-41a9-9ebc-0960214e6a65\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:55:10 crc kubenswrapper[4558]: I0120 17:55:10.594313 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c39fdbce-24c2-4a02-99a4-c671d09a4067-config-data\") pod \"cinder-api-0\" (UID: \"c39fdbce-24c2-4a02-99a4-c671d09a4067\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:55:10 crc kubenswrapper[4558]: I0120 17:55:10.594491 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36ea0e6e-00c5-41a9-9ebc-0960214e6a65-combined-ca-bundle\") pod \"cinder-scheduler-0\" 
(UID: \"36ea0e6e-00c5-41a9-9ebc-0960214e6a65\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:55:10 crc kubenswrapper[4558]: I0120 17:55:10.594546 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36ea0e6e-00c5-41a9-9ebc-0960214e6a65-scripts\") pod \"cinder-scheduler-0\" (UID: \"36ea0e6e-00c5-41a9-9ebc-0960214e6a65\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:55:10 crc kubenswrapper[4558]: I0120 17:55:10.594568 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c39fdbce-24c2-4a02-99a4-c671d09a4067-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"c39fdbce-24c2-4a02-99a4-c671d09a4067\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:55:10 crc kubenswrapper[4558]: I0120 17:55:10.594637 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c39fdbce-24c2-4a02-99a4-c671d09a4067-scripts\") pod \"cinder-api-0\" (UID: \"c39fdbce-24c2-4a02-99a4-c671d09a4067\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:55:10 crc kubenswrapper[4558]: I0120 17:55:10.594695 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/36ea0e6e-00c5-41a9-9ebc-0960214e6a65-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"36ea0e6e-00c5-41a9-9ebc-0960214e6a65\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:55:10 crc kubenswrapper[4558]: I0120 17:55:10.594874 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/36ea0e6e-00c5-41a9-9ebc-0960214e6a65-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"36ea0e6e-00c5-41a9-9ebc-0960214e6a65\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:55:10 crc kubenswrapper[4558]: I0120 17:55:10.600631 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36ea0e6e-00c5-41a9-9ebc-0960214e6a65-config-data\") pod \"cinder-scheduler-0\" (UID: \"36ea0e6e-00c5-41a9-9ebc-0960214e6a65\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:55:10 crc kubenswrapper[4558]: I0120 17:55:10.601314 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36ea0e6e-00c5-41a9-9ebc-0960214e6a65-scripts\") pod \"cinder-scheduler-0\" (UID: \"36ea0e6e-00c5-41a9-9ebc-0960214e6a65\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:55:10 crc kubenswrapper[4558]: I0120 17:55:10.601892 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36ea0e6e-00c5-41a9-9ebc-0960214e6a65-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"36ea0e6e-00c5-41a9-9ebc-0960214e6a65\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:55:10 crc kubenswrapper[4558]: I0120 17:55:10.615661 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/36ea0e6e-00c5-41a9-9ebc-0960214e6a65-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"36ea0e6e-00c5-41a9-9ebc-0960214e6a65\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:55:10 crc kubenswrapper[4558]: I0120 17:55:10.615717 4558 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-zlk7t\" (UniqueName: \"kubernetes.io/projected/36ea0e6e-00c5-41a9-9ebc-0960214e6a65-kube-api-access-zlk7t\") pod \"cinder-scheduler-0\" (UID: \"36ea0e6e-00c5-41a9-9ebc-0960214e6a65\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:55:10 crc kubenswrapper[4558]: I0120 17:55:10.649260 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:55:10 crc kubenswrapper[4558]: I0120 17:55:10.697077 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c39fdbce-24c2-4a02-99a4-c671d09a4067-config-data-custom\") pod \"cinder-api-0\" (UID: \"c39fdbce-24c2-4a02-99a4-c671d09a4067\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:55:10 crc kubenswrapper[4558]: I0120 17:55:10.697141 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c39fdbce-24c2-4a02-99a4-c671d09a4067-logs\") pod \"cinder-api-0\" (UID: \"c39fdbce-24c2-4a02-99a4-c671d09a4067\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:55:10 crc kubenswrapper[4558]: I0120 17:55:10.697189 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c39fdbce-24c2-4a02-99a4-c671d09a4067-etc-machine-id\") pod \"cinder-api-0\" (UID: \"c39fdbce-24c2-4a02-99a4-c671d09a4067\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:55:10 crc kubenswrapper[4558]: I0120 17:55:10.697237 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c39fdbce-24c2-4a02-99a4-c671d09a4067-config-data\") pod \"cinder-api-0\" (UID: \"c39fdbce-24c2-4a02-99a4-c671d09a4067\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:55:10 crc kubenswrapper[4558]: I0120 17:55:10.697304 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c39fdbce-24c2-4a02-99a4-c671d09a4067-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"c39fdbce-24c2-4a02-99a4-c671d09a4067\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:55:10 crc kubenswrapper[4558]: I0120 17:55:10.697350 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c39fdbce-24c2-4a02-99a4-c671d09a4067-etc-machine-id\") pod \"cinder-api-0\" (UID: \"c39fdbce-24c2-4a02-99a4-c671d09a4067\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:55:10 crc kubenswrapper[4558]: I0120 17:55:10.697380 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c39fdbce-24c2-4a02-99a4-c671d09a4067-scripts\") pod \"cinder-api-0\" (UID: \"c39fdbce-24c2-4a02-99a4-c671d09a4067\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:55:10 crc kubenswrapper[4558]: I0120 17:55:10.697502 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c39fdbce-24c2-4a02-99a4-c671d09a4067-logs\") pod \"cinder-api-0\" (UID: \"c39fdbce-24c2-4a02-99a4-c671d09a4067\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:55:10 crc kubenswrapper[4558]: I0120 17:55:10.697507 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q7bkp\" (UniqueName: 
\"kubernetes.io/projected/c39fdbce-24c2-4a02-99a4-c671d09a4067-kube-api-access-q7bkp\") pod \"cinder-api-0\" (UID: \"c39fdbce-24c2-4a02-99a4-c671d09a4067\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:55:10 crc kubenswrapper[4558]: I0120 17:55:10.702776 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c39fdbce-24c2-4a02-99a4-c671d09a4067-scripts\") pod \"cinder-api-0\" (UID: \"c39fdbce-24c2-4a02-99a4-c671d09a4067\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:55:10 crc kubenswrapper[4558]: I0120 17:55:10.702800 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c39fdbce-24c2-4a02-99a4-c671d09a4067-config-data-custom\") pod \"cinder-api-0\" (UID: \"c39fdbce-24c2-4a02-99a4-c671d09a4067\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:55:10 crc kubenswrapper[4558]: I0120 17:55:10.702932 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c39fdbce-24c2-4a02-99a4-c671d09a4067-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"c39fdbce-24c2-4a02-99a4-c671d09a4067\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:55:10 crc kubenswrapper[4558]: I0120 17:55:10.703004 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c39fdbce-24c2-4a02-99a4-c671d09a4067-config-data\") pod \"cinder-api-0\" (UID: \"c39fdbce-24c2-4a02-99a4-c671d09a4067\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:55:10 crc kubenswrapper[4558]: I0120 17:55:10.713332 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q7bkp\" (UniqueName: \"kubernetes.io/projected/c39fdbce-24c2-4a02-99a4-c671d09a4067-kube-api-access-q7bkp\") pod \"cinder-api-0\" (UID: \"c39fdbce-24c2-4a02-99a4-c671d09a4067\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:55:10 crc kubenswrapper[4558]: I0120 17:55:10.755340 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:55:11 crc kubenswrapper[4558]: I0120 17:55:11.074476 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:55:11 crc kubenswrapper[4558]: W0120 17:55:11.083187 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod36ea0e6e_00c5_41a9_9ebc_0960214e6a65.slice/crio-30d45dff30ffdef0deca7aadad3510c91a94ef4686e20a1606c228480aefca30 WatchSource:0}: Error finding container 30d45dff30ffdef0deca7aadad3510c91a94ef4686e20a1606c228480aefca30: Status 404 returned error can't find the container with id 30d45dff30ffdef0deca7aadad3510c91a94ef4686e20a1606c228480aefca30 Jan 20 17:55:11 crc kubenswrapper[4558]: I0120 17:55:11.212584 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:55:11 crc kubenswrapper[4558]: I0120 17:55:11.213151 4558 generic.go:334] "Generic (PLEG): container finished" podID="f963d040-98b2-46b4-a118-072a80ed0d53" containerID="7f55f22bc9fb124084f5b5d83ec240d39b440361ab6e1fb735a7109baba3a09c" exitCode=0 Jan 20 17:55:11 crc kubenswrapper[4558]: I0120 17:55:11.213198 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-sync-5b76z" event={"ID":"f963d040-98b2-46b4-a118-072a80ed0d53","Type":"ContainerDied","Data":"7f55f22bc9fb124084f5b5d83ec240d39b440361ab6e1fb735a7109baba3a09c"} Jan 20 17:55:11 crc kubenswrapper[4558]: I0120 17:55:11.214746 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"36ea0e6e-00c5-41a9-9ebc-0960214e6a65","Type":"ContainerStarted","Data":"30d45dff30ffdef0deca7aadad3510c91a94ef4686e20a1606c228480aefca30"} Jan 20 17:55:11 crc kubenswrapper[4558]: I0120 17:55:11.220640 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"7ba8f8c1-7947-4326-b7d2-49f89d248c21","Type":"ContainerStarted","Data":"3a9bd65fc6a62f8e31388f25a7802f1d87ccd3138db1d4de87aa87e1de385f54"} Jan 20 17:55:11 crc kubenswrapper[4558]: I0120 17:55:11.220682 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"7ba8f8c1-7947-4326-b7d2-49f89d248c21","Type":"ContainerStarted","Data":"72c054ef318a21775605c91ef2410cce83d929dfd0cf052af173da245eedd676"} Jan 20 17:55:11 crc kubenswrapper[4558]: E0120 17:55:11.493642 4558 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd739bf90_bc99_4f8a_870e_98026ba6a53c.slice/crio-conmon-513cf7b607f4c5f76a98e28f72ed3a3e4460bfb0341cd77104ff3e1a715e39f3.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd739bf90_bc99_4f8a_870e_98026ba6a53c.slice/crio-513cf7b607f4c5f76a98e28f72ed3a3e4460bfb0341cd77104ff3e1a715e39f3.scope\": RecentStats: unable to find data in memory cache]" Jan 20 17:55:12 crc kubenswrapper[4558]: I0120 17:55:12.243984 4558 generic.go:334] "Generic (PLEG): container finished" podID="d739bf90-bc99-4f8a-870e-98026ba6a53c" containerID="513cf7b607f4c5f76a98e28f72ed3a3e4460bfb0341cd77104ff3e1a715e39f3" exitCode=0 Jan 20 17:55:12 crc kubenswrapper[4558]: I0120 17:55:12.244064 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-glvsk" 
event={"ID":"d739bf90-bc99-4f8a-870e-98026ba6a53c","Type":"ContainerDied","Data":"513cf7b607f4c5f76a98e28f72ed3a3e4460bfb0341cd77104ff3e1a715e39f3"} Jan 20 17:55:12 crc kubenswrapper[4558]: I0120 17:55:12.247837 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"7ba8f8c1-7947-4326-b7d2-49f89d248c21","Type":"ContainerStarted","Data":"abd9f6fd38ccd715c9ead3e1fb12588259945bf444bcd60cc90c24693ed32591"} Jan 20 17:55:12 crc kubenswrapper[4558]: I0120 17:55:12.249720 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"c39fdbce-24c2-4a02-99a4-c671d09a4067","Type":"ContainerStarted","Data":"ba6645c36e46964aa0c2a3a64d022bc81d57d4d9e50dd710053fee0226f00efc"} Jan 20 17:55:12 crc kubenswrapper[4558]: I0120 17:55:12.249777 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"c39fdbce-24c2-4a02-99a4-c671d09a4067","Type":"ContainerStarted","Data":"39295faed190461a799be4c663b4b96327e60d990fed71af3689874f9fee213b"} Jan 20 17:55:12 crc kubenswrapper[4558]: I0120 17:55:12.251598 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"36ea0e6e-00c5-41a9-9ebc-0960214e6a65","Type":"ContainerStarted","Data":"16d7a5b115ea502b2d2354b051017e13ce428286b30fa423a1dbd745dc8c1bee"} Jan 20 17:55:12 crc kubenswrapper[4558]: I0120 17:55:12.561967 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-db-sync-5b76z" Jan 20 17:55:12 crc kubenswrapper[4558]: I0120 17:55:12.636694 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f963d040-98b2-46b4-a118-072a80ed0d53-combined-ca-bundle\") pod \"f963d040-98b2-46b4-a118-072a80ed0d53\" (UID: \"f963d040-98b2-46b4-a118-072a80ed0d53\") " Jan 20 17:55:12 crc kubenswrapper[4558]: I0120 17:55:12.636846 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tpm4g\" (UniqueName: \"kubernetes.io/projected/f963d040-98b2-46b4-a118-072a80ed0d53-kube-api-access-tpm4g\") pod \"f963d040-98b2-46b4-a118-072a80ed0d53\" (UID: \"f963d040-98b2-46b4-a118-072a80ed0d53\") " Jan 20 17:55:12 crc kubenswrapper[4558]: I0120 17:55:12.636959 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/f963d040-98b2-46b4-a118-072a80ed0d53-config\") pod \"f963d040-98b2-46b4-a118-072a80ed0d53\" (UID: \"f963d040-98b2-46b4-a118-072a80ed0d53\") " Jan 20 17:55:12 crc kubenswrapper[4558]: I0120 17:55:12.642393 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f963d040-98b2-46b4-a118-072a80ed0d53-kube-api-access-tpm4g" (OuterVolumeSpecName: "kube-api-access-tpm4g") pod "f963d040-98b2-46b4-a118-072a80ed0d53" (UID: "f963d040-98b2-46b4-a118-072a80ed0d53"). InnerVolumeSpecName "kube-api-access-tpm4g". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:55:12 crc kubenswrapper[4558]: I0120 17:55:12.663002 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f963d040-98b2-46b4-a118-072a80ed0d53-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f963d040-98b2-46b4-a118-072a80ed0d53" (UID: "f963d040-98b2-46b4-a118-072a80ed0d53"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:55:12 crc kubenswrapper[4558]: I0120 17:55:12.666029 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f963d040-98b2-46b4-a118-072a80ed0d53-config" (OuterVolumeSpecName: "config") pod "f963d040-98b2-46b4-a118-072a80ed0d53" (UID: "f963d040-98b2-46b4-a118-072a80ed0d53"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:55:12 crc kubenswrapper[4558]: I0120 17:55:12.739766 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f963d040-98b2-46b4-a118-072a80ed0d53-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:12 crc kubenswrapper[4558]: I0120 17:55:12.739803 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tpm4g\" (UniqueName: \"kubernetes.io/projected/f963d040-98b2-46b4-a118-072a80ed0d53-kube-api-access-tpm4g\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:12 crc kubenswrapper[4558]: I0120 17:55:12.739816 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/f963d040-98b2-46b4-a118-072a80ed0d53-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:13 crc kubenswrapper[4558]: I0120 17:55:13.261007 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-db-sync-5b76z" event={"ID":"f963d040-98b2-46b4-a118-072a80ed0d53","Type":"ContainerDied","Data":"63753a69a848c4e6b0d50b394008b514dfebeb076a3d237f7444817dcee6df5c"} Jan 20 17:55:13 crc kubenswrapper[4558]: I0120 17:55:13.261374 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="63753a69a848c4e6b0d50b394008b514dfebeb076a3d237f7444817dcee6df5c" Jan 20 17:55:13 crc kubenswrapper[4558]: I0120 17:55:13.261022 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-db-sync-5b76z" Jan 20 17:55:13 crc kubenswrapper[4558]: I0120 17:55:13.263585 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"36ea0e6e-00c5-41a9-9ebc-0960214e6a65","Type":"ContainerStarted","Data":"89d7d8831bb61b1585ef371d9cfbff3b84eb872f95c98bbc734c620c9609dcd7"} Jan 20 17:55:13 crc kubenswrapper[4558]: I0120 17:55:13.266201 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"7ba8f8c1-7947-4326-b7d2-49f89d248c21","Type":"ContainerStarted","Data":"79d692f8937fead706fe3763c908005975158c1dc08e3d95aa08c9cf976b6e89"} Jan 20 17:55:13 crc kubenswrapper[4558]: I0120 17:55:13.268144 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"c39fdbce-24c2-4a02-99a4-c671d09a4067","Type":"ContainerStarted","Data":"3a19ddf46ca42a5adf3fb20561481c08ce4ad4c994527d7977a1c93d179840fe"} Jan 20 17:55:13 crc kubenswrapper[4558]: I0120 17:55:13.268219 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:55:13 crc kubenswrapper[4558]: I0120 17:55:13.284635 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/cinder-scheduler-0" podStartSLOduration=3.284619346 podStartE2EDuration="3.284619346s" podCreationTimestamp="2026-01-20 17:55:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:55:13.282832517 +0000 UTC m=+4407.043170484" watchObservedRunningTime="2026-01-20 17:55:13.284619346 +0000 UTC m=+4407.044957313" Jan 20 17:55:13 crc kubenswrapper[4558]: I0120 17:55:13.310600 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/cinder-api-0" podStartSLOduration=3.310584283 podStartE2EDuration="3.310584283s" podCreationTimestamp="2026-01-20 17:55:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:55:13.298359012 +0000 UTC m=+4407.058696970" watchObservedRunningTime="2026-01-20 17:55:13.310584283 +0000 UTC m=+4407.070922250" Jan 20 17:55:13 crc kubenswrapper[4558]: I0120 17:55:13.490399 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-64ffc476d5-9zjgb"] Jan 20 17:55:13 crc kubenswrapper[4558]: E0120 17:55:13.491487 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f963d040-98b2-46b4-a118-072a80ed0d53" containerName="neutron-db-sync" Jan 20 17:55:13 crc kubenswrapper[4558]: I0120 17:55:13.491537 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f963d040-98b2-46b4-a118-072a80ed0d53" containerName="neutron-db-sync" Jan 20 17:55:13 crc kubenswrapper[4558]: I0120 17:55:13.493060 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f963d040-98b2-46b4-a118-072a80ed0d53" containerName="neutron-db-sync" Jan 20 17:55:13 crc kubenswrapper[4558]: I0120 17:55:13.494014 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-64ffc476d5-9zjgb" Jan 20 17:55:13 crc kubenswrapper[4558]: I0120 17:55:13.497778 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-httpd-config" Jan 20 17:55:13 crc kubenswrapper[4558]: I0120 17:55:13.497842 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-neutron-ovndbs" Jan 20 17:55:13 crc kubenswrapper[4558]: I0120 17:55:13.497974 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-neutron-dockercfg-j6g6q" Jan 20 17:55:13 crc kubenswrapper[4558]: I0120 17:55:13.499758 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-config" Jan 20 17:55:13 crc kubenswrapper[4558]: I0120 17:55:13.510560 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-64ffc476d5-9zjgb"] Jan 20 17:55:13 crc kubenswrapper[4558]: I0120 17:55:13.560869 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/b637458a-f8f6-456e-888f-3d61734be420-config\") pod \"neutron-64ffc476d5-9zjgb\" (UID: \"b637458a-f8f6-456e-888f-3d61734be420\") " pod="openstack-kuttl-tests/neutron-64ffc476d5-9zjgb" Jan 20 17:55:13 crc kubenswrapper[4558]: I0120 17:55:13.560977 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/b637458a-f8f6-456e-888f-3d61734be420-httpd-config\") pod \"neutron-64ffc476d5-9zjgb\" (UID: \"b637458a-f8f6-456e-888f-3d61734be420\") " pod="openstack-kuttl-tests/neutron-64ffc476d5-9zjgb" Jan 20 17:55:13 crc kubenswrapper[4558]: I0120 17:55:13.561009 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b637458a-f8f6-456e-888f-3d61734be420-combined-ca-bundle\") pod \"neutron-64ffc476d5-9zjgb\" (UID: \"b637458a-f8f6-456e-888f-3d61734be420\") " pod="openstack-kuttl-tests/neutron-64ffc476d5-9zjgb" Jan 20 17:55:13 crc kubenswrapper[4558]: I0120 17:55:13.561134 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b637458a-f8f6-456e-888f-3d61734be420-ovndb-tls-certs\") pod \"neutron-64ffc476d5-9zjgb\" (UID: \"b637458a-f8f6-456e-888f-3d61734be420\") " pod="openstack-kuttl-tests/neutron-64ffc476d5-9zjgb" Jan 20 17:55:13 crc kubenswrapper[4558]: I0120 17:55:13.561185 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j5bc4\" (UniqueName: \"kubernetes.io/projected/b637458a-f8f6-456e-888f-3d61734be420-kube-api-access-j5bc4\") pod \"neutron-64ffc476d5-9zjgb\" (UID: \"b637458a-f8f6-456e-888f-3d61734be420\") " pod="openstack-kuttl-tests/neutron-64ffc476d5-9zjgb" Jan 20 17:55:13 crc kubenswrapper[4558]: I0120 17:55:13.662420 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/b637458a-f8f6-456e-888f-3d61734be420-httpd-config\") pod \"neutron-64ffc476d5-9zjgb\" (UID: \"b637458a-f8f6-456e-888f-3d61734be420\") " pod="openstack-kuttl-tests/neutron-64ffc476d5-9zjgb" Jan 20 17:55:13 crc kubenswrapper[4558]: I0120 17:55:13.662463 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b637458a-f8f6-456e-888f-3d61734be420-combined-ca-bundle\") pod \"neutron-64ffc476d5-9zjgb\" (UID: \"b637458a-f8f6-456e-888f-3d61734be420\") " pod="openstack-kuttl-tests/neutron-64ffc476d5-9zjgb" Jan 20 17:55:13 crc kubenswrapper[4558]: I0120 17:55:13.662568 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b637458a-f8f6-456e-888f-3d61734be420-ovndb-tls-certs\") pod \"neutron-64ffc476d5-9zjgb\" (UID: \"b637458a-f8f6-456e-888f-3d61734be420\") " pod="openstack-kuttl-tests/neutron-64ffc476d5-9zjgb" Jan 20 17:55:13 crc kubenswrapper[4558]: I0120 17:55:13.662616 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j5bc4\" (UniqueName: \"kubernetes.io/projected/b637458a-f8f6-456e-888f-3d61734be420-kube-api-access-j5bc4\") pod \"neutron-64ffc476d5-9zjgb\" (UID: \"b637458a-f8f6-456e-888f-3d61734be420\") " pod="openstack-kuttl-tests/neutron-64ffc476d5-9zjgb" Jan 20 17:55:13 crc kubenswrapper[4558]: I0120 17:55:13.662655 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/b637458a-f8f6-456e-888f-3d61734be420-config\") pod \"neutron-64ffc476d5-9zjgb\" (UID: \"b637458a-f8f6-456e-888f-3d61734be420\") " pod="openstack-kuttl-tests/neutron-64ffc476d5-9zjgb" Jan 20 17:55:13 crc kubenswrapper[4558]: I0120 17:55:13.694638 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b637458a-f8f6-456e-888f-3d61734be420-ovndb-tls-certs\") pod \"neutron-64ffc476d5-9zjgb\" (UID: \"b637458a-f8f6-456e-888f-3d61734be420\") " pod="openstack-kuttl-tests/neutron-64ffc476d5-9zjgb" Jan 20 17:55:13 crc kubenswrapper[4558]: I0120 17:55:13.694696 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b637458a-f8f6-456e-888f-3d61734be420-combined-ca-bundle\") pod \"neutron-64ffc476d5-9zjgb\" (UID: \"b637458a-f8f6-456e-888f-3d61734be420\") " pod="openstack-kuttl-tests/neutron-64ffc476d5-9zjgb" Jan 20 17:55:13 crc kubenswrapper[4558]: I0120 17:55:13.694846 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/b637458a-f8f6-456e-888f-3d61734be420-config\") pod \"neutron-64ffc476d5-9zjgb\" (UID: \"b637458a-f8f6-456e-888f-3d61734be420\") " pod="openstack-kuttl-tests/neutron-64ffc476d5-9zjgb" Jan 20 17:55:13 crc kubenswrapper[4558]: I0120 17:55:13.695158 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/b637458a-f8f6-456e-888f-3d61734be420-httpd-config\") pod \"neutron-64ffc476d5-9zjgb\" (UID: \"b637458a-f8f6-456e-888f-3d61734be420\") " pod="openstack-kuttl-tests/neutron-64ffc476d5-9zjgb" Jan 20 17:55:13 crc kubenswrapper[4558]: I0120 17:55:13.695632 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j5bc4\" (UniqueName: \"kubernetes.io/projected/b637458a-f8f6-456e-888f-3d61734be420-kube-api-access-j5bc4\") pod \"neutron-64ffc476d5-9zjgb\" (UID: \"b637458a-f8f6-456e-888f-3d61734be420\") " pod="openstack-kuttl-tests/neutron-64ffc476d5-9zjgb" Jan 20 17:55:13 crc kubenswrapper[4558]: I0120 17:55:13.786976 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-glvsk" Jan 20 17:55:13 crc kubenswrapper[4558]: I0120 17:55:13.839479 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-64ffc476d5-9zjgb" Jan 20 17:55:13 crc kubenswrapper[4558]: I0120 17:55:13.867655 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d739bf90-bc99-4f8a-870e-98026ba6a53c-combined-ca-bundle\") pod \"d739bf90-bc99-4f8a-870e-98026ba6a53c\" (UID: \"d739bf90-bc99-4f8a-870e-98026ba6a53c\") " Jan 20 17:55:13 crc kubenswrapper[4558]: I0120 17:55:13.867701 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d739bf90-bc99-4f8a-870e-98026ba6a53c-config-data\") pod \"d739bf90-bc99-4f8a-870e-98026ba6a53c\" (UID: \"d739bf90-bc99-4f8a-870e-98026ba6a53c\") " Jan 20 17:55:13 crc kubenswrapper[4558]: I0120 17:55:13.867749 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d739bf90-bc99-4f8a-870e-98026ba6a53c-credential-keys\") pod \"d739bf90-bc99-4f8a-870e-98026ba6a53c\" (UID: \"d739bf90-bc99-4f8a-870e-98026ba6a53c\") " Jan 20 17:55:13 crc kubenswrapper[4558]: I0120 17:55:13.867769 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d739bf90-bc99-4f8a-870e-98026ba6a53c-scripts\") pod \"d739bf90-bc99-4f8a-870e-98026ba6a53c\" (UID: \"d739bf90-bc99-4f8a-870e-98026ba6a53c\") " Jan 20 17:55:13 crc kubenswrapper[4558]: I0120 17:55:13.867802 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d739bf90-bc99-4f8a-870e-98026ba6a53c-fernet-keys\") pod \"d739bf90-bc99-4f8a-870e-98026ba6a53c\" (UID: \"d739bf90-bc99-4f8a-870e-98026ba6a53c\") " Jan 20 17:55:13 crc kubenswrapper[4558]: I0120 17:55:13.867847 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-krcnj\" (UniqueName: \"kubernetes.io/projected/d739bf90-bc99-4f8a-870e-98026ba6a53c-kube-api-access-krcnj\") pod \"d739bf90-bc99-4f8a-870e-98026ba6a53c\" (UID: \"d739bf90-bc99-4f8a-870e-98026ba6a53c\") " Jan 20 17:55:13 crc kubenswrapper[4558]: I0120 17:55:13.895935 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d739bf90-bc99-4f8a-870e-98026ba6a53c-scripts" (OuterVolumeSpecName: "scripts") pod "d739bf90-bc99-4f8a-870e-98026ba6a53c" (UID: "d739bf90-bc99-4f8a-870e-98026ba6a53c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:55:13 crc kubenswrapper[4558]: I0120 17:55:13.896650 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d739bf90-bc99-4f8a-870e-98026ba6a53c-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "d739bf90-bc99-4f8a-870e-98026ba6a53c" (UID: "d739bf90-bc99-4f8a-870e-98026ba6a53c"). InnerVolumeSpecName "credential-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:55:13 crc kubenswrapper[4558]: I0120 17:55:13.899044 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d739bf90-bc99-4f8a-870e-98026ba6a53c-kube-api-access-krcnj" (OuterVolumeSpecName: "kube-api-access-krcnj") pod "d739bf90-bc99-4f8a-870e-98026ba6a53c" (UID: "d739bf90-bc99-4f8a-870e-98026ba6a53c"). InnerVolumeSpecName "kube-api-access-krcnj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:55:13 crc kubenswrapper[4558]: I0120 17:55:13.899144 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d739bf90-bc99-4f8a-870e-98026ba6a53c-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "d739bf90-bc99-4f8a-870e-98026ba6a53c" (UID: "d739bf90-bc99-4f8a-870e-98026ba6a53c"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:55:13 crc kubenswrapper[4558]: I0120 17:55:13.900446 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d739bf90-bc99-4f8a-870e-98026ba6a53c-config-data" (OuterVolumeSpecName: "config-data") pod "d739bf90-bc99-4f8a-870e-98026ba6a53c" (UID: "d739bf90-bc99-4f8a-870e-98026ba6a53c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:55:13 crc kubenswrapper[4558]: I0120 17:55:13.905261 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d739bf90-bc99-4f8a-870e-98026ba6a53c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d739bf90-bc99-4f8a-870e-98026ba6a53c" (UID: "d739bf90-bc99-4f8a-870e-98026ba6a53c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:55:13 crc kubenswrapper[4558]: I0120 17:55:13.971091 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d739bf90-bc99-4f8a-870e-98026ba6a53c-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:13 crc kubenswrapper[4558]: I0120 17:55:13.971131 4558 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d739bf90-bc99-4f8a-870e-98026ba6a53c-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:13 crc kubenswrapper[4558]: I0120 17:55:13.971142 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d739bf90-bc99-4f8a-870e-98026ba6a53c-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:13 crc kubenswrapper[4558]: I0120 17:55:13.971150 4558 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d739bf90-bc99-4f8a-870e-98026ba6a53c-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:13 crc kubenswrapper[4558]: I0120 17:55:13.971189 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-krcnj\" (UniqueName: \"kubernetes.io/projected/d739bf90-bc99-4f8a-870e-98026ba6a53c-kube-api-access-krcnj\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:13 crc kubenswrapper[4558]: I0120 17:55:13.971204 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d739bf90-bc99-4f8a-870e-98026ba6a53c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:14 crc kubenswrapper[4558]: I0120 17:55:14.257780 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack-kuttl-tests/neutron-64ffc476d5-9zjgb"] Jan 20 17:55:14 crc kubenswrapper[4558]: I0120 17:55:14.285898 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-bootstrap-glvsk" event={"ID":"d739bf90-bc99-4f8a-870e-98026ba6a53c","Type":"ContainerDied","Data":"f72f0830340dc1a2eb38cfa3fadc0b0ca0b77a7404b74932cdc2b1f647d97e1e"} Jan 20 17:55:14 crc kubenswrapper[4558]: I0120 17:55:14.285947 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f72f0830340dc1a2eb38cfa3fadc0b0ca0b77a7404b74932cdc2b1f647d97e1e" Jan 20 17:55:14 crc kubenswrapper[4558]: I0120 17:55:14.286037 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-bootstrap-glvsk" Jan 20 17:55:14 crc kubenswrapper[4558]: I0120 17:55:14.287078 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-64ffc476d5-9zjgb" event={"ID":"b637458a-f8f6-456e-888f-3d61734be420","Type":"ContainerStarted","Data":"2a7a8fb4ffd1fa738d4d412796f871c7bff46cab77197775cf3d08b865d59e7c"} Jan 20 17:55:14 crc kubenswrapper[4558]: I0120 17:55:14.406637 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-747b5668bc-jgcd6"] Jan 20 17:55:14 crc kubenswrapper[4558]: E0120 17:55:14.407061 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d739bf90-bc99-4f8a-870e-98026ba6a53c" containerName="keystone-bootstrap" Jan 20 17:55:14 crc kubenswrapper[4558]: I0120 17:55:14.407082 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d739bf90-bc99-4f8a-870e-98026ba6a53c" containerName="keystone-bootstrap" Jan 20 17:55:14 crc kubenswrapper[4558]: I0120 17:55:14.408962 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d739bf90-bc99-4f8a-870e-98026ba6a53c" containerName="keystone-bootstrap" Jan 20 17:55:14 crc kubenswrapper[4558]: I0120 17:55:14.409647 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-747b5668bc-jgcd6" Jan 20 17:55:14 crc kubenswrapper[4558]: I0120 17:55:14.416926 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-keystone-public-svc" Jan 20 17:55:14 crc kubenswrapper[4558]: I0120 17:55:14.417044 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone" Jan 20 17:55:14 crc kubenswrapper[4558]: I0120 17:55:14.417228 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-config-data" Jan 20 17:55:14 crc kubenswrapper[4558]: I0120 17:55:14.417302 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-keystone-internal-svc" Jan 20 17:55:14 crc kubenswrapper[4558]: I0120 17:55:14.417377 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-scripts" Jan 20 17:55:14 crc kubenswrapper[4558]: I0120 17:55:14.426840 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-keystone-dockercfg-bqxt2" Jan 20 17:55:14 crc kubenswrapper[4558]: I0120 17:55:14.439149 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-747b5668bc-jgcd6"] Jan 20 17:55:14 crc kubenswrapper[4558]: I0120 17:55:14.517267 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/747b1096-c84a-43e1-930e-9f887a42c524-public-tls-certs\") pod \"keystone-747b5668bc-jgcd6\" (UID: \"747b1096-c84a-43e1-930e-9f887a42c524\") " pod="openstack-kuttl-tests/keystone-747b5668bc-jgcd6" Jan 20 17:55:14 crc kubenswrapper[4558]: I0120 17:55:14.517400 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/747b1096-c84a-43e1-930e-9f887a42c524-fernet-keys\") pod \"keystone-747b5668bc-jgcd6\" (UID: \"747b1096-c84a-43e1-930e-9f887a42c524\") " pod="openstack-kuttl-tests/keystone-747b5668bc-jgcd6" Jan 20 17:55:14 crc kubenswrapper[4558]: I0120 17:55:14.517478 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sd6x5\" (UniqueName: \"kubernetes.io/projected/747b1096-c84a-43e1-930e-9f887a42c524-kube-api-access-sd6x5\") pod \"keystone-747b5668bc-jgcd6\" (UID: \"747b1096-c84a-43e1-930e-9f887a42c524\") " pod="openstack-kuttl-tests/keystone-747b5668bc-jgcd6" Jan 20 17:55:14 crc kubenswrapper[4558]: I0120 17:55:14.517510 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/747b1096-c84a-43e1-930e-9f887a42c524-combined-ca-bundle\") pod \"keystone-747b5668bc-jgcd6\" (UID: \"747b1096-c84a-43e1-930e-9f887a42c524\") " pod="openstack-kuttl-tests/keystone-747b5668bc-jgcd6" Jan 20 17:55:14 crc kubenswrapper[4558]: I0120 17:55:14.517528 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/747b1096-c84a-43e1-930e-9f887a42c524-scripts\") pod \"keystone-747b5668bc-jgcd6\" (UID: \"747b1096-c84a-43e1-930e-9f887a42c524\") " pod="openstack-kuttl-tests/keystone-747b5668bc-jgcd6" Jan 20 17:55:14 crc kubenswrapper[4558]: I0120 17:55:14.517548 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/747b1096-c84a-43e1-930e-9f887a42c524-internal-tls-certs\") pod \"keystone-747b5668bc-jgcd6\" (UID: \"747b1096-c84a-43e1-930e-9f887a42c524\") " pod="openstack-kuttl-tests/keystone-747b5668bc-jgcd6" Jan 20 17:55:14 crc kubenswrapper[4558]: I0120 17:55:14.517601 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/747b1096-c84a-43e1-930e-9f887a42c524-config-data\") pod \"keystone-747b5668bc-jgcd6\" (UID: \"747b1096-c84a-43e1-930e-9f887a42c524\") " pod="openstack-kuttl-tests/keystone-747b5668bc-jgcd6" Jan 20 17:55:14 crc kubenswrapper[4558]: I0120 17:55:14.517637 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/747b1096-c84a-43e1-930e-9f887a42c524-credential-keys\") pod \"keystone-747b5668bc-jgcd6\" (UID: \"747b1096-c84a-43e1-930e-9f887a42c524\") " pod="openstack-kuttl-tests/keystone-747b5668bc-jgcd6" Jan 20 17:55:14 crc kubenswrapper[4558]: I0120 17:55:14.621271 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/747b1096-c84a-43e1-930e-9f887a42c524-credential-keys\") pod \"keystone-747b5668bc-jgcd6\" (UID: \"747b1096-c84a-43e1-930e-9f887a42c524\") " pod="openstack-kuttl-tests/keystone-747b5668bc-jgcd6" Jan 20 17:55:14 crc kubenswrapper[4558]: I0120 17:55:14.621393 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/747b1096-c84a-43e1-930e-9f887a42c524-public-tls-certs\") pod \"keystone-747b5668bc-jgcd6\" (UID: \"747b1096-c84a-43e1-930e-9f887a42c524\") " pod="openstack-kuttl-tests/keystone-747b5668bc-jgcd6" Jan 20 17:55:14 crc kubenswrapper[4558]: I0120 17:55:14.621493 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/747b1096-c84a-43e1-930e-9f887a42c524-fernet-keys\") pod \"keystone-747b5668bc-jgcd6\" (UID: \"747b1096-c84a-43e1-930e-9f887a42c524\") " pod="openstack-kuttl-tests/keystone-747b5668bc-jgcd6" Jan 20 17:55:14 crc kubenswrapper[4558]: I0120 17:55:14.621580 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sd6x5\" (UniqueName: \"kubernetes.io/projected/747b1096-c84a-43e1-930e-9f887a42c524-kube-api-access-sd6x5\") pod \"keystone-747b5668bc-jgcd6\" (UID: \"747b1096-c84a-43e1-930e-9f887a42c524\") " pod="openstack-kuttl-tests/keystone-747b5668bc-jgcd6" Jan 20 17:55:14 crc kubenswrapper[4558]: I0120 17:55:14.621611 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/747b1096-c84a-43e1-930e-9f887a42c524-combined-ca-bundle\") pod \"keystone-747b5668bc-jgcd6\" (UID: \"747b1096-c84a-43e1-930e-9f887a42c524\") " pod="openstack-kuttl-tests/keystone-747b5668bc-jgcd6" Jan 20 17:55:14 crc kubenswrapper[4558]: I0120 17:55:14.621630 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/747b1096-c84a-43e1-930e-9f887a42c524-scripts\") pod \"keystone-747b5668bc-jgcd6\" (UID: \"747b1096-c84a-43e1-930e-9f887a42c524\") " pod="openstack-kuttl-tests/keystone-747b5668bc-jgcd6" Jan 20 17:55:14 crc kubenswrapper[4558]: I0120 17:55:14.621650 4558 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/747b1096-c84a-43e1-930e-9f887a42c524-internal-tls-certs\") pod \"keystone-747b5668bc-jgcd6\" (UID: \"747b1096-c84a-43e1-930e-9f887a42c524\") " pod="openstack-kuttl-tests/keystone-747b5668bc-jgcd6" Jan 20 17:55:14 crc kubenswrapper[4558]: I0120 17:55:14.621722 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/747b1096-c84a-43e1-930e-9f887a42c524-config-data\") pod \"keystone-747b5668bc-jgcd6\" (UID: \"747b1096-c84a-43e1-930e-9f887a42c524\") " pod="openstack-kuttl-tests/keystone-747b5668bc-jgcd6" Jan 20 17:55:14 crc kubenswrapper[4558]: I0120 17:55:14.637300 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/747b1096-c84a-43e1-930e-9f887a42c524-credential-keys\") pod \"keystone-747b5668bc-jgcd6\" (UID: \"747b1096-c84a-43e1-930e-9f887a42c524\") " pod="openstack-kuttl-tests/keystone-747b5668bc-jgcd6" Jan 20 17:55:14 crc kubenswrapper[4558]: I0120 17:55:14.640770 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/747b1096-c84a-43e1-930e-9f887a42c524-public-tls-certs\") pod \"keystone-747b5668bc-jgcd6\" (UID: \"747b1096-c84a-43e1-930e-9f887a42c524\") " pod="openstack-kuttl-tests/keystone-747b5668bc-jgcd6" Jan 20 17:55:14 crc kubenswrapper[4558]: I0120 17:55:14.641449 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/747b1096-c84a-43e1-930e-9f887a42c524-scripts\") pod \"keystone-747b5668bc-jgcd6\" (UID: \"747b1096-c84a-43e1-930e-9f887a42c524\") " pod="openstack-kuttl-tests/keystone-747b5668bc-jgcd6" Jan 20 17:55:14 crc kubenswrapper[4558]: I0120 17:55:14.651992 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/747b1096-c84a-43e1-930e-9f887a42c524-config-data\") pod \"keystone-747b5668bc-jgcd6\" (UID: \"747b1096-c84a-43e1-930e-9f887a42c524\") " pod="openstack-kuttl-tests/keystone-747b5668bc-jgcd6" Jan 20 17:55:14 crc kubenswrapper[4558]: I0120 17:55:14.655644 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/747b1096-c84a-43e1-930e-9f887a42c524-internal-tls-certs\") pod \"keystone-747b5668bc-jgcd6\" (UID: \"747b1096-c84a-43e1-930e-9f887a42c524\") " pod="openstack-kuttl-tests/keystone-747b5668bc-jgcd6" Jan 20 17:55:14 crc kubenswrapper[4558]: I0120 17:55:14.656723 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/747b1096-c84a-43e1-930e-9f887a42c524-fernet-keys\") pod \"keystone-747b5668bc-jgcd6\" (UID: \"747b1096-c84a-43e1-930e-9f887a42c524\") " pod="openstack-kuttl-tests/keystone-747b5668bc-jgcd6" Jan 20 17:55:14 crc kubenswrapper[4558]: I0120 17:55:14.659730 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/747b1096-c84a-43e1-930e-9f887a42c524-combined-ca-bundle\") pod \"keystone-747b5668bc-jgcd6\" (UID: \"747b1096-c84a-43e1-930e-9f887a42c524\") " pod="openstack-kuttl-tests/keystone-747b5668bc-jgcd6" Jan 20 17:55:14 crc kubenswrapper[4558]: I0120 17:55:14.661604 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sd6x5\" (UniqueName: 
\"kubernetes.io/projected/747b1096-c84a-43e1-930e-9f887a42c524-kube-api-access-sd6x5\") pod \"keystone-747b5668bc-jgcd6\" (UID: \"747b1096-c84a-43e1-930e-9f887a42c524\") " pod="openstack-kuttl-tests/keystone-747b5668bc-jgcd6" Jan 20 17:55:14 crc kubenswrapper[4558]: I0120 17:55:14.755480 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-747b5668bc-jgcd6" Jan 20 17:55:15 crc kubenswrapper[4558]: I0120 17:55:15.204398 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-747b5668bc-jgcd6"] Jan 20 17:55:15 crc kubenswrapper[4558]: W0120 17:55:15.207173 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod747b1096_c84a_43e1_930e_9f887a42c524.slice/crio-fffe7f2e3037040f2613fe69c4c37b61f4fc32c437a71f652f741463a950b67f WatchSource:0}: Error finding container fffe7f2e3037040f2613fe69c4c37b61f4fc32c437a71f652f741463a950b67f: Status 404 returned error can't find the container with id fffe7f2e3037040f2613fe69c4c37b61f4fc32c437a71f652f741463a950b67f Jan 20 17:55:15 crc kubenswrapper[4558]: I0120 17:55:15.299504 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-747b5668bc-jgcd6" event={"ID":"747b1096-c84a-43e1-930e-9f887a42c524","Type":"ContainerStarted","Data":"fffe7f2e3037040f2613fe69c4c37b61f4fc32c437a71f652f741463a950b67f"} Jan 20 17:55:15 crc kubenswrapper[4558]: I0120 17:55:15.302368 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"7ba8f8c1-7947-4326-b7d2-49f89d248c21","Type":"ContainerStarted","Data":"8c5549e3c402e532f1c102abe82c1184d17474896e83aaa32a03381dda0c78c7"} Jan 20 17:55:15 crc kubenswrapper[4558]: I0120 17:55:15.302537 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:55:15 crc kubenswrapper[4558]: I0120 17:55:15.306070 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-64ffc476d5-9zjgb" event={"ID":"b637458a-f8f6-456e-888f-3d61734be420","Type":"ContainerStarted","Data":"fe46b8f805f4f36bad1c42f2cb3cd2007879b8f0cad9c8dfc988eb7d2db5084f"} Jan 20 17:55:15 crc kubenswrapper[4558]: I0120 17:55:15.306119 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-64ffc476d5-9zjgb" event={"ID":"b637458a-f8f6-456e-888f-3d61734be420","Type":"ContainerStarted","Data":"aa3f685bf49ef2fddab92d64cfaccff086ad8c1a707e26911bd2c12773aaacf3"} Jan 20 17:55:15 crc kubenswrapper[4558]: I0120 17:55:15.306329 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/neutron-64ffc476d5-9zjgb" Jan 20 17:55:15 crc kubenswrapper[4558]: I0120 17:55:15.322139 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=2.035233861 podStartE2EDuration="6.322129132s" podCreationTimestamp="2026-01-20 17:55:09 +0000 UTC" firstStartedPulling="2026-01-20 17:55:10.230032583 +0000 UTC m=+4403.990370540" lastFinishedPulling="2026-01-20 17:55:14.516927843 +0000 UTC m=+4408.277265811" observedRunningTime="2026-01-20 17:55:15.319109505 +0000 UTC m=+4409.079447472" watchObservedRunningTime="2026-01-20 17:55:15.322129132 +0000 UTC m=+4409.082467099" Jan 20 17:55:15 crc kubenswrapper[4558]: I0120 17:55:15.341624 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack-kuttl-tests/neutron-64ffc476d5-9zjgb" podStartSLOduration=2.341607105 podStartE2EDuration="2.341607105s" podCreationTimestamp="2026-01-20 17:55:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:55:15.332267419 +0000 UTC m=+4409.092605386" watchObservedRunningTime="2026-01-20 17:55:15.341607105 +0000 UTC m=+4409.101945072" Jan 20 17:55:15 crc kubenswrapper[4558]: I0120 17:55:15.457240 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:55:15 crc kubenswrapper[4558]: I0120 17:55:15.457547 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-api-0" podUID="c39fdbce-24c2-4a02-99a4-c671d09a4067" containerName="cinder-api-log" containerID="cri-o://ba6645c36e46964aa0c2a3a64d022bc81d57d4d9e50dd710053fee0226f00efc" gracePeriod=30 Jan 20 17:55:15 crc kubenswrapper[4558]: I0120 17:55:15.457588 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-api-0" podUID="c39fdbce-24c2-4a02-99a4-c671d09a4067" containerName="cinder-api" containerID="cri-o://3a19ddf46ca42a5adf3fb20561481c08ce4ad4c994527d7977a1c93d179840fe" gracePeriod=30 Jan 20 17:55:15 crc kubenswrapper[4558]: I0120 17:55:15.650307 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:55:15 crc kubenswrapper[4558]: I0120 17:55:15.939322 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.063866 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c39fdbce-24c2-4a02-99a4-c671d09a4067-config-data-custom\") pod \"c39fdbce-24c2-4a02-99a4-c671d09a4067\" (UID: \"c39fdbce-24c2-4a02-99a4-c671d09a4067\") " Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.063925 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c39fdbce-24c2-4a02-99a4-c671d09a4067-logs\") pod \"c39fdbce-24c2-4a02-99a4-c671d09a4067\" (UID: \"c39fdbce-24c2-4a02-99a4-c671d09a4067\") " Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.063966 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c39fdbce-24c2-4a02-99a4-c671d09a4067-scripts\") pod \"c39fdbce-24c2-4a02-99a4-c671d09a4067\" (UID: \"c39fdbce-24c2-4a02-99a4-c671d09a4067\") " Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.063994 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c39fdbce-24c2-4a02-99a4-c671d09a4067-etc-machine-id\") pod \"c39fdbce-24c2-4a02-99a4-c671d09a4067\" (UID: \"c39fdbce-24c2-4a02-99a4-c671d09a4067\") " Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.064035 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q7bkp\" (UniqueName: \"kubernetes.io/projected/c39fdbce-24c2-4a02-99a4-c671d09a4067-kube-api-access-q7bkp\") pod \"c39fdbce-24c2-4a02-99a4-c671d09a4067\" (UID: \"c39fdbce-24c2-4a02-99a4-c671d09a4067\") " Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.064087 4558 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c39fdbce-24c2-4a02-99a4-c671d09a4067-combined-ca-bundle\") pod \"c39fdbce-24c2-4a02-99a4-c671d09a4067\" (UID: \"c39fdbce-24c2-4a02-99a4-c671d09a4067\") " Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.064123 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c39fdbce-24c2-4a02-99a4-c671d09a4067-config-data\") pod \"c39fdbce-24c2-4a02-99a4-c671d09a4067\" (UID: \"c39fdbce-24c2-4a02-99a4-c671d09a4067\") " Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.064150 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c39fdbce-24c2-4a02-99a4-c671d09a4067-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "c39fdbce-24c2-4a02-99a4-c671d09a4067" (UID: "c39fdbce-24c2-4a02-99a4-c671d09a4067"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.064576 4558 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c39fdbce-24c2-4a02-99a4-c671d09a4067-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.065395 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c39fdbce-24c2-4a02-99a4-c671d09a4067-logs" (OuterVolumeSpecName: "logs") pod "c39fdbce-24c2-4a02-99a4-c671d09a4067" (UID: "c39fdbce-24c2-4a02-99a4-c671d09a4067"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.072238 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c39fdbce-24c2-4a02-99a4-c671d09a4067-kube-api-access-q7bkp" (OuterVolumeSpecName: "kube-api-access-q7bkp") pod "c39fdbce-24c2-4a02-99a4-c671d09a4067" (UID: "c39fdbce-24c2-4a02-99a4-c671d09a4067"). InnerVolumeSpecName "kube-api-access-q7bkp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.072340 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c39fdbce-24c2-4a02-99a4-c671d09a4067-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "c39fdbce-24c2-4a02-99a4-c671d09a4067" (UID: "c39fdbce-24c2-4a02-99a4-c671d09a4067"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.072362 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c39fdbce-24c2-4a02-99a4-c671d09a4067-scripts" (OuterVolumeSpecName: "scripts") pod "c39fdbce-24c2-4a02-99a4-c671d09a4067" (UID: "c39fdbce-24c2-4a02-99a4-c671d09a4067"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.089439 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c39fdbce-24c2-4a02-99a4-c671d09a4067-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c39fdbce-24c2-4a02-99a4-c671d09a4067" (UID: "c39fdbce-24c2-4a02-99a4-c671d09a4067"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.105793 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c39fdbce-24c2-4a02-99a4-c671d09a4067-config-data" (OuterVolumeSpecName: "config-data") pod "c39fdbce-24c2-4a02-99a4-c671d09a4067" (UID: "c39fdbce-24c2-4a02-99a4-c671d09a4067"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.167384 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c39fdbce-24c2-4a02-99a4-c671d09a4067-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.167439 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c39fdbce-24c2-4a02-99a4-c671d09a4067-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.167454 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c39fdbce-24c2-4a02-99a4-c671d09a4067-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.167465 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c39fdbce-24c2-4a02-99a4-c671d09a4067-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.167494 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q7bkp\" (UniqueName: \"kubernetes.io/projected/c39fdbce-24c2-4a02-99a4-c671d09a4067-kube-api-access-q7bkp\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.167504 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c39fdbce-24c2-4a02-99a4-c671d09a4067-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.320466 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-747b5668bc-jgcd6" event={"ID":"747b1096-c84a-43e1-930e-9f887a42c524","Type":"ContainerStarted","Data":"f8102ce497d4ab8bf9c0e437f9885597ba53dd47555a7751b8ba88fdad3cac9f"} Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.322141 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/keystone-747b5668bc-jgcd6" Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.323835 4558 generic.go:334] "Generic (PLEG): container finished" podID="c39fdbce-24c2-4a02-99a4-c671d09a4067" containerID="3a19ddf46ca42a5adf3fb20561481c08ce4ad4c994527d7977a1c93d179840fe" exitCode=0 Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.323871 4558 generic.go:334] "Generic (PLEG): container finished" podID="c39fdbce-24c2-4a02-99a4-c671d09a4067" containerID="ba6645c36e46964aa0c2a3a64d022bc81d57d4d9e50dd710053fee0226f00efc" exitCode=143 Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.323946 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.324656 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"c39fdbce-24c2-4a02-99a4-c671d09a4067","Type":"ContainerDied","Data":"3a19ddf46ca42a5adf3fb20561481c08ce4ad4c994527d7977a1c93d179840fe"} Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.324686 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"c39fdbce-24c2-4a02-99a4-c671d09a4067","Type":"ContainerDied","Data":"ba6645c36e46964aa0c2a3a64d022bc81d57d4d9e50dd710053fee0226f00efc"} Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.324699 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"c39fdbce-24c2-4a02-99a4-c671d09a4067","Type":"ContainerDied","Data":"39295faed190461a799be4c663b4b96327e60d990fed71af3689874f9fee213b"} Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.324716 4558 scope.go:117] "RemoveContainer" containerID="3a19ddf46ca42a5adf3fb20561481c08ce4ad4c994527d7977a1c93d179840fe" Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.351531 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/keystone-747b5668bc-jgcd6" podStartSLOduration=2.351504297 podStartE2EDuration="2.351504297s" podCreationTimestamp="2026-01-20 17:55:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:55:16.338935732 +0000 UTC m=+4410.099273699" watchObservedRunningTime="2026-01-20 17:55:16.351504297 +0000 UTC m=+4410.111842264" Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.356576 4558 scope.go:117] "RemoveContainer" containerID="ba6645c36e46964aa0c2a3a64d022bc81d57d4d9e50dd710053fee0226f00efc" Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.379652 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.384700 4558 scope.go:117] "RemoveContainer" containerID="3a19ddf46ca42a5adf3fb20561481c08ce4ad4c994527d7977a1c93d179840fe" Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.387946 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.387989 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.394015 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:55:16 crc kubenswrapper[4558]: E0120 17:55:16.397291 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3a19ddf46ca42a5adf3fb20561481c08ce4ad4c994527d7977a1c93d179840fe\": container with ID starting with 3a19ddf46ca42a5adf3fb20561481c08ce4ad4c994527d7977a1c93d179840fe not found: ID does not exist" containerID="3a19ddf46ca42a5adf3fb20561481c08ce4ad4c994527d7977a1c93d179840fe" Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.397329 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3a19ddf46ca42a5adf3fb20561481c08ce4ad4c994527d7977a1c93d179840fe"} err="failed to get container status 
\"3a19ddf46ca42a5adf3fb20561481c08ce4ad4c994527d7977a1c93d179840fe\": rpc error: code = NotFound desc = could not find container \"3a19ddf46ca42a5adf3fb20561481c08ce4ad4c994527d7977a1c93d179840fe\": container with ID starting with 3a19ddf46ca42a5adf3fb20561481c08ce4ad4c994527d7977a1c93d179840fe not found: ID does not exist" Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.397356 4558 scope.go:117] "RemoveContainer" containerID="ba6645c36e46964aa0c2a3a64d022bc81d57d4d9e50dd710053fee0226f00efc" Jan 20 17:55:16 crc kubenswrapper[4558]: E0120 17:55:16.398741 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ba6645c36e46964aa0c2a3a64d022bc81d57d4d9e50dd710053fee0226f00efc\": container with ID starting with ba6645c36e46964aa0c2a3a64d022bc81d57d4d9e50dd710053fee0226f00efc not found: ID does not exist" containerID="ba6645c36e46964aa0c2a3a64d022bc81d57d4d9e50dd710053fee0226f00efc" Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.398779 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ba6645c36e46964aa0c2a3a64d022bc81d57d4d9e50dd710053fee0226f00efc"} err="failed to get container status \"ba6645c36e46964aa0c2a3a64d022bc81d57d4d9e50dd710053fee0226f00efc\": rpc error: code = NotFound desc = could not find container \"ba6645c36e46964aa0c2a3a64d022bc81d57d4d9e50dd710053fee0226f00efc\": container with ID starting with ba6645c36e46964aa0c2a3a64d022bc81d57d4d9e50dd710053fee0226f00efc not found: ID does not exist" Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.398806 4558 scope.go:117] "RemoveContainer" containerID="3a19ddf46ca42a5adf3fb20561481c08ce4ad4c994527d7977a1c93d179840fe" Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.399037 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3a19ddf46ca42a5adf3fb20561481c08ce4ad4c994527d7977a1c93d179840fe"} err="failed to get container status \"3a19ddf46ca42a5adf3fb20561481c08ce4ad4c994527d7977a1c93d179840fe\": rpc error: code = NotFound desc = could not find container \"3a19ddf46ca42a5adf3fb20561481c08ce4ad4c994527d7977a1c93d179840fe\": container with ID starting with 3a19ddf46ca42a5adf3fb20561481c08ce4ad4c994527d7977a1c93d179840fe not found: ID does not exist" Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.399071 4558 scope.go:117] "RemoveContainer" containerID="ba6645c36e46964aa0c2a3a64d022bc81d57d4d9e50dd710053fee0226f00efc" Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.399305 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ba6645c36e46964aa0c2a3a64d022bc81d57d4d9e50dd710053fee0226f00efc"} err="failed to get container status \"ba6645c36e46964aa0c2a3a64d022bc81d57d4d9e50dd710053fee0226f00efc\": rpc error: code = NotFound desc = could not find container \"ba6645c36e46964aa0c2a3a64d022bc81d57d4d9e50dd710053fee0226f00efc\": container with ID starting with ba6645c36e46964aa0c2a3a64d022bc81d57d4d9e50dd710053fee0226f00efc not found: ID does not exist" Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.410244 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:55:16 crc kubenswrapper[4558]: E0120 17:55:16.410848 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c39fdbce-24c2-4a02-99a4-c671d09a4067" containerName="cinder-api" Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.410870 4558 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="c39fdbce-24c2-4a02-99a4-c671d09a4067" containerName="cinder-api" Jan 20 17:55:16 crc kubenswrapper[4558]: E0120 17:55:16.410898 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c39fdbce-24c2-4a02-99a4-c671d09a4067" containerName="cinder-api-log" Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.410906 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c39fdbce-24c2-4a02-99a4-c671d09a4067" containerName="cinder-api-log" Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.411145 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c39fdbce-24c2-4a02-99a4-c671d09a4067" containerName="cinder-api-log" Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.411193 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c39fdbce-24c2-4a02-99a4-c671d09a4067" containerName="cinder-api" Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.412194 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.413635 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-cinder-public-svc" Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.413903 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-cinder-internal-svc" Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.419447 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.419503 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.419609 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.419841 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-api-config-data" Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.422565 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.429849 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.453722 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.453781 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.473810 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0b0c3280-9b26-4db4-ab16-68016b055a1f-config-data-custom\") pod \"cinder-api-0\" (UID: \"0b0c3280-9b26-4db4-ab16-68016b055a1f\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.473938 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/0b0c3280-9b26-4db4-ab16-68016b055a1f-logs\") pod \"cinder-api-0\" (UID: \"0b0c3280-9b26-4db4-ab16-68016b055a1f\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.474221 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0b0c3280-9b26-4db4-ab16-68016b055a1f-config-data\") pod \"cinder-api-0\" (UID: \"0b0c3280-9b26-4db4-ab16-68016b055a1f\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.474262 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zxwh2\" (UniqueName: \"kubernetes.io/projected/0b0c3280-9b26-4db4-ab16-68016b055a1f-kube-api-access-zxwh2\") pod \"cinder-api-0\" (UID: \"0b0c3280-9b26-4db4-ab16-68016b055a1f\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.474286 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0b0c3280-9b26-4db4-ab16-68016b055a1f-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"0b0c3280-9b26-4db4-ab16-68016b055a1f\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.474404 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0b0c3280-9b26-4db4-ab16-68016b055a1f-etc-machine-id\") pod \"cinder-api-0\" (UID: \"0b0c3280-9b26-4db4-ab16-68016b055a1f\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.474485 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0b0c3280-9b26-4db4-ab16-68016b055a1f-public-tls-certs\") pod \"cinder-api-0\" (UID: \"0b0c3280-9b26-4db4-ab16-68016b055a1f\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.474515 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0b0c3280-9b26-4db4-ab16-68016b055a1f-scripts\") pod \"cinder-api-0\" (UID: \"0b0c3280-9b26-4db4-ab16-68016b055a1f\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.474561 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b0c3280-9b26-4db4-ab16-68016b055a1f-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"0b0c3280-9b26-4db4-ab16-68016b055a1f\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.576811 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0b0c3280-9b26-4db4-ab16-68016b055a1f-config-data\") pod \"cinder-api-0\" (UID: \"0b0c3280-9b26-4db4-ab16-68016b055a1f\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.576850 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c39fdbce-24c2-4a02-99a4-c671d09a4067" path="/var/lib/kubelet/pods/c39fdbce-24c2-4a02-99a4-c671d09a4067/volumes" Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 
17:55:16.576858 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zxwh2\" (UniqueName: \"kubernetes.io/projected/0b0c3280-9b26-4db4-ab16-68016b055a1f-kube-api-access-zxwh2\") pod \"cinder-api-0\" (UID: \"0b0c3280-9b26-4db4-ab16-68016b055a1f\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.576981 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0b0c3280-9b26-4db4-ab16-68016b055a1f-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"0b0c3280-9b26-4db4-ab16-68016b055a1f\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.577038 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0b0c3280-9b26-4db4-ab16-68016b055a1f-etc-machine-id\") pod \"cinder-api-0\" (UID: \"0b0c3280-9b26-4db4-ab16-68016b055a1f\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.577092 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0b0c3280-9b26-4db4-ab16-68016b055a1f-public-tls-certs\") pod \"cinder-api-0\" (UID: \"0b0c3280-9b26-4db4-ab16-68016b055a1f\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.577120 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0b0c3280-9b26-4db4-ab16-68016b055a1f-scripts\") pod \"cinder-api-0\" (UID: \"0b0c3280-9b26-4db4-ab16-68016b055a1f\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.577148 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b0c3280-9b26-4db4-ab16-68016b055a1f-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"0b0c3280-9b26-4db4-ab16-68016b055a1f\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.577204 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0b0c3280-9b26-4db4-ab16-68016b055a1f-config-data-custom\") pod \"cinder-api-0\" (UID: \"0b0c3280-9b26-4db4-ab16-68016b055a1f\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.577260 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0b0c3280-9b26-4db4-ab16-68016b055a1f-logs\") pod \"cinder-api-0\" (UID: \"0b0c3280-9b26-4db4-ab16-68016b055a1f\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.577334 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0b0c3280-9b26-4db4-ab16-68016b055a1f-etc-machine-id\") pod \"cinder-api-0\" (UID: \"0b0c3280-9b26-4db4-ab16-68016b055a1f\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.577667 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0b0c3280-9b26-4db4-ab16-68016b055a1f-logs\") pod \"cinder-api-0\" (UID: 
\"0b0c3280-9b26-4db4-ab16-68016b055a1f\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.582484 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0b0c3280-9b26-4db4-ab16-68016b055a1f-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"0b0c3280-9b26-4db4-ab16-68016b055a1f\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.582484 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b0c3280-9b26-4db4-ab16-68016b055a1f-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"0b0c3280-9b26-4db4-ab16-68016b055a1f\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.583377 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0b0c3280-9b26-4db4-ab16-68016b055a1f-config-data-custom\") pod \"cinder-api-0\" (UID: \"0b0c3280-9b26-4db4-ab16-68016b055a1f\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.586949 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0b0c3280-9b26-4db4-ab16-68016b055a1f-scripts\") pod \"cinder-api-0\" (UID: \"0b0c3280-9b26-4db4-ab16-68016b055a1f\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.587583 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0b0c3280-9b26-4db4-ab16-68016b055a1f-public-tls-certs\") pod \"cinder-api-0\" (UID: \"0b0c3280-9b26-4db4-ab16-68016b055a1f\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.589868 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0b0c3280-9b26-4db4-ab16-68016b055a1f-config-data\") pod \"cinder-api-0\" (UID: \"0b0c3280-9b26-4db4-ab16-68016b055a1f\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.593590 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zxwh2\" (UniqueName: \"kubernetes.io/projected/0b0c3280-9b26-4db4-ab16-68016b055a1f-kube-api-access-zxwh2\") pod \"cinder-api-0\" (UID: \"0b0c3280-9b26-4db4-ab16-68016b055a1f\") " pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:55:16 crc kubenswrapper[4558]: I0120 17:55:16.737087 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:55:17 crc kubenswrapper[4558]: I0120 17:55:17.164405 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:55:17 crc kubenswrapper[4558]: I0120 17:55:17.338620 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"0b0c3280-9b26-4db4-ab16-68016b055a1f","Type":"ContainerStarted","Data":"3c41b8d2e4ff79bd3706fc70c233fdfa093a870dea67d23e5ec6f068d9954668"} Jan 20 17:55:17 crc kubenswrapper[4558]: I0120 17:55:17.340984 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:55:17 crc kubenswrapper[4558]: I0120 17:55:17.341042 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:55:17 crc kubenswrapper[4558]: I0120 17:55:17.341059 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:55:17 crc kubenswrapper[4558]: I0120 17:55:17.341068 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:55:18 crc kubenswrapper[4558]: I0120 17:55:18.351423 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"0b0c3280-9b26-4db4-ab16-68016b055a1f","Type":"ContainerStarted","Data":"c77c9b049e2b69bd62c3dcdd5e98b0f9306632a287cc9d5490b8cbb694baaa92"} Jan 20 17:55:18 crc kubenswrapper[4558]: I0120 17:55:18.351908 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"0b0c3280-9b26-4db4-ab16-68016b055a1f","Type":"ContainerStarted","Data":"ed6293f7998d052646c129c9cb1fa0e8231d7f0d152e8108e422d497c055e6c9"} Jan 20 17:55:18 crc kubenswrapper[4558]: I0120 17:55:18.352224 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:55:18 crc kubenswrapper[4558]: I0120 17:55:18.381802 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/cinder-api-0" podStartSLOduration=2.381783461 podStartE2EDuration="2.381783461s" podCreationTimestamp="2026-01-20 17:55:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:55:18.363742879 +0000 UTC m=+4412.124080846" watchObservedRunningTime="2026-01-20 17:55:18.381783461 +0000 UTC m=+4412.142121427" Jan 20 17:55:19 crc kubenswrapper[4558]: I0120 17:55:19.056817 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:55:19 crc kubenswrapper[4558]: I0120 17:55:19.058926 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:55:19 crc kubenswrapper[4558]: I0120 17:55:19.119276 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:55:19 crc kubenswrapper[4558]: I0120 17:55:19.120124 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:55:19 crc kubenswrapper[4558]: I0120 17:55:19.371800 4558 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openstack-kuttl-tests/barbican-api-7ddd949596-fq6mn" Jan 20 17:55:19 crc kubenswrapper[4558]: I0120 17:55:19.469881 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-5b5bd9fb5d-vdljd"] Jan 20 17:55:19 crc kubenswrapper[4558]: I0120 17:55:19.471382 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-5b5bd9fb5d-vdljd" Jan 20 17:55:19 crc kubenswrapper[4558]: I0120 17:55:19.473111 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-neutron-public-svc" Jan 20 17:55:19 crc kubenswrapper[4558]: I0120 17:55:19.476923 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-neutron-internal-svc" Jan 20 17:55:19 crc kubenswrapper[4558]: I0120 17:55:19.480982 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-5b5bd9fb5d-vdljd"] Jan 20 17:55:19 crc kubenswrapper[4558]: I0120 17:55:19.482252 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/barbican-api-7ddd949596-fq6mn" Jan 20 17:55:19 crc kubenswrapper[4558]: I0120 17:55:19.547805 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/45d51516-cc3a-473c-bb88-e82d290d36ca-config\") pod \"neutron-5b5bd9fb5d-vdljd\" (UID: \"45d51516-cc3a-473c-bb88-e82d290d36ca\") " pod="openstack-kuttl-tests/neutron-5b5bd9fb5d-vdljd" Jan 20 17:55:19 crc kubenswrapper[4558]: I0120 17:55:19.547893 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/45d51516-cc3a-473c-bb88-e82d290d36ca-internal-tls-certs\") pod \"neutron-5b5bd9fb5d-vdljd\" (UID: \"45d51516-cc3a-473c-bb88-e82d290d36ca\") " pod="openstack-kuttl-tests/neutron-5b5bd9fb5d-vdljd" Jan 20 17:55:19 crc kubenswrapper[4558]: I0120 17:55:19.548100 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/45d51516-cc3a-473c-bb88-e82d290d36ca-httpd-config\") pod \"neutron-5b5bd9fb5d-vdljd\" (UID: \"45d51516-cc3a-473c-bb88-e82d290d36ca\") " pod="openstack-kuttl-tests/neutron-5b5bd9fb5d-vdljd" Jan 20 17:55:19 crc kubenswrapper[4558]: I0120 17:55:19.548240 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/45d51516-cc3a-473c-bb88-e82d290d36ca-ovndb-tls-certs\") pod \"neutron-5b5bd9fb5d-vdljd\" (UID: \"45d51516-cc3a-473c-bb88-e82d290d36ca\") " pod="openstack-kuttl-tests/neutron-5b5bd9fb5d-vdljd" Jan 20 17:55:19 crc kubenswrapper[4558]: I0120 17:55:19.548281 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rhxbj\" (UniqueName: \"kubernetes.io/projected/45d51516-cc3a-473c-bb88-e82d290d36ca-kube-api-access-rhxbj\") pod \"neutron-5b5bd9fb5d-vdljd\" (UID: \"45d51516-cc3a-473c-bb88-e82d290d36ca\") " pod="openstack-kuttl-tests/neutron-5b5bd9fb5d-vdljd" Jan 20 17:55:19 crc kubenswrapper[4558]: I0120 17:55:19.548302 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45d51516-cc3a-473c-bb88-e82d290d36ca-combined-ca-bundle\") pod \"neutron-5b5bd9fb5d-vdljd\" (UID: 
\"45d51516-cc3a-473c-bb88-e82d290d36ca\") " pod="openstack-kuttl-tests/neutron-5b5bd9fb5d-vdljd" Jan 20 17:55:19 crc kubenswrapper[4558]: I0120 17:55:19.548355 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/45d51516-cc3a-473c-bb88-e82d290d36ca-public-tls-certs\") pod \"neutron-5b5bd9fb5d-vdljd\" (UID: \"45d51516-cc3a-473c-bb88-e82d290d36ca\") " pod="openstack-kuttl-tests/neutron-5b5bd9fb5d-vdljd" Jan 20 17:55:19 crc kubenswrapper[4558]: I0120 17:55:19.649632 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rhxbj\" (UniqueName: \"kubernetes.io/projected/45d51516-cc3a-473c-bb88-e82d290d36ca-kube-api-access-rhxbj\") pod \"neutron-5b5bd9fb5d-vdljd\" (UID: \"45d51516-cc3a-473c-bb88-e82d290d36ca\") " pod="openstack-kuttl-tests/neutron-5b5bd9fb5d-vdljd" Jan 20 17:55:19 crc kubenswrapper[4558]: I0120 17:55:19.649700 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45d51516-cc3a-473c-bb88-e82d290d36ca-combined-ca-bundle\") pod \"neutron-5b5bd9fb5d-vdljd\" (UID: \"45d51516-cc3a-473c-bb88-e82d290d36ca\") " pod="openstack-kuttl-tests/neutron-5b5bd9fb5d-vdljd" Jan 20 17:55:19 crc kubenswrapper[4558]: I0120 17:55:19.649748 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/45d51516-cc3a-473c-bb88-e82d290d36ca-public-tls-certs\") pod \"neutron-5b5bd9fb5d-vdljd\" (UID: \"45d51516-cc3a-473c-bb88-e82d290d36ca\") " pod="openstack-kuttl-tests/neutron-5b5bd9fb5d-vdljd" Jan 20 17:55:19 crc kubenswrapper[4558]: I0120 17:55:19.649772 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/45d51516-cc3a-473c-bb88-e82d290d36ca-config\") pod \"neutron-5b5bd9fb5d-vdljd\" (UID: \"45d51516-cc3a-473c-bb88-e82d290d36ca\") " pod="openstack-kuttl-tests/neutron-5b5bd9fb5d-vdljd" Jan 20 17:55:19 crc kubenswrapper[4558]: I0120 17:55:19.649829 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/45d51516-cc3a-473c-bb88-e82d290d36ca-internal-tls-certs\") pod \"neutron-5b5bd9fb5d-vdljd\" (UID: \"45d51516-cc3a-473c-bb88-e82d290d36ca\") " pod="openstack-kuttl-tests/neutron-5b5bd9fb5d-vdljd" Jan 20 17:55:19 crc kubenswrapper[4558]: I0120 17:55:19.649945 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/45d51516-cc3a-473c-bb88-e82d290d36ca-httpd-config\") pod \"neutron-5b5bd9fb5d-vdljd\" (UID: \"45d51516-cc3a-473c-bb88-e82d290d36ca\") " pod="openstack-kuttl-tests/neutron-5b5bd9fb5d-vdljd" Jan 20 17:55:19 crc kubenswrapper[4558]: I0120 17:55:19.650058 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/45d51516-cc3a-473c-bb88-e82d290d36ca-ovndb-tls-certs\") pod \"neutron-5b5bd9fb5d-vdljd\" (UID: \"45d51516-cc3a-473c-bb88-e82d290d36ca\") " pod="openstack-kuttl-tests/neutron-5b5bd9fb5d-vdljd" Jan 20 17:55:19 crc kubenswrapper[4558]: I0120 17:55:19.657421 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/45d51516-cc3a-473c-bb88-e82d290d36ca-ovndb-tls-certs\") pod \"neutron-5b5bd9fb5d-vdljd\" (UID: 
\"45d51516-cc3a-473c-bb88-e82d290d36ca\") " pod="openstack-kuttl-tests/neutron-5b5bd9fb5d-vdljd" Jan 20 17:55:19 crc kubenswrapper[4558]: I0120 17:55:19.657419 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/45d51516-cc3a-473c-bb88-e82d290d36ca-internal-tls-certs\") pod \"neutron-5b5bd9fb5d-vdljd\" (UID: \"45d51516-cc3a-473c-bb88-e82d290d36ca\") " pod="openstack-kuttl-tests/neutron-5b5bd9fb5d-vdljd" Jan 20 17:55:19 crc kubenswrapper[4558]: I0120 17:55:19.662735 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/45d51516-cc3a-473c-bb88-e82d290d36ca-httpd-config\") pod \"neutron-5b5bd9fb5d-vdljd\" (UID: \"45d51516-cc3a-473c-bb88-e82d290d36ca\") " pod="openstack-kuttl-tests/neutron-5b5bd9fb5d-vdljd" Jan 20 17:55:19 crc kubenswrapper[4558]: I0120 17:55:19.663243 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/45d51516-cc3a-473c-bb88-e82d290d36ca-public-tls-certs\") pod \"neutron-5b5bd9fb5d-vdljd\" (UID: \"45d51516-cc3a-473c-bb88-e82d290d36ca\") " pod="openstack-kuttl-tests/neutron-5b5bd9fb5d-vdljd" Jan 20 17:55:19 crc kubenswrapper[4558]: I0120 17:55:19.667414 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/45d51516-cc3a-473c-bb88-e82d290d36ca-config\") pod \"neutron-5b5bd9fb5d-vdljd\" (UID: \"45d51516-cc3a-473c-bb88-e82d290d36ca\") " pod="openstack-kuttl-tests/neutron-5b5bd9fb5d-vdljd" Jan 20 17:55:19 crc kubenswrapper[4558]: I0120 17:55:19.683004 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rhxbj\" (UniqueName: \"kubernetes.io/projected/45d51516-cc3a-473c-bb88-e82d290d36ca-kube-api-access-rhxbj\") pod \"neutron-5b5bd9fb5d-vdljd\" (UID: \"45d51516-cc3a-473c-bb88-e82d290d36ca\") " pod="openstack-kuttl-tests/neutron-5b5bd9fb5d-vdljd" Jan 20 17:55:19 crc kubenswrapper[4558]: I0120 17:55:19.687532 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45d51516-cc3a-473c-bb88-e82d290d36ca-combined-ca-bundle\") pod \"neutron-5b5bd9fb5d-vdljd\" (UID: \"45d51516-cc3a-473c-bb88-e82d290d36ca\") " pod="openstack-kuttl-tests/neutron-5b5bd9fb5d-vdljd" Jan 20 17:55:19 crc kubenswrapper[4558]: I0120 17:55:19.788977 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-5b5bd9fb5d-vdljd" Jan 20 17:55:19 crc kubenswrapper[4558]: I0120 17:55:19.916289 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-api-6bb757fd44-jvx79"] Jan 20 17:55:19 crc kubenswrapper[4558]: I0120 17:55:19.919114 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-api-6bb757fd44-jvx79" Jan 20 17:55:19 crc kubenswrapper[4558]: I0120 17:55:19.931002 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-barbican-internal-svc" Jan 20 17:55:19 crc kubenswrapper[4558]: I0120 17:55:19.931269 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-barbican-public-svc" Jan 20 17:55:19 crc kubenswrapper[4558]: I0120 17:55:19.941477 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-api-6bb757fd44-jvx79"] Jan 20 17:55:20 crc kubenswrapper[4558]: I0120 17:55:20.069724 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7-logs\") pod \"barbican-api-6bb757fd44-jvx79\" (UID: \"9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7\") " pod="openstack-kuttl-tests/barbican-api-6bb757fd44-jvx79" Jan 20 17:55:20 crc kubenswrapper[4558]: I0120 17:55:20.069810 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7-combined-ca-bundle\") pod \"barbican-api-6bb757fd44-jvx79\" (UID: \"9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7\") " pod="openstack-kuttl-tests/barbican-api-6bb757fd44-jvx79" Jan 20 17:55:20 crc kubenswrapper[4558]: I0120 17:55:20.069857 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7-config-data-custom\") pod \"barbican-api-6bb757fd44-jvx79\" (UID: \"9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7\") " pod="openstack-kuttl-tests/barbican-api-6bb757fd44-jvx79" Jan 20 17:55:20 crc kubenswrapper[4558]: I0120 17:55:20.069911 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7-internal-tls-certs\") pod \"barbican-api-6bb757fd44-jvx79\" (UID: \"9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7\") " pod="openstack-kuttl-tests/barbican-api-6bb757fd44-jvx79" Jan 20 17:55:20 crc kubenswrapper[4558]: I0120 17:55:20.069933 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7-public-tls-certs\") pod \"barbican-api-6bb757fd44-jvx79\" (UID: \"9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7\") " pod="openstack-kuttl-tests/barbican-api-6bb757fd44-jvx79" Jan 20 17:55:20 crc kubenswrapper[4558]: I0120 17:55:20.069965 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9bdw7\" (UniqueName: \"kubernetes.io/projected/9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7-kube-api-access-9bdw7\") pod \"barbican-api-6bb757fd44-jvx79\" (UID: \"9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7\") " pod="openstack-kuttl-tests/barbican-api-6bb757fd44-jvx79" Jan 20 17:55:20 crc kubenswrapper[4558]: I0120 17:55:20.070001 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7-config-data\") pod \"barbican-api-6bb757fd44-jvx79\" (UID: \"9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7\") " 
pod="openstack-kuttl-tests/barbican-api-6bb757fd44-jvx79" Jan 20 17:55:20 crc kubenswrapper[4558]: I0120 17:55:20.171793 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7-config-data-custom\") pod \"barbican-api-6bb757fd44-jvx79\" (UID: \"9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7\") " pod="openstack-kuttl-tests/barbican-api-6bb757fd44-jvx79" Jan 20 17:55:20 crc kubenswrapper[4558]: I0120 17:55:20.171855 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7-internal-tls-certs\") pod \"barbican-api-6bb757fd44-jvx79\" (UID: \"9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7\") " pod="openstack-kuttl-tests/barbican-api-6bb757fd44-jvx79" Jan 20 17:55:20 crc kubenswrapper[4558]: I0120 17:55:20.171884 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7-public-tls-certs\") pod \"barbican-api-6bb757fd44-jvx79\" (UID: \"9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7\") " pod="openstack-kuttl-tests/barbican-api-6bb757fd44-jvx79" Jan 20 17:55:20 crc kubenswrapper[4558]: I0120 17:55:20.171915 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9bdw7\" (UniqueName: \"kubernetes.io/projected/9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7-kube-api-access-9bdw7\") pod \"barbican-api-6bb757fd44-jvx79\" (UID: \"9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7\") " pod="openstack-kuttl-tests/barbican-api-6bb757fd44-jvx79" Jan 20 17:55:20 crc kubenswrapper[4558]: I0120 17:55:20.171938 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7-config-data\") pod \"barbican-api-6bb757fd44-jvx79\" (UID: \"9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7\") " pod="openstack-kuttl-tests/barbican-api-6bb757fd44-jvx79" Jan 20 17:55:20 crc kubenswrapper[4558]: I0120 17:55:20.172007 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7-logs\") pod \"barbican-api-6bb757fd44-jvx79\" (UID: \"9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7\") " pod="openstack-kuttl-tests/barbican-api-6bb757fd44-jvx79" Jan 20 17:55:20 crc kubenswrapper[4558]: I0120 17:55:20.172052 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7-combined-ca-bundle\") pod \"barbican-api-6bb757fd44-jvx79\" (UID: \"9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7\") " pod="openstack-kuttl-tests/barbican-api-6bb757fd44-jvx79" Jan 20 17:55:20 crc kubenswrapper[4558]: I0120 17:55:20.172569 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7-logs\") pod \"barbican-api-6bb757fd44-jvx79\" (UID: \"9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7\") " pod="openstack-kuttl-tests/barbican-api-6bb757fd44-jvx79" Jan 20 17:55:20 crc kubenswrapper[4558]: I0120 17:55:20.180795 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7-public-tls-certs\") pod \"barbican-api-6bb757fd44-jvx79\" (UID: 
\"9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7\") " pod="openstack-kuttl-tests/barbican-api-6bb757fd44-jvx79" Jan 20 17:55:20 crc kubenswrapper[4558]: I0120 17:55:20.181326 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7-combined-ca-bundle\") pod \"barbican-api-6bb757fd44-jvx79\" (UID: \"9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7\") " pod="openstack-kuttl-tests/barbican-api-6bb757fd44-jvx79" Jan 20 17:55:20 crc kubenswrapper[4558]: I0120 17:55:20.181317 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7-config-data-custom\") pod \"barbican-api-6bb757fd44-jvx79\" (UID: \"9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7\") " pod="openstack-kuttl-tests/barbican-api-6bb757fd44-jvx79" Jan 20 17:55:20 crc kubenswrapper[4558]: I0120 17:55:20.182201 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7-config-data\") pod \"barbican-api-6bb757fd44-jvx79\" (UID: \"9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7\") " pod="openstack-kuttl-tests/barbican-api-6bb757fd44-jvx79" Jan 20 17:55:20 crc kubenswrapper[4558]: I0120 17:55:20.189580 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9bdw7\" (UniqueName: \"kubernetes.io/projected/9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7-kube-api-access-9bdw7\") pod \"barbican-api-6bb757fd44-jvx79\" (UID: \"9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7\") " pod="openstack-kuttl-tests/barbican-api-6bb757fd44-jvx79" Jan 20 17:55:20 crc kubenswrapper[4558]: I0120 17:55:20.203557 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7-internal-tls-certs\") pod \"barbican-api-6bb757fd44-jvx79\" (UID: \"9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7\") " pod="openstack-kuttl-tests/barbican-api-6bb757fd44-jvx79" Jan 20 17:55:20 crc kubenswrapper[4558]: I0120 17:55:20.263791 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-api-6bb757fd44-jvx79" Jan 20 17:55:20 crc kubenswrapper[4558]: I0120 17:55:20.331326 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-5b5bd9fb5d-vdljd"] Jan 20 17:55:20 crc kubenswrapper[4558]: W0120 17:55:20.596934 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod45d51516_cc3a_473c_bb88_e82d290d36ca.slice/crio-8b60873bef0f992793dd3bbba55205c79d9a815e1ab68a451ba5722f728dc4f2 WatchSource:0}: Error finding container 8b60873bef0f992793dd3bbba55205c79d9a815e1ab68a451ba5722f728dc4f2: Status 404 returned error can't find the container with id 8b60873bef0f992793dd3bbba55205c79d9a815e1ab68a451ba5722f728dc4f2 Jan 20 17:55:20 crc kubenswrapper[4558]: I0120 17:55:20.865957 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-api-6bb757fd44-jvx79"] Jan 20 17:55:21 crc kubenswrapper[4558]: I0120 17:55:21.221382 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:55:21 crc kubenswrapper[4558]: I0120 17:55:21.337377 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:55:21 crc kubenswrapper[4558]: I0120 17:55:21.375808 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-5b5bd9fb5d-vdljd" event={"ID":"45d51516-cc3a-473c-bb88-e82d290d36ca","Type":"ContainerStarted","Data":"dfd1e35931513664c3c64c46a6882405ae46386429f436dbbda7a5b30ca884e7"} Jan 20 17:55:21 crc kubenswrapper[4558]: I0120 17:55:21.375842 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-5b5bd9fb5d-vdljd" event={"ID":"45d51516-cc3a-473c-bb88-e82d290d36ca","Type":"ContainerStarted","Data":"8b60873bef0f992793dd3bbba55205c79d9a815e1ab68a451ba5722f728dc4f2"} Jan 20 17:55:21 crc kubenswrapper[4558]: I0120 17:55:21.377205 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-scheduler-0" podUID="36ea0e6e-00c5-41a9-9ebc-0960214e6a65" containerName="cinder-scheduler" containerID="cri-o://16d7a5b115ea502b2d2354b051017e13ce428286b30fa423a1dbd745dc8c1bee" gracePeriod=30 Jan 20 17:55:21 crc kubenswrapper[4558]: I0120 17:55:21.377466 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-6bb757fd44-jvx79" event={"ID":"9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7","Type":"ContainerStarted","Data":"2f0d5940c1a430547bdac78180588ab9d0ea89c8cd08f0adfb7ebcb602621819"} Jan 20 17:55:21 crc kubenswrapper[4558]: I0120 17:55:21.377492 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-6bb757fd44-jvx79" event={"ID":"9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7","Type":"ContainerStarted","Data":"92e3fcede0a1618b22fdaa64045121c517f956f3b2c1983b6e27802858f4144c"} Jan 20 17:55:21 crc kubenswrapper[4558]: I0120 17:55:21.377754 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-scheduler-0" podUID="36ea0e6e-00c5-41a9-9ebc-0960214e6a65" containerName="probe" containerID="cri-o://89d7d8831bb61b1585ef371d9cfbff3b84eb872f95c98bbc734c620c9609dcd7" gracePeriod=30 Jan 20 17:55:22 crc kubenswrapper[4558]: I0120 17:55:22.401323 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-5b5bd9fb5d-vdljd" 
event={"ID":"45d51516-cc3a-473c-bb88-e82d290d36ca","Type":"ContainerStarted","Data":"ecb66a22f0932155a427ab286233ffe4a3ffce78bb66e6be213471c0db633735"} Jan 20 17:55:22 crc kubenswrapper[4558]: I0120 17:55:22.403244 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/neutron-5b5bd9fb5d-vdljd" Jan 20 17:55:22 crc kubenswrapper[4558]: I0120 17:55:22.406196 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-6bb757fd44-jvx79" event={"ID":"9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7","Type":"ContainerStarted","Data":"7e84faeb8fb514479840b3b5eeb4711dbbb78f6bac5d538500313eb7bf90d215"} Jan 20 17:55:22 crc kubenswrapper[4558]: I0120 17:55:22.406886 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/barbican-api-6bb757fd44-jvx79" Jan 20 17:55:22 crc kubenswrapper[4558]: I0120 17:55:22.407017 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/barbican-api-6bb757fd44-jvx79" Jan 20 17:55:22 crc kubenswrapper[4558]: I0120 17:55:22.424247 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/neutron-5b5bd9fb5d-vdljd" podStartSLOduration=3.424234881 podStartE2EDuration="3.424234881s" podCreationTimestamp="2026-01-20 17:55:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:55:22.419802771 +0000 UTC m=+4416.180140738" watchObservedRunningTime="2026-01-20 17:55:22.424234881 +0000 UTC m=+4416.184572848" Jan 20 17:55:22 crc kubenswrapper[4558]: I0120 17:55:22.449645 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/barbican-api-6bb757fd44-jvx79" podStartSLOduration=3.44962062 podStartE2EDuration="3.44962062s" podCreationTimestamp="2026-01-20 17:55:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:55:22.442088842 +0000 UTC m=+4416.202426809" watchObservedRunningTime="2026-01-20 17:55:22.44962062 +0000 UTC m=+4416.209958587" Jan 20 17:55:23 crc kubenswrapper[4558]: I0120 17:55:23.160004 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:55:23 crc kubenswrapper[4558]: I0120 17:55:23.258560 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/36ea0e6e-00c5-41a9-9ebc-0960214e6a65-etc-machine-id\") pod \"36ea0e6e-00c5-41a9-9ebc-0960214e6a65\" (UID: \"36ea0e6e-00c5-41a9-9ebc-0960214e6a65\") " Jan 20 17:55:23 crc kubenswrapper[4558]: I0120 17:55:23.258645 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/36ea0e6e-00c5-41a9-9ebc-0960214e6a65-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "36ea0e6e-00c5-41a9-9ebc-0960214e6a65" (UID: "36ea0e6e-00c5-41a9-9ebc-0960214e6a65"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:55:23 crc kubenswrapper[4558]: I0120 17:55:23.258776 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/36ea0e6e-00c5-41a9-9ebc-0960214e6a65-config-data-custom\") pod \"36ea0e6e-00c5-41a9-9ebc-0960214e6a65\" (UID: \"36ea0e6e-00c5-41a9-9ebc-0960214e6a65\") " Jan 20 17:55:23 crc kubenswrapper[4558]: I0120 17:55:23.258800 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36ea0e6e-00c5-41a9-9ebc-0960214e6a65-scripts\") pod \"36ea0e6e-00c5-41a9-9ebc-0960214e6a65\" (UID: \"36ea0e6e-00c5-41a9-9ebc-0960214e6a65\") " Jan 20 17:55:23 crc kubenswrapper[4558]: I0120 17:55:23.259712 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36ea0e6e-00c5-41a9-9ebc-0960214e6a65-combined-ca-bundle\") pod \"36ea0e6e-00c5-41a9-9ebc-0960214e6a65\" (UID: \"36ea0e6e-00c5-41a9-9ebc-0960214e6a65\") " Jan 20 17:55:23 crc kubenswrapper[4558]: I0120 17:55:23.259877 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zlk7t\" (UniqueName: \"kubernetes.io/projected/36ea0e6e-00c5-41a9-9ebc-0960214e6a65-kube-api-access-zlk7t\") pod \"36ea0e6e-00c5-41a9-9ebc-0960214e6a65\" (UID: \"36ea0e6e-00c5-41a9-9ebc-0960214e6a65\") " Jan 20 17:55:23 crc kubenswrapper[4558]: I0120 17:55:23.259917 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36ea0e6e-00c5-41a9-9ebc-0960214e6a65-config-data\") pod \"36ea0e6e-00c5-41a9-9ebc-0960214e6a65\" (UID: \"36ea0e6e-00c5-41a9-9ebc-0960214e6a65\") " Jan 20 17:55:23 crc kubenswrapper[4558]: I0120 17:55:23.260312 4558 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/36ea0e6e-00c5-41a9-9ebc-0960214e6a65-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:23 crc kubenswrapper[4558]: I0120 17:55:23.265323 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36ea0e6e-00c5-41a9-9ebc-0960214e6a65-scripts" (OuterVolumeSpecName: "scripts") pod "36ea0e6e-00c5-41a9-9ebc-0960214e6a65" (UID: "36ea0e6e-00c5-41a9-9ebc-0960214e6a65"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:55:23 crc kubenswrapper[4558]: I0120 17:55:23.270248 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/36ea0e6e-00c5-41a9-9ebc-0960214e6a65-kube-api-access-zlk7t" (OuterVolumeSpecName: "kube-api-access-zlk7t") pod "36ea0e6e-00c5-41a9-9ebc-0960214e6a65" (UID: "36ea0e6e-00c5-41a9-9ebc-0960214e6a65"). InnerVolumeSpecName "kube-api-access-zlk7t". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:55:23 crc kubenswrapper[4558]: I0120 17:55:23.270246 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36ea0e6e-00c5-41a9-9ebc-0960214e6a65-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "36ea0e6e-00c5-41a9-9ebc-0960214e6a65" (UID: "36ea0e6e-00c5-41a9-9ebc-0960214e6a65"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:55:23 crc kubenswrapper[4558]: I0120 17:55:23.301575 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36ea0e6e-00c5-41a9-9ebc-0960214e6a65-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "36ea0e6e-00c5-41a9-9ebc-0960214e6a65" (UID: "36ea0e6e-00c5-41a9-9ebc-0960214e6a65"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:55:23 crc kubenswrapper[4558]: I0120 17:55:23.336872 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36ea0e6e-00c5-41a9-9ebc-0960214e6a65-config-data" (OuterVolumeSpecName: "config-data") pod "36ea0e6e-00c5-41a9-9ebc-0960214e6a65" (UID: "36ea0e6e-00c5-41a9-9ebc-0960214e6a65"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:55:23 crc kubenswrapper[4558]: I0120 17:55:23.362197 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/36ea0e6e-00c5-41a9-9ebc-0960214e6a65-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:23 crc kubenswrapper[4558]: I0120 17:55:23.362227 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36ea0e6e-00c5-41a9-9ebc-0960214e6a65-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:23 crc kubenswrapper[4558]: I0120 17:55:23.362239 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36ea0e6e-00c5-41a9-9ebc-0960214e6a65-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:23 crc kubenswrapper[4558]: I0120 17:55:23.362252 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zlk7t\" (UniqueName: \"kubernetes.io/projected/36ea0e6e-00c5-41a9-9ebc-0960214e6a65-kube-api-access-zlk7t\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:23 crc kubenswrapper[4558]: I0120 17:55:23.362264 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36ea0e6e-00c5-41a9-9ebc-0960214e6a65-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:23 crc kubenswrapper[4558]: I0120 17:55:23.416778 4558 generic.go:334] "Generic (PLEG): container finished" podID="36ea0e6e-00c5-41a9-9ebc-0960214e6a65" containerID="89d7d8831bb61b1585ef371d9cfbff3b84eb872f95c98bbc734c620c9609dcd7" exitCode=0 Jan 20 17:55:23 crc kubenswrapper[4558]: I0120 17:55:23.417521 4558 generic.go:334] "Generic (PLEG): container finished" podID="36ea0e6e-00c5-41a9-9ebc-0960214e6a65" containerID="16d7a5b115ea502b2d2354b051017e13ce428286b30fa423a1dbd745dc8c1bee" exitCode=0 Jan 20 17:55:23 crc kubenswrapper[4558]: I0120 17:55:23.416876 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:55:23 crc kubenswrapper[4558]: I0120 17:55:23.416850 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"36ea0e6e-00c5-41a9-9ebc-0960214e6a65","Type":"ContainerDied","Data":"89d7d8831bb61b1585ef371d9cfbff3b84eb872f95c98bbc734c620c9609dcd7"} Jan 20 17:55:23 crc kubenswrapper[4558]: I0120 17:55:23.418318 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"36ea0e6e-00c5-41a9-9ebc-0960214e6a65","Type":"ContainerDied","Data":"16d7a5b115ea502b2d2354b051017e13ce428286b30fa423a1dbd745dc8c1bee"} Jan 20 17:55:23 crc kubenswrapper[4558]: I0120 17:55:23.418354 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"36ea0e6e-00c5-41a9-9ebc-0960214e6a65","Type":"ContainerDied","Data":"30d45dff30ffdef0deca7aadad3510c91a94ef4686e20a1606c228480aefca30"} Jan 20 17:55:23 crc kubenswrapper[4558]: I0120 17:55:23.418383 4558 scope.go:117] "RemoveContainer" containerID="89d7d8831bb61b1585ef371d9cfbff3b84eb872f95c98bbc734c620c9609dcd7" Jan 20 17:55:23 crc kubenswrapper[4558]: I0120 17:55:23.450237 4558 scope.go:117] "RemoveContainer" containerID="16d7a5b115ea502b2d2354b051017e13ce428286b30fa423a1dbd745dc8c1bee" Jan 20 17:55:23 crc kubenswrapper[4558]: I0120 17:55:23.454576 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:55:23 crc kubenswrapper[4558]: I0120 17:55:23.467012 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:55:23 crc kubenswrapper[4558]: I0120 17:55:23.478105 4558 scope.go:117] "RemoveContainer" containerID="89d7d8831bb61b1585ef371d9cfbff3b84eb872f95c98bbc734c620c9609dcd7" Jan 20 17:55:23 crc kubenswrapper[4558]: E0120 17:55:23.478472 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"89d7d8831bb61b1585ef371d9cfbff3b84eb872f95c98bbc734c620c9609dcd7\": container with ID starting with 89d7d8831bb61b1585ef371d9cfbff3b84eb872f95c98bbc734c620c9609dcd7 not found: ID does not exist" containerID="89d7d8831bb61b1585ef371d9cfbff3b84eb872f95c98bbc734c620c9609dcd7" Jan 20 17:55:23 crc kubenswrapper[4558]: I0120 17:55:23.478502 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"89d7d8831bb61b1585ef371d9cfbff3b84eb872f95c98bbc734c620c9609dcd7"} err="failed to get container status \"89d7d8831bb61b1585ef371d9cfbff3b84eb872f95c98bbc734c620c9609dcd7\": rpc error: code = NotFound desc = could not find container \"89d7d8831bb61b1585ef371d9cfbff3b84eb872f95c98bbc734c620c9609dcd7\": container with ID starting with 89d7d8831bb61b1585ef371d9cfbff3b84eb872f95c98bbc734c620c9609dcd7 not found: ID does not exist" Jan 20 17:55:23 crc kubenswrapper[4558]: I0120 17:55:23.478524 4558 scope.go:117] "RemoveContainer" containerID="16d7a5b115ea502b2d2354b051017e13ce428286b30fa423a1dbd745dc8c1bee" Jan 20 17:55:23 crc kubenswrapper[4558]: I0120 17:55:23.478830 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:55:23 crc kubenswrapper[4558]: E0120 17:55:23.478911 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"16d7a5b115ea502b2d2354b051017e13ce428286b30fa423a1dbd745dc8c1bee\": container 
with ID starting with 16d7a5b115ea502b2d2354b051017e13ce428286b30fa423a1dbd745dc8c1bee not found: ID does not exist" containerID="16d7a5b115ea502b2d2354b051017e13ce428286b30fa423a1dbd745dc8c1bee" Jan 20 17:55:23 crc kubenswrapper[4558]: I0120 17:55:23.478927 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"16d7a5b115ea502b2d2354b051017e13ce428286b30fa423a1dbd745dc8c1bee"} err="failed to get container status \"16d7a5b115ea502b2d2354b051017e13ce428286b30fa423a1dbd745dc8c1bee\": rpc error: code = NotFound desc = could not find container \"16d7a5b115ea502b2d2354b051017e13ce428286b30fa423a1dbd745dc8c1bee\": container with ID starting with 16d7a5b115ea502b2d2354b051017e13ce428286b30fa423a1dbd745dc8c1bee not found: ID does not exist" Jan 20 17:55:23 crc kubenswrapper[4558]: I0120 17:55:23.478939 4558 scope.go:117] "RemoveContainer" containerID="89d7d8831bb61b1585ef371d9cfbff3b84eb872f95c98bbc734c620c9609dcd7" Jan 20 17:55:23 crc kubenswrapper[4558]: I0120 17:55:23.479156 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"89d7d8831bb61b1585ef371d9cfbff3b84eb872f95c98bbc734c620c9609dcd7"} err="failed to get container status \"89d7d8831bb61b1585ef371d9cfbff3b84eb872f95c98bbc734c620c9609dcd7\": rpc error: code = NotFound desc = could not find container \"89d7d8831bb61b1585ef371d9cfbff3b84eb872f95c98bbc734c620c9609dcd7\": container with ID starting with 89d7d8831bb61b1585ef371d9cfbff3b84eb872f95c98bbc734c620c9609dcd7 not found: ID does not exist" Jan 20 17:55:23 crc kubenswrapper[4558]: I0120 17:55:23.479188 4558 scope.go:117] "RemoveContainer" containerID="16d7a5b115ea502b2d2354b051017e13ce428286b30fa423a1dbd745dc8c1bee" Jan 20 17:55:23 crc kubenswrapper[4558]: E0120 17:55:23.479386 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36ea0e6e-00c5-41a9-9ebc-0960214e6a65" containerName="probe" Jan 20 17:55:23 crc kubenswrapper[4558]: I0120 17:55:23.479407 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="36ea0e6e-00c5-41a9-9ebc-0960214e6a65" containerName="probe" Jan 20 17:55:23 crc kubenswrapper[4558]: E0120 17:55:23.479420 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36ea0e6e-00c5-41a9-9ebc-0960214e6a65" containerName="cinder-scheduler" Jan 20 17:55:23 crc kubenswrapper[4558]: I0120 17:55:23.479428 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="36ea0e6e-00c5-41a9-9ebc-0960214e6a65" containerName="cinder-scheduler" Jan 20 17:55:23 crc kubenswrapper[4558]: I0120 17:55:23.479433 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"16d7a5b115ea502b2d2354b051017e13ce428286b30fa423a1dbd745dc8c1bee"} err="failed to get container status \"16d7a5b115ea502b2d2354b051017e13ce428286b30fa423a1dbd745dc8c1bee\": rpc error: code = NotFound desc = could not find container \"16d7a5b115ea502b2d2354b051017e13ce428286b30fa423a1dbd745dc8c1bee\": container with ID starting with 16d7a5b115ea502b2d2354b051017e13ce428286b30fa423a1dbd745dc8c1bee not found: ID does not exist" Jan 20 17:55:23 crc kubenswrapper[4558]: I0120 17:55:23.479627 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="36ea0e6e-00c5-41a9-9ebc-0960214e6a65" containerName="cinder-scheduler" Jan 20 17:55:23 crc kubenswrapper[4558]: I0120 17:55:23.479660 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="36ea0e6e-00c5-41a9-9ebc-0960214e6a65" containerName="probe" Jan 20 17:55:23 crc kubenswrapper[4558]: I0120 
17:55:23.480711 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:55:23 crc kubenswrapper[4558]: I0120 17:55:23.485176 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-scheduler-config-data" Jan 20 17:55:23 crc kubenswrapper[4558]: I0120 17:55:23.502012 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:55:23 crc kubenswrapper[4558]: I0120 17:55:23.566999 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1481b06c-22ef-4b33-b49c-2c5d6903cbee-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"1481b06c-22ef-4b33-b49c-2c5d6903cbee\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:55:23 crc kubenswrapper[4558]: I0120 17:55:23.567396 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lh7cr\" (UniqueName: \"kubernetes.io/projected/1481b06c-22ef-4b33-b49c-2c5d6903cbee-kube-api-access-lh7cr\") pod \"cinder-scheduler-0\" (UID: \"1481b06c-22ef-4b33-b49c-2c5d6903cbee\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:55:23 crc kubenswrapper[4558]: I0120 17:55:23.567503 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1481b06c-22ef-4b33-b49c-2c5d6903cbee-config-data\") pod \"cinder-scheduler-0\" (UID: \"1481b06c-22ef-4b33-b49c-2c5d6903cbee\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:55:23 crc kubenswrapper[4558]: I0120 17:55:23.567613 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1481b06c-22ef-4b33-b49c-2c5d6903cbee-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"1481b06c-22ef-4b33-b49c-2c5d6903cbee\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:55:23 crc kubenswrapper[4558]: I0120 17:55:23.567727 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1481b06c-22ef-4b33-b49c-2c5d6903cbee-scripts\") pod \"cinder-scheduler-0\" (UID: \"1481b06c-22ef-4b33-b49c-2c5d6903cbee\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:55:23 crc kubenswrapper[4558]: I0120 17:55:23.567834 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1481b06c-22ef-4b33-b49c-2c5d6903cbee-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"1481b06c-22ef-4b33-b49c-2c5d6903cbee\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:55:23 crc kubenswrapper[4558]: I0120 17:55:23.669662 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1481b06c-22ef-4b33-b49c-2c5d6903cbee-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"1481b06c-22ef-4b33-b49c-2c5d6903cbee\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:55:23 crc kubenswrapper[4558]: I0120 17:55:23.670605 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1481b06c-22ef-4b33-b49c-2c5d6903cbee-scripts\") pod \"cinder-scheduler-0\" (UID: 
\"1481b06c-22ef-4b33-b49c-2c5d6903cbee\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:55:23 crc kubenswrapper[4558]: I0120 17:55:23.670891 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1481b06c-22ef-4b33-b49c-2c5d6903cbee-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"1481b06c-22ef-4b33-b49c-2c5d6903cbee\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:55:23 crc kubenswrapper[4558]: I0120 17:55:23.671083 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1481b06c-22ef-4b33-b49c-2c5d6903cbee-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"1481b06c-22ef-4b33-b49c-2c5d6903cbee\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:55:23 crc kubenswrapper[4558]: I0120 17:55:23.671228 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1481b06c-22ef-4b33-b49c-2c5d6903cbee-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"1481b06c-22ef-4b33-b49c-2c5d6903cbee\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:55:23 crc kubenswrapper[4558]: I0120 17:55:23.671337 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lh7cr\" (UniqueName: \"kubernetes.io/projected/1481b06c-22ef-4b33-b49c-2c5d6903cbee-kube-api-access-lh7cr\") pod \"cinder-scheduler-0\" (UID: \"1481b06c-22ef-4b33-b49c-2c5d6903cbee\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:55:23 crc kubenswrapper[4558]: I0120 17:55:23.671488 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1481b06c-22ef-4b33-b49c-2c5d6903cbee-config-data\") pod \"cinder-scheduler-0\" (UID: \"1481b06c-22ef-4b33-b49c-2c5d6903cbee\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:55:23 crc kubenswrapper[4558]: I0120 17:55:23.675924 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1481b06c-22ef-4b33-b49c-2c5d6903cbee-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"1481b06c-22ef-4b33-b49c-2c5d6903cbee\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:55:23 crc kubenswrapper[4558]: I0120 17:55:23.676136 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1481b06c-22ef-4b33-b49c-2c5d6903cbee-config-data\") pod \"cinder-scheduler-0\" (UID: \"1481b06c-22ef-4b33-b49c-2c5d6903cbee\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:55:23 crc kubenswrapper[4558]: I0120 17:55:23.676340 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1481b06c-22ef-4b33-b49c-2c5d6903cbee-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"1481b06c-22ef-4b33-b49c-2c5d6903cbee\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:55:23 crc kubenswrapper[4558]: I0120 17:55:23.677455 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1481b06c-22ef-4b33-b49c-2c5d6903cbee-scripts\") pod \"cinder-scheduler-0\" (UID: \"1481b06c-22ef-4b33-b49c-2c5d6903cbee\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:55:23 crc kubenswrapper[4558]: I0120 17:55:23.689620 4558 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lh7cr\" (UniqueName: \"kubernetes.io/projected/1481b06c-22ef-4b33-b49c-2c5d6903cbee-kube-api-access-lh7cr\") pod \"cinder-scheduler-0\" (UID: \"1481b06c-22ef-4b33-b49c-2c5d6903cbee\") " pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:55:23 crc kubenswrapper[4558]: I0120 17:55:23.803350 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:55:24 crc kubenswrapper[4558]: I0120 17:55:24.215787 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:55:24 crc kubenswrapper[4558]: W0120 17:55:24.218434 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1481b06c_22ef_4b33_b49c_2c5d6903cbee.slice/crio-2ffafa34e2db68287a17eec1029e94de95bb2ca1a329fba8f977d09c583f1859 WatchSource:0}: Error finding container 2ffafa34e2db68287a17eec1029e94de95bb2ca1a329fba8f977d09c583f1859: Status 404 returned error can't find the container with id 2ffafa34e2db68287a17eec1029e94de95bb2ca1a329fba8f977d09c583f1859 Jan 20 17:55:24 crc kubenswrapper[4558]: I0120 17:55:24.431272 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"1481b06c-22ef-4b33-b49c-2c5d6903cbee","Type":"ContainerStarted","Data":"2ffafa34e2db68287a17eec1029e94de95bb2ca1a329fba8f977d09c583f1859"} Jan 20 17:55:24 crc kubenswrapper[4558]: I0120 17:55:24.566958 4558 scope.go:117] "RemoveContainer" containerID="4ea7fa61d8b21007a044345acec89fcebe56c2921cf0f76ff2002f44bdc88ede" Jan 20 17:55:24 crc kubenswrapper[4558]: E0120 17:55:24.567432 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:55:24 crc kubenswrapper[4558]: I0120 17:55:24.579685 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="36ea0e6e-00c5-41a9-9ebc-0960214e6a65" path="/var/lib/kubelet/pods/36ea0e6e-00c5-41a9-9ebc-0960214e6a65/volumes" Jan 20 17:55:25 crc kubenswrapper[4558]: I0120 17:55:25.441446 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"1481b06c-22ef-4b33-b49c-2c5d6903cbee","Type":"ContainerStarted","Data":"dc24e5666420136fe5c65594f8305d0aa03d99644a34fc70829cbd42cab6db49"} Jan 20 17:55:25 crc kubenswrapper[4558]: I0120 17:55:25.441779 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"1481b06c-22ef-4b33-b49c-2c5d6903cbee","Type":"ContainerStarted","Data":"8e9c3b8eaacddd4170f8d9b11bdc908d3994f8a53ba6ad4543a5cf43244fda7c"} Jan 20 17:55:25 crc kubenswrapper[4558]: I0120 17:55:25.460180 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/cinder-scheduler-0" podStartSLOduration=2.460148004 podStartE2EDuration="2.460148004s" podCreationTimestamp="2026-01-20 17:55:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:55:25.455431158 +0000 UTC 
m=+4419.215769125" watchObservedRunningTime="2026-01-20 17:55:25.460148004 +0000 UTC m=+4419.220485971" Jan 20 17:55:28 crc kubenswrapper[4558]: I0120 17:55:28.040632 4558 scope.go:117] "RemoveContainer" containerID="d9bc6595c7c4e08347d4624266d02240749799eeb7a5dd344e6f490af50f47f5" Jan 20 17:55:28 crc kubenswrapper[4558]: I0120 17:55:28.100219 4558 scope.go:117] "RemoveContainer" containerID="e9d0cdfef66d95da79eeb71c5f5d77c2c15fec36a463b30437310581ba2767bc" Jan 20 17:55:28 crc kubenswrapper[4558]: I0120 17:55:28.139016 4558 scope.go:117] "RemoveContainer" containerID="ca177c8c965ad48d0ae78252e2d314c13abe059f39572e5e736a0d0ce022c3b7" Jan 20 17:55:28 crc kubenswrapper[4558]: I0120 17:55:28.329803 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:55:28 crc kubenswrapper[4558]: I0120 17:55:28.804186 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:55:31 crc kubenswrapper[4558]: I0120 17:55:31.446452 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/barbican-api-6bb757fd44-jvx79" Jan 20 17:55:31 crc kubenswrapper[4558]: I0120 17:55:31.527987 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/barbican-api-6bb757fd44-jvx79" Jan 20 17:55:31 crc kubenswrapper[4558]: I0120 17:55:31.581811 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-api-7ddd949596-fq6mn"] Jan 20 17:55:31 crc kubenswrapper[4558]: I0120 17:55:31.582093 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-api-7ddd949596-fq6mn" podUID="cb5bdda0-3564-422d-8b42-24546cf568b1" containerName="barbican-api-log" containerID="cri-o://78be98eafdcaaf1e558140085e832f02cb6b33728b1024c81ec11221d71d43b2" gracePeriod=30 Jan 20 17:55:31 crc kubenswrapper[4558]: I0120 17:55:31.582284 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-api-7ddd949596-fq6mn" podUID="cb5bdda0-3564-422d-8b42-24546cf568b1" containerName="barbican-api" containerID="cri-o://7b2580732885c3c0febe92be2cfd2d2372725698e2a898596f877ba9fc9cbf70" gracePeriod=30 Jan 20 17:55:32 crc kubenswrapper[4558]: I0120 17:55:32.522775 4558 generic.go:334] "Generic (PLEG): container finished" podID="cb5bdda0-3564-422d-8b42-24546cf568b1" containerID="78be98eafdcaaf1e558140085e832f02cb6b33728b1024c81ec11221d71d43b2" exitCode=143 Jan 20 17:55:32 crc kubenswrapper[4558]: I0120 17:55:32.522867 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-7ddd949596-fq6mn" event={"ID":"cb5bdda0-3564-422d-8b42-24546cf568b1","Type":"ContainerDied","Data":"78be98eafdcaaf1e558140085e832f02cb6b33728b1024c81ec11221d71d43b2"} Jan 20 17:55:34 crc kubenswrapper[4558]: I0120 17:55:34.004110 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:55:34 crc kubenswrapper[4558]: I0120 17:55:34.731587 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/barbican-api-7ddd949596-fq6mn" podUID="cb5bdda0-3564-422d-8b42-24546cf568b1" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.122:9311/healthcheck\": read tcp 10.217.0.2:38024->10.217.0.122:9311: read: connection reset by peer" Jan 20 17:55:34 crc kubenswrapper[4558]: I0120 
17:55:34.731668 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/barbican-api-7ddd949596-fq6mn" podUID="cb5bdda0-3564-422d-8b42-24546cf568b1" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.122:9311/healthcheck\": read tcp 10.217.0.2:38022->10.217.0.122:9311: read: connection reset by peer" Jan 20 17:55:35 crc kubenswrapper[4558]: I0120 17:55:35.087862 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-api-7ddd949596-fq6mn" Jan 20 17:55:35 crc kubenswrapper[4558]: I0120 17:55:35.189352 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cb5bdda0-3564-422d-8b42-24546cf568b1-config-data\") pod \"cb5bdda0-3564-422d-8b42-24546cf568b1\" (UID: \"cb5bdda0-3564-422d-8b42-24546cf568b1\") " Jan 20 17:55:35 crc kubenswrapper[4558]: I0120 17:55:35.189465 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb5bdda0-3564-422d-8b42-24546cf568b1-combined-ca-bundle\") pod \"cb5bdda0-3564-422d-8b42-24546cf568b1\" (UID: \"cb5bdda0-3564-422d-8b42-24546cf568b1\") " Jan 20 17:55:35 crc kubenswrapper[4558]: I0120 17:55:35.189636 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9sgqw\" (UniqueName: \"kubernetes.io/projected/cb5bdda0-3564-422d-8b42-24546cf568b1-kube-api-access-9sgqw\") pod \"cb5bdda0-3564-422d-8b42-24546cf568b1\" (UID: \"cb5bdda0-3564-422d-8b42-24546cf568b1\") " Jan 20 17:55:35 crc kubenswrapper[4558]: I0120 17:55:35.189662 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cb5bdda0-3564-422d-8b42-24546cf568b1-logs\") pod \"cb5bdda0-3564-422d-8b42-24546cf568b1\" (UID: \"cb5bdda0-3564-422d-8b42-24546cf568b1\") " Jan 20 17:55:35 crc kubenswrapper[4558]: I0120 17:55:35.189698 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cb5bdda0-3564-422d-8b42-24546cf568b1-config-data-custom\") pod \"cb5bdda0-3564-422d-8b42-24546cf568b1\" (UID: \"cb5bdda0-3564-422d-8b42-24546cf568b1\") " Jan 20 17:55:35 crc kubenswrapper[4558]: I0120 17:55:35.190268 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cb5bdda0-3564-422d-8b42-24546cf568b1-logs" (OuterVolumeSpecName: "logs") pod "cb5bdda0-3564-422d-8b42-24546cf568b1" (UID: "cb5bdda0-3564-422d-8b42-24546cf568b1"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:55:35 crc kubenswrapper[4558]: I0120 17:55:35.190444 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cb5bdda0-3564-422d-8b42-24546cf568b1-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:35 crc kubenswrapper[4558]: I0120 17:55:35.194301 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cb5bdda0-3564-422d-8b42-24546cf568b1-kube-api-access-9sgqw" (OuterVolumeSpecName: "kube-api-access-9sgqw") pod "cb5bdda0-3564-422d-8b42-24546cf568b1" (UID: "cb5bdda0-3564-422d-8b42-24546cf568b1"). InnerVolumeSpecName "kube-api-access-9sgqw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:55:35 crc kubenswrapper[4558]: I0120 17:55:35.195456 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cb5bdda0-3564-422d-8b42-24546cf568b1-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "cb5bdda0-3564-422d-8b42-24546cf568b1" (UID: "cb5bdda0-3564-422d-8b42-24546cf568b1"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:55:35 crc kubenswrapper[4558]: I0120 17:55:35.210982 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cb5bdda0-3564-422d-8b42-24546cf568b1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cb5bdda0-3564-422d-8b42-24546cf568b1" (UID: "cb5bdda0-3564-422d-8b42-24546cf568b1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:55:35 crc kubenswrapper[4558]: I0120 17:55:35.230734 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cb5bdda0-3564-422d-8b42-24546cf568b1-config-data" (OuterVolumeSpecName: "config-data") pod "cb5bdda0-3564-422d-8b42-24546cf568b1" (UID: "cb5bdda0-3564-422d-8b42-24546cf568b1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:55:35 crc kubenswrapper[4558]: I0120 17:55:35.292958 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cb5bdda0-3564-422d-8b42-24546cf568b1-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:35 crc kubenswrapper[4558]: I0120 17:55:35.293000 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb5bdda0-3564-422d-8b42-24546cf568b1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:35 crc kubenswrapper[4558]: I0120 17:55:35.293018 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9sgqw\" (UniqueName: \"kubernetes.io/projected/cb5bdda0-3564-422d-8b42-24546cf568b1-kube-api-access-9sgqw\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:35 crc kubenswrapper[4558]: I0120 17:55:35.293029 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cb5bdda0-3564-422d-8b42-24546cf568b1-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:35 crc kubenswrapper[4558]: I0120 17:55:35.555780 4558 generic.go:334] "Generic (PLEG): container finished" podID="cb5bdda0-3564-422d-8b42-24546cf568b1" containerID="7b2580732885c3c0febe92be2cfd2d2372725698e2a898596f877ba9fc9cbf70" exitCode=0 Jan 20 17:55:35 crc kubenswrapper[4558]: I0120 17:55:35.555836 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-7ddd949596-fq6mn" event={"ID":"cb5bdda0-3564-422d-8b42-24546cf568b1","Type":"ContainerDied","Data":"7b2580732885c3c0febe92be2cfd2d2372725698e2a898596f877ba9fc9cbf70"} Jan 20 17:55:35 crc kubenswrapper[4558]: I0120 17:55:35.555868 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-api-7ddd949596-fq6mn" Jan 20 17:55:35 crc kubenswrapper[4558]: I0120 17:55:35.555907 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-7ddd949596-fq6mn" event={"ID":"cb5bdda0-3564-422d-8b42-24546cf568b1","Type":"ContainerDied","Data":"5ea5c6826bcbd301234c722d32b8e7a855f4b650ff2661daa79292125ca35d32"} Jan 20 17:55:35 crc kubenswrapper[4558]: I0120 17:55:35.555931 4558 scope.go:117] "RemoveContainer" containerID="7b2580732885c3c0febe92be2cfd2d2372725698e2a898596f877ba9fc9cbf70" Jan 20 17:55:35 crc kubenswrapper[4558]: I0120 17:55:35.577698 4558 scope.go:117] "RemoveContainer" containerID="78be98eafdcaaf1e558140085e832f02cb6b33728b1024c81ec11221d71d43b2" Jan 20 17:55:35 crc kubenswrapper[4558]: I0120 17:55:35.591415 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-api-7ddd949596-fq6mn"] Jan 20 17:55:35 crc kubenswrapper[4558]: I0120 17:55:35.597742 4558 scope.go:117] "RemoveContainer" containerID="7b2580732885c3c0febe92be2cfd2d2372725698e2a898596f877ba9fc9cbf70" Jan 20 17:55:35 crc kubenswrapper[4558]: I0120 17:55:35.598541 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-api-7ddd949596-fq6mn"] Jan 20 17:55:35 crc kubenswrapper[4558]: E0120 17:55:35.599149 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7b2580732885c3c0febe92be2cfd2d2372725698e2a898596f877ba9fc9cbf70\": container with ID starting with 7b2580732885c3c0febe92be2cfd2d2372725698e2a898596f877ba9fc9cbf70 not found: ID does not exist" containerID="7b2580732885c3c0febe92be2cfd2d2372725698e2a898596f877ba9fc9cbf70" Jan 20 17:55:35 crc kubenswrapper[4558]: I0120 17:55:35.599212 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7b2580732885c3c0febe92be2cfd2d2372725698e2a898596f877ba9fc9cbf70"} err="failed to get container status \"7b2580732885c3c0febe92be2cfd2d2372725698e2a898596f877ba9fc9cbf70\": rpc error: code = NotFound desc = could not find container \"7b2580732885c3c0febe92be2cfd2d2372725698e2a898596f877ba9fc9cbf70\": container with ID starting with 7b2580732885c3c0febe92be2cfd2d2372725698e2a898596f877ba9fc9cbf70 not found: ID does not exist" Jan 20 17:55:35 crc kubenswrapper[4558]: I0120 17:55:35.599246 4558 scope.go:117] "RemoveContainer" containerID="78be98eafdcaaf1e558140085e832f02cb6b33728b1024c81ec11221d71d43b2" Jan 20 17:55:35 crc kubenswrapper[4558]: E0120 17:55:35.599615 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"78be98eafdcaaf1e558140085e832f02cb6b33728b1024c81ec11221d71d43b2\": container with ID starting with 78be98eafdcaaf1e558140085e832f02cb6b33728b1024c81ec11221d71d43b2 not found: ID does not exist" containerID="78be98eafdcaaf1e558140085e832f02cb6b33728b1024c81ec11221d71d43b2" Jan 20 17:55:35 crc kubenswrapper[4558]: I0120 17:55:35.599694 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"78be98eafdcaaf1e558140085e832f02cb6b33728b1024c81ec11221d71d43b2"} err="failed to get container status \"78be98eafdcaaf1e558140085e832f02cb6b33728b1024c81ec11221d71d43b2\": rpc error: code = NotFound desc = could not find container \"78be98eafdcaaf1e558140085e832f02cb6b33728b1024c81ec11221d71d43b2\": container with ID starting with 
78be98eafdcaaf1e558140085e832f02cb6b33728b1024c81ec11221d71d43b2 not found: ID does not exist" Jan 20 17:55:36 crc kubenswrapper[4558]: I0120 17:55:36.582952 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cb5bdda0-3564-422d-8b42-24546cf568b1" path="/var/lib/kubelet/pods/cb5bdda0-3564-422d-8b42-24546cf568b1/volumes" Jan 20 17:55:38 crc kubenswrapper[4558]: I0120 17:55:38.565826 4558 scope.go:117] "RemoveContainer" containerID="4ea7fa61d8b21007a044345acec89fcebe56c2921cf0f76ff2002f44bdc88ede" Jan 20 17:55:38 crc kubenswrapper[4558]: E0120 17:55:38.566426 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:55:38 crc kubenswrapper[4558]: I0120 17:55:38.596372 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/placement-7c7cc56d94-qptzl" Jan 20 17:55:39 crc kubenswrapper[4558]: I0120 17:55:39.644239 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/placement-7c7cc56d94-qptzl" Jan 20 17:55:39 crc kubenswrapper[4558]: I0120 17:55:39.792015 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:55:43 crc kubenswrapper[4558]: I0120 17:55:43.850883 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/neutron-64ffc476d5-9zjgb" Jan 20 17:55:46 crc kubenswrapper[4558]: I0120 17:55:46.108374 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/keystone-747b5668bc-jgcd6" Jan 20 17:55:49 crc kubenswrapper[4558]: I0120 17:55:49.551713 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:55:49 crc kubenswrapper[4558]: E0120 17:55:49.553714 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb5bdda0-3564-422d-8b42-24546cf568b1" containerName="barbican-api-log" Jan 20 17:55:49 crc kubenswrapper[4558]: I0120 17:55:49.553730 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb5bdda0-3564-422d-8b42-24546cf568b1" containerName="barbican-api-log" Jan 20 17:55:49 crc kubenswrapper[4558]: E0120 17:55:49.553753 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb5bdda0-3564-422d-8b42-24546cf568b1" containerName="barbican-api" Jan 20 17:55:49 crc kubenswrapper[4558]: I0120 17:55:49.553759 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb5bdda0-3564-422d-8b42-24546cf568b1" containerName="barbican-api" Jan 20 17:55:49 crc kubenswrapper[4558]: I0120 17:55:49.553942 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="cb5bdda0-3564-422d-8b42-24546cf568b1" containerName="barbican-api" Jan 20 17:55:49 crc kubenswrapper[4558]: I0120 17:55:49.553974 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="cb5bdda0-3564-422d-8b42-24546cf568b1" containerName="barbican-api-log" Jan 20 17:55:49 crc kubenswrapper[4558]: I0120 17:55:49.554734 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstackclient" Jan 20 17:55:49 crc kubenswrapper[4558]: I0120 17:55:49.557004 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-config" Jan 20 17:55:49 crc kubenswrapper[4558]: I0120 17:55:49.557471 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"openstackclient-openstackclient-dockercfg-4whz4" Jan 20 17:55:49 crc kubenswrapper[4558]: I0120 17:55:49.560515 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"openstack-config-secret" Jan 20 17:55:49 crc kubenswrapper[4558]: I0120 17:55:49.570311 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:55:49 crc kubenswrapper[4558]: I0120 17:55:49.650860 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3834281a-b947-44ad-9390-a2057e7e902d-combined-ca-bundle\") pod \"openstackclient\" (UID: \"3834281a-b947-44ad-9390-a2057e7e902d\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:55:49 crc kubenswrapper[4558]: I0120 17:55:49.651024 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/3834281a-b947-44ad-9390-a2057e7e902d-openstack-config\") pod \"openstackclient\" (UID: \"3834281a-b947-44ad-9390-a2057e7e902d\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:55:49 crc kubenswrapper[4558]: I0120 17:55:49.651128 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/3834281a-b947-44ad-9390-a2057e7e902d-openstack-config-secret\") pod \"openstackclient\" (UID: \"3834281a-b947-44ad-9390-a2057e7e902d\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:55:49 crc kubenswrapper[4558]: I0120 17:55:49.651174 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gp9v6\" (UniqueName: \"kubernetes.io/projected/3834281a-b947-44ad-9390-a2057e7e902d-kube-api-access-gp9v6\") pod \"openstackclient\" (UID: \"3834281a-b947-44ad-9390-a2057e7e902d\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:55:49 crc kubenswrapper[4558]: I0120 17:55:49.752636 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3834281a-b947-44ad-9390-a2057e7e902d-combined-ca-bundle\") pod \"openstackclient\" (UID: \"3834281a-b947-44ad-9390-a2057e7e902d\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:55:49 crc kubenswrapper[4558]: I0120 17:55:49.752739 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/3834281a-b947-44ad-9390-a2057e7e902d-openstack-config\") pod \"openstackclient\" (UID: \"3834281a-b947-44ad-9390-a2057e7e902d\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:55:49 crc kubenswrapper[4558]: I0120 17:55:49.752800 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/3834281a-b947-44ad-9390-a2057e7e902d-openstack-config-secret\") pod \"openstackclient\" (UID: \"3834281a-b947-44ad-9390-a2057e7e902d\") " pod="openstack-kuttl-tests/openstackclient" 
Jan 20 17:55:49 crc kubenswrapper[4558]: I0120 17:55:49.752821 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gp9v6\" (UniqueName: \"kubernetes.io/projected/3834281a-b947-44ad-9390-a2057e7e902d-kube-api-access-gp9v6\") pod \"openstackclient\" (UID: \"3834281a-b947-44ad-9390-a2057e7e902d\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:55:49 crc kubenswrapper[4558]: I0120 17:55:49.753696 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/3834281a-b947-44ad-9390-a2057e7e902d-openstack-config\") pod \"openstackclient\" (UID: \"3834281a-b947-44ad-9390-a2057e7e902d\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:55:49 crc kubenswrapper[4558]: I0120 17:55:49.758139 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3834281a-b947-44ad-9390-a2057e7e902d-combined-ca-bundle\") pod \"openstackclient\" (UID: \"3834281a-b947-44ad-9390-a2057e7e902d\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:55:49 crc kubenswrapper[4558]: I0120 17:55:49.758219 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/3834281a-b947-44ad-9390-a2057e7e902d-openstack-config-secret\") pod \"openstackclient\" (UID: \"3834281a-b947-44ad-9390-a2057e7e902d\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:55:49 crc kubenswrapper[4558]: I0120 17:55:49.766972 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gp9v6\" (UniqueName: \"kubernetes.io/projected/3834281a-b947-44ad-9390-a2057e7e902d-kube-api-access-gp9v6\") pod \"openstackclient\" (UID: \"3834281a-b947-44ad-9390-a2057e7e902d\") " pod="openstack-kuttl-tests/openstackclient" Jan 20 17:55:49 crc kubenswrapper[4558]: I0120 17:55:49.804282 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/neutron-5b5bd9fb5d-vdljd" Jan 20 17:55:49 crc kubenswrapper[4558]: I0120 17:55:49.874203 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstackclient" Jan 20 17:55:49 crc kubenswrapper[4558]: I0120 17:55:49.877955 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-64ffc476d5-9zjgb"] Jan 20 17:55:49 crc kubenswrapper[4558]: I0120 17:55:49.878577 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-64ffc476d5-9zjgb" podUID="b637458a-f8f6-456e-888f-3d61734be420" containerName="neutron-api" containerID="cri-o://aa3f685bf49ef2fddab92d64cfaccff086ad8c1a707e26911bd2c12773aaacf3" gracePeriod=30 Jan 20 17:55:49 crc kubenswrapper[4558]: I0120 17:55:49.878768 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-64ffc476d5-9zjgb" podUID="b637458a-f8f6-456e-888f-3d61734be420" containerName="neutron-httpd" containerID="cri-o://fe46b8f805f4f36bad1c42f2cb3cd2007879b8f0cad9c8dfc988eb7d2db5084f" gracePeriod=30 Jan 20 17:55:50 crc kubenswrapper[4558]: I0120 17:55:50.284721 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:55:50 crc kubenswrapper[4558]: W0120 17:55:50.286509 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3834281a_b947_44ad_9390_a2057e7e902d.slice/crio-04b4bc93118932383adf777a774b172bc0814a4b99e17ba693db817ff75c22ea WatchSource:0}: Error finding container 04b4bc93118932383adf777a774b172bc0814a4b99e17ba693db817ff75c22ea: Status 404 returned error can't find the container with id 04b4bc93118932383adf777a774b172bc0814a4b99e17ba693db817ff75c22ea Jan 20 17:55:50 crc kubenswrapper[4558]: I0120 17:55:50.569703 4558 scope.go:117] "RemoveContainer" containerID="4ea7fa61d8b21007a044345acec89fcebe56c2921cf0f76ff2002f44bdc88ede" Jan 20 17:55:50 crc kubenswrapper[4558]: E0120 17:55:50.570026 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:55:50 crc kubenswrapper[4558]: I0120 17:55:50.704362 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstackclient" event={"ID":"3834281a-b947-44ad-9390-a2057e7e902d","Type":"ContainerStarted","Data":"a79ee7ec7558132e07cb924fcd12069b0f87fbec2d9e3ff1c1b559b752eaac9f"} Jan 20 17:55:50 crc kubenswrapper[4558]: I0120 17:55:50.704418 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstackclient" event={"ID":"3834281a-b947-44ad-9390-a2057e7e902d","Type":"ContainerStarted","Data":"04b4bc93118932383adf777a774b172bc0814a4b99e17ba693db817ff75c22ea"} Jan 20 17:55:50 crc kubenswrapper[4558]: I0120 17:55:50.706330 4558 generic.go:334] "Generic (PLEG): container finished" podID="b637458a-f8f6-456e-888f-3d61734be420" containerID="fe46b8f805f4f36bad1c42f2cb3cd2007879b8f0cad9c8dfc988eb7d2db5084f" exitCode=0 Jan 20 17:55:50 crc kubenswrapper[4558]: I0120 17:55:50.706363 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-64ffc476d5-9zjgb" 
event={"ID":"b637458a-f8f6-456e-888f-3d61734be420","Type":"ContainerDied","Data":"fe46b8f805f4f36bad1c42f2cb3cd2007879b8f0cad9c8dfc988eb7d2db5084f"} Jan 20 17:55:52 crc kubenswrapper[4558]: I0120 17:55:52.000685 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/openstackclient" podStartSLOduration=3.000667596 podStartE2EDuration="3.000667596s" podCreationTimestamp="2026-01-20 17:55:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:55:50.719503222 +0000 UTC m=+4444.479841189" watchObservedRunningTime="2026-01-20 17:55:52.000667596 +0000 UTC m=+4445.761005563" Jan 20 17:55:52 crc kubenswrapper[4558]: I0120 17:55:52.005906 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/swift-proxy-77d4fcb95d-7jsvg"] Jan 20 17:55:52 crc kubenswrapper[4558]: I0120 17:55:52.007122 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-proxy-77d4fcb95d-7jsvg" Jan 20 17:55:52 crc kubenswrapper[4558]: I0120 17:55:52.009391 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-swift-internal-svc" Jan 20 17:55:52 crc kubenswrapper[4558]: I0120 17:55:52.009585 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-swift-public-svc" Jan 20 17:55:52 crc kubenswrapper[4558]: I0120 17:55:52.010380 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"swift-proxy-config-data" Jan 20 17:55:52 crc kubenswrapper[4558]: I0120 17:55:52.017993 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-proxy-77d4fcb95d-7jsvg"] Jan 20 17:55:52 crc kubenswrapper[4558]: I0120 17:55:52.198911 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c80b87d-f7d5-46dd-a6d9-bba3e82e405b-combined-ca-bundle\") pod \"swift-proxy-77d4fcb95d-7jsvg\" (UID: \"2c80b87d-f7d5-46dd-a6d9-bba3e82e405b\") " pod="openstack-kuttl-tests/swift-proxy-77d4fcb95d-7jsvg" Jan 20 17:55:52 crc kubenswrapper[4558]: I0120 17:55:52.198970 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2c80b87d-f7d5-46dd-a6d9-bba3e82e405b-config-data\") pod \"swift-proxy-77d4fcb95d-7jsvg\" (UID: \"2c80b87d-f7d5-46dd-a6d9-bba3e82e405b\") " pod="openstack-kuttl-tests/swift-proxy-77d4fcb95d-7jsvg" Jan 20 17:55:52 crc kubenswrapper[4558]: I0120 17:55:52.199067 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2c80b87d-f7d5-46dd-a6d9-bba3e82e405b-public-tls-certs\") pod \"swift-proxy-77d4fcb95d-7jsvg\" (UID: \"2c80b87d-f7d5-46dd-a6d9-bba3e82e405b\") " pod="openstack-kuttl-tests/swift-proxy-77d4fcb95d-7jsvg" Jan 20 17:55:52 crc kubenswrapper[4558]: I0120 17:55:52.199097 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/2c80b87d-f7d5-46dd-a6d9-bba3e82e405b-etc-swift\") pod \"swift-proxy-77d4fcb95d-7jsvg\" (UID: \"2c80b87d-f7d5-46dd-a6d9-bba3e82e405b\") " pod="openstack-kuttl-tests/swift-proxy-77d4fcb95d-7jsvg" Jan 20 17:55:52 crc kubenswrapper[4558]: I0120 17:55:52.199120 4558 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dgrmm\" (UniqueName: \"kubernetes.io/projected/2c80b87d-f7d5-46dd-a6d9-bba3e82e405b-kube-api-access-dgrmm\") pod \"swift-proxy-77d4fcb95d-7jsvg\" (UID: \"2c80b87d-f7d5-46dd-a6d9-bba3e82e405b\") " pod="openstack-kuttl-tests/swift-proxy-77d4fcb95d-7jsvg" Jan 20 17:55:52 crc kubenswrapper[4558]: I0120 17:55:52.199143 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2c80b87d-f7d5-46dd-a6d9-bba3e82e405b-run-httpd\") pod \"swift-proxy-77d4fcb95d-7jsvg\" (UID: \"2c80b87d-f7d5-46dd-a6d9-bba3e82e405b\") " pod="openstack-kuttl-tests/swift-proxy-77d4fcb95d-7jsvg" Jan 20 17:55:52 crc kubenswrapper[4558]: I0120 17:55:52.199278 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2c80b87d-f7d5-46dd-a6d9-bba3e82e405b-internal-tls-certs\") pod \"swift-proxy-77d4fcb95d-7jsvg\" (UID: \"2c80b87d-f7d5-46dd-a6d9-bba3e82e405b\") " pod="openstack-kuttl-tests/swift-proxy-77d4fcb95d-7jsvg" Jan 20 17:55:52 crc kubenswrapper[4558]: I0120 17:55:52.199513 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2c80b87d-f7d5-46dd-a6d9-bba3e82e405b-log-httpd\") pod \"swift-proxy-77d4fcb95d-7jsvg\" (UID: \"2c80b87d-f7d5-46dd-a6d9-bba3e82e405b\") " pod="openstack-kuttl-tests/swift-proxy-77d4fcb95d-7jsvg" Jan 20 17:55:52 crc kubenswrapper[4558]: I0120 17:55:52.300963 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2c80b87d-f7d5-46dd-a6d9-bba3e82e405b-log-httpd\") pod \"swift-proxy-77d4fcb95d-7jsvg\" (UID: \"2c80b87d-f7d5-46dd-a6d9-bba3e82e405b\") " pod="openstack-kuttl-tests/swift-proxy-77d4fcb95d-7jsvg" Jan 20 17:55:52 crc kubenswrapper[4558]: I0120 17:55:52.301059 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c80b87d-f7d5-46dd-a6d9-bba3e82e405b-combined-ca-bundle\") pod \"swift-proxy-77d4fcb95d-7jsvg\" (UID: \"2c80b87d-f7d5-46dd-a6d9-bba3e82e405b\") " pod="openstack-kuttl-tests/swift-proxy-77d4fcb95d-7jsvg" Jan 20 17:55:52 crc kubenswrapper[4558]: I0120 17:55:52.301113 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2c80b87d-f7d5-46dd-a6d9-bba3e82e405b-config-data\") pod \"swift-proxy-77d4fcb95d-7jsvg\" (UID: \"2c80b87d-f7d5-46dd-a6d9-bba3e82e405b\") " pod="openstack-kuttl-tests/swift-proxy-77d4fcb95d-7jsvg" Jan 20 17:55:52 crc kubenswrapper[4558]: I0120 17:55:52.301208 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2c80b87d-f7d5-46dd-a6d9-bba3e82e405b-public-tls-certs\") pod \"swift-proxy-77d4fcb95d-7jsvg\" (UID: \"2c80b87d-f7d5-46dd-a6d9-bba3e82e405b\") " pod="openstack-kuttl-tests/swift-proxy-77d4fcb95d-7jsvg" Jan 20 17:55:52 crc kubenswrapper[4558]: I0120 17:55:52.301236 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/2c80b87d-f7d5-46dd-a6d9-bba3e82e405b-etc-swift\") pod \"swift-proxy-77d4fcb95d-7jsvg\" (UID: \"2c80b87d-f7d5-46dd-a6d9-bba3e82e405b\") " 
pod="openstack-kuttl-tests/swift-proxy-77d4fcb95d-7jsvg" Jan 20 17:55:52 crc kubenswrapper[4558]: I0120 17:55:52.301263 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dgrmm\" (UniqueName: \"kubernetes.io/projected/2c80b87d-f7d5-46dd-a6d9-bba3e82e405b-kube-api-access-dgrmm\") pod \"swift-proxy-77d4fcb95d-7jsvg\" (UID: \"2c80b87d-f7d5-46dd-a6d9-bba3e82e405b\") " pod="openstack-kuttl-tests/swift-proxy-77d4fcb95d-7jsvg" Jan 20 17:55:52 crc kubenswrapper[4558]: I0120 17:55:52.301290 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2c80b87d-f7d5-46dd-a6d9-bba3e82e405b-run-httpd\") pod \"swift-proxy-77d4fcb95d-7jsvg\" (UID: \"2c80b87d-f7d5-46dd-a6d9-bba3e82e405b\") " pod="openstack-kuttl-tests/swift-proxy-77d4fcb95d-7jsvg" Jan 20 17:55:52 crc kubenswrapper[4558]: I0120 17:55:52.301329 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2c80b87d-f7d5-46dd-a6d9-bba3e82e405b-internal-tls-certs\") pod \"swift-proxy-77d4fcb95d-7jsvg\" (UID: \"2c80b87d-f7d5-46dd-a6d9-bba3e82e405b\") " pod="openstack-kuttl-tests/swift-proxy-77d4fcb95d-7jsvg" Jan 20 17:55:52 crc kubenswrapper[4558]: I0120 17:55:52.301608 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2c80b87d-f7d5-46dd-a6d9-bba3e82e405b-log-httpd\") pod \"swift-proxy-77d4fcb95d-7jsvg\" (UID: \"2c80b87d-f7d5-46dd-a6d9-bba3e82e405b\") " pod="openstack-kuttl-tests/swift-proxy-77d4fcb95d-7jsvg" Jan 20 17:55:52 crc kubenswrapper[4558]: I0120 17:55:52.306460 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2c80b87d-f7d5-46dd-a6d9-bba3e82e405b-public-tls-certs\") pod \"swift-proxy-77d4fcb95d-7jsvg\" (UID: \"2c80b87d-f7d5-46dd-a6d9-bba3e82e405b\") " pod="openstack-kuttl-tests/swift-proxy-77d4fcb95d-7jsvg" Jan 20 17:55:52 crc kubenswrapper[4558]: I0120 17:55:52.310255 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c80b87d-f7d5-46dd-a6d9-bba3e82e405b-combined-ca-bundle\") pod \"swift-proxy-77d4fcb95d-7jsvg\" (UID: \"2c80b87d-f7d5-46dd-a6d9-bba3e82e405b\") " pod="openstack-kuttl-tests/swift-proxy-77d4fcb95d-7jsvg" Jan 20 17:55:52 crc kubenswrapper[4558]: I0120 17:55:52.313377 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2c80b87d-f7d5-46dd-a6d9-bba3e82e405b-run-httpd\") pod \"swift-proxy-77d4fcb95d-7jsvg\" (UID: \"2c80b87d-f7d5-46dd-a6d9-bba3e82e405b\") " pod="openstack-kuttl-tests/swift-proxy-77d4fcb95d-7jsvg" Jan 20 17:55:52 crc kubenswrapper[4558]: I0120 17:55:52.313863 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2c80b87d-f7d5-46dd-a6d9-bba3e82e405b-config-data\") pod \"swift-proxy-77d4fcb95d-7jsvg\" (UID: \"2c80b87d-f7d5-46dd-a6d9-bba3e82e405b\") " pod="openstack-kuttl-tests/swift-proxy-77d4fcb95d-7jsvg" Jan 20 17:55:52 crc kubenswrapper[4558]: I0120 17:55:52.314423 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/2c80b87d-f7d5-46dd-a6d9-bba3e82e405b-etc-swift\") pod \"swift-proxy-77d4fcb95d-7jsvg\" (UID: \"2c80b87d-f7d5-46dd-a6d9-bba3e82e405b\") " 
pod="openstack-kuttl-tests/swift-proxy-77d4fcb95d-7jsvg" Jan 20 17:55:52 crc kubenswrapper[4558]: I0120 17:55:52.314864 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2c80b87d-f7d5-46dd-a6d9-bba3e82e405b-internal-tls-certs\") pod \"swift-proxy-77d4fcb95d-7jsvg\" (UID: \"2c80b87d-f7d5-46dd-a6d9-bba3e82e405b\") " pod="openstack-kuttl-tests/swift-proxy-77d4fcb95d-7jsvg" Jan 20 17:55:52 crc kubenswrapper[4558]: I0120 17:55:52.321587 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dgrmm\" (UniqueName: \"kubernetes.io/projected/2c80b87d-f7d5-46dd-a6d9-bba3e82e405b-kube-api-access-dgrmm\") pod \"swift-proxy-77d4fcb95d-7jsvg\" (UID: \"2c80b87d-f7d5-46dd-a6d9-bba3e82e405b\") " pod="openstack-kuttl-tests/swift-proxy-77d4fcb95d-7jsvg" Jan 20 17:55:52 crc kubenswrapper[4558]: I0120 17:55:52.334016 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-proxy-77d4fcb95d-7jsvg" Jan 20 17:55:52 crc kubenswrapper[4558]: I0120 17:55:52.755383 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/swift-proxy-77d4fcb95d-7jsvg"] Jan 20 17:55:53 crc kubenswrapper[4558]: I0120 17:55:53.519357 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:55:53 crc kubenswrapper[4558]: I0120 17:55:53.519785 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="7ba8f8c1-7947-4326-b7d2-49f89d248c21" containerName="ceilometer-notification-agent" containerID="cri-o://abd9f6fd38ccd715c9ead3e1fb12588259945bf444bcd60cc90c24693ed32591" gracePeriod=30 Jan 20 17:55:53 crc kubenswrapper[4558]: I0120 17:55:53.519785 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="7ba8f8c1-7947-4326-b7d2-49f89d248c21" containerName="sg-core" containerID="cri-o://79d692f8937fead706fe3763c908005975158c1dc08e3d95aa08c9cf976b6e89" gracePeriod=30 Jan 20 17:55:53 crc kubenswrapper[4558]: I0120 17:55:53.519904 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="7ba8f8c1-7947-4326-b7d2-49f89d248c21" containerName="proxy-httpd" containerID="cri-o://8c5549e3c402e532f1c102abe82c1184d17474896e83aaa32a03381dda0c78c7" gracePeriod=30 Jan 20 17:55:53 crc kubenswrapper[4558]: I0120 17:55:53.520122 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="7ba8f8c1-7947-4326-b7d2-49f89d248c21" containerName="ceilometer-central-agent" containerID="cri-o://3a9bd65fc6a62f8e31388f25a7802f1d87ccd3138db1d4de87aa87e1de385f54" gracePeriod=30 Jan 20 17:55:53 crc kubenswrapper[4558]: I0120 17:55:53.738104 4558 generic.go:334] "Generic (PLEG): container finished" podID="7ba8f8c1-7947-4326-b7d2-49f89d248c21" containerID="8c5549e3c402e532f1c102abe82c1184d17474896e83aaa32a03381dda0c78c7" exitCode=0 Jan 20 17:55:53 crc kubenswrapper[4558]: I0120 17:55:53.738143 4558 generic.go:334] "Generic (PLEG): container finished" podID="7ba8f8c1-7947-4326-b7d2-49f89d248c21" containerID="79d692f8937fead706fe3763c908005975158c1dc08e3d95aa08c9cf976b6e89" exitCode=2 Jan 20 17:55:53 crc kubenswrapper[4558]: I0120 17:55:53.738251 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" 
event={"ID":"7ba8f8c1-7947-4326-b7d2-49f89d248c21","Type":"ContainerDied","Data":"8c5549e3c402e532f1c102abe82c1184d17474896e83aaa32a03381dda0c78c7"} Jan 20 17:55:53 crc kubenswrapper[4558]: I0120 17:55:53.738365 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"7ba8f8c1-7947-4326-b7d2-49f89d248c21","Type":"ContainerDied","Data":"79d692f8937fead706fe3763c908005975158c1dc08e3d95aa08c9cf976b6e89"} Jan 20 17:55:53 crc kubenswrapper[4558]: I0120 17:55:53.741422 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-77d4fcb95d-7jsvg" event={"ID":"2c80b87d-f7d5-46dd-a6d9-bba3e82e405b","Type":"ContainerStarted","Data":"a6dd9f88121dd9a74ad7da43d7ab6d29f99cd801b82eb031635bd0e0e30304e3"} Jan 20 17:55:53 crc kubenswrapper[4558]: I0120 17:55:53.741476 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-77d4fcb95d-7jsvg" event={"ID":"2c80b87d-f7d5-46dd-a6d9-bba3e82e405b","Type":"ContainerStarted","Data":"3ea77410056274873a4720a35fcc92ab9a47e76916a85817eee90b16a86d216d"} Jan 20 17:55:53 crc kubenswrapper[4558]: I0120 17:55:53.741490 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-77d4fcb95d-7jsvg" event={"ID":"2c80b87d-f7d5-46dd-a6d9-bba3e82e405b","Type":"ContainerStarted","Data":"47a75f1a8d8f8184c94d7b506aa4c769c07364acea356bd91f6ce11c9f175376"} Jan 20 17:55:53 crc kubenswrapper[4558]: I0120 17:55:53.741601 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/swift-proxy-77d4fcb95d-7jsvg" Jan 20 17:55:53 crc kubenswrapper[4558]: I0120 17:55:53.773026 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/swift-proxy-77d4fcb95d-7jsvg" podStartSLOduration=2.773009204 podStartE2EDuration="2.773009204s" podCreationTimestamp="2026-01-20 17:55:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:55:53.764488178 +0000 UTC m=+4447.524826145" watchObservedRunningTime="2026-01-20 17:55:53.773009204 +0000 UTC m=+4447.533347172" Jan 20 17:55:54 crc kubenswrapper[4558]: I0120 17:55:54.265562 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:55:54 crc kubenswrapper[4558]: I0120 17:55:54.351787 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7ba8f8c1-7947-4326-b7d2-49f89d248c21-scripts\") pod \"7ba8f8c1-7947-4326-b7d2-49f89d248c21\" (UID: \"7ba8f8c1-7947-4326-b7d2-49f89d248c21\") " Jan 20 17:55:54 crc kubenswrapper[4558]: I0120 17:55:54.351829 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ba8f8c1-7947-4326-b7d2-49f89d248c21-config-data\") pod \"7ba8f8c1-7947-4326-b7d2-49f89d248c21\" (UID: \"7ba8f8c1-7947-4326-b7d2-49f89d248c21\") " Jan 20 17:55:54 crc kubenswrapper[4558]: I0120 17:55:54.351926 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ba8f8c1-7947-4326-b7d2-49f89d248c21-combined-ca-bundle\") pod \"7ba8f8c1-7947-4326-b7d2-49f89d248c21\" (UID: \"7ba8f8c1-7947-4326-b7d2-49f89d248c21\") " Jan 20 17:55:54 crc kubenswrapper[4558]: I0120 17:55:54.351952 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7ba8f8c1-7947-4326-b7d2-49f89d248c21-log-httpd\") pod \"7ba8f8c1-7947-4326-b7d2-49f89d248c21\" (UID: \"7ba8f8c1-7947-4326-b7d2-49f89d248c21\") " Jan 20 17:55:54 crc kubenswrapper[4558]: I0120 17:55:54.351992 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9d865\" (UniqueName: \"kubernetes.io/projected/7ba8f8c1-7947-4326-b7d2-49f89d248c21-kube-api-access-9d865\") pod \"7ba8f8c1-7947-4326-b7d2-49f89d248c21\" (UID: \"7ba8f8c1-7947-4326-b7d2-49f89d248c21\") " Jan 20 17:55:54 crc kubenswrapper[4558]: I0120 17:55:54.352665 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7ba8f8c1-7947-4326-b7d2-49f89d248c21-run-httpd\") pod \"7ba8f8c1-7947-4326-b7d2-49f89d248c21\" (UID: \"7ba8f8c1-7947-4326-b7d2-49f89d248c21\") " Jan 20 17:55:54 crc kubenswrapper[4558]: I0120 17:55:54.352717 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7ba8f8c1-7947-4326-b7d2-49f89d248c21-sg-core-conf-yaml\") pod \"7ba8f8c1-7947-4326-b7d2-49f89d248c21\" (UID: \"7ba8f8c1-7947-4326-b7d2-49f89d248c21\") " Jan 20 17:55:54 crc kubenswrapper[4558]: I0120 17:55:54.352373 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7ba8f8c1-7947-4326-b7d2-49f89d248c21-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "7ba8f8c1-7947-4326-b7d2-49f89d248c21" (UID: "7ba8f8c1-7947-4326-b7d2-49f89d248c21"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:55:54 crc kubenswrapper[4558]: I0120 17:55:54.354115 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7ba8f8c1-7947-4326-b7d2-49f89d248c21-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "7ba8f8c1-7947-4326-b7d2-49f89d248c21" (UID: "7ba8f8c1-7947-4326-b7d2-49f89d248c21"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:55:54 crc kubenswrapper[4558]: I0120 17:55:54.357642 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ba8f8c1-7947-4326-b7d2-49f89d248c21-scripts" (OuterVolumeSpecName: "scripts") pod "7ba8f8c1-7947-4326-b7d2-49f89d248c21" (UID: "7ba8f8c1-7947-4326-b7d2-49f89d248c21"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:55:54 crc kubenswrapper[4558]: I0120 17:55:54.358535 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7ba8f8c1-7947-4326-b7d2-49f89d248c21-kube-api-access-9d865" (OuterVolumeSpecName: "kube-api-access-9d865") pod "7ba8f8c1-7947-4326-b7d2-49f89d248c21" (UID: "7ba8f8c1-7947-4326-b7d2-49f89d248c21"). InnerVolumeSpecName "kube-api-access-9d865". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:55:54 crc kubenswrapper[4558]: I0120 17:55:54.376585 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ba8f8c1-7947-4326-b7d2-49f89d248c21-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "7ba8f8c1-7947-4326-b7d2-49f89d248c21" (UID: "7ba8f8c1-7947-4326-b7d2-49f89d248c21"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:55:54 crc kubenswrapper[4558]: I0120 17:55:54.423074 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ba8f8c1-7947-4326-b7d2-49f89d248c21-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7ba8f8c1-7947-4326-b7d2-49f89d248c21" (UID: "7ba8f8c1-7947-4326-b7d2-49f89d248c21"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:55:54 crc kubenswrapper[4558]: I0120 17:55:54.433184 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ba8f8c1-7947-4326-b7d2-49f89d248c21-config-data" (OuterVolumeSpecName: "config-data") pod "7ba8f8c1-7947-4326-b7d2-49f89d248c21" (UID: "7ba8f8c1-7947-4326-b7d2-49f89d248c21"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:55:54 crc kubenswrapper[4558]: I0120 17:55:54.456222 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7ba8f8c1-7947-4326-b7d2-49f89d248c21-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:54 crc kubenswrapper[4558]: I0120 17:55:54.456284 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ba8f8c1-7947-4326-b7d2-49f89d248c21-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:54 crc kubenswrapper[4558]: I0120 17:55:54.456345 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ba8f8c1-7947-4326-b7d2-49f89d248c21-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:54 crc kubenswrapper[4558]: I0120 17:55:54.456399 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7ba8f8c1-7947-4326-b7d2-49f89d248c21-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:54 crc kubenswrapper[4558]: I0120 17:55:54.456447 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9d865\" (UniqueName: \"kubernetes.io/projected/7ba8f8c1-7947-4326-b7d2-49f89d248c21-kube-api-access-9d865\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:54 crc kubenswrapper[4558]: I0120 17:55:54.456490 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7ba8f8c1-7947-4326-b7d2-49f89d248c21-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:54 crc kubenswrapper[4558]: I0120 17:55:54.456536 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7ba8f8c1-7947-4326-b7d2-49f89d248c21-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:54 crc kubenswrapper[4558]: I0120 17:55:54.750635 4558 generic.go:334] "Generic (PLEG): container finished" podID="7ba8f8c1-7947-4326-b7d2-49f89d248c21" containerID="abd9f6fd38ccd715c9ead3e1fb12588259945bf444bcd60cc90c24693ed32591" exitCode=0 Jan 20 17:55:54 crc kubenswrapper[4558]: I0120 17:55:54.750666 4558 generic.go:334] "Generic (PLEG): container finished" podID="7ba8f8c1-7947-4326-b7d2-49f89d248c21" containerID="3a9bd65fc6a62f8e31388f25a7802f1d87ccd3138db1d4de87aa87e1de385f54" exitCode=0 Jan 20 17:55:54 crc kubenswrapper[4558]: I0120 17:55:54.751529 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:55:54 crc kubenswrapper[4558]: I0120 17:55:54.751999 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"7ba8f8c1-7947-4326-b7d2-49f89d248c21","Type":"ContainerDied","Data":"abd9f6fd38ccd715c9ead3e1fb12588259945bf444bcd60cc90c24693ed32591"} Jan 20 17:55:54 crc kubenswrapper[4558]: I0120 17:55:54.752041 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"7ba8f8c1-7947-4326-b7d2-49f89d248c21","Type":"ContainerDied","Data":"3a9bd65fc6a62f8e31388f25a7802f1d87ccd3138db1d4de87aa87e1de385f54"} Jan 20 17:55:54 crc kubenswrapper[4558]: I0120 17:55:54.752055 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"7ba8f8c1-7947-4326-b7d2-49f89d248c21","Type":"ContainerDied","Data":"72c054ef318a21775605c91ef2410cce83d929dfd0cf052af173da245eedd676"} Jan 20 17:55:54 crc kubenswrapper[4558]: I0120 17:55:54.752072 4558 scope.go:117] "RemoveContainer" containerID="8c5549e3c402e532f1c102abe82c1184d17474896e83aaa32a03381dda0c78c7" Jan 20 17:55:54 crc kubenswrapper[4558]: I0120 17:55:54.752199 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/swift-proxy-77d4fcb95d-7jsvg" Jan 20 17:55:54 crc kubenswrapper[4558]: I0120 17:55:54.773903 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:55:54 crc kubenswrapper[4558]: I0120 17:55:54.774432 4558 scope.go:117] "RemoveContainer" containerID="79d692f8937fead706fe3763c908005975158c1dc08e3d95aa08c9cf976b6e89" Jan 20 17:55:54 crc kubenswrapper[4558]: I0120 17:55:54.783980 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:55:54 crc kubenswrapper[4558]: I0120 17:55:54.798359 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:55:54 crc kubenswrapper[4558]: E0120 17:55:54.798833 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ba8f8c1-7947-4326-b7d2-49f89d248c21" containerName="ceilometer-central-agent" Jan 20 17:55:54 crc kubenswrapper[4558]: I0120 17:55:54.798885 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ba8f8c1-7947-4326-b7d2-49f89d248c21" containerName="ceilometer-central-agent" Jan 20 17:55:54 crc kubenswrapper[4558]: E0120 17:55:54.798958 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ba8f8c1-7947-4326-b7d2-49f89d248c21" containerName="proxy-httpd" Jan 20 17:55:54 crc kubenswrapper[4558]: I0120 17:55:54.798999 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ba8f8c1-7947-4326-b7d2-49f89d248c21" containerName="proxy-httpd" Jan 20 17:55:54 crc kubenswrapper[4558]: E0120 17:55:54.799059 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ba8f8c1-7947-4326-b7d2-49f89d248c21" containerName="ceilometer-notification-agent" Jan 20 17:55:54 crc kubenswrapper[4558]: I0120 17:55:54.799101 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ba8f8c1-7947-4326-b7d2-49f89d248c21" containerName="ceilometer-notification-agent" Jan 20 17:55:54 crc kubenswrapper[4558]: E0120 17:55:54.799146 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ba8f8c1-7947-4326-b7d2-49f89d248c21" containerName="sg-core" Jan 20 17:55:54 crc kubenswrapper[4558]: I0120 17:55:54.799235 4558 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="7ba8f8c1-7947-4326-b7d2-49f89d248c21" containerName="sg-core" Jan 20 17:55:54 crc kubenswrapper[4558]: I0120 17:55:54.799467 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="7ba8f8c1-7947-4326-b7d2-49f89d248c21" containerName="sg-core" Jan 20 17:55:54 crc kubenswrapper[4558]: I0120 17:55:54.799524 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="7ba8f8c1-7947-4326-b7d2-49f89d248c21" containerName="ceilometer-notification-agent" Jan 20 17:55:54 crc kubenswrapper[4558]: I0120 17:55:54.799572 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="7ba8f8c1-7947-4326-b7d2-49f89d248c21" containerName="proxy-httpd" Jan 20 17:55:54 crc kubenswrapper[4558]: I0120 17:55:54.799616 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="7ba8f8c1-7947-4326-b7d2-49f89d248c21" containerName="ceilometer-central-agent" Jan 20 17:55:54 crc kubenswrapper[4558]: I0120 17:55:54.801055 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:55:54 crc kubenswrapper[4558]: I0120 17:55:54.801295 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:55:54 crc kubenswrapper[4558]: I0120 17:55:54.801342 4558 scope.go:117] "RemoveContainer" containerID="abd9f6fd38ccd715c9ead3e1fb12588259945bf444bcd60cc90c24693ed32591" Jan 20 17:55:54 crc kubenswrapper[4558]: I0120 17:55:54.803879 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:55:54 crc kubenswrapper[4558]: I0120 17:55:54.804219 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:55:54 crc kubenswrapper[4558]: I0120 17:55:54.833925 4558 scope.go:117] "RemoveContainer" containerID="3a9bd65fc6a62f8e31388f25a7802f1d87ccd3138db1d4de87aa87e1de385f54" Jan 20 17:55:54 crc kubenswrapper[4558]: I0120 17:55:54.855417 4558 scope.go:117] "RemoveContainer" containerID="8c5549e3c402e532f1c102abe82c1184d17474896e83aaa32a03381dda0c78c7" Jan 20 17:55:54 crc kubenswrapper[4558]: E0120 17:55:54.855826 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8c5549e3c402e532f1c102abe82c1184d17474896e83aaa32a03381dda0c78c7\": container with ID starting with 8c5549e3c402e532f1c102abe82c1184d17474896e83aaa32a03381dda0c78c7 not found: ID does not exist" containerID="8c5549e3c402e532f1c102abe82c1184d17474896e83aaa32a03381dda0c78c7" Jan 20 17:55:54 crc kubenswrapper[4558]: I0120 17:55:54.855863 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8c5549e3c402e532f1c102abe82c1184d17474896e83aaa32a03381dda0c78c7"} err="failed to get container status \"8c5549e3c402e532f1c102abe82c1184d17474896e83aaa32a03381dda0c78c7\": rpc error: code = NotFound desc = could not find container \"8c5549e3c402e532f1c102abe82c1184d17474896e83aaa32a03381dda0c78c7\": container with ID starting with 8c5549e3c402e532f1c102abe82c1184d17474896e83aaa32a03381dda0c78c7 not found: ID does not exist" Jan 20 17:55:54 crc kubenswrapper[4558]: I0120 17:55:54.855891 4558 scope.go:117] "RemoveContainer" containerID="79d692f8937fead706fe3763c908005975158c1dc08e3d95aa08c9cf976b6e89" Jan 20 17:55:54 crc kubenswrapper[4558]: E0120 17:55:54.856300 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"79d692f8937fead706fe3763c908005975158c1dc08e3d95aa08c9cf976b6e89\": container with ID starting with 79d692f8937fead706fe3763c908005975158c1dc08e3d95aa08c9cf976b6e89 not found: ID does not exist" containerID="79d692f8937fead706fe3763c908005975158c1dc08e3d95aa08c9cf976b6e89" Jan 20 17:55:54 crc kubenswrapper[4558]: I0120 17:55:54.856329 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"79d692f8937fead706fe3763c908005975158c1dc08e3d95aa08c9cf976b6e89"} err="failed to get container status \"79d692f8937fead706fe3763c908005975158c1dc08e3d95aa08c9cf976b6e89\": rpc error: code = NotFound desc = could not find container \"79d692f8937fead706fe3763c908005975158c1dc08e3d95aa08c9cf976b6e89\": container with ID starting with 79d692f8937fead706fe3763c908005975158c1dc08e3d95aa08c9cf976b6e89 not found: ID does not exist" Jan 20 17:55:54 crc kubenswrapper[4558]: I0120 17:55:54.856345 4558 scope.go:117] "RemoveContainer" containerID="abd9f6fd38ccd715c9ead3e1fb12588259945bf444bcd60cc90c24693ed32591" Jan 20 17:55:54 crc kubenswrapper[4558]: E0120 17:55:54.856725 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"abd9f6fd38ccd715c9ead3e1fb12588259945bf444bcd60cc90c24693ed32591\": container with ID starting with abd9f6fd38ccd715c9ead3e1fb12588259945bf444bcd60cc90c24693ed32591 not found: ID does not exist" containerID="abd9f6fd38ccd715c9ead3e1fb12588259945bf444bcd60cc90c24693ed32591" Jan 20 17:55:54 crc kubenswrapper[4558]: I0120 17:55:54.856750 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"abd9f6fd38ccd715c9ead3e1fb12588259945bf444bcd60cc90c24693ed32591"} err="failed to get container status \"abd9f6fd38ccd715c9ead3e1fb12588259945bf444bcd60cc90c24693ed32591\": rpc error: code = NotFound desc = could not find container \"abd9f6fd38ccd715c9ead3e1fb12588259945bf444bcd60cc90c24693ed32591\": container with ID starting with abd9f6fd38ccd715c9ead3e1fb12588259945bf444bcd60cc90c24693ed32591 not found: ID does not exist" Jan 20 17:55:54 crc kubenswrapper[4558]: I0120 17:55:54.856764 4558 scope.go:117] "RemoveContainer" containerID="3a9bd65fc6a62f8e31388f25a7802f1d87ccd3138db1d4de87aa87e1de385f54" Jan 20 17:55:54 crc kubenswrapper[4558]: E0120 17:55:54.857147 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3a9bd65fc6a62f8e31388f25a7802f1d87ccd3138db1d4de87aa87e1de385f54\": container with ID starting with 3a9bd65fc6a62f8e31388f25a7802f1d87ccd3138db1d4de87aa87e1de385f54 not found: ID does not exist" containerID="3a9bd65fc6a62f8e31388f25a7802f1d87ccd3138db1d4de87aa87e1de385f54" Jan 20 17:55:54 crc kubenswrapper[4558]: I0120 17:55:54.857188 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3a9bd65fc6a62f8e31388f25a7802f1d87ccd3138db1d4de87aa87e1de385f54"} err="failed to get container status \"3a9bd65fc6a62f8e31388f25a7802f1d87ccd3138db1d4de87aa87e1de385f54\": rpc error: code = NotFound desc = could not find container \"3a9bd65fc6a62f8e31388f25a7802f1d87ccd3138db1d4de87aa87e1de385f54\": container with ID starting with 3a9bd65fc6a62f8e31388f25a7802f1d87ccd3138db1d4de87aa87e1de385f54 not found: ID does not exist" Jan 20 17:55:54 crc kubenswrapper[4558]: I0120 17:55:54.857203 4558 scope.go:117] "RemoveContainer" containerID="8c5549e3c402e532f1c102abe82c1184d17474896e83aaa32a03381dda0c78c7" Jan 20 17:55:54 crc 
kubenswrapper[4558]: I0120 17:55:54.857626 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8c5549e3c402e532f1c102abe82c1184d17474896e83aaa32a03381dda0c78c7"} err="failed to get container status \"8c5549e3c402e532f1c102abe82c1184d17474896e83aaa32a03381dda0c78c7\": rpc error: code = NotFound desc = could not find container \"8c5549e3c402e532f1c102abe82c1184d17474896e83aaa32a03381dda0c78c7\": container with ID starting with 8c5549e3c402e532f1c102abe82c1184d17474896e83aaa32a03381dda0c78c7 not found: ID does not exist" Jan 20 17:55:54 crc kubenswrapper[4558]: I0120 17:55:54.857674 4558 scope.go:117] "RemoveContainer" containerID="79d692f8937fead706fe3763c908005975158c1dc08e3d95aa08c9cf976b6e89" Jan 20 17:55:54 crc kubenswrapper[4558]: I0120 17:55:54.858044 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"79d692f8937fead706fe3763c908005975158c1dc08e3d95aa08c9cf976b6e89"} err="failed to get container status \"79d692f8937fead706fe3763c908005975158c1dc08e3d95aa08c9cf976b6e89\": rpc error: code = NotFound desc = could not find container \"79d692f8937fead706fe3763c908005975158c1dc08e3d95aa08c9cf976b6e89\": container with ID starting with 79d692f8937fead706fe3763c908005975158c1dc08e3d95aa08c9cf976b6e89 not found: ID does not exist" Jan 20 17:55:54 crc kubenswrapper[4558]: I0120 17:55:54.858082 4558 scope.go:117] "RemoveContainer" containerID="abd9f6fd38ccd715c9ead3e1fb12588259945bf444bcd60cc90c24693ed32591" Jan 20 17:55:54 crc kubenswrapper[4558]: I0120 17:55:54.858465 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"abd9f6fd38ccd715c9ead3e1fb12588259945bf444bcd60cc90c24693ed32591"} err="failed to get container status \"abd9f6fd38ccd715c9ead3e1fb12588259945bf444bcd60cc90c24693ed32591\": rpc error: code = NotFound desc = could not find container \"abd9f6fd38ccd715c9ead3e1fb12588259945bf444bcd60cc90c24693ed32591\": container with ID starting with abd9f6fd38ccd715c9ead3e1fb12588259945bf444bcd60cc90c24693ed32591 not found: ID does not exist" Jan 20 17:55:54 crc kubenswrapper[4558]: I0120 17:55:54.858491 4558 scope.go:117] "RemoveContainer" containerID="3a9bd65fc6a62f8e31388f25a7802f1d87ccd3138db1d4de87aa87e1de385f54" Jan 20 17:55:54 crc kubenswrapper[4558]: I0120 17:55:54.858838 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3a9bd65fc6a62f8e31388f25a7802f1d87ccd3138db1d4de87aa87e1de385f54"} err="failed to get container status \"3a9bd65fc6a62f8e31388f25a7802f1d87ccd3138db1d4de87aa87e1de385f54\": rpc error: code = NotFound desc = could not find container \"3a9bd65fc6a62f8e31388f25a7802f1d87ccd3138db1d4de87aa87e1de385f54\": container with ID starting with 3a9bd65fc6a62f8e31388f25a7802f1d87ccd3138db1d4de87aa87e1de385f54 not found: ID does not exist" Jan 20 17:55:54 crc kubenswrapper[4558]: I0120 17:55:54.966350 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65701817-3f49-4895-b777-25a5d42388ff-config-data\") pod \"ceilometer-0\" (UID: \"65701817-3f49-4895-b777-25a5d42388ff\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:55:54 crc kubenswrapper[4558]: I0120 17:55:54.966453 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/65701817-3f49-4895-b777-25a5d42388ff-scripts\") pod \"ceilometer-0\" (UID: 
\"65701817-3f49-4895-b777-25a5d42388ff\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:55:54 crc kubenswrapper[4558]: I0120 17:55:54.966474 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/65701817-3f49-4895-b777-25a5d42388ff-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"65701817-3f49-4895-b777-25a5d42388ff\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:55:54 crc kubenswrapper[4558]: I0120 17:55:54.966514 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/65701817-3f49-4895-b777-25a5d42388ff-run-httpd\") pod \"ceilometer-0\" (UID: \"65701817-3f49-4895-b777-25a5d42388ff\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:55:54 crc kubenswrapper[4558]: I0120 17:55:54.966772 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jvppg\" (UniqueName: \"kubernetes.io/projected/65701817-3f49-4895-b777-25a5d42388ff-kube-api-access-jvppg\") pod \"ceilometer-0\" (UID: \"65701817-3f49-4895-b777-25a5d42388ff\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:55:54 crc kubenswrapper[4558]: I0120 17:55:54.966817 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/65701817-3f49-4895-b777-25a5d42388ff-log-httpd\") pod \"ceilometer-0\" (UID: \"65701817-3f49-4895-b777-25a5d42388ff\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:55:54 crc kubenswrapper[4558]: I0120 17:55:54.966892 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65701817-3f49-4895-b777-25a5d42388ff-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"65701817-3f49-4895-b777-25a5d42388ff\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:55:55 crc kubenswrapper[4558]: I0120 17:55:55.071186 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/65701817-3f49-4895-b777-25a5d42388ff-scripts\") pod \"ceilometer-0\" (UID: \"65701817-3f49-4895-b777-25a5d42388ff\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:55:55 crc kubenswrapper[4558]: I0120 17:55:55.071244 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/65701817-3f49-4895-b777-25a5d42388ff-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"65701817-3f49-4895-b777-25a5d42388ff\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:55:55 crc kubenswrapper[4558]: I0120 17:55:55.071303 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/65701817-3f49-4895-b777-25a5d42388ff-run-httpd\") pod \"ceilometer-0\" (UID: \"65701817-3f49-4895-b777-25a5d42388ff\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:55:55 crc kubenswrapper[4558]: I0120 17:55:55.071426 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jvppg\" (UniqueName: \"kubernetes.io/projected/65701817-3f49-4895-b777-25a5d42388ff-kube-api-access-jvppg\") pod \"ceilometer-0\" (UID: \"65701817-3f49-4895-b777-25a5d42388ff\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:55:55 crc kubenswrapper[4558]: I0120 
17:55:55.071455 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/65701817-3f49-4895-b777-25a5d42388ff-log-httpd\") pod \"ceilometer-0\" (UID: \"65701817-3f49-4895-b777-25a5d42388ff\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:55:55 crc kubenswrapper[4558]: I0120 17:55:55.071505 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65701817-3f49-4895-b777-25a5d42388ff-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"65701817-3f49-4895-b777-25a5d42388ff\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:55:55 crc kubenswrapper[4558]: I0120 17:55:55.071668 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65701817-3f49-4895-b777-25a5d42388ff-config-data\") pod \"ceilometer-0\" (UID: \"65701817-3f49-4895-b777-25a5d42388ff\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:55:55 crc kubenswrapper[4558]: I0120 17:55:55.076969 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/65701817-3f49-4895-b777-25a5d42388ff-run-httpd\") pod \"ceilometer-0\" (UID: \"65701817-3f49-4895-b777-25a5d42388ff\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:55:55 crc kubenswrapper[4558]: I0120 17:55:55.077691 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/65701817-3f49-4895-b777-25a5d42388ff-log-httpd\") pod \"ceilometer-0\" (UID: \"65701817-3f49-4895-b777-25a5d42388ff\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:55:55 crc kubenswrapper[4558]: I0120 17:55:55.081815 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/65701817-3f49-4895-b777-25a5d42388ff-scripts\") pod \"ceilometer-0\" (UID: \"65701817-3f49-4895-b777-25a5d42388ff\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:55:55 crc kubenswrapper[4558]: I0120 17:55:55.094963 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/65701817-3f49-4895-b777-25a5d42388ff-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"65701817-3f49-4895-b777-25a5d42388ff\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:55:55 crc kubenswrapper[4558]: I0120 17:55:55.095046 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65701817-3f49-4895-b777-25a5d42388ff-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"65701817-3f49-4895-b777-25a5d42388ff\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:55:55 crc kubenswrapper[4558]: I0120 17:55:55.101908 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65701817-3f49-4895-b777-25a5d42388ff-config-data\") pod \"ceilometer-0\" (UID: \"65701817-3f49-4895-b777-25a5d42388ff\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:55:55 crc kubenswrapper[4558]: I0120 17:55:55.120790 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jvppg\" (UniqueName: \"kubernetes.io/projected/65701817-3f49-4895-b777-25a5d42388ff-kube-api-access-jvppg\") pod \"ceilometer-0\" (UID: \"65701817-3f49-4895-b777-25a5d42388ff\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 
20 17:55:55 crc kubenswrapper[4558]: I0120 17:55:55.285696 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:55:55 crc kubenswrapper[4558]: I0120 17:55:55.286688 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:55:55 crc kubenswrapper[4558]: I0120 17:55:55.687404 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:55:55 crc kubenswrapper[4558]: I0120 17:55:55.758320 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"65701817-3f49-4895-b777-25a5d42388ff","Type":"ContainerStarted","Data":"6d0521261f48b9734b884b677ce6d163da3bbb746819aa4a91b54b7430c0a2da"} Jan 20 17:55:55 crc kubenswrapper[4558]: I0120 17:55:55.988871 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-db-create-4n4vn"] Jan 20 17:55:55 crc kubenswrapper[4558]: I0120 17:55:55.990016 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-db-create-4n4vn" Jan 20 17:55:55 crc kubenswrapper[4558]: I0120 17:55:55.997956 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-db-create-4n4vn"] Jan 20 17:55:56 crc kubenswrapper[4558]: I0120 17:55:56.044207 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-42d5-account-create-update-ndxkx"] Jan 20 17:55:56 crc kubenswrapper[4558]: I0120 17:55:56.045408 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-42d5-account-create-update-ndxkx" Jan 20 17:55:56 crc kubenswrapper[4558]: I0120 17:55:56.049799 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-api-db-secret" Jan 20 17:55:56 crc kubenswrapper[4558]: I0120 17:55:56.050893 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell0-db-create-f9xfm"] Jan 20 17:55:56 crc kubenswrapper[4558]: I0120 17:55:56.051799 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-db-create-f9xfm" Jan 20 17:55:56 crc kubenswrapper[4558]: I0120 17:55:56.061550 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-42d5-account-create-update-ndxkx"] Jan 20 17:55:56 crc kubenswrapper[4558]: I0120 17:55:56.065523 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-db-create-f9xfm"] Jan 20 17:55:56 crc kubenswrapper[4558]: I0120 17:55:56.091932 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1e64325d-9808-497f-8889-6658129c5da8-operator-scripts\") pod \"nova-api-db-create-4n4vn\" (UID: \"1e64325d-9808-497f-8889-6658129c5da8\") " pod="openstack-kuttl-tests/nova-api-db-create-4n4vn" Jan 20 17:55:56 crc kubenswrapper[4558]: I0120 17:55:56.092024 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5k5z7\" (UniqueName: \"kubernetes.io/projected/1e64325d-9808-497f-8889-6658129c5da8-kube-api-access-5k5z7\") pod \"nova-api-db-create-4n4vn\" (UID: \"1e64325d-9808-497f-8889-6658129c5da8\") " pod="openstack-kuttl-tests/nova-api-db-create-4n4vn" Jan 20 17:55:56 crc kubenswrapper[4558]: I0120 17:55:56.185816 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-db-create-rmbrd"] Jan 20 17:55:56 crc kubenswrapper[4558]: I0120 17:55:56.187008 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-db-create-rmbrd" Jan 20 17:55:56 crc kubenswrapper[4558]: I0120 17:55:56.193694 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x9vnx\" (UniqueName: \"kubernetes.io/projected/93caafe5-244a-422b-9fd2-7c02979af792-kube-api-access-x9vnx\") pod \"nova-api-42d5-account-create-update-ndxkx\" (UID: \"93caafe5-244a-422b-9fd2-7c02979af792\") " pod="openstack-kuttl-tests/nova-api-42d5-account-create-update-ndxkx" Jan 20 17:55:56 crc kubenswrapper[4558]: I0120 17:55:56.193742 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sw9kv\" (UniqueName: \"kubernetes.io/projected/78a5052e-9643-4fe6-b908-b6e36974350a-kube-api-access-sw9kv\") pod \"nova-cell0-db-create-f9xfm\" (UID: \"78a5052e-9643-4fe6-b908-b6e36974350a\") " pod="openstack-kuttl-tests/nova-cell0-db-create-f9xfm" Jan 20 17:55:56 crc kubenswrapper[4558]: I0120 17:55:56.193771 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/93caafe5-244a-422b-9fd2-7c02979af792-operator-scripts\") pod \"nova-api-42d5-account-create-update-ndxkx\" (UID: \"93caafe5-244a-422b-9fd2-7c02979af792\") " pod="openstack-kuttl-tests/nova-api-42d5-account-create-update-ndxkx" Jan 20 17:55:56 crc kubenswrapper[4558]: I0120 17:55:56.194029 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1e64325d-9808-497f-8889-6658129c5da8-operator-scripts\") pod \"nova-api-db-create-4n4vn\" (UID: \"1e64325d-9808-497f-8889-6658129c5da8\") " pod="openstack-kuttl-tests/nova-api-db-create-4n4vn" Jan 20 17:55:56 crc kubenswrapper[4558]: I0120 17:55:56.194153 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/78a5052e-9643-4fe6-b908-b6e36974350a-operator-scripts\") pod \"nova-cell0-db-create-f9xfm\" (UID: \"78a5052e-9643-4fe6-b908-b6e36974350a\") " pod="openstack-kuttl-tests/nova-cell0-db-create-f9xfm" Jan 20 17:55:56 crc kubenswrapper[4558]: I0120 17:55:56.194434 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5k5z7\" (UniqueName: \"kubernetes.io/projected/1e64325d-9808-497f-8889-6658129c5da8-kube-api-access-5k5z7\") pod \"nova-api-db-create-4n4vn\" (UID: \"1e64325d-9808-497f-8889-6658129c5da8\") " pod="openstack-kuttl-tests/nova-api-db-create-4n4vn" Jan 20 17:55:56 crc kubenswrapper[4558]: I0120 17:55:56.194786 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1e64325d-9808-497f-8889-6658129c5da8-operator-scripts\") pod \"nova-api-db-create-4n4vn\" (UID: \"1e64325d-9808-497f-8889-6658129c5da8\") " pod="openstack-kuttl-tests/nova-api-db-create-4n4vn" Jan 20 17:55:56 crc kubenswrapper[4558]: I0120 17:55:56.201313 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-db-create-rmbrd"] Jan 20 17:55:56 crc kubenswrapper[4558]: I0120 17:55:56.214005 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell0-5191-account-create-update-t744c"] Jan 20 17:55:56 crc kubenswrapper[4558]: I0120 17:55:56.215360 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-5191-account-create-update-t744c" Jan 20 17:55:56 crc kubenswrapper[4558]: I0120 17:55:56.215367 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5k5z7\" (UniqueName: \"kubernetes.io/projected/1e64325d-9808-497f-8889-6658129c5da8-kube-api-access-5k5z7\") pod \"nova-api-db-create-4n4vn\" (UID: \"1e64325d-9808-497f-8889-6658129c5da8\") " pod="openstack-kuttl-tests/nova-api-db-create-4n4vn" Jan 20 17:55:56 crc kubenswrapper[4558]: I0120 17:55:56.218469 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-db-secret" Jan 20 17:55:56 crc kubenswrapper[4558]: I0120 17:55:56.238376 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-5191-account-create-update-t744c"] Jan 20 17:55:56 crc kubenswrapper[4558]: I0120 17:55:56.296855 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jvhq2\" (UniqueName: \"kubernetes.io/projected/29fef75f-d977-4359-bb82-7fe5807625cc-kube-api-access-jvhq2\") pod \"nova-cell1-db-create-rmbrd\" (UID: \"29fef75f-d977-4359-bb82-7fe5807625cc\") " pod="openstack-kuttl-tests/nova-cell1-db-create-rmbrd" Jan 20 17:55:56 crc kubenswrapper[4558]: I0120 17:55:56.296934 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x9vnx\" (UniqueName: \"kubernetes.io/projected/93caafe5-244a-422b-9fd2-7c02979af792-kube-api-access-x9vnx\") pod \"nova-api-42d5-account-create-update-ndxkx\" (UID: \"93caafe5-244a-422b-9fd2-7c02979af792\") " pod="openstack-kuttl-tests/nova-api-42d5-account-create-update-ndxkx" Jan 20 17:55:56 crc kubenswrapper[4558]: I0120 17:55:56.296965 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sw9kv\" (UniqueName: \"kubernetes.io/projected/78a5052e-9643-4fe6-b908-b6e36974350a-kube-api-access-sw9kv\") pod 
\"nova-cell0-db-create-f9xfm\" (UID: \"78a5052e-9643-4fe6-b908-b6e36974350a\") " pod="openstack-kuttl-tests/nova-cell0-db-create-f9xfm" Jan 20 17:55:56 crc kubenswrapper[4558]: I0120 17:55:56.296997 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/93caafe5-244a-422b-9fd2-7c02979af792-operator-scripts\") pod \"nova-api-42d5-account-create-update-ndxkx\" (UID: \"93caafe5-244a-422b-9fd2-7c02979af792\") " pod="openstack-kuttl-tests/nova-api-42d5-account-create-update-ndxkx" Jan 20 17:55:56 crc kubenswrapper[4558]: I0120 17:55:56.297073 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/78a5052e-9643-4fe6-b908-b6e36974350a-operator-scripts\") pod \"nova-cell0-db-create-f9xfm\" (UID: \"78a5052e-9643-4fe6-b908-b6e36974350a\") " pod="openstack-kuttl-tests/nova-cell0-db-create-f9xfm" Jan 20 17:55:56 crc kubenswrapper[4558]: I0120 17:55:56.297119 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/29fef75f-d977-4359-bb82-7fe5807625cc-operator-scripts\") pod \"nova-cell1-db-create-rmbrd\" (UID: \"29fef75f-d977-4359-bb82-7fe5807625cc\") " pod="openstack-kuttl-tests/nova-cell1-db-create-rmbrd" Jan 20 17:55:56 crc kubenswrapper[4558]: I0120 17:55:56.298331 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/93caafe5-244a-422b-9fd2-7c02979af792-operator-scripts\") pod \"nova-api-42d5-account-create-update-ndxkx\" (UID: \"93caafe5-244a-422b-9fd2-7c02979af792\") " pod="openstack-kuttl-tests/nova-api-42d5-account-create-update-ndxkx" Jan 20 17:55:56 crc kubenswrapper[4558]: I0120 17:55:56.298503 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/78a5052e-9643-4fe6-b908-b6e36974350a-operator-scripts\") pod \"nova-cell0-db-create-f9xfm\" (UID: \"78a5052e-9643-4fe6-b908-b6e36974350a\") " pod="openstack-kuttl-tests/nova-cell0-db-create-f9xfm" Jan 20 17:55:56 crc kubenswrapper[4558]: I0120 17:55:56.307661 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-db-create-4n4vn" Jan 20 17:55:56 crc kubenswrapper[4558]: I0120 17:55:56.313098 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sw9kv\" (UniqueName: \"kubernetes.io/projected/78a5052e-9643-4fe6-b908-b6e36974350a-kube-api-access-sw9kv\") pod \"nova-cell0-db-create-f9xfm\" (UID: \"78a5052e-9643-4fe6-b908-b6e36974350a\") " pod="openstack-kuttl-tests/nova-cell0-db-create-f9xfm" Jan 20 17:55:56 crc kubenswrapper[4558]: I0120 17:55:56.314365 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x9vnx\" (UniqueName: \"kubernetes.io/projected/93caafe5-244a-422b-9fd2-7c02979af792-kube-api-access-x9vnx\") pod \"nova-api-42d5-account-create-update-ndxkx\" (UID: \"93caafe5-244a-422b-9fd2-7c02979af792\") " pod="openstack-kuttl-tests/nova-api-42d5-account-create-update-ndxkx" Jan 20 17:55:56 crc kubenswrapper[4558]: I0120 17:55:56.361836 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-42d5-account-create-update-ndxkx" Jan 20 17:55:56 crc kubenswrapper[4558]: I0120 17:55:56.371031 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-db-create-f9xfm" Jan 20 17:55:56 crc kubenswrapper[4558]: I0120 17:55:56.398796 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jvhq2\" (UniqueName: \"kubernetes.io/projected/29fef75f-d977-4359-bb82-7fe5807625cc-kube-api-access-jvhq2\") pod \"nova-cell1-db-create-rmbrd\" (UID: \"29fef75f-d977-4359-bb82-7fe5807625cc\") " pod="openstack-kuttl-tests/nova-cell1-db-create-rmbrd" Jan 20 17:55:56 crc kubenswrapper[4558]: I0120 17:55:56.398930 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vqjgn\" (UniqueName: \"kubernetes.io/projected/492f23ec-9869-4bd8-a6ac-b0643d668a3b-kube-api-access-vqjgn\") pod \"nova-cell0-5191-account-create-update-t744c\" (UID: \"492f23ec-9869-4bd8-a6ac-b0643d668a3b\") " pod="openstack-kuttl-tests/nova-cell0-5191-account-create-update-t744c" Jan 20 17:55:56 crc kubenswrapper[4558]: I0120 17:55:56.398979 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/29fef75f-d977-4359-bb82-7fe5807625cc-operator-scripts\") pod \"nova-cell1-db-create-rmbrd\" (UID: \"29fef75f-d977-4359-bb82-7fe5807625cc\") " pod="openstack-kuttl-tests/nova-cell1-db-create-rmbrd" Jan 20 17:55:56 crc kubenswrapper[4558]: I0120 17:55:56.398996 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/492f23ec-9869-4bd8-a6ac-b0643d668a3b-operator-scripts\") pod \"nova-cell0-5191-account-create-update-t744c\" (UID: \"492f23ec-9869-4bd8-a6ac-b0643d668a3b\") " pod="openstack-kuttl-tests/nova-cell0-5191-account-create-update-t744c" Jan 20 17:55:56 crc kubenswrapper[4558]: I0120 17:55:56.400274 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/29fef75f-d977-4359-bb82-7fe5807625cc-operator-scripts\") pod \"nova-cell1-db-create-rmbrd\" (UID: \"29fef75f-d977-4359-bb82-7fe5807625cc\") " pod="openstack-kuttl-tests/nova-cell1-db-create-rmbrd" Jan 20 17:55:56 crc kubenswrapper[4558]: I0120 17:55:56.405091 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-f06e-account-create-update-8mj9b"] Jan 20 17:55:56 crc kubenswrapper[4558]: I0120 17:55:56.406537 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-f06e-account-create-update-8mj9b" Jan 20 17:55:56 crc kubenswrapper[4558]: I0120 17:55:56.410265 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-db-secret" Jan 20 17:55:56 crc kubenswrapper[4558]: I0120 17:55:56.415361 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jvhq2\" (UniqueName: \"kubernetes.io/projected/29fef75f-d977-4359-bb82-7fe5807625cc-kube-api-access-jvhq2\") pod \"nova-cell1-db-create-rmbrd\" (UID: \"29fef75f-d977-4359-bb82-7fe5807625cc\") " pod="openstack-kuttl-tests/nova-cell1-db-create-rmbrd" Jan 20 17:55:56 crc kubenswrapper[4558]: I0120 17:55:56.426376 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-f06e-account-create-update-8mj9b"] Jan 20 17:55:56 crc kubenswrapper[4558]: I0120 17:55:56.500742 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-db-create-rmbrd" Jan 20 17:55:56 crc kubenswrapper[4558]: I0120 17:55:56.502450 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vqjgn\" (UniqueName: \"kubernetes.io/projected/492f23ec-9869-4bd8-a6ac-b0643d668a3b-kube-api-access-vqjgn\") pod \"nova-cell0-5191-account-create-update-t744c\" (UID: \"492f23ec-9869-4bd8-a6ac-b0643d668a3b\") " pod="openstack-kuttl-tests/nova-cell0-5191-account-create-update-t744c" Jan 20 17:55:56 crc kubenswrapper[4558]: I0120 17:55:56.502511 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/492f23ec-9869-4bd8-a6ac-b0643d668a3b-operator-scripts\") pod \"nova-cell0-5191-account-create-update-t744c\" (UID: \"492f23ec-9869-4bd8-a6ac-b0643d668a3b\") " pod="openstack-kuttl-tests/nova-cell0-5191-account-create-update-t744c" Jan 20 17:55:56 crc kubenswrapper[4558]: I0120 17:55:56.502631 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hdhtp\" (UniqueName: \"kubernetes.io/projected/a698db22-7d59-4bc1-94fd-9f7b86601bb6-kube-api-access-hdhtp\") pod \"nova-cell1-f06e-account-create-update-8mj9b\" (UID: \"a698db22-7d59-4bc1-94fd-9f7b86601bb6\") " pod="openstack-kuttl-tests/nova-cell1-f06e-account-create-update-8mj9b" Jan 20 17:55:56 crc kubenswrapper[4558]: I0120 17:55:56.502679 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a698db22-7d59-4bc1-94fd-9f7b86601bb6-operator-scripts\") pod \"nova-cell1-f06e-account-create-update-8mj9b\" (UID: \"a698db22-7d59-4bc1-94fd-9f7b86601bb6\") " pod="openstack-kuttl-tests/nova-cell1-f06e-account-create-update-8mj9b" Jan 20 17:55:56 crc kubenswrapper[4558]: I0120 17:55:56.504188 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/492f23ec-9869-4bd8-a6ac-b0643d668a3b-operator-scripts\") pod \"nova-cell0-5191-account-create-update-t744c\" (UID: \"492f23ec-9869-4bd8-a6ac-b0643d668a3b\") " pod="openstack-kuttl-tests/nova-cell0-5191-account-create-update-t744c" Jan 20 17:55:56 crc kubenswrapper[4558]: I0120 17:55:56.520644 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vqjgn\" (UniqueName: \"kubernetes.io/projected/492f23ec-9869-4bd8-a6ac-b0643d668a3b-kube-api-access-vqjgn\") pod \"nova-cell0-5191-account-create-update-t744c\" (UID: \"492f23ec-9869-4bd8-a6ac-b0643d668a3b\") " pod="openstack-kuttl-tests/nova-cell0-5191-account-create-update-t744c" Jan 20 17:55:56 crc kubenswrapper[4558]: I0120 17:55:56.571011 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-5191-account-create-update-t744c" Jan 20 17:55:56 crc kubenswrapper[4558]: I0120 17:55:56.582703 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7ba8f8c1-7947-4326-b7d2-49f89d248c21" path="/var/lib/kubelet/pods/7ba8f8c1-7947-4326-b7d2-49f89d248c21/volumes" Jan 20 17:55:56 crc kubenswrapper[4558]: I0120 17:55:56.605152 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hdhtp\" (UniqueName: \"kubernetes.io/projected/a698db22-7d59-4bc1-94fd-9f7b86601bb6-kube-api-access-hdhtp\") pod \"nova-cell1-f06e-account-create-update-8mj9b\" (UID: \"a698db22-7d59-4bc1-94fd-9f7b86601bb6\") " pod="openstack-kuttl-tests/nova-cell1-f06e-account-create-update-8mj9b" Jan 20 17:55:56 crc kubenswrapper[4558]: I0120 17:55:56.605274 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a698db22-7d59-4bc1-94fd-9f7b86601bb6-operator-scripts\") pod \"nova-cell1-f06e-account-create-update-8mj9b\" (UID: \"a698db22-7d59-4bc1-94fd-9f7b86601bb6\") " pod="openstack-kuttl-tests/nova-cell1-f06e-account-create-update-8mj9b" Jan 20 17:55:56 crc kubenswrapper[4558]: I0120 17:55:56.605981 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a698db22-7d59-4bc1-94fd-9f7b86601bb6-operator-scripts\") pod \"nova-cell1-f06e-account-create-update-8mj9b\" (UID: \"a698db22-7d59-4bc1-94fd-9f7b86601bb6\") " pod="openstack-kuttl-tests/nova-cell1-f06e-account-create-update-8mj9b" Jan 20 17:55:56 crc kubenswrapper[4558]: I0120 17:55:56.627608 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hdhtp\" (UniqueName: \"kubernetes.io/projected/a698db22-7d59-4bc1-94fd-9f7b86601bb6-kube-api-access-hdhtp\") pod \"nova-cell1-f06e-account-create-update-8mj9b\" (UID: \"a698db22-7d59-4bc1-94fd-9f7b86601bb6\") " pod="openstack-kuttl-tests/nova-cell1-f06e-account-create-update-8mj9b" Jan 20 17:55:56 crc kubenswrapper[4558]: I0120 17:55:56.766207 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-db-create-4n4vn"] Jan 20 17:55:56 crc kubenswrapper[4558]: I0120 17:55:56.778485 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"65701817-3f49-4895-b777-25a5d42388ff","Type":"ContainerStarted","Data":"0ea9d4cc95d283ecb2592497db81efa86f8ba9c94d80f001fd60080c03240620"} Jan 20 17:55:56 crc kubenswrapper[4558]: I0120 17:55:56.781735 4558 generic.go:334] "Generic (PLEG): container finished" podID="b637458a-f8f6-456e-888f-3d61734be420" containerID="aa3f685bf49ef2fddab92d64cfaccff086ad8c1a707e26911bd2c12773aaacf3" exitCode=0 Jan 20 17:55:56 crc kubenswrapper[4558]: I0120 17:55:56.781769 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-64ffc476d5-9zjgb" event={"ID":"b637458a-f8f6-456e-888f-3d61734be420","Type":"ContainerDied","Data":"aa3f685bf49ef2fddab92d64cfaccff086ad8c1a707e26911bd2c12773aaacf3"} Jan 20 17:55:56 crc kubenswrapper[4558]: I0120 17:55:56.808314 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-f06e-account-create-update-8mj9b" Jan 20 17:55:56 crc kubenswrapper[4558]: I0120 17:55:56.888194 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-42d5-account-create-update-ndxkx"] Jan 20 17:55:56 crc kubenswrapper[4558]: W0120 17:55:56.908548 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod93caafe5_244a_422b_9fd2_7c02979af792.slice/crio-fa9f0cd8f3c90950ba9203bec2306de731e2456f928d8d1732b9240f85ac2532 WatchSource:0}: Error finding container fa9f0cd8f3c90950ba9203bec2306de731e2456f928d8d1732b9240f85ac2532: Status 404 returned error can't find the container with id fa9f0cd8f3c90950ba9203bec2306de731e2456f928d8d1732b9240f85ac2532 Jan 20 17:55:57 crc kubenswrapper[4558]: I0120 17:55:57.006052 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-db-create-rmbrd"] Jan 20 17:55:57 crc kubenswrapper[4558]: I0120 17:55:57.020893 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-db-create-f9xfm"] Jan 20 17:55:57 crc kubenswrapper[4558]: I0120 17:55:57.030457 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-64ffc476d5-9zjgb" Jan 20 17:55:57 crc kubenswrapper[4558]: I0120 17:55:57.175742 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-5191-account-create-update-t744c"] Jan 20 17:55:57 crc kubenswrapper[4558]: I0120 17:55:57.218900 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/b637458a-f8f6-456e-888f-3d61734be420-httpd-config\") pod \"b637458a-f8f6-456e-888f-3d61734be420\" (UID: \"b637458a-f8f6-456e-888f-3d61734be420\") " Jan 20 17:55:57 crc kubenswrapper[4558]: I0120 17:55:57.219062 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b637458a-f8f6-456e-888f-3d61734be420-ovndb-tls-certs\") pod \"b637458a-f8f6-456e-888f-3d61734be420\" (UID: \"b637458a-f8f6-456e-888f-3d61734be420\") " Jan 20 17:55:57 crc kubenswrapper[4558]: I0120 17:55:57.219227 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j5bc4\" (UniqueName: \"kubernetes.io/projected/b637458a-f8f6-456e-888f-3d61734be420-kube-api-access-j5bc4\") pod \"b637458a-f8f6-456e-888f-3d61734be420\" (UID: \"b637458a-f8f6-456e-888f-3d61734be420\") " Jan 20 17:55:57 crc kubenswrapper[4558]: I0120 17:55:57.219318 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/b637458a-f8f6-456e-888f-3d61734be420-config\") pod \"b637458a-f8f6-456e-888f-3d61734be420\" (UID: \"b637458a-f8f6-456e-888f-3d61734be420\") " Jan 20 17:55:57 crc kubenswrapper[4558]: I0120 17:55:57.219590 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b637458a-f8f6-456e-888f-3d61734be420-combined-ca-bundle\") pod \"b637458a-f8f6-456e-888f-3d61734be420\" (UID: \"b637458a-f8f6-456e-888f-3d61734be420\") " Jan 20 17:55:57 crc kubenswrapper[4558]: I0120 17:55:57.228144 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b637458a-f8f6-456e-888f-3d61734be420-httpd-config" 
(OuterVolumeSpecName: "httpd-config") pod "b637458a-f8f6-456e-888f-3d61734be420" (UID: "b637458a-f8f6-456e-888f-3d61734be420"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:55:57 crc kubenswrapper[4558]: I0120 17:55:57.231110 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b637458a-f8f6-456e-888f-3d61734be420-kube-api-access-j5bc4" (OuterVolumeSpecName: "kube-api-access-j5bc4") pod "b637458a-f8f6-456e-888f-3d61734be420" (UID: "b637458a-f8f6-456e-888f-3d61734be420"). InnerVolumeSpecName "kube-api-access-j5bc4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:55:57 crc kubenswrapper[4558]: I0120 17:55:57.267870 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b637458a-f8f6-456e-888f-3d61734be420-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b637458a-f8f6-456e-888f-3d61734be420" (UID: "b637458a-f8f6-456e-888f-3d61734be420"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:55:57 crc kubenswrapper[4558]: I0120 17:55:57.276086 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b637458a-f8f6-456e-888f-3d61734be420-config" (OuterVolumeSpecName: "config") pod "b637458a-f8f6-456e-888f-3d61734be420" (UID: "b637458a-f8f6-456e-888f-3d61734be420"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:55:57 crc kubenswrapper[4558]: I0120 17:55:57.312327 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b637458a-f8f6-456e-888f-3d61734be420-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "b637458a-f8f6-456e-888f-3d61734be420" (UID: "b637458a-f8f6-456e-888f-3d61734be420"). InnerVolumeSpecName "ovndb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:55:57 crc kubenswrapper[4558]: I0120 17:55:57.324042 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/b637458a-f8f6-456e-888f-3d61734be420-httpd-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:57 crc kubenswrapper[4558]: I0120 17:55:57.324070 4558 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/b637458a-f8f6-456e-888f-3d61734be420-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:57 crc kubenswrapper[4558]: I0120 17:55:57.324084 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j5bc4\" (UniqueName: \"kubernetes.io/projected/b637458a-f8f6-456e-888f-3d61734be420-kube-api-access-j5bc4\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:57 crc kubenswrapper[4558]: I0120 17:55:57.324094 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/b637458a-f8f6-456e-888f-3d61734be420-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:57 crc kubenswrapper[4558]: I0120 17:55:57.324104 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b637458a-f8f6-456e-888f-3d61734be420-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:57 crc kubenswrapper[4558]: I0120 17:55:57.352655 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-f06e-account-create-update-8mj9b"] Jan 20 17:55:57 crc kubenswrapper[4558]: W0120 17:55:57.358826 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda698db22_7d59_4bc1_94fd_9f7b86601bb6.slice/crio-cd04b9b753a72ab42cff55f01f4acae87e78f0e3dde1e7a5f8cf7b9bbaceffa3 WatchSource:0}: Error finding container cd04b9b753a72ab42cff55f01f4acae87e78f0e3dde1e7a5f8cf7b9bbaceffa3: Status 404 returned error can't find the container with id cd04b9b753a72ab42cff55f01f4acae87e78f0e3dde1e7a5f8cf7b9bbaceffa3 Jan 20 17:55:57 crc kubenswrapper[4558]: I0120 17:55:57.365552 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/swift-proxy-77d4fcb95d-7jsvg" Jan 20 17:55:57 crc kubenswrapper[4558]: I0120 17:55:57.792879 4558 generic.go:334] "Generic (PLEG): container finished" podID="78a5052e-9643-4fe6-b908-b6e36974350a" containerID="444bb013e7c42b48e6d23336ee61f643f1067783c9c8c72f8068084f4d7cf163" exitCode=0 Jan 20 17:55:57 crc kubenswrapper[4558]: I0120 17:55:57.793516 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-db-create-f9xfm" event={"ID":"78a5052e-9643-4fe6-b908-b6e36974350a","Type":"ContainerDied","Data":"444bb013e7c42b48e6d23336ee61f643f1067783c9c8c72f8068084f4d7cf163"} Jan 20 17:55:57 crc kubenswrapper[4558]: I0120 17:55:57.793693 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-db-create-f9xfm" event={"ID":"78a5052e-9643-4fe6-b908-b6e36974350a","Type":"ContainerStarted","Data":"d118d71c8bfcca252da51705e80b33dc89f800c408413e923a47937b8514e205"} Jan 20 17:55:57 crc kubenswrapper[4558]: I0120 17:55:57.795512 4558 generic.go:334] "Generic (PLEG): container finished" podID="93caafe5-244a-422b-9fd2-7c02979af792" containerID="59d8c4e436c7f5a4aad847d81229a8d92faa6c515ebaf24787e41c7993f75693" exitCode=0 Jan 20 17:55:57 crc kubenswrapper[4558]: I0120 17:55:57.795582 4558 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-42d5-account-create-update-ndxkx" event={"ID":"93caafe5-244a-422b-9fd2-7c02979af792","Type":"ContainerDied","Data":"59d8c4e436c7f5a4aad847d81229a8d92faa6c515ebaf24787e41c7993f75693"} Jan 20 17:55:57 crc kubenswrapper[4558]: I0120 17:55:57.795638 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-42d5-account-create-update-ndxkx" event={"ID":"93caafe5-244a-422b-9fd2-7c02979af792","Type":"ContainerStarted","Data":"fa9f0cd8f3c90950ba9203bec2306de731e2456f928d8d1732b9240f85ac2532"} Jan 20 17:55:57 crc kubenswrapper[4558]: I0120 17:55:57.799457 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"65701817-3f49-4895-b777-25a5d42388ff","Type":"ContainerStarted","Data":"5fb72403cd6e4ff4fee6b73a71bd84f306595019c29f91d4dbf146e9c2b679e4"} Jan 20 17:55:57 crc kubenswrapper[4558]: I0120 17:55:57.800700 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-f06e-account-create-update-8mj9b" event={"ID":"a698db22-7d59-4bc1-94fd-9f7b86601bb6","Type":"ContainerStarted","Data":"d3d543ae948707353c576f305357f367d0aa01f73668bcc81ac19bcc7f5ea140"} Jan 20 17:55:57 crc kubenswrapper[4558]: I0120 17:55:57.800726 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-f06e-account-create-update-8mj9b" event={"ID":"a698db22-7d59-4bc1-94fd-9f7b86601bb6","Type":"ContainerStarted","Data":"cd04b9b753a72ab42cff55f01f4acae87e78f0e3dde1e7a5f8cf7b9bbaceffa3"} Jan 20 17:55:57 crc kubenswrapper[4558]: I0120 17:55:57.805940 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-5191-account-create-update-t744c" event={"ID":"492f23ec-9869-4bd8-a6ac-b0643d668a3b","Type":"ContainerStarted","Data":"c2c78b1f544d5f9cadbb72d37b5b668352e6d6879a5e657cfe4f7f30cab6e809"} Jan 20 17:55:57 crc kubenswrapper[4558]: I0120 17:55:57.805969 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-5191-account-create-update-t744c" event={"ID":"492f23ec-9869-4bd8-a6ac-b0643d668a3b","Type":"ContainerStarted","Data":"b58216bcab0202fe2d43788b897dfb603e8e405812855f0b5c2bce0249d2223f"} Jan 20 17:55:57 crc kubenswrapper[4558]: I0120 17:55:57.810241 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-64ffc476d5-9zjgb" event={"ID":"b637458a-f8f6-456e-888f-3d61734be420","Type":"ContainerDied","Data":"2a7a8fb4ffd1fa738d4d412796f871c7bff46cab77197775cf3d08b865d59e7c"} Jan 20 17:55:57 crc kubenswrapper[4558]: I0120 17:55:57.810278 4558 scope.go:117] "RemoveContainer" containerID="fe46b8f805f4f36bad1c42f2cb3cd2007879b8f0cad9c8dfc988eb7d2db5084f" Jan 20 17:55:57 crc kubenswrapper[4558]: I0120 17:55:57.810404 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-64ffc476d5-9zjgb" Jan 20 17:55:57 crc kubenswrapper[4558]: I0120 17:55:57.820373 4558 generic.go:334] "Generic (PLEG): container finished" podID="1e64325d-9808-497f-8889-6658129c5da8" containerID="92ba7827254ded61befb4ea8bef7d19312169445c2d52de93d43b8f0d521581e" exitCode=0 Jan 20 17:55:57 crc kubenswrapper[4558]: I0120 17:55:57.820432 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-db-create-4n4vn" event={"ID":"1e64325d-9808-497f-8889-6658129c5da8","Type":"ContainerDied","Data":"92ba7827254ded61befb4ea8bef7d19312169445c2d52de93d43b8f0d521581e"} Jan 20 17:55:57 crc kubenswrapper[4558]: I0120 17:55:57.820453 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-db-create-4n4vn" event={"ID":"1e64325d-9808-497f-8889-6658129c5da8","Type":"ContainerStarted","Data":"267698c2bc05390831c7995127c9934fb93bd320d8f219f402d3f70d7dc3b602"} Jan 20 17:55:57 crc kubenswrapper[4558]: I0120 17:55:57.826283 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell0-5191-account-create-update-t744c" podStartSLOduration=1.8262726900000001 podStartE2EDuration="1.82627269s" podCreationTimestamp="2026-01-20 17:55:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:55:57.823717406 +0000 UTC m=+4451.584055373" watchObservedRunningTime="2026-01-20 17:55:57.82627269 +0000 UTC m=+4451.586610657" Jan 20 17:55:57 crc kubenswrapper[4558]: I0120 17:55:57.830753 4558 generic.go:334] "Generic (PLEG): container finished" podID="29fef75f-d977-4359-bb82-7fe5807625cc" containerID="2af5575e62456e4be7656ee0e5229903dd77c0dd144e3458d8c9fe0b3ae6c15b" exitCode=0 Jan 20 17:55:57 crc kubenswrapper[4558]: I0120 17:55:57.830782 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-db-create-rmbrd" event={"ID":"29fef75f-d977-4359-bb82-7fe5807625cc","Type":"ContainerDied","Data":"2af5575e62456e4be7656ee0e5229903dd77c0dd144e3458d8c9fe0b3ae6c15b"} Jan 20 17:55:57 crc kubenswrapper[4558]: I0120 17:55:57.830823 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-db-create-rmbrd" event={"ID":"29fef75f-d977-4359-bb82-7fe5807625cc","Type":"ContainerStarted","Data":"79507bd95e2c92e37da7c04c29f47e60ef6a6ff53b011c856d387db977a6d4ec"} Jan 20 17:55:57 crc kubenswrapper[4558]: I0120 17:55:57.844387 4558 scope.go:117] "RemoveContainer" containerID="aa3f685bf49ef2fddab92d64cfaccff086ad8c1a707e26911bd2c12773aaacf3" Jan 20 17:55:57 crc kubenswrapper[4558]: I0120 17:55:57.862598 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-f06e-account-create-update-8mj9b" podStartSLOduration=1.8625761600000001 podStartE2EDuration="1.86257616s" podCreationTimestamp="2026-01-20 17:55:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:55:57.845499461 +0000 UTC m=+4451.605837428" watchObservedRunningTime="2026-01-20 17:55:57.86257616 +0000 UTC m=+4451.622914127" Jan 20 17:55:57 crc kubenswrapper[4558]: I0120 17:55:57.907088 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-64ffc476d5-9zjgb"] Jan 20 17:55:57 crc kubenswrapper[4558]: I0120 17:55:57.915527 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack-kuttl-tests/neutron-64ffc476d5-9zjgb"] Jan 20 17:55:58 crc kubenswrapper[4558]: I0120 17:55:58.575720 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b637458a-f8f6-456e-888f-3d61734be420" path="/var/lib/kubelet/pods/b637458a-f8f6-456e-888f-3d61734be420/volumes" Jan 20 17:55:58 crc kubenswrapper[4558]: I0120 17:55:58.843763 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"65701817-3f49-4895-b777-25a5d42388ff","Type":"ContainerStarted","Data":"5bc176a4081eeddb9d4ae0d67c0245828dde3fbffc57b044c3bb1d1c4a7460cd"} Jan 20 17:55:58 crc kubenswrapper[4558]: I0120 17:55:58.846070 4558 generic.go:334] "Generic (PLEG): container finished" podID="a698db22-7d59-4bc1-94fd-9f7b86601bb6" containerID="d3d543ae948707353c576f305357f367d0aa01f73668bcc81ac19bcc7f5ea140" exitCode=0 Jan 20 17:55:58 crc kubenswrapper[4558]: I0120 17:55:58.846128 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-f06e-account-create-update-8mj9b" event={"ID":"a698db22-7d59-4bc1-94fd-9f7b86601bb6","Type":"ContainerDied","Data":"d3d543ae948707353c576f305357f367d0aa01f73668bcc81ac19bcc7f5ea140"} Jan 20 17:55:58 crc kubenswrapper[4558]: I0120 17:55:58.848289 4558 generic.go:334] "Generic (PLEG): container finished" podID="492f23ec-9869-4bd8-a6ac-b0643d668a3b" containerID="c2c78b1f544d5f9cadbb72d37b5b668352e6d6879a5e657cfe4f7f30cab6e809" exitCode=0 Jan 20 17:55:58 crc kubenswrapper[4558]: I0120 17:55:58.848355 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-5191-account-create-update-t744c" event={"ID":"492f23ec-9869-4bd8-a6ac-b0643d668a3b","Type":"ContainerDied","Data":"c2c78b1f544d5f9cadbb72d37b5b668352e6d6879a5e657cfe4f7f30cab6e809"} Jan 20 17:55:59 crc kubenswrapper[4558]: I0120 17:55:59.110134 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-db-create-rmbrd" Jan 20 17:55:59 crc kubenswrapper[4558]: I0120 17:55:59.265084 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/29fef75f-d977-4359-bb82-7fe5807625cc-operator-scripts\") pod \"29fef75f-d977-4359-bb82-7fe5807625cc\" (UID: \"29fef75f-d977-4359-bb82-7fe5807625cc\") " Jan 20 17:55:59 crc kubenswrapper[4558]: I0120 17:55:59.265724 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/29fef75f-d977-4359-bb82-7fe5807625cc-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "29fef75f-d977-4359-bb82-7fe5807625cc" (UID: "29fef75f-d977-4359-bb82-7fe5807625cc"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:55:59 crc kubenswrapper[4558]: I0120 17:55:59.268387 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jvhq2\" (UniqueName: \"kubernetes.io/projected/29fef75f-d977-4359-bb82-7fe5807625cc-kube-api-access-jvhq2\") pod \"29fef75f-d977-4359-bb82-7fe5807625cc\" (UID: \"29fef75f-d977-4359-bb82-7fe5807625cc\") " Jan 20 17:55:59 crc kubenswrapper[4558]: I0120 17:55:59.268902 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/29fef75f-d977-4359-bb82-7fe5807625cc-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:59 crc kubenswrapper[4558]: I0120 17:55:59.273935 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-42d5-account-create-update-ndxkx" Jan 20 17:55:59 crc kubenswrapper[4558]: I0120 17:55:59.275123 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/29fef75f-d977-4359-bb82-7fe5807625cc-kube-api-access-jvhq2" (OuterVolumeSpecName: "kube-api-access-jvhq2") pod "29fef75f-d977-4359-bb82-7fe5807625cc" (UID: "29fef75f-d977-4359-bb82-7fe5807625cc"). InnerVolumeSpecName "kube-api-access-jvhq2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:55:59 crc kubenswrapper[4558]: I0120 17:55:59.285667 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-db-create-f9xfm" Jan 20 17:55:59 crc kubenswrapper[4558]: I0120 17:55:59.334136 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-db-create-4n4vn" Jan 20 17:55:59 crc kubenswrapper[4558]: I0120 17:55:59.382025 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/93caafe5-244a-422b-9fd2-7c02979af792-operator-scripts\") pod \"93caafe5-244a-422b-9fd2-7c02979af792\" (UID: \"93caafe5-244a-422b-9fd2-7c02979af792\") " Jan 20 17:55:59 crc kubenswrapper[4558]: I0120 17:55:59.382116 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sw9kv\" (UniqueName: \"kubernetes.io/projected/78a5052e-9643-4fe6-b908-b6e36974350a-kube-api-access-sw9kv\") pod \"78a5052e-9643-4fe6-b908-b6e36974350a\" (UID: \"78a5052e-9643-4fe6-b908-b6e36974350a\") " Jan 20 17:55:59 crc kubenswrapper[4558]: I0120 17:55:59.382176 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/78a5052e-9643-4fe6-b908-b6e36974350a-operator-scripts\") pod \"78a5052e-9643-4fe6-b908-b6e36974350a\" (UID: \"78a5052e-9643-4fe6-b908-b6e36974350a\") " Jan 20 17:55:59 crc kubenswrapper[4558]: I0120 17:55:59.382348 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5k5z7\" (UniqueName: \"kubernetes.io/projected/1e64325d-9808-497f-8889-6658129c5da8-kube-api-access-5k5z7\") pod \"1e64325d-9808-497f-8889-6658129c5da8\" (UID: \"1e64325d-9808-497f-8889-6658129c5da8\") " Jan 20 17:55:59 crc kubenswrapper[4558]: I0120 17:55:59.382445 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1e64325d-9808-497f-8889-6658129c5da8-operator-scripts\") pod \"1e64325d-9808-497f-8889-6658129c5da8\" (UID: 
\"1e64325d-9808-497f-8889-6658129c5da8\") " Jan 20 17:55:59 crc kubenswrapper[4558]: I0120 17:55:59.382466 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x9vnx\" (UniqueName: \"kubernetes.io/projected/93caafe5-244a-422b-9fd2-7c02979af792-kube-api-access-x9vnx\") pod \"93caafe5-244a-422b-9fd2-7c02979af792\" (UID: \"93caafe5-244a-422b-9fd2-7c02979af792\") " Jan 20 17:55:59 crc kubenswrapper[4558]: I0120 17:55:59.383066 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jvhq2\" (UniqueName: \"kubernetes.io/projected/29fef75f-d977-4359-bb82-7fe5807625cc-kube-api-access-jvhq2\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:59 crc kubenswrapper[4558]: I0120 17:55:59.394752 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1e64325d-9808-497f-8889-6658129c5da8-kube-api-access-5k5z7" (OuterVolumeSpecName: "kube-api-access-5k5z7") pod "1e64325d-9808-497f-8889-6658129c5da8" (UID: "1e64325d-9808-497f-8889-6658129c5da8"). InnerVolumeSpecName "kube-api-access-5k5z7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:55:59 crc kubenswrapper[4558]: I0120 17:55:59.395290 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/78a5052e-9643-4fe6-b908-b6e36974350a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "78a5052e-9643-4fe6-b908-b6e36974350a" (UID: "78a5052e-9643-4fe6-b908-b6e36974350a"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:55:59 crc kubenswrapper[4558]: I0120 17:55:59.397309 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/78a5052e-9643-4fe6-b908-b6e36974350a-kube-api-access-sw9kv" (OuterVolumeSpecName: "kube-api-access-sw9kv") pod "78a5052e-9643-4fe6-b908-b6e36974350a" (UID: "78a5052e-9643-4fe6-b908-b6e36974350a"). InnerVolumeSpecName "kube-api-access-sw9kv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:55:59 crc kubenswrapper[4558]: I0120 17:55:59.397588 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/93caafe5-244a-422b-9fd2-7c02979af792-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "93caafe5-244a-422b-9fd2-7c02979af792" (UID: "93caafe5-244a-422b-9fd2-7c02979af792"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:55:59 crc kubenswrapper[4558]: I0120 17:55:59.401374 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1e64325d-9808-497f-8889-6658129c5da8-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "1e64325d-9808-497f-8889-6658129c5da8" (UID: "1e64325d-9808-497f-8889-6658129c5da8"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:55:59 crc kubenswrapper[4558]: I0120 17:55:59.409564 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/93caafe5-244a-422b-9fd2-7c02979af792-kube-api-access-x9vnx" (OuterVolumeSpecName: "kube-api-access-x9vnx") pod "93caafe5-244a-422b-9fd2-7c02979af792" (UID: "93caafe5-244a-422b-9fd2-7c02979af792"). InnerVolumeSpecName "kube-api-access-x9vnx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:55:59 crc kubenswrapper[4558]: I0120 17:55:59.485010 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1e64325d-9808-497f-8889-6658129c5da8-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:59 crc kubenswrapper[4558]: I0120 17:55:59.485043 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x9vnx\" (UniqueName: \"kubernetes.io/projected/93caafe5-244a-422b-9fd2-7c02979af792-kube-api-access-x9vnx\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:59 crc kubenswrapper[4558]: I0120 17:55:59.485056 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/93caafe5-244a-422b-9fd2-7c02979af792-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:59 crc kubenswrapper[4558]: I0120 17:55:59.485066 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sw9kv\" (UniqueName: \"kubernetes.io/projected/78a5052e-9643-4fe6-b908-b6e36974350a-kube-api-access-sw9kv\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:59 crc kubenswrapper[4558]: I0120 17:55:59.485075 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/78a5052e-9643-4fe6-b908-b6e36974350a-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:59 crc kubenswrapper[4558]: I0120 17:55:59.485083 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5k5z7\" (UniqueName: \"kubernetes.io/projected/1e64325d-9808-497f-8889-6658129c5da8-kube-api-access-5k5z7\") on node \"crc\" DevicePath \"\"" Jan 20 17:55:59 crc kubenswrapper[4558]: I0120 17:55:59.734963 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:55:59 crc kubenswrapper[4558]: I0120 17:55:59.735387 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="b419ccfb-cd20-4b72-8f3a-48ce9ea2c991" containerName="glance-log" containerID="cri-o://c46f352f9c1ee65a4e358f6c0f82857abddecd43650369939a56f8dd36174cbc" gracePeriod=30 Jan 20 17:55:59 crc kubenswrapper[4558]: I0120 17:55:59.735401 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="b419ccfb-cd20-4b72-8f3a-48ce9ea2c991" containerName="glance-httpd" containerID="cri-o://06b7bcf43cb16007690126d14e7263d4b301518776b44ac910ada8799e3568d9" gracePeriod=30 Jan 20 17:55:59 crc kubenswrapper[4558]: I0120 17:55:59.862482 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-db-create-f9xfm" event={"ID":"78a5052e-9643-4fe6-b908-b6e36974350a","Type":"ContainerDied","Data":"d118d71c8bfcca252da51705e80b33dc89f800c408413e923a47937b8514e205"} Jan 20 17:55:59 crc kubenswrapper[4558]: I0120 17:55:59.862835 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d118d71c8bfcca252da51705e80b33dc89f800c408413e923a47937b8514e205" Jan 20 17:55:59 crc kubenswrapper[4558]: I0120 17:55:59.862689 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-db-create-f9xfm" Jan 20 17:55:59 crc kubenswrapper[4558]: I0120 17:55:59.866534 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-42d5-account-create-update-ndxkx" event={"ID":"93caafe5-244a-422b-9fd2-7c02979af792","Type":"ContainerDied","Data":"fa9f0cd8f3c90950ba9203bec2306de731e2456f928d8d1732b9240f85ac2532"} Jan 20 17:55:59 crc kubenswrapper[4558]: I0120 17:55:59.866616 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fa9f0cd8f3c90950ba9203bec2306de731e2456f928d8d1732b9240f85ac2532" Jan 20 17:55:59 crc kubenswrapper[4558]: I0120 17:55:59.866557 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-42d5-account-create-update-ndxkx" Jan 20 17:55:59 crc kubenswrapper[4558]: I0120 17:55:59.868267 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-db-create-4n4vn" event={"ID":"1e64325d-9808-497f-8889-6658129c5da8","Type":"ContainerDied","Data":"267698c2bc05390831c7995127c9934fb93bd320d8f219f402d3f70d7dc3b602"} Jan 20 17:55:59 crc kubenswrapper[4558]: I0120 17:55:59.868357 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="267698c2bc05390831c7995127c9934fb93bd320d8f219f402d3f70d7dc3b602" Jan 20 17:55:59 crc kubenswrapper[4558]: I0120 17:55:59.868309 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-db-create-4n4vn" Jan 20 17:55:59 crc kubenswrapper[4558]: I0120 17:55:59.873516 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-db-create-rmbrd" event={"ID":"29fef75f-d977-4359-bb82-7fe5807625cc","Type":"ContainerDied","Data":"79507bd95e2c92e37da7c04c29f47e60ef6a6ff53b011c856d387db977a6d4ec"} Jan 20 17:55:59 crc kubenswrapper[4558]: I0120 17:55:59.873596 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="79507bd95e2c92e37da7c04c29f47e60ef6a6ff53b011c856d387db977a6d4ec" Jan 20 17:55:59 crc kubenswrapper[4558]: I0120 17:55:59.873719 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-db-create-rmbrd" Jan 20 17:56:00 crc kubenswrapper[4558]: I0120 17:56:00.184287 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-5191-account-create-update-t744c" Jan 20 17:56:00 crc kubenswrapper[4558]: I0120 17:56:00.207409 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-f06e-account-create-update-8mj9b" Jan 20 17:56:00 crc kubenswrapper[4558]: I0120 17:56:00.303476 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/492f23ec-9869-4bd8-a6ac-b0643d668a3b-operator-scripts\") pod \"492f23ec-9869-4bd8-a6ac-b0643d668a3b\" (UID: \"492f23ec-9869-4bd8-a6ac-b0643d668a3b\") " Jan 20 17:56:00 crc kubenswrapper[4558]: I0120 17:56:00.303749 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vqjgn\" (UniqueName: \"kubernetes.io/projected/492f23ec-9869-4bd8-a6ac-b0643d668a3b-kube-api-access-vqjgn\") pod \"492f23ec-9869-4bd8-a6ac-b0643d668a3b\" (UID: \"492f23ec-9869-4bd8-a6ac-b0643d668a3b\") " Jan 20 17:56:00 crc kubenswrapper[4558]: I0120 17:56:00.304061 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/492f23ec-9869-4bd8-a6ac-b0643d668a3b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "492f23ec-9869-4bd8-a6ac-b0643d668a3b" (UID: "492f23ec-9869-4bd8-a6ac-b0643d668a3b"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:56:00 crc kubenswrapper[4558]: I0120 17:56:00.304979 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/492f23ec-9869-4bd8-a6ac-b0643d668a3b-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:00 crc kubenswrapper[4558]: I0120 17:56:00.309094 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/492f23ec-9869-4bd8-a6ac-b0643d668a3b-kube-api-access-vqjgn" (OuterVolumeSpecName: "kube-api-access-vqjgn") pod "492f23ec-9869-4bd8-a6ac-b0643d668a3b" (UID: "492f23ec-9869-4bd8-a6ac-b0643d668a3b"). InnerVolumeSpecName "kube-api-access-vqjgn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:56:00 crc kubenswrapper[4558]: I0120 17:56:00.407583 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hdhtp\" (UniqueName: \"kubernetes.io/projected/a698db22-7d59-4bc1-94fd-9f7b86601bb6-kube-api-access-hdhtp\") pod \"a698db22-7d59-4bc1-94fd-9f7b86601bb6\" (UID: \"a698db22-7d59-4bc1-94fd-9f7b86601bb6\") " Jan 20 17:56:00 crc kubenswrapper[4558]: I0120 17:56:00.407668 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a698db22-7d59-4bc1-94fd-9f7b86601bb6-operator-scripts\") pod \"a698db22-7d59-4bc1-94fd-9f7b86601bb6\" (UID: \"a698db22-7d59-4bc1-94fd-9f7b86601bb6\") " Jan 20 17:56:00 crc kubenswrapper[4558]: I0120 17:56:00.408231 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a698db22-7d59-4bc1-94fd-9f7b86601bb6-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a698db22-7d59-4bc1-94fd-9f7b86601bb6" (UID: "a698db22-7d59-4bc1-94fd-9f7b86601bb6"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:56:00 crc kubenswrapper[4558]: I0120 17:56:00.409060 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vqjgn\" (UniqueName: \"kubernetes.io/projected/492f23ec-9869-4bd8-a6ac-b0643d668a3b-kube-api-access-vqjgn\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:00 crc kubenswrapper[4558]: I0120 17:56:00.409093 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a698db22-7d59-4bc1-94fd-9f7b86601bb6-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:00 crc kubenswrapper[4558]: I0120 17:56:00.411366 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a698db22-7d59-4bc1-94fd-9f7b86601bb6-kube-api-access-hdhtp" (OuterVolumeSpecName: "kube-api-access-hdhtp") pod "a698db22-7d59-4bc1-94fd-9f7b86601bb6" (UID: "a698db22-7d59-4bc1-94fd-9f7b86601bb6"). InnerVolumeSpecName "kube-api-access-hdhtp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:56:00 crc kubenswrapper[4558]: I0120 17:56:00.510201 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hdhtp\" (UniqueName: \"kubernetes.io/projected/a698db22-7d59-4bc1-94fd-9f7b86601bb6-kube-api-access-hdhtp\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:00 crc kubenswrapper[4558]: I0120 17:56:00.584700 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:56:00 crc kubenswrapper[4558]: I0120 17:56:00.584903 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="5881b3da-d3ac-4168-a33c-0f29ad342f60" containerName="glance-log" containerID="cri-o://2f9349dc89699a650a3a26d019021f394568684a51da796cb0343c7a21d1d6e0" gracePeriod=30 Jan 20 17:56:00 crc kubenswrapper[4558]: I0120 17:56:00.585263 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="5881b3da-d3ac-4168-a33c-0f29ad342f60" containerName="glance-httpd" containerID="cri-o://426c5470720e7ae7d002df3aeb9aab834c73d17509f061f38d2d4533eef638aa" gracePeriod=30 Jan 20 17:56:00 crc kubenswrapper[4558]: I0120 17:56:00.889494 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"65701817-3f49-4895-b777-25a5d42388ff","Type":"ContainerStarted","Data":"efe31811d6c0482abf08ad08b68099f980ce0921f634d8890a69090b6ec6849d"} Jan 20 17:56:00 crc kubenswrapper[4558]: I0120 17:56:00.889731 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="65701817-3f49-4895-b777-25a5d42388ff" containerName="ceilometer-central-agent" containerID="cri-o://0ea9d4cc95d283ecb2592497db81efa86f8ba9c94d80f001fd60080c03240620" gracePeriod=30 Jan 20 17:56:00 crc kubenswrapper[4558]: I0120 17:56:00.890065 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:00 crc kubenswrapper[4558]: I0120 17:56:00.890446 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="65701817-3f49-4895-b777-25a5d42388ff" containerName="proxy-httpd" containerID="cri-o://efe31811d6c0482abf08ad08b68099f980ce0921f634d8890a69090b6ec6849d" gracePeriod=30 Jan 20 17:56:00 crc kubenswrapper[4558]: I0120 17:56:00.890508 4558 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="65701817-3f49-4895-b777-25a5d42388ff" containerName="sg-core" containerID="cri-o://5bc176a4081eeddb9d4ae0d67c0245828dde3fbffc57b044c3bb1d1c4a7460cd" gracePeriod=30 Jan 20 17:56:00 crc kubenswrapper[4558]: I0120 17:56:00.890554 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="65701817-3f49-4895-b777-25a5d42388ff" containerName="ceilometer-notification-agent" containerID="cri-o://5fb72403cd6e4ff4fee6b73a71bd84f306595019c29f91d4dbf146e9c2b679e4" gracePeriod=30 Jan 20 17:56:00 crc kubenswrapper[4558]: I0120 17:56:00.893126 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-f06e-account-create-update-8mj9b" event={"ID":"a698db22-7d59-4bc1-94fd-9f7b86601bb6","Type":"ContainerDied","Data":"cd04b9b753a72ab42cff55f01f4acae87e78f0e3dde1e7a5f8cf7b9bbaceffa3"} Jan 20 17:56:00 crc kubenswrapper[4558]: I0120 17:56:00.893215 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cd04b9b753a72ab42cff55f01f4acae87e78f0e3dde1e7a5f8cf7b9bbaceffa3" Jan 20 17:56:00 crc kubenswrapper[4558]: I0120 17:56:00.893153 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-f06e-account-create-update-8mj9b" Jan 20 17:56:00 crc kubenswrapper[4558]: I0120 17:56:00.898558 4558 generic.go:334] "Generic (PLEG): container finished" podID="5881b3da-d3ac-4168-a33c-0f29ad342f60" containerID="2f9349dc89699a650a3a26d019021f394568684a51da796cb0343c7a21d1d6e0" exitCode=143 Jan 20 17:56:00 crc kubenswrapper[4558]: I0120 17:56:00.898634 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"5881b3da-d3ac-4168-a33c-0f29ad342f60","Type":"ContainerDied","Data":"2f9349dc89699a650a3a26d019021f394568684a51da796cb0343c7a21d1d6e0"} Jan 20 17:56:00 crc kubenswrapper[4558]: I0120 17:56:00.905070 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-5191-account-create-update-t744c" event={"ID":"492f23ec-9869-4bd8-a6ac-b0643d668a3b","Type":"ContainerDied","Data":"b58216bcab0202fe2d43788b897dfb603e8e405812855f0b5c2bce0249d2223f"} Jan 20 17:56:00 crc kubenswrapper[4558]: I0120 17:56:00.905108 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b58216bcab0202fe2d43788b897dfb603e8e405812855f0b5c2bce0249d2223f" Jan 20 17:56:00 crc kubenswrapper[4558]: I0120 17:56:00.905184 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-5191-account-create-update-t744c" Jan 20 17:56:00 crc kubenswrapper[4558]: I0120 17:56:00.912241 4558 generic.go:334] "Generic (PLEG): container finished" podID="b419ccfb-cd20-4b72-8f3a-48ce9ea2c991" containerID="c46f352f9c1ee65a4e358f6c0f82857abddecd43650369939a56f8dd36174cbc" exitCode=143 Jan 20 17:56:00 crc kubenswrapper[4558]: I0120 17:56:00.912291 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"b419ccfb-cd20-4b72-8f3a-48ce9ea2c991","Type":"ContainerDied","Data":"c46f352f9c1ee65a4e358f6c0f82857abddecd43650369939a56f8dd36174cbc"} Jan 20 17:56:00 crc kubenswrapper[4558]: I0120 17:56:00.929045 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=2.647017188 podStartE2EDuration="6.929011837s" podCreationTimestamp="2026-01-20 17:55:54 +0000 UTC" firstStartedPulling="2026-01-20 17:55:55.686077032 +0000 UTC m=+4449.446414999" lastFinishedPulling="2026-01-20 17:55:59.968071682 +0000 UTC m=+4453.728409648" observedRunningTime="2026-01-20 17:56:00.922990318 +0000 UTC m=+4454.683328285" watchObservedRunningTime="2026-01-20 17:56:00.929011837 +0000 UTC m=+4454.689349824" Jan 20 17:56:01 crc kubenswrapper[4558]: I0120 17:56:01.564546 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-db-sync-rznhk"] Jan 20 17:56:01 crc kubenswrapper[4558]: E0120 17:56:01.564940 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1e64325d-9808-497f-8889-6658129c5da8" containerName="mariadb-database-create" Jan 20 17:56:01 crc kubenswrapper[4558]: I0120 17:56:01.564958 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1e64325d-9808-497f-8889-6658129c5da8" containerName="mariadb-database-create" Jan 20 17:56:01 crc kubenswrapper[4558]: E0120 17:56:01.564985 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="93caafe5-244a-422b-9fd2-7c02979af792" containerName="mariadb-account-create-update" Jan 20 17:56:01 crc kubenswrapper[4558]: I0120 17:56:01.564991 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="93caafe5-244a-422b-9fd2-7c02979af792" containerName="mariadb-account-create-update" Jan 20 17:56:01 crc kubenswrapper[4558]: E0120 17:56:01.565003 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78a5052e-9643-4fe6-b908-b6e36974350a" containerName="mariadb-database-create" Jan 20 17:56:01 crc kubenswrapper[4558]: I0120 17:56:01.565009 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="78a5052e-9643-4fe6-b908-b6e36974350a" containerName="mariadb-database-create" Jan 20 17:56:01 crc kubenswrapper[4558]: E0120 17:56:01.565023 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="492f23ec-9869-4bd8-a6ac-b0643d668a3b" containerName="mariadb-account-create-update" Jan 20 17:56:01 crc kubenswrapper[4558]: I0120 17:56:01.565029 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="492f23ec-9869-4bd8-a6ac-b0643d668a3b" containerName="mariadb-account-create-update" Jan 20 17:56:01 crc kubenswrapper[4558]: E0120 17:56:01.565050 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b637458a-f8f6-456e-888f-3d61734be420" containerName="neutron-httpd" Jan 20 17:56:01 crc kubenswrapper[4558]: I0120 17:56:01.565056 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b637458a-f8f6-456e-888f-3d61734be420" containerName="neutron-httpd" Jan 
20 17:56:01 crc kubenswrapper[4558]: E0120 17:56:01.565071 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29fef75f-d977-4359-bb82-7fe5807625cc" containerName="mariadb-database-create" Jan 20 17:56:01 crc kubenswrapper[4558]: I0120 17:56:01.565078 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="29fef75f-d977-4359-bb82-7fe5807625cc" containerName="mariadb-database-create" Jan 20 17:56:01 crc kubenswrapper[4558]: E0120 17:56:01.565087 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b637458a-f8f6-456e-888f-3d61734be420" containerName="neutron-api" Jan 20 17:56:01 crc kubenswrapper[4558]: I0120 17:56:01.565093 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b637458a-f8f6-456e-888f-3d61734be420" containerName="neutron-api" Jan 20 17:56:01 crc kubenswrapper[4558]: E0120 17:56:01.565102 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a698db22-7d59-4bc1-94fd-9f7b86601bb6" containerName="mariadb-account-create-update" Jan 20 17:56:01 crc kubenswrapper[4558]: I0120 17:56:01.565108 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a698db22-7d59-4bc1-94fd-9f7b86601bb6" containerName="mariadb-account-create-update" Jan 20 17:56:01 crc kubenswrapper[4558]: I0120 17:56:01.565309 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="29fef75f-d977-4359-bb82-7fe5807625cc" containerName="mariadb-database-create" Jan 20 17:56:01 crc kubenswrapper[4558]: I0120 17:56:01.565324 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="78a5052e-9643-4fe6-b908-b6e36974350a" containerName="mariadb-database-create" Jan 20 17:56:01 crc kubenswrapper[4558]: I0120 17:56:01.565332 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a698db22-7d59-4bc1-94fd-9f7b86601bb6" containerName="mariadb-account-create-update" Jan 20 17:56:01 crc kubenswrapper[4558]: I0120 17:56:01.565342 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="1e64325d-9808-497f-8889-6658129c5da8" containerName="mariadb-database-create" Jan 20 17:56:01 crc kubenswrapper[4558]: I0120 17:56:01.565350 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="93caafe5-244a-422b-9fd2-7c02979af792" containerName="mariadb-account-create-update" Jan 20 17:56:01 crc kubenswrapper[4558]: I0120 17:56:01.565359 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b637458a-f8f6-456e-888f-3d61734be420" containerName="neutron-httpd" Jan 20 17:56:01 crc kubenswrapper[4558]: I0120 17:56:01.565368 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="492f23ec-9869-4bd8-a6ac-b0643d668a3b" containerName="mariadb-account-create-update" Jan 20 17:56:01 crc kubenswrapper[4558]: I0120 17:56:01.565380 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b637458a-f8f6-456e-888f-3d61734be420" containerName="neutron-api" Jan 20 17:56:01 crc kubenswrapper[4558]: I0120 17:56:01.566960 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-rznhk" Jan 20 17:56:01 crc kubenswrapper[4558]: I0120 17:56:01.569764 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-nova-dockercfg-s7p6w" Jan 20 17:56:01 crc kubenswrapper[4558]: I0120 17:56:01.570050 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-conductor-config-data" Jan 20 17:56:01 crc kubenswrapper[4558]: I0120 17:56:01.570214 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-conductor-scripts" Jan 20 17:56:01 crc kubenswrapper[4558]: I0120 17:56:01.577218 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-db-sync-rznhk"] Jan 20 17:56:01 crc kubenswrapper[4558]: I0120 17:56:01.640646 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2b4nb\" (UniqueName: \"kubernetes.io/projected/e28cd4f4-bcf1-4a99-8161-5675a4dc6cb9-kube-api-access-2b4nb\") pod \"nova-cell0-conductor-db-sync-rznhk\" (UID: \"e28cd4f4-bcf1-4a99-8161-5675a4dc6cb9\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-rznhk" Jan 20 17:56:01 crc kubenswrapper[4558]: I0120 17:56:01.640702 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e28cd4f4-bcf1-4a99-8161-5675a4dc6cb9-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-rznhk\" (UID: \"e28cd4f4-bcf1-4a99-8161-5675a4dc6cb9\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-rznhk" Jan 20 17:56:01 crc kubenswrapper[4558]: I0120 17:56:01.640829 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e28cd4f4-bcf1-4a99-8161-5675a4dc6cb9-scripts\") pod \"nova-cell0-conductor-db-sync-rznhk\" (UID: \"e28cd4f4-bcf1-4a99-8161-5675a4dc6cb9\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-rznhk" Jan 20 17:56:01 crc kubenswrapper[4558]: I0120 17:56:01.640890 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e28cd4f4-bcf1-4a99-8161-5675a4dc6cb9-config-data\") pod \"nova-cell0-conductor-db-sync-rznhk\" (UID: \"e28cd4f4-bcf1-4a99-8161-5675a4dc6cb9\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-rznhk" Jan 20 17:56:01 crc kubenswrapper[4558]: I0120 17:56:01.655360 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:01 crc kubenswrapper[4558]: I0120 17:56:01.742851 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/65701817-3f49-4895-b777-25a5d42388ff-log-httpd\") pod \"65701817-3f49-4895-b777-25a5d42388ff\" (UID: \"65701817-3f49-4895-b777-25a5d42388ff\") " Jan 20 17:56:01 crc kubenswrapper[4558]: I0120 17:56:01.742936 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65701817-3f49-4895-b777-25a5d42388ff-config-data\") pod \"65701817-3f49-4895-b777-25a5d42388ff\" (UID: \"65701817-3f49-4895-b777-25a5d42388ff\") " Jan 20 17:56:01 crc kubenswrapper[4558]: I0120 17:56:01.743036 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65701817-3f49-4895-b777-25a5d42388ff-combined-ca-bundle\") pod \"65701817-3f49-4895-b777-25a5d42388ff\" (UID: \"65701817-3f49-4895-b777-25a5d42388ff\") " Jan 20 17:56:01 crc kubenswrapper[4558]: I0120 17:56:01.743206 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/65701817-3f49-4895-b777-25a5d42388ff-run-httpd\") pod \"65701817-3f49-4895-b777-25a5d42388ff\" (UID: \"65701817-3f49-4895-b777-25a5d42388ff\") " Jan 20 17:56:01 crc kubenswrapper[4558]: I0120 17:56:01.743297 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/65701817-3f49-4895-b777-25a5d42388ff-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "65701817-3f49-4895-b777-25a5d42388ff" (UID: "65701817-3f49-4895-b777-25a5d42388ff"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:56:01 crc kubenswrapper[4558]: I0120 17:56:01.743321 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jvppg\" (UniqueName: \"kubernetes.io/projected/65701817-3f49-4895-b777-25a5d42388ff-kube-api-access-jvppg\") pod \"65701817-3f49-4895-b777-25a5d42388ff\" (UID: \"65701817-3f49-4895-b777-25a5d42388ff\") " Jan 20 17:56:01 crc kubenswrapper[4558]: I0120 17:56:01.743345 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/65701817-3f49-4895-b777-25a5d42388ff-scripts\") pod \"65701817-3f49-4895-b777-25a5d42388ff\" (UID: \"65701817-3f49-4895-b777-25a5d42388ff\") " Jan 20 17:56:01 crc kubenswrapper[4558]: I0120 17:56:01.743381 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/65701817-3f49-4895-b777-25a5d42388ff-sg-core-conf-yaml\") pod \"65701817-3f49-4895-b777-25a5d42388ff\" (UID: \"65701817-3f49-4895-b777-25a5d42388ff\") " Jan 20 17:56:01 crc kubenswrapper[4558]: I0120 17:56:01.743503 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/65701817-3f49-4895-b777-25a5d42388ff-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "65701817-3f49-4895-b777-25a5d42388ff" (UID: "65701817-3f49-4895-b777-25a5d42388ff"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:56:01 crc kubenswrapper[4558]: I0120 17:56:01.743687 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2b4nb\" (UniqueName: \"kubernetes.io/projected/e28cd4f4-bcf1-4a99-8161-5675a4dc6cb9-kube-api-access-2b4nb\") pod \"nova-cell0-conductor-db-sync-rznhk\" (UID: \"e28cd4f4-bcf1-4a99-8161-5675a4dc6cb9\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-rznhk" Jan 20 17:56:01 crc kubenswrapper[4558]: I0120 17:56:01.743720 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e28cd4f4-bcf1-4a99-8161-5675a4dc6cb9-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-rznhk\" (UID: \"e28cd4f4-bcf1-4a99-8161-5675a4dc6cb9\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-rznhk" Jan 20 17:56:01 crc kubenswrapper[4558]: I0120 17:56:01.743795 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e28cd4f4-bcf1-4a99-8161-5675a4dc6cb9-scripts\") pod \"nova-cell0-conductor-db-sync-rznhk\" (UID: \"e28cd4f4-bcf1-4a99-8161-5675a4dc6cb9\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-rznhk" Jan 20 17:56:01 crc kubenswrapper[4558]: I0120 17:56:01.743843 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e28cd4f4-bcf1-4a99-8161-5675a4dc6cb9-config-data\") pod \"nova-cell0-conductor-db-sync-rznhk\" (UID: \"e28cd4f4-bcf1-4a99-8161-5675a4dc6cb9\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-rznhk" Jan 20 17:56:01 crc kubenswrapper[4558]: I0120 17:56:01.743918 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/65701817-3f49-4895-b777-25a5d42388ff-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:01 crc kubenswrapper[4558]: I0120 17:56:01.743928 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/65701817-3f49-4895-b777-25a5d42388ff-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:01 crc kubenswrapper[4558]: I0120 17:56:01.749919 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/65701817-3f49-4895-b777-25a5d42388ff-kube-api-access-jvppg" (OuterVolumeSpecName: "kube-api-access-jvppg") pod "65701817-3f49-4895-b777-25a5d42388ff" (UID: "65701817-3f49-4895-b777-25a5d42388ff"). InnerVolumeSpecName "kube-api-access-jvppg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:56:01 crc kubenswrapper[4558]: I0120 17:56:01.751308 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e28cd4f4-bcf1-4a99-8161-5675a4dc6cb9-config-data\") pod \"nova-cell0-conductor-db-sync-rznhk\" (UID: \"e28cd4f4-bcf1-4a99-8161-5675a4dc6cb9\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-rznhk" Jan 20 17:56:01 crc kubenswrapper[4558]: I0120 17:56:01.751646 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e28cd4f4-bcf1-4a99-8161-5675a4dc6cb9-scripts\") pod \"nova-cell0-conductor-db-sync-rznhk\" (UID: \"e28cd4f4-bcf1-4a99-8161-5675a4dc6cb9\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-rznhk" Jan 20 17:56:01 crc kubenswrapper[4558]: I0120 17:56:01.754669 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e28cd4f4-bcf1-4a99-8161-5675a4dc6cb9-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-rznhk\" (UID: \"e28cd4f4-bcf1-4a99-8161-5675a4dc6cb9\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-rznhk" Jan 20 17:56:01 crc kubenswrapper[4558]: I0120 17:56:01.759511 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2b4nb\" (UniqueName: \"kubernetes.io/projected/e28cd4f4-bcf1-4a99-8161-5675a4dc6cb9-kube-api-access-2b4nb\") pod \"nova-cell0-conductor-db-sync-rznhk\" (UID: \"e28cd4f4-bcf1-4a99-8161-5675a4dc6cb9\") " pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-rznhk" Jan 20 17:56:01 crc kubenswrapper[4558]: I0120 17:56:01.768953 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65701817-3f49-4895-b777-25a5d42388ff-scripts" (OuterVolumeSpecName: "scripts") pod "65701817-3f49-4895-b777-25a5d42388ff" (UID: "65701817-3f49-4895-b777-25a5d42388ff"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:56:01 crc kubenswrapper[4558]: I0120 17:56:01.778251 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65701817-3f49-4895-b777-25a5d42388ff-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "65701817-3f49-4895-b777-25a5d42388ff" (UID: "65701817-3f49-4895-b777-25a5d42388ff"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:56:01 crc kubenswrapper[4558]: I0120 17:56:01.834736 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65701817-3f49-4895-b777-25a5d42388ff-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "65701817-3f49-4895-b777-25a5d42388ff" (UID: "65701817-3f49-4895-b777-25a5d42388ff"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:56:01 crc kubenswrapper[4558]: I0120 17:56:01.845522 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65701817-3f49-4895-b777-25a5d42388ff-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:01 crc kubenswrapper[4558]: I0120 17:56:01.845564 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jvppg\" (UniqueName: \"kubernetes.io/projected/65701817-3f49-4895-b777-25a5d42388ff-kube-api-access-jvppg\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:01 crc kubenswrapper[4558]: I0120 17:56:01.845581 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/65701817-3f49-4895-b777-25a5d42388ff-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:01 crc kubenswrapper[4558]: I0120 17:56:01.845592 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/65701817-3f49-4895-b777-25a5d42388ff-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:01 crc kubenswrapper[4558]: I0120 17:56:01.846655 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65701817-3f49-4895-b777-25a5d42388ff-config-data" (OuterVolumeSpecName: "config-data") pod "65701817-3f49-4895-b777-25a5d42388ff" (UID: "65701817-3f49-4895-b777-25a5d42388ff"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:56:01 crc kubenswrapper[4558]: I0120 17:56:01.926084 4558 generic.go:334] "Generic (PLEG): container finished" podID="65701817-3f49-4895-b777-25a5d42388ff" containerID="efe31811d6c0482abf08ad08b68099f980ce0921f634d8890a69090b6ec6849d" exitCode=0 Jan 20 17:56:01 crc kubenswrapper[4558]: I0120 17:56:01.926125 4558 generic.go:334] "Generic (PLEG): container finished" podID="65701817-3f49-4895-b777-25a5d42388ff" containerID="5bc176a4081eeddb9d4ae0d67c0245828dde3fbffc57b044c3bb1d1c4a7460cd" exitCode=2 Jan 20 17:56:01 crc kubenswrapper[4558]: I0120 17:56:01.926134 4558 generic.go:334] "Generic (PLEG): container finished" podID="65701817-3f49-4895-b777-25a5d42388ff" containerID="5fb72403cd6e4ff4fee6b73a71bd84f306595019c29f91d4dbf146e9c2b679e4" exitCode=0 Jan 20 17:56:01 crc kubenswrapper[4558]: I0120 17:56:01.926142 4558 generic.go:334] "Generic (PLEG): container finished" podID="65701817-3f49-4895-b777-25a5d42388ff" containerID="0ea9d4cc95d283ecb2592497db81efa86f8ba9c94d80f001fd60080c03240620" exitCode=0 Jan 20 17:56:01 crc kubenswrapper[4558]: I0120 17:56:01.926195 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"65701817-3f49-4895-b777-25a5d42388ff","Type":"ContainerDied","Data":"efe31811d6c0482abf08ad08b68099f980ce0921f634d8890a69090b6ec6849d"} Jan 20 17:56:01 crc kubenswrapper[4558]: I0120 17:56:01.926243 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"65701817-3f49-4895-b777-25a5d42388ff","Type":"ContainerDied","Data":"5bc176a4081eeddb9d4ae0d67c0245828dde3fbffc57b044c3bb1d1c4a7460cd"} Jan 20 17:56:01 crc kubenswrapper[4558]: I0120 17:56:01.926273 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"65701817-3f49-4895-b777-25a5d42388ff","Type":"ContainerDied","Data":"5fb72403cd6e4ff4fee6b73a71bd84f306595019c29f91d4dbf146e9c2b679e4"} Jan 20 17:56:01 crc kubenswrapper[4558]: I0120 
17:56:01.926285 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"65701817-3f49-4895-b777-25a5d42388ff","Type":"ContainerDied","Data":"0ea9d4cc95d283ecb2592497db81efa86f8ba9c94d80f001fd60080c03240620"} Jan 20 17:56:01 crc kubenswrapper[4558]: I0120 17:56:01.926298 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"65701817-3f49-4895-b777-25a5d42388ff","Type":"ContainerDied","Data":"6d0521261f48b9734b884b677ce6d163da3bbb746819aa4a91b54b7430c0a2da"} Jan 20 17:56:01 crc kubenswrapper[4558]: I0120 17:56:01.926320 4558 scope.go:117] "RemoveContainer" containerID="efe31811d6c0482abf08ad08b68099f980ce0921f634d8890a69090b6ec6849d" Jan 20 17:56:01 crc kubenswrapper[4558]: I0120 17:56:01.926545 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:01 crc kubenswrapper[4558]: I0120 17:56:01.959890 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-rznhk" Jan 20 17:56:01 crc kubenswrapper[4558]: I0120 17:56:01.961199 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65701817-3f49-4895-b777-25a5d42388ff-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:01 crc kubenswrapper[4558]: I0120 17:56:01.962552 4558 scope.go:117] "RemoveContainer" containerID="5bc176a4081eeddb9d4ae0d67c0245828dde3fbffc57b044c3bb1d1c4a7460cd" Jan 20 17:56:01 crc kubenswrapper[4558]: I0120 17:56:01.964336 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.017560 4558 scope.go:117] "RemoveContainer" containerID="5fb72403cd6e4ff4fee6b73a71bd84f306595019c29f91d4dbf146e9c2b679e4" Jan 20 17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.018038 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.036674 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:56:02 crc kubenswrapper[4558]: E0120 17:56:02.037402 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65701817-3f49-4895-b777-25a5d42388ff" containerName="ceilometer-notification-agent" Jan 20 17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.037418 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="65701817-3f49-4895-b777-25a5d42388ff" containerName="ceilometer-notification-agent" Jan 20 17:56:02 crc kubenswrapper[4558]: E0120 17:56:02.037426 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65701817-3f49-4895-b777-25a5d42388ff" containerName="sg-core" Jan 20 17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.037432 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="65701817-3f49-4895-b777-25a5d42388ff" containerName="sg-core" Jan 20 17:56:02 crc kubenswrapper[4558]: E0120 17:56:02.037450 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65701817-3f49-4895-b777-25a5d42388ff" containerName="proxy-httpd" Jan 20 17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.037456 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="65701817-3f49-4895-b777-25a5d42388ff" containerName="proxy-httpd" Jan 20 17:56:02 crc kubenswrapper[4558]: E0120 17:56:02.037467 4558 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="65701817-3f49-4895-b777-25a5d42388ff" containerName="ceilometer-central-agent" Jan 20 17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.037472 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="65701817-3f49-4895-b777-25a5d42388ff" containerName="ceilometer-central-agent" Jan 20 17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.037646 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="65701817-3f49-4895-b777-25a5d42388ff" containerName="ceilometer-central-agent" Jan 20 17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.037664 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="65701817-3f49-4895-b777-25a5d42388ff" containerName="sg-core" Jan 20 17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.037672 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="65701817-3f49-4895-b777-25a5d42388ff" containerName="proxy-httpd" Jan 20 17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.037688 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="65701817-3f49-4895-b777-25a5d42388ff" containerName="ceilometer-notification-agent" Jan 20 17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.040517 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.042436 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.044887 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.044913 4558 scope.go:117] "RemoveContainer" containerID="0ea9d4cc95d283ecb2592497db81efa86f8ba9c94d80f001fd60080c03240620" Jan 20 17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.050807 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.063967 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bqjws\" (UniqueName: \"kubernetes.io/projected/88926ba7-3b09-4f2d-b021-562d79c8f122-kube-api-access-bqjws\") pod \"ceilometer-0\" (UID: \"88926ba7-3b09-4f2d-b021-562d79c8f122\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.064013 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/88926ba7-3b09-4f2d-b021-562d79c8f122-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"88926ba7-3b09-4f2d-b021-562d79c8f122\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.064068 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/88926ba7-3b09-4f2d-b021-562d79c8f122-scripts\") pod \"ceilometer-0\" (UID: \"88926ba7-3b09-4f2d-b021-562d79c8f122\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.064136 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88926ba7-3b09-4f2d-b021-562d79c8f122-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"88926ba7-3b09-4f2d-b021-562d79c8f122\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 
17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.064201 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/88926ba7-3b09-4f2d-b021-562d79c8f122-config-data\") pod \"ceilometer-0\" (UID: \"88926ba7-3b09-4f2d-b021-562d79c8f122\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.064219 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/88926ba7-3b09-4f2d-b021-562d79c8f122-run-httpd\") pod \"ceilometer-0\" (UID: \"88926ba7-3b09-4f2d-b021-562d79c8f122\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.064280 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/88926ba7-3b09-4f2d-b021-562d79c8f122-log-httpd\") pod \"ceilometer-0\" (UID: \"88926ba7-3b09-4f2d-b021-562d79c8f122\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.071270 4558 scope.go:117] "RemoveContainer" containerID="efe31811d6c0482abf08ad08b68099f980ce0921f634d8890a69090b6ec6849d" Jan 20 17:56:02 crc kubenswrapper[4558]: E0120 17:56:02.071969 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"efe31811d6c0482abf08ad08b68099f980ce0921f634d8890a69090b6ec6849d\": container with ID starting with efe31811d6c0482abf08ad08b68099f980ce0921f634d8890a69090b6ec6849d not found: ID does not exist" containerID="efe31811d6c0482abf08ad08b68099f980ce0921f634d8890a69090b6ec6849d" Jan 20 17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.072006 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"efe31811d6c0482abf08ad08b68099f980ce0921f634d8890a69090b6ec6849d"} err="failed to get container status \"efe31811d6c0482abf08ad08b68099f980ce0921f634d8890a69090b6ec6849d\": rpc error: code = NotFound desc = could not find container \"efe31811d6c0482abf08ad08b68099f980ce0921f634d8890a69090b6ec6849d\": container with ID starting with efe31811d6c0482abf08ad08b68099f980ce0921f634d8890a69090b6ec6849d not found: ID does not exist" Jan 20 17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.072041 4558 scope.go:117] "RemoveContainer" containerID="5bc176a4081eeddb9d4ae0d67c0245828dde3fbffc57b044c3bb1d1c4a7460cd" Jan 20 17:56:02 crc kubenswrapper[4558]: E0120 17:56:02.072387 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5bc176a4081eeddb9d4ae0d67c0245828dde3fbffc57b044c3bb1d1c4a7460cd\": container with ID starting with 5bc176a4081eeddb9d4ae0d67c0245828dde3fbffc57b044c3bb1d1c4a7460cd not found: ID does not exist" containerID="5bc176a4081eeddb9d4ae0d67c0245828dde3fbffc57b044c3bb1d1c4a7460cd" Jan 20 17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.072522 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5bc176a4081eeddb9d4ae0d67c0245828dde3fbffc57b044c3bb1d1c4a7460cd"} err="failed to get container status \"5bc176a4081eeddb9d4ae0d67c0245828dde3fbffc57b044c3bb1d1c4a7460cd\": rpc error: code = NotFound desc = could not find container \"5bc176a4081eeddb9d4ae0d67c0245828dde3fbffc57b044c3bb1d1c4a7460cd\": container with ID starting with 
5bc176a4081eeddb9d4ae0d67c0245828dde3fbffc57b044c3bb1d1c4a7460cd not found: ID does not exist" Jan 20 17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.072595 4558 scope.go:117] "RemoveContainer" containerID="5fb72403cd6e4ff4fee6b73a71bd84f306595019c29f91d4dbf146e9c2b679e4" Jan 20 17:56:02 crc kubenswrapper[4558]: E0120 17:56:02.073438 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5fb72403cd6e4ff4fee6b73a71bd84f306595019c29f91d4dbf146e9c2b679e4\": container with ID starting with 5fb72403cd6e4ff4fee6b73a71bd84f306595019c29f91d4dbf146e9c2b679e4 not found: ID does not exist" containerID="5fb72403cd6e4ff4fee6b73a71bd84f306595019c29f91d4dbf146e9c2b679e4" Jan 20 17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.073475 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5fb72403cd6e4ff4fee6b73a71bd84f306595019c29f91d4dbf146e9c2b679e4"} err="failed to get container status \"5fb72403cd6e4ff4fee6b73a71bd84f306595019c29f91d4dbf146e9c2b679e4\": rpc error: code = NotFound desc = could not find container \"5fb72403cd6e4ff4fee6b73a71bd84f306595019c29f91d4dbf146e9c2b679e4\": container with ID starting with 5fb72403cd6e4ff4fee6b73a71bd84f306595019c29f91d4dbf146e9c2b679e4 not found: ID does not exist" Jan 20 17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.073501 4558 scope.go:117] "RemoveContainer" containerID="0ea9d4cc95d283ecb2592497db81efa86f8ba9c94d80f001fd60080c03240620" Jan 20 17:56:02 crc kubenswrapper[4558]: E0120 17:56:02.073766 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0ea9d4cc95d283ecb2592497db81efa86f8ba9c94d80f001fd60080c03240620\": container with ID starting with 0ea9d4cc95d283ecb2592497db81efa86f8ba9c94d80f001fd60080c03240620 not found: ID does not exist" containerID="0ea9d4cc95d283ecb2592497db81efa86f8ba9c94d80f001fd60080c03240620" Jan 20 17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.073792 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0ea9d4cc95d283ecb2592497db81efa86f8ba9c94d80f001fd60080c03240620"} err="failed to get container status \"0ea9d4cc95d283ecb2592497db81efa86f8ba9c94d80f001fd60080c03240620\": rpc error: code = NotFound desc = could not find container \"0ea9d4cc95d283ecb2592497db81efa86f8ba9c94d80f001fd60080c03240620\": container with ID starting with 0ea9d4cc95d283ecb2592497db81efa86f8ba9c94d80f001fd60080c03240620 not found: ID does not exist" Jan 20 17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.073807 4558 scope.go:117] "RemoveContainer" containerID="efe31811d6c0482abf08ad08b68099f980ce0921f634d8890a69090b6ec6849d" Jan 20 17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.074047 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"efe31811d6c0482abf08ad08b68099f980ce0921f634d8890a69090b6ec6849d"} err="failed to get container status \"efe31811d6c0482abf08ad08b68099f980ce0921f634d8890a69090b6ec6849d\": rpc error: code = NotFound desc = could not find container \"efe31811d6c0482abf08ad08b68099f980ce0921f634d8890a69090b6ec6849d\": container with ID starting with efe31811d6c0482abf08ad08b68099f980ce0921f634d8890a69090b6ec6849d not found: ID does not exist" Jan 20 17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.074062 4558 scope.go:117] "RemoveContainer" containerID="5bc176a4081eeddb9d4ae0d67c0245828dde3fbffc57b044c3bb1d1c4a7460cd" Jan 20 17:56:02 crc 
kubenswrapper[4558]: I0120 17:56:02.074506 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5bc176a4081eeddb9d4ae0d67c0245828dde3fbffc57b044c3bb1d1c4a7460cd"} err="failed to get container status \"5bc176a4081eeddb9d4ae0d67c0245828dde3fbffc57b044c3bb1d1c4a7460cd\": rpc error: code = NotFound desc = could not find container \"5bc176a4081eeddb9d4ae0d67c0245828dde3fbffc57b044c3bb1d1c4a7460cd\": container with ID starting with 5bc176a4081eeddb9d4ae0d67c0245828dde3fbffc57b044c3bb1d1c4a7460cd not found: ID does not exist" Jan 20 17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.074527 4558 scope.go:117] "RemoveContainer" containerID="5fb72403cd6e4ff4fee6b73a71bd84f306595019c29f91d4dbf146e9c2b679e4" Jan 20 17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.074754 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5fb72403cd6e4ff4fee6b73a71bd84f306595019c29f91d4dbf146e9c2b679e4"} err="failed to get container status \"5fb72403cd6e4ff4fee6b73a71bd84f306595019c29f91d4dbf146e9c2b679e4\": rpc error: code = NotFound desc = could not find container \"5fb72403cd6e4ff4fee6b73a71bd84f306595019c29f91d4dbf146e9c2b679e4\": container with ID starting with 5fb72403cd6e4ff4fee6b73a71bd84f306595019c29f91d4dbf146e9c2b679e4 not found: ID does not exist" Jan 20 17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.074773 4558 scope.go:117] "RemoveContainer" containerID="0ea9d4cc95d283ecb2592497db81efa86f8ba9c94d80f001fd60080c03240620" Jan 20 17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.075313 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0ea9d4cc95d283ecb2592497db81efa86f8ba9c94d80f001fd60080c03240620"} err="failed to get container status \"0ea9d4cc95d283ecb2592497db81efa86f8ba9c94d80f001fd60080c03240620\": rpc error: code = NotFound desc = could not find container \"0ea9d4cc95d283ecb2592497db81efa86f8ba9c94d80f001fd60080c03240620\": container with ID starting with 0ea9d4cc95d283ecb2592497db81efa86f8ba9c94d80f001fd60080c03240620 not found: ID does not exist" Jan 20 17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.075332 4558 scope.go:117] "RemoveContainer" containerID="efe31811d6c0482abf08ad08b68099f980ce0921f634d8890a69090b6ec6849d" Jan 20 17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.075557 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"efe31811d6c0482abf08ad08b68099f980ce0921f634d8890a69090b6ec6849d"} err="failed to get container status \"efe31811d6c0482abf08ad08b68099f980ce0921f634d8890a69090b6ec6849d\": rpc error: code = NotFound desc = could not find container \"efe31811d6c0482abf08ad08b68099f980ce0921f634d8890a69090b6ec6849d\": container with ID starting with efe31811d6c0482abf08ad08b68099f980ce0921f634d8890a69090b6ec6849d not found: ID does not exist" Jan 20 17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.075575 4558 scope.go:117] "RemoveContainer" containerID="5bc176a4081eeddb9d4ae0d67c0245828dde3fbffc57b044c3bb1d1c4a7460cd" Jan 20 17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.075992 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5bc176a4081eeddb9d4ae0d67c0245828dde3fbffc57b044c3bb1d1c4a7460cd"} err="failed to get container status \"5bc176a4081eeddb9d4ae0d67c0245828dde3fbffc57b044c3bb1d1c4a7460cd\": rpc error: code = NotFound desc = could not find container \"5bc176a4081eeddb9d4ae0d67c0245828dde3fbffc57b044c3bb1d1c4a7460cd\": container with ID 
starting with 5bc176a4081eeddb9d4ae0d67c0245828dde3fbffc57b044c3bb1d1c4a7460cd not found: ID does not exist" Jan 20 17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.076005 4558 scope.go:117] "RemoveContainer" containerID="5fb72403cd6e4ff4fee6b73a71bd84f306595019c29f91d4dbf146e9c2b679e4" Jan 20 17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.076261 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5fb72403cd6e4ff4fee6b73a71bd84f306595019c29f91d4dbf146e9c2b679e4"} err="failed to get container status \"5fb72403cd6e4ff4fee6b73a71bd84f306595019c29f91d4dbf146e9c2b679e4\": rpc error: code = NotFound desc = could not find container \"5fb72403cd6e4ff4fee6b73a71bd84f306595019c29f91d4dbf146e9c2b679e4\": container with ID starting with 5fb72403cd6e4ff4fee6b73a71bd84f306595019c29f91d4dbf146e9c2b679e4 not found: ID does not exist" Jan 20 17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.076274 4558 scope.go:117] "RemoveContainer" containerID="0ea9d4cc95d283ecb2592497db81efa86f8ba9c94d80f001fd60080c03240620" Jan 20 17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.076433 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0ea9d4cc95d283ecb2592497db81efa86f8ba9c94d80f001fd60080c03240620"} err="failed to get container status \"0ea9d4cc95d283ecb2592497db81efa86f8ba9c94d80f001fd60080c03240620\": rpc error: code = NotFound desc = could not find container \"0ea9d4cc95d283ecb2592497db81efa86f8ba9c94d80f001fd60080c03240620\": container with ID starting with 0ea9d4cc95d283ecb2592497db81efa86f8ba9c94d80f001fd60080c03240620 not found: ID does not exist" Jan 20 17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.076448 4558 scope.go:117] "RemoveContainer" containerID="efe31811d6c0482abf08ad08b68099f980ce0921f634d8890a69090b6ec6849d" Jan 20 17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.076598 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"efe31811d6c0482abf08ad08b68099f980ce0921f634d8890a69090b6ec6849d"} err="failed to get container status \"efe31811d6c0482abf08ad08b68099f980ce0921f634d8890a69090b6ec6849d\": rpc error: code = NotFound desc = could not find container \"efe31811d6c0482abf08ad08b68099f980ce0921f634d8890a69090b6ec6849d\": container with ID starting with efe31811d6c0482abf08ad08b68099f980ce0921f634d8890a69090b6ec6849d not found: ID does not exist" Jan 20 17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.076610 4558 scope.go:117] "RemoveContainer" containerID="5bc176a4081eeddb9d4ae0d67c0245828dde3fbffc57b044c3bb1d1c4a7460cd" Jan 20 17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.076842 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5bc176a4081eeddb9d4ae0d67c0245828dde3fbffc57b044c3bb1d1c4a7460cd"} err="failed to get container status \"5bc176a4081eeddb9d4ae0d67c0245828dde3fbffc57b044c3bb1d1c4a7460cd\": rpc error: code = NotFound desc = could not find container \"5bc176a4081eeddb9d4ae0d67c0245828dde3fbffc57b044c3bb1d1c4a7460cd\": container with ID starting with 5bc176a4081eeddb9d4ae0d67c0245828dde3fbffc57b044c3bb1d1c4a7460cd not found: ID does not exist" Jan 20 17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.076857 4558 scope.go:117] "RemoveContainer" containerID="5fb72403cd6e4ff4fee6b73a71bd84f306595019c29f91d4dbf146e9c2b679e4" Jan 20 17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.077068 4558 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"5fb72403cd6e4ff4fee6b73a71bd84f306595019c29f91d4dbf146e9c2b679e4"} err="failed to get container status \"5fb72403cd6e4ff4fee6b73a71bd84f306595019c29f91d4dbf146e9c2b679e4\": rpc error: code = NotFound desc = could not find container \"5fb72403cd6e4ff4fee6b73a71bd84f306595019c29f91d4dbf146e9c2b679e4\": container with ID starting with 5fb72403cd6e4ff4fee6b73a71bd84f306595019c29f91d4dbf146e9c2b679e4 not found: ID does not exist" Jan 20 17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.077082 4558 scope.go:117] "RemoveContainer" containerID="0ea9d4cc95d283ecb2592497db81efa86f8ba9c94d80f001fd60080c03240620" Jan 20 17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.077276 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0ea9d4cc95d283ecb2592497db81efa86f8ba9c94d80f001fd60080c03240620"} err="failed to get container status \"0ea9d4cc95d283ecb2592497db81efa86f8ba9c94d80f001fd60080c03240620\": rpc error: code = NotFound desc = could not find container \"0ea9d4cc95d283ecb2592497db81efa86f8ba9c94d80f001fd60080c03240620\": container with ID starting with 0ea9d4cc95d283ecb2592497db81efa86f8ba9c94d80f001fd60080c03240620 not found: ID does not exist" Jan 20 17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.165708 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/88926ba7-3b09-4f2d-b021-562d79c8f122-log-httpd\") pod \"ceilometer-0\" (UID: \"88926ba7-3b09-4f2d-b021-562d79c8f122\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.165765 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bqjws\" (UniqueName: \"kubernetes.io/projected/88926ba7-3b09-4f2d-b021-562d79c8f122-kube-api-access-bqjws\") pod \"ceilometer-0\" (UID: \"88926ba7-3b09-4f2d-b021-562d79c8f122\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.165793 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/88926ba7-3b09-4f2d-b021-562d79c8f122-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"88926ba7-3b09-4f2d-b021-562d79c8f122\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.165824 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/88926ba7-3b09-4f2d-b021-562d79c8f122-scripts\") pod \"ceilometer-0\" (UID: \"88926ba7-3b09-4f2d-b021-562d79c8f122\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.165874 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88926ba7-3b09-4f2d-b021-562d79c8f122-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"88926ba7-3b09-4f2d-b021-562d79c8f122\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.165908 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/88926ba7-3b09-4f2d-b021-562d79c8f122-config-data\") pod \"ceilometer-0\" (UID: \"88926ba7-3b09-4f2d-b021-562d79c8f122\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.165922 4558 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/88926ba7-3b09-4f2d-b021-562d79c8f122-run-httpd\") pod \"ceilometer-0\" (UID: \"88926ba7-3b09-4f2d-b021-562d79c8f122\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.166400 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/88926ba7-3b09-4f2d-b021-562d79c8f122-run-httpd\") pod \"ceilometer-0\" (UID: \"88926ba7-3b09-4f2d-b021-562d79c8f122\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.166626 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/88926ba7-3b09-4f2d-b021-562d79c8f122-log-httpd\") pod \"ceilometer-0\" (UID: \"88926ba7-3b09-4f2d-b021-562d79c8f122\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.171319 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/88926ba7-3b09-4f2d-b021-562d79c8f122-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"88926ba7-3b09-4f2d-b021-562d79c8f122\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.172558 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/88926ba7-3b09-4f2d-b021-562d79c8f122-scripts\") pod \"ceilometer-0\" (UID: \"88926ba7-3b09-4f2d-b021-562d79c8f122\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.174236 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88926ba7-3b09-4f2d-b021-562d79c8f122-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"88926ba7-3b09-4f2d-b021-562d79c8f122\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.179150 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/88926ba7-3b09-4f2d-b021-562d79c8f122-config-data\") pod \"ceilometer-0\" (UID: \"88926ba7-3b09-4f2d-b021-562d79c8f122\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.189268 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bqjws\" (UniqueName: \"kubernetes.io/projected/88926ba7-3b09-4f2d-b021-562d79c8f122-kube-api-access-bqjws\") pod \"ceilometer-0\" (UID: \"88926ba7-3b09-4f2d-b021-562d79c8f122\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.342876 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/swift-proxy-77d4fcb95d-7jsvg" Jan 20 17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.365527 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.423658 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-db-sync-rznhk"] Jan 20 17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.584659 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="65701817-3f49-4895-b777-25a5d42388ff" path="/var/lib/kubelet/pods/65701817-3f49-4895-b777-25a5d42388ff/volumes" Jan 20 17:56:02 crc kubenswrapper[4558]: W0120 17:56:02.883783 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod88926ba7_3b09_4f2d_b021_562d79c8f122.slice/crio-db39d97cd3e22a99f43e1fa2dec229cb6e9cee7c897e55e35b083f79bd5cbaaa WatchSource:0}: Error finding container db39d97cd3e22a99f43e1fa2dec229cb6e9cee7c897e55e35b083f79bd5cbaaa: Status 404 returned error can't find the container with id db39d97cd3e22a99f43e1fa2dec229cb6e9cee7c897e55e35b083f79bd5cbaaa Jan 20 17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.895588 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.943994 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-rznhk" event={"ID":"e28cd4f4-bcf1-4a99-8161-5675a4dc6cb9","Type":"ContainerStarted","Data":"861bd48be8d8402e91402ef59c9aafef2f48435302e1a32fdbaaea6e18815246"} Jan 20 17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.944060 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-rznhk" event={"ID":"e28cd4f4-bcf1-4a99-8161-5675a4dc6cb9","Type":"ContainerStarted","Data":"939d093f5ce07d62440e6a74d0cb19f163bbbda2a269065b1736f58f2f09b70e"} Jan 20 17:56:02 crc kubenswrapper[4558]: I0120 17:56:02.948221 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"88926ba7-3b09-4f2d-b021-562d79c8f122","Type":"ContainerStarted","Data":"db39d97cd3e22a99f43e1fa2dec229cb6e9cee7c897e55e35b083f79bd5cbaaa"} Jan 20 17:56:03 crc kubenswrapper[4558]: I0120 17:56:03.228337 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:56:03 crc kubenswrapper[4558]: I0120 17:56:03.249086 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-rznhk" podStartSLOduration=2.24906468 podStartE2EDuration="2.24906468s" podCreationTimestamp="2026-01-20 17:56:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:56:02.963904483 +0000 UTC m=+4456.724242450" watchObservedRunningTime="2026-01-20 17:56:03.24906468 +0000 UTC m=+4457.009402647" Jan 20 17:56:03 crc kubenswrapper[4558]: I0120 17:56:03.393316 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"b419ccfb-cd20-4b72-8f3a-48ce9ea2c991\" (UID: \"b419ccfb-cd20-4b72-8f3a-48ce9ea2c991\") " Jan 20 17:56:03 crc kubenswrapper[4558]: I0120 17:56:03.393426 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b419ccfb-cd20-4b72-8f3a-48ce9ea2c991-scripts\") pod \"b419ccfb-cd20-4b72-8f3a-48ce9ea2c991\" (UID: \"b419ccfb-cd20-4b72-8f3a-48ce9ea2c991\") " Jan 20 17:56:03 crc kubenswrapper[4558]: I0120 17:56:03.393505 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b419ccfb-cd20-4b72-8f3a-48ce9ea2c991-config-data\") pod \"b419ccfb-cd20-4b72-8f3a-48ce9ea2c991\" (UID: \"b419ccfb-cd20-4b72-8f3a-48ce9ea2c991\") " Jan 20 17:56:03 crc kubenswrapper[4558]: I0120 17:56:03.393658 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b419ccfb-cd20-4b72-8f3a-48ce9ea2c991-combined-ca-bundle\") pod \"b419ccfb-cd20-4b72-8f3a-48ce9ea2c991\" (UID: \"b419ccfb-cd20-4b72-8f3a-48ce9ea2c991\") " Jan 20 17:56:03 crc kubenswrapper[4558]: I0120 17:56:03.393718 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b419ccfb-cd20-4b72-8f3a-48ce9ea2c991-logs\") pod \"b419ccfb-cd20-4b72-8f3a-48ce9ea2c991\" (UID: \"b419ccfb-cd20-4b72-8f3a-48ce9ea2c991\") " Jan 20 17:56:03 crc kubenswrapper[4558]: I0120 17:56:03.394024 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b419ccfb-cd20-4b72-8f3a-48ce9ea2c991-httpd-run\") pod \"b419ccfb-cd20-4b72-8f3a-48ce9ea2c991\" (UID: \"b419ccfb-cd20-4b72-8f3a-48ce9ea2c991\") " Jan 20 17:56:03 crc kubenswrapper[4558]: I0120 17:56:03.394120 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gdg4c\" (UniqueName: \"kubernetes.io/projected/b419ccfb-cd20-4b72-8f3a-48ce9ea2c991-kube-api-access-gdg4c\") pod \"b419ccfb-cd20-4b72-8f3a-48ce9ea2c991\" (UID: \"b419ccfb-cd20-4b72-8f3a-48ce9ea2c991\") " Jan 20 17:56:03 crc kubenswrapper[4558]: I0120 17:56:03.394252 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b419ccfb-cd20-4b72-8f3a-48ce9ea2c991-logs" (OuterVolumeSpecName: "logs") pod "b419ccfb-cd20-4b72-8f3a-48ce9ea2c991" (UID: "b419ccfb-cd20-4b72-8f3a-48ce9ea2c991"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:56:03 crc kubenswrapper[4558]: I0120 17:56:03.394364 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b419ccfb-cd20-4b72-8f3a-48ce9ea2c991-internal-tls-certs\") pod \"b419ccfb-cd20-4b72-8f3a-48ce9ea2c991\" (UID: \"b419ccfb-cd20-4b72-8f3a-48ce9ea2c991\") " Jan 20 17:56:03 crc kubenswrapper[4558]: I0120 17:56:03.394495 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b419ccfb-cd20-4b72-8f3a-48ce9ea2c991-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "b419ccfb-cd20-4b72-8f3a-48ce9ea2c991" (UID: "b419ccfb-cd20-4b72-8f3a-48ce9ea2c991"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:56:03 crc kubenswrapper[4558]: I0120 17:56:03.395460 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b419ccfb-cd20-4b72-8f3a-48ce9ea2c991-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:03 crc kubenswrapper[4558]: I0120 17:56:03.395502 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b419ccfb-cd20-4b72-8f3a-48ce9ea2c991-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:03 crc kubenswrapper[4558]: I0120 17:56:03.400427 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b419ccfb-cd20-4b72-8f3a-48ce9ea2c991-kube-api-access-gdg4c" (OuterVolumeSpecName: "kube-api-access-gdg4c") pod "b419ccfb-cd20-4b72-8f3a-48ce9ea2c991" (UID: "b419ccfb-cd20-4b72-8f3a-48ce9ea2c991"). InnerVolumeSpecName "kube-api-access-gdg4c". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:56:03 crc kubenswrapper[4558]: I0120 17:56:03.400569 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b419ccfb-cd20-4b72-8f3a-48ce9ea2c991-scripts" (OuterVolumeSpecName: "scripts") pod "b419ccfb-cd20-4b72-8f3a-48ce9ea2c991" (UID: "b419ccfb-cd20-4b72-8f3a-48ce9ea2c991"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:56:03 crc kubenswrapper[4558]: I0120 17:56:03.402690 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "glance") pod "b419ccfb-cd20-4b72-8f3a-48ce9ea2c991" (UID: "b419ccfb-cd20-4b72-8f3a-48ce9ea2c991"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:56:03 crc kubenswrapper[4558]: I0120 17:56:03.424178 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b419ccfb-cd20-4b72-8f3a-48ce9ea2c991-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b419ccfb-cd20-4b72-8f3a-48ce9ea2c991" (UID: "b419ccfb-cd20-4b72-8f3a-48ce9ea2c991"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:56:03 crc kubenswrapper[4558]: I0120 17:56:03.447476 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b419ccfb-cd20-4b72-8f3a-48ce9ea2c991-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "b419ccfb-cd20-4b72-8f3a-48ce9ea2c991" (UID: "b419ccfb-cd20-4b72-8f3a-48ce9ea2c991"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:56:03 crc kubenswrapper[4558]: I0120 17:56:03.457052 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b419ccfb-cd20-4b72-8f3a-48ce9ea2c991-config-data" (OuterVolumeSpecName: "config-data") pod "b419ccfb-cd20-4b72-8f3a-48ce9ea2c991" (UID: "b419ccfb-cd20-4b72-8f3a-48ce9ea2c991"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:56:03 crc kubenswrapper[4558]: I0120 17:56:03.497820 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Jan 20 17:56:03 crc kubenswrapper[4558]: I0120 17:56:03.497861 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b419ccfb-cd20-4b72-8f3a-48ce9ea2c991-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:03 crc kubenswrapper[4558]: I0120 17:56:03.497873 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b419ccfb-cd20-4b72-8f3a-48ce9ea2c991-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:03 crc kubenswrapper[4558]: I0120 17:56:03.497892 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b419ccfb-cd20-4b72-8f3a-48ce9ea2c991-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:03 crc kubenswrapper[4558]: I0120 17:56:03.497909 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gdg4c\" (UniqueName: \"kubernetes.io/projected/b419ccfb-cd20-4b72-8f3a-48ce9ea2c991-kube-api-access-gdg4c\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:03 crc kubenswrapper[4558]: I0120 17:56:03.497921 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b419ccfb-cd20-4b72-8f3a-48ce9ea2c991-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:03 crc kubenswrapper[4558]: I0120 17:56:03.514806 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Jan 20 17:56:03 crc kubenswrapper[4558]: I0120 17:56:03.599899 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:03 crc kubenswrapper[4558]: I0120 17:56:03.654142 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:56:03 crc kubenswrapper[4558]: I0120 17:56:03.969264 4558 generic.go:334] "Generic (PLEG): container finished" podID="b419ccfb-cd20-4b72-8f3a-48ce9ea2c991" containerID="06b7bcf43cb16007690126d14e7263d4b301518776b44ac910ada8799e3568d9" exitCode=0 Jan 20 17:56:03 crc kubenswrapper[4558]: I0120 17:56:03.969362 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:56:03 crc kubenswrapper[4558]: I0120 17:56:03.969379 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"b419ccfb-cd20-4b72-8f3a-48ce9ea2c991","Type":"ContainerDied","Data":"06b7bcf43cb16007690126d14e7263d4b301518776b44ac910ada8799e3568d9"} Jan 20 17:56:03 crc kubenswrapper[4558]: I0120 17:56:03.969874 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"b419ccfb-cd20-4b72-8f3a-48ce9ea2c991","Type":"ContainerDied","Data":"1598d0d352e09b5cbd5494defb385710e095d95cf101858e6ee7ee8d1487d114"} Jan 20 17:56:03 crc kubenswrapper[4558]: I0120 17:56:03.969899 4558 scope.go:117] "RemoveContainer" containerID="06b7bcf43cb16007690126d14e7263d4b301518776b44ac910ada8799e3568d9" Jan 20 17:56:03 crc kubenswrapper[4558]: I0120 17:56:03.982534 4558 generic.go:334] "Generic (PLEG): container finished" podID="5881b3da-d3ac-4168-a33c-0f29ad342f60" containerID="426c5470720e7ae7d002df3aeb9aab834c73d17509f061f38d2d4533eef638aa" exitCode=0 Jan 20 17:56:03 crc kubenswrapper[4558]: I0120 17:56:03.982619 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"5881b3da-d3ac-4168-a33c-0f29ad342f60","Type":"ContainerDied","Data":"426c5470720e7ae7d002df3aeb9aab834c73d17509f061f38d2d4533eef638aa"} Jan 20 17:56:03 crc kubenswrapper[4558]: I0120 17:56:03.994016 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"88926ba7-3b09-4f2d-b021-562d79c8f122","Type":"ContainerStarted","Data":"76d0657e5ac88a9f86d16aa2cf66e4f05523e093af77132e101ac0848cd7d94d"} Jan 20 17:56:04 crc kubenswrapper[4558]: I0120 17:56:04.017016 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:56:04 crc kubenswrapper[4558]: I0120 17:56:04.021703 4558 scope.go:117] "RemoveContainer" containerID="c46f352f9c1ee65a4e358f6c0f82857abddecd43650369939a56f8dd36174cbc" Jan 20 17:56:04 crc kubenswrapper[4558]: I0120 17:56:04.025064 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:56:04 crc kubenswrapper[4558]: I0120 17:56:04.039903 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:56:04 crc kubenswrapper[4558]: E0120 17:56:04.040286 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b419ccfb-cd20-4b72-8f3a-48ce9ea2c991" containerName="glance-httpd" Jan 20 17:56:04 crc kubenswrapper[4558]: I0120 17:56:04.040301 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b419ccfb-cd20-4b72-8f3a-48ce9ea2c991" containerName="glance-httpd" Jan 20 17:56:04 crc kubenswrapper[4558]: E0120 17:56:04.040330 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b419ccfb-cd20-4b72-8f3a-48ce9ea2c991" containerName="glance-log" Jan 20 17:56:04 crc kubenswrapper[4558]: I0120 17:56:04.040337 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b419ccfb-cd20-4b72-8f3a-48ce9ea2c991" containerName="glance-log" Jan 20 17:56:04 crc kubenswrapper[4558]: I0120 17:56:04.040485 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b419ccfb-cd20-4b72-8f3a-48ce9ea2c991" containerName="glance-httpd" Jan 20 17:56:04 crc kubenswrapper[4558]: I0120 17:56:04.040501 
4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b419ccfb-cd20-4b72-8f3a-48ce9ea2c991" containerName="glance-log" Jan 20 17:56:04 crc kubenswrapper[4558]: I0120 17:56:04.047085 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:56:04 crc kubenswrapper[4558]: I0120 17:56:04.073918 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:56:04 crc kubenswrapper[4558]: I0120 17:56:04.077579 4558 scope.go:117] "RemoveContainer" containerID="06b7bcf43cb16007690126d14e7263d4b301518776b44ac910ada8799e3568d9" Jan 20 17:56:04 crc kubenswrapper[4558]: I0120 17:56:04.078509 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-internal-config-data" Jan 20 17:56:04 crc kubenswrapper[4558]: I0120 17:56:04.079569 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-internal-svc" Jan 20 17:56:04 crc kubenswrapper[4558]: E0120 17:56:04.081540 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"06b7bcf43cb16007690126d14e7263d4b301518776b44ac910ada8799e3568d9\": container with ID starting with 06b7bcf43cb16007690126d14e7263d4b301518776b44ac910ada8799e3568d9 not found: ID does not exist" containerID="06b7bcf43cb16007690126d14e7263d4b301518776b44ac910ada8799e3568d9" Jan 20 17:56:04 crc kubenswrapper[4558]: I0120 17:56:04.084876 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"06b7bcf43cb16007690126d14e7263d4b301518776b44ac910ada8799e3568d9"} err="failed to get container status \"06b7bcf43cb16007690126d14e7263d4b301518776b44ac910ada8799e3568d9\": rpc error: code = NotFound desc = could not find container \"06b7bcf43cb16007690126d14e7263d4b301518776b44ac910ada8799e3568d9\": container with ID starting with 06b7bcf43cb16007690126d14e7263d4b301518776b44ac910ada8799e3568d9 not found: ID does not exist" Jan 20 17:56:04 crc kubenswrapper[4558]: I0120 17:56:04.084910 4558 scope.go:117] "RemoveContainer" containerID="c46f352f9c1ee65a4e358f6c0f82857abddecd43650369939a56f8dd36174cbc" Jan 20 17:56:04 crc kubenswrapper[4558]: E0120 17:56:04.088311 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c46f352f9c1ee65a4e358f6c0f82857abddecd43650369939a56f8dd36174cbc\": container with ID starting with c46f352f9c1ee65a4e358f6c0f82857abddecd43650369939a56f8dd36174cbc not found: ID does not exist" containerID="c46f352f9c1ee65a4e358f6c0f82857abddecd43650369939a56f8dd36174cbc" Jan 20 17:56:04 crc kubenswrapper[4558]: I0120 17:56:04.088354 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c46f352f9c1ee65a4e358f6c0f82857abddecd43650369939a56f8dd36174cbc"} err="failed to get container status \"c46f352f9c1ee65a4e358f6c0f82857abddecd43650369939a56f8dd36174cbc\": rpc error: code = NotFound desc = could not find container \"c46f352f9c1ee65a4e358f6c0f82857abddecd43650369939a56f8dd36174cbc\": container with ID starting with c46f352f9c1ee65a4e358f6c0f82857abddecd43650369939a56f8dd36174cbc not found: ID does not exist" Jan 20 17:56:04 crc kubenswrapper[4558]: I0120 17:56:04.165395 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:56:04 crc kubenswrapper[4558]: I0120 17:56:04.214962 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7dcf074e-8444-4bd2-b1f9-143390e96ef8-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"7dcf074e-8444-4bd2-b1f9-143390e96ef8\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:56:04 crc kubenswrapper[4558]: I0120 17:56:04.215060 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7dcf074e-8444-4bd2-b1f9-143390e96ef8-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"7dcf074e-8444-4bd2-b1f9-143390e96ef8\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:56:04 crc kubenswrapper[4558]: I0120 17:56:04.215213 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7dcf074e-8444-4bd2-b1f9-143390e96ef8-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"7dcf074e-8444-4bd2-b1f9-143390e96ef8\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:56:04 crc kubenswrapper[4558]: I0120 17:56:04.215257 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8p8dr\" (UniqueName: \"kubernetes.io/projected/7dcf074e-8444-4bd2-b1f9-143390e96ef8-kube-api-access-8p8dr\") pod \"glance-default-internal-api-0\" (UID: \"7dcf074e-8444-4bd2-b1f9-143390e96ef8\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:56:04 crc kubenswrapper[4558]: I0120 17:56:04.215297 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"7dcf074e-8444-4bd2-b1f9-143390e96ef8\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:56:04 crc kubenswrapper[4558]: I0120 17:56:04.215327 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7dcf074e-8444-4bd2-b1f9-143390e96ef8-logs\") pod \"glance-default-internal-api-0\" (UID: \"7dcf074e-8444-4bd2-b1f9-143390e96ef8\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:56:04 crc kubenswrapper[4558]: I0120 17:56:04.215399 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7dcf074e-8444-4bd2-b1f9-143390e96ef8-scripts\") pod \"glance-default-internal-api-0\" (UID: \"7dcf074e-8444-4bd2-b1f9-143390e96ef8\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:56:04 crc kubenswrapper[4558]: I0120 17:56:04.215434 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7dcf074e-8444-4bd2-b1f9-143390e96ef8-config-data\") pod \"glance-default-internal-api-0\" (UID: \"7dcf074e-8444-4bd2-b1f9-143390e96ef8\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:56:04 crc kubenswrapper[4558]: I0120 17:56:04.316325 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5881b3da-d3ac-4168-a33c-0f29ad342f60-public-tls-certs\") pod \"5881b3da-d3ac-4168-a33c-0f29ad342f60\" (UID: \"5881b3da-d3ac-4168-a33c-0f29ad342f60\") " Jan 20 17:56:04 crc kubenswrapper[4558]: I0120 17:56:04.316646 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5881b3da-d3ac-4168-a33c-0f29ad342f60-config-data\") pod \"5881b3da-d3ac-4168-a33c-0f29ad342f60\" (UID: \"5881b3da-d3ac-4168-a33c-0f29ad342f60\") " Jan 20 17:56:04 crc kubenswrapper[4558]: I0120 17:56:04.316736 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5881b3da-d3ac-4168-a33c-0f29ad342f60-combined-ca-bundle\") pod \"5881b3da-d3ac-4168-a33c-0f29ad342f60\" (UID: \"5881b3da-d3ac-4168-a33c-0f29ad342f60\") " Jan 20 17:56:04 crc kubenswrapper[4558]: I0120 17:56:04.317109 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"5881b3da-d3ac-4168-a33c-0f29ad342f60\" (UID: \"5881b3da-d3ac-4168-a33c-0f29ad342f60\") " Jan 20 17:56:04 crc kubenswrapper[4558]: I0120 17:56:04.317201 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jwlv9\" (UniqueName: \"kubernetes.io/projected/5881b3da-d3ac-4168-a33c-0f29ad342f60-kube-api-access-jwlv9\") pod \"5881b3da-d3ac-4168-a33c-0f29ad342f60\" (UID: \"5881b3da-d3ac-4168-a33c-0f29ad342f60\") " Jan 20 17:56:04 crc kubenswrapper[4558]: I0120 17:56:04.317320 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5881b3da-d3ac-4168-a33c-0f29ad342f60-httpd-run\") pod \"5881b3da-d3ac-4168-a33c-0f29ad342f60\" (UID: \"5881b3da-d3ac-4168-a33c-0f29ad342f60\") " Jan 20 17:56:04 crc kubenswrapper[4558]: I0120 17:56:04.317398 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5881b3da-d3ac-4168-a33c-0f29ad342f60-scripts\") pod \"5881b3da-d3ac-4168-a33c-0f29ad342f60\" (UID: \"5881b3da-d3ac-4168-a33c-0f29ad342f60\") " Jan 20 17:56:04 crc kubenswrapper[4558]: I0120 17:56:04.317495 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5881b3da-d3ac-4168-a33c-0f29ad342f60-logs\") pod \"5881b3da-d3ac-4168-a33c-0f29ad342f60\" (UID: \"5881b3da-d3ac-4168-a33c-0f29ad342f60\") " Jan 20 17:56:04 crc kubenswrapper[4558]: I0120 17:56:04.318122 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"7dcf074e-8444-4bd2-b1f9-143390e96ef8\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:56:04 crc kubenswrapper[4558]: I0120 17:56:04.318396 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7dcf074e-8444-4bd2-b1f9-143390e96ef8-logs\") pod \"glance-default-internal-api-0\" (UID: \"7dcf074e-8444-4bd2-b1f9-143390e96ef8\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:56:04 crc kubenswrapper[4558]: I0120 17:56:04.318609 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"7dcf074e-8444-4bd2-b1f9-143390e96ef8\") device mount path \"/mnt/openstack/pv07\"" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:56:04 crc kubenswrapper[4558]: I0120 17:56:04.318815 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7dcf074e-8444-4bd2-b1f9-143390e96ef8-logs\") pod \"glance-default-internal-api-0\" (UID: \"7dcf074e-8444-4bd2-b1f9-143390e96ef8\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:56:04 crc kubenswrapper[4558]: I0120 17:56:04.319203 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7dcf074e-8444-4bd2-b1f9-143390e96ef8-scripts\") pod \"glance-default-internal-api-0\" (UID: \"7dcf074e-8444-4bd2-b1f9-143390e96ef8\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:56:04 crc kubenswrapper[4558]: I0120 17:56:04.319286 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7dcf074e-8444-4bd2-b1f9-143390e96ef8-config-data\") pod \"glance-default-internal-api-0\" (UID: \"7dcf074e-8444-4bd2-b1f9-143390e96ef8\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:56:04 crc kubenswrapper[4558]: I0120 17:56:04.319343 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7dcf074e-8444-4bd2-b1f9-143390e96ef8-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"7dcf074e-8444-4bd2-b1f9-143390e96ef8\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:56:04 crc kubenswrapper[4558]: I0120 17:56:04.319407 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7dcf074e-8444-4bd2-b1f9-143390e96ef8-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"7dcf074e-8444-4bd2-b1f9-143390e96ef8\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:56:04 crc kubenswrapper[4558]: I0120 17:56:04.319464 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7dcf074e-8444-4bd2-b1f9-143390e96ef8-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"7dcf074e-8444-4bd2-b1f9-143390e96ef8\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:56:04 crc kubenswrapper[4558]: I0120 17:56:04.319483 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8p8dr\" (UniqueName: \"kubernetes.io/projected/7dcf074e-8444-4bd2-b1f9-143390e96ef8-kube-api-access-8p8dr\") pod \"glance-default-internal-api-0\" (UID: \"7dcf074e-8444-4bd2-b1f9-143390e96ef8\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:56:04 crc kubenswrapper[4558]: I0120 17:56:04.320178 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5881b3da-d3ac-4168-a33c-0f29ad342f60-logs" (OuterVolumeSpecName: "logs") pod "5881b3da-d3ac-4168-a33c-0f29ad342f60" (UID: "5881b3da-d3ac-4168-a33c-0f29ad342f60"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:56:04 crc kubenswrapper[4558]: I0120 17:56:04.321928 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7dcf074e-8444-4bd2-b1f9-143390e96ef8-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"7dcf074e-8444-4bd2-b1f9-143390e96ef8\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:56:04 crc kubenswrapper[4558]: I0120 17:56:04.322401 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "glance") pod "5881b3da-d3ac-4168-a33c-0f29ad342f60" (UID: "5881b3da-d3ac-4168-a33c-0f29ad342f60"). InnerVolumeSpecName "local-storage05-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:56:04 crc kubenswrapper[4558]: I0120 17:56:04.322839 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7dcf074e-8444-4bd2-b1f9-143390e96ef8-scripts\") pod \"glance-default-internal-api-0\" (UID: \"7dcf074e-8444-4bd2-b1f9-143390e96ef8\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:56:04 crc kubenswrapper[4558]: I0120 17:56:04.322957 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5881b3da-d3ac-4168-a33c-0f29ad342f60-kube-api-access-jwlv9" (OuterVolumeSpecName: "kube-api-access-jwlv9") pod "5881b3da-d3ac-4168-a33c-0f29ad342f60" (UID: "5881b3da-d3ac-4168-a33c-0f29ad342f60"). InnerVolumeSpecName "kube-api-access-jwlv9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:56:04 crc kubenswrapper[4558]: I0120 17:56:04.324425 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5881b3da-d3ac-4168-a33c-0f29ad342f60-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "5881b3da-d3ac-4168-a33c-0f29ad342f60" (UID: "5881b3da-d3ac-4168-a33c-0f29ad342f60"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:56:04 crc kubenswrapper[4558]: I0120 17:56:04.325451 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7dcf074e-8444-4bd2-b1f9-143390e96ef8-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"7dcf074e-8444-4bd2-b1f9-143390e96ef8\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:56:04 crc kubenswrapper[4558]: I0120 17:56:04.326768 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7dcf074e-8444-4bd2-b1f9-143390e96ef8-config-data\") pod \"glance-default-internal-api-0\" (UID: \"7dcf074e-8444-4bd2-b1f9-143390e96ef8\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:56:04 crc kubenswrapper[4558]: I0120 17:56:04.330990 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7dcf074e-8444-4bd2-b1f9-143390e96ef8-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"7dcf074e-8444-4bd2-b1f9-143390e96ef8\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:56:04 crc kubenswrapper[4558]: I0120 17:56:04.334883 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5881b3da-d3ac-4168-a33c-0f29ad342f60-scripts" (OuterVolumeSpecName: "scripts") pod "5881b3da-d3ac-4168-a33c-0f29ad342f60" (UID: "5881b3da-d3ac-4168-a33c-0f29ad342f60"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:56:04 crc kubenswrapper[4558]: I0120 17:56:04.335469 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8p8dr\" (UniqueName: \"kubernetes.io/projected/7dcf074e-8444-4bd2-b1f9-143390e96ef8-kube-api-access-8p8dr\") pod \"glance-default-internal-api-0\" (UID: \"7dcf074e-8444-4bd2-b1f9-143390e96ef8\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:56:04 crc kubenswrapper[4558]: I0120 17:56:04.360933 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"7dcf074e-8444-4bd2-b1f9-143390e96ef8\") " pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:56:04 crc kubenswrapper[4558]: I0120 17:56:04.382315 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5881b3da-d3ac-4168-a33c-0f29ad342f60-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5881b3da-d3ac-4168-a33c-0f29ad342f60" (UID: "5881b3da-d3ac-4168-a33c-0f29ad342f60"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:56:04 crc kubenswrapper[4558]: I0120 17:56:04.393549 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5881b3da-d3ac-4168-a33c-0f29ad342f60-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "5881b3da-d3ac-4168-a33c-0f29ad342f60" (UID: "5881b3da-d3ac-4168-a33c-0f29ad342f60"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:56:04 crc kubenswrapper[4558]: I0120 17:56:04.402957 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:56:04 crc kubenswrapper[4558]: I0120 17:56:04.406677 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5881b3da-d3ac-4168-a33c-0f29ad342f60-config-data" (OuterVolumeSpecName: "config-data") pod "5881b3da-d3ac-4168-a33c-0f29ad342f60" (UID: "5881b3da-d3ac-4168-a33c-0f29ad342f60"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:56:04 crc kubenswrapper[4558]: I0120 17:56:04.422504 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/5881b3da-d3ac-4168-a33c-0f29ad342f60-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:04 crc kubenswrapper[4558]: I0120 17:56:04.422548 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5881b3da-d3ac-4168-a33c-0f29ad342f60-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:04 crc kubenswrapper[4558]: I0120 17:56:04.422562 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5881b3da-d3ac-4168-a33c-0f29ad342f60-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:04 crc kubenswrapper[4558]: I0120 17:56:04.422575 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5881b3da-d3ac-4168-a33c-0f29ad342f60-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:04 crc kubenswrapper[4558]: I0120 17:56:04.422587 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5881b3da-d3ac-4168-a33c-0f29ad342f60-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:04 crc kubenswrapper[4558]: I0120 17:56:04.422603 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5881b3da-d3ac-4168-a33c-0f29ad342f60-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:04 crc kubenswrapper[4558]: I0120 17:56:04.422658 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" " Jan 20 17:56:04 crc kubenswrapper[4558]: I0120 17:56:04.422673 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jwlv9\" (UniqueName: \"kubernetes.io/projected/5881b3da-d3ac-4168-a33c-0f29ad342f60-kube-api-access-jwlv9\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:04 crc kubenswrapper[4558]: I0120 17:56:04.452310 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc" Jan 20 17:56:04 crc kubenswrapper[4558]: I0120 17:56:04.527374 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:04 crc kubenswrapper[4558]: I0120 17:56:04.598374 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b419ccfb-cd20-4b72-8f3a-48ce9ea2c991" path="/var/lib/kubelet/pods/b419ccfb-cd20-4b72-8f3a-48ce9ea2c991/volumes" Jan 20 17:56:04 crc kubenswrapper[4558]: I0120 17:56:04.839055 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:56:05 crc kubenswrapper[4558]: I0120 
17:56:05.009594 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"5881b3da-d3ac-4168-a33c-0f29ad342f60","Type":"ContainerDied","Data":"4f79020f7ad820f4e9c48646626f310cff93a734ca7c416d945fbaa77994559c"} Jan 20 17:56:05 crc kubenswrapper[4558]: I0120 17:56:05.009654 4558 scope.go:117] "RemoveContainer" containerID="426c5470720e7ae7d002df3aeb9aab834c73d17509f061f38d2d4533eef638aa" Jan 20 17:56:05 crc kubenswrapper[4558]: I0120 17:56:05.009757 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:56:05 crc kubenswrapper[4558]: I0120 17:56:05.018421 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"88926ba7-3b09-4f2d-b021-562d79c8f122","Type":"ContainerStarted","Data":"94d0f209a0ad2dfe1184a534c72a4a8436d2bad0d52e492f5bb3d629f0510a0b"} Jan 20 17:56:05 crc kubenswrapper[4558]: I0120 17:56:05.046596 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:56:05 crc kubenswrapper[4558]: I0120 17:56:05.051500 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:56:05 crc kubenswrapper[4558]: I0120 17:56:05.053826 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"7dcf074e-8444-4bd2-b1f9-143390e96ef8","Type":"ContainerStarted","Data":"e321eaafee8525c51e9de1b6cbca4c0ccaa4f6d832b563b5571a376bbfd1c72a"} Jan 20 17:56:05 crc kubenswrapper[4558]: I0120 17:56:05.078210 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:56:05 crc kubenswrapper[4558]: E0120 17:56:05.078682 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5881b3da-d3ac-4168-a33c-0f29ad342f60" containerName="glance-log" Jan 20 17:56:05 crc kubenswrapper[4558]: I0120 17:56:05.078703 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="5881b3da-d3ac-4168-a33c-0f29ad342f60" containerName="glance-log" Jan 20 17:56:05 crc kubenswrapper[4558]: E0120 17:56:05.078728 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5881b3da-d3ac-4168-a33c-0f29ad342f60" containerName="glance-httpd" Jan 20 17:56:05 crc kubenswrapper[4558]: I0120 17:56:05.078735 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="5881b3da-d3ac-4168-a33c-0f29ad342f60" containerName="glance-httpd" Jan 20 17:56:05 crc kubenswrapper[4558]: I0120 17:56:05.078931 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="5881b3da-d3ac-4168-a33c-0f29ad342f60" containerName="glance-log" Jan 20 17:56:05 crc kubenswrapper[4558]: I0120 17:56:05.078957 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="5881b3da-d3ac-4168-a33c-0f29ad342f60" containerName="glance-httpd" Jan 20 17:56:05 crc kubenswrapper[4558]: I0120 17:56:05.079973 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:56:05 crc kubenswrapper[4558]: I0120 17:56:05.082038 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-default-external-config-data" Jan 20 17:56:05 crc kubenswrapper[4558]: I0120 17:56:05.082296 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-glance-default-public-svc" Jan 20 17:56:05 crc kubenswrapper[4558]: I0120 17:56:05.082515 4558 scope.go:117] "RemoveContainer" containerID="2f9349dc89699a650a3a26d019021f394568684a51da796cb0343c7a21d1d6e0" Jan 20 17:56:05 crc kubenswrapper[4558]: I0120 17:56:05.094488 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:56:05 crc kubenswrapper[4558]: I0120 17:56:05.245559 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8f5c27dd-2365-49ee-b4bd-b46e952199f7-logs\") pod \"glance-default-external-api-0\" (UID: \"8f5c27dd-2365-49ee-b4bd-b46e952199f7\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:56:05 crc kubenswrapper[4558]: I0120 17:56:05.245613 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f5c27dd-2365-49ee-b4bd-b46e952199f7-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"8f5c27dd-2365-49ee-b4bd-b46e952199f7\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:56:05 crc kubenswrapper[4558]: I0120 17:56:05.245664 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z286r\" (UniqueName: \"kubernetes.io/projected/8f5c27dd-2365-49ee-b4bd-b46e952199f7-kube-api-access-z286r\") pod \"glance-default-external-api-0\" (UID: \"8f5c27dd-2365-49ee-b4bd-b46e952199f7\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:56:05 crc kubenswrapper[4558]: I0120 17:56:05.245714 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8f5c27dd-2365-49ee-b4bd-b46e952199f7-scripts\") pod \"glance-default-external-api-0\" (UID: \"8f5c27dd-2365-49ee-b4bd-b46e952199f7\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:56:05 crc kubenswrapper[4558]: I0120 17:56:05.245744 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"8f5c27dd-2365-49ee-b4bd-b46e952199f7\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:56:05 crc kubenswrapper[4558]: I0120 17:56:05.245780 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8f5c27dd-2365-49ee-b4bd-b46e952199f7-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"8f5c27dd-2365-49ee-b4bd-b46e952199f7\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:56:05 crc kubenswrapper[4558]: I0120 17:56:05.245955 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/8f5c27dd-2365-49ee-b4bd-b46e952199f7-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"8f5c27dd-2365-49ee-b4bd-b46e952199f7\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:56:05 crc kubenswrapper[4558]: I0120 17:56:05.246125 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f5c27dd-2365-49ee-b4bd-b46e952199f7-config-data\") pod \"glance-default-external-api-0\" (UID: \"8f5c27dd-2365-49ee-b4bd-b46e952199f7\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:56:05 crc kubenswrapper[4558]: I0120 17:56:05.349283 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8f5c27dd-2365-49ee-b4bd-b46e952199f7-logs\") pod \"glance-default-external-api-0\" (UID: \"8f5c27dd-2365-49ee-b4bd-b46e952199f7\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:56:05 crc kubenswrapper[4558]: I0120 17:56:05.349350 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f5c27dd-2365-49ee-b4bd-b46e952199f7-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"8f5c27dd-2365-49ee-b4bd-b46e952199f7\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:56:05 crc kubenswrapper[4558]: I0120 17:56:05.349426 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z286r\" (UniqueName: \"kubernetes.io/projected/8f5c27dd-2365-49ee-b4bd-b46e952199f7-kube-api-access-z286r\") pod \"glance-default-external-api-0\" (UID: \"8f5c27dd-2365-49ee-b4bd-b46e952199f7\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:56:05 crc kubenswrapper[4558]: I0120 17:56:05.349507 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8f5c27dd-2365-49ee-b4bd-b46e952199f7-scripts\") pod \"glance-default-external-api-0\" (UID: \"8f5c27dd-2365-49ee-b4bd-b46e952199f7\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:56:05 crc kubenswrapper[4558]: I0120 17:56:05.349546 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"8f5c27dd-2365-49ee-b4bd-b46e952199f7\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:56:05 crc kubenswrapper[4558]: I0120 17:56:05.349582 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8f5c27dd-2365-49ee-b4bd-b46e952199f7-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"8f5c27dd-2365-49ee-b4bd-b46e952199f7\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:56:05 crc kubenswrapper[4558]: I0120 17:56:05.349642 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8f5c27dd-2365-49ee-b4bd-b46e952199f7-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"8f5c27dd-2365-49ee-b4bd-b46e952199f7\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:56:05 crc kubenswrapper[4558]: I0120 17:56:05.349727 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/8f5c27dd-2365-49ee-b4bd-b46e952199f7-config-data\") pod \"glance-default-external-api-0\" (UID: \"8f5c27dd-2365-49ee-b4bd-b46e952199f7\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:56:05 crc kubenswrapper[4558]: I0120 17:56:05.350482 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"8f5c27dd-2365-49ee-b4bd-b46e952199f7\") device mount path \"/mnt/openstack/pv05\"" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:56:05 crc kubenswrapper[4558]: I0120 17:56:05.350570 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8f5c27dd-2365-49ee-b4bd-b46e952199f7-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"8f5c27dd-2365-49ee-b4bd-b46e952199f7\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:56:05 crc kubenswrapper[4558]: I0120 17:56:05.351041 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8f5c27dd-2365-49ee-b4bd-b46e952199f7-logs\") pod \"glance-default-external-api-0\" (UID: \"8f5c27dd-2365-49ee-b4bd-b46e952199f7\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:56:05 crc kubenswrapper[4558]: I0120 17:56:05.356018 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8f5c27dd-2365-49ee-b4bd-b46e952199f7-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"8f5c27dd-2365-49ee-b4bd-b46e952199f7\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:56:05 crc kubenswrapper[4558]: I0120 17:56:05.356209 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f5c27dd-2365-49ee-b4bd-b46e952199f7-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"8f5c27dd-2365-49ee-b4bd-b46e952199f7\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:56:05 crc kubenswrapper[4558]: I0120 17:56:05.356305 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8f5c27dd-2365-49ee-b4bd-b46e952199f7-scripts\") pod \"glance-default-external-api-0\" (UID: \"8f5c27dd-2365-49ee-b4bd-b46e952199f7\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:56:05 crc kubenswrapper[4558]: I0120 17:56:05.356895 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f5c27dd-2365-49ee-b4bd-b46e952199f7-config-data\") pod \"glance-default-external-api-0\" (UID: \"8f5c27dd-2365-49ee-b4bd-b46e952199f7\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:56:05 crc kubenswrapper[4558]: I0120 17:56:05.366193 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z286r\" (UniqueName: \"kubernetes.io/projected/8f5c27dd-2365-49ee-b4bd-b46e952199f7-kube-api-access-z286r\") pod \"glance-default-external-api-0\" (UID: \"8f5c27dd-2365-49ee-b4bd-b46e952199f7\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:56:05 crc kubenswrapper[4558]: I0120 17:56:05.374959 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"glance-default-external-api-0\" (UID: \"8f5c27dd-2365-49ee-b4bd-b46e952199f7\") " pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:56:05 crc kubenswrapper[4558]: I0120 17:56:05.420218 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:56:05 crc kubenswrapper[4558]: I0120 17:56:05.565579 4558 scope.go:117] "RemoveContainer" containerID="4ea7fa61d8b21007a044345acec89fcebe56c2921cf0f76ff2002f44bdc88ede" Jan 20 17:56:05 crc kubenswrapper[4558]: E0120 17:56:05.565965 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:56:05 crc kubenswrapper[4558]: I0120 17:56:05.867564 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:56:06 crc kubenswrapper[4558]: I0120 17:56:06.065830 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"7dcf074e-8444-4bd2-b1f9-143390e96ef8","Type":"ContainerStarted","Data":"d464d4bcdd194c4ee64ccc0190f47e6e0ded24b37d0780e2cc7ded9c8f572a41"} Jan 20 17:56:06 crc kubenswrapper[4558]: I0120 17:56:06.071289 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"88926ba7-3b09-4f2d-b021-562d79c8f122","Type":"ContainerStarted","Data":"1e292c8137b3381afeef0cf89bf5962d454f3d5c2998d7b1868128aff7f2b86d"} Jan 20 17:56:06 crc kubenswrapper[4558]: W0120 17:56:06.206290 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8f5c27dd_2365_49ee_b4bd_b46e952199f7.slice/crio-68a29dece1a57865631949e2370c639a8b3ff0e4319d8a8262e6d9751ffb57d6 WatchSource:0}: Error finding container 68a29dece1a57865631949e2370c639a8b3ff0e4319d8a8262e6d9751ffb57d6: Status 404 returned error can't find the container with id 68a29dece1a57865631949e2370c639a8b3ff0e4319d8a8262e6d9751ffb57d6 Jan 20 17:56:06 crc kubenswrapper[4558]: I0120 17:56:06.597373 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5881b3da-d3ac-4168-a33c-0f29ad342f60" path="/var/lib/kubelet/pods/5881b3da-d3ac-4168-a33c-0f29ad342f60/volumes" Jan 20 17:56:07 crc kubenswrapper[4558]: I0120 17:56:07.091696 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"8f5c27dd-2365-49ee-b4bd-b46e952199f7","Type":"ContainerStarted","Data":"c8373ab659f997ee81de532a6542773f3920ce134a08cd0ec3ca60979a45eb58"} Jan 20 17:56:07 crc kubenswrapper[4558]: I0120 17:56:07.091771 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"8f5c27dd-2365-49ee-b4bd-b46e952199f7","Type":"ContainerStarted","Data":"68a29dece1a57865631949e2370c639a8b3ff0e4319d8a8262e6d9751ffb57d6"} Jan 20 17:56:07 crc kubenswrapper[4558]: I0120 17:56:07.105403 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" 
event={"ID":"88926ba7-3b09-4f2d-b021-562d79c8f122","Type":"ContainerStarted","Data":"da21fe7fa17518e5c4dde6b9ceca4b600ad45f4567477b28e1809e7bb854aa4d"} Jan 20 17:56:07 crc kubenswrapper[4558]: I0120 17:56:07.105747 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="88926ba7-3b09-4f2d-b021-562d79c8f122" containerName="ceilometer-central-agent" containerID="cri-o://76d0657e5ac88a9f86d16aa2cf66e4f05523e093af77132e101ac0848cd7d94d" gracePeriod=30 Jan 20 17:56:07 crc kubenswrapper[4558]: I0120 17:56:07.106213 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="88926ba7-3b09-4f2d-b021-562d79c8f122" containerName="proxy-httpd" containerID="cri-o://da21fe7fa17518e5c4dde6b9ceca4b600ad45f4567477b28e1809e7bb854aa4d" gracePeriod=30 Jan 20 17:56:07 crc kubenswrapper[4558]: I0120 17:56:07.106271 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="88926ba7-3b09-4f2d-b021-562d79c8f122" containerName="ceilometer-notification-agent" containerID="cri-o://94d0f209a0ad2dfe1184a534c72a4a8436d2bad0d52e492f5bb3d629f0510a0b" gracePeriod=30 Jan 20 17:56:07 crc kubenswrapper[4558]: I0120 17:56:07.106292 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:07 crc kubenswrapper[4558]: I0120 17:56:07.106331 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="88926ba7-3b09-4f2d-b021-562d79c8f122" containerName="sg-core" containerID="cri-o://1e292c8137b3381afeef0cf89bf5962d454f3d5c2998d7b1868128aff7f2b86d" gracePeriod=30 Jan 20 17:56:07 crc kubenswrapper[4558]: I0120 17:56:07.134669 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=2.654659021 podStartE2EDuration="6.134650584s" podCreationTimestamp="2026-01-20 17:56:01 +0000 UTC" firstStartedPulling="2026-01-20 17:56:02.889224053 +0000 UTC m=+4456.649562021" lastFinishedPulling="2026-01-20 17:56:06.369215616 +0000 UTC m=+4460.129553584" observedRunningTime="2026-01-20 17:56:07.131552511 +0000 UTC m=+4460.891890478" watchObservedRunningTime="2026-01-20 17:56:07.134650584 +0000 UTC m=+4460.894988552" Jan 20 17:56:07 crc kubenswrapper[4558]: I0120 17:56:07.164025 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"7dcf074e-8444-4bd2-b1f9-143390e96ef8","Type":"ContainerStarted","Data":"4a4fc0e35d400b1717dbd88e9cb076f22b20999b3e1867032e08446fa50f658e"} Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.073136 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.098927 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-internal-api-0" podStartSLOduration=4.098907034 podStartE2EDuration="4.098907034s" podCreationTimestamp="2026-01-20 17:56:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:56:07.189383728 +0000 UTC m=+4460.949721695" watchObservedRunningTime="2026-01-20 17:56:08.098907034 +0000 UTC m=+4461.859245001" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.177195 4558 generic.go:334] "Generic (PLEG): container finished" podID="88926ba7-3b09-4f2d-b021-562d79c8f122" containerID="da21fe7fa17518e5c4dde6b9ceca4b600ad45f4567477b28e1809e7bb854aa4d" exitCode=0 Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.177232 4558 generic.go:334] "Generic (PLEG): container finished" podID="88926ba7-3b09-4f2d-b021-562d79c8f122" containerID="1e292c8137b3381afeef0cf89bf5962d454f3d5c2998d7b1868128aff7f2b86d" exitCode=2 Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.177247 4558 generic.go:334] "Generic (PLEG): container finished" podID="88926ba7-3b09-4f2d-b021-562d79c8f122" containerID="94d0f209a0ad2dfe1184a534c72a4a8436d2bad0d52e492f5bb3d629f0510a0b" exitCode=0 Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.177258 4558 generic.go:334] "Generic (PLEG): container finished" podID="88926ba7-3b09-4f2d-b021-562d79c8f122" containerID="76d0657e5ac88a9f86d16aa2cf66e4f05523e093af77132e101ac0848cd7d94d" exitCode=0 Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.177290 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"88926ba7-3b09-4f2d-b021-562d79c8f122","Type":"ContainerDied","Data":"da21fe7fa17518e5c4dde6b9ceca4b600ad45f4567477b28e1809e7bb854aa4d"} Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.177318 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.177348 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"88926ba7-3b09-4f2d-b021-562d79c8f122","Type":"ContainerDied","Data":"1e292c8137b3381afeef0cf89bf5962d454f3d5c2998d7b1868128aff7f2b86d"} Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.177364 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"88926ba7-3b09-4f2d-b021-562d79c8f122","Type":"ContainerDied","Data":"94d0f209a0ad2dfe1184a534c72a4a8436d2bad0d52e492f5bb3d629f0510a0b"} Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.177374 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"88926ba7-3b09-4f2d-b021-562d79c8f122","Type":"ContainerDied","Data":"76d0657e5ac88a9f86d16aa2cf66e4f05523e093af77132e101ac0848cd7d94d"} Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.177385 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"88926ba7-3b09-4f2d-b021-562d79c8f122","Type":"ContainerDied","Data":"db39d97cd3e22a99f43e1fa2dec229cb6e9cee7c897e55e35b083f79bd5cbaaa"} Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.177485 4558 scope.go:117] "RemoveContainer" containerID="da21fe7fa17518e5c4dde6b9ceca4b600ad45f4567477b28e1809e7bb854aa4d" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.180234 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"8f5c27dd-2365-49ee-b4bd-b46e952199f7","Type":"ContainerStarted","Data":"bf44882c187617ca00e2141f2d59bb3077ec0bd79f8fd782d703007b103558bc"} Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.208183 4558 scope.go:117] "RemoveContainer" containerID="1e292c8137b3381afeef0cf89bf5962d454f3d5c2998d7b1868128aff7f2b86d" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.211078 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/glance-default-external-api-0" podStartSLOduration=3.211063469 podStartE2EDuration="3.211063469s" podCreationTimestamp="2026-01-20 17:56:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:56:08.202792442 +0000 UTC m=+4461.963130408" watchObservedRunningTime="2026-01-20 17:56:08.211063469 +0000 UTC m=+4461.971401435" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.221354 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/88926ba7-3b09-4f2d-b021-562d79c8f122-sg-core-conf-yaml\") pod \"88926ba7-3b09-4f2d-b021-562d79c8f122\" (UID: \"88926ba7-3b09-4f2d-b021-562d79c8f122\") " Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.221433 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/88926ba7-3b09-4f2d-b021-562d79c8f122-run-httpd\") pod \"88926ba7-3b09-4f2d-b021-562d79c8f122\" (UID: \"88926ba7-3b09-4f2d-b021-562d79c8f122\") " Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.221457 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/88926ba7-3b09-4f2d-b021-562d79c8f122-scripts\") pod \"88926ba7-3b09-4f2d-b021-562d79c8f122\" 
(UID: \"88926ba7-3b09-4f2d-b021-562d79c8f122\") " Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.221493 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/88926ba7-3b09-4f2d-b021-562d79c8f122-config-data\") pod \"88926ba7-3b09-4f2d-b021-562d79c8f122\" (UID: \"88926ba7-3b09-4f2d-b021-562d79c8f122\") " Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.221547 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bqjws\" (UniqueName: \"kubernetes.io/projected/88926ba7-3b09-4f2d-b021-562d79c8f122-kube-api-access-bqjws\") pod \"88926ba7-3b09-4f2d-b021-562d79c8f122\" (UID: \"88926ba7-3b09-4f2d-b021-562d79c8f122\") " Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.221642 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88926ba7-3b09-4f2d-b021-562d79c8f122-combined-ca-bundle\") pod \"88926ba7-3b09-4f2d-b021-562d79c8f122\" (UID: \"88926ba7-3b09-4f2d-b021-562d79c8f122\") " Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.221719 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/88926ba7-3b09-4f2d-b021-562d79c8f122-log-httpd\") pod \"88926ba7-3b09-4f2d-b021-562d79c8f122\" (UID: \"88926ba7-3b09-4f2d-b021-562d79c8f122\") " Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.221940 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/88926ba7-3b09-4f2d-b021-562d79c8f122-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "88926ba7-3b09-4f2d-b021-562d79c8f122" (UID: "88926ba7-3b09-4f2d-b021-562d79c8f122"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.222599 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/88926ba7-3b09-4f2d-b021-562d79c8f122-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.222893 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/88926ba7-3b09-4f2d-b021-562d79c8f122-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "88926ba7-3b09-4f2d-b021-562d79c8f122" (UID: "88926ba7-3b09-4f2d-b021-562d79c8f122"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.229484 4558 scope.go:117] "RemoveContainer" containerID="94d0f209a0ad2dfe1184a534c72a4a8436d2bad0d52e492f5bb3d629f0510a0b" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.275876 4558 scope.go:117] "RemoveContainer" containerID="76d0657e5ac88a9f86d16aa2cf66e4f05523e093af77132e101ac0848cd7d94d" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.293599 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/88926ba7-3b09-4f2d-b021-562d79c8f122-scripts" (OuterVolumeSpecName: "scripts") pod "88926ba7-3b09-4f2d-b021-562d79c8f122" (UID: "88926ba7-3b09-4f2d-b021-562d79c8f122"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.293666 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/88926ba7-3b09-4f2d-b021-562d79c8f122-kube-api-access-bqjws" (OuterVolumeSpecName: "kube-api-access-bqjws") pod "88926ba7-3b09-4f2d-b021-562d79c8f122" (UID: "88926ba7-3b09-4f2d-b021-562d79c8f122"). InnerVolumeSpecName "kube-api-access-bqjws". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.297335 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/88926ba7-3b09-4f2d-b021-562d79c8f122-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "88926ba7-3b09-4f2d-b021-562d79c8f122" (UID: "88926ba7-3b09-4f2d-b021-562d79c8f122"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.298604 4558 scope.go:117] "RemoveContainer" containerID="da21fe7fa17518e5c4dde6b9ceca4b600ad45f4567477b28e1809e7bb854aa4d" Jan 20 17:56:08 crc kubenswrapper[4558]: E0120 17:56:08.301107 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"da21fe7fa17518e5c4dde6b9ceca4b600ad45f4567477b28e1809e7bb854aa4d\": container with ID starting with da21fe7fa17518e5c4dde6b9ceca4b600ad45f4567477b28e1809e7bb854aa4d not found: ID does not exist" containerID="da21fe7fa17518e5c4dde6b9ceca4b600ad45f4567477b28e1809e7bb854aa4d" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.301251 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"da21fe7fa17518e5c4dde6b9ceca4b600ad45f4567477b28e1809e7bb854aa4d"} err="failed to get container status \"da21fe7fa17518e5c4dde6b9ceca4b600ad45f4567477b28e1809e7bb854aa4d\": rpc error: code = NotFound desc = could not find container \"da21fe7fa17518e5c4dde6b9ceca4b600ad45f4567477b28e1809e7bb854aa4d\": container with ID starting with da21fe7fa17518e5c4dde6b9ceca4b600ad45f4567477b28e1809e7bb854aa4d not found: ID does not exist" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.301338 4558 scope.go:117] "RemoveContainer" containerID="1e292c8137b3381afeef0cf89bf5962d454f3d5c2998d7b1868128aff7f2b86d" Jan 20 17:56:08 crc kubenswrapper[4558]: E0120 17:56:08.301781 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1e292c8137b3381afeef0cf89bf5962d454f3d5c2998d7b1868128aff7f2b86d\": container with ID starting with 1e292c8137b3381afeef0cf89bf5962d454f3d5c2998d7b1868128aff7f2b86d not found: ID does not exist" containerID="1e292c8137b3381afeef0cf89bf5962d454f3d5c2998d7b1868128aff7f2b86d" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.301826 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1e292c8137b3381afeef0cf89bf5962d454f3d5c2998d7b1868128aff7f2b86d"} err="failed to get container status \"1e292c8137b3381afeef0cf89bf5962d454f3d5c2998d7b1868128aff7f2b86d\": rpc error: code = NotFound desc = could not find container \"1e292c8137b3381afeef0cf89bf5962d454f3d5c2998d7b1868128aff7f2b86d\": container with ID starting with 1e292c8137b3381afeef0cf89bf5962d454f3d5c2998d7b1868128aff7f2b86d not found: ID does not exist" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.301854 4558 scope.go:117] "RemoveContainer" 
containerID="94d0f209a0ad2dfe1184a534c72a4a8436d2bad0d52e492f5bb3d629f0510a0b" Jan 20 17:56:08 crc kubenswrapper[4558]: E0120 17:56:08.302107 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"94d0f209a0ad2dfe1184a534c72a4a8436d2bad0d52e492f5bb3d629f0510a0b\": container with ID starting with 94d0f209a0ad2dfe1184a534c72a4a8436d2bad0d52e492f5bb3d629f0510a0b not found: ID does not exist" containerID="94d0f209a0ad2dfe1184a534c72a4a8436d2bad0d52e492f5bb3d629f0510a0b" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.302130 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"94d0f209a0ad2dfe1184a534c72a4a8436d2bad0d52e492f5bb3d629f0510a0b"} err="failed to get container status \"94d0f209a0ad2dfe1184a534c72a4a8436d2bad0d52e492f5bb3d629f0510a0b\": rpc error: code = NotFound desc = could not find container \"94d0f209a0ad2dfe1184a534c72a4a8436d2bad0d52e492f5bb3d629f0510a0b\": container with ID starting with 94d0f209a0ad2dfe1184a534c72a4a8436d2bad0d52e492f5bb3d629f0510a0b not found: ID does not exist" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.302144 4558 scope.go:117] "RemoveContainer" containerID="76d0657e5ac88a9f86d16aa2cf66e4f05523e093af77132e101ac0848cd7d94d" Jan 20 17:56:08 crc kubenswrapper[4558]: E0120 17:56:08.302909 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"76d0657e5ac88a9f86d16aa2cf66e4f05523e093af77132e101ac0848cd7d94d\": container with ID starting with 76d0657e5ac88a9f86d16aa2cf66e4f05523e093af77132e101ac0848cd7d94d not found: ID does not exist" containerID="76d0657e5ac88a9f86d16aa2cf66e4f05523e093af77132e101ac0848cd7d94d" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.302972 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"76d0657e5ac88a9f86d16aa2cf66e4f05523e093af77132e101ac0848cd7d94d"} err="failed to get container status \"76d0657e5ac88a9f86d16aa2cf66e4f05523e093af77132e101ac0848cd7d94d\": rpc error: code = NotFound desc = could not find container \"76d0657e5ac88a9f86d16aa2cf66e4f05523e093af77132e101ac0848cd7d94d\": container with ID starting with 76d0657e5ac88a9f86d16aa2cf66e4f05523e093af77132e101ac0848cd7d94d not found: ID does not exist" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.302988 4558 scope.go:117] "RemoveContainer" containerID="da21fe7fa17518e5c4dde6b9ceca4b600ad45f4567477b28e1809e7bb854aa4d" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.303240 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"da21fe7fa17518e5c4dde6b9ceca4b600ad45f4567477b28e1809e7bb854aa4d"} err="failed to get container status \"da21fe7fa17518e5c4dde6b9ceca4b600ad45f4567477b28e1809e7bb854aa4d\": rpc error: code = NotFound desc = could not find container \"da21fe7fa17518e5c4dde6b9ceca4b600ad45f4567477b28e1809e7bb854aa4d\": container with ID starting with da21fe7fa17518e5c4dde6b9ceca4b600ad45f4567477b28e1809e7bb854aa4d not found: ID does not exist" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.303260 4558 scope.go:117] "RemoveContainer" containerID="1e292c8137b3381afeef0cf89bf5962d454f3d5c2998d7b1868128aff7f2b86d" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.303628 4558 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"1e292c8137b3381afeef0cf89bf5962d454f3d5c2998d7b1868128aff7f2b86d"} err="failed to get container status \"1e292c8137b3381afeef0cf89bf5962d454f3d5c2998d7b1868128aff7f2b86d\": rpc error: code = NotFound desc = could not find container \"1e292c8137b3381afeef0cf89bf5962d454f3d5c2998d7b1868128aff7f2b86d\": container with ID starting with 1e292c8137b3381afeef0cf89bf5962d454f3d5c2998d7b1868128aff7f2b86d not found: ID does not exist" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.303750 4558 scope.go:117] "RemoveContainer" containerID="94d0f209a0ad2dfe1184a534c72a4a8436d2bad0d52e492f5bb3d629f0510a0b" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.304020 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"94d0f209a0ad2dfe1184a534c72a4a8436d2bad0d52e492f5bb3d629f0510a0b"} err="failed to get container status \"94d0f209a0ad2dfe1184a534c72a4a8436d2bad0d52e492f5bb3d629f0510a0b\": rpc error: code = NotFound desc = could not find container \"94d0f209a0ad2dfe1184a534c72a4a8436d2bad0d52e492f5bb3d629f0510a0b\": container with ID starting with 94d0f209a0ad2dfe1184a534c72a4a8436d2bad0d52e492f5bb3d629f0510a0b not found: ID does not exist" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.304048 4558 scope.go:117] "RemoveContainer" containerID="76d0657e5ac88a9f86d16aa2cf66e4f05523e093af77132e101ac0848cd7d94d" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.304840 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"76d0657e5ac88a9f86d16aa2cf66e4f05523e093af77132e101ac0848cd7d94d"} err="failed to get container status \"76d0657e5ac88a9f86d16aa2cf66e4f05523e093af77132e101ac0848cd7d94d\": rpc error: code = NotFound desc = could not find container \"76d0657e5ac88a9f86d16aa2cf66e4f05523e093af77132e101ac0848cd7d94d\": container with ID starting with 76d0657e5ac88a9f86d16aa2cf66e4f05523e093af77132e101ac0848cd7d94d not found: ID does not exist" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.304910 4558 scope.go:117] "RemoveContainer" containerID="da21fe7fa17518e5c4dde6b9ceca4b600ad45f4567477b28e1809e7bb854aa4d" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.305178 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"da21fe7fa17518e5c4dde6b9ceca4b600ad45f4567477b28e1809e7bb854aa4d"} err="failed to get container status \"da21fe7fa17518e5c4dde6b9ceca4b600ad45f4567477b28e1809e7bb854aa4d\": rpc error: code = NotFound desc = could not find container \"da21fe7fa17518e5c4dde6b9ceca4b600ad45f4567477b28e1809e7bb854aa4d\": container with ID starting with da21fe7fa17518e5c4dde6b9ceca4b600ad45f4567477b28e1809e7bb854aa4d not found: ID does not exist" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.305245 4558 scope.go:117] "RemoveContainer" containerID="1e292c8137b3381afeef0cf89bf5962d454f3d5c2998d7b1868128aff7f2b86d" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.305497 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1e292c8137b3381afeef0cf89bf5962d454f3d5c2998d7b1868128aff7f2b86d"} err="failed to get container status \"1e292c8137b3381afeef0cf89bf5962d454f3d5c2998d7b1868128aff7f2b86d\": rpc error: code = NotFound desc = could not find container \"1e292c8137b3381afeef0cf89bf5962d454f3d5c2998d7b1868128aff7f2b86d\": container with ID starting with 1e292c8137b3381afeef0cf89bf5962d454f3d5c2998d7b1868128aff7f2b86d not found: ID does not exist" Jan 
20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.305756 4558 scope.go:117] "RemoveContainer" containerID="94d0f209a0ad2dfe1184a534c72a4a8436d2bad0d52e492f5bb3d629f0510a0b" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.306056 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"94d0f209a0ad2dfe1184a534c72a4a8436d2bad0d52e492f5bb3d629f0510a0b"} err="failed to get container status \"94d0f209a0ad2dfe1184a534c72a4a8436d2bad0d52e492f5bb3d629f0510a0b\": rpc error: code = NotFound desc = could not find container \"94d0f209a0ad2dfe1184a534c72a4a8436d2bad0d52e492f5bb3d629f0510a0b\": container with ID starting with 94d0f209a0ad2dfe1184a534c72a4a8436d2bad0d52e492f5bb3d629f0510a0b not found: ID does not exist" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.306142 4558 scope.go:117] "RemoveContainer" containerID="76d0657e5ac88a9f86d16aa2cf66e4f05523e093af77132e101ac0848cd7d94d" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.306967 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"76d0657e5ac88a9f86d16aa2cf66e4f05523e093af77132e101ac0848cd7d94d"} err="failed to get container status \"76d0657e5ac88a9f86d16aa2cf66e4f05523e093af77132e101ac0848cd7d94d\": rpc error: code = NotFound desc = could not find container \"76d0657e5ac88a9f86d16aa2cf66e4f05523e093af77132e101ac0848cd7d94d\": container with ID starting with 76d0657e5ac88a9f86d16aa2cf66e4f05523e093af77132e101ac0848cd7d94d not found: ID does not exist" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.307000 4558 scope.go:117] "RemoveContainer" containerID="da21fe7fa17518e5c4dde6b9ceca4b600ad45f4567477b28e1809e7bb854aa4d" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.307565 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"da21fe7fa17518e5c4dde6b9ceca4b600ad45f4567477b28e1809e7bb854aa4d"} err="failed to get container status \"da21fe7fa17518e5c4dde6b9ceca4b600ad45f4567477b28e1809e7bb854aa4d\": rpc error: code = NotFound desc = could not find container \"da21fe7fa17518e5c4dde6b9ceca4b600ad45f4567477b28e1809e7bb854aa4d\": container with ID starting with da21fe7fa17518e5c4dde6b9ceca4b600ad45f4567477b28e1809e7bb854aa4d not found: ID does not exist" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.307639 4558 scope.go:117] "RemoveContainer" containerID="1e292c8137b3381afeef0cf89bf5962d454f3d5c2998d7b1868128aff7f2b86d" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.308044 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1e292c8137b3381afeef0cf89bf5962d454f3d5c2998d7b1868128aff7f2b86d"} err="failed to get container status \"1e292c8137b3381afeef0cf89bf5962d454f3d5c2998d7b1868128aff7f2b86d\": rpc error: code = NotFound desc = could not find container \"1e292c8137b3381afeef0cf89bf5962d454f3d5c2998d7b1868128aff7f2b86d\": container with ID starting with 1e292c8137b3381afeef0cf89bf5962d454f3d5c2998d7b1868128aff7f2b86d not found: ID does not exist" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.308090 4558 scope.go:117] "RemoveContainer" containerID="94d0f209a0ad2dfe1184a534c72a4a8436d2bad0d52e492f5bb3d629f0510a0b" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.308528 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"94d0f209a0ad2dfe1184a534c72a4a8436d2bad0d52e492f5bb3d629f0510a0b"} err="failed to get container status 
\"94d0f209a0ad2dfe1184a534c72a4a8436d2bad0d52e492f5bb3d629f0510a0b\": rpc error: code = NotFound desc = could not find container \"94d0f209a0ad2dfe1184a534c72a4a8436d2bad0d52e492f5bb3d629f0510a0b\": container with ID starting with 94d0f209a0ad2dfe1184a534c72a4a8436d2bad0d52e492f5bb3d629f0510a0b not found: ID does not exist" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.308607 4558 scope.go:117] "RemoveContainer" containerID="76d0657e5ac88a9f86d16aa2cf66e4f05523e093af77132e101ac0848cd7d94d" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.308966 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"76d0657e5ac88a9f86d16aa2cf66e4f05523e093af77132e101ac0848cd7d94d"} err="failed to get container status \"76d0657e5ac88a9f86d16aa2cf66e4f05523e093af77132e101ac0848cd7d94d\": rpc error: code = NotFound desc = could not find container \"76d0657e5ac88a9f86d16aa2cf66e4f05523e093af77132e101ac0848cd7d94d\": container with ID starting with 76d0657e5ac88a9f86d16aa2cf66e4f05523e093af77132e101ac0848cd7d94d not found: ID does not exist" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.325371 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/88926ba7-3b09-4f2d-b021-562d79c8f122-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.325395 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/88926ba7-3b09-4f2d-b021-562d79c8f122-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.325427 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/88926ba7-3b09-4f2d-b021-562d79c8f122-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.325440 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bqjws\" (UniqueName: \"kubernetes.io/projected/88926ba7-3b09-4f2d-b021-562d79c8f122-kube-api-access-bqjws\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.333798 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/88926ba7-3b09-4f2d-b021-562d79c8f122-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "88926ba7-3b09-4f2d-b021-562d79c8f122" (UID: "88926ba7-3b09-4f2d-b021-562d79c8f122"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.346364 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/88926ba7-3b09-4f2d-b021-562d79c8f122-config-data" (OuterVolumeSpecName: "config-data") pod "88926ba7-3b09-4f2d-b021-562d79c8f122" (UID: "88926ba7-3b09-4f2d-b021-562d79c8f122"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.427823 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/88926ba7-3b09-4f2d-b021-562d79c8f122-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.427868 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/88926ba7-3b09-4f2d-b021-562d79c8f122-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.514265 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.523527 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.531007 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:56:08 crc kubenswrapper[4558]: E0120 17:56:08.531561 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="88926ba7-3b09-4f2d-b021-562d79c8f122" containerName="ceilometer-notification-agent" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.531625 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="88926ba7-3b09-4f2d-b021-562d79c8f122" containerName="ceilometer-notification-agent" Jan 20 17:56:08 crc kubenswrapper[4558]: E0120 17:56:08.531679 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="88926ba7-3b09-4f2d-b021-562d79c8f122" containerName="proxy-httpd" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.531723 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="88926ba7-3b09-4f2d-b021-562d79c8f122" containerName="proxy-httpd" Jan 20 17:56:08 crc kubenswrapper[4558]: E0120 17:56:08.531803 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="88926ba7-3b09-4f2d-b021-562d79c8f122" containerName="sg-core" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.531850 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="88926ba7-3b09-4f2d-b021-562d79c8f122" containerName="sg-core" Jan 20 17:56:08 crc kubenswrapper[4558]: E0120 17:56:08.531907 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="88926ba7-3b09-4f2d-b021-562d79c8f122" containerName="ceilometer-central-agent" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.531958 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="88926ba7-3b09-4f2d-b021-562d79c8f122" containerName="ceilometer-central-agent" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.532185 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="88926ba7-3b09-4f2d-b021-562d79c8f122" containerName="ceilometer-central-agent" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.532263 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="88926ba7-3b09-4f2d-b021-562d79c8f122" containerName="ceilometer-notification-agent" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.532322 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="88926ba7-3b09-4f2d-b021-562d79c8f122" containerName="proxy-httpd" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.532371 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="88926ba7-3b09-4f2d-b021-562d79c8f122" containerName="sg-core" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 
17:56:08.534086 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.535677 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.535816 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.548495 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.575738 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="88926ba7-3b09-4f2d-b021-562d79c8f122" path="/var/lib/kubelet/pods/88926ba7-3b09-4f2d-b021-562d79c8f122/volumes" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.633045 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed169196-847d-4bea-93a5-2be90c9ebbf2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ed169196-847d-4bea-93a5-2be90c9ebbf2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.633150 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed169196-847d-4bea-93a5-2be90c9ebbf2-config-data\") pod \"ceilometer-0\" (UID: \"ed169196-847d-4bea-93a5-2be90c9ebbf2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.633200 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ed169196-847d-4bea-93a5-2be90c9ebbf2-run-httpd\") pod \"ceilometer-0\" (UID: \"ed169196-847d-4bea-93a5-2be90c9ebbf2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.633244 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ed169196-847d-4bea-93a5-2be90c9ebbf2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ed169196-847d-4bea-93a5-2be90c9ebbf2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.633278 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pqgdl\" (UniqueName: \"kubernetes.io/projected/ed169196-847d-4bea-93a5-2be90c9ebbf2-kube-api-access-pqgdl\") pod \"ceilometer-0\" (UID: \"ed169196-847d-4bea-93a5-2be90c9ebbf2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.633313 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ed169196-847d-4bea-93a5-2be90c9ebbf2-scripts\") pod \"ceilometer-0\" (UID: \"ed169196-847d-4bea-93a5-2be90c9ebbf2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.633346 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ed169196-847d-4bea-93a5-2be90c9ebbf2-log-httpd\") pod \"ceilometer-0\" (UID: \"ed169196-847d-4bea-93a5-2be90c9ebbf2\") " 
pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.734969 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed169196-847d-4bea-93a5-2be90c9ebbf2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ed169196-847d-4bea-93a5-2be90c9ebbf2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.735047 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed169196-847d-4bea-93a5-2be90c9ebbf2-config-data\") pod \"ceilometer-0\" (UID: \"ed169196-847d-4bea-93a5-2be90c9ebbf2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.735070 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ed169196-847d-4bea-93a5-2be90c9ebbf2-run-httpd\") pod \"ceilometer-0\" (UID: \"ed169196-847d-4bea-93a5-2be90c9ebbf2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.735149 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ed169196-847d-4bea-93a5-2be90c9ebbf2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ed169196-847d-4bea-93a5-2be90c9ebbf2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.735195 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pqgdl\" (UniqueName: \"kubernetes.io/projected/ed169196-847d-4bea-93a5-2be90c9ebbf2-kube-api-access-pqgdl\") pod \"ceilometer-0\" (UID: \"ed169196-847d-4bea-93a5-2be90c9ebbf2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.735227 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ed169196-847d-4bea-93a5-2be90c9ebbf2-scripts\") pod \"ceilometer-0\" (UID: \"ed169196-847d-4bea-93a5-2be90c9ebbf2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.735253 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ed169196-847d-4bea-93a5-2be90c9ebbf2-log-httpd\") pod \"ceilometer-0\" (UID: \"ed169196-847d-4bea-93a5-2be90c9ebbf2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.736293 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ed169196-847d-4bea-93a5-2be90c9ebbf2-run-httpd\") pod \"ceilometer-0\" (UID: \"ed169196-847d-4bea-93a5-2be90c9ebbf2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.736471 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ed169196-847d-4bea-93a5-2be90c9ebbf2-log-httpd\") pod \"ceilometer-0\" (UID: \"ed169196-847d-4bea-93a5-2be90c9ebbf2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.739097 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed169196-847d-4bea-93a5-2be90c9ebbf2-config-data\") pod 
\"ceilometer-0\" (UID: \"ed169196-847d-4bea-93a5-2be90c9ebbf2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.739413 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ed169196-847d-4bea-93a5-2be90c9ebbf2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ed169196-847d-4bea-93a5-2be90c9ebbf2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.741266 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ed169196-847d-4bea-93a5-2be90c9ebbf2-scripts\") pod \"ceilometer-0\" (UID: \"ed169196-847d-4bea-93a5-2be90c9ebbf2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.741859 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed169196-847d-4bea-93a5-2be90c9ebbf2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ed169196-847d-4bea-93a5-2be90c9ebbf2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.750824 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pqgdl\" (UniqueName: \"kubernetes.io/projected/ed169196-847d-4bea-93a5-2be90c9ebbf2-kube-api-access-pqgdl\") pod \"ceilometer-0\" (UID: \"ed169196-847d-4bea-93a5-2be90c9ebbf2\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:08 crc kubenswrapper[4558]: I0120 17:56:08.850819 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:09 crc kubenswrapper[4558]: I0120 17:56:09.294855 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:56:09 crc kubenswrapper[4558]: W0120 17:56:09.306076 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poded169196_847d_4bea_93a5_2be90c9ebbf2.slice/crio-1d1109d781af1600620e72f2681818c8a335d65ac4e578ad40673b00ba9ff4be WatchSource:0}: Error finding container 1d1109d781af1600620e72f2681818c8a335d65ac4e578ad40673b00ba9ff4be: Status 404 returned error can't find the container with id 1d1109d781af1600620e72f2681818c8a335d65ac4e578ad40673b00ba9ff4be Jan 20 17:56:10 crc kubenswrapper[4558]: I0120 17:56:10.226263 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"ed169196-847d-4bea-93a5-2be90c9ebbf2","Type":"ContainerStarted","Data":"1d1109d781af1600620e72f2681818c8a335d65ac4e578ad40673b00ba9ff4be"} Jan 20 17:56:10 crc kubenswrapper[4558]: I0120 17:56:10.228914 4558 generic.go:334] "Generic (PLEG): container finished" podID="e28cd4f4-bcf1-4a99-8161-5675a4dc6cb9" containerID="861bd48be8d8402e91402ef59c9aafef2f48435302e1a32fdbaaea6e18815246" exitCode=0 Jan 20 17:56:10 crc kubenswrapper[4558]: I0120 17:56:10.228971 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-rznhk" event={"ID":"e28cd4f4-bcf1-4a99-8161-5675a4dc6cb9","Type":"ContainerDied","Data":"861bd48be8d8402e91402ef59c9aafef2f48435302e1a32fdbaaea6e18815246"} Jan 20 17:56:11 crc kubenswrapper[4558]: I0120 17:56:11.243968 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" 
event={"ID":"ed169196-847d-4bea-93a5-2be90c9ebbf2","Type":"ContainerStarted","Data":"8c7de8e01ffa5d59a06f6013ab585e02b47f48035ebd16bf2e53063b16a6a414"} Jan 20 17:56:11 crc kubenswrapper[4558]: I0120 17:56:11.244394 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"ed169196-847d-4bea-93a5-2be90c9ebbf2","Type":"ContainerStarted","Data":"d5f2a4b689a5694e8dd8d4bbd173c03fe1b789746e028f168613a9770400ad44"} Jan 20 17:56:11 crc kubenswrapper[4558]: I0120 17:56:11.540563 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-rznhk" Jan 20 17:56:11 crc kubenswrapper[4558]: I0120 17:56:11.699867 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e28cd4f4-bcf1-4a99-8161-5675a4dc6cb9-scripts\") pod \"e28cd4f4-bcf1-4a99-8161-5675a4dc6cb9\" (UID: \"e28cd4f4-bcf1-4a99-8161-5675a4dc6cb9\") " Jan 20 17:56:11 crc kubenswrapper[4558]: I0120 17:56:11.700259 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2b4nb\" (UniqueName: \"kubernetes.io/projected/e28cd4f4-bcf1-4a99-8161-5675a4dc6cb9-kube-api-access-2b4nb\") pod \"e28cd4f4-bcf1-4a99-8161-5675a4dc6cb9\" (UID: \"e28cd4f4-bcf1-4a99-8161-5675a4dc6cb9\") " Jan 20 17:56:11 crc kubenswrapper[4558]: I0120 17:56:11.700281 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e28cd4f4-bcf1-4a99-8161-5675a4dc6cb9-config-data\") pod \"e28cd4f4-bcf1-4a99-8161-5675a4dc6cb9\" (UID: \"e28cd4f4-bcf1-4a99-8161-5675a4dc6cb9\") " Jan 20 17:56:11 crc kubenswrapper[4558]: I0120 17:56:11.700536 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e28cd4f4-bcf1-4a99-8161-5675a4dc6cb9-combined-ca-bundle\") pod \"e28cd4f4-bcf1-4a99-8161-5675a4dc6cb9\" (UID: \"e28cd4f4-bcf1-4a99-8161-5675a4dc6cb9\") " Jan 20 17:56:11 crc kubenswrapper[4558]: I0120 17:56:11.705402 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e28cd4f4-bcf1-4a99-8161-5675a4dc6cb9-scripts" (OuterVolumeSpecName: "scripts") pod "e28cd4f4-bcf1-4a99-8161-5675a4dc6cb9" (UID: "e28cd4f4-bcf1-4a99-8161-5675a4dc6cb9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:56:11 crc kubenswrapper[4558]: I0120 17:56:11.706415 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e28cd4f4-bcf1-4a99-8161-5675a4dc6cb9-kube-api-access-2b4nb" (OuterVolumeSpecName: "kube-api-access-2b4nb") pod "e28cd4f4-bcf1-4a99-8161-5675a4dc6cb9" (UID: "e28cd4f4-bcf1-4a99-8161-5675a4dc6cb9"). InnerVolumeSpecName "kube-api-access-2b4nb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:56:11 crc kubenswrapper[4558]: I0120 17:56:11.725952 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e28cd4f4-bcf1-4a99-8161-5675a4dc6cb9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e28cd4f4-bcf1-4a99-8161-5675a4dc6cb9" (UID: "e28cd4f4-bcf1-4a99-8161-5675a4dc6cb9"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:56:11 crc kubenswrapper[4558]: I0120 17:56:11.738563 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e28cd4f4-bcf1-4a99-8161-5675a4dc6cb9-config-data" (OuterVolumeSpecName: "config-data") pod "e28cd4f4-bcf1-4a99-8161-5675a4dc6cb9" (UID: "e28cd4f4-bcf1-4a99-8161-5675a4dc6cb9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:56:11 crc kubenswrapper[4558]: I0120 17:56:11.805609 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e28cd4f4-bcf1-4a99-8161-5675a4dc6cb9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:11 crc kubenswrapper[4558]: I0120 17:56:11.805641 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e28cd4f4-bcf1-4a99-8161-5675a4dc6cb9-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:11 crc kubenswrapper[4558]: I0120 17:56:11.805659 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2b4nb\" (UniqueName: \"kubernetes.io/projected/e28cd4f4-bcf1-4a99-8161-5675a4dc6cb9-kube-api-access-2b4nb\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:11 crc kubenswrapper[4558]: I0120 17:56:11.805674 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e28cd4f4-bcf1-4a99-8161-5675a4dc6cb9-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:12 crc kubenswrapper[4558]: I0120 17:56:12.257219 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"ed169196-847d-4bea-93a5-2be90c9ebbf2","Type":"ContainerStarted","Data":"e21f248cc1c822ee5a6940162002a806026271c0bbcfb0f28ecb1bf3047cee71"} Jan 20 17:56:12 crc kubenswrapper[4558]: I0120 17:56:12.259709 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-rznhk" event={"ID":"e28cd4f4-bcf1-4a99-8161-5675a4dc6cb9","Type":"ContainerDied","Data":"939d093f5ce07d62440e6a74d0cb19f163bbbda2a269065b1736f58f2f09b70e"} Jan 20 17:56:12 crc kubenswrapper[4558]: I0120 17:56:12.259789 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="939d093f5ce07d62440e6a74d0cb19f163bbbda2a269065b1736f58f2f09b70e" Jan 20 17:56:12 crc kubenswrapper[4558]: I0120 17:56:12.259901 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-db-sync-rznhk" Jan 20 17:56:12 crc kubenswrapper[4558]: I0120 17:56:12.316937 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:56:12 crc kubenswrapper[4558]: E0120 17:56:12.317370 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e28cd4f4-bcf1-4a99-8161-5675a4dc6cb9" containerName="nova-cell0-conductor-db-sync" Jan 20 17:56:12 crc kubenswrapper[4558]: I0120 17:56:12.317389 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e28cd4f4-bcf1-4a99-8161-5675a4dc6cb9" containerName="nova-cell0-conductor-db-sync" Jan 20 17:56:12 crc kubenswrapper[4558]: I0120 17:56:12.317552 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e28cd4f4-bcf1-4a99-8161-5675a4dc6cb9" containerName="nova-cell0-conductor-db-sync" Jan 20 17:56:12 crc kubenswrapper[4558]: I0120 17:56:12.318147 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:56:12 crc kubenswrapper[4558]: I0120 17:56:12.319577 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-nova-dockercfg-s7p6w" Jan 20 17:56:12 crc kubenswrapper[4558]: I0120 17:56:12.321388 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-conductor-config-data" Jan 20 17:56:12 crc kubenswrapper[4558]: I0120 17:56:12.329966 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:56:12 crc kubenswrapper[4558]: I0120 17:56:12.418807 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xgl6v\" (UniqueName: \"kubernetes.io/projected/8a1b8bc3-6d56-42f1-99a2-f80ff914e3d8-kube-api-access-xgl6v\") pod \"nova-cell0-conductor-0\" (UID: \"8a1b8bc3-6d56-42f1-99a2-f80ff914e3d8\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:56:12 crc kubenswrapper[4558]: I0120 17:56:12.418981 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a1b8bc3-6d56-42f1-99a2-f80ff914e3d8-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"8a1b8bc3-6d56-42f1-99a2-f80ff914e3d8\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:56:12 crc kubenswrapper[4558]: I0120 17:56:12.419222 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a1b8bc3-6d56-42f1-99a2-f80ff914e3d8-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"8a1b8bc3-6d56-42f1-99a2-f80ff914e3d8\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:56:12 crc kubenswrapper[4558]: I0120 17:56:12.520859 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xgl6v\" (UniqueName: \"kubernetes.io/projected/8a1b8bc3-6d56-42f1-99a2-f80ff914e3d8-kube-api-access-xgl6v\") pod \"nova-cell0-conductor-0\" (UID: \"8a1b8bc3-6d56-42f1-99a2-f80ff914e3d8\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:56:12 crc kubenswrapper[4558]: I0120 17:56:12.520936 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a1b8bc3-6d56-42f1-99a2-f80ff914e3d8-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"8a1b8bc3-6d56-42f1-99a2-f80ff914e3d8\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:56:12 crc kubenswrapper[4558]: I0120 17:56:12.521747 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a1b8bc3-6d56-42f1-99a2-f80ff914e3d8-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"8a1b8bc3-6d56-42f1-99a2-f80ff914e3d8\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:56:12 crc kubenswrapper[4558]: I0120 17:56:12.529063 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a1b8bc3-6d56-42f1-99a2-f80ff914e3d8-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"8a1b8bc3-6d56-42f1-99a2-f80ff914e3d8\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:56:12 crc kubenswrapper[4558]: I0120 17:56:12.536429 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a1b8bc3-6d56-42f1-99a2-f80ff914e3d8-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"8a1b8bc3-6d56-42f1-99a2-f80ff914e3d8\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:56:12 crc kubenswrapper[4558]: I0120 17:56:12.537582 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xgl6v\" (UniqueName: \"kubernetes.io/projected/8a1b8bc3-6d56-42f1-99a2-f80ff914e3d8-kube-api-access-xgl6v\") pod \"nova-cell0-conductor-0\" (UID: \"8a1b8bc3-6d56-42f1-99a2-f80ff914e3d8\") " pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:56:12 crc kubenswrapper[4558]: I0120 17:56:12.637229 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:56:12 crc kubenswrapper[4558]: I0120 17:56:12.761361 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:56:13 crc kubenswrapper[4558]: W0120 17:56:13.086984 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8a1b8bc3_6d56_42f1_99a2_f80ff914e3d8.slice/crio-42794666edb1bd11fa81bab1b80470a2797bd6f129b84f293af5f49df6003066 WatchSource:0}: Error finding container 42794666edb1bd11fa81bab1b80470a2797bd6f129b84f293af5f49df6003066: Status 404 returned error can't find the container with id 42794666edb1bd11fa81bab1b80470a2797bd6f129b84f293af5f49df6003066 Jan 20 17:56:13 crc kubenswrapper[4558]: I0120 17:56:13.092358 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:56:13 crc kubenswrapper[4558]: I0120 17:56:13.270728 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"8a1b8bc3-6d56-42f1-99a2-f80ff914e3d8","Type":"ContainerStarted","Data":"42794666edb1bd11fa81bab1b80470a2797bd6f129b84f293af5f49df6003066"} Jan 20 17:56:14 crc kubenswrapper[4558]: I0120 17:56:14.283839 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"8a1b8bc3-6d56-42f1-99a2-f80ff914e3d8","Type":"ContainerStarted","Data":"5772b7b876d3a0d68ec8e5a68aa617182f064469dac3fc22b2b6ef8493342da2"} Jan 20 17:56:14 crc kubenswrapper[4558]: I0120 17:56:14.284943 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:56:14 crc kubenswrapper[4558]: I0120 17:56:14.286892 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"ed169196-847d-4bea-93a5-2be90c9ebbf2","Type":"ContainerStarted","Data":"2f0663ab26eadd2e2db8023c4c27e8039d69ccee3887942c437a89d40732248d"} Jan 20 17:56:14 crc kubenswrapper[4558]: I0120 17:56:14.287223 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:14 crc kubenswrapper[4558]: I0120 17:56:14.287232 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="ed169196-847d-4bea-93a5-2be90c9ebbf2" containerName="proxy-httpd" containerID="cri-o://2f0663ab26eadd2e2db8023c4c27e8039d69ccee3887942c437a89d40732248d" gracePeriod=30 Jan 20 17:56:14 crc kubenswrapper[4558]: I0120 17:56:14.287246 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" 
podUID="ed169196-847d-4bea-93a5-2be90c9ebbf2" containerName="sg-core" containerID="cri-o://e21f248cc1c822ee5a6940162002a806026271c0bbcfb0f28ecb1bf3047cee71" gracePeriod=30 Jan 20 17:56:14 crc kubenswrapper[4558]: I0120 17:56:14.287318 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="ed169196-847d-4bea-93a5-2be90c9ebbf2" containerName="ceilometer-notification-agent" containerID="cri-o://8c7de8e01ffa5d59a06f6013ab585e02b47f48035ebd16bf2e53063b16a6a414" gracePeriod=30 Jan 20 17:56:14 crc kubenswrapper[4558]: I0120 17:56:14.287210 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="ed169196-847d-4bea-93a5-2be90c9ebbf2" containerName="ceilometer-central-agent" containerID="cri-o://d5f2a4b689a5694e8dd8d4bbd173c03fe1b789746e028f168613a9770400ad44" gracePeriod=30 Jan 20 17:56:14 crc kubenswrapper[4558]: I0120 17:56:14.312367 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podStartSLOduration=2.312346785 podStartE2EDuration="2.312346785s" podCreationTimestamp="2026-01-20 17:56:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:56:14.30375792 +0000 UTC m=+4468.064095887" watchObservedRunningTime="2026-01-20 17:56:14.312346785 +0000 UTC m=+4468.072684752" Jan 20 17:56:14 crc kubenswrapper[4558]: I0120 17:56:14.324265 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=2.56953629 podStartE2EDuration="6.324253977s" podCreationTimestamp="2026-01-20 17:56:08 +0000 UTC" firstStartedPulling="2026-01-20 17:56:09.308581525 +0000 UTC m=+4463.068919493" lastFinishedPulling="2026-01-20 17:56:13.063299213 +0000 UTC m=+4466.823637180" observedRunningTime="2026-01-20 17:56:14.32121774 +0000 UTC m=+4468.081555707" watchObservedRunningTime="2026-01-20 17:56:14.324253977 +0000 UTC m=+4468.084591944" Jan 20 17:56:14 crc kubenswrapper[4558]: I0120 17:56:14.403606 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:56:14 crc kubenswrapper[4558]: I0120 17:56:14.403663 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:56:14 crc kubenswrapper[4558]: I0120 17:56:14.436584 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:56:14 crc kubenswrapper[4558]: I0120 17:56:14.439269 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:56:15 crc kubenswrapper[4558]: I0120 17:56:15.301643 4558 generic.go:334] "Generic (PLEG): container finished" podID="ed169196-847d-4bea-93a5-2be90c9ebbf2" containerID="2f0663ab26eadd2e2db8023c4c27e8039d69ccee3887942c437a89d40732248d" exitCode=0 Jan 20 17:56:15 crc kubenswrapper[4558]: I0120 17:56:15.301678 4558 generic.go:334] "Generic (PLEG): container finished" podID="ed169196-847d-4bea-93a5-2be90c9ebbf2" containerID="e21f248cc1c822ee5a6940162002a806026271c0bbcfb0f28ecb1bf3047cee71" exitCode=2 Jan 20 17:56:15 crc kubenswrapper[4558]: I0120 17:56:15.301685 4558 generic.go:334] "Generic (PLEG): container finished" 
podID="ed169196-847d-4bea-93a5-2be90c9ebbf2" containerID="8c7de8e01ffa5d59a06f6013ab585e02b47f48035ebd16bf2e53063b16a6a414" exitCode=0 Jan 20 17:56:15 crc kubenswrapper[4558]: I0120 17:56:15.302297 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"ed169196-847d-4bea-93a5-2be90c9ebbf2","Type":"ContainerDied","Data":"2f0663ab26eadd2e2db8023c4c27e8039d69ccee3887942c437a89d40732248d"} Jan 20 17:56:15 crc kubenswrapper[4558]: I0120 17:56:15.302403 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"ed169196-847d-4bea-93a5-2be90c9ebbf2","Type":"ContainerDied","Data":"e21f248cc1c822ee5a6940162002a806026271c0bbcfb0f28ecb1bf3047cee71"} Jan 20 17:56:15 crc kubenswrapper[4558]: I0120 17:56:15.302421 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"ed169196-847d-4bea-93a5-2be90c9ebbf2","Type":"ContainerDied","Data":"8c7de8e01ffa5d59a06f6013ab585e02b47f48035ebd16bf2e53063b16a6a414"} Jan 20 17:56:15 crc kubenswrapper[4558]: I0120 17:56:15.302791 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:56:15 crc kubenswrapper[4558]: I0120 17:56:15.302826 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:56:15 crc kubenswrapper[4558]: I0120 17:56:15.420914 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:56:15 crc kubenswrapper[4558]: I0120 17:56:15.420981 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:56:15 crc kubenswrapper[4558]: I0120 17:56:15.455968 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:56:15 crc kubenswrapper[4558]: I0120 17:56:15.458113 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:56:16 crc kubenswrapper[4558]: I0120 17:56:16.313032 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:56:16 crc kubenswrapper[4558]: I0120 17:56:16.314101 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:56:16 crc kubenswrapper[4558]: I0120 17:56:16.998101 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:56:16 crc kubenswrapper[4558]: I0120 17:56:16.999541 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:56:18 crc kubenswrapper[4558]: I0120 17:56:18.043687 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:56:18 crc kubenswrapper[4558]: I0120 17:56:18.049989 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:56:18 crc kubenswrapper[4558]: I0120 17:56:18.566881 4558 scope.go:117] "RemoveContainer" 
containerID="4ea7fa61d8b21007a044345acec89fcebe56c2921cf0f76ff2002f44bdc88ede" Jan 20 17:56:18 crc kubenswrapper[4558]: E0120 17:56:18.567344 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 17:56:20.118112 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 17:56:20.194461 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ed169196-847d-4bea-93a5-2be90c9ebbf2-run-httpd\") pod \"ed169196-847d-4bea-93a5-2be90c9ebbf2\" (UID: \"ed169196-847d-4bea-93a5-2be90c9ebbf2\") " Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 17:56:20.194556 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ed169196-847d-4bea-93a5-2be90c9ebbf2-log-httpd\") pod \"ed169196-847d-4bea-93a5-2be90c9ebbf2\" (UID: \"ed169196-847d-4bea-93a5-2be90c9ebbf2\") " Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 17:56:20.194588 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed169196-847d-4bea-93a5-2be90c9ebbf2-combined-ca-bundle\") pod \"ed169196-847d-4bea-93a5-2be90c9ebbf2\" (UID: \"ed169196-847d-4bea-93a5-2be90c9ebbf2\") " Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 17:56:20.194688 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ed169196-847d-4bea-93a5-2be90c9ebbf2-sg-core-conf-yaml\") pod \"ed169196-847d-4bea-93a5-2be90c9ebbf2\" (UID: \"ed169196-847d-4bea-93a5-2be90c9ebbf2\") " Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 17:56:20.194715 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ed169196-847d-4bea-93a5-2be90c9ebbf2-scripts\") pod \"ed169196-847d-4bea-93a5-2be90c9ebbf2\" (UID: \"ed169196-847d-4bea-93a5-2be90c9ebbf2\") " Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 17:56:20.194783 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pqgdl\" (UniqueName: \"kubernetes.io/projected/ed169196-847d-4bea-93a5-2be90c9ebbf2-kube-api-access-pqgdl\") pod \"ed169196-847d-4bea-93a5-2be90c9ebbf2\" (UID: \"ed169196-847d-4bea-93a5-2be90c9ebbf2\") " Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 17:56:20.194801 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed169196-847d-4bea-93a5-2be90c9ebbf2-config-data\") pod \"ed169196-847d-4bea-93a5-2be90c9ebbf2\" (UID: \"ed169196-847d-4bea-93a5-2be90c9ebbf2\") " Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 17:56:20.195059 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ed169196-847d-4bea-93a5-2be90c9ebbf2-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "ed169196-847d-4bea-93a5-2be90c9ebbf2" (UID: "ed169196-847d-4bea-93a5-2be90c9ebbf2"). 
InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 17:56:20.195418 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ed169196-847d-4bea-93a5-2be90c9ebbf2-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "ed169196-847d-4bea-93a5-2be90c9ebbf2" (UID: "ed169196-847d-4bea-93a5-2be90c9ebbf2"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 17:56:20.195729 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ed169196-847d-4bea-93a5-2be90c9ebbf2-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 17:56:20.195748 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ed169196-847d-4bea-93a5-2be90c9ebbf2-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 17:56:20.200070 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ed169196-847d-4bea-93a5-2be90c9ebbf2-scripts" (OuterVolumeSpecName: "scripts") pod "ed169196-847d-4bea-93a5-2be90c9ebbf2" (UID: "ed169196-847d-4bea-93a5-2be90c9ebbf2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 17:56:20.201788 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ed169196-847d-4bea-93a5-2be90c9ebbf2-kube-api-access-pqgdl" (OuterVolumeSpecName: "kube-api-access-pqgdl") pod "ed169196-847d-4bea-93a5-2be90c9ebbf2" (UID: "ed169196-847d-4bea-93a5-2be90c9ebbf2"). InnerVolumeSpecName "kube-api-access-pqgdl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 17:56:20.217143 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ed169196-847d-4bea-93a5-2be90c9ebbf2-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "ed169196-847d-4bea-93a5-2be90c9ebbf2" (UID: "ed169196-847d-4bea-93a5-2be90c9ebbf2"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 17:56:20.243992 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ed169196-847d-4bea-93a5-2be90c9ebbf2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ed169196-847d-4bea-93a5-2be90c9ebbf2" (UID: "ed169196-847d-4bea-93a5-2be90c9ebbf2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 17:56:20.258344 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ed169196-847d-4bea-93a5-2be90c9ebbf2-config-data" (OuterVolumeSpecName: "config-data") pod "ed169196-847d-4bea-93a5-2be90c9ebbf2" (UID: "ed169196-847d-4bea-93a5-2be90c9ebbf2"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 17:56:20.298160 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pqgdl\" (UniqueName: \"kubernetes.io/projected/ed169196-847d-4bea-93a5-2be90c9ebbf2-kube-api-access-pqgdl\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 17:56:20.298205 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed169196-847d-4bea-93a5-2be90c9ebbf2-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 17:56:20.298223 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed169196-847d-4bea-93a5-2be90c9ebbf2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 17:56:20.298237 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ed169196-847d-4bea-93a5-2be90c9ebbf2-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 17:56:20.298249 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ed169196-847d-4bea-93a5-2be90c9ebbf2-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 17:56:20.363484 4558 generic.go:334] "Generic (PLEG): container finished" podID="ed169196-847d-4bea-93a5-2be90c9ebbf2" containerID="d5f2a4b689a5694e8dd8d4bbd173c03fe1b789746e028f168613a9770400ad44" exitCode=0 Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 17:56:20.363586 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 17:56:20.363728 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"ed169196-847d-4bea-93a5-2be90c9ebbf2","Type":"ContainerDied","Data":"d5f2a4b689a5694e8dd8d4bbd173c03fe1b789746e028f168613a9770400ad44"} Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 17:56:20.363864 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"ed169196-847d-4bea-93a5-2be90c9ebbf2","Type":"ContainerDied","Data":"1d1109d781af1600620e72f2681818c8a335d65ac4e578ad40673b00ba9ff4be"} Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 17:56:20.363944 4558 scope.go:117] "RemoveContainer" containerID="2f0663ab26eadd2e2db8023c4c27e8039d69ccee3887942c437a89d40732248d" Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 17:56:20.383269 4558 scope.go:117] "RemoveContainer" containerID="e21f248cc1c822ee5a6940162002a806026271c0bbcfb0f28ecb1bf3047cee71" Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 17:56:20.396012 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 17:56:20.418284 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 17:56:20.423656 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:56:20 crc kubenswrapper[4558]: E0120 17:56:20.423999 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed169196-847d-4bea-93a5-2be90c9ebbf2" containerName="ceilometer-notification-agent" Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 
17:56:20.424018 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed169196-847d-4bea-93a5-2be90c9ebbf2" containerName="ceilometer-notification-agent" Jan 20 17:56:20 crc kubenswrapper[4558]: E0120 17:56:20.424052 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed169196-847d-4bea-93a5-2be90c9ebbf2" containerName="ceilometer-central-agent" Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 17:56:20.424060 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed169196-847d-4bea-93a5-2be90c9ebbf2" containerName="ceilometer-central-agent" Jan 20 17:56:20 crc kubenswrapper[4558]: E0120 17:56:20.424068 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed169196-847d-4bea-93a5-2be90c9ebbf2" containerName="sg-core" Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 17:56:20.424073 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed169196-847d-4bea-93a5-2be90c9ebbf2" containerName="sg-core" Jan 20 17:56:20 crc kubenswrapper[4558]: E0120 17:56:20.424099 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed169196-847d-4bea-93a5-2be90c9ebbf2" containerName="proxy-httpd" Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 17:56:20.424106 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed169196-847d-4bea-93a5-2be90c9ebbf2" containerName="proxy-httpd" Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 17:56:20.424272 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed169196-847d-4bea-93a5-2be90c9ebbf2" containerName="ceilometer-notification-agent" Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 17:56:20.424297 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed169196-847d-4bea-93a5-2be90c9ebbf2" containerName="sg-core" Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 17:56:20.424311 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed169196-847d-4bea-93a5-2be90c9ebbf2" containerName="proxy-httpd" Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 17:56:20.424326 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed169196-847d-4bea-93a5-2be90c9ebbf2" containerName="ceilometer-central-agent" Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 17:56:20.424905 4558 scope.go:117] "RemoveContainer" containerID="8c7de8e01ffa5d59a06f6013ab585e02b47f48035ebd16bf2e53063b16a6a414" Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 17:56:20.425681 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 17:56:20.428863 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 17:56:20.429061 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 17:56:20.444213 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 17:56:20.449657 4558 scope.go:117] "RemoveContainer" containerID="d5f2a4b689a5694e8dd8d4bbd173c03fe1b789746e028f168613a9770400ad44" Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 17:56:20.467708 4558 scope.go:117] "RemoveContainer" containerID="2f0663ab26eadd2e2db8023c4c27e8039d69ccee3887942c437a89d40732248d" Jan 20 17:56:20 crc kubenswrapper[4558]: E0120 17:56:20.468207 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2f0663ab26eadd2e2db8023c4c27e8039d69ccee3887942c437a89d40732248d\": container with ID starting with 2f0663ab26eadd2e2db8023c4c27e8039d69ccee3887942c437a89d40732248d not found: ID does not exist" containerID="2f0663ab26eadd2e2db8023c4c27e8039d69ccee3887942c437a89d40732248d" Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 17:56:20.468256 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2f0663ab26eadd2e2db8023c4c27e8039d69ccee3887942c437a89d40732248d"} err="failed to get container status \"2f0663ab26eadd2e2db8023c4c27e8039d69ccee3887942c437a89d40732248d\": rpc error: code = NotFound desc = could not find container \"2f0663ab26eadd2e2db8023c4c27e8039d69ccee3887942c437a89d40732248d\": container with ID starting with 2f0663ab26eadd2e2db8023c4c27e8039d69ccee3887942c437a89d40732248d not found: ID does not exist" Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 17:56:20.468288 4558 scope.go:117] "RemoveContainer" containerID="e21f248cc1c822ee5a6940162002a806026271c0bbcfb0f28ecb1bf3047cee71" Jan 20 17:56:20 crc kubenswrapper[4558]: E0120 17:56:20.468638 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e21f248cc1c822ee5a6940162002a806026271c0bbcfb0f28ecb1bf3047cee71\": container with ID starting with e21f248cc1c822ee5a6940162002a806026271c0bbcfb0f28ecb1bf3047cee71 not found: ID does not exist" containerID="e21f248cc1c822ee5a6940162002a806026271c0bbcfb0f28ecb1bf3047cee71" Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 17:56:20.468736 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e21f248cc1c822ee5a6940162002a806026271c0bbcfb0f28ecb1bf3047cee71"} err="failed to get container status \"e21f248cc1c822ee5a6940162002a806026271c0bbcfb0f28ecb1bf3047cee71\": rpc error: code = NotFound desc = could not find container \"e21f248cc1c822ee5a6940162002a806026271c0bbcfb0f28ecb1bf3047cee71\": container with ID starting with e21f248cc1c822ee5a6940162002a806026271c0bbcfb0f28ecb1bf3047cee71 not found: ID does not exist" Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 17:56:20.468817 4558 scope.go:117] "RemoveContainer" containerID="8c7de8e01ffa5d59a06f6013ab585e02b47f48035ebd16bf2e53063b16a6a414" Jan 20 17:56:20 crc kubenswrapper[4558]: E0120 17:56:20.469143 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: 
code = NotFound desc = could not find container \"8c7de8e01ffa5d59a06f6013ab585e02b47f48035ebd16bf2e53063b16a6a414\": container with ID starting with 8c7de8e01ffa5d59a06f6013ab585e02b47f48035ebd16bf2e53063b16a6a414 not found: ID does not exist" containerID="8c7de8e01ffa5d59a06f6013ab585e02b47f48035ebd16bf2e53063b16a6a414" Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 17:56:20.469231 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8c7de8e01ffa5d59a06f6013ab585e02b47f48035ebd16bf2e53063b16a6a414"} err="failed to get container status \"8c7de8e01ffa5d59a06f6013ab585e02b47f48035ebd16bf2e53063b16a6a414\": rpc error: code = NotFound desc = could not find container \"8c7de8e01ffa5d59a06f6013ab585e02b47f48035ebd16bf2e53063b16a6a414\": container with ID starting with 8c7de8e01ffa5d59a06f6013ab585e02b47f48035ebd16bf2e53063b16a6a414 not found: ID does not exist" Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 17:56:20.469302 4558 scope.go:117] "RemoveContainer" containerID="d5f2a4b689a5694e8dd8d4bbd173c03fe1b789746e028f168613a9770400ad44" Jan 20 17:56:20 crc kubenswrapper[4558]: E0120 17:56:20.469618 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d5f2a4b689a5694e8dd8d4bbd173c03fe1b789746e028f168613a9770400ad44\": container with ID starting with d5f2a4b689a5694e8dd8d4bbd173c03fe1b789746e028f168613a9770400ad44 not found: ID does not exist" containerID="d5f2a4b689a5694e8dd8d4bbd173c03fe1b789746e028f168613a9770400ad44" Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 17:56:20.469691 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d5f2a4b689a5694e8dd8d4bbd173c03fe1b789746e028f168613a9770400ad44"} err="failed to get container status \"d5f2a4b689a5694e8dd8d4bbd173c03fe1b789746e028f168613a9770400ad44\": rpc error: code = NotFound desc = could not find container \"d5f2a4b689a5694e8dd8d4bbd173c03fe1b789746e028f168613a9770400ad44\": container with ID starting with d5f2a4b689a5694e8dd8d4bbd173c03fe1b789746e028f168613a9770400ad44 not found: ID does not exist" Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 17:56:20.502812 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/491a5173-ab6a-4abf-932d-93e7b4856b8b-run-httpd\") pod \"ceilometer-0\" (UID: \"491a5173-ab6a-4abf-932d-93e7b4856b8b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 17:56:20.503426 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/491a5173-ab6a-4abf-932d-93e7b4856b8b-log-httpd\") pod \"ceilometer-0\" (UID: \"491a5173-ab6a-4abf-932d-93e7b4856b8b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 17:56:20.503588 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/491a5173-ab6a-4abf-932d-93e7b4856b8b-scripts\") pod \"ceilometer-0\" (UID: \"491a5173-ab6a-4abf-932d-93e7b4856b8b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 17:56:20.503796 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/491a5173-ab6a-4abf-932d-93e7b4856b8b-config-data\") pod 
\"ceilometer-0\" (UID: \"491a5173-ab6a-4abf-932d-93e7b4856b8b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 17:56:20.504012 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/491a5173-ab6a-4abf-932d-93e7b4856b8b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"491a5173-ab6a-4abf-932d-93e7b4856b8b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 17:56:20.504147 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k4cfk\" (UniqueName: \"kubernetes.io/projected/491a5173-ab6a-4abf-932d-93e7b4856b8b-kube-api-access-k4cfk\") pod \"ceilometer-0\" (UID: \"491a5173-ab6a-4abf-932d-93e7b4856b8b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 17:56:20.504254 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/491a5173-ab6a-4abf-932d-93e7b4856b8b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"491a5173-ab6a-4abf-932d-93e7b4856b8b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 17:56:20.579831 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ed169196-847d-4bea-93a5-2be90c9ebbf2" path="/var/lib/kubelet/pods/ed169196-847d-4bea-93a5-2be90c9ebbf2/volumes" Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 17:56:20.606407 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/491a5173-ab6a-4abf-932d-93e7b4856b8b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"491a5173-ab6a-4abf-932d-93e7b4856b8b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 17:56:20.606515 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/491a5173-ab6a-4abf-932d-93e7b4856b8b-run-httpd\") pod \"ceilometer-0\" (UID: \"491a5173-ab6a-4abf-932d-93e7b4856b8b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 17:56:20.606547 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/491a5173-ab6a-4abf-932d-93e7b4856b8b-log-httpd\") pod \"ceilometer-0\" (UID: \"491a5173-ab6a-4abf-932d-93e7b4856b8b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 17:56:20.606583 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/491a5173-ab6a-4abf-932d-93e7b4856b8b-scripts\") pod \"ceilometer-0\" (UID: \"491a5173-ab6a-4abf-932d-93e7b4856b8b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 17:56:20.606640 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/491a5173-ab6a-4abf-932d-93e7b4856b8b-config-data\") pod \"ceilometer-0\" (UID: \"491a5173-ab6a-4abf-932d-93e7b4856b8b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 17:56:20.606717 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/491a5173-ab6a-4abf-932d-93e7b4856b8b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"491a5173-ab6a-4abf-932d-93e7b4856b8b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 17:56:20.606770 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k4cfk\" (UniqueName: \"kubernetes.io/projected/491a5173-ab6a-4abf-932d-93e7b4856b8b-kube-api-access-k4cfk\") pod \"ceilometer-0\" (UID: \"491a5173-ab6a-4abf-932d-93e7b4856b8b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 17:56:20.609882 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/491a5173-ab6a-4abf-932d-93e7b4856b8b-run-httpd\") pod \"ceilometer-0\" (UID: \"491a5173-ab6a-4abf-932d-93e7b4856b8b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 17:56:20.610015 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/491a5173-ab6a-4abf-932d-93e7b4856b8b-log-httpd\") pod \"ceilometer-0\" (UID: \"491a5173-ab6a-4abf-932d-93e7b4856b8b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 17:56:20.612238 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/491a5173-ab6a-4abf-932d-93e7b4856b8b-config-data\") pod \"ceilometer-0\" (UID: \"491a5173-ab6a-4abf-932d-93e7b4856b8b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 17:56:20.612425 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/491a5173-ab6a-4abf-932d-93e7b4856b8b-scripts\") pod \"ceilometer-0\" (UID: \"491a5173-ab6a-4abf-932d-93e7b4856b8b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 17:56:20.612524 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/491a5173-ab6a-4abf-932d-93e7b4856b8b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"491a5173-ab6a-4abf-932d-93e7b4856b8b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 17:56:20.614832 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/491a5173-ab6a-4abf-932d-93e7b4856b8b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"491a5173-ab6a-4abf-932d-93e7b4856b8b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 17:56:20.622303 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k4cfk\" (UniqueName: \"kubernetes.io/projected/491a5173-ab6a-4abf-932d-93e7b4856b8b-kube-api-access-k4cfk\") pod \"ceilometer-0\" (UID: \"491a5173-ab6a-4abf-932d-93e7b4856b8b\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:20 crc kubenswrapper[4558]: I0120 17:56:20.751358 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:21 crc kubenswrapper[4558]: I0120 17:56:21.203922 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:56:21 crc kubenswrapper[4558]: I0120 17:56:21.375839 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"491a5173-ab6a-4abf-932d-93e7b4856b8b","Type":"ContainerStarted","Data":"3fa1517b6de81cc3b5a18c2d8656e7cb280d899df37a40bb8cf2e29849adebff"} Jan 20 17:56:22 crc kubenswrapper[4558]: I0120 17:56:22.389088 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"491a5173-ab6a-4abf-932d-93e7b4856b8b","Type":"ContainerStarted","Data":"c832bcbb9393f9662665b8047449f27e4fcb185bed442d412b8f9e53cfdceaa1"} Jan 20 17:56:22 crc kubenswrapper[4558]: I0120 17:56:22.665403 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.223595 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell0-cell-mapping-l8mfx"] Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.225355 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-l8mfx" Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.227964 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-manage-config-data" Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.228297 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-manage-scripts" Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.231660 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-cell-mapping-l8mfx"] Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.313745 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.315124 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.317295 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-api-config-data" Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.359629 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a0f54b8a-a5cf-4081-b4d6-f759bb1ae331-config-data\") pod \"nova-cell0-cell-mapping-l8mfx\" (UID: \"a0f54b8a-a5cf-4081-b4d6-f759bb1ae331\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-l8mfx" Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.359754 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a0f54b8a-a5cf-4081-b4d6-f759bb1ae331-scripts\") pod \"nova-cell0-cell-mapping-l8mfx\" (UID: \"a0f54b8a-a5cf-4081-b4d6-f759bb1ae331\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-l8mfx" Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.359900 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-htfrv\" (UniqueName: \"kubernetes.io/projected/a0f54b8a-a5cf-4081-b4d6-f759bb1ae331-kube-api-access-htfrv\") pod \"nova-cell0-cell-mapping-l8mfx\" (UID: \"a0f54b8a-a5cf-4081-b4d6-f759bb1ae331\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-l8mfx" Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.360050 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0f54b8a-a5cf-4081-b4d6-f759bb1ae331-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-l8mfx\" (UID: \"a0f54b8a-a5cf-4081-b4d6-f759bb1ae331\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-l8mfx" Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.405368 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.430634 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"491a5173-ab6a-4abf-932d-93e7b4856b8b","Type":"ContainerStarted","Data":"965fc9512076833004f35447a71fb467c1c8fe0081f4bd5ac443f07474f15c19"} Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.461586 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0f54b8a-a5cf-4081-b4d6-f759bb1ae331-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-l8mfx\" (UID: \"a0f54b8a-a5cf-4081-b4d6-f759bb1ae331\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-l8mfx" Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.461639 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tzff9\" (UniqueName: \"kubernetes.io/projected/9200cf36-f689-4e68-9847-5ad9f1aad20c-kube-api-access-tzff9\") pod \"nova-api-0\" (UID: \"9200cf36-f689-4e68-9847-5ad9f1aad20c\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.461700 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a0f54b8a-a5cf-4081-b4d6-f759bb1ae331-config-data\") pod \"nova-cell0-cell-mapping-l8mfx\" (UID: \"a0f54b8a-a5cf-4081-b4d6-f759bb1ae331\") " 
pod="openstack-kuttl-tests/nova-cell0-cell-mapping-l8mfx" Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.461744 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a0f54b8a-a5cf-4081-b4d6-f759bb1ae331-scripts\") pod \"nova-cell0-cell-mapping-l8mfx\" (UID: \"a0f54b8a-a5cf-4081-b4d6-f759bb1ae331\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-l8mfx" Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.461765 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9200cf36-f689-4e68-9847-5ad9f1aad20c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"9200cf36-f689-4e68-9847-5ad9f1aad20c\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.461789 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9200cf36-f689-4e68-9847-5ad9f1aad20c-logs\") pod \"nova-api-0\" (UID: \"9200cf36-f689-4e68-9847-5ad9f1aad20c\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.461828 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-htfrv\" (UniqueName: \"kubernetes.io/projected/a0f54b8a-a5cf-4081-b4d6-f759bb1ae331-kube-api-access-htfrv\") pod \"nova-cell0-cell-mapping-l8mfx\" (UID: \"a0f54b8a-a5cf-4081-b4d6-f759bb1ae331\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-l8mfx" Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.461861 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9200cf36-f689-4e68-9847-5ad9f1aad20c-config-data\") pod \"nova-api-0\" (UID: \"9200cf36-f689-4e68-9847-5ad9f1aad20c\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.479802 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0f54b8a-a5cf-4081-b4d6-f759bb1ae331-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-l8mfx\" (UID: \"a0f54b8a-a5cf-4081-b4d6-f759bb1ae331\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-l8mfx" Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.480235 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a0f54b8a-a5cf-4081-b4d6-f759bb1ae331-scripts\") pod \"nova-cell0-cell-mapping-l8mfx\" (UID: \"a0f54b8a-a5cf-4081-b4d6-f759bb1ae331\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-l8mfx" Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.491074 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-htfrv\" (UniqueName: \"kubernetes.io/projected/a0f54b8a-a5cf-4081-b4d6-f759bb1ae331-kube-api-access-htfrv\") pod \"nova-cell0-cell-mapping-l8mfx\" (UID: \"a0f54b8a-a5cf-4081-b4d6-f759bb1ae331\") " pod="openstack-kuttl-tests/nova-cell0-cell-mapping-l8mfx" Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.501759 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a0f54b8a-a5cf-4081-b4d6-f759bb1ae331-config-data\") pod \"nova-cell0-cell-mapping-l8mfx\" (UID: \"a0f54b8a-a5cf-4081-b4d6-f759bb1ae331\") " 
pod="openstack-kuttl-tests/nova-cell0-cell-mapping-l8mfx" Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.509765 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.511008 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.513620 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-scheduler-config-data" Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.547097 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-l8mfx" Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.553066 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.554891 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.557344 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-metadata-config-data" Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.564125 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92d9f753-7bce-42db-878e-1d274cfcea51-config-data\") pod \"nova-scheduler-0\" (UID: \"92d9f753-7bce-42db-878e-1d274cfcea51\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.564192 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9200cf36-f689-4e68-9847-5ad9f1aad20c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"9200cf36-f689-4e68-9847-5ad9f1aad20c\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.564221 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9200cf36-f689-4e68-9847-5ad9f1aad20c-logs\") pod \"nova-api-0\" (UID: \"9200cf36-f689-4e68-9847-5ad9f1aad20c\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.564252 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zn9nh\" (UniqueName: \"kubernetes.io/projected/92d9f753-7bce-42db-878e-1d274cfcea51-kube-api-access-zn9nh\") pod \"nova-scheduler-0\" (UID: \"92d9f753-7bce-42db-878e-1d274cfcea51\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.564293 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92d9f753-7bce-42db-878e-1d274cfcea51-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"92d9f753-7bce-42db-878e-1d274cfcea51\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.564329 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9200cf36-f689-4e68-9847-5ad9f1aad20c-config-data\") pod \"nova-api-0\" (UID: \"9200cf36-f689-4e68-9847-5ad9f1aad20c\") " 
pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.564488 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tzff9\" (UniqueName: \"kubernetes.io/projected/9200cf36-f689-4e68-9847-5ad9f1aad20c-kube-api-access-tzff9\") pod \"nova-api-0\" (UID: \"9200cf36-f689-4e68-9847-5ad9f1aad20c\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.564873 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9200cf36-f689-4e68-9847-5ad9f1aad20c-logs\") pod \"nova-api-0\" (UID: \"9200cf36-f689-4e68-9847-5ad9f1aad20c\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.573687 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9200cf36-f689-4e68-9847-5ad9f1aad20c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"9200cf36-f689-4e68-9847-5ad9f1aad20c\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.576988 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9200cf36-f689-4e68-9847-5ad9f1aad20c-config-data\") pod \"nova-api-0\" (UID: \"9200cf36-f689-4e68-9847-5ad9f1aad20c\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.577079 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.582908 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tzff9\" (UniqueName: \"kubernetes.io/projected/9200cf36-f689-4e68-9847-5ad9f1aad20c-kube-api-access-tzff9\") pod \"nova-api-0\" (UID: \"9200cf36-f689-4e68-9847-5ad9f1aad20c\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.596252 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.598121 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.601243 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-novncproxy-config-data" Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.611435 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.648089 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.669904 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.671966 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25281e2f-b275-4e90-9966-f73ad7e12598-config-data\") pod \"nova-metadata-0\" (UID: \"25281e2f-b275-4e90-9966-f73ad7e12598\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.672275 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nhp7l\" (UniqueName: \"kubernetes.io/projected/dfee2556-f92d-4f25-a05e-2747991cb8b2-kube-api-access-nhp7l\") pod \"nova-cell1-novncproxy-0\" (UID: \"dfee2556-f92d-4f25-a05e-2747991cb8b2\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.672588 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25281e2f-b275-4e90-9966-f73ad7e12598-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"25281e2f-b275-4e90-9966-f73ad7e12598\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.672695 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dfee2556-f92d-4f25-a05e-2747991cb8b2-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"dfee2556-f92d-4f25-a05e-2747991cb8b2\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.672887 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92d9f753-7bce-42db-878e-1d274cfcea51-config-data\") pod \"nova-scheduler-0\" (UID: \"92d9f753-7bce-42db-878e-1d274cfcea51\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.672996 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zn9nh\" (UniqueName: \"kubernetes.io/projected/92d9f753-7bce-42db-878e-1d274cfcea51-kube-api-access-zn9nh\") pod \"nova-scheduler-0\" (UID: \"92d9f753-7bce-42db-878e-1d274cfcea51\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.673072 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/25281e2f-b275-4e90-9966-f73ad7e12598-logs\") pod \"nova-metadata-0\" (UID: \"25281e2f-b275-4e90-9966-f73ad7e12598\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.673133 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bxnc8\" (UniqueName: \"kubernetes.io/projected/25281e2f-b275-4e90-9966-f73ad7e12598-kube-api-access-bxnc8\") pod \"nova-metadata-0\" (UID: \"25281e2f-b275-4e90-9966-f73ad7e12598\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.673199 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/92d9f753-7bce-42db-878e-1d274cfcea51-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"92d9f753-7bce-42db-878e-1d274cfcea51\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.673309 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dfee2556-f92d-4f25-a05e-2747991cb8b2-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"dfee2556-f92d-4f25-a05e-2747991cb8b2\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.678254 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92d9f753-7bce-42db-878e-1d274cfcea51-config-data\") pod \"nova-scheduler-0\" (UID: \"92d9f753-7bce-42db-878e-1d274cfcea51\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.679633 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92d9f753-7bce-42db-878e-1d274cfcea51-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"92d9f753-7bce-42db-878e-1d274cfcea51\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.694470 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zn9nh\" (UniqueName: \"kubernetes.io/projected/92d9f753-7bce-42db-878e-1d274cfcea51-kube-api-access-zn9nh\") pod \"nova-scheduler-0\" (UID: \"92d9f753-7bce-42db-878e-1d274cfcea51\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.775467 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dfee2556-f92d-4f25-a05e-2747991cb8b2-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"dfee2556-f92d-4f25-a05e-2747991cb8b2\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.775640 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25281e2f-b275-4e90-9966-f73ad7e12598-config-data\") pod \"nova-metadata-0\" (UID: \"25281e2f-b275-4e90-9966-f73ad7e12598\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.775667 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nhp7l\" (UniqueName: \"kubernetes.io/projected/dfee2556-f92d-4f25-a05e-2747991cb8b2-kube-api-access-nhp7l\") pod \"nova-cell1-novncproxy-0\" (UID: \"dfee2556-f92d-4f25-a05e-2747991cb8b2\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.775760 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25281e2f-b275-4e90-9966-f73ad7e12598-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"25281e2f-b275-4e90-9966-f73ad7e12598\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.775781 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dfee2556-f92d-4f25-a05e-2747991cb8b2-config-data\") pod 
\"nova-cell1-novncproxy-0\" (UID: \"dfee2556-f92d-4f25-a05e-2747991cb8b2\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.775844 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/25281e2f-b275-4e90-9966-f73ad7e12598-logs\") pod \"nova-metadata-0\" (UID: \"25281e2f-b275-4e90-9966-f73ad7e12598\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.775867 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bxnc8\" (UniqueName: \"kubernetes.io/projected/25281e2f-b275-4e90-9966-f73ad7e12598-kube-api-access-bxnc8\") pod \"nova-metadata-0\" (UID: \"25281e2f-b275-4e90-9966-f73ad7e12598\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.779292 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/25281e2f-b275-4e90-9966-f73ad7e12598-logs\") pod \"nova-metadata-0\" (UID: \"25281e2f-b275-4e90-9966-f73ad7e12598\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.781526 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dfee2556-f92d-4f25-a05e-2747991cb8b2-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"dfee2556-f92d-4f25-a05e-2747991cb8b2\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.781543 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25281e2f-b275-4e90-9966-f73ad7e12598-config-data\") pod \"nova-metadata-0\" (UID: \"25281e2f-b275-4e90-9966-f73ad7e12598\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.784967 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25281e2f-b275-4e90-9966-f73ad7e12598-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"25281e2f-b275-4e90-9966-f73ad7e12598\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.785430 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dfee2556-f92d-4f25-a05e-2747991cb8b2-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"dfee2556-f92d-4f25-a05e-2747991cb8b2\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.795693 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nhp7l\" (UniqueName: \"kubernetes.io/projected/dfee2556-f92d-4f25-a05e-2747991cb8b2-kube-api-access-nhp7l\") pod \"nova-cell1-novncproxy-0\" (UID: \"dfee2556-f92d-4f25-a05e-2747991cb8b2\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.796081 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bxnc8\" (UniqueName: \"kubernetes.io/projected/25281e2f-b275-4e90-9966-f73ad7e12598-kube-api-access-bxnc8\") pod \"nova-metadata-0\" (UID: \"25281e2f-b275-4e90-9966-f73ad7e12598\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.960056 
4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.961132 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:23 crc kubenswrapper[4558]: I0120 17:56:23.976549 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:56:24 crc kubenswrapper[4558]: I0120 17:56:24.092181 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-cell-mapping-l8mfx"] Jan 20 17:56:24 crc kubenswrapper[4558]: I0120 17:56:24.176211 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:56:24 crc kubenswrapper[4558]: I0120 17:56:24.181201 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-db-sync-6ltj7"] Jan 20 17:56:24 crc kubenswrapper[4558]: I0120 17:56:24.182575 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-6ltj7" Jan 20 17:56:24 crc kubenswrapper[4558]: I0120 17:56:24.185186 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-conductor-config-data" Jan 20 17:56:24 crc kubenswrapper[4558]: I0120 17:56:24.186838 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-conductor-scripts" Jan 20 17:56:24 crc kubenswrapper[4558]: I0120 17:56:24.188277 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-db-sync-6ltj7"] Jan 20 17:56:24 crc kubenswrapper[4558]: I0120 17:56:24.288478 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/27e67faf-b924-4c16-b22b-3297bb62a925-config-data\") pod \"nova-cell1-conductor-db-sync-6ltj7\" (UID: \"27e67faf-b924-4c16-b22b-3297bb62a925\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-6ltj7" Jan 20 17:56:24 crc kubenswrapper[4558]: I0120 17:56:24.288533 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/27e67faf-b924-4c16-b22b-3297bb62a925-scripts\") pod \"nova-cell1-conductor-db-sync-6ltj7\" (UID: \"27e67faf-b924-4c16-b22b-3297bb62a925\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-6ltj7" Jan 20 17:56:24 crc kubenswrapper[4558]: I0120 17:56:24.288563 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5qp48\" (UniqueName: \"kubernetes.io/projected/27e67faf-b924-4c16-b22b-3297bb62a925-kube-api-access-5qp48\") pod \"nova-cell1-conductor-db-sync-6ltj7\" (UID: \"27e67faf-b924-4c16-b22b-3297bb62a925\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-6ltj7" Jan 20 17:56:24 crc kubenswrapper[4558]: I0120 17:56:24.288752 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/27e67faf-b924-4c16-b22b-3297bb62a925-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-6ltj7\" (UID: \"27e67faf-b924-4c16-b22b-3297bb62a925\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-6ltj7" Jan 20 17:56:24 crc kubenswrapper[4558]: I0120 17:56:24.391190 
4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/27e67faf-b924-4c16-b22b-3297bb62a925-config-data\") pod \"nova-cell1-conductor-db-sync-6ltj7\" (UID: \"27e67faf-b924-4c16-b22b-3297bb62a925\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-6ltj7" Jan 20 17:56:24 crc kubenswrapper[4558]: I0120 17:56:24.391251 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/27e67faf-b924-4c16-b22b-3297bb62a925-scripts\") pod \"nova-cell1-conductor-db-sync-6ltj7\" (UID: \"27e67faf-b924-4c16-b22b-3297bb62a925\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-6ltj7" Jan 20 17:56:24 crc kubenswrapper[4558]: I0120 17:56:24.391283 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5qp48\" (UniqueName: \"kubernetes.io/projected/27e67faf-b924-4c16-b22b-3297bb62a925-kube-api-access-5qp48\") pod \"nova-cell1-conductor-db-sync-6ltj7\" (UID: \"27e67faf-b924-4c16-b22b-3297bb62a925\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-6ltj7" Jan 20 17:56:24 crc kubenswrapper[4558]: I0120 17:56:24.391434 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/27e67faf-b924-4c16-b22b-3297bb62a925-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-6ltj7\" (UID: \"27e67faf-b924-4c16-b22b-3297bb62a925\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-6ltj7" Jan 20 17:56:24 crc kubenswrapper[4558]: I0120 17:56:24.397202 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/27e67faf-b924-4c16-b22b-3297bb62a925-scripts\") pod \"nova-cell1-conductor-db-sync-6ltj7\" (UID: \"27e67faf-b924-4c16-b22b-3297bb62a925\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-6ltj7" Jan 20 17:56:24 crc kubenswrapper[4558]: I0120 17:56:24.403748 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/27e67faf-b924-4c16-b22b-3297bb62a925-config-data\") pod \"nova-cell1-conductor-db-sync-6ltj7\" (UID: \"27e67faf-b924-4c16-b22b-3297bb62a925\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-6ltj7" Jan 20 17:56:24 crc kubenswrapper[4558]: I0120 17:56:24.404053 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/27e67faf-b924-4c16-b22b-3297bb62a925-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-6ltj7\" (UID: \"27e67faf-b924-4c16-b22b-3297bb62a925\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-6ltj7" Jan 20 17:56:24 crc kubenswrapper[4558]: I0120 17:56:24.426305 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5qp48\" (UniqueName: \"kubernetes.io/projected/27e67faf-b924-4c16-b22b-3297bb62a925-kube-api-access-5qp48\") pod \"nova-cell1-conductor-db-sync-6ltj7\" (UID: \"27e67faf-b924-4c16-b22b-3297bb62a925\") " pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-6ltj7" Jan 20 17:56:24 crc kubenswrapper[4558]: I0120 17:56:24.476406 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-l8mfx" event={"ID":"a0f54b8a-a5cf-4081-b4d6-f759bb1ae331","Type":"ContainerStarted","Data":"dd8db3ceab78f235dc97917832328d6e0fcaa24e03f8ac167d9b8cc02b674413"} Jan 20 17:56:24 crc 
kubenswrapper[4558]: I0120 17:56:24.476748 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-l8mfx" event={"ID":"a0f54b8a-a5cf-4081-b4d6-f759bb1ae331","Type":"ContainerStarted","Data":"0d34dba01c9d1800d43b26acca64efc520f44e2535d90771b1ae3f125ffbe193"} Jan 20 17:56:24 crc kubenswrapper[4558]: I0120 17:56:24.482802 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:56:24 crc kubenswrapper[4558]: I0120 17:56:24.489685 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"491a5173-ab6a-4abf-932d-93e7b4856b8b","Type":"ContainerStarted","Data":"28e3b444eea0f63050d55e27a5abc4c42e289af941a1ae4b77bdb7c9624f71a8"} Jan 20 17:56:24 crc kubenswrapper[4558]: I0120 17:56:24.492201 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"9200cf36-f689-4e68-9847-5ad9f1aad20c","Type":"ContainerStarted","Data":"00663a7778425e0f73bea2d684a395a3da2dd27df2181da8e3d491c11d317544"} Jan 20 17:56:24 crc kubenswrapper[4558]: I0120 17:56:24.492224 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"9200cf36-f689-4e68-9847-5ad9f1aad20c","Type":"ContainerStarted","Data":"6ce2c49d98258521f9a124b91906129922e0ba7df2ffdec76c73d4f76100893f"} Jan 20 17:56:24 crc kubenswrapper[4558]: I0120 17:56:24.502113 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-l8mfx" podStartSLOduration=1.502101207 podStartE2EDuration="1.502101207s" podCreationTimestamp="2026-01-20 17:56:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:56:24.500142624 +0000 UTC m=+4478.260480592" watchObservedRunningTime="2026-01-20 17:56:24.502101207 +0000 UTC m=+4478.262439175" Jan 20 17:56:24 crc kubenswrapper[4558]: I0120 17:56:24.513498 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-6ltj7" Jan 20 17:56:24 crc kubenswrapper[4558]: I0120 17:56:24.578824 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:56:24 crc kubenswrapper[4558]: I0120 17:56:24.669539 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:56:25 crc kubenswrapper[4558]: I0120 17:56:25.003095 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-db-sync-6ltj7"] Jan 20 17:56:25 crc kubenswrapper[4558]: W0120 17:56:25.009872 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod27e67faf_b924_4c16_b22b_3297bb62a925.slice/crio-2d39c5ee1c2a02f98dc6f8bea4c2057f86bcdb14ac2b7372957d441cfb781e76 WatchSource:0}: Error finding container 2d39c5ee1c2a02f98dc6f8bea4c2057f86bcdb14ac2b7372957d441cfb781e76: Status 404 returned error can't find the container with id 2d39c5ee1c2a02f98dc6f8bea4c2057f86bcdb14ac2b7372957d441cfb781e76 Jan 20 17:56:25 crc kubenswrapper[4558]: I0120 17:56:25.501446 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"dfee2556-f92d-4f25-a05e-2747991cb8b2","Type":"ContainerStarted","Data":"0952a2a18f63c5d5c7453dc0de4c130b02fbc15679d05934a18831084681377d"} Jan 20 17:56:25 crc kubenswrapper[4558]: I0120 17:56:25.501507 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"dfee2556-f92d-4f25-a05e-2747991cb8b2","Type":"ContainerStarted","Data":"2ea09eaee857610e8c24b9c5582ab8af77807170ffe73af04d1c33953fa9155d"} Jan 20 17:56:25 crc kubenswrapper[4558]: I0120 17:56:25.503254 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"92d9f753-7bce-42db-878e-1d274cfcea51","Type":"ContainerStarted","Data":"6235a6d4a347ec8f43f97bf14e81fa63fe879b20b8165d6c5d78a45ac07c32e9"} Jan 20 17:56:25 crc kubenswrapper[4558]: I0120 17:56:25.503285 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"92d9f753-7bce-42db-878e-1d274cfcea51","Type":"ContainerStarted","Data":"036c927aed338cd8745edfed28f5c6e024a7e9e4316d25503c4d3ba95f0942ec"} Jan 20 17:56:25 crc kubenswrapper[4558]: I0120 17:56:25.504719 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"25281e2f-b275-4e90-9966-f73ad7e12598","Type":"ContainerStarted","Data":"e4c87a9909632ad1d54d8c23a4781ae654e0e86d45a624523b40bd6edc782acd"} Jan 20 17:56:25 crc kubenswrapper[4558]: I0120 17:56:25.504763 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"25281e2f-b275-4e90-9966-f73ad7e12598","Type":"ContainerStarted","Data":"aa87bc91f839f6cd2f15c9cd1c1c78279e8258941863361663cef0dca8c586e0"} Jan 20 17:56:25 crc kubenswrapper[4558]: I0120 17:56:25.504775 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"25281e2f-b275-4e90-9966-f73ad7e12598","Type":"ContainerStarted","Data":"92dbcedab8dc0c8ed4503f0f8b206a4bb7b5eaf04ebed7f78a33b99407475731"} Jan 20 17:56:25 crc kubenswrapper[4558]: I0120 17:56:25.507041 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" 
event={"ID":"491a5173-ab6a-4abf-932d-93e7b4856b8b","Type":"ContainerStarted","Data":"737858e99577b342734a59a10def7df38917eabf05f581f4f3ca9bf3bb32550b"} Jan 20 17:56:25 crc kubenswrapper[4558]: I0120 17:56:25.507225 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:25 crc kubenswrapper[4558]: I0120 17:56:25.508996 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"9200cf36-f689-4e68-9847-5ad9f1aad20c","Type":"ContainerStarted","Data":"4327cff3b2a73ebb89e07fafb3730b873a2555273bb891e589fcb0f39e6e591f"} Jan 20 17:56:25 crc kubenswrapper[4558]: I0120 17:56:25.512739 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-6ltj7" event={"ID":"27e67faf-b924-4c16-b22b-3297bb62a925","Type":"ContainerStarted","Data":"085308a6dc97c681857f59245f2f70743caf09888e9a5758a37a53be58e66937"} Jan 20 17:56:25 crc kubenswrapper[4558]: I0120 17:56:25.512778 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-6ltj7" event={"ID":"27e67faf-b924-4c16-b22b-3297bb62a925","Type":"ContainerStarted","Data":"2d39c5ee1c2a02f98dc6f8bea4c2057f86bcdb14ac2b7372957d441cfb781e76"} Jan 20 17:56:25 crc kubenswrapper[4558]: I0120 17:56:25.526607 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" podStartSLOduration=2.526588465 podStartE2EDuration="2.526588465s" podCreationTimestamp="2026-01-20 17:56:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:56:25.520514447 +0000 UTC m=+4479.280852414" watchObservedRunningTime="2026-01-20 17:56:25.526588465 +0000 UTC m=+4479.286926432" Jan 20 17:56:25 crc kubenswrapper[4558]: I0120 17:56:25.542037 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=1.634810319 podStartE2EDuration="5.542018579s" podCreationTimestamp="2026-01-20 17:56:20 +0000 UTC" firstStartedPulling="2026-01-20 17:56:21.197694507 +0000 UTC m=+4474.958032464" lastFinishedPulling="2026-01-20 17:56:25.104902756 +0000 UTC m=+4478.865240724" observedRunningTime="2026-01-20 17:56:25.535441857 +0000 UTC m=+4479.295779825" watchObservedRunningTime="2026-01-20 17:56:25.542018579 +0000 UTC m=+4479.302356547" Jan 20 17:56:25 crc kubenswrapper[4558]: I0120 17:56:25.548499 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-api-0" podStartSLOduration=2.548492058 podStartE2EDuration="2.548492058s" podCreationTimestamp="2026-01-20 17:56:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:56:25.546958104 +0000 UTC m=+4479.307296072" watchObservedRunningTime="2026-01-20 17:56:25.548492058 +0000 UTC m=+4479.308830024" Jan 20 17:56:25 crc kubenswrapper[4558]: I0120 17:56:25.568557 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-metadata-0" podStartSLOduration=2.568533791 podStartE2EDuration="2.568533791s" podCreationTimestamp="2026-01-20 17:56:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:56:25.562339658 +0000 UTC m=+4479.322677625" 
watchObservedRunningTime="2026-01-20 17:56:25.568533791 +0000 UTC m=+4479.328871758" Jan 20 17:56:25 crc kubenswrapper[4558]: I0120 17:56:25.588519 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-scheduler-0" podStartSLOduration=2.588508007 podStartE2EDuration="2.588508007s" podCreationTimestamp="2026-01-20 17:56:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:56:25.584931315 +0000 UTC m=+4479.345269281" watchObservedRunningTime="2026-01-20 17:56:25.588508007 +0000 UTC m=+4479.348845975" Jan 20 17:56:25 crc kubenswrapper[4558]: I0120 17:56:25.602307 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-6ltj7" podStartSLOduration=1.602267892 podStartE2EDuration="1.602267892s" podCreationTimestamp="2026-01-20 17:56:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:56:25.598192722 +0000 UTC m=+4479.358530689" watchObservedRunningTime="2026-01-20 17:56:25.602267892 +0000 UTC m=+4479.362605859" Jan 20 17:56:27 crc kubenswrapper[4558]: I0120 17:56:27.128631 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:56:27 crc kubenswrapper[4558]: I0120 17:56:27.137630 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:56:27 crc kubenswrapper[4558]: I0120 17:56:27.529071 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" podUID="dfee2556-f92d-4f25-a05e-2747991cb8b2" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://0952a2a18f63c5d5c7453dc0de4c130b02fbc15679d05934a18831084681377d" gracePeriod=30 Jan 20 17:56:27 crc kubenswrapper[4558]: I0120 17:56:27.529131 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="25281e2f-b275-4e90-9966-f73ad7e12598" containerName="nova-metadata-log" containerID="cri-o://aa87bc91f839f6cd2f15c9cd1c1c78279e8258941863361663cef0dca8c586e0" gracePeriod=30 Jan 20 17:56:27 crc kubenswrapper[4558]: I0120 17:56:27.529595 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="25281e2f-b275-4e90-9966-f73ad7e12598" containerName="nova-metadata-metadata" containerID="cri-o://e4c87a9909632ad1d54d8c23a4781ae654e0e86d45a624523b40bd6edc782acd" gracePeriod=30 Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.076931 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.142858 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.179198 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dfee2556-f92d-4f25-a05e-2747991cb8b2-config-data\") pod \"dfee2556-f92d-4f25-a05e-2747991cb8b2\" (UID: \"dfee2556-f92d-4f25-a05e-2747991cb8b2\") " Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.179248 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/25281e2f-b275-4e90-9966-f73ad7e12598-logs\") pod \"25281e2f-b275-4e90-9966-f73ad7e12598\" (UID: \"25281e2f-b275-4e90-9966-f73ad7e12598\") " Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.179332 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bxnc8\" (UniqueName: \"kubernetes.io/projected/25281e2f-b275-4e90-9966-f73ad7e12598-kube-api-access-bxnc8\") pod \"25281e2f-b275-4e90-9966-f73ad7e12598\" (UID: \"25281e2f-b275-4e90-9966-f73ad7e12598\") " Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.179374 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nhp7l\" (UniqueName: \"kubernetes.io/projected/dfee2556-f92d-4f25-a05e-2747991cb8b2-kube-api-access-nhp7l\") pod \"dfee2556-f92d-4f25-a05e-2747991cb8b2\" (UID: \"dfee2556-f92d-4f25-a05e-2747991cb8b2\") " Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.179412 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dfee2556-f92d-4f25-a05e-2747991cb8b2-combined-ca-bundle\") pod \"dfee2556-f92d-4f25-a05e-2747991cb8b2\" (UID: \"dfee2556-f92d-4f25-a05e-2747991cb8b2\") " Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.179449 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25281e2f-b275-4e90-9966-f73ad7e12598-combined-ca-bundle\") pod \"25281e2f-b275-4e90-9966-f73ad7e12598\" (UID: \"25281e2f-b275-4e90-9966-f73ad7e12598\") " Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.179479 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25281e2f-b275-4e90-9966-f73ad7e12598-config-data\") pod \"25281e2f-b275-4e90-9966-f73ad7e12598\" (UID: \"25281e2f-b275-4e90-9966-f73ad7e12598\") " Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.179726 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/25281e2f-b275-4e90-9966-f73ad7e12598-logs" (OuterVolumeSpecName: "logs") pod "25281e2f-b275-4e90-9966-f73ad7e12598" (UID: "25281e2f-b275-4e90-9966-f73ad7e12598"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.180001 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/25281e2f-b275-4e90-9966-f73ad7e12598-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.191302 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25281e2f-b275-4e90-9966-f73ad7e12598-kube-api-access-bxnc8" (OuterVolumeSpecName: "kube-api-access-bxnc8") pod "25281e2f-b275-4e90-9966-f73ad7e12598" (UID: "25281e2f-b275-4e90-9966-f73ad7e12598"). InnerVolumeSpecName "kube-api-access-bxnc8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.191355 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dfee2556-f92d-4f25-a05e-2747991cb8b2-kube-api-access-nhp7l" (OuterVolumeSpecName: "kube-api-access-nhp7l") pod "dfee2556-f92d-4f25-a05e-2747991cb8b2" (UID: "dfee2556-f92d-4f25-a05e-2747991cb8b2"). InnerVolumeSpecName "kube-api-access-nhp7l". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.202611 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dfee2556-f92d-4f25-a05e-2747991cb8b2-config-data" (OuterVolumeSpecName: "config-data") pod "dfee2556-f92d-4f25-a05e-2747991cb8b2" (UID: "dfee2556-f92d-4f25-a05e-2747991cb8b2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.207602 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25281e2f-b275-4e90-9966-f73ad7e12598-config-data" (OuterVolumeSpecName: "config-data") pod "25281e2f-b275-4e90-9966-f73ad7e12598" (UID: "25281e2f-b275-4e90-9966-f73ad7e12598"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.210315 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dfee2556-f92d-4f25-a05e-2747991cb8b2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dfee2556-f92d-4f25-a05e-2747991cb8b2" (UID: "dfee2556-f92d-4f25-a05e-2747991cb8b2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.210338 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25281e2f-b275-4e90-9966-f73ad7e12598-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "25281e2f-b275-4e90-9966-f73ad7e12598" (UID: "25281e2f-b275-4e90-9966-f73ad7e12598"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.281160 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25281e2f-b275-4e90-9966-f73ad7e12598-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.281203 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dfee2556-f92d-4f25-a05e-2747991cb8b2-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.281219 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bxnc8\" (UniqueName: \"kubernetes.io/projected/25281e2f-b275-4e90-9966-f73ad7e12598-kube-api-access-bxnc8\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.281235 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nhp7l\" (UniqueName: \"kubernetes.io/projected/dfee2556-f92d-4f25-a05e-2747991cb8b2-kube-api-access-nhp7l\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.281245 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dfee2556-f92d-4f25-a05e-2747991cb8b2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.281257 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25281e2f-b275-4e90-9966-f73ad7e12598-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.541739 4558 generic.go:334] "Generic (PLEG): container finished" podID="dfee2556-f92d-4f25-a05e-2747991cb8b2" containerID="0952a2a18f63c5d5c7453dc0de4c130b02fbc15679d05934a18831084681377d" exitCode=0 Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.541832 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"dfee2556-f92d-4f25-a05e-2747991cb8b2","Type":"ContainerDied","Data":"0952a2a18f63c5d5c7453dc0de4c130b02fbc15679d05934a18831084681377d"} Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.541871 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"dfee2556-f92d-4f25-a05e-2747991cb8b2","Type":"ContainerDied","Data":"2ea09eaee857610e8c24b9c5582ab8af77807170ffe73af04d1c33953fa9155d"} Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.541903 4558 scope.go:117] "RemoveContainer" containerID="0952a2a18f63c5d5c7453dc0de4c130b02fbc15679d05934a18831084681377d" Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.542105 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.547974 4558 generic.go:334] "Generic (PLEG): container finished" podID="25281e2f-b275-4e90-9966-f73ad7e12598" containerID="e4c87a9909632ad1d54d8c23a4781ae654e0e86d45a624523b40bd6edc782acd" exitCode=0 Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.548011 4558 generic.go:334] "Generic (PLEG): container finished" podID="25281e2f-b275-4e90-9966-f73ad7e12598" containerID="aa87bc91f839f6cd2f15c9cd1c1c78279e8258941863361663cef0dca8c586e0" exitCode=143 Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.548058 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"25281e2f-b275-4e90-9966-f73ad7e12598","Type":"ContainerDied","Data":"e4c87a9909632ad1d54d8c23a4781ae654e0e86d45a624523b40bd6edc782acd"} Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.548103 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"25281e2f-b275-4e90-9966-f73ad7e12598","Type":"ContainerDied","Data":"aa87bc91f839f6cd2f15c9cd1c1c78279e8258941863361663cef0dca8c586e0"} Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.548114 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"25281e2f-b275-4e90-9966-f73ad7e12598","Type":"ContainerDied","Data":"92dbcedab8dc0c8ed4503f0f8b206a4bb7b5eaf04ebed7f78a33b99407475731"} Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.548204 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.571491 4558 scope.go:117] "RemoveContainer" containerID="0952a2a18f63c5d5c7453dc0de4c130b02fbc15679d05934a18831084681377d" Jan 20 17:56:28 crc kubenswrapper[4558]: E0120 17:56:28.572710 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0952a2a18f63c5d5c7453dc0de4c130b02fbc15679d05934a18831084681377d\": container with ID starting with 0952a2a18f63c5d5c7453dc0de4c130b02fbc15679d05934a18831084681377d not found: ID does not exist" containerID="0952a2a18f63c5d5c7453dc0de4c130b02fbc15679d05934a18831084681377d" Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.572754 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0952a2a18f63c5d5c7453dc0de4c130b02fbc15679d05934a18831084681377d"} err="failed to get container status \"0952a2a18f63c5d5c7453dc0de4c130b02fbc15679d05934a18831084681377d\": rpc error: code = NotFound desc = could not find container \"0952a2a18f63c5d5c7453dc0de4c130b02fbc15679d05934a18831084681377d\": container with ID starting with 0952a2a18f63c5d5c7453dc0de4c130b02fbc15679d05934a18831084681377d not found: ID does not exist" Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.572780 4558 scope.go:117] "RemoveContainer" containerID="e4c87a9909632ad1d54d8c23a4781ae654e0e86d45a624523b40bd6edc782acd" Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.580745 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.593056 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.599328 4558 scope.go:117] 
"RemoveContainer" containerID="aa87bc91f839f6cd2f15c9cd1c1c78279e8258941863361663cef0dca8c586e0" Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.604938 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.609335 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:56:28 crc kubenswrapper[4558]: E0120 17:56:28.609710 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25281e2f-b275-4e90-9966-f73ad7e12598" containerName="nova-metadata-log" Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.609730 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="25281e2f-b275-4e90-9966-f73ad7e12598" containerName="nova-metadata-log" Jan 20 17:56:28 crc kubenswrapper[4558]: E0120 17:56:28.609738 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dfee2556-f92d-4f25-a05e-2747991cb8b2" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.609747 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="dfee2556-f92d-4f25-a05e-2747991cb8b2" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:56:28 crc kubenswrapper[4558]: E0120 17:56:28.609758 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25281e2f-b275-4e90-9966-f73ad7e12598" containerName="nova-metadata-metadata" Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.609763 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="25281e2f-b275-4e90-9966-f73ad7e12598" containerName="nova-metadata-metadata" Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.610533 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="25281e2f-b275-4e90-9966-f73ad7e12598" containerName="nova-metadata-metadata" Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.610562 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="dfee2556-f92d-4f25-a05e-2747991cb8b2" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.610577 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="25281e2f-b275-4e90-9966-f73ad7e12598" containerName="nova-metadata-log" Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.611373 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.615914 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-novncproxy-config-data" Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.615994 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-novncproxy-cell1-public-svc" Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.616141 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-novncproxy-cell1-vencrypt" Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.621939 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.627956 4558 scope.go:117] "RemoveContainer" containerID="e4c87a9909632ad1d54d8c23a4781ae654e0e86d45a624523b40bd6edc782acd" Jan 20 17:56:28 crc kubenswrapper[4558]: E0120 17:56:28.628789 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e4c87a9909632ad1d54d8c23a4781ae654e0e86d45a624523b40bd6edc782acd\": container with ID starting with e4c87a9909632ad1d54d8c23a4781ae654e0e86d45a624523b40bd6edc782acd not found: ID does not exist" containerID="e4c87a9909632ad1d54d8c23a4781ae654e0e86d45a624523b40bd6edc782acd" Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.628829 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e4c87a9909632ad1d54d8c23a4781ae654e0e86d45a624523b40bd6edc782acd"} err="failed to get container status \"e4c87a9909632ad1d54d8c23a4781ae654e0e86d45a624523b40bd6edc782acd\": rpc error: code = NotFound desc = could not find container \"e4c87a9909632ad1d54d8c23a4781ae654e0e86d45a624523b40bd6edc782acd\": container with ID starting with e4c87a9909632ad1d54d8c23a4781ae654e0e86d45a624523b40bd6edc782acd not found: ID does not exist" Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.628857 4558 scope.go:117] "RemoveContainer" containerID="aa87bc91f839f6cd2f15c9cd1c1c78279e8258941863361663cef0dca8c586e0" Jan 20 17:56:28 crc kubenswrapper[4558]: E0120 17:56:28.629849 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aa87bc91f839f6cd2f15c9cd1c1c78279e8258941863361663cef0dca8c586e0\": container with ID starting with aa87bc91f839f6cd2f15c9cd1c1c78279e8258941863361663cef0dca8c586e0 not found: ID does not exist" containerID="aa87bc91f839f6cd2f15c9cd1c1c78279e8258941863361663cef0dca8c586e0" Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.629877 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aa87bc91f839f6cd2f15c9cd1c1c78279e8258941863361663cef0dca8c586e0"} err="failed to get container status \"aa87bc91f839f6cd2f15c9cd1c1c78279e8258941863361663cef0dca8c586e0\": rpc error: code = NotFound desc = could not find container \"aa87bc91f839f6cd2f15c9cd1c1c78279e8258941863361663cef0dca8c586e0\": container with ID starting with aa87bc91f839f6cd2f15c9cd1c1c78279e8258941863361663cef0dca8c586e0 not found: ID does not exist" Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.629892 4558 scope.go:117] "RemoveContainer" containerID="e4c87a9909632ad1d54d8c23a4781ae654e0e86d45a624523b40bd6edc782acd" Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.630662 4558 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e4c87a9909632ad1d54d8c23a4781ae654e0e86d45a624523b40bd6edc782acd"} err="failed to get container status \"e4c87a9909632ad1d54d8c23a4781ae654e0e86d45a624523b40bd6edc782acd\": rpc error: code = NotFound desc = could not find container \"e4c87a9909632ad1d54d8c23a4781ae654e0e86d45a624523b40bd6edc782acd\": container with ID starting with e4c87a9909632ad1d54d8c23a4781ae654e0e86d45a624523b40bd6edc782acd not found: ID does not exist" Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.630726 4558 scope.go:117] "RemoveContainer" containerID="aa87bc91f839f6cd2f15c9cd1c1c78279e8258941863361663cef0dca8c586e0" Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.631233 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aa87bc91f839f6cd2f15c9cd1c1c78279e8258941863361663cef0dca8c586e0"} err="failed to get container status \"aa87bc91f839f6cd2f15c9cd1c1c78279e8258941863361663cef0dca8c586e0\": rpc error: code = NotFound desc = could not find container \"aa87bc91f839f6cd2f15c9cd1c1c78279e8258941863361663cef0dca8c586e0\": container with ID starting with aa87bc91f839f6cd2f15c9cd1c1c78279e8258941863361663cef0dca8c586e0 not found: ID does not exist" Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.631281 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.633591 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.640526 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-metadata-config-data" Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.640806 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-metadata-internal-svc" Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.644625 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.660708 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.691321 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de8410e3-8de9-4013-b2ed-545ccdff866c-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"de8410e3-8de9-4013-b2ed-545ccdff866c\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.691410 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6w6sn\" (UniqueName: \"kubernetes.io/projected/26871e4f-876d-488f-88c9-3aa973870a11-kube-api-access-6w6sn\") pod \"nova-metadata-0\" (UID: \"26871e4f-876d-488f-88c9-3aa973870a11\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.691642 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/26871e4f-876d-488f-88c9-3aa973870a11-logs\") pod \"nova-metadata-0\" (UID: \"26871e4f-876d-488f-88c9-3aa973870a11\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 
17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.692047 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/26871e4f-876d-488f-88c9-3aa973870a11-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"26871e4f-876d-488f-88c9-3aa973870a11\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.692079 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/de8410e3-8de9-4013-b2ed-545ccdff866c-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"de8410e3-8de9-4013-b2ed-545ccdff866c\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.692110 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/26871e4f-876d-488f-88c9-3aa973870a11-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"26871e4f-876d-488f-88c9-3aa973870a11\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.692151 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/26871e4f-876d-488f-88c9-3aa973870a11-config-data\") pod \"nova-metadata-0\" (UID: \"26871e4f-876d-488f-88c9-3aa973870a11\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.692207 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/de8410e3-8de9-4013-b2ed-545ccdff866c-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"de8410e3-8de9-4013-b2ed-545ccdff866c\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.692261 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/de8410e3-8de9-4013-b2ed-545ccdff866c-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"de8410e3-8de9-4013-b2ed-545ccdff866c\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.692290 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4mk87\" (UniqueName: \"kubernetes.io/projected/de8410e3-8de9-4013-b2ed-545ccdff866c-kube-api-access-4mk87\") pod \"nova-cell1-novncproxy-0\" (UID: \"de8410e3-8de9-4013-b2ed-545ccdff866c\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.794880 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/26871e4f-876d-488f-88c9-3aa973870a11-config-data\") pod \"nova-metadata-0\" (UID: \"26871e4f-876d-488f-88c9-3aa973870a11\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.794963 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/de8410e3-8de9-4013-b2ed-545ccdff866c-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"de8410e3-8de9-4013-b2ed-545ccdff866c\") " 
pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.795093 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/de8410e3-8de9-4013-b2ed-545ccdff866c-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"de8410e3-8de9-4013-b2ed-545ccdff866c\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.795180 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4mk87\" (UniqueName: \"kubernetes.io/projected/de8410e3-8de9-4013-b2ed-545ccdff866c-kube-api-access-4mk87\") pod \"nova-cell1-novncproxy-0\" (UID: \"de8410e3-8de9-4013-b2ed-545ccdff866c\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.795319 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de8410e3-8de9-4013-b2ed-545ccdff866c-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"de8410e3-8de9-4013-b2ed-545ccdff866c\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.795419 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6w6sn\" (UniqueName: \"kubernetes.io/projected/26871e4f-876d-488f-88c9-3aa973870a11-kube-api-access-6w6sn\") pod \"nova-metadata-0\" (UID: \"26871e4f-876d-488f-88c9-3aa973870a11\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.795504 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/26871e4f-876d-488f-88c9-3aa973870a11-logs\") pod \"nova-metadata-0\" (UID: \"26871e4f-876d-488f-88c9-3aa973870a11\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.795589 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/26871e4f-876d-488f-88c9-3aa973870a11-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"26871e4f-876d-488f-88c9-3aa973870a11\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.795619 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/de8410e3-8de9-4013-b2ed-545ccdff866c-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"de8410e3-8de9-4013-b2ed-545ccdff866c\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.795674 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/26871e4f-876d-488f-88c9-3aa973870a11-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"26871e4f-876d-488f-88c9-3aa973870a11\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.796437 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/26871e4f-876d-488f-88c9-3aa973870a11-logs\") pod \"nova-metadata-0\" (UID: \"26871e4f-876d-488f-88c9-3aa973870a11\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:28 crc 
kubenswrapper[4558]: I0120 17:56:28.801376 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/de8410e3-8de9-4013-b2ed-545ccdff866c-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"de8410e3-8de9-4013-b2ed-545ccdff866c\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.801643 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/de8410e3-8de9-4013-b2ed-545ccdff866c-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"de8410e3-8de9-4013-b2ed-545ccdff866c\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.802322 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de8410e3-8de9-4013-b2ed-545ccdff866c-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"de8410e3-8de9-4013-b2ed-545ccdff866c\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.805859 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/de8410e3-8de9-4013-b2ed-545ccdff866c-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"de8410e3-8de9-4013-b2ed-545ccdff866c\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.818353 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/26871e4f-876d-488f-88c9-3aa973870a11-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"26871e4f-876d-488f-88c9-3aa973870a11\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.819373 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/26871e4f-876d-488f-88c9-3aa973870a11-config-data\") pod \"nova-metadata-0\" (UID: \"26871e4f-876d-488f-88c9-3aa973870a11\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.819462 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6w6sn\" (UniqueName: \"kubernetes.io/projected/26871e4f-876d-488f-88c9-3aa973870a11-kube-api-access-6w6sn\") pod \"nova-metadata-0\" (UID: \"26871e4f-876d-488f-88c9-3aa973870a11\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.819724 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/26871e4f-876d-488f-88c9-3aa973870a11-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"26871e4f-876d-488f-88c9-3aa973870a11\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.820272 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4mk87\" (UniqueName: \"kubernetes.io/projected/de8410e3-8de9-4013-b2ed-545ccdff866c-kube-api-access-4mk87\") pod \"nova-cell1-novncproxy-0\" (UID: \"de8410e3-8de9-4013-b2ed-545ccdff866c\") " pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.927998 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:56:28 crc kubenswrapper[4558]: I0120 17:56:28.976651 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:56:29 crc kubenswrapper[4558]: I0120 17:56:29.002782 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:29 crc kubenswrapper[4558]: I0120 17:56:29.428564 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:56:29 crc kubenswrapper[4558]: W0120 17:56:29.430611 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podde8410e3_8de9_4013_b2ed_545ccdff866c.slice/crio-e0b0d938faf64b937e2053af4169fcd1ad2d263b71f5c8ac240fb8ad479713ba WatchSource:0}: Error finding container e0b0d938faf64b937e2053af4169fcd1ad2d263b71f5c8ac240fb8ad479713ba: Status 404 returned error can't find the container with id e0b0d938faf64b937e2053af4169fcd1ad2d263b71f5c8ac240fb8ad479713ba Jan 20 17:56:29 crc kubenswrapper[4558]: W0120 17:56:29.497042 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod26871e4f_876d_488f_88c9_3aa973870a11.slice/crio-dd0fd4464656cf0388fd04d718132c848e1d0c1fc3c1c68b2acd8ba8bdd1aa27 WatchSource:0}: Error finding container dd0fd4464656cf0388fd04d718132c848e1d0c1fc3c1c68b2acd8ba8bdd1aa27: Status 404 returned error can't find the container with id dd0fd4464656cf0388fd04d718132c848e1d0c1fc3c1c68b2acd8ba8bdd1aa27 Jan 20 17:56:29 crc kubenswrapper[4558]: I0120 17:56:29.507675 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:56:29 crc kubenswrapper[4558]: I0120 17:56:29.567234 4558 scope.go:117] "RemoveContainer" containerID="4ea7fa61d8b21007a044345acec89fcebe56c2921cf0f76ff2002f44bdc88ede" Jan 20 17:56:29 crc kubenswrapper[4558]: I0120 17:56:29.567772 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"de8410e3-8de9-4013-b2ed-545ccdff866c","Type":"ContainerStarted","Data":"e0b0d938faf64b937e2053af4169fcd1ad2d263b71f5c8ac240fb8ad479713ba"} Jan 20 17:56:29 crc kubenswrapper[4558]: E0120 17:56:29.568065 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:56:29 crc kubenswrapper[4558]: I0120 17:56:29.574964 4558 generic.go:334] "Generic (PLEG): container finished" podID="27e67faf-b924-4c16-b22b-3297bb62a925" containerID="085308a6dc97c681857f59245f2f70743caf09888e9a5758a37a53be58e66937" exitCode=0 Jan 20 17:56:29 crc kubenswrapper[4558]: I0120 17:56:29.575065 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-6ltj7" event={"ID":"27e67faf-b924-4c16-b22b-3297bb62a925","Type":"ContainerDied","Data":"085308a6dc97c681857f59245f2f70743caf09888e9a5758a37a53be58e66937"} Jan 20 17:56:29 crc kubenswrapper[4558]: I0120 17:56:29.577003 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"26871e4f-876d-488f-88c9-3aa973870a11","Type":"ContainerStarted","Data":"dd0fd4464656cf0388fd04d718132c848e1d0c1fc3c1c68b2acd8ba8bdd1aa27"} Jan 20 17:56:30 crc kubenswrapper[4558]: I0120 17:56:30.578575 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25281e2f-b275-4e90-9966-f73ad7e12598" path="/var/lib/kubelet/pods/25281e2f-b275-4e90-9966-f73ad7e12598/volumes" Jan 20 17:56:30 crc kubenswrapper[4558]: I0120 17:56:30.579466 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dfee2556-f92d-4f25-a05e-2747991cb8b2" path="/var/lib/kubelet/pods/dfee2556-f92d-4f25-a05e-2747991cb8b2/volumes" Jan 20 17:56:30 crc kubenswrapper[4558]: I0120 17:56:30.589536 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"de8410e3-8de9-4013-b2ed-545ccdff866c","Type":"ContainerStarted","Data":"f83e1e941e2ebb680c9bcf90480fb830a1ce1ccd57989006602331fa43a28a91"} Jan 20 17:56:30 crc kubenswrapper[4558]: I0120 17:56:30.591972 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"26871e4f-876d-488f-88c9-3aa973870a11","Type":"ContainerStarted","Data":"424b6e5f92b76fc81c831fbc08e5609a2c221b898ed12b28d791cf6eab46f46e"} Jan 20 17:56:30 crc kubenswrapper[4558]: I0120 17:56:30.592010 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"26871e4f-876d-488f-88c9-3aa973870a11","Type":"ContainerStarted","Data":"88d6eaab583bb20c071c86195b717ccf31acee517d98f82fdac7e0ffd4f4cf63"} Jan 20 17:56:30 crc kubenswrapper[4558]: I0120 17:56:30.617016 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" podStartSLOduration=2.6169947970000003 podStartE2EDuration="2.616994797s" podCreationTimestamp="2026-01-20 17:56:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:56:30.615247703 +0000 UTC m=+4484.375585670" watchObservedRunningTime="2026-01-20 17:56:30.616994797 +0000 UTC m=+4484.377332764" Jan 20 17:56:30 crc kubenswrapper[4558]: I0120 17:56:30.640582 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-metadata-0" podStartSLOduration=2.640565734 podStartE2EDuration="2.640565734s" podCreationTimestamp="2026-01-20 17:56:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:56:30.637042562 +0000 UTC m=+4484.397380528" watchObservedRunningTime="2026-01-20 17:56:30.640565734 +0000 UTC m=+4484.400903701" Jan 20 17:56:30 crc kubenswrapper[4558]: I0120 17:56:30.941632 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-6ltj7" Jan 20 17:56:31 crc kubenswrapper[4558]: I0120 17:56:31.039531 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/27e67faf-b924-4c16-b22b-3297bb62a925-combined-ca-bundle\") pod \"27e67faf-b924-4c16-b22b-3297bb62a925\" (UID: \"27e67faf-b924-4c16-b22b-3297bb62a925\") " Jan 20 17:56:31 crc kubenswrapper[4558]: I0120 17:56:31.039586 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/27e67faf-b924-4c16-b22b-3297bb62a925-config-data\") pod \"27e67faf-b924-4c16-b22b-3297bb62a925\" (UID: \"27e67faf-b924-4c16-b22b-3297bb62a925\") " Jan 20 17:56:31 crc kubenswrapper[4558]: I0120 17:56:31.039654 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/27e67faf-b924-4c16-b22b-3297bb62a925-scripts\") pod \"27e67faf-b924-4c16-b22b-3297bb62a925\" (UID: \"27e67faf-b924-4c16-b22b-3297bb62a925\") " Jan 20 17:56:31 crc kubenswrapper[4558]: I0120 17:56:31.039763 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5qp48\" (UniqueName: \"kubernetes.io/projected/27e67faf-b924-4c16-b22b-3297bb62a925-kube-api-access-5qp48\") pod \"27e67faf-b924-4c16-b22b-3297bb62a925\" (UID: \"27e67faf-b924-4c16-b22b-3297bb62a925\") " Jan 20 17:56:31 crc kubenswrapper[4558]: I0120 17:56:31.046643 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/27e67faf-b924-4c16-b22b-3297bb62a925-kube-api-access-5qp48" (OuterVolumeSpecName: "kube-api-access-5qp48") pod "27e67faf-b924-4c16-b22b-3297bb62a925" (UID: "27e67faf-b924-4c16-b22b-3297bb62a925"). InnerVolumeSpecName "kube-api-access-5qp48". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:56:31 crc kubenswrapper[4558]: I0120 17:56:31.046980 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/27e67faf-b924-4c16-b22b-3297bb62a925-scripts" (OuterVolumeSpecName: "scripts") pod "27e67faf-b924-4c16-b22b-3297bb62a925" (UID: "27e67faf-b924-4c16-b22b-3297bb62a925"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:56:31 crc kubenswrapper[4558]: I0120 17:56:31.067924 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/27e67faf-b924-4c16-b22b-3297bb62a925-config-data" (OuterVolumeSpecName: "config-data") pod "27e67faf-b924-4c16-b22b-3297bb62a925" (UID: "27e67faf-b924-4c16-b22b-3297bb62a925"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:56:31 crc kubenswrapper[4558]: I0120 17:56:31.071148 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/27e67faf-b924-4c16-b22b-3297bb62a925-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "27e67faf-b924-4c16-b22b-3297bb62a925" (UID: "27e67faf-b924-4c16-b22b-3297bb62a925"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:56:31 crc kubenswrapper[4558]: I0120 17:56:31.142823 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/27e67faf-b924-4c16-b22b-3297bb62a925-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:31 crc kubenswrapper[4558]: I0120 17:56:31.142859 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/27e67faf-b924-4c16-b22b-3297bb62a925-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:31 crc kubenswrapper[4558]: I0120 17:56:31.142870 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/27e67faf-b924-4c16-b22b-3297bb62a925-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:31 crc kubenswrapper[4558]: I0120 17:56:31.142884 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5qp48\" (UniqueName: \"kubernetes.io/projected/27e67faf-b924-4c16-b22b-3297bb62a925-kube-api-access-5qp48\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:31 crc kubenswrapper[4558]: I0120 17:56:31.604107 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-6ltj7" event={"ID":"27e67faf-b924-4c16-b22b-3297bb62a925","Type":"ContainerDied","Data":"2d39c5ee1c2a02f98dc6f8bea4c2057f86bcdb14ac2b7372957d441cfb781e76"} Jan 20 17:56:31 crc kubenswrapper[4558]: I0120 17:56:31.604277 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2d39c5ee1c2a02f98dc6f8bea4c2057f86bcdb14ac2b7372957d441cfb781e76" Jan 20 17:56:31 crc kubenswrapper[4558]: I0120 17:56:31.604145 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-db-sync-6ltj7" Jan 20 17:56:31 crc kubenswrapper[4558]: I0120 17:56:31.605777 4558 generic.go:334] "Generic (PLEG): container finished" podID="a0f54b8a-a5cf-4081-b4d6-f759bb1ae331" containerID="dd8db3ceab78f235dc97917832328d6e0fcaa24e03f8ac167d9b8cc02b674413" exitCode=0 Jan 20 17:56:31 crc kubenswrapper[4558]: I0120 17:56:31.605811 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-l8mfx" event={"ID":"a0f54b8a-a5cf-4081-b4d6-f759bb1ae331","Type":"ContainerDied","Data":"dd8db3ceab78f235dc97917832328d6e0fcaa24e03f8ac167d9b8cc02b674413"} Jan 20 17:56:31 crc kubenswrapper[4558]: I0120 17:56:31.656487 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:56:31 crc kubenswrapper[4558]: E0120 17:56:31.656959 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27e67faf-b924-4c16-b22b-3297bb62a925" containerName="nova-cell1-conductor-db-sync" Jan 20 17:56:31 crc kubenswrapper[4558]: I0120 17:56:31.656981 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="27e67faf-b924-4c16-b22b-3297bb62a925" containerName="nova-cell1-conductor-db-sync" Jan 20 17:56:31 crc kubenswrapper[4558]: I0120 17:56:31.657265 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="27e67faf-b924-4c16-b22b-3297bb62a925" containerName="nova-cell1-conductor-db-sync" Jan 20 17:56:31 crc kubenswrapper[4558]: I0120 17:56:31.657987 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:56:31 crc kubenswrapper[4558]: I0120 17:56:31.659658 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-conductor-config-data" Jan 20 17:56:31 crc kubenswrapper[4558]: I0120 17:56:31.666790 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:56:31 crc kubenswrapper[4558]: I0120 17:56:31.753485 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54cbdea3-8fcc-4d2d-870b-cf3663cfc633-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"54cbdea3-8fcc-4d2d-870b-cf3663cfc633\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:56:31 crc kubenswrapper[4558]: I0120 17:56:31.753549 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54cbdea3-8fcc-4d2d-870b-cf3663cfc633-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"54cbdea3-8fcc-4d2d-870b-cf3663cfc633\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:56:31 crc kubenswrapper[4558]: I0120 17:56:31.753601 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jxrm2\" (UniqueName: \"kubernetes.io/projected/54cbdea3-8fcc-4d2d-870b-cf3663cfc633-kube-api-access-jxrm2\") pod \"nova-cell1-conductor-0\" (UID: \"54cbdea3-8fcc-4d2d-870b-cf3663cfc633\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:56:31 crc kubenswrapper[4558]: I0120 17:56:31.854952 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54cbdea3-8fcc-4d2d-870b-cf3663cfc633-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"54cbdea3-8fcc-4d2d-870b-cf3663cfc633\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:56:31 crc kubenswrapper[4558]: I0120 17:56:31.855013 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jxrm2\" (UniqueName: \"kubernetes.io/projected/54cbdea3-8fcc-4d2d-870b-cf3663cfc633-kube-api-access-jxrm2\") pod \"nova-cell1-conductor-0\" (UID: \"54cbdea3-8fcc-4d2d-870b-cf3663cfc633\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:56:31 crc kubenswrapper[4558]: I0120 17:56:31.855138 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54cbdea3-8fcc-4d2d-870b-cf3663cfc633-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"54cbdea3-8fcc-4d2d-870b-cf3663cfc633\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:56:31 crc kubenswrapper[4558]: I0120 17:56:31.860098 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54cbdea3-8fcc-4d2d-870b-cf3663cfc633-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"54cbdea3-8fcc-4d2d-870b-cf3663cfc633\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:56:31 crc kubenswrapper[4558]: I0120 17:56:31.860643 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54cbdea3-8fcc-4d2d-870b-cf3663cfc633-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"54cbdea3-8fcc-4d2d-870b-cf3663cfc633\") " 
pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:56:31 crc kubenswrapper[4558]: I0120 17:56:31.869234 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jxrm2\" (UniqueName: \"kubernetes.io/projected/54cbdea3-8fcc-4d2d-870b-cf3663cfc633-kube-api-access-jxrm2\") pod \"nova-cell1-conductor-0\" (UID: \"54cbdea3-8fcc-4d2d-870b-cf3663cfc633\") " pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:56:31 crc kubenswrapper[4558]: I0120 17:56:31.981203 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:56:32 crc kubenswrapper[4558]: I0120 17:56:32.396436 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:56:32 crc kubenswrapper[4558]: I0120 17:56:32.624550 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"54cbdea3-8fcc-4d2d-870b-cf3663cfc633","Type":"ContainerStarted","Data":"93ad707d620f625a0b7b0c96075af0a312fdd519b3c0dede73cdb9bef7e7e9a5"} Jan 20 17:56:32 crc kubenswrapper[4558]: I0120 17:56:32.624631 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"54cbdea3-8fcc-4d2d-870b-cf3663cfc633","Type":"ContainerStarted","Data":"27028af688f77cb0d0d48c29a6639a2e3128cc80e75bbbd64fd48d2760225807"} Jan 20 17:56:32 crc kubenswrapper[4558]: I0120 17:56:32.624658 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:56:32 crc kubenswrapper[4558]: I0120 17:56:32.654402 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podStartSLOduration=1.654377353 podStartE2EDuration="1.654377353s" podCreationTimestamp="2026-01-20 17:56:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:56:32.647255887 +0000 UTC m=+4486.407593854" watchObservedRunningTime="2026-01-20 17:56:32.654377353 +0000 UTC m=+4486.414715321" Jan 20 17:56:32 crc kubenswrapper[4558]: I0120 17:56:32.895431 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-l8mfx" Jan 20 17:56:32 crc kubenswrapper[4558]: I0120 17:56:32.976214 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a0f54b8a-a5cf-4081-b4d6-f759bb1ae331-scripts\") pod \"a0f54b8a-a5cf-4081-b4d6-f759bb1ae331\" (UID: \"a0f54b8a-a5cf-4081-b4d6-f759bb1ae331\") " Jan 20 17:56:32 crc kubenswrapper[4558]: I0120 17:56:32.976357 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfrv\" (UniqueName: \"kubernetes.io/projected/a0f54b8a-a5cf-4081-b4d6-f759bb1ae331-kube-api-access-htfrv\") pod \"a0f54b8a-a5cf-4081-b4d6-f759bb1ae331\" (UID: \"a0f54b8a-a5cf-4081-b4d6-f759bb1ae331\") " Jan 20 17:56:32 crc kubenswrapper[4558]: I0120 17:56:32.976425 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a0f54b8a-a5cf-4081-b4d6-f759bb1ae331-config-data\") pod \"a0f54b8a-a5cf-4081-b4d6-f759bb1ae331\" (UID: \"a0f54b8a-a5cf-4081-b4d6-f759bb1ae331\") " Jan 20 17:56:32 crc kubenswrapper[4558]: I0120 17:56:32.976462 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0f54b8a-a5cf-4081-b4d6-f759bb1ae331-combined-ca-bundle\") pod \"a0f54b8a-a5cf-4081-b4d6-f759bb1ae331\" (UID: \"a0f54b8a-a5cf-4081-b4d6-f759bb1ae331\") " Jan 20 17:56:32 crc kubenswrapper[4558]: I0120 17:56:32.983792 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0f54b8a-a5cf-4081-b4d6-f759bb1ae331-kube-api-access-htfrv" (OuterVolumeSpecName: "kube-api-access-htfrv") pod "a0f54b8a-a5cf-4081-b4d6-f759bb1ae331" (UID: "a0f54b8a-a5cf-4081-b4d6-f759bb1ae331"). InnerVolumeSpecName "kube-api-access-htfrv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:56:32 crc kubenswrapper[4558]: I0120 17:56:32.984205 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0f54b8a-a5cf-4081-b4d6-f759bb1ae331-scripts" (OuterVolumeSpecName: "scripts") pod "a0f54b8a-a5cf-4081-b4d6-f759bb1ae331" (UID: "a0f54b8a-a5cf-4081-b4d6-f759bb1ae331"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:56:33 crc kubenswrapper[4558]: I0120 17:56:33.002689 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0f54b8a-a5cf-4081-b4d6-f759bb1ae331-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a0f54b8a-a5cf-4081-b4d6-f759bb1ae331" (UID: "a0f54b8a-a5cf-4081-b4d6-f759bb1ae331"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:56:33 crc kubenswrapper[4558]: I0120 17:56:33.013701 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0f54b8a-a5cf-4081-b4d6-f759bb1ae331-config-data" (OuterVolumeSpecName: "config-data") pod "a0f54b8a-a5cf-4081-b4d6-f759bb1ae331" (UID: "a0f54b8a-a5cf-4081-b4d6-f759bb1ae331"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:56:33 crc kubenswrapper[4558]: I0120 17:56:33.079135 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a0f54b8a-a5cf-4081-b4d6-f759bb1ae331-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:33 crc kubenswrapper[4558]: I0120 17:56:33.079251 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0f54b8a-a5cf-4081-b4d6-f759bb1ae331-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:33 crc kubenswrapper[4558]: I0120 17:56:33.079297 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a0f54b8a-a5cf-4081-b4d6-f759bb1ae331-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:33 crc kubenswrapper[4558]: I0120 17:56:33.079313 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfrv\" (UniqueName: \"kubernetes.io/projected/a0f54b8a-a5cf-4081-b4d6-f759bb1ae331-kube-api-access-htfrv\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:33 crc kubenswrapper[4558]: I0120 17:56:33.648584 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:56:33 crc kubenswrapper[4558]: I0120 17:56:33.648714 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-l8mfx" Jan 20 17:56:33 crc kubenswrapper[4558]: I0120 17:56:33.648788 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-cell-mapping-l8mfx" event={"ID":"a0f54b8a-a5cf-4081-b4d6-f759bb1ae331","Type":"ContainerDied","Data":"0d34dba01c9d1800d43b26acca64efc520f44e2535d90771b1ae3f125ffbe193"} Jan 20 17:56:33 crc kubenswrapper[4558]: I0120 17:56:33.649104 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0d34dba01c9d1800d43b26acca64efc520f44e2535d90771b1ae3f125ffbe193" Jan 20 17:56:33 crc kubenswrapper[4558]: I0120 17:56:33.649123 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:56:33 crc kubenswrapper[4558]: I0120 17:56:33.800301 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:56:33 crc kubenswrapper[4558]: I0120 17:56:33.810585 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:56:33 crc kubenswrapper[4558]: I0120 17:56:33.810934 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="92d9f753-7bce-42db-878e-1d274cfcea51" containerName="nova-scheduler-scheduler" containerID="cri-o://6235a6d4a347ec8f43f97bf14e81fa63fe879b20b8165d6c5d78a45ac07c32e9" gracePeriod=30 Jan 20 17:56:33 crc kubenswrapper[4558]: I0120 17:56:33.848994 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:56:33 crc kubenswrapper[4558]: I0120 17:56:33.849232 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="26871e4f-876d-488f-88c9-3aa973870a11" containerName="nova-metadata-log" containerID="cri-o://88d6eaab583bb20c071c86195b717ccf31acee517d98f82fdac7e0ffd4f4cf63" gracePeriod=30 Jan 20 17:56:33 crc kubenswrapper[4558]: I0120 17:56:33.849293 4558 kuberuntime_container.go:808] "Killing 
container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="26871e4f-876d-488f-88c9-3aa973870a11" containerName="nova-metadata-metadata" containerID="cri-o://424b6e5f92b76fc81c831fbc08e5609a2c221b898ed12b28d791cf6eab46f46e" gracePeriod=30 Jan 20 17:56:33 crc kubenswrapper[4558]: I0120 17:56:33.928478 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:56:34 crc kubenswrapper[4558]: I0120 17:56:34.003463 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:34 crc kubenswrapper[4558]: I0120 17:56:34.003536 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:34 crc kubenswrapper[4558]: I0120 17:56:34.391125 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:34 crc kubenswrapper[4558]: I0120 17:56:34.404737 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6w6sn\" (UniqueName: \"kubernetes.io/projected/26871e4f-876d-488f-88c9-3aa973870a11-kube-api-access-6w6sn\") pod \"26871e4f-876d-488f-88c9-3aa973870a11\" (UID: \"26871e4f-876d-488f-88c9-3aa973870a11\") " Jan 20 17:56:34 crc kubenswrapper[4558]: I0120 17:56:34.405145 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/26871e4f-876d-488f-88c9-3aa973870a11-config-data\") pod \"26871e4f-876d-488f-88c9-3aa973870a11\" (UID: \"26871e4f-876d-488f-88c9-3aa973870a11\") " Jan 20 17:56:34 crc kubenswrapper[4558]: I0120 17:56:34.405277 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/26871e4f-876d-488f-88c9-3aa973870a11-combined-ca-bundle\") pod \"26871e4f-876d-488f-88c9-3aa973870a11\" (UID: \"26871e4f-876d-488f-88c9-3aa973870a11\") " Jan 20 17:56:34 crc kubenswrapper[4558]: I0120 17:56:34.405391 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/26871e4f-876d-488f-88c9-3aa973870a11-logs\") pod \"26871e4f-876d-488f-88c9-3aa973870a11\" (UID: \"26871e4f-876d-488f-88c9-3aa973870a11\") " Jan 20 17:56:34 crc kubenswrapper[4558]: I0120 17:56:34.405430 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/26871e4f-876d-488f-88c9-3aa973870a11-nova-metadata-tls-certs\") pod \"26871e4f-876d-488f-88c9-3aa973870a11\" (UID: \"26871e4f-876d-488f-88c9-3aa973870a11\") " Jan 20 17:56:34 crc kubenswrapper[4558]: I0120 17:56:34.405794 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/26871e4f-876d-488f-88c9-3aa973870a11-logs" (OuterVolumeSpecName: "logs") pod "26871e4f-876d-488f-88c9-3aa973870a11" (UID: "26871e4f-876d-488f-88c9-3aa973870a11"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:56:34 crc kubenswrapper[4558]: I0120 17:56:34.406351 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/26871e4f-876d-488f-88c9-3aa973870a11-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:34 crc kubenswrapper[4558]: I0120 17:56:34.415310 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/26871e4f-876d-488f-88c9-3aa973870a11-kube-api-access-6w6sn" (OuterVolumeSpecName: "kube-api-access-6w6sn") pod "26871e4f-876d-488f-88c9-3aa973870a11" (UID: "26871e4f-876d-488f-88c9-3aa973870a11"). InnerVolumeSpecName "kube-api-access-6w6sn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:56:34 crc kubenswrapper[4558]: I0120 17:56:34.471993 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/26871e4f-876d-488f-88c9-3aa973870a11-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "26871e4f-876d-488f-88c9-3aa973870a11" (UID: "26871e4f-876d-488f-88c9-3aa973870a11"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:56:34 crc kubenswrapper[4558]: I0120 17:56:34.490253 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/26871e4f-876d-488f-88c9-3aa973870a11-config-data" (OuterVolumeSpecName: "config-data") pod "26871e4f-876d-488f-88c9-3aa973870a11" (UID: "26871e4f-876d-488f-88c9-3aa973870a11"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:56:34 crc kubenswrapper[4558]: I0120 17:56:34.510717 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/26871e4f-876d-488f-88c9-3aa973870a11-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:34 crc kubenswrapper[4558]: I0120 17:56:34.510749 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/26871e4f-876d-488f-88c9-3aa973870a11-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:34 crc kubenswrapper[4558]: I0120 17:56:34.510764 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6w6sn\" (UniqueName: \"kubernetes.io/projected/26871e4f-876d-488f-88c9-3aa973870a11-kube-api-access-6w6sn\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:34 crc kubenswrapper[4558]: I0120 17:56:34.513187 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/26871e4f-876d-488f-88c9-3aa973870a11-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "26871e4f-876d-488f-88c9-3aa973870a11" (UID: "26871e4f-876d-488f-88c9-3aa973870a11"). InnerVolumeSpecName "nova-metadata-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:56:34 crc kubenswrapper[4558]: I0120 17:56:34.613654 4558 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/26871e4f-876d-488f-88c9-3aa973870a11-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:34 crc kubenswrapper[4558]: I0120 17:56:34.663224 4558 generic.go:334] "Generic (PLEG): container finished" podID="26871e4f-876d-488f-88c9-3aa973870a11" containerID="424b6e5f92b76fc81c831fbc08e5609a2c221b898ed12b28d791cf6eab46f46e" exitCode=0 Jan 20 17:56:34 crc kubenswrapper[4558]: I0120 17:56:34.664075 4558 generic.go:334] "Generic (PLEG): container finished" podID="26871e4f-876d-488f-88c9-3aa973870a11" containerID="88d6eaab583bb20c071c86195b717ccf31acee517d98f82fdac7e0ffd4f4cf63" exitCode=143 Jan 20 17:56:34 crc kubenswrapper[4558]: I0120 17:56:34.663788 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"26871e4f-876d-488f-88c9-3aa973870a11","Type":"ContainerDied","Data":"424b6e5f92b76fc81c831fbc08e5609a2c221b898ed12b28d791cf6eab46f46e"} Jan 20 17:56:34 crc kubenswrapper[4558]: I0120 17:56:34.664308 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"26871e4f-876d-488f-88c9-3aa973870a11","Type":"ContainerDied","Data":"88d6eaab583bb20c071c86195b717ccf31acee517d98f82fdac7e0ffd4f4cf63"} Jan 20 17:56:34 crc kubenswrapper[4558]: I0120 17:56:34.664350 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"26871e4f-876d-488f-88c9-3aa973870a11","Type":"ContainerDied","Data":"dd0fd4464656cf0388fd04d718132c848e1d0c1fc3c1c68b2acd8ba8bdd1aa27"} Jan 20 17:56:34 crc kubenswrapper[4558]: I0120 17:56:34.664380 4558 scope.go:117] "RemoveContainer" containerID="424b6e5f92b76fc81c831fbc08e5609a2c221b898ed12b28d791cf6eab46f46e" Jan 20 17:56:34 crc kubenswrapper[4558]: I0120 17:56:34.663913 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:34 crc kubenswrapper[4558]: I0120 17:56:34.708396 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:56:34 crc kubenswrapper[4558]: I0120 17:56:34.709664 4558 scope.go:117] "RemoveContainer" containerID="88d6eaab583bb20c071c86195b717ccf31acee517d98f82fdac7e0ffd4f4cf63" Jan 20 17:56:34 crc kubenswrapper[4558]: I0120 17:56:34.714233 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:56:34 crc kubenswrapper[4558]: I0120 17:56:34.725968 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:56:34 crc kubenswrapper[4558]: E0120 17:56:34.726491 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26871e4f-876d-488f-88c9-3aa973870a11" containerName="nova-metadata-metadata" Jan 20 17:56:34 crc kubenswrapper[4558]: I0120 17:56:34.726506 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="26871e4f-876d-488f-88c9-3aa973870a11" containerName="nova-metadata-metadata" Jan 20 17:56:34 crc kubenswrapper[4558]: E0120 17:56:34.726518 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0f54b8a-a5cf-4081-b4d6-f759bb1ae331" containerName="nova-manage" Jan 20 17:56:34 crc kubenswrapper[4558]: I0120 17:56:34.726524 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0f54b8a-a5cf-4081-b4d6-f759bb1ae331" containerName="nova-manage" Jan 20 17:56:34 crc kubenswrapper[4558]: E0120 17:56:34.726541 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26871e4f-876d-488f-88c9-3aa973870a11" containerName="nova-metadata-log" Jan 20 17:56:34 crc kubenswrapper[4558]: I0120 17:56:34.726547 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="26871e4f-876d-488f-88c9-3aa973870a11" containerName="nova-metadata-log" Jan 20 17:56:34 crc kubenswrapper[4558]: I0120 17:56:34.726740 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="26871e4f-876d-488f-88c9-3aa973870a11" containerName="nova-metadata-metadata" Jan 20 17:56:34 crc kubenswrapper[4558]: I0120 17:56:34.726755 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a0f54b8a-a5cf-4081-b4d6-f759bb1ae331" containerName="nova-manage" Jan 20 17:56:34 crc kubenswrapper[4558]: I0120 17:56:34.726766 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="26871e4f-876d-488f-88c9-3aa973870a11" containerName="nova-metadata-log" Jan 20 17:56:34 crc kubenswrapper[4558]: I0120 17:56:34.727835 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:34 crc kubenswrapper[4558]: I0120 17:56:34.728098 4558 scope.go:117] "RemoveContainer" containerID="424b6e5f92b76fc81c831fbc08e5609a2c221b898ed12b28d791cf6eab46f46e" Jan 20 17:56:34 crc kubenswrapper[4558]: E0120 17:56:34.728639 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"424b6e5f92b76fc81c831fbc08e5609a2c221b898ed12b28d791cf6eab46f46e\": container with ID starting with 424b6e5f92b76fc81c831fbc08e5609a2c221b898ed12b28d791cf6eab46f46e not found: ID does not exist" containerID="424b6e5f92b76fc81c831fbc08e5609a2c221b898ed12b28d791cf6eab46f46e" Jan 20 17:56:34 crc kubenswrapper[4558]: I0120 17:56:34.728692 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"424b6e5f92b76fc81c831fbc08e5609a2c221b898ed12b28d791cf6eab46f46e"} err="failed to get container status \"424b6e5f92b76fc81c831fbc08e5609a2c221b898ed12b28d791cf6eab46f46e\": rpc error: code = NotFound desc = could not find container \"424b6e5f92b76fc81c831fbc08e5609a2c221b898ed12b28d791cf6eab46f46e\": container with ID starting with 424b6e5f92b76fc81c831fbc08e5609a2c221b898ed12b28d791cf6eab46f46e not found: ID does not exist" Jan 20 17:56:34 crc kubenswrapper[4558]: I0120 17:56:34.728727 4558 scope.go:117] "RemoveContainer" containerID="88d6eaab583bb20c071c86195b717ccf31acee517d98f82fdac7e0ffd4f4cf63" Jan 20 17:56:34 crc kubenswrapper[4558]: E0120 17:56:34.729008 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"88d6eaab583bb20c071c86195b717ccf31acee517d98f82fdac7e0ffd4f4cf63\": container with ID starting with 88d6eaab583bb20c071c86195b717ccf31acee517d98f82fdac7e0ffd4f4cf63 not found: ID does not exist" containerID="88d6eaab583bb20c071c86195b717ccf31acee517d98f82fdac7e0ffd4f4cf63" Jan 20 17:56:34 crc kubenswrapper[4558]: I0120 17:56:34.729048 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"88d6eaab583bb20c071c86195b717ccf31acee517d98f82fdac7e0ffd4f4cf63"} err="failed to get container status \"88d6eaab583bb20c071c86195b717ccf31acee517d98f82fdac7e0ffd4f4cf63\": rpc error: code = NotFound desc = could not find container \"88d6eaab583bb20c071c86195b717ccf31acee517d98f82fdac7e0ffd4f4cf63\": container with ID starting with 88d6eaab583bb20c071c86195b717ccf31acee517d98f82fdac7e0ffd4f4cf63 not found: ID does not exist" Jan 20 17:56:34 crc kubenswrapper[4558]: I0120 17:56:34.729072 4558 scope.go:117] "RemoveContainer" containerID="424b6e5f92b76fc81c831fbc08e5609a2c221b898ed12b28d791cf6eab46f46e" Jan 20 17:56:34 crc kubenswrapper[4558]: I0120 17:56:34.729268 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"424b6e5f92b76fc81c831fbc08e5609a2c221b898ed12b28d791cf6eab46f46e"} err="failed to get container status \"424b6e5f92b76fc81c831fbc08e5609a2c221b898ed12b28d791cf6eab46f46e\": rpc error: code = NotFound desc = could not find container \"424b6e5f92b76fc81c831fbc08e5609a2c221b898ed12b28d791cf6eab46f46e\": container with ID starting with 424b6e5f92b76fc81c831fbc08e5609a2c221b898ed12b28d791cf6eab46f46e not found: ID does not exist" Jan 20 17:56:34 crc kubenswrapper[4558]: I0120 17:56:34.729289 4558 scope.go:117] "RemoveContainer" containerID="88d6eaab583bb20c071c86195b717ccf31acee517d98f82fdac7e0ffd4f4cf63" Jan 20 17:56:34 crc kubenswrapper[4558]: I0120 
17:56:34.729439 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"88d6eaab583bb20c071c86195b717ccf31acee517d98f82fdac7e0ffd4f4cf63"} err="failed to get container status \"88d6eaab583bb20c071c86195b717ccf31acee517d98f82fdac7e0ffd4f4cf63\": rpc error: code = NotFound desc = could not find container \"88d6eaab583bb20c071c86195b717ccf31acee517d98f82fdac7e0ffd4f4cf63\": container with ID starting with 88d6eaab583bb20c071c86195b717ccf31acee517d98f82fdac7e0ffd4f4cf63 not found: ID does not exist" Jan 20 17:56:34 crc kubenswrapper[4558]: I0120 17:56:34.730257 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-metadata-internal-svc" Jan 20 17:56:34 crc kubenswrapper[4558]: I0120 17:56:34.733342 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" podUID="9200cf36-f689-4e68-9847-5ad9f1aad20c" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.149:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 20 17:56:34 crc kubenswrapper[4558]: I0120 17:56:34.733409 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" podUID="9200cf36-f689-4e68-9847-5ad9f1aad20c" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.149:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 20 17:56:34 crc kubenswrapper[4558]: I0120 17:56:34.733754 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-metadata-config-data" Jan 20 17:56:34 crc kubenswrapper[4558]: I0120 17:56:34.746031 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:56:34 crc kubenswrapper[4558]: I0120 17:56:34.819300 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sdhfx\" (UniqueName: \"kubernetes.io/projected/9d2add51-d3c5-44af-a792-a7301e834322-kube-api-access-sdhfx\") pod \"nova-metadata-0\" (UID: \"9d2add51-d3c5-44af-a792-a7301e834322\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:34 crc kubenswrapper[4558]: I0120 17:56:34.819426 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/9d2add51-d3c5-44af-a792-a7301e834322-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"9d2add51-d3c5-44af-a792-a7301e834322\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:34 crc kubenswrapper[4558]: I0120 17:56:34.819695 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d2add51-d3c5-44af-a792-a7301e834322-config-data\") pod \"nova-metadata-0\" (UID: \"9d2add51-d3c5-44af-a792-a7301e834322\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:34 crc kubenswrapper[4558]: I0120 17:56:34.819766 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d2add51-d3c5-44af-a792-a7301e834322-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"9d2add51-d3c5-44af-a792-a7301e834322\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:34 crc kubenswrapper[4558]: I0120 17:56:34.819985 4558 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9d2add51-d3c5-44af-a792-a7301e834322-logs\") pod \"nova-metadata-0\" (UID: \"9d2add51-d3c5-44af-a792-a7301e834322\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:34 crc kubenswrapper[4558]: I0120 17:56:34.921985 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/9d2add51-d3c5-44af-a792-a7301e834322-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"9d2add51-d3c5-44af-a792-a7301e834322\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:34 crc kubenswrapper[4558]: I0120 17:56:34.922097 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d2add51-d3c5-44af-a792-a7301e834322-config-data\") pod \"nova-metadata-0\" (UID: \"9d2add51-d3c5-44af-a792-a7301e834322\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:34 crc kubenswrapper[4558]: I0120 17:56:34.922452 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d2add51-d3c5-44af-a792-a7301e834322-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"9d2add51-d3c5-44af-a792-a7301e834322\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:34 crc kubenswrapper[4558]: I0120 17:56:34.922514 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9d2add51-d3c5-44af-a792-a7301e834322-logs\") pod \"nova-metadata-0\" (UID: \"9d2add51-d3c5-44af-a792-a7301e834322\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:34 crc kubenswrapper[4558]: I0120 17:56:34.922567 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sdhfx\" (UniqueName: \"kubernetes.io/projected/9d2add51-d3c5-44af-a792-a7301e834322-kube-api-access-sdhfx\") pod \"nova-metadata-0\" (UID: \"9d2add51-d3c5-44af-a792-a7301e834322\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:34 crc kubenswrapper[4558]: I0120 17:56:34.926309 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9d2add51-d3c5-44af-a792-a7301e834322-logs\") pod \"nova-metadata-0\" (UID: \"9d2add51-d3c5-44af-a792-a7301e834322\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:34 crc kubenswrapper[4558]: I0120 17:56:34.926819 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/9d2add51-d3c5-44af-a792-a7301e834322-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"9d2add51-d3c5-44af-a792-a7301e834322\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:34 crc kubenswrapper[4558]: I0120 17:56:34.928190 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d2add51-d3c5-44af-a792-a7301e834322-config-data\") pod \"nova-metadata-0\" (UID: \"9d2add51-d3c5-44af-a792-a7301e834322\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:34 crc kubenswrapper[4558]: I0120 17:56:34.935246 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d2add51-d3c5-44af-a792-a7301e834322-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: 
\"9d2add51-d3c5-44af-a792-a7301e834322\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:34 crc kubenswrapper[4558]: I0120 17:56:34.936793 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sdhfx\" (UniqueName: \"kubernetes.io/projected/9d2add51-d3c5-44af-a792-a7301e834322-kube-api-access-sdhfx\") pod \"nova-metadata-0\" (UID: \"9d2add51-d3c5-44af-a792-a7301e834322\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:35 crc kubenswrapper[4558]: I0120 17:56:35.064675 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:35 crc kubenswrapper[4558]: I0120 17:56:35.408100 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-k6dw6"] Jan 20 17:56:35 crc kubenswrapper[4558]: I0120 17:56:35.410232 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-k6dw6" Jan 20 17:56:35 crc kubenswrapper[4558]: I0120 17:56:35.422689 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-k6dw6"] Jan 20 17:56:35 crc kubenswrapper[4558]: I0120 17:56:35.432567 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sfxv2\" (UniqueName: \"kubernetes.io/projected/6b78b532-b4ab-4bd3-a833-95f464f0849d-kube-api-access-sfxv2\") pod \"redhat-marketplace-k6dw6\" (UID: \"6b78b532-b4ab-4bd3-a833-95f464f0849d\") " pod="openshift-marketplace/redhat-marketplace-k6dw6" Jan 20 17:56:35 crc kubenswrapper[4558]: I0120 17:56:35.432611 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6b78b532-b4ab-4bd3-a833-95f464f0849d-utilities\") pod \"redhat-marketplace-k6dw6\" (UID: \"6b78b532-b4ab-4bd3-a833-95f464f0849d\") " pod="openshift-marketplace/redhat-marketplace-k6dw6" Jan 20 17:56:35 crc kubenswrapper[4558]: I0120 17:56:35.432696 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6b78b532-b4ab-4bd3-a833-95f464f0849d-catalog-content\") pod \"redhat-marketplace-k6dw6\" (UID: \"6b78b532-b4ab-4bd3-a833-95f464f0849d\") " pod="openshift-marketplace/redhat-marketplace-k6dw6" Jan 20 17:56:35 crc kubenswrapper[4558]: I0120 17:56:35.483662 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:56:35 crc kubenswrapper[4558]: I0120 17:56:35.534742 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sfxv2\" (UniqueName: \"kubernetes.io/projected/6b78b532-b4ab-4bd3-a833-95f464f0849d-kube-api-access-sfxv2\") pod \"redhat-marketplace-k6dw6\" (UID: \"6b78b532-b4ab-4bd3-a833-95f464f0849d\") " pod="openshift-marketplace/redhat-marketplace-k6dw6" Jan 20 17:56:35 crc kubenswrapper[4558]: I0120 17:56:35.535035 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6b78b532-b4ab-4bd3-a833-95f464f0849d-utilities\") pod \"redhat-marketplace-k6dw6\" (UID: \"6b78b532-b4ab-4bd3-a833-95f464f0849d\") " pod="openshift-marketplace/redhat-marketplace-k6dw6" Jan 20 17:56:35 crc kubenswrapper[4558]: I0120 17:56:35.535364 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" 
(UniqueName: \"kubernetes.io/empty-dir/6b78b532-b4ab-4bd3-a833-95f464f0849d-catalog-content\") pod \"redhat-marketplace-k6dw6\" (UID: \"6b78b532-b4ab-4bd3-a833-95f464f0849d\") " pod="openshift-marketplace/redhat-marketplace-k6dw6" Jan 20 17:56:35 crc kubenswrapper[4558]: I0120 17:56:35.535952 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6b78b532-b4ab-4bd3-a833-95f464f0849d-catalog-content\") pod \"redhat-marketplace-k6dw6\" (UID: \"6b78b532-b4ab-4bd3-a833-95f464f0849d\") " pod="openshift-marketplace/redhat-marketplace-k6dw6" Jan 20 17:56:35 crc kubenswrapper[4558]: I0120 17:56:35.536634 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6b78b532-b4ab-4bd3-a833-95f464f0849d-utilities\") pod \"redhat-marketplace-k6dw6\" (UID: \"6b78b532-b4ab-4bd3-a833-95f464f0849d\") " pod="openshift-marketplace/redhat-marketplace-k6dw6" Jan 20 17:56:35 crc kubenswrapper[4558]: I0120 17:56:35.553253 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sfxv2\" (UniqueName: \"kubernetes.io/projected/6b78b532-b4ab-4bd3-a833-95f464f0849d-kube-api-access-sfxv2\") pod \"redhat-marketplace-k6dw6\" (UID: \"6b78b532-b4ab-4bd3-a833-95f464f0849d\") " pod="openshift-marketplace/redhat-marketplace-k6dw6" Jan 20 17:56:35 crc kubenswrapper[4558]: I0120 17:56:35.674248 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"9d2add51-d3c5-44af-a792-a7301e834322","Type":"ContainerStarted","Data":"32b281763590d5026a26422ced7e24a1579ad8615e8a306bc2cf6fb2ec584ce4"} Jan 20 17:56:35 crc kubenswrapper[4558]: I0120 17:56:35.674382 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="9200cf36-f689-4e68-9847-5ad9f1aad20c" containerName="nova-api-log" containerID="cri-o://00663a7778425e0f73bea2d684a395a3da2dd27df2181da8e3d491c11d317544" gracePeriod=30 Jan 20 17:56:35 crc kubenswrapper[4558]: I0120 17:56:35.674435 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="9200cf36-f689-4e68-9847-5ad9f1aad20c" containerName="nova-api-api" containerID="cri-o://4327cff3b2a73ebb89e07fafb3730b873a2555273bb891e589fcb0f39e6e591f" gracePeriod=30 Jan 20 17:56:35 crc kubenswrapper[4558]: I0120 17:56:35.732996 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-k6dw6" Jan 20 17:56:36 crc kubenswrapper[4558]: I0120 17:56:36.189008 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-k6dw6"] Jan 20 17:56:36 crc kubenswrapper[4558]: W0120 17:56:36.198149 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6b78b532_b4ab_4bd3_a833_95f464f0849d.slice/crio-bce78c2338329ba2aaded12ee14a6358f29601e953bbcc92a71a834c8556c4fd WatchSource:0}: Error finding container bce78c2338329ba2aaded12ee14a6358f29601e953bbcc92a71a834c8556c4fd: Status 404 returned error can't find the container with id bce78c2338329ba2aaded12ee14a6358f29601e953bbcc92a71a834c8556c4fd Jan 20 17:56:36 crc kubenswrapper[4558]: I0120 17:56:36.513652 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:56:36 crc kubenswrapper[4558]: I0120 17:56:36.559413 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zn9nh\" (UniqueName: \"kubernetes.io/projected/92d9f753-7bce-42db-878e-1d274cfcea51-kube-api-access-zn9nh\") pod \"92d9f753-7bce-42db-878e-1d274cfcea51\" (UID: \"92d9f753-7bce-42db-878e-1d274cfcea51\") " Jan 20 17:56:36 crc kubenswrapper[4558]: I0120 17:56:36.559526 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92d9f753-7bce-42db-878e-1d274cfcea51-combined-ca-bundle\") pod \"92d9f753-7bce-42db-878e-1d274cfcea51\" (UID: \"92d9f753-7bce-42db-878e-1d274cfcea51\") " Jan 20 17:56:36 crc kubenswrapper[4558]: I0120 17:56:36.560663 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92d9f753-7bce-42db-878e-1d274cfcea51-config-data\") pod \"92d9f753-7bce-42db-878e-1d274cfcea51\" (UID: \"92d9f753-7bce-42db-878e-1d274cfcea51\") " Jan 20 17:56:36 crc kubenswrapper[4558]: I0120 17:56:36.571342 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/92d9f753-7bce-42db-878e-1d274cfcea51-kube-api-access-zn9nh" (OuterVolumeSpecName: "kube-api-access-zn9nh") pod "92d9f753-7bce-42db-878e-1d274cfcea51" (UID: "92d9f753-7bce-42db-878e-1d274cfcea51"). InnerVolumeSpecName "kube-api-access-zn9nh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:56:36 crc kubenswrapper[4558]: I0120 17:56:36.579264 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="26871e4f-876d-488f-88c9-3aa973870a11" path="/var/lib/kubelet/pods/26871e4f-876d-488f-88c9-3aa973870a11/volumes" Jan 20 17:56:36 crc kubenswrapper[4558]: I0120 17:56:36.593431 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92d9f753-7bce-42db-878e-1d274cfcea51-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "92d9f753-7bce-42db-878e-1d274cfcea51" (UID: "92d9f753-7bce-42db-878e-1d274cfcea51"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:56:36 crc kubenswrapper[4558]: I0120 17:56:36.598669 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92d9f753-7bce-42db-878e-1d274cfcea51-config-data" (OuterVolumeSpecName: "config-data") pod "92d9f753-7bce-42db-878e-1d274cfcea51" (UID: "92d9f753-7bce-42db-878e-1d274cfcea51"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:56:36 crc kubenswrapper[4558]: I0120 17:56:36.664397 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92d9f753-7bce-42db-878e-1d274cfcea51-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:36 crc kubenswrapper[4558]: I0120 17:56:36.664422 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zn9nh\" (UniqueName: \"kubernetes.io/projected/92d9f753-7bce-42db-878e-1d274cfcea51-kube-api-access-zn9nh\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:36 crc kubenswrapper[4558]: I0120 17:56:36.664435 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92d9f753-7bce-42db-878e-1d274cfcea51-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:36 crc kubenswrapper[4558]: I0120 17:56:36.683862 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"9d2add51-d3c5-44af-a792-a7301e834322","Type":"ContainerStarted","Data":"48c1a79885d63262f1636dd956e4fd1544b1f866fc373b6a47841c7ffba899eb"} Jan 20 17:56:36 crc kubenswrapper[4558]: I0120 17:56:36.683901 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"9d2add51-d3c5-44af-a792-a7301e834322","Type":"ContainerStarted","Data":"a51a56718bd2a2dbc6833433430e6b07d4e0e76fa08d35b70dd83eaeb7bc29a8"} Jan 20 17:56:36 crc kubenswrapper[4558]: I0120 17:56:36.685893 4558 generic.go:334] "Generic (PLEG): container finished" podID="92d9f753-7bce-42db-878e-1d274cfcea51" containerID="6235a6d4a347ec8f43f97bf14e81fa63fe879b20b8165d6c5d78a45ac07c32e9" exitCode=0 Jan 20 17:56:36 crc kubenswrapper[4558]: I0120 17:56:36.685937 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"92d9f753-7bce-42db-878e-1d274cfcea51","Type":"ContainerDied","Data":"6235a6d4a347ec8f43f97bf14e81fa63fe879b20b8165d6c5d78a45ac07c32e9"} Jan 20 17:56:36 crc kubenswrapper[4558]: I0120 17:56:36.685954 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"92d9f753-7bce-42db-878e-1d274cfcea51","Type":"ContainerDied","Data":"036c927aed338cd8745edfed28f5c6e024a7e9e4316d25503c4d3ba95f0942ec"} Jan 20 17:56:36 crc kubenswrapper[4558]: I0120 17:56:36.685972 4558 scope.go:117] "RemoveContainer" containerID="6235a6d4a347ec8f43f97bf14e81fa63fe879b20b8165d6c5d78a45ac07c32e9" Jan 20 17:56:36 crc kubenswrapper[4558]: I0120 17:56:36.686049 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:56:36 crc kubenswrapper[4558]: I0120 17:56:36.692579 4558 generic.go:334] "Generic (PLEG): container finished" podID="6b78b532-b4ab-4bd3-a833-95f464f0849d" containerID="d8dad32e8466c966fc099301fb386aa3c26abf2543b909a5090de63cc81ddf2c" exitCode=0 Jan 20 17:56:36 crc kubenswrapper[4558]: I0120 17:56:36.692964 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k6dw6" event={"ID":"6b78b532-b4ab-4bd3-a833-95f464f0849d","Type":"ContainerDied","Data":"d8dad32e8466c966fc099301fb386aa3c26abf2543b909a5090de63cc81ddf2c"} Jan 20 17:56:36 crc kubenswrapper[4558]: I0120 17:56:36.693031 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k6dw6" event={"ID":"6b78b532-b4ab-4bd3-a833-95f464f0849d","Type":"ContainerStarted","Data":"bce78c2338329ba2aaded12ee14a6358f29601e953bbcc92a71a834c8556c4fd"} Jan 20 17:56:36 crc kubenswrapper[4558]: I0120 17:56:36.694939 4558 generic.go:334] "Generic (PLEG): container finished" podID="9200cf36-f689-4e68-9847-5ad9f1aad20c" containerID="00663a7778425e0f73bea2d684a395a3da2dd27df2181da8e3d491c11d317544" exitCode=143 Jan 20 17:56:36 crc kubenswrapper[4558]: I0120 17:56:36.694972 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"9200cf36-f689-4e68-9847-5ad9f1aad20c","Type":"ContainerDied","Data":"00663a7778425e0f73bea2d684a395a3da2dd27df2181da8e3d491c11d317544"} Jan 20 17:56:36 crc kubenswrapper[4558]: I0120 17:56:36.712470 4558 scope.go:117] "RemoveContainer" containerID="6235a6d4a347ec8f43f97bf14e81fa63fe879b20b8165d6c5d78a45ac07c32e9" Jan 20 17:56:36 crc kubenswrapper[4558]: E0120 17:56:36.713253 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6235a6d4a347ec8f43f97bf14e81fa63fe879b20b8165d6c5d78a45ac07c32e9\": container with ID starting with 6235a6d4a347ec8f43f97bf14e81fa63fe879b20b8165d6c5d78a45ac07c32e9 not found: ID does not exist" containerID="6235a6d4a347ec8f43f97bf14e81fa63fe879b20b8165d6c5d78a45ac07c32e9" Jan 20 17:56:36 crc kubenswrapper[4558]: I0120 17:56:36.713339 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6235a6d4a347ec8f43f97bf14e81fa63fe879b20b8165d6c5d78a45ac07c32e9"} err="failed to get container status \"6235a6d4a347ec8f43f97bf14e81fa63fe879b20b8165d6c5d78a45ac07c32e9\": rpc error: code = NotFound desc = could not find container \"6235a6d4a347ec8f43f97bf14e81fa63fe879b20b8165d6c5d78a45ac07c32e9\": container with ID starting with 6235a6d4a347ec8f43f97bf14e81fa63fe879b20b8165d6c5d78a45ac07c32e9 not found: ID does not exist" Jan 20 17:56:36 crc kubenswrapper[4558]: I0120 17:56:36.760364 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-metadata-0" podStartSLOduration=2.760339297 podStartE2EDuration="2.760339297s" podCreationTimestamp="2026-01-20 17:56:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:56:36.707813883 +0000 UTC m=+4490.468151850" watchObservedRunningTime="2026-01-20 17:56:36.760339297 +0000 UTC m=+4490.520677264" Jan 20 17:56:36 crc kubenswrapper[4558]: I0120 17:56:36.778787 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:56:36 crc kubenswrapper[4558]: I0120 
17:56:36.784844 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:56:36 crc kubenswrapper[4558]: I0120 17:56:36.790559 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:56:36 crc kubenswrapper[4558]: E0120 17:56:36.791057 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92d9f753-7bce-42db-878e-1d274cfcea51" containerName="nova-scheduler-scheduler" Jan 20 17:56:36 crc kubenswrapper[4558]: I0120 17:56:36.791078 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="92d9f753-7bce-42db-878e-1d274cfcea51" containerName="nova-scheduler-scheduler" Jan 20 17:56:36 crc kubenswrapper[4558]: I0120 17:56:36.791355 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="92d9f753-7bce-42db-878e-1d274cfcea51" containerName="nova-scheduler-scheduler" Jan 20 17:56:36 crc kubenswrapper[4558]: I0120 17:56:36.792158 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:56:36 crc kubenswrapper[4558]: I0120 17:56:36.794078 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-scheduler-config-data" Jan 20 17:56:36 crc kubenswrapper[4558]: I0120 17:56:36.799221 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:56:36 crc kubenswrapper[4558]: I0120 17:56:36.868279 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/27f0171c-82eb-4da0-9cb6-1e8f3ce1db3d-config-data\") pod \"nova-scheduler-0\" (UID: \"27f0171c-82eb-4da0-9cb6-1e8f3ce1db3d\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:56:36 crc kubenswrapper[4558]: I0120 17:56:36.868334 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/27f0171c-82eb-4da0-9cb6-1e8f3ce1db3d-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"27f0171c-82eb-4da0-9cb6-1e8f3ce1db3d\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:56:36 crc kubenswrapper[4558]: I0120 17:56:36.868451 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d2wcv\" (UniqueName: \"kubernetes.io/projected/27f0171c-82eb-4da0-9cb6-1e8f3ce1db3d-kube-api-access-d2wcv\") pod \"nova-scheduler-0\" (UID: \"27f0171c-82eb-4da0-9cb6-1e8f3ce1db3d\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:56:36 crc kubenswrapper[4558]: I0120 17:56:36.969735 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/27f0171c-82eb-4da0-9cb6-1e8f3ce1db3d-config-data\") pod \"nova-scheduler-0\" (UID: \"27f0171c-82eb-4da0-9cb6-1e8f3ce1db3d\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:56:36 crc kubenswrapper[4558]: I0120 17:56:36.969775 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/27f0171c-82eb-4da0-9cb6-1e8f3ce1db3d-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"27f0171c-82eb-4da0-9cb6-1e8f3ce1db3d\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:56:36 crc kubenswrapper[4558]: I0120 17:56:36.969838 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d2wcv\" 
(UniqueName: \"kubernetes.io/projected/27f0171c-82eb-4da0-9cb6-1e8f3ce1db3d-kube-api-access-d2wcv\") pod \"nova-scheduler-0\" (UID: \"27f0171c-82eb-4da0-9cb6-1e8f3ce1db3d\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:56:36 crc kubenswrapper[4558]: I0120 17:56:36.974742 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/27f0171c-82eb-4da0-9cb6-1e8f3ce1db3d-config-data\") pod \"nova-scheduler-0\" (UID: \"27f0171c-82eb-4da0-9cb6-1e8f3ce1db3d\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:56:36 crc kubenswrapper[4558]: I0120 17:56:36.975198 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/27f0171c-82eb-4da0-9cb6-1e8f3ce1db3d-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"27f0171c-82eb-4da0-9cb6-1e8f3ce1db3d\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:56:36 crc kubenswrapper[4558]: I0120 17:56:36.983059 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d2wcv\" (UniqueName: \"kubernetes.io/projected/27f0171c-82eb-4da0-9cb6-1e8f3ce1db3d-kube-api-access-d2wcv\") pod \"nova-scheduler-0\" (UID: \"27f0171c-82eb-4da0-9cb6-1e8f3ce1db3d\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:56:37 crc kubenswrapper[4558]: I0120 17:56:37.107587 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:56:37 crc kubenswrapper[4558]: I0120 17:56:37.522655 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:56:38 crc kubenswrapper[4558]: I0120 17:56:38.576741 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="92d9f753-7bce-42db-878e-1d274cfcea51" path="/var/lib/kubelet/pods/92d9f753-7bce-42db-878e-1d274cfcea51/volumes" Jan 20 17:56:38 crc kubenswrapper[4558]: I0120 17:56:38.722939 4558 generic.go:334] "Generic (PLEG): container finished" podID="6b78b532-b4ab-4bd3-a833-95f464f0849d" containerID="690ac72bd61d725b3d2068122cf4b402e7cb192df043c7c7f6b3c386578e4fb4" exitCode=0 Jan 20 17:56:38 crc kubenswrapper[4558]: I0120 17:56:38.723036 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k6dw6" event={"ID":"6b78b532-b4ab-4bd3-a833-95f464f0849d","Type":"ContainerDied","Data":"690ac72bd61d725b3d2068122cf4b402e7cb192df043c7c7f6b3c386578e4fb4"} Jan 20 17:56:38 crc kubenswrapper[4558]: I0120 17:56:38.726254 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"27f0171c-82eb-4da0-9cb6-1e8f3ce1db3d","Type":"ContainerStarted","Data":"4476058cbcb49b0953be27b4233e9206097b51f7403463516c9726ffdf82d395"} Jan 20 17:56:38 crc kubenswrapper[4558]: I0120 17:56:38.726326 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"27f0171c-82eb-4da0-9cb6-1e8f3ce1db3d","Type":"ContainerStarted","Data":"66459d74bbe222c6504c998ba08fcf3e2d242617a7c92f40640f8c812ce4067d"} Jan 20 17:56:38 crc kubenswrapper[4558]: I0120 17:56:38.764809 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-scheduler-0" podStartSLOduration=2.764782276 podStartE2EDuration="2.764782276s" podCreationTimestamp="2026-01-20 17:56:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2026-01-20 17:56:38.76053425 +0000 UTC m=+4492.520872217" watchObservedRunningTime="2026-01-20 17:56:38.764782276 +0000 UTC m=+4492.525120243" Jan 20 17:56:38 crc kubenswrapper[4558]: I0120 17:56:38.928963 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:56:38 crc kubenswrapper[4558]: I0120 17:56:38.948611 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:56:39 crc kubenswrapper[4558]: I0120 17:56:39.740003 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k6dw6" event={"ID":"6b78b532-b4ab-4bd3-a833-95f464f0849d","Type":"ContainerStarted","Data":"a34c4b9be156eb278ecc45038acc26d0eabba5069eb21ad733e68f50f7d0ba59"} Jan 20 17:56:39 crc kubenswrapper[4558]: I0120 17:56:39.747387 4558 generic.go:334] "Generic (PLEG): container finished" podID="9200cf36-f689-4e68-9847-5ad9f1aad20c" containerID="4327cff3b2a73ebb89e07fafb3730b873a2555273bb891e589fcb0f39e6e591f" exitCode=0 Jan 20 17:56:39 crc kubenswrapper[4558]: I0120 17:56:39.747698 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"9200cf36-f689-4e68-9847-5ad9f1aad20c","Type":"ContainerDied","Data":"4327cff3b2a73ebb89e07fafb3730b873a2555273bb891e589fcb0f39e6e591f"} Jan 20 17:56:39 crc kubenswrapper[4558]: I0120 17:56:39.778343 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:56:39 crc kubenswrapper[4558]: I0120 17:56:39.781355 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-k6dw6" podStartSLOduration=2.238310197 podStartE2EDuration="4.781337393s" podCreationTimestamp="2026-01-20 17:56:35 +0000 UTC" firstStartedPulling="2026-01-20 17:56:36.694304509 +0000 UTC m=+4490.454642476" lastFinishedPulling="2026-01-20 17:56:39.237331705 +0000 UTC m=+4492.997669672" observedRunningTime="2026-01-20 17:56:39.774141426 +0000 UTC m=+4493.534479394" watchObservedRunningTime="2026-01-20 17:56:39.781337393 +0000 UTC m=+4493.541675350" Jan 20 17:56:40 crc kubenswrapper[4558]: I0120 17:56:40.065932 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:40 crc kubenswrapper[4558]: I0120 17:56:40.066352 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:40 crc kubenswrapper[4558]: I0120 17:56:40.084992 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:56:40 crc kubenswrapper[4558]: I0120 17:56:40.132412 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9200cf36-f689-4e68-9847-5ad9f1aad20c-combined-ca-bundle\") pod \"9200cf36-f689-4e68-9847-5ad9f1aad20c\" (UID: \"9200cf36-f689-4e68-9847-5ad9f1aad20c\") " Jan 20 17:56:40 crc kubenswrapper[4558]: I0120 17:56:40.132492 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9200cf36-f689-4e68-9847-5ad9f1aad20c-logs\") pod \"9200cf36-f689-4e68-9847-5ad9f1aad20c\" (UID: \"9200cf36-f689-4e68-9847-5ad9f1aad20c\") " Jan 20 17:56:40 crc kubenswrapper[4558]: I0120 17:56:40.132575 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9200cf36-f689-4e68-9847-5ad9f1aad20c-config-data\") pod \"9200cf36-f689-4e68-9847-5ad9f1aad20c\" (UID: \"9200cf36-f689-4e68-9847-5ad9f1aad20c\") " Jan 20 17:56:40 crc kubenswrapper[4558]: I0120 17:56:40.132598 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tzff9\" (UniqueName: \"kubernetes.io/projected/9200cf36-f689-4e68-9847-5ad9f1aad20c-kube-api-access-tzff9\") pod \"9200cf36-f689-4e68-9847-5ad9f1aad20c\" (UID: \"9200cf36-f689-4e68-9847-5ad9f1aad20c\") " Jan 20 17:56:40 crc kubenswrapper[4558]: I0120 17:56:40.132952 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9200cf36-f689-4e68-9847-5ad9f1aad20c-logs" (OuterVolumeSpecName: "logs") pod "9200cf36-f689-4e68-9847-5ad9f1aad20c" (UID: "9200cf36-f689-4e68-9847-5ad9f1aad20c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:56:40 crc kubenswrapper[4558]: I0120 17:56:40.140399 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9200cf36-f689-4e68-9847-5ad9f1aad20c-kube-api-access-tzff9" (OuterVolumeSpecName: "kube-api-access-tzff9") pod "9200cf36-f689-4e68-9847-5ad9f1aad20c" (UID: "9200cf36-f689-4e68-9847-5ad9f1aad20c"). InnerVolumeSpecName "kube-api-access-tzff9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:56:40 crc kubenswrapper[4558]: I0120 17:56:40.161602 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9200cf36-f689-4e68-9847-5ad9f1aad20c-config-data" (OuterVolumeSpecName: "config-data") pod "9200cf36-f689-4e68-9847-5ad9f1aad20c" (UID: "9200cf36-f689-4e68-9847-5ad9f1aad20c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:56:40 crc kubenswrapper[4558]: I0120 17:56:40.162252 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9200cf36-f689-4e68-9847-5ad9f1aad20c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9200cf36-f689-4e68-9847-5ad9f1aad20c" (UID: "9200cf36-f689-4e68-9847-5ad9f1aad20c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:56:40 crc kubenswrapper[4558]: I0120 17:56:40.234926 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9200cf36-f689-4e68-9847-5ad9f1aad20c-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:40 crc kubenswrapper[4558]: I0120 17:56:40.234968 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tzff9\" (UniqueName: \"kubernetes.io/projected/9200cf36-f689-4e68-9847-5ad9f1aad20c-kube-api-access-tzff9\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:40 crc kubenswrapper[4558]: I0120 17:56:40.234985 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9200cf36-f689-4e68-9847-5ad9f1aad20c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:40 crc kubenswrapper[4558]: I0120 17:56:40.234997 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9200cf36-f689-4e68-9847-5ad9f1aad20c-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:40 crc kubenswrapper[4558]: I0120 17:56:40.761813 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:56:40 crc kubenswrapper[4558]: I0120 17:56:40.761888 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"9200cf36-f689-4e68-9847-5ad9f1aad20c","Type":"ContainerDied","Data":"6ce2c49d98258521f9a124b91906129922e0ba7df2ffdec76c73d4f76100893f"} Jan 20 17:56:40 crc kubenswrapper[4558]: I0120 17:56:40.761978 4558 scope.go:117] "RemoveContainer" containerID="4327cff3b2a73ebb89e07fafb3730b873a2555273bb891e589fcb0f39e6e591f" Jan 20 17:56:40 crc kubenswrapper[4558]: I0120 17:56:40.787919 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:56:40 crc kubenswrapper[4558]: I0120 17:56:40.796050 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:56:40 crc kubenswrapper[4558]: I0120 17:56:40.809835 4558 scope.go:117] "RemoveContainer" containerID="00663a7778425e0f73bea2d684a395a3da2dd27df2181da8e3d491c11d317544" Jan 20 17:56:40 crc kubenswrapper[4558]: I0120 17:56:40.817221 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:56:40 crc kubenswrapper[4558]: E0120 17:56:40.817654 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9200cf36-f689-4e68-9847-5ad9f1aad20c" containerName="nova-api-api" Jan 20 17:56:40 crc kubenswrapper[4558]: I0120 17:56:40.817666 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9200cf36-f689-4e68-9847-5ad9f1aad20c" containerName="nova-api-api" Jan 20 17:56:40 crc kubenswrapper[4558]: E0120 17:56:40.817699 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9200cf36-f689-4e68-9847-5ad9f1aad20c" containerName="nova-api-log" Jan 20 17:56:40 crc kubenswrapper[4558]: I0120 17:56:40.817707 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9200cf36-f689-4e68-9847-5ad9f1aad20c" containerName="nova-api-log" Jan 20 17:56:40 crc kubenswrapper[4558]: I0120 17:56:40.817900 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="9200cf36-f689-4e68-9847-5ad9f1aad20c" containerName="nova-api-log" Jan 20 17:56:40 crc kubenswrapper[4558]: I0120 17:56:40.817927 4558 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="9200cf36-f689-4e68-9847-5ad9f1aad20c" containerName="nova-api-api" Jan 20 17:56:40 crc kubenswrapper[4558]: I0120 17:56:40.818893 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:56:40 crc kubenswrapper[4558]: I0120 17:56:40.820684 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-api-config-data" Jan 20 17:56:40 crc kubenswrapper[4558]: I0120 17:56:40.827523 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:56:40 crc kubenswrapper[4558]: I0120 17:56:40.846475 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cc5l5\" (UniqueName: \"kubernetes.io/projected/05eb5b95-07c8-49be-919a-daf3454ec864-kube-api-access-cc5l5\") pod \"nova-api-0\" (UID: \"05eb5b95-07c8-49be-919a-daf3454ec864\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:56:40 crc kubenswrapper[4558]: I0120 17:56:40.846871 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/05eb5b95-07c8-49be-919a-daf3454ec864-logs\") pod \"nova-api-0\" (UID: \"05eb5b95-07c8-49be-919a-daf3454ec864\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:56:40 crc kubenswrapper[4558]: I0120 17:56:40.846907 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05eb5b95-07c8-49be-919a-daf3454ec864-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"05eb5b95-07c8-49be-919a-daf3454ec864\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:56:40 crc kubenswrapper[4558]: I0120 17:56:40.847382 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05eb5b95-07c8-49be-919a-daf3454ec864-config-data\") pod \"nova-api-0\" (UID: \"05eb5b95-07c8-49be-919a-daf3454ec864\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:56:40 crc kubenswrapper[4558]: I0120 17:56:40.949042 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05eb5b95-07c8-49be-919a-daf3454ec864-config-data\") pod \"nova-api-0\" (UID: \"05eb5b95-07c8-49be-919a-daf3454ec864\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:56:40 crc kubenswrapper[4558]: I0120 17:56:40.949114 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cc5l5\" (UniqueName: \"kubernetes.io/projected/05eb5b95-07c8-49be-919a-daf3454ec864-kube-api-access-cc5l5\") pod \"nova-api-0\" (UID: \"05eb5b95-07c8-49be-919a-daf3454ec864\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:56:40 crc kubenswrapper[4558]: I0120 17:56:40.949231 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/05eb5b95-07c8-49be-919a-daf3454ec864-logs\") pod \"nova-api-0\" (UID: \"05eb5b95-07c8-49be-919a-daf3454ec864\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:56:40 crc kubenswrapper[4558]: I0120 17:56:40.949266 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05eb5b95-07c8-49be-919a-daf3454ec864-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"05eb5b95-07c8-49be-919a-daf3454ec864\") " 
pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:56:40 crc kubenswrapper[4558]: I0120 17:56:40.949692 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/05eb5b95-07c8-49be-919a-daf3454ec864-logs\") pod \"nova-api-0\" (UID: \"05eb5b95-07c8-49be-919a-daf3454ec864\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:56:40 crc kubenswrapper[4558]: I0120 17:56:40.954680 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05eb5b95-07c8-49be-919a-daf3454ec864-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"05eb5b95-07c8-49be-919a-daf3454ec864\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:56:40 crc kubenswrapper[4558]: I0120 17:56:40.954899 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05eb5b95-07c8-49be-919a-daf3454ec864-config-data\") pod \"nova-api-0\" (UID: \"05eb5b95-07c8-49be-919a-daf3454ec864\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:56:40 crc kubenswrapper[4558]: I0120 17:56:40.966425 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cc5l5\" (UniqueName: \"kubernetes.io/projected/05eb5b95-07c8-49be-919a-daf3454ec864-kube-api-access-cc5l5\") pod \"nova-api-0\" (UID: \"05eb5b95-07c8-49be-919a-daf3454ec864\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:56:41 crc kubenswrapper[4558]: I0120 17:56:41.141234 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:56:41 crc kubenswrapper[4558]: I0120 17:56:41.557190 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:56:41 crc kubenswrapper[4558]: I0120 17:56:41.778671 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"05eb5b95-07c8-49be-919a-daf3454ec864","Type":"ContainerStarted","Data":"3cf45edf5e1c7993f4d92174596cd4547d771cda726f545e0f83ad14a7aa7f4c"} Jan 20 17:56:41 crc kubenswrapper[4558]: I0120 17:56:41.778942 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"05eb5b95-07c8-49be-919a-daf3454ec864","Type":"ContainerStarted","Data":"f41c153924735291a227a8a5fc11cb99d729a4beae292b00b387577af9339be0"} Jan 20 17:56:42 crc kubenswrapper[4558]: I0120 17:56:42.004994 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:56:42 crc kubenswrapper[4558]: I0120 17:56:42.108887 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:56:42 crc kubenswrapper[4558]: I0120 17:56:42.409376 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell1-cell-mapping-kn9ww"] Jan 20 17:56:42 crc kubenswrapper[4558]: I0120 17:56:42.410884 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-kn9ww" Jan 20 17:56:42 crc kubenswrapper[4558]: I0120 17:56:42.412660 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-manage-config-data" Jan 20 17:56:42 crc kubenswrapper[4558]: I0120 17:56:42.413422 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-manage-scripts" Jan 20 17:56:42 crc kubenswrapper[4558]: I0120 17:56:42.424233 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-cell-mapping-kn9ww"] Jan 20 17:56:42 crc kubenswrapper[4558]: I0120 17:56:42.484533 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n4xlp\" (UniqueName: \"kubernetes.io/projected/6b50a90e-3159-4824-b752-a8bc047c54d1-kube-api-access-n4xlp\") pod \"nova-cell1-cell-mapping-kn9ww\" (UID: \"6b50a90e-3159-4824-b752-a8bc047c54d1\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-kn9ww" Jan 20 17:56:42 crc kubenswrapper[4558]: I0120 17:56:42.484881 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b50a90e-3159-4824-b752-a8bc047c54d1-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-kn9ww\" (UID: \"6b50a90e-3159-4824-b752-a8bc047c54d1\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-kn9ww" Jan 20 17:56:42 crc kubenswrapper[4558]: I0120 17:56:42.484982 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b50a90e-3159-4824-b752-a8bc047c54d1-scripts\") pod \"nova-cell1-cell-mapping-kn9ww\" (UID: \"6b50a90e-3159-4824-b752-a8bc047c54d1\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-kn9ww" Jan 20 17:56:42 crc kubenswrapper[4558]: I0120 17:56:42.485216 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b50a90e-3159-4824-b752-a8bc047c54d1-config-data\") pod \"nova-cell1-cell-mapping-kn9ww\" (UID: \"6b50a90e-3159-4824-b752-a8bc047c54d1\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-kn9ww" Jan 20 17:56:42 crc kubenswrapper[4558]: I0120 17:56:42.579332 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9200cf36-f689-4e68-9847-5ad9f1aad20c" path="/var/lib/kubelet/pods/9200cf36-f689-4e68-9847-5ad9f1aad20c/volumes" Jan 20 17:56:42 crc kubenswrapper[4558]: I0120 17:56:42.587632 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b50a90e-3159-4824-b752-a8bc047c54d1-config-data\") pod \"nova-cell1-cell-mapping-kn9ww\" (UID: \"6b50a90e-3159-4824-b752-a8bc047c54d1\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-kn9ww" Jan 20 17:56:42 crc kubenswrapper[4558]: I0120 17:56:42.587949 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n4xlp\" (UniqueName: \"kubernetes.io/projected/6b50a90e-3159-4824-b752-a8bc047c54d1-kube-api-access-n4xlp\") pod \"nova-cell1-cell-mapping-kn9ww\" (UID: \"6b50a90e-3159-4824-b752-a8bc047c54d1\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-kn9ww" Jan 20 17:56:42 crc kubenswrapper[4558]: I0120 17:56:42.588096 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/6b50a90e-3159-4824-b752-a8bc047c54d1-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-kn9ww\" (UID: \"6b50a90e-3159-4824-b752-a8bc047c54d1\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-kn9ww" Jan 20 17:56:42 crc kubenswrapper[4558]: I0120 17:56:42.588293 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b50a90e-3159-4824-b752-a8bc047c54d1-scripts\") pod \"nova-cell1-cell-mapping-kn9ww\" (UID: \"6b50a90e-3159-4824-b752-a8bc047c54d1\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-kn9ww" Jan 20 17:56:42 crc kubenswrapper[4558]: I0120 17:56:42.598709 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b50a90e-3159-4824-b752-a8bc047c54d1-scripts\") pod \"nova-cell1-cell-mapping-kn9ww\" (UID: \"6b50a90e-3159-4824-b752-a8bc047c54d1\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-kn9ww" Jan 20 17:56:42 crc kubenswrapper[4558]: I0120 17:56:42.601095 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b50a90e-3159-4824-b752-a8bc047c54d1-config-data\") pod \"nova-cell1-cell-mapping-kn9ww\" (UID: \"6b50a90e-3159-4824-b752-a8bc047c54d1\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-kn9ww" Jan 20 17:56:42 crc kubenswrapper[4558]: I0120 17:56:42.605537 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b50a90e-3159-4824-b752-a8bc047c54d1-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-kn9ww\" (UID: \"6b50a90e-3159-4824-b752-a8bc047c54d1\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-kn9ww" Jan 20 17:56:42 crc kubenswrapper[4558]: I0120 17:56:42.608224 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n4xlp\" (UniqueName: \"kubernetes.io/projected/6b50a90e-3159-4824-b752-a8bc047c54d1-kube-api-access-n4xlp\") pod \"nova-cell1-cell-mapping-kn9ww\" (UID: \"6b50a90e-3159-4824-b752-a8bc047c54d1\") " pod="openstack-kuttl-tests/nova-cell1-cell-mapping-kn9ww" Jan 20 17:56:42 crc kubenswrapper[4558]: I0120 17:56:42.735975 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-kn9ww" Jan 20 17:56:42 crc kubenswrapper[4558]: I0120 17:56:42.800829 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"05eb5b95-07c8-49be-919a-daf3454ec864","Type":"ContainerStarted","Data":"e678f09f7be1d0a38e4b6f5003393c7efce508836f8c2a8563d0df850216d77f"} Jan 20 17:56:42 crc kubenswrapper[4558]: I0120 17:56:42.834102 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-api-0" podStartSLOduration=2.834076694 podStartE2EDuration="2.834076694s" podCreationTimestamp="2026-01-20 17:56:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:56:42.819063183 +0000 UTC m=+4496.579401139" watchObservedRunningTime="2026-01-20 17:56:42.834076694 +0000 UTC m=+4496.594414662" Jan 20 17:56:43 crc kubenswrapper[4558]: I0120 17:56:43.168141 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell1-cell-mapping-kn9ww"] Jan 20 17:56:43 crc kubenswrapper[4558]: W0120 17:56:43.170858 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6b50a90e_3159_4824_b752_a8bc047c54d1.slice/crio-d7117efd1124bc9d1621de6a15d68e1a91b72312c94284746f9f76fdead69f44 WatchSource:0}: Error finding container d7117efd1124bc9d1621de6a15d68e1a91b72312c94284746f9f76fdead69f44: Status 404 returned error can't find the container with id d7117efd1124bc9d1621de6a15d68e1a91b72312c94284746f9f76fdead69f44 Jan 20 17:56:43 crc kubenswrapper[4558]: I0120 17:56:43.811893 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-kn9ww" event={"ID":"6b50a90e-3159-4824-b752-a8bc047c54d1","Type":"ContainerStarted","Data":"6d3bcdd8bc2daf8370cfb0ead7863d55e4210dd7cd17bf8e13b1e10aace7f03f"} Jan 20 17:56:43 crc kubenswrapper[4558]: I0120 17:56:43.812400 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-kn9ww" event={"ID":"6b50a90e-3159-4824-b752-a8bc047c54d1","Type":"ContainerStarted","Data":"d7117efd1124bc9d1621de6a15d68e1a91b72312c94284746f9f76fdead69f44"} Jan 20 17:56:43 crc kubenswrapper[4558]: I0120 17:56:43.834523 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-kn9ww" podStartSLOduration=1.8345055989999999 podStartE2EDuration="1.834505599s" podCreationTimestamp="2026-01-20 17:56:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:56:43.828011011 +0000 UTC m=+4497.588348978" watchObservedRunningTime="2026-01-20 17:56:43.834505599 +0000 UTC m=+4497.594843566" Jan 20 17:56:44 crc kubenswrapper[4558]: I0120 17:56:44.566261 4558 scope.go:117] "RemoveContainer" containerID="4ea7fa61d8b21007a044345acec89fcebe56c2921cf0f76ff2002f44bdc88ede" Jan 20 17:56:44 crc kubenswrapper[4558]: E0120 17:56:44.566545 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" 
podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:56:45 crc kubenswrapper[4558]: I0120 17:56:45.065749 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:45 crc kubenswrapper[4558]: I0120 17:56:45.065814 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:45 crc kubenswrapper[4558]: I0120 17:56:45.733348 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-k6dw6" Jan 20 17:56:45 crc kubenswrapper[4558]: I0120 17:56:45.733689 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-k6dw6" Jan 20 17:56:45 crc kubenswrapper[4558]: I0120 17:56:45.777572 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-k6dw6" Jan 20 17:56:45 crc kubenswrapper[4558]: I0120 17:56:45.861707 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-k6dw6" Jan 20 17:56:46 crc kubenswrapper[4558]: I0120 17:56:46.020411 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-k6dw6"] Jan 20 17:56:46 crc kubenswrapper[4558]: I0120 17:56:46.078322 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-metadata-0" podUID="9d2add51-d3c5-44af-a792-a7301e834322" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.157:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:56:46 crc kubenswrapper[4558]: I0120 17:56:46.078351 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-metadata-0" podUID="9d2add51-d3c5-44af-a792-a7301e834322" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.157:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:56:47 crc kubenswrapper[4558]: I0120 17:56:47.108999 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:56:47 crc kubenswrapper[4558]: I0120 17:56:47.137369 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:56:47 crc kubenswrapper[4558]: I0120 17:56:47.848895 4558 generic.go:334] "Generic (PLEG): container finished" podID="6b50a90e-3159-4824-b752-a8bc047c54d1" containerID="6d3bcdd8bc2daf8370cfb0ead7863d55e4210dd7cd17bf8e13b1e10aace7f03f" exitCode=0 Jan 20 17:56:47 crc kubenswrapper[4558]: I0120 17:56:47.848992 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-kn9ww" event={"ID":"6b50a90e-3159-4824-b752-a8bc047c54d1","Type":"ContainerDied","Data":"6d3bcdd8bc2daf8370cfb0ead7863d55e4210dd7cd17bf8e13b1e10aace7f03f"} Jan 20 17:56:47 crc kubenswrapper[4558]: I0120 17:56:47.849330 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-k6dw6" podUID="6b78b532-b4ab-4bd3-a833-95f464f0849d" containerName="registry-server" containerID="cri-o://a34c4b9be156eb278ecc45038acc26d0eabba5069eb21ad733e68f50f7d0ba59" gracePeriod=2 Jan 20 17:56:47 crc kubenswrapper[4558]: I0120 17:56:47.880107 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="ready" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:56:48 crc kubenswrapper[4558]: I0120 17:56:48.292679 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-k6dw6" Jan 20 17:56:48 crc kubenswrapper[4558]: I0120 17:56:48.406907 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6b78b532-b4ab-4bd3-a833-95f464f0849d-utilities\") pod \"6b78b532-b4ab-4bd3-a833-95f464f0849d\" (UID: \"6b78b532-b4ab-4bd3-a833-95f464f0849d\") " Jan 20 17:56:48 crc kubenswrapper[4558]: I0120 17:56:48.406994 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sfxv2\" (UniqueName: \"kubernetes.io/projected/6b78b532-b4ab-4bd3-a833-95f464f0849d-kube-api-access-sfxv2\") pod \"6b78b532-b4ab-4bd3-a833-95f464f0849d\" (UID: \"6b78b532-b4ab-4bd3-a833-95f464f0849d\") " Jan 20 17:56:48 crc kubenswrapper[4558]: I0120 17:56:48.407084 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6b78b532-b4ab-4bd3-a833-95f464f0849d-catalog-content\") pod \"6b78b532-b4ab-4bd3-a833-95f464f0849d\" (UID: \"6b78b532-b4ab-4bd3-a833-95f464f0849d\") " Jan 20 17:56:48 crc kubenswrapper[4558]: I0120 17:56:48.407970 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6b78b532-b4ab-4bd3-a833-95f464f0849d-utilities" (OuterVolumeSpecName: "utilities") pod "6b78b532-b4ab-4bd3-a833-95f464f0849d" (UID: "6b78b532-b4ab-4bd3-a833-95f464f0849d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:56:48 crc kubenswrapper[4558]: I0120 17:56:48.424348 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6b78b532-b4ab-4bd3-a833-95f464f0849d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6b78b532-b4ab-4bd3-a833-95f464f0849d" (UID: "6b78b532-b4ab-4bd3-a833-95f464f0849d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:56:48 crc kubenswrapper[4558]: I0120 17:56:48.508953 4558 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6b78b532-b4ab-4bd3-a833-95f464f0849d-utilities\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:48 crc kubenswrapper[4558]: I0120 17:56:48.508982 4558 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6b78b532-b4ab-4bd3-a833-95f464f0849d-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:48 crc kubenswrapper[4558]: I0120 17:56:48.862136 4558 generic.go:334] "Generic (PLEG): container finished" podID="6b78b532-b4ab-4bd3-a833-95f464f0849d" containerID="a34c4b9be156eb278ecc45038acc26d0eabba5069eb21ad733e68f50f7d0ba59" exitCode=0 Jan 20 17:56:48 crc kubenswrapper[4558]: I0120 17:56:48.862210 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k6dw6" event={"ID":"6b78b532-b4ab-4bd3-a833-95f464f0849d","Type":"ContainerDied","Data":"a34c4b9be156eb278ecc45038acc26d0eabba5069eb21ad733e68f50f7d0ba59"} Jan 20 17:56:48 crc kubenswrapper[4558]: I0120 17:56:48.862265 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-k6dw6" Jan 20 17:56:48 crc kubenswrapper[4558]: I0120 17:56:48.862790 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k6dw6" event={"ID":"6b78b532-b4ab-4bd3-a833-95f464f0849d","Type":"ContainerDied","Data":"bce78c2338329ba2aaded12ee14a6358f29601e953bbcc92a71a834c8556c4fd"} Jan 20 17:56:48 crc kubenswrapper[4558]: I0120 17:56:48.862888 4558 scope.go:117] "RemoveContainer" containerID="a34c4b9be156eb278ecc45038acc26d0eabba5069eb21ad733e68f50f7d0ba59" Jan 20 17:56:48 crc kubenswrapper[4558]: I0120 17:56:48.892030 4558 scope.go:117] "RemoveContainer" containerID="690ac72bd61d725b3d2068122cf4b402e7cb192df043c7c7f6b3c386578e4fb4" Jan 20 17:56:48 crc kubenswrapper[4558]: I0120 17:56:48.994458 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6b78b532-b4ab-4bd3-a833-95f464f0849d-kube-api-access-sfxv2" (OuterVolumeSpecName: "kube-api-access-sfxv2") pod "6b78b532-b4ab-4bd3-a833-95f464f0849d" (UID: "6b78b532-b4ab-4bd3-a833-95f464f0849d"). InnerVolumeSpecName "kube-api-access-sfxv2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:56:49 crc kubenswrapper[4558]: I0120 17:56:49.014327 4558 scope.go:117] "RemoveContainer" containerID="d8dad32e8466c966fc099301fb386aa3c26abf2543b909a5090de63cc81ddf2c" Jan 20 17:56:49 crc kubenswrapper[4558]: I0120 17:56:49.030010 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sfxv2\" (UniqueName: \"kubernetes.io/projected/6b78b532-b4ab-4bd3-a833-95f464f0849d-kube-api-access-sfxv2\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:49 crc kubenswrapper[4558]: I0120 17:56:49.265277 4558 scope.go:117] "RemoveContainer" containerID="a34c4b9be156eb278ecc45038acc26d0eabba5069eb21ad733e68f50f7d0ba59" Jan 20 17:56:49 crc kubenswrapper[4558]: E0120 17:56:49.265637 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a34c4b9be156eb278ecc45038acc26d0eabba5069eb21ad733e68f50f7d0ba59\": container with ID starting with a34c4b9be156eb278ecc45038acc26d0eabba5069eb21ad733e68f50f7d0ba59 not found: ID does not exist" containerID="a34c4b9be156eb278ecc45038acc26d0eabba5069eb21ad733e68f50f7d0ba59" Jan 20 17:56:49 crc kubenswrapper[4558]: I0120 17:56:49.265691 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a34c4b9be156eb278ecc45038acc26d0eabba5069eb21ad733e68f50f7d0ba59"} err="failed to get container status \"a34c4b9be156eb278ecc45038acc26d0eabba5069eb21ad733e68f50f7d0ba59\": rpc error: code = NotFound desc = could not find container \"a34c4b9be156eb278ecc45038acc26d0eabba5069eb21ad733e68f50f7d0ba59\": container with ID starting with a34c4b9be156eb278ecc45038acc26d0eabba5069eb21ad733e68f50f7d0ba59 not found: ID does not exist" Jan 20 17:56:49 crc kubenswrapper[4558]: I0120 17:56:49.265721 4558 scope.go:117] "RemoveContainer" containerID="690ac72bd61d725b3d2068122cf4b402e7cb192df043c7c7f6b3c386578e4fb4" Jan 20 17:56:49 crc kubenswrapper[4558]: E0120 17:56:49.266118 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"690ac72bd61d725b3d2068122cf4b402e7cb192df043c7c7f6b3c386578e4fb4\": container with ID starting with 690ac72bd61d725b3d2068122cf4b402e7cb192df043c7c7f6b3c386578e4fb4 not found: ID does not exist" 
containerID="690ac72bd61d725b3d2068122cf4b402e7cb192df043c7c7f6b3c386578e4fb4" Jan 20 17:56:49 crc kubenswrapper[4558]: I0120 17:56:49.266156 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"690ac72bd61d725b3d2068122cf4b402e7cb192df043c7c7f6b3c386578e4fb4"} err="failed to get container status \"690ac72bd61d725b3d2068122cf4b402e7cb192df043c7c7f6b3c386578e4fb4\": rpc error: code = NotFound desc = could not find container \"690ac72bd61d725b3d2068122cf4b402e7cb192df043c7c7f6b3c386578e4fb4\": container with ID starting with 690ac72bd61d725b3d2068122cf4b402e7cb192df043c7c7f6b3c386578e4fb4 not found: ID does not exist" Jan 20 17:56:49 crc kubenswrapper[4558]: I0120 17:56:49.266206 4558 scope.go:117] "RemoveContainer" containerID="d8dad32e8466c966fc099301fb386aa3c26abf2543b909a5090de63cc81ddf2c" Jan 20 17:56:49 crc kubenswrapper[4558]: E0120 17:56:49.266742 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d8dad32e8466c966fc099301fb386aa3c26abf2543b909a5090de63cc81ddf2c\": container with ID starting with d8dad32e8466c966fc099301fb386aa3c26abf2543b909a5090de63cc81ddf2c not found: ID does not exist" containerID="d8dad32e8466c966fc099301fb386aa3c26abf2543b909a5090de63cc81ddf2c" Jan 20 17:56:49 crc kubenswrapper[4558]: I0120 17:56:49.266768 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d8dad32e8466c966fc099301fb386aa3c26abf2543b909a5090de63cc81ddf2c"} err="failed to get container status \"d8dad32e8466c966fc099301fb386aa3c26abf2543b909a5090de63cc81ddf2c\": rpc error: code = NotFound desc = could not find container \"d8dad32e8466c966fc099301fb386aa3c26abf2543b909a5090de63cc81ddf2c\": container with ID starting with d8dad32e8466c966fc099301fb386aa3c26abf2543b909a5090de63cc81ddf2c not found: ID does not exist" Jan 20 17:56:49 crc kubenswrapper[4558]: I0120 17:56:49.291633 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-kn9ww" Jan 20 17:56:49 crc kubenswrapper[4558]: I0120 17:56:49.312487 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-k6dw6"] Jan 20 17:56:49 crc kubenswrapper[4558]: I0120 17:56:49.318554 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-k6dw6"] Jan 20 17:56:49 crc kubenswrapper[4558]: I0120 17:56:49.438002 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b50a90e-3159-4824-b752-a8bc047c54d1-combined-ca-bundle\") pod \"6b50a90e-3159-4824-b752-a8bc047c54d1\" (UID: \"6b50a90e-3159-4824-b752-a8bc047c54d1\") " Jan 20 17:56:49 crc kubenswrapper[4558]: I0120 17:56:49.438181 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b50a90e-3159-4824-b752-a8bc047c54d1-scripts\") pod \"6b50a90e-3159-4824-b752-a8bc047c54d1\" (UID: \"6b50a90e-3159-4824-b752-a8bc047c54d1\") " Jan 20 17:56:49 crc kubenswrapper[4558]: I0120 17:56:49.438219 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n4xlp\" (UniqueName: \"kubernetes.io/projected/6b50a90e-3159-4824-b752-a8bc047c54d1-kube-api-access-n4xlp\") pod \"6b50a90e-3159-4824-b752-a8bc047c54d1\" (UID: \"6b50a90e-3159-4824-b752-a8bc047c54d1\") " Jan 20 17:56:49 crc kubenswrapper[4558]: I0120 17:56:49.438271 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b50a90e-3159-4824-b752-a8bc047c54d1-config-data\") pod \"6b50a90e-3159-4824-b752-a8bc047c54d1\" (UID: \"6b50a90e-3159-4824-b752-a8bc047c54d1\") " Jan 20 17:56:49 crc kubenswrapper[4558]: I0120 17:56:49.444347 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b50a90e-3159-4824-b752-a8bc047c54d1-scripts" (OuterVolumeSpecName: "scripts") pod "6b50a90e-3159-4824-b752-a8bc047c54d1" (UID: "6b50a90e-3159-4824-b752-a8bc047c54d1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:56:49 crc kubenswrapper[4558]: I0120 17:56:49.444704 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6b50a90e-3159-4824-b752-a8bc047c54d1-kube-api-access-n4xlp" (OuterVolumeSpecName: "kube-api-access-n4xlp") pod "6b50a90e-3159-4824-b752-a8bc047c54d1" (UID: "6b50a90e-3159-4824-b752-a8bc047c54d1"). InnerVolumeSpecName "kube-api-access-n4xlp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:56:49 crc kubenswrapper[4558]: I0120 17:56:49.463723 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b50a90e-3159-4824-b752-a8bc047c54d1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6b50a90e-3159-4824-b752-a8bc047c54d1" (UID: "6b50a90e-3159-4824-b752-a8bc047c54d1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:56:49 crc kubenswrapper[4558]: I0120 17:56:49.464140 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b50a90e-3159-4824-b752-a8bc047c54d1-config-data" (OuterVolumeSpecName: "config-data") pod "6b50a90e-3159-4824-b752-a8bc047c54d1" (UID: "6b50a90e-3159-4824-b752-a8bc047c54d1"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:56:49 crc kubenswrapper[4558]: I0120 17:56:49.541521 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b50a90e-3159-4824-b752-a8bc047c54d1-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:49 crc kubenswrapper[4558]: I0120 17:56:49.541552 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n4xlp\" (UniqueName: \"kubernetes.io/projected/6b50a90e-3159-4824-b752-a8bc047c54d1-kube-api-access-n4xlp\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:49 crc kubenswrapper[4558]: I0120 17:56:49.541564 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b50a90e-3159-4824-b752-a8bc047c54d1-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:49 crc kubenswrapper[4558]: I0120 17:56:49.541577 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b50a90e-3159-4824-b752-a8bc047c54d1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:49 crc kubenswrapper[4558]: I0120 17:56:49.875693 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-kn9ww" event={"ID":"6b50a90e-3159-4824-b752-a8bc047c54d1","Type":"ContainerDied","Data":"d7117efd1124bc9d1621de6a15d68e1a91b72312c94284746f9f76fdead69f44"} Jan 20 17:56:49 crc kubenswrapper[4558]: I0120 17:56:49.876234 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d7117efd1124bc9d1621de6a15d68e1a91b72312c94284746f9f76fdead69f44" Jan 20 17:56:49 crc kubenswrapper[4558]: I0120 17:56:49.875701 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-cell-mapping-kn9ww" Jan 20 17:56:50 crc kubenswrapper[4558]: I0120 17:56:50.043751 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:56:50 crc kubenswrapper[4558]: I0120 17:56:50.044042 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="05eb5b95-07c8-49be-919a-daf3454ec864" containerName="nova-api-log" containerID="cri-o://3cf45edf5e1c7993f4d92174596cd4547d771cda726f545e0f83ad14a7aa7f4c" gracePeriod=30 Jan 20 17:56:50 crc kubenswrapper[4558]: I0120 17:56:50.044120 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="05eb5b95-07c8-49be-919a-daf3454ec864" containerName="nova-api-api" containerID="cri-o://e678f09f7be1d0a38e4b6f5003393c7efce508836f8c2a8563d0df850216d77f" gracePeriod=30 Jan 20 17:56:50 crc kubenswrapper[4558]: I0120 17:56:50.054026 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:56:50 crc kubenswrapper[4558]: I0120 17:56:50.054244 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="27f0171c-82eb-4da0-9cb6-1e8f3ce1db3d" containerName="nova-scheduler-scheduler" containerID="cri-o://4476058cbcb49b0953be27b4233e9206097b51f7403463516c9726ffdf82d395" gracePeriod=30 Jan 20 17:56:50 crc kubenswrapper[4558]: I0120 17:56:50.120057 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:56:50 crc kubenswrapper[4558]: I0120 17:56:50.120313 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="9d2add51-d3c5-44af-a792-a7301e834322" containerName="nova-metadata-log" containerID="cri-o://a51a56718bd2a2dbc6833433430e6b07d4e0e76fa08d35b70dd83eaeb7bc29a8" gracePeriod=30 Jan 20 17:56:50 crc kubenswrapper[4558]: I0120 17:56:50.120390 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="9d2add51-d3c5-44af-a792-a7301e834322" containerName="nova-metadata-metadata" containerID="cri-o://48c1a79885d63262f1636dd956e4fd1544b1f866fc373b6a47841c7ffba899eb" gracePeriod=30 Jan 20 17:56:50 crc kubenswrapper[4558]: I0120 17:56:50.576818 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6b78b532-b4ab-4bd3-a833-95f464f0849d" path="/var/lib/kubelet/pods/6b78b532-b4ab-4bd3-a833-95f464f0849d/volumes" Jan 20 17:56:50 crc kubenswrapper[4558]: I0120 17:56:50.759717 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:50 crc kubenswrapper[4558]: I0120 17:56:50.864664 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:56:50 crc kubenswrapper[4558]: I0120 17:56:50.886537 4558 generic.go:334] "Generic (PLEG): container finished" podID="9d2add51-d3c5-44af-a792-a7301e834322" containerID="a51a56718bd2a2dbc6833433430e6b07d4e0e76fa08d35b70dd83eaeb7bc29a8" exitCode=143 Jan 20 17:56:50 crc kubenswrapper[4558]: I0120 17:56:50.886597 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"9d2add51-d3c5-44af-a792-a7301e834322","Type":"ContainerDied","Data":"a51a56718bd2a2dbc6833433430e6b07d4e0e76fa08d35b70dd83eaeb7bc29a8"} Jan 20 17:56:50 crc kubenswrapper[4558]: I0120 17:56:50.888503 4558 generic.go:334] "Generic (PLEG): container finished" podID="05eb5b95-07c8-49be-919a-daf3454ec864" containerID="e678f09f7be1d0a38e4b6f5003393c7efce508836f8c2a8563d0df850216d77f" exitCode=0 Jan 20 17:56:50 crc kubenswrapper[4558]: I0120 17:56:50.888526 4558 generic.go:334] "Generic (PLEG): container finished" podID="05eb5b95-07c8-49be-919a-daf3454ec864" containerID="3cf45edf5e1c7993f4d92174596cd4547d771cda726f545e0f83ad14a7aa7f4c" exitCode=143 Jan 20 17:56:50 crc kubenswrapper[4558]: I0120 17:56:50.888542 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"05eb5b95-07c8-49be-919a-daf3454ec864","Type":"ContainerDied","Data":"e678f09f7be1d0a38e4b6f5003393c7efce508836f8c2a8563d0df850216d77f"} Jan 20 17:56:50 crc kubenswrapper[4558]: I0120 17:56:50.888560 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"05eb5b95-07c8-49be-919a-daf3454ec864","Type":"ContainerDied","Data":"3cf45edf5e1c7993f4d92174596cd4547d771cda726f545e0f83ad14a7aa7f4c"} Jan 20 17:56:50 crc kubenswrapper[4558]: I0120 17:56:50.888571 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"05eb5b95-07c8-49be-919a-daf3454ec864","Type":"ContainerDied","Data":"f41c153924735291a227a8a5fc11cb99d729a4beae292b00b387577af9339be0"} Jan 20 17:56:50 crc kubenswrapper[4558]: I0120 17:56:50.888591 4558 scope.go:117] "RemoveContainer" containerID="e678f09f7be1d0a38e4b6f5003393c7efce508836f8c2a8563d0df850216d77f" Jan 20 17:56:50 crc kubenswrapper[4558]: I0120 17:56:50.888675 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:56:50 crc kubenswrapper[4558]: I0120 17:56:50.937897 4558 scope.go:117] "RemoveContainer" containerID="3cf45edf5e1c7993f4d92174596cd4547d771cda726f545e0f83ad14a7aa7f4c" Jan 20 17:56:50 crc kubenswrapper[4558]: I0120 17:56:50.960984 4558 scope.go:117] "RemoveContainer" containerID="e678f09f7be1d0a38e4b6f5003393c7efce508836f8c2a8563d0df850216d77f" Jan 20 17:56:50 crc kubenswrapper[4558]: E0120 17:56:50.961557 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e678f09f7be1d0a38e4b6f5003393c7efce508836f8c2a8563d0df850216d77f\": container with ID starting with e678f09f7be1d0a38e4b6f5003393c7efce508836f8c2a8563d0df850216d77f not found: ID does not exist" containerID="e678f09f7be1d0a38e4b6f5003393c7efce508836f8c2a8563d0df850216d77f" Jan 20 17:56:50 crc kubenswrapper[4558]: I0120 17:56:50.961625 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e678f09f7be1d0a38e4b6f5003393c7efce508836f8c2a8563d0df850216d77f"} err="failed to get container status \"e678f09f7be1d0a38e4b6f5003393c7efce508836f8c2a8563d0df850216d77f\": rpc error: code = NotFound desc = could not find container \"e678f09f7be1d0a38e4b6f5003393c7efce508836f8c2a8563d0df850216d77f\": container with ID starting with e678f09f7be1d0a38e4b6f5003393c7efce508836f8c2a8563d0df850216d77f not found: ID does not exist" Jan 20 17:56:50 crc kubenswrapper[4558]: I0120 17:56:50.961680 4558 scope.go:117] "RemoveContainer" containerID="3cf45edf5e1c7993f4d92174596cd4547d771cda726f545e0f83ad14a7aa7f4c" Jan 20 17:56:50 crc kubenswrapper[4558]: E0120 17:56:50.962083 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3cf45edf5e1c7993f4d92174596cd4547d771cda726f545e0f83ad14a7aa7f4c\": container with ID starting with 3cf45edf5e1c7993f4d92174596cd4547d771cda726f545e0f83ad14a7aa7f4c not found: ID does not exist" containerID="3cf45edf5e1c7993f4d92174596cd4547d771cda726f545e0f83ad14a7aa7f4c" Jan 20 17:56:50 crc kubenswrapper[4558]: I0120 17:56:50.962124 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3cf45edf5e1c7993f4d92174596cd4547d771cda726f545e0f83ad14a7aa7f4c"} err="failed to get container status \"3cf45edf5e1c7993f4d92174596cd4547d771cda726f545e0f83ad14a7aa7f4c\": rpc error: code = NotFound desc = could not find container \"3cf45edf5e1c7993f4d92174596cd4547d771cda726f545e0f83ad14a7aa7f4c\": container with ID starting with 3cf45edf5e1c7993f4d92174596cd4547d771cda726f545e0f83ad14a7aa7f4c not found: ID does not exist" Jan 20 17:56:50 crc kubenswrapper[4558]: I0120 17:56:50.962151 4558 scope.go:117] "RemoveContainer" containerID="e678f09f7be1d0a38e4b6f5003393c7efce508836f8c2a8563d0df850216d77f" Jan 20 17:56:50 crc kubenswrapper[4558]: I0120 17:56:50.962458 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e678f09f7be1d0a38e4b6f5003393c7efce508836f8c2a8563d0df850216d77f"} err="failed to get container status \"e678f09f7be1d0a38e4b6f5003393c7efce508836f8c2a8563d0df850216d77f\": rpc error: code = NotFound desc = could not find container \"e678f09f7be1d0a38e4b6f5003393c7efce508836f8c2a8563d0df850216d77f\": container with ID starting with e678f09f7be1d0a38e4b6f5003393c7efce508836f8c2a8563d0df850216d77f not found: ID does not exist" Jan 20 17:56:50 crc kubenswrapper[4558]: I0120 17:56:50.962485 
4558 scope.go:117] "RemoveContainer" containerID="3cf45edf5e1c7993f4d92174596cd4547d771cda726f545e0f83ad14a7aa7f4c" Jan 20 17:56:50 crc kubenswrapper[4558]: I0120 17:56:50.962743 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3cf45edf5e1c7993f4d92174596cd4547d771cda726f545e0f83ad14a7aa7f4c"} err="failed to get container status \"3cf45edf5e1c7993f4d92174596cd4547d771cda726f545e0f83ad14a7aa7f4c\": rpc error: code = NotFound desc = could not find container \"3cf45edf5e1c7993f4d92174596cd4547d771cda726f545e0f83ad14a7aa7f4c\": container with ID starting with 3cf45edf5e1c7993f4d92174596cd4547d771cda726f545e0f83ad14a7aa7f4c not found: ID does not exist" Jan 20 17:56:50 crc kubenswrapper[4558]: I0120 17:56:50.972398 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cc5l5\" (UniqueName: \"kubernetes.io/projected/05eb5b95-07c8-49be-919a-daf3454ec864-kube-api-access-cc5l5\") pod \"05eb5b95-07c8-49be-919a-daf3454ec864\" (UID: \"05eb5b95-07c8-49be-919a-daf3454ec864\") " Jan 20 17:56:50 crc kubenswrapper[4558]: I0120 17:56:50.972802 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05eb5b95-07c8-49be-919a-daf3454ec864-config-data\") pod \"05eb5b95-07c8-49be-919a-daf3454ec864\" (UID: \"05eb5b95-07c8-49be-919a-daf3454ec864\") " Jan 20 17:56:50 crc kubenswrapper[4558]: I0120 17:56:50.972845 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05eb5b95-07c8-49be-919a-daf3454ec864-combined-ca-bundle\") pod \"05eb5b95-07c8-49be-919a-daf3454ec864\" (UID: \"05eb5b95-07c8-49be-919a-daf3454ec864\") " Jan 20 17:56:50 crc kubenswrapper[4558]: I0120 17:56:50.973114 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/05eb5b95-07c8-49be-919a-daf3454ec864-logs\") pod \"05eb5b95-07c8-49be-919a-daf3454ec864\" (UID: \"05eb5b95-07c8-49be-919a-daf3454ec864\") " Jan 20 17:56:50 crc kubenswrapper[4558]: I0120 17:56:50.973590 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/05eb5b95-07c8-49be-919a-daf3454ec864-logs" (OuterVolumeSpecName: "logs") pod "05eb5b95-07c8-49be-919a-daf3454ec864" (UID: "05eb5b95-07c8-49be-919a-daf3454ec864"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:56:50 crc kubenswrapper[4558]: I0120 17:56:50.994064 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/05eb5b95-07c8-49be-919a-daf3454ec864-kube-api-access-cc5l5" (OuterVolumeSpecName: "kube-api-access-cc5l5") pod "05eb5b95-07c8-49be-919a-daf3454ec864" (UID: "05eb5b95-07c8-49be-919a-daf3454ec864"). InnerVolumeSpecName "kube-api-access-cc5l5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:56:50 crc kubenswrapper[4558]: I0120 17:56:50.999550 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/05eb5b95-07c8-49be-919a-daf3454ec864-config-data" (OuterVolumeSpecName: "config-data") pod "05eb5b95-07c8-49be-919a-daf3454ec864" (UID: "05eb5b95-07c8-49be-919a-daf3454ec864"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:56:51 crc kubenswrapper[4558]: I0120 17:56:51.000720 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/05eb5b95-07c8-49be-919a-daf3454ec864-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "05eb5b95-07c8-49be-919a-daf3454ec864" (UID: "05eb5b95-07c8-49be-919a-daf3454ec864"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:56:51 crc kubenswrapper[4558]: I0120 17:56:51.076098 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/05eb5b95-07c8-49be-919a-daf3454ec864-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:51 crc kubenswrapper[4558]: I0120 17:56:51.076137 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cc5l5\" (UniqueName: \"kubernetes.io/projected/05eb5b95-07c8-49be-919a-daf3454ec864-kube-api-access-cc5l5\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:51 crc kubenswrapper[4558]: I0120 17:56:51.076149 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05eb5b95-07c8-49be-919a-daf3454ec864-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:51 crc kubenswrapper[4558]: I0120 17:56:51.076179 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05eb5b95-07c8-49be-919a-daf3454ec864-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:51 crc kubenswrapper[4558]: I0120 17:56:51.228824 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:56:51 crc kubenswrapper[4558]: I0120 17:56:51.236438 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:56:51 crc kubenswrapper[4558]: I0120 17:56:51.247080 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:56:51 crc kubenswrapper[4558]: E0120 17:56:51.247590 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b78b532-b4ab-4bd3-a833-95f464f0849d" containerName="registry-server" Jan 20 17:56:51 crc kubenswrapper[4558]: I0120 17:56:51.247611 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b78b532-b4ab-4bd3-a833-95f464f0849d" containerName="registry-server" Jan 20 17:56:51 crc kubenswrapper[4558]: E0120 17:56:51.247633 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b78b532-b4ab-4bd3-a833-95f464f0849d" containerName="extract-utilities" Jan 20 17:56:51 crc kubenswrapper[4558]: I0120 17:56:51.247639 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b78b532-b4ab-4bd3-a833-95f464f0849d" containerName="extract-utilities" Jan 20 17:56:51 crc kubenswrapper[4558]: E0120 17:56:51.247655 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b50a90e-3159-4824-b752-a8bc047c54d1" containerName="nova-manage" Jan 20 17:56:51 crc kubenswrapper[4558]: I0120 17:56:51.247662 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b50a90e-3159-4824-b752-a8bc047c54d1" containerName="nova-manage" Jan 20 17:56:51 crc kubenswrapper[4558]: E0120 17:56:51.247681 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05eb5b95-07c8-49be-919a-daf3454ec864" containerName="nova-api-log" Jan 20 17:56:51 crc kubenswrapper[4558]: I0120 17:56:51.247687 4558 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="05eb5b95-07c8-49be-919a-daf3454ec864" containerName="nova-api-log" Jan 20 17:56:51 crc kubenswrapper[4558]: E0120 17:56:51.247694 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05eb5b95-07c8-49be-919a-daf3454ec864" containerName="nova-api-api" Jan 20 17:56:51 crc kubenswrapper[4558]: I0120 17:56:51.247700 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="05eb5b95-07c8-49be-919a-daf3454ec864" containerName="nova-api-api" Jan 20 17:56:51 crc kubenswrapper[4558]: E0120 17:56:51.247714 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b78b532-b4ab-4bd3-a833-95f464f0849d" containerName="extract-content" Jan 20 17:56:51 crc kubenswrapper[4558]: I0120 17:56:51.247722 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b78b532-b4ab-4bd3-a833-95f464f0849d" containerName="extract-content" Jan 20 17:56:51 crc kubenswrapper[4558]: I0120 17:56:51.247925 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b78b532-b4ab-4bd3-a833-95f464f0849d" containerName="registry-server" Jan 20 17:56:51 crc kubenswrapper[4558]: I0120 17:56:51.247939 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="05eb5b95-07c8-49be-919a-daf3454ec864" containerName="nova-api-log" Jan 20 17:56:51 crc kubenswrapper[4558]: I0120 17:56:51.247951 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="05eb5b95-07c8-49be-919a-daf3454ec864" containerName="nova-api-api" Jan 20 17:56:51 crc kubenswrapper[4558]: I0120 17:56:51.247963 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b50a90e-3159-4824-b752-a8bc047c54d1" containerName="nova-manage" Jan 20 17:56:51 crc kubenswrapper[4558]: I0120 17:56:51.249074 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:56:51 crc kubenswrapper[4558]: I0120 17:56:51.251126 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-api-config-data" Jan 20 17:56:51 crc kubenswrapper[4558]: I0120 17:56:51.253910 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:56:51 crc kubenswrapper[4558]: I0120 17:56:51.294929 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f504f815-d70e-4393-b434-1557ea71cdac-config-data\") pod \"nova-api-0\" (UID: \"f504f815-d70e-4393-b434-1557ea71cdac\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:56:51 crc kubenswrapper[4558]: I0120 17:56:51.295048 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f504f815-d70e-4393-b434-1557ea71cdac-logs\") pod \"nova-api-0\" (UID: \"f504f815-d70e-4393-b434-1557ea71cdac\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:56:51 crc kubenswrapper[4558]: I0120 17:56:51.295211 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wbxtv\" (UniqueName: \"kubernetes.io/projected/f504f815-d70e-4393-b434-1557ea71cdac-kube-api-access-wbxtv\") pod \"nova-api-0\" (UID: \"f504f815-d70e-4393-b434-1557ea71cdac\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:56:51 crc kubenswrapper[4558]: I0120 17:56:51.295552 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f504f815-d70e-4393-b434-1557ea71cdac-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"f504f815-d70e-4393-b434-1557ea71cdac\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:56:51 crc kubenswrapper[4558]: I0120 17:56:51.397750 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f504f815-d70e-4393-b434-1557ea71cdac-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"f504f815-d70e-4393-b434-1557ea71cdac\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:56:51 crc kubenswrapper[4558]: I0120 17:56:51.397841 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f504f815-d70e-4393-b434-1557ea71cdac-config-data\") pod \"nova-api-0\" (UID: \"f504f815-d70e-4393-b434-1557ea71cdac\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:56:51 crc kubenswrapper[4558]: I0120 17:56:51.397878 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f504f815-d70e-4393-b434-1557ea71cdac-logs\") pod \"nova-api-0\" (UID: \"f504f815-d70e-4393-b434-1557ea71cdac\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:56:51 crc kubenswrapper[4558]: I0120 17:56:51.397929 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wbxtv\" (UniqueName: \"kubernetes.io/projected/f504f815-d70e-4393-b434-1557ea71cdac-kube-api-access-wbxtv\") pod \"nova-api-0\" (UID: \"f504f815-d70e-4393-b434-1557ea71cdac\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:56:51 crc kubenswrapper[4558]: I0120 17:56:51.398725 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/f504f815-d70e-4393-b434-1557ea71cdac-logs\") pod \"nova-api-0\" (UID: \"f504f815-d70e-4393-b434-1557ea71cdac\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:56:51 crc kubenswrapper[4558]: I0120 17:56:51.411473 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f504f815-d70e-4393-b434-1557ea71cdac-config-data\") pod \"nova-api-0\" (UID: \"f504f815-d70e-4393-b434-1557ea71cdac\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:56:51 crc kubenswrapper[4558]: I0120 17:56:51.413266 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wbxtv\" (UniqueName: \"kubernetes.io/projected/f504f815-d70e-4393-b434-1557ea71cdac-kube-api-access-wbxtv\") pod \"nova-api-0\" (UID: \"f504f815-d70e-4393-b434-1557ea71cdac\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:56:51 crc kubenswrapper[4558]: I0120 17:56:51.424790 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f504f815-d70e-4393-b434-1557ea71cdac-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"f504f815-d70e-4393-b434-1557ea71cdac\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:56:51 crc kubenswrapper[4558]: I0120 17:56:51.565548 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:56:51 crc kubenswrapper[4558]: I0120 17:56:51.974899 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:56:51 crc kubenswrapper[4558]: W0120 17:56:51.976232 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf504f815_d70e_4393_b434_1557ea71cdac.slice/crio-9c31d822ad903313e6423530591f76386f46f63c99c87dba4045ada5aac205ee WatchSource:0}: Error finding container 9c31d822ad903313e6423530591f76386f46f63c99c87dba4045ada5aac205ee: Status 404 returned error can't find the container with id 9c31d822ad903313e6423530591f76386f46f63c99c87dba4045ada5aac205ee Jan 20 17:56:52 crc kubenswrapper[4558]: E0120 17:56:52.109935 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="4476058cbcb49b0953be27b4233e9206097b51f7403463516c9726ffdf82d395" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:56:52 crc kubenswrapper[4558]: E0120 17:56:52.114544 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="4476058cbcb49b0953be27b4233e9206097b51f7403463516c9726ffdf82d395" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:56:52 crc kubenswrapper[4558]: E0120 17:56:52.115870 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="4476058cbcb49b0953be27b4233e9206097b51f7403463516c9726ffdf82d395" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:56:52 crc kubenswrapper[4558]: E0120 17:56:52.115903 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" 
probeType="Readiness" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="27f0171c-82eb-4da0-9cb6-1e8f3ce1db3d" containerName="nova-scheduler-scheduler" Jan 20 17:56:52 crc kubenswrapper[4558]: I0120 17:56:52.579149 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="05eb5b95-07c8-49be-919a-daf3454ec864" path="/var/lib/kubelet/pods/05eb5b95-07c8-49be-919a-daf3454ec864/volumes" Jan 20 17:56:52 crc kubenswrapper[4558]: I0120 17:56:52.918059 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"f504f815-d70e-4393-b434-1557ea71cdac","Type":"ContainerStarted","Data":"13d3643ad3b5b35641cbd8b9007eb162a4196f10a53748dbe4afc9a2b738ab39"} Jan 20 17:56:52 crc kubenswrapper[4558]: I0120 17:56:52.918117 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"f504f815-d70e-4393-b434-1557ea71cdac","Type":"ContainerStarted","Data":"0bed7a79966310e4d3b7b07fe2039cbcec2b6a9d72c6c1e5851aa950c4d1bf3c"} Jan 20 17:56:52 crc kubenswrapper[4558]: I0120 17:56:52.918129 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"f504f815-d70e-4393-b434-1557ea71cdac","Type":"ContainerStarted","Data":"9c31d822ad903313e6423530591f76386f46f63c99c87dba4045ada5aac205ee"} Jan 20 17:56:52 crc kubenswrapper[4558]: I0120 17:56:52.940663 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-api-0" podStartSLOduration=1.9406474230000001 podStartE2EDuration="1.940647423s" podCreationTimestamp="2026-01-20 17:56:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:56:52.935337972 +0000 UTC m=+4506.695675939" watchObservedRunningTime="2026-01-20 17:56:52.940647423 +0000 UTC m=+4506.700985389" Jan 20 17:56:53 crc kubenswrapper[4558]: I0120 17:56:53.690081 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:53 crc kubenswrapper[4558]: I0120 17:56:53.747535 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sdhfx\" (UniqueName: \"kubernetes.io/projected/9d2add51-d3c5-44af-a792-a7301e834322-kube-api-access-sdhfx\") pod \"9d2add51-d3c5-44af-a792-a7301e834322\" (UID: \"9d2add51-d3c5-44af-a792-a7301e834322\") " Jan 20 17:56:53 crc kubenswrapper[4558]: I0120 17:56:53.747659 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d2add51-d3c5-44af-a792-a7301e834322-config-data\") pod \"9d2add51-d3c5-44af-a792-a7301e834322\" (UID: \"9d2add51-d3c5-44af-a792-a7301e834322\") " Jan 20 17:56:53 crc kubenswrapper[4558]: I0120 17:56:53.747719 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/9d2add51-d3c5-44af-a792-a7301e834322-nova-metadata-tls-certs\") pod \"9d2add51-d3c5-44af-a792-a7301e834322\" (UID: \"9d2add51-d3c5-44af-a792-a7301e834322\") " Jan 20 17:56:53 crc kubenswrapper[4558]: I0120 17:56:53.747846 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9d2add51-d3c5-44af-a792-a7301e834322-logs\") pod \"9d2add51-d3c5-44af-a792-a7301e834322\" (UID: \"9d2add51-d3c5-44af-a792-a7301e834322\") " Jan 20 17:56:53 crc kubenswrapper[4558]: I0120 17:56:53.747897 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d2add51-d3c5-44af-a792-a7301e834322-combined-ca-bundle\") pod \"9d2add51-d3c5-44af-a792-a7301e834322\" (UID: \"9d2add51-d3c5-44af-a792-a7301e834322\") " Jan 20 17:56:53 crc kubenswrapper[4558]: I0120 17:56:53.750652 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9d2add51-d3c5-44af-a792-a7301e834322-logs" (OuterVolumeSpecName: "logs") pod "9d2add51-d3c5-44af-a792-a7301e834322" (UID: "9d2add51-d3c5-44af-a792-a7301e834322"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:56:53 crc kubenswrapper[4558]: I0120 17:56:53.777515 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d2add51-d3c5-44af-a792-a7301e834322-kube-api-access-sdhfx" (OuterVolumeSpecName: "kube-api-access-sdhfx") pod "9d2add51-d3c5-44af-a792-a7301e834322" (UID: "9d2add51-d3c5-44af-a792-a7301e834322"). InnerVolumeSpecName "kube-api-access-sdhfx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:56:53 crc kubenswrapper[4558]: I0120 17:56:53.793089 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d2add51-d3c5-44af-a792-a7301e834322-config-data" (OuterVolumeSpecName: "config-data") pod "9d2add51-d3c5-44af-a792-a7301e834322" (UID: "9d2add51-d3c5-44af-a792-a7301e834322"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:56:53 crc kubenswrapper[4558]: I0120 17:56:53.800274 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d2add51-d3c5-44af-a792-a7301e834322-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9d2add51-d3c5-44af-a792-a7301e834322" (UID: "9d2add51-d3c5-44af-a792-a7301e834322"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:56:53 crc kubenswrapper[4558]: I0120 17:56:53.812931 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d2add51-d3c5-44af-a792-a7301e834322-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "9d2add51-d3c5-44af-a792-a7301e834322" (UID: "9d2add51-d3c5-44af-a792-a7301e834322"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:56:53 crc kubenswrapper[4558]: I0120 17:56:53.849895 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d2add51-d3c5-44af-a792-a7301e834322-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:53 crc kubenswrapper[4558]: I0120 17:56:53.849933 4558 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/9d2add51-d3c5-44af-a792-a7301e834322-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:53 crc kubenswrapper[4558]: I0120 17:56:53.849947 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9d2add51-d3c5-44af-a792-a7301e834322-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:53 crc kubenswrapper[4558]: I0120 17:56:53.849961 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d2add51-d3c5-44af-a792-a7301e834322-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:53 crc kubenswrapper[4558]: I0120 17:56:53.849972 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sdhfx\" (UniqueName: \"kubernetes.io/projected/9d2add51-d3c5-44af-a792-a7301e834322-kube-api-access-sdhfx\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:53 crc kubenswrapper[4558]: I0120 17:56:53.927984 4558 generic.go:334] "Generic (PLEG): container finished" podID="9d2add51-d3c5-44af-a792-a7301e834322" containerID="48c1a79885d63262f1636dd956e4fd1544b1f866fc373b6a47841c7ffba899eb" exitCode=0 Jan 20 17:56:53 crc kubenswrapper[4558]: I0120 17:56:53.929492 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:53 crc kubenswrapper[4558]: I0120 17:56:53.932406 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"9d2add51-d3c5-44af-a792-a7301e834322","Type":"ContainerDied","Data":"48c1a79885d63262f1636dd956e4fd1544b1f866fc373b6a47841c7ffba899eb"} Jan 20 17:56:53 crc kubenswrapper[4558]: I0120 17:56:53.932489 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"9d2add51-d3c5-44af-a792-a7301e834322","Type":"ContainerDied","Data":"32b281763590d5026a26422ced7e24a1579ad8615e8a306bc2cf6fb2ec584ce4"} Jan 20 17:56:53 crc kubenswrapper[4558]: I0120 17:56:53.932549 4558 scope.go:117] "RemoveContainer" containerID="48c1a79885d63262f1636dd956e4fd1544b1f866fc373b6a47841c7ffba899eb" Jan 20 17:56:53 crc kubenswrapper[4558]: I0120 17:56:53.961131 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:56:53 crc kubenswrapper[4558]: I0120 17:56:53.969574 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:56:53 crc kubenswrapper[4558]: I0120 17:56:53.983401 4558 scope.go:117] "RemoveContainer" containerID="a51a56718bd2a2dbc6833433430e6b07d4e0e76fa08d35b70dd83eaeb7bc29a8" Jan 20 17:56:53 crc kubenswrapper[4558]: I0120 17:56:53.993229 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:56:53 crc kubenswrapper[4558]: E0120 17:56:53.993696 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d2add51-d3c5-44af-a792-a7301e834322" containerName="nova-metadata-log" Jan 20 17:56:53 crc kubenswrapper[4558]: I0120 17:56:53.993713 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d2add51-d3c5-44af-a792-a7301e834322" containerName="nova-metadata-log" Jan 20 17:56:53 crc kubenswrapper[4558]: E0120 17:56:53.993727 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d2add51-d3c5-44af-a792-a7301e834322" containerName="nova-metadata-metadata" Jan 20 17:56:53 crc kubenswrapper[4558]: I0120 17:56:53.993734 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d2add51-d3c5-44af-a792-a7301e834322" containerName="nova-metadata-metadata" Jan 20 17:56:53 crc kubenswrapper[4558]: I0120 17:56:53.993928 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="9d2add51-d3c5-44af-a792-a7301e834322" containerName="nova-metadata-log" Jan 20 17:56:53 crc kubenswrapper[4558]: I0120 17:56:53.993965 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="9d2add51-d3c5-44af-a792-a7301e834322" containerName="nova-metadata-metadata" Jan 20 17:56:53 crc kubenswrapper[4558]: I0120 17:56:53.994939 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:53 crc kubenswrapper[4558]: I0120 17:56:53.997351 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-metadata-internal-svc" Jan 20 17:56:53 crc kubenswrapper[4558]: I0120 17:56:53.997646 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-metadata-config-data" Jan 20 17:56:54 crc kubenswrapper[4558]: I0120 17:56:54.009149 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:56:54 crc kubenswrapper[4558]: I0120 17:56:54.015711 4558 scope.go:117] "RemoveContainer" containerID="48c1a79885d63262f1636dd956e4fd1544b1f866fc373b6a47841c7ffba899eb" Jan 20 17:56:54 crc kubenswrapper[4558]: E0120 17:56:54.017572 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"48c1a79885d63262f1636dd956e4fd1544b1f866fc373b6a47841c7ffba899eb\": container with ID starting with 48c1a79885d63262f1636dd956e4fd1544b1f866fc373b6a47841c7ffba899eb not found: ID does not exist" containerID="48c1a79885d63262f1636dd956e4fd1544b1f866fc373b6a47841c7ffba899eb" Jan 20 17:56:54 crc kubenswrapper[4558]: I0120 17:56:54.017612 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"48c1a79885d63262f1636dd956e4fd1544b1f866fc373b6a47841c7ffba899eb"} err="failed to get container status \"48c1a79885d63262f1636dd956e4fd1544b1f866fc373b6a47841c7ffba899eb\": rpc error: code = NotFound desc = could not find container \"48c1a79885d63262f1636dd956e4fd1544b1f866fc373b6a47841c7ffba899eb\": container with ID starting with 48c1a79885d63262f1636dd956e4fd1544b1f866fc373b6a47841c7ffba899eb not found: ID does not exist" Jan 20 17:56:54 crc kubenswrapper[4558]: I0120 17:56:54.017640 4558 scope.go:117] "RemoveContainer" containerID="a51a56718bd2a2dbc6833433430e6b07d4e0e76fa08d35b70dd83eaeb7bc29a8" Jan 20 17:56:54 crc kubenswrapper[4558]: E0120 17:56:54.018055 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a51a56718bd2a2dbc6833433430e6b07d4e0e76fa08d35b70dd83eaeb7bc29a8\": container with ID starting with a51a56718bd2a2dbc6833433430e6b07d4e0e76fa08d35b70dd83eaeb7bc29a8 not found: ID does not exist" containerID="a51a56718bd2a2dbc6833433430e6b07d4e0e76fa08d35b70dd83eaeb7bc29a8" Jan 20 17:56:54 crc kubenswrapper[4558]: I0120 17:56:54.018080 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a51a56718bd2a2dbc6833433430e6b07d4e0e76fa08d35b70dd83eaeb7bc29a8"} err="failed to get container status \"a51a56718bd2a2dbc6833433430e6b07d4e0e76fa08d35b70dd83eaeb7bc29a8\": rpc error: code = NotFound desc = could not find container \"a51a56718bd2a2dbc6833433430e6b07d4e0e76fa08d35b70dd83eaeb7bc29a8\": container with ID starting with a51a56718bd2a2dbc6833433430e6b07d4e0e76fa08d35b70dd83eaeb7bc29a8 not found: ID does not exist" Jan 20 17:56:54 crc kubenswrapper[4558]: I0120 17:56:54.053382 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c8f11f45-bfce-4989-bfa0-684011f74619-config-data\") pod \"nova-metadata-0\" (UID: \"c8f11f45-bfce-4989-bfa0-684011f74619\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:54 crc kubenswrapper[4558]: I0120 17:56:54.053423 4558 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/c8f11f45-bfce-4989-bfa0-684011f74619-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"c8f11f45-bfce-4989-bfa0-684011f74619\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:54 crc kubenswrapper[4558]: I0120 17:56:54.053495 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nrm9p\" (UniqueName: \"kubernetes.io/projected/c8f11f45-bfce-4989-bfa0-684011f74619-kube-api-access-nrm9p\") pod \"nova-metadata-0\" (UID: \"c8f11f45-bfce-4989-bfa0-684011f74619\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:54 crc kubenswrapper[4558]: I0120 17:56:54.053666 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c8f11f45-bfce-4989-bfa0-684011f74619-logs\") pod \"nova-metadata-0\" (UID: \"c8f11f45-bfce-4989-bfa0-684011f74619\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:54 crc kubenswrapper[4558]: I0120 17:56:54.053730 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8f11f45-bfce-4989-bfa0-684011f74619-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"c8f11f45-bfce-4989-bfa0-684011f74619\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:54 crc kubenswrapper[4558]: I0120 17:56:54.062131 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:56:54 crc kubenswrapper[4558]: I0120 17:56:54.062386 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/kube-state-metrics-0" podUID="7f609e06-c77b-4219-9143-8994fac93c0e" containerName="kube-state-metrics" containerID="cri-o://afb05b80e47beabb7af62918a7ee87a317b8c87a34ce9be021af37bd27b5cf9e" gracePeriod=30 Jan 20 17:56:54 crc kubenswrapper[4558]: I0120 17:56:54.156906 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nrm9p\" (UniqueName: \"kubernetes.io/projected/c8f11f45-bfce-4989-bfa0-684011f74619-kube-api-access-nrm9p\") pod \"nova-metadata-0\" (UID: \"c8f11f45-bfce-4989-bfa0-684011f74619\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:54 crc kubenswrapper[4558]: I0120 17:56:54.157358 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c8f11f45-bfce-4989-bfa0-684011f74619-logs\") pod \"nova-metadata-0\" (UID: \"c8f11f45-bfce-4989-bfa0-684011f74619\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:54 crc kubenswrapper[4558]: I0120 17:56:54.157413 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8f11f45-bfce-4989-bfa0-684011f74619-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"c8f11f45-bfce-4989-bfa0-684011f74619\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:54 crc kubenswrapper[4558]: I0120 17:56:54.157466 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c8f11f45-bfce-4989-bfa0-684011f74619-config-data\") pod \"nova-metadata-0\" (UID: \"c8f11f45-bfce-4989-bfa0-684011f74619\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:54 crc 
kubenswrapper[4558]: I0120 17:56:54.157496 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/c8f11f45-bfce-4989-bfa0-684011f74619-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"c8f11f45-bfce-4989-bfa0-684011f74619\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:54 crc kubenswrapper[4558]: I0120 17:56:54.157758 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c8f11f45-bfce-4989-bfa0-684011f74619-logs\") pod \"nova-metadata-0\" (UID: \"c8f11f45-bfce-4989-bfa0-684011f74619\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:54 crc kubenswrapper[4558]: I0120 17:56:54.162126 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c8f11f45-bfce-4989-bfa0-684011f74619-config-data\") pod \"nova-metadata-0\" (UID: \"c8f11f45-bfce-4989-bfa0-684011f74619\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:54 crc kubenswrapper[4558]: I0120 17:56:54.162882 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8f11f45-bfce-4989-bfa0-684011f74619-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"c8f11f45-bfce-4989-bfa0-684011f74619\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:54 crc kubenswrapper[4558]: I0120 17:56:54.167236 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/c8f11f45-bfce-4989-bfa0-684011f74619-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"c8f11f45-bfce-4989-bfa0-684011f74619\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:54 crc kubenswrapper[4558]: I0120 17:56:54.175671 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nrm9p\" (UniqueName: \"kubernetes.io/projected/c8f11f45-bfce-4989-bfa0-684011f74619-kube-api-access-nrm9p\") pod \"nova-metadata-0\" (UID: \"c8f11f45-bfce-4989-bfa0-684011f74619\") " pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:54 crc kubenswrapper[4558]: I0120 17:56:54.319643 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:54 crc kubenswrapper[4558]: I0120 17:56:54.585119 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:56:54 crc kubenswrapper[4558]: I0120 17:56:54.600981 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d2add51-d3c5-44af-a792-a7301e834322" path="/var/lib/kubelet/pods/9d2add51-d3c5-44af-a792-a7301e834322/volumes" Jan 20 17:56:54 crc kubenswrapper[4558]: I0120 17:56:54.615033 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:56:54 crc kubenswrapper[4558]: I0120 17:56:54.689847 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/27f0171c-82eb-4da0-9cb6-1e8f3ce1db3d-combined-ca-bundle\") pod \"27f0171c-82eb-4da0-9cb6-1e8f3ce1db3d\" (UID: \"27f0171c-82eb-4da0-9cb6-1e8f3ce1db3d\") " Jan 20 17:56:54 crc kubenswrapper[4558]: I0120 17:56:54.690057 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/27f0171c-82eb-4da0-9cb6-1e8f3ce1db3d-config-data\") pod \"27f0171c-82eb-4da0-9cb6-1e8f3ce1db3d\" (UID: \"27f0171c-82eb-4da0-9cb6-1e8f3ce1db3d\") " Jan 20 17:56:54 crc kubenswrapper[4558]: I0120 17:56:54.690086 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zptcv\" (UniqueName: \"kubernetes.io/projected/7f609e06-c77b-4219-9143-8994fac93c0e-kube-api-access-zptcv\") pod \"7f609e06-c77b-4219-9143-8994fac93c0e\" (UID: \"7f609e06-c77b-4219-9143-8994fac93c0e\") " Jan 20 17:56:54 crc kubenswrapper[4558]: I0120 17:56:54.690159 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d2wcv\" (UniqueName: \"kubernetes.io/projected/27f0171c-82eb-4da0-9cb6-1e8f3ce1db3d-kube-api-access-d2wcv\") pod \"27f0171c-82eb-4da0-9cb6-1e8f3ce1db3d\" (UID: \"27f0171c-82eb-4da0-9cb6-1e8f3ce1db3d\") " Jan 20 17:56:54 crc kubenswrapper[4558]: I0120 17:56:54.695025 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7f609e06-c77b-4219-9143-8994fac93c0e-kube-api-access-zptcv" (OuterVolumeSpecName: "kube-api-access-zptcv") pod "7f609e06-c77b-4219-9143-8994fac93c0e" (UID: "7f609e06-c77b-4219-9143-8994fac93c0e"). InnerVolumeSpecName "kube-api-access-zptcv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:56:54 crc kubenswrapper[4558]: I0120 17:56:54.701036 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/27f0171c-82eb-4da0-9cb6-1e8f3ce1db3d-kube-api-access-d2wcv" (OuterVolumeSpecName: "kube-api-access-d2wcv") pod "27f0171c-82eb-4da0-9cb6-1e8f3ce1db3d" (UID: "27f0171c-82eb-4da0-9cb6-1e8f3ce1db3d"). InnerVolumeSpecName "kube-api-access-d2wcv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:56:54 crc kubenswrapper[4558]: I0120 17:56:54.711857 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/27f0171c-82eb-4da0-9cb6-1e8f3ce1db3d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "27f0171c-82eb-4da0-9cb6-1e8f3ce1db3d" (UID: "27f0171c-82eb-4da0-9cb6-1e8f3ce1db3d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:56:54 crc kubenswrapper[4558]: I0120 17:56:54.713061 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/27f0171c-82eb-4da0-9cb6-1e8f3ce1db3d-config-data" (OuterVolumeSpecName: "config-data") pod "27f0171c-82eb-4da0-9cb6-1e8f3ce1db3d" (UID: "27f0171c-82eb-4da0-9cb6-1e8f3ce1db3d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:56:54 crc kubenswrapper[4558]: I0120 17:56:54.793310 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/27f0171c-82eb-4da0-9cb6-1e8f3ce1db3d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:54 crc kubenswrapper[4558]: I0120 17:56:54.793354 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/27f0171c-82eb-4da0-9cb6-1e8f3ce1db3d-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:54 crc kubenswrapper[4558]: I0120 17:56:54.793372 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zptcv\" (UniqueName: \"kubernetes.io/projected/7f609e06-c77b-4219-9143-8994fac93c0e-kube-api-access-zptcv\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:54 crc kubenswrapper[4558]: I0120 17:56:54.793391 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d2wcv\" (UniqueName: \"kubernetes.io/projected/27f0171c-82eb-4da0-9cb6-1e8f3ce1db3d-kube-api-access-d2wcv\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:54 crc kubenswrapper[4558]: I0120 17:56:54.798743 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:56:54 crc kubenswrapper[4558]: I0120 17:56:54.951630 4558 generic.go:334] "Generic (PLEG): container finished" podID="27f0171c-82eb-4da0-9cb6-1e8f3ce1db3d" containerID="4476058cbcb49b0953be27b4233e9206097b51f7403463516c9726ffdf82d395" exitCode=0 Jan 20 17:56:54 crc kubenswrapper[4558]: I0120 17:56:54.952026 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"27f0171c-82eb-4da0-9cb6-1e8f3ce1db3d","Type":"ContainerDied","Data":"4476058cbcb49b0953be27b4233e9206097b51f7403463516c9726ffdf82d395"} Jan 20 17:56:54 crc kubenswrapper[4558]: I0120 17:56:54.952047 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:56:54 crc kubenswrapper[4558]: I0120 17:56:54.952101 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"27f0171c-82eb-4da0-9cb6-1e8f3ce1db3d","Type":"ContainerDied","Data":"66459d74bbe222c6504c998ba08fcf3e2d242617a7c92f40640f8c812ce4067d"} Jan 20 17:56:54 crc kubenswrapper[4558]: I0120 17:56:54.952124 4558 scope.go:117] "RemoveContainer" containerID="4476058cbcb49b0953be27b4233e9206097b51f7403463516c9726ffdf82d395" Jan 20 17:56:54 crc kubenswrapper[4558]: I0120 17:56:54.967272 4558 generic.go:334] "Generic (PLEG): container finished" podID="7f609e06-c77b-4219-9143-8994fac93c0e" containerID="afb05b80e47beabb7af62918a7ee87a317b8c87a34ce9be021af37bd27b5cf9e" exitCode=2 Jan 20 17:56:54 crc kubenswrapper[4558]: I0120 17:56:54.967408 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:56:54 crc kubenswrapper[4558]: I0120 17:56:54.968089 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"7f609e06-c77b-4219-9143-8994fac93c0e","Type":"ContainerDied","Data":"afb05b80e47beabb7af62918a7ee87a317b8c87a34ce9be021af37bd27b5cf9e"} Jan 20 17:56:54 crc kubenswrapper[4558]: I0120 17:56:54.968137 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"7f609e06-c77b-4219-9143-8994fac93c0e","Type":"ContainerDied","Data":"299d0f00fce348d45c1bc2778654996557a1a58e3dbb25417ce3a33d746235f8"} Jan 20 17:56:54 crc kubenswrapper[4558]: I0120 17:56:54.980679 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"c8f11f45-bfce-4989-bfa0-684011f74619","Type":"ContainerStarted","Data":"54df676fde10a0849dfebcb70b517d2908a83f1c31dbdeeb408ebc25bb01c32f"} Jan 20 17:56:55 crc kubenswrapper[4558]: I0120 17:56:55.014124 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:56:55 crc kubenswrapper[4558]: I0120 17:56:55.031563 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:56:55 crc kubenswrapper[4558]: I0120 17:56:55.037605 4558 scope.go:117] "RemoveContainer" containerID="4476058cbcb49b0953be27b4233e9206097b51f7403463516c9726ffdf82d395" Jan 20 17:56:55 crc kubenswrapper[4558]: I0120 17:56:55.037695 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:56:55 crc kubenswrapper[4558]: E0120 17:56:55.043276 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4476058cbcb49b0953be27b4233e9206097b51f7403463516c9726ffdf82d395\": container with ID starting with 4476058cbcb49b0953be27b4233e9206097b51f7403463516c9726ffdf82d395 not found: ID does not exist" containerID="4476058cbcb49b0953be27b4233e9206097b51f7403463516c9726ffdf82d395" Jan 20 17:56:55 crc kubenswrapper[4558]: I0120 17:56:55.043312 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4476058cbcb49b0953be27b4233e9206097b51f7403463516c9726ffdf82d395"} err="failed to get container status \"4476058cbcb49b0953be27b4233e9206097b51f7403463516c9726ffdf82d395\": rpc error: code = NotFound desc = could not find container \"4476058cbcb49b0953be27b4233e9206097b51f7403463516c9726ffdf82d395\": container with ID starting with 4476058cbcb49b0953be27b4233e9206097b51f7403463516c9726ffdf82d395 not found: ID does not exist" Jan 20 17:56:55 crc kubenswrapper[4558]: I0120 17:56:55.043347 4558 scope.go:117] "RemoveContainer" containerID="afb05b80e47beabb7af62918a7ee87a317b8c87a34ce9be021af37bd27b5cf9e" Jan 20 17:56:55 crc kubenswrapper[4558]: I0120 17:56:55.050123 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:56:55 crc kubenswrapper[4558]: I0120 17:56:55.067259 4558 scope.go:117] "RemoveContainer" containerID="afb05b80e47beabb7af62918a7ee87a317b8c87a34ce9be021af37bd27b5cf9e" Jan 20 17:56:55 crc kubenswrapper[4558]: E0120 17:56:55.068843 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"afb05b80e47beabb7af62918a7ee87a317b8c87a34ce9be021af37bd27b5cf9e\": container with 
ID starting with afb05b80e47beabb7af62918a7ee87a317b8c87a34ce9be021af37bd27b5cf9e not found: ID does not exist" containerID="afb05b80e47beabb7af62918a7ee87a317b8c87a34ce9be021af37bd27b5cf9e" Jan 20 17:56:55 crc kubenswrapper[4558]: I0120 17:56:55.068874 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"afb05b80e47beabb7af62918a7ee87a317b8c87a34ce9be021af37bd27b5cf9e"} err="failed to get container status \"afb05b80e47beabb7af62918a7ee87a317b8c87a34ce9be021af37bd27b5cf9e\": rpc error: code = NotFound desc = could not find container \"afb05b80e47beabb7af62918a7ee87a317b8c87a34ce9be021af37bd27b5cf9e\": container with ID starting with afb05b80e47beabb7af62918a7ee87a317b8c87a34ce9be021af37bd27b5cf9e not found: ID does not exist" Jan 20 17:56:55 crc kubenswrapper[4558]: I0120 17:56:55.077728 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:56:55 crc kubenswrapper[4558]: E0120 17:56:55.078293 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f609e06-c77b-4219-9143-8994fac93c0e" containerName="kube-state-metrics" Jan 20 17:56:55 crc kubenswrapper[4558]: I0120 17:56:55.078315 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f609e06-c77b-4219-9143-8994fac93c0e" containerName="kube-state-metrics" Jan 20 17:56:55 crc kubenswrapper[4558]: E0120 17:56:55.078339 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27f0171c-82eb-4da0-9cb6-1e8f3ce1db3d" containerName="nova-scheduler-scheduler" Jan 20 17:56:55 crc kubenswrapper[4558]: I0120 17:56:55.078350 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="27f0171c-82eb-4da0-9cb6-1e8f3ce1db3d" containerName="nova-scheduler-scheduler" Jan 20 17:56:55 crc kubenswrapper[4558]: I0120 17:56:55.078571 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="27f0171c-82eb-4da0-9cb6-1e8f3ce1db3d" containerName="nova-scheduler-scheduler" Jan 20 17:56:55 crc kubenswrapper[4558]: I0120 17:56:55.078589 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="7f609e06-c77b-4219-9143-8994fac93c0e" containerName="kube-state-metrics" Jan 20 17:56:55 crc kubenswrapper[4558]: I0120 17:56:55.079303 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:56:55 crc kubenswrapper[4558]: I0120 17:56:55.084296 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-scheduler-config-data" Jan 20 17:56:55 crc kubenswrapper[4558]: I0120 17:56:55.087042 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:56:55 crc kubenswrapper[4558]: I0120 17:56:55.098150 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:56:55 crc kubenswrapper[4558]: I0120 17:56:55.098281 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:56:55 crc kubenswrapper[4558]: I0120 17:56:55.100332 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:56:55 crc kubenswrapper[4558]: I0120 17:56:55.114220 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"kube-state-metrics-tls-config" Jan 20 17:56:55 crc kubenswrapper[4558]: I0120 17:56:55.114479 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-kube-state-metrics-svc" Jan 20 17:56:55 crc kubenswrapper[4558]: I0120 17:56:55.201060 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e270f26-f899-4007-94bf-ab62080fe4ce-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"4e270f26-f899-4007-94bf-ab62080fe4ce\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:56:55 crc kubenswrapper[4558]: I0120 17:56:55.201300 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/4e270f26-f899-4007-94bf-ab62080fe4ce-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"4e270f26-f899-4007-94bf-ab62080fe4ce\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:56:55 crc kubenswrapper[4558]: I0120 17:56:55.201515 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e270f26-f899-4007-94bf-ab62080fe4ce-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"4e270f26-f899-4007-94bf-ab62080fe4ce\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:56:55 crc kubenswrapper[4558]: I0120 17:56:55.201548 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b8bqv\" (UniqueName: \"kubernetes.io/projected/f5beef7a-89e0-4e53-960b-1cacc665d586-kube-api-access-b8bqv\") pod \"nova-scheduler-0\" (UID: \"f5beef7a-89e0-4e53-960b-1cacc665d586\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:56:55 crc kubenswrapper[4558]: I0120 17:56:55.201690 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5beef7a-89e0-4e53-960b-1cacc665d586-config-data\") pod \"nova-scheduler-0\" (UID: \"f5beef7a-89e0-4e53-960b-1cacc665d586\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:56:55 crc kubenswrapper[4558]: I0120 17:56:55.201874 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5beef7a-89e0-4e53-960b-1cacc665d586-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"f5beef7a-89e0-4e53-960b-1cacc665d586\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:56:55 crc kubenswrapper[4558]: I0120 17:56:55.201919 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-55gl2\" (UniqueName: \"kubernetes.io/projected/4e270f26-f899-4007-94bf-ab62080fe4ce-kube-api-access-55gl2\") pod \"kube-state-metrics-0\" (UID: \"4e270f26-f899-4007-94bf-ab62080fe4ce\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:56:55 crc kubenswrapper[4558]: I0120 
17:56:55.303031 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/4e270f26-f899-4007-94bf-ab62080fe4ce-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"4e270f26-f899-4007-94bf-ab62080fe4ce\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:56:55 crc kubenswrapper[4558]: I0120 17:56:55.303103 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e270f26-f899-4007-94bf-ab62080fe4ce-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"4e270f26-f899-4007-94bf-ab62080fe4ce\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:56:55 crc kubenswrapper[4558]: I0120 17:56:55.303128 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b8bqv\" (UniqueName: \"kubernetes.io/projected/f5beef7a-89e0-4e53-960b-1cacc665d586-kube-api-access-b8bqv\") pod \"nova-scheduler-0\" (UID: \"f5beef7a-89e0-4e53-960b-1cacc665d586\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:56:55 crc kubenswrapper[4558]: I0120 17:56:55.303185 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5beef7a-89e0-4e53-960b-1cacc665d586-config-data\") pod \"nova-scheduler-0\" (UID: \"f5beef7a-89e0-4e53-960b-1cacc665d586\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:56:55 crc kubenswrapper[4558]: I0120 17:56:55.303237 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5beef7a-89e0-4e53-960b-1cacc665d586-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"f5beef7a-89e0-4e53-960b-1cacc665d586\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:56:55 crc kubenswrapper[4558]: I0120 17:56:55.303258 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-55gl2\" (UniqueName: \"kubernetes.io/projected/4e270f26-f899-4007-94bf-ab62080fe4ce-kube-api-access-55gl2\") pod \"kube-state-metrics-0\" (UID: \"4e270f26-f899-4007-94bf-ab62080fe4ce\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:56:55 crc kubenswrapper[4558]: I0120 17:56:55.303283 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e270f26-f899-4007-94bf-ab62080fe4ce-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"4e270f26-f899-4007-94bf-ab62080fe4ce\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:56:55 crc kubenswrapper[4558]: I0120 17:56:55.306565 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e270f26-f899-4007-94bf-ab62080fe4ce-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"4e270f26-f899-4007-94bf-ab62080fe4ce\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:56:55 crc kubenswrapper[4558]: I0120 17:56:55.306956 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/4e270f26-f899-4007-94bf-ab62080fe4ce-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"4e270f26-f899-4007-94bf-ab62080fe4ce\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:56:55 crc kubenswrapper[4558]: 
I0120 17:56:55.307287 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e270f26-f899-4007-94bf-ab62080fe4ce-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"4e270f26-f899-4007-94bf-ab62080fe4ce\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:56:55 crc kubenswrapper[4558]: I0120 17:56:55.307310 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5beef7a-89e0-4e53-960b-1cacc665d586-config-data\") pod \"nova-scheduler-0\" (UID: \"f5beef7a-89e0-4e53-960b-1cacc665d586\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:56:55 crc kubenswrapper[4558]: I0120 17:56:55.307366 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5beef7a-89e0-4e53-960b-1cacc665d586-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"f5beef7a-89e0-4e53-960b-1cacc665d586\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:56:55 crc kubenswrapper[4558]: I0120 17:56:55.324815 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b8bqv\" (UniqueName: \"kubernetes.io/projected/f5beef7a-89e0-4e53-960b-1cacc665d586-kube-api-access-b8bqv\") pod \"nova-scheduler-0\" (UID: \"f5beef7a-89e0-4e53-960b-1cacc665d586\") " pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:56:55 crc kubenswrapper[4558]: I0120 17:56:55.326125 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-55gl2\" (UniqueName: \"kubernetes.io/projected/4e270f26-f899-4007-94bf-ab62080fe4ce-kube-api-access-55gl2\") pod \"kube-state-metrics-0\" (UID: \"4e270f26-f899-4007-94bf-ab62080fe4ce\") " pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:56:55 crc kubenswrapper[4558]: I0120 17:56:55.414437 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:56:55 crc kubenswrapper[4558]: I0120 17:56:55.466514 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:56:55 crc kubenswrapper[4558]: I0120 17:56:55.871309 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:56:55 crc kubenswrapper[4558]: I0120 17:56:55.953480 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:56:55 crc kubenswrapper[4558]: I0120 17:56:55.992025 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"4e270f26-f899-4007-94bf-ab62080fe4ce","Type":"ContainerStarted","Data":"b0e7917ea1ab5684946e8b6bba4e8c0d10bad4512ac8a326577302ed3e8d9993"} Jan 20 17:56:55 crc kubenswrapper[4558]: I0120 17:56:55.998329 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"c8f11f45-bfce-4989-bfa0-684011f74619","Type":"ContainerStarted","Data":"8bfec59d12be9eefb33c786c52426fac978276a374abf67c2991cebb7a215c6d"} Jan 20 17:56:55 crc kubenswrapper[4558]: I0120 17:56:55.998393 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"c8f11f45-bfce-4989-bfa0-684011f74619","Type":"ContainerStarted","Data":"d0304cd10afa9031be3b363bc240ee32e662b09bbf8d0575f8a91d278b93f42b"} Jan 20 17:56:56 crc kubenswrapper[4558]: I0120 17:56:56.001551 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"f5beef7a-89e0-4e53-960b-1cacc665d586","Type":"ContainerStarted","Data":"b0b70a84fc120072c22f67b39645276298802c40a8f1d3d2753fa575dce540e9"} Jan 20 17:56:56 crc kubenswrapper[4558]: I0120 17:56:56.016316 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-metadata-0" podStartSLOduration=3.016300756 podStartE2EDuration="3.016300756s" podCreationTimestamp="2026-01-20 17:56:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:56:56.015035437 +0000 UTC m=+4509.775373405" watchObservedRunningTime="2026-01-20 17:56:56.016300756 +0000 UTC m=+4509.776638723" Jan 20 17:56:56 crc kubenswrapper[4558]: I0120 17:56:56.036964 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:56:56 crc kubenswrapper[4558]: I0120 17:56:56.037256 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="491a5173-ab6a-4abf-932d-93e7b4856b8b" containerName="ceilometer-central-agent" containerID="cri-o://c832bcbb9393f9662665b8047449f27e4fcb185bed442d412b8f9e53cfdceaa1" gracePeriod=30 Jan 20 17:56:56 crc kubenswrapper[4558]: I0120 17:56:56.037652 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="491a5173-ab6a-4abf-932d-93e7b4856b8b" containerName="proxy-httpd" containerID="cri-o://737858e99577b342734a59a10def7df38917eabf05f581f4f3ca9bf3bb32550b" gracePeriod=30 Jan 20 17:56:56 crc kubenswrapper[4558]: I0120 17:56:56.037695 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="491a5173-ab6a-4abf-932d-93e7b4856b8b" containerName="sg-core" containerID="cri-o://28e3b444eea0f63050d55e27a5abc4c42e289af941a1ae4b77bdb7c9624f71a8" gracePeriod=30 Jan 20 17:56:56 crc kubenswrapper[4558]: I0120 17:56:56.037728 4558 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="491a5173-ab6a-4abf-932d-93e7b4856b8b" containerName="ceilometer-notification-agent" containerID="cri-o://965fc9512076833004f35447a71fb467c1c8fe0081f4bd5ac443f07474f15c19" gracePeriod=30 Jan 20 17:56:56 crc kubenswrapper[4558]: I0120 17:56:56.566646 4558 scope.go:117] "RemoveContainer" containerID="4ea7fa61d8b21007a044345acec89fcebe56c2921cf0f76ff2002f44bdc88ede" Jan 20 17:56:56 crc kubenswrapper[4558]: E0120 17:56:56.567209 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 17:56:56 crc kubenswrapper[4558]: I0120 17:56:56.575856 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="27f0171c-82eb-4da0-9cb6-1e8f3ce1db3d" path="/var/lib/kubelet/pods/27f0171c-82eb-4da0-9cb6-1e8f3ce1db3d/volumes" Jan 20 17:56:56 crc kubenswrapper[4558]: I0120 17:56:56.576511 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7f609e06-c77b-4219-9143-8994fac93c0e" path="/var/lib/kubelet/pods/7f609e06-c77b-4219-9143-8994fac93c0e/volumes" Jan 20 17:56:57 crc kubenswrapper[4558]: I0120 17:56:57.028028 4558 generic.go:334] "Generic (PLEG): container finished" podID="491a5173-ab6a-4abf-932d-93e7b4856b8b" containerID="737858e99577b342734a59a10def7df38917eabf05f581f4f3ca9bf3bb32550b" exitCode=0 Jan 20 17:56:57 crc kubenswrapper[4558]: I0120 17:56:57.028070 4558 generic.go:334] "Generic (PLEG): container finished" podID="491a5173-ab6a-4abf-932d-93e7b4856b8b" containerID="28e3b444eea0f63050d55e27a5abc4c42e289af941a1ae4b77bdb7c9624f71a8" exitCode=2 Jan 20 17:56:57 crc kubenswrapper[4558]: I0120 17:56:57.028078 4558 generic.go:334] "Generic (PLEG): container finished" podID="491a5173-ab6a-4abf-932d-93e7b4856b8b" containerID="c832bcbb9393f9662665b8047449f27e4fcb185bed442d412b8f9e53cfdceaa1" exitCode=0 Jan 20 17:56:57 crc kubenswrapper[4558]: I0120 17:56:57.028119 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"491a5173-ab6a-4abf-932d-93e7b4856b8b","Type":"ContainerDied","Data":"737858e99577b342734a59a10def7df38917eabf05f581f4f3ca9bf3bb32550b"} Jan 20 17:56:57 crc kubenswrapper[4558]: I0120 17:56:57.028150 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"491a5173-ab6a-4abf-932d-93e7b4856b8b","Type":"ContainerDied","Data":"28e3b444eea0f63050d55e27a5abc4c42e289af941a1ae4b77bdb7c9624f71a8"} Jan 20 17:56:57 crc kubenswrapper[4558]: I0120 17:56:57.028182 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"491a5173-ab6a-4abf-932d-93e7b4856b8b","Type":"ContainerDied","Data":"c832bcbb9393f9662665b8047449f27e4fcb185bed442d412b8f9e53cfdceaa1"} Jan 20 17:56:57 crc kubenswrapper[4558]: I0120 17:56:57.030514 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"f5beef7a-89e0-4e53-960b-1cacc665d586","Type":"ContainerStarted","Data":"06a55fe618e77fdc355e098374066b5b94b32c6d040b20a118075fff457d4cea"} Jan 20 17:56:57 crc kubenswrapper[4558]: I0120 17:56:57.036103 4558 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"4e270f26-f899-4007-94bf-ab62080fe4ce","Type":"ContainerStarted","Data":"1fe99cb66e9016b46286062b2b8309256638fd5cb58f35b387cebb419822b3bc"} Jan 20 17:56:57 crc kubenswrapper[4558]: I0120 17:56:57.037050 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:56:57 crc kubenswrapper[4558]: I0120 17:56:57.053759 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-scheduler-0" podStartSLOduration=2.053739429 podStartE2EDuration="2.053739429s" podCreationTimestamp="2026-01-20 17:56:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:56:57.04585031 +0000 UTC m=+4510.806188277" watchObservedRunningTime="2026-01-20 17:56:57.053739429 +0000 UTC m=+4510.814077396" Jan 20 17:56:57 crc kubenswrapper[4558]: I0120 17:56:57.071437 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/kube-state-metrics-0" podStartSLOduration=1.7985883230000002 podStartE2EDuration="2.071414685s" podCreationTimestamp="2026-01-20 17:56:55 +0000 UTC" firstStartedPulling="2026-01-20 17:56:55.953530064 +0000 UTC m=+4509.713868031" lastFinishedPulling="2026-01-20 17:56:56.226356436 +0000 UTC m=+4509.986694393" observedRunningTime="2026-01-20 17:56:57.065490809 +0000 UTC m=+4510.825828776" watchObservedRunningTime="2026-01-20 17:56:57.071414685 +0000 UTC m=+4510.831752652" Jan 20 17:56:59 crc kubenswrapper[4558]: I0120 17:56:59.319767 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:59 crc kubenswrapper[4558]: I0120 17:56:59.320007 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:56:59 crc kubenswrapper[4558]: I0120 17:56:59.481149 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:56:59 crc kubenswrapper[4558]: I0120 17:56:59.612936 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/491a5173-ab6a-4abf-932d-93e7b4856b8b-scripts\") pod \"491a5173-ab6a-4abf-932d-93e7b4856b8b\" (UID: \"491a5173-ab6a-4abf-932d-93e7b4856b8b\") " Jan 20 17:56:59 crc kubenswrapper[4558]: I0120 17:56:59.613091 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k4cfk\" (UniqueName: \"kubernetes.io/projected/491a5173-ab6a-4abf-932d-93e7b4856b8b-kube-api-access-k4cfk\") pod \"491a5173-ab6a-4abf-932d-93e7b4856b8b\" (UID: \"491a5173-ab6a-4abf-932d-93e7b4856b8b\") " Jan 20 17:56:59 crc kubenswrapper[4558]: I0120 17:56:59.613147 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/491a5173-ab6a-4abf-932d-93e7b4856b8b-config-data\") pod \"491a5173-ab6a-4abf-932d-93e7b4856b8b\" (UID: \"491a5173-ab6a-4abf-932d-93e7b4856b8b\") " Jan 20 17:56:59 crc kubenswrapper[4558]: I0120 17:56:59.613207 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/491a5173-ab6a-4abf-932d-93e7b4856b8b-log-httpd\") pod \"491a5173-ab6a-4abf-932d-93e7b4856b8b\" (UID: \"491a5173-ab6a-4abf-932d-93e7b4856b8b\") " Jan 20 17:56:59 crc kubenswrapper[4558]: I0120 17:56:59.613238 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/491a5173-ab6a-4abf-932d-93e7b4856b8b-run-httpd\") pod \"491a5173-ab6a-4abf-932d-93e7b4856b8b\" (UID: \"491a5173-ab6a-4abf-932d-93e7b4856b8b\") " Jan 20 17:56:59 crc kubenswrapper[4558]: I0120 17:56:59.613262 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/491a5173-ab6a-4abf-932d-93e7b4856b8b-sg-core-conf-yaml\") pod \"491a5173-ab6a-4abf-932d-93e7b4856b8b\" (UID: \"491a5173-ab6a-4abf-932d-93e7b4856b8b\") " Jan 20 17:56:59 crc kubenswrapper[4558]: I0120 17:56:59.613318 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/491a5173-ab6a-4abf-932d-93e7b4856b8b-combined-ca-bundle\") pod \"491a5173-ab6a-4abf-932d-93e7b4856b8b\" (UID: \"491a5173-ab6a-4abf-932d-93e7b4856b8b\") " Jan 20 17:56:59 crc kubenswrapper[4558]: I0120 17:56:59.614395 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/491a5173-ab6a-4abf-932d-93e7b4856b8b-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "491a5173-ab6a-4abf-932d-93e7b4856b8b" (UID: "491a5173-ab6a-4abf-932d-93e7b4856b8b"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:56:59 crc kubenswrapper[4558]: I0120 17:56:59.614765 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/491a5173-ab6a-4abf-932d-93e7b4856b8b-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "491a5173-ab6a-4abf-932d-93e7b4856b8b" (UID: "491a5173-ab6a-4abf-932d-93e7b4856b8b"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:56:59 crc kubenswrapper[4558]: I0120 17:56:59.622416 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/491a5173-ab6a-4abf-932d-93e7b4856b8b-kube-api-access-k4cfk" (OuterVolumeSpecName: "kube-api-access-k4cfk") pod "491a5173-ab6a-4abf-932d-93e7b4856b8b" (UID: "491a5173-ab6a-4abf-932d-93e7b4856b8b"). InnerVolumeSpecName "kube-api-access-k4cfk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:56:59 crc kubenswrapper[4558]: I0120 17:56:59.628279 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/491a5173-ab6a-4abf-932d-93e7b4856b8b-scripts" (OuterVolumeSpecName: "scripts") pod "491a5173-ab6a-4abf-932d-93e7b4856b8b" (UID: "491a5173-ab6a-4abf-932d-93e7b4856b8b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:56:59 crc kubenswrapper[4558]: I0120 17:56:59.643054 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/491a5173-ab6a-4abf-932d-93e7b4856b8b-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "491a5173-ab6a-4abf-932d-93e7b4856b8b" (UID: "491a5173-ab6a-4abf-932d-93e7b4856b8b"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:56:59 crc kubenswrapper[4558]: I0120 17:56:59.673327 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/491a5173-ab6a-4abf-932d-93e7b4856b8b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "491a5173-ab6a-4abf-932d-93e7b4856b8b" (UID: "491a5173-ab6a-4abf-932d-93e7b4856b8b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:56:59 crc kubenswrapper[4558]: I0120 17:56:59.710562 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/491a5173-ab6a-4abf-932d-93e7b4856b8b-config-data" (OuterVolumeSpecName: "config-data") pod "491a5173-ab6a-4abf-932d-93e7b4856b8b" (UID: "491a5173-ab6a-4abf-932d-93e7b4856b8b"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:56:59 crc kubenswrapper[4558]: I0120 17:56:59.716587 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/491a5173-ab6a-4abf-932d-93e7b4856b8b-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:59 crc kubenswrapper[4558]: I0120 17:56:59.716623 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k4cfk\" (UniqueName: \"kubernetes.io/projected/491a5173-ab6a-4abf-932d-93e7b4856b8b-kube-api-access-k4cfk\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:59 crc kubenswrapper[4558]: I0120 17:56:59.716636 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/491a5173-ab6a-4abf-932d-93e7b4856b8b-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:59 crc kubenswrapper[4558]: I0120 17:56:59.716649 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/491a5173-ab6a-4abf-932d-93e7b4856b8b-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:59 crc kubenswrapper[4558]: I0120 17:56:59.716659 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/491a5173-ab6a-4abf-932d-93e7b4856b8b-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:59 crc kubenswrapper[4558]: I0120 17:56:59.716669 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/491a5173-ab6a-4abf-932d-93e7b4856b8b-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:56:59 crc kubenswrapper[4558]: I0120 17:56:59.716678 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/491a5173-ab6a-4abf-932d-93e7b4856b8b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:57:00 crc kubenswrapper[4558]: I0120 17:57:00.073486 4558 generic.go:334] "Generic (PLEG): container finished" podID="491a5173-ab6a-4abf-932d-93e7b4856b8b" containerID="965fc9512076833004f35447a71fb467c1c8fe0081f4bd5ac443f07474f15c19" exitCode=0 Jan 20 17:57:00 crc kubenswrapper[4558]: I0120 17:57:00.073538 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"491a5173-ab6a-4abf-932d-93e7b4856b8b","Type":"ContainerDied","Data":"965fc9512076833004f35447a71fb467c1c8fe0081f4bd5ac443f07474f15c19"} Jan 20 17:57:00 crc kubenswrapper[4558]: I0120 17:57:00.073557 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:57:00 crc kubenswrapper[4558]: I0120 17:57:00.073578 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"491a5173-ab6a-4abf-932d-93e7b4856b8b","Type":"ContainerDied","Data":"3fa1517b6de81cc3b5a18c2d8656e7cb280d899df37a40bb8cf2e29849adebff"} Jan 20 17:57:00 crc kubenswrapper[4558]: I0120 17:57:00.073602 4558 scope.go:117] "RemoveContainer" containerID="737858e99577b342734a59a10def7df38917eabf05f581f4f3ca9bf3bb32550b" Jan 20 17:57:00 crc kubenswrapper[4558]: I0120 17:57:00.092940 4558 scope.go:117] "RemoveContainer" containerID="28e3b444eea0f63050d55e27a5abc4c42e289af941a1ae4b77bdb7c9624f71a8" Jan 20 17:57:00 crc kubenswrapper[4558]: I0120 17:57:00.103472 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:57:00 crc kubenswrapper[4558]: I0120 17:57:00.110327 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:57:00 crc kubenswrapper[4558]: I0120 17:57:00.124248 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:57:00 crc kubenswrapper[4558]: E0120 17:57:00.124594 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="491a5173-ab6a-4abf-932d-93e7b4856b8b" containerName="ceilometer-notification-agent" Jan 20 17:57:00 crc kubenswrapper[4558]: I0120 17:57:00.124613 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="491a5173-ab6a-4abf-932d-93e7b4856b8b" containerName="ceilometer-notification-agent" Jan 20 17:57:00 crc kubenswrapper[4558]: E0120 17:57:00.124626 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="491a5173-ab6a-4abf-932d-93e7b4856b8b" containerName="proxy-httpd" Jan 20 17:57:00 crc kubenswrapper[4558]: I0120 17:57:00.124632 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="491a5173-ab6a-4abf-932d-93e7b4856b8b" containerName="proxy-httpd" Jan 20 17:57:00 crc kubenswrapper[4558]: E0120 17:57:00.124659 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="491a5173-ab6a-4abf-932d-93e7b4856b8b" containerName="ceilometer-central-agent" Jan 20 17:57:00 crc kubenswrapper[4558]: I0120 17:57:00.124665 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="491a5173-ab6a-4abf-932d-93e7b4856b8b" containerName="ceilometer-central-agent" Jan 20 17:57:00 crc kubenswrapper[4558]: E0120 17:57:00.124677 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="491a5173-ab6a-4abf-932d-93e7b4856b8b" containerName="sg-core" Jan 20 17:57:00 crc kubenswrapper[4558]: I0120 17:57:00.124682 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="491a5173-ab6a-4abf-932d-93e7b4856b8b" containerName="sg-core" Jan 20 17:57:00 crc kubenswrapper[4558]: I0120 17:57:00.124831 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="491a5173-ab6a-4abf-932d-93e7b4856b8b" containerName="ceilometer-notification-agent" Jan 20 17:57:00 crc kubenswrapper[4558]: I0120 17:57:00.124852 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="491a5173-ab6a-4abf-932d-93e7b4856b8b" containerName="ceilometer-central-agent" Jan 20 17:57:00 crc kubenswrapper[4558]: I0120 17:57:00.124867 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="491a5173-ab6a-4abf-932d-93e7b4856b8b" containerName="proxy-httpd" Jan 20 17:57:00 crc kubenswrapper[4558]: I0120 17:57:00.124878 4558 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="491a5173-ab6a-4abf-932d-93e7b4856b8b" containerName="sg-core" Jan 20 17:57:00 crc kubenswrapper[4558]: I0120 17:57:00.124933 4558 scope.go:117] "RemoveContainer" containerID="965fc9512076833004f35447a71fb467c1c8fe0081f4bd5ac443f07474f15c19" Jan 20 17:57:00 crc kubenswrapper[4558]: I0120 17:57:00.126892 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:57:00 crc kubenswrapper[4558]: I0120 17:57:00.129837 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ceilometer-internal-svc" Jan 20 17:57:00 crc kubenswrapper[4558]: I0120 17:57:00.130034 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:57:00 crc kubenswrapper[4558]: I0120 17:57:00.130188 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:57:00 crc kubenswrapper[4558]: I0120 17:57:00.171907 4558 scope.go:117] "RemoveContainer" containerID="c832bcbb9393f9662665b8047449f27e4fcb185bed442d412b8f9e53cfdceaa1" Jan 20 17:57:00 crc kubenswrapper[4558]: I0120 17:57:00.189000 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:57:00 crc kubenswrapper[4558]: I0120 17:57:00.201298 4558 scope.go:117] "RemoveContainer" containerID="737858e99577b342734a59a10def7df38917eabf05f581f4f3ca9bf3bb32550b" Jan 20 17:57:00 crc kubenswrapper[4558]: E0120 17:57:00.201738 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"737858e99577b342734a59a10def7df38917eabf05f581f4f3ca9bf3bb32550b\": container with ID starting with 737858e99577b342734a59a10def7df38917eabf05f581f4f3ca9bf3bb32550b not found: ID does not exist" containerID="737858e99577b342734a59a10def7df38917eabf05f581f4f3ca9bf3bb32550b" Jan 20 17:57:00 crc kubenswrapper[4558]: I0120 17:57:00.201777 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"737858e99577b342734a59a10def7df38917eabf05f581f4f3ca9bf3bb32550b"} err="failed to get container status \"737858e99577b342734a59a10def7df38917eabf05f581f4f3ca9bf3bb32550b\": rpc error: code = NotFound desc = could not find container \"737858e99577b342734a59a10def7df38917eabf05f581f4f3ca9bf3bb32550b\": container with ID starting with 737858e99577b342734a59a10def7df38917eabf05f581f4f3ca9bf3bb32550b not found: ID does not exist" Jan 20 17:57:00 crc kubenswrapper[4558]: I0120 17:57:00.201804 4558 scope.go:117] "RemoveContainer" containerID="28e3b444eea0f63050d55e27a5abc4c42e289af941a1ae4b77bdb7c9624f71a8" Jan 20 17:57:00 crc kubenswrapper[4558]: E0120 17:57:00.202278 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"28e3b444eea0f63050d55e27a5abc4c42e289af941a1ae4b77bdb7c9624f71a8\": container with ID starting with 28e3b444eea0f63050d55e27a5abc4c42e289af941a1ae4b77bdb7c9624f71a8 not found: ID does not exist" containerID="28e3b444eea0f63050d55e27a5abc4c42e289af941a1ae4b77bdb7c9624f71a8" Jan 20 17:57:00 crc kubenswrapper[4558]: I0120 17:57:00.202323 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"28e3b444eea0f63050d55e27a5abc4c42e289af941a1ae4b77bdb7c9624f71a8"} err="failed to get container status 
\"28e3b444eea0f63050d55e27a5abc4c42e289af941a1ae4b77bdb7c9624f71a8\": rpc error: code = NotFound desc = could not find container \"28e3b444eea0f63050d55e27a5abc4c42e289af941a1ae4b77bdb7c9624f71a8\": container with ID starting with 28e3b444eea0f63050d55e27a5abc4c42e289af941a1ae4b77bdb7c9624f71a8 not found: ID does not exist" Jan 20 17:57:00 crc kubenswrapper[4558]: I0120 17:57:00.202346 4558 scope.go:117] "RemoveContainer" containerID="965fc9512076833004f35447a71fb467c1c8fe0081f4bd5ac443f07474f15c19" Jan 20 17:57:00 crc kubenswrapper[4558]: E0120 17:57:00.202630 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"965fc9512076833004f35447a71fb467c1c8fe0081f4bd5ac443f07474f15c19\": container with ID starting with 965fc9512076833004f35447a71fb467c1c8fe0081f4bd5ac443f07474f15c19 not found: ID does not exist" containerID="965fc9512076833004f35447a71fb467c1c8fe0081f4bd5ac443f07474f15c19" Jan 20 17:57:00 crc kubenswrapper[4558]: I0120 17:57:00.202651 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"965fc9512076833004f35447a71fb467c1c8fe0081f4bd5ac443f07474f15c19"} err="failed to get container status \"965fc9512076833004f35447a71fb467c1c8fe0081f4bd5ac443f07474f15c19\": rpc error: code = NotFound desc = could not find container \"965fc9512076833004f35447a71fb467c1c8fe0081f4bd5ac443f07474f15c19\": container with ID starting with 965fc9512076833004f35447a71fb467c1c8fe0081f4bd5ac443f07474f15c19 not found: ID does not exist" Jan 20 17:57:00 crc kubenswrapper[4558]: I0120 17:57:00.202665 4558 scope.go:117] "RemoveContainer" containerID="c832bcbb9393f9662665b8047449f27e4fcb185bed442d412b8f9e53cfdceaa1" Jan 20 17:57:00 crc kubenswrapper[4558]: E0120 17:57:00.203108 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c832bcbb9393f9662665b8047449f27e4fcb185bed442d412b8f9e53cfdceaa1\": container with ID starting with c832bcbb9393f9662665b8047449f27e4fcb185bed442d412b8f9e53cfdceaa1 not found: ID does not exist" containerID="c832bcbb9393f9662665b8047449f27e4fcb185bed442d412b8f9e53cfdceaa1" Jan 20 17:57:00 crc kubenswrapper[4558]: I0120 17:57:00.203136 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c832bcbb9393f9662665b8047449f27e4fcb185bed442d412b8f9e53cfdceaa1"} err="failed to get container status \"c832bcbb9393f9662665b8047449f27e4fcb185bed442d412b8f9e53cfdceaa1\": rpc error: code = NotFound desc = could not find container \"c832bcbb9393f9662665b8047449f27e4fcb185bed442d412b8f9e53cfdceaa1\": container with ID starting with c832bcbb9393f9662665b8047449f27e4fcb185bed442d412b8f9e53cfdceaa1 not found: ID does not exist" Jan 20 17:57:00 crc kubenswrapper[4558]: I0120 17:57:00.229686 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c659794a-55cf-48b5-8740-1b92d7c8eb2c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c659794a-55cf-48b5-8740-1b92d7c8eb2c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:57:00 crc kubenswrapper[4558]: I0120 17:57:00.229766 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c659794a-55cf-48b5-8740-1b92d7c8eb2c-config-data\") pod \"ceilometer-0\" (UID: \"c659794a-55cf-48b5-8740-1b92d7c8eb2c\") " 
pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:57:00 crc kubenswrapper[4558]: I0120 17:57:00.229834 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c659794a-55cf-48b5-8740-1b92d7c8eb2c-scripts\") pod \"ceilometer-0\" (UID: \"c659794a-55cf-48b5-8740-1b92d7c8eb2c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:57:00 crc kubenswrapper[4558]: I0120 17:57:00.229907 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c659794a-55cf-48b5-8740-1b92d7c8eb2c-run-httpd\") pod \"ceilometer-0\" (UID: \"c659794a-55cf-48b5-8740-1b92d7c8eb2c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:57:00 crc kubenswrapper[4558]: I0120 17:57:00.229950 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/c659794a-55cf-48b5-8740-1b92d7c8eb2c-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"c659794a-55cf-48b5-8740-1b92d7c8eb2c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:57:00 crc kubenswrapper[4558]: I0120 17:57:00.229975 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c659794a-55cf-48b5-8740-1b92d7c8eb2c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c659794a-55cf-48b5-8740-1b92d7c8eb2c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:57:00 crc kubenswrapper[4558]: I0120 17:57:00.230022 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7h2b6\" (UniqueName: \"kubernetes.io/projected/c659794a-55cf-48b5-8740-1b92d7c8eb2c-kube-api-access-7h2b6\") pod \"ceilometer-0\" (UID: \"c659794a-55cf-48b5-8740-1b92d7c8eb2c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:57:00 crc kubenswrapper[4558]: I0120 17:57:00.230043 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c659794a-55cf-48b5-8740-1b92d7c8eb2c-log-httpd\") pod \"ceilometer-0\" (UID: \"c659794a-55cf-48b5-8740-1b92d7c8eb2c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:57:00 crc kubenswrapper[4558]: I0120 17:57:00.332416 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7h2b6\" (UniqueName: \"kubernetes.io/projected/c659794a-55cf-48b5-8740-1b92d7c8eb2c-kube-api-access-7h2b6\") pod \"ceilometer-0\" (UID: \"c659794a-55cf-48b5-8740-1b92d7c8eb2c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:57:00 crc kubenswrapper[4558]: I0120 17:57:00.332470 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c659794a-55cf-48b5-8740-1b92d7c8eb2c-log-httpd\") pod \"ceilometer-0\" (UID: \"c659794a-55cf-48b5-8740-1b92d7c8eb2c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:57:00 crc kubenswrapper[4558]: I0120 17:57:00.332505 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c659794a-55cf-48b5-8740-1b92d7c8eb2c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c659794a-55cf-48b5-8740-1b92d7c8eb2c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:57:00 crc kubenswrapper[4558]: I0120 17:57:00.332554 4558 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c659794a-55cf-48b5-8740-1b92d7c8eb2c-config-data\") pod \"ceilometer-0\" (UID: \"c659794a-55cf-48b5-8740-1b92d7c8eb2c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:57:00 crc kubenswrapper[4558]: I0120 17:57:00.332616 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c659794a-55cf-48b5-8740-1b92d7c8eb2c-scripts\") pod \"ceilometer-0\" (UID: \"c659794a-55cf-48b5-8740-1b92d7c8eb2c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:57:00 crc kubenswrapper[4558]: I0120 17:57:00.332689 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c659794a-55cf-48b5-8740-1b92d7c8eb2c-run-httpd\") pod \"ceilometer-0\" (UID: \"c659794a-55cf-48b5-8740-1b92d7c8eb2c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:57:00 crc kubenswrapper[4558]: I0120 17:57:00.332726 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/c659794a-55cf-48b5-8740-1b92d7c8eb2c-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"c659794a-55cf-48b5-8740-1b92d7c8eb2c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:57:00 crc kubenswrapper[4558]: I0120 17:57:00.332751 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c659794a-55cf-48b5-8740-1b92d7c8eb2c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c659794a-55cf-48b5-8740-1b92d7c8eb2c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:57:00 crc kubenswrapper[4558]: I0120 17:57:00.333291 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c659794a-55cf-48b5-8740-1b92d7c8eb2c-log-httpd\") pod \"ceilometer-0\" (UID: \"c659794a-55cf-48b5-8740-1b92d7c8eb2c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:57:00 crc kubenswrapper[4558]: I0120 17:57:00.333421 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c659794a-55cf-48b5-8740-1b92d7c8eb2c-run-httpd\") pod \"ceilometer-0\" (UID: \"c659794a-55cf-48b5-8740-1b92d7c8eb2c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:57:00 crc kubenswrapper[4558]: I0120 17:57:00.338538 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c659794a-55cf-48b5-8740-1b92d7c8eb2c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c659794a-55cf-48b5-8740-1b92d7c8eb2c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:57:00 crc kubenswrapper[4558]: I0120 17:57:00.339969 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c659794a-55cf-48b5-8740-1b92d7c8eb2c-config-data\") pod \"ceilometer-0\" (UID: \"c659794a-55cf-48b5-8740-1b92d7c8eb2c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:57:00 crc kubenswrapper[4558]: I0120 17:57:00.340243 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/c659794a-55cf-48b5-8740-1b92d7c8eb2c-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"c659794a-55cf-48b5-8740-1b92d7c8eb2c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 
17:57:00 crc kubenswrapper[4558]: I0120 17:57:00.340694 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c659794a-55cf-48b5-8740-1b92d7c8eb2c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c659794a-55cf-48b5-8740-1b92d7c8eb2c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:57:00 crc kubenswrapper[4558]: I0120 17:57:00.341546 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c659794a-55cf-48b5-8740-1b92d7c8eb2c-scripts\") pod \"ceilometer-0\" (UID: \"c659794a-55cf-48b5-8740-1b92d7c8eb2c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:57:00 crc kubenswrapper[4558]: I0120 17:57:00.350603 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7h2b6\" (UniqueName: \"kubernetes.io/projected/c659794a-55cf-48b5-8740-1b92d7c8eb2c-kube-api-access-7h2b6\") pod \"ceilometer-0\" (UID: \"c659794a-55cf-48b5-8740-1b92d7c8eb2c\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:57:00 crc kubenswrapper[4558]: I0120 17:57:00.414843 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:57:00 crc kubenswrapper[4558]: I0120 17:57:00.446955 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:57:00 crc kubenswrapper[4558]: I0120 17:57:00.577022 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="491a5173-ab6a-4abf-932d-93e7b4856b8b" path="/var/lib/kubelet/pods/491a5173-ab6a-4abf-932d-93e7b4856b8b/volumes" Jan 20 17:57:00 crc kubenswrapper[4558]: W0120 17:57:00.997222 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc659794a_55cf_48b5_8740_1b92d7c8eb2c.slice/crio-7834a020ac38080239425b89b008f93dca2e14fc5f18bf695da3ae699cb3c6bc WatchSource:0}: Error finding container 7834a020ac38080239425b89b008f93dca2e14fc5f18bf695da3ae699cb3c6bc: Status 404 returned error can't find the container with id 7834a020ac38080239425b89b008f93dca2e14fc5f18bf695da3ae699cb3c6bc Jan 20 17:57:00 crc kubenswrapper[4558]: I0120 17:57:00.999781 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:57:01 crc kubenswrapper[4558]: I0120 17:57:01.087019 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"c659794a-55cf-48b5-8740-1b92d7c8eb2c","Type":"ContainerStarted","Data":"7834a020ac38080239425b89b008f93dca2e14fc5f18bf695da3ae699cb3c6bc"} Jan 20 17:57:01 crc kubenswrapper[4558]: I0120 17:57:01.568002 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:57:01 crc kubenswrapper[4558]: I0120 17:57:01.568866 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:57:02 crc kubenswrapper[4558]: I0120 17:57:02.103516 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"c659794a-55cf-48b5-8740-1b92d7c8eb2c","Type":"ContainerStarted","Data":"97d74f3ed6580028d3a5a8de298454deb8b9c545fe16c287fcc6f285de1ab5bd"} Jan 20 17:57:02 crc kubenswrapper[4558]: I0120 17:57:02.650997 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" 
podUID="f504f815-d70e-4393-b434-1557ea71cdac" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.162:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 20 17:57:02 crc kubenswrapper[4558]: I0120 17:57:02.651835 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" podUID="f504f815-d70e-4393-b434-1557ea71cdac" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.162:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 20 17:57:03 crc kubenswrapper[4558]: I0120 17:57:03.124690 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"c659794a-55cf-48b5-8740-1b92d7c8eb2c","Type":"ContainerStarted","Data":"9efc73014ed78e45ad45b5d6a320ba6e0cea9b2db49ef41bc5149084b1d804ea"} Jan 20 17:57:04 crc kubenswrapper[4558]: I0120 17:57:04.143681 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"c659794a-55cf-48b5-8740-1b92d7c8eb2c","Type":"ContainerStarted","Data":"6c9337309bee54251b833690ca525e424855e97ceb931770a6f3390ef7dbad64"} Jan 20 17:57:04 crc kubenswrapper[4558]: I0120 17:57:04.320618 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:57:04 crc kubenswrapper[4558]: I0120 17:57:04.320878 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:57:05 crc kubenswrapper[4558]: I0120 17:57:05.160533 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"c659794a-55cf-48b5-8740-1b92d7c8eb2c","Type":"ContainerStarted","Data":"7ab27247c9800640b750e7df5ecd88bd29458871894aeeb70a90f390df5655ef"} Jan 20 17:57:05 crc kubenswrapper[4558]: I0120 17:57:05.161396 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:57:05 crc kubenswrapper[4558]: I0120 17:57:05.188731 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=1.690716141 podStartE2EDuration="5.188708362s" podCreationTimestamp="2026-01-20 17:57:00 +0000 UTC" firstStartedPulling="2026-01-20 17:57:00.999640601 +0000 UTC m=+4514.759978568" lastFinishedPulling="2026-01-20 17:57:04.497632821 +0000 UTC m=+4518.257970789" observedRunningTime="2026-01-20 17:57:05.183136518 +0000 UTC m=+4518.943474486" watchObservedRunningTime="2026-01-20 17:57:05.188708362 +0000 UTC m=+4518.949046329" Jan 20 17:57:05 crc kubenswrapper[4558]: I0120 17:57:05.332315 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-metadata-0" podUID="c8f11f45-bfce-4989-bfa0-684011f74619" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.163:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:57:05 crc kubenswrapper[4558]: I0120 17:57:05.332378 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-metadata-0" podUID="c8f11f45-bfce-4989-bfa0-684011f74619" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.163:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:57:05 crc kubenswrapper[4558]: I0120 17:57:05.414620 4558 kubelet.go:2542] "SyncLoop 
(probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:57:05 crc kubenswrapper[4558]: I0120 17:57:05.445423 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:57:05 crc kubenswrapper[4558]: I0120 17:57:05.481937 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:57:06 crc kubenswrapper[4558]: I0120 17:57:06.206577 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:57:11 crc kubenswrapper[4558]: I0120 17:57:11.565563 4558 scope.go:117] "RemoveContainer" containerID="4ea7fa61d8b21007a044345acec89fcebe56c2921cf0f76ff2002f44bdc88ede" Jan 20 17:57:11 crc kubenswrapper[4558]: I0120 17:57:11.572368 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:57:11 crc kubenswrapper[4558]: I0120 17:57:11.572422 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:57:11 crc kubenswrapper[4558]: I0120 17:57:11.572742 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:57:11 crc kubenswrapper[4558]: I0120 17:57:11.572826 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:57:11 crc kubenswrapper[4558]: I0120 17:57:11.574614 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:57:11 crc kubenswrapper[4558]: I0120 17:57:11.576919 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:57:12 crc kubenswrapper[4558]: I0120 17:57:12.229534 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerStarted","Data":"06773f88c6859f6cd4428c9c5cf74a790c0f5de8a90ecadbaf3e2b35d66f98a5"} Jan 20 17:57:13 crc kubenswrapper[4558]: I0120 17:57:13.616150 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:57:13 crc kubenswrapper[4558]: I0120 17:57:13.616952 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="c659794a-55cf-48b5-8740-1b92d7c8eb2c" containerName="ceilometer-central-agent" containerID="cri-o://97d74f3ed6580028d3a5a8de298454deb8b9c545fe16c287fcc6f285de1ab5bd" gracePeriod=30 Jan 20 17:57:13 crc kubenswrapper[4558]: I0120 17:57:13.617020 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="c659794a-55cf-48b5-8740-1b92d7c8eb2c" containerName="sg-core" containerID="cri-o://6c9337309bee54251b833690ca525e424855e97ceb931770a6f3390ef7dbad64" gracePeriod=30 Jan 20 17:57:13 crc kubenswrapper[4558]: I0120 17:57:13.617027 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="c659794a-55cf-48b5-8740-1b92d7c8eb2c" containerName="ceilometer-notification-agent" containerID="cri-o://9efc73014ed78e45ad45b5d6a320ba6e0cea9b2db49ef41bc5149084b1d804ea" gracePeriod=30 Jan 20 17:57:13 crc kubenswrapper[4558]: I0120 17:57:13.617178 4558 kuberuntime_container.go:808] 
"Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="c659794a-55cf-48b5-8740-1b92d7c8eb2c" containerName="proxy-httpd" containerID="cri-o://7ab27247c9800640b750e7df5ecd88bd29458871894aeeb70a90f390df5655ef" gracePeriod=30 Jan 20 17:57:14 crc kubenswrapper[4558]: I0120 17:57:14.036468 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:57:14 crc kubenswrapper[4558]: I0120 17:57:14.251782 4558 generic.go:334] "Generic (PLEG): container finished" podID="c659794a-55cf-48b5-8740-1b92d7c8eb2c" containerID="7ab27247c9800640b750e7df5ecd88bd29458871894aeeb70a90f390df5655ef" exitCode=0 Jan 20 17:57:14 crc kubenswrapper[4558]: I0120 17:57:14.251815 4558 generic.go:334] "Generic (PLEG): container finished" podID="c659794a-55cf-48b5-8740-1b92d7c8eb2c" containerID="6c9337309bee54251b833690ca525e424855e97ceb931770a6f3390ef7dbad64" exitCode=2 Jan 20 17:57:14 crc kubenswrapper[4558]: I0120 17:57:14.251827 4558 generic.go:334] "Generic (PLEG): container finished" podID="c659794a-55cf-48b5-8740-1b92d7c8eb2c" containerID="97d74f3ed6580028d3a5a8de298454deb8b9c545fe16c287fcc6f285de1ab5bd" exitCode=0 Jan 20 17:57:14 crc kubenswrapper[4558]: I0120 17:57:14.252043 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="f504f815-d70e-4393-b434-1557ea71cdac" containerName="nova-api-log" containerID="cri-o://0bed7a79966310e4d3b7b07fe2039cbcec2b6a9d72c6c1e5851aa950c4d1bf3c" gracePeriod=30 Jan 20 17:57:14 crc kubenswrapper[4558]: I0120 17:57:14.252324 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"c659794a-55cf-48b5-8740-1b92d7c8eb2c","Type":"ContainerDied","Data":"7ab27247c9800640b750e7df5ecd88bd29458871894aeeb70a90f390df5655ef"} Jan 20 17:57:14 crc kubenswrapper[4558]: I0120 17:57:14.252378 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="f504f815-d70e-4393-b434-1557ea71cdac" containerName="nova-api-api" containerID="cri-o://13d3643ad3b5b35641cbd8b9007eb162a4196f10a53748dbe4afc9a2b738ab39" gracePeriod=30 Jan 20 17:57:14 crc kubenswrapper[4558]: I0120 17:57:14.252411 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"c659794a-55cf-48b5-8740-1b92d7c8eb2c","Type":"ContainerDied","Data":"6c9337309bee54251b833690ca525e424855e97ceb931770a6f3390ef7dbad64"} Jan 20 17:57:14 crc kubenswrapper[4558]: I0120 17:57:14.252429 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"c659794a-55cf-48b5-8740-1b92d7c8eb2c","Type":"ContainerDied","Data":"97d74f3ed6580028d3a5a8de298454deb8b9c545fe16c287fcc6f285de1ab5bd"} Jan 20 17:57:14 crc kubenswrapper[4558]: I0120 17:57:14.330464 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:57:14 crc kubenswrapper[4558]: I0120 17:57:14.330733 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:57:14 crc kubenswrapper[4558]: I0120 17:57:14.334911 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:57:14 crc kubenswrapper[4558]: I0120 17:57:14.336454 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:57:15 
crc kubenswrapper[4558]: I0120 17:57:15.267207 4558 generic.go:334] "Generic (PLEG): container finished" podID="f504f815-d70e-4393-b434-1557ea71cdac" containerID="0bed7a79966310e4d3b7b07fe2039cbcec2b6a9d72c6c1e5851aa950c4d1bf3c" exitCode=143 Jan 20 17:57:15 crc kubenswrapper[4558]: I0120 17:57:15.267300 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"f504f815-d70e-4393-b434-1557ea71cdac","Type":"ContainerDied","Data":"0bed7a79966310e4d3b7b07fe2039cbcec2b6a9d72c6c1e5851aa950c4d1bf3c"} Jan 20 17:57:17 crc kubenswrapper[4558]: I0120 17:57:17.290307 4558 generic.go:334] "Generic (PLEG): container finished" podID="c659794a-55cf-48b5-8740-1b92d7c8eb2c" containerID="9efc73014ed78e45ad45b5d6a320ba6e0cea9b2db49ef41bc5149084b1d804ea" exitCode=0 Jan 20 17:57:17 crc kubenswrapper[4558]: I0120 17:57:17.290387 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"c659794a-55cf-48b5-8740-1b92d7c8eb2c","Type":"ContainerDied","Data":"9efc73014ed78e45ad45b5d6a320ba6e0cea9b2db49ef41bc5149084b1d804ea"} Jan 20 17:57:17 crc kubenswrapper[4558]: I0120 17:57:17.376077 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:57:17 crc kubenswrapper[4558]: I0120 17:57:17.567077 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7h2b6\" (UniqueName: \"kubernetes.io/projected/c659794a-55cf-48b5-8740-1b92d7c8eb2c-kube-api-access-7h2b6\") pod \"c659794a-55cf-48b5-8740-1b92d7c8eb2c\" (UID: \"c659794a-55cf-48b5-8740-1b92d7c8eb2c\") " Jan 20 17:57:17 crc kubenswrapper[4558]: I0120 17:57:17.567428 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c659794a-55cf-48b5-8740-1b92d7c8eb2c-config-data\") pod \"c659794a-55cf-48b5-8740-1b92d7c8eb2c\" (UID: \"c659794a-55cf-48b5-8740-1b92d7c8eb2c\") " Jan 20 17:57:17 crc kubenswrapper[4558]: I0120 17:57:17.572687 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c659794a-55cf-48b5-8740-1b92d7c8eb2c-combined-ca-bundle\") pod \"c659794a-55cf-48b5-8740-1b92d7c8eb2c\" (UID: \"c659794a-55cf-48b5-8740-1b92d7c8eb2c\") " Jan 20 17:57:17 crc kubenswrapper[4558]: I0120 17:57:17.573035 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/c659794a-55cf-48b5-8740-1b92d7c8eb2c-ceilometer-tls-certs\") pod \"c659794a-55cf-48b5-8740-1b92d7c8eb2c\" (UID: \"c659794a-55cf-48b5-8740-1b92d7c8eb2c\") " Jan 20 17:57:17 crc kubenswrapper[4558]: I0120 17:57:17.573068 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c659794a-55cf-48b5-8740-1b92d7c8eb2c-scripts\") pod \"c659794a-55cf-48b5-8740-1b92d7c8eb2c\" (UID: \"c659794a-55cf-48b5-8740-1b92d7c8eb2c\") " Jan 20 17:57:17 crc kubenswrapper[4558]: I0120 17:57:17.573619 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c659794a-55cf-48b5-8740-1b92d7c8eb2c-log-httpd\") pod \"c659794a-55cf-48b5-8740-1b92d7c8eb2c\" (UID: \"c659794a-55cf-48b5-8740-1b92d7c8eb2c\") " Jan 20 17:57:17 crc kubenswrapper[4558]: I0120 17:57:17.574181 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/empty-dir/c659794a-55cf-48b5-8740-1b92d7c8eb2c-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "c659794a-55cf-48b5-8740-1b92d7c8eb2c" (UID: "c659794a-55cf-48b5-8740-1b92d7c8eb2c"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:57:17 crc kubenswrapper[4558]: I0120 17:57:17.574563 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c659794a-55cf-48b5-8740-1b92d7c8eb2c-sg-core-conf-yaml\") pod \"c659794a-55cf-48b5-8740-1b92d7c8eb2c\" (UID: \"c659794a-55cf-48b5-8740-1b92d7c8eb2c\") " Jan 20 17:57:17 crc kubenswrapper[4558]: I0120 17:57:17.574612 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c659794a-55cf-48b5-8740-1b92d7c8eb2c-run-httpd\") pod \"c659794a-55cf-48b5-8740-1b92d7c8eb2c\" (UID: \"c659794a-55cf-48b5-8740-1b92d7c8eb2c\") " Jan 20 17:57:17 crc kubenswrapper[4558]: I0120 17:57:17.576305 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c659794a-55cf-48b5-8740-1b92d7c8eb2c-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:57:17 crc kubenswrapper[4558]: I0120 17:57:17.577502 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c659794a-55cf-48b5-8740-1b92d7c8eb2c-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "c659794a-55cf-48b5-8740-1b92d7c8eb2c" (UID: "c659794a-55cf-48b5-8740-1b92d7c8eb2c"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:57:17 crc kubenswrapper[4558]: I0120 17:57:17.596120 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c659794a-55cf-48b5-8740-1b92d7c8eb2c-scripts" (OuterVolumeSpecName: "scripts") pod "c659794a-55cf-48b5-8740-1b92d7c8eb2c" (UID: "c659794a-55cf-48b5-8740-1b92d7c8eb2c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:57:17 crc kubenswrapper[4558]: I0120 17:57:17.596853 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c659794a-55cf-48b5-8740-1b92d7c8eb2c-kube-api-access-7h2b6" (OuterVolumeSpecName: "kube-api-access-7h2b6") pod "c659794a-55cf-48b5-8740-1b92d7c8eb2c" (UID: "c659794a-55cf-48b5-8740-1b92d7c8eb2c"). InnerVolumeSpecName "kube-api-access-7h2b6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:57:17 crc kubenswrapper[4558]: I0120 17:57:17.608958 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c659794a-55cf-48b5-8740-1b92d7c8eb2c-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "c659794a-55cf-48b5-8740-1b92d7c8eb2c" (UID: "c659794a-55cf-48b5-8740-1b92d7c8eb2c"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:57:17 crc kubenswrapper[4558]: I0120 17:57:17.623880 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c659794a-55cf-48b5-8740-1b92d7c8eb2c-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "c659794a-55cf-48b5-8740-1b92d7c8eb2c" (UID: "c659794a-55cf-48b5-8740-1b92d7c8eb2c"). InnerVolumeSpecName "ceilometer-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:57:17 crc kubenswrapper[4558]: I0120 17:57:17.658663 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c659794a-55cf-48b5-8740-1b92d7c8eb2c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c659794a-55cf-48b5-8740-1b92d7c8eb2c" (UID: "c659794a-55cf-48b5-8740-1b92d7c8eb2c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:57:17 crc kubenswrapper[4558]: I0120 17:57:17.663152 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c659794a-55cf-48b5-8740-1b92d7c8eb2c-config-data" (OuterVolumeSpecName: "config-data") pod "c659794a-55cf-48b5-8740-1b92d7c8eb2c" (UID: "c659794a-55cf-48b5-8740-1b92d7c8eb2c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:57:17 crc kubenswrapper[4558]: I0120 17:57:17.679036 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c659794a-55cf-48b5-8740-1b92d7c8eb2c-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:57:17 crc kubenswrapper[4558]: I0120 17:57:17.679065 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7h2b6\" (UniqueName: \"kubernetes.io/projected/c659794a-55cf-48b5-8740-1b92d7c8eb2c-kube-api-access-7h2b6\") on node \"crc\" DevicePath \"\"" Jan 20 17:57:17 crc kubenswrapper[4558]: I0120 17:57:17.679078 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c659794a-55cf-48b5-8740-1b92d7c8eb2c-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:57:17 crc kubenswrapper[4558]: I0120 17:57:17.679091 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c659794a-55cf-48b5-8740-1b92d7c8eb2c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:57:17 crc kubenswrapper[4558]: I0120 17:57:17.679101 4558 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/c659794a-55cf-48b5-8740-1b92d7c8eb2c-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:57:17 crc kubenswrapper[4558]: I0120 17:57:17.679110 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c659794a-55cf-48b5-8740-1b92d7c8eb2c-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:57:17 crc kubenswrapper[4558]: I0120 17:57:17.679118 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c659794a-55cf-48b5-8740-1b92d7c8eb2c-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:57:17 crc kubenswrapper[4558]: I0120 17:57:17.802045 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:57:17 crc kubenswrapper[4558]: I0120 17:57:17.882374 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wbxtv\" (UniqueName: \"kubernetes.io/projected/f504f815-d70e-4393-b434-1557ea71cdac-kube-api-access-wbxtv\") pod \"f504f815-d70e-4393-b434-1557ea71cdac\" (UID: \"f504f815-d70e-4393-b434-1557ea71cdac\") " Jan 20 17:57:17 crc kubenswrapper[4558]: I0120 17:57:17.882546 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f504f815-d70e-4393-b434-1557ea71cdac-combined-ca-bundle\") pod \"f504f815-d70e-4393-b434-1557ea71cdac\" (UID: \"f504f815-d70e-4393-b434-1557ea71cdac\") " Jan 20 17:57:17 crc kubenswrapper[4558]: I0120 17:57:17.882572 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f504f815-d70e-4393-b434-1557ea71cdac-logs\") pod \"f504f815-d70e-4393-b434-1557ea71cdac\" (UID: \"f504f815-d70e-4393-b434-1557ea71cdac\") " Jan 20 17:57:17 crc kubenswrapper[4558]: I0120 17:57:17.882594 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f504f815-d70e-4393-b434-1557ea71cdac-config-data\") pod \"f504f815-d70e-4393-b434-1557ea71cdac\" (UID: \"f504f815-d70e-4393-b434-1557ea71cdac\") " Jan 20 17:57:17 crc kubenswrapper[4558]: I0120 17:57:17.883015 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f504f815-d70e-4393-b434-1557ea71cdac-logs" (OuterVolumeSpecName: "logs") pod "f504f815-d70e-4393-b434-1557ea71cdac" (UID: "f504f815-d70e-4393-b434-1557ea71cdac"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:57:17 crc kubenswrapper[4558]: I0120 17:57:17.886941 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f504f815-d70e-4393-b434-1557ea71cdac-kube-api-access-wbxtv" (OuterVolumeSpecName: "kube-api-access-wbxtv") pod "f504f815-d70e-4393-b434-1557ea71cdac" (UID: "f504f815-d70e-4393-b434-1557ea71cdac"). InnerVolumeSpecName "kube-api-access-wbxtv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:57:17 crc kubenswrapper[4558]: I0120 17:57:17.904423 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f504f815-d70e-4393-b434-1557ea71cdac-config-data" (OuterVolumeSpecName: "config-data") pod "f504f815-d70e-4393-b434-1557ea71cdac" (UID: "f504f815-d70e-4393-b434-1557ea71cdac"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:57:17 crc kubenswrapper[4558]: I0120 17:57:17.906781 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f504f815-d70e-4393-b434-1557ea71cdac-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f504f815-d70e-4393-b434-1557ea71cdac" (UID: "f504f815-d70e-4393-b434-1557ea71cdac"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:57:17 crc kubenswrapper[4558]: I0120 17:57:17.983782 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wbxtv\" (UniqueName: \"kubernetes.io/projected/f504f815-d70e-4393-b434-1557ea71cdac-kube-api-access-wbxtv\") on node \"crc\" DevicePath \"\"" Jan 20 17:57:17 crc kubenswrapper[4558]: I0120 17:57:17.983824 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f504f815-d70e-4393-b434-1557ea71cdac-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:57:17 crc kubenswrapper[4558]: I0120 17:57:17.983836 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f504f815-d70e-4393-b434-1557ea71cdac-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:57:17 crc kubenswrapper[4558]: I0120 17:57:17.983846 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f504f815-d70e-4393-b434-1557ea71cdac-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.301821 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"c659794a-55cf-48b5-8740-1b92d7c8eb2c","Type":"ContainerDied","Data":"7834a020ac38080239425b89b008f93dca2e14fc5f18bf695da3ae699cb3c6bc"} Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.301854 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.301872 4558 scope.go:117] "RemoveContainer" containerID="7ab27247c9800640b750e7df5ecd88bd29458871894aeeb70a90f390df5655ef" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.308269 4558 generic.go:334] "Generic (PLEG): container finished" podID="f504f815-d70e-4393-b434-1557ea71cdac" containerID="13d3643ad3b5b35641cbd8b9007eb162a4196f10a53748dbe4afc9a2b738ab39" exitCode=0 Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.308325 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.308328 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"f504f815-d70e-4393-b434-1557ea71cdac","Type":"ContainerDied","Data":"13d3643ad3b5b35641cbd8b9007eb162a4196f10a53748dbe4afc9a2b738ab39"} Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.308361 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"f504f815-d70e-4393-b434-1557ea71cdac","Type":"ContainerDied","Data":"9c31d822ad903313e6423530591f76386f46f63c99c87dba4045ada5aac205ee"} Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.342219 4558 scope.go:117] "RemoveContainer" containerID="6c9337309bee54251b833690ca525e424855e97ceb931770a6f3390ef7dbad64" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.356841 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.367554 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.376763 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.384236 4558 scope.go:117] "RemoveContainer" containerID="9efc73014ed78e45ad45b5d6a320ba6e0cea9b2db49ef41bc5149084b1d804ea" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.385469 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.392074 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:57:18 crc kubenswrapper[4558]: E0120 17:57:18.392469 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c659794a-55cf-48b5-8740-1b92d7c8eb2c" containerName="ceilometer-notification-agent" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.392487 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c659794a-55cf-48b5-8740-1b92d7c8eb2c" containerName="ceilometer-notification-agent" Jan 20 17:57:18 crc kubenswrapper[4558]: E0120 17:57:18.392506 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c659794a-55cf-48b5-8740-1b92d7c8eb2c" containerName="ceilometer-central-agent" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.392513 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c659794a-55cf-48b5-8740-1b92d7c8eb2c" containerName="ceilometer-central-agent" Jan 20 17:57:18 crc kubenswrapper[4558]: E0120 17:57:18.392529 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f504f815-d70e-4393-b434-1557ea71cdac" containerName="nova-api-api" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.392536 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f504f815-d70e-4393-b434-1557ea71cdac" containerName="nova-api-api" Jan 20 17:57:18 crc kubenswrapper[4558]: E0120 17:57:18.392549 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c659794a-55cf-48b5-8740-1b92d7c8eb2c" containerName="proxy-httpd" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.392555 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c659794a-55cf-48b5-8740-1b92d7c8eb2c" containerName="proxy-httpd" Jan 20 17:57:18 crc kubenswrapper[4558]: E0120 17:57:18.392568 4558 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="c659794a-55cf-48b5-8740-1b92d7c8eb2c" containerName="sg-core" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.392576 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c659794a-55cf-48b5-8740-1b92d7c8eb2c" containerName="sg-core" Jan 20 17:57:18 crc kubenswrapper[4558]: E0120 17:57:18.392603 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f504f815-d70e-4393-b434-1557ea71cdac" containerName="nova-api-log" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.392610 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f504f815-d70e-4393-b434-1557ea71cdac" containerName="nova-api-log" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.392798 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f504f815-d70e-4393-b434-1557ea71cdac" containerName="nova-api-log" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.392815 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f504f815-d70e-4393-b434-1557ea71cdac" containerName="nova-api-api" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.392830 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c659794a-55cf-48b5-8740-1b92d7c8eb2c" containerName="ceilometer-notification-agent" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.392842 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c659794a-55cf-48b5-8740-1b92d7c8eb2c" containerName="sg-core" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.392853 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c659794a-55cf-48b5-8740-1b92d7c8eb2c" containerName="ceilometer-central-agent" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.392867 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c659794a-55cf-48b5-8740-1b92d7c8eb2c" containerName="proxy-httpd" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.394488 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.396532 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-scripts" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.398470 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.400052 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.405079 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-api-config-data" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.405398 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"ceilometer-config-data" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.405614 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-internal-svc" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.405788 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-ceilometer-internal-svc" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.405985 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cert-nova-public-svc" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.406515 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.424610 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.444306 4558 scope.go:117] "RemoveContainer" containerID="97d74f3ed6580028d3a5a8de298454deb8b9c545fe16c287fcc6f285de1ab5bd" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.473850 4558 scope.go:117] "RemoveContainer" containerID="13d3643ad3b5b35641cbd8b9007eb162a4196f10a53748dbe4afc9a2b738ab39" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.493707 4558 scope.go:117] "RemoveContainer" containerID="0bed7a79966310e4d3b7b07fe2039cbcec2b6a9d72c6c1e5851aa950c4d1bf3c" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.511245 4558 scope.go:117] "RemoveContainer" containerID="13d3643ad3b5b35641cbd8b9007eb162a4196f10a53748dbe4afc9a2b738ab39" Jan 20 17:57:18 crc kubenswrapper[4558]: E0120 17:57:18.511726 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"13d3643ad3b5b35641cbd8b9007eb162a4196f10a53748dbe4afc9a2b738ab39\": container with ID starting with 13d3643ad3b5b35641cbd8b9007eb162a4196f10a53748dbe4afc9a2b738ab39 not found: ID does not exist" containerID="13d3643ad3b5b35641cbd8b9007eb162a4196f10a53748dbe4afc9a2b738ab39" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.511766 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"13d3643ad3b5b35641cbd8b9007eb162a4196f10a53748dbe4afc9a2b738ab39"} err="failed to get container status \"13d3643ad3b5b35641cbd8b9007eb162a4196f10a53748dbe4afc9a2b738ab39\": rpc error: code = NotFound desc = could not find container \"13d3643ad3b5b35641cbd8b9007eb162a4196f10a53748dbe4afc9a2b738ab39\": container with ID starting with 13d3643ad3b5b35641cbd8b9007eb162a4196f10a53748dbe4afc9a2b738ab39 not found: ID does not exist" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.511792 4558 scope.go:117] "RemoveContainer" containerID="0bed7a79966310e4d3b7b07fe2039cbcec2b6a9d72c6c1e5851aa950c4d1bf3c" Jan 20 17:57:18 crc kubenswrapper[4558]: E0120 17:57:18.512217 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0bed7a79966310e4d3b7b07fe2039cbcec2b6a9d72c6c1e5851aa950c4d1bf3c\": container with ID starting with 
0bed7a79966310e4d3b7b07fe2039cbcec2b6a9d72c6c1e5851aa950c4d1bf3c not found: ID does not exist" containerID="0bed7a79966310e4d3b7b07fe2039cbcec2b6a9d72c6c1e5851aa950c4d1bf3c" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.512241 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0bed7a79966310e4d3b7b07fe2039cbcec2b6a9d72c6c1e5851aa950c4d1bf3c"} err="failed to get container status \"0bed7a79966310e4d3b7b07fe2039cbcec2b6a9d72c6c1e5851aa950c4d1bf3c\": rpc error: code = NotFound desc = could not find container \"0bed7a79966310e4d3b7b07fe2039cbcec2b6a9d72c6c1e5851aa950c4d1bf3c\": container with ID starting with 0bed7a79966310e4d3b7b07fe2039cbcec2b6a9d72c6c1e5851aa950c4d1bf3c not found: ID does not exist" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.575803 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c659794a-55cf-48b5-8740-1b92d7c8eb2c" path="/var/lib/kubelet/pods/c659794a-55cf-48b5-8740-1b92d7c8eb2c/volumes" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.576590 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f504f815-d70e-4393-b434-1557ea71cdac" path="/var/lib/kubelet/pods/f504f815-d70e-4393-b434-1557ea71cdac/volumes" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.592560 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/680eb8d4-16c8-40ae-bfb6-054fcf23f281-scripts\") pod \"ceilometer-0\" (UID: \"680eb8d4-16c8-40ae-bfb6-054fcf23f281\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.592609 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2q6tn\" (UniqueName: \"kubernetes.io/projected/6f0733ea-bb8a-4c0f-9ea8-edd6f687301e-kube-api-access-2q6tn\") pod \"nova-api-0\" (UID: \"6f0733ea-bb8a-4c0f-9ea8-edd6f687301e\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.592644 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dczqx\" (UniqueName: \"kubernetes.io/projected/680eb8d4-16c8-40ae-bfb6-054fcf23f281-kube-api-access-dczqx\") pod \"ceilometer-0\" (UID: \"680eb8d4-16c8-40ae-bfb6-054fcf23f281\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.592671 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6f0733ea-bb8a-4c0f-9ea8-edd6f687301e-public-tls-certs\") pod \"nova-api-0\" (UID: \"6f0733ea-bb8a-4c0f-9ea8-edd6f687301e\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.593967 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/680eb8d4-16c8-40ae-bfb6-054fcf23f281-log-httpd\") pod \"ceilometer-0\" (UID: \"680eb8d4-16c8-40ae-bfb6-054fcf23f281\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.594249 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/680eb8d4-16c8-40ae-bfb6-054fcf23f281-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"680eb8d4-16c8-40ae-bfb6-054fcf23f281\") " 
pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.594297 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f0733ea-bb8a-4c0f-9ea8-edd6f687301e-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6f0733ea-bb8a-4c0f-9ea8-edd6f687301e\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.594328 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6f0733ea-bb8a-4c0f-9ea8-edd6f687301e-internal-tls-certs\") pod \"nova-api-0\" (UID: \"6f0733ea-bb8a-4c0f-9ea8-edd6f687301e\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.594384 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/680eb8d4-16c8-40ae-bfb6-054fcf23f281-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"680eb8d4-16c8-40ae-bfb6-054fcf23f281\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.594428 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/680eb8d4-16c8-40ae-bfb6-054fcf23f281-run-httpd\") pod \"ceilometer-0\" (UID: \"680eb8d4-16c8-40ae-bfb6-054fcf23f281\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.594452 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/680eb8d4-16c8-40ae-bfb6-054fcf23f281-config-data\") pod \"ceilometer-0\" (UID: \"680eb8d4-16c8-40ae-bfb6-054fcf23f281\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.594480 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f0733ea-bb8a-4c0f-9ea8-edd6f687301e-config-data\") pod \"nova-api-0\" (UID: \"6f0733ea-bb8a-4c0f-9ea8-edd6f687301e\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.594521 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6f0733ea-bb8a-4c0f-9ea8-edd6f687301e-logs\") pod \"nova-api-0\" (UID: \"6f0733ea-bb8a-4c0f-9ea8-edd6f687301e\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.594544 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/680eb8d4-16c8-40ae-bfb6-054fcf23f281-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"680eb8d4-16c8-40ae-bfb6-054fcf23f281\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.696745 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f0733ea-bb8a-4c0f-9ea8-edd6f687301e-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6f0733ea-bb8a-4c0f-9ea8-edd6f687301e\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.697086 4558 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6f0733ea-bb8a-4c0f-9ea8-edd6f687301e-internal-tls-certs\") pod \"nova-api-0\" (UID: \"6f0733ea-bb8a-4c0f-9ea8-edd6f687301e\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.697142 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/680eb8d4-16c8-40ae-bfb6-054fcf23f281-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"680eb8d4-16c8-40ae-bfb6-054fcf23f281\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.697207 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/680eb8d4-16c8-40ae-bfb6-054fcf23f281-run-httpd\") pod \"ceilometer-0\" (UID: \"680eb8d4-16c8-40ae-bfb6-054fcf23f281\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.697236 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/680eb8d4-16c8-40ae-bfb6-054fcf23f281-config-data\") pod \"ceilometer-0\" (UID: \"680eb8d4-16c8-40ae-bfb6-054fcf23f281\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.697269 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f0733ea-bb8a-4c0f-9ea8-edd6f687301e-config-data\") pod \"nova-api-0\" (UID: \"6f0733ea-bb8a-4c0f-9ea8-edd6f687301e\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.697298 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6f0733ea-bb8a-4c0f-9ea8-edd6f687301e-logs\") pod \"nova-api-0\" (UID: \"6f0733ea-bb8a-4c0f-9ea8-edd6f687301e\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.697323 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/680eb8d4-16c8-40ae-bfb6-054fcf23f281-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"680eb8d4-16c8-40ae-bfb6-054fcf23f281\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.697352 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/680eb8d4-16c8-40ae-bfb6-054fcf23f281-scripts\") pod \"ceilometer-0\" (UID: \"680eb8d4-16c8-40ae-bfb6-054fcf23f281\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.697383 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2q6tn\" (UniqueName: \"kubernetes.io/projected/6f0733ea-bb8a-4c0f-9ea8-edd6f687301e-kube-api-access-2q6tn\") pod \"nova-api-0\" (UID: \"6f0733ea-bb8a-4c0f-9ea8-edd6f687301e\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.697419 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dczqx\" (UniqueName: \"kubernetes.io/projected/680eb8d4-16c8-40ae-bfb6-054fcf23f281-kube-api-access-dczqx\") pod \"ceilometer-0\" (UID: \"680eb8d4-16c8-40ae-bfb6-054fcf23f281\") " 
pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.697441 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6f0733ea-bb8a-4c0f-9ea8-edd6f687301e-public-tls-certs\") pod \"nova-api-0\" (UID: \"6f0733ea-bb8a-4c0f-9ea8-edd6f687301e\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.697498 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/680eb8d4-16c8-40ae-bfb6-054fcf23f281-log-httpd\") pod \"ceilometer-0\" (UID: \"680eb8d4-16c8-40ae-bfb6-054fcf23f281\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.697537 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/680eb8d4-16c8-40ae-bfb6-054fcf23f281-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"680eb8d4-16c8-40ae-bfb6-054fcf23f281\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.698410 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/680eb8d4-16c8-40ae-bfb6-054fcf23f281-run-httpd\") pod \"ceilometer-0\" (UID: \"680eb8d4-16c8-40ae-bfb6-054fcf23f281\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.698973 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6f0733ea-bb8a-4c0f-9ea8-edd6f687301e-logs\") pod \"nova-api-0\" (UID: \"6f0733ea-bb8a-4c0f-9ea8-edd6f687301e\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.699744 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/680eb8d4-16c8-40ae-bfb6-054fcf23f281-log-httpd\") pod \"ceilometer-0\" (UID: \"680eb8d4-16c8-40ae-bfb6-054fcf23f281\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.703602 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6f0733ea-bb8a-4c0f-9ea8-edd6f687301e-internal-tls-certs\") pod \"nova-api-0\" (UID: \"6f0733ea-bb8a-4c0f-9ea8-edd6f687301e\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.703732 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f0733ea-bb8a-4c0f-9ea8-edd6f687301e-config-data\") pod \"nova-api-0\" (UID: \"6f0733ea-bb8a-4c0f-9ea8-edd6f687301e\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.704036 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6f0733ea-bb8a-4c0f-9ea8-edd6f687301e-public-tls-certs\") pod \"nova-api-0\" (UID: \"6f0733ea-bb8a-4c0f-9ea8-edd6f687301e\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.704922 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/680eb8d4-16c8-40ae-bfb6-054fcf23f281-config-data\") pod \"ceilometer-0\" (UID: \"680eb8d4-16c8-40ae-bfb6-054fcf23f281\") 
" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.705104 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/680eb8d4-16c8-40ae-bfb6-054fcf23f281-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"680eb8d4-16c8-40ae-bfb6-054fcf23f281\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.705408 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/680eb8d4-16c8-40ae-bfb6-054fcf23f281-scripts\") pod \"ceilometer-0\" (UID: \"680eb8d4-16c8-40ae-bfb6-054fcf23f281\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.706569 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/680eb8d4-16c8-40ae-bfb6-054fcf23f281-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"680eb8d4-16c8-40ae-bfb6-054fcf23f281\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.706735 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/680eb8d4-16c8-40ae-bfb6-054fcf23f281-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"680eb8d4-16c8-40ae-bfb6-054fcf23f281\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.715838 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f0733ea-bb8a-4c0f-9ea8-edd6f687301e-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"6f0733ea-bb8a-4c0f-9ea8-edd6f687301e\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.717632 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2q6tn\" (UniqueName: \"kubernetes.io/projected/6f0733ea-bb8a-4c0f-9ea8-edd6f687301e-kube-api-access-2q6tn\") pod \"nova-api-0\" (UID: \"6f0733ea-bb8a-4c0f-9ea8-edd6f687301e\") " pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.718642 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dczqx\" (UniqueName: \"kubernetes.io/projected/680eb8d4-16c8-40ae-bfb6-054fcf23f281-kube-api-access-dczqx\") pod \"ceilometer-0\" (UID: \"680eb8d4-16c8-40ae-bfb6-054fcf23f281\") " pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.731446 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:57:18 crc kubenswrapper[4558]: I0120 17:57:18.738052 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:57:19 crc kubenswrapper[4558]: I0120 17:57:19.213234 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:57:19 crc kubenswrapper[4558]: I0120 17:57:19.223879 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:57:19 crc kubenswrapper[4558]: I0120 17:57:19.333207 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"680eb8d4-16c8-40ae-bfb6-054fcf23f281","Type":"ContainerStarted","Data":"f6dcb5fc1d0deff15892ff3c64b6895e7b259d9fb06ca065118c08d037815769"} Jan 20 17:57:19 crc kubenswrapper[4558]: I0120 17:57:19.335373 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"6f0733ea-bb8a-4c0f-9ea8-edd6f687301e","Type":"ContainerStarted","Data":"f4ff893712d3918c57eb9ba718a110d56e06d254307060f771a68a95a658c3cf"} Jan 20 17:57:20 crc kubenswrapper[4558]: I0120 17:57:20.350317 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"6f0733ea-bb8a-4c0f-9ea8-edd6f687301e","Type":"ContainerStarted","Data":"9c03143dbfd7f3302ef2dbffcc4cbebbe5dec6b4db2e4e83a24cb243d0feac1c"} Jan 20 17:57:20 crc kubenswrapper[4558]: I0120 17:57:20.350902 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"6f0733ea-bb8a-4c0f-9ea8-edd6f687301e","Type":"ContainerStarted","Data":"3a49860bcfdad30df2f697c1fe78b7c7dd55ae654b13cecc83fb386c4afb2fa4"} Jan 20 17:57:20 crc kubenswrapper[4558]: I0120 17:57:20.352475 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"680eb8d4-16c8-40ae-bfb6-054fcf23f281","Type":"ContainerStarted","Data":"a1c6c829c5d10f6cb194bd9017159b0daa07b6905d16ede8abe89e2a8949198f"} Jan 20 17:57:21 crc kubenswrapper[4558]: I0120 17:57:21.367720 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"680eb8d4-16c8-40ae-bfb6-054fcf23f281","Type":"ContainerStarted","Data":"2b5d5a7da9b64e1d83404cfe438d86d2e219e79fe96bf81cc97e5f2e3a7048ac"} Jan 20 17:57:21 crc kubenswrapper[4558]: I0120 17:57:21.368111 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"680eb8d4-16c8-40ae-bfb6-054fcf23f281","Type":"ContainerStarted","Data":"c1d7abefdf96cc2643c3a9001b8d6be58a4e2428ed5a8dab2d908237b77ba7cc"} Jan 20 17:57:23 crc kubenswrapper[4558]: I0120 17:57:23.388041 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"680eb8d4-16c8-40ae-bfb6-054fcf23f281","Type":"ContainerStarted","Data":"528a68a740a5bc1c67a3fbd8155ae60dbf9ec6b340d2ce96b37c200e6e6de11b"} Jan 20 17:57:23 crc kubenswrapper[4558]: I0120 17:57:23.389583 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:57:23 crc kubenswrapper[4558]: I0120 17:57:23.430046 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-api-0" podStartSLOduration=5.430023522 podStartE2EDuration="5.430023522s" podCreationTimestamp="2026-01-20 17:57:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:57:20.368318207 +0000 UTC m=+4534.128656174" watchObservedRunningTime="2026-01-20 
17:57:23.430023522 +0000 UTC m=+4537.190361499" Jan 20 17:57:23 crc kubenswrapper[4558]: I0120 17:57:23.431321 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ceilometer-0" podStartSLOduration=2.190512587 podStartE2EDuration="5.431312484s" podCreationTimestamp="2026-01-20 17:57:18 +0000 UTC" firstStartedPulling="2026-01-20 17:57:19.215392198 +0000 UTC m=+4532.975730165" lastFinishedPulling="2026-01-20 17:57:22.456192094 +0000 UTC m=+4536.216530062" observedRunningTime="2026-01-20 17:57:23.40667365 +0000 UTC m=+4537.167011618" watchObservedRunningTime="2026-01-20 17:57:23.431312484 +0000 UTC m=+4537.191650452" Jan 20 17:57:28 crc kubenswrapper[4558]: I0120 17:57:28.466225 4558 scope.go:117] "RemoveContainer" containerID="5c52a309d4b859bba6422d3ae6fa7e9422d3b5d97dc982533c7cb8e876b2f184" Jan 20 17:57:28 crc kubenswrapper[4558]: I0120 17:57:28.490848 4558 scope.go:117] "RemoveContainer" containerID="a25982768f5cf2c38ae4bca2414ca592ef39c07924ab9b34e0d46ea3478509b7" Jan 20 17:57:28 crc kubenswrapper[4558]: I0120 17:57:28.522842 4558 scope.go:117] "RemoveContainer" containerID="e67785bf94379651bfb64558a92f8a11e520c0f771013a70d50dbc2e73a7d2fb" Jan 20 17:57:28 crc kubenswrapper[4558]: I0120 17:57:28.546766 4558 scope.go:117] "RemoveContainer" containerID="516d63566e73522c69fe8fb7503a5f9b4f3300f2691369edc89aa5a577b600e5" Jan 20 17:57:28 crc kubenswrapper[4558]: I0120 17:57:28.564521 4558 scope.go:117] "RemoveContainer" containerID="06efd4d354098bbe1c72d647b10d6aecbc2f356d80570ca39ea3a76e7949f322" Jan 20 17:57:28 crc kubenswrapper[4558]: I0120 17:57:28.738313 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:57:28 crc kubenswrapper[4558]: I0120 17:57:28.738398 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:57:29 crc kubenswrapper[4558]: I0120 17:57:29.753322 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" podUID="6f0733ea-bb8a-4c0f-9ea8-edd6f687301e" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.168:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:57:29 crc kubenswrapper[4558]: I0120 17:57:29.753352 4558 prober.go:107] "Probe failed" probeType="Startup" pod="openstack-kuttl-tests/nova-api-0" podUID="6f0733ea-bb8a-4c0f-9ea8-edd6f687301e" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.168:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 20 17:57:38 crc kubenswrapper[4558]: I0120 17:57:38.745559 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:57:38 crc kubenswrapper[4558]: I0120 17:57:38.746292 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:57:38 crc kubenswrapper[4558]: I0120 17:57:38.746567 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:57:38 crc kubenswrapper[4558]: I0120 17:57:38.746618 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:57:38 crc kubenswrapper[4558]: I0120 17:57:38.753454 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-api-0" Jan 20 
17:57:38 crc kubenswrapper[4558]: I0120 17:57:38.754044 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:57:48 crc kubenswrapper[4558]: I0120 17:57:48.738911 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.188487 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/barbican-e4c5-account-create-update-qbwc2"] Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.190420 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-e4c5-account-create-update-qbwc2" Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.194542 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"barbican-db-secret" Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.199289 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/root-account-create-update-x75ng"] Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.200697 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-x75ng" Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.204480 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"openstack-mariadb-root-db-secret" Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.224253 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-0"] Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.229870 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/barbican-e4c5-account-create-update-qbwc2"] Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.241326 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-x75ng"] Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.278065 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/placement-c24c-account-create-update-mxf4v"] Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.302347 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-c24c-account-create-update-mxf4v" Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.305888 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"placement-db-secret" Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.349051 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8b6h5\" (UniqueName: \"kubernetes.io/projected/75a8c03c-5b0a-4144-b1a8-293ce0d50739-kube-api-access-8b6h5\") pod \"root-account-create-update-x75ng\" (UID: \"75a8c03c-5b0a-4144-b1a8-293ce0d50739\") " pod="openstack-kuttl-tests/root-account-create-update-x75ng" Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.349146 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/75a8c03c-5b0a-4144-b1a8-293ce0d50739-operator-scripts\") pod \"root-account-create-update-x75ng\" (UID: \"75a8c03c-5b0a-4144-b1a8-293ce0d50739\") " pod="openstack-kuttl-tests/root-account-create-update-x75ng" Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.349187 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bczvc\" (UniqueName: \"kubernetes.io/projected/5dfca7b5-dd95-4da3-a088-de4d48ea8c10-kube-api-access-bczvc\") pod \"barbican-e4c5-account-create-update-qbwc2\" (UID: \"5dfca7b5-dd95-4da3-a088-de4d48ea8c10\") " pod="openstack-kuttl-tests/barbican-e4c5-account-create-update-qbwc2" Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.349395 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5dfca7b5-dd95-4da3-a088-de4d48ea8c10-operator-scripts\") pod \"barbican-e4c5-account-create-update-qbwc2\" (UID: \"5dfca7b5-dd95-4da3-a088-de4d48ea8c10\") " pod="openstack-kuttl-tests/barbican-e4c5-account-create-update-qbwc2" Jan 20 17:58:04 crc kubenswrapper[4558]: E0120 17:58:04.349995 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Jan 20 17:58:04 crc kubenswrapper[4558]: E0120 17:58:04.350048 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/948fb15d-07f1-4b25-b8d5-7d582024ef28-config-data podName:948fb15d-07f1-4b25-b8d5-7d582024ef28 nodeName:}" failed. No retries permitted until 2026-01-20 17:58:04.850032492 +0000 UTC m=+4578.610370449 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/948fb15d-07f1-4b25-b8d5-7d582024ef28-config-data") pod "rabbitmq-cell1-server-0" (UID: "948fb15d-07f1-4b25-b8d5-7d582024ef28") : configmap "rabbitmq-cell1-config-data" not found Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.418225 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.418801 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/openstackclient" podUID="3834281a-b947-44ad-9390-a2057e7e902d" containerName="openstackclient" containerID="cri-o://a79ee7ec7558132e07cb924fcd12069b0f87fbec2d9e3ff1c1b559b752eaac9f" gracePeriod=2 Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.432211 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-e4c5-account-create-update-hnffc"] Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.453307 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/openstackclient"] Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.454030 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8vd2s\" (UniqueName: \"kubernetes.io/projected/25990a01-f231-4924-a1cc-9207cc5af3c8-kube-api-access-8vd2s\") pod \"placement-c24c-account-create-update-mxf4v\" (UID: \"25990a01-f231-4924-a1cc-9207cc5af3c8\") " pod="openstack-kuttl-tests/placement-c24c-account-create-update-mxf4v" Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.454094 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5dfca7b5-dd95-4da3-a088-de4d48ea8c10-operator-scripts\") pod \"barbican-e4c5-account-create-update-qbwc2\" (UID: \"5dfca7b5-dd95-4da3-a088-de4d48ea8c10\") " pod="openstack-kuttl-tests/barbican-e4c5-account-create-update-qbwc2" Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.454347 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/25990a01-f231-4924-a1cc-9207cc5af3c8-operator-scripts\") pod \"placement-c24c-account-create-update-mxf4v\" (UID: \"25990a01-f231-4924-a1cc-9207cc5af3c8\") " pod="openstack-kuttl-tests/placement-c24c-account-create-update-mxf4v" Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.454420 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8b6h5\" (UniqueName: \"kubernetes.io/projected/75a8c03c-5b0a-4144-b1a8-293ce0d50739-kube-api-access-8b6h5\") pod \"root-account-create-update-x75ng\" (UID: \"75a8c03c-5b0a-4144-b1a8-293ce0d50739\") " pod="openstack-kuttl-tests/root-account-create-update-x75ng" Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.454486 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/75a8c03c-5b0a-4144-b1a8-293ce0d50739-operator-scripts\") pod \"root-account-create-update-x75ng\" (UID: \"75a8c03c-5b0a-4144-b1a8-293ce0d50739\") " pod="openstack-kuttl-tests/root-account-create-update-x75ng" Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.454515 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bczvc\" (UniqueName: 
\"kubernetes.io/projected/5dfca7b5-dd95-4da3-a088-de4d48ea8c10-kube-api-access-bczvc\") pod \"barbican-e4c5-account-create-update-qbwc2\" (UID: \"5dfca7b5-dd95-4da3-a088-de4d48ea8c10\") " pod="openstack-kuttl-tests/barbican-e4c5-account-create-update-qbwc2" Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.455929 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/75a8c03c-5b0a-4144-b1a8-293ce0d50739-operator-scripts\") pod \"root-account-create-update-x75ng\" (UID: \"75a8c03c-5b0a-4144-b1a8-293ce0d50739\") " pod="openstack-kuttl-tests/root-account-create-update-x75ng" Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.456482 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5dfca7b5-dd95-4da3-a088-de4d48ea8c10-operator-scripts\") pod \"barbican-e4c5-account-create-update-qbwc2\" (UID: \"5dfca7b5-dd95-4da3-a088-de4d48ea8c10\") " pod="openstack-kuttl-tests/barbican-e4c5-account-create-update-qbwc2" Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.480517 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-e4c5-account-create-update-hnffc"] Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.486905 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8b6h5\" (UniqueName: \"kubernetes.io/projected/75a8c03c-5b0a-4144-b1a8-293ce0d50739-kube-api-access-8b6h5\") pod \"root-account-create-update-x75ng\" (UID: \"75a8c03c-5b0a-4144-b1a8-293ce0d50739\") " pod="openstack-kuttl-tests/root-account-create-update-x75ng" Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.505629 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bczvc\" (UniqueName: \"kubernetes.io/projected/5dfca7b5-dd95-4da3-a088-de4d48ea8c10-kube-api-access-bczvc\") pod \"barbican-e4c5-account-create-update-qbwc2\" (UID: \"5dfca7b5-dd95-4da3-a088-de4d48ea8c10\") " pod="openstack-kuttl-tests/barbican-e4c5-account-create-update-qbwc2" Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.518735 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-e4c5-account-create-update-qbwc2" Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.521256 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-l5vtd"] Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.530821 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-x75ng" Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.559215 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/25990a01-f231-4924-a1cc-9207cc5af3c8-operator-scripts\") pod \"placement-c24c-account-create-update-mxf4v\" (UID: \"25990a01-f231-4924-a1cc-9207cc5af3c8\") " pod="openstack-kuttl-tests/placement-c24c-account-create-update-mxf4v" Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.559495 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8vd2s\" (UniqueName: \"kubernetes.io/projected/25990a01-f231-4924-a1cc-9207cc5af3c8-kube-api-access-8vd2s\") pod \"placement-c24c-account-create-update-mxf4v\" (UID: \"25990a01-f231-4924-a1cc-9207cc5af3c8\") " pod="openstack-kuttl-tests/placement-c24c-account-create-update-mxf4v" Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.560281 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/25990a01-f231-4924-a1cc-9207cc5af3c8-operator-scripts\") pod \"placement-c24c-account-create-update-mxf4v\" (UID: \"25990a01-f231-4924-a1cc-9207cc5af3c8\") " pod="openstack-kuttl-tests/placement-c24c-account-create-update-mxf4v" Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.602495 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8vd2s\" (UniqueName: \"kubernetes.io/projected/25990a01-f231-4924-a1cc-9207cc5af3c8-kube-api-access-8vd2s\") pod \"placement-c24c-account-create-update-mxf4v\" (UID: \"25990a01-f231-4924-a1cc-9207cc5af3c8\") " pod="openstack-kuttl-tests/placement-c24c-account-create-update-mxf4v" Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.624925 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="81fc72e8-7c47-45cf-8cbd-b77b7b0e9ed5" path="/var/lib/kubelet/pods/81fc72e8-7c47-45cf-8cbd-b77b7b0e9ed5/volumes" Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.625672 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-l5vtd"] Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.625716 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/placement-c24c-account-create-update-mxf4v"] Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.625730 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-c24c-account-create-update-vlgct"] Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.625746 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/placement-c24c-account-create-update-vlgct"] Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.625757 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/glance-94fd-account-create-update-wkrmc"] Jan 20 17:58:04 crc kubenswrapper[4558]: E0120 17:58:04.626092 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3834281a-b947-44ad-9390-a2057e7e902d" containerName="openstackclient" Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.626104 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="3834281a-b947-44ad-9390-a2057e7e902d" containerName="openstackclient" Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.626386 4558 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="3834281a-b947-44ad-9390-a2057e7e902d" containerName="openstackclient" Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.627096 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-94fd-account-create-update-wkrmc" Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.627415 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-c24c-account-create-update-mxf4v" Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.637261 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.637720 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"glance-db-secret" Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.638538 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovsdbserver-nb-0" podUID="57d5060e-37c3-45fb-8eb3-14303daa1751" containerName="openstack-network-exporter" containerID="cri-o://f8def63bbe6d6310ffc3a7c86e33982e2c9a2be82a8a0b0727ca59a14180a4d8" gracePeriod=300 Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.639490 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/glance-94fd-account-create-update-wkrmc"] Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.666840 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.695876 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-api-42d5-account-create-update-6wcjv"] Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.697533 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/cinder-fde5-account-create-update-zmxrm"] Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.698301 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-fde5-account-create-update-zmxrm" Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.701541 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-42d5-account-create-update-6wcjv" Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.703575 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"cinder-db-secret" Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.703737 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-api-db-secret" Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.711496 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/cinder-fde5-account-create-update-zmxrm"] Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.727754 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-api-42d5-account-create-update-6wcjv"] Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.767901 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nzgmg\" (UniqueName: \"kubernetes.io/projected/c9479a19-1b1e-4311-97f5-8922e8602b18-kube-api-access-nzgmg\") pod \"nova-api-42d5-account-create-update-6wcjv\" (UID: \"c9479a19-1b1e-4311-97f5-8922e8602b18\") " pod="openstack-kuttl-tests/nova-api-42d5-account-create-update-6wcjv" Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.768009 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/96cbceb8-1912-4174-8d93-22ab4aa1b152-operator-scripts\") pod \"cinder-fde5-account-create-update-zmxrm\" (UID: \"96cbceb8-1912-4174-8d93-22ab4aa1b152\") " pod="openstack-kuttl-tests/cinder-fde5-account-create-update-zmxrm" Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.768029 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-77n4n\" (UniqueName: \"kubernetes.io/projected/96cbceb8-1912-4174-8d93-22ab4aa1b152-kube-api-access-77n4n\") pod \"cinder-fde5-account-create-update-zmxrm\" (UID: \"96cbceb8-1912-4174-8d93-22ab4aa1b152\") " pod="openstack-kuttl-tests/cinder-fde5-account-create-update-zmxrm" Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.768274 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ce3fd1cf-8efc-4180-b37f-8b562827c8a2-operator-scripts\") pod \"glance-94fd-account-create-update-wkrmc\" (UID: \"ce3fd1cf-8efc-4180-b37f-8b562827c8a2\") " pod="openstack-kuttl-tests/glance-94fd-account-create-update-wkrmc" Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.768326 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c9479a19-1b1e-4311-97f5-8922e8602b18-operator-scripts\") pod \"nova-api-42d5-account-create-update-6wcjv\" (UID: \"c9479a19-1b1e-4311-97f5-8922e8602b18\") " pod="openstack-kuttl-tests/nova-api-42d5-account-create-update-6wcjv" Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.768432 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6ff6g\" (UniqueName: \"kubernetes.io/projected/ce3fd1cf-8efc-4180-b37f-8b562827c8a2-kube-api-access-6ff6g\") pod \"glance-94fd-account-create-update-wkrmc\" (UID: \"ce3fd1cf-8efc-4180-b37f-8b562827c8a2\") " pod="openstack-kuttl-tests/glance-94fd-account-create-update-wkrmc" Jan 20 17:58:04 crc 
kubenswrapper[4558]: E0120 17:58:04.769581 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 20 17:58:04 crc kubenswrapper[4558]: E0120 17:58:04.769653 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/d512bec4-7ed0-43bb-b8fe-0f235f7698e5-config-data podName:d512bec4-7ed0-43bb-b8fe-0f235f7698e5 nodeName:}" failed. No retries permitted until 2026-01-20 17:58:05.269636057 +0000 UTC m=+4579.029974024 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/d512bec4-7ed0-43bb-b8fe-0f235f7698e5-config-data") pod "rabbitmq-server-0" (UID: "d512bec4-7ed0-43bb-b8fe-0f235f7698e5") : configmap "rabbitmq-config-data" not found Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.772683 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-db-sync-5b76z"] Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.808682 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-db-sync-5b76z"] Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.831426 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-94fd-account-create-update-l2nkn"] Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.849875 4558 generic.go:334] "Generic (PLEG): container finished" podID="57d5060e-37c3-45fb-8eb3-14303daa1751" containerID="f8def63bbe6d6310ffc3a7c86e33982e2c9a2be82a8a0b0727ca59a14180a4d8" exitCode=2 Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.850099 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-cell0-5191-account-create-update-cznq6"] Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.856492 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"57d5060e-37c3-45fb-8eb3-14303daa1751","Type":"ContainerDied","Data":"f8def63bbe6d6310ffc3a7c86e33982e2c9a2be82a8a0b0727ca59a14180a4d8"} Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.856592 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-5191-account-create-update-cznq6" Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.857279 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovsdbserver-nb-0" podUID="57d5060e-37c3-45fb-8eb3-14303daa1751" containerName="ovsdbserver-nb" containerID="cri-o://e272403361b0e6ff3e6fc8379ff4809e4f17620cf357e7e67ffbb7d8edea5b98" gracePeriod=300 Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.858481 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell0-db-secret" Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.867218 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-94fd-account-create-update-l2nkn"] Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.870619 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nzgmg\" (UniqueName: \"kubernetes.io/projected/c9479a19-1b1e-4311-97f5-8922e8602b18-kube-api-access-nzgmg\") pod \"nova-api-42d5-account-create-update-6wcjv\" (UID: \"c9479a19-1b1e-4311-97f5-8922e8602b18\") " pod="openstack-kuttl-tests/nova-api-42d5-account-create-update-6wcjv" Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.871123 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/96cbceb8-1912-4174-8d93-22ab4aa1b152-operator-scripts\") pod \"cinder-fde5-account-create-update-zmxrm\" (UID: \"96cbceb8-1912-4174-8d93-22ab4aa1b152\") " pod="openstack-kuttl-tests/cinder-fde5-account-create-update-zmxrm" Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.871147 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-77n4n\" (UniqueName: \"kubernetes.io/projected/96cbceb8-1912-4174-8d93-22ab4aa1b152-kube-api-access-77n4n\") pod \"cinder-fde5-account-create-update-zmxrm\" (UID: \"96cbceb8-1912-4174-8d93-22ab4aa1b152\") " pod="openstack-kuttl-tests/cinder-fde5-account-create-update-zmxrm" Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.871189 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ce3fd1cf-8efc-4180-b37f-8b562827c8a2-operator-scripts\") pod \"glance-94fd-account-create-update-wkrmc\" (UID: \"ce3fd1cf-8efc-4180-b37f-8b562827c8a2\") " pod="openstack-kuttl-tests/glance-94fd-account-create-update-wkrmc" Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.871215 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c9479a19-1b1e-4311-97f5-8922e8602b18-operator-scripts\") pod \"nova-api-42d5-account-create-update-6wcjv\" (UID: \"c9479a19-1b1e-4311-97f5-8922e8602b18\") " pod="openstack-kuttl-tests/nova-api-42d5-account-create-update-6wcjv" Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.871276 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6ff6g\" (UniqueName: \"kubernetes.io/projected/ce3fd1cf-8efc-4180-b37f-8b562827c8a2-kube-api-access-6ff6g\") pod \"glance-94fd-account-create-update-wkrmc\" (UID: \"ce3fd1cf-8efc-4180-b37f-8b562827c8a2\") " pod="openstack-kuttl-tests/glance-94fd-account-create-update-wkrmc" Jan 20 17:58:04 crc kubenswrapper[4558]: E0120 17:58:04.871615 4558 configmap.go:193] Couldn't get configMap 
openstack-kuttl-tests/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Jan 20 17:58:04 crc kubenswrapper[4558]: E0120 17:58:04.871691 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/948fb15d-07f1-4b25-b8d5-7d582024ef28-config-data podName:948fb15d-07f1-4b25-b8d5-7d582024ef28 nodeName:}" failed. No retries permitted until 2026-01-20 17:58:05.871673961 +0000 UTC m=+4579.632011928 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/948fb15d-07f1-4b25-b8d5-7d582024ef28-config-data") pod "rabbitmq-cell1-server-0" (UID: "948fb15d-07f1-4b25-b8d5-7d582024ef28") : configmap "rabbitmq-cell1-config-data" not found Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.871953 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/96cbceb8-1912-4174-8d93-22ab4aa1b152-operator-scripts\") pod \"cinder-fde5-account-create-update-zmxrm\" (UID: \"96cbceb8-1912-4174-8d93-22ab4aa1b152\") " pod="openstack-kuttl-tests/cinder-fde5-account-create-update-zmxrm" Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.872271 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ce3fd1cf-8efc-4180-b37f-8b562827c8a2-operator-scripts\") pod \"glance-94fd-account-create-update-wkrmc\" (UID: \"ce3fd1cf-8efc-4180-b37f-8b562827c8a2\") " pod="openstack-kuttl-tests/glance-94fd-account-create-update-wkrmc" Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.872537 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c9479a19-1b1e-4311-97f5-8922e8602b18-operator-scripts\") pod \"nova-api-42d5-account-create-update-6wcjv\" (UID: \"c9479a19-1b1e-4311-97f5-8922e8602b18\") " pod="openstack-kuttl-tests/nova-api-42d5-account-create-update-6wcjv" Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.880537 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.880960 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovsdbserver-sb-0" podUID="17f6b80e-1961-4c44-a979-9c23bdd59837" containerName="openstack-network-exporter" containerID="cri-o://538bcf70ba0df02b3c9616008ced0cabfe97d3b97ecd1cf7f96d63b4177c5ffa" gracePeriod=300 Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.896574 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-cell0-5191-account-create-update-cznq6"] Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.900025 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6ff6g\" (UniqueName: \"kubernetes.io/projected/ce3fd1cf-8efc-4180-b37f-8b562827c8a2-kube-api-access-6ff6g\") pod \"glance-94fd-account-create-update-wkrmc\" (UID: \"ce3fd1cf-8efc-4180-b37f-8b562827c8a2\") " pod="openstack-kuttl-tests/glance-94fd-account-create-update-wkrmc" Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.903873 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nzgmg\" (UniqueName: \"kubernetes.io/projected/c9479a19-1b1e-4311-97f5-8922e8602b18-kube-api-access-nzgmg\") pod \"nova-api-42d5-account-create-update-6wcjv\" (UID: \"c9479a19-1b1e-4311-97f5-8922e8602b18\") " 
pod="openstack-kuttl-tests/nova-api-42d5-account-create-update-6wcjv" Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.906403 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-fde5-account-create-update-dmprz"] Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.923652 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-77n4n\" (UniqueName: \"kubernetes.io/projected/96cbceb8-1912-4174-8d93-22ab4aa1b152-kube-api-access-77n4n\") pod \"cinder-fde5-account-create-update-zmxrm\" (UID: \"96cbceb8-1912-4174-8d93-22ab4aa1b152\") " pod="openstack-kuttl-tests/cinder-fde5-account-create-update-zmxrm" Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.925919 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-fde5-account-create-update-dmprz"] Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.973936 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8c1a8bdd-f084-4e64-a651-91e76ad6f7d1-operator-scripts\") pod \"nova-cell0-5191-account-create-update-cznq6\" (UID: \"8c1a8bdd-f084-4e64-a651-91e76ad6f7d1\") " pod="openstack-kuttl-tests/nova-cell0-5191-account-create-update-cznq6" Jan 20 17:58:04 crc kubenswrapper[4558]: I0120 17:58:04.974620 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rgmnl\" (UniqueName: \"kubernetes.io/projected/8c1a8bdd-f084-4e64-a651-91e76ad6f7d1-kube-api-access-rgmnl\") pod \"nova-cell0-5191-account-create-update-cznq6\" (UID: \"8c1a8bdd-f084-4e64-a651-91e76ad6f7d1\") " pod="openstack-kuttl-tests/nova-cell0-5191-account-create-update-cznq6" Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.007040 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.007373 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovn-northd-0" podUID="dbdd2687-1110-4dce-a521-19c9337df3a2" containerName="ovn-northd" containerID="cri-o://96d47ec89318825738be6686bad9869d642e9bc40f34628060459ade0aa4b177" gracePeriod=30 Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.007906 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovn-northd-0" podUID="dbdd2687-1110-4dce-a521-19c9337df3a2" containerName="openstack-network-exporter" containerID="cri-o://1c2ffc84d31610214ed641d4c67df414b1a44d6e48fb78c1e2211304b04f696f" gracePeriod=30 Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.024773 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-94fd-account-create-update-wkrmc" Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.040331 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-42d5-account-create-update-ndxkx"] Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.058645 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-42d5-account-create-update-ndxkx"] Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.061737 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/cinder-fde5-account-create-update-zmxrm" Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.066789 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ovsdbserver-sb-0" podUID="17f6b80e-1961-4c44-a979-9c23bdd59837" containerName="ovsdbserver-sb" containerID="cri-o://76d0924a191838385c1332ce3ba83da3d2823bf9b2729355de22ccab98022411" gracePeriod=300 Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.072551 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-42d5-account-create-update-6wcjv" Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.077494 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8c1a8bdd-f084-4e64-a651-91e76ad6f7d1-operator-scripts\") pod \"nova-cell0-5191-account-create-update-cznq6\" (UID: \"8c1a8bdd-f084-4e64-a651-91e76ad6f7d1\") " pod="openstack-kuttl-tests/nova-cell0-5191-account-create-update-cznq6" Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.077536 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rgmnl\" (UniqueName: \"kubernetes.io/projected/8c1a8bdd-f084-4e64-a651-91e76ad6f7d1-kube-api-access-rgmnl\") pod \"nova-cell0-5191-account-create-update-cznq6\" (UID: \"8c1a8bdd-f084-4e64-a651-91e76ad6f7d1\") " pod="openstack-kuttl-tests/nova-cell0-5191-account-create-update-cznq6" Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.078455 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8c1a8bdd-f084-4e64-a651-91e76ad6f7d1-operator-scripts\") pod \"nova-cell0-5191-account-create-update-cznq6\" (UID: \"8c1a8bdd-f084-4e64-a651-91e76ad6f7d1\") " pod="openstack-kuttl-tests/nova-cell0-5191-account-create-update-cznq6" Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.097201 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-db-sync-b22j5"] Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.112862 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rgmnl\" (UniqueName: \"kubernetes.io/projected/8c1a8bdd-f084-4e64-a651-91e76ad6f7d1-kube-api-access-rgmnl\") pod \"nova-cell0-5191-account-create-update-cznq6\" (UID: \"8c1a8bdd-f084-4e64-a651-91e76ad6f7d1\") " pod="openstack-kuttl-tests/nova-cell0-5191-account-create-update-cznq6" Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.116318 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/placement-db-sync-b22j5"] Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.192121 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-5191-account-create-update-t744c"] Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.222435 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-5191-account-create-update-cznq6" Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.298396 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell0-5191-account-create-update-t744c"] Jan 20 17:58:05 crc kubenswrapper[4558]: E0120 17:58:05.317903 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 20 17:58:05 crc kubenswrapper[4558]: E0120 17:58:05.317980 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/d512bec4-7ed0-43bb-b8fe-0f235f7698e5-config-data podName:d512bec4-7ed0-43bb-b8fe-0f235f7698e5 nodeName:}" failed. No retries permitted until 2026-01-20 17:58:06.317964882 +0000 UTC m=+4580.078302848 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/d512bec4-7ed0-43bb-b8fe-0f235f7698e5-config-data") pod "rabbitmq-server-0" (UID: "d512bec4-7ed0-43bb-b8fe-0f235f7698e5") : configmap "rabbitmq-config-data" not found Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.327149 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-db-sync-hsc9c"] Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.352687 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-db-sync-hsc9c"] Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.379861 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-f06e-account-create-update-8mj9b"] Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.402596 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-f06e-account-create-update-8mj9b"] Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.420423 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-db-sync-4h4wr"] Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.438041 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-db-sync-4h4wr"] Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.468900 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-db-sync-7l4r5"] Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.496031 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-db-sync-7l4r5"] Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.513305 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-5b5bd9fb5d-vdljd"] Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.513566 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-5b5bd9fb5d-vdljd" podUID="45d51516-cc3a-473c-bb88-e82d290d36ca" containerName="neutron-api" containerID="cri-o://dfd1e35931513664c3c64c46a6882405ae46386429f436dbbda7a5b30ca884e7" gracePeriod=30 Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.514037 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/neutron-5b5bd9fb5d-vdljd" podUID="45d51516-cc3a-473c-bb88-e82d290d36ca" containerName="neutron-httpd" containerID="cri-o://ecb66a22f0932155a427ab286233ffe4a3ffce78bb66e6be213471c0db633735" gracePeriod=30 Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.546222 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack-kuttl-tests/nova-cell1-cell-mapping-kn9ww"] Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.559729 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-cell-mapping-kn9ww"] Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.560982 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-cell-mapping-l8mfx"] Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.569780 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell0-cell-mapping-l8mfx"] Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.733362 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-storage-0"] Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.734448 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" containerName="account-server" containerID="cri-o://3d23b588456f64012fb1649e6aa289cb457c88d5ad66f1354c313644bf1a8a97" gracePeriod=30 Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.735036 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" containerName="swift-recon-cron" containerID="cri-o://a7e5224c23551340586e780af89b39957adde43c70c7ae7717575e2e550d32e6" gracePeriod=30 Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.735090 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" containerName="rsync" containerID="cri-o://49dc4ba201bbec0d9ff9d16fd2d2b2c98c88e6f20020f744fca0291f9f36c748" gracePeriod=30 Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.735124 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" containerName="object-expirer" containerID="cri-o://63b1e89156b08be7c97cc6cd6b0955544bb87d107fd929728f51e6460408e3ae" gracePeriod=30 Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.735209 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" containerName="object-updater" containerID="cri-o://f88ad017f2098e35ed1d0c224b9e2095984e92f4d2ea3271b416a0d3a18de1f0" gracePeriod=30 Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.735255 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" containerName="object-auditor" containerID="cri-o://6d7ee733551241798b4cb21ccf1011d8596e7dc990e0e63f7573c7daa5faee56" gracePeriod=30 Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.735293 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" containerName="object-replicator" containerID="cri-o://92a5f58dc2bcdf82ee4c76417da6d37919bddddc7444ff849574003ec7f86cee" gracePeriod=30 Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.735336 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" containerName="object-server" 
containerID="cri-o://678ea6abd080fbce2550848f03b87a5c11341574e2839169896c148295bf7600" gracePeriod=30 Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.735365 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" containerName="container-updater" containerID="cri-o://91605227d295d861286db724135f07f88f4d737cec1e406e2f078ef2ee97ac12" gracePeriod=30 Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.735393 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" containerName="container-auditor" containerID="cri-o://8a87e25ff9ae7de7c19892601f394f9d10fa27cee39a8831c6bd12dcb8c3655b" gracePeriod=30 Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.735423 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" containerName="container-replicator" containerID="cri-o://75c122801e2470bd8e501bddfa6a0f20a2d2dbaa3393df3997b6e399b869bab6" gracePeriod=30 Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.735451 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" containerName="container-server" containerID="cri-o://593ca97090e4c8e828391df9bb4f851893cd4f22b0b607f43e4655293c6f5980" gracePeriod=30 Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.735478 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" containerName="account-reaper" containerID="cri-o://649d6dfbc7da772af01bf0565e0467026756c4ec2fe3aed3c80157258c3eea09" gracePeriod=30 Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.735507 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" containerName="account-auditor" containerID="cri-o://08c5c660504f209c3b527a903bc2ca2f4a2088161c85be3401c90c3cddbca989" gracePeriod=30 Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.735538 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-storage-0" podUID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" containerName="account-replicator" containerID="cri-o://640b8b31652d22d5361c56898f4108c3cc9d07fa0b6e09405e8ee47586a8b2ee" gracePeriod=30 Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.767262 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-xznwn"] Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.792323 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/swift-ring-rebalance-xznwn"] Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.816917 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-7c7cc56d94-qptzl"] Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.817185 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/placement-7c7cc56d94-qptzl" podUID="50829e14-401d-4958-9ef7-e2a2ef1f4b32" containerName="placement-log" containerID="cri-o://fe2d2dd5529693a11eb03a9bb6bf05aecb6c57bbe0a7f33f65058fc71e14d418" gracePeriod=30 Jan 20 
17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.817434 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/placement-7c7cc56d94-qptzl" podUID="50829e14-401d-4958-9ef7-e2a2ef1f4b32" containerName="placement-api" containerID="cri-o://f43ceb14bd50b1f492df9772660a94cd37c5a26e2941454f5b5fa401d0851c00" gracePeriod=30 Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.827807 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-nb-0_57d5060e-37c3-45fb-8eb3-14303daa1751/ovsdbserver-nb/0.log" Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.827890 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.861546 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-l7p2w"] Jan 20 17:58:05 crc kubenswrapper[4558]: E0120 17:58:05.865250 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57d5060e-37c3-45fb-8eb3-14303daa1751" containerName="openstack-network-exporter" Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.865275 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="57d5060e-37c3-45fb-8eb3-14303daa1751" containerName="openstack-network-exporter" Jan 20 17:58:05 crc kubenswrapper[4558]: E0120 17:58:05.865309 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57d5060e-37c3-45fb-8eb3-14303daa1751" containerName="ovsdbserver-nb" Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.865316 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="57d5060e-37c3-45fb-8eb3-14303daa1751" containerName="ovsdbserver-nb" Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.865523 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="57d5060e-37c3-45fb-8eb3-14303daa1751" containerName="ovsdbserver-nb" Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.865547 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="57d5060e-37c3-45fb-8eb3-14303daa1751" containerName="openstack-network-exporter" Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.868261 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/57d5060e-37c3-45fb-8eb3-14303daa1751-ovsdbserver-nb-tls-certs\") pod \"57d5060e-37c3-45fb-8eb3-14303daa1751\" (UID: \"57d5060e-37c3-45fb-8eb3-14303daa1751\") " Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.868366 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/57d5060e-37c3-45fb-8eb3-14303daa1751-scripts\") pod \"57d5060e-37c3-45fb-8eb3-14303daa1751\" (UID: \"57d5060e-37c3-45fb-8eb3-14303daa1751\") " Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.868450 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-nb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"57d5060e-37c3-45fb-8eb3-14303daa1751\" (UID: \"57d5060e-37c3-45fb-8eb3-14303daa1751\") " Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.868525 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/57d5060e-37c3-45fb-8eb3-14303daa1751-ovsdb-rundir\") pod \"57d5060e-37c3-45fb-8eb3-14303daa1751\" (UID: 
\"57d5060e-37c3-45fb-8eb3-14303daa1751\") " Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.868601 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/57d5060e-37c3-45fb-8eb3-14303daa1751-metrics-certs-tls-certs\") pod \"57d5060e-37c3-45fb-8eb3-14303daa1751\" (UID: \"57d5060e-37c3-45fb-8eb3-14303daa1751\") " Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.868622 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/57d5060e-37c3-45fb-8eb3-14303daa1751-config\") pod \"57d5060e-37c3-45fb-8eb3-14303daa1751\" (UID: \"57d5060e-37c3-45fb-8eb3-14303daa1751\") " Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.868745 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5l7m8\" (UniqueName: \"kubernetes.io/projected/57d5060e-37c3-45fb-8eb3-14303daa1751-kube-api-access-5l7m8\") pod \"57d5060e-37c3-45fb-8eb3-14303daa1751\" (UID: \"57d5060e-37c3-45fb-8eb3-14303daa1751\") " Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.868829 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57d5060e-37c3-45fb-8eb3-14303daa1751-combined-ca-bundle\") pod \"57d5060e-37c3-45fb-8eb3-14303daa1751\" (UID: \"57d5060e-37c3-45fb-8eb3-14303daa1751\") " Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.869855 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/57d5060e-37c3-45fb-8eb3-14303daa1751-scripts" (OuterVolumeSpecName: "scripts") pod "57d5060e-37c3-45fb-8eb3-14303daa1751" (UID: "57d5060e-37c3-45fb-8eb3-14303daa1751"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.870047 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-l7p2w" Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.870627 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/57d5060e-37c3-45fb-8eb3-14303daa1751-config" (OuterVolumeSpecName: "config") pod "57d5060e-37c3-45fb-8eb3-14303daa1751" (UID: "57d5060e-37c3-45fb-8eb3-14303daa1751"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.870933 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57d5060e-37c3-45fb-8eb3-14303daa1751-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "57d5060e-37c3-45fb-8eb3-14303daa1751" (UID: "57d5060e-37c3-45fb-8eb3-14303daa1751"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.895408 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-l7p2w"] Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.898462 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "ovndbcluster-nb-etc-ovn") pod "57d5060e-37c3-45fb-8eb3-14303daa1751" (UID: "57d5060e-37c3-45fb-8eb3-14303daa1751"). InnerVolumeSpecName "local-storage01-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.909594 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-0"] Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.931442 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-nb-0_57d5060e-37c3-45fb-8eb3-14303daa1751/ovsdbserver-nb/0.log" Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.933186 4558 generic.go:334] "Generic (PLEG): container finished" podID="57d5060e-37c3-45fb-8eb3-14303daa1751" containerID="e272403361b0e6ff3e6fc8379ff4809e4f17620cf357e7e67ffbb7d8edea5b98" exitCode=143 Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.933389 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-nb-0" Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.934373 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"57d5060e-37c3-45fb-8eb3-14303daa1751","Type":"ContainerDied","Data":"e272403361b0e6ff3e6fc8379ff4809e4f17620cf357e7e67ffbb7d8edea5b98"} Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.946000 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-nb-0" event={"ID":"57d5060e-37c3-45fb-8eb3-14303daa1751","Type":"ContainerDied","Data":"ce0bc3206601b94f426f90ded30d58f9ccdbe1f621513ee780119ad109066911"} Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.946065 4558 scope.go:117] "RemoveContainer" containerID="f8def63bbe6d6310ffc3a7c86e33982e2c9a2be82a8a0b0727ca59a14180a4d8" Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.947790 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57d5060e-37c3-45fb-8eb3-14303daa1751-kube-api-access-5l7m8" (OuterVolumeSpecName: "kube-api-access-5l7m8") pod "57d5060e-37c3-45fb-8eb3-14303daa1751" (UID: "57d5060e-37c3-45fb-8eb3-14303daa1751"). InnerVolumeSpecName "kube-api-access-5l7m8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.957198 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-db-create-966g7"] Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.970898 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/314a7e85-558d-49d5-b281-52342e3e4c01-config\") pod \"dnsmasq-dnsmasq-84b9f45d47-l7p2w\" (UID: \"314a7e85-558d-49d5-b281-52342e3e4c01\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-l7p2w" Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.970937 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/314a7e85-558d-49d5-b281-52342e3e4c01-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-84b9f45d47-l7p2w\" (UID: \"314a7e85-558d-49d5-b281-52342e3e4c01\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-l7p2w" Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.971026 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bsb7c\" (UniqueName: \"kubernetes.io/projected/314a7e85-558d-49d5-b281-52342e3e4c01-kube-api-access-bsb7c\") pod \"dnsmasq-dnsmasq-84b9f45d47-l7p2w\" (UID: \"314a7e85-558d-49d5-b281-52342e3e4c01\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-l7p2w" Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.971118 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/57d5060e-37c3-45fb-8eb3-14303daa1751-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.971141 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.971150 4558 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/57d5060e-37c3-45fb-8eb3-14303daa1751-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.971176 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/57d5060e-37c3-45fb-8eb3-14303daa1751-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.971186 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5l7m8\" (UniqueName: \"kubernetes.io/projected/57d5060e-37c3-45fb-8eb3-14303daa1751-kube-api-access-5l7m8\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:05 crc kubenswrapper[4558]: E0120 17:58:05.971303 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Jan 20 17:58:05 crc kubenswrapper[4558]: E0120 17:58:05.971364 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/948fb15d-07f1-4b25-b8d5-7d582024ef28-config-data podName:948fb15d-07f1-4b25-b8d5-7d582024ef28 nodeName:}" failed. No retries permitted until 2026-01-20 17:58:07.971344364 +0000 UTC m=+4581.731682331 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/948fb15d-07f1-4b25-b8d5-7d582024ef28-config-data") pod "rabbitmq-cell1-server-0" (UID: "948fb15d-07f1-4b25-b8d5-7d582024ef28") : configmap "rabbitmq-cell1-config-data" not found Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.976137 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/57d5060e-37c3-45fb-8eb3-14303daa1751-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "57d5060e-37c3-45fb-8eb3-14303daa1751" (UID: "57d5060e-37c3-45fb-8eb3-14303daa1751"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.979239 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-db-create-966g7"] Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.980767 4558 generic.go:334] "Generic (PLEG): container finished" podID="45d51516-cc3a-473c-bb88-e82d290d36ca" containerID="ecb66a22f0932155a427ab286233ffe4a3ffce78bb66e6be213471c0db633735" exitCode=0 Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.980830 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-5b5bd9fb5d-vdljd" event={"ID":"45d51516-cc3a-473c-bb88-e82d290d36ca","Type":"ContainerDied","Data":"ecb66a22f0932155a427ab286233ffe4a3ffce78bb66e6be213471c0db633735"} Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.983237 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-sb-0_17f6b80e-1961-4c44-a979-9c23bdd59837/ovsdbserver-sb/0.log" Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.983267 4558 generic.go:334] "Generic (PLEG): container finished" podID="17f6b80e-1961-4c44-a979-9c23bdd59837" containerID="538bcf70ba0df02b3c9616008ced0cabfe97d3b97ecd1cf7f96d63b4177c5ffa" exitCode=2 Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.986611 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"17f6b80e-1961-4c44-a979-9c23bdd59837","Type":"ContainerDied","Data":"538bcf70ba0df02b3c9616008ced0cabfe97d3b97ecd1cf7f96d63b4177c5ffa"} Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.986657 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"17f6b80e-1961-4c44-a979-9c23bdd59837","Type":"ContainerDied","Data":"76d0924a191838385c1332ce3ba83da3d2823bf9b2729355de22ccab98022411"} Jan 20 17:58:05 crc kubenswrapper[4558]: I0120 17:58:05.986707 4558 generic.go:334] "Generic (PLEG): container finished" podID="17f6b80e-1961-4c44-a979-9c23bdd59837" containerID="76d0924a191838385c1332ce3ba83da3d2823bf9b2729355de22ccab98022411" exitCode=143 Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.009403 4558 generic.go:334] "Generic (PLEG): container finished" podID="dbdd2687-1110-4dce-a521-19c9337df3a2" containerID="1c2ffc84d31610214ed641d4c67df414b1a44d6e48fb78c1e2211304b04f696f" exitCode=2 Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.009446 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"dbdd2687-1110-4dce-a521-19c9337df3a2","Type":"ContainerDied","Data":"1c2ffc84d31610214ed641d4c67df414b1a44d6e48fb78c1e2211304b04f696f"} Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.019103 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.019398 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="7dcf074e-8444-4bd2-b1f9-143390e96ef8" containerName="glance-log" containerID="cri-o://d464d4bcdd194c4ee64ccc0190f47e6e0ded24b37d0780e2cc7ded9c8f572a41" gracePeriod=30 Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.019779 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-internal-api-0" podUID="7dcf074e-8444-4bd2-b1f9-143390e96ef8" containerName="glance-httpd" containerID="cri-o://4a4fc0e35d400b1717dbd88e9cb076f22b20999b3e1867032e08446fa50f658e" gracePeriod=30 Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.049203 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" podUID="948fb15d-07f1-4b25-b8d5-7d582024ef28" containerName="rabbitmq" containerID="cri-o://04ae5aafbf49cda71c8764575c7291ff0e2e0ca0bcd2151b6b2cce95d4ba6b14" gracePeriod=604800 Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.069516 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc" Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.075824 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/314a7e85-558d-49d5-b281-52342e3e4c01-config\") pod \"dnsmasq-dnsmasq-84b9f45d47-l7p2w\" (UID: \"314a7e85-558d-49d5-b281-52342e3e4c01\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-l7p2w" Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.075864 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/314a7e85-558d-49d5-b281-52342e3e4c01-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-84b9f45d47-l7p2w\" (UID: \"314a7e85-558d-49d5-b281-52342e3e4c01\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-l7p2w" Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.076008 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bsb7c\" (UniqueName: \"kubernetes.io/projected/314a7e85-558d-49d5-b281-52342e3e4c01-kube-api-access-bsb7c\") pod \"dnsmasq-dnsmasq-84b9f45d47-l7p2w\" (UID: \"314a7e85-558d-49d5-b281-52342e3e4c01\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-l7p2w" Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.076148 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/57d5060e-37c3-45fb-8eb3-14303daa1751-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.076179 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.077388 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/314a7e85-558d-49d5-b281-52342e3e4c01-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-84b9f45d47-l7p2w\" (UID: \"314a7e85-558d-49d5-b281-52342e3e4c01\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-l7p2w" Jan 20 17:58:06 crc 
kubenswrapper[4558]: I0120 17:58:06.077709 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-c1b8-account-create-update-5scts"] Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.093327 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/314a7e85-558d-49d5-b281-52342e3e4c01-config\") pod \"dnsmasq-dnsmasq-84b9f45d47-l7p2w\" (UID: \"314a7e85-558d-49d5-b281-52342e3e4c01\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-l7p2w" Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.097097 4558 scope.go:117] "RemoveContainer" containerID="e272403361b0e6ff3e6fc8379ff4809e4f17620cf357e7e67ffbb7d8edea5b98" Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.103157 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bsb7c\" (UniqueName: \"kubernetes.io/projected/314a7e85-558d-49d5-b281-52342e3e4c01-kube-api-access-bsb7c\") pod \"dnsmasq-dnsmasq-84b9f45d47-l7p2w\" (UID: \"314a7e85-558d-49d5-b281-52342e3e4c01\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-l7p2w" Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.146658 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-sb-0_17f6b80e-1961-4c44-a979-9c23bdd59837/ovsdbserver-sb/0.log" Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.146784 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.173478 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/57d5060e-37c3-45fb-8eb3-14303daa1751-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "57d5060e-37c3-45fb-8eb3-14303daa1751" (UID: "57d5060e-37c3-45fb-8eb3-14303daa1751"). InnerVolumeSpecName "metrics-certs-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.176296 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-c1b8-account-create-update-5scts"] Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.177759 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/17f6b80e-1961-4c44-a979-9c23bdd59837-config\") pod \"17f6b80e-1961-4c44-a979-9c23bdd59837\" (UID: \"17f6b80e-1961-4c44-a979-9c23bdd59837\") " Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.177874 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhjdr\" (UniqueName: \"kubernetes.io/projected/17f6b80e-1961-4c44-a979-9c23bdd59837-kube-api-access-jhjdr\") pod \"17f6b80e-1961-4c44-a979-9c23bdd59837\" (UID: \"17f6b80e-1961-4c44-a979-9c23bdd59837\") " Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.177918 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/17f6b80e-1961-4c44-a979-9c23bdd59837-metrics-certs-tls-certs\") pod \"17f6b80e-1961-4c44-a979-9c23bdd59837\" (UID: \"17f6b80e-1961-4c44-a979-9c23bdd59837\") " Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.177999 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/17f6b80e-1961-4c44-a979-9c23bdd59837-ovsdb-rundir\") pod \"17f6b80e-1961-4c44-a979-9c23bdd59837\" (UID: \"17f6b80e-1961-4c44-a979-9c23bdd59837\") " Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.178066 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/17f6b80e-1961-4c44-a979-9c23bdd59837-ovsdbserver-sb-tls-certs\") pod \"17f6b80e-1961-4c44-a979-9c23bdd59837\" (UID: \"17f6b80e-1961-4c44-a979-9c23bdd59837\") " Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.178157 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/17f6b80e-1961-4c44-a979-9c23bdd59837-scripts\") pod \"17f6b80e-1961-4c44-a979-9c23bdd59837\" (UID: \"17f6b80e-1961-4c44-a979-9c23bdd59837\") " Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.178292 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17f6b80e-1961-4c44-a979-9c23bdd59837-combined-ca-bundle\") pod \"17f6b80e-1961-4c44-a979-9c23bdd59837\" (UID: \"17f6b80e-1961-4c44-a979-9c23bdd59837\") " Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.178321 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndbcluster-sb-etc-ovn\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"17f6b80e-1961-4c44-a979-9c23bdd59837\" (UID: \"17f6b80e-1961-4c44-a979-9c23bdd59837\") " Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.179070 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/17f6b80e-1961-4c44-a979-9c23bdd59837-config" (OuterVolumeSpecName: "config") pod "17f6b80e-1961-4c44-a979-9c23bdd59837" (UID: "17f6b80e-1961-4c44-a979-9c23bdd59837"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.171593 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/57d5060e-37c3-45fb-8eb3-14303daa1751-ovsdbserver-nb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-nb-tls-certs") pod "57d5060e-37c3-45fb-8eb3-14303daa1751" (UID: "57d5060e-37c3-45fb-8eb3-14303daa1751"). InnerVolumeSpecName "ovsdbserver-nb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.179740 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/17f6b80e-1961-4c44-a979-9c23bdd59837-scripts" (OuterVolumeSpecName: "scripts") pod "17f6b80e-1961-4c44-a979-9c23bdd59837" (UID: "17f6b80e-1961-4c44-a979-9c23bdd59837"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.179921 4558 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/57d5060e-37c3-45fb-8eb3-14303daa1751-ovsdbserver-nb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.179959 4558 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/57d5060e-37c3-45fb-8eb3-14303daa1751-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.179974 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/17f6b80e-1961-4c44-a979-9c23bdd59837-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.185007 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/17f6b80e-1961-4c44-a979-9c23bdd59837-ovsdb-rundir" (OuterVolumeSpecName: "ovsdb-rundir") pod "17f6b80e-1961-4c44-a979-9c23bdd59837" (UID: "17f6b80e-1961-4c44-a979-9c23bdd59837"). InnerVolumeSpecName "ovsdb-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.198126 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/17f6b80e-1961-4c44-a979-9c23bdd59837-kube-api-access-jhjdr" (OuterVolumeSpecName: "kube-api-access-jhjdr") pod "17f6b80e-1961-4c44-a979-9c23bdd59837" (UID: "17f6b80e-1961-4c44-a979-9c23bdd59837"). InnerVolumeSpecName "kube-api-access-jhjdr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.203616 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "ovndbcluster-sb-etc-ovn") pod "17f6b80e-1961-4c44-a979-9c23bdd59837" (UID: "17f6b80e-1961-4c44-a979-9c23bdd59837"). InnerVolumeSpecName "local-storage09-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.209793 4558 scope.go:117] "RemoveContainer" containerID="f8def63bbe6d6310ffc3a7c86e33982e2c9a2be82a8a0b0727ca59a14180a4d8" Jan 20 17:58:06 crc kubenswrapper[4558]: E0120 17:58:06.211778 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f8def63bbe6d6310ffc3a7c86e33982e2c9a2be82a8a0b0727ca59a14180a4d8\": container with ID starting with f8def63bbe6d6310ffc3a7c86e33982e2c9a2be82a8a0b0727ca59a14180a4d8 not found: ID does not exist" containerID="f8def63bbe6d6310ffc3a7c86e33982e2c9a2be82a8a0b0727ca59a14180a4d8" Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.212035 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f8def63bbe6d6310ffc3a7c86e33982e2c9a2be82a8a0b0727ca59a14180a4d8"} err="failed to get container status \"f8def63bbe6d6310ffc3a7c86e33982e2c9a2be82a8a0b0727ca59a14180a4d8\": rpc error: code = NotFound desc = could not find container \"f8def63bbe6d6310ffc3a7c86e33982e2c9a2be82a8a0b0727ca59a14180a4d8\": container with ID starting with f8def63bbe6d6310ffc3a7c86e33982e2c9a2be82a8a0b0727ca59a14180a4d8 not found: ID does not exist" Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.212122 4558 scope.go:117] "RemoveContainer" containerID="e272403361b0e6ff3e6fc8379ff4809e4f17620cf357e7e67ffbb7d8edea5b98" Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.212327 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.212785 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="8f5c27dd-2365-49ee-b4bd-b46e952199f7" containerName="glance-log" containerID="cri-o://c8373ab659f997ee81de532a6542773f3920ce134a08cd0ec3ca60979a45eb58" gracePeriod=30 Jan 20 17:58:06 crc kubenswrapper[4558]: E0120 17:58:06.220669 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e272403361b0e6ff3e6fc8379ff4809e4f17620cf357e7e67ffbb7d8edea5b98\": container with ID starting with e272403361b0e6ff3e6fc8379ff4809e4f17620cf357e7e67ffbb7d8edea5b98 not found: ID does not exist" containerID="e272403361b0e6ff3e6fc8379ff4809e4f17620cf357e7e67ffbb7d8edea5b98" Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.220716 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e272403361b0e6ff3e6fc8379ff4809e4f17620cf357e7e67ffbb7d8edea5b98"} err="failed to get container status \"e272403361b0e6ff3e6fc8379ff4809e4f17620cf357e7e67ffbb7d8edea5b98\": rpc error: code = NotFound desc = could not find container \"e272403361b0e6ff3e6fc8379ff4809e4f17620cf357e7e67ffbb7d8edea5b98\": container with ID starting with e272403361b0e6ff3e6fc8379ff4809e4f17620cf357e7e67ffbb7d8edea5b98 not found: ID does not exist" Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.220918 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/glance-default-external-api-0" podUID="8f5c27dd-2365-49ee-b4bd-b46e952199f7" containerName="glance-httpd" containerID="cri-o://bf44882c187617ca00e2141f2d59bb3077ec0bd79f8fd782d703007b103558bc" gracePeriod=30 Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.226252 4558 operation_generator.go:803] 
UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/17f6b80e-1961-4c44-a979-9c23bdd59837-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "17f6b80e-1961-4c44-a979-9c23bdd59837" (UID: "17f6b80e-1961-4c44-a979-9c23bdd59837"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.232766 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.250964 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-scheduler-0" podUID="1481b06c-22ef-4b33-b49c-2c5d6903cbee" containerName="cinder-scheduler" containerID="cri-o://8e9c3b8eaacddd4170f8d9b11bdc908d3994f8a53ba6ad4543a5cf43244fda7c" gracePeriod=30 Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.256005 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-scheduler-0" podUID="1481b06c-22ef-4b33-b49c-2c5d6903cbee" containerName="probe" containerID="cri-o://dc24e5666420136fe5c65594f8305d0aa03d99644a34fc70829cbd42cab6db49" gracePeriod=30 Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.257796 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-l7p2w" Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.267126 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-db-create-2hfmj"] Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.275797 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/placement-db-create-2hfmj"] Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.286028 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-c24c-account-create-update-mxf4v"] Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.295844 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.295637 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhjdr\" (UniqueName: \"kubernetes.io/projected/17f6b80e-1961-4c44-a979-9c23bdd59837-kube-api-access-jhjdr\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.297547 4558 reconciler_common.go:293] "Volume detached for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/17f6b80e-1961-4c44-a979-9c23bdd59837-ovsdb-rundir\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.297560 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/17f6b80e-1961-4c44-a979-9c23bdd59837-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.297573 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17f6b80e-1961-4c44-a979-9c23bdd59837-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.297596 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.298104 4558 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-api-0" podUID="0b0c3280-9b26-4db4-ab16-68016b055a1f" containerName="cinder-api-log" containerID="cri-o://ed6293f7998d052646c129c9cb1fa0e8231d7f0d152e8108e422d497c055e6c9" gracePeriod=30 Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.298545 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/cinder-api-0" podUID="0b0c3280-9b26-4db4-ab16-68016b055a1f" containerName="cinder-api" containerID="cri-o://c77c9b049e2b69bd62c3dcdd5e98b0f9306632a287cc9d5490b8cbb694baaa92" gracePeriod=30 Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.305197 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-db-create-qxgzw"] Jan 20 17:58:06 crc kubenswrapper[4558]: E0120 17:58:06.305360 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:58:06 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:58:06 crc kubenswrapper[4558]: Jan 20 17:58:06 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:58:06 crc kubenswrapper[4558]: Jan 20 17:58:06 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:58:06 crc kubenswrapper[4558]: Jan 20 17:58:06 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:58:06 crc kubenswrapper[4558]: Jan 20 17:58:06 crc kubenswrapper[4558]: if [ -n "barbican" ]; then Jan 20 17:58:06 crc kubenswrapper[4558]: GRANT_DATABASE="barbican" Jan 20 17:58:06 crc kubenswrapper[4558]: else Jan 20 17:58:06 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:58:06 crc kubenswrapper[4558]: fi Jan 20 17:58:06 crc kubenswrapper[4558]: Jan 20 17:58:06 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:58:06 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:58:06 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:58:06 crc kubenswrapper[4558]: # 3. 
create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:58:06 crc kubenswrapper[4558]: # support updates Jan 20 17:58:06 crc kubenswrapper[4558]: Jan 20 17:58:06 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:58:06 crc kubenswrapper[4558]: E0120 17:58:06.305752 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:58:06 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:58:06 crc kubenswrapper[4558]: Jan 20 17:58:06 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:58:06 crc kubenswrapper[4558]: Jan 20 17:58:06 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:58:06 crc kubenswrapper[4558]: Jan 20 17:58:06 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:58:06 crc kubenswrapper[4558]: Jan 20 17:58:06 crc kubenswrapper[4558]: if [ -n "placement" ]; then Jan 20 17:58:06 crc kubenswrapper[4558]: GRANT_DATABASE="placement" Jan 20 17:58:06 crc kubenswrapper[4558]: else Jan 20 17:58:06 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:58:06 crc kubenswrapper[4558]: fi Jan 20 17:58:06 crc kubenswrapper[4558]: Jan 20 17:58:06 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:58:06 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:58:06 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:58:06 crc kubenswrapper[4558]: # 3. 
create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:58:06 crc kubenswrapper[4558]: # support updates Jan 20 17:58:06 crc kubenswrapper[4558]: Jan 20 17:58:06 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:58:06 crc kubenswrapper[4558]: E0120 17:58:06.307298 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"placement-db-secret\\\" not found\"" pod="openstack-kuttl-tests/placement-c24c-account-create-update-mxf4v" podUID="25990a01-f231-4924-a1cc-9207cc5af3c8" Jan 20 17:58:06 crc kubenswrapper[4558]: E0120 17:58:06.307337 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"barbican-db-secret\\\" not found\"" pod="openstack-kuttl-tests/barbican-e4c5-account-create-update-qbwc2" podUID="5dfca7b5-dd95-4da3-a088-de4d48ea8c10" Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.329272 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-db-create-qxgzw"] Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.331947 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.332587 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-e4c5-account-create-update-qbwc2"] Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.343148 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-db-create-k6q4m"] Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.353300 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/17f6b80e-1961-4c44-a979-9c23bdd59837-ovsdbserver-sb-tls-certs" (OuterVolumeSpecName: "ovsdbserver-sb-tls-certs") pod "17f6b80e-1961-4c44-a979-9c23bdd59837" (UID: "17f6b80e-1961-4c44-a979-9c23bdd59837"). InnerVolumeSpecName "ovsdbserver-sb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.362354 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-db-create-k6q4m"] Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.372590 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.372863 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="c8f11f45-bfce-4989-bfa0-684011f74619" containerName="nova-metadata-log" containerID="cri-o://d0304cd10afa9031be3b363bc240ee32e662b09bbf8d0575f8a91d278b93f42b" gracePeriod=30 Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.373377 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-metadata-0" podUID="c8f11f45-bfce-4989-bfa0-684011f74619" containerName="nova-metadata-metadata" containerID="cri-o://8bfec59d12be9eefb33c786c52426fac978276a374abf67c2991cebb7a215c6d" gracePeriod=30 Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.375788 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-94fd-account-create-update-wkrmc"] Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.381483 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-db-create-rzf9r"] Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.393287 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/17f6b80e-1961-4c44-a979-9c23bdd59837-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "17f6b80e-1961-4c44-a979-9c23bdd59837" (UID: "17f6b80e-1961-4c44-a979-9c23bdd59837"). InnerVolumeSpecName "metrics-certs-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.401221 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-db-create-rzf9r"] Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.403192 4558 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/17f6b80e-1961-4c44-a979-9c23bdd59837-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.403209 4558 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/17f6b80e-1961-4c44-a979-9c23bdd59837-ovsdbserver-sb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.403219 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:06 crc kubenswrapper[4558]: E0120 17:58:06.403282 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 20 17:58:06 crc kubenswrapper[4558]: E0120 17:58:06.403328 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/d512bec4-7ed0-43bb-b8fe-0f235f7698e5-config-data podName:d512bec4-7ed0-43bb-b8fe-0f235f7698e5 nodeName:}" failed. No retries permitted until 2026-01-20 17:58:08.403311009 +0000 UTC m=+4582.163648965 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/d512bec4-7ed0-43bb-b8fe-0f235f7698e5-config-data") pod "rabbitmq-server-0" (UID: "d512bec4-7ed0-43bb-b8fe-0f235f7698e5") : configmap "rabbitmq-config-data" not found Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.407949 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-x75ng"] Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.419888 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.420155 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="6f0733ea-bb8a-4c0f-9ea8-edd6f687301e" containerName="nova-api-log" containerID="cri-o://3a49860bcfdad30df2f697c1fe78b7c7dd55ae654b13cecc83fb386c4afb2fa4" gracePeriod=30 Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.421300 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-api-0" podUID="6f0733ea-bb8a-4c0f-9ea8-edd6f687301e" containerName="nova-api-api" containerID="cri-o://9c03143dbfd7f3302ef2dbffcc4cbebbe5dec6b4db2e4e83a24cb243d0feac1c" gracePeriod=30 Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.423530 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-e4c5-account-create-update-qbwc2"] Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.436386 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-db-create-f9xfm"] Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.462145 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell0-db-create-f9xfm"] Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.504802 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-5191-account-create-update-cznq6"] Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.512554 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-fde5-account-create-update-zmxrm"] Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.520913 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:58:06 crc kubenswrapper[4558]: E0120 17:58:06.535456 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:58:06 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:58:06 crc kubenswrapper[4558]: Jan 20 17:58:06 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:58:06 crc kubenswrapper[4558]: Jan 20 17:58:06 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:58:06 crc kubenswrapper[4558]: Jan 20 17:58:06 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:58:06 crc kubenswrapper[4558]: Jan 20 17:58:06 crc kubenswrapper[4558]: if [ -n "glance" ]; then Jan 20 17:58:06 crc kubenswrapper[4558]: GRANT_DATABASE="glance" Jan 20 17:58:06 crc kubenswrapper[4558]: else Jan 20 17:58:06 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:58:06 crc kubenswrapper[4558]: fi Jan 20 
17:58:06 crc kubenswrapper[4558]: Jan 20 17:58:06 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:58:06 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:58:06 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:58:06 crc kubenswrapper[4558]: # 3. create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:58:06 crc kubenswrapper[4558]: # support updates Jan 20 17:58:06 crc kubenswrapper[4558]: Jan 20 17:58:06 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:58:06 crc kubenswrapper[4558]: E0120 17:58:06.536830 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"glance-db-secret\\\" not found\"" pod="openstack-kuttl-tests/glance-94fd-account-create-update-wkrmc" podUID="ce3fd1cf-8efc-4180-b37f-8b562827c8a2" Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.541090 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-c24c-account-create-update-mxf4v"] Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.554965 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-db-create-4n4vn"] Jan 20 17:58:06 crc kubenswrapper[4558]: W0120 17:58:06.583658 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc9479a19_1b1e_4311_97f5_8922e8602b18.slice/crio-8800283c27e298292eeb77f2cff240f0b8adf5a4492206a13ae1e3dfd046d71d WatchSource:0}: Error finding container 8800283c27e298292eeb77f2cff240f0b8adf5a4492206a13ae1e3dfd046d71d: Status 404 returned error can't find the container with id 8800283c27e298292eeb77f2cff240f0b8adf5a4492206a13ae1e3dfd046d71d Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.589021 4558 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." 
pod="openstack-kuttl-tests/nova-cell1-conductor-0" secret="" err="secret \"nova-nova-dockercfg-s7p6w\" not found" Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.647231 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0dc1e7fb-21d3-4077-87c9-f2ede71a561a" path="/var/lib/kubelet/pods/0dc1e7fb-21d3-4077-87c9-f2ede71a561a/volumes" Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.648253 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="19001f5f-e2af-482f-b182-0cf5d3297979" path="/var/lib/kubelet/pods/19001f5f-e2af-482f-b182-0cf5d3297979/volumes" Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.648847 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="39abd72a-05b8-4df0-bd89-3c8f6f566b38" path="/var/lib/kubelet/pods/39abd72a-05b8-4df0-bd89-3c8f6f566b38/volumes" Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.649386 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3da90c4c-591e-4d99-84ed-3e720b2b853a" path="/var/lib/kubelet/pods/3da90c4c-591e-4d99-84ed-3e720b2b853a/volumes" Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.650476 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4571f178-ad12-4a67-a9a4-0e2cca1e2e6f" path="/var/lib/kubelet/pods/4571f178-ad12-4a67-a9a4-0e2cca1e2e6f/volumes" Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.650962 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="492f23ec-9869-4bd8-a6ac-b0643d668a3b" path="/var/lib/kubelet/pods/492f23ec-9869-4bd8-a6ac-b0643d668a3b/volumes" Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.651465 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="673d11aa-a3d9-47e7-a64a-d261d52a8d1b" path="/var/lib/kubelet/pods/673d11aa-a3d9-47e7-a64a-d261d52a8d1b/volumes" Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.658952 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6b50a90e-3159-4824-b752-a8bc047c54d1" path="/var/lib/kubelet/pods/6b50a90e-3159-4824-b752-a8bc047c54d1/volumes" Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.659533 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="78a5052e-9643-4fe6-b908-b6e36974350a" path="/var/lib/kubelet/pods/78a5052e-9643-4fe6-b908-b6e36974350a/volumes" Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.660029 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="85e14934-0ea8-4bb8-9ff6-7f06f89f2ae8" path="/var/lib/kubelet/pods/85e14934-0ea8-4bb8-9ff6-7f06f89f2ae8/volumes" Jan 20 17:58:06 crc kubenswrapper[4558]: E0120 17:58:06.660426 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:58:06 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:58:06 crc kubenswrapper[4558]: Jan 20 17:58:06 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:58:06 crc kubenswrapper[4558]: Jan 20 17:58:06 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:58:06 crc kubenswrapper[4558]: Jan 20 17:58:06 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:58:06 crc kubenswrapper[4558]: Jan 20 17:58:06 crc kubenswrapper[4558]: if [ -n "nova_api" ]; then Jan 
20 17:58:06 crc kubenswrapper[4558]: GRANT_DATABASE="nova_api" Jan 20 17:58:06 crc kubenswrapper[4558]: else Jan 20 17:58:06 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:58:06 crc kubenswrapper[4558]: fi Jan 20 17:58:06 crc kubenswrapper[4558]: Jan 20 17:58:06 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:58:06 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:58:06 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:58:06 crc kubenswrapper[4558]: # 3. create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:58:06 crc kubenswrapper[4558]: # support updates Jan 20 17:58:06 crc kubenswrapper[4558]: Jan 20 17:58:06 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:58:06 crc kubenswrapper[4558]: W0120 17:58:06.660752 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8c1a8bdd_f084_4e64_a651_91e76ad6f7d1.slice/crio-9596b87dc4ac15dd166842b4198c0c0596e7023fbee864ced700714abb6929e7 WatchSource:0}: Error finding container 9596b87dc4ac15dd166842b4198c0c0596e7023fbee864ced700714abb6929e7: Status 404 returned error can't find the container with id 9596b87dc4ac15dd166842b4198c0c0596e7023fbee864ced700714abb6929e7 Jan 20 17:58:06 crc kubenswrapper[4558]: E0120 17:58:06.662543 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"nova-api-db-secret\\\" not found\"" pod="openstack-kuttl-tests/nova-api-42d5-account-create-update-6wcjv" podUID="c9479a19-1b1e-4311-97f5-8922e8602b18" Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.668511 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="89488e78-3aae-4840-9ae3-2c09ca0013be" path="/var/lib/kubelet/pods/89488e78-3aae-4840-9ae3-2c09ca0013be/volumes" Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.669075 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="93caafe5-244a-422b-9fd2-7c02979af792" path="/var/lib/kubelet/pods/93caafe5-244a-422b-9fd2-7c02979af792/volumes" Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.669604 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="962b2c8c-b0ef-4429-8aba-0fb24998839e" path="/var/lib/kubelet/pods/962b2c8c-b0ef-4429-8aba-0fb24998839e/volumes" Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.670102 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0f54b8a-a5cf-4081-b4d6-f759bb1ae331" path="/var/lib/kubelet/pods/a0f54b8a-a5cf-4081-b4d6-f759bb1ae331/volumes" Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.675012 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a698db22-7d59-4bc1-94fd-9f7b86601bb6" path="/var/lib/kubelet/pods/a698db22-7d59-4bc1-94fd-9f7b86601bb6/volumes" Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.675543 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a8e183b4-694a-4ce9-90e9-1ff11d52ff3c" path="/var/lib/kubelet/pods/a8e183b4-694a-4ce9-90e9-1ff11d52ff3c/volumes" Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.676907 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b467edba-4957-4d06-b48f-c88a2a580e82" path="/var/lib/kubelet/pods/b467edba-4957-4d06-b48f-c88a2a580e82/volumes" Jan 20 17:58:06 crc 
kubenswrapper[4558]: I0120 17:58:06.680568 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d89e519c-e68a-48fc-8a7d-09d4d094d251" path="/var/lib/kubelet/pods/d89e519c-e68a-48fc-8a7d-09d4d094d251/volumes" Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.681370 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d8a20e0f-af9e-422e-b8a6-fd37ffc19335" path="/var/lib/kubelet/pods/d8a20e0f-af9e-422e-b8a6-fd37ffc19335/volumes" Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.683040 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ed73bb12-40f2-4d0c-913a-07f22e33b1b4" path="/var/lib/kubelet/pods/ed73bb12-40f2-4d0c-913a-07f22e33b1b4/volumes" Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.689634 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f0abf094-6fa0-45a0-b6f0-eaa4b4b03b55" path="/var/lib/kubelet/pods/f0abf094-6fa0-45a0-b6f0-eaa4b4b03b55/volumes" Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.690219 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f963d040-98b2-46b4-a118-072a80ed0d53" path="/var/lib/kubelet/pods/f963d040-98b2-46b4-a118-072a80ed0d53/volumes" Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.692849 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-db-create-4n4vn"] Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.692878 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-db-create-rmbrd"] Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.694195 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-db-create-rmbrd"] Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.701484 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.706815 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-42d5-account-create-update-6wcjv"] Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.714677 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-nb-0"] Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.719567 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.725690 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-worker-6c8cb46bb9-k2j62"] Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.725969 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-worker-6c8cb46bb9-k2j62" podUID="8a5251c1-60bc-45d7-8524-fd654c09505b" containerName="barbican-worker-log" containerID="cri-o://cc8c4f5e82e163a1cfed41c73a53986199d859e052ef15e033504df9f64fa7a9" gracePeriod=30 Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.726117 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-worker-6c8cb46bb9-k2j62" podUID="8a5251c1-60bc-45d7-8524-fd654c09505b" containerName="barbican-worker" containerID="cri-o://9f47eb91549a4c39117393c154adc8a527a01fad6a90f46a70181a157bb9033a" gracePeriod=30 Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.730716 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack-kuttl-tests/barbican-keystone-listener-58949d94f4-gxkkc"] Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.731016 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-keystone-listener-58949d94f4-gxkkc" podUID="a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91" containerName="barbican-keystone-listener-log" containerID="cri-o://ca4ecb7fa4f5c23eb30d668f5754a6906c2e344a7fe9c1284b2323148a7db0a4" gracePeriod=30 Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.731175 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-keystone-listener-58949d94f4-gxkkc" podUID="a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91" containerName="barbican-keystone-listener" containerID="cri-o://210a0812578a1d65ec60d4f022b3ab7e40e130626b3b2c29f678571f3bef403d" gracePeriod=30 Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.738271 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-api-6bb757fd44-jvx79"] Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.738566 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-api-6bb757fd44-jvx79" podUID="9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7" containerName="barbican-api-log" containerID="cri-o://2f0d5940c1a430547bdac78180588ab9d0ea89c8cd08f0adfb7ebcb602621819" gracePeriod=30 Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.738722 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/barbican-api-6bb757fd44-jvx79" podUID="9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7" containerName="barbican-api" containerID="cri-o://7e84faeb8fb514479840b3b5eeb4711dbbb78f6bac5d538500313eb7bf90d215" gracePeriod=30 Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.741254 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.741474 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" podUID="de8410e3-8de9-4013-b2ed-545ccdff866c" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://f83e1e941e2ebb680c9bcf90480fb830a1ce1ccd57989006602331fa43a28a91" gracePeriod=30 Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.746551 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-94fd-account-create-update-wkrmc"] Jan 20 17:58:06 crc kubenswrapper[4558]: E0120 17:58:06.748314 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/nova-cell1-conductor-config-data: secret "nova-cell1-conductor-config-data" not found Jan 20 17:58:06 crc kubenswrapper[4558]: E0120 17:58:06.748368 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/54cbdea3-8fcc-4d2d-870b-cf3663cfc633-config-data podName:54cbdea3-8fcc-4d2d-870b-cf3663cfc633 nodeName:}" failed. No retries permitted until 2026-01-20 17:58:07.248352416 +0000 UTC m=+4581.008690383 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/54cbdea3-8fcc-4d2d-870b-cf3663cfc633-config-data") pod "nova-cell1-conductor-0" (UID: "54cbdea3-8fcc-4d2d-870b-cf3663cfc633") : secret "nova-cell1-conductor-config-data" not found Jan 20 17:58:06 crc kubenswrapper[4558]: E0120 17:58:06.748410 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:58:06 crc kubenswrapper[4558]: E0120 17:58:06.748432 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/54cbdea3-8fcc-4d2d-870b-cf3663cfc633-combined-ca-bundle podName:54cbdea3-8fcc-4d2d-870b-cf3663cfc633 nodeName:}" failed. No retries permitted until 2026-01-20 17:58:07.248425914 +0000 UTC m=+4581.008763881 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/54cbdea3-8fcc-4d2d-870b-cf3663cfc633-combined-ca-bundle") pod "nova-cell1-conductor-0" (UID: "54cbdea3-8fcc-4d2d-870b-cf3663cfc633") : secret "combined-ca-bundle" not found Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.754865 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-42d5-account-create-update-6wcjv"] Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.760677 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-db-sync-6ltj7"] Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.771413 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-db-sync-6ltj7"] Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.820080 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.835320 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.835822 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="8a1b8bc3-6d56-42f1-99a2-f80ff914e3d8" containerName="nova-cell0-conductor-conductor" containerID="cri-o://5772b7b876d3a0d68ec8e5a68aa617182f064469dac3fc22b2b6ef8493342da2" gracePeriod=30 Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.854952 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-db-sync-rznhk"] Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.865969 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-db-sync-rznhk"] Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.877538 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-5191-account-create-update-cznq6"] Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.883369 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-fde5-account-create-update-zmxrm"] Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.890953 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.891246 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/kube-state-metrics-0" podUID="4e270f26-f899-4007-94bf-ab62080fe4ce" 
containerName="kube-state-metrics" containerID="cri-o://1fe99cb66e9016b46286062b2b8309256638fd5cb58f35b387cebb419822b3bc" gracePeriod=30 Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.913520 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.913763 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="680eb8d4-16c8-40ae-bfb6-054fcf23f281" containerName="ceilometer-central-agent" containerID="cri-o://a1c6c829c5d10f6cb194bd9017159b0daa07b6905d16ede8abe89e2a8949198f" gracePeriod=30 Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.913903 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="680eb8d4-16c8-40ae-bfb6-054fcf23f281" containerName="proxy-httpd" containerID="cri-o://528a68a740a5bc1c67a3fbd8155ae60dbf9ec6b340d2ce96b37c200e6e6de11b" gracePeriod=30 Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.913952 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="680eb8d4-16c8-40ae-bfb6-054fcf23f281" containerName="sg-core" containerID="cri-o://2b5d5a7da9b64e1d83404cfe438d86d2e219e79fe96bf81cc97e5f2e3a7048ac" gracePeriod=30 Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.913998 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/ceilometer-0" podUID="680eb8d4-16c8-40ae-bfb6-054fcf23f281" containerName="ceilometer-notification-agent" containerID="cri-o://c1d7abefdf96cc2643c3a9001b8d6be58a4e2428ed5a8dab2d908237b77ba7cc" gracePeriod=30 Jan 20 17:58:06 crc kubenswrapper[4558]: I0120 17:58:06.939402 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-l7p2w"] Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.027682 4558 generic.go:334] "Generic (PLEG): container finished" podID="0b0c3280-9b26-4db4-ab16-68016b055a1f" containerID="ed6293f7998d052646c129c9cb1fa0e8231d7f0d152e8108e422d497c055e6c9" exitCode=143 Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.027768 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"0b0c3280-9b26-4db4-ab16-68016b055a1f","Type":"ContainerDied","Data":"ed6293f7998d052646c129c9cb1fa0e8231d7f0d152e8108e422d497c055e6c9"} Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.037926 4558 generic.go:334] "Generic (PLEG): container finished" podID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" containerID="49dc4ba201bbec0d9ff9d16fd2d2b2c98c88e6f20020f744fca0291f9f36c748" exitCode=0 Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.037952 4558 generic.go:334] "Generic (PLEG): container finished" podID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" containerID="63b1e89156b08be7c97cc6cd6b0955544bb87d107fd929728f51e6460408e3ae" exitCode=0 Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.037961 4558 generic.go:334] "Generic (PLEG): container finished" podID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" containerID="f88ad017f2098e35ed1d0c224b9e2095984e92f4d2ea3271b416a0d3a18de1f0" exitCode=0 Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.037970 4558 generic.go:334] "Generic (PLEG): container finished" podID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" containerID="6d7ee733551241798b4cb21ccf1011d8596e7dc990e0e63f7573c7daa5faee56" exitCode=0 Jan 20 17:58:07 crc kubenswrapper[4558]: 
I0120 17:58:07.037978 4558 generic.go:334] "Generic (PLEG): container finished" podID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" containerID="92a5f58dc2bcdf82ee4c76417da6d37919bddddc7444ff849574003ec7f86cee" exitCode=0 Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.037994 4558 generic.go:334] "Generic (PLEG): container finished" podID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" containerID="678ea6abd080fbce2550848f03b87a5c11341574e2839169896c148295bf7600" exitCode=0 Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.038000 4558 generic.go:334] "Generic (PLEG): container finished" podID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" containerID="91605227d295d861286db724135f07f88f4d737cec1e406e2f078ef2ee97ac12" exitCode=0 Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.038007 4558 generic.go:334] "Generic (PLEG): container finished" podID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" containerID="8a87e25ff9ae7de7c19892601f394f9d10fa27cee39a8831c6bd12dcb8c3655b" exitCode=0 Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.038014 4558 generic.go:334] "Generic (PLEG): container finished" podID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" containerID="75c122801e2470bd8e501bddfa6a0f20a2d2dbaa3393df3997b6e399b869bab6" exitCode=0 Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.038020 4558 generic.go:334] "Generic (PLEG): container finished" podID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" containerID="593ca97090e4c8e828391df9bb4f851893cd4f22b0b607f43e4655293c6f5980" exitCode=0 Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.038028 4558 generic.go:334] "Generic (PLEG): container finished" podID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" containerID="649d6dfbc7da772af01bf0565e0467026756c4ec2fe3aed3c80157258c3eea09" exitCode=0 Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.038033 4558 generic.go:334] "Generic (PLEG): container finished" podID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" containerID="08c5c660504f209c3b527a903bc2ca2f4a2088161c85be3401c90c3cddbca989" exitCode=0 Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.038039 4558 generic.go:334] "Generic (PLEG): container finished" podID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" containerID="640b8b31652d22d5361c56898f4108c3cc9d07fa0b6e09405e8ee47586a8b2ee" exitCode=0 Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.038045 4558 generic.go:334] "Generic (PLEG): container finished" podID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" containerID="3d23b588456f64012fb1649e6aa289cb457c88d5ad66f1354c313644bf1a8a97" exitCode=0 Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.038084 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"cd300665-4f75-4ed7-9166-4b48e1aeffc4","Type":"ContainerDied","Data":"49dc4ba201bbec0d9ff9d16fd2d2b2c98c88e6f20020f744fca0291f9f36c748"} Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.038108 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"cd300665-4f75-4ed7-9166-4b48e1aeffc4","Type":"ContainerDied","Data":"63b1e89156b08be7c97cc6cd6b0955544bb87d107fd929728f51e6460408e3ae"} Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.038120 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"cd300665-4f75-4ed7-9166-4b48e1aeffc4","Type":"ContainerDied","Data":"f88ad017f2098e35ed1d0c224b9e2095984e92f4d2ea3271b416a0d3a18de1f0"} Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.038130 4558 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"cd300665-4f75-4ed7-9166-4b48e1aeffc4","Type":"ContainerDied","Data":"6d7ee733551241798b4cb21ccf1011d8596e7dc990e0e63f7573c7daa5faee56"} Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.038141 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"cd300665-4f75-4ed7-9166-4b48e1aeffc4","Type":"ContainerDied","Data":"92a5f58dc2bcdf82ee4c76417da6d37919bddddc7444ff849574003ec7f86cee"} Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.038152 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"cd300665-4f75-4ed7-9166-4b48e1aeffc4","Type":"ContainerDied","Data":"678ea6abd080fbce2550848f03b87a5c11341574e2839169896c148295bf7600"} Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.038179 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"cd300665-4f75-4ed7-9166-4b48e1aeffc4","Type":"ContainerDied","Data":"91605227d295d861286db724135f07f88f4d737cec1e406e2f078ef2ee97ac12"} Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.038188 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"cd300665-4f75-4ed7-9166-4b48e1aeffc4","Type":"ContainerDied","Data":"8a87e25ff9ae7de7c19892601f394f9d10fa27cee39a8831c6bd12dcb8c3655b"} Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.038198 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"cd300665-4f75-4ed7-9166-4b48e1aeffc4","Type":"ContainerDied","Data":"75c122801e2470bd8e501bddfa6a0f20a2d2dbaa3393df3997b6e399b869bab6"} Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.038207 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"cd300665-4f75-4ed7-9166-4b48e1aeffc4","Type":"ContainerDied","Data":"593ca97090e4c8e828391df9bb4f851893cd4f22b0b607f43e4655293c6f5980"} Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.038215 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"cd300665-4f75-4ed7-9166-4b48e1aeffc4","Type":"ContainerDied","Data":"649d6dfbc7da772af01bf0565e0467026756c4ec2fe3aed3c80157258c3eea09"} Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.038224 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"cd300665-4f75-4ed7-9166-4b48e1aeffc4","Type":"ContainerDied","Data":"08c5c660504f209c3b527a903bc2ca2f4a2088161c85be3401c90c3cddbca989"} Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.038232 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"cd300665-4f75-4ed7-9166-4b48e1aeffc4","Type":"ContainerDied","Data":"640b8b31652d22d5361c56898f4108c3cc9d07fa0b6e09405e8ee47586a8b2ee"} Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.038240 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"cd300665-4f75-4ed7-9166-4b48e1aeffc4","Type":"ContainerDied","Data":"3d23b588456f64012fb1649e6aa289cb457c88d5ad66f1354c313644bf1a8a97"} Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.045608 4558 generic.go:334] "Generic (PLEG): container finished" podID="50829e14-401d-4958-9ef7-e2a2ef1f4b32" 
containerID="fe2d2dd5529693a11eb03a9bb6bf05aecb6c57bbe0a7f33f65058fc71e14d418" exitCode=143 Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.045703 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-7c7cc56d94-qptzl" event={"ID":"50829e14-401d-4958-9ef7-e2a2ef1f4b32","Type":"ContainerDied","Data":"fe2d2dd5529693a11eb03a9bb6bf05aecb6c57bbe0a7f33f65058fc71e14d418"} Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.049697 4558 generic.go:334] "Generic (PLEG): container finished" podID="8f5c27dd-2365-49ee-b4bd-b46e952199f7" containerID="c8373ab659f997ee81de532a6542773f3920ce134a08cd0ec3ca60979a45eb58" exitCode=143 Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.049772 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"8f5c27dd-2365-49ee-b4bd-b46e952199f7","Type":"ContainerDied","Data":"c8373ab659f997ee81de532a6542773f3920ce134a08cd0ec3ca60979a45eb58"} Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.054481 4558 generic.go:334] "Generic (PLEG): container finished" podID="6f0733ea-bb8a-4c0f-9ea8-edd6f687301e" containerID="3a49860bcfdad30df2f697c1fe78b7c7dd55ae654b13cecc83fb386c4afb2fa4" exitCode=143 Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.054548 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"6f0733ea-bb8a-4c0f-9ea8-edd6f687301e","Type":"ContainerDied","Data":"3a49860bcfdad30df2f697c1fe78b7c7dd55ae654b13cecc83fb386c4afb2fa4"} Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.055842 4558 generic.go:334] "Generic (PLEG): container finished" podID="3834281a-b947-44ad-9390-a2057e7e902d" containerID="a79ee7ec7558132e07cb924fcd12069b0f87fbec2d9e3ff1c1b559b752eaac9f" exitCode=137 Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.058736 4558 generic.go:334] "Generic (PLEG): container finished" podID="7dcf074e-8444-4bd2-b1f9-143390e96ef8" containerID="d464d4bcdd194c4ee64ccc0190f47e6e0ded24b37d0780e2cc7ded9c8f572a41" exitCode=143 Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.058785 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"7dcf074e-8444-4bd2-b1f9-143390e96ef8","Type":"ContainerDied","Data":"d464d4bcdd194c4ee64ccc0190f47e6e0ded24b37d0780e2cc7ded9c8f572a41"} Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.059591 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-x75ng" event={"ID":"75a8c03c-5b0a-4144-b1a8-293ce0d50739","Type":"ContainerStarted","Data":"26276f024cfeb757d03447f36508b63bdeb579bd42eb98c498a7736d6ea9f934"} Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.060489 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-42d5-account-create-update-6wcjv" event={"ID":"c9479a19-1b1e-4311-97f5-8922e8602b18","Type":"ContainerStarted","Data":"8800283c27e298292eeb77f2cff240f0b8adf5a4492206a13ae1e3dfd046d71d"} Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.062367 4558 generic.go:334] "Generic (PLEG): container finished" podID="c8f11f45-bfce-4989-bfa0-684011f74619" containerID="d0304cd10afa9031be3b363bc240ee32e662b09bbf8d0575f8a91d278b93f42b" exitCode=143 Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.062417 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" 
event={"ID":"c8f11f45-bfce-4989-bfa0-684011f74619","Type":"ContainerDied","Data":"d0304cd10afa9031be3b363bc240ee32e662b09bbf8d0575f8a91d278b93f42b"} Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.063326 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-e4c5-account-create-update-qbwc2" event={"ID":"5dfca7b5-dd95-4da3-a088-de4d48ea8c10","Type":"ContainerStarted","Data":"8ee7bec436f0787f4acda3d0400fa14f5957655a213e44c8e77c009c00f0a2b7"} Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.066960 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovsdbserver-sb-0_17f6b80e-1961-4c44-a979-9c23bdd59837/ovsdbserver-sb/0.log" Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.067105 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovsdbserver-sb-0" Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.068022 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovsdbserver-sb-0" event={"ID":"17f6b80e-1961-4c44-a979-9c23bdd59837","Type":"ContainerDied","Data":"24eaf09c2072b8675c71caeeb0d504227fa39a4e6881c3b1b181ef743a83e953"} Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.068085 4558 scope.go:117] "RemoveContainer" containerID="538bcf70ba0df02b3c9616008ced0cabfe97d3b97ecd1cf7f96d63b4177c5ffa" Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.077708 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-5191-account-create-update-cznq6" event={"ID":"8c1a8bdd-f084-4e64-a651-91e76ad6f7d1","Type":"ContainerStarted","Data":"9596b87dc4ac15dd166842b4198c0c0596e7023fbee864ced700714abb6929e7"} Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.083625 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-94fd-account-create-update-wkrmc" event={"ID":"ce3fd1cf-8efc-4180-b37f-8b562827c8a2","Type":"ContainerStarted","Data":"669d63c9318a8eaa820af4fdc66729f3163da728006b15fcee96655959e48df8"} Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.091595 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-c24c-account-create-update-mxf4v" event={"ID":"25990a01-f231-4924-a1cc-9207cc5af3c8","Type":"ContainerStarted","Data":"a6796e65a485132f0ab91df3afd76f82543ec8b8c275521f97f9542d0847df8a"} Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.091690 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podUID="54cbdea3-8fcc-4d2d-870b-cf3663cfc633" containerName="nova-cell1-conductor-conductor" containerID="cri-o://93ad707d620f625a0b7b0c96075af0a312fdd519b3c0dede73cdb9bef7e7e9a5" gracePeriod=30 Jan 20 17:58:07 crc kubenswrapper[4558]: E0120 17:58:07.123119 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:58:07 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:58:07 crc kubenswrapper[4558]: Jan 20 17:58:07 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:58:07 crc kubenswrapper[4558]: Jan 20 17:58:07 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:58:07 crc kubenswrapper[4558]: Jan 
20 17:58:07 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:58:07 crc kubenswrapper[4558]: Jan 20 17:58:07 crc kubenswrapper[4558]: if [ -n "nova_cell0" ]; then Jan 20 17:58:07 crc kubenswrapper[4558]: GRANT_DATABASE="nova_cell0" Jan 20 17:58:07 crc kubenswrapper[4558]: else Jan 20 17:58:07 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:58:07 crc kubenswrapper[4558]: fi Jan 20 17:58:07 crc kubenswrapper[4558]: Jan 20 17:58:07 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:58:07 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:58:07 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:58:07 crc kubenswrapper[4558]: # 3. create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:58:07 crc kubenswrapper[4558]: # support updates Jan 20 17:58:07 crc kubenswrapper[4558]: Jan 20 17:58:07 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:58:07 crc kubenswrapper[4558]: E0120 17:58:07.124641 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"nova-cell0-db-secret\\\" not found\"" pod="openstack-kuttl-tests/nova-cell0-5191-account-create-update-cznq6" podUID="8c1a8bdd-f084-4e64-a651-91e76ad6f7d1" Jan 20 17:58:07 crc kubenswrapper[4558]: E0120 17:58:07.170461 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:58:07 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:58:07 crc kubenswrapper[4558]: Jan 20 17:58:07 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:58:07 crc kubenswrapper[4558]: Jan 20 17:58:07 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:58:07 crc kubenswrapper[4558]: Jan 20 17:58:07 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:58:07 crc kubenswrapper[4558]: Jan 20 17:58:07 crc kubenswrapper[4558]: if [ -n "cinder" ]; then Jan 20 17:58:07 crc kubenswrapper[4558]: GRANT_DATABASE="cinder" Jan 20 17:58:07 crc kubenswrapper[4558]: else Jan 20 17:58:07 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:58:07 crc kubenswrapper[4558]: fi Jan 20 17:58:07 crc kubenswrapper[4558]: Jan 20 17:58:07 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:58:07 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:58:07 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:58:07 crc kubenswrapper[4558]: # 3. 
create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:58:07 crc kubenswrapper[4558]: # support updates Jan 20 17:58:07 crc kubenswrapper[4558]: Jan 20 17:58:07 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:58:07 crc kubenswrapper[4558]: E0120 17:58:07.170575 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:58:07 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:58:07 crc kubenswrapper[4558]: Jan 20 17:58:07 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:58:07 crc kubenswrapper[4558]: Jan 20 17:58:07 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:58:07 crc kubenswrapper[4558]: Jan 20 17:58:07 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:58:07 crc kubenswrapper[4558]: Jan 20 17:58:07 crc kubenswrapper[4558]: if [ -n "barbican" ]; then Jan 20 17:58:07 crc kubenswrapper[4558]: GRANT_DATABASE="barbican" Jan 20 17:58:07 crc kubenswrapper[4558]: else Jan 20 17:58:07 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:58:07 crc kubenswrapper[4558]: fi Jan 20 17:58:07 crc kubenswrapper[4558]: Jan 20 17:58:07 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:58:07 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:58:07 crc kubenswrapper[4558]: # 2. MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:58:07 crc kubenswrapper[4558]: # 3. create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:58:07 crc kubenswrapper[4558]: # support updates Jan 20 17:58:07 crc kubenswrapper[4558]: Jan 20 17:58:07 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:58:07 crc kubenswrapper[4558]: E0120 17:58:07.170873 4558 kuberuntime_manager.go:1274] "Unhandled Error" err=< Jan 20 17:58:07 crc kubenswrapper[4558]: container &Container{Name:mariadb-account-create-update,Image:quay.io/podified-antelope-centos9/openstack-mariadb@sha256:ed0f8ba03f3ce47a32006d730c3049455325eb2c3b98b9fd6b3fb9901004df13,Command:[/bin/sh -c #!/bin/bash Jan 20 17:58:07 crc kubenswrapper[4558]: Jan 20 17:58:07 crc kubenswrapper[4558]: MYSQL_REMOTE_HOST="" source /var/lib/operator-scripts/mysql_root_auth.sh Jan 20 17:58:07 crc kubenswrapper[4558]: Jan 20 17:58:07 crc kubenswrapper[4558]: export DatabasePassword=${DatabasePassword:?"Please specify a DatabasePassword variable."} Jan 20 17:58:07 crc kubenswrapper[4558]: Jan 20 17:58:07 crc kubenswrapper[4558]: MYSQL_CMD="mysql -h -u root -P 3306" Jan 20 17:58:07 crc kubenswrapper[4558]: Jan 20 17:58:07 crc kubenswrapper[4558]: if [ -n "nova_api" ]; then Jan 20 17:58:07 crc kubenswrapper[4558]: GRANT_DATABASE="nova_api" Jan 20 17:58:07 crc kubenswrapper[4558]: else Jan 20 17:58:07 crc kubenswrapper[4558]: GRANT_DATABASE="*" Jan 20 17:58:07 crc kubenswrapper[4558]: fi Jan 20 17:58:07 crc kubenswrapper[4558]: Jan 20 17:58:07 crc kubenswrapper[4558]: # going for maximum compatibility here: Jan 20 17:58:07 crc kubenswrapper[4558]: # 1. MySQL 8 no longer allows implicit create user when GRANT is used Jan 20 17:58:07 crc kubenswrapper[4558]: # 2. 
MariaDB has "CREATE OR REPLACE", but MySQL does not Jan 20 17:58:07 crc kubenswrapper[4558]: # 3. create user with CREATE but then do all password and TLS with ALTER to Jan 20 17:58:07 crc kubenswrapper[4558]: # support updates Jan 20 17:58:07 crc kubenswrapper[4558]: Jan 20 17:58:07 crc kubenswrapper[4558]: $MYSQL_CMD < logger="UnhandledError" Jan 20 17:58:07 crc kubenswrapper[4558]: E0120 17:58:07.171547 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"cinder-db-secret\\\" not found\"" pod="openstack-kuttl-tests/cinder-fde5-account-create-update-zmxrm" podUID="96cbceb8-1912-4174-8d93-22ab4aa1b152" Jan 20 17:58:07 crc kubenswrapper[4558]: E0120 17:58:07.171620 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"barbican-db-secret\\\" not found\"" pod="openstack-kuttl-tests/barbican-e4c5-account-create-update-qbwc2" podUID="5dfca7b5-dd95-4da3-a088-de4d48ea8c10" Jan 20 17:58:07 crc kubenswrapper[4558]: E0120 17:58:07.172663 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CreateContainerConfigError: \"secret \\\"nova-api-db-secret\\\" not found\"" pod="openstack-kuttl-tests/nova-api-42d5-account-create-update-6wcjv" podUID="c9479a19-1b1e-4311-97f5-8922e8602b18" Jan 20 17:58:07 crc kubenswrapper[4558]: E0120 17:58:07.263450 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:58:07 crc kubenswrapper[4558]: E0120 17:58:07.264699 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/54cbdea3-8fcc-4d2d-870b-cf3663cfc633-combined-ca-bundle podName:54cbdea3-8fcc-4d2d-870b-cf3663cfc633 nodeName:}" failed. No retries permitted until 2026-01-20 17:58:08.264670339 +0000 UTC m=+4582.025008305 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/54cbdea3-8fcc-4d2d-870b-cf3663cfc633-combined-ca-bundle") pod "nova-cell1-conductor-0" (UID: "54cbdea3-8fcc-4d2d-870b-cf3663cfc633") : secret "combined-ca-bundle" not found Jan 20 17:58:07 crc kubenswrapper[4558]: E0120 17:58:07.264442 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/nova-cell1-conductor-config-data: secret "nova-cell1-conductor-config-data" not found Jan 20 17:58:07 crc kubenswrapper[4558]: E0120 17:58:07.265755 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/54cbdea3-8fcc-4d2d-870b-cf3663cfc633-config-data podName:54cbdea3-8fcc-4d2d-870b-cf3663cfc633 nodeName:}" failed. No retries permitted until 2026-01-20 17:58:08.26573536 +0000 UTC m=+4582.026073327 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/54cbdea3-8fcc-4d2d-870b-cf3663cfc633-config-data") pod "nova-cell1-conductor-0" (UID: "54cbdea3-8fcc-4d2d-870b-cf3663cfc633") : secret "nova-cell1-conductor-config-data" not found Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.303663 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/rabbitmq-server-0" podUID="d512bec4-7ed0-43bb-b8fe-0f235f7698e5" containerName="rabbitmq" containerID="cri-o://9f92bbadf6b5978b2ec804e4ad254a0133f5ddc1c7a0c908a984ae2727969174" gracePeriod=604800 Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.353647 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/openstack-cell1-galera-0" podUID="abd1fde8-57bd-4248-a061-6ddb436501c2" containerName="galera" containerID="cri-o://b7311156a63404e1bd2f45a67f9cea82ec849d5cf981a05cf6aee748ad8c02bd" gracePeriod=30 Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.455534 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstackclient" Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.492332 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.499378 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ovsdbserver-sb-0"] Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.511340 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-proxy-77d4fcb95d-7jsvg"] Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.511558 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-proxy-77d4fcb95d-7jsvg" podUID="2c80b87d-f7d5-46dd-a6d9-bba3e82e405b" containerName="proxy-httpd" containerID="cri-o://3ea77410056274873a4720a35fcc92ab9a47e76916a85817eee90b16a86d216d" gracePeriod=30 Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.511941 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/swift-proxy-77d4fcb95d-7jsvg" podUID="2c80b87d-f7d5-46dd-a6d9-bba3e82e405b" containerName="proxy-server" containerID="cri-o://a6dd9f88121dd9a74ad7da43d7ab6d29f99cd801b82eb031635bd0e0e30304e3" gracePeriod=30 Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.569486 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/3834281a-b947-44ad-9390-a2057e7e902d-openstack-config\") pod \"3834281a-b947-44ad-9390-a2057e7e902d\" (UID: \"3834281a-b947-44ad-9390-a2057e7e902d\") " Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.570224 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3834281a-b947-44ad-9390-a2057e7e902d-combined-ca-bundle\") pod \"3834281a-b947-44ad-9390-a2057e7e902d\" (UID: \"3834281a-b947-44ad-9390-a2057e7e902d\") " Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.571122 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gp9v6\" (UniqueName: \"kubernetes.io/projected/3834281a-b947-44ad-9390-a2057e7e902d-kube-api-access-gp9v6\") pod \"3834281a-b947-44ad-9390-a2057e7e902d\" (UID: \"3834281a-b947-44ad-9390-a2057e7e902d\") " Jan 20 17:58:07 crc 
kubenswrapper[4558]: I0120 17:58:07.571234 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/3834281a-b947-44ad-9390-a2057e7e902d-openstack-config-secret\") pod \"3834281a-b947-44ad-9390-a2057e7e902d\" (UID: \"3834281a-b947-44ad-9390-a2057e7e902d\") " Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.575122 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3834281a-b947-44ad-9390-a2057e7e902d-kube-api-access-gp9v6" (OuterVolumeSpecName: "kube-api-access-gp9v6") pod "3834281a-b947-44ad-9390-a2057e7e902d" (UID: "3834281a-b947-44ad-9390-a2057e7e902d"). InnerVolumeSpecName "kube-api-access-gp9v6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:58:07 crc kubenswrapper[4558]: E0120 17:58:07.643357 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="5772b7b876d3a0d68ec8e5a68aa617182f064469dac3fc22b2b6ef8493342da2" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:58:07 crc kubenswrapper[4558]: E0120 17:58:07.645258 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="5772b7b876d3a0d68ec8e5a68aa617182f064469dac3fc22b2b6ef8493342da2" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:58:07 crc kubenswrapper[4558]: E0120 17:58:07.651175 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="5772b7b876d3a0d68ec8e5a68aa617182f064469dac3fc22b2b6ef8493342da2" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:58:07 crc kubenswrapper[4558]: E0120 17:58:07.651212 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="8a1b8bc3-6d56-42f1-99a2-f80ff914e3d8" containerName="nova-cell0-conductor-conductor" Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.673972 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gp9v6\" (UniqueName: \"kubernetes.io/projected/3834281a-b947-44ad-9390-a2057e7e902d-kube-api-access-gp9v6\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:07 crc kubenswrapper[4558]: E0120 17:58:07.674370 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:58:07 crc kubenswrapper[4558]: E0120 17:58:07.674464 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-keystone-internal-svc: secret "cert-keystone-internal-svc" not found Jan 20 17:58:07 crc kubenswrapper[4558]: E0120 17:58:07.674477 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/747b1096-c84a-43e1-930e-9f887a42c524-combined-ca-bundle podName:747b1096-c84a-43e1-930e-9f887a42c524 nodeName:}" failed. No retries permitted until 2026-01-20 17:58:08.174457912 +0000 UTC m=+4581.934795879 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/747b1096-c84a-43e1-930e-9f887a42c524-combined-ca-bundle") pod "keystone-747b5668bc-jgcd6" (UID: "747b1096-c84a-43e1-930e-9f887a42c524") : secret "combined-ca-bundle" not found Jan 20 17:58:07 crc kubenswrapper[4558]: E0120 17:58:07.674573 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/747b1096-c84a-43e1-930e-9f887a42c524-internal-tls-certs podName:747b1096-c84a-43e1-930e-9f887a42c524 nodeName:}" failed. No retries permitted until 2026-01-20 17:58:08.174555816 +0000 UTC m=+4581.934893782 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/747b1096-c84a-43e1-930e-9f887a42c524-internal-tls-certs") pod "keystone-747b5668bc-jgcd6" (UID: "747b1096-c84a-43e1-930e-9f887a42c524") : secret "cert-keystone-internal-svc" not found Jan 20 17:58:07 crc kubenswrapper[4558]: E0120 17:58:07.674725 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-keystone-public-svc: secret "cert-keystone-public-svc" not found Jan 20 17:58:07 crc kubenswrapper[4558]: E0120 17:58:07.676341 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/747b1096-c84a-43e1-930e-9f887a42c524-public-tls-certs podName:747b1096-c84a-43e1-930e-9f887a42c524 nodeName:}" failed. No retries permitted until 2026-01-20 17:58:08.176311986 +0000 UTC m=+4581.936649954 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/747b1096-c84a-43e1-930e-9f887a42c524-public-tls-certs") pod "keystone-747b5668bc-jgcd6" (UID: "747b1096-c84a-43e1-930e-9f887a42c524") : secret "cert-keystone-public-svc" not found Jan 20 17:58:07 crc kubenswrapper[4558]: E0120 17:58:07.679016 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="96d47ec89318825738be6686bad9869d642e9bc40f34628060459ade0aa4b177" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Jan 20 17:58:07 crc kubenswrapper[4558]: E0120 17:58:07.680132 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="96d47ec89318825738be6686bad9869d642e9bc40f34628060459ade0aa4b177" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Jan 20 17:58:07 crc kubenswrapper[4558]: E0120 17:58:07.682341 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="96d47ec89318825738be6686bad9869d642e9bc40f34628060459ade0aa4b177" cmd=["/usr/local/bin/container-scripts/status_check.sh"] Jan 20 17:58:07 crc kubenswrapper[4558]: E0120 17:58:07.682377 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/ovn-northd-0" podUID="dbdd2687-1110-4dce-a521-19c9337df3a2" containerName="ovn-northd" Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.697240 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/configmap/3834281a-b947-44ad-9390-a2057e7e902d-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "3834281a-b947-44ad-9390-a2057e7e902d" (UID: "3834281a-b947-44ad-9390-a2057e7e902d"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.700192 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3834281a-b947-44ad-9390-a2057e7e902d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3834281a-b947-44ad-9390-a2057e7e902d" (UID: "3834281a-b947-44ad-9390-a2057e7e902d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.721385 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3834281a-b947-44ad-9390-a2057e7e902d-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "3834281a-b947-44ad-9390-a2057e7e902d" (UID: "3834281a-b947-44ad-9390-a2057e7e902d"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.769551 4558 scope.go:117] "RemoveContainer" containerID="76d0924a191838385c1332ce3ba83da3d2823bf9b2729355de22ccab98022411" Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.775487 4558 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/3834281a-b947-44ad-9390-a2057e7e902d-openstack-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.775512 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3834281a-b947-44ad-9390-a2057e7e902d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.775523 4558 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/3834281a-b947-44ad-9390-a2057e7e902d-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.872849 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-94fd-account-create-update-wkrmc" Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.877049 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-c24c-account-create-update-mxf4v" Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.904554 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.944009 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.981541 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8vd2s\" (UniqueName: \"kubernetes.io/projected/25990a01-f231-4924-a1cc-9207cc5af3c8-kube-api-access-8vd2s\") pod \"25990a01-f231-4924-a1cc-9207cc5af3c8\" (UID: \"25990a01-f231-4924-a1cc-9207cc5af3c8\") " Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.981581 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ff6g\" (UniqueName: \"kubernetes.io/projected/ce3fd1cf-8efc-4180-b37f-8b562827c8a2-kube-api-access-6ff6g\") pod \"ce3fd1cf-8efc-4180-b37f-8b562827c8a2\" (UID: \"ce3fd1cf-8efc-4180-b37f-8b562827c8a2\") " Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.981611 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e270f26-f899-4007-94bf-ab62080fe4ce-kube-state-metrics-tls-certs\") pod \"4e270f26-f899-4007-94bf-ab62080fe4ce\" (UID: \"4e270f26-f899-4007-94bf-ab62080fe4ce\") " Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.981641 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/de8410e3-8de9-4013-b2ed-545ccdff866c-vencrypt-tls-certs\") pod \"de8410e3-8de9-4013-b2ed-545ccdff866c\" (UID: \"de8410e3-8de9-4013-b2ed-545ccdff866c\") " Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.981670 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ce3fd1cf-8efc-4180-b37f-8b562827c8a2-operator-scripts\") pod \"ce3fd1cf-8efc-4180-b37f-8b562827c8a2\" (UID: \"ce3fd1cf-8efc-4180-b37f-8b562827c8a2\") " Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.981693 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4mk87\" (UniqueName: \"kubernetes.io/projected/de8410e3-8de9-4013-b2ed-545ccdff866c-kube-api-access-4mk87\") pod \"de8410e3-8de9-4013-b2ed-545ccdff866c\" (UID: \"de8410e3-8de9-4013-b2ed-545ccdff866c\") " Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.981776 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/de8410e3-8de9-4013-b2ed-545ccdff866c-nova-novncproxy-tls-certs\") pod \"de8410e3-8de9-4013-b2ed-545ccdff866c\" (UID: \"de8410e3-8de9-4013-b2ed-545ccdff866c\") " Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.981852 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/de8410e3-8de9-4013-b2ed-545ccdff866c-config-data\") pod \"de8410e3-8de9-4013-b2ed-545ccdff866c\" (UID: \"de8410e3-8de9-4013-b2ed-545ccdff866c\") " Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.981968 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/4e270f26-f899-4007-94bf-ab62080fe4ce-kube-state-metrics-tls-config\") pod \"4e270f26-f899-4007-94bf-ab62080fe4ce\" (UID: \"4e270f26-f899-4007-94bf-ab62080fe4ce\") " Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.982012 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" 
(UniqueName: \"kubernetes.io/configmap/25990a01-f231-4924-a1cc-9207cc5af3c8-operator-scripts\") pod \"25990a01-f231-4924-a1cc-9207cc5af3c8\" (UID: \"25990a01-f231-4924-a1cc-9207cc5af3c8\") " Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.982050 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de8410e3-8de9-4013-b2ed-545ccdff866c-combined-ca-bundle\") pod \"de8410e3-8de9-4013-b2ed-545ccdff866c\" (UID: \"de8410e3-8de9-4013-b2ed-545ccdff866c\") " Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.982068 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-55gl2\" (UniqueName: \"kubernetes.io/projected/4e270f26-f899-4007-94bf-ab62080fe4ce-kube-api-access-55gl2\") pod \"4e270f26-f899-4007-94bf-ab62080fe4ce\" (UID: \"4e270f26-f899-4007-94bf-ab62080fe4ce\") " Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.982091 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e270f26-f899-4007-94bf-ab62080fe4ce-combined-ca-bundle\") pod \"4e270f26-f899-4007-94bf-ab62080fe4ce\" (UID: \"4e270f26-f899-4007-94bf-ab62080fe4ce\") " Jan 20 17:58:07 crc kubenswrapper[4558]: E0120 17:58:07.982834 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Jan 20 17:58:07 crc kubenswrapper[4558]: E0120 17:58:07.982876 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/948fb15d-07f1-4b25-b8d5-7d582024ef28-config-data podName:948fb15d-07f1-4b25-b8d5-7d582024ef28 nodeName:}" failed. No retries permitted until 2026-01-20 17:58:11.982861039 +0000 UTC m=+4585.743199006 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/948fb15d-07f1-4b25-b8d5-7d582024ef28-config-data") pod "rabbitmq-cell1-server-0" (UID: "948fb15d-07f1-4b25-b8d5-7d582024ef28") : configmap "rabbitmq-cell1-config-data" not found Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.988773 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/de8410e3-8de9-4013-b2ed-545ccdff866c-kube-api-access-4mk87" (OuterVolumeSpecName: "kube-api-access-4mk87") pod "de8410e3-8de9-4013-b2ed-545ccdff866c" (UID: "de8410e3-8de9-4013-b2ed-545ccdff866c"). InnerVolumeSpecName "kube-api-access-4mk87". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:58:07 crc kubenswrapper[4558]: I0120 17:58:07.993754 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ce3fd1cf-8efc-4180-b37f-8b562827c8a2-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ce3fd1cf-8efc-4180-b37f-8b562827c8a2" (UID: "ce3fd1cf-8efc-4180-b37f-8b562827c8a2"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:07.995145 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25990a01-f231-4924-a1cc-9207cc5af3c8-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "25990a01-f231-4924-a1cc-9207cc5af3c8" (UID: "25990a01-f231-4924-a1cc-9207cc5af3c8"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.027325 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e270f26-f899-4007-94bf-ab62080fe4ce-kube-api-access-55gl2" (OuterVolumeSpecName: "kube-api-access-55gl2") pod "4e270f26-f899-4007-94bf-ab62080fe4ce" (UID: "4e270f26-f899-4007-94bf-ab62080fe4ce"). InnerVolumeSpecName "kube-api-access-55gl2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.035611 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25990a01-f231-4924-a1cc-9207cc5af3c8-kube-api-access-8vd2s" (OuterVolumeSpecName: "kube-api-access-8vd2s") pod "25990a01-f231-4924-a1cc-9207cc5af3c8" (UID: "25990a01-f231-4924-a1cc-9207cc5af3c8"). InnerVolumeSpecName "kube-api-access-8vd2s". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.039775 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ce3fd1cf-8efc-4180-b37f-8b562827c8a2-kube-api-access-6ff6g" (OuterVolumeSpecName: "kube-api-access-6ff6g") pod "ce3fd1cf-8efc-4180-b37f-8b562827c8a2" (UID: "ce3fd1cf-8efc-4180-b37f-8b562827c8a2"). InnerVolumeSpecName "kube-api-access-6ff6g". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.040230 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/de8410e3-8de9-4013-b2ed-545ccdff866c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "de8410e3-8de9-4013-b2ed-545ccdff866c" (UID: "de8410e3-8de9-4013-b2ed-545ccdff866c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.072219 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e270f26-f899-4007-94bf-ab62080fe4ce-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4e270f26-f899-4007-94bf-ab62080fe4ce" (UID: "4e270f26-f899-4007-94bf-ab62080fe4ce"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.084428 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/de8410e3-8de9-4013-b2ed-545ccdff866c-config-data" (OuterVolumeSpecName: "config-data") pod "de8410e3-8de9-4013-b2ed-545ccdff866c" (UID: "de8410e3-8de9-4013-b2ed-545ccdff866c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.085697 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/de8410e3-8de9-4013-b2ed-545ccdff866c-config-data\") pod \"de8410e3-8de9-4013-b2ed-545ccdff866c\" (UID: \"de8410e3-8de9-4013-b2ed-545ccdff866c\") " Jan 20 17:58:08 crc kubenswrapper[4558]: W0120 17:58:08.087725 4558 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/de8410e3-8de9-4013-b2ed-545ccdff866c/volumes/kubernetes.io~secret/config-data Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.087746 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/de8410e3-8de9-4013-b2ed-545ccdff866c-config-data" (OuterVolumeSpecName: "config-data") pod "de8410e3-8de9-4013-b2ed-545ccdff866c" (UID: "de8410e3-8de9-4013-b2ed-545ccdff866c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.087968 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/de8410e3-8de9-4013-b2ed-545ccdff866c-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.088062 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/25990a01-f231-4924-a1cc-9207cc5af3c8-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.088153 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de8410e3-8de9-4013-b2ed-545ccdff866c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.088232 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-55gl2\" (UniqueName: \"kubernetes.io/projected/4e270f26-f899-4007-94bf-ab62080fe4ce-kube-api-access-55gl2\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.088282 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e270f26-f899-4007-94bf-ab62080fe4ce-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.088372 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8vd2s\" (UniqueName: \"kubernetes.io/projected/25990a01-f231-4924-a1cc-9207cc5af3c8-kube-api-access-8vd2s\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.088429 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ff6g\" (UniqueName: \"kubernetes.io/projected/ce3fd1cf-8efc-4180-b37f-8b562827c8a2-kube-api-access-6ff6g\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.091020 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ce3fd1cf-8efc-4180-b37f-8b562827c8a2-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.091087 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4mk87\" (UniqueName: \"kubernetes.io/projected/de8410e3-8de9-4013-b2ed-545ccdff866c-kube-api-access-4mk87\") on node \"crc\" DevicePath \"\"" Jan 20 
17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.092918 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e270f26-f899-4007-94bf-ab62080fe4ce-kube-state-metrics-tls-config" (OuterVolumeSpecName: "kube-state-metrics-tls-config") pod "4e270f26-f899-4007-94bf-ab62080fe4ce" (UID: "4e270f26-f899-4007-94bf-ab62080fe4ce"). InnerVolumeSpecName "kube-state-metrics-tls-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.096925 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/de8410e3-8de9-4013-b2ed-545ccdff866c-vencrypt-tls-certs" (OuterVolumeSpecName: "vencrypt-tls-certs") pod "de8410e3-8de9-4013-b2ed-545ccdff866c" (UID: "de8410e3-8de9-4013-b2ed-545ccdff866c"). InnerVolumeSpecName "vencrypt-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.105654 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.107226 4558 generic.go:334] "Generic (PLEG): container finished" podID="75a8c03c-5b0a-4144-b1a8-293ce0d50739" containerID="a640bc6d0ce6fe28ee110c1a41a4036f89e48c474ea9e34e4f704aff90b74d58" exitCode=1 Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.107689 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-x75ng" event={"ID":"75a8c03c-5b0a-4144-b1a8-293ce0d50739","Type":"ContainerDied","Data":"a640bc6d0ce6fe28ee110c1a41a4036f89e48c474ea9e34e4f704aff90b74d58"} Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.108476 4558 scope.go:117] "RemoveContainer" containerID="a640bc6d0ce6fe28ee110c1a41a4036f89e48c474ea9e34e4f704aff90b74d58" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.117470 4558 scope.go:117] "RemoveContainer" containerID="a79ee7ec7558132e07cb924fcd12069b0f87fbec2d9e3ff1c1b559b752eaac9f" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.117611 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstackclient" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.162710 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/de8410e3-8de9-4013-b2ed-545ccdff866c-nova-novncproxy-tls-certs" (OuterVolumeSpecName: "nova-novncproxy-tls-certs") pod "de8410e3-8de9-4013-b2ed-545ccdff866c" (UID: "de8410e3-8de9-4013-b2ed-545ccdff866c"). InnerVolumeSpecName "nova-novncproxy-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.175274 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e270f26-f899-4007-94bf-ab62080fe4ce-kube-state-metrics-tls-certs" (OuterVolumeSpecName: "kube-state-metrics-tls-certs") pod "4e270f26-f899-4007-94bf-ab62080fe4ce" (UID: "4e270f26-f899-4007-94bf-ab62080fe4ce"). InnerVolumeSpecName "kube-state-metrics-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.194347 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1481b06c-22ef-4b33-b49c-2c5d6903cbee-config-data-custom\") pod \"1481b06c-22ef-4b33-b49c-2c5d6903cbee\" (UID: \"1481b06c-22ef-4b33-b49c-2c5d6903cbee\") " Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.194418 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1481b06c-22ef-4b33-b49c-2c5d6903cbee-combined-ca-bundle\") pod \"1481b06c-22ef-4b33-b49c-2c5d6903cbee\" (UID: \"1481b06c-22ef-4b33-b49c-2c5d6903cbee\") " Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.194446 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1481b06c-22ef-4b33-b49c-2c5d6903cbee-scripts\") pod \"1481b06c-22ef-4b33-b49c-2c5d6903cbee\" (UID: \"1481b06c-22ef-4b33-b49c-2c5d6903cbee\") " Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.194464 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1481b06c-22ef-4b33-b49c-2c5d6903cbee-etc-machine-id\") pod \"1481b06c-22ef-4b33-b49c-2c5d6903cbee\" (UID: \"1481b06c-22ef-4b33-b49c-2c5d6903cbee\") " Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.197252 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lh7cr\" (UniqueName: \"kubernetes.io/projected/1481b06c-22ef-4b33-b49c-2c5d6903cbee-kube-api-access-lh7cr\") pod \"1481b06c-22ef-4b33-b49c-2c5d6903cbee\" (UID: \"1481b06c-22ef-4b33-b49c-2c5d6903cbee\") " Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.197314 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1481b06c-22ef-4b33-b49c-2c5d6903cbee-config-data\") pod \"1481b06c-22ef-4b33-b49c-2c5d6903cbee\" (UID: \"1481b06c-22ef-4b33-b49c-2c5d6903cbee\") " Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.200015 4558 reconciler_common.go:293] "Volume detached for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/de8410e3-8de9-4013-b2ed-545ccdff866c-nova-novncproxy-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.200034 4558 reconciler_common.go:293] "Volume detached for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/4e270f26-f899-4007-94bf-ab62080fe4ce-kube-state-metrics-tls-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.200044 4558 reconciler_common.go:293] "Volume detached for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e270f26-f899-4007-94bf-ab62080fe4ce-kube-state-metrics-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.200055 4558 reconciler_common.go:293] "Volume detached for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/de8410e3-8de9-4013-b2ed-545ccdff866c-vencrypt-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:08 crc kubenswrapper[4558]: E0120 17:58:08.200140 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-keystone-public-svc: secret "cert-keystone-public-svc" not found Jan 20 17:58:08 crc 
kubenswrapper[4558]: E0120 17:58:08.200196 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/747b1096-c84a-43e1-930e-9f887a42c524-public-tls-certs podName:747b1096-c84a-43e1-930e-9f887a42c524 nodeName:}" failed. No retries permitted until 2026-01-20 17:58:09.200182577 +0000 UTC m=+4582.960520544 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/747b1096-c84a-43e1-930e-9f887a42c524-public-tls-certs") pod "keystone-747b5668bc-jgcd6" (UID: "747b1096-c84a-43e1-930e-9f887a42c524") : secret "cert-keystone-public-svc" not found Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.204617 4558 generic.go:334] "Generic (PLEG): container finished" podID="1481b06c-22ef-4b33-b49c-2c5d6903cbee" containerID="dc24e5666420136fe5c65594f8305d0aa03d99644a34fc70829cbd42cab6db49" exitCode=0 Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.204647 4558 generic.go:334] "Generic (PLEG): container finished" podID="1481b06c-22ef-4b33-b49c-2c5d6903cbee" containerID="8e9c3b8eaacddd4170f8d9b11bdc908d3994f8a53ba6ad4543a5cf43244fda7c" exitCode=0 Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.204769 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-scheduler-0" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.205863 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-1afa-account-create-update-9wfwp"] Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.205893 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"1481b06c-22ef-4b33-b49c-2c5d6903cbee","Type":"ContainerDied","Data":"dc24e5666420136fe5c65594f8305d0aa03d99644a34fc70829cbd42cab6db49"} Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.205912 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"1481b06c-22ef-4b33-b49c-2c5d6903cbee","Type":"ContainerDied","Data":"8e9c3b8eaacddd4170f8d9b11bdc908d3994f8a53ba6ad4543a5cf43244fda7c"} Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.205924 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-scheduler-0" event={"ID":"1481b06c-22ef-4b33-b49c-2c5d6903cbee","Type":"ContainerDied","Data":"2ffafa34e2db68287a17eec1029e94de95bb2ca1a329fba8f977d09c583f1859"} Jan 20 17:58:08 crc kubenswrapper[4558]: E0120 17:58:08.208268 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-keystone-internal-svc: secret "cert-keystone-internal-svc" not found Jan 20 17:58:08 crc kubenswrapper[4558]: E0120 17:58:08.208329 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/747b1096-c84a-43e1-930e-9f887a42c524-internal-tls-certs podName:747b1096-c84a-43e1-930e-9f887a42c524 nodeName:}" failed. No retries permitted until 2026-01-20 17:58:09.208312439 +0000 UTC m=+4582.968650406 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/747b1096-c84a-43e1-930e-9f887a42c524-internal-tls-certs") pod "keystone-747b5668bc-jgcd6" (UID: "747b1096-c84a-43e1-930e-9f887a42c524") : secret "cert-keystone-internal-svc" not found Jan 20 17:58:08 crc kubenswrapper[4558]: E0120 17:58:08.208913 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.208932 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1481b06c-22ef-4b33-b49c-2c5d6903cbee-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "1481b06c-22ef-4b33-b49c-2c5d6903cbee" (UID: "1481b06c-22ef-4b33-b49c-2c5d6903cbee"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:58:08 crc kubenswrapper[4558]: E0120 17:58:08.208991 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/747b1096-c84a-43e1-930e-9f887a42c524-combined-ca-bundle podName:747b1096-c84a-43e1-930e-9f887a42c524 nodeName:}" failed. No retries permitted until 2026-01-20 17:58:09.208965256 +0000 UTC m=+4582.969303224 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/747b1096-c84a-43e1-930e-9f887a42c524-combined-ca-bundle") pod "keystone-747b5668bc-jgcd6" (UID: "747b1096-c84a-43e1-930e-9f887a42c524") : secret "combined-ca-bundle" not found Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.210884 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1481b06c-22ef-4b33-b49c-2c5d6903cbee-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "1481b06c-22ef-4b33-b49c-2c5d6903cbee" (UID: "1481b06c-22ef-4b33-b49c-2c5d6903cbee"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.219428 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1481b06c-22ef-4b33-b49c-2c5d6903cbee-kube-api-access-lh7cr" (OuterVolumeSpecName: "kube-api-access-lh7cr") pod "1481b06c-22ef-4b33-b49c-2c5d6903cbee" (UID: "1481b06c-22ef-4b33-b49c-2c5d6903cbee"). InnerVolumeSpecName "kube-api-access-lh7cr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.219534 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1481b06c-22ef-4b33-b49c-2c5d6903cbee-scripts" (OuterVolumeSpecName: "scripts") pod "1481b06c-22ef-4b33-b49c-2c5d6903cbee" (UID: "1481b06c-22ef-4b33-b49c-2c5d6903cbee"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.232300 4558 scope.go:117] "RemoveContainer" containerID="dc24e5666420136fe5c65594f8305d0aa03d99644a34fc70829cbd42cab6db49" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.246324 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-1afa-account-create-update-9wfwp"] Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.266603 4558 generic.go:334] "Generic (PLEG): container finished" podID="680eb8d4-16c8-40ae-bfb6-054fcf23f281" containerID="528a68a740a5bc1c67a3fbd8155ae60dbf9ec6b340d2ce96b37c200e6e6de11b" exitCode=0 Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.266631 4558 generic.go:334] "Generic (PLEG): container finished" podID="680eb8d4-16c8-40ae-bfb6-054fcf23f281" containerID="2b5d5a7da9b64e1d83404cfe438d86d2e219e79fe96bf81cc97e5f2e3a7048ac" exitCode=2 Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.266638 4558 generic.go:334] "Generic (PLEG): container finished" podID="680eb8d4-16c8-40ae-bfb6-054fcf23f281" containerID="a1c6c829c5d10f6cb194bd9017159b0daa07b6905d16ede8abe89e2a8949198f" exitCode=0 Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.266686 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"680eb8d4-16c8-40ae-bfb6-054fcf23f281","Type":"ContainerDied","Data":"528a68a740a5bc1c67a3fbd8155ae60dbf9ec6b340d2ce96b37c200e6e6de11b"} Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.266711 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"680eb8d4-16c8-40ae-bfb6-054fcf23f281","Type":"ContainerDied","Data":"2b5d5a7da9b64e1d83404cfe438d86d2e219e79fe96bf81cc97e5f2e3a7048ac"} Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.266722 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"680eb8d4-16c8-40ae-bfb6-054fcf23f281","Type":"ContainerDied","Data":"a1c6c829c5d10f6cb194bd9017159b0daa07b6905d16ede8abe89e2a8949198f"} Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.275408 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-94fd-account-create-update-wkrmc" event={"ID":"ce3fd1cf-8efc-4180-b37f-8b562827c8a2","Type":"ContainerDied","Data":"669d63c9318a8eaa820af4fdc66729f3163da728006b15fcee96655959e48df8"} Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.275476 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-94fd-account-create-update-wkrmc" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.304723 4558 generic.go:334] "Generic (PLEG): container finished" podID="8a5251c1-60bc-45d7-8524-fd654c09505b" containerID="cc8c4f5e82e163a1cfed41c73a53986199d859e052ef15e033504df9f64fa7a9" exitCode=143 Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.304848 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-6c8cb46bb9-k2j62" event={"ID":"8a5251c1-60bc-45d7-8524-fd654c09505b","Type":"ContainerDied","Data":"cc8c4f5e82e163a1cfed41c73a53986199d859e052ef15e033504df9f64fa7a9"} Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.307766 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/keystone-1afa-account-create-update-tmdn7"] Jan 20 17:58:08 crc kubenswrapper[4558]: E0120 17:58:08.307954 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/nova-cell1-conductor-config-data: secret "nova-cell1-conductor-config-data" not found Jan 20 17:58:08 crc kubenswrapper[4558]: E0120 17:58:08.308040 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/54cbdea3-8fcc-4d2d-870b-cf3663cfc633-config-data podName:54cbdea3-8fcc-4d2d-870b-cf3663cfc633 nodeName:}" failed. No retries permitted until 2026-01-20 17:58:10.308020664 +0000 UTC m=+4584.068358631 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/54cbdea3-8fcc-4d2d-870b-cf3663cfc633-config-data") pod "nova-cell1-conductor-0" (UID: "54cbdea3-8fcc-4d2d-870b-cf3663cfc633") : secret "nova-cell1-conductor-config-data" not found Jan 20 17:58:08 crc kubenswrapper[4558]: E0120 17:58:08.308707 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1481b06c-22ef-4b33-b49c-2c5d6903cbee" containerName="cinder-scheduler" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.308733 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1481b06c-22ef-4b33-b49c-2c5d6903cbee" containerName="cinder-scheduler" Jan 20 17:58:08 crc kubenswrapper[4558]: E0120 17:58:08.308750 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de8410e3-8de9-4013-b2ed-545ccdff866c" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.308767 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="de8410e3-8de9-4013-b2ed-545ccdff866c" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:58:08 crc kubenswrapper[4558]: E0120 17:58:08.308796 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="17f6b80e-1961-4c44-a979-9c23bdd59837" containerName="openstack-network-exporter" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.308804 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="17f6b80e-1961-4c44-a979-9c23bdd59837" containerName="openstack-network-exporter" Jan 20 17:58:08 crc kubenswrapper[4558]: E0120 17:58:08.308827 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e270f26-f899-4007-94bf-ab62080fe4ce" containerName="kube-state-metrics" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.308837 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e270f26-f899-4007-94bf-ab62080fe4ce" containerName="kube-state-metrics" Jan 20 17:58:08 crc kubenswrapper[4558]: E0120 17:58:08.308849 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1481b06c-22ef-4b33-b49c-2c5d6903cbee" 
containerName="probe" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.308857 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1481b06c-22ef-4b33-b49c-2c5d6903cbee" containerName="probe" Jan 20 17:58:08 crc kubenswrapper[4558]: E0120 17:58:08.308883 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="17f6b80e-1961-4c44-a979-9c23bdd59837" containerName="ovsdbserver-sb" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.308889 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="17f6b80e-1961-4c44-a979-9c23bdd59837" containerName="ovsdbserver-sb" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.309244 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="de8410e3-8de9-4013-b2ed-545ccdff866c" containerName="nova-cell1-novncproxy-novncproxy" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.309275 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="17f6b80e-1961-4c44-a979-9c23bdd59837" containerName="ovsdbserver-sb" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.309288 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="17f6b80e-1961-4c44-a979-9c23bdd59837" containerName="openstack-network-exporter" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.309302 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="1481b06c-22ef-4b33-b49c-2c5d6903cbee" containerName="probe" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.309316 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="1481b06c-22ef-4b33-b49c-2c5d6903cbee" containerName="cinder-scheduler" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.309331 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e270f26-f899-4007-94bf-ab62080fe4ce" containerName="kube-state-metrics" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.310252 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-1afa-account-create-update-tmdn7" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.312864 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"keystone-db-secret" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.314148 4558 scope.go:117] "RemoveContainer" containerID="8e9c3b8eaacddd4170f8d9b11bdc908d3994f8a53ba6ad4543a5cf43244fda7c" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.319728 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/keystone-1afa-account-create-update-tmdn7"] Jan 20 17:58:08 crc kubenswrapper[4558]: E0120 17:58:08.321602 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:58:08 crc kubenswrapper[4558]: E0120 17:58:08.321644 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/54cbdea3-8fcc-4d2d-870b-cf3663cfc633-combined-ca-bundle podName:54cbdea3-8fcc-4d2d-870b-cf3663cfc633 nodeName:}" failed. No retries permitted until 2026-01-20 17:58:10.321633262 +0000 UTC m=+4584.081971229 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/54cbdea3-8fcc-4d2d-870b-cf3663cfc633-combined-ca-bundle") pod "nova-cell1-conductor-0" (UID: "54cbdea3-8fcc-4d2d-870b-cf3663cfc633") : secret "combined-ca-bundle" not found Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.321832 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1481b06c-22ef-4b33-b49c-2c5d6903cbee-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.321846 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1481b06c-22ef-4b33-b49c-2c5d6903cbee-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.321857 4558 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/1481b06c-22ef-4b33-b49c-2c5d6903cbee-etc-machine-id\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.321867 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lh7cr\" (UniqueName: \"kubernetes.io/projected/1481b06c-22ef-4b33-b49c-2c5d6903cbee-kube-api-access-lh7cr\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.331291 4558 generic.go:334] "Generic (PLEG): container finished" podID="314a7e85-558d-49d5-b281-52342e3e4c01" containerID="81d5e20fc02296208cf6c1a8c066a820d8a642c7b086db42abc8ea3d3a418052" exitCode=0 Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.331344 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-l7p2w" event={"ID":"314a7e85-558d-49d5-b281-52342e3e4c01","Type":"ContainerDied","Data":"81d5e20fc02296208cf6c1a8c066a820d8a642c7b086db42abc8ea3d3a418052"} Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.331365 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-l7p2w" event={"ID":"314a7e85-558d-49d5-b281-52342e3e4c01","Type":"ContainerStarted","Data":"26b801148151bae8daf2efca3e965b6f4a03b58da5117a7f22ac3bd058a41fdb"} Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.341843 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-glvsk"] Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.344421 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-fde5-account-create-update-zmxrm" event={"ID":"96cbceb8-1912-4174-8d93-22ab4aa1b152","Type":"ContainerStarted","Data":"28ff192c11ce555aa1f1e05d999db27db9b987b9245a1db6ec7dbee05afcdcdd"} Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.353613 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.353775 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-db-sync-ktt4t"] Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.361399 4558 generic.go:334] "Generic (PLEG): container finished" podID="a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91" containerID="ca4ecb7fa4f5c23eb30d668f5754a6906c2e344a7fe9c1284b2323148a7db0a4" exitCode=143 Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.361440 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-58949d94f4-gxkkc" event={"ID":"a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91","Type":"ContainerDied","Data":"ca4ecb7fa4f5c23eb30d668f5754a6906c2e344a7fe9c1284b2323148a7db0a4"} Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.365845 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-db-sync-ktt4t"] Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.370656 4558 generic.go:334] "Generic (PLEG): container finished" podID="2c80b87d-f7d5-46dd-a6d9-bba3e82e405b" containerID="3ea77410056274873a4720a35fcc92ab9a47e76916a85817eee90b16a86d216d" exitCode=0 Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.370718 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-77d4fcb95d-7jsvg" event={"ID":"2c80b87d-f7d5-46dd-a6d9-bba3e82e405b","Type":"ContainerDied","Data":"3ea77410056274873a4720a35fcc92ab9a47e76916a85817eee90b16a86d216d"} Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.371840 4558 generic.go:334] "Generic (PLEG): container finished" podID="de8410e3-8de9-4013-b2ed-545ccdff866c" containerID="f83e1e941e2ebb680c9bcf90480fb830a1ce1ccd57989006602331fa43a28a91" exitCode=0 Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.371879 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"de8410e3-8de9-4013-b2ed-545ccdff866c","Type":"ContainerDied","Data":"f83e1e941e2ebb680c9bcf90480fb830a1ce1ccd57989006602331fa43a28a91"} Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.371894 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" event={"ID":"de8410e3-8de9-4013-b2ed-545ccdff866c","Type":"ContainerDied","Data":"e0b0d938faf64b937e2053af4169fcd1ad2d263b71f5c8ac240fb8ad479713ba"} Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.371950 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-novncproxy-0" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.378422 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-747b5668bc-jgcd6"] Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.378471 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-bootstrap-glvsk"] Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.393095 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/placement-c24c-account-create-update-mxf4v" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.393100 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-c24c-account-create-update-mxf4v" event={"ID":"25990a01-f231-4924-a1cc-9207cc5af3c8","Type":"ContainerDied","Data":"a6796e65a485132f0ab91df3afd76f82543ec8b8c275521f97f9542d0847df8a"} Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.413747 4558 scope.go:117] "RemoveContainer" containerID="dc24e5666420136fe5c65594f8305d0aa03d99644a34fc70829cbd42cab6db49" Jan 20 17:58:08 crc kubenswrapper[4558]: E0120 17:58:08.418480 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dc24e5666420136fe5c65594f8305d0aa03d99644a34fc70829cbd42cab6db49\": container with ID starting with dc24e5666420136fe5c65594f8305d0aa03d99644a34fc70829cbd42cab6db49 not found: ID does not exist" containerID="dc24e5666420136fe5c65594f8305d0aa03d99644a34fc70829cbd42cab6db49" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.418649 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dc24e5666420136fe5c65594f8305d0aa03d99644a34fc70829cbd42cab6db49"} err="failed to get container status \"dc24e5666420136fe5c65594f8305d0aa03d99644a34fc70829cbd42cab6db49\": rpc error: code = NotFound desc = could not find container \"dc24e5666420136fe5c65594f8305d0aa03d99644a34fc70829cbd42cab6db49\": container with ID starting with dc24e5666420136fe5c65594f8305d0aa03d99644a34fc70829cbd42cab6db49 not found: ID does not exist" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.418670 4558 scope.go:117] "RemoveContainer" containerID="8e9c3b8eaacddd4170f8d9b11bdc908d3994f8a53ba6ad4543a5cf43244fda7c" Jan 20 17:58:08 crc kubenswrapper[4558]: E0120 17:58:08.418944 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8e9c3b8eaacddd4170f8d9b11bdc908d3994f8a53ba6ad4543a5cf43244fda7c\": container with ID starting with 8e9c3b8eaacddd4170f8d9b11bdc908d3994f8a53ba6ad4543a5cf43244fda7c not found: ID does not exist" containerID="8e9c3b8eaacddd4170f8d9b11bdc908d3994f8a53ba6ad4543a5cf43244fda7c" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.419218 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8e9c3b8eaacddd4170f8d9b11bdc908d3994f8a53ba6ad4543a5cf43244fda7c"} err="failed to get container status \"8e9c3b8eaacddd4170f8d9b11bdc908d3994f8a53ba6ad4543a5cf43244fda7c\": rpc error: code = NotFound desc = could not find container \"8e9c3b8eaacddd4170f8d9b11bdc908d3994f8a53ba6ad4543a5cf43244fda7c\": container with ID starting with 8e9c3b8eaacddd4170f8d9b11bdc908d3994f8a53ba6ad4543a5cf43244fda7c not found: ID does not exist" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.419233 4558 scope.go:117] "RemoveContainer" containerID="dc24e5666420136fe5c65594f8305d0aa03d99644a34fc70829cbd42cab6db49" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.426283 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/abd1fde8-57bd-4248-a061-6ddb436501c2-kolla-config\") pod \"abd1fde8-57bd-4248-a061-6ddb436501c2\" (UID: \"abd1fde8-57bd-4248-a061-6ddb436501c2\") " Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.426565 4558 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage20-crc\") pod \"abd1fde8-57bd-4248-a061-6ddb436501c2\" (UID: \"abd1fde8-57bd-4248-a061-6ddb436501c2\") " Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.426594 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/abd1fde8-57bd-4248-a061-6ddb436501c2-galera-tls-certs\") pod \"abd1fde8-57bd-4248-a061-6ddb436501c2\" (UID: \"abd1fde8-57bd-4248-a061-6ddb436501c2\") " Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.426828 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/abd1fde8-57bd-4248-a061-6ddb436501c2-config-data-default\") pod \"abd1fde8-57bd-4248-a061-6ddb436501c2\" (UID: \"abd1fde8-57bd-4248-a061-6ddb436501c2\") " Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.426850 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vdnzt\" (UniqueName: \"kubernetes.io/projected/abd1fde8-57bd-4248-a061-6ddb436501c2-kube-api-access-vdnzt\") pod \"abd1fde8-57bd-4248-a061-6ddb436501c2\" (UID: \"abd1fde8-57bd-4248-a061-6ddb436501c2\") " Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.428331 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dc24e5666420136fe5c65594f8305d0aa03d99644a34fc70829cbd42cab6db49"} err="failed to get container status \"dc24e5666420136fe5c65594f8305d0aa03d99644a34fc70829cbd42cab6db49\": rpc error: code = NotFound desc = could not find container \"dc24e5666420136fe5c65594f8305d0aa03d99644a34fc70829cbd42cab6db49\": container with ID starting with dc24e5666420136fe5c65594f8305d0aa03d99644a34fc70829cbd42cab6db49 not found: ID does not exist" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.428369 4558 scope.go:117] "RemoveContainer" containerID="8e9c3b8eaacddd4170f8d9b11bdc908d3994f8a53ba6ad4543a5cf43244fda7c" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.426940 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/abd1fde8-57bd-4248-a061-6ddb436501c2-operator-scripts\") pod \"abd1fde8-57bd-4248-a061-6ddb436501c2\" (UID: \"abd1fde8-57bd-4248-a061-6ddb436501c2\") " Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.431300 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/abd1fde8-57bd-4248-a061-6ddb436501c2-combined-ca-bundle\") pod \"abd1fde8-57bd-4248-a061-6ddb436501c2\" (UID: \"abd1fde8-57bd-4248-a061-6ddb436501c2\") " Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.431345 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/abd1fde8-57bd-4248-a061-6ddb436501c2-config-data-generated\") pod \"abd1fde8-57bd-4248-a061-6ddb436501c2\" (UID: \"abd1fde8-57bd-4248-a061-6ddb436501c2\") " Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.433723 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8e9c3b8eaacddd4170f8d9b11bdc908d3994f8a53ba6ad4543a5cf43244fda7c"} err="failed to get container status \"8e9c3b8eaacddd4170f8d9b11bdc908d3994f8a53ba6ad4543a5cf43244fda7c\": rpc error: code = NotFound desc = could not find 
container \"8e9c3b8eaacddd4170f8d9b11bdc908d3994f8a53ba6ad4543a5cf43244fda7c\": container with ID starting with 8e9c3b8eaacddd4170f8d9b11bdc908d3994f8a53ba6ad4543a5cf43244fda7c not found: ID does not exist" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.433838 4558 scope.go:117] "RemoveContainer" containerID="f83e1e941e2ebb680c9bcf90480fb830a1ce1ccd57989006602331fa43a28a91" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.433955 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/abd1fde8-57bd-4248-a061-6ddb436501c2-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "abd1fde8-57bd-4248-a061-6ddb436501c2" (UID: "abd1fde8-57bd-4248-a061-6ddb436501c2"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.435011 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-62mvs\" (UniqueName: \"kubernetes.io/projected/71795b1b-e445-456a-aa68-e410bdaccc73-kube-api-access-62mvs\") pod \"keystone-1afa-account-create-update-tmdn7\" (UID: \"71795b1b-e445-456a-aa68-e410bdaccc73\") " pod="openstack-kuttl-tests/keystone-1afa-account-create-update-tmdn7" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.435293 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/71795b1b-e445-456a-aa68-e410bdaccc73-operator-scripts\") pod \"keystone-1afa-account-create-update-tmdn7\" (UID: \"71795b1b-e445-456a-aa68-e410bdaccc73\") " pod="openstack-kuttl-tests/keystone-1afa-account-create-update-tmdn7" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.435436 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/abd1fde8-57bd-4248-a061-6ddb436501c2-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "abd1fde8-57bd-4248-a061-6ddb436501c2" (UID: "abd1fde8-57bd-4248-a061-6ddb436501c2"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.435469 4558 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/abd1fde8-57bd-4248-a061-6ddb436501c2-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.435845 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/abd1fde8-57bd-4248-a061-6ddb436501c2-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "abd1fde8-57bd-4248-a061-6ddb436501c2" (UID: "abd1fde8-57bd-4248-a061-6ddb436501c2"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:58:08 crc kubenswrapper[4558]: E0120 17:58:08.436007 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 20 17:58:08 crc kubenswrapper[4558]: E0120 17:58:08.436148 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/d512bec4-7ed0-43bb-b8fe-0f235f7698e5-config-data podName:d512bec4-7ed0-43bb-b8fe-0f235f7698e5 nodeName:}" failed. No retries permitted until 2026-01-20 17:58:12.436128814 +0000 UTC m=+4586.196466781 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/d512bec4-7ed0-43bb-b8fe-0f235f7698e5-config-data") pod "rabbitmq-server-0" (UID: "d512bec4-7ed0-43bb-b8fe-0f235f7698e5") : configmap "rabbitmq-config-data" not found Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.437255 4558 generic.go:334] "Generic (PLEG): container finished" podID="abd1fde8-57bd-4248-a061-6ddb436501c2" containerID="b7311156a63404e1bd2f45a67f9cea82ec849d5cf981a05cf6aee748ad8c02bd" exitCode=0 Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.436484 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/abd1fde8-57bd-4248-a061-6ddb436501c2-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "abd1fde8-57bd-4248-a061-6ddb436501c2" (UID: "abd1fde8-57bd-4248-a061-6ddb436501c2"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.438285 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-cell1-galera-0" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.443766 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-cell1-galera-0" event={"ID":"abd1fde8-57bd-4248-a061-6ddb436501c2","Type":"ContainerDied","Data":"b7311156a63404e1bd2f45a67f9cea82ec849d5cf981a05cf6aee748ad8c02bd"} Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.461033 4558 generic.go:334] "Generic (PLEG): container finished" podID="9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7" containerID="2f0d5940c1a430547bdac78180588ab9d0ea89c8cd08f0adfb7ebcb602621819" exitCode=143 Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.461120 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-6bb757fd44-jvx79" event={"ID":"9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7","Type":"ContainerDied","Data":"2f0d5940c1a430547bdac78180588ab9d0ea89c8cd08f0adfb7ebcb602621819"} Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.473472 4558 generic.go:334] "Generic (PLEG): container finished" podID="4e270f26-f899-4007-94bf-ab62080fe4ce" containerID="1fe99cb66e9016b46286062b2b8309256638fd5cb58f35b387cebb419822b3bc" exitCode=2 Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.473920 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"4e270f26-f899-4007-94bf-ab62080fe4ce","Type":"ContainerDied","Data":"1fe99cb66e9016b46286062b2b8309256638fd5cb58f35b387cebb419822b3bc"} Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.473972 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kube-state-metrics-0" event={"ID":"4e270f26-f899-4007-94bf-ab62080fe4ce","Type":"ContainerDied","Data":"b0e7917ea1ab5684946e8b6bba4e8c0d10bad4512ac8a326577302ed3e8d9993"} Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.479882 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/keystone-747b5668bc-jgcd6" podUID="747b1096-c84a-43e1-930e-9f887a42c524" containerName="keystone-api" containerID="cri-o://f8102ce497d4ab8bf9c0e437f9885597ba53dd47555a7751b8ba88fdad3cac9f" gracePeriod=30 Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.480054 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.500400 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/abd1fde8-57bd-4248-a061-6ddb436501c2-kube-api-access-vdnzt" (OuterVolumeSpecName: "kube-api-access-vdnzt") pod "abd1fde8-57bd-4248-a061-6ddb436501c2" (UID: "abd1fde8-57bd-4248-a061-6ddb436501c2"). InnerVolumeSpecName "kube-api-access-vdnzt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.513489 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.515667 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage20-crc" (OuterVolumeSpecName: "mysql-db") pod "abd1fde8-57bd-4248-a061-6ddb436501c2" (UID: "abd1fde8-57bd-4248-a061-6ddb436501c2"). InnerVolumeSpecName "local-storage20-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.547593 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-db-create-vfppk"] Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.548118 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1481b06c-22ef-4b33-b49c-2c5d6903cbee-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1481b06c-22ef-4b33-b49c-2c5d6903cbee" (UID: "1481b06c-22ef-4b33-b49c-2c5d6903cbee"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.554230 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-62mvs\" (UniqueName: \"kubernetes.io/projected/71795b1b-e445-456a-aa68-e410bdaccc73-kube-api-access-62mvs\") pod \"keystone-1afa-account-create-update-tmdn7\" (UID: \"71795b1b-e445-456a-aa68-e410bdaccc73\") " pod="openstack-kuttl-tests/keystone-1afa-account-create-update-tmdn7" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.555618 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/71795b1b-e445-456a-aa68-e410bdaccc73-operator-scripts\") pod \"keystone-1afa-account-create-update-tmdn7\" (UID: \"71795b1b-e445-456a-aa68-e410bdaccc73\") " pod="openstack-kuttl-tests/keystone-1afa-account-create-update-tmdn7" Jan 20 17:58:08 crc kubenswrapper[4558]: E0120 17:58:08.558008 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 17:58:08 crc kubenswrapper[4558]: E0120 17:58:08.559969 4558 projected.go:194] Error preparing data for projected volume kube-api-access-62mvs for pod openstack-kuttl-tests/keystone-1afa-account-create-update-tmdn7: failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 17:58:08 crc kubenswrapper[4558]: E0120 17:58:08.566784 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/71795b1b-e445-456a-aa68-e410bdaccc73-operator-scripts podName:71795b1b-e445-456a-aa68-e410bdaccc73 nodeName:}" failed. No retries permitted until 2026-01-20 17:58:09.066616566 +0000 UTC m=+4582.826954534 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/71795b1b-e445-456a-aa68-e410bdaccc73-operator-scripts") pod "keystone-1afa-account-create-update-tmdn7" (UID: "71795b1b-e445-456a-aa68-e410bdaccc73") : configmap "openstack-scripts" not found Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.558762 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1481b06c-22ef-4b33-b49c-2c5d6903cbee-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.566886 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage20-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage20-crc\") on node \"crc\" " Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.559835 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-1afa-account-create-update-tmdn7"] Jan 20 17:58:08 crc kubenswrapper[4558]: E0120 17:58:08.567001 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/71795b1b-e445-456a-aa68-e410bdaccc73-kube-api-access-62mvs podName:71795b1b-e445-456a-aa68-e410bdaccc73 nodeName:}" failed. No retries permitted until 2026-01-20 17:58:09.066897093 +0000 UTC m=+4582.827235061 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-62mvs" (UniqueName: "kubernetes.io/projected/71795b1b-e445-456a-aa68-e410bdaccc73-kube-api-access-62mvs") pod "keystone-1afa-account-create-update-tmdn7" (UID: "71795b1b-e445-456a-aa68-e410bdaccc73") : failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.568301 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/abd1fde8-57bd-4248-a061-6ddb436501c2-config-data-default\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.568317 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vdnzt\" (UniqueName: \"kubernetes.io/projected/abd1fde8-57bd-4248-a061-6ddb436501c2-kube-api-access-vdnzt\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.568330 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/abd1fde8-57bd-4248-a061-6ddb436501c2-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.568633 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/abd1fde8-57bd-4248-a061-6ddb436501c2-config-data-generated\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:08 crc kubenswrapper[4558]: E0120 17:58:08.568913 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[kube-api-access-62mvs operator-scripts], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack-kuttl-tests/keystone-1afa-account-create-update-tmdn7" podUID="71795b1b-e445-456a-aa68-e410bdaccc73" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.622551 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/abd1fde8-57bd-4248-a061-6ddb436501c2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "abd1fde8-57bd-4248-a061-6ddb436501c2" (UID: 
"abd1fde8-57bd-4248-a061-6ddb436501c2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.623109 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1481b06c-22ef-4b33-b49c-2c5d6903cbee-config-data" (OuterVolumeSpecName: "config-data") pod "1481b06c-22ef-4b33-b49c-2c5d6903cbee" (UID: "1481b06c-22ef-4b33-b49c-2c5d6903cbee"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.631584 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage20-crc" (UniqueName: "kubernetes.io/local-volume/local-storage20-crc") on node "crc" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.633410 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/abd1fde8-57bd-4248-a061-6ddb436501c2-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "abd1fde8-57bd-4248-a061-6ddb436501c2" (UID: "abd1fde8-57bd-4248-a061-6ddb436501c2"). InnerVolumeSpecName "galera-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.663493 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="17f6b80e-1961-4c44-a979-9c23bdd59837" path="/var/lib/kubelet/pods/17f6b80e-1961-4c44-a979-9c23bdd59837/volumes" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.664112 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1e64325d-9808-497f-8889-6658129c5da8" path="/var/lib/kubelet/pods/1e64325d-9808-497f-8889-6658129c5da8/volumes" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.664757 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="27e67faf-b924-4c16-b22b-3297bb62a925" path="/var/lib/kubelet/pods/27e67faf-b924-4c16-b22b-3297bb62a925/volumes" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.666048 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="29fef75f-d977-4359-bb82-7fe5807625cc" path="/var/lib/kubelet/pods/29fef75f-d977-4359-bb82-7fe5807625cc/volumes" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.666594 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="30ba9eb2-c009-491e-9b16-a1ce65ca3d98" path="/var/lib/kubelet/pods/30ba9eb2-c009-491e-9b16-a1ce65ca3d98/volumes" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.667500 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3834281a-b947-44ad-9390-a2057e7e902d" path="/var/lib/kubelet/pods/3834281a-b947-44ad-9390-a2057e7e902d/volumes" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.668539 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57d5060e-37c3-45fb-8eb3-14303daa1751" path="/var/lib/kubelet/pods/57d5060e-37c3-45fb-8eb3-14303daa1751/volumes" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.669731 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8a9fc705-3a10-4e6d-8e0a-c1c8dfcb8bc5" path="/var/lib/kubelet/pods/8a9fc705-3a10-4e6d-8e0a-c1c8dfcb8bc5/volumes" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.670434 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d739bf90-bc99-4f8a-870e-98026ba6a53c" path="/var/lib/kubelet/pods/d739bf90-bc99-4f8a-870e-98026ba6a53c/volumes" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.670906 4558 
reconciler_common.go:293] "Volume detached for volume \"local-storage20-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage20-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.671195 4558 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/abd1fde8-57bd-4248-a061-6ddb436501c2-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.671266 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1481b06c-22ef-4b33-b49c-2c5d6903cbee-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.671322 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/abd1fde8-57bd-4248-a061-6ddb436501c2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.671055 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e28cd4f4-bcf1-4a99-8161-5675a4dc6cb9" path="/var/lib/kubelet/pods/e28cd4f4-bcf1-4a99-8161-5675a4dc6cb9/volumes" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.672742 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-db-create-vfppk"] Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.673451 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-x75ng"] Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.673598 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-94fd-account-create-update-wkrmc"] Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.673664 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-94fd-account-create-update-wkrmc"] Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.676062 4558 scope.go:117] "RemoveContainer" containerID="f83e1e941e2ebb680c9bcf90480fb830a1ce1ccd57989006602331fa43a28a91" Jan 20 17:58:08 crc kubenswrapper[4558]: E0120 17:58:08.676469 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f83e1e941e2ebb680c9bcf90480fb830a1ce1ccd57989006602331fa43a28a91\": container with ID starting with f83e1e941e2ebb680c9bcf90480fb830a1ce1ccd57989006602331fa43a28a91 not found: ID does not exist" containerID="f83e1e941e2ebb680c9bcf90480fb830a1ce1ccd57989006602331fa43a28a91" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.676503 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f83e1e941e2ebb680c9bcf90480fb830a1ce1ccd57989006602331fa43a28a91"} err="failed to get container status \"f83e1e941e2ebb680c9bcf90480fb830a1ce1ccd57989006602331fa43a28a91\": rpc error: code = NotFound desc = could not find container \"f83e1e941e2ebb680c9bcf90480fb830a1ce1ccd57989006602331fa43a28a91\": container with ID starting with f83e1e941e2ebb680c9bcf90480fb830a1ce1ccd57989006602331fa43a28a91 not found: ID does not exist" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.676526 4558 scope.go:117] "RemoveContainer" containerID="b7311156a63404e1bd2f45a67f9cea82ec849d5cf981a05cf6aee748ad8c02bd" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.783635 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/openstack-galera-0" 
podUID="94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6" containerName="galera" containerID="cri-o://7444755b1efeb70ce9df8e374dcb007a995d0c3ace0bc1c4539e47db0dbb9408" gracePeriod=30 Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.855997 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-fde5-account-create-update-zmxrm" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.857316 4558 scope.go:117] "RemoveContainer" containerID="243bfc24492680f9bb1aba6fa6319099535e5f0e5e0b4e27d224151dc38f4ee3" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.880909 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.904295 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-novncproxy-0"] Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.916289 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-c24c-account-create-update-mxf4v"] Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.917879 4558 scope.go:117] "RemoveContainer" containerID="1fe99cb66e9016b46286062b2b8309256638fd5cb58f35b387cebb419822b3bc" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.922797 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/placement-c24c-account-create-update-mxf4v"] Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.930586 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.937287 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/openstack-cell1-galera-0"] Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.949780 4558 scope.go:117] "RemoveContainer" containerID="1fe99cb66e9016b46286062b2b8309256638fd5cb58f35b387cebb419822b3bc" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.949940 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:58:08 crc kubenswrapper[4558]: E0120 17:58:08.951336 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1fe99cb66e9016b46286062b2b8309256638fd5cb58f35b387cebb419822b3bc\": container with ID starting with 1fe99cb66e9016b46286062b2b8309256638fd5cb58f35b387cebb419822b3bc not found: ID does not exist" containerID="1fe99cb66e9016b46286062b2b8309256638fd5cb58f35b387cebb419822b3bc" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.951366 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1fe99cb66e9016b46286062b2b8309256638fd5cb58f35b387cebb419822b3bc"} err="failed to get container status \"1fe99cb66e9016b46286062b2b8309256638fd5cb58f35b387cebb419822b3bc\": rpc error: code = NotFound desc = could not find container \"1fe99cb66e9016b46286062b2b8309256638fd5cb58f35b387cebb419822b3bc\": container with ID starting with 1fe99cb66e9016b46286062b2b8309256638fd5cb58f35b387cebb419822b3bc not found: ID does not exist" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.951493 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-scheduler-0"] Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.965993 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/placement-7c7cc56d94-qptzl" 
podUID="50829e14-401d-4958-9ef7-e2a2ef1f4b32" containerName="placement-log" probeResult="failure" output="Get \"https://10.217.0.121:8778/\": read tcp 10.217.0.2:35258->10.217.0.121:8778: read: connection reset by peer" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.966228 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/placement-7c7cc56d94-qptzl" podUID="50829e14-401d-4958-9ef7-e2a2ef1f4b32" containerName="placement-api" probeResult="failure" output="Get \"https://10.217.0.121:8778/\": read tcp 10.217.0.2:35270->10.217.0.121:8778: read: connection reset by peer" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.980820 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/96cbceb8-1912-4174-8d93-22ab4aa1b152-operator-scripts\") pod \"96cbceb8-1912-4174-8d93-22ab4aa1b152\" (UID: \"96cbceb8-1912-4174-8d93-22ab4aa1b152\") " Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.980860 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-77n4n\" (UniqueName: \"kubernetes.io/projected/96cbceb8-1912-4174-8d93-22ab4aa1b152-kube-api-access-77n4n\") pod \"96cbceb8-1912-4174-8d93-22ab4aa1b152\" (UID: \"96cbceb8-1912-4174-8d93-22ab4aa1b152\") " Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.982199 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/96cbceb8-1912-4174-8d93-22ab4aa1b152-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "96cbceb8-1912-4174-8d93-22ab4aa1b152" (UID: "96cbceb8-1912-4174-8d93-22ab4aa1b152"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.984425 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/96cbceb8-1912-4174-8d93-22ab4aa1b152-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:08 crc kubenswrapper[4558]: I0120 17:58:08.996639 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96cbceb8-1912-4174-8d93-22ab4aa1b152-kube-api-access-77n4n" (OuterVolumeSpecName: "kube-api-access-77n4n") pod "96cbceb8-1912-4174-8d93-22ab4aa1b152" (UID: "96cbceb8-1912-4174-8d93-22ab4aa1b152"). InnerVolumeSpecName "kube-api-access-77n4n". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.057602 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-e4c5-account-create-update-qbwc2" Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.084419 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-5191-account-create-update-cznq6" Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.086189 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/71795b1b-e445-456a-aa68-e410bdaccc73-operator-scripts\") pod \"keystone-1afa-account-create-update-tmdn7\" (UID: \"71795b1b-e445-456a-aa68-e410bdaccc73\") " pod="openstack-kuttl-tests/keystone-1afa-account-create-update-tmdn7" Jan 20 17:58:09 crc kubenswrapper[4558]: E0120 17:58:09.086269 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.086315 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-62mvs\" (UniqueName: \"kubernetes.io/projected/71795b1b-e445-456a-aa68-e410bdaccc73-kube-api-access-62mvs\") pod \"keystone-1afa-account-create-update-tmdn7\" (UID: \"71795b1b-e445-456a-aa68-e410bdaccc73\") " pod="openstack-kuttl-tests/keystone-1afa-account-create-update-tmdn7" Jan 20 17:58:09 crc kubenswrapper[4558]: E0120 17:58:09.086343 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/71795b1b-e445-456a-aa68-e410bdaccc73-operator-scripts podName:71795b1b-e445-456a-aa68-e410bdaccc73 nodeName:}" failed. No retries permitted until 2026-01-20 17:58:10.086325964 +0000 UTC m=+4583.846663931 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/71795b1b-e445-456a-aa68-e410bdaccc73-operator-scripts") pod "keystone-1afa-account-create-update-tmdn7" (UID: "71795b1b-e445-456a-aa68-e410bdaccc73") : configmap "openstack-scripts" not found Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.086433 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-77n4n\" (UniqueName: \"kubernetes.io/projected/96cbceb8-1912-4174-8d93-22ab4aa1b152-kube-api-access-77n4n\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.086691 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-42d5-account-create-update-6wcjv" Jan 20 17:58:09 crc kubenswrapper[4558]: E0120 17:58:09.092687 4558 projected.go:194] Error preparing data for projected volume kube-api-access-62mvs for pod openstack-kuttl-tests/keystone-1afa-account-create-update-tmdn7: failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 17:58:09 crc kubenswrapper[4558]: E0120 17:58:09.092817 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/71795b1b-e445-456a-aa68-e410bdaccc73-kube-api-access-62mvs podName:71795b1b-e445-456a-aa68-e410bdaccc73 nodeName:}" failed. No retries permitted until 2026-01-20 17:58:10.092801727 +0000 UTC m=+4583.853139694 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-62mvs" (UniqueName: "kubernetes.io/projected/71795b1b-e445-456a-aa68-e410bdaccc73-kube-api-access-62mvs") pod "keystone-1afa-account-create-update-tmdn7" (UID: "71795b1b-e445-456a-aa68-e410bdaccc73") : failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.187360 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bczvc\" (UniqueName: \"kubernetes.io/projected/5dfca7b5-dd95-4da3-a088-de4d48ea8c10-kube-api-access-bczvc\") pod \"5dfca7b5-dd95-4da3-a088-de4d48ea8c10\" (UID: \"5dfca7b5-dd95-4da3-a088-de4d48ea8c10\") " Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.187411 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzgmg\" (UniqueName: \"kubernetes.io/projected/c9479a19-1b1e-4311-97f5-8922e8602b18-kube-api-access-nzgmg\") pod \"c9479a19-1b1e-4311-97f5-8922e8602b18\" (UID: \"c9479a19-1b1e-4311-97f5-8922e8602b18\") " Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.187436 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rgmnl\" (UniqueName: \"kubernetes.io/projected/8c1a8bdd-f084-4e64-a651-91e76ad6f7d1-kube-api-access-rgmnl\") pod \"8c1a8bdd-f084-4e64-a651-91e76ad6f7d1\" (UID: \"8c1a8bdd-f084-4e64-a651-91e76ad6f7d1\") " Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.187544 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c9479a19-1b1e-4311-97f5-8922e8602b18-operator-scripts\") pod \"c9479a19-1b1e-4311-97f5-8922e8602b18\" (UID: \"c9479a19-1b1e-4311-97f5-8922e8602b18\") " Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.187599 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5dfca7b5-dd95-4da3-a088-de4d48ea8c10-operator-scripts\") pod \"5dfca7b5-dd95-4da3-a088-de4d48ea8c10\" (UID: \"5dfca7b5-dd95-4da3-a088-de4d48ea8c10\") " Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.187632 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8c1a8bdd-f084-4e64-a651-91e76ad6f7d1-operator-scripts\") pod \"8c1a8bdd-f084-4e64-a651-91e76ad6f7d1\" (UID: \"8c1a8bdd-f084-4e64-a651-91e76ad6f7d1\") " Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.188459 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8c1a8bdd-f084-4e64-a651-91e76ad6f7d1-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "8c1a8bdd-f084-4e64-a651-91e76ad6f7d1" (UID: "8c1a8bdd-f084-4e64-a651-91e76ad6f7d1"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.188845 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c9479a19-1b1e-4311-97f5-8922e8602b18-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c9479a19-1b1e-4311-97f5-8922e8602b18" (UID: "c9479a19-1b1e-4311-97f5-8922e8602b18"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.190475 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5dfca7b5-dd95-4da3-a088-de4d48ea8c10-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "5dfca7b5-dd95-4da3-a088-de4d48ea8c10" (UID: "5dfca7b5-dd95-4da3-a088-de4d48ea8c10"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.192961 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8c1a8bdd-f084-4e64-a651-91e76ad6f7d1-kube-api-access-rgmnl" (OuterVolumeSpecName: "kube-api-access-rgmnl") pod "8c1a8bdd-f084-4e64-a651-91e76ad6f7d1" (UID: "8c1a8bdd-f084-4e64-a651-91e76ad6f7d1"). InnerVolumeSpecName "kube-api-access-rgmnl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.193380 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5dfca7b5-dd95-4da3-a088-de4d48ea8c10-kube-api-access-bczvc" (OuterVolumeSpecName: "kube-api-access-bczvc") pod "5dfca7b5-dd95-4da3-a088-de4d48ea8c10" (UID: "5dfca7b5-dd95-4da3-a088-de4d48ea8c10"). InnerVolumeSpecName "kube-api-access-bczvc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.196313 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c9479a19-1b1e-4311-97f5-8922e8602b18-kube-api-access-nzgmg" (OuterVolumeSpecName: "kube-api-access-nzgmg") pod "c9479a19-1b1e-4311-97f5-8922e8602b18" (UID: "c9479a19-1b1e-4311-97f5-8922e8602b18"). InnerVolumeSpecName "kube-api-access-nzgmg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.276538 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-5b5bd9fb5d-vdljd" Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.289761 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bczvc\" (UniqueName: \"kubernetes.io/projected/5dfca7b5-dd95-4da3-a088-de4d48ea8c10-kube-api-access-bczvc\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.289789 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzgmg\" (UniqueName: \"kubernetes.io/projected/c9479a19-1b1e-4311-97f5-8922e8602b18-kube-api-access-nzgmg\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.289799 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rgmnl\" (UniqueName: \"kubernetes.io/projected/8c1a8bdd-f084-4e64-a651-91e76ad6f7d1-kube-api-access-rgmnl\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.289809 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c9479a19-1b1e-4311-97f5-8922e8602b18-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.289817 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5dfca7b5-dd95-4da3-a088-de4d48ea8c10-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.289825 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8c1a8bdd-f084-4e64-a651-91e76ad6f7d1-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:09 crc kubenswrapper[4558]: E0120 17:58:09.289904 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-keystone-internal-svc: secret "cert-keystone-internal-svc" not found Jan 20 17:58:09 crc kubenswrapper[4558]: E0120 17:58:09.289946 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/747b1096-c84a-43e1-930e-9f887a42c524-internal-tls-certs podName:747b1096-c84a-43e1-930e-9f887a42c524 nodeName:}" failed. No retries permitted until 2026-01-20 17:58:11.289932391 +0000 UTC m=+4585.050270359 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/747b1096-c84a-43e1-930e-9f887a42c524-internal-tls-certs") pod "keystone-747b5668bc-jgcd6" (UID: "747b1096-c84a-43e1-930e-9f887a42c524") : secret "cert-keystone-internal-svc" not found Jan 20 17:58:09 crc kubenswrapper[4558]: E0120 17:58:09.290269 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-keystone-public-svc: secret "cert-keystone-public-svc" not found Jan 20 17:58:09 crc kubenswrapper[4558]: E0120 17:58:09.290296 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/747b1096-c84a-43e1-930e-9f887a42c524-public-tls-certs podName:747b1096-c84a-43e1-930e-9f887a42c524 nodeName:}" failed. No retries permitted until 2026-01-20 17:58:11.290288962 +0000 UTC m=+4585.050626929 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/747b1096-c84a-43e1-930e-9f887a42c524-public-tls-certs") pod "keystone-747b5668bc-jgcd6" (UID: "747b1096-c84a-43e1-930e-9f887a42c524") : secret "cert-keystone-public-svc" not found Jan 20 17:58:09 crc kubenswrapper[4558]: E0120 17:58:09.290327 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:58:09 crc kubenswrapper[4558]: E0120 17:58:09.290345 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/747b1096-c84a-43e1-930e-9f887a42c524-combined-ca-bundle podName:747b1096-c84a-43e1-930e-9f887a42c524 nodeName:}" failed. No retries permitted until 2026-01-20 17:58:11.290339456 +0000 UTC m=+4585.050677423 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/747b1096-c84a-43e1-930e-9f887a42c524-combined-ca-bundle") pod "keystone-747b5668bc-jgcd6" (UID: "747b1096-c84a-43e1-930e-9f887a42c524") : secret "combined-ca-bundle" not found Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.294682 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-58949d94f4-gxkkc" Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.300887 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-proxy-77d4fcb95d-7jsvg" Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.391151 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/45d51516-cc3a-473c-bb88-e82d290d36ca-httpd-config\") pod \"45d51516-cc3a-473c-bb88-e82d290d36ca\" (UID: \"45d51516-cc3a-473c-bb88-e82d290d36ca\") " Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.391653 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rhxbj\" (UniqueName: \"kubernetes.io/projected/45d51516-cc3a-473c-bb88-e82d290d36ca-kube-api-access-rhxbj\") pod \"45d51516-cc3a-473c-bb88-e82d290d36ca\" (UID: \"45d51516-cc3a-473c-bb88-e82d290d36ca\") " Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.391771 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2c80b87d-f7d5-46dd-a6d9-bba3e82e405b-public-tls-certs\") pod \"2c80b87d-f7d5-46dd-a6d9-bba3e82e405b\" (UID: \"2c80b87d-f7d5-46dd-a6d9-bba3e82e405b\") " Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.391887 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dgrmm\" (UniqueName: \"kubernetes.io/projected/2c80b87d-f7d5-46dd-a6d9-bba3e82e405b-kube-api-access-dgrmm\") pod \"2c80b87d-f7d5-46dd-a6d9-bba3e82e405b\" (UID: \"2c80b87d-f7d5-46dd-a6d9-bba3e82e405b\") " Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.391992 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c80b87d-f7d5-46dd-a6d9-bba3e82e405b-combined-ca-bundle\") pod \"2c80b87d-f7d5-46dd-a6d9-bba3e82e405b\" (UID: \"2c80b87d-f7d5-46dd-a6d9-bba3e82e405b\") " Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.392069 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/secret/45d51516-cc3a-473c-bb88-e82d290d36ca-config\") pod \"45d51516-cc3a-473c-bb88-e82d290d36ca\" (UID: \"45d51516-cc3a-473c-bb88-e82d290d36ca\") " Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.392147 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2bqq5\" (UniqueName: \"kubernetes.io/projected/a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91-kube-api-access-2bqq5\") pod \"a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91\" (UID: \"a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91\") " Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.392244 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2c80b87d-f7d5-46dd-a6d9-bba3e82e405b-run-httpd\") pod \"2c80b87d-f7d5-46dd-a6d9-bba3e82e405b\" (UID: \"2c80b87d-f7d5-46dd-a6d9-bba3e82e405b\") " Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.392314 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/45d51516-cc3a-473c-bb88-e82d290d36ca-internal-tls-certs\") pod \"45d51516-cc3a-473c-bb88-e82d290d36ca\" (UID: \"45d51516-cc3a-473c-bb88-e82d290d36ca\") " Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.392402 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91-config-data\") pod \"a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91\" (UID: \"a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91\") " Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.392474 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91-combined-ca-bundle\") pod \"a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91\" (UID: \"a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91\") " Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.392546 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2c80b87d-f7d5-46dd-a6d9-bba3e82e405b-log-httpd\") pod \"2c80b87d-f7d5-46dd-a6d9-bba3e82e405b\" (UID: \"2c80b87d-f7d5-46dd-a6d9-bba3e82e405b\") " Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.392971 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/45d51516-cc3a-473c-bb88-e82d290d36ca-public-tls-certs\") pod \"45d51516-cc3a-473c-bb88-e82d290d36ca\" (UID: \"45d51516-cc3a-473c-bb88-e82d290d36ca\") " Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.393133 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2c80b87d-f7d5-46dd-a6d9-bba3e82e405b-config-data\") pod \"2c80b87d-f7d5-46dd-a6d9-bba3e82e405b\" (UID: \"2c80b87d-f7d5-46dd-a6d9-bba3e82e405b\") " Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.393228 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/2c80b87d-f7d5-46dd-a6d9-bba3e82e405b-etc-swift\") pod \"2c80b87d-f7d5-46dd-a6d9-bba3e82e405b\" (UID: \"2c80b87d-f7d5-46dd-a6d9-bba3e82e405b\") " Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.393330 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/45d51516-cc3a-473c-bb88-e82d290d36ca-combined-ca-bundle\") pod \"45d51516-cc3a-473c-bb88-e82d290d36ca\" (UID: \"45d51516-cc3a-473c-bb88-e82d290d36ca\") " Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.393408 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2c80b87d-f7d5-46dd-a6d9-bba3e82e405b-internal-tls-certs\") pod \"2c80b87d-f7d5-46dd-a6d9-bba3e82e405b\" (UID: \"2c80b87d-f7d5-46dd-a6d9-bba3e82e405b\") " Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.393477 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/45d51516-cc3a-473c-bb88-e82d290d36ca-ovndb-tls-certs\") pod \"45d51516-cc3a-473c-bb88-e82d290d36ca\" (UID: \"45d51516-cc3a-473c-bb88-e82d290d36ca\") " Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.393539 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91-config-data-custom\") pod \"a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91\" (UID: \"a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91\") " Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.393627 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91-logs\") pod \"a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91\" (UID: \"a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91\") " Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.394428 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2c80b87d-f7d5-46dd-a6d9-bba3e82e405b-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "2c80b87d-f7d5-46dd-a6d9-bba3e82e405b" (UID: "2c80b87d-f7d5-46dd-a6d9-bba3e82e405b"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.395699 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2c80b87d-f7d5-46dd-a6d9-bba3e82e405b-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.397433 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91-logs" (OuterVolumeSpecName: "logs") pod "a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91" (UID: "a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.397499 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2c80b87d-f7d5-46dd-a6d9-bba3e82e405b-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "2c80b87d-f7d5-46dd-a6d9-bba3e82e405b" (UID: "2c80b87d-f7d5-46dd-a6d9-bba3e82e405b"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.491520 4558 generic.go:334] "Generic (PLEG): container finished" podID="a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91" containerID="210a0812578a1d65ec60d4f022b3ab7e40e130626b3b2c29f678571f3bef403d" exitCode=0 Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.491587 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-58949d94f4-gxkkc" event={"ID":"a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91","Type":"ContainerDied","Data":"210a0812578a1d65ec60d4f022b3ab7e40e130626b3b2c29f678571f3bef403d"} Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.491622 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-keystone-listener-58949d94f4-gxkkc" event={"ID":"a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91","Type":"ContainerDied","Data":"50a54226f35b4acc596685311008253d6eb55cfd5e9b3ba67a818581a389d9a2"} Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.491641 4558 scope.go:117] "RemoveContainer" containerID="210a0812578a1d65ec60d4f022b3ab7e40e130626b3b2c29f678571f3bef403d" Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.491739 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-keystone-listener-58949d94f4-gxkkc" Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.497868 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91-kube-api-access-2bqq5" (OuterVolumeSpecName: "kube-api-access-2bqq5") pod "a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91" (UID: "a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91"). InnerVolumeSpecName "kube-api-access-2bqq5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.497911 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-proxy-77d4fcb95d-7jsvg" Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.497965 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/2c80b87d-f7d5-46dd-a6d9-bba3e82e405b-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.498114 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-77d4fcb95d-7jsvg" event={"ID":"2c80b87d-f7d5-46dd-a6d9-bba3e82e405b","Type":"ContainerDied","Data":"a6dd9f88121dd9a74ad7da43d7ab6d29f99cd801b82eb031635bd0e0e30304e3"} Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.499104 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.503144 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91" (UID: "a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.504382 4558 generic.go:334] "Generic (PLEG): container finished" podID="2c80b87d-f7d5-46dd-a6d9-bba3e82e405b" containerID="a6dd9f88121dd9a74ad7da43d7ab6d29f99cd801b82eb031635bd0e0e30304e3" exitCode=0 Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.503459 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2c80b87d-f7d5-46dd-a6d9-bba3e82e405b-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "2c80b87d-f7d5-46dd-a6d9-bba3e82e405b" (UID: "2c80b87d-f7d5-46dd-a6d9-bba3e82e405b"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.504217 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2c80b87d-f7d5-46dd-a6d9-bba3e82e405b-kube-api-access-dgrmm" (OuterVolumeSpecName: "kube-api-access-dgrmm") pod "2c80b87d-f7d5-46dd-a6d9-bba3e82e405b" (UID: "2c80b87d-f7d5-46dd-a6d9-bba3e82e405b"). InnerVolumeSpecName "kube-api-access-dgrmm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.507005 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/45d51516-cc3a-473c-bb88-e82d290d36ca-kube-api-access-rhxbj" (OuterVolumeSpecName: "kube-api-access-rhxbj") pod "45d51516-cc3a-473c-bb88-e82d290d36ca" (UID: "45d51516-cc3a-473c-bb88-e82d290d36ca"). InnerVolumeSpecName "kube-api-access-rhxbj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.510471 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/45d51516-cc3a-473c-bb88-e82d290d36ca-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "45d51516-cc3a-473c-bb88-e82d290d36ca" (UID: "45d51516-cc3a-473c-bb88-e82d290d36ca"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.511103 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-proxy-77d4fcb95d-7jsvg" event={"ID":"2c80b87d-f7d5-46dd-a6d9-bba3e82e405b","Type":"ContainerDied","Data":"47a75f1a8d8f8184c94d7b506aa4c769c07364acea356bd91f6ce11c9f175376"} Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.513149 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-fde5-account-create-update-zmxrm" event={"ID":"96cbceb8-1912-4174-8d93-22ab4aa1b152","Type":"ContainerDied","Data":"28ff192c11ce555aa1f1e05d999db27db9b987b9245a1db6ec7dbee05afcdcdd"} Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.513356 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-fde5-account-create-update-zmxrm" Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.516780 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/cinder-api-0" podUID="0b0c3280-9b26-4db4-ab16-68016b055a1f" containerName="cinder-api" probeResult="failure" output="Get \"https://10.217.0.128:8776/healthcheck\": read tcp 10.217.0.2:48048->10.217.0.128:8776: read: connection reset by peer" Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.518649 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-5191-account-create-update-cznq6" Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.518648 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-5191-account-create-update-cznq6" event={"ID":"8c1a8bdd-f084-4e64-a651-91e76ad6f7d1","Type":"ContainerDied","Data":"9596b87dc4ac15dd166842b4198c0c0596e7023fbee864ced700714abb6929e7"} Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.522359 4558 generic.go:334] "Generic (PLEG): container finished" podID="8f5c27dd-2365-49ee-b4bd-b46e952199f7" containerID="bf44882c187617ca00e2141f2d59bb3077ec0bd79f8fd782d703007b103558bc" exitCode=0 Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.522422 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"8f5c27dd-2365-49ee-b4bd-b46e952199f7","Type":"ContainerDied","Data":"bf44882c187617ca00e2141f2d59bb3077ec0bd79f8fd782d703007b103558bc"} Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.528757 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-l7p2w" event={"ID":"314a7e85-558d-49d5-b281-52342e3e4c01","Type":"ContainerStarted","Data":"fc055d0cca31052c4ea70b2f03c41fa7f8864175aa9928a543f46fd5c571ca9b"} Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.528818 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-l7p2w" Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.536615 4558 generic.go:334] "Generic (PLEG): container finished" podID="75a8c03c-5b0a-4144-b1a8-293ce0d50739" containerID="cdef0a456ecd1cc7d8ef71a2f399391ccf5a26e7ba80fce2ee22ed4cc27a792a" exitCode=1 Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.536652 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-x75ng" event={"ID":"75a8c03c-5b0a-4144-b1a8-293ce0d50739","Type":"ContainerDied","Data":"cdef0a456ecd1cc7d8ef71a2f399391ccf5a26e7ba80fce2ee22ed4cc27a792a"} Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.536957 4558 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." 
pod="openstack-kuttl-tests/root-account-create-update-x75ng" secret="" err="secret \"galera-openstack-dockercfg-nqwzz\" not found" Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.537011 4558 scope.go:117] "RemoveContainer" containerID="cdef0a456ecd1cc7d8ef71a2f399391ccf5a26e7ba80fce2ee22ed4cc27a792a" Jan 20 17:58:09 crc kubenswrapper[4558]: E0120 17:58:09.537275 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-create-update\" with CrashLoopBackOff: \"back-off 10s restarting failed container=mariadb-account-create-update pod=root-account-create-update-x75ng_openstack-kuttl-tests(75a8c03c-5b0a-4144-b1a8-293ce0d50739)\"" pod="openstack-kuttl-tests/root-account-create-update-x75ng" podUID="75a8c03c-5b0a-4144-b1a8-293ce0d50739" Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.540009 4558 generic.go:334] "Generic (PLEG): container finished" podID="50829e14-401d-4958-9ef7-e2a2ef1f4b32" containerID="f43ceb14bd50b1f492df9772660a94cd37c5a26e2941454f5b5fa401d0851c00" exitCode=0 Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.540063 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-7c7cc56d94-qptzl" event={"ID":"50829e14-401d-4958-9ef7-e2a2ef1f4b32","Type":"ContainerDied","Data":"f43ceb14bd50b1f492df9772660a94cd37c5a26e2941454f5b5fa401d0851c00"} Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.546041 4558 generic.go:334] "Generic (PLEG): container finished" podID="7dcf074e-8444-4bd2-b1f9-143390e96ef8" containerID="4a4fc0e35d400b1717dbd88e9cb076f22b20999b3e1867032e08446fa50f658e" exitCode=0 Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.546086 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"7dcf074e-8444-4bd2-b1f9-143390e96ef8","Type":"ContainerDied","Data":"4a4fc0e35d400b1717dbd88e9cb076f22b20999b3e1867032e08446fa50f658e"} Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.547441 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-l7p2w" podStartSLOduration=4.547429769 podStartE2EDuration="4.547429769s" podCreationTimestamp="2026-01-20 17:58:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:58:09.544737028 +0000 UTC m=+4583.305074994" watchObservedRunningTime="2026-01-20 17:58:09.547429769 +0000 UTC m=+4583.307767736" Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.554235 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-api-42d5-account-create-update-6wcjv" Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.551697 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-42d5-account-create-update-6wcjv" event={"ID":"c9479a19-1b1e-4311-97f5-8922e8602b18","Type":"ContainerDied","Data":"8800283c27e298292eeb77f2cff240f0b8adf5a4492206a13ae1e3dfd046d71d"} Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.563130 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/nova-metadata-0" podUID="c8f11f45-bfce-4989-bfa0-684011f74619" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.163:8775/\": read tcp 10.217.0.2:34620->10.217.0.163:8775: read: connection reset by peer" Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.567260 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/nova-metadata-0" podUID="c8f11f45-bfce-4989-bfa0-684011f74619" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.163:8775/\": read tcp 10.217.0.2:34636->10.217.0.163:8775: read: connection reset by peer" Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.574514 4558 generic.go:334] "Generic (PLEG): container finished" podID="45d51516-cc3a-473c-bb88-e82d290d36ca" containerID="dfd1e35931513664c3c64c46a6882405ae46386429f436dbbda7a5b30ca884e7" exitCode=0 Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.574814 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-5b5bd9fb5d-vdljd" event={"ID":"45d51516-cc3a-473c-bb88-e82d290d36ca","Type":"ContainerDied","Data":"dfd1e35931513664c3c64c46a6882405ae46386429f436dbbda7a5b30ca884e7"} Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.574911 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-5b5bd9fb5d-vdljd" event={"ID":"45d51516-cc3a-473c-bb88-e82d290d36ca","Type":"ContainerDied","Data":"8b60873bef0f992793dd3bbba55205c79d9a815e1ab68a451ba5722f728dc4f2"} Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.578473 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-5b5bd9fb5d-vdljd" Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.594842 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91" (UID: "a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.599808 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-1afa-account-create-update-tmdn7" Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.601377 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-e4c5-account-create-update-qbwc2" Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.602231 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-e4c5-account-create-update-qbwc2" event={"ID":"5dfca7b5-dd95-4da3-a088-de4d48ea8c10","Type":"ContainerDied","Data":"8ee7bec436f0787f4acda3d0400fa14f5957655a213e44c8e77c009c00f0a2b7"} Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.607959 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/45d51516-cc3a-473c-bb88-e82d290d36ca-config" (OuterVolumeSpecName: "config") pod "45d51516-cc3a-473c-bb88-e82d290d36ca" (UID: "45d51516-cc3a-473c-bb88-e82d290d36ca"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.608060 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c80b87d-f7d5-46dd-a6d9-bba3e82e405b-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "2c80b87d-f7d5-46dd-a6d9-bba3e82e405b" (UID: "2c80b87d-f7d5-46dd-a6d9-bba3e82e405b"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.614734 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c80b87d-f7d5-46dd-a6d9-bba3e82e405b-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "2c80b87d-f7d5-46dd-a6d9-bba3e82e405b" (UID: "2c80b87d-f7d5-46dd-a6d9-bba3e82e405b"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.615005 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2c80b87d-f7d5-46dd-a6d9-bba3e82e405b-internal-tls-certs\") pod \"2c80b87d-f7d5-46dd-a6d9-bba3e82e405b\" (UID: \"2c80b87d-f7d5-46dd-a6d9-bba3e82e405b\") " Jan 20 17:58:09 crc kubenswrapper[4558]: W0120 17:58:09.615307 4558 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/2c80b87d-f7d5-46dd-a6d9-bba3e82e405b/volumes/kubernetes.io~secret/internal-tls-certs Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.615337 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c80b87d-f7d5-46dd-a6d9-bba3e82e405b-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "2c80b87d-f7d5-46dd-a6d9-bba3e82e405b" (UID: "2c80b87d-f7d5-46dd-a6d9-bba3e82e405b"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.619227 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2c80b87d-f7d5-46dd-a6d9-bba3e82e405b-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.619268 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.619288 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/45d51516-cc3a-473c-bb88-e82d290d36ca-httpd-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.619308 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rhxbj\" (UniqueName: \"kubernetes.io/projected/45d51516-cc3a-473c-bb88-e82d290d36ca-kube-api-access-rhxbj\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.619306 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c80b87d-f7d5-46dd-a6d9-bba3e82e405b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2c80b87d-f7d5-46dd-a6d9-bba3e82e405b" (UID: "2c80b87d-f7d5-46dd-a6d9-bba3e82e405b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.619325 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2c80b87d-f7d5-46dd-a6d9-bba3e82e405b-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.619338 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dgrmm\" (UniqueName: \"kubernetes.io/projected/2c80b87d-f7d5-46dd-a6d9-bba3e82e405b-kube-api-access-dgrmm\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.619356 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/45d51516-cc3a-473c-bb88-e82d290d36ca-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.619370 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2bqq5\" (UniqueName: \"kubernetes.io/projected/a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91-kube-api-access-2bqq5\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.619383 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.619395 4558 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/2c80b87d-f7d5-46dd-a6d9-bba3e82e405b-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.622695 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/45d51516-cc3a-473c-bb88-e82d290d36ca-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "45d51516-cc3a-473c-bb88-e82d290d36ca" (UID: 
"45d51516-cc3a-473c-bb88-e82d290d36ca"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.627406 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91-config-data" (OuterVolumeSpecName: "config-data") pod "a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91" (UID: "a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.640371 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c80b87d-f7d5-46dd-a6d9-bba3e82e405b-config-data" (OuterVolumeSpecName: "config-data") pod "2c80b87d-f7d5-46dd-a6d9-bba3e82e405b" (UID: "2c80b87d-f7d5-46dd-a6d9-bba3e82e405b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.652361 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/45d51516-cc3a-473c-bb88-e82d290d36ca-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "45d51516-cc3a-473c-bb88-e82d290d36ca" (UID: "45d51516-cc3a-473c-bb88-e82d290d36ca"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.657764 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/45d51516-cc3a-473c-bb88-e82d290d36ca-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "45d51516-cc3a-473c-bb88-e82d290d36ca" (UID: "45d51516-cc3a-473c-bb88-e82d290d36ca"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.682376 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/45d51516-cc3a-473c-bb88-e82d290d36ca-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "45d51516-cc3a-473c-bb88-e82d290d36ca" (UID: "45d51516-cc3a-473c-bb88-e82d290d36ca"). InnerVolumeSpecName "ovndb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:09 crc kubenswrapper[4558]: E0120 17:58:09.721350 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.721995 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c80b87d-f7d5-46dd-a6d9-bba3e82e405b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.722029 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/45d51516-cc3a-473c-bb88-e82d290d36ca-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.722041 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.722087 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/45d51516-cc3a-473c-bb88-e82d290d36ca-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.722111 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2c80b87d-f7d5-46dd-a6d9-bba3e82e405b-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.722125 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45d51516-cc3a-473c-bb88-e82d290d36ca-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.722136 4558 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/45d51516-cc3a-473c-bb88-e82d290d36ca-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:09 crc kubenswrapper[4558]: E0120 17:58:09.722387 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/75a8c03c-5b0a-4144-b1a8-293ce0d50739-operator-scripts podName:75a8c03c-5b0a-4144-b1a8-293ce0d50739 nodeName:}" failed. No retries permitted until 2026-01-20 17:58:10.222362827 +0000 UTC m=+4583.982700795 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/75a8c03c-5b0a-4144-b1a8-293ce0d50739-operator-scripts") pod "root-account-create-update-x75ng" (UID: "75a8c03c-5b0a-4144-b1a8-293ce0d50739") : configmap "openstack-scripts" not found Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.916341 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-7c7cc56d94-qptzl" Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.936368 4558 scope.go:117] "RemoveContainer" containerID="ca4ecb7fa4f5c23eb30d668f5754a6906c2e344a7fe9c1284b2323148a7db0a4" Jan 20 17:58:09 crc kubenswrapper[4558]: I0120 17:58:09.941817 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-1afa-account-create-update-tmdn7" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.029429 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/50829e14-401d-4958-9ef7-e2a2ef1f4b32-public-tls-certs\") pod \"50829e14-401d-4958-9ef7-e2a2ef1f4b32\" (UID: \"50829e14-401d-4958-9ef7-e2a2ef1f4b32\") " Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.029487 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50829e14-401d-4958-9ef7-e2a2ef1f4b32-config-data\") pod \"50829e14-401d-4958-9ef7-e2a2ef1f4b32\" (UID: \"50829e14-401d-4958-9ef7-e2a2ef1f4b32\") " Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.029613 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/50829e14-401d-4958-9ef7-e2a2ef1f4b32-logs\") pod \"50829e14-401d-4958-9ef7-e2a2ef1f4b32\" (UID: \"50829e14-401d-4958-9ef7-e2a2ef1f4b32\") " Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.029705 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/50829e14-401d-4958-9ef7-e2a2ef1f4b32-internal-tls-certs\") pod \"50829e14-401d-4958-9ef7-e2a2ef1f4b32\" (UID: \"50829e14-401d-4958-9ef7-e2a2ef1f4b32\") " Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.029820 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50829e14-401d-4958-9ef7-e2a2ef1f4b32-combined-ca-bundle\") pod \"50829e14-401d-4958-9ef7-e2a2ef1f4b32\" (UID: \"50829e14-401d-4958-9ef7-e2a2ef1f4b32\") " Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.029932 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/50829e14-401d-4958-9ef7-e2a2ef1f4b32-scripts\") pod \"50829e14-401d-4958-9ef7-e2a2ef1f4b32\" (UID: \"50829e14-401d-4958-9ef7-e2a2ef1f4b32\") " Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.029962 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wtcfn\" (UniqueName: \"kubernetes.io/projected/50829e14-401d-4958-9ef7-e2a2ef1f4b32-kube-api-access-wtcfn\") pod \"50829e14-401d-4958-9ef7-e2a2ef1f4b32\" (UID: \"50829e14-401d-4958-9ef7-e2a2ef1f4b32\") " Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.030732 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/50829e14-401d-4958-9ef7-e2a2ef1f4b32-logs" (OuterVolumeSpecName: "logs") pod "50829e14-401d-4958-9ef7-e2a2ef1f4b32" (UID: "50829e14-401d-4958-9ef7-e2a2ef1f4b32"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.032862 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/50829e14-401d-4958-9ef7-e2a2ef1f4b32-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.051898 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/50829e14-401d-4958-9ef7-e2a2ef1f4b32-kube-api-access-wtcfn" (OuterVolumeSpecName: "kube-api-access-wtcfn") pod "50829e14-401d-4958-9ef7-e2a2ef1f4b32" (UID: "50829e14-401d-4958-9ef7-e2a2ef1f4b32"). InnerVolumeSpecName "kube-api-access-wtcfn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.055287 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/50829e14-401d-4958-9ef7-e2a2ef1f4b32-scripts" (OuterVolumeSpecName: "scripts") pod "50829e14-401d-4958-9ef7-e2a2ef1f4b32" (UID: "50829e14-401d-4958-9ef7-e2a2ef1f4b32"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.085454 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/50829e14-401d-4958-9ef7-e2a2ef1f4b32-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "50829e14-401d-4958-9ef7-e2a2ef1f4b32" (UID: "50829e14-401d-4958-9ef7-e2a2ef1f4b32"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.125965 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.128540 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-42d5-account-create-update-6wcjv"] Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.131406 4558 scope.go:117] "RemoveContainer" containerID="210a0812578a1d65ec60d4f022b3ab7e40e130626b3b2c29f678571f3bef403d" Jan 20 17:58:10 crc kubenswrapper[4558]: E0120 17:58:10.131891 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"210a0812578a1d65ec60d4f022b3ab7e40e130626b3b2c29f678571f3bef403d\": container with ID starting with 210a0812578a1d65ec60d4f022b3ab7e40e130626b3b2c29f678571f3bef403d not found: ID does not exist" containerID="210a0812578a1d65ec60d4f022b3ab7e40e130626b3b2c29f678571f3bef403d" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.131996 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"210a0812578a1d65ec60d4f022b3ab7e40e130626b3b2c29f678571f3bef403d"} err="failed to get container status \"210a0812578a1d65ec60d4f022b3ab7e40e130626b3b2c29f678571f3bef403d\": rpc error: code = NotFound desc = could not find container \"210a0812578a1d65ec60d4f022b3ab7e40e130626b3b2c29f678571f3bef403d\": container with ID starting with 210a0812578a1d65ec60d4f022b3ab7e40e130626b3b2c29f678571f3bef403d not found: ID does not exist" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.132082 4558 scope.go:117] "RemoveContainer" containerID="ca4ecb7fa4f5c23eb30d668f5754a6906c2e344a7fe9c1284b2323148a7db0a4" Jan 20 17:58:10 crc kubenswrapper[4558]: E0120 17:58:10.132397 4558 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"ca4ecb7fa4f5c23eb30d668f5754a6906c2e344a7fe9c1284b2323148a7db0a4\": container with ID starting with ca4ecb7fa4f5c23eb30d668f5754a6906c2e344a7fe9c1284b2323148a7db0a4 not found: ID does not exist" containerID="ca4ecb7fa4f5c23eb30d668f5754a6906c2e344a7fe9c1284b2323148a7db0a4" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.132435 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ca4ecb7fa4f5c23eb30d668f5754a6906c2e344a7fe9c1284b2323148a7db0a4"} err="failed to get container status \"ca4ecb7fa4f5c23eb30d668f5754a6906c2e344a7fe9c1284b2323148a7db0a4\": rpc error: code = NotFound desc = could not find container \"ca4ecb7fa4f5c23eb30d668f5754a6906c2e344a7fe9c1284b2323148a7db0a4\": container with ID starting with ca4ecb7fa4f5c23eb30d668f5754a6906c2e344a7fe9c1284b2323148a7db0a4 not found: ID does not exist" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.132465 4558 scope.go:117] "RemoveContainer" containerID="a6dd9f88121dd9a74ad7da43d7ab6d29f99cd801b82eb031635bd0e0e30304e3" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.135994 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/71795b1b-e445-456a-aa68-e410bdaccc73-operator-scripts\") pod \"keystone-1afa-account-create-update-tmdn7\" (UID: \"71795b1b-e445-456a-aa68-e410bdaccc73\") " pod="openstack-kuttl-tests/keystone-1afa-account-create-update-tmdn7" Jan 20 17:58:10 crc kubenswrapper[4558]: E0120 17:58:10.136101 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 17:58:10 crc kubenswrapper[4558]: E0120 17:58:10.136202 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/71795b1b-e445-456a-aa68-e410bdaccc73-operator-scripts podName:71795b1b-e445-456a-aa68-e410bdaccc73 nodeName:}" failed. No retries permitted until 2026-01-20 17:58:12.136179425 +0000 UTC m=+4585.896517391 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/71795b1b-e445-456a-aa68-e410bdaccc73-operator-scripts") pod "keystone-1afa-account-create-update-tmdn7" (UID: "71795b1b-e445-456a-aa68-e410bdaccc73") : configmap "openstack-scripts" not found Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.136378 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-62mvs\" (UniqueName: \"kubernetes.io/projected/71795b1b-e445-456a-aa68-e410bdaccc73-kube-api-access-62mvs\") pod \"keystone-1afa-account-create-update-tmdn7\" (UID: \"71795b1b-e445-456a-aa68-e410bdaccc73\") " pod="openstack-kuttl-tests/keystone-1afa-account-create-update-tmdn7" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.136528 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/50829e14-401d-4958-9ef7-e2a2ef1f4b32-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.136546 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/50829e14-401d-4958-9ef7-e2a2ef1f4b32-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.136559 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wtcfn\" (UniqueName: \"kubernetes.io/projected/50829e14-401d-4958-9ef7-e2a2ef1f4b32-kube-api-access-wtcfn\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.137026 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-42d5-account-create-update-6wcjv"] Jan 20 17:58:10 crc kubenswrapper[4558]: E0120 17:58:10.139473 4558 projected.go:194] Error preparing data for projected volume kube-api-access-62mvs for pod openstack-kuttl-tests/keystone-1afa-account-create-update-tmdn7: failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 17:58:10 crc kubenswrapper[4558]: E0120 17:58:10.139687 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/71795b1b-e445-456a-aa68-e410bdaccc73-kube-api-access-62mvs podName:71795b1b-e445-456a-aa68-e410bdaccc73 nodeName:}" failed. No retries permitted until 2026-01-20 17:58:12.13965097 +0000 UTC m=+4585.899988937 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-62mvs" (UniqueName: "kubernetes.io/projected/71795b1b-e445-456a-aa68-e410bdaccc73-kube-api-access-62mvs") pod "keystone-1afa-account-create-update-tmdn7" (UID: "71795b1b-e445-456a-aa68-e410bdaccc73") : failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.142366 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/50829e14-401d-4958-9ef7-e2a2ef1f4b32-config-data" (OuterVolumeSpecName: "config-data") pod "50829e14-401d-4958-9ef7-e2a2ef1f4b32" (UID: "50829e14-401d-4958-9ef7-e2a2ef1f4b32"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.153783 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-5b5bd9fb5d-vdljd"] Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.160089 4558 scope.go:117] "RemoveContainer" containerID="3ea77410056274873a4720a35fcc92ab9a47e76916a85817eee90b16a86d216d" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.160237 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-5b5bd9fb5d-vdljd"] Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.170668 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-fde5-account-create-update-zmxrm"] Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.177795 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-fde5-account-create-update-zmxrm"] Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.184880 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/50829e14-401d-4958-9ef7-e2a2ef1f4b32-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "50829e14-401d-4958-9ef7-e2a2ef1f4b32" (UID: "50829e14-401d-4958-9ef7-e2a2ef1f4b32"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.188432 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-5191-account-create-update-cznq6"] Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.194381 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell0-5191-account-create-update-cznq6"] Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.200792 4558 scope.go:117] "RemoveContainer" containerID="a6dd9f88121dd9a74ad7da43d7ab6d29f99cd801b82eb031635bd0e0e30304e3" Jan 20 17:58:10 crc kubenswrapper[4558]: E0120 17:58:10.201361 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a6dd9f88121dd9a74ad7da43d7ab6d29f99cd801b82eb031635bd0e0e30304e3\": container with ID starting with a6dd9f88121dd9a74ad7da43d7ab6d29f99cd801b82eb031635bd0e0e30304e3 not found: ID does not exist" containerID="a6dd9f88121dd9a74ad7da43d7ab6d29f99cd801b82eb031635bd0e0e30304e3" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.201404 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a6dd9f88121dd9a74ad7da43d7ab6d29f99cd801b82eb031635bd0e0e30304e3"} err="failed to get container status \"a6dd9f88121dd9a74ad7da43d7ab6d29f99cd801b82eb031635bd0e0e30304e3\": rpc error: code = NotFound desc = could not find container \"a6dd9f88121dd9a74ad7da43d7ab6d29f99cd801b82eb031635bd0e0e30304e3\": container with ID starting with a6dd9f88121dd9a74ad7da43d7ab6d29f99cd801b82eb031635bd0e0e30304e3 not found: ID does not exist" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.201437 4558 scope.go:117] "RemoveContainer" containerID="3ea77410056274873a4720a35fcc92ab9a47e76916a85817eee90b16a86d216d" Jan 20 17:58:10 crc kubenswrapper[4558]: E0120 17:58:10.205078 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3ea77410056274873a4720a35fcc92ab9a47e76916a85817eee90b16a86d216d\": container with ID starting with 3ea77410056274873a4720a35fcc92ab9a47e76916a85817eee90b16a86d216d 
not found: ID does not exist" containerID="3ea77410056274873a4720a35fcc92ab9a47e76916a85817eee90b16a86d216d" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.205100 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3ea77410056274873a4720a35fcc92ab9a47e76916a85817eee90b16a86d216d"} err="failed to get container status \"3ea77410056274873a4720a35fcc92ab9a47e76916a85817eee90b16a86d216d\": rpc error: code = NotFound desc = could not find container \"3ea77410056274873a4720a35fcc92ab9a47e76916a85817eee90b16a86d216d\": container with ID starting with 3ea77410056274873a4720a35fcc92ab9a47e76916a85817eee90b16a86d216d not found: ID does not exist" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.205114 4558 scope.go:117] "RemoveContainer" containerID="a640bc6d0ce6fe28ee110c1a41a4036f89e48c474ea9e34e4f704aff90b74d58" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.221473 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-e4c5-account-create-update-qbwc2"] Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.225594 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.231650 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-e4c5-account-create-update-qbwc2"] Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.237180 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.237471 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8p8dr\" (UniqueName: \"kubernetes.io/projected/7dcf074e-8444-4bd2-b1f9-143390e96ef8-kube-api-access-8p8dr\") pod \"7dcf074e-8444-4bd2-b1f9-143390e96ef8\" (UID: \"7dcf074e-8444-4bd2-b1f9-143390e96ef8\") " Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.237574 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7dcf074e-8444-4bd2-b1f9-143390e96ef8-internal-tls-certs\") pod \"7dcf074e-8444-4bd2-b1f9-143390e96ef8\" (UID: \"7dcf074e-8444-4bd2-b1f9-143390e96ef8\") " Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.237618 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"7dcf074e-8444-4bd2-b1f9-143390e96ef8\" (UID: \"7dcf074e-8444-4bd2-b1f9-143390e96ef8\") " Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.237657 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7dcf074e-8444-4bd2-b1f9-143390e96ef8-combined-ca-bundle\") pod \"7dcf074e-8444-4bd2-b1f9-143390e96ef8\" (UID: \"7dcf074e-8444-4bd2-b1f9-143390e96ef8\") " Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.237691 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7dcf074e-8444-4bd2-b1f9-143390e96ef8-logs\") pod \"7dcf074e-8444-4bd2-b1f9-143390e96ef8\" (UID: \"7dcf074e-8444-4bd2-b1f9-143390e96ef8\") " Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.237771 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/7dcf074e-8444-4bd2-b1f9-143390e96ef8-config-data\") pod \"7dcf074e-8444-4bd2-b1f9-143390e96ef8\" (UID: \"7dcf074e-8444-4bd2-b1f9-143390e96ef8\") " Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.237820 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7dcf074e-8444-4bd2-b1f9-143390e96ef8-httpd-run\") pod \"7dcf074e-8444-4bd2-b1f9-143390e96ef8\" (UID: \"7dcf074e-8444-4bd2-b1f9-143390e96ef8\") " Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.237847 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7dcf074e-8444-4bd2-b1f9-143390e96ef8-scripts\") pod \"7dcf074e-8444-4bd2-b1f9-143390e96ef8\" (UID: \"7dcf074e-8444-4bd2-b1f9-143390e96ef8\") " Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.238553 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7dcf074e-8444-4bd2-b1f9-143390e96ef8-logs" (OuterVolumeSpecName: "logs") pod "7dcf074e-8444-4bd2-b1f9-143390e96ef8" (UID: "7dcf074e-8444-4bd2-b1f9-143390e96ef8"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:58:10 crc kubenswrapper[4558]: E0120 17:58:10.238630 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 17:58:10 crc kubenswrapper[4558]: E0120 17:58:10.238681 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/75a8c03c-5b0a-4144-b1a8-293ce0d50739-operator-scripts podName:75a8c03c-5b0a-4144-b1a8-293ce0d50739 nodeName:}" failed. No retries permitted until 2026-01-20 17:58:11.238665511 +0000 UTC m=+4584.999003478 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/75a8c03c-5b0a-4144-b1a8-293ce0d50739-operator-scripts") pod "root-account-create-update-x75ng" (UID: "75a8c03c-5b0a-4144-b1a8-293ce0d50739") : configmap "openstack-scripts" not found Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.238747 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/50829e14-401d-4958-9ef7-e2a2ef1f4b32-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.238770 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/50829e14-401d-4958-9ef7-e2a2ef1f4b32-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.238785 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7dcf074e-8444-4bd2-b1f9-143390e96ef8-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.239192 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7dcf074e-8444-4bd2-b1f9-143390e96ef8-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "7dcf074e-8444-4bd2-b1f9-143390e96ef8" (UID: "7dcf074e-8444-4bd2-b1f9-143390e96ef8"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.242019 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-proxy-77d4fcb95d-7jsvg"] Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.253564 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7dcf074e-8444-4bd2-b1f9-143390e96ef8-kube-api-access-8p8dr" (OuterVolumeSpecName: "kube-api-access-8p8dr") pod "7dcf074e-8444-4bd2-b1f9-143390e96ef8" (UID: "7dcf074e-8444-4bd2-b1f9-143390e96ef8"). InnerVolumeSpecName "kube-api-access-8p8dr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.254727 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/swift-proxy-77d4fcb95d-7jsvg"] Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.259842 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-58949d94f4-gxkkc"] Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.262569 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "glance") pod "7dcf074e-8444-4bd2-b1f9-143390e96ef8" (UID: "7dcf074e-8444-4bd2-b1f9-143390e96ef8"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.263699 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.266101 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-keystone-listener-58949d94f4-gxkkc"] Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.267470 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7dcf074e-8444-4bd2-b1f9-143390e96ef8-scripts" (OuterVolumeSpecName: "scripts") pod "7dcf074e-8444-4bd2-b1f9-143390e96ef8" (UID: "7dcf074e-8444-4bd2-b1f9-143390e96ef8"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.272561 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/50829e14-401d-4958-9ef7-e2a2ef1f4b32-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "50829e14-401d-4958-9ef7-e2a2ef1f4b32" (UID: "50829e14-401d-4958-9ef7-e2a2ef1f4b32"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.273535 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.276068 4558 scope.go:117] "RemoveContainer" containerID="ecb66a22f0932155a427ab286233ffe4a3ffce78bb66e6be213471c0db633735" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.293543 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7dcf074e-8444-4bd2-b1f9-143390e96ef8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7dcf074e-8444-4bd2-b1f9-143390e96ef8" (UID: "7dcf074e-8444-4bd2-b1f9-143390e96ef8"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.301858 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7dcf074e-8444-4bd2-b1f9-143390e96ef8-config-data" (OuterVolumeSpecName: "config-data") pod "7dcf074e-8444-4bd2-b1f9-143390e96ef8" (UID: "7dcf074e-8444-4bd2-b1f9-143390e96ef8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.307275 4558 scope.go:117] "RemoveContainer" containerID="dfd1e35931513664c3c64c46a6882405ae46386429f436dbbda7a5b30ca884e7" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.316297 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7dcf074e-8444-4bd2-b1f9-143390e96ef8-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "7dcf074e-8444-4bd2-b1f9-143390e96ef8" (UID: "7dcf074e-8444-4bd2-b1f9-143390e96ef8"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.323832 4558 scope.go:117] "RemoveContainer" containerID="ecb66a22f0932155a427ab286233ffe4a3ffce78bb66e6be213471c0db633735" Jan 20 17:58:10 crc kubenswrapper[4558]: E0120 17:58:10.325691 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ecb66a22f0932155a427ab286233ffe4a3ffce78bb66e6be213471c0db633735\": container with ID starting with ecb66a22f0932155a427ab286233ffe4a3ffce78bb66e6be213471c0db633735 not found: ID does not exist" containerID="ecb66a22f0932155a427ab286233ffe4a3ffce78bb66e6be213471c0db633735" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.325731 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ecb66a22f0932155a427ab286233ffe4a3ffce78bb66e6be213471c0db633735"} err="failed to get container status \"ecb66a22f0932155a427ab286233ffe4a3ffce78bb66e6be213471c0db633735\": rpc error: code = NotFound desc = could not find container \"ecb66a22f0932155a427ab286233ffe4a3ffce78bb66e6be213471c0db633735\": container with ID starting with ecb66a22f0932155a427ab286233ffe4a3ffce78bb66e6be213471c0db633735 not found: ID does not exist" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.325763 4558 scope.go:117] "RemoveContainer" containerID="dfd1e35931513664c3c64c46a6882405ae46386429f436dbbda7a5b30ca884e7" Jan 20 17:58:10 crc kubenswrapper[4558]: E0120 17:58:10.326120 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dfd1e35931513664c3c64c46a6882405ae46386429f436dbbda7a5b30ca884e7\": container with ID starting with dfd1e35931513664c3c64c46a6882405ae46386429f436dbbda7a5b30ca884e7 not found: ID does not exist" containerID="dfd1e35931513664c3c64c46a6882405ae46386429f436dbbda7a5b30ca884e7" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.326150 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dfd1e35931513664c3c64c46a6882405ae46386429f436dbbda7a5b30ca884e7"} err="failed to get container status \"dfd1e35931513664c3c64c46a6882405ae46386429f436dbbda7a5b30ca884e7\": rpc error: code = NotFound desc = could not find container \"dfd1e35931513664c3c64c46a6882405ae46386429f436dbbda7a5b30ca884e7\": container with ID starting with dfd1e35931513664c3c64c46a6882405ae46386429f436dbbda7a5b30ca884e7 not 
found: ID does not exist" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.339901 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0b0c3280-9b26-4db4-ab16-68016b055a1f-config-data\") pod \"0b0c3280-9b26-4db4-ab16-68016b055a1f\" (UID: \"0b0c3280-9b26-4db4-ab16-68016b055a1f\") " Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.339962 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6-combined-ca-bundle\") pod \"94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6\" (UID: \"94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6\") " Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.340002 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zxwh2\" (UniqueName: \"kubernetes.io/projected/0b0c3280-9b26-4db4-ab16-68016b055a1f-kube-api-access-zxwh2\") pod \"0b0c3280-9b26-4db4-ab16-68016b055a1f\" (UID: \"0b0c3280-9b26-4db4-ab16-68016b055a1f\") " Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.340034 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c8f11f45-bfce-4989-bfa0-684011f74619-config-data\") pod \"c8f11f45-bfce-4989-bfa0-684011f74619\" (UID: \"c8f11f45-bfce-4989-bfa0-684011f74619\") " Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.340100 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6-operator-scripts\") pod \"94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6\" (UID: \"94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6\") " Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.340195 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6-galera-tls-certs\") pod \"94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6\" (UID: \"94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6\") " Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.340227 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0b0c3280-9b26-4db4-ab16-68016b055a1f-logs\") pod \"0b0c3280-9b26-4db4-ab16-68016b055a1f\" (UID: \"0b0c3280-9b26-4db4-ab16-68016b055a1f\") " Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.340256 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6-config-data-generated\") pod \"94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6\" (UID: \"94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6\") " Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.340301 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fc22v\" (UniqueName: \"kubernetes.io/projected/94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6-kube-api-access-fc22v\") pod \"94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6\" (UID: \"94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6\") " Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.340347 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0b0c3280-9b26-4db4-ab16-68016b055a1f-internal-tls-certs\") pod \"0b0c3280-9b26-4db4-ab16-68016b055a1f\" (UID: 
\"0b0c3280-9b26-4db4-ab16-68016b055a1f\") " Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.340375 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8f11f45-bfce-4989-bfa0-684011f74619-combined-ca-bundle\") pod \"c8f11f45-bfce-4989-bfa0-684011f74619\" (UID: \"c8f11f45-bfce-4989-bfa0-684011f74619\") " Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.340410 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0b0c3280-9b26-4db4-ab16-68016b055a1f-etc-machine-id\") pod \"0b0c3280-9b26-4db4-ab16-68016b055a1f\" (UID: \"0b0c3280-9b26-4db4-ab16-68016b055a1f\") " Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.340430 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0b0c3280-9b26-4db4-ab16-68016b055a1f-scripts\") pod \"0b0c3280-9b26-4db4-ab16-68016b055a1f\" (UID: \"0b0c3280-9b26-4db4-ab16-68016b055a1f\") " Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.340477 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nrm9p\" (UniqueName: \"kubernetes.io/projected/c8f11f45-bfce-4989-bfa0-684011f74619-kube-api-access-nrm9p\") pod \"c8f11f45-bfce-4989-bfa0-684011f74619\" (UID: \"c8f11f45-bfce-4989-bfa0-684011f74619\") " Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.340512 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6-kolla-config\") pod \"94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6\" (UID: \"94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6\") " Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.340543 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6-config-data-default\") pod \"94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6\" (UID: \"94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6\") " Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.340559 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c8f11f45-bfce-4989-bfa0-684011f74619-logs\") pod \"c8f11f45-bfce-4989-bfa0-684011f74619\" (UID: \"c8f11f45-bfce-4989-bfa0-684011f74619\") " Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.340621 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6\" (UID: \"94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6\") " Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.340659 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0b0c3280-9b26-4db4-ab16-68016b055a1f-config-data-custom\") pod \"0b0c3280-9b26-4db4-ab16-68016b055a1f\" (UID: \"0b0c3280-9b26-4db4-ab16-68016b055a1f\") " Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.340681 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0b0c3280-9b26-4db4-ab16-68016b055a1f-public-tls-certs\") pod \"0b0c3280-9b26-4db4-ab16-68016b055a1f\" (UID: \"0b0c3280-9b26-4db4-ab16-68016b055a1f\") " Jan 20 
17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.340722 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b0c3280-9b26-4db4-ab16-68016b055a1f-combined-ca-bundle\") pod \"0b0c3280-9b26-4db4-ab16-68016b055a1f\" (UID: \"0b0c3280-9b26-4db4-ab16-68016b055a1f\") " Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.340746 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/c8f11f45-bfce-4989-bfa0-684011f74619-nova-metadata-tls-certs\") pod \"c8f11f45-bfce-4989-bfa0-684011f74619\" (UID: \"c8f11f45-bfce-4989-bfa0-684011f74619\") " Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.340745 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0b0c3280-9b26-4db4-ab16-68016b055a1f-logs" (OuterVolumeSpecName: "logs") pod "0b0c3280-9b26-4db4-ab16-68016b055a1f" (UID: "0b0c3280-9b26-4db4-ab16-68016b055a1f"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.340831 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6" (UID: "94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.341296 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7dcf074e-8444-4bd2-b1f9-143390e96ef8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.341322 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7dcf074e-8444-4bd2-b1f9-143390e96ef8-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.341333 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7dcf074e-8444-4bd2-b1f9-143390e96ef8-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.341344 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7dcf074e-8444-4bd2-b1f9-143390e96ef8-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.341354 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/50829e14-401d-4958-9ef7-e2a2ef1f4b32-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.341362 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0b0c3280-9b26-4db4-ab16-68016b055a1f-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.341371 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6-config-data-generated\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.341382 4558 reconciler_common.go:293] "Volume detached for 
volume \"kube-api-access-8p8dr\" (UniqueName: \"kubernetes.io/projected/7dcf074e-8444-4bd2-b1f9-143390e96ef8-kube-api-access-8p8dr\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.341392 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7dcf074e-8444-4bd2-b1f9-143390e96ef8-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.341412 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.341704 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6" (UID: "94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.341773 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6" (UID: "94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.341813 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0b0c3280-9b26-4db4-ab16-68016b055a1f-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "0b0c3280-9b26-4db4-ab16-68016b055a1f" (UID: "0b0c3280-9b26-4db4-ab16-68016b055a1f"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.345478 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b0c3280-9b26-4db4-ab16-68016b055a1f-scripts" (OuterVolumeSpecName: "scripts") pod "0b0c3280-9b26-4db4-ab16-68016b055a1f" (UID: "0b0c3280-9b26-4db4-ab16-68016b055a1f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.345685 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b0c3280-9b26-4db4-ab16-68016b055a1f-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "0b0c3280-9b26-4db4-ab16-68016b055a1f" (UID: "0b0c3280-9b26-4db4-ab16-68016b055a1f"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.346305 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c8f11f45-bfce-4989-bfa0-684011f74619-logs" (OuterVolumeSpecName: "logs") pod "c8f11f45-bfce-4989-bfa0-684011f74619" (UID: "c8f11f45-bfce-4989-bfa0-684011f74619"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.346930 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b0c3280-9b26-4db4-ab16-68016b055a1f-kube-api-access-zxwh2" (OuterVolumeSpecName: "kube-api-access-zxwh2") pod "0b0c3280-9b26-4db4-ab16-68016b055a1f" (UID: "0b0c3280-9b26-4db4-ab16-68016b055a1f"). InnerVolumeSpecName "kube-api-access-zxwh2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.349521 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6-kube-api-access-fc22v" (OuterVolumeSpecName: "kube-api-access-fc22v") pod "94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6" (UID: "94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6"). InnerVolumeSpecName "kube-api-access-fc22v". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:58:10 crc kubenswrapper[4558]: E0120 17:58:10.350374 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/nova-cell1-conductor-config-data: secret "nova-cell1-conductor-config-data" not found Jan 20 17:58:10 crc kubenswrapper[4558]: E0120 17:58:10.350421 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/54cbdea3-8fcc-4d2d-870b-cf3663cfc633-config-data podName:54cbdea3-8fcc-4d2d-870b-cf3663cfc633 nodeName:}" failed. No retries permitted until 2026-01-20 17:58:14.350404311 +0000 UTC m=+4588.110742278 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/54cbdea3-8fcc-4d2d-870b-cf3663cfc633-config-data") pod "nova-cell1-conductor-0" (UID: "54cbdea3-8fcc-4d2d-870b-cf3663cfc633") : secret "nova-cell1-conductor-config-data" not found Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.360311 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c8f11f45-bfce-4989-bfa0-684011f74619-kube-api-access-nrm9p" (OuterVolumeSpecName: "kube-api-access-nrm9p") pod "c8f11f45-bfce-4989-bfa0-684011f74619" (UID: "c8f11f45-bfce-4989-bfa0-684011f74619"). InnerVolumeSpecName "kube-api-access-nrm9p". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:58:10 crc kubenswrapper[4558]: E0120 17:58:10.360183 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:58:10 crc kubenswrapper[4558]: E0120 17:58:10.360765 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/54cbdea3-8fcc-4d2d-870b-cf3663cfc633-combined-ca-bundle podName:54cbdea3-8fcc-4d2d-870b-cf3663cfc633 nodeName:}" failed. No retries permitted until 2026-01-20 17:58:14.360755048 +0000 UTC m=+4588.121093014 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/54cbdea3-8fcc-4d2d-870b-cf3663cfc633-combined-ca-bundle") pod "nova-cell1-conductor-0" (UID: "54cbdea3-8fcc-4d2d-870b-cf3663cfc633") : secret "combined-ca-bundle" not found Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.361202 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6" (UID: "94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6"). InnerVolumeSpecName "kolla-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.362390 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.373632 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-api-6bb757fd44-jvx79" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.395197 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage13-crc" (OuterVolumeSpecName: "mysql-db") pod "94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6" (UID: "94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6"). InnerVolumeSpecName "local-storage13-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.424665 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6" (UID: "94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.426960 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6-galera-tls-certs" (OuterVolumeSpecName: "galera-tls-certs") pod "94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6" (UID: "94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6"). InnerVolumeSpecName "galera-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.429801 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b0c3280-9b26-4db4-ab16-68016b055a1f-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "0b0c3280-9b26-4db4-ab16-68016b055a1f" (UID: "0b0c3280-9b26-4db4-ab16-68016b055a1f"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.436962 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c8f11f45-bfce-4989-bfa0-684011f74619-config-data" (OuterVolumeSpecName: "config-data") pod "c8f11f45-bfce-4989-bfa0-684011f74619" (UID: "c8f11f45-bfce-4989-bfa0-684011f74619"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.440670 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c8f11f45-bfce-4989-bfa0-684011f74619-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c8f11f45-bfce-4989-bfa0-684011f74619" (UID: "c8f11f45-bfce-4989-bfa0-684011f74619"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.441741 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b0c3280-9b26-4db4-ab16-68016b055a1f-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "0b0c3280-9b26-4db4-ab16-68016b055a1f" (UID: "0b0c3280-9b26-4db4-ab16-68016b055a1f"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.441758 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b0c3280-9b26-4db4-ab16-68016b055a1f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0b0c3280-9b26-4db4-ab16-68016b055a1f" (UID: "0b0c3280-9b26-4db4-ab16-68016b055a1f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.442927 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8f5c27dd-2365-49ee-b4bd-b46e952199f7-logs\") pod \"8f5c27dd-2365-49ee-b4bd-b46e952199f7\" (UID: \"8f5c27dd-2365-49ee-b4bd-b46e952199f7\") " Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.442978 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b0c3280-9b26-4db4-ab16-68016b055a1f-combined-ca-bundle\") pod \"0b0c3280-9b26-4db4-ab16-68016b055a1f\" (UID: \"0b0c3280-9b26-4db4-ab16-68016b055a1f\") " Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.443062 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f5c27dd-2365-49ee-b4bd-b46e952199f7-config-data\") pod \"8f5c27dd-2365-49ee-b4bd-b46e952199f7\" (UID: \"8f5c27dd-2365-49ee-b4bd-b46e952199f7\") " Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.443116 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8f5c27dd-2365-49ee-b4bd-b46e952199f7-public-tls-certs\") pod \"8f5c27dd-2365-49ee-b4bd-b46e952199f7\" (UID: \"8f5c27dd-2365-49ee-b4bd-b46e952199f7\") " Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.443135 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"8f5c27dd-2365-49ee-b4bd-b46e952199f7\" (UID: \"8f5c27dd-2365-49ee-b4bd-b46e952199f7\") " Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.443203 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z286r\" (UniqueName: \"kubernetes.io/projected/8f5c27dd-2365-49ee-b4bd-b46e952199f7-kube-api-access-z286r\") pod \"8f5c27dd-2365-49ee-b4bd-b46e952199f7\" (UID: \"8f5c27dd-2365-49ee-b4bd-b46e952199f7\") " Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.443327 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f5c27dd-2365-49ee-b4bd-b46e952199f7-combined-ca-bundle\") pod \"8f5c27dd-2365-49ee-b4bd-b46e952199f7\" (UID: \"8f5c27dd-2365-49ee-b4bd-b46e952199f7\") " Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.443378 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8f5c27dd-2365-49ee-b4bd-b46e952199f7-httpd-run\") pod \"8f5c27dd-2365-49ee-b4bd-b46e952199f7\" (UID: \"8f5c27dd-2365-49ee-b4bd-b46e952199f7\") " Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.443447 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0b0c3280-9b26-4db4-ab16-68016b055a1f-internal-tls-certs\") pod 
\"0b0c3280-9b26-4db4-ab16-68016b055a1f\" (UID: \"0b0c3280-9b26-4db4-ab16-68016b055a1f\") " Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.443517 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8f5c27dd-2365-49ee-b4bd-b46e952199f7-scripts\") pod \"8f5c27dd-2365-49ee-b4bd-b46e952199f7\" (UID: \"8f5c27dd-2365-49ee-b4bd-b46e952199f7\") " Jan 20 17:58:10 crc kubenswrapper[4558]: W0120 17:58:10.444431 4558 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/0b0c3280-9b26-4db4-ab16-68016b055a1f/volumes/kubernetes.io~secret/combined-ca-bundle Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.444995 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b0c3280-9b26-4db4-ab16-68016b055a1f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0b0c3280-9b26-4db4-ab16-68016b055a1f" (UID: "0b0c3280-9b26-4db4-ab16-68016b055a1f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.445030 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f5c27dd-2365-49ee-b4bd-b46e952199f7-logs" (OuterVolumeSpecName: "logs") pod "8f5c27dd-2365-49ee-b4bd-b46e952199f7" (UID: "8f5c27dd-2365-49ee-b4bd-b46e952199f7"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:58:10 crc kubenswrapper[4558]: W0120 17:58:10.444746 4558 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/0b0c3280-9b26-4db4-ab16-68016b055a1f/volumes/kubernetes.io~secret/internal-tls-certs Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.445362 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b0c3280-9b26-4db4-ab16-68016b055a1f-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "0b0c3280-9b26-4db4-ab16-68016b055a1f" (UID: "0b0c3280-9b26-4db4-ab16-68016b055a1f"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.444506 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") on node \"crc\" " Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.445506 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0b0c3280-9b26-4db4-ab16-68016b055a1f-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.445577 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/0b0c3280-9b26-4db4-ab16-68016b055a1f-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.445633 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b0c3280-9b26-4db4-ab16-68016b055a1f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.445686 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.445737 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zxwh2\" (UniqueName: \"kubernetes.io/projected/0b0c3280-9b26-4db4-ab16-68016b055a1f-kube-api-access-zxwh2\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.445788 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c8f11f45-bfce-4989-bfa0-684011f74619-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.445851 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.445914 4558 reconciler_common.go:293] "Volume detached for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6-galera-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.445965 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fc22v\" (UniqueName: \"kubernetes.io/projected/94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6-kube-api-access-fc22v\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.446229 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0b0c3280-9b26-4db4-ab16-68016b055a1f-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.446405 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8f11f45-bfce-4989-bfa0-684011f74619-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.446471 4558 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0b0c3280-9b26-4db4-ab16-68016b055a1f-etc-machine-id\") on node \"crc\" 
DevicePath \"\"" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.446533 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0b0c3280-9b26-4db4-ab16-68016b055a1f-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.446595 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.446649 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nrm9p\" (UniqueName: \"kubernetes.io/projected/c8f11f45-bfce-4989-bfa0-684011f74619-kube-api-access-nrm9p\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.446701 4558 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.446764 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6-config-data-default\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.446824 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c8f11f45-bfce-4989-bfa0-684011f74619-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.446328 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f5c27dd-2365-49ee-b4bd-b46e952199f7-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "8f5c27dd-2365-49ee-b4bd-b46e952199f7" (UID: "8f5c27dd-2365-49ee-b4bd-b46e952199f7"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.447786 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b0c3280-9b26-4db4-ab16-68016b055a1f-config-data" (OuterVolumeSpecName: "config-data") pod "0b0c3280-9b26-4db4-ab16-68016b055a1f" (UID: "0b0c3280-9b26-4db4-ab16-68016b055a1f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.448326 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "glance") pod "8f5c27dd-2365-49ee-b4bd-b46e952199f7" (UID: "8f5c27dd-2365-49ee-b4bd-b46e952199f7"). InnerVolumeSpecName "local-storage05-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.448515 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f5c27dd-2365-49ee-b4bd-b46e952199f7-scripts" (OuterVolumeSpecName: "scripts") pod "8f5c27dd-2365-49ee-b4bd-b46e952199f7" (UID: "8f5c27dd-2365-49ee-b4bd-b46e952199f7"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.448619 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c8f11f45-bfce-4989-bfa0-684011f74619-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "c8f11f45-bfce-4989-bfa0-684011f74619" (UID: "c8f11f45-bfce-4989-bfa0-684011f74619"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.448765 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f5c27dd-2365-49ee-b4bd-b46e952199f7-kube-api-access-z286r" (OuterVolumeSpecName: "kube-api-access-z286r") pod "8f5c27dd-2365-49ee-b4bd-b46e952199f7" (UID: "8f5c27dd-2365-49ee-b4bd-b46e952199f7"). InnerVolumeSpecName "kube-api-access-z286r". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.460187 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage13-crc" (UniqueName: "kubernetes.io/local-volume/local-storage13-crc") on node "crc" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.465922 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f5c27dd-2365-49ee-b4bd-b46e952199f7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8f5c27dd-2365-49ee-b4bd-b46e952199f7" (UID: "8f5c27dd-2365-49ee-b4bd-b46e952199f7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.478322 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovn-northd-0_dbdd2687-1110-4dce-a521-19c9337df3a2/ovn-northd/0.log" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.478412 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.483196 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f5c27dd-2365-49ee-b4bd-b46e952199f7-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "8f5c27dd-2365-49ee-b4bd-b46e952199f7" (UID: "8f5c27dd-2365-49ee-b4bd-b46e952199f7"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.484456 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f5c27dd-2365-49ee-b4bd-b46e952199f7-config-data" (OuterVolumeSpecName: "config-data") pod "8f5c27dd-2365-49ee-b4bd-b46e952199f7" (UID: "8f5c27dd-2365-49ee-b4bd-b46e952199f7"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.548431 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7-internal-tls-certs\") pod \"9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7\" (UID: \"9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7\") " Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.548498 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9bdw7\" (UniqueName: \"kubernetes.io/projected/9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7-kube-api-access-9bdw7\") pod \"9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7\" (UID: \"9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7\") " Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.548560 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7-config-data-custom\") pod \"9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7\" (UID: \"9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7\") " Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.548664 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7-config-data\") pod \"9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7\" (UID: \"9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7\") " Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.548730 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7-logs\") pod \"9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7\" (UID: \"9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7\") " Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.548823 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7-combined-ca-bundle\") pod \"9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7\" (UID: \"9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7\") " Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.548937 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7-public-tls-certs\") pod \"9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7\" (UID: \"9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7\") " Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.549290 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7-logs" (OuterVolumeSpecName: "logs") pod "9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7" (UID: "9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.549448 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f5c27dd-2365-49ee-b4bd-b46e952199f7-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.549487 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" " Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.549499 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8f5c27dd-2365-49ee-b4bd-b46e952199f7-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.549510 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z286r\" (UniqueName: \"kubernetes.io/projected/8f5c27dd-2365-49ee-b4bd-b46e952199f7-kube-api-access-z286r\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.549519 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.549529 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f5c27dd-2365-49ee-b4bd-b46e952199f7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.549537 4558 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8f5c27dd-2365-49ee-b4bd-b46e952199f7-httpd-run\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.549545 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8f5c27dd-2365-49ee-b4bd-b46e952199f7-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.549555 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.549605 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8f5c27dd-2365-49ee-b4bd-b46e952199f7-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.549617 4558 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/c8f11f45-bfce-4989-bfa0-684011f74619-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.549626 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0b0c3280-9b26-4db4-ab16-68016b055a1f-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.551360 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7-kube-api-access-9bdw7" (OuterVolumeSpecName: "kube-api-access-9bdw7") pod "9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7" (UID: "9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7"). 
InnerVolumeSpecName "kube-api-access-9bdw7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.552149 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7" (UID: "9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.562835 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.565665 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7" (UID: "9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.576576 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1481b06c-22ef-4b33-b49c-2c5d6903cbee" path="/var/lib/kubelet/pods/1481b06c-22ef-4b33-b49c-2c5d6903cbee/volumes" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.577314 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25990a01-f231-4924-a1cc-9207cc5af3c8" path="/var/lib/kubelet/pods/25990a01-f231-4924-a1cc-9207cc5af3c8/volumes" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.577953 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2c80b87d-f7d5-46dd-a6d9-bba3e82e405b" path="/var/lib/kubelet/pods/2c80b87d-f7d5-46dd-a6d9-bba3e82e405b/volumes" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.578116 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7" (UID: "9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.579391 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="41d6ff7a-4cdf-4add-834d-eb19a40181b9" path="/var/lib/kubelet/pods/41d6ff7a-4cdf-4add-834d-eb19a40181b9/volumes" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.580351 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="45d51516-cc3a-473c-bb88-e82d290d36ca" path="/var/lib/kubelet/pods/45d51516-cc3a-473c-bb88-e82d290d36ca/volumes" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.581267 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5dfca7b5-dd95-4da3-a088-de4d48ea8c10" path="/var/lib/kubelet/pods/5dfca7b5-dd95-4da3-a088-de4d48ea8c10/volumes" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.581760 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8c1a8bdd-f084-4e64-a651-91e76ad6f7d1" path="/var/lib/kubelet/pods/8c1a8bdd-f084-4e64-a651-91e76ad6f7d1/volumes" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.582973 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96cbceb8-1912-4174-8d93-22ab4aa1b152" path="/var/lib/kubelet/pods/96cbceb8-1912-4174-8d93-22ab4aa1b152/volumes" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.583343 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7-config-data" (OuterVolumeSpecName: "config-data") pod "9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7" (UID: "9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.583380 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7" (UID: "9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.583507 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91" path="/var/lib/kubelet/pods/a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91/volumes" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.584351 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="abd1fde8-57bd-4248-a061-6ddb436501c2" path="/var/lib/kubelet/pods/abd1fde8-57bd-4248-a061-6ddb436501c2/volumes" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.585410 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c9479a19-1b1e-4311-97f5-8922e8602b18" path="/var/lib/kubelet/pods/c9479a19-1b1e-4311-97f5-8922e8602b18/volumes" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.585919 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ce3fd1cf-8efc-4180-b37f-8b562827c8a2" path="/var/lib/kubelet/pods/ce3fd1cf-8efc-4180-b37f-8b562827c8a2/volumes" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.586818 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="de8410e3-8de9-4013-b2ed-545ccdff866c" path="/var/lib/kubelet/pods/de8410e3-8de9-4013-b2ed-545ccdff866c/volumes" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.621019 4558 generic.go:334] "Generic (PLEG): container finished" podID="9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7" containerID="7e84faeb8fb514479840b3b5eeb4711dbbb78f6bac5d538500313eb7bf90d215" exitCode=0 Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.621376 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-api-6bb757fd44-jvx79" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.624034 4558 generic.go:334] "Generic (PLEG): container finished" podID="0b0c3280-9b26-4db4-ab16-68016b055a1f" containerID="c77c9b049e2b69bd62c3dcdd5e98b0f9306632a287cc9d5490b8cbb694baaa92" exitCode=0 Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.624347 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/cinder-api-0" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.634511 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_ovn-northd-0_dbdd2687-1110-4dce-a521-19c9337df3a2/ovn-northd/0.log" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.634604 4558 generic.go:334] "Generic (PLEG): container finished" podID="dbdd2687-1110-4dce-a521-19c9337df3a2" containerID="96d47ec89318825738be6686bad9869d642e9bc40f34628060459ade0aa4b177" exitCode=139 Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.634784 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovn-northd-0" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.644059 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/glance-default-external-api-0" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.650730 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dbdd2687-1110-4dce-a521-19c9337df3a2-combined-ca-bundle\") pod \"dbdd2687-1110-4dce-a521-19c9337df3a2\" (UID: \"dbdd2687-1110-4dce-a521-19c9337df3a2\") " Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.650886 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/dbdd2687-1110-4dce-a521-19c9337df3a2-scripts\") pod \"dbdd2687-1110-4dce-a521-19c9337df3a2\" (UID: \"dbdd2687-1110-4dce-a521-19c9337df3a2\") " Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.650943 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8ntm2\" (UniqueName: \"kubernetes.io/projected/dbdd2687-1110-4dce-a521-19c9337df3a2-kube-api-access-8ntm2\") pod \"dbdd2687-1110-4dce-a521-19c9337df3a2\" (UID: \"dbdd2687-1110-4dce-a521-19c9337df3a2\") " Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.650978 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/dbdd2687-1110-4dce-a521-19c9337df3a2-ovn-rundir\") pod \"dbdd2687-1110-4dce-a521-19c9337df3a2\" (UID: \"dbdd2687-1110-4dce-a521-19c9337df3a2\") " Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.651061 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/dbdd2687-1110-4dce-a521-19c9337df3a2-metrics-certs-tls-certs\") pod \"dbdd2687-1110-4dce-a521-19c9337df3a2\" (UID: \"dbdd2687-1110-4dce-a521-19c9337df3a2\") " Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.651103 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/dbdd2687-1110-4dce-a521-19c9337df3a2-ovn-northd-tls-certs\") pod \"dbdd2687-1110-4dce-a521-19c9337df3a2\" (UID: \"dbdd2687-1110-4dce-a521-19c9337df3a2\") " Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.651139 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dbdd2687-1110-4dce-a521-19c9337df3a2-config\") pod \"dbdd2687-1110-4dce-a521-19c9337df3a2\" (UID: \"dbdd2687-1110-4dce-a521-19c9337df3a2\") " Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.651177 4558 generic.go:334] "Generic (PLEG): container finished" podID="94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6" containerID="7444755b1efeb70ce9df8e374dcb007a995d0c3ace0bc1c4539e47db0dbb9408" exitCode=0 Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.651359 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/openstack-galera-0" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.654472 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9bdw7\" (UniqueName: \"kubernetes.io/projected/9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7-kube-api-access-9bdw7\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.654820 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dbdd2687-1110-4dce-a521-19c9337df3a2-kube-api-access-8ntm2" (OuterVolumeSpecName: "kube-api-access-8ntm2") pod "dbdd2687-1110-4dce-a521-19c9337df3a2" (UID: "dbdd2687-1110-4dce-a521-19c9337df3a2"). InnerVolumeSpecName "kube-api-access-8ntm2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.655874 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dbdd2687-1110-4dce-a521-19c9337df3a2-scripts" (OuterVolumeSpecName: "scripts") pod "dbdd2687-1110-4dce-a521-19c9337df3a2" (UID: "dbdd2687-1110-4dce-a521-19c9337df3a2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.656004 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.656029 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.656050 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.656068 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.656081 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.656093 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.656552 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dbdd2687-1110-4dce-a521-19c9337df3a2-config" (OuterVolumeSpecName: "config") pod "dbdd2687-1110-4dce-a521-19c9337df3a2" (UID: "dbdd2687-1110-4dce-a521-19c9337df3a2"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.662093 4558 generic.go:334] "Generic (PLEG): container finished" podID="c8f11f45-bfce-4989-bfa0-684011f74619" containerID="8bfec59d12be9eefb33c786c52426fac978276a374abf67c2991cebb7a215c6d" exitCode=0 Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.662221 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-metadata-0" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.662760 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dbdd2687-1110-4dce-a521-19c9337df3a2-ovn-rundir" (OuterVolumeSpecName: "ovn-rundir") pod "dbdd2687-1110-4dce-a521-19c9337df3a2" (UID: "dbdd2687-1110-4dce-a521-19c9337df3a2"). InnerVolumeSpecName "ovn-rundir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.669051 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/placement-7c7cc56d94-qptzl" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.681914 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/keystone-1afa-account-create-update-tmdn7" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.682262 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/glance-default-internal-api-0" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.693800 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dbdd2687-1110-4dce-a521-19c9337df3a2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dbdd2687-1110-4dce-a521-19c9337df3a2" (UID: "dbdd2687-1110-4dce-a521-19c9337df3a2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.700103 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dbdd2687-1110-4dce-a521-19c9337df3a2-ovn-northd-tls-certs" (OuterVolumeSpecName: "ovn-northd-tls-certs") pod "dbdd2687-1110-4dce-a521-19c9337df3a2" (UID: "dbdd2687-1110-4dce-a521-19c9337df3a2"). InnerVolumeSpecName "ovn-northd-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.722014 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dbdd2687-1110-4dce-a521-19c9337df3a2-metrics-certs-tls-certs" (OuterVolumeSpecName: "metrics-certs-tls-certs") pod "dbdd2687-1110-4dce-a521-19c9337df3a2" (UID: "dbdd2687-1110-4dce-a521-19c9337df3a2"). InnerVolumeSpecName "metrics-certs-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.757806 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/dbdd2687-1110-4dce-a521-19c9337df3a2-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.757837 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8ntm2\" (UniqueName: \"kubernetes.io/projected/dbdd2687-1110-4dce-a521-19c9337df3a2-kube-api-access-8ntm2\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.757849 4558 reconciler_common.go:293] "Volume detached for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/dbdd2687-1110-4dce-a521-19c9337df3a2-ovn-rundir\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.757862 4558 reconciler_common.go:293] "Volume detached for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/dbdd2687-1110-4dce-a521-19c9337df3a2-metrics-certs-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.757872 4558 reconciler_common.go:293] "Volume detached for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/dbdd2687-1110-4dce-a521-19c9337df3a2-ovn-northd-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.757880 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dbdd2687-1110-4dce-a521-19c9337df3a2-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.757889 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dbdd2687-1110-4dce-a521-19c9337df3a2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.827038 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-6bb757fd44-jvx79" event={"ID":"9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7","Type":"ContainerDied","Data":"7e84faeb8fb514479840b3b5eeb4711dbbb78f6bac5d538500313eb7bf90d215"} Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.827100 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-api-6bb757fd44-jvx79" event={"ID":"9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7","Type":"ContainerDied","Data":"92e3fcede0a1618b22fdaa64045121c517f956f3b2c1983b6e27802858f4144c"} Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.827114 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"0b0c3280-9b26-4db4-ab16-68016b055a1f","Type":"ContainerDied","Data":"c77c9b049e2b69bd62c3dcdd5e98b0f9306632a287cc9d5490b8cbb694baaa92"} Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.827132 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/cinder-api-0" event={"ID":"0b0c3280-9b26-4db4-ab16-68016b055a1f","Type":"ContainerDied","Data":"3c41b8d2e4ff79bd3706fc70c233fdfa093a870dea67d23e5ec6f068d9954668"} Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.827145 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"dbdd2687-1110-4dce-a521-19c9337df3a2","Type":"ContainerDied","Data":"96d47ec89318825738be6686bad9869d642e9bc40f34628060459ade0aa4b177"} Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 
17:58:10.827158 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-northd-0" event={"ID":"dbdd2687-1110-4dce-a521-19c9337df3a2","Type":"ContainerDied","Data":"a2a7a2a4918691aaf41b49fcc06de017842d09f24418e64b2e673e16ba0845b2"} Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.827189 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-external-api-0" event={"ID":"8f5c27dd-2365-49ee-b4bd-b46e952199f7","Type":"ContainerDied","Data":"68a29dece1a57865631949e2370c639a8b3ff0e4319d8a8262e6d9751ffb57d6"} Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.827203 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6","Type":"ContainerDied","Data":"7444755b1efeb70ce9df8e374dcb007a995d0c3ace0bc1c4539e47db0dbb9408"} Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.827204 4558 scope.go:117] "RemoveContainer" containerID="7e84faeb8fb514479840b3b5eeb4711dbbb78f6bac5d538500313eb7bf90d215" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.827217 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/openstack-galera-0" event={"ID":"94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6","Type":"ContainerDied","Data":"33dc9a9bd3762a283e45de54452c87ac3997c2cf3329c7ef08bc3bd13dbb9b65"} Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.827232 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"c8f11f45-bfce-4989-bfa0-684011f74619","Type":"ContainerDied","Data":"8bfec59d12be9eefb33c786c52426fac978276a374abf67c2991cebb7a215c6d"} Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.827244 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-metadata-0" event={"ID":"c8f11f45-bfce-4989-bfa0-684011f74619","Type":"ContainerDied","Data":"54df676fde10a0849dfebcb70b517d2908a83f1c31dbdeeb408ebc25bb01c32f"} Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.827255 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/placement-7c7cc56d94-qptzl" event={"ID":"50829e14-401d-4958-9ef7-e2a2ef1f4b32","Type":"ContainerDied","Data":"87051b354e76a109fd178692cdde020d8f75467d48ca7b6c336ecdc09ab3ed0b"} Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.827268 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/glance-default-internal-api-0" event={"ID":"7dcf074e-8444-4bd2-b1f9-143390e96ef8","Type":"ContainerDied","Data":"e321eaafee8525c51e9de1b6cbca4c0ccaa4f6d832b563b5571a376bbfd1c72a"} Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.957422 4558 scope.go:117] "RemoveContainer" containerID="2f0d5940c1a430547bdac78180588ab9d0ea89c8cd08f0adfb7ebcb602621819" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.966668 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.971129 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-x75ng" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.975659 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-external-api-0"] Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.982496 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-api-6bb757fd44-jvx79"] Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.990427 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-api-6bb757fd44-jvx79"] Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.994232 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/placement-7c7cc56d94-qptzl"] Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.994530 4558 scope.go:117] "RemoveContainer" containerID="7e84faeb8fb514479840b3b5eeb4711dbbb78f6bac5d538500313eb7bf90d215" Jan 20 17:58:10 crc kubenswrapper[4558]: E0120 17:58:10.995805 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7e84faeb8fb514479840b3b5eeb4711dbbb78f6bac5d538500313eb7bf90d215\": container with ID starting with 7e84faeb8fb514479840b3b5eeb4711dbbb78f6bac5d538500313eb7bf90d215 not found: ID does not exist" containerID="7e84faeb8fb514479840b3b5eeb4711dbbb78f6bac5d538500313eb7bf90d215" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.995848 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7e84faeb8fb514479840b3b5eeb4711dbbb78f6bac5d538500313eb7bf90d215"} err="failed to get container status \"7e84faeb8fb514479840b3b5eeb4711dbbb78f6bac5d538500313eb7bf90d215\": rpc error: code = NotFound desc = could not find container \"7e84faeb8fb514479840b3b5eeb4711dbbb78f6bac5d538500313eb7bf90d215\": container with ID starting with 7e84faeb8fb514479840b3b5eeb4711dbbb78f6bac5d538500313eb7bf90d215 not found: ID does not exist" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.995872 4558 scope.go:117] "RemoveContainer" containerID="2f0d5940c1a430547bdac78180588ab9d0ea89c8cd08f0adfb7ebcb602621819" Jan 20 17:58:10 crc kubenswrapper[4558]: E0120 17:58:10.996350 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2f0d5940c1a430547bdac78180588ab9d0ea89c8cd08f0adfb7ebcb602621819\": container with ID starting with 2f0d5940c1a430547bdac78180588ab9d0ea89c8cd08f0adfb7ebcb602621819 not found: ID does not exist" containerID="2f0d5940c1a430547bdac78180588ab9d0ea89c8cd08f0adfb7ebcb602621819" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.996377 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2f0d5940c1a430547bdac78180588ab9d0ea89c8cd08f0adfb7ebcb602621819"} err="failed to get container status \"2f0d5940c1a430547bdac78180588ab9d0ea89c8cd08f0adfb7ebcb602621819\": rpc error: code = NotFound desc = could not find container \"2f0d5940c1a430547bdac78180588ab9d0ea89c8cd08f0adfb7ebcb602621819\": container with ID starting with 2f0d5940c1a430547bdac78180588ab9d0ea89c8cd08f0adfb7ebcb602621819 not found: ID does not exist" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.996396 4558 scope.go:117] "RemoveContainer" containerID="c77c9b049e2b69bd62c3dcdd5e98b0f9306632a287cc9d5490b8cbb694baaa92" Jan 20 17:58:10 crc kubenswrapper[4558]: I0120 17:58:10.999910 4558 kubelet.go:2431] "SyncLoop REMOVE" 
source="api" pods=["openstack-kuttl-tests/placement-7c7cc56d94-qptzl"] Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.012677 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.016635 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-6c8cb46bb9-k2j62" Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.020069 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/glance-default-internal-api-0"] Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.025754 4558 scope.go:117] "RemoveContainer" containerID="ed6293f7998d052646c129c9cb1fa0e8231d7f0d152e8108e422d497c055e6c9" Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.038232 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-1afa-account-create-update-tmdn7"] Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.047257 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-1afa-account-create-update-tmdn7"] Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.047761 4558 scope.go:117] "RemoveContainer" containerID="c77c9b049e2b69bd62c3dcdd5e98b0f9306632a287cc9d5490b8cbb694baaa92" Jan 20 17:58:11 crc kubenswrapper[4558]: E0120 17:58:11.052920 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c77c9b049e2b69bd62c3dcdd5e98b0f9306632a287cc9d5490b8cbb694baaa92\": container with ID starting with c77c9b049e2b69bd62c3dcdd5e98b0f9306632a287cc9d5490b8cbb694baaa92 not found: ID does not exist" containerID="c77c9b049e2b69bd62c3dcdd5e98b0f9306632a287cc9d5490b8cbb694baaa92" Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.052945 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c77c9b049e2b69bd62c3dcdd5e98b0f9306632a287cc9d5490b8cbb694baaa92"} err="failed to get container status \"c77c9b049e2b69bd62c3dcdd5e98b0f9306632a287cc9d5490b8cbb694baaa92\": rpc error: code = NotFound desc = could not find container \"c77c9b049e2b69bd62c3dcdd5e98b0f9306632a287cc9d5490b8cbb694baaa92\": container with ID starting with c77c9b049e2b69bd62c3dcdd5e98b0f9306632a287cc9d5490b8cbb694baaa92 not found: ID does not exist" Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.052963 4558 scope.go:117] "RemoveContainer" containerID="ed6293f7998d052646c129c9cb1fa0e8231d7f0d152e8108e422d497c055e6c9" Jan 20 17:58:11 crc kubenswrapper[4558]: E0120 17:58:11.053212 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ed6293f7998d052646c129c9cb1fa0e8231d7f0d152e8108e422d497c055e6c9\": container with ID starting with ed6293f7998d052646c129c9cb1fa0e8231d7f0d152e8108e422d497c055e6c9 not found: ID does not exist" containerID="ed6293f7998d052646c129c9cb1fa0e8231d7f0d152e8108e422d497c055e6c9" Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.053230 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ed6293f7998d052646c129c9cb1fa0e8231d7f0d152e8108e422d497c055e6c9"} err="failed to get container status \"ed6293f7998d052646c129c9cb1fa0e8231d7f0d152e8108e422d497c055e6c9\": rpc error: code = NotFound desc = could not find container \"ed6293f7998d052646c129c9cb1fa0e8231d7f0d152e8108e422d497c055e6c9\": container with ID 
starting with ed6293f7998d052646c129c9cb1fa0e8231d7f0d152e8108e422d497c055e6c9 not found: ID does not exist" Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.053244 4558 scope.go:117] "RemoveContainer" containerID="1c2ffc84d31610214ed641d4c67df414b1a44d6e48fb78c1e2211304b04f696f" Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.053991 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.066087 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8b6h5\" (UniqueName: \"kubernetes.io/projected/75a8c03c-5b0a-4144-b1a8-293ce0d50739-kube-api-access-8b6h5\") pod \"75a8c03c-5b0a-4144-b1a8-293ce0d50739\" (UID: \"75a8c03c-5b0a-4144-b1a8-293ce0d50739\") " Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.066380 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/75a8c03c-5b0a-4144-b1a8-293ce0d50739-operator-scripts\") pod \"75a8c03c-5b0a-4144-b1a8-293ce0d50739\" (UID: \"75a8c03c-5b0a-4144-b1a8-293ce0d50739\") " Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.067106 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/75a8c03c-5b0a-4144-b1a8-293ce0d50739-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "75a8c03c-5b0a-4144-b1a8-293ce0d50739" (UID: "75a8c03c-5b0a-4144-b1a8-293ce0d50739"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.070174 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/cinder-api-0"] Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.078301 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/75a8c03c-5b0a-4144-b1a8-293ce0d50739-kube-api-access-8b6h5" (OuterVolumeSpecName: "kube-api-access-8b6h5") pod "75a8c03c-5b0a-4144-b1a8-293ce0d50739" (UID: "75a8c03c-5b0a-4144-b1a8-293ce0d50739"). InnerVolumeSpecName "kube-api-access-8b6h5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.079844 4558 scope.go:117] "RemoveContainer" containerID="96d47ec89318825738be6686bad9869d642e9bc40f34628060459ade0aa4b177" Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.079994 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.084481 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-metadata-0"] Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.087635 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.090980 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/openstack-galera-0"] Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.094516 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.106450 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ovn-northd-0"] Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.109256 4558 scope.go:117] "RemoveContainer" containerID="1c2ffc84d31610214ed641d4c67df414b1a44d6e48fb78c1e2211304b04f696f" Jan 20 17:58:11 crc kubenswrapper[4558]: E0120 17:58:11.113255 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1c2ffc84d31610214ed641d4c67df414b1a44d6e48fb78c1e2211304b04f696f\": container with ID starting with 1c2ffc84d31610214ed641d4c67df414b1a44d6e48fb78c1e2211304b04f696f not found: ID does not exist" containerID="1c2ffc84d31610214ed641d4c67df414b1a44d6e48fb78c1e2211304b04f696f" Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.113289 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1c2ffc84d31610214ed641d4c67df414b1a44d6e48fb78c1e2211304b04f696f"} err="failed to get container status \"1c2ffc84d31610214ed641d4c67df414b1a44d6e48fb78c1e2211304b04f696f\": rpc error: code = NotFound desc = could not find container \"1c2ffc84d31610214ed641d4c67df414b1a44d6e48fb78c1e2211304b04f696f\": container with ID starting with 1c2ffc84d31610214ed641d4c67df414b1a44d6e48fb78c1e2211304b04f696f not found: ID does not exist" Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.113307 4558 scope.go:117] "RemoveContainer" containerID="96d47ec89318825738be6686bad9869d642e9bc40f34628060459ade0aa4b177" Jan 20 17:58:11 crc kubenswrapper[4558]: E0120 17:58:11.119673 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"96d47ec89318825738be6686bad9869d642e9bc40f34628060459ade0aa4b177\": container with ID starting with 96d47ec89318825738be6686bad9869d642e9bc40f34628060459ade0aa4b177 not found: ID does not exist" containerID="96d47ec89318825738be6686bad9869d642e9bc40f34628060459ade0aa4b177" Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.119712 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"96d47ec89318825738be6686bad9869d642e9bc40f34628060459ade0aa4b177"} err="failed to get container status \"96d47ec89318825738be6686bad9869d642e9bc40f34628060459ade0aa4b177\": rpc error: code = NotFound desc = could not find container 
\"96d47ec89318825738be6686bad9869d642e9bc40f34628060459ade0aa4b177\": container with ID starting with 96d47ec89318825738be6686bad9869d642e9bc40f34628060459ade0aa4b177 not found: ID does not exist" Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.119739 4558 scope.go:117] "RemoveContainer" containerID="bf44882c187617ca00e2141f2d59bb3077ec0bd79f8fd782d703007b103558bc" Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.153736 4558 scope.go:117] "RemoveContainer" containerID="c8373ab659f997ee81de532a6542773f3920ce134a08cd0ec3ca60979a45eb58" Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.167782 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wrpg6\" (UniqueName: \"kubernetes.io/projected/8a5251c1-60bc-45d7-8524-fd654c09505b-kube-api-access-wrpg6\") pod \"8a5251c1-60bc-45d7-8524-fd654c09505b\" (UID: \"8a5251c1-60bc-45d7-8524-fd654c09505b\") " Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.168070 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8a5251c1-60bc-45d7-8524-fd654c09505b-logs\") pod \"8a5251c1-60bc-45d7-8524-fd654c09505b\" (UID: \"8a5251c1-60bc-45d7-8524-fd654c09505b\") " Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.168146 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8a5251c1-60bc-45d7-8524-fd654c09505b-config-data-custom\") pod \"8a5251c1-60bc-45d7-8524-fd654c09505b\" (UID: \"8a5251c1-60bc-45d7-8524-fd654c09505b\") " Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.168204 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a5251c1-60bc-45d7-8524-fd654c09505b-combined-ca-bundle\") pod \"8a5251c1-60bc-45d7-8524-fd654c09505b\" (UID: \"8a5251c1-60bc-45d7-8524-fd654c09505b\") " Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.168307 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a5251c1-60bc-45d7-8524-fd654c09505b-config-data\") pod \"8a5251c1-60bc-45d7-8524-fd654c09505b\" (UID: \"8a5251c1-60bc-45d7-8524-fd654c09505b\") " Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.168616 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8a5251c1-60bc-45d7-8524-fd654c09505b-logs" (OuterVolumeSpecName: "logs") pod "8a5251c1-60bc-45d7-8524-fd654c09505b" (UID: "8a5251c1-60bc-45d7-8524-fd654c09505b"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.169131 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8a5251c1-60bc-45d7-8524-fd654c09505b-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.169468 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8b6h5\" (UniqueName: \"kubernetes.io/projected/75a8c03c-5b0a-4144-b1a8-293ce0d50739-kube-api-access-8b6h5\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.169483 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-62mvs\" (UniqueName: \"kubernetes.io/projected/71795b1b-e445-456a-aa68-e410bdaccc73-kube-api-access-62mvs\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.169496 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/71795b1b-e445-456a-aa68-e410bdaccc73-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.169507 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/75a8c03c-5b0a-4144-b1a8-293ce0d50739-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.171808 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8a5251c1-60bc-45d7-8524-fd654c09505b-kube-api-access-wrpg6" (OuterVolumeSpecName: "kube-api-access-wrpg6") pod "8a5251c1-60bc-45d7-8524-fd654c09505b" (UID: "8a5251c1-60bc-45d7-8524-fd654c09505b"). InnerVolumeSpecName "kube-api-access-wrpg6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.173088 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a5251c1-60bc-45d7-8524-fd654c09505b-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "8a5251c1-60bc-45d7-8524-fd654c09505b" (UID: "8a5251c1-60bc-45d7-8524-fd654c09505b"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.177188 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.179078 4558 scope.go:117] "RemoveContainer" containerID="7444755b1efeb70ce9df8e374dcb007a995d0c3ace0bc1c4539e47db0dbb9408" Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.189811 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a5251c1-60bc-45d7-8524-fd654c09505b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8a5251c1-60bc-45d7-8524-fd654c09505b" (UID: "8a5251c1-60bc-45d7-8524-fd654c09505b"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.210289 4558 scope.go:117] "RemoveContainer" containerID="67e74c2c2f7872eecd88ff144c26d1ab77df0c8b151f063b843778fa52c1bd62" Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.211741 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a5251c1-60bc-45d7-8524-fd654c09505b-config-data" (OuterVolumeSpecName: "config-data") pod "8a5251c1-60bc-45d7-8524-fd654c09505b" (UID: "8a5251c1-60bc-45d7-8524-fd654c09505b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.228212 4558 scope.go:117] "RemoveContainer" containerID="7444755b1efeb70ce9df8e374dcb007a995d0c3ace0bc1c4539e47db0dbb9408" Jan 20 17:58:11 crc kubenswrapper[4558]: E0120 17:58:11.228544 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7444755b1efeb70ce9df8e374dcb007a995d0c3ace0bc1c4539e47db0dbb9408\": container with ID starting with 7444755b1efeb70ce9df8e374dcb007a995d0c3ace0bc1c4539e47db0dbb9408 not found: ID does not exist" containerID="7444755b1efeb70ce9df8e374dcb007a995d0c3ace0bc1c4539e47db0dbb9408" Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.228581 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7444755b1efeb70ce9df8e374dcb007a995d0c3ace0bc1c4539e47db0dbb9408"} err="failed to get container status \"7444755b1efeb70ce9df8e374dcb007a995d0c3ace0bc1c4539e47db0dbb9408\": rpc error: code = NotFound desc = could not find container \"7444755b1efeb70ce9df8e374dcb007a995d0c3ace0bc1c4539e47db0dbb9408\": container with ID starting with 7444755b1efeb70ce9df8e374dcb007a995d0c3ace0bc1c4539e47db0dbb9408 not found: ID does not exist" Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.228606 4558 scope.go:117] "RemoveContainer" containerID="67e74c2c2f7872eecd88ff144c26d1ab77df0c8b151f063b843778fa52c1bd62" Jan 20 17:58:11 crc kubenswrapper[4558]: E0120 17:58:11.229224 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"67e74c2c2f7872eecd88ff144c26d1ab77df0c8b151f063b843778fa52c1bd62\": container with ID starting with 67e74c2c2f7872eecd88ff144c26d1ab77df0c8b151f063b843778fa52c1bd62 not found: ID does not exist" containerID="67e74c2c2f7872eecd88ff144c26d1ab77df0c8b151f063b843778fa52c1bd62" Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.229274 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"67e74c2c2f7872eecd88ff144c26d1ab77df0c8b151f063b843778fa52c1bd62"} err="failed to get container status \"67e74c2c2f7872eecd88ff144c26d1ab77df0c8b151f063b843778fa52c1bd62\": rpc error: code = NotFound desc = could not find container \"67e74c2c2f7872eecd88ff144c26d1ab77df0c8b151f063b843778fa52c1bd62\": container with ID starting with 67e74c2c2f7872eecd88ff144c26d1ab77df0c8b151f063b843778fa52c1bd62 not found: ID does not exist" Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.229290 4558 scope.go:117] "RemoveContainer" containerID="8bfec59d12be9eefb33c786c52426fac978276a374abf67c2991cebb7a215c6d" Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.244389 4558 scope.go:117] "RemoveContainer" containerID="d0304cd10afa9031be3b363bc240ee32e662b09bbf8d0575f8a91d278b93f42b" Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.256637 
4558 scope.go:117] "RemoveContainer" containerID="8bfec59d12be9eefb33c786c52426fac978276a374abf67c2991cebb7a215c6d" Jan 20 17:58:11 crc kubenswrapper[4558]: E0120 17:58:11.257318 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8bfec59d12be9eefb33c786c52426fac978276a374abf67c2991cebb7a215c6d\": container with ID starting with 8bfec59d12be9eefb33c786c52426fac978276a374abf67c2991cebb7a215c6d not found: ID does not exist" containerID="8bfec59d12be9eefb33c786c52426fac978276a374abf67c2991cebb7a215c6d" Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.257365 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8bfec59d12be9eefb33c786c52426fac978276a374abf67c2991cebb7a215c6d"} err="failed to get container status \"8bfec59d12be9eefb33c786c52426fac978276a374abf67c2991cebb7a215c6d\": rpc error: code = NotFound desc = could not find container \"8bfec59d12be9eefb33c786c52426fac978276a374abf67c2991cebb7a215c6d\": container with ID starting with 8bfec59d12be9eefb33c786c52426fac978276a374abf67c2991cebb7a215c6d not found: ID does not exist" Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.257414 4558 scope.go:117] "RemoveContainer" containerID="d0304cd10afa9031be3b363bc240ee32e662b09bbf8d0575f8a91d278b93f42b" Jan 20 17:58:11 crc kubenswrapper[4558]: E0120 17:58:11.258132 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d0304cd10afa9031be3b363bc240ee32e662b09bbf8d0575f8a91d278b93f42b\": container with ID starting with d0304cd10afa9031be3b363bc240ee32e662b09bbf8d0575f8a91d278b93f42b not found: ID does not exist" containerID="d0304cd10afa9031be3b363bc240ee32e662b09bbf8d0575f8a91d278b93f42b" Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.258197 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d0304cd10afa9031be3b363bc240ee32e662b09bbf8d0575f8a91d278b93f42b"} err="failed to get container status \"d0304cd10afa9031be3b363bc240ee32e662b09bbf8d0575f8a91d278b93f42b\": rpc error: code = NotFound desc = could not find container \"d0304cd10afa9031be3b363bc240ee32e662b09bbf8d0575f8a91d278b93f42b\": container with ID starting with d0304cd10afa9031be3b363bc240ee32e662b09bbf8d0575f8a91d278b93f42b not found: ID does not exist" Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.258230 4558 scope.go:117] "RemoveContainer" containerID="f43ceb14bd50b1f492df9772660a94cd37c5a26e2941454f5b5fa401d0851c00" Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.270668 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f0733ea-bb8a-4c0f-9ea8-edd6f687301e-combined-ca-bundle\") pod \"6f0733ea-bb8a-4c0f-9ea8-edd6f687301e\" (UID: \"6f0733ea-bb8a-4c0f-9ea8-edd6f687301e\") " Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.270712 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6f0733ea-bb8a-4c0f-9ea8-edd6f687301e-logs\") pod \"6f0733ea-bb8a-4c0f-9ea8-edd6f687301e\" (UID: \"6f0733ea-bb8a-4c0f-9ea8-edd6f687301e\") " Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.270803 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f0733ea-bb8a-4c0f-9ea8-edd6f687301e-config-data\") pod 
\"6f0733ea-bb8a-4c0f-9ea8-edd6f687301e\" (UID: \"6f0733ea-bb8a-4c0f-9ea8-edd6f687301e\") " Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.271428 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6f0733ea-bb8a-4c0f-9ea8-edd6f687301e-public-tls-certs\") pod \"6f0733ea-bb8a-4c0f-9ea8-edd6f687301e\" (UID: \"6f0733ea-bb8a-4c0f-9ea8-edd6f687301e\") " Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.271304 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6f0733ea-bb8a-4c0f-9ea8-edd6f687301e-logs" (OuterVolumeSpecName: "logs") pod "6f0733ea-bb8a-4c0f-9ea8-edd6f687301e" (UID: "6f0733ea-bb8a-4c0f-9ea8-edd6f687301e"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.271592 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6f0733ea-bb8a-4c0f-9ea8-edd6f687301e-internal-tls-certs\") pod \"6f0733ea-bb8a-4c0f-9ea8-edd6f687301e\" (UID: \"6f0733ea-bb8a-4c0f-9ea8-edd6f687301e\") " Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.271717 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2q6tn\" (UniqueName: \"kubernetes.io/projected/6f0733ea-bb8a-4c0f-9ea8-edd6f687301e-kube-api-access-2q6tn\") pod \"6f0733ea-bb8a-4c0f-9ea8-edd6f687301e\" (UID: \"6f0733ea-bb8a-4c0f-9ea8-edd6f687301e\") " Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.273085 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a5251c1-60bc-45d7-8524-fd654c09505b-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.273111 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wrpg6\" (UniqueName: \"kubernetes.io/projected/8a5251c1-60bc-45d7-8524-fd654c09505b-kube-api-access-wrpg6\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.273137 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8a5251c1-60bc-45d7-8524-fd654c09505b-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.273148 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a5251c1-60bc-45d7-8524-fd654c09505b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.273156 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6f0733ea-bb8a-4c0f-9ea8-edd6f687301e-logs\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.275547 4558 scope.go:117] "RemoveContainer" containerID="fe2d2dd5529693a11eb03a9bb6bf05aecb6c57bbe0a7f33f65058fc71e14d418" Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.279060 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6f0733ea-bb8a-4c0f-9ea8-edd6f687301e-kube-api-access-2q6tn" (OuterVolumeSpecName: "kube-api-access-2q6tn") pod "6f0733ea-bb8a-4c0f-9ea8-edd6f687301e" (UID: "6f0733ea-bb8a-4c0f-9ea8-edd6f687301e"). InnerVolumeSpecName "kube-api-access-2q6tn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.293316 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f0733ea-bb8a-4c0f-9ea8-edd6f687301e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6f0733ea-bb8a-4c0f-9ea8-edd6f687301e" (UID: "6f0733ea-bb8a-4c0f-9ea8-edd6f687301e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.294430 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f0733ea-bb8a-4c0f-9ea8-edd6f687301e-config-data" (OuterVolumeSpecName: "config-data") pod "6f0733ea-bb8a-4c0f-9ea8-edd6f687301e" (UID: "6f0733ea-bb8a-4c0f-9ea8-edd6f687301e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.295321 4558 scope.go:117] "RemoveContainer" containerID="4a4fc0e35d400b1717dbd88e9cb076f22b20999b3e1867032e08446fa50f658e" Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.307024 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f0733ea-bb8a-4c0f-9ea8-edd6f687301e-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "6f0733ea-bb8a-4c0f-9ea8-edd6f687301e" (UID: "6f0733ea-bb8a-4c0f-9ea8-edd6f687301e"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.308437 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f0733ea-bb8a-4c0f-9ea8-edd6f687301e-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "6f0733ea-bb8a-4c0f-9ea8-edd6f687301e" (UID: "6f0733ea-bb8a-4c0f-9ea8-edd6f687301e"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.316039 4558 scope.go:117] "RemoveContainer" containerID="d464d4bcdd194c4ee64ccc0190f47e6e0ded24b37d0780e2cc7ded9c8f572a41" Jan 20 17:58:11 crc kubenswrapper[4558]: E0120 17:58:11.375727 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:58:11 crc kubenswrapper[4558]: E0120 17:58:11.375819 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/747b1096-c84a-43e1-930e-9f887a42c524-combined-ca-bundle podName:747b1096-c84a-43e1-930e-9f887a42c524 nodeName:}" failed. No retries permitted until 2026-01-20 17:58:15.375797882 +0000 UTC m=+4589.136135849 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/747b1096-c84a-43e1-930e-9f887a42c524-combined-ca-bundle") pod "keystone-747b5668bc-jgcd6" (UID: "747b1096-c84a-43e1-930e-9f887a42c524") : secret "combined-ca-bundle" not found Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.375854 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6f0733ea-bb8a-4c0f-9ea8-edd6f687301e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.375876 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6f0733ea-bb8a-4c0f-9ea8-edd6f687301e-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.375889 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6f0733ea-bb8a-4c0f-9ea8-edd6f687301e-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.375905 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6f0733ea-bb8a-4c0f-9ea8-edd6f687301e-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.375920 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2q6tn\" (UniqueName: \"kubernetes.io/projected/6f0733ea-bb8a-4c0f-9ea8-edd6f687301e-kube-api-access-2q6tn\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:11 crc kubenswrapper[4558]: E0120 17:58:11.375850 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-keystone-internal-svc: secret "cert-keystone-internal-svc" not found Jan 20 17:58:11 crc kubenswrapper[4558]: E0120 17:58:11.375908 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/cert-keystone-public-svc: secret "cert-keystone-public-svc" not found Jan 20 17:58:11 crc kubenswrapper[4558]: E0120 17:58:11.375957 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/747b1096-c84a-43e1-930e-9f887a42c524-internal-tls-certs podName:747b1096-c84a-43e1-930e-9f887a42c524 nodeName:}" failed. No retries permitted until 2026-01-20 17:58:15.37595084 +0000 UTC m=+4589.136288797 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "internal-tls-certs" (UniqueName: "kubernetes.io/secret/747b1096-c84a-43e1-930e-9f887a42c524-internal-tls-certs") pod "keystone-747b5668bc-jgcd6" (UID: "747b1096-c84a-43e1-930e-9f887a42c524") : secret "cert-keystone-internal-svc" not found Jan 20 17:58:11 crc kubenswrapper[4558]: E0120 17:58:11.376083 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/747b1096-c84a-43e1-930e-9f887a42c524-public-tls-certs podName:747b1096-c84a-43e1-930e-9f887a42c524 nodeName:}" failed. No retries permitted until 2026-01-20 17:58:15.376065575 +0000 UTC m=+4589.136403543 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "public-tls-certs" (UniqueName: "kubernetes.io/secret/747b1096-c84a-43e1-930e-9f887a42c524-public-tls-certs") pod "keystone-747b5668bc-jgcd6" (UID: "747b1096-c84a-43e1-930e-9f887a42c524") : secret "cert-keystone-public-svc" not found Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.565966 4558 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." 
pod="openstack-kuttl-tests/nova-scheduler-0" secret="" err="secret \"nova-nova-dockercfg-s7p6w\" not found" Jan 20 17:58:11 crc kubenswrapper[4558]: E0120 17:58:11.680374 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:58:11 crc kubenswrapper[4558]: E0120 17:58:11.680491 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f5beef7a-89e0-4e53-960b-1cacc665d586-combined-ca-bundle podName:f5beef7a-89e0-4e53-960b-1cacc665d586 nodeName:}" failed. No retries permitted until 2026-01-20 17:58:12.180464625 +0000 UTC m=+4585.940802593 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/f5beef7a-89e0-4e53-960b-1cacc665d586-combined-ca-bundle") pod "nova-scheduler-0" (UID: "f5beef7a-89e0-4e53-960b-1cacc665d586") : secret "combined-ca-bundle" not found Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.709007 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/root-account-create-update-x75ng" Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.709121 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/root-account-create-update-x75ng" event={"ID":"75a8c03c-5b0a-4144-b1a8-293ce0d50739","Type":"ContainerDied","Data":"26276f024cfeb757d03447f36508b63bdeb579bd42eb98c498a7736d6ea9f934"} Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.709286 4558 scope.go:117] "RemoveContainer" containerID="cdef0a456ecd1cc7d8ef71a2f399391ccf5a26e7ba80fce2ee22ed4cc27a792a" Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.713233 4558 generic.go:334] "Generic (PLEG): container finished" podID="6f0733ea-bb8a-4c0f-9ea8-edd6f687301e" containerID="9c03143dbfd7f3302ef2dbffcc4cbebbe5dec6b4db2e4e83a24cb243d0feac1c" exitCode=0 Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.713362 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"6f0733ea-bb8a-4c0f-9ea8-edd6f687301e","Type":"ContainerDied","Data":"9c03143dbfd7f3302ef2dbffcc4cbebbe5dec6b4db2e4e83a24cb243d0feac1c"} Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.713398 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-api-0" event={"ID":"6f0733ea-bb8a-4c0f-9ea8-edd6f687301e","Type":"ContainerDied","Data":"f4ff893712d3918c57eb9ba718a110d56e06d254307060f771a68a95a658c3cf"} Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.713482 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-api-0" Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.722090 4558 generic.go:334] "Generic (PLEG): container finished" podID="8a5251c1-60bc-45d7-8524-fd654c09505b" containerID="9f47eb91549a4c39117393c154adc8a527a01fad6a90f46a70181a157bb9033a" exitCode=0 Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.722144 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/barbican-worker-6c8cb46bb9-k2j62" Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.722194 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-6c8cb46bb9-k2j62" event={"ID":"8a5251c1-60bc-45d7-8524-fd654c09505b","Type":"ContainerDied","Data":"9f47eb91549a4c39117393c154adc8a527a01fad6a90f46a70181a157bb9033a"} Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.722243 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/barbican-worker-6c8cb46bb9-k2j62" event={"ID":"8a5251c1-60bc-45d7-8524-fd654c09505b","Type":"ContainerDied","Data":"27e45eccea6c6cd3142e139b3a5989021d2f7124d2038226b4cf11882553f944"} Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.799693 4558 scope.go:117] "RemoveContainer" containerID="9c03143dbfd7f3302ef2dbffcc4cbebbe5dec6b4db2e4e83a24cb243d0feac1c" Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.836005 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/barbican-worker-6c8cb46bb9-k2j62"] Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.845757 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/barbican-worker-6c8cb46bb9-k2j62"] Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.850021 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.852718 4558 scope.go:117] "RemoveContainer" containerID="3a49860bcfdad30df2f697c1fe78b7c7dd55ae654b13cecc83fb386c4afb2fa4" Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.856611 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-api-0"] Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.859035 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-x75ng"] Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.886647 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/root-account-create-update-x75ng"] Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.906130 4558 scope.go:117] "RemoveContainer" containerID="9c03143dbfd7f3302ef2dbffcc4cbebbe5dec6b4db2e4e83a24cb243d0feac1c" Jan 20 17:58:11 crc kubenswrapper[4558]: E0120 17:58:11.906573 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9c03143dbfd7f3302ef2dbffcc4cbebbe5dec6b4db2e4e83a24cb243d0feac1c\": container with ID starting with 9c03143dbfd7f3302ef2dbffcc4cbebbe5dec6b4db2e4e83a24cb243d0feac1c not found: ID does not exist" containerID="9c03143dbfd7f3302ef2dbffcc4cbebbe5dec6b4db2e4e83a24cb243d0feac1c" Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.906611 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9c03143dbfd7f3302ef2dbffcc4cbebbe5dec6b4db2e4e83a24cb243d0feac1c"} err="failed to get container status \"9c03143dbfd7f3302ef2dbffcc4cbebbe5dec6b4db2e4e83a24cb243d0feac1c\": rpc error: code = NotFound desc = could not find container \"9c03143dbfd7f3302ef2dbffcc4cbebbe5dec6b4db2e4e83a24cb243d0feac1c\": container with ID starting with 9c03143dbfd7f3302ef2dbffcc4cbebbe5dec6b4db2e4e83a24cb243d0feac1c not found: ID does not exist" Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.906635 4558 scope.go:117] "RemoveContainer" containerID="3a49860bcfdad30df2f697c1fe78b7c7dd55ae654b13cecc83fb386c4afb2fa4" 
Jan 20 17:58:11 crc kubenswrapper[4558]: E0120 17:58:11.907078 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3a49860bcfdad30df2f697c1fe78b7c7dd55ae654b13cecc83fb386c4afb2fa4\": container with ID starting with 3a49860bcfdad30df2f697c1fe78b7c7dd55ae654b13cecc83fb386c4afb2fa4 not found: ID does not exist" containerID="3a49860bcfdad30df2f697c1fe78b7c7dd55ae654b13cecc83fb386c4afb2fa4" Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.907121 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3a49860bcfdad30df2f697c1fe78b7c7dd55ae654b13cecc83fb386c4afb2fa4"} err="failed to get container status \"3a49860bcfdad30df2f697c1fe78b7c7dd55ae654b13cecc83fb386c4afb2fa4\": rpc error: code = NotFound desc = could not find container \"3a49860bcfdad30df2f697c1fe78b7c7dd55ae654b13cecc83fb386c4afb2fa4\": container with ID starting with 3a49860bcfdad30df2f697c1fe78b7c7dd55ae654b13cecc83fb386c4afb2fa4 not found: ID does not exist" Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.907151 4558 scope.go:117] "RemoveContainer" containerID="9f47eb91549a4c39117393c154adc8a527a01fad6a90f46a70181a157bb9033a" Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.930200 4558 scope.go:117] "RemoveContainer" containerID="cc8c4f5e82e163a1cfed41c73a53986199d859e052ef15e033504df9f64fa7a9" Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.951113 4558 scope.go:117] "RemoveContainer" containerID="9f47eb91549a4c39117393c154adc8a527a01fad6a90f46a70181a157bb9033a" Jan 20 17:58:11 crc kubenswrapper[4558]: E0120 17:58:11.951445 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9f47eb91549a4c39117393c154adc8a527a01fad6a90f46a70181a157bb9033a\": container with ID starting with 9f47eb91549a4c39117393c154adc8a527a01fad6a90f46a70181a157bb9033a not found: ID does not exist" containerID="9f47eb91549a4c39117393c154adc8a527a01fad6a90f46a70181a157bb9033a" Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.951490 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9f47eb91549a4c39117393c154adc8a527a01fad6a90f46a70181a157bb9033a"} err="failed to get container status \"9f47eb91549a4c39117393c154adc8a527a01fad6a90f46a70181a157bb9033a\": rpc error: code = NotFound desc = could not find container \"9f47eb91549a4c39117393c154adc8a527a01fad6a90f46a70181a157bb9033a\": container with ID starting with 9f47eb91549a4c39117393c154adc8a527a01fad6a90f46a70181a157bb9033a not found: ID does not exist" Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.951518 4558 scope.go:117] "RemoveContainer" containerID="cc8c4f5e82e163a1cfed41c73a53986199d859e052ef15e033504df9f64fa7a9" Jan 20 17:58:11 crc kubenswrapper[4558]: E0120 17:58:11.952146 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cc8c4f5e82e163a1cfed41c73a53986199d859e052ef15e033504df9f64fa7a9\": container with ID starting with cc8c4f5e82e163a1cfed41c73a53986199d859e052ef15e033504df9f64fa7a9 not found: ID does not exist" containerID="cc8c4f5e82e163a1cfed41c73a53986199d859e052ef15e033504df9f64fa7a9" Jan 20 17:58:11 crc kubenswrapper[4558]: I0120 17:58:11.952207 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cc8c4f5e82e163a1cfed41c73a53986199d859e052ef15e033504df9f64fa7a9"} err="failed to get 
container status \"cc8c4f5e82e163a1cfed41c73a53986199d859e052ef15e033504df9f64fa7a9\": rpc error: code = NotFound desc = could not find container \"cc8c4f5e82e163a1cfed41c73a53986199d859e052ef15e033504df9f64fa7a9\": container with ID starting with cc8c4f5e82e163a1cfed41c73a53986199d859e052ef15e033504df9f64fa7a9 not found: ID does not exist" Jan 20 17:58:11 crc kubenswrapper[4558]: E0120 17:58:11.984471 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="93ad707d620f625a0b7b0c96075af0a312fdd519b3c0dede73cdb9bef7e7e9a5" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:58:11 crc kubenswrapper[4558]: E0120 17:58:11.986053 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="93ad707d620f625a0b7b0c96075af0a312fdd519b3c0dede73cdb9bef7e7e9a5" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:58:11 crc kubenswrapper[4558]: E0120 17:58:11.987390 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="93ad707d620f625a0b7b0c96075af0a312fdd519b3c0dede73cdb9bef7e7e9a5" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:58:11 crc kubenswrapper[4558]: E0120 17:58:11.987463 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-cell1-conductor-0" podUID="54cbdea3-8fcc-4d2d-870b-cf3663cfc633" containerName="nova-cell1-conductor-conductor" Jan 20 17:58:11 crc kubenswrapper[4558]: E0120 17:58:11.988878 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-cell1-config-data: configmap "rabbitmq-cell1-config-data" not found Jan 20 17:58:11 crc kubenswrapper[4558]: E0120 17:58:11.989031 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/948fb15d-07f1-4b25-b8d5-7d582024ef28-config-data podName:948fb15d-07f1-4b25-b8d5-7d582024ef28 nodeName:}" failed. No retries permitted until 2026-01-20 17:58:19.989010852 +0000 UTC m=+4593.749348820 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/948fb15d-07f1-4b25-b8d5-7d582024ef28-config-data") pod "rabbitmq-cell1-server-0" (UID: "948fb15d-07f1-4b25-b8d5-7d582024ef28") : configmap "rabbitmq-cell1-config-data" not found Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.016622 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-747b5668bc-jgcd6" Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.190964 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/747b1096-c84a-43e1-930e-9f887a42c524-config-data\") pod \"747b1096-c84a-43e1-930e-9f887a42c524\" (UID: \"747b1096-c84a-43e1-930e-9f887a42c524\") " Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.191239 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/747b1096-c84a-43e1-930e-9f887a42c524-public-tls-certs\") pod \"747b1096-c84a-43e1-930e-9f887a42c524\" (UID: \"747b1096-c84a-43e1-930e-9f887a42c524\") " Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.191500 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/747b1096-c84a-43e1-930e-9f887a42c524-fernet-keys\") pod \"747b1096-c84a-43e1-930e-9f887a42c524\" (UID: \"747b1096-c84a-43e1-930e-9f887a42c524\") " Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.191595 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/747b1096-c84a-43e1-930e-9f887a42c524-internal-tls-certs\") pod \"747b1096-c84a-43e1-930e-9f887a42c524\" (UID: \"747b1096-c84a-43e1-930e-9f887a42c524\") " Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.191646 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/747b1096-c84a-43e1-930e-9f887a42c524-credential-keys\") pod \"747b1096-c84a-43e1-930e-9f887a42c524\" (UID: \"747b1096-c84a-43e1-930e-9f887a42c524\") " Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.191673 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/747b1096-c84a-43e1-930e-9f887a42c524-combined-ca-bundle\") pod \"747b1096-c84a-43e1-930e-9f887a42c524\" (UID: \"747b1096-c84a-43e1-930e-9f887a42c524\") " Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.191726 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sd6x5\" (UniqueName: \"kubernetes.io/projected/747b1096-c84a-43e1-930e-9f887a42c524-kube-api-access-sd6x5\") pod \"747b1096-c84a-43e1-930e-9f887a42c524\" (UID: \"747b1096-c84a-43e1-930e-9f887a42c524\") " Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.191766 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/747b1096-c84a-43e1-930e-9f887a42c524-scripts\") pod \"747b1096-c84a-43e1-930e-9f887a42c524\" (UID: \"747b1096-c84a-43e1-930e-9f887a42c524\") " Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.195885 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/747b1096-c84a-43e1-930e-9f887a42c524-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "747b1096-c84a-43e1-930e-9f887a42c524" (UID: "747b1096-c84a-43e1-930e-9f887a42c524"). InnerVolumeSpecName "credential-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.197095 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/747b1096-c84a-43e1-930e-9f887a42c524-kube-api-access-sd6x5" (OuterVolumeSpecName: "kube-api-access-sd6x5") pod "747b1096-c84a-43e1-930e-9f887a42c524" (UID: "747b1096-c84a-43e1-930e-9f887a42c524"). InnerVolumeSpecName "kube-api-access-sd6x5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.197435 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/747b1096-c84a-43e1-930e-9f887a42c524-scripts" (OuterVolumeSpecName: "scripts") pod "747b1096-c84a-43e1-930e-9f887a42c524" (UID: "747b1096-c84a-43e1-930e-9f887a42c524"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.197852 4558 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/747b1096-c84a-43e1-930e-9f887a42c524-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.197884 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sd6x5\" (UniqueName: \"kubernetes.io/projected/747b1096-c84a-43e1-930e-9f887a42c524-kube-api-access-sd6x5\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.197898 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/747b1096-c84a-43e1-930e-9f887a42c524-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:12 crc kubenswrapper[4558]: E0120 17:58:12.197934 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:58:12 crc kubenswrapper[4558]: E0120 17:58:12.198135 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f5beef7a-89e0-4e53-960b-1cacc665d586-combined-ca-bundle podName:f5beef7a-89e0-4e53-960b-1cacc665d586 nodeName:}" failed. No retries permitted until 2026-01-20 17:58:13.198105856 +0000 UTC m=+4586.958443823 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/f5beef7a-89e0-4e53-960b-1cacc665d586-combined-ca-bundle") pod "nova-scheduler-0" (UID: "f5beef7a-89e0-4e53-960b-1cacc665d586") : secret "combined-ca-bundle" not found Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.198270 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/747b1096-c84a-43e1-930e-9f887a42c524-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "747b1096-c84a-43e1-930e-9f887a42c524" (UID: "747b1096-c84a-43e1-930e-9f887a42c524"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.215475 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/747b1096-c84a-43e1-930e-9f887a42c524-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "747b1096-c84a-43e1-930e-9f887a42c524" (UID: "747b1096-c84a-43e1-930e-9f887a42c524"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.219929 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/747b1096-c84a-43e1-930e-9f887a42c524-config-data" (OuterVolumeSpecName: "config-data") pod "747b1096-c84a-43e1-930e-9f887a42c524" (UID: "747b1096-c84a-43e1-930e-9f887a42c524"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.224043 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/747b1096-c84a-43e1-930e-9f887a42c524-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "747b1096-c84a-43e1-930e-9f887a42c524" (UID: "747b1096-c84a-43e1-930e-9f887a42c524"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.239195 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/747b1096-c84a-43e1-930e-9f887a42c524-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "747b1096-c84a-43e1-930e-9f887a42c524" (UID: "747b1096-c84a-43e1-930e-9f887a42c524"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.300335 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/747b1096-c84a-43e1-930e-9f887a42c524-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.300371 4558 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/747b1096-c84a-43e1-930e-9f887a42c524-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.300383 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/747b1096-c84a-43e1-930e-9f887a42c524-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.300394 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/747b1096-c84a-43e1-930e-9f887a42c524-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.300406 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/747b1096-c84a-43e1-930e-9f887a42c524-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.432748 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:58:12 crc kubenswrapper[4558]: E0120 17:58:12.504829 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/rabbitmq-config-data: configmap "rabbitmq-config-data" not found Jan 20 17:58:12 crc kubenswrapper[4558]: E0120 17:58:12.504911 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/d512bec4-7ed0-43bb-b8fe-0f235f7698e5-config-data podName:d512bec4-7ed0-43bb-b8fe-0f235f7698e5 nodeName:}" failed. No retries permitted until 2026-01-20 17:58:20.504892625 +0000 UTC m=+4594.265230592 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/d512bec4-7ed0-43bb-b8fe-0f235f7698e5-config-data") pod "rabbitmq-server-0" (UID: "d512bec4-7ed0-43bb-b8fe-0f235f7698e5") : configmap "rabbitmq-config-data" not found Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.576927 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b0c3280-9b26-4db4-ab16-68016b055a1f" path="/var/lib/kubelet/pods/0b0c3280-9b26-4db4-ab16-68016b055a1f/volumes" Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.578182 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="50829e14-401d-4958-9ef7-e2a2ef1f4b32" path="/var/lib/kubelet/pods/50829e14-401d-4958-9ef7-e2a2ef1f4b32/volumes" Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.578799 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6f0733ea-bb8a-4c0f-9ea8-edd6f687301e" path="/var/lib/kubelet/pods/6f0733ea-bb8a-4c0f-9ea8-edd6f687301e/volumes" Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.579787 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="71795b1b-e445-456a-aa68-e410bdaccc73" path="/var/lib/kubelet/pods/71795b1b-e445-456a-aa68-e410bdaccc73/volumes" Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.580276 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="75a8c03c-5b0a-4144-b1a8-293ce0d50739" path="/var/lib/kubelet/pods/75a8c03c-5b0a-4144-b1a8-293ce0d50739/volumes" Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.582459 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7dcf074e-8444-4bd2-b1f9-143390e96ef8" path="/var/lib/kubelet/pods/7dcf074e-8444-4bd2-b1f9-143390e96ef8/volumes" Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.583733 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8a5251c1-60bc-45d7-8524-fd654c09505b" path="/var/lib/kubelet/pods/8a5251c1-60bc-45d7-8524-fd654c09505b/volumes" Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.584378 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f5c27dd-2365-49ee-b4bd-b46e952199f7" path="/var/lib/kubelet/pods/8f5c27dd-2365-49ee-b4bd-b46e952199f7/volumes" Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.585089 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6" path="/var/lib/kubelet/pods/94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6/volumes" Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.586123 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7" path="/var/lib/kubelet/pods/9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7/volumes" Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.586762 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c8f11f45-bfce-4989-bfa0-684011f74619" path="/var/lib/kubelet/pods/c8f11f45-bfce-4989-bfa0-684011f74619/volumes" Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.588011 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dbdd2687-1110-4dce-a521-19c9337df3a2" path="/var/lib/kubelet/pods/dbdd2687-1110-4dce-a521-19c9337df3a2/volumes" Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.605564 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/948fb15d-07f1-4b25-b8d5-7d582024ef28-plugins-conf\") pod 
\"948fb15d-07f1-4b25-b8d5-7d582024ef28\" (UID: \"948fb15d-07f1-4b25-b8d5-7d582024ef28\") " Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.605762 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/948fb15d-07f1-4b25-b8d5-7d582024ef28-config-data\") pod \"948fb15d-07f1-4b25-b8d5-7d582024ef28\" (UID: \"948fb15d-07f1-4b25-b8d5-7d582024ef28\") " Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.605810 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mp9f4\" (UniqueName: \"kubernetes.io/projected/948fb15d-07f1-4b25-b8d5-7d582024ef28-kube-api-access-mp9f4\") pod \"948fb15d-07f1-4b25-b8d5-7d582024ef28\" (UID: \"948fb15d-07f1-4b25-b8d5-7d582024ef28\") " Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.605833 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/948fb15d-07f1-4b25-b8d5-7d582024ef28-rabbitmq-tls\") pod \"948fb15d-07f1-4b25-b8d5-7d582024ef28\" (UID: \"948fb15d-07f1-4b25-b8d5-7d582024ef28\") " Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.605884 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/948fb15d-07f1-4b25-b8d5-7d582024ef28-rabbitmq-erlang-cookie\") pod \"948fb15d-07f1-4b25-b8d5-7d582024ef28\" (UID: \"948fb15d-07f1-4b25-b8d5-7d582024ef28\") " Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.605929 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/948fb15d-07f1-4b25-b8d5-7d582024ef28-rabbitmq-plugins\") pod \"948fb15d-07f1-4b25-b8d5-7d582024ef28\" (UID: \"948fb15d-07f1-4b25-b8d5-7d582024ef28\") " Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.606040 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/948fb15d-07f1-4b25-b8d5-7d582024ef28-rabbitmq-confd\") pod \"948fb15d-07f1-4b25-b8d5-7d582024ef28\" (UID: \"948fb15d-07f1-4b25-b8d5-7d582024ef28\") " Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.606061 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/948fb15d-07f1-4b25-b8d5-7d582024ef28-server-conf\") pod \"948fb15d-07f1-4b25-b8d5-7d582024ef28\" (UID: \"948fb15d-07f1-4b25-b8d5-7d582024ef28\") " Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.606079 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"948fb15d-07f1-4b25-b8d5-7d582024ef28\" (UID: \"948fb15d-07f1-4b25-b8d5-7d582024ef28\") " Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.606127 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/948fb15d-07f1-4b25-b8d5-7d582024ef28-pod-info\") pod \"948fb15d-07f1-4b25-b8d5-7d582024ef28\" (UID: \"948fb15d-07f1-4b25-b8d5-7d582024ef28\") " Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.606189 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/948fb15d-07f1-4b25-b8d5-7d582024ef28-erlang-cookie-secret\") pod 
\"948fb15d-07f1-4b25-b8d5-7d582024ef28\" (UID: \"948fb15d-07f1-4b25-b8d5-7d582024ef28\") " Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.606557 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/948fb15d-07f1-4b25-b8d5-7d582024ef28-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "948fb15d-07f1-4b25-b8d5-7d582024ef28" (UID: "948fb15d-07f1-4b25-b8d5-7d582024ef28"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.606796 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/948fb15d-07f1-4b25-b8d5-7d582024ef28-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "948fb15d-07f1-4b25-b8d5-7d582024ef28" (UID: "948fb15d-07f1-4b25-b8d5-7d582024ef28"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.606953 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/948fb15d-07f1-4b25-b8d5-7d582024ef28-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "948fb15d-07f1-4b25-b8d5-7d582024ef28" (UID: "948fb15d-07f1-4b25-b8d5-7d582024ef28"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.607865 4558 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/948fb15d-07f1-4b25-b8d5-7d582024ef28-plugins-conf\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.607895 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/948fb15d-07f1-4b25-b8d5-7d582024ef28-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.607914 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/948fb15d-07f1-4b25-b8d5-7d582024ef28-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.611065 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage02-crc" (OuterVolumeSpecName: "persistence") pod "948fb15d-07f1-4b25-b8d5-7d582024ef28" (UID: "948fb15d-07f1-4b25-b8d5-7d582024ef28"). InnerVolumeSpecName "local-storage02-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.611758 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/948fb15d-07f1-4b25-b8d5-7d582024ef28-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "948fb15d-07f1-4b25-b8d5-7d582024ef28" (UID: "948fb15d-07f1-4b25-b8d5-7d582024ef28"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.611886 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/948fb15d-07f1-4b25-b8d5-7d582024ef28-pod-info" (OuterVolumeSpecName: "pod-info") pod "948fb15d-07f1-4b25-b8d5-7d582024ef28" (UID: "948fb15d-07f1-4b25-b8d5-7d582024ef28"). InnerVolumeSpecName "pod-info". 
PluginName "kubernetes.io/downward-api", VolumeGidValue "" Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.612300 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/948fb15d-07f1-4b25-b8d5-7d582024ef28-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "948fb15d-07f1-4b25-b8d5-7d582024ef28" (UID: "948fb15d-07f1-4b25-b8d5-7d582024ef28"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.612387 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/948fb15d-07f1-4b25-b8d5-7d582024ef28-kube-api-access-mp9f4" (OuterVolumeSpecName: "kube-api-access-mp9f4") pod "948fb15d-07f1-4b25-b8d5-7d582024ef28" (UID: "948fb15d-07f1-4b25-b8d5-7d582024ef28"). InnerVolumeSpecName "kube-api-access-mp9f4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.625714 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/948fb15d-07f1-4b25-b8d5-7d582024ef28-config-data" (OuterVolumeSpecName: "config-data") pod "948fb15d-07f1-4b25-b8d5-7d582024ef28" (UID: "948fb15d-07f1-4b25-b8d5-7d582024ef28"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:58:12 crc kubenswrapper[4558]: E0120 17:58:12.640322 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="5772b7b876d3a0d68ec8e5a68aa617182f064469dac3fc22b2b6ef8493342da2" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.640925 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/948fb15d-07f1-4b25-b8d5-7d582024ef28-server-conf" (OuterVolumeSpecName: "server-conf") pod "948fb15d-07f1-4b25-b8d5-7d582024ef28" (UID: "948fb15d-07f1-4b25-b8d5-7d582024ef28"). InnerVolumeSpecName "server-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:58:12 crc kubenswrapper[4558]: E0120 17:58:12.642247 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="5772b7b876d3a0d68ec8e5a68aa617182f064469dac3fc22b2b6ef8493342da2" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:58:12 crc kubenswrapper[4558]: E0120 17:58:12.643742 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="5772b7b876d3a0d68ec8e5a68aa617182f064469dac3fc22b2b6ef8493342da2" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Jan 20 17:58:12 crc kubenswrapper[4558]: E0120 17:58:12.643803 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-cell0-conductor-0" podUID="8a1b8bc3-6d56-42f1-99a2-f80ff914e3d8" containerName="nova-cell0-conductor-conductor" Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.664517 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/948fb15d-07f1-4b25-b8d5-7d582024ef28-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "948fb15d-07f1-4b25-b8d5-7d582024ef28" (UID: "948fb15d-07f1-4b25-b8d5-7d582024ef28"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.710151 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/948fb15d-07f1-4b25-b8d5-7d582024ef28-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.710753 4558 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/948fb15d-07f1-4b25-b8d5-7d582024ef28-server-conf\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.710806 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" " Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.710892 4558 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/948fb15d-07f1-4b25-b8d5-7d582024ef28-pod-info\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.710938 4558 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/948fb15d-07f1-4b25-b8d5-7d582024ef28-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.710954 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/948fb15d-07f1-4b25-b8d5-7d582024ef28-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.710967 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mp9f4\" (UniqueName: \"kubernetes.io/projected/948fb15d-07f1-4b25-b8d5-7d582024ef28-kube-api-access-mp9f4\") on node \"crc\" DevicePath \"\"" Jan 
20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.710978 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/948fb15d-07f1-4b25-b8d5-7d582024ef28-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.724211 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage02-crc" (UniqueName: "kubernetes.io/local-volume/local-storage02-crc") on node "crc" Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.736712 4558 generic.go:334] "Generic (PLEG): container finished" podID="948fb15d-07f1-4b25-b8d5-7d582024ef28" containerID="04ae5aafbf49cda71c8764575c7291ff0e2e0ca0bcd2151b6b2cce95d4ba6b14" exitCode=0 Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.736742 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.736748 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" event={"ID":"948fb15d-07f1-4b25-b8d5-7d582024ef28","Type":"ContainerDied","Data":"04ae5aafbf49cda71c8764575c7291ff0e2e0ca0bcd2151b6b2cce95d4ba6b14"} Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.737034 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-cell1-server-0" event={"ID":"948fb15d-07f1-4b25-b8d5-7d582024ef28","Type":"ContainerDied","Data":"d344a258744d468aa298be757d9c716bec6943b9294fde18b017ca73f2a1c7d3"} Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.737076 4558 scope.go:117] "RemoveContainer" containerID="04ae5aafbf49cda71c8764575c7291ff0e2e0ca0bcd2151b6b2cce95d4ba6b14" Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.740379 4558 generic.go:334] "Generic (PLEG): container finished" podID="747b1096-c84a-43e1-930e-9f887a42c524" containerID="f8102ce497d4ab8bf9c0e437f9885597ba53dd47555a7751b8ba88fdad3cac9f" exitCode=0 Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.740502 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/keystone-747b5668bc-jgcd6" Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.740506 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-747b5668bc-jgcd6" event={"ID":"747b1096-c84a-43e1-930e-9f887a42c524","Type":"ContainerDied","Data":"f8102ce497d4ab8bf9c0e437f9885597ba53dd47555a7751b8ba88fdad3cac9f"} Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.740736 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/keystone-747b5668bc-jgcd6" event={"ID":"747b1096-c84a-43e1-930e-9f887a42c524","Type":"ContainerDied","Data":"fffe7f2e3037040f2613fe69c4c37b61f4fc32c437a71f652f741463a950b67f"} Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.764564 4558 scope.go:117] "RemoveContainer" containerID="d5c6ace4ceeb24008b1063ad1082708520feefaaabe670144960a772ad86c13a" Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.774393 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/keystone-747b5668bc-jgcd6"] Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.785431 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/keystone-747b5668bc-jgcd6"] Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.794256 4558 scope.go:117] "RemoveContainer" containerID="04ae5aafbf49cda71c8764575c7291ff0e2e0ca0bcd2151b6b2cce95d4ba6b14" Jan 20 17:58:12 crc kubenswrapper[4558]: E0120 17:58:12.795835 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"04ae5aafbf49cda71c8764575c7291ff0e2e0ca0bcd2151b6b2cce95d4ba6b14\": container with ID starting with 04ae5aafbf49cda71c8764575c7291ff0e2e0ca0bcd2151b6b2cce95d4ba6b14 not found: ID does not exist" containerID="04ae5aafbf49cda71c8764575c7291ff0e2e0ca0bcd2151b6b2cce95d4ba6b14" Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.795894 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"04ae5aafbf49cda71c8764575c7291ff0e2e0ca0bcd2151b6b2cce95d4ba6b14"} err="failed to get container status \"04ae5aafbf49cda71c8764575c7291ff0e2e0ca0bcd2151b6b2cce95d4ba6b14\": rpc error: code = NotFound desc = could not find container \"04ae5aafbf49cda71c8764575c7291ff0e2e0ca0bcd2151b6b2cce95d4ba6b14\": container with ID starting with 04ae5aafbf49cda71c8764575c7291ff0e2e0ca0bcd2151b6b2cce95d4ba6b14 not found: ID does not exist" Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.796435 4558 scope.go:117] "RemoveContainer" containerID="d5c6ace4ceeb24008b1063ad1082708520feefaaabe670144960a772ad86c13a" Jan 20 17:58:12 crc kubenswrapper[4558]: E0120 17:58:12.797150 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d5c6ace4ceeb24008b1063ad1082708520feefaaabe670144960a772ad86c13a\": container with ID starting with d5c6ace4ceeb24008b1063ad1082708520feefaaabe670144960a772ad86c13a not found: ID does not exist" containerID="d5c6ace4ceeb24008b1063ad1082708520feefaaabe670144960a772ad86c13a" Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.797208 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d5c6ace4ceeb24008b1063ad1082708520feefaaabe670144960a772ad86c13a"} err="failed to get container status \"d5c6ace4ceeb24008b1063ad1082708520feefaaabe670144960a772ad86c13a\": rpc error: code = NotFound desc = could not find container 
\"d5c6ace4ceeb24008b1063ad1082708520feefaaabe670144960a772ad86c13a\": container with ID starting with d5c6ace4ceeb24008b1063ad1082708520feefaaabe670144960a772ad86c13a not found: ID does not exist" Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.797239 4558 scope.go:117] "RemoveContainer" containerID="f8102ce497d4ab8bf9c0e437f9885597ba53dd47555a7751b8ba88fdad3cac9f" Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.798740 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-0"] Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.809693 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/rabbitmq-cell1-server-0"] Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.813654 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.820553 4558 scope.go:117] "RemoveContainer" containerID="f8102ce497d4ab8bf9c0e437f9885597ba53dd47555a7751b8ba88fdad3cac9f" Jan 20 17:58:12 crc kubenswrapper[4558]: E0120 17:58:12.820947 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f8102ce497d4ab8bf9c0e437f9885597ba53dd47555a7751b8ba88fdad3cac9f\": container with ID starting with f8102ce497d4ab8bf9c0e437f9885597ba53dd47555a7751b8ba88fdad3cac9f not found: ID does not exist" containerID="f8102ce497d4ab8bf9c0e437f9885597ba53dd47555a7751b8ba88fdad3cac9f" Jan 20 17:58:12 crc kubenswrapper[4558]: I0120 17:58:12.821000 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f8102ce497d4ab8bf9c0e437f9885597ba53dd47555a7751b8ba88fdad3cac9f"} err="failed to get container status \"f8102ce497d4ab8bf9c0e437f9885597ba53dd47555a7751b8ba88fdad3cac9f\": rpc error: code = NotFound desc = could not find container \"f8102ce497d4ab8bf9c0e437f9885597ba53dd47555a7751b8ba88fdad3cac9f\": container with ID starting with f8102ce497d4ab8bf9c0e437f9885597ba53dd47555a7751b8ba88fdad3cac9f not found: ID does not exist" Jan 20 17:58:13 crc kubenswrapper[4558]: E0120 17:58:13.221570 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:58:13 crc kubenswrapper[4558]: E0120 17:58:13.222206 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f5beef7a-89e0-4e53-960b-1cacc665d586-combined-ca-bundle podName:f5beef7a-89e0-4e53-960b-1cacc665d586 nodeName:}" failed. No retries permitted until 2026-01-20 17:58:15.222184596 +0000 UTC m=+4588.982522564 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/f5beef7a-89e0-4e53-960b-1cacc665d586-combined-ca-bundle") pod "nova-scheduler-0" (UID: "f5beef7a-89e0-4e53-960b-1cacc665d586") : secret "combined-ca-bundle" not found Jan 20 17:58:13 crc kubenswrapper[4558]: I0120 17:58:13.759412 4558 generic.go:334] "Generic (PLEG): container finished" podID="d512bec4-7ed0-43bb-b8fe-0f235f7698e5" containerID="9f92bbadf6b5978b2ec804e4ad254a0133f5ddc1c7a0c908a984ae2727969174" exitCode=0 Jan 20 17:58:13 crc kubenswrapper[4558]: I0120 17:58:13.759524 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-0" event={"ID":"d512bec4-7ed0-43bb-b8fe-0f235f7698e5","Type":"ContainerDied","Data":"9f92bbadf6b5978b2ec804e4ad254a0133f5ddc1c7a0c908a984ae2727969174"} Jan 20 17:58:13 crc kubenswrapper[4558]: I0120 17:58:13.759557 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/rabbitmq-server-0" event={"ID":"d512bec4-7ed0-43bb-b8fe-0f235f7698e5","Type":"ContainerDied","Data":"3f7922c9eee61eec77ed6df6e70ea27b3330dae72576bbf24d7bf62d888bdf0f"} Jan 20 17:58:13 crc kubenswrapper[4558]: I0120 17:58:13.759571 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3f7922c9eee61eec77ed6df6e70ea27b3330dae72576bbf24d7bf62d888bdf0f" Jan 20 17:58:13 crc kubenswrapper[4558]: I0120 17:58:13.760850 4558 generic.go:334] "Generic (PLEG): container finished" podID="54cbdea3-8fcc-4d2d-870b-cf3663cfc633" containerID="93ad707d620f625a0b7b0c96075af0a312fdd519b3c0dede73cdb9bef7e7e9a5" exitCode=0 Jan 20 17:58:13 crc kubenswrapper[4558]: I0120 17:58:13.760891 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"54cbdea3-8fcc-4d2d-870b-cf3663cfc633","Type":"ContainerDied","Data":"93ad707d620f625a0b7b0c96075af0a312fdd519b3c0dede73cdb9bef7e7e9a5"} Jan 20 17:58:13 crc kubenswrapper[4558]: I0120 17:58:13.797040 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:58:13 crc kubenswrapper[4558]: I0120 17:58:13.886239 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:58:13 crc kubenswrapper[4558]: I0120 17:58:13.931828 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d512bec4-7ed0-43bb-b8fe-0f235f7698e5-rabbitmq-plugins\") pod \"d512bec4-7ed0-43bb-b8fe-0f235f7698e5\" (UID: \"d512bec4-7ed0-43bb-b8fe-0f235f7698e5\") " Jan 20 17:58:13 crc kubenswrapper[4558]: I0120 17:58:13.932205 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/d512bec4-7ed0-43bb-b8fe-0f235f7698e5-rabbitmq-tls\") pod \"d512bec4-7ed0-43bb-b8fe-0f235f7698e5\" (UID: \"d512bec4-7ed0-43bb-b8fe-0f235f7698e5\") " Jan 20 17:58:13 crc kubenswrapper[4558]: I0120 17:58:13.932340 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d512bec4-7ed0-43bb-b8fe-0f235f7698e5-server-conf\") pod \"d512bec4-7ed0-43bb-b8fe-0f235f7698e5\" (UID: \"d512bec4-7ed0-43bb-b8fe-0f235f7698e5\") " Jan 20 17:58:13 crc kubenswrapper[4558]: I0120 17:58:13.932381 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d512bec4-7ed0-43bb-b8fe-0f235f7698e5-pod-info\") pod \"d512bec4-7ed0-43bb-b8fe-0f235f7698e5\" (UID: \"d512bec4-7ed0-43bb-b8fe-0f235f7698e5\") " Jan 20 17:58:13 crc kubenswrapper[4558]: I0120 17:58:13.932417 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d512bec4-7ed0-43bb-b8fe-0f235f7698e5-plugins-conf\") pod \"d512bec4-7ed0-43bb-b8fe-0f235f7698e5\" (UID: \"d512bec4-7ed0-43bb-b8fe-0f235f7698e5\") " Jan 20 17:58:13 crc kubenswrapper[4558]: I0120 17:58:13.932400 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d512bec4-7ed0-43bb-b8fe-0f235f7698e5-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "d512bec4-7ed0-43bb-b8fe-0f235f7698e5" (UID: "d512bec4-7ed0-43bb-b8fe-0f235f7698e5"). InnerVolumeSpecName "rabbitmq-plugins". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:58:13 crc kubenswrapper[4558]: I0120 17:58:13.932455 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d512bec4-7ed0-43bb-b8fe-0f235f7698e5-rabbitmq-confd\") pod \"d512bec4-7ed0-43bb-b8fe-0f235f7698e5\" (UID: \"d512bec4-7ed0-43bb-b8fe-0f235f7698e5\") " Jan 20 17:58:13 crc kubenswrapper[4558]: I0120 17:58:13.932487 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d512bec4-7ed0-43bb-b8fe-0f235f7698e5-config-data\") pod \"d512bec4-7ed0-43bb-b8fe-0f235f7698e5\" (UID: \"d512bec4-7ed0-43bb-b8fe-0f235f7698e5\") " Jan 20 17:58:13 crc kubenswrapper[4558]: I0120 17:58:13.932519 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage18-crc\") pod \"d512bec4-7ed0-43bb-b8fe-0f235f7698e5\" (UID: \"d512bec4-7ed0-43bb-b8fe-0f235f7698e5\") " Jan 20 17:58:13 crc kubenswrapper[4558]: I0120 17:58:13.932573 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d512bec4-7ed0-43bb-b8fe-0f235f7698e5-rabbitmq-erlang-cookie\") pod \"d512bec4-7ed0-43bb-b8fe-0f235f7698e5\" (UID: \"d512bec4-7ed0-43bb-b8fe-0f235f7698e5\") " Jan 20 17:58:13 crc kubenswrapper[4558]: I0120 17:58:13.932617 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mv99z\" (UniqueName: \"kubernetes.io/projected/d512bec4-7ed0-43bb-b8fe-0f235f7698e5-kube-api-access-mv99z\") pod \"d512bec4-7ed0-43bb-b8fe-0f235f7698e5\" (UID: \"d512bec4-7ed0-43bb-b8fe-0f235f7698e5\") " Jan 20 17:58:13 crc kubenswrapper[4558]: I0120 17:58:13.932688 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d512bec4-7ed0-43bb-b8fe-0f235f7698e5-erlang-cookie-secret\") pod \"d512bec4-7ed0-43bb-b8fe-0f235f7698e5\" (UID: \"d512bec4-7ed0-43bb-b8fe-0f235f7698e5\") " Jan 20 17:58:13 crc kubenswrapper[4558]: I0120 17:58:13.933111 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d512bec4-7ed0-43bb-b8fe-0f235f7698e5-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:13 crc kubenswrapper[4558]: I0120 17:58:13.933316 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d512bec4-7ed0-43bb-b8fe-0f235f7698e5-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "d512bec4-7ed0-43bb-b8fe-0f235f7698e5" (UID: "d512bec4-7ed0-43bb-b8fe-0f235f7698e5"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:58:13 crc kubenswrapper[4558]: I0120 17:58:13.933776 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d512bec4-7ed0-43bb-b8fe-0f235f7698e5-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "d512bec4-7ed0-43bb-b8fe-0f235f7698e5" (UID: "d512bec4-7ed0-43bb-b8fe-0f235f7698e5"). InnerVolumeSpecName "plugins-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:58:13 crc kubenswrapper[4558]: I0120 17:58:13.938625 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d512bec4-7ed0-43bb-b8fe-0f235f7698e5-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "d512bec4-7ed0-43bb-b8fe-0f235f7698e5" (UID: "d512bec4-7ed0-43bb-b8fe-0f235f7698e5"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:13 crc kubenswrapper[4558]: I0120 17:58:13.938691 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d512bec4-7ed0-43bb-b8fe-0f235f7698e5-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "d512bec4-7ed0-43bb-b8fe-0f235f7698e5" (UID: "d512bec4-7ed0-43bb-b8fe-0f235f7698e5"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:58:13 crc kubenswrapper[4558]: I0120 17:58:13.938743 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage18-crc" (OuterVolumeSpecName: "persistence") pod "d512bec4-7ed0-43bb-b8fe-0f235f7698e5" (UID: "d512bec4-7ed0-43bb-b8fe-0f235f7698e5"). InnerVolumeSpecName "local-storage18-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:58:13 crc kubenswrapper[4558]: I0120 17:58:13.938793 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/d512bec4-7ed0-43bb-b8fe-0f235f7698e5-pod-info" (OuterVolumeSpecName: "pod-info") pod "d512bec4-7ed0-43bb-b8fe-0f235f7698e5" (UID: "d512bec4-7ed0-43bb-b8fe-0f235f7698e5"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Jan 20 17:58:13 crc kubenswrapper[4558]: I0120 17:58:13.939107 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d512bec4-7ed0-43bb-b8fe-0f235f7698e5-kube-api-access-mv99z" (OuterVolumeSpecName: "kube-api-access-mv99z") pod "d512bec4-7ed0-43bb-b8fe-0f235f7698e5" (UID: "d512bec4-7ed0-43bb-b8fe-0f235f7698e5"). InnerVolumeSpecName "kube-api-access-mv99z". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:58:13 crc kubenswrapper[4558]: I0120 17:58:13.954459 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d512bec4-7ed0-43bb-b8fe-0f235f7698e5-config-data" (OuterVolumeSpecName: "config-data") pod "d512bec4-7ed0-43bb-b8fe-0f235f7698e5" (UID: "d512bec4-7ed0-43bb-b8fe-0f235f7698e5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:58:13 crc kubenswrapper[4558]: I0120 17:58:13.968833 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d512bec4-7ed0-43bb-b8fe-0f235f7698e5-server-conf" (OuterVolumeSpecName: "server-conf") pod "d512bec4-7ed0-43bb-b8fe-0f235f7698e5" (UID: "d512bec4-7ed0-43bb-b8fe-0f235f7698e5"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:58:13 crc kubenswrapper[4558]: I0120 17:58:13.994991 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d512bec4-7ed0-43bb-b8fe-0f235f7698e5-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "d512bec4-7ed0-43bb-b8fe-0f235f7698e5" (UID: "d512bec4-7ed0-43bb-b8fe-0f235f7698e5"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:58:14 crc kubenswrapper[4558]: I0120 17:58:14.034761 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jxrm2\" (UniqueName: \"kubernetes.io/projected/54cbdea3-8fcc-4d2d-870b-cf3663cfc633-kube-api-access-jxrm2\") pod \"54cbdea3-8fcc-4d2d-870b-cf3663cfc633\" (UID: \"54cbdea3-8fcc-4d2d-870b-cf3663cfc633\") " Jan 20 17:58:14 crc kubenswrapper[4558]: I0120 17:58:14.034835 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54cbdea3-8fcc-4d2d-870b-cf3663cfc633-config-data\") pod \"54cbdea3-8fcc-4d2d-870b-cf3663cfc633\" (UID: \"54cbdea3-8fcc-4d2d-870b-cf3663cfc633\") " Jan 20 17:58:14 crc kubenswrapper[4558]: I0120 17:58:14.034942 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54cbdea3-8fcc-4d2d-870b-cf3663cfc633-combined-ca-bundle\") pod \"54cbdea3-8fcc-4d2d-870b-cf3663cfc633\" (UID: \"54cbdea3-8fcc-4d2d-870b-cf3663cfc633\") " Jan 20 17:58:14 crc kubenswrapper[4558]: I0120 17:58:14.035508 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mv99z\" (UniqueName: \"kubernetes.io/projected/d512bec4-7ed0-43bb-b8fe-0f235f7698e5-kube-api-access-mv99z\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:14 crc kubenswrapper[4558]: I0120 17:58:14.035532 4558 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d512bec4-7ed0-43bb-b8fe-0f235f7698e5-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:14 crc kubenswrapper[4558]: I0120 17:58:14.035546 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/d512bec4-7ed0-43bb-b8fe-0f235f7698e5-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:14 crc kubenswrapper[4558]: I0120 17:58:14.035559 4558 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d512bec4-7ed0-43bb-b8fe-0f235f7698e5-server-conf\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:14 crc kubenswrapper[4558]: I0120 17:58:14.035571 4558 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d512bec4-7ed0-43bb-b8fe-0f235f7698e5-pod-info\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:14 crc kubenswrapper[4558]: I0120 17:58:14.035585 4558 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d512bec4-7ed0-43bb-b8fe-0f235f7698e5-plugins-conf\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:14 crc kubenswrapper[4558]: I0120 17:58:14.035607 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d512bec4-7ed0-43bb-b8fe-0f235f7698e5-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:14 crc kubenswrapper[4558]: I0120 17:58:14.035619 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d512bec4-7ed0-43bb-b8fe-0f235f7698e5-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:14 crc kubenswrapper[4558]: I0120 17:58:14.035646 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage18-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage18-crc\") on node \"crc\" " Jan 20 17:58:14 crc 
kubenswrapper[4558]: I0120 17:58:14.035659 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d512bec4-7ed0-43bb-b8fe-0f235f7698e5-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:14 crc kubenswrapper[4558]: I0120 17:58:14.037355 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/54cbdea3-8fcc-4d2d-870b-cf3663cfc633-kube-api-access-jxrm2" (OuterVolumeSpecName: "kube-api-access-jxrm2") pod "54cbdea3-8fcc-4d2d-870b-cf3663cfc633" (UID: "54cbdea3-8fcc-4d2d-870b-cf3663cfc633"). InnerVolumeSpecName "kube-api-access-jxrm2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:58:14 crc kubenswrapper[4558]: I0120 17:58:14.048068 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage18-crc" (UniqueName: "kubernetes.io/local-volume/local-storage18-crc") on node "crc" Jan 20 17:58:14 crc kubenswrapper[4558]: I0120 17:58:14.052365 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54cbdea3-8fcc-4d2d-870b-cf3663cfc633-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "54cbdea3-8fcc-4d2d-870b-cf3663cfc633" (UID: "54cbdea3-8fcc-4d2d-870b-cf3663cfc633"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:14 crc kubenswrapper[4558]: I0120 17:58:14.052741 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54cbdea3-8fcc-4d2d-870b-cf3663cfc633-config-data" (OuterVolumeSpecName: "config-data") pod "54cbdea3-8fcc-4d2d-870b-cf3663cfc633" (UID: "54cbdea3-8fcc-4d2d-870b-cf3663cfc633"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:14 crc kubenswrapper[4558]: I0120 17:58:14.138241 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jxrm2\" (UniqueName: \"kubernetes.io/projected/54cbdea3-8fcc-4d2d-870b-cf3663cfc633-kube-api-access-jxrm2\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:14 crc kubenswrapper[4558]: I0120 17:58:14.138279 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54cbdea3-8fcc-4d2d-870b-cf3663cfc633-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:14 crc kubenswrapper[4558]: I0120 17:58:14.138291 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage18-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage18-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:14 crc kubenswrapper[4558]: I0120 17:58:14.138303 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54cbdea3-8fcc-4d2d-870b-cf3663cfc633-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:14 crc kubenswrapper[4558]: I0120 17:58:14.586186 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="747b1096-c84a-43e1-930e-9f887a42c524" path="/var/lib/kubelet/pods/747b1096-c84a-43e1-930e-9f887a42c524/volumes" Jan 20 17:58:14 crc kubenswrapper[4558]: I0120 17:58:14.587074 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="948fb15d-07f1-4b25-b8d5-7d582024ef28" path="/var/lib/kubelet/pods/948fb15d-07f1-4b25-b8d5-7d582024ef28/volumes" Jan 20 17:58:14 crc kubenswrapper[4558]: I0120 17:58:14.773316 4558 generic.go:334] "Generic (PLEG): container finished" podID="680eb8d4-16c8-40ae-bfb6-054fcf23f281" containerID="c1d7abefdf96cc2643c3a9001b8d6be58a4e2428ed5a8dab2d908237b77ba7cc" exitCode=0 Jan 20 17:58:14 crc kubenswrapper[4558]: I0120 17:58:14.773367 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"680eb8d4-16c8-40ae-bfb6-054fcf23f281","Type":"ContainerDied","Data":"c1d7abefdf96cc2643c3a9001b8d6be58a4e2428ed5a8dab2d908237b77ba7cc"} Jan 20 17:58:14 crc kubenswrapper[4558]: I0120 17:58:14.774870 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/rabbitmq-server-0" Jan 20 17:58:14 crc kubenswrapper[4558]: I0120 17:58:14.775260 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell1-conductor-0" event={"ID":"54cbdea3-8fcc-4d2d-870b-cf3663cfc633","Type":"ContainerDied","Data":"27028af688f77cb0d0d48c29a6639a2e3128cc80e75bbbd64fd48d2760225807"} Jan 20 17:58:14 crc kubenswrapper[4558]: I0120 17:58:14.775314 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-cell1-conductor-0" Jan 20 17:58:14 crc kubenswrapper[4558]: I0120 17:58:14.775324 4558 scope.go:117] "RemoveContainer" containerID="93ad707d620f625a0b7b0c96075af0a312fdd519b3c0dede73cdb9bef7e7e9a5" Jan 20 17:58:14 crc kubenswrapper[4558]: I0120 17:58:14.908492 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:58:14 crc kubenswrapper[4558]: I0120 17:58:14.921522 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:58:14 crc kubenswrapper[4558]: I0120 17:58:14.930732 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell1-conductor-0"] Jan 20 17:58:14 crc kubenswrapper[4558]: I0120 17:58:14.939234 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:58:14 crc kubenswrapper[4558]: I0120 17:58:14.944122 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/rabbitmq-server-0"] Jan 20 17:58:14 crc kubenswrapper[4558]: I0120 17:58:14.953496 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/680eb8d4-16c8-40ae-bfb6-054fcf23f281-ceilometer-tls-certs\") pod \"680eb8d4-16c8-40ae-bfb6-054fcf23f281\" (UID: \"680eb8d4-16c8-40ae-bfb6-054fcf23f281\") " Jan 20 17:58:14 crc kubenswrapper[4558]: I0120 17:58:14.953564 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/680eb8d4-16c8-40ae-bfb6-054fcf23f281-scripts\") pod \"680eb8d4-16c8-40ae-bfb6-054fcf23f281\" (UID: \"680eb8d4-16c8-40ae-bfb6-054fcf23f281\") " Jan 20 17:58:14 crc kubenswrapper[4558]: I0120 17:58:14.953589 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dczqx\" (UniqueName: \"kubernetes.io/projected/680eb8d4-16c8-40ae-bfb6-054fcf23f281-kube-api-access-dczqx\") pod \"680eb8d4-16c8-40ae-bfb6-054fcf23f281\" (UID: \"680eb8d4-16c8-40ae-bfb6-054fcf23f281\") " Jan 20 17:58:14 crc kubenswrapper[4558]: I0120 17:58:14.953627 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/680eb8d4-16c8-40ae-bfb6-054fcf23f281-sg-core-conf-yaml\") pod \"680eb8d4-16c8-40ae-bfb6-054fcf23f281\" (UID: \"680eb8d4-16c8-40ae-bfb6-054fcf23f281\") " Jan 20 17:58:14 crc kubenswrapper[4558]: I0120 17:58:14.953777 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/680eb8d4-16c8-40ae-bfb6-054fcf23f281-combined-ca-bundle\") pod \"680eb8d4-16c8-40ae-bfb6-054fcf23f281\" (UID: \"680eb8d4-16c8-40ae-bfb6-054fcf23f281\") " Jan 20 17:58:14 crc kubenswrapper[4558]: I0120 17:58:14.953803 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/680eb8d4-16c8-40ae-bfb6-054fcf23f281-config-data\") pod \"680eb8d4-16c8-40ae-bfb6-054fcf23f281\" (UID: \"680eb8d4-16c8-40ae-bfb6-054fcf23f281\") " Jan 20 17:58:14 crc kubenswrapper[4558]: I0120 17:58:14.953842 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/680eb8d4-16c8-40ae-bfb6-054fcf23f281-log-httpd\") pod \"680eb8d4-16c8-40ae-bfb6-054fcf23f281\" (UID: \"680eb8d4-16c8-40ae-bfb6-054fcf23f281\") " Jan 20 17:58:14 crc kubenswrapper[4558]: I0120 17:58:14.953887 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/680eb8d4-16c8-40ae-bfb6-054fcf23f281-run-httpd\") pod \"680eb8d4-16c8-40ae-bfb6-054fcf23f281\" (UID: \"680eb8d4-16c8-40ae-bfb6-054fcf23f281\") " 
Jan 20 17:58:14 crc kubenswrapper[4558]: I0120 17:58:14.954714 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/680eb8d4-16c8-40ae-bfb6-054fcf23f281-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "680eb8d4-16c8-40ae-bfb6-054fcf23f281" (UID: "680eb8d4-16c8-40ae-bfb6-054fcf23f281"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:58:14 crc kubenswrapper[4558]: I0120 17:58:14.957762 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/680eb8d4-16c8-40ae-bfb6-054fcf23f281-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "680eb8d4-16c8-40ae-bfb6-054fcf23f281" (UID: "680eb8d4-16c8-40ae-bfb6-054fcf23f281"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:58:14 crc kubenswrapper[4558]: I0120 17:58:14.965500 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/680eb8d4-16c8-40ae-bfb6-054fcf23f281-scripts" (OuterVolumeSpecName: "scripts") pod "680eb8d4-16c8-40ae-bfb6-054fcf23f281" (UID: "680eb8d4-16c8-40ae-bfb6-054fcf23f281"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:14 crc kubenswrapper[4558]: I0120 17:58:14.966077 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/680eb8d4-16c8-40ae-bfb6-054fcf23f281-kube-api-access-dczqx" (OuterVolumeSpecName: "kube-api-access-dczqx") pod "680eb8d4-16c8-40ae-bfb6-054fcf23f281" (UID: "680eb8d4-16c8-40ae-bfb6-054fcf23f281"). InnerVolumeSpecName "kube-api-access-dczqx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:58:14 crc kubenswrapper[4558]: I0120 17:58:14.975762 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/680eb8d4-16c8-40ae-bfb6-054fcf23f281-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "680eb8d4-16c8-40ae-bfb6-054fcf23f281" (UID: "680eb8d4-16c8-40ae-bfb6-054fcf23f281"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:14 crc kubenswrapper[4558]: I0120 17:58:14.993974 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/680eb8d4-16c8-40ae-bfb6-054fcf23f281-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "680eb8d4-16c8-40ae-bfb6-054fcf23f281" (UID: "680eb8d4-16c8-40ae-bfb6-054fcf23f281"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:15 crc kubenswrapper[4558]: I0120 17:58:15.013263 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/680eb8d4-16c8-40ae-bfb6-054fcf23f281-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "680eb8d4-16c8-40ae-bfb6-054fcf23f281" (UID: "680eb8d4-16c8-40ae-bfb6-054fcf23f281"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:15 crc kubenswrapper[4558]: I0120 17:58:15.028571 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/680eb8d4-16c8-40ae-bfb6-054fcf23f281-config-data" (OuterVolumeSpecName: "config-data") pod "680eb8d4-16c8-40ae-bfb6-054fcf23f281" (UID: "680eb8d4-16c8-40ae-bfb6-054fcf23f281"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:15 crc kubenswrapper[4558]: I0120 17:58:15.057598 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/680eb8d4-16c8-40ae-bfb6-054fcf23f281-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:15 crc kubenswrapper[4558]: I0120 17:58:15.057628 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/680eb8d4-16c8-40ae-bfb6-054fcf23f281-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:15 crc kubenswrapper[4558]: I0120 17:58:15.057639 4558 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/680eb8d4-16c8-40ae-bfb6-054fcf23f281-log-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:15 crc kubenswrapper[4558]: I0120 17:58:15.057650 4558 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/680eb8d4-16c8-40ae-bfb6-054fcf23f281-run-httpd\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:15 crc kubenswrapper[4558]: I0120 17:58:15.057662 4558 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/680eb8d4-16c8-40ae-bfb6-054fcf23f281-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:15 crc kubenswrapper[4558]: I0120 17:58:15.057673 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/680eb8d4-16c8-40ae-bfb6-054fcf23f281-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:15 crc kubenswrapper[4558]: I0120 17:58:15.057686 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dczqx\" (UniqueName: \"kubernetes.io/projected/680eb8d4-16c8-40ae-bfb6-054fcf23f281-kube-api-access-dczqx\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:15 crc kubenswrapper[4558]: I0120 17:58:15.057697 4558 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/680eb8d4-16c8-40ae-bfb6-054fcf23f281-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:15 crc kubenswrapper[4558]: I0120 17:58:15.101807 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:58:15 crc kubenswrapper[4558]: I0120 17:58:15.260246 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a1b8bc3-6d56-42f1-99a2-f80ff914e3d8-config-data\") pod \"8a1b8bc3-6d56-42f1-99a2-f80ff914e3d8\" (UID: \"8a1b8bc3-6d56-42f1-99a2-f80ff914e3d8\") " Jan 20 17:58:15 crc kubenswrapper[4558]: I0120 17:58:15.260310 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a1b8bc3-6d56-42f1-99a2-f80ff914e3d8-combined-ca-bundle\") pod \"8a1b8bc3-6d56-42f1-99a2-f80ff914e3d8\" (UID: \"8a1b8bc3-6d56-42f1-99a2-f80ff914e3d8\") " Jan 20 17:58:15 crc kubenswrapper[4558]: I0120 17:58:15.260437 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xgl6v\" (UniqueName: \"kubernetes.io/projected/8a1b8bc3-6d56-42f1-99a2-f80ff914e3d8-kube-api-access-xgl6v\") pod \"8a1b8bc3-6d56-42f1-99a2-f80ff914e3d8\" (UID: \"8a1b8bc3-6d56-42f1-99a2-f80ff914e3d8\") " Jan 20 17:58:15 crc kubenswrapper[4558]: E0120 17:58:15.261313 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:58:15 crc kubenswrapper[4558]: E0120 17:58:15.261399 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f5beef7a-89e0-4e53-960b-1cacc665d586-combined-ca-bundle podName:f5beef7a-89e0-4e53-960b-1cacc665d586 nodeName:}" failed. No retries permitted until 2026-01-20 17:58:19.26137479 +0000 UTC m=+4593.021712747 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/f5beef7a-89e0-4e53-960b-1cacc665d586-combined-ca-bundle") pod "nova-scheduler-0" (UID: "f5beef7a-89e0-4e53-960b-1cacc665d586") : secret "combined-ca-bundle" not found Jan 20 17:58:15 crc kubenswrapper[4558]: I0120 17:58:15.264311 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/barbican-api-6bb757fd44-jvx79" podUID="9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7" containerName="barbican-api-log" probeResult="failure" output="Get \"https://10.217.0.130:9311/healthcheck\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Jan 20 17:58:15 crc kubenswrapper[4558]: I0120 17:58:15.264354 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/barbican-api-6bb757fd44-jvx79" podUID="9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7" containerName="barbican-api" probeResult="failure" output="Get \"https://10.217.0.130:9311/healthcheck\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 20 17:58:15 crc kubenswrapper[4558]: I0120 17:58:15.264812 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8a1b8bc3-6d56-42f1-99a2-f80ff914e3d8-kube-api-access-xgl6v" (OuterVolumeSpecName: "kube-api-access-xgl6v") pod "8a1b8bc3-6d56-42f1-99a2-f80ff914e3d8" (UID: "8a1b8bc3-6d56-42f1-99a2-f80ff914e3d8"). InnerVolumeSpecName "kube-api-access-xgl6v". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:58:15 crc kubenswrapper[4558]: I0120 17:58:15.278829 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a1b8bc3-6d56-42f1-99a2-f80ff914e3d8-config-data" (OuterVolumeSpecName: "config-data") pod "8a1b8bc3-6d56-42f1-99a2-f80ff914e3d8" (UID: "8a1b8bc3-6d56-42f1-99a2-f80ff914e3d8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:15 crc kubenswrapper[4558]: I0120 17:58:15.279556 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a1b8bc3-6d56-42f1-99a2-f80ff914e3d8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8a1b8bc3-6d56-42f1-99a2-f80ff914e3d8" (UID: "8a1b8bc3-6d56-42f1-99a2-f80ff914e3d8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:15 crc kubenswrapper[4558]: I0120 17:58:15.363149 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a1b8bc3-6d56-42f1-99a2-f80ff914e3d8-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:15 crc kubenswrapper[4558]: I0120 17:58:15.363204 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a1b8bc3-6d56-42f1-99a2-f80ff914e3d8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:15 crc kubenswrapper[4558]: I0120 17:58:15.363228 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xgl6v\" (UniqueName: \"kubernetes.io/projected/8a1b8bc3-6d56-42f1-99a2-f80ff914e3d8-kube-api-access-xgl6v\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:15 crc kubenswrapper[4558]: I0120 17:58:15.785190 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ceilometer-0" event={"ID":"680eb8d4-16c8-40ae-bfb6-054fcf23f281","Type":"ContainerDied","Data":"f6dcb5fc1d0deff15892ff3c64b6895e7b259d9fb06ca065118c08d037815769"} Jan 20 17:58:15 crc kubenswrapper[4558]: I0120 17:58:15.785217 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ceilometer-0" Jan 20 17:58:15 crc kubenswrapper[4558]: I0120 17:58:15.785246 4558 scope.go:117] "RemoveContainer" containerID="528a68a740a5bc1c67a3fbd8155ae60dbf9ec6b340d2ce96b37c200e6e6de11b" Jan 20 17:58:15 crc kubenswrapper[4558]: I0120 17:58:15.789059 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-cell0-conductor-0" Jan 20 17:58:15 crc kubenswrapper[4558]: I0120 17:58:15.789175 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"8a1b8bc3-6d56-42f1-99a2-f80ff914e3d8","Type":"ContainerDied","Data":"5772b7b876d3a0d68ec8e5a68aa617182f064469dac3fc22b2b6ef8493342da2"} Jan 20 17:58:15 crc kubenswrapper[4558]: I0120 17:58:15.791037 4558 generic.go:334] "Generic (PLEG): container finished" podID="8a1b8bc3-6d56-42f1-99a2-f80ff914e3d8" containerID="5772b7b876d3a0d68ec8e5a68aa617182f064469dac3fc22b2b6ef8493342da2" exitCode=0 Jan 20 17:58:15 crc kubenswrapper[4558]: I0120 17:58:15.791127 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-cell0-conductor-0" event={"ID":"8a1b8bc3-6d56-42f1-99a2-f80ff914e3d8","Type":"ContainerDied","Data":"42794666edb1bd11fa81bab1b80470a2797bd6f129b84f293af5f49df6003066"} Jan 20 17:58:15 crc kubenswrapper[4558]: I0120 17:58:15.813067 4558 scope.go:117] "RemoveContainer" containerID="2b5d5a7da9b64e1d83404cfe438d86d2e219e79fe96bf81cc97e5f2e3a7048ac" Jan 20 17:58:15 crc kubenswrapper[4558]: I0120 17:58:15.813406 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:58:15 crc kubenswrapper[4558]: I0120 17:58:15.818437 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ceilometer-0"] Jan 20 17:58:15 crc kubenswrapper[4558]: I0120 17:58:15.823616 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:58:15 crc kubenswrapper[4558]: I0120 17:58:15.826219 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-cell0-conductor-0"] Jan 20 17:58:15 crc kubenswrapper[4558]: I0120 17:58:15.832056 4558 scope.go:117] "RemoveContainer" containerID="c1d7abefdf96cc2643c3a9001b8d6be58a4e2428ed5a8dab2d908237b77ba7cc" Jan 20 17:58:15 crc kubenswrapper[4558]: I0120 17:58:15.848614 4558 scope.go:117] "RemoveContainer" containerID="a1c6c829c5d10f6cb194bd9017159b0daa07b6905d16ede8abe89e2a8949198f" Jan 20 17:58:15 crc kubenswrapper[4558]: I0120 17:58:15.866769 4558 scope.go:117] "RemoveContainer" containerID="5772b7b876d3a0d68ec8e5a68aa617182f064469dac3fc22b2b6ef8493342da2" Jan 20 17:58:15 crc kubenswrapper[4558]: I0120 17:58:15.888183 4558 scope.go:117] "RemoveContainer" containerID="5772b7b876d3a0d68ec8e5a68aa617182f064469dac3fc22b2b6ef8493342da2" Jan 20 17:58:15 crc kubenswrapper[4558]: E0120 17:58:15.888646 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5772b7b876d3a0d68ec8e5a68aa617182f064469dac3fc22b2b6ef8493342da2\": container with ID starting with 5772b7b876d3a0d68ec8e5a68aa617182f064469dac3fc22b2b6ef8493342da2 not found: ID does not exist" containerID="5772b7b876d3a0d68ec8e5a68aa617182f064469dac3fc22b2b6ef8493342da2" Jan 20 17:58:15 crc kubenswrapper[4558]: I0120 17:58:15.888711 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5772b7b876d3a0d68ec8e5a68aa617182f064469dac3fc22b2b6ef8493342da2"} err="failed to get container status \"5772b7b876d3a0d68ec8e5a68aa617182f064469dac3fc22b2b6ef8493342da2\": rpc error: code = NotFound desc = could not find container \"5772b7b876d3a0d68ec8e5a68aa617182f064469dac3fc22b2b6ef8493342da2\": container with ID starting with 5772b7b876d3a0d68ec8e5a68aa617182f064469dac3fc22b2b6ef8493342da2 not 
found: ID does not exist" Jan 20 17:58:16 crc kubenswrapper[4558]: I0120 17:58:16.232720 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:58:16 crc kubenswrapper[4558]: I0120 17:58:16.232946 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/memcached-0" podUID="ae90d4e7-e4c5-47e4-a320-f9f6069ed69d" containerName="memcached" containerID="cri-o://2f47c5b6e676c58c7ac4bb4634e7603f0a0fe15c650b10488e055bd1458da4c9" gracePeriod=30 Jan 20 17:58:16 crc kubenswrapper[4558]: I0120 17:58:16.241594 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:58:16 crc kubenswrapper[4558]: I0120 17:58:16.241803 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="f5beef7a-89e0-4e53-960b-1cacc665d586" containerName="nova-scheduler-scheduler" containerID="cri-o://06a55fe618e77fdc355e098374066b5b94b32c6d040b20a118075fff457d4cea" gracePeriod=30 Jan 20 17:58:16 crc kubenswrapper[4558]: I0120 17:58:16.259342 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-l7p2w" Jan 20 17:58:16 crc kubenswrapper[4558]: I0120 17:58:16.305174 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-5dc494d4bf-wrrjj"] Jan 20 17:58:16 crc kubenswrapper[4558]: I0120 17:58:16.305407 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-5dc494d4bf-wrrjj" podUID="20fbac90-6e69-4c04-8e92-0b191157f185" containerName="dnsmasq-dns" containerID="cri-o://7c8ffe31f73b84ce399c31f05147d9b9d3a1fbf8a93f4e5cb3cdd6110894c5ad" gracePeriod=10 Jan 20 17:58:16 crc kubenswrapper[4558]: I0120 17:58:16.576689 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="54cbdea3-8fcc-4d2d-870b-cf3663cfc633" path="/var/lib/kubelet/pods/54cbdea3-8fcc-4d2d-870b-cf3663cfc633/volumes" Jan 20 17:58:16 crc kubenswrapper[4558]: I0120 17:58:16.577682 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="680eb8d4-16c8-40ae-bfb6-054fcf23f281" path="/var/lib/kubelet/pods/680eb8d4-16c8-40ae-bfb6-054fcf23f281/volumes" Jan 20 17:58:16 crc kubenswrapper[4558]: I0120 17:58:16.578462 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8a1b8bc3-6d56-42f1-99a2-f80ff914e3d8" path="/var/lib/kubelet/pods/8a1b8bc3-6d56-42f1-99a2-f80ff914e3d8/volumes" Jan 20 17:58:16 crc kubenswrapper[4558]: I0120 17:58:16.580276 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d512bec4-7ed0-43bb-b8fe-0f235f7698e5" path="/var/lib/kubelet/pods/d512bec4-7ed0-43bb-b8fe-0f235f7698e5/volumes" Jan 20 17:58:16 crc kubenswrapper[4558]: I0120 17:58:16.705590 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-5dc494d4bf-wrrjj" Jan 20 17:58:16 crc kubenswrapper[4558]: I0120 17:58:16.814856 4558 generic.go:334] "Generic (PLEG): container finished" podID="20fbac90-6e69-4c04-8e92-0b191157f185" containerID="7c8ffe31f73b84ce399c31f05147d9b9d3a1fbf8a93f4e5cb3cdd6110894c5ad" exitCode=0 Jan 20 17:58:16 crc kubenswrapper[4558]: I0120 17:58:16.815596 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-5dc494d4bf-wrrjj" event={"ID":"20fbac90-6e69-4c04-8e92-0b191157f185","Type":"ContainerDied","Data":"7c8ffe31f73b84ce399c31f05147d9b9d3a1fbf8a93f4e5cb3cdd6110894c5ad"} Jan 20 17:58:16 crc kubenswrapper[4558]: I0120 17:58:16.815686 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-5dc494d4bf-wrrjj" event={"ID":"20fbac90-6e69-4c04-8e92-0b191157f185","Type":"ContainerDied","Data":"255d5113d91a699b2a0f958889f9b904277d1c4b3ec03be2a39726629473c38b"} Jan 20 17:58:16 crc kubenswrapper[4558]: I0120 17:58:16.815766 4558 scope.go:117] "RemoveContainer" containerID="7c8ffe31f73b84ce399c31f05147d9b9d3a1fbf8a93f4e5cb3cdd6110894c5ad" Jan 20 17:58:16 crc kubenswrapper[4558]: I0120 17:58:16.815963 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-5dc494d4bf-wrrjj" Jan 20 17:58:16 crc kubenswrapper[4558]: I0120 17:58:16.838750 4558 scope.go:117] "RemoveContainer" containerID="f08f1e5fb6f16eae2ddfa53be261fac5dbb2be9bcf923f9f4f3313ef2f5b6098" Jan 20 17:58:16 crc kubenswrapper[4558]: I0120 17:58:16.860393 4558 scope.go:117] "RemoveContainer" containerID="7c8ffe31f73b84ce399c31f05147d9b9d3a1fbf8a93f4e5cb3cdd6110894c5ad" Jan 20 17:58:16 crc kubenswrapper[4558]: E0120 17:58:16.860852 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7c8ffe31f73b84ce399c31f05147d9b9d3a1fbf8a93f4e5cb3cdd6110894c5ad\": container with ID starting with 7c8ffe31f73b84ce399c31f05147d9b9d3a1fbf8a93f4e5cb3cdd6110894c5ad not found: ID does not exist" containerID="7c8ffe31f73b84ce399c31f05147d9b9d3a1fbf8a93f4e5cb3cdd6110894c5ad" Jan 20 17:58:16 crc kubenswrapper[4558]: I0120 17:58:16.860901 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7c8ffe31f73b84ce399c31f05147d9b9d3a1fbf8a93f4e5cb3cdd6110894c5ad"} err="failed to get container status \"7c8ffe31f73b84ce399c31f05147d9b9d3a1fbf8a93f4e5cb3cdd6110894c5ad\": rpc error: code = NotFound desc = could not find container \"7c8ffe31f73b84ce399c31f05147d9b9d3a1fbf8a93f4e5cb3cdd6110894c5ad\": container with ID starting with 7c8ffe31f73b84ce399c31f05147d9b9d3a1fbf8a93f4e5cb3cdd6110894c5ad not found: ID does not exist" Jan 20 17:58:16 crc kubenswrapper[4558]: I0120 17:58:16.860933 4558 scope.go:117] "RemoveContainer" containerID="f08f1e5fb6f16eae2ddfa53be261fac5dbb2be9bcf923f9f4f3313ef2f5b6098" Jan 20 17:58:16 crc kubenswrapper[4558]: E0120 17:58:16.861391 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f08f1e5fb6f16eae2ddfa53be261fac5dbb2be9bcf923f9f4f3313ef2f5b6098\": container with ID starting with f08f1e5fb6f16eae2ddfa53be261fac5dbb2be9bcf923f9f4f3313ef2f5b6098 not found: ID does not exist" containerID="f08f1e5fb6f16eae2ddfa53be261fac5dbb2be9bcf923f9f4f3313ef2f5b6098" Jan 20 17:58:16 crc kubenswrapper[4558]: I0120 17:58:16.861428 4558 pod_container_deletor.go:53] 
"DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f08f1e5fb6f16eae2ddfa53be261fac5dbb2be9bcf923f9f4f3313ef2f5b6098"} err="failed to get container status \"f08f1e5fb6f16eae2ddfa53be261fac5dbb2be9bcf923f9f4f3313ef2f5b6098\": rpc error: code = NotFound desc = could not find container \"f08f1e5fb6f16eae2ddfa53be261fac5dbb2be9bcf923f9f4f3313ef2f5b6098\": container with ID starting with f08f1e5fb6f16eae2ddfa53be261fac5dbb2be9bcf923f9f4f3313ef2f5b6098 not found: ID does not exist" Jan 20 17:58:16 crc kubenswrapper[4558]: I0120 17:58:16.890190 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/20fbac90-6e69-4c04-8e92-0b191157f185-config\") pod \"20fbac90-6e69-4c04-8e92-0b191157f185\" (UID: \"20fbac90-6e69-4c04-8e92-0b191157f185\") " Jan 20 17:58:16 crc kubenswrapper[4558]: I0120 17:58:16.890265 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kcrwm\" (UniqueName: \"kubernetes.io/projected/20fbac90-6e69-4c04-8e92-0b191157f185-kube-api-access-kcrwm\") pod \"20fbac90-6e69-4c04-8e92-0b191157f185\" (UID: \"20fbac90-6e69-4c04-8e92-0b191157f185\") " Jan 20 17:58:16 crc kubenswrapper[4558]: I0120 17:58:16.890369 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/20fbac90-6e69-4c04-8e92-0b191157f185-dns-swift-storage-0\") pod \"20fbac90-6e69-4c04-8e92-0b191157f185\" (UID: \"20fbac90-6e69-4c04-8e92-0b191157f185\") " Jan 20 17:58:16 crc kubenswrapper[4558]: I0120 17:58:16.890420 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/20fbac90-6e69-4c04-8e92-0b191157f185-dnsmasq-svc\") pod \"20fbac90-6e69-4c04-8e92-0b191157f185\" (UID: \"20fbac90-6e69-4c04-8e92-0b191157f185\") " Jan 20 17:58:16 crc kubenswrapper[4558]: I0120 17:58:16.899851 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20fbac90-6e69-4c04-8e92-0b191157f185-kube-api-access-kcrwm" (OuterVolumeSpecName: "kube-api-access-kcrwm") pod "20fbac90-6e69-4c04-8e92-0b191157f185" (UID: "20fbac90-6e69-4c04-8e92-0b191157f185"). InnerVolumeSpecName "kube-api-access-kcrwm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:58:16 crc kubenswrapper[4558]: I0120 17:58:16.919603 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/20fbac90-6e69-4c04-8e92-0b191157f185-config" (OuterVolumeSpecName: "config") pod "20fbac90-6e69-4c04-8e92-0b191157f185" (UID: "20fbac90-6e69-4c04-8e92-0b191157f185"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:58:16 crc kubenswrapper[4558]: I0120 17:58:16.920263 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/20fbac90-6e69-4c04-8e92-0b191157f185-dnsmasq-svc" (OuterVolumeSpecName: "dnsmasq-svc") pod "20fbac90-6e69-4c04-8e92-0b191157f185" (UID: "20fbac90-6e69-4c04-8e92-0b191157f185"). InnerVolumeSpecName "dnsmasq-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:58:16 crc kubenswrapper[4558]: I0120 17:58:16.921399 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/20fbac90-6e69-4c04-8e92-0b191157f185-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "20fbac90-6e69-4c04-8e92-0b191157f185" (UID: "20fbac90-6e69-4c04-8e92-0b191157f185"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:58:16 crc kubenswrapper[4558]: I0120 17:58:16.992996 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/20fbac90-6e69-4c04-8e92-0b191157f185-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:16 crc kubenswrapper[4558]: I0120 17:58:16.993027 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kcrwm\" (UniqueName: \"kubernetes.io/projected/20fbac90-6e69-4c04-8e92-0b191157f185-kube-api-access-kcrwm\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:16 crc kubenswrapper[4558]: I0120 17:58:16.993042 4558 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/20fbac90-6e69-4c04-8e92-0b191157f185-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:16 crc kubenswrapper[4558]: I0120 17:58:16.993052 4558 reconciler_common.go:293] "Volume detached for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/20fbac90-6e69-4c04-8e92-0b191157f185-dnsmasq-svc\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:17 crc kubenswrapper[4558]: I0120 17:58:17.147884 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-5dc494d4bf-wrrjj"] Jan 20 17:58:17 crc kubenswrapper[4558]: I0120 17:58:17.152392 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-5dc494d4bf-wrrjj"] Jan 20 17:58:17 crc kubenswrapper[4558]: I0120 17:58:17.175714 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:58:17 crc kubenswrapper[4558]: I0120 17:58:17.296738 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae90d4e7-e4c5-47e4-a320-f9f6069ed69d-combined-ca-bundle\") pod \"ae90d4e7-e4c5-47e4-a320-f9f6069ed69d\" (UID: \"ae90d4e7-e4c5-47e4-a320-f9f6069ed69d\") " Jan 20 17:58:17 crc kubenswrapper[4558]: I0120 17:58:17.296869 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/ae90d4e7-e4c5-47e4-a320-f9f6069ed69d-memcached-tls-certs\") pod \"ae90d4e7-e4c5-47e4-a320-f9f6069ed69d\" (UID: \"ae90d4e7-e4c5-47e4-a320-f9f6069ed69d\") " Jan 20 17:58:17 crc kubenswrapper[4558]: I0120 17:58:17.296936 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9k7kh\" (UniqueName: \"kubernetes.io/projected/ae90d4e7-e4c5-47e4-a320-f9f6069ed69d-kube-api-access-9k7kh\") pod \"ae90d4e7-e4c5-47e4-a320-f9f6069ed69d\" (UID: \"ae90d4e7-e4c5-47e4-a320-f9f6069ed69d\") " Jan 20 17:58:17 crc kubenswrapper[4558]: I0120 17:58:17.296972 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ae90d4e7-e4c5-47e4-a320-f9f6069ed69d-config-data\") pod \"ae90d4e7-e4c5-47e4-a320-f9f6069ed69d\" (UID: \"ae90d4e7-e4c5-47e4-a320-f9f6069ed69d\") " Jan 20 17:58:17 crc kubenswrapper[4558]: I0120 17:58:17.297021 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/ae90d4e7-e4c5-47e4-a320-f9f6069ed69d-kolla-config\") pod \"ae90d4e7-e4c5-47e4-a320-f9f6069ed69d\" (UID: \"ae90d4e7-e4c5-47e4-a320-f9f6069ed69d\") " Jan 20 17:58:17 crc kubenswrapper[4558]: I0120 17:58:17.298242 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ae90d4e7-e4c5-47e4-a320-f9f6069ed69d-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "ae90d4e7-e4c5-47e4-a320-f9f6069ed69d" (UID: "ae90d4e7-e4c5-47e4-a320-f9f6069ed69d"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:58:17 crc kubenswrapper[4558]: I0120 17:58:17.298321 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ae90d4e7-e4c5-47e4-a320-f9f6069ed69d-config-data" (OuterVolumeSpecName: "config-data") pod "ae90d4e7-e4c5-47e4-a320-f9f6069ed69d" (UID: "ae90d4e7-e4c5-47e4-a320-f9f6069ed69d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:58:17 crc kubenswrapper[4558]: I0120 17:58:17.301651 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ae90d4e7-e4c5-47e4-a320-f9f6069ed69d-kube-api-access-9k7kh" (OuterVolumeSpecName: "kube-api-access-9k7kh") pod "ae90d4e7-e4c5-47e4-a320-f9f6069ed69d" (UID: "ae90d4e7-e4c5-47e4-a320-f9f6069ed69d"). InnerVolumeSpecName "kube-api-access-9k7kh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:58:17 crc kubenswrapper[4558]: I0120 17:58:17.319013 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae90d4e7-e4c5-47e4-a320-f9f6069ed69d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ae90d4e7-e4c5-47e4-a320-f9f6069ed69d" (UID: "ae90d4e7-e4c5-47e4-a320-f9f6069ed69d"). 
InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:17 crc kubenswrapper[4558]: I0120 17:58:17.327594 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae90d4e7-e4c5-47e4-a320-f9f6069ed69d-memcached-tls-certs" (OuterVolumeSpecName: "memcached-tls-certs") pod "ae90d4e7-e4c5-47e4-a320-f9f6069ed69d" (UID: "ae90d4e7-e4c5-47e4-a320-f9f6069ed69d"). InnerVolumeSpecName "memcached-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:17 crc kubenswrapper[4558]: I0120 17:58:17.400668 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9k7kh\" (UniqueName: \"kubernetes.io/projected/ae90d4e7-e4c5-47e4-a320-f9f6069ed69d-kube-api-access-9k7kh\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:17 crc kubenswrapper[4558]: I0120 17:58:17.400720 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ae90d4e7-e4c5-47e4-a320-f9f6069ed69d-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:17 crc kubenswrapper[4558]: I0120 17:58:17.400734 4558 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/ae90d4e7-e4c5-47e4-a320-f9f6069ed69d-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:17 crc kubenswrapper[4558]: I0120 17:58:17.400746 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae90d4e7-e4c5-47e4-a320-f9f6069ed69d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:17 crc kubenswrapper[4558]: I0120 17:58:17.400760 4558 reconciler_common.go:293] "Volume detached for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/ae90d4e7-e4c5-47e4-a320-f9f6069ed69d-memcached-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:17 crc kubenswrapper[4558]: I0120 17:58:17.827598 4558 generic.go:334] "Generic (PLEG): container finished" podID="ae90d4e7-e4c5-47e4-a320-f9f6069ed69d" containerID="2f47c5b6e676c58c7ac4bb4634e7603f0a0fe15c650b10488e055bd1458da4c9" exitCode=0 Jan 20 17:58:17 crc kubenswrapper[4558]: I0120 17:58:17.827655 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/memcached-0" Jan 20 17:58:17 crc kubenswrapper[4558]: I0120 17:58:17.827673 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"ae90d4e7-e4c5-47e4-a320-f9f6069ed69d","Type":"ContainerDied","Data":"2f47c5b6e676c58c7ac4bb4634e7603f0a0fe15c650b10488e055bd1458da4c9"} Jan 20 17:58:17 crc kubenswrapper[4558]: I0120 17:58:17.828210 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/memcached-0" event={"ID":"ae90d4e7-e4c5-47e4-a320-f9f6069ed69d","Type":"ContainerDied","Data":"680237fe0cd701eb1005e70da84f58e12468bf6b7f7436a8a226c30101b73278"} Jan 20 17:58:17 crc kubenswrapper[4558]: I0120 17:58:17.828234 4558 scope.go:117] "RemoveContainer" containerID="2f47c5b6e676c58c7ac4bb4634e7603f0a0fe15c650b10488e055bd1458da4c9" Jan 20 17:58:17 crc kubenswrapper[4558]: I0120 17:58:17.850654 4558 scope.go:117] "RemoveContainer" containerID="2f47c5b6e676c58c7ac4bb4634e7603f0a0fe15c650b10488e055bd1458da4c9" Jan 20 17:58:17 crc kubenswrapper[4558]: E0120 17:58:17.851082 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2f47c5b6e676c58c7ac4bb4634e7603f0a0fe15c650b10488e055bd1458da4c9\": container with ID starting with 2f47c5b6e676c58c7ac4bb4634e7603f0a0fe15c650b10488e055bd1458da4c9 not found: ID does not exist" containerID="2f47c5b6e676c58c7ac4bb4634e7603f0a0fe15c650b10488e055bd1458da4c9" Jan 20 17:58:17 crc kubenswrapper[4558]: I0120 17:58:17.851134 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2f47c5b6e676c58c7ac4bb4634e7603f0a0fe15c650b10488e055bd1458da4c9"} err="failed to get container status \"2f47c5b6e676c58c7ac4bb4634e7603f0a0fe15c650b10488e055bd1458da4c9\": rpc error: code = NotFound desc = could not find container \"2f47c5b6e676c58c7ac4bb4634e7603f0a0fe15c650b10488e055bd1458da4c9\": container with ID starting with 2f47c5b6e676c58c7ac4bb4634e7603f0a0fe15c650b10488e055bd1458da4c9 not found: ID does not exist" Jan 20 17:58:17 crc kubenswrapper[4558]: I0120 17:58:17.857446 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:58:17 crc kubenswrapper[4558]: I0120 17:58:17.864463 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/memcached-0"] Jan 20 17:58:18 crc kubenswrapper[4558]: I0120 17:58:18.575425 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20fbac90-6e69-4c04-8e92-0b191157f185" path="/var/lib/kubelet/pods/20fbac90-6e69-4c04-8e92-0b191157f185/volumes" Jan 20 17:58:18 crc kubenswrapper[4558]: I0120 17:58:18.576133 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ae90d4e7-e4c5-47e4-a320-f9f6069ed69d" path="/var/lib/kubelet/pods/ae90d4e7-e4c5-47e4-a320-f9f6069ed69d/volumes" Jan 20 17:58:19 crc kubenswrapper[4558]: E0120 17:58:19.332183 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/combined-ca-bundle: secret "combined-ca-bundle" not found Jan 20 17:58:19 crc kubenswrapper[4558]: E0120 17:58:19.332522 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f5beef7a-89e0-4e53-960b-1cacc665d586-combined-ca-bundle podName:f5beef7a-89e0-4e53-960b-1cacc665d586 nodeName:}" failed. No retries permitted until 2026-01-20 17:58:27.332507104 +0000 UTC m=+4601.092845071 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/f5beef7a-89e0-4e53-960b-1cacc665d586-combined-ca-bundle") pod "nova-scheduler-0" (UID: "f5beef7a-89e0-4e53-960b-1cacc665d586") : secret "combined-ca-bundle" not found Jan 20 17:58:20 crc kubenswrapper[4558]: E0120 17:58:20.417098 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="06a55fe618e77fdc355e098374066b5b94b32c6d040b20a118075fff457d4cea" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:58:20 crc kubenswrapper[4558]: E0120 17:58:20.418625 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="06a55fe618e77fdc355e098374066b5b94b32c6d040b20a118075fff457d4cea" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:58:20 crc kubenswrapper[4558]: E0120 17:58:20.419929 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="06a55fe618e77fdc355e098374066b5b94b32c6d040b20a118075fff457d4cea" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Jan 20 17:58:20 crc kubenswrapper[4558]: E0120 17:58:20.419974 4558 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack-kuttl-tests/nova-scheduler-0" podUID="f5beef7a-89e0-4e53-960b-1cacc665d586" containerName="nova-scheduler-scheduler" Jan 20 17:58:22 crc kubenswrapper[4558]: I0120 17:58:22.874392 4558 generic.go:334] "Generic (PLEG): container finished" podID="f5beef7a-89e0-4e53-960b-1cacc665d586" containerID="06a55fe618e77fdc355e098374066b5b94b32c6d040b20a118075fff457d4cea" exitCode=0 Jan 20 17:58:22 crc kubenswrapper[4558]: I0120 17:58:22.874450 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"f5beef7a-89e0-4e53-960b-1cacc665d586","Type":"ContainerDied","Data":"06a55fe618e77fdc355e098374066b5b94b32c6d040b20a118075fff457d4cea"} Jan 20 17:58:23 crc kubenswrapper[4558]: I0120 17:58:23.035089 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:58:23 crc kubenswrapper[4558]: I0120 17:58:23.182204 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5beef7a-89e0-4e53-960b-1cacc665d586-combined-ca-bundle\") pod \"f5beef7a-89e0-4e53-960b-1cacc665d586\" (UID: \"f5beef7a-89e0-4e53-960b-1cacc665d586\") " Jan 20 17:58:23 crc kubenswrapper[4558]: I0120 17:58:23.182292 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5beef7a-89e0-4e53-960b-1cacc665d586-config-data\") pod \"f5beef7a-89e0-4e53-960b-1cacc665d586\" (UID: \"f5beef7a-89e0-4e53-960b-1cacc665d586\") " Jan 20 17:58:23 crc kubenswrapper[4558]: I0120 17:58:23.182357 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b8bqv\" (UniqueName: \"kubernetes.io/projected/f5beef7a-89e0-4e53-960b-1cacc665d586-kube-api-access-b8bqv\") pod \"f5beef7a-89e0-4e53-960b-1cacc665d586\" (UID: \"f5beef7a-89e0-4e53-960b-1cacc665d586\") " Jan 20 17:58:23 crc kubenswrapper[4558]: I0120 17:58:23.188060 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f5beef7a-89e0-4e53-960b-1cacc665d586-kube-api-access-b8bqv" (OuterVolumeSpecName: "kube-api-access-b8bqv") pod "f5beef7a-89e0-4e53-960b-1cacc665d586" (UID: "f5beef7a-89e0-4e53-960b-1cacc665d586"). InnerVolumeSpecName "kube-api-access-b8bqv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:58:23 crc kubenswrapper[4558]: I0120 17:58:23.203307 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5beef7a-89e0-4e53-960b-1cacc665d586-config-data" (OuterVolumeSpecName: "config-data") pod "f5beef7a-89e0-4e53-960b-1cacc665d586" (UID: "f5beef7a-89e0-4e53-960b-1cacc665d586"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:23 crc kubenswrapper[4558]: I0120 17:58:23.203398 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5beef7a-89e0-4e53-960b-1cacc665d586-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f5beef7a-89e0-4e53-960b-1cacc665d586" (UID: "f5beef7a-89e0-4e53-960b-1cacc665d586"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:58:23 crc kubenswrapper[4558]: I0120 17:58:23.283561 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5beef7a-89e0-4e53-960b-1cacc665d586-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:23 crc kubenswrapper[4558]: I0120 17:58:23.283593 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b8bqv\" (UniqueName: \"kubernetes.io/projected/f5beef7a-89e0-4e53-960b-1cacc665d586-kube-api-access-b8bqv\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:23 crc kubenswrapper[4558]: I0120 17:58:23.283608 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5beef7a-89e0-4e53-960b-1cacc665d586-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:23 crc kubenswrapper[4558]: I0120 17:58:23.885806 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-scheduler-0" event={"ID":"f5beef7a-89e0-4e53-960b-1cacc665d586","Type":"ContainerDied","Data":"b0b70a84fc120072c22f67b39645276298802c40a8f1d3d2753fa575dce540e9"} Jan 20 17:58:23 crc kubenswrapper[4558]: I0120 17:58:23.886057 4558 scope.go:117] "RemoveContainer" containerID="06a55fe618e77fdc355e098374066b5b94b32c6d040b20a118075fff457d4cea" Jan 20 17:58:23 crc kubenswrapper[4558]: I0120 17:58:23.885869 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-scheduler-0" Jan 20 17:58:23 crc kubenswrapper[4558]: I0120 17:58:23.912058 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:58:23 crc kubenswrapper[4558]: I0120 17:58:23.916943 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-scheduler-0"] Jan 20 17:58:24 crc kubenswrapper[4558]: I0120 17:58:24.575372 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f5beef7a-89e0-4e53-960b-1cacc665d586" path="/var/lib/kubelet/pods/f5beef7a-89e0-4e53-960b-1cacc665d586/volumes" Jan 20 17:58:28 crc kubenswrapper[4558]: I0120 17:58:28.837568 4558 scope.go:117] "RemoveContainer" containerID="a277d8fbf0b081fa88cc6f698d169256895a7e996cb99dec573f1f32b2c3f74c" Jan 20 17:58:28 crc kubenswrapper[4558]: I0120 17:58:28.857261 4558 scope.go:117] "RemoveContainer" containerID="e300c235b0338f59c68e0edd429de09fcac767478cd41a3614d3e5af5e2760e6" Jan 20 17:58:36 crc kubenswrapper[4558]: I0120 17:58:36.037155 4558 generic.go:334] "Generic (PLEG): container finished" podID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" containerID="a7e5224c23551340586e780af89b39957adde43c70c7ae7717575e2e550d32e6" exitCode=137 Jan 20 17:58:36 crc kubenswrapper[4558]: I0120 17:58:36.037966 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"cd300665-4f75-4ed7-9166-4b48e1aeffc4","Type":"ContainerDied","Data":"a7e5224c23551340586e780af89b39957adde43c70c7ae7717575e2e550d32e6"} Jan 20 17:58:36 crc kubenswrapper[4558]: I0120 17:58:36.038047 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/swift-storage-0" event={"ID":"cd300665-4f75-4ed7-9166-4b48e1aeffc4","Type":"ContainerDied","Data":"db889e52f3b7f1dce867711e2b49e1d766e923e4984660f1244adda03007d81e"} Jan 20 17:58:36 crc kubenswrapper[4558]: I0120 17:58:36.038060 4558 pod_container_deletor.go:80] "Container not found in pod's containers" 
containerID="db889e52f3b7f1dce867711e2b49e1d766e923e4984660f1244adda03007d81e" Jan 20 17:58:36 crc kubenswrapper[4558]: I0120 17:58:36.044128 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:58:36 crc kubenswrapper[4558]: I0120 17:58:36.170502 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/cd300665-4f75-4ed7-9166-4b48e1aeffc4-cache\") pod \"cd300665-4f75-4ed7-9166-4b48e1aeffc4\" (UID: \"cd300665-4f75-4ed7-9166-4b48e1aeffc4\") " Jan 20 17:58:36 crc kubenswrapper[4558]: I0120 17:58:36.170692 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swift\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"cd300665-4f75-4ed7-9166-4b48e1aeffc4\" (UID: \"cd300665-4f75-4ed7-9166-4b48e1aeffc4\") " Jan 20 17:58:36 crc kubenswrapper[4558]: I0120 17:58:36.170808 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bfv8c\" (UniqueName: \"kubernetes.io/projected/cd300665-4f75-4ed7-9166-4b48e1aeffc4-kube-api-access-bfv8c\") pod \"cd300665-4f75-4ed7-9166-4b48e1aeffc4\" (UID: \"cd300665-4f75-4ed7-9166-4b48e1aeffc4\") " Jan 20 17:58:36 crc kubenswrapper[4558]: I0120 17:58:36.170842 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/cd300665-4f75-4ed7-9166-4b48e1aeffc4-etc-swift\") pod \"cd300665-4f75-4ed7-9166-4b48e1aeffc4\" (UID: \"cd300665-4f75-4ed7-9166-4b48e1aeffc4\") " Jan 20 17:58:36 crc kubenswrapper[4558]: I0120 17:58:36.170887 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/cd300665-4f75-4ed7-9166-4b48e1aeffc4-lock\") pod \"cd300665-4f75-4ed7-9166-4b48e1aeffc4\" (UID: \"cd300665-4f75-4ed7-9166-4b48e1aeffc4\") " Jan 20 17:58:36 crc kubenswrapper[4558]: I0120 17:58:36.171513 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cd300665-4f75-4ed7-9166-4b48e1aeffc4-lock" (OuterVolumeSpecName: "lock") pod "cd300665-4f75-4ed7-9166-4b48e1aeffc4" (UID: "cd300665-4f75-4ed7-9166-4b48e1aeffc4"). InnerVolumeSpecName "lock". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:58:36 crc kubenswrapper[4558]: I0120 17:58:36.171664 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cd300665-4f75-4ed7-9166-4b48e1aeffc4-cache" (OuterVolumeSpecName: "cache") pod "cd300665-4f75-4ed7-9166-4b48e1aeffc4" (UID: "cd300665-4f75-4ed7-9166-4b48e1aeffc4"). InnerVolumeSpecName "cache". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 17:58:36 crc kubenswrapper[4558]: I0120 17:58:36.176004 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "swift") pod "cd300665-4f75-4ed7-9166-4b48e1aeffc4" (UID: "cd300665-4f75-4ed7-9166-4b48e1aeffc4"). InnerVolumeSpecName "local-storage11-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 17:58:36 crc kubenswrapper[4558]: I0120 17:58:36.176256 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd300665-4f75-4ed7-9166-4b48e1aeffc4-kube-api-access-bfv8c" (OuterVolumeSpecName: "kube-api-access-bfv8c") pod "cd300665-4f75-4ed7-9166-4b48e1aeffc4" (UID: "cd300665-4f75-4ed7-9166-4b48e1aeffc4"). InnerVolumeSpecName "kube-api-access-bfv8c". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:58:36 crc kubenswrapper[4558]: I0120 17:58:36.176929 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd300665-4f75-4ed7-9166-4b48e1aeffc4-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "cd300665-4f75-4ed7-9166-4b48e1aeffc4" (UID: "cd300665-4f75-4ed7-9166-4b48e1aeffc4"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:58:36 crc kubenswrapper[4558]: I0120 17:58:36.272366 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bfv8c\" (UniqueName: \"kubernetes.io/projected/cd300665-4f75-4ed7-9166-4b48e1aeffc4-kube-api-access-bfv8c\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:36 crc kubenswrapper[4558]: I0120 17:58:36.272394 4558 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/cd300665-4f75-4ed7-9166-4b48e1aeffc4-etc-swift\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:36 crc kubenswrapper[4558]: I0120 17:58:36.272407 4558 reconciler_common.go:293] "Volume detached for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/cd300665-4f75-4ed7-9166-4b48e1aeffc4-lock\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:36 crc kubenswrapper[4558]: I0120 17:58:36.272418 4558 reconciler_common.go:293] "Volume detached for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/cd300665-4f75-4ed7-9166-4b48e1aeffc4-cache\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:36 crc kubenswrapper[4558]: I0120 17:58:36.272448 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Jan 20 17:58:36 crc kubenswrapper[4558]: I0120 17:58:36.282613 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Jan 20 17:58:36 crc kubenswrapper[4558]: I0120 17:58:36.373273 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:37 crc kubenswrapper[4558]: I0120 17:58:37.046189 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/swift-storage-0" Jan 20 17:58:37 crc kubenswrapper[4558]: I0120 17:58:37.071002 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/swift-storage-0"] Jan 20 17:58:37 crc kubenswrapper[4558]: I0120 17:58:37.074301 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/swift-storage-0"] Jan 20 17:58:37 crc kubenswrapper[4558]: I0120 17:58:37.401933 4558 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","pod17f6b80e-1961-4c44-a979-9c23bdd59837"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort pod17f6b80e-1961-4c44-a979-9c23bdd59837] : Timed out while waiting for systemd to remove kubepods-besteffort-pod17f6b80e_1961_4c44_a979_9c23bdd59837.slice" Jan 20 17:58:38 crc kubenswrapper[4558]: I0120 17:58:38.574254 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" path="/var/lib/kubelet/pods/cd300665-4f75-4ed7-9166-4b48e1aeffc4/volumes" Jan 20 17:58:38 crc kubenswrapper[4558]: I0120 17:58:38.840294 4558 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","pod4e270f26-f899-4007-94bf-ab62080fe4ce"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort pod4e270f26-f899-4007-94bf-ab62080fe4ce] : Timed out while waiting for systemd to remove kubepods-besteffort-pod4e270f26_f899_4007_94bf_ab62080fe4ce.slice" Jan 20 17:58:38 crc kubenswrapper[4558]: E0120 17:58:38.840343 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to delete cgroup paths for [kubepods besteffort pod4e270f26-f899-4007-94bf-ab62080fe4ce] : unable to destroy cgroup paths for cgroup [kubepods besteffort pod4e270f26-f899-4007-94bf-ab62080fe4ce] : Timed out while waiting for systemd to remove kubepods-besteffort-pod4e270f26_f899_4007_94bf_ab62080fe4ce.slice" pod="openstack-kuttl-tests/kube-state-metrics-0" podUID="4e270f26-f899-4007-94bf-ab62080fe4ce" Jan 20 17:58:39 crc kubenswrapper[4558]: I0120 17:58:39.060866 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/kube-state-metrics-0" Jan 20 17:58:39 crc kubenswrapper[4558]: I0120 17:58:39.088386 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:58:39 crc kubenswrapper[4558]: I0120 17:58:39.091904 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/kube-state-metrics-0"] Jan 20 17:58:40 crc kubenswrapper[4558]: I0120 17:58:40.573438 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4e270f26-f899-4007-94bf-ab62080fe4ce" path="/var/lib/kubelet/pods/4e270f26-f899-4007-94bf-ab62080fe4ce/volumes" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.820353 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-sc7wz"] Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.825832 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-sc7wz"] Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.939274 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-dwzdv"] Jan 20 17:58:44 crc kubenswrapper[4558]: E0120 17:58:44.939713 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="948fb15d-07f1-4b25-b8d5-7d582024ef28" containerName="setup-container" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.939738 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="948fb15d-07f1-4b25-b8d5-7d582024ef28" containerName="setup-container" Jan 20 17:58:44 crc kubenswrapper[4558]: E0120 17:58:44.939751 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae90d4e7-e4c5-47e4-a320-f9f6069ed69d" containerName="memcached" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.939759 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae90d4e7-e4c5-47e4-a320-f9f6069ed69d" containerName="memcached" Jan 20 17:58:44 crc kubenswrapper[4558]: E0120 17:58:44.939769 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" containerName="container-server" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.939777 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" containerName="container-server" Jan 20 17:58:44 crc kubenswrapper[4558]: E0120 17:58:44.939795 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7dcf074e-8444-4bd2-b1f9-143390e96ef8" containerName="glance-httpd" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.939801 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7dcf074e-8444-4bd2-b1f9-143390e96ef8" containerName="glance-httpd" Jan 20 17:58:44 crc kubenswrapper[4558]: E0120 17:58:44.939813 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7" containerName="barbican-api-log" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.939819 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7" containerName="barbican-api-log" Jan 20 17:58:44 crc kubenswrapper[4558]: E0120 17:58:44.939828 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" containerName="object-server" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.939835 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" containerName="object-server" Jan 20 17:58:44 crc kubenswrapper[4558]: E0120 
17:58:44.939845 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" containerName="account-server" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.939852 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" containerName="account-server" Jan 20 17:58:44 crc kubenswrapper[4558]: E0120 17:58:44.939860 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5beef7a-89e0-4e53-960b-1cacc665d586" containerName="nova-scheduler-scheduler" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.939865 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5beef7a-89e0-4e53-960b-1cacc665d586" containerName="nova-scheduler-scheduler" Jan 20 17:58:44 crc kubenswrapper[4558]: E0120 17:58:44.939874 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abd1fde8-57bd-4248-a061-6ddb436501c2" containerName="galera" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.939880 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="abd1fde8-57bd-4248-a061-6ddb436501c2" containerName="galera" Jan 20 17:58:44 crc kubenswrapper[4558]: E0120 17:58:44.939888 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c80b87d-f7d5-46dd-a6d9-bba3e82e405b" containerName="proxy-server" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.939893 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c80b87d-f7d5-46dd-a6d9-bba3e82e405b" containerName="proxy-server" Jan 20 17:58:44 crc kubenswrapper[4558]: E0120 17:58:44.939901 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" containerName="object-auditor" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.939907 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" containerName="object-auditor" Jan 20 17:58:44 crc kubenswrapper[4558]: E0120 17:58:44.939919 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" containerName="object-updater" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.939925 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" containerName="object-updater" Jan 20 17:58:44 crc kubenswrapper[4558]: E0120 17:58:44.939932 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" containerName="container-auditor" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.939938 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" containerName="container-auditor" Jan 20 17:58:44 crc kubenswrapper[4558]: E0120 17:58:44.939947 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d512bec4-7ed0-43bb-b8fe-0f235f7698e5" containerName="rabbitmq" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.939953 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d512bec4-7ed0-43bb-b8fe-0f235f7698e5" containerName="rabbitmq" Jan 20 17:58:44 crc kubenswrapper[4558]: E0120 17:58:44.939961 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91" containerName="barbican-keystone-listener" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.939966 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91" containerName="barbican-keystone-listener" Jan 20 17:58:44 crc 
kubenswrapper[4558]: E0120 17:58:44.939987 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8f11f45-bfce-4989-bfa0-684011f74619" containerName="nova-metadata-metadata" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.939993 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8f11f45-bfce-4989-bfa0-684011f74619" containerName="nova-metadata-metadata" Jan 20 17:58:44 crc kubenswrapper[4558]: E0120 17:58:44.940001 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" containerName="account-auditor" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.940007 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" containerName="account-auditor" Jan 20 17:58:44 crc kubenswrapper[4558]: E0120 17:58:44.940016 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c80b87d-f7d5-46dd-a6d9-bba3e82e405b" containerName="proxy-httpd" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.940021 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c80b87d-f7d5-46dd-a6d9-bba3e82e405b" containerName="proxy-httpd" Jan 20 17:58:44 crc kubenswrapper[4558]: E0120 17:58:44.940030 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" containerName="object-replicator" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.940037 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" containerName="object-replicator" Jan 20 17:58:44 crc kubenswrapper[4558]: E0120 17:58:44.940045 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="680eb8d4-16c8-40ae-bfb6-054fcf23f281" containerName="ceilometer-notification-agent" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.940053 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="680eb8d4-16c8-40ae-bfb6-054fcf23f281" containerName="ceilometer-notification-agent" Jan 20 17:58:44 crc kubenswrapper[4558]: E0120 17:58:44.940062 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8f5c27dd-2365-49ee-b4bd-b46e952199f7" containerName="glance-httpd" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.940070 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8f5c27dd-2365-49ee-b4bd-b46e952199f7" containerName="glance-httpd" Jan 20 17:58:44 crc kubenswrapper[4558]: E0120 17:58:44.940082 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8f5c27dd-2365-49ee-b4bd-b46e952199f7" containerName="glance-log" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.940088 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8f5c27dd-2365-49ee-b4bd-b46e952199f7" containerName="glance-log" Jan 20 17:58:44 crc kubenswrapper[4558]: E0120 17:58:44.940097 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a5251c1-60bc-45d7-8524-fd654c09505b" containerName="barbican-worker-log" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.940104 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a5251c1-60bc-45d7-8524-fd654c09505b" containerName="barbican-worker-log" Jan 20 17:58:44 crc kubenswrapper[4558]: E0120 17:58:44.940114 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="75a8c03c-5b0a-4144-b1a8-293ce0d50739" containerName="mariadb-account-create-update" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.940121 4558 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="75a8c03c-5b0a-4144-b1a8-293ce0d50739" containerName="mariadb-account-create-update" Jan 20 17:58:44 crc kubenswrapper[4558]: E0120 17:58:44.940130 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91" containerName="barbican-keystone-listener-log" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.940137 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91" containerName="barbican-keystone-listener-log" Jan 20 17:58:44 crc kubenswrapper[4558]: E0120 17:58:44.940149 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45d51516-cc3a-473c-bb88-e82d290d36ca" containerName="neutron-api" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.940154 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="45d51516-cc3a-473c-bb88-e82d290d36ca" containerName="neutron-api" Jan 20 17:58:44 crc kubenswrapper[4558]: E0120 17:58:44.940178 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8f11f45-bfce-4989-bfa0-684011f74619" containerName="nova-metadata-log" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.940185 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8f11f45-bfce-4989-bfa0-684011f74619" containerName="nova-metadata-log" Jan 20 17:58:44 crc kubenswrapper[4558]: E0120 17:58:44.940195 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="680eb8d4-16c8-40ae-bfb6-054fcf23f281" containerName="sg-core" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.940202 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="680eb8d4-16c8-40ae-bfb6-054fcf23f281" containerName="sg-core" Jan 20 17:58:44 crc kubenswrapper[4558]: E0120 17:58:44.940212 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a1b8bc3-6d56-42f1-99a2-f80ff914e3d8" containerName="nova-cell0-conductor-conductor" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.940219 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a1b8bc3-6d56-42f1-99a2-f80ff914e3d8" containerName="nova-cell0-conductor-conductor" Jan 20 17:58:44 crc kubenswrapper[4558]: E0120 17:58:44.940229 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a5251c1-60bc-45d7-8524-fd654c09505b" containerName="barbican-worker" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.940235 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a5251c1-60bc-45d7-8524-fd654c09505b" containerName="barbican-worker" Jan 20 17:58:44 crc kubenswrapper[4558]: E0120 17:58:44.940241 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f0733ea-bb8a-4c0f-9ea8-edd6f687301e" containerName="nova-api-log" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.940246 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f0733ea-bb8a-4c0f-9ea8-edd6f687301e" containerName="nova-api-log" Jan 20 17:58:44 crc kubenswrapper[4558]: E0120 17:58:44.940257 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" containerName="object-expirer" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.940263 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" containerName="object-expirer" Jan 20 17:58:44 crc kubenswrapper[4558]: E0120 17:58:44.940270 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" containerName="account-reaper" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 
17:58:44.940276 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" containerName="account-reaper" Jan 20 17:58:44 crc kubenswrapper[4558]: E0120 17:58:44.940284 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="747b1096-c84a-43e1-930e-9f887a42c524" containerName="keystone-api" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.940290 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="747b1096-c84a-43e1-930e-9f887a42c524" containerName="keystone-api" Jan 20 17:58:44 crc kubenswrapper[4558]: E0120 17:58:44.940299 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6" containerName="mysql-bootstrap" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.940306 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6" containerName="mysql-bootstrap" Jan 20 17:58:44 crc kubenswrapper[4558]: E0120 17:58:44.940314 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abd1fde8-57bd-4248-a061-6ddb436501c2" containerName="mysql-bootstrap" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.940319 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="abd1fde8-57bd-4248-a061-6ddb436501c2" containerName="mysql-bootstrap" Jan 20 17:58:44 crc kubenswrapper[4558]: E0120 17:58:44.940325 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45d51516-cc3a-473c-bb88-e82d290d36ca" containerName="neutron-httpd" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.940331 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="45d51516-cc3a-473c-bb88-e82d290d36ca" containerName="neutron-httpd" Jan 20 17:58:44 crc kubenswrapper[4558]: E0120 17:58:44.940337 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" containerName="rsync" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.940342 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" containerName="rsync" Jan 20 17:58:44 crc kubenswrapper[4558]: E0120 17:58:44.940350 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7" containerName="barbican-api" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.940355 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7" containerName="barbican-api" Jan 20 17:58:44 crc kubenswrapper[4558]: E0120 17:58:44.940364 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dbdd2687-1110-4dce-a521-19c9337df3a2" containerName="openstack-network-exporter" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.940370 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="dbdd2687-1110-4dce-a521-19c9337df3a2" containerName="openstack-network-exporter" Jan 20 17:58:44 crc kubenswrapper[4558]: E0120 17:58:44.940377 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" containerName="swift-recon-cron" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.940382 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" containerName="swift-recon-cron" Jan 20 17:58:44 crc kubenswrapper[4558]: E0120 17:58:44.940390 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20fbac90-6e69-4c04-8e92-0b191157f185" containerName="dnsmasq-dns" Jan 20 17:58:44 crc kubenswrapper[4558]: 
I0120 17:58:44.940394 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="20fbac90-6e69-4c04-8e92-0b191157f185" containerName="dnsmasq-dns" Jan 20 17:58:44 crc kubenswrapper[4558]: E0120 17:58:44.940402 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f0733ea-bb8a-4c0f-9ea8-edd6f687301e" containerName="nova-api-api" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.940409 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f0733ea-bb8a-4c0f-9ea8-edd6f687301e" containerName="nova-api-api" Jan 20 17:58:44 crc kubenswrapper[4558]: E0120 17:58:44.940423 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54cbdea3-8fcc-4d2d-870b-cf3663cfc633" containerName="nova-cell1-conductor-conductor" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.940430 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="54cbdea3-8fcc-4d2d-870b-cf3663cfc633" containerName="nova-cell1-conductor-conductor" Jan 20 17:58:44 crc kubenswrapper[4558]: E0120 17:58:44.940440 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dbdd2687-1110-4dce-a521-19c9337df3a2" containerName="ovn-northd" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.940446 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="dbdd2687-1110-4dce-a521-19c9337df3a2" containerName="ovn-northd" Jan 20 17:58:44 crc kubenswrapper[4558]: E0120 17:58:44.940456 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50829e14-401d-4958-9ef7-e2a2ef1f4b32" containerName="placement-api" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.940461 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="50829e14-401d-4958-9ef7-e2a2ef1f4b32" containerName="placement-api" Jan 20 17:58:44 crc kubenswrapper[4558]: E0120 17:58:44.940468 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7dcf074e-8444-4bd2-b1f9-143390e96ef8" containerName="glance-log" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.940473 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7dcf074e-8444-4bd2-b1f9-143390e96ef8" containerName="glance-log" Jan 20 17:58:44 crc kubenswrapper[4558]: E0120 17:58:44.940480 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d512bec4-7ed0-43bb-b8fe-0f235f7698e5" containerName="setup-container" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.940487 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d512bec4-7ed0-43bb-b8fe-0f235f7698e5" containerName="setup-container" Jan 20 17:58:44 crc kubenswrapper[4558]: E0120 17:58:44.940494 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="948fb15d-07f1-4b25-b8d5-7d582024ef28" containerName="rabbitmq" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.940500 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="948fb15d-07f1-4b25-b8d5-7d582024ef28" containerName="rabbitmq" Jan 20 17:58:44 crc kubenswrapper[4558]: E0120 17:58:44.940508 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="680eb8d4-16c8-40ae-bfb6-054fcf23f281" containerName="ceilometer-central-agent" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.940516 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="680eb8d4-16c8-40ae-bfb6-054fcf23f281" containerName="ceilometer-central-agent" Jan 20 17:58:44 crc kubenswrapper[4558]: E0120 17:58:44.940524 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="680eb8d4-16c8-40ae-bfb6-054fcf23f281" containerName="proxy-httpd" Jan 20 17:58:44 crc 
kubenswrapper[4558]: I0120 17:58:44.940528 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="680eb8d4-16c8-40ae-bfb6-054fcf23f281" containerName="proxy-httpd" Jan 20 17:58:44 crc kubenswrapper[4558]: E0120 17:58:44.940536 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6" containerName="galera" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.940541 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6" containerName="galera" Jan 20 17:58:44 crc kubenswrapper[4558]: E0120 17:58:44.940549 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" containerName="account-replicator" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.940555 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" containerName="account-replicator" Jan 20 17:58:44 crc kubenswrapper[4558]: E0120 17:58:44.940561 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" containerName="container-replicator" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.940567 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" containerName="container-replicator" Jan 20 17:58:44 crc kubenswrapper[4558]: E0120 17:58:44.940573 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20fbac90-6e69-4c04-8e92-0b191157f185" containerName="init" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.940578 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="20fbac90-6e69-4c04-8e92-0b191157f185" containerName="init" Jan 20 17:58:44 crc kubenswrapper[4558]: E0120 17:58:44.940585 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" containerName="container-updater" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.940590 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" containerName="container-updater" Jan 20 17:58:44 crc kubenswrapper[4558]: E0120 17:58:44.940598 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b0c3280-9b26-4db4-ab16-68016b055a1f" containerName="cinder-api-log" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.940603 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b0c3280-9b26-4db4-ab16-68016b055a1f" containerName="cinder-api-log" Jan 20 17:58:44 crc kubenswrapper[4558]: E0120 17:58:44.940609 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b0c3280-9b26-4db4-ab16-68016b055a1f" containerName="cinder-api" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.940616 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b0c3280-9b26-4db4-ab16-68016b055a1f" containerName="cinder-api" Jan 20 17:58:44 crc kubenswrapper[4558]: E0120 17:58:44.940624 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50829e14-401d-4958-9ef7-e2a2ef1f4b32" containerName="placement-log" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.940630 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="50829e14-401d-4958-9ef7-e2a2ef1f4b32" containerName="placement-log" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.940806 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8f5c27dd-2365-49ee-b4bd-b46e952199f7" containerName="glance-httpd" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 
17:58:44.940815 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" containerName="object-replicator" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.940828 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" containerName="container-updater" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.940836 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" containerName="rsync" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.940846 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="20fbac90-6e69-4c04-8e92-0b191157f185" containerName="dnsmasq-dns" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.940855 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="680eb8d4-16c8-40ae-bfb6-054fcf23f281" containerName="ceilometer-central-agent" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.940861 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" containerName="object-auditor" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.940867 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" containerName="account-replicator" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.940876 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" containerName="account-server" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.940882 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="50829e14-401d-4958-9ef7-e2a2ef1f4b32" containerName="placement-log" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.940891 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5beef7a-89e0-4e53-960b-1cacc665d586" containerName="nova-scheduler-scheduler" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.940902 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" containerName="container-replicator" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.940910 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8a5251c1-60bc-45d7-8524-fd654c09505b" containerName="barbican-worker-log" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.940919 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" containerName="account-reaper" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.940928 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="0b0c3280-9b26-4db4-ab16-68016b055a1f" containerName="cinder-api-log" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.940936 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ae90d4e7-e4c5-47e4-a320-f9f6069ed69d" containerName="memcached" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.940944 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="dbdd2687-1110-4dce-a521-19c9337df3a2" containerName="openstack-network-exporter" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.940950 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="7dcf074e-8444-4bd2-b1f9-143390e96ef8" containerName="glance-httpd" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.940958 4558 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7" containerName="barbican-api" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.940968 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="45d51516-cc3a-473c-bb88-e82d290d36ca" containerName="neutron-api" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.940986 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="45d51516-cc3a-473c-bb88-e82d290d36ca" containerName="neutron-httpd" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.940994 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="6f0733ea-bb8a-4c0f-9ea8-edd6f687301e" containerName="nova-api-log" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.941004 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="50829e14-401d-4958-9ef7-e2a2ef1f4b32" containerName="placement-api" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.941014 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="0b0c3280-9b26-4db4-ab16-68016b055a1f" containerName="cinder-api" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.941023 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91" containerName="barbican-keystone-listener-log" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.941030 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a66c352e-8b7e-4f9e-9447-a2bf8a7a5c91" containerName="barbican-keystone-listener" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.941039 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="75a8c03c-5b0a-4144-b1a8-293ce0d50739" containerName="mariadb-account-create-update" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.941046 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="94680f2e-23c0-41aa-a7ff-0aadfcfcc5e6" containerName="galera" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.941052 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" containerName="account-auditor" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.941060 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" containerName="object-updater" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.941069 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" containerName="container-auditor" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.941077 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" containerName="swift-recon-cron" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.941083 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" containerName="object-expirer" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.941090 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8a1b8bc3-6d56-42f1-99a2-f80ff914e3d8" containerName="nova-cell0-conductor-conductor" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.941096 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c8f11f45-bfce-4989-bfa0-684011f74619" containerName="nova-metadata-log" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.941101 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8f5c27dd-2365-49ee-b4bd-b46e952199f7" containerName="glance-log" 
Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.941109 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" containerName="object-server" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.941116 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="2c80b87d-f7d5-46dd-a6d9-bba3e82e405b" containerName="proxy-server" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.941122 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="7dcf074e-8444-4bd2-b1f9-143390e96ef8" containerName="glance-log" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.941128 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="680eb8d4-16c8-40ae-bfb6-054fcf23f281" containerName="ceilometer-notification-agent" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.941138 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8a5251c1-60bc-45d7-8524-fd654c09505b" containerName="barbican-worker" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.941144 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="680eb8d4-16c8-40ae-bfb6-054fcf23f281" containerName="proxy-httpd" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.941151 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="2c80b87d-f7d5-46dd-a6d9-bba3e82e405b" containerName="proxy-httpd" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.941158 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd300665-4f75-4ed7-9166-4b48e1aeffc4" containerName="container-server" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.941183 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="9a1c3ab4-ca8f-4183-9dbf-05f5c5efe4b7" containerName="barbican-api-log" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.941191 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="747b1096-c84a-43e1-930e-9f887a42c524" containerName="keystone-api" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.941199 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="abd1fde8-57bd-4248-a061-6ddb436501c2" containerName="galera" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.941203 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="948fb15d-07f1-4b25-b8d5-7d582024ef28" containerName="rabbitmq" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.941211 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="54cbdea3-8fcc-4d2d-870b-cf3663cfc633" containerName="nova-cell1-conductor-conductor" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.941219 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d512bec4-7ed0-43bb-b8fe-0f235f7698e5" containerName="rabbitmq" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.941226 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c8f11f45-bfce-4989-bfa0-684011f74619" containerName="nova-metadata-metadata" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.941233 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="dbdd2687-1110-4dce-a521-19c9337df3a2" containerName="ovn-northd" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.941240 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="680eb8d4-16c8-40ae-bfb6-054fcf23f281" containerName="sg-core" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.941246 4558 memory_manager.go:354] "RemoveStaleState removing 
state" podUID="6f0733ea-bb8a-4c0f-9ea8-edd6f687301e" containerName="nova-api-api" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.941997 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-dwzdv" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.944070 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.944353 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.944619 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.945003 4558 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-k9ht8" Jan 20 17:58:44 crc kubenswrapper[4558]: I0120 17:58:44.951345 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-dwzdv"] Jan 20 17:58:45 crc kubenswrapper[4558]: I0120 17:58:45.091066 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qcpfx\" (UniqueName: \"kubernetes.io/projected/54045988-4050-480a-afff-04cbaf78c220-kube-api-access-qcpfx\") pod \"crc-storage-crc-dwzdv\" (UID: \"54045988-4050-480a-afff-04cbaf78c220\") " pod="crc-storage/crc-storage-crc-dwzdv" Jan 20 17:58:45 crc kubenswrapper[4558]: I0120 17:58:45.091116 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/54045988-4050-480a-afff-04cbaf78c220-crc-storage\") pod \"crc-storage-crc-dwzdv\" (UID: \"54045988-4050-480a-afff-04cbaf78c220\") " pod="crc-storage/crc-storage-crc-dwzdv" Jan 20 17:58:45 crc kubenswrapper[4558]: I0120 17:58:45.091677 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/54045988-4050-480a-afff-04cbaf78c220-node-mnt\") pod \"crc-storage-crc-dwzdv\" (UID: \"54045988-4050-480a-afff-04cbaf78c220\") " pod="crc-storage/crc-storage-crc-dwzdv" Jan 20 17:58:45 crc kubenswrapper[4558]: I0120 17:58:45.194214 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qcpfx\" (UniqueName: \"kubernetes.io/projected/54045988-4050-480a-afff-04cbaf78c220-kube-api-access-qcpfx\") pod \"crc-storage-crc-dwzdv\" (UID: \"54045988-4050-480a-afff-04cbaf78c220\") " pod="crc-storage/crc-storage-crc-dwzdv" Jan 20 17:58:45 crc kubenswrapper[4558]: I0120 17:58:45.194269 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/54045988-4050-480a-afff-04cbaf78c220-crc-storage\") pod \"crc-storage-crc-dwzdv\" (UID: \"54045988-4050-480a-afff-04cbaf78c220\") " pod="crc-storage/crc-storage-crc-dwzdv" Jan 20 17:58:45 crc kubenswrapper[4558]: I0120 17:58:45.194334 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/54045988-4050-480a-afff-04cbaf78c220-node-mnt\") pod \"crc-storage-crc-dwzdv\" (UID: \"54045988-4050-480a-afff-04cbaf78c220\") " pod="crc-storage/crc-storage-crc-dwzdv" Jan 20 17:58:45 crc kubenswrapper[4558]: I0120 17:58:45.194575 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" 
(UniqueName: \"kubernetes.io/host-path/54045988-4050-480a-afff-04cbaf78c220-node-mnt\") pod \"crc-storage-crc-dwzdv\" (UID: \"54045988-4050-480a-afff-04cbaf78c220\") " pod="crc-storage/crc-storage-crc-dwzdv" Jan 20 17:58:45 crc kubenswrapper[4558]: I0120 17:58:45.195340 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/54045988-4050-480a-afff-04cbaf78c220-crc-storage\") pod \"crc-storage-crc-dwzdv\" (UID: \"54045988-4050-480a-afff-04cbaf78c220\") " pod="crc-storage/crc-storage-crc-dwzdv" Jan 20 17:58:45 crc kubenswrapper[4558]: I0120 17:58:45.212846 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qcpfx\" (UniqueName: \"kubernetes.io/projected/54045988-4050-480a-afff-04cbaf78c220-kube-api-access-qcpfx\") pod \"crc-storage-crc-dwzdv\" (UID: \"54045988-4050-480a-afff-04cbaf78c220\") " pod="crc-storage/crc-storage-crc-dwzdv" Jan 20 17:58:45 crc kubenswrapper[4558]: I0120 17:58:45.260238 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-dwzdv" Jan 20 17:58:45 crc kubenswrapper[4558]: I0120 17:58:45.647515 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-dwzdv"] Jan 20 17:58:46 crc kubenswrapper[4558]: I0120 17:58:46.121671 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-dwzdv" event={"ID":"54045988-4050-480a-afff-04cbaf78c220","Type":"ContainerStarted","Data":"e8e6bcd0464887a274d2c54a55598544213168e98d415c28eade38eb9547080c"} Jan 20 17:58:46 crc kubenswrapper[4558]: I0120 17:58:46.576310 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5ae32146-24d1-41db-bc15-fdac91abf9e0" path="/var/lib/kubelet/pods/5ae32146-24d1-41db-bc15-fdac91abf9e0/volumes" Jan 20 17:58:47 crc kubenswrapper[4558]: I0120 17:58:47.132809 4558 generic.go:334] "Generic (PLEG): container finished" podID="54045988-4050-480a-afff-04cbaf78c220" containerID="9ea6ff980fb53a92de35240296a25aa2e7062286f09a7e38918440c0642fed3a" exitCode=0 Jan 20 17:58:47 crc kubenswrapper[4558]: I0120 17:58:47.132977 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-dwzdv" event={"ID":"54045988-4050-480a-afff-04cbaf78c220","Type":"ContainerDied","Data":"9ea6ff980fb53a92de35240296a25aa2e7062286f09a7e38918440c0642fed3a"} Jan 20 17:58:48 crc kubenswrapper[4558]: I0120 17:58:48.389547 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-dwzdv" Jan 20 17:58:48 crc kubenswrapper[4558]: I0120 17:58:48.537654 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qcpfx\" (UniqueName: \"kubernetes.io/projected/54045988-4050-480a-afff-04cbaf78c220-kube-api-access-qcpfx\") pod \"54045988-4050-480a-afff-04cbaf78c220\" (UID: \"54045988-4050-480a-afff-04cbaf78c220\") " Jan 20 17:58:48 crc kubenswrapper[4558]: I0120 17:58:48.538097 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/54045988-4050-480a-afff-04cbaf78c220-node-mnt\") pod \"54045988-4050-480a-afff-04cbaf78c220\" (UID: \"54045988-4050-480a-afff-04cbaf78c220\") " Jan 20 17:58:48 crc kubenswrapper[4558]: I0120 17:58:48.538272 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/54045988-4050-480a-afff-04cbaf78c220-crc-storage\") pod \"54045988-4050-480a-afff-04cbaf78c220\" (UID: \"54045988-4050-480a-afff-04cbaf78c220\") " Jan 20 17:58:48 crc kubenswrapper[4558]: I0120 17:58:48.538263 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/54045988-4050-480a-afff-04cbaf78c220-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "54045988-4050-480a-afff-04cbaf78c220" (UID: "54045988-4050-480a-afff-04cbaf78c220"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:58:48 crc kubenswrapper[4558]: I0120 17:58:48.538859 4558 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/54045988-4050-480a-afff-04cbaf78c220-node-mnt\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:48 crc kubenswrapper[4558]: I0120 17:58:48.544899 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/54045988-4050-480a-afff-04cbaf78c220-kube-api-access-qcpfx" (OuterVolumeSpecName: "kube-api-access-qcpfx") pod "54045988-4050-480a-afff-04cbaf78c220" (UID: "54045988-4050-480a-afff-04cbaf78c220"). InnerVolumeSpecName "kube-api-access-qcpfx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:58:48 crc kubenswrapper[4558]: I0120 17:58:48.556304 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/54045988-4050-480a-afff-04cbaf78c220-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "54045988-4050-480a-afff-04cbaf78c220" (UID: "54045988-4050-480a-afff-04cbaf78c220"). InnerVolumeSpecName "crc-storage". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:58:48 crc kubenswrapper[4558]: I0120 17:58:48.641243 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qcpfx\" (UniqueName: \"kubernetes.io/projected/54045988-4050-480a-afff-04cbaf78c220-kube-api-access-qcpfx\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:48 crc kubenswrapper[4558]: I0120 17:58:48.641276 4558 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/54045988-4050-480a-afff-04cbaf78c220-crc-storage\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:49 crc kubenswrapper[4558]: I0120 17:58:49.151043 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-dwzdv" event={"ID":"54045988-4050-480a-afff-04cbaf78c220","Type":"ContainerDied","Data":"e8e6bcd0464887a274d2c54a55598544213168e98d415c28eade38eb9547080c"} Jan 20 17:58:49 crc kubenswrapper[4558]: I0120 17:58:49.151097 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e8e6bcd0464887a274d2c54a55598544213168e98d415c28eade38eb9547080c" Jan 20 17:58:49 crc kubenswrapper[4558]: I0120 17:58:49.151108 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-dwzdv" Jan 20 17:58:51 crc kubenswrapper[4558]: I0120 17:58:51.306496 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-dwzdv"] Jan 20 17:58:51 crc kubenswrapper[4558]: I0120 17:58:51.310535 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-dwzdv"] Jan 20 17:58:51 crc kubenswrapper[4558]: I0120 17:58:51.414742 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-d8tgq"] Jan 20 17:58:51 crc kubenswrapper[4558]: E0120 17:58:51.415050 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="75a8c03c-5b0a-4144-b1a8-293ce0d50739" containerName="mariadb-account-create-update" Jan 20 17:58:51 crc kubenswrapper[4558]: I0120 17:58:51.415068 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="75a8c03c-5b0a-4144-b1a8-293ce0d50739" containerName="mariadb-account-create-update" Jan 20 17:58:51 crc kubenswrapper[4558]: E0120 17:58:51.415090 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54045988-4050-480a-afff-04cbaf78c220" containerName="storage" Jan 20 17:58:51 crc kubenswrapper[4558]: I0120 17:58:51.415095 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="54045988-4050-480a-afff-04cbaf78c220" containerName="storage" Jan 20 17:58:51 crc kubenswrapper[4558]: I0120 17:58:51.415260 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="75a8c03c-5b0a-4144-b1a8-293ce0d50739" containerName="mariadb-account-create-update" Jan 20 17:58:51 crc kubenswrapper[4558]: I0120 17:58:51.415278 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="54045988-4050-480a-afff-04cbaf78c220" containerName="storage" Jan 20 17:58:51 crc kubenswrapper[4558]: I0120 17:58:51.415786 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-d8tgq" Jan 20 17:58:51 crc kubenswrapper[4558]: I0120 17:58:51.418463 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Jan 20 17:58:51 crc kubenswrapper[4558]: I0120 17:58:51.418463 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Jan 20 17:58:51 crc kubenswrapper[4558]: I0120 17:58:51.418579 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Jan 20 17:58:51 crc kubenswrapper[4558]: I0120 17:58:51.419001 4558 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-k9ht8" Jan 20 17:58:51 crc kubenswrapper[4558]: I0120 17:58:51.422807 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-d8tgq"] Jan 20 17:58:51 crc kubenswrapper[4558]: I0120 17:58:51.477465 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/cbd862c9-fadb-44e6-928e-0976fa3a022d-node-mnt\") pod \"crc-storage-crc-d8tgq\" (UID: \"cbd862c9-fadb-44e6-928e-0976fa3a022d\") " pod="crc-storage/crc-storage-crc-d8tgq" Jan 20 17:58:51 crc kubenswrapper[4558]: I0120 17:58:51.477521 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/cbd862c9-fadb-44e6-928e-0976fa3a022d-crc-storage\") pod \"crc-storage-crc-d8tgq\" (UID: \"cbd862c9-fadb-44e6-928e-0976fa3a022d\") " pod="crc-storage/crc-storage-crc-d8tgq" Jan 20 17:58:51 crc kubenswrapper[4558]: I0120 17:58:51.477587 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p7dwp\" (UniqueName: \"kubernetes.io/projected/cbd862c9-fadb-44e6-928e-0976fa3a022d-kube-api-access-p7dwp\") pod \"crc-storage-crc-d8tgq\" (UID: \"cbd862c9-fadb-44e6-928e-0976fa3a022d\") " pod="crc-storage/crc-storage-crc-d8tgq" Jan 20 17:58:51 crc kubenswrapper[4558]: I0120 17:58:51.579078 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p7dwp\" (UniqueName: \"kubernetes.io/projected/cbd862c9-fadb-44e6-928e-0976fa3a022d-kube-api-access-p7dwp\") pod \"crc-storage-crc-d8tgq\" (UID: \"cbd862c9-fadb-44e6-928e-0976fa3a022d\") " pod="crc-storage/crc-storage-crc-d8tgq" Jan 20 17:58:51 crc kubenswrapper[4558]: I0120 17:58:51.579218 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/cbd862c9-fadb-44e6-928e-0976fa3a022d-node-mnt\") pod \"crc-storage-crc-d8tgq\" (UID: \"cbd862c9-fadb-44e6-928e-0976fa3a022d\") " pod="crc-storage/crc-storage-crc-d8tgq" Jan 20 17:58:51 crc kubenswrapper[4558]: I0120 17:58:51.579265 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/cbd862c9-fadb-44e6-928e-0976fa3a022d-crc-storage\") pod \"crc-storage-crc-d8tgq\" (UID: \"cbd862c9-fadb-44e6-928e-0976fa3a022d\") " pod="crc-storage/crc-storage-crc-d8tgq" Jan 20 17:58:51 crc kubenswrapper[4558]: I0120 17:58:51.579747 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/cbd862c9-fadb-44e6-928e-0976fa3a022d-node-mnt\") pod \"crc-storage-crc-d8tgq\" (UID: \"cbd862c9-fadb-44e6-928e-0976fa3a022d\") " 
pod="crc-storage/crc-storage-crc-d8tgq" Jan 20 17:58:51 crc kubenswrapper[4558]: I0120 17:58:51.580042 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/cbd862c9-fadb-44e6-928e-0976fa3a022d-crc-storage\") pod \"crc-storage-crc-d8tgq\" (UID: \"cbd862c9-fadb-44e6-928e-0976fa3a022d\") " pod="crc-storage/crc-storage-crc-d8tgq" Jan 20 17:58:51 crc kubenswrapper[4558]: I0120 17:58:51.598388 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p7dwp\" (UniqueName: \"kubernetes.io/projected/cbd862c9-fadb-44e6-928e-0976fa3a022d-kube-api-access-p7dwp\") pod \"crc-storage-crc-d8tgq\" (UID: \"cbd862c9-fadb-44e6-928e-0976fa3a022d\") " pod="crc-storage/crc-storage-crc-d8tgq" Jan 20 17:58:51 crc kubenswrapper[4558]: I0120 17:58:51.729731 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-d8tgq" Jan 20 17:58:52 crc kubenswrapper[4558]: I0120 17:58:52.116395 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-d8tgq"] Jan 20 17:58:52 crc kubenswrapper[4558]: I0120 17:58:52.183406 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-d8tgq" event={"ID":"cbd862c9-fadb-44e6-928e-0976fa3a022d","Type":"ContainerStarted","Data":"7d252c96b2d02f67dd35b74fbde01972a5a1fda9fd2b88f69eada1b366ab0c25"} Jan 20 17:58:52 crc kubenswrapper[4558]: I0120 17:58:52.574540 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="54045988-4050-480a-afff-04cbaf78c220" path="/var/lib/kubelet/pods/54045988-4050-480a-afff-04cbaf78c220/volumes" Jan 20 17:58:53 crc kubenswrapper[4558]: I0120 17:58:53.194486 4558 generic.go:334] "Generic (PLEG): container finished" podID="cbd862c9-fadb-44e6-928e-0976fa3a022d" containerID="790091312ed918acd438cbb2df2c660600bb30ed84ef002a58f4cd77a21666a1" exitCode=0 Jan 20 17:58:53 crc kubenswrapper[4558]: I0120 17:58:53.194561 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-d8tgq" event={"ID":"cbd862c9-fadb-44e6-928e-0976fa3a022d","Type":"ContainerDied","Data":"790091312ed918acd438cbb2df2c660600bb30ed84ef002a58f4cd77a21666a1"} Jan 20 17:58:54 crc kubenswrapper[4558]: I0120 17:58:54.439345 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-d8tgq" Jan 20 17:58:54 crc kubenswrapper[4558]: I0120 17:58:54.614124 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p7dwp\" (UniqueName: \"kubernetes.io/projected/cbd862c9-fadb-44e6-928e-0976fa3a022d-kube-api-access-p7dwp\") pod \"cbd862c9-fadb-44e6-928e-0976fa3a022d\" (UID: \"cbd862c9-fadb-44e6-928e-0976fa3a022d\") " Jan 20 17:58:54 crc kubenswrapper[4558]: I0120 17:58:54.614204 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/cbd862c9-fadb-44e6-928e-0976fa3a022d-crc-storage\") pod \"cbd862c9-fadb-44e6-928e-0976fa3a022d\" (UID: \"cbd862c9-fadb-44e6-928e-0976fa3a022d\") " Jan 20 17:58:54 crc kubenswrapper[4558]: I0120 17:58:54.614258 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/cbd862c9-fadb-44e6-928e-0976fa3a022d-node-mnt\") pod \"cbd862c9-fadb-44e6-928e-0976fa3a022d\" (UID: \"cbd862c9-fadb-44e6-928e-0976fa3a022d\") " Jan 20 17:58:54 crc kubenswrapper[4558]: I0120 17:58:54.614481 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/cbd862c9-fadb-44e6-928e-0976fa3a022d-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "cbd862c9-fadb-44e6-928e-0976fa3a022d" (UID: "cbd862c9-fadb-44e6-928e-0976fa3a022d"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:58:54 crc kubenswrapper[4558]: I0120 17:58:54.621867 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cbd862c9-fadb-44e6-928e-0976fa3a022d-kube-api-access-p7dwp" (OuterVolumeSpecName: "kube-api-access-p7dwp") pod "cbd862c9-fadb-44e6-928e-0976fa3a022d" (UID: "cbd862c9-fadb-44e6-928e-0976fa3a022d"). InnerVolumeSpecName "kube-api-access-p7dwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:58:54 crc kubenswrapper[4558]: I0120 17:58:54.634547 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cbd862c9-fadb-44e6-928e-0976fa3a022d-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "cbd862c9-fadb-44e6-928e-0976fa3a022d" (UID: "cbd862c9-fadb-44e6-928e-0976fa3a022d"). InnerVolumeSpecName "crc-storage". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:58:54 crc kubenswrapper[4558]: I0120 17:58:54.716189 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p7dwp\" (UniqueName: \"kubernetes.io/projected/cbd862c9-fadb-44e6-928e-0976fa3a022d-kube-api-access-p7dwp\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:54 crc kubenswrapper[4558]: I0120 17:58:54.716231 4558 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/cbd862c9-fadb-44e6-928e-0976fa3a022d-crc-storage\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:54 crc kubenswrapper[4558]: I0120 17:58:54.716246 4558 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/cbd862c9-fadb-44e6-928e-0976fa3a022d-node-mnt\") on node \"crc\" DevicePath \"\"" Jan 20 17:58:55 crc kubenswrapper[4558]: I0120 17:58:55.212465 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-d8tgq" event={"ID":"cbd862c9-fadb-44e6-928e-0976fa3a022d","Type":"ContainerDied","Data":"7d252c96b2d02f67dd35b74fbde01972a5a1fda9fd2b88f69eada1b366ab0c25"} Jan 20 17:58:55 crc kubenswrapper[4558]: I0120 17:58:55.212841 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7d252c96b2d02f67dd35b74fbde01972a5a1fda9fd2b88f69eada1b366ab0c25" Jan 20 17:58:55 crc kubenswrapper[4558]: I0120 17:58:55.212560 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-d8tgq" Jan 20 17:58:58 crc kubenswrapper[4558]: I0120 17:58:58.210513 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-5b68c79f89-ctm52"] Jan 20 17:58:58 crc kubenswrapper[4558]: E0120 17:58:58.211220 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cbd862c9-fadb-44e6-928e-0976fa3a022d" containerName="storage" Jan 20 17:58:58 crc kubenswrapper[4558]: I0120 17:58:58.211236 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="cbd862c9-fadb-44e6-928e-0976fa3a022d" containerName="storage" Jan 20 17:58:58 crc kubenswrapper[4558]: I0120 17:58:58.211385 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="cbd862c9-fadb-44e6-928e-0976fa3a022d" containerName="storage" Jan 20 17:58:58 crc kubenswrapper[4558]: I0120 17:58:58.212199 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-5b68c79f89-ctm52" Jan 20 17:58:58 crc kubenswrapper[4558]: I0120 17:58:58.214395 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-edpm-ipam" Jan 20 17:58:58 crc kubenswrapper[4558]: I0120 17:58:58.222224 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-5b68c79f89-ctm52"] Jan 20 17:58:58 crc kubenswrapper[4558]: I0120 17:58:58.268237 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ce022194-8c04-4d08-9d00-c95e9f15a559-config\") pod \"dnsmasq-dnsmasq-5b68c79f89-ctm52\" (UID: \"ce022194-8c04-4d08-9d00-c95e9f15a559\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-5b68c79f89-ctm52" Jan 20 17:58:58 crc kubenswrapper[4558]: I0120 17:58:58.268371 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jk4nn\" (UniqueName: \"kubernetes.io/projected/ce022194-8c04-4d08-9d00-c95e9f15a559-kube-api-access-jk4nn\") pod \"dnsmasq-dnsmasq-5b68c79f89-ctm52\" (UID: \"ce022194-8c04-4d08-9d00-c95e9f15a559\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-5b68c79f89-ctm52" Jan 20 17:58:58 crc kubenswrapper[4558]: I0120 17:58:58.268457 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/ce022194-8c04-4d08-9d00-c95e9f15a559-openstack-edpm-ipam\") pod \"dnsmasq-dnsmasq-5b68c79f89-ctm52\" (UID: \"ce022194-8c04-4d08-9d00-c95e9f15a559\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-5b68c79f89-ctm52" Jan 20 17:58:58 crc kubenswrapper[4558]: I0120 17:58:58.268513 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/ce022194-8c04-4d08-9d00-c95e9f15a559-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-5b68c79f89-ctm52\" (UID: \"ce022194-8c04-4d08-9d00-c95e9f15a559\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-5b68c79f89-ctm52" Jan 20 17:58:58 crc kubenswrapper[4558]: I0120 17:58:58.369396 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jk4nn\" (UniqueName: \"kubernetes.io/projected/ce022194-8c04-4d08-9d00-c95e9f15a559-kube-api-access-jk4nn\") pod \"dnsmasq-dnsmasq-5b68c79f89-ctm52\" (UID: \"ce022194-8c04-4d08-9d00-c95e9f15a559\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-5b68c79f89-ctm52" Jan 20 17:58:58 crc kubenswrapper[4558]: I0120 17:58:58.369476 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/ce022194-8c04-4d08-9d00-c95e9f15a559-openstack-edpm-ipam\") pod \"dnsmasq-dnsmasq-5b68c79f89-ctm52\" (UID: \"ce022194-8c04-4d08-9d00-c95e9f15a559\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-5b68c79f89-ctm52" Jan 20 17:58:58 crc kubenswrapper[4558]: I0120 17:58:58.369514 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/ce022194-8c04-4d08-9d00-c95e9f15a559-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-5b68c79f89-ctm52\" (UID: \"ce022194-8c04-4d08-9d00-c95e9f15a559\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-5b68c79f89-ctm52" Jan 20 17:58:58 crc kubenswrapper[4558]: I0120 17:58:58.369553 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"config\" (UniqueName: \"kubernetes.io/configmap/ce022194-8c04-4d08-9d00-c95e9f15a559-config\") pod \"dnsmasq-dnsmasq-5b68c79f89-ctm52\" (UID: \"ce022194-8c04-4d08-9d00-c95e9f15a559\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-5b68c79f89-ctm52" Jan 20 17:58:58 crc kubenswrapper[4558]: I0120 17:58:58.370654 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ce022194-8c04-4d08-9d00-c95e9f15a559-config\") pod \"dnsmasq-dnsmasq-5b68c79f89-ctm52\" (UID: \"ce022194-8c04-4d08-9d00-c95e9f15a559\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-5b68c79f89-ctm52" Jan 20 17:58:58 crc kubenswrapper[4558]: I0120 17:58:58.370762 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/ce022194-8c04-4d08-9d00-c95e9f15a559-openstack-edpm-ipam\") pod \"dnsmasq-dnsmasq-5b68c79f89-ctm52\" (UID: \"ce022194-8c04-4d08-9d00-c95e9f15a559\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-5b68c79f89-ctm52" Jan 20 17:58:58 crc kubenswrapper[4558]: I0120 17:58:58.370807 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/ce022194-8c04-4d08-9d00-c95e9f15a559-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-5b68c79f89-ctm52\" (UID: \"ce022194-8c04-4d08-9d00-c95e9f15a559\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-5b68c79f89-ctm52" Jan 20 17:58:58 crc kubenswrapper[4558]: I0120 17:58:58.387378 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jk4nn\" (UniqueName: \"kubernetes.io/projected/ce022194-8c04-4d08-9d00-c95e9f15a559-kube-api-access-jk4nn\") pod \"dnsmasq-dnsmasq-5b68c79f89-ctm52\" (UID: \"ce022194-8c04-4d08-9d00-c95e9f15a559\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-5b68c79f89-ctm52" Jan 20 17:58:58 crc kubenswrapper[4558]: I0120 17:58:58.527654 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-5b68c79f89-ctm52" Jan 20 17:58:59 crc kubenswrapper[4558]: I0120 17:58:59.379477 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-5b68c79f89-ctm52"] Jan 20 17:59:00 crc kubenswrapper[4558]: I0120 17:59:00.262097 4558 generic.go:334] "Generic (PLEG): container finished" podID="ce022194-8c04-4d08-9d00-c95e9f15a559" containerID="94cf87b83d0920312ed77bc4f0f14377663825e352b011645bff6f7570f5929a" exitCode=0 Jan 20 17:59:00 crc kubenswrapper[4558]: I0120 17:59:00.262207 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-5b68c79f89-ctm52" event={"ID":"ce022194-8c04-4d08-9d00-c95e9f15a559","Type":"ContainerDied","Data":"94cf87b83d0920312ed77bc4f0f14377663825e352b011645bff6f7570f5929a"} Jan 20 17:59:00 crc kubenswrapper[4558]: I0120 17:59:00.262450 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-5b68c79f89-ctm52" event={"ID":"ce022194-8c04-4d08-9d00-c95e9f15a559","Type":"ContainerStarted","Data":"c0643c80785e8175a87935a9d633ec82cd8617ab5b2c1c980c27257d676c9cdf"} Jan 20 17:59:01 crc kubenswrapper[4558]: I0120 17:59:01.272295 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-5b68c79f89-ctm52" event={"ID":"ce022194-8c04-4d08-9d00-c95e9f15a559","Type":"ContainerStarted","Data":"6ea4f09cc53b9264e03aa6958bc090bc8d92903359284a9cf9a25fcaf8bc65c8"} Jan 20 17:59:01 crc kubenswrapper[4558]: I0120 17:59:01.272582 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-5b68c79f89-ctm52" Jan 20 17:59:01 crc kubenswrapper[4558]: I0120 17:59:01.289840 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-5b68c79f89-ctm52" podStartSLOduration=3.289823097 podStartE2EDuration="3.289823097s" podCreationTimestamp="2026-01-20 17:58:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:59:01.287441651 +0000 UTC m=+4635.047779618" watchObservedRunningTime="2026-01-20 17:59:01.289823097 +0000 UTC m=+4635.050161065" Jan 20 17:59:08 crc kubenswrapper[4558]: I0120 17:59:08.529409 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-5b68c79f89-ctm52" Jan 20 17:59:08 crc kubenswrapper[4558]: I0120 17:59:08.597587 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-l7p2w"] Jan 20 17:59:08 crc kubenswrapper[4558]: I0120 17:59:08.597834 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-l7p2w" podUID="314a7e85-558d-49d5-b281-52342e3e4c01" containerName="dnsmasq-dns" containerID="cri-o://fc055d0cca31052c4ea70b2f03c41fa7f8864175aa9928a543f46fd5c571ca9b" gracePeriod=10 Jan 20 17:59:08 crc kubenswrapper[4558]: I0120 17:59:08.707659 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-79667f9c49-fgsgc"] Jan 20 17:59:08 crc kubenswrapper[4558]: I0120 17:59:08.708773 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-79667f9c49-fgsgc" Jan 20 17:59:08 crc kubenswrapper[4558]: I0120 17:59:08.711973 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6ttfj\" (UniqueName: \"kubernetes.io/projected/bc6d633b-3454-4712-966c-5150f8b7219e-kube-api-access-6ttfj\") pod \"dnsmasq-dnsmasq-79667f9c49-fgsgc\" (UID: \"bc6d633b-3454-4712-966c-5150f8b7219e\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-79667f9c49-fgsgc" Jan 20 17:59:08 crc kubenswrapper[4558]: I0120 17:59:08.712034 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc6d633b-3454-4712-966c-5150f8b7219e-config\") pod \"dnsmasq-dnsmasq-79667f9c49-fgsgc\" (UID: \"bc6d633b-3454-4712-966c-5150f8b7219e\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-79667f9c49-fgsgc" Jan 20 17:59:08 crc kubenswrapper[4558]: I0120 17:59:08.712052 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/bc6d633b-3454-4712-966c-5150f8b7219e-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-79667f9c49-fgsgc\" (UID: \"bc6d633b-3454-4712-966c-5150f8b7219e\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-79667f9c49-fgsgc" Jan 20 17:59:08 crc kubenswrapper[4558]: I0120 17:59:08.712085 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/bc6d633b-3454-4712-966c-5150f8b7219e-openstack-edpm-ipam\") pod \"dnsmasq-dnsmasq-79667f9c49-fgsgc\" (UID: \"bc6d633b-3454-4712-966c-5150f8b7219e\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-79667f9c49-fgsgc" Jan 20 17:59:08 crc kubenswrapper[4558]: I0120 17:59:08.717619 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-79667f9c49-fgsgc"] Jan 20 17:59:08 crc kubenswrapper[4558]: I0120 17:59:08.806306 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-79667f9c49-fgsgc"] Jan 20 17:59:08 crc kubenswrapper[4558]: E0120 17:59:08.806835 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[config dnsmasq-svc kube-api-access-6ttfj openstack-edpm-ipam], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-79667f9c49-fgsgc" podUID="bc6d633b-3454-4712-966c-5150f8b7219e" Jan 20 17:59:08 crc kubenswrapper[4558]: I0120 17:59:08.813252 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6ttfj\" (UniqueName: \"kubernetes.io/projected/bc6d633b-3454-4712-966c-5150f8b7219e-kube-api-access-6ttfj\") pod \"dnsmasq-dnsmasq-79667f9c49-fgsgc\" (UID: \"bc6d633b-3454-4712-966c-5150f8b7219e\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-79667f9c49-fgsgc" Jan 20 17:59:08 crc kubenswrapper[4558]: I0120 17:59:08.813321 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc6d633b-3454-4712-966c-5150f8b7219e-config\") pod \"dnsmasq-dnsmasq-79667f9c49-fgsgc\" (UID: \"bc6d633b-3454-4712-966c-5150f8b7219e\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-79667f9c49-fgsgc" Jan 20 17:59:08 crc kubenswrapper[4558]: I0120 17:59:08.813473 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dnsmasq-svc\" (UniqueName: 
\"kubernetes.io/configmap/bc6d633b-3454-4712-966c-5150f8b7219e-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-79667f9c49-fgsgc\" (UID: \"bc6d633b-3454-4712-966c-5150f8b7219e\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-79667f9c49-fgsgc" Jan 20 17:59:08 crc kubenswrapper[4558]: I0120 17:59:08.813519 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/bc6d633b-3454-4712-966c-5150f8b7219e-openstack-edpm-ipam\") pod \"dnsmasq-dnsmasq-79667f9c49-fgsgc\" (UID: \"bc6d633b-3454-4712-966c-5150f8b7219e\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-79667f9c49-fgsgc" Jan 20 17:59:08 crc kubenswrapper[4558]: E0120 17:59:08.813615 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-edpm-ipam: configmap "openstack-edpm-ipam" not found Jan 20 17:59:08 crc kubenswrapper[4558]: E0120 17:59:08.813674 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/bc6d633b-3454-4712-966c-5150f8b7219e-openstack-edpm-ipam podName:bc6d633b-3454-4712-966c-5150f8b7219e nodeName:}" failed. No retries permitted until 2026-01-20 17:59:09.313659961 +0000 UTC m=+4643.073997928 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "openstack-edpm-ipam" (UniqueName: "kubernetes.io/configmap/bc6d633b-3454-4712-966c-5150f8b7219e-openstack-edpm-ipam") pod "dnsmasq-dnsmasq-79667f9c49-fgsgc" (UID: "bc6d633b-3454-4712-966c-5150f8b7219e") : configmap "openstack-edpm-ipam" not found Jan 20 17:59:08 crc kubenswrapper[4558]: I0120 17:59:08.814423 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc6d633b-3454-4712-966c-5150f8b7219e-config\") pod \"dnsmasq-dnsmasq-79667f9c49-fgsgc\" (UID: \"bc6d633b-3454-4712-966c-5150f8b7219e\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-79667f9c49-fgsgc" Jan 20 17:59:08 crc kubenswrapper[4558]: I0120 17:59:08.814697 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/bc6d633b-3454-4712-966c-5150f8b7219e-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-79667f9c49-fgsgc\" (UID: \"bc6d633b-3454-4712-966c-5150f8b7219e\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-79667f9c49-fgsgc" Jan 20 17:59:08 crc kubenswrapper[4558]: I0120 17:59:08.827516 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-9pmhk"] Jan 20 17:59:08 crc kubenswrapper[4558]: I0120 17:59:08.828594 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-9pmhk" Jan 20 17:59:08 crc kubenswrapper[4558]: I0120 17:59:08.848946 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-9pmhk"] Jan 20 17:59:08 crc kubenswrapper[4558]: I0120 17:59:08.903000 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6ttfj\" (UniqueName: \"kubernetes.io/projected/bc6d633b-3454-4712-966c-5150f8b7219e-kube-api-access-6ttfj\") pod \"dnsmasq-dnsmasq-79667f9c49-fgsgc\" (UID: \"bc6d633b-3454-4712-966c-5150f8b7219e\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-79667f9c49-fgsgc" Jan 20 17:59:08 crc kubenswrapper[4558]: I0120 17:59:08.915959 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b76c9805-2cef-461b-85d8-d4848ac6ed1e-config\") pod \"dnsmasq-dnsmasq-84b9f45d47-9pmhk\" (UID: \"b76c9805-2cef-461b-85d8-d4848ac6ed1e\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-9pmhk" Jan 20 17:59:08 crc kubenswrapper[4558]: I0120 17:59:08.916059 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mvmhv\" (UniqueName: \"kubernetes.io/projected/b76c9805-2cef-461b-85d8-d4848ac6ed1e-kube-api-access-mvmhv\") pod \"dnsmasq-dnsmasq-84b9f45d47-9pmhk\" (UID: \"b76c9805-2cef-461b-85d8-d4848ac6ed1e\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-9pmhk" Jan 20 17:59:08 crc kubenswrapper[4558]: I0120 17:59:08.916205 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/b76c9805-2cef-461b-85d8-d4848ac6ed1e-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-84b9f45d47-9pmhk\" (UID: \"b76c9805-2cef-461b-85d8-d4848ac6ed1e\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-9pmhk" Jan 20 17:59:09 crc kubenswrapper[4558]: I0120 17:59:09.017629 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b76c9805-2cef-461b-85d8-d4848ac6ed1e-config\") pod \"dnsmasq-dnsmasq-84b9f45d47-9pmhk\" (UID: \"b76c9805-2cef-461b-85d8-d4848ac6ed1e\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-9pmhk" Jan 20 17:59:09 crc kubenswrapper[4558]: I0120 17:59:09.017885 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mvmhv\" (UniqueName: \"kubernetes.io/projected/b76c9805-2cef-461b-85d8-d4848ac6ed1e-kube-api-access-mvmhv\") pod \"dnsmasq-dnsmasq-84b9f45d47-9pmhk\" (UID: \"b76c9805-2cef-461b-85d8-d4848ac6ed1e\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-9pmhk" Jan 20 17:59:09 crc kubenswrapper[4558]: I0120 17:59:09.017955 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/b76c9805-2cef-461b-85d8-d4848ac6ed1e-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-84b9f45d47-9pmhk\" (UID: \"b76c9805-2cef-461b-85d8-d4848ac6ed1e\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-9pmhk" Jan 20 17:59:09 crc kubenswrapper[4558]: I0120 17:59:09.018868 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/b76c9805-2cef-461b-85d8-d4848ac6ed1e-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-84b9f45d47-9pmhk\" (UID: \"b76c9805-2cef-461b-85d8-d4848ac6ed1e\") " 
pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-9pmhk" Jan 20 17:59:09 crc kubenswrapper[4558]: I0120 17:59:09.019425 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b76c9805-2cef-461b-85d8-d4848ac6ed1e-config\") pod \"dnsmasq-dnsmasq-84b9f45d47-9pmhk\" (UID: \"b76c9805-2cef-461b-85d8-d4848ac6ed1e\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-9pmhk" Jan 20 17:59:09 crc kubenswrapper[4558]: I0120 17:59:09.036782 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mvmhv\" (UniqueName: \"kubernetes.io/projected/b76c9805-2cef-461b-85d8-d4848ac6ed1e-kube-api-access-mvmhv\") pod \"dnsmasq-dnsmasq-84b9f45d47-9pmhk\" (UID: \"b76c9805-2cef-461b-85d8-d4848ac6ed1e\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-9pmhk" Jan 20 17:59:09 crc kubenswrapper[4558]: I0120 17:59:09.149504 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-l7p2w" Jan 20 17:59:09 crc kubenswrapper[4558]: I0120 17:59:09.151802 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-9pmhk" Jan 20 17:59:09 crc kubenswrapper[4558]: I0120 17:59:09.226336 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bsb7c\" (UniqueName: \"kubernetes.io/projected/314a7e85-558d-49d5-b281-52342e3e4c01-kube-api-access-bsb7c\") pod \"314a7e85-558d-49d5-b281-52342e3e4c01\" (UID: \"314a7e85-558d-49d5-b281-52342e3e4c01\") " Jan 20 17:59:09 crc kubenswrapper[4558]: I0120 17:59:09.226410 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/314a7e85-558d-49d5-b281-52342e3e4c01-config\") pod \"314a7e85-558d-49d5-b281-52342e3e4c01\" (UID: \"314a7e85-558d-49d5-b281-52342e3e4c01\") " Jan 20 17:59:09 crc kubenswrapper[4558]: I0120 17:59:09.226449 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/314a7e85-558d-49d5-b281-52342e3e4c01-dnsmasq-svc\") pod \"314a7e85-558d-49d5-b281-52342e3e4c01\" (UID: \"314a7e85-558d-49d5-b281-52342e3e4c01\") " Jan 20 17:59:09 crc kubenswrapper[4558]: I0120 17:59:09.238076 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/314a7e85-558d-49d5-b281-52342e3e4c01-kube-api-access-bsb7c" (OuterVolumeSpecName: "kube-api-access-bsb7c") pod "314a7e85-558d-49d5-b281-52342e3e4c01" (UID: "314a7e85-558d-49d5-b281-52342e3e4c01"). InnerVolumeSpecName "kube-api-access-bsb7c". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:59:09 crc kubenswrapper[4558]: I0120 17:59:09.265981 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/314a7e85-558d-49d5-b281-52342e3e4c01-config" (OuterVolumeSpecName: "config") pod "314a7e85-558d-49d5-b281-52342e3e4c01" (UID: "314a7e85-558d-49d5-b281-52342e3e4c01"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:59:09 crc kubenswrapper[4558]: I0120 17:59:09.275767 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/314a7e85-558d-49d5-b281-52342e3e4c01-dnsmasq-svc" (OuterVolumeSpecName: "dnsmasq-svc") pod "314a7e85-558d-49d5-b281-52342e3e4c01" (UID: "314a7e85-558d-49d5-b281-52342e3e4c01"). 
InnerVolumeSpecName "dnsmasq-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:59:09 crc kubenswrapper[4558]: I0120 17:59:09.327969 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/bc6d633b-3454-4712-966c-5150f8b7219e-openstack-edpm-ipam\") pod \"dnsmasq-dnsmasq-79667f9c49-fgsgc\" (UID: \"bc6d633b-3454-4712-966c-5150f8b7219e\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-79667f9c49-fgsgc" Jan 20 17:59:09 crc kubenswrapper[4558]: I0120 17:59:09.328110 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/314a7e85-558d-49d5-b281-52342e3e4c01-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:59:09 crc kubenswrapper[4558]: I0120 17:59:09.328131 4558 reconciler_common.go:293] "Volume detached for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/314a7e85-558d-49d5-b281-52342e3e4c01-dnsmasq-svc\") on node \"crc\" DevicePath \"\"" Jan 20 17:59:09 crc kubenswrapper[4558]: I0120 17:59:09.328145 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bsb7c\" (UniqueName: \"kubernetes.io/projected/314a7e85-558d-49d5-b281-52342e3e4c01-kube-api-access-bsb7c\") on node \"crc\" DevicePath \"\"" Jan 20 17:59:09 crc kubenswrapper[4558]: E0120 17:59:09.328105 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-edpm-ipam: configmap "openstack-edpm-ipam" not found Jan 20 17:59:09 crc kubenswrapper[4558]: E0120 17:59:09.328220 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/bc6d633b-3454-4712-966c-5150f8b7219e-openstack-edpm-ipam podName:bc6d633b-3454-4712-966c-5150f8b7219e nodeName:}" failed. No retries permitted until 2026-01-20 17:59:10.328205842 +0000 UTC m=+4644.088543810 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "openstack-edpm-ipam" (UniqueName: "kubernetes.io/configmap/bc6d633b-3454-4712-966c-5150f8b7219e-openstack-edpm-ipam") pod "dnsmasq-dnsmasq-79667f9c49-fgsgc" (UID: "bc6d633b-3454-4712-966c-5150f8b7219e") : configmap "openstack-edpm-ipam" not found Jan 20 17:59:09 crc kubenswrapper[4558]: I0120 17:59:09.346180 4558 generic.go:334] "Generic (PLEG): container finished" podID="314a7e85-558d-49d5-b281-52342e3e4c01" containerID="fc055d0cca31052c4ea70b2f03c41fa7f8864175aa9928a543f46fd5c571ca9b" exitCode=0 Jan 20 17:59:09 crc kubenswrapper[4558]: I0120 17:59:09.346241 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-l7p2w" event={"ID":"314a7e85-558d-49d5-b281-52342e3e4c01","Type":"ContainerDied","Data":"fc055d0cca31052c4ea70b2f03c41fa7f8864175aa9928a543f46fd5c571ca9b"} Jan 20 17:59:09 crc kubenswrapper[4558]: I0120 17:59:09.346259 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-l7p2w" Jan 20 17:59:09 crc kubenswrapper[4558]: I0120 17:59:09.346286 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-l7p2w" event={"ID":"314a7e85-558d-49d5-b281-52342e3e4c01","Type":"ContainerDied","Data":"26b801148151bae8daf2efca3e965b6f4a03b58da5117a7f22ac3bd058a41fdb"} Jan 20 17:59:09 crc kubenswrapper[4558]: I0120 17:59:09.346307 4558 scope.go:117] "RemoveContainer" containerID="fc055d0cca31052c4ea70b2f03c41fa7f8864175aa9928a543f46fd5c571ca9b" Jan 20 17:59:09 crc kubenswrapper[4558]: I0120 17:59:09.346268 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-79667f9c49-fgsgc" Jan 20 17:59:09 crc kubenswrapper[4558]: I0120 17:59:09.359311 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-79667f9c49-fgsgc" Jan 20 17:59:09 crc kubenswrapper[4558]: I0120 17:59:09.372647 4558 scope.go:117] "RemoveContainer" containerID="81d5e20fc02296208cf6c1a8c066a820d8a642c7b086db42abc8ea3d3a418052" Jan 20 17:59:09 crc kubenswrapper[4558]: I0120 17:59:09.383396 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-l7p2w"] Jan 20 17:59:09 crc kubenswrapper[4558]: I0120 17:59:09.387219 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-l7p2w"] Jan 20 17:59:09 crc kubenswrapper[4558]: I0120 17:59:09.395300 4558 scope.go:117] "RemoveContainer" containerID="fc055d0cca31052c4ea70b2f03c41fa7f8864175aa9928a543f46fd5c571ca9b" Jan 20 17:59:09 crc kubenswrapper[4558]: E0120 17:59:09.395660 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fc055d0cca31052c4ea70b2f03c41fa7f8864175aa9928a543f46fd5c571ca9b\": container with ID starting with fc055d0cca31052c4ea70b2f03c41fa7f8864175aa9928a543f46fd5c571ca9b not found: ID does not exist" containerID="fc055d0cca31052c4ea70b2f03c41fa7f8864175aa9928a543f46fd5c571ca9b" Jan 20 17:59:09 crc kubenswrapper[4558]: I0120 17:59:09.395692 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fc055d0cca31052c4ea70b2f03c41fa7f8864175aa9928a543f46fd5c571ca9b"} err="failed to get container status \"fc055d0cca31052c4ea70b2f03c41fa7f8864175aa9928a543f46fd5c571ca9b\": rpc error: code = NotFound desc = could not find container \"fc055d0cca31052c4ea70b2f03c41fa7f8864175aa9928a543f46fd5c571ca9b\": container with ID starting with fc055d0cca31052c4ea70b2f03c41fa7f8864175aa9928a543f46fd5c571ca9b not found: ID does not exist" Jan 20 17:59:09 crc kubenswrapper[4558]: I0120 17:59:09.395715 4558 scope.go:117] "RemoveContainer" containerID="81d5e20fc02296208cf6c1a8c066a820d8a642c7b086db42abc8ea3d3a418052" Jan 20 17:59:09 crc kubenswrapper[4558]: E0120 17:59:09.396037 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"81d5e20fc02296208cf6c1a8c066a820d8a642c7b086db42abc8ea3d3a418052\": container with ID starting with 81d5e20fc02296208cf6c1a8c066a820d8a642c7b086db42abc8ea3d3a418052 not found: ID does not exist" containerID="81d5e20fc02296208cf6c1a8c066a820d8a642c7b086db42abc8ea3d3a418052" Jan 20 17:59:09 crc kubenswrapper[4558]: I0120 17:59:09.396090 4558 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"81d5e20fc02296208cf6c1a8c066a820d8a642c7b086db42abc8ea3d3a418052"} err="failed to get container status \"81d5e20fc02296208cf6c1a8c066a820d8a642c7b086db42abc8ea3d3a418052\": rpc error: code = NotFound desc = could not find container \"81d5e20fc02296208cf6c1a8c066a820d8a642c7b086db42abc8ea3d3a418052\": container with ID starting with 81d5e20fc02296208cf6c1a8c066a820d8a642c7b086db42abc8ea3d3a418052 not found: ID does not exist" Jan 20 17:59:09 crc kubenswrapper[4558]: I0120 17:59:09.429268 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/bc6d633b-3454-4712-966c-5150f8b7219e-dnsmasq-svc\") pod \"bc6d633b-3454-4712-966c-5150f8b7219e\" (UID: \"bc6d633b-3454-4712-966c-5150f8b7219e\") " Jan 20 17:59:09 crc kubenswrapper[4558]: I0120 17:59:09.429343 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc6d633b-3454-4712-966c-5150f8b7219e-config\") pod \"bc6d633b-3454-4712-966c-5150f8b7219e\" (UID: \"bc6d633b-3454-4712-966c-5150f8b7219e\") " Jan 20 17:59:09 crc kubenswrapper[4558]: I0120 17:59:09.429398 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ttfj\" (UniqueName: \"kubernetes.io/projected/bc6d633b-3454-4712-966c-5150f8b7219e-kube-api-access-6ttfj\") pod \"bc6d633b-3454-4712-966c-5150f8b7219e\" (UID: \"bc6d633b-3454-4712-966c-5150f8b7219e\") " Jan 20 17:59:09 crc kubenswrapper[4558]: I0120 17:59:09.429916 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bc6d633b-3454-4712-966c-5150f8b7219e-config" (OuterVolumeSpecName: "config") pod "bc6d633b-3454-4712-966c-5150f8b7219e" (UID: "bc6d633b-3454-4712-966c-5150f8b7219e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:59:09 crc kubenswrapper[4558]: I0120 17:59:09.430343 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bc6d633b-3454-4712-966c-5150f8b7219e-dnsmasq-svc" (OuterVolumeSpecName: "dnsmasq-svc") pod "bc6d633b-3454-4712-966c-5150f8b7219e" (UID: "bc6d633b-3454-4712-966c-5150f8b7219e"). InnerVolumeSpecName "dnsmasq-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:59:09 crc kubenswrapper[4558]: I0120 17:59:09.433769 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc6d633b-3454-4712-966c-5150f8b7219e-kube-api-access-6ttfj" (OuterVolumeSpecName: "kube-api-access-6ttfj") pod "bc6d633b-3454-4712-966c-5150f8b7219e" (UID: "bc6d633b-3454-4712-966c-5150f8b7219e"). InnerVolumeSpecName "kube-api-access-6ttfj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:59:09 crc kubenswrapper[4558]: I0120 17:59:09.531876 4558 reconciler_common.go:293] "Volume detached for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/bc6d633b-3454-4712-966c-5150f8b7219e-dnsmasq-svc\") on node \"crc\" DevicePath \"\"" Jan 20 17:59:09 crc kubenswrapper[4558]: I0120 17:59:09.531908 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc6d633b-3454-4712-966c-5150f8b7219e-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:59:09 crc kubenswrapper[4558]: I0120 17:59:09.531923 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ttfj\" (UniqueName: \"kubernetes.io/projected/bc6d633b-3454-4712-966c-5150f8b7219e-kube-api-access-6ttfj\") on node \"crc\" DevicePath \"\"" Jan 20 17:59:09 crc kubenswrapper[4558]: I0120 17:59:09.594148 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-9pmhk"] Jan 20 17:59:10 crc kubenswrapper[4558]: I0120 17:59:10.348031 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/bc6d633b-3454-4712-966c-5150f8b7219e-openstack-edpm-ipam\") pod \"dnsmasq-dnsmasq-79667f9c49-fgsgc\" (UID: \"bc6d633b-3454-4712-966c-5150f8b7219e\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-79667f9c49-fgsgc" Jan 20 17:59:10 crc kubenswrapper[4558]: E0120 17:59:10.348190 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/openstack-edpm-ipam: configmap "openstack-edpm-ipam" not found Jan 20 17:59:10 crc kubenswrapper[4558]: E0120 17:59:10.348454 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/bc6d633b-3454-4712-966c-5150f8b7219e-openstack-edpm-ipam podName:bc6d633b-3454-4712-966c-5150f8b7219e nodeName:}" failed. No retries permitted until 2026-01-20 17:59:12.348438274 +0000 UTC m=+4646.108776240 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "openstack-edpm-ipam" (UniqueName: "kubernetes.io/configmap/bc6d633b-3454-4712-966c-5150f8b7219e-openstack-edpm-ipam") pod "dnsmasq-dnsmasq-79667f9c49-fgsgc" (UID: "bc6d633b-3454-4712-966c-5150f8b7219e") : configmap "openstack-edpm-ipam" not found Jan 20 17:59:10 crc kubenswrapper[4558]: I0120 17:59:10.357304 4558 generic.go:334] "Generic (PLEG): container finished" podID="b76c9805-2cef-461b-85d8-d4848ac6ed1e" containerID="e2ef94d90c4e131b0b9b303ed386f34bb3f7aafba75048b96dd0117cfd812a16" exitCode=0 Jan 20 17:59:10 crc kubenswrapper[4558]: I0120 17:59:10.357401 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-79667f9c49-fgsgc" Jan 20 17:59:10 crc kubenswrapper[4558]: I0120 17:59:10.357444 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-9pmhk" event={"ID":"b76c9805-2cef-461b-85d8-d4848ac6ed1e","Type":"ContainerDied","Data":"e2ef94d90c4e131b0b9b303ed386f34bb3f7aafba75048b96dd0117cfd812a16"} Jan 20 17:59:10 crc kubenswrapper[4558]: I0120 17:59:10.357480 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-9pmhk" event={"ID":"b76c9805-2cef-461b-85d8-d4848ac6ed1e","Type":"ContainerStarted","Data":"9c512d751c93f7532d32cfa3109988d72c4449d8c37616352e99a2c2e447970c"} Jan 20 17:59:10 crc kubenswrapper[4558]: I0120 17:59:10.453416 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-79667f9c49-fgsgc"] Jan 20 17:59:10 crc kubenswrapper[4558]: I0120 17:59:10.456834 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-79667f9c49-fgsgc"] Jan 20 17:59:10 crc kubenswrapper[4558]: I0120 17:59:10.552467 4558 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/bc6d633b-3454-4712-966c-5150f8b7219e-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Jan 20 17:59:10 crc kubenswrapper[4558]: I0120 17:59:10.576035 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="314a7e85-558d-49d5-b281-52342e3e4c01" path="/var/lib/kubelet/pods/314a7e85-558d-49d5-b281-52342e3e4c01/volumes" Jan 20 17:59:10 crc kubenswrapper[4558]: I0120 17:59:10.576728 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc6d633b-3454-4712-966c-5150f8b7219e" path="/var/lib/kubelet/pods/bc6d633b-3454-4712-966c-5150f8b7219e/volumes" Jan 20 17:59:11 crc kubenswrapper[4558]: I0120 17:59:11.369710 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-9pmhk" event={"ID":"b76c9805-2cef-461b-85d8-d4848ac6ed1e","Type":"ContainerStarted","Data":"6420d15433b5356b42cb5b21c01089cb101f0b9577e22ab79a6900a469b9421c"} Jan 20 17:59:11 crc kubenswrapper[4558]: I0120 17:59:11.369891 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-9pmhk" Jan 20 17:59:11 crc kubenswrapper[4558]: I0120 17:59:11.401633 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-9pmhk" podStartSLOduration=3.401605873 podStartE2EDuration="3.401605873s" podCreationTimestamp="2026-01-20 17:59:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:59:11.384319499 +0000 UTC m=+4645.144657466" watchObservedRunningTime="2026-01-20 17:59:11.401605873 +0000 UTC m=+4645.161943839" Jan 20 17:59:15 crc kubenswrapper[4558]: I0120 17:59:15.069123 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-d8tgq"] Jan 20 17:59:15 crc kubenswrapper[4558]: I0120 17:59:15.072689 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-d8tgq"] Jan 20 17:59:15 crc kubenswrapper[4558]: I0120 17:59:15.194759 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-cchzz"] Jan 20 17:59:15 crc kubenswrapper[4558]: E0120 17:59:15.195185 4558 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="314a7e85-558d-49d5-b281-52342e3e4c01" containerName="dnsmasq-dns" Jan 20 17:59:15 crc kubenswrapper[4558]: I0120 17:59:15.195205 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="314a7e85-558d-49d5-b281-52342e3e4c01" containerName="dnsmasq-dns" Jan 20 17:59:15 crc kubenswrapper[4558]: E0120 17:59:15.195221 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="314a7e85-558d-49d5-b281-52342e3e4c01" containerName="init" Jan 20 17:59:15 crc kubenswrapper[4558]: I0120 17:59:15.195228 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="314a7e85-558d-49d5-b281-52342e3e4c01" containerName="init" Jan 20 17:59:15 crc kubenswrapper[4558]: I0120 17:59:15.195365 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="314a7e85-558d-49d5-b281-52342e3e4c01" containerName="dnsmasq-dns" Jan 20 17:59:15 crc kubenswrapper[4558]: I0120 17:59:15.195899 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-cchzz" Jan 20 17:59:15 crc kubenswrapper[4558]: I0120 17:59:15.200177 4558 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-k9ht8" Jan 20 17:59:15 crc kubenswrapper[4558]: I0120 17:59:15.200679 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-cchzz"] Jan 20 17:59:15 crc kubenswrapper[4558]: I0120 17:59:15.202670 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Jan 20 17:59:15 crc kubenswrapper[4558]: I0120 17:59:15.202834 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Jan 20 17:59:15 crc kubenswrapper[4558]: I0120 17:59:15.203000 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Jan 20 17:59:15 crc kubenswrapper[4558]: I0120 17:59:15.217656 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-28b6z\" (UniqueName: \"kubernetes.io/projected/6c1eae80-b9e5-41b4-b33e-27315787e122-kube-api-access-28b6z\") pod \"crc-storage-crc-cchzz\" (UID: \"6c1eae80-b9e5-41b4-b33e-27315787e122\") " pod="crc-storage/crc-storage-crc-cchzz" Jan 20 17:59:15 crc kubenswrapper[4558]: I0120 17:59:15.217972 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/6c1eae80-b9e5-41b4-b33e-27315787e122-crc-storage\") pod \"crc-storage-crc-cchzz\" (UID: \"6c1eae80-b9e5-41b4-b33e-27315787e122\") " pod="crc-storage/crc-storage-crc-cchzz" Jan 20 17:59:15 crc kubenswrapper[4558]: I0120 17:59:15.218003 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/6c1eae80-b9e5-41b4-b33e-27315787e122-node-mnt\") pod \"crc-storage-crc-cchzz\" (UID: \"6c1eae80-b9e5-41b4-b33e-27315787e122\") " pod="crc-storage/crc-storage-crc-cchzz" Jan 20 17:59:15 crc kubenswrapper[4558]: I0120 17:59:15.319768 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-28b6z\" (UniqueName: \"kubernetes.io/projected/6c1eae80-b9e5-41b4-b33e-27315787e122-kube-api-access-28b6z\") pod \"crc-storage-crc-cchzz\" (UID: \"6c1eae80-b9e5-41b4-b33e-27315787e122\") " pod="crc-storage/crc-storage-crc-cchzz" Jan 20 17:59:15 crc kubenswrapper[4558]: I0120 17:59:15.319836 4558 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/6c1eae80-b9e5-41b4-b33e-27315787e122-crc-storage\") pod \"crc-storage-crc-cchzz\" (UID: \"6c1eae80-b9e5-41b4-b33e-27315787e122\") " pod="crc-storage/crc-storage-crc-cchzz" Jan 20 17:59:15 crc kubenswrapper[4558]: I0120 17:59:15.319873 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/6c1eae80-b9e5-41b4-b33e-27315787e122-node-mnt\") pod \"crc-storage-crc-cchzz\" (UID: \"6c1eae80-b9e5-41b4-b33e-27315787e122\") " pod="crc-storage/crc-storage-crc-cchzz" Jan 20 17:59:15 crc kubenswrapper[4558]: I0120 17:59:15.320211 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/6c1eae80-b9e5-41b4-b33e-27315787e122-node-mnt\") pod \"crc-storage-crc-cchzz\" (UID: \"6c1eae80-b9e5-41b4-b33e-27315787e122\") " pod="crc-storage/crc-storage-crc-cchzz" Jan 20 17:59:15 crc kubenswrapper[4558]: I0120 17:59:15.320694 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/6c1eae80-b9e5-41b4-b33e-27315787e122-crc-storage\") pod \"crc-storage-crc-cchzz\" (UID: \"6c1eae80-b9e5-41b4-b33e-27315787e122\") " pod="crc-storage/crc-storage-crc-cchzz" Jan 20 17:59:15 crc kubenswrapper[4558]: I0120 17:59:15.348967 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-28b6z\" (UniqueName: \"kubernetes.io/projected/6c1eae80-b9e5-41b4-b33e-27315787e122-kube-api-access-28b6z\") pod \"crc-storage-crc-cchzz\" (UID: \"6c1eae80-b9e5-41b4-b33e-27315787e122\") " pod="crc-storage/crc-storage-crc-cchzz" Jan 20 17:59:15 crc kubenswrapper[4558]: I0120 17:59:15.511048 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-cchzz" Jan 20 17:59:15 crc kubenswrapper[4558]: I0120 17:59:15.915407 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-cchzz"] Jan 20 17:59:16 crc kubenswrapper[4558]: I0120 17:59:16.421060 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-cchzz" event={"ID":"6c1eae80-b9e5-41b4-b33e-27315787e122","Type":"ContainerStarted","Data":"1fd7d334b31a9a392a5c00c5b3c4fa414274ab4bac9e6e5d5b140945b0ea2a56"} Jan 20 17:59:16 crc kubenswrapper[4558]: I0120 17:59:16.577518 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cbd862c9-fadb-44e6-928e-0976fa3a022d" path="/var/lib/kubelet/pods/cbd862c9-fadb-44e6-928e-0976fa3a022d/volumes" Jan 20 17:59:16 crc kubenswrapper[4558]: E0120 17:59:16.835629 4558 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6c1eae80_b9e5_41b4_b33e_27315787e122.slice/crio-conmon-213d4a39fc3d76e3e28077d4f05dc4a4b9af3c199c14c25a05cac8843f18e8d9.scope\": RecentStats: unable to find data in memory cache]" Jan 20 17:59:17 crc kubenswrapper[4558]: I0120 17:59:17.431046 4558 generic.go:334] "Generic (PLEG): container finished" podID="6c1eae80-b9e5-41b4-b33e-27315787e122" containerID="213d4a39fc3d76e3e28077d4f05dc4a4b9af3c199c14c25a05cac8843f18e8d9" exitCode=0 Jan 20 17:59:17 crc kubenswrapper[4558]: I0120 17:59:17.431111 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-cchzz" event={"ID":"6c1eae80-b9e5-41b4-b33e-27315787e122","Type":"ContainerDied","Data":"213d4a39fc3d76e3e28077d4f05dc4a4b9af3c199c14c25a05cac8843f18e8d9"} Jan 20 17:59:19 crc kubenswrapper[4558]: I0120 17:59:19.049113 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-cchzz" Jan 20 17:59:19 crc kubenswrapper[4558]: I0120 17:59:19.154326 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-9pmhk" Jan 20 17:59:19 crc kubenswrapper[4558]: I0120 17:59:19.181579 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/6c1eae80-b9e5-41b4-b33e-27315787e122-node-mnt\") pod \"6c1eae80-b9e5-41b4-b33e-27315787e122\" (UID: \"6c1eae80-b9e5-41b4-b33e-27315787e122\") " Jan 20 17:59:19 crc kubenswrapper[4558]: I0120 17:59:19.181706 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-28b6z\" (UniqueName: \"kubernetes.io/projected/6c1eae80-b9e5-41b4-b33e-27315787e122-kube-api-access-28b6z\") pod \"6c1eae80-b9e5-41b4-b33e-27315787e122\" (UID: \"6c1eae80-b9e5-41b4-b33e-27315787e122\") " Jan 20 17:59:19 crc kubenswrapper[4558]: I0120 17:59:19.182361 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/6c1eae80-b9e5-41b4-b33e-27315787e122-crc-storage\") pod \"6c1eae80-b9e5-41b4-b33e-27315787e122\" (UID: \"6c1eae80-b9e5-41b4-b33e-27315787e122\") " Jan 20 17:59:19 crc kubenswrapper[4558]: I0120 17:59:19.182467 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6c1eae80-b9e5-41b4-b33e-27315787e122-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "6c1eae80-b9e5-41b4-b33e-27315787e122" (UID: "6c1eae80-b9e5-41b4-b33e-27315787e122"). 
InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:59:19 crc kubenswrapper[4558]: I0120 17:59:19.182847 4558 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/6c1eae80-b9e5-41b4-b33e-27315787e122-node-mnt\") on node \"crc\" DevicePath \"\"" Jan 20 17:59:19 crc kubenswrapper[4558]: I0120 17:59:19.196819 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6c1eae80-b9e5-41b4-b33e-27315787e122-kube-api-access-28b6z" (OuterVolumeSpecName: "kube-api-access-28b6z") pod "6c1eae80-b9e5-41b4-b33e-27315787e122" (UID: "6c1eae80-b9e5-41b4-b33e-27315787e122"). InnerVolumeSpecName "kube-api-access-28b6z". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:59:19 crc kubenswrapper[4558]: I0120 17:59:19.207139 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6c1eae80-b9e5-41b4-b33e-27315787e122-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "6c1eae80-b9e5-41b4-b33e-27315787e122" (UID: "6c1eae80-b9e5-41b4-b33e-27315787e122"). InnerVolumeSpecName "crc-storage". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:59:19 crc kubenswrapper[4558]: I0120 17:59:19.209116 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-5b68c79f89-ctm52"] Jan 20 17:59:19 crc kubenswrapper[4558]: I0120 17:59:19.209507 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-5b68c79f89-ctm52" podUID="ce022194-8c04-4d08-9d00-c95e9f15a559" containerName="dnsmasq-dns" containerID="cri-o://6ea4f09cc53b9264e03aa6958bc090bc8d92903359284a9cf9a25fcaf8bc65c8" gracePeriod=10 Jan 20 17:59:19 crc kubenswrapper[4558]: I0120 17:59:19.284366 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-28b6z\" (UniqueName: \"kubernetes.io/projected/6c1eae80-b9e5-41b4-b33e-27315787e122-kube-api-access-28b6z\") on node \"crc\" DevicePath \"\"" Jan 20 17:59:19 crc kubenswrapper[4558]: I0120 17:59:19.284400 4558 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/6c1eae80-b9e5-41b4-b33e-27315787e122-crc-storage\") on node \"crc\" DevicePath \"\"" Jan 20 17:59:19 crc kubenswrapper[4558]: I0120 17:59:19.450081 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-cchzz" event={"ID":"6c1eae80-b9e5-41b4-b33e-27315787e122","Type":"ContainerDied","Data":"1fd7d334b31a9a392a5c00c5b3c4fa414274ab4bac9e6e5d5b140945b0ea2a56"} Jan 20 17:59:19 crc kubenswrapper[4558]: I0120 17:59:19.450139 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1fd7d334b31a9a392a5c00c5b3c4fa414274ab4bac9e6e5d5b140945b0ea2a56" Jan 20 17:59:19 crc kubenswrapper[4558]: I0120 17:59:19.450243 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-cchzz" Jan 20 17:59:19 crc kubenswrapper[4558]: I0120 17:59:19.454522 4558 generic.go:334] "Generic (PLEG): container finished" podID="ce022194-8c04-4d08-9d00-c95e9f15a559" containerID="6ea4f09cc53b9264e03aa6958bc090bc8d92903359284a9cf9a25fcaf8bc65c8" exitCode=0 Jan 20 17:59:19 crc kubenswrapper[4558]: I0120 17:59:19.454573 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-5b68c79f89-ctm52" event={"ID":"ce022194-8c04-4d08-9d00-c95e9f15a559","Type":"ContainerDied","Data":"6ea4f09cc53b9264e03aa6958bc090bc8d92903359284a9cf9a25fcaf8bc65c8"} Jan 20 17:59:19 crc kubenswrapper[4558]: I0120 17:59:19.607218 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-5b68c79f89-ctm52" Jan 20 17:59:19 crc kubenswrapper[4558]: I0120 17:59:19.792149 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/ce022194-8c04-4d08-9d00-c95e9f15a559-openstack-edpm-ipam\") pod \"ce022194-8c04-4d08-9d00-c95e9f15a559\" (UID: \"ce022194-8c04-4d08-9d00-c95e9f15a559\") " Jan 20 17:59:19 crc kubenswrapper[4558]: I0120 17:59:19.792268 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jk4nn\" (UniqueName: \"kubernetes.io/projected/ce022194-8c04-4d08-9d00-c95e9f15a559-kube-api-access-jk4nn\") pod \"ce022194-8c04-4d08-9d00-c95e9f15a559\" (UID: \"ce022194-8c04-4d08-9d00-c95e9f15a559\") " Jan 20 17:59:19 crc kubenswrapper[4558]: I0120 17:59:19.792529 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/ce022194-8c04-4d08-9d00-c95e9f15a559-dnsmasq-svc\") pod \"ce022194-8c04-4d08-9d00-c95e9f15a559\" (UID: \"ce022194-8c04-4d08-9d00-c95e9f15a559\") " Jan 20 17:59:19 crc kubenswrapper[4558]: I0120 17:59:19.792563 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ce022194-8c04-4d08-9d00-c95e9f15a559-config\") pod \"ce022194-8c04-4d08-9d00-c95e9f15a559\" (UID: \"ce022194-8c04-4d08-9d00-c95e9f15a559\") " Jan 20 17:59:19 crc kubenswrapper[4558]: I0120 17:59:19.798075 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ce022194-8c04-4d08-9d00-c95e9f15a559-kube-api-access-jk4nn" (OuterVolumeSpecName: "kube-api-access-jk4nn") pod "ce022194-8c04-4d08-9d00-c95e9f15a559" (UID: "ce022194-8c04-4d08-9d00-c95e9f15a559"). InnerVolumeSpecName "kube-api-access-jk4nn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:59:19 crc kubenswrapper[4558]: I0120 17:59:19.821158 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ce022194-8c04-4d08-9d00-c95e9f15a559-config" (OuterVolumeSpecName: "config") pod "ce022194-8c04-4d08-9d00-c95e9f15a559" (UID: "ce022194-8c04-4d08-9d00-c95e9f15a559"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:59:19 crc kubenswrapper[4558]: I0120 17:59:19.824059 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ce022194-8c04-4d08-9d00-c95e9f15a559-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "ce022194-8c04-4d08-9d00-c95e9f15a559" (UID: "ce022194-8c04-4d08-9d00-c95e9f15a559"). InnerVolumeSpecName "openstack-edpm-ipam". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:59:19 crc kubenswrapper[4558]: I0120 17:59:19.824207 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ce022194-8c04-4d08-9d00-c95e9f15a559-dnsmasq-svc" (OuterVolumeSpecName: "dnsmasq-svc") pod "ce022194-8c04-4d08-9d00-c95e9f15a559" (UID: "ce022194-8c04-4d08-9d00-c95e9f15a559"). InnerVolumeSpecName "dnsmasq-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:59:19 crc kubenswrapper[4558]: I0120 17:59:19.894738 4558 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/ce022194-8c04-4d08-9d00-c95e9f15a559-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Jan 20 17:59:19 crc kubenswrapper[4558]: I0120 17:59:19.894772 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jk4nn\" (UniqueName: \"kubernetes.io/projected/ce022194-8c04-4d08-9d00-c95e9f15a559-kube-api-access-jk4nn\") on node \"crc\" DevicePath \"\"" Jan 20 17:59:19 crc kubenswrapper[4558]: I0120 17:59:19.894792 4558 reconciler_common.go:293] "Volume detached for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/ce022194-8c04-4d08-9d00-c95e9f15a559-dnsmasq-svc\") on node \"crc\" DevicePath \"\"" Jan 20 17:59:19 crc kubenswrapper[4558]: I0120 17:59:19.894805 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ce022194-8c04-4d08-9d00-c95e9f15a559-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:59:20 crc kubenswrapper[4558]: I0120 17:59:20.467028 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-5b68c79f89-ctm52" event={"ID":"ce022194-8c04-4d08-9d00-c95e9f15a559","Type":"ContainerDied","Data":"c0643c80785e8175a87935a9d633ec82cd8617ab5b2c1c980c27257d676c9cdf"} Jan 20 17:59:20 crc kubenswrapper[4558]: I0120 17:59:20.467109 4558 scope.go:117] "RemoveContainer" containerID="6ea4f09cc53b9264e03aa6958bc090bc8d92903359284a9cf9a25fcaf8bc65c8" Jan 20 17:59:20 crc kubenswrapper[4558]: I0120 17:59:20.467334 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-5b68c79f89-ctm52" Jan 20 17:59:20 crc kubenswrapper[4558]: I0120 17:59:20.494905 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-5b68c79f89-ctm52"] Jan 20 17:59:20 crc kubenswrapper[4558]: I0120 17:59:20.496059 4558 scope.go:117] "RemoveContainer" containerID="94cf87b83d0920312ed77bc4f0f14377663825e352b011645bff6f7570f5929a" Jan 20 17:59:20 crc kubenswrapper[4558]: I0120 17:59:20.499573 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-5b68c79f89-ctm52"] Jan 20 17:59:20 crc kubenswrapper[4558]: I0120 17:59:20.573494 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ce022194-8c04-4d08-9d00-c95e9f15a559" path="/var/lib/kubelet/pods/ce022194-8c04-4d08-9d00-c95e9f15a559/volumes" Jan 20 17:59:21 crc kubenswrapper[4558]: I0120 17:59:21.957434 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-cchzz"] Jan 20 17:59:21 crc kubenswrapper[4558]: I0120 17:59:21.961527 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-cchzz"] Jan 20 17:59:22 crc kubenswrapper[4558]: I0120 17:59:22.070902 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-zn958"] Jan 20 17:59:22 crc kubenswrapper[4558]: E0120 17:59:22.071284 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce022194-8c04-4d08-9d00-c95e9f15a559" containerName="dnsmasq-dns" Jan 20 17:59:22 crc kubenswrapper[4558]: I0120 17:59:22.071306 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce022194-8c04-4d08-9d00-c95e9f15a559" containerName="dnsmasq-dns" Jan 20 17:59:22 crc kubenswrapper[4558]: E0120 17:59:22.071323 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c1eae80-b9e5-41b4-b33e-27315787e122" containerName="storage" Jan 20 17:59:22 crc kubenswrapper[4558]: I0120 17:59:22.071330 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c1eae80-b9e5-41b4-b33e-27315787e122" containerName="storage" Jan 20 17:59:22 crc kubenswrapper[4558]: E0120 17:59:22.071347 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce022194-8c04-4d08-9d00-c95e9f15a559" containerName="init" Jan 20 17:59:22 crc kubenswrapper[4558]: I0120 17:59:22.071353 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce022194-8c04-4d08-9d00-c95e9f15a559" containerName="init" Jan 20 17:59:22 crc kubenswrapper[4558]: I0120 17:59:22.071512 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c1eae80-b9e5-41b4-b33e-27315787e122" containerName="storage" Jan 20 17:59:22 crc kubenswrapper[4558]: I0120 17:59:22.071528 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ce022194-8c04-4d08-9d00-c95e9f15a559" containerName="dnsmasq-dns" Jan 20 17:59:22 crc kubenswrapper[4558]: I0120 17:59:22.072114 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-zn958" Jan 20 17:59:22 crc kubenswrapper[4558]: I0120 17:59:22.073668 4558 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-k9ht8" Jan 20 17:59:22 crc kubenswrapper[4558]: I0120 17:59:22.075148 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Jan 20 17:59:22 crc kubenswrapper[4558]: I0120 17:59:22.075393 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Jan 20 17:59:22 crc kubenswrapper[4558]: I0120 17:59:22.075505 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Jan 20 17:59:22 crc kubenswrapper[4558]: I0120 17:59:22.087491 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-zn958"] Jan 20 17:59:22 crc kubenswrapper[4558]: I0120 17:59:22.239226 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/64b6908c-6f3f-4189-bd92-7bc168d122b9-crc-storage\") pod \"crc-storage-crc-zn958\" (UID: \"64b6908c-6f3f-4189-bd92-7bc168d122b9\") " pod="crc-storage/crc-storage-crc-zn958" Jan 20 17:59:22 crc kubenswrapper[4558]: I0120 17:59:22.239317 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/64b6908c-6f3f-4189-bd92-7bc168d122b9-node-mnt\") pod \"crc-storage-crc-zn958\" (UID: \"64b6908c-6f3f-4189-bd92-7bc168d122b9\") " pod="crc-storage/crc-storage-crc-zn958" Jan 20 17:59:22 crc kubenswrapper[4558]: I0120 17:59:22.239353 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mjcqt\" (UniqueName: \"kubernetes.io/projected/64b6908c-6f3f-4189-bd92-7bc168d122b9-kube-api-access-mjcqt\") pod \"crc-storage-crc-zn958\" (UID: \"64b6908c-6f3f-4189-bd92-7bc168d122b9\") " pod="crc-storage/crc-storage-crc-zn958" Jan 20 17:59:22 crc kubenswrapper[4558]: I0120 17:59:22.340156 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/64b6908c-6f3f-4189-bd92-7bc168d122b9-crc-storage\") pod \"crc-storage-crc-zn958\" (UID: \"64b6908c-6f3f-4189-bd92-7bc168d122b9\") " pod="crc-storage/crc-storage-crc-zn958" Jan 20 17:59:22 crc kubenswrapper[4558]: I0120 17:59:22.340269 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/64b6908c-6f3f-4189-bd92-7bc168d122b9-node-mnt\") pod \"crc-storage-crc-zn958\" (UID: \"64b6908c-6f3f-4189-bd92-7bc168d122b9\") " pod="crc-storage/crc-storage-crc-zn958" Jan 20 17:59:22 crc kubenswrapper[4558]: I0120 17:59:22.340298 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mjcqt\" (UniqueName: \"kubernetes.io/projected/64b6908c-6f3f-4189-bd92-7bc168d122b9-kube-api-access-mjcqt\") pod \"crc-storage-crc-zn958\" (UID: \"64b6908c-6f3f-4189-bd92-7bc168d122b9\") " pod="crc-storage/crc-storage-crc-zn958" Jan 20 17:59:22 crc kubenswrapper[4558]: I0120 17:59:22.340610 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/64b6908c-6f3f-4189-bd92-7bc168d122b9-node-mnt\") pod \"crc-storage-crc-zn958\" (UID: \"64b6908c-6f3f-4189-bd92-7bc168d122b9\") " 
pod="crc-storage/crc-storage-crc-zn958" Jan 20 17:59:22 crc kubenswrapper[4558]: I0120 17:59:22.341053 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/64b6908c-6f3f-4189-bd92-7bc168d122b9-crc-storage\") pod \"crc-storage-crc-zn958\" (UID: \"64b6908c-6f3f-4189-bd92-7bc168d122b9\") " pod="crc-storage/crc-storage-crc-zn958" Jan 20 17:59:22 crc kubenswrapper[4558]: I0120 17:59:22.357334 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mjcqt\" (UniqueName: \"kubernetes.io/projected/64b6908c-6f3f-4189-bd92-7bc168d122b9-kube-api-access-mjcqt\") pod \"crc-storage-crc-zn958\" (UID: \"64b6908c-6f3f-4189-bd92-7bc168d122b9\") " pod="crc-storage/crc-storage-crc-zn958" Jan 20 17:59:22 crc kubenswrapper[4558]: I0120 17:59:22.387467 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-zn958" Jan 20 17:59:22 crc kubenswrapper[4558]: I0120 17:59:22.574694 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6c1eae80-b9e5-41b4-b33e-27315787e122" path="/var/lib/kubelet/pods/6c1eae80-b9e5-41b4-b33e-27315787e122/volumes" Jan 20 17:59:22 crc kubenswrapper[4558]: I0120 17:59:22.776114 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-zn958"] Jan 20 17:59:23 crc kubenswrapper[4558]: I0120 17:59:23.494328 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-zn958" event={"ID":"64b6908c-6f3f-4189-bd92-7bc168d122b9","Type":"ContainerStarted","Data":"f96795ab6454bb53b1a32cb4c873624dc17c79dc60511e54ae22f26f4199e205"} Jan 20 17:59:24 crc kubenswrapper[4558]: I0120 17:59:24.505446 4558 generic.go:334] "Generic (PLEG): container finished" podID="64b6908c-6f3f-4189-bd92-7bc168d122b9" containerID="7310cf3e7f48f89d7e00a18bb1a190b6e8a5082773a3a4ced4740251c39dede3" exitCode=0 Jan 20 17:59:24 crc kubenswrapper[4558]: I0120 17:59:24.505500 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-zn958" event={"ID":"64b6908c-6f3f-4189-bd92-7bc168d122b9","Type":"ContainerDied","Data":"7310cf3e7f48f89d7e00a18bb1a190b6e8a5082773a3a4ced4740251c39dede3"} Jan 20 17:59:25 crc kubenswrapper[4558]: I0120 17:59:25.756927 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-zn958" Jan 20 17:59:25 crc kubenswrapper[4558]: I0120 17:59:25.891660 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/64b6908c-6f3f-4189-bd92-7bc168d122b9-node-mnt\") pod \"64b6908c-6f3f-4189-bd92-7bc168d122b9\" (UID: \"64b6908c-6f3f-4189-bd92-7bc168d122b9\") " Jan 20 17:59:25 crc kubenswrapper[4558]: I0120 17:59:25.891740 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mjcqt\" (UniqueName: \"kubernetes.io/projected/64b6908c-6f3f-4189-bd92-7bc168d122b9-kube-api-access-mjcqt\") pod \"64b6908c-6f3f-4189-bd92-7bc168d122b9\" (UID: \"64b6908c-6f3f-4189-bd92-7bc168d122b9\") " Jan 20 17:59:25 crc kubenswrapper[4558]: I0120 17:59:25.891774 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/64b6908c-6f3f-4189-bd92-7bc168d122b9-crc-storage\") pod \"64b6908c-6f3f-4189-bd92-7bc168d122b9\" (UID: \"64b6908c-6f3f-4189-bd92-7bc168d122b9\") " Jan 20 17:59:25 crc kubenswrapper[4558]: I0120 17:59:25.891971 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/64b6908c-6f3f-4189-bd92-7bc168d122b9-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "64b6908c-6f3f-4189-bd92-7bc168d122b9" (UID: "64b6908c-6f3f-4189-bd92-7bc168d122b9"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 17:59:25 crc kubenswrapper[4558]: I0120 17:59:25.892251 4558 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/64b6908c-6f3f-4189-bd92-7bc168d122b9-node-mnt\") on node \"crc\" DevicePath \"\"" Jan 20 17:59:25 crc kubenswrapper[4558]: I0120 17:59:25.897975 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/64b6908c-6f3f-4189-bd92-7bc168d122b9-kube-api-access-mjcqt" (OuterVolumeSpecName: "kube-api-access-mjcqt") pod "64b6908c-6f3f-4189-bd92-7bc168d122b9" (UID: "64b6908c-6f3f-4189-bd92-7bc168d122b9"). InnerVolumeSpecName "kube-api-access-mjcqt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:59:25 crc kubenswrapper[4558]: I0120 17:59:25.909456 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/64b6908c-6f3f-4189-bd92-7bc168d122b9-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "64b6908c-6f3f-4189-bd92-7bc168d122b9" (UID: "64b6908c-6f3f-4189-bd92-7bc168d122b9"). InnerVolumeSpecName "crc-storage". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:59:25 crc kubenswrapper[4558]: I0120 17:59:25.994444 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mjcqt\" (UniqueName: \"kubernetes.io/projected/64b6908c-6f3f-4189-bd92-7bc168d122b9-kube-api-access-mjcqt\") on node \"crc\" DevicePath \"\"" Jan 20 17:59:25 crc kubenswrapper[4558]: I0120 17:59:25.994481 4558 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/64b6908c-6f3f-4189-bd92-7bc168d122b9-crc-storage\") on node \"crc\" DevicePath \"\"" Jan 20 17:59:26 crc kubenswrapper[4558]: I0120 17:59:26.526511 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-zn958" event={"ID":"64b6908c-6f3f-4189-bd92-7bc168d122b9","Type":"ContainerDied","Data":"f96795ab6454bb53b1a32cb4c873624dc17c79dc60511e54ae22f26f4199e205"} Jan 20 17:59:26 crc kubenswrapper[4558]: I0120 17:59:26.526567 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f96795ab6454bb53b1a32cb4c873624dc17c79dc60511e54ae22f26f4199e205" Jan 20 17:59:26 crc kubenswrapper[4558]: I0120 17:59:26.526567 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-zn958" Jan 20 17:59:27 crc kubenswrapper[4558]: I0120 17:59:27.330518 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 17:59:27 crc kubenswrapper[4558]: I0120 17:59:27.330602 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 17:59:28 crc kubenswrapper[4558]: I0120 17:59:28.884669 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-7d78464677-qck2g"] Jan 20 17:59:28 crc kubenswrapper[4558]: E0120 17:59:28.885002 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64b6908c-6f3f-4189-bd92-7bc168d122b9" containerName="storage" Jan 20 17:59:28 crc kubenswrapper[4558]: I0120 17:59:28.885015 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="64b6908c-6f3f-4189-bd92-7bc168d122b9" containerName="storage" Jan 20 17:59:28 crc kubenswrapper[4558]: I0120 17:59:28.885134 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="64b6908c-6f3f-4189-bd92-7bc168d122b9" containerName="storage" Jan 20 17:59:28 crc kubenswrapper[4558]: I0120 17:59:28.885802 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7d78464677-qck2g" Jan 20 17:59:28 crc kubenswrapper[4558]: I0120 17:59:28.888008 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"edpm-compute-global" Jan 20 17:59:28 crc kubenswrapper[4558]: I0120 17:59:28.900498 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-7d78464677-qck2g"] Jan 20 17:59:28 crc kubenswrapper[4558]: I0120 17:59:28.935830 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c224b81e-d5b0-4b28-86ce-04a9f616a986-config\") pod \"dnsmasq-dnsmasq-7d78464677-qck2g\" (UID: \"c224b81e-d5b0-4b28-86ce-04a9f616a986\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7d78464677-qck2g" Jan 20 17:59:28 crc kubenswrapper[4558]: I0120 17:59:28.935886 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2jqc9\" (UniqueName: \"kubernetes.io/projected/c224b81e-d5b0-4b28-86ce-04a9f616a986-kube-api-access-2jqc9\") pod \"dnsmasq-dnsmasq-7d78464677-qck2g\" (UID: \"c224b81e-d5b0-4b28-86ce-04a9f616a986\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7d78464677-qck2g" Jan 20 17:59:28 crc kubenswrapper[4558]: I0120 17:59:28.935909 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"edpm-compute-global\" (UniqueName: \"kubernetes.io/configmap/c224b81e-d5b0-4b28-86ce-04a9f616a986-edpm-compute-global\") pod \"dnsmasq-dnsmasq-7d78464677-qck2g\" (UID: \"c224b81e-d5b0-4b28-86ce-04a9f616a986\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7d78464677-qck2g" Jan 20 17:59:28 crc kubenswrapper[4558]: I0120 17:59:28.935990 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/c224b81e-d5b0-4b28-86ce-04a9f616a986-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-7d78464677-qck2g\" (UID: \"c224b81e-d5b0-4b28-86ce-04a9f616a986\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7d78464677-qck2g" Jan 20 17:59:29 crc kubenswrapper[4558]: I0120 17:59:29.038233 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c224b81e-d5b0-4b28-86ce-04a9f616a986-config\") pod \"dnsmasq-dnsmasq-7d78464677-qck2g\" (UID: \"c224b81e-d5b0-4b28-86ce-04a9f616a986\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7d78464677-qck2g" Jan 20 17:59:29 crc kubenswrapper[4558]: I0120 17:59:29.038337 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2jqc9\" (UniqueName: \"kubernetes.io/projected/c224b81e-d5b0-4b28-86ce-04a9f616a986-kube-api-access-2jqc9\") pod \"dnsmasq-dnsmasq-7d78464677-qck2g\" (UID: \"c224b81e-d5b0-4b28-86ce-04a9f616a986\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7d78464677-qck2g" Jan 20 17:59:29 crc kubenswrapper[4558]: I0120 17:59:29.038366 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"edpm-compute-global\" (UniqueName: \"kubernetes.io/configmap/c224b81e-d5b0-4b28-86ce-04a9f616a986-edpm-compute-global\") pod \"dnsmasq-dnsmasq-7d78464677-qck2g\" (UID: \"c224b81e-d5b0-4b28-86ce-04a9f616a986\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7d78464677-qck2g" Jan 20 17:59:29 crc kubenswrapper[4558]: I0120 17:59:29.038414 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dnsmasq-svc\" 
(UniqueName: \"kubernetes.io/configmap/c224b81e-d5b0-4b28-86ce-04a9f616a986-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-7d78464677-qck2g\" (UID: \"c224b81e-d5b0-4b28-86ce-04a9f616a986\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7d78464677-qck2g" Jan 20 17:59:29 crc kubenswrapper[4558]: I0120 17:59:29.039447 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/c224b81e-d5b0-4b28-86ce-04a9f616a986-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-7d78464677-qck2g\" (UID: \"c224b81e-d5b0-4b28-86ce-04a9f616a986\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7d78464677-qck2g" Jan 20 17:59:29 crc kubenswrapper[4558]: I0120 17:59:29.039473 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c224b81e-d5b0-4b28-86ce-04a9f616a986-config\") pod \"dnsmasq-dnsmasq-7d78464677-qck2g\" (UID: \"c224b81e-d5b0-4b28-86ce-04a9f616a986\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7d78464677-qck2g" Jan 20 17:59:29 crc kubenswrapper[4558]: I0120 17:59:29.039646 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"edpm-compute-global\" (UniqueName: \"kubernetes.io/configmap/c224b81e-d5b0-4b28-86ce-04a9f616a986-edpm-compute-global\") pod \"dnsmasq-dnsmasq-7d78464677-qck2g\" (UID: \"c224b81e-d5b0-4b28-86ce-04a9f616a986\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7d78464677-qck2g" Jan 20 17:59:29 crc kubenswrapper[4558]: I0120 17:59:29.056990 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2jqc9\" (UniqueName: \"kubernetes.io/projected/c224b81e-d5b0-4b28-86ce-04a9f616a986-kube-api-access-2jqc9\") pod \"dnsmasq-dnsmasq-7d78464677-qck2g\" (UID: \"c224b81e-d5b0-4b28-86ce-04a9f616a986\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7d78464677-qck2g" Jan 20 17:59:29 crc kubenswrapper[4558]: I0120 17:59:29.204743 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7d78464677-qck2g" Jan 20 17:59:29 crc kubenswrapper[4558]: I0120 17:59:29.609234 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-7d78464677-qck2g"] Jan 20 17:59:29 crc kubenswrapper[4558]: W0120 17:59:29.613031 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc224b81e_d5b0_4b28_86ce_04a9f616a986.slice/crio-64d26cd851b623ce459914a4700384bdaaa05b023ac150b3e9eb6301c89fd693 WatchSource:0}: Error finding container 64d26cd851b623ce459914a4700384bdaaa05b023ac150b3e9eb6301c89fd693: Status 404 returned error can't find the container with id 64d26cd851b623ce459914a4700384bdaaa05b023ac150b3e9eb6301c89fd693 Jan 20 17:59:30 crc kubenswrapper[4558]: I0120 17:59:30.565010 4558 generic.go:334] "Generic (PLEG): container finished" podID="c224b81e-d5b0-4b28-86ce-04a9f616a986" containerID="7461946b9929270e3fffbe2650f18238c1490db415b83867a5e299f29ef5b3f6" exitCode=0 Jan 20 17:59:30 crc kubenswrapper[4558]: I0120 17:59:30.578378 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7d78464677-qck2g" event={"ID":"c224b81e-d5b0-4b28-86ce-04a9f616a986","Type":"ContainerDied","Data":"7461946b9929270e3fffbe2650f18238c1490db415b83867a5e299f29ef5b3f6"} Jan 20 17:59:30 crc kubenswrapper[4558]: I0120 17:59:30.578419 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7d78464677-qck2g" event={"ID":"c224b81e-d5b0-4b28-86ce-04a9f616a986","Type":"ContainerStarted","Data":"64d26cd851b623ce459914a4700384bdaaa05b023ac150b3e9eb6301c89fd693"} Jan 20 17:59:31 crc kubenswrapper[4558]: I0120 17:59:31.578457 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7d78464677-qck2g" event={"ID":"c224b81e-d5b0-4b28-86ce-04a9f616a986","Type":"ContainerStarted","Data":"53550b6bbab1413ad18d77360ae821adcce85e9150bdbdcaf3e557bd7027eb4d"} Jan 20 17:59:31 crc kubenswrapper[4558]: I0120 17:59:31.578591 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7d78464677-qck2g" Jan 20 17:59:31 crc kubenswrapper[4558]: I0120 17:59:31.596896 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7d78464677-qck2g" podStartSLOduration=3.596859064 podStartE2EDuration="3.596859064s" podCreationTimestamp="2026-01-20 17:59:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 17:59:31.594630075 +0000 UTC m=+4665.354968041" watchObservedRunningTime="2026-01-20 17:59:31.596859064 +0000 UTC m=+4665.357197031" Jan 20 17:59:39 crc kubenswrapper[4558]: I0120 17:59:39.206353 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7d78464677-qck2g" Jan 20 17:59:39 crc kubenswrapper[4558]: I0120 17:59:39.255485 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-9pmhk"] Jan 20 17:59:39 crc kubenswrapper[4558]: I0120 17:59:39.256025 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-9pmhk" podUID="b76c9805-2cef-461b-85d8-d4848ac6ed1e" containerName="dnsmasq-dns" 
containerID="cri-o://6420d15433b5356b42cb5b21c01089cb101f0b9577e22ab79a6900a469b9421c" gracePeriod=10 Jan 20 17:59:39 crc kubenswrapper[4558]: I0120 17:59:39.645799 4558 generic.go:334] "Generic (PLEG): container finished" podID="b76c9805-2cef-461b-85d8-d4848ac6ed1e" containerID="6420d15433b5356b42cb5b21c01089cb101f0b9577e22ab79a6900a469b9421c" exitCode=0 Jan 20 17:59:39 crc kubenswrapper[4558]: I0120 17:59:39.645869 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-9pmhk" event={"ID":"b76c9805-2cef-461b-85d8-d4848ac6ed1e","Type":"ContainerDied","Data":"6420d15433b5356b42cb5b21c01089cb101f0b9577e22ab79a6900a469b9421c"} Jan 20 17:59:39 crc kubenswrapper[4558]: I0120 17:59:39.946668 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-9pmhk" Jan 20 17:59:40 crc kubenswrapper[4558]: I0120 17:59:40.112561 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mvmhv\" (UniqueName: \"kubernetes.io/projected/b76c9805-2cef-461b-85d8-d4848ac6ed1e-kube-api-access-mvmhv\") pod \"b76c9805-2cef-461b-85d8-d4848ac6ed1e\" (UID: \"b76c9805-2cef-461b-85d8-d4848ac6ed1e\") " Jan 20 17:59:40 crc kubenswrapper[4558]: I0120 17:59:40.112815 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/b76c9805-2cef-461b-85d8-d4848ac6ed1e-dnsmasq-svc\") pod \"b76c9805-2cef-461b-85d8-d4848ac6ed1e\" (UID: \"b76c9805-2cef-461b-85d8-d4848ac6ed1e\") " Jan 20 17:59:40 crc kubenswrapper[4558]: I0120 17:59:40.112867 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b76c9805-2cef-461b-85d8-d4848ac6ed1e-config\") pod \"b76c9805-2cef-461b-85d8-d4848ac6ed1e\" (UID: \"b76c9805-2cef-461b-85d8-d4848ac6ed1e\") " Jan 20 17:59:40 crc kubenswrapper[4558]: I0120 17:59:40.120562 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b76c9805-2cef-461b-85d8-d4848ac6ed1e-kube-api-access-mvmhv" (OuterVolumeSpecName: "kube-api-access-mvmhv") pod "b76c9805-2cef-461b-85d8-d4848ac6ed1e" (UID: "b76c9805-2cef-461b-85d8-d4848ac6ed1e"). InnerVolumeSpecName "kube-api-access-mvmhv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:59:40 crc kubenswrapper[4558]: I0120 17:59:40.144529 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b76c9805-2cef-461b-85d8-d4848ac6ed1e-config" (OuterVolumeSpecName: "config") pod "b76c9805-2cef-461b-85d8-d4848ac6ed1e" (UID: "b76c9805-2cef-461b-85d8-d4848ac6ed1e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:59:40 crc kubenswrapper[4558]: I0120 17:59:40.147983 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b76c9805-2cef-461b-85d8-d4848ac6ed1e-dnsmasq-svc" (OuterVolumeSpecName: "dnsmasq-svc") pod "b76c9805-2cef-461b-85d8-d4848ac6ed1e" (UID: "b76c9805-2cef-461b-85d8-d4848ac6ed1e"). InnerVolumeSpecName "dnsmasq-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 17:59:40 crc kubenswrapper[4558]: I0120 17:59:40.214864 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mvmhv\" (UniqueName: \"kubernetes.io/projected/b76c9805-2cef-461b-85d8-d4848ac6ed1e-kube-api-access-mvmhv\") on node \"crc\" DevicePath \"\"" Jan 20 17:59:40 crc kubenswrapper[4558]: I0120 17:59:40.214902 4558 reconciler_common.go:293] "Volume detached for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/b76c9805-2cef-461b-85d8-d4848ac6ed1e-dnsmasq-svc\") on node \"crc\" DevicePath \"\"" Jan 20 17:59:40 crc kubenswrapper[4558]: I0120 17:59:40.214914 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b76c9805-2cef-461b-85d8-d4848ac6ed1e-config\") on node \"crc\" DevicePath \"\"" Jan 20 17:59:40 crc kubenswrapper[4558]: I0120 17:59:40.661218 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-9pmhk" event={"ID":"b76c9805-2cef-461b-85d8-d4848ac6ed1e","Type":"ContainerDied","Data":"9c512d751c93f7532d32cfa3109988d72c4449d8c37616352e99a2c2e447970c"} Jan 20 17:59:40 crc kubenswrapper[4558]: I0120 17:59:40.661278 4558 scope.go:117] "RemoveContainer" containerID="6420d15433b5356b42cb5b21c01089cb101f0b9577e22ab79a6900a469b9421c" Jan 20 17:59:40 crc kubenswrapper[4558]: I0120 17:59:40.661425 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-9pmhk" Jan 20 17:59:40 crc kubenswrapper[4558]: I0120 17:59:40.682346 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-9pmhk"] Jan 20 17:59:40 crc kubenswrapper[4558]: I0120 17:59:40.683728 4558 scope.go:117] "RemoveContainer" containerID="e2ef94d90c4e131b0b9b303ed386f34bb3f7aafba75048b96dd0117cfd812a16" Jan 20 17:59:40 crc kubenswrapper[4558]: I0120 17:59:40.690316 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-9pmhk"] Jan 20 17:59:42 crc kubenswrapper[4558]: I0120 17:59:42.590992 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b76c9805-2cef-461b-85d8-d4848ac6ed1e" path="/var/lib/kubelet/pods/b76c9805-2cef-461b-85d8-d4848ac6ed1e/volumes" Jan 20 17:59:44 crc kubenswrapper[4558]: I0120 17:59:44.833296 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/download-cache-edpm-compute-global-edpm-compute-global-27fb2"] Jan 20 17:59:44 crc kubenswrapper[4558]: E0120 17:59:44.834688 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b76c9805-2cef-461b-85d8-d4848ac6ed1e" containerName="init" Jan 20 17:59:44 crc kubenswrapper[4558]: I0120 17:59:44.834772 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b76c9805-2cef-461b-85d8-d4848ac6ed1e" containerName="init" Jan 20 17:59:44 crc kubenswrapper[4558]: E0120 17:59:44.834844 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b76c9805-2cef-461b-85d8-d4848ac6ed1e" containerName="dnsmasq-dns" Jan 20 17:59:44 crc kubenswrapper[4558]: I0120 17:59:44.834920 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b76c9805-2cef-461b-85d8-d4848ac6ed1e" containerName="dnsmasq-dns" Jan 20 17:59:44 crc kubenswrapper[4558]: I0120 17:59:44.835135 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b76c9805-2cef-461b-85d8-d4848ac6ed1e" containerName="dnsmasq-dns" Jan 20 17:59:44 crc 
kubenswrapper[4558]: I0120 17:59:44.835671 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/download-cache-edpm-compute-global-edpm-compute-global-27fb2" Jan 20 17:59:44 crc kubenswrapper[4558]: I0120 17:59:44.837532 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-edpm-compute-global" Jan 20 17:59:44 crc kubenswrapper[4558]: I0120 17:59:44.837620 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 17:59:44 crc kubenswrapper[4558]: I0120 17:59:44.838302 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 17:59:44 crc kubenswrapper[4558]: I0120 17:59:44.839246 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"edpm-compute-global-dockercfg-wkdxg" Jan 20 17:59:44 crc kubenswrapper[4558]: I0120 17:59:44.845460 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/download-cache-edpm-compute-global-edpm-compute-global-27fb2"] Jan 20 17:59:44 crc kubenswrapper[4558]: I0120 17:59:44.881107 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d7f4122e-af2f-41dd-9e4f-0b99d9457e82-inventory\") pod \"download-cache-edpm-compute-global-edpm-compute-global-27fb2\" (UID: \"d7f4122e-af2f-41dd-9e4f-0b99d9457e82\") " pod="openstack-kuttl-tests/download-cache-edpm-compute-global-edpm-compute-global-27fb2" Jan 20 17:59:44 crc kubenswrapper[4558]: I0120 17:59:44.881283 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/d7f4122e-af2f-41dd-9e4f-0b99d9457e82-ssh-key-edpm-compute-global\") pod \"download-cache-edpm-compute-global-edpm-compute-global-27fb2\" (UID: \"d7f4122e-af2f-41dd-9e4f-0b99d9457e82\") " pod="openstack-kuttl-tests/download-cache-edpm-compute-global-edpm-compute-global-27fb2" Jan 20 17:59:44 crc kubenswrapper[4558]: I0120 17:59:44.881318 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k5vsm\" (UniqueName: \"kubernetes.io/projected/d7f4122e-af2f-41dd-9e4f-0b99d9457e82-kube-api-access-k5vsm\") pod \"download-cache-edpm-compute-global-edpm-compute-global-27fb2\" (UID: \"d7f4122e-af2f-41dd-9e4f-0b99d9457e82\") " pod="openstack-kuttl-tests/download-cache-edpm-compute-global-edpm-compute-global-27fb2" Jan 20 17:59:44 crc kubenswrapper[4558]: I0120 17:59:44.983986 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k5vsm\" (UniqueName: \"kubernetes.io/projected/d7f4122e-af2f-41dd-9e4f-0b99d9457e82-kube-api-access-k5vsm\") pod \"download-cache-edpm-compute-global-edpm-compute-global-27fb2\" (UID: \"d7f4122e-af2f-41dd-9e4f-0b99d9457e82\") " pod="openstack-kuttl-tests/download-cache-edpm-compute-global-edpm-compute-global-27fb2" Jan 20 17:59:44 crc kubenswrapper[4558]: I0120 17:59:44.984316 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d7f4122e-af2f-41dd-9e4f-0b99d9457e82-inventory\") pod \"download-cache-edpm-compute-global-edpm-compute-global-27fb2\" (UID: \"d7f4122e-af2f-41dd-9e4f-0b99d9457e82\") " 
pod="openstack-kuttl-tests/download-cache-edpm-compute-global-edpm-compute-global-27fb2" Jan 20 17:59:44 crc kubenswrapper[4558]: I0120 17:59:44.984570 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/d7f4122e-af2f-41dd-9e4f-0b99d9457e82-ssh-key-edpm-compute-global\") pod \"download-cache-edpm-compute-global-edpm-compute-global-27fb2\" (UID: \"d7f4122e-af2f-41dd-9e4f-0b99d9457e82\") " pod="openstack-kuttl-tests/download-cache-edpm-compute-global-edpm-compute-global-27fb2" Jan 20 17:59:44 crc kubenswrapper[4558]: I0120 17:59:44.991763 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/d7f4122e-af2f-41dd-9e4f-0b99d9457e82-ssh-key-edpm-compute-global\") pod \"download-cache-edpm-compute-global-edpm-compute-global-27fb2\" (UID: \"d7f4122e-af2f-41dd-9e4f-0b99d9457e82\") " pod="openstack-kuttl-tests/download-cache-edpm-compute-global-edpm-compute-global-27fb2" Jan 20 17:59:44 crc kubenswrapper[4558]: I0120 17:59:44.991788 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d7f4122e-af2f-41dd-9e4f-0b99d9457e82-inventory\") pod \"download-cache-edpm-compute-global-edpm-compute-global-27fb2\" (UID: \"d7f4122e-af2f-41dd-9e4f-0b99d9457e82\") " pod="openstack-kuttl-tests/download-cache-edpm-compute-global-edpm-compute-global-27fb2" Jan 20 17:59:44 crc kubenswrapper[4558]: I0120 17:59:44.998217 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k5vsm\" (UniqueName: \"kubernetes.io/projected/d7f4122e-af2f-41dd-9e4f-0b99d9457e82-kube-api-access-k5vsm\") pod \"download-cache-edpm-compute-global-edpm-compute-global-27fb2\" (UID: \"d7f4122e-af2f-41dd-9e4f-0b99d9457e82\") " pod="openstack-kuttl-tests/download-cache-edpm-compute-global-edpm-compute-global-27fb2" Jan 20 17:59:45 crc kubenswrapper[4558]: I0120 17:59:45.150692 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/download-cache-edpm-compute-global-edpm-compute-global-27fb2" Jan 20 17:59:45 crc kubenswrapper[4558]: I0120 17:59:45.718188 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/download-cache-edpm-compute-global-edpm-compute-global-27fb2"] Jan 20 17:59:46 crc kubenswrapper[4558]: I0120 17:59:46.727158 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/download-cache-edpm-compute-global-edpm-compute-global-27fb2" event={"ID":"d7f4122e-af2f-41dd-9e4f-0b99d9457e82","Type":"ContainerStarted","Data":"37e98f4e7df318ce2522dd1d1c50e8dc41fc3ac592ccf8b4487f4439042712b1"} Jan 20 17:59:54 crc kubenswrapper[4558]: I0120 17:59:54.551682 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 17:59:54 crc kubenswrapper[4558]: I0120 17:59:54.848681 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/download-cache-edpm-compute-global-edpm-compute-global-27fb2" event={"ID":"d7f4122e-af2f-41dd-9e4f-0b99d9457e82","Type":"ContainerStarted","Data":"1c439f89c744152402cb33f807cfdfc137423ba15d981e9a3577d1ec69c10d38"} Jan 20 17:59:54 crc kubenswrapper[4558]: I0120 17:59:54.869681 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/download-cache-edpm-compute-global-edpm-compute-global-27fb2" podStartSLOduration=2.047156691 podStartE2EDuration="10.869661823s" podCreationTimestamp="2026-01-20 17:59:44 +0000 UTC" firstStartedPulling="2026-01-20 17:59:45.727113846 +0000 UTC m=+4679.487451812" lastFinishedPulling="2026-01-20 17:59:54.549618977 +0000 UTC m=+4688.309956944" observedRunningTime="2026-01-20 17:59:54.863758227 +0000 UTC m=+4688.624096193" watchObservedRunningTime="2026-01-20 17:59:54.869661823 +0000 UTC m=+4688.629999790" Jan 20 17:59:55 crc kubenswrapper[4558]: I0120 17:59:55.861149 4558 generic.go:334] "Generic (PLEG): container finished" podID="d7f4122e-af2f-41dd-9e4f-0b99d9457e82" containerID="1c439f89c744152402cb33f807cfdfc137423ba15d981e9a3577d1ec69c10d38" exitCode=0 Jan 20 17:59:55 crc kubenswrapper[4558]: I0120 17:59:55.861229 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/download-cache-edpm-compute-global-edpm-compute-global-27fb2" event={"ID":"d7f4122e-af2f-41dd-9e4f-0b99d9457e82","Type":"ContainerDied","Data":"1c439f89c744152402cb33f807cfdfc137423ba15d981e9a3577d1ec69c10d38"} Jan 20 17:59:57 crc kubenswrapper[4558]: I0120 17:59:57.219264 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/download-cache-edpm-compute-global-edpm-compute-global-27fb2" Jan 20 17:59:57 crc kubenswrapper[4558]: I0120 17:59:57.287775 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k5vsm\" (UniqueName: \"kubernetes.io/projected/d7f4122e-af2f-41dd-9e4f-0b99d9457e82-kube-api-access-k5vsm\") pod \"d7f4122e-af2f-41dd-9e4f-0b99d9457e82\" (UID: \"d7f4122e-af2f-41dd-9e4f-0b99d9457e82\") " Jan 20 17:59:57 crc kubenswrapper[4558]: I0120 17:59:57.287815 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/d7f4122e-af2f-41dd-9e4f-0b99d9457e82-ssh-key-edpm-compute-global\") pod \"d7f4122e-af2f-41dd-9e4f-0b99d9457e82\" (UID: \"d7f4122e-af2f-41dd-9e4f-0b99d9457e82\") " Jan 20 17:59:57 crc kubenswrapper[4558]: I0120 17:59:57.287857 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d7f4122e-af2f-41dd-9e4f-0b99d9457e82-inventory\") pod \"d7f4122e-af2f-41dd-9e4f-0b99d9457e82\" (UID: \"d7f4122e-af2f-41dd-9e4f-0b99d9457e82\") " Jan 20 17:59:57 crc kubenswrapper[4558]: I0120 17:59:57.293225 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d7f4122e-af2f-41dd-9e4f-0b99d9457e82-kube-api-access-k5vsm" (OuterVolumeSpecName: "kube-api-access-k5vsm") pod "d7f4122e-af2f-41dd-9e4f-0b99d9457e82" (UID: "d7f4122e-af2f-41dd-9e4f-0b99d9457e82"). InnerVolumeSpecName "kube-api-access-k5vsm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 17:59:57 crc kubenswrapper[4558]: I0120 17:59:57.306577 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7f4122e-af2f-41dd-9e4f-0b99d9457e82-ssh-key-edpm-compute-global" (OuterVolumeSpecName: "ssh-key-edpm-compute-global") pod "d7f4122e-af2f-41dd-9e4f-0b99d9457e82" (UID: "d7f4122e-af2f-41dd-9e4f-0b99d9457e82"). InnerVolumeSpecName "ssh-key-edpm-compute-global". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:59:57 crc kubenswrapper[4558]: I0120 17:59:57.307207 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7f4122e-af2f-41dd-9e4f-0b99d9457e82-inventory" (OuterVolumeSpecName: "inventory") pod "d7f4122e-af2f-41dd-9e4f-0b99d9457e82" (UID: "d7f4122e-af2f-41dd-9e4f-0b99d9457e82"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 17:59:57 crc kubenswrapper[4558]: I0120 17:59:57.330152 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 17:59:57 crc kubenswrapper[4558]: I0120 17:59:57.330220 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 17:59:57 crc kubenswrapper[4558]: I0120 17:59:57.389606 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k5vsm\" (UniqueName: \"kubernetes.io/projected/d7f4122e-af2f-41dd-9e4f-0b99d9457e82-kube-api-access-k5vsm\") on node \"crc\" DevicePath \"\"" Jan 20 17:59:57 crc kubenswrapper[4558]: I0120 17:59:57.389640 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/d7f4122e-af2f-41dd-9e4f-0b99d9457e82-ssh-key-edpm-compute-global\") on node \"crc\" DevicePath \"\"" Jan 20 17:59:57 crc kubenswrapper[4558]: I0120 17:59:57.389656 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d7f4122e-af2f-41dd-9e4f-0b99d9457e82-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 17:59:57 crc kubenswrapper[4558]: I0120 17:59:57.880353 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/download-cache-edpm-compute-global-edpm-compute-global-27fb2" event={"ID":"d7f4122e-af2f-41dd-9e4f-0b99d9457e82","Type":"ContainerDied","Data":"37e98f4e7df318ce2522dd1d1c50e8dc41fc3ac592ccf8b4487f4439042712b1"} Jan 20 17:59:57 crc kubenswrapper[4558]: I0120 17:59:57.880862 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="37e98f4e7df318ce2522dd1d1c50e8dc41fc3ac592ccf8b4487f4439042712b1" Jan 20 17:59:57 crc kubenswrapper[4558]: I0120 17:59:57.880401 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/download-cache-edpm-compute-global-edpm-compute-global-27fb2" Jan 20 17:59:57 crc kubenswrapper[4558]: I0120 17:59:57.943664 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/bootstrap-edpm-compute-global-edpm-compute-global-lbtv4"] Jan 20 17:59:57 crc kubenswrapper[4558]: E0120 17:59:57.944056 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7f4122e-af2f-41dd-9e4f-0b99d9457e82" containerName="download-cache-edpm-compute-global-edpm-compute-global" Jan 20 17:59:57 crc kubenswrapper[4558]: I0120 17:59:57.944077 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7f4122e-af2f-41dd-9e4f-0b99d9457e82" containerName="download-cache-edpm-compute-global-edpm-compute-global" Jan 20 17:59:57 crc kubenswrapper[4558]: I0120 17:59:57.944299 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d7f4122e-af2f-41dd-9e4f-0b99d9457e82" containerName="download-cache-edpm-compute-global-edpm-compute-global" Jan 20 17:59:57 crc kubenswrapper[4558]: I0120 17:59:57.944952 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/bootstrap-edpm-compute-global-edpm-compute-global-lbtv4" Jan 20 17:59:57 crc kubenswrapper[4558]: I0120 17:59:57.946617 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"combined-ca-bundle" Jan 20 17:59:57 crc kubenswrapper[4558]: I0120 17:59:57.946874 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 17:59:57 crc kubenswrapper[4558]: I0120 17:59:57.947123 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"edpm-compute-global-dockercfg-wkdxg" Jan 20 17:59:57 crc kubenswrapper[4558]: I0120 17:59:57.947718 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 17:59:57 crc kubenswrapper[4558]: I0120 17:59:57.949611 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-edpm-compute-global" Jan 20 17:59:57 crc kubenswrapper[4558]: I0120 17:59:57.953635 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/bootstrap-edpm-compute-global-edpm-compute-global-lbtv4"] Jan 20 17:59:58 crc kubenswrapper[4558]: I0120 17:59:58.104406 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ssx2g\" (UniqueName: \"kubernetes.io/projected/1fa9a21f-9bed-423c-b4a0-439b614b88fc-kube-api-access-ssx2g\") pod \"bootstrap-edpm-compute-global-edpm-compute-global-lbtv4\" (UID: \"1fa9a21f-9bed-423c-b4a0-439b614b88fc\") " pod="openstack-kuttl-tests/bootstrap-edpm-compute-global-edpm-compute-global-lbtv4" Jan 20 17:59:58 crc kubenswrapper[4558]: I0120 17:59:58.104480 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1fa9a21f-9bed-423c-b4a0-439b614b88fc-inventory\") pod \"bootstrap-edpm-compute-global-edpm-compute-global-lbtv4\" (UID: \"1fa9a21f-9bed-423c-b4a0-439b614b88fc\") " pod="openstack-kuttl-tests/bootstrap-edpm-compute-global-edpm-compute-global-lbtv4" Jan 20 17:59:58 crc kubenswrapper[4558]: I0120 17:59:58.104521 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fa9a21f-9bed-423c-b4a0-439b614b88fc-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-compute-global-edpm-compute-global-lbtv4\" (UID: \"1fa9a21f-9bed-423c-b4a0-439b614b88fc\") " pod="openstack-kuttl-tests/bootstrap-edpm-compute-global-edpm-compute-global-lbtv4" Jan 20 17:59:58 crc kubenswrapper[4558]: I0120 17:59:58.104689 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/1fa9a21f-9bed-423c-b4a0-439b614b88fc-ssh-key-edpm-compute-global\") pod \"bootstrap-edpm-compute-global-edpm-compute-global-lbtv4\" (UID: \"1fa9a21f-9bed-423c-b4a0-439b614b88fc\") " pod="openstack-kuttl-tests/bootstrap-edpm-compute-global-edpm-compute-global-lbtv4" Jan 20 17:59:58 crc kubenswrapper[4558]: I0120 17:59:58.206149 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ssx2g\" (UniqueName: \"kubernetes.io/projected/1fa9a21f-9bed-423c-b4a0-439b614b88fc-kube-api-access-ssx2g\") pod \"bootstrap-edpm-compute-global-edpm-compute-global-lbtv4\" (UID: 
\"1fa9a21f-9bed-423c-b4a0-439b614b88fc\") " pod="openstack-kuttl-tests/bootstrap-edpm-compute-global-edpm-compute-global-lbtv4" Jan 20 17:59:58 crc kubenswrapper[4558]: I0120 17:59:58.206245 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1fa9a21f-9bed-423c-b4a0-439b614b88fc-inventory\") pod \"bootstrap-edpm-compute-global-edpm-compute-global-lbtv4\" (UID: \"1fa9a21f-9bed-423c-b4a0-439b614b88fc\") " pod="openstack-kuttl-tests/bootstrap-edpm-compute-global-edpm-compute-global-lbtv4" Jan 20 17:59:58 crc kubenswrapper[4558]: I0120 17:59:58.206281 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fa9a21f-9bed-423c-b4a0-439b614b88fc-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-compute-global-edpm-compute-global-lbtv4\" (UID: \"1fa9a21f-9bed-423c-b4a0-439b614b88fc\") " pod="openstack-kuttl-tests/bootstrap-edpm-compute-global-edpm-compute-global-lbtv4" Jan 20 17:59:58 crc kubenswrapper[4558]: I0120 17:59:58.206310 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/1fa9a21f-9bed-423c-b4a0-439b614b88fc-ssh-key-edpm-compute-global\") pod \"bootstrap-edpm-compute-global-edpm-compute-global-lbtv4\" (UID: \"1fa9a21f-9bed-423c-b4a0-439b614b88fc\") " pod="openstack-kuttl-tests/bootstrap-edpm-compute-global-edpm-compute-global-lbtv4" Jan 20 17:59:58 crc kubenswrapper[4558]: I0120 17:59:58.212687 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fa9a21f-9bed-423c-b4a0-439b614b88fc-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-compute-global-edpm-compute-global-lbtv4\" (UID: \"1fa9a21f-9bed-423c-b4a0-439b614b88fc\") " pod="openstack-kuttl-tests/bootstrap-edpm-compute-global-edpm-compute-global-lbtv4" Jan 20 17:59:58 crc kubenswrapper[4558]: I0120 17:59:58.212701 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/1fa9a21f-9bed-423c-b4a0-439b614b88fc-ssh-key-edpm-compute-global\") pod \"bootstrap-edpm-compute-global-edpm-compute-global-lbtv4\" (UID: \"1fa9a21f-9bed-423c-b4a0-439b614b88fc\") " pod="openstack-kuttl-tests/bootstrap-edpm-compute-global-edpm-compute-global-lbtv4" Jan 20 17:59:58 crc kubenswrapper[4558]: I0120 17:59:58.213149 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1fa9a21f-9bed-423c-b4a0-439b614b88fc-inventory\") pod \"bootstrap-edpm-compute-global-edpm-compute-global-lbtv4\" (UID: \"1fa9a21f-9bed-423c-b4a0-439b614b88fc\") " pod="openstack-kuttl-tests/bootstrap-edpm-compute-global-edpm-compute-global-lbtv4" Jan 20 17:59:58 crc kubenswrapper[4558]: I0120 17:59:58.219783 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ssx2g\" (UniqueName: \"kubernetes.io/projected/1fa9a21f-9bed-423c-b4a0-439b614b88fc-kube-api-access-ssx2g\") pod \"bootstrap-edpm-compute-global-edpm-compute-global-lbtv4\" (UID: \"1fa9a21f-9bed-423c-b4a0-439b614b88fc\") " pod="openstack-kuttl-tests/bootstrap-edpm-compute-global-edpm-compute-global-lbtv4" Jan 20 17:59:58 crc kubenswrapper[4558]: I0120 17:59:58.262852 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/bootstrap-edpm-compute-global-edpm-compute-global-lbtv4" Jan 20 17:59:58 crc kubenswrapper[4558]: I0120 17:59:58.647718 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/bootstrap-edpm-compute-global-edpm-compute-global-lbtv4"] Jan 20 17:59:58 crc kubenswrapper[4558]: W0120 17:59:58.651503 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1fa9a21f_9bed_423c_b4a0_439b614b88fc.slice/crio-46ec31577fe4982f21128a74310bebe6aec600da74f71166b2753a70d4dc166d WatchSource:0}: Error finding container 46ec31577fe4982f21128a74310bebe6aec600da74f71166b2753a70d4dc166d: Status 404 returned error can't find the container with id 46ec31577fe4982f21128a74310bebe6aec600da74f71166b2753a70d4dc166d Jan 20 17:59:58 crc kubenswrapper[4558]: I0120 17:59:58.892910 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/bootstrap-edpm-compute-global-edpm-compute-global-lbtv4" event={"ID":"1fa9a21f-9bed-423c-b4a0-439b614b88fc","Type":"ContainerStarted","Data":"46ec31577fe4982f21128a74310bebe6aec600da74f71166b2753a70d4dc166d"} Jan 20 17:59:59 crc kubenswrapper[4558]: I0120 17:59:59.905412 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/bootstrap-edpm-compute-global-edpm-compute-global-lbtv4" event={"ID":"1fa9a21f-9bed-423c-b4a0-439b614b88fc","Type":"ContainerStarted","Data":"a9b20114fcbec2fe8dacb3c269b720dd7f56c474c36782e17d4c5ce75e0f68db"} Jan 20 17:59:59 crc kubenswrapper[4558]: I0120 17:59:59.926400 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/bootstrap-edpm-compute-global-edpm-compute-global-lbtv4" podStartSLOduration=2.314902122 podStartE2EDuration="2.926383387s" podCreationTimestamp="2026-01-20 17:59:57 +0000 UTC" firstStartedPulling="2026-01-20 17:59:58.654105477 +0000 UTC m=+4692.414443434" lastFinishedPulling="2026-01-20 17:59:59.265586732 +0000 UTC m=+4693.025924699" observedRunningTime="2026-01-20 17:59:59.921197719 +0000 UTC m=+4693.681535686" watchObservedRunningTime="2026-01-20 17:59:59.926383387 +0000 UTC m=+4693.686721354" Jan 20 18:00:00 crc kubenswrapper[4558]: I0120 18:00:00.156299 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29482200-wl5jw"] Jan 20 18:00:00 crc kubenswrapper[4558]: I0120 18:00:00.157461 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29482200-wl5jw" Jan 20 18:00:00 crc kubenswrapper[4558]: I0120 18:00:00.159653 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 20 18:00:00 crc kubenswrapper[4558]: I0120 18:00:00.161050 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 20 18:00:00 crc kubenswrapper[4558]: I0120 18:00:00.170819 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29482200-wl5jw"] Jan 20 18:00:00 crc kubenswrapper[4558]: I0120 18:00:00.339896 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wwvkr\" (UniqueName: \"kubernetes.io/projected/f7c2d02a-545b-4766-bdc3-6273e29a76e3-kube-api-access-wwvkr\") pod \"collect-profiles-29482200-wl5jw\" (UID: \"f7c2d02a-545b-4766-bdc3-6273e29a76e3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482200-wl5jw" Jan 20 18:00:00 crc kubenswrapper[4558]: I0120 18:00:00.340124 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f7c2d02a-545b-4766-bdc3-6273e29a76e3-secret-volume\") pod \"collect-profiles-29482200-wl5jw\" (UID: \"f7c2d02a-545b-4766-bdc3-6273e29a76e3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482200-wl5jw" Jan 20 18:00:00 crc kubenswrapper[4558]: I0120 18:00:00.340363 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f7c2d02a-545b-4766-bdc3-6273e29a76e3-config-volume\") pod \"collect-profiles-29482200-wl5jw\" (UID: \"f7c2d02a-545b-4766-bdc3-6273e29a76e3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482200-wl5jw" Jan 20 18:00:00 crc kubenswrapper[4558]: I0120 18:00:00.441798 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wwvkr\" (UniqueName: \"kubernetes.io/projected/f7c2d02a-545b-4766-bdc3-6273e29a76e3-kube-api-access-wwvkr\") pod \"collect-profiles-29482200-wl5jw\" (UID: \"f7c2d02a-545b-4766-bdc3-6273e29a76e3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482200-wl5jw" Jan 20 18:00:00 crc kubenswrapper[4558]: I0120 18:00:00.441900 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f7c2d02a-545b-4766-bdc3-6273e29a76e3-secret-volume\") pod \"collect-profiles-29482200-wl5jw\" (UID: \"f7c2d02a-545b-4766-bdc3-6273e29a76e3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482200-wl5jw" Jan 20 18:00:00 crc kubenswrapper[4558]: I0120 18:00:00.441996 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f7c2d02a-545b-4766-bdc3-6273e29a76e3-config-volume\") pod \"collect-profiles-29482200-wl5jw\" (UID: \"f7c2d02a-545b-4766-bdc3-6273e29a76e3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482200-wl5jw" Jan 20 18:00:00 crc kubenswrapper[4558]: I0120 18:00:00.443064 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f7c2d02a-545b-4766-bdc3-6273e29a76e3-config-volume\") pod 
\"collect-profiles-29482200-wl5jw\" (UID: \"f7c2d02a-545b-4766-bdc3-6273e29a76e3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482200-wl5jw" Jan 20 18:00:00 crc kubenswrapper[4558]: I0120 18:00:00.450538 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f7c2d02a-545b-4766-bdc3-6273e29a76e3-secret-volume\") pod \"collect-profiles-29482200-wl5jw\" (UID: \"f7c2d02a-545b-4766-bdc3-6273e29a76e3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482200-wl5jw" Jan 20 18:00:00 crc kubenswrapper[4558]: I0120 18:00:00.456924 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wwvkr\" (UniqueName: \"kubernetes.io/projected/f7c2d02a-545b-4766-bdc3-6273e29a76e3-kube-api-access-wwvkr\") pod \"collect-profiles-29482200-wl5jw\" (UID: \"f7c2d02a-545b-4766-bdc3-6273e29a76e3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482200-wl5jw" Jan 20 18:00:00 crc kubenswrapper[4558]: I0120 18:00:00.485559 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29482200-wl5jw" Jan 20 18:00:00 crc kubenswrapper[4558]: I0120 18:00:00.872642 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29482200-wl5jw"] Jan 20 18:00:00 crc kubenswrapper[4558]: I0120 18:00:00.915381 4558 generic.go:334] "Generic (PLEG): container finished" podID="1fa9a21f-9bed-423c-b4a0-439b614b88fc" containerID="a9b20114fcbec2fe8dacb3c269b720dd7f56c474c36782e17d4c5ce75e0f68db" exitCode=0 Jan 20 18:00:00 crc kubenswrapper[4558]: I0120 18:00:00.915437 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/bootstrap-edpm-compute-global-edpm-compute-global-lbtv4" event={"ID":"1fa9a21f-9bed-423c-b4a0-439b614b88fc","Type":"ContainerDied","Data":"a9b20114fcbec2fe8dacb3c269b720dd7f56c474c36782e17d4c5ce75e0f68db"} Jan 20 18:00:01 crc kubenswrapper[4558]: I0120 18:00:01.927147 4558 generic.go:334] "Generic (PLEG): container finished" podID="f7c2d02a-545b-4766-bdc3-6273e29a76e3" containerID="eb6fe33e4dda395eccf2487f9595d079173c44e56a22cb3987c1e00b84d32dca" exitCode=0 Jan 20 18:00:01 crc kubenswrapper[4558]: I0120 18:00:01.927211 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29482200-wl5jw" event={"ID":"f7c2d02a-545b-4766-bdc3-6273e29a76e3","Type":"ContainerDied","Data":"eb6fe33e4dda395eccf2487f9595d079173c44e56a22cb3987c1e00b84d32dca"} Jan 20 18:00:01 crc kubenswrapper[4558]: I0120 18:00:01.927621 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29482200-wl5jw" event={"ID":"f7c2d02a-545b-4766-bdc3-6273e29a76e3","Type":"ContainerStarted","Data":"c20330b9fd307d2f8d5e68af6d215acfe6cc1328f5cb5a567b599e5507648ccb"} Jan 20 18:00:02 crc kubenswrapper[4558]: I0120 18:00:02.179463 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/bootstrap-edpm-compute-global-edpm-compute-global-lbtv4" Jan 20 18:00:02 crc kubenswrapper[4558]: I0120 18:00:02.375079 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1fa9a21f-9bed-423c-b4a0-439b614b88fc-inventory\") pod \"1fa9a21f-9bed-423c-b4a0-439b614b88fc\" (UID: \"1fa9a21f-9bed-423c-b4a0-439b614b88fc\") " Jan 20 18:00:02 crc kubenswrapper[4558]: I0120 18:00:02.375511 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ssx2g\" (UniqueName: \"kubernetes.io/projected/1fa9a21f-9bed-423c-b4a0-439b614b88fc-kube-api-access-ssx2g\") pod \"1fa9a21f-9bed-423c-b4a0-439b614b88fc\" (UID: \"1fa9a21f-9bed-423c-b4a0-439b614b88fc\") " Jan 20 18:00:02 crc kubenswrapper[4558]: I0120 18:00:02.375558 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fa9a21f-9bed-423c-b4a0-439b614b88fc-bootstrap-combined-ca-bundle\") pod \"1fa9a21f-9bed-423c-b4a0-439b614b88fc\" (UID: \"1fa9a21f-9bed-423c-b4a0-439b614b88fc\") " Jan 20 18:00:02 crc kubenswrapper[4558]: I0120 18:00:02.375615 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/1fa9a21f-9bed-423c-b4a0-439b614b88fc-ssh-key-edpm-compute-global\") pod \"1fa9a21f-9bed-423c-b4a0-439b614b88fc\" (UID: \"1fa9a21f-9bed-423c-b4a0-439b614b88fc\") " Jan 20 18:00:02 crc kubenswrapper[4558]: I0120 18:00:02.593352 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1fa9a21f-9bed-423c-b4a0-439b614b88fc-kube-api-access-ssx2g" (OuterVolumeSpecName: "kube-api-access-ssx2g") pod "1fa9a21f-9bed-423c-b4a0-439b614b88fc" (UID: "1fa9a21f-9bed-423c-b4a0-439b614b88fc"). InnerVolumeSpecName "kube-api-access-ssx2g". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:00:02 crc kubenswrapper[4558]: I0120 18:00:02.593818 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1fa9a21f-9bed-423c-b4a0-439b614b88fc-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "1fa9a21f-9bed-423c-b4a0-439b614b88fc" (UID: "1fa9a21f-9bed-423c-b4a0-439b614b88fc"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:00:02 crc kubenswrapper[4558]: E0120 18:00:02.605064 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1fa9a21f-9bed-423c-b4a0-439b614b88fc-inventory podName:1fa9a21f-9bed-423c-b4a0-439b614b88fc nodeName:}" failed. No retries permitted until 2026-01-20 18:00:03.105035031 +0000 UTC m=+4696.865372998 (durationBeforeRetry 500ms). 
Error: error cleaning subPath mounts for volume "inventory" (UniqueName: "kubernetes.io/secret/1fa9a21f-9bed-423c-b4a0-439b614b88fc-inventory") pod "1fa9a21f-9bed-423c-b4a0-439b614b88fc" (UID: "1fa9a21f-9bed-423c-b4a0-439b614b88fc") : error deleting /var/lib/kubelet/pods/1fa9a21f-9bed-423c-b4a0-439b614b88fc/volume-subpaths: remove /var/lib/kubelet/pods/1fa9a21f-9bed-423c-b4a0-439b614b88fc/volume-subpaths: no such file or directory Jan 20 18:00:02 crc kubenswrapper[4558]: I0120 18:00:02.607319 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1fa9a21f-9bed-423c-b4a0-439b614b88fc-ssh-key-edpm-compute-global" (OuterVolumeSpecName: "ssh-key-edpm-compute-global") pod "1fa9a21f-9bed-423c-b4a0-439b614b88fc" (UID: "1fa9a21f-9bed-423c-b4a0-439b614b88fc"). InnerVolumeSpecName "ssh-key-edpm-compute-global". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:00:02 crc kubenswrapper[4558]: I0120 18:00:02.680154 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ssx2g\" (UniqueName: \"kubernetes.io/projected/1fa9a21f-9bed-423c-b4a0-439b614b88fc-kube-api-access-ssx2g\") on node \"crc\" DevicePath \"\"" Jan 20 18:00:02 crc kubenswrapper[4558]: I0120 18:00:02.680355 4558 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fa9a21f-9bed-423c-b4a0-439b614b88fc-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:00:02 crc kubenswrapper[4558]: I0120 18:00:02.680446 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/1fa9a21f-9bed-423c-b4a0-439b614b88fc-ssh-key-edpm-compute-global\") on node \"crc\" DevicePath \"\"" Jan 20 18:00:02 crc kubenswrapper[4558]: I0120 18:00:02.937787 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/bootstrap-edpm-compute-global-edpm-compute-global-lbtv4" event={"ID":"1fa9a21f-9bed-423c-b4a0-439b614b88fc","Type":"ContainerDied","Data":"46ec31577fe4982f21128a74310bebe6aec600da74f71166b2753a70d4dc166d"} Jan 20 18:00:02 crc kubenswrapper[4558]: I0120 18:00:02.937833 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="46ec31577fe4982f21128a74310bebe6aec600da74f71166b2753a70d4dc166d" Jan 20 18:00:02 crc kubenswrapper[4558]: I0120 18:00:02.937812 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/bootstrap-edpm-compute-global-edpm-compute-global-lbtv4" Jan 20 18:00:02 crc kubenswrapper[4558]: I0120 18:00:02.986181 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/configure-network-edpm-compute-global-edpm-compute-global-nqljk"] Jan 20 18:00:02 crc kubenswrapper[4558]: E0120 18:00:02.986653 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1fa9a21f-9bed-423c-b4a0-439b614b88fc" containerName="bootstrap-edpm-compute-global-edpm-compute-global" Jan 20 18:00:02 crc kubenswrapper[4558]: I0120 18:00:02.986754 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1fa9a21f-9bed-423c-b4a0-439b614b88fc" containerName="bootstrap-edpm-compute-global-edpm-compute-global" Jan 20 18:00:02 crc kubenswrapper[4558]: I0120 18:00:02.986989 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="1fa9a21f-9bed-423c-b4a0-439b614b88fc" containerName="bootstrap-edpm-compute-global-edpm-compute-global" Jan 20 18:00:02 crc kubenswrapper[4558]: I0120 18:00:02.987535 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/configure-network-edpm-compute-global-edpm-compute-global-nqljk" Jan 20 18:00:02 crc kubenswrapper[4558]: I0120 18:00:02.992033 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/configure-network-edpm-compute-global-edpm-compute-global-nqljk"] Jan 20 18:00:03 crc kubenswrapper[4558]: I0120 18:00:03.182401 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29482200-wl5jw" Jan 20 18:00:03 crc kubenswrapper[4558]: I0120 18:00:03.188216 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1fa9a21f-9bed-423c-b4a0-439b614b88fc-inventory\") pod \"1fa9a21f-9bed-423c-b4a0-439b614b88fc\" (UID: \"1fa9a21f-9bed-423c-b4a0-439b614b88fc\") " Jan 20 18:00:03 crc kubenswrapper[4558]: I0120 18:00:03.188447 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/8a9744b7-9035-4303-8daf-f7b4089b88d9-ssh-key-edpm-compute-global\") pod \"configure-network-edpm-compute-global-edpm-compute-global-nqljk\" (UID: \"8a9744b7-9035-4303-8daf-f7b4089b88d9\") " pod="openstack-kuttl-tests/configure-network-edpm-compute-global-edpm-compute-global-nqljk" Jan 20 18:00:03 crc kubenswrapper[4558]: I0120 18:00:03.188510 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8a9744b7-9035-4303-8daf-f7b4089b88d9-inventory\") pod \"configure-network-edpm-compute-global-edpm-compute-global-nqljk\" (UID: \"8a9744b7-9035-4303-8daf-f7b4089b88d9\") " pod="openstack-kuttl-tests/configure-network-edpm-compute-global-edpm-compute-global-nqljk" Jan 20 18:00:03 crc kubenswrapper[4558]: I0120 18:00:03.188564 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rjwnp\" (UniqueName: \"kubernetes.io/projected/8a9744b7-9035-4303-8daf-f7b4089b88d9-kube-api-access-rjwnp\") pod \"configure-network-edpm-compute-global-edpm-compute-global-nqljk\" (UID: \"8a9744b7-9035-4303-8daf-f7b4089b88d9\") " pod="openstack-kuttl-tests/configure-network-edpm-compute-global-edpm-compute-global-nqljk" Jan 20 18:00:03 crc 
kubenswrapper[4558]: I0120 18:00:03.191311 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1fa9a21f-9bed-423c-b4a0-439b614b88fc-inventory" (OuterVolumeSpecName: "inventory") pod "1fa9a21f-9bed-423c-b4a0-439b614b88fc" (UID: "1fa9a21f-9bed-423c-b4a0-439b614b88fc"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:00:03 crc kubenswrapper[4558]: I0120 18:00:03.289149 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wwvkr\" (UniqueName: \"kubernetes.io/projected/f7c2d02a-545b-4766-bdc3-6273e29a76e3-kube-api-access-wwvkr\") pod \"f7c2d02a-545b-4766-bdc3-6273e29a76e3\" (UID: \"f7c2d02a-545b-4766-bdc3-6273e29a76e3\") " Jan 20 18:00:03 crc kubenswrapper[4558]: I0120 18:00:03.289495 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f7c2d02a-545b-4766-bdc3-6273e29a76e3-config-volume\") pod \"f7c2d02a-545b-4766-bdc3-6273e29a76e3\" (UID: \"f7c2d02a-545b-4766-bdc3-6273e29a76e3\") " Jan 20 18:00:03 crc kubenswrapper[4558]: I0120 18:00:03.289556 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f7c2d02a-545b-4766-bdc3-6273e29a76e3-secret-volume\") pod \"f7c2d02a-545b-4766-bdc3-6273e29a76e3\" (UID: \"f7c2d02a-545b-4766-bdc3-6273e29a76e3\") " Jan 20 18:00:03 crc kubenswrapper[4558]: I0120 18:00:03.289730 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8a9744b7-9035-4303-8daf-f7b4089b88d9-inventory\") pod \"configure-network-edpm-compute-global-edpm-compute-global-nqljk\" (UID: \"8a9744b7-9035-4303-8daf-f7b4089b88d9\") " pod="openstack-kuttl-tests/configure-network-edpm-compute-global-edpm-compute-global-nqljk" Jan 20 18:00:03 crc kubenswrapper[4558]: I0120 18:00:03.289758 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rjwnp\" (UniqueName: \"kubernetes.io/projected/8a9744b7-9035-4303-8daf-f7b4089b88d9-kube-api-access-rjwnp\") pod \"configure-network-edpm-compute-global-edpm-compute-global-nqljk\" (UID: \"8a9744b7-9035-4303-8daf-f7b4089b88d9\") " pod="openstack-kuttl-tests/configure-network-edpm-compute-global-edpm-compute-global-nqljk" Jan 20 18:00:03 crc kubenswrapper[4558]: I0120 18:00:03.289825 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/8a9744b7-9035-4303-8daf-f7b4089b88d9-ssh-key-edpm-compute-global\") pod \"configure-network-edpm-compute-global-edpm-compute-global-nqljk\" (UID: \"8a9744b7-9035-4303-8daf-f7b4089b88d9\") " pod="openstack-kuttl-tests/configure-network-edpm-compute-global-edpm-compute-global-nqljk" Jan 20 18:00:03 crc kubenswrapper[4558]: I0120 18:00:03.289884 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1fa9a21f-9bed-423c-b4a0-439b614b88fc-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:00:03 crc kubenswrapper[4558]: I0120 18:00:03.290105 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f7c2d02a-545b-4766-bdc3-6273e29a76e3-config-volume" (OuterVolumeSpecName: "config-volume") pod "f7c2d02a-545b-4766-bdc3-6273e29a76e3" (UID: "f7c2d02a-545b-4766-bdc3-6273e29a76e3"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:00:03 crc kubenswrapper[4558]: I0120 18:00:03.292120 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f7c2d02a-545b-4766-bdc3-6273e29a76e3-kube-api-access-wwvkr" (OuterVolumeSpecName: "kube-api-access-wwvkr") pod "f7c2d02a-545b-4766-bdc3-6273e29a76e3" (UID: "f7c2d02a-545b-4766-bdc3-6273e29a76e3"). InnerVolumeSpecName "kube-api-access-wwvkr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:00:03 crc kubenswrapper[4558]: I0120 18:00:03.292587 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7c2d02a-545b-4766-bdc3-6273e29a76e3-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "f7c2d02a-545b-4766-bdc3-6273e29a76e3" (UID: "f7c2d02a-545b-4766-bdc3-6273e29a76e3"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:00:03 crc kubenswrapper[4558]: I0120 18:00:03.293378 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8a9744b7-9035-4303-8daf-f7b4089b88d9-inventory\") pod \"configure-network-edpm-compute-global-edpm-compute-global-nqljk\" (UID: \"8a9744b7-9035-4303-8daf-f7b4089b88d9\") " pod="openstack-kuttl-tests/configure-network-edpm-compute-global-edpm-compute-global-nqljk" Jan 20 18:00:03 crc kubenswrapper[4558]: I0120 18:00:03.293978 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/8a9744b7-9035-4303-8daf-f7b4089b88d9-ssh-key-edpm-compute-global\") pod \"configure-network-edpm-compute-global-edpm-compute-global-nqljk\" (UID: \"8a9744b7-9035-4303-8daf-f7b4089b88d9\") " pod="openstack-kuttl-tests/configure-network-edpm-compute-global-edpm-compute-global-nqljk" Jan 20 18:00:03 crc kubenswrapper[4558]: I0120 18:00:03.304949 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rjwnp\" (UniqueName: \"kubernetes.io/projected/8a9744b7-9035-4303-8daf-f7b4089b88d9-kube-api-access-rjwnp\") pod \"configure-network-edpm-compute-global-edpm-compute-global-nqljk\" (UID: \"8a9744b7-9035-4303-8daf-f7b4089b88d9\") " pod="openstack-kuttl-tests/configure-network-edpm-compute-global-edpm-compute-global-nqljk" Jan 20 18:00:03 crc kubenswrapper[4558]: I0120 18:00:03.391081 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wwvkr\" (UniqueName: \"kubernetes.io/projected/f7c2d02a-545b-4766-bdc3-6273e29a76e3-kube-api-access-wwvkr\") on node \"crc\" DevicePath \"\"" Jan 20 18:00:03 crc kubenswrapper[4558]: I0120 18:00:03.391110 4558 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f7c2d02a-545b-4766-bdc3-6273e29a76e3-config-volume\") on node \"crc\" DevicePath \"\"" Jan 20 18:00:03 crc kubenswrapper[4558]: I0120 18:00:03.391121 4558 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f7c2d02a-545b-4766-bdc3-6273e29a76e3-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 20 18:00:03 crc kubenswrapper[4558]: I0120 18:00:03.601645 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/configure-network-edpm-compute-global-edpm-compute-global-nqljk" Jan 20 18:00:03 crc kubenswrapper[4558]: I0120 18:00:03.948600 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29482200-wl5jw" event={"ID":"f7c2d02a-545b-4766-bdc3-6273e29a76e3","Type":"ContainerDied","Data":"c20330b9fd307d2f8d5e68af6d215acfe6cc1328f5cb5a567b599e5507648ccb"} Jan 20 18:00:03 crc kubenswrapper[4558]: I0120 18:00:03.948661 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c20330b9fd307d2f8d5e68af6d215acfe6cc1328f5cb5a567b599e5507648ccb" Jan 20 18:00:03 crc kubenswrapper[4558]: I0120 18:00:03.948665 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29482200-wl5jw" Jan 20 18:00:03 crc kubenswrapper[4558]: I0120 18:00:03.997198 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/configure-network-edpm-compute-global-edpm-compute-global-nqljk"] Jan 20 18:00:04 crc kubenswrapper[4558]: W0120 18:00:04.000729 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8a9744b7_9035_4303_8daf_f7b4089b88d9.slice/crio-c0dbbe90903bffa97302a4770d836cace55182bc002acfdee9f2ed93786b3377 WatchSource:0}: Error finding container c0dbbe90903bffa97302a4770d836cace55182bc002acfdee9f2ed93786b3377: Status 404 returned error can't find the container with id c0dbbe90903bffa97302a4770d836cace55182bc002acfdee9f2ed93786b3377 Jan 20 18:00:04 crc kubenswrapper[4558]: I0120 18:00:04.004888 4558 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 20 18:00:04 crc kubenswrapper[4558]: I0120 18:00:04.259832 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29482155-xfwxm"] Jan 20 18:00:04 crc kubenswrapper[4558]: I0120 18:00:04.263890 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29482155-xfwxm"] Jan 20 18:00:04 crc kubenswrapper[4558]: I0120 18:00:04.575753 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9080d637-d392-4532-9054-180e871834f0" path="/var/lib/kubelet/pods/9080d637-d392-4532-9054-180e871834f0/volumes" Jan 20 18:00:04 crc kubenswrapper[4558]: I0120 18:00:04.963583 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/configure-network-edpm-compute-global-edpm-compute-global-nqljk" event={"ID":"8a9744b7-9035-4303-8daf-f7b4089b88d9","Type":"ContainerStarted","Data":"c0dbbe90903bffa97302a4770d836cace55182bc002acfdee9f2ed93786b3377"} Jan 20 18:00:05 crc kubenswrapper[4558]: I0120 18:00:05.979583 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/configure-network-edpm-compute-global-edpm-compute-global-nqljk" event={"ID":"8a9744b7-9035-4303-8daf-f7b4089b88d9","Type":"ContainerStarted","Data":"ed8fbbf22228e4cf6c5612b4ee404afc61a0ce4a54631e2454f7c9637ff8c312"} Jan 20 18:00:05 crc kubenswrapper[4558]: I0120 18:00:05.996036 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/configure-network-edpm-compute-global-edpm-compute-global-nqljk" podStartSLOduration=3.163091879 podStartE2EDuration="3.996022541s" podCreationTimestamp="2026-01-20 18:00:02 +0000 UTC" firstStartedPulling="2026-01-20 
18:00:04.004614173 +0000 UTC m=+4697.764952130" lastFinishedPulling="2026-01-20 18:00:04.837544825 +0000 UTC m=+4698.597882792" observedRunningTime="2026-01-20 18:00:05.9909313 +0000 UTC m=+4699.751269267" watchObservedRunningTime="2026-01-20 18:00:05.996022541 +0000 UTC m=+4699.756360508" Jan 20 18:00:06 crc kubenswrapper[4558]: I0120 18:00:06.990860 4558 generic.go:334] "Generic (PLEG): container finished" podID="8a9744b7-9035-4303-8daf-f7b4089b88d9" containerID="ed8fbbf22228e4cf6c5612b4ee404afc61a0ce4a54631e2454f7c9637ff8c312" exitCode=0 Jan 20 18:00:06 crc kubenswrapper[4558]: I0120 18:00:06.991284 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/configure-network-edpm-compute-global-edpm-compute-global-nqljk" event={"ID":"8a9744b7-9035-4303-8daf-f7b4089b88d9","Type":"ContainerDied","Data":"ed8fbbf22228e4cf6c5612b4ee404afc61a0ce4a54631e2454f7c9637ff8c312"} Jan 20 18:00:08 crc kubenswrapper[4558]: I0120 18:00:08.244206 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/configure-network-edpm-compute-global-edpm-compute-global-nqljk" Jan 20 18:00:08 crc kubenswrapper[4558]: I0120 18:00:08.261888 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rjwnp\" (UniqueName: \"kubernetes.io/projected/8a9744b7-9035-4303-8daf-f7b4089b88d9-kube-api-access-rjwnp\") pod \"8a9744b7-9035-4303-8daf-f7b4089b88d9\" (UID: \"8a9744b7-9035-4303-8daf-f7b4089b88d9\") " Jan 20 18:00:08 crc kubenswrapper[4558]: I0120 18:00:08.261935 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/8a9744b7-9035-4303-8daf-f7b4089b88d9-ssh-key-edpm-compute-global\") pod \"8a9744b7-9035-4303-8daf-f7b4089b88d9\" (UID: \"8a9744b7-9035-4303-8daf-f7b4089b88d9\") " Jan 20 18:00:08 crc kubenswrapper[4558]: I0120 18:00:08.262675 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8a9744b7-9035-4303-8daf-f7b4089b88d9-inventory\") pod \"8a9744b7-9035-4303-8daf-f7b4089b88d9\" (UID: \"8a9744b7-9035-4303-8daf-f7b4089b88d9\") " Jan 20 18:00:08 crc kubenswrapper[4558]: I0120 18:00:08.267066 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8a9744b7-9035-4303-8daf-f7b4089b88d9-kube-api-access-rjwnp" (OuterVolumeSpecName: "kube-api-access-rjwnp") pod "8a9744b7-9035-4303-8daf-f7b4089b88d9" (UID: "8a9744b7-9035-4303-8daf-f7b4089b88d9"). InnerVolumeSpecName "kube-api-access-rjwnp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:00:08 crc kubenswrapper[4558]: I0120 18:00:08.280092 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a9744b7-9035-4303-8daf-f7b4089b88d9-ssh-key-edpm-compute-global" (OuterVolumeSpecName: "ssh-key-edpm-compute-global") pod "8a9744b7-9035-4303-8daf-f7b4089b88d9" (UID: "8a9744b7-9035-4303-8daf-f7b4089b88d9"). InnerVolumeSpecName "ssh-key-edpm-compute-global". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:00:08 crc kubenswrapper[4558]: I0120 18:00:08.281579 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a9744b7-9035-4303-8daf-f7b4089b88d9-inventory" (OuterVolumeSpecName: "inventory") pod "8a9744b7-9035-4303-8daf-f7b4089b88d9" (UID: "8a9744b7-9035-4303-8daf-f7b4089b88d9"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:00:08 crc kubenswrapper[4558]: I0120 18:00:08.364025 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8a9744b7-9035-4303-8daf-f7b4089b88d9-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:00:08 crc kubenswrapper[4558]: I0120 18:00:08.364054 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rjwnp\" (UniqueName: \"kubernetes.io/projected/8a9744b7-9035-4303-8daf-f7b4089b88d9-kube-api-access-rjwnp\") on node \"crc\" DevicePath \"\"" Jan 20 18:00:08 crc kubenswrapper[4558]: I0120 18:00:08.364065 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/8a9744b7-9035-4303-8daf-f7b4089b88d9-ssh-key-edpm-compute-global\") on node \"crc\" DevicePath \"\"" Jan 20 18:00:09 crc kubenswrapper[4558]: I0120 18:00:09.010767 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/configure-network-edpm-compute-global-edpm-compute-global-nqljk" event={"ID":"8a9744b7-9035-4303-8daf-f7b4089b88d9","Type":"ContainerDied","Data":"c0dbbe90903bffa97302a4770d836cace55182bc002acfdee9f2ed93786b3377"} Jan 20 18:00:09 crc kubenswrapper[4558]: I0120 18:00:09.011028 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c0dbbe90903bffa97302a4770d836cace55182bc002acfdee9f2ed93786b3377" Jan 20 18:00:09 crc kubenswrapper[4558]: I0120 18:00:09.010836 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/configure-network-edpm-compute-global-edpm-compute-global-nqljk" Jan 20 18:00:09 crc kubenswrapper[4558]: I0120 18:00:09.067286 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/validate-network-edpm-compute-global-edpm-compute-global-zjpgn"] Jan 20 18:00:09 crc kubenswrapper[4558]: E0120 18:00:09.067847 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a9744b7-9035-4303-8daf-f7b4089b88d9" containerName="configure-network-edpm-compute-global-edpm-compute-global" Jan 20 18:00:09 crc kubenswrapper[4558]: I0120 18:00:09.067868 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a9744b7-9035-4303-8daf-f7b4089b88d9" containerName="configure-network-edpm-compute-global-edpm-compute-global" Jan 20 18:00:09 crc kubenswrapper[4558]: E0120 18:00:09.067885 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7c2d02a-545b-4766-bdc3-6273e29a76e3" containerName="collect-profiles" Jan 20 18:00:09 crc kubenswrapper[4558]: I0120 18:00:09.067891 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7c2d02a-545b-4766-bdc3-6273e29a76e3" containerName="collect-profiles" Jan 20 18:00:09 crc kubenswrapper[4558]: I0120 18:00:09.068024 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7c2d02a-545b-4766-bdc3-6273e29a76e3" containerName="collect-profiles" Jan 20 18:00:09 crc kubenswrapper[4558]: I0120 18:00:09.068038 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8a9744b7-9035-4303-8daf-f7b4089b88d9" containerName="configure-network-edpm-compute-global-edpm-compute-global" Jan 20 18:00:09 crc kubenswrapper[4558]: I0120 18:00:09.068498 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/validate-network-edpm-compute-global-edpm-compute-global-zjpgn" Jan 20 18:00:09 crc kubenswrapper[4558]: I0120 18:00:09.070041 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-edpm-compute-global" Jan 20 18:00:09 crc kubenswrapper[4558]: I0120 18:00:09.070390 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"edpm-compute-global-dockercfg-wkdxg" Jan 20 18:00:09 crc kubenswrapper[4558]: I0120 18:00:09.070613 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 18:00:09 crc kubenswrapper[4558]: I0120 18:00:09.070866 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 18:00:09 crc kubenswrapper[4558]: I0120 18:00:09.074321 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/84aae701-ceff-4234-aa5a-3ecce5c826bb-inventory\") pod \"validate-network-edpm-compute-global-edpm-compute-global-zjpgn\" (UID: \"84aae701-ceff-4234-aa5a-3ecce5c826bb\") " pod="openstack-kuttl-tests/validate-network-edpm-compute-global-edpm-compute-global-zjpgn" Jan 20 18:00:09 crc kubenswrapper[4558]: I0120 18:00:09.074435 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kd92m\" (UniqueName: \"kubernetes.io/projected/84aae701-ceff-4234-aa5a-3ecce5c826bb-kube-api-access-kd92m\") pod \"validate-network-edpm-compute-global-edpm-compute-global-zjpgn\" (UID: \"84aae701-ceff-4234-aa5a-3ecce5c826bb\") " pod="openstack-kuttl-tests/validate-network-edpm-compute-global-edpm-compute-global-zjpgn" Jan 20 18:00:09 crc kubenswrapper[4558]: I0120 18:00:09.074488 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/84aae701-ceff-4234-aa5a-3ecce5c826bb-ssh-key-edpm-compute-global\") pod \"validate-network-edpm-compute-global-edpm-compute-global-zjpgn\" (UID: \"84aae701-ceff-4234-aa5a-3ecce5c826bb\") " pod="openstack-kuttl-tests/validate-network-edpm-compute-global-edpm-compute-global-zjpgn" Jan 20 18:00:09 crc kubenswrapper[4558]: I0120 18:00:09.078562 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/validate-network-edpm-compute-global-edpm-compute-global-zjpgn"] Jan 20 18:00:09 crc kubenswrapper[4558]: I0120 18:00:09.176223 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/84aae701-ceff-4234-aa5a-3ecce5c826bb-inventory\") pod \"validate-network-edpm-compute-global-edpm-compute-global-zjpgn\" (UID: \"84aae701-ceff-4234-aa5a-3ecce5c826bb\") " pod="openstack-kuttl-tests/validate-network-edpm-compute-global-edpm-compute-global-zjpgn" Jan 20 18:00:09 crc kubenswrapper[4558]: I0120 18:00:09.176386 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kd92m\" (UniqueName: \"kubernetes.io/projected/84aae701-ceff-4234-aa5a-3ecce5c826bb-kube-api-access-kd92m\") pod \"validate-network-edpm-compute-global-edpm-compute-global-zjpgn\" (UID: \"84aae701-ceff-4234-aa5a-3ecce5c826bb\") " pod="openstack-kuttl-tests/validate-network-edpm-compute-global-edpm-compute-global-zjpgn" Jan 20 18:00:09 crc 
kubenswrapper[4558]: I0120 18:00:09.176457 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/84aae701-ceff-4234-aa5a-3ecce5c826bb-ssh-key-edpm-compute-global\") pod \"validate-network-edpm-compute-global-edpm-compute-global-zjpgn\" (UID: \"84aae701-ceff-4234-aa5a-3ecce5c826bb\") " pod="openstack-kuttl-tests/validate-network-edpm-compute-global-edpm-compute-global-zjpgn" Jan 20 18:00:09 crc kubenswrapper[4558]: I0120 18:00:09.181292 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/84aae701-ceff-4234-aa5a-3ecce5c826bb-inventory\") pod \"validate-network-edpm-compute-global-edpm-compute-global-zjpgn\" (UID: \"84aae701-ceff-4234-aa5a-3ecce5c826bb\") " pod="openstack-kuttl-tests/validate-network-edpm-compute-global-edpm-compute-global-zjpgn" Jan 20 18:00:09 crc kubenswrapper[4558]: I0120 18:00:09.181343 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/84aae701-ceff-4234-aa5a-3ecce5c826bb-ssh-key-edpm-compute-global\") pod \"validate-network-edpm-compute-global-edpm-compute-global-zjpgn\" (UID: \"84aae701-ceff-4234-aa5a-3ecce5c826bb\") " pod="openstack-kuttl-tests/validate-network-edpm-compute-global-edpm-compute-global-zjpgn" Jan 20 18:00:09 crc kubenswrapper[4558]: I0120 18:00:09.191114 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kd92m\" (UniqueName: \"kubernetes.io/projected/84aae701-ceff-4234-aa5a-3ecce5c826bb-kube-api-access-kd92m\") pod \"validate-network-edpm-compute-global-edpm-compute-global-zjpgn\" (UID: \"84aae701-ceff-4234-aa5a-3ecce5c826bb\") " pod="openstack-kuttl-tests/validate-network-edpm-compute-global-edpm-compute-global-zjpgn" Jan 20 18:00:09 crc kubenswrapper[4558]: I0120 18:00:09.381139 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/validate-network-edpm-compute-global-edpm-compute-global-zjpgn" Jan 20 18:00:10 crc kubenswrapper[4558]: I0120 18:00:09.785320 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/validate-network-edpm-compute-global-edpm-compute-global-zjpgn"] Jan 20 18:00:10 crc kubenswrapper[4558]: W0120 18:00:09.787173 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod84aae701_ceff_4234_aa5a_3ecce5c826bb.slice/crio-a349b5d0daf33762b72d10cf86ecd0e6d8eb46bcde5838d734fd33978ecba006 WatchSource:0}: Error finding container a349b5d0daf33762b72d10cf86ecd0e6d8eb46bcde5838d734fd33978ecba006: Status 404 returned error can't find the container with id a349b5d0daf33762b72d10cf86ecd0e6d8eb46bcde5838d734fd33978ecba006 Jan 20 18:00:10 crc kubenswrapper[4558]: I0120 18:00:10.029697 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/validate-network-edpm-compute-global-edpm-compute-global-zjpgn" event={"ID":"84aae701-ceff-4234-aa5a-3ecce5c826bb","Type":"ContainerStarted","Data":"a349b5d0daf33762b72d10cf86ecd0e6d8eb46bcde5838d734fd33978ecba006"} Jan 20 18:00:11 crc kubenswrapper[4558]: I0120 18:00:11.041996 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/validate-network-edpm-compute-global-edpm-compute-global-zjpgn" event={"ID":"84aae701-ceff-4234-aa5a-3ecce5c826bb","Type":"ContainerStarted","Data":"ce204e839c11f34c659e55d45c47dba075ea61f30450f3a9bb1d8ed89d47c69f"} Jan 20 18:00:11 crc kubenswrapper[4558]: I0120 18:00:11.057825 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/validate-network-edpm-compute-global-edpm-compute-global-zjpgn" podStartSLOduration=1.205370373 podStartE2EDuration="2.057781613s" podCreationTimestamp="2026-01-20 18:00:09 +0000 UTC" firstStartedPulling="2026-01-20 18:00:09.795860411 +0000 UTC m=+4703.556198378" lastFinishedPulling="2026-01-20 18:00:10.64827165 +0000 UTC m=+4704.408609618" observedRunningTime="2026-01-20 18:00:11.055548796 +0000 UTC m=+4704.815886762" watchObservedRunningTime="2026-01-20 18:00:11.057781613 +0000 UTC m=+4704.818119580" Jan 20 18:00:12 crc kubenswrapper[4558]: I0120 18:00:12.053680 4558 generic.go:334] "Generic (PLEG): container finished" podID="84aae701-ceff-4234-aa5a-3ecce5c826bb" containerID="ce204e839c11f34c659e55d45c47dba075ea61f30450f3a9bb1d8ed89d47c69f" exitCode=0 Jan 20 18:00:12 crc kubenswrapper[4558]: I0120 18:00:12.053748 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/validate-network-edpm-compute-global-edpm-compute-global-zjpgn" event={"ID":"84aae701-ceff-4234-aa5a-3ecce5c826bb","Type":"ContainerDied","Data":"ce204e839c11f34c659e55d45c47dba075ea61f30450f3a9bb1d8ed89d47c69f"} Jan 20 18:00:13 crc kubenswrapper[4558]: I0120 18:00:13.303359 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/validate-network-edpm-compute-global-edpm-compute-global-zjpgn" Jan 20 18:00:13 crc kubenswrapper[4558]: I0120 18:00:13.435662 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kd92m\" (UniqueName: \"kubernetes.io/projected/84aae701-ceff-4234-aa5a-3ecce5c826bb-kube-api-access-kd92m\") pod \"84aae701-ceff-4234-aa5a-3ecce5c826bb\" (UID: \"84aae701-ceff-4234-aa5a-3ecce5c826bb\") " Jan 20 18:00:13 crc kubenswrapper[4558]: I0120 18:00:13.435831 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/84aae701-ceff-4234-aa5a-3ecce5c826bb-ssh-key-edpm-compute-global\") pod \"84aae701-ceff-4234-aa5a-3ecce5c826bb\" (UID: \"84aae701-ceff-4234-aa5a-3ecce5c826bb\") " Jan 20 18:00:13 crc kubenswrapper[4558]: I0120 18:00:13.435910 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/84aae701-ceff-4234-aa5a-3ecce5c826bb-inventory\") pod \"84aae701-ceff-4234-aa5a-3ecce5c826bb\" (UID: \"84aae701-ceff-4234-aa5a-3ecce5c826bb\") " Jan 20 18:00:13 crc kubenswrapper[4558]: I0120 18:00:13.442634 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/84aae701-ceff-4234-aa5a-3ecce5c826bb-kube-api-access-kd92m" (OuterVolumeSpecName: "kube-api-access-kd92m") pod "84aae701-ceff-4234-aa5a-3ecce5c826bb" (UID: "84aae701-ceff-4234-aa5a-3ecce5c826bb"). InnerVolumeSpecName "kube-api-access-kd92m". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:00:13 crc kubenswrapper[4558]: I0120 18:00:13.456740 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/84aae701-ceff-4234-aa5a-3ecce5c826bb-inventory" (OuterVolumeSpecName: "inventory") pod "84aae701-ceff-4234-aa5a-3ecce5c826bb" (UID: "84aae701-ceff-4234-aa5a-3ecce5c826bb"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:00:13 crc kubenswrapper[4558]: I0120 18:00:13.457962 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/84aae701-ceff-4234-aa5a-3ecce5c826bb-ssh-key-edpm-compute-global" (OuterVolumeSpecName: "ssh-key-edpm-compute-global") pod "84aae701-ceff-4234-aa5a-3ecce5c826bb" (UID: "84aae701-ceff-4234-aa5a-3ecce5c826bb"). InnerVolumeSpecName "ssh-key-edpm-compute-global". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:00:13 crc kubenswrapper[4558]: I0120 18:00:13.538300 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kd92m\" (UniqueName: \"kubernetes.io/projected/84aae701-ceff-4234-aa5a-3ecce5c826bb-kube-api-access-kd92m\") on node \"crc\" DevicePath \"\"" Jan 20 18:00:13 crc kubenswrapper[4558]: I0120 18:00:13.538543 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/84aae701-ceff-4234-aa5a-3ecce5c826bb-ssh-key-edpm-compute-global\") on node \"crc\" DevicePath \"\"" Jan 20 18:00:13 crc kubenswrapper[4558]: I0120 18:00:13.538581 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/84aae701-ceff-4234-aa5a-3ecce5c826bb-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:00:14 crc kubenswrapper[4558]: I0120 18:00:14.068867 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/validate-network-edpm-compute-global-edpm-compute-global-zjpgn" event={"ID":"84aae701-ceff-4234-aa5a-3ecce5c826bb","Type":"ContainerDied","Data":"a349b5d0daf33762b72d10cf86ecd0e6d8eb46bcde5838d734fd33978ecba006"} Jan 20 18:00:14 crc kubenswrapper[4558]: I0120 18:00:14.068921 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a349b5d0daf33762b72d10cf86ecd0e6d8eb46bcde5838d734fd33978ecba006" Jan 20 18:00:14 crc kubenswrapper[4558]: I0120 18:00:14.068932 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/validate-network-edpm-compute-global-edpm-compute-global-zjpgn" Jan 20 18:00:14 crc kubenswrapper[4558]: I0120 18:00:14.115760 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/install-os-edpm-compute-global-edpm-compute-global-mq7qb"] Jan 20 18:00:14 crc kubenswrapper[4558]: E0120 18:00:14.116099 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84aae701-ceff-4234-aa5a-3ecce5c826bb" containerName="validate-network-edpm-compute-global-edpm-compute-global" Jan 20 18:00:14 crc kubenswrapper[4558]: I0120 18:00:14.116118 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="84aae701-ceff-4234-aa5a-3ecce5c826bb" containerName="validate-network-edpm-compute-global-edpm-compute-global" Jan 20 18:00:14 crc kubenswrapper[4558]: I0120 18:00:14.116305 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="84aae701-ceff-4234-aa5a-3ecce5c826bb" containerName="validate-network-edpm-compute-global-edpm-compute-global" Jan 20 18:00:14 crc kubenswrapper[4558]: I0120 18:00:14.116792 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/install-os-edpm-compute-global-edpm-compute-global-mq7qb" Jan 20 18:00:14 crc kubenswrapper[4558]: I0120 18:00:14.118476 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 18:00:14 crc kubenswrapper[4558]: I0120 18:00:14.118789 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-edpm-compute-global" Jan 20 18:00:14 crc kubenswrapper[4558]: I0120 18:00:14.118832 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 18:00:14 crc kubenswrapper[4558]: I0120 18:00:14.118797 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"edpm-compute-global-dockercfg-wkdxg" Jan 20 18:00:14 crc kubenswrapper[4558]: I0120 18:00:14.129061 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/install-os-edpm-compute-global-edpm-compute-global-mq7qb"] Jan 20 18:00:14 crc kubenswrapper[4558]: I0120 18:00:14.144751 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/f0222acb-beb0-4bec-8c83-53ed056817e5-ssh-key-edpm-compute-global\") pod \"install-os-edpm-compute-global-edpm-compute-global-mq7qb\" (UID: \"f0222acb-beb0-4bec-8c83-53ed056817e5\") " pod="openstack-kuttl-tests/install-os-edpm-compute-global-edpm-compute-global-mq7qb" Jan 20 18:00:14 crc kubenswrapper[4558]: I0120 18:00:14.144819 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f0222acb-beb0-4bec-8c83-53ed056817e5-inventory\") pod \"install-os-edpm-compute-global-edpm-compute-global-mq7qb\" (UID: \"f0222acb-beb0-4bec-8c83-53ed056817e5\") " pod="openstack-kuttl-tests/install-os-edpm-compute-global-edpm-compute-global-mq7qb" Jan 20 18:00:14 crc kubenswrapper[4558]: I0120 18:00:14.144845 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-79xvd\" (UniqueName: \"kubernetes.io/projected/f0222acb-beb0-4bec-8c83-53ed056817e5-kube-api-access-79xvd\") pod \"install-os-edpm-compute-global-edpm-compute-global-mq7qb\" (UID: \"f0222acb-beb0-4bec-8c83-53ed056817e5\") " pod="openstack-kuttl-tests/install-os-edpm-compute-global-edpm-compute-global-mq7qb" Jan 20 18:00:14 crc kubenswrapper[4558]: I0120 18:00:14.245393 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/f0222acb-beb0-4bec-8c83-53ed056817e5-ssh-key-edpm-compute-global\") pod \"install-os-edpm-compute-global-edpm-compute-global-mq7qb\" (UID: \"f0222acb-beb0-4bec-8c83-53ed056817e5\") " pod="openstack-kuttl-tests/install-os-edpm-compute-global-edpm-compute-global-mq7qb" Jan 20 18:00:14 crc kubenswrapper[4558]: I0120 18:00:14.245454 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f0222acb-beb0-4bec-8c83-53ed056817e5-inventory\") pod \"install-os-edpm-compute-global-edpm-compute-global-mq7qb\" (UID: \"f0222acb-beb0-4bec-8c83-53ed056817e5\") " pod="openstack-kuttl-tests/install-os-edpm-compute-global-edpm-compute-global-mq7qb" Jan 20 18:00:14 crc kubenswrapper[4558]: I0120 18:00:14.245478 4558 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-79xvd\" (UniqueName: \"kubernetes.io/projected/f0222acb-beb0-4bec-8c83-53ed056817e5-kube-api-access-79xvd\") pod \"install-os-edpm-compute-global-edpm-compute-global-mq7qb\" (UID: \"f0222acb-beb0-4bec-8c83-53ed056817e5\") " pod="openstack-kuttl-tests/install-os-edpm-compute-global-edpm-compute-global-mq7qb" Jan 20 18:00:14 crc kubenswrapper[4558]: I0120 18:00:14.250327 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/f0222acb-beb0-4bec-8c83-53ed056817e5-ssh-key-edpm-compute-global\") pod \"install-os-edpm-compute-global-edpm-compute-global-mq7qb\" (UID: \"f0222acb-beb0-4bec-8c83-53ed056817e5\") " pod="openstack-kuttl-tests/install-os-edpm-compute-global-edpm-compute-global-mq7qb" Jan 20 18:00:14 crc kubenswrapper[4558]: I0120 18:00:14.252250 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f0222acb-beb0-4bec-8c83-53ed056817e5-inventory\") pod \"install-os-edpm-compute-global-edpm-compute-global-mq7qb\" (UID: \"f0222acb-beb0-4bec-8c83-53ed056817e5\") " pod="openstack-kuttl-tests/install-os-edpm-compute-global-edpm-compute-global-mq7qb" Jan 20 18:00:14 crc kubenswrapper[4558]: I0120 18:00:14.261244 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-79xvd\" (UniqueName: \"kubernetes.io/projected/f0222acb-beb0-4bec-8c83-53ed056817e5-kube-api-access-79xvd\") pod \"install-os-edpm-compute-global-edpm-compute-global-mq7qb\" (UID: \"f0222acb-beb0-4bec-8c83-53ed056817e5\") " pod="openstack-kuttl-tests/install-os-edpm-compute-global-edpm-compute-global-mq7qb" Jan 20 18:00:14 crc kubenswrapper[4558]: I0120 18:00:14.430803 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/install-os-edpm-compute-global-edpm-compute-global-mq7qb" Jan 20 18:00:14 crc kubenswrapper[4558]: I0120 18:00:14.839157 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/install-os-edpm-compute-global-edpm-compute-global-mq7qb"] Jan 20 18:00:14 crc kubenswrapper[4558]: W0120 18:00:14.841198 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf0222acb_beb0_4bec_8c83_53ed056817e5.slice/crio-7c504f98b6f999938f7782f0cc7899300d94715f913e33d49cc230c96a66b3d6 WatchSource:0}: Error finding container 7c504f98b6f999938f7782f0cc7899300d94715f913e33d49cc230c96a66b3d6: Status 404 returned error can't find the container with id 7c504f98b6f999938f7782f0cc7899300d94715f913e33d49cc230c96a66b3d6 Jan 20 18:00:15 crc kubenswrapper[4558]: I0120 18:00:15.078130 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/install-os-edpm-compute-global-edpm-compute-global-mq7qb" event={"ID":"f0222acb-beb0-4bec-8c83-53ed056817e5","Type":"ContainerStarted","Data":"7c504f98b6f999938f7782f0cc7899300d94715f913e33d49cc230c96a66b3d6"} Jan 20 18:00:18 crc kubenswrapper[4558]: I0120 18:00:18.124848 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/install-os-edpm-compute-global-edpm-compute-global-mq7qb" event={"ID":"f0222acb-beb0-4bec-8c83-53ed056817e5","Type":"ContainerStarted","Data":"f3480dff1283cc8b54d75770a199bbd3ab7c2e7281d8214dc64c90ffcc63d0c2"} Jan 20 18:00:19 crc kubenswrapper[4558]: I0120 18:00:19.137707 4558 generic.go:334] "Generic (PLEG): container finished" podID="f0222acb-beb0-4bec-8c83-53ed056817e5" containerID="f3480dff1283cc8b54d75770a199bbd3ab7c2e7281d8214dc64c90ffcc63d0c2" exitCode=0 Jan 20 18:00:19 crc kubenswrapper[4558]: I0120 18:00:19.137765 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/install-os-edpm-compute-global-edpm-compute-global-mq7qb" event={"ID":"f0222acb-beb0-4bec-8c83-53ed056817e5","Type":"ContainerDied","Data":"f3480dff1283cc8b54d75770a199bbd3ab7c2e7281d8214dc64c90ffcc63d0c2"} Jan 20 18:00:19 crc kubenswrapper[4558]: I0120 18:00:19.382951 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/install-os-edpm-compute-global-edpm-compute-global-mq7qb" Jan 20 18:00:19 crc kubenswrapper[4558]: I0120 18:00:19.437005 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f0222acb-beb0-4bec-8c83-53ed056817e5-inventory\") pod \"f0222acb-beb0-4bec-8c83-53ed056817e5\" (UID: \"f0222acb-beb0-4bec-8c83-53ed056817e5\") " Jan 20 18:00:19 crc kubenswrapper[4558]: I0120 18:00:19.437078 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/f0222acb-beb0-4bec-8c83-53ed056817e5-ssh-key-edpm-compute-global\") pod \"f0222acb-beb0-4bec-8c83-53ed056817e5\" (UID: \"f0222acb-beb0-4bec-8c83-53ed056817e5\") " Jan 20 18:00:19 crc kubenswrapper[4558]: I0120 18:00:19.437116 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-79xvd\" (UniqueName: \"kubernetes.io/projected/f0222acb-beb0-4bec-8c83-53ed056817e5-kube-api-access-79xvd\") pod \"f0222acb-beb0-4bec-8c83-53ed056817e5\" (UID: \"f0222acb-beb0-4bec-8c83-53ed056817e5\") " Jan 20 18:00:19 crc kubenswrapper[4558]: I0120 18:00:19.442101 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f0222acb-beb0-4bec-8c83-53ed056817e5-kube-api-access-79xvd" (OuterVolumeSpecName: "kube-api-access-79xvd") pod "f0222acb-beb0-4bec-8c83-53ed056817e5" (UID: "f0222acb-beb0-4bec-8c83-53ed056817e5"). InnerVolumeSpecName "kube-api-access-79xvd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:00:19 crc kubenswrapper[4558]: I0120 18:00:19.454366 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f0222acb-beb0-4bec-8c83-53ed056817e5-inventory" (OuterVolumeSpecName: "inventory") pod "f0222acb-beb0-4bec-8c83-53ed056817e5" (UID: "f0222acb-beb0-4bec-8c83-53ed056817e5"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:00:19 crc kubenswrapper[4558]: I0120 18:00:19.456519 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f0222acb-beb0-4bec-8c83-53ed056817e5-ssh-key-edpm-compute-global" (OuterVolumeSpecName: "ssh-key-edpm-compute-global") pod "f0222acb-beb0-4bec-8c83-53ed056817e5" (UID: "f0222acb-beb0-4bec-8c83-53ed056817e5"). InnerVolumeSpecName "ssh-key-edpm-compute-global". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:00:19 crc kubenswrapper[4558]: I0120 18:00:19.539126 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f0222acb-beb0-4bec-8c83-53ed056817e5-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:00:19 crc kubenswrapper[4558]: I0120 18:00:19.539186 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/f0222acb-beb0-4bec-8c83-53ed056817e5-ssh-key-edpm-compute-global\") on node \"crc\" DevicePath \"\"" Jan 20 18:00:19 crc kubenswrapper[4558]: I0120 18:00:19.539209 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-79xvd\" (UniqueName: \"kubernetes.io/projected/f0222acb-beb0-4bec-8c83-53ed056817e5-kube-api-access-79xvd\") on node \"crc\" DevicePath \"\"" Jan 20 18:00:20 crc kubenswrapper[4558]: I0120 18:00:20.151374 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/install-os-edpm-compute-global-edpm-compute-global-mq7qb" event={"ID":"f0222acb-beb0-4bec-8c83-53ed056817e5","Type":"ContainerDied","Data":"7c504f98b6f999938f7782f0cc7899300d94715f913e33d49cc230c96a66b3d6"} Jan 20 18:00:20 crc kubenswrapper[4558]: I0120 18:00:20.151431 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7c504f98b6f999938f7782f0cc7899300d94715f913e33d49cc230c96a66b3d6" Jan 20 18:00:20 crc kubenswrapper[4558]: I0120 18:00:20.151466 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/install-os-edpm-compute-global-edpm-compute-global-mq7qb" Jan 20 18:00:20 crc kubenswrapper[4558]: I0120 18:00:20.443508 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/configure-os-edpm-compute-global-edpm-compute-global-dxdjf"] Jan 20 18:00:20 crc kubenswrapper[4558]: E0120 18:00:20.443817 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0222acb-beb0-4bec-8c83-53ed056817e5" containerName="install-os-edpm-compute-global-edpm-compute-global" Jan 20 18:00:20 crc kubenswrapper[4558]: I0120 18:00:20.443835 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0222acb-beb0-4bec-8c83-53ed056817e5" containerName="install-os-edpm-compute-global-edpm-compute-global" Jan 20 18:00:20 crc kubenswrapper[4558]: I0120 18:00:20.443975 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f0222acb-beb0-4bec-8c83-53ed056817e5" containerName="install-os-edpm-compute-global-edpm-compute-global" Jan 20 18:00:20 crc kubenswrapper[4558]: I0120 18:00:20.444446 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/configure-os-edpm-compute-global-edpm-compute-global-dxdjf" Jan 20 18:00:20 crc kubenswrapper[4558]: I0120 18:00:20.446042 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 18:00:20 crc kubenswrapper[4558]: I0120 18:00:20.446184 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"edpm-compute-global-dockercfg-wkdxg" Jan 20 18:00:20 crc kubenswrapper[4558]: I0120 18:00:20.447604 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 18:00:20 crc kubenswrapper[4558]: I0120 18:00:20.447815 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-edpm-compute-global" Jan 20 18:00:20 crc kubenswrapper[4558]: I0120 18:00:20.453975 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e7680571-76c8-40eb-8e59-fa68b86d77f7-inventory\") pod \"configure-os-edpm-compute-global-edpm-compute-global-dxdjf\" (UID: \"e7680571-76c8-40eb-8e59-fa68b86d77f7\") " pod="openstack-kuttl-tests/configure-os-edpm-compute-global-edpm-compute-global-dxdjf" Jan 20 18:00:20 crc kubenswrapper[4558]: I0120 18:00:20.454018 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/e7680571-76c8-40eb-8e59-fa68b86d77f7-ssh-key-edpm-compute-global\") pod \"configure-os-edpm-compute-global-edpm-compute-global-dxdjf\" (UID: \"e7680571-76c8-40eb-8e59-fa68b86d77f7\") " pod="openstack-kuttl-tests/configure-os-edpm-compute-global-edpm-compute-global-dxdjf" Jan 20 18:00:20 crc kubenswrapper[4558]: I0120 18:00:20.454115 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-btnnh\" (UniqueName: \"kubernetes.io/projected/e7680571-76c8-40eb-8e59-fa68b86d77f7-kube-api-access-btnnh\") pod \"configure-os-edpm-compute-global-edpm-compute-global-dxdjf\" (UID: \"e7680571-76c8-40eb-8e59-fa68b86d77f7\") " pod="openstack-kuttl-tests/configure-os-edpm-compute-global-edpm-compute-global-dxdjf" Jan 20 18:00:20 crc kubenswrapper[4558]: I0120 18:00:20.459357 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/configure-os-edpm-compute-global-edpm-compute-global-dxdjf"] Jan 20 18:00:20 crc kubenswrapper[4558]: I0120 18:00:20.555558 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/e7680571-76c8-40eb-8e59-fa68b86d77f7-ssh-key-edpm-compute-global\") pod \"configure-os-edpm-compute-global-edpm-compute-global-dxdjf\" (UID: \"e7680571-76c8-40eb-8e59-fa68b86d77f7\") " pod="openstack-kuttl-tests/configure-os-edpm-compute-global-edpm-compute-global-dxdjf" Jan 20 18:00:20 crc kubenswrapper[4558]: I0120 18:00:20.555789 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-btnnh\" (UniqueName: \"kubernetes.io/projected/e7680571-76c8-40eb-8e59-fa68b86d77f7-kube-api-access-btnnh\") pod \"configure-os-edpm-compute-global-edpm-compute-global-dxdjf\" (UID: \"e7680571-76c8-40eb-8e59-fa68b86d77f7\") " pod="openstack-kuttl-tests/configure-os-edpm-compute-global-edpm-compute-global-dxdjf" Jan 20 18:00:20 crc kubenswrapper[4558]: I0120 
18:00:20.556123 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e7680571-76c8-40eb-8e59-fa68b86d77f7-inventory\") pod \"configure-os-edpm-compute-global-edpm-compute-global-dxdjf\" (UID: \"e7680571-76c8-40eb-8e59-fa68b86d77f7\") " pod="openstack-kuttl-tests/configure-os-edpm-compute-global-edpm-compute-global-dxdjf" Jan 20 18:00:20 crc kubenswrapper[4558]: I0120 18:00:20.560115 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e7680571-76c8-40eb-8e59-fa68b86d77f7-inventory\") pod \"configure-os-edpm-compute-global-edpm-compute-global-dxdjf\" (UID: \"e7680571-76c8-40eb-8e59-fa68b86d77f7\") " pod="openstack-kuttl-tests/configure-os-edpm-compute-global-edpm-compute-global-dxdjf" Jan 20 18:00:20 crc kubenswrapper[4558]: I0120 18:00:20.561228 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/e7680571-76c8-40eb-8e59-fa68b86d77f7-ssh-key-edpm-compute-global\") pod \"configure-os-edpm-compute-global-edpm-compute-global-dxdjf\" (UID: \"e7680571-76c8-40eb-8e59-fa68b86d77f7\") " pod="openstack-kuttl-tests/configure-os-edpm-compute-global-edpm-compute-global-dxdjf" Jan 20 18:00:20 crc kubenswrapper[4558]: I0120 18:00:20.570741 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-btnnh\" (UniqueName: \"kubernetes.io/projected/e7680571-76c8-40eb-8e59-fa68b86d77f7-kube-api-access-btnnh\") pod \"configure-os-edpm-compute-global-edpm-compute-global-dxdjf\" (UID: \"e7680571-76c8-40eb-8e59-fa68b86d77f7\") " pod="openstack-kuttl-tests/configure-os-edpm-compute-global-edpm-compute-global-dxdjf" Jan 20 18:00:20 crc kubenswrapper[4558]: I0120 18:00:20.768767 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/configure-os-edpm-compute-global-edpm-compute-global-dxdjf" Jan 20 18:00:21 crc kubenswrapper[4558]: I0120 18:00:21.153998 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/configure-os-edpm-compute-global-edpm-compute-global-dxdjf"] Jan 20 18:00:22 crc kubenswrapper[4558]: I0120 18:00:22.181972 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/configure-os-edpm-compute-global-edpm-compute-global-dxdjf" event={"ID":"e7680571-76c8-40eb-8e59-fa68b86d77f7","Type":"ContainerStarted","Data":"69238bf780e86d6b248987e5399cbc45bef008fb69249c8d3b1152aecbe4b847"} Jan 20 18:00:23 crc kubenswrapper[4558]: I0120 18:00:23.193733 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/configure-os-edpm-compute-global-edpm-compute-global-dxdjf" event={"ID":"e7680571-76c8-40eb-8e59-fa68b86d77f7","Type":"ContainerStarted","Data":"74a7d32f8723f55fc502de179d8dd728dfde29b93a7ef96158a0b8cd466e4b57"} Jan 20 18:00:23 crc kubenswrapper[4558]: I0120 18:00:23.215105 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/configure-os-edpm-compute-global-edpm-compute-global-dxdjf" podStartSLOduration=2.44163088 podStartE2EDuration="3.215087536s" podCreationTimestamp="2026-01-20 18:00:20 +0000 UTC" firstStartedPulling="2026-01-20 18:00:21.167011369 +0000 UTC m=+4714.927349336" lastFinishedPulling="2026-01-20 18:00:21.940468015 +0000 UTC m=+4715.700805992" observedRunningTime="2026-01-20 18:00:23.208925343 +0000 UTC m=+4716.969263310" watchObservedRunningTime="2026-01-20 18:00:23.215087536 +0000 UTC m=+4716.975425504" Jan 20 18:00:24 crc kubenswrapper[4558]: I0120 18:00:24.201122 4558 generic.go:334] "Generic (PLEG): container finished" podID="e7680571-76c8-40eb-8e59-fa68b86d77f7" containerID="74a7d32f8723f55fc502de179d8dd728dfde29b93a7ef96158a0b8cd466e4b57" exitCode=0 Jan 20 18:00:24 crc kubenswrapper[4558]: I0120 18:00:24.201159 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/configure-os-edpm-compute-global-edpm-compute-global-dxdjf" event={"ID":"e7680571-76c8-40eb-8e59-fa68b86d77f7","Type":"ContainerDied","Data":"74a7d32f8723f55fc502de179d8dd728dfde29b93a7ef96158a0b8cd466e4b57"} Jan 20 18:00:25 crc kubenswrapper[4558]: I0120 18:00:25.456660 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/configure-os-edpm-compute-global-edpm-compute-global-dxdjf" Jan 20 18:00:25 crc kubenswrapper[4558]: I0120 18:00:25.528139 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/e7680571-76c8-40eb-8e59-fa68b86d77f7-ssh-key-edpm-compute-global\") pod \"e7680571-76c8-40eb-8e59-fa68b86d77f7\" (UID: \"e7680571-76c8-40eb-8e59-fa68b86d77f7\") " Jan 20 18:00:25 crc kubenswrapper[4558]: I0120 18:00:25.528461 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-btnnh\" (UniqueName: \"kubernetes.io/projected/e7680571-76c8-40eb-8e59-fa68b86d77f7-kube-api-access-btnnh\") pod \"e7680571-76c8-40eb-8e59-fa68b86d77f7\" (UID: \"e7680571-76c8-40eb-8e59-fa68b86d77f7\") " Jan 20 18:00:25 crc kubenswrapper[4558]: I0120 18:00:25.528503 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e7680571-76c8-40eb-8e59-fa68b86d77f7-inventory\") pod \"e7680571-76c8-40eb-8e59-fa68b86d77f7\" (UID: \"e7680571-76c8-40eb-8e59-fa68b86d77f7\") " Jan 20 18:00:25 crc kubenswrapper[4558]: I0120 18:00:25.532985 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7680571-76c8-40eb-8e59-fa68b86d77f7-kube-api-access-btnnh" (OuterVolumeSpecName: "kube-api-access-btnnh") pod "e7680571-76c8-40eb-8e59-fa68b86d77f7" (UID: "e7680571-76c8-40eb-8e59-fa68b86d77f7"). InnerVolumeSpecName "kube-api-access-btnnh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:00:25 crc kubenswrapper[4558]: I0120 18:00:25.545285 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7680571-76c8-40eb-8e59-fa68b86d77f7-ssh-key-edpm-compute-global" (OuterVolumeSpecName: "ssh-key-edpm-compute-global") pod "e7680571-76c8-40eb-8e59-fa68b86d77f7" (UID: "e7680571-76c8-40eb-8e59-fa68b86d77f7"). InnerVolumeSpecName "ssh-key-edpm-compute-global". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:00:25 crc kubenswrapper[4558]: I0120 18:00:25.545915 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7680571-76c8-40eb-8e59-fa68b86d77f7-inventory" (OuterVolumeSpecName: "inventory") pod "e7680571-76c8-40eb-8e59-fa68b86d77f7" (UID: "e7680571-76c8-40eb-8e59-fa68b86d77f7"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:00:25 crc kubenswrapper[4558]: I0120 18:00:25.629085 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/e7680571-76c8-40eb-8e59-fa68b86d77f7-ssh-key-edpm-compute-global\") on node \"crc\" DevicePath \"\"" Jan 20 18:00:25 crc kubenswrapper[4558]: I0120 18:00:25.629114 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-btnnh\" (UniqueName: \"kubernetes.io/projected/e7680571-76c8-40eb-8e59-fa68b86d77f7-kube-api-access-btnnh\") on node \"crc\" DevicePath \"\"" Jan 20 18:00:25 crc kubenswrapper[4558]: I0120 18:00:25.629130 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e7680571-76c8-40eb-8e59-fa68b86d77f7-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:00:26 crc kubenswrapper[4558]: I0120 18:00:26.217727 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/configure-os-edpm-compute-global-edpm-compute-global-dxdjf" event={"ID":"e7680571-76c8-40eb-8e59-fa68b86d77f7","Type":"ContainerDied","Data":"69238bf780e86d6b248987e5399cbc45bef008fb69249c8d3b1152aecbe4b847"} Jan 20 18:00:26 crc kubenswrapper[4558]: I0120 18:00:26.217770 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="69238bf780e86d6b248987e5399cbc45bef008fb69249c8d3b1152aecbe4b847" Jan 20 18:00:26 crc kubenswrapper[4558]: I0120 18:00:26.217786 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/configure-os-edpm-compute-global-edpm-compute-global-dxdjf" Jan 20 18:00:26 crc kubenswrapper[4558]: I0120 18:00:26.278671 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/run-os-edpm-compute-global-edpm-compute-global-vgfbg"] Jan 20 18:00:26 crc kubenswrapper[4558]: E0120 18:00:26.279356 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7680571-76c8-40eb-8e59-fa68b86d77f7" containerName="configure-os-edpm-compute-global-edpm-compute-global" Jan 20 18:00:26 crc kubenswrapper[4558]: I0120 18:00:26.279379 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7680571-76c8-40eb-8e59-fa68b86d77f7" containerName="configure-os-edpm-compute-global-edpm-compute-global" Jan 20 18:00:26 crc kubenswrapper[4558]: I0120 18:00:26.279716 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e7680571-76c8-40eb-8e59-fa68b86d77f7" containerName="configure-os-edpm-compute-global-edpm-compute-global" Jan 20 18:00:26 crc kubenswrapper[4558]: I0120 18:00:26.281665 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/run-os-edpm-compute-global-edpm-compute-global-vgfbg" Jan 20 18:00:26 crc kubenswrapper[4558]: I0120 18:00:26.283723 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"edpm-compute-global-dockercfg-wkdxg" Jan 20 18:00:26 crc kubenswrapper[4558]: I0120 18:00:26.283975 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 18:00:26 crc kubenswrapper[4558]: I0120 18:00:26.284273 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-edpm-compute-global" Jan 20 18:00:26 crc kubenswrapper[4558]: I0120 18:00:26.286691 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 18:00:26 crc kubenswrapper[4558]: I0120 18:00:26.293424 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/run-os-edpm-compute-global-edpm-compute-global-vgfbg"] Jan 20 18:00:26 crc kubenswrapper[4558]: I0120 18:00:26.442657 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bf27h\" (UniqueName: \"kubernetes.io/projected/2b196ecf-e0ae-4495-96df-ed2423adcc58-kube-api-access-bf27h\") pod \"run-os-edpm-compute-global-edpm-compute-global-vgfbg\" (UID: \"2b196ecf-e0ae-4495-96df-ed2423adcc58\") " pod="openstack-kuttl-tests/run-os-edpm-compute-global-edpm-compute-global-vgfbg" Jan 20 18:00:26 crc kubenswrapper[4558]: I0120 18:00:26.442757 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2b196ecf-e0ae-4495-96df-ed2423adcc58-inventory\") pod \"run-os-edpm-compute-global-edpm-compute-global-vgfbg\" (UID: \"2b196ecf-e0ae-4495-96df-ed2423adcc58\") " pod="openstack-kuttl-tests/run-os-edpm-compute-global-edpm-compute-global-vgfbg" Jan 20 18:00:26 crc kubenswrapper[4558]: I0120 18:00:26.442802 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/2b196ecf-e0ae-4495-96df-ed2423adcc58-ssh-key-edpm-compute-global\") pod \"run-os-edpm-compute-global-edpm-compute-global-vgfbg\" (UID: \"2b196ecf-e0ae-4495-96df-ed2423adcc58\") " pod="openstack-kuttl-tests/run-os-edpm-compute-global-edpm-compute-global-vgfbg" Jan 20 18:00:26 crc kubenswrapper[4558]: I0120 18:00:26.543852 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bf27h\" (UniqueName: \"kubernetes.io/projected/2b196ecf-e0ae-4495-96df-ed2423adcc58-kube-api-access-bf27h\") pod \"run-os-edpm-compute-global-edpm-compute-global-vgfbg\" (UID: \"2b196ecf-e0ae-4495-96df-ed2423adcc58\") " pod="openstack-kuttl-tests/run-os-edpm-compute-global-edpm-compute-global-vgfbg" Jan 20 18:00:26 crc kubenswrapper[4558]: I0120 18:00:26.543951 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2b196ecf-e0ae-4495-96df-ed2423adcc58-inventory\") pod \"run-os-edpm-compute-global-edpm-compute-global-vgfbg\" (UID: \"2b196ecf-e0ae-4495-96df-ed2423adcc58\") " pod="openstack-kuttl-tests/run-os-edpm-compute-global-edpm-compute-global-vgfbg" Jan 20 18:00:26 crc kubenswrapper[4558]: I0120 18:00:26.544006 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/2b196ecf-e0ae-4495-96df-ed2423adcc58-ssh-key-edpm-compute-global\") pod \"run-os-edpm-compute-global-edpm-compute-global-vgfbg\" (UID: \"2b196ecf-e0ae-4495-96df-ed2423adcc58\") " pod="openstack-kuttl-tests/run-os-edpm-compute-global-edpm-compute-global-vgfbg" Jan 20 18:00:26 crc kubenswrapper[4558]: I0120 18:00:26.548833 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/2b196ecf-e0ae-4495-96df-ed2423adcc58-ssh-key-edpm-compute-global\") pod \"run-os-edpm-compute-global-edpm-compute-global-vgfbg\" (UID: \"2b196ecf-e0ae-4495-96df-ed2423adcc58\") " pod="openstack-kuttl-tests/run-os-edpm-compute-global-edpm-compute-global-vgfbg" Jan 20 18:00:26 crc kubenswrapper[4558]: I0120 18:00:26.550270 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2b196ecf-e0ae-4495-96df-ed2423adcc58-inventory\") pod \"run-os-edpm-compute-global-edpm-compute-global-vgfbg\" (UID: \"2b196ecf-e0ae-4495-96df-ed2423adcc58\") " pod="openstack-kuttl-tests/run-os-edpm-compute-global-edpm-compute-global-vgfbg" Jan 20 18:00:26 crc kubenswrapper[4558]: I0120 18:00:26.558406 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bf27h\" (UniqueName: \"kubernetes.io/projected/2b196ecf-e0ae-4495-96df-ed2423adcc58-kube-api-access-bf27h\") pod \"run-os-edpm-compute-global-edpm-compute-global-vgfbg\" (UID: \"2b196ecf-e0ae-4495-96df-ed2423adcc58\") " pod="openstack-kuttl-tests/run-os-edpm-compute-global-edpm-compute-global-vgfbg" Jan 20 18:00:26 crc kubenswrapper[4558]: I0120 18:00:26.608486 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/run-os-edpm-compute-global-edpm-compute-global-vgfbg" Jan 20 18:00:27 crc kubenswrapper[4558]: I0120 18:00:27.010068 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/run-os-edpm-compute-global-edpm-compute-global-vgfbg"] Jan 20 18:00:27 crc kubenswrapper[4558]: I0120 18:00:27.229594 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/run-os-edpm-compute-global-edpm-compute-global-vgfbg" event={"ID":"2b196ecf-e0ae-4495-96df-ed2423adcc58","Type":"ContainerStarted","Data":"576bec5a05558dcf5ce599dce73374de59458bd5eaef278413ae9b906dc4364e"} Jan 20 18:00:27 crc kubenswrapper[4558]: I0120 18:00:27.330148 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 18:00:27 crc kubenswrapper[4558]: I0120 18:00:27.330548 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 18:00:27 crc kubenswrapper[4558]: I0120 18:00:27.330611 4558 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" Jan 20 18:00:27 crc kubenswrapper[4558]: I0120 18:00:27.331359 4558 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"06773f88c6859f6cd4428c9c5cf74a790c0f5de8a90ecadbaf3e2b35d66f98a5"} pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 20 18:00:27 crc kubenswrapper[4558]: I0120 18:00:27.331433 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" containerID="cri-o://06773f88c6859f6cd4428c9c5cf74a790c0f5de8a90ecadbaf3e2b35d66f98a5" gracePeriod=600 Jan 20 18:00:28 crc kubenswrapper[4558]: I0120 18:00:28.244717 4558 generic.go:334] "Generic (PLEG): container finished" podID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerID="06773f88c6859f6cd4428c9c5cf74a790c0f5de8a90ecadbaf3e2b35d66f98a5" exitCode=0 Jan 20 18:00:28 crc kubenswrapper[4558]: I0120 18:00:28.244792 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerDied","Data":"06773f88c6859f6cd4428c9c5cf74a790c0f5de8a90ecadbaf3e2b35d66f98a5"} Jan 20 18:00:28 crc kubenswrapper[4558]: I0120 18:00:28.245144 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerStarted","Data":"6b02d31ec11fe5435af988e35303a6f66b5afa5ef952ce5f7cc4b3cd5f4d9497"} Jan 20 18:00:28 crc kubenswrapper[4558]: I0120 18:00:28.245275 4558 scope.go:117] "RemoveContainer" containerID="4ea7fa61d8b21007a044345acec89fcebe56c2921cf0f76ff2002f44bdc88ede" Jan 20 
18:00:28 crc kubenswrapper[4558]: I0120 18:00:28.250839 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/run-os-edpm-compute-global-edpm-compute-global-vgfbg" event={"ID":"2b196ecf-e0ae-4495-96df-ed2423adcc58","Type":"ContainerStarted","Data":"bf1073626acf9931e1253c7cee3519239421ae4bd985a32c838e184014ba91cd"} Jan 20 18:00:28 crc kubenswrapper[4558]: I0120 18:00:28.286964 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/run-os-edpm-compute-global-edpm-compute-global-vgfbg" podStartSLOduration=1.770701119 podStartE2EDuration="2.286940594s" podCreationTimestamp="2026-01-20 18:00:26 +0000 UTC" firstStartedPulling="2026-01-20 18:00:27.020334696 +0000 UTC m=+4720.780672662" lastFinishedPulling="2026-01-20 18:00:27.536574171 +0000 UTC m=+4721.296912137" observedRunningTime="2026-01-20 18:00:28.283819696 +0000 UTC m=+4722.044157664" watchObservedRunningTime="2026-01-20 18:00:28.286940594 +0000 UTC m=+4722.047278561" Jan 20 18:00:29 crc kubenswrapper[4558]: I0120 18:00:29.267179 4558 generic.go:334] "Generic (PLEG): container finished" podID="2b196ecf-e0ae-4495-96df-ed2423adcc58" containerID="bf1073626acf9931e1253c7cee3519239421ae4bd985a32c838e184014ba91cd" exitCode=0 Jan 20 18:00:29 crc kubenswrapper[4558]: I0120 18:00:29.267459 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/run-os-edpm-compute-global-edpm-compute-global-vgfbg" event={"ID":"2b196ecf-e0ae-4495-96df-ed2423adcc58","Type":"ContainerDied","Data":"bf1073626acf9931e1253c7cee3519239421ae4bd985a32c838e184014ba91cd"} Jan 20 18:00:29 crc kubenswrapper[4558]: I0120 18:00:29.412604 4558 scope.go:117] "RemoveContainer" containerID="d1d7d13f1610163a6123d101b3e473bb3892de0996d741588af150949c4555f7" Jan 20 18:00:29 crc kubenswrapper[4558]: I0120 18:00:29.436806 4558 scope.go:117] "RemoveContainer" containerID="5c9f046a891a29b5a5cbad4772933fbe8340e6d505155496bf492f03afe1f344" Jan 20 18:00:29 crc kubenswrapper[4558]: I0120 18:00:29.465784 4558 scope.go:117] "RemoveContainer" containerID="e08ded0dee2ee278a2c6a546bbe6145915173951c4cab2b15c0dd0b32eee2d30" Jan 20 18:00:29 crc kubenswrapper[4558]: I0120 18:00:29.487431 4558 scope.go:117] "RemoveContainer" containerID="190e0e153cb7f3ebee2da00cdb98b714cac30a72af17afd576aa0adf5db3eec6" Jan 20 18:00:29 crc kubenswrapper[4558]: I0120 18:00:29.508394 4558 scope.go:117] "RemoveContainer" containerID="4ab8c441eeeb6b4f69bfa355c2d0780124143e8277f4f2d7413b6e677705320c" Jan 20 18:00:29 crc kubenswrapper[4558]: I0120 18:00:29.530091 4558 scope.go:117] "RemoveContainer" containerID="415bf18726a76e61bd79b39ff76bbb5fe83ed4680a0f55bfbff9af2eed5f49a4" Jan 20 18:00:29 crc kubenswrapper[4558]: I0120 18:00:29.546666 4558 scope.go:117] "RemoveContainer" containerID="c7bc5822daf31c31196384db3c359c6efebf789e0fc60bebb6e38c4e4db45622" Jan 20 18:00:29 crc kubenswrapper[4558]: I0120 18:00:29.571778 4558 scope.go:117] "RemoveContainer" containerID="9fcb6151d92eb87c7938a28ca1addce7759bcc22899dd7f45adfd46c5466f653" Jan 20 18:00:31 crc kubenswrapper[4558]: I0120 18:00:31.021694 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/run-os-edpm-compute-global-edpm-compute-global-vgfbg" Jan 20 18:00:31 crc kubenswrapper[4558]: I0120 18:00:31.117997 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf27h\" (UniqueName: \"kubernetes.io/projected/2b196ecf-e0ae-4495-96df-ed2423adcc58-kube-api-access-bf27h\") pod \"2b196ecf-e0ae-4495-96df-ed2423adcc58\" (UID: \"2b196ecf-e0ae-4495-96df-ed2423adcc58\") " Jan 20 18:00:31 crc kubenswrapper[4558]: I0120 18:00:31.118071 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2b196ecf-e0ae-4495-96df-ed2423adcc58-inventory\") pod \"2b196ecf-e0ae-4495-96df-ed2423adcc58\" (UID: \"2b196ecf-e0ae-4495-96df-ed2423adcc58\") " Jan 20 18:00:31 crc kubenswrapper[4558]: I0120 18:00:31.118246 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/2b196ecf-e0ae-4495-96df-ed2423adcc58-ssh-key-edpm-compute-global\") pod \"2b196ecf-e0ae-4495-96df-ed2423adcc58\" (UID: \"2b196ecf-e0ae-4495-96df-ed2423adcc58\") " Jan 20 18:00:31 crc kubenswrapper[4558]: I0120 18:00:31.122998 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2b196ecf-e0ae-4495-96df-ed2423adcc58-kube-api-access-bf27h" (OuterVolumeSpecName: "kube-api-access-bf27h") pod "2b196ecf-e0ae-4495-96df-ed2423adcc58" (UID: "2b196ecf-e0ae-4495-96df-ed2423adcc58"). InnerVolumeSpecName "kube-api-access-bf27h". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:00:31 crc kubenswrapper[4558]: I0120 18:00:31.136755 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b196ecf-e0ae-4495-96df-ed2423adcc58-ssh-key-edpm-compute-global" (OuterVolumeSpecName: "ssh-key-edpm-compute-global") pod "2b196ecf-e0ae-4495-96df-ed2423adcc58" (UID: "2b196ecf-e0ae-4495-96df-ed2423adcc58"). InnerVolumeSpecName "ssh-key-edpm-compute-global". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:00:31 crc kubenswrapper[4558]: I0120 18:00:31.137142 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b196ecf-e0ae-4495-96df-ed2423adcc58-inventory" (OuterVolumeSpecName: "inventory") pod "2b196ecf-e0ae-4495-96df-ed2423adcc58" (UID: "2b196ecf-e0ae-4495-96df-ed2423adcc58"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:00:31 crc kubenswrapper[4558]: I0120 18:00:31.220455 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2b196ecf-e0ae-4495-96df-ed2423adcc58-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:00:31 crc kubenswrapper[4558]: I0120 18:00:31.220784 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/2b196ecf-e0ae-4495-96df-ed2423adcc58-ssh-key-edpm-compute-global\") on node \"crc\" DevicePath \"\"" Jan 20 18:00:31 crc kubenswrapper[4558]: I0120 18:00:31.220800 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf27h\" (UniqueName: \"kubernetes.io/projected/2b196ecf-e0ae-4495-96df-ed2423adcc58-kube-api-access-bf27h\") on node \"crc\" DevicePath \"\"" Jan 20 18:00:31 crc kubenswrapper[4558]: I0120 18:00:31.290342 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/run-os-edpm-compute-global-edpm-compute-global-vgfbg" event={"ID":"2b196ecf-e0ae-4495-96df-ed2423adcc58","Type":"ContainerDied","Data":"576bec5a05558dcf5ce599dce73374de59458bd5eaef278413ae9b906dc4364e"} Jan 20 18:00:31 crc kubenswrapper[4558]: I0120 18:00:31.290388 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/run-os-edpm-compute-global-edpm-compute-global-vgfbg" Jan 20 18:00:31 crc kubenswrapper[4558]: I0120 18:00:31.290391 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="576bec5a05558dcf5ce599dce73374de59458bd5eaef278413ae9b906dc4364e" Jan 20 18:00:31 crc kubenswrapper[4558]: I0120 18:00:31.346628 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/install-certs-edpm-compute-global-edpm-compute-global-xl8pq"] Jan 20 18:00:31 crc kubenswrapper[4558]: E0120 18:00:31.346989 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b196ecf-e0ae-4495-96df-ed2423adcc58" containerName="run-os-edpm-compute-global-edpm-compute-global" Jan 20 18:00:31 crc kubenswrapper[4558]: I0120 18:00:31.347010 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b196ecf-e0ae-4495-96df-ed2423adcc58" containerName="run-os-edpm-compute-global-edpm-compute-global" Jan 20 18:00:31 crc kubenswrapper[4558]: I0120 18:00:31.347218 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="2b196ecf-e0ae-4495-96df-ed2423adcc58" containerName="run-os-edpm-compute-global-edpm-compute-global" Jan 20 18:00:31 crc kubenswrapper[4558]: I0120 18:00:31.347760 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/install-certs-edpm-compute-global-edpm-compute-global-xl8pq" Jan 20 18:00:31 crc kubenswrapper[4558]: I0120 18:00:31.349917 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"combined-ca-bundle" Jan 20 18:00:31 crc kubenswrapper[4558]: I0120 18:00:31.350076 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"edpm-compute-global-dockercfg-wkdxg" Jan 20 18:00:31 crc kubenswrapper[4558]: I0120 18:00:31.350887 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 18:00:31 crc kubenswrapper[4558]: I0120 18:00:31.350896 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 18:00:31 crc kubenswrapper[4558]: I0120 18:00:31.351000 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-edpm-compute-global" Jan 20 18:00:31 crc kubenswrapper[4558]: I0120 18:00:31.354678 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/install-certs-edpm-compute-global-edpm-compute-global-xl8pq"] Jan 20 18:00:31 crc kubenswrapper[4558]: I0120 18:00:31.423255 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-global-service-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71e1f655-faf6-4b2e-b4ec-506c6b761638-custom-global-service-combined-ca-bundle\") pod \"install-certs-edpm-compute-global-edpm-compute-global-xl8pq\" (UID: \"71e1f655-faf6-4b2e-b4ec-506c6b761638\") " pod="openstack-kuttl-tests/install-certs-edpm-compute-global-edpm-compute-global-xl8pq" Jan 20 18:00:31 crc kubenswrapper[4558]: I0120 18:00:31.423329 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71e1f655-faf6-4b2e-b4ec-506c6b761638-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-compute-global-edpm-compute-global-xl8pq\" (UID: \"71e1f655-faf6-4b2e-b4ec-506c6b761638\") " pod="openstack-kuttl-tests/install-certs-edpm-compute-global-edpm-compute-global-xl8pq" Jan 20 18:00:31 crc kubenswrapper[4558]: I0120 18:00:31.423368 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b2jql\" (UniqueName: \"kubernetes.io/projected/71e1f655-faf6-4b2e-b4ec-506c6b761638-kube-api-access-b2jql\") pod \"install-certs-edpm-compute-global-edpm-compute-global-xl8pq\" (UID: \"71e1f655-faf6-4b2e-b4ec-506c6b761638\") " pod="openstack-kuttl-tests/install-certs-edpm-compute-global-edpm-compute-global-xl8pq" Jan 20 18:00:31 crc kubenswrapper[4558]: I0120 18:00:31.423416 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/71e1f655-faf6-4b2e-b4ec-506c6b761638-inventory\") pod \"install-certs-edpm-compute-global-edpm-compute-global-xl8pq\" (UID: \"71e1f655-faf6-4b2e-b4ec-506c6b761638\") " pod="openstack-kuttl-tests/install-certs-edpm-compute-global-edpm-compute-global-xl8pq" Jan 20 18:00:31 crc kubenswrapper[4558]: I0120 18:00:31.423477 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: 
\"kubernetes.io/secret/71e1f655-faf6-4b2e-b4ec-506c6b761638-ssh-key-edpm-compute-global\") pod \"install-certs-edpm-compute-global-edpm-compute-global-xl8pq\" (UID: \"71e1f655-faf6-4b2e-b4ec-506c6b761638\") " pod="openstack-kuttl-tests/install-certs-edpm-compute-global-edpm-compute-global-xl8pq" Jan 20 18:00:31 crc kubenswrapper[4558]: I0120 18:00:31.423542 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71e1f655-faf6-4b2e-b4ec-506c6b761638-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-compute-global-edpm-compute-global-xl8pq\" (UID: \"71e1f655-faf6-4b2e-b4ec-506c6b761638\") " pod="openstack-kuttl-tests/install-certs-edpm-compute-global-edpm-compute-global-xl8pq" Jan 20 18:00:31 crc kubenswrapper[4558]: I0120 18:00:31.423576 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71e1f655-faf6-4b2e-b4ec-506c6b761638-neutron-dhcp-combined-ca-bundle\") pod \"install-certs-edpm-compute-global-edpm-compute-global-xl8pq\" (UID: \"71e1f655-faf6-4b2e-b4ec-506c6b761638\") " pod="openstack-kuttl-tests/install-certs-edpm-compute-global-edpm-compute-global-xl8pq" Jan 20 18:00:31 crc kubenswrapper[4558]: I0120 18:00:31.423638 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71e1f655-faf6-4b2e-b4ec-506c6b761638-ovn-combined-ca-bundle\") pod \"install-certs-edpm-compute-global-edpm-compute-global-xl8pq\" (UID: \"71e1f655-faf6-4b2e-b4ec-506c6b761638\") " pod="openstack-kuttl-tests/install-certs-edpm-compute-global-edpm-compute-global-xl8pq" Jan 20 18:00:31 crc kubenswrapper[4558]: I0120 18:00:31.423655 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71e1f655-faf6-4b2e-b4ec-506c6b761638-nova-combined-ca-bundle\") pod \"install-certs-edpm-compute-global-edpm-compute-global-xl8pq\" (UID: \"71e1f655-faf6-4b2e-b4ec-506c6b761638\") " pod="openstack-kuttl-tests/install-certs-edpm-compute-global-edpm-compute-global-xl8pq" Jan 20 18:00:31 crc kubenswrapper[4558]: I0120 18:00:31.423724 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71e1f655-faf6-4b2e-b4ec-506c6b761638-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-compute-global-edpm-compute-global-xl8pq\" (UID: \"71e1f655-faf6-4b2e-b4ec-506c6b761638\") " pod="openstack-kuttl-tests/install-certs-edpm-compute-global-edpm-compute-global-xl8pq" Jan 20 18:00:31 crc kubenswrapper[4558]: I0120 18:00:31.423756 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71e1f655-faf6-4b2e-b4ec-506c6b761638-neutron-ovn-combined-ca-bundle\") pod \"install-certs-edpm-compute-global-edpm-compute-global-xl8pq\" (UID: \"71e1f655-faf6-4b2e-b4ec-506c6b761638\") " pod="openstack-kuttl-tests/install-certs-edpm-compute-global-edpm-compute-global-xl8pq" Jan 20 18:00:31 crc kubenswrapper[4558]: I0120 18:00:31.423782 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/71e1f655-faf6-4b2e-b4ec-506c6b761638-neutron-sriov-combined-ca-bundle\") pod \"install-certs-edpm-compute-global-edpm-compute-global-xl8pq\" (UID: \"71e1f655-faf6-4b2e-b4ec-506c6b761638\") " pod="openstack-kuttl-tests/install-certs-edpm-compute-global-edpm-compute-global-xl8pq" Jan 20 18:00:31 crc kubenswrapper[4558]: I0120 18:00:31.525411 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71e1f655-faf6-4b2e-b4ec-506c6b761638-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-compute-global-edpm-compute-global-xl8pq\" (UID: \"71e1f655-faf6-4b2e-b4ec-506c6b761638\") " pod="openstack-kuttl-tests/install-certs-edpm-compute-global-edpm-compute-global-xl8pq" Jan 20 18:00:31 crc kubenswrapper[4558]: I0120 18:00:31.525469 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71e1f655-faf6-4b2e-b4ec-506c6b761638-neutron-ovn-combined-ca-bundle\") pod \"install-certs-edpm-compute-global-edpm-compute-global-xl8pq\" (UID: \"71e1f655-faf6-4b2e-b4ec-506c6b761638\") " pod="openstack-kuttl-tests/install-certs-edpm-compute-global-edpm-compute-global-xl8pq" Jan 20 18:00:31 crc kubenswrapper[4558]: I0120 18:00:31.525506 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71e1f655-faf6-4b2e-b4ec-506c6b761638-neutron-sriov-combined-ca-bundle\") pod \"install-certs-edpm-compute-global-edpm-compute-global-xl8pq\" (UID: \"71e1f655-faf6-4b2e-b4ec-506c6b761638\") " pod="openstack-kuttl-tests/install-certs-edpm-compute-global-edpm-compute-global-xl8pq" Jan 20 18:00:31 crc kubenswrapper[4558]: I0120 18:00:31.525592 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-global-service-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71e1f655-faf6-4b2e-b4ec-506c6b761638-custom-global-service-combined-ca-bundle\") pod \"install-certs-edpm-compute-global-edpm-compute-global-xl8pq\" (UID: \"71e1f655-faf6-4b2e-b4ec-506c6b761638\") " pod="openstack-kuttl-tests/install-certs-edpm-compute-global-edpm-compute-global-xl8pq" Jan 20 18:00:31 crc kubenswrapper[4558]: I0120 18:00:31.525620 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71e1f655-faf6-4b2e-b4ec-506c6b761638-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-compute-global-edpm-compute-global-xl8pq\" (UID: \"71e1f655-faf6-4b2e-b4ec-506c6b761638\") " pod="openstack-kuttl-tests/install-certs-edpm-compute-global-edpm-compute-global-xl8pq" Jan 20 18:00:31 crc kubenswrapper[4558]: I0120 18:00:31.525644 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b2jql\" (UniqueName: \"kubernetes.io/projected/71e1f655-faf6-4b2e-b4ec-506c6b761638-kube-api-access-b2jql\") pod \"install-certs-edpm-compute-global-edpm-compute-global-xl8pq\" (UID: \"71e1f655-faf6-4b2e-b4ec-506c6b761638\") " pod="openstack-kuttl-tests/install-certs-edpm-compute-global-edpm-compute-global-xl8pq" Jan 20 18:00:31 crc kubenswrapper[4558]: I0120 18:00:31.525686 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/71e1f655-faf6-4b2e-b4ec-506c6b761638-inventory\") pod 
\"install-certs-edpm-compute-global-edpm-compute-global-xl8pq\" (UID: \"71e1f655-faf6-4b2e-b4ec-506c6b761638\") " pod="openstack-kuttl-tests/install-certs-edpm-compute-global-edpm-compute-global-xl8pq" Jan 20 18:00:31 crc kubenswrapper[4558]: I0120 18:00:31.525732 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/71e1f655-faf6-4b2e-b4ec-506c6b761638-ssh-key-edpm-compute-global\") pod \"install-certs-edpm-compute-global-edpm-compute-global-xl8pq\" (UID: \"71e1f655-faf6-4b2e-b4ec-506c6b761638\") " pod="openstack-kuttl-tests/install-certs-edpm-compute-global-edpm-compute-global-xl8pq" Jan 20 18:00:31 crc kubenswrapper[4558]: I0120 18:00:31.525784 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71e1f655-faf6-4b2e-b4ec-506c6b761638-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-compute-global-edpm-compute-global-xl8pq\" (UID: \"71e1f655-faf6-4b2e-b4ec-506c6b761638\") " pod="openstack-kuttl-tests/install-certs-edpm-compute-global-edpm-compute-global-xl8pq" Jan 20 18:00:31 crc kubenswrapper[4558]: I0120 18:00:31.525813 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71e1f655-faf6-4b2e-b4ec-506c6b761638-neutron-dhcp-combined-ca-bundle\") pod \"install-certs-edpm-compute-global-edpm-compute-global-xl8pq\" (UID: \"71e1f655-faf6-4b2e-b4ec-506c6b761638\") " pod="openstack-kuttl-tests/install-certs-edpm-compute-global-edpm-compute-global-xl8pq" Jan 20 18:00:31 crc kubenswrapper[4558]: I0120 18:00:31.525859 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71e1f655-faf6-4b2e-b4ec-506c6b761638-ovn-combined-ca-bundle\") pod \"install-certs-edpm-compute-global-edpm-compute-global-xl8pq\" (UID: \"71e1f655-faf6-4b2e-b4ec-506c6b761638\") " pod="openstack-kuttl-tests/install-certs-edpm-compute-global-edpm-compute-global-xl8pq" Jan 20 18:00:31 crc kubenswrapper[4558]: I0120 18:00:31.525883 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71e1f655-faf6-4b2e-b4ec-506c6b761638-nova-combined-ca-bundle\") pod \"install-certs-edpm-compute-global-edpm-compute-global-xl8pq\" (UID: \"71e1f655-faf6-4b2e-b4ec-506c6b761638\") " pod="openstack-kuttl-tests/install-certs-edpm-compute-global-edpm-compute-global-xl8pq" Jan 20 18:00:31 crc kubenswrapper[4558]: I0120 18:00:31.531267 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/71e1f655-faf6-4b2e-b4ec-506c6b761638-ssh-key-edpm-compute-global\") pod \"install-certs-edpm-compute-global-edpm-compute-global-xl8pq\" (UID: \"71e1f655-faf6-4b2e-b4ec-506c6b761638\") " pod="openstack-kuttl-tests/install-certs-edpm-compute-global-edpm-compute-global-xl8pq" Jan 20 18:00:31 crc kubenswrapper[4558]: I0120 18:00:31.531424 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71e1f655-faf6-4b2e-b4ec-506c6b761638-nova-combined-ca-bundle\") pod \"install-certs-edpm-compute-global-edpm-compute-global-xl8pq\" (UID: \"71e1f655-faf6-4b2e-b4ec-506c6b761638\") " 
pod="openstack-kuttl-tests/install-certs-edpm-compute-global-edpm-compute-global-xl8pq" Jan 20 18:00:31 crc kubenswrapper[4558]: I0120 18:00:31.532056 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71e1f655-faf6-4b2e-b4ec-506c6b761638-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-compute-global-edpm-compute-global-xl8pq\" (UID: \"71e1f655-faf6-4b2e-b4ec-506c6b761638\") " pod="openstack-kuttl-tests/install-certs-edpm-compute-global-edpm-compute-global-xl8pq" Jan 20 18:00:31 crc kubenswrapper[4558]: I0120 18:00:31.532299 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71e1f655-faf6-4b2e-b4ec-506c6b761638-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-compute-global-edpm-compute-global-xl8pq\" (UID: \"71e1f655-faf6-4b2e-b4ec-506c6b761638\") " pod="openstack-kuttl-tests/install-certs-edpm-compute-global-edpm-compute-global-xl8pq" Jan 20 18:00:31 crc kubenswrapper[4558]: I0120 18:00:31.532367 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/71e1f655-faf6-4b2e-b4ec-506c6b761638-inventory\") pod \"install-certs-edpm-compute-global-edpm-compute-global-xl8pq\" (UID: \"71e1f655-faf6-4b2e-b4ec-506c6b761638\") " pod="openstack-kuttl-tests/install-certs-edpm-compute-global-edpm-compute-global-xl8pq" Jan 20 18:00:31 crc kubenswrapper[4558]: I0120 18:00:31.532488 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-global-service-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71e1f655-faf6-4b2e-b4ec-506c6b761638-custom-global-service-combined-ca-bundle\") pod \"install-certs-edpm-compute-global-edpm-compute-global-xl8pq\" (UID: \"71e1f655-faf6-4b2e-b4ec-506c6b761638\") " pod="openstack-kuttl-tests/install-certs-edpm-compute-global-edpm-compute-global-xl8pq" Jan 20 18:00:31 crc kubenswrapper[4558]: I0120 18:00:31.532709 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71e1f655-faf6-4b2e-b4ec-506c6b761638-neutron-dhcp-combined-ca-bundle\") pod \"install-certs-edpm-compute-global-edpm-compute-global-xl8pq\" (UID: \"71e1f655-faf6-4b2e-b4ec-506c6b761638\") " pod="openstack-kuttl-tests/install-certs-edpm-compute-global-edpm-compute-global-xl8pq" Jan 20 18:00:31 crc kubenswrapper[4558]: I0120 18:00:31.532856 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71e1f655-faf6-4b2e-b4ec-506c6b761638-neutron-ovn-combined-ca-bundle\") pod \"install-certs-edpm-compute-global-edpm-compute-global-xl8pq\" (UID: \"71e1f655-faf6-4b2e-b4ec-506c6b761638\") " pod="openstack-kuttl-tests/install-certs-edpm-compute-global-edpm-compute-global-xl8pq" Jan 20 18:00:31 crc kubenswrapper[4558]: I0120 18:00:31.532944 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71e1f655-faf6-4b2e-b4ec-506c6b761638-ovn-combined-ca-bundle\") pod \"install-certs-edpm-compute-global-edpm-compute-global-xl8pq\" (UID: \"71e1f655-faf6-4b2e-b4ec-506c6b761638\") " pod="openstack-kuttl-tests/install-certs-edpm-compute-global-edpm-compute-global-xl8pq" Jan 20 18:00:31 crc kubenswrapper[4558]: I0120 18:00:31.534048 4558 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71e1f655-faf6-4b2e-b4ec-506c6b761638-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-compute-global-edpm-compute-global-xl8pq\" (UID: \"71e1f655-faf6-4b2e-b4ec-506c6b761638\") " pod="openstack-kuttl-tests/install-certs-edpm-compute-global-edpm-compute-global-xl8pq" Jan 20 18:00:31 crc kubenswrapper[4558]: I0120 18:00:31.537794 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71e1f655-faf6-4b2e-b4ec-506c6b761638-neutron-sriov-combined-ca-bundle\") pod \"install-certs-edpm-compute-global-edpm-compute-global-xl8pq\" (UID: \"71e1f655-faf6-4b2e-b4ec-506c6b761638\") " pod="openstack-kuttl-tests/install-certs-edpm-compute-global-edpm-compute-global-xl8pq" Jan 20 18:00:31 crc kubenswrapper[4558]: I0120 18:00:31.540821 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b2jql\" (UniqueName: \"kubernetes.io/projected/71e1f655-faf6-4b2e-b4ec-506c6b761638-kube-api-access-b2jql\") pod \"install-certs-edpm-compute-global-edpm-compute-global-xl8pq\" (UID: \"71e1f655-faf6-4b2e-b4ec-506c6b761638\") " pod="openstack-kuttl-tests/install-certs-edpm-compute-global-edpm-compute-global-xl8pq" Jan 20 18:00:31 crc kubenswrapper[4558]: I0120 18:00:31.661657 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/install-certs-edpm-compute-global-edpm-compute-global-xl8pq" Jan 20 18:00:32 crc kubenswrapper[4558]: I0120 18:00:32.047033 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/install-certs-edpm-compute-global-edpm-compute-global-xl8pq"] Jan 20 18:00:32 crc kubenswrapper[4558]: W0120 18:00:32.199966 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod71e1f655_faf6_4b2e_b4ec_506c6b761638.slice/crio-ce633703657f172646158d9da594bbe2808e265c550fd4bae90c05a27ba28c58 WatchSource:0}: Error finding container ce633703657f172646158d9da594bbe2808e265c550fd4bae90c05a27ba28c58: Status 404 returned error can't find the container with id ce633703657f172646158d9da594bbe2808e265c550fd4bae90c05a27ba28c58 Jan 20 18:00:32 crc kubenswrapper[4558]: I0120 18:00:32.299999 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/install-certs-edpm-compute-global-edpm-compute-global-xl8pq" event={"ID":"71e1f655-faf6-4b2e-b4ec-506c6b761638","Type":"ContainerStarted","Data":"ce633703657f172646158d9da594bbe2808e265c550fd4bae90c05a27ba28c58"} Jan 20 18:00:33 crc kubenswrapper[4558]: I0120 18:00:33.309006 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/install-certs-edpm-compute-global-edpm-compute-global-xl8pq" event={"ID":"71e1f655-faf6-4b2e-b4ec-506c6b761638","Type":"ContainerStarted","Data":"b982130ff786a10b03fd153dfb0a401619acd1328bd5a3ffcd2bf61c96ab54ca"} Jan 20 18:00:34 crc kubenswrapper[4558]: I0120 18:00:34.321272 4558 generic.go:334] "Generic (PLEG): container finished" podID="71e1f655-faf6-4b2e-b4ec-506c6b761638" containerID="b982130ff786a10b03fd153dfb0a401619acd1328bd5a3ffcd2bf61c96ab54ca" exitCode=0 Jan 20 18:00:34 crc kubenswrapper[4558]: I0120 18:00:34.321324 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/install-certs-edpm-compute-global-edpm-compute-global-xl8pq" 
event={"ID":"71e1f655-faf6-4b2e-b4ec-506c6b761638","Type":"ContainerDied","Data":"b982130ff786a10b03fd153dfb0a401619acd1328bd5a3ffcd2bf61c96ab54ca"} Jan 20 18:00:35 crc kubenswrapper[4558]: I0120 18:00:35.567680 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/install-certs-edpm-compute-global-edpm-compute-global-xl8pq" Jan 20 18:00:35 crc kubenswrapper[4558]: I0120 18:00:35.606718 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/71e1f655-faf6-4b2e-b4ec-506c6b761638-ssh-key-edpm-compute-global\") pod \"71e1f655-faf6-4b2e-b4ec-506c6b761638\" (UID: \"71e1f655-faf6-4b2e-b4ec-506c6b761638\") " Jan 20 18:00:35 crc kubenswrapper[4558]: I0120 18:00:35.606774 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71e1f655-faf6-4b2e-b4ec-506c6b761638-neutron-ovn-combined-ca-bundle\") pod \"71e1f655-faf6-4b2e-b4ec-506c6b761638\" (UID: \"71e1f655-faf6-4b2e-b4ec-506c6b761638\") " Jan 20 18:00:35 crc kubenswrapper[4558]: I0120 18:00:35.606806 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/71e1f655-faf6-4b2e-b4ec-506c6b761638-inventory\") pod \"71e1f655-faf6-4b2e-b4ec-506c6b761638\" (UID: \"71e1f655-faf6-4b2e-b4ec-506c6b761638\") " Jan 20 18:00:35 crc kubenswrapper[4558]: I0120 18:00:35.606828 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71e1f655-faf6-4b2e-b4ec-506c6b761638-neutron-dhcp-combined-ca-bundle\") pod \"71e1f655-faf6-4b2e-b4ec-506c6b761638\" (UID: \"71e1f655-faf6-4b2e-b4ec-506c6b761638\") " Jan 20 18:00:35 crc kubenswrapper[4558]: I0120 18:00:35.606853 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71e1f655-faf6-4b2e-b4ec-506c6b761638-ovn-combined-ca-bundle\") pod \"71e1f655-faf6-4b2e-b4ec-506c6b761638\" (UID: \"71e1f655-faf6-4b2e-b4ec-506c6b761638\") " Jan 20 18:00:35 crc kubenswrapper[4558]: I0120 18:00:35.607451 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71e1f655-faf6-4b2e-b4ec-506c6b761638-nova-combined-ca-bundle\") pod \"71e1f655-faf6-4b2e-b4ec-506c6b761638\" (UID: \"71e1f655-faf6-4b2e-b4ec-506c6b761638\") " Jan 20 18:00:35 crc kubenswrapper[4558]: I0120 18:00:35.607482 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b2jql\" (UniqueName: \"kubernetes.io/projected/71e1f655-faf6-4b2e-b4ec-506c6b761638-kube-api-access-b2jql\") pod \"71e1f655-faf6-4b2e-b4ec-506c6b761638\" (UID: \"71e1f655-faf6-4b2e-b4ec-506c6b761638\") " Jan 20 18:00:35 crc kubenswrapper[4558]: I0120 18:00:35.607515 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71e1f655-faf6-4b2e-b4ec-506c6b761638-bootstrap-combined-ca-bundle\") pod \"71e1f655-faf6-4b2e-b4ec-506c6b761638\" (UID: \"71e1f655-faf6-4b2e-b4ec-506c6b761638\") " Jan 20 18:00:35 crc kubenswrapper[4558]: I0120 18:00:35.607534 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/71e1f655-faf6-4b2e-b4ec-506c6b761638-neutron-metadata-combined-ca-bundle\") pod \"71e1f655-faf6-4b2e-b4ec-506c6b761638\" (UID: \"71e1f655-faf6-4b2e-b4ec-506c6b761638\") " Jan 20 18:00:35 crc kubenswrapper[4558]: I0120 18:00:35.607554 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"custom-global-service-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71e1f655-faf6-4b2e-b4ec-506c6b761638-custom-global-service-combined-ca-bundle\") pod \"71e1f655-faf6-4b2e-b4ec-506c6b761638\" (UID: \"71e1f655-faf6-4b2e-b4ec-506c6b761638\") " Jan 20 18:00:35 crc kubenswrapper[4558]: I0120 18:00:35.607586 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71e1f655-faf6-4b2e-b4ec-506c6b761638-libvirt-combined-ca-bundle\") pod \"71e1f655-faf6-4b2e-b4ec-506c6b761638\" (UID: \"71e1f655-faf6-4b2e-b4ec-506c6b761638\") " Jan 20 18:00:35 crc kubenswrapper[4558]: I0120 18:00:35.607612 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71e1f655-faf6-4b2e-b4ec-506c6b761638-neutron-sriov-combined-ca-bundle\") pod \"71e1f655-faf6-4b2e-b4ec-506c6b761638\" (UID: \"71e1f655-faf6-4b2e-b4ec-506c6b761638\") " Jan 20 18:00:35 crc kubenswrapper[4558]: I0120 18:00:35.612260 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/71e1f655-faf6-4b2e-b4ec-506c6b761638-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "71e1f655-faf6-4b2e-b4ec-506c6b761638" (UID: "71e1f655-faf6-4b2e-b4ec-506c6b761638"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:00:35 crc kubenswrapper[4558]: I0120 18:00:35.612564 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/71e1f655-faf6-4b2e-b4ec-506c6b761638-neutron-sriov-combined-ca-bundle" (OuterVolumeSpecName: "neutron-sriov-combined-ca-bundle") pod "71e1f655-faf6-4b2e-b4ec-506c6b761638" (UID: "71e1f655-faf6-4b2e-b4ec-506c6b761638"). InnerVolumeSpecName "neutron-sriov-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:00:35 crc kubenswrapper[4558]: I0120 18:00:35.612592 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/71e1f655-faf6-4b2e-b4ec-506c6b761638-neutron-dhcp-combined-ca-bundle" (OuterVolumeSpecName: "neutron-dhcp-combined-ca-bundle") pod "71e1f655-faf6-4b2e-b4ec-506c6b761638" (UID: "71e1f655-faf6-4b2e-b4ec-506c6b761638"). InnerVolumeSpecName "neutron-dhcp-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:00:35 crc kubenswrapper[4558]: I0120 18:00:35.612875 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/71e1f655-faf6-4b2e-b4ec-506c6b761638-kube-api-access-b2jql" (OuterVolumeSpecName: "kube-api-access-b2jql") pod "71e1f655-faf6-4b2e-b4ec-506c6b761638" (UID: "71e1f655-faf6-4b2e-b4ec-506c6b761638"). InnerVolumeSpecName "kube-api-access-b2jql". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:00:35 crc kubenswrapper[4558]: I0120 18:00:35.613107 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/71e1f655-faf6-4b2e-b4ec-506c6b761638-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "71e1f655-faf6-4b2e-b4ec-506c6b761638" (UID: "71e1f655-faf6-4b2e-b4ec-506c6b761638"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:00:35 crc kubenswrapper[4558]: I0120 18:00:35.613290 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/71e1f655-faf6-4b2e-b4ec-506c6b761638-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "71e1f655-faf6-4b2e-b4ec-506c6b761638" (UID: "71e1f655-faf6-4b2e-b4ec-506c6b761638"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:00:35 crc kubenswrapper[4558]: I0120 18:00:35.614059 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/71e1f655-faf6-4b2e-b4ec-506c6b761638-neutron-ovn-combined-ca-bundle" (OuterVolumeSpecName: "neutron-ovn-combined-ca-bundle") pod "71e1f655-faf6-4b2e-b4ec-506c6b761638" (UID: "71e1f655-faf6-4b2e-b4ec-506c6b761638"). InnerVolumeSpecName "neutron-ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:00:35 crc kubenswrapper[4558]: I0120 18:00:35.614287 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/71e1f655-faf6-4b2e-b4ec-506c6b761638-custom-global-service-combined-ca-bundle" (OuterVolumeSpecName: "custom-global-service-combined-ca-bundle") pod "71e1f655-faf6-4b2e-b4ec-506c6b761638" (UID: "71e1f655-faf6-4b2e-b4ec-506c6b761638"). InnerVolumeSpecName "custom-global-service-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:00:35 crc kubenswrapper[4558]: I0120 18:00:35.616321 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/71e1f655-faf6-4b2e-b4ec-506c6b761638-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "71e1f655-faf6-4b2e-b4ec-506c6b761638" (UID: "71e1f655-faf6-4b2e-b4ec-506c6b761638"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:00:35 crc kubenswrapper[4558]: I0120 18:00:35.617360 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/71e1f655-faf6-4b2e-b4ec-506c6b761638-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "71e1f655-faf6-4b2e-b4ec-506c6b761638" (UID: "71e1f655-faf6-4b2e-b4ec-506c6b761638"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:00:35 crc kubenswrapper[4558]: I0120 18:00:35.626499 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/71e1f655-faf6-4b2e-b4ec-506c6b761638-ssh-key-edpm-compute-global" (OuterVolumeSpecName: "ssh-key-edpm-compute-global") pod "71e1f655-faf6-4b2e-b4ec-506c6b761638" (UID: "71e1f655-faf6-4b2e-b4ec-506c6b761638"). InnerVolumeSpecName "ssh-key-edpm-compute-global". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:00:35 crc kubenswrapper[4558]: I0120 18:00:35.627338 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/71e1f655-faf6-4b2e-b4ec-506c6b761638-inventory" (OuterVolumeSpecName: "inventory") pod "71e1f655-faf6-4b2e-b4ec-506c6b761638" (UID: "71e1f655-faf6-4b2e-b4ec-506c6b761638"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:00:35 crc kubenswrapper[4558]: I0120 18:00:35.708737 4558 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71e1f655-faf6-4b2e-b4ec-506c6b761638-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:00:35 crc kubenswrapper[4558]: I0120 18:00:35.708771 4558 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71e1f655-faf6-4b2e-b4ec-506c6b761638-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:00:35 crc kubenswrapper[4558]: I0120 18:00:35.708788 4558 reconciler_common.go:293] "Volume detached for volume \"custom-global-service-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71e1f655-faf6-4b2e-b4ec-506c6b761638-custom-global-service-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:00:35 crc kubenswrapper[4558]: I0120 18:00:35.708802 4558 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71e1f655-faf6-4b2e-b4ec-506c6b761638-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:00:35 crc kubenswrapper[4558]: I0120 18:00:35.708815 4558 reconciler_common.go:293] "Volume detached for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71e1f655-faf6-4b2e-b4ec-506c6b761638-neutron-sriov-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:00:35 crc kubenswrapper[4558]: I0120 18:00:35.708825 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/71e1f655-faf6-4b2e-b4ec-506c6b761638-ssh-key-edpm-compute-global\") on node \"crc\" DevicePath \"\"" Jan 20 18:00:35 crc kubenswrapper[4558]: I0120 18:00:35.708837 4558 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71e1f655-faf6-4b2e-b4ec-506c6b761638-neutron-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:00:35 crc kubenswrapper[4558]: I0120 18:00:35.708849 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/71e1f655-faf6-4b2e-b4ec-506c6b761638-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:00:35 crc kubenswrapper[4558]: I0120 18:00:35.708862 4558 reconciler_common.go:293] "Volume detached for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71e1f655-faf6-4b2e-b4ec-506c6b761638-neutron-dhcp-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:00:35 crc kubenswrapper[4558]: I0120 18:00:35.708874 4558 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/71e1f655-faf6-4b2e-b4ec-506c6b761638-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:00:35 crc kubenswrapper[4558]: I0120 18:00:35.708886 4558 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/71e1f655-faf6-4b2e-b4ec-506c6b761638-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:00:35 crc kubenswrapper[4558]: I0120 18:00:35.708902 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b2jql\" (UniqueName: \"kubernetes.io/projected/71e1f655-faf6-4b2e-b4ec-506c6b761638-kube-api-access-b2jql\") on node \"crc\" DevicePath \"\"" Jan 20 18:00:36 crc kubenswrapper[4558]: I0120 18:00:36.341763 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/install-certs-edpm-compute-global-edpm-compute-global-xl8pq" event={"ID":"71e1f655-faf6-4b2e-b4ec-506c6b761638","Type":"ContainerDied","Data":"ce633703657f172646158d9da594bbe2808e265c550fd4bae90c05a27ba28c58"} Jan 20 18:00:36 crc kubenswrapper[4558]: I0120 18:00:36.341817 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ce633703657f172646158d9da594bbe2808e265c550fd4bae90c05a27ba28c58" Jan 20 18:00:36 crc kubenswrapper[4558]: I0120 18:00:36.341898 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/install-certs-edpm-compute-global-edpm-compute-global-xl8pq" Jan 20 18:00:36 crc kubenswrapper[4558]: I0120 18:00:36.405400 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ovn-edpm-compute-global-edpm-compute-global-6kctq"] Jan 20 18:00:36 crc kubenswrapper[4558]: E0120 18:00:36.405750 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71e1f655-faf6-4b2e-b4ec-506c6b761638" containerName="install-certs-edpm-compute-global-edpm-compute-global" Jan 20 18:00:36 crc kubenswrapper[4558]: I0120 18:00:36.405770 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="71e1f655-faf6-4b2e-b4ec-506c6b761638" containerName="install-certs-edpm-compute-global-edpm-compute-global" Jan 20 18:00:36 crc kubenswrapper[4558]: I0120 18:00:36.405948 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="71e1f655-faf6-4b2e-b4ec-506c6b761638" containerName="install-certs-edpm-compute-global-edpm-compute-global" Jan 20 18:00:36 crc kubenswrapper[4558]: I0120 18:00:36.406516 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovn-edpm-compute-global-edpm-compute-global-6kctq" Jan 20 18:00:36 crc kubenswrapper[4558]: I0120 18:00:36.409210 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"edpm-compute-global-dockercfg-wkdxg" Jan 20 18:00:36 crc kubenswrapper[4558]: I0120 18:00:36.409226 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-edpm-compute-global" Jan 20 18:00:36 crc kubenswrapper[4558]: I0120 18:00:36.409494 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 18:00:36 crc kubenswrapper[4558]: I0120 18:00:36.409593 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovncontroller-config" Jan 20 18:00:36 crc kubenswrapper[4558]: I0120 18:00:36.409681 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 18:00:36 crc kubenswrapper[4558]: I0120 18:00:36.409922 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"combined-ca-bundle" Jan 20 18:00:36 crc kubenswrapper[4558]: I0120 18:00:36.417731 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovn-edpm-compute-global-edpm-compute-global-6kctq"] Jan 20 18:00:36 crc kubenswrapper[4558]: I0120 18:00:36.520551 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-srjdm\" (UniqueName: \"kubernetes.io/projected/54ba1771-d7af-4d97-9293-d4278f9baacd-kube-api-access-srjdm\") pod \"ovn-edpm-compute-global-edpm-compute-global-6kctq\" (UID: \"54ba1771-d7af-4d97-9293-d4278f9baacd\") " pod="openstack-kuttl-tests/ovn-edpm-compute-global-edpm-compute-global-6kctq" Jan 20 18:00:36 crc kubenswrapper[4558]: I0120 18:00:36.520822 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/54ba1771-d7af-4d97-9293-d4278f9baacd-ovncontroller-config-0\") pod \"ovn-edpm-compute-global-edpm-compute-global-6kctq\" (UID: \"54ba1771-d7af-4d97-9293-d4278f9baacd\") " pod="openstack-kuttl-tests/ovn-edpm-compute-global-edpm-compute-global-6kctq" Jan 20 18:00:36 crc kubenswrapper[4558]: I0120 18:00:36.520957 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/54ba1771-d7af-4d97-9293-d4278f9baacd-inventory\") pod \"ovn-edpm-compute-global-edpm-compute-global-6kctq\" (UID: \"54ba1771-d7af-4d97-9293-d4278f9baacd\") " pod="openstack-kuttl-tests/ovn-edpm-compute-global-edpm-compute-global-6kctq" Jan 20 18:00:36 crc kubenswrapper[4558]: I0120 18:00:36.521037 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54ba1771-d7af-4d97-9293-d4278f9baacd-ovn-combined-ca-bundle\") pod \"ovn-edpm-compute-global-edpm-compute-global-6kctq\" (UID: \"54ba1771-d7af-4d97-9293-d4278f9baacd\") " pod="openstack-kuttl-tests/ovn-edpm-compute-global-edpm-compute-global-6kctq" Jan 20 18:00:36 crc kubenswrapper[4558]: I0120 18:00:36.521111 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: 
\"kubernetes.io/secret/54ba1771-d7af-4d97-9293-d4278f9baacd-ssh-key-edpm-compute-global\") pod \"ovn-edpm-compute-global-edpm-compute-global-6kctq\" (UID: \"54ba1771-d7af-4d97-9293-d4278f9baacd\") " pod="openstack-kuttl-tests/ovn-edpm-compute-global-edpm-compute-global-6kctq" Jan 20 18:00:36 crc kubenswrapper[4558]: I0120 18:00:36.622099 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/54ba1771-d7af-4d97-9293-d4278f9baacd-ovncontroller-config-0\") pod \"ovn-edpm-compute-global-edpm-compute-global-6kctq\" (UID: \"54ba1771-d7af-4d97-9293-d4278f9baacd\") " pod="openstack-kuttl-tests/ovn-edpm-compute-global-edpm-compute-global-6kctq" Jan 20 18:00:36 crc kubenswrapper[4558]: I0120 18:00:36.622154 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/54ba1771-d7af-4d97-9293-d4278f9baacd-inventory\") pod \"ovn-edpm-compute-global-edpm-compute-global-6kctq\" (UID: \"54ba1771-d7af-4d97-9293-d4278f9baacd\") " pod="openstack-kuttl-tests/ovn-edpm-compute-global-edpm-compute-global-6kctq" Jan 20 18:00:36 crc kubenswrapper[4558]: I0120 18:00:36.622202 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54ba1771-d7af-4d97-9293-d4278f9baacd-ovn-combined-ca-bundle\") pod \"ovn-edpm-compute-global-edpm-compute-global-6kctq\" (UID: \"54ba1771-d7af-4d97-9293-d4278f9baacd\") " pod="openstack-kuttl-tests/ovn-edpm-compute-global-edpm-compute-global-6kctq" Jan 20 18:00:36 crc kubenswrapper[4558]: I0120 18:00:36.622226 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/54ba1771-d7af-4d97-9293-d4278f9baacd-ssh-key-edpm-compute-global\") pod \"ovn-edpm-compute-global-edpm-compute-global-6kctq\" (UID: \"54ba1771-d7af-4d97-9293-d4278f9baacd\") " pod="openstack-kuttl-tests/ovn-edpm-compute-global-edpm-compute-global-6kctq" Jan 20 18:00:36 crc kubenswrapper[4558]: I0120 18:00:36.622290 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-srjdm\" (UniqueName: \"kubernetes.io/projected/54ba1771-d7af-4d97-9293-d4278f9baacd-kube-api-access-srjdm\") pod \"ovn-edpm-compute-global-edpm-compute-global-6kctq\" (UID: \"54ba1771-d7af-4d97-9293-d4278f9baacd\") " pod="openstack-kuttl-tests/ovn-edpm-compute-global-edpm-compute-global-6kctq" Jan 20 18:00:36 crc kubenswrapper[4558]: I0120 18:00:36.623569 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/54ba1771-d7af-4d97-9293-d4278f9baacd-ovncontroller-config-0\") pod \"ovn-edpm-compute-global-edpm-compute-global-6kctq\" (UID: \"54ba1771-d7af-4d97-9293-d4278f9baacd\") " pod="openstack-kuttl-tests/ovn-edpm-compute-global-edpm-compute-global-6kctq" Jan 20 18:00:36 crc kubenswrapper[4558]: I0120 18:00:36.629004 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/54ba1771-d7af-4d97-9293-d4278f9baacd-ssh-key-edpm-compute-global\") pod \"ovn-edpm-compute-global-edpm-compute-global-6kctq\" (UID: \"54ba1771-d7af-4d97-9293-d4278f9baacd\") " pod="openstack-kuttl-tests/ovn-edpm-compute-global-edpm-compute-global-6kctq" Jan 20 18:00:36 crc kubenswrapper[4558]: I0120 18:00:36.629096 4558 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/54ba1771-d7af-4d97-9293-d4278f9baacd-inventory\") pod \"ovn-edpm-compute-global-edpm-compute-global-6kctq\" (UID: \"54ba1771-d7af-4d97-9293-d4278f9baacd\") " pod="openstack-kuttl-tests/ovn-edpm-compute-global-edpm-compute-global-6kctq" Jan 20 18:00:36 crc kubenswrapper[4558]: I0120 18:00:36.630060 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54ba1771-d7af-4d97-9293-d4278f9baacd-ovn-combined-ca-bundle\") pod \"ovn-edpm-compute-global-edpm-compute-global-6kctq\" (UID: \"54ba1771-d7af-4d97-9293-d4278f9baacd\") " pod="openstack-kuttl-tests/ovn-edpm-compute-global-edpm-compute-global-6kctq" Jan 20 18:00:36 crc kubenswrapper[4558]: I0120 18:00:36.638442 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-srjdm\" (UniqueName: \"kubernetes.io/projected/54ba1771-d7af-4d97-9293-d4278f9baacd-kube-api-access-srjdm\") pod \"ovn-edpm-compute-global-edpm-compute-global-6kctq\" (UID: \"54ba1771-d7af-4d97-9293-d4278f9baacd\") " pod="openstack-kuttl-tests/ovn-edpm-compute-global-edpm-compute-global-6kctq" Jan 20 18:00:36 crc kubenswrapper[4558]: I0120 18:00:36.722202 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovn-edpm-compute-global-edpm-compute-global-6kctq" Jan 20 18:00:37 crc kubenswrapper[4558]: I0120 18:00:37.116330 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovn-edpm-compute-global-edpm-compute-global-6kctq"] Jan 20 18:00:37 crc kubenswrapper[4558]: I0120 18:00:37.350491 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-edpm-compute-global-edpm-compute-global-6kctq" event={"ID":"54ba1771-d7af-4d97-9293-d4278f9baacd","Type":"ContainerStarted","Data":"aab5fe722d1af413db8b56a8a86ac359f46415575de5195c23924f964939e2d6"} Jan 20 18:00:37 crc kubenswrapper[4558]: I0120 18:00:37.647995 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-xfnvt"] Jan 20 18:00:37 crc kubenswrapper[4558]: I0120 18:00:37.650484 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-xfnvt" Jan 20 18:00:37 crc kubenswrapper[4558]: I0120 18:00:37.661082 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-xfnvt"] Jan 20 18:00:37 crc kubenswrapper[4558]: I0120 18:00:37.744850 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/104ae5cb-67ef-4dac-af44-7fab7118cae1-catalog-content\") pod \"redhat-operators-xfnvt\" (UID: \"104ae5cb-67ef-4dac-af44-7fab7118cae1\") " pod="openshift-marketplace/redhat-operators-xfnvt" Jan 20 18:00:37 crc kubenswrapper[4558]: I0120 18:00:37.744936 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/104ae5cb-67ef-4dac-af44-7fab7118cae1-utilities\") pod \"redhat-operators-xfnvt\" (UID: \"104ae5cb-67ef-4dac-af44-7fab7118cae1\") " pod="openshift-marketplace/redhat-operators-xfnvt" Jan 20 18:00:37 crc kubenswrapper[4558]: I0120 18:00:37.744960 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zxt75\" (UniqueName: \"kubernetes.io/projected/104ae5cb-67ef-4dac-af44-7fab7118cae1-kube-api-access-zxt75\") pod \"redhat-operators-xfnvt\" (UID: \"104ae5cb-67ef-4dac-af44-7fab7118cae1\") " pod="openshift-marketplace/redhat-operators-xfnvt" Jan 20 18:00:37 crc kubenswrapper[4558]: I0120 18:00:37.846490 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/104ae5cb-67ef-4dac-af44-7fab7118cae1-utilities\") pod \"redhat-operators-xfnvt\" (UID: \"104ae5cb-67ef-4dac-af44-7fab7118cae1\") " pod="openshift-marketplace/redhat-operators-xfnvt" Jan 20 18:00:37 crc kubenswrapper[4558]: I0120 18:00:37.846755 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zxt75\" (UniqueName: \"kubernetes.io/projected/104ae5cb-67ef-4dac-af44-7fab7118cae1-kube-api-access-zxt75\") pod \"redhat-operators-xfnvt\" (UID: \"104ae5cb-67ef-4dac-af44-7fab7118cae1\") " pod="openshift-marketplace/redhat-operators-xfnvt" Jan 20 18:00:37 crc kubenswrapper[4558]: I0120 18:00:37.846824 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/104ae5cb-67ef-4dac-af44-7fab7118cae1-catalog-content\") pod \"redhat-operators-xfnvt\" (UID: \"104ae5cb-67ef-4dac-af44-7fab7118cae1\") " pod="openshift-marketplace/redhat-operators-xfnvt" Jan 20 18:00:37 crc kubenswrapper[4558]: I0120 18:00:37.847091 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/104ae5cb-67ef-4dac-af44-7fab7118cae1-utilities\") pod \"redhat-operators-xfnvt\" (UID: \"104ae5cb-67ef-4dac-af44-7fab7118cae1\") " pod="openshift-marketplace/redhat-operators-xfnvt" Jan 20 18:00:37 crc kubenswrapper[4558]: I0120 18:00:37.847123 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/104ae5cb-67ef-4dac-af44-7fab7118cae1-catalog-content\") pod \"redhat-operators-xfnvt\" (UID: \"104ae5cb-67ef-4dac-af44-7fab7118cae1\") " pod="openshift-marketplace/redhat-operators-xfnvt" Jan 20 18:00:37 crc kubenswrapper[4558]: I0120 18:00:37.864240 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-zxt75\" (UniqueName: \"kubernetes.io/projected/104ae5cb-67ef-4dac-af44-7fab7118cae1-kube-api-access-zxt75\") pod \"redhat-operators-xfnvt\" (UID: \"104ae5cb-67ef-4dac-af44-7fab7118cae1\") " pod="openshift-marketplace/redhat-operators-xfnvt" Jan 20 18:00:37 crc kubenswrapper[4558]: I0120 18:00:37.970265 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xfnvt" Jan 20 18:00:38 crc kubenswrapper[4558]: I0120 18:00:38.358313 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-edpm-compute-global-edpm-compute-global-6kctq" event={"ID":"54ba1771-d7af-4d97-9293-d4278f9baacd","Type":"ContainerStarted","Data":"cb9d97b6167536a1e4f73d87f108cd283463df50132925265a657a9ec4c91f10"} Jan 20 18:00:38 crc kubenswrapper[4558]: I0120 18:00:38.376126 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ovn-edpm-compute-global-edpm-compute-global-6kctq" podStartSLOduration=1.702342445 podStartE2EDuration="2.37610932s" podCreationTimestamp="2026-01-20 18:00:36 +0000 UTC" firstStartedPulling="2026-01-20 18:00:37.125810476 +0000 UTC m=+4730.886148443" lastFinishedPulling="2026-01-20 18:00:37.799577351 +0000 UTC m=+4731.559915318" observedRunningTime="2026-01-20 18:00:38.373570067 +0000 UTC m=+4732.133908034" watchObservedRunningTime="2026-01-20 18:00:38.37610932 +0000 UTC m=+4732.136447287" Jan 20 18:00:38 crc kubenswrapper[4558]: I0120 18:00:38.423472 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-xfnvt"] Jan 20 18:00:38 crc kubenswrapper[4558]: W0120 18:00:38.426361 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod104ae5cb_67ef_4dac_af44_7fab7118cae1.slice/crio-0ee8d7b40459c5de23b4848c0b335cfd2121dc11c6712d51685eb1e240957a0c WatchSource:0}: Error finding container 0ee8d7b40459c5de23b4848c0b335cfd2121dc11c6712d51685eb1e240957a0c: Status 404 returned error can't find the container with id 0ee8d7b40459c5de23b4848c0b335cfd2121dc11c6712d51685eb1e240957a0c Jan 20 18:00:39 crc kubenswrapper[4558]: I0120 18:00:39.370518 4558 generic.go:334] "Generic (PLEG): container finished" podID="104ae5cb-67ef-4dac-af44-7fab7118cae1" containerID="5fce438ff7f02f90d7e395cd5333db3bcf4a616b3bc4f45780931b597975028a" exitCode=0 Jan 20 18:00:39 crc kubenswrapper[4558]: I0120 18:00:39.370607 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xfnvt" event={"ID":"104ae5cb-67ef-4dac-af44-7fab7118cae1","Type":"ContainerDied","Data":"5fce438ff7f02f90d7e395cd5333db3bcf4a616b3bc4f45780931b597975028a"} Jan 20 18:00:39 crc kubenswrapper[4558]: I0120 18:00:39.371014 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xfnvt" event={"ID":"104ae5cb-67ef-4dac-af44-7fab7118cae1","Type":"ContainerStarted","Data":"0ee8d7b40459c5de23b4848c0b335cfd2121dc11c6712d51685eb1e240957a0c"} Jan 20 18:00:39 crc kubenswrapper[4558]: I0120 18:00:39.373100 4558 generic.go:334] "Generic (PLEG): container finished" podID="54ba1771-d7af-4d97-9293-d4278f9baacd" containerID="cb9d97b6167536a1e4f73d87f108cd283463df50132925265a657a9ec4c91f10" exitCode=0 Jan 20 18:00:39 crc kubenswrapper[4558]: I0120 18:00:39.373191 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-edpm-compute-global-edpm-compute-global-6kctq" 
event={"ID":"54ba1771-d7af-4d97-9293-d4278f9baacd","Type":"ContainerDied","Data":"cb9d97b6167536a1e4f73d87f108cd283463df50132925265a657a9ec4c91f10"} Jan 20 18:00:40 crc kubenswrapper[4558]: I0120 18:00:40.658115 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovn-edpm-compute-global-edpm-compute-global-6kctq" Jan 20 18:00:40 crc kubenswrapper[4558]: I0120 18:00:40.687655 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54ba1771-d7af-4d97-9293-d4278f9baacd-ovn-combined-ca-bundle\") pod \"54ba1771-d7af-4d97-9293-d4278f9baacd\" (UID: \"54ba1771-d7af-4d97-9293-d4278f9baacd\") " Jan 20 18:00:40 crc kubenswrapper[4558]: I0120 18:00:40.687762 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/54ba1771-d7af-4d97-9293-d4278f9baacd-ssh-key-edpm-compute-global\") pod \"54ba1771-d7af-4d97-9293-d4278f9baacd\" (UID: \"54ba1771-d7af-4d97-9293-d4278f9baacd\") " Jan 20 18:00:40 crc kubenswrapper[4558]: I0120 18:00:40.687869 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-srjdm\" (UniqueName: \"kubernetes.io/projected/54ba1771-d7af-4d97-9293-d4278f9baacd-kube-api-access-srjdm\") pod \"54ba1771-d7af-4d97-9293-d4278f9baacd\" (UID: \"54ba1771-d7af-4d97-9293-d4278f9baacd\") " Jan 20 18:00:40 crc kubenswrapper[4558]: I0120 18:00:40.687948 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/54ba1771-d7af-4d97-9293-d4278f9baacd-inventory\") pod \"54ba1771-d7af-4d97-9293-d4278f9baacd\" (UID: \"54ba1771-d7af-4d97-9293-d4278f9baacd\") " Jan 20 18:00:40 crc kubenswrapper[4558]: I0120 18:00:40.687985 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/54ba1771-d7af-4d97-9293-d4278f9baacd-ovncontroller-config-0\") pod \"54ba1771-d7af-4d97-9293-d4278f9baacd\" (UID: \"54ba1771-d7af-4d97-9293-d4278f9baacd\") " Jan 20 18:00:40 crc kubenswrapper[4558]: I0120 18:00:40.694697 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54ba1771-d7af-4d97-9293-d4278f9baacd-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "54ba1771-d7af-4d97-9293-d4278f9baacd" (UID: "54ba1771-d7af-4d97-9293-d4278f9baacd"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:00:40 crc kubenswrapper[4558]: I0120 18:00:40.696205 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/54ba1771-d7af-4d97-9293-d4278f9baacd-kube-api-access-srjdm" (OuterVolumeSpecName: "kube-api-access-srjdm") pod "54ba1771-d7af-4d97-9293-d4278f9baacd" (UID: "54ba1771-d7af-4d97-9293-d4278f9baacd"). InnerVolumeSpecName "kube-api-access-srjdm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:00:40 crc kubenswrapper[4558]: I0120 18:00:40.708039 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54ba1771-d7af-4d97-9293-d4278f9baacd-ssh-key-edpm-compute-global" (OuterVolumeSpecName: "ssh-key-edpm-compute-global") pod "54ba1771-d7af-4d97-9293-d4278f9baacd" (UID: "54ba1771-d7af-4d97-9293-d4278f9baacd"). InnerVolumeSpecName "ssh-key-edpm-compute-global". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:00:40 crc kubenswrapper[4558]: I0120 18:00:40.708691 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/54ba1771-d7af-4d97-9293-d4278f9baacd-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "54ba1771-d7af-4d97-9293-d4278f9baacd" (UID: "54ba1771-d7af-4d97-9293-d4278f9baacd"). InnerVolumeSpecName "ovncontroller-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:00:40 crc kubenswrapper[4558]: I0120 18:00:40.709631 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54ba1771-d7af-4d97-9293-d4278f9baacd-inventory" (OuterVolumeSpecName: "inventory") pod "54ba1771-d7af-4d97-9293-d4278f9baacd" (UID: "54ba1771-d7af-4d97-9293-d4278f9baacd"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:00:40 crc kubenswrapper[4558]: I0120 18:00:40.790560 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/54ba1771-d7af-4d97-9293-d4278f9baacd-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:00:40 crc kubenswrapper[4558]: I0120 18:00:40.790604 4558 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/54ba1771-d7af-4d97-9293-d4278f9baacd-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Jan 20 18:00:40 crc kubenswrapper[4558]: I0120 18:00:40.790618 4558 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54ba1771-d7af-4d97-9293-d4278f9baacd-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:00:40 crc kubenswrapper[4558]: I0120 18:00:40.790634 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/54ba1771-d7af-4d97-9293-d4278f9baacd-ssh-key-edpm-compute-global\") on node \"crc\" DevicePath \"\"" Jan 20 18:00:40 crc kubenswrapper[4558]: I0120 18:00:40.790651 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-srjdm\" (UniqueName: \"kubernetes.io/projected/54ba1771-d7af-4d97-9293-d4278f9baacd-kube-api-access-srjdm\") on node \"crc\" DevicePath \"\"" Jan 20 18:00:41 crc kubenswrapper[4558]: I0120 18:00:41.392890 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-edpm-compute-global-edpm-compute-global-6kctq" event={"ID":"54ba1771-d7af-4d97-9293-d4278f9baacd","Type":"ContainerDied","Data":"aab5fe722d1af413db8b56a8a86ac359f46415575de5195c23924f964939e2d6"} Jan 20 18:00:41 crc kubenswrapper[4558]: I0120 18:00:41.392977 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovn-edpm-compute-global-edpm-compute-global-6kctq" Jan 20 18:00:41 crc kubenswrapper[4558]: I0120 18:00:41.393048 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="aab5fe722d1af413db8b56a8a86ac359f46415575de5195c23924f964939e2d6" Jan 20 18:00:41 crc kubenswrapper[4558]: I0120 18:00:41.395133 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xfnvt" event={"ID":"104ae5cb-67ef-4dac-af44-7fab7118cae1","Type":"ContainerStarted","Data":"fc78be79b3948d4b2a5893a9d2797be752c714a3794e1fcc699f57389197e3bd"} Jan 20 18:00:41 crc kubenswrapper[4558]: I0120 18:00:41.455764 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-metadata-edpm-compute-global-edpm-compute-global-x5msg"] Jan 20 18:00:41 crc kubenswrapper[4558]: E0120 18:00:41.456095 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54ba1771-d7af-4d97-9293-d4278f9baacd" containerName="ovn-edpm-compute-global-edpm-compute-global" Jan 20 18:00:41 crc kubenswrapper[4558]: I0120 18:00:41.456115 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="54ba1771-d7af-4d97-9293-d4278f9baacd" containerName="ovn-edpm-compute-global-edpm-compute-global" Jan 20 18:00:41 crc kubenswrapper[4558]: I0120 18:00:41.456261 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="54ba1771-d7af-4d97-9293-d4278f9baacd" containerName="ovn-edpm-compute-global-edpm-compute-global" Jan 20 18:00:41 crc kubenswrapper[4558]: I0120 18:00:41.456732 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-metadata-edpm-compute-global-edpm-compute-global-x5msg" Jan 20 18:00:41 crc kubenswrapper[4558]: I0120 18:00:41.460679 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-metadata-neutron-config" Jan 20 18:00:41 crc kubenswrapper[4558]: I0120 18:00:41.460811 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-edpm-compute-global" Jan 20 18:00:41 crc kubenswrapper[4558]: I0120 18:00:41.460880 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"combined-ca-bundle" Jan 20 18:00:41 crc kubenswrapper[4558]: I0120 18:00:41.461014 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 18:00:41 crc kubenswrapper[4558]: I0120 18:00:41.461205 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"edpm-compute-global-dockercfg-wkdxg" Jan 20 18:00:41 crc kubenswrapper[4558]: I0120 18:00:41.461737 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-ovn-metadata-agent-neutron-config" Jan 20 18:00:41 crc kubenswrapper[4558]: I0120 18:00:41.462384 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 18:00:41 crc kubenswrapper[4558]: I0120 18:00:41.464882 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-metadata-edpm-compute-global-edpm-compute-global-x5msg"] Jan 20 18:00:41 crc kubenswrapper[4558]: I0120 18:00:41.498590 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: 
\"kubernetes.io/secret/0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52-ssh-key-edpm-compute-global\") pod \"neutron-metadata-edpm-compute-global-edpm-compute-global-x5msg\" (UID: \"0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-compute-global-edpm-compute-global-x5msg" Jan 20 18:00:41 crc kubenswrapper[4558]: I0120 18:00:41.498633 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-compute-global-edpm-compute-global-x5msg\" (UID: \"0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-compute-global-edpm-compute-global-x5msg" Jan 20 18:00:41 crc kubenswrapper[4558]: I0120 18:00:41.498658 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52-inventory\") pod \"neutron-metadata-edpm-compute-global-edpm-compute-global-x5msg\" (UID: \"0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-compute-global-edpm-compute-global-x5msg" Jan 20 18:00:41 crc kubenswrapper[4558]: I0120 18:00:41.498685 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-compute-global-edpm-compute-global-x5msg\" (UID: \"0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-compute-global-edpm-compute-global-x5msg" Jan 20 18:00:41 crc kubenswrapper[4558]: I0120 18:00:41.498708 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-compute-global-edpm-compute-global-x5msg\" (UID: \"0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-compute-global-edpm-compute-global-x5msg" Jan 20 18:00:41 crc kubenswrapper[4558]: I0120 18:00:41.498793 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-1\" (UniqueName: \"kubernetes.io/secret/0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52-nova-metadata-neutron-config-1\") pod \"neutron-metadata-edpm-compute-global-edpm-compute-global-x5msg\" (UID: \"0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-compute-global-edpm-compute-global-x5msg" Jan 20 18:00:41 crc kubenswrapper[4558]: I0120 18:00:41.498812 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vps2h\" (UniqueName: \"kubernetes.io/projected/0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52-kube-api-access-vps2h\") pod \"neutron-metadata-edpm-compute-global-edpm-compute-global-x5msg\" (UID: \"0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-compute-global-edpm-compute-global-x5msg" Jan 20 18:00:41 crc kubenswrapper[4558]: I0120 18:00:41.498839 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"nova-metadata-neutron-config-2\" (UniqueName: \"kubernetes.io/secret/0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52-nova-metadata-neutron-config-2\") pod \"neutron-metadata-edpm-compute-global-edpm-compute-global-x5msg\" (UID: \"0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-compute-global-edpm-compute-global-x5msg" Jan 20 18:00:41 crc kubenswrapper[4558]: I0120 18:00:41.599338 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52-ssh-key-edpm-compute-global\") pod \"neutron-metadata-edpm-compute-global-edpm-compute-global-x5msg\" (UID: \"0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-compute-global-edpm-compute-global-x5msg" Jan 20 18:00:41 crc kubenswrapper[4558]: I0120 18:00:41.599387 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-compute-global-edpm-compute-global-x5msg\" (UID: \"0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-compute-global-edpm-compute-global-x5msg" Jan 20 18:00:41 crc kubenswrapper[4558]: I0120 18:00:41.599413 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52-inventory\") pod \"neutron-metadata-edpm-compute-global-edpm-compute-global-x5msg\" (UID: \"0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-compute-global-edpm-compute-global-x5msg" Jan 20 18:00:41 crc kubenswrapper[4558]: I0120 18:00:41.599451 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-compute-global-edpm-compute-global-x5msg\" (UID: \"0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-compute-global-edpm-compute-global-x5msg" Jan 20 18:00:41 crc kubenswrapper[4558]: I0120 18:00:41.599482 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-compute-global-edpm-compute-global-x5msg\" (UID: \"0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-compute-global-edpm-compute-global-x5msg" Jan 20 18:00:41 crc kubenswrapper[4558]: I0120 18:00:41.599573 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-1\" (UniqueName: \"kubernetes.io/secret/0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52-nova-metadata-neutron-config-1\") pod \"neutron-metadata-edpm-compute-global-edpm-compute-global-x5msg\" (UID: \"0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-compute-global-edpm-compute-global-x5msg" Jan 20 18:00:41 crc kubenswrapper[4558]: I0120 18:00:41.599611 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vps2h\" (UniqueName: 
\"kubernetes.io/projected/0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52-kube-api-access-vps2h\") pod \"neutron-metadata-edpm-compute-global-edpm-compute-global-x5msg\" (UID: \"0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-compute-global-edpm-compute-global-x5msg" Jan 20 18:00:41 crc kubenswrapper[4558]: I0120 18:00:41.599648 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-2\" (UniqueName: \"kubernetes.io/secret/0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52-nova-metadata-neutron-config-2\") pod \"neutron-metadata-edpm-compute-global-edpm-compute-global-x5msg\" (UID: \"0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-compute-global-edpm-compute-global-x5msg" Jan 20 18:00:41 crc kubenswrapper[4558]: I0120 18:00:41.604475 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52-ssh-key-edpm-compute-global\") pod \"neutron-metadata-edpm-compute-global-edpm-compute-global-x5msg\" (UID: \"0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-compute-global-edpm-compute-global-x5msg" Jan 20 18:00:41 crc kubenswrapper[4558]: I0120 18:00:41.605219 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-compute-global-edpm-compute-global-x5msg\" (UID: \"0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-compute-global-edpm-compute-global-x5msg" Jan 20 18:00:41 crc kubenswrapper[4558]: I0120 18:00:41.605801 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-compute-global-edpm-compute-global-x5msg\" (UID: \"0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-compute-global-edpm-compute-global-x5msg" Jan 20 18:00:41 crc kubenswrapper[4558]: I0120 18:00:41.607622 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-compute-global-edpm-compute-global-x5msg\" (UID: \"0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-compute-global-edpm-compute-global-x5msg" Jan 20 18:00:41 crc kubenswrapper[4558]: I0120 18:00:41.609310 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-1\" (UniqueName: \"kubernetes.io/secret/0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52-nova-metadata-neutron-config-1\") pod \"neutron-metadata-edpm-compute-global-edpm-compute-global-x5msg\" (UID: \"0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-compute-global-edpm-compute-global-x5msg" Jan 20 18:00:41 crc kubenswrapper[4558]: I0120 18:00:41.609766 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-2\" (UniqueName: 
\"kubernetes.io/secret/0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52-nova-metadata-neutron-config-2\") pod \"neutron-metadata-edpm-compute-global-edpm-compute-global-x5msg\" (UID: \"0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-compute-global-edpm-compute-global-x5msg" Jan 20 18:00:41 crc kubenswrapper[4558]: I0120 18:00:41.611779 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52-inventory\") pod \"neutron-metadata-edpm-compute-global-edpm-compute-global-x5msg\" (UID: \"0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-compute-global-edpm-compute-global-x5msg" Jan 20 18:00:41 crc kubenswrapper[4558]: I0120 18:00:41.614762 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vps2h\" (UniqueName: \"kubernetes.io/projected/0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52-kube-api-access-vps2h\") pod \"neutron-metadata-edpm-compute-global-edpm-compute-global-x5msg\" (UID: \"0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-compute-global-edpm-compute-global-x5msg" Jan 20 18:00:41 crc kubenswrapper[4558]: I0120 18:00:41.772343 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-metadata-edpm-compute-global-edpm-compute-global-x5msg" Jan 20 18:00:42 crc kubenswrapper[4558]: I0120 18:00:42.158997 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-metadata-edpm-compute-global-edpm-compute-global-x5msg"] Jan 20 18:00:42 crc kubenswrapper[4558]: W0120 18:00:42.166313 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0eec69f6_ce99_4e2f_b9e4_99b2afa4bb52.slice/crio-6f28b22280288c59216d97c522b7766f1c476795e075e2dc55e4d2d03e1099f1 WatchSource:0}: Error finding container 6f28b22280288c59216d97c522b7766f1c476795e075e2dc55e4d2d03e1099f1: Status 404 returned error can't find the container with id 6f28b22280288c59216d97c522b7766f1c476795e075e2dc55e4d2d03e1099f1 Jan 20 18:00:42 crc kubenswrapper[4558]: I0120 18:00:42.408025 4558 generic.go:334] "Generic (PLEG): container finished" podID="104ae5cb-67ef-4dac-af44-7fab7118cae1" containerID="fc78be79b3948d4b2a5893a9d2797be752c714a3794e1fcc699f57389197e3bd" exitCode=0 Jan 20 18:00:42 crc kubenswrapper[4558]: I0120 18:00:42.408138 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xfnvt" event={"ID":"104ae5cb-67ef-4dac-af44-7fab7118cae1","Type":"ContainerDied","Data":"fc78be79b3948d4b2a5893a9d2797be752c714a3794e1fcc699f57389197e3bd"} Jan 20 18:00:42 crc kubenswrapper[4558]: I0120 18:00:42.409857 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-metadata-edpm-compute-global-edpm-compute-global-x5msg" event={"ID":"0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52","Type":"ContainerStarted","Data":"6f28b22280288c59216d97c522b7766f1c476795e075e2dc55e4d2d03e1099f1"} Jan 20 18:00:43 crc kubenswrapper[4558]: I0120 18:00:43.423155 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xfnvt" event={"ID":"104ae5cb-67ef-4dac-af44-7fab7118cae1","Type":"ContainerStarted","Data":"902f7da23862fcd7aa3483c5b431f96f63b48d1eeee42267cb5e48344765f7b2"} Jan 20 18:00:43 crc kubenswrapper[4558]: I0120 18:00:43.426373 4558 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-metadata-edpm-compute-global-edpm-compute-global-x5msg" event={"ID":"0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52","Type":"ContainerStarted","Data":"130c862694f4bf857ee52c143d80ecb5bfd0e0e61fd692aff1924680329ca140"} Jan 20 18:00:43 crc kubenswrapper[4558]: I0120 18:00:43.447086 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-xfnvt" podStartSLOduration=2.939951555 podStartE2EDuration="6.447063708s" podCreationTimestamp="2026-01-20 18:00:37 +0000 UTC" firstStartedPulling="2026-01-20 18:00:39.372104111 +0000 UTC m=+4733.132442077" lastFinishedPulling="2026-01-20 18:00:42.879216263 +0000 UTC m=+4736.639554230" observedRunningTime="2026-01-20 18:00:43.443199705 +0000 UTC m=+4737.203537671" watchObservedRunningTime="2026-01-20 18:00:43.447063708 +0000 UTC m=+4737.207401675" Jan 20 18:00:43 crc kubenswrapper[4558]: I0120 18:00:43.470790 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/neutron-metadata-edpm-compute-global-edpm-compute-global-x5msg" podStartSLOduration=1.988023112 podStartE2EDuration="2.470763928s" podCreationTimestamp="2026-01-20 18:00:41 +0000 UTC" firstStartedPulling="2026-01-20 18:00:42.168829361 +0000 UTC m=+4735.929167329" lastFinishedPulling="2026-01-20 18:00:42.651570178 +0000 UTC m=+4736.411908145" observedRunningTime="2026-01-20 18:00:43.463296772 +0000 UTC m=+4737.223634739" watchObservedRunningTime="2026-01-20 18:00:43.470763928 +0000 UTC m=+4737.231101895" Jan 20 18:00:44 crc kubenswrapper[4558]: I0120 18:00:44.429786 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-v2gb7"] Jan 20 18:00:44 crc kubenswrapper[4558]: I0120 18:00:44.431877 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-v2gb7" Jan 20 18:00:44 crc kubenswrapper[4558]: I0120 18:00:44.438117 4558 generic.go:334] "Generic (PLEG): container finished" podID="0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52" containerID="130c862694f4bf857ee52c143d80ecb5bfd0e0e61fd692aff1924680329ca140" exitCode=0 Jan 20 18:00:44 crc kubenswrapper[4558]: I0120 18:00:44.438197 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-metadata-edpm-compute-global-edpm-compute-global-x5msg" event={"ID":"0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52","Type":"ContainerDied","Data":"130c862694f4bf857ee52c143d80ecb5bfd0e0e61fd692aff1924680329ca140"} Jan 20 18:00:44 crc kubenswrapper[4558]: I0120 18:00:44.443328 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-v2gb7"] Jan 20 18:00:44 crc kubenswrapper[4558]: I0120 18:00:44.448438 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cd2tj\" (UniqueName: \"kubernetes.io/projected/7d3d2009-8b03-4d74-881b-1dd341ead556-kube-api-access-cd2tj\") pod \"certified-operators-v2gb7\" (UID: \"7d3d2009-8b03-4d74-881b-1dd341ead556\") " pod="openshift-marketplace/certified-operators-v2gb7" Jan 20 18:00:44 crc kubenswrapper[4558]: I0120 18:00:44.448532 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7d3d2009-8b03-4d74-881b-1dd341ead556-catalog-content\") pod \"certified-operators-v2gb7\" (UID: \"7d3d2009-8b03-4d74-881b-1dd341ead556\") " pod="openshift-marketplace/certified-operators-v2gb7" Jan 20 18:00:44 crc kubenswrapper[4558]: I0120 18:00:44.448586 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7d3d2009-8b03-4d74-881b-1dd341ead556-utilities\") pod \"certified-operators-v2gb7\" (UID: \"7d3d2009-8b03-4d74-881b-1dd341ead556\") " pod="openshift-marketplace/certified-operators-v2gb7" Jan 20 18:00:44 crc kubenswrapper[4558]: I0120 18:00:44.549492 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cd2tj\" (UniqueName: \"kubernetes.io/projected/7d3d2009-8b03-4d74-881b-1dd341ead556-kube-api-access-cd2tj\") pod \"certified-operators-v2gb7\" (UID: \"7d3d2009-8b03-4d74-881b-1dd341ead556\") " pod="openshift-marketplace/certified-operators-v2gb7" Jan 20 18:00:44 crc kubenswrapper[4558]: I0120 18:00:44.549575 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7d3d2009-8b03-4d74-881b-1dd341ead556-catalog-content\") pod \"certified-operators-v2gb7\" (UID: \"7d3d2009-8b03-4d74-881b-1dd341ead556\") " pod="openshift-marketplace/certified-operators-v2gb7" Jan 20 18:00:44 crc kubenswrapper[4558]: I0120 18:00:44.549629 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7d3d2009-8b03-4d74-881b-1dd341ead556-utilities\") pod \"certified-operators-v2gb7\" (UID: \"7d3d2009-8b03-4d74-881b-1dd341ead556\") " pod="openshift-marketplace/certified-operators-v2gb7" Jan 20 18:00:44 crc kubenswrapper[4558]: I0120 18:00:44.550085 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7d3d2009-8b03-4d74-881b-1dd341ead556-utilities\") pod 
\"certified-operators-v2gb7\" (UID: \"7d3d2009-8b03-4d74-881b-1dd341ead556\") " pod="openshift-marketplace/certified-operators-v2gb7" Jan 20 18:00:44 crc kubenswrapper[4558]: I0120 18:00:44.550262 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7d3d2009-8b03-4d74-881b-1dd341ead556-catalog-content\") pod \"certified-operators-v2gb7\" (UID: \"7d3d2009-8b03-4d74-881b-1dd341ead556\") " pod="openshift-marketplace/certified-operators-v2gb7" Jan 20 18:00:44 crc kubenswrapper[4558]: I0120 18:00:44.574104 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cd2tj\" (UniqueName: \"kubernetes.io/projected/7d3d2009-8b03-4d74-881b-1dd341ead556-kube-api-access-cd2tj\") pod \"certified-operators-v2gb7\" (UID: \"7d3d2009-8b03-4d74-881b-1dd341ead556\") " pod="openshift-marketplace/certified-operators-v2gb7" Jan 20 18:00:44 crc kubenswrapper[4558]: I0120 18:00:44.747537 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-v2gb7" Jan 20 18:00:45 crc kubenswrapper[4558]: I0120 18:00:45.162892 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-v2gb7"] Jan 20 18:00:45 crc kubenswrapper[4558]: W0120 18:00:45.170328 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7d3d2009_8b03_4d74_881b_1dd341ead556.slice/crio-372176ff0c07f1c9425544d68de680cf186483d65df87ded264593d500e48260 WatchSource:0}: Error finding container 372176ff0c07f1c9425544d68de680cf186483d65df87ded264593d500e48260: Status 404 returned error can't find the container with id 372176ff0c07f1c9425544d68de680cf186483d65df87ded264593d500e48260 Jan 20 18:00:45 crc kubenswrapper[4558]: I0120 18:00:45.451353 4558 generic.go:334] "Generic (PLEG): container finished" podID="7d3d2009-8b03-4d74-881b-1dd341ead556" containerID="1492361460a9146bc204ce6c5fa0b78dabdb4fe1c41d5eea0c85411ff7064cdb" exitCode=0 Jan 20 18:00:45 crc kubenswrapper[4558]: I0120 18:00:45.451464 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-v2gb7" event={"ID":"7d3d2009-8b03-4d74-881b-1dd341ead556","Type":"ContainerDied","Data":"1492361460a9146bc204ce6c5fa0b78dabdb4fe1c41d5eea0c85411ff7064cdb"} Jan 20 18:00:45 crc kubenswrapper[4558]: I0120 18:00:45.451555 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-v2gb7" event={"ID":"7d3d2009-8b03-4d74-881b-1dd341ead556","Type":"ContainerStarted","Data":"372176ff0c07f1c9425544d68de680cf186483d65df87ded264593d500e48260"} Jan 20 18:00:45 crc kubenswrapper[4558]: I0120 18:00:45.731389 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-metadata-edpm-compute-global-edpm-compute-global-x5msg" Jan 20 18:00:45 crc kubenswrapper[4558]: I0120 18:00:45.877713 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-1\" (UniqueName: \"kubernetes.io/secret/0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52-nova-metadata-neutron-config-1\") pod \"0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52\" (UID: \"0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52\") " Jan 20 18:00:45 crc kubenswrapper[4558]: I0120 18:00:45.877875 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52-nova-metadata-neutron-config-0\") pod \"0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52\" (UID: \"0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52\") " Jan 20 18:00:45 crc kubenswrapper[4558]: I0120 18:00:45.877910 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-2\" (UniqueName: \"kubernetes.io/secret/0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52-nova-metadata-neutron-config-2\") pod \"0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52\" (UID: \"0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52\") " Jan 20 18:00:45 crc kubenswrapper[4558]: I0120 18:00:45.877959 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52-inventory\") pod \"0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52\" (UID: \"0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52\") " Jan 20 18:00:45 crc kubenswrapper[4558]: I0120 18:00:45.878081 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52-neutron-metadata-combined-ca-bundle\") pod \"0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52\" (UID: \"0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52\") " Jan 20 18:00:45 crc kubenswrapper[4558]: I0120 18:00:45.878840 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vps2h\" (UniqueName: \"kubernetes.io/projected/0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52-kube-api-access-vps2h\") pod \"0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52\" (UID: \"0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52\") " Jan 20 18:00:45 crc kubenswrapper[4558]: I0120 18:00:45.878921 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52-ssh-key-edpm-compute-global\") pod \"0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52\" (UID: \"0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52\") " Jan 20 18:00:45 crc kubenswrapper[4558]: I0120 18:00:45.879038 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52-neutron-ovn-metadata-agent-neutron-config-0\") pod \"0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52\" (UID: \"0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52\") " Jan 20 18:00:45 crc kubenswrapper[4558]: I0120 18:00:45.893591 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52" (UID: "0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52"). 
InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:00:45 crc kubenswrapper[4558]: I0120 18:00:45.893737 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52-kube-api-access-vps2h" (OuterVolumeSpecName: "kube-api-access-vps2h") pod "0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52" (UID: "0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52"). InnerVolumeSpecName "kube-api-access-vps2h". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:00:45 crc kubenswrapper[4558]: I0120 18:00:45.901341 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52-inventory" (OuterVolumeSpecName: "inventory") pod "0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52" (UID: "0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:00:45 crc kubenswrapper[4558]: I0120 18:00:45.902199 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52-nova-metadata-neutron-config-2" (OuterVolumeSpecName: "nova-metadata-neutron-config-2") pod "0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52" (UID: "0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52"). InnerVolumeSpecName "nova-metadata-neutron-config-2". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:00:45 crc kubenswrapper[4558]: I0120 18:00:45.903529 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52-nova-metadata-neutron-config-1" (OuterVolumeSpecName: "nova-metadata-neutron-config-1") pod "0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52" (UID: "0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52"). InnerVolumeSpecName "nova-metadata-neutron-config-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:00:45 crc kubenswrapper[4558]: I0120 18:00:45.905716 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52-ssh-key-edpm-compute-global" (OuterVolumeSpecName: "ssh-key-edpm-compute-global") pod "0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52" (UID: "0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52"). InnerVolumeSpecName "ssh-key-edpm-compute-global". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:00:45 crc kubenswrapper[4558]: I0120 18:00:45.908286 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52" (UID: "0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:00:45 crc kubenswrapper[4558]: I0120 18:00:45.911025 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52" (UID: "0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52"). InnerVolumeSpecName "nova-metadata-neutron-config-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:00:45 crc kubenswrapper[4558]: I0120 18:00:45.980776 4558 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Jan 20 18:00:45 crc kubenswrapper[4558]: I0120 18:00:45.980807 4558 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-1\" (UniqueName: \"kubernetes.io/secret/0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52-nova-metadata-neutron-config-1\") on node \"crc\" DevicePath \"\"" Jan 20 18:00:45 crc kubenswrapper[4558]: I0120 18:00:45.980818 4558 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Jan 20 18:00:45 crc kubenswrapper[4558]: I0120 18:00:45.980833 4558 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-2\" (UniqueName: \"kubernetes.io/secret/0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52-nova-metadata-neutron-config-2\") on node \"crc\" DevicePath \"\"" Jan 20 18:00:45 crc kubenswrapper[4558]: I0120 18:00:45.980846 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:00:45 crc kubenswrapper[4558]: I0120 18:00:45.980857 4558 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:00:45 crc kubenswrapper[4558]: I0120 18:00:45.980870 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vps2h\" (UniqueName: \"kubernetes.io/projected/0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52-kube-api-access-vps2h\") on node \"crc\" DevicePath \"\"" Jan 20 18:00:45 crc kubenswrapper[4558]: I0120 18:00:45.980881 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52-ssh-key-edpm-compute-global\") on node \"crc\" DevicePath \"\"" Jan 20 18:00:46 crc kubenswrapper[4558]: I0120 18:00:46.464046 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-metadata-edpm-compute-global-edpm-compute-global-x5msg" event={"ID":"0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52","Type":"ContainerDied","Data":"6f28b22280288c59216d97c522b7766f1c476795e075e2dc55e4d2d03e1099f1"} Jan 20 18:00:46 crc kubenswrapper[4558]: I0120 18:00:46.464097 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6f28b22280288c59216d97c522b7766f1c476795e075e2dc55e4d2d03e1099f1" Jan 20 18:00:46 crc kubenswrapper[4558]: I0120 18:00:46.464136 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-metadata-edpm-compute-global-edpm-compute-global-x5msg" Jan 20 18:00:46 crc kubenswrapper[4558]: I0120 18:00:46.555627 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-ovn-edpm-compute-global-edpm-compute-global-xhhjn"] Jan 20 18:00:46 crc kubenswrapper[4558]: E0120 18:00:46.555945 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52" containerName="neutron-metadata-edpm-compute-global-edpm-compute-global" Jan 20 18:00:46 crc kubenswrapper[4558]: I0120 18:00:46.555959 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52" containerName="neutron-metadata-edpm-compute-global-edpm-compute-global" Jan 20 18:00:46 crc kubenswrapper[4558]: I0120 18:00:46.556108 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52" containerName="neutron-metadata-edpm-compute-global-edpm-compute-global" Jan 20 18:00:46 crc kubenswrapper[4558]: I0120 18:00:46.556557 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-ovn-edpm-compute-global-edpm-compute-global-xhhjn" Jan 20 18:00:46 crc kubenswrapper[4558]: I0120 18:00:46.558682 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-ovn-agent-neutron-config" Jan 20 18:00:46 crc kubenswrapper[4558]: I0120 18:00:46.558964 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"edpm-compute-global-dockercfg-wkdxg" Jan 20 18:00:46 crc kubenswrapper[4558]: I0120 18:00:46.558995 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"combined-ca-bundle" Jan 20 18:00:46 crc kubenswrapper[4558]: I0120 18:00:46.559273 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-edpm-compute-global" Jan 20 18:00:46 crc kubenswrapper[4558]: I0120 18:00:46.560533 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 18:00:46 crc kubenswrapper[4558]: I0120 18:00:46.574792 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 18:00:46 crc kubenswrapper[4558]: I0120 18:00:46.575337 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-ovn-edpm-compute-global-edpm-compute-global-xhhjn"] Jan 20 18:00:46 crc kubenswrapper[4558]: I0120 18:00:46.696857 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t4rw7\" (UniqueName: \"kubernetes.io/projected/3447daca-2874-4616-bc72-c3f0838862a7-kube-api-access-t4rw7\") pod \"neutron-ovn-edpm-compute-global-edpm-compute-global-xhhjn\" (UID: \"3447daca-2874-4616-bc72-c3f0838862a7\") " pod="openstack-kuttl-tests/neutron-ovn-edpm-compute-global-edpm-compute-global-xhhjn" Jan 20 18:00:46 crc kubenswrapper[4558]: I0120 18:00:46.696935 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3447daca-2874-4616-bc72-c3f0838862a7-inventory\") pod \"neutron-ovn-edpm-compute-global-edpm-compute-global-xhhjn\" (UID: \"3447daca-2874-4616-bc72-c3f0838862a7\") " 
pod="openstack-kuttl-tests/neutron-ovn-edpm-compute-global-edpm-compute-global-xhhjn" Jan 20 18:00:46 crc kubenswrapper[4558]: I0120 18:00:46.696990 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3447daca-2874-4616-bc72-c3f0838862a7-neutron-ovn-combined-ca-bundle\") pod \"neutron-ovn-edpm-compute-global-edpm-compute-global-xhhjn\" (UID: \"3447daca-2874-4616-bc72-c3f0838862a7\") " pod="openstack-kuttl-tests/neutron-ovn-edpm-compute-global-edpm-compute-global-xhhjn" Jan 20 18:00:46 crc kubenswrapper[4558]: I0120 18:00:46.697014 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/3447daca-2874-4616-bc72-c3f0838862a7-neutron-ovn-agent-neutron-config-0\") pod \"neutron-ovn-edpm-compute-global-edpm-compute-global-xhhjn\" (UID: \"3447daca-2874-4616-bc72-c3f0838862a7\") " pod="openstack-kuttl-tests/neutron-ovn-edpm-compute-global-edpm-compute-global-xhhjn" Jan 20 18:00:46 crc kubenswrapper[4558]: I0120 18:00:46.697048 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/3447daca-2874-4616-bc72-c3f0838862a7-ssh-key-edpm-compute-global\") pod \"neutron-ovn-edpm-compute-global-edpm-compute-global-xhhjn\" (UID: \"3447daca-2874-4616-bc72-c3f0838862a7\") " pod="openstack-kuttl-tests/neutron-ovn-edpm-compute-global-edpm-compute-global-xhhjn" Jan 20 18:00:46 crc kubenswrapper[4558]: I0120 18:00:46.798796 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t4rw7\" (UniqueName: \"kubernetes.io/projected/3447daca-2874-4616-bc72-c3f0838862a7-kube-api-access-t4rw7\") pod \"neutron-ovn-edpm-compute-global-edpm-compute-global-xhhjn\" (UID: \"3447daca-2874-4616-bc72-c3f0838862a7\") " pod="openstack-kuttl-tests/neutron-ovn-edpm-compute-global-edpm-compute-global-xhhjn" Jan 20 18:00:46 crc kubenswrapper[4558]: I0120 18:00:46.798883 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3447daca-2874-4616-bc72-c3f0838862a7-inventory\") pod \"neutron-ovn-edpm-compute-global-edpm-compute-global-xhhjn\" (UID: \"3447daca-2874-4616-bc72-c3f0838862a7\") " pod="openstack-kuttl-tests/neutron-ovn-edpm-compute-global-edpm-compute-global-xhhjn" Jan 20 18:00:46 crc kubenswrapper[4558]: I0120 18:00:46.798920 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3447daca-2874-4616-bc72-c3f0838862a7-neutron-ovn-combined-ca-bundle\") pod \"neutron-ovn-edpm-compute-global-edpm-compute-global-xhhjn\" (UID: \"3447daca-2874-4616-bc72-c3f0838862a7\") " pod="openstack-kuttl-tests/neutron-ovn-edpm-compute-global-edpm-compute-global-xhhjn" Jan 20 18:00:46 crc kubenswrapper[4558]: I0120 18:00:46.798963 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/3447daca-2874-4616-bc72-c3f0838862a7-neutron-ovn-agent-neutron-config-0\") pod \"neutron-ovn-edpm-compute-global-edpm-compute-global-xhhjn\" (UID: \"3447daca-2874-4616-bc72-c3f0838862a7\") " pod="openstack-kuttl-tests/neutron-ovn-edpm-compute-global-edpm-compute-global-xhhjn" Jan 20 18:00:46 crc kubenswrapper[4558]: 
I0120 18:00:46.799003 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/3447daca-2874-4616-bc72-c3f0838862a7-ssh-key-edpm-compute-global\") pod \"neutron-ovn-edpm-compute-global-edpm-compute-global-xhhjn\" (UID: \"3447daca-2874-4616-bc72-c3f0838862a7\") " pod="openstack-kuttl-tests/neutron-ovn-edpm-compute-global-edpm-compute-global-xhhjn" Jan 20 18:00:46 crc kubenswrapper[4558]: I0120 18:00:46.801935 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"combined-ca-bundle" Jan 20 18:00:46 crc kubenswrapper[4558]: I0120 18:00:46.802021 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 18:00:46 crc kubenswrapper[4558]: I0120 18:00:46.802138 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-edpm-compute-global" Jan 20 18:00:46 crc kubenswrapper[4558]: I0120 18:00:46.804374 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-ovn-agent-neutron-config" Jan 20 18:00:46 crc kubenswrapper[4558]: I0120 18:00:46.815066 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3447daca-2874-4616-bc72-c3f0838862a7-neutron-ovn-combined-ca-bundle\") pod \"neutron-ovn-edpm-compute-global-edpm-compute-global-xhhjn\" (UID: \"3447daca-2874-4616-bc72-c3f0838862a7\") " pod="openstack-kuttl-tests/neutron-ovn-edpm-compute-global-edpm-compute-global-xhhjn" Jan 20 18:00:46 crc kubenswrapper[4558]: I0120 18:00:46.815074 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3447daca-2874-4616-bc72-c3f0838862a7-inventory\") pod \"neutron-ovn-edpm-compute-global-edpm-compute-global-xhhjn\" (UID: \"3447daca-2874-4616-bc72-c3f0838862a7\") " pod="openstack-kuttl-tests/neutron-ovn-edpm-compute-global-edpm-compute-global-xhhjn" Jan 20 18:00:46 crc kubenswrapper[4558]: I0120 18:00:46.817717 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/3447daca-2874-4616-bc72-c3f0838862a7-ssh-key-edpm-compute-global\") pod \"neutron-ovn-edpm-compute-global-edpm-compute-global-xhhjn\" (UID: \"3447daca-2874-4616-bc72-c3f0838862a7\") " pod="openstack-kuttl-tests/neutron-ovn-edpm-compute-global-edpm-compute-global-xhhjn" Jan 20 18:00:46 crc kubenswrapper[4558]: I0120 18:00:46.818091 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/3447daca-2874-4616-bc72-c3f0838862a7-neutron-ovn-agent-neutron-config-0\") pod \"neutron-ovn-edpm-compute-global-edpm-compute-global-xhhjn\" (UID: \"3447daca-2874-4616-bc72-c3f0838862a7\") " pod="openstack-kuttl-tests/neutron-ovn-edpm-compute-global-edpm-compute-global-xhhjn" Jan 20 18:00:46 crc kubenswrapper[4558]: I0120 18:00:46.818415 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t4rw7\" (UniqueName: \"kubernetes.io/projected/3447daca-2874-4616-bc72-c3f0838862a7-kube-api-access-t4rw7\") pod \"neutron-ovn-edpm-compute-global-edpm-compute-global-xhhjn\" (UID: \"3447daca-2874-4616-bc72-c3f0838862a7\") " pod="openstack-kuttl-tests/neutron-ovn-edpm-compute-global-edpm-compute-global-xhhjn" Jan 20 
18:00:46 crc kubenswrapper[4558]: I0120 18:00:46.875113 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"edpm-compute-global-dockercfg-wkdxg" Jan 20 18:00:46 crc kubenswrapper[4558]: I0120 18:00:46.884389 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-ovn-edpm-compute-global-edpm-compute-global-xhhjn" Jan 20 18:00:47 crc kubenswrapper[4558]: I0120 18:00:47.297464 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-ovn-edpm-compute-global-edpm-compute-global-xhhjn"] Jan 20 18:00:47 crc kubenswrapper[4558]: W0120 18:00:47.320878 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3447daca_2874_4616_bc72_c3f0838862a7.slice/crio-db260c440d4367cb9c5b5c549318d047916c0184eb9788f53d3ab1b0acecc63b WatchSource:0}: Error finding container db260c440d4367cb9c5b5c549318d047916c0184eb9788f53d3ab1b0acecc63b: Status 404 returned error can't find the container with id db260c440d4367cb9c5b5c549318d047916c0184eb9788f53d3ab1b0acecc63b Jan 20 18:00:47 crc kubenswrapper[4558]: I0120 18:00:47.479380 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-ovn-edpm-compute-global-edpm-compute-global-xhhjn" event={"ID":"3447daca-2874-4616-bc72-c3f0838862a7","Type":"ContainerStarted","Data":"db260c440d4367cb9c5b5c549318d047916c0184eb9788f53d3ab1b0acecc63b"} Jan 20 18:00:47 crc kubenswrapper[4558]: I0120 18:00:47.971010 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-xfnvt" Jan 20 18:00:47 crc kubenswrapper[4558]: I0120 18:00:47.971258 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-xfnvt" Jan 20 18:00:47 crc kubenswrapper[4558]: I0120 18:00:47.985187 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 18:00:48 crc kubenswrapper[4558]: I0120 18:00:48.016309 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-xfnvt" Jan 20 18:00:48 crc kubenswrapper[4558]: I0120 18:00:48.506086 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-ovn-edpm-compute-global-edpm-compute-global-xhhjn" event={"ID":"3447daca-2874-4616-bc72-c3f0838862a7","Type":"ContainerStarted","Data":"b1e8d55d144061fe99552080982b3674f1edcf3751fb0f212ec977996a16cba9"} Jan 20 18:00:48 crc kubenswrapper[4558]: I0120 18:00:48.529227 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/neutron-ovn-edpm-compute-global-edpm-compute-global-xhhjn" podStartSLOduration=1.877689534 podStartE2EDuration="2.529202448s" podCreationTimestamp="2026-01-20 18:00:46 +0000 UTC" firstStartedPulling="2026-01-20 18:00:47.330702492 +0000 UTC m=+4741.091040459" lastFinishedPulling="2026-01-20 18:00:47.982215406 +0000 UTC m=+4741.742553373" observedRunningTime="2026-01-20 18:00:48.523704685 +0000 UTC m=+4742.284042651" watchObservedRunningTime="2026-01-20 18:00:48.529202448 +0000 UTC m=+4742.289540415" Jan 20 18:00:48 crc kubenswrapper[4558]: I0120 18:00:48.557241 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-xfnvt" Jan 20 18:00:49 crc kubenswrapper[4558]: I0120 18:00:49.223268 4558 kubelet.go:2437] 
"SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-xfnvt"] Jan 20 18:00:49 crc kubenswrapper[4558]: I0120 18:00:49.518769 4558 generic.go:334] "Generic (PLEG): container finished" podID="3447daca-2874-4616-bc72-c3f0838862a7" containerID="b1e8d55d144061fe99552080982b3674f1edcf3751fb0f212ec977996a16cba9" exitCode=0 Jan 20 18:00:49 crc kubenswrapper[4558]: I0120 18:00:49.518829 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-ovn-edpm-compute-global-edpm-compute-global-xhhjn" event={"ID":"3447daca-2874-4616-bc72-c3f0838862a7","Type":"ContainerDied","Data":"b1e8d55d144061fe99552080982b3674f1edcf3751fb0f212ec977996a16cba9"} Jan 20 18:00:50 crc kubenswrapper[4558]: I0120 18:00:50.529518 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-xfnvt" podUID="104ae5cb-67ef-4dac-af44-7fab7118cae1" containerName="registry-server" containerID="cri-o://902f7da23862fcd7aa3483c5b431f96f63b48d1eeee42267cb5e48344765f7b2" gracePeriod=2 Jan 20 18:00:51 crc kubenswrapper[4558]: I0120 18:00:51.477611 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-ovn-edpm-compute-global-edpm-compute-global-xhhjn" Jan 20 18:00:51 crc kubenswrapper[4558]: I0120 18:00:51.499302 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/3447daca-2874-4616-bc72-c3f0838862a7-ssh-key-edpm-compute-global\") pod \"3447daca-2874-4616-bc72-c3f0838862a7\" (UID: \"3447daca-2874-4616-bc72-c3f0838862a7\") " Jan 20 18:00:51 crc kubenswrapper[4558]: I0120 18:00:51.499442 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3447daca-2874-4616-bc72-c3f0838862a7-inventory\") pod \"3447daca-2874-4616-bc72-c3f0838862a7\" (UID: \"3447daca-2874-4616-bc72-c3f0838862a7\") " Jan 20 18:00:51 crc kubenswrapper[4558]: I0120 18:00:51.499517 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t4rw7\" (UniqueName: \"kubernetes.io/projected/3447daca-2874-4616-bc72-c3f0838862a7-kube-api-access-t4rw7\") pod \"3447daca-2874-4616-bc72-c3f0838862a7\" (UID: \"3447daca-2874-4616-bc72-c3f0838862a7\") " Jan 20 18:00:51 crc kubenswrapper[4558]: I0120 18:00:51.499790 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3447daca-2874-4616-bc72-c3f0838862a7-neutron-ovn-combined-ca-bundle\") pod \"3447daca-2874-4616-bc72-c3f0838862a7\" (UID: \"3447daca-2874-4616-bc72-c3f0838862a7\") " Jan 20 18:00:51 crc kubenswrapper[4558]: I0120 18:00:51.499818 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/3447daca-2874-4616-bc72-c3f0838862a7-neutron-ovn-agent-neutron-config-0\") pod \"3447daca-2874-4616-bc72-c3f0838862a7\" (UID: \"3447daca-2874-4616-bc72-c3f0838862a7\") " Jan 20 18:00:51 crc kubenswrapper[4558]: I0120 18:00:51.506663 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3447daca-2874-4616-bc72-c3f0838862a7-kube-api-access-t4rw7" (OuterVolumeSpecName: "kube-api-access-t4rw7") pod "3447daca-2874-4616-bc72-c3f0838862a7" (UID: "3447daca-2874-4616-bc72-c3f0838862a7"). 
InnerVolumeSpecName "kube-api-access-t4rw7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:00:51 crc kubenswrapper[4558]: I0120 18:00:51.516364 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3447daca-2874-4616-bc72-c3f0838862a7-neutron-ovn-combined-ca-bundle" (OuterVolumeSpecName: "neutron-ovn-combined-ca-bundle") pod "3447daca-2874-4616-bc72-c3f0838862a7" (UID: "3447daca-2874-4616-bc72-c3f0838862a7"). InnerVolumeSpecName "neutron-ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:00:51 crc kubenswrapper[4558]: I0120 18:00:51.538200 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3447daca-2874-4616-bc72-c3f0838862a7-neutron-ovn-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-agent-neutron-config-0") pod "3447daca-2874-4616-bc72-c3f0838862a7" (UID: "3447daca-2874-4616-bc72-c3f0838862a7"). InnerVolumeSpecName "neutron-ovn-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:00:51 crc kubenswrapper[4558]: I0120 18:00:51.539888 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3447daca-2874-4616-bc72-c3f0838862a7-ssh-key-edpm-compute-global" (OuterVolumeSpecName: "ssh-key-edpm-compute-global") pod "3447daca-2874-4616-bc72-c3f0838862a7" (UID: "3447daca-2874-4616-bc72-c3f0838862a7"). InnerVolumeSpecName "ssh-key-edpm-compute-global". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:00:51 crc kubenswrapper[4558]: I0120 18:00:51.543023 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-ovn-edpm-compute-global-edpm-compute-global-xhhjn" event={"ID":"3447daca-2874-4616-bc72-c3f0838862a7","Type":"ContainerDied","Data":"db260c440d4367cb9c5b5c549318d047916c0184eb9788f53d3ab1b0acecc63b"} Jan 20 18:00:51 crc kubenswrapper[4558]: I0120 18:00:51.543065 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="db260c440d4367cb9c5b5c549318d047916c0184eb9788f53d3ab1b0acecc63b" Jan 20 18:00:51 crc kubenswrapper[4558]: I0120 18:00:51.543116 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-ovn-edpm-compute-global-edpm-compute-global-xhhjn" Jan 20 18:00:51 crc kubenswrapper[4558]: I0120 18:00:51.544904 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3447daca-2874-4616-bc72-c3f0838862a7-inventory" (OuterVolumeSpecName: "inventory") pod "3447daca-2874-4616-bc72-c3f0838862a7" (UID: "3447daca-2874-4616-bc72-c3f0838862a7"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:00:51 crc kubenswrapper[4558]: I0120 18:00:51.555206 4558 generic.go:334] "Generic (PLEG): container finished" podID="104ae5cb-67ef-4dac-af44-7fab7118cae1" containerID="902f7da23862fcd7aa3483c5b431f96f63b48d1eeee42267cb5e48344765f7b2" exitCode=0 Jan 20 18:00:51 crc kubenswrapper[4558]: I0120 18:00:51.555253 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xfnvt" event={"ID":"104ae5cb-67ef-4dac-af44-7fab7118cae1","Type":"ContainerDied","Data":"902f7da23862fcd7aa3483c5b431f96f63b48d1eeee42267cb5e48344765f7b2"} Jan 20 18:00:51 crc kubenswrapper[4558]: I0120 18:00:51.587785 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-sriov-edpm-compute-global-edpm-compute-global-p96c4"] Jan 20 18:00:51 crc kubenswrapper[4558]: E0120 18:00:51.588124 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3447daca-2874-4616-bc72-c3f0838862a7" containerName="neutron-ovn-edpm-compute-global-edpm-compute-global" Jan 20 18:00:51 crc kubenswrapper[4558]: I0120 18:00:51.588139 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="3447daca-2874-4616-bc72-c3f0838862a7" containerName="neutron-ovn-edpm-compute-global-edpm-compute-global" Jan 20 18:00:51 crc kubenswrapper[4558]: I0120 18:00:51.588313 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="3447daca-2874-4616-bc72-c3f0838862a7" containerName="neutron-ovn-edpm-compute-global-edpm-compute-global" Jan 20 18:00:51 crc kubenswrapper[4558]: I0120 18:00:51.589194 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-sriov-edpm-compute-global-edpm-compute-global-p96c4" Jan 20 18:00:51 crc kubenswrapper[4558]: I0120 18:00:51.591373 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-sriov-agent-neutron-config" Jan 20 18:00:51 crc kubenswrapper[4558]: I0120 18:00:51.608917 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-sriov-edpm-compute-global-edpm-compute-global-p96c4"] Jan 20 18:00:51 crc kubenswrapper[4558]: I0120 18:00:51.609727 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3447daca-2874-4616-bc72-c3f0838862a7-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:00:51 crc kubenswrapper[4558]: I0120 18:00:51.609758 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t4rw7\" (UniqueName: \"kubernetes.io/projected/3447daca-2874-4616-bc72-c3f0838862a7-kube-api-access-t4rw7\") on node \"crc\" DevicePath \"\"" Jan 20 18:00:51 crc kubenswrapper[4558]: I0120 18:00:51.609777 4558 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3447daca-2874-4616-bc72-c3f0838862a7-neutron-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:00:51 crc kubenswrapper[4558]: I0120 18:00:51.609786 4558 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/3447daca-2874-4616-bc72-c3f0838862a7-neutron-ovn-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Jan 20 18:00:51 crc kubenswrapper[4558]: I0120 18:00:51.609796 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-edpm-compute-global\" (UniqueName: 
\"kubernetes.io/secret/3447daca-2874-4616-bc72-c3f0838862a7-ssh-key-edpm-compute-global\") on node \"crc\" DevicePath \"\"" Jan 20 18:00:51 crc kubenswrapper[4558]: I0120 18:00:51.670084 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xfnvt" Jan 20 18:00:51 crc kubenswrapper[4558]: I0120 18:00:51.711084 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/104ae5cb-67ef-4dac-af44-7fab7118cae1-utilities\") pod \"104ae5cb-67ef-4dac-af44-7fab7118cae1\" (UID: \"104ae5cb-67ef-4dac-af44-7fab7118cae1\") " Jan 20 18:00:51 crc kubenswrapper[4558]: I0120 18:00:51.711204 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zxt75\" (UniqueName: \"kubernetes.io/projected/104ae5cb-67ef-4dac-af44-7fab7118cae1-kube-api-access-zxt75\") pod \"104ae5cb-67ef-4dac-af44-7fab7118cae1\" (UID: \"104ae5cb-67ef-4dac-af44-7fab7118cae1\") " Jan 20 18:00:51 crc kubenswrapper[4558]: I0120 18:00:51.711301 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/104ae5cb-67ef-4dac-af44-7fab7118cae1-catalog-content\") pod \"104ae5cb-67ef-4dac-af44-7fab7118cae1\" (UID: \"104ae5cb-67ef-4dac-af44-7fab7118cae1\") " Jan 20 18:00:51 crc kubenswrapper[4558]: I0120 18:00:51.711558 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/fd8020a9-d02c-4ad9-a4c7-e9dd15f1959c-neutron-sriov-agent-neutron-config-0\") pod \"neutron-sriov-edpm-compute-global-edpm-compute-global-p96c4\" (UID: \"fd8020a9-d02c-4ad9-a4c7-e9dd15f1959c\") " pod="openstack-kuttl-tests/neutron-sriov-edpm-compute-global-edpm-compute-global-p96c4" Jan 20 18:00:51 crc kubenswrapper[4558]: I0120 18:00:51.711586 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fd8020a9-d02c-4ad9-a4c7-e9dd15f1959c-inventory\") pod \"neutron-sriov-edpm-compute-global-edpm-compute-global-p96c4\" (UID: \"fd8020a9-d02c-4ad9-a4c7-e9dd15f1959c\") " pod="openstack-kuttl-tests/neutron-sriov-edpm-compute-global-edpm-compute-global-p96c4" Jan 20 18:00:51 crc kubenswrapper[4558]: I0120 18:00:51.711662 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/fd8020a9-d02c-4ad9-a4c7-e9dd15f1959c-ssh-key-edpm-compute-global\") pod \"neutron-sriov-edpm-compute-global-edpm-compute-global-p96c4\" (UID: \"fd8020a9-d02c-4ad9-a4c7-e9dd15f1959c\") " pod="openstack-kuttl-tests/neutron-sriov-edpm-compute-global-edpm-compute-global-p96c4" Jan 20 18:00:51 crc kubenswrapper[4558]: I0120 18:00:51.711704 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd8020a9-d02c-4ad9-a4c7-e9dd15f1959c-neutron-sriov-combined-ca-bundle\") pod \"neutron-sriov-edpm-compute-global-edpm-compute-global-p96c4\" (UID: \"fd8020a9-d02c-4ad9-a4c7-e9dd15f1959c\") " pod="openstack-kuttl-tests/neutron-sriov-edpm-compute-global-edpm-compute-global-p96c4" Jan 20 18:00:51 crc kubenswrapper[4558]: I0120 18:00:51.711729 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"kube-api-access-7rzdz\" (UniqueName: \"kubernetes.io/projected/fd8020a9-d02c-4ad9-a4c7-e9dd15f1959c-kube-api-access-7rzdz\") pod \"neutron-sriov-edpm-compute-global-edpm-compute-global-p96c4\" (UID: \"fd8020a9-d02c-4ad9-a4c7-e9dd15f1959c\") " pod="openstack-kuttl-tests/neutron-sriov-edpm-compute-global-edpm-compute-global-p96c4" Jan 20 18:00:51 crc kubenswrapper[4558]: I0120 18:00:51.712364 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/104ae5cb-67ef-4dac-af44-7fab7118cae1-utilities" (OuterVolumeSpecName: "utilities") pod "104ae5cb-67ef-4dac-af44-7fab7118cae1" (UID: "104ae5cb-67ef-4dac-af44-7fab7118cae1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:00:51 crc kubenswrapper[4558]: I0120 18:00:51.714192 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/104ae5cb-67ef-4dac-af44-7fab7118cae1-kube-api-access-zxt75" (OuterVolumeSpecName: "kube-api-access-zxt75") pod "104ae5cb-67ef-4dac-af44-7fab7118cae1" (UID: "104ae5cb-67ef-4dac-af44-7fab7118cae1"). InnerVolumeSpecName "kube-api-access-zxt75". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:00:51 crc kubenswrapper[4558]: I0120 18:00:51.813261 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/fd8020a9-d02c-4ad9-a4c7-e9dd15f1959c-ssh-key-edpm-compute-global\") pod \"neutron-sriov-edpm-compute-global-edpm-compute-global-p96c4\" (UID: \"fd8020a9-d02c-4ad9-a4c7-e9dd15f1959c\") " pod="openstack-kuttl-tests/neutron-sriov-edpm-compute-global-edpm-compute-global-p96c4" Jan 20 18:00:51 crc kubenswrapper[4558]: I0120 18:00:51.813395 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd8020a9-d02c-4ad9-a4c7-e9dd15f1959c-neutron-sriov-combined-ca-bundle\") pod \"neutron-sriov-edpm-compute-global-edpm-compute-global-p96c4\" (UID: \"fd8020a9-d02c-4ad9-a4c7-e9dd15f1959c\") " pod="openstack-kuttl-tests/neutron-sriov-edpm-compute-global-edpm-compute-global-p96c4" Jan 20 18:00:51 crc kubenswrapper[4558]: I0120 18:00:51.813461 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7rzdz\" (UniqueName: \"kubernetes.io/projected/fd8020a9-d02c-4ad9-a4c7-e9dd15f1959c-kube-api-access-7rzdz\") pod \"neutron-sriov-edpm-compute-global-edpm-compute-global-p96c4\" (UID: \"fd8020a9-d02c-4ad9-a4c7-e9dd15f1959c\") " pod="openstack-kuttl-tests/neutron-sriov-edpm-compute-global-edpm-compute-global-p96c4" Jan 20 18:00:51 crc kubenswrapper[4558]: I0120 18:00:51.813544 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/fd8020a9-d02c-4ad9-a4c7-e9dd15f1959c-neutron-sriov-agent-neutron-config-0\") pod \"neutron-sriov-edpm-compute-global-edpm-compute-global-p96c4\" (UID: \"fd8020a9-d02c-4ad9-a4c7-e9dd15f1959c\") " pod="openstack-kuttl-tests/neutron-sriov-edpm-compute-global-edpm-compute-global-p96c4" Jan 20 18:00:51 crc kubenswrapper[4558]: I0120 18:00:51.813571 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fd8020a9-d02c-4ad9-a4c7-e9dd15f1959c-inventory\") pod \"neutron-sriov-edpm-compute-global-edpm-compute-global-p96c4\" (UID: 
\"fd8020a9-d02c-4ad9-a4c7-e9dd15f1959c\") " pod="openstack-kuttl-tests/neutron-sriov-edpm-compute-global-edpm-compute-global-p96c4" Jan 20 18:00:51 crc kubenswrapper[4558]: I0120 18:00:51.813688 4558 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/104ae5cb-67ef-4dac-af44-7fab7118cae1-utilities\") on node \"crc\" DevicePath \"\"" Jan 20 18:00:51 crc kubenswrapper[4558]: I0120 18:00:51.813710 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zxt75\" (UniqueName: \"kubernetes.io/projected/104ae5cb-67ef-4dac-af44-7fab7118cae1-kube-api-access-zxt75\") on node \"crc\" DevicePath \"\"" Jan 20 18:00:51 crc kubenswrapper[4558]: I0120 18:00:51.816428 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd8020a9-d02c-4ad9-a4c7-e9dd15f1959c-neutron-sriov-combined-ca-bundle\") pod \"neutron-sriov-edpm-compute-global-edpm-compute-global-p96c4\" (UID: \"fd8020a9-d02c-4ad9-a4c7-e9dd15f1959c\") " pod="openstack-kuttl-tests/neutron-sriov-edpm-compute-global-edpm-compute-global-p96c4" Jan 20 18:00:51 crc kubenswrapper[4558]: I0120 18:00:51.816428 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fd8020a9-d02c-4ad9-a4c7-e9dd15f1959c-inventory\") pod \"neutron-sriov-edpm-compute-global-edpm-compute-global-p96c4\" (UID: \"fd8020a9-d02c-4ad9-a4c7-e9dd15f1959c\") " pod="openstack-kuttl-tests/neutron-sriov-edpm-compute-global-edpm-compute-global-p96c4" Jan 20 18:00:51 crc kubenswrapper[4558]: I0120 18:00:51.816503 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/fd8020a9-d02c-4ad9-a4c7-e9dd15f1959c-neutron-sriov-agent-neutron-config-0\") pod \"neutron-sriov-edpm-compute-global-edpm-compute-global-p96c4\" (UID: \"fd8020a9-d02c-4ad9-a4c7-e9dd15f1959c\") " pod="openstack-kuttl-tests/neutron-sriov-edpm-compute-global-edpm-compute-global-p96c4" Jan 20 18:00:51 crc kubenswrapper[4558]: I0120 18:00:51.816777 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/fd8020a9-d02c-4ad9-a4c7-e9dd15f1959c-ssh-key-edpm-compute-global\") pod \"neutron-sriov-edpm-compute-global-edpm-compute-global-p96c4\" (UID: \"fd8020a9-d02c-4ad9-a4c7-e9dd15f1959c\") " pod="openstack-kuttl-tests/neutron-sriov-edpm-compute-global-edpm-compute-global-p96c4" Jan 20 18:00:51 crc kubenswrapper[4558]: I0120 18:00:51.826795 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7rzdz\" (UniqueName: \"kubernetes.io/projected/fd8020a9-d02c-4ad9-a4c7-e9dd15f1959c-kube-api-access-7rzdz\") pod \"neutron-sriov-edpm-compute-global-edpm-compute-global-p96c4\" (UID: \"fd8020a9-d02c-4ad9-a4c7-e9dd15f1959c\") " pod="openstack-kuttl-tests/neutron-sriov-edpm-compute-global-edpm-compute-global-p96c4" Jan 20 18:00:51 crc kubenswrapper[4558]: I0120 18:00:51.926230 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-sriov-edpm-compute-global-edpm-compute-global-p96c4" Jan 20 18:00:52 crc kubenswrapper[4558]: I0120 18:00:52.110609 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/104ae5cb-67ef-4dac-af44-7fab7118cae1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "104ae5cb-67ef-4dac-af44-7fab7118cae1" (UID: "104ae5cb-67ef-4dac-af44-7fab7118cae1"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:00:52 crc kubenswrapper[4558]: I0120 18:00:52.118037 4558 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/104ae5cb-67ef-4dac-af44-7fab7118cae1-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 20 18:00:52 crc kubenswrapper[4558]: I0120 18:00:52.317652 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-sriov-edpm-compute-global-edpm-compute-global-p96c4"] Jan 20 18:00:52 crc kubenswrapper[4558]: W0120 18:00:52.321801 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfd8020a9_d02c_4ad9_a4c7_e9dd15f1959c.slice/crio-f81ff0a9242967fb8276fce98dbdfe5dd906d270c9c3f0d82b115a656acbbacd WatchSource:0}: Error finding container f81ff0a9242967fb8276fce98dbdfe5dd906d270c9c3f0d82b115a656acbbacd: Status 404 returned error can't find the container with id f81ff0a9242967fb8276fce98dbdfe5dd906d270c9c3f0d82b115a656acbbacd Jan 20 18:00:52 crc kubenswrapper[4558]: I0120 18:00:52.569260 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xfnvt" Jan 20 18:00:52 crc kubenswrapper[4558]: I0120 18:00:52.573346 4558 generic.go:334] "Generic (PLEG): container finished" podID="7d3d2009-8b03-4d74-881b-1dd341ead556" containerID="a1a29f1ec18bf5f735539e184e8d55034869d7def92de67b7e93bab907b33650" exitCode=0 Jan 20 18:00:52 crc kubenswrapper[4558]: I0120 18:00:52.578946 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-sriov-edpm-compute-global-edpm-compute-global-p96c4" event={"ID":"fd8020a9-d02c-4ad9-a4c7-e9dd15f1959c","Type":"ContainerStarted","Data":"f81ff0a9242967fb8276fce98dbdfe5dd906d270c9c3f0d82b115a656acbbacd"} Jan 20 18:00:52 crc kubenswrapper[4558]: I0120 18:00:52.578983 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xfnvt" event={"ID":"104ae5cb-67ef-4dac-af44-7fab7118cae1","Type":"ContainerDied","Data":"0ee8d7b40459c5de23b4848c0b335cfd2121dc11c6712d51685eb1e240957a0c"} Jan 20 18:00:52 crc kubenswrapper[4558]: I0120 18:00:52.579005 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-v2gb7" event={"ID":"7d3d2009-8b03-4d74-881b-1dd341ead556","Type":"ContainerDied","Data":"a1a29f1ec18bf5f735539e184e8d55034869d7def92de67b7e93bab907b33650"} Jan 20 18:00:52 crc kubenswrapper[4558]: I0120 18:00:52.579034 4558 scope.go:117] "RemoveContainer" containerID="902f7da23862fcd7aa3483c5b431f96f63b48d1eeee42267cb5e48344765f7b2" Jan 20 18:00:52 crc kubenswrapper[4558]: I0120 18:00:52.608793 4558 scope.go:117] "RemoveContainer" containerID="fc78be79b3948d4b2a5893a9d2797be752c714a3794e1fcc699f57389197e3bd" Jan 20 18:00:52 crc kubenswrapper[4558]: I0120 18:00:52.630555 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-xfnvt"] 
Jan 20 18:00:52 crc kubenswrapper[4558]: I0120 18:00:52.636427 4558 scope.go:117] "RemoveContainer" containerID="5fce438ff7f02f90d7e395cd5333db3bcf4a616b3bc4f45780931b597975028a" Jan 20 18:00:52 crc kubenswrapper[4558]: I0120 18:00:52.637989 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-xfnvt"] Jan 20 18:00:53 crc kubenswrapper[4558]: I0120 18:00:53.587996 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-v2gb7" event={"ID":"7d3d2009-8b03-4d74-881b-1dd341ead556","Type":"ContainerStarted","Data":"d4aedfe7c4ac09a3251410c9d96bebfdba6c3b90b8229ff26c7781feb57b8f15"} Jan 20 18:00:53 crc kubenswrapper[4558]: I0120 18:00:53.590034 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-sriov-edpm-compute-global-edpm-compute-global-p96c4" event={"ID":"fd8020a9-d02c-4ad9-a4c7-e9dd15f1959c","Type":"ContainerStarted","Data":"f66d5e3b991f3f68fa91c7c17d38fd34980830c10782a4b3d415cb3841f21fde"} Jan 20 18:00:53 crc kubenswrapper[4558]: I0120 18:00:53.608034 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-v2gb7" podStartSLOduration=1.888372119 podStartE2EDuration="9.608011431s" podCreationTimestamp="2026-01-20 18:00:44 +0000 UTC" firstStartedPulling="2026-01-20 18:00:45.45382842 +0000 UTC m=+4739.214166387" lastFinishedPulling="2026-01-20 18:00:53.173467732 +0000 UTC m=+4746.933805699" observedRunningTime="2026-01-20 18:00:53.607460114 +0000 UTC m=+4747.367798081" watchObservedRunningTime="2026-01-20 18:00:53.608011431 +0000 UTC m=+4747.368349398" Jan 20 18:00:53 crc kubenswrapper[4558]: I0120 18:00:53.635836 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/neutron-sriov-edpm-compute-global-edpm-compute-global-p96c4" podStartSLOduration=2.040952569 podStartE2EDuration="2.635809805s" podCreationTimestamp="2026-01-20 18:00:51 +0000 UTC" firstStartedPulling="2026-01-20 18:00:52.325539248 +0000 UTC m=+4746.085877225" lastFinishedPulling="2026-01-20 18:00:52.920396494 +0000 UTC m=+4746.680734461" observedRunningTime="2026-01-20 18:00:53.624766806 +0000 UTC m=+4747.385104763" watchObservedRunningTime="2026-01-20 18:00:53.635809805 +0000 UTC m=+4747.396147772" Jan 20 18:00:54 crc kubenswrapper[4558]: I0120 18:00:54.607221 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="104ae5cb-67ef-4dac-af44-7fab7118cae1" path="/var/lib/kubelet/pods/104ae5cb-67ef-4dac-af44-7fab7118cae1/volumes" Jan 20 18:00:54 crc kubenswrapper[4558]: I0120 18:00:54.617077 4558 generic.go:334] "Generic (PLEG): container finished" podID="fd8020a9-d02c-4ad9-a4c7-e9dd15f1959c" containerID="f66d5e3b991f3f68fa91c7c17d38fd34980830c10782a4b3d415cb3841f21fde" exitCode=0 Jan 20 18:00:54 crc kubenswrapper[4558]: I0120 18:00:54.617152 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-sriov-edpm-compute-global-edpm-compute-global-p96c4" event={"ID":"fd8020a9-d02c-4ad9-a4c7-e9dd15f1959c","Type":"ContainerDied","Data":"f66d5e3b991f3f68fa91c7c17d38fd34980830c10782a4b3d415cb3841f21fde"} Jan 20 18:00:54 crc kubenswrapper[4558]: I0120 18:00:54.748036 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-v2gb7" Jan 20 18:00:54 crc kubenswrapper[4558]: I0120 18:00:54.748286 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-marketplace/certified-operators-v2gb7" Jan 20 18:00:54 crc kubenswrapper[4558]: I0120 18:00:54.785357 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-v2gb7" Jan 20 18:00:56 crc kubenswrapper[4558]: I0120 18:00:56.146298 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-sriov-edpm-compute-global-edpm-compute-global-p96c4" Jan 20 18:00:56 crc kubenswrapper[4558]: I0120 18:00:56.184306 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7rzdz\" (UniqueName: \"kubernetes.io/projected/fd8020a9-d02c-4ad9-a4c7-e9dd15f1959c-kube-api-access-7rzdz\") pod \"fd8020a9-d02c-4ad9-a4c7-e9dd15f1959c\" (UID: \"fd8020a9-d02c-4ad9-a4c7-e9dd15f1959c\") " Jan 20 18:00:56 crc kubenswrapper[4558]: I0120 18:00:56.184351 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/fd8020a9-d02c-4ad9-a4c7-e9dd15f1959c-ssh-key-edpm-compute-global\") pod \"fd8020a9-d02c-4ad9-a4c7-e9dd15f1959c\" (UID: \"fd8020a9-d02c-4ad9-a4c7-e9dd15f1959c\") " Jan 20 18:00:56 crc kubenswrapper[4558]: I0120 18:00:56.184466 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fd8020a9-d02c-4ad9-a4c7-e9dd15f1959c-inventory\") pod \"fd8020a9-d02c-4ad9-a4c7-e9dd15f1959c\" (UID: \"fd8020a9-d02c-4ad9-a4c7-e9dd15f1959c\") " Jan 20 18:00:56 crc kubenswrapper[4558]: I0120 18:00:56.184490 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd8020a9-d02c-4ad9-a4c7-e9dd15f1959c-neutron-sriov-combined-ca-bundle\") pod \"fd8020a9-d02c-4ad9-a4c7-e9dd15f1959c\" (UID: \"fd8020a9-d02c-4ad9-a4c7-e9dd15f1959c\") " Jan 20 18:00:56 crc kubenswrapper[4558]: I0120 18:00:56.184624 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/fd8020a9-d02c-4ad9-a4c7-e9dd15f1959c-neutron-sriov-agent-neutron-config-0\") pod \"fd8020a9-d02c-4ad9-a4c7-e9dd15f1959c\" (UID: \"fd8020a9-d02c-4ad9-a4c7-e9dd15f1959c\") " Jan 20 18:00:56 crc kubenswrapper[4558]: I0120 18:00:56.189733 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd8020a9-d02c-4ad9-a4c7-e9dd15f1959c-neutron-sriov-combined-ca-bundle" (OuterVolumeSpecName: "neutron-sriov-combined-ca-bundle") pod "fd8020a9-d02c-4ad9-a4c7-e9dd15f1959c" (UID: "fd8020a9-d02c-4ad9-a4c7-e9dd15f1959c"). InnerVolumeSpecName "neutron-sriov-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:00:56 crc kubenswrapper[4558]: I0120 18:00:56.192008 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fd8020a9-d02c-4ad9-a4c7-e9dd15f1959c-kube-api-access-7rzdz" (OuterVolumeSpecName: "kube-api-access-7rzdz") pod "fd8020a9-d02c-4ad9-a4c7-e9dd15f1959c" (UID: "fd8020a9-d02c-4ad9-a4c7-e9dd15f1959c"). InnerVolumeSpecName "kube-api-access-7rzdz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:00:56 crc kubenswrapper[4558]: I0120 18:00:56.203309 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd8020a9-d02c-4ad9-a4c7-e9dd15f1959c-ssh-key-edpm-compute-global" (OuterVolumeSpecName: "ssh-key-edpm-compute-global") pod "fd8020a9-d02c-4ad9-a4c7-e9dd15f1959c" (UID: "fd8020a9-d02c-4ad9-a4c7-e9dd15f1959c"). InnerVolumeSpecName "ssh-key-edpm-compute-global". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:00:56 crc kubenswrapper[4558]: I0120 18:00:56.205679 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd8020a9-d02c-4ad9-a4c7-e9dd15f1959c-neutron-sriov-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-sriov-agent-neutron-config-0") pod "fd8020a9-d02c-4ad9-a4c7-e9dd15f1959c" (UID: "fd8020a9-d02c-4ad9-a4c7-e9dd15f1959c"). InnerVolumeSpecName "neutron-sriov-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:00:56 crc kubenswrapper[4558]: I0120 18:00:56.206028 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd8020a9-d02c-4ad9-a4c7-e9dd15f1959c-inventory" (OuterVolumeSpecName: "inventory") pod "fd8020a9-d02c-4ad9-a4c7-e9dd15f1959c" (UID: "fd8020a9-d02c-4ad9-a4c7-e9dd15f1959c"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:00:56 crc kubenswrapper[4558]: I0120 18:00:56.285491 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/fd8020a9-d02c-4ad9-a4c7-e9dd15f1959c-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:00:56 crc kubenswrapper[4558]: I0120 18:00:56.285525 4558 reconciler_common.go:293] "Volume detached for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fd8020a9-d02c-4ad9-a4c7-e9dd15f1959c-neutron-sriov-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:00:56 crc kubenswrapper[4558]: I0120 18:00:56.285537 4558 reconciler_common.go:293] "Volume detached for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/fd8020a9-d02c-4ad9-a4c7-e9dd15f1959c-neutron-sriov-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Jan 20 18:00:56 crc kubenswrapper[4558]: I0120 18:00:56.285552 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7rzdz\" (UniqueName: \"kubernetes.io/projected/fd8020a9-d02c-4ad9-a4c7-e9dd15f1959c-kube-api-access-7rzdz\") on node \"crc\" DevicePath \"\"" Jan 20 18:00:56 crc kubenswrapper[4558]: I0120 18:00:56.285563 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/fd8020a9-d02c-4ad9-a4c7-e9dd15f1959c-ssh-key-edpm-compute-global\") on node \"crc\" DevicePath \"\"" Jan 20 18:00:56 crc kubenswrapper[4558]: I0120 18:00:56.636251 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-sriov-edpm-compute-global-edpm-compute-global-p96c4" event={"ID":"fd8020a9-d02c-4ad9-a4c7-e9dd15f1959c","Type":"ContainerDied","Data":"f81ff0a9242967fb8276fce98dbdfe5dd906d270c9c3f0d82b115a656acbbacd"} Jan 20 18:00:56 crc kubenswrapper[4558]: I0120 18:00:56.636445 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f81ff0a9242967fb8276fce98dbdfe5dd906d270c9c3f0d82b115a656acbbacd" Jan 20 18:00:56 crc kubenswrapper[4558]: I0120 18:00:56.636303 4558 util.go:48] "No ready 
sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-sriov-edpm-compute-global-edpm-compute-global-p96c4" Jan 20 18:00:56 crc kubenswrapper[4558]: I0120 18:00:56.698575 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-dhcp-edpm-compute-global-edpm-compute-global-b8h4h"] Jan 20 18:00:56 crc kubenswrapper[4558]: E0120 18:00:56.699110 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="104ae5cb-67ef-4dac-af44-7fab7118cae1" containerName="extract-utilities" Jan 20 18:00:56 crc kubenswrapper[4558]: I0120 18:00:56.699137 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="104ae5cb-67ef-4dac-af44-7fab7118cae1" containerName="extract-utilities" Jan 20 18:00:56 crc kubenswrapper[4558]: E0120 18:00:56.699184 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd8020a9-d02c-4ad9-a4c7-e9dd15f1959c" containerName="neutron-sriov-edpm-compute-global-edpm-compute-global" Jan 20 18:00:56 crc kubenswrapper[4558]: I0120 18:00:56.699193 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd8020a9-d02c-4ad9-a4c7-e9dd15f1959c" containerName="neutron-sriov-edpm-compute-global-edpm-compute-global" Jan 20 18:00:56 crc kubenswrapper[4558]: E0120 18:00:56.699218 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="104ae5cb-67ef-4dac-af44-7fab7118cae1" containerName="registry-server" Jan 20 18:00:56 crc kubenswrapper[4558]: I0120 18:00:56.699225 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="104ae5cb-67ef-4dac-af44-7fab7118cae1" containerName="registry-server" Jan 20 18:00:56 crc kubenswrapper[4558]: E0120 18:00:56.699249 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="104ae5cb-67ef-4dac-af44-7fab7118cae1" containerName="extract-content" Jan 20 18:00:56 crc kubenswrapper[4558]: I0120 18:00:56.699259 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="104ae5cb-67ef-4dac-af44-7fab7118cae1" containerName="extract-content" Jan 20 18:00:56 crc kubenswrapper[4558]: I0120 18:00:56.699498 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="104ae5cb-67ef-4dac-af44-7fab7118cae1" containerName="registry-server" Jan 20 18:00:56 crc kubenswrapper[4558]: I0120 18:00:56.699528 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd8020a9-d02c-4ad9-a4c7-e9dd15f1959c" containerName="neutron-sriov-edpm-compute-global-edpm-compute-global" Jan 20 18:00:56 crc kubenswrapper[4558]: I0120 18:00:56.700353 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-dhcp-edpm-compute-global-edpm-compute-global-b8h4h" Jan 20 18:00:56 crc kubenswrapper[4558]: I0120 18:00:56.703294 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-edpm-compute-global" Jan 20 18:00:56 crc kubenswrapper[4558]: I0120 18:00:56.703333 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-dhcp-agent-neutron-config" Jan 20 18:00:56 crc kubenswrapper[4558]: I0120 18:00:56.703580 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 18:00:56 crc kubenswrapper[4558]: I0120 18:00:56.703896 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 18:00:56 crc kubenswrapper[4558]: I0120 18:00:56.704287 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"combined-ca-bundle" Jan 20 18:00:56 crc kubenswrapper[4558]: I0120 18:00:56.704491 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"edpm-compute-global-dockercfg-wkdxg" Jan 20 18:00:56 crc kubenswrapper[4558]: I0120 18:00:56.709754 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-dhcp-edpm-compute-global-edpm-compute-global-b8h4h"] Jan 20 18:00:56 crc kubenswrapper[4558]: I0120 18:00:56.807754 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/69e764c8-5789-4cb0-842f-1760cbc84701-inventory\") pod \"neutron-dhcp-edpm-compute-global-edpm-compute-global-b8h4h\" (UID: \"69e764c8-5789-4cb0-842f-1760cbc84701\") " pod="openstack-kuttl-tests/neutron-dhcp-edpm-compute-global-edpm-compute-global-b8h4h" Jan 20 18:00:56 crc kubenswrapper[4558]: I0120 18:00:56.807930 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/69e764c8-5789-4cb0-842f-1760cbc84701-neutron-dhcp-agent-neutron-config-0\") pod \"neutron-dhcp-edpm-compute-global-edpm-compute-global-b8h4h\" (UID: \"69e764c8-5789-4cb0-842f-1760cbc84701\") " pod="openstack-kuttl-tests/neutron-dhcp-edpm-compute-global-edpm-compute-global-b8h4h" Jan 20 18:00:56 crc kubenswrapper[4558]: I0120 18:00:56.808358 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p5qrf\" (UniqueName: \"kubernetes.io/projected/69e764c8-5789-4cb0-842f-1760cbc84701-kube-api-access-p5qrf\") pod \"neutron-dhcp-edpm-compute-global-edpm-compute-global-b8h4h\" (UID: \"69e764c8-5789-4cb0-842f-1760cbc84701\") " pod="openstack-kuttl-tests/neutron-dhcp-edpm-compute-global-edpm-compute-global-b8h4h" Jan 20 18:00:56 crc kubenswrapper[4558]: I0120 18:00:56.808412 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69e764c8-5789-4cb0-842f-1760cbc84701-neutron-dhcp-combined-ca-bundle\") pod \"neutron-dhcp-edpm-compute-global-edpm-compute-global-b8h4h\" (UID: \"69e764c8-5789-4cb0-842f-1760cbc84701\") " pod="openstack-kuttl-tests/neutron-dhcp-edpm-compute-global-edpm-compute-global-b8h4h" Jan 20 18:00:56 crc kubenswrapper[4558]: I0120 18:00:56.808751 4558 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/69e764c8-5789-4cb0-842f-1760cbc84701-ssh-key-edpm-compute-global\") pod \"neutron-dhcp-edpm-compute-global-edpm-compute-global-b8h4h\" (UID: \"69e764c8-5789-4cb0-842f-1760cbc84701\") " pod="openstack-kuttl-tests/neutron-dhcp-edpm-compute-global-edpm-compute-global-b8h4h" Jan 20 18:00:56 crc kubenswrapper[4558]: I0120 18:00:56.911116 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/69e764c8-5789-4cb0-842f-1760cbc84701-inventory\") pod \"neutron-dhcp-edpm-compute-global-edpm-compute-global-b8h4h\" (UID: \"69e764c8-5789-4cb0-842f-1760cbc84701\") " pod="openstack-kuttl-tests/neutron-dhcp-edpm-compute-global-edpm-compute-global-b8h4h" Jan 20 18:00:56 crc kubenswrapper[4558]: I0120 18:00:56.911437 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/69e764c8-5789-4cb0-842f-1760cbc84701-neutron-dhcp-agent-neutron-config-0\") pod \"neutron-dhcp-edpm-compute-global-edpm-compute-global-b8h4h\" (UID: \"69e764c8-5789-4cb0-842f-1760cbc84701\") " pod="openstack-kuttl-tests/neutron-dhcp-edpm-compute-global-edpm-compute-global-b8h4h" Jan 20 18:00:56 crc kubenswrapper[4558]: I0120 18:00:56.911498 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p5qrf\" (UniqueName: \"kubernetes.io/projected/69e764c8-5789-4cb0-842f-1760cbc84701-kube-api-access-p5qrf\") pod \"neutron-dhcp-edpm-compute-global-edpm-compute-global-b8h4h\" (UID: \"69e764c8-5789-4cb0-842f-1760cbc84701\") " pod="openstack-kuttl-tests/neutron-dhcp-edpm-compute-global-edpm-compute-global-b8h4h" Jan 20 18:00:56 crc kubenswrapper[4558]: I0120 18:00:56.911541 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69e764c8-5789-4cb0-842f-1760cbc84701-neutron-dhcp-combined-ca-bundle\") pod \"neutron-dhcp-edpm-compute-global-edpm-compute-global-b8h4h\" (UID: \"69e764c8-5789-4cb0-842f-1760cbc84701\") " pod="openstack-kuttl-tests/neutron-dhcp-edpm-compute-global-edpm-compute-global-b8h4h" Jan 20 18:00:56 crc kubenswrapper[4558]: I0120 18:00:56.911632 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/69e764c8-5789-4cb0-842f-1760cbc84701-ssh-key-edpm-compute-global\") pod \"neutron-dhcp-edpm-compute-global-edpm-compute-global-b8h4h\" (UID: \"69e764c8-5789-4cb0-842f-1760cbc84701\") " pod="openstack-kuttl-tests/neutron-dhcp-edpm-compute-global-edpm-compute-global-b8h4h" Jan 20 18:00:56 crc kubenswrapper[4558]: I0120 18:00:56.915331 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/69e764c8-5789-4cb0-842f-1760cbc84701-inventory\") pod \"neutron-dhcp-edpm-compute-global-edpm-compute-global-b8h4h\" (UID: \"69e764c8-5789-4cb0-842f-1760cbc84701\") " pod="openstack-kuttl-tests/neutron-dhcp-edpm-compute-global-edpm-compute-global-b8h4h" Jan 20 18:00:56 crc kubenswrapper[4558]: I0120 18:00:56.915897 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/69e764c8-5789-4cb0-842f-1760cbc84701-ssh-key-edpm-compute-global\") pod 
\"neutron-dhcp-edpm-compute-global-edpm-compute-global-b8h4h\" (UID: \"69e764c8-5789-4cb0-842f-1760cbc84701\") " pod="openstack-kuttl-tests/neutron-dhcp-edpm-compute-global-edpm-compute-global-b8h4h" Jan 20 18:00:56 crc kubenswrapper[4558]: I0120 18:00:56.919701 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/69e764c8-5789-4cb0-842f-1760cbc84701-neutron-dhcp-agent-neutron-config-0\") pod \"neutron-dhcp-edpm-compute-global-edpm-compute-global-b8h4h\" (UID: \"69e764c8-5789-4cb0-842f-1760cbc84701\") " pod="openstack-kuttl-tests/neutron-dhcp-edpm-compute-global-edpm-compute-global-b8h4h" Jan 20 18:00:56 crc kubenswrapper[4558]: I0120 18:00:56.927764 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69e764c8-5789-4cb0-842f-1760cbc84701-neutron-dhcp-combined-ca-bundle\") pod \"neutron-dhcp-edpm-compute-global-edpm-compute-global-b8h4h\" (UID: \"69e764c8-5789-4cb0-842f-1760cbc84701\") " pod="openstack-kuttl-tests/neutron-dhcp-edpm-compute-global-edpm-compute-global-b8h4h" Jan 20 18:00:56 crc kubenswrapper[4558]: I0120 18:00:56.929200 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p5qrf\" (UniqueName: \"kubernetes.io/projected/69e764c8-5789-4cb0-842f-1760cbc84701-kube-api-access-p5qrf\") pod \"neutron-dhcp-edpm-compute-global-edpm-compute-global-b8h4h\" (UID: \"69e764c8-5789-4cb0-842f-1760cbc84701\") " pod="openstack-kuttl-tests/neutron-dhcp-edpm-compute-global-edpm-compute-global-b8h4h" Jan 20 18:00:57 crc kubenswrapper[4558]: I0120 18:00:57.016635 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-dhcp-edpm-compute-global-edpm-compute-global-b8h4h" Jan 20 18:00:57 crc kubenswrapper[4558]: I0120 18:00:57.403462 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-dhcp-edpm-compute-global-edpm-compute-global-b8h4h"] Jan 20 18:00:57 crc kubenswrapper[4558]: W0120 18:00:57.498070 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod69e764c8_5789_4cb0_842f_1760cbc84701.slice/crio-848e91f9c7840e6944960752a40c1076eb3c2b3f3b2227e5110e31808c99d0f4 WatchSource:0}: Error finding container 848e91f9c7840e6944960752a40c1076eb3c2b3f3b2227e5110e31808c99d0f4: Status 404 returned error can't find the container with id 848e91f9c7840e6944960752a40c1076eb3c2b3f3b2227e5110e31808c99d0f4 Jan 20 18:00:57 crc kubenswrapper[4558]: I0120 18:00:57.646547 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-dhcp-edpm-compute-global-edpm-compute-global-b8h4h" event={"ID":"69e764c8-5789-4cb0-842f-1760cbc84701","Type":"ContainerStarted","Data":"848e91f9c7840e6944960752a40c1076eb3c2b3f3b2227e5110e31808c99d0f4"} Jan 20 18:00:58 crc kubenswrapper[4558]: I0120 18:00:58.657623 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-dhcp-edpm-compute-global-edpm-compute-global-b8h4h" event={"ID":"69e764c8-5789-4cb0-842f-1760cbc84701","Type":"ContainerStarted","Data":"8b87b8214b3bb5ab1c7a2b632f239b5c65e546850d535e6893d19fc3331a702a"} Jan 20 18:00:58 crc kubenswrapper[4558]: I0120 18:00:58.682877 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/neutron-dhcp-edpm-compute-global-edpm-compute-global-b8h4h" 
podStartSLOduration=2.196333495 podStartE2EDuration="2.682854938s" podCreationTimestamp="2026-01-20 18:00:56 +0000 UTC" firstStartedPulling="2026-01-20 18:00:57.500755641 +0000 UTC m=+4751.261093608" lastFinishedPulling="2026-01-20 18:00:57.987277084 +0000 UTC m=+4751.747615051" observedRunningTime="2026-01-20 18:00:58.672365411 +0000 UTC m=+4752.432703379" watchObservedRunningTime="2026-01-20 18:00:58.682854938 +0000 UTC m=+4752.443192905" Jan 20 18:00:59 crc kubenswrapper[4558]: I0120 18:00:59.669734 4558 generic.go:334] "Generic (PLEG): container finished" podID="69e764c8-5789-4cb0-842f-1760cbc84701" containerID="8b87b8214b3bb5ab1c7a2b632f239b5c65e546850d535e6893d19fc3331a702a" exitCode=0 Jan 20 18:00:59 crc kubenswrapper[4558]: I0120 18:00:59.669791 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-dhcp-edpm-compute-global-edpm-compute-global-b8h4h" event={"ID":"69e764c8-5789-4cb0-842f-1760cbc84701","Type":"ContainerDied","Data":"8b87b8214b3bb5ab1c7a2b632f239b5c65e546850d535e6893d19fc3331a702a"} Jan 20 18:01:00 crc kubenswrapper[4558]: I0120 18:01:00.933208 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-dhcp-edpm-compute-global-edpm-compute-global-b8h4h" Jan 20 18:01:00 crc kubenswrapper[4558]: I0120 18:01:00.973792 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/69e764c8-5789-4cb0-842f-1760cbc84701-neutron-dhcp-agent-neutron-config-0\") pod \"69e764c8-5789-4cb0-842f-1760cbc84701\" (UID: \"69e764c8-5789-4cb0-842f-1760cbc84701\") " Jan 20 18:01:00 crc kubenswrapper[4558]: I0120 18:01:00.973862 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/69e764c8-5789-4cb0-842f-1760cbc84701-inventory\") pod \"69e764c8-5789-4cb0-842f-1760cbc84701\" (UID: \"69e764c8-5789-4cb0-842f-1760cbc84701\") " Jan 20 18:01:00 crc kubenswrapper[4558]: I0120 18:01:00.973933 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69e764c8-5789-4cb0-842f-1760cbc84701-neutron-dhcp-combined-ca-bundle\") pod \"69e764c8-5789-4cb0-842f-1760cbc84701\" (UID: \"69e764c8-5789-4cb0-842f-1760cbc84701\") " Jan 20 18:01:00 crc kubenswrapper[4558]: I0120 18:01:00.974392 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/69e764c8-5789-4cb0-842f-1760cbc84701-ssh-key-edpm-compute-global\") pod \"69e764c8-5789-4cb0-842f-1760cbc84701\" (UID: \"69e764c8-5789-4cb0-842f-1760cbc84701\") " Jan 20 18:01:00 crc kubenswrapper[4558]: I0120 18:01:00.974461 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p5qrf\" (UniqueName: \"kubernetes.io/projected/69e764c8-5789-4cb0-842f-1760cbc84701-kube-api-access-p5qrf\") pod \"69e764c8-5789-4cb0-842f-1760cbc84701\" (UID: \"69e764c8-5789-4cb0-842f-1760cbc84701\") " Jan 20 18:01:00 crc kubenswrapper[4558]: I0120 18:01:00.980181 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/69e764c8-5789-4cb0-842f-1760cbc84701-neutron-dhcp-combined-ca-bundle" (OuterVolumeSpecName: "neutron-dhcp-combined-ca-bundle") pod "69e764c8-5789-4cb0-842f-1760cbc84701" (UID: "69e764c8-5789-4cb0-842f-1760cbc84701"). 
InnerVolumeSpecName "neutron-dhcp-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:01:00 crc kubenswrapper[4558]: I0120 18:01:00.982769 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/69e764c8-5789-4cb0-842f-1760cbc84701-kube-api-access-p5qrf" (OuterVolumeSpecName: "kube-api-access-p5qrf") pod "69e764c8-5789-4cb0-842f-1760cbc84701" (UID: "69e764c8-5789-4cb0-842f-1760cbc84701"). InnerVolumeSpecName "kube-api-access-p5qrf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:01:00 crc kubenswrapper[4558]: I0120 18:01:00.994659 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/69e764c8-5789-4cb0-842f-1760cbc84701-ssh-key-edpm-compute-global" (OuterVolumeSpecName: "ssh-key-edpm-compute-global") pod "69e764c8-5789-4cb0-842f-1760cbc84701" (UID: "69e764c8-5789-4cb0-842f-1760cbc84701"). InnerVolumeSpecName "ssh-key-edpm-compute-global". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:01:00 crc kubenswrapper[4558]: I0120 18:01:00.994905 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/69e764c8-5789-4cb0-842f-1760cbc84701-neutron-dhcp-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-dhcp-agent-neutron-config-0") pod "69e764c8-5789-4cb0-842f-1760cbc84701" (UID: "69e764c8-5789-4cb0-842f-1760cbc84701"). InnerVolumeSpecName "neutron-dhcp-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:01:00 crc kubenswrapper[4558]: I0120 18:01:00.995487 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/69e764c8-5789-4cb0-842f-1760cbc84701-inventory" (OuterVolumeSpecName: "inventory") pod "69e764c8-5789-4cb0-842f-1760cbc84701" (UID: "69e764c8-5789-4cb0-842f-1760cbc84701"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:01:01 crc kubenswrapper[4558]: I0120 18:01:01.076928 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/69e764c8-5789-4cb0-842f-1760cbc84701-ssh-key-edpm-compute-global\") on node \"crc\" DevicePath \"\"" Jan 20 18:01:01 crc kubenswrapper[4558]: I0120 18:01:01.076962 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p5qrf\" (UniqueName: \"kubernetes.io/projected/69e764c8-5789-4cb0-842f-1760cbc84701-kube-api-access-p5qrf\") on node \"crc\" DevicePath \"\"" Jan 20 18:01:01 crc kubenswrapper[4558]: I0120 18:01:01.076973 4558 reconciler_common.go:293] "Volume detached for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/69e764c8-5789-4cb0-842f-1760cbc84701-neutron-dhcp-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Jan 20 18:01:01 crc kubenswrapper[4558]: I0120 18:01:01.076989 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/69e764c8-5789-4cb0-842f-1760cbc84701-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:01:01 crc kubenswrapper[4558]: I0120 18:01:01.077001 4558 reconciler_common.go:293] "Volume detached for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69e764c8-5789-4cb0-842f-1760cbc84701-neutron-dhcp-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:01:01 crc kubenswrapper[4558]: I0120 18:01:01.692445 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-dhcp-edpm-compute-global-edpm-compute-global-b8h4h" event={"ID":"69e764c8-5789-4cb0-842f-1760cbc84701","Type":"ContainerDied","Data":"848e91f9c7840e6944960752a40c1076eb3c2b3f3b2227e5110e31808c99d0f4"} Jan 20 18:01:01 crc kubenswrapper[4558]: I0120 18:01:01.692491 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-dhcp-edpm-compute-global-edpm-compute-global-b8h4h" Jan 20 18:01:01 crc kubenswrapper[4558]: I0120 18:01:01.692502 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="848e91f9c7840e6944960752a40c1076eb3c2b3f3b2227e5110e31808c99d0f4" Jan 20 18:01:01 crc kubenswrapper[4558]: I0120 18:01:01.740148 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/libvirt-edpm-compute-global-edpm-compute-global-r2b84"] Jan 20 18:01:01 crc kubenswrapper[4558]: E0120 18:01:01.740523 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69e764c8-5789-4cb0-842f-1760cbc84701" containerName="neutron-dhcp-edpm-compute-global-edpm-compute-global" Jan 20 18:01:01 crc kubenswrapper[4558]: I0120 18:01:01.740545 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="69e764c8-5789-4cb0-842f-1760cbc84701" containerName="neutron-dhcp-edpm-compute-global-edpm-compute-global" Jan 20 18:01:01 crc kubenswrapper[4558]: I0120 18:01:01.740694 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="69e764c8-5789-4cb0-842f-1760cbc84701" containerName="neutron-dhcp-edpm-compute-global-edpm-compute-global" Jan 20 18:01:01 crc kubenswrapper[4558]: I0120 18:01:01.741236 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/libvirt-edpm-compute-global-edpm-compute-global-r2b84" Jan 20 18:01:01 crc kubenswrapper[4558]: I0120 18:01:01.743512 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"edpm-compute-global-dockercfg-wkdxg" Jan 20 18:01:01 crc kubenswrapper[4558]: I0120 18:01:01.743735 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"libvirt-secret" Jan 20 18:01:01 crc kubenswrapper[4558]: I0120 18:01:01.743888 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-edpm-compute-global" Jan 20 18:01:01 crc kubenswrapper[4558]: I0120 18:01:01.744028 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"combined-ca-bundle" Jan 20 18:01:01 crc kubenswrapper[4558]: I0120 18:01:01.744259 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 18:01:01 crc kubenswrapper[4558]: I0120 18:01:01.744470 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 18:01:01 crc kubenswrapper[4558]: I0120 18:01:01.754326 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/libvirt-edpm-compute-global-edpm-compute-global-r2b84"] Jan 20 18:01:01 crc kubenswrapper[4558]: I0120 18:01:01.787651 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45750bec-72b3-45e9-9a13-14b1a3e409a3-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-compute-global-edpm-compute-global-r2b84\" (UID: \"45750bec-72b3-45e9-9a13-14b1a3e409a3\") " pod="openstack-kuttl-tests/libvirt-edpm-compute-global-edpm-compute-global-r2b84" Jan 20 18:01:01 crc kubenswrapper[4558]: I0120 18:01:01.787690 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/45750bec-72b3-45e9-9a13-14b1a3e409a3-libvirt-secret-0\") pod \"libvirt-edpm-compute-global-edpm-compute-global-r2b84\" (UID: \"45750bec-72b3-45e9-9a13-14b1a3e409a3\") " pod="openstack-kuttl-tests/libvirt-edpm-compute-global-edpm-compute-global-r2b84" Jan 20 18:01:01 crc kubenswrapper[4558]: I0120 18:01:01.787843 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/45750bec-72b3-45e9-9a13-14b1a3e409a3-ssh-key-edpm-compute-global\") pod \"libvirt-edpm-compute-global-edpm-compute-global-r2b84\" (UID: \"45750bec-72b3-45e9-9a13-14b1a3e409a3\") " pod="openstack-kuttl-tests/libvirt-edpm-compute-global-edpm-compute-global-r2b84" Jan 20 18:01:01 crc kubenswrapper[4558]: I0120 18:01:01.787897 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jzmfb\" (UniqueName: \"kubernetes.io/projected/45750bec-72b3-45e9-9a13-14b1a3e409a3-kube-api-access-jzmfb\") pod \"libvirt-edpm-compute-global-edpm-compute-global-r2b84\" (UID: \"45750bec-72b3-45e9-9a13-14b1a3e409a3\") " pod="openstack-kuttl-tests/libvirt-edpm-compute-global-edpm-compute-global-r2b84" Jan 20 18:01:01 crc kubenswrapper[4558]: I0120 18:01:01.788049 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: 
\"kubernetes.io/secret/45750bec-72b3-45e9-9a13-14b1a3e409a3-inventory\") pod \"libvirt-edpm-compute-global-edpm-compute-global-r2b84\" (UID: \"45750bec-72b3-45e9-9a13-14b1a3e409a3\") " pod="openstack-kuttl-tests/libvirt-edpm-compute-global-edpm-compute-global-r2b84" Jan 20 18:01:01 crc kubenswrapper[4558]: I0120 18:01:01.889929 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/45750bec-72b3-45e9-9a13-14b1a3e409a3-inventory\") pod \"libvirt-edpm-compute-global-edpm-compute-global-r2b84\" (UID: \"45750bec-72b3-45e9-9a13-14b1a3e409a3\") " pod="openstack-kuttl-tests/libvirt-edpm-compute-global-edpm-compute-global-r2b84" Jan 20 18:01:01 crc kubenswrapper[4558]: I0120 18:01:01.890026 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45750bec-72b3-45e9-9a13-14b1a3e409a3-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-compute-global-edpm-compute-global-r2b84\" (UID: \"45750bec-72b3-45e9-9a13-14b1a3e409a3\") " pod="openstack-kuttl-tests/libvirt-edpm-compute-global-edpm-compute-global-r2b84" Jan 20 18:01:01 crc kubenswrapper[4558]: I0120 18:01:01.890054 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/45750bec-72b3-45e9-9a13-14b1a3e409a3-libvirt-secret-0\") pod \"libvirt-edpm-compute-global-edpm-compute-global-r2b84\" (UID: \"45750bec-72b3-45e9-9a13-14b1a3e409a3\") " pod="openstack-kuttl-tests/libvirt-edpm-compute-global-edpm-compute-global-r2b84" Jan 20 18:01:01 crc kubenswrapper[4558]: I0120 18:01:01.891098 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/45750bec-72b3-45e9-9a13-14b1a3e409a3-ssh-key-edpm-compute-global\") pod \"libvirt-edpm-compute-global-edpm-compute-global-r2b84\" (UID: \"45750bec-72b3-45e9-9a13-14b1a3e409a3\") " pod="openstack-kuttl-tests/libvirt-edpm-compute-global-edpm-compute-global-r2b84" Jan 20 18:01:01 crc kubenswrapper[4558]: I0120 18:01:01.891147 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jzmfb\" (UniqueName: \"kubernetes.io/projected/45750bec-72b3-45e9-9a13-14b1a3e409a3-kube-api-access-jzmfb\") pod \"libvirt-edpm-compute-global-edpm-compute-global-r2b84\" (UID: \"45750bec-72b3-45e9-9a13-14b1a3e409a3\") " pod="openstack-kuttl-tests/libvirt-edpm-compute-global-edpm-compute-global-r2b84" Jan 20 18:01:01 crc kubenswrapper[4558]: I0120 18:01:01.894825 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45750bec-72b3-45e9-9a13-14b1a3e409a3-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-compute-global-edpm-compute-global-r2b84\" (UID: \"45750bec-72b3-45e9-9a13-14b1a3e409a3\") " pod="openstack-kuttl-tests/libvirt-edpm-compute-global-edpm-compute-global-r2b84" Jan 20 18:01:01 crc kubenswrapper[4558]: I0120 18:01:01.895313 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/45750bec-72b3-45e9-9a13-14b1a3e409a3-libvirt-secret-0\") pod \"libvirt-edpm-compute-global-edpm-compute-global-r2b84\" (UID: \"45750bec-72b3-45e9-9a13-14b1a3e409a3\") " pod="openstack-kuttl-tests/libvirt-edpm-compute-global-edpm-compute-global-r2b84" Jan 20 18:01:01 crc kubenswrapper[4558]: I0120 18:01:01.895517 4558 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/45750bec-72b3-45e9-9a13-14b1a3e409a3-ssh-key-edpm-compute-global\") pod \"libvirt-edpm-compute-global-edpm-compute-global-r2b84\" (UID: \"45750bec-72b3-45e9-9a13-14b1a3e409a3\") " pod="openstack-kuttl-tests/libvirt-edpm-compute-global-edpm-compute-global-r2b84" Jan 20 18:01:01 crc kubenswrapper[4558]: I0120 18:01:01.895980 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/45750bec-72b3-45e9-9a13-14b1a3e409a3-inventory\") pod \"libvirt-edpm-compute-global-edpm-compute-global-r2b84\" (UID: \"45750bec-72b3-45e9-9a13-14b1a3e409a3\") " pod="openstack-kuttl-tests/libvirt-edpm-compute-global-edpm-compute-global-r2b84" Jan 20 18:01:01 crc kubenswrapper[4558]: I0120 18:01:01.904930 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jzmfb\" (UniqueName: \"kubernetes.io/projected/45750bec-72b3-45e9-9a13-14b1a3e409a3-kube-api-access-jzmfb\") pod \"libvirt-edpm-compute-global-edpm-compute-global-r2b84\" (UID: \"45750bec-72b3-45e9-9a13-14b1a3e409a3\") " pod="openstack-kuttl-tests/libvirt-edpm-compute-global-edpm-compute-global-r2b84" Jan 20 18:01:02 crc kubenswrapper[4558]: I0120 18:01:02.068289 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/libvirt-edpm-compute-global-edpm-compute-global-r2b84" Jan 20 18:01:02 crc kubenswrapper[4558]: I0120 18:01:02.454758 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/libvirt-edpm-compute-global-edpm-compute-global-r2b84"] Jan 20 18:01:02 crc kubenswrapper[4558]: I0120 18:01:02.700858 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/libvirt-edpm-compute-global-edpm-compute-global-r2b84" event={"ID":"45750bec-72b3-45e9-9a13-14b1a3e409a3","Type":"ContainerStarted","Data":"36218eeae7bbb995d1f6537d2b91c3028101f3eead89b0b742b8eaab0896fc6b"} Jan 20 18:01:03 crc kubenswrapper[4558]: I0120 18:01:03.711292 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/libvirt-edpm-compute-global-edpm-compute-global-r2b84" event={"ID":"45750bec-72b3-45e9-9a13-14b1a3e409a3","Type":"ContainerStarted","Data":"9eb7ac8f0ca68c77c099de96b56d1159e971f0399ad8ba04172b97e40aae2b12"} Jan 20 18:01:03 crc kubenswrapper[4558]: I0120 18:01:03.736187 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/libvirt-edpm-compute-global-edpm-compute-global-r2b84" podStartSLOduration=2.2390433610000002 podStartE2EDuration="2.736148398s" podCreationTimestamp="2026-01-20 18:01:01 +0000 UTC" firstStartedPulling="2026-01-20 18:01:02.46056236 +0000 UTC m=+4756.220900327" lastFinishedPulling="2026-01-20 18:01:02.957667398 +0000 UTC m=+4756.718005364" observedRunningTime="2026-01-20 18:01:03.727567588 +0000 UTC m=+4757.487905555" watchObservedRunningTime="2026-01-20 18:01:03.736148398 +0000 UTC m=+4757.496486365" Jan 20 18:01:04 crc kubenswrapper[4558]: I0120 18:01:04.719292 4558 generic.go:334] "Generic (PLEG): container finished" podID="45750bec-72b3-45e9-9a13-14b1a3e409a3" containerID="9eb7ac8f0ca68c77c099de96b56d1159e971f0399ad8ba04172b97e40aae2b12" exitCode=0 Jan 20 18:01:04 crc kubenswrapper[4558]: I0120 18:01:04.719371 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/libvirt-edpm-compute-global-edpm-compute-global-r2b84" 
event={"ID":"45750bec-72b3-45e9-9a13-14b1a3e409a3","Type":"ContainerDied","Data":"9eb7ac8f0ca68c77c099de96b56d1159e971f0399ad8ba04172b97e40aae2b12"} Jan 20 18:01:04 crc kubenswrapper[4558]: I0120 18:01:04.782315 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-v2gb7" Jan 20 18:01:04 crc kubenswrapper[4558]: I0120 18:01:04.831891 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-v2gb7"] Jan 20 18:01:04 crc kubenswrapper[4558]: I0120 18:01:04.864572 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mhth9"] Jan 20 18:01:04 crc kubenswrapper[4558]: I0120 18:01:04.864821 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-mhth9" podUID="236763ea-aede-458b-8f3c-e6ec5627a605" containerName="registry-server" containerID="cri-o://06dd4bdb76a15d4317349243b4cf7dba47936c3dfaf57497abdb8ab39c067e8a" gracePeriod=2 Jan 20 18:01:05 crc kubenswrapper[4558]: E0120 18:01:05.090879 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 06dd4bdb76a15d4317349243b4cf7dba47936c3dfaf57497abdb8ab39c067e8a is running failed: container process not found" containerID="06dd4bdb76a15d4317349243b4cf7dba47936c3dfaf57497abdb8ab39c067e8a" cmd=["grpc_health_probe","-addr=:50051"] Jan 20 18:01:05 crc kubenswrapper[4558]: E0120 18:01:05.091569 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 06dd4bdb76a15d4317349243b4cf7dba47936c3dfaf57497abdb8ab39c067e8a is running failed: container process not found" containerID="06dd4bdb76a15d4317349243b4cf7dba47936c3dfaf57497abdb8ab39c067e8a" cmd=["grpc_health_probe","-addr=:50051"] Jan 20 18:01:05 crc kubenswrapper[4558]: E0120 18:01:05.091855 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 06dd4bdb76a15d4317349243b4cf7dba47936c3dfaf57497abdb8ab39c067e8a is running failed: container process not found" containerID="06dd4bdb76a15d4317349243b4cf7dba47936c3dfaf57497abdb8ab39c067e8a" cmd=["grpc_health_probe","-addr=:50051"] Jan 20 18:01:05 crc kubenswrapper[4558]: E0120 18:01:05.091896 4558 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 06dd4bdb76a15d4317349243b4cf7dba47936c3dfaf57497abdb8ab39c067e8a is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/certified-operators-mhth9" podUID="236763ea-aede-458b-8f3c-e6ec5627a605" containerName="registry-server" Jan 20 18:01:05 crc kubenswrapper[4558]: I0120 18:01:05.236106 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-mhth9" Jan 20 18:01:05 crc kubenswrapper[4558]: I0120 18:01:05.340315 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/236763ea-aede-458b-8f3c-e6ec5627a605-catalog-content\") pod \"236763ea-aede-458b-8f3c-e6ec5627a605\" (UID: \"236763ea-aede-458b-8f3c-e6ec5627a605\") " Jan 20 18:01:05 crc kubenswrapper[4558]: I0120 18:01:05.340357 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/236763ea-aede-458b-8f3c-e6ec5627a605-utilities\") pod \"236763ea-aede-458b-8f3c-e6ec5627a605\" (UID: \"236763ea-aede-458b-8f3c-e6ec5627a605\") " Jan 20 18:01:05 crc kubenswrapper[4558]: I0120 18:01:05.340503 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sw9wg\" (UniqueName: \"kubernetes.io/projected/236763ea-aede-458b-8f3c-e6ec5627a605-kube-api-access-sw9wg\") pod \"236763ea-aede-458b-8f3c-e6ec5627a605\" (UID: \"236763ea-aede-458b-8f3c-e6ec5627a605\") " Jan 20 18:01:05 crc kubenswrapper[4558]: I0120 18:01:05.342481 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/236763ea-aede-458b-8f3c-e6ec5627a605-utilities" (OuterVolumeSpecName: "utilities") pod "236763ea-aede-458b-8f3c-e6ec5627a605" (UID: "236763ea-aede-458b-8f3c-e6ec5627a605"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:01:05 crc kubenswrapper[4558]: I0120 18:01:05.347332 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/236763ea-aede-458b-8f3c-e6ec5627a605-kube-api-access-sw9wg" (OuterVolumeSpecName: "kube-api-access-sw9wg") pod "236763ea-aede-458b-8f3c-e6ec5627a605" (UID: "236763ea-aede-458b-8f3c-e6ec5627a605"). InnerVolumeSpecName "kube-api-access-sw9wg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:01:05 crc kubenswrapper[4558]: I0120 18:01:05.391774 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/236763ea-aede-458b-8f3c-e6ec5627a605-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "236763ea-aede-458b-8f3c-e6ec5627a605" (UID: "236763ea-aede-458b-8f3c-e6ec5627a605"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:01:05 crc kubenswrapper[4558]: I0120 18:01:05.441962 4558 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/236763ea-aede-458b-8f3c-e6ec5627a605-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 20 18:01:05 crc kubenswrapper[4558]: I0120 18:01:05.441998 4558 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/236763ea-aede-458b-8f3c-e6ec5627a605-utilities\") on node \"crc\" DevicePath \"\"" Jan 20 18:01:05 crc kubenswrapper[4558]: I0120 18:01:05.442012 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sw9wg\" (UniqueName: \"kubernetes.io/projected/236763ea-aede-458b-8f3c-e6ec5627a605-kube-api-access-sw9wg\") on node \"crc\" DevicePath \"\"" Jan 20 18:01:05 crc kubenswrapper[4558]: I0120 18:01:05.734580 4558 generic.go:334] "Generic (PLEG): container finished" podID="236763ea-aede-458b-8f3c-e6ec5627a605" containerID="06dd4bdb76a15d4317349243b4cf7dba47936c3dfaf57497abdb8ab39c067e8a" exitCode=0 Jan 20 18:01:05 crc kubenswrapper[4558]: I0120 18:01:05.734650 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mhth9" Jan 20 18:01:05 crc kubenswrapper[4558]: I0120 18:01:05.734756 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mhth9" event={"ID":"236763ea-aede-458b-8f3c-e6ec5627a605","Type":"ContainerDied","Data":"06dd4bdb76a15d4317349243b4cf7dba47936c3dfaf57497abdb8ab39c067e8a"} Jan 20 18:01:05 crc kubenswrapper[4558]: I0120 18:01:05.734813 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mhth9" event={"ID":"236763ea-aede-458b-8f3c-e6ec5627a605","Type":"ContainerDied","Data":"e2c6db6cfb5b52f79841a980c3830e41e4e8e0da3c3b795eca2973fbfef7a06e"} Jan 20 18:01:05 crc kubenswrapper[4558]: I0120 18:01:05.734840 4558 scope.go:117] "RemoveContainer" containerID="06dd4bdb76a15d4317349243b4cf7dba47936c3dfaf57497abdb8ab39c067e8a" Jan 20 18:01:05 crc kubenswrapper[4558]: I0120 18:01:05.771176 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mhth9"] Jan 20 18:01:05 crc kubenswrapper[4558]: I0120 18:01:05.780116 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-mhth9"] Jan 20 18:01:05 crc kubenswrapper[4558]: I0120 18:01:05.781883 4558 scope.go:117] "RemoveContainer" containerID="5318abbea033245e7d6ff85032b9a8ae9c8cc272b42b4d5a565025df199ae290" Jan 20 18:01:05 crc kubenswrapper[4558]: I0120 18:01:05.822421 4558 scope.go:117] "RemoveContainer" containerID="01129e26dee8b3dadd62d4b58f53182bc18ceb362bdb5e1f94d88321e95445a4" Jan 20 18:01:05 crc kubenswrapper[4558]: I0120 18:01:05.848836 4558 scope.go:117] "RemoveContainer" containerID="06dd4bdb76a15d4317349243b4cf7dba47936c3dfaf57497abdb8ab39c067e8a" Jan 20 18:01:05 crc kubenswrapper[4558]: E0120 18:01:05.855879 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"06dd4bdb76a15d4317349243b4cf7dba47936c3dfaf57497abdb8ab39c067e8a\": container with ID starting with 06dd4bdb76a15d4317349243b4cf7dba47936c3dfaf57497abdb8ab39c067e8a not found: ID does not exist" containerID="06dd4bdb76a15d4317349243b4cf7dba47936c3dfaf57497abdb8ab39c067e8a" Jan 20 18:01:05 crc kubenswrapper[4558]: I0120 18:01:05.855930 
4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"06dd4bdb76a15d4317349243b4cf7dba47936c3dfaf57497abdb8ab39c067e8a"} err="failed to get container status \"06dd4bdb76a15d4317349243b4cf7dba47936c3dfaf57497abdb8ab39c067e8a\": rpc error: code = NotFound desc = could not find container \"06dd4bdb76a15d4317349243b4cf7dba47936c3dfaf57497abdb8ab39c067e8a\": container with ID starting with 06dd4bdb76a15d4317349243b4cf7dba47936c3dfaf57497abdb8ab39c067e8a not found: ID does not exist" Jan 20 18:01:05 crc kubenswrapper[4558]: I0120 18:01:05.855959 4558 scope.go:117] "RemoveContainer" containerID="5318abbea033245e7d6ff85032b9a8ae9c8cc272b42b4d5a565025df199ae290" Jan 20 18:01:05 crc kubenswrapper[4558]: E0120 18:01:05.856880 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5318abbea033245e7d6ff85032b9a8ae9c8cc272b42b4d5a565025df199ae290\": container with ID starting with 5318abbea033245e7d6ff85032b9a8ae9c8cc272b42b4d5a565025df199ae290 not found: ID does not exist" containerID="5318abbea033245e7d6ff85032b9a8ae9c8cc272b42b4d5a565025df199ae290" Jan 20 18:01:05 crc kubenswrapper[4558]: I0120 18:01:05.856910 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5318abbea033245e7d6ff85032b9a8ae9c8cc272b42b4d5a565025df199ae290"} err="failed to get container status \"5318abbea033245e7d6ff85032b9a8ae9c8cc272b42b4d5a565025df199ae290\": rpc error: code = NotFound desc = could not find container \"5318abbea033245e7d6ff85032b9a8ae9c8cc272b42b4d5a565025df199ae290\": container with ID starting with 5318abbea033245e7d6ff85032b9a8ae9c8cc272b42b4d5a565025df199ae290 not found: ID does not exist" Jan 20 18:01:05 crc kubenswrapper[4558]: I0120 18:01:05.856957 4558 scope.go:117] "RemoveContainer" containerID="01129e26dee8b3dadd62d4b58f53182bc18ceb362bdb5e1f94d88321e95445a4" Jan 20 18:01:05 crc kubenswrapper[4558]: E0120 18:01:05.857237 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"01129e26dee8b3dadd62d4b58f53182bc18ceb362bdb5e1f94d88321e95445a4\": container with ID starting with 01129e26dee8b3dadd62d4b58f53182bc18ceb362bdb5e1f94d88321e95445a4 not found: ID does not exist" containerID="01129e26dee8b3dadd62d4b58f53182bc18ceb362bdb5e1f94d88321e95445a4" Jan 20 18:01:05 crc kubenswrapper[4558]: I0120 18:01:05.857260 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"01129e26dee8b3dadd62d4b58f53182bc18ceb362bdb5e1f94d88321e95445a4"} err="failed to get container status \"01129e26dee8b3dadd62d4b58f53182bc18ceb362bdb5e1f94d88321e95445a4\": rpc error: code = NotFound desc = could not find container \"01129e26dee8b3dadd62d4b58f53182bc18ceb362bdb5e1f94d88321e95445a4\": container with ID starting with 01129e26dee8b3dadd62d4b58f53182bc18ceb362bdb5e1f94d88321e95445a4 not found: ID does not exist" Jan 20 18:01:05 crc kubenswrapper[4558]: I0120 18:01:05.974695 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/libvirt-edpm-compute-global-edpm-compute-global-r2b84" Jan 20 18:01:06 crc kubenswrapper[4558]: I0120 18:01:06.053055 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jzmfb\" (UniqueName: \"kubernetes.io/projected/45750bec-72b3-45e9-9a13-14b1a3e409a3-kube-api-access-jzmfb\") pod \"45750bec-72b3-45e9-9a13-14b1a3e409a3\" (UID: \"45750bec-72b3-45e9-9a13-14b1a3e409a3\") " Jan 20 18:01:06 crc kubenswrapper[4558]: I0120 18:01:06.053105 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45750bec-72b3-45e9-9a13-14b1a3e409a3-libvirt-combined-ca-bundle\") pod \"45750bec-72b3-45e9-9a13-14b1a3e409a3\" (UID: \"45750bec-72b3-45e9-9a13-14b1a3e409a3\") " Jan 20 18:01:06 crc kubenswrapper[4558]: I0120 18:01:06.053144 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/45750bec-72b3-45e9-9a13-14b1a3e409a3-libvirt-secret-0\") pod \"45750bec-72b3-45e9-9a13-14b1a3e409a3\" (UID: \"45750bec-72b3-45e9-9a13-14b1a3e409a3\") " Jan 20 18:01:06 crc kubenswrapper[4558]: I0120 18:01:06.053215 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/45750bec-72b3-45e9-9a13-14b1a3e409a3-ssh-key-edpm-compute-global\") pod \"45750bec-72b3-45e9-9a13-14b1a3e409a3\" (UID: \"45750bec-72b3-45e9-9a13-14b1a3e409a3\") " Jan 20 18:01:06 crc kubenswrapper[4558]: I0120 18:01:06.053254 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/45750bec-72b3-45e9-9a13-14b1a3e409a3-inventory\") pod \"45750bec-72b3-45e9-9a13-14b1a3e409a3\" (UID: \"45750bec-72b3-45e9-9a13-14b1a3e409a3\") " Jan 20 18:01:06 crc kubenswrapper[4558]: I0120 18:01:06.061464 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/45750bec-72b3-45e9-9a13-14b1a3e409a3-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "45750bec-72b3-45e9-9a13-14b1a3e409a3" (UID: "45750bec-72b3-45e9-9a13-14b1a3e409a3"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:01:06 crc kubenswrapper[4558]: I0120 18:01:06.063323 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/45750bec-72b3-45e9-9a13-14b1a3e409a3-kube-api-access-jzmfb" (OuterVolumeSpecName: "kube-api-access-jzmfb") pod "45750bec-72b3-45e9-9a13-14b1a3e409a3" (UID: "45750bec-72b3-45e9-9a13-14b1a3e409a3"). InnerVolumeSpecName "kube-api-access-jzmfb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:01:06 crc kubenswrapper[4558]: I0120 18:01:06.078483 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/45750bec-72b3-45e9-9a13-14b1a3e409a3-inventory" (OuterVolumeSpecName: "inventory") pod "45750bec-72b3-45e9-9a13-14b1a3e409a3" (UID: "45750bec-72b3-45e9-9a13-14b1a3e409a3"). InnerVolumeSpecName "inventory". 
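The burst of NotFound errors around 18:01:05 (the ExecSync readiness probes and then ContainerStatus/DeleteContainer for 06dd4bdb…, 5318abbe… and 01129e26…) all refer to containers of the deleted certified-operators-mhth9 pod that CRI-O had already removed; the kubelet appears to log them and move on rather than retry. A minimal sketch of the usual pattern for treating a gRPC NotFound as an idempotent result (an illustration only, not the kubelet's actual code path):

    package main

    import (
        "fmt"

        "google.golang.org/grpc/codes"
        "google.golang.org/grpc/status"
    )

    // removeIfPresent treats NotFound as "already removed" instead of a failure.
    func removeIfPresent(remove func(id string) error, id string) error {
        if err := remove(id); err != nil {
            if status.Code(err) == codes.NotFound {
                return nil // container is already gone; nothing left to do
            }
            return fmt.Errorf("removing container %s: %w", id, err)
        }
        return nil
    }

    func main() {
        notFound := status.Error(codes.NotFound, "container not found: ID does not exist")
        fmt.Println(removeIfPresent(func(string) error { return notFound }, "06dd4b")) // <nil>
    }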
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:01:06 crc kubenswrapper[4558]: I0120 18:01:06.086523 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/45750bec-72b3-45e9-9a13-14b1a3e409a3-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "45750bec-72b3-45e9-9a13-14b1a3e409a3" (UID: "45750bec-72b3-45e9-9a13-14b1a3e409a3"). InnerVolumeSpecName "libvirt-secret-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:01:06 crc kubenswrapper[4558]: I0120 18:01:06.090299 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/45750bec-72b3-45e9-9a13-14b1a3e409a3-ssh-key-edpm-compute-global" (OuterVolumeSpecName: "ssh-key-edpm-compute-global") pod "45750bec-72b3-45e9-9a13-14b1a3e409a3" (UID: "45750bec-72b3-45e9-9a13-14b1a3e409a3"). InnerVolumeSpecName "ssh-key-edpm-compute-global". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:01:06 crc kubenswrapper[4558]: I0120 18:01:06.154421 4558 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/45750bec-72b3-45e9-9a13-14b1a3e409a3-libvirt-secret-0\") on node \"crc\" DevicePath \"\"" Jan 20 18:01:06 crc kubenswrapper[4558]: I0120 18:01:06.154454 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/45750bec-72b3-45e9-9a13-14b1a3e409a3-ssh-key-edpm-compute-global\") on node \"crc\" DevicePath \"\"" Jan 20 18:01:06 crc kubenswrapper[4558]: I0120 18:01:06.154469 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/45750bec-72b3-45e9-9a13-14b1a3e409a3-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:01:06 crc kubenswrapper[4558]: I0120 18:01:06.154481 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jzmfb\" (UniqueName: \"kubernetes.io/projected/45750bec-72b3-45e9-9a13-14b1a3e409a3-kube-api-access-jzmfb\") on node \"crc\" DevicePath \"\"" Jan 20 18:01:06 crc kubenswrapper[4558]: I0120 18:01:06.154490 4558 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45750bec-72b3-45e9-9a13-14b1a3e409a3-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:01:06 crc kubenswrapper[4558]: I0120 18:01:06.575999 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="236763ea-aede-458b-8f3c-e6ec5627a605" path="/var/lib/kubelet/pods/236763ea-aede-458b-8f3c-e6ec5627a605/volumes" Jan 20 18:01:06 crc kubenswrapper[4558]: I0120 18:01:06.745885 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/libvirt-edpm-compute-global-edpm-compute-global-r2b84" Jan 20 18:01:06 crc kubenswrapper[4558]: I0120 18:01:06.745863 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/libvirt-edpm-compute-global-edpm-compute-global-r2b84" event={"ID":"45750bec-72b3-45e9-9a13-14b1a3e409a3","Type":"ContainerDied","Data":"36218eeae7bbb995d1f6537d2b91c3028101f3eead89b0b742b8eaab0896fc6b"} Jan 20 18:01:06 crc kubenswrapper[4558]: I0120 18:01:06.746015 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="36218eeae7bbb995d1f6537d2b91c3028101f3eead89b0b742b8eaab0896fc6b" Jan 20 18:01:06 crc kubenswrapper[4558]: I0120 18:01:06.809115 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-edpm-compute-global-edpm-compute-global-8f677"] Jan 20 18:01:06 crc kubenswrapper[4558]: E0120 18:01:06.809688 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="236763ea-aede-458b-8f3c-e6ec5627a605" containerName="registry-server" Jan 20 18:01:06 crc kubenswrapper[4558]: I0120 18:01:06.809705 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="236763ea-aede-458b-8f3c-e6ec5627a605" containerName="registry-server" Jan 20 18:01:06 crc kubenswrapper[4558]: E0120 18:01:06.809731 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45750bec-72b3-45e9-9a13-14b1a3e409a3" containerName="libvirt-edpm-compute-global-edpm-compute-global" Jan 20 18:01:06 crc kubenswrapper[4558]: I0120 18:01:06.809738 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="45750bec-72b3-45e9-9a13-14b1a3e409a3" containerName="libvirt-edpm-compute-global-edpm-compute-global" Jan 20 18:01:06 crc kubenswrapper[4558]: E0120 18:01:06.809752 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="236763ea-aede-458b-8f3c-e6ec5627a605" containerName="extract-content" Jan 20 18:01:06 crc kubenswrapper[4558]: I0120 18:01:06.809758 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="236763ea-aede-458b-8f3c-e6ec5627a605" containerName="extract-content" Jan 20 18:01:06 crc kubenswrapper[4558]: E0120 18:01:06.809769 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="236763ea-aede-458b-8f3c-e6ec5627a605" containerName="extract-utilities" Jan 20 18:01:06 crc kubenswrapper[4558]: I0120 18:01:06.809774 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="236763ea-aede-458b-8f3c-e6ec5627a605" containerName="extract-utilities" Jan 20 18:01:06 crc kubenswrapper[4558]: I0120 18:01:06.809894 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="45750bec-72b3-45e9-9a13-14b1a3e409a3" containerName="libvirt-edpm-compute-global-edpm-compute-global" Jan 20 18:01:06 crc kubenswrapper[4558]: I0120 18:01:06.809910 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="236763ea-aede-458b-8f3c-e6ec5627a605" containerName="registry-server" Jan 20 18:01:06 crc kubenswrapper[4558]: I0120 18:01:06.810413 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-edpm-compute-global-edpm-compute-global-8f677" Jan 20 18:01:06 crc kubenswrapper[4558]: I0120 18:01:06.811652 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-migration-ssh-key" Jan 20 18:01:06 crc kubenswrapper[4558]: I0120 18:01:06.812978 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"combined-ca-bundle" Jan 20 18:01:06 crc kubenswrapper[4558]: I0120 18:01:06.813320 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-edpm-compute-global" Jan 20 18:01:06 crc kubenswrapper[4558]: I0120 18:01:06.813374 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-compute-config" Jan 20 18:01:06 crc kubenswrapper[4558]: I0120 18:01:06.813399 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 18:01:06 crc kubenswrapper[4558]: I0120 18:01:06.813589 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 18:01:06 crc kubenswrapper[4558]: I0120 18:01:06.813964 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"edpm-compute-global-dockercfg-wkdxg" Jan 20 18:01:06 crc kubenswrapper[4558]: I0120 18:01:06.823542 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-edpm-compute-global-edpm-compute-global-8f677"] Jan 20 18:01:06 crc kubenswrapper[4558]: I0120 18:01:06.864544 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8905304c-0fea-45d1-9fd4-554934a574b4-inventory\") pod \"nova-edpm-compute-global-edpm-compute-global-8f677\" (UID: \"8905304c-0fea-45d1-9fd4-554934a574b4\") " pod="openstack-kuttl-tests/nova-edpm-compute-global-edpm-compute-global-8f677" Jan 20 18:01:06 crc kubenswrapper[4558]: I0120 18:01:06.864595 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/8905304c-0fea-45d1-9fd4-554934a574b4-nova-migration-ssh-key-1\") pod \"nova-edpm-compute-global-edpm-compute-global-8f677\" (UID: \"8905304c-0fea-45d1-9fd4-554934a574b4\") " pod="openstack-kuttl-tests/nova-edpm-compute-global-edpm-compute-global-8f677" Jan 20 18:01:06 crc kubenswrapper[4558]: I0120 18:01:06.864621 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nwq69\" (UniqueName: \"kubernetes.io/projected/8905304c-0fea-45d1-9fd4-554934a574b4-kube-api-access-nwq69\") pod \"nova-edpm-compute-global-edpm-compute-global-8f677\" (UID: \"8905304c-0fea-45d1-9fd4-554934a574b4\") " pod="openstack-kuttl-tests/nova-edpm-compute-global-edpm-compute-global-8f677" Jan 20 18:01:06 crc kubenswrapper[4558]: I0120 18:01:06.864705 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/8905304c-0fea-45d1-9fd4-554934a574b4-nova-cell1-compute-config-0\") pod \"nova-edpm-compute-global-edpm-compute-global-8f677\" (UID: \"8905304c-0fea-45d1-9fd4-554934a574b4\") " pod="openstack-kuttl-tests/nova-edpm-compute-global-edpm-compute-global-8f677" Jan 20 18:01:06 crc kubenswrapper[4558]: I0120 
18:01:06.864789 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/8905304c-0fea-45d1-9fd4-554934a574b4-nova-migration-ssh-key-0\") pod \"nova-edpm-compute-global-edpm-compute-global-8f677\" (UID: \"8905304c-0fea-45d1-9fd4-554934a574b4\") " pod="openstack-kuttl-tests/nova-edpm-compute-global-edpm-compute-global-8f677" Jan 20 18:01:06 crc kubenswrapper[4558]: I0120 18:01:06.864854 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8905304c-0fea-45d1-9fd4-554934a574b4-nova-combined-ca-bundle\") pod \"nova-edpm-compute-global-edpm-compute-global-8f677\" (UID: \"8905304c-0fea-45d1-9fd4-554934a574b4\") " pod="openstack-kuttl-tests/nova-edpm-compute-global-edpm-compute-global-8f677" Jan 20 18:01:06 crc kubenswrapper[4558]: I0120 18:01:06.864882 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/8905304c-0fea-45d1-9fd4-554934a574b4-nova-cell1-compute-config-1\") pod \"nova-edpm-compute-global-edpm-compute-global-8f677\" (UID: \"8905304c-0fea-45d1-9fd4-554934a574b4\") " pod="openstack-kuttl-tests/nova-edpm-compute-global-edpm-compute-global-8f677" Jan 20 18:01:06 crc kubenswrapper[4558]: I0120 18:01:06.864913 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/8905304c-0fea-45d1-9fd4-554934a574b4-ssh-key-edpm-compute-global\") pod \"nova-edpm-compute-global-edpm-compute-global-8f677\" (UID: \"8905304c-0fea-45d1-9fd4-554934a574b4\") " pod="openstack-kuttl-tests/nova-edpm-compute-global-edpm-compute-global-8f677" Jan 20 18:01:06 crc kubenswrapper[4558]: I0120 18:01:06.965800 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/8905304c-0fea-45d1-9fd4-554934a574b4-nova-migration-ssh-key-0\") pod \"nova-edpm-compute-global-edpm-compute-global-8f677\" (UID: \"8905304c-0fea-45d1-9fd4-554934a574b4\") " pod="openstack-kuttl-tests/nova-edpm-compute-global-edpm-compute-global-8f677" Jan 20 18:01:06 crc kubenswrapper[4558]: I0120 18:01:06.965850 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8905304c-0fea-45d1-9fd4-554934a574b4-nova-combined-ca-bundle\") pod \"nova-edpm-compute-global-edpm-compute-global-8f677\" (UID: \"8905304c-0fea-45d1-9fd4-554934a574b4\") " pod="openstack-kuttl-tests/nova-edpm-compute-global-edpm-compute-global-8f677" Jan 20 18:01:06 crc kubenswrapper[4558]: I0120 18:01:06.965880 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/8905304c-0fea-45d1-9fd4-554934a574b4-nova-cell1-compute-config-1\") pod \"nova-edpm-compute-global-edpm-compute-global-8f677\" (UID: \"8905304c-0fea-45d1-9fd4-554934a574b4\") " pod="openstack-kuttl-tests/nova-edpm-compute-global-edpm-compute-global-8f677" Jan 20 18:01:06 crc kubenswrapper[4558]: I0120 18:01:06.965909 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: 
\"kubernetes.io/secret/8905304c-0fea-45d1-9fd4-554934a574b4-ssh-key-edpm-compute-global\") pod \"nova-edpm-compute-global-edpm-compute-global-8f677\" (UID: \"8905304c-0fea-45d1-9fd4-554934a574b4\") " pod="openstack-kuttl-tests/nova-edpm-compute-global-edpm-compute-global-8f677" Jan 20 18:01:06 crc kubenswrapper[4558]: I0120 18:01:06.965968 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8905304c-0fea-45d1-9fd4-554934a574b4-inventory\") pod \"nova-edpm-compute-global-edpm-compute-global-8f677\" (UID: \"8905304c-0fea-45d1-9fd4-554934a574b4\") " pod="openstack-kuttl-tests/nova-edpm-compute-global-edpm-compute-global-8f677" Jan 20 18:01:06 crc kubenswrapper[4558]: I0120 18:01:06.966011 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/8905304c-0fea-45d1-9fd4-554934a574b4-nova-migration-ssh-key-1\") pod \"nova-edpm-compute-global-edpm-compute-global-8f677\" (UID: \"8905304c-0fea-45d1-9fd4-554934a574b4\") " pod="openstack-kuttl-tests/nova-edpm-compute-global-edpm-compute-global-8f677" Jan 20 18:01:06 crc kubenswrapper[4558]: I0120 18:01:06.966033 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nwq69\" (UniqueName: \"kubernetes.io/projected/8905304c-0fea-45d1-9fd4-554934a574b4-kube-api-access-nwq69\") pod \"nova-edpm-compute-global-edpm-compute-global-8f677\" (UID: \"8905304c-0fea-45d1-9fd4-554934a574b4\") " pod="openstack-kuttl-tests/nova-edpm-compute-global-edpm-compute-global-8f677" Jan 20 18:01:06 crc kubenswrapper[4558]: I0120 18:01:06.966065 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/8905304c-0fea-45d1-9fd4-554934a574b4-nova-cell1-compute-config-0\") pod \"nova-edpm-compute-global-edpm-compute-global-8f677\" (UID: \"8905304c-0fea-45d1-9fd4-554934a574b4\") " pod="openstack-kuttl-tests/nova-edpm-compute-global-edpm-compute-global-8f677" Jan 20 18:01:06 crc kubenswrapper[4558]: I0120 18:01:06.971459 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8905304c-0fea-45d1-9fd4-554934a574b4-inventory\") pod \"nova-edpm-compute-global-edpm-compute-global-8f677\" (UID: \"8905304c-0fea-45d1-9fd4-554934a574b4\") " pod="openstack-kuttl-tests/nova-edpm-compute-global-edpm-compute-global-8f677" Jan 20 18:01:06 crc kubenswrapper[4558]: I0120 18:01:06.971522 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/8905304c-0fea-45d1-9fd4-554934a574b4-nova-cell1-compute-config-0\") pod \"nova-edpm-compute-global-edpm-compute-global-8f677\" (UID: \"8905304c-0fea-45d1-9fd4-554934a574b4\") " pod="openstack-kuttl-tests/nova-edpm-compute-global-edpm-compute-global-8f677" Jan 20 18:01:06 crc kubenswrapper[4558]: I0120 18:01:06.971637 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8905304c-0fea-45d1-9fd4-554934a574b4-nova-combined-ca-bundle\") pod \"nova-edpm-compute-global-edpm-compute-global-8f677\" (UID: \"8905304c-0fea-45d1-9fd4-554934a574b4\") " pod="openstack-kuttl-tests/nova-edpm-compute-global-edpm-compute-global-8f677" Jan 20 18:01:06 crc kubenswrapper[4558]: I0120 18:01:06.971695 4558 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/8905304c-0fea-45d1-9fd4-554934a574b4-nova-migration-ssh-key-1\") pod \"nova-edpm-compute-global-edpm-compute-global-8f677\" (UID: \"8905304c-0fea-45d1-9fd4-554934a574b4\") " pod="openstack-kuttl-tests/nova-edpm-compute-global-edpm-compute-global-8f677" Jan 20 18:01:06 crc kubenswrapper[4558]: I0120 18:01:06.971838 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/8905304c-0fea-45d1-9fd4-554934a574b4-nova-migration-ssh-key-0\") pod \"nova-edpm-compute-global-edpm-compute-global-8f677\" (UID: \"8905304c-0fea-45d1-9fd4-554934a574b4\") " pod="openstack-kuttl-tests/nova-edpm-compute-global-edpm-compute-global-8f677" Jan 20 18:01:06 crc kubenswrapper[4558]: I0120 18:01:06.972206 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/8905304c-0fea-45d1-9fd4-554934a574b4-nova-cell1-compute-config-1\") pod \"nova-edpm-compute-global-edpm-compute-global-8f677\" (UID: \"8905304c-0fea-45d1-9fd4-554934a574b4\") " pod="openstack-kuttl-tests/nova-edpm-compute-global-edpm-compute-global-8f677" Jan 20 18:01:06 crc kubenswrapper[4558]: I0120 18:01:06.972685 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/8905304c-0fea-45d1-9fd4-554934a574b4-ssh-key-edpm-compute-global\") pod \"nova-edpm-compute-global-edpm-compute-global-8f677\" (UID: \"8905304c-0fea-45d1-9fd4-554934a574b4\") " pod="openstack-kuttl-tests/nova-edpm-compute-global-edpm-compute-global-8f677" Jan 20 18:01:06 crc kubenswrapper[4558]: I0120 18:01:06.981158 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nwq69\" (UniqueName: \"kubernetes.io/projected/8905304c-0fea-45d1-9fd4-554934a574b4-kube-api-access-nwq69\") pod \"nova-edpm-compute-global-edpm-compute-global-8f677\" (UID: \"8905304c-0fea-45d1-9fd4-554934a574b4\") " pod="openstack-kuttl-tests/nova-edpm-compute-global-edpm-compute-global-8f677" Jan 20 18:01:07 crc kubenswrapper[4558]: I0120 18:01:07.123703 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-edpm-compute-global-edpm-compute-global-8f677" Jan 20 18:01:07 crc kubenswrapper[4558]: I0120 18:01:07.511523 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-edpm-compute-global-edpm-compute-global-8f677"] Jan 20 18:01:07 crc kubenswrapper[4558]: W0120 18:01:07.512227 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8905304c_0fea_45d1_9fd4_554934a574b4.slice/crio-88c602b1fca7ac6c2aa51e91137eeff4212c94e8272ce6ae3c6010e7ee701f65 WatchSource:0}: Error finding container 88c602b1fca7ac6c2aa51e91137eeff4212c94e8272ce6ae3c6010e7ee701f65: Status 404 returned error can't find the container with id 88c602b1fca7ac6c2aa51e91137eeff4212c94e8272ce6ae3c6010e7ee701f65 Jan 20 18:01:07 crc kubenswrapper[4558]: I0120 18:01:07.759614 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-edpm-compute-global-edpm-compute-global-8f677" event={"ID":"8905304c-0fea-45d1-9fd4-554934a574b4","Type":"ContainerStarted","Data":"88c602b1fca7ac6c2aa51e91137eeff4212c94e8272ce6ae3c6010e7ee701f65"} Jan 20 18:01:08 crc kubenswrapper[4558]: I0120 18:01:08.769340 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-edpm-compute-global-edpm-compute-global-8f677" event={"ID":"8905304c-0fea-45d1-9fd4-554934a574b4","Type":"ContainerStarted","Data":"12a4698521dc5737817cfa0bf5d12f229dda374c9b5eb85943f9d288ac3776b0"} Jan 20 18:01:08 crc kubenswrapper[4558]: I0120 18:01:08.790280 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-edpm-compute-global-edpm-compute-global-8f677" podStartSLOduration=2.187549487 podStartE2EDuration="2.790246189s" podCreationTimestamp="2026-01-20 18:01:06 +0000 UTC" firstStartedPulling="2026-01-20 18:01:07.514817237 +0000 UTC m=+4761.275155204" lastFinishedPulling="2026-01-20 18:01:08.117513938 +0000 UTC m=+4761.877851906" observedRunningTime="2026-01-20 18:01:08.787955053 +0000 UTC m=+4762.548293020" watchObservedRunningTime="2026-01-20 18:01:08.790246189 +0000 UTC m=+4762.550584156" Jan 20 18:01:09 crc kubenswrapper[4558]: I0120 18:01:09.785605 4558 generic.go:334] "Generic (PLEG): container finished" podID="8905304c-0fea-45d1-9fd4-554934a574b4" containerID="12a4698521dc5737817cfa0bf5d12f229dda374c9b5eb85943f9d288ac3776b0" exitCode=0 Jan 20 18:01:09 crc kubenswrapper[4558]: I0120 18:01:09.785725 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-edpm-compute-global-edpm-compute-global-8f677" event={"ID":"8905304c-0fea-45d1-9fd4-554934a574b4","Type":"ContainerDied","Data":"12a4698521dc5737817cfa0bf5d12f229dda374c9b5eb85943f9d288ac3776b0"} Jan 20 18:01:11 crc kubenswrapper[4558]: I0120 18:01:11.026064 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-edpm-compute-global-edpm-compute-global-8f677" Jan 20 18:01:11 crc kubenswrapper[4558]: I0120 18:01:11.138360 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/8905304c-0fea-45d1-9fd4-554934a574b4-nova-migration-ssh-key-1\") pod \"8905304c-0fea-45d1-9fd4-554934a574b4\" (UID: \"8905304c-0fea-45d1-9fd4-554934a574b4\") " Jan 20 18:01:11 crc kubenswrapper[4558]: I0120 18:01:11.138410 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8905304c-0fea-45d1-9fd4-554934a574b4-inventory\") pod \"8905304c-0fea-45d1-9fd4-554934a574b4\" (UID: \"8905304c-0fea-45d1-9fd4-554934a574b4\") " Jan 20 18:01:11 crc kubenswrapper[4558]: I0120 18:01:11.138450 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nwq69\" (UniqueName: \"kubernetes.io/projected/8905304c-0fea-45d1-9fd4-554934a574b4-kube-api-access-nwq69\") pod \"8905304c-0fea-45d1-9fd4-554934a574b4\" (UID: \"8905304c-0fea-45d1-9fd4-554934a574b4\") " Jan 20 18:01:11 crc kubenswrapper[4558]: I0120 18:01:11.138483 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8905304c-0fea-45d1-9fd4-554934a574b4-nova-combined-ca-bundle\") pod \"8905304c-0fea-45d1-9fd4-554934a574b4\" (UID: \"8905304c-0fea-45d1-9fd4-554934a574b4\") " Jan 20 18:01:11 crc kubenswrapper[4558]: I0120 18:01:11.138517 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/8905304c-0fea-45d1-9fd4-554934a574b4-nova-migration-ssh-key-0\") pod \"8905304c-0fea-45d1-9fd4-554934a574b4\" (UID: \"8905304c-0fea-45d1-9fd4-554934a574b4\") " Jan 20 18:01:11 crc kubenswrapper[4558]: I0120 18:01:11.138539 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/8905304c-0fea-45d1-9fd4-554934a574b4-nova-cell1-compute-config-1\") pod \"8905304c-0fea-45d1-9fd4-554934a574b4\" (UID: \"8905304c-0fea-45d1-9fd4-554934a574b4\") " Jan 20 18:01:11 crc kubenswrapper[4558]: I0120 18:01:11.138570 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/8905304c-0fea-45d1-9fd4-554934a574b4-nova-cell1-compute-config-0\") pod \"8905304c-0fea-45d1-9fd4-554934a574b4\" (UID: \"8905304c-0fea-45d1-9fd4-554934a574b4\") " Jan 20 18:01:11 crc kubenswrapper[4558]: I0120 18:01:11.138628 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/8905304c-0fea-45d1-9fd4-554934a574b4-ssh-key-edpm-compute-global\") pod \"8905304c-0fea-45d1-9fd4-554934a574b4\" (UID: \"8905304c-0fea-45d1-9fd4-554934a574b4\") " Jan 20 18:01:11 crc kubenswrapper[4558]: I0120 18:01:11.143580 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8905304c-0fea-45d1-9fd4-554934a574b4-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "8905304c-0fea-45d1-9fd4-554934a574b4" (UID: "8905304c-0fea-45d1-9fd4-554934a574b4"). InnerVolumeSpecName "nova-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:01:11 crc kubenswrapper[4558]: I0120 18:01:11.143671 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8905304c-0fea-45d1-9fd4-554934a574b4-kube-api-access-nwq69" (OuterVolumeSpecName: "kube-api-access-nwq69") pod "8905304c-0fea-45d1-9fd4-554934a574b4" (UID: "8905304c-0fea-45d1-9fd4-554934a574b4"). InnerVolumeSpecName "kube-api-access-nwq69". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:01:11 crc kubenswrapper[4558]: E0120 18:01:11.158901 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8905304c-0fea-45d1-9fd4-554934a574b4-inventory podName:8905304c-0fea-45d1-9fd4-554934a574b4 nodeName:}" failed. No retries permitted until 2026-01-20 18:01:11.658845246 +0000 UTC m=+4765.419183213 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "inventory" (UniqueName: "kubernetes.io/secret/8905304c-0fea-45d1-9fd4-554934a574b4-inventory") pod "8905304c-0fea-45d1-9fd4-554934a574b4" (UID: "8905304c-0fea-45d1-9fd4-554934a574b4") : error deleting /var/lib/kubelet/pods/8905304c-0fea-45d1-9fd4-554934a574b4/volume-subpaths: remove /var/lib/kubelet/pods/8905304c-0fea-45d1-9fd4-554934a574b4/volume-subpaths: no such file or directory Jan 20 18:01:11 crc kubenswrapper[4558]: E0120 18:01:11.159119 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8905304c-0fea-45d1-9fd4-554934a574b4-ssh-key-edpm-compute-global podName:8905304c-0fea-45d1-9fd4-554934a574b4 nodeName:}" failed. No retries permitted until 2026-01-20 18:01:11.659104773 +0000 UTC m=+4765.419442741 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "ssh-key-edpm-compute-global" (UniqueName: "kubernetes.io/secret/8905304c-0fea-45d1-9fd4-554934a574b4-ssh-key-edpm-compute-global") pod "8905304c-0fea-45d1-9fd4-554934a574b4" (UID: "8905304c-0fea-45d1-9fd4-554934a574b4") : error deleting /var/lib/kubelet/pods/8905304c-0fea-45d1-9fd4-554934a574b4/volume-subpaths: remove /var/lib/kubelet/pods/8905304c-0fea-45d1-9fd4-554934a574b4/volume-subpaths: no such file or directory Jan 20 18:01:11 crc kubenswrapper[4558]: I0120 18:01:11.159563 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8905304c-0fea-45d1-9fd4-554934a574b4-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "8905304c-0fea-45d1-9fd4-554934a574b4" (UID: "8905304c-0fea-45d1-9fd4-554934a574b4"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:01:11 crc kubenswrapper[4558]: I0120 18:01:11.160655 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8905304c-0fea-45d1-9fd4-554934a574b4-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "8905304c-0fea-45d1-9fd4-554934a574b4" (UID: "8905304c-0fea-45d1-9fd4-554934a574b4"). InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:01:11 crc kubenswrapper[4558]: I0120 18:01:11.161028 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8905304c-0fea-45d1-9fd4-554934a574b4-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "8905304c-0fea-45d1-9fd4-554934a574b4" (UID: "8905304c-0fea-45d1-9fd4-554934a574b4"). InnerVolumeSpecName "nova-cell1-compute-config-1". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:01:11 crc kubenswrapper[4558]: I0120 18:01:11.161470 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8905304c-0fea-45d1-9fd4-554934a574b4-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "8905304c-0fea-45d1-9fd4-554934a574b4" (UID: "8905304c-0fea-45d1-9fd4-554934a574b4"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:01:11 crc kubenswrapper[4558]: I0120 18:01:11.241308 4558 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/8905304c-0fea-45d1-9fd4-554934a574b4-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Jan 20 18:01:11 crc kubenswrapper[4558]: I0120 18:01:11.241456 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nwq69\" (UniqueName: \"kubernetes.io/projected/8905304c-0fea-45d1-9fd4-554934a574b4-kube-api-access-nwq69\") on node \"crc\" DevicePath \"\"" Jan 20 18:01:11 crc kubenswrapper[4558]: I0120 18:01:11.241481 4558 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8905304c-0fea-45d1-9fd4-554934a574b4-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:01:11 crc kubenswrapper[4558]: I0120 18:01:11.241494 4558 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/8905304c-0fea-45d1-9fd4-554934a574b4-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Jan 20 18:01:11 crc kubenswrapper[4558]: I0120 18:01:11.241508 4558 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/8905304c-0fea-45d1-9fd4-554934a574b4-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Jan 20 18:01:11 crc kubenswrapper[4558]: I0120 18:01:11.241547 4558 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/8905304c-0fea-45d1-9fd4-554934a574b4-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Jan 20 18:01:11 crc kubenswrapper[4558]: I0120 18:01:11.749404 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8905304c-0fea-45d1-9fd4-554934a574b4-inventory\") pod \"8905304c-0fea-45d1-9fd4-554934a574b4\" (UID: \"8905304c-0fea-45d1-9fd4-554934a574b4\") " Jan 20 18:01:11 crc kubenswrapper[4558]: I0120 18:01:11.749598 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/8905304c-0fea-45d1-9fd4-554934a574b4-ssh-key-edpm-compute-global\") pod \"8905304c-0fea-45d1-9fd4-554934a574b4\" (UID: \"8905304c-0fea-45d1-9fd4-554934a574b4\") " Jan 20 18:01:11 crc kubenswrapper[4558]: I0120 18:01:11.755135 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8905304c-0fea-45d1-9fd4-554934a574b4-inventory" (OuterVolumeSpecName: "inventory") pod "8905304c-0fea-45d1-9fd4-554934a574b4" (UID: "8905304c-0fea-45d1-9fd4-554934a574b4"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:01:11 crc kubenswrapper[4558]: I0120 18:01:11.755175 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8905304c-0fea-45d1-9fd4-554934a574b4-ssh-key-edpm-compute-global" (OuterVolumeSpecName: "ssh-key-edpm-compute-global") pod "8905304c-0fea-45d1-9fd4-554934a574b4" (UID: "8905304c-0fea-45d1-9fd4-554934a574b4"). InnerVolumeSpecName "ssh-key-edpm-compute-global". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:01:11 crc kubenswrapper[4558]: I0120 18:01:11.808717 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-edpm-compute-global-edpm-compute-global-8f677" event={"ID":"8905304c-0fea-45d1-9fd4-554934a574b4","Type":"ContainerDied","Data":"88c602b1fca7ac6c2aa51e91137eeff4212c94e8272ce6ae3c6010e7ee701f65"} Jan 20 18:01:11 crc kubenswrapper[4558]: I0120 18:01:11.809105 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="88c602b1fca7ac6c2aa51e91137eeff4212c94e8272ce6ae3c6010e7ee701f65" Jan 20 18:01:11 crc kubenswrapper[4558]: I0120 18:01:11.808800 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-edpm-compute-global-edpm-compute-global-8f677" Jan 20 18:01:11 crc kubenswrapper[4558]: I0120 18:01:11.850734 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8905304c-0fea-45d1-9fd4-554934a574b4-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:01:11 crc kubenswrapper[4558]: I0120 18:01:11.850770 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/8905304c-0fea-45d1-9fd4-554934a574b4-ssh-key-edpm-compute-global\") on node \"crc\" DevicePath \"\"" Jan 20 18:01:11 crc kubenswrapper[4558]: I0120 18:01:11.860340 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/custom-global-service-edpm-compute-global-g877m"] Jan 20 18:01:11 crc kubenswrapper[4558]: E0120 18:01:11.860749 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8905304c-0fea-45d1-9fd4-554934a574b4" containerName="nova-edpm-compute-global-edpm-compute-global" Jan 20 18:01:11 crc kubenswrapper[4558]: I0120 18:01:11.860770 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8905304c-0fea-45d1-9fd4-554934a574b4" containerName="nova-edpm-compute-global-edpm-compute-global" Jan 20 18:01:11 crc kubenswrapper[4558]: I0120 18:01:11.860982 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8905304c-0fea-45d1-9fd4-554934a574b4" containerName="nova-edpm-compute-global-edpm-compute-global" Jan 20 18:01:11 crc kubenswrapper[4558]: I0120 18:01:11.861573 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/custom-global-service-edpm-compute-global-g877m" Jan 20 18:01:11 crc kubenswrapper[4558]: I0120 18:01:11.864572 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"combined-ca-bundle" Jan 20 18:01:11 crc kubenswrapper[4558]: I0120 18:01:11.864593 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 18:01:11 crc kubenswrapper[4558]: I0120 18:01:11.864680 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 18:01:11 crc kubenswrapper[4558]: I0120 18:01:11.864713 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-edpm-compute-global" Jan 20 18:01:11 crc kubenswrapper[4558]: I0120 18:01:11.866471 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"edpm-compute-global-dockercfg-wkdxg" Jan 20 18:01:11 crc kubenswrapper[4558]: I0120 18:01:11.870692 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/custom-global-service-edpm-compute-global-g877m"] Jan 20 18:01:11 crc kubenswrapper[4558]: I0120 18:01:11.951547 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ntr69\" (UniqueName: \"kubernetes.io/projected/ac804cf5-3104-492e-84e8-d39ac5d82fd0-kube-api-access-ntr69\") pod \"custom-global-service-edpm-compute-global-g877m\" (UID: \"ac804cf5-3104-492e-84e8-d39ac5d82fd0\") " pod="openstack-kuttl-tests/custom-global-service-edpm-compute-global-g877m" Jan 20 18:01:11 crc kubenswrapper[4558]: I0120 18:01:11.951608 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/ac804cf5-3104-492e-84e8-d39ac5d82fd0-inventory-0\") pod \"custom-global-service-edpm-compute-global-g877m\" (UID: \"ac804cf5-3104-492e-84e8-d39ac5d82fd0\") " pod="openstack-kuttl-tests/custom-global-service-edpm-compute-global-g877m" Jan 20 18:01:11 crc kubenswrapper[4558]: I0120 18:01:11.951894 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/ac804cf5-3104-492e-84e8-d39ac5d82fd0-ssh-key-edpm-compute-global\") pod \"custom-global-service-edpm-compute-global-g877m\" (UID: \"ac804cf5-3104-492e-84e8-d39ac5d82fd0\") " pod="openstack-kuttl-tests/custom-global-service-edpm-compute-global-g877m" Jan 20 18:01:11 crc kubenswrapper[4558]: I0120 18:01:11.952184 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-global-service-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac804cf5-3104-492e-84e8-d39ac5d82fd0-custom-global-service-combined-ca-bundle\") pod \"custom-global-service-edpm-compute-global-g877m\" (UID: \"ac804cf5-3104-492e-84e8-d39ac5d82fd0\") " pod="openstack-kuttl-tests/custom-global-service-edpm-compute-global-g877m" Jan 20 18:01:12 crc kubenswrapper[4558]: I0120 18:01:12.054179 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ntr69\" (UniqueName: \"kubernetes.io/projected/ac804cf5-3104-492e-84e8-d39ac5d82fd0-kube-api-access-ntr69\") pod \"custom-global-service-edpm-compute-global-g877m\" (UID: \"ac804cf5-3104-492e-84e8-d39ac5d82fd0\") " 
pod="openstack-kuttl-tests/custom-global-service-edpm-compute-global-g877m" Jan 20 18:01:12 crc kubenswrapper[4558]: I0120 18:01:12.054244 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/ac804cf5-3104-492e-84e8-d39ac5d82fd0-inventory-0\") pod \"custom-global-service-edpm-compute-global-g877m\" (UID: \"ac804cf5-3104-492e-84e8-d39ac5d82fd0\") " pod="openstack-kuttl-tests/custom-global-service-edpm-compute-global-g877m" Jan 20 18:01:12 crc kubenswrapper[4558]: I0120 18:01:12.054314 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/ac804cf5-3104-492e-84e8-d39ac5d82fd0-ssh-key-edpm-compute-global\") pod \"custom-global-service-edpm-compute-global-g877m\" (UID: \"ac804cf5-3104-492e-84e8-d39ac5d82fd0\") " pod="openstack-kuttl-tests/custom-global-service-edpm-compute-global-g877m" Jan 20 18:01:12 crc kubenswrapper[4558]: I0120 18:01:12.054374 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-global-service-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac804cf5-3104-492e-84e8-d39ac5d82fd0-custom-global-service-combined-ca-bundle\") pod \"custom-global-service-edpm-compute-global-g877m\" (UID: \"ac804cf5-3104-492e-84e8-d39ac5d82fd0\") " pod="openstack-kuttl-tests/custom-global-service-edpm-compute-global-g877m" Jan 20 18:01:12 crc kubenswrapper[4558]: I0120 18:01:12.059726 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-global-service-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac804cf5-3104-492e-84e8-d39ac5d82fd0-custom-global-service-combined-ca-bundle\") pod \"custom-global-service-edpm-compute-global-g877m\" (UID: \"ac804cf5-3104-492e-84e8-d39ac5d82fd0\") " pod="openstack-kuttl-tests/custom-global-service-edpm-compute-global-g877m" Jan 20 18:01:12 crc kubenswrapper[4558]: I0120 18:01:12.059726 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/ac804cf5-3104-492e-84e8-d39ac5d82fd0-inventory-0\") pod \"custom-global-service-edpm-compute-global-g877m\" (UID: \"ac804cf5-3104-492e-84e8-d39ac5d82fd0\") " pod="openstack-kuttl-tests/custom-global-service-edpm-compute-global-g877m" Jan 20 18:01:12 crc kubenswrapper[4558]: I0120 18:01:12.059878 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/ac804cf5-3104-492e-84e8-d39ac5d82fd0-ssh-key-edpm-compute-global\") pod \"custom-global-service-edpm-compute-global-g877m\" (UID: \"ac804cf5-3104-492e-84e8-d39ac5d82fd0\") " pod="openstack-kuttl-tests/custom-global-service-edpm-compute-global-g877m" Jan 20 18:01:12 crc kubenswrapper[4558]: I0120 18:01:12.070434 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ntr69\" (UniqueName: \"kubernetes.io/projected/ac804cf5-3104-492e-84e8-d39ac5d82fd0-kube-api-access-ntr69\") pod \"custom-global-service-edpm-compute-global-g877m\" (UID: \"ac804cf5-3104-492e-84e8-d39ac5d82fd0\") " pod="openstack-kuttl-tests/custom-global-service-edpm-compute-global-g877m" Jan 20 18:01:12 crc kubenswrapper[4558]: I0120 18:01:12.178507 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/custom-global-service-edpm-compute-global-g877m" Jan 20 18:01:12 crc kubenswrapper[4558]: W0120 18:01:12.574455 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podac804cf5_3104_492e_84e8_d39ac5d82fd0.slice/crio-d9f38475c9cac7cfeb3350759f3c280f67af07e8f2d5741ea89a3bf86caf3136 WatchSource:0}: Error finding container d9f38475c9cac7cfeb3350759f3c280f67af07e8f2d5741ea89a3bf86caf3136: Status 404 returned error can't find the container with id d9f38475c9cac7cfeb3350759f3c280f67af07e8f2d5741ea89a3bf86caf3136 Jan 20 18:01:12 crc kubenswrapper[4558]: I0120 18:01:12.577033 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/custom-global-service-edpm-compute-global-g877m"] Jan 20 18:01:12 crc kubenswrapper[4558]: I0120 18:01:12.820398 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/custom-global-service-edpm-compute-global-g877m" event={"ID":"ac804cf5-3104-492e-84e8-d39ac5d82fd0","Type":"ContainerStarted","Data":"d9f38475c9cac7cfeb3350759f3c280f67af07e8f2d5741ea89a3bf86caf3136"} Jan 20 18:01:13 crc kubenswrapper[4558]: I0120 18:01:13.832569 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/custom-global-service-edpm-compute-global-g877m" event={"ID":"ac804cf5-3104-492e-84e8-d39ac5d82fd0","Type":"ContainerStarted","Data":"b9f095dcb40f0fa598a4805ecd94fad1a657b559a8d086c77cf3682fb4f334a2"} Jan 20 18:01:13 crc kubenswrapper[4558]: I0120 18:01:13.852591 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/custom-global-service-edpm-compute-global-g877m" podStartSLOduration=2.297283401 podStartE2EDuration="2.852569613s" podCreationTimestamp="2026-01-20 18:01:11 +0000 UTC" firstStartedPulling="2026-01-20 18:01:12.578456142 +0000 UTC m=+4766.338794110" lastFinishedPulling="2026-01-20 18:01:13.133742356 +0000 UTC m=+4766.894080322" observedRunningTime="2026-01-20 18:01:13.846692786 +0000 UTC m=+4767.607030753" watchObservedRunningTime="2026-01-20 18:01:13.852569613 +0000 UTC m=+4767.612907580" Jan 20 18:01:15 crc kubenswrapper[4558]: I0120 18:01:15.854382 4558 generic.go:334] "Generic (PLEG): container finished" podID="ac804cf5-3104-492e-84e8-d39ac5d82fd0" containerID="b9f095dcb40f0fa598a4805ecd94fad1a657b559a8d086c77cf3682fb4f334a2" exitCode=0 Jan 20 18:01:15 crc kubenswrapper[4558]: I0120 18:01:15.854480 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/custom-global-service-edpm-compute-global-g877m" event={"ID":"ac804cf5-3104-492e-84e8-d39ac5d82fd0","Type":"ContainerDied","Data":"b9f095dcb40f0fa598a4805ecd94fad1a657b559a8d086c77cf3682fb4f334a2"} Jan 20 18:01:17 crc kubenswrapper[4558]: I0120 18:01:17.112723 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/custom-global-service-edpm-compute-global-g877m" Jan 20 18:01:17 crc kubenswrapper[4558]: I0120 18:01:17.233953 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/ac804cf5-3104-492e-84e8-d39ac5d82fd0-ssh-key-edpm-compute-global\") pod \"ac804cf5-3104-492e-84e8-d39ac5d82fd0\" (UID: \"ac804cf5-3104-492e-84e8-d39ac5d82fd0\") " Jan 20 18:01:17 crc kubenswrapper[4558]: I0120 18:01:17.234276 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ntr69\" (UniqueName: \"kubernetes.io/projected/ac804cf5-3104-492e-84e8-d39ac5d82fd0-kube-api-access-ntr69\") pod \"ac804cf5-3104-492e-84e8-d39ac5d82fd0\" (UID: \"ac804cf5-3104-492e-84e8-d39ac5d82fd0\") " Jan 20 18:01:17 crc kubenswrapper[4558]: I0120 18:01:17.234308 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"custom-global-service-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac804cf5-3104-492e-84e8-d39ac5d82fd0-custom-global-service-combined-ca-bundle\") pod \"ac804cf5-3104-492e-84e8-d39ac5d82fd0\" (UID: \"ac804cf5-3104-492e-84e8-d39ac5d82fd0\") " Jan 20 18:01:17 crc kubenswrapper[4558]: I0120 18:01:17.234341 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/ac804cf5-3104-492e-84e8-d39ac5d82fd0-inventory-0\") pod \"ac804cf5-3104-492e-84e8-d39ac5d82fd0\" (UID: \"ac804cf5-3104-492e-84e8-d39ac5d82fd0\") " Jan 20 18:01:17 crc kubenswrapper[4558]: I0120 18:01:17.240858 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ac804cf5-3104-492e-84e8-d39ac5d82fd0-kube-api-access-ntr69" (OuterVolumeSpecName: "kube-api-access-ntr69") pod "ac804cf5-3104-492e-84e8-d39ac5d82fd0" (UID: "ac804cf5-3104-492e-84e8-d39ac5d82fd0"). InnerVolumeSpecName "kube-api-access-ntr69". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:01:17 crc kubenswrapper[4558]: I0120 18:01:17.240892 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ac804cf5-3104-492e-84e8-d39ac5d82fd0-custom-global-service-combined-ca-bundle" (OuterVolumeSpecName: "custom-global-service-combined-ca-bundle") pod "ac804cf5-3104-492e-84e8-d39ac5d82fd0" (UID: "ac804cf5-3104-492e-84e8-d39ac5d82fd0"). InnerVolumeSpecName "custom-global-service-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:01:17 crc kubenswrapper[4558]: I0120 18:01:17.255987 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ac804cf5-3104-492e-84e8-d39ac5d82fd0-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "ac804cf5-3104-492e-84e8-d39ac5d82fd0" (UID: "ac804cf5-3104-492e-84e8-d39ac5d82fd0"). InnerVolumeSpecName "inventory-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:01:17 crc kubenswrapper[4558]: I0120 18:01:17.256223 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ac804cf5-3104-492e-84e8-d39ac5d82fd0-ssh-key-edpm-compute-global" (OuterVolumeSpecName: "ssh-key-edpm-compute-global") pod "ac804cf5-3104-492e-84e8-d39ac5d82fd0" (UID: "ac804cf5-3104-492e-84e8-d39ac5d82fd0"). InnerVolumeSpecName "ssh-key-edpm-compute-global". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:01:17 crc kubenswrapper[4558]: I0120 18:01:17.336373 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ntr69\" (UniqueName: \"kubernetes.io/projected/ac804cf5-3104-492e-84e8-d39ac5d82fd0-kube-api-access-ntr69\") on node \"crc\" DevicePath \"\"" Jan 20 18:01:17 crc kubenswrapper[4558]: I0120 18:01:17.336414 4558 reconciler_common.go:293] "Volume detached for volume \"custom-global-service-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac804cf5-3104-492e-84e8-d39ac5d82fd0-custom-global-service-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:01:17 crc kubenswrapper[4558]: I0120 18:01:17.336431 4558 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/ac804cf5-3104-492e-84e8-d39ac5d82fd0-inventory-0\") on node \"crc\" DevicePath \"\"" Jan 20 18:01:17 crc kubenswrapper[4558]: I0120 18:01:17.336446 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/ac804cf5-3104-492e-84e8-d39ac5d82fd0-ssh-key-edpm-compute-global\") on node \"crc\" DevicePath \"\"" Jan 20 18:01:17 crc kubenswrapper[4558]: I0120 18:01:17.876933 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/custom-global-service-edpm-compute-global-g877m" event={"ID":"ac804cf5-3104-492e-84e8-d39ac5d82fd0","Type":"ContainerDied","Data":"d9f38475c9cac7cfeb3350759f3c280f67af07e8f2d5741ea89a3bf86caf3136"} Jan 20 18:01:17 crc kubenswrapper[4558]: I0120 18:01:17.877378 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d9f38475c9cac7cfeb3350759f3c280f67af07e8f2d5741ea89a3bf86caf3136" Jan 20 18:01:17 crc kubenswrapper[4558]: I0120 18:01:17.877017 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/custom-global-service-edpm-compute-global-g877m" Jan 20 18:01:18 crc kubenswrapper[4558]: I0120 18:01:18.949585 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-6668544499-64ftq"] Jan 20 18:01:18 crc kubenswrapper[4558]: E0120 18:01:18.950616 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac804cf5-3104-492e-84e8-d39ac5d82fd0" containerName="custom-global-service-edpm-compute-global" Jan 20 18:01:18 crc kubenswrapper[4558]: I0120 18:01:18.956298 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac804cf5-3104-492e-84e8-d39ac5d82fd0" containerName="custom-global-service-edpm-compute-global" Jan 20 18:01:18 crc kubenswrapper[4558]: I0120 18:01:18.956606 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac804cf5-3104-492e-84e8-d39ac5d82fd0" containerName="custom-global-service-edpm-compute-global" Jan 20 18:01:18 crc kubenswrapper[4558]: I0120 18:01:18.957521 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6668544499-64ftq" Jan 20 18:01:18 crc kubenswrapper[4558]: I0120 18:01:18.961512 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"edpm-compute-beta-nodeset" Jan 20 18:01:18 crc kubenswrapper[4558]: I0120 18:01:18.975959 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-6668544499-64ftq"] Jan 20 18:01:19 crc kubenswrapper[4558]: I0120 18:01:19.068458 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/56e4457b-2828-4258-8f84-6159b2422531-config\") pod \"dnsmasq-dnsmasq-6668544499-64ftq\" (UID: \"56e4457b-2828-4258-8f84-6159b2422531\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6668544499-64ftq" Jan 20 18:01:19 crc kubenswrapper[4558]: I0120 18:01:19.068716 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/56e4457b-2828-4258-8f84-6159b2422531-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-6668544499-64ftq\" (UID: \"56e4457b-2828-4258-8f84-6159b2422531\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6668544499-64ftq" Jan 20 18:01:19 crc kubenswrapper[4558]: I0120 18:01:19.068953 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"edpm-compute-global\" (UniqueName: \"kubernetes.io/configmap/56e4457b-2828-4258-8f84-6159b2422531-edpm-compute-global\") pod \"dnsmasq-dnsmasq-6668544499-64ftq\" (UID: \"56e4457b-2828-4258-8f84-6159b2422531\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6668544499-64ftq" Jan 20 18:01:19 crc kubenswrapper[4558]: I0120 18:01:19.069074 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2779n\" (UniqueName: \"kubernetes.io/projected/56e4457b-2828-4258-8f84-6159b2422531-kube-api-access-2779n\") pod \"dnsmasq-dnsmasq-6668544499-64ftq\" (UID: \"56e4457b-2828-4258-8f84-6159b2422531\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6668544499-64ftq" Jan 20 18:01:19 crc kubenswrapper[4558]: I0120 18:01:19.069233 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"edpm-compute-beta-nodeset\" (UniqueName: \"kubernetes.io/configmap/56e4457b-2828-4258-8f84-6159b2422531-edpm-compute-beta-nodeset\") pod \"dnsmasq-dnsmasq-6668544499-64ftq\" (UID: \"56e4457b-2828-4258-8f84-6159b2422531\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6668544499-64ftq" Jan 20 18:01:19 crc kubenswrapper[4558]: I0120 18:01:19.170906 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/56e4457b-2828-4258-8f84-6159b2422531-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-6668544499-64ftq\" (UID: \"56e4457b-2828-4258-8f84-6159b2422531\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6668544499-64ftq" Jan 20 18:01:19 crc kubenswrapper[4558]: I0120 18:01:19.171083 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"edpm-compute-global\" (UniqueName: \"kubernetes.io/configmap/56e4457b-2828-4258-8f84-6159b2422531-edpm-compute-global\") pod \"dnsmasq-dnsmasq-6668544499-64ftq\" (UID: \"56e4457b-2828-4258-8f84-6159b2422531\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6668544499-64ftq" Jan 20 18:01:19 crc kubenswrapper[4558]: I0120 18:01:19.171140 4558 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-2779n\" (UniqueName: \"kubernetes.io/projected/56e4457b-2828-4258-8f84-6159b2422531-kube-api-access-2779n\") pod \"dnsmasq-dnsmasq-6668544499-64ftq\" (UID: \"56e4457b-2828-4258-8f84-6159b2422531\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6668544499-64ftq" Jan 20 18:01:19 crc kubenswrapper[4558]: I0120 18:01:19.171210 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"edpm-compute-beta-nodeset\" (UniqueName: \"kubernetes.io/configmap/56e4457b-2828-4258-8f84-6159b2422531-edpm-compute-beta-nodeset\") pod \"dnsmasq-dnsmasq-6668544499-64ftq\" (UID: \"56e4457b-2828-4258-8f84-6159b2422531\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6668544499-64ftq" Jan 20 18:01:19 crc kubenswrapper[4558]: I0120 18:01:19.171329 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/56e4457b-2828-4258-8f84-6159b2422531-config\") pod \"dnsmasq-dnsmasq-6668544499-64ftq\" (UID: \"56e4457b-2828-4258-8f84-6159b2422531\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6668544499-64ftq" Jan 20 18:01:19 crc kubenswrapper[4558]: I0120 18:01:19.172149 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/56e4457b-2828-4258-8f84-6159b2422531-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-6668544499-64ftq\" (UID: \"56e4457b-2828-4258-8f84-6159b2422531\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6668544499-64ftq" Jan 20 18:01:19 crc kubenswrapper[4558]: I0120 18:01:19.172201 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"edpm-compute-global\" (UniqueName: \"kubernetes.io/configmap/56e4457b-2828-4258-8f84-6159b2422531-edpm-compute-global\") pod \"dnsmasq-dnsmasq-6668544499-64ftq\" (UID: \"56e4457b-2828-4258-8f84-6159b2422531\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6668544499-64ftq" Jan 20 18:01:19 crc kubenswrapper[4558]: I0120 18:01:19.172497 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"edpm-compute-beta-nodeset\" (UniqueName: \"kubernetes.io/configmap/56e4457b-2828-4258-8f84-6159b2422531-edpm-compute-beta-nodeset\") pod \"dnsmasq-dnsmasq-6668544499-64ftq\" (UID: \"56e4457b-2828-4258-8f84-6159b2422531\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6668544499-64ftq" Jan 20 18:01:19 crc kubenswrapper[4558]: I0120 18:01:19.173311 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/56e4457b-2828-4258-8f84-6159b2422531-config\") pod \"dnsmasq-dnsmasq-6668544499-64ftq\" (UID: \"56e4457b-2828-4258-8f84-6159b2422531\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6668544499-64ftq" Jan 20 18:01:19 crc kubenswrapper[4558]: I0120 18:01:19.198495 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2779n\" (UniqueName: \"kubernetes.io/projected/56e4457b-2828-4258-8f84-6159b2422531-kube-api-access-2779n\") pod \"dnsmasq-dnsmasq-6668544499-64ftq\" (UID: \"56e4457b-2828-4258-8f84-6159b2422531\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6668544499-64ftq" Jan 20 18:01:19 crc kubenswrapper[4558]: I0120 18:01:19.281288 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6668544499-64ftq" Jan 20 18:01:19 crc kubenswrapper[4558]: I0120 18:01:19.706037 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-6668544499-64ftq"] Jan 20 18:01:19 crc kubenswrapper[4558]: I0120 18:01:19.902323 4558 generic.go:334] "Generic (PLEG): container finished" podID="56e4457b-2828-4258-8f84-6159b2422531" containerID="9c66ffe123731fef025cf7390bf78c73c985782dcbe2fc1e71e07008b579428d" exitCode=0 Jan 20 18:01:19 crc kubenswrapper[4558]: I0120 18:01:19.902395 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6668544499-64ftq" event={"ID":"56e4457b-2828-4258-8f84-6159b2422531","Type":"ContainerDied","Data":"9c66ffe123731fef025cf7390bf78c73c985782dcbe2fc1e71e07008b579428d"} Jan 20 18:01:19 crc kubenswrapper[4558]: I0120 18:01:19.902464 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6668544499-64ftq" event={"ID":"56e4457b-2828-4258-8f84-6159b2422531","Type":"ContainerStarted","Data":"4fd1f6c486eb12b56b43499db44f751463127b26e6c839091c911bb26660d920"} Jan 20 18:01:20 crc kubenswrapper[4558]: I0120 18:01:20.913469 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6668544499-64ftq" event={"ID":"56e4457b-2828-4258-8f84-6159b2422531","Type":"ContainerStarted","Data":"d02adfd4d4f0140e2faca8210c03484f2c1494acd968b18032fa2acd0976c05f"} Jan 20 18:01:20 crc kubenswrapper[4558]: I0120 18:01:20.913859 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6668544499-64ftq" Jan 20 18:01:20 crc kubenswrapper[4558]: I0120 18:01:20.931269 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6668544499-64ftq" podStartSLOduration=2.931243868 podStartE2EDuration="2.931243868s" podCreationTimestamp="2026-01-20 18:01:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:01:20.930217037 +0000 UTC m=+4774.690555005" watchObservedRunningTime="2026-01-20 18:01:20.931243868 +0000 UTC m=+4774.691581836" Jan 20 18:01:29 crc kubenswrapper[4558]: I0120 18:01:29.282346 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6668544499-64ftq" Jan 20 18:01:29 crc kubenswrapper[4558]: I0120 18:01:29.329842 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-7d78464677-qck2g"] Jan 20 18:01:29 crc kubenswrapper[4558]: I0120 18:01:29.330064 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7d78464677-qck2g" podUID="c224b81e-d5b0-4b28-86ce-04a9f616a986" containerName="dnsmasq-dns" containerID="cri-o://53550b6bbab1413ad18d77360ae821adcce85e9150bdbdcaf3e557bd7027eb4d" gracePeriod=10 Jan 20 18:01:29 crc kubenswrapper[4558]: I0120 18:01:29.685409 4558 scope.go:117] "RemoveContainer" containerID="6951040f5e0eb5e16deb6671f1539f1de80f180d1d0d5cf02b5584ddc8280caf" Jan 20 18:01:29 crc kubenswrapper[4558]: I0120 18:01:29.712327 4558 scope.go:117] "RemoveContainer" containerID="49dc4ba201bbec0d9ff9d16fd2d2b2c98c88e6f20020f744fca0291f9f36c748" Jan 20 18:01:29 crc kubenswrapper[4558]: I0120 18:01:29.722073 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7d78464677-qck2g" Jan 20 18:01:29 crc kubenswrapper[4558]: I0120 18:01:29.732027 4558 scope.go:117] "RemoveContainer" containerID="08c5c660504f209c3b527a903bc2ca2f4a2088161c85be3401c90c3cddbca989" Jan 20 18:01:29 crc kubenswrapper[4558]: I0120 18:01:29.752342 4558 scope.go:117] "RemoveContainer" containerID="92a5f58dc2bcdf82ee4c76417da6d37919bddddc7444ff849574003ec7f86cee" Jan 20 18:01:29 crc kubenswrapper[4558]: I0120 18:01:29.767407 4558 scope.go:117] "RemoveContainer" containerID="4b7b35525f0c3029f485e041b6343e7110c7464da4bbf0f7e3ce658808702802" Jan 20 18:01:29 crc kubenswrapper[4558]: I0120 18:01:29.784862 4558 scope.go:117] "RemoveContainer" containerID="72db31604b9af667b1c4e5942704410ff8f1e3af14c2a6fabe1fefc79f363d83" Jan 20 18:01:29 crc kubenswrapper[4558]: I0120 18:01:29.804858 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2jqc9\" (UniqueName: \"kubernetes.io/projected/c224b81e-d5b0-4b28-86ce-04a9f616a986-kube-api-access-2jqc9\") pod \"c224b81e-d5b0-4b28-86ce-04a9f616a986\" (UID: \"c224b81e-d5b0-4b28-86ce-04a9f616a986\") " Jan 20 18:01:29 crc kubenswrapper[4558]: I0120 18:01:29.804927 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"edpm-compute-global\" (UniqueName: \"kubernetes.io/configmap/c224b81e-d5b0-4b28-86ce-04a9f616a986-edpm-compute-global\") pod \"c224b81e-d5b0-4b28-86ce-04a9f616a986\" (UID: \"c224b81e-d5b0-4b28-86ce-04a9f616a986\") " Jan 20 18:01:29 crc kubenswrapper[4558]: I0120 18:01:29.804966 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c224b81e-d5b0-4b28-86ce-04a9f616a986-config\") pod \"c224b81e-d5b0-4b28-86ce-04a9f616a986\" (UID: \"c224b81e-d5b0-4b28-86ce-04a9f616a986\") " Jan 20 18:01:29 crc kubenswrapper[4558]: I0120 18:01:29.804981 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/c224b81e-d5b0-4b28-86ce-04a9f616a986-dnsmasq-svc\") pod \"c224b81e-d5b0-4b28-86ce-04a9f616a986\" (UID: \"c224b81e-d5b0-4b28-86ce-04a9f616a986\") " Jan 20 18:01:29 crc kubenswrapper[4558]: I0120 18:01:29.809590 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c224b81e-d5b0-4b28-86ce-04a9f616a986-kube-api-access-2jqc9" (OuterVolumeSpecName: "kube-api-access-2jqc9") pod "c224b81e-d5b0-4b28-86ce-04a9f616a986" (UID: "c224b81e-d5b0-4b28-86ce-04a9f616a986"). InnerVolumeSpecName "kube-api-access-2jqc9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:01:29 crc kubenswrapper[4558]: I0120 18:01:29.827996 4558 scope.go:117] "RemoveContainer" containerID="649d6dfbc7da772af01bf0565e0467026756c4ec2fe3aed3c80157258c3eea09" Jan 20 18:01:29 crc kubenswrapper[4558]: I0120 18:01:29.832386 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c224b81e-d5b0-4b28-86ce-04a9f616a986-edpm-compute-global" (OuterVolumeSpecName: "edpm-compute-global") pod "c224b81e-d5b0-4b28-86ce-04a9f616a986" (UID: "c224b81e-d5b0-4b28-86ce-04a9f616a986"). InnerVolumeSpecName "edpm-compute-global". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:01:29 crc kubenswrapper[4558]: I0120 18:01:29.832679 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c224b81e-d5b0-4b28-86ce-04a9f616a986-config" (OuterVolumeSpecName: "config") pod "c224b81e-d5b0-4b28-86ce-04a9f616a986" (UID: "c224b81e-d5b0-4b28-86ce-04a9f616a986"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:01:29 crc kubenswrapper[4558]: I0120 18:01:29.835420 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c224b81e-d5b0-4b28-86ce-04a9f616a986-dnsmasq-svc" (OuterVolumeSpecName: "dnsmasq-svc") pod "c224b81e-d5b0-4b28-86ce-04a9f616a986" (UID: "c224b81e-d5b0-4b28-86ce-04a9f616a986"). InnerVolumeSpecName "dnsmasq-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:01:29 crc kubenswrapper[4558]: I0120 18:01:29.847876 4558 scope.go:117] "RemoveContainer" containerID="6d7ee733551241798b4cb21ccf1011d8596e7dc990e0e63f7573c7daa5faee56" Jan 20 18:01:29 crc kubenswrapper[4558]: I0120 18:01:29.868092 4558 scope.go:117] "RemoveContainer" containerID="8ad2ad62dfdac1505d231618bd110cbb2e4eac1446e02a350103d8e8e6967718" Jan 20 18:01:29 crc kubenswrapper[4558]: I0120 18:01:29.891840 4558 scope.go:117] "RemoveContainer" containerID="d153a6bcec9439654a1df7946e305afa4bba3742b0516f4131dfe8375eb4499e" Jan 20 18:01:29 crc kubenswrapper[4558]: I0120 18:01:29.905850 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2jqc9\" (UniqueName: \"kubernetes.io/projected/c224b81e-d5b0-4b28-86ce-04a9f616a986-kube-api-access-2jqc9\") on node \"crc\" DevicePath \"\"" Jan 20 18:01:29 crc kubenswrapper[4558]: I0120 18:01:29.905879 4558 reconciler_common.go:293] "Volume detached for volume \"edpm-compute-global\" (UniqueName: \"kubernetes.io/configmap/c224b81e-d5b0-4b28-86ce-04a9f616a986-edpm-compute-global\") on node \"crc\" DevicePath \"\"" Jan 20 18:01:29 crc kubenswrapper[4558]: I0120 18:01:29.905890 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c224b81e-d5b0-4b28-86ce-04a9f616a986-config\") on node \"crc\" DevicePath \"\"" Jan 20 18:01:29 crc kubenswrapper[4558]: I0120 18:01:29.905901 4558 reconciler_common.go:293] "Volume detached for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/c224b81e-d5b0-4b28-86ce-04a9f616a986-dnsmasq-svc\") on node \"crc\" DevicePath \"\"" Jan 20 18:01:29 crc kubenswrapper[4558]: I0120 18:01:29.909411 4558 scope.go:117] "RemoveContainer" containerID="fa759d890ffd5088e470c2dfd8e7b87cb410350e5d1a45277f73bb96721a0561" Jan 20 18:01:29 crc kubenswrapper[4558]: I0120 18:01:29.926723 4558 scope.go:117] "RemoveContainer" containerID="7f55f22bc9fb124084f5b5d83ec240d39b440361ab6e1fb735a7109baba3a09c" Jan 20 18:01:29 crc kubenswrapper[4558]: I0120 18:01:29.963443 4558 scope.go:117] "RemoveContainer" containerID="ce08a2498230a0eb01bce0d7e2ee1409b7ef0c15376d57223ae2e67fa27da942" Jan 20 18:01:29 crc kubenswrapper[4558]: I0120 18:01:29.978477 4558 scope.go:117] "RemoveContainer" containerID="3d23b588456f64012fb1649e6aa289cb457c88d5ad66f1354c313644bf1a8a97" Jan 20 18:01:29 crc kubenswrapper[4558]: I0120 18:01:29.997390 4558 scope.go:117] "RemoveContainer" containerID="75c122801e2470bd8e501bddfa6a0f20a2d2dbaa3393df3997b6e399b869bab6" Jan 20 18:01:30 crc kubenswrapper[4558]: I0120 18:01:30.015509 4558 generic.go:334] "Generic (PLEG): container finished" 
podID="c224b81e-d5b0-4b28-86ce-04a9f616a986" containerID="53550b6bbab1413ad18d77360ae821adcce85e9150bdbdcaf3e557bd7027eb4d" exitCode=0 Jan 20 18:01:30 crc kubenswrapper[4558]: I0120 18:01:30.015573 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7d78464677-qck2g" event={"ID":"c224b81e-d5b0-4b28-86ce-04a9f616a986","Type":"ContainerDied","Data":"53550b6bbab1413ad18d77360ae821adcce85e9150bdbdcaf3e557bd7027eb4d"} Jan 20 18:01:30 crc kubenswrapper[4558]: I0120 18:01:30.015661 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7d78464677-qck2g" event={"ID":"c224b81e-d5b0-4b28-86ce-04a9f616a986","Type":"ContainerDied","Data":"64d26cd851b623ce459914a4700384bdaaa05b023ac150b3e9eb6301c89fd693"} Jan 20 18:01:30 crc kubenswrapper[4558]: I0120 18:01:30.015697 4558 scope.go:117] "RemoveContainer" containerID="53550b6bbab1413ad18d77360ae821adcce85e9150bdbdcaf3e557bd7027eb4d" Jan 20 18:01:30 crc kubenswrapper[4558]: I0120 18:01:30.015818 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-7d78464677-qck2g" Jan 20 18:01:30 crc kubenswrapper[4558]: I0120 18:01:30.019967 4558 scope.go:117] "RemoveContainer" containerID="1efa0468b0518cd84740b23884a4369481e10910db84d1b937611c7aed424709" Jan 20 18:01:30 crc kubenswrapper[4558]: I0120 18:01:30.034720 4558 scope.go:117] "RemoveContainer" containerID="7461946b9929270e3fffbe2650f18238c1490db415b83867a5e299f29ef5b3f6" Jan 20 18:01:30 crc kubenswrapper[4558]: I0120 18:01:30.047637 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-7d78464677-qck2g"] Jan 20 18:01:30 crc kubenswrapper[4558]: I0120 18:01:30.052054 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-7d78464677-qck2g"] Jan 20 18:01:30 crc kubenswrapper[4558]: I0120 18:01:30.099046 4558 scope.go:117] "RemoveContainer" containerID="53550b6bbab1413ad18d77360ae821adcce85e9150bdbdcaf3e557bd7027eb4d" Jan 20 18:01:30 crc kubenswrapper[4558]: E0120 18:01:30.099661 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"53550b6bbab1413ad18d77360ae821adcce85e9150bdbdcaf3e557bd7027eb4d\": container with ID starting with 53550b6bbab1413ad18d77360ae821adcce85e9150bdbdcaf3e557bd7027eb4d not found: ID does not exist" containerID="53550b6bbab1413ad18d77360ae821adcce85e9150bdbdcaf3e557bd7027eb4d" Jan 20 18:01:30 crc kubenswrapper[4558]: I0120 18:01:30.099775 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"53550b6bbab1413ad18d77360ae821adcce85e9150bdbdcaf3e557bd7027eb4d"} err="failed to get container status \"53550b6bbab1413ad18d77360ae821adcce85e9150bdbdcaf3e557bd7027eb4d\": rpc error: code = NotFound desc = could not find container \"53550b6bbab1413ad18d77360ae821adcce85e9150bdbdcaf3e557bd7027eb4d\": container with ID starting with 53550b6bbab1413ad18d77360ae821adcce85e9150bdbdcaf3e557bd7027eb4d not found: ID does not exist" Jan 20 18:01:30 crc kubenswrapper[4558]: I0120 18:01:30.099868 4558 scope.go:117] "RemoveContainer" containerID="7461946b9929270e3fffbe2650f18238c1490db415b83867a5e299f29ef5b3f6" Jan 20 18:01:30 crc kubenswrapper[4558]: E0120 18:01:30.100491 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"7461946b9929270e3fffbe2650f18238c1490db415b83867a5e299f29ef5b3f6\": container with ID starting with 7461946b9929270e3fffbe2650f18238c1490db415b83867a5e299f29ef5b3f6 not found: ID does not exist" containerID="7461946b9929270e3fffbe2650f18238c1490db415b83867a5e299f29ef5b3f6" Jan 20 18:01:30 crc kubenswrapper[4558]: I0120 18:01:30.100582 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7461946b9929270e3fffbe2650f18238c1490db415b83867a5e299f29ef5b3f6"} err="failed to get container status \"7461946b9929270e3fffbe2650f18238c1490db415b83867a5e299f29ef5b3f6\": rpc error: code = NotFound desc = could not find container \"7461946b9929270e3fffbe2650f18238c1490db415b83867a5e299f29ef5b3f6\": container with ID starting with 7461946b9929270e3fffbe2650f18238c1490db415b83867a5e299f29ef5b3f6 not found: ID does not exist" Jan 20 18:01:30 crc kubenswrapper[4558]: I0120 18:01:30.124090 4558 scope.go:117] "RemoveContainer" containerID="a7e5224c23551340586e780af89b39957adde43c70c7ae7717575e2e550d32e6" Jan 20 18:01:30 crc kubenswrapper[4558]: I0120 18:01:30.140699 4558 scope.go:117] "RemoveContainer" containerID="63b1e89156b08be7c97cc6cd6b0955544bb87d107fd929728f51e6460408e3ae" Jan 20 18:01:30 crc kubenswrapper[4558]: I0120 18:01:30.156658 4558 scope.go:117] "RemoveContainer" containerID="91605227d295d861286db724135f07f88f4d737cec1e406e2f078ef2ee97ac12" Jan 20 18:01:30 crc kubenswrapper[4558]: I0120 18:01:30.169751 4558 scope.go:117] "RemoveContainer" containerID="593ca97090e4c8e828391df9bb4f851893cd4f22b0b607f43e4655293c6f5980" Jan 20 18:01:30 crc kubenswrapper[4558]: I0120 18:01:30.188611 4558 scope.go:117] "RemoveContainer" containerID="420b7880db84675aa4940091f0c98f07078b76d3e87eb3eb2eb3be3c8526f0cb" Jan 20 18:01:30 crc kubenswrapper[4558]: I0120 18:01:30.204788 4558 scope.go:117] "RemoveContainer" containerID="87f3a4508304b705d3874ba8981a4056c534e97560a2e9d54101a3c112b26f50" Jan 20 18:01:30 crc kubenswrapper[4558]: I0120 18:01:30.223249 4558 scope.go:117] "RemoveContainer" containerID="a67a173b20f463bc4ddc6c13a0332946f983d41b27131157848d410f84fe841b" Jan 20 18:01:30 crc kubenswrapper[4558]: I0120 18:01:30.240518 4558 scope.go:117] "RemoveContainer" containerID="1b366f4687deb8288f3ee86256c3df9164bf2bb6ef6e21181b282edad8963350" Jan 20 18:01:30 crc kubenswrapper[4558]: I0120 18:01:30.257627 4558 scope.go:117] "RemoveContainer" containerID="2b4a0c193260cb0e104fe9e95d28f9d5b353c0fbea727e60067b79e8342e911c" Jan 20 18:01:30 crc kubenswrapper[4558]: I0120 18:01:30.285666 4558 scope.go:117] "RemoveContainer" containerID="f88ad017f2098e35ed1d0c224b9e2095984e92f4d2ea3271b416a0d3a18de1f0" Jan 20 18:01:30 crc kubenswrapper[4558]: I0120 18:01:30.302773 4558 scope.go:117] "RemoveContainer" containerID="640b8b31652d22d5361c56898f4108c3cc9d07fa0b6e09405e8ee47586a8b2ee" Jan 20 18:01:30 crc kubenswrapper[4558]: I0120 18:01:30.327214 4558 scope.go:117] "RemoveContainer" containerID="8d1066d3fad6d8ee52adbc7179c0737528978fefd362036c2b152fa8b47efd48" Jan 20 18:01:30 crc kubenswrapper[4558]: I0120 18:01:30.364924 4558 scope.go:117] "RemoveContainer" containerID="513cf7b607f4c5f76a98e28f72ed3a3e4460bfb0341cd77104ff3e1a715e39f3" Jan 20 18:01:30 crc kubenswrapper[4558]: I0120 18:01:30.396110 4558 scope.go:117] "RemoveContainer" containerID="ac6033078eae9001c5e05c030c44ecfad51e5afe870494274353a4d660aecfbd" Jan 20 18:01:30 crc kubenswrapper[4558]: I0120 18:01:30.433676 4558 scope.go:117] "RemoveContainer" containerID="8a87e25ff9ae7de7c19892601f394f9d10fa27cee39a8831c6bd12dcb8c3655b" 
Jan 20 18:01:30 crc kubenswrapper[4558]: I0120 18:01:30.491468 4558 scope.go:117] "RemoveContainer" containerID="9f92bbadf6b5978b2ec804e4ad254a0133f5ddc1c7a0c908a984ae2727969174" Jan 20 18:01:30 crc kubenswrapper[4558]: I0120 18:01:30.507382 4558 scope.go:117] "RemoveContainer" containerID="678ea6abd080fbce2550848f03b87a5c11341574e2839169896c148295bf7600" Jan 20 18:01:30 crc kubenswrapper[4558]: I0120 18:01:30.529183 4558 scope.go:117] "RemoveContainer" containerID="411b4aa95f6e968ca6e019afae9f0a8a0c363318723d95b4e9ee96441a29221b" Jan 20 18:01:30 crc kubenswrapper[4558]: I0120 18:01:30.550149 4558 scope.go:117] "RemoveContainer" containerID="5168f32fec815124d42587fac8ad3488678817be8cf8058893ebb4def1511256" Jan 20 18:01:30 crc kubenswrapper[4558]: I0120 18:01:30.578378 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c224b81e-d5b0-4b28-86ce-04a9f616a986" path="/var/lib/kubelet/pods/c224b81e-d5b0-4b28-86ce-04a9f616a986/volumes" Jan 20 18:01:33 crc kubenswrapper[4558]: I0120 18:01:33.889373 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-global-2jlgt"] Jan 20 18:01:33 crc kubenswrapper[4558]: E0120 18:01:33.889969 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c224b81e-d5b0-4b28-86ce-04a9f616a986" containerName="dnsmasq-dns" Jan 20 18:01:33 crc kubenswrapper[4558]: I0120 18:01:33.889983 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c224b81e-d5b0-4b28-86ce-04a9f616a986" containerName="dnsmasq-dns" Jan 20 18:01:33 crc kubenswrapper[4558]: E0120 18:01:33.889998 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c224b81e-d5b0-4b28-86ce-04a9f616a986" containerName="init" Jan 20 18:01:33 crc kubenswrapper[4558]: I0120 18:01:33.890003 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c224b81e-d5b0-4b28-86ce-04a9f616a986" containerName="init" Jan 20 18:01:33 crc kubenswrapper[4558]: I0120 18:01:33.890156 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c224b81e-d5b0-4b28-86ce-04a9f616a986" containerName="dnsmasq-dns" Jan 20 18:01:33 crc kubenswrapper[4558]: I0120 18:01:33.890620 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-global-2jlgt" Jan 20 18:01:33 crc kubenswrapper[4558]: I0120 18:01:33.892614 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 18:01:33 crc kubenswrapper[4558]: I0120 18:01:33.892979 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-edpm-compute-global" Jan 20 18:01:33 crc kubenswrapper[4558]: I0120 18:01:33.893121 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 18:01:33 crc kubenswrapper[4558]: I0120 18:01:33.893443 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"edpm-compute-global-dockercfg-wkdxg" Jan 20 18:01:33 crc kubenswrapper[4558]: I0120 18:01:33.900400 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-beta-nodesetxsmms"] Jan 20 18:01:33 crc kubenswrapper[4558]: I0120 18:01:33.901513 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-beta-nodesetxsmms" Jan 20 18:01:33 crc kubenswrapper[4558]: I0120 18:01:33.903777 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-edpm-compute-beta-nodeset" Jan 20 18:01:33 crc kubenswrapper[4558]: I0120 18:01:33.904085 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"edpm-compute-beta-nodeset-dockercfg-99l57" Jan 20 18:01:33 crc kubenswrapper[4558]: I0120 18:01:33.906254 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-global-2jlgt"] Jan 20 18:01:33 crc kubenswrapper[4558]: I0120 18:01:33.910996 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-beta-nodesetxsmms"] Jan 20 18:01:33 crc kubenswrapper[4558]: I0120 18:01:33.965401 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f3e4cbee-8cb9-4f90-8f78-2fd3c9d5d9e3-inventory\") pod \"download-cache-edpm-multinodeset-edpm-compute-beta-nodesetxsmms\" (UID: \"f3e4cbee-8cb9-4f90-8f78-2fd3c9d5d9e3\") " pod="openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-beta-nodesetxsmms" Jan 20 18:01:33 crc kubenswrapper[4558]: I0120 18:01:33.965445 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/565265a9-d1cc-478d-95d0-6ab9c7e2d60f-ssh-key-edpm-compute-global\") pod \"download-cache-edpm-multinodeset-edpm-compute-global-2jlgt\" (UID: \"565265a9-d1cc-478d-95d0-6ab9c7e2d60f\") " pod="openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-global-2jlgt" Jan 20 18:01:33 crc kubenswrapper[4558]: I0120 18:01:33.965488 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xmz9g\" (UniqueName: \"kubernetes.io/projected/565265a9-d1cc-478d-95d0-6ab9c7e2d60f-kube-api-access-xmz9g\") pod \"download-cache-edpm-multinodeset-edpm-compute-global-2jlgt\" (UID: \"565265a9-d1cc-478d-95d0-6ab9c7e2d60f\") " pod="openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-global-2jlgt" Jan 20 18:01:33 crc kubenswrapper[4558]: I0120 18:01:33.965509 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-edpm-compute-beta-nodeset\" (UniqueName: \"kubernetes.io/secret/f3e4cbee-8cb9-4f90-8f78-2fd3c9d5d9e3-ssh-key-edpm-compute-beta-nodeset\") pod \"download-cache-edpm-multinodeset-edpm-compute-beta-nodesetxsmms\" (UID: \"f3e4cbee-8cb9-4f90-8f78-2fd3c9d5d9e3\") " pod="openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-beta-nodesetxsmms" Jan 20 18:01:33 crc kubenswrapper[4558]: I0120 18:01:33.965551 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-479z6\" (UniqueName: \"kubernetes.io/projected/f3e4cbee-8cb9-4f90-8f78-2fd3c9d5d9e3-kube-api-access-479z6\") pod \"download-cache-edpm-multinodeset-edpm-compute-beta-nodesetxsmms\" (UID: \"f3e4cbee-8cb9-4f90-8f78-2fd3c9d5d9e3\") " pod="openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-beta-nodesetxsmms" Jan 20 18:01:33 crc kubenswrapper[4558]: I0120 18:01:33.965632 4558 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/565265a9-d1cc-478d-95d0-6ab9c7e2d60f-inventory\") pod \"download-cache-edpm-multinodeset-edpm-compute-global-2jlgt\" (UID: \"565265a9-d1cc-478d-95d0-6ab9c7e2d60f\") " pod="openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-global-2jlgt" Jan 20 18:01:34 crc kubenswrapper[4558]: I0120 18:01:34.066662 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f3e4cbee-8cb9-4f90-8f78-2fd3c9d5d9e3-inventory\") pod \"download-cache-edpm-multinodeset-edpm-compute-beta-nodesetxsmms\" (UID: \"f3e4cbee-8cb9-4f90-8f78-2fd3c9d5d9e3\") " pod="openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-beta-nodesetxsmms" Jan 20 18:01:34 crc kubenswrapper[4558]: I0120 18:01:34.066745 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/565265a9-d1cc-478d-95d0-6ab9c7e2d60f-ssh-key-edpm-compute-global\") pod \"download-cache-edpm-multinodeset-edpm-compute-global-2jlgt\" (UID: \"565265a9-d1cc-478d-95d0-6ab9c7e2d60f\") " pod="openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-global-2jlgt" Jan 20 18:01:34 crc kubenswrapper[4558]: I0120 18:01:34.066788 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xmz9g\" (UniqueName: \"kubernetes.io/projected/565265a9-d1cc-478d-95d0-6ab9c7e2d60f-kube-api-access-xmz9g\") pod \"download-cache-edpm-multinodeset-edpm-compute-global-2jlgt\" (UID: \"565265a9-d1cc-478d-95d0-6ab9c7e2d60f\") " pod="openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-global-2jlgt" Jan 20 18:01:34 crc kubenswrapper[4558]: I0120 18:01:34.066810 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-edpm-compute-beta-nodeset\" (UniqueName: \"kubernetes.io/secret/f3e4cbee-8cb9-4f90-8f78-2fd3c9d5d9e3-ssh-key-edpm-compute-beta-nodeset\") pod \"download-cache-edpm-multinodeset-edpm-compute-beta-nodesetxsmms\" (UID: \"f3e4cbee-8cb9-4f90-8f78-2fd3c9d5d9e3\") " pod="openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-beta-nodesetxsmms" Jan 20 18:01:34 crc kubenswrapper[4558]: I0120 18:01:34.066864 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-479z6\" (UniqueName: \"kubernetes.io/projected/f3e4cbee-8cb9-4f90-8f78-2fd3c9d5d9e3-kube-api-access-479z6\") pod \"download-cache-edpm-multinodeset-edpm-compute-beta-nodesetxsmms\" (UID: \"f3e4cbee-8cb9-4f90-8f78-2fd3c9d5d9e3\") " pod="openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-beta-nodesetxsmms" Jan 20 18:01:34 crc kubenswrapper[4558]: I0120 18:01:34.066924 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/565265a9-d1cc-478d-95d0-6ab9c7e2d60f-inventory\") pod \"download-cache-edpm-multinodeset-edpm-compute-global-2jlgt\" (UID: \"565265a9-d1cc-478d-95d0-6ab9c7e2d60f\") " pod="openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-global-2jlgt" Jan 20 18:01:34 crc kubenswrapper[4558]: I0120 18:01:34.074625 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f3e4cbee-8cb9-4f90-8f78-2fd3c9d5d9e3-inventory\") pod \"download-cache-edpm-multinodeset-edpm-compute-beta-nodesetxsmms\" (UID: 
\"f3e4cbee-8cb9-4f90-8f78-2fd3c9d5d9e3\") " pod="openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-beta-nodesetxsmms" Jan 20 18:01:34 crc kubenswrapper[4558]: I0120 18:01:34.074947 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-edpm-compute-beta-nodeset\" (UniqueName: \"kubernetes.io/secret/f3e4cbee-8cb9-4f90-8f78-2fd3c9d5d9e3-ssh-key-edpm-compute-beta-nodeset\") pod \"download-cache-edpm-multinodeset-edpm-compute-beta-nodesetxsmms\" (UID: \"f3e4cbee-8cb9-4f90-8f78-2fd3c9d5d9e3\") " pod="openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-beta-nodesetxsmms" Jan 20 18:01:34 crc kubenswrapper[4558]: I0120 18:01:34.075050 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/565265a9-d1cc-478d-95d0-6ab9c7e2d60f-inventory\") pod \"download-cache-edpm-multinodeset-edpm-compute-global-2jlgt\" (UID: \"565265a9-d1cc-478d-95d0-6ab9c7e2d60f\") " pod="openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-global-2jlgt" Jan 20 18:01:34 crc kubenswrapper[4558]: I0120 18:01:34.075262 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/565265a9-d1cc-478d-95d0-6ab9c7e2d60f-ssh-key-edpm-compute-global\") pod \"download-cache-edpm-multinodeset-edpm-compute-global-2jlgt\" (UID: \"565265a9-d1cc-478d-95d0-6ab9c7e2d60f\") " pod="openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-global-2jlgt" Jan 20 18:01:34 crc kubenswrapper[4558]: I0120 18:01:34.083743 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-479z6\" (UniqueName: \"kubernetes.io/projected/f3e4cbee-8cb9-4f90-8f78-2fd3c9d5d9e3-kube-api-access-479z6\") pod \"download-cache-edpm-multinodeset-edpm-compute-beta-nodesetxsmms\" (UID: \"f3e4cbee-8cb9-4f90-8f78-2fd3c9d5d9e3\") " pod="openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-beta-nodesetxsmms" Jan 20 18:01:34 crc kubenswrapper[4558]: I0120 18:01:34.084154 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xmz9g\" (UniqueName: \"kubernetes.io/projected/565265a9-d1cc-478d-95d0-6ab9c7e2d60f-kube-api-access-xmz9g\") pod \"download-cache-edpm-multinodeset-edpm-compute-global-2jlgt\" (UID: \"565265a9-d1cc-478d-95d0-6ab9c7e2d60f\") " pod="openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-global-2jlgt" Jan 20 18:01:34 crc kubenswrapper[4558]: I0120 18:01:34.207444 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-global-2jlgt" Jan 20 18:01:34 crc kubenswrapper[4558]: I0120 18:01:34.215920 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-beta-nodesetxsmms" Jan 20 18:01:34 crc kubenswrapper[4558]: I0120 18:01:34.633150 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-beta-nodesetxsmms"] Jan 20 18:01:34 crc kubenswrapper[4558]: I0120 18:01:34.669347 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-global-2jlgt"] Jan 20 18:01:34 crc kubenswrapper[4558]: W0120 18:01:34.670039 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod565265a9_d1cc_478d_95d0_6ab9c7e2d60f.slice/crio-1f011388e6b306b806c5a1b0896f48de699278bcfae400a53e9426df58db0923 WatchSource:0}: Error finding container 1f011388e6b306b806c5a1b0896f48de699278bcfae400a53e9426df58db0923: Status 404 returned error can't find the container with id 1f011388e6b306b806c5a1b0896f48de699278bcfae400a53e9426df58db0923 Jan 20 18:01:35 crc kubenswrapper[4558]: I0120 18:01:35.083541 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-global-2jlgt" event={"ID":"565265a9-d1cc-478d-95d0-6ab9c7e2d60f","Type":"ContainerStarted","Data":"1f011388e6b306b806c5a1b0896f48de699278bcfae400a53e9426df58db0923"} Jan 20 18:01:35 crc kubenswrapper[4558]: I0120 18:01:35.084965 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-beta-nodesetxsmms" event={"ID":"f3e4cbee-8cb9-4f90-8f78-2fd3c9d5d9e3","Type":"ContainerStarted","Data":"4a1e8e52177042ca0190c2b886eada7e741c2893b38c39d5e169c890b7e6d66e"} Jan 20 18:01:36 crc kubenswrapper[4558]: I0120 18:01:36.095374 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-global-2jlgt" event={"ID":"565265a9-d1cc-478d-95d0-6ab9c7e2d60f","Type":"ContainerStarted","Data":"0a753af56b757f6bc5348351bfd3b389aa51323a3a5c3556062e047b14e49abf"} Jan 20 18:01:36 crc kubenswrapper[4558]: I0120 18:01:36.098806 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-beta-nodesetxsmms" event={"ID":"f3e4cbee-8cb9-4f90-8f78-2fd3c9d5d9e3","Type":"ContainerStarted","Data":"2f4911e27581efd12811ebf25498eff366913a3d353db9bc01e191405fa119d3"} Jan 20 18:01:36 crc kubenswrapper[4558]: I0120 18:01:36.116398 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-global-2jlgt" podStartSLOduration=2.560182475 podStartE2EDuration="3.116382847s" podCreationTimestamp="2026-01-20 18:01:33 +0000 UTC" firstStartedPulling="2026-01-20 18:01:34.672546261 +0000 UTC m=+4788.432884229" lastFinishedPulling="2026-01-20 18:01:35.228746633 +0000 UTC m=+4788.989084601" observedRunningTime="2026-01-20 18:01:36.111535114 +0000 UTC m=+4789.871873081" watchObservedRunningTime="2026-01-20 18:01:36.116382847 +0000 UTC m=+4789.876720814" Jan 20 18:01:36 crc kubenswrapper[4558]: I0120 18:01:36.140691 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-beta-nodesetxsmms" podStartSLOduration=2.518074173 podStartE2EDuration="3.140672665s" podCreationTimestamp="2026-01-20 18:01:33 +0000 UTC" 
firstStartedPulling="2026-01-20 18:01:34.639359118 +0000 UTC m=+4788.399697086" lastFinishedPulling="2026-01-20 18:01:35.261957611 +0000 UTC m=+4789.022295578" observedRunningTime="2026-01-20 18:01:36.136965616 +0000 UTC m=+4789.897303583" watchObservedRunningTime="2026-01-20 18:01:36.140672665 +0000 UTC m=+4789.901010633" Jan 20 18:01:37 crc kubenswrapper[4558]: I0120 18:01:37.112850 4558 generic.go:334] "Generic (PLEG): container finished" podID="565265a9-d1cc-478d-95d0-6ab9c7e2d60f" containerID="0a753af56b757f6bc5348351bfd3b389aa51323a3a5c3556062e047b14e49abf" exitCode=0 Jan 20 18:01:37 crc kubenswrapper[4558]: I0120 18:01:37.112920 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-global-2jlgt" event={"ID":"565265a9-d1cc-478d-95d0-6ab9c7e2d60f","Type":"ContainerDied","Data":"0a753af56b757f6bc5348351bfd3b389aa51323a3a5c3556062e047b14e49abf"} Jan 20 18:01:37 crc kubenswrapper[4558]: I0120 18:01:37.114970 4558 generic.go:334] "Generic (PLEG): container finished" podID="f3e4cbee-8cb9-4f90-8f78-2fd3c9d5d9e3" containerID="2f4911e27581efd12811ebf25498eff366913a3d353db9bc01e191405fa119d3" exitCode=0 Jan 20 18:01:37 crc kubenswrapper[4558]: I0120 18:01:37.115022 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-beta-nodesetxsmms" event={"ID":"f3e4cbee-8cb9-4f90-8f78-2fd3c9d5d9e3","Type":"ContainerDied","Data":"2f4911e27581efd12811ebf25498eff366913a3d353db9bc01e191405fa119d3"} Jan 20 18:01:38 crc kubenswrapper[4558]: I0120 18:01:38.417744 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-global-2jlgt" Jan 20 18:01:38 crc kubenswrapper[4558]: I0120 18:01:38.422829 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-beta-nodesetxsmms" Jan 20 18:01:38 crc kubenswrapper[4558]: I0120 18:01:38.533663 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f3e4cbee-8cb9-4f90-8f78-2fd3c9d5d9e3-inventory\") pod \"f3e4cbee-8cb9-4f90-8f78-2fd3c9d5d9e3\" (UID: \"f3e4cbee-8cb9-4f90-8f78-2fd3c9d5d9e3\") " Jan 20 18:01:38 crc kubenswrapper[4558]: I0120 18:01:38.533715 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-beta-nodeset\" (UniqueName: \"kubernetes.io/secret/f3e4cbee-8cb9-4f90-8f78-2fd3c9d5d9e3-ssh-key-edpm-compute-beta-nodeset\") pod \"f3e4cbee-8cb9-4f90-8f78-2fd3c9d5d9e3\" (UID: \"f3e4cbee-8cb9-4f90-8f78-2fd3c9d5d9e3\") " Jan 20 18:01:38 crc kubenswrapper[4558]: I0120 18:01:38.533780 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-479z6\" (UniqueName: \"kubernetes.io/projected/f3e4cbee-8cb9-4f90-8f78-2fd3c9d5d9e3-kube-api-access-479z6\") pod \"f3e4cbee-8cb9-4f90-8f78-2fd3c9d5d9e3\" (UID: \"f3e4cbee-8cb9-4f90-8f78-2fd3c9d5d9e3\") " Jan 20 18:01:38 crc kubenswrapper[4558]: I0120 18:01:38.533804 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/565265a9-d1cc-478d-95d0-6ab9c7e2d60f-inventory\") pod \"565265a9-d1cc-478d-95d0-6ab9c7e2d60f\" (UID: \"565265a9-d1cc-478d-95d0-6ab9c7e2d60f\") " Jan 20 18:01:38 crc kubenswrapper[4558]: I0120 18:01:38.533853 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xmz9g\" (UniqueName: \"kubernetes.io/projected/565265a9-d1cc-478d-95d0-6ab9c7e2d60f-kube-api-access-xmz9g\") pod \"565265a9-d1cc-478d-95d0-6ab9c7e2d60f\" (UID: \"565265a9-d1cc-478d-95d0-6ab9c7e2d60f\") " Jan 20 18:01:38 crc kubenswrapper[4558]: I0120 18:01:38.533885 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/565265a9-d1cc-478d-95d0-6ab9c7e2d60f-ssh-key-edpm-compute-global\") pod \"565265a9-d1cc-478d-95d0-6ab9c7e2d60f\" (UID: \"565265a9-d1cc-478d-95d0-6ab9c7e2d60f\") " Jan 20 18:01:38 crc kubenswrapper[4558]: I0120 18:01:38.540682 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/565265a9-d1cc-478d-95d0-6ab9c7e2d60f-kube-api-access-xmz9g" (OuterVolumeSpecName: "kube-api-access-xmz9g") pod "565265a9-d1cc-478d-95d0-6ab9c7e2d60f" (UID: "565265a9-d1cc-478d-95d0-6ab9c7e2d60f"). InnerVolumeSpecName "kube-api-access-xmz9g". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:01:38 crc kubenswrapper[4558]: I0120 18:01:38.541000 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f3e4cbee-8cb9-4f90-8f78-2fd3c9d5d9e3-kube-api-access-479z6" (OuterVolumeSpecName: "kube-api-access-479z6") pod "f3e4cbee-8cb9-4f90-8f78-2fd3c9d5d9e3" (UID: "f3e4cbee-8cb9-4f90-8f78-2fd3c9d5d9e3"). InnerVolumeSpecName "kube-api-access-479z6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:01:38 crc kubenswrapper[4558]: I0120 18:01:38.555190 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/565265a9-d1cc-478d-95d0-6ab9c7e2d60f-inventory" (OuterVolumeSpecName: "inventory") pod "565265a9-d1cc-478d-95d0-6ab9c7e2d60f" (UID: "565265a9-d1cc-478d-95d0-6ab9c7e2d60f"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:01:38 crc kubenswrapper[4558]: I0120 18:01:38.555415 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f3e4cbee-8cb9-4f90-8f78-2fd3c9d5d9e3-ssh-key-edpm-compute-beta-nodeset" (OuterVolumeSpecName: "ssh-key-edpm-compute-beta-nodeset") pod "f3e4cbee-8cb9-4f90-8f78-2fd3c9d5d9e3" (UID: "f3e4cbee-8cb9-4f90-8f78-2fd3c9d5d9e3"). InnerVolumeSpecName "ssh-key-edpm-compute-beta-nodeset". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:01:38 crc kubenswrapper[4558]: I0120 18:01:38.556102 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/565265a9-d1cc-478d-95d0-6ab9c7e2d60f-ssh-key-edpm-compute-global" (OuterVolumeSpecName: "ssh-key-edpm-compute-global") pod "565265a9-d1cc-478d-95d0-6ab9c7e2d60f" (UID: "565265a9-d1cc-478d-95d0-6ab9c7e2d60f"). InnerVolumeSpecName "ssh-key-edpm-compute-global". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:01:38 crc kubenswrapper[4558]: I0120 18:01:38.556277 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f3e4cbee-8cb9-4f90-8f78-2fd3c9d5d9e3-inventory" (OuterVolumeSpecName: "inventory") pod "f3e4cbee-8cb9-4f90-8f78-2fd3c9d5d9e3" (UID: "f3e4cbee-8cb9-4f90-8f78-2fd3c9d5d9e3"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:01:38 crc kubenswrapper[4558]: I0120 18:01:38.635960 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-479z6\" (UniqueName: \"kubernetes.io/projected/f3e4cbee-8cb9-4f90-8f78-2fd3c9d5d9e3-kube-api-access-479z6\") on node \"crc\" DevicePath \"\"" Jan 20 18:01:38 crc kubenswrapper[4558]: I0120 18:01:38.635988 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/565265a9-d1cc-478d-95d0-6ab9c7e2d60f-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:01:38 crc kubenswrapper[4558]: I0120 18:01:38.636001 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xmz9g\" (UniqueName: \"kubernetes.io/projected/565265a9-d1cc-478d-95d0-6ab9c7e2d60f-kube-api-access-xmz9g\") on node \"crc\" DevicePath \"\"" Jan 20 18:01:38 crc kubenswrapper[4558]: I0120 18:01:38.636012 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/565265a9-d1cc-478d-95d0-6ab9c7e2d60f-ssh-key-edpm-compute-global\") on node \"crc\" DevicePath \"\"" Jan 20 18:01:38 crc kubenswrapper[4558]: I0120 18:01:38.636026 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f3e4cbee-8cb9-4f90-8f78-2fd3c9d5d9e3-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:01:38 crc kubenswrapper[4558]: I0120 18:01:38.636037 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-edpm-compute-beta-nodeset\" (UniqueName: \"kubernetes.io/secret/f3e4cbee-8cb9-4f90-8f78-2fd3c9d5d9e3-ssh-key-edpm-compute-beta-nodeset\") on node \"crc\" DevicePath \"\"" Jan 20 18:01:39 crc kubenswrapper[4558]: I0120 18:01:39.136521 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-global-2jlgt" event={"ID":"565265a9-d1cc-478d-95d0-6ab9c7e2d60f","Type":"ContainerDied","Data":"1f011388e6b306b806c5a1b0896f48de699278bcfae400a53e9426df58db0923"} Jan 20 18:01:39 crc kubenswrapper[4558]: I0120 18:01:39.136607 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1f011388e6b306b806c5a1b0896f48de699278bcfae400a53e9426df58db0923" Jan 20 18:01:39 crc kubenswrapper[4558]: I0120 18:01:39.136547 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-global-2jlgt" Jan 20 18:01:39 crc kubenswrapper[4558]: I0120 18:01:39.142328 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-beta-nodesetxsmms" event={"ID":"f3e4cbee-8cb9-4f90-8f78-2fd3c9d5d9e3","Type":"ContainerDied","Data":"4a1e8e52177042ca0190c2b886eada7e741c2893b38c39d5e169c890b7e6d66e"} Jan 20 18:01:39 crc kubenswrapper[4558]: I0120 18:01:39.142368 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-beta-nodesetxsmms" Jan 20 18:01:39 crc kubenswrapper[4558]: I0120 18:01:39.142371 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4a1e8e52177042ca0190c2b886eada7e741c2893b38c39d5e169c890b7e6d66e" Jan 20 18:01:39 crc kubenswrapper[4558]: I0120 18:01:39.190378 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-global-csl8v"] Jan 20 18:01:39 crc kubenswrapper[4558]: E0120 18:01:39.190713 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3e4cbee-8cb9-4f90-8f78-2fd3c9d5d9e3" containerName="download-cache-edpm-multinodeset-edpm-compute-beta-nodeset" Jan 20 18:01:39 crc kubenswrapper[4558]: I0120 18:01:39.190734 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3e4cbee-8cb9-4f90-8f78-2fd3c9d5d9e3" containerName="download-cache-edpm-multinodeset-edpm-compute-beta-nodeset" Jan 20 18:01:39 crc kubenswrapper[4558]: E0120 18:01:39.190745 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="565265a9-d1cc-478d-95d0-6ab9c7e2d60f" containerName="download-cache-edpm-multinodeset-edpm-compute-global" Jan 20 18:01:39 crc kubenswrapper[4558]: I0120 18:01:39.190752 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="565265a9-d1cc-478d-95d0-6ab9c7e2d60f" containerName="download-cache-edpm-multinodeset-edpm-compute-global" Jan 20 18:01:39 crc kubenswrapper[4558]: I0120 18:01:39.190892 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f3e4cbee-8cb9-4f90-8f78-2fd3c9d5d9e3" containerName="download-cache-edpm-multinodeset-edpm-compute-beta-nodeset" Jan 20 18:01:39 crc kubenswrapper[4558]: I0120 18:01:39.190929 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="565265a9-d1cc-478d-95d0-6ab9c7e2d60f" containerName="download-cache-edpm-multinodeset-edpm-compute-global" Jan 20 18:01:39 crc kubenswrapper[4558]: I0120 18:01:39.191405 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-global-csl8v" Jan 20 18:01:39 crc kubenswrapper[4558]: I0120 18:01:39.193762 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 18:01:39 crc kubenswrapper[4558]: I0120 18:01:39.193810 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 18:01:39 crc kubenswrapper[4558]: I0120 18:01:39.194050 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-edpm-compute-global" Jan 20 18:01:39 crc kubenswrapper[4558]: I0120 18:01:39.194119 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"combined-ca-bundle" Jan 20 18:01:39 crc kubenswrapper[4558]: I0120 18:01:39.194445 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"edpm-compute-global-dockercfg-wkdxg" Jan 20 18:01:39 crc kubenswrapper[4558]: I0120 18:01:39.201156 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-global-csl8v"] Jan 20 18:01:39 crc kubenswrapper[4558]: I0120 18:01:39.214940 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-k8bgg"] Jan 20 18:01:39 crc kubenswrapper[4558]: I0120 18:01:39.216207 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-k8bgg" Jan 20 18:01:39 crc kubenswrapper[4558]: I0120 18:01:39.218993 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"edpm-compute-beta-nodeset-dockercfg-99l57" Jan 20 18:01:39 crc kubenswrapper[4558]: I0120 18:01:39.219252 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-edpm-compute-beta-nodeset" Jan 20 18:01:39 crc kubenswrapper[4558]: I0120 18:01:39.232884 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-k8bgg"] Jan 20 18:01:39 crc kubenswrapper[4558]: I0120 18:01:39.244333 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ff4bae03-f99f-462a-8ded-9c48304110a1-inventory\") pod \"bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-k8bgg\" (UID: \"ff4bae03-f99f-462a-8ded-9c48304110a1\") " pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-k8bgg" Jan 20 18:01:39 crc kubenswrapper[4558]: I0120 18:01:39.244396 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff4bae03-f99f-462a-8ded-9c48304110a1-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-k8bgg\" (UID: \"ff4bae03-f99f-462a-8ded-9c48304110a1\") " pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-k8bgg" Jan 20 18:01:39 crc kubenswrapper[4558]: I0120 18:01:39.244440 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1d3f0500-fe34-435f-acfe-8f6be614ffb8-inventory\") pod 
\"bootstrap-edpm-multinodeset-edpm-compute-global-csl8v\" (UID: \"1d3f0500-fe34-435f-acfe-8f6be614ffb8\") " pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-global-csl8v" Jan 20 18:01:39 crc kubenswrapper[4558]: I0120 18:01:39.244466 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/1d3f0500-fe34-435f-acfe-8f6be614ffb8-ssh-key-edpm-compute-global\") pod \"bootstrap-edpm-multinodeset-edpm-compute-global-csl8v\" (UID: \"1d3f0500-fe34-435f-acfe-8f6be614ffb8\") " pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-global-csl8v" Jan 20 18:01:39 crc kubenswrapper[4558]: I0120 18:01:39.244488 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-edpm-compute-beta-nodeset\" (UniqueName: \"kubernetes.io/secret/ff4bae03-f99f-462a-8ded-9c48304110a1-ssh-key-edpm-compute-beta-nodeset\") pod \"bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-k8bgg\" (UID: \"ff4bae03-f99f-462a-8ded-9c48304110a1\") " pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-k8bgg" Jan 20 18:01:39 crc kubenswrapper[4558]: I0120 18:01:39.244514 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d3f0500-fe34-435f-acfe-8f6be614ffb8-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-multinodeset-edpm-compute-global-csl8v\" (UID: \"1d3f0500-fe34-435f-acfe-8f6be614ffb8\") " pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-global-csl8v" Jan 20 18:01:39 crc kubenswrapper[4558]: I0120 18:01:39.244540 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ms8kl\" (UniqueName: \"kubernetes.io/projected/1d3f0500-fe34-435f-acfe-8f6be614ffb8-kube-api-access-ms8kl\") pod \"bootstrap-edpm-multinodeset-edpm-compute-global-csl8v\" (UID: \"1d3f0500-fe34-435f-acfe-8f6be614ffb8\") " pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-global-csl8v" Jan 20 18:01:39 crc kubenswrapper[4558]: I0120 18:01:39.244562 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2jqgc\" (UniqueName: \"kubernetes.io/projected/ff4bae03-f99f-462a-8ded-9c48304110a1-kube-api-access-2jqgc\") pod \"bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-k8bgg\" (UID: \"ff4bae03-f99f-462a-8ded-9c48304110a1\") " pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-k8bgg" Jan 20 18:01:39 crc kubenswrapper[4558]: I0120 18:01:39.345626 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1d3f0500-fe34-435f-acfe-8f6be614ffb8-inventory\") pod \"bootstrap-edpm-multinodeset-edpm-compute-global-csl8v\" (UID: \"1d3f0500-fe34-435f-acfe-8f6be614ffb8\") " pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-global-csl8v" Jan 20 18:01:39 crc kubenswrapper[4558]: I0120 18:01:39.345669 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/1d3f0500-fe34-435f-acfe-8f6be614ffb8-ssh-key-edpm-compute-global\") pod \"bootstrap-edpm-multinodeset-edpm-compute-global-csl8v\" (UID: \"1d3f0500-fe34-435f-acfe-8f6be614ffb8\") " 
pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-global-csl8v" Jan 20 18:01:39 crc kubenswrapper[4558]: I0120 18:01:39.345700 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-edpm-compute-beta-nodeset\" (UniqueName: \"kubernetes.io/secret/ff4bae03-f99f-462a-8ded-9c48304110a1-ssh-key-edpm-compute-beta-nodeset\") pod \"bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-k8bgg\" (UID: \"ff4bae03-f99f-462a-8ded-9c48304110a1\") " pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-k8bgg" Jan 20 18:01:39 crc kubenswrapper[4558]: I0120 18:01:39.345731 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d3f0500-fe34-435f-acfe-8f6be614ffb8-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-multinodeset-edpm-compute-global-csl8v\" (UID: \"1d3f0500-fe34-435f-acfe-8f6be614ffb8\") " pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-global-csl8v" Jan 20 18:01:39 crc kubenswrapper[4558]: I0120 18:01:39.345759 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ms8kl\" (UniqueName: \"kubernetes.io/projected/1d3f0500-fe34-435f-acfe-8f6be614ffb8-kube-api-access-ms8kl\") pod \"bootstrap-edpm-multinodeset-edpm-compute-global-csl8v\" (UID: \"1d3f0500-fe34-435f-acfe-8f6be614ffb8\") " pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-global-csl8v" Jan 20 18:01:39 crc kubenswrapper[4558]: I0120 18:01:39.345787 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2jqgc\" (UniqueName: \"kubernetes.io/projected/ff4bae03-f99f-462a-8ded-9c48304110a1-kube-api-access-2jqgc\") pod \"bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-k8bgg\" (UID: \"ff4bae03-f99f-462a-8ded-9c48304110a1\") " pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-k8bgg" Jan 20 18:01:39 crc kubenswrapper[4558]: I0120 18:01:39.345830 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ff4bae03-f99f-462a-8ded-9c48304110a1-inventory\") pod \"bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-k8bgg\" (UID: \"ff4bae03-f99f-462a-8ded-9c48304110a1\") " pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-k8bgg" Jan 20 18:01:39 crc kubenswrapper[4558]: I0120 18:01:39.345865 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff4bae03-f99f-462a-8ded-9c48304110a1-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-k8bgg\" (UID: \"ff4bae03-f99f-462a-8ded-9c48304110a1\") " pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-k8bgg" Jan 20 18:01:39 crc kubenswrapper[4558]: I0120 18:01:39.360972 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff4bae03-f99f-462a-8ded-9c48304110a1-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-k8bgg\" (UID: \"ff4bae03-f99f-462a-8ded-9c48304110a1\") " pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-k8bgg" Jan 20 18:01:39 crc kubenswrapper[4558]: I0120 18:01:39.361349 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d3f0500-fe34-435f-acfe-8f6be614ffb8-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-multinodeset-edpm-compute-global-csl8v\" (UID: \"1d3f0500-fe34-435f-acfe-8f6be614ffb8\") " pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-global-csl8v" Jan 20 18:01:39 crc kubenswrapper[4558]: I0120 18:01:39.362346 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1d3f0500-fe34-435f-acfe-8f6be614ffb8-inventory\") pod \"bootstrap-edpm-multinodeset-edpm-compute-global-csl8v\" (UID: \"1d3f0500-fe34-435f-acfe-8f6be614ffb8\") " pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-global-csl8v" Jan 20 18:01:39 crc kubenswrapper[4558]: I0120 18:01:39.361001 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ff4bae03-f99f-462a-8ded-9c48304110a1-inventory\") pod \"bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-k8bgg\" (UID: \"ff4bae03-f99f-462a-8ded-9c48304110a1\") " pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-k8bgg" Jan 20 18:01:39 crc kubenswrapper[4558]: I0120 18:01:39.366316 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2jqgc\" (UniqueName: \"kubernetes.io/projected/ff4bae03-f99f-462a-8ded-9c48304110a1-kube-api-access-2jqgc\") pod \"bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-k8bgg\" (UID: \"ff4bae03-f99f-462a-8ded-9c48304110a1\") " pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-k8bgg" Jan 20 18:01:39 crc kubenswrapper[4558]: I0120 18:01:39.367088 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ms8kl\" (UniqueName: \"kubernetes.io/projected/1d3f0500-fe34-435f-acfe-8f6be614ffb8-kube-api-access-ms8kl\") pod \"bootstrap-edpm-multinodeset-edpm-compute-global-csl8v\" (UID: \"1d3f0500-fe34-435f-acfe-8f6be614ffb8\") " pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-global-csl8v" Jan 20 18:01:39 crc kubenswrapper[4558]: I0120 18:01:39.367740 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/1d3f0500-fe34-435f-acfe-8f6be614ffb8-ssh-key-edpm-compute-global\") pod \"bootstrap-edpm-multinodeset-edpm-compute-global-csl8v\" (UID: \"1d3f0500-fe34-435f-acfe-8f6be614ffb8\") " pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-global-csl8v" Jan 20 18:01:39 crc kubenswrapper[4558]: I0120 18:01:39.368305 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-edpm-compute-beta-nodeset\" (UniqueName: \"kubernetes.io/secret/ff4bae03-f99f-462a-8ded-9c48304110a1-ssh-key-edpm-compute-beta-nodeset\") pod \"bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-k8bgg\" (UID: \"ff4bae03-f99f-462a-8ded-9c48304110a1\") " pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-k8bgg" Jan 20 18:01:39 crc kubenswrapper[4558]: I0120 18:01:39.503094 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-global-csl8v" Jan 20 18:01:39 crc kubenswrapper[4558]: I0120 18:01:39.528601 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-k8bgg" Jan 20 18:01:39 crc kubenswrapper[4558]: I0120 18:01:39.918076 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-global-csl8v"] Jan 20 18:01:39 crc kubenswrapper[4558]: W0120 18:01:39.922460 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1d3f0500_fe34_435f_acfe_8f6be614ffb8.slice/crio-82f157b8e5419d1975d647b90bc12000b769c3f0bac22c120ae55aec4fc9a565 WatchSource:0}: Error finding container 82f157b8e5419d1975d647b90bc12000b769c3f0bac22c120ae55aec4fc9a565: Status 404 returned error can't find the container with id 82f157b8e5419d1975d647b90bc12000b769c3f0bac22c120ae55aec4fc9a565 Jan 20 18:01:39 crc kubenswrapper[4558]: I0120 18:01:39.949604 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-k8bgg"] Jan 20 18:01:39 crc kubenswrapper[4558]: W0120 18:01:39.951994 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podff4bae03_f99f_462a_8ded_9c48304110a1.slice/crio-bd2f36814f3d51c7027e492d6ce77291def5ddfaea64afaa019cb334fadf0e8b WatchSource:0}: Error finding container bd2f36814f3d51c7027e492d6ce77291def5ddfaea64afaa019cb334fadf0e8b: Status 404 returned error can't find the container with id bd2f36814f3d51c7027e492d6ce77291def5ddfaea64afaa019cb334fadf0e8b Jan 20 18:01:40 crc kubenswrapper[4558]: I0120 18:01:40.152990 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-k8bgg" event={"ID":"ff4bae03-f99f-462a-8ded-9c48304110a1","Type":"ContainerStarted","Data":"bd2f36814f3d51c7027e492d6ce77291def5ddfaea64afaa019cb334fadf0e8b"} Jan 20 18:01:40 crc kubenswrapper[4558]: I0120 18:01:40.154559 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-global-csl8v" event={"ID":"1d3f0500-fe34-435f-acfe-8f6be614ffb8","Type":"ContainerStarted","Data":"82f157b8e5419d1975d647b90bc12000b769c3f0bac22c120ae55aec4fc9a565"} Jan 20 18:01:41 crc kubenswrapper[4558]: I0120 18:01:41.165324 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-k8bgg" event={"ID":"ff4bae03-f99f-462a-8ded-9c48304110a1","Type":"ContainerStarted","Data":"f55b28c638183d096a150ac70c469c05ed01edf6198fb78f20a01896ce7f4508"} Jan 20 18:01:41 crc kubenswrapper[4558]: I0120 18:01:41.168272 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-global-csl8v" event={"ID":"1d3f0500-fe34-435f-acfe-8f6be614ffb8","Type":"ContainerStarted","Data":"170fc088791aa1343a57b7094aab97b4039b8fc0f4af19be12926ec07034f0c3"} Jan 20 18:01:41 crc kubenswrapper[4558]: I0120 18:01:41.187359 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-k8bgg" podStartSLOduration=1.4957128499999999 podStartE2EDuration="2.187333216s" podCreationTimestamp="2026-01-20 18:01:39 +0000 UTC" firstStartedPulling="2026-01-20 18:01:39.953774909 +0000 UTC m=+4793.714112876" lastFinishedPulling="2026-01-20 18:01:40.645395275 +0000 UTC m=+4794.405733242" 
observedRunningTime="2026-01-20 18:01:41.181084751 +0000 UTC m=+4794.941422708" watchObservedRunningTime="2026-01-20 18:01:41.187333216 +0000 UTC m=+4794.947671183" Jan 20 18:01:41 crc kubenswrapper[4558]: I0120 18:01:41.202736 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-global-csl8v" podStartSLOduration=1.5821073 podStartE2EDuration="2.202711314s" podCreationTimestamp="2026-01-20 18:01:39 +0000 UTC" firstStartedPulling="2026-01-20 18:01:39.924837054 +0000 UTC m=+4793.685175021" lastFinishedPulling="2026-01-20 18:01:40.545441067 +0000 UTC m=+4794.305779035" observedRunningTime="2026-01-20 18:01:41.197271498 +0000 UTC m=+4794.957609465" watchObservedRunningTime="2026-01-20 18:01:41.202711314 +0000 UTC m=+4794.963049280" Jan 20 18:01:42 crc kubenswrapper[4558]: I0120 18:01:42.180396 4558 generic.go:334] "Generic (PLEG): container finished" podID="ff4bae03-f99f-462a-8ded-9c48304110a1" containerID="f55b28c638183d096a150ac70c469c05ed01edf6198fb78f20a01896ce7f4508" exitCode=0 Jan 20 18:01:42 crc kubenswrapper[4558]: I0120 18:01:42.180481 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-k8bgg" event={"ID":"ff4bae03-f99f-462a-8ded-9c48304110a1","Type":"ContainerDied","Data":"f55b28c638183d096a150ac70c469c05ed01edf6198fb78f20a01896ce7f4508"} Jan 20 18:01:42 crc kubenswrapper[4558]: I0120 18:01:42.182658 4558 generic.go:334] "Generic (PLEG): container finished" podID="1d3f0500-fe34-435f-acfe-8f6be614ffb8" containerID="170fc088791aa1343a57b7094aab97b4039b8fc0f4af19be12926ec07034f0c3" exitCode=0 Jan 20 18:01:42 crc kubenswrapper[4558]: I0120 18:01:42.182694 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-global-csl8v" event={"ID":"1d3f0500-fe34-435f-acfe-8f6be614ffb8","Type":"ContainerDied","Data":"170fc088791aa1343a57b7094aab97b4039b8fc0f4af19be12926ec07034f0c3"} Jan 20 18:01:43 crc kubenswrapper[4558]: I0120 18:01:43.497395 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-k8bgg" Jan 20 18:01:43 crc kubenswrapper[4558]: I0120 18:01:43.503562 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-global-csl8v" Jan 20 18:01:43 crc kubenswrapper[4558]: I0120 18:01:43.604477 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1d3f0500-fe34-435f-acfe-8f6be614ffb8-inventory\") pod \"1d3f0500-fe34-435f-acfe-8f6be614ffb8\" (UID: \"1d3f0500-fe34-435f-acfe-8f6be614ffb8\") " Jan 20 18:01:43 crc kubenswrapper[4558]: I0120 18:01:43.604535 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-beta-nodeset\" (UniqueName: \"kubernetes.io/secret/ff4bae03-f99f-462a-8ded-9c48304110a1-ssh-key-edpm-compute-beta-nodeset\") pod \"ff4bae03-f99f-462a-8ded-9c48304110a1\" (UID: \"ff4bae03-f99f-462a-8ded-9c48304110a1\") " Jan 20 18:01:43 crc kubenswrapper[4558]: I0120 18:01:43.604573 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2jqgc\" (UniqueName: \"kubernetes.io/projected/ff4bae03-f99f-462a-8ded-9c48304110a1-kube-api-access-2jqgc\") pod \"ff4bae03-f99f-462a-8ded-9c48304110a1\" (UID: \"ff4bae03-f99f-462a-8ded-9c48304110a1\") " Jan 20 18:01:43 crc kubenswrapper[4558]: I0120 18:01:43.604613 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/1d3f0500-fe34-435f-acfe-8f6be614ffb8-ssh-key-edpm-compute-global\") pod \"1d3f0500-fe34-435f-acfe-8f6be614ffb8\" (UID: \"1d3f0500-fe34-435f-acfe-8f6be614ffb8\") " Jan 20 18:01:43 crc kubenswrapper[4558]: I0120 18:01:43.604638 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ms8kl\" (UniqueName: \"kubernetes.io/projected/1d3f0500-fe34-435f-acfe-8f6be614ffb8-kube-api-access-ms8kl\") pod \"1d3f0500-fe34-435f-acfe-8f6be614ffb8\" (UID: \"1d3f0500-fe34-435f-acfe-8f6be614ffb8\") " Jan 20 18:01:43 crc kubenswrapper[4558]: I0120 18:01:43.604675 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d3f0500-fe34-435f-acfe-8f6be614ffb8-bootstrap-combined-ca-bundle\") pod \"1d3f0500-fe34-435f-acfe-8f6be614ffb8\" (UID: \"1d3f0500-fe34-435f-acfe-8f6be614ffb8\") " Jan 20 18:01:43 crc kubenswrapper[4558]: I0120 18:01:43.604728 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ff4bae03-f99f-462a-8ded-9c48304110a1-inventory\") pod \"ff4bae03-f99f-462a-8ded-9c48304110a1\" (UID: \"ff4bae03-f99f-462a-8ded-9c48304110a1\") " Jan 20 18:01:43 crc kubenswrapper[4558]: I0120 18:01:43.604759 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff4bae03-f99f-462a-8ded-9c48304110a1-bootstrap-combined-ca-bundle\") pod \"ff4bae03-f99f-462a-8ded-9c48304110a1\" (UID: \"ff4bae03-f99f-462a-8ded-9c48304110a1\") " Jan 20 18:01:43 crc kubenswrapper[4558]: I0120 18:01:43.611521 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ff4bae03-f99f-462a-8ded-9c48304110a1-kube-api-access-2jqgc" (OuterVolumeSpecName: "kube-api-access-2jqgc") pod "ff4bae03-f99f-462a-8ded-9c48304110a1" (UID: "ff4bae03-f99f-462a-8ded-9c48304110a1"). InnerVolumeSpecName "kube-api-access-2jqgc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:01:43 crc kubenswrapper[4558]: I0120 18:01:43.611677 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d3f0500-fe34-435f-acfe-8f6be614ffb8-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "1d3f0500-fe34-435f-acfe-8f6be614ffb8" (UID: "1d3f0500-fe34-435f-acfe-8f6be614ffb8"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:01:43 crc kubenswrapper[4558]: I0120 18:01:43.611685 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d3f0500-fe34-435f-acfe-8f6be614ffb8-kube-api-access-ms8kl" (OuterVolumeSpecName: "kube-api-access-ms8kl") pod "1d3f0500-fe34-435f-acfe-8f6be614ffb8" (UID: "1d3f0500-fe34-435f-acfe-8f6be614ffb8"). InnerVolumeSpecName "kube-api-access-ms8kl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:01:43 crc kubenswrapper[4558]: I0120 18:01:43.612298 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff4bae03-f99f-462a-8ded-9c48304110a1-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "ff4bae03-f99f-462a-8ded-9c48304110a1" (UID: "ff4bae03-f99f-462a-8ded-9c48304110a1"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:01:43 crc kubenswrapper[4558]: I0120 18:01:43.626630 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d3f0500-fe34-435f-acfe-8f6be614ffb8-inventory" (OuterVolumeSpecName: "inventory") pod "1d3f0500-fe34-435f-acfe-8f6be614ffb8" (UID: "1d3f0500-fe34-435f-acfe-8f6be614ffb8"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:01:43 crc kubenswrapper[4558]: I0120 18:01:43.627052 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d3f0500-fe34-435f-acfe-8f6be614ffb8-ssh-key-edpm-compute-global" (OuterVolumeSpecName: "ssh-key-edpm-compute-global") pod "1d3f0500-fe34-435f-acfe-8f6be614ffb8" (UID: "1d3f0500-fe34-435f-acfe-8f6be614ffb8"). InnerVolumeSpecName "ssh-key-edpm-compute-global". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:01:43 crc kubenswrapper[4558]: I0120 18:01:43.627211 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff4bae03-f99f-462a-8ded-9c48304110a1-ssh-key-edpm-compute-beta-nodeset" (OuterVolumeSpecName: "ssh-key-edpm-compute-beta-nodeset") pod "ff4bae03-f99f-462a-8ded-9c48304110a1" (UID: "ff4bae03-f99f-462a-8ded-9c48304110a1"). InnerVolumeSpecName "ssh-key-edpm-compute-beta-nodeset". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:01:43 crc kubenswrapper[4558]: I0120 18:01:43.627560 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff4bae03-f99f-462a-8ded-9c48304110a1-inventory" (OuterVolumeSpecName: "inventory") pod "ff4bae03-f99f-462a-8ded-9c48304110a1" (UID: "ff4bae03-f99f-462a-8ded-9c48304110a1"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:01:43 crc kubenswrapper[4558]: I0120 18:01:43.707018 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-edpm-compute-beta-nodeset\" (UniqueName: \"kubernetes.io/secret/ff4bae03-f99f-462a-8ded-9c48304110a1-ssh-key-edpm-compute-beta-nodeset\") on node \"crc\" DevicePath \"\"" Jan 20 18:01:43 crc kubenswrapper[4558]: I0120 18:01:43.707052 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2jqgc\" (UniqueName: \"kubernetes.io/projected/ff4bae03-f99f-462a-8ded-9c48304110a1-kube-api-access-2jqgc\") on node \"crc\" DevicePath \"\"" Jan 20 18:01:43 crc kubenswrapper[4558]: I0120 18:01:43.707067 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/1d3f0500-fe34-435f-acfe-8f6be614ffb8-ssh-key-edpm-compute-global\") on node \"crc\" DevicePath \"\"" Jan 20 18:01:43 crc kubenswrapper[4558]: I0120 18:01:43.707079 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ms8kl\" (UniqueName: \"kubernetes.io/projected/1d3f0500-fe34-435f-acfe-8f6be614ffb8-kube-api-access-ms8kl\") on node \"crc\" DevicePath \"\"" Jan 20 18:01:43 crc kubenswrapper[4558]: I0120 18:01:43.707090 4558 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1d3f0500-fe34-435f-acfe-8f6be614ffb8-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:01:43 crc kubenswrapper[4558]: I0120 18:01:43.707100 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ff4bae03-f99f-462a-8ded-9c48304110a1-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:01:43 crc kubenswrapper[4558]: I0120 18:01:43.707112 4558 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff4bae03-f99f-462a-8ded-9c48304110a1-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:01:43 crc kubenswrapper[4558]: I0120 18:01:43.707122 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1d3f0500-fe34-435f-acfe-8f6be614ffb8-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:01:44 crc kubenswrapper[4558]: I0120 18:01:44.217701 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-k8bgg" event={"ID":"ff4bae03-f99f-462a-8ded-9c48304110a1","Type":"ContainerDied","Data":"bd2f36814f3d51c7027e492d6ce77291def5ddfaea64afaa019cb334fadf0e8b"} Jan 20 18:01:44 crc kubenswrapper[4558]: I0120 18:01:44.217994 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bd2f36814f3d51c7027e492d6ce77291def5ddfaea64afaa019cb334fadf0e8b" Jan 20 18:01:44 crc kubenswrapper[4558]: I0120 18:01:44.218119 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-k8bgg" Jan 20 18:01:44 crc kubenswrapper[4558]: I0120 18:01:44.222197 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-global-csl8v" event={"ID":"1d3f0500-fe34-435f-acfe-8f6be614ffb8","Type":"ContainerDied","Data":"82f157b8e5419d1975d647b90bc12000b769c3f0bac22c120ae55aec4fc9a565"} Jan 20 18:01:44 crc kubenswrapper[4558]: I0120 18:01:44.222232 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="82f157b8e5419d1975d647b90bc12000b769c3f0bac22c120ae55aec4fc9a565" Jan 20 18:01:44 crc kubenswrapper[4558]: I0120 18:01:44.222395 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-global-csl8v" Jan 20 18:01:44 crc kubenswrapper[4558]: I0120 18:01:44.264254 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/configure-network-edpm-multinodeset-edpm-compute-global-t76hl"] Jan 20 18:01:44 crc kubenswrapper[4558]: E0120 18:01:44.264605 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d3f0500-fe34-435f-acfe-8f6be614ffb8" containerName="bootstrap-edpm-multinodeset-edpm-compute-global" Jan 20 18:01:44 crc kubenswrapper[4558]: I0120 18:01:44.264621 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d3f0500-fe34-435f-acfe-8f6be614ffb8" containerName="bootstrap-edpm-multinodeset-edpm-compute-global" Jan 20 18:01:44 crc kubenswrapper[4558]: E0120 18:01:44.264647 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff4bae03-f99f-462a-8ded-9c48304110a1" containerName="bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset" Jan 20 18:01:44 crc kubenswrapper[4558]: I0120 18:01:44.264654 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff4bae03-f99f-462a-8ded-9c48304110a1" containerName="bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset" Jan 20 18:01:44 crc kubenswrapper[4558]: I0120 18:01:44.264852 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ff4bae03-f99f-462a-8ded-9c48304110a1" containerName="bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset" Jan 20 18:01:44 crc kubenswrapper[4558]: I0120 18:01:44.264862 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="1d3f0500-fe34-435f-acfe-8f6be614ffb8" containerName="bootstrap-edpm-multinodeset-edpm-compute-global" Jan 20 18:01:44 crc kubenswrapper[4558]: I0120 18:01:44.266946 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/configure-network-edpm-multinodeset-edpm-compute-global-t76hl" Jan 20 18:01:44 crc kubenswrapper[4558]: I0120 18:01:44.268419 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"edpm-compute-global-dockercfg-wkdxg" Jan 20 18:01:44 crc kubenswrapper[4558]: I0120 18:01:44.271140 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 18:01:44 crc kubenswrapper[4558]: I0120 18:01:44.271299 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 18:01:44 crc kubenswrapper[4558]: I0120 18:01:44.272506 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-edpm-compute-global" Jan 20 18:01:44 crc kubenswrapper[4558]: I0120 18:01:44.277046 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/configure-network-edpm-multinodeset-edpm-compute-global-t76hl"] Jan 20 18:01:44 crc kubenswrapper[4558]: I0120 18:01:44.320079 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x6bmk\" (UniqueName: \"kubernetes.io/projected/585b79c3-bdc4-4bb5-b190-cc7cb6887c1e-kube-api-access-x6bmk\") pod \"configure-network-edpm-multinodeset-edpm-compute-global-t76hl\" (UID: \"585b79c3-bdc4-4bb5-b190-cc7cb6887c1e\") " pod="openstack-kuttl-tests/configure-network-edpm-multinodeset-edpm-compute-global-t76hl" Jan 20 18:01:44 crc kubenswrapper[4558]: I0120 18:01:44.320135 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/585b79c3-bdc4-4bb5-b190-cc7cb6887c1e-inventory\") pod \"configure-network-edpm-multinodeset-edpm-compute-global-t76hl\" (UID: \"585b79c3-bdc4-4bb5-b190-cc7cb6887c1e\") " pod="openstack-kuttl-tests/configure-network-edpm-multinodeset-edpm-compute-global-t76hl" Jan 20 18:01:44 crc kubenswrapper[4558]: I0120 18:01:44.320191 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/585b79c3-bdc4-4bb5-b190-cc7cb6887c1e-ssh-key-edpm-compute-global\") pod \"configure-network-edpm-multinodeset-edpm-compute-global-t76hl\" (UID: \"585b79c3-bdc4-4bb5-b190-cc7cb6887c1e\") " pod="openstack-kuttl-tests/configure-network-edpm-multinodeset-edpm-compute-global-t76hl" Jan 20 18:01:44 crc kubenswrapper[4558]: I0120 18:01:44.422225 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x6bmk\" (UniqueName: \"kubernetes.io/projected/585b79c3-bdc4-4bb5-b190-cc7cb6887c1e-kube-api-access-x6bmk\") pod \"configure-network-edpm-multinodeset-edpm-compute-global-t76hl\" (UID: \"585b79c3-bdc4-4bb5-b190-cc7cb6887c1e\") " pod="openstack-kuttl-tests/configure-network-edpm-multinodeset-edpm-compute-global-t76hl" Jan 20 18:01:44 crc kubenswrapper[4558]: I0120 18:01:44.422269 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/585b79c3-bdc4-4bb5-b190-cc7cb6887c1e-inventory\") pod \"configure-network-edpm-multinodeset-edpm-compute-global-t76hl\" (UID: \"585b79c3-bdc4-4bb5-b190-cc7cb6887c1e\") " pod="openstack-kuttl-tests/configure-network-edpm-multinodeset-edpm-compute-global-t76hl" Jan 20 18:01:44 crc kubenswrapper[4558]: I0120 
18:01:44.422292 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/585b79c3-bdc4-4bb5-b190-cc7cb6887c1e-ssh-key-edpm-compute-global\") pod \"configure-network-edpm-multinodeset-edpm-compute-global-t76hl\" (UID: \"585b79c3-bdc4-4bb5-b190-cc7cb6887c1e\") " pod="openstack-kuttl-tests/configure-network-edpm-multinodeset-edpm-compute-global-t76hl" Jan 20 18:01:44 crc kubenswrapper[4558]: I0120 18:01:44.428077 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/585b79c3-bdc4-4bb5-b190-cc7cb6887c1e-inventory\") pod \"configure-network-edpm-multinodeset-edpm-compute-global-t76hl\" (UID: \"585b79c3-bdc4-4bb5-b190-cc7cb6887c1e\") " pod="openstack-kuttl-tests/configure-network-edpm-multinodeset-edpm-compute-global-t76hl" Jan 20 18:01:44 crc kubenswrapper[4558]: I0120 18:01:44.429062 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/585b79c3-bdc4-4bb5-b190-cc7cb6887c1e-ssh-key-edpm-compute-global\") pod \"configure-network-edpm-multinodeset-edpm-compute-global-t76hl\" (UID: \"585b79c3-bdc4-4bb5-b190-cc7cb6887c1e\") " pod="openstack-kuttl-tests/configure-network-edpm-multinodeset-edpm-compute-global-t76hl" Jan 20 18:01:44 crc kubenswrapper[4558]: I0120 18:01:44.437135 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x6bmk\" (UniqueName: \"kubernetes.io/projected/585b79c3-bdc4-4bb5-b190-cc7cb6887c1e-kube-api-access-x6bmk\") pod \"configure-network-edpm-multinodeset-edpm-compute-global-t76hl\" (UID: \"585b79c3-bdc4-4bb5-b190-cc7cb6887c1e\") " pod="openstack-kuttl-tests/configure-network-edpm-multinodeset-edpm-compute-global-t76hl" Jan 20 18:01:44 crc kubenswrapper[4558]: I0120 18:01:44.586388 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/configure-network-edpm-multinodeset-edpm-compute-global-t76hl" Jan 20 18:01:44 crc kubenswrapper[4558]: I0120 18:01:44.973829 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/configure-network-edpm-multinodeset-edpm-compute-global-t76hl"] Jan 20 18:01:44 crc kubenswrapper[4558]: W0120 18:01:44.976918 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod585b79c3_bdc4_4bb5_b190_cc7cb6887c1e.slice/crio-f8fd6919e45718acfc13e4916795885d179eff81960ec9cb10cd91b8de96223b WatchSource:0}: Error finding container f8fd6919e45718acfc13e4916795885d179eff81960ec9cb10cd91b8de96223b: Status 404 returned error can't find the container with id f8fd6919e45718acfc13e4916795885d179eff81960ec9cb10cd91b8de96223b Jan 20 18:01:45 crc kubenswrapper[4558]: I0120 18:01:45.234809 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/configure-network-edpm-multinodeset-edpm-compute-global-t76hl" event={"ID":"585b79c3-bdc4-4bb5-b190-cc7cb6887c1e","Type":"ContainerStarted","Data":"f8fd6919e45718acfc13e4916795885d179eff81960ec9cb10cd91b8de96223b"} Jan 20 18:01:46 crc kubenswrapper[4558]: I0120 18:01:46.246150 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/configure-network-edpm-multinodeset-edpm-compute-global-t76hl" event={"ID":"585b79c3-bdc4-4bb5-b190-cc7cb6887c1e","Type":"ContainerStarted","Data":"71f1c783db8c7f691afadbbc4458f473a50d37f859dca6011229e49700f8765f"} Jan 20 18:01:46 crc kubenswrapper[4558]: I0120 18:01:46.272962 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/configure-network-edpm-multinodeset-edpm-compute-global-t76hl" podStartSLOduration=1.745520245 podStartE2EDuration="2.272936099s" podCreationTimestamp="2026-01-20 18:01:44 +0000 UTC" firstStartedPulling="2026-01-20 18:01:44.979549902 +0000 UTC m=+4798.739887868" lastFinishedPulling="2026-01-20 18:01:45.506965755 +0000 UTC m=+4799.267303722" observedRunningTime="2026-01-20 18:01:46.26620685 +0000 UTC m=+4800.026544817" watchObservedRunningTime="2026-01-20 18:01:46.272936099 +0000 UTC m=+4800.033274066" Jan 20 18:01:47 crc kubenswrapper[4558]: I0120 18:01:47.257732 4558 generic.go:334] "Generic (PLEG): container finished" podID="585b79c3-bdc4-4bb5-b190-cc7cb6887c1e" containerID="71f1c783db8c7f691afadbbc4458f473a50d37f859dca6011229e49700f8765f" exitCode=0 Jan 20 18:01:47 crc kubenswrapper[4558]: I0120 18:01:47.257842 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/configure-network-edpm-multinodeset-edpm-compute-global-t76hl" event={"ID":"585b79c3-bdc4-4bb5-b190-cc7cb6887c1e","Type":"ContainerDied","Data":"71f1c783db8c7f691afadbbc4458f473a50d37f859dca6011229e49700f8765f"} Jan 20 18:01:48 crc kubenswrapper[4558]: I0120 18:01:48.537559 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/configure-network-edpm-multinodeset-edpm-compute-global-t76hl" Jan 20 18:01:48 crc kubenswrapper[4558]: I0120 18:01:48.587280 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/585b79c3-bdc4-4bb5-b190-cc7cb6887c1e-ssh-key-edpm-compute-global\") pod \"585b79c3-bdc4-4bb5-b190-cc7cb6887c1e\" (UID: \"585b79c3-bdc4-4bb5-b190-cc7cb6887c1e\") " Jan 20 18:01:48 crc kubenswrapper[4558]: I0120 18:01:48.587571 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/585b79c3-bdc4-4bb5-b190-cc7cb6887c1e-inventory\") pod \"585b79c3-bdc4-4bb5-b190-cc7cb6887c1e\" (UID: \"585b79c3-bdc4-4bb5-b190-cc7cb6887c1e\") " Jan 20 18:01:48 crc kubenswrapper[4558]: I0120 18:01:48.587647 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x6bmk\" (UniqueName: \"kubernetes.io/projected/585b79c3-bdc4-4bb5-b190-cc7cb6887c1e-kube-api-access-x6bmk\") pod \"585b79c3-bdc4-4bb5-b190-cc7cb6887c1e\" (UID: \"585b79c3-bdc4-4bb5-b190-cc7cb6887c1e\") " Jan 20 18:01:48 crc kubenswrapper[4558]: I0120 18:01:48.594696 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/585b79c3-bdc4-4bb5-b190-cc7cb6887c1e-kube-api-access-x6bmk" (OuterVolumeSpecName: "kube-api-access-x6bmk") pod "585b79c3-bdc4-4bb5-b190-cc7cb6887c1e" (UID: "585b79c3-bdc4-4bb5-b190-cc7cb6887c1e"). InnerVolumeSpecName "kube-api-access-x6bmk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:01:48 crc kubenswrapper[4558]: E0120 18:01:48.605019 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/585b79c3-bdc4-4bb5-b190-cc7cb6887c1e-inventory podName:585b79c3-bdc4-4bb5-b190-cc7cb6887c1e nodeName:}" failed. No retries permitted until 2026-01-20 18:01:49.104993347 +0000 UTC m=+4802.865331303 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "inventory" (UniqueName: "kubernetes.io/secret/585b79c3-bdc4-4bb5-b190-cc7cb6887c1e-inventory") pod "585b79c3-bdc4-4bb5-b190-cc7cb6887c1e" (UID: "585b79c3-bdc4-4bb5-b190-cc7cb6887c1e") : error deleting /var/lib/kubelet/pods/585b79c3-bdc4-4bb5-b190-cc7cb6887c1e/volume-subpaths: remove /var/lib/kubelet/pods/585b79c3-bdc4-4bb5-b190-cc7cb6887c1e/volume-subpaths: no such file or directory Jan 20 18:01:48 crc kubenswrapper[4558]: I0120 18:01:48.607099 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/585b79c3-bdc4-4bb5-b190-cc7cb6887c1e-ssh-key-edpm-compute-global" (OuterVolumeSpecName: "ssh-key-edpm-compute-global") pod "585b79c3-bdc4-4bb5-b190-cc7cb6887c1e" (UID: "585b79c3-bdc4-4bb5-b190-cc7cb6887c1e"). InnerVolumeSpecName "ssh-key-edpm-compute-global". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:01:48 crc kubenswrapper[4558]: I0120 18:01:48.690137 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x6bmk\" (UniqueName: \"kubernetes.io/projected/585b79c3-bdc4-4bb5-b190-cc7cb6887c1e-kube-api-access-x6bmk\") on node \"crc\" DevicePath \"\"" Jan 20 18:01:48 crc kubenswrapper[4558]: I0120 18:01:48.690198 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/585b79c3-bdc4-4bb5-b190-cc7cb6887c1e-ssh-key-edpm-compute-global\") on node \"crc\" DevicePath \"\"" Jan 20 18:01:49 crc kubenswrapper[4558]: I0120 18:01:49.195353 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/585b79c3-bdc4-4bb5-b190-cc7cb6887c1e-inventory\") pod \"585b79c3-bdc4-4bb5-b190-cc7cb6887c1e\" (UID: \"585b79c3-bdc4-4bb5-b190-cc7cb6887c1e\") " Jan 20 18:01:49 crc kubenswrapper[4558]: I0120 18:01:49.280490 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/configure-network-edpm-multinodeset-edpm-compute-global-t76hl" event={"ID":"585b79c3-bdc4-4bb5-b190-cc7cb6887c1e","Type":"ContainerDied","Data":"f8fd6919e45718acfc13e4916795885d179eff81960ec9cb10cd91b8de96223b"} Jan 20 18:01:49 crc kubenswrapper[4558]: I0120 18:01:49.280546 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f8fd6919e45718acfc13e4916795885d179eff81960ec9cb10cd91b8de96223b" Jan 20 18:01:49 crc kubenswrapper[4558]: I0120 18:01:49.280563 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/configure-network-edpm-multinodeset-edpm-compute-global-t76hl" Jan 20 18:01:49 crc kubenswrapper[4558]: I0120 18:01:49.338634 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/validate-network-edpm-multinodeset-edpm-compute-global-j2rjw"] Jan 20 18:01:49 crc kubenswrapper[4558]: E0120 18:01:49.339145 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="585b79c3-bdc4-4bb5-b190-cc7cb6887c1e" containerName="configure-network-edpm-multinodeset-edpm-compute-global" Jan 20 18:01:49 crc kubenswrapper[4558]: I0120 18:01:49.339192 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="585b79c3-bdc4-4bb5-b190-cc7cb6887c1e" containerName="configure-network-edpm-multinodeset-edpm-compute-global" Jan 20 18:01:49 crc kubenswrapper[4558]: I0120 18:01:49.339338 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="585b79c3-bdc4-4bb5-b190-cc7cb6887c1e" containerName="configure-network-edpm-multinodeset-edpm-compute-global" Jan 20 18:01:49 crc kubenswrapper[4558]: I0120 18:01:49.339951 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/validate-network-edpm-multinodeset-edpm-compute-global-j2rjw" Jan 20 18:01:49 crc kubenswrapper[4558]: I0120 18:01:49.352494 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/validate-network-edpm-multinodeset-edpm-compute-global-j2rjw"] Jan 20 18:01:49 crc kubenswrapper[4558]: I0120 18:01:49.397288 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/8be8df01-d6c9-4a23-a78b-5422feb8c2b6-ssh-key-edpm-compute-global\") pod \"validate-network-edpm-multinodeset-edpm-compute-global-j2rjw\" (UID: \"8be8df01-d6c9-4a23-a78b-5422feb8c2b6\") " pod="openstack-kuttl-tests/validate-network-edpm-multinodeset-edpm-compute-global-j2rjw" Jan 20 18:01:49 crc kubenswrapper[4558]: I0120 18:01:49.397370 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8be8df01-d6c9-4a23-a78b-5422feb8c2b6-inventory\") pod \"validate-network-edpm-multinodeset-edpm-compute-global-j2rjw\" (UID: \"8be8df01-d6c9-4a23-a78b-5422feb8c2b6\") " pod="openstack-kuttl-tests/validate-network-edpm-multinodeset-edpm-compute-global-j2rjw" Jan 20 18:01:49 crc kubenswrapper[4558]: I0120 18:01:49.397526 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ffj8j\" (UniqueName: \"kubernetes.io/projected/8be8df01-d6c9-4a23-a78b-5422feb8c2b6-kube-api-access-ffj8j\") pod \"validate-network-edpm-multinodeset-edpm-compute-global-j2rjw\" (UID: \"8be8df01-d6c9-4a23-a78b-5422feb8c2b6\") " pod="openstack-kuttl-tests/validate-network-edpm-multinodeset-edpm-compute-global-j2rjw" Jan 20 18:01:49 crc kubenswrapper[4558]: I0120 18:01:49.499076 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ffj8j\" (UniqueName: \"kubernetes.io/projected/8be8df01-d6c9-4a23-a78b-5422feb8c2b6-kube-api-access-ffj8j\") pod \"validate-network-edpm-multinodeset-edpm-compute-global-j2rjw\" (UID: \"8be8df01-d6c9-4a23-a78b-5422feb8c2b6\") " pod="openstack-kuttl-tests/validate-network-edpm-multinodeset-edpm-compute-global-j2rjw" Jan 20 18:01:49 crc kubenswrapper[4558]: I0120 18:01:49.499283 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/8be8df01-d6c9-4a23-a78b-5422feb8c2b6-ssh-key-edpm-compute-global\") pod \"validate-network-edpm-multinodeset-edpm-compute-global-j2rjw\" (UID: \"8be8df01-d6c9-4a23-a78b-5422feb8c2b6\") " pod="openstack-kuttl-tests/validate-network-edpm-multinodeset-edpm-compute-global-j2rjw" Jan 20 18:01:49 crc kubenswrapper[4558]: I0120 18:01:49.499419 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8be8df01-d6c9-4a23-a78b-5422feb8c2b6-inventory\") pod \"validate-network-edpm-multinodeset-edpm-compute-global-j2rjw\" (UID: \"8be8df01-d6c9-4a23-a78b-5422feb8c2b6\") " pod="openstack-kuttl-tests/validate-network-edpm-multinodeset-edpm-compute-global-j2rjw" Jan 20 18:01:49 crc kubenswrapper[4558]: I0120 18:01:49.593459 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/585b79c3-bdc4-4bb5-b190-cc7cb6887c1e-inventory" (OuterVolumeSpecName: "inventory") pod "585b79c3-bdc4-4bb5-b190-cc7cb6887c1e" (UID: "585b79c3-bdc4-4bb5-b190-cc7cb6887c1e"). 
InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:01:49 crc kubenswrapper[4558]: I0120 18:01:49.594839 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/8be8df01-d6c9-4a23-a78b-5422feb8c2b6-ssh-key-edpm-compute-global\") pod \"validate-network-edpm-multinodeset-edpm-compute-global-j2rjw\" (UID: \"8be8df01-d6c9-4a23-a78b-5422feb8c2b6\") " pod="openstack-kuttl-tests/validate-network-edpm-multinodeset-edpm-compute-global-j2rjw" Jan 20 18:01:49 crc kubenswrapper[4558]: I0120 18:01:49.597851 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8be8df01-d6c9-4a23-a78b-5422feb8c2b6-inventory\") pod \"validate-network-edpm-multinodeset-edpm-compute-global-j2rjw\" (UID: \"8be8df01-d6c9-4a23-a78b-5422feb8c2b6\") " pod="openstack-kuttl-tests/validate-network-edpm-multinodeset-edpm-compute-global-j2rjw" Jan 20 18:01:49 crc kubenswrapper[4558]: I0120 18:01:49.597935 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ffj8j\" (UniqueName: \"kubernetes.io/projected/8be8df01-d6c9-4a23-a78b-5422feb8c2b6-kube-api-access-ffj8j\") pod \"validate-network-edpm-multinodeset-edpm-compute-global-j2rjw\" (UID: \"8be8df01-d6c9-4a23-a78b-5422feb8c2b6\") " pod="openstack-kuttl-tests/validate-network-edpm-multinodeset-edpm-compute-global-j2rjw" Jan 20 18:01:49 crc kubenswrapper[4558]: I0120 18:01:49.601655 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/585b79c3-bdc4-4bb5-b190-cc7cb6887c1e-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:01:49 crc kubenswrapper[4558]: I0120 18:01:49.656559 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/validate-network-edpm-multinodeset-edpm-compute-global-j2rjw" Jan 20 18:01:50 crc kubenswrapper[4558]: I0120 18:01:50.058344 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/validate-network-edpm-multinodeset-edpm-compute-global-j2rjw"] Jan 20 18:01:50 crc kubenswrapper[4558]: I0120 18:01:50.291197 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/validate-network-edpm-multinodeset-edpm-compute-global-j2rjw" event={"ID":"8be8df01-d6c9-4a23-a78b-5422feb8c2b6","Type":"ContainerStarted","Data":"99fad14a626ae10e6ac39a2f8acdcd6df52732495a2246b9de0d68472bb41e2d"} Jan 20 18:01:51 crc kubenswrapper[4558]: I0120 18:01:51.301423 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/validate-network-edpm-multinodeset-edpm-compute-global-j2rjw" event={"ID":"8be8df01-d6c9-4a23-a78b-5422feb8c2b6","Type":"ContainerStarted","Data":"566a6eca35e66f52dd932aa03a7f5bd111bb8cc6fd61cc7a80d8c92da9dcf755"} Jan 20 18:01:51 crc kubenswrapper[4558]: I0120 18:01:51.324143 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/validate-network-edpm-multinodeset-edpm-compute-global-j2rjw" podStartSLOduration=1.667173366 podStartE2EDuration="2.324121345s" podCreationTimestamp="2026-01-20 18:01:49 +0000 UTC" firstStartedPulling="2026-01-20 18:01:50.057685608 +0000 UTC m=+4803.818023576" lastFinishedPulling="2026-01-20 18:01:50.714633599 +0000 UTC m=+4804.474971555" observedRunningTime="2026-01-20 18:01:51.315793021 +0000 UTC m=+4805.076130988" watchObservedRunningTime="2026-01-20 18:01:51.324121345 +0000 UTC m=+4805.084459312" Jan 20 18:01:52 crc kubenswrapper[4558]: I0120 18:01:52.312694 4558 generic.go:334] "Generic (PLEG): container finished" podID="8be8df01-d6c9-4a23-a78b-5422feb8c2b6" containerID="566a6eca35e66f52dd932aa03a7f5bd111bb8cc6fd61cc7a80d8c92da9dcf755" exitCode=0 Jan 20 18:01:52 crc kubenswrapper[4558]: I0120 18:01:52.312769 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/validate-network-edpm-multinodeset-edpm-compute-global-j2rjw" event={"ID":"8be8df01-d6c9-4a23-a78b-5422feb8c2b6","Type":"ContainerDied","Data":"566a6eca35e66f52dd932aa03a7f5bd111bb8cc6fd61cc7a80d8c92da9dcf755"} Jan 20 18:01:53 crc kubenswrapper[4558]: I0120 18:01:53.577974 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/validate-network-edpm-multinodeset-edpm-compute-global-j2rjw" Jan 20 18:01:53 crc kubenswrapper[4558]: I0120 18:01:53.655552 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/8be8df01-d6c9-4a23-a78b-5422feb8c2b6-ssh-key-edpm-compute-global\") pod \"8be8df01-d6c9-4a23-a78b-5422feb8c2b6\" (UID: \"8be8df01-d6c9-4a23-a78b-5422feb8c2b6\") " Jan 20 18:01:53 crc kubenswrapper[4558]: I0120 18:01:53.655769 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ffj8j\" (UniqueName: \"kubernetes.io/projected/8be8df01-d6c9-4a23-a78b-5422feb8c2b6-kube-api-access-ffj8j\") pod \"8be8df01-d6c9-4a23-a78b-5422feb8c2b6\" (UID: \"8be8df01-d6c9-4a23-a78b-5422feb8c2b6\") " Jan 20 18:01:53 crc kubenswrapper[4558]: I0120 18:01:53.655933 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8be8df01-d6c9-4a23-a78b-5422feb8c2b6-inventory\") pod \"8be8df01-d6c9-4a23-a78b-5422feb8c2b6\" (UID: \"8be8df01-d6c9-4a23-a78b-5422feb8c2b6\") " Jan 20 18:01:53 crc kubenswrapper[4558]: I0120 18:01:53.661987 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8be8df01-d6c9-4a23-a78b-5422feb8c2b6-kube-api-access-ffj8j" (OuterVolumeSpecName: "kube-api-access-ffj8j") pod "8be8df01-d6c9-4a23-a78b-5422feb8c2b6" (UID: "8be8df01-d6c9-4a23-a78b-5422feb8c2b6"). InnerVolumeSpecName "kube-api-access-ffj8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:01:53 crc kubenswrapper[4558]: I0120 18:01:53.676015 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8be8df01-d6c9-4a23-a78b-5422feb8c2b6-inventory" (OuterVolumeSpecName: "inventory") pod "8be8df01-d6c9-4a23-a78b-5422feb8c2b6" (UID: "8be8df01-d6c9-4a23-a78b-5422feb8c2b6"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:01:53 crc kubenswrapper[4558]: I0120 18:01:53.676552 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8be8df01-d6c9-4a23-a78b-5422feb8c2b6-ssh-key-edpm-compute-global" (OuterVolumeSpecName: "ssh-key-edpm-compute-global") pod "8be8df01-d6c9-4a23-a78b-5422feb8c2b6" (UID: "8be8df01-d6c9-4a23-a78b-5422feb8c2b6"). InnerVolumeSpecName "ssh-key-edpm-compute-global". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:01:53 crc kubenswrapper[4558]: I0120 18:01:53.757106 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ffj8j\" (UniqueName: \"kubernetes.io/projected/8be8df01-d6c9-4a23-a78b-5422feb8c2b6-kube-api-access-ffj8j\") on node \"crc\" DevicePath \"\"" Jan 20 18:01:53 crc kubenswrapper[4558]: I0120 18:01:53.757137 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8be8df01-d6c9-4a23-a78b-5422feb8c2b6-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:01:53 crc kubenswrapper[4558]: I0120 18:01:53.757149 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/8be8df01-d6c9-4a23-a78b-5422feb8c2b6-ssh-key-edpm-compute-global\") on node \"crc\" DevicePath \"\"" Jan 20 18:01:54 crc kubenswrapper[4558]: I0120 18:01:54.334339 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/validate-network-edpm-multinodeset-edpm-compute-global-j2rjw" event={"ID":"8be8df01-d6c9-4a23-a78b-5422feb8c2b6","Type":"ContainerDied","Data":"99fad14a626ae10e6ac39a2f8acdcd6df52732495a2246b9de0d68472bb41e2d"} Jan 20 18:01:54 crc kubenswrapper[4558]: I0120 18:01:54.334389 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="99fad14a626ae10e6ac39a2f8acdcd6df52732495a2246b9de0d68472bb41e2d" Jan 20 18:01:54 crc kubenswrapper[4558]: I0120 18:01:54.334396 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/validate-network-edpm-multinodeset-edpm-compute-global-j2rjw" Jan 20 18:01:54 crc kubenswrapper[4558]: I0120 18:01:54.393723 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/install-os-edpm-multinodeset-edpm-compute-global-ztq9r"] Jan 20 18:01:54 crc kubenswrapper[4558]: E0120 18:01:54.394847 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8be8df01-d6c9-4a23-a78b-5422feb8c2b6" containerName="validate-network-edpm-multinodeset-edpm-compute-global" Jan 20 18:01:54 crc kubenswrapper[4558]: I0120 18:01:54.394877 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8be8df01-d6c9-4a23-a78b-5422feb8c2b6" containerName="validate-network-edpm-multinodeset-edpm-compute-global" Jan 20 18:01:54 crc kubenswrapper[4558]: I0120 18:01:54.395123 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8be8df01-d6c9-4a23-a78b-5422feb8c2b6" containerName="validate-network-edpm-multinodeset-edpm-compute-global" Jan 20 18:01:54 crc kubenswrapper[4558]: I0120 18:01:54.395882 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/install-os-edpm-multinodeset-edpm-compute-global-ztq9r" Jan 20 18:01:54 crc kubenswrapper[4558]: I0120 18:01:54.398312 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 18:01:54 crc kubenswrapper[4558]: I0120 18:01:54.398810 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"edpm-compute-global-dockercfg-wkdxg" Jan 20 18:01:54 crc kubenswrapper[4558]: I0120 18:01:54.398875 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-edpm-compute-global" Jan 20 18:01:54 crc kubenswrapper[4558]: I0120 18:01:54.406864 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/install-os-edpm-multinodeset-edpm-compute-global-ztq9r"] Jan 20 18:01:54 crc kubenswrapper[4558]: I0120 18:01:54.412365 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 18:01:54 crc kubenswrapper[4558]: I0120 18:01:54.567708 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/07d4b53a-7d96-4698-83a6-885070980783-ssh-key-edpm-compute-global\") pod \"install-os-edpm-multinodeset-edpm-compute-global-ztq9r\" (UID: \"07d4b53a-7d96-4698-83a6-885070980783\") " pod="openstack-kuttl-tests/install-os-edpm-multinodeset-edpm-compute-global-ztq9r" Jan 20 18:01:54 crc kubenswrapper[4558]: I0120 18:01:54.567769 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/07d4b53a-7d96-4698-83a6-885070980783-inventory\") pod \"install-os-edpm-multinodeset-edpm-compute-global-ztq9r\" (UID: \"07d4b53a-7d96-4698-83a6-885070980783\") " pod="openstack-kuttl-tests/install-os-edpm-multinodeset-edpm-compute-global-ztq9r" Jan 20 18:01:54 crc kubenswrapper[4558]: I0120 18:01:54.568232 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7xddm\" (UniqueName: \"kubernetes.io/projected/07d4b53a-7d96-4698-83a6-885070980783-kube-api-access-7xddm\") pod \"install-os-edpm-multinodeset-edpm-compute-global-ztq9r\" (UID: \"07d4b53a-7d96-4698-83a6-885070980783\") " pod="openstack-kuttl-tests/install-os-edpm-multinodeset-edpm-compute-global-ztq9r" Jan 20 18:01:54 crc kubenswrapper[4558]: I0120 18:01:54.669176 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/07d4b53a-7d96-4698-83a6-885070980783-ssh-key-edpm-compute-global\") pod \"install-os-edpm-multinodeset-edpm-compute-global-ztq9r\" (UID: \"07d4b53a-7d96-4698-83a6-885070980783\") " pod="openstack-kuttl-tests/install-os-edpm-multinodeset-edpm-compute-global-ztq9r" Jan 20 18:01:54 crc kubenswrapper[4558]: I0120 18:01:54.669239 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/07d4b53a-7d96-4698-83a6-885070980783-inventory\") pod \"install-os-edpm-multinodeset-edpm-compute-global-ztq9r\" (UID: \"07d4b53a-7d96-4698-83a6-885070980783\") " pod="openstack-kuttl-tests/install-os-edpm-multinodeset-edpm-compute-global-ztq9r" Jan 20 18:01:54 crc kubenswrapper[4558]: I0120 18:01:54.669295 4558 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-7xddm\" (UniqueName: \"kubernetes.io/projected/07d4b53a-7d96-4698-83a6-885070980783-kube-api-access-7xddm\") pod \"install-os-edpm-multinodeset-edpm-compute-global-ztq9r\" (UID: \"07d4b53a-7d96-4698-83a6-885070980783\") " pod="openstack-kuttl-tests/install-os-edpm-multinodeset-edpm-compute-global-ztq9r" Jan 20 18:01:54 crc kubenswrapper[4558]: I0120 18:01:54.675534 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/07d4b53a-7d96-4698-83a6-885070980783-inventory\") pod \"install-os-edpm-multinodeset-edpm-compute-global-ztq9r\" (UID: \"07d4b53a-7d96-4698-83a6-885070980783\") " pod="openstack-kuttl-tests/install-os-edpm-multinodeset-edpm-compute-global-ztq9r" Jan 20 18:01:54 crc kubenswrapper[4558]: I0120 18:01:54.675730 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/07d4b53a-7d96-4698-83a6-885070980783-ssh-key-edpm-compute-global\") pod \"install-os-edpm-multinodeset-edpm-compute-global-ztq9r\" (UID: \"07d4b53a-7d96-4698-83a6-885070980783\") " pod="openstack-kuttl-tests/install-os-edpm-multinodeset-edpm-compute-global-ztq9r" Jan 20 18:01:54 crc kubenswrapper[4558]: I0120 18:01:54.687017 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7xddm\" (UniqueName: \"kubernetes.io/projected/07d4b53a-7d96-4698-83a6-885070980783-kube-api-access-7xddm\") pod \"install-os-edpm-multinodeset-edpm-compute-global-ztq9r\" (UID: \"07d4b53a-7d96-4698-83a6-885070980783\") " pod="openstack-kuttl-tests/install-os-edpm-multinodeset-edpm-compute-global-ztq9r" Jan 20 18:01:54 crc kubenswrapper[4558]: I0120 18:01:54.714844 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/install-os-edpm-multinodeset-edpm-compute-global-ztq9r" Jan 20 18:01:55 crc kubenswrapper[4558]: I0120 18:01:55.112723 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/install-os-edpm-multinodeset-edpm-compute-global-ztq9r"] Jan 20 18:01:55 crc kubenswrapper[4558]: W0120 18:01:55.116038 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod07d4b53a_7d96_4698_83a6_885070980783.slice/crio-813e326325d2f71b9cd0630b9f31d1d6f5b08a19cd15a2d5dc0132c2d4486a60 WatchSource:0}: Error finding container 813e326325d2f71b9cd0630b9f31d1d6f5b08a19cd15a2d5dc0132c2d4486a60: Status 404 returned error can't find the container with id 813e326325d2f71b9cd0630b9f31d1d6f5b08a19cd15a2d5dc0132c2d4486a60 Jan 20 18:01:55 crc kubenswrapper[4558]: I0120 18:01:55.346278 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/install-os-edpm-multinodeset-edpm-compute-global-ztq9r" event={"ID":"07d4b53a-7d96-4698-83a6-885070980783","Type":"ContainerStarted","Data":"813e326325d2f71b9cd0630b9f31d1d6f5b08a19cd15a2d5dc0132c2d4486a60"} Jan 20 18:01:56 crc kubenswrapper[4558]: I0120 18:01:56.357911 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/install-os-edpm-multinodeset-edpm-compute-global-ztq9r" event={"ID":"07d4b53a-7d96-4698-83a6-885070980783","Type":"ContainerStarted","Data":"1a997d470884a39f7eb44c72758d6bc648bf9af5d98e3af36150fd95c134b997"} Jan 20 18:01:56 crc kubenswrapper[4558]: I0120 18:01:56.391723 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/install-os-edpm-multinodeset-edpm-compute-global-ztq9r" podStartSLOduration=1.646614061 podStartE2EDuration="2.39169046s" podCreationTimestamp="2026-01-20 18:01:54 +0000 UTC" firstStartedPulling="2026-01-20 18:01:55.118429383 +0000 UTC m=+4808.878767349" lastFinishedPulling="2026-01-20 18:01:55.863505781 +0000 UTC m=+4809.623843748" observedRunningTime="2026-01-20 18:01:56.371830187 +0000 UTC m=+4810.132168155" watchObservedRunningTime="2026-01-20 18:01:56.39169046 +0000 UTC m=+4810.152028426" Jan 20 18:01:57 crc kubenswrapper[4558]: I0120 18:01:57.367589 4558 generic.go:334] "Generic (PLEG): container finished" podID="07d4b53a-7d96-4698-83a6-885070980783" containerID="1a997d470884a39f7eb44c72758d6bc648bf9af5d98e3af36150fd95c134b997" exitCode=0 Jan 20 18:01:57 crc kubenswrapper[4558]: I0120 18:01:57.367675 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/install-os-edpm-multinodeset-edpm-compute-global-ztq9r" event={"ID":"07d4b53a-7d96-4698-83a6-885070980783","Type":"ContainerDied","Data":"1a997d470884a39f7eb44c72758d6bc648bf9af5d98e3af36150fd95c134b997"} Jan 20 18:01:58 crc kubenswrapper[4558]: I0120 18:01:58.616188 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/install-os-edpm-multinodeset-edpm-compute-global-ztq9r" Jan 20 18:01:58 crc kubenswrapper[4558]: I0120 18:01:58.737561 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7xddm\" (UniqueName: \"kubernetes.io/projected/07d4b53a-7d96-4698-83a6-885070980783-kube-api-access-7xddm\") pod \"07d4b53a-7d96-4698-83a6-885070980783\" (UID: \"07d4b53a-7d96-4698-83a6-885070980783\") " Jan 20 18:01:58 crc kubenswrapper[4558]: I0120 18:01:58.737684 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/07d4b53a-7d96-4698-83a6-885070980783-ssh-key-edpm-compute-global\") pod \"07d4b53a-7d96-4698-83a6-885070980783\" (UID: \"07d4b53a-7d96-4698-83a6-885070980783\") " Jan 20 18:01:58 crc kubenswrapper[4558]: I0120 18:01:58.737745 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/07d4b53a-7d96-4698-83a6-885070980783-inventory\") pod \"07d4b53a-7d96-4698-83a6-885070980783\" (UID: \"07d4b53a-7d96-4698-83a6-885070980783\") " Jan 20 18:01:58 crc kubenswrapper[4558]: I0120 18:01:58.743507 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/07d4b53a-7d96-4698-83a6-885070980783-kube-api-access-7xddm" (OuterVolumeSpecName: "kube-api-access-7xddm") pod "07d4b53a-7d96-4698-83a6-885070980783" (UID: "07d4b53a-7d96-4698-83a6-885070980783"). InnerVolumeSpecName "kube-api-access-7xddm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:01:58 crc kubenswrapper[4558]: E0120 18:01:58.754868 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/07d4b53a-7d96-4698-83a6-885070980783-ssh-key-edpm-compute-global podName:07d4b53a-7d96-4698-83a6-885070980783 nodeName:}" failed. No retries permitted until 2026-01-20 18:01:59.254820577 +0000 UTC m=+4813.015158544 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "ssh-key-edpm-compute-global" (UniqueName: "kubernetes.io/secret/07d4b53a-7d96-4698-83a6-885070980783-ssh-key-edpm-compute-global") pod "07d4b53a-7d96-4698-83a6-885070980783" (UID: "07d4b53a-7d96-4698-83a6-885070980783") : error deleting /var/lib/kubelet/pods/07d4b53a-7d96-4698-83a6-885070980783/volume-subpaths: remove /var/lib/kubelet/pods/07d4b53a-7d96-4698-83a6-885070980783/volume-subpaths: no such file or directory Jan 20 18:01:58 crc kubenswrapper[4558]: I0120 18:01:58.757714 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/07d4b53a-7d96-4698-83a6-885070980783-inventory" (OuterVolumeSpecName: "inventory") pod "07d4b53a-7d96-4698-83a6-885070980783" (UID: "07d4b53a-7d96-4698-83a6-885070980783"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:01:58 crc kubenswrapper[4558]: I0120 18:01:58.840254 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7xddm\" (UniqueName: \"kubernetes.io/projected/07d4b53a-7d96-4698-83a6-885070980783-kube-api-access-7xddm\") on node \"crc\" DevicePath \"\"" Jan 20 18:01:58 crc kubenswrapper[4558]: I0120 18:01:58.840285 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/07d4b53a-7d96-4698-83a6-885070980783-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:01:59 crc kubenswrapper[4558]: I0120 18:01:59.346718 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/07d4b53a-7d96-4698-83a6-885070980783-ssh-key-edpm-compute-global\") pod \"07d4b53a-7d96-4698-83a6-885070980783\" (UID: \"07d4b53a-7d96-4698-83a6-885070980783\") " Jan 20 18:01:59 crc kubenswrapper[4558]: I0120 18:01:59.349846 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/07d4b53a-7d96-4698-83a6-885070980783-ssh-key-edpm-compute-global" (OuterVolumeSpecName: "ssh-key-edpm-compute-global") pod "07d4b53a-7d96-4698-83a6-885070980783" (UID: "07d4b53a-7d96-4698-83a6-885070980783"). InnerVolumeSpecName "ssh-key-edpm-compute-global". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:01:59 crc kubenswrapper[4558]: I0120 18:01:59.387085 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/install-os-edpm-multinodeset-edpm-compute-global-ztq9r" event={"ID":"07d4b53a-7d96-4698-83a6-885070980783","Type":"ContainerDied","Data":"813e326325d2f71b9cd0630b9f31d1d6f5b08a19cd15a2d5dc0132c2d4486a60"} Jan 20 18:01:59 crc kubenswrapper[4558]: I0120 18:01:59.387142 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="813e326325d2f71b9cd0630b9f31d1d6f5b08a19cd15a2d5dc0132c2d4486a60" Jan 20 18:01:59 crc kubenswrapper[4558]: I0120 18:01:59.387232 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/install-os-edpm-multinodeset-edpm-compute-global-ztq9r" Jan 20 18:01:59 crc kubenswrapper[4558]: I0120 18:01:59.448459 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/07d4b53a-7d96-4698-83a6-885070980783-ssh-key-edpm-compute-global\") on node \"crc\" DevicePath \"\"" Jan 20 18:01:59 crc kubenswrapper[4558]: I0120 18:01:59.452766 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/configure-os-edpm-multinodeset-edpm-compute-global-wx5df"] Jan 20 18:01:59 crc kubenswrapper[4558]: E0120 18:01:59.453128 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="07d4b53a-7d96-4698-83a6-885070980783" containerName="install-os-edpm-multinodeset-edpm-compute-global" Jan 20 18:01:59 crc kubenswrapper[4558]: I0120 18:01:59.453154 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="07d4b53a-7d96-4698-83a6-885070980783" containerName="install-os-edpm-multinodeset-edpm-compute-global" Jan 20 18:01:59 crc kubenswrapper[4558]: I0120 18:01:59.453385 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="07d4b53a-7d96-4698-83a6-885070980783" containerName="install-os-edpm-multinodeset-edpm-compute-global" Jan 20 18:01:59 crc kubenswrapper[4558]: I0120 18:01:59.453925 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/configure-os-edpm-multinodeset-edpm-compute-global-wx5df" Jan 20 18:01:59 crc kubenswrapper[4558]: I0120 18:01:59.456238 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 18:01:59 crc kubenswrapper[4558]: I0120 18:01:59.456299 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-edpm-compute-global" Jan 20 18:01:59 crc kubenswrapper[4558]: I0120 18:01:59.456471 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"edpm-compute-global-dockercfg-wkdxg" Jan 20 18:01:59 crc kubenswrapper[4558]: I0120 18:01:59.456589 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 18:01:59 crc kubenswrapper[4558]: I0120 18:01:59.459649 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/configure-os-edpm-multinodeset-edpm-compute-global-wx5df"] Jan 20 18:01:59 crc kubenswrapper[4558]: I0120 18:01:59.550700 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/60e20e0a-3082-4a93-b6a3-0a388fbd2ff5-inventory\") pod \"configure-os-edpm-multinodeset-edpm-compute-global-wx5df\" (UID: \"60e20e0a-3082-4a93-b6a3-0a388fbd2ff5\") " pod="openstack-kuttl-tests/configure-os-edpm-multinodeset-edpm-compute-global-wx5df" Jan 20 18:01:59 crc kubenswrapper[4558]: I0120 18:01:59.550766 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kv9fg\" (UniqueName: \"kubernetes.io/projected/60e20e0a-3082-4a93-b6a3-0a388fbd2ff5-kube-api-access-kv9fg\") pod \"configure-os-edpm-multinodeset-edpm-compute-global-wx5df\" (UID: \"60e20e0a-3082-4a93-b6a3-0a388fbd2ff5\") " pod="openstack-kuttl-tests/configure-os-edpm-multinodeset-edpm-compute-global-wx5df" Jan 20 18:01:59 crc kubenswrapper[4558]: I0120 18:01:59.550872 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/60e20e0a-3082-4a93-b6a3-0a388fbd2ff5-ssh-key-edpm-compute-global\") pod \"configure-os-edpm-multinodeset-edpm-compute-global-wx5df\" (UID: \"60e20e0a-3082-4a93-b6a3-0a388fbd2ff5\") " pod="openstack-kuttl-tests/configure-os-edpm-multinodeset-edpm-compute-global-wx5df" Jan 20 18:01:59 crc kubenswrapper[4558]: I0120 18:01:59.653526 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/60e20e0a-3082-4a93-b6a3-0a388fbd2ff5-inventory\") pod \"configure-os-edpm-multinodeset-edpm-compute-global-wx5df\" (UID: \"60e20e0a-3082-4a93-b6a3-0a388fbd2ff5\") " pod="openstack-kuttl-tests/configure-os-edpm-multinodeset-edpm-compute-global-wx5df" Jan 20 18:01:59 crc kubenswrapper[4558]: I0120 18:01:59.653588 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kv9fg\" (UniqueName: \"kubernetes.io/projected/60e20e0a-3082-4a93-b6a3-0a388fbd2ff5-kube-api-access-kv9fg\") pod \"configure-os-edpm-multinodeset-edpm-compute-global-wx5df\" (UID: \"60e20e0a-3082-4a93-b6a3-0a388fbd2ff5\") " pod="openstack-kuttl-tests/configure-os-edpm-multinodeset-edpm-compute-global-wx5df" Jan 20 18:01:59 crc kubenswrapper[4558]: I0120 18:01:59.653687 4558 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/60e20e0a-3082-4a93-b6a3-0a388fbd2ff5-ssh-key-edpm-compute-global\") pod \"configure-os-edpm-multinodeset-edpm-compute-global-wx5df\" (UID: \"60e20e0a-3082-4a93-b6a3-0a388fbd2ff5\") " pod="openstack-kuttl-tests/configure-os-edpm-multinodeset-edpm-compute-global-wx5df" Jan 20 18:01:59 crc kubenswrapper[4558]: I0120 18:01:59.893962 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/60e20e0a-3082-4a93-b6a3-0a388fbd2ff5-ssh-key-edpm-compute-global\") pod \"configure-os-edpm-multinodeset-edpm-compute-global-wx5df\" (UID: \"60e20e0a-3082-4a93-b6a3-0a388fbd2ff5\") " pod="openstack-kuttl-tests/configure-os-edpm-multinodeset-edpm-compute-global-wx5df" Jan 20 18:01:59 crc kubenswrapper[4558]: I0120 18:01:59.893977 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/60e20e0a-3082-4a93-b6a3-0a388fbd2ff5-inventory\") pod \"configure-os-edpm-multinodeset-edpm-compute-global-wx5df\" (UID: \"60e20e0a-3082-4a93-b6a3-0a388fbd2ff5\") " pod="openstack-kuttl-tests/configure-os-edpm-multinodeset-edpm-compute-global-wx5df" Jan 20 18:01:59 crc kubenswrapper[4558]: I0120 18:01:59.894478 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kv9fg\" (UniqueName: \"kubernetes.io/projected/60e20e0a-3082-4a93-b6a3-0a388fbd2ff5-kube-api-access-kv9fg\") pod \"configure-os-edpm-multinodeset-edpm-compute-global-wx5df\" (UID: \"60e20e0a-3082-4a93-b6a3-0a388fbd2ff5\") " pod="openstack-kuttl-tests/configure-os-edpm-multinodeset-edpm-compute-global-wx5df" Jan 20 18:02:00 crc kubenswrapper[4558]: I0120 18:02:00.069967 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/configure-os-edpm-multinodeset-edpm-compute-global-wx5df" Jan 20 18:02:00 crc kubenswrapper[4558]: I0120 18:02:00.465570 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/configure-os-edpm-multinodeset-edpm-compute-global-wx5df"] Jan 20 18:02:00 crc kubenswrapper[4558]: W0120 18:02:00.467630 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod60e20e0a_3082_4a93_b6a3_0a388fbd2ff5.slice/crio-a7bd4ba443d9b3da72d4f523995a2ef22803136a4750449dab17c40729dab8ee WatchSource:0}: Error finding container a7bd4ba443d9b3da72d4f523995a2ef22803136a4750449dab17c40729dab8ee: Status 404 returned error can't find the container with id a7bd4ba443d9b3da72d4f523995a2ef22803136a4750449dab17c40729dab8ee Jan 20 18:02:01 crc kubenswrapper[4558]: I0120 18:02:01.420340 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/configure-os-edpm-multinodeset-edpm-compute-global-wx5df" event={"ID":"60e20e0a-3082-4a93-b6a3-0a388fbd2ff5","Type":"ContainerStarted","Data":"a7d59ab3dd89c04918067b3b577e740c68bc67311390ae44c4fedb735b11dddb"} Jan 20 18:02:01 crc kubenswrapper[4558]: I0120 18:02:01.420628 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/configure-os-edpm-multinodeset-edpm-compute-global-wx5df" event={"ID":"60e20e0a-3082-4a93-b6a3-0a388fbd2ff5","Type":"ContainerStarted","Data":"a7bd4ba443d9b3da72d4f523995a2ef22803136a4750449dab17c40729dab8ee"} Jan 20 18:02:01 crc kubenswrapper[4558]: I0120 18:02:01.440456 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/configure-os-edpm-multinodeset-edpm-compute-global-wx5df" podStartSLOduration=1.880920728 podStartE2EDuration="2.440429529s" podCreationTimestamp="2026-01-20 18:01:59 +0000 UTC" firstStartedPulling="2026-01-20 18:02:00.470275542 +0000 UTC m=+4814.230613499" lastFinishedPulling="2026-01-20 18:02:01.029784333 +0000 UTC m=+4814.790122300" observedRunningTime="2026-01-20 18:02:01.435921394 +0000 UTC m=+4815.196259361" watchObservedRunningTime="2026-01-20 18:02:01.440429529 +0000 UTC m=+4815.200767495" Jan 20 18:02:03 crc kubenswrapper[4558]: I0120 18:02:03.440351 4558 generic.go:334] "Generic (PLEG): container finished" podID="60e20e0a-3082-4a93-b6a3-0a388fbd2ff5" containerID="a7d59ab3dd89c04918067b3b577e740c68bc67311390ae44c4fedb735b11dddb" exitCode=0 Jan 20 18:02:03 crc kubenswrapper[4558]: I0120 18:02:03.440441 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/configure-os-edpm-multinodeset-edpm-compute-global-wx5df" event={"ID":"60e20e0a-3082-4a93-b6a3-0a388fbd2ff5","Type":"ContainerDied","Data":"a7d59ab3dd89c04918067b3b577e740c68bc67311390ae44c4fedb735b11dddb"} Jan 20 18:02:04 crc kubenswrapper[4558]: I0120 18:02:04.697411 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/configure-os-edpm-multinodeset-edpm-compute-global-wx5df" Jan 20 18:02:04 crc kubenswrapper[4558]: I0120 18:02:04.721555 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/60e20e0a-3082-4a93-b6a3-0a388fbd2ff5-inventory\") pod \"60e20e0a-3082-4a93-b6a3-0a388fbd2ff5\" (UID: \"60e20e0a-3082-4a93-b6a3-0a388fbd2ff5\") " Jan 20 18:02:04 crc kubenswrapper[4558]: I0120 18:02:04.721640 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kv9fg\" (UniqueName: \"kubernetes.io/projected/60e20e0a-3082-4a93-b6a3-0a388fbd2ff5-kube-api-access-kv9fg\") pod \"60e20e0a-3082-4a93-b6a3-0a388fbd2ff5\" (UID: \"60e20e0a-3082-4a93-b6a3-0a388fbd2ff5\") " Jan 20 18:02:04 crc kubenswrapper[4558]: I0120 18:02:04.721782 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/60e20e0a-3082-4a93-b6a3-0a388fbd2ff5-ssh-key-edpm-compute-global\") pod \"60e20e0a-3082-4a93-b6a3-0a388fbd2ff5\" (UID: \"60e20e0a-3082-4a93-b6a3-0a388fbd2ff5\") " Jan 20 18:02:04 crc kubenswrapper[4558]: I0120 18:02:04.727054 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/60e20e0a-3082-4a93-b6a3-0a388fbd2ff5-kube-api-access-kv9fg" (OuterVolumeSpecName: "kube-api-access-kv9fg") pod "60e20e0a-3082-4a93-b6a3-0a388fbd2ff5" (UID: "60e20e0a-3082-4a93-b6a3-0a388fbd2ff5"). InnerVolumeSpecName "kube-api-access-kv9fg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:02:04 crc kubenswrapper[4558]: I0120 18:02:04.744865 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/60e20e0a-3082-4a93-b6a3-0a388fbd2ff5-inventory" (OuterVolumeSpecName: "inventory") pod "60e20e0a-3082-4a93-b6a3-0a388fbd2ff5" (UID: "60e20e0a-3082-4a93-b6a3-0a388fbd2ff5"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:02:04 crc kubenswrapper[4558]: I0120 18:02:04.744938 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/60e20e0a-3082-4a93-b6a3-0a388fbd2ff5-ssh-key-edpm-compute-global" (OuterVolumeSpecName: "ssh-key-edpm-compute-global") pod "60e20e0a-3082-4a93-b6a3-0a388fbd2ff5" (UID: "60e20e0a-3082-4a93-b6a3-0a388fbd2ff5"). InnerVolumeSpecName "ssh-key-edpm-compute-global". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:02:04 crc kubenswrapper[4558]: I0120 18:02:04.824201 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/60e20e0a-3082-4a93-b6a3-0a388fbd2ff5-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:02:04 crc kubenswrapper[4558]: I0120 18:02:04.824237 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kv9fg\" (UniqueName: \"kubernetes.io/projected/60e20e0a-3082-4a93-b6a3-0a388fbd2ff5-kube-api-access-kv9fg\") on node \"crc\" DevicePath \"\"" Jan 20 18:02:04 crc kubenswrapper[4558]: I0120 18:02:04.824251 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/60e20e0a-3082-4a93-b6a3-0a388fbd2ff5-ssh-key-edpm-compute-global\") on node \"crc\" DevicePath \"\"" Jan 20 18:02:05 crc kubenswrapper[4558]: I0120 18:02:05.461007 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/configure-os-edpm-multinodeset-edpm-compute-global-wx5df" event={"ID":"60e20e0a-3082-4a93-b6a3-0a388fbd2ff5","Type":"ContainerDied","Data":"a7bd4ba443d9b3da72d4f523995a2ef22803136a4750449dab17c40729dab8ee"} Jan 20 18:02:05 crc kubenswrapper[4558]: I0120 18:02:05.461063 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a7bd4ba443d9b3da72d4f523995a2ef22803136a4750449dab17c40729dab8ee" Jan 20 18:02:05 crc kubenswrapper[4558]: I0120 18:02:05.461132 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/configure-os-edpm-multinodeset-edpm-compute-global-wx5df" Jan 20 18:02:05 crc kubenswrapper[4558]: I0120 18:02:05.512928 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/run-os-edpm-multinodeset-edpm-compute-global-tq9fx"] Jan 20 18:02:05 crc kubenswrapper[4558]: E0120 18:02:05.513577 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60e20e0a-3082-4a93-b6a3-0a388fbd2ff5" containerName="configure-os-edpm-multinodeset-edpm-compute-global" Jan 20 18:02:05 crc kubenswrapper[4558]: I0120 18:02:05.513601 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="60e20e0a-3082-4a93-b6a3-0a388fbd2ff5" containerName="configure-os-edpm-multinodeset-edpm-compute-global" Jan 20 18:02:05 crc kubenswrapper[4558]: I0120 18:02:05.513797 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="60e20e0a-3082-4a93-b6a3-0a388fbd2ff5" containerName="configure-os-edpm-multinodeset-edpm-compute-global" Jan 20 18:02:05 crc kubenswrapper[4558]: I0120 18:02:05.514427 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/run-os-edpm-multinodeset-edpm-compute-global-tq9fx" Jan 20 18:02:05 crc kubenswrapper[4558]: I0120 18:02:05.516204 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 18:02:05 crc kubenswrapper[4558]: I0120 18:02:05.518761 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-edpm-compute-global" Jan 20 18:02:05 crc kubenswrapper[4558]: I0120 18:02:05.519647 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 18:02:05 crc kubenswrapper[4558]: I0120 18:02:05.520602 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"edpm-compute-global-dockercfg-wkdxg" Jan 20 18:02:05 crc kubenswrapper[4558]: I0120 18:02:05.540814 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/run-os-edpm-multinodeset-edpm-compute-global-tq9fx"] Jan 20 18:02:05 crc kubenswrapper[4558]: I0120 18:02:05.642500 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ce5580a4-7384-4c6b-8a81-aa07c8d84ecc-inventory\") pod \"run-os-edpm-multinodeset-edpm-compute-global-tq9fx\" (UID: \"ce5580a4-7384-4c6b-8a81-aa07c8d84ecc\") " pod="openstack-kuttl-tests/run-os-edpm-multinodeset-edpm-compute-global-tq9fx" Jan 20 18:02:05 crc kubenswrapper[4558]: I0120 18:02:05.642697 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8sgjt\" (UniqueName: \"kubernetes.io/projected/ce5580a4-7384-4c6b-8a81-aa07c8d84ecc-kube-api-access-8sgjt\") pod \"run-os-edpm-multinodeset-edpm-compute-global-tq9fx\" (UID: \"ce5580a4-7384-4c6b-8a81-aa07c8d84ecc\") " pod="openstack-kuttl-tests/run-os-edpm-multinodeset-edpm-compute-global-tq9fx" Jan 20 18:02:05 crc kubenswrapper[4558]: I0120 18:02:05.642749 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/ce5580a4-7384-4c6b-8a81-aa07c8d84ecc-ssh-key-edpm-compute-global\") pod \"run-os-edpm-multinodeset-edpm-compute-global-tq9fx\" (UID: \"ce5580a4-7384-4c6b-8a81-aa07c8d84ecc\") " pod="openstack-kuttl-tests/run-os-edpm-multinodeset-edpm-compute-global-tq9fx" Jan 20 18:02:05 crc kubenswrapper[4558]: I0120 18:02:05.744405 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ce5580a4-7384-4c6b-8a81-aa07c8d84ecc-inventory\") pod \"run-os-edpm-multinodeset-edpm-compute-global-tq9fx\" (UID: \"ce5580a4-7384-4c6b-8a81-aa07c8d84ecc\") " pod="openstack-kuttl-tests/run-os-edpm-multinodeset-edpm-compute-global-tq9fx" Jan 20 18:02:05 crc kubenswrapper[4558]: I0120 18:02:05.744477 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8sgjt\" (UniqueName: \"kubernetes.io/projected/ce5580a4-7384-4c6b-8a81-aa07c8d84ecc-kube-api-access-8sgjt\") pod \"run-os-edpm-multinodeset-edpm-compute-global-tq9fx\" (UID: \"ce5580a4-7384-4c6b-8a81-aa07c8d84ecc\") " pod="openstack-kuttl-tests/run-os-edpm-multinodeset-edpm-compute-global-tq9fx" Jan 20 18:02:05 crc kubenswrapper[4558]: I0120 18:02:05.744510 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-edpm-compute-global\" 
(UniqueName: \"kubernetes.io/secret/ce5580a4-7384-4c6b-8a81-aa07c8d84ecc-ssh-key-edpm-compute-global\") pod \"run-os-edpm-multinodeset-edpm-compute-global-tq9fx\" (UID: \"ce5580a4-7384-4c6b-8a81-aa07c8d84ecc\") " pod="openstack-kuttl-tests/run-os-edpm-multinodeset-edpm-compute-global-tq9fx" Jan 20 18:02:05 crc kubenswrapper[4558]: I0120 18:02:05.749916 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ce5580a4-7384-4c6b-8a81-aa07c8d84ecc-inventory\") pod \"run-os-edpm-multinodeset-edpm-compute-global-tq9fx\" (UID: \"ce5580a4-7384-4c6b-8a81-aa07c8d84ecc\") " pod="openstack-kuttl-tests/run-os-edpm-multinodeset-edpm-compute-global-tq9fx" Jan 20 18:02:05 crc kubenswrapper[4558]: I0120 18:02:05.750304 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/ce5580a4-7384-4c6b-8a81-aa07c8d84ecc-ssh-key-edpm-compute-global\") pod \"run-os-edpm-multinodeset-edpm-compute-global-tq9fx\" (UID: \"ce5580a4-7384-4c6b-8a81-aa07c8d84ecc\") " pod="openstack-kuttl-tests/run-os-edpm-multinodeset-edpm-compute-global-tq9fx" Jan 20 18:02:05 crc kubenswrapper[4558]: I0120 18:02:05.757529 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8sgjt\" (UniqueName: \"kubernetes.io/projected/ce5580a4-7384-4c6b-8a81-aa07c8d84ecc-kube-api-access-8sgjt\") pod \"run-os-edpm-multinodeset-edpm-compute-global-tq9fx\" (UID: \"ce5580a4-7384-4c6b-8a81-aa07c8d84ecc\") " pod="openstack-kuttl-tests/run-os-edpm-multinodeset-edpm-compute-global-tq9fx" Jan 20 18:02:05 crc kubenswrapper[4558]: I0120 18:02:05.832945 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/run-os-edpm-multinodeset-edpm-compute-global-tq9fx" Jan 20 18:02:06 crc kubenswrapper[4558]: I0120 18:02:06.218864 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/run-os-edpm-multinodeset-edpm-compute-global-tq9fx"] Jan 20 18:02:06 crc kubenswrapper[4558]: I0120 18:02:06.472281 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/run-os-edpm-multinodeset-edpm-compute-global-tq9fx" event={"ID":"ce5580a4-7384-4c6b-8a81-aa07c8d84ecc","Type":"ContainerStarted","Data":"0b55c05226c205d85cbac73f2f703c01224a8ab402e9b507ab866421c45149b1"} Jan 20 18:02:07 crc kubenswrapper[4558]: I0120 18:02:07.481727 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/run-os-edpm-multinodeset-edpm-compute-global-tq9fx" event={"ID":"ce5580a4-7384-4c6b-8a81-aa07c8d84ecc","Type":"ContainerStarted","Data":"129f8671268b133fccb5a9037db7cdd2b2b252cea20ad1db03c7b2261495a37b"} Jan 20 18:02:07 crc kubenswrapper[4558]: I0120 18:02:07.501203 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/run-os-edpm-multinodeset-edpm-compute-global-tq9fx" podStartSLOduration=1.964442137 podStartE2EDuration="2.501152863s" podCreationTimestamp="2026-01-20 18:02:05 +0000 UTC" firstStartedPulling="2026-01-20 18:02:06.222397957 +0000 UTC m=+4819.982735925" lastFinishedPulling="2026-01-20 18:02:06.759108684 +0000 UTC m=+4820.519446651" observedRunningTime="2026-01-20 18:02:07.496422671 +0000 UTC m=+4821.256760639" watchObservedRunningTime="2026-01-20 18:02:07.501152863 +0000 UTC m=+4821.261490830" Jan 20 18:02:08 crc kubenswrapper[4558]: I0120 18:02:08.493286 4558 generic.go:334] "Generic (PLEG): container finished" 
podID="ce5580a4-7384-4c6b-8a81-aa07c8d84ecc" containerID="129f8671268b133fccb5a9037db7cdd2b2b252cea20ad1db03c7b2261495a37b" exitCode=0 Jan 20 18:02:08 crc kubenswrapper[4558]: I0120 18:02:08.493340 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/run-os-edpm-multinodeset-edpm-compute-global-tq9fx" event={"ID":"ce5580a4-7384-4c6b-8a81-aa07c8d84ecc","Type":"ContainerDied","Data":"129f8671268b133fccb5a9037db7cdd2b2b252cea20ad1db03c7b2261495a37b"} Jan 20 18:02:09 crc kubenswrapper[4558]: I0120 18:02:09.744724 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/run-os-edpm-multinodeset-edpm-compute-global-tq9fx" Jan 20 18:02:09 crc kubenswrapper[4558]: I0120 18:02:09.805978 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/ce5580a4-7384-4c6b-8a81-aa07c8d84ecc-ssh-key-edpm-compute-global\") pod \"ce5580a4-7384-4c6b-8a81-aa07c8d84ecc\" (UID: \"ce5580a4-7384-4c6b-8a81-aa07c8d84ecc\") " Jan 20 18:02:09 crc kubenswrapper[4558]: I0120 18:02:09.806093 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8sgjt\" (UniqueName: \"kubernetes.io/projected/ce5580a4-7384-4c6b-8a81-aa07c8d84ecc-kube-api-access-8sgjt\") pod \"ce5580a4-7384-4c6b-8a81-aa07c8d84ecc\" (UID: \"ce5580a4-7384-4c6b-8a81-aa07c8d84ecc\") " Jan 20 18:02:09 crc kubenswrapper[4558]: I0120 18:02:09.806180 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ce5580a4-7384-4c6b-8a81-aa07c8d84ecc-inventory\") pod \"ce5580a4-7384-4c6b-8a81-aa07c8d84ecc\" (UID: \"ce5580a4-7384-4c6b-8a81-aa07c8d84ecc\") " Jan 20 18:02:09 crc kubenswrapper[4558]: I0120 18:02:09.811722 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ce5580a4-7384-4c6b-8a81-aa07c8d84ecc-kube-api-access-8sgjt" (OuterVolumeSpecName: "kube-api-access-8sgjt") pod "ce5580a4-7384-4c6b-8a81-aa07c8d84ecc" (UID: "ce5580a4-7384-4c6b-8a81-aa07c8d84ecc"). InnerVolumeSpecName "kube-api-access-8sgjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:02:09 crc kubenswrapper[4558]: I0120 18:02:09.826044 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce5580a4-7384-4c6b-8a81-aa07c8d84ecc-inventory" (OuterVolumeSpecName: "inventory") pod "ce5580a4-7384-4c6b-8a81-aa07c8d84ecc" (UID: "ce5580a4-7384-4c6b-8a81-aa07c8d84ecc"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:02:09 crc kubenswrapper[4558]: I0120 18:02:09.826232 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce5580a4-7384-4c6b-8a81-aa07c8d84ecc-ssh-key-edpm-compute-global" (OuterVolumeSpecName: "ssh-key-edpm-compute-global") pod "ce5580a4-7384-4c6b-8a81-aa07c8d84ecc" (UID: "ce5580a4-7384-4c6b-8a81-aa07c8d84ecc"). InnerVolumeSpecName "ssh-key-edpm-compute-global". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:02:09 crc kubenswrapper[4558]: I0120 18:02:09.910539 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ce5580a4-7384-4c6b-8a81-aa07c8d84ecc-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:02:09 crc kubenswrapper[4558]: I0120 18:02:09.910602 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/ce5580a4-7384-4c6b-8a81-aa07c8d84ecc-ssh-key-edpm-compute-global\") on node \"crc\" DevicePath \"\"" Jan 20 18:02:09 crc kubenswrapper[4558]: I0120 18:02:09.910622 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8sgjt\" (UniqueName: \"kubernetes.io/projected/ce5580a4-7384-4c6b-8a81-aa07c8d84ecc-kube-api-access-8sgjt\") on node \"crc\" DevicePath \"\"" Jan 20 18:02:10 crc kubenswrapper[4558]: I0120 18:02:10.512058 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/run-os-edpm-multinodeset-edpm-compute-global-tq9fx" event={"ID":"ce5580a4-7384-4c6b-8a81-aa07c8d84ecc","Type":"ContainerDied","Data":"0b55c05226c205d85cbac73f2f703c01224a8ab402e9b507ab866421c45149b1"} Jan 20 18:02:10 crc kubenswrapper[4558]: I0120 18:02:10.512467 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0b55c05226c205d85cbac73f2f703c01224a8ab402e9b507ab866421c45149b1" Jan 20 18:02:10 crc kubenswrapper[4558]: I0120 18:02:10.512112 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/run-os-edpm-multinodeset-edpm-compute-global-tq9fx" Jan 20 18:02:10 crc kubenswrapper[4558]: I0120 18:02:10.828736 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-global-76wj2"] Jan 20 18:02:10 crc kubenswrapper[4558]: E0120 18:02:10.829067 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce5580a4-7384-4c6b-8a81-aa07c8d84ecc" containerName="run-os-edpm-multinodeset-edpm-compute-global" Jan 20 18:02:10 crc kubenswrapper[4558]: I0120 18:02:10.829081 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce5580a4-7384-4c6b-8a81-aa07c8d84ecc" containerName="run-os-edpm-multinodeset-edpm-compute-global" Jan 20 18:02:10 crc kubenswrapper[4558]: I0120 18:02:10.829246 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ce5580a4-7384-4c6b-8a81-aa07c8d84ecc" containerName="run-os-edpm-multinodeset-edpm-compute-global" Jan 20 18:02:10 crc kubenswrapper[4558]: I0120 18:02:10.829709 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-global-76wj2" Jan 20 18:02:10 crc kubenswrapper[4558]: I0120 18:02:10.831876 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"combined-ca-bundle" Jan 20 18:02:10 crc kubenswrapper[4558]: I0120 18:02:10.833984 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 18:02:10 crc kubenswrapper[4558]: I0120 18:02:10.834186 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-edpm-compute-global" Jan 20 18:02:10 crc kubenswrapper[4558]: I0120 18:02:10.834329 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"edpm-compute-global-dockercfg-wkdxg" Jan 20 18:02:10 crc kubenswrapper[4558]: I0120 18:02:10.834448 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 18:02:10 crc kubenswrapper[4558]: I0120 18:02:10.842178 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-global-76wj2"] Jan 20 18:02:10 crc kubenswrapper[4558]: I0120 18:02:10.925884 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40158c82-5595-45cb-abfa-6708fb8d590b-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-multinodeset-edpm-compute-global-76wj2\" (UID: \"40158c82-5595-45cb-abfa-6708fb8d590b\") " pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-global-76wj2" Jan 20 18:02:10 crc kubenswrapper[4558]: I0120 18:02:10.925959 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40158c82-5595-45cb-abfa-6708fb8d590b-neutron-ovn-combined-ca-bundle\") pod \"install-certs-edpm-multinodeset-edpm-compute-global-76wj2\" (UID: \"40158c82-5595-45cb-abfa-6708fb8d590b\") " pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-global-76wj2" Jan 20 18:02:10 crc kubenswrapper[4558]: I0120 18:02:10.925992 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40158c82-5595-45cb-abfa-6708fb8d590b-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-multinodeset-edpm-compute-global-76wj2\" (UID: \"40158c82-5595-45cb-abfa-6708fb8d590b\") " pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-global-76wj2" Jan 20 18:02:10 crc kubenswrapper[4558]: I0120 18:02:10.926043 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40158c82-5595-45cb-abfa-6708fb8d590b-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-multinodeset-edpm-compute-global-76wj2\" (UID: \"40158c82-5595-45cb-abfa-6708fb8d590b\") " pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-global-76wj2" Jan 20 18:02:10 crc kubenswrapper[4558]: I0120 18:02:10.926085 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/40158c82-5595-45cb-abfa-6708fb8d590b-ovn-combined-ca-bundle\") pod \"install-certs-edpm-multinodeset-edpm-compute-global-76wj2\" (UID: \"40158c82-5595-45cb-abfa-6708fb8d590b\") " pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-global-76wj2" Jan 20 18:02:10 crc kubenswrapper[4558]: I0120 18:02:10.926105 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40158c82-5595-45cb-abfa-6708fb8d590b-nova-combined-ca-bundle\") pod \"install-certs-edpm-multinodeset-edpm-compute-global-76wj2\" (UID: \"40158c82-5595-45cb-abfa-6708fb8d590b\") " pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-global-76wj2" Jan 20 18:02:10 crc kubenswrapper[4558]: I0120 18:02:10.926154 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40158c82-5595-45cb-abfa-6708fb8d590b-neutron-sriov-combined-ca-bundle\") pod \"install-certs-edpm-multinodeset-edpm-compute-global-76wj2\" (UID: \"40158c82-5595-45cb-abfa-6708fb8d590b\") " pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-global-76wj2" Jan 20 18:02:10 crc kubenswrapper[4558]: I0120 18:02:10.926193 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/40158c82-5595-45cb-abfa-6708fb8d590b-inventory\") pod \"install-certs-edpm-multinodeset-edpm-compute-global-76wj2\" (UID: \"40158c82-5595-45cb-abfa-6708fb8d590b\") " pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-global-76wj2" Jan 20 18:02:10 crc kubenswrapper[4558]: I0120 18:02:10.926253 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40158c82-5595-45cb-abfa-6708fb8d590b-neutron-dhcp-combined-ca-bundle\") pod \"install-certs-edpm-multinodeset-edpm-compute-global-76wj2\" (UID: \"40158c82-5595-45cb-abfa-6708fb8d590b\") " pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-global-76wj2" Jan 20 18:02:10 crc kubenswrapper[4558]: I0120 18:02:10.926281 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-global-service-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40158c82-5595-45cb-abfa-6708fb8d590b-custom-global-service-combined-ca-bundle\") pod \"install-certs-edpm-multinodeset-edpm-compute-global-76wj2\" (UID: \"40158c82-5595-45cb-abfa-6708fb8d590b\") " pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-global-76wj2" Jan 20 18:02:10 crc kubenswrapper[4558]: I0120 18:02:10.926307 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v52fp\" (UniqueName: \"kubernetes.io/projected/40158c82-5595-45cb-abfa-6708fb8d590b-kube-api-access-v52fp\") pod \"install-certs-edpm-multinodeset-edpm-compute-global-76wj2\" (UID: \"40158c82-5595-45cb-abfa-6708fb8d590b\") " pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-global-76wj2" Jan 20 18:02:10 crc kubenswrapper[4558]: I0120 18:02:10.926328 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: 
\"kubernetes.io/secret/40158c82-5595-45cb-abfa-6708fb8d590b-ssh-key-edpm-compute-global\") pod \"install-certs-edpm-multinodeset-edpm-compute-global-76wj2\" (UID: \"40158c82-5595-45cb-abfa-6708fb8d590b\") " pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-global-76wj2" Jan 20 18:02:11 crc kubenswrapper[4558]: I0120 18:02:11.026960 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40158c82-5595-45cb-abfa-6708fb8d590b-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-multinodeset-edpm-compute-global-76wj2\" (UID: \"40158c82-5595-45cb-abfa-6708fb8d590b\") " pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-global-76wj2" Jan 20 18:02:11 crc kubenswrapper[4558]: I0120 18:02:11.027109 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40158c82-5595-45cb-abfa-6708fb8d590b-ovn-combined-ca-bundle\") pod \"install-certs-edpm-multinodeset-edpm-compute-global-76wj2\" (UID: \"40158c82-5595-45cb-abfa-6708fb8d590b\") " pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-global-76wj2" Jan 20 18:02:11 crc kubenswrapper[4558]: I0120 18:02:11.027219 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40158c82-5595-45cb-abfa-6708fb8d590b-nova-combined-ca-bundle\") pod \"install-certs-edpm-multinodeset-edpm-compute-global-76wj2\" (UID: \"40158c82-5595-45cb-abfa-6708fb8d590b\") " pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-global-76wj2" Jan 20 18:02:11 crc kubenswrapper[4558]: I0120 18:02:11.027319 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40158c82-5595-45cb-abfa-6708fb8d590b-neutron-sriov-combined-ca-bundle\") pod \"install-certs-edpm-multinodeset-edpm-compute-global-76wj2\" (UID: \"40158c82-5595-45cb-abfa-6708fb8d590b\") " pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-global-76wj2" Jan 20 18:02:11 crc kubenswrapper[4558]: I0120 18:02:11.027392 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/40158c82-5595-45cb-abfa-6708fb8d590b-inventory\") pod \"install-certs-edpm-multinodeset-edpm-compute-global-76wj2\" (UID: \"40158c82-5595-45cb-abfa-6708fb8d590b\") " pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-global-76wj2" Jan 20 18:02:11 crc kubenswrapper[4558]: I0120 18:02:11.027509 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40158c82-5595-45cb-abfa-6708fb8d590b-neutron-dhcp-combined-ca-bundle\") pod \"install-certs-edpm-multinodeset-edpm-compute-global-76wj2\" (UID: \"40158c82-5595-45cb-abfa-6708fb8d590b\") " pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-global-76wj2" Jan 20 18:02:11 crc kubenswrapper[4558]: I0120 18:02:11.027589 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-global-service-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40158c82-5595-45cb-abfa-6708fb8d590b-custom-global-service-combined-ca-bundle\") pod \"install-certs-edpm-multinodeset-edpm-compute-global-76wj2\" (UID: 
\"40158c82-5595-45cb-abfa-6708fb8d590b\") " pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-global-76wj2" Jan 20 18:02:11 crc kubenswrapper[4558]: I0120 18:02:11.027659 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v52fp\" (UniqueName: \"kubernetes.io/projected/40158c82-5595-45cb-abfa-6708fb8d590b-kube-api-access-v52fp\") pod \"install-certs-edpm-multinodeset-edpm-compute-global-76wj2\" (UID: \"40158c82-5595-45cb-abfa-6708fb8d590b\") " pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-global-76wj2" Jan 20 18:02:11 crc kubenswrapper[4558]: I0120 18:02:11.027739 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/40158c82-5595-45cb-abfa-6708fb8d590b-ssh-key-edpm-compute-global\") pod \"install-certs-edpm-multinodeset-edpm-compute-global-76wj2\" (UID: \"40158c82-5595-45cb-abfa-6708fb8d590b\") " pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-global-76wj2" Jan 20 18:02:11 crc kubenswrapper[4558]: I0120 18:02:11.027829 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40158c82-5595-45cb-abfa-6708fb8d590b-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-multinodeset-edpm-compute-global-76wj2\" (UID: \"40158c82-5595-45cb-abfa-6708fb8d590b\") " pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-global-76wj2" Jan 20 18:02:11 crc kubenswrapper[4558]: I0120 18:02:11.027939 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40158c82-5595-45cb-abfa-6708fb8d590b-neutron-ovn-combined-ca-bundle\") pod \"install-certs-edpm-multinodeset-edpm-compute-global-76wj2\" (UID: \"40158c82-5595-45cb-abfa-6708fb8d590b\") " pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-global-76wj2" Jan 20 18:02:11 crc kubenswrapper[4558]: I0120 18:02:11.028008 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40158c82-5595-45cb-abfa-6708fb8d590b-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-multinodeset-edpm-compute-global-76wj2\" (UID: \"40158c82-5595-45cb-abfa-6708fb8d590b\") " pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-global-76wj2" Jan 20 18:02:11 crc kubenswrapper[4558]: I0120 18:02:11.033199 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40158c82-5595-45cb-abfa-6708fb8d590b-ovn-combined-ca-bundle\") pod \"install-certs-edpm-multinodeset-edpm-compute-global-76wj2\" (UID: \"40158c82-5595-45cb-abfa-6708fb8d590b\") " pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-global-76wj2" Jan 20 18:02:11 crc kubenswrapper[4558]: I0120 18:02:11.033798 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40158c82-5595-45cb-abfa-6708fb8d590b-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-multinodeset-edpm-compute-global-76wj2\" (UID: \"40158c82-5595-45cb-abfa-6708fb8d590b\") " pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-global-76wj2" Jan 20 18:02:11 crc kubenswrapper[4558]: I0120 18:02:11.033951 4558 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40158c82-5595-45cb-abfa-6708fb8d590b-neutron-dhcp-combined-ca-bundle\") pod \"install-certs-edpm-multinodeset-edpm-compute-global-76wj2\" (UID: \"40158c82-5595-45cb-abfa-6708fb8d590b\") " pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-global-76wj2" Jan 20 18:02:11 crc kubenswrapper[4558]: I0120 18:02:11.034837 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40158c82-5595-45cb-abfa-6708fb8d590b-neutron-ovn-combined-ca-bundle\") pod \"install-certs-edpm-multinodeset-edpm-compute-global-76wj2\" (UID: \"40158c82-5595-45cb-abfa-6708fb8d590b\") " pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-global-76wj2" Jan 20 18:02:11 crc kubenswrapper[4558]: I0120 18:02:11.035375 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40158c82-5595-45cb-abfa-6708fb8d590b-nova-combined-ca-bundle\") pod \"install-certs-edpm-multinodeset-edpm-compute-global-76wj2\" (UID: \"40158c82-5595-45cb-abfa-6708fb8d590b\") " pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-global-76wj2" Jan 20 18:02:11 crc kubenswrapper[4558]: I0120 18:02:11.035599 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/40158c82-5595-45cb-abfa-6708fb8d590b-ssh-key-edpm-compute-global\") pod \"install-certs-edpm-multinodeset-edpm-compute-global-76wj2\" (UID: \"40158c82-5595-45cb-abfa-6708fb8d590b\") " pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-global-76wj2" Jan 20 18:02:11 crc kubenswrapper[4558]: I0120 18:02:11.038810 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/40158c82-5595-45cb-abfa-6708fb8d590b-inventory\") pod \"install-certs-edpm-multinodeset-edpm-compute-global-76wj2\" (UID: \"40158c82-5595-45cb-abfa-6708fb8d590b\") " pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-global-76wj2" Jan 20 18:02:11 crc kubenswrapper[4558]: I0120 18:02:11.038873 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40158c82-5595-45cb-abfa-6708fb8d590b-neutron-sriov-combined-ca-bundle\") pod \"install-certs-edpm-multinodeset-edpm-compute-global-76wj2\" (UID: \"40158c82-5595-45cb-abfa-6708fb8d590b\") " pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-global-76wj2" Jan 20 18:02:11 crc kubenswrapper[4558]: I0120 18:02:11.038960 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40158c82-5595-45cb-abfa-6708fb8d590b-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-multinodeset-edpm-compute-global-76wj2\" (UID: \"40158c82-5595-45cb-abfa-6708fb8d590b\") " pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-global-76wj2" Jan 20 18:02:11 crc kubenswrapper[4558]: I0120 18:02:11.039092 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-global-service-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40158c82-5595-45cb-abfa-6708fb8d590b-custom-global-service-combined-ca-bundle\") pod 
\"install-certs-edpm-multinodeset-edpm-compute-global-76wj2\" (UID: \"40158c82-5595-45cb-abfa-6708fb8d590b\") " pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-global-76wj2" Jan 20 18:02:11 crc kubenswrapper[4558]: I0120 18:02:11.039862 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40158c82-5595-45cb-abfa-6708fb8d590b-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-multinodeset-edpm-compute-global-76wj2\" (UID: \"40158c82-5595-45cb-abfa-6708fb8d590b\") " pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-global-76wj2" Jan 20 18:02:11 crc kubenswrapper[4558]: I0120 18:02:11.046304 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v52fp\" (UniqueName: \"kubernetes.io/projected/40158c82-5595-45cb-abfa-6708fb8d590b-kube-api-access-v52fp\") pod \"install-certs-edpm-multinodeset-edpm-compute-global-76wj2\" (UID: \"40158c82-5595-45cb-abfa-6708fb8d590b\") " pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-global-76wj2" Jan 20 18:02:11 crc kubenswrapper[4558]: I0120 18:02:11.145822 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-global-76wj2" Jan 20 18:02:11 crc kubenswrapper[4558]: I0120 18:02:11.553963 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-global-76wj2"] Jan 20 18:02:11 crc kubenswrapper[4558]: W0120 18:02:11.557354 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod40158c82_5595_45cb_abfa_6708fb8d590b.slice/crio-79e807b09fdd2d0392de80d82b209ff1e7c21f41a5748c64d718c0e3f1bde3af WatchSource:0}: Error finding container 79e807b09fdd2d0392de80d82b209ff1e7c21f41a5748c64d718c0e3f1bde3af: Status 404 returned error can't find the container with id 79e807b09fdd2d0392de80d82b209ff1e7c21f41a5748c64d718c0e3f1bde3af Jan 20 18:02:12 crc kubenswrapper[4558]: I0120 18:02:12.531204 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-global-76wj2" event={"ID":"40158c82-5595-45cb-abfa-6708fb8d590b","Type":"ContainerStarted","Data":"3322f0fcd065a6f620f07166f64f841b34aefae668a3e32059ae25ae20d153d3"} Jan 20 18:02:12 crc kubenswrapper[4558]: I0120 18:02:12.531558 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-global-76wj2" event={"ID":"40158c82-5595-45cb-abfa-6708fb8d590b","Type":"ContainerStarted","Data":"79e807b09fdd2d0392de80d82b209ff1e7c21f41a5748c64d718c0e3f1bde3af"} Jan 20 18:02:12 crc kubenswrapper[4558]: I0120 18:02:12.553680 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-global-76wj2" podStartSLOduration=1.90556753 podStartE2EDuration="2.553654905s" podCreationTimestamp="2026-01-20 18:02:10 +0000 UTC" firstStartedPulling="2026-01-20 18:02:11.560266574 +0000 UTC m=+4825.320604541" lastFinishedPulling="2026-01-20 18:02:12.208353949 +0000 UTC m=+4825.968691916" observedRunningTime="2026-01-20 18:02:12.549351085 +0000 UTC m=+4826.309689052" watchObservedRunningTime="2026-01-20 18:02:12.553654905 +0000 UTC m=+4826.313992872" Jan 20 18:02:13 crc kubenswrapper[4558]: I0120 18:02:13.543554 
4558 generic.go:334] "Generic (PLEG): container finished" podID="40158c82-5595-45cb-abfa-6708fb8d590b" containerID="3322f0fcd065a6f620f07166f64f841b34aefae668a3e32059ae25ae20d153d3" exitCode=0 Jan 20 18:02:13 crc kubenswrapper[4558]: I0120 18:02:13.543665 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-global-76wj2" event={"ID":"40158c82-5595-45cb-abfa-6708fb8d590b","Type":"ContainerDied","Data":"3322f0fcd065a6f620f07166f64f841b34aefae668a3e32059ae25ae20d153d3"} Jan 20 18:02:14 crc kubenswrapper[4558]: I0120 18:02:14.811356 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-global-76wj2" Jan 20 18:02:14 crc kubenswrapper[4558]: I0120 18:02:14.985597 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/40158c82-5595-45cb-abfa-6708fb8d590b-ssh-key-edpm-compute-global\") pod \"40158c82-5595-45cb-abfa-6708fb8d590b\" (UID: \"40158c82-5595-45cb-abfa-6708fb8d590b\") " Jan 20 18:02:14 crc kubenswrapper[4558]: I0120 18:02:14.985664 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40158c82-5595-45cb-abfa-6708fb8d590b-libvirt-combined-ca-bundle\") pod \"40158c82-5595-45cb-abfa-6708fb8d590b\" (UID: \"40158c82-5595-45cb-abfa-6708fb8d590b\") " Jan 20 18:02:14 crc kubenswrapper[4558]: I0120 18:02:14.985690 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40158c82-5595-45cb-abfa-6708fb8d590b-neutron-metadata-combined-ca-bundle\") pod \"40158c82-5595-45cb-abfa-6708fb8d590b\" (UID: \"40158c82-5595-45cb-abfa-6708fb8d590b\") " Jan 20 18:02:14 crc kubenswrapper[4558]: I0120 18:02:14.985926 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40158c82-5595-45cb-abfa-6708fb8d590b-neutron-dhcp-combined-ca-bundle\") pod \"40158c82-5595-45cb-abfa-6708fb8d590b\" (UID: \"40158c82-5595-45cb-abfa-6708fb8d590b\") " Jan 20 18:02:14 crc kubenswrapper[4558]: I0120 18:02:14.985984 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"custom-global-service-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40158c82-5595-45cb-abfa-6708fb8d590b-custom-global-service-combined-ca-bundle\") pod \"40158c82-5595-45cb-abfa-6708fb8d590b\" (UID: \"40158c82-5595-45cb-abfa-6708fb8d590b\") " Jan 20 18:02:14 crc kubenswrapper[4558]: I0120 18:02:14.986014 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v52fp\" (UniqueName: \"kubernetes.io/projected/40158c82-5595-45cb-abfa-6708fb8d590b-kube-api-access-v52fp\") pod \"40158c82-5595-45cb-abfa-6708fb8d590b\" (UID: \"40158c82-5595-45cb-abfa-6708fb8d590b\") " Jan 20 18:02:14 crc kubenswrapper[4558]: I0120 18:02:14.986049 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40158c82-5595-45cb-abfa-6708fb8d590b-bootstrap-combined-ca-bundle\") pod \"40158c82-5595-45cb-abfa-6708fb8d590b\" (UID: \"40158c82-5595-45cb-abfa-6708fb8d590b\") " Jan 20 18:02:14 crc kubenswrapper[4558]: I0120 18:02:14.986070 4558 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40158c82-5595-45cb-abfa-6708fb8d590b-neutron-sriov-combined-ca-bundle\") pod \"40158c82-5595-45cb-abfa-6708fb8d590b\" (UID: \"40158c82-5595-45cb-abfa-6708fb8d590b\") " Jan 20 18:02:14 crc kubenswrapper[4558]: I0120 18:02:14.986104 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40158c82-5595-45cb-abfa-6708fb8d590b-neutron-ovn-combined-ca-bundle\") pod \"40158c82-5595-45cb-abfa-6708fb8d590b\" (UID: \"40158c82-5595-45cb-abfa-6708fb8d590b\") " Jan 20 18:02:14 crc kubenswrapper[4558]: I0120 18:02:14.986136 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/40158c82-5595-45cb-abfa-6708fb8d590b-inventory\") pod \"40158c82-5595-45cb-abfa-6708fb8d590b\" (UID: \"40158c82-5595-45cb-abfa-6708fb8d590b\") " Jan 20 18:02:14 crc kubenswrapper[4558]: I0120 18:02:14.986175 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40158c82-5595-45cb-abfa-6708fb8d590b-nova-combined-ca-bundle\") pod \"40158c82-5595-45cb-abfa-6708fb8d590b\" (UID: \"40158c82-5595-45cb-abfa-6708fb8d590b\") " Jan 20 18:02:14 crc kubenswrapper[4558]: I0120 18:02:14.986209 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40158c82-5595-45cb-abfa-6708fb8d590b-ovn-combined-ca-bundle\") pod \"40158c82-5595-45cb-abfa-6708fb8d590b\" (UID: \"40158c82-5595-45cb-abfa-6708fb8d590b\") " Jan 20 18:02:14 crc kubenswrapper[4558]: I0120 18:02:14.992520 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40158c82-5595-45cb-abfa-6708fb8d590b-neutron-dhcp-combined-ca-bundle" (OuterVolumeSpecName: "neutron-dhcp-combined-ca-bundle") pod "40158c82-5595-45cb-abfa-6708fb8d590b" (UID: "40158c82-5595-45cb-abfa-6708fb8d590b"). InnerVolumeSpecName "neutron-dhcp-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:02:14 crc kubenswrapper[4558]: I0120 18:02:14.992581 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40158c82-5595-45cb-abfa-6708fb8d590b-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "40158c82-5595-45cb-abfa-6708fb8d590b" (UID: "40158c82-5595-45cb-abfa-6708fb8d590b"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:02:14 crc kubenswrapper[4558]: I0120 18:02:14.992858 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40158c82-5595-45cb-abfa-6708fb8d590b-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "40158c82-5595-45cb-abfa-6708fb8d590b" (UID: "40158c82-5595-45cb-abfa-6708fb8d590b"). InnerVolumeSpecName "libvirt-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:02:14 crc kubenswrapper[4558]: I0120 18:02:14.993042 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40158c82-5595-45cb-abfa-6708fb8d590b-neutron-sriov-combined-ca-bundle" (OuterVolumeSpecName: "neutron-sriov-combined-ca-bundle") pod "40158c82-5595-45cb-abfa-6708fb8d590b" (UID: "40158c82-5595-45cb-abfa-6708fb8d590b"). InnerVolumeSpecName "neutron-sriov-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:02:14 crc kubenswrapper[4558]: I0120 18:02:14.993393 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40158c82-5595-45cb-abfa-6708fb8d590b-neutron-ovn-combined-ca-bundle" (OuterVolumeSpecName: "neutron-ovn-combined-ca-bundle") pod "40158c82-5595-45cb-abfa-6708fb8d590b" (UID: "40158c82-5595-45cb-abfa-6708fb8d590b"). InnerVolumeSpecName "neutron-ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:02:14 crc kubenswrapper[4558]: I0120 18:02:14.994060 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40158c82-5595-45cb-abfa-6708fb8d590b-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "40158c82-5595-45cb-abfa-6708fb8d590b" (UID: "40158c82-5595-45cb-abfa-6708fb8d590b"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:02:14 crc kubenswrapper[4558]: I0120 18:02:14.994148 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40158c82-5595-45cb-abfa-6708fb8d590b-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "40158c82-5595-45cb-abfa-6708fb8d590b" (UID: "40158c82-5595-45cb-abfa-6708fb8d590b"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:02:14 crc kubenswrapper[4558]: I0120 18:02:14.994227 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40158c82-5595-45cb-abfa-6708fb8d590b-custom-global-service-combined-ca-bundle" (OuterVolumeSpecName: "custom-global-service-combined-ca-bundle") pod "40158c82-5595-45cb-abfa-6708fb8d590b" (UID: "40158c82-5595-45cb-abfa-6708fb8d590b"). InnerVolumeSpecName "custom-global-service-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:02:14 crc kubenswrapper[4558]: I0120 18:02:14.994315 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40158c82-5595-45cb-abfa-6708fb8d590b-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "40158c82-5595-45cb-abfa-6708fb8d590b" (UID: "40158c82-5595-45cb-abfa-6708fb8d590b"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:02:14 crc kubenswrapper[4558]: I0120 18:02:14.994836 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/40158c82-5595-45cb-abfa-6708fb8d590b-kube-api-access-v52fp" (OuterVolumeSpecName: "kube-api-access-v52fp") pod "40158c82-5595-45cb-abfa-6708fb8d590b" (UID: "40158c82-5595-45cb-abfa-6708fb8d590b"). InnerVolumeSpecName "kube-api-access-v52fp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:02:15 crc kubenswrapper[4558]: I0120 18:02:15.008402 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40158c82-5595-45cb-abfa-6708fb8d590b-ssh-key-edpm-compute-global" (OuterVolumeSpecName: "ssh-key-edpm-compute-global") pod "40158c82-5595-45cb-abfa-6708fb8d590b" (UID: "40158c82-5595-45cb-abfa-6708fb8d590b"). InnerVolumeSpecName "ssh-key-edpm-compute-global". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:02:15 crc kubenswrapper[4558]: I0120 18:02:15.008750 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40158c82-5595-45cb-abfa-6708fb8d590b-inventory" (OuterVolumeSpecName: "inventory") pod "40158c82-5595-45cb-abfa-6708fb8d590b" (UID: "40158c82-5595-45cb-abfa-6708fb8d590b"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:02:15 crc kubenswrapper[4558]: I0120 18:02:15.088720 4558 reconciler_common.go:293] "Volume detached for volume \"custom-global-service-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40158c82-5595-45cb-abfa-6708fb8d590b-custom-global-service-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:02:15 crc kubenswrapper[4558]: I0120 18:02:15.088762 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v52fp\" (UniqueName: \"kubernetes.io/projected/40158c82-5595-45cb-abfa-6708fb8d590b-kube-api-access-v52fp\") on node \"crc\" DevicePath \"\"" Jan 20 18:02:15 crc kubenswrapper[4558]: I0120 18:02:15.088777 4558 reconciler_common.go:293] "Volume detached for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40158c82-5595-45cb-abfa-6708fb8d590b-neutron-sriov-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:02:15 crc kubenswrapper[4558]: I0120 18:02:15.088789 4558 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40158c82-5595-45cb-abfa-6708fb8d590b-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:02:15 crc kubenswrapper[4558]: I0120 18:02:15.088801 4558 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40158c82-5595-45cb-abfa-6708fb8d590b-neutron-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:02:15 crc kubenswrapper[4558]: I0120 18:02:15.088814 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/40158c82-5595-45cb-abfa-6708fb8d590b-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:02:15 crc kubenswrapper[4558]: I0120 18:02:15.088826 4558 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40158c82-5595-45cb-abfa-6708fb8d590b-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:02:15 crc kubenswrapper[4558]: I0120 18:02:15.088838 4558 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40158c82-5595-45cb-abfa-6708fb8d590b-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:02:15 crc kubenswrapper[4558]: I0120 18:02:15.088847 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/40158c82-5595-45cb-abfa-6708fb8d590b-ssh-key-edpm-compute-global\") on node \"crc\" DevicePath \"\"" 
Jan 20 18:02:15 crc kubenswrapper[4558]: I0120 18:02:15.088856 4558 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40158c82-5595-45cb-abfa-6708fb8d590b-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:02:15 crc kubenswrapper[4558]: I0120 18:02:15.088864 4558 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40158c82-5595-45cb-abfa-6708fb8d590b-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:02:15 crc kubenswrapper[4558]: I0120 18:02:15.088874 4558 reconciler_common.go:293] "Volume detached for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40158c82-5595-45cb-abfa-6708fb8d590b-neutron-dhcp-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:02:15 crc kubenswrapper[4558]: I0120 18:02:15.563671 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-global-76wj2" event={"ID":"40158c82-5595-45cb-abfa-6708fb8d590b","Type":"ContainerDied","Data":"79e807b09fdd2d0392de80d82b209ff1e7c21f41a5748c64d718c0e3f1bde3af"} Jan 20 18:02:15 crc kubenswrapper[4558]: I0120 18:02:15.563707 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-global-76wj2" Jan 20 18:02:15 crc kubenswrapper[4558]: I0120 18:02:15.563726 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="79e807b09fdd2d0392de80d82b209ff1e7c21f41a5748c64d718c0e3f1bde3af" Jan 20 18:02:15 crc kubenswrapper[4558]: I0120 18:02:15.612745 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ovn-edpm-multinodeset-edpm-compute-global-5sw7k"] Jan 20 18:02:15 crc kubenswrapper[4558]: E0120 18:02:15.613121 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40158c82-5595-45cb-abfa-6708fb8d590b" containerName="install-certs-edpm-multinodeset-edpm-compute-global" Jan 20 18:02:15 crc kubenswrapper[4558]: I0120 18:02:15.613142 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="40158c82-5595-45cb-abfa-6708fb8d590b" containerName="install-certs-edpm-multinodeset-edpm-compute-global" Jan 20 18:02:15 crc kubenswrapper[4558]: I0120 18:02:15.613321 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="40158c82-5595-45cb-abfa-6708fb8d590b" containerName="install-certs-edpm-multinodeset-edpm-compute-global" Jan 20 18:02:15 crc kubenswrapper[4558]: I0120 18:02:15.613827 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovn-edpm-multinodeset-edpm-compute-global-5sw7k" Jan 20 18:02:15 crc kubenswrapper[4558]: I0120 18:02:15.616527 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"combined-ca-bundle" Jan 20 18:02:15 crc kubenswrapper[4558]: I0120 18:02:15.617673 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 18:02:15 crc kubenswrapper[4558]: I0120 18:02:15.617881 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovncontroller-config" Jan 20 18:02:15 crc kubenswrapper[4558]: I0120 18:02:15.618564 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 18:02:15 crc kubenswrapper[4558]: I0120 18:02:15.618749 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-edpm-compute-global" Jan 20 18:02:15 crc kubenswrapper[4558]: I0120 18:02:15.619263 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"edpm-compute-global-dockercfg-wkdxg" Jan 20 18:02:15 crc kubenswrapper[4558]: I0120 18:02:15.626441 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovn-edpm-multinodeset-edpm-compute-global-5sw7k"] Jan 20 18:02:15 crc kubenswrapper[4558]: I0120 18:02:15.698709 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k4csc\" (UniqueName: \"kubernetes.io/projected/9daabf6f-11b9-4e54-a514-9ef0c9aa8c1a-kube-api-access-k4csc\") pod \"ovn-edpm-multinodeset-edpm-compute-global-5sw7k\" (UID: \"9daabf6f-11b9-4e54-a514-9ef0c9aa8c1a\") " pod="openstack-kuttl-tests/ovn-edpm-multinodeset-edpm-compute-global-5sw7k" Jan 20 18:02:15 crc kubenswrapper[4558]: I0120 18:02:15.698770 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9daabf6f-11b9-4e54-a514-9ef0c9aa8c1a-ovn-combined-ca-bundle\") pod \"ovn-edpm-multinodeset-edpm-compute-global-5sw7k\" (UID: \"9daabf6f-11b9-4e54-a514-9ef0c9aa8c1a\") " pod="openstack-kuttl-tests/ovn-edpm-multinodeset-edpm-compute-global-5sw7k" Jan 20 18:02:15 crc kubenswrapper[4558]: I0120 18:02:15.698816 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/9daabf6f-11b9-4e54-a514-9ef0c9aa8c1a-ssh-key-edpm-compute-global\") pod \"ovn-edpm-multinodeset-edpm-compute-global-5sw7k\" (UID: \"9daabf6f-11b9-4e54-a514-9ef0c9aa8c1a\") " pod="openstack-kuttl-tests/ovn-edpm-multinodeset-edpm-compute-global-5sw7k" Jan 20 18:02:15 crc kubenswrapper[4558]: I0120 18:02:15.698908 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9daabf6f-11b9-4e54-a514-9ef0c9aa8c1a-inventory\") pod \"ovn-edpm-multinodeset-edpm-compute-global-5sw7k\" (UID: \"9daabf6f-11b9-4e54-a514-9ef0c9aa8c1a\") " pod="openstack-kuttl-tests/ovn-edpm-multinodeset-edpm-compute-global-5sw7k" Jan 20 18:02:15 crc kubenswrapper[4558]: I0120 18:02:15.699052 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: 
\"kubernetes.io/configmap/9daabf6f-11b9-4e54-a514-9ef0c9aa8c1a-ovncontroller-config-0\") pod \"ovn-edpm-multinodeset-edpm-compute-global-5sw7k\" (UID: \"9daabf6f-11b9-4e54-a514-9ef0c9aa8c1a\") " pod="openstack-kuttl-tests/ovn-edpm-multinodeset-edpm-compute-global-5sw7k" Jan 20 18:02:15 crc kubenswrapper[4558]: I0120 18:02:15.801160 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/9daabf6f-11b9-4e54-a514-9ef0c9aa8c1a-ovncontroller-config-0\") pod \"ovn-edpm-multinodeset-edpm-compute-global-5sw7k\" (UID: \"9daabf6f-11b9-4e54-a514-9ef0c9aa8c1a\") " pod="openstack-kuttl-tests/ovn-edpm-multinodeset-edpm-compute-global-5sw7k" Jan 20 18:02:15 crc kubenswrapper[4558]: I0120 18:02:15.801326 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k4csc\" (UniqueName: \"kubernetes.io/projected/9daabf6f-11b9-4e54-a514-9ef0c9aa8c1a-kube-api-access-k4csc\") pod \"ovn-edpm-multinodeset-edpm-compute-global-5sw7k\" (UID: \"9daabf6f-11b9-4e54-a514-9ef0c9aa8c1a\") " pod="openstack-kuttl-tests/ovn-edpm-multinodeset-edpm-compute-global-5sw7k" Jan 20 18:02:15 crc kubenswrapper[4558]: I0120 18:02:15.801377 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9daabf6f-11b9-4e54-a514-9ef0c9aa8c1a-ovn-combined-ca-bundle\") pod \"ovn-edpm-multinodeset-edpm-compute-global-5sw7k\" (UID: \"9daabf6f-11b9-4e54-a514-9ef0c9aa8c1a\") " pod="openstack-kuttl-tests/ovn-edpm-multinodeset-edpm-compute-global-5sw7k" Jan 20 18:02:15 crc kubenswrapper[4558]: I0120 18:02:15.801463 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/9daabf6f-11b9-4e54-a514-9ef0c9aa8c1a-ssh-key-edpm-compute-global\") pod \"ovn-edpm-multinodeset-edpm-compute-global-5sw7k\" (UID: \"9daabf6f-11b9-4e54-a514-9ef0c9aa8c1a\") " pod="openstack-kuttl-tests/ovn-edpm-multinodeset-edpm-compute-global-5sw7k" Jan 20 18:02:15 crc kubenswrapper[4558]: I0120 18:02:15.801498 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9daabf6f-11b9-4e54-a514-9ef0c9aa8c1a-inventory\") pod \"ovn-edpm-multinodeset-edpm-compute-global-5sw7k\" (UID: \"9daabf6f-11b9-4e54-a514-9ef0c9aa8c1a\") " pod="openstack-kuttl-tests/ovn-edpm-multinodeset-edpm-compute-global-5sw7k" Jan 20 18:02:15 crc kubenswrapper[4558]: I0120 18:02:15.802123 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/9daabf6f-11b9-4e54-a514-9ef0c9aa8c1a-ovncontroller-config-0\") pod \"ovn-edpm-multinodeset-edpm-compute-global-5sw7k\" (UID: \"9daabf6f-11b9-4e54-a514-9ef0c9aa8c1a\") " pod="openstack-kuttl-tests/ovn-edpm-multinodeset-edpm-compute-global-5sw7k" Jan 20 18:02:15 crc kubenswrapper[4558]: I0120 18:02:15.807667 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/9daabf6f-11b9-4e54-a514-9ef0c9aa8c1a-ssh-key-edpm-compute-global\") pod \"ovn-edpm-multinodeset-edpm-compute-global-5sw7k\" (UID: \"9daabf6f-11b9-4e54-a514-9ef0c9aa8c1a\") " pod="openstack-kuttl-tests/ovn-edpm-multinodeset-edpm-compute-global-5sw7k" Jan 20 18:02:15 crc kubenswrapper[4558]: I0120 18:02:15.807690 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9daabf6f-11b9-4e54-a514-9ef0c9aa8c1a-inventory\") pod \"ovn-edpm-multinodeset-edpm-compute-global-5sw7k\" (UID: \"9daabf6f-11b9-4e54-a514-9ef0c9aa8c1a\") " pod="openstack-kuttl-tests/ovn-edpm-multinodeset-edpm-compute-global-5sw7k" Jan 20 18:02:15 crc kubenswrapper[4558]: I0120 18:02:15.808202 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9daabf6f-11b9-4e54-a514-9ef0c9aa8c1a-ovn-combined-ca-bundle\") pod \"ovn-edpm-multinodeset-edpm-compute-global-5sw7k\" (UID: \"9daabf6f-11b9-4e54-a514-9ef0c9aa8c1a\") " pod="openstack-kuttl-tests/ovn-edpm-multinodeset-edpm-compute-global-5sw7k" Jan 20 18:02:15 crc kubenswrapper[4558]: I0120 18:02:15.816009 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k4csc\" (UniqueName: \"kubernetes.io/projected/9daabf6f-11b9-4e54-a514-9ef0c9aa8c1a-kube-api-access-k4csc\") pod \"ovn-edpm-multinodeset-edpm-compute-global-5sw7k\" (UID: \"9daabf6f-11b9-4e54-a514-9ef0c9aa8c1a\") " pod="openstack-kuttl-tests/ovn-edpm-multinodeset-edpm-compute-global-5sw7k" Jan 20 18:02:15 crc kubenswrapper[4558]: I0120 18:02:15.929735 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovn-edpm-multinodeset-edpm-compute-global-5sw7k" Jan 20 18:02:16 crc kubenswrapper[4558]: I0120 18:02:16.311149 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovn-edpm-multinodeset-edpm-compute-global-5sw7k"] Jan 20 18:02:16 crc kubenswrapper[4558]: I0120 18:02:16.576980 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-edpm-multinodeset-edpm-compute-global-5sw7k" event={"ID":"9daabf6f-11b9-4e54-a514-9ef0c9aa8c1a","Type":"ContainerStarted","Data":"3fce051ede8552724e9aa0a78fb6090f4933fef07cfc3c6d0d234564459617c1"} Jan 20 18:02:17 crc kubenswrapper[4558]: I0120 18:02:17.587779 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-edpm-multinodeset-edpm-compute-global-5sw7k" event={"ID":"9daabf6f-11b9-4e54-a514-9ef0c9aa8c1a","Type":"ContainerStarted","Data":"7e72c105d94fc259543c84539cc70f2de6cdb6e3c353a725bb352f82454a1602"} Jan 20 18:02:17 crc kubenswrapper[4558]: I0120 18:02:17.612383 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ovn-edpm-multinodeset-edpm-compute-global-5sw7k" podStartSLOduration=2.052329385 podStartE2EDuration="2.612359866s" podCreationTimestamp="2026-01-20 18:02:15 +0000 UTC" firstStartedPulling="2026-01-20 18:02:16.316767081 +0000 UTC m=+4830.077105048" lastFinishedPulling="2026-01-20 18:02:16.876797562 +0000 UTC m=+4830.637135529" observedRunningTime="2026-01-20 18:02:17.60165852 +0000 UTC m=+4831.361996488" watchObservedRunningTime="2026-01-20 18:02:17.612359866 +0000 UTC m=+4831.372697834" Jan 20 18:02:18 crc kubenswrapper[4558]: I0120 18:02:18.597931 4558 generic.go:334] "Generic (PLEG): container finished" podID="9daabf6f-11b9-4e54-a514-9ef0c9aa8c1a" containerID="7e72c105d94fc259543c84539cc70f2de6cdb6e3c353a725bb352f82454a1602" exitCode=0 Jan 20 18:02:18 crc kubenswrapper[4558]: I0120 18:02:18.597974 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-edpm-multinodeset-edpm-compute-global-5sw7k" event={"ID":"9daabf6f-11b9-4e54-a514-9ef0c9aa8c1a","Type":"ContainerDied","Data":"7e72c105d94fc259543c84539cc70f2de6cdb6e3c353a725bb352f82454a1602"} Jan 20 18:02:19 crc 
kubenswrapper[4558]: I0120 18:02:19.848267 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovn-edpm-multinodeset-edpm-compute-global-5sw7k" Jan 20 18:02:19 crc kubenswrapper[4558]: I0120 18:02:19.863615 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k4csc\" (UniqueName: \"kubernetes.io/projected/9daabf6f-11b9-4e54-a514-9ef0c9aa8c1a-kube-api-access-k4csc\") pod \"9daabf6f-11b9-4e54-a514-9ef0c9aa8c1a\" (UID: \"9daabf6f-11b9-4e54-a514-9ef0c9aa8c1a\") " Jan 20 18:02:19 crc kubenswrapper[4558]: I0120 18:02:19.869407 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9daabf6f-11b9-4e54-a514-9ef0c9aa8c1a-kube-api-access-k4csc" (OuterVolumeSpecName: "kube-api-access-k4csc") pod "9daabf6f-11b9-4e54-a514-9ef0c9aa8c1a" (UID: "9daabf6f-11b9-4e54-a514-9ef0c9aa8c1a"). InnerVolumeSpecName "kube-api-access-k4csc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:02:19 crc kubenswrapper[4558]: I0120 18:02:19.964707 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9daabf6f-11b9-4e54-a514-9ef0c9aa8c1a-inventory\") pod \"9daabf6f-11b9-4e54-a514-9ef0c9aa8c1a\" (UID: \"9daabf6f-11b9-4e54-a514-9ef0c9aa8c1a\") " Jan 20 18:02:19 crc kubenswrapper[4558]: I0120 18:02:19.964855 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9daabf6f-11b9-4e54-a514-9ef0c9aa8c1a-ovn-combined-ca-bundle\") pod \"9daabf6f-11b9-4e54-a514-9ef0c9aa8c1a\" (UID: \"9daabf6f-11b9-4e54-a514-9ef0c9aa8c1a\") " Jan 20 18:02:19 crc kubenswrapper[4558]: I0120 18:02:19.965037 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/9daabf6f-11b9-4e54-a514-9ef0c9aa8c1a-ssh-key-edpm-compute-global\") pod \"9daabf6f-11b9-4e54-a514-9ef0c9aa8c1a\" (UID: \"9daabf6f-11b9-4e54-a514-9ef0c9aa8c1a\") " Jan 20 18:02:19 crc kubenswrapper[4558]: I0120 18:02:19.965129 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/9daabf6f-11b9-4e54-a514-9ef0c9aa8c1a-ovncontroller-config-0\") pod \"9daabf6f-11b9-4e54-a514-9ef0c9aa8c1a\" (UID: \"9daabf6f-11b9-4e54-a514-9ef0c9aa8c1a\") " Jan 20 18:02:19 crc kubenswrapper[4558]: I0120 18:02:19.965487 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k4csc\" (UniqueName: \"kubernetes.io/projected/9daabf6f-11b9-4e54-a514-9ef0c9aa8c1a-kube-api-access-k4csc\") on node \"crc\" DevicePath \"\"" Jan 20 18:02:19 crc kubenswrapper[4558]: I0120 18:02:19.967810 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9daabf6f-11b9-4e54-a514-9ef0c9aa8c1a-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "9daabf6f-11b9-4e54-a514-9ef0c9aa8c1a" (UID: "9daabf6f-11b9-4e54-a514-9ef0c9aa8c1a"). InnerVolumeSpecName "ovn-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:02:19 crc kubenswrapper[4558]: I0120 18:02:19.982069 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9daabf6f-11b9-4e54-a514-9ef0c9aa8c1a-inventory" (OuterVolumeSpecName: "inventory") pod "9daabf6f-11b9-4e54-a514-9ef0c9aa8c1a" (UID: "9daabf6f-11b9-4e54-a514-9ef0c9aa8c1a"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:02:19 crc kubenswrapper[4558]: I0120 18:02:19.983209 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9daabf6f-11b9-4e54-a514-9ef0c9aa8c1a-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "9daabf6f-11b9-4e54-a514-9ef0c9aa8c1a" (UID: "9daabf6f-11b9-4e54-a514-9ef0c9aa8c1a"). InnerVolumeSpecName "ovncontroller-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:02:19 crc kubenswrapper[4558]: I0120 18:02:19.983836 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9daabf6f-11b9-4e54-a514-9ef0c9aa8c1a-ssh-key-edpm-compute-global" (OuterVolumeSpecName: "ssh-key-edpm-compute-global") pod "9daabf6f-11b9-4e54-a514-9ef0c9aa8c1a" (UID: "9daabf6f-11b9-4e54-a514-9ef0c9aa8c1a"). InnerVolumeSpecName "ssh-key-edpm-compute-global". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:02:20 crc kubenswrapper[4558]: I0120 18:02:20.066344 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/9daabf6f-11b9-4e54-a514-9ef0c9aa8c1a-ssh-key-edpm-compute-global\") on node \"crc\" DevicePath \"\"" Jan 20 18:02:20 crc kubenswrapper[4558]: I0120 18:02:20.066376 4558 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/9daabf6f-11b9-4e54-a514-9ef0c9aa8c1a-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Jan 20 18:02:20 crc kubenswrapper[4558]: I0120 18:02:20.066390 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9daabf6f-11b9-4e54-a514-9ef0c9aa8c1a-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:02:20 crc kubenswrapper[4558]: I0120 18:02:20.066403 4558 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9daabf6f-11b9-4e54-a514-9ef0c9aa8c1a-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:02:20 crc kubenswrapper[4558]: I0120 18:02:20.615797 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-edpm-multinodeset-edpm-compute-global-5sw7k" event={"ID":"9daabf6f-11b9-4e54-a514-9ef0c9aa8c1a","Type":"ContainerDied","Data":"3fce051ede8552724e9aa0a78fb6090f4933fef07cfc3c6d0d234564459617c1"} Jan 20 18:02:20 crc kubenswrapper[4558]: I0120 18:02:20.616044 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3fce051ede8552724e9aa0a78fb6090f4933fef07cfc3c6d0d234564459617c1" Jan 20 18:02:20 crc kubenswrapper[4558]: I0120 18:02:20.615837 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovn-edpm-multinodeset-edpm-compute-global-5sw7k" Jan 20 18:02:20 crc kubenswrapper[4558]: I0120 18:02:20.671942 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-global-gvk2n"] Jan 20 18:02:20 crc kubenswrapper[4558]: E0120 18:02:20.672746 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9daabf6f-11b9-4e54-a514-9ef0c9aa8c1a" containerName="ovn-edpm-multinodeset-edpm-compute-global" Jan 20 18:02:20 crc kubenswrapper[4558]: I0120 18:02:20.672774 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9daabf6f-11b9-4e54-a514-9ef0c9aa8c1a" containerName="ovn-edpm-multinodeset-edpm-compute-global" Jan 20 18:02:20 crc kubenswrapper[4558]: I0120 18:02:20.673073 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="9daabf6f-11b9-4e54-a514-9ef0c9aa8c1a" containerName="ovn-edpm-multinodeset-edpm-compute-global" Jan 20 18:02:20 crc kubenswrapper[4558]: I0120 18:02:20.673897 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-global-gvk2n" Jan 20 18:02:20 crc kubenswrapper[4558]: I0120 18:02:20.675844 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 18:02:20 crc kubenswrapper[4558]: I0120 18:02:20.675916 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-ovn-metadata-agent-neutron-config" Jan 20 18:02:20 crc kubenswrapper[4558]: I0120 18:02:20.676605 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-edpm-compute-global" Jan 20 18:02:20 crc kubenswrapper[4558]: I0120 18:02:20.678383 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"combined-ca-bundle" Jan 20 18:02:20 crc kubenswrapper[4558]: I0120 18:02:20.678401 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-metadata-neutron-config" Jan 20 18:02:20 crc kubenswrapper[4558]: I0120 18:02:20.679515 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"edpm-compute-global-dockercfg-wkdxg" Jan 20 18:02:20 crc kubenswrapper[4558]: I0120 18:02:20.679736 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 18:02:20 crc kubenswrapper[4558]: I0120 18:02:20.687910 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-global-gvk2n"] Jan 20 18:02:20 crc kubenswrapper[4558]: I0120 18:02:20.775209 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/133f8af1-1bea-41e4-8512-76fc9499aff6-ssh-key-edpm-compute-global\") pod \"neutron-metadata-edpm-multinodeset-edpm-compute-global-gvk2n\" (UID: \"133f8af1-1bea-41e4-8512-76fc9499aff6\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-global-gvk2n" Jan 20 18:02:20 crc kubenswrapper[4558]: I0120 18:02:20.775606 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cn9zq\" (UniqueName: \"kubernetes.io/projected/133f8af1-1bea-41e4-8512-76fc9499aff6-kube-api-access-cn9zq\") pod 
\"neutron-metadata-edpm-multinodeset-edpm-compute-global-gvk2n\" (UID: \"133f8af1-1bea-41e4-8512-76fc9499aff6\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-global-gvk2n" Jan 20 18:02:20 crc kubenswrapper[4558]: I0120 18:02:20.775644 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/133f8af1-1bea-41e4-8512-76fc9499aff6-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-multinodeset-edpm-compute-global-gvk2n\" (UID: \"133f8af1-1bea-41e4-8512-76fc9499aff6\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-global-gvk2n" Jan 20 18:02:20 crc kubenswrapper[4558]: I0120 18:02:20.775676 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/133f8af1-1bea-41e4-8512-76fc9499aff6-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-multinodeset-edpm-compute-global-gvk2n\" (UID: \"133f8af1-1bea-41e4-8512-76fc9499aff6\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-global-gvk2n" Jan 20 18:02:20 crc kubenswrapper[4558]: I0120 18:02:20.775764 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/133f8af1-1bea-41e4-8512-76fc9499aff6-inventory\") pod \"neutron-metadata-edpm-multinodeset-edpm-compute-global-gvk2n\" (UID: \"133f8af1-1bea-41e4-8512-76fc9499aff6\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-global-gvk2n" Jan 20 18:02:20 crc kubenswrapper[4558]: I0120 18:02:20.775841 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/133f8af1-1bea-41e4-8512-76fc9499aff6-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-multinodeset-edpm-compute-global-gvk2n\" (UID: \"133f8af1-1bea-41e4-8512-76fc9499aff6\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-global-gvk2n" Jan 20 18:02:20 crc kubenswrapper[4558]: I0120 18:02:20.776004 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-2\" (UniqueName: \"kubernetes.io/secret/133f8af1-1bea-41e4-8512-76fc9499aff6-nova-metadata-neutron-config-2\") pod \"neutron-metadata-edpm-multinodeset-edpm-compute-global-gvk2n\" (UID: \"133f8af1-1bea-41e4-8512-76fc9499aff6\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-global-gvk2n" Jan 20 18:02:20 crc kubenswrapper[4558]: I0120 18:02:20.776069 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-1\" (UniqueName: \"kubernetes.io/secret/133f8af1-1bea-41e4-8512-76fc9499aff6-nova-metadata-neutron-config-1\") pod \"neutron-metadata-edpm-multinodeset-edpm-compute-global-gvk2n\" (UID: \"133f8af1-1bea-41e4-8512-76fc9499aff6\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-global-gvk2n" Jan 20 18:02:20 crc kubenswrapper[4558]: I0120 18:02:20.877272 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-2\" (UniqueName: 
\"kubernetes.io/secret/133f8af1-1bea-41e4-8512-76fc9499aff6-nova-metadata-neutron-config-2\") pod \"neutron-metadata-edpm-multinodeset-edpm-compute-global-gvk2n\" (UID: \"133f8af1-1bea-41e4-8512-76fc9499aff6\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-global-gvk2n" Jan 20 18:02:20 crc kubenswrapper[4558]: I0120 18:02:20.877320 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-1\" (UniqueName: \"kubernetes.io/secret/133f8af1-1bea-41e4-8512-76fc9499aff6-nova-metadata-neutron-config-1\") pod \"neutron-metadata-edpm-multinodeset-edpm-compute-global-gvk2n\" (UID: \"133f8af1-1bea-41e4-8512-76fc9499aff6\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-global-gvk2n" Jan 20 18:02:20 crc kubenswrapper[4558]: I0120 18:02:20.877348 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/133f8af1-1bea-41e4-8512-76fc9499aff6-ssh-key-edpm-compute-global\") pod \"neutron-metadata-edpm-multinodeset-edpm-compute-global-gvk2n\" (UID: \"133f8af1-1bea-41e4-8512-76fc9499aff6\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-global-gvk2n" Jan 20 18:02:20 crc kubenswrapper[4558]: I0120 18:02:20.877379 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cn9zq\" (UniqueName: \"kubernetes.io/projected/133f8af1-1bea-41e4-8512-76fc9499aff6-kube-api-access-cn9zq\") pod \"neutron-metadata-edpm-multinodeset-edpm-compute-global-gvk2n\" (UID: \"133f8af1-1bea-41e4-8512-76fc9499aff6\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-global-gvk2n" Jan 20 18:02:20 crc kubenswrapper[4558]: I0120 18:02:20.877409 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/133f8af1-1bea-41e4-8512-76fc9499aff6-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-multinodeset-edpm-compute-global-gvk2n\" (UID: \"133f8af1-1bea-41e4-8512-76fc9499aff6\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-global-gvk2n" Jan 20 18:02:20 crc kubenswrapper[4558]: I0120 18:02:20.877440 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/133f8af1-1bea-41e4-8512-76fc9499aff6-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-multinodeset-edpm-compute-global-gvk2n\" (UID: \"133f8af1-1bea-41e4-8512-76fc9499aff6\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-global-gvk2n" Jan 20 18:02:20 crc kubenswrapper[4558]: I0120 18:02:20.877471 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/133f8af1-1bea-41e4-8512-76fc9499aff6-inventory\") pod \"neutron-metadata-edpm-multinodeset-edpm-compute-global-gvk2n\" (UID: \"133f8af1-1bea-41e4-8512-76fc9499aff6\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-global-gvk2n" Jan 20 18:02:20 crc kubenswrapper[4558]: I0120 18:02:20.877525 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/133f8af1-1bea-41e4-8512-76fc9499aff6-nova-metadata-neutron-config-0\") pod 
\"neutron-metadata-edpm-multinodeset-edpm-compute-global-gvk2n\" (UID: \"133f8af1-1bea-41e4-8512-76fc9499aff6\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-global-gvk2n" Jan 20 18:02:20 crc kubenswrapper[4558]: I0120 18:02:20.882539 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-2\" (UniqueName: \"kubernetes.io/secret/133f8af1-1bea-41e4-8512-76fc9499aff6-nova-metadata-neutron-config-2\") pod \"neutron-metadata-edpm-multinodeset-edpm-compute-global-gvk2n\" (UID: \"133f8af1-1bea-41e4-8512-76fc9499aff6\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-global-gvk2n" Jan 20 18:02:20 crc kubenswrapper[4558]: I0120 18:02:20.882560 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/133f8af1-1bea-41e4-8512-76fc9499aff6-inventory\") pod \"neutron-metadata-edpm-multinodeset-edpm-compute-global-gvk2n\" (UID: \"133f8af1-1bea-41e4-8512-76fc9499aff6\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-global-gvk2n" Jan 20 18:02:20 crc kubenswrapper[4558]: I0120 18:02:20.882639 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/133f8af1-1bea-41e4-8512-76fc9499aff6-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-multinodeset-edpm-compute-global-gvk2n\" (UID: \"133f8af1-1bea-41e4-8512-76fc9499aff6\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-global-gvk2n" Jan 20 18:02:20 crc kubenswrapper[4558]: I0120 18:02:20.883291 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-1\" (UniqueName: \"kubernetes.io/secret/133f8af1-1bea-41e4-8512-76fc9499aff6-nova-metadata-neutron-config-1\") pod \"neutron-metadata-edpm-multinodeset-edpm-compute-global-gvk2n\" (UID: \"133f8af1-1bea-41e4-8512-76fc9499aff6\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-global-gvk2n" Jan 20 18:02:20 crc kubenswrapper[4558]: I0120 18:02:20.883389 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/133f8af1-1bea-41e4-8512-76fc9499aff6-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-multinodeset-edpm-compute-global-gvk2n\" (UID: \"133f8af1-1bea-41e4-8512-76fc9499aff6\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-global-gvk2n" Jan 20 18:02:20 crc kubenswrapper[4558]: I0120 18:02:20.883505 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/133f8af1-1bea-41e4-8512-76fc9499aff6-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-multinodeset-edpm-compute-global-gvk2n\" (UID: \"133f8af1-1bea-41e4-8512-76fc9499aff6\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-global-gvk2n" Jan 20 18:02:20 crc kubenswrapper[4558]: I0120 18:02:20.883715 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/133f8af1-1bea-41e4-8512-76fc9499aff6-ssh-key-edpm-compute-global\") pod \"neutron-metadata-edpm-multinodeset-edpm-compute-global-gvk2n\" (UID: \"133f8af1-1bea-41e4-8512-76fc9499aff6\") " 
pod="openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-global-gvk2n" Jan 20 18:02:20 crc kubenswrapper[4558]: I0120 18:02:20.893475 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cn9zq\" (UniqueName: \"kubernetes.io/projected/133f8af1-1bea-41e4-8512-76fc9499aff6-kube-api-access-cn9zq\") pod \"neutron-metadata-edpm-multinodeset-edpm-compute-global-gvk2n\" (UID: \"133f8af1-1bea-41e4-8512-76fc9499aff6\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-global-gvk2n" Jan 20 18:02:20 crc kubenswrapper[4558]: I0120 18:02:20.989470 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-global-gvk2n" Jan 20 18:02:21 crc kubenswrapper[4558]: I0120 18:02:21.390910 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-global-gvk2n"] Jan 20 18:02:21 crc kubenswrapper[4558]: W0120 18:02:21.392319 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod133f8af1_1bea_41e4_8512_76fc9499aff6.slice/crio-39b4cc0a7e4298e4b7e0916ace9aa52adc0a4553fb5d43914fccaa1827d817d8 WatchSource:0}: Error finding container 39b4cc0a7e4298e4b7e0916ace9aa52adc0a4553fb5d43914fccaa1827d817d8: Status 404 returned error can't find the container with id 39b4cc0a7e4298e4b7e0916ace9aa52adc0a4553fb5d43914fccaa1827d817d8 Jan 20 18:02:21 crc kubenswrapper[4558]: I0120 18:02:21.628254 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-global-gvk2n" event={"ID":"133f8af1-1bea-41e4-8512-76fc9499aff6","Type":"ContainerStarted","Data":"39b4cc0a7e4298e4b7e0916ace9aa52adc0a4553fb5d43914fccaa1827d817d8"} Jan 20 18:02:22 crc kubenswrapper[4558]: I0120 18:02:22.637489 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-global-gvk2n" event={"ID":"133f8af1-1bea-41e4-8512-76fc9499aff6","Type":"ContainerStarted","Data":"47e77ca48159e1b5a6cd274fb0aa21214e2c821bd50bbe97a5ed961159dd768e"} Jan 20 18:02:22 crc kubenswrapper[4558]: I0120 18:02:22.654437 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-global-gvk2n" podStartSLOduration=2.067953777 podStartE2EDuration="2.654422334s" podCreationTimestamp="2026-01-20 18:02:20 +0000 UTC" firstStartedPulling="2026-01-20 18:02:21.395070221 +0000 UTC m=+4835.155408189" lastFinishedPulling="2026-01-20 18:02:21.981538779 +0000 UTC m=+4835.741876746" observedRunningTime="2026-01-20 18:02:22.652812188 +0000 UTC m=+4836.413150155" watchObservedRunningTime="2026-01-20 18:02:22.654422334 +0000 UTC m=+4836.414760302" Jan 20 18:02:23 crc kubenswrapper[4558]: I0120 18:02:23.649026 4558 generic.go:334] "Generic (PLEG): container finished" podID="133f8af1-1bea-41e4-8512-76fc9499aff6" containerID="47e77ca48159e1b5a6cd274fb0aa21214e2c821bd50bbe97a5ed961159dd768e" exitCode=0 Jan 20 18:02:23 crc kubenswrapper[4558]: I0120 18:02:23.649081 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-global-gvk2n" event={"ID":"133f8af1-1bea-41e4-8512-76fc9499aff6","Type":"ContainerDied","Data":"47e77ca48159e1b5a6cd274fb0aa21214e2c821bd50bbe97a5ed961159dd768e"} Jan 20 18:02:24 
crc kubenswrapper[4558]: I0120 18:02:24.873981 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-global-gvk2n" Jan 20 18:02:25 crc kubenswrapper[4558]: I0120 18:02:25.038620 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/133f8af1-1bea-41e4-8512-76fc9499aff6-inventory\") pod \"133f8af1-1bea-41e4-8512-76fc9499aff6\" (UID: \"133f8af1-1bea-41e4-8512-76fc9499aff6\") " Jan 20 18:02:25 crc kubenswrapper[4558]: I0120 18:02:25.038693 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/133f8af1-1bea-41e4-8512-76fc9499aff6-nova-metadata-neutron-config-0\") pod \"133f8af1-1bea-41e4-8512-76fc9499aff6\" (UID: \"133f8af1-1bea-41e4-8512-76fc9499aff6\") " Jan 20 18:02:25 crc kubenswrapper[4558]: I0120 18:02:25.038724 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-2\" (UniqueName: \"kubernetes.io/secret/133f8af1-1bea-41e4-8512-76fc9499aff6-nova-metadata-neutron-config-2\") pod \"133f8af1-1bea-41e4-8512-76fc9499aff6\" (UID: \"133f8af1-1bea-41e4-8512-76fc9499aff6\") " Jan 20 18:02:25 crc kubenswrapper[4558]: I0120 18:02:25.038767 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/133f8af1-1bea-41e4-8512-76fc9499aff6-neutron-ovn-metadata-agent-neutron-config-0\") pod \"133f8af1-1bea-41e4-8512-76fc9499aff6\" (UID: \"133f8af1-1bea-41e4-8512-76fc9499aff6\") " Jan 20 18:02:25 crc kubenswrapper[4558]: I0120 18:02:25.038867 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-1\" (UniqueName: \"kubernetes.io/secret/133f8af1-1bea-41e4-8512-76fc9499aff6-nova-metadata-neutron-config-1\") pod \"133f8af1-1bea-41e4-8512-76fc9499aff6\" (UID: \"133f8af1-1bea-41e4-8512-76fc9499aff6\") " Jan 20 18:02:25 crc kubenswrapper[4558]: I0120 18:02:25.038929 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/133f8af1-1bea-41e4-8512-76fc9499aff6-neutron-metadata-combined-ca-bundle\") pod \"133f8af1-1bea-41e4-8512-76fc9499aff6\" (UID: \"133f8af1-1bea-41e4-8512-76fc9499aff6\") " Jan 20 18:02:25 crc kubenswrapper[4558]: I0120 18:02:25.038957 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/133f8af1-1bea-41e4-8512-76fc9499aff6-ssh-key-edpm-compute-global\") pod \"133f8af1-1bea-41e4-8512-76fc9499aff6\" (UID: \"133f8af1-1bea-41e4-8512-76fc9499aff6\") " Jan 20 18:02:25 crc kubenswrapper[4558]: I0120 18:02:25.038986 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cn9zq\" (UniqueName: \"kubernetes.io/projected/133f8af1-1bea-41e4-8512-76fc9499aff6-kube-api-access-cn9zq\") pod \"133f8af1-1bea-41e4-8512-76fc9499aff6\" (UID: \"133f8af1-1bea-41e4-8512-76fc9499aff6\") " Jan 20 18:02:25 crc kubenswrapper[4558]: I0120 18:02:25.045202 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/133f8af1-1bea-41e4-8512-76fc9499aff6-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") 
pod "133f8af1-1bea-41e4-8512-76fc9499aff6" (UID: "133f8af1-1bea-41e4-8512-76fc9499aff6"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:02:25 crc kubenswrapper[4558]: I0120 18:02:25.049512 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/133f8af1-1bea-41e4-8512-76fc9499aff6-kube-api-access-cn9zq" (OuterVolumeSpecName: "kube-api-access-cn9zq") pod "133f8af1-1bea-41e4-8512-76fc9499aff6" (UID: "133f8af1-1bea-41e4-8512-76fc9499aff6"). InnerVolumeSpecName "kube-api-access-cn9zq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:02:25 crc kubenswrapper[4558]: I0120 18:02:25.062129 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/133f8af1-1bea-41e4-8512-76fc9499aff6-nova-metadata-neutron-config-1" (OuterVolumeSpecName: "nova-metadata-neutron-config-1") pod "133f8af1-1bea-41e4-8512-76fc9499aff6" (UID: "133f8af1-1bea-41e4-8512-76fc9499aff6"). InnerVolumeSpecName "nova-metadata-neutron-config-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:02:25 crc kubenswrapper[4558]: I0120 18:02:25.062188 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/133f8af1-1bea-41e4-8512-76fc9499aff6-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "133f8af1-1bea-41e4-8512-76fc9499aff6" (UID: "133f8af1-1bea-41e4-8512-76fc9499aff6"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:02:25 crc kubenswrapper[4558]: I0120 18:02:25.062230 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/133f8af1-1bea-41e4-8512-76fc9499aff6-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "133f8af1-1bea-41e4-8512-76fc9499aff6" (UID: "133f8af1-1bea-41e4-8512-76fc9499aff6"). InnerVolumeSpecName "nova-metadata-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:02:25 crc kubenswrapper[4558]: I0120 18:02:25.062770 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/133f8af1-1bea-41e4-8512-76fc9499aff6-inventory" (OuterVolumeSpecName: "inventory") pod "133f8af1-1bea-41e4-8512-76fc9499aff6" (UID: "133f8af1-1bea-41e4-8512-76fc9499aff6"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:02:25 crc kubenswrapper[4558]: I0120 18:02:25.064329 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/133f8af1-1bea-41e4-8512-76fc9499aff6-ssh-key-edpm-compute-global" (OuterVolumeSpecName: "ssh-key-edpm-compute-global") pod "133f8af1-1bea-41e4-8512-76fc9499aff6" (UID: "133f8af1-1bea-41e4-8512-76fc9499aff6"). InnerVolumeSpecName "ssh-key-edpm-compute-global". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:02:25 crc kubenswrapper[4558]: I0120 18:02:25.064417 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/133f8af1-1bea-41e4-8512-76fc9499aff6-nova-metadata-neutron-config-2" (OuterVolumeSpecName: "nova-metadata-neutron-config-2") pod "133f8af1-1bea-41e4-8512-76fc9499aff6" (UID: "133f8af1-1bea-41e4-8512-76fc9499aff6"). InnerVolumeSpecName "nova-metadata-neutron-config-2". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:02:25 crc kubenswrapper[4558]: I0120 18:02:25.141411 4558 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/133f8af1-1bea-41e4-8512-76fc9499aff6-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Jan 20 18:02:25 crc kubenswrapper[4558]: I0120 18:02:25.141447 4558 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-2\" (UniqueName: \"kubernetes.io/secret/133f8af1-1bea-41e4-8512-76fc9499aff6-nova-metadata-neutron-config-2\") on node \"crc\" DevicePath \"\"" Jan 20 18:02:25 crc kubenswrapper[4558]: I0120 18:02:25.141461 4558 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/133f8af1-1bea-41e4-8512-76fc9499aff6-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Jan 20 18:02:25 crc kubenswrapper[4558]: I0120 18:02:25.141476 4558 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-1\" (UniqueName: \"kubernetes.io/secret/133f8af1-1bea-41e4-8512-76fc9499aff6-nova-metadata-neutron-config-1\") on node \"crc\" DevicePath \"\"" Jan 20 18:02:25 crc kubenswrapper[4558]: I0120 18:02:25.141488 4558 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/133f8af1-1bea-41e4-8512-76fc9499aff6-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:02:25 crc kubenswrapper[4558]: I0120 18:02:25.141501 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/133f8af1-1bea-41e4-8512-76fc9499aff6-ssh-key-edpm-compute-global\") on node \"crc\" DevicePath \"\"" Jan 20 18:02:25 crc kubenswrapper[4558]: I0120 18:02:25.141516 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cn9zq\" (UniqueName: \"kubernetes.io/projected/133f8af1-1bea-41e4-8512-76fc9499aff6-kube-api-access-cn9zq\") on node \"crc\" DevicePath \"\"" Jan 20 18:02:25 crc kubenswrapper[4558]: I0120 18:02:25.141527 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/133f8af1-1bea-41e4-8512-76fc9499aff6-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:02:25 crc kubenswrapper[4558]: I0120 18:02:25.670258 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-global-gvk2n" event={"ID":"133f8af1-1bea-41e4-8512-76fc9499aff6","Type":"ContainerDied","Data":"39b4cc0a7e4298e4b7e0916ace9aa52adc0a4553fb5d43914fccaa1827d817d8"} Jan 20 18:02:25 crc kubenswrapper[4558]: I0120 18:02:25.670306 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="39b4cc0a7e4298e4b7e0916ace9aa52adc0a4553fb5d43914fccaa1827d817d8" Jan 20 18:02:25 crc kubenswrapper[4558]: I0120 18:02:25.670339 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-global-gvk2n" Jan 20 18:02:25 crc kubenswrapper[4558]: I0120 18:02:25.736267 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-ovn-edpm-multinodeset-edpm-compute-global-9gwtw"] Jan 20 18:02:25 crc kubenswrapper[4558]: E0120 18:02:25.736618 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="133f8af1-1bea-41e4-8512-76fc9499aff6" containerName="neutron-metadata-edpm-multinodeset-edpm-compute-global" Jan 20 18:02:25 crc kubenswrapper[4558]: I0120 18:02:25.736636 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="133f8af1-1bea-41e4-8512-76fc9499aff6" containerName="neutron-metadata-edpm-multinodeset-edpm-compute-global" Jan 20 18:02:25 crc kubenswrapper[4558]: I0120 18:02:25.736774 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="133f8af1-1bea-41e4-8512-76fc9499aff6" containerName="neutron-metadata-edpm-multinodeset-edpm-compute-global" Jan 20 18:02:25 crc kubenswrapper[4558]: I0120 18:02:25.737250 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-ovn-edpm-multinodeset-edpm-compute-global-9gwtw" Jan 20 18:02:25 crc kubenswrapper[4558]: I0120 18:02:25.738578 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"combined-ca-bundle" Jan 20 18:02:25 crc kubenswrapper[4558]: I0120 18:02:25.739053 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-edpm-compute-global" Jan 20 18:02:25 crc kubenswrapper[4558]: I0120 18:02:25.739431 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-ovn-agent-neutron-config" Jan 20 18:02:25 crc kubenswrapper[4558]: I0120 18:02:25.739527 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 18:02:25 crc kubenswrapper[4558]: I0120 18:02:25.740620 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 18:02:25 crc kubenswrapper[4558]: I0120 18:02:25.740903 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"edpm-compute-global-dockercfg-wkdxg" Jan 20 18:02:25 crc kubenswrapper[4558]: I0120 18:02:25.751842 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-ovn-edpm-multinodeset-edpm-compute-global-9gwtw"] Jan 20 18:02:25 crc kubenswrapper[4558]: I0120 18:02:25.852668 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cd51553-9e13-40db-88e9-3011d212d5e6-neutron-ovn-combined-ca-bundle\") pod \"neutron-ovn-edpm-multinodeset-edpm-compute-global-9gwtw\" (UID: \"2cd51553-9e13-40db-88e9-3011d212d5e6\") " pod="openstack-kuttl-tests/neutron-ovn-edpm-multinodeset-edpm-compute-global-9gwtw" Jan 20 18:02:25 crc kubenswrapper[4558]: I0120 18:02:25.852825 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cnlhr\" (UniqueName: \"kubernetes.io/projected/2cd51553-9e13-40db-88e9-3011d212d5e6-kube-api-access-cnlhr\") pod \"neutron-ovn-edpm-multinodeset-edpm-compute-global-9gwtw\" (UID: \"2cd51553-9e13-40db-88e9-3011d212d5e6\") " 
pod="openstack-kuttl-tests/neutron-ovn-edpm-multinodeset-edpm-compute-global-9gwtw" Jan 20 18:02:25 crc kubenswrapper[4558]: I0120 18:02:25.852932 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2cd51553-9e13-40db-88e9-3011d212d5e6-inventory\") pod \"neutron-ovn-edpm-multinodeset-edpm-compute-global-9gwtw\" (UID: \"2cd51553-9e13-40db-88e9-3011d212d5e6\") " pod="openstack-kuttl-tests/neutron-ovn-edpm-multinodeset-edpm-compute-global-9gwtw" Jan 20 18:02:25 crc kubenswrapper[4558]: I0120 18:02:25.852988 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/2cd51553-9e13-40db-88e9-3011d212d5e6-neutron-ovn-agent-neutron-config-0\") pod \"neutron-ovn-edpm-multinodeset-edpm-compute-global-9gwtw\" (UID: \"2cd51553-9e13-40db-88e9-3011d212d5e6\") " pod="openstack-kuttl-tests/neutron-ovn-edpm-multinodeset-edpm-compute-global-9gwtw" Jan 20 18:02:25 crc kubenswrapper[4558]: I0120 18:02:25.853015 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/2cd51553-9e13-40db-88e9-3011d212d5e6-ssh-key-edpm-compute-global\") pod \"neutron-ovn-edpm-multinodeset-edpm-compute-global-9gwtw\" (UID: \"2cd51553-9e13-40db-88e9-3011d212d5e6\") " pod="openstack-kuttl-tests/neutron-ovn-edpm-multinodeset-edpm-compute-global-9gwtw" Jan 20 18:02:25 crc kubenswrapper[4558]: I0120 18:02:25.955312 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cnlhr\" (UniqueName: \"kubernetes.io/projected/2cd51553-9e13-40db-88e9-3011d212d5e6-kube-api-access-cnlhr\") pod \"neutron-ovn-edpm-multinodeset-edpm-compute-global-9gwtw\" (UID: \"2cd51553-9e13-40db-88e9-3011d212d5e6\") " pod="openstack-kuttl-tests/neutron-ovn-edpm-multinodeset-edpm-compute-global-9gwtw" Jan 20 18:02:25 crc kubenswrapper[4558]: I0120 18:02:25.955381 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2cd51553-9e13-40db-88e9-3011d212d5e6-inventory\") pod \"neutron-ovn-edpm-multinodeset-edpm-compute-global-9gwtw\" (UID: \"2cd51553-9e13-40db-88e9-3011d212d5e6\") " pod="openstack-kuttl-tests/neutron-ovn-edpm-multinodeset-edpm-compute-global-9gwtw" Jan 20 18:02:25 crc kubenswrapper[4558]: I0120 18:02:25.955418 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/2cd51553-9e13-40db-88e9-3011d212d5e6-neutron-ovn-agent-neutron-config-0\") pod \"neutron-ovn-edpm-multinodeset-edpm-compute-global-9gwtw\" (UID: \"2cd51553-9e13-40db-88e9-3011d212d5e6\") " pod="openstack-kuttl-tests/neutron-ovn-edpm-multinodeset-edpm-compute-global-9gwtw" Jan 20 18:02:25 crc kubenswrapper[4558]: I0120 18:02:25.955447 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/2cd51553-9e13-40db-88e9-3011d212d5e6-ssh-key-edpm-compute-global\") pod \"neutron-ovn-edpm-multinodeset-edpm-compute-global-9gwtw\" (UID: \"2cd51553-9e13-40db-88e9-3011d212d5e6\") " pod="openstack-kuttl-tests/neutron-ovn-edpm-multinodeset-edpm-compute-global-9gwtw" Jan 20 18:02:25 crc kubenswrapper[4558]: I0120 18:02:25.955738 4558 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"neutron-ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cd51553-9e13-40db-88e9-3011d212d5e6-neutron-ovn-combined-ca-bundle\") pod \"neutron-ovn-edpm-multinodeset-edpm-compute-global-9gwtw\" (UID: \"2cd51553-9e13-40db-88e9-3011d212d5e6\") " pod="openstack-kuttl-tests/neutron-ovn-edpm-multinodeset-edpm-compute-global-9gwtw" Jan 20 18:02:25 crc kubenswrapper[4558]: I0120 18:02:25.960378 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/2cd51553-9e13-40db-88e9-3011d212d5e6-neutron-ovn-agent-neutron-config-0\") pod \"neutron-ovn-edpm-multinodeset-edpm-compute-global-9gwtw\" (UID: \"2cd51553-9e13-40db-88e9-3011d212d5e6\") " pod="openstack-kuttl-tests/neutron-ovn-edpm-multinodeset-edpm-compute-global-9gwtw" Jan 20 18:02:25 crc kubenswrapper[4558]: I0120 18:02:25.960881 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/2cd51553-9e13-40db-88e9-3011d212d5e6-ssh-key-edpm-compute-global\") pod \"neutron-ovn-edpm-multinodeset-edpm-compute-global-9gwtw\" (UID: \"2cd51553-9e13-40db-88e9-3011d212d5e6\") " pod="openstack-kuttl-tests/neutron-ovn-edpm-multinodeset-edpm-compute-global-9gwtw" Jan 20 18:02:25 crc kubenswrapper[4558]: I0120 18:02:25.961002 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cd51553-9e13-40db-88e9-3011d212d5e6-neutron-ovn-combined-ca-bundle\") pod \"neutron-ovn-edpm-multinodeset-edpm-compute-global-9gwtw\" (UID: \"2cd51553-9e13-40db-88e9-3011d212d5e6\") " pod="openstack-kuttl-tests/neutron-ovn-edpm-multinodeset-edpm-compute-global-9gwtw" Jan 20 18:02:25 crc kubenswrapper[4558]: I0120 18:02:25.961250 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2cd51553-9e13-40db-88e9-3011d212d5e6-inventory\") pod \"neutron-ovn-edpm-multinodeset-edpm-compute-global-9gwtw\" (UID: \"2cd51553-9e13-40db-88e9-3011d212d5e6\") " pod="openstack-kuttl-tests/neutron-ovn-edpm-multinodeset-edpm-compute-global-9gwtw" Jan 20 18:02:25 crc kubenswrapper[4558]: I0120 18:02:25.970323 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cnlhr\" (UniqueName: \"kubernetes.io/projected/2cd51553-9e13-40db-88e9-3011d212d5e6-kube-api-access-cnlhr\") pod \"neutron-ovn-edpm-multinodeset-edpm-compute-global-9gwtw\" (UID: \"2cd51553-9e13-40db-88e9-3011d212d5e6\") " pod="openstack-kuttl-tests/neutron-ovn-edpm-multinodeset-edpm-compute-global-9gwtw" Jan 20 18:02:26 crc kubenswrapper[4558]: I0120 18:02:26.049629 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-ovn-edpm-multinodeset-edpm-compute-global-9gwtw" Jan 20 18:02:26 crc kubenswrapper[4558]: W0120 18:02:26.435998 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2cd51553_9e13_40db_88e9_3011d212d5e6.slice/crio-169e70bf0372a99e110c629f1c926a4795996b8b688414d1c332b29caa2b7c1c WatchSource:0}: Error finding container 169e70bf0372a99e110c629f1c926a4795996b8b688414d1c332b29caa2b7c1c: Status 404 returned error can't find the container with id 169e70bf0372a99e110c629f1c926a4795996b8b688414d1c332b29caa2b7c1c Jan 20 18:02:26 crc kubenswrapper[4558]: I0120 18:02:26.436592 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-ovn-edpm-multinodeset-edpm-compute-global-9gwtw"] Jan 20 18:02:26 crc kubenswrapper[4558]: I0120 18:02:26.680024 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-ovn-edpm-multinodeset-edpm-compute-global-9gwtw" event={"ID":"2cd51553-9e13-40db-88e9-3011d212d5e6","Type":"ContainerStarted","Data":"169e70bf0372a99e110c629f1c926a4795996b8b688414d1c332b29caa2b7c1c"} Jan 20 18:02:27 crc kubenswrapper[4558]: I0120 18:02:27.330259 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 18:02:27 crc kubenswrapper[4558]: I0120 18:02:27.331430 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 18:02:27 crc kubenswrapper[4558]: I0120 18:02:27.692872 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-ovn-edpm-multinodeset-edpm-compute-global-9gwtw" event={"ID":"2cd51553-9e13-40db-88e9-3011d212d5e6","Type":"ContainerStarted","Data":"7c8c295aa796a74bd860c38675342562e1092a52b2c7074578717f0739271d19"} Jan 20 18:02:27 crc kubenswrapper[4558]: I0120 18:02:27.712156 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/neutron-ovn-edpm-multinodeset-edpm-compute-global-9gwtw" podStartSLOduration=2.169110065 podStartE2EDuration="2.712130151s" podCreationTimestamp="2026-01-20 18:02:25 +0000 UTC" firstStartedPulling="2026-01-20 18:02:26.438317166 +0000 UTC m=+4840.198655133" lastFinishedPulling="2026-01-20 18:02:26.981337251 +0000 UTC m=+4840.741675219" observedRunningTime="2026-01-20 18:02:27.707386404 +0000 UTC m=+4841.467724371" watchObservedRunningTime="2026-01-20 18:02:27.712130151 +0000 UTC m=+4841.472468118" Jan 20 18:02:29 crc kubenswrapper[4558]: I0120 18:02:29.710713 4558 generic.go:334] "Generic (PLEG): container finished" podID="2cd51553-9e13-40db-88e9-3011d212d5e6" containerID="7c8c295aa796a74bd860c38675342562e1092a52b2c7074578717f0739271d19" exitCode=0 Jan 20 18:02:29 crc kubenswrapper[4558]: I0120 18:02:29.710818 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-ovn-edpm-multinodeset-edpm-compute-global-9gwtw" 
event={"ID":"2cd51553-9e13-40db-88e9-3011d212d5e6","Type":"ContainerDied","Data":"7c8c295aa796a74bd860c38675342562e1092a52b2c7074578717f0739271d19"} Jan 20 18:02:30 crc kubenswrapper[4558]: I0120 18:02:30.882349 4558 scope.go:117] "RemoveContainer" containerID="d3d543ae948707353c576f305357f367d0aa01f73668bcc81ac19bcc7f5ea140" Jan 20 18:02:30 crc kubenswrapper[4558]: I0120 18:02:30.904302 4558 scope.go:117] "RemoveContainer" containerID="861bd48be8d8402e91402ef59c9aafef2f48435302e1a32fdbaaea6e18815246" Jan 20 18:02:30 crc kubenswrapper[4558]: I0120 18:02:30.963055 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-ovn-edpm-multinodeset-edpm-compute-global-9gwtw" Jan 20 18:02:30 crc kubenswrapper[4558]: I0120 18:02:30.975394 4558 scope.go:117] "RemoveContainer" containerID="444bb013e7c42b48e6d23336ee61f643f1067783c9c8c72f8068084f4d7cf163" Jan 20 18:02:31 crc kubenswrapper[4558]: I0120 18:02:31.008625 4558 scope.go:117] "RemoveContainer" containerID="c2c78b1f544d5f9cadbb72d37b5b668352e6d6879a5e657cfe4f7f30cab6e809" Jan 20 18:02:31 crc kubenswrapper[4558]: I0120 18:02:31.025438 4558 scope.go:117] "RemoveContainer" containerID="92ba7827254ded61befb4ea8bef7d19312169445c2d52de93d43b8f0d521581e" Jan 20 18:02:31 crc kubenswrapper[4558]: I0120 18:02:31.040580 4558 scope.go:117] "RemoveContainer" containerID="dd8db3ceab78f235dc97917832328d6e0fcaa24e03f8ac167d9b8cc02b674413" Jan 20 18:02:31 crc kubenswrapper[4558]: I0120 18:02:31.074640 4558 scope.go:117] "RemoveContainer" containerID="59d8c4e436c7f5a4aad847d81229a8d92faa6c515ebaf24787e41c7993f75693" Jan 20 18:02:31 crc kubenswrapper[4558]: I0120 18:02:31.090521 4558 scope.go:117] "RemoveContainer" containerID="085308a6dc97c681857f59245f2f70743caf09888e9a5758a37a53be58e66937" Jan 20 18:02:31 crc kubenswrapper[4558]: I0120 18:02:31.123143 4558 scope.go:117] "RemoveContainer" containerID="2af5575e62456e4be7656ee0e5229903dd77c0dd144e3458d8c9fe0b3ae6c15b" Jan 20 18:02:31 crc kubenswrapper[4558]: I0120 18:02:31.126355 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2cd51553-9e13-40db-88e9-3011d212d5e6-inventory\") pod \"2cd51553-9e13-40db-88e9-3011d212d5e6\" (UID: \"2cd51553-9e13-40db-88e9-3011d212d5e6\") " Jan 20 18:02:31 crc kubenswrapper[4558]: I0120 18:02:31.126587 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cd51553-9e13-40db-88e9-3011d212d5e6-neutron-ovn-combined-ca-bundle\") pod \"2cd51553-9e13-40db-88e9-3011d212d5e6\" (UID: \"2cd51553-9e13-40db-88e9-3011d212d5e6\") " Jan 20 18:02:31 crc kubenswrapper[4558]: I0120 18:02:31.126661 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cnlhr\" (UniqueName: \"kubernetes.io/projected/2cd51553-9e13-40db-88e9-3011d212d5e6-kube-api-access-cnlhr\") pod \"2cd51553-9e13-40db-88e9-3011d212d5e6\" (UID: \"2cd51553-9e13-40db-88e9-3011d212d5e6\") " Jan 20 18:02:31 crc kubenswrapper[4558]: I0120 18:02:31.126700 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/2cd51553-9e13-40db-88e9-3011d212d5e6-neutron-ovn-agent-neutron-config-0\") pod \"2cd51553-9e13-40db-88e9-3011d212d5e6\" (UID: \"2cd51553-9e13-40db-88e9-3011d212d5e6\") " Jan 20 18:02:31 crc kubenswrapper[4558]: I0120 18:02:31.126763 4558 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/2cd51553-9e13-40db-88e9-3011d212d5e6-ssh-key-edpm-compute-global\") pod \"2cd51553-9e13-40db-88e9-3011d212d5e6\" (UID: \"2cd51553-9e13-40db-88e9-3011d212d5e6\") " Jan 20 18:02:31 crc kubenswrapper[4558]: I0120 18:02:31.134091 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2cd51553-9e13-40db-88e9-3011d212d5e6-neutron-ovn-combined-ca-bundle" (OuterVolumeSpecName: "neutron-ovn-combined-ca-bundle") pod "2cd51553-9e13-40db-88e9-3011d212d5e6" (UID: "2cd51553-9e13-40db-88e9-3011d212d5e6"). InnerVolumeSpecName "neutron-ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:02:31 crc kubenswrapper[4558]: I0120 18:02:31.134197 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2cd51553-9e13-40db-88e9-3011d212d5e6-kube-api-access-cnlhr" (OuterVolumeSpecName: "kube-api-access-cnlhr") pod "2cd51553-9e13-40db-88e9-3011d212d5e6" (UID: "2cd51553-9e13-40db-88e9-3011d212d5e6"). InnerVolumeSpecName "kube-api-access-cnlhr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:02:31 crc kubenswrapper[4558]: I0120 18:02:31.147325 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2cd51553-9e13-40db-88e9-3011d212d5e6-ssh-key-edpm-compute-global" (OuterVolumeSpecName: "ssh-key-edpm-compute-global") pod "2cd51553-9e13-40db-88e9-3011d212d5e6" (UID: "2cd51553-9e13-40db-88e9-3011d212d5e6"). InnerVolumeSpecName "ssh-key-edpm-compute-global". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:02:31 crc kubenswrapper[4558]: I0120 18:02:31.148910 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2cd51553-9e13-40db-88e9-3011d212d5e6-inventory" (OuterVolumeSpecName: "inventory") pod "2cd51553-9e13-40db-88e9-3011d212d5e6" (UID: "2cd51553-9e13-40db-88e9-3011d212d5e6"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:02:31 crc kubenswrapper[4558]: I0120 18:02:31.149191 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2cd51553-9e13-40db-88e9-3011d212d5e6-neutron-ovn-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-agent-neutron-config-0") pod "2cd51553-9e13-40db-88e9-3011d212d5e6" (UID: "2cd51553-9e13-40db-88e9-3011d212d5e6"). InnerVolumeSpecName "neutron-ovn-agent-neutron-config-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:02:31 crc kubenswrapper[4558]: I0120 18:02:31.229421 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2cd51553-9e13-40db-88e9-3011d212d5e6-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:02:31 crc kubenswrapper[4558]: I0120 18:02:31.229463 4558 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cd51553-9e13-40db-88e9-3011d212d5e6-neutron-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:02:31 crc kubenswrapper[4558]: I0120 18:02:31.229499 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cnlhr\" (UniqueName: \"kubernetes.io/projected/2cd51553-9e13-40db-88e9-3011d212d5e6-kube-api-access-cnlhr\") on node \"crc\" DevicePath \"\"" Jan 20 18:02:31 crc kubenswrapper[4558]: I0120 18:02:31.229514 4558 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/2cd51553-9e13-40db-88e9-3011d212d5e6-neutron-ovn-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Jan 20 18:02:31 crc kubenswrapper[4558]: I0120 18:02:31.229526 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/2cd51553-9e13-40db-88e9-3011d212d5e6-ssh-key-edpm-compute-global\") on node \"crc\" DevicePath \"\"" Jan 20 18:02:31 crc kubenswrapper[4558]: I0120 18:02:31.733446 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-ovn-edpm-multinodeset-edpm-compute-global-9gwtw" event={"ID":"2cd51553-9e13-40db-88e9-3011d212d5e6","Type":"ContainerDied","Data":"169e70bf0372a99e110c629f1c926a4795996b8b688414d1c332b29caa2b7c1c"} Jan 20 18:02:31 crc kubenswrapper[4558]: I0120 18:02:31.733509 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="169e70bf0372a99e110c629f1c926a4795996b8b688414d1c332b29caa2b7c1c" Jan 20 18:02:31 crc kubenswrapper[4558]: I0120 18:02:31.733481 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-ovn-edpm-multinodeset-edpm-compute-global-9gwtw" Jan 20 18:02:31 crc kubenswrapper[4558]: I0120 18:02:31.797723 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-sriov-edpm-multinodeset-edpm-compute-global-j9j8g"] Jan 20 18:02:31 crc kubenswrapper[4558]: E0120 18:02:31.798110 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2cd51553-9e13-40db-88e9-3011d212d5e6" containerName="neutron-ovn-edpm-multinodeset-edpm-compute-global" Jan 20 18:02:31 crc kubenswrapper[4558]: I0120 18:02:31.798132 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="2cd51553-9e13-40db-88e9-3011d212d5e6" containerName="neutron-ovn-edpm-multinodeset-edpm-compute-global" Jan 20 18:02:31 crc kubenswrapper[4558]: I0120 18:02:31.798300 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="2cd51553-9e13-40db-88e9-3011d212d5e6" containerName="neutron-ovn-edpm-multinodeset-edpm-compute-global" Jan 20 18:02:31 crc kubenswrapper[4558]: I0120 18:02:31.798867 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-sriov-edpm-multinodeset-edpm-compute-global-j9j8g" Jan 20 18:02:31 crc kubenswrapper[4558]: I0120 18:02:31.807389 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-sriov-edpm-multinodeset-edpm-compute-global-j9j8g"] Jan 20 18:02:31 crc kubenswrapper[4558]: I0120 18:02:31.808862 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 18:02:31 crc kubenswrapper[4558]: I0120 18:02:31.809007 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"edpm-compute-global-dockercfg-wkdxg" Jan 20 18:02:31 crc kubenswrapper[4558]: I0120 18:02:31.810974 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-sriov-agent-neutron-config" Jan 20 18:02:31 crc kubenswrapper[4558]: I0120 18:02:31.811023 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 18:02:31 crc kubenswrapper[4558]: I0120 18:02:31.810988 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-edpm-compute-global" Jan 20 18:02:31 crc kubenswrapper[4558]: I0120 18:02:31.811144 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"combined-ca-bundle" Jan 20 18:02:31 crc kubenswrapper[4558]: I0120 18:02:31.837257 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/169142be-b565-4b53-9953-a3971e720edc-neutron-sriov-agent-neutron-config-0\") pod \"neutron-sriov-edpm-multinodeset-edpm-compute-global-j9j8g\" (UID: \"169142be-b565-4b53-9953-a3971e720edc\") " pod="openstack-kuttl-tests/neutron-sriov-edpm-multinodeset-edpm-compute-global-j9j8g" Jan 20 18:02:31 crc kubenswrapper[4558]: I0120 18:02:31.837305 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/169142be-b565-4b53-9953-a3971e720edc-neutron-sriov-combined-ca-bundle\") pod \"neutron-sriov-edpm-multinodeset-edpm-compute-global-j9j8g\" (UID: \"169142be-b565-4b53-9953-a3971e720edc\") " pod="openstack-kuttl-tests/neutron-sriov-edpm-multinodeset-edpm-compute-global-j9j8g" Jan 20 18:02:31 crc kubenswrapper[4558]: I0120 18:02:31.837356 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/169142be-b565-4b53-9953-a3971e720edc-inventory\") pod \"neutron-sriov-edpm-multinodeset-edpm-compute-global-j9j8g\" (UID: \"169142be-b565-4b53-9953-a3971e720edc\") " pod="openstack-kuttl-tests/neutron-sriov-edpm-multinodeset-edpm-compute-global-j9j8g" Jan 20 18:02:31 crc kubenswrapper[4558]: I0120 18:02:31.837509 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8xsd6\" (UniqueName: \"kubernetes.io/projected/169142be-b565-4b53-9953-a3971e720edc-kube-api-access-8xsd6\") pod \"neutron-sriov-edpm-multinodeset-edpm-compute-global-j9j8g\" (UID: \"169142be-b565-4b53-9953-a3971e720edc\") " pod="openstack-kuttl-tests/neutron-sriov-edpm-multinodeset-edpm-compute-global-j9j8g" Jan 20 18:02:31 crc kubenswrapper[4558]: I0120 18:02:31.837568 4558 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/169142be-b565-4b53-9953-a3971e720edc-ssh-key-edpm-compute-global\") pod \"neutron-sriov-edpm-multinodeset-edpm-compute-global-j9j8g\" (UID: \"169142be-b565-4b53-9953-a3971e720edc\") " pod="openstack-kuttl-tests/neutron-sriov-edpm-multinodeset-edpm-compute-global-j9j8g" Jan 20 18:02:31 crc kubenswrapper[4558]: I0120 18:02:31.938768 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8xsd6\" (UniqueName: \"kubernetes.io/projected/169142be-b565-4b53-9953-a3971e720edc-kube-api-access-8xsd6\") pod \"neutron-sriov-edpm-multinodeset-edpm-compute-global-j9j8g\" (UID: \"169142be-b565-4b53-9953-a3971e720edc\") " pod="openstack-kuttl-tests/neutron-sriov-edpm-multinodeset-edpm-compute-global-j9j8g" Jan 20 18:02:31 crc kubenswrapper[4558]: I0120 18:02:31.939203 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/169142be-b565-4b53-9953-a3971e720edc-ssh-key-edpm-compute-global\") pod \"neutron-sriov-edpm-multinodeset-edpm-compute-global-j9j8g\" (UID: \"169142be-b565-4b53-9953-a3971e720edc\") " pod="openstack-kuttl-tests/neutron-sriov-edpm-multinodeset-edpm-compute-global-j9j8g" Jan 20 18:02:31 crc kubenswrapper[4558]: I0120 18:02:31.939268 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/169142be-b565-4b53-9953-a3971e720edc-neutron-sriov-agent-neutron-config-0\") pod \"neutron-sriov-edpm-multinodeset-edpm-compute-global-j9j8g\" (UID: \"169142be-b565-4b53-9953-a3971e720edc\") " pod="openstack-kuttl-tests/neutron-sriov-edpm-multinodeset-edpm-compute-global-j9j8g" Jan 20 18:02:31 crc kubenswrapper[4558]: I0120 18:02:31.939297 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/169142be-b565-4b53-9953-a3971e720edc-neutron-sriov-combined-ca-bundle\") pod \"neutron-sriov-edpm-multinodeset-edpm-compute-global-j9j8g\" (UID: \"169142be-b565-4b53-9953-a3971e720edc\") " pod="openstack-kuttl-tests/neutron-sriov-edpm-multinodeset-edpm-compute-global-j9j8g" Jan 20 18:02:31 crc kubenswrapper[4558]: I0120 18:02:31.939324 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/169142be-b565-4b53-9953-a3971e720edc-inventory\") pod \"neutron-sriov-edpm-multinodeset-edpm-compute-global-j9j8g\" (UID: \"169142be-b565-4b53-9953-a3971e720edc\") " pod="openstack-kuttl-tests/neutron-sriov-edpm-multinodeset-edpm-compute-global-j9j8g" Jan 20 18:02:31 crc kubenswrapper[4558]: I0120 18:02:31.943486 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/169142be-b565-4b53-9953-a3971e720edc-ssh-key-edpm-compute-global\") pod \"neutron-sriov-edpm-multinodeset-edpm-compute-global-j9j8g\" (UID: \"169142be-b565-4b53-9953-a3971e720edc\") " pod="openstack-kuttl-tests/neutron-sriov-edpm-multinodeset-edpm-compute-global-j9j8g" Jan 20 18:02:31 crc kubenswrapper[4558]: I0120 18:02:31.943498 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/169142be-b565-4b53-9953-a3971e720edc-inventory\") pod 
\"neutron-sriov-edpm-multinodeset-edpm-compute-global-j9j8g\" (UID: \"169142be-b565-4b53-9953-a3971e720edc\") " pod="openstack-kuttl-tests/neutron-sriov-edpm-multinodeset-edpm-compute-global-j9j8g" Jan 20 18:02:31 crc kubenswrapper[4558]: I0120 18:02:31.943729 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/169142be-b565-4b53-9953-a3971e720edc-neutron-sriov-agent-neutron-config-0\") pod \"neutron-sriov-edpm-multinodeset-edpm-compute-global-j9j8g\" (UID: \"169142be-b565-4b53-9953-a3971e720edc\") " pod="openstack-kuttl-tests/neutron-sriov-edpm-multinodeset-edpm-compute-global-j9j8g" Jan 20 18:02:31 crc kubenswrapper[4558]: I0120 18:02:31.944061 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/169142be-b565-4b53-9953-a3971e720edc-neutron-sriov-combined-ca-bundle\") pod \"neutron-sriov-edpm-multinodeset-edpm-compute-global-j9j8g\" (UID: \"169142be-b565-4b53-9953-a3971e720edc\") " pod="openstack-kuttl-tests/neutron-sriov-edpm-multinodeset-edpm-compute-global-j9j8g" Jan 20 18:02:31 crc kubenswrapper[4558]: I0120 18:02:31.954282 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8xsd6\" (UniqueName: \"kubernetes.io/projected/169142be-b565-4b53-9953-a3971e720edc-kube-api-access-8xsd6\") pod \"neutron-sriov-edpm-multinodeset-edpm-compute-global-j9j8g\" (UID: \"169142be-b565-4b53-9953-a3971e720edc\") " pod="openstack-kuttl-tests/neutron-sriov-edpm-multinodeset-edpm-compute-global-j9j8g" Jan 20 18:02:32 crc kubenswrapper[4558]: I0120 18:02:32.114405 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-sriov-edpm-multinodeset-edpm-compute-global-j9j8g" Jan 20 18:02:32 crc kubenswrapper[4558]: I0120 18:02:32.509619 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-sriov-edpm-multinodeset-edpm-compute-global-j9j8g"] Jan 20 18:02:32 crc kubenswrapper[4558]: W0120 18:02:32.510215 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod169142be_b565_4b53_9953_a3971e720edc.slice/crio-9788e04f66bec48127aa170c8aa15f0754052e5409f50e414256021a50fff5ab WatchSource:0}: Error finding container 9788e04f66bec48127aa170c8aa15f0754052e5409f50e414256021a50fff5ab: Status 404 returned error can't find the container with id 9788e04f66bec48127aa170c8aa15f0754052e5409f50e414256021a50fff5ab Jan 20 18:02:32 crc kubenswrapper[4558]: I0120 18:02:32.743678 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-sriov-edpm-multinodeset-edpm-compute-global-j9j8g" event={"ID":"169142be-b565-4b53-9953-a3971e720edc","Type":"ContainerStarted","Data":"9788e04f66bec48127aa170c8aa15f0754052e5409f50e414256021a50fff5ab"} Jan 20 18:02:33 crc kubenswrapper[4558]: I0120 18:02:33.761120 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-sriov-edpm-multinodeset-edpm-compute-global-j9j8g" event={"ID":"169142be-b565-4b53-9953-a3971e720edc","Type":"ContainerStarted","Data":"bd1569896700f9b7af284848f74e4dfdba0e2cb1c7e54f9f4ac7ce817d50ebac"} Jan 20 18:02:33 crc kubenswrapper[4558]: I0120 18:02:33.784801 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/neutron-sriov-edpm-multinodeset-edpm-compute-global-j9j8g" 
podStartSLOduration=2.111902211 podStartE2EDuration="2.784783992s" podCreationTimestamp="2026-01-20 18:02:31 +0000 UTC" firstStartedPulling="2026-01-20 18:02:32.51773992 +0000 UTC m=+4846.278077887" lastFinishedPulling="2026-01-20 18:02:33.190621701 +0000 UTC m=+4846.950959668" observedRunningTime="2026-01-20 18:02:33.783813928 +0000 UTC m=+4847.544151895" watchObservedRunningTime="2026-01-20 18:02:33.784783992 +0000 UTC m=+4847.545121959" Jan 20 18:02:34 crc kubenswrapper[4558]: I0120 18:02:34.772470 4558 generic.go:334] "Generic (PLEG): container finished" podID="169142be-b565-4b53-9953-a3971e720edc" containerID="bd1569896700f9b7af284848f74e4dfdba0e2cb1c7e54f9f4ac7ce817d50ebac" exitCode=0 Jan 20 18:02:34 crc kubenswrapper[4558]: I0120 18:02:34.772544 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-sriov-edpm-multinodeset-edpm-compute-global-j9j8g" event={"ID":"169142be-b565-4b53-9953-a3971e720edc","Type":"ContainerDied","Data":"bd1569896700f9b7af284848f74e4dfdba0e2cb1c7e54f9f4ac7ce817d50ebac"} Jan 20 18:02:36 crc kubenswrapper[4558]: I0120 18:02:36.443929 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-sriov-edpm-multinodeset-edpm-compute-global-j9j8g" Jan 20 18:02:36 crc kubenswrapper[4558]: I0120 18:02:36.613251 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/169142be-b565-4b53-9953-a3971e720edc-neutron-sriov-combined-ca-bundle\") pod \"169142be-b565-4b53-9953-a3971e720edc\" (UID: \"169142be-b565-4b53-9953-a3971e720edc\") " Jan 20 18:02:36 crc kubenswrapper[4558]: I0120 18:02:36.613527 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/169142be-b565-4b53-9953-a3971e720edc-inventory\") pod \"169142be-b565-4b53-9953-a3971e720edc\" (UID: \"169142be-b565-4b53-9953-a3971e720edc\") " Jan 20 18:02:36 crc kubenswrapper[4558]: I0120 18:02:36.613563 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/169142be-b565-4b53-9953-a3971e720edc-neutron-sriov-agent-neutron-config-0\") pod \"169142be-b565-4b53-9953-a3971e720edc\" (UID: \"169142be-b565-4b53-9953-a3971e720edc\") " Jan 20 18:02:36 crc kubenswrapper[4558]: I0120 18:02:36.613625 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/169142be-b565-4b53-9953-a3971e720edc-ssh-key-edpm-compute-global\") pod \"169142be-b565-4b53-9953-a3971e720edc\" (UID: \"169142be-b565-4b53-9953-a3971e720edc\") " Jan 20 18:02:36 crc kubenswrapper[4558]: I0120 18:02:36.613658 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8xsd6\" (UniqueName: \"kubernetes.io/projected/169142be-b565-4b53-9953-a3971e720edc-kube-api-access-8xsd6\") pod \"169142be-b565-4b53-9953-a3971e720edc\" (UID: \"169142be-b565-4b53-9953-a3971e720edc\") " Jan 20 18:02:36 crc kubenswrapper[4558]: I0120 18:02:36.618866 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/169142be-b565-4b53-9953-a3971e720edc-kube-api-access-8xsd6" (OuterVolumeSpecName: "kube-api-access-8xsd6") pod "169142be-b565-4b53-9953-a3971e720edc" (UID: "169142be-b565-4b53-9953-a3971e720edc"). 
InnerVolumeSpecName "kube-api-access-8xsd6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:02:36 crc kubenswrapper[4558]: I0120 18:02:36.619069 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/169142be-b565-4b53-9953-a3971e720edc-neutron-sriov-combined-ca-bundle" (OuterVolumeSpecName: "neutron-sriov-combined-ca-bundle") pod "169142be-b565-4b53-9953-a3971e720edc" (UID: "169142be-b565-4b53-9953-a3971e720edc"). InnerVolumeSpecName "neutron-sriov-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:02:36 crc kubenswrapper[4558]: I0120 18:02:36.631775 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/169142be-b565-4b53-9953-a3971e720edc-inventory" (OuterVolumeSpecName: "inventory") pod "169142be-b565-4b53-9953-a3971e720edc" (UID: "169142be-b565-4b53-9953-a3971e720edc"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:02:36 crc kubenswrapper[4558]: I0120 18:02:36.632086 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/169142be-b565-4b53-9953-a3971e720edc-neutron-sriov-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-sriov-agent-neutron-config-0") pod "169142be-b565-4b53-9953-a3971e720edc" (UID: "169142be-b565-4b53-9953-a3971e720edc"). InnerVolumeSpecName "neutron-sriov-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:02:36 crc kubenswrapper[4558]: I0120 18:02:36.633005 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/169142be-b565-4b53-9953-a3971e720edc-ssh-key-edpm-compute-global" (OuterVolumeSpecName: "ssh-key-edpm-compute-global") pod "169142be-b565-4b53-9953-a3971e720edc" (UID: "169142be-b565-4b53-9953-a3971e720edc"). InnerVolumeSpecName "ssh-key-edpm-compute-global". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:02:36 crc kubenswrapper[4558]: I0120 18:02:36.715309 4558 reconciler_common.go:293] "Volume detached for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/169142be-b565-4b53-9953-a3971e720edc-neutron-sriov-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Jan 20 18:02:36 crc kubenswrapper[4558]: I0120 18:02:36.715340 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/169142be-b565-4b53-9953-a3971e720edc-ssh-key-edpm-compute-global\") on node \"crc\" DevicePath \"\"" Jan 20 18:02:36 crc kubenswrapper[4558]: I0120 18:02:36.715356 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8xsd6\" (UniqueName: \"kubernetes.io/projected/169142be-b565-4b53-9953-a3971e720edc-kube-api-access-8xsd6\") on node \"crc\" DevicePath \"\"" Jan 20 18:02:36 crc kubenswrapper[4558]: I0120 18:02:36.715367 4558 reconciler_common.go:293] "Volume detached for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/169142be-b565-4b53-9953-a3971e720edc-neutron-sriov-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:02:36 crc kubenswrapper[4558]: I0120 18:02:36.715380 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/169142be-b565-4b53-9953-a3971e720edc-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:02:36 crc kubenswrapper[4558]: I0120 18:02:36.794678 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-sriov-edpm-multinodeset-edpm-compute-global-j9j8g" event={"ID":"169142be-b565-4b53-9953-a3971e720edc","Type":"ContainerDied","Data":"9788e04f66bec48127aa170c8aa15f0754052e5409f50e414256021a50fff5ab"} Jan 20 18:02:36 crc kubenswrapper[4558]: I0120 18:02:36.794732 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9788e04f66bec48127aa170c8aa15f0754052e5409f50e414256021a50fff5ab" Jan 20 18:02:36 crc kubenswrapper[4558]: I0120 18:02:36.794741 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-sriov-edpm-multinodeset-edpm-compute-global-j9j8g" Jan 20 18:02:36 crc kubenswrapper[4558]: I0120 18:02:36.856981 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-dhcp-edpm-multinodeset-edpm-compute-global-9c6xq"] Jan 20 18:02:36 crc kubenswrapper[4558]: E0120 18:02:36.857454 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="169142be-b565-4b53-9953-a3971e720edc" containerName="neutron-sriov-edpm-multinodeset-edpm-compute-global" Jan 20 18:02:36 crc kubenswrapper[4558]: I0120 18:02:36.857473 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="169142be-b565-4b53-9953-a3971e720edc" containerName="neutron-sriov-edpm-multinodeset-edpm-compute-global" Jan 20 18:02:36 crc kubenswrapper[4558]: I0120 18:02:36.857685 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="169142be-b565-4b53-9953-a3971e720edc" containerName="neutron-sriov-edpm-multinodeset-edpm-compute-global" Jan 20 18:02:36 crc kubenswrapper[4558]: I0120 18:02:36.858336 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-dhcp-edpm-multinodeset-edpm-compute-global-9c6xq" Jan 20 18:02:36 crc kubenswrapper[4558]: I0120 18:02:36.862517 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-dhcp-edpm-multinodeset-edpm-compute-global-9c6xq"] Jan 20 18:02:36 crc kubenswrapper[4558]: I0120 18:02:36.863198 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 18:02:36 crc kubenswrapper[4558]: I0120 18:02:36.863255 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-dhcp-agent-neutron-config" Jan 20 18:02:36 crc kubenswrapper[4558]: I0120 18:02:36.863208 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 18:02:36 crc kubenswrapper[4558]: I0120 18:02:36.863210 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"combined-ca-bundle" Jan 20 18:02:36 crc kubenswrapper[4558]: I0120 18:02:36.863471 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-edpm-compute-global" Jan 20 18:02:36 crc kubenswrapper[4558]: I0120 18:02:36.863722 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"edpm-compute-global-dockercfg-wkdxg" Jan 20 18:02:36 crc kubenswrapper[4558]: I0120 18:02:36.920016 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/040af523-2207-48f4-809f-80e8b7f24918-neutron-dhcp-combined-ca-bundle\") pod \"neutron-dhcp-edpm-multinodeset-edpm-compute-global-9c6xq\" (UID: \"040af523-2207-48f4-809f-80e8b7f24918\") " pod="openstack-kuttl-tests/neutron-dhcp-edpm-multinodeset-edpm-compute-global-9c6xq" Jan 20 18:02:36 crc kubenswrapper[4558]: I0120 18:02:36.920194 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/040af523-2207-48f4-809f-80e8b7f24918-inventory\") pod \"neutron-dhcp-edpm-multinodeset-edpm-compute-global-9c6xq\" (UID: \"040af523-2207-48f4-809f-80e8b7f24918\") " pod="openstack-kuttl-tests/neutron-dhcp-edpm-multinodeset-edpm-compute-global-9c6xq" Jan 20 18:02:36 crc kubenswrapper[4558]: I0120 18:02:36.920244 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/040af523-2207-48f4-809f-80e8b7f24918-neutron-dhcp-agent-neutron-config-0\") pod \"neutron-dhcp-edpm-multinodeset-edpm-compute-global-9c6xq\" (UID: \"040af523-2207-48f4-809f-80e8b7f24918\") " pod="openstack-kuttl-tests/neutron-dhcp-edpm-multinodeset-edpm-compute-global-9c6xq" Jan 20 18:02:36 crc kubenswrapper[4558]: I0120 18:02:36.920285 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tgkbj\" (UniqueName: \"kubernetes.io/projected/040af523-2207-48f4-809f-80e8b7f24918-kube-api-access-tgkbj\") pod \"neutron-dhcp-edpm-multinodeset-edpm-compute-global-9c6xq\" (UID: \"040af523-2207-48f4-809f-80e8b7f24918\") " pod="openstack-kuttl-tests/neutron-dhcp-edpm-multinodeset-edpm-compute-global-9c6xq" Jan 20 18:02:36 crc kubenswrapper[4558]: I0120 18:02:36.920357 4558 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/040af523-2207-48f4-809f-80e8b7f24918-ssh-key-edpm-compute-global\") pod \"neutron-dhcp-edpm-multinodeset-edpm-compute-global-9c6xq\" (UID: \"040af523-2207-48f4-809f-80e8b7f24918\") " pod="openstack-kuttl-tests/neutron-dhcp-edpm-multinodeset-edpm-compute-global-9c6xq" Jan 20 18:02:37 crc kubenswrapper[4558]: I0120 18:02:37.021875 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/040af523-2207-48f4-809f-80e8b7f24918-ssh-key-edpm-compute-global\") pod \"neutron-dhcp-edpm-multinodeset-edpm-compute-global-9c6xq\" (UID: \"040af523-2207-48f4-809f-80e8b7f24918\") " pod="openstack-kuttl-tests/neutron-dhcp-edpm-multinodeset-edpm-compute-global-9c6xq" Jan 20 18:02:37 crc kubenswrapper[4558]: I0120 18:02:37.021970 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/040af523-2207-48f4-809f-80e8b7f24918-neutron-dhcp-combined-ca-bundle\") pod \"neutron-dhcp-edpm-multinodeset-edpm-compute-global-9c6xq\" (UID: \"040af523-2207-48f4-809f-80e8b7f24918\") " pod="openstack-kuttl-tests/neutron-dhcp-edpm-multinodeset-edpm-compute-global-9c6xq" Jan 20 18:02:37 crc kubenswrapper[4558]: I0120 18:02:37.022051 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/040af523-2207-48f4-809f-80e8b7f24918-inventory\") pod \"neutron-dhcp-edpm-multinodeset-edpm-compute-global-9c6xq\" (UID: \"040af523-2207-48f4-809f-80e8b7f24918\") " pod="openstack-kuttl-tests/neutron-dhcp-edpm-multinodeset-edpm-compute-global-9c6xq" Jan 20 18:02:37 crc kubenswrapper[4558]: I0120 18:02:37.022078 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/040af523-2207-48f4-809f-80e8b7f24918-neutron-dhcp-agent-neutron-config-0\") pod \"neutron-dhcp-edpm-multinodeset-edpm-compute-global-9c6xq\" (UID: \"040af523-2207-48f4-809f-80e8b7f24918\") " pod="openstack-kuttl-tests/neutron-dhcp-edpm-multinodeset-edpm-compute-global-9c6xq" Jan 20 18:02:37 crc kubenswrapper[4558]: I0120 18:02:37.022107 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tgkbj\" (UniqueName: \"kubernetes.io/projected/040af523-2207-48f4-809f-80e8b7f24918-kube-api-access-tgkbj\") pod \"neutron-dhcp-edpm-multinodeset-edpm-compute-global-9c6xq\" (UID: \"040af523-2207-48f4-809f-80e8b7f24918\") " pod="openstack-kuttl-tests/neutron-dhcp-edpm-multinodeset-edpm-compute-global-9c6xq" Jan 20 18:02:37 crc kubenswrapper[4558]: I0120 18:02:37.026210 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/040af523-2207-48f4-809f-80e8b7f24918-neutron-dhcp-combined-ca-bundle\") pod \"neutron-dhcp-edpm-multinodeset-edpm-compute-global-9c6xq\" (UID: \"040af523-2207-48f4-809f-80e8b7f24918\") " pod="openstack-kuttl-tests/neutron-dhcp-edpm-multinodeset-edpm-compute-global-9c6xq" Jan 20 18:02:37 crc kubenswrapper[4558]: I0120 18:02:37.026321 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/040af523-2207-48f4-809f-80e8b7f24918-ssh-key-edpm-compute-global\") pod 
\"neutron-dhcp-edpm-multinodeset-edpm-compute-global-9c6xq\" (UID: \"040af523-2207-48f4-809f-80e8b7f24918\") " pod="openstack-kuttl-tests/neutron-dhcp-edpm-multinodeset-edpm-compute-global-9c6xq" Jan 20 18:02:37 crc kubenswrapper[4558]: I0120 18:02:37.026672 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/040af523-2207-48f4-809f-80e8b7f24918-inventory\") pod \"neutron-dhcp-edpm-multinodeset-edpm-compute-global-9c6xq\" (UID: \"040af523-2207-48f4-809f-80e8b7f24918\") " pod="openstack-kuttl-tests/neutron-dhcp-edpm-multinodeset-edpm-compute-global-9c6xq" Jan 20 18:02:37 crc kubenswrapper[4558]: I0120 18:02:37.026769 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/040af523-2207-48f4-809f-80e8b7f24918-neutron-dhcp-agent-neutron-config-0\") pod \"neutron-dhcp-edpm-multinodeset-edpm-compute-global-9c6xq\" (UID: \"040af523-2207-48f4-809f-80e8b7f24918\") " pod="openstack-kuttl-tests/neutron-dhcp-edpm-multinodeset-edpm-compute-global-9c6xq" Jan 20 18:02:37 crc kubenswrapper[4558]: I0120 18:02:37.037839 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tgkbj\" (UniqueName: \"kubernetes.io/projected/040af523-2207-48f4-809f-80e8b7f24918-kube-api-access-tgkbj\") pod \"neutron-dhcp-edpm-multinodeset-edpm-compute-global-9c6xq\" (UID: \"040af523-2207-48f4-809f-80e8b7f24918\") " pod="openstack-kuttl-tests/neutron-dhcp-edpm-multinodeset-edpm-compute-global-9c6xq" Jan 20 18:02:37 crc kubenswrapper[4558]: I0120 18:02:37.182478 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-dhcp-edpm-multinodeset-edpm-compute-global-9c6xq" Jan 20 18:02:37 crc kubenswrapper[4558]: I0120 18:02:37.569670 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-dhcp-edpm-multinodeset-edpm-compute-global-9c6xq"] Jan 20 18:02:37 crc kubenswrapper[4558]: W0120 18:02:37.897814 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod040af523_2207_48f4_809f_80e8b7f24918.slice/crio-b6849b9fb640573fb15cc1200c2d33c99f6bddee66245d8c9ac083d9784f60e0 WatchSource:0}: Error finding container b6849b9fb640573fb15cc1200c2d33c99f6bddee66245d8c9ac083d9784f60e0: Status 404 returned error can't find the container with id b6849b9fb640573fb15cc1200c2d33c99f6bddee66245d8c9ac083d9784f60e0 Jan 20 18:02:38 crc kubenswrapper[4558]: I0120 18:02:38.813929 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-dhcp-edpm-multinodeset-edpm-compute-global-9c6xq" event={"ID":"040af523-2207-48f4-809f-80e8b7f24918","Type":"ContainerStarted","Data":"5bf40472223623fb14809cdce4ee579c75362a851d306e78c2354777d1677c4c"} Jan 20 18:02:38 crc kubenswrapper[4558]: I0120 18:02:38.814007 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-dhcp-edpm-multinodeset-edpm-compute-global-9c6xq" event={"ID":"040af523-2207-48f4-809f-80e8b7f24918","Type":"ContainerStarted","Data":"b6849b9fb640573fb15cc1200c2d33c99f6bddee66245d8c9ac083d9784f60e0"} Jan 20 18:02:38 crc kubenswrapper[4558]: I0120 18:02:38.831675 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/neutron-dhcp-edpm-multinodeset-edpm-compute-global-9c6xq" podStartSLOduration=2.295833365 podStartE2EDuration="2.831659097s" 
podCreationTimestamp="2026-01-20 18:02:36 +0000 UTC" firstStartedPulling="2026-01-20 18:02:37.900479472 +0000 UTC m=+4851.660817439" lastFinishedPulling="2026-01-20 18:02:38.436305204 +0000 UTC m=+4852.196643171" observedRunningTime="2026-01-20 18:02:38.828793029 +0000 UTC m=+4852.589130995" watchObservedRunningTime="2026-01-20 18:02:38.831659097 +0000 UTC m=+4852.591997063" Jan 20 18:02:39 crc kubenswrapper[4558]: I0120 18:02:39.825614 4558 generic.go:334] "Generic (PLEG): container finished" podID="040af523-2207-48f4-809f-80e8b7f24918" containerID="5bf40472223623fb14809cdce4ee579c75362a851d306e78c2354777d1677c4c" exitCode=0 Jan 20 18:02:39 crc kubenswrapper[4558]: I0120 18:02:39.825669 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-dhcp-edpm-multinodeset-edpm-compute-global-9c6xq" event={"ID":"040af523-2207-48f4-809f-80e8b7f24918","Type":"ContainerDied","Data":"5bf40472223623fb14809cdce4ee579c75362a851d306e78c2354777d1677c4c"} Jan 20 18:02:41 crc kubenswrapper[4558]: I0120 18:02:41.078343 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-dhcp-edpm-multinodeset-edpm-compute-global-9c6xq" Jan 20 18:02:41 crc kubenswrapper[4558]: I0120 18:02:41.193259 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/040af523-2207-48f4-809f-80e8b7f24918-neutron-dhcp-combined-ca-bundle\") pod \"040af523-2207-48f4-809f-80e8b7f24918\" (UID: \"040af523-2207-48f4-809f-80e8b7f24918\") " Jan 20 18:02:41 crc kubenswrapper[4558]: I0120 18:02:41.193322 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tgkbj\" (UniqueName: \"kubernetes.io/projected/040af523-2207-48f4-809f-80e8b7f24918-kube-api-access-tgkbj\") pod \"040af523-2207-48f4-809f-80e8b7f24918\" (UID: \"040af523-2207-48f4-809f-80e8b7f24918\") " Jan 20 18:02:41 crc kubenswrapper[4558]: I0120 18:02:41.193357 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/040af523-2207-48f4-809f-80e8b7f24918-inventory\") pod \"040af523-2207-48f4-809f-80e8b7f24918\" (UID: \"040af523-2207-48f4-809f-80e8b7f24918\") " Jan 20 18:02:41 crc kubenswrapper[4558]: I0120 18:02:41.193415 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/040af523-2207-48f4-809f-80e8b7f24918-ssh-key-edpm-compute-global\") pod \"040af523-2207-48f4-809f-80e8b7f24918\" (UID: \"040af523-2207-48f4-809f-80e8b7f24918\") " Jan 20 18:02:41 crc kubenswrapper[4558]: I0120 18:02:41.194120 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/040af523-2207-48f4-809f-80e8b7f24918-neutron-dhcp-agent-neutron-config-0\") pod \"040af523-2207-48f4-809f-80e8b7f24918\" (UID: \"040af523-2207-48f4-809f-80e8b7f24918\") " Jan 20 18:02:41 crc kubenswrapper[4558]: I0120 18:02:41.199845 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/040af523-2207-48f4-809f-80e8b7f24918-kube-api-access-tgkbj" (OuterVolumeSpecName: "kube-api-access-tgkbj") pod "040af523-2207-48f4-809f-80e8b7f24918" (UID: "040af523-2207-48f4-809f-80e8b7f24918"). InnerVolumeSpecName "kube-api-access-tgkbj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:02:41 crc kubenswrapper[4558]: I0120 18:02:41.204344 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/040af523-2207-48f4-809f-80e8b7f24918-neutron-dhcp-combined-ca-bundle" (OuterVolumeSpecName: "neutron-dhcp-combined-ca-bundle") pod "040af523-2207-48f4-809f-80e8b7f24918" (UID: "040af523-2207-48f4-809f-80e8b7f24918"). InnerVolumeSpecName "neutron-dhcp-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:02:41 crc kubenswrapper[4558]: I0120 18:02:41.215095 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/040af523-2207-48f4-809f-80e8b7f24918-ssh-key-edpm-compute-global" (OuterVolumeSpecName: "ssh-key-edpm-compute-global") pod "040af523-2207-48f4-809f-80e8b7f24918" (UID: "040af523-2207-48f4-809f-80e8b7f24918"). InnerVolumeSpecName "ssh-key-edpm-compute-global". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:02:41 crc kubenswrapper[4558]: I0120 18:02:41.215545 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/040af523-2207-48f4-809f-80e8b7f24918-neutron-dhcp-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-dhcp-agent-neutron-config-0") pod "040af523-2207-48f4-809f-80e8b7f24918" (UID: "040af523-2207-48f4-809f-80e8b7f24918"). InnerVolumeSpecName "neutron-dhcp-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:02:41 crc kubenswrapper[4558]: I0120 18:02:41.216005 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/040af523-2207-48f4-809f-80e8b7f24918-inventory" (OuterVolumeSpecName: "inventory") pod "040af523-2207-48f4-809f-80e8b7f24918" (UID: "040af523-2207-48f4-809f-80e8b7f24918"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:02:41 crc kubenswrapper[4558]: I0120 18:02:41.296231 4558 reconciler_common.go:293] "Volume detached for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/040af523-2207-48f4-809f-80e8b7f24918-neutron-dhcp-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:02:41 crc kubenswrapper[4558]: I0120 18:02:41.296271 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tgkbj\" (UniqueName: \"kubernetes.io/projected/040af523-2207-48f4-809f-80e8b7f24918-kube-api-access-tgkbj\") on node \"crc\" DevicePath \"\"" Jan 20 18:02:41 crc kubenswrapper[4558]: I0120 18:02:41.296285 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/040af523-2207-48f4-809f-80e8b7f24918-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:02:41 crc kubenswrapper[4558]: I0120 18:02:41.296296 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/040af523-2207-48f4-809f-80e8b7f24918-ssh-key-edpm-compute-global\") on node \"crc\" DevicePath \"\"" Jan 20 18:02:41 crc kubenswrapper[4558]: I0120 18:02:41.296307 4558 reconciler_common.go:293] "Volume detached for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/040af523-2207-48f4-809f-80e8b7f24918-neutron-dhcp-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Jan 20 18:02:41 crc kubenswrapper[4558]: I0120 18:02:41.857180 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-dhcp-edpm-multinodeset-edpm-compute-global-9c6xq" event={"ID":"040af523-2207-48f4-809f-80e8b7f24918","Type":"ContainerDied","Data":"b6849b9fb640573fb15cc1200c2d33c99f6bddee66245d8c9ac083d9784f60e0"} Jan 20 18:02:41 crc kubenswrapper[4558]: I0120 18:02:41.857230 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b6849b9fb640573fb15cc1200c2d33c99f6bddee66245d8c9ac083d9784f60e0" Jan 20 18:02:41 crc kubenswrapper[4558]: I0120 18:02:41.857233 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-dhcp-edpm-multinodeset-edpm-compute-global-9c6xq" Jan 20 18:02:41 crc kubenswrapper[4558]: I0120 18:02:41.896415 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/libvirt-edpm-multinodeset-edpm-compute-global-l4lg2"] Jan 20 18:02:41 crc kubenswrapper[4558]: E0120 18:02:41.896819 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="040af523-2207-48f4-809f-80e8b7f24918" containerName="neutron-dhcp-edpm-multinodeset-edpm-compute-global" Jan 20 18:02:41 crc kubenswrapper[4558]: I0120 18:02:41.896839 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="040af523-2207-48f4-809f-80e8b7f24918" containerName="neutron-dhcp-edpm-multinodeset-edpm-compute-global" Jan 20 18:02:41 crc kubenswrapper[4558]: I0120 18:02:41.897011 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="040af523-2207-48f4-809f-80e8b7f24918" containerName="neutron-dhcp-edpm-multinodeset-edpm-compute-global" Jan 20 18:02:41 crc kubenswrapper[4558]: I0120 18:02:41.897540 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/libvirt-edpm-multinodeset-edpm-compute-global-l4lg2" Jan 20 18:02:41 crc kubenswrapper[4558]: I0120 18:02:41.900180 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"combined-ca-bundle" Jan 20 18:02:41 crc kubenswrapper[4558]: I0120 18:02:41.905342 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/450b08fd-f6a4-439d-80fe-b92708702750-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-multinodeset-edpm-compute-global-l4lg2\" (UID: \"450b08fd-f6a4-439d-80fe-b92708702750\") " pod="openstack-kuttl-tests/libvirt-edpm-multinodeset-edpm-compute-global-l4lg2" Jan 20 18:02:41 crc kubenswrapper[4558]: I0120 18:02:41.905494 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/450b08fd-f6a4-439d-80fe-b92708702750-libvirt-secret-0\") pod \"libvirt-edpm-multinodeset-edpm-compute-global-l4lg2\" (UID: \"450b08fd-f6a4-439d-80fe-b92708702750\") " pod="openstack-kuttl-tests/libvirt-edpm-multinodeset-edpm-compute-global-l4lg2" Jan 20 18:02:41 crc kubenswrapper[4558]: I0120 18:02:41.905563 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gkffc\" (UniqueName: \"kubernetes.io/projected/450b08fd-f6a4-439d-80fe-b92708702750-kube-api-access-gkffc\") pod \"libvirt-edpm-multinodeset-edpm-compute-global-l4lg2\" (UID: \"450b08fd-f6a4-439d-80fe-b92708702750\") " pod="openstack-kuttl-tests/libvirt-edpm-multinodeset-edpm-compute-global-l4lg2" Jan 20 18:02:41 crc kubenswrapper[4558]: I0120 18:02:41.905679 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/450b08fd-f6a4-439d-80fe-b92708702750-ssh-key-edpm-compute-global\") pod \"libvirt-edpm-multinodeset-edpm-compute-global-l4lg2\" (UID: \"450b08fd-f6a4-439d-80fe-b92708702750\") " pod="openstack-kuttl-tests/libvirt-edpm-multinodeset-edpm-compute-global-l4lg2" Jan 20 18:02:41 crc kubenswrapper[4558]: I0120 18:02:41.905843 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/450b08fd-f6a4-439d-80fe-b92708702750-inventory\") pod \"libvirt-edpm-multinodeset-edpm-compute-global-l4lg2\" (UID: \"450b08fd-f6a4-439d-80fe-b92708702750\") " pod="openstack-kuttl-tests/libvirt-edpm-multinodeset-edpm-compute-global-l4lg2" Jan 20 18:02:41 crc kubenswrapper[4558]: I0120 18:02:41.907930 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"edpm-compute-global-dockercfg-wkdxg" Jan 20 18:02:41 crc kubenswrapper[4558]: I0120 18:02:41.908372 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/libvirt-edpm-multinodeset-edpm-compute-global-l4lg2"] Jan 20 18:02:41 crc kubenswrapper[4558]: I0120 18:02:41.909262 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 18:02:41 crc kubenswrapper[4558]: I0120 18:02:41.909539 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"libvirt-secret" Jan 20 18:02:41 crc kubenswrapper[4558]: I0120 18:02:41.910138 4558 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 18:02:41 crc kubenswrapper[4558]: I0120 18:02:41.911680 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-edpm-compute-global" Jan 20 18:02:42 crc kubenswrapper[4558]: I0120 18:02:42.008279 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/450b08fd-f6a4-439d-80fe-b92708702750-libvirt-secret-0\") pod \"libvirt-edpm-multinodeset-edpm-compute-global-l4lg2\" (UID: \"450b08fd-f6a4-439d-80fe-b92708702750\") " pod="openstack-kuttl-tests/libvirt-edpm-multinodeset-edpm-compute-global-l4lg2" Jan 20 18:02:42 crc kubenswrapper[4558]: I0120 18:02:42.008376 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gkffc\" (UniqueName: \"kubernetes.io/projected/450b08fd-f6a4-439d-80fe-b92708702750-kube-api-access-gkffc\") pod \"libvirt-edpm-multinodeset-edpm-compute-global-l4lg2\" (UID: \"450b08fd-f6a4-439d-80fe-b92708702750\") " pod="openstack-kuttl-tests/libvirt-edpm-multinodeset-edpm-compute-global-l4lg2" Jan 20 18:02:42 crc kubenswrapper[4558]: I0120 18:02:42.008431 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/450b08fd-f6a4-439d-80fe-b92708702750-ssh-key-edpm-compute-global\") pod \"libvirt-edpm-multinodeset-edpm-compute-global-l4lg2\" (UID: \"450b08fd-f6a4-439d-80fe-b92708702750\") " pod="openstack-kuttl-tests/libvirt-edpm-multinodeset-edpm-compute-global-l4lg2" Jan 20 18:02:42 crc kubenswrapper[4558]: I0120 18:02:42.008459 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/450b08fd-f6a4-439d-80fe-b92708702750-inventory\") pod \"libvirt-edpm-multinodeset-edpm-compute-global-l4lg2\" (UID: \"450b08fd-f6a4-439d-80fe-b92708702750\") " pod="openstack-kuttl-tests/libvirt-edpm-multinodeset-edpm-compute-global-l4lg2" Jan 20 18:02:42 crc kubenswrapper[4558]: I0120 18:02:42.008508 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/450b08fd-f6a4-439d-80fe-b92708702750-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-multinodeset-edpm-compute-global-l4lg2\" (UID: \"450b08fd-f6a4-439d-80fe-b92708702750\") " pod="openstack-kuttl-tests/libvirt-edpm-multinodeset-edpm-compute-global-l4lg2" Jan 20 18:02:42 crc kubenswrapper[4558]: I0120 18:02:42.014582 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/450b08fd-f6a4-439d-80fe-b92708702750-inventory\") pod \"libvirt-edpm-multinodeset-edpm-compute-global-l4lg2\" (UID: \"450b08fd-f6a4-439d-80fe-b92708702750\") " pod="openstack-kuttl-tests/libvirt-edpm-multinodeset-edpm-compute-global-l4lg2" Jan 20 18:02:42 crc kubenswrapper[4558]: I0120 18:02:42.014586 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/450b08fd-f6a4-439d-80fe-b92708702750-libvirt-secret-0\") pod \"libvirt-edpm-multinodeset-edpm-compute-global-l4lg2\" (UID: \"450b08fd-f6a4-439d-80fe-b92708702750\") " pod="openstack-kuttl-tests/libvirt-edpm-multinodeset-edpm-compute-global-l4lg2" Jan 20 18:02:42 crc kubenswrapper[4558]: I0120 18:02:42.014719 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/450b08fd-f6a4-439d-80fe-b92708702750-ssh-key-edpm-compute-global\") pod \"libvirt-edpm-multinodeset-edpm-compute-global-l4lg2\" (UID: \"450b08fd-f6a4-439d-80fe-b92708702750\") " pod="openstack-kuttl-tests/libvirt-edpm-multinodeset-edpm-compute-global-l4lg2" Jan 20 18:02:42 crc kubenswrapper[4558]: I0120 18:02:42.015642 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/450b08fd-f6a4-439d-80fe-b92708702750-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-multinodeset-edpm-compute-global-l4lg2\" (UID: \"450b08fd-f6a4-439d-80fe-b92708702750\") " pod="openstack-kuttl-tests/libvirt-edpm-multinodeset-edpm-compute-global-l4lg2" Jan 20 18:02:42 crc kubenswrapper[4558]: I0120 18:02:42.023699 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gkffc\" (UniqueName: \"kubernetes.io/projected/450b08fd-f6a4-439d-80fe-b92708702750-kube-api-access-gkffc\") pod \"libvirt-edpm-multinodeset-edpm-compute-global-l4lg2\" (UID: \"450b08fd-f6a4-439d-80fe-b92708702750\") " pod="openstack-kuttl-tests/libvirt-edpm-multinodeset-edpm-compute-global-l4lg2" Jan 20 18:02:42 crc kubenswrapper[4558]: I0120 18:02:42.211622 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/libvirt-edpm-multinodeset-edpm-compute-global-l4lg2" Jan 20 18:02:42 crc kubenswrapper[4558]: I0120 18:02:42.595921 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/libvirt-edpm-multinodeset-edpm-compute-global-l4lg2"] Jan 20 18:02:42 crc kubenswrapper[4558]: W0120 18:02:42.599336 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod450b08fd_f6a4_439d_80fe_b92708702750.slice/crio-e10898e672583d7f57eb260bfd5b941bc73e71d5cdd6d542462f2de4327bb68e WatchSource:0}: Error finding container e10898e672583d7f57eb260bfd5b941bc73e71d5cdd6d542462f2de4327bb68e: Status 404 returned error can't find the container with id e10898e672583d7f57eb260bfd5b941bc73e71d5cdd6d542462f2de4327bb68e Jan 20 18:02:42 crc kubenswrapper[4558]: I0120 18:02:42.868394 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/libvirt-edpm-multinodeset-edpm-compute-global-l4lg2" event={"ID":"450b08fd-f6a4-439d-80fe-b92708702750","Type":"ContainerStarted","Data":"e10898e672583d7f57eb260bfd5b941bc73e71d5cdd6d542462f2de4327bb68e"} Jan 20 18:02:43 crc kubenswrapper[4558]: I0120 18:02:43.881611 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/libvirt-edpm-multinodeset-edpm-compute-global-l4lg2" event={"ID":"450b08fd-f6a4-439d-80fe-b92708702750","Type":"ContainerStarted","Data":"5cafd2c19011fe93c117ac03c2453a8fd0a8f11382645004596df4482604d641"} Jan 20 18:02:43 crc kubenswrapper[4558]: I0120 18:02:43.900249 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/libvirt-edpm-multinodeset-edpm-compute-global-l4lg2" podStartSLOduration=2.383329115 podStartE2EDuration="2.900224673s" podCreationTimestamp="2026-01-20 18:02:41 +0000 UTC" firstStartedPulling="2026-01-20 18:02:42.601981045 +0000 UTC m=+4856.362319013" lastFinishedPulling="2026-01-20 18:02:43.118876603 +0000 UTC m=+4856.879214571" observedRunningTime="2026-01-20 18:02:43.896553262 +0000 UTC m=+4857.656891229" watchObservedRunningTime="2026-01-20 18:02:43.900224673 +0000 UTC 
m=+4857.660562640" Jan 20 18:02:44 crc kubenswrapper[4558]: I0120 18:02:44.893245 4558 generic.go:334] "Generic (PLEG): container finished" podID="450b08fd-f6a4-439d-80fe-b92708702750" containerID="5cafd2c19011fe93c117ac03c2453a8fd0a8f11382645004596df4482604d641" exitCode=0 Jan 20 18:02:44 crc kubenswrapper[4558]: I0120 18:02:44.893297 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/libvirt-edpm-multinodeset-edpm-compute-global-l4lg2" event={"ID":"450b08fd-f6a4-439d-80fe-b92708702750","Type":"ContainerDied","Data":"5cafd2c19011fe93c117ac03c2453a8fd0a8f11382645004596df4482604d641"} Jan 20 18:02:46 crc kubenswrapper[4558]: I0120 18:02:46.250417 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/libvirt-edpm-multinodeset-edpm-compute-global-l4lg2" Jan 20 18:02:46 crc kubenswrapper[4558]: I0120 18:02:46.276931 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/450b08fd-f6a4-439d-80fe-b92708702750-ssh-key-edpm-compute-global\") pod \"450b08fd-f6a4-439d-80fe-b92708702750\" (UID: \"450b08fd-f6a4-439d-80fe-b92708702750\") " Jan 20 18:02:46 crc kubenswrapper[4558]: I0120 18:02:46.276985 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gkffc\" (UniqueName: \"kubernetes.io/projected/450b08fd-f6a4-439d-80fe-b92708702750-kube-api-access-gkffc\") pod \"450b08fd-f6a4-439d-80fe-b92708702750\" (UID: \"450b08fd-f6a4-439d-80fe-b92708702750\") " Jan 20 18:02:46 crc kubenswrapper[4558]: I0120 18:02:46.277007 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/450b08fd-f6a4-439d-80fe-b92708702750-libvirt-combined-ca-bundle\") pod \"450b08fd-f6a4-439d-80fe-b92708702750\" (UID: \"450b08fd-f6a4-439d-80fe-b92708702750\") " Jan 20 18:02:46 crc kubenswrapper[4558]: I0120 18:02:46.277045 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/450b08fd-f6a4-439d-80fe-b92708702750-libvirt-secret-0\") pod \"450b08fd-f6a4-439d-80fe-b92708702750\" (UID: \"450b08fd-f6a4-439d-80fe-b92708702750\") " Jan 20 18:02:46 crc kubenswrapper[4558]: I0120 18:02:46.277111 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/450b08fd-f6a4-439d-80fe-b92708702750-inventory\") pod \"450b08fd-f6a4-439d-80fe-b92708702750\" (UID: \"450b08fd-f6a4-439d-80fe-b92708702750\") " Jan 20 18:02:46 crc kubenswrapper[4558]: I0120 18:02:46.282574 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/450b08fd-f6a4-439d-80fe-b92708702750-kube-api-access-gkffc" (OuterVolumeSpecName: "kube-api-access-gkffc") pod "450b08fd-f6a4-439d-80fe-b92708702750" (UID: "450b08fd-f6a4-439d-80fe-b92708702750"). InnerVolumeSpecName "kube-api-access-gkffc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:02:46 crc kubenswrapper[4558]: I0120 18:02:46.284823 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/450b08fd-f6a4-439d-80fe-b92708702750-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "450b08fd-f6a4-439d-80fe-b92708702750" (UID: "450b08fd-f6a4-439d-80fe-b92708702750"). InnerVolumeSpecName "libvirt-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:02:46 crc kubenswrapper[4558]: I0120 18:02:46.295602 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/450b08fd-f6a4-439d-80fe-b92708702750-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "450b08fd-f6a4-439d-80fe-b92708702750" (UID: "450b08fd-f6a4-439d-80fe-b92708702750"). InnerVolumeSpecName "libvirt-secret-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:02:46 crc kubenswrapper[4558]: I0120 18:02:46.297121 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/450b08fd-f6a4-439d-80fe-b92708702750-ssh-key-edpm-compute-global" (OuterVolumeSpecName: "ssh-key-edpm-compute-global") pod "450b08fd-f6a4-439d-80fe-b92708702750" (UID: "450b08fd-f6a4-439d-80fe-b92708702750"). InnerVolumeSpecName "ssh-key-edpm-compute-global". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:02:46 crc kubenswrapper[4558]: I0120 18:02:46.379149 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gkffc\" (UniqueName: \"kubernetes.io/projected/450b08fd-f6a4-439d-80fe-b92708702750-kube-api-access-gkffc\") on node \"crc\" DevicePath \"\"" Jan 20 18:02:46 crc kubenswrapper[4558]: I0120 18:02:46.379196 4558 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/450b08fd-f6a4-439d-80fe-b92708702750-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:02:46 crc kubenswrapper[4558]: I0120 18:02:46.379209 4558 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/450b08fd-f6a4-439d-80fe-b92708702750-libvirt-secret-0\") on node \"crc\" DevicePath \"\"" Jan 20 18:02:46 crc kubenswrapper[4558]: I0120 18:02:46.379220 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/450b08fd-f6a4-439d-80fe-b92708702750-ssh-key-edpm-compute-global\") on node \"crc\" DevicePath \"\"" Jan 20 18:02:46 crc kubenswrapper[4558]: I0120 18:02:46.396940 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/450b08fd-f6a4-439d-80fe-b92708702750-inventory" (OuterVolumeSpecName: "inventory") pod "450b08fd-f6a4-439d-80fe-b92708702750" (UID: "450b08fd-f6a4-439d-80fe-b92708702750"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:02:46 crc kubenswrapper[4558]: I0120 18:02:46.481521 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/450b08fd-f6a4-439d-80fe-b92708702750-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:02:46 crc kubenswrapper[4558]: I0120 18:02:46.910481 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/libvirt-edpm-multinodeset-edpm-compute-global-l4lg2" event={"ID":"450b08fd-f6a4-439d-80fe-b92708702750","Type":"ContainerDied","Data":"e10898e672583d7f57eb260bfd5b941bc73e71d5cdd6d542462f2de4327bb68e"} Jan 20 18:02:46 crc kubenswrapper[4558]: I0120 18:02:46.910540 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e10898e672583d7f57eb260bfd5b941bc73e71d5cdd6d542462f2de4327bb68e" Jan 20 18:02:46 crc kubenswrapper[4558]: I0120 18:02:46.910538 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/libvirt-edpm-multinodeset-edpm-compute-global-l4lg2" Jan 20 18:02:46 crc kubenswrapper[4558]: I0120 18:02:46.972426 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-global-clkvg"] Jan 20 18:02:46 crc kubenswrapper[4558]: E0120 18:02:46.972786 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="450b08fd-f6a4-439d-80fe-b92708702750" containerName="libvirt-edpm-multinodeset-edpm-compute-global" Jan 20 18:02:46 crc kubenswrapper[4558]: I0120 18:02:46.972810 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="450b08fd-f6a4-439d-80fe-b92708702750" containerName="libvirt-edpm-multinodeset-edpm-compute-global" Jan 20 18:02:46 crc kubenswrapper[4558]: I0120 18:02:46.972947 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="450b08fd-f6a4-439d-80fe-b92708702750" containerName="libvirt-edpm-multinodeset-edpm-compute-global" Jan 20 18:02:46 crc kubenswrapper[4558]: I0120 18:02:46.973447 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-global-clkvg" Jan 20 18:02:46 crc kubenswrapper[4558]: I0120 18:02:46.975263 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"combined-ca-bundle" Jan 20 18:02:46 crc kubenswrapper[4558]: I0120 18:02:46.975661 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-migration-ssh-key" Jan 20 18:02:46 crc kubenswrapper[4558]: I0120 18:02:46.975807 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"edpm-compute-global-dockercfg-wkdxg" Jan 20 18:02:46 crc kubenswrapper[4558]: I0120 18:02:46.976037 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 18:02:46 crc kubenswrapper[4558]: I0120 18:02:46.976136 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-compute-config" Jan 20 18:02:46 crc kubenswrapper[4558]: I0120 18:02:46.977339 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-edpm-compute-global" Jan 20 18:02:46 crc kubenswrapper[4558]: I0120 18:02:46.977347 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 18:02:46 crc kubenswrapper[4558]: I0120 18:02:46.981411 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-global-clkvg"] Jan 20 18:02:46 crc kubenswrapper[4558]: I0120 18:02:46.996699 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5a44994-7dfb-4450-8b25-66b78fea03a0-nova-combined-ca-bundle\") pod \"nova-edpm-multinodeset-edpm-compute-global-clkvg\" (UID: \"f5a44994-7dfb-4450-8b25-66b78fea03a0\") " pod="openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-global-clkvg" Jan 20 18:02:46 crc kubenswrapper[4558]: I0120 18:02:46.996873 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/f5a44994-7dfb-4450-8b25-66b78fea03a0-nova-migration-ssh-key-0\") pod \"nova-edpm-multinodeset-edpm-compute-global-clkvg\" (UID: 
\"f5a44994-7dfb-4450-8b25-66b78fea03a0\") " pod="openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-global-clkvg" Jan 20 18:02:46 crc kubenswrapper[4558]: I0120 18:02:46.997022 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tjs6s\" (UniqueName: \"kubernetes.io/projected/f5a44994-7dfb-4450-8b25-66b78fea03a0-kube-api-access-tjs6s\") pod \"nova-edpm-multinodeset-edpm-compute-global-clkvg\" (UID: \"f5a44994-7dfb-4450-8b25-66b78fea03a0\") " pod="openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-global-clkvg" Jan 20 18:02:46 crc kubenswrapper[4558]: I0120 18:02:46.997119 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/f5a44994-7dfb-4450-8b25-66b78fea03a0-nova-cell1-compute-config-0\") pod \"nova-edpm-multinodeset-edpm-compute-global-clkvg\" (UID: \"f5a44994-7dfb-4450-8b25-66b78fea03a0\") " pod="openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-global-clkvg" Jan 20 18:02:46 crc kubenswrapper[4558]: I0120 18:02:46.997241 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/f5a44994-7dfb-4450-8b25-66b78fea03a0-ssh-key-edpm-compute-global\") pod \"nova-edpm-multinodeset-edpm-compute-global-clkvg\" (UID: \"f5a44994-7dfb-4450-8b25-66b78fea03a0\") " pod="openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-global-clkvg" Jan 20 18:02:46 crc kubenswrapper[4558]: I0120 18:02:46.997326 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f5a44994-7dfb-4450-8b25-66b78fea03a0-inventory\") pod \"nova-edpm-multinodeset-edpm-compute-global-clkvg\" (UID: \"f5a44994-7dfb-4450-8b25-66b78fea03a0\") " pod="openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-global-clkvg" Jan 20 18:02:46 crc kubenswrapper[4558]: I0120 18:02:46.997411 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/f5a44994-7dfb-4450-8b25-66b78fea03a0-nova-cell1-compute-config-1\") pod \"nova-edpm-multinodeset-edpm-compute-global-clkvg\" (UID: \"f5a44994-7dfb-4450-8b25-66b78fea03a0\") " pod="openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-global-clkvg" Jan 20 18:02:46 crc kubenswrapper[4558]: I0120 18:02:46.997485 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/f5a44994-7dfb-4450-8b25-66b78fea03a0-nova-migration-ssh-key-1\") pod \"nova-edpm-multinodeset-edpm-compute-global-clkvg\" (UID: \"f5a44994-7dfb-4450-8b25-66b78fea03a0\") " pod="openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-global-clkvg" Jan 20 18:02:47 crc kubenswrapper[4558]: I0120 18:02:47.099188 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5a44994-7dfb-4450-8b25-66b78fea03a0-nova-combined-ca-bundle\") pod \"nova-edpm-multinodeset-edpm-compute-global-clkvg\" (UID: \"f5a44994-7dfb-4450-8b25-66b78fea03a0\") " pod="openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-global-clkvg" Jan 20 18:02:47 crc kubenswrapper[4558]: I0120 18:02:47.099312 4558 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/f5a44994-7dfb-4450-8b25-66b78fea03a0-nova-migration-ssh-key-0\") pod \"nova-edpm-multinodeset-edpm-compute-global-clkvg\" (UID: \"f5a44994-7dfb-4450-8b25-66b78fea03a0\") " pod="openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-global-clkvg" Jan 20 18:02:47 crc kubenswrapper[4558]: I0120 18:02:47.099422 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tjs6s\" (UniqueName: \"kubernetes.io/projected/f5a44994-7dfb-4450-8b25-66b78fea03a0-kube-api-access-tjs6s\") pod \"nova-edpm-multinodeset-edpm-compute-global-clkvg\" (UID: \"f5a44994-7dfb-4450-8b25-66b78fea03a0\") " pod="openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-global-clkvg" Jan 20 18:02:47 crc kubenswrapper[4558]: I0120 18:02:47.099794 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/f5a44994-7dfb-4450-8b25-66b78fea03a0-nova-cell1-compute-config-0\") pod \"nova-edpm-multinodeset-edpm-compute-global-clkvg\" (UID: \"f5a44994-7dfb-4450-8b25-66b78fea03a0\") " pod="openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-global-clkvg" Jan 20 18:02:47 crc kubenswrapper[4558]: I0120 18:02:47.100306 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/f5a44994-7dfb-4450-8b25-66b78fea03a0-ssh-key-edpm-compute-global\") pod \"nova-edpm-multinodeset-edpm-compute-global-clkvg\" (UID: \"f5a44994-7dfb-4450-8b25-66b78fea03a0\") " pod="openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-global-clkvg" Jan 20 18:02:47 crc kubenswrapper[4558]: I0120 18:02:47.100346 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f5a44994-7dfb-4450-8b25-66b78fea03a0-inventory\") pod \"nova-edpm-multinodeset-edpm-compute-global-clkvg\" (UID: \"f5a44994-7dfb-4450-8b25-66b78fea03a0\") " pod="openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-global-clkvg" Jan 20 18:02:47 crc kubenswrapper[4558]: I0120 18:02:47.100386 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/f5a44994-7dfb-4450-8b25-66b78fea03a0-nova-migration-ssh-key-1\") pod \"nova-edpm-multinodeset-edpm-compute-global-clkvg\" (UID: \"f5a44994-7dfb-4450-8b25-66b78fea03a0\") " pod="openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-global-clkvg" Jan 20 18:02:47 crc kubenswrapper[4558]: I0120 18:02:47.100414 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/f5a44994-7dfb-4450-8b25-66b78fea03a0-nova-cell1-compute-config-1\") pod \"nova-edpm-multinodeset-edpm-compute-global-clkvg\" (UID: \"f5a44994-7dfb-4450-8b25-66b78fea03a0\") " pod="openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-global-clkvg" Jan 20 18:02:47 crc kubenswrapper[4558]: I0120 18:02:47.116892 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5a44994-7dfb-4450-8b25-66b78fea03a0-nova-combined-ca-bundle\") pod \"nova-edpm-multinodeset-edpm-compute-global-clkvg\" (UID: \"f5a44994-7dfb-4450-8b25-66b78fea03a0\") " pod="openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-global-clkvg" Jan 20 18:02:47 crc kubenswrapper[4558]: 
I0120 18:02:47.117256 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/f5a44994-7dfb-4450-8b25-66b78fea03a0-nova-migration-ssh-key-0\") pod \"nova-edpm-multinodeset-edpm-compute-global-clkvg\" (UID: \"f5a44994-7dfb-4450-8b25-66b78fea03a0\") " pod="openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-global-clkvg" Jan 20 18:02:47 crc kubenswrapper[4558]: I0120 18:02:47.123030 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tjs6s\" (UniqueName: \"kubernetes.io/projected/f5a44994-7dfb-4450-8b25-66b78fea03a0-kube-api-access-tjs6s\") pod \"nova-edpm-multinodeset-edpm-compute-global-clkvg\" (UID: \"f5a44994-7dfb-4450-8b25-66b78fea03a0\") " pod="openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-global-clkvg" Jan 20 18:02:47 crc kubenswrapper[4558]: I0120 18:02:47.123419 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f5a44994-7dfb-4450-8b25-66b78fea03a0-inventory\") pod \"nova-edpm-multinodeset-edpm-compute-global-clkvg\" (UID: \"f5a44994-7dfb-4450-8b25-66b78fea03a0\") " pod="openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-global-clkvg" Jan 20 18:02:47 crc kubenswrapper[4558]: I0120 18:02:47.123623 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/f5a44994-7dfb-4450-8b25-66b78fea03a0-nova-cell1-compute-config-1\") pod \"nova-edpm-multinodeset-edpm-compute-global-clkvg\" (UID: \"f5a44994-7dfb-4450-8b25-66b78fea03a0\") " pod="openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-global-clkvg" Jan 20 18:02:47 crc kubenswrapper[4558]: I0120 18:02:47.127628 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/f5a44994-7dfb-4450-8b25-66b78fea03a0-nova-cell1-compute-config-0\") pod \"nova-edpm-multinodeset-edpm-compute-global-clkvg\" (UID: \"f5a44994-7dfb-4450-8b25-66b78fea03a0\") " pod="openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-global-clkvg" Jan 20 18:02:47 crc kubenswrapper[4558]: I0120 18:02:47.128123 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/f5a44994-7dfb-4450-8b25-66b78fea03a0-ssh-key-edpm-compute-global\") pod \"nova-edpm-multinodeset-edpm-compute-global-clkvg\" (UID: \"f5a44994-7dfb-4450-8b25-66b78fea03a0\") " pod="openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-global-clkvg" Jan 20 18:02:47 crc kubenswrapper[4558]: I0120 18:02:47.128313 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/f5a44994-7dfb-4450-8b25-66b78fea03a0-nova-migration-ssh-key-1\") pod \"nova-edpm-multinodeset-edpm-compute-global-clkvg\" (UID: \"f5a44994-7dfb-4450-8b25-66b78fea03a0\") " pod="openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-global-clkvg" Jan 20 18:02:47 crc kubenswrapper[4558]: I0120 18:02:47.298899 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-global-clkvg" Jan 20 18:02:47 crc kubenswrapper[4558]: I0120 18:02:47.679705 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-global-clkvg"] Jan 20 18:02:48 crc kubenswrapper[4558]: W0120 18:02:48.097654 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf5a44994_7dfb_4450_8b25_66b78fea03a0.slice/crio-0f8710eb16e88cd5f4bdc0b183163133c3a30b302b937e24a65e037a71d0f5de WatchSource:0}: Error finding container 0f8710eb16e88cd5f4bdc0b183163133c3a30b302b937e24a65e037a71d0f5de: Status 404 returned error can't find the container with id 0f8710eb16e88cd5f4bdc0b183163133c3a30b302b937e24a65e037a71d0f5de Jan 20 18:02:48 crc kubenswrapper[4558]: I0120 18:02:48.941368 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-global-clkvg" event={"ID":"f5a44994-7dfb-4450-8b25-66b78fea03a0","Type":"ContainerStarted","Data":"689c9cb422cdae5161aa35080cc015b394f770bc490f68a53ed82912791d7313"} Jan 20 18:02:48 crc kubenswrapper[4558]: I0120 18:02:48.942804 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-global-clkvg" event={"ID":"f5a44994-7dfb-4450-8b25-66b78fea03a0","Type":"ContainerStarted","Data":"0f8710eb16e88cd5f4bdc0b183163133c3a30b302b937e24a65e037a71d0f5de"} Jan 20 18:02:48 crc kubenswrapper[4558]: I0120 18:02:48.958669 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-global-clkvg" podStartSLOduration=2.446469123 podStartE2EDuration="2.958647334s" podCreationTimestamp="2026-01-20 18:02:46 +0000 UTC" firstStartedPulling="2026-01-20 18:02:48.100133753 +0000 UTC m=+4861.860471720" lastFinishedPulling="2026-01-20 18:02:48.612311965 +0000 UTC m=+4862.372649931" observedRunningTime="2026-01-20 18:02:48.957481302 +0000 UTC m=+4862.717819289" watchObservedRunningTime="2026-01-20 18:02:48.958647334 +0000 UTC m=+4862.718985292" Jan 20 18:02:49 crc kubenswrapper[4558]: I0120 18:02:49.953622 4558 generic.go:334] "Generic (PLEG): container finished" podID="f5a44994-7dfb-4450-8b25-66b78fea03a0" containerID="689c9cb422cdae5161aa35080cc015b394f770bc490f68a53ed82912791d7313" exitCode=0 Jan 20 18:02:49 crc kubenswrapper[4558]: I0120 18:02:49.953784 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-global-clkvg" event={"ID":"f5a44994-7dfb-4450-8b25-66b78fea03a0","Type":"ContainerDied","Data":"689c9cb422cdae5161aa35080cc015b394f770bc490f68a53ed82912791d7313"} Jan 20 18:02:51 crc kubenswrapper[4558]: I0120 18:02:51.187001 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-global-clkvg" Jan 20 18:02:51 crc kubenswrapper[4558]: I0120 18:02:51.264927 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5a44994-7dfb-4450-8b25-66b78fea03a0-nova-combined-ca-bundle\") pod \"f5a44994-7dfb-4450-8b25-66b78fea03a0\" (UID: \"f5a44994-7dfb-4450-8b25-66b78fea03a0\") " Jan 20 18:02:51 crc kubenswrapper[4558]: I0120 18:02:51.265040 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/f5a44994-7dfb-4450-8b25-66b78fea03a0-nova-cell1-compute-config-0\") pod \"f5a44994-7dfb-4450-8b25-66b78fea03a0\" (UID: \"f5a44994-7dfb-4450-8b25-66b78fea03a0\") " Jan 20 18:02:51 crc kubenswrapper[4558]: I0120 18:02:51.265078 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/f5a44994-7dfb-4450-8b25-66b78fea03a0-nova-migration-ssh-key-1\") pod \"f5a44994-7dfb-4450-8b25-66b78fea03a0\" (UID: \"f5a44994-7dfb-4450-8b25-66b78fea03a0\") " Jan 20 18:02:51 crc kubenswrapper[4558]: I0120 18:02:51.265101 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tjs6s\" (UniqueName: \"kubernetes.io/projected/f5a44994-7dfb-4450-8b25-66b78fea03a0-kube-api-access-tjs6s\") pod \"f5a44994-7dfb-4450-8b25-66b78fea03a0\" (UID: \"f5a44994-7dfb-4450-8b25-66b78fea03a0\") " Jan 20 18:02:51 crc kubenswrapper[4558]: I0120 18:02:51.265137 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/f5a44994-7dfb-4450-8b25-66b78fea03a0-nova-cell1-compute-config-1\") pod \"f5a44994-7dfb-4450-8b25-66b78fea03a0\" (UID: \"f5a44994-7dfb-4450-8b25-66b78fea03a0\") " Jan 20 18:02:51 crc kubenswrapper[4558]: I0120 18:02:51.265222 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/f5a44994-7dfb-4450-8b25-66b78fea03a0-ssh-key-edpm-compute-global\") pod \"f5a44994-7dfb-4450-8b25-66b78fea03a0\" (UID: \"f5a44994-7dfb-4450-8b25-66b78fea03a0\") " Jan 20 18:02:51 crc kubenswrapper[4558]: I0120 18:02:51.265251 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/f5a44994-7dfb-4450-8b25-66b78fea03a0-nova-migration-ssh-key-0\") pod \"f5a44994-7dfb-4450-8b25-66b78fea03a0\" (UID: \"f5a44994-7dfb-4450-8b25-66b78fea03a0\") " Jan 20 18:02:51 crc kubenswrapper[4558]: I0120 18:02:51.265284 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f5a44994-7dfb-4450-8b25-66b78fea03a0-inventory\") pod \"f5a44994-7dfb-4450-8b25-66b78fea03a0\" (UID: \"f5a44994-7dfb-4450-8b25-66b78fea03a0\") " Jan 20 18:02:51 crc kubenswrapper[4558]: I0120 18:02:51.270623 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5a44994-7dfb-4450-8b25-66b78fea03a0-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "f5a44994-7dfb-4450-8b25-66b78fea03a0" (UID: "f5a44994-7dfb-4450-8b25-66b78fea03a0"). InnerVolumeSpecName "nova-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:02:51 crc kubenswrapper[4558]: I0120 18:02:51.271340 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f5a44994-7dfb-4450-8b25-66b78fea03a0-kube-api-access-tjs6s" (OuterVolumeSpecName: "kube-api-access-tjs6s") pod "f5a44994-7dfb-4450-8b25-66b78fea03a0" (UID: "f5a44994-7dfb-4450-8b25-66b78fea03a0"). InnerVolumeSpecName "kube-api-access-tjs6s". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:02:51 crc kubenswrapper[4558]: I0120 18:02:51.284807 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5a44994-7dfb-4450-8b25-66b78fea03a0-inventory" (OuterVolumeSpecName: "inventory") pod "f5a44994-7dfb-4450-8b25-66b78fea03a0" (UID: "f5a44994-7dfb-4450-8b25-66b78fea03a0"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:02:51 crc kubenswrapper[4558]: I0120 18:02:51.284947 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5a44994-7dfb-4450-8b25-66b78fea03a0-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "f5a44994-7dfb-4450-8b25-66b78fea03a0" (UID: "f5a44994-7dfb-4450-8b25-66b78fea03a0"). InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:02:51 crc kubenswrapper[4558]: I0120 18:02:51.285578 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5a44994-7dfb-4450-8b25-66b78fea03a0-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "f5a44994-7dfb-4450-8b25-66b78fea03a0" (UID: "f5a44994-7dfb-4450-8b25-66b78fea03a0"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:02:51 crc kubenswrapper[4558]: I0120 18:02:51.286123 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5a44994-7dfb-4450-8b25-66b78fea03a0-ssh-key-edpm-compute-global" (OuterVolumeSpecName: "ssh-key-edpm-compute-global") pod "f5a44994-7dfb-4450-8b25-66b78fea03a0" (UID: "f5a44994-7dfb-4450-8b25-66b78fea03a0"). InnerVolumeSpecName "ssh-key-edpm-compute-global". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:02:51 crc kubenswrapper[4558]: I0120 18:02:51.288485 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5a44994-7dfb-4450-8b25-66b78fea03a0-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "f5a44994-7dfb-4450-8b25-66b78fea03a0" (UID: "f5a44994-7dfb-4450-8b25-66b78fea03a0"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:02:51 crc kubenswrapper[4558]: I0120 18:02:51.289321 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5a44994-7dfb-4450-8b25-66b78fea03a0-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "f5a44994-7dfb-4450-8b25-66b78fea03a0" (UID: "f5a44994-7dfb-4450-8b25-66b78fea03a0"). InnerVolumeSpecName "nova-cell1-compute-config-1". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:02:51 crc kubenswrapper[4558]: I0120 18:02:51.367549 4558 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/f5a44994-7dfb-4450-8b25-66b78fea03a0-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Jan 20 18:02:51 crc kubenswrapper[4558]: I0120 18:02:51.367589 4558 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/f5a44994-7dfb-4450-8b25-66b78fea03a0-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Jan 20 18:02:51 crc kubenswrapper[4558]: I0120 18:02:51.367601 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tjs6s\" (UniqueName: \"kubernetes.io/projected/f5a44994-7dfb-4450-8b25-66b78fea03a0-kube-api-access-tjs6s\") on node \"crc\" DevicePath \"\"" Jan 20 18:02:51 crc kubenswrapper[4558]: I0120 18:02:51.367612 4558 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/f5a44994-7dfb-4450-8b25-66b78fea03a0-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Jan 20 18:02:51 crc kubenswrapper[4558]: I0120 18:02:51.367623 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/f5a44994-7dfb-4450-8b25-66b78fea03a0-ssh-key-edpm-compute-global\") on node \"crc\" DevicePath \"\"" Jan 20 18:02:51 crc kubenswrapper[4558]: I0120 18:02:51.367632 4558 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/f5a44994-7dfb-4450-8b25-66b78fea03a0-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Jan 20 18:02:51 crc kubenswrapper[4558]: I0120 18:02:51.367647 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f5a44994-7dfb-4450-8b25-66b78fea03a0-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:02:51 crc kubenswrapper[4558]: I0120 18:02:51.367657 4558 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5a44994-7dfb-4450-8b25-66b78fea03a0-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:02:51 crc kubenswrapper[4558]: I0120 18:02:51.971927 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-global-clkvg" event={"ID":"f5a44994-7dfb-4450-8b25-66b78fea03a0","Type":"ContainerDied","Data":"0f8710eb16e88cd5f4bdc0b183163133c3a30b302b937e24a65e037a71d0f5de"} Jan 20 18:02:51 crc kubenswrapper[4558]: I0120 18:02:51.972332 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0f8710eb16e88cd5f4bdc0b183163133c3a30b302b937e24a65e037a71d0f5de" Jan 20 18:02:51 crc kubenswrapper[4558]: I0120 18:02:51.972005 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-global-clkvg" Jan 20 18:02:52 crc kubenswrapper[4558]: I0120 18:02:52.030520 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/custom-global-service-edpm-multinodeset-mrr8q"] Jan 20 18:02:52 crc kubenswrapper[4558]: E0120 18:02:52.030955 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5a44994-7dfb-4450-8b25-66b78fea03a0" containerName="nova-edpm-multinodeset-edpm-compute-global" Jan 20 18:02:52 crc kubenswrapper[4558]: I0120 18:02:52.030976 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5a44994-7dfb-4450-8b25-66b78fea03a0" containerName="nova-edpm-multinodeset-edpm-compute-global" Jan 20 18:02:52 crc kubenswrapper[4558]: I0120 18:02:52.031143 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5a44994-7dfb-4450-8b25-66b78fea03a0" containerName="nova-edpm-multinodeset-edpm-compute-global" Jan 20 18:02:52 crc kubenswrapper[4558]: I0120 18:02:52.031711 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/custom-global-service-edpm-multinodeset-mrr8q" Jan 20 18:02:52 crc kubenswrapper[4558]: I0120 18:02:52.033698 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-edpm-compute-global" Jan 20 18:02:52 crc kubenswrapper[4558]: I0120 18:02:52.033789 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-edpm-compute-beta-nodeset" Jan 20 18:02:52 crc kubenswrapper[4558]: I0120 18:02:52.034062 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 18:02:52 crc kubenswrapper[4558]: I0120 18:02:52.034184 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"edpm-compute-global-dockercfg-wkdxg" Jan 20 18:02:52 crc kubenswrapper[4558]: I0120 18:02:52.035120 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"combined-ca-bundle" Jan 20 18:02:52 crc kubenswrapper[4558]: I0120 18:02:52.036824 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 18:02:52 crc kubenswrapper[4558]: I0120 18:02:52.040512 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/custom-global-service-edpm-multinodeset-mrr8q"] Jan 20 18:02:52 crc kubenswrapper[4558]: I0120 18:02:52.077921 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-1\" (UniqueName: \"kubernetes.io/secret/82416928-2463-4fa9-8a7e-37319a1ca481-inventory-1\") pod \"custom-global-service-edpm-multinodeset-mrr8q\" (UID: \"82416928-2463-4fa9-8a7e-37319a1ca481\") " pod="openstack-kuttl-tests/custom-global-service-edpm-multinodeset-mrr8q" Jan 20 18:02:52 crc kubenswrapper[4558]: I0120 18:02:52.077986 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/82416928-2463-4fa9-8a7e-37319a1ca481-inventory-0\") pod \"custom-global-service-edpm-multinodeset-mrr8q\" (UID: \"82416928-2463-4fa9-8a7e-37319a1ca481\") " pod="openstack-kuttl-tests/custom-global-service-edpm-multinodeset-mrr8q" Jan 20 18:02:52 crc kubenswrapper[4558]: I0120 18:02:52.078226 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"kube-api-access-7vk5n\" (UniqueName: \"kubernetes.io/projected/82416928-2463-4fa9-8a7e-37319a1ca481-kube-api-access-7vk5n\") pod \"custom-global-service-edpm-multinodeset-mrr8q\" (UID: \"82416928-2463-4fa9-8a7e-37319a1ca481\") " pod="openstack-kuttl-tests/custom-global-service-edpm-multinodeset-mrr8q" Jan 20 18:02:52 crc kubenswrapper[4558]: I0120 18:02:52.078354 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-global-service-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/82416928-2463-4fa9-8a7e-37319a1ca481-custom-global-service-combined-ca-bundle\") pod \"custom-global-service-edpm-multinodeset-mrr8q\" (UID: \"82416928-2463-4fa9-8a7e-37319a1ca481\") " pod="openstack-kuttl-tests/custom-global-service-edpm-multinodeset-mrr8q" Jan 20 18:02:52 crc kubenswrapper[4558]: I0120 18:02:52.078406 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-edpm-compute-beta-nodeset\" (UniqueName: \"kubernetes.io/secret/82416928-2463-4fa9-8a7e-37319a1ca481-ssh-key-edpm-compute-beta-nodeset\") pod \"custom-global-service-edpm-multinodeset-mrr8q\" (UID: \"82416928-2463-4fa9-8a7e-37319a1ca481\") " pod="openstack-kuttl-tests/custom-global-service-edpm-multinodeset-mrr8q" Jan 20 18:02:52 crc kubenswrapper[4558]: I0120 18:02:52.078496 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/82416928-2463-4fa9-8a7e-37319a1ca481-ssh-key-edpm-compute-global\") pod \"custom-global-service-edpm-multinodeset-mrr8q\" (UID: \"82416928-2463-4fa9-8a7e-37319a1ca481\") " pod="openstack-kuttl-tests/custom-global-service-edpm-multinodeset-mrr8q" Jan 20 18:02:52 crc kubenswrapper[4558]: I0120 18:02:52.180135 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7vk5n\" (UniqueName: \"kubernetes.io/projected/82416928-2463-4fa9-8a7e-37319a1ca481-kube-api-access-7vk5n\") pod \"custom-global-service-edpm-multinodeset-mrr8q\" (UID: \"82416928-2463-4fa9-8a7e-37319a1ca481\") " pod="openstack-kuttl-tests/custom-global-service-edpm-multinodeset-mrr8q" Jan 20 18:02:52 crc kubenswrapper[4558]: I0120 18:02:52.180301 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-global-service-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/82416928-2463-4fa9-8a7e-37319a1ca481-custom-global-service-combined-ca-bundle\") pod \"custom-global-service-edpm-multinodeset-mrr8q\" (UID: \"82416928-2463-4fa9-8a7e-37319a1ca481\") " pod="openstack-kuttl-tests/custom-global-service-edpm-multinodeset-mrr8q" Jan 20 18:02:52 crc kubenswrapper[4558]: I0120 18:02:52.180391 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-edpm-compute-beta-nodeset\" (UniqueName: \"kubernetes.io/secret/82416928-2463-4fa9-8a7e-37319a1ca481-ssh-key-edpm-compute-beta-nodeset\") pod \"custom-global-service-edpm-multinodeset-mrr8q\" (UID: \"82416928-2463-4fa9-8a7e-37319a1ca481\") " pod="openstack-kuttl-tests/custom-global-service-edpm-multinodeset-mrr8q" Jan 20 18:02:52 crc kubenswrapper[4558]: I0120 18:02:52.180509 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/82416928-2463-4fa9-8a7e-37319a1ca481-ssh-key-edpm-compute-global\") pod \"custom-global-service-edpm-multinodeset-mrr8q\" (UID: 
\"82416928-2463-4fa9-8a7e-37319a1ca481\") " pod="openstack-kuttl-tests/custom-global-service-edpm-multinodeset-mrr8q" Jan 20 18:02:52 crc kubenswrapper[4558]: I0120 18:02:52.180591 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-1\" (UniqueName: \"kubernetes.io/secret/82416928-2463-4fa9-8a7e-37319a1ca481-inventory-1\") pod \"custom-global-service-edpm-multinodeset-mrr8q\" (UID: \"82416928-2463-4fa9-8a7e-37319a1ca481\") " pod="openstack-kuttl-tests/custom-global-service-edpm-multinodeset-mrr8q" Jan 20 18:02:52 crc kubenswrapper[4558]: I0120 18:02:52.180704 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/82416928-2463-4fa9-8a7e-37319a1ca481-inventory-0\") pod \"custom-global-service-edpm-multinodeset-mrr8q\" (UID: \"82416928-2463-4fa9-8a7e-37319a1ca481\") " pod="openstack-kuttl-tests/custom-global-service-edpm-multinodeset-mrr8q" Jan 20 18:02:52 crc kubenswrapper[4558]: I0120 18:02:52.185037 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-1\" (UniqueName: \"kubernetes.io/secret/82416928-2463-4fa9-8a7e-37319a1ca481-inventory-1\") pod \"custom-global-service-edpm-multinodeset-mrr8q\" (UID: \"82416928-2463-4fa9-8a7e-37319a1ca481\") " pod="openstack-kuttl-tests/custom-global-service-edpm-multinodeset-mrr8q" Jan 20 18:02:52 crc kubenswrapper[4558]: I0120 18:02:52.185066 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/82416928-2463-4fa9-8a7e-37319a1ca481-inventory-0\") pod \"custom-global-service-edpm-multinodeset-mrr8q\" (UID: \"82416928-2463-4fa9-8a7e-37319a1ca481\") " pod="openstack-kuttl-tests/custom-global-service-edpm-multinodeset-mrr8q" Jan 20 18:02:52 crc kubenswrapper[4558]: I0120 18:02:52.185066 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-edpm-compute-beta-nodeset\" (UniqueName: \"kubernetes.io/secret/82416928-2463-4fa9-8a7e-37319a1ca481-ssh-key-edpm-compute-beta-nodeset\") pod \"custom-global-service-edpm-multinodeset-mrr8q\" (UID: \"82416928-2463-4fa9-8a7e-37319a1ca481\") " pod="openstack-kuttl-tests/custom-global-service-edpm-multinodeset-mrr8q" Jan 20 18:02:52 crc kubenswrapper[4558]: I0120 18:02:52.185467 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-global-service-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/82416928-2463-4fa9-8a7e-37319a1ca481-custom-global-service-combined-ca-bundle\") pod \"custom-global-service-edpm-multinodeset-mrr8q\" (UID: \"82416928-2463-4fa9-8a7e-37319a1ca481\") " pod="openstack-kuttl-tests/custom-global-service-edpm-multinodeset-mrr8q" Jan 20 18:02:52 crc kubenswrapper[4558]: I0120 18:02:52.186194 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/82416928-2463-4fa9-8a7e-37319a1ca481-ssh-key-edpm-compute-global\") pod \"custom-global-service-edpm-multinodeset-mrr8q\" (UID: \"82416928-2463-4fa9-8a7e-37319a1ca481\") " pod="openstack-kuttl-tests/custom-global-service-edpm-multinodeset-mrr8q" Jan 20 18:02:52 crc kubenswrapper[4558]: I0120 18:02:52.195041 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7vk5n\" (UniqueName: \"kubernetes.io/projected/82416928-2463-4fa9-8a7e-37319a1ca481-kube-api-access-7vk5n\") pod \"custom-global-service-edpm-multinodeset-mrr8q\" (UID: 
\"82416928-2463-4fa9-8a7e-37319a1ca481\") " pod="openstack-kuttl-tests/custom-global-service-edpm-multinodeset-mrr8q" Jan 20 18:02:52 crc kubenswrapper[4558]: I0120 18:02:52.346343 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/custom-global-service-edpm-multinodeset-mrr8q" Jan 20 18:02:52 crc kubenswrapper[4558]: I0120 18:02:52.819398 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/custom-global-service-edpm-multinodeset-mrr8q"] Jan 20 18:02:52 crc kubenswrapper[4558]: W0120 18:02:52.820027 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod82416928_2463_4fa9_8a7e_37319a1ca481.slice/crio-e1da469b67bbe22d97c5de2b9211828d95cc2b0c016f6d3fbc21c7f63fe462e3 WatchSource:0}: Error finding container e1da469b67bbe22d97c5de2b9211828d95cc2b0c016f6d3fbc21c7f63fe462e3: Status 404 returned error can't find the container with id e1da469b67bbe22d97c5de2b9211828d95cc2b0c016f6d3fbc21c7f63fe462e3 Jan 20 18:02:52 crc kubenswrapper[4558]: I0120 18:02:52.984709 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/custom-global-service-edpm-multinodeset-mrr8q" event={"ID":"82416928-2463-4fa9-8a7e-37319a1ca481","Type":"ContainerStarted","Data":"e1da469b67bbe22d97c5de2b9211828d95cc2b0c016f6d3fbc21c7f63fe462e3"} Jan 20 18:02:53 crc kubenswrapper[4558]: I0120 18:02:53.995589 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/custom-global-service-edpm-multinodeset-mrr8q" event={"ID":"82416928-2463-4fa9-8a7e-37319a1ca481","Type":"ContainerStarted","Data":"ab4aac561b9cf5e22720dd49c2034c427640d4636cf082503d78cb49e91bf3ab"} Jan 20 18:02:56 crc kubenswrapper[4558]: I0120 18:02:56.014448 4558 generic.go:334] "Generic (PLEG): container finished" podID="82416928-2463-4fa9-8a7e-37319a1ca481" containerID="ab4aac561b9cf5e22720dd49c2034c427640d4636cf082503d78cb49e91bf3ab" exitCode=0 Jan 20 18:02:56 crc kubenswrapper[4558]: I0120 18:02:56.014539 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/custom-global-service-edpm-multinodeset-mrr8q" event={"ID":"82416928-2463-4fa9-8a7e-37319a1ca481","Type":"ContainerDied","Data":"ab4aac561b9cf5e22720dd49c2034c427640d4636cf082503d78cb49e91bf3ab"} Jan 20 18:02:57 crc kubenswrapper[4558]: I0120 18:02:57.259458 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/custom-global-service-edpm-multinodeset-mrr8q" Jan 20 18:02:57 crc kubenswrapper[4558]: I0120 18:02:57.329956 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 18:02:57 crc kubenswrapper[4558]: I0120 18:02:57.330043 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 18:02:57 crc kubenswrapper[4558]: I0120 18:02:57.357779 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/82416928-2463-4fa9-8a7e-37319a1ca481-inventory-0\") pod \"82416928-2463-4fa9-8a7e-37319a1ca481\" (UID: \"82416928-2463-4fa9-8a7e-37319a1ca481\") " Jan 20 18:02:57 crc kubenswrapper[4558]: I0120 18:02:57.357831 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"custom-global-service-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/82416928-2463-4fa9-8a7e-37319a1ca481-custom-global-service-combined-ca-bundle\") pod \"82416928-2463-4fa9-8a7e-37319a1ca481\" (UID: \"82416928-2463-4fa9-8a7e-37319a1ca481\") " Jan 20 18:02:57 crc kubenswrapper[4558]: I0120 18:02:57.357857 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/82416928-2463-4fa9-8a7e-37319a1ca481-ssh-key-edpm-compute-global\") pod \"82416928-2463-4fa9-8a7e-37319a1ca481\" (UID: \"82416928-2463-4fa9-8a7e-37319a1ca481\") " Jan 20 18:02:57 crc kubenswrapper[4558]: I0120 18:02:57.357893 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-beta-nodeset\" (UniqueName: \"kubernetes.io/secret/82416928-2463-4fa9-8a7e-37319a1ca481-ssh-key-edpm-compute-beta-nodeset\") pod \"82416928-2463-4fa9-8a7e-37319a1ca481\" (UID: \"82416928-2463-4fa9-8a7e-37319a1ca481\") " Jan 20 18:02:57 crc kubenswrapper[4558]: I0120 18:02:57.357922 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-1\" (UniqueName: \"kubernetes.io/secret/82416928-2463-4fa9-8a7e-37319a1ca481-inventory-1\") pod \"82416928-2463-4fa9-8a7e-37319a1ca481\" (UID: \"82416928-2463-4fa9-8a7e-37319a1ca481\") " Jan 20 18:02:57 crc kubenswrapper[4558]: I0120 18:02:57.357953 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7vk5n\" (UniqueName: \"kubernetes.io/projected/82416928-2463-4fa9-8a7e-37319a1ca481-kube-api-access-7vk5n\") pod \"82416928-2463-4fa9-8a7e-37319a1ca481\" (UID: \"82416928-2463-4fa9-8a7e-37319a1ca481\") " Jan 20 18:02:57 crc kubenswrapper[4558]: I0120 18:02:57.363770 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/82416928-2463-4fa9-8a7e-37319a1ca481-kube-api-access-7vk5n" (OuterVolumeSpecName: "kube-api-access-7vk5n") pod "82416928-2463-4fa9-8a7e-37319a1ca481" (UID: "82416928-2463-4fa9-8a7e-37319a1ca481"). InnerVolumeSpecName "kube-api-access-7vk5n". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:02:57 crc kubenswrapper[4558]: I0120 18:02:57.363821 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/82416928-2463-4fa9-8a7e-37319a1ca481-custom-global-service-combined-ca-bundle" (OuterVolumeSpecName: "custom-global-service-combined-ca-bundle") pod "82416928-2463-4fa9-8a7e-37319a1ca481" (UID: "82416928-2463-4fa9-8a7e-37319a1ca481"). InnerVolumeSpecName "custom-global-service-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:02:57 crc kubenswrapper[4558]: I0120 18:02:57.378367 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/82416928-2463-4fa9-8a7e-37319a1ca481-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "82416928-2463-4fa9-8a7e-37319a1ca481" (UID: "82416928-2463-4fa9-8a7e-37319a1ca481"). InnerVolumeSpecName "inventory-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:02:57 crc kubenswrapper[4558]: I0120 18:02:57.380109 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/82416928-2463-4fa9-8a7e-37319a1ca481-ssh-key-edpm-compute-global" (OuterVolumeSpecName: "ssh-key-edpm-compute-global") pod "82416928-2463-4fa9-8a7e-37319a1ca481" (UID: "82416928-2463-4fa9-8a7e-37319a1ca481"). InnerVolumeSpecName "ssh-key-edpm-compute-global". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:02:57 crc kubenswrapper[4558]: I0120 18:02:57.381520 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/82416928-2463-4fa9-8a7e-37319a1ca481-inventory-1" (OuterVolumeSpecName: "inventory-1") pod "82416928-2463-4fa9-8a7e-37319a1ca481" (UID: "82416928-2463-4fa9-8a7e-37319a1ca481"). InnerVolumeSpecName "inventory-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:02:57 crc kubenswrapper[4558]: I0120 18:02:57.382534 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/82416928-2463-4fa9-8a7e-37319a1ca481-ssh-key-edpm-compute-beta-nodeset" (OuterVolumeSpecName: "ssh-key-edpm-compute-beta-nodeset") pod "82416928-2463-4fa9-8a7e-37319a1ca481" (UID: "82416928-2463-4fa9-8a7e-37319a1ca481"). InnerVolumeSpecName "ssh-key-edpm-compute-beta-nodeset". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:02:57 crc kubenswrapper[4558]: I0120 18:02:57.460655 4558 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/82416928-2463-4fa9-8a7e-37319a1ca481-inventory-0\") on node \"crc\" DevicePath \"\"" Jan 20 18:02:57 crc kubenswrapper[4558]: I0120 18:02:57.460684 4558 reconciler_common.go:293] "Volume detached for volume \"custom-global-service-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/82416928-2463-4fa9-8a7e-37319a1ca481-custom-global-service-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:02:57 crc kubenswrapper[4558]: I0120 18:02:57.460698 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-edpm-compute-global\" (UniqueName: \"kubernetes.io/secret/82416928-2463-4fa9-8a7e-37319a1ca481-ssh-key-edpm-compute-global\") on node \"crc\" DevicePath \"\"" Jan 20 18:02:57 crc kubenswrapper[4558]: I0120 18:02:57.460710 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-edpm-compute-beta-nodeset\" (UniqueName: \"kubernetes.io/secret/82416928-2463-4fa9-8a7e-37319a1ca481-ssh-key-edpm-compute-beta-nodeset\") on node \"crc\" DevicePath \"\"" Jan 20 18:02:57 crc kubenswrapper[4558]: I0120 18:02:57.460720 4558 reconciler_common.go:293] "Volume detached for volume \"inventory-1\" (UniqueName: \"kubernetes.io/secret/82416928-2463-4fa9-8a7e-37319a1ca481-inventory-1\") on node \"crc\" DevicePath \"\"" Jan 20 18:02:57 crc kubenswrapper[4558]: I0120 18:02:57.460733 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7vk5n\" (UniqueName: \"kubernetes.io/projected/82416928-2463-4fa9-8a7e-37319a1ca481-kube-api-access-7vk5n\") on node \"crc\" DevicePath \"\"" Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.039297 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/custom-global-service-edpm-multinodeset-mrr8q" event={"ID":"82416928-2463-4fa9-8a7e-37319a1ca481","Type":"ContainerDied","Data":"e1da469b67bbe22d97c5de2b9211828d95cc2b0c016f6d3fbc21c7f63fe462e3"} Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.039393 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e1da469b67bbe22d97c5de2b9211828d95cc2b0c016f6d3fbc21c7f63fe462e3" Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.039495 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/custom-global-service-edpm-multinodeset-mrr8q" Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.478205 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-global-2jlgt"] Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.484801 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-k8bgg"] Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.491142 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-global-76wj2"] Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.497580 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/custom-global-service-edpm-multinodeset-mrr8q"] Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.507221 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-global-2jlgt"] Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.513298 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/configure-os-edpm-multinodeset-edpm-compute-global-wx5df"] Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.522356 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-ovn-edpm-multinodeset-edpm-compute-global-9gwtw"] Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.526995 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-k8bgg"] Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.533081 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/install-os-edpm-multinodeset-edpm-compute-global-ztq9r"] Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.537491 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/libvirt-edpm-multinodeset-edpm-compute-global-l4lg2"] Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.542208 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/configure-network-edpm-multinodeset-edpm-compute-global-t76hl"] Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.546624 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/validate-network-edpm-multinodeset-edpm-compute-global-j2rjw"] Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.550956 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-global-gvk2n"] Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.555175 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-global-76wj2"] Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.560227 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/custom-global-service-edpm-multinodeset-mrr8q"] Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.564460 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/configure-os-edpm-multinodeset-edpm-compute-global-wx5df"] Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.573450 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="40158c82-5595-45cb-abfa-6708fb8d590b" 
path="/var/lib/kubelet/pods/40158c82-5595-45cb-abfa-6708fb8d590b/volumes" Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.573942 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="565265a9-d1cc-478d-95d0-6ab9c7e2d60f" path="/var/lib/kubelet/pods/565265a9-d1cc-478d-95d0-6ab9c7e2d60f/volumes" Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.574411 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="60e20e0a-3082-4a93-b6a3-0a388fbd2ff5" path="/var/lib/kubelet/pods/60e20e0a-3082-4a93-b6a3-0a388fbd2ff5/volumes" Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.574848 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="82416928-2463-4fa9-8a7e-37319a1ca481" path="/var/lib/kubelet/pods/82416928-2463-4fa9-8a7e-37319a1ca481/volumes" Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.575786 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ff4bae03-f99f-462a-8ded-9c48304110a1" path="/var/lib/kubelet/pods/ff4bae03-f99f-462a-8ded-9c48304110a1/volumes" Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.576267 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-ovn-edpm-multinodeset-edpm-compute-global-9gwtw"] Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.576296 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/validate-network-edpm-multinodeset-edpm-compute-global-j2rjw"] Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.582058 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/libvirt-edpm-multinodeset-edpm-compute-global-l4lg2"] Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.594229 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/install-os-edpm-multinodeset-edpm-compute-global-ztq9r"] Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.598387 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-global-gvk2n"] Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.602525 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/configure-network-edpm-multinodeset-edpm-compute-global-t76hl"] Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.606603 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-beta-nodesetxsmms"] Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.610604 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-beta-nodesetxsmms"] Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.614869 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-global-csl8v"] Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.618938 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-dhcp-edpm-multinodeset-edpm-compute-global-9c6xq"] Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.622945 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-sriov-edpm-multinodeset-edpm-compute-global-j9j8g"] Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.627186 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-global-clkvg"] Jan 20 18:02:58 crc 
kubenswrapper[4558]: I0120 18:02:58.631192 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-global-csl8v"] Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.635559 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovn-edpm-multinodeset-edpm-compute-global-5sw7k"] Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.639935 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-global-clkvg"] Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.644278 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-sriov-edpm-multinodeset-edpm-compute-global-j9j8g"] Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.648692 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-dhcp-edpm-multinodeset-edpm-compute-global-9c6xq"] Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.653110 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovn-edpm-compute-global-edpm-compute-global-6kctq"] Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.657392 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/run-os-edpm-compute-global-edpm-compute-global-vgfbg"] Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.661805 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/run-os-edpm-multinodeset-edpm-compute-global-tq9fx"] Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.666558 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/configure-network-edpm-compute-global-edpm-compute-global-nqljk"] Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.670606 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/bootstrap-edpm-compute-global-edpm-compute-global-lbtv4"] Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.675556 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-ovn-edpm-compute-global-edpm-compute-global-xhhjn"] Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.678755 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ovn-edpm-compute-global-edpm-compute-global-6kctq"] Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.683230 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/run-os-edpm-compute-global-edpm-compute-global-vgfbg"] Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.686969 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/configure-os-edpm-compute-global-edpm-compute-global-dxdjf"] Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.690963 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-dhcp-edpm-compute-global-edpm-compute-global-b8h4h"] Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.694926 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-edpm-compute-global-edpm-compute-global-8f677"] Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.698905 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ovn-edpm-multinodeset-edpm-compute-global-5sw7k"] Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.702818 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack-kuttl-tests/libvirt-edpm-compute-global-edpm-compute-global-r2b84"] Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.706742 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/install-certs-edpm-compute-global-edpm-compute-global-xl8pq"] Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.710755 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/download-cache-edpm-compute-global-edpm-compute-global-27fb2"] Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.714526 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/install-os-edpm-compute-global-edpm-compute-global-mq7qb"] Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.734490 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/run-os-edpm-multinodeset-edpm-compute-global-tq9fx"] Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.740155 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-metadata-edpm-compute-global-edpm-compute-global-x5msg"] Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.744221 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/validate-network-edpm-compute-global-edpm-compute-global-zjpgn"] Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.748155 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/bootstrap-edpm-compute-global-edpm-compute-global-lbtv4"] Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.752349 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-dhcp-edpm-compute-global-edpm-compute-global-b8h4h"] Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.755741 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/libvirt-edpm-compute-global-edpm-compute-global-r2b84"] Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.759353 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/download-cache-edpm-compute-global-edpm-compute-global-27fb2"] Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.764638 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/configure-os-edpm-compute-global-edpm-compute-global-dxdjf"] Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.771898 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/install-certs-edpm-compute-global-edpm-compute-global-xl8pq"] Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.777414 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/configure-network-edpm-compute-global-edpm-compute-global-nqljk"] Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.781746 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/install-os-edpm-compute-global-edpm-compute-global-mq7qb"] Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.785810 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-ovn-edpm-compute-global-edpm-compute-global-xhhjn"] Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.790111 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-edpm-compute-global-edpm-compute-global-8f677"] Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.794055 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack-kuttl-tests/validate-network-edpm-compute-global-edpm-compute-global-zjpgn"] Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.798283 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-metadata-edpm-compute-global-edpm-compute-global-x5msg"] Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.802055 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/custom-global-service-edpm-compute-global-g877m"] Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.806230 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/custom-global-service-edpm-compute-global-g877m"] Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.809947 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-sriov-edpm-compute-global-edpm-compute-global-p96c4"] Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.813624 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-sriov-edpm-compute-global-edpm-compute-global-p96c4"] Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.817282 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-82d4h"] Jan 20 18:02:58 crc kubenswrapper[4558]: E0120 18:02:58.817645 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="82416928-2463-4fa9-8a7e-37319a1ca481" containerName="custom-global-service-edpm-multinodeset" Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.817664 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="82416928-2463-4fa9-8a7e-37319a1ca481" containerName="custom-global-service-edpm-multinodeset" Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.817905 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="82416928-2463-4fa9-8a7e-37319a1ca481" containerName="custom-global-service-edpm-multinodeset" Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.818985 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-82d4h" Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.821314 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-82d4h"] Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.879235 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v4gqk\" (UniqueName: \"kubernetes.io/projected/dd440515-39d3-4f1c-a85a-d03a0bfc0427-kube-api-access-v4gqk\") pod \"dnsmasq-dnsmasq-84b9f45d47-82d4h\" (UID: \"dd440515-39d3-4f1c-a85a-d03a0bfc0427\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-82d4h" Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.879278 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/dd440515-39d3-4f1c-a85a-d03a0bfc0427-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-84b9f45d47-82d4h\" (UID: \"dd440515-39d3-4f1c-a85a-d03a0bfc0427\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-82d4h" Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.879320 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dd440515-39d3-4f1c-a85a-d03a0bfc0427-config\") pod \"dnsmasq-dnsmasq-84b9f45d47-82d4h\" (UID: \"dd440515-39d3-4f1c-a85a-d03a0bfc0427\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-82d4h" Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.980495 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v4gqk\" (UniqueName: \"kubernetes.io/projected/dd440515-39d3-4f1c-a85a-d03a0bfc0427-kube-api-access-v4gqk\") pod \"dnsmasq-dnsmasq-84b9f45d47-82d4h\" (UID: \"dd440515-39d3-4f1c-a85a-d03a0bfc0427\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-82d4h" Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.980551 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/dd440515-39d3-4f1c-a85a-d03a0bfc0427-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-84b9f45d47-82d4h\" (UID: \"dd440515-39d3-4f1c-a85a-d03a0bfc0427\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-82d4h" Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.980602 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dd440515-39d3-4f1c-a85a-d03a0bfc0427-config\") pod \"dnsmasq-dnsmasq-84b9f45d47-82d4h\" (UID: \"dd440515-39d3-4f1c-a85a-d03a0bfc0427\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-82d4h" Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.981540 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/dd440515-39d3-4f1c-a85a-d03a0bfc0427-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-84b9f45d47-82d4h\" (UID: \"dd440515-39d3-4f1c-a85a-d03a0bfc0427\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-82d4h" Jan 20 18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.982264 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dd440515-39d3-4f1c-a85a-d03a0bfc0427-config\") pod \"dnsmasq-dnsmasq-84b9f45d47-82d4h\" (UID: \"dd440515-39d3-4f1c-a85a-d03a0bfc0427\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-82d4h" Jan 20 
18:02:58 crc kubenswrapper[4558]: I0120 18:02:58.997898 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v4gqk\" (UniqueName: \"kubernetes.io/projected/dd440515-39d3-4f1c-a85a-d03a0bfc0427-kube-api-access-v4gqk\") pod \"dnsmasq-dnsmasq-84b9f45d47-82d4h\" (UID: \"dd440515-39d3-4f1c-a85a-d03a0bfc0427\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-82d4h" Jan 20 18:02:59 crc kubenswrapper[4558]: I0120 18:02:59.134193 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-82d4h" Jan 20 18:02:59 crc kubenswrapper[4558]: I0120 18:02:59.522719 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-82d4h"] Jan 20 18:03:00 crc kubenswrapper[4558]: I0120 18:03:00.057689 4558 generic.go:334] "Generic (PLEG): container finished" podID="dd440515-39d3-4f1c-a85a-d03a0bfc0427" containerID="69453032edde0cb8d7dbc359f75edc7809d0f8a436a9750d9c7dc6f4a2523e61" exitCode=0 Jan 20 18:03:00 crc kubenswrapper[4558]: I0120 18:03:00.057741 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-82d4h" event={"ID":"dd440515-39d3-4f1c-a85a-d03a0bfc0427","Type":"ContainerDied","Data":"69453032edde0cb8d7dbc359f75edc7809d0f8a436a9750d9c7dc6f4a2523e61"} Jan 20 18:03:00 crc kubenswrapper[4558]: I0120 18:03:00.058061 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-82d4h" event={"ID":"dd440515-39d3-4f1c-a85a-d03a0bfc0427","Type":"ContainerStarted","Data":"bf8bf9fdd2cd589aa2685153cc2e18c4acd6ffc56f32237ae75ae55740456aa1"} Jan 20 18:03:00 crc kubenswrapper[4558]: I0120 18:03:00.578779 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="040af523-2207-48f4-809f-80e8b7f24918" path="/var/lib/kubelet/pods/040af523-2207-48f4-809f-80e8b7f24918/volumes" Jan 20 18:03:00 crc kubenswrapper[4558]: I0120 18:03:00.580490 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="07d4b53a-7d96-4698-83a6-885070980783" path="/var/lib/kubelet/pods/07d4b53a-7d96-4698-83a6-885070980783/volumes" Jan 20 18:03:00 crc kubenswrapper[4558]: I0120 18:03:00.582602 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52" path="/var/lib/kubelet/pods/0eec69f6-ce99-4e2f-b9e4-99b2afa4bb52/volumes" Jan 20 18:03:00 crc kubenswrapper[4558]: I0120 18:03:00.584274 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="133f8af1-1bea-41e4-8512-76fc9499aff6" path="/var/lib/kubelet/pods/133f8af1-1bea-41e4-8512-76fc9499aff6/volumes" Jan 20 18:03:00 crc kubenswrapper[4558]: I0120 18:03:00.585256 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="169142be-b565-4b53-9953-a3971e720edc" path="/var/lib/kubelet/pods/169142be-b565-4b53-9953-a3971e720edc/volumes" Jan 20 18:03:00 crc kubenswrapper[4558]: I0120 18:03:00.585711 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d3f0500-fe34-435f-acfe-8f6be614ffb8" path="/var/lib/kubelet/pods/1d3f0500-fe34-435f-acfe-8f6be614ffb8/volumes" Jan 20 18:03:00 crc kubenswrapper[4558]: I0120 18:03:00.586219 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1fa9a21f-9bed-423c-b4a0-439b614b88fc" path="/var/lib/kubelet/pods/1fa9a21f-9bed-423c-b4a0-439b614b88fc/volumes" Jan 20 18:03:00 crc kubenswrapper[4558]: I0120 18:03:00.587106 4558 kubelet_volumes.go:163] "Cleaned 
up orphaned pod volumes dir" podUID="2b196ecf-e0ae-4495-96df-ed2423adcc58" path="/var/lib/kubelet/pods/2b196ecf-e0ae-4495-96df-ed2423adcc58/volumes" Jan 20 18:03:00 crc kubenswrapper[4558]: I0120 18:03:00.587585 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2cd51553-9e13-40db-88e9-3011d212d5e6" path="/var/lib/kubelet/pods/2cd51553-9e13-40db-88e9-3011d212d5e6/volumes" Jan 20 18:03:00 crc kubenswrapper[4558]: I0120 18:03:00.588053 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3447daca-2874-4616-bc72-c3f0838862a7" path="/var/lib/kubelet/pods/3447daca-2874-4616-bc72-c3f0838862a7/volumes" Jan 20 18:03:00 crc kubenswrapper[4558]: I0120 18:03:00.588952 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="450b08fd-f6a4-439d-80fe-b92708702750" path="/var/lib/kubelet/pods/450b08fd-f6a4-439d-80fe-b92708702750/volumes" Jan 20 18:03:00 crc kubenswrapper[4558]: I0120 18:03:00.589437 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="45750bec-72b3-45e9-9a13-14b1a3e409a3" path="/var/lib/kubelet/pods/45750bec-72b3-45e9-9a13-14b1a3e409a3/volumes" Jan 20 18:03:00 crc kubenswrapper[4558]: I0120 18:03:00.589953 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="54ba1771-d7af-4d97-9293-d4278f9baacd" path="/var/lib/kubelet/pods/54ba1771-d7af-4d97-9293-d4278f9baacd/volumes" Jan 20 18:03:00 crc kubenswrapper[4558]: I0120 18:03:00.590411 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="585b79c3-bdc4-4bb5-b190-cc7cb6887c1e" path="/var/lib/kubelet/pods/585b79c3-bdc4-4bb5-b190-cc7cb6887c1e/volumes" Jan 20 18:03:00 crc kubenswrapper[4558]: I0120 18:03:00.591265 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="69e764c8-5789-4cb0-842f-1760cbc84701" path="/var/lib/kubelet/pods/69e764c8-5789-4cb0-842f-1760cbc84701/volumes" Jan 20 18:03:00 crc kubenswrapper[4558]: I0120 18:03:00.591714 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="71e1f655-faf6-4b2e-b4ec-506c6b761638" path="/var/lib/kubelet/pods/71e1f655-faf6-4b2e-b4ec-506c6b761638/volumes" Jan 20 18:03:00 crc kubenswrapper[4558]: I0120 18:03:00.592196 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="84aae701-ceff-4234-aa5a-3ecce5c826bb" path="/var/lib/kubelet/pods/84aae701-ceff-4234-aa5a-3ecce5c826bb/volumes" Jan 20 18:03:00 crc kubenswrapper[4558]: I0120 18:03:00.593016 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8905304c-0fea-45d1-9fd4-554934a574b4" path="/var/lib/kubelet/pods/8905304c-0fea-45d1-9fd4-554934a574b4/volumes" Jan 20 18:03:00 crc kubenswrapper[4558]: I0120 18:03:00.593500 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8a9744b7-9035-4303-8daf-f7b4089b88d9" path="/var/lib/kubelet/pods/8a9744b7-9035-4303-8daf-f7b4089b88d9/volumes" Jan 20 18:03:00 crc kubenswrapper[4558]: I0120 18:03:00.593961 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8be8df01-d6c9-4a23-a78b-5422feb8c2b6" path="/var/lib/kubelet/pods/8be8df01-d6c9-4a23-a78b-5422feb8c2b6/volumes" Jan 20 18:03:00 crc kubenswrapper[4558]: I0120 18:03:00.594924 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9daabf6f-11b9-4e54-a514-9ef0c9aa8c1a" path="/var/lib/kubelet/pods/9daabf6f-11b9-4e54-a514-9ef0c9aa8c1a/volumes" Jan 20 18:03:00 crc kubenswrapper[4558]: I0120 18:03:00.595395 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="ac804cf5-3104-492e-84e8-d39ac5d82fd0" path="/var/lib/kubelet/pods/ac804cf5-3104-492e-84e8-d39ac5d82fd0/volumes" Jan 20 18:03:00 crc kubenswrapper[4558]: I0120 18:03:00.595829 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ce5580a4-7384-4c6b-8a81-aa07c8d84ecc" path="/var/lib/kubelet/pods/ce5580a4-7384-4c6b-8a81-aa07c8d84ecc/volumes" Jan 20 18:03:00 crc kubenswrapper[4558]: I0120 18:03:00.596298 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d7f4122e-af2f-41dd-9e4f-0b99d9457e82" path="/var/lib/kubelet/pods/d7f4122e-af2f-41dd-9e4f-0b99d9457e82/volumes" Jan 20 18:03:00 crc kubenswrapper[4558]: I0120 18:03:00.597142 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7680571-76c8-40eb-8e59-fa68b86d77f7" path="/var/lib/kubelet/pods/e7680571-76c8-40eb-8e59-fa68b86d77f7/volumes" Jan 20 18:03:00 crc kubenswrapper[4558]: I0120 18:03:00.597610 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f0222acb-beb0-4bec-8c83-53ed056817e5" path="/var/lib/kubelet/pods/f0222acb-beb0-4bec-8c83-53ed056817e5/volumes" Jan 20 18:03:00 crc kubenswrapper[4558]: I0120 18:03:00.598064 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f3e4cbee-8cb9-4f90-8f78-2fd3c9d5d9e3" path="/var/lib/kubelet/pods/f3e4cbee-8cb9-4f90-8f78-2fd3c9d5d9e3/volumes" Jan 20 18:03:00 crc kubenswrapper[4558]: I0120 18:03:00.598998 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f5a44994-7dfb-4450-8b25-66b78fea03a0" path="/var/lib/kubelet/pods/f5a44994-7dfb-4450-8b25-66b78fea03a0/volumes" Jan 20 18:03:00 crc kubenswrapper[4558]: I0120 18:03:00.599461 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fd8020a9-d02c-4ad9-a4c7-e9dd15f1959c" path="/var/lib/kubelet/pods/fd8020a9-d02c-4ad9-a4c7-e9dd15f1959c/volumes" Jan 20 18:03:01 crc kubenswrapper[4558]: I0120 18:03:01.068691 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-82d4h" event={"ID":"dd440515-39d3-4f1c-a85a-d03a0bfc0427","Type":"ContainerStarted","Data":"f9b09f9ee0c281fa78e53d4c778c2d500368621ce6fa6c0131211f6eac1b2d78"} Jan 20 18:03:01 crc kubenswrapper[4558]: I0120 18:03:01.069039 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-82d4h" Jan 20 18:03:01 crc kubenswrapper[4558]: I0120 18:03:01.087397 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-82d4h" podStartSLOduration=3.087378154 podStartE2EDuration="3.087378154s" podCreationTimestamp="2026-01-20 18:02:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:03:01.083453447 +0000 UTC m=+4874.843791414" watchObservedRunningTime="2026-01-20 18:03:01.087378154 +0000 UTC m=+4874.847716121" Jan 20 18:03:05 crc kubenswrapper[4558]: I0120 18:03:05.001744 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-zn958"] Jan 20 18:03:05 crc kubenswrapper[4558]: I0120 18:03:05.006287 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-zn958"] Jan 20 18:03:05 crc kubenswrapper[4558]: I0120 18:03:05.117345 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-58lk8"] Jan 20 18:03:05 crc kubenswrapper[4558]: I0120 18:03:05.118588 4558 util.go:30] "No sandbox for pod 
can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-58lk8" Jan 20 18:03:05 crc kubenswrapper[4558]: I0120 18:03:05.121661 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Jan 20 18:03:05 crc kubenswrapper[4558]: I0120 18:03:05.121774 4558 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-k9ht8" Jan 20 18:03:05 crc kubenswrapper[4558]: I0120 18:03:05.122918 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Jan 20 18:03:05 crc kubenswrapper[4558]: I0120 18:03:05.128940 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Jan 20 18:03:05 crc kubenswrapper[4558]: I0120 18:03:05.135326 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-58lk8"] Jan 20 18:03:05 crc kubenswrapper[4558]: I0120 18:03:05.168915 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6745m\" (UniqueName: \"kubernetes.io/projected/dfa06cec-a2e9-429d-b806-8db9eb4da606-kube-api-access-6745m\") pod \"crc-storage-crc-58lk8\" (UID: \"dfa06cec-a2e9-429d-b806-8db9eb4da606\") " pod="crc-storage/crc-storage-crc-58lk8" Jan 20 18:03:05 crc kubenswrapper[4558]: I0120 18:03:05.168964 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/dfa06cec-a2e9-429d-b806-8db9eb4da606-node-mnt\") pod \"crc-storage-crc-58lk8\" (UID: \"dfa06cec-a2e9-429d-b806-8db9eb4da606\") " pod="crc-storage/crc-storage-crc-58lk8" Jan 20 18:03:05 crc kubenswrapper[4558]: I0120 18:03:05.169213 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/dfa06cec-a2e9-429d-b806-8db9eb4da606-crc-storage\") pod \"crc-storage-crc-58lk8\" (UID: \"dfa06cec-a2e9-429d-b806-8db9eb4da606\") " pod="crc-storage/crc-storage-crc-58lk8" Jan 20 18:03:05 crc kubenswrapper[4558]: I0120 18:03:05.270497 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/dfa06cec-a2e9-429d-b806-8db9eb4da606-crc-storage\") pod \"crc-storage-crc-58lk8\" (UID: \"dfa06cec-a2e9-429d-b806-8db9eb4da606\") " pod="crc-storage/crc-storage-crc-58lk8" Jan 20 18:03:05 crc kubenswrapper[4558]: I0120 18:03:05.270564 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6745m\" (UniqueName: \"kubernetes.io/projected/dfa06cec-a2e9-429d-b806-8db9eb4da606-kube-api-access-6745m\") pod \"crc-storage-crc-58lk8\" (UID: \"dfa06cec-a2e9-429d-b806-8db9eb4da606\") " pod="crc-storage/crc-storage-crc-58lk8" Jan 20 18:03:05 crc kubenswrapper[4558]: I0120 18:03:05.270599 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/dfa06cec-a2e9-429d-b806-8db9eb4da606-node-mnt\") pod \"crc-storage-crc-58lk8\" (UID: \"dfa06cec-a2e9-429d-b806-8db9eb4da606\") " pod="crc-storage/crc-storage-crc-58lk8" Jan 20 18:03:05 crc kubenswrapper[4558]: I0120 18:03:05.270909 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/dfa06cec-a2e9-429d-b806-8db9eb4da606-node-mnt\") pod \"crc-storage-crc-58lk8\" (UID: \"dfa06cec-a2e9-429d-b806-8db9eb4da606\") " 
pod="crc-storage/crc-storage-crc-58lk8" Jan 20 18:03:05 crc kubenswrapper[4558]: I0120 18:03:05.271795 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/dfa06cec-a2e9-429d-b806-8db9eb4da606-crc-storage\") pod \"crc-storage-crc-58lk8\" (UID: \"dfa06cec-a2e9-429d-b806-8db9eb4da606\") " pod="crc-storage/crc-storage-crc-58lk8" Jan 20 18:03:05 crc kubenswrapper[4558]: I0120 18:03:05.289337 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6745m\" (UniqueName: \"kubernetes.io/projected/dfa06cec-a2e9-429d-b806-8db9eb4da606-kube-api-access-6745m\") pod \"crc-storage-crc-58lk8\" (UID: \"dfa06cec-a2e9-429d-b806-8db9eb4da606\") " pod="crc-storage/crc-storage-crc-58lk8" Jan 20 18:03:05 crc kubenswrapper[4558]: I0120 18:03:05.437078 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-58lk8" Jan 20 18:03:05 crc kubenswrapper[4558]: I0120 18:03:05.819180 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-58lk8"] Jan 20 18:03:06 crc kubenswrapper[4558]: I0120 18:03:06.108331 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-58lk8" event={"ID":"dfa06cec-a2e9-429d-b806-8db9eb4da606","Type":"ContainerStarted","Data":"3fdaba82cb7dd08d2a32dd07fc9833333b76d27dfd81a5b804d93c699c7962ea"} Jan 20 18:03:06 crc kubenswrapper[4558]: I0120 18:03:06.577806 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="64b6908c-6f3f-4189-bd92-7bc168d122b9" path="/var/lib/kubelet/pods/64b6908c-6f3f-4189-bd92-7bc168d122b9/volumes" Jan 20 18:03:07 crc kubenswrapper[4558]: I0120 18:03:07.118830 4558 generic.go:334] "Generic (PLEG): container finished" podID="dfa06cec-a2e9-429d-b806-8db9eb4da606" containerID="e03cc394c485d82132b145aeda9118ad39cd87b0221b7049886130d0c947ec41" exitCode=0 Jan 20 18:03:07 crc kubenswrapper[4558]: I0120 18:03:07.118892 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-58lk8" event={"ID":"dfa06cec-a2e9-429d-b806-8db9eb4da606","Type":"ContainerDied","Data":"e03cc394c485d82132b145aeda9118ad39cd87b0221b7049886130d0c947ec41"} Jan 20 18:03:08 crc kubenswrapper[4558]: I0120 18:03:08.352536 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-58lk8" Jan 20 18:03:08 crc kubenswrapper[4558]: I0120 18:03:08.421455 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/dfa06cec-a2e9-429d-b806-8db9eb4da606-node-mnt\") pod \"dfa06cec-a2e9-429d-b806-8db9eb4da606\" (UID: \"dfa06cec-a2e9-429d-b806-8db9eb4da606\") " Jan 20 18:03:08 crc kubenswrapper[4558]: I0120 18:03:08.421566 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6745m\" (UniqueName: \"kubernetes.io/projected/dfa06cec-a2e9-429d-b806-8db9eb4da606-kube-api-access-6745m\") pod \"dfa06cec-a2e9-429d-b806-8db9eb4da606\" (UID: \"dfa06cec-a2e9-429d-b806-8db9eb4da606\") " Jan 20 18:03:08 crc kubenswrapper[4558]: I0120 18:03:08.421603 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/dfa06cec-a2e9-429d-b806-8db9eb4da606-crc-storage\") pod \"dfa06cec-a2e9-429d-b806-8db9eb4da606\" (UID: \"dfa06cec-a2e9-429d-b806-8db9eb4da606\") " Jan 20 18:03:08 crc kubenswrapper[4558]: I0120 18:03:08.421601 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/dfa06cec-a2e9-429d-b806-8db9eb4da606-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "dfa06cec-a2e9-429d-b806-8db9eb4da606" (UID: "dfa06cec-a2e9-429d-b806-8db9eb4da606"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 18:03:08 crc kubenswrapper[4558]: I0120 18:03:08.421881 4558 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/dfa06cec-a2e9-429d-b806-8db9eb4da606-node-mnt\") on node \"crc\" DevicePath \"\"" Jan 20 18:03:08 crc kubenswrapper[4558]: I0120 18:03:08.427244 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dfa06cec-a2e9-429d-b806-8db9eb4da606-kube-api-access-6745m" (OuterVolumeSpecName: "kube-api-access-6745m") pod "dfa06cec-a2e9-429d-b806-8db9eb4da606" (UID: "dfa06cec-a2e9-429d-b806-8db9eb4da606"). InnerVolumeSpecName "kube-api-access-6745m". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:03:08 crc kubenswrapper[4558]: I0120 18:03:08.438682 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dfa06cec-a2e9-429d-b806-8db9eb4da606-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "dfa06cec-a2e9-429d-b806-8db9eb4da606" (UID: "dfa06cec-a2e9-429d-b806-8db9eb4da606"). InnerVolumeSpecName "crc-storage". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:03:08 crc kubenswrapper[4558]: I0120 18:03:08.523116 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6745m\" (UniqueName: \"kubernetes.io/projected/dfa06cec-a2e9-429d-b806-8db9eb4da606-kube-api-access-6745m\") on node \"crc\" DevicePath \"\"" Jan 20 18:03:08 crc kubenswrapper[4558]: I0120 18:03:08.523150 4558 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/dfa06cec-a2e9-429d-b806-8db9eb4da606-crc-storage\") on node \"crc\" DevicePath \"\"" Jan 20 18:03:09 crc kubenswrapper[4558]: I0120 18:03:09.135030 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-58lk8" event={"ID":"dfa06cec-a2e9-429d-b806-8db9eb4da606","Type":"ContainerDied","Data":"3fdaba82cb7dd08d2a32dd07fc9833333b76d27dfd81a5b804d93c699c7962ea"} Jan 20 18:03:09 crc kubenswrapper[4558]: I0120 18:03:09.135078 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3fdaba82cb7dd08d2a32dd07fc9833333b76d27dfd81a5b804d93c699c7962ea" Jan 20 18:03:09 crc kubenswrapper[4558]: I0120 18:03:09.135081 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-58lk8" Jan 20 18:03:09 crc kubenswrapper[4558]: I0120 18:03:09.135340 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-82d4h" Jan 20 18:03:09 crc kubenswrapper[4558]: I0120 18:03:09.173153 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-6668544499-64ftq"] Jan 20 18:03:09 crc kubenswrapper[4558]: I0120 18:03:09.173350 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6668544499-64ftq" podUID="56e4457b-2828-4258-8f84-6159b2422531" containerName="dnsmasq-dns" containerID="cri-o://d02adfd4d4f0140e2faca8210c03484f2c1494acd968b18032fa2acd0976c05f" gracePeriod=10 Jan 20 18:03:09 crc kubenswrapper[4558]: I0120 18:03:09.281839 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6668544499-64ftq" podUID="56e4457b-2828-4258-8f84-6159b2422531" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.205:5353: connect: connection refused" Jan 20 18:03:09 crc kubenswrapper[4558]: I0120 18:03:09.507153 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6668544499-64ftq" Jan 20 18:03:09 crc kubenswrapper[4558]: I0120 18:03:09.535819 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"edpm-compute-global\" (UniqueName: \"kubernetes.io/configmap/56e4457b-2828-4258-8f84-6159b2422531-edpm-compute-global\") pod \"56e4457b-2828-4258-8f84-6159b2422531\" (UID: \"56e4457b-2828-4258-8f84-6159b2422531\") " Jan 20 18:03:09 crc kubenswrapper[4558]: I0120 18:03:09.535864 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/56e4457b-2828-4258-8f84-6159b2422531-dnsmasq-svc\") pod \"56e4457b-2828-4258-8f84-6159b2422531\" (UID: \"56e4457b-2828-4258-8f84-6159b2422531\") " Jan 20 18:03:09 crc kubenswrapper[4558]: I0120 18:03:09.535931 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2779n\" (UniqueName: \"kubernetes.io/projected/56e4457b-2828-4258-8f84-6159b2422531-kube-api-access-2779n\") pod \"56e4457b-2828-4258-8f84-6159b2422531\" (UID: \"56e4457b-2828-4258-8f84-6159b2422531\") " Jan 20 18:03:09 crc kubenswrapper[4558]: I0120 18:03:09.535968 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/56e4457b-2828-4258-8f84-6159b2422531-config\") pod \"56e4457b-2828-4258-8f84-6159b2422531\" (UID: \"56e4457b-2828-4258-8f84-6159b2422531\") " Jan 20 18:03:09 crc kubenswrapper[4558]: I0120 18:03:09.536025 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"edpm-compute-beta-nodeset\" (UniqueName: \"kubernetes.io/configmap/56e4457b-2828-4258-8f84-6159b2422531-edpm-compute-beta-nodeset\") pod \"56e4457b-2828-4258-8f84-6159b2422531\" (UID: \"56e4457b-2828-4258-8f84-6159b2422531\") " Jan 20 18:03:09 crc kubenswrapper[4558]: I0120 18:03:09.540858 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/56e4457b-2828-4258-8f84-6159b2422531-kube-api-access-2779n" (OuterVolumeSpecName: "kube-api-access-2779n") pod "56e4457b-2828-4258-8f84-6159b2422531" (UID: "56e4457b-2828-4258-8f84-6159b2422531"). InnerVolumeSpecName "kube-api-access-2779n". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:03:09 crc kubenswrapper[4558]: I0120 18:03:09.566618 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/56e4457b-2828-4258-8f84-6159b2422531-dnsmasq-svc" (OuterVolumeSpecName: "dnsmasq-svc") pod "56e4457b-2828-4258-8f84-6159b2422531" (UID: "56e4457b-2828-4258-8f84-6159b2422531"). InnerVolumeSpecName "dnsmasq-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:03:09 crc kubenswrapper[4558]: I0120 18:03:09.567608 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/56e4457b-2828-4258-8f84-6159b2422531-config" (OuterVolumeSpecName: "config") pod "56e4457b-2828-4258-8f84-6159b2422531" (UID: "56e4457b-2828-4258-8f84-6159b2422531"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:03:09 crc kubenswrapper[4558]: I0120 18:03:09.569082 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/56e4457b-2828-4258-8f84-6159b2422531-edpm-compute-global" (OuterVolumeSpecName: "edpm-compute-global") pod "56e4457b-2828-4258-8f84-6159b2422531" (UID: "56e4457b-2828-4258-8f84-6159b2422531"). 
InnerVolumeSpecName "edpm-compute-global". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:03:09 crc kubenswrapper[4558]: I0120 18:03:09.572802 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/56e4457b-2828-4258-8f84-6159b2422531-edpm-compute-beta-nodeset" (OuterVolumeSpecName: "edpm-compute-beta-nodeset") pod "56e4457b-2828-4258-8f84-6159b2422531" (UID: "56e4457b-2828-4258-8f84-6159b2422531"). InnerVolumeSpecName "edpm-compute-beta-nodeset". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:03:09 crc kubenswrapper[4558]: I0120 18:03:09.637381 4558 reconciler_common.go:293] "Volume detached for volume \"edpm-compute-beta-nodeset\" (UniqueName: \"kubernetes.io/configmap/56e4457b-2828-4258-8f84-6159b2422531-edpm-compute-beta-nodeset\") on node \"crc\" DevicePath \"\"" Jan 20 18:03:09 crc kubenswrapper[4558]: I0120 18:03:09.637407 4558 reconciler_common.go:293] "Volume detached for volume \"edpm-compute-global\" (UniqueName: \"kubernetes.io/configmap/56e4457b-2828-4258-8f84-6159b2422531-edpm-compute-global\") on node \"crc\" DevicePath \"\"" Jan 20 18:03:09 crc kubenswrapper[4558]: I0120 18:03:09.637419 4558 reconciler_common.go:293] "Volume detached for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/56e4457b-2828-4258-8f84-6159b2422531-dnsmasq-svc\") on node \"crc\" DevicePath \"\"" Jan 20 18:03:09 crc kubenswrapper[4558]: I0120 18:03:09.637432 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2779n\" (UniqueName: \"kubernetes.io/projected/56e4457b-2828-4258-8f84-6159b2422531-kube-api-access-2779n\") on node \"crc\" DevicePath \"\"" Jan 20 18:03:09 crc kubenswrapper[4558]: I0120 18:03:09.637442 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/56e4457b-2828-4258-8f84-6159b2422531-config\") on node \"crc\" DevicePath \"\"" Jan 20 18:03:10 crc kubenswrapper[4558]: I0120 18:03:10.143143 4558 generic.go:334] "Generic (PLEG): container finished" podID="56e4457b-2828-4258-8f84-6159b2422531" containerID="d02adfd4d4f0140e2faca8210c03484f2c1494acd968b18032fa2acd0976c05f" exitCode=0 Jan 20 18:03:10 crc kubenswrapper[4558]: I0120 18:03:10.143201 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6668544499-64ftq" event={"ID":"56e4457b-2828-4258-8f84-6159b2422531","Type":"ContainerDied","Data":"d02adfd4d4f0140e2faca8210c03484f2c1494acd968b18032fa2acd0976c05f"} Jan 20 18:03:10 crc kubenswrapper[4558]: I0120 18:03:10.143229 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6668544499-64ftq" event={"ID":"56e4457b-2828-4258-8f84-6159b2422531","Type":"ContainerDied","Data":"4fd1f6c486eb12b56b43499db44f751463127b26e6c839091c911bb26660d920"} Jan 20 18:03:10 crc kubenswrapper[4558]: I0120 18:03:10.143249 4558 scope.go:117] "RemoveContainer" containerID="d02adfd4d4f0140e2faca8210c03484f2c1494acd968b18032fa2acd0976c05f" Jan 20 18:03:10 crc kubenswrapper[4558]: I0120 18:03:10.143206 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6668544499-64ftq" Jan 20 18:03:10 crc kubenswrapper[4558]: I0120 18:03:10.162359 4558 scope.go:117] "RemoveContainer" containerID="9c66ffe123731fef025cf7390bf78c73c985782dcbe2fc1e71e07008b579428d" Jan 20 18:03:10 crc kubenswrapper[4558]: I0120 18:03:10.168751 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-6668544499-64ftq"] Jan 20 18:03:10 crc kubenswrapper[4558]: I0120 18:03:10.175143 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-6668544499-64ftq"] Jan 20 18:03:10 crc kubenswrapper[4558]: I0120 18:03:10.204597 4558 scope.go:117] "RemoveContainer" containerID="d02adfd4d4f0140e2faca8210c03484f2c1494acd968b18032fa2acd0976c05f" Jan 20 18:03:10 crc kubenswrapper[4558]: E0120 18:03:10.205239 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d02adfd4d4f0140e2faca8210c03484f2c1494acd968b18032fa2acd0976c05f\": container with ID starting with d02adfd4d4f0140e2faca8210c03484f2c1494acd968b18032fa2acd0976c05f not found: ID does not exist" containerID="d02adfd4d4f0140e2faca8210c03484f2c1494acd968b18032fa2acd0976c05f" Jan 20 18:03:10 crc kubenswrapper[4558]: I0120 18:03:10.205268 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d02adfd4d4f0140e2faca8210c03484f2c1494acd968b18032fa2acd0976c05f"} err="failed to get container status \"d02adfd4d4f0140e2faca8210c03484f2c1494acd968b18032fa2acd0976c05f\": rpc error: code = NotFound desc = could not find container \"d02adfd4d4f0140e2faca8210c03484f2c1494acd968b18032fa2acd0976c05f\": container with ID starting with d02adfd4d4f0140e2faca8210c03484f2c1494acd968b18032fa2acd0976c05f not found: ID does not exist" Jan 20 18:03:10 crc kubenswrapper[4558]: I0120 18:03:10.205288 4558 scope.go:117] "RemoveContainer" containerID="9c66ffe123731fef025cf7390bf78c73c985782dcbe2fc1e71e07008b579428d" Jan 20 18:03:10 crc kubenswrapper[4558]: E0120 18:03:10.205665 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9c66ffe123731fef025cf7390bf78c73c985782dcbe2fc1e71e07008b579428d\": container with ID starting with 9c66ffe123731fef025cf7390bf78c73c985782dcbe2fc1e71e07008b579428d not found: ID does not exist" containerID="9c66ffe123731fef025cf7390bf78c73c985782dcbe2fc1e71e07008b579428d" Jan 20 18:03:10 crc kubenswrapper[4558]: I0120 18:03:10.205705 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9c66ffe123731fef025cf7390bf78c73c985782dcbe2fc1e71e07008b579428d"} err="failed to get container status \"9c66ffe123731fef025cf7390bf78c73c985782dcbe2fc1e71e07008b579428d\": rpc error: code = NotFound desc = could not find container \"9c66ffe123731fef025cf7390bf78c73c985782dcbe2fc1e71e07008b579428d\": container with ID starting with 9c66ffe123731fef025cf7390bf78c73c985782dcbe2fc1e71e07008b579428d not found: ID does not exist" Jan 20 18:03:10 crc kubenswrapper[4558]: I0120 18:03:10.574000 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="56e4457b-2828-4258-8f84-6159b2422531" path="/var/lib/kubelet/pods/56e4457b-2828-4258-8f84-6159b2422531/volumes" Jan 20 18:03:11 crc kubenswrapper[4558]: I0120 18:03:11.186671 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-58lk8"] Jan 20 18:03:11 crc kubenswrapper[4558]: I0120 
18:03:11.190204 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-58lk8"] Jan 20 18:03:11 crc kubenswrapper[4558]: I0120 18:03:11.285327 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-c5jlp"] Jan 20 18:03:11 crc kubenswrapper[4558]: E0120 18:03:11.285699 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dfa06cec-a2e9-429d-b806-8db9eb4da606" containerName="storage" Jan 20 18:03:11 crc kubenswrapper[4558]: I0120 18:03:11.285719 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="dfa06cec-a2e9-429d-b806-8db9eb4da606" containerName="storage" Jan 20 18:03:11 crc kubenswrapper[4558]: E0120 18:03:11.285763 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56e4457b-2828-4258-8f84-6159b2422531" containerName="dnsmasq-dns" Jan 20 18:03:11 crc kubenswrapper[4558]: I0120 18:03:11.285770 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="56e4457b-2828-4258-8f84-6159b2422531" containerName="dnsmasq-dns" Jan 20 18:03:11 crc kubenswrapper[4558]: E0120 18:03:11.285782 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56e4457b-2828-4258-8f84-6159b2422531" containerName="init" Jan 20 18:03:11 crc kubenswrapper[4558]: I0120 18:03:11.285788 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="56e4457b-2828-4258-8f84-6159b2422531" containerName="init" Jan 20 18:03:11 crc kubenswrapper[4558]: I0120 18:03:11.285934 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="dfa06cec-a2e9-429d-b806-8db9eb4da606" containerName="storage" Jan 20 18:03:11 crc kubenswrapper[4558]: I0120 18:03:11.285957 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="56e4457b-2828-4258-8f84-6159b2422531" containerName="dnsmasq-dns" Jan 20 18:03:11 crc kubenswrapper[4558]: I0120 18:03:11.286526 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-c5jlp" Jan 20 18:03:11 crc kubenswrapper[4558]: I0120 18:03:11.288445 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Jan 20 18:03:11 crc kubenswrapper[4558]: I0120 18:03:11.289043 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Jan 20 18:03:11 crc kubenswrapper[4558]: I0120 18:03:11.289190 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Jan 20 18:03:11 crc kubenswrapper[4558]: I0120 18:03:11.289200 4558 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-k9ht8" Jan 20 18:03:11 crc kubenswrapper[4558]: I0120 18:03:11.300990 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-c5jlp"] Jan 20 18:03:11 crc kubenswrapper[4558]: I0120 18:03:11.463542 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w57jv\" (UniqueName: \"kubernetes.io/projected/1382485d-e2ea-474b-a0f5-3db36a8cf19c-kube-api-access-w57jv\") pod \"crc-storage-crc-c5jlp\" (UID: \"1382485d-e2ea-474b-a0f5-3db36a8cf19c\") " pod="crc-storage/crc-storage-crc-c5jlp" Jan 20 18:03:11 crc kubenswrapper[4558]: I0120 18:03:11.463610 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/1382485d-e2ea-474b-a0f5-3db36a8cf19c-node-mnt\") pod \"crc-storage-crc-c5jlp\" (UID: \"1382485d-e2ea-474b-a0f5-3db36a8cf19c\") " pod="crc-storage/crc-storage-crc-c5jlp" Jan 20 18:03:11 crc kubenswrapper[4558]: I0120 18:03:11.463630 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/1382485d-e2ea-474b-a0f5-3db36a8cf19c-crc-storage\") pod \"crc-storage-crc-c5jlp\" (UID: \"1382485d-e2ea-474b-a0f5-3db36a8cf19c\") " pod="crc-storage/crc-storage-crc-c5jlp" Jan 20 18:03:11 crc kubenswrapper[4558]: I0120 18:03:11.565533 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w57jv\" (UniqueName: \"kubernetes.io/projected/1382485d-e2ea-474b-a0f5-3db36a8cf19c-kube-api-access-w57jv\") pod \"crc-storage-crc-c5jlp\" (UID: \"1382485d-e2ea-474b-a0f5-3db36a8cf19c\") " pod="crc-storage/crc-storage-crc-c5jlp" Jan 20 18:03:11 crc kubenswrapper[4558]: I0120 18:03:11.565631 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/1382485d-e2ea-474b-a0f5-3db36a8cf19c-crc-storage\") pod \"crc-storage-crc-c5jlp\" (UID: \"1382485d-e2ea-474b-a0f5-3db36a8cf19c\") " pod="crc-storage/crc-storage-crc-c5jlp" Jan 20 18:03:11 crc kubenswrapper[4558]: I0120 18:03:11.565655 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/1382485d-e2ea-474b-a0f5-3db36a8cf19c-node-mnt\") pod \"crc-storage-crc-c5jlp\" (UID: \"1382485d-e2ea-474b-a0f5-3db36a8cf19c\") " pod="crc-storage/crc-storage-crc-c5jlp" Jan 20 18:03:11 crc kubenswrapper[4558]: I0120 18:03:11.565951 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/1382485d-e2ea-474b-a0f5-3db36a8cf19c-node-mnt\") pod \"crc-storage-crc-c5jlp\" (UID: \"1382485d-e2ea-474b-a0f5-3db36a8cf19c\") " 
pod="crc-storage/crc-storage-crc-c5jlp" Jan 20 18:03:11 crc kubenswrapper[4558]: I0120 18:03:11.567006 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/1382485d-e2ea-474b-a0f5-3db36a8cf19c-crc-storage\") pod \"crc-storage-crc-c5jlp\" (UID: \"1382485d-e2ea-474b-a0f5-3db36a8cf19c\") " pod="crc-storage/crc-storage-crc-c5jlp" Jan 20 18:03:11 crc kubenswrapper[4558]: I0120 18:03:11.584367 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w57jv\" (UniqueName: \"kubernetes.io/projected/1382485d-e2ea-474b-a0f5-3db36a8cf19c-kube-api-access-w57jv\") pod \"crc-storage-crc-c5jlp\" (UID: \"1382485d-e2ea-474b-a0f5-3db36a8cf19c\") " pod="crc-storage/crc-storage-crc-c5jlp" Jan 20 18:03:11 crc kubenswrapper[4558]: I0120 18:03:11.603280 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-c5jlp" Jan 20 18:03:11 crc kubenswrapper[4558]: I0120 18:03:11.994143 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-c5jlp"] Jan 20 18:03:12 crc kubenswrapper[4558]: I0120 18:03:12.167235 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-c5jlp" event={"ID":"1382485d-e2ea-474b-a0f5-3db36a8cf19c","Type":"ContainerStarted","Data":"f21d401581b3cc78ed672cc993cafd127f8ba44dfc79243802f3248448938025"} Jan 20 18:03:12 crc kubenswrapper[4558]: I0120 18:03:12.574953 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dfa06cec-a2e9-429d-b806-8db9eb4da606" path="/var/lib/kubelet/pods/dfa06cec-a2e9-429d-b806-8db9eb4da606/volumes" Jan 20 18:03:13 crc kubenswrapper[4558]: I0120 18:03:13.177187 4558 generic.go:334] "Generic (PLEG): container finished" podID="1382485d-e2ea-474b-a0f5-3db36a8cf19c" containerID="658800ecf53ee1c56db45161afaf9b0aa0d93eb5e57b5b87faae5600e01895ec" exitCode=0 Jan 20 18:03:13 crc kubenswrapper[4558]: I0120 18:03:13.177296 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-c5jlp" event={"ID":"1382485d-e2ea-474b-a0f5-3db36a8cf19c","Type":"ContainerDied","Data":"658800ecf53ee1c56db45161afaf9b0aa0d93eb5e57b5b87faae5600e01895ec"} Jan 20 18:03:14 crc kubenswrapper[4558]: I0120 18:03:14.423005 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-c5jlp" Jan 20 18:03:14 crc kubenswrapper[4558]: I0120 18:03:14.508102 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/1382485d-e2ea-474b-a0f5-3db36a8cf19c-crc-storage\") pod \"1382485d-e2ea-474b-a0f5-3db36a8cf19c\" (UID: \"1382485d-e2ea-474b-a0f5-3db36a8cf19c\") " Jan 20 18:03:14 crc kubenswrapper[4558]: I0120 18:03:14.525605 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1382485d-e2ea-474b-a0f5-3db36a8cf19c-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "1382485d-e2ea-474b-a0f5-3db36a8cf19c" (UID: "1382485d-e2ea-474b-a0f5-3db36a8cf19c"). InnerVolumeSpecName "crc-storage". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:03:14 crc kubenswrapper[4558]: I0120 18:03:14.610344 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w57jv\" (UniqueName: \"kubernetes.io/projected/1382485d-e2ea-474b-a0f5-3db36a8cf19c-kube-api-access-w57jv\") pod \"1382485d-e2ea-474b-a0f5-3db36a8cf19c\" (UID: \"1382485d-e2ea-474b-a0f5-3db36a8cf19c\") " Jan 20 18:03:14 crc kubenswrapper[4558]: I0120 18:03:14.610498 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/1382485d-e2ea-474b-a0f5-3db36a8cf19c-node-mnt\") pod \"1382485d-e2ea-474b-a0f5-3db36a8cf19c\" (UID: \"1382485d-e2ea-474b-a0f5-3db36a8cf19c\") " Jan 20 18:03:14 crc kubenswrapper[4558]: I0120 18:03:14.610614 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1382485d-e2ea-474b-a0f5-3db36a8cf19c-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "1382485d-e2ea-474b-a0f5-3db36a8cf19c" (UID: "1382485d-e2ea-474b-a0f5-3db36a8cf19c"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 18:03:14 crc kubenswrapper[4558]: I0120 18:03:14.610959 4558 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/1382485d-e2ea-474b-a0f5-3db36a8cf19c-node-mnt\") on node \"crc\" DevicePath \"\"" Jan 20 18:03:14 crc kubenswrapper[4558]: I0120 18:03:14.610984 4558 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/1382485d-e2ea-474b-a0f5-3db36a8cf19c-crc-storage\") on node \"crc\" DevicePath \"\"" Jan 20 18:03:14 crc kubenswrapper[4558]: I0120 18:03:14.614213 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1382485d-e2ea-474b-a0f5-3db36a8cf19c-kube-api-access-w57jv" (OuterVolumeSpecName: "kube-api-access-w57jv") pod "1382485d-e2ea-474b-a0f5-3db36a8cf19c" (UID: "1382485d-e2ea-474b-a0f5-3db36a8cf19c"). InnerVolumeSpecName "kube-api-access-w57jv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:03:14 crc kubenswrapper[4558]: I0120 18:03:14.713007 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w57jv\" (UniqueName: \"kubernetes.io/projected/1382485d-e2ea-474b-a0f5-3db36a8cf19c-kube-api-access-w57jv\") on node \"crc\" DevicePath \"\"" Jan 20 18:03:15 crc kubenswrapper[4558]: I0120 18:03:15.197423 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-c5jlp" event={"ID":"1382485d-e2ea-474b-a0f5-3db36a8cf19c","Type":"ContainerDied","Data":"f21d401581b3cc78ed672cc993cafd127f8ba44dfc79243802f3248448938025"} Jan 20 18:03:15 crc kubenswrapper[4558]: I0120 18:03:15.197485 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f21d401581b3cc78ed672cc993cafd127f8ba44dfc79243802f3248448938025" Jan 20 18:03:15 crc kubenswrapper[4558]: I0120 18:03:15.197490 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-c5jlp" Jan 20 18:03:17 crc kubenswrapper[4558]: I0120 18:03:17.605257 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-78c7b787f5-8gcnh"] Jan 20 18:03:17 crc kubenswrapper[4558]: E0120 18:03:17.605818 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1382485d-e2ea-474b-a0f5-3db36a8cf19c" containerName="storage" Jan 20 18:03:17 crc kubenswrapper[4558]: I0120 18:03:17.605829 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1382485d-e2ea-474b-a0f5-3db36a8cf19c" containerName="storage" Jan 20 18:03:17 crc kubenswrapper[4558]: I0120 18:03:17.605981 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="1382485d-e2ea-474b-a0f5-3db36a8cf19c" containerName="storage" Jan 20 18:03:17 crc kubenswrapper[4558]: I0120 18:03:17.606666 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-78c7b787f5-8gcnh" Jan 20 18:03:17 crc kubenswrapper[4558]: I0120 18:03:17.610892 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-edpm-tls" Jan 20 18:03:17 crc kubenswrapper[4558]: I0120 18:03:17.616103 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-78c7b787f5-8gcnh"] Jan 20 18:03:17 crc kubenswrapper[4558]: I0120 18:03:17.673213 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/43949aad-0465-4be8-a540-32db9e3b6cbd-config\") pod \"dnsmasq-dnsmasq-78c7b787f5-8gcnh\" (UID: \"43949aad-0465-4be8-a540-32db9e3b6cbd\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-78c7b787f5-8gcnh" Jan 20 18:03:17 crc kubenswrapper[4558]: I0120 18:03:17.673555 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mxhnx\" (UniqueName: \"kubernetes.io/projected/43949aad-0465-4be8-a540-32db9e3b6cbd-kube-api-access-mxhnx\") pod \"dnsmasq-dnsmasq-78c7b787f5-8gcnh\" (UID: \"43949aad-0465-4be8-a540-32db9e3b6cbd\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-78c7b787f5-8gcnh" Jan 20 18:03:17 crc kubenswrapper[4558]: I0120 18:03:17.673585 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/43949aad-0465-4be8-a540-32db9e3b6cbd-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-78c7b787f5-8gcnh\" (UID: \"43949aad-0465-4be8-a540-32db9e3b6cbd\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-78c7b787f5-8gcnh" Jan 20 18:03:17 crc kubenswrapper[4558]: I0120 18:03:17.673618 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-tls\" (UniqueName: \"kubernetes.io/configmap/43949aad-0465-4be8-a540-32db9e3b6cbd-openstack-edpm-tls\") pod \"dnsmasq-dnsmasq-78c7b787f5-8gcnh\" (UID: \"43949aad-0465-4be8-a540-32db9e3b6cbd\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-78c7b787f5-8gcnh" Jan 20 18:03:17 crc kubenswrapper[4558]: I0120 18:03:17.716270 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-78c7b787f5-8gcnh"] Jan 20 18:03:17 crc kubenswrapper[4558]: E0120 18:03:17.716765 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[config dnsmasq-svc kube-api-access-mxhnx openstack-edpm-tls], unattached volumes=[], failed to process volumes=[]: context canceled" 
pod="openstack-kuttl-tests/dnsmasq-dnsmasq-78c7b787f5-8gcnh" podUID="43949aad-0465-4be8-a540-32db9e3b6cbd" Jan 20 18:03:17 crc kubenswrapper[4558]: I0120 18:03:17.737041 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-79c5857c95-fclrv"] Jan 20 18:03:17 crc kubenswrapper[4558]: I0120 18:03:17.738260 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-79c5857c95-fclrv" Jan 20 18:03:17 crc kubenswrapper[4558]: I0120 18:03:17.744305 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-79c5857c95-fclrv"] Jan 20 18:03:17 crc kubenswrapper[4558]: I0120 18:03:17.774895 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-tls\" (UniqueName: \"kubernetes.io/configmap/43949aad-0465-4be8-a540-32db9e3b6cbd-openstack-edpm-tls\") pod \"dnsmasq-dnsmasq-78c7b787f5-8gcnh\" (UID: \"43949aad-0465-4be8-a540-32db9e3b6cbd\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-78c7b787f5-8gcnh" Jan 20 18:03:17 crc kubenswrapper[4558]: I0120 18:03:17.774947 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e8b2847a-be4f-43d0-b15a-0bf0e540a57b-config\") pod \"dnsmasq-dnsmasq-79c5857c95-fclrv\" (UID: \"e8b2847a-be4f-43d0-b15a-0bf0e540a57b\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-79c5857c95-fclrv" Jan 20 18:03:17 crc kubenswrapper[4558]: I0120 18:03:17.774998 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/43949aad-0465-4be8-a540-32db9e3b6cbd-config\") pod \"dnsmasq-dnsmasq-78c7b787f5-8gcnh\" (UID: \"43949aad-0465-4be8-a540-32db9e3b6cbd\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-78c7b787f5-8gcnh" Jan 20 18:03:17 crc kubenswrapper[4558]: I0120 18:03:17.775026 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v8pjg\" (UniqueName: \"kubernetes.io/projected/e8b2847a-be4f-43d0-b15a-0bf0e540a57b-kube-api-access-v8pjg\") pod \"dnsmasq-dnsmasq-79c5857c95-fclrv\" (UID: \"e8b2847a-be4f-43d0-b15a-0bf0e540a57b\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-79c5857c95-fclrv" Jan 20 18:03:17 crc kubenswrapper[4558]: I0120 18:03:17.775053 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-tls\" (UniqueName: \"kubernetes.io/configmap/e8b2847a-be4f-43d0-b15a-0bf0e540a57b-openstack-edpm-tls\") pod \"dnsmasq-dnsmasq-79c5857c95-fclrv\" (UID: \"e8b2847a-be4f-43d0-b15a-0bf0e540a57b\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-79c5857c95-fclrv" Jan 20 18:03:17 crc kubenswrapper[4558]: I0120 18:03:17.775220 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mxhnx\" (UniqueName: \"kubernetes.io/projected/43949aad-0465-4be8-a540-32db9e3b6cbd-kube-api-access-mxhnx\") pod \"dnsmasq-dnsmasq-78c7b787f5-8gcnh\" (UID: \"43949aad-0465-4be8-a540-32db9e3b6cbd\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-78c7b787f5-8gcnh" Jan 20 18:03:17 crc kubenswrapper[4558]: I0120 18:03:17.775276 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/43949aad-0465-4be8-a540-32db9e3b6cbd-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-78c7b787f5-8gcnh\" (UID: \"43949aad-0465-4be8-a540-32db9e3b6cbd\") " 
pod="openstack-kuttl-tests/dnsmasq-dnsmasq-78c7b787f5-8gcnh" Jan 20 18:03:17 crc kubenswrapper[4558]: I0120 18:03:17.775329 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/e8b2847a-be4f-43d0-b15a-0bf0e540a57b-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-79c5857c95-fclrv\" (UID: \"e8b2847a-be4f-43d0-b15a-0bf0e540a57b\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-79c5857c95-fclrv" Jan 20 18:03:17 crc kubenswrapper[4558]: I0120 18:03:17.775923 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/43949aad-0465-4be8-a540-32db9e3b6cbd-config\") pod \"dnsmasq-dnsmasq-78c7b787f5-8gcnh\" (UID: \"43949aad-0465-4be8-a540-32db9e3b6cbd\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-78c7b787f5-8gcnh" Jan 20 18:03:17 crc kubenswrapper[4558]: I0120 18:03:17.775936 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/43949aad-0465-4be8-a540-32db9e3b6cbd-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-78c7b787f5-8gcnh\" (UID: \"43949aad-0465-4be8-a540-32db9e3b6cbd\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-78c7b787f5-8gcnh" Jan 20 18:03:17 crc kubenswrapper[4558]: I0120 18:03:17.776348 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-tls\" (UniqueName: \"kubernetes.io/configmap/43949aad-0465-4be8-a540-32db9e3b6cbd-openstack-edpm-tls\") pod \"dnsmasq-dnsmasq-78c7b787f5-8gcnh\" (UID: \"43949aad-0465-4be8-a540-32db9e3b6cbd\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-78c7b787f5-8gcnh" Jan 20 18:03:17 crc kubenswrapper[4558]: I0120 18:03:17.792332 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mxhnx\" (UniqueName: \"kubernetes.io/projected/43949aad-0465-4be8-a540-32db9e3b6cbd-kube-api-access-mxhnx\") pod \"dnsmasq-dnsmasq-78c7b787f5-8gcnh\" (UID: \"43949aad-0465-4be8-a540-32db9e3b6cbd\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-78c7b787f5-8gcnh" Jan 20 18:03:17 crc kubenswrapper[4558]: I0120 18:03:17.820130 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-79c5857c95-fclrv"] Jan 20 18:03:17 crc kubenswrapper[4558]: E0120 18:03:17.820670 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[config dnsmasq-svc kube-api-access-v8pjg openstack-edpm-tls], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-79c5857c95-fclrv" podUID="e8b2847a-be4f-43d0-b15a-0bf0e540a57b" Jan 20 18:03:17 crc kubenswrapper[4558]: I0120 18:03:17.841885 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-76b7c4d945-bk85s"] Jan 20 18:03:17 crc kubenswrapper[4558]: I0120 18:03:17.842971 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-76b7c4d945-bk85s" Jan 20 18:03:17 crc kubenswrapper[4558]: I0120 18:03:17.850081 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-76b7c4d945-bk85s"] Jan 20 18:03:17 crc kubenswrapper[4558]: I0120 18:03:17.875809 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v8pjg\" (UniqueName: \"kubernetes.io/projected/e8b2847a-be4f-43d0-b15a-0bf0e540a57b-kube-api-access-v8pjg\") pod \"dnsmasq-dnsmasq-79c5857c95-fclrv\" (UID: \"e8b2847a-be4f-43d0-b15a-0bf0e540a57b\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-79c5857c95-fclrv" Jan 20 18:03:17 crc kubenswrapper[4558]: I0120 18:03:17.875854 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-tls\" (UniqueName: \"kubernetes.io/configmap/e8b2847a-be4f-43d0-b15a-0bf0e540a57b-openstack-edpm-tls\") pod \"dnsmasq-dnsmasq-79c5857c95-fclrv\" (UID: \"e8b2847a-be4f-43d0-b15a-0bf0e540a57b\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-79c5857c95-fclrv" Jan 20 18:03:17 crc kubenswrapper[4558]: I0120 18:03:17.875898 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-tls\" (UniqueName: \"kubernetes.io/configmap/3e7ccbfc-455e-4369-90a8-58abb6cb42a5-openstack-edpm-tls\") pod \"dnsmasq-dnsmasq-76b7c4d945-bk85s\" (UID: \"3e7ccbfc-455e-4369-90a8-58abb6cb42a5\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-76b7c4d945-bk85s" Jan 20 18:03:17 crc kubenswrapper[4558]: I0120 18:03:17.875932 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wtjfg\" (UniqueName: \"kubernetes.io/projected/3e7ccbfc-455e-4369-90a8-58abb6cb42a5-kube-api-access-wtjfg\") pod \"dnsmasq-dnsmasq-76b7c4d945-bk85s\" (UID: \"3e7ccbfc-455e-4369-90a8-58abb6cb42a5\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-76b7c4d945-bk85s" Jan 20 18:03:17 crc kubenswrapper[4558]: I0120 18:03:17.875953 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/e8b2847a-be4f-43d0-b15a-0bf0e540a57b-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-79c5857c95-fclrv\" (UID: \"e8b2847a-be4f-43d0-b15a-0bf0e540a57b\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-79c5857c95-fclrv" Jan 20 18:03:17 crc kubenswrapper[4558]: I0120 18:03:17.875974 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e7ccbfc-455e-4369-90a8-58abb6cb42a5-config\") pod \"dnsmasq-dnsmasq-76b7c4d945-bk85s\" (UID: \"3e7ccbfc-455e-4369-90a8-58abb6cb42a5\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-76b7c4d945-bk85s" Jan 20 18:03:17 crc kubenswrapper[4558]: I0120 18:03:17.875998 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e8b2847a-be4f-43d0-b15a-0bf0e540a57b-config\") pod \"dnsmasq-dnsmasq-79c5857c95-fclrv\" (UID: \"e8b2847a-be4f-43d0-b15a-0bf0e540a57b\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-79c5857c95-fclrv" Jan 20 18:03:17 crc kubenswrapper[4558]: I0120 18:03:17.876039 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/3e7ccbfc-455e-4369-90a8-58abb6cb42a5-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-76b7c4d945-bk85s\" (UID: \"3e7ccbfc-455e-4369-90a8-58abb6cb42a5\") " 
pod="openstack-kuttl-tests/dnsmasq-dnsmasq-76b7c4d945-bk85s" Jan 20 18:03:17 crc kubenswrapper[4558]: I0120 18:03:17.876781 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-tls\" (UniqueName: \"kubernetes.io/configmap/e8b2847a-be4f-43d0-b15a-0bf0e540a57b-openstack-edpm-tls\") pod \"dnsmasq-dnsmasq-79c5857c95-fclrv\" (UID: \"e8b2847a-be4f-43d0-b15a-0bf0e540a57b\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-79c5857c95-fclrv" Jan 20 18:03:17 crc kubenswrapper[4558]: I0120 18:03:17.876790 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/e8b2847a-be4f-43d0-b15a-0bf0e540a57b-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-79c5857c95-fclrv\" (UID: \"e8b2847a-be4f-43d0-b15a-0bf0e540a57b\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-79c5857c95-fclrv" Jan 20 18:03:17 crc kubenswrapper[4558]: I0120 18:03:17.876965 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e8b2847a-be4f-43d0-b15a-0bf0e540a57b-config\") pod \"dnsmasq-dnsmasq-79c5857c95-fclrv\" (UID: \"e8b2847a-be4f-43d0-b15a-0bf0e540a57b\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-79c5857c95-fclrv" Jan 20 18:03:17 crc kubenswrapper[4558]: I0120 18:03:17.890491 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v8pjg\" (UniqueName: \"kubernetes.io/projected/e8b2847a-be4f-43d0-b15a-0bf0e540a57b-kube-api-access-v8pjg\") pod \"dnsmasq-dnsmasq-79c5857c95-fclrv\" (UID: \"e8b2847a-be4f-43d0-b15a-0bf0e540a57b\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-79c5857c95-fclrv" Jan 20 18:03:17 crc kubenswrapper[4558]: I0120 18:03:17.977696 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/3e7ccbfc-455e-4369-90a8-58abb6cb42a5-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-76b7c4d945-bk85s\" (UID: \"3e7ccbfc-455e-4369-90a8-58abb6cb42a5\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-76b7c4d945-bk85s" Jan 20 18:03:17 crc kubenswrapper[4558]: I0120 18:03:17.977818 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-tls\" (UniqueName: \"kubernetes.io/configmap/3e7ccbfc-455e-4369-90a8-58abb6cb42a5-openstack-edpm-tls\") pod \"dnsmasq-dnsmasq-76b7c4d945-bk85s\" (UID: \"3e7ccbfc-455e-4369-90a8-58abb6cb42a5\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-76b7c4d945-bk85s" Jan 20 18:03:17 crc kubenswrapper[4558]: I0120 18:03:17.977906 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wtjfg\" (UniqueName: \"kubernetes.io/projected/3e7ccbfc-455e-4369-90a8-58abb6cb42a5-kube-api-access-wtjfg\") pod \"dnsmasq-dnsmasq-76b7c4d945-bk85s\" (UID: \"3e7ccbfc-455e-4369-90a8-58abb6cb42a5\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-76b7c4d945-bk85s" Jan 20 18:03:17 crc kubenswrapper[4558]: I0120 18:03:17.977953 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e7ccbfc-455e-4369-90a8-58abb6cb42a5-config\") pod \"dnsmasq-dnsmasq-76b7c4d945-bk85s\" (UID: \"3e7ccbfc-455e-4369-90a8-58abb6cb42a5\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-76b7c4d945-bk85s" Jan 20 18:03:17 crc kubenswrapper[4558]: I0120 18:03:17.978857 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/3e7ccbfc-455e-4369-90a8-58abb6cb42a5-dnsmasq-svc\") pod 
\"dnsmasq-dnsmasq-76b7c4d945-bk85s\" (UID: \"3e7ccbfc-455e-4369-90a8-58abb6cb42a5\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-76b7c4d945-bk85s" Jan 20 18:03:17 crc kubenswrapper[4558]: I0120 18:03:17.978951 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-tls\" (UniqueName: \"kubernetes.io/configmap/3e7ccbfc-455e-4369-90a8-58abb6cb42a5-openstack-edpm-tls\") pod \"dnsmasq-dnsmasq-76b7c4d945-bk85s\" (UID: \"3e7ccbfc-455e-4369-90a8-58abb6cb42a5\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-76b7c4d945-bk85s" Jan 20 18:03:17 crc kubenswrapper[4558]: I0120 18:03:17.979027 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e7ccbfc-455e-4369-90a8-58abb6cb42a5-config\") pod \"dnsmasq-dnsmasq-76b7c4d945-bk85s\" (UID: \"3e7ccbfc-455e-4369-90a8-58abb6cb42a5\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-76b7c4d945-bk85s" Jan 20 18:03:17 crc kubenswrapper[4558]: I0120 18:03:17.991407 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wtjfg\" (UniqueName: \"kubernetes.io/projected/3e7ccbfc-455e-4369-90a8-58abb6cb42a5-kube-api-access-wtjfg\") pod \"dnsmasq-dnsmasq-76b7c4d945-bk85s\" (UID: \"3e7ccbfc-455e-4369-90a8-58abb6cb42a5\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-76b7c4d945-bk85s" Jan 20 18:03:18 crc kubenswrapper[4558]: I0120 18:03:18.157159 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-76b7c4d945-bk85s" Jan 20 18:03:18 crc kubenswrapper[4558]: I0120 18:03:18.220740 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-78c7b787f5-8gcnh" Jan 20 18:03:18 crc kubenswrapper[4558]: I0120 18:03:18.220796 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-79c5857c95-fclrv" Jan 20 18:03:18 crc kubenswrapper[4558]: I0120 18:03:18.238586 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-78c7b787f5-8gcnh" Jan 20 18:03:18 crc kubenswrapper[4558]: I0120 18:03:18.245676 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-79c5857c95-fclrv" Jan 20 18:03:18 crc kubenswrapper[4558]: I0120 18:03:18.282091 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-tls\" (UniqueName: \"kubernetes.io/configmap/e8b2847a-be4f-43d0-b15a-0bf0e540a57b-openstack-edpm-tls\") pod \"e8b2847a-be4f-43d0-b15a-0bf0e540a57b\" (UID: \"e8b2847a-be4f-43d0-b15a-0bf0e540a57b\") " Jan 20 18:03:18 crc kubenswrapper[4558]: I0120 18:03:18.282358 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/43949aad-0465-4be8-a540-32db9e3b6cbd-config\") pod \"43949aad-0465-4be8-a540-32db9e3b6cbd\" (UID: \"43949aad-0465-4be8-a540-32db9e3b6cbd\") " Jan 20 18:03:18 crc kubenswrapper[4558]: I0120 18:03:18.282395 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/e8b2847a-be4f-43d0-b15a-0bf0e540a57b-dnsmasq-svc\") pod \"e8b2847a-be4f-43d0-b15a-0bf0e540a57b\" (UID: \"e8b2847a-be4f-43d0-b15a-0bf0e540a57b\") " Jan 20 18:03:18 crc kubenswrapper[4558]: I0120 18:03:18.282524 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-tls\" (UniqueName: \"kubernetes.io/configmap/43949aad-0465-4be8-a540-32db9e3b6cbd-openstack-edpm-tls\") pod \"43949aad-0465-4be8-a540-32db9e3b6cbd\" (UID: \"43949aad-0465-4be8-a540-32db9e3b6cbd\") " Jan 20 18:03:18 crc kubenswrapper[4558]: I0120 18:03:18.282579 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e8b2847a-be4f-43d0-b15a-0bf0e540a57b-openstack-edpm-tls" (OuterVolumeSpecName: "openstack-edpm-tls") pod "e8b2847a-be4f-43d0-b15a-0bf0e540a57b" (UID: "e8b2847a-be4f-43d0-b15a-0bf0e540a57b"). InnerVolumeSpecName "openstack-edpm-tls". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:03:18 crc kubenswrapper[4558]: I0120 18:03:18.282629 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/43949aad-0465-4be8-a540-32db9e3b6cbd-dnsmasq-svc\") pod \"43949aad-0465-4be8-a540-32db9e3b6cbd\" (UID: \"43949aad-0465-4be8-a540-32db9e3b6cbd\") " Jan 20 18:03:18 crc kubenswrapper[4558]: I0120 18:03:18.282666 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mxhnx\" (UniqueName: \"kubernetes.io/projected/43949aad-0465-4be8-a540-32db9e3b6cbd-kube-api-access-mxhnx\") pod \"43949aad-0465-4be8-a540-32db9e3b6cbd\" (UID: \"43949aad-0465-4be8-a540-32db9e3b6cbd\") " Jan 20 18:03:18 crc kubenswrapper[4558]: I0120 18:03:18.282774 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v8pjg\" (UniqueName: \"kubernetes.io/projected/e8b2847a-be4f-43d0-b15a-0bf0e540a57b-kube-api-access-v8pjg\") pod \"e8b2847a-be4f-43d0-b15a-0bf0e540a57b\" (UID: \"e8b2847a-be4f-43d0-b15a-0bf0e540a57b\") " Jan 20 18:03:18 crc kubenswrapper[4558]: I0120 18:03:18.282822 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e8b2847a-be4f-43d0-b15a-0bf0e540a57b-config\") pod \"e8b2847a-be4f-43d0-b15a-0bf0e540a57b\" (UID: \"e8b2847a-be4f-43d0-b15a-0bf0e540a57b\") " Jan 20 18:03:18 crc kubenswrapper[4558]: I0120 18:03:18.283107 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43949aad-0465-4be8-a540-32db9e3b6cbd-dnsmasq-svc" (OuterVolumeSpecName: "dnsmasq-svc") pod "43949aad-0465-4be8-a540-32db9e3b6cbd" (UID: "43949aad-0465-4be8-a540-32db9e3b6cbd"). InnerVolumeSpecName "dnsmasq-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:03:18 crc kubenswrapper[4558]: I0120 18:03:18.283238 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43949aad-0465-4be8-a540-32db9e3b6cbd-config" (OuterVolumeSpecName: "config") pod "43949aad-0465-4be8-a540-32db9e3b6cbd" (UID: "43949aad-0465-4be8-a540-32db9e3b6cbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:03:18 crc kubenswrapper[4558]: I0120 18:03:18.283524 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e8b2847a-be4f-43d0-b15a-0bf0e540a57b-dnsmasq-svc" (OuterVolumeSpecName: "dnsmasq-svc") pod "e8b2847a-be4f-43d0-b15a-0bf0e540a57b" (UID: "e8b2847a-be4f-43d0-b15a-0bf0e540a57b"). InnerVolumeSpecName "dnsmasq-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:03:18 crc kubenswrapper[4558]: I0120 18:03:18.283569 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e8b2847a-be4f-43d0-b15a-0bf0e540a57b-config" (OuterVolumeSpecName: "config") pod "e8b2847a-be4f-43d0-b15a-0bf0e540a57b" (UID: "e8b2847a-be4f-43d0-b15a-0bf0e540a57b"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:03:18 crc kubenswrapper[4558]: I0120 18:03:18.283854 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e8b2847a-be4f-43d0-b15a-0bf0e540a57b-config\") on node \"crc\" DevicePath \"\"" Jan 20 18:03:18 crc kubenswrapper[4558]: I0120 18:03:18.283884 4558 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-tls\" (UniqueName: \"kubernetes.io/configmap/e8b2847a-be4f-43d0-b15a-0bf0e540a57b-openstack-edpm-tls\") on node \"crc\" DevicePath \"\"" Jan 20 18:03:18 crc kubenswrapper[4558]: I0120 18:03:18.283898 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/43949aad-0465-4be8-a540-32db9e3b6cbd-config\") on node \"crc\" DevicePath \"\"" Jan 20 18:03:18 crc kubenswrapper[4558]: I0120 18:03:18.283910 4558 reconciler_common.go:293] "Volume detached for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/e8b2847a-be4f-43d0-b15a-0bf0e540a57b-dnsmasq-svc\") on node \"crc\" DevicePath \"\"" Jan 20 18:03:18 crc kubenswrapper[4558]: I0120 18:03:18.283919 4558 reconciler_common.go:293] "Volume detached for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/43949aad-0465-4be8-a540-32db9e3b6cbd-dnsmasq-svc\") on node \"crc\" DevicePath \"\"" Jan 20 18:03:18 crc kubenswrapper[4558]: I0120 18:03:18.283953 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43949aad-0465-4be8-a540-32db9e3b6cbd-openstack-edpm-tls" (OuterVolumeSpecName: "openstack-edpm-tls") pod "43949aad-0465-4be8-a540-32db9e3b6cbd" (UID: "43949aad-0465-4be8-a540-32db9e3b6cbd"). InnerVolumeSpecName "openstack-edpm-tls". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:03:18 crc kubenswrapper[4558]: I0120 18:03:18.288207 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43949aad-0465-4be8-a540-32db9e3b6cbd-kube-api-access-mxhnx" (OuterVolumeSpecName: "kube-api-access-mxhnx") pod "43949aad-0465-4be8-a540-32db9e3b6cbd" (UID: "43949aad-0465-4be8-a540-32db9e3b6cbd"). InnerVolumeSpecName "kube-api-access-mxhnx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:03:18 crc kubenswrapper[4558]: I0120 18:03:18.288823 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e8b2847a-be4f-43d0-b15a-0bf0e540a57b-kube-api-access-v8pjg" (OuterVolumeSpecName: "kube-api-access-v8pjg") pod "e8b2847a-be4f-43d0-b15a-0bf0e540a57b" (UID: "e8b2847a-be4f-43d0-b15a-0bf0e540a57b"). InnerVolumeSpecName "kube-api-access-v8pjg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:03:18 crc kubenswrapper[4558]: I0120 18:03:18.370633 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-76b7c4d945-bk85s"] Jan 20 18:03:18 crc kubenswrapper[4558]: I0120 18:03:18.385772 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v8pjg\" (UniqueName: \"kubernetes.io/projected/e8b2847a-be4f-43d0-b15a-0bf0e540a57b-kube-api-access-v8pjg\") on node \"crc\" DevicePath \"\"" Jan 20 18:03:18 crc kubenswrapper[4558]: I0120 18:03:18.385819 4558 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-tls\" (UniqueName: \"kubernetes.io/configmap/43949aad-0465-4be8-a540-32db9e3b6cbd-openstack-edpm-tls\") on node \"crc\" DevicePath \"\"" Jan 20 18:03:18 crc kubenswrapper[4558]: I0120 18:03:18.385832 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mxhnx\" (UniqueName: \"kubernetes.io/projected/43949aad-0465-4be8-a540-32db9e3b6cbd-kube-api-access-mxhnx\") on node \"crc\" DevicePath \"\"" Jan 20 18:03:19 crc kubenswrapper[4558]: I0120 18:03:19.240415 4558 generic.go:334] "Generic (PLEG): container finished" podID="3e7ccbfc-455e-4369-90a8-58abb6cb42a5" containerID="a46ddbe8de3c509f4604edf587bfde0299a36b9402aa8cd8d94edbbc31e569ce" exitCode=0 Jan 20 18:03:19 crc kubenswrapper[4558]: I0120 18:03:19.240771 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-79c5857c95-fclrv" Jan 20 18:03:19 crc kubenswrapper[4558]: I0120 18:03:19.240963 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-76b7c4d945-bk85s" event={"ID":"3e7ccbfc-455e-4369-90a8-58abb6cb42a5","Type":"ContainerDied","Data":"a46ddbe8de3c509f4604edf587bfde0299a36b9402aa8cd8d94edbbc31e569ce"} Jan 20 18:03:19 crc kubenswrapper[4558]: I0120 18:03:19.241024 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-78c7b787f5-8gcnh" Jan 20 18:03:19 crc kubenswrapper[4558]: I0120 18:03:19.241049 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-76b7c4d945-bk85s" event={"ID":"3e7ccbfc-455e-4369-90a8-58abb6cb42a5","Type":"ContainerStarted","Data":"42c2507dfa037b2d8af38347104b0e3f95a95daed639b5e33f653781eca98369"} Jan 20 18:03:19 crc kubenswrapper[4558]: I0120 18:03:19.285901 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-78c7b787f5-8gcnh"] Jan 20 18:03:19 crc kubenswrapper[4558]: I0120 18:03:19.298896 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-78c7b787f5-8gcnh"] Jan 20 18:03:19 crc kubenswrapper[4558]: I0120 18:03:19.310185 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-79c5857c95-fclrv"] Jan 20 18:03:19 crc kubenswrapper[4558]: I0120 18:03:19.313218 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-79c5857c95-fclrv"] Jan 20 18:03:20 crc kubenswrapper[4558]: I0120 18:03:20.251750 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-76b7c4d945-bk85s" event={"ID":"3e7ccbfc-455e-4369-90a8-58abb6cb42a5","Type":"ContainerStarted","Data":"bb6a2d45bcc378b41b93c6cd0baaeefdb01ef66dedfa23d67d52c4eec6180c18"} Jan 20 18:03:20 crc kubenswrapper[4558]: I0120 18:03:20.251951 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-76b7c4d945-bk85s" Jan 20 18:03:20 crc kubenswrapper[4558]: I0120 18:03:20.273000 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-76b7c4d945-bk85s" podStartSLOduration=3.272981688 podStartE2EDuration="3.272981688s" podCreationTimestamp="2026-01-20 18:03:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:03:20.265081248 +0000 UTC m=+4894.025419215" watchObservedRunningTime="2026-01-20 18:03:20.272981688 +0000 UTC m=+4894.033319645" Jan 20 18:03:20 crc kubenswrapper[4558]: I0120 18:03:20.576041 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43949aad-0465-4be8-a540-32db9e3b6cbd" path="/var/lib/kubelet/pods/43949aad-0465-4be8-a540-32db9e3b6cbd/volumes" Jan 20 18:03:20 crc kubenswrapper[4558]: I0120 18:03:20.577397 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e8b2847a-be4f-43d0-b15a-0bf0e540a57b" path="/var/lib/kubelet/pods/e8b2847a-be4f-43d0-b15a-0bf0e540a57b/volumes" Jan 20 18:03:27 crc kubenswrapper[4558]: I0120 18:03:27.330363 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 18:03:27 crc kubenswrapper[4558]: I0120 18:03:27.330984 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 18:03:27 crc kubenswrapper[4558]: I0120 
18:03:27.331057 4558 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" Jan 20 18:03:27 crc kubenswrapper[4558]: I0120 18:03:27.331802 4558 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"6b02d31ec11fe5435af988e35303a6f66b5afa5ef952ce5f7cc4b3cd5f4d9497"} pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 20 18:03:27 crc kubenswrapper[4558]: I0120 18:03:27.331859 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" containerID="cri-o://6b02d31ec11fe5435af988e35303a6f66b5afa5ef952ce5f7cc4b3cd5f4d9497" gracePeriod=600 Jan 20 18:03:27 crc kubenswrapper[4558]: E0120 18:03:27.455485 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:03:28 crc kubenswrapper[4558]: I0120 18:03:28.159317 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-76b7c4d945-bk85s" Jan 20 18:03:28 crc kubenswrapper[4558]: I0120 18:03:28.200148 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-82d4h"] Jan 20 18:03:28 crc kubenswrapper[4558]: I0120 18:03:28.200421 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-82d4h" podUID="dd440515-39d3-4f1c-a85a-d03a0bfc0427" containerName="dnsmasq-dns" containerID="cri-o://f9b09f9ee0c281fa78e53d4c778c2d500368621ce6fa6c0131211f6eac1b2d78" gracePeriod=10 Jan 20 18:03:28 crc kubenswrapper[4558]: I0120 18:03:28.325100 4558 generic.go:334] "Generic (PLEG): container finished" podID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerID="6b02d31ec11fe5435af988e35303a6f66b5afa5ef952ce5f7cc4b3cd5f4d9497" exitCode=0 Jan 20 18:03:28 crc kubenswrapper[4558]: I0120 18:03:28.325205 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerDied","Data":"6b02d31ec11fe5435af988e35303a6f66b5afa5ef952ce5f7cc4b3cd5f4d9497"} Jan 20 18:03:28 crc kubenswrapper[4558]: I0120 18:03:28.325258 4558 scope.go:117] "RemoveContainer" containerID="06773f88c6859f6cd4428c9c5cf74a790c0f5de8a90ecadbaf3e2b35d66f98a5" Jan 20 18:03:28 crc kubenswrapper[4558]: I0120 18:03:28.326214 4558 scope.go:117] "RemoveContainer" containerID="6b02d31ec11fe5435af988e35303a6f66b5afa5ef952ce5f7cc4b3cd5f4d9497" Jan 20 18:03:28 crc kubenswrapper[4558]: E0120 18:03:28.326493 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:03:28 crc kubenswrapper[4558]: I0120 18:03:28.332495 4558 generic.go:334] "Generic (PLEG): container finished" podID="dd440515-39d3-4f1c-a85a-d03a0bfc0427" containerID="f9b09f9ee0c281fa78e53d4c778c2d500368621ce6fa6c0131211f6eac1b2d78" exitCode=0 Jan 20 18:03:28 crc kubenswrapper[4558]: I0120 18:03:28.332571 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-82d4h" event={"ID":"dd440515-39d3-4f1c-a85a-d03a0bfc0427","Type":"ContainerDied","Data":"f9b09f9ee0c281fa78e53d4c778c2d500368621ce6fa6c0131211f6eac1b2d78"} Jan 20 18:03:28 crc kubenswrapper[4558]: I0120 18:03:28.576656 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-82d4h" Jan 20 18:03:28 crc kubenswrapper[4558]: I0120 18:03:28.641922 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v4gqk\" (UniqueName: \"kubernetes.io/projected/dd440515-39d3-4f1c-a85a-d03a0bfc0427-kube-api-access-v4gqk\") pod \"dd440515-39d3-4f1c-a85a-d03a0bfc0427\" (UID: \"dd440515-39d3-4f1c-a85a-d03a0bfc0427\") " Jan 20 18:03:28 crc kubenswrapper[4558]: I0120 18:03:28.641990 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/dd440515-39d3-4f1c-a85a-d03a0bfc0427-dnsmasq-svc\") pod \"dd440515-39d3-4f1c-a85a-d03a0bfc0427\" (UID: \"dd440515-39d3-4f1c-a85a-d03a0bfc0427\") " Jan 20 18:03:28 crc kubenswrapper[4558]: I0120 18:03:28.642273 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dd440515-39d3-4f1c-a85a-d03a0bfc0427-config\") pod \"dd440515-39d3-4f1c-a85a-d03a0bfc0427\" (UID: \"dd440515-39d3-4f1c-a85a-d03a0bfc0427\") " Jan 20 18:03:28 crc kubenswrapper[4558]: I0120 18:03:28.648896 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dd440515-39d3-4f1c-a85a-d03a0bfc0427-kube-api-access-v4gqk" (OuterVolumeSpecName: "kube-api-access-v4gqk") pod "dd440515-39d3-4f1c-a85a-d03a0bfc0427" (UID: "dd440515-39d3-4f1c-a85a-d03a0bfc0427"). InnerVolumeSpecName "kube-api-access-v4gqk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:03:28 crc kubenswrapper[4558]: I0120 18:03:28.674758 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dd440515-39d3-4f1c-a85a-d03a0bfc0427-dnsmasq-svc" (OuterVolumeSpecName: "dnsmasq-svc") pod "dd440515-39d3-4f1c-a85a-d03a0bfc0427" (UID: "dd440515-39d3-4f1c-a85a-d03a0bfc0427"). InnerVolumeSpecName "dnsmasq-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:03:28 crc kubenswrapper[4558]: I0120 18:03:28.676711 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dd440515-39d3-4f1c-a85a-d03a0bfc0427-config" (OuterVolumeSpecName: "config") pod "dd440515-39d3-4f1c-a85a-d03a0bfc0427" (UID: "dd440515-39d3-4f1c-a85a-d03a0bfc0427"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:03:28 crc kubenswrapper[4558]: I0120 18:03:28.754819 4558 reconciler_common.go:293] "Volume detached for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/dd440515-39d3-4f1c-a85a-d03a0bfc0427-dnsmasq-svc\") on node \"crc\" DevicePath \"\"" Jan 20 18:03:28 crc kubenswrapper[4558]: I0120 18:03:28.755202 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dd440515-39d3-4f1c-a85a-d03a0bfc0427-config\") on node \"crc\" DevicePath \"\"" Jan 20 18:03:28 crc kubenswrapper[4558]: I0120 18:03:28.755222 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v4gqk\" (UniqueName: \"kubernetes.io/projected/dd440515-39d3-4f1c-a85a-d03a0bfc0427-kube-api-access-v4gqk\") on node \"crc\" DevicePath \"\"" Jan 20 18:03:29 crc kubenswrapper[4558]: I0120 18:03:29.352179 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-82d4h" event={"ID":"dd440515-39d3-4f1c-a85a-d03a0bfc0427","Type":"ContainerDied","Data":"bf8bf9fdd2cd589aa2685153cc2e18c4acd6ffc56f32237ae75ae55740456aa1"} Jan 20 18:03:29 crc kubenswrapper[4558]: I0120 18:03:29.352242 4558 scope.go:117] "RemoveContainer" containerID="f9b09f9ee0c281fa78e53d4c778c2d500368621ce6fa6c0131211f6eac1b2d78" Jan 20 18:03:29 crc kubenswrapper[4558]: I0120 18:03:29.352348 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-82d4h" Jan 20 18:03:29 crc kubenswrapper[4558]: I0120 18:03:29.389542 4558 scope.go:117] "RemoveContainer" containerID="69453032edde0cb8d7dbc359f75edc7809d0f8a436a9750d9c7dc6f4a2523e61" Jan 20 18:03:29 crc kubenswrapper[4558]: I0120 18:03:29.391630 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-82d4h"] Jan 20 18:03:29 crc kubenswrapper[4558]: I0120 18:03:29.405030 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-82d4h"] Jan 20 18:03:30 crc kubenswrapper[4558]: I0120 18:03:30.575854 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dd440515-39d3-4f1c-a85a-d03a0bfc0427" path="/var/lib/kubelet/pods/dd440515-39d3-4f1c-a85a-d03a0bfc0427/volumes" Jan 20 18:03:31 crc kubenswrapper[4558]: I0120 18:03:31.265816 4558 scope.go:117] "RemoveContainer" containerID="6d3bcdd8bc2daf8370cfb0ead7863d55e4210dd7cd17bf8e13b1e10aace7f03f" Jan 20 18:03:32 crc kubenswrapper[4558]: I0120 18:03:32.432856 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/install-certs-ovr-openstack-edpm-tls-openstack-edpm-tls-dflsf"] Jan 20 18:03:32 crc kubenswrapper[4558]: E0120 18:03:32.433215 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd440515-39d3-4f1c-a85a-d03a0bfc0427" containerName="dnsmasq-dns" Jan 20 18:03:32 crc kubenswrapper[4558]: I0120 18:03:32.433229 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd440515-39d3-4f1c-a85a-d03a0bfc0427" containerName="dnsmasq-dns" Jan 20 18:03:32 crc kubenswrapper[4558]: E0120 18:03:32.433263 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd440515-39d3-4f1c-a85a-d03a0bfc0427" containerName="init" Jan 20 18:03:32 crc kubenswrapper[4558]: I0120 18:03:32.433269 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd440515-39d3-4f1c-a85a-d03a0bfc0427" containerName="init" Jan 20 18:03:32 crc kubenswrapper[4558]: I0120 
18:03:32.433392 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd440515-39d3-4f1c-a85a-d03a0bfc0427" containerName="dnsmasq-dns" Jan 20 18:03:32 crc kubenswrapper[4558]: I0120 18:03:32.433892 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/install-certs-ovr-openstack-edpm-tls-openstack-edpm-tls-dflsf" Jan 20 18:03:32 crc kubenswrapper[4558]: I0120 18:03:32.436187 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"openstack-edpm-tls-dockercfg-5xq2z" Jan 20 18:03:32 crc kubenswrapper[4558]: I0120 18:03:32.436471 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-openstack-edpm-tls" Jan 20 18:03:32 crc kubenswrapper[4558]: I0120 18:03:32.436591 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"combined-ca-bundle" Jan 20 18:03:32 crc kubenswrapper[4558]: I0120 18:03:32.436816 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"openstack-edpm-tls-generic-service1-default-certs-1" Jan 20 18:03:32 crc kubenswrapper[4558]: I0120 18:03:32.436948 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"openstack-edpm-tls-generic-service1-default-certs-0" Jan 20 18:03:32 crc kubenswrapper[4558]: I0120 18:03:32.437098 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"openstack-edpm-tls-generic-service1-default-certs-2" Jan 20 18:03:32 crc kubenswrapper[4558]: I0120 18:03:32.437228 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 18:03:32 crc kubenswrapper[4558]: I0120 18:03:32.437412 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 18:03:32 crc kubenswrapper[4558]: I0120 18:03:32.446604 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/install-certs-ovr-openstack-edpm-tls-openstack-edpm-tls-dflsf"] Jan 20 18:03:32 crc kubenswrapper[4558]: I0120 18:03:32.509763 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"generic-service1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21a4af65-13b6-4950-9d76-6b0ea77230b0-generic-service1-combined-ca-bundle\") pod \"install-certs-ovr-openstack-edpm-tls-openstack-edpm-tls-dflsf\" (UID: \"21a4af65-13b6-4950-9d76-6b0ea77230b0\") " pod="openstack-kuttl-tests/install-certs-ovr-openstack-edpm-tls-openstack-edpm-tls-dflsf" Jan 20 18:03:32 crc kubenswrapper[4558]: I0120 18:03:32.509810 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-tls\" (UniqueName: \"kubernetes.io/secret/21a4af65-13b6-4950-9d76-6b0ea77230b0-ssh-key-openstack-edpm-tls\") pod \"install-certs-ovr-openstack-edpm-tls-openstack-edpm-tls-dflsf\" (UID: \"21a4af65-13b6-4950-9d76-6b0ea77230b0\") " pod="openstack-kuttl-tests/install-certs-ovr-openstack-edpm-tls-openstack-edpm-tls-dflsf" Jan 20 18:03:32 crc kubenswrapper[4558]: I0120 18:03:32.509838 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pklc5\" (UniqueName: \"kubernetes.io/projected/21a4af65-13b6-4950-9d76-6b0ea77230b0-kube-api-access-pklc5\") pod \"install-certs-ovr-openstack-edpm-tls-openstack-edpm-tls-dflsf\" (UID: 
\"21a4af65-13b6-4950-9d76-6b0ea77230b0\") " pod="openstack-kuttl-tests/install-certs-ovr-openstack-edpm-tls-openstack-edpm-tls-dflsf" Jan 20 18:03:32 crc kubenswrapper[4558]: I0120 18:03:32.510022 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/21a4af65-13b6-4950-9d76-6b0ea77230b0-inventory\") pod \"install-certs-ovr-openstack-edpm-tls-openstack-edpm-tls-dflsf\" (UID: \"21a4af65-13b6-4950-9d76-6b0ea77230b0\") " pod="openstack-kuttl-tests/install-certs-ovr-openstack-edpm-tls-openstack-edpm-tls-dflsf" Jan 20 18:03:32 crc kubenswrapper[4558]: I0120 18:03:32.510117 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-tls-generic-service1-default-certs-0\" (UniqueName: \"kubernetes.io/projected/21a4af65-13b6-4950-9d76-6b0ea77230b0-openstack-edpm-tls-generic-service1-default-certs-0\") pod \"install-certs-ovr-openstack-edpm-tls-openstack-edpm-tls-dflsf\" (UID: \"21a4af65-13b6-4950-9d76-6b0ea77230b0\") " pod="openstack-kuttl-tests/install-certs-ovr-openstack-edpm-tls-openstack-edpm-tls-dflsf" Jan 20 18:03:32 crc kubenswrapper[4558]: I0120 18:03:32.510156 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"install-certs-ovr-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21a4af65-13b6-4950-9d76-6b0ea77230b0-install-certs-ovr-combined-ca-bundle\") pod \"install-certs-ovr-openstack-edpm-tls-openstack-edpm-tls-dflsf\" (UID: \"21a4af65-13b6-4950-9d76-6b0ea77230b0\") " pod="openstack-kuttl-tests/install-certs-ovr-openstack-edpm-tls-openstack-edpm-tls-dflsf" Jan 20 18:03:32 crc kubenswrapper[4558]: I0120 18:03:32.611531 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"generic-service1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21a4af65-13b6-4950-9d76-6b0ea77230b0-generic-service1-combined-ca-bundle\") pod \"install-certs-ovr-openstack-edpm-tls-openstack-edpm-tls-dflsf\" (UID: \"21a4af65-13b6-4950-9d76-6b0ea77230b0\") " pod="openstack-kuttl-tests/install-certs-ovr-openstack-edpm-tls-openstack-edpm-tls-dflsf" Jan 20 18:03:32 crc kubenswrapper[4558]: I0120 18:03:32.611593 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-tls\" (UniqueName: \"kubernetes.io/secret/21a4af65-13b6-4950-9d76-6b0ea77230b0-ssh-key-openstack-edpm-tls\") pod \"install-certs-ovr-openstack-edpm-tls-openstack-edpm-tls-dflsf\" (UID: \"21a4af65-13b6-4950-9d76-6b0ea77230b0\") " pod="openstack-kuttl-tests/install-certs-ovr-openstack-edpm-tls-openstack-edpm-tls-dflsf" Jan 20 18:03:32 crc kubenswrapper[4558]: I0120 18:03:32.611632 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pklc5\" (UniqueName: \"kubernetes.io/projected/21a4af65-13b6-4950-9d76-6b0ea77230b0-kube-api-access-pklc5\") pod \"install-certs-ovr-openstack-edpm-tls-openstack-edpm-tls-dflsf\" (UID: \"21a4af65-13b6-4950-9d76-6b0ea77230b0\") " pod="openstack-kuttl-tests/install-certs-ovr-openstack-edpm-tls-openstack-edpm-tls-dflsf" Jan 20 18:03:32 crc kubenswrapper[4558]: I0120 18:03:32.611712 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/21a4af65-13b6-4950-9d76-6b0ea77230b0-inventory\") pod \"install-certs-ovr-openstack-edpm-tls-openstack-edpm-tls-dflsf\" (UID: \"21a4af65-13b6-4950-9d76-6b0ea77230b0\") " 
pod="openstack-kuttl-tests/install-certs-ovr-openstack-edpm-tls-openstack-edpm-tls-dflsf" Jan 20 18:03:32 crc kubenswrapper[4558]: I0120 18:03:32.611765 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"install-certs-ovr-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21a4af65-13b6-4950-9d76-6b0ea77230b0-install-certs-ovr-combined-ca-bundle\") pod \"install-certs-ovr-openstack-edpm-tls-openstack-edpm-tls-dflsf\" (UID: \"21a4af65-13b6-4950-9d76-6b0ea77230b0\") " pod="openstack-kuttl-tests/install-certs-ovr-openstack-edpm-tls-openstack-edpm-tls-dflsf" Jan 20 18:03:32 crc kubenswrapper[4558]: I0120 18:03:32.611789 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-tls-generic-service1-default-certs-0\" (UniqueName: \"kubernetes.io/projected/21a4af65-13b6-4950-9d76-6b0ea77230b0-openstack-edpm-tls-generic-service1-default-certs-0\") pod \"install-certs-ovr-openstack-edpm-tls-openstack-edpm-tls-dflsf\" (UID: \"21a4af65-13b6-4950-9d76-6b0ea77230b0\") " pod="openstack-kuttl-tests/install-certs-ovr-openstack-edpm-tls-openstack-edpm-tls-dflsf" Jan 20 18:03:32 crc kubenswrapper[4558]: I0120 18:03:32.618661 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-tls\" (UniqueName: \"kubernetes.io/secret/21a4af65-13b6-4950-9d76-6b0ea77230b0-ssh-key-openstack-edpm-tls\") pod \"install-certs-ovr-openstack-edpm-tls-openstack-edpm-tls-dflsf\" (UID: \"21a4af65-13b6-4950-9d76-6b0ea77230b0\") " pod="openstack-kuttl-tests/install-certs-ovr-openstack-edpm-tls-openstack-edpm-tls-dflsf" Jan 20 18:03:32 crc kubenswrapper[4558]: I0120 18:03:32.618926 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/21a4af65-13b6-4950-9d76-6b0ea77230b0-inventory\") pod \"install-certs-ovr-openstack-edpm-tls-openstack-edpm-tls-dflsf\" (UID: \"21a4af65-13b6-4950-9d76-6b0ea77230b0\") " pod="openstack-kuttl-tests/install-certs-ovr-openstack-edpm-tls-openstack-edpm-tls-dflsf" Jan 20 18:03:32 crc kubenswrapper[4558]: I0120 18:03:32.619768 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-tls-generic-service1-default-certs-0\" (UniqueName: \"kubernetes.io/projected/21a4af65-13b6-4950-9d76-6b0ea77230b0-openstack-edpm-tls-generic-service1-default-certs-0\") pod \"install-certs-ovr-openstack-edpm-tls-openstack-edpm-tls-dflsf\" (UID: \"21a4af65-13b6-4950-9d76-6b0ea77230b0\") " pod="openstack-kuttl-tests/install-certs-ovr-openstack-edpm-tls-openstack-edpm-tls-dflsf" Jan 20 18:03:32 crc kubenswrapper[4558]: I0120 18:03:32.619976 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"generic-service1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21a4af65-13b6-4950-9d76-6b0ea77230b0-generic-service1-combined-ca-bundle\") pod \"install-certs-ovr-openstack-edpm-tls-openstack-edpm-tls-dflsf\" (UID: \"21a4af65-13b6-4950-9d76-6b0ea77230b0\") " pod="openstack-kuttl-tests/install-certs-ovr-openstack-edpm-tls-openstack-edpm-tls-dflsf" Jan 20 18:03:32 crc kubenswrapper[4558]: I0120 18:03:32.620331 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"install-certs-ovr-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21a4af65-13b6-4950-9d76-6b0ea77230b0-install-certs-ovr-combined-ca-bundle\") pod \"install-certs-ovr-openstack-edpm-tls-openstack-edpm-tls-dflsf\" (UID: \"21a4af65-13b6-4950-9d76-6b0ea77230b0\") " 
pod="openstack-kuttl-tests/install-certs-ovr-openstack-edpm-tls-openstack-edpm-tls-dflsf" Jan 20 18:03:32 crc kubenswrapper[4558]: I0120 18:03:32.625761 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pklc5\" (UniqueName: \"kubernetes.io/projected/21a4af65-13b6-4950-9d76-6b0ea77230b0-kube-api-access-pklc5\") pod \"install-certs-ovr-openstack-edpm-tls-openstack-edpm-tls-dflsf\" (UID: \"21a4af65-13b6-4950-9d76-6b0ea77230b0\") " pod="openstack-kuttl-tests/install-certs-ovr-openstack-edpm-tls-openstack-edpm-tls-dflsf" Jan 20 18:03:32 crc kubenswrapper[4558]: I0120 18:03:32.750821 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/install-certs-ovr-openstack-edpm-tls-openstack-edpm-tls-dflsf" Jan 20 18:03:33 crc kubenswrapper[4558]: I0120 18:03:33.144836 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/install-certs-ovr-openstack-edpm-tls-openstack-edpm-tls-dflsf"] Jan 20 18:03:33 crc kubenswrapper[4558]: W0120 18:03:33.148375 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod21a4af65_13b6_4950_9d76_6b0ea77230b0.slice/crio-a24bf89fdf7a60beb6153d26060d431f36e9abcbcc784cbd54d4453857dd0f18 WatchSource:0}: Error finding container a24bf89fdf7a60beb6153d26060d431f36e9abcbcc784cbd54d4453857dd0f18: Status 404 returned error can't find the container with id a24bf89fdf7a60beb6153d26060d431f36e9abcbcc784cbd54d4453857dd0f18 Jan 20 18:03:33 crc kubenswrapper[4558]: I0120 18:03:33.389759 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/install-certs-ovr-openstack-edpm-tls-openstack-edpm-tls-dflsf" event={"ID":"21a4af65-13b6-4950-9d76-6b0ea77230b0","Type":"ContainerStarted","Data":"a24bf89fdf7a60beb6153d26060d431f36e9abcbcc784cbd54d4453857dd0f18"} Jan 20 18:03:34 crc kubenswrapper[4558]: I0120 18:03:34.402283 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/install-certs-ovr-openstack-edpm-tls-openstack-edpm-tls-dflsf" event={"ID":"21a4af65-13b6-4950-9d76-6b0ea77230b0","Type":"ContainerStarted","Data":"56b1bad1c26f54ce418c79aa5b1f06a6259bc82bb730f7f529d2783a2c39c82c"} Jan 20 18:03:34 crc kubenswrapper[4558]: I0120 18:03:34.420434 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/install-certs-ovr-openstack-edpm-tls-openstack-edpm-tls-dflsf" podStartSLOduration=1.8532727119999999 podStartE2EDuration="2.420402177s" podCreationTimestamp="2026-01-20 18:03:32 +0000 UTC" firstStartedPulling="2026-01-20 18:03:33.150919712 +0000 UTC m=+4906.911257679" lastFinishedPulling="2026-01-20 18:03:33.718049176 +0000 UTC m=+4907.478387144" observedRunningTime="2026-01-20 18:03:34.414518568 +0000 UTC m=+4908.174856525" watchObservedRunningTime="2026-01-20 18:03:34.420402177 +0000 UTC m=+4908.180740144" Jan 20 18:03:36 crc kubenswrapper[4558]: I0120 18:03:36.421798 4558 generic.go:334] "Generic (PLEG): container finished" podID="21a4af65-13b6-4950-9d76-6b0ea77230b0" containerID="56b1bad1c26f54ce418c79aa5b1f06a6259bc82bb730f7f529d2783a2c39c82c" exitCode=0 Jan 20 18:03:36 crc kubenswrapper[4558]: I0120 18:03:36.421882 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/install-certs-ovr-openstack-edpm-tls-openstack-edpm-tls-dflsf" event={"ID":"21a4af65-13b6-4950-9d76-6b0ea77230b0","Type":"ContainerDied","Data":"56b1bad1c26f54ce418c79aa5b1f06a6259bc82bb730f7f529d2783a2c39c82c"} 
Jan 20 18:03:37 crc kubenswrapper[4558]: I0120 18:03:37.681751 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/install-certs-ovr-openstack-edpm-tls-openstack-edpm-tls-dflsf" Jan 20 18:03:37 crc kubenswrapper[4558]: I0120 18:03:37.690510 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-tls-generic-service1-default-certs-0\" (UniqueName: \"kubernetes.io/projected/21a4af65-13b6-4950-9d76-6b0ea77230b0-openstack-edpm-tls-generic-service1-default-certs-0\") pod \"21a4af65-13b6-4950-9d76-6b0ea77230b0\" (UID: \"21a4af65-13b6-4950-9d76-6b0ea77230b0\") " Jan 20 18:03:37 crc kubenswrapper[4558]: I0120 18:03:37.690654 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"install-certs-ovr-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21a4af65-13b6-4950-9d76-6b0ea77230b0-install-certs-ovr-combined-ca-bundle\") pod \"21a4af65-13b6-4950-9d76-6b0ea77230b0\" (UID: \"21a4af65-13b6-4950-9d76-6b0ea77230b0\") " Jan 20 18:03:37 crc kubenswrapper[4558]: I0120 18:03:37.690744 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-tls\" (UniqueName: \"kubernetes.io/secret/21a4af65-13b6-4950-9d76-6b0ea77230b0-ssh-key-openstack-edpm-tls\") pod \"21a4af65-13b6-4950-9d76-6b0ea77230b0\" (UID: \"21a4af65-13b6-4950-9d76-6b0ea77230b0\") " Jan 20 18:03:37 crc kubenswrapper[4558]: I0120 18:03:37.690783 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"generic-service1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21a4af65-13b6-4950-9d76-6b0ea77230b0-generic-service1-combined-ca-bundle\") pod \"21a4af65-13b6-4950-9d76-6b0ea77230b0\" (UID: \"21a4af65-13b6-4950-9d76-6b0ea77230b0\") " Jan 20 18:03:37 crc kubenswrapper[4558]: I0120 18:03:37.690879 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pklc5\" (UniqueName: \"kubernetes.io/projected/21a4af65-13b6-4950-9d76-6b0ea77230b0-kube-api-access-pklc5\") pod \"21a4af65-13b6-4950-9d76-6b0ea77230b0\" (UID: \"21a4af65-13b6-4950-9d76-6b0ea77230b0\") " Jan 20 18:03:37 crc kubenswrapper[4558]: I0120 18:03:37.690934 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/21a4af65-13b6-4950-9d76-6b0ea77230b0-inventory\") pod \"21a4af65-13b6-4950-9d76-6b0ea77230b0\" (UID: \"21a4af65-13b6-4950-9d76-6b0ea77230b0\") " Jan 20 18:03:37 crc kubenswrapper[4558]: I0120 18:03:37.711038 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21a4af65-13b6-4950-9d76-6b0ea77230b0-install-certs-ovr-combined-ca-bundle" (OuterVolumeSpecName: "install-certs-ovr-combined-ca-bundle") pod "21a4af65-13b6-4950-9d76-6b0ea77230b0" (UID: "21a4af65-13b6-4950-9d76-6b0ea77230b0"). InnerVolumeSpecName "install-certs-ovr-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:03:37 crc kubenswrapper[4558]: I0120 18:03:37.711118 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/21a4af65-13b6-4950-9d76-6b0ea77230b0-openstack-edpm-tls-generic-service1-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-tls-generic-service1-default-certs-0") pod "21a4af65-13b6-4950-9d76-6b0ea77230b0" (UID: "21a4af65-13b6-4950-9d76-6b0ea77230b0"). InnerVolumeSpecName "openstack-edpm-tls-generic-service1-default-certs-0". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:03:37 crc kubenswrapper[4558]: I0120 18:03:37.711482 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/21a4af65-13b6-4950-9d76-6b0ea77230b0-kube-api-access-pklc5" (OuterVolumeSpecName: "kube-api-access-pklc5") pod "21a4af65-13b6-4950-9d76-6b0ea77230b0" (UID: "21a4af65-13b6-4950-9d76-6b0ea77230b0"). InnerVolumeSpecName "kube-api-access-pklc5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:03:37 crc kubenswrapper[4558]: I0120 18:03:37.713622 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21a4af65-13b6-4950-9d76-6b0ea77230b0-generic-service1-combined-ca-bundle" (OuterVolumeSpecName: "generic-service1-combined-ca-bundle") pod "21a4af65-13b6-4950-9d76-6b0ea77230b0" (UID: "21a4af65-13b6-4950-9d76-6b0ea77230b0"). InnerVolumeSpecName "generic-service1-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:03:37 crc kubenswrapper[4558]: I0120 18:03:37.717729 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21a4af65-13b6-4950-9d76-6b0ea77230b0-ssh-key-openstack-edpm-tls" (OuterVolumeSpecName: "ssh-key-openstack-edpm-tls") pod "21a4af65-13b6-4950-9d76-6b0ea77230b0" (UID: "21a4af65-13b6-4950-9d76-6b0ea77230b0"). InnerVolumeSpecName "ssh-key-openstack-edpm-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:03:37 crc kubenswrapper[4558]: I0120 18:03:37.721867 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21a4af65-13b6-4950-9d76-6b0ea77230b0-inventory" (OuterVolumeSpecName: "inventory") pod "21a4af65-13b6-4950-9d76-6b0ea77230b0" (UID: "21a4af65-13b6-4950-9d76-6b0ea77230b0"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:03:37 crc kubenswrapper[4558]: I0120 18:03:37.792597 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pklc5\" (UniqueName: \"kubernetes.io/projected/21a4af65-13b6-4950-9d76-6b0ea77230b0-kube-api-access-pklc5\") on node \"crc\" DevicePath \"\"" Jan 20 18:03:37 crc kubenswrapper[4558]: I0120 18:03:37.792623 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/21a4af65-13b6-4950-9d76-6b0ea77230b0-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:03:37 crc kubenswrapper[4558]: I0120 18:03:37.792635 4558 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-tls-generic-service1-default-certs-0\" (UniqueName: \"kubernetes.io/projected/21a4af65-13b6-4950-9d76-6b0ea77230b0-openstack-edpm-tls-generic-service1-default-certs-0\") on node \"crc\" DevicePath \"\"" Jan 20 18:03:37 crc kubenswrapper[4558]: I0120 18:03:37.792664 4558 reconciler_common.go:293] "Volume detached for volume \"install-certs-ovr-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21a4af65-13b6-4950-9d76-6b0ea77230b0-install-certs-ovr-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:03:37 crc kubenswrapper[4558]: I0120 18:03:37.792675 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-tls\" (UniqueName: \"kubernetes.io/secret/21a4af65-13b6-4950-9d76-6b0ea77230b0-ssh-key-openstack-edpm-tls\") on node \"crc\" DevicePath \"\"" Jan 20 18:03:37 crc kubenswrapper[4558]: I0120 18:03:37.792685 4558 reconciler_common.go:293] "Volume detached for volume \"generic-service1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21a4af65-13b6-4950-9d76-6b0ea77230b0-generic-service1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:03:38 crc kubenswrapper[4558]: I0120 18:03:38.441582 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/install-certs-ovr-openstack-edpm-tls-openstack-edpm-tls-dflsf" event={"ID":"21a4af65-13b6-4950-9d76-6b0ea77230b0","Type":"ContainerDied","Data":"a24bf89fdf7a60beb6153d26060d431f36e9abcbcc784cbd54d4453857dd0f18"} Jan 20 18:03:38 crc kubenswrapper[4558]: I0120 18:03:38.442010 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a24bf89fdf7a60beb6153d26060d431f36e9abcbcc784cbd54d4453857dd0f18" Jan 20 18:03:38 crc kubenswrapper[4558]: I0120 18:03:38.441651 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/install-certs-ovr-openstack-edpm-tls-openstack-edpm-tls-dflsf" Jan 20 18:03:38 crc kubenswrapper[4558]: I0120 18:03:38.514607 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/generic-service1-openstack-edpm-tls-openstack-edpm-tls-f6r9g"] Jan 20 18:03:38 crc kubenswrapper[4558]: E0120 18:03:38.515003 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="21a4af65-13b6-4950-9d76-6b0ea77230b0" containerName="install-certs-ovr-openstack-edpm-tls-openstack-edpm-tls" Jan 20 18:03:38 crc kubenswrapper[4558]: I0120 18:03:38.515025 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="21a4af65-13b6-4950-9d76-6b0ea77230b0" containerName="install-certs-ovr-openstack-edpm-tls-openstack-edpm-tls" Jan 20 18:03:38 crc kubenswrapper[4558]: I0120 18:03:38.515219 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="21a4af65-13b6-4950-9d76-6b0ea77230b0" containerName="install-certs-ovr-openstack-edpm-tls-openstack-edpm-tls" Jan 20 18:03:38 crc kubenswrapper[4558]: I0120 18:03:38.515868 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/generic-service1-openstack-edpm-tls-openstack-edpm-tls-f6r9g" Jan 20 18:03:38 crc kubenswrapper[4558]: I0120 18:03:38.518147 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"combined-ca-bundle" Jan 20 18:03:38 crc kubenswrapper[4558]: I0120 18:03:38.518250 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-openstack-edpm-tls" Jan 20 18:03:38 crc kubenswrapper[4558]: I0120 18:03:38.518542 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 18:03:38 crc kubenswrapper[4558]: I0120 18:03:38.518882 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 18:03:38 crc kubenswrapper[4558]: I0120 18:03:38.519927 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"openstack-edpm-tls-dockercfg-5xq2z" Jan 20 18:03:38 crc kubenswrapper[4558]: I0120 18:03:38.523410 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/generic-service1-openstack-edpm-tls-openstack-edpm-tls-f6r9g"] Jan 20 18:03:38 crc kubenswrapper[4558]: I0120 18:03:38.606772 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d35adfac-32a6-46b1-94f5-521da5524bf2-inventory\") pod \"generic-service1-openstack-edpm-tls-openstack-edpm-tls-f6r9g\" (UID: \"d35adfac-32a6-46b1-94f5-521da5524bf2\") " pod="openstack-kuttl-tests/generic-service1-openstack-edpm-tls-openstack-edpm-tls-f6r9g" Jan 20 18:03:38 crc kubenswrapper[4558]: I0120 18:03:38.606820 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"generic-service1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d35adfac-32a6-46b1-94f5-521da5524bf2-generic-service1-combined-ca-bundle\") pod \"generic-service1-openstack-edpm-tls-openstack-edpm-tls-f6r9g\" (UID: \"d35adfac-32a6-46b1-94f5-521da5524bf2\") " pod="openstack-kuttl-tests/generic-service1-openstack-edpm-tls-openstack-edpm-tls-f6r9g" Jan 20 18:03:38 crc kubenswrapper[4558]: I0120 18:03:38.606847 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"ssh-key-openstack-edpm-tls\" (UniqueName: \"kubernetes.io/secret/d35adfac-32a6-46b1-94f5-521da5524bf2-ssh-key-openstack-edpm-tls\") pod \"generic-service1-openstack-edpm-tls-openstack-edpm-tls-f6r9g\" (UID: \"d35adfac-32a6-46b1-94f5-521da5524bf2\") " pod="openstack-kuttl-tests/generic-service1-openstack-edpm-tls-openstack-edpm-tls-f6r9g" Jan 20 18:03:38 crc kubenswrapper[4558]: I0120 18:03:38.606998 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-94dn9\" (UniqueName: \"kubernetes.io/projected/d35adfac-32a6-46b1-94f5-521da5524bf2-kube-api-access-94dn9\") pod \"generic-service1-openstack-edpm-tls-openstack-edpm-tls-f6r9g\" (UID: \"d35adfac-32a6-46b1-94f5-521da5524bf2\") " pod="openstack-kuttl-tests/generic-service1-openstack-edpm-tls-openstack-edpm-tls-f6r9g" Jan 20 18:03:38 crc kubenswrapper[4558]: I0120 18:03:38.709478 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d35adfac-32a6-46b1-94f5-521da5524bf2-inventory\") pod \"generic-service1-openstack-edpm-tls-openstack-edpm-tls-f6r9g\" (UID: \"d35adfac-32a6-46b1-94f5-521da5524bf2\") " pod="openstack-kuttl-tests/generic-service1-openstack-edpm-tls-openstack-edpm-tls-f6r9g" Jan 20 18:03:38 crc kubenswrapper[4558]: I0120 18:03:38.709547 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"generic-service1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d35adfac-32a6-46b1-94f5-521da5524bf2-generic-service1-combined-ca-bundle\") pod \"generic-service1-openstack-edpm-tls-openstack-edpm-tls-f6r9g\" (UID: \"d35adfac-32a6-46b1-94f5-521da5524bf2\") " pod="openstack-kuttl-tests/generic-service1-openstack-edpm-tls-openstack-edpm-tls-f6r9g" Jan 20 18:03:38 crc kubenswrapper[4558]: I0120 18:03:38.709575 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-tls\" (UniqueName: \"kubernetes.io/secret/d35adfac-32a6-46b1-94f5-521da5524bf2-ssh-key-openstack-edpm-tls\") pod \"generic-service1-openstack-edpm-tls-openstack-edpm-tls-f6r9g\" (UID: \"d35adfac-32a6-46b1-94f5-521da5524bf2\") " pod="openstack-kuttl-tests/generic-service1-openstack-edpm-tls-openstack-edpm-tls-f6r9g" Jan 20 18:03:38 crc kubenswrapper[4558]: I0120 18:03:38.709637 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-94dn9\" (UniqueName: \"kubernetes.io/projected/d35adfac-32a6-46b1-94f5-521da5524bf2-kube-api-access-94dn9\") pod \"generic-service1-openstack-edpm-tls-openstack-edpm-tls-f6r9g\" (UID: \"d35adfac-32a6-46b1-94f5-521da5524bf2\") " pod="openstack-kuttl-tests/generic-service1-openstack-edpm-tls-openstack-edpm-tls-f6r9g" Jan 20 18:03:38 crc kubenswrapper[4558]: I0120 18:03:38.715791 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"generic-service1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d35adfac-32a6-46b1-94f5-521da5524bf2-generic-service1-combined-ca-bundle\") pod \"generic-service1-openstack-edpm-tls-openstack-edpm-tls-f6r9g\" (UID: \"d35adfac-32a6-46b1-94f5-521da5524bf2\") " pod="openstack-kuttl-tests/generic-service1-openstack-edpm-tls-openstack-edpm-tls-f6r9g" Jan 20 18:03:38 crc kubenswrapper[4558]: I0120 18:03:38.715840 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-tls\" (UniqueName: \"kubernetes.io/secret/d35adfac-32a6-46b1-94f5-521da5524bf2-ssh-key-openstack-edpm-tls\") pod 
\"generic-service1-openstack-edpm-tls-openstack-edpm-tls-f6r9g\" (UID: \"d35adfac-32a6-46b1-94f5-521da5524bf2\") " pod="openstack-kuttl-tests/generic-service1-openstack-edpm-tls-openstack-edpm-tls-f6r9g" Jan 20 18:03:38 crc kubenswrapper[4558]: I0120 18:03:38.716387 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d35adfac-32a6-46b1-94f5-521da5524bf2-inventory\") pod \"generic-service1-openstack-edpm-tls-openstack-edpm-tls-f6r9g\" (UID: \"d35adfac-32a6-46b1-94f5-521da5524bf2\") " pod="openstack-kuttl-tests/generic-service1-openstack-edpm-tls-openstack-edpm-tls-f6r9g" Jan 20 18:03:38 crc kubenswrapper[4558]: I0120 18:03:38.726463 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-94dn9\" (UniqueName: \"kubernetes.io/projected/d35adfac-32a6-46b1-94f5-521da5524bf2-kube-api-access-94dn9\") pod \"generic-service1-openstack-edpm-tls-openstack-edpm-tls-f6r9g\" (UID: \"d35adfac-32a6-46b1-94f5-521da5524bf2\") " pod="openstack-kuttl-tests/generic-service1-openstack-edpm-tls-openstack-edpm-tls-f6r9g" Jan 20 18:03:38 crc kubenswrapper[4558]: I0120 18:03:38.830970 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/generic-service1-openstack-edpm-tls-openstack-edpm-tls-f6r9g" Jan 20 18:03:39 crc kubenswrapper[4558]: I0120 18:03:39.217023 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/generic-service1-openstack-edpm-tls-openstack-edpm-tls-f6r9g"] Jan 20 18:03:39 crc kubenswrapper[4558]: I0120 18:03:39.454323 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/generic-service1-openstack-edpm-tls-openstack-edpm-tls-f6r9g" event={"ID":"d35adfac-32a6-46b1-94f5-521da5524bf2","Type":"ContainerStarted","Data":"149873bf0806bd0ddf6c4cde037bf7a28195e22a67a41760d82c1d141116dd79"} Jan 20 18:03:40 crc kubenswrapper[4558]: I0120 18:03:40.468765 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/generic-service1-openstack-edpm-tls-openstack-edpm-tls-f6r9g" event={"ID":"d35adfac-32a6-46b1-94f5-521da5524bf2","Type":"ContainerStarted","Data":"1185e47915505c6785f0d83431115dfd450327f88c169d9a61b3f3becbcc4354"} Jan 20 18:03:40 crc kubenswrapper[4558]: I0120 18:03:40.483377 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/generic-service1-openstack-edpm-tls-openstack-edpm-tls-f6r9g" podStartSLOduration=1.8915478430000001 podStartE2EDuration="2.48336184s" podCreationTimestamp="2026-01-20 18:03:38 +0000 UTC" firstStartedPulling="2026-01-20 18:03:39.224950367 +0000 UTC m=+4912.985288334" lastFinishedPulling="2026-01-20 18:03:39.816764363 +0000 UTC m=+4913.577102331" observedRunningTime="2026-01-20 18:03:40.482643258 +0000 UTC m=+4914.242981225" watchObservedRunningTime="2026-01-20 18:03:40.48336184 +0000 UTC m=+4914.243699807" Jan 20 18:03:42 crc kubenswrapper[4558]: I0120 18:03:42.489193 4558 generic.go:334] "Generic (PLEG): container finished" podID="d35adfac-32a6-46b1-94f5-521da5524bf2" containerID="1185e47915505c6785f0d83431115dfd450327f88c169d9a61b3f3becbcc4354" exitCode=0 Jan 20 18:03:42 crc kubenswrapper[4558]: I0120 18:03:42.489269 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/generic-service1-openstack-edpm-tls-openstack-edpm-tls-f6r9g" 
event={"ID":"d35adfac-32a6-46b1-94f5-521da5524bf2","Type":"ContainerDied","Data":"1185e47915505c6785f0d83431115dfd450327f88c169d9a61b3f3becbcc4354"} Jan 20 18:03:42 crc kubenswrapper[4558]: I0120 18:03:42.565942 4558 scope.go:117] "RemoveContainer" containerID="6b02d31ec11fe5435af988e35303a6f66b5afa5ef952ce5f7cc4b3cd5f4d9497" Jan 20 18:03:42 crc kubenswrapper[4558]: E0120 18:03:42.566240 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:03:43 crc kubenswrapper[4558]: I0120 18:03:43.739274 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/generic-service1-openstack-edpm-tls-openstack-edpm-tls-f6r9g" Jan 20 18:03:43 crc kubenswrapper[4558]: I0120 18:03:43.782546 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d35adfac-32a6-46b1-94f5-521da5524bf2-inventory\") pod \"d35adfac-32a6-46b1-94f5-521da5524bf2\" (UID: \"d35adfac-32a6-46b1-94f5-521da5524bf2\") " Jan 20 18:03:43 crc kubenswrapper[4558]: I0120 18:03:43.782616 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"generic-service1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d35adfac-32a6-46b1-94f5-521da5524bf2-generic-service1-combined-ca-bundle\") pod \"d35adfac-32a6-46b1-94f5-521da5524bf2\" (UID: \"d35adfac-32a6-46b1-94f5-521da5524bf2\") " Jan 20 18:03:43 crc kubenswrapper[4558]: I0120 18:03:43.782710 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-tls\" (UniqueName: \"kubernetes.io/secret/d35adfac-32a6-46b1-94f5-521da5524bf2-ssh-key-openstack-edpm-tls\") pod \"d35adfac-32a6-46b1-94f5-521da5524bf2\" (UID: \"d35adfac-32a6-46b1-94f5-521da5524bf2\") " Jan 20 18:03:43 crc kubenswrapper[4558]: I0120 18:03:43.782733 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-94dn9\" (UniqueName: \"kubernetes.io/projected/d35adfac-32a6-46b1-94f5-521da5524bf2-kube-api-access-94dn9\") pod \"d35adfac-32a6-46b1-94f5-521da5524bf2\" (UID: \"d35adfac-32a6-46b1-94f5-521da5524bf2\") " Jan 20 18:03:43 crc kubenswrapper[4558]: I0120 18:03:43.787002 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d35adfac-32a6-46b1-94f5-521da5524bf2-generic-service1-combined-ca-bundle" (OuterVolumeSpecName: "generic-service1-combined-ca-bundle") pod "d35adfac-32a6-46b1-94f5-521da5524bf2" (UID: "d35adfac-32a6-46b1-94f5-521da5524bf2"). InnerVolumeSpecName "generic-service1-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:03:43 crc kubenswrapper[4558]: I0120 18:03:43.789347 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d35adfac-32a6-46b1-94f5-521da5524bf2-kube-api-access-94dn9" (OuterVolumeSpecName: "kube-api-access-94dn9") pod "d35adfac-32a6-46b1-94f5-521da5524bf2" (UID: "d35adfac-32a6-46b1-94f5-521da5524bf2"). InnerVolumeSpecName "kube-api-access-94dn9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:03:43 crc kubenswrapper[4558]: E0120 18:03:43.797818 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d35adfac-32a6-46b1-94f5-521da5524bf2-ssh-key-openstack-edpm-tls podName:d35adfac-32a6-46b1-94f5-521da5524bf2 nodeName:}" failed. No retries permitted until 2026-01-20 18:03:44.297792778 +0000 UTC m=+4918.058130744 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "ssh-key-openstack-edpm-tls" (UniqueName: "kubernetes.io/secret/d35adfac-32a6-46b1-94f5-521da5524bf2-ssh-key-openstack-edpm-tls") pod "d35adfac-32a6-46b1-94f5-521da5524bf2" (UID: "d35adfac-32a6-46b1-94f5-521da5524bf2") : error deleting /var/lib/kubelet/pods/d35adfac-32a6-46b1-94f5-521da5524bf2/volume-subpaths: remove /var/lib/kubelet/pods/d35adfac-32a6-46b1-94f5-521da5524bf2/volume-subpaths: no such file or directory Jan 20 18:03:43 crc kubenswrapper[4558]: I0120 18:03:43.799814 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d35adfac-32a6-46b1-94f5-521da5524bf2-inventory" (OuterVolumeSpecName: "inventory") pod "d35adfac-32a6-46b1-94f5-521da5524bf2" (UID: "d35adfac-32a6-46b1-94f5-521da5524bf2"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:03:43 crc kubenswrapper[4558]: I0120 18:03:43.885333 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-94dn9\" (UniqueName: \"kubernetes.io/projected/d35adfac-32a6-46b1-94f5-521da5524bf2-kube-api-access-94dn9\") on node \"crc\" DevicePath \"\"" Jan 20 18:03:43 crc kubenswrapper[4558]: I0120 18:03:43.885368 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d35adfac-32a6-46b1-94f5-521da5524bf2-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:03:43 crc kubenswrapper[4558]: I0120 18:03:43.885382 4558 reconciler_common.go:293] "Volume detached for volume \"generic-service1-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d35adfac-32a6-46b1-94f5-521da5524bf2-generic-service1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:03:44 crc kubenswrapper[4558]: I0120 18:03:44.391309 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-tls\" (UniqueName: \"kubernetes.io/secret/d35adfac-32a6-46b1-94f5-521da5524bf2-ssh-key-openstack-edpm-tls\") pod \"d35adfac-32a6-46b1-94f5-521da5524bf2\" (UID: \"d35adfac-32a6-46b1-94f5-521da5524bf2\") " Jan 20 18:03:44 crc kubenswrapper[4558]: I0120 18:03:44.394800 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d35adfac-32a6-46b1-94f5-521da5524bf2-ssh-key-openstack-edpm-tls" (OuterVolumeSpecName: "ssh-key-openstack-edpm-tls") pod "d35adfac-32a6-46b1-94f5-521da5524bf2" (UID: "d35adfac-32a6-46b1-94f5-521da5524bf2"). InnerVolumeSpecName "ssh-key-openstack-edpm-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:03:44 crc kubenswrapper[4558]: I0120 18:03:44.492792 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-tls\" (UniqueName: \"kubernetes.io/secret/d35adfac-32a6-46b1-94f5-521da5524bf2-ssh-key-openstack-edpm-tls\") on node \"crc\" DevicePath \"\"" Jan 20 18:03:44 crc kubenswrapper[4558]: I0120 18:03:44.510050 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/generic-service1-openstack-edpm-tls-openstack-edpm-tls-f6r9g" event={"ID":"d35adfac-32a6-46b1-94f5-521da5524bf2","Type":"ContainerDied","Data":"149873bf0806bd0ddf6c4cde037bf7a28195e22a67a41760d82c1d141116dd79"} Jan 20 18:03:44 crc kubenswrapper[4558]: I0120 18:03:44.510123 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="149873bf0806bd0ddf6c4cde037bf7a28195e22a67a41760d82c1d141116dd79" Jan 20 18:03:44 crc kubenswrapper[4558]: I0120 18:03:44.510098 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/generic-service1-openstack-edpm-tls-openstack-edpm-tls-f6r9g" Jan 20 18:03:45 crc kubenswrapper[4558]: I0120 18:03:45.285645 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/install-certs-ovr-openstack-edpm-tls-openstack-edpm-tls-dflsf"] Jan 20 18:03:45 crc kubenswrapper[4558]: I0120 18:03:45.289319 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/generic-service1-openstack-edpm-tls-openstack-edpm-tls-f6r9g"] Jan 20 18:03:45 crc kubenswrapper[4558]: I0120 18:03:45.292785 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/install-certs-ovr-openstack-edpm-tls-openstack-edpm-tls-dflsf"] Jan 20 18:03:45 crc kubenswrapper[4558]: I0120 18:03:45.296990 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/generic-service1-openstack-edpm-tls-openstack-edpm-tls-f6r9g"] Jan 20 18:03:45 crc kubenswrapper[4558]: I0120 18:03:45.341381 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-lql2g"] Jan 20 18:03:45 crc kubenswrapper[4558]: E0120 18:03:45.341732 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d35adfac-32a6-46b1-94f5-521da5524bf2" containerName="generic-service1-openstack-edpm-tls-openstack-edpm-tls" Jan 20 18:03:45 crc kubenswrapper[4558]: I0120 18:03:45.341752 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d35adfac-32a6-46b1-94f5-521da5524bf2" containerName="generic-service1-openstack-edpm-tls-openstack-edpm-tls" Jan 20 18:03:45 crc kubenswrapper[4558]: I0120 18:03:45.341897 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d35adfac-32a6-46b1-94f5-521da5524bf2" containerName="generic-service1-openstack-edpm-tls-openstack-edpm-tls" Jan 20 18:03:45 crc kubenswrapper[4558]: I0120 18:03:45.342684 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-lql2g" Jan 20 18:03:45 crc kubenswrapper[4558]: I0120 18:03:45.349643 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-lql2g"] Jan 20 18:03:45 crc kubenswrapper[4558]: I0120 18:03:45.406015 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/828c25e0-1972-41a8-aab7-0e18d409b9e6-config\") pod \"dnsmasq-dnsmasq-84b9f45d47-lql2g\" (UID: \"828c25e0-1972-41a8-aab7-0e18d409b9e6\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-lql2g" Jan 20 18:03:45 crc kubenswrapper[4558]: I0120 18:03:45.406093 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5klh4\" (UniqueName: \"kubernetes.io/projected/828c25e0-1972-41a8-aab7-0e18d409b9e6-kube-api-access-5klh4\") pod \"dnsmasq-dnsmasq-84b9f45d47-lql2g\" (UID: \"828c25e0-1972-41a8-aab7-0e18d409b9e6\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-lql2g" Jan 20 18:03:45 crc kubenswrapper[4558]: I0120 18:03:45.406122 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/828c25e0-1972-41a8-aab7-0e18d409b9e6-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-84b9f45d47-lql2g\" (UID: \"828c25e0-1972-41a8-aab7-0e18d409b9e6\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-lql2g" Jan 20 18:03:45 crc kubenswrapper[4558]: I0120 18:03:45.508140 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/828c25e0-1972-41a8-aab7-0e18d409b9e6-config\") pod \"dnsmasq-dnsmasq-84b9f45d47-lql2g\" (UID: \"828c25e0-1972-41a8-aab7-0e18d409b9e6\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-lql2g" Jan 20 18:03:45 crc kubenswrapper[4558]: I0120 18:03:45.508313 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5klh4\" (UniqueName: \"kubernetes.io/projected/828c25e0-1972-41a8-aab7-0e18d409b9e6-kube-api-access-5klh4\") pod \"dnsmasq-dnsmasq-84b9f45d47-lql2g\" (UID: \"828c25e0-1972-41a8-aab7-0e18d409b9e6\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-lql2g" Jan 20 18:03:45 crc kubenswrapper[4558]: I0120 18:03:45.508348 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/828c25e0-1972-41a8-aab7-0e18d409b9e6-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-84b9f45d47-lql2g\" (UID: \"828c25e0-1972-41a8-aab7-0e18d409b9e6\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-lql2g" Jan 20 18:03:45 crc kubenswrapper[4558]: I0120 18:03:45.509351 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/828c25e0-1972-41a8-aab7-0e18d409b9e6-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-84b9f45d47-lql2g\" (UID: \"828c25e0-1972-41a8-aab7-0e18d409b9e6\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-lql2g" Jan 20 18:03:45 crc kubenswrapper[4558]: I0120 18:03:45.509351 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/828c25e0-1972-41a8-aab7-0e18d409b9e6-config\") pod \"dnsmasq-dnsmasq-84b9f45d47-lql2g\" (UID: \"828c25e0-1972-41a8-aab7-0e18d409b9e6\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-lql2g" Jan 20 
18:03:45 crc kubenswrapper[4558]: I0120 18:03:45.531073 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5klh4\" (UniqueName: \"kubernetes.io/projected/828c25e0-1972-41a8-aab7-0e18d409b9e6-kube-api-access-5klh4\") pod \"dnsmasq-dnsmasq-84b9f45d47-lql2g\" (UID: \"828c25e0-1972-41a8-aab7-0e18d409b9e6\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-lql2g" Jan 20 18:03:45 crc kubenswrapper[4558]: I0120 18:03:45.661728 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-lql2g" Jan 20 18:03:46 crc kubenswrapper[4558]: I0120 18:03:46.043294 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-lql2g"] Jan 20 18:03:46 crc kubenswrapper[4558]: I0120 18:03:46.532357 4558 generic.go:334] "Generic (PLEG): container finished" podID="828c25e0-1972-41a8-aab7-0e18d409b9e6" containerID="c7a378fe1e040b5e9a8c3e98fd85815935a94432406ee473e45b5af1d8927ac1" exitCode=0 Jan 20 18:03:46 crc kubenswrapper[4558]: I0120 18:03:46.532488 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-lql2g" event={"ID":"828c25e0-1972-41a8-aab7-0e18d409b9e6","Type":"ContainerDied","Data":"c7a378fe1e040b5e9a8c3e98fd85815935a94432406ee473e45b5af1d8927ac1"} Jan 20 18:03:46 crc kubenswrapper[4558]: I0120 18:03:46.532758 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-lql2g" event={"ID":"828c25e0-1972-41a8-aab7-0e18d409b9e6","Type":"ContainerStarted","Data":"58a8af8dd7890976874e349ec11124dd626b59e003fb32c7c4922521fb714a69"} Jan 20 18:03:46 crc kubenswrapper[4558]: I0120 18:03:46.574355 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="21a4af65-13b6-4950-9d76-6b0ea77230b0" path="/var/lib/kubelet/pods/21a4af65-13b6-4950-9d76-6b0ea77230b0/volumes" Jan 20 18:03:46 crc kubenswrapper[4558]: I0120 18:03:46.574837 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d35adfac-32a6-46b1-94f5-521da5524bf2" path="/var/lib/kubelet/pods/d35adfac-32a6-46b1-94f5-521da5524bf2/volumes" Jan 20 18:03:47 crc kubenswrapper[4558]: I0120 18:03:47.545373 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-lql2g" event={"ID":"828c25e0-1972-41a8-aab7-0e18d409b9e6","Type":"ContainerStarted","Data":"6297f64c334a125bb7ee66ccac20dd9efc1d257340764e4551d5acdaa2d20fd4"} Jan 20 18:03:47 crc kubenswrapper[4558]: I0120 18:03:47.546254 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-lql2g" Jan 20 18:03:47 crc kubenswrapper[4558]: I0120 18:03:47.568439 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-lql2g" podStartSLOduration=2.56841825 podStartE2EDuration="2.56841825s" podCreationTimestamp="2026-01-20 18:03:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:03:47.562432616 +0000 UTC m=+4921.322770584" watchObservedRunningTime="2026-01-20 18:03:47.56841825 +0000 UTC m=+4921.328756217" Jan 20 18:03:52 crc kubenswrapper[4558]: I0120 18:03:52.973004 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-c5jlp"] Jan 20 18:03:52 crc kubenswrapper[4558]: I0120 18:03:52.976945 4558 kubelet.go:2431] 
"SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-c5jlp"] Jan 20 18:03:53 crc kubenswrapper[4558]: I0120 18:03:53.076388 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-v4gss"] Jan 20 18:03:53 crc kubenswrapper[4558]: I0120 18:03:53.077304 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-v4gss" Jan 20 18:03:53 crc kubenswrapper[4558]: I0120 18:03:53.078900 4558 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-k9ht8" Jan 20 18:03:53 crc kubenswrapper[4558]: I0120 18:03:53.079763 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Jan 20 18:03:53 crc kubenswrapper[4558]: I0120 18:03:53.079916 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Jan 20 18:03:53 crc kubenswrapper[4558]: I0120 18:03:53.080484 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Jan 20 18:03:53 crc kubenswrapper[4558]: I0120 18:03:53.087576 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-v4gss"] Jan 20 18:03:53 crc kubenswrapper[4558]: I0120 18:03:53.208260 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w5ndc\" (UniqueName: \"kubernetes.io/projected/7cc87951-8257-4ea5-9907-55cab9595fc1-kube-api-access-w5ndc\") pod \"crc-storage-crc-v4gss\" (UID: \"7cc87951-8257-4ea5-9907-55cab9595fc1\") " pod="crc-storage/crc-storage-crc-v4gss" Jan 20 18:03:53 crc kubenswrapper[4558]: I0120 18:03:53.208342 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/7cc87951-8257-4ea5-9907-55cab9595fc1-crc-storage\") pod \"crc-storage-crc-v4gss\" (UID: \"7cc87951-8257-4ea5-9907-55cab9595fc1\") " pod="crc-storage/crc-storage-crc-v4gss" Jan 20 18:03:53 crc kubenswrapper[4558]: I0120 18:03:53.208382 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/7cc87951-8257-4ea5-9907-55cab9595fc1-node-mnt\") pod \"crc-storage-crc-v4gss\" (UID: \"7cc87951-8257-4ea5-9907-55cab9595fc1\") " pod="crc-storage/crc-storage-crc-v4gss" Jan 20 18:03:53 crc kubenswrapper[4558]: I0120 18:03:53.309635 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w5ndc\" (UniqueName: \"kubernetes.io/projected/7cc87951-8257-4ea5-9907-55cab9595fc1-kube-api-access-w5ndc\") pod \"crc-storage-crc-v4gss\" (UID: \"7cc87951-8257-4ea5-9907-55cab9595fc1\") " pod="crc-storage/crc-storage-crc-v4gss" Jan 20 18:03:53 crc kubenswrapper[4558]: I0120 18:03:53.309825 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/7cc87951-8257-4ea5-9907-55cab9595fc1-crc-storage\") pod \"crc-storage-crc-v4gss\" (UID: \"7cc87951-8257-4ea5-9907-55cab9595fc1\") " pod="crc-storage/crc-storage-crc-v4gss" Jan 20 18:03:53 crc kubenswrapper[4558]: I0120 18:03:53.309965 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/7cc87951-8257-4ea5-9907-55cab9595fc1-node-mnt\") pod \"crc-storage-crc-v4gss\" (UID: \"7cc87951-8257-4ea5-9907-55cab9595fc1\") " pod="crc-storage/crc-storage-crc-v4gss" 
Jan 20 18:03:53 crc kubenswrapper[4558]: I0120 18:03:53.310281 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/7cc87951-8257-4ea5-9907-55cab9595fc1-node-mnt\") pod \"crc-storage-crc-v4gss\" (UID: \"7cc87951-8257-4ea5-9907-55cab9595fc1\") " pod="crc-storage/crc-storage-crc-v4gss" Jan 20 18:03:53 crc kubenswrapper[4558]: I0120 18:03:53.310624 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/7cc87951-8257-4ea5-9907-55cab9595fc1-crc-storage\") pod \"crc-storage-crc-v4gss\" (UID: \"7cc87951-8257-4ea5-9907-55cab9595fc1\") " pod="crc-storage/crc-storage-crc-v4gss" Jan 20 18:03:53 crc kubenswrapper[4558]: I0120 18:03:53.328216 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w5ndc\" (UniqueName: \"kubernetes.io/projected/7cc87951-8257-4ea5-9907-55cab9595fc1-kube-api-access-w5ndc\") pod \"crc-storage-crc-v4gss\" (UID: \"7cc87951-8257-4ea5-9907-55cab9595fc1\") " pod="crc-storage/crc-storage-crc-v4gss" Jan 20 18:03:53 crc kubenswrapper[4558]: I0120 18:03:53.392583 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-v4gss" Jan 20 18:03:53 crc kubenswrapper[4558]: I0120 18:03:53.566607 4558 scope.go:117] "RemoveContainer" containerID="6b02d31ec11fe5435af988e35303a6f66b5afa5ef952ce5f7cc4b3cd5f4d9497" Jan 20 18:03:53 crc kubenswrapper[4558]: E0120 18:03:53.567125 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:03:53 crc kubenswrapper[4558]: I0120 18:03:53.768730 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-v4gss"] Jan 20 18:03:54 crc kubenswrapper[4558]: I0120 18:03:54.576998 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1382485d-e2ea-474b-a0f5-3db36a8cf19c" path="/var/lib/kubelet/pods/1382485d-e2ea-474b-a0f5-3db36a8cf19c/volumes" Jan 20 18:03:54 crc kubenswrapper[4558]: I0120 18:03:54.614995 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-v4gss" event={"ID":"7cc87951-8257-4ea5-9907-55cab9595fc1","Type":"ContainerStarted","Data":"01b210cb61c80d24600fe03b823f4355f9b8b34f0d57df6d5dc8f492716ad30c"} Jan 20 18:03:54 crc kubenswrapper[4558]: I0120 18:03:54.615065 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-v4gss" event={"ID":"7cc87951-8257-4ea5-9907-55cab9595fc1","Type":"ContainerStarted","Data":"321b39af0058c3c697630414d2802e15d966bf08e0f82b12cdeb7e9266bfae32"} Jan 20 18:03:54 crc kubenswrapper[4558]: I0120 18:03:54.635064 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="crc-storage/crc-storage-crc-v4gss" podStartSLOduration=0.956885215 podStartE2EDuration="1.635046613s" podCreationTimestamp="2026-01-20 18:03:53 +0000 UTC" firstStartedPulling="2026-01-20 18:03:53.777197085 +0000 UTC m=+4927.537535052" lastFinishedPulling="2026-01-20 18:03:54.455358483 +0000 UTC m=+4928.215696450" observedRunningTime="2026-01-20 18:03:54.62748739 +0000 UTC m=+4928.387825356" watchObservedRunningTime="2026-01-20 
18:03:54.635046613 +0000 UTC m=+4928.395384580" Jan 20 18:03:55 crc kubenswrapper[4558]: I0120 18:03:55.627391 4558 generic.go:334] "Generic (PLEG): container finished" podID="7cc87951-8257-4ea5-9907-55cab9595fc1" containerID="01b210cb61c80d24600fe03b823f4355f9b8b34f0d57df6d5dc8f492716ad30c" exitCode=0 Jan 20 18:03:55 crc kubenswrapper[4558]: I0120 18:03:55.627521 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-v4gss" event={"ID":"7cc87951-8257-4ea5-9907-55cab9595fc1","Type":"ContainerDied","Data":"01b210cb61c80d24600fe03b823f4355f9b8b34f0d57df6d5dc8f492716ad30c"} Jan 20 18:03:55 crc kubenswrapper[4558]: I0120 18:03:55.663401 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-lql2g" Jan 20 18:03:55 crc kubenswrapper[4558]: I0120 18:03:55.705913 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-76b7c4d945-bk85s"] Jan 20 18:03:55 crc kubenswrapper[4558]: I0120 18:03:55.706139 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-76b7c4d945-bk85s" podUID="3e7ccbfc-455e-4369-90a8-58abb6cb42a5" containerName="dnsmasq-dns" containerID="cri-o://bb6a2d45bcc378b41b93c6cd0baaeefdb01ef66dedfa23d67d52c4eec6180c18" gracePeriod=10 Jan 20 18:03:56 crc kubenswrapper[4558]: I0120 18:03:56.440998 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-76b7c4d945-bk85s" Jan 20 18:03:56 crc kubenswrapper[4558]: I0120 18:03:56.560245 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-tls\" (UniqueName: \"kubernetes.io/configmap/3e7ccbfc-455e-4369-90a8-58abb6cb42a5-openstack-edpm-tls\") pod \"3e7ccbfc-455e-4369-90a8-58abb6cb42a5\" (UID: \"3e7ccbfc-455e-4369-90a8-58abb6cb42a5\") " Jan 20 18:03:56 crc kubenswrapper[4558]: I0120 18:03:56.560433 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/3e7ccbfc-455e-4369-90a8-58abb6cb42a5-dnsmasq-svc\") pod \"3e7ccbfc-455e-4369-90a8-58abb6cb42a5\" (UID: \"3e7ccbfc-455e-4369-90a8-58abb6cb42a5\") " Jan 20 18:03:56 crc kubenswrapper[4558]: I0120 18:03:56.560467 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e7ccbfc-455e-4369-90a8-58abb6cb42a5-config\") pod \"3e7ccbfc-455e-4369-90a8-58abb6cb42a5\" (UID: \"3e7ccbfc-455e-4369-90a8-58abb6cb42a5\") " Jan 20 18:03:56 crc kubenswrapper[4558]: I0120 18:03:56.560610 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wtjfg\" (UniqueName: \"kubernetes.io/projected/3e7ccbfc-455e-4369-90a8-58abb6cb42a5-kube-api-access-wtjfg\") pod \"3e7ccbfc-455e-4369-90a8-58abb6cb42a5\" (UID: \"3e7ccbfc-455e-4369-90a8-58abb6cb42a5\") " Jan 20 18:03:56 crc kubenswrapper[4558]: I0120 18:03:56.568580 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e7ccbfc-455e-4369-90a8-58abb6cb42a5-kube-api-access-wtjfg" (OuterVolumeSpecName: "kube-api-access-wtjfg") pod "3e7ccbfc-455e-4369-90a8-58abb6cb42a5" (UID: "3e7ccbfc-455e-4369-90a8-58abb6cb42a5"). InnerVolumeSpecName "kube-api-access-wtjfg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:03:56 crc kubenswrapper[4558]: I0120 18:03:56.598681 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3e7ccbfc-455e-4369-90a8-58abb6cb42a5-dnsmasq-svc" (OuterVolumeSpecName: "dnsmasq-svc") pod "3e7ccbfc-455e-4369-90a8-58abb6cb42a5" (UID: "3e7ccbfc-455e-4369-90a8-58abb6cb42a5"). InnerVolumeSpecName "dnsmasq-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:03:56 crc kubenswrapper[4558]: I0120 18:03:56.601436 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3e7ccbfc-455e-4369-90a8-58abb6cb42a5-config" (OuterVolumeSpecName: "config") pod "3e7ccbfc-455e-4369-90a8-58abb6cb42a5" (UID: "3e7ccbfc-455e-4369-90a8-58abb6cb42a5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:03:56 crc kubenswrapper[4558]: I0120 18:03:56.612014 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3e7ccbfc-455e-4369-90a8-58abb6cb42a5-openstack-edpm-tls" (OuterVolumeSpecName: "openstack-edpm-tls") pod "3e7ccbfc-455e-4369-90a8-58abb6cb42a5" (UID: "3e7ccbfc-455e-4369-90a8-58abb6cb42a5"). InnerVolumeSpecName "openstack-edpm-tls". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:03:56 crc kubenswrapper[4558]: I0120 18:03:56.642828 4558 generic.go:334] "Generic (PLEG): container finished" podID="3e7ccbfc-455e-4369-90a8-58abb6cb42a5" containerID="bb6a2d45bcc378b41b93c6cd0baaeefdb01ef66dedfa23d67d52c4eec6180c18" exitCode=0 Jan 20 18:03:56 crc kubenswrapper[4558]: I0120 18:03:56.642973 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-76b7c4d945-bk85s" event={"ID":"3e7ccbfc-455e-4369-90a8-58abb6cb42a5","Type":"ContainerDied","Data":"bb6a2d45bcc378b41b93c6cd0baaeefdb01ef66dedfa23d67d52c4eec6180c18"} Jan 20 18:03:56 crc kubenswrapper[4558]: I0120 18:03:56.643355 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-76b7c4d945-bk85s" event={"ID":"3e7ccbfc-455e-4369-90a8-58abb6cb42a5","Type":"ContainerDied","Data":"42c2507dfa037b2d8af38347104b0e3f95a95daed639b5e33f653781eca98369"} Jan 20 18:03:56 crc kubenswrapper[4558]: I0120 18:03:56.643080 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-76b7c4d945-bk85s" Jan 20 18:03:56 crc kubenswrapper[4558]: I0120 18:03:56.643403 4558 scope.go:117] "RemoveContainer" containerID="bb6a2d45bcc378b41b93c6cd0baaeefdb01ef66dedfa23d67d52c4eec6180c18" Jan 20 18:03:56 crc kubenswrapper[4558]: I0120 18:03:56.665566 4558 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-tls\" (UniqueName: \"kubernetes.io/configmap/3e7ccbfc-455e-4369-90a8-58abb6cb42a5-openstack-edpm-tls\") on node \"crc\" DevicePath \"\"" Jan 20 18:03:56 crc kubenswrapper[4558]: I0120 18:03:56.665598 4558 reconciler_common.go:293] "Volume detached for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/3e7ccbfc-455e-4369-90a8-58abb6cb42a5-dnsmasq-svc\") on node \"crc\" DevicePath \"\"" Jan 20 18:03:56 crc kubenswrapper[4558]: I0120 18:03:56.665612 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3e7ccbfc-455e-4369-90a8-58abb6cb42a5-config\") on node \"crc\" DevicePath \"\"" Jan 20 18:03:56 crc kubenswrapper[4558]: I0120 18:03:56.665625 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wtjfg\" (UniqueName: \"kubernetes.io/projected/3e7ccbfc-455e-4369-90a8-58abb6cb42a5-kube-api-access-wtjfg\") on node \"crc\" DevicePath \"\"" Jan 20 18:03:56 crc kubenswrapper[4558]: I0120 18:03:56.670947 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-76b7c4d945-bk85s"] Jan 20 18:03:56 crc kubenswrapper[4558]: I0120 18:03:56.675385 4558 scope.go:117] "RemoveContainer" containerID="a46ddbe8de3c509f4604edf587bfde0299a36b9402aa8cd8d94edbbc31e569ce" Jan 20 18:03:56 crc kubenswrapper[4558]: I0120 18:03:56.681356 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-76b7c4d945-bk85s"] Jan 20 18:03:56 crc kubenswrapper[4558]: I0120 18:03:56.706883 4558 scope.go:117] "RemoveContainer" containerID="bb6a2d45bcc378b41b93c6cd0baaeefdb01ef66dedfa23d67d52c4eec6180c18" Jan 20 18:03:56 crc kubenswrapper[4558]: E0120 18:03:56.707520 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bb6a2d45bcc378b41b93c6cd0baaeefdb01ef66dedfa23d67d52c4eec6180c18\": container with ID starting with bb6a2d45bcc378b41b93c6cd0baaeefdb01ef66dedfa23d67d52c4eec6180c18 not found: ID does not exist" containerID="bb6a2d45bcc378b41b93c6cd0baaeefdb01ef66dedfa23d67d52c4eec6180c18" Jan 20 18:03:56 crc kubenswrapper[4558]: I0120 18:03:56.707675 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bb6a2d45bcc378b41b93c6cd0baaeefdb01ef66dedfa23d67d52c4eec6180c18"} err="failed to get container status \"bb6a2d45bcc378b41b93c6cd0baaeefdb01ef66dedfa23d67d52c4eec6180c18\": rpc error: code = NotFound desc = could not find container \"bb6a2d45bcc378b41b93c6cd0baaeefdb01ef66dedfa23d67d52c4eec6180c18\": container with ID starting with bb6a2d45bcc378b41b93c6cd0baaeefdb01ef66dedfa23d67d52c4eec6180c18 not found: ID does not exist" Jan 20 18:03:56 crc kubenswrapper[4558]: I0120 18:03:56.707786 4558 scope.go:117] "RemoveContainer" containerID="a46ddbe8de3c509f4604edf587bfde0299a36b9402aa8cd8d94edbbc31e569ce" Jan 20 18:03:56 crc kubenswrapper[4558]: E0120 18:03:56.708228 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a46ddbe8de3c509f4604edf587bfde0299a36b9402aa8cd8d94edbbc31e569ce\": 
container with ID starting with a46ddbe8de3c509f4604edf587bfde0299a36b9402aa8cd8d94edbbc31e569ce not found: ID does not exist" containerID="a46ddbe8de3c509f4604edf587bfde0299a36b9402aa8cd8d94edbbc31e569ce" Jan 20 18:03:56 crc kubenswrapper[4558]: I0120 18:03:56.708320 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a46ddbe8de3c509f4604edf587bfde0299a36b9402aa8cd8d94edbbc31e569ce"} err="failed to get container status \"a46ddbe8de3c509f4604edf587bfde0299a36b9402aa8cd8d94edbbc31e569ce\": rpc error: code = NotFound desc = could not find container \"a46ddbe8de3c509f4604edf587bfde0299a36b9402aa8cd8d94edbbc31e569ce\": container with ID starting with a46ddbe8de3c509f4604edf587bfde0299a36b9402aa8cd8d94edbbc31e569ce not found: ID does not exist" Jan 20 18:03:56 crc kubenswrapper[4558]: I0120 18:03:56.950743 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-v4gss" Jan 20 18:03:57 crc kubenswrapper[4558]: I0120 18:03:57.072415 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/7cc87951-8257-4ea5-9907-55cab9595fc1-node-mnt\") pod \"7cc87951-8257-4ea5-9907-55cab9595fc1\" (UID: \"7cc87951-8257-4ea5-9907-55cab9595fc1\") " Jan 20 18:03:57 crc kubenswrapper[4558]: I0120 18:03:57.072498 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w5ndc\" (UniqueName: \"kubernetes.io/projected/7cc87951-8257-4ea5-9907-55cab9595fc1-kube-api-access-w5ndc\") pod \"7cc87951-8257-4ea5-9907-55cab9595fc1\" (UID: \"7cc87951-8257-4ea5-9907-55cab9595fc1\") " Jan 20 18:03:57 crc kubenswrapper[4558]: I0120 18:03:57.072572 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/7cc87951-8257-4ea5-9907-55cab9595fc1-crc-storage\") pod \"7cc87951-8257-4ea5-9907-55cab9595fc1\" (UID: \"7cc87951-8257-4ea5-9907-55cab9595fc1\") " Jan 20 18:03:57 crc kubenswrapper[4558]: I0120 18:03:57.072567 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7cc87951-8257-4ea5-9907-55cab9595fc1-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "7cc87951-8257-4ea5-9907-55cab9595fc1" (UID: "7cc87951-8257-4ea5-9907-55cab9595fc1"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 18:03:57 crc kubenswrapper[4558]: I0120 18:03:57.073067 4558 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/7cc87951-8257-4ea5-9907-55cab9595fc1-node-mnt\") on node \"crc\" DevicePath \"\"" Jan 20 18:03:57 crc kubenswrapper[4558]: I0120 18:03:57.078219 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7cc87951-8257-4ea5-9907-55cab9595fc1-kube-api-access-w5ndc" (OuterVolumeSpecName: "kube-api-access-w5ndc") pod "7cc87951-8257-4ea5-9907-55cab9595fc1" (UID: "7cc87951-8257-4ea5-9907-55cab9595fc1"). InnerVolumeSpecName "kube-api-access-w5ndc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:03:57 crc kubenswrapper[4558]: I0120 18:03:57.090993 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7cc87951-8257-4ea5-9907-55cab9595fc1-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "7cc87951-8257-4ea5-9907-55cab9595fc1" (UID: "7cc87951-8257-4ea5-9907-55cab9595fc1"). 
InnerVolumeSpecName "crc-storage". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:03:57 crc kubenswrapper[4558]: I0120 18:03:57.174571 4558 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/7cc87951-8257-4ea5-9907-55cab9595fc1-crc-storage\") on node \"crc\" DevicePath \"\"" Jan 20 18:03:57 crc kubenswrapper[4558]: I0120 18:03:57.174604 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w5ndc\" (UniqueName: \"kubernetes.io/projected/7cc87951-8257-4ea5-9907-55cab9595fc1-kube-api-access-w5ndc\") on node \"crc\" DevicePath \"\"" Jan 20 18:03:57 crc kubenswrapper[4558]: I0120 18:03:57.657437 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-v4gss" Jan 20 18:03:57 crc kubenswrapper[4558]: I0120 18:03:57.657470 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-v4gss" event={"ID":"7cc87951-8257-4ea5-9907-55cab9595fc1","Type":"ContainerDied","Data":"321b39af0058c3c697630414d2802e15d966bf08e0f82b12cdeb7e9266bfae32"} Jan 20 18:03:57 crc kubenswrapper[4558]: I0120 18:03:57.657539 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="321b39af0058c3c697630414d2802e15d966bf08e0f82b12cdeb7e9266bfae32" Jan 20 18:03:58 crc kubenswrapper[4558]: I0120 18:03:58.573703 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3e7ccbfc-455e-4369-90a8-58abb6cb42a5" path="/var/lib/kubelet/pods/3e7ccbfc-455e-4369-90a8-58abb6cb42a5/volumes" Jan 20 18:03:59 crc kubenswrapper[4558]: I0120 18:03:59.968186 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-v4gss"] Jan 20 18:03:59 crc kubenswrapper[4558]: I0120 18:03:59.972140 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-v4gss"] Jan 20 18:04:00 crc kubenswrapper[4558]: I0120 18:04:00.104940 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-dw4hb"] Jan 20 18:04:00 crc kubenswrapper[4558]: E0120 18:04:00.105513 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7cc87951-8257-4ea5-9907-55cab9595fc1" containerName="storage" Jan 20 18:04:00 crc kubenswrapper[4558]: I0120 18:04:00.105592 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7cc87951-8257-4ea5-9907-55cab9595fc1" containerName="storage" Jan 20 18:04:00 crc kubenswrapper[4558]: E0120 18:04:00.105680 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e7ccbfc-455e-4369-90a8-58abb6cb42a5" containerName="init" Jan 20 18:04:00 crc kubenswrapper[4558]: I0120 18:04:00.105733 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e7ccbfc-455e-4369-90a8-58abb6cb42a5" containerName="init" Jan 20 18:04:00 crc kubenswrapper[4558]: E0120 18:04:00.105791 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e7ccbfc-455e-4369-90a8-58abb6cb42a5" containerName="dnsmasq-dns" Jan 20 18:04:00 crc kubenswrapper[4558]: I0120 18:04:00.105836 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e7ccbfc-455e-4369-90a8-58abb6cb42a5" containerName="dnsmasq-dns" Jan 20 18:04:00 crc kubenswrapper[4558]: I0120 18:04:00.106039 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="7cc87951-8257-4ea5-9907-55cab9595fc1" containerName="storage" Jan 20 18:04:00 crc kubenswrapper[4558]: I0120 18:04:00.106094 4558 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="3e7ccbfc-455e-4369-90a8-58abb6cb42a5" containerName="dnsmasq-dns" Jan 20 18:04:00 crc kubenswrapper[4558]: I0120 18:04:00.106694 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-dw4hb" Jan 20 18:04:00 crc kubenswrapper[4558]: I0120 18:04:00.108515 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Jan 20 18:04:00 crc kubenswrapper[4558]: I0120 18:04:00.108644 4558 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-k9ht8" Jan 20 18:04:00 crc kubenswrapper[4558]: I0120 18:04:00.108782 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Jan 20 18:04:00 crc kubenswrapper[4558]: I0120 18:04:00.109646 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-dw4hb"] Jan 20 18:04:00 crc kubenswrapper[4558]: I0120 18:04:00.109680 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Jan 20 18:04:00 crc kubenswrapper[4558]: I0120 18:04:00.114123 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/9879d8af-ef37-40f5-8014-28a5eb53c952-crc-storage\") pod \"crc-storage-crc-dw4hb\" (UID: \"9879d8af-ef37-40f5-8014-28a5eb53c952\") " pod="crc-storage/crc-storage-crc-dw4hb" Jan 20 18:04:00 crc kubenswrapper[4558]: I0120 18:04:00.114197 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/9879d8af-ef37-40f5-8014-28a5eb53c952-node-mnt\") pod \"crc-storage-crc-dw4hb\" (UID: \"9879d8af-ef37-40f5-8014-28a5eb53c952\") " pod="crc-storage/crc-storage-crc-dw4hb" Jan 20 18:04:00 crc kubenswrapper[4558]: I0120 18:04:00.114235 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8mbs9\" (UniqueName: \"kubernetes.io/projected/9879d8af-ef37-40f5-8014-28a5eb53c952-kube-api-access-8mbs9\") pod \"crc-storage-crc-dw4hb\" (UID: \"9879d8af-ef37-40f5-8014-28a5eb53c952\") " pod="crc-storage/crc-storage-crc-dw4hb" Jan 20 18:04:00 crc kubenswrapper[4558]: I0120 18:04:00.215086 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8mbs9\" (UniqueName: \"kubernetes.io/projected/9879d8af-ef37-40f5-8014-28a5eb53c952-kube-api-access-8mbs9\") pod \"crc-storage-crc-dw4hb\" (UID: \"9879d8af-ef37-40f5-8014-28a5eb53c952\") " pod="crc-storage/crc-storage-crc-dw4hb" Jan 20 18:04:00 crc kubenswrapper[4558]: I0120 18:04:00.215207 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/9879d8af-ef37-40f5-8014-28a5eb53c952-crc-storage\") pod \"crc-storage-crc-dw4hb\" (UID: \"9879d8af-ef37-40f5-8014-28a5eb53c952\") " pod="crc-storage/crc-storage-crc-dw4hb" Jan 20 18:04:00 crc kubenswrapper[4558]: I0120 18:04:00.215250 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/9879d8af-ef37-40f5-8014-28a5eb53c952-node-mnt\") pod \"crc-storage-crc-dw4hb\" (UID: \"9879d8af-ef37-40f5-8014-28a5eb53c952\") " pod="crc-storage/crc-storage-crc-dw4hb" Jan 20 18:04:00 crc kubenswrapper[4558]: I0120 18:04:00.215647 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" 
(UniqueName: \"kubernetes.io/host-path/9879d8af-ef37-40f5-8014-28a5eb53c952-node-mnt\") pod \"crc-storage-crc-dw4hb\" (UID: \"9879d8af-ef37-40f5-8014-28a5eb53c952\") " pod="crc-storage/crc-storage-crc-dw4hb" Jan 20 18:04:00 crc kubenswrapper[4558]: I0120 18:04:00.216152 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/9879d8af-ef37-40f5-8014-28a5eb53c952-crc-storage\") pod \"crc-storage-crc-dw4hb\" (UID: \"9879d8af-ef37-40f5-8014-28a5eb53c952\") " pod="crc-storage/crc-storage-crc-dw4hb" Jan 20 18:04:00 crc kubenswrapper[4558]: I0120 18:04:00.234209 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8mbs9\" (UniqueName: \"kubernetes.io/projected/9879d8af-ef37-40f5-8014-28a5eb53c952-kube-api-access-8mbs9\") pod \"crc-storage-crc-dw4hb\" (UID: \"9879d8af-ef37-40f5-8014-28a5eb53c952\") " pod="crc-storage/crc-storage-crc-dw4hb" Jan 20 18:04:00 crc kubenswrapper[4558]: I0120 18:04:00.420835 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-dw4hb" Jan 20 18:04:00 crc kubenswrapper[4558]: I0120 18:04:00.576337 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7cc87951-8257-4ea5-9907-55cab9595fc1" path="/var/lib/kubelet/pods/7cc87951-8257-4ea5-9907-55cab9595fc1/volumes" Jan 20 18:04:00 crc kubenswrapper[4558]: I0120 18:04:00.808737 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-dw4hb"] Jan 20 18:04:00 crc kubenswrapper[4558]: W0120 18:04:00.811457 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9879d8af_ef37_40f5_8014_28a5eb53c952.slice/crio-68859b1b4bfa6ac23320feb8afd5dbdeeb60ca59a323554467bb21546a50e927 WatchSource:0}: Error finding container 68859b1b4bfa6ac23320feb8afd5dbdeeb60ca59a323554467bb21546a50e927: Status 404 returned error can't find the container with id 68859b1b4bfa6ac23320feb8afd5dbdeeb60ca59a323554467bb21546a50e927 Jan 20 18:04:01 crc kubenswrapper[4558]: I0120 18:04:01.698540 4558 generic.go:334] "Generic (PLEG): container finished" podID="9879d8af-ef37-40f5-8014-28a5eb53c952" containerID="91cf12afee15a6239013ae4102301de64ba2bfeb10aa3fdc172976b1b986c0ca" exitCode=0 Jan 20 18:04:01 crc kubenswrapper[4558]: I0120 18:04:01.698821 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-dw4hb" event={"ID":"9879d8af-ef37-40f5-8014-28a5eb53c952","Type":"ContainerDied","Data":"91cf12afee15a6239013ae4102301de64ba2bfeb10aa3fdc172976b1b986c0ca"} Jan 20 18:04:01 crc kubenswrapper[4558]: I0120 18:04:01.698871 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-dw4hb" event={"ID":"9879d8af-ef37-40f5-8014-28a5eb53c952","Type":"ContainerStarted","Data":"68859b1b4bfa6ac23320feb8afd5dbdeeb60ca59a323554467bb21546a50e927"} Jan 20 18:04:02 crc kubenswrapper[4558]: I0120 18:04:02.958758 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-dw4hb" Jan 20 18:04:03 crc kubenswrapper[4558]: I0120 18:04:03.059576 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/9879d8af-ef37-40f5-8014-28a5eb53c952-node-mnt\") pod \"9879d8af-ef37-40f5-8014-28a5eb53c952\" (UID: \"9879d8af-ef37-40f5-8014-28a5eb53c952\") " Jan 20 18:04:03 crc kubenswrapper[4558]: I0120 18:04:03.059640 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8mbs9\" (UniqueName: \"kubernetes.io/projected/9879d8af-ef37-40f5-8014-28a5eb53c952-kube-api-access-8mbs9\") pod \"9879d8af-ef37-40f5-8014-28a5eb53c952\" (UID: \"9879d8af-ef37-40f5-8014-28a5eb53c952\") " Jan 20 18:04:03 crc kubenswrapper[4558]: I0120 18:04:03.059687 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9879d8af-ef37-40f5-8014-28a5eb53c952-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "9879d8af-ef37-40f5-8014-28a5eb53c952" (UID: "9879d8af-ef37-40f5-8014-28a5eb53c952"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 18:04:03 crc kubenswrapper[4558]: I0120 18:04:03.059990 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/9879d8af-ef37-40f5-8014-28a5eb53c952-crc-storage\") pod \"9879d8af-ef37-40f5-8014-28a5eb53c952\" (UID: \"9879d8af-ef37-40f5-8014-28a5eb53c952\") " Jan 20 18:04:03 crc kubenswrapper[4558]: I0120 18:04:03.060461 4558 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/9879d8af-ef37-40f5-8014-28a5eb53c952-node-mnt\") on node \"crc\" DevicePath \"\"" Jan 20 18:04:03 crc kubenswrapper[4558]: I0120 18:04:03.065965 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9879d8af-ef37-40f5-8014-28a5eb53c952-kube-api-access-8mbs9" (OuterVolumeSpecName: "kube-api-access-8mbs9") pod "9879d8af-ef37-40f5-8014-28a5eb53c952" (UID: "9879d8af-ef37-40f5-8014-28a5eb53c952"). InnerVolumeSpecName "kube-api-access-8mbs9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:04:03 crc kubenswrapper[4558]: I0120 18:04:03.079508 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9879d8af-ef37-40f5-8014-28a5eb53c952-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "9879d8af-ef37-40f5-8014-28a5eb53c952" (UID: "9879d8af-ef37-40f5-8014-28a5eb53c952"). InnerVolumeSpecName "crc-storage". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:04:03 crc kubenswrapper[4558]: I0120 18:04:03.161523 4558 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/9879d8af-ef37-40f5-8014-28a5eb53c952-crc-storage\") on node \"crc\" DevicePath \"\"" Jan 20 18:04:03 crc kubenswrapper[4558]: I0120 18:04:03.161557 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8mbs9\" (UniqueName: \"kubernetes.io/projected/9879d8af-ef37-40f5-8014-28a5eb53c952-kube-api-access-8mbs9\") on node \"crc\" DevicePath \"\"" Jan 20 18:04:03 crc kubenswrapper[4558]: I0120 18:04:03.717423 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-dw4hb" event={"ID":"9879d8af-ef37-40f5-8014-28a5eb53c952","Type":"ContainerDied","Data":"68859b1b4bfa6ac23320feb8afd5dbdeeb60ca59a323554467bb21546a50e927"} Jan 20 18:04:03 crc kubenswrapper[4558]: I0120 18:04:03.717480 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="68859b1b4bfa6ac23320feb8afd5dbdeeb60ca59a323554467bb21546a50e927" Jan 20 18:04:03 crc kubenswrapper[4558]: I0120 18:04:03.717475 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-dw4hb" Jan 20 18:04:05 crc kubenswrapper[4558]: I0120 18:04:05.993706 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-v78fc"] Jan 20 18:04:05 crc kubenswrapper[4558]: E0120 18:04:05.994395 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9879d8af-ef37-40f5-8014-28a5eb53c952" containerName="storage" Jan 20 18:04:05 crc kubenswrapper[4558]: I0120 18:04:05.994409 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9879d8af-ef37-40f5-8014-28a5eb53c952" containerName="storage" Jan 20 18:04:05 crc kubenswrapper[4558]: I0120 18:04:05.994557 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="9879d8af-ef37-40f5-8014-28a5eb53c952" containerName="storage" Jan 20 18:04:05 crc kubenswrapper[4558]: I0120 18:04:05.995343 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-v78fc" Jan 20 18:04:05 crc kubenswrapper[4558]: I0120 18:04:05.997458 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"edpm-compute-no-nodes" Jan 20 18:04:06 crc kubenswrapper[4558]: I0120 18:04:06.004853 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-v78fc"] Jan 20 18:04:06 crc kubenswrapper[4558]: I0120 18:04:06.109790 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2czk9\" (UniqueName: \"kubernetes.io/projected/bd900914-8370-4987-bd89-cc9608d5d01b-kube-api-access-2czk9\") pod \"dnsmasq-dnsmasq-64864b6d57-v78fc\" (UID: \"bd900914-8370-4987-bd89-cc9608d5d01b\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-v78fc" Jan 20 18:04:06 crc kubenswrapper[4558]: I0120 18:04:06.110080 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/bd900914-8370-4987-bd89-cc9608d5d01b-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-64864b6d57-v78fc\" (UID: \"bd900914-8370-4987-bd89-cc9608d5d01b\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-v78fc" Jan 20 18:04:06 crc kubenswrapper[4558]: I0120 18:04:06.110199 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/configmap/bd900914-8370-4987-bd89-cc9608d5d01b-edpm-compute-no-nodes\") pod \"dnsmasq-dnsmasq-64864b6d57-v78fc\" (UID: \"bd900914-8370-4987-bd89-cc9608d5d01b\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-v78fc" Jan 20 18:04:06 crc kubenswrapper[4558]: I0120 18:04:06.110253 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bd900914-8370-4987-bd89-cc9608d5d01b-config\") pod \"dnsmasq-dnsmasq-64864b6d57-v78fc\" (UID: \"bd900914-8370-4987-bd89-cc9608d5d01b\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-v78fc" Jan 20 18:04:06 crc kubenswrapper[4558]: I0120 18:04:06.212417 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/bd900914-8370-4987-bd89-cc9608d5d01b-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-64864b6d57-v78fc\" (UID: \"bd900914-8370-4987-bd89-cc9608d5d01b\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-v78fc" Jan 20 18:04:06 crc kubenswrapper[4558]: I0120 18:04:06.212492 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/configmap/bd900914-8370-4987-bd89-cc9608d5d01b-edpm-compute-no-nodes\") pod \"dnsmasq-dnsmasq-64864b6d57-v78fc\" (UID: \"bd900914-8370-4987-bd89-cc9608d5d01b\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-v78fc" Jan 20 18:04:06 crc kubenswrapper[4558]: I0120 18:04:06.212527 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bd900914-8370-4987-bd89-cc9608d5d01b-config\") pod \"dnsmasq-dnsmasq-64864b6d57-v78fc\" (UID: \"bd900914-8370-4987-bd89-cc9608d5d01b\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-v78fc" Jan 20 18:04:06 crc kubenswrapper[4558]: I0120 18:04:06.212576 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2czk9\" 
(UniqueName: \"kubernetes.io/projected/bd900914-8370-4987-bd89-cc9608d5d01b-kube-api-access-2czk9\") pod \"dnsmasq-dnsmasq-64864b6d57-v78fc\" (UID: \"bd900914-8370-4987-bd89-cc9608d5d01b\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-v78fc" Jan 20 18:04:06 crc kubenswrapper[4558]: I0120 18:04:06.213813 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bd900914-8370-4987-bd89-cc9608d5d01b-config\") pod \"dnsmasq-dnsmasq-64864b6d57-v78fc\" (UID: \"bd900914-8370-4987-bd89-cc9608d5d01b\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-v78fc" Jan 20 18:04:06 crc kubenswrapper[4558]: I0120 18:04:06.213845 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/bd900914-8370-4987-bd89-cc9608d5d01b-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-64864b6d57-v78fc\" (UID: \"bd900914-8370-4987-bd89-cc9608d5d01b\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-v78fc" Jan 20 18:04:06 crc kubenswrapper[4558]: I0120 18:04:06.213856 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/configmap/bd900914-8370-4987-bd89-cc9608d5d01b-edpm-compute-no-nodes\") pod \"dnsmasq-dnsmasq-64864b6d57-v78fc\" (UID: \"bd900914-8370-4987-bd89-cc9608d5d01b\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-v78fc" Jan 20 18:04:06 crc kubenswrapper[4558]: I0120 18:04:06.395240 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2czk9\" (UniqueName: \"kubernetes.io/projected/bd900914-8370-4987-bd89-cc9608d5d01b-kube-api-access-2czk9\") pod \"dnsmasq-dnsmasq-64864b6d57-v78fc\" (UID: \"bd900914-8370-4987-bd89-cc9608d5d01b\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-v78fc" Jan 20 18:04:06 crc kubenswrapper[4558]: I0120 18:04:06.612494 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-v78fc" Jan 20 18:04:07 crc kubenswrapper[4558]: I0120 18:04:07.006604 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-v78fc"] Jan 20 18:04:07 crc kubenswrapper[4558]: I0120 18:04:07.566073 4558 scope.go:117] "RemoveContainer" containerID="6b02d31ec11fe5435af988e35303a6f66b5afa5ef952ce5f7cc4b3cd5f4d9497" Jan 20 18:04:07 crc kubenswrapper[4558]: E0120 18:04:07.566736 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:04:07 crc kubenswrapper[4558]: I0120 18:04:07.753491 4558 generic.go:334] "Generic (PLEG): container finished" podID="bd900914-8370-4987-bd89-cc9608d5d01b" containerID="156f065bcf6a5f8adb31adccc30b5afd280bc7cc62eae9c43f5f1b8b3833581f" exitCode=0 Jan 20 18:04:07 crc kubenswrapper[4558]: I0120 18:04:07.753556 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-v78fc" event={"ID":"bd900914-8370-4987-bd89-cc9608d5d01b","Type":"ContainerDied","Data":"156f065bcf6a5f8adb31adccc30b5afd280bc7cc62eae9c43f5f1b8b3833581f"} Jan 20 18:04:07 crc kubenswrapper[4558]: I0120 18:04:07.753595 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-v78fc" event={"ID":"bd900914-8370-4987-bd89-cc9608d5d01b","Type":"ContainerStarted","Data":"25ecc8b655769dde8f48a50be41f2b4714a03216001496ff858b3b62fa399bea"} Jan 20 18:04:08 crc kubenswrapper[4558]: I0120 18:04:08.768277 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-v78fc" event={"ID":"bd900914-8370-4987-bd89-cc9608d5d01b","Type":"ContainerStarted","Data":"aeddbdc5fc86255ca17886304a7754ff9cd3918d4e51744d680e59862e309c07"} Jan 20 18:04:08 crc kubenswrapper[4558]: I0120 18:04:08.769011 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-v78fc" Jan 20 18:04:08 crc kubenswrapper[4558]: I0120 18:04:08.789990 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-v78fc" podStartSLOduration=3.789971338 podStartE2EDuration="3.789971338s" podCreationTimestamp="2026-01-20 18:04:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:04:08.784784536 +0000 UTC m=+4942.545122503" watchObservedRunningTime="2026-01-20 18:04:08.789971338 +0000 UTC m=+4942.550309306" Jan 20 18:04:16 crc kubenswrapper[4558]: I0120 18:04:16.614281 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-v78fc" Jan 20 18:04:16 crc kubenswrapper[4558]: I0120 18:04:16.701611 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-lql2g"] Jan 20 18:04:16 crc kubenswrapper[4558]: I0120 18:04:16.701914 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-lql2g" 
podUID="828c25e0-1972-41a8-aab7-0e18d409b9e6" containerName="dnsmasq-dns" containerID="cri-o://6297f64c334a125bb7ee66ccac20dd9efc1d257340764e4551d5acdaa2d20fd4" gracePeriod=10 Jan 20 18:04:16 crc kubenswrapper[4558]: I0120 18:04:16.841115 4558 generic.go:334] "Generic (PLEG): container finished" podID="828c25e0-1972-41a8-aab7-0e18d409b9e6" containerID="6297f64c334a125bb7ee66ccac20dd9efc1d257340764e4551d5acdaa2d20fd4" exitCode=0 Jan 20 18:04:16 crc kubenswrapper[4558]: I0120 18:04:16.841183 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-lql2g" event={"ID":"828c25e0-1972-41a8-aab7-0e18d409b9e6","Type":"ContainerDied","Data":"6297f64c334a125bb7ee66ccac20dd9efc1d257340764e4551d5acdaa2d20fd4"} Jan 20 18:04:16 crc kubenswrapper[4558]: I0120 18:04:16.998068 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/download-cache-edpm-compute-no-nodes-edpm-compute-no-nodesm9d27"] Jan 20 18:04:16 crc kubenswrapper[4558]: I0120 18:04:16.999038 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/download-cache-edpm-compute-no-nodes-edpm-compute-no-nodesm9d27" Jan 20 18:04:17 crc kubenswrapper[4558]: I0120 18:04:17.001076 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 18:04:17 crc kubenswrapper[4558]: I0120 18:04:17.001183 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"edpm-compute-no-nodes-dockercfg-tljmj" Jan 20 18:04:17 crc kubenswrapper[4558]: I0120 18:04:17.001288 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-edpm-compute-no-nodes" Jan 20 18:04:17 crc kubenswrapper[4558]: I0120 18:04:17.001399 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 18:04:17 crc kubenswrapper[4558]: I0120 18:04:17.016439 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/download-cache-edpm-compute-no-nodes-edpm-compute-no-nodesm9d27"] Jan 20 18:04:17 crc kubenswrapper[4558]: I0120 18:04:17.069915 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-lql2g" Jan 20 18:04:17 crc kubenswrapper[4558]: I0120 18:04:17.098829 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/828c25e0-1972-41a8-aab7-0e18d409b9e6-config\") pod \"828c25e0-1972-41a8-aab7-0e18d409b9e6\" (UID: \"828c25e0-1972-41a8-aab7-0e18d409b9e6\") " Jan 20 18:04:17 crc kubenswrapper[4558]: I0120 18:04:17.099007 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5klh4\" (UniqueName: \"kubernetes.io/projected/828c25e0-1972-41a8-aab7-0e18d409b9e6-kube-api-access-5klh4\") pod \"828c25e0-1972-41a8-aab7-0e18d409b9e6\" (UID: \"828c25e0-1972-41a8-aab7-0e18d409b9e6\") " Jan 20 18:04:17 crc kubenswrapper[4558]: I0120 18:04:17.099194 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/828c25e0-1972-41a8-aab7-0e18d409b9e6-dnsmasq-svc\") pod \"828c25e0-1972-41a8-aab7-0e18d409b9e6\" (UID: \"828c25e0-1972-41a8-aab7-0e18d409b9e6\") " Jan 20 18:04:17 crc kubenswrapper[4558]: I0120 18:04:17.099593 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/1897fe4b-a1b9-4dc9-b07d-cadf99938fab-ssh-key-edpm-compute-no-nodes\") pod \"download-cache-edpm-compute-no-nodes-edpm-compute-no-nodesm9d27\" (UID: \"1897fe4b-a1b9-4dc9-b07d-cadf99938fab\") " pod="openstack-kuttl-tests/download-cache-edpm-compute-no-nodes-edpm-compute-no-nodesm9d27" Jan 20 18:04:17 crc kubenswrapper[4558]: I0120 18:04:17.099664 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h4d4l\" (UniqueName: \"kubernetes.io/projected/1897fe4b-a1b9-4dc9-b07d-cadf99938fab-kube-api-access-h4d4l\") pod \"download-cache-edpm-compute-no-nodes-edpm-compute-no-nodesm9d27\" (UID: \"1897fe4b-a1b9-4dc9-b07d-cadf99938fab\") " pod="openstack-kuttl-tests/download-cache-edpm-compute-no-nodes-edpm-compute-no-nodesm9d27" Jan 20 18:04:17 crc kubenswrapper[4558]: I0120 18:04:17.099751 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1897fe4b-a1b9-4dc9-b07d-cadf99938fab-inventory\") pod \"download-cache-edpm-compute-no-nodes-edpm-compute-no-nodesm9d27\" (UID: \"1897fe4b-a1b9-4dc9-b07d-cadf99938fab\") " pod="openstack-kuttl-tests/download-cache-edpm-compute-no-nodes-edpm-compute-no-nodesm9d27" Jan 20 18:04:17 crc kubenswrapper[4558]: I0120 18:04:17.106389 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/828c25e0-1972-41a8-aab7-0e18d409b9e6-kube-api-access-5klh4" (OuterVolumeSpecName: "kube-api-access-5klh4") pod "828c25e0-1972-41a8-aab7-0e18d409b9e6" (UID: "828c25e0-1972-41a8-aab7-0e18d409b9e6"). InnerVolumeSpecName "kube-api-access-5klh4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:04:17 crc kubenswrapper[4558]: I0120 18:04:17.152524 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/828c25e0-1972-41a8-aab7-0e18d409b9e6-config" (OuterVolumeSpecName: "config") pod "828c25e0-1972-41a8-aab7-0e18d409b9e6" (UID: "828c25e0-1972-41a8-aab7-0e18d409b9e6"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:04:17 crc kubenswrapper[4558]: I0120 18:04:17.171091 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/828c25e0-1972-41a8-aab7-0e18d409b9e6-dnsmasq-svc" (OuterVolumeSpecName: "dnsmasq-svc") pod "828c25e0-1972-41a8-aab7-0e18d409b9e6" (UID: "828c25e0-1972-41a8-aab7-0e18d409b9e6"). InnerVolumeSpecName "dnsmasq-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:04:17 crc kubenswrapper[4558]: I0120 18:04:17.201256 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/1897fe4b-a1b9-4dc9-b07d-cadf99938fab-ssh-key-edpm-compute-no-nodes\") pod \"download-cache-edpm-compute-no-nodes-edpm-compute-no-nodesm9d27\" (UID: \"1897fe4b-a1b9-4dc9-b07d-cadf99938fab\") " pod="openstack-kuttl-tests/download-cache-edpm-compute-no-nodes-edpm-compute-no-nodesm9d27" Jan 20 18:04:17 crc kubenswrapper[4558]: I0120 18:04:17.201532 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h4d4l\" (UniqueName: \"kubernetes.io/projected/1897fe4b-a1b9-4dc9-b07d-cadf99938fab-kube-api-access-h4d4l\") pod \"download-cache-edpm-compute-no-nodes-edpm-compute-no-nodesm9d27\" (UID: \"1897fe4b-a1b9-4dc9-b07d-cadf99938fab\") " pod="openstack-kuttl-tests/download-cache-edpm-compute-no-nodes-edpm-compute-no-nodesm9d27" Jan 20 18:04:17 crc kubenswrapper[4558]: I0120 18:04:17.201723 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1897fe4b-a1b9-4dc9-b07d-cadf99938fab-inventory\") pod \"download-cache-edpm-compute-no-nodes-edpm-compute-no-nodesm9d27\" (UID: \"1897fe4b-a1b9-4dc9-b07d-cadf99938fab\") " pod="openstack-kuttl-tests/download-cache-edpm-compute-no-nodes-edpm-compute-no-nodesm9d27" Jan 20 18:04:17 crc kubenswrapper[4558]: I0120 18:04:17.201949 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/828c25e0-1972-41a8-aab7-0e18d409b9e6-config\") on node \"crc\" DevicePath \"\"" Jan 20 18:04:17 crc kubenswrapper[4558]: I0120 18:04:17.202020 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5klh4\" (UniqueName: \"kubernetes.io/projected/828c25e0-1972-41a8-aab7-0e18d409b9e6-kube-api-access-5klh4\") on node \"crc\" DevicePath \"\"" Jan 20 18:04:17 crc kubenswrapper[4558]: I0120 18:04:17.202077 4558 reconciler_common.go:293] "Volume detached for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/828c25e0-1972-41a8-aab7-0e18d409b9e6-dnsmasq-svc\") on node \"crc\" DevicePath \"\"" Jan 20 18:04:17 crc kubenswrapper[4558]: I0120 18:04:17.204725 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/1897fe4b-a1b9-4dc9-b07d-cadf99938fab-ssh-key-edpm-compute-no-nodes\") pod \"download-cache-edpm-compute-no-nodes-edpm-compute-no-nodesm9d27\" (UID: \"1897fe4b-a1b9-4dc9-b07d-cadf99938fab\") " pod="openstack-kuttl-tests/download-cache-edpm-compute-no-nodes-edpm-compute-no-nodesm9d27" Jan 20 18:04:17 crc kubenswrapper[4558]: I0120 18:04:17.205110 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1897fe4b-a1b9-4dc9-b07d-cadf99938fab-inventory\") pod \"download-cache-edpm-compute-no-nodes-edpm-compute-no-nodesm9d27\" (UID: 
\"1897fe4b-a1b9-4dc9-b07d-cadf99938fab\") " pod="openstack-kuttl-tests/download-cache-edpm-compute-no-nodes-edpm-compute-no-nodesm9d27" Jan 20 18:04:17 crc kubenswrapper[4558]: I0120 18:04:17.215693 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h4d4l\" (UniqueName: \"kubernetes.io/projected/1897fe4b-a1b9-4dc9-b07d-cadf99938fab-kube-api-access-h4d4l\") pod \"download-cache-edpm-compute-no-nodes-edpm-compute-no-nodesm9d27\" (UID: \"1897fe4b-a1b9-4dc9-b07d-cadf99938fab\") " pod="openstack-kuttl-tests/download-cache-edpm-compute-no-nodes-edpm-compute-no-nodesm9d27" Jan 20 18:04:17 crc kubenswrapper[4558]: I0120 18:04:17.317619 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/download-cache-edpm-compute-no-nodes-edpm-compute-no-nodesm9d27" Jan 20 18:04:17 crc kubenswrapper[4558]: I0120 18:04:17.717355 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/download-cache-edpm-compute-no-nodes-edpm-compute-no-nodesm9d27"] Jan 20 18:04:17 crc kubenswrapper[4558]: I0120 18:04:17.852512 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/download-cache-edpm-compute-no-nodes-edpm-compute-no-nodesm9d27" event={"ID":"1897fe4b-a1b9-4dc9-b07d-cadf99938fab","Type":"ContainerStarted","Data":"904e447bebe874bdb49f568bdc29ace31504ac97e4362431f85bfea384671606"} Jan 20 18:04:17 crc kubenswrapper[4558]: I0120 18:04:17.855090 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-lql2g" event={"ID":"828c25e0-1972-41a8-aab7-0e18d409b9e6","Type":"ContainerDied","Data":"58a8af8dd7890976874e349ec11124dd626b59e003fb32c7c4922521fb714a69"} Jan 20 18:04:17 crc kubenswrapper[4558]: I0120 18:04:17.855182 4558 scope.go:117] "RemoveContainer" containerID="6297f64c334a125bb7ee66ccac20dd9efc1d257340764e4551d5acdaa2d20fd4" Jan 20 18:04:17 crc kubenswrapper[4558]: I0120 18:04:17.855156 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-lql2g" Jan 20 18:04:17 crc kubenswrapper[4558]: I0120 18:04:17.906552 4558 scope.go:117] "RemoveContainer" containerID="c7a378fe1e040b5e9a8c3e98fd85815935a94432406ee473e45b5af1d8927ac1" Jan 20 18:04:17 crc kubenswrapper[4558]: I0120 18:04:17.914249 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-lql2g"] Jan 20 18:04:17 crc kubenswrapper[4558]: I0120 18:04:17.921264 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-lql2g"] Jan 20 18:04:18 crc kubenswrapper[4558]: I0120 18:04:18.593918 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="828c25e0-1972-41a8-aab7-0e18d409b9e6" path="/var/lib/kubelet/pods/828c25e0-1972-41a8-aab7-0e18d409b9e6/volumes" Jan 20 18:04:18 crc kubenswrapper[4558]: I0120 18:04:18.864859 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/download-cache-edpm-compute-no-nodes-edpm-compute-no-nodesm9d27" event={"ID":"1897fe4b-a1b9-4dc9-b07d-cadf99938fab","Type":"ContainerStarted","Data":"cb81cdcb32f844625692122a4816f3b04fc8f25dd62b7b1032d238a783481362"} Jan 20 18:04:18 crc kubenswrapper[4558]: I0120 18:04:18.890399 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/download-cache-edpm-compute-no-nodes-edpm-compute-no-nodesm9d27" podStartSLOduration=2.253370114 podStartE2EDuration="2.890371333s" podCreationTimestamp="2026-01-20 18:04:16 +0000 UTC" firstStartedPulling="2026-01-20 18:04:17.802523924 +0000 UTC m=+4951.562861891" lastFinishedPulling="2026-01-20 18:04:18.439525143 +0000 UTC m=+4952.199863110" observedRunningTime="2026-01-20 18:04:18.883816518 +0000 UTC m=+4952.644154486" watchObservedRunningTime="2026-01-20 18:04:18.890371333 +0000 UTC m=+4952.650709300" Jan 20 18:04:19 crc kubenswrapper[4558]: I0120 18:04:19.876842 4558 generic.go:334] "Generic (PLEG): container finished" podID="1897fe4b-a1b9-4dc9-b07d-cadf99938fab" containerID="cb81cdcb32f844625692122a4816f3b04fc8f25dd62b7b1032d238a783481362" exitCode=0 Jan 20 18:04:19 crc kubenswrapper[4558]: I0120 18:04:19.876899 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/download-cache-edpm-compute-no-nodes-edpm-compute-no-nodesm9d27" event={"ID":"1897fe4b-a1b9-4dc9-b07d-cadf99938fab","Type":"ContainerDied","Data":"cb81cdcb32f844625692122a4816f3b04fc8f25dd62b7b1032d238a783481362"} Jan 20 18:04:20 crc kubenswrapper[4558]: I0120 18:04:20.566967 4558 scope.go:117] "RemoveContainer" containerID="6b02d31ec11fe5435af988e35303a6f66b5afa5ef952ce5f7cc4b3cd5f4d9497" Jan 20 18:04:20 crc kubenswrapper[4558]: E0120 18:04:20.567613 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:04:21 crc kubenswrapper[4558]: I0120 18:04:21.127236 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/download-cache-edpm-compute-no-nodes-edpm-compute-no-nodesm9d27" Jan 20 18:04:21 crc kubenswrapper[4558]: I0120 18:04:21.156860 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h4d4l\" (UniqueName: \"kubernetes.io/projected/1897fe4b-a1b9-4dc9-b07d-cadf99938fab-kube-api-access-h4d4l\") pod \"1897fe4b-a1b9-4dc9-b07d-cadf99938fab\" (UID: \"1897fe4b-a1b9-4dc9-b07d-cadf99938fab\") " Jan 20 18:04:21 crc kubenswrapper[4558]: I0120 18:04:21.156963 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1897fe4b-a1b9-4dc9-b07d-cadf99938fab-inventory\") pod \"1897fe4b-a1b9-4dc9-b07d-cadf99938fab\" (UID: \"1897fe4b-a1b9-4dc9-b07d-cadf99938fab\") " Jan 20 18:04:21 crc kubenswrapper[4558]: I0120 18:04:21.157010 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/1897fe4b-a1b9-4dc9-b07d-cadf99938fab-ssh-key-edpm-compute-no-nodes\") pod \"1897fe4b-a1b9-4dc9-b07d-cadf99938fab\" (UID: \"1897fe4b-a1b9-4dc9-b07d-cadf99938fab\") " Jan 20 18:04:21 crc kubenswrapper[4558]: I0120 18:04:21.164377 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1897fe4b-a1b9-4dc9-b07d-cadf99938fab-kube-api-access-h4d4l" (OuterVolumeSpecName: "kube-api-access-h4d4l") pod "1897fe4b-a1b9-4dc9-b07d-cadf99938fab" (UID: "1897fe4b-a1b9-4dc9-b07d-cadf99938fab"). InnerVolumeSpecName "kube-api-access-h4d4l". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:04:21 crc kubenswrapper[4558]: I0120 18:04:21.175818 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1897fe4b-a1b9-4dc9-b07d-cadf99938fab-inventory" (OuterVolumeSpecName: "inventory") pod "1897fe4b-a1b9-4dc9-b07d-cadf99938fab" (UID: "1897fe4b-a1b9-4dc9-b07d-cadf99938fab"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:04:21 crc kubenswrapper[4558]: I0120 18:04:21.178029 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1897fe4b-a1b9-4dc9-b07d-cadf99938fab-ssh-key-edpm-compute-no-nodes" (OuterVolumeSpecName: "ssh-key-edpm-compute-no-nodes") pod "1897fe4b-a1b9-4dc9-b07d-cadf99938fab" (UID: "1897fe4b-a1b9-4dc9-b07d-cadf99938fab"). InnerVolumeSpecName "ssh-key-edpm-compute-no-nodes". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:04:21 crc kubenswrapper[4558]: I0120 18:04:21.259340 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h4d4l\" (UniqueName: \"kubernetes.io/projected/1897fe4b-a1b9-4dc9-b07d-cadf99938fab-kube-api-access-h4d4l\") on node \"crc\" DevicePath \"\"" Jan 20 18:04:21 crc kubenswrapper[4558]: I0120 18:04:21.259375 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1897fe4b-a1b9-4dc9-b07d-cadf99938fab-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:04:21 crc kubenswrapper[4558]: I0120 18:04:21.259387 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/1897fe4b-a1b9-4dc9-b07d-cadf99938fab-ssh-key-edpm-compute-no-nodes\") on node \"crc\" DevicePath \"\"" Jan 20 18:04:21 crc kubenswrapper[4558]: I0120 18:04:21.896575 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/download-cache-edpm-compute-no-nodes-edpm-compute-no-nodesm9d27" event={"ID":"1897fe4b-a1b9-4dc9-b07d-cadf99938fab","Type":"ContainerDied","Data":"904e447bebe874bdb49f568bdc29ace31504ac97e4362431f85bfea384671606"} Jan 20 18:04:21 crc kubenswrapper[4558]: I0120 18:04:21.896637 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="904e447bebe874bdb49f568bdc29ace31504ac97e4362431f85bfea384671606" Jan 20 18:04:21 crc kubenswrapper[4558]: I0120 18:04:21.896634 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/download-cache-edpm-compute-no-nodes-edpm-compute-no-nodesm9d27" Jan 20 18:04:21 crc kubenswrapper[4558]: I0120 18:04:21.949325 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/bootstrap-edpm-compute-no-nodes-edpm-compute-no-nodes-hmt8x"] Jan 20 18:04:21 crc kubenswrapper[4558]: E0120 18:04:21.949647 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="828c25e0-1972-41a8-aab7-0e18d409b9e6" containerName="init" Jan 20 18:04:21 crc kubenswrapper[4558]: I0120 18:04:21.949668 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="828c25e0-1972-41a8-aab7-0e18d409b9e6" containerName="init" Jan 20 18:04:21 crc kubenswrapper[4558]: E0120 18:04:21.949696 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="828c25e0-1972-41a8-aab7-0e18d409b9e6" containerName="dnsmasq-dns" Jan 20 18:04:21 crc kubenswrapper[4558]: I0120 18:04:21.949701 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="828c25e0-1972-41a8-aab7-0e18d409b9e6" containerName="dnsmasq-dns" Jan 20 18:04:21 crc kubenswrapper[4558]: E0120 18:04:21.949709 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1897fe4b-a1b9-4dc9-b07d-cadf99938fab" containerName="download-cache-edpm-compute-no-nodes-edpm-compute-no-nodes" Jan 20 18:04:21 crc kubenswrapper[4558]: I0120 18:04:21.949716 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1897fe4b-a1b9-4dc9-b07d-cadf99938fab" containerName="download-cache-edpm-compute-no-nodes-edpm-compute-no-nodes" Jan 20 18:04:21 crc kubenswrapper[4558]: I0120 18:04:21.949856 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="1897fe4b-a1b9-4dc9-b07d-cadf99938fab" containerName="download-cache-edpm-compute-no-nodes-edpm-compute-no-nodes" Jan 20 18:04:21 crc kubenswrapper[4558]: I0120 18:04:21.949883 4558 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="828c25e0-1972-41a8-aab7-0e18d409b9e6" containerName="dnsmasq-dns" Jan 20 18:04:21 crc kubenswrapper[4558]: I0120 18:04:21.950388 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/bootstrap-edpm-compute-no-nodes-edpm-compute-no-nodes-hmt8x" Jan 20 18:04:21 crc kubenswrapper[4558]: I0120 18:04:21.953379 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 18:04:21 crc kubenswrapper[4558]: I0120 18:04:21.953632 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"edpm-compute-no-nodes-dockercfg-tljmj" Jan 20 18:04:21 crc kubenswrapper[4558]: I0120 18:04:21.953813 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-edpm-compute-no-nodes" Jan 20 18:04:21 crc kubenswrapper[4558]: I0120 18:04:21.953943 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"combined-ca-bundle" Jan 20 18:04:21 crc kubenswrapper[4558]: I0120 18:04:21.954063 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 18:04:21 crc kubenswrapper[4558]: I0120 18:04:21.959436 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/bootstrap-edpm-compute-no-nodes-edpm-compute-no-nodes-hmt8x"] Jan 20 18:04:21 crc kubenswrapper[4558]: I0120 18:04:21.971564 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2rcrn\" (UniqueName: \"kubernetes.io/projected/4a10dbb7-8193-4f7c-89b2-889ab7842c98-kube-api-access-2rcrn\") pod \"bootstrap-edpm-compute-no-nodes-edpm-compute-no-nodes-hmt8x\" (UID: \"4a10dbb7-8193-4f7c-89b2-889ab7842c98\") " pod="openstack-kuttl-tests/bootstrap-edpm-compute-no-nodes-edpm-compute-no-nodes-hmt8x" Jan 20 18:04:21 crc kubenswrapper[4558]: I0120 18:04:21.971636 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4a10dbb7-8193-4f7c-89b2-889ab7842c98-inventory\") pod \"bootstrap-edpm-compute-no-nodes-edpm-compute-no-nodes-hmt8x\" (UID: \"4a10dbb7-8193-4f7c-89b2-889ab7842c98\") " pod="openstack-kuttl-tests/bootstrap-edpm-compute-no-nodes-edpm-compute-no-nodes-hmt8x" Jan 20 18:04:21 crc kubenswrapper[4558]: I0120 18:04:21.971715 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a10dbb7-8193-4f7c-89b2-889ab7842c98-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-compute-no-nodes-edpm-compute-no-nodes-hmt8x\" (UID: \"4a10dbb7-8193-4f7c-89b2-889ab7842c98\") " pod="openstack-kuttl-tests/bootstrap-edpm-compute-no-nodes-edpm-compute-no-nodes-hmt8x" Jan 20 18:04:21 crc kubenswrapper[4558]: I0120 18:04:21.971760 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/4a10dbb7-8193-4f7c-89b2-889ab7842c98-ssh-key-edpm-compute-no-nodes\") pod \"bootstrap-edpm-compute-no-nodes-edpm-compute-no-nodes-hmt8x\" (UID: \"4a10dbb7-8193-4f7c-89b2-889ab7842c98\") " pod="openstack-kuttl-tests/bootstrap-edpm-compute-no-nodes-edpm-compute-no-nodes-hmt8x" Jan 20 18:04:22 crc kubenswrapper[4558]: I0120 18:04:22.073984 4558 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a10dbb7-8193-4f7c-89b2-889ab7842c98-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-compute-no-nodes-edpm-compute-no-nodes-hmt8x\" (UID: \"4a10dbb7-8193-4f7c-89b2-889ab7842c98\") " pod="openstack-kuttl-tests/bootstrap-edpm-compute-no-nodes-edpm-compute-no-nodes-hmt8x" Jan 20 18:04:22 crc kubenswrapper[4558]: I0120 18:04:22.074277 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/4a10dbb7-8193-4f7c-89b2-889ab7842c98-ssh-key-edpm-compute-no-nodes\") pod \"bootstrap-edpm-compute-no-nodes-edpm-compute-no-nodes-hmt8x\" (UID: \"4a10dbb7-8193-4f7c-89b2-889ab7842c98\") " pod="openstack-kuttl-tests/bootstrap-edpm-compute-no-nodes-edpm-compute-no-nodes-hmt8x" Jan 20 18:04:22 crc kubenswrapper[4558]: I0120 18:04:22.074509 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2rcrn\" (UniqueName: \"kubernetes.io/projected/4a10dbb7-8193-4f7c-89b2-889ab7842c98-kube-api-access-2rcrn\") pod \"bootstrap-edpm-compute-no-nodes-edpm-compute-no-nodes-hmt8x\" (UID: \"4a10dbb7-8193-4f7c-89b2-889ab7842c98\") " pod="openstack-kuttl-tests/bootstrap-edpm-compute-no-nodes-edpm-compute-no-nodes-hmt8x" Jan 20 18:04:22 crc kubenswrapper[4558]: I0120 18:04:22.074692 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4a10dbb7-8193-4f7c-89b2-889ab7842c98-inventory\") pod \"bootstrap-edpm-compute-no-nodes-edpm-compute-no-nodes-hmt8x\" (UID: \"4a10dbb7-8193-4f7c-89b2-889ab7842c98\") " pod="openstack-kuttl-tests/bootstrap-edpm-compute-no-nodes-edpm-compute-no-nodes-hmt8x" Jan 20 18:04:22 crc kubenswrapper[4558]: I0120 18:04:22.078791 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/4a10dbb7-8193-4f7c-89b2-889ab7842c98-ssh-key-edpm-compute-no-nodes\") pod \"bootstrap-edpm-compute-no-nodes-edpm-compute-no-nodes-hmt8x\" (UID: \"4a10dbb7-8193-4f7c-89b2-889ab7842c98\") " pod="openstack-kuttl-tests/bootstrap-edpm-compute-no-nodes-edpm-compute-no-nodes-hmt8x" Jan 20 18:04:22 crc kubenswrapper[4558]: I0120 18:04:22.079546 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4a10dbb7-8193-4f7c-89b2-889ab7842c98-inventory\") pod \"bootstrap-edpm-compute-no-nodes-edpm-compute-no-nodes-hmt8x\" (UID: \"4a10dbb7-8193-4f7c-89b2-889ab7842c98\") " pod="openstack-kuttl-tests/bootstrap-edpm-compute-no-nodes-edpm-compute-no-nodes-hmt8x" Jan 20 18:04:22 crc kubenswrapper[4558]: I0120 18:04:22.080522 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a10dbb7-8193-4f7c-89b2-889ab7842c98-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-compute-no-nodes-edpm-compute-no-nodes-hmt8x\" (UID: \"4a10dbb7-8193-4f7c-89b2-889ab7842c98\") " pod="openstack-kuttl-tests/bootstrap-edpm-compute-no-nodes-edpm-compute-no-nodes-hmt8x" Jan 20 18:04:22 crc kubenswrapper[4558]: I0120 18:04:22.091437 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2rcrn\" (UniqueName: \"kubernetes.io/projected/4a10dbb7-8193-4f7c-89b2-889ab7842c98-kube-api-access-2rcrn\") pod \"bootstrap-edpm-compute-no-nodes-edpm-compute-no-nodes-hmt8x\" (UID: 
\"4a10dbb7-8193-4f7c-89b2-889ab7842c98\") " pod="openstack-kuttl-tests/bootstrap-edpm-compute-no-nodes-edpm-compute-no-nodes-hmt8x" Jan 20 18:04:22 crc kubenswrapper[4558]: I0120 18:04:22.268751 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/bootstrap-edpm-compute-no-nodes-edpm-compute-no-nodes-hmt8x" Jan 20 18:04:22 crc kubenswrapper[4558]: I0120 18:04:22.713215 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/bootstrap-edpm-compute-no-nodes-edpm-compute-no-nodes-hmt8x"] Jan 20 18:04:22 crc kubenswrapper[4558]: I0120 18:04:22.908254 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/bootstrap-edpm-compute-no-nodes-edpm-compute-no-nodes-hmt8x" event={"ID":"4a10dbb7-8193-4f7c-89b2-889ab7842c98","Type":"ContainerStarted","Data":"df461a77c2913cbeb0c9063a0b03954cb9e94148ba267e0bcb7d5319aad83ebd"} Jan 20 18:04:23 crc kubenswrapper[4558]: I0120 18:04:23.918041 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/bootstrap-edpm-compute-no-nodes-edpm-compute-no-nodes-hmt8x" event={"ID":"4a10dbb7-8193-4f7c-89b2-889ab7842c98","Type":"ContainerStarted","Data":"ec9467e4d7a0341c215effcc90ce66f3cb78441df4ad8336af7f57524447d2d4"} Jan 20 18:04:23 crc kubenswrapper[4558]: I0120 18:04:23.955291 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/bootstrap-edpm-compute-no-nodes-edpm-compute-no-nodes-hmt8x" podStartSLOduration=2.262248228 podStartE2EDuration="2.955265539s" podCreationTimestamp="2026-01-20 18:04:21 +0000 UTC" firstStartedPulling="2026-01-20 18:04:22.718953157 +0000 UTC m=+4956.479291123" lastFinishedPulling="2026-01-20 18:04:23.411970467 +0000 UTC m=+4957.172308434" observedRunningTime="2026-01-20 18:04:23.951732948 +0000 UTC m=+4957.712070914" watchObservedRunningTime="2026-01-20 18:04:23.955265539 +0000 UTC m=+4957.715603506" Jan 20 18:04:24 crc kubenswrapper[4558]: I0120 18:04:24.928289 4558 generic.go:334] "Generic (PLEG): container finished" podID="4a10dbb7-8193-4f7c-89b2-889ab7842c98" containerID="ec9467e4d7a0341c215effcc90ce66f3cb78441df4ad8336af7f57524447d2d4" exitCode=0 Jan 20 18:04:24 crc kubenswrapper[4558]: I0120 18:04:24.928396 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/bootstrap-edpm-compute-no-nodes-edpm-compute-no-nodes-hmt8x" event={"ID":"4a10dbb7-8193-4f7c-89b2-889ab7842c98","Type":"ContainerDied","Data":"ec9467e4d7a0341c215effcc90ce66f3cb78441df4ad8336af7f57524447d2d4"} Jan 20 18:04:26 crc kubenswrapper[4558]: I0120 18:04:26.182585 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/bootstrap-edpm-compute-no-nodes-edpm-compute-no-nodes-hmt8x" Jan 20 18:04:26 crc kubenswrapper[4558]: I0120 18:04:26.239639 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2rcrn\" (UniqueName: \"kubernetes.io/projected/4a10dbb7-8193-4f7c-89b2-889ab7842c98-kube-api-access-2rcrn\") pod \"4a10dbb7-8193-4f7c-89b2-889ab7842c98\" (UID: \"4a10dbb7-8193-4f7c-89b2-889ab7842c98\") " Jan 20 18:04:26 crc kubenswrapper[4558]: I0120 18:04:26.239778 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/4a10dbb7-8193-4f7c-89b2-889ab7842c98-ssh-key-edpm-compute-no-nodes\") pod \"4a10dbb7-8193-4f7c-89b2-889ab7842c98\" (UID: \"4a10dbb7-8193-4f7c-89b2-889ab7842c98\") " Jan 20 18:04:26 crc kubenswrapper[4558]: I0120 18:04:26.239873 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4a10dbb7-8193-4f7c-89b2-889ab7842c98-inventory\") pod \"4a10dbb7-8193-4f7c-89b2-889ab7842c98\" (UID: \"4a10dbb7-8193-4f7c-89b2-889ab7842c98\") " Jan 20 18:04:26 crc kubenswrapper[4558]: I0120 18:04:26.239977 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a10dbb7-8193-4f7c-89b2-889ab7842c98-bootstrap-combined-ca-bundle\") pod \"4a10dbb7-8193-4f7c-89b2-889ab7842c98\" (UID: \"4a10dbb7-8193-4f7c-89b2-889ab7842c98\") " Jan 20 18:04:26 crc kubenswrapper[4558]: I0120 18:04:26.394217 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4a10dbb7-8193-4f7c-89b2-889ab7842c98-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "4a10dbb7-8193-4f7c-89b2-889ab7842c98" (UID: "4a10dbb7-8193-4f7c-89b2-889ab7842c98"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:04:26 crc kubenswrapper[4558]: I0120 18:04:26.394245 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4a10dbb7-8193-4f7c-89b2-889ab7842c98-kube-api-access-2rcrn" (OuterVolumeSpecName: "kube-api-access-2rcrn") pod "4a10dbb7-8193-4f7c-89b2-889ab7842c98" (UID: "4a10dbb7-8193-4f7c-89b2-889ab7842c98"). InnerVolumeSpecName "kube-api-access-2rcrn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:04:26 crc kubenswrapper[4558]: I0120 18:04:26.409774 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4a10dbb7-8193-4f7c-89b2-889ab7842c98-ssh-key-edpm-compute-no-nodes" (OuterVolumeSpecName: "ssh-key-edpm-compute-no-nodes") pod "4a10dbb7-8193-4f7c-89b2-889ab7842c98" (UID: "4a10dbb7-8193-4f7c-89b2-889ab7842c98"). InnerVolumeSpecName "ssh-key-edpm-compute-no-nodes". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:04:26 crc kubenswrapper[4558]: I0120 18:04:26.409809 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4a10dbb7-8193-4f7c-89b2-889ab7842c98-inventory" (OuterVolumeSpecName: "inventory") pod "4a10dbb7-8193-4f7c-89b2-889ab7842c98" (UID: "4a10dbb7-8193-4f7c-89b2-889ab7842c98"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:04:26 crc kubenswrapper[4558]: I0120 18:04:26.443628 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/4a10dbb7-8193-4f7c-89b2-889ab7842c98-ssh-key-edpm-compute-no-nodes\") on node \"crc\" DevicePath \"\"" Jan 20 18:04:26 crc kubenswrapper[4558]: I0120 18:04:26.443673 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4a10dbb7-8193-4f7c-89b2-889ab7842c98-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:04:26 crc kubenswrapper[4558]: I0120 18:04:26.443688 4558 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4a10dbb7-8193-4f7c-89b2-889ab7842c98-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:04:26 crc kubenswrapper[4558]: I0120 18:04:26.443732 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2rcrn\" (UniqueName: \"kubernetes.io/projected/4a10dbb7-8193-4f7c-89b2-889ab7842c98-kube-api-access-2rcrn\") on node \"crc\" DevicePath \"\"" Jan 20 18:04:26 crc kubenswrapper[4558]: I0120 18:04:26.946661 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/bootstrap-edpm-compute-no-nodes-edpm-compute-no-nodes-hmt8x" event={"ID":"4a10dbb7-8193-4f7c-89b2-889ab7842c98","Type":"ContainerDied","Data":"df461a77c2913cbeb0c9063a0b03954cb9e94148ba267e0bcb7d5319aad83ebd"} Jan 20 18:04:26 crc kubenswrapper[4558]: I0120 18:04:26.946718 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="df461a77c2913cbeb0c9063a0b03954cb9e94148ba267e0bcb7d5319aad83ebd" Jan 20 18:04:26 crc kubenswrapper[4558]: I0120 18:04:26.946761 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/bootstrap-edpm-compute-no-nodes-edpm-compute-no-nodes-hmt8x" Jan 20 18:04:27 crc kubenswrapper[4558]: I0120 18:04:27.255148 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/configure-network-edpm-compute-no-nodes-edpm-compute-no-nokr9g2"] Jan 20 18:04:27 crc kubenswrapper[4558]: E0120 18:04:27.255792 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a10dbb7-8193-4f7c-89b2-889ab7842c98" containerName="bootstrap-edpm-compute-no-nodes-edpm-compute-no-nodes" Jan 20 18:04:27 crc kubenswrapper[4558]: I0120 18:04:27.255808 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a10dbb7-8193-4f7c-89b2-889ab7842c98" containerName="bootstrap-edpm-compute-no-nodes-edpm-compute-no-nodes" Jan 20 18:04:27 crc kubenswrapper[4558]: I0120 18:04:27.255964 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a10dbb7-8193-4f7c-89b2-889ab7842c98" containerName="bootstrap-edpm-compute-no-nodes-edpm-compute-no-nodes" Jan 20 18:04:27 crc kubenswrapper[4558]: I0120 18:04:27.256441 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/configure-network-edpm-compute-no-nodes-edpm-compute-no-nokr9g2" Jan 20 18:04:27 crc kubenswrapper[4558]: I0120 18:04:27.257932 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 18:04:27 crc kubenswrapper[4558]: I0120 18:04:27.258371 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-edpm-compute-no-nodes" Jan 20 18:04:27 crc kubenswrapper[4558]: I0120 18:04:27.258412 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"edpm-compute-no-nodes-dockercfg-tljmj" Jan 20 18:04:27 crc kubenswrapper[4558]: I0120 18:04:27.258802 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 18:04:27 crc kubenswrapper[4558]: I0120 18:04:27.264491 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/configure-network-edpm-compute-no-nodes-edpm-compute-no-nokr9g2"] Jan 20 18:04:27 crc kubenswrapper[4558]: I0120 18:04:27.358294 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7nt88\" (UniqueName: \"kubernetes.io/projected/46eabd98-9646-4e4d-97b7-96ebad66a340-kube-api-access-7nt88\") pod \"configure-network-edpm-compute-no-nodes-edpm-compute-no-nokr9g2\" (UID: \"46eabd98-9646-4e4d-97b7-96ebad66a340\") " pod="openstack-kuttl-tests/configure-network-edpm-compute-no-nodes-edpm-compute-no-nokr9g2" Jan 20 18:04:27 crc kubenswrapper[4558]: I0120 18:04:27.358356 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/46eabd98-9646-4e4d-97b7-96ebad66a340-ssh-key-edpm-compute-no-nodes\") pod \"configure-network-edpm-compute-no-nodes-edpm-compute-no-nokr9g2\" (UID: \"46eabd98-9646-4e4d-97b7-96ebad66a340\") " pod="openstack-kuttl-tests/configure-network-edpm-compute-no-nodes-edpm-compute-no-nokr9g2" Jan 20 18:04:27 crc kubenswrapper[4558]: I0120 18:04:27.358400 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/46eabd98-9646-4e4d-97b7-96ebad66a340-inventory\") pod \"configure-network-edpm-compute-no-nodes-edpm-compute-no-nokr9g2\" (UID: \"46eabd98-9646-4e4d-97b7-96ebad66a340\") " pod="openstack-kuttl-tests/configure-network-edpm-compute-no-nodes-edpm-compute-no-nokr9g2" Jan 20 18:04:27 crc kubenswrapper[4558]: I0120 18:04:27.459979 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7nt88\" (UniqueName: \"kubernetes.io/projected/46eabd98-9646-4e4d-97b7-96ebad66a340-kube-api-access-7nt88\") pod \"configure-network-edpm-compute-no-nodes-edpm-compute-no-nokr9g2\" (UID: \"46eabd98-9646-4e4d-97b7-96ebad66a340\") " pod="openstack-kuttl-tests/configure-network-edpm-compute-no-nodes-edpm-compute-no-nokr9g2" Jan 20 18:04:27 crc kubenswrapper[4558]: I0120 18:04:27.460046 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/46eabd98-9646-4e4d-97b7-96ebad66a340-ssh-key-edpm-compute-no-nodes\") pod \"configure-network-edpm-compute-no-nodes-edpm-compute-no-nokr9g2\" (UID: \"46eabd98-9646-4e4d-97b7-96ebad66a340\") " 
pod="openstack-kuttl-tests/configure-network-edpm-compute-no-nodes-edpm-compute-no-nokr9g2" Jan 20 18:04:27 crc kubenswrapper[4558]: I0120 18:04:27.460109 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/46eabd98-9646-4e4d-97b7-96ebad66a340-inventory\") pod \"configure-network-edpm-compute-no-nodes-edpm-compute-no-nokr9g2\" (UID: \"46eabd98-9646-4e4d-97b7-96ebad66a340\") " pod="openstack-kuttl-tests/configure-network-edpm-compute-no-nodes-edpm-compute-no-nokr9g2" Jan 20 18:04:27 crc kubenswrapper[4558]: I0120 18:04:27.467313 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/46eabd98-9646-4e4d-97b7-96ebad66a340-inventory\") pod \"configure-network-edpm-compute-no-nodes-edpm-compute-no-nokr9g2\" (UID: \"46eabd98-9646-4e4d-97b7-96ebad66a340\") " pod="openstack-kuttl-tests/configure-network-edpm-compute-no-nodes-edpm-compute-no-nokr9g2" Jan 20 18:04:27 crc kubenswrapper[4558]: I0120 18:04:27.467408 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/46eabd98-9646-4e4d-97b7-96ebad66a340-ssh-key-edpm-compute-no-nodes\") pod \"configure-network-edpm-compute-no-nodes-edpm-compute-no-nokr9g2\" (UID: \"46eabd98-9646-4e4d-97b7-96ebad66a340\") " pod="openstack-kuttl-tests/configure-network-edpm-compute-no-nodes-edpm-compute-no-nokr9g2" Jan 20 18:04:27 crc kubenswrapper[4558]: I0120 18:04:27.476255 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7nt88\" (UniqueName: \"kubernetes.io/projected/46eabd98-9646-4e4d-97b7-96ebad66a340-kube-api-access-7nt88\") pod \"configure-network-edpm-compute-no-nodes-edpm-compute-no-nokr9g2\" (UID: \"46eabd98-9646-4e4d-97b7-96ebad66a340\") " pod="openstack-kuttl-tests/configure-network-edpm-compute-no-nodes-edpm-compute-no-nokr9g2" Jan 20 18:04:27 crc kubenswrapper[4558]: I0120 18:04:27.572001 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/configure-network-edpm-compute-no-nodes-edpm-compute-no-nokr9g2" Jan 20 18:04:27 crc kubenswrapper[4558]: I0120 18:04:27.793495 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/configure-network-edpm-compute-no-nodes-edpm-compute-no-nokr9g2"] Jan 20 18:04:28 crc kubenswrapper[4558]: W0120 18:04:28.098291 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod46eabd98_9646_4e4d_97b7_96ebad66a340.slice/crio-f9a351b3c47bdc167313cd665ea2ea276f7f5131ba251529be38340b362a1a4a WatchSource:0}: Error finding container f9a351b3c47bdc167313cd665ea2ea276f7f5131ba251529be38340b362a1a4a: Status 404 returned error can't find the container with id f9a351b3c47bdc167313cd665ea2ea276f7f5131ba251529be38340b362a1a4a Jan 20 18:04:28 crc kubenswrapper[4558]: I0120 18:04:28.965823 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/configure-network-edpm-compute-no-nodes-edpm-compute-no-nokr9g2" event={"ID":"46eabd98-9646-4e4d-97b7-96ebad66a340","Type":"ContainerStarted","Data":"ad46975a195c5062c19fee47d527fae8580c0225ca40e53e7198e0dd9aa58a65"} Jan 20 18:04:28 crc kubenswrapper[4558]: I0120 18:04:28.967342 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/configure-network-edpm-compute-no-nodes-edpm-compute-no-nokr9g2" event={"ID":"46eabd98-9646-4e4d-97b7-96ebad66a340","Type":"ContainerStarted","Data":"f9a351b3c47bdc167313cd665ea2ea276f7f5131ba251529be38340b362a1a4a"} Jan 20 18:04:28 crc kubenswrapper[4558]: I0120 18:04:28.986819 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/configure-network-edpm-compute-no-nodes-edpm-compute-no-nokr9g2" podStartSLOduration=1.471944081 podStartE2EDuration="1.986785615s" podCreationTimestamp="2026-01-20 18:04:27 +0000 UTC" firstStartedPulling="2026-01-20 18:04:28.100742738 +0000 UTC m=+4961.861080705" lastFinishedPulling="2026-01-20 18:04:28.615584272 +0000 UTC m=+4962.375922239" observedRunningTime="2026-01-20 18:04:28.98108704 +0000 UTC m=+4962.741425007" watchObservedRunningTime="2026-01-20 18:04:28.986785615 +0000 UTC m=+4962.747123582" Jan 20 18:04:29 crc kubenswrapper[4558]: I0120 18:04:29.978490 4558 generic.go:334] "Generic (PLEG): container finished" podID="46eabd98-9646-4e4d-97b7-96ebad66a340" containerID="ad46975a195c5062c19fee47d527fae8580c0225ca40e53e7198e0dd9aa58a65" exitCode=0 Jan 20 18:04:29 crc kubenswrapper[4558]: I0120 18:04:29.978606 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/configure-network-edpm-compute-no-nodes-edpm-compute-no-nokr9g2" event={"ID":"46eabd98-9646-4e4d-97b7-96ebad66a340","Type":"ContainerDied","Data":"ad46975a195c5062c19fee47d527fae8580c0225ca40e53e7198e0dd9aa58a65"} Jan 20 18:04:31 crc kubenswrapper[4558]: I0120 18:04:31.264588 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/configure-network-edpm-compute-no-nodes-edpm-compute-no-nokr9g2" Jan 20 18:04:31 crc kubenswrapper[4558]: I0120 18:04:31.334875 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7nt88\" (UniqueName: \"kubernetes.io/projected/46eabd98-9646-4e4d-97b7-96ebad66a340-kube-api-access-7nt88\") pod \"46eabd98-9646-4e4d-97b7-96ebad66a340\" (UID: \"46eabd98-9646-4e4d-97b7-96ebad66a340\") " Jan 20 18:04:31 crc kubenswrapper[4558]: I0120 18:04:31.334991 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/46eabd98-9646-4e4d-97b7-96ebad66a340-ssh-key-edpm-compute-no-nodes\") pod \"46eabd98-9646-4e4d-97b7-96ebad66a340\" (UID: \"46eabd98-9646-4e4d-97b7-96ebad66a340\") " Jan 20 18:04:31 crc kubenswrapper[4558]: I0120 18:04:31.335020 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/46eabd98-9646-4e4d-97b7-96ebad66a340-inventory\") pod \"46eabd98-9646-4e4d-97b7-96ebad66a340\" (UID: \"46eabd98-9646-4e4d-97b7-96ebad66a340\") " Jan 20 18:04:31 crc kubenswrapper[4558]: I0120 18:04:31.340417 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/46eabd98-9646-4e4d-97b7-96ebad66a340-kube-api-access-7nt88" (OuterVolumeSpecName: "kube-api-access-7nt88") pod "46eabd98-9646-4e4d-97b7-96ebad66a340" (UID: "46eabd98-9646-4e4d-97b7-96ebad66a340"). InnerVolumeSpecName "kube-api-access-7nt88". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:04:31 crc kubenswrapper[4558]: I0120 18:04:31.354003 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/46eabd98-9646-4e4d-97b7-96ebad66a340-ssh-key-edpm-compute-no-nodes" (OuterVolumeSpecName: "ssh-key-edpm-compute-no-nodes") pod "46eabd98-9646-4e4d-97b7-96ebad66a340" (UID: "46eabd98-9646-4e4d-97b7-96ebad66a340"). InnerVolumeSpecName "ssh-key-edpm-compute-no-nodes". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:04:31 crc kubenswrapper[4558]: I0120 18:04:31.355586 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/46eabd98-9646-4e4d-97b7-96ebad66a340-inventory" (OuterVolumeSpecName: "inventory") pod "46eabd98-9646-4e4d-97b7-96ebad66a340" (UID: "46eabd98-9646-4e4d-97b7-96ebad66a340"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:04:31 crc kubenswrapper[4558]: I0120 18:04:31.436766 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/46eabd98-9646-4e4d-97b7-96ebad66a340-ssh-key-edpm-compute-no-nodes\") on node \"crc\" DevicePath \"\"" Jan 20 18:04:31 crc kubenswrapper[4558]: I0120 18:04:31.436799 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/46eabd98-9646-4e4d-97b7-96ebad66a340-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:04:31 crc kubenswrapper[4558]: I0120 18:04:31.436814 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7nt88\" (UniqueName: \"kubernetes.io/projected/46eabd98-9646-4e4d-97b7-96ebad66a340-kube-api-access-7nt88\") on node \"crc\" DevicePath \"\"" Jan 20 18:04:32 crc kubenswrapper[4558]: I0120 18:04:32.000660 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/configure-network-edpm-compute-no-nodes-edpm-compute-no-nokr9g2" event={"ID":"46eabd98-9646-4e4d-97b7-96ebad66a340","Type":"ContainerDied","Data":"f9a351b3c47bdc167313cd665ea2ea276f7f5131ba251529be38340b362a1a4a"} Jan 20 18:04:32 crc kubenswrapper[4558]: I0120 18:04:32.000996 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f9a351b3c47bdc167313cd665ea2ea276f7f5131ba251529be38340b362a1a4a" Jan 20 18:04:32 crc kubenswrapper[4558]: I0120 18:04:32.000746 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/configure-network-edpm-compute-no-nodes-edpm-compute-no-nokr9g2" Jan 20 18:04:32 crc kubenswrapper[4558]: I0120 18:04:32.045019 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/validate-network-edpm-compute-no-nodes-edpm-compute-no-nodgr7qx"] Jan 20 18:04:32 crc kubenswrapper[4558]: E0120 18:04:32.045517 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46eabd98-9646-4e4d-97b7-96ebad66a340" containerName="configure-network-edpm-compute-no-nodes-edpm-compute-no-nodes" Jan 20 18:04:32 crc kubenswrapper[4558]: I0120 18:04:32.045588 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="46eabd98-9646-4e4d-97b7-96ebad66a340" containerName="configure-network-edpm-compute-no-nodes-edpm-compute-no-nodes" Jan 20 18:04:32 crc kubenswrapper[4558]: I0120 18:04:32.045762 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="46eabd98-9646-4e4d-97b7-96ebad66a340" containerName="configure-network-edpm-compute-no-nodes-edpm-compute-no-nodes" Jan 20 18:04:32 crc kubenswrapper[4558]: I0120 18:04:32.046303 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/validate-network-edpm-compute-no-nodes-edpm-compute-no-nodgr7qx" Jan 20 18:04:32 crc kubenswrapper[4558]: I0120 18:04:32.048062 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"edpm-compute-no-nodes-dockercfg-tljmj" Jan 20 18:04:32 crc kubenswrapper[4558]: I0120 18:04:32.048519 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 18:04:32 crc kubenswrapper[4558]: I0120 18:04:32.049149 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 18:04:32 crc kubenswrapper[4558]: I0120 18:04:32.053775 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-edpm-compute-no-nodes" Jan 20 18:04:32 crc kubenswrapper[4558]: I0120 18:04:32.056622 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/validate-network-edpm-compute-no-nodes-edpm-compute-no-nodgr7qx"] Jan 20 18:04:32 crc kubenswrapper[4558]: I0120 18:04:32.157036 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/7783d091-b00b-4836-b2e5-b67ed88fba86-ssh-key-edpm-compute-no-nodes\") pod \"validate-network-edpm-compute-no-nodes-edpm-compute-no-nodgr7qx\" (UID: \"7783d091-b00b-4836-b2e5-b67ed88fba86\") " pod="openstack-kuttl-tests/validate-network-edpm-compute-no-nodes-edpm-compute-no-nodgr7qx" Jan 20 18:04:32 crc kubenswrapper[4558]: I0120 18:04:32.157204 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7783d091-b00b-4836-b2e5-b67ed88fba86-inventory\") pod \"validate-network-edpm-compute-no-nodes-edpm-compute-no-nodgr7qx\" (UID: \"7783d091-b00b-4836-b2e5-b67ed88fba86\") " pod="openstack-kuttl-tests/validate-network-edpm-compute-no-nodes-edpm-compute-no-nodgr7qx" Jan 20 18:04:32 crc kubenswrapper[4558]: I0120 18:04:32.157242 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vprhs\" (UniqueName: \"kubernetes.io/projected/7783d091-b00b-4836-b2e5-b67ed88fba86-kube-api-access-vprhs\") pod \"validate-network-edpm-compute-no-nodes-edpm-compute-no-nodgr7qx\" (UID: \"7783d091-b00b-4836-b2e5-b67ed88fba86\") " pod="openstack-kuttl-tests/validate-network-edpm-compute-no-nodes-edpm-compute-no-nodgr7qx" Jan 20 18:04:32 crc kubenswrapper[4558]: I0120 18:04:32.258857 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7783d091-b00b-4836-b2e5-b67ed88fba86-inventory\") pod \"validate-network-edpm-compute-no-nodes-edpm-compute-no-nodgr7qx\" (UID: \"7783d091-b00b-4836-b2e5-b67ed88fba86\") " pod="openstack-kuttl-tests/validate-network-edpm-compute-no-nodes-edpm-compute-no-nodgr7qx" Jan 20 18:04:32 crc kubenswrapper[4558]: I0120 18:04:32.259267 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vprhs\" (UniqueName: \"kubernetes.io/projected/7783d091-b00b-4836-b2e5-b67ed88fba86-kube-api-access-vprhs\") pod \"validate-network-edpm-compute-no-nodes-edpm-compute-no-nodgr7qx\" (UID: \"7783d091-b00b-4836-b2e5-b67ed88fba86\") " pod="openstack-kuttl-tests/validate-network-edpm-compute-no-nodes-edpm-compute-no-nodgr7qx" Jan 20 
18:04:32 crc kubenswrapper[4558]: I0120 18:04:32.259400 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/7783d091-b00b-4836-b2e5-b67ed88fba86-ssh-key-edpm-compute-no-nodes\") pod \"validate-network-edpm-compute-no-nodes-edpm-compute-no-nodgr7qx\" (UID: \"7783d091-b00b-4836-b2e5-b67ed88fba86\") " pod="openstack-kuttl-tests/validate-network-edpm-compute-no-nodes-edpm-compute-no-nodgr7qx" Jan 20 18:04:32 crc kubenswrapper[4558]: I0120 18:04:32.264025 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7783d091-b00b-4836-b2e5-b67ed88fba86-inventory\") pod \"validate-network-edpm-compute-no-nodes-edpm-compute-no-nodgr7qx\" (UID: \"7783d091-b00b-4836-b2e5-b67ed88fba86\") " pod="openstack-kuttl-tests/validate-network-edpm-compute-no-nodes-edpm-compute-no-nodgr7qx" Jan 20 18:04:32 crc kubenswrapper[4558]: I0120 18:04:32.264648 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/7783d091-b00b-4836-b2e5-b67ed88fba86-ssh-key-edpm-compute-no-nodes\") pod \"validate-network-edpm-compute-no-nodes-edpm-compute-no-nodgr7qx\" (UID: \"7783d091-b00b-4836-b2e5-b67ed88fba86\") " pod="openstack-kuttl-tests/validate-network-edpm-compute-no-nodes-edpm-compute-no-nodgr7qx" Jan 20 18:04:32 crc kubenswrapper[4558]: I0120 18:04:32.275755 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vprhs\" (UniqueName: \"kubernetes.io/projected/7783d091-b00b-4836-b2e5-b67ed88fba86-kube-api-access-vprhs\") pod \"validate-network-edpm-compute-no-nodes-edpm-compute-no-nodgr7qx\" (UID: \"7783d091-b00b-4836-b2e5-b67ed88fba86\") " pod="openstack-kuttl-tests/validate-network-edpm-compute-no-nodes-edpm-compute-no-nodgr7qx" Jan 20 18:04:32 crc kubenswrapper[4558]: I0120 18:04:32.361636 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/validate-network-edpm-compute-no-nodes-edpm-compute-no-nodgr7qx" Jan 20 18:04:32 crc kubenswrapper[4558]: I0120 18:04:32.761782 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/validate-network-edpm-compute-no-nodes-edpm-compute-no-nodgr7qx"] Jan 20 18:04:32 crc kubenswrapper[4558]: W0120 18:04:32.764033 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7783d091_b00b_4836_b2e5_b67ed88fba86.slice/crio-61fb2ae71b19d1bfab518201638dc72b97c7dbaae41ecaf69837d0e4d2b723b6 WatchSource:0}: Error finding container 61fb2ae71b19d1bfab518201638dc72b97c7dbaae41ecaf69837d0e4d2b723b6: Status 404 returned error can't find the container with id 61fb2ae71b19d1bfab518201638dc72b97c7dbaae41ecaf69837d0e4d2b723b6 Jan 20 18:04:33 crc kubenswrapper[4558]: I0120 18:04:33.013602 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/validate-network-edpm-compute-no-nodes-edpm-compute-no-nodgr7qx" event={"ID":"7783d091-b00b-4836-b2e5-b67ed88fba86","Type":"ContainerStarted","Data":"61fb2ae71b19d1bfab518201638dc72b97c7dbaae41ecaf69837d0e4d2b723b6"} Jan 20 18:04:34 crc kubenswrapper[4558]: I0120 18:04:34.025977 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/validate-network-edpm-compute-no-nodes-edpm-compute-no-nodgr7qx" event={"ID":"7783d091-b00b-4836-b2e5-b67ed88fba86","Type":"ContainerStarted","Data":"3aefef1f6fb33a2f41fd9e6a604a0537ae82ad6b586f21de3356bbe2136617bd"} Jan 20 18:04:34 crc kubenswrapper[4558]: I0120 18:04:34.045340 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/validate-network-edpm-compute-no-nodes-edpm-compute-no-nodgr7qx" podStartSLOduration=1.533677193 podStartE2EDuration="2.045322186s" podCreationTimestamp="2026-01-20 18:04:32 +0000 UTC" firstStartedPulling="2026-01-20 18:04:32.766834315 +0000 UTC m=+4966.527172283" lastFinishedPulling="2026-01-20 18:04:33.278479309 +0000 UTC m=+4967.038817276" observedRunningTime="2026-01-20 18:04:34.038518444 +0000 UTC m=+4967.798856411" watchObservedRunningTime="2026-01-20 18:04:34.045322186 +0000 UTC m=+4967.805660154" Jan 20 18:04:35 crc kubenswrapper[4558]: I0120 18:04:35.038770 4558 generic.go:334] "Generic (PLEG): container finished" podID="7783d091-b00b-4836-b2e5-b67ed88fba86" containerID="3aefef1f6fb33a2f41fd9e6a604a0537ae82ad6b586f21de3356bbe2136617bd" exitCode=0 Jan 20 18:04:35 crc kubenswrapper[4558]: I0120 18:04:35.038831 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/validate-network-edpm-compute-no-nodes-edpm-compute-no-nodgr7qx" event={"ID":"7783d091-b00b-4836-b2e5-b67ed88fba86","Type":"ContainerDied","Data":"3aefef1f6fb33a2f41fd9e6a604a0537ae82ad6b586f21de3356bbe2136617bd"} Jan 20 18:04:35 crc kubenswrapper[4558]: I0120 18:04:35.567233 4558 scope.go:117] "RemoveContainer" containerID="6b02d31ec11fe5435af988e35303a6f66b5afa5ef952ce5f7cc4b3cd5f4d9497" Jan 20 18:04:35 crc kubenswrapper[4558]: E0120 18:04:35.567582 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" 
Jan 20 18:04:36 crc kubenswrapper[4558]: I0120 18:04:36.299312 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/validate-network-edpm-compute-no-nodes-edpm-compute-no-nodgr7qx" Jan 20 18:04:36 crc kubenswrapper[4558]: I0120 18:04:36.420990 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vprhs\" (UniqueName: \"kubernetes.io/projected/7783d091-b00b-4836-b2e5-b67ed88fba86-kube-api-access-vprhs\") pod \"7783d091-b00b-4836-b2e5-b67ed88fba86\" (UID: \"7783d091-b00b-4836-b2e5-b67ed88fba86\") " Jan 20 18:04:36 crc kubenswrapper[4558]: I0120 18:04:36.421072 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/7783d091-b00b-4836-b2e5-b67ed88fba86-ssh-key-edpm-compute-no-nodes\") pod \"7783d091-b00b-4836-b2e5-b67ed88fba86\" (UID: \"7783d091-b00b-4836-b2e5-b67ed88fba86\") " Jan 20 18:04:36 crc kubenswrapper[4558]: I0120 18:04:36.421100 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7783d091-b00b-4836-b2e5-b67ed88fba86-inventory\") pod \"7783d091-b00b-4836-b2e5-b67ed88fba86\" (UID: \"7783d091-b00b-4836-b2e5-b67ed88fba86\") " Jan 20 18:04:36 crc kubenswrapper[4558]: I0120 18:04:36.793068 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7783d091-b00b-4836-b2e5-b67ed88fba86-kube-api-access-vprhs" (OuterVolumeSpecName: "kube-api-access-vprhs") pod "7783d091-b00b-4836-b2e5-b67ed88fba86" (UID: "7783d091-b00b-4836-b2e5-b67ed88fba86"). InnerVolumeSpecName "kube-api-access-vprhs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:04:36 crc kubenswrapper[4558]: I0120 18:04:36.807370 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7783d091-b00b-4836-b2e5-b67ed88fba86-ssh-key-edpm-compute-no-nodes" (OuterVolumeSpecName: "ssh-key-edpm-compute-no-nodes") pod "7783d091-b00b-4836-b2e5-b67ed88fba86" (UID: "7783d091-b00b-4836-b2e5-b67ed88fba86"). InnerVolumeSpecName "ssh-key-edpm-compute-no-nodes". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:04:36 crc kubenswrapper[4558]: I0120 18:04:36.807645 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7783d091-b00b-4836-b2e5-b67ed88fba86-inventory" (OuterVolumeSpecName: "inventory") pod "7783d091-b00b-4836-b2e5-b67ed88fba86" (UID: "7783d091-b00b-4836-b2e5-b67ed88fba86"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:04:36 crc kubenswrapper[4558]: I0120 18:04:36.830271 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vprhs\" (UniqueName: \"kubernetes.io/projected/7783d091-b00b-4836-b2e5-b67ed88fba86-kube-api-access-vprhs\") on node \"crc\" DevicePath \"\"" Jan 20 18:04:36 crc kubenswrapper[4558]: I0120 18:04:36.830306 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/7783d091-b00b-4836-b2e5-b67ed88fba86-ssh-key-edpm-compute-no-nodes\") on node \"crc\" DevicePath \"\"" Jan 20 18:04:36 crc kubenswrapper[4558]: I0120 18:04:36.830321 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7783d091-b00b-4836-b2e5-b67ed88fba86-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:04:37 crc kubenswrapper[4558]: I0120 18:04:37.070412 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/validate-network-edpm-compute-no-nodes-edpm-compute-no-nodgr7qx" event={"ID":"7783d091-b00b-4836-b2e5-b67ed88fba86","Type":"ContainerDied","Data":"61fb2ae71b19d1bfab518201638dc72b97c7dbaae41ecaf69837d0e4d2b723b6"} Jan 20 18:04:37 crc kubenswrapper[4558]: I0120 18:04:37.070499 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="61fb2ae71b19d1bfab518201638dc72b97c7dbaae41ecaf69837d0e4d2b723b6" Jan 20 18:04:37 crc kubenswrapper[4558]: I0120 18:04:37.070604 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/validate-network-edpm-compute-no-nodes-edpm-compute-no-nodgr7qx" Jan 20 18:04:37 crc kubenswrapper[4558]: I0120 18:04:37.113458 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/install-os-edpm-compute-no-nodes-edpm-compute-no-nodes-qmkd7"] Jan 20 18:04:37 crc kubenswrapper[4558]: E0120 18:04:37.113850 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7783d091-b00b-4836-b2e5-b67ed88fba86" containerName="validate-network-edpm-compute-no-nodes-edpm-compute-no-nodes" Jan 20 18:04:37 crc kubenswrapper[4558]: I0120 18:04:37.113881 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7783d091-b00b-4836-b2e5-b67ed88fba86" containerName="validate-network-edpm-compute-no-nodes-edpm-compute-no-nodes" Jan 20 18:04:37 crc kubenswrapper[4558]: I0120 18:04:37.114042 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="7783d091-b00b-4836-b2e5-b67ed88fba86" containerName="validate-network-edpm-compute-no-nodes-edpm-compute-no-nodes" Jan 20 18:04:37 crc kubenswrapper[4558]: I0120 18:04:37.114647 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/install-os-edpm-compute-no-nodes-edpm-compute-no-nodes-qmkd7" Jan 20 18:04:37 crc kubenswrapper[4558]: I0120 18:04:37.116573 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 18:04:37 crc kubenswrapper[4558]: I0120 18:04:37.116772 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 18:04:37 crc kubenswrapper[4558]: I0120 18:04:37.116804 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-edpm-compute-no-nodes" Jan 20 18:04:37 crc kubenswrapper[4558]: I0120 18:04:37.116919 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"edpm-compute-no-nodes-dockercfg-tljmj" Jan 20 18:04:37 crc kubenswrapper[4558]: I0120 18:04:37.120608 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/install-os-edpm-compute-no-nodes-edpm-compute-no-nodes-qmkd7"] Jan 20 18:04:37 crc kubenswrapper[4558]: I0120 18:04:37.236034 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b03f4166-30cb-4607-96b9-eeeb2c5c48f0-inventory\") pod \"install-os-edpm-compute-no-nodes-edpm-compute-no-nodes-qmkd7\" (UID: \"b03f4166-30cb-4607-96b9-eeeb2c5c48f0\") " pod="openstack-kuttl-tests/install-os-edpm-compute-no-nodes-edpm-compute-no-nodes-qmkd7" Jan 20 18:04:37 crc kubenswrapper[4558]: I0120 18:04:37.236098 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9mdnc\" (UniqueName: \"kubernetes.io/projected/b03f4166-30cb-4607-96b9-eeeb2c5c48f0-kube-api-access-9mdnc\") pod \"install-os-edpm-compute-no-nodes-edpm-compute-no-nodes-qmkd7\" (UID: \"b03f4166-30cb-4607-96b9-eeeb2c5c48f0\") " pod="openstack-kuttl-tests/install-os-edpm-compute-no-nodes-edpm-compute-no-nodes-qmkd7" Jan 20 18:04:37 crc kubenswrapper[4558]: I0120 18:04:37.236194 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/b03f4166-30cb-4607-96b9-eeeb2c5c48f0-ssh-key-edpm-compute-no-nodes\") pod \"install-os-edpm-compute-no-nodes-edpm-compute-no-nodes-qmkd7\" (UID: \"b03f4166-30cb-4607-96b9-eeeb2c5c48f0\") " pod="openstack-kuttl-tests/install-os-edpm-compute-no-nodes-edpm-compute-no-nodes-qmkd7" Jan 20 18:04:37 crc kubenswrapper[4558]: I0120 18:04:37.338063 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/b03f4166-30cb-4607-96b9-eeeb2c5c48f0-ssh-key-edpm-compute-no-nodes\") pod \"install-os-edpm-compute-no-nodes-edpm-compute-no-nodes-qmkd7\" (UID: \"b03f4166-30cb-4607-96b9-eeeb2c5c48f0\") " pod="openstack-kuttl-tests/install-os-edpm-compute-no-nodes-edpm-compute-no-nodes-qmkd7" Jan 20 18:04:37 crc kubenswrapper[4558]: I0120 18:04:37.338273 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b03f4166-30cb-4607-96b9-eeeb2c5c48f0-inventory\") pod \"install-os-edpm-compute-no-nodes-edpm-compute-no-nodes-qmkd7\" (UID: \"b03f4166-30cb-4607-96b9-eeeb2c5c48f0\") " pod="openstack-kuttl-tests/install-os-edpm-compute-no-nodes-edpm-compute-no-nodes-qmkd7" Jan 20 18:04:37 crc 
kubenswrapper[4558]: I0120 18:04:37.338317 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9mdnc\" (UniqueName: \"kubernetes.io/projected/b03f4166-30cb-4607-96b9-eeeb2c5c48f0-kube-api-access-9mdnc\") pod \"install-os-edpm-compute-no-nodes-edpm-compute-no-nodes-qmkd7\" (UID: \"b03f4166-30cb-4607-96b9-eeeb2c5c48f0\") " pod="openstack-kuttl-tests/install-os-edpm-compute-no-nodes-edpm-compute-no-nodes-qmkd7" Jan 20 18:04:37 crc kubenswrapper[4558]: I0120 18:04:37.342348 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b03f4166-30cb-4607-96b9-eeeb2c5c48f0-inventory\") pod \"install-os-edpm-compute-no-nodes-edpm-compute-no-nodes-qmkd7\" (UID: \"b03f4166-30cb-4607-96b9-eeeb2c5c48f0\") " pod="openstack-kuttl-tests/install-os-edpm-compute-no-nodes-edpm-compute-no-nodes-qmkd7" Jan 20 18:04:37 crc kubenswrapper[4558]: I0120 18:04:37.342578 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/b03f4166-30cb-4607-96b9-eeeb2c5c48f0-ssh-key-edpm-compute-no-nodes\") pod \"install-os-edpm-compute-no-nodes-edpm-compute-no-nodes-qmkd7\" (UID: \"b03f4166-30cb-4607-96b9-eeeb2c5c48f0\") " pod="openstack-kuttl-tests/install-os-edpm-compute-no-nodes-edpm-compute-no-nodes-qmkd7" Jan 20 18:04:37 crc kubenswrapper[4558]: I0120 18:04:37.353900 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9mdnc\" (UniqueName: \"kubernetes.io/projected/b03f4166-30cb-4607-96b9-eeeb2c5c48f0-kube-api-access-9mdnc\") pod \"install-os-edpm-compute-no-nodes-edpm-compute-no-nodes-qmkd7\" (UID: \"b03f4166-30cb-4607-96b9-eeeb2c5c48f0\") " pod="openstack-kuttl-tests/install-os-edpm-compute-no-nodes-edpm-compute-no-nodes-qmkd7" Jan 20 18:04:37 crc kubenswrapper[4558]: I0120 18:04:37.431288 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/install-os-edpm-compute-no-nodes-edpm-compute-no-nodes-qmkd7" Jan 20 18:04:37 crc kubenswrapper[4558]: I0120 18:04:37.806611 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/install-os-edpm-compute-no-nodes-edpm-compute-no-nodes-qmkd7"] Jan 20 18:04:38 crc kubenswrapper[4558]: I0120 18:04:38.080835 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/install-os-edpm-compute-no-nodes-edpm-compute-no-nodes-qmkd7" event={"ID":"b03f4166-30cb-4607-96b9-eeeb2c5c48f0","Type":"ContainerStarted","Data":"a89dc4b182f3aa5cba492e4b9e70beb726ab9583412b97a1d3d8b1192379e03b"} Jan 20 18:04:39 crc kubenswrapper[4558]: I0120 18:04:39.089543 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/install-os-edpm-compute-no-nodes-edpm-compute-no-nodes-qmkd7" event={"ID":"b03f4166-30cb-4607-96b9-eeeb2c5c48f0","Type":"ContainerStarted","Data":"1821ba1d87f40c241193772646ccf4c847153889128ed710a76f71ee8bc72037"} Jan 20 18:04:40 crc kubenswrapper[4558]: I0120 18:04:40.103706 4558 generic.go:334] "Generic (PLEG): container finished" podID="b03f4166-30cb-4607-96b9-eeeb2c5c48f0" containerID="1821ba1d87f40c241193772646ccf4c847153889128ed710a76f71ee8bc72037" exitCode=0 Jan 20 18:04:40 crc kubenswrapper[4558]: I0120 18:04:40.103817 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/install-os-edpm-compute-no-nodes-edpm-compute-no-nodes-qmkd7" event={"ID":"b03f4166-30cb-4607-96b9-eeeb2c5c48f0","Type":"ContainerDied","Data":"1821ba1d87f40c241193772646ccf4c847153889128ed710a76f71ee8bc72037"} Jan 20 18:04:41 crc kubenswrapper[4558]: I0120 18:04:41.331973 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/install-os-edpm-compute-no-nodes-edpm-compute-no-nodes-qmkd7" Jan 20 18:04:41 crc kubenswrapper[4558]: I0120 18:04:41.395226 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9mdnc\" (UniqueName: \"kubernetes.io/projected/b03f4166-30cb-4607-96b9-eeeb2c5c48f0-kube-api-access-9mdnc\") pod \"b03f4166-30cb-4607-96b9-eeeb2c5c48f0\" (UID: \"b03f4166-30cb-4607-96b9-eeeb2c5c48f0\") " Jan 20 18:04:41 crc kubenswrapper[4558]: I0120 18:04:41.395329 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b03f4166-30cb-4607-96b9-eeeb2c5c48f0-inventory\") pod \"b03f4166-30cb-4607-96b9-eeeb2c5c48f0\" (UID: \"b03f4166-30cb-4607-96b9-eeeb2c5c48f0\") " Jan 20 18:04:41 crc kubenswrapper[4558]: I0120 18:04:41.395413 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/b03f4166-30cb-4607-96b9-eeeb2c5c48f0-ssh-key-edpm-compute-no-nodes\") pod \"b03f4166-30cb-4607-96b9-eeeb2c5c48f0\" (UID: \"b03f4166-30cb-4607-96b9-eeeb2c5c48f0\") " Jan 20 18:04:41 crc kubenswrapper[4558]: I0120 18:04:41.409035 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b03f4166-30cb-4607-96b9-eeeb2c5c48f0-kube-api-access-9mdnc" (OuterVolumeSpecName: "kube-api-access-9mdnc") pod "b03f4166-30cb-4607-96b9-eeeb2c5c48f0" (UID: "b03f4166-30cb-4607-96b9-eeeb2c5c48f0"). InnerVolumeSpecName "kube-api-access-9mdnc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:04:41 crc kubenswrapper[4558]: I0120 18:04:41.413777 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b03f4166-30cb-4607-96b9-eeeb2c5c48f0-inventory" (OuterVolumeSpecName: "inventory") pod "b03f4166-30cb-4607-96b9-eeeb2c5c48f0" (UID: "b03f4166-30cb-4607-96b9-eeeb2c5c48f0"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:04:41 crc kubenswrapper[4558]: I0120 18:04:41.414031 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b03f4166-30cb-4607-96b9-eeeb2c5c48f0-ssh-key-edpm-compute-no-nodes" (OuterVolumeSpecName: "ssh-key-edpm-compute-no-nodes") pod "b03f4166-30cb-4607-96b9-eeeb2c5c48f0" (UID: "b03f4166-30cb-4607-96b9-eeeb2c5c48f0"). InnerVolumeSpecName "ssh-key-edpm-compute-no-nodes". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:04:41 crc kubenswrapper[4558]: I0120 18:04:41.497060 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b03f4166-30cb-4607-96b9-eeeb2c5c48f0-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:04:41 crc kubenswrapper[4558]: I0120 18:04:41.497133 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/b03f4166-30cb-4607-96b9-eeeb2c5c48f0-ssh-key-edpm-compute-no-nodes\") on node \"crc\" DevicePath \"\"" Jan 20 18:04:41 crc kubenswrapper[4558]: I0120 18:04:41.497179 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9mdnc\" (UniqueName: \"kubernetes.io/projected/b03f4166-30cb-4607-96b9-eeeb2c5c48f0-kube-api-access-9mdnc\") on node \"crc\" DevicePath \"\"" Jan 20 18:04:42 crc kubenswrapper[4558]: I0120 18:04:42.119687 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/install-os-edpm-compute-no-nodes-edpm-compute-no-nodes-qmkd7" event={"ID":"b03f4166-30cb-4607-96b9-eeeb2c5c48f0","Type":"ContainerDied","Data":"a89dc4b182f3aa5cba492e4b9e70beb726ab9583412b97a1d3d8b1192379e03b"} Jan 20 18:04:42 crc kubenswrapper[4558]: I0120 18:04:42.120023 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a89dc4b182f3aa5cba492e4b9e70beb726ab9583412b97a1d3d8b1192379e03b" Jan 20 18:04:42 crc kubenswrapper[4558]: I0120 18:04:42.119754 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/install-os-edpm-compute-no-nodes-edpm-compute-no-nodes-qmkd7" Jan 20 18:04:42 crc kubenswrapper[4558]: I0120 18:04:42.166360 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/configure-os-edpm-compute-no-nodes-edpm-compute-no-nodes-z7vb5"] Jan 20 18:04:42 crc kubenswrapper[4558]: E0120 18:04:42.166728 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b03f4166-30cb-4607-96b9-eeeb2c5c48f0" containerName="install-os-edpm-compute-no-nodes-edpm-compute-no-nodes" Jan 20 18:04:42 crc kubenswrapper[4558]: I0120 18:04:42.166748 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b03f4166-30cb-4607-96b9-eeeb2c5c48f0" containerName="install-os-edpm-compute-no-nodes-edpm-compute-no-nodes" Jan 20 18:04:42 crc kubenswrapper[4558]: I0120 18:04:42.166905 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b03f4166-30cb-4607-96b9-eeeb2c5c48f0" containerName="install-os-edpm-compute-no-nodes-edpm-compute-no-nodes" Jan 20 18:04:42 crc kubenswrapper[4558]: I0120 18:04:42.167512 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/configure-os-edpm-compute-no-nodes-edpm-compute-no-nodes-z7vb5" Jan 20 18:04:42 crc kubenswrapper[4558]: I0120 18:04:42.170950 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 18:04:42 crc kubenswrapper[4558]: I0120 18:04:42.171035 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 18:04:42 crc kubenswrapper[4558]: I0120 18:04:42.171403 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"edpm-compute-no-nodes-dockercfg-tljmj" Jan 20 18:04:42 crc kubenswrapper[4558]: I0120 18:04:42.171536 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-edpm-compute-no-nodes" Jan 20 18:04:42 crc kubenswrapper[4558]: I0120 18:04:42.175543 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/configure-os-edpm-compute-no-nodes-edpm-compute-no-nodes-z7vb5"] Jan 20 18:04:42 crc kubenswrapper[4558]: I0120 18:04:42.206302 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/973eb657-4c4d-42a9-a08d-9b29e9868beb-ssh-key-edpm-compute-no-nodes\") pod \"configure-os-edpm-compute-no-nodes-edpm-compute-no-nodes-z7vb5\" (UID: \"973eb657-4c4d-42a9-a08d-9b29e9868beb\") " pod="openstack-kuttl-tests/configure-os-edpm-compute-no-nodes-edpm-compute-no-nodes-z7vb5" Jan 20 18:04:42 crc kubenswrapper[4558]: I0120 18:04:42.206588 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/973eb657-4c4d-42a9-a08d-9b29e9868beb-inventory\") pod \"configure-os-edpm-compute-no-nodes-edpm-compute-no-nodes-z7vb5\" (UID: \"973eb657-4c4d-42a9-a08d-9b29e9868beb\") " pod="openstack-kuttl-tests/configure-os-edpm-compute-no-nodes-edpm-compute-no-nodes-z7vb5" Jan 20 18:04:42 crc kubenswrapper[4558]: I0120 18:04:42.206727 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mkzff\" (UniqueName: \"kubernetes.io/projected/973eb657-4c4d-42a9-a08d-9b29e9868beb-kube-api-access-mkzff\") pod 
\"configure-os-edpm-compute-no-nodes-edpm-compute-no-nodes-z7vb5\" (UID: \"973eb657-4c4d-42a9-a08d-9b29e9868beb\") " pod="openstack-kuttl-tests/configure-os-edpm-compute-no-nodes-edpm-compute-no-nodes-z7vb5" Jan 20 18:04:42 crc kubenswrapper[4558]: I0120 18:04:42.307881 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/973eb657-4c4d-42a9-a08d-9b29e9868beb-ssh-key-edpm-compute-no-nodes\") pod \"configure-os-edpm-compute-no-nodes-edpm-compute-no-nodes-z7vb5\" (UID: \"973eb657-4c4d-42a9-a08d-9b29e9868beb\") " pod="openstack-kuttl-tests/configure-os-edpm-compute-no-nodes-edpm-compute-no-nodes-z7vb5" Jan 20 18:04:42 crc kubenswrapper[4558]: I0120 18:04:42.307951 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/973eb657-4c4d-42a9-a08d-9b29e9868beb-inventory\") pod \"configure-os-edpm-compute-no-nodes-edpm-compute-no-nodes-z7vb5\" (UID: \"973eb657-4c4d-42a9-a08d-9b29e9868beb\") " pod="openstack-kuttl-tests/configure-os-edpm-compute-no-nodes-edpm-compute-no-nodes-z7vb5" Jan 20 18:04:42 crc kubenswrapper[4558]: I0120 18:04:42.307990 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mkzff\" (UniqueName: \"kubernetes.io/projected/973eb657-4c4d-42a9-a08d-9b29e9868beb-kube-api-access-mkzff\") pod \"configure-os-edpm-compute-no-nodes-edpm-compute-no-nodes-z7vb5\" (UID: \"973eb657-4c4d-42a9-a08d-9b29e9868beb\") " pod="openstack-kuttl-tests/configure-os-edpm-compute-no-nodes-edpm-compute-no-nodes-z7vb5" Jan 20 18:04:42 crc kubenswrapper[4558]: I0120 18:04:42.312322 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/973eb657-4c4d-42a9-a08d-9b29e9868beb-ssh-key-edpm-compute-no-nodes\") pod \"configure-os-edpm-compute-no-nodes-edpm-compute-no-nodes-z7vb5\" (UID: \"973eb657-4c4d-42a9-a08d-9b29e9868beb\") " pod="openstack-kuttl-tests/configure-os-edpm-compute-no-nodes-edpm-compute-no-nodes-z7vb5" Jan 20 18:04:42 crc kubenswrapper[4558]: I0120 18:04:42.312819 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/973eb657-4c4d-42a9-a08d-9b29e9868beb-inventory\") pod \"configure-os-edpm-compute-no-nodes-edpm-compute-no-nodes-z7vb5\" (UID: \"973eb657-4c4d-42a9-a08d-9b29e9868beb\") " pod="openstack-kuttl-tests/configure-os-edpm-compute-no-nodes-edpm-compute-no-nodes-z7vb5" Jan 20 18:04:42 crc kubenswrapper[4558]: I0120 18:04:42.324610 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mkzff\" (UniqueName: \"kubernetes.io/projected/973eb657-4c4d-42a9-a08d-9b29e9868beb-kube-api-access-mkzff\") pod \"configure-os-edpm-compute-no-nodes-edpm-compute-no-nodes-z7vb5\" (UID: \"973eb657-4c4d-42a9-a08d-9b29e9868beb\") " pod="openstack-kuttl-tests/configure-os-edpm-compute-no-nodes-edpm-compute-no-nodes-z7vb5" Jan 20 18:04:42 crc kubenswrapper[4558]: I0120 18:04:42.481417 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/configure-os-edpm-compute-no-nodes-edpm-compute-no-nodes-z7vb5" Jan 20 18:04:42 crc kubenswrapper[4558]: I0120 18:04:42.867910 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/configure-os-edpm-compute-no-nodes-edpm-compute-no-nodes-z7vb5"] Jan 20 18:04:42 crc kubenswrapper[4558]: W0120 18:04:42.870901 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod973eb657_4c4d_42a9_a08d_9b29e9868beb.slice/crio-f263a73b9c0cf5a510d20fad40d83b5f03e28759676b4ffd3495a19fd85b5d11 WatchSource:0}: Error finding container f263a73b9c0cf5a510d20fad40d83b5f03e28759676b4ffd3495a19fd85b5d11: Status 404 returned error can't find the container with id f263a73b9c0cf5a510d20fad40d83b5f03e28759676b4ffd3495a19fd85b5d11 Jan 20 18:04:43 crc kubenswrapper[4558]: I0120 18:04:43.129624 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/configure-os-edpm-compute-no-nodes-edpm-compute-no-nodes-z7vb5" event={"ID":"973eb657-4c4d-42a9-a08d-9b29e9868beb","Type":"ContainerStarted","Data":"f263a73b9c0cf5a510d20fad40d83b5f03e28759676b4ffd3495a19fd85b5d11"} Jan 20 18:04:44 crc kubenswrapper[4558]: I0120 18:04:44.149886 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/configure-os-edpm-compute-no-nodes-edpm-compute-no-nodes-z7vb5" event={"ID":"973eb657-4c4d-42a9-a08d-9b29e9868beb","Type":"ContainerStarted","Data":"b4696041322202470e8a3951ad333ac41c5429c650b672e321ea7213c3833688"} Jan 20 18:04:44 crc kubenswrapper[4558]: I0120 18:04:44.164265 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/configure-os-edpm-compute-no-nodes-edpm-compute-no-nodes-z7vb5" podStartSLOduration=1.560059911 podStartE2EDuration="2.164252252s" podCreationTimestamp="2026-01-20 18:04:42 +0000 UTC" firstStartedPulling="2026-01-20 18:04:42.873216218 +0000 UTC m=+4976.633554185" lastFinishedPulling="2026-01-20 18:04:43.477408559 +0000 UTC m=+4977.237746526" observedRunningTime="2026-01-20 18:04:44.162178051 +0000 UTC m=+4977.922516009" watchObservedRunningTime="2026-01-20 18:04:44.164252252 +0000 UTC m=+4977.924590218" Jan 20 18:04:45 crc kubenswrapper[4558]: I0120 18:04:45.161873 4558 generic.go:334] "Generic (PLEG): container finished" podID="973eb657-4c4d-42a9-a08d-9b29e9868beb" containerID="b4696041322202470e8a3951ad333ac41c5429c650b672e321ea7213c3833688" exitCode=0 Jan 20 18:04:45 crc kubenswrapper[4558]: I0120 18:04:45.161934 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/configure-os-edpm-compute-no-nodes-edpm-compute-no-nodes-z7vb5" event={"ID":"973eb657-4c4d-42a9-a08d-9b29e9868beb","Type":"ContainerDied","Data":"b4696041322202470e8a3951ad333ac41c5429c650b672e321ea7213c3833688"} Jan 20 18:04:46 crc kubenswrapper[4558]: I0120 18:04:46.417393 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/configure-os-edpm-compute-no-nodes-edpm-compute-no-nodes-z7vb5" Jan 20 18:04:46 crc kubenswrapper[4558]: I0120 18:04:46.463817 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/973eb657-4c4d-42a9-a08d-9b29e9868beb-inventory\") pod \"973eb657-4c4d-42a9-a08d-9b29e9868beb\" (UID: \"973eb657-4c4d-42a9-a08d-9b29e9868beb\") " Jan 20 18:04:46 crc kubenswrapper[4558]: I0120 18:04:46.463910 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/973eb657-4c4d-42a9-a08d-9b29e9868beb-ssh-key-edpm-compute-no-nodes\") pod \"973eb657-4c4d-42a9-a08d-9b29e9868beb\" (UID: \"973eb657-4c4d-42a9-a08d-9b29e9868beb\") " Jan 20 18:04:46 crc kubenswrapper[4558]: I0120 18:04:46.464012 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mkzff\" (UniqueName: \"kubernetes.io/projected/973eb657-4c4d-42a9-a08d-9b29e9868beb-kube-api-access-mkzff\") pod \"973eb657-4c4d-42a9-a08d-9b29e9868beb\" (UID: \"973eb657-4c4d-42a9-a08d-9b29e9868beb\") " Jan 20 18:04:46 crc kubenswrapper[4558]: I0120 18:04:46.469503 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/973eb657-4c4d-42a9-a08d-9b29e9868beb-kube-api-access-mkzff" (OuterVolumeSpecName: "kube-api-access-mkzff") pod "973eb657-4c4d-42a9-a08d-9b29e9868beb" (UID: "973eb657-4c4d-42a9-a08d-9b29e9868beb"). InnerVolumeSpecName "kube-api-access-mkzff". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:04:46 crc kubenswrapper[4558]: I0120 18:04:46.482056 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/973eb657-4c4d-42a9-a08d-9b29e9868beb-ssh-key-edpm-compute-no-nodes" (OuterVolumeSpecName: "ssh-key-edpm-compute-no-nodes") pod "973eb657-4c4d-42a9-a08d-9b29e9868beb" (UID: "973eb657-4c4d-42a9-a08d-9b29e9868beb"). InnerVolumeSpecName "ssh-key-edpm-compute-no-nodes". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:04:46 crc kubenswrapper[4558]: I0120 18:04:46.483603 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/973eb657-4c4d-42a9-a08d-9b29e9868beb-inventory" (OuterVolumeSpecName: "inventory") pod "973eb657-4c4d-42a9-a08d-9b29e9868beb" (UID: "973eb657-4c4d-42a9-a08d-9b29e9868beb"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:04:46 crc kubenswrapper[4558]: I0120 18:04:46.567199 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/973eb657-4c4d-42a9-a08d-9b29e9868beb-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:04:46 crc kubenswrapper[4558]: I0120 18:04:46.567243 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/973eb657-4c4d-42a9-a08d-9b29e9868beb-ssh-key-edpm-compute-no-nodes\") on node \"crc\" DevicePath \"\"" Jan 20 18:04:46 crc kubenswrapper[4558]: I0120 18:04:46.567264 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mkzff\" (UniqueName: \"kubernetes.io/projected/973eb657-4c4d-42a9-a08d-9b29e9868beb-kube-api-access-mkzff\") on node \"crc\" DevicePath \"\"" Jan 20 18:04:47 crc kubenswrapper[4558]: I0120 18:04:47.183019 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/configure-os-edpm-compute-no-nodes-edpm-compute-no-nodes-z7vb5" event={"ID":"973eb657-4c4d-42a9-a08d-9b29e9868beb","Type":"ContainerDied","Data":"f263a73b9c0cf5a510d20fad40d83b5f03e28759676b4ffd3495a19fd85b5d11"} Jan 20 18:04:47 crc kubenswrapper[4558]: I0120 18:04:47.183549 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f263a73b9c0cf5a510d20fad40d83b5f03e28759676b4ffd3495a19fd85b5d11" Jan 20 18:04:47 crc kubenswrapper[4558]: I0120 18:04:47.183307 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/configure-os-edpm-compute-no-nodes-edpm-compute-no-nodes-z7vb5" Jan 20 18:04:47 crc kubenswrapper[4558]: I0120 18:04:47.220290 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/run-os-edpm-compute-no-nodes-edpm-compute-no-nodes-lvhc9"] Jan 20 18:04:47 crc kubenswrapper[4558]: E0120 18:04:47.220590 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="973eb657-4c4d-42a9-a08d-9b29e9868beb" containerName="configure-os-edpm-compute-no-nodes-edpm-compute-no-nodes" Jan 20 18:04:47 crc kubenswrapper[4558]: I0120 18:04:47.220609 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="973eb657-4c4d-42a9-a08d-9b29e9868beb" containerName="configure-os-edpm-compute-no-nodes-edpm-compute-no-nodes" Jan 20 18:04:47 crc kubenswrapper[4558]: I0120 18:04:47.220743 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="973eb657-4c4d-42a9-a08d-9b29e9868beb" containerName="configure-os-edpm-compute-no-nodes-edpm-compute-no-nodes" Jan 20 18:04:47 crc kubenswrapper[4558]: I0120 18:04:47.221209 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/run-os-edpm-compute-no-nodes-edpm-compute-no-nodes-lvhc9" Jan 20 18:04:47 crc kubenswrapper[4558]: I0120 18:04:47.222962 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 18:04:47 crc kubenswrapper[4558]: I0120 18:04:47.223235 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"edpm-compute-no-nodes-dockercfg-tljmj" Jan 20 18:04:47 crc kubenswrapper[4558]: I0120 18:04:47.226144 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-edpm-compute-no-nodes" Jan 20 18:04:47 crc kubenswrapper[4558]: I0120 18:04:47.226507 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 18:04:47 crc kubenswrapper[4558]: I0120 18:04:47.234976 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/run-os-edpm-compute-no-nodes-edpm-compute-no-nodes-lvhc9"] Jan 20 18:04:47 crc kubenswrapper[4558]: I0120 18:04:47.275738 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/98619227-1a55-4062-9c6f-f0157fe8bde4-inventory\") pod \"run-os-edpm-compute-no-nodes-edpm-compute-no-nodes-lvhc9\" (UID: \"98619227-1a55-4062-9c6f-f0157fe8bde4\") " pod="openstack-kuttl-tests/run-os-edpm-compute-no-nodes-edpm-compute-no-nodes-lvhc9" Jan 20 18:04:47 crc kubenswrapper[4558]: I0120 18:04:47.275929 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-52grs\" (UniqueName: \"kubernetes.io/projected/98619227-1a55-4062-9c6f-f0157fe8bde4-kube-api-access-52grs\") pod \"run-os-edpm-compute-no-nodes-edpm-compute-no-nodes-lvhc9\" (UID: \"98619227-1a55-4062-9c6f-f0157fe8bde4\") " pod="openstack-kuttl-tests/run-os-edpm-compute-no-nodes-edpm-compute-no-nodes-lvhc9" Jan 20 18:04:47 crc kubenswrapper[4558]: I0120 18:04:47.275979 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/98619227-1a55-4062-9c6f-f0157fe8bde4-ssh-key-edpm-compute-no-nodes\") pod \"run-os-edpm-compute-no-nodes-edpm-compute-no-nodes-lvhc9\" (UID: \"98619227-1a55-4062-9c6f-f0157fe8bde4\") " pod="openstack-kuttl-tests/run-os-edpm-compute-no-nodes-edpm-compute-no-nodes-lvhc9" Jan 20 18:04:47 crc kubenswrapper[4558]: I0120 18:04:47.377747 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-52grs\" (UniqueName: \"kubernetes.io/projected/98619227-1a55-4062-9c6f-f0157fe8bde4-kube-api-access-52grs\") pod \"run-os-edpm-compute-no-nodes-edpm-compute-no-nodes-lvhc9\" (UID: \"98619227-1a55-4062-9c6f-f0157fe8bde4\") " pod="openstack-kuttl-tests/run-os-edpm-compute-no-nodes-edpm-compute-no-nodes-lvhc9" Jan 20 18:04:47 crc kubenswrapper[4558]: I0120 18:04:47.377820 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/98619227-1a55-4062-9c6f-f0157fe8bde4-ssh-key-edpm-compute-no-nodes\") pod \"run-os-edpm-compute-no-nodes-edpm-compute-no-nodes-lvhc9\" (UID: \"98619227-1a55-4062-9c6f-f0157fe8bde4\") " pod="openstack-kuttl-tests/run-os-edpm-compute-no-nodes-edpm-compute-no-nodes-lvhc9" Jan 20 18:04:47 crc kubenswrapper[4558]: I0120 
18:04:47.377905 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/98619227-1a55-4062-9c6f-f0157fe8bde4-inventory\") pod \"run-os-edpm-compute-no-nodes-edpm-compute-no-nodes-lvhc9\" (UID: \"98619227-1a55-4062-9c6f-f0157fe8bde4\") " pod="openstack-kuttl-tests/run-os-edpm-compute-no-nodes-edpm-compute-no-nodes-lvhc9" Jan 20 18:04:47 crc kubenswrapper[4558]: I0120 18:04:47.382826 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/98619227-1a55-4062-9c6f-f0157fe8bde4-ssh-key-edpm-compute-no-nodes\") pod \"run-os-edpm-compute-no-nodes-edpm-compute-no-nodes-lvhc9\" (UID: \"98619227-1a55-4062-9c6f-f0157fe8bde4\") " pod="openstack-kuttl-tests/run-os-edpm-compute-no-nodes-edpm-compute-no-nodes-lvhc9" Jan 20 18:04:47 crc kubenswrapper[4558]: I0120 18:04:47.382979 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/98619227-1a55-4062-9c6f-f0157fe8bde4-inventory\") pod \"run-os-edpm-compute-no-nodes-edpm-compute-no-nodes-lvhc9\" (UID: \"98619227-1a55-4062-9c6f-f0157fe8bde4\") " pod="openstack-kuttl-tests/run-os-edpm-compute-no-nodes-edpm-compute-no-nodes-lvhc9" Jan 20 18:04:47 crc kubenswrapper[4558]: I0120 18:04:47.393274 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-52grs\" (UniqueName: \"kubernetes.io/projected/98619227-1a55-4062-9c6f-f0157fe8bde4-kube-api-access-52grs\") pod \"run-os-edpm-compute-no-nodes-edpm-compute-no-nodes-lvhc9\" (UID: \"98619227-1a55-4062-9c6f-f0157fe8bde4\") " pod="openstack-kuttl-tests/run-os-edpm-compute-no-nodes-edpm-compute-no-nodes-lvhc9" Jan 20 18:04:47 crc kubenswrapper[4558]: I0120 18:04:47.534665 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/run-os-edpm-compute-no-nodes-edpm-compute-no-nodes-lvhc9" Jan 20 18:04:47 crc kubenswrapper[4558]: I0120 18:04:47.732290 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/run-os-edpm-compute-no-nodes-edpm-compute-no-nodes-lvhc9"] Jan 20 18:04:47 crc kubenswrapper[4558]: W0120 18:04:47.737769 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod98619227_1a55_4062_9c6f_f0157fe8bde4.slice/crio-c3bbd78d214e730081df072b6dcf9ad04c5a12398b98c4da25e753c7255e3dd5 WatchSource:0}: Error finding container c3bbd78d214e730081df072b6dcf9ad04c5a12398b98c4da25e753c7255e3dd5: Status 404 returned error can't find the container with id c3bbd78d214e730081df072b6dcf9ad04c5a12398b98c4da25e753c7255e3dd5 Jan 20 18:04:48 crc kubenswrapper[4558]: I0120 18:04:48.191377 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/run-os-edpm-compute-no-nodes-edpm-compute-no-nodes-lvhc9" event={"ID":"98619227-1a55-4062-9c6f-f0157fe8bde4","Type":"ContainerStarted","Data":"c3bbd78d214e730081df072b6dcf9ad04c5a12398b98c4da25e753c7255e3dd5"} Jan 20 18:04:48 crc kubenswrapper[4558]: I0120 18:04:48.566320 4558 scope.go:117] "RemoveContainer" containerID="6b02d31ec11fe5435af988e35303a6f66b5afa5ef952ce5f7cc4b3cd5f4d9497" Jan 20 18:04:48 crc kubenswrapper[4558]: E0120 18:04:48.566922 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:04:49 crc kubenswrapper[4558]: I0120 18:04:49.202690 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/run-os-edpm-compute-no-nodes-edpm-compute-no-nodes-lvhc9" event={"ID":"98619227-1a55-4062-9c6f-f0157fe8bde4","Type":"ContainerStarted","Data":"895d0ce06ac44b93eb9c9c2c176abc2e5341b42281a2cbfffa4e072ddbe5ca99"} Jan 20 18:04:49 crc kubenswrapper[4558]: I0120 18:04:49.217515 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/run-os-edpm-compute-no-nodes-edpm-compute-no-nodes-lvhc9" podStartSLOduration=1.589733754 podStartE2EDuration="2.217499549s" podCreationTimestamp="2026-01-20 18:04:47 +0000 UTC" firstStartedPulling="2026-01-20 18:04:47.7397891 +0000 UTC m=+4981.500127067" lastFinishedPulling="2026-01-20 18:04:48.367554905 +0000 UTC m=+4982.127892862" observedRunningTime="2026-01-20 18:04:49.21538845 +0000 UTC m=+4982.975726417" watchObservedRunningTime="2026-01-20 18:04:49.217499549 +0000 UTC m=+4982.977837516" Jan 20 18:04:50 crc kubenswrapper[4558]: I0120 18:04:50.214802 4558 generic.go:334] "Generic (PLEG): container finished" podID="98619227-1a55-4062-9c6f-f0157fe8bde4" containerID="895d0ce06ac44b93eb9c9c2c176abc2e5341b42281a2cbfffa4e072ddbe5ca99" exitCode=0 Jan 20 18:04:50 crc kubenswrapper[4558]: I0120 18:04:50.214897 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/run-os-edpm-compute-no-nodes-edpm-compute-no-nodes-lvhc9" event={"ID":"98619227-1a55-4062-9c6f-f0157fe8bde4","Type":"ContainerDied","Data":"895d0ce06ac44b93eb9c9c2c176abc2e5341b42281a2cbfffa4e072ddbe5ca99"} Jan 20 18:04:51 crc kubenswrapper[4558]: I0120 
18:04:51.461357 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/run-os-edpm-compute-no-nodes-edpm-compute-no-nodes-lvhc9" Jan 20 18:04:51 crc kubenswrapper[4558]: I0120 18:04:51.646017 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/98619227-1a55-4062-9c6f-f0157fe8bde4-inventory\") pod \"98619227-1a55-4062-9c6f-f0157fe8bde4\" (UID: \"98619227-1a55-4062-9c6f-f0157fe8bde4\") " Jan 20 18:04:51 crc kubenswrapper[4558]: I0120 18:04:51.646237 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/98619227-1a55-4062-9c6f-f0157fe8bde4-ssh-key-edpm-compute-no-nodes\") pod \"98619227-1a55-4062-9c6f-f0157fe8bde4\" (UID: \"98619227-1a55-4062-9c6f-f0157fe8bde4\") " Jan 20 18:04:51 crc kubenswrapper[4558]: I0120 18:04:51.646415 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-52grs\" (UniqueName: \"kubernetes.io/projected/98619227-1a55-4062-9c6f-f0157fe8bde4-kube-api-access-52grs\") pod \"98619227-1a55-4062-9c6f-f0157fe8bde4\" (UID: \"98619227-1a55-4062-9c6f-f0157fe8bde4\") " Jan 20 18:04:51 crc kubenswrapper[4558]: I0120 18:04:51.652581 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/98619227-1a55-4062-9c6f-f0157fe8bde4-kube-api-access-52grs" (OuterVolumeSpecName: "kube-api-access-52grs") pod "98619227-1a55-4062-9c6f-f0157fe8bde4" (UID: "98619227-1a55-4062-9c6f-f0157fe8bde4"). InnerVolumeSpecName "kube-api-access-52grs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:04:51 crc kubenswrapper[4558]: I0120 18:04:51.666877 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/98619227-1a55-4062-9c6f-f0157fe8bde4-inventory" (OuterVolumeSpecName: "inventory") pod "98619227-1a55-4062-9c6f-f0157fe8bde4" (UID: "98619227-1a55-4062-9c6f-f0157fe8bde4"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:04:51 crc kubenswrapper[4558]: I0120 18:04:51.668154 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/98619227-1a55-4062-9c6f-f0157fe8bde4-ssh-key-edpm-compute-no-nodes" (OuterVolumeSpecName: "ssh-key-edpm-compute-no-nodes") pod "98619227-1a55-4062-9c6f-f0157fe8bde4" (UID: "98619227-1a55-4062-9c6f-f0157fe8bde4"). InnerVolumeSpecName "ssh-key-edpm-compute-no-nodes". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:04:51 crc kubenswrapper[4558]: I0120 18:04:51.748446 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/98619227-1a55-4062-9c6f-f0157fe8bde4-ssh-key-edpm-compute-no-nodes\") on node \"crc\" DevicePath \"\"" Jan 20 18:04:51 crc kubenswrapper[4558]: I0120 18:04:51.748485 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-52grs\" (UniqueName: \"kubernetes.io/projected/98619227-1a55-4062-9c6f-f0157fe8bde4-kube-api-access-52grs\") on node \"crc\" DevicePath \"\"" Jan 20 18:04:51 crc kubenswrapper[4558]: I0120 18:04:51.748499 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/98619227-1a55-4062-9c6f-f0157fe8bde4-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:04:52 crc kubenswrapper[4558]: I0120 18:04:52.236639 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/run-os-edpm-compute-no-nodes-edpm-compute-no-nodes-lvhc9" event={"ID":"98619227-1a55-4062-9c6f-f0157fe8bde4","Type":"ContainerDied","Data":"c3bbd78d214e730081df072b6dcf9ad04c5a12398b98c4da25e753c7255e3dd5"} Jan 20 18:04:52 crc kubenswrapper[4558]: I0120 18:04:52.236690 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c3bbd78d214e730081df072b6dcf9ad04c5a12398b98c4da25e753c7255e3dd5" Jan 20 18:04:52 crc kubenswrapper[4558]: I0120 18:04:52.236725 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/run-os-edpm-compute-no-nodes-edpm-compute-no-nodes-lvhc9" Jan 20 18:04:52 crc kubenswrapper[4558]: I0120 18:04:52.283274 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd"] Jan 20 18:04:52 crc kubenswrapper[4558]: E0120 18:04:52.283903 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="98619227-1a55-4062-9c6f-f0157fe8bde4" containerName="run-os-edpm-compute-no-nodes-edpm-compute-no-nodes" Jan 20 18:04:52 crc kubenswrapper[4558]: I0120 18:04:52.283995 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="98619227-1a55-4062-9c6f-f0157fe8bde4" containerName="run-os-edpm-compute-no-nodes-edpm-compute-no-nodes" Jan 20 18:04:52 crc kubenswrapper[4558]: I0120 18:04:52.284231 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="98619227-1a55-4062-9c6f-f0157fe8bde4" containerName="run-os-edpm-compute-no-nodes-edpm-compute-no-nodes" Jan 20 18:04:52 crc kubenswrapper[4558]: I0120 18:04:52.284821 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd" Jan 20 18:04:52 crc kubenswrapper[4558]: I0120 18:04:52.286537 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-edpm-compute-no-nodes" Jan 20 18:04:52 crc kubenswrapper[4558]: I0120 18:04:52.289234 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 18:04:52 crc kubenswrapper[4558]: I0120 18:04:52.289564 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"combined-ca-bundle" Jan 20 18:04:52 crc kubenswrapper[4558]: I0120 18:04:52.289703 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"edpm-compute-no-nodes-dockercfg-tljmj" Jan 20 18:04:52 crc kubenswrapper[4558]: I0120 18:04:52.289833 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 18:04:52 crc kubenswrapper[4558]: I0120 18:04:52.295773 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd"] Jan 20 18:04:52 crc kubenswrapper[4558]: I0120 18:04:52.356951 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/224d64cb-74ea-4d58-8e45-c537cd02101e-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd\" (UID: \"224d64cb-74ea-4d58-8e45-c537cd02101e\") " pod="openstack-kuttl-tests/install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd" Jan 20 18:04:52 crc kubenswrapper[4558]: I0120 18:04:52.357002 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/224d64cb-74ea-4d58-8e45-c537cd02101e-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd\" (UID: \"224d64cb-74ea-4d58-8e45-c537cd02101e\") " pod="openstack-kuttl-tests/install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd" Jan 20 18:04:52 crc kubenswrapper[4558]: I0120 18:04:52.357119 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/224d64cb-74ea-4d58-8e45-c537cd02101e-nova-combined-ca-bundle\") pod \"install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd\" (UID: \"224d64cb-74ea-4d58-8e45-c537cd02101e\") " pod="openstack-kuttl-tests/install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd" Jan 20 18:04:52 crc kubenswrapper[4558]: I0120 18:04:52.357233 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/224d64cb-74ea-4d58-8e45-c537cd02101e-neutron-dhcp-combined-ca-bundle\") pod \"install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd\" (UID: \"224d64cb-74ea-4d58-8e45-c537cd02101e\") " pod="openstack-kuttl-tests/install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd" Jan 20 18:04:52 crc kubenswrapper[4558]: I0120 18:04:52.357420 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/224d64cb-74ea-4d58-8e45-c537cd02101e-ovn-combined-ca-bundle\") pod \"install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd\" (UID: \"224d64cb-74ea-4d58-8e45-c537cd02101e\") " pod="openstack-kuttl-tests/install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd" Jan 20 18:04:52 crc kubenswrapper[4558]: I0120 18:04:52.357504 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/224d64cb-74ea-4d58-8e45-c537cd02101e-neutron-sriov-combined-ca-bundle\") pod \"install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd\" (UID: \"224d64cb-74ea-4d58-8e45-c537cd02101e\") " pod="openstack-kuttl-tests/install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd" Jan 20 18:04:52 crc kubenswrapper[4558]: I0120 18:04:52.357609 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/224d64cb-74ea-4d58-8e45-c537cd02101e-neutron-ovn-combined-ca-bundle\") pod \"install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd\" (UID: \"224d64cb-74ea-4d58-8e45-c537cd02101e\") " pod="openstack-kuttl-tests/install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd" Jan 20 18:04:52 crc kubenswrapper[4558]: I0120 18:04:52.357678 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/224d64cb-74ea-4d58-8e45-c537cd02101e-inventory\") pod \"install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd\" (UID: \"224d64cb-74ea-4d58-8e45-c537cd02101e\") " pod="openstack-kuttl-tests/install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd" Jan 20 18:04:52 crc kubenswrapper[4558]: I0120 18:04:52.357727 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/224d64cb-74ea-4d58-8e45-c537cd02101e-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd\" (UID: \"224d64cb-74ea-4d58-8e45-c537cd02101e\") " pod="openstack-kuttl-tests/install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd" Jan 20 18:04:52 crc kubenswrapper[4558]: I0120 18:04:52.357755 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pmbzv\" (UniqueName: \"kubernetes.io/projected/224d64cb-74ea-4d58-8e45-c537cd02101e-kube-api-access-pmbzv\") pod \"install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd\" (UID: \"224d64cb-74ea-4d58-8e45-c537cd02101e\") " pod="openstack-kuttl-tests/install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd" Jan 20 18:04:52 crc kubenswrapper[4558]: I0120 18:04:52.357851 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/224d64cb-74ea-4d58-8e45-c537cd02101e-ssh-key-edpm-compute-no-nodes\") pod \"install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd\" (UID: \"224d64cb-74ea-4d58-8e45-c537cd02101e\") " pod="openstack-kuttl-tests/install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd" Jan 20 18:04:52 crc kubenswrapper[4558]: I0120 18:04:52.458547 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/224d64cb-74ea-4d58-8e45-c537cd02101e-ovn-combined-ca-bundle\") pod \"install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd\" (UID: \"224d64cb-74ea-4d58-8e45-c537cd02101e\") " pod="openstack-kuttl-tests/install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd" Jan 20 18:04:52 crc kubenswrapper[4558]: I0120 18:04:52.458594 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/224d64cb-74ea-4d58-8e45-c537cd02101e-neutron-sriov-combined-ca-bundle\") pod \"install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd\" (UID: \"224d64cb-74ea-4d58-8e45-c537cd02101e\") " pod="openstack-kuttl-tests/install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd" Jan 20 18:04:52 crc kubenswrapper[4558]: I0120 18:04:52.458628 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/224d64cb-74ea-4d58-8e45-c537cd02101e-neutron-ovn-combined-ca-bundle\") pod \"install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd\" (UID: \"224d64cb-74ea-4d58-8e45-c537cd02101e\") " pod="openstack-kuttl-tests/install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd" Jan 20 18:04:52 crc kubenswrapper[4558]: I0120 18:04:52.458652 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/224d64cb-74ea-4d58-8e45-c537cd02101e-inventory\") pod \"install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd\" (UID: \"224d64cb-74ea-4d58-8e45-c537cd02101e\") " pod="openstack-kuttl-tests/install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd" Jan 20 18:04:52 crc kubenswrapper[4558]: I0120 18:04:52.458678 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/224d64cb-74ea-4d58-8e45-c537cd02101e-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd\" (UID: \"224d64cb-74ea-4d58-8e45-c537cd02101e\") " pod="openstack-kuttl-tests/install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd" Jan 20 18:04:52 crc kubenswrapper[4558]: I0120 18:04:52.458696 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pmbzv\" (UniqueName: \"kubernetes.io/projected/224d64cb-74ea-4d58-8e45-c537cd02101e-kube-api-access-pmbzv\") pod \"install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd\" (UID: \"224d64cb-74ea-4d58-8e45-c537cd02101e\") " pod="openstack-kuttl-tests/install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd" Jan 20 18:04:52 crc kubenswrapper[4558]: I0120 18:04:52.458714 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/224d64cb-74ea-4d58-8e45-c537cd02101e-ssh-key-edpm-compute-no-nodes\") pod \"install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd\" (UID: \"224d64cb-74ea-4d58-8e45-c537cd02101e\") " pod="openstack-kuttl-tests/install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd" Jan 20 18:04:52 crc kubenswrapper[4558]: I0120 18:04:52.458732 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/224d64cb-74ea-4d58-8e45-c537cd02101e-neutron-metadata-combined-ca-bundle\") pod 
\"install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd\" (UID: \"224d64cb-74ea-4d58-8e45-c537cd02101e\") " pod="openstack-kuttl-tests/install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd" Jan 20 18:04:52 crc kubenswrapper[4558]: I0120 18:04:52.458748 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/224d64cb-74ea-4d58-8e45-c537cd02101e-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd\" (UID: \"224d64cb-74ea-4d58-8e45-c537cd02101e\") " pod="openstack-kuttl-tests/install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd" Jan 20 18:04:52 crc kubenswrapper[4558]: I0120 18:04:52.458774 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/224d64cb-74ea-4d58-8e45-c537cd02101e-nova-combined-ca-bundle\") pod \"install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd\" (UID: \"224d64cb-74ea-4d58-8e45-c537cd02101e\") " pod="openstack-kuttl-tests/install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd" Jan 20 18:04:52 crc kubenswrapper[4558]: I0120 18:04:52.458802 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/224d64cb-74ea-4d58-8e45-c537cd02101e-neutron-dhcp-combined-ca-bundle\") pod \"install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd\" (UID: \"224d64cb-74ea-4d58-8e45-c537cd02101e\") " pod="openstack-kuttl-tests/install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd" Jan 20 18:04:52 crc kubenswrapper[4558]: I0120 18:04:52.462000 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/224d64cb-74ea-4d58-8e45-c537cd02101e-neutron-dhcp-combined-ca-bundle\") pod \"install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd\" (UID: \"224d64cb-74ea-4d58-8e45-c537cd02101e\") " pod="openstack-kuttl-tests/install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd" Jan 20 18:04:52 crc kubenswrapper[4558]: I0120 18:04:52.463210 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/224d64cb-74ea-4d58-8e45-c537cd02101e-ssh-key-edpm-compute-no-nodes\") pod \"install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd\" (UID: \"224d64cb-74ea-4d58-8e45-c537cd02101e\") " pod="openstack-kuttl-tests/install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd" Jan 20 18:04:52 crc kubenswrapper[4558]: I0120 18:04:52.463260 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/224d64cb-74ea-4d58-8e45-c537cd02101e-inventory\") pod \"install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd\" (UID: \"224d64cb-74ea-4d58-8e45-c537cd02101e\") " pod="openstack-kuttl-tests/install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd" Jan 20 18:04:52 crc kubenswrapper[4558]: I0120 18:04:52.463260 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/224d64cb-74ea-4d58-8e45-c537cd02101e-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd\" (UID: \"224d64cb-74ea-4d58-8e45-c537cd02101e\") " 
pod="openstack-kuttl-tests/install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd" Jan 20 18:04:52 crc kubenswrapper[4558]: I0120 18:04:52.464432 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/224d64cb-74ea-4d58-8e45-c537cd02101e-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd\" (UID: \"224d64cb-74ea-4d58-8e45-c537cd02101e\") " pod="openstack-kuttl-tests/install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd" Jan 20 18:04:52 crc kubenswrapper[4558]: I0120 18:04:52.464735 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/224d64cb-74ea-4d58-8e45-c537cd02101e-ovn-combined-ca-bundle\") pod \"install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd\" (UID: \"224d64cb-74ea-4d58-8e45-c537cd02101e\") " pod="openstack-kuttl-tests/install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd" Jan 20 18:04:52 crc kubenswrapper[4558]: I0120 18:04:52.464757 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/224d64cb-74ea-4d58-8e45-c537cd02101e-neutron-ovn-combined-ca-bundle\") pod \"install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd\" (UID: \"224d64cb-74ea-4d58-8e45-c537cd02101e\") " pod="openstack-kuttl-tests/install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd" Jan 20 18:04:52 crc kubenswrapper[4558]: I0120 18:04:52.464890 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/224d64cb-74ea-4d58-8e45-c537cd02101e-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd\" (UID: \"224d64cb-74ea-4d58-8e45-c537cd02101e\") " pod="openstack-kuttl-tests/install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd" Jan 20 18:04:52 crc kubenswrapper[4558]: I0120 18:04:52.465814 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/224d64cb-74ea-4d58-8e45-c537cd02101e-neutron-sriov-combined-ca-bundle\") pod \"install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd\" (UID: \"224d64cb-74ea-4d58-8e45-c537cd02101e\") " pod="openstack-kuttl-tests/install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd" Jan 20 18:04:52 crc kubenswrapper[4558]: I0120 18:04:52.465812 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/224d64cb-74ea-4d58-8e45-c537cd02101e-nova-combined-ca-bundle\") pod \"install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd\" (UID: \"224d64cb-74ea-4d58-8e45-c537cd02101e\") " pod="openstack-kuttl-tests/install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd" Jan 20 18:04:52 crc kubenswrapper[4558]: I0120 18:04:52.472575 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pmbzv\" (UniqueName: \"kubernetes.io/projected/224d64cb-74ea-4d58-8e45-c537cd02101e-kube-api-access-pmbzv\") pod \"install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd\" (UID: \"224d64cb-74ea-4d58-8e45-c537cd02101e\") " pod="openstack-kuttl-tests/install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd" Jan 20 18:04:52 crc kubenswrapper[4558]: I0120 
18:04:52.597979 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd" Jan 20 18:04:52 crc kubenswrapper[4558]: I0120 18:04:52.992486 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd"] Jan 20 18:04:52 crc kubenswrapper[4558]: W0120 18:04:52.996035 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod224d64cb_74ea_4d58_8e45_c537cd02101e.slice/crio-7b38911be0655a0b6ada8bd95aedfcdf095c0664b35c9b930c4e9cf82ac391c2 WatchSource:0}: Error finding container 7b38911be0655a0b6ada8bd95aedfcdf095c0664b35c9b930c4e9cf82ac391c2: Status 404 returned error can't find the container with id 7b38911be0655a0b6ada8bd95aedfcdf095c0664b35c9b930c4e9cf82ac391c2 Jan 20 18:04:53 crc kubenswrapper[4558]: I0120 18:04:53.245827 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd" event={"ID":"224d64cb-74ea-4d58-8e45-c537cd02101e","Type":"ContainerStarted","Data":"7b38911be0655a0b6ada8bd95aedfcdf095c0664b35c9b930c4e9cf82ac391c2"} Jan 20 18:04:54 crc kubenswrapper[4558]: I0120 18:04:54.257936 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd" event={"ID":"224d64cb-74ea-4d58-8e45-c537cd02101e","Type":"ContainerStarted","Data":"bd147f3029d01aebcf7375d84d676da29d4589fa9d6a96becd9f8ed0e2cc18ee"} Jan 20 18:04:54 crc kubenswrapper[4558]: I0120 18:04:54.277260 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd" podStartSLOduration=1.792597048 podStartE2EDuration="2.277243791s" podCreationTimestamp="2026-01-20 18:04:52 +0000 UTC" firstStartedPulling="2026-01-20 18:04:52.998091491 +0000 UTC m=+4986.758429458" lastFinishedPulling="2026-01-20 18:04:53.482738233 +0000 UTC m=+4987.243076201" observedRunningTime="2026-01-20 18:04:54.271967602 +0000 UTC m=+4988.032305568" watchObservedRunningTime="2026-01-20 18:04:54.277243791 +0000 UTC m=+4988.037581759" Jan 20 18:04:55 crc kubenswrapper[4558]: I0120 18:04:55.268246 4558 generic.go:334] "Generic (PLEG): container finished" podID="224d64cb-74ea-4d58-8e45-c537cd02101e" containerID="bd147f3029d01aebcf7375d84d676da29d4589fa9d6a96becd9f8ed0e2cc18ee" exitCode=0 Jan 20 18:04:55 crc kubenswrapper[4558]: I0120 18:04:55.268389 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd" event={"ID":"224d64cb-74ea-4d58-8e45-c537cd02101e","Type":"ContainerDied","Data":"bd147f3029d01aebcf7375d84d676da29d4589fa9d6a96becd9f8ed0e2cc18ee"} Jan 20 18:04:56 crc kubenswrapper[4558]: I0120 18:04:56.531831 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd" Jan 20 18:04:56 crc kubenswrapper[4558]: I0120 18:04:56.724259 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/224d64cb-74ea-4d58-8e45-c537cd02101e-neutron-dhcp-combined-ca-bundle\") pod \"224d64cb-74ea-4d58-8e45-c537cd02101e\" (UID: \"224d64cb-74ea-4d58-8e45-c537cd02101e\") " Jan 20 18:04:56 crc kubenswrapper[4558]: I0120 18:04:56.724345 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/224d64cb-74ea-4d58-8e45-c537cd02101e-neutron-sriov-combined-ca-bundle\") pod \"224d64cb-74ea-4d58-8e45-c537cd02101e\" (UID: \"224d64cb-74ea-4d58-8e45-c537cd02101e\") " Jan 20 18:04:56 crc kubenswrapper[4558]: I0120 18:04:56.724380 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/224d64cb-74ea-4d58-8e45-c537cd02101e-ssh-key-edpm-compute-no-nodes\") pod \"224d64cb-74ea-4d58-8e45-c537cd02101e\" (UID: \"224d64cb-74ea-4d58-8e45-c537cd02101e\") " Jan 20 18:04:56 crc kubenswrapper[4558]: I0120 18:04:56.724413 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/224d64cb-74ea-4d58-8e45-c537cd02101e-ovn-combined-ca-bundle\") pod \"224d64cb-74ea-4d58-8e45-c537cd02101e\" (UID: \"224d64cb-74ea-4d58-8e45-c537cd02101e\") " Jan 20 18:04:56 crc kubenswrapper[4558]: I0120 18:04:56.724627 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/224d64cb-74ea-4d58-8e45-c537cd02101e-neutron-ovn-combined-ca-bundle\") pod \"224d64cb-74ea-4d58-8e45-c537cd02101e\" (UID: \"224d64cb-74ea-4d58-8e45-c537cd02101e\") " Jan 20 18:04:56 crc kubenswrapper[4558]: I0120 18:04:56.724677 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/224d64cb-74ea-4d58-8e45-c537cd02101e-bootstrap-combined-ca-bundle\") pod \"224d64cb-74ea-4d58-8e45-c537cd02101e\" (UID: \"224d64cb-74ea-4d58-8e45-c537cd02101e\") " Jan 20 18:04:56 crc kubenswrapper[4558]: I0120 18:04:56.724790 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/224d64cb-74ea-4d58-8e45-c537cd02101e-inventory\") pod \"224d64cb-74ea-4d58-8e45-c537cd02101e\" (UID: \"224d64cb-74ea-4d58-8e45-c537cd02101e\") " Jan 20 18:04:56 crc kubenswrapper[4558]: I0120 18:04:56.724831 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/224d64cb-74ea-4d58-8e45-c537cd02101e-nova-combined-ca-bundle\") pod \"224d64cb-74ea-4d58-8e45-c537cd02101e\" (UID: \"224d64cb-74ea-4d58-8e45-c537cd02101e\") " Jan 20 18:04:56 crc kubenswrapper[4558]: I0120 18:04:56.724889 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pmbzv\" (UniqueName: \"kubernetes.io/projected/224d64cb-74ea-4d58-8e45-c537cd02101e-kube-api-access-pmbzv\") pod \"224d64cb-74ea-4d58-8e45-c537cd02101e\" (UID: \"224d64cb-74ea-4d58-8e45-c537cd02101e\") " Jan 20 18:04:56 crc kubenswrapper[4558]: I0120 18:04:56.725791 4558 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/224d64cb-74ea-4d58-8e45-c537cd02101e-libvirt-combined-ca-bundle\") pod \"224d64cb-74ea-4d58-8e45-c537cd02101e\" (UID: \"224d64cb-74ea-4d58-8e45-c537cd02101e\") " Jan 20 18:04:56 crc kubenswrapper[4558]: I0120 18:04:56.725887 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/224d64cb-74ea-4d58-8e45-c537cd02101e-neutron-metadata-combined-ca-bundle\") pod \"224d64cb-74ea-4d58-8e45-c537cd02101e\" (UID: \"224d64cb-74ea-4d58-8e45-c537cd02101e\") " Jan 20 18:04:56 crc kubenswrapper[4558]: I0120 18:04:56.996430 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/224d64cb-74ea-4d58-8e45-c537cd02101e-neutron-sriov-combined-ca-bundle" (OuterVolumeSpecName: "neutron-sriov-combined-ca-bundle") pod "224d64cb-74ea-4d58-8e45-c537cd02101e" (UID: "224d64cb-74ea-4d58-8e45-c537cd02101e"). InnerVolumeSpecName "neutron-sriov-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:04:56 crc kubenswrapper[4558]: I0120 18:04:56.996478 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/224d64cb-74ea-4d58-8e45-c537cd02101e-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "224d64cb-74ea-4d58-8e45-c537cd02101e" (UID: "224d64cb-74ea-4d58-8e45-c537cd02101e"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:04:56 crc kubenswrapper[4558]: I0120 18:04:56.996989 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/224d64cb-74ea-4d58-8e45-c537cd02101e-neutron-dhcp-combined-ca-bundle" (OuterVolumeSpecName: "neutron-dhcp-combined-ca-bundle") pod "224d64cb-74ea-4d58-8e45-c537cd02101e" (UID: "224d64cb-74ea-4d58-8e45-c537cd02101e"). InnerVolumeSpecName "neutron-dhcp-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:04:56 crc kubenswrapper[4558]: I0120 18:04:56.997344 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/224d64cb-74ea-4d58-8e45-c537cd02101e-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "224d64cb-74ea-4d58-8e45-c537cd02101e" (UID: "224d64cb-74ea-4d58-8e45-c537cd02101e"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:04:56 crc kubenswrapper[4558]: I0120 18:04:56.997342 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/224d64cb-74ea-4d58-8e45-c537cd02101e-kube-api-access-pmbzv" (OuterVolumeSpecName: "kube-api-access-pmbzv") pod "224d64cb-74ea-4d58-8e45-c537cd02101e" (UID: "224d64cb-74ea-4d58-8e45-c537cd02101e"). InnerVolumeSpecName "kube-api-access-pmbzv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:04:56 crc kubenswrapper[4558]: I0120 18:04:56.997537 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/224d64cb-74ea-4d58-8e45-c537cd02101e-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "224d64cb-74ea-4d58-8e45-c537cd02101e" (UID: "224d64cb-74ea-4d58-8e45-c537cd02101e"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:04:56 crc kubenswrapper[4558]: I0120 18:04:56.997900 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/224d64cb-74ea-4d58-8e45-c537cd02101e-neutron-ovn-combined-ca-bundle" (OuterVolumeSpecName: "neutron-ovn-combined-ca-bundle") pod "224d64cb-74ea-4d58-8e45-c537cd02101e" (UID: "224d64cb-74ea-4d58-8e45-c537cd02101e"). InnerVolumeSpecName "neutron-ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:04:57 crc kubenswrapper[4558]: I0120 18:04:57.006302 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/224d64cb-74ea-4d58-8e45-c537cd02101e-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "224d64cb-74ea-4d58-8e45-c537cd02101e" (UID: "224d64cb-74ea-4d58-8e45-c537cd02101e"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:04:57 crc kubenswrapper[4558]: I0120 18:04:57.006890 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/224d64cb-74ea-4d58-8e45-c537cd02101e-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "224d64cb-74ea-4d58-8e45-c537cd02101e" (UID: "224d64cb-74ea-4d58-8e45-c537cd02101e"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:04:57 crc kubenswrapper[4558]: I0120 18:04:57.025247 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/224d64cb-74ea-4d58-8e45-c537cd02101e-ssh-key-edpm-compute-no-nodes" (OuterVolumeSpecName: "ssh-key-edpm-compute-no-nodes") pod "224d64cb-74ea-4d58-8e45-c537cd02101e" (UID: "224d64cb-74ea-4d58-8e45-c537cd02101e"). InnerVolumeSpecName "ssh-key-edpm-compute-no-nodes". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:04:57 crc kubenswrapper[4558]: I0120 18:04:57.030276 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/224d64cb-74ea-4d58-8e45-c537cd02101e-inventory" (OuterVolumeSpecName: "inventory") pod "224d64cb-74ea-4d58-8e45-c537cd02101e" (UID: "224d64cb-74ea-4d58-8e45-c537cd02101e"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:04:57 crc kubenswrapper[4558]: I0120 18:04:57.031603 4558 reconciler_common.go:293] "Volume detached for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/224d64cb-74ea-4d58-8e45-c537cd02101e-neutron-dhcp-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:04:57 crc kubenswrapper[4558]: I0120 18:04:57.031627 4558 reconciler_common.go:293] "Volume detached for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/224d64cb-74ea-4d58-8e45-c537cd02101e-neutron-sriov-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:04:57 crc kubenswrapper[4558]: I0120 18:04:57.031639 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/224d64cb-74ea-4d58-8e45-c537cd02101e-ssh-key-edpm-compute-no-nodes\") on node \"crc\" DevicePath \"\"" Jan 20 18:04:57 crc kubenswrapper[4558]: I0120 18:04:57.031653 4558 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/224d64cb-74ea-4d58-8e45-c537cd02101e-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:04:57 crc kubenswrapper[4558]: I0120 18:04:57.031663 4558 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/224d64cb-74ea-4d58-8e45-c537cd02101e-neutron-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:04:57 crc kubenswrapper[4558]: I0120 18:04:57.031672 4558 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/224d64cb-74ea-4d58-8e45-c537cd02101e-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:04:57 crc kubenswrapper[4558]: I0120 18:04:57.031683 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/224d64cb-74ea-4d58-8e45-c537cd02101e-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:04:57 crc kubenswrapper[4558]: I0120 18:04:57.031693 4558 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/224d64cb-74ea-4d58-8e45-c537cd02101e-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:04:57 crc kubenswrapper[4558]: I0120 18:04:57.031703 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pmbzv\" (UniqueName: \"kubernetes.io/projected/224d64cb-74ea-4d58-8e45-c537cd02101e-kube-api-access-pmbzv\") on node \"crc\" DevicePath \"\"" Jan 20 18:04:57 crc kubenswrapper[4558]: I0120 18:04:57.031711 4558 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/224d64cb-74ea-4d58-8e45-c537cd02101e-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:04:57 crc kubenswrapper[4558]: I0120 18:04:57.031720 4558 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/224d64cb-74ea-4d58-8e45-c537cd02101e-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:04:57 crc kubenswrapper[4558]: I0120 18:04:57.291643 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd" 
event={"ID":"224d64cb-74ea-4d58-8e45-c537cd02101e","Type":"ContainerDied","Data":"7b38911be0655a0b6ada8bd95aedfcdf095c0664b35c9b930c4e9cf82ac391c2"} Jan 20 18:04:57 crc kubenswrapper[4558]: I0120 18:04:57.291709 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7b38911be0655a0b6ada8bd95aedfcdf095c0664b35c9b930c4e9cf82ac391c2" Jan 20 18:04:57 crc kubenswrapper[4558]: I0120 18:04:57.291803 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd" Jan 20 18:04:57 crc kubenswrapper[4558]: I0120 18:04:57.366538 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-82r5j"] Jan 20 18:04:57 crc kubenswrapper[4558]: E0120 18:04:57.366927 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="224d64cb-74ea-4d58-8e45-c537cd02101e" containerName="install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes" Jan 20 18:04:57 crc kubenswrapper[4558]: I0120 18:04:57.366950 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="224d64cb-74ea-4d58-8e45-c537cd02101e" containerName="install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes" Jan 20 18:04:57 crc kubenswrapper[4558]: I0120 18:04:57.367128 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="224d64cb-74ea-4d58-8e45-c537cd02101e" containerName="install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes" Jan 20 18:04:57 crc kubenswrapper[4558]: I0120 18:04:57.367719 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-82r5j" Jan 20 18:04:57 crc kubenswrapper[4558]: I0120 18:04:57.371343 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"edpm-compute-no-nodes-dockercfg-tljmj" Jan 20 18:04:57 crc kubenswrapper[4558]: I0120 18:04:57.371343 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-edpm-compute-no-nodes" Jan 20 18:04:57 crc kubenswrapper[4558]: I0120 18:04:57.371965 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovncontroller-config" Jan 20 18:04:57 crc kubenswrapper[4558]: I0120 18:04:57.373302 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"combined-ca-bundle" Jan 20 18:04:57 crc kubenswrapper[4558]: I0120 18:04:57.373321 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 18:04:57 crc kubenswrapper[4558]: I0120 18:04:57.374775 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 18:04:57 crc kubenswrapper[4558]: I0120 18:04:57.388120 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-82r5j"] Jan 20 18:04:57 crc kubenswrapper[4558]: I0120 18:04:57.539216 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/efb0584b-776b-4a1d-8b88-c4c126e9cb00-ovncontroller-config-0\") pod \"ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-82r5j\" (UID: \"efb0584b-776b-4a1d-8b88-c4c126e9cb00\") " 
pod="openstack-kuttl-tests/ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-82r5j" Jan 20 18:04:57 crc kubenswrapper[4558]: I0120 18:04:57.539323 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/efb0584b-776b-4a1d-8b88-c4c126e9cb00-ssh-key-edpm-compute-no-nodes\") pod \"ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-82r5j\" (UID: \"efb0584b-776b-4a1d-8b88-c4c126e9cb00\") " pod="openstack-kuttl-tests/ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-82r5j" Jan 20 18:04:57 crc kubenswrapper[4558]: I0120 18:04:57.539824 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/efb0584b-776b-4a1d-8b88-c4c126e9cb00-inventory\") pod \"ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-82r5j\" (UID: \"efb0584b-776b-4a1d-8b88-c4c126e9cb00\") " pod="openstack-kuttl-tests/ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-82r5j" Jan 20 18:04:57 crc kubenswrapper[4558]: I0120 18:04:57.539920 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fqbtb\" (UniqueName: \"kubernetes.io/projected/efb0584b-776b-4a1d-8b88-c4c126e9cb00-kube-api-access-fqbtb\") pod \"ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-82r5j\" (UID: \"efb0584b-776b-4a1d-8b88-c4c126e9cb00\") " pod="openstack-kuttl-tests/ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-82r5j" Jan 20 18:04:57 crc kubenswrapper[4558]: I0120 18:04:57.540037 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/efb0584b-776b-4a1d-8b88-c4c126e9cb00-ovn-combined-ca-bundle\") pod \"ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-82r5j\" (UID: \"efb0584b-776b-4a1d-8b88-c4c126e9cb00\") " pod="openstack-kuttl-tests/ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-82r5j" Jan 20 18:04:57 crc kubenswrapper[4558]: I0120 18:04:57.642075 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/efb0584b-776b-4a1d-8b88-c4c126e9cb00-ovncontroller-config-0\") pod \"ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-82r5j\" (UID: \"efb0584b-776b-4a1d-8b88-c4c126e9cb00\") " pod="openstack-kuttl-tests/ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-82r5j" Jan 20 18:04:57 crc kubenswrapper[4558]: I0120 18:04:57.642151 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/efb0584b-776b-4a1d-8b88-c4c126e9cb00-ssh-key-edpm-compute-no-nodes\") pod \"ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-82r5j\" (UID: \"efb0584b-776b-4a1d-8b88-c4c126e9cb00\") " pod="openstack-kuttl-tests/ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-82r5j" Jan 20 18:04:57 crc kubenswrapper[4558]: I0120 18:04:57.642208 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/efb0584b-776b-4a1d-8b88-c4c126e9cb00-inventory\") pod \"ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-82r5j\" (UID: \"efb0584b-776b-4a1d-8b88-c4c126e9cb00\") " pod="openstack-kuttl-tests/ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-82r5j" Jan 20 18:04:57 crc kubenswrapper[4558]: I0120 18:04:57.642245 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-fqbtb\" (UniqueName: \"kubernetes.io/projected/efb0584b-776b-4a1d-8b88-c4c126e9cb00-kube-api-access-fqbtb\") pod \"ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-82r5j\" (UID: \"efb0584b-776b-4a1d-8b88-c4c126e9cb00\") " pod="openstack-kuttl-tests/ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-82r5j" Jan 20 18:04:57 crc kubenswrapper[4558]: I0120 18:04:57.642287 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/efb0584b-776b-4a1d-8b88-c4c126e9cb00-ovn-combined-ca-bundle\") pod \"ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-82r5j\" (UID: \"efb0584b-776b-4a1d-8b88-c4c126e9cb00\") " pod="openstack-kuttl-tests/ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-82r5j" Jan 20 18:04:57 crc kubenswrapper[4558]: I0120 18:04:57.643375 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/efb0584b-776b-4a1d-8b88-c4c126e9cb00-ovncontroller-config-0\") pod \"ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-82r5j\" (UID: \"efb0584b-776b-4a1d-8b88-c4c126e9cb00\") " pod="openstack-kuttl-tests/ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-82r5j" Jan 20 18:04:57 crc kubenswrapper[4558]: I0120 18:04:57.647252 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/efb0584b-776b-4a1d-8b88-c4c126e9cb00-inventory\") pod \"ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-82r5j\" (UID: \"efb0584b-776b-4a1d-8b88-c4c126e9cb00\") " pod="openstack-kuttl-tests/ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-82r5j" Jan 20 18:04:57 crc kubenswrapper[4558]: I0120 18:04:57.649653 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/efb0584b-776b-4a1d-8b88-c4c126e9cb00-ssh-key-edpm-compute-no-nodes\") pod \"ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-82r5j\" (UID: \"efb0584b-776b-4a1d-8b88-c4c126e9cb00\") " pod="openstack-kuttl-tests/ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-82r5j" Jan 20 18:04:57 crc kubenswrapper[4558]: I0120 18:04:57.651625 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/efb0584b-776b-4a1d-8b88-c4c126e9cb00-ovn-combined-ca-bundle\") pod \"ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-82r5j\" (UID: \"efb0584b-776b-4a1d-8b88-c4c126e9cb00\") " pod="openstack-kuttl-tests/ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-82r5j" Jan 20 18:04:57 crc kubenswrapper[4558]: I0120 18:04:57.662597 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fqbtb\" (UniqueName: \"kubernetes.io/projected/efb0584b-776b-4a1d-8b88-c4c126e9cb00-kube-api-access-fqbtb\") pod \"ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-82r5j\" (UID: \"efb0584b-776b-4a1d-8b88-c4c126e9cb00\") " pod="openstack-kuttl-tests/ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-82r5j" Jan 20 18:04:57 crc kubenswrapper[4558]: I0120 18:04:57.685721 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-82r5j" Jan 20 18:04:58 crc kubenswrapper[4558]: I0120 18:04:58.080760 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-82r5j"] Jan 20 18:04:58 crc kubenswrapper[4558]: W0120 18:04:58.085019 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podefb0584b_776b_4a1d_8b88_c4c126e9cb00.slice/crio-2eafb2aa22237ccdc76a1644874760ae80a75014427e3666186d46a4af04eeb8 WatchSource:0}: Error finding container 2eafb2aa22237ccdc76a1644874760ae80a75014427e3666186d46a4af04eeb8: Status 404 returned error can't find the container with id 2eafb2aa22237ccdc76a1644874760ae80a75014427e3666186d46a4af04eeb8 Jan 20 18:04:58 crc kubenswrapper[4558]: I0120 18:04:58.300525 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-82r5j" event={"ID":"efb0584b-776b-4a1d-8b88-c4c126e9cb00","Type":"ContainerStarted","Data":"2eafb2aa22237ccdc76a1644874760ae80a75014427e3666186d46a4af04eeb8"} Jan 20 18:04:59 crc kubenswrapper[4558]: I0120 18:04:59.311268 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-82r5j" event={"ID":"efb0584b-776b-4a1d-8b88-c4c126e9cb00","Type":"ContainerStarted","Data":"d04be593f095fca90b98ce9ff32afc8b0eba6d0b7dab93ce693dbf4f1dea98e7"} Jan 20 18:04:59 crc kubenswrapper[4558]: I0120 18:04:59.332379 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-82r5j" podStartSLOduration=1.8206197309999999 podStartE2EDuration="2.332351728s" podCreationTimestamp="2026-01-20 18:04:57 +0000 UTC" firstStartedPulling="2026-01-20 18:04:58.089179126 +0000 UTC m=+4991.849517094" lastFinishedPulling="2026-01-20 18:04:58.600911124 +0000 UTC m=+4992.361249091" observedRunningTime="2026-01-20 18:04:59.325617806 +0000 UTC m=+4993.085955773" watchObservedRunningTime="2026-01-20 18:04:59.332351728 +0000 UTC m=+4993.092689694" Jan 20 18:04:59 crc kubenswrapper[4558]: I0120 18:04:59.566855 4558 scope.go:117] "RemoveContainer" containerID="6b02d31ec11fe5435af988e35303a6f66b5afa5ef952ce5f7cc4b3cd5f4d9497" Jan 20 18:04:59 crc kubenswrapper[4558]: E0120 18:04:59.567080 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:05:00 crc kubenswrapper[4558]: I0120 18:05:00.324188 4558 generic.go:334] "Generic (PLEG): container finished" podID="efb0584b-776b-4a1d-8b88-c4c126e9cb00" containerID="d04be593f095fca90b98ce9ff32afc8b0eba6d0b7dab93ce693dbf4f1dea98e7" exitCode=0 Jan 20 18:05:00 crc kubenswrapper[4558]: I0120 18:05:00.324271 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-82r5j" event={"ID":"efb0584b-776b-4a1d-8b88-c4c126e9cb00","Type":"ContainerDied","Data":"d04be593f095fca90b98ce9ff32afc8b0eba6d0b7dab93ce693dbf4f1dea98e7"} Jan 20 18:05:01 crc kubenswrapper[4558]: I0120 
18:05:01.566289 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-82r5j" Jan 20 18:05:01 crc kubenswrapper[4558]: I0120 18:05:01.599623 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/efb0584b-776b-4a1d-8b88-c4c126e9cb00-ovn-combined-ca-bundle\") pod \"efb0584b-776b-4a1d-8b88-c4c126e9cb00\" (UID: \"efb0584b-776b-4a1d-8b88-c4c126e9cb00\") " Jan 20 18:05:01 crc kubenswrapper[4558]: I0120 18:05:01.599676 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/efb0584b-776b-4a1d-8b88-c4c126e9cb00-ssh-key-edpm-compute-no-nodes\") pod \"efb0584b-776b-4a1d-8b88-c4c126e9cb00\" (UID: \"efb0584b-776b-4a1d-8b88-c4c126e9cb00\") " Jan 20 18:05:01 crc kubenswrapper[4558]: I0120 18:05:01.599735 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqbtb\" (UniqueName: \"kubernetes.io/projected/efb0584b-776b-4a1d-8b88-c4c126e9cb00-kube-api-access-fqbtb\") pod \"efb0584b-776b-4a1d-8b88-c4c126e9cb00\" (UID: \"efb0584b-776b-4a1d-8b88-c4c126e9cb00\") " Jan 20 18:05:01 crc kubenswrapper[4558]: I0120 18:05:01.599899 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/efb0584b-776b-4a1d-8b88-c4c126e9cb00-inventory\") pod \"efb0584b-776b-4a1d-8b88-c4c126e9cb00\" (UID: \"efb0584b-776b-4a1d-8b88-c4c126e9cb00\") " Jan 20 18:05:01 crc kubenswrapper[4558]: I0120 18:05:01.599934 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/efb0584b-776b-4a1d-8b88-c4c126e9cb00-ovncontroller-config-0\") pod \"efb0584b-776b-4a1d-8b88-c4c126e9cb00\" (UID: \"efb0584b-776b-4a1d-8b88-c4c126e9cb00\") " Jan 20 18:05:01 crc kubenswrapper[4558]: I0120 18:05:01.604438 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efb0584b-776b-4a1d-8b88-c4c126e9cb00-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "efb0584b-776b-4a1d-8b88-c4c126e9cb00" (UID: "efb0584b-776b-4a1d-8b88-c4c126e9cb00"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:05:01 crc kubenswrapper[4558]: I0120 18:05:01.604971 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efb0584b-776b-4a1d-8b88-c4c126e9cb00-kube-api-access-fqbtb" (OuterVolumeSpecName: "kube-api-access-fqbtb") pod "efb0584b-776b-4a1d-8b88-c4c126e9cb00" (UID: "efb0584b-776b-4a1d-8b88-c4c126e9cb00"). InnerVolumeSpecName "kube-api-access-fqbtb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:05:01 crc kubenswrapper[4558]: E0120 18:05:01.615778 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/efb0584b-776b-4a1d-8b88-c4c126e9cb00-ssh-key-edpm-compute-no-nodes podName:efb0584b-776b-4a1d-8b88-c4c126e9cb00 nodeName:}" failed. No retries permitted until 2026-01-20 18:05:02.115753836 +0000 UTC m=+4995.876091803 (durationBeforeRetry 500ms). 
Error: error cleaning subPath mounts for volume "ssh-key-edpm-compute-no-nodes" (UniqueName: "kubernetes.io/secret/efb0584b-776b-4a1d-8b88-c4c126e9cb00-ssh-key-edpm-compute-no-nodes") pod "efb0584b-776b-4a1d-8b88-c4c126e9cb00" (UID: "efb0584b-776b-4a1d-8b88-c4c126e9cb00") : error deleting /var/lib/kubelet/pods/efb0584b-776b-4a1d-8b88-c4c126e9cb00/volume-subpaths: remove /var/lib/kubelet/pods/efb0584b-776b-4a1d-8b88-c4c126e9cb00/volume-subpaths: no such file or directory Jan 20 18:05:01 crc kubenswrapper[4558]: E0120 18:05:01.615816 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/efb0584b-776b-4a1d-8b88-c4c126e9cb00-ovncontroller-config-0 podName:efb0584b-776b-4a1d-8b88-c4c126e9cb00 nodeName:}" failed. No retries permitted until 2026-01-20 18:05:02.11580921 +0000 UTC m=+4995.876147177 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "ovncontroller-config-0" (UniqueName: "kubernetes.io/configmap/efb0584b-776b-4a1d-8b88-c4c126e9cb00-ovncontroller-config-0") pod "efb0584b-776b-4a1d-8b88-c4c126e9cb00" (UID: "efb0584b-776b-4a1d-8b88-c4c126e9cb00") : error deleting /var/lib/kubelet/pods/efb0584b-776b-4a1d-8b88-c4c126e9cb00/volume-subpaths: remove /var/lib/kubelet/pods/efb0584b-776b-4a1d-8b88-c4c126e9cb00/volume-subpaths: no such file or directory Jan 20 18:05:01 crc kubenswrapper[4558]: I0120 18:05:01.617429 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efb0584b-776b-4a1d-8b88-c4c126e9cb00-inventory" (OuterVolumeSpecName: "inventory") pod "efb0584b-776b-4a1d-8b88-c4c126e9cb00" (UID: "efb0584b-776b-4a1d-8b88-c4c126e9cb00"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:05:01 crc kubenswrapper[4558]: I0120 18:05:01.702376 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/efb0584b-776b-4a1d-8b88-c4c126e9cb00-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:05:01 crc kubenswrapper[4558]: I0120 18:05:01.702412 4558 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/efb0584b-776b-4a1d-8b88-c4c126e9cb00-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:05:01 crc kubenswrapper[4558]: I0120 18:05:01.702427 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqbtb\" (UniqueName: \"kubernetes.io/projected/efb0584b-776b-4a1d-8b88-c4c126e9cb00-kube-api-access-fqbtb\") on node \"crc\" DevicePath \"\"" Jan 20 18:05:02 crc kubenswrapper[4558]: I0120 18:05:02.209333 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/efb0584b-776b-4a1d-8b88-c4c126e9cb00-ssh-key-edpm-compute-no-nodes\") pod \"efb0584b-776b-4a1d-8b88-c4c126e9cb00\" (UID: \"efb0584b-776b-4a1d-8b88-c4c126e9cb00\") " Jan 20 18:05:02 crc kubenswrapper[4558]: I0120 18:05:02.209425 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/efb0584b-776b-4a1d-8b88-c4c126e9cb00-ovncontroller-config-0\") pod \"efb0584b-776b-4a1d-8b88-c4c126e9cb00\" (UID: \"efb0584b-776b-4a1d-8b88-c4c126e9cb00\") " Jan 20 18:05:02 crc kubenswrapper[4558]: I0120 18:05:02.209850 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/efb0584b-776b-4a1d-8b88-c4c126e9cb00-ovncontroller-config-0" 
(OuterVolumeSpecName: "ovncontroller-config-0") pod "efb0584b-776b-4a1d-8b88-c4c126e9cb00" (UID: "efb0584b-776b-4a1d-8b88-c4c126e9cb00"). InnerVolumeSpecName "ovncontroller-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:05:02 crc kubenswrapper[4558]: I0120 18:05:02.212825 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efb0584b-776b-4a1d-8b88-c4c126e9cb00-ssh-key-edpm-compute-no-nodes" (OuterVolumeSpecName: "ssh-key-edpm-compute-no-nodes") pod "efb0584b-776b-4a1d-8b88-c4c126e9cb00" (UID: "efb0584b-776b-4a1d-8b88-c4c126e9cb00"). InnerVolumeSpecName "ssh-key-edpm-compute-no-nodes". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:05:02 crc kubenswrapper[4558]: I0120 18:05:02.312149 4558 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/efb0584b-776b-4a1d-8b88-c4c126e9cb00-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Jan 20 18:05:02 crc kubenswrapper[4558]: I0120 18:05:02.312212 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/efb0584b-776b-4a1d-8b88-c4c126e9cb00-ssh-key-edpm-compute-no-nodes\") on node \"crc\" DevicePath \"\"" Jan 20 18:05:02 crc kubenswrapper[4558]: I0120 18:05:02.343985 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-82r5j" event={"ID":"efb0584b-776b-4a1d-8b88-c4c126e9cb00","Type":"ContainerDied","Data":"2eafb2aa22237ccdc76a1644874760ae80a75014427e3666186d46a4af04eeb8"} Jan 20 18:05:02 crc kubenswrapper[4558]: I0120 18:05:02.344043 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2eafb2aa22237ccdc76a1644874760ae80a75014427e3666186d46a4af04eeb8" Jan 20 18:05:02 crc kubenswrapper[4558]: I0120 18:05:02.344115 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-82r5j" Jan 20 18:05:02 crc kubenswrapper[4558]: I0120 18:05:02.388250 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-metadata-edpm-compute-no-nodes-edpm-compute-no-nodssvgn"] Jan 20 18:05:02 crc kubenswrapper[4558]: E0120 18:05:02.388564 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="efb0584b-776b-4a1d-8b88-c4c126e9cb00" containerName="ovn-edpm-compute-no-nodes-edpm-compute-no-nodes" Jan 20 18:05:02 crc kubenswrapper[4558]: I0120 18:05:02.388591 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="efb0584b-776b-4a1d-8b88-c4c126e9cb00" containerName="ovn-edpm-compute-no-nodes-edpm-compute-no-nodes" Jan 20 18:05:02 crc kubenswrapper[4558]: I0120 18:05:02.388762 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="efb0584b-776b-4a1d-8b88-c4c126e9cb00" containerName="ovn-edpm-compute-no-nodes-edpm-compute-no-nodes" Jan 20 18:05:02 crc kubenswrapper[4558]: I0120 18:05:02.389255 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-metadata-edpm-compute-no-nodes-edpm-compute-no-nodssvgn" Jan 20 18:05:02 crc kubenswrapper[4558]: I0120 18:05:02.394414 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 18:05:02 crc kubenswrapper[4558]: I0120 18:05:02.394852 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-metadata-neutron-config" Jan 20 18:05:02 crc kubenswrapper[4558]: I0120 18:05:02.394893 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"edpm-compute-no-nodes-dockercfg-tljmj" Jan 20 18:05:02 crc kubenswrapper[4558]: I0120 18:05:02.394926 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"combined-ca-bundle" Jan 20 18:05:02 crc kubenswrapper[4558]: I0120 18:05:02.394852 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-ovn-metadata-agent-neutron-config" Jan 20 18:05:02 crc kubenswrapper[4558]: I0120 18:05:02.394857 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-edpm-compute-no-nodes" Jan 20 18:05:02 crc kubenswrapper[4558]: I0120 18:05:02.395358 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 18:05:02 crc kubenswrapper[4558]: I0120 18:05:02.400614 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-metadata-edpm-compute-no-nodes-edpm-compute-no-nodssvgn"] Jan 20 18:05:02 crc kubenswrapper[4558]: I0120 18:05:02.413744 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-1\" (UniqueName: \"kubernetes.io/secret/936a9e47-9698-4bf5-ac34-023aa7b95132-nova-metadata-neutron-config-1\") pod \"neutron-metadata-edpm-compute-no-nodes-edpm-compute-no-nodssvgn\" (UID: \"936a9e47-9698-4bf5-ac34-023aa7b95132\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-compute-no-nodes-edpm-compute-no-nodssvgn" Jan 20 18:05:02 crc kubenswrapper[4558]: I0120 18:05:02.413787 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/936a9e47-9698-4bf5-ac34-023aa7b95132-ssh-key-edpm-compute-no-nodes\") pod \"neutron-metadata-edpm-compute-no-nodes-edpm-compute-no-nodssvgn\" (UID: \"936a9e47-9698-4bf5-ac34-023aa7b95132\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-compute-no-nodes-edpm-compute-no-nodssvgn" Jan 20 18:05:02 crc kubenswrapper[4558]: I0120 18:05:02.413813 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dcfnd\" (UniqueName: \"kubernetes.io/projected/936a9e47-9698-4bf5-ac34-023aa7b95132-kube-api-access-dcfnd\") pod \"neutron-metadata-edpm-compute-no-nodes-edpm-compute-no-nodssvgn\" (UID: \"936a9e47-9698-4bf5-ac34-023aa7b95132\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-compute-no-nodes-edpm-compute-no-nodssvgn" Jan 20 18:05:02 crc kubenswrapper[4558]: I0120 18:05:02.413830 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-2\" (UniqueName: \"kubernetes.io/secret/936a9e47-9698-4bf5-ac34-023aa7b95132-nova-metadata-neutron-config-2\") pod 
\"neutron-metadata-edpm-compute-no-nodes-edpm-compute-no-nodssvgn\" (UID: \"936a9e47-9698-4bf5-ac34-023aa7b95132\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-compute-no-nodes-edpm-compute-no-nodssvgn" Jan 20 18:05:02 crc kubenswrapper[4558]: I0120 18:05:02.413885 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/936a9e47-9698-4bf5-ac34-023aa7b95132-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-compute-no-nodes-edpm-compute-no-nodssvgn\" (UID: \"936a9e47-9698-4bf5-ac34-023aa7b95132\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-compute-no-nodes-edpm-compute-no-nodssvgn" Jan 20 18:05:02 crc kubenswrapper[4558]: I0120 18:05:02.413905 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/936a9e47-9698-4bf5-ac34-023aa7b95132-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-compute-no-nodes-edpm-compute-no-nodssvgn\" (UID: \"936a9e47-9698-4bf5-ac34-023aa7b95132\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-compute-no-nodes-edpm-compute-no-nodssvgn" Jan 20 18:05:02 crc kubenswrapper[4558]: I0120 18:05:02.413941 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/936a9e47-9698-4bf5-ac34-023aa7b95132-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-compute-no-nodes-edpm-compute-no-nodssvgn\" (UID: \"936a9e47-9698-4bf5-ac34-023aa7b95132\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-compute-no-nodes-edpm-compute-no-nodssvgn" Jan 20 18:05:02 crc kubenswrapper[4558]: I0120 18:05:02.413980 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/936a9e47-9698-4bf5-ac34-023aa7b95132-inventory\") pod \"neutron-metadata-edpm-compute-no-nodes-edpm-compute-no-nodssvgn\" (UID: \"936a9e47-9698-4bf5-ac34-023aa7b95132\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-compute-no-nodes-edpm-compute-no-nodssvgn" Jan 20 18:05:02 crc kubenswrapper[4558]: I0120 18:05:02.514685 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-1\" (UniqueName: \"kubernetes.io/secret/936a9e47-9698-4bf5-ac34-023aa7b95132-nova-metadata-neutron-config-1\") pod \"neutron-metadata-edpm-compute-no-nodes-edpm-compute-no-nodssvgn\" (UID: \"936a9e47-9698-4bf5-ac34-023aa7b95132\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-compute-no-nodes-edpm-compute-no-nodssvgn" Jan 20 18:05:02 crc kubenswrapper[4558]: I0120 18:05:02.514739 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/936a9e47-9698-4bf5-ac34-023aa7b95132-ssh-key-edpm-compute-no-nodes\") pod \"neutron-metadata-edpm-compute-no-nodes-edpm-compute-no-nodssvgn\" (UID: \"936a9e47-9698-4bf5-ac34-023aa7b95132\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-compute-no-nodes-edpm-compute-no-nodssvgn" Jan 20 18:05:02 crc kubenswrapper[4558]: I0120 18:05:02.514778 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dcfnd\" (UniqueName: \"kubernetes.io/projected/936a9e47-9698-4bf5-ac34-023aa7b95132-kube-api-access-dcfnd\") pod 
\"neutron-metadata-edpm-compute-no-nodes-edpm-compute-no-nodssvgn\" (UID: \"936a9e47-9698-4bf5-ac34-023aa7b95132\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-compute-no-nodes-edpm-compute-no-nodssvgn" Jan 20 18:05:02 crc kubenswrapper[4558]: I0120 18:05:02.514802 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-2\" (UniqueName: \"kubernetes.io/secret/936a9e47-9698-4bf5-ac34-023aa7b95132-nova-metadata-neutron-config-2\") pod \"neutron-metadata-edpm-compute-no-nodes-edpm-compute-no-nodssvgn\" (UID: \"936a9e47-9698-4bf5-ac34-023aa7b95132\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-compute-no-nodes-edpm-compute-no-nodssvgn" Jan 20 18:05:02 crc kubenswrapper[4558]: I0120 18:05:02.514898 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/936a9e47-9698-4bf5-ac34-023aa7b95132-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-compute-no-nodes-edpm-compute-no-nodssvgn\" (UID: \"936a9e47-9698-4bf5-ac34-023aa7b95132\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-compute-no-nodes-edpm-compute-no-nodssvgn" Jan 20 18:05:02 crc kubenswrapper[4558]: I0120 18:05:02.514921 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/936a9e47-9698-4bf5-ac34-023aa7b95132-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-compute-no-nodes-edpm-compute-no-nodssvgn\" (UID: \"936a9e47-9698-4bf5-ac34-023aa7b95132\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-compute-no-nodes-edpm-compute-no-nodssvgn" Jan 20 18:05:02 crc kubenswrapper[4558]: I0120 18:05:02.514951 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/936a9e47-9698-4bf5-ac34-023aa7b95132-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-compute-no-nodes-edpm-compute-no-nodssvgn\" (UID: \"936a9e47-9698-4bf5-ac34-023aa7b95132\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-compute-no-nodes-edpm-compute-no-nodssvgn" Jan 20 18:05:02 crc kubenswrapper[4558]: I0120 18:05:02.515004 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/936a9e47-9698-4bf5-ac34-023aa7b95132-inventory\") pod \"neutron-metadata-edpm-compute-no-nodes-edpm-compute-no-nodssvgn\" (UID: \"936a9e47-9698-4bf5-ac34-023aa7b95132\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-compute-no-nodes-edpm-compute-no-nodssvgn" Jan 20 18:05:02 crc kubenswrapper[4558]: I0120 18:05:02.519305 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/936a9e47-9698-4bf5-ac34-023aa7b95132-inventory\") pod \"neutron-metadata-edpm-compute-no-nodes-edpm-compute-no-nodssvgn\" (UID: \"936a9e47-9698-4bf5-ac34-023aa7b95132\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-compute-no-nodes-edpm-compute-no-nodssvgn" Jan 20 18:05:02 crc kubenswrapper[4558]: I0120 18:05:02.519305 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-1\" (UniqueName: \"kubernetes.io/secret/936a9e47-9698-4bf5-ac34-023aa7b95132-nova-metadata-neutron-config-1\") pod \"neutron-metadata-edpm-compute-no-nodes-edpm-compute-no-nodssvgn\" (UID: \"936a9e47-9698-4bf5-ac34-023aa7b95132\") " 
pod="openstack-kuttl-tests/neutron-metadata-edpm-compute-no-nodes-edpm-compute-no-nodssvgn" Jan 20 18:05:02 crc kubenswrapper[4558]: I0120 18:05:02.519949 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/936a9e47-9698-4bf5-ac34-023aa7b95132-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-compute-no-nodes-edpm-compute-no-nodssvgn\" (UID: \"936a9e47-9698-4bf5-ac34-023aa7b95132\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-compute-no-nodes-edpm-compute-no-nodssvgn" Jan 20 18:05:02 crc kubenswrapper[4558]: I0120 18:05:02.520136 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-2\" (UniqueName: \"kubernetes.io/secret/936a9e47-9698-4bf5-ac34-023aa7b95132-nova-metadata-neutron-config-2\") pod \"neutron-metadata-edpm-compute-no-nodes-edpm-compute-no-nodssvgn\" (UID: \"936a9e47-9698-4bf5-ac34-023aa7b95132\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-compute-no-nodes-edpm-compute-no-nodssvgn" Jan 20 18:05:02 crc kubenswrapper[4558]: I0120 18:05:02.520645 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/936a9e47-9698-4bf5-ac34-023aa7b95132-ssh-key-edpm-compute-no-nodes\") pod \"neutron-metadata-edpm-compute-no-nodes-edpm-compute-no-nodssvgn\" (UID: \"936a9e47-9698-4bf5-ac34-023aa7b95132\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-compute-no-nodes-edpm-compute-no-nodssvgn" Jan 20 18:05:02 crc kubenswrapper[4558]: I0120 18:05:02.521071 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/936a9e47-9698-4bf5-ac34-023aa7b95132-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-compute-no-nodes-edpm-compute-no-nodssvgn\" (UID: \"936a9e47-9698-4bf5-ac34-023aa7b95132\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-compute-no-nodes-edpm-compute-no-nodssvgn" Jan 20 18:05:02 crc kubenswrapper[4558]: I0120 18:05:02.521601 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/936a9e47-9698-4bf5-ac34-023aa7b95132-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-compute-no-nodes-edpm-compute-no-nodssvgn\" (UID: \"936a9e47-9698-4bf5-ac34-023aa7b95132\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-compute-no-nodes-edpm-compute-no-nodssvgn" Jan 20 18:05:02 crc kubenswrapper[4558]: I0120 18:05:02.532094 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dcfnd\" (UniqueName: \"kubernetes.io/projected/936a9e47-9698-4bf5-ac34-023aa7b95132-kube-api-access-dcfnd\") pod \"neutron-metadata-edpm-compute-no-nodes-edpm-compute-no-nodssvgn\" (UID: \"936a9e47-9698-4bf5-ac34-023aa7b95132\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-compute-no-nodes-edpm-compute-no-nodssvgn" Jan 20 18:05:02 crc kubenswrapper[4558]: I0120 18:05:02.704141 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-metadata-edpm-compute-no-nodes-edpm-compute-no-nodssvgn" Jan 20 18:05:03 crc kubenswrapper[4558]: I0120 18:05:03.421105 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-metadata-edpm-compute-no-nodes-edpm-compute-no-nodssvgn"] Jan 20 18:05:03 crc kubenswrapper[4558]: W0120 18:05:03.424488 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod936a9e47_9698_4bf5_ac34_023aa7b95132.slice/crio-2e6b056e7ac5b233b4d5d33fd3929d4d174dd129c3a23bf85ab4c07ed8d43ce6 WatchSource:0}: Error finding container 2e6b056e7ac5b233b4d5d33fd3929d4d174dd129c3a23bf85ab4c07ed8d43ce6: Status 404 returned error can't find the container with id 2e6b056e7ac5b233b4d5d33fd3929d4d174dd129c3a23bf85ab4c07ed8d43ce6 Jan 20 18:05:04 crc kubenswrapper[4558]: I0120 18:05:04.364027 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-metadata-edpm-compute-no-nodes-edpm-compute-no-nodssvgn" event={"ID":"936a9e47-9698-4bf5-ac34-023aa7b95132","Type":"ContainerStarted","Data":"69b6158d5b7b70a3e9028f7e9027602fde3cab51c171d50a19460a849f1bc9df"} Jan 20 18:05:04 crc kubenswrapper[4558]: I0120 18:05:04.365478 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-metadata-edpm-compute-no-nodes-edpm-compute-no-nodssvgn" event={"ID":"936a9e47-9698-4bf5-ac34-023aa7b95132","Type":"ContainerStarted","Data":"2e6b056e7ac5b233b4d5d33fd3929d4d174dd129c3a23bf85ab4c07ed8d43ce6"} Jan 20 18:05:04 crc kubenswrapper[4558]: I0120 18:05:04.384078 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/neutron-metadata-edpm-compute-no-nodes-edpm-compute-no-nodssvgn" podStartSLOduration=1.8646985379999999 podStartE2EDuration="2.38404927s" podCreationTimestamp="2026-01-20 18:05:02 +0000 UTC" firstStartedPulling="2026-01-20 18:05:03.427129542 +0000 UTC m=+4997.187467509" lastFinishedPulling="2026-01-20 18:05:03.946480264 +0000 UTC m=+4997.706818241" observedRunningTime="2026-01-20 18:05:04.382516107 +0000 UTC m=+4998.142854075" watchObservedRunningTime="2026-01-20 18:05:04.38404927 +0000 UTC m=+4998.144387237" Jan 20 18:05:05 crc kubenswrapper[4558]: I0120 18:05:05.374763 4558 generic.go:334] "Generic (PLEG): container finished" podID="936a9e47-9698-4bf5-ac34-023aa7b95132" containerID="69b6158d5b7b70a3e9028f7e9027602fde3cab51c171d50a19460a849f1bc9df" exitCode=0 Jan 20 18:05:05 crc kubenswrapper[4558]: I0120 18:05:05.374828 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-metadata-edpm-compute-no-nodes-edpm-compute-no-nodssvgn" event={"ID":"936a9e47-9698-4bf5-ac34-023aa7b95132","Type":"ContainerDied","Data":"69b6158d5b7b70a3e9028f7e9027602fde3cab51c171d50a19460a849f1bc9df"} Jan 20 18:05:06 crc kubenswrapper[4558]: I0120 18:05:06.652655 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-metadata-edpm-compute-no-nodes-edpm-compute-no-nodssvgn" Jan 20 18:05:06 crc kubenswrapper[4558]: I0120 18:05:06.780275 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/936a9e47-9698-4bf5-ac34-023aa7b95132-ssh-key-edpm-compute-no-nodes\") pod \"936a9e47-9698-4bf5-ac34-023aa7b95132\" (UID: \"936a9e47-9698-4bf5-ac34-023aa7b95132\") " Jan 20 18:05:06 crc kubenswrapper[4558]: I0120 18:05:06.780390 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/936a9e47-9698-4bf5-ac34-023aa7b95132-neutron-metadata-combined-ca-bundle\") pod \"936a9e47-9698-4bf5-ac34-023aa7b95132\" (UID: \"936a9e47-9698-4bf5-ac34-023aa7b95132\") " Jan 20 18:05:06 crc kubenswrapper[4558]: I0120 18:05:06.780461 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/936a9e47-9698-4bf5-ac34-023aa7b95132-nova-metadata-neutron-config-0\") pod \"936a9e47-9698-4bf5-ac34-023aa7b95132\" (UID: \"936a9e47-9698-4bf5-ac34-023aa7b95132\") " Jan 20 18:05:06 crc kubenswrapper[4558]: I0120 18:05:06.780491 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-2\" (UniqueName: \"kubernetes.io/secret/936a9e47-9698-4bf5-ac34-023aa7b95132-nova-metadata-neutron-config-2\") pod \"936a9e47-9698-4bf5-ac34-023aa7b95132\" (UID: \"936a9e47-9698-4bf5-ac34-023aa7b95132\") " Jan 20 18:05:06 crc kubenswrapper[4558]: I0120 18:05:06.780559 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/936a9e47-9698-4bf5-ac34-023aa7b95132-inventory\") pod \"936a9e47-9698-4bf5-ac34-023aa7b95132\" (UID: \"936a9e47-9698-4bf5-ac34-023aa7b95132\") " Jan 20 18:05:06 crc kubenswrapper[4558]: I0120 18:05:06.780600 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-1\" (UniqueName: \"kubernetes.io/secret/936a9e47-9698-4bf5-ac34-023aa7b95132-nova-metadata-neutron-config-1\") pod \"936a9e47-9698-4bf5-ac34-023aa7b95132\" (UID: \"936a9e47-9698-4bf5-ac34-023aa7b95132\") " Jan 20 18:05:06 crc kubenswrapper[4558]: I0120 18:05:06.780645 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dcfnd\" (UniqueName: \"kubernetes.io/projected/936a9e47-9698-4bf5-ac34-023aa7b95132-kube-api-access-dcfnd\") pod \"936a9e47-9698-4bf5-ac34-023aa7b95132\" (UID: \"936a9e47-9698-4bf5-ac34-023aa7b95132\") " Jan 20 18:05:06 crc kubenswrapper[4558]: I0120 18:05:06.780684 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/936a9e47-9698-4bf5-ac34-023aa7b95132-neutron-ovn-metadata-agent-neutron-config-0\") pod \"936a9e47-9698-4bf5-ac34-023aa7b95132\" (UID: \"936a9e47-9698-4bf5-ac34-023aa7b95132\") " Jan 20 18:05:06 crc kubenswrapper[4558]: I0120 18:05:06.788334 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/936a9e47-9698-4bf5-ac34-023aa7b95132-kube-api-access-dcfnd" (OuterVolumeSpecName: "kube-api-access-dcfnd") pod "936a9e47-9698-4bf5-ac34-023aa7b95132" (UID: "936a9e47-9698-4bf5-ac34-023aa7b95132"). 
InnerVolumeSpecName "kube-api-access-dcfnd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:05:06 crc kubenswrapper[4558]: I0120 18:05:06.789968 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/936a9e47-9698-4bf5-ac34-023aa7b95132-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "936a9e47-9698-4bf5-ac34-023aa7b95132" (UID: "936a9e47-9698-4bf5-ac34-023aa7b95132"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:05:06 crc kubenswrapper[4558]: I0120 18:05:06.803700 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/936a9e47-9698-4bf5-ac34-023aa7b95132-nova-metadata-neutron-config-1" (OuterVolumeSpecName: "nova-metadata-neutron-config-1") pod "936a9e47-9698-4bf5-ac34-023aa7b95132" (UID: "936a9e47-9698-4bf5-ac34-023aa7b95132"). InnerVolumeSpecName "nova-metadata-neutron-config-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:05:06 crc kubenswrapper[4558]: I0120 18:05:06.804402 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/936a9e47-9698-4bf5-ac34-023aa7b95132-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "936a9e47-9698-4bf5-ac34-023aa7b95132" (UID: "936a9e47-9698-4bf5-ac34-023aa7b95132"). InnerVolumeSpecName "nova-metadata-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:05:06 crc kubenswrapper[4558]: I0120 18:05:06.806102 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/936a9e47-9698-4bf5-ac34-023aa7b95132-inventory" (OuterVolumeSpecName: "inventory") pod "936a9e47-9698-4bf5-ac34-023aa7b95132" (UID: "936a9e47-9698-4bf5-ac34-023aa7b95132"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:05:06 crc kubenswrapper[4558]: I0120 18:05:06.806715 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/936a9e47-9698-4bf5-ac34-023aa7b95132-nova-metadata-neutron-config-2" (OuterVolumeSpecName: "nova-metadata-neutron-config-2") pod "936a9e47-9698-4bf5-ac34-023aa7b95132" (UID: "936a9e47-9698-4bf5-ac34-023aa7b95132"). InnerVolumeSpecName "nova-metadata-neutron-config-2". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:05:06 crc kubenswrapper[4558]: I0120 18:05:06.806852 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/936a9e47-9698-4bf5-ac34-023aa7b95132-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "936a9e47-9698-4bf5-ac34-023aa7b95132" (UID: "936a9e47-9698-4bf5-ac34-023aa7b95132"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:05:06 crc kubenswrapper[4558]: I0120 18:05:06.808567 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/936a9e47-9698-4bf5-ac34-023aa7b95132-ssh-key-edpm-compute-no-nodes" (OuterVolumeSpecName: "ssh-key-edpm-compute-no-nodes") pod "936a9e47-9698-4bf5-ac34-023aa7b95132" (UID: "936a9e47-9698-4bf5-ac34-023aa7b95132"). InnerVolumeSpecName "ssh-key-edpm-compute-no-nodes". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:05:06 crc kubenswrapper[4558]: I0120 18:05:06.883465 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/936a9e47-9698-4bf5-ac34-023aa7b95132-ssh-key-edpm-compute-no-nodes\") on node \"crc\" DevicePath \"\"" Jan 20 18:05:06 crc kubenswrapper[4558]: I0120 18:05:06.883503 4558 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/936a9e47-9698-4bf5-ac34-023aa7b95132-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:05:06 crc kubenswrapper[4558]: I0120 18:05:06.883518 4558 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/936a9e47-9698-4bf5-ac34-023aa7b95132-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Jan 20 18:05:06 crc kubenswrapper[4558]: I0120 18:05:06.883530 4558 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-2\" (UniqueName: \"kubernetes.io/secret/936a9e47-9698-4bf5-ac34-023aa7b95132-nova-metadata-neutron-config-2\") on node \"crc\" DevicePath \"\"" Jan 20 18:05:06 crc kubenswrapper[4558]: I0120 18:05:06.883541 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/936a9e47-9698-4bf5-ac34-023aa7b95132-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:05:06 crc kubenswrapper[4558]: I0120 18:05:06.883552 4558 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-1\" (UniqueName: \"kubernetes.io/secret/936a9e47-9698-4bf5-ac34-023aa7b95132-nova-metadata-neutron-config-1\") on node \"crc\" DevicePath \"\"" Jan 20 18:05:06 crc kubenswrapper[4558]: I0120 18:05:06.883566 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dcfnd\" (UniqueName: \"kubernetes.io/projected/936a9e47-9698-4bf5-ac34-023aa7b95132-kube-api-access-dcfnd\") on node \"crc\" DevicePath \"\"" Jan 20 18:05:06 crc kubenswrapper[4558]: I0120 18:05:06.883578 4558 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/936a9e47-9698-4bf5-ac34-023aa7b95132-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Jan 20 18:05:07 crc kubenswrapper[4558]: I0120 18:05:07.403908 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-metadata-edpm-compute-no-nodes-edpm-compute-no-nodssvgn" event={"ID":"936a9e47-9698-4bf5-ac34-023aa7b95132","Type":"ContainerDied","Data":"2e6b056e7ac5b233b4d5d33fd3929d4d174dd129c3a23bf85ab4c07ed8d43ce6"} Jan 20 18:05:07 crc kubenswrapper[4558]: I0120 18:05:07.403957 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2e6b056e7ac5b233b4d5d33fd3929d4d174dd129c3a23bf85ab4c07ed8d43ce6" Jan 20 18:05:07 crc kubenswrapper[4558]: I0120 18:05:07.403962 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-metadata-edpm-compute-no-nodes-edpm-compute-no-nodssvgn" Jan 20 18:05:07 crc kubenswrapper[4558]: I0120 18:05:07.725066 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-z2g5q"] Jan 20 18:05:07 crc kubenswrapper[4558]: E0120 18:05:07.725695 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="936a9e47-9698-4bf5-ac34-023aa7b95132" containerName="neutron-metadata-edpm-compute-no-nodes-edpm-compute-no-nodes" Jan 20 18:05:07 crc kubenswrapper[4558]: I0120 18:05:07.725710 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="936a9e47-9698-4bf5-ac34-023aa7b95132" containerName="neutron-metadata-edpm-compute-no-nodes-edpm-compute-no-nodes" Jan 20 18:05:07 crc kubenswrapper[4558]: I0120 18:05:07.725864 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="936a9e47-9698-4bf5-ac34-023aa7b95132" containerName="neutron-metadata-edpm-compute-no-nodes-edpm-compute-no-nodes" Jan 20 18:05:07 crc kubenswrapper[4558]: I0120 18:05:07.726382 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-z2g5q" Jan 20 18:05:07 crc kubenswrapper[4558]: I0120 18:05:07.727614 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"combined-ca-bundle" Jan 20 18:05:07 crc kubenswrapper[4558]: I0120 18:05:07.727945 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-edpm-compute-no-nodes" Jan 20 18:05:07 crc kubenswrapper[4558]: I0120 18:05:07.728184 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"edpm-compute-no-nodes-dockercfg-tljmj" Jan 20 18:05:07 crc kubenswrapper[4558]: I0120 18:05:07.728395 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 18:05:07 crc kubenswrapper[4558]: I0120 18:05:07.728615 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-ovn-agent-neutron-config" Jan 20 18:05:07 crc kubenswrapper[4558]: I0120 18:05:07.729260 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 18:05:07 crc kubenswrapper[4558]: I0120 18:05:07.736255 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-z2g5q"] Jan 20 18:05:07 crc kubenswrapper[4558]: I0120 18:05:07.903563 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e53d0e83-ebc6-4441-9c69-94b482e0f2b8-neutron-ovn-combined-ca-bundle\") pod \"neutron-ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-z2g5q\" (UID: \"e53d0e83-ebc6-4441-9c69-94b482e0f2b8\") " pod="openstack-kuttl-tests/neutron-ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-z2g5q" Jan 20 18:05:07 crc kubenswrapper[4558]: I0120 18:05:07.903616 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/e53d0e83-ebc6-4441-9c69-94b482e0f2b8-neutron-ovn-agent-neutron-config-0\") pod \"neutron-ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-z2g5q\" (UID: 
\"e53d0e83-ebc6-4441-9c69-94b482e0f2b8\") " pod="openstack-kuttl-tests/neutron-ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-z2g5q" Jan 20 18:05:07 crc kubenswrapper[4558]: I0120 18:05:07.903697 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kg4g9\" (UniqueName: \"kubernetes.io/projected/e53d0e83-ebc6-4441-9c69-94b482e0f2b8-kube-api-access-kg4g9\") pod \"neutron-ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-z2g5q\" (UID: \"e53d0e83-ebc6-4441-9c69-94b482e0f2b8\") " pod="openstack-kuttl-tests/neutron-ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-z2g5q" Jan 20 18:05:07 crc kubenswrapper[4558]: I0120 18:05:07.903759 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/e53d0e83-ebc6-4441-9c69-94b482e0f2b8-ssh-key-edpm-compute-no-nodes\") pod \"neutron-ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-z2g5q\" (UID: \"e53d0e83-ebc6-4441-9c69-94b482e0f2b8\") " pod="openstack-kuttl-tests/neutron-ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-z2g5q" Jan 20 18:05:07 crc kubenswrapper[4558]: I0120 18:05:07.903807 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e53d0e83-ebc6-4441-9c69-94b482e0f2b8-inventory\") pod \"neutron-ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-z2g5q\" (UID: \"e53d0e83-ebc6-4441-9c69-94b482e0f2b8\") " pod="openstack-kuttl-tests/neutron-ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-z2g5q" Jan 20 18:05:08 crc kubenswrapper[4558]: I0120 18:05:08.005087 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e53d0e83-ebc6-4441-9c69-94b482e0f2b8-neutron-ovn-combined-ca-bundle\") pod \"neutron-ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-z2g5q\" (UID: \"e53d0e83-ebc6-4441-9c69-94b482e0f2b8\") " pod="openstack-kuttl-tests/neutron-ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-z2g5q" Jan 20 18:05:08 crc kubenswrapper[4558]: I0120 18:05:08.005143 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/e53d0e83-ebc6-4441-9c69-94b482e0f2b8-neutron-ovn-agent-neutron-config-0\") pod \"neutron-ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-z2g5q\" (UID: \"e53d0e83-ebc6-4441-9c69-94b482e0f2b8\") " pod="openstack-kuttl-tests/neutron-ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-z2g5q" Jan 20 18:05:08 crc kubenswrapper[4558]: I0120 18:05:08.005233 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kg4g9\" (UniqueName: \"kubernetes.io/projected/e53d0e83-ebc6-4441-9c69-94b482e0f2b8-kube-api-access-kg4g9\") pod \"neutron-ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-z2g5q\" (UID: \"e53d0e83-ebc6-4441-9c69-94b482e0f2b8\") " pod="openstack-kuttl-tests/neutron-ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-z2g5q" Jan 20 18:05:08 crc kubenswrapper[4558]: I0120 18:05:08.005261 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/e53d0e83-ebc6-4441-9c69-94b482e0f2b8-ssh-key-edpm-compute-no-nodes\") pod \"neutron-ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-z2g5q\" (UID: \"e53d0e83-ebc6-4441-9c69-94b482e0f2b8\") " 
pod="openstack-kuttl-tests/neutron-ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-z2g5q" Jan 20 18:05:08 crc kubenswrapper[4558]: I0120 18:05:08.005293 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e53d0e83-ebc6-4441-9c69-94b482e0f2b8-inventory\") pod \"neutron-ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-z2g5q\" (UID: \"e53d0e83-ebc6-4441-9c69-94b482e0f2b8\") " pod="openstack-kuttl-tests/neutron-ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-z2g5q" Jan 20 18:05:08 crc kubenswrapper[4558]: I0120 18:05:08.010562 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/e53d0e83-ebc6-4441-9c69-94b482e0f2b8-neutron-ovn-agent-neutron-config-0\") pod \"neutron-ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-z2g5q\" (UID: \"e53d0e83-ebc6-4441-9c69-94b482e0f2b8\") " pod="openstack-kuttl-tests/neutron-ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-z2g5q" Jan 20 18:05:08 crc kubenswrapper[4558]: I0120 18:05:08.011120 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e53d0e83-ebc6-4441-9c69-94b482e0f2b8-neutron-ovn-combined-ca-bundle\") pod \"neutron-ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-z2g5q\" (UID: \"e53d0e83-ebc6-4441-9c69-94b482e0f2b8\") " pod="openstack-kuttl-tests/neutron-ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-z2g5q" Jan 20 18:05:08 crc kubenswrapper[4558]: I0120 18:05:08.011375 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/e53d0e83-ebc6-4441-9c69-94b482e0f2b8-ssh-key-edpm-compute-no-nodes\") pod \"neutron-ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-z2g5q\" (UID: \"e53d0e83-ebc6-4441-9c69-94b482e0f2b8\") " pod="openstack-kuttl-tests/neutron-ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-z2g5q" Jan 20 18:05:08 crc kubenswrapper[4558]: I0120 18:05:08.011653 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e53d0e83-ebc6-4441-9c69-94b482e0f2b8-inventory\") pod \"neutron-ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-z2g5q\" (UID: \"e53d0e83-ebc6-4441-9c69-94b482e0f2b8\") " pod="openstack-kuttl-tests/neutron-ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-z2g5q" Jan 20 18:05:08 crc kubenswrapper[4558]: I0120 18:05:08.020563 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kg4g9\" (UniqueName: \"kubernetes.io/projected/e53d0e83-ebc6-4441-9c69-94b482e0f2b8-kube-api-access-kg4g9\") pod \"neutron-ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-z2g5q\" (UID: \"e53d0e83-ebc6-4441-9c69-94b482e0f2b8\") " pod="openstack-kuttl-tests/neutron-ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-z2g5q" Jan 20 18:05:08 crc kubenswrapper[4558]: I0120 18:05:08.046517 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-z2g5q" Jan 20 18:05:08 crc kubenswrapper[4558]: I0120 18:05:08.439016 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-z2g5q"] Jan 20 18:05:08 crc kubenswrapper[4558]: W0120 18:05:08.440038 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode53d0e83_ebc6_4441_9c69_94b482e0f2b8.slice/crio-ddf98127fb6256d6d237634307b30b6341d03a3c3d96047084fa5945fbaa4a1b WatchSource:0}: Error finding container ddf98127fb6256d6d237634307b30b6341d03a3c3d96047084fa5945fbaa4a1b: Status 404 returned error can't find the container with id ddf98127fb6256d6d237634307b30b6341d03a3c3d96047084fa5945fbaa4a1b Jan 20 18:05:08 crc kubenswrapper[4558]: I0120 18:05:08.442221 4558 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 20 18:05:09 crc kubenswrapper[4558]: I0120 18:05:09.425414 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-z2g5q" event={"ID":"e53d0e83-ebc6-4441-9c69-94b482e0f2b8","Type":"ContainerStarted","Data":"264d5eb52b0f8b6b9eb56b3a0c431e49acbf761a45324cb457decc2d43d91e79"} Jan 20 18:05:09 crc kubenswrapper[4558]: I0120 18:05:09.425830 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-z2g5q" event={"ID":"e53d0e83-ebc6-4441-9c69-94b482e0f2b8","Type":"ContainerStarted","Data":"ddf98127fb6256d6d237634307b30b6341d03a3c3d96047084fa5945fbaa4a1b"} Jan 20 18:05:09 crc kubenswrapper[4558]: I0120 18:05:09.446885 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/neutron-ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-z2g5q" podStartSLOduration=1.953850363 podStartE2EDuration="2.446859798s" podCreationTimestamp="2026-01-20 18:05:07 +0000 UTC" firstStartedPulling="2026-01-20 18:05:08.441973553 +0000 UTC m=+5002.202311520" lastFinishedPulling="2026-01-20 18:05:08.934982988 +0000 UTC m=+5002.695320955" observedRunningTime="2026-01-20 18:05:09.442968292 +0000 UTC m=+5003.203306258" watchObservedRunningTime="2026-01-20 18:05:09.446859798 +0000 UTC m=+5003.207197765" Jan 20 18:05:10 crc kubenswrapper[4558]: I0120 18:05:10.438382 4558 generic.go:334] "Generic (PLEG): container finished" podID="e53d0e83-ebc6-4441-9c69-94b482e0f2b8" containerID="264d5eb52b0f8b6b9eb56b3a0c431e49acbf761a45324cb457decc2d43d91e79" exitCode=0 Jan 20 18:05:10 crc kubenswrapper[4558]: I0120 18:05:10.438497 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-z2g5q" event={"ID":"e53d0e83-ebc6-4441-9c69-94b482e0f2b8","Type":"ContainerDied","Data":"264d5eb52b0f8b6b9eb56b3a0c431e49acbf761a45324cb457decc2d43d91e79"} Jan 20 18:05:11 crc kubenswrapper[4558]: I0120 18:05:11.662995 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-z2g5q" Jan 20 18:05:11 crc kubenswrapper[4558]: I0120 18:05:11.765953 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e53d0e83-ebc6-4441-9c69-94b482e0f2b8-neutron-ovn-combined-ca-bundle\") pod \"e53d0e83-ebc6-4441-9c69-94b482e0f2b8\" (UID: \"e53d0e83-ebc6-4441-9c69-94b482e0f2b8\") " Jan 20 18:05:11 crc kubenswrapper[4558]: I0120 18:05:11.766082 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kg4g9\" (UniqueName: \"kubernetes.io/projected/e53d0e83-ebc6-4441-9c69-94b482e0f2b8-kube-api-access-kg4g9\") pod \"e53d0e83-ebc6-4441-9c69-94b482e0f2b8\" (UID: \"e53d0e83-ebc6-4441-9c69-94b482e0f2b8\") " Jan 20 18:05:11 crc kubenswrapper[4558]: I0120 18:05:11.766137 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e53d0e83-ebc6-4441-9c69-94b482e0f2b8-inventory\") pod \"e53d0e83-ebc6-4441-9c69-94b482e0f2b8\" (UID: \"e53d0e83-ebc6-4441-9c69-94b482e0f2b8\") " Jan 20 18:05:11 crc kubenswrapper[4558]: I0120 18:05:11.766176 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/e53d0e83-ebc6-4441-9c69-94b482e0f2b8-ssh-key-edpm-compute-no-nodes\") pod \"e53d0e83-ebc6-4441-9c69-94b482e0f2b8\" (UID: \"e53d0e83-ebc6-4441-9c69-94b482e0f2b8\") " Jan 20 18:05:11 crc kubenswrapper[4558]: I0120 18:05:11.766207 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/e53d0e83-ebc6-4441-9c69-94b482e0f2b8-neutron-ovn-agent-neutron-config-0\") pod \"e53d0e83-ebc6-4441-9c69-94b482e0f2b8\" (UID: \"e53d0e83-ebc6-4441-9c69-94b482e0f2b8\") " Jan 20 18:05:11 crc kubenswrapper[4558]: I0120 18:05:11.771155 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e53d0e83-ebc6-4441-9c69-94b482e0f2b8-neutron-ovn-combined-ca-bundle" (OuterVolumeSpecName: "neutron-ovn-combined-ca-bundle") pod "e53d0e83-ebc6-4441-9c69-94b482e0f2b8" (UID: "e53d0e83-ebc6-4441-9c69-94b482e0f2b8"). InnerVolumeSpecName "neutron-ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:05:11 crc kubenswrapper[4558]: I0120 18:05:11.771351 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e53d0e83-ebc6-4441-9c69-94b482e0f2b8-kube-api-access-kg4g9" (OuterVolumeSpecName: "kube-api-access-kg4g9") pod "e53d0e83-ebc6-4441-9c69-94b482e0f2b8" (UID: "e53d0e83-ebc6-4441-9c69-94b482e0f2b8"). InnerVolumeSpecName "kube-api-access-kg4g9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:05:11 crc kubenswrapper[4558]: E0120 18:05:11.783457 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e53d0e83-ebc6-4441-9c69-94b482e0f2b8-inventory podName:e53d0e83-ebc6-4441-9c69-94b482e0f2b8 nodeName:}" failed. No retries permitted until 2026-01-20 18:05:12.283429101 +0000 UTC m=+5006.043767068 (durationBeforeRetry 500ms). 
Error: error cleaning subPath mounts for volume "inventory" (UniqueName: "kubernetes.io/secret/e53d0e83-ebc6-4441-9c69-94b482e0f2b8-inventory") pod "e53d0e83-ebc6-4441-9c69-94b482e0f2b8" (UID: "e53d0e83-ebc6-4441-9c69-94b482e0f2b8") : error deleting /var/lib/kubelet/pods/e53d0e83-ebc6-4441-9c69-94b482e0f2b8/volume-subpaths: remove /var/lib/kubelet/pods/e53d0e83-ebc6-4441-9c69-94b482e0f2b8/volume-subpaths: no such file or directory Jan 20 18:05:11 crc kubenswrapper[4558]: I0120 18:05:11.784097 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e53d0e83-ebc6-4441-9c69-94b482e0f2b8-neutron-ovn-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-agent-neutron-config-0") pod "e53d0e83-ebc6-4441-9c69-94b482e0f2b8" (UID: "e53d0e83-ebc6-4441-9c69-94b482e0f2b8"). InnerVolumeSpecName "neutron-ovn-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:05:11 crc kubenswrapper[4558]: I0120 18:05:11.786296 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e53d0e83-ebc6-4441-9c69-94b482e0f2b8-ssh-key-edpm-compute-no-nodes" (OuterVolumeSpecName: "ssh-key-edpm-compute-no-nodes") pod "e53d0e83-ebc6-4441-9c69-94b482e0f2b8" (UID: "e53d0e83-ebc6-4441-9c69-94b482e0f2b8"). InnerVolumeSpecName "ssh-key-edpm-compute-no-nodes". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:05:11 crc kubenswrapper[4558]: I0120 18:05:11.868729 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/e53d0e83-ebc6-4441-9c69-94b482e0f2b8-ssh-key-edpm-compute-no-nodes\") on node \"crc\" DevicePath \"\"" Jan 20 18:05:11 crc kubenswrapper[4558]: I0120 18:05:11.868763 4558 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/e53d0e83-ebc6-4441-9c69-94b482e0f2b8-neutron-ovn-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Jan 20 18:05:11 crc kubenswrapper[4558]: I0120 18:05:11.868774 4558 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e53d0e83-ebc6-4441-9c69-94b482e0f2b8-neutron-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:05:11 crc kubenswrapper[4558]: I0120 18:05:11.868786 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kg4g9\" (UniqueName: \"kubernetes.io/projected/e53d0e83-ebc6-4441-9c69-94b482e0f2b8-kube-api-access-kg4g9\") on node \"crc\" DevicePath \"\"" Jan 20 18:05:12 crc kubenswrapper[4558]: I0120 18:05:12.343463 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-c8rjz"] Jan 20 18:05:12 crc kubenswrapper[4558]: E0120 18:05:12.343850 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e53d0e83-ebc6-4441-9c69-94b482e0f2b8" containerName="neutron-ovn-edpm-compute-no-nodes-edpm-compute-no-nodes" Jan 20 18:05:12 crc kubenswrapper[4558]: I0120 18:05:12.343876 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e53d0e83-ebc6-4441-9c69-94b482e0f2b8" containerName="neutron-ovn-edpm-compute-no-nodes-edpm-compute-no-nodes" Jan 20 18:05:12 crc kubenswrapper[4558]: I0120 18:05:12.344073 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e53d0e83-ebc6-4441-9c69-94b482e0f2b8" containerName="neutron-ovn-edpm-compute-no-nodes-edpm-compute-no-nodes" Jan 20 18:05:12 crc kubenswrapper[4558]: I0120 18:05:12.345316 4558 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-c8rjz" Jan 20 18:05:12 crc kubenswrapper[4558]: I0120 18:05:12.353263 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-c8rjz"] Jan 20 18:05:12 crc kubenswrapper[4558]: I0120 18:05:12.375500 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e53d0e83-ebc6-4441-9c69-94b482e0f2b8-inventory\") pod \"e53d0e83-ebc6-4441-9c69-94b482e0f2b8\" (UID: \"e53d0e83-ebc6-4441-9c69-94b482e0f2b8\") " Jan 20 18:05:12 crc kubenswrapper[4558]: I0120 18:05:12.375923 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pkgv9\" (UniqueName: \"kubernetes.io/projected/810b3c6b-1c22-49e0-aae7-16ac3637d8e8-kube-api-access-pkgv9\") pod \"community-operators-c8rjz\" (UID: \"810b3c6b-1c22-49e0-aae7-16ac3637d8e8\") " pod="openshift-marketplace/community-operators-c8rjz" Jan 20 18:05:12 crc kubenswrapper[4558]: I0120 18:05:12.376205 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/810b3c6b-1c22-49e0-aae7-16ac3637d8e8-utilities\") pod \"community-operators-c8rjz\" (UID: \"810b3c6b-1c22-49e0-aae7-16ac3637d8e8\") " pod="openshift-marketplace/community-operators-c8rjz" Jan 20 18:05:12 crc kubenswrapper[4558]: I0120 18:05:12.376484 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/810b3c6b-1c22-49e0-aae7-16ac3637d8e8-catalog-content\") pod \"community-operators-c8rjz\" (UID: \"810b3c6b-1c22-49e0-aae7-16ac3637d8e8\") " pod="openshift-marketplace/community-operators-c8rjz" Jan 20 18:05:12 crc kubenswrapper[4558]: I0120 18:05:12.378737 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e53d0e83-ebc6-4441-9c69-94b482e0f2b8-inventory" (OuterVolumeSpecName: "inventory") pod "e53d0e83-ebc6-4441-9c69-94b482e0f2b8" (UID: "e53d0e83-ebc6-4441-9c69-94b482e0f2b8"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:05:12 crc kubenswrapper[4558]: I0120 18:05:12.454282 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-z2g5q" event={"ID":"e53d0e83-ebc6-4441-9c69-94b482e0f2b8","Type":"ContainerDied","Data":"ddf98127fb6256d6d237634307b30b6341d03a3c3d96047084fa5945fbaa4a1b"} Jan 20 18:05:12 crc kubenswrapper[4558]: I0120 18:05:12.454317 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-z2g5q" Jan 20 18:05:12 crc kubenswrapper[4558]: I0120 18:05:12.454322 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ddf98127fb6256d6d237634307b30b6341d03a3c3d96047084fa5945fbaa4a1b" Jan 20 18:05:12 crc kubenswrapper[4558]: I0120 18:05:12.480452 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/810b3c6b-1c22-49e0-aae7-16ac3637d8e8-catalog-content\") pod \"community-operators-c8rjz\" (UID: \"810b3c6b-1c22-49e0-aae7-16ac3637d8e8\") " pod="openshift-marketplace/community-operators-c8rjz" Jan 20 18:05:12 crc kubenswrapper[4558]: I0120 18:05:12.480555 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pkgv9\" (UniqueName: \"kubernetes.io/projected/810b3c6b-1c22-49e0-aae7-16ac3637d8e8-kube-api-access-pkgv9\") pod \"community-operators-c8rjz\" (UID: \"810b3c6b-1c22-49e0-aae7-16ac3637d8e8\") " pod="openshift-marketplace/community-operators-c8rjz" Jan 20 18:05:12 crc kubenswrapper[4558]: I0120 18:05:12.480599 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/810b3c6b-1c22-49e0-aae7-16ac3637d8e8-utilities\") pod \"community-operators-c8rjz\" (UID: \"810b3c6b-1c22-49e0-aae7-16ac3637d8e8\") " pod="openshift-marketplace/community-operators-c8rjz" Jan 20 18:05:12 crc kubenswrapper[4558]: I0120 18:05:12.480670 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e53d0e83-ebc6-4441-9c69-94b482e0f2b8-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:05:12 crc kubenswrapper[4558]: I0120 18:05:12.480950 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/810b3c6b-1c22-49e0-aae7-16ac3637d8e8-catalog-content\") pod \"community-operators-c8rjz\" (UID: \"810b3c6b-1c22-49e0-aae7-16ac3637d8e8\") " pod="openshift-marketplace/community-operators-c8rjz" Jan 20 18:05:12 crc kubenswrapper[4558]: I0120 18:05:12.480972 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/810b3c6b-1c22-49e0-aae7-16ac3637d8e8-utilities\") pod \"community-operators-c8rjz\" (UID: \"810b3c6b-1c22-49e0-aae7-16ac3637d8e8\") " pod="openshift-marketplace/community-operators-c8rjz" Jan 20 18:05:12 crc kubenswrapper[4558]: I0120 18:05:12.498650 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pkgv9\" (UniqueName: \"kubernetes.io/projected/810b3c6b-1c22-49e0-aae7-16ac3637d8e8-kube-api-access-pkgv9\") pod \"community-operators-c8rjz\" (UID: \"810b3c6b-1c22-49e0-aae7-16ac3637d8e8\") " pod="openshift-marketplace/community-operators-c8rjz" Jan 20 18:05:12 crc kubenswrapper[4558]: I0120 18:05:12.511553 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-sriov-edpm-compute-no-nodes-edpm-compute-no-nodes-ftqwk"] Jan 20 18:05:12 crc kubenswrapper[4558]: I0120 18:05:12.512548 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-sriov-edpm-compute-no-nodes-edpm-compute-no-nodes-ftqwk" Jan 20 18:05:12 crc kubenswrapper[4558]: I0120 18:05:12.514933 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-sriov-agent-neutron-config" Jan 20 18:05:12 crc kubenswrapper[4558]: I0120 18:05:12.516388 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 18:05:12 crc kubenswrapper[4558]: I0120 18:05:12.516423 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-edpm-compute-no-nodes" Jan 20 18:05:12 crc kubenswrapper[4558]: I0120 18:05:12.516399 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"combined-ca-bundle" Jan 20 18:05:12 crc kubenswrapper[4558]: I0120 18:05:12.516561 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"edpm-compute-no-nodes-dockercfg-tljmj" Jan 20 18:05:12 crc kubenswrapper[4558]: I0120 18:05:12.516821 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 18:05:12 crc kubenswrapper[4558]: I0120 18:05:12.521368 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-sriov-edpm-compute-no-nodes-edpm-compute-no-nodes-ftqwk"] Jan 20 18:05:12 crc kubenswrapper[4558]: I0120 18:05:12.565680 4558 scope.go:117] "RemoveContainer" containerID="6b02d31ec11fe5435af988e35303a6f66b5afa5ef952ce5f7cc4b3cd5f4d9497" Jan 20 18:05:12 crc kubenswrapper[4558]: E0120 18:05:12.565898 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:05:12 crc kubenswrapper[4558]: I0120 18:05:12.581666 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-99kj5\" (UniqueName: \"kubernetes.io/projected/c6bad0e2-6a8e-499d-854b-18c87c491d0e-kube-api-access-99kj5\") pod \"neutron-sriov-edpm-compute-no-nodes-edpm-compute-no-nodes-ftqwk\" (UID: \"c6bad0e2-6a8e-499d-854b-18c87c491d0e\") " pod="openstack-kuttl-tests/neutron-sriov-edpm-compute-no-nodes-edpm-compute-no-nodes-ftqwk" Jan 20 18:05:12 crc kubenswrapper[4558]: I0120 18:05:12.581745 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/c6bad0e2-6a8e-499d-854b-18c87c491d0e-neutron-sriov-agent-neutron-config-0\") pod \"neutron-sriov-edpm-compute-no-nodes-edpm-compute-no-nodes-ftqwk\" (UID: \"c6bad0e2-6a8e-499d-854b-18c87c491d0e\") " pod="openstack-kuttl-tests/neutron-sriov-edpm-compute-no-nodes-edpm-compute-no-nodes-ftqwk" Jan 20 18:05:12 crc kubenswrapper[4558]: I0120 18:05:12.581797 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c6bad0e2-6a8e-499d-854b-18c87c491d0e-inventory\") pod \"neutron-sriov-edpm-compute-no-nodes-edpm-compute-no-nodes-ftqwk\" (UID: \"c6bad0e2-6a8e-499d-854b-18c87c491d0e\") 
" pod="openstack-kuttl-tests/neutron-sriov-edpm-compute-no-nodes-edpm-compute-no-nodes-ftqwk" Jan 20 18:05:12 crc kubenswrapper[4558]: I0120 18:05:12.581852 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/c6bad0e2-6a8e-499d-854b-18c87c491d0e-ssh-key-edpm-compute-no-nodes\") pod \"neutron-sriov-edpm-compute-no-nodes-edpm-compute-no-nodes-ftqwk\" (UID: \"c6bad0e2-6a8e-499d-854b-18c87c491d0e\") " pod="openstack-kuttl-tests/neutron-sriov-edpm-compute-no-nodes-edpm-compute-no-nodes-ftqwk" Jan 20 18:05:12 crc kubenswrapper[4558]: I0120 18:05:12.581894 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6bad0e2-6a8e-499d-854b-18c87c491d0e-neutron-sriov-combined-ca-bundle\") pod \"neutron-sriov-edpm-compute-no-nodes-edpm-compute-no-nodes-ftqwk\" (UID: \"c6bad0e2-6a8e-499d-854b-18c87c491d0e\") " pod="openstack-kuttl-tests/neutron-sriov-edpm-compute-no-nodes-edpm-compute-no-nodes-ftqwk" Jan 20 18:05:12 crc kubenswrapper[4558]: I0120 18:05:12.665491 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-c8rjz" Jan 20 18:05:12 crc kubenswrapper[4558]: I0120 18:05:12.683438 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-99kj5\" (UniqueName: \"kubernetes.io/projected/c6bad0e2-6a8e-499d-854b-18c87c491d0e-kube-api-access-99kj5\") pod \"neutron-sriov-edpm-compute-no-nodes-edpm-compute-no-nodes-ftqwk\" (UID: \"c6bad0e2-6a8e-499d-854b-18c87c491d0e\") " pod="openstack-kuttl-tests/neutron-sriov-edpm-compute-no-nodes-edpm-compute-no-nodes-ftqwk" Jan 20 18:05:12 crc kubenswrapper[4558]: I0120 18:05:12.683496 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/c6bad0e2-6a8e-499d-854b-18c87c491d0e-neutron-sriov-agent-neutron-config-0\") pod \"neutron-sriov-edpm-compute-no-nodes-edpm-compute-no-nodes-ftqwk\" (UID: \"c6bad0e2-6a8e-499d-854b-18c87c491d0e\") " pod="openstack-kuttl-tests/neutron-sriov-edpm-compute-no-nodes-edpm-compute-no-nodes-ftqwk" Jan 20 18:05:12 crc kubenswrapper[4558]: I0120 18:05:12.683524 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c6bad0e2-6a8e-499d-854b-18c87c491d0e-inventory\") pod \"neutron-sriov-edpm-compute-no-nodes-edpm-compute-no-nodes-ftqwk\" (UID: \"c6bad0e2-6a8e-499d-854b-18c87c491d0e\") " pod="openstack-kuttl-tests/neutron-sriov-edpm-compute-no-nodes-edpm-compute-no-nodes-ftqwk" Jan 20 18:05:12 crc kubenswrapper[4558]: I0120 18:05:12.683569 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/c6bad0e2-6a8e-499d-854b-18c87c491d0e-ssh-key-edpm-compute-no-nodes\") pod \"neutron-sriov-edpm-compute-no-nodes-edpm-compute-no-nodes-ftqwk\" (UID: \"c6bad0e2-6a8e-499d-854b-18c87c491d0e\") " pod="openstack-kuttl-tests/neutron-sriov-edpm-compute-no-nodes-edpm-compute-no-nodes-ftqwk" Jan 20 18:05:12 crc kubenswrapper[4558]: I0120 18:05:12.683594 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/c6bad0e2-6a8e-499d-854b-18c87c491d0e-neutron-sriov-combined-ca-bundle\") pod \"neutron-sriov-edpm-compute-no-nodes-edpm-compute-no-nodes-ftqwk\" (UID: \"c6bad0e2-6a8e-499d-854b-18c87c491d0e\") " pod="openstack-kuttl-tests/neutron-sriov-edpm-compute-no-nodes-edpm-compute-no-nodes-ftqwk" Jan 20 18:05:12 crc kubenswrapper[4558]: I0120 18:05:12.686819 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6bad0e2-6a8e-499d-854b-18c87c491d0e-neutron-sriov-combined-ca-bundle\") pod \"neutron-sriov-edpm-compute-no-nodes-edpm-compute-no-nodes-ftqwk\" (UID: \"c6bad0e2-6a8e-499d-854b-18c87c491d0e\") " pod="openstack-kuttl-tests/neutron-sriov-edpm-compute-no-nodes-edpm-compute-no-nodes-ftqwk" Jan 20 18:05:12 crc kubenswrapper[4558]: I0120 18:05:12.687103 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/c6bad0e2-6a8e-499d-854b-18c87c491d0e-ssh-key-edpm-compute-no-nodes\") pod \"neutron-sriov-edpm-compute-no-nodes-edpm-compute-no-nodes-ftqwk\" (UID: \"c6bad0e2-6a8e-499d-854b-18c87c491d0e\") " pod="openstack-kuttl-tests/neutron-sriov-edpm-compute-no-nodes-edpm-compute-no-nodes-ftqwk" Jan 20 18:05:12 crc kubenswrapper[4558]: I0120 18:05:12.690292 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c6bad0e2-6a8e-499d-854b-18c87c491d0e-inventory\") pod \"neutron-sriov-edpm-compute-no-nodes-edpm-compute-no-nodes-ftqwk\" (UID: \"c6bad0e2-6a8e-499d-854b-18c87c491d0e\") " pod="openstack-kuttl-tests/neutron-sriov-edpm-compute-no-nodes-edpm-compute-no-nodes-ftqwk" Jan 20 18:05:12 crc kubenswrapper[4558]: I0120 18:05:12.690842 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/c6bad0e2-6a8e-499d-854b-18c87c491d0e-neutron-sriov-agent-neutron-config-0\") pod \"neutron-sriov-edpm-compute-no-nodes-edpm-compute-no-nodes-ftqwk\" (UID: \"c6bad0e2-6a8e-499d-854b-18c87c491d0e\") " pod="openstack-kuttl-tests/neutron-sriov-edpm-compute-no-nodes-edpm-compute-no-nodes-ftqwk" Jan 20 18:05:12 crc kubenswrapper[4558]: I0120 18:05:12.700439 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-99kj5\" (UniqueName: \"kubernetes.io/projected/c6bad0e2-6a8e-499d-854b-18c87c491d0e-kube-api-access-99kj5\") pod \"neutron-sriov-edpm-compute-no-nodes-edpm-compute-no-nodes-ftqwk\" (UID: \"c6bad0e2-6a8e-499d-854b-18c87c491d0e\") " pod="openstack-kuttl-tests/neutron-sriov-edpm-compute-no-nodes-edpm-compute-no-nodes-ftqwk" Jan 20 18:05:12 crc kubenswrapper[4558]: I0120 18:05:12.825832 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-sriov-edpm-compute-no-nodes-edpm-compute-no-nodes-ftqwk" Jan 20 18:05:13 crc kubenswrapper[4558]: I0120 18:05:13.108384 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-c8rjz"] Jan 20 18:05:13 crc kubenswrapper[4558]: W0120 18:05:13.113840 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod810b3c6b_1c22_49e0_aae7_16ac3637d8e8.slice/crio-dfec627261b37f90e48dbfecb3be10bdf73bafdf778bd3891ada6190fc7ea086 WatchSource:0}: Error finding container dfec627261b37f90e48dbfecb3be10bdf73bafdf778bd3891ada6190fc7ea086: Status 404 returned error can't find the container with id dfec627261b37f90e48dbfecb3be10bdf73bafdf778bd3891ada6190fc7ea086 Jan 20 18:05:13 crc kubenswrapper[4558]: I0120 18:05:13.243077 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-sriov-edpm-compute-no-nodes-edpm-compute-no-nodes-ftqwk"] Jan 20 18:05:13 crc kubenswrapper[4558]: W0120 18:05:13.244617 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc6bad0e2_6a8e_499d_854b_18c87c491d0e.slice/crio-b834981d75d4687c964c9f29783eb06714f8307c8ae1c0e134632164899c09e2 WatchSource:0}: Error finding container b834981d75d4687c964c9f29783eb06714f8307c8ae1c0e134632164899c09e2: Status 404 returned error can't find the container with id b834981d75d4687c964c9f29783eb06714f8307c8ae1c0e134632164899c09e2 Jan 20 18:05:13 crc kubenswrapper[4558]: I0120 18:05:13.462642 4558 generic.go:334] "Generic (PLEG): container finished" podID="810b3c6b-1c22-49e0-aae7-16ac3637d8e8" containerID="d711c81e7e19fcd3948d666572ed43b4ad490ecc127a3dd6a3e199b409206a27" exitCode=0 Jan 20 18:05:13 crc kubenswrapper[4558]: I0120 18:05:13.462719 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c8rjz" event={"ID":"810b3c6b-1c22-49e0-aae7-16ac3637d8e8","Type":"ContainerDied","Data":"d711c81e7e19fcd3948d666572ed43b4ad490ecc127a3dd6a3e199b409206a27"} Jan 20 18:05:13 crc kubenswrapper[4558]: I0120 18:05:13.462754 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c8rjz" event={"ID":"810b3c6b-1c22-49e0-aae7-16ac3637d8e8","Type":"ContainerStarted","Data":"dfec627261b37f90e48dbfecb3be10bdf73bafdf778bd3891ada6190fc7ea086"} Jan 20 18:05:13 crc kubenswrapper[4558]: I0120 18:05:13.464533 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-sriov-edpm-compute-no-nodes-edpm-compute-no-nodes-ftqwk" event={"ID":"c6bad0e2-6a8e-499d-854b-18c87c491d0e","Type":"ContainerStarted","Data":"b834981d75d4687c964c9f29783eb06714f8307c8ae1c0e134632164899c09e2"} Jan 20 18:05:14 crc kubenswrapper[4558]: I0120 18:05:14.474969 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-sriov-edpm-compute-no-nodes-edpm-compute-no-nodes-ftqwk" event={"ID":"c6bad0e2-6a8e-499d-854b-18c87c491d0e","Type":"ContainerStarted","Data":"e1a51d79bc486e13aed05e565704ae5ce3ed66310a43a712dce635529f2dc42a"} Jan 20 18:05:14 crc kubenswrapper[4558]: I0120 18:05:14.499127 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/neutron-sriov-edpm-compute-no-nodes-edpm-compute-no-nodes-ftqwk" podStartSLOduration=2.040757297 podStartE2EDuration="2.499102206s" podCreationTimestamp="2026-01-20 18:05:12 +0000 UTC" 
firstStartedPulling="2026-01-20 18:05:13.247657472 +0000 UTC m=+5007.007995439" lastFinishedPulling="2026-01-20 18:05:13.70600239 +0000 UTC m=+5007.466340348" observedRunningTime="2026-01-20 18:05:14.494369707 +0000 UTC m=+5008.254707674" watchObservedRunningTime="2026-01-20 18:05:14.499102206 +0000 UTC m=+5008.259440163" Jan 20 18:05:15 crc kubenswrapper[4558]: I0120 18:05:15.487920 4558 generic.go:334] "Generic (PLEG): container finished" podID="810b3c6b-1c22-49e0-aae7-16ac3637d8e8" containerID="385f728b08bc92abf09cc4a254cca6041a40e22f48c00bd7ed4771d63fe7a9c8" exitCode=0 Jan 20 18:05:15 crc kubenswrapper[4558]: I0120 18:05:15.488014 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c8rjz" event={"ID":"810b3c6b-1c22-49e0-aae7-16ac3637d8e8","Type":"ContainerDied","Data":"385f728b08bc92abf09cc4a254cca6041a40e22f48c00bd7ed4771d63fe7a9c8"} Jan 20 18:05:15 crc kubenswrapper[4558]: I0120 18:05:15.490513 4558 generic.go:334] "Generic (PLEG): container finished" podID="c6bad0e2-6a8e-499d-854b-18c87c491d0e" containerID="e1a51d79bc486e13aed05e565704ae5ce3ed66310a43a712dce635529f2dc42a" exitCode=0 Jan 20 18:05:15 crc kubenswrapper[4558]: I0120 18:05:15.490573 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-sriov-edpm-compute-no-nodes-edpm-compute-no-nodes-ftqwk" event={"ID":"c6bad0e2-6a8e-499d-854b-18c87c491d0e","Type":"ContainerDied","Data":"e1a51d79bc486e13aed05e565704ae5ce3ed66310a43a712dce635529f2dc42a"} Jan 20 18:05:16 crc kubenswrapper[4558]: I0120 18:05:16.503735 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c8rjz" event={"ID":"810b3c6b-1c22-49e0-aae7-16ac3637d8e8","Type":"ContainerStarted","Data":"caa8694faaf76eabfd0ede479f32868edd61c9728f76783ad3cc9b252dc70bd2"} Jan 20 18:05:16 crc kubenswrapper[4558]: I0120 18:05:16.522039 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-c8rjz" podStartSLOduration=2.037470548 podStartE2EDuration="4.522017999s" podCreationTimestamp="2026-01-20 18:05:12 +0000 UTC" firstStartedPulling="2026-01-20 18:05:13.463929956 +0000 UTC m=+5007.224267914" lastFinishedPulling="2026-01-20 18:05:15.948477398 +0000 UTC m=+5009.708815365" observedRunningTime="2026-01-20 18:05:16.520674001 +0000 UTC m=+5010.281011969" watchObservedRunningTime="2026-01-20 18:05:16.522017999 +0000 UTC m=+5010.282355966" Jan 20 18:05:16 crc kubenswrapper[4558]: I0120 18:05:16.742883 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-sriov-edpm-compute-no-nodes-edpm-compute-no-nodes-ftqwk" Jan 20 18:05:16 crc kubenswrapper[4558]: I0120 18:05:16.858700 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c6bad0e2-6a8e-499d-854b-18c87c491d0e-inventory\") pod \"c6bad0e2-6a8e-499d-854b-18c87c491d0e\" (UID: \"c6bad0e2-6a8e-499d-854b-18c87c491d0e\") " Jan 20 18:05:16 crc kubenswrapper[4558]: I0120 18:05:16.858769 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/c6bad0e2-6a8e-499d-854b-18c87c491d0e-neutron-sriov-agent-neutron-config-0\") pod \"c6bad0e2-6a8e-499d-854b-18c87c491d0e\" (UID: \"c6bad0e2-6a8e-499d-854b-18c87c491d0e\") " Jan 20 18:05:16 crc kubenswrapper[4558]: I0120 18:05:16.858940 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-99kj5\" (UniqueName: \"kubernetes.io/projected/c6bad0e2-6a8e-499d-854b-18c87c491d0e-kube-api-access-99kj5\") pod \"c6bad0e2-6a8e-499d-854b-18c87c491d0e\" (UID: \"c6bad0e2-6a8e-499d-854b-18c87c491d0e\") " Jan 20 18:05:16 crc kubenswrapper[4558]: I0120 18:05:16.859049 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/c6bad0e2-6a8e-499d-854b-18c87c491d0e-ssh-key-edpm-compute-no-nodes\") pod \"c6bad0e2-6a8e-499d-854b-18c87c491d0e\" (UID: \"c6bad0e2-6a8e-499d-854b-18c87c491d0e\") " Jan 20 18:05:16 crc kubenswrapper[4558]: I0120 18:05:16.859075 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6bad0e2-6a8e-499d-854b-18c87c491d0e-neutron-sriov-combined-ca-bundle\") pod \"c6bad0e2-6a8e-499d-854b-18c87c491d0e\" (UID: \"c6bad0e2-6a8e-499d-854b-18c87c491d0e\") " Jan 20 18:05:16 crc kubenswrapper[4558]: I0120 18:05:16.866002 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6bad0e2-6a8e-499d-854b-18c87c491d0e-neutron-sriov-combined-ca-bundle" (OuterVolumeSpecName: "neutron-sriov-combined-ca-bundle") pod "c6bad0e2-6a8e-499d-854b-18c87c491d0e" (UID: "c6bad0e2-6a8e-499d-854b-18c87c491d0e"). InnerVolumeSpecName "neutron-sriov-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:05:16 crc kubenswrapper[4558]: I0120 18:05:16.866029 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c6bad0e2-6a8e-499d-854b-18c87c491d0e-kube-api-access-99kj5" (OuterVolumeSpecName: "kube-api-access-99kj5") pod "c6bad0e2-6a8e-499d-854b-18c87c491d0e" (UID: "c6bad0e2-6a8e-499d-854b-18c87c491d0e"). InnerVolumeSpecName "kube-api-access-99kj5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:05:16 crc kubenswrapper[4558]: I0120 18:05:16.878952 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6bad0e2-6a8e-499d-854b-18c87c491d0e-ssh-key-edpm-compute-no-nodes" (OuterVolumeSpecName: "ssh-key-edpm-compute-no-nodes") pod "c6bad0e2-6a8e-499d-854b-18c87c491d0e" (UID: "c6bad0e2-6a8e-499d-854b-18c87c491d0e"). InnerVolumeSpecName "ssh-key-edpm-compute-no-nodes". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:05:16 crc kubenswrapper[4558]: I0120 18:05:16.878996 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6bad0e2-6a8e-499d-854b-18c87c491d0e-neutron-sriov-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-sriov-agent-neutron-config-0") pod "c6bad0e2-6a8e-499d-854b-18c87c491d0e" (UID: "c6bad0e2-6a8e-499d-854b-18c87c491d0e"). InnerVolumeSpecName "neutron-sriov-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:05:16 crc kubenswrapper[4558]: I0120 18:05:16.879285 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6bad0e2-6a8e-499d-854b-18c87c491d0e-inventory" (OuterVolumeSpecName: "inventory") pod "c6bad0e2-6a8e-499d-854b-18c87c491d0e" (UID: "c6bad0e2-6a8e-499d-854b-18c87c491d0e"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:05:16 crc kubenswrapper[4558]: I0120 18:05:16.961188 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/c6bad0e2-6a8e-499d-854b-18c87c491d0e-ssh-key-edpm-compute-no-nodes\") on node \"crc\" DevicePath \"\"" Jan 20 18:05:16 crc kubenswrapper[4558]: I0120 18:05:16.961221 4558 reconciler_common.go:293] "Volume detached for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6bad0e2-6a8e-499d-854b-18c87c491d0e-neutron-sriov-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:05:16 crc kubenswrapper[4558]: I0120 18:05:16.961235 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c6bad0e2-6a8e-499d-854b-18c87c491d0e-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:05:16 crc kubenswrapper[4558]: I0120 18:05:16.961248 4558 reconciler_common.go:293] "Volume detached for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/c6bad0e2-6a8e-499d-854b-18c87c491d0e-neutron-sriov-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Jan 20 18:05:16 crc kubenswrapper[4558]: I0120 18:05:16.961260 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-99kj5\" (UniqueName: \"kubernetes.io/projected/c6bad0e2-6a8e-499d-854b-18c87c491d0e-kube-api-access-99kj5\") on node \"crc\" DevicePath \"\"" Jan 20 18:05:17 crc kubenswrapper[4558]: I0120 18:05:17.514271 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-sriov-edpm-compute-no-nodes-edpm-compute-no-nodes-ftqwk" event={"ID":"c6bad0e2-6a8e-499d-854b-18c87c491d0e","Type":"ContainerDied","Data":"b834981d75d4687c964c9f29783eb06714f8307c8ae1c0e134632164899c09e2"} Jan 20 18:05:17 crc kubenswrapper[4558]: I0120 18:05:17.514333 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b834981d75d4687c964c9f29783eb06714f8307c8ae1c0e134632164899c09e2" Jan 20 18:05:17 crc kubenswrapper[4558]: I0120 18:05:17.514334 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-sriov-edpm-compute-no-nodes-edpm-compute-no-nodes-ftqwk" Jan 20 18:05:17 crc kubenswrapper[4558]: I0120 18:05:17.554597 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-dhcp-edpm-compute-no-nodes-edpm-compute-no-nodes-s2hp4"] Jan 20 18:05:17 crc kubenswrapper[4558]: E0120 18:05:17.555237 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6bad0e2-6a8e-499d-854b-18c87c491d0e" containerName="neutron-sriov-edpm-compute-no-nodes-edpm-compute-no-nodes" Jan 20 18:05:17 crc kubenswrapper[4558]: I0120 18:05:17.555256 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6bad0e2-6a8e-499d-854b-18c87c491d0e" containerName="neutron-sriov-edpm-compute-no-nodes-edpm-compute-no-nodes" Jan 20 18:05:17 crc kubenswrapper[4558]: I0120 18:05:17.555405 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c6bad0e2-6a8e-499d-854b-18c87c491d0e" containerName="neutron-sriov-edpm-compute-no-nodes-edpm-compute-no-nodes" Jan 20 18:05:17 crc kubenswrapper[4558]: I0120 18:05:17.555866 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-dhcp-edpm-compute-no-nodes-edpm-compute-no-nodes-s2hp4" Jan 20 18:05:17 crc kubenswrapper[4558]: I0120 18:05:17.559062 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-dhcp-agent-neutron-config" Jan 20 18:05:17 crc kubenswrapper[4558]: I0120 18:05:17.559742 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 18:05:17 crc kubenswrapper[4558]: I0120 18:05:17.559895 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"edpm-compute-no-nodes-dockercfg-tljmj" Jan 20 18:05:17 crc kubenswrapper[4558]: I0120 18:05:17.560344 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-edpm-compute-no-nodes" Jan 20 18:05:17 crc kubenswrapper[4558]: I0120 18:05:17.561000 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 18:05:17 crc kubenswrapper[4558]: I0120 18:05:17.561753 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"combined-ca-bundle" Jan 20 18:05:17 crc kubenswrapper[4558]: I0120 18:05:17.566709 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-dhcp-edpm-compute-no-nodes-edpm-compute-no-nodes-s2hp4"] Jan 20 18:05:17 crc kubenswrapper[4558]: I0120 18:05:17.670527 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21734838-c50a-446f-bece-4a50f5a897d4-neutron-dhcp-combined-ca-bundle\") pod \"neutron-dhcp-edpm-compute-no-nodes-edpm-compute-no-nodes-s2hp4\" (UID: \"21734838-c50a-446f-bece-4a50f5a897d4\") " pod="openstack-kuttl-tests/neutron-dhcp-edpm-compute-no-nodes-edpm-compute-no-nodes-s2hp4" Jan 20 18:05:17 crc kubenswrapper[4558]: I0120 18:05:17.670588 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/21734838-c50a-446f-bece-4a50f5a897d4-ssh-key-edpm-compute-no-nodes\") pod \"neutron-dhcp-edpm-compute-no-nodes-edpm-compute-no-nodes-s2hp4\" (UID: 
\"21734838-c50a-446f-bece-4a50f5a897d4\") " pod="openstack-kuttl-tests/neutron-dhcp-edpm-compute-no-nodes-edpm-compute-no-nodes-s2hp4" Jan 20 18:05:17 crc kubenswrapper[4558]: I0120 18:05:17.671371 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/21734838-c50a-446f-bece-4a50f5a897d4-inventory\") pod \"neutron-dhcp-edpm-compute-no-nodes-edpm-compute-no-nodes-s2hp4\" (UID: \"21734838-c50a-446f-bece-4a50f5a897d4\") " pod="openstack-kuttl-tests/neutron-dhcp-edpm-compute-no-nodes-edpm-compute-no-nodes-s2hp4" Jan 20 18:05:17 crc kubenswrapper[4558]: I0120 18:05:17.671471 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/21734838-c50a-446f-bece-4a50f5a897d4-neutron-dhcp-agent-neutron-config-0\") pod \"neutron-dhcp-edpm-compute-no-nodes-edpm-compute-no-nodes-s2hp4\" (UID: \"21734838-c50a-446f-bece-4a50f5a897d4\") " pod="openstack-kuttl-tests/neutron-dhcp-edpm-compute-no-nodes-edpm-compute-no-nodes-s2hp4" Jan 20 18:05:17 crc kubenswrapper[4558]: I0120 18:05:17.671507 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jfqg4\" (UniqueName: \"kubernetes.io/projected/21734838-c50a-446f-bece-4a50f5a897d4-kube-api-access-jfqg4\") pod \"neutron-dhcp-edpm-compute-no-nodes-edpm-compute-no-nodes-s2hp4\" (UID: \"21734838-c50a-446f-bece-4a50f5a897d4\") " pod="openstack-kuttl-tests/neutron-dhcp-edpm-compute-no-nodes-edpm-compute-no-nodes-s2hp4" Jan 20 18:05:17 crc kubenswrapper[4558]: I0120 18:05:17.773801 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21734838-c50a-446f-bece-4a50f5a897d4-neutron-dhcp-combined-ca-bundle\") pod \"neutron-dhcp-edpm-compute-no-nodes-edpm-compute-no-nodes-s2hp4\" (UID: \"21734838-c50a-446f-bece-4a50f5a897d4\") " pod="openstack-kuttl-tests/neutron-dhcp-edpm-compute-no-nodes-edpm-compute-no-nodes-s2hp4" Jan 20 18:05:17 crc kubenswrapper[4558]: I0120 18:05:17.773956 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/21734838-c50a-446f-bece-4a50f5a897d4-ssh-key-edpm-compute-no-nodes\") pod \"neutron-dhcp-edpm-compute-no-nodes-edpm-compute-no-nodes-s2hp4\" (UID: \"21734838-c50a-446f-bece-4a50f5a897d4\") " pod="openstack-kuttl-tests/neutron-dhcp-edpm-compute-no-nodes-edpm-compute-no-nodes-s2hp4" Jan 20 18:05:17 crc kubenswrapper[4558]: I0120 18:05:17.774014 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/21734838-c50a-446f-bece-4a50f5a897d4-inventory\") pod \"neutron-dhcp-edpm-compute-no-nodes-edpm-compute-no-nodes-s2hp4\" (UID: \"21734838-c50a-446f-bece-4a50f5a897d4\") " pod="openstack-kuttl-tests/neutron-dhcp-edpm-compute-no-nodes-edpm-compute-no-nodes-s2hp4" Jan 20 18:05:17 crc kubenswrapper[4558]: I0120 18:05:17.774261 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/21734838-c50a-446f-bece-4a50f5a897d4-neutron-dhcp-agent-neutron-config-0\") pod \"neutron-dhcp-edpm-compute-no-nodes-edpm-compute-no-nodes-s2hp4\" (UID: \"21734838-c50a-446f-bece-4a50f5a897d4\") " 
pod="openstack-kuttl-tests/neutron-dhcp-edpm-compute-no-nodes-edpm-compute-no-nodes-s2hp4" Jan 20 18:05:17 crc kubenswrapper[4558]: I0120 18:05:17.774302 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jfqg4\" (UniqueName: \"kubernetes.io/projected/21734838-c50a-446f-bece-4a50f5a897d4-kube-api-access-jfqg4\") pod \"neutron-dhcp-edpm-compute-no-nodes-edpm-compute-no-nodes-s2hp4\" (UID: \"21734838-c50a-446f-bece-4a50f5a897d4\") " pod="openstack-kuttl-tests/neutron-dhcp-edpm-compute-no-nodes-edpm-compute-no-nodes-s2hp4" Jan 20 18:05:17 crc kubenswrapper[4558]: I0120 18:05:17.780822 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/21734838-c50a-446f-bece-4a50f5a897d4-neutron-dhcp-agent-neutron-config-0\") pod \"neutron-dhcp-edpm-compute-no-nodes-edpm-compute-no-nodes-s2hp4\" (UID: \"21734838-c50a-446f-bece-4a50f5a897d4\") " pod="openstack-kuttl-tests/neutron-dhcp-edpm-compute-no-nodes-edpm-compute-no-nodes-s2hp4" Jan 20 18:05:17 crc kubenswrapper[4558]: I0120 18:05:17.780879 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/21734838-c50a-446f-bece-4a50f5a897d4-inventory\") pod \"neutron-dhcp-edpm-compute-no-nodes-edpm-compute-no-nodes-s2hp4\" (UID: \"21734838-c50a-446f-bece-4a50f5a897d4\") " pod="openstack-kuttl-tests/neutron-dhcp-edpm-compute-no-nodes-edpm-compute-no-nodes-s2hp4" Jan 20 18:05:17 crc kubenswrapper[4558]: I0120 18:05:17.781114 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/21734838-c50a-446f-bece-4a50f5a897d4-ssh-key-edpm-compute-no-nodes\") pod \"neutron-dhcp-edpm-compute-no-nodes-edpm-compute-no-nodes-s2hp4\" (UID: \"21734838-c50a-446f-bece-4a50f5a897d4\") " pod="openstack-kuttl-tests/neutron-dhcp-edpm-compute-no-nodes-edpm-compute-no-nodes-s2hp4" Jan 20 18:05:17 crc kubenswrapper[4558]: I0120 18:05:17.784773 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21734838-c50a-446f-bece-4a50f5a897d4-neutron-dhcp-combined-ca-bundle\") pod \"neutron-dhcp-edpm-compute-no-nodes-edpm-compute-no-nodes-s2hp4\" (UID: \"21734838-c50a-446f-bece-4a50f5a897d4\") " pod="openstack-kuttl-tests/neutron-dhcp-edpm-compute-no-nodes-edpm-compute-no-nodes-s2hp4" Jan 20 18:05:17 crc kubenswrapper[4558]: I0120 18:05:17.789981 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jfqg4\" (UniqueName: \"kubernetes.io/projected/21734838-c50a-446f-bece-4a50f5a897d4-kube-api-access-jfqg4\") pod \"neutron-dhcp-edpm-compute-no-nodes-edpm-compute-no-nodes-s2hp4\" (UID: \"21734838-c50a-446f-bece-4a50f5a897d4\") " pod="openstack-kuttl-tests/neutron-dhcp-edpm-compute-no-nodes-edpm-compute-no-nodes-s2hp4" Jan 20 18:05:17 crc kubenswrapper[4558]: I0120 18:05:17.870106 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-dhcp-edpm-compute-no-nodes-edpm-compute-no-nodes-s2hp4" Jan 20 18:05:18 crc kubenswrapper[4558]: I0120 18:05:18.292791 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-dhcp-edpm-compute-no-nodes-edpm-compute-no-nodes-s2hp4"] Jan 20 18:05:18 crc kubenswrapper[4558]: I0120 18:05:18.523923 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-dhcp-edpm-compute-no-nodes-edpm-compute-no-nodes-s2hp4" event={"ID":"21734838-c50a-446f-bece-4a50f5a897d4","Type":"ContainerStarted","Data":"5edd2205eecae718acb79a9f30e6d92eb6c900d31d9b124f3c9d17167e2f5b29"} Jan 20 18:05:19 crc kubenswrapper[4558]: I0120 18:05:19.555035 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-dhcp-edpm-compute-no-nodes-edpm-compute-no-nodes-s2hp4" event={"ID":"21734838-c50a-446f-bece-4a50f5a897d4","Type":"ContainerStarted","Data":"5b4e3afbb4029a0e08acb0dfd1e242d44b8ef4811e92f285fec0fa23cb526282"} Jan 20 18:05:19 crc kubenswrapper[4558]: I0120 18:05:19.582255 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/neutron-dhcp-edpm-compute-no-nodes-edpm-compute-no-nodes-s2hp4" podStartSLOduration=2.048431486 podStartE2EDuration="2.582229974s" podCreationTimestamp="2026-01-20 18:05:17 +0000 UTC" firstStartedPulling="2026-01-20 18:05:18.302302196 +0000 UTC m=+5012.062640162" lastFinishedPulling="2026-01-20 18:05:18.836100683 +0000 UTC m=+5012.596438650" observedRunningTime="2026-01-20 18:05:19.578718902 +0000 UTC m=+5013.339056870" watchObservedRunningTime="2026-01-20 18:05:19.582229974 +0000 UTC m=+5013.342567941" Jan 20 18:05:20 crc kubenswrapper[4558]: I0120 18:05:20.566264 4558 generic.go:334] "Generic (PLEG): container finished" podID="21734838-c50a-446f-bece-4a50f5a897d4" containerID="5b4e3afbb4029a0e08acb0dfd1e242d44b8ef4811e92f285fec0fa23cb526282" exitCode=0 Jan 20 18:05:20 crc kubenswrapper[4558]: I0120 18:05:20.578220 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-dhcp-edpm-compute-no-nodes-edpm-compute-no-nodes-s2hp4" event={"ID":"21734838-c50a-446f-bece-4a50f5a897d4","Type":"ContainerDied","Data":"5b4e3afbb4029a0e08acb0dfd1e242d44b8ef4811e92f285fec0fa23cb526282"} Jan 20 18:05:21 crc kubenswrapper[4558]: I0120 18:05:21.830881 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-dhcp-edpm-compute-no-nodes-edpm-compute-no-nodes-s2hp4" Jan 20 18:05:21 crc kubenswrapper[4558]: I0120 18:05:21.933322 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/21734838-c50a-446f-bece-4a50f5a897d4-inventory\") pod \"21734838-c50a-446f-bece-4a50f5a897d4\" (UID: \"21734838-c50a-446f-bece-4a50f5a897d4\") " Jan 20 18:05:21 crc kubenswrapper[4558]: I0120 18:05:21.933463 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/21734838-c50a-446f-bece-4a50f5a897d4-ssh-key-edpm-compute-no-nodes\") pod \"21734838-c50a-446f-bece-4a50f5a897d4\" (UID: \"21734838-c50a-446f-bece-4a50f5a897d4\") " Jan 20 18:05:21 crc kubenswrapper[4558]: I0120 18:05:21.933528 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/21734838-c50a-446f-bece-4a50f5a897d4-neutron-dhcp-agent-neutron-config-0\") pod \"21734838-c50a-446f-bece-4a50f5a897d4\" (UID: \"21734838-c50a-446f-bece-4a50f5a897d4\") " Jan 20 18:05:21 crc kubenswrapper[4558]: I0120 18:05:21.933557 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21734838-c50a-446f-bece-4a50f5a897d4-neutron-dhcp-combined-ca-bundle\") pod \"21734838-c50a-446f-bece-4a50f5a897d4\" (UID: \"21734838-c50a-446f-bece-4a50f5a897d4\") " Jan 20 18:05:21 crc kubenswrapper[4558]: I0120 18:05:21.933698 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jfqg4\" (UniqueName: \"kubernetes.io/projected/21734838-c50a-446f-bece-4a50f5a897d4-kube-api-access-jfqg4\") pod \"21734838-c50a-446f-bece-4a50f5a897d4\" (UID: \"21734838-c50a-446f-bece-4a50f5a897d4\") " Jan 20 18:05:22 crc kubenswrapper[4558]: I0120 18:05:22.293837 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21734838-c50a-446f-bece-4a50f5a897d4-neutron-dhcp-combined-ca-bundle" (OuterVolumeSpecName: "neutron-dhcp-combined-ca-bundle") pod "21734838-c50a-446f-bece-4a50f5a897d4" (UID: "21734838-c50a-446f-bece-4a50f5a897d4"). InnerVolumeSpecName "neutron-dhcp-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:05:22 crc kubenswrapper[4558]: I0120 18:05:22.294143 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/21734838-c50a-446f-bece-4a50f5a897d4-kube-api-access-jfqg4" (OuterVolumeSpecName: "kube-api-access-jfqg4") pod "21734838-c50a-446f-bece-4a50f5a897d4" (UID: "21734838-c50a-446f-bece-4a50f5a897d4"). InnerVolumeSpecName "kube-api-access-jfqg4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:05:22 crc kubenswrapper[4558]: I0120 18:05:22.341225 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jfqg4\" (UniqueName: \"kubernetes.io/projected/21734838-c50a-446f-bece-4a50f5a897d4-kube-api-access-jfqg4\") on node \"crc\" DevicePath \"\"" Jan 20 18:05:22 crc kubenswrapper[4558]: I0120 18:05:22.341280 4558 reconciler_common.go:293] "Volume detached for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21734838-c50a-446f-bece-4a50f5a897d4-neutron-dhcp-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:05:22 crc kubenswrapper[4558]: I0120 18:05:22.408358 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21734838-c50a-446f-bece-4a50f5a897d4-ssh-key-edpm-compute-no-nodes" (OuterVolumeSpecName: "ssh-key-edpm-compute-no-nodes") pod "21734838-c50a-446f-bece-4a50f5a897d4" (UID: "21734838-c50a-446f-bece-4a50f5a897d4"). InnerVolumeSpecName "ssh-key-edpm-compute-no-nodes". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:05:22 crc kubenswrapper[4558]: I0120 18:05:22.409077 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21734838-c50a-446f-bece-4a50f5a897d4-inventory" (OuterVolumeSpecName: "inventory") pod "21734838-c50a-446f-bece-4a50f5a897d4" (UID: "21734838-c50a-446f-bece-4a50f5a897d4"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:05:22 crc kubenswrapper[4558]: I0120 18:05:22.409146 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21734838-c50a-446f-bece-4a50f5a897d4-neutron-dhcp-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-dhcp-agent-neutron-config-0") pod "21734838-c50a-446f-bece-4a50f5a897d4" (UID: "21734838-c50a-446f-bece-4a50f5a897d4"). InnerVolumeSpecName "neutron-dhcp-agent-neutron-config-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:05:22 crc kubenswrapper[4558]: I0120 18:05:22.442880 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/21734838-c50a-446f-bece-4a50f5a897d4-ssh-key-edpm-compute-no-nodes\") on node \"crc\" DevicePath \"\"" Jan 20 18:05:22 crc kubenswrapper[4558]: I0120 18:05:22.442909 4558 reconciler_common.go:293] "Volume detached for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/21734838-c50a-446f-bece-4a50f5a897d4-neutron-dhcp-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Jan 20 18:05:22 crc kubenswrapper[4558]: I0120 18:05:22.442923 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/21734838-c50a-446f-bece-4a50f5a897d4-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:05:22 crc kubenswrapper[4558]: I0120 18:05:22.584316 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-dhcp-edpm-compute-no-nodes-edpm-compute-no-nodes-s2hp4" event={"ID":"21734838-c50a-446f-bece-4a50f5a897d4","Type":"ContainerDied","Data":"5edd2205eecae718acb79a9f30e6d92eb6c900d31d9b124f3c9d17167e2f5b29"} Jan 20 18:05:22 crc kubenswrapper[4558]: I0120 18:05:22.584370 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5edd2205eecae718acb79a9f30e6d92eb6c900d31d9b124f3c9d17167e2f5b29" Jan 20 18:05:22 crc kubenswrapper[4558]: I0120 18:05:22.584383 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-dhcp-edpm-compute-no-nodes-edpm-compute-no-nodes-s2hp4" Jan 20 18:05:22 crc kubenswrapper[4558]: I0120 18:05:22.647085 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/libvirt-edpm-compute-no-nodes-edpm-compute-no-nodes-bw44r"] Jan 20 18:05:22 crc kubenswrapper[4558]: E0120 18:05:22.647649 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="21734838-c50a-446f-bece-4a50f5a897d4" containerName="neutron-dhcp-edpm-compute-no-nodes-edpm-compute-no-nodes" Jan 20 18:05:22 crc kubenswrapper[4558]: I0120 18:05:22.647677 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="21734838-c50a-446f-bece-4a50f5a897d4" containerName="neutron-dhcp-edpm-compute-no-nodes-edpm-compute-no-nodes" Jan 20 18:05:22 crc kubenswrapper[4558]: I0120 18:05:22.647908 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="21734838-c50a-446f-bece-4a50f5a897d4" containerName="neutron-dhcp-edpm-compute-no-nodes-edpm-compute-no-nodes" Jan 20 18:05:22 crc kubenswrapper[4558]: I0120 18:05:22.648645 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/libvirt-edpm-compute-no-nodes-edpm-compute-no-nodes-bw44r" Jan 20 18:05:22 crc kubenswrapper[4558]: I0120 18:05:22.650793 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"combined-ca-bundle" Jan 20 18:05:22 crc kubenswrapper[4558]: I0120 18:05:22.655584 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"libvirt-secret" Jan 20 18:05:22 crc kubenswrapper[4558]: I0120 18:05:22.655733 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"edpm-compute-no-nodes-dockercfg-tljmj" Jan 20 18:05:22 crc kubenswrapper[4558]: I0120 18:05:22.655875 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 18:05:22 crc kubenswrapper[4558]: I0120 18:05:22.656431 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 18:05:22 crc kubenswrapper[4558]: I0120 18:05:22.656745 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-edpm-compute-no-nodes" Jan 20 18:05:22 crc kubenswrapper[4558]: I0120 18:05:22.665575 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-c8rjz" Jan 20 18:05:22 crc kubenswrapper[4558]: I0120 18:05:22.665610 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-c8rjz" Jan 20 18:05:22 crc kubenswrapper[4558]: I0120 18:05:22.673241 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/libvirt-edpm-compute-no-nodes-edpm-compute-no-nodes-bw44r"] Jan 20 18:05:22 crc kubenswrapper[4558]: I0120 18:05:22.706409 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-c8rjz" Jan 20 18:05:22 crc kubenswrapper[4558]: I0120 18:05:22.746119 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/5ac3bfae-05eb-4f64-ab7f-a48397124f99-libvirt-secret-0\") pod \"libvirt-edpm-compute-no-nodes-edpm-compute-no-nodes-bw44r\" (UID: \"5ac3bfae-05eb-4f64-ab7f-a48397124f99\") " pod="openstack-kuttl-tests/libvirt-edpm-compute-no-nodes-edpm-compute-no-nodes-bw44r" Jan 20 18:05:22 crc kubenswrapper[4558]: I0120 18:05:22.746268 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/5ac3bfae-05eb-4f64-ab7f-a48397124f99-ssh-key-edpm-compute-no-nodes\") pod \"libvirt-edpm-compute-no-nodes-edpm-compute-no-nodes-bw44r\" (UID: \"5ac3bfae-05eb-4f64-ab7f-a48397124f99\") " pod="openstack-kuttl-tests/libvirt-edpm-compute-no-nodes-edpm-compute-no-nodes-bw44r" Jan 20 18:05:22 crc kubenswrapper[4558]: I0120 18:05:22.746314 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5ac3bfae-05eb-4f64-ab7f-a48397124f99-inventory\") pod \"libvirt-edpm-compute-no-nodes-edpm-compute-no-nodes-bw44r\" (UID: \"5ac3bfae-05eb-4f64-ab7f-a48397124f99\") " pod="openstack-kuttl-tests/libvirt-edpm-compute-no-nodes-edpm-compute-no-nodes-bw44r" Jan 20 18:05:22 crc kubenswrapper[4558]: I0120 18:05:22.746437 4558 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cshgz\" (UniqueName: \"kubernetes.io/projected/5ac3bfae-05eb-4f64-ab7f-a48397124f99-kube-api-access-cshgz\") pod \"libvirt-edpm-compute-no-nodes-edpm-compute-no-nodes-bw44r\" (UID: \"5ac3bfae-05eb-4f64-ab7f-a48397124f99\") " pod="openstack-kuttl-tests/libvirt-edpm-compute-no-nodes-edpm-compute-no-nodes-bw44r" Jan 20 18:05:22 crc kubenswrapper[4558]: I0120 18:05:22.746569 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ac3bfae-05eb-4f64-ab7f-a48397124f99-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-compute-no-nodes-edpm-compute-no-nodes-bw44r\" (UID: \"5ac3bfae-05eb-4f64-ab7f-a48397124f99\") " pod="openstack-kuttl-tests/libvirt-edpm-compute-no-nodes-edpm-compute-no-nodes-bw44r" Jan 20 18:05:22 crc kubenswrapper[4558]: I0120 18:05:22.848504 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/5ac3bfae-05eb-4f64-ab7f-a48397124f99-libvirt-secret-0\") pod \"libvirt-edpm-compute-no-nodes-edpm-compute-no-nodes-bw44r\" (UID: \"5ac3bfae-05eb-4f64-ab7f-a48397124f99\") " pod="openstack-kuttl-tests/libvirt-edpm-compute-no-nodes-edpm-compute-no-nodes-bw44r" Jan 20 18:05:22 crc kubenswrapper[4558]: I0120 18:05:22.848551 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/5ac3bfae-05eb-4f64-ab7f-a48397124f99-ssh-key-edpm-compute-no-nodes\") pod \"libvirt-edpm-compute-no-nodes-edpm-compute-no-nodes-bw44r\" (UID: \"5ac3bfae-05eb-4f64-ab7f-a48397124f99\") " pod="openstack-kuttl-tests/libvirt-edpm-compute-no-nodes-edpm-compute-no-nodes-bw44r" Jan 20 18:05:22 crc kubenswrapper[4558]: I0120 18:05:22.848588 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5ac3bfae-05eb-4f64-ab7f-a48397124f99-inventory\") pod \"libvirt-edpm-compute-no-nodes-edpm-compute-no-nodes-bw44r\" (UID: \"5ac3bfae-05eb-4f64-ab7f-a48397124f99\") " pod="openstack-kuttl-tests/libvirt-edpm-compute-no-nodes-edpm-compute-no-nodes-bw44r" Jan 20 18:05:22 crc kubenswrapper[4558]: I0120 18:05:22.848648 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cshgz\" (UniqueName: \"kubernetes.io/projected/5ac3bfae-05eb-4f64-ab7f-a48397124f99-kube-api-access-cshgz\") pod \"libvirt-edpm-compute-no-nodes-edpm-compute-no-nodes-bw44r\" (UID: \"5ac3bfae-05eb-4f64-ab7f-a48397124f99\") " pod="openstack-kuttl-tests/libvirt-edpm-compute-no-nodes-edpm-compute-no-nodes-bw44r" Jan 20 18:05:22 crc kubenswrapper[4558]: I0120 18:05:22.848704 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ac3bfae-05eb-4f64-ab7f-a48397124f99-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-compute-no-nodes-edpm-compute-no-nodes-bw44r\" (UID: \"5ac3bfae-05eb-4f64-ab7f-a48397124f99\") " pod="openstack-kuttl-tests/libvirt-edpm-compute-no-nodes-edpm-compute-no-nodes-bw44r" Jan 20 18:05:22 crc kubenswrapper[4558]: I0120 18:05:22.853632 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ac3bfae-05eb-4f64-ab7f-a48397124f99-libvirt-combined-ca-bundle\") pod 
\"libvirt-edpm-compute-no-nodes-edpm-compute-no-nodes-bw44r\" (UID: \"5ac3bfae-05eb-4f64-ab7f-a48397124f99\") " pod="openstack-kuttl-tests/libvirt-edpm-compute-no-nodes-edpm-compute-no-nodes-bw44r" Jan 20 18:05:22 crc kubenswrapper[4558]: I0120 18:05:22.853654 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5ac3bfae-05eb-4f64-ab7f-a48397124f99-inventory\") pod \"libvirt-edpm-compute-no-nodes-edpm-compute-no-nodes-bw44r\" (UID: \"5ac3bfae-05eb-4f64-ab7f-a48397124f99\") " pod="openstack-kuttl-tests/libvirt-edpm-compute-no-nodes-edpm-compute-no-nodes-bw44r" Jan 20 18:05:22 crc kubenswrapper[4558]: I0120 18:05:22.853729 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/5ac3bfae-05eb-4f64-ab7f-a48397124f99-libvirt-secret-0\") pod \"libvirt-edpm-compute-no-nodes-edpm-compute-no-nodes-bw44r\" (UID: \"5ac3bfae-05eb-4f64-ab7f-a48397124f99\") " pod="openstack-kuttl-tests/libvirt-edpm-compute-no-nodes-edpm-compute-no-nodes-bw44r" Jan 20 18:05:22 crc kubenswrapper[4558]: I0120 18:05:22.854002 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/5ac3bfae-05eb-4f64-ab7f-a48397124f99-ssh-key-edpm-compute-no-nodes\") pod \"libvirt-edpm-compute-no-nodes-edpm-compute-no-nodes-bw44r\" (UID: \"5ac3bfae-05eb-4f64-ab7f-a48397124f99\") " pod="openstack-kuttl-tests/libvirt-edpm-compute-no-nodes-edpm-compute-no-nodes-bw44r" Jan 20 18:05:22 crc kubenswrapper[4558]: I0120 18:05:22.862880 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cshgz\" (UniqueName: \"kubernetes.io/projected/5ac3bfae-05eb-4f64-ab7f-a48397124f99-kube-api-access-cshgz\") pod \"libvirt-edpm-compute-no-nodes-edpm-compute-no-nodes-bw44r\" (UID: \"5ac3bfae-05eb-4f64-ab7f-a48397124f99\") " pod="openstack-kuttl-tests/libvirt-edpm-compute-no-nodes-edpm-compute-no-nodes-bw44r" Jan 20 18:05:22 crc kubenswrapper[4558]: I0120 18:05:22.972065 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/libvirt-edpm-compute-no-nodes-edpm-compute-no-nodes-bw44r" Jan 20 18:05:23 crc kubenswrapper[4558]: I0120 18:05:23.380622 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/libvirt-edpm-compute-no-nodes-edpm-compute-no-nodes-bw44r"] Jan 20 18:05:23 crc kubenswrapper[4558]: W0120 18:05:23.383466 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5ac3bfae_05eb_4f64_ab7f_a48397124f99.slice/crio-f47d56dfdfa36e6b5c3e917fd678e32fe711ae155f95c7d1e4037b28d8421b8c WatchSource:0}: Error finding container f47d56dfdfa36e6b5c3e917fd678e32fe711ae155f95c7d1e4037b28d8421b8c: Status 404 returned error can't find the container with id f47d56dfdfa36e6b5c3e917fd678e32fe711ae155f95c7d1e4037b28d8421b8c Jan 20 18:05:23 crc kubenswrapper[4558]: I0120 18:05:23.609417 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/libvirt-edpm-compute-no-nodes-edpm-compute-no-nodes-bw44r" event={"ID":"5ac3bfae-05eb-4f64-ab7f-a48397124f99","Type":"ContainerStarted","Data":"f47d56dfdfa36e6b5c3e917fd678e32fe711ae155f95c7d1e4037b28d8421b8c"} Jan 20 18:05:24 crc kubenswrapper[4558]: I0120 18:05:24.024408 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-c8rjz" Jan 20 18:05:24 crc kubenswrapper[4558]: I0120 18:05:24.073904 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-c8rjz"] Jan 20 18:05:24 crc kubenswrapper[4558]: I0120 18:05:24.609834 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/libvirt-edpm-compute-no-nodes-edpm-compute-no-nodes-bw44r" event={"ID":"5ac3bfae-05eb-4f64-ab7f-a48397124f99","Type":"ContainerStarted","Data":"f27df2605198d2649641178381f3331ced93d1b1a1951424947c4cd32d08d215"} Jan 20 18:05:24 crc kubenswrapper[4558]: I0120 18:05:24.625115 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/libvirt-edpm-compute-no-nodes-edpm-compute-no-nodes-bw44r" podStartSLOduration=2.12905096 podStartE2EDuration="2.62509467s" podCreationTimestamp="2026-01-20 18:05:22 +0000 UTC" firstStartedPulling="2026-01-20 18:05:23.385933661 +0000 UTC m=+5017.146271629" lastFinishedPulling="2026-01-20 18:05:23.881977373 +0000 UTC m=+5017.642315339" observedRunningTime="2026-01-20 18:05:24.621490893 +0000 UTC m=+5018.381828861" watchObservedRunningTime="2026-01-20 18:05:24.62509467 +0000 UTC m=+5018.385432637" Jan 20 18:05:25 crc kubenswrapper[4558]: I0120 18:05:25.566570 4558 scope.go:117] "RemoveContainer" containerID="6b02d31ec11fe5435af988e35303a6f66b5afa5ef952ce5f7cc4b3cd5f4d9497" Jan 20 18:05:25 crc kubenswrapper[4558]: E0120 18:05:25.566850 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:05:25 crc kubenswrapper[4558]: I0120 18:05:25.625477 4558 generic.go:334] "Generic (PLEG): container finished" podID="5ac3bfae-05eb-4f64-ab7f-a48397124f99" containerID="f27df2605198d2649641178381f3331ced93d1b1a1951424947c4cd32d08d215" exitCode=0 Jan 20 18:05:25 crc 
kubenswrapper[4558]: I0120 18:05:25.625538 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/libvirt-edpm-compute-no-nodes-edpm-compute-no-nodes-bw44r" event={"ID":"5ac3bfae-05eb-4f64-ab7f-a48397124f99","Type":"ContainerDied","Data":"f27df2605198d2649641178381f3331ced93d1b1a1951424947c4cd32d08d215"} Jan 20 18:05:25 crc kubenswrapper[4558]: I0120 18:05:25.625690 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-c8rjz" podUID="810b3c6b-1c22-49e0-aae7-16ac3637d8e8" containerName="registry-server" containerID="cri-o://caa8694faaf76eabfd0ede479f32868edd61c9728f76783ad3cc9b252dc70bd2" gracePeriod=2 Jan 20 18:05:25 crc kubenswrapper[4558]: I0120 18:05:25.983580 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-c8rjz" Jan 20 18:05:26 crc kubenswrapper[4558]: I0120 18:05:26.097623 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/810b3c6b-1c22-49e0-aae7-16ac3637d8e8-utilities\") pod \"810b3c6b-1c22-49e0-aae7-16ac3637d8e8\" (UID: \"810b3c6b-1c22-49e0-aae7-16ac3637d8e8\") " Jan 20 18:05:26 crc kubenswrapper[4558]: I0120 18:05:26.097881 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pkgv9\" (UniqueName: \"kubernetes.io/projected/810b3c6b-1c22-49e0-aae7-16ac3637d8e8-kube-api-access-pkgv9\") pod \"810b3c6b-1c22-49e0-aae7-16ac3637d8e8\" (UID: \"810b3c6b-1c22-49e0-aae7-16ac3637d8e8\") " Jan 20 18:05:26 crc kubenswrapper[4558]: I0120 18:05:26.098072 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/810b3c6b-1c22-49e0-aae7-16ac3637d8e8-catalog-content\") pod \"810b3c6b-1c22-49e0-aae7-16ac3637d8e8\" (UID: \"810b3c6b-1c22-49e0-aae7-16ac3637d8e8\") " Jan 20 18:05:26 crc kubenswrapper[4558]: I0120 18:05:26.098584 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/810b3c6b-1c22-49e0-aae7-16ac3637d8e8-utilities" (OuterVolumeSpecName: "utilities") pod "810b3c6b-1c22-49e0-aae7-16ac3637d8e8" (UID: "810b3c6b-1c22-49e0-aae7-16ac3637d8e8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:05:26 crc kubenswrapper[4558]: I0120 18:05:26.099062 4558 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/810b3c6b-1c22-49e0-aae7-16ac3637d8e8-utilities\") on node \"crc\" DevicePath \"\"" Jan 20 18:05:26 crc kubenswrapper[4558]: I0120 18:05:26.104141 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/810b3c6b-1c22-49e0-aae7-16ac3637d8e8-kube-api-access-pkgv9" (OuterVolumeSpecName: "kube-api-access-pkgv9") pod "810b3c6b-1c22-49e0-aae7-16ac3637d8e8" (UID: "810b3c6b-1c22-49e0-aae7-16ac3637d8e8"). InnerVolumeSpecName "kube-api-access-pkgv9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:05:26 crc kubenswrapper[4558]: I0120 18:05:26.143856 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/810b3c6b-1c22-49e0-aae7-16ac3637d8e8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "810b3c6b-1c22-49e0-aae7-16ac3637d8e8" (UID: "810b3c6b-1c22-49e0-aae7-16ac3637d8e8"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:05:26 crc kubenswrapper[4558]: I0120 18:05:26.200071 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pkgv9\" (UniqueName: \"kubernetes.io/projected/810b3c6b-1c22-49e0-aae7-16ac3637d8e8-kube-api-access-pkgv9\") on node \"crc\" DevicePath \"\"" Jan 20 18:05:26 crc kubenswrapper[4558]: I0120 18:05:26.200108 4558 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/810b3c6b-1c22-49e0-aae7-16ac3637d8e8-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 20 18:05:26 crc kubenswrapper[4558]: I0120 18:05:26.636901 4558 generic.go:334] "Generic (PLEG): container finished" podID="810b3c6b-1c22-49e0-aae7-16ac3637d8e8" containerID="caa8694faaf76eabfd0ede479f32868edd61c9728f76783ad3cc9b252dc70bd2" exitCode=0 Jan 20 18:05:26 crc kubenswrapper[4558]: I0120 18:05:26.636968 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-c8rjz" Jan 20 18:05:26 crc kubenswrapper[4558]: I0120 18:05:26.636987 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c8rjz" event={"ID":"810b3c6b-1c22-49e0-aae7-16ac3637d8e8","Type":"ContainerDied","Data":"caa8694faaf76eabfd0ede479f32868edd61c9728f76783ad3cc9b252dc70bd2"} Jan 20 18:05:26 crc kubenswrapper[4558]: I0120 18:05:26.637049 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c8rjz" event={"ID":"810b3c6b-1c22-49e0-aae7-16ac3637d8e8","Type":"ContainerDied","Data":"dfec627261b37f90e48dbfecb3be10bdf73bafdf778bd3891ada6190fc7ea086"} Jan 20 18:05:26 crc kubenswrapper[4558]: I0120 18:05:26.637080 4558 scope.go:117] "RemoveContainer" containerID="caa8694faaf76eabfd0ede479f32868edd61c9728f76783ad3cc9b252dc70bd2" Jan 20 18:05:26 crc kubenswrapper[4558]: I0120 18:05:26.663368 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-c8rjz"] Jan 20 18:05:26 crc kubenswrapper[4558]: I0120 18:05:26.665530 4558 scope.go:117] "RemoveContainer" containerID="385f728b08bc92abf09cc4a254cca6041a40e22f48c00bd7ed4771d63fe7a9c8" Jan 20 18:05:26 crc kubenswrapper[4558]: I0120 18:05:26.669471 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-c8rjz"] Jan 20 18:05:26 crc kubenswrapper[4558]: I0120 18:05:26.683537 4558 scope.go:117] "RemoveContainer" containerID="d711c81e7e19fcd3948d666572ed43b4ad490ecc127a3dd6a3e199b409206a27" Jan 20 18:05:26 crc kubenswrapper[4558]: I0120 18:05:26.705974 4558 scope.go:117] "RemoveContainer" containerID="caa8694faaf76eabfd0ede479f32868edd61c9728f76783ad3cc9b252dc70bd2" Jan 20 18:05:26 crc kubenswrapper[4558]: E0120 18:05:26.706398 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"caa8694faaf76eabfd0ede479f32868edd61c9728f76783ad3cc9b252dc70bd2\": container with ID starting with caa8694faaf76eabfd0ede479f32868edd61c9728f76783ad3cc9b252dc70bd2 not found: ID does not exist" containerID="caa8694faaf76eabfd0ede479f32868edd61c9728f76783ad3cc9b252dc70bd2" Jan 20 18:05:26 crc kubenswrapper[4558]: I0120 18:05:26.706443 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"caa8694faaf76eabfd0ede479f32868edd61c9728f76783ad3cc9b252dc70bd2"} err="failed to get container status 
\"caa8694faaf76eabfd0ede479f32868edd61c9728f76783ad3cc9b252dc70bd2\": rpc error: code = NotFound desc = could not find container \"caa8694faaf76eabfd0ede479f32868edd61c9728f76783ad3cc9b252dc70bd2\": container with ID starting with caa8694faaf76eabfd0ede479f32868edd61c9728f76783ad3cc9b252dc70bd2 not found: ID does not exist" Jan 20 18:05:26 crc kubenswrapper[4558]: I0120 18:05:26.706475 4558 scope.go:117] "RemoveContainer" containerID="385f728b08bc92abf09cc4a254cca6041a40e22f48c00bd7ed4771d63fe7a9c8" Jan 20 18:05:26 crc kubenswrapper[4558]: E0120 18:05:26.706844 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"385f728b08bc92abf09cc4a254cca6041a40e22f48c00bd7ed4771d63fe7a9c8\": container with ID starting with 385f728b08bc92abf09cc4a254cca6041a40e22f48c00bd7ed4771d63fe7a9c8 not found: ID does not exist" containerID="385f728b08bc92abf09cc4a254cca6041a40e22f48c00bd7ed4771d63fe7a9c8" Jan 20 18:05:26 crc kubenswrapper[4558]: I0120 18:05:26.706883 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"385f728b08bc92abf09cc4a254cca6041a40e22f48c00bd7ed4771d63fe7a9c8"} err="failed to get container status \"385f728b08bc92abf09cc4a254cca6041a40e22f48c00bd7ed4771d63fe7a9c8\": rpc error: code = NotFound desc = could not find container \"385f728b08bc92abf09cc4a254cca6041a40e22f48c00bd7ed4771d63fe7a9c8\": container with ID starting with 385f728b08bc92abf09cc4a254cca6041a40e22f48c00bd7ed4771d63fe7a9c8 not found: ID does not exist" Jan 20 18:05:26 crc kubenswrapper[4558]: I0120 18:05:26.706905 4558 scope.go:117] "RemoveContainer" containerID="d711c81e7e19fcd3948d666572ed43b4ad490ecc127a3dd6a3e199b409206a27" Jan 20 18:05:26 crc kubenswrapper[4558]: E0120 18:05:26.707198 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d711c81e7e19fcd3948d666572ed43b4ad490ecc127a3dd6a3e199b409206a27\": container with ID starting with d711c81e7e19fcd3948d666572ed43b4ad490ecc127a3dd6a3e199b409206a27 not found: ID does not exist" containerID="d711c81e7e19fcd3948d666572ed43b4ad490ecc127a3dd6a3e199b409206a27" Jan 20 18:05:26 crc kubenswrapper[4558]: I0120 18:05:26.707233 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d711c81e7e19fcd3948d666572ed43b4ad490ecc127a3dd6a3e199b409206a27"} err="failed to get container status \"d711c81e7e19fcd3948d666572ed43b4ad490ecc127a3dd6a3e199b409206a27\": rpc error: code = NotFound desc = could not find container \"d711c81e7e19fcd3948d666572ed43b4ad490ecc127a3dd6a3e199b409206a27\": container with ID starting with d711c81e7e19fcd3948d666572ed43b4ad490ecc127a3dd6a3e199b409206a27 not found: ID does not exist" Jan 20 18:05:26 crc kubenswrapper[4558]: I0120 18:05:26.888366 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/libvirt-edpm-compute-no-nodes-edpm-compute-no-nodes-bw44r" Jan 20 18:05:27 crc kubenswrapper[4558]: I0120 18:05:27.009727 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cshgz\" (UniqueName: \"kubernetes.io/projected/5ac3bfae-05eb-4f64-ab7f-a48397124f99-kube-api-access-cshgz\") pod \"5ac3bfae-05eb-4f64-ab7f-a48397124f99\" (UID: \"5ac3bfae-05eb-4f64-ab7f-a48397124f99\") " Jan 20 18:05:27 crc kubenswrapper[4558]: I0120 18:05:27.009842 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/5ac3bfae-05eb-4f64-ab7f-a48397124f99-libvirt-secret-0\") pod \"5ac3bfae-05eb-4f64-ab7f-a48397124f99\" (UID: \"5ac3bfae-05eb-4f64-ab7f-a48397124f99\") " Jan 20 18:05:27 crc kubenswrapper[4558]: I0120 18:05:27.009948 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5ac3bfae-05eb-4f64-ab7f-a48397124f99-inventory\") pod \"5ac3bfae-05eb-4f64-ab7f-a48397124f99\" (UID: \"5ac3bfae-05eb-4f64-ab7f-a48397124f99\") " Jan 20 18:05:27 crc kubenswrapper[4558]: I0120 18:05:27.009985 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/5ac3bfae-05eb-4f64-ab7f-a48397124f99-ssh-key-edpm-compute-no-nodes\") pod \"5ac3bfae-05eb-4f64-ab7f-a48397124f99\" (UID: \"5ac3bfae-05eb-4f64-ab7f-a48397124f99\") " Jan 20 18:05:27 crc kubenswrapper[4558]: I0120 18:05:27.010031 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ac3bfae-05eb-4f64-ab7f-a48397124f99-libvirt-combined-ca-bundle\") pod \"5ac3bfae-05eb-4f64-ab7f-a48397124f99\" (UID: \"5ac3bfae-05eb-4f64-ab7f-a48397124f99\") " Jan 20 18:05:27 crc kubenswrapper[4558]: I0120 18:05:27.014732 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ac3bfae-05eb-4f64-ab7f-a48397124f99-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "5ac3bfae-05eb-4f64-ab7f-a48397124f99" (UID: "5ac3bfae-05eb-4f64-ab7f-a48397124f99"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:05:27 crc kubenswrapper[4558]: I0120 18:05:27.014758 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5ac3bfae-05eb-4f64-ab7f-a48397124f99-kube-api-access-cshgz" (OuterVolumeSpecName: "kube-api-access-cshgz") pod "5ac3bfae-05eb-4f64-ab7f-a48397124f99" (UID: "5ac3bfae-05eb-4f64-ab7f-a48397124f99"). InnerVolumeSpecName "kube-api-access-cshgz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:05:27 crc kubenswrapper[4558]: I0120 18:05:27.030259 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ac3bfae-05eb-4f64-ab7f-a48397124f99-ssh-key-edpm-compute-no-nodes" (OuterVolumeSpecName: "ssh-key-edpm-compute-no-nodes") pod "5ac3bfae-05eb-4f64-ab7f-a48397124f99" (UID: "5ac3bfae-05eb-4f64-ab7f-a48397124f99"). InnerVolumeSpecName "ssh-key-edpm-compute-no-nodes". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:05:27 crc kubenswrapper[4558]: I0120 18:05:27.030701 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ac3bfae-05eb-4f64-ab7f-a48397124f99-inventory" (OuterVolumeSpecName: "inventory") pod "5ac3bfae-05eb-4f64-ab7f-a48397124f99" (UID: "5ac3bfae-05eb-4f64-ab7f-a48397124f99"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:05:27 crc kubenswrapper[4558]: I0120 18:05:27.030841 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ac3bfae-05eb-4f64-ab7f-a48397124f99-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "5ac3bfae-05eb-4f64-ab7f-a48397124f99" (UID: "5ac3bfae-05eb-4f64-ab7f-a48397124f99"). InnerVolumeSpecName "libvirt-secret-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:05:27 crc kubenswrapper[4558]: I0120 18:05:27.112527 4558 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/5ac3bfae-05eb-4f64-ab7f-a48397124f99-libvirt-secret-0\") on node \"crc\" DevicePath \"\"" Jan 20 18:05:27 crc kubenswrapper[4558]: I0120 18:05:27.112677 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5ac3bfae-05eb-4f64-ab7f-a48397124f99-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:05:27 crc kubenswrapper[4558]: I0120 18:05:27.112736 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/5ac3bfae-05eb-4f64-ab7f-a48397124f99-ssh-key-edpm-compute-no-nodes\") on node \"crc\" DevicePath \"\"" Jan 20 18:05:27 crc kubenswrapper[4558]: I0120 18:05:27.112792 4558 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5ac3bfae-05eb-4f64-ab7f-a48397124f99-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:05:27 crc kubenswrapper[4558]: I0120 18:05:27.112844 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cshgz\" (UniqueName: \"kubernetes.io/projected/5ac3bfae-05eb-4f64-ab7f-a48397124f99-kube-api-access-cshgz\") on node \"crc\" DevicePath \"\"" Jan 20 18:05:27 crc kubenswrapper[4558]: I0120 18:05:27.648768 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/libvirt-edpm-compute-no-nodes-edpm-compute-no-nodes-bw44r" Jan 20 18:05:27 crc kubenswrapper[4558]: I0120 18:05:27.648766 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/libvirt-edpm-compute-no-nodes-edpm-compute-no-nodes-bw44r" event={"ID":"5ac3bfae-05eb-4f64-ab7f-a48397124f99","Type":"ContainerDied","Data":"f47d56dfdfa36e6b5c3e917fd678e32fe711ae155f95c7d1e4037b28d8421b8c"} Jan 20 18:05:27 crc kubenswrapper[4558]: I0120 18:05:27.648909 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f47d56dfdfa36e6b5c3e917fd678e32fe711ae155f95c7d1e4037b28d8421b8c" Jan 20 18:05:27 crc kubenswrapper[4558]: I0120 18:05:27.700589 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-edpm-compute-no-nodes-edpm-compute-no-nodes-5t59k"] Jan 20 18:05:27 crc kubenswrapper[4558]: E0120 18:05:27.700931 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="810b3c6b-1c22-49e0-aae7-16ac3637d8e8" containerName="extract-utilities" Jan 20 18:05:27 crc kubenswrapper[4558]: I0120 18:05:27.700948 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="810b3c6b-1c22-49e0-aae7-16ac3637d8e8" containerName="extract-utilities" Jan 20 18:05:27 crc kubenswrapper[4558]: E0120 18:05:27.700965 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="810b3c6b-1c22-49e0-aae7-16ac3637d8e8" containerName="extract-content" Jan 20 18:05:27 crc kubenswrapper[4558]: I0120 18:05:27.700973 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="810b3c6b-1c22-49e0-aae7-16ac3637d8e8" containerName="extract-content" Jan 20 18:05:27 crc kubenswrapper[4558]: E0120 18:05:27.700995 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="810b3c6b-1c22-49e0-aae7-16ac3637d8e8" containerName="registry-server" Jan 20 18:05:27 crc kubenswrapper[4558]: I0120 18:05:27.701000 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="810b3c6b-1c22-49e0-aae7-16ac3637d8e8" containerName="registry-server" Jan 20 18:05:27 crc kubenswrapper[4558]: E0120 18:05:27.701013 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ac3bfae-05eb-4f64-ab7f-a48397124f99" containerName="libvirt-edpm-compute-no-nodes-edpm-compute-no-nodes" Jan 20 18:05:27 crc kubenswrapper[4558]: I0120 18:05:27.701020 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ac3bfae-05eb-4f64-ab7f-a48397124f99" containerName="libvirt-edpm-compute-no-nodes-edpm-compute-no-nodes" Jan 20 18:05:27 crc kubenswrapper[4558]: I0120 18:05:27.701142 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ac3bfae-05eb-4f64-ab7f-a48397124f99" containerName="libvirt-edpm-compute-no-nodes-edpm-compute-no-nodes" Jan 20 18:05:27 crc kubenswrapper[4558]: I0120 18:05:27.701180 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="810b3c6b-1c22-49e0-aae7-16ac3637d8e8" containerName="registry-server" Jan 20 18:05:27 crc kubenswrapper[4558]: I0120 18:05:27.701688 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-edpm-compute-no-nodes-edpm-compute-no-nodes-5t59k" Jan 20 18:05:27 crc kubenswrapper[4558]: I0120 18:05:27.704726 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-compute-config" Jan 20 18:05:27 crc kubenswrapper[4558]: I0120 18:05:27.705239 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-edpm-compute-no-nodes" Jan 20 18:05:27 crc kubenswrapper[4558]: I0120 18:05:27.705359 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 18:05:27 crc kubenswrapper[4558]: I0120 18:05:27.706003 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-migration-ssh-key" Jan 20 18:05:27 crc kubenswrapper[4558]: I0120 18:05:27.706140 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"combined-ca-bundle" Jan 20 18:05:27 crc kubenswrapper[4558]: I0120 18:05:27.706282 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 18:05:27 crc kubenswrapper[4558]: I0120 18:05:27.706381 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"edpm-compute-no-nodes-dockercfg-tljmj" Jan 20 18:05:27 crc kubenswrapper[4558]: I0120 18:05:27.717625 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-edpm-compute-no-nodes-edpm-compute-no-nodes-5t59k"] Jan 20 18:05:27 crc kubenswrapper[4558]: I0120 18:05:27.827799 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/231da886-60dd-4772-a429-3e2fc309990d-nova-cell1-compute-config-1\") pod \"nova-edpm-compute-no-nodes-edpm-compute-no-nodes-5t59k\" (UID: \"231da886-60dd-4772-a429-3e2fc309990d\") " pod="openstack-kuttl-tests/nova-edpm-compute-no-nodes-edpm-compute-no-nodes-5t59k" Jan 20 18:05:27 crc kubenswrapper[4558]: I0120 18:05:27.828209 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/231da886-60dd-4772-a429-3e2fc309990d-nova-migration-ssh-key-0\") pod \"nova-edpm-compute-no-nodes-edpm-compute-no-nodes-5t59k\" (UID: \"231da886-60dd-4772-a429-3e2fc309990d\") " pod="openstack-kuttl-tests/nova-edpm-compute-no-nodes-edpm-compute-no-nodes-5t59k" Jan 20 18:05:27 crc kubenswrapper[4558]: I0120 18:05:27.828353 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/231da886-60dd-4772-a429-3e2fc309990d-ssh-key-edpm-compute-no-nodes\") pod \"nova-edpm-compute-no-nodes-edpm-compute-no-nodes-5t59k\" (UID: \"231da886-60dd-4772-a429-3e2fc309990d\") " pod="openstack-kuttl-tests/nova-edpm-compute-no-nodes-edpm-compute-no-nodes-5t59k" Jan 20 18:05:27 crc kubenswrapper[4558]: I0120 18:05:27.828438 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mrfn2\" (UniqueName: \"kubernetes.io/projected/231da886-60dd-4772-a429-3e2fc309990d-kube-api-access-mrfn2\") pod \"nova-edpm-compute-no-nodes-edpm-compute-no-nodes-5t59k\" (UID: \"231da886-60dd-4772-a429-3e2fc309990d\") " 
pod="openstack-kuttl-tests/nova-edpm-compute-no-nodes-edpm-compute-no-nodes-5t59k" Jan 20 18:05:27 crc kubenswrapper[4558]: I0120 18:05:27.828614 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/231da886-60dd-4772-a429-3e2fc309990d-nova-migration-ssh-key-1\") pod \"nova-edpm-compute-no-nodes-edpm-compute-no-nodes-5t59k\" (UID: \"231da886-60dd-4772-a429-3e2fc309990d\") " pod="openstack-kuttl-tests/nova-edpm-compute-no-nodes-edpm-compute-no-nodes-5t59k" Jan 20 18:05:27 crc kubenswrapper[4558]: I0120 18:05:27.828781 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/231da886-60dd-4772-a429-3e2fc309990d-nova-combined-ca-bundle\") pod \"nova-edpm-compute-no-nodes-edpm-compute-no-nodes-5t59k\" (UID: \"231da886-60dd-4772-a429-3e2fc309990d\") " pod="openstack-kuttl-tests/nova-edpm-compute-no-nodes-edpm-compute-no-nodes-5t59k" Jan 20 18:05:27 crc kubenswrapper[4558]: I0120 18:05:27.828881 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/231da886-60dd-4772-a429-3e2fc309990d-nova-cell1-compute-config-0\") pod \"nova-edpm-compute-no-nodes-edpm-compute-no-nodes-5t59k\" (UID: \"231da886-60dd-4772-a429-3e2fc309990d\") " pod="openstack-kuttl-tests/nova-edpm-compute-no-nodes-edpm-compute-no-nodes-5t59k" Jan 20 18:05:27 crc kubenswrapper[4558]: I0120 18:05:27.829086 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/231da886-60dd-4772-a429-3e2fc309990d-inventory\") pod \"nova-edpm-compute-no-nodes-edpm-compute-no-nodes-5t59k\" (UID: \"231da886-60dd-4772-a429-3e2fc309990d\") " pod="openstack-kuttl-tests/nova-edpm-compute-no-nodes-edpm-compute-no-nodes-5t59k" Jan 20 18:05:27 crc kubenswrapper[4558]: I0120 18:05:27.930434 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/231da886-60dd-4772-a429-3e2fc309990d-nova-cell1-compute-config-0\") pod \"nova-edpm-compute-no-nodes-edpm-compute-no-nodes-5t59k\" (UID: \"231da886-60dd-4772-a429-3e2fc309990d\") " pod="openstack-kuttl-tests/nova-edpm-compute-no-nodes-edpm-compute-no-nodes-5t59k" Jan 20 18:05:27 crc kubenswrapper[4558]: I0120 18:05:27.930558 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/231da886-60dd-4772-a429-3e2fc309990d-inventory\") pod \"nova-edpm-compute-no-nodes-edpm-compute-no-nodes-5t59k\" (UID: \"231da886-60dd-4772-a429-3e2fc309990d\") " pod="openstack-kuttl-tests/nova-edpm-compute-no-nodes-edpm-compute-no-nodes-5t59k" Jan 20 18:05:27 crc kubenswrapper[4558]: I0120 18:05:27.930659 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/231da886-60dd-4772-a429-3e2fc309990d-nova-cell1-compute-config-1\") pod \"nova-edpm-compute-no-nodes-edpm-compute-no-nodes-5t59k\" (UID: \"231da886-60dd-4772-a429-3e2fc309990d\") " pod="openstack-kuttl-tests/nova-edpm-compute-no-nodes-edpm-compute-no-nodes-5t59k" Jan 20 18:05:27 crc kubenswrapper[4558]: I0120 18:05:27.930937 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/231da886-60dd-4772-a429-3e2fc309990d-nova-migration-ssh-key-0\") pod \"nova-edpm-compute-no-nodes-edpm-compute-no-nodes-5t59k\" (UID: \"231da886-60dd-4772-a429-3e2fc309990d\") " pod="openstack-kuttl-tests/nova-edpm-compute-no-nodes-edpm-compute-no-nodes-5t59k" Jan 20 18:05:27 crc kubenswrapper[4558]: I0120 18:05:27.931045 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/231da886-60dd-4772-a429-3e2fc309990d-ssh-key-edpm-compute-no-nodes\") pod \"nova-edpm-compute-no-nodes-edpm-compute-no-nodes-5t59k\" (UID: \"231da886-60dd-4772-a429-3e2fc309990d\") " pod="openstack-kuttl-tests/nova-edpm-compute-no-nodes-edpm-compute-no-nodes-5t59k" Jan 20 18:05:27 crc kubenswrapper[4558]: I0120 18:05:27.931094 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mrfn2\" (UniqueName: \"kubernetes.io/projected/231da886-60dd-4772-a429-3e2fc309990d-kube-api-access-mrfn2\") pod \"nova-edpm-compute-no-nodes-edpm-compute-no-nodes-5t59k\" (UID: \"231da886-60dd-4772-a429-3e2fc309990d\") " pod="openstack-kuttl-tests/nova-edpm-compute-no-nodes-edpm-compute-no-nodes-5t59k" Jan 20 18:05:27 crc kubenswrapper[4558]: I0120 18:05:27.931204 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/231da886-60dd-4772-a429-3e2fc309990d-nova-migration-ssh-key-1\") pod \"nova-edpm-compute-no-nodes-edpm-compute-no-nodes-5t59k\" (UID: \"231da886-60dd-4772-a429-3e2fc309990d\") " pod="openstack-kuttl-tests/nova-edpm-compute-no-nodes-edpm-compute-no-nodes-5t59k" Jan 20 18:05:27 crc kubenswrapper[4558]: I0120 18:05:27.931422 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/231da886-60dd-4772-a429-3e2fc309990d-nova-combined-ca-bundle\") pod \"nova-edpm-compute-no-nodes-edpm-compute-no-nodes-5t59k\" (UID: \"231da886-60dd-4772-a429-3e2fc309990d\") " pod="openstack-kuttl-tests/nova-edpm-compute-no-nodes-edpm-compute-no-nodes-5t59k" Jan 20 18:05:27 crc kubenswrapper[4558]: I0120 18:05:27.936206 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/231da886-60dd-4772-a429-3e2fc309990d-nova-migration-ssh-key-0\") pod \"nova-edpm-compute-no-nodes-edpm-compute-no-nodes-5t59k\" (UID: \"231da886-60dd-4772-a429-3e2fc309990d\") " pod="openstack-kuttl-tests/nova-edpm-compute-no-nodes-edpm-compute-no-nodes-5t59k" Jan 20 18:05:27 crc kubenswrapper[4558]: I0120 18:05:27.936315 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/231da886-60dd-4772-a429-3e2fc309990d-nova-cell1-compute-config-1\") pod \"nova-edpm-compute-no-nodes-edpm-compute-no-nodes-5t59k\" (UID: \"231da886-60dd-4772-a429-3e2fc309990d\") " pod="openstack-kuttl-tests/nova-edpm-compute-no-nodes-edpm-compute-no-nodes-5t59k" Jan 20 18:05:27 crc kubenswrapper[4558]: I0120 18:05:27.936409 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/231da886-60dd-4772-a429-3e2fc309990d-ssh-key-edpm-compute-no-nodes\") pod \"nova-edpm-compute-no-nodes-edpm-compute-no-nodes-5t59k\" (UID: \"231da886-60dd-4772-a429-3e2fc309990d\") " 
pod="openstack-kuttl-tests/nova-edpm-compute-no-nodes-edpm-compute-no-nodes-5t59k" Jan 20 18:05:27 crc kubenswrapper[4558]: I0120 18:05:27.936751 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/231da886-60dd-4772-a429-3e2fc309990d-nova-combined-ca-bundle\") pod \"nova-edpm-compute-no-nodes-edpm-compute-no-nodes-5t59k\" (UID: \"231da886-60dd-4772-a429-3e2fc309990d\") " pod="openstack-kuttl-tests/nova-edpm-compute-no-nodes-edpm-compute-no-nodes-5t59k" Jan 20 18:05:27 crc kubenswrapper[4558]: I0120 18:05:27.936898 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/231da886-60dd-4772-a429-3e2fc309990d-nova-migration-ssh-key-1\") pod \"nova-edpm-compute-no-nodes-edpm-compute-no-nodes-5t59k\" (UID: \"231da886-60dd-4772-a429-3e2fc309990d\") " pod="openstack-kuttl-tests/nova-edpm-compute-no-nodes-edpm-compute-no-nodes-5t59k" Jan 20 18:05:27 crc kubenswrapper[4558]: I0120 18:05:27.937375 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/231da886-60dd-4772-a429-3e2fc309990d-nova-cell1-compute-config-0\") pod \"nova-edpm-compute-no-nodes-edpm-compute-no-nodes-5t59k\" (UID: \"231da886-60dd-4772-a429-3e2fc309990d\") " pod="openstack-kuttl-tests/nova-edpm-compute-no-nodes-edpm-compute-no-nodes-5t59k" Jan 20 18:05:27 crc kubenswrapper[4558]: I0120 18:05:27.937512 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/231da886-60dd-4772-a429-3e2fc309990d-inventory\") pod \"nova-edpm-compute-no-nodes-edpm-compute-no-nodes-5t59k\" (UID: \"231da886-60dd-4772-a429-3e2fc309990d\") " pod="openstack-kuttl-tests/nova-edpm-compute-no-nodes-edpm-compute-no-nodes-5t59k" Jan 20 18:05:27 crc kubenswrapper[4558]: I0120 18:05:27.946707 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mrfn2\" (UniqueName: \"kubernetes.io/projected/231da886-60dd-4772-a429-3e2fc309990d-kube-api-access-mrfn2\") pod \"nova-edpm-compute-no-nodes-edpm-compute-no-nodes-5t59k\" (UID: \"231da886-60dd-4772-a429-3e2fc309990d\") " pod="openstack-kuttl-tests/nova-edpm-compute-no-nodes-edpm-compute-no-nodes-5t59k" Jan 20 18:05:28 crc kubenswrapper[4558]: I0120 18:05:28.017867 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-edpm-compute-no-nodes-edpm-compute-no-nodes-5t59k" Jan 20 18:05:28 crc kubenswrapper[4558]: I0120 18:05:28.413768 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-edpm-compute-no-nodes-edpm-compute-no-nodes-5t59k"] Jan 20 18:05:28 crc kubenswrapper[4558]: W0120 18:05:28.415052 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod231da886_60dd_4772_a429_3e2fc309990d.slice/crio-e29c6ca1ff6997c7d0ae35702a4bc72bc85e92902facd69e12267a819e4e9f03 WatchSource:0}: Error finding container e29c6ca1ff6997c7d0ae35702a4bc72bc85e92902facd69e12267a819e4e9f03: Status 404 returned error can't find the container with id e29c6ca1ff6997c7d0ae35702a4bc72bc85e92902facd69e12267a819e4e9f03 Jan 20 18:05:28 crc kubenswrapper[4558]: I0120 18:05:28.576159 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="810b3c6b-1c22-49e0-aae7-16ac3637d8e8" path="/var/lib/kubelet/pods/810b3c6b-1c22-49e0-aae7-16ac3637d8e8/volumes" Jan 20 18:05:28 crc kubenswrapper[4558]: I0120 18:05:28.665480 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-edpm-compute-no-nodes-edpm-compute-no-nodes-5t59k" event={"ID":"231da886-60dd-4772-a429-3e2fc309990d","Type":"ContainerStarted","Data":"e29c6ca1ff6997c7d0ae35702a4bc72bc85e92902facd69e12267a819e4e9f03"} Jan 20 18:05:29 crc kubenswrapper[4558]: I0120 18:05:29.678184 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-edpm-compute-no-nodes-edpm-compute-no-nodes-5t59k" event={"ID":"231da886-60dd-4772-a429-3e2fc309990d","Type":"ContainerStarted","Data":"5a2c0130729c72b0a7cfc472b088a1862bb308a64d24b8cdda68f8113a394887"} Jan 20 18:05:29 crc kubenswrapper[4558]: I0120 18:05:29.698738 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-edpm-compute-no-nodes-edpm-compute-no-nodes-5t59k" podStartSLOduration=2.133365995 podStartE2EDuration="2.698717506s" podCreationTimestamp="2026-01-20 18:05:27 +0000 UTC" firstStartedPulling="2026-01-20 18:05:28.417767056 +0000 UTC m=+5022.178105024" lastFinishedPulling="2026-01-20 18:05:28.983118569 +0000 UTC m=+5022.743456535" observedRunningTime="2026-01-20 18:05:29.694411129 +0000 UTC m=+5023.454749097" watchObservedRunningTime="2026-01-20 18:05:29.698717506 +0000 UTC m=+5023.459055474" Jan 20 18:05:30 crc kubenswrapper[4558]: I0120 18:05:30.689029 4558 generic.go:334] "Generic (PLEG): container finished" podID="231da886-60dd-4772-a429-3e2fc309990d" containerID="5a2c0130729c72b0a7cfc472b088a1862bb308a64d24b8cdda68f8113a394887" exitCode=0 Jan 20 18:05:30 crc kubenswrapper[4558]: I0120 18:05:30.689080 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-edpm-compute-no-nodes-edpm-compute-no-nodes-5t59k" event={"ID":"231da886-60dd-4772-a429-3e2fc309990d","Type":"ContainerDied","Data":"5a2c0130729c72b0a7cfc472b088a1862bb308a64d24b8cdda68f8113a394887"} Jan 20 18:05:31 crc kubenswrapper[4558]: I0120 18:05:31.422964 4558 scope.go:117] "RemoveContainer" containerID="7310cf3e7f48f89d7e00a18bb1a190b6e8a5082773a3a4ced4740251c39dede3" Jan 20 18:05:31 crc kubenswrapper[4558]: I0120 18:05:31.443883 4558 scope.go:117] "RemoveContainer" containerID="213d4a39fc3d76e3e28077d4f05dc4a4b9af3c199c14c25a05cac8843f18e8d9" Jan 20 18:05:31 crc kubenswrapper[4558]: I0120 18:05:31.467749 4558 scope.go:117] "RemoveContainer" 
containerID="790091312ed918acd438cbb2df2c660600bb30ed84ef002a58f4cd77a21666a1" Jan 20 18:05:31 crc kubenswrapper[4558]: I0120 18:05:31.487797 4558 scope.go:117] "RemoveContainer" containerID="9ea6ff980fb53a92de35240296a25aa2e7062286f09a7e38918440c0642fed3a" Jan 20 18:05:31 crc kubenswrapper[4558]: I0120 18:05:31.949703 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-edpm-compute-no-nodes-edpm-compute-no-nodes-5t59k" Jan 20 18:05:32 crc kubenswrapper[4558]: I0120 18:05:32.094497 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/231da886-60dd-4772-a429-3e2fc309990d-nova-combined-ca-bundle\") pod \"231da886-60dd-4772-a429-3e2fc309990d\" (UID: \"231da886-60dd-4772-a429-3e2fc309990d\") " Jan 20 18:05:32 crc kubenswrapper[4558]: I0120 18:05:32.094555 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/231da886-60dd-4772-a429-3e2fc309990d-ssh-key-edpm-compute-no-nodes\") pod \"231da886-60dd-4772-a429-3e2fc309990d\" (UID: \"231da886-60dd-4772-a429-3e2fc309990d\") " Jan 20 18:05:32 crc kubenswrapper[4558]: I0120 18:05:32.094616 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/231da886-60dd-4772-a429-3e2fc309990d-nova-cell1-compute-config-0\") pod \"231da886-60dd-4772-a429-3e2fc309990d\" (UID: \"231da886-60dd-4772-a429-3e2fc309990d\") " Jan 20 18:05:32 crc kubenswrapper[4558]: I0120 18:05:32.094658 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mrfn2\" (UniqueName: \"kubernetes.io/projected/231da886-60dd-4772-a429-3e2fc309990d-kube-api-access-mrfn2\") pod \"231da886-60dd-4772-a429-3e2fc309990d\" (UID: \"231da886-60dd-4772-a429-3e2fc309990d\") " Jan 20 18:05:32 crc kubenswrapper[4558]: I0120 18:05:32.094694 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/231da886-60dd-4772-a429-3e2fc309990d-nova-cell1-compute-config-1\") pod \"231da886-60dd-4772-a429-3e2fc309990d\" (UID: \"231da886-60dd-4772-a429-3e2fc309990d\") " Jan 20 18:05:32 crc kubenswrapper[4558]: I0120 18:05:32.094753 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/231da886-60dd-4772-a429-3e2fc309990d-nova-migration-ssh-key-0\") pod \"231da886-60dd-4772-a429-3e2fc309990d\" (UID: \"231da886-60dd-4772-a429-3e2fc309990d\") " Jan 20 18:05:32 crc kubenswrapper[4558]: I0120 18:05:32.094795 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/231da886-60dd-4772-a429-3e2fc309990d-inventory\") pod \"231da886-60dd-4772-a429-3e2fc309990d\" (UID: \"231da886-60dd-4772-a429-3e2fc309990d\") " Jan 20 18:05:32 crc kubenswrapper[4558]: I0120 18:05:32.094955 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/231da886-60dd-4772-a429-3e2fc309990d-nova-migration-ssh-key-1\") pod \"231da886-60dd-4772-a429-3e2fc309990d\" (UID: \"231da886-60dd-4772-a429-3e2fc309990d\") " Jan 20 18:05:32 crc kubenswrapper[4558]: I0120 18:05:32.101043 4558 operation_generator.go:803] 
UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/231da886-60dd-4772-a429-3e2fc309990d-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "231da886-60dd-4772-a429-3e2fc309990d" (UID: "231da886-60dd-4772-a429-3e2fc309990d"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:05:32 crc kubenswrapper[4558]: I0120 18:05:32.101299 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/231da886-60dd-4772-a429-3e2fc309990d-kube-api-access-mrfn2" (OuterVolumeSpecName: "kube-api-access-mrfn2") pod "231da886-60dd-4772-a429-3e2fc309990d" (UID: "231da886-60dd-4772-a429-3e2fc309990d"). InnerVolumeSpecName "kube-api-access-mrfn2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:05:32 crc kubenswrapper[4558]: E0120 18:05:32.114021 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/231da886-60dd-4772-a429-3e2fc309990d-ssh-key-edpm-compute-no-nodes podName:231da886-60dd-4772-a429-3e2fc309990d nodeName:}" failed. No retries permitted until 2026-01-20 18:05:32.613991251 +0000 UTC m=+5026.374329218 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "ssh-key-edpm-compute-no-nodes" (UniqueName: "kubernetes.io/secret/231da886-60dd-4772-a429-3e2fc309990d-ssh-key-edpm-compute-no-nodes") pod "231da886-60dd-4772-a429-3e2fc309990d" (UID: "231da886-60dd-4772-a429-3e2fc309990d") : error deleting /var/lib/kubelet/pods/231da886-60dd-4772-a429-3e2fc309990d/volume-subpaths: remove /var/lib/kubelet/pods/231da886-60dd-4772-a429-3e2fc309990d/volume-subpaths: no such file or directory Jan 20 18:05:32 crc kubenswrapper[4558]: I0120 18:05:32.115636 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/231da886-60dd-4772-a429-3e2fc309990d-inventory" (OuterVolumeSpecName: "inventory") pod "231da886-60dd-4772-a429-3e2fc309990d" (UID: "231da886-60dd-4772-a429-3e2fc309990d"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:05:32 crc kubenswrapper[4558]: I0120 18:05:32.116006 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/231da886-60dd-4772-a429-3e2fc309990d-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "231da886-60dd-4772-a429-3e2fc309990d" (UID: "231da886-60dd-4772-a429-3e2fc309990d"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:05:32 crc kubenswrapper[4558]: I0120 18:05:32.116322 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/231da886-60dd-4772-a429-3e2fc309990d-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "231da886-60dd-4772-a429-3e2fc309990d" (UID: "231da886-60dd-4772-a429-3e2fc309990d"). InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:05:32 crc kubenswrapper[4558]: I0120 18:05:32.116599 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/231da886-60dd-4772-a429-3e2fc309990d-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "231da886-60dd-4772-a429-3e2fc309990d" (UID: "231da886-60dd-4772-a429-3e2fc309990d"). InnerVolumeSpecName "nova-cell1-compute-config-1". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:05:32 crc kubenswrapper[4558]: I0120 18:05:32.116969 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/231da886-60dd-4772-a429-3e2fc309990d-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "231da886-60dd-4772-a429-3e2fc309990d" (UID: "231da886-60dd-4772-a429-3e2fc309990d"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:05:32 crc kubenswrapper[4558]: I0120 18:05:32.197778 4558 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/231da886-60dd-4772-a429-3e2fc309990d-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:05:32 crc kubenswrapper[4558]: I0120 18:05:32.197808 4558 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/231da886-60dd-4772-a429-3e2fc309990d-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Jan 20 18:05:32 crc kubenswrapper[4558]: I0120 18:05:32.197821 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mrfn2\" (UniqueName: \"kubernetes.io/projected/231da886-60dd-4772-a429-3e2fc309990d-kube-api-access-mrfn2\") on node \"crc\" DevicePath \"\"" Jan 20 18:05:32 crc kubenswrapper[4558]: I0120 18:05:32.197832 4558 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/231da886-60dd-4772-a429-3e2fc309990d-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Jan 20 18:05:32 crc kubenswrapper[4558]: I0120 18:05:32.197842 4558 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/231da886-60dd-4772-a429-3e2fc309990d-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Jan 20 18:05:32 crc kubenswrapper[4558]: I0120 18:05:32.197857 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/231da886-60dd-4772-a429-3e2fc309990d-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:05:32 crc kubenswrapper[4558]: I0120 18:05:32.197867 4558 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/231da886-60dd-4772-a429-3e2fc309990d-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Jan 20 18:05:32 crc kubenswrapper[4558]: I0120 18:05:32.703753 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/231da886-60dd-4772-a429-3e2fc309990d-ssh-key-edpm-compute-no-nodes\") pod \"231da886-60dd-4772-a429-3e2fc309990d\" (UID: \"231da886-60dd-4772-a429-3e2fc309990d\") " Jan 20 18:05:32 crc kubenswrapper[4558]: I0120 18:05:32.711513 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/nova-edpm-compute-no-nodes-edpm-compute-no-nodes-5t59k" Jan 20 18:05:32 crc kubenswrapper[4558]: I0120 18:05:32.792623 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/231da886-60dd-4772-a429-3e2fc309990d-ssh-key-edpm-compute-no-nodes" (OuterVolumeSpecName: "ssh-key-edpm-compute-no-nodes") pod "231da886-60dd-4772-a429-3e2fc309990d" (UID: "231da886-60dd-4772-a429-3e2fc309990d"). InnerVolumeSpecName "ssh-key-edpm-compute-no-nodes". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:05:32 crc kubenswrapper[4558]: I0120 18:05:32.807330 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/231da886-60dd-4772-a429-3e2fc309990d-ssh-key-edpm-compute-no-nodes\") on node \"crc\" DevicePath \"\"" Jan 20 18:05:32 crc kubenswrapper[4558]: I0120 18:05:32.946064 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-edpm-compute-no-nodes-edpm-compute-no-nodes-5t59k" event={"ID":"231da886-60dd-4772-a429-3e2fc309990d","Type":"ContainerDied","Data":"e29c6ca1ff6997c7d0ae35702a4bc72bc85e92902facd69e12267a819e4e9f03"} Jan 20 18:05:32 crc kubenswrapper[4558]: I0120 18:05:32.946131 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e29c6ca1ff6997c7d0ae35702a4bc72bc85e92902facd69e12267a819e4e9f03" Jan 20 18:05:33 crc kubenswrapper[4558]: I0120 18:05:33.732123 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/custom-svc-edpm-compute-no-nodes-ovrd-edpm-compute-no-nodetmjvn"] Jan 20 18:05:33 crc kubenswrapper[4558]: E0120 18:05:33.732431 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="231da886-60dd-4772-a429-3e2fc309990d" containerName="nova-edpm-compute-no-nodes-edpm-compute-no-nodes" Jan 20 18:05:33 crc kubenswrapper[4558]: I0120 18:05:33.732446 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="231da886-60dd-4772-a429-3e2fc309990d" containerName="nova-edpm-compute-no-nodes-edpm-compute-no-nodes" Jan 20 18:05:33 crc kubenswrapper[4558]: I0120 18:05:33.732614 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="231da886-60dd-4772-a429-3e2fc309990d" containerName="nova-edpm-compute-no-nodes-edpm-compute-no-nodes" Jan 20 18:05:33 crc kubenswrapper[4558]: I0120 18:05:33.733119 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/custom-svc-edpm-compute-no-nodes-ovrd-edpm-compute-no-nodetmjvn" Jan 20 18:05:33 crc kubenswrapper[4558]: I0120 18:05:33.735012 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"combined-ca-bundle" Jan 20 18:05:33 crc kubenswrapper[4558]: I0120 18:05:33.735303 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 18:05:33 crc kubenswrapper[4558]: I0120 18:05:33.735566 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"edpm-compute-no-nodes-dockercfg-tljmj" Jan 20 18:05:33 crc kubenswrapper[4558]: I0120 18:05:33.738411 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-edpm-compute-no-nodes" Jan 20 18:05:33 crc kubenswrapper[4558]: I0120 18:05:33.738699 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 18:05:33 crc kubenswrapper[4558]: I0120 18:05:33.757831 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/custom-svc-edpm-compute-no-nodes-ovrd-edpm-compute-no-nodetmjvn"] Jan 20 18:05:33 crc kubenswrapper[4558]: I0120 18:05:33.925709 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-svc-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac9e577e-5aa4-4923-8f3f-a4f1fa6ce515-custom-svc-combined-ca-bundle\") pod \"custom-svc-edpm-compute-no-nodes-ovrd-edpm-compute-no-nodetmjvn\" (UID: \"ac9e577e-5aa4-4923-8f3f-a4f1fa6ce515\") " pod="openstack-kuttl-tests/custom-svc-edpm-compute-no-nodes-ovrd-edpm-compute-no-nodetmjvn" Jan 20 18:05:33 crc kubenswrapper[4558]: I0120 18:05:33.926054 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ac9e577e-5aa4-4923-8f3f-a4f1fa6ce515-inventory\") pod \"custom-svc-edpm-compute-no-nodes-ovrd-edpm-compute-no-nodetmjvn\" (UID: \"ac9e577e-5aa4-4923-8f3f-a4f1fa6ce515\") " pod="openstack-kuttl-tests/custom-svc-edpm-compute-no-nodes-ovrd-edpm-compute-no-nodetmjvn" Jan 20 18:05:33 crc kubenswrapper[4558]: I0120 18:05:33.926081 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/ac9e577e-5aa4-4923-8f3f-a4f1fa6ce515-ssh-key-edpm-compute-no-nodes\") pod \"custom-svc-edpm-compute-no-nodes-ovrd-edpm-compute-no-nodetmjvn\" (UID: \"ac9e577e-5aa4-4923-8f3f-a4f1fa6ce515\") " pod="openstack-kuttl-tests/custom-svc-edpm-compute-no-nodes-ovrd-edpm-compute-no-nodetmjvn" Jan 20 18:05:33 crc kubenswrapper[4558]: I0120 18:05:33.926127 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w2jnh\" (UniqueName: \"kubernetes.io/projected/ac9e577e-5aa4-4923-8f3f-a4f1fa6ce515-kube-api-access-w2jnh\") pod \"custom-svc-edpm-compute-no-nodes-ovrd-edpm-compute-no-nodetmjvn\" (UID: \"ac9e577e-5aa4-4923-8f3f-a4f1fa6ce515\") " pod="openstack-kuttl-tests/custom-svc-edpm-compute-no-nodes-ovrd-edpm-compute-no-nodetmjvn" Jan 20 18:05:34 crc kubenswrapper[4558]: I0120 18:05:34.027592 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-svc-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/ac9e577e-5aa4-4923-8f3f-a4f1fa6ce515-custom-svc-combined-ca-bundle\") pod \"custom-svc-edpm-compute-no-nodes-ovrd-edpm-compute-no-nodetmjvn\" (UID: \"ac9e577e-5aa4-4923-8f3f-a4f1fa6ce515\") " pod="openstack-kuttl-tests/custom-svc-edpm-compute-no-nodes-ovrd-edpm-compute-no-nodetmjvn" Jan 20 18:05:34 crc kubenswrapper[4558]: I0120 18:05:34.027650 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ac9e577e-5aa4-4923-8f3f-a4f1fa6ce515-inventory\") pod \"custom-svc-edpm-compute-no-nodes-ovrd-edpm-compute-no-nodetmjvn\" (UID: \"ac9e577e-5aa4-4923-8f3f-a4f1fa6ce515\") " pod="openstack-kuttl-tests/custom-svc-edpm-compute-no-nodes-ovrd-edpm-compute-no-nodetmjvn" Jan 20 18:05:34 crc kubenswrapper[4558]: I0120 18:05:34.027679 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/ac9e577e-5aa4-4923-8f3f-a4f1fa6ce515-ssh-key-edpm-compute-no-nodes\") pod \"custom-svc-edpm-compute-no-nodes-ovrd-edpm-compute-no-nodetmjvn\" (UID: \"ac9e577e-5aa4-4923-8f3f-a4f1fa6ce515\") " pod="openstack-kuttl-tests/custom-svc-edpm-compute-no-nodes-ovrd-edpm-compute-no-nodetmjvn" Jan 20 18:05:34 crc kubenswrapper[4558]: I0120 18:05:34.027731 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w2jnh\" (UniqueName: \"kubernetes.io/projected/ac9e577e-5aa4-4923-8f3f-a4f1fa6ce515-kube-api-access-w2jnh\") pod \"custom-svc-edpm-compute-no-nodes-ovrd-edpm-compute-no-nodetmjvn\" (UID: \"ac9e577e-5aa4-4923-8f3f-a4f1fa6ce515\") " pod="openstack-kuttl-tests/custom-svc-edpm-compute-no-nodes-ovrd-edpm-compute-no-nodetmjvn" Jan 20 18:05:34 crc kubenswrapper[4558]: I0120 18:05:34.032797 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/ac9e577e-5aa4-4923-8f3f-a4f1fa6ce515-ssh-key-edpm-compute-no-nodes\") pod \"custom-svc-edpm-compute-no-nodes-ovrd-edpm-compute-no-nodetmjvn\" (UID: \"ac9e577e-5aa4-4923-8f3f-a4f1fa6ce515\") " pod="openstack-kuttl-tests/custom-svc-edpm-compute-no-nodes-ovrd-edpm-compute-no-nodetmjvn" Jan 20 18:05:34 crc kubenswrapper[4558]: I0120 18:05:34.032807 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ac9e577e-5aa4-4923-8f3f-a4f1fa6ce515-inventory\") pod \"custom-svc-edpm-compute-no-nodes-ovrd-edpm-compute-no-nodetmjvn\" (UID: \"ac9e577e-5aa4-4923-8f3f-a4f1fa6ce515\") " pod="openstack-kuttl-tests/custom-svc-edpm-compute-no-nodes-ovrd-edpm-compute-no-nodetmjvn" Jan 20 18:05:34 crc kubenswrapper[4558]: I0120 18:05:34.032978 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-svc-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac9e577e-5aa4-4923-8f3f-a4f1fa6ce515-custom-svc-combined-ca-bundle\") pod \"custom-svc-edpm-compute-no-nodes-ovrd-edpm-compute-no-nodetmjvn\" (UID: \"ac9e577e-5aa4-4923-8f3f-a4f1fa6ce515\") " pod="openstack-kuttl-tests/custom-svc-edpm-compute-no-nodes-ovrd-edpm-compute-no-nodetmjvn" Jan 20 18:05:34 crc kubenswrapper[4558]: I0120 18:05:34.044442 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w2jnh\" (UniqueName: \"kubernetes.io/projected/ac9e577e-5aa4-4923-8f3f-a4f1fa6ce515-kube-api-access-w2jnh\") pod \"custom-svc-edpm-compute-no-nodes-ovrd-edpm-compute-no-nodetmjvn\" (UID: 
\"ac9e577e-5aa4-4923-8f3f-a4f1fa6ce515\") " pod="openstack-kuttl-tests/custom-svc-edpm-compute-no-nodes-ovrd-edpm-compute-no-nodetmjvn" Jan 20 18:05:34 crc kubenswrapper[4558]: I0120 18:05:34.046111 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/custom-svc-edpm-compute-no-nodes-ovrd-edpm-compute-no-nodetmjvn" Jan 20 18:05:34 crc kubenswrapper[4558]: I0120 18:05:34.423682 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/custom-svc-edpm-compute-no-nodes-ovrd-edpm-compute-no-nodetmjvn"] Jan 20 18:05:34 crc kubenswrapper[4558]: I0120 18:05:34.745882 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/custom-svc-edpm-compute-no-nodes-ovrd-edpm-compute-no-nodetmjvn" event={"ID":"ac9e577e-5aa4-4923-8f3f-a4f1fa6ce515","Type":"ContainerStarted","Data":"352c25bd7383adc859269a1a36614f86a88073f8ff0dd3e5a6b067aca6bf8609"} Jan 20 18:05:35 crc kubenswrapper[4558]: I0120 18:05:35.757739 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/custom-svc-edpm-compute-no-nodes-ovrd-edpm-compute-no-nodetmjvn" event={"ID":"ac9e577e-5aa4-4923-8f3f-a4f1fa6ce515","Type":"ContainerStarted","Data":"d8a8562f0ad946b83659daace7ac1118fa02810695cdf4d2a3584b817bbaff84"} Jan 20 18:05:35 crc kubenswrapper[4558]: I0120 18:05:35.773640 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/custom-svc-edpm-compute-no-nodes-ovrd-edpm-compute-no-nodetmjvn" podStartSLOduration=2.060585221 podStartE2EDuration="2.773618726s" podCreationTimestamp="2026-01-20 18:05:33 +0000 UTC" firstStartedPulling="2026-01-20 18:05:34.427614501 +0000 UTC m=+5028.187952468" lastFinishedPulling="2026-01-20 18:05:35.140648005 +0000 UTC m=+5028.900985973" observedRunningTime="2026-01-20 18:05:35.770999121 +0000 UTC m=+5029.531337088" watchObservedRunningTime="2026-01-20 18:05:35.773618726 +0000 UTC m=+5029.533956693" Jan 20 18:05:36 crc kubenswrapper[4558]: I0120 18:05:36.570929 4558 scope.go:117] "RemoveContainer" containerID="6b02d31ec11fe5435af988e35303a6f66b5afa5ef952ce5f7cc4b3cd5f4d9497" Jan 20 18:05:36 crc kubenswrapper[4558]: E0120 18:05:36.571206 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:05:37 crc kubenswrapper[4558]: I0120 18:05:37.778191 4558 generic.go:334] "Generic (PLEG): container finished" podID="ac9e577e-5aa4-4923-8f3f-a4f1fa6ce515" containerID="d8a8562f0ad946b83659daace7ac1118fa02810695cdf4d2a3584b817bbaff84" exitCode=0 Jan 20 18:05:37 crc kubenswrapper[4558]: I0120 18:05:37.778250 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/custom-svc-edpm-compute-no-nodes-ovrd-edpm-compute-no-nodetmjvn" event={"ID":"ac9e577e-5aa4-4923-8f3f-a4f1fa6ce515","Type":"ContainerDied","Data":"d8a8562f0ad946b83659daace7ac1118fa02810695cdf4d2a3584b817bbaff84"} Jan 20 18:05:39 crc kubenswrapper[4558]: I0120 18:05:39.016340 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/custom-svc-edpm-compute-no-nodes-ovrd-edpm-compute-no-nodetmjvn" Jan 20 18:05:39 crc kubenswrapper[4558]: I0120 18:05:39.199720 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"custom-svc-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac9e577e-5aa4-4923-8f3f-a4f1fa6ce515-custom-svc-combined-ca-bundle\") pod \"ac9e577e-5aa4-4923-8f3f-a4f1fa6ce515\" (UID: \"ac9e577e-5aa4-4923-8f3f-a4f1fa6ce515\") " Jan 20 18:05:39 crc kubenswrapper[4558]: I0120 18:05:39.199786 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w2jnh\" (UniqueName: \"kubernetes.io/projected/ac9e577e-5aa4-4923-8f3f-a4f1fa6ce515-kube-api-access-w2jnh\") pod \"ac9e577e-5aa4-4923-8f3f-a4f1fa6ce515\" (UID: \"ac9e577e-5aa4-4923-8f3f-a4f1fa6ce515\") " Jan 20 18:05:39 crc kubenswrapper[4558]: I0120 18:05:39.199824 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ac9e577e-5aa4-4923-8f3f-a4f1fa6ce515-inventory\") pod \"ac9e577e-5aa4-4923-8f3f-a4f1fa6ce515\" (UID: \"ac9e577e-5aa4-4923-8f3f-a4f1fa6ce515\") " Jan 20 18:05:39 crc kubenswrapper[4558]: I0120 18:05:39.199955 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/ac9e577e-5aa4-4923-8f3f-a4f1fa6ce515-ssh-key-edpm-compute-no-nodes\") pod \"ac9e577e-5aa4-4923-8f3f-a4f1fa6ce515\" (UID: \"ac9e577e-5aa4-4923-8f3f-a4f1fa6ce515\") " Jan 20 18:05:39 crc kubenswrapper[4558]: I0120 18:05:39.206017 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ac9e577e-5aa4-4923-8f3f-a4f1fa6ce515-custom-svc-combined-ca-bundle" (OuterVolumeSpecName: "custom-svc-combined-ca-bundle") pod "ac9e577e-5aa4-4923-8f3f-a4f1fa6ce515" (UID: "ac9e577e-5aa4-4923-8f3f-a4f1fa6ce515"). InnerVolumeSpecName "custom-svc-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:05:39 crc kubenswrapper[4558]: I0120 18:05:39.206342 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ac9e577e-5aa4-4923-8f3f-a4f1fa6ce515-kube-api-access-w2jnh" (OuterVolumeSpecName: "kube-api-access-w2jnh") pod "ac9e577e-5aa4-4923-8f3f-a4f1fa6ce515" (UID: "ac9e577e-5aa4-4923-8f3f-a4f1fa6ce515"). InnerVolumeSpecName "kube-api-access-w2jnh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:05:39 crc kubenswrapper[4558]: I0120 18:05:39.218826 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ac9e577e-5aa4-4923-8f3f-a4f1fa6ce515-inventory" (OuterVolumeSpecName: "inventory") pod "ac9e577e-5aa4-4923-8f3f-a4f1fa6ce515" (UID: "ac9e577e-5aa4-4923-8f3f-a4f1fa6ce515"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:05:39 crc kubenswrapper[4558]: I0120 18:05:39.220294 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ac9e577e-5aa4-4923-8f3f-a4f1fa6ce515-ssh-key-edpm-compute-no-nodes" (OuterVolumeSpecName: "ssh-key-edpm-compute-no-nodes") pod "ac9e577e-5aa4-4923-8f3f-a4f1fa6ce515" (UID: "ac9e577e-5aa4-4923-8f3f-a4f1fa6ce515"). InnerVolumeSpecName "ssh-key-edpm-compute-no-nodes". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:05:39 crc kubenswrapper[4558]: I0120 18:05:39.301848 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ac9e577e-5aa4-4923-8f3f-a4f1fa6ce515-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:05:39 crc kubenswrapper[4558]: I0120 18:05:39.301882 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/ac9e577e-5aa4-4923-8f3f-a4f1fa6ce515-ssh-key-edpm-compute-no-nodes\") on node \"crc\" DevicePath \"\"" Jan 20 18:05:39 crc kubenswrapper[4558]: I0120 18:05:39.301901 4558 reconciler_common.go:293] "Volume detached for volume \"custom-svc-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac9e577e-5aa4-4923-8f3f-a4f1fa6ce515-custom-svc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:05:39 crc kubenswrapper[4558]: I0120 18:05:39.301916 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w2jnh\" (UniqueName: \"kubernetes.io/projected/ac9e577e-5aa4-4923-8f3f-a4f1fa6ce515-kube-api-access-w2jnh\") on node \"crc\" DevicePath \"\"" Jan 20 18:05:39 crc kubenswrapper[4558]: I0120 18:05:39.798824 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/custom-svc-edpm-compute-no-nodes-ovrd-edpm-compute-no-nodetmjvn" event={"ID":"ac9e577e-5aa4-4923-8f3f-a4f1fa6ce515","Type":"ContainerDied","Data":"352c25bd7383adc859269a1a36614f86a88073f8ff0dd3e5a6b067aca6bf8609"} Jan 20 18:05:39 crc kubenswrapper[4558]: I0120 18:05:39.798889 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="352c25bd7383adc859269a1a36614f86a88073f8ff0dd3e5a6b067aca6bf8609" Jan 20 18:05:39 crc kubenswrapper[4558]: I0120 18:05:39.798890 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/custom-svc-edpm-compute-no-nodes-ovrd-edpm-compute-no-nodetmjvn" Jan 20 18:05:48 crc kubenswrapper[4558]: I0120 18:05:48.565979 4558 scope.go:117] "RemoveContainer" containerID="6b02d31ec11fe5435af988e35303a6f66b5afa5ef952ce5f7cc4b3cd5f4d9497" Jan 20 18:05:48 crc kubenswrapper[4558]: E0120 18:05:48.567042 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:06:03 crc kubenswrapper[4558]: I0120 18:06:03.565898 4558 scope.go:117] "RemoveContainer" containerID="6b02d31ec11fe5435af988e35303a6f66b5afa5ef952ce5f7cc4b3cd5f4d9497" Jan 20 18:06:03 crc kubenswrapper[4558]: E0120 18:06:03.566832 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:06:03 crc kubenswrapper[4558]: I0120 18:06:03.791115 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ovn-edpm-compute-no-nodes-updated-ovn-cm-edpm-compute-no-n8f225"] Jan 20 18:06:03 crc kubenswrapper[4558]: E0120 18:06:03.791502 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac9e577e-5aa4-4923-8f3f-a4f1fa6ce515" containerName="custom-svc-edpm-compute-no-nodes-ovrd-edpm-compute-no-nodes" Jan 20 18:06:03 crc kubenswrapper[4558]: I0120 18:06:03.791517 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac9e577e-5aa4-4923-8f3f-a4f1fa6ce515" containerName="custom-svc-edpm-compute-no-nodes-ovrd-edpm-compute-no-nodes" Jan 20 18:06:03 crc kubenswrapper[4558]: I0120 18:06:03.791657 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac9e577e-5aa4-4923-8f3f-a4f1fa6ce515" containerName="custom-svc-edpm-compute-no-nodes-ovrd-edpm-compute-no-nodes" Jan 20 18:06:03 crc kubenswrapper[4558]: I0120 18:06:03.792225 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovn-edpm-compute-no-nodes-updated-ovn-cm-edpm-compute-no-n8f225" Jan 20 18:06:03 crc kubenswrapper[4558]: I0120 18:06:03.794898 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovncontroller-config" Jan 20 18:06:03 crc kubenswrapper[4558]: I0120 18:06:03.795185 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"combined-ca-bundle" Jan 20 18:06:03 crc kubenswrapper[4558]: I0120 18:06:03.795420 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"edpm-compute-no-nodes-dockercfg-tljmj" Jan 20 18:06:03 crc kubenswrapper[4558]: I0120 18:06:03.796655 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 18:06:03 crc kubenswrapper[4558]: I0120 18:06:03.797682 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-edpm-compute-no-nodes" Jan 20 18:06:03 crc kubenswrapper[4558]: I0120 18:06:03.798546 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 18:06:03 crc kubenswrapper[4558]: I0120 18:06:03.803201 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovn-edpm-compute-no-nodes-updated-ovn-cm-edpm-compute-no-n8f225"] Jan 20 18:06:03 crc kubenswrapper[4558]: I0120 18:06:03.939648 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/b16dc76f-2b51-48f2-9fe8-ff79228fba59-ssh-key-edpm-compute-no-nodes\") pod \"ovn-edpm-compute-no-nodes-updated-ovn-cm-edpm-compute-no-n8f225\" (UID: \"b16dc76f-2b51-48f2-9fe8-ff79228fba59\") " pod="openstack-kuttl-tests/ovn-edpm-compute-no-nodes-updated-ovn-cm-edpm-compute-no-n8f225" Jan 20 18:06:03 crc kubenswrapper[4558]: I0120 18:06:03.939731 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fxxwm\" (UniqueName: \"kubernetes.io/projected/b16dc76f-2b51-48f2-9fe8-ff79228fba59-kube-api-access-fxxwm\") pod \"ovn-edpm-compute-no-nodes-updated-ovn-cm-edpm-compute-no-n8f225\" (UID: \"b16dc76f-2b51-48f2-9fe8-ff79228fba59\") " pod="openstack-kuttl-tests/ovn-edpm-compute-no-nodes-updated-ovn-cm-edpm-compute-no-n8f225" Jan 20 18:06:03 crc kubenswrapper[4558]: I0120 18:06:03.939774 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b16dc76f-2b51-48f2-9fe8-ff79228fba59-inventory\") pod \"ovn-edpm-compute-no-nodes-updated-ovn-cm-edpm-compute-no-n8f225\" (UID: \"b16dc76f-2b51-48f2-9fe8-ff79228fba59\") " pod="openstack-kuttl-tests/ovn-edpm-compute-no-nodes-updated-ovn-cm-edpm-compute-no-n8f225" Jan 20 18:06:03 crc kubenswrapper[4558]: I0120 18:06:03.939987 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/b16dc76f-2b51-48f2-9fe8-ff79228fba59-ovncontroller-config-0\") pod \"ovn-edpm-compute-no-nodes-updated-ovn-cm-edpm-compute-no-n8f225\" (UID: \"b16dc76f-2b51-48f2-9fe8-ff79228fba59\") " pod="openstack-kuttl-tests/ovn-edpm-compute-no-nodes-updated-ovn-cm-edpm-compute-no-n8f225" Jan 20 18:06:03 crc kubenswrapper[4558]: I0120 18:06:03.940053 4558 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b16dc76f-2b51-48f2-9fe8-ff79228fba59-ovn-combined-ca-bundle\") pod \"ovn-edpm-compute-no-nodes-updated-ovn-cm-edpm-compute-no-n8f225\" (UID: \"b16dc76f-2b51-48f2-9fe8-ff79228fba59\") " pod="openstack-kuttl-tests/ovn-edpm-compute-no-nodes-updated-ovn-cm-edpm-compute-no-n8f225" Jan 20 18:06:04 crc kubenswrapper[4558]: I0120 18:06:04.042977 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/b16dc76f-2b51-48f2-9fe8-ff79228fba59-ssh-key-edpm-compute-no-nodes\") pod \"ovn-edpm-compute-no-nodes-updated-ovn-cm-edpm-compute-no-n8f225\" (UID: \"b16dc76f-2b51-48f2-9fe8-ff79228fba59\") " pod="openstack-kuttl-tests/ovn-edpm-compute-no-nodes-updated-ovn-cm-edpm-compute-no-n8f225" Jan 20 18:06:04 crc kubenswrapper[4558]: I0120 18:06:04.043045 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fxxwm\" (UniqueName: \"kubernetes.io/projected/b16dc76f-2b51-48f2-9fe8-ff79228fba59-kube-api-access-fxxwm\") pod \"ovn-edpm-compute-no-nodes-updated-ovn-cm-edpm-compute-no-n8f225\" (UID: \"b16dc76f-2b51-48f2-9fe8-ff79228fba59\") " pod="openstack-kuttl-tests/ovn-edpm-compute-no-nodes-updated-ovn-cm-edpm-compute-no-n8f225" Jan 20 18:06:04 crc kubenswrapper[4558]: I0120 18:06:04.043085 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b16dc76f-2b51-48f2-9fe8-ff79228fba59-inventory\") pod \"ovn-edpm-compute-no-nodes-updated-ovn-cm-edpm-compute-no-n8f225\" (UID: \"b16dc76f-2b51-48f2-9fe8-ff79228fba59\") " pod="openstack-kuttl-tests/ovn-edpm-compute-no-nodes-updated-ovn-cm-edpm-compute-no-n8f225" Jan 20 18:06:04 crc kubenswrapper[4558]: I0120 18:06:04.043151 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/b16dc76f-2b51-48f2-9fe8-ff79228fba59-ovncontroller-config-0\") pod \"ovn-edpm-compute-no-nodes-updated-ovn-cm-edpm-compute-no-n8f225\" (UID: \"b16dc76f-2b51-48f2-9fe8-ff79228fba59\") " pod="openstack-kuttl-tests/ovn-edpm-compute-no-nodes-updated-ovn-cm-edpm-compute-no-n8f225" Jan 20 18:06:04 crc kubenswrapper[4558]: I0120 18:06:04.043232 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b16dc76f-2b51-48f2-9fe8-ff79228fba59-ovn-combined-ca-bundle\") pod \"ovn-edpm-compute-no-nodes-updated-ovn-cm-edpm-compute-no-n8f225\" (UID: \"b16dc76f-2b51-48f2-9fe8-ff79228fba59\") " pod="openstack-kuttl-tests/ovn-edpm-compute-no-nodes-updated-ovn-cm-edpm-compute-no-n8f225" Jan 20 18:06:04 crc kubenswrapper[4558]: I0120 18:06:04.046080 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/b16dc76f-2b51-48f2-9fe8-ff79228fba59-ovncontroller-config-0\") pod \"ovn-edpm-compute-no-nodes-updated-ovn-cm-edpm-compute-no-n8f225\" (UID: \"b16dc76f-2b51-48f2-9fe8-ff79228fba59\") " pod="openstack-kuttl-tests/ovn-edpm-compute-no-nodes-updated-ovn-cm-edpm-compute-no-n8f225" Jan 20 18:06:04 crc kubenswrapper[4558]: I0120 18:06:04.051748 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: 
\"kubernetes.io/secret/b16dc76f-2b51-48f2-9fe8-ff79228fba59-ssh-key-edpm-compute-no-nodes\") pod \"ovn-edpm-compute-no-nodes-updated-ovn-cm-edpm-compute-no-n8f225\" (UID: \"b16dc76f-2b51-48f2-9fe8-ff79228fba59\") " pod="openstack-kuttl-tests/ovn-edpm-compute-no-nodes-updated-ovn-cm-edpm-compute-no-n8f225" Jan 20 18:06:04 crc kubenswrapper[4558]: I0120 18:06:04.059634 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b16dc76f-2b51-48f2-9fe8-ff79228fba59-inventory\") pod \"ovn-edpm-compute-no-nodes-updated-ovn-cm-edpm-compute-no-n8f225\" (UID: \"b16dc76f-2b51-48f2-9fe8-ff79228fba59\") " pod="openstack-kuttl-tests/ovn-edpm-compute-no-nodes-updated-ovn-cm-edpm-compute-no-n8f225" Jan 20 18:06:04 crc kubenswrapper[4558]: I0120 18:06:04.061608 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b16dc76f-2b51-48f2-9fe8-ff79228fba59-ovn-combined-ca-bundle\") pod \"ovn-edpm-compute-no-nodes-updated-ovn-cm-edpm-compute-no-n8f225\" (UID: \"b16dc76f-2b51-48f2-9fe8-ff79228fba59\") " pod="openstack-kuttl-tests/ovn-edpm-compute-no-nodes-updated-ovn-cm-edpm-compute-no-n8f225" Jan 20 18:06:04 crc kubenswrapper[4558]: I0120 18:06:04.063194 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fxxwm\" (UniqueName: \"kubernetes.io/projected/b16dc76f-2b51-48f2-9fe8-ff79228fba59-kube-api-access-fxxwm\") pod \"ovn-edpm-compute-no-nodes-updated-ovn-cm-edpm-compute-no-n8f225\" (UID: \"b16dc76f-2b51-48f2-9fe8-ff79228fba59\") " pod="openstack-kuttl-tests/ovn-edpm-compute-no-nodes-updated-ovn-cm-edpm-compute-no-n8f225" Jan 20 18:06:04 crc kubenswrapper[4558]: I0120 18:06:04.115337 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovn-edpm-compute-no-nodes-updated-ovn-cm-edpm-compute-no-n8f225" Jan 20 18:06:04 crc kubenswrapper[4558]: I0120 18:06:04.525655 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovn-edpm-compute-no-nodes-updated-ovn-cm-edpm-compute-no-n8f225"] Jan 20 18:06:05 crc kubenswrapper[4558]: I0120 18:06:05.039960 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-edpm-compute-no-nodes-updated-ovn-cm-edpm-compute-no-n8f225" event={"ID":"b16dc76f-2b51-48f2-9fe8-ff79228fba59","Type":"ContainerStarted","Data":"e70037f80248c65ef2a84e811e40a9166622f01d85e3078016cf2728f532d4bb"} Jan 20 18:06:06 crc kubenswrapper[4558]: I0120 18:06:06.049888 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-edpm-compute-no-nodes-updated-ovn-cm-edpm-compute-no-n8f225" event={"ID":"b16dc76f-2b51-48f2-9fe8-ff79228fba59","Type":"ContainerStarted","Data":"48e898b9396791e356c286a06a348c31e78843284731ff15b0be127e3cd6f6cc"} Jan 20 18:06:06 crc kubenswrapper[4558]: I0120 18:06:06.069935 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ovn-edpm-compute-no-nodes-updated-ovn-cm-edpm-compute-no-n8f225" podStartSLOduration=2.269210997 podStartE2EDuration="3.069912244s" podCreationTimestamp="2026-01-20 18:06:03 +0000 UTC" firstStartedPulling="2026-01-20 18:06:04.530802442 +0000 UTC m=+5058.291140400" lastFinishedPulling="2026-01-20 18:06:05.33150368 +0000 UTC m=+5059.091841647" observedRunningTime="2026-01-20 18:06:06.063248976 +0000 UTC m=+5059.823586943" watchObservedRunningTime="2026-01-20 18:06:06.069912244 +0000 UTC m=+5059.830250211" Jan 20 18:06:07 crc kubenswrapper[4558]: I0120 18:06:07.063214 4558 generic.go:334] "Generic (PLEG): container finished" podID="b16dc76f-2b51-48f2-9fe8-ff79228fba59" containerID="48e898b9396791e356c286a06a348c31e78843284731ff15b0be127e3cd6f6cc" exitCode=0 Jan 20 18:06:07 crc kubenswrapper[4558]: I0120 18:06:07.063305 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-edpm-compute-no-nodes-updated-ovn-cm-edpm-compute-no-n8f225" event={"ID":"b16dc76f-2b51-48f2-9fe8-ff79228fba59","Type":"ContainerDied","Data":"48e898b9396791e356c286a06a348c31e78843284731ff15b0be127e3cd6f6cc"} Jan 20 18:06:08 crc kubenswrapper[4558]: I0120 18:06:08.328262 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovn-edpm-compute-no-nodes-updated-ovn-cm-edpm-compute-no-n8f225" Jan 20 18:06:08 crc kubenswrapper[4558]: I0120 18:06:08.515451 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b16dc76f-2b51-48f2-9fe8-ff79228fba59-inventory\") pod \"b16dc76f-2b51-48f2-9fe8-ff79228fba59\" (UID: \"b16dc76f-2b51-48f2-9fe8-ff79228fba59\") " Jan 20 18:06:08 crc kubenswrapper[4558]: I0120 18:06:08.515614 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/b16dc76f-2b51-48f2-9fe8-ff79228fba59-ssh-key-edpm-compute-no-nodes\") pod \"b16dc76f-2b51-48f2-9fe8-ff79228fba59\" (UID: \"b16dc76f-2b51-48f2-9fe8-ff79228fba59\") " Jan 20 18:06:08 crc kubenswrapper[4558]: I0120 18:06:08.515686 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fxxwm\" (UniqueName: \"kubernetes.io/projected/b16dc76f-2b51-48f2-9fe8-ff79228fba59-kube-api-access-fxxwm\") pod \"b16dc76f-2b51-48f2-9fe8-ff79228fba59\" (UID: \"b16dc76f-2b51-48f2-9fe8-ff79228fba59\") " Jan 20 18:06:08 crc kubenswrapper[4558]: I0120 18:06:08.515722 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b16dc76f-2b51-48f2-9fe8-ff79228fba59-ovn-combined-ca-bundle\") pod \"b16dc76f-2b51-48f2-9fe8-ff79228fba59\" (UID: \"b16dc76f-2b51-48f2-9fe8-ff79228fba59\") " Jan 20 18:06:08 crc kubenswrapper[4558]: I0120 18:06:08.516745 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/b16dc76f-2b51-48f2-9fe8-ff79228fba59-ovncontroller-config-0\") pod \"b16dc76f-2b51-48f2-9fe8-ff79228fba59\" (UID: \"b16dc76f-2b51-48f2-9fe8-ff79228fba59\") " Jan 20 18:06:08 crc kubenswrapper[4558]: I0120 18:06:08.521853 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b16dc76f-2b51-48f2-9fe8-ff79228fba59-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "b16dc76f-2b51-48f2-9fe8-ff79228fba59" (UID: "b16dc76f-2b51-48f2-9fe8-ff79228fba59"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:06:08 crc kubenswrapper[4558]: I0120 18:06:08.522731 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b16dc76f-2b51-48f2-9fe8-ff79228fba59-kube-api-access-fxxwm" (OuterVolumeSpecName: "kube-api-access-fxxwm") pod "b16dc76f-2b51-48f2-9fe8-ff79228fba59" (UID: "b16dc76f-2b51-48f2-9fe8-ff79228fba59"). InnerVolumeSpecName "kube-api-access-fxxwm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:06:08 crc kubenswrapper[4558]: E0120 18:06:08.535428 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/b16dc76f-2b51-48f2-9fe8-ff79228fba59-ovncontroller-config-0 podName:b16dc76f-2b51-48f2-9fe8-ff79228fba59 nodeName:}" failed. No retries permitted until 2026-01-20 18:06:09.035397145 +0000 UTC m=+5062.795735112 (durationBeforeRetry 500ms). 
Error: error cleaning subPath mounts for volume "ovncontroller-config-0" (UniqueName: "kubernetes.io/configmap/b16dc76f-2b51-48f2-9fe8-ff79228fba59-ovncontroller-config-0") pod "b16dc76f-2b51-48f2-9fe8-ff79228fba59" (UID: "b16dc76f-2b51-48f2-9fe8-ff79228fba59") : error deleting /var/lib/kubelet/pods/b16dc76f-2b51-48f2-9fe8-ff79228fba59/volume-subpaths: remove /var/lib/kubelet/pods/b16dc76f-2b51-48f2-9fe8-ff79228fba59/volume-subpaths: no such file or directory Jan 20 18:06:08 crc kubenswrapper[4558]: I0120 18:06:08.536818 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b16dc76f-2b51-48f2-9fe8-ff79228fba59-ssh-key-edpm-compute-no-nodes" (OuterVolumeSpecName: "ssh-key-edpm-compute-no-nodes") pod "b16dc76f-2b51-48f2-9fe8-ff79228fba59" (UID: "b16dc76f-2b51-48f2-9fe8-ff79228fba59"). InnerVolumeSpecName "ssh-key-edpm-compute-no-nodes". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:06:08 crc kubenswrapper[4558]: I0120 18:06:08.537108 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b16dc76f-2b51-48f2-9fe8-ff79228fba59-inventory" (OuterVolumeSpecName: "inventory") pod "b16dc76f-2b51-48f2-9fe8-ff79228fba59" (UID: "b16dc76f-2b51-48f2-9fe8-ff79228fba59"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:06:08 crc kubenswrapper[4558]: I0120 18:06:08.618821 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b16dc76f-2b51-48f2-9fe8-ff79228fba59-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:06:08 crc kubenswrapper[4558]: I0120 18:06:08.618856 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/b16dc76f-2b51-48f2-9fe8-ff79228fba59-ssh-key-edpm-compute-no-nodes\") on node \"crc\" DevicePath \"\"" Jan 20 18:06:08 crc kubenswrapper[4558]: I0120 18:06:08.618882 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fxxwm\" (UniqueName: \"kubernetes.io/projected/b16dc76f-2b51-48f2-9fe8-ff79228fba59-kube-api-access-fxxwm\") on node \"crc\" DevicePath \"\"" Jan 20 18:06:08 crc kubenswrapper[4558]: I0120 18:06:08.618895 4558 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b16dc76f-2b51-48f2-9fe8-ff79228fba59-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:06:09 crc kubenswrapper[4558]: I0120 18:06:09.080866 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-edpm-compute-no-nodes-updated-ovn-cm-edpm-compute-no-n8f225" event={"ID":"b16dc76f-2b51-48f2-9fe8-ff79228fba59","Type":"ContainerDied","Data":"e70037f80248c65ef2a84e811e40a9166622f01d85e3078016cf2728f532d4bb"} Jan 20 18:06:09 crc kubenswrapper[4558]: I0120 18:06:09.080922 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e70037f80248c65ef2a84e811e40a9166622f01d85e3078016cf2728f532d4bb" Jan 20 18:06:09 crc kubenswrapper[4558]: I0120 18:06:09.080934 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovn-edpm-compute-no-nodes-updated-ovn-cm-edpm-compute-no-n8f225" Jan 20 18:06:09 crc kubenswrapper[4558]: I0120 18:06:09.126780 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/b16dc76f-2b51-48f2-9fe8-ff79228fba59-ovncontroller-config-0\") pod \"b16dc76f-2b51-48f2-9fe8-ff79228fba59\" (UID: \"b16dc76f-2b51-48f2-9fe8-ff79228fba59\") " Jan 20 18:06:09 crc kubenswrapper[4558]: I0120 18:06:09.127265 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b16dc76f-2b51-48f2-9fe8-ff79228fba59-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "b16dc76f-2b51-48f2-9fe8-ff79228fba59" (UID: "b16dc76f-2b51-48f2-9fe8-ff79228fba59"). InnerVolumeSpecName "ovncontroller-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:06:09 crc kubenswrapper[4558]: I0120 18:06:09.228588 4558 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/b16dc76f-2b51-48f2-9fe8-ff79228fba59-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Jan 20 18:06:10 crc kubenswrapper[4558]: I0120 18:06:10.961995 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-67886899f9-wnvb9"] Jan 20 18:06:10 crc kubenswrapper[4558]: E0120 18:06:10.962350 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b16dc76f-2b51-48f2-9fe8-ff79228fba59" containerName="ovn-edpm-compute-no-nodes-updated-ovn-cm-edpm-compute-no-nodes" Jan 20 18:06:10 crc kubenswrapper[4558]: I0120 18:06:10.962365 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b16dc76f-2b51-48f2-9fe8-ff79228fba59" containerName="ovn-edpm-compute-no-nodes-updated-ovn-cm-edpm-compute-no-nodes" Jan 20 18:06:10 crc kubenswrapper[4558]: I0120 18:06:10.962531 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b16dc76f-2b51-48f2-9fe8-ff79228fba59" containerName="ovn-edpm-compute-no-nodes-updated-ovn-cm-edpm-compute-no-nodes" Jan 20 18:06:10 crc kubenswrapper[4558]: I0120 18:06:10.963272 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-67886899f9-wnvb9" Jan 20 18:06:10 crc kubenswrapper[4558]: I0120 18:06:10.967522 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"edpm-compute-beta-nodeset" Jan 20 18:06:10 crc kubenswrapper[4558]: I0120 18:06:10.978376 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-67886899f9-wnvb9"] Jan 20 18:06:11 crc kubenswrapper[4558]: I0120 18:06:11.157324 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/configmap/47dec77f-e154-411f-88ab-8519ce4c237f-edpm-compute-no-nodes\") pod \"dnsmasq-dnsmasq-67886899f9-wnvb9\" (UID: \"47dec77f-e154-411f-88ab-8519ce4c237f\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-67886899f9-wnvb9" Jan 20 18:06:11 crc kubenswrapper[4558]: I0120 18:06:11.157388 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/47dec77f-e154-411f-88ab-8519ce4c237f-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-67886899f9-wnvb9\" (UID: \"47dec77f-e154-411f-88ab-8519ce4c237f\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-67886899f9-wnvb9" Jan 20 18:06:11 crc kubenswrapper[4558]: I0120 18:06:11.157433 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"edpm-compute-beta-nodeset\" (UniqueName: \"kubernetes.io/configmap/47dec77f-e154-411f-88ab-8519ce4c237f-edpm-compute-beta-nodeset\") pod \"dnsmasq-dnsmasq-67886899f9-wnvb9\" (UID: \"47dec77f-e154-411f-88ab-8519ce4c237f\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-67886899f9-wnvb9" Jan 20 18:06:11 crc kubenswrapper[4558]: I0120 18:06:11.157513 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/47dec77f-e154-411f-88ab-8519ce4c237f-config\") pod \"dnsmasq-dnsmasq-67886899f9-wnvb9\" (UID: \"47dec77f-e154-411f-88ab-8519ce4c237f\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-67886899f9-wnvb9" Jan 20 18:06:11 crc kubenswrapper[4558]: I0120 18:06:11.157702 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j6drd\" (UniqueName: \"kubernetes.io/projected/47dec77f-e154-411f-88ab-8519ce4c237f-kube-api-access-j6drd\") pod \"dnsmasq-dnsmasq-67886899f9-wnvb9\" (UID: \"47dec77f-e154-411f-88ab-8519ce4c237f\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-67886899f9-wnvb9" Jan 20 18:06:11 crc kubenswrapper[4558]: I0120 18:06:11.259442 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/configmap/47dec77f-e154-411f-88ab-8519ce4c237f-edpm-compute-no-nodes\") pod \"dnsmasq-dnsmasq-67886899f9-wnvb9\" (UID: \"47dec77f-e154-411f-88ab-8519ce4c237f\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-67886899f9-wnvb9" Jan 20 18:06:11 crc kubenswrapper[4558]: I0120 18:06:11.259513 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/47dec77f-e154-411f-88ab-8519ce4c237f-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-67886899f9-wnvb9\" (UID: \"47dec77f-e154-411f-88ab-8519ce4c237f\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-67886899f9-wnvb9" Jan 20 18:06:11 crc kubenswrapper[4558]: I0120 18:06:11.259560 4558 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"edpm-compute-beta-nodeset\" (UniqueName: \"kubernetes.io/configmap/47dec77f-e154-411f-88ab-8519ce4c237f-edpm-compute-beta-nodeset\") pod \"dnsmasq-dnsmasq-67886899f9-wnvb9\" (UID: \"47dec77f-e154-411f-88ab-8519ce4c237f\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-67886899f9-wnvb9" Jan 20 18:06:11 crc kubenswrapper[4558]: I0120 18:06:11.259590 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/47dec77f-e154-411f-88ab-8519ce4c237f-config\") pod \"dnsmasq-dnsmasq-67886899f9-wnvb9\" (UID: \"47dec77f-e154-411f-88ab-8519ce4c237f\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-67886899f9-wnvb9" Jan 20 18:06:11 crc kubenswrapper[4558]: I0120 18:06:11.259622 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j6drd\" (UniqueName: \"kubernetes.io/projected/47dec77f-e154-411f-88ab-8519ce4c237f-kube-api-access-j6drd\") pod \"dnsmasq-dnsmasq-67886899f9-wnvb9\" (UID: \"47dec77f-e154-411f-88ab-8519ce4c237f\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-67886899f9-wnvb9" Jan 20 18:06:11 crc kubenswrapper[4558]: I0120 18:06:11.260508 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/configmap/47dec77f-e154-411f-88ab-8519ce4c237f-edpm-compute-no-nodes\") pod \"dnsmasq-dnsmasq-67886899f9-wnvb9\" (UID: \"47dec77f-e154-411f-88ab-8519ce4c237f\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-67886899f9-wnvb9" Jan 20 18:06:11 crc kubenswrapper[4558]: I0120 18:06:11.260508 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/47dec77f-e154-411f-88ab-8519ce4c237f-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-67886899f9-wnvb9\" (UID: \"47dec77f-e154-411f-88ab-8519ce4c237f\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-67886899f9-wnvb9" Jan 20 18:06:11 crc kubenswrapper[4558]: I0120 18:06:11.260787 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"edpm-compute-beta-nodeset\" (UniqueName: \"kubernetes.io/configmap/47dec77f-e154-411f-88ab-8519ce4c237f-edpm-compute-beta-nodeset\") pod \"dnsmasq-dnsmasq-67886899f9-wnvb9\" (UID: \"47dec77f-e154-411f-88ab-8519ce4c237f\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-67886899f9-wnvb9" Jan 20 18:06:11 crc kubenswrapper[4558]: I0120 18:06:11.260790 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/47dec77f-e154-411f-88ab-8519ce4c237f-config\") pod \"dnsmasq-dnsmasq-67886899f9-wnvb9\" (UID: \"47dec77f-e154-411f-88ab-8519ce4c237f\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-67886899f9-wnvb9" Jan 20 18:06:11 crc kubenswrapper[4558]: I0120 18:06:11.278496 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j6drd\" (UniqueName: \"kubernetes.io/projected/47dec77f-e154-411f-88ab-8519ce4c237f-kube-api-access-j6drd\") pod \"dnsmasq-dnsmasq-67886899f9-wnvb9\" (UID: \"47dec77f-e154-411f-88ab-8519ce4c237f\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-67886899f9-wnvb9" Jan 20 18:06:11 crc kubenswrapper[4558]: I0120 18:06:11.577854 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-67886899f9-wnvb9" Jan 20 18:06:12 crc kubenswrapper[4558]: I0120 18:06:12.182215 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-67886899f9-wnvb9"] Jan 20 18:06:13 crc kubenswrapper[4558]: I0120 18:06:13.119618 4558 generic.go:334] "Generic (PLEG): container finished" podID="47dec77f-e154-411f-88ab-8519ce4c237f" containerID="94ef870003c23785447a149bbe5c270f08286b93c2e4be50cc2eb7c40a01e920" exitCode=0 Jan 20 18:06:13 crc kubenswrapper[4558]: I0120 18:06:13.120076 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-67886899f9-wnvb9" event={"ID":"47dec77f-e154-411f-88ab-8519ce4c237f","Type":"ContainerDied","Data":"94ef870003c23785447a149bbe5c270f08286b93c2e4be50cc2eb7c40a01e920"} Jan 20 18:06:13 crc kubenswrapper[4558]: I0120 18:06:13.120517 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-67886899f9-wnvb9" event={"ID":"47dec77f-e154-411f-88ab-8519ce4c237f","Type":"ContainerStarted","Data":"510f21eee6378db0a51f18ed0ab08e3d1b5c0212d0696a3063c54c2967b5010e"} Jan 20 18:06:14 crc kubenswrapper[4558]: I0120 18:06:14.131648 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-67886899f9-wnvb9" event={"ID":"47dec77f-e154-411f-88ab-8519ce4c237f","Type":"ContainerStarted","Data":"e1a02a263499579047379b8549ce290f106baf12c89cff06aedd75bd32d72796"} Jan 20 18:06:14 crc kubenswrapper[4558]: I0120 18:06:14.131961 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-67886899f9-wnvb9" Jan 20 18:06:14 crc kubenswrapper[4558]: I0120 18:06:14.147963 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-67886899f9-wnvb9" podStartSLOduration=4.147946972 podStartE2EDuration="4.147946972s" podCreationTimestamp="2026-01-20 18:06:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:06:14.144258516 +0000 UTC m=+5067.904596484" watchObservedRunningTime="2026-01-20 18:06:14.147946972 +0000 UTC m=+5067.908284928" Jan 20 18:06:18 crc kubenswrapper[4558]: I0120 18:06:18.566770 4558 scope.go:117] "RemoveContainer" containerID="6b02d31ec11fe5435af988e35303a6f66b5afa5ef952ce5f7cc4b3cd5f4d9497" Jan 20 18:06:18 crc kubenswrapper[4558]: E0120 18:06:18.567459 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:06:21 crc kubenswrapper[4558]: I0120 18:06:21.579318 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-67886899f9-wnvb9" Jan 20 18:06:21 crc kubenswrapper[4558]: I0120 18:06:21.621551 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-v78fc"] Jan 20 18:06:21 crc kubenswrapper[4558]: I0120 18:06:21.621818 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-v78fc" 
podUID="bd900914-8370-4987-bd89-cc9608d5d01b" containerName="dnsmasq-dns" containerID="cri-o://aeddbdc5fc86255ca17886304a7754ff9cd3918d4e51744d680e59862e309c07" gracePeriod=10 Jan 20 18:06:22 crc kubenswrapper[4558]: I0120 18:06:22.215086 4558 generic.go:334] "Generic (PLEG): container finished" podID="bd900914-8370-4987-bd89-cc9608d5d01b" containerID="aeddbdc5fc86255ca17886304a7754ff9cd3918d4e51744d680e59862e309c07" exitCode=0 Jan 20 18:06:22 crc kubenswrapper[4558]: I0120 18:06:22.215210 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-v78fc" event={"ID":"bd900914-8370-4987-bd89-cc9608d5d01b","Type":"ContainerDied","Data":"aeddbdc5fc86255ca17886304a7754ff9cd3918d4e51744d680e59862e309c07"} Jan 20 18:06:22 crc kubenswrapper[4558]: I0120 18:06:22.293463 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-v78fc" Jan 20 18:06:22 crc kubenswrapper[4558]: I0120 18:06:22.323491 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2czk9\" (UniqueName: \"kubernetes.io/projected/bd900914-8370-4987-bd89-cc9608d5d01b-kube-api-access-2czk9\") pod \"bd900914-8370-4987-bd89-cc9608d5d01b\" (UID: \"bd900914-8370-4987-bd89-cc9608d5d01b\") " Jan 20 18:06:22 crc kubenswrapper[4558]: I0120 18:06:22.323609 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/configmap/bd900914-8370-4987-bd89-cc9608d5d01b-edpm-compute-no-nodes\") pod \"bd900914-8370-4987-bd89-cc9608d5d01b\" (UID: \"bd900914-8370-4987-bd89-cc9608d5d01b\") " Jan 20 18:06:22 crc kubenswrapper[4558]: I0120 18:06:22.323703 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/bd900914-8370-4987-bd89-cc9608d5d01b-dnsmasq-svc\") pod \"bd900914-8370-4987-bd89-cc9608d5d01b\" (UID: \"bd900914-8370-4987-bd89-cc9608d5d01b\") " Jan 20 18:06:22 crc kubenswrapper[4558]: I0120 18:06:22.323763 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bd900914-8370-4987-bd89-cc9608d5d01b-config\") pod \"bd900914-8370-4987-bd89-cc9608d5d01b\" (UID: \"bd900914-8370-4987-bd89-cc9608d5d01b\") " Jan 20 18:06:22 crc kubenswrapper[4558]: I0120 18:06:22.329014 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd900914-8370-4987-bd89-cc9608d5d01b-kube-api-access-2czk9" (OuterVolumeSpecName: "kube-api-access-2czk9") pod "bd900914-8370-4987-bd89-cc9608d5d01b" (UID: "bd900914-8370-4987-bd89-cc9608d5d01b"). InnerVolumeSpecName "kube-api-access-2czk9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:06:22 crc kubenswrapper[4558]: I0120 18:06:22.355326 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bd900914-8370-4987-bd89-cc9608d5d01b-config" (OuterVolumeSpecName: "config") pod "bd900914-8370-4987-bd89-cc9608d5d01b" (UID: "bd900914-8370-4987-bd89-cc9608d5d01b"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:06:22 crc kubenswrapper[4558]: I0120 18:06:22.355955 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bd900914-8370-4987-bd89-cc9608d5d01b-edpm-compute-no-nodes" (OuterVolumeSpecName: "edpm-compute-no-nodes") pod "bd900914-8370-4987-bd89-cc9608d5d01b" (UID: "bd900914-8370-4987-bd89-cc9608d5d01b"). InnerVolumeSpecName "edpm-compute-no-nodes". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:06:22 crc kubenswrapper[4558]: I0120 18:06:22.356655 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bd900914-8370-4987-bd89-cc9608d5d01b-dnsmasq-svc" (OuterVolumeSpecName: "dnsmasq-svc") pod "bd900914-8370-4987-bd89-cc9608d5d01b" (UID: "bd900914-8370-4987-bd89-cc9608d5d01b"). InnerVolumeSpecName "dnsmasq-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:06:22 crc kubenswrapper[4558]: I0120 18:06:22.425428 4558 reconciler_common.go:293] "Volume detached for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/bd900914-8370-4987-bd89-cc9608d5d01b-dnsmasq-svc\") on node \"crc\" DevicePath \"\"" Jan 20 18:06:22 crc kubenswrapper[4558]: I0120 18:06:22.425462 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bd900914-8370-4987-bd89-cc9608d5d01b-config\") on node \"crc\" DevicePath \"\"" Jan 20 18:06:22 crc kubenswrapper[4558]: I0120 18:06:22.425479 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2czk9\" (UniqueName: \"kubernetes.io/projected/bd900914-8370-4987-bd89-cc9608d5d01b-kube-api-access-2czk9\") on node \"crc\" DevicePath \"\"" Jan 20 18:06:22 crc kubenswrapper[4558]: I0120 18:06:22.425490 4558 reconciler_common.go:293] "Volume detached for volume \"edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/configmap/bd900914-8370-4987-bd89-cc9608d5d01b-edpm-compute-no-nodes\") on node \"crc\" DevicePath \"\"" Jan 20 18:06:23 crc kubenswrapper[4558]: I0120 18:06:23.228252 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-v78fc" event={"ID":"bd900914-8370-4987-bd89-cc9608d5d01b","Type":"ContainerDied","Data":"25ecc8b655769dde8f48a50be41f2b4714a03216001496ff858b3b62fa399bea"} Jan 20 18:06:23 crc kubenswrapper[4558]: I0120 18:06:23.228341 4558 scope.go:117] "RemoveContainer" containerID="aeddbdc5fc86255ca17886304a7754ff9cd3918d4e51744d680e59862e309c07" Jan 20 18:06:23 crc kubenswrapper[4558]: I0120 18:06:23.228338 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-v78fc" Jan 20 18:06:23 crc kubenswrapper[4558]: I0120 18:06:23.252032 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-v78fc"] Jan 20 18:06:23 crc kubenswrapper[4558]: I0120 18:06:23.252641 4558 scope.go:117] "RemoveContainer" containerID="156f065bcf6a5f8adb31adccc30b5afd280bc7cc62eae9c43f5f1b8b3833581f" Jan 20 18:06:23 crc kubenswrapper[4558]: I0120 18:06:23.257782 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-v78fc"] Jan 20 18:06:24 crc kubenswrapper[4558]: I0120 18:06:24.586036 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd900914-8370-4987-bd89-cc9608d5d01b" path="/var/lib/kubelet/pods/bd900914-8370-4987-bd89-cc9608d5d01b/volumes" Jan 20 18:06:25 crc kubenswrapper[4558]: I0120 18:06:25.933145 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-no-nodes-lrxfd"] Jan 20 18:06:25 crc kubenswrapper[4558]: E0120 18:06:25.933798 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd900914-8370-4987-bd89-cc9608d5d01b" containerName="init" Jan 20 18:06:25 crc kubenswrapper[4558]: I0120 18:06:25.933813 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd900914-8370-4987-bd89-cc9608d5d01b" containerName="init" Jan 20 18:06:25 crc kubenswrapper[4558]: E0120 18:06:25.933829 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd900914-8370-4987-bd89-cc9608d5d01b" containerName="dnsmasq-dns" Jan 20 18:06:25 crc kubenswrapper[4558]: I0120 18:06:25.933835 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd900914-8370-4987-bd89-cc9608d5d01b" containerName="dnsmasq-dns" Jan 20 18:06:25 crc kubenswrapper[4558]: I0120 18:06:25.934030 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="bd900914-8370-4987-bd89-cc9608d5d01b" containerName="dnsmasq-dns" Jan 20 18:06:25 crc kubenswrapper[4558]: I0120 18:06:25.934916 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-no-nodes-lrxfd" Jan 20 18:06:25 crc kubenswrapper[4558]: I0120 18:06:25.936851 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 18:06:25 crc kubenswrapper[4558]: I0120 18:06:25.937034 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"edpm-compute-no-nodes-dockercfg-tljmj" Jan 20 18:06:25 crc kubenswrapper[4558]: I0120 18:06:25.937316 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 18:06:25 crc kubenswrapper[4558]: I0120 18:06:25.937360 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-edpm-compute-no-nodes" Jan 20 18:06:25 crc kubenswrapper[4558]: I0120 18:06:25.940263 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-beta-nodeset4f842"] Jan 20 18:06:25 crc kubenswrapper[4558]: I0120 18:06:25.941097 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-beta-nodeset4f842" Jan 20 18:06:25 crc kubenswrapper[4558]: I0120 18:06:25.942684 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-edpm-compute-beta-nodeset" Jan 20 18:06:25 crc kubenswrapper[4558]: I0120 18:06:25.942786 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"edpm-compute-beta-nodeset-dockercfg-xmthb" Jan 20 18:06:25 crc kubenswrapper[4558]: I0120 18:06:25.947543 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-no-nodes-lrxfd"] Jan 20 18:06:25 crc kubenswrapper[4558]: I0120 18:06:25.956312 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-beta-nodeset4f842"] Jan 20 18:06:25 crc kubenswrapper[4558]: I0120 18:06:25.970227 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/dd325333-0db8-4cec-8224-89aa1c81cfca-ssh-key-edpm-compute-no-nodes\") pod \"download-cache-edpm-multinodeset-edpm-compute-no-nodes-lrxfd\" (UID: \"dd325333-0db8-4cec-8224-89aa1c81cfca\") " pod="openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-no-nodes-lrxfd" Jan 20 18:06:25 crc kubenswrapper[4558]: I0120 18:06:25.970324 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-edpm-compute-beta-nodeset\" (UniqueName: \"kubernetes.io/secret/3f1f313a-0685-4de7-b389-fc57f2b52d7f-ssh-key-edpm-compute-beta-nodeset\") pod \"download-cache-edpm-multinodeset-edpm-compute-beta-nodeset4f842\" (UID: \"3f1f313a-0685-4de7-b389-fc57f2b52d7f\") " pod="openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-beta-nodeset4f842" Jan 20 18:06:25 crc kubenswrapper[4558]: I0120 18:06:25.970368 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3f1f313a-0685-4de7-b389-fc57f2b52d7f-inventory\") pod \"download-cache-edpm-multinodeset-edpm-compute-beta-nodeset4f842\" (UID: \"3f1f313a-0685-4de7-b389-fc57f2b52d7f\") " pod="openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-beta-nodeset4f842" Jan 20 18:06:25 crc kubenswrapper[4558]: I0120 18:06:25.970405 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mdt25\" (UniqueName: \"kubernetes.io/projected/dd325333-0db8-4cec-8224-89aa1c81cfca-kube-api-access-mdt25\") pod \"download-cache-edpm-multinodeset-edpm-compute-no-nodes-lrxfd\" (UID: \"dd325333-0db8-4cec-8224-89aa1c81cfca\") " pod="openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-no-nodes-lrxfd" Jan 20 18:06:25 crc kubenswrapper[4558]: I0120 18:06:25.970431 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/dd325333-0db8-4cec-8224-89aa1c81cfca-inventory\") pod \"download-cache-edpm-multinodeset-edpm-compute-no-nodes-lrxfd\" (UID: \"dd325333-0db8-4cec-8224-89aa1c81cfca\") " pod="openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-no-nodes-lrxfd" Jan 20 18:06:25 crc kubenswrapper[4558]: I0120 18:06:25.970454 4558 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-74t8m\" (UniqueName: \"kubernetes.io/projected/3f1f313a-0685-4de7-b389-fc57f2b52d7f-kube-api-access-74t8m\") pod \"download-cache-edpm-multinodeset-edpm-compute-beta-nodeset4f842\" (UID: \"3f1f313a-0685-4de7-b389-fc57f2b52d7f\") " pod="openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-beta-nodeset4f842" Jan 20 18:06:26 crc kubenswrapper[4558]: I0120 18:06:26.071508 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-edpm-compute-beta-nodeset\" (UniqueName: \"kubernetes.io/secret/3f1f313a-0685-4de7-b389-fc57f2b52d7f-ssh-key-edpm-compute-beta-nodeset\") pod \"download-cache-edpm-multinodeset-edpm-compute-beta-nodeset4f842\" (UID: \"3f1f313a-0685-4de7-b389-fc57f2b52d7f\") " pod="openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-beta-nodeset4f842" Jan 20 18:06:26 crc kubenswrapper[4558]: I0120 18:06:26.071652 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3f1f313a-0685-4de7-b389-fc57f2b52d7f-inventory\") pod \"download-cache-edpm-multinodeset-edpm-compute-beta-nodeset4f842\" (UID: \"3f1f313a-0685-4de7-b389-fc57f2b52d7f\") " pod="openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-beta-nodeset4f842" Jan 20 18:06:26 crc kubenswrapper[4558]: I0120 18:06:26.071768 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mdt25\" (UniqueName: \"kubernetes.io/projected/dd325333-0db8-4cec-8224-89aa1c81cfca-kube-api-access-mdt25\") pod \"download-cache-edpm-multinodeset-edpm-compute-no-nodes-lrxfd\" (UID: \"dd325333-0db8-4cec-8224-89aa1c81cfca\") " pod="openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-no-nodes-lrxfd" Jan 20 18:06:26 crc kubenswrapper[4558]: I0120 18:06:26.071851 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/dd325333-0db8-4cec-8224-89aa1c81cfca-inventory\") pod \"download-cache-edpm-multinodeset-edpm-compute-no-nodes-lrxfd\" (UID: \"dd325333-0db8-4cec-8224-89aa1c81cfca\") " pod="openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-no-nodes-lrxfd" Jan 20 18:06:26 crc kubenswrapper[4558]: I0120 18:06:26.071939 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-74t8m\" (UniqueName: \"kubernetes.io/projected/3f1f313a-0685-4de7-b389-fc57f2b52d7f-kube-api-access-74t8m\") pod \"download-cache-edpm-multinodeset-edpm-compute-beta-nodeset4f842\" (UID: \"3f1f313a-0685-4de7-b389-fc57f2b52d7f\") " pod="openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-beta-nodeset4f842" Jan 20 18:06:26 crc kubenswrapper[4558]: I0120 18:06:26.072044 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/dd325333-0db8-4cec-8224-89aa1c81cfca-ssh-key-edpm-compute-no-nodes\") pod \"download-cache-edpm-multinodeset-edpm-compute-no-nodes-lrxfd\" (UID: \"dd325333-0db8-4cec-8224-89aa1c81cfca\") " pod="openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-no-nodes-lrxfd" Jan 20 18:06:26 crc kubenswrapper[4558]: I0120 18:06:26.078819 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: 
\"kubernetes.io/secret/dd325333-0db8-4cec-8224-89aa1c81cfca-ssh-key-edpm-compute-no-nodes\") pod \"download-cache-edpm-multinodeset-edpm-compute-no-nodes-lrxfd\" (UID: \"dd325333-0db8-4cec-8224-89aa1c81cfca\") " pod="openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-no-nodes-lrxfd" Jan 20 18:06:26 crc kubenswrapper[4558]: I0120 18:06:26.078819 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-edpm-compute-beta-nodeset\" (UniqueName: \"kubernetes.io/secret/3f1f313a-0685-4de7-b389-fc57f2b52d7f-ssh-key-edpm-compute-beta-nodeset\") pod \"download-cache-edpm-multinodeset-edpm-compute-beta-nodeset4f842\" (UID: \"3f1f313a-0685-4de7-b389-fc57f2b52d7f\") " pod="openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-beta-nodeset4f842" Jan 20 18:06:26 crc kubenswrapper[4558]: I0120 18:06:26.079344 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/dd325333-0db8-4cec-8224-89aa1c81cfca-inventory\") pod \"download-cache-edpm-multinodeset-edpm-compute-no-nodes-lrxfd\" (UID: \"dd325333-0db8-4cec-8224-89aa1c81cfca\") " pod="openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-no-nodes-lrxfd" Jan 20 18:06:26 crc kubenswrapper[4558]: I0120 18:06:26.080579 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3f1f313a-0685-4de7-b389-fc57f2b52d7f-inventory\") pod \"download-cache-edpm-multinodeset-edpm-compute-beta-nodeset4f842\" (UID: \"3f1f313a-0685-4de7-b389-fc57f2b52d7f\") " pod="openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-beta-nodeset4f842" Jan 20 18:06:26 crc kubenswrapper[4558]: I0120 18:06:26.086581 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mdt25\" (UniqueName: \"kubernetes.io/projected/dd325333-0db8-4cec-8224-89aa1c81cfca-kube-api-access-mdt25\") pod \"download-cache-edpm-multinodeset-edpm-compute-no-nodes-lrxfd\" (UID: \"dd325333-0db8-4cec-8224-89aa1c81cfca\") " pod="openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-no-nodes-lrxfd" Jan 20 18:06:26 crc kubenswrapper[4558]: I0120 18:06:26.087984 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-74t8m\" (UniqueName: \"kubernetes.io/projected/3f1f313a-0685-4de7-b389-fc57f2b52d7f-kube-api-access-74t8m\") pod \"download-cache-edpm-multinodeset-edpm-compute-beta-nodeset4f842\" (UID: \"3f1f313a-0685-4de7-b389-fc57f2b52d7f\") " pod="openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-beta-nodeset4f842" Jan 20 18:06:26 crc kubenswrapper[4558]: I0120 18:06:26.250406 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-no-nodes-lrxfd" Jan 20 18:06:26 crc kubenswrapper[4558]: I0120 18:06:26.258983 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-beta-nodeset4f842" Jan 20 18:06:26 crc kubenswrapper[4558]: I0120 18:06:26.660754 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-beta-nodeset4f842"] Jan 20 18:06:26 crc kubenswrapper[4558]: W0120 18:06:26.665652 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3f1f313a_0685_4de7_b389_fc57f2b52d7f.slice/crio-e4efbd72b62d30ab99031963f27872fe4cb97f4d76bc3ed69c9bb5f4f46190a8 WatchSource:0}: Error finding container e4efbd72b62d30ab99031963f27872fe4cb97f4d76bc3ed69c9bb5f4f46190a8: Status 404 returned error can't find the container with id e4efbd72b62d30ab99031963f27872fe4cb97f4d76bc3ed69c9bb5f4f46190a8 Jan 20 18:06:26 crc kubenswrapper[4558]: W0120 18:06:26.706341 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddd325333_0db8_4cec_8224_89aa1c81cfca.slice/crio-aec04e765936b1e371a1be16807e533e15dcb1ecae41ba52f866fa0dc4e37e12 WatchSource:0}: Error finding container aec04e765936b1e371a1be16807e533e15dcb1ecae41ba52f866fa0dc4e37e12: Status 404 returned error can't find the container with id aec04e765936b1e371a1be16807e533e15dcb1ecae41ba52f866fa0dc4e37e12 Jan 20 18:06:26 crc kubenswrapper[4558]: I0120 18:06:26.707273 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-no-nodes-lrxfd"] Jan 20 18:06:27 crc kubenswrapper[4558]: I0120 18:06:27.268528 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-no-nodes-lrxfd" event={"ID":"dd325333-0db8-4cec-8224-89aa1c81cfca","Type":"ContainerStarted","Data":"aec04e765936b1e371a1be16807e533e15dcb1ecae41ba52f866fa0dc4e37e12"} Jan 20 18:06:27 crc kubenswrapper[4558]: I0120 18:06:27.273398 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-beta-nodeset4f842" event={"ID":"3f1f313a-0685-4de7-b389-fc57f2b52d7f","Type":"ContainerStarted","Data":"e4efbd72b62d30ab99031963f27872fe4cb97f4d76bc3ed69c9bb5f4f46190a8"} Jan 20 18:06:28 crc kubenswrapper[4558]: I0120 18:06:28.284150 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-no-nodes-lrxfd" event={"ID":"dd325333-0db8-4cec-8224-89aa1c81cfca","Type":"ContainerStarted","Data":"fd7d81dd40dc80373f8ddc8ea35dc5e972b9a241dd9309b33bf870102020f026"} Jan 20 18:06:28 crc kubenswrapper[4558]: I0120 18:06:28.288120 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-beta-nodeset4f842" event={"ID":"3f1f313a-0685-4de7-b389-fc57f2b52d7f","Type":"ContainerStarted","Data":"89048fbe36b5d5a3b2ebe0e98c9e8143e7dcd0bde06fa2aee6817d55e83ca0c4"} Jan 20 18:06:28 crc kubenswrapper[4558]: I0120 18:06:28.300650 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-no-nodes-lrxfd" podStartSLOduration=2.701284182 podStartE2EDuration="3.300633066s" podCreationTimestamp="2026-01-20 18:06:25 +0000 UTC" firstStartedPulling="2026-01-20 18:06:26.708513195 +0000 UTC m=+5080.468851162" lastFinishedPulling="2026-01-20 18:06:27.307862079 +0000 UTC 
m=+5081.068200046" observedRunningTime="2026-01-20 18:06:28.297259673 +0000 UTC m=+5082.057597640" watchObservedRunningTime="2026-01-20 18:06:28.300633066 +0000 UTC m=+5082.060971034" Jan 20 18:06:28 crc kubenswrapper[4558]: I0120 18:06:28.314204 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-beta-nodeset4f842" podStartSLOduration=2.687797144 podStartE2EDuration="3.314180468s" podCreationTimestamp="2026-01-20 18:06:25 +0000 UTC" firstStartedPulling="2026-01-20 18:06:26.668495935 +0000 UTC m=+5080.428833902" lastFinishedPulling="2026-01-20 18:06:27.294879259 +0000 UTC m=+5081.055217226" observedRunningTime="2026-01-20 18:06:28.311087322 +0000 UTC m=+5082.071425290" watchObservedRunningTime="2026-01-20 18:06:28.314180468 +0000 UTC m=+5082.074518435" Jan 20 18:06:29 crc kubenswrapper[4558]: I0120 18:06:29.304663 4558 generic.go:334] "Generic (PLEG): container finished" podID="3f1f313a-0685-4de7-b389-fc57f2b52d7f" containerID="89048fbe36b5d5a3b2ebe0e98c9e8143e7dcd0bde06fa2aee6817d55e83ca0c4" exitCode=0 Jan 20 18:06:29 crc kubenswrapper[4558]: I0120 18:06:29.305356 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-beta-nodeset4f842" event={"ID":"3f1f313a-0685-4de7-b389-fc57f2b52d7f","Type":"ContainerDied","Data":"89048fbe36b5d5a3b2ebe0e98c9e8143e7dcd0bde06fa2aee6817d55e83ca0c4"} Jan 20 18:06:29 crc kubenswrapper[4558]: I0120 18:06:29.308065 4558 generic.go:334] "Generic (PLEG): container finished" podID="dd325333-0db8-4cec-8224-89aa1c81cfca" containerID="fd7d81dd40dc80373f8ddc8ea35dc5e972b9a241dd9309b33bf870102020f026" exitCode=0 Jan 20 18:06:29 crc kubenswrapper[4558]: I0120 18:06:29.308097 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-no-nodes-lrxfd" event={"ID":"dd325333-0db8-4cec-8224-89aa1c81cfca","Type":"ContainerDied","Data":"fd7d81dd40dc80373f8ddc8ea35dc5e972b9a241dd9309b33bf870102020f026"} Jan 20 18:06:30 crc kubenswrapper[4558]: I0120 18:06:30.565345 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-beta-nodeset4f842" Jan 20 18:06:30 crc kubenswrapper[4558]: I0120 18:06:30.566267 4558 scope.go:117] "RemoveContainer" containerID="6b02d31ec11fe5435af988e35303a6f66b5afa5ef952ce5f7cc4b3cd5f4d9497" Jan 20 18:06:30 crc kubenswrapper[4558]: E0120 18:06:30.567296 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:06:30 crc kubenswrapper[4558]: I0120 18:06:30.638664 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-no-nodes-lrxfd" Jan 20 18:06:30 crc kubenswrapper[4558]: I0120 18:06:30.742553 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/dd325333-0db8-4cec-8224-89aa1c81cfca-inventory\") pod \"dd325333-0db8-4cec-8224-89aa1c81cfca\" (UID: \"dd325333-0db8-4cec-8224-89aa1c81cfca\") " Jan 20 18:06:30 crc kubenswrapper[4558]: I0120 18:06:30.742620 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-beta-nodeset\" (UniqueName: \"kubernetes.io/secret/3f1f313a-0685-4de7-b389-fc57f2b52d7f-ssh-key-edpm-compute-beta-nodeset\") pod \"3f1f313a-0685-4de7-b389-fc57f2b52d7f\" (UID: \"3f1f313a-0685-4de7-b389-fc57f2b52d7f\") " Jan 20 18:06:30 crc kubenswrapper[4558]: I0120 18:06:30.742665 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-74t8m\" (UniqueName: \"kubernetes.io/projected/3f1f313a-0685-4de7-b389-fc57f2b52d7f-kube-api-access-74t8m\") pod \"3f1f313a-0685-4de7-b389-fc57f2b52d7f\" (UID: \"3f1f313a-0685-4de7-b389-fc57f2b52d7f\") " Jan 20 18:06:30 crc kubenswrapper[4558]: I0120 18:06:30.742722 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/dd325333-0db8-4cec-8224-89aa1c81cfca-ssh-key-edpm-compute-no-nodes\") pod \"dd325333-0db8-4cec-8224-89aa1c81cfca\" (UID: \"dd325333-0db8-4cec-8224-89aa1c81cfca\") " Jan 20 18:06:30 crc kubenswrapper[4558]: I0120 18:06:30.742748 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mdt25\" (UniqueName: \"kubernetes.io/projected/dd325333-0db8-4cec-8224-89aa1c81cfca-kube-api-access-mdt25\") pod \"dd325333-0db8-4cec-8224-89aa1c81cfca\" (UID: \"dd325333-0db8-4cec-8224-89aa1c81cfca\") " Jan 20 18:06:30 crc kubenswrapper[4558]: I0120 18:06:30.742816 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3f1f313a-0685-4de7-b389-fc57f2b52d7f-inventory\") pod \"3f1f313a-0685-4de7-b389-fc57f2b52d7f\" (UID: \"3f1f313a-0685-4de7-b389-fc57f2b52d7f\") " Jan 20 18:06:30 crc kubenswrapper[4558]: I0120 18:06:30.750802 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3f1f313a-0685-4de7-b389-fc57f2b52d7f-kube-api-access-74t8m" (OuterVolumeSpecName: "kube-api-access-74t8m") pod "3f1f313a-0685-4de7-b389-fc57f2b52d7f" (UID: "3f1f313a-0685-4de7-b389-fc57f2b52d7f"). InnerVolumeSpecName "kube-api-access-74t8m". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:06:30 crc kubenswrapper[4558]: I0120 18:06:30.751046 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dd325333-0db8-4cec-8224-89aa1c81cfca-kube-api-access-mdt25" (OuterVolumeSpecName: "kube-api-access-mdt25") pod "dd325333-0db8-4cec-8224-89aa1c81cfca" (UID: "dd325333-0db8-4cec-8224-89aa1c81cfca"). InnerVolumeSpecName "kube-api-access-mdt25". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:06:30 crc kubenswrapper[4558]: I0120 18:06:30.766778 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f1f313a-0685-4de7-b389-fc57f2b52d7f-inventory" (OuterVolumeSpecName: "inventory") pod "3f1f313a-0685-4de7-b389-fc57f2b52d7f" (UID: "3f1f313a-0685-4de7-b389-fc57f2b52d7f"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:06:30 crc kubenswrapper[4558]: I0120 18:06:30.767048 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f1f313a-0685-4de7-b389-fc57f2b52d7f-ssh-key-edpm-compute-beta-nodeset" (OuterVolumeSpecName: "ssh-key-edpm-compute-beta-nodeset") pod "3f1f313a-0685-4de7-b389-fc57f2b52d7f" (UID: "3f1f313a-0685-4de7-b389-fc57f2b52d7f"). InnerVolumeSpecName "ssh-key-edpm-compute-beta-nodeset". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:06:30 crc kubenswrapper[4558]: I0120 18:06:30.767886 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd325333-0db8-4cec-8224-89aa1c81cfca-ssh-key-edpm-compute-no-nodes" (OuterVolumeSpecName: "ssh-key-edpm-compute-no-nodes") pod "dd325333-0db8-4cec-8224-89aa1c81cfca" (UID: "dd325333-0db8-4cec-8224-89aa1c81cfca"). InnerVolumeSpecName "ssh-key-edpm-compute-no-nodes". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:06:30 crc kubenswrapper[4558]: I0120 18:06:30.768329 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd325333-0db8-4cec-8224-89aa1c81cfca-inventory" (OuterVolumeSpecName: "inventory") pod "dd325333-0db8-4cec-8224-89aa1c81cfca" (UID: "dd325333-0db8-4cec-8224-89aa1c81cfca"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:06:30 crc kubenswrapper[4558]: I0120 18:06:30.844653 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3f1f313a-0685-4de7-b389-fc57f2b52d7f-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:06:30 crc kubenswrapper[4558]: I0120 18:06:30.844684 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/dd325333-0db8-4cec-8224-89aa1c81cfca-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:06:30 crc kubenswrapper[4558]: I0120 18:06:30.844697 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-edpm-compute-beta-nodeset\" (UniqueName: \"kubernetes.io/secret/3f1f313a-0685-4de7-b389-fc57f2b52d7f-ssh-key-edpm-compute-beta-nodeset\") on node \"crc\" DevicePath \"\"" Jan 20 18:06:30 crc kubenswrapper[4558]: I0120 18:06:30.844715 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-74t8m\" (UniqueName: \"kubernetes.io/projected/3f1f313a-0685-4de7-b389-fc57f2b52d7f-kube-api-access-74t8m\") on node \"crc\" DevicePath \"\"" Jan 20 18:06:30 crc kubenswrapper[4558]: I0120 18:06:30.844728 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/dd325333-0db8-4cec-8224-89aa1c81cfca-ssh-key-edpm-compute-no-nodes\") on node \"crc\" DevicePath \"\"" Jan 20 18:06:30 crc kubenswrapper[4558]: I0120 18:06:30.844739 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mdt25\" (UniqueName: \"kubernetes.io/projected/dd325333-0db8-4cec-8224-89aa1c81cfca-kube-api-access-mdt25\") on node \"crc\" DevicePath \"\"" Jan 20 18:06:31 crc kubenswrapper[4558]: I0120 18:06:31.333327 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-beta-nodeset4f842" event={"ID":"3f1f313a-0685-4de7-b389-fc57f2b52d7f","Type":"ContainerDied","Data":"e4efbd72b62d30ab99031963f27872fe4cb97f4d76bc3ed69c9bb5f4f46190a8"} Jan 20 18:06:31 crc kubenswrapper[4558]: I0120 18:06:31.333368 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-beta-nodeset4f842" Jan 20 18:06:31 crc kubenswrapper[4558]: I0120 18:06:31.333397 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e4efbd72b62d30ab99031963f27872fe4cb97f4d76bc3ed69c9bb5f4f46190a8" Jan 20 18:06:31 crc kubenswrapper[4558]: I0120 18:06:31.338194 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-no-nodes-lrxfd" event={"ID":"dd325333-0db8-4cec-8224-89aa1c81cfca","Type":"ContainerDied","Data":"aec04e765936b1e371a1be16807e533e15dcb1ecae41ba52f866fa0dc4e37e12"} Jan 20 18:06:31 crc kubenswrapper[4558]: I0120 18:06:31.338272 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="aec04e765936b1e371a1be16807e533e15dcb1ecae41ba52f866fa0dc4e37e12" Jan 20 18:06:31 crc kubenswrapper[4558]: I0120 18:06:31.338373 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-no-nodes-lrxfd" Jan 20 18:06:31 crc kubenswrapper[4558]: I0120 18:06:31.372074 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-no-nodes-ndqgs"] Jan 20 18:06:31 crc kubenswrapper[4558]: E0120 18:06:31.372594 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f1f313a-0685-4de7-b389-fc57f2b52d7f" containerName="download-cache-edpm-multinodeset-edpm-compute-beta-nodeset" Jan 20 18:06:31 crc kubenswrapper[4558]: I0120 18:06:31.372618 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f1f313a-0685-4de7-b389-fc57f2b52d7f" containerName="download-cache-edpm-multinodeset-edpm-compute-beta-nodeset" Jan 20 18:06:31 crc kubenswrapper[4558]: E0120 18:06:31.372655 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd325333-0db8-4cec-8224-89aa1c81cfca" containerName="download-cache-edpm-multinodeset-edpm-compute-no-nodes" Jan 20 18:06:31 crc kubenswrapper[4558]: I0120 18:06:31.372667 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd325333-0db8-4cec-8224-89aa1c81cfca" containerName="download-cache-edpm-multinodeset-edpm-compute-no-nodes" Jan 20 18:06:31 crc kubenswrapper[4558]: I0120 18:06:31.372931 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="3f1f313a-0685-4de7-b389-fc57f2b52d7f" containerName="download-cache-edpm-multinodeset-edpm-compute-beta-nodeset" Jan 20 18:06:31 crc kubenswrapper[4558]: I0120 18:06:31.372966 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd325333-0db8-4cec-8224-89aa1c81cfca" containerName="download-cache-edpm-multinodeset-edpm-compute-no-nodes" Jan 20 18:06:31 crc kubenswrapper[4558]: I0120 18:06:31.373905 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-no-nodes-ndqgs" Jan 20 18:06:31 crc kubenswrapper[4558]: I0120 18:06:31.376418 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"combined-ca-bundle" Jan 20 18:06:31 crc kubenswrapper[4558]: I0120 18:06:31.376518 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"edpm-compute-no-nodes-dockercfg-tljmj" Jan 20 18:06:31 crc kubenswrapper[4558]: I0120 18:06:31.376523 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 18:06:31 crc kubenswrapper[4558]: I0120 18:06:31.376625 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-edpm-compute-no-nodes" Jan 20 18:06:31 crc kubenswrapper[4558]: I0120 18:06:31.376713 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 18:06:31 crc kubenswrapper[4558]: I0120 18:06:31.384772 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-no-nodes-ndqgs"] Jan 20 18:06:31 crc kubenswrapper[4558]: I0120 18:06:31.394098 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-lh947"] Jan 20 18:06:31 crc kubenswrapper[4558]: I0120 18:06:31.395021 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-lh947" Jan 20 18:06:31 crc kubenswrapper[4558]: I0120 18:06:31.398804 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"edpm-compute-beta-nodeset-dockercfg-xmthb" Jan 20 18:06:31 crc kubenswrapper[4558]: I0120 18:06:31.399017 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-edpm-compute-beta-nodeset" Jan 20 18:06:31 crc kubenswrapper[4558]: I0120 18:06:31.408993 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-lh947"] Jan 20 18:06:31 crc kubenswrapper[4558]: I0120 18:06:31.454336 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/26fe7558-d4f3-4699-b772-138db97df828-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-multinodeset-edpm-compute-no-nodes-ndqgs\" (UID: \"26fe7558-d4f3-4699-b772-138db97df828\") " pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-no-nodes-ndqgs" Jan 20 18:06:31 crc kubenswrapper[4558]: I0120 18:06:31.454405 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gst6q\" (UniqueName: \"kubernetes.io/projected/62c94a76-dced-48dd-9d0b-add9b0a76a81-kube-api-access-gst6q\") pod \"bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-lh947\" (UID: \"62c94a76-dced-48dd-9d0b-add9b0a76a81\") " pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-lh947" Jan 20 18:06:31 crc kubenswrapper[4558]: I0120 18:06:31.454474 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-edpm-compute-beta-nodeset\" (UniqueName: \"kubernetes.io/secret/62c94a76-dced-48dd-9d0b-add9b0a76a81-ssh-key-edpm-compute-beta-nodeset\") pod \"bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-lh947\" (UID: \"62c94a76-dced-48dd-9d0b-add9b0a76a81\") " pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-lh947" Jan 20 18:06:31 crc kubenswrapper[4558]: I0120 18:06:31.454549 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/62c94a76-dced-48dd-9d0b-add9b0a76a81-inventory\") pod \"bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-lh947\" (UID: \"62c94a76-dced-48dd-9d0b-add9b0a76a81\") " pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-lh947" Jan 20 18:06:31 crc kubenswrapper[4558]: I0120 18:06:31.454600 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/26fe7558-d4f3-4699-b772-138db97df828-ssh-key-edpm-compute-no-nodes\") pod \"bootstrap-edpm-multinodeset-edpm-compute-no-nodes-ndqgs\" (UID: \"26fe7558-d4f3-4699-b772-138db97df828\") " pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-no-nodes-ndqgs" Jan 20 18:06:31 crc kubenswrapper[4558]: I0120 18:06:31.454662 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cmvpj\" (UniqueName: \"kubernetes.io/projected/26fe7558-d4f3-4699-b772-138db97df828-kube-api-access-cmvpj\") pod \"bootstrap-edpm-multinodeset-edpm-compute-no-nodes-ndqgs\" 
(UID: \"26fe7558-d4f3-4699-b772-138db97df828\") " pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-no-nodes-ndqgs" Jan 20 18:06:31 crc kubenswrapper[4558]: I0120 18:06:31.454704 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/26fe7558-d4f3-4699-b772-138db97df828-inventory\") pod \"bootstrap-edpm-multinodeset-edpm-compute-no-nodes-ndqgs\" (UID: \"26fe7558-d4f3-4699-b772-138db97df828\") " pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-no-nodes-ndqgs" Jan 20 18:06:31 crc kubenswrapper[4558]: I0120 18:06:31.454819 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62c94a76-dced-48dd-9d0b-add9b0a76a81-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-lh947\" (UID: \"62c94a76-dced-48dd-9d0b-add9b0a76a81\") " pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-lh947" Jan 20 18:06:31 crc kubenswrapper[4558]: I0120 18:06:31.556017 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/62c94a76-dced-48dd-9d0b-add9b0a76a81-inventory\") pod \"bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-lh947\" (UID: \"62c94a76-dced-48dd-9d0b-add9b0a76a81\") " pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-lh947" Jan 20 18:06:31 crc kubenswrapper[4558]: I0120 18:06:31.556088 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/26fe7558-d4f3-4699-b772-138db97df828-ssh-key-edpm-compute-no-nodes\") pod \"bootstrap-edpm-multinodeset-edpm-compute-no-nodes-ndqgs\" (UID: \"26fe7558-d4f3-4699-b772-138db97df828\") " pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-no-nodes-ndqgs" Jan 20 18:06:31 crc kubenswrapper[4558]: I0120 18:06:31.556137 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cmvpj\" (UniqueName: \"kubernetes.io/projected/26fe7558-d4f3-4699-b772-138db97df828-kube-api-access-cmvpj\") pod \"bootstrap-edpm-multinodeset-edpm-compute-no-nodes-ndqgs\" (UID: \"26fe7558-d4f3-4699-b772-138db97df828\") " pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-no-nodes-ndqgs" Jan 20 18:06:31 crc kubenswrapper[4558]: I0120 18:06:31.556181 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/26fe7558-d4f3-4699-b772-138db97df828-inventory\") pod \"bootstrap-edpm-multinodeset-edpm-compute-no-nodes-ndqgs\" (UID: \"26fe7558-d4f3-4699-b772-138db97df828\") " pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-no-nodes-ndqgs" Jan 20 18:06:31 crc kubenswrapper[4558]: I0120 18:06:31.556220 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62c94a76-dced-48dd-9d0b-add9b0a76a81-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-lh947\" (UID: \"62c94a76-dced-48dd-9d0b-add9b0a76a81\") " pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-lh947" Jan 20 18:06:31 crc kubenswrapper[4558]: I0120 18:06:31.556286 4558 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/26fe7558-d4f3-4699-b772-138db97df828-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-multinodeset-edpm-compute-no-nodes-ndqgs\" (UID: \"26fe7558-d4f3-4699-b772-138db97df828\") " pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-no-nodes-ndqgs" Jan 20 18:06:31 crc kubenswrapper[4558]: I0120 18:06:31.556318 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gst6q\" (UniqueName: \"kubernetes.io/projected/62c94a76-dced-48dd-9d0b-add9b0a76a81-kube-api-access-gst6q\") pod \"bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-lh947\" (UID: \"62c94a76-dced-48dd-9d0b-add9b0a76a81\") " pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-lh947" Jan 20 18:06:31 crc kubenswrapper[4558]: I0120 18:06:31.556346 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-edpm-compute-beta-nodeset\" (UniqueName: \"kubernetes.io/secret/62c94a76-dced-48dd-9d0b-add9b0a76a81-ssh-key-edpm-compute-beta-nodeset\") pod \"bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-lh947\" (UID: \"62c94a76-dced-48dd-9d0b-add9b0a76a81\") " pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-lh947" Jan 20 18:06:31 crc kubenswrapper[4558]: I0120 18:06:31.560653 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62c94a76-dced-48dd-9d0b-add9b0a76a81-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-lh947\" (UID: \"62c94a76-dced-48dd-9d0b-add9b0a76a81\") " pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-lh947" Jan 20 18:06:31 crc kubenswrapper[4558]: I0120 18:06:31.560699 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/26fe7558-d4f3-4699-b772-138db97df828-ssh-key-edpm-compute-no-nodes\") pod \"bootstrap-edpm-multinodeset-edpm-compute-no-nodes-ndqgs\" (UID: \"26fe7558-d4f3-4699-b772-138db97df828\") " pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-no-nodes-ndqgs" Jan 20 18:06:31 crc kubenswrapper[4558]: I0120 18:06:31.561139 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/26fe7558-d4f3-4699-b772-138db97df828-inventory\") pod \"bootstrap-edpm-multinodeset-edpm-compute-no-nodes-ndqgs\" (UID: \"26fe7558-d4f3-4699-b772-138db97df828\") " pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-no-nodes-ndqgs" Jan 20 18:06:31 crc kubenswrapper[4558]: I0120 18:06:31.561471 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/62c94a76-dced-48dd-9d0b-add9b0a76a81-inventory\") pod \"bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-lh947\" (UID: \"62c94a76-dced-48dd-9d0b-add9b0a76a81\") " pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-lh947" Jan 20 18:06:31 crc kubenswrapper[4558]: I0120 18:06:31.561780 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/26fe7558-d4f3-4699-b772-138db97df828-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-multinodeset-edpm-compute-no-nodes-ndqgs\" (UID: 
\"26fe7558-d4f3-4699-b772-138db97df828\") " pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-no-nodes-ndqgs" Jan 20 18:06:31 crc kubenswrapper[4558]: I0120 18:06:31.561870 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-edpm-compute-beta-nodeset\" (UniqueName: \"kubernetes.io/secret/62c94a76-dced-48dd-9d0b-add9b0a76a81-ssh-key-edpm-compute-beta-nodeset\") pod \"bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-lh947\" (UID: \"62c94a76-dced-48dd-9d0b-add9b0a76a81\") " pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-lh947" Jan 20 18:06:31 crc kubenswrapper[4558]: I0120 18:06:31.569904 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cmvpj\" (UniqueName: \"kubernetes.io/projected/26fe7558-d4f3-4699-b772-138db97df828-kube-api-access-cmvpj\") pod \"bootstrap-edpm-multinodeset-edpm-compute-no-nodes-ndqgs\" (UID: \"26fe7558-d4f3-4699-b772-138db97df828\") " pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-no-nodes-ndqgs" Jan 20 18:06:31 crc kubenswrapper[4558]: I0120 18:06:31.571015 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gst6q\" (UniqueName: \"kubernetes.io/projected/62c94a76-dced-48dd-9d0b-add9b0a76a81-kube-api-access-gst6q\") pod \"bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-lh947\" (UID: \"62c94a76-dced-48dd-9d0b-add9b0a76a81\") " pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-lh947" Jan 20 18:06:31 crc kubenswrapper[4558]: I0120 18:06:31.579982 4558 scope.go:117] "RemoveContainer" containerID="a9b20114fcbec2fe8dacb3c269b720dd7f56c474c36782e17d4c5ce75e0f68db" Jan 20 18:06:31 crc kubenswrapper[4558]: I0120 18:06:31.616026 4558 scope.go:117] "RemoveContainer" containerID="1c439f89c744152402cb33f807cfdfc137423ba15d981e9a3577d1ec69c10d38" Jan 20 18:06:31 crc kubenswrapper[4558]: I0120 18:06:31.637661 4558 scope.go:117] "RemoveContainer" containerID="f3480dff1283cc8b54d75770a199bbd3ab7c2e7281d8214dc64c90ffcc63d0c2" Jan 20 18:06:31 crc kubenswrapper[4558]: I0120 18:06:31.655447 4558 scope.go:117] "RemoveContainer" containerID="bf1073626acf9931e1253c7cee3519239421ae4bd985a32c838e184014ba91cd" Jan 20 18:06:31 crc kubenswrapper[4558]: I0120 18:06:31.676453 4558 scope.go:117] "RemoveContainer" containerID="ed8fbbf22228e4cf6c5612b4ee404afc61a0ce4a54631e2454f7c9637ff8c312" Jan 20 18:06:31 crc kubenswrapper[4558]: I0120 18:06:31.690222 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-no-nodes-ndqgs" Jan 20 18:06:31 crc kubenswrapper[4558]: I0120 18:06:31.699525 4558 scope.go:117] "RemoveContainer" containerID="74a7d32f8723f55fc502de179d8dd728dfde29b93a7ef96158a0b8cd466e4b57" Jan 20 18:06:31 crc kubenswrapper[4558]: I0120 18:06:31.705634 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-lh947" Jan 20 18:06:31 crc kubenswrapper[4558]: I0120 18:06:31.732124 4558 scope.go:117] "RemoveContainer" containerID="ce204e839c11f34c659e55d45c47dba075ea61f30450f3a9bb1d8ed89d47c69f" Jan 20 18:06:32 crc kubenswrapper[4558]: I0120 18:06:32.107244 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-no-nodes-ndqgs"] Jan 20 18:06:32 crc kubenswrapper[4558]: I0120 18:06:32.158907 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-lh947"] Jan 20 18:06:32 crc kubenswrapper[4558]: W0120 18:06:32.498077 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod26fe7558_d4f3_4699_b772_138db97df828.slice/crio-ed3f3b54c683829d717a34cd8f5c7902d6df22fa3143244f8cf18608929aa1c3 WatchSource:0}: Error finding container ed3f3b54c683829d717a34cd8f5c7902d6df22fa3143244f8cf18608929aa1c3: Status 404 returned error can't find the container with id ed3f3b54c683829d717a34cd8f5c7902d6df22fa3143244f8cf18608929aa1c3 Jan 20 18:06:32 crc kubenswrapper[4558]: W0120 18:06:32.498953 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod62c94a76_dced_48dd_9d0b_add9b0a76a81.slice/crio-e9064e50635166cd186ae13172f8ce1fdda4dc7618e6763ed7363cdb6bf781b1 WatchSource:0}: Error finding container e9064e50635166cd186ae13172f8ce1fdda4dc7618e6763ed7363cdb6bf781b1: Status 404 returned error can't find the container with id e9064e50635166cd186ae13172f8ce1fdda4dc7618e6763ed7363cdb6bf781b1 Jan 20 18:06:33 crc kubenswrapper[4558]: I0120 18:06:33.374937 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-lh947" event={"ID":"62c94a76-dced-48dd-9d0b-add9b0a76a81","Type":"ContainerStarted","Data":"6fa574d56a21423d1f991b5e4446614a49a0945522c92b50da02bef52461ba54"} Jan 20 18:06:33 crc kubenswrapper[4558]: I0120 18:06:33.375267 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-lh947" event={"ID":"62c94a76-dced-48dd-9d0b-add9b0a76a81","Type":"ContainerStarted","Data":"e9064e50635166cd186ae13172f8ce1fdda4dc7618e6763ed7363cdb6bf781b1"} Jan 20 18:06:33 crc kubenswrapper[4558]: I0120 18:06:33.377622 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-no-nodes-ndqgs" event={"ID":"26fe7558-d4f3-4699-b772-138db97df828","Type":"ContainerStarted","Data":"3c6a200a4e82ee7b54601eab3dc4e80432bfa9c68c2b524c193e9a8aa3493ae8"} Jan 20 18:06:33 crc kubenswrapper[4558]: I0120 18:06:33.377685 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-no-nodes-ndqgs" event={"ID":"26fe7558-d4f3-4699-b772-138db97df828","Type":"ContainerStarted","Data":"ed3f3b54c683829d717a34cd8f5c7902d6df22fa3143244f8cf18608929aa1c3"} Jan 20 18:06:33 crc kubenswrapper[4558]: I0120 18:06:33.394759 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-lh947" podStartSLOduration=1.856391344 podStartE2EDuration="2.394743403s" podCreationTimestamp="2026-01-20 18:06:31 +0000 UTC" 
firstStartedPulling="2026-01-20 18:06:32.501079248 +0000 UTC m=+5086.261417214" lastFinishedPulling="2026-01-20 18:06:33.039431305 +0000 UTC m=+5086.799769273" observedRunningTime="2026-01-20 18:06:33.38951932 +0000 UTC m=+5087.149857288" watchObservedRunningTime="2026-01-20 18:06:33.394743403 +0000 UTC m=+5087.155081371" Jan 20 18:06:33 crc kubenswrapper[4558]: I0120 18:06:33.403901 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-no-nodes-ndqgs" podStartSLOduration=1.866049624 podStartE2EDuration="2.403884169s" podCreationTimestamp="2026-01-20 18:06:31 +0000 UTC" firstStartedPulling="2026-01-20 18:06:32.50077209 +0000 UTC m=+5086.261110067" lastFinishedPulling="2026-01-20 18:06:33.038606645 +0000 UTC m=+5086.798944612" observedRunningTime="2026-01-20 18:06:33.402820048 +0000 UTC m=+5087.163158016" watchObservedRunningTime="2026-01-20 18:06:33.403884169 +0000 UTC m=+5087.164222136" Jan 20 18:06:35 crc kubenswrapper[4558]: I0120 18:06:35.399560 4558 generic.go:334] "Generic (PLEG): container finished" podID="62c94a76-dced-48dd-9d0b-add9b0a76a81" containerID="6fa574d56a21423d1f991b5e4446614a49a0945522c92b50da02bef52461ba54" exitCode=0 Jan 20 18:06:35 crc kubenswrapper[4558]: I0120 18:06:35.399649 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-lh947" event={"ID":"62c94a76-dced-48dd-9d0b-add9b0a76a81","Type":"ContainerDied","Data":"6fa574d56a21423d1f991b5e4446614a49a0945522c92b50da02bef52461ba54"} Jan 20 18:06:35 crc kubenswrapper[4558]: I0120 18:06:35.402516 4558 generic.go:334] "Generic (PLEG): container finished" podID="26fe7558-d4f3-4699-b772-138db97df828" containerID="3c6a200a4e82ee7b54601eab3dc4e80432bfa9c68c2b524c193e9a8aa3493ae8" exitCode=0 Jan 20 18:06:35 crc kubenswrapper[4558]: I0120 18:06:35.402569 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-no-nodes-ndqgs" event={"ID":"26fe7558-d4f3-4699-b772-138db97df828","Type":"ContainerDied","Data":"3c6a200a4e82ee7b54601eab3dc4e80432bfa9c68c2b524c193e9a8aa3493ae8"} Jan 20 18:06:36 crc kubenswrapper[4558]: I0120 18:06:36.696612 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-no-nodes-ndqgs" Jan 20 18:06:36 crc kubenswrapper[4558]: I0120 18:06:36.701237 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-lh947" Jan 20 18:06:36 crc kubenswrapper[4558]: I0120 18:06:36.831421 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62c94a76-dced-48dd-9d0b-add9b0a76a81-bootstrap-combined-ca-bundle\") pod \"62c94a76-dced-48dd-9d0b-add9b0a76a81\" (UID: \"62c94a76-dced-48dd-9d0b-add9b0a76a81\") " Jan 20 18:06:36 crc kubenswrapper[4558]: I0120 18:06:36.831505 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gst6q\" (UniqueName: \"kubernetes.io/projected/62c94a76-dced-48dd-9d0b-add9b0a76a81-kube-api-access-gst6q\") pod \"62c94a76-dced-48dd-9d0b-add9b0a76a81\" (UID: \"62c94a76-dced-48dd-9d0b-add9b0a76a81\") " Jan 20 18:06:36 crc kubenswrapper[4558]: I0120 18:06:36.831799 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/62c94a76-dced-48dd-9d0b-add9b0a76a81-inventory\") pod \"62c94a76-dced-48dd-9d0b-add9b0a76a81\" (UID: \"62c94a76-dced-48dd-9d0b-add9b0a76a81\") " Jan 20 18:06:36 crc kubenswrapper[4558]: I0120 18:06:36.831899 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/26fe7558-d4f3-4699-b772-138db97df828-ssh-key-edpm-compute-no-nodes\") pod \"26fe7558-d4f3-4699-b772-138db97df828\" (UID: \"26fe7558-d4f3-4699-b772-138db97df828\") " Jan 20 18:06:36 crc kubenswrapper[4558]: I0120 18:06:36.831928 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-beta-nodeset\" (UniqueName: \"kubernetes.io/secret/62c94a76-dced-48dd-9d0b-add9b0a76a81-ssh-key-edpm-compute-beta-nodeset\") pod \"62c94a76-dced-48dd-9d0b-add9b0a76a81\" (UID: \"62c94a76-dced-48dd-9d0b-add9b0a76a81\") " Jan 20 18:06:36 crc kubenswrapper[4558]: I0120 18:06:36.831968 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/26fe7558-d4f3-4699-b772-138db97df828-inventory\") pod \"26fe7558-d4f3-4699-b772-138db97df828\" (UID: \"26fe7558-d4f3-4699-b772-138db97df828\") " Jan 20 18:06:36 crc kubenswrapper[4558]: I0120 18:06:36.832035 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cmvpj\" (UniqueName: \"kubernetes.io/projected/26fe7558-d4f3-4699-b772-138db97df828-kube-api-access-cmvpj\") pod \"26fe7558-d4f3-4699-b772-138db97df828\" (UID: \"26fe7558-d4f3-4699-b772-138db97df828\") " Jan 20 18:06:36 crc kubenswrapper[4558]: I0120 18:06:36.832084 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/26fe7558-d4f3-4699-b772-138db97df828-bootstrap-combined-ca-bundle\") pod \"26fe7558-d4f3-4699-b772-138db97df828\" (UID: \"26fe7558-d4f3-4699-b772-138db97df828\") " Jan 20 18:06:36 crc kubenswrapper[4558]: I0120 18:06:36.839432 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/62c94a76-dced-48dd-9d0b-add9b0a76a81-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "62c94a76-dced-48dd-9d0b-add9b0a76a81" (UID: "62c94a76-dced-48dd-9d0b-add9b0a76a81"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:06:36 crc kubenswrapper[4558]: I0120 18:06:36.839601 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/26fe7558-d4f3-4699-b772-138db97df828-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "26fe7558-d4f3-4699-b772-138db97df828" (UID: "26fe7558-d4f3-4699-b772-138db97df828"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:06:36 crc kubenswrapper[4558]: I0120 18:06:36.839805 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/26fe7558-d4f3-4699-b772-138db97df828-kube-api-access-cmvpj" (OuterVolumeSpecName: "kube-api-access-cmvpj") pod "26fe7558-d4f3-4699-b772-138db97df828" (UID: "26fe7558-d4f3-4699-b772-138db97df828"). InnerVolumeSpecName "kube-api-access-cmvpj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:06:36 crc kubenswrapper[4558]: I0120 18:06:36.840088 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/62c94a76-dced-48dd-9d0b-add9b0a76a81-kube-api-access-gst6q" (OuterVolumeSpecName: "kube-api-access-gst6q") pod "62c94a76-dced-48dd-9d0b-add9b0a76a81" (UID: "62c94a76-dced-48dd-9d0b-add9b0a76a81"). InnerVolumeSpecName "kube-api-access-gst6q". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:06:36 crc kubenswrapper[4558]: I0120 18:06:36.854131 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/62c94a76-dced-48dd-9d0b-add9b0a76a81-ssh-key-edpm-compute-beta-nodeset" (OuterVolumeSpecName: "ssh-key-edpm-compute-beta-nodeset") pod "62c94a76-dced-48dd-9d0b-add9b0a76a81" (UID: "62c94a76-dced-48dd-9d0b-add9b0a76a81"). InnerVolumeSpecName "ssh-key-edpm-compute-beta-nodeset". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:06:36 crc kubenswrapper[4558]: I0120 18:06:36.854398 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/26fe7558-d4f3-4699-b772-138db97df828-inventory" (OuterVolumeSpecName: "inventory") pod "26fe7558-d4f3-4699-b772-138db97df828" (UID: "26fe7558-d4f3-4699-b772-138db97df828"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:06:36 crc kubenswrapper[4558]: I0120 18:06:36.854505 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/26fe7558-d4f3-4699-b772-138db97df828-ssh-key-edpm-compute-no-nodes" (OuterVolumeSpecName: "ssh-key-edpm-compute-no-nodes") pod "26fe7558-d4f3-4699-b772-138db97df828" (UID: "26fe7558-d4f3-4699-b772-138db97df828"). InnerVolumeSpecName "ssh-key-edpm-compute-no-nodes". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:06:36 crc kubenswrapper[4558]: I0120 18:06:36.854814 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/62c94a76-dced-48dd-9d0b-add9b0a76a81-inventory" (OuterVolumeSpecName: "inventory") pod "62c94a76-dced-48dd-9d0b-add9b0a76a81" (UID: "62c94a76-dced-48dd-9d0b-add9b0a76a81"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:06:36 crc kubenswrapper[4558]: I0120 18:06:36.934940 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/62c94a76-dced-48dd-9d0b-add9b0a76a81-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:06:36 crc kubenswrapper[4558]: I0120 18:06:36.934976 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/26fe7558-d4f3-4699-b772-138db97df828-ssh-key-edpm-compute-no-nodes\") on node \"crc\" DevicePath \"\"" Jan 20 18:06:36 crc kubenswrapper[4558]: I0120 18:06:36.934994 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-edpm-compute-beta-nodeset\" (UniqueName: \"kubernetes.io/secret/62c94a76-dced-48dd-9d0b-add9b0a76a81-ssh-key-edpm-compute-beta-nodeset\") on node \"crc\" DevicePath \"\"" Jan 20 18:06:36 crc kubenswrapper[4558]: I0120 18:06:36.935006 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/26fe7558-d4f3-4699-b772-138db97df828-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:06:36 crc kubenswrapper[4558]: I0120 18:06:36.935019 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cmvpj\" (UniqueName: \"kubernetes.io/projected/26fe7558-d4f3-4699-b772-138db97df828-kube-api-access-cmvpj\") on node \"crc\" DevicePath \"\"" Jan 20 18:06:36 crc kubenswrapper[4558]: I0120 18:06:36.935028 4558 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/26fe7558-d4f3-4699-b772-138db97df828-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:06:36 crc kubenswrapper[4558]: I0120 18:06:36.935039 4558 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62c94a76-dced-48dd-9d0b-add9b0a76a81-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:06:36 crc kubenswrapper[4558]: I0120 18:06:36.935050 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gst6q\" (UniqueName: \"kubernetes.io/projected/62c94a76-dced-48dd-9d0b-add9b0a76a81-kube-api-access-gst6q\") on node \"crc\" DevicePath \"\"" Jan 20 18:06:37 crc kubenswrapper[4558]: I0120 18:06:37.421414 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-lh947" event={"ID":"62c94a76-dced-48dd-9d0b-add9b0a76a81","Type":"ContainerDied","Data":"e9064e50635166cd186ae13172f8ce1fdda4dc7618e6763ed7363cdb6bf781b1"} Jan 20 18:06:37 crc kubenswrapper[4558]: I0120 18:06:37.421468 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e9064e50635166cd186ae13172f8ce1fdda4dc7618e6763ed7363cdb6bf781b1" Jan 20 18:06:37 crc kubenswrapper[4558]: I0120 18:06:37.421476 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-lh947" Jan 20 18:06:37 crc kubenswrapper[4558]: I0120 18:06:37.424020 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-no-nodes-ndqgs" event={"ID":"26fe7558-d4f3-4699-b772-138db97df828","Type":"ContainerDied","Data":"ed3f3b54c683829d717a34cd8f5c7902d6df22fa3143244f8cf18608929aa1c3"} Jan 20 18:06:37 crc kubenswrapper[4558]: I0120 18:06:37.424073 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-no-nodes-ndqgs" Jan 20 18:06:37 crc kubenswrapper[4558]: I0120 18:06:37.424077 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ed3f3b54c683829d717a34cd8f5c7902d6df22fa3143244f8cf18608929aa1c3" Jan 20 18:06:37 crc kubenswrapper[4558]: I0120 18:06:37.486326 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/configure-network-edpm-multinodeset-edpm-compute-no-nodes-6rm52"] Jan 20 18:06:37 crc kubenswrapper[4558]: E0120 18:06:37.486682 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62c94a76-dced-48dd-9d0b-add9b0a76a81" containerName="bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset" Jan 20 18:06:37 crc kubenswrapper[4558]: I0120 18:06:37.486702 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="62c94a76-dced-48dd-9d0b-add9b0a76a81" containerName="bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset" Jan 20 18:06:37 crc kubenswrapper[4558]: E0120 18:06:37.486730 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26fe7558-d4f3-4699-b772-138db97df828" containerName="bootstrap-edpm-multinodeset-edpm-compute-no-nodes" Jan 20 18:06:37 crc kubenswrapper[4558]: I0120 18:06:37.486737 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="26fe7558-d4f3-4699-b772-138db97df828" containerName="bootstrap-edpm-multinodeset-edpm-compute-no-nodes" Jan 20 18:06:37 crc kubenswrapper[4558]: I0120 18:06:37.486912 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="26fe7558-d4f3-4699-b772-138db97df828" containerName="bootstrap-edpm-multinodeset-edpm-compute-no-nodes" Jan 20 18:06:37 crc kubenswrapper[4558]: I0120 18:06:37.486933 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="62c94a76-dced-48dd-9d0b-add9b0a76a81" containerName="bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset" Jan 20 18:06:37 crc kubenswrapper[4558]: I0120 18:06:37.487472 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/configure-network-edpm-multinodeset-edpm-compute-no-nodes-6rm52" Jan 20 18:06:37 crc kubenswrapper[4558]: I0120 18:06:37.489319 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-edpm-compute-no-nodes" Jan 20 18:06:37 crc kubenswrapper[4558]: I0120 18:06:37.491668 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 18:06:37 crc kubenswrapper[4558]: I0120 18:06:37.492190 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 18:06:37 crc kubenswrapper[4558]: I0120 18:06:37.492362 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"edpm-compute-no-nodes-dockercfg-tljmj" Jan 20 18:06:37 crc kubenswrapper[4558]: I0120 18:06:37.499020 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/configure-network-edpm-multinodeset-edpm-compute-no-nodes-6rm52"] Jan 20 18:06:37 crc kubenswrapper[4558]: I0120 18:06:37.643651 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/955714d3-c5b2-49b4-ae36-97c534bc79ea-ssh-key-edpm-compute-no-nodes\") pod \"configure-network-edpm-multinodeset-edpm-compute-no-nodes-6rm52\" (UID: \"955714d3-c5b2-49b4-ae36-97c534bc79ea\") " pod="openstack-kuttl-tests/configure-network-edpm-multinodeset-edpm-compute-no-nodes-6rm52" Jan 20 18:06:37 crc kubenswrapper[4558]: I0120 18:06:37.643963 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n7vbx\" (UniqueName: \"kubernetes.io/projected/955714d3-c5b2-49b4-ae36-97c534bc79ea-kube-api-access-n7vbx\") pod \"configure-network-edpm-multinodeset-edpm-compute-no-nodes-6rm52\" (UID: \"955714d3-c5b2-49b4-ae36-97c534bc79ea\") " pod="openstack-kuttl-tests/configure-network-edpm-multinodeset-edpm-compute-no-nodes-6rm52" Jan 20 18:06:37 crc kubenswrapper[4558]: I0120 18:06:37.644454 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/955714d3-c5b2-49b4-ae36-97c534bc79ea-inventory\") pod \"configure-network-edpm-multinodeset-edpm-compute-no-nodes-6rm52\" (UID: \"955714d3-c5b2-49b4-ae36-97c534bc79ea\") " pod="openstack-kuttl-tests/configure-network-edpm-multinodeset-edpm-compute-no-nodes-6rm52" Jan 20 18:06:37 crc kubenswrapper[4558]: I0120 18:06:37.745189 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/955714d3-c5b2-49b4-ae36-97c534bc79ea-ssh-key-edpm-compute-no-nodes\") pod \"configure-network-edpm-multinodeset-edpm-compute-no-nodes-6rm52\" (UID: \"955714d3-c5b2-49b4-ae36-97c534bc79ea\") " pod="openstack-kuttl-tests/configure-network-edpm-multinodeset-edpm-compute-no-nodes-6rm52" Jan 20 18:06:37 crc kubenswrapper[4558]: I0120 18:06:37.745259 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n7vbx\" (UniqueName: \"kubernetes.io/projected/955714d3-c5b2-49b4-ae36-97c534bc79ea-kube-api-access-n7vbx\") pod \"configure-network-edpm-multinodeset-edpm-compute-no-nodes-6rm52\" (UID: \"955714d3-c5b2-49b4-ae36-97c534bc79ea\") " 
pod="openstack-kuttl-tests/configure-network-edpm-multinodeset-edpm-compute-no-nodes-6rm52" Jan 20 18:06:37 crc kubenswrapper[4558]: I0120 18:06:37.745307 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/955714d3-c5b2-49b4-ae36-97c534bc79ea-inventory\") pod \"configure-network-edpm-multinodeset-edpm-compute-no-nodes-6rm52\" (UID: \"955714d3-c5b2-49b4-ae36-97c534bc79ea\") " pod="openstack-kuttl-tests/configure-network-edpm-multinodeset-edpm-compute-no-nodes-6rm52" Jan 20 18:06:37 crc kubenswrapper[4558]: I0120 18:06:37.749656 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/955714d3-c5b2-49b4-ae36-97c534bc79ea-ssh-key-edpm-compute-no-nodes\") pod \"configure-network-edpm-multinodeset-edpm-compute-no-nodes-6rm52\" (UID: \"955714d3-c5b2-49b4-ae36-97c534bc79ea\") " pod="openstack-kuttl-tests/configure-network-edpm-multinodeset-edpm-compute-no-nodes-6rm52" Jan 20 18:06:37 crc kubenswrapper[4558]: I0120 18:06:37.749701 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/955714d3-c5b2-49b4-ae36-97c534bc79ea-inventory\") pod \"configure-network-edpm-multinodeset-edpm-compute-no-nodes-6rm52\" (UID: \"955714d3-c5b2-49b4-ae36-97c534bc79ea\") " pod="openstack-kuttl-tests/configure-network-edpm-multinodeset-edpm-compute-no-nodes-6rm52" Jan 20 18:06:37 crc kubenswrapper[4558]: I0120 18:06:37.760822 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n7vbx\" (UniqueName: \"kubernetes.io/projected/955714d3-c5b2-49b4-ae36-97c534bc79ea-kube-api-access-n7vbx\") pod \"configure-network-edpm-multinodeset-edpm-compute-no-nodes-6rm52\" (UID: \"955714d3-c5b2-49b4-ae36-97c534bc79ea\") " pod="openstack-kuttl-tests/configure-network-edpm-multinodeset-edpm-compute-no-nodes-6rm52" Jan 20 18:06:37 crc kubenswrapper[4558]: I0120 18:06:37.802022 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/configure-network-edpm-multinodeset-edpm-compute-no-nodes-6rm52" Jan 20 18:06:38 crc kubenswrapper[4558]: I0120 18:06:38.191407 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/configure-network-edpm-multinodeset-edpm-compute-no-nodes-6rm52"] Jan 20 18:06:38 crc kubenswrapper[4558]: I0120 18:06:38.436892 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/configure-network-edpm-multinodeset-edpm-compute-no-nodes-6rm52" event={"ID":"955714d3-c5b2-49b4-ae36-97c534bc79ea","Type":"ContainerStarted","Data":"4cc0291f48ead7c59c7d46606c3c16603d8931e35cd2f96519cca8e486696f55"} Jan 20 18:06:39 crc kubenswrapper[4558]: I0120 18:06:39.447893 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/configure-network-edpm-multinodeset-edpm-compute-no-nodes-6rm52" event={"ID":"955714d3-c5b2-49b4-ae36-97c534bc79ea","Type":"ContainerStarted","Data":"899816b732eb18cc306c370d7540b48a90ca9cb17541084636e709a22ad32b62"} Jan 20 18:06:39 crc kubenswrapper[4558]: I0120 18:06:39.467337 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/configure-network-edpm-multinodeset-edpm-compute-no-nodes-6rm52" podStartSLOduration=1.891512478 podStartE2EDuration="2.467316233s" podCreationTimestamp="2026-01-20 18:06:37 +0000 UTC" firstStartedPulling="2026-01-20 18:06:38.198798177 +0000 UTC m=+5091.959136145" lastFinishedPulling="2026-01-20 18:06:38.774601932 +0000 UTC m=+5092.534939900" observedRunningTime="2026-01-20 18:06:39.463811503 +0000 UTC m=+5093.224149470" watchObservedRunningTime="2026-01-20 18:06:39.467316233 +0000 UTC m=+5093.227654190" Jan 20 18:06:40 crc kubenswrapper[4558]: I0120 18:06:40.459638 4558 generic.go:334] "Generic (PLEG): container finished" podID="955714d3-c5b2-49b4-ae36-97c534bc79ea" containerID="899816b732eb18cc306c370d7540b48a90ca9cb17541084636e709a22ad32b62" exitCode=0 Jan 20 18:06:40 crc kubenswrapper[4558]: I0120 18:06:40.459707 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/configure-network-edpm-multinodeset-edpm-compute-no-nodes-6rm52" event={"ID":"955714d3-c5b2-49b4-ae36-97c534bc79ea","Type":"ContainerDied","Data":"899816b732eb18cc306c370d7540b48a90ca9cb17541084636e709a22ad32b62"} Jan 20 18:06:41 crc kubenswrapper[4558]: I0120 18:06:41.702731 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/configure-network-edpm-multinodeset-edpm-compute-no-nodes-6rm52" Jan 20 18:06:41 crc kubenswrapper[4558]: I0120 18:06:41.708041 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/955714d3-c5b2-49b4-ae36-97c534bc79ea-ssh-key-edpm-compute-no-nodes\") pod \"955714d3-c5b2-49b4-ae36-97c534bc79ea\" (UID: \"955714d3-c5b2-49b4-ae36-97c534bc79ea\") " Jan 20 18:06:41 crc kubenswrapper[4558]: I0120 18:06:41.708222 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/955714d3-c5b2-49b4-ae36-97c534bc79ea-inventory\") pod \"955714d3-c5b2-49b4-ae36-97c534bc79ea\" (UID: \"955714d3-c5b2-49b4-ae36-97c534bc79ea\") " Jan 20 18:06:41 crc kubenswrapper[4558]: I0120 18:06:41.708273 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n7vbx\" (UniqueName: \"kubernetes.io/projected/955714d3-c5b2-49b4-ae36-97c534bc79ea-kube-api-access-n7vbx\") pod \"955714d3-c5b2-49b4-ae36-97c534bc79ea\" (UID: \"955714d3-c5b2-49b4-ae36-97c534bc79ea\") " Jan 20 18:06:41 crc kubenswrapper[4558]: I0120 18:06:41.717699 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/955714d3-c5b2-49b4-ae36-97c534bc79ea-kube-api-access-n7vbx" (OuterVolumeSpecName: "kube-api-access-n7vbx") pod "955714d3-c5b2-49b4-ae36-97c534bc79ea" (UID: "955714d3-c5b2-49b4-ae36-97c534bc79ea"). InnerVolumeSpecName "kube-api-access-n7vbx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:06:41 crc kubenswrapper[4558]: I0120 18:06:41.729659 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/955714d3-c5b2-49b4-ae36-97c534bc79ea-ssh-key-edpm-compute-no-nodes" (OuterVolumeSpecName: "ssh-key-edpm-compute-no-nodes") pod "955714d3-c5b2-49b4-ae36-97c534bc79ea" (UID: "955714d3-c5b2-49b4-ae36-97c534bc79ea"). InnerVolumeSpecName "ssh-key-edpm-compute-no-nodes". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:06:41 crc kubenswrapper[4558]: I0120 18:06:41.734248 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/955714d3-c5b2-49b4-ae36-97c534bc79ea-inventory" (OuterVolumeSpecName: "inventory") pod "955714d3-c5b2-49b4-ae36-97c534bc79ea" (UID: "955714d3-c5b2-49b4-ae36-97c534bc79ea"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:06:41 crc kubenswrapper[4558]: I0120 18:06:41.809571 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n7vbx\" (UniqueName: \"kubernetes.io/projected/955714d3-c5b2-49b4-ae36-97c534bc79ea-kube-api-access-n7vbx\") on node \"crc\" DevicePath \"\"" Jan 20 18:06:41 crc kubenswrapper[4558]: I0120 18:06:41.809601 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/955714d3-c5b2-49b4-ae36-97c534bc79ea-ssh-key-edpm-compute-no-nodes\") on node \"crc\" DevicePath \"\"" Jan 20 18:06:41 crc kubenswrapper[4558]: I0120 18:06:41.809614 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/955714d3-c5b2-49b4-ae36-97c534bc79ea-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:06:42 crc kubenswrapper[4558]: I0120 18:06:42.483782 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/configure-network-edpm-multinodeset-edpm-compute-no-nodes-6rm52" event={"ID":"955714d3-c5b2-49b4-ae36-97c534bc79ea","Type":"ContainerDied","Data":"4cc0291f48ead7c59c7d46606c3c16603d8931e35cd2f96519cca8e486696f55"} Jan 20 18:06:42 crc kubenswrapper[4558]: I0120 18:06:42.483830 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/configure-network-edpm-multinodeset-edpm-compute-no-nodes-6rm52" Jan 20 18:06:42 crc kubenswrapper[4558]: I0120 18:06:42.485335 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4cc0291f48ead7c59c7d46606c3c16603d8931e35cd2f96519cca8e486696f55" Jan 20 18:06:42 crc kubenswrapper[4558]: I0120 18:06:42.522948 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/validate-network-edpm-multinodeset-edpm-compute-no-nodes-k4xfs"] Jan 20 18:06:42 crc kubenswrapper[4558]: E0120 18:06:42.523326 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="955714d3-c5b2-49b4-ae36-97c534bc79ea" containerName="configure-network-edpm-multinodeset-edpm-compute-no-nodes" Jan 20 18:06:42 crc kubenswrapper[4558]: I0120 18:06:42.523344 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="955714d3-c5b2-49b4-ae36-97c534bc79ea" containerName="configure-network-edpm-multinodeset-edpm-compute-no-nodes" Jan 20 18:06:42 crc kubenswrapper[4558]: I0120 18:06:42.523472 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="955714d3-c5b2-49b4-ae36-97c534bc79ea" containerName="configure-network-edpm-multinodeset-edpm-compute-no-nodes" Jan 20 18:06:42 crc kubenswrapper[4558]: I0120 18:06:42.523955 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/validate-network-edpm-multinodeset-edpm-compute-no-nodes-k4xfs" Jan 20 18:06:42 crc kubenswrapper[4558]: I0120 18:06:42.525487 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"edpm-compute-no-nodes-dockercfg-tljmj" Jan 20 18:06:42 crc kubenswrapper[4558]: I0120 18:06:42.525959 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-edpm-compute-no-nodes" Jan 20 18:06:42 crc kubenswrapper[4558]: I0120 18:06:42.526370 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 18:06:42 crc kubenswrapper[4558]: I0120 18:06:42.527695 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 18:06:42 crc kubenswrapper[4558]: I0120 18:06:42.534619 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/validate-network-edpm-multinodeset-edpm-compute-no-nodes-k4xfs"] Jan 20 18:06:42 crc kubenswrapper[4558]: I0120 18:06:42.620909 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cf58f\" (UniqueName: \"kubernetes.io/projected/c4b70b5c-da1d-40da-8a60-e2b03e2e3a10-kube-api-access-cf58f\") pod \"validate-network-edpm-multinodeset-edpm-compute-no-nodes-k4xfs\" (UID: \"c4b70b5c-da1d-40da-8a60-e2b03e2e3a10\") " pod="openstack-kuttl-tests/validate-network-edpm-multinodeset-edpm-compute-no-nodes-k4xfs" Jan 20 18:06:42 crc kubenswrapper[4558]: I0120 18:06:42.621107 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c4b70b5c-da1d-40da-8a60-e2b03e2e3a10-inventory\") pod \"validate-network-edpm-multinodeset-edpm-compute-no-nodes-k4xfs\" (UID: \"c4b70b5c-da1d-40da-8a60-e2b03e2e3a10\") " pod="openstack-kuttl-tests/validate-network-edpm-multinodeset-edpm-compute-no-nodes-k4xfs" Jan 20 18:06:42 crc kubenswrapper[4558]: I0120 18:06:42.621227 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/c4b70b5c-da1d-40da-8a60-e2b03e2e3a10-ssh-key-edpm-compute-no-nodes\") pod \"validate-network-edpm-multinodeset-edpm-compute-no-nodes-k4xfs\" (UID: \"c4b70b5c-da1d-40da-8a60-e2b03e2e3a10\") " pod="openstack-kuttl-tests/validate-network-edpm-multinodeset-edpm-compute-no-nodes-k4xfs" Jan 20 18:06:42 crc kubenswrapper[4558]: I0120 18:06:42.723095 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cf58f\" (UniqueName: \"kubernetes.io/projected/c4b70b5c-da1d-40da-8a60-e2b03e2e3a10-kube-api-access-cf58f\") pod \"validate-network-edpm-multinodeset-edpm-compute-no-nodes-k4xfs\" (UID: \"c4b70b5c-da1d-40da-8a60-e2b03e2e3a10\") " pod="openstack-kuttl-tests/validate-network-edpm-multinodeset-edpm-compute-no-nodes-k4xfs" Jan 20 18:06:42 crc kubenswrapper[4558]: I0120 18:06:42.723240 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c4b70b5c-da1d-40da-8a60-e2b03e2e3a10-inventory\") pod \"validate-network-edpm-multinodeset-edpm-compute-no-nodes-k4xfs\" (UID: \"c4b70b5c-da1d-40da-8a60-e2b03e2e3a10\") " pod="openstack-kuttl-tests/validate-network-edpm-multinodeset-edpm-compute-no-nodes-k4xfs" Jan 20 18:06:42 crc 
kubenswrapper[4558]: I0120 18:06:42.723305 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/c4b70b5c-da1d-40da-8a60-e2b03e2e3a10-ssh-key-edpm-compute-no-nodes\") pod \"validate-network-edpm-multinodeset-edpm-compute-no-nodes-k4xfs\" (UID: \"c4b70b5c-da1d-40da-8a60-e2b03e2e3a10\") " pod="openstack-kuttl-tests/validate-network-edpm-multinodeset-edpm-compute-no-nodes-k4xfs" Jan 20 18:06:42 crc kubenswrapper[4558]: I0120 18:06:42.729575 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c4b70b5c-da1d-40da-8a60-e2b03e2e3a10-inventory\") pod \"validate-network-edpm-multinodeset-edpm-compute-no-nodes-k4xfs\" (UID: \"c4b70b5c-da1d-40da-8a60-e2b03e2e3a10\") " pod="openstack-kuttl-tests/validate-network-edpm-multinodeset-edpm-compute-no-nodes-k4xfs" Jan 20 18:06:42 crc kubenswrapper[4558]: I0120 18:06:42.729660 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/c4b70b5c-da1d-40da-8a60-e2b03e2e3a10-ssh-key-edpm-compute-no-nodes\") pod \"validate-network-edpm-multinodeset-edpm-compute-no-nodes-k4xfs\" (UID: \"c4b70b5c-da1d-40da-8a60-e2b03e2e3a10\") " pod="openstack-kuttl-tests/validate-network-edpm-multinodeset-edpm-compute-no-nodes-k4xfs" Jan 20 18:06:42 crc kubenswrapper[4558]: I0120 18:06:42.737969 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cf58f\" (UniqueName: \"kubernetes.io/projected/c4b70b5c-da1d-40da-8a60-e2b03e2e3a10-kube-api-access-cf58f\") pod \"validate-network-edpm-multinodeset-edpm-compute-no-nodes-k4xfs\" (UID: \"c4b70b5c-da1d-40da-8a60-e2b03e2e3a10\") " pod="openstack-kuttl-tests/validate-network-edpm-multinodeset-edpm-compute-no-nodes-k4xfs" Jan 20 18:06:42 crc kubenswrapper[4558]: I0120 18:06:42.838295 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/validate-network-edpm-multinodeset-edpm-compute-no-nodes-k4xfs" Jan 20 18:06:43 crc kubenswrapper[4558]: I0120 18:06:43.219339 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/validate-network-edpm-multinodeset-edpm-compute-no-nodes-k4xfs"] Jan 20 18:06:43 crc kubenswrapper[4558]: I0120 18:06:43.498249 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/validate-network-edpm-multinodeset-edpm-compute-no-nodes-k4xfs" event={"ID":"c4b70b5c-da1d-40da-8a60-e2b03e2e3a10","Type":"ContainerStarted","Data":"445e47e17fcb150df1d08c655ccbf97102afc21dbfa2a76f0890ccb7e13def89"} Jan 20 18:06:43 crc kubenswrapper[4558]: I0120 18:06:43.566370 4558 scope.go:117] "RemoveContainer" containerID="6b02d31ec11fe5435af988e35303a6f66b5afa5ef952ce5f7cc4b3cd5f4d9497" Jan 20 18:06:43 crc kubenswrapper[4558]: E0120 18:06:43.566587 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:06:44 crc kubenswrapper[4558]: I0120 18:06:44.509502 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/validate-network-edpm-multinodeset-edpm-compute-no-nodes-k4xfs" event={"ID":"c4b70b5c-da1d-40da-8a60-e2b03e2e3a10","Type":"ContainerStarted","Data":"e38d8d3187d68b2c810f9407993637c90f2e49f2057cd2d9bd1041ee58f14219"} Jan 20 18:06:44 crc kubenswrapper[4558]: I0120 18:06:44.535367 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/validate-network-edpm-multinodeset-edpm-compute-no-nodes-k4xfs" podStartSLOduration=1.979331265 podStartE2EDuration="2.535327779s" podCreationTimestamp="2026-01-20 18:06:42 +0000 UTC" firstStartedPulling="2026-01-20 18:06:43.227927317 +0000 UTC m=+5096.988265284" lastFinishedPulling="2026-01-20 18:06:43.783923831 +0000 UTC m=+5097.544261798" observedRunningTime="2026-01-20 18:06:44.527501894 +0000 UTC m=+5098.287839861" watchObservedRunningTime="2026-01-20 18:06:44.535327779 +0000 UTC m=+5098.295665746" Jan 20 18:06:45 crc kubenswrapper[4558]: I0120 18:06:45.519927 4558 generic.go:334] "Generic (PLEG): container finished" podID="c4b70b5c-da1d-40da-8a60-e2b03e2e3a10" containerID="e38d8d3187d68b2c810f9407993637c90f2e49f2057cd2d9bd1041ee58f14219" exitCode=0 Jan 20 18:06:45 crc kubenswrapper[4558]: I0120 18:06:45.520140 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/validate-network-edpm-multinodeset-edpm-compute-no-nodes-k4xfs" event={"ID":"c4b70b5c-da1d-40da-8a60-e2b03e2e3a10","Type":"ContainerDied","Data":"e38d8d3187d68b2c810f9407993637c90f2e49f2057cd2d9bd1041ee58f14219"} Jan 20 18:06:46 crc kubenswrapper[4558]: I0120 18:06:46.763871 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/validate-network-edpm-multinodeset-edpm-compute-no-nodes-k4xfs" Jan 20 18:06:46 crc kubenswrapper[4558]: I0120 18:06:46.786292 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cf58f\" (UniqueName: \"kubernetes.io/projected/c4b70b5c-da1d-40da-8a60-e2b03e2e3a10-kube-api-access-cf58f\") pod \"c4b70b5c-da1d-40da-8a60-e2b03e2e3a10\" (UID: \"c4b70b5c-da1d-40da-8a60-e2b03e2e3a10\") " Jan 20 18:06:46 crc kubenswrapper[4558]: I0120 18:06:46.786346 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c4b70b5c-da1d-40da-8a60-e2b03e2e3a10-inventory\") pod \"c4b70b5c-da1d-40da-8a60-e2b03e2e3a10\" (UID: \"c4b70b5c-da1d-40da-8a60-e2b03e2e3a10\") " Jan 20 18:06:46 crc kubenswrapper[4558]: I0120 18:06:46.786385 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/c4b70b5c-da1d-40da-8a60-e2b03e2e3a10-ssh-key-edpm-compute-no-nodes\") pod \"c4b70b5c-da1d-40da-8a60-e2b03e2e3a10\" (UID: \"c4b70b5c-da1d-40da-8a60-e2b03e2e3a10\") " Jan 20 18:06:46 crc kubenswrapper[4558]: I0120 18:06:46.794860 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c4b70b5c-da1d-40da-8a60-e2b03e2e3a10-kube-api-access-cf58f" (OuterVolumeSpecName: "kube-api-access-cf58f") pod "c4b70b5c-da1d-40da-8a60-e2b03e2e3a10" (UID: "c4b70b5c-da1d-40da-8a60-e2b03e2e3a10"). InnerVolumeSpecName "kube-api-access-cf58f". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:06:46 crc kubenswrapper[4558]: I0120 18:06:46.809557 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c4b70b5c-da1d-40da-8a60-e2b03e2e3a10-inventory" (OuterVolumeSpecName: "inventory") pod "c4b70b5c-da1d-40da-8a60-e2b03e2e3a10" (UID: "c4b70b5c-da1d-40da-8a60-e2b03e2e3a10"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:06:46 crc kubenswrapper[4558]: I0120 18:06:46.813116 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c4b70b5c-da1d-40da-8a60-e2b03e2e3a10-ssh-key-edpm-compute-no-nodes" (OuterVolumeSpecName: "ssh-key-edpm-compute-no-nodes") pod "c4b70b5c-da1d-40da-8a60-e2b03e2e3a10" (UID: "c4b70b5c-da1d-40da-8a60-e2b03e2e3a10"). InnerVolumeSpecName "ssh-key-edpm-compute-no-nodes". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:06:46 crc kubenswrapper[4558]: I0120 18:06:46.888403 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cf58f\" (UniqueName: \"kubernetes.io/projected/c4b70b5c-da1d-40da-8a60-e2b03e2e3a10-kube-api-access-cf58f\") on node \"crc\" DevicePath \"\"" Jan 20 18:06:46 crc kubenswrapper[4558]: I0120 18:06:46.888436 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c4b70b5c-da1d-40da-8a60-e2b03e2e3a10-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:06:46 crc kubenswrapper[4558]: I0120 18:06:46.888447 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/c4b70b5c-da1d-40da-8a60-e2b03e2e3a10-ssh-key-edpm-compute-no-nodes\") on node \"crc\" DevicePath \"\"" Jan 20 18:06:47 crc kubenswrapper[4558]: I0120 18:06:47.540289 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/validate-network-edpm-multinodeset-edpm-compute-no-nodes-k4xfs" event={"ID":"c4b70b5c-da1d-40da-8a60-e2b03e2e3a10","Type":"ContainerDied","Data":"445e47e17fcb150df1d08c655ccbf97102afc21dbfa2a76f0890ccb7e13def89"} Jan 20 18:06:47 crc kubenswrapper[4558]: I0120 18:06:47.540726 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="445e47e17fcb150df1d08c655ccbf97102afc21dbfa2a76f0890ccb7e13def89" Jan 20 18:06:47 crc kubenswrapper[4558]: I0120 18:06:47.540354 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/validate-network-edpm-multinodeset-edpm-compute-no-nodes-k4xfs" Jan 20 18:06:47 crc kubenswrapper[4558]: I0120 18:06:47.591014 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/install-os-edpm-multinodeset-edpm-compute-no-nodes-xq8w6"] Jan 20 18:06:47 crc kubenswrapper[4558]: E0120 18:06:47.591370 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4b70b5c-da1d-40da-8a60-e2b03e2e3a10" containerName="validate-network-edpm-multinodeset-edpm-compute-no-nodes" Jan 20 18:06:47 crc kubenswrapper[4558]: I0120 18:06:47.591391 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4b70b5c-da1d-40da-8a60-e2b03e2e3a10" containerName="validate-network-edpm-multinodeset-edpm-compute-no-nodes" Jan 20 18:06:47 crc kubenswrapper[4558]: I0120 18:06:47.591556 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c4b70b5c-da1d-40da-8a60-e2b03e2e3a10" containerName="validate-network-edpm-multinodeset-edpm-compute-no-nodes" Jan 20 18:06:47 crc kubenswrapper[4558]: I0120 18:06:47.592038 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/install-os-edpm-multinodeset-edpm-compute-no-nodes-xq8w6" Jan 20 18:06:47 crc kubenswrapper[4558]: I0120 18:06:47.594653 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 18:06:47 crc kubenswrapper[4558]: I0120 18:06:47.594687 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-edpm-compute-no-nodes" Jan 20 18:06:47 crc kubenswrapper[4558]: I0120 18:06:47.594737 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 18:06:47 crc kubenswrapper[4558]: I0120 18:06:47.594867 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"edpm-compute-no-nodes-dockercfg-tljmj" Jan 20 18:06:47 crc kubenswrapper[4558]: I0120 18:06:47.598134 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ab9e8e32-6e48-4af0-ab56-2ec5fe7d986f-inventory\") pod \"install-os-edpm-multinodeset-edpm-compute-no-nodes-xq8w6\" (UID: \"ab9e8e32-6e48-4af0-ab56-2ec5fe7d986f\") " pod="openstack-kuttl-tests/install-os-edpm-multinodeset-edpm-compute-no-nodes-xq8w6" Jan 20 18:06:47 crc kubenswrapper[4558]: I0120 18:06:47.598463 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/ab9e8e32-6e48-4af0-ab56-2ec5fe7d986f-ssh-key-edpm-compute-no-nodes\") pod \"install-os-edpm-multinodeset-edpm-compute-no-nodes-xq8w6\" (UID: \"ab9e8e32-6e48-4af0-ab56-2ec5fe7d986f\") " pod="openstack-kuttl-tests/install-os-edpm-multinodeset-edpm-compute-no-nodes-xq8w6" Jan 20 18:06:47 crc kubenswrapper[4558]: I0120 18:06:47.598644 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gwsn4\" (UniqueName: \"kubernetes.io/projected/ab9e8e32-6e48-4af0-ab56-2ec5fe7d986f-kube-api-access-gwsn4\") pod \"install-os-edpm-multinodeset-edpm-compute-no-nodes-xq8w6\" (UID: \"ab9e8e32-6e48-4af0-ab56-2ec5fe7d986f\") " pod="openstack-kuttl-tests/install-os-edpm-multinodeset-edpm-compute-no-nodes-xq8w6" Jan 20 18:06:47 crc kubenswrapper[4558]: I0120 18:06:47.602194 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/install-os-edpm-multinodeset-edpm-compute-no-nodes-xq8w6"] Jan 20 18:06:47 crc kubenswrapper[4558]: I0120 18:06:47.700424 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/ab9e8e32-6e48-4af0-ab56-2ec5fe7d986f-ssh-key-edpm-compute-no-nodes\") pod \"install-os-edpm-multinodeset-edpm-compute-no-nodes-xq8w6\" (UID: \"ab9e8e32-6e48-4af0-ab56-2ec5fe7d986f\") " pod="openstack-kuttl-tests/install-os-edpm-multinodeset-edpm-compute-no-nodes-xq8w6" Jan 20 18:06:47 crc kubenswrapper[4558]: I0120 18:06:47.700694 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gwsn4\" (UniqueName: \"kubernetes.io/projected/ab9e8e32-6e48-4af0-ab56-2ec5fe7d986f-kube-api-access-gwsn4\") pod \"install-os-edpm-multinodeset-edpm-compute-no-nodes-xq8w6\" (UID: \"ab9e8e32-6e48-4af0-ab56-2ec5fe7d986f\") " pod="openstack-kuttl-tests/install-os-edpm-multinodeset-edpm-compute-no-nodes-xq8w6" Jan 20 18:06:47 crc kubenswrapper[4558]: I0120 
18:06:47.700758 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ab9e8e32-6e48-4af0-ab56-2ec5fe7d986f-inventory\") pod \"install-os-edpm-multinodeset-edpm-compute-no-nodes-xq8w6\" (UID: \"ab9e8e32-6e48-4af0-ab56-2ec5fe7d986f\") " pod="openstack-kuttl-tests/install-os-edpm-multinodeset-edpm-compute-no-nodes-xq8w6" Jan 20 18:06:47 crc kubenswrapper[4558]: I0120 18:06:47.705266 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ab9e8e32-6e48-4af0-ab56-2ec5fe7d986f-inventory\") pod \"install-os-edpm-multinodeset-edpm-compute-no-nodes-xq8w6\" (UID: \"ab9e8e32-6e48-4af0-ab56-2ec5fe7d986f\") " pod="openstack-kuttl-tests/install-os-edpm-multinodeset-edpm-compute-no-nodes-xq8w6" Jan 20 18:06:47 crc kubenswrapper[4558]: I0120 18:06:47.705924 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/ab9e8e32-6e48-4af0-ab56-2ec5fe7d986f-ssh-key-edpm-compute-no-nodes\") pod \"install-os-edpm-multinodeset-edpm-compute-no-nodes-xq8w6\" (UID: \"ab9e8e32-6e48-4af0-ab56-2ec5fe7d986f\") " pod="openstack-kuttl-tests/install-os-edpm-multinodeset-edpm-compute-no-nodes-xq8w6" Jan 20 18:06:47 crc kubenswrapper[4558]: I0120 18:06:47.715932 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gwsn4\" (UniqueName: \"kubernetes.io/projected/ab9e8e32-6e48-4af0-ab56-2ec5fe7d986f-kube-api-access-gwsn4\") pod \"install-os-edpm-multinodeset-edpm-compute-no-nodes-xq8w6\" (UID: \"ab9e8e32-6e48-4af0-ab56-2ec5fe7d986f\") " pod="openstack-kuttl-tests/install-os-edpm-multinodeset-edpm-compute-no-nodes-xq8w6" Jan 20 18:06:47 crc kubenswrapper[4558]: I0120 18:06:47.905654 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/install-os-edpm-multinodeset-edpm-compute-no-nodes-xq8w6" Jan 20 18:06:48 crc kubenswrapper[4558]: I0120 18:06:48.287600 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/install-os-edpm-multinodeset-edpm-compute-no-nodes-xq8w6"] Jan 20 18:06:48 crc kubenswrapper[4558]: I0120 18:06:48.551113 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/install-os-edpm-multinodeset-edpm-compute-no-nodes-xq8w6" event={"ID":"ab9e8e32-6e48-4af0-ab56-2ec5fe7d986f","Type":"ContainerStarted","Data":"c4be0bff39696bf284ee43d0ebbf386878a393d07664ad9a46ea74873b835950"} Jan 20 18:06:49 crc kubenswrapper[4558]: I0120 18:06:49.563448 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/install-os-edpm-multinodeset-edpm-compute-no-nodes-xq8w6" event={"ID":"ab9e8e32-6e48-4af0-ab56-2ec5fe7d986f","Type":"ContainerStarted","Data":"5143fa3eb96cd97462024f03bea00ab9e6ac1a72f72377f2c6a7b950245e7d13"} Jan 20 18:06:49 crc kubenswrapper[4558]: I0120 18:06:49.581694 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/install-os-edpm-multinodeset-edpm-compute-no-nodes-xq8w6" podStartSLOduration=2.038290111 podStartE2EDuration="2.581678509s" podCreationTimestamp="2026-01-20 18:06:47 +0000 UTC" firstStartedPulling="2026-01-20 18:06:48.295220393 +0000 UTC m=+5102.055558350" lastFinishedPulling="2026-01-20 18:06:48.838608781 +0000 UTC m=+5102.598946748" observedRunningTime="2026-01-20 18:06:49.578765382 +0000 UTC m=+5103.339103340" watchObservedRunningTime="2026-01-20 18:06:49.581678509 +0000 UTC m=+5103.342016476" Jan 20 18:06:50 crc kubenswrapper[4558]: I0120 18:06:50.574035 4558 generic.go:334] "Generic (PLEG): container finished" podID="ab9e8e32-6e48-4af0-ab56-2ec5fe7d986f" containerID="5143fa3eb96cd97462024f03bea00ab9e6ac1a72f72377f2c6a7b950245e7d13" exitCode=0 Jan 20 18:06:50 crc kubenswrapper[4558]: I0120 18:06:50.575049 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/install-os-edpm-multinodeset-edpm-compute-no-nodes-xq8w6" event={"ID":"ab9e8e32-6e48-4af0-ab56-2ec5fe7d986f","Type":"ContainerDied","Data":"5143fa3eb96cd97462024f03bea00ab9e6ac1a72f72377f2c6a7b950245e7d13"} Jan 20 18:06:51 crc kubenswrapper[4558]: I0120 18:06:51.828074 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/install-os-edpm-multinodeset-edpm-compute-no-nodes-xq8w6" Jan 20 18:06:51 crc kubenswrapper[4558]: I0120 18:06:51.970063 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ab9e8e32-6e48-4af0-ab56-2ec5fe7d986f-inventory\") pod \"ab9e8e32-6e48-4af0-ab56-2ec5fe7d986f\" (UID: \"ab9e8e32-6e48-4af0-ab56-2ec5fe7d986f\") " Jan 20 18:06:51 crc kubenswrapper[4558]: I0120 18:06:51.970300 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gwsn4\" (UniqueName: \"kubernetes.io/projected/ab9e8e32-6e48-4af0-ab56-2ec5fe7d986f-kube-api-access-gwsn4\") pod \"ab9e8e32-6e48-4af0-ab56-2ec5fe7d986f\" (UID: \"ab9e8e32-6e48-4af0-ab56-2ec5fe7d986f\") " Jan 20 18:06:51 crc kubenswrapper[4558]: I0120 18:06:51.970370 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/ab9e8e32-6e48-4af0-ab56-2ec5fe7d986f-ssh-key-edpm-compute-no-nodes\") pod \"ab9e8e32-6e48-4af0-ab56-2ec5fe7d986f\" (UID: \"ab9e8e32-6e48-4af0-ab56-2ec5fe7d986f\") " Jan 20 18:06:51 crc kubenswrapper[4558]: I0120 18:06:51.977188 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab9e8e32-6e48-4af0-ab56-2ec5fe7d986f-kube-api-access-gwsn4" (OuterVolumeSpecName: "kube-api-access-gwsn4") pod "ab9e8e32-6e48-4af0-ab56-2ec5fe7d986f" (UID: "ab9e8e32-6e48-4af0-ab56-2ec5fe7d986f"). InnerVolumeSpecName "kube-api-access-gwsn4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:06:51 crc kubenswrapper[4558]: I0120 18:06:51.993236 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab9e8e32-6e48-4af0-ab56-2ec5fe7d986f-inventory" (OuterVolumeSpecName: "inventory") pod "ab9e8e32-6e48-4af0-ab56-2ec5fe7d986f" (UID: "ab9e8e32-6e48-4af0-ab56-2ec5fe7d986f"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:06:51 crc kubenswrapper[4558]: I0120 18:06:51.997055 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab9e8e32-6e48-4af0-ab56-2ec5fe7d986f-ssh-key-edpm-compute-no-nodes" (OuterVolumeSpecName: "ssh-key-edpm-compute-no-nodes") pod "ab9e8e32-6e48-4af0-ab56-2ec5fe7d986f" (UID: "ab9e8e32-6e48-4af0-ab56-2ec5fe7d986f"). InnerVolumeSpecName "ssh-key-edpm-compute-no-nodes". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:06:52 crc kubenswrapper[4558]: I0120 18:06:52.072151 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gwsn4\" (UniqueName: \"kubernetes.io/projected/ab9e8e32-6e48-4af0-ab56-2ec5fe7d986f-kube-api-access-gwsn4\") on node \"crc\" DevicePath \"\"" Jan 20 18:06:52 crc kubenswrapper[4558]: I0120 18:06:52.072204 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/ab9e8e32-6e48-4af0-ab56-2ec5fe7d986f-ssh-key-edpm-compute-no-nodes\") on node \"crc\" DevicePath \"\"" Jan 20 18:06:52 crc kubenswrapper[4558]: I0120 18:06:52.072218 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ab9e8e32-6e48-4af0-ab56-2ec5fe7d986f-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:06:52 crc kubenswrapper[4558]: I0120 18:06:52.593434 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/install-os-edpm-multinodeset-edpm-compute-no-nodes-xq8w6" event={"ID":"ab9e8e32-6e48-4af0-ab56-2ec5fe7d986f","Type":"ContainerDied","Data":"c4be0bff39696bf284ee43d0ebbf386878a393d07664ad9a46ea74873b835950"} Jan 20 18:06:52 crc kubenswrapper[4558]: I0120 18:06:52.593488 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c4be0bff39696bf284ee43d0ebbf386878a393d07664ad9a46ea74873b835950" Jan 20 18:06:52 crc kubenswrapper[4558]: I0120 18:06:52.593741 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/install-os-edpm-multinodeset-edpm-compute-no-nodes-xq8w6" Jan 20 18:06:52 crc kubenswrapper[4558]: I0120 18:06:52.643106 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/configure-os-edpm-multinodeset-edpm-compute-no-nodes-t4wsc"] Jan 20 18:06:52 crc kubenswrapper[4558]: E0120 18:06:52.643592 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab9e8e32-6e48-4af0-ab56-2ec5fe7d986f" containerName="install-os-edpm-multinodeset-edpm-compute-no-nodes" Jan 20 18:06:52 crc kubenswrapper[4558]: I0120 18:06:52.643617 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab9e8e32-6e48-4af0-ab56-2ec5fe7d986f" containerName="install-os-edpm-multinodeset-edpm-compute-no-nodes" Jan 20 18:06:52 crc kubenswrapper[4558]: I0120 18:06:52.643823 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab9e8e32-6e48-4af0-ab56-2ec5fe7d986f" containerName="install-os-edpm-multinodeset-edpm-compute-no-nodes" Jan 20 18:06:52 crc kubenswrapper[4558]: I0120 18:06:52.644554 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/configure-os-edpm-multinodeset-edpm-compute-no-nodes-t4wsc" Jan 20 18:06:52 crc kubenswrapper[4558]: I0120 18:06:52.646229 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 18:06:52 crc kubenswrapper[4558]: I0120 18:06:52.646815 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"edpm-compute-no-nodes-dockercfg-tljmj" Jan 20 18:06:52 crc kubenswrapper[4558]: I0120 18:06:52.648456 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-edpm-compute-no-nodes" Jan 20 18:06:52 crc kubenswrapper[4558]: I0120 18:06:52.651487 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/configure-os-edpm-multinodeset-edpm-compute-no-nodes-t4wsc"] Jan 20 18:06:52 crc kubenswrapper[4558]: I0120 18:06:52.652400 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 18:06:52 crc kubenswrapper[4558]: I0120 18:06:52.780810 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e980f297-354c-4199-b212-827d9adba246-inventory\") pod \"configure-os-edpm-multinodeset-edpm-compute-no-nodes-t4wsc\" (UID: \"e980f297-354c-4199-b212-827d9adba246\") " pod="openstack-kuttl-tests/configure-os-edpm-multinodeset-edpm-compute-no-nodes-t4wsc" Jan 20 18:06:52 crc kubenswrapper[4558]: I0120 18:06:52.780888 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/e980f297-354c-4199-b212-827d9adba246-ssh-key-edpm-compute-no-nodes\") pod \"configure-os-edpm-multinodeset-edpm-compute-no-nodes-t4wsc\" (UID: \"e980f297-354c-4199-b212-827d9adba246\") " pod="openstack-kuttl-tests/configure-os-edpm-multinodeset-edpm-compute-no-nodes-t4wsc" Jan 20 18:06:52 crc kubenswrapper[4558]: I0120 18:06:52.780952 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rjbbn\" (UniqueName: \"kubernetes.io/projected/e980f297-354c-4199-b212-827d9adba246-kube-api-access-rjbbn\") pod \"configure-os-edpm-multinodeset-edpm-compute-no-nodes-t4wsc\" (UID: \"e980f297-354c-4199-b212-827d9adba246\") " pod="openstack-kuttl-tests/configure-os-edpm-multinodeset-edpm-compute-no-nodes-t4wsc" Jan 20 18:06:52 crc kubenswrapper[4558]: I0120 18:06:52.882616 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e980f297-354c-4199-b212-827d9adba246-inventory\") pod \"configure-os-edpm-multinodeset-edpm-compute-no-nodes-t4wsc\" (UID: \"e980f297-354c-4199-b212-827d9adba246\") " pod="openstack-kuttl-tests/configure-os-edpm-multinodeset-edpm-compute-no-nodes-t4wsc" Jan 20 18:06:52 crc kubenswrapper[4558]: I0120 18:06:52.882674 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/e980f297-354c-4199-b212-827d9adba246-ssh-key-edpm-compute-no-nodes\") pod \"configure-os-edpm-multinodeset-edpm-compute-no-nodes-t4wsc\" (UID: \"e980f297-354c-4199-b212-827d9adba246\") " pod="openstack-kuttl-tests/configure-os-edpm-multinodeset-edpm-compute-no-nodes-t4wsc" Jan 20 18:06:52 crc kubenswrapper[4558]: I0120 
18:06:52.882706 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rjbbn\" (UniqueName: \"kubernetes.io/projected/e980f297-354c-4199-b212-827d9adba246-kube-api-access-rjbbn\") pod \"configure-os-edpm-multinodeset-edpm-compute-no-nodes-t4wsc\" (UID: \"e980f297-354c-4199-b212-827d9adba246\") " pod="openstack-kuttl-tests/configure-os-edpm-multinodeset-edpm-compute-no-nodes-t4wsc" Jan 20 18:06:52 crc kubenswrapper[4558]: I0120 18:06:52.888802 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e980f297-354c-4199-b212-827d9adba246-inventory\") pod \"configure-os-edpm-multinodeset-edpm-compute-no-nodes-t4wsc\" (UID: \"e980f297-354c-4199-b212-827d9adba246\") " pod="openstack-kuttl-tests/configure-os-edpm-multinodeset-edpm-compute-no-nodes-t4wsc" Jan 20 18:06:52 crc kubenswrapper[4558]: I0120 18:06:52.889597 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/e980f297-354c-4199-b212-827d9adba246-ssh-key-edpm-compute-no-nodes\") pod \"configure-os-edpm-multinodeset-edpm-compute-no-nodes-t4wsc\" (UID: \"e980f297-354c-4199-b212-827d9adba246\") " pod="openstack-kuttl-tests/configure-os-edpm-multinodeset-edpm-compute-no-nodes-t4wsc" Jan 20 18:06:52 crc kubenswrapper[4558]: I0120 18:06:52.902803 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rjbbn\" (UniqueName: \"kubernetes.io/projected/e980f297-354c-4199-b212-827d9adba246-kube-api-access-rjbbn\") pod \"configure-os-edpm-multinodeset-edpm-compute-no-nodes-t4wsc\" (UID: \"e980f297-354c-4199-b212-827d9adba246\") " pod="openstack-kuttl-tests/configure-os-edpm-multinodeset-edpm-compute-no-nodes-t4wsc" Jan 20 18:06:52 crc kubenswrapper[4558]: I0120 18:06:52.959955 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/configure-os-edpm-multinodeset-edpm-compute-no-nodes-t4wsc" Jan 20 18:06:53 crc kubenswrapper[4558]: I0120 18:06:53.394532 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/configure-os-edpm-multinodeset-edpm-compute-no-nodes-t4wsc"] Jan 20 18:06:53 crc kubenswrapper[4558]: I0120 18:06:53.605824 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/configure-os-edpm-multinodeset-edpm-compute-no-nodes-t4wsc" event={"ID":"e980f297-354c-4199-b212-827d9adba246","Type":"ContainerStarted","Data":"799de5bc8c197274a39bfd1ee80318211f5f2bdd5f3cee24ea7c8dcaa6e4b00c"} Jan 20 18:06:54 crc kubenswrapper[4558]: I0120 18:06:54.618105 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/configure-os-edpm-multinodeset-edpm-compute-no-nodes-t4wsc" event={"ID":"e980f297-354c-4199-b212-827d9adba246","Type":"ContainerStarted","Data":"bf4d3811eee2d6c2a76a04c8bc4886d5e4064262d451154bc849330373e28b8c"} Jan 20 18:06:54 crc kubenswrapper[4558]: I0120 18:06:54.636219 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/configure-os-edpm-multinodeset-edpm-compute-no-nodes-t4wsc" podStartSLOduration=2.086149288 podStartE2EDuration="2.636199792s" podCreationTimestamp="2026-01-20 18:06:52 +0000 UTC" firstStartedPulling="2026-01-20 18:06:53.37107174 +0000 UTC m=+5107.131409707" lastFinishedPulling="2026-01-20 18:06:53.921122244 +0000 UTC m=+5107.681460211" observedRunningTime="2026-01-20 18:06:54.632957365 +0000 UTC m=+5108.393295332" watchObservedRunningTime="2026-01-20 18:06:54.636199792 +0000 UTC m=+5108.396537759" Jan 20 18:06:55 crc kubenswrapper[4558]: I0120 18:06:55.028452 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-vbwj6"] Jan 20 18:06:55 crc kubenswrapper[4558]: I0120 18:06:55.029958 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vbwj6" Jan 20 18:06:55 crc kubenswrapper[4558]: I0120 18:06:55.036721 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vbwj6"] Jan 20 18:06:55 crc kubenswrapper[4558]: I0120 18:06:55.219010 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d49ebbdd-1136-455b-9b2a-ab3b0762be10-utilities\") pod \"redhat-marketplace-vbwj6\" (UID: \"d49ebbdd-1136-455b-9b2a-ab3b0762be10\") " pod="openshift-marketplace/redhat-marketplace-vbwj6" Jan 20 18:06:55 crc kubenswrapper[4558]: I0120 18:06:55.219140 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hrdll\" (UniqueName: \"kubernetes.io/projected/d49ebbdd-1136-455b-9b2a-ab3b0762be10-kube-api-access-hrdll\") pod \"redhat-marketplace-vbwj6\" (UID: \"d49ebbdd-1136-455b-9b2a-ab3b0762be10\") " pod="openshift-marketplace/redhat-marketplace-vbwj6" Jan 20 18:06:55 crc kubenswrapper[4558]: I0120 18:06:55.219244 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d49ebbdd-1136-455b-9b2a-ab3b0762be10-catalog-content\") pod \"redhat-marketplace-vbwj6\" (UID: \"d49ebbdd-1136-455b-9b2a-ab3b0762be10\") " pod="openshift-marketplace/redhat-marketplace-vbwj6" Jan 20 18:06:55 crc kubenswrapper[4558]: I0120 18:06:55.320721 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d49ebbdd-1136-455b-9b2a-ab3b0762be10-utilities\") pod \"redhat-marketplace-vbwj6\" (UID: \"d49ebbdd-1136-455b-9b2a-ab3b0762be10\") " pod="openshift-marketplace/redhat-marketplace-vbwj6" Jan 20 18:06:55 crc kubenswrapper[4558]: I0120 18:06:55.321018 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hrdll\" (UniqueName: \"kubernetes.io/projected/d49ebbdd-1136-455b-9b2a-ab3b0762be10-kube-api-access-hrdll\") pod \"redhat-marketplace-vbwj6\" (UID: \"d49ebbdd-1136-455b-9b2a-ab3b0762be10\") " pod="openshift-marketplace/redhat-marketplace-vbwj6" Jan 20 18:06:55 crc kubenswrapper[4558]: I0120 18:06:55.321190 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d49ebbdd-1136-455b-9b2a-ab3b0762be10-catalog-content\") pod \"redhat-marketplace-vbwj6\" (UID: \"d49ebbdd-1136-455b-9b2a-ab3b0762be10\") " pod="openshift-marketplace/redhat-marketplace-vbwj6" Jan 20 18:06:55 crc kubenswrapper[4558]: I0120 18:06:55.321376 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d49ebbdd-1136-455b-9b2a-ab3b0762be10-utilities\") pod \"redhat-marketplace-vbwj6\" (UID: \"d49ebbdd-1136-455b-9b2a-ab3b0762be10\") " pod="openshift-marketplace/redhat-marketplace-vbwj6" Jan 20 18:06:55 crc kubenswrapper[4558]: I0120 18:06:55.321731 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d49ebbdd-1136-455b-9b2a-ab3b0762be10-catalog-content\") pod \"redhat-marketplace-vbwj6\" (UID: \"d49ebbdd-1136-455b-9b2a-ab3b0762be10\") " pod="openshift-marketplace/redhat-marketplace-vbwj6" Jan 20 18:06:55 crc kubenswrapper[4558]: I0120 18:06:55.598347 4558 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-hrdll\" (UniqueName: \"kubernetes.io/projected/d49ebbdd-1136-455b-9b2a-ab3b0762be10-kube-api-access-hrdll\") pod \"redhat-marketplace-vbwj6\" (UID: \"d49ebbdd-1136-455b-9b2a-ab3b0762be10\") " pod="openshift-marketplace/redhat-marketplace-vbwj6" Jan 20 18:06:55 crc kubenswrapper[4558]: I0120 18:06:55.630916 4558 generic.go:334] "Generic (PLEG): container finished" podID="e980f297-354c-4199-b212-827d9adba246" containerID="bf4d3811eee2d6c2a76a04c8bc4886d5e4064262d451154bc849330373e28b8c" exitCode=0 Jan 20 18:06:55 crc kubenswrapper[4558]: I0120 18:06:55.631448 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/configure-os-edpm-multinodeset-edpm-compute-no-nodes-t4wsc" event={"ID":"e980f297-354c-4199-b212-827d9adba246","Type":"ContainerDied","Data":"bf4d3811eee2d6c2a76a04c8bc4886d5e4064262d451154bc849330373e28b8c"} Jan 20 18:06:55 crc kubenswrapper[4558]: I0120 18:06:55.659811 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vbwj6" Jan 20 18:06:56 crc kubenswrapper[4558]: I0120 18:06:56.121484 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vbwj6"] Jan 20 18:06:56 crc kubenswrapper[4558]: W0120 18:06:56.125514 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd49ebbdd_1136_455b_9b2a_ab3b0762be10.slice/crio-ec3dee9e53ea4fe0aea41970d23f5b27ae88fab11e3eb83e183a3cc75ee8b60b WatchSource:0}: Error finding container ec3dee9e53ea4fe0aea41970d23f5b27ae88fab11e3eb83e183a3cc75ee8b60b: Status 404 returned error can't find the container with id ec3dee9e53ea4fe0aea41970d23f5b27ae88fab11e3eb83e183a3cc75ee8b60b Jan 20 18:06:56 crc kubenswrapper[4558]: I0120 18:06:56.572719 4558 scope.go:117] "RemoveContainer" containerID="6b02d31ec11fe5435af988e35303a6f66b5afa5ef952ce5f7cc4b3cd5f4d9497" Jan 20 18:06:56 crc kubenswrapper[4558]: E0120 18:06:56.573082 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:06:56 crc kubenswrapper[4558]: I0120 18:06:56.644265 4558 generic.go:334] "Generic (PLEG): container finished" podID="d49ebbdd-1136-455b-9b2a-ab3b0762be10" containerID="507ec83b72f3da5bb7281aa95c5bb4b9dc0916ec6b16381a18bbf2156569740b" exitCode=0 Jan 20 18:06:56 crc kubenswrapper[4558]: I0120 18:06:56.644360 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vbwj6" event={"ID":"d49ebbdd-1136-455b-9b2a-ab3b0762be10","Type":"ContainerDied","Data":"507ec83b72f3da5bb7281aa95c5bb4b9dc0916ec6b16381a18bbf2156569740b"} Jan 20 18:06:56 crc kubenswrapper[4558]: I0120 18:06:56.644415 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vbwj6" event={"ID":"d49ebbdd-1136-455b-9b2a-ab3b0762be10","Type":"ContainerStarted","Data":"ec3dee9e53ea4fe0aea41970d23f5b27ae88fab11e3eb83e183a3cc75ee8b60b"} Jan 20 18:06:56 crc kubenswrapper[4558]: I0120 18:06:56.886926 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/configure-os-edpm-multinodeset-edpm-compute-no-nodes-t4wsc" Jan 20 18:06:56 crc kubenswrapper[4558]: I0120 18:06:56.949956 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rjbbn\" (UniqueName: \"kubernetes.io/projected/e980f297-354c-4199-b212-827d9adba246-kube-api-access-rjbbn\") pod \"e980f297-354c-4199-b212-827d9adba246\" (UID: \"e980f297-354c-4199-b212-827d9adba246\") " Jan 20 18:06:56 crc kubenswrapper[4558]: I0120 18:06:56.950090 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/e980f297-354c-4199-b212-827d9adba246-ssh-key-edpm-compute-no-nodes\") pod \"e980f297-354c-4199-b212-827d9adba246\" (UID: \"e980f297-354c-4199-b212-827d9adba246\") " Jan 20 18:06:56 crc kubenswrapper[4558]: I0120 18:06:56.950141 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e980f297-354c-4199-b212-827d9adba246-inventory\") pod \"e980f297-354c-4199-b212-827d9adba246\" (UID: \"e980f297-354c-4199-b212-827d9adba246\") " Jan 20 18:06:56 crc kubenswrapper[4558]: I0120 18:06:56.957986 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e980f297-354c-4199-b212-827d9adba246-kube-api-access-rjbbn" (OuterVolumeSpecName: "kube-api-access-rjbbn") pod "e980f297-354c-4199-b212-827d9adba246" (UID: "e980f297-354c-4199-b212-827d9adba246"). InnerVolumeSpecName "kube-api-access-rjbbn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:06:56 crc kubenswrapper[4558]: I0120 18:06:56.968853 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e980f297-354c-4199-b212-827d9adba246-ssh-key-edpm-compute-no-nodes" (OuterVolumeSpecName: "ssh-key-edpm-compute-no-nodes") pod "e980f297-354c-4199-b212-827d9adba246" (UID: "e980f297-354c-4199-b212-827d9adba246"). InnerVolumeSpecName "ssh-key-edpm-compute-no-nodes". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:06:56 crc kubenswrapper[4558]: I0120 18:06:56.969864 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e980f297-354c-4199-b212-827d9adba246-inventory" (OuterVolumeSpecName: "inventory") pod "e980f297-354c-4199-b212-827d9adba246" (UID: "e980f297-354c-4199-b212-827d9adba246"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:06:57 crc kubenswrapper[4558]: I0120 18:06:57.054139 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rjbbn\" (UniqueName: \"kubernetes.io/projected/e980f297-354c-4199-b212-827d9adba246-kube-api-access-rjbbn\") on node \"crc\" DevicePath \"\"" Jan 20 18:06:57 crc kubenswrapper[4558]: I0120 18:06:57.054194 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/e980f297-354c-4199-b212-827d9adba246-ssh-key-edpm-compute-no-nodes\") on node \"crc\" DevicePath \"\"" Jan 20 18:06:57 crc kubenswrapper[4558]: I0120 18:06:57.054213 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e980f297-354c-4199-b212-827d9adba246-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:06:57 crc kubenswrapper[4558]: I0120 18:06:57.659471 4558 generic.go:334] "Generic (PLEG): container finished" podID="d49ebbdd-1136-455b-9b2a-ab3b0762be10" containerID="14859c94b3a97a4a167195a091a21cb8405b3f5caeb435a5e88ab30ce5ed7e79" exitCode=0 Jan 20 18:06:57 crc kubenswrapper[4558]: I0120 18:06:57.659591 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vbwj6" event={"ID":"d49ebbdd-1136-455b-9b2a-ab3b0762be10","Type":"ContainerDied","Data":"14859c94b3a97a4a167195a091a21cb8405b3f5caeb435a5e88ab30ce5ed7e79"} Jan 20 18:06:57 crc kubenswrapper[4558]: I0120 18:06:57.662298 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/configure-os-edpm-multinodeset-edpm-compute-no-nodes-t4wsc" event={"ID":"e980f297-354c-4199-b212-827d9adba246","Type":"ContainerDied","Data":"799de5bc8c197274a39bfd1ee80318211f5f2bdd5f3cee24ea7c8dcaa6e4b00c"} Jan 20 18:06:57 crc kubenswrapper[4558]: I0120 18:06:57.662341 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/configure-os-edpm-multinodeset-edpm-compute-no-nodes-t4wsc" Jan 20 18:06:57 crc kubenswrapper[4558]: I0120 18:06:57.662344 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="799de5bc8c197274a39bfd1ee80318211f5f2bdd5f3cee24ea7c8dcaa6e4b00c" Jan 20 18:06:57 crc kubenswrapper[4558]: I0120 18:06:57.717722 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/run-os-edpm-multinodeset-edpm-compute-no-nodes-8ll6g"] Jan 20 18:06:57 crc kubenswrapper[4558]: E0120 18:06:57.718096 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e980f297-354c-4199-b212-827d9adba246" containerName="configure-os-edpm-multinodeset-edpm-compute-no-nodes" Jan 20 18:06:57 crc kubenswrapper[4558]: I0120 18:06:57.718118 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e980f297-354c-4199-b212-827d9adba246" containerName="configure-os-edpm-multinodeset-edpm-compute-no-nodes" Jan 20 18:06:57 crc kubenswrapper[4558]: I0120 18:06:57.718315 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e980f297-354c-4199-b212-827d9adba246" containerName="configure-os-edpm-multinodeset-edpm-compute-no-nodes" Jan 20 18:06:57 crc kubenswrapper[4558]: I0120 18:06:57.718859 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/run-os-edpm-multinodeset-edpm-compute-no-nodes-8ll6g" Jan 20 18:06:57 crc kubenswrapper[4558]: I0120 18:06:57.721820 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 18:06:57 crc kubenswrapper[4558]: I0120 18:06:57.722081 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-edpm-compute-no-nodes" Jan 20 18:06:57 crc kubenswrapper[4558]: I0120 18:06:57.722666 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"edpm-compute-no-nodes-dockercfg-tljmj" Jan 20 18:06:57 crc kubenswrapper[4558]: I0120 18:06:57.728616 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/run-os-edpm-multinodeset-edpm-compute-no-nodes-8ll6g"] Jan 20 18:06:57 crc kubenswrapper[4558]: I0120 18:06:57.730995 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 18:06:57 crc kubenswrapper[4558]: I0120 18:06:57.868785 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8mf6s\" (UniqueName: \"kubernetes.io/projected/438cb630-c06a-4838-a105-cc05955aafbe-kube-api-access-8mf6s\") pod \"run-os-edpm-multinodeset-edpm-compute-no-nodes-8ll6g\" (UID: \"438cb630-c06a-4838-a105-cc05955aafbe\") " pod="openstack-kuttl-tests/run-os-edpm-multinodeset-edpm-compute-no-nodes-8ll6g" Jan 20 18:06:57 crc kubenswrapper[4558]: I0120 18:06:57.868841 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/438cb630-c06a-4838-a105-cc05955aafbe-inventory\") pod \"run-os-edpm-multinodeset-edpm-compute-no-nodes-8ll6g\" (UID: \"438cb630-c06a-4838-a105-cc05955aafbe\") " pod="openstack-kuttl-tests/run-os-edpm-multinodeset-edpm-compute-no-nodes-8ll6g" Jan 20 18:06:57 crc kubenswrapper[4558]: I0120 18:06:57.868892 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/438cb630-c06a-4838-a105-cc05955aafbe-ssh-key-edpm-compute-no-nodes\") pod \"run-os-edpm-multinodeset-edpm-compute-no-nodes-8ll6g\" (UID: \"438cb630-c06a-4838-a105-cc05955aafbe\") " pod="openstack-kuttl-tests/run-os-edpm-multinodeset-edpm-compute-no-nodes-8ll6g" Jan 20 18:06:57 crc kubenswrapper[4558]: I0120 18:06:57.969544 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8mf6s\" (UniqueName: \"kubernetes.io/projected/438cb630-c06a-4838-a105-cc05955aafbe-kube-api-access-8mf6s\") pod \"run-os-edpm-multinodeset-edpm-compute-no-nodes-8ll6g\" (UID: \"438cb630-c06a-4838-a105-cc05955aafbe\") " pod="openstack-kuttl-tests/run-os-edpm-multinodeset-edpm-compute-no-nodes-8ll6g" Jan 20 18:06:57 crc kubenswrapper[4558]: I0120 18:06:57.969590 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/438cb630-c06a-4838-a105-cc05955aafbe-inventory\") pod \"run-os-edpm-multinodeset-edpm-compute-no-nodes-8ll6g\" (UID: \"438cb630-c06a-4838-a105-cc05955aafbe\") " pod="openstack-kuttl-tests/run-os-edpm-multinodeset-edpm-compute-no-nodes-8ll6g" Jan 20 18:06:57 crc kubenswrapper[4558]: I0120 18:06:57.969629 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/438cb630-c06a-4838-a105-cc05955aafbe-ssh-key-edpm-compute-no-nodes\") pod \"run-os-edpm-multinodeset-edpm-compute-no-nodes-8ll6g\" (UID: \"438cb630-c06a-4838-a105-cc05955aafbe\") " pod="openstack-kuttl-tests/run-os-edpm-multinodeset-edpm-compute-no-nodes-8ll6g" Jan 20 18:06:57 crc kubenswrapper[4558]: I0120 18:06:57.975916 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/438cb630-c06a-4838-a105-cc05955aafbe-ssh-key-edpm-compute-no-nodes\") pod \"run-os-edpm-multinodeset-edpm-compute-no-nodes-8ll6g\" (UID: \"438cb630-c06a-4838-a105-cc05955aafbe\") " pod="openstack-kuttl-tests/run-os-edpm-multinodeset-edpm-compute-no-nodes-8ll6g" Jan 20 18:06:57 crc kubenswrapper[4558]: I0120 18:06:57.984326 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/438cb630-c06a-4838-a105-cc05955aafbe-inventory\") pod \"run-os-edpm-multinodeset-edpm-compute-no-nodes-8ll6g\" (UID: \"438cb630-c06a-4838-a105-cc05955aafbe\") " pod="openstack-kuttl-tests/run-os-edpm-multinodeset-edpm-compute-no-nodes-8ll6g" Jan 20 18:06:57 crc kubenswrapper[4558]: I0120 18:06:57.984819 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8mf6s\" (UniqueName: \"kubernetes.io/projected/438cb630-c06a-4838-a105-cc05955aafbe-kube-api-access-8mf6s\") pod \"run-os-edpm-multinodeset-edpm-compute-no-nodes-8ll6g\" (UID: \"438cb630-c06a-4838-a105-cc05955aafbe\") " pod="openstack-kuttl-tests/run-os-edpm-multinodeset-edpm-compute-no-nodes-8ll6g" Jan 20 18:06:58 crc kubenswrapper[4558]: I0120 18:06:58.033593 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/run-os-edpm-multinodeset-edpm-compute-no-nodes-8ll6g" Jan 20 18:06:58 crc kubenswrapper[4558]: I0120 18:06:58.439082 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/run-os-edpm-multinodeset-edpm-compute-no-nodes-8ll6g"] Jan 20 18:06:58 crc kubenswrapper[4558]: W0120 18:06:58.442497 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod438cb630_c06a_4838_a105_cc05955aafbe.slice/crio-1059cecd723e4b324dbc2ee509c7235e80c8e9fc6b8737a269718160ed92d90c WatchSource:0}: Error finding container 1059cecd723e4b324dbc2ee509c7235e80c8e9fc6b8737a269718160ed92d90c: Status 404 returned error can't find the container with id 1059cecd723e4b324dbc2ee509c7235e80c8e9fc6b8737a269718160ed92d90c Jan 20 18:06:58 crc kubenswrapper[4558]: I0120 18:06:58.675055 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vbwj6" event={"ID":"d49ebbdd-1136-455b-9b2a-ab3b0762be10","Type":"ContainerStarted","Data":"b7ba2e2ebe08bfd5eb017bae6bd2465a3ca074288f7d96a089aa5a2bd39ed3b3"} Jan 20 18:06:58 crc kubenswrapper[4558]: I0120 18:06:58.677548 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/run-os-edpm-multinodeset-edpm-compute-no-nodes-8ll6g" event={"ID":"438cb630-c06a-4838-a105-cc05955aafbe","Type":"ContainerStarted","Data":"1059cecd723e4b324dbc2ee509c7235e80c8e9fc6b8737a269718160ed92d90c"} Jan 20 18:06:58 crc kubenswrapper[4558]: I0120 18:06:58.701744 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-vbwj6" podStartSLOduration=2.085332017 podStartE2EDuration="3.701725777s" podCreationTimestamp="2026-01-20 18:06:55 +0000 UTC" firstStartedPulling="2026-01-20 18:06:56.64580101 +0000 UTC m=+5110.406138977" lastFinishedPulling="2026-01-20 18:06:58.262194771 +0000 UTC m=+5112.022532737" observedRunningTime="2026-01-20 18:06:58.69288838 +0000 UTC m=+5112.453226347" watchObservedRunningTime="2026-01-20 18:06:58.701725777 +0000 UTC m=+5112.462063744" Jan 20 18:06:59 crc kubenswrapper[4558]: I0120 18:06:59.689247 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/run-os-edpm-multinodeset-edpm-compute-no-nodes-8ll6g" event={"ID":"438cb630-c06a-4838-a105-cc05955aafbe","Type":"ContainerStarted","Data":"c53a5a80f62744434ae4cb90eb9c46ceac977fddfec911746dba64fed01b7d9c"} Jan 20 18:06:59 crc kubenswrapper[4558]: I0120 18:06:59.707835 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/run-os-edpm-multinodeset-edpm-compute-no-nodes-8ll6g" podStartSLOduration=2.025406901 podStartE2EDuration="2.707811137s" podCreationTimestamp="2026-01-20 18:06:57 +0000 UTC" firstStartedPulling="2026-01-20 18:06:58.445545005 +0000 UTC m=+5112.205882972" lastFinishedPulling="2026-01-20 18:06:59.127949241 +0000 UTC m=+5112.888287208" observedRunningTime="2026-01-20 18:06:59.704056568 +0000 UTC m=+5113.464394535" watchObservedRunningTime="2026-01-20 18:06:59.707811137 +0000 UTC m=+5113.468149103" Jan 20 18:07:00 crc kubenswrapper[4558]: I0120 18:07:00.700295 4558 generic.go:334] "Generic (PLEG): container finished" podID="438cb630-c06a-4838-a105-cc05955aafbe" containerID="c53a5a80f62744434ae4cb90eb9c46ceac977fddfec911746dba64fed01b7d9c" exitCode=0 Jan 20 18:07:00 crc kubenswrapper[4558]: I0120 18:07:00.700354 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-kuttl-tests/run-os-edpm-multinodeset-edpm-compute-no-nodes-8ll6g" event={"ID":"438cb630-c06a-4838-a105-cc05955aafbe","Type":"ContainerDied","Data":"c53a5a80f62744434ae4cb90eb9c46ceac977fddfec911746dba64fed01b7d9c"} Jan 20 18:07:01 crc kubenswrapper[4558]: I0120 18:07:01.978267 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/run-os-edpm-multinodeset-edpm-compute-no-nodes-8ll6g" Jan 20 18:07:02 crc kubenswrapper[4558]: I0120 18:07:02.029487 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8mf6s\" (UniqueName: \"kubernetes.io/projected/438cb630-c06a-4838-a105-cc05955aafbe-kube-api-access-8mf6s\") pod \"438cb630-c06a-4838-a105-cc05955aafbe\" (UID: \"438cb630-c06a-4838-a105-cc05955aafbe\") " Jan 20 18:07:02 crc kubenswrapper[4558]: I0120 18:07:02.029779 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/438cb630-c06a-4838-a105-cc05955aafbe-inventory\") pod \"438cb630-c06a-4838-a105-cc05955aafbe\" (UID: \"438cb630-c06a-4838-a105-cc05955aafbe\") " Jan 20 18:07:02 crc kubenswrapper[4558]: I0120 18:07:02.029848 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/438cb630-c06a-4838-a105-cc05955aafbe-ssh-key-edpm-compute-no-nodes\") pod \"438cb630-c06a-4838-a105-cc05955aafbe\" (UID: \"438cb630-c06a-4838-a105-cc05955aafbe\") " Jan 20 18:07:02 crc kubenswrapper[4558]: I0120 18:07:02.036009 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/438cb630-c06a-4838-a105-cc05955aafbe-kube-api-access-8mf6s" (OuterVolumeSpecName: "kube-api-access-8mf6s") pod "438cb630-c06a-4838-a105-cc05955aafbe" (UID: "438cb630-c06a-4838-a105-cc05955aafbe"). InnerVolumeSpecName "kube-api-access-8mf6s". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:07:02 crc kubenswrapper[4558]: I0120 18:07:02.052335 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/438cb630-c06a-4838-a105-cc05955aafbe-ssh-key-edpm-compute-no-nodes" (OuterVolumeSpecName: "ssh-key-edpm-compute-no-nodes") pod "438cb630-c06a-4838-a105-cc05955aafbe" (UID: "438cb630-c06a-4838-a105-cc05955aafbe"). InnerVolumeSpecName "ssh-key-edpm-compute-no-nodes". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:07:02 crc kubenswrapper[4558]: I0120 18:07:02.052412 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/438cb630-c06a-4838-a105-cc05955aafbe-inventory" (OuterVolumeSpecName: "inventory") pod "438cb630-c06a-4838-a105-cc05955aafbe" (UID: "438cb630-c06a-4838-a105-cc05955aafbe"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:07:02 crc kubenswrapper[4558]: I0120 18:07:02.132820 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/438cb630-c06a-4838-a105-cc05955aafbe-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:07:02 crc kubenswrapper[4558]: I0120 18:07:02.132858 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/438cb630-c06a-4838-a105-cc05955aafbe-ssh-key-edpm-compute-no-nodes\") on node \"crc\" DevicePath \"\"" Jan 20 18:07:02 crc kubenswrapper[4558]: I0120 18:07:02.132885 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8mf6s\" (UniqueName: \"kubernetes.io/projected/438cb630-c06a-4838-a105-cc05955aafbe-kube-api-access-8mf6s\") on node \"crc\" DevicePath \"\"" Jan 20 18:07:02 crc kubenswrapper[4558]: I0120 18:07:02.721843 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/run-os-edpm-multinodeset-edpm-compute-no-nodes-8ll6g" event={"ID":"438cb630-c06a-4838-a105-cc05955aafbe","Type":"ContainerDied","Data":"1059cecd723e4b324dbc2ee509c7235e80c8e9fc6b8737a269718160ed92d90c"} Jan 20 18:07:02 crc kubenswrapper[4558]: I0120 18:07:02.721903 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1059cecd723e4b324dbc2ee509c7235e80c8e9fc6b8737a269718160ed92d90c" Jan 20 18:07:02 crc kubenswrapper[4558]: I0120 18:07:02.722106 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/run-os-edpm-multinodeset-edpm-compute-no-nodes-8ll6g" Jan 20 18:07:02 crc kubenswrapper[4558]: I0120 18:07:02.778560 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67"] Jan 20 18:07:02 crc kubenswrapper[4558]: E0120 18:07:02.779192 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="438cb630-c06a-4838-a105-cc05955aafbe" containerName="run-os-edpm-multinodeset-edpm-compute-no-nodes" Jan 20 18:07:02 crc kubenswrapper[4558]: I0120 18:07:02.779213 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="438cb630-c06a-4838-a105-cc05955aafbe" containerName="run-os-edpm-multinodeset-edpm-compute-no-nodes" Jan 20 18:07:02 crc kubenswrapper[4558]: I0120 18:07:02.779419 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="438cb630-c06a-4838-a105-cc05955aafbe" containerName="run-os-edpm-multinodeset-edpm-compute-no-nodes" Jan 20 18:07:02 crc kubenswrapper[4558]: I0120 18:07:02.780204 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67" Jan 20 18:07:02 crc kubenswrapper[4558]: I0120 18:07:02.782316 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"edpm-compute-no-nodes-dockercfg-tljmj" Jan 20 18:07:02 crc kubenswrapper[4558]: I0120 18:07:02.782898 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 18:07:02 crc kubenswrapper[4558]: I0120 18:07:02.782981 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"combined-ca-bundle" Jan 20 18:07:02 crc kubenswrapper[4558]: I0120 18:07:02.784037 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-edpm-compute-no-nodes" Jan 20 18:07:02 crc kubenswrapper[4558]: I0120 18:07:02.784321 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 18:07:02 crc kubenswrapper[4558]: I0120 18:07:02.794701 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67"] Jan 20 18:07:02 crc kubenswrapper[4558]: I0120 18:07:02.847099 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/901b7beb-82dc-41fb-90ef-90b10ac3bcff-nova-combined-ca-bundle\") pod \"install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67\" (UID: \"901b7beb-82dc-41fb-90ef-90b10ac3bcff\") " pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67" Jan 20 18:07:02 crc kubenswrapper[4558]: I0120 18:07:02.847152 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/901b7beb-82dc-41fb-90ef-90b10ac3bcff-ovn-combined-ca-bundle\") pod \"install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67\" (UID: \"901b7beb-82dc-41fb-90ef-90b10ac3bcff\") " pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67" Jan 20 18:07:02 crc kubenswrapper[4558]: I0120 18:07:02.847213 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/901b7beb-82dc-41fb-90ef-90b10ac3bcff-neutron-sriov-combined-ca-bundle\") pod \"install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67\" (UID: \"901b7beb-82dc-41fb-90ef-90b10ac3bcff\") " pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67" Jan 20 18:07:02 crc kubenswrapper[4558]: I0120 18:07:02.847241 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/901b7beb-82dc-41fb-90ef-90b10ac3bcff-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67\" (UID: \"901b7beb-82dc-41fb-90ef-90b10ac3bcff\") " pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67" Jan 20 18:07:02 crc kubenswrapper[4558]: I0120 18:07:02.847275 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r58kp\" (UniqueName: 
\"kubernetes.io/projected/901b7beb-82dc-41fb-90ef-90b10ac3bcff-kube-api-access-r58kp\") pod \"install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67\" (UID: \"901b7beb-82dc-41fb-90ef-90b10ac3bcff\") " pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67" Jan 20 18:07:02 crc kubenswrapper[4558]: I0120 18:07:02.847361 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/901b7beb-82dc-41fb-90ef-90b10ac3bcff-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67\" (UID: \"901b7beb-82dc-41fb-90ef-90b10ac3bcff\") " pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67" Jan 20 18:07:02 crc kubenswrapper[4558]: I0120 18:07:02.847404 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/901b7beb-82dc-41fb-90ef-90b10ac3bcff-neutron-dhcp-combined-ca-bundle\") pod \"install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67\" (UID: \"901b7beb-82dc-41fb-90ef-90b10ac3bcff\") " pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67" Jan 20 18:07:02 crc kubenswrapper[4558]: I0120 18:07:02.847461 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/901b7beb-82dc-41fb-90ef-90b10ac3bcff-inventory\") pod \"install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67\" (UID: \"901b7beb-82dc-41fb-90ef-90b10ac3bcff\") " pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67" Jan 20 18:07:02 crc kubenswrapper[4558]: I0120 18:07:02.847538 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/901b7beb-82dc-41fb-90ef-90b10ac3bcff-ssh-key-edpm-compute-no-nodes\") pod \"install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67\" (UID: \"901b7beb-82dc-41fb-90ef-90b10ac3bcff\") " pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67" Jan 20 18:07:02 crc kubenswrapper[4558]: I0120 18:07:02.847564 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/901b7beb-82dc-41fb-90ef-90b10ac3bcff-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67\" (UID: \"901b7beb-82dc-41fb-90ef-90b10ac3bcff\") " pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67" Jan 20 18:07:02 crc kubenswrapper[4558]: I0120 18:07:02.847588 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/901b7beb-82dc-41fb-90ef-90b10ac3bcff-neutron-ovn-combined-ca-bundle\") pod \"install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67\" (UID: \"901b7beb-82dc-41fb-90ef-90b10ac3bcff\") " pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67" Jan 20 18:07:02 crc kubenswrapper[4558]: I0120 18:07:02.949421 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/901b7beb-82dc-41fb-90ef-90b10ac3bcff-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67\" (UID: \"901b7beb-82dc-41fb-90ef-90b10ac3bcff\") " pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67" Jan 20 18:07:02 crc kubenswrapper[4558]: I0120 18:07:02.949479 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r58kp\" (UniqueName: \"kubernetes.io/projected/901b7beb-82dc-41fb-90ef-90b10ac3bcff-kube-api-access-r58kp\") pod \"install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67\" (UID: \"901b7beb-82dc-41fb-90ef-90b10ac3bcff\") " pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67" Jan 20 18:07:02 crc kubenswrapper[4558]: I0120 18:07:02.949547 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/901b7beb-82dc-41fb-90ef-90b10ac3bcff-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67\" (UID: \"901b7beb-82dc-41fb-90ef-90b10ac3bcff\") " pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67" Jan 20 18:07:02 crc kubenswrapper[4558]: I0120 18:07:02.949570 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/901b7beb-82dc-41fb-90ef-90b10ac3bcff-neutron-dhcp-combined-ca-bundle\") pod \"install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67\" (UID: \"901b7beb-82dc-41fb-90ef-90b10ac3bcff\") " pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67" Jan 20 18:07:02 crc kubenswrapper[4558]: I0120 18:07:02.949599 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/901b7beb-82dc-41fb-90ef-90b10ac3bcff-inventory\") pod \"install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67\" (UID: \"901b7beb-82dc-41fb-90ef-90b10ac3bcff\") " pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67" Jan 20 18:07:02 crc kubenswrapper[4558]: I0120 18:07:02.949648 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/901b7beb-82dc-41fb-90ef-90b10ac3bcff-ssh-key-edpm-compute-no-nodes\") pod \"install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67\" (UID: \"901b7beb-82dc-41fb-90ef-90b10ac3bcff\") " pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67" Jan 20 18:07:02 crc kubenswrapper[4558]: I0120 18:07:02.949673 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/901b7beb-82dc-41fb-90ef-90b10ac3bcff-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67\" (UID: \"901b7beb-82dc-41fb-90ef-90b10ac3bcff\") " pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67" Jan 20 18:07:02 crc kubenswrapper[4558]: I0120 18:07:02.949693 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/901b7beb-82dc-41fb-90ef-90b10ac3bcff-neutron-ovn-combined-ca-bundle\") pod \"install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67\" (UID: 
\"901b7beb-82dc-41fb-90ef-90b10ac3bcff\") " pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67" Jan 20 18:07:02 crc kubenswrapper[4558]: I0120 18:07:02.949726 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/901b7beb-82dc-41fb-90ef-90b10ac3bcff-nova-combined-ca-bundle\") pod \"install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67\" (UID: \"901b7beb-82dc-41fb-90ef-90b10ac3bcff\") " pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67" Jan 20 18:07:02 crc kubenswrapper[4558]: I0120 18:07:02.949758 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/901b7beb-82dc-41fb-90ef-90b10ac3bcff-ovn-combined-ca-bundle\") pod \"install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67\" (UID: \"901b7beb-82dc-41fb-90ef-90b10ac3bcff\") " pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67" Jan 20 18:07:02 crc kubenswrapper[4558]: I0120 18:07:02.949781 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/901b7beb-82dc-41fb-90ef-90b10ac3bcff-neutron-sriov-combined-ca-bundle\") pod \"install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67\" (UID: \"901b7beb-82dc-41fb-90ef-90b10ac3bcff\") " pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67" Jan 20 18:07:02 crc kubenswrapper[4558]: I0120 18:07:02.955648 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/901b7beb-82dc-41fb-90ef-90b10ac3bcff-neutron-dhcp-combined-ca-bundle\") pod \"install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67\" (UID: \"901b7beb-82dc-41fb-90ef-90b10ac3bcff\") " pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67" Jan 20 18:07:02 crc kubenswrapper[4558]: I0120 18:07:02.955829 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/901b7beb-82dc-41fb-90ef-90b10ac3bcff-nova-combined-ca-bundle\") pod \"install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67\" (UID: \"901b7beb-82dc-41fb-90ef-90b10ac3bcff\") " pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67" Jan 20 18:07:02 crc kubenswrapper[4558]: I0120 18:07:02.955949 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/901b7beb-82dc-41fb-90ef-90b10ac3bcff-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67\" (UID: \"901b7beb-82dc-41fb-90ef-90b10ac3bcff\") " pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67" Jan 20 18:07:02 crc kubenswrapper[4558]: I0120 18:07:02.956204 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/901b7beb-82dc-41fb-90ef-90b10ac3bcff-neutron-ovn-combined-ca-bundle\") pod \"install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67\" (UID: \"901b7beb-82dc-41fb-90ef-90b10ac3bcff\") " pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67" Jan 20 18:07:02 crc kubenswrapper[4558]: I0120 
18:07:02.956600 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/901b7beb-82dc-41fb-90ef-90b10ac3bcff-ssh-key-edpm-compute-no-nodes\") pod \"install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67\" (UID: \"901b7beb-82dc-41fb-90ef-90b10ac3bcff\") " pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67" Jan 20 18:07:02 crc kubenswrapper[4558]: I0120 18:07:02.957425 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/901b7beb-82dc-41fb-90ef-90b10ac3bcff-ovn-combined-ca-bundle\") pod \"install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67\" (UID: \"901b7beb-82dc-41fb-90ef-90b10ac3bcff\") " pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67" Jan 20 18:07:02 crc kubenswrapper[4558]: I0120 18:07:02.958822 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/901b7beb-82dc-41fb-90ef-90b10ac3bcff-neutron-sriov-combined-ca-bundle\") pod \"install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67\" (UID: \"901b7beb-82dc-41fb-90ef-90b10ac3bcff\") " pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67" Jan 20 18:07:02 crc kubenswrapper[4558]: I0120 18:07:02.958987 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/901b7beb-82dc-41fb-90ef-90b10ac3bcff-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67\" (UID: \"901b7beb-82dc-41fb-90ef-90b10ac3bcff\") " pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67" Jan 20 18:07:02 crc kubenswrapper[4558]: I0120 18:07:02.959023 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/901b7beb-82dc-41fb-90ef-90b10ac3bcff-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67\" (UID: \"901b7beb-82dc-41fb-90ef-90b10ac3bcff\") " pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67" Jan 20 18:07:02 crc kubenswrapper[4558]: I0120 18:07:02.959001 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/901b7beb-82dc-41fb-90ef-90b10ac3bcff-inventory\") pod \"install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67\" (UID: \"901b7beb-82dc-41fb-90ef-90b10ac3bcff\") " pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67" Jan 20 18:07:02 crc kubenswrapper[4558]: I0120 18:07:02.965679 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r58kp\" (UniqueName: \"kubernetes.io/projected/901b7beb-82dc-41fb-90ef-90b10ac3bcff-kube-api-access-r58kp\") pod \"install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67\" (UID: \"901b7beb-82dc-41fb-90ef-90b10ac3bcff\") " pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67" Jan 20 18:07:03 crc kubenswrapper[4558]: I0120 18:07:03.099767 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67" Jan 20 18:07:03 crc kubenswrapper[4558]: I0120 18:07:03.484618 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67"] Jan 20 18:07:03 crc kubenswrapper[4558]: W0120 18:07:03.489847 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod901b7beb_82dc_41fb_90ef_90b10ac3bcff.slice/crio-492c52b846e08ee64e47c33646ce18a7e2259b3722457db52e72f9719e957871 WatchSource:0}: Error finding container 492c52b846e08ee64e47c33646ce18a7e2259b3722457db52e72f9719e957871: Status 404 returned error can't find the container with id 492c52b846e08ee64e47c33646ce18a7e2259b3722457db52e72f9719e957871 Jan 20 18:07:03 crc kubenswrapper[4558]: I0120 18:07:03.733591 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67" event={"ID":"901b7beb-82dc-41fb-90ef-90b10ac3bcff","Type":"ContainerStarted","Data":"492c52b846e08ee64e47c33646ce18a7e2259b3722457db52e72f9719e957871"} Jan 20 18:07:04 crc kubenswrapper[4558]: I0120 18:07:04.746041 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67" event={"ID":"901b7beb-82dc-41fb-90ef-90b10ac3bcff","Type":"ContainerStarted","Data":"2185d2eb9908585da26e7d011d3e7ff0d23cc38c3ae717c53700ba11888b4945"} Jan 20 18:07:04 crc kubenswrapper[4558]: I0120 18:07:04.770487 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67" podStartSLOduration=2.267356307 podStartE2EDuration="2.770467444s" podCreationTimestamp="2026-01-20 18:07:02 +0000 UTC" firstStartedPulling="2026-01-20 18:07:03.492729273 +0000 UTC m=+5117.253067231" lastFinishedPulling="2026-01-20 18:07:03.995840401 +0000 UTC m=+5117.756178368" observedRunningTime="2026-01-20 18:07:04.762692736 +0000 UTC m=+5118.523030703" watchObservedRunningTime="2026-01-20 18:07:04.770467444 +0000 UTC m=+5118.530805411" Jan 20 18:07:05 crc kubenswrapper[4558]: I0120 18:07:05.660868 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-vbwj6" Jan 20 18:07:05 crc kubenswrapper[4558]: I0120 18:07:05.661288 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-vbwj6" Jan 20 18:07:05 crc kubenswrapper[4558]: I0120 18:07:05.761127 4558 generic.go:334] "Generic (PLEG): container finished" podID="901b7beb-82dc-41fb-90ef-90b10ac3bcff" containerID="2185d2eb9908585da26e7d011d3e7ff0d23cc38c3ae717c53700ba11888b4945" exitCode=0 Jan 20 18:07:05 crc kubenswrapper[4558]: I0120 18:07:05.761248 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67" event={"ID":"901b7beb-82dc-41fb-90ef-90b10ac3bcff","Type":"ContainerDied","Data":"2185d2eb9908585da26e7d011d3e7ff0d23cc38c3ae717c53700ba11888b4945"} Jan 20 18:07:06 crc kubenswrapper[4558]: I0120 18:07:06.025779 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-vbwj6" Jan 20 18:07:06 crc kubenswrapper[4558]: I0120 18:07:06.061556 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="ready" pod="openshift-marketplace/redhat-marketplace-vbwj6" Jan 20 18:07:06 crc kubenswrapper[4558]: I0120 18:07:06.265258 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vbwj6"] Jan 20 18:07:07 crc kubenswrapper[4558]: I0120 18:07:07.017381 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67" Jan 20 18:07:07 crc kubenswrapper[4558]: I0120 18:07:07.129092 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/901b7beb-82dc-41fb-90ef-90b10ac3bcff-inventory\") pod \"901b7beb-82dc-41fb-90ef-90b10ac3bcff\" (UID: \"901b7beb-82dc-41fb-90ef-90b10ac3bcff\") " Jan 20 18:07:07 crc kubenswrapper[4558]: I0120 18:07:07.129145 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/901b7beb-82dc-41fb-90ef-90b10ac3bcff-neutron-metadata-combined-ca-bundle\") pod \"901b7beb-82dc-41fb-90ef-90b10ac3bcff\" (UID: \"901b7beb-82dc-41fb-90ef-90b10ac3bcff\") " Jan 20 18:07:07 crc kubenswrapper[4558]: I0120 18:07:07.129195 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/901b7beb-82dc-41fb-90ef-90b10ac3bcff-neutron-sriov-combined-ca-bundle\") pod \"901b7beb-82dc-41fb-90ef-90b10ac3bcff\" (UID: \"901b7beb-82dc-41fb-90ef-90b10ac3bcff\") " Jan 20 18:07:07 crc kubenswrapper[4558]: I0120 18:07:07.129273 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/901b7beb-82dc-41fb-90ef-90b10ac3bcff-neutron-dhcp-combined-ca-bundle\") pod \"901b7beb-82dc-41fb-90ef-90b10ac3bcff\" (UID: \"901b7beb-82dc-41fb-90ef-90b10ac3bcff\") " Jan 20 18:07:07 crc kubenswrapper[4558]: I0120 18:07:07.129345 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/901b7beb-82dc-41fb-90ef-90b10ac3bcff-ssh-key-edpm-compute-no-nodes\") pod \"901b7beb-82dc-41fb-90ef-90b10ac3bcff\" (UID: \"901b7beb-82dc-41fb-90ef-90b10ac3bcff\") " Jan 20 18:07:07 crc kubenswrapper[4558]: I0120 18:07:07.129419 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/901b7beb-82dc-41fb-90ef-90b10ac3bcff-bootstrap-combined-ca-bundle\") pod \"901b7beb-82dc-41fb-90ef-90b10ac3bcff\" (UID: \"901b7beb-82dc-41fb-90ef-90b10ac3bcff\") " Jan 20 18:07:07 crc kubenswrapper[4558]: I0120 18:07:07.129501 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/901b7beb-82dc-41fb-90ef-90b10ac3bcff-nova-combined-ca-bundle\") pod \"901b7beb-82dc-41fb-90ef-90b10ac3bcff\" (UID: \"901b7beb-82dc-41fb-90ef-90b10ac3bcff\") " Jan 20 18:07:07 crc kubenswrapper[4558]: I0120 18:07:07.129566 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/901b7beb-82dc-41fb-90ef-90b10ac3bcff-neutron-ovn-combined-ca-bundle\") pod \"901b7beb-82dc-41fb-90ef-90b10ac3bcff\" (UID: \"901b7beb-82dc-41fb-90ef-90b10ac3bcff\") " Jan 20 18:07:07 crc kubenswrapper[4558]: I0120 18:07:07.129622 
4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r58kp\" (UniqueName: \"kubernetes.io/projected/901b7beb-82dc-41fb-90ef-90b10ac3bcff-kube-api-access-r58kp\") pod \"901b7beb-82dc-41fb-90ef-90b10ac3bcff\" (UID: \"901b7beb-82dc-41fb-90ef-90b10ac3bcff\") " Jan 20 18:07:07 crc kubenswrapper[4558]: I0120 18:07:07.129692 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/901b7beb-82dc-41fb-90ef-90b10ac3bcff-ovn-combined-ca-bundle\") pod \"901b7beb-82dc-41fb-90ef-90b10ac3bcff\" (UID: \"901b7beb-82dc-41fb-90ef-90b10ac3bcff\") " Jan 20 18:07:07 crc kubenswrapper[4558]: I0120 18:07:07.129760 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/901b7beb-82dc-41fb-90ef-90b10ac3bcff-libvirt-combined-ca-bundle\") pod \"901b7beb-82dc-41fb-90ef-90b10ac3bcff\" (UID: \"901b7beb-82dc-41fb-90ef-90b10ac3bcff\") " Jan 20 18:07:07 crc kubenswrapper[4558]: I0120 18:07:07.136566 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/901b7beb-82dc-41fb-90ef-90b10ac3bcff-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "901b7beb-82dc-41fb-90ef-90b10ac3bcff" (UID: "901b7beb-82dc-41fb-90ef-90b10ac3bcff"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:07:07 crc kubenswrapper[4558]: I0120 18:07:07.136885 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/901b7beb-82dc-41fb-90ef-90b10ac3bcff-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "901b7beb-82dc-41fb-90ef-90b10ac3bcff" (UID: "901b7beb-82dc-41fb-90ef-90b10ac3bcff"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:07:07 crc kubenswrapper[4558]: I0120 18:07:07.137236 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/901b7beb-82dc-41fb-90ef-90b10ac3bcff-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "901b7beb-82dc-41fb-90ef-90b10ac3bcff" (UID: "901b7beb-82dc-41fb-90ef-90b10ac3bcff"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:07:07 crc kubenswrapper[4558]: I0120 18:07:07.137288 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/901b7beb-82dc-41fb-90ef-90b10ac3bcff-neutron-ovn-combined-ca-bundle" (OuterVolumeSpecName: "neutron-ovn-combined-ca-bundle") pod "901b7beb-82dc-41fb-90ef-90b10ac3bcff" (UID: "901b7beb-82dc-41fb-90ef-90b10ac3bcff"). InnerVolumeSpecName "neutron-ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:07:07 crc kubenswrapper[4558]: I0120 18:07:07.137359 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/901b7beb-82dc-41fb-90ef-90b10ac3bcff-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "901b7beb-82dc-41fb-90ef-90b10ac3bcff" (UID: "901b7beb-82dc-41fb-90ef-90b10ac3bcff"). InnerVolumeSpecName "ovn-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:07:07 crc kubenswrapper[4558]: I0120 18:07:07.137677 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/901b7beb-82dc-41fb-90ef-90b10ac3bcff-neutron-sriov-combined-ca-bundle" (OuterVolumeSpecName: "neutron-sriov-combined-ca-bundle") pod "901b7beb-82dc-41fb-90ef-90b10ac3bcff" (UID: "901b7beb-82dc-41fb-90ef-90b10ac3bcff"). InnerVolumeSpecName "neutron-sriov-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:07:07 crc kubenswrapper[4558]: I0120 18:07:07.137900 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/901b7beb-82dc-41fb-90ef-90b10ac3bcff-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "901b7beb-82dc-41fb-90ef-90b10ac3bcff" (UID: "901b7beb-82dc-41fb-90ef-90b10ac3bcff"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:07:07 crc kubenswrapper[4558]: I0120 18:07:07.138462 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/901b7beb-82dc-41fb-90ef-90b10ac3bcff-kube-api-access-r58kp" (OuterVolumeSpecName: "kube-api-access-r58kp") pod "901b7beb-82dc-41fb-90ef-90b10ac3bcff" (UID: "901b7beb-82dc-41fb-90ef-90b10ac3bcff"). InnerVolumeSpecName "kube-api-access-r58kp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:07:07 crc kubenswrapper[4558]: I0120 18:07:07.138756 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/901b7beb-82dc-41fb-90ef-90b10ac3bcff-neutron-dhcp-combined-ca-bundle" (OuterVolumeSpecName: "neutron-dhcp-combined-ca-bundle") pod "901b7beb-82dc-41fb-90ef-90b10ac3bcff" (UID: "901b7beb-82dc-41fb-90ef-90b10ac3bcff"). InnerVolumeSpecName "neutron-dhcp-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:07:07 crc kubenswrapper[4558]: I0120 18:07:07.152541 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/901b7beb-82dc-41fb-90ef-90b10ac3bcff-inventory" (OuterVolumeSpecName: "inventory") pod "901b7beb-82dc-41fb-90ef-90b10ac3bcff" (UID: "901b7beb-82dc-41fb-90ef-90b10ac3bcff"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:07:07 crc kubenswrapper[4558]: I0120 18:07:07.153570 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/901b7beb-82dc-41fb-90ef-90b10ac3bcff-ssh-key-edpm-compute-no-nodes" (OuterVolumeSpecName: "ssh-key-edpm-compute-no-nodes") pod "901b7beb-82dc-41fb-90ef-90b10ac3bcff" (UID: "901b7beb-82dc-41fb-90ef-90b10ac3bcff"). InnerVolumeSpecName "ssh-key-edpm-compute-no-nodes". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:07:07 crc kubenswrapper[4558]: I0120 18:07:07.232248 4558 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/901b7beb-82dc-41fb-90ef-90b10ac3bcff-neutron-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:07:07 crc kubenswrapper[4558]: I0120 18:07:07.232445 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r58kp\" (UniqueName: \"kubernetes.io/projected/901b7beb-82dc-41fb-90ef-90b10ac3bcff-kube-api-access-r58kp\") on node \"crc\" DevicePath \"\"" Jan 20 18:07:07 crc kubenswrapper[4558]: I0120 18:07:07.232510 4558 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/901b7beb-82dc-41fb-90ef-90b10ac3bcff-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:07:07 crc kubenswrapper[4558]: I0120 18:07:07.232579 4558 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/901b7beb-82dc-41fb-90ef-90b10ac3bcff-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:07:07 crc kubenswrapper[4558]: I0120 18:07:07.232635 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/901b7beb-82dc-41fb-90ef-90b10ac3bcff-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:07:07 crc kubenswrapper[4558]: I0120 18:07:07.232687 4558 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/901b7beb-82dc-41fb-90ef-90b10ac3bcff-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:07:07 crc kubenswrapper[4558]: I0120 18:07:07.232743 4558 reconciler_common.go:293] "Volume detached for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/901b7beb-82dc-41fb-90ef-90b10ac3bcff-neutron-sriov-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:07:07 crc kubenswrapper[4558]: I0120 18:07:07.232803 4558 reconciler_common.go:293] "Volume detached for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/901b7beb-82dc-41fb-90ef-90b10ac3bcff-neutron-dhcp-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:07:07 crc kubenswrapper[4558]: I0120 18:07:07.232898 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/901b7beb-82dc-41fb-90ef-90b10ac3bcff-ssh-key-edpm-compute-no-nodes\") on node \"crc\" DevicePath \"\"" Jan 20 18:07:07 crc kubenswrapper[4558]: I0120 18:07:07.232969 4558 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/901b7beb-82dc-41fb-90ef-90b10ac3bcff-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:07:07 crc kubenswrapper[4558]: I0120 18:07:07.233024 4558 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/901b7beb-82dc-41fb-90ef-90b10ac3bcff-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:07:07 crc kubenswrapper[4558]: I0120 18:07:07.784286 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67" 
event={"ID":"901b7beb-82dc-41fb-90ef-90b10ac3bcff","Type":"ContainerDied","Data":"492c52b846e08ee64e47c33646ce18a7e2259b3722457db52e72f9719e957871"} Jan 20 18:07:07 crc kubenswrapper[4558]: I0120 18:07:07.784650 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="492c52b846e08ee64e47c33646ce18a7e2259b3722457db52e72f9719e957871" Jan 20 18:07:07 crc kubenswrapper[4558]: I0120 18:07:07.784489 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-vbwj6" podUID="d49ebbdd-1136-455b-9b2a-ab3b0762be10" containerName="registry-server" containerID="cri-o://b7ba2e2ebe08bfd5eb017bae6bd2465a3ca074288f7d96a089aa5a2bd39ed3b3" gracePeriod=2 Jan 20 18:07:07 crc kubenswrapper[4558]: I0120 18:07:07.784344 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67" Jan 20 18:07:07 crc kubenswrapper[4558]: I0120 18:07:07.868297 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/ovn-edpm-multinodeset-edpm-compute-no-nodes-22vhv"] Jan 20 18:07:07 crc kubenswrapper[4558]: E0120 18:07:07.868657 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="901b7beb-82dc-41fb-90ef-90b10ac3bcff" containerName="install-certs-edpm-multinodeset-edpm-compute-no-nodes" Jan 20 18:07:07 crc kubenswrapper[4558]: I0120 18:07:07.868678 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="901b7beb-82dc-41fb-90ef-90b10ac3bcff" containerName="install-certs-edpm-multinodeset-edpm-compute-no-nodes" Jan 20 18:07:07 crc kubenswrapper[4558]: I0120 18:07:07.868851 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="901b7beb-82dc-41fb-90ef-90b10ac3bcff" containerName="install-certs-edpm-multinodeset-edpm-compute-no-nodes" Jan 20 18:07:07 crc kubenswrapper[4558]: I0120 18:07:07.869461 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovn-edpm-multinodeset-edpm-compute-no-nodes-22vhv" Jan 20 18:07:07 crc kubenswrapper[4558]: I0120 18:07:07.871441 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"combined-ca-bundle" Jan 20 18:07:07 crc kubenswrapper[4558]: I0120 18:07:07.871869 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 18:07:07 crc kubenswrapper[4558]: I0120 18:07:07.872097 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-edpm-compute-no-nodes" Jan 20 18:07:07 crc kubenswrapper[4558]: I0120 18:07:07.872290 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"ovncontroller-config" Jan 20 18:07:07 crc kubenswrapper[4558]: I0120 18:07:07.872416 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 18:07:07 crc kubenswrapper[4558]: I0120 18:07:07.877808 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovn-edpm-multinodeset-edpm-compute-no-nodes-22vhv"] Jan 20 18:07:07 crc kubenswrapper[4558]: I0120 18:07:07.878574 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"edpm-compute-no-nodes-dockercfg-tljmj" Jan 20 18:07:07 crc kubenswrapper[4558]: I0120 18:07:07.946368 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xxh7w\" (UniqueName: \"kubernetes.io/projected/e84aff63-9fc4-4604-8b6d-69bd89bf437d-kube-api-access-xxh7w\") pod \"ovn-edpm-multinodeset-edpm-compute-no-nodes-22vhv\" (UID: \"e84aff63-9fc4-4604-8b6d-69bd89bf437d\") " pod="openstack-kuttl-tests/ovn-edpm-multinodeset-edpm-compute-no-nodes-22vhv" Jan 20 18:07:07 crc kubenswrapper[4558]: I0120 18:07:07.946411 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e84aff63-9fc4-4604-8b6d-69bd89bf437d-inventory\") pod \"ovn-edpm-multinodeset-edpm-compute-no-nodes-22vhv\" (UID: \"e84aff63-9fc4-4604-8b6d-69bd89bf437d\") " pod="openstack-kuttl-tests/ovn-edpm-multinodeset-edpm-compute-no-nodes-22vhv" Jan 20 18:07:07 crc kubenswrapper[4558]: I0120 18:07:07.946604 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/e84aff63-9fc4-4604-8b6d-69bd89bf437d-ovncontroller-config-0\") pod \"ovn-edpm-multinodeset-edpm-compute-no-nodes-22vhv\" (UID: \"e84aff63-9fc4-4604-8b6d-69bd89bf437d\") " pod="openstack-kuttl-tests/ovn-edpm-multinodeset-edpm-compute-no-nodes-22vhv" Jan 20 18:07:07 crc kubenswrapper[4558]: I0120 18:07:07.946682 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e84aff63-9fc4-4604-8b6d-69bd89bf437d-ovn-combined-ca-bundle\") pod \"ovn-edpm-multinodeset-edpm-compute-no-nodes-22vhv\" (UID: \"e84aff63-9fc4-4604-8b6d-69bd89bf437d\") " pod="openstack-kuttl-tests/ovn-edpm-multinodeset-edpm-compute-no-nodes-22vhv" Jan 20 18:07:07 crc kubenswrapper[4558]: I0120 18:07:07.946724 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: 
\"kubernetes.io/secret/e84aff63-9fc4-4604-8b6d-69bd89bf437d-ssh-key-edpm-compute-no-nodes\") pod \"ovn-edpm-multinodeset-edpm-compute-no-nodes-22vhv\" (UID: \"e84aff63-9fc4-4604-8b6d-69bd89bf437d\") " pod="openstack-kuttl-tests/ovn-edpm-multinodeset-edpm-compute-no-nodes-22vhv" Jan 20 18:07:08 crc kubenswrapper[4558]: I0120 18:07:08.047640 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/e84aff63-9fc4-4604-8b6d-69bd89bf437d-ovncontroller-config-0\") pod \"ovn-edpm-multinodeset-edpm-compute-no-nodes-22vhv\" (UID: \"e84aff63-9fc4-4604-8b6d-69bd89bf437d\") " pod="openstack-kuttl-tests/ovn-edpm-multinodeset-edpm-compute-no-nodes-22vhv" Jan 20 18:07:08 crc kubenswrapper[4558]: I0120 18:07:08.047699 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e84aff63-9fc4-4604-8b6d-69bd89bf437d-ovn-combined-ca-bundle\") pod \"ovn-edpm-multinodeset-edpm-compute-no-nodes-22vhv\" (UID: \"e84aff63-9fc4-4604-8b6d-69bd89bf437d\") " pod="openstack-kuttl-tests/ovn-edpm-multinodeset-edpm-compute-no-nodes-22vhv" Jan 20 18:07:08 crc kubenswrapper[4558]: I0120 18:07:08.047737 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/e84aff63-9fc4-4604-8b6d-69bd89bf437d-ssh-key-edpm-compute-no-nodes\") pod \"ovn-edpm-multinodeset-edpm-compute-no-nodes-22vhv\" (UID: \"e84aff63-9fc4-4604-8b6d-69bd89bf437d\") " pod="openstack-kuttl-tests/ovn-edpm-multinodeset-edpm-compute-no-nodes-22vhv" Jan 20 18:07:08 crc kubenswrapper[4558]: I0120 18:07:08.047760 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xxh7w\" (UniqueName: \"kubernetes.io/projected/e84aff63-9fc4-4604-8b6d-69bd89bf437d-kube-api-access-xxh7w\") pod \"ovn-edpm-multinodeset-edpm-compute-no-nodes-22vhv\" (UID: \"e84aff63-9fc4-4604-8b6d-69bd89bf437d\") " pod="openstack-kuttl-tests/ovn-edpm-multinodeset-edpm-compute-no-nodes-22vhv" Jan 20 18:07:08 crc kubenswrapper[4558]: I0120 18:07:08.047780 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e84aff63-9fc4-4604-8b6d-69bd89bf437d-inventory\") pod \"ovn-edpm-multinodeset-edpm-compute-no-nodes-22vhv\" (UID: \"e84aff63-9fc4-4604-8b6d-69bd89bf437d\") " pod="openstack-kuttl-tests/ovn-edpm-multinodeset-edpm-compute-no-nodes-22vhv" Jan 20 18:07:08 crc kubenswrapper[4558]: I0120 18:07:08.048797 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/e84aff63-9fc4-4604-8b6d-69bd89bf437d-ovncontroller-config-0\") pod \"ovn-edpm-multinodeset-edpm-compute-no-nodes-22vhv\" (UID: \"e84aff63-9fc4-4604-8b6d-69bd89bf437d\") " pod="openstack-kuttl-tests/ovn-edpm-multinodeset-edpm-compute-no-nodes-22vhv" Jan 20 18:07:08 crc kubenswrapper[4558]: I0120 18:07:08.053342 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e84aff63-9fc4-4604-8b6d-69bd89bf437d-ovn-combined-ca-bundle\") pod \"ovn-edpm-multinodeset-edpm-compute-no-nodes-22vhv\" (UID: \"e84aff63-9fc4-4604-8b6d-69bd89bf437d\") " pod="openstack-kuttl-tests/ovn-edpm-multinodeset-edpm-compute-no-nodes-22vhv" Jan 20 18:07:08 crc kubenswrapper[4558]: I0120 18:07:08.053717 4558 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e84aff63-9fc4-4604-8b6d-69bd89bf437d-inventory\") pod \"ovn-edpm-multinodeset-edpm-compute-no-nodes-22vhv\" (UID: \"e84aff63-9fc4-4604-8b6d-69bd89bf437d\") " pod="openstack-kuttl-tests/ovn-edpm-multinodeset-edpm-compute-no-nodes-22vhv" Jan 20 18:07:08 crc kubenswrapper[4558]: I0120 18:07:08.059144 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/e84aff63-9fc4-4604-8b6d-69bd89bf437d-ssh-key-edpm-compute-no-nodes\") pod \"ovn-edpm-multinodeset-edpm-compute-no-nodes-22vhv\" (UID: \"e84aff63-9fc4-4604-8b6d-69bd89bf437d\") " pod="openstack-kuttl-tests/ovn-edpm-multinodeset-edpm-compute-no-nodes-22vhv" Jan 20 18:07:08 crc kubenswrapper[4558]: I0120 18:07:08.062443 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xxh7w\" (UniqueName: \"kubernetes.io/projected/e84aff63-9fc4-4604-8b6d-69bd89bf437d-kube-api-access-xxh7w\") pod \"ovn-edpm-multinodeset-edpm-compute-no-nodes-22vhv\" (UID: \"e84aff63-9fc4-4604-8b6d-69bd89bf437d\") " pod="openstack-kuttl-tests/ovn-edpm-multinodeset-edpm-compute-no-nodes-22vhv" Jan 20 18:07:08 crc kubenswrapper[4558]: I0120 18:07:08.140525 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vbwj6" Jan 20 18:07:08 crc kubenswrapper[4558]: I0120 18:07:08.190527 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovn-edpm-multinodeset-edpm-compute-no-nodes-22vhv" Jan 20 18:07:08 crc kubenswrapper[4558]: I0120 18:07:08.253135 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d49ebbdd-1136-455b-9b2a-ab3b0762be10-utilities\") pod \"d49ebbdd-1136-455b-9b2a-ab3b0762be10\" (UID: \"d49ebbdd-1136-455b-9b2a-ab3b0762be10\") " Jan 20 18:07:08 crc kubenswrapper[4558]: I0120 18:07:08.253186 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d49ebbdd-1136-455b-9b2a-ab3b0762be10-catalog-content\") pod \"d49ebbdd-1136-455b-9b2a-ab3b0762be10\" (UID: \"d49ebbdd-1136-455b-9b2a-ab3b0762be10\") " Jan 20 18:07:08 crc kubenswrapper[4558]: I0120 18:07:08.253265 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hrdll\" (UniqueName: \"kubernetes.io/projected/d49ebbdd-1136-455b-9b2a-ab3b0762be10-kube-api-access-hrdll\") pod \"d49ebbdd-1136-455b-9b2a-ab3b0762be10\" (UID: \"d49ebbdd-1136-455b-9b2a-ab3b0762be10\") " Jan 20 18:07:08 crc kubenswrapper[4558]: I0120 18:07:08.253909 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d49ebbdd-1136-455b-9b2a-ab3b0762be10-utilities" (OuterVolumeSpecName: "utilities") pod "d49ebbdd-1136-455b-9b2a-ab3b0762be10" (UID: "d49ebbdd-1136-455b-9b2a-ab3b0762be10"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:07:08 crc kubenswrapper[4558]: I0120 18:07:08.259335 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d49ebbdd-1136-455b-9b2a-ab3b0762be10-kube-api-access-hrdll" (OuterVolumeSpecName: "kube-api-access-hrdll") pod "d49ebbdd-1136-455b-9b2a-ab3b0762be10" (UID: "d49ebbdd-1136-455b-9b2a-ab3b0762be10"). InnerVolumeSpecName "kube-api-access-hrdll". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:07:08 crc kubenswrapper[4558]: I0120 18:07:08.281662 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d49ebbdd-1136-455b-9b2a-ab3b0762be10-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d49ebbdd-1136-455b-9b2a-ab3b0762be10" (UID: "d49ebbdd-1136-455b-9b2a-ab3b0762be10"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:07:08 crc kubenswrapper[4558]: I0120 18:07:08.354854 4558 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d49ebbdd-1136-455b-9b2a-ab3b0762be10-utilities\") on node \"crc\" DevicePath \"\"" Jan 20 18:07:08 crc kubenswrapper[4558]: I0120 18:07:08.354906 4558 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d49ebbdd-1136-455b-9b2a-ab3b0762be10-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 20 18:07:08 crc kubenswrapper[4558]: I0120 18:07:08.354923 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hrdll\" (UniqueName: \"kubernetes.io/projected/d49ebbdd-1136-455b-9b2a-ab3b0762be10-kube-api-access-hrdll\") on node \"crc\" DevicePath \"\"" Jan 20 18:07:08 crc kubenswrapper[4558]: W0120 18:07:08.575157 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode84aff63_9fc4_4604_8b6d_69bd89bf437d.slice/crio-9e00d4ad46bc272aabc573fb9d0af19c085f3cb3ae82ff9d17a5e59d9e7880ad WatchSource:0}: Error finding container 9e00d4ad46bc272aabc573fb9d0af19c085f3cb3ae82ff9d17a5e59d9e7880ad: Status 404 returned error can't find the container with id 9e00d4ad46bc272aabc573fb9d0af19c085f3cb3ae82ff9d17a5e59d9e7880ad Jan 20 18:07:08 crc kubenswrapper[4558]: I0120 18:07:08.576982 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/ovn-edpm-multinodeset-edpm-compute-no-nodes-22vhv"] Jan 20 18:07:08 crc kubenswrapper[4558]: I0120 18:07:08.799011 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-edpm-multinodeset-edpm-compute-no-nodes-22vhv" event={"ID":"e84aff63-9fc4-4604-8b6d-69bd89bf437d","Type":"ContainerStarted","Data":"9e00d4ad46bc272aabc573fb9d0af19c085f3cb3ae82ff9d17a5e59d9e7880ad"} Jan 20 18:07:08 crc kubenswrapper[4558]: I0120 18:07:08.802785 4558 generic.go:334] "Generic (PLEG): container finished" podID="d49ebbdd-1136-455b-9b2a-ab3b0762be10" containerID="b7ba2e2ebe08bfd5eb017bae6bd2465a3ca074288f7d96a089aa5a2bd39ed3b3" exitCode=0 Jan 20 18:07:08 crc kubenswrapper[4558]: I0120 18:07:08.802855 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vbwj6" event={"ID":"d49ebbdd-1136-455b-9b2a-ab3b0762be10","Type":"ContainerDied","Data":"b7ba2e2ebe08bfd5eb017bae6bd2465a3ca074288f7d96a089aa5a2bd39ed3b3"} Jan 20 18:07:08 crc kubenswrapper[4558]: I0120 18:07:08.802915 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vbwj6" event={"ID":"d49ebbdd-1136-455b-9b2a-ab3b0762be10","Type":"ContainerDied","Data":"ec3dee9e53ea4fe0aea41970d23f5b27ae88fab11e3eb83e183a3cc75ee8b60b"} Jan 20 18:07:08 crc kubenswrapper[4558]: I0120 18:07:08.802946 4558 scope.go:117] "RemoveContainer" containerID="b7ba2e2ebe08bfd5eb017bae6bd2465a3ca074288f7d96a089aa5a2bd39ed3b3" Jan 20 18:07:08 crc kubenswrapper[4558]: I0120 18:07:08.803094 4558 util.go:48] 
"No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vbwj6" Jan 20 18:07:08 crc kubenswrapper[4558]: I0120 18:07:08.832293 4558 scope.go:117] "RemoveContainer" containerID="14859c94b3a97a4a167195a091a21cb8405b3f5caeb435a5e88ab30ce5ed7e79" Jan 20 18:07:08 crc kubenswrapper[4558]: I0120 18:07:08.836562 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vbwj6"] Jan 20 18:07:08 crc kubenswrapper[4558]: I0120 18:07:08.841185 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-vbwj6"] Jan 20 18:07:08 crc kubenswrapper[4558]: I0120 18:07:08.858420 4558 scope.go:117] "RemoveContainer" containerID="507ec83b72f3da5bb7281aa95c5bb4b9dc0916ec6b16381a18bbf2156569740b" Jan 20 18:07:08 crc kubenswrapper[4558]: I0120 18:07:08.875015 4558 scope.go:117] "RemoveContainer" containerID="b7ba2e2ebe08bfd5eb017bae6bd2465a3ca074288f7d96a089aa5a2bd39ed3b3" Jan 20 18:07:08 crc kubenswrapper[4558]: E0120 18:07:08.875478 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b7ba2e2ebe08bfd5eb017bae6bd2465a3ca074288f7d96a089aa5a2bd39ed3b3\": container with ID starting with b7ba2e2ebe08bfd5eb017bae6bd2465a3ca074288f7d96a089aa5a2bd39ed3b3 not found: ID does not exist" containerID="b7ba2e2ebe08bfd5eb017bae6bd2465a3ca074288f7d96a089aa5a2bd39ed3b3" Jan 20 18:07:08 crc kubenswrapper[4558]: I0120 18:07:08.875516 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b7ba2e2ebe08bfd5eb017bae6bd2465a3ca074288f7d96a089aa5a2bd39ed3b3"} err="failed to get container status \"b7ba2e2ebe08bfd5eb017bae6bd2465a3ca074288f7d96a089aa5a2bd39ed3b3\": rpc error: code = NotFound desc = could not find container \"b7ba2e2ebe08bfd5eb017bae6bd2465a3ca074288f7d96a089aa5a2bd39ed3b3\": container with ID starting with b7ba2e2ebe08bfd5eb017bae6bd2465a3ca074288f7d96a089aa5a2bd39ed3b3 not found: ID does not exist" Jan 20 18:07:08 crc kubenswrapper[4558]: I0120 18:07:08.875542 4558 scope.go:117] "RemoveContainer" containerID="14859c94b3a97a4a167195a091a21cb8405b3f5caeb435a5e88ab30ce5ed7e79" Jan 20 18:07:08 crc kubenswrapper[4558]: E0120 18:07:08.875835 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"14859c94b3a97a4a167195a091a21cb8405b3f5caeb435a5e88ab30ce5ed7e79\": container with ID starting with 14859c94b3a97a4a167195a091a21cb8405b3f5caeb435a5e88ab30ce5ed7e79 not found: ID does not exist" containerID="14859c94b3a97a4a167195a091a21cb8405b3f5caeb435a5e88ab30ce5ed7e79" Jan 20 18:07:08 crc kubenswrapper[4558]: I0120 18:07:08.875857 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"14859c94b3a97a4a167195a091a21cb8405b3f5caeb435a5e88ab30ce5ed7e79"} err="failed to get container status \"14859c94b3a97a4a167195a091a21cb8405b3f5caeb435a5e88ab30ce5ed7e79\": rpc error: code = NotFound desc = could not find container \"14859c94b3a97a4a167195a091a21cb8405b3f5caeb435a5e88ab30ce5ed7e79\": container with ID starting with 14859c94b3a97a4a167195a091a21cb8405b3f5caeb435a5e88ab30ce5ed7e79 not found: ID does not exist" Jan 20 18:07:08 crc kubenswrapper[4558]: I0120 18:07:08.875872 4558 scope.go:117] "RemoveContainer" containerID="507ec83b72f3da5bb7281aa95c5bb4b9dc0916ec6b16381a18bbf2156569740b" Jan 20 18:07:08 crc kubenswrapper[4558]: E0120 18:07:08.876154 4558 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"507ec83b72f3da5bb7281aa95c5bb4b9dc0916ec6b16381a18bbf2156569740b\": container with ID starting with 507ec83b72f3da5bb7281aa95c5bb4b9dc0916ec6b16381a18bbf2156569740b not found: ID does not exist" containerID="507ec83b72f3da5bb7281aa95c5bb4b9dc0916ec6b16381a18bbf2156569740b" Jan 20 18:07:08 crc kubenswrapper[4558]: I0120 18:07:08.876194 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"507ec83b72f3da5bb7281aa95c5bb4b9dc0916ec6b16381a18bbf2156569740b"} err="failed to get container status \"507ec83b72f3da5bb7281aa95c5bb4b9dc0916ec6b16381a18bbf2156569740b\": rpc error: code = NotFound desc = could not find container \"507ec83b72f3da5bb7281aa95c5bb4b9dc0916ec6b16381a18bbf2156569740b\": container with ID starting with 507ec83b72f3da5bb7281aa95c5bb4b9dc0916ec6b16381a18bbf2156569740b not found: ID does not exist" Jan 20 18:07:09 crc kubenswrapper[4558]: I0120 18:07:09.565898 4558 scope.go:117] "RemoveContainer" containerID="6b02d31ec11fe5435af988e35303a6f66b5afa5ef952ce5f7cc4b3cd5f4d9497" Jan 20 18:07:09 crc kubenswrapper[4558]: E0120 18:07:09.566151 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:07:09 crc kubenswrapper[4558]: I0120 18:07:09.819213 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-edpm-multinodeset-edpm-compute-no-nodes-22vhv" event={"ID":"e84aff63-9fc4-4604-8b6d-69bd89bf437d","Type":"ContainerStarted","Data":"7ded989e29f2ec4208dd35a489afec217e575c855f2404f510c9c3689f89afa3"} Jan 20 18:07:09 crc kubenswrapper[4558]: I0120 18:07:09.833914 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/ovn-edpm-multinodeset-edpm-compute-no-nodes-22vhv" podStartSLOduration=2.290369596 podStartE2EDuration="2.833892668s" podCreationTimestamp="2026-01-20 18:07:07 +0000 UTC" firstStartedPulling="2026-01-20 18:07:08.577533494 +0000 UTC m=+5122.337871450" lastFinishedPulling="2026-01-20 18:07:09.121056554 +0000 UTC m=+5122.881394522" observedRunningTime="2026-01-20 18:07:09.833850138 +0000 UTC m=+5123.594188095" watchObservedRunningTime="2026-01-20 18:07:09.833892668 +0000 UTC m=+5123.594230635" Jan 20 18:07:10 crc kubenswrapper[4558]: I0120 18:07:10.575374 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d49ebbdd-1136-455b-9b2a-ab3b0762be10" path="/var/lib/kubelet/pods/d49ebbdd-1136-455b-9b2a-ab3b0762be10/volumes" Jan 20 18:07:10 crc kubenswrapper[4558]: I0120 18:07:10.829588 4558 generic.go:334] "Generic (PLEG): container finished" podID="e84aff63-9fc4-4604-8b6d-69bd89bf437d" containerID="7ded989e29f2ec4208dd35a489afec217e575c855f2404f510c9c3689f89afa3" exitCode=0 Jan 20 18:07:10 crc kubenswrapper[4558]: I0120 18:07:10.829658 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-edpm-multinodeset-edpm-compute-no-nodes-22vhv" event={"ID":"e84aff63-9fc4-4604-8b6d-69bd89bf437d","Type":"ContainerDied","Data":"7ded989e29f2ec4208dd35a489afec217e575c855f2404f510c9c3689f89afa3"} Jan 20 18:07:12 crc kubenswrapper[4558]: 
I0120 18:07:12.080911 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/ovn-edpm-multinodeset-edpm-compute-no-nodes-22vhv" Jan 20 18:07:12 crc kubenswrapper[4558]: I0120 18:07:12.113867 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/e84aff63-9fc4-4604-8b6d-69bd89bf437d-ovncontroller-config-0\") pod \"e84aff63-9fc4-4604-8b6d-69bd89bf437d\" (UID: \"e84aff63-9fc4-4604-8b6d-69bd89bf437d\") " Jan 20 18:07:12 crc kubenswrapper[4558]: I0120 18:07:12.114005 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xxh7w\" (UniqueName: \"kubernetes.io/projected/e84aff63-9fc4-4604-8b6d-69bd89bf437d-kube-api-access-xxh7w\") pod \"e84aff63-9fc4-4604-8b6d-69bd89bf437d\" (UID: \"e84aff63-9fc4-4604-8b6d-69bd89bf437d\") " Jan 20 18:07:12 crc kubenswrapper[4558]: I0120 18:07:12.114067 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/e84aff63-9fc4-4604-8b6d-69bd89bf437d-ssh-key-edpm-compute-no-nodes\") pod \"e84aff63-9fc4-4604-8b6d-69bd89bf437d\" (UID: \"e84aff63-9fc4-4604-8b6d-69bd89bf437d\") " Jan 20 18:07:12 crc kubenswrapper[4558]: I0120 18:07:12.114121 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e84aff63-9fc4-4604-8b6d-69bd89bf437d-inventory\") pod \"e84aff63-9fc4-4604-8b6d-69bd89bf437d\" (UID: \"e84aff63-9fc4-4604-8b6d-69bd89bf437d\") " Jan 20 18:07:12 crc kubenswrapper[4558]: I0120 18:07:12.114152 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e84aff63-9fc4-4604-8b6d-69bd89bf437d-ovn-combined-ca-bundle\") pod \"e84aff63-9fc4-4604-8b6d-69bd89bf437d\" (UID: \"e84aff63-9fc4-4604-8b6d-69bd89bf437d\") " Jan 20 18:07:12 crc kubenswrapper[4558]: I0120 18:07:12.119289 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e84aff63-9fc4-4604-8b6d-69bd89bf437d-kube-api-access-xxh7w" (OuterVolumeSpecName: "kube-api-access-xxh7w") pod "e84aff63-9fc4-4604-8b6d-69bd89bf437d" (UID: "e84aff63-9fc4-4604-8b6d-69bd89bf437d"). InnerVolumeSpecName "kube-api-access-xxh7w". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:07:12 crc kubenswrapper[4558]: I0120 18:07:12.124418 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e84aff63-9fc4-4604-8b6d-69bd89bf437d-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "e84aff63-9fc4-4604-8b6d-69bd89bf437d" (UID: "e84aff63-9fc4-4604-8b6d-69bd89bf437d"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:07:12 crc kubenswrapper[4558]: I0120 18:07:12.134207 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e84aff63-9fc4-4604-8b6d-69bd89bf437d-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "e84aff63-9fc4-4604-8b6d-69bd89bf437d" (UID: "e84aff63-9fc4-4604-8b6d-69bd89bf437d"). InnerVolumeSpecName "ovncontroller-config-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:07:12 crc kubenswrapper[4558]: I0120 18:07:12.134522 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e84aff63-9fc4-4604-8b6d-69bd89bf437d-inventory" (OuterVolumeSpecName: "inventory") pod "e84aff63-9fc4-4604-8b6d-69bd89bf437d" (UID: "e84aff63-9fc4-4604-8b6d-69bd89bf437d"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:07:12 crc kubenswrapper[4558]: I0120 18:07:12.135662 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e84aff63-9fc4-4604-8b6d-69bd89bf437d-ssh-key-edpm-compute-no-nodes" (OuterVolumeSpecName: "ssh-key-edpm-compute-no-nodes") pod "e84aff63-9fc4-4604-8b6d-69bd89bf437d" (UID: "e84aff63-9fc4-4604-8b6d-69bd89bf437d"). InnerVolumeSpecName "ssh-key-edpm-compute-no-nodes". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:07:12 crc kubenswrapper[4558]: I0120 18:07:12.216296 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xxh7w\" (UniqueName: \"kubernetes.io/projected/e84aff63-9fc4-4604-8b6d-69bd89bf437d-kube-api-access-xxh7w\") on node \"crc\" DevicePath \"\"" Jan 20 18:07:12 crc kubenswrapper[4558]: I0120 18:07:12.216335 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/e84aff63-9fc4-4604-8b6d-69bd89bf437d-ssh-key-edpm-compute-no-nodes\") on node \"crc\" DevicePath \"\"" Jan 20 18:07:12 crc kubenswrapper[4558]: I0120 18:07:12.216349 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e84aff63-9fc4-4604-8b6d-69bd89bf437d-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:07:12 crc kubenswrapper[4558]: I0120 18:07:12.216365 4558 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e84aff63-9fc4-4604-8b6d-69bd89bf437d-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:07:12 crc kubenswrapper[4558]: I0120 18:07:12.216379 4558 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/e84aff63-9fc4-4604-8b6d-69bd89bf437d-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Jan 20 18:07:12 crc kubenswrapper[4558]: I0120 18:07:12.850440 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/ovn-edpm-multinodeset-edpm-compute-no-nodes-22vhv" event={"ID":"e84aff63-9fc4-4604-8b6d-69bd89bf437d","Type":"ContainerDied","Data":"9e00d4ad46bc272aabc573fb9d0af19c085f3cb3ae82ff9d17a5e59d9e7880ad"} Jan 20 18:07:12 crc kubenswrapper[4558]: I0120 18:07:12.850497 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9e00d4ad46bc272aabc573fb9d0af19c085f3cb3ae82ff9d17a5e59d9e7880ad" Jan 20 18:07:12 crc kubenswrapper[4558]: I0120 18:07:12.850520 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/ovn-edpm-multinodeset-edpm-compute-no-nodes-22vhv" Jan 20 18:07:12 crc kubenswrapper[4558]: I0120 18:07:12.910703 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-no-nodes-5ljg4"] Jan 20 18:07:12 crc kubenswrapper[4558]: E0120 18:07:12.911020 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e84aff63-9fc4-4604-8b6d-69bd89bf437d" containerName="ovn-edpm-multinodeset-edpm-compute-no-nodes" Jan 20 18:07:12 crc kubenswrapper[4558]: I0120 18:07:12.911040 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e84aff63-9fc4-4604-8b6d-69bd89bf437d" containerName="ovn-edpm-multinodeset-edpm-compute-no-nodes" Jan 20 18:07:12 crc kubenswrapper[4558]: E0120 18:07:12.911054 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d49ebbdd-1136-455b-9b2a-ab3b0762be10" containerName="extract-content" Jan 20 18:07:12 crc kubenswrapper[4558]: I0120 18:07:12.911060 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d49ebbdd-1136-455b-9b2a-ab3b0762be10" containerName="extract-content" Jan 20 18:07:12 crc kubenswrapper[4558]: E0120 18:07:12.911074 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d49ebbdd-1136-455b-9b2a-ab3b0762be10" containerName="registry-server" Jan 20 18:07:12 crc kubenswrapper[4558]: I0120 18:07:12.911080 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d49ebbdd-1136-455b-9b2a-ab3b0762be10" containerName="registry-server" Jan 20 18:07:12 crc kubenswrapper[4558]: E0120 18:07:12.911104 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d49ebbdd-1136-455b-9b2a-ab3b0762be10" containerName="extract-utilities" Jan 20 18:07:12 crc kubenswrapper[4558]: I0120 18:07:12.911111 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d49ebbdd-1136-455b-9b2a-ab3b0762be10" containerName="extract-utilities" Jan 20 18:07:12 crc kubenswrapper[4558]: I0120 18:07:12.911257 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d49ebbdd-1136-455b-9b2a-ab3b0762be10" containerName="registry-server" Jan 20 18:07:12 crc kubenswrapper[4558]: I0120 18:07:12.911276 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e84aff63-9fc4-4604-8b6d-69bd89bf437d" containerName="ovn-edpm-multinodeset-edpm-compute-no-nodes" Jan 20 18:07:12 crc kubenswrapper[4558]: I0120 18:07:12.911719 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-no-nodes-5ljg4" Jan 20 18:07:12 crc kubenswrapper[4558]: I0120 18:07:12.914059 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-ovn-metadata-agent-neutron-config" Jan 20 18:07:12 crc kubenswrapper[4558]: I0120 18:07:12.914071 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"combined-ca-bundle" Jan 20 18:07:12 crc kubenswrapper[4558]: I0120 18:07:12.914454 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"edpm-compute-no-nodes-dockercfg-tljmj" Jan 20 18:07:12 crc kubenswrapper[4558]: I0120 18:07:12.914540 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-edpm-compute-no-nodes" Jan 20 18:07:12 crc kubenswrapper[4558]: I0120 18:07:12.914594 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-metadata-neutron-config" Jan 20 18:07:12 crc kubenswrapper[4558]: I0120 18:07:12.915000 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 18:07:12 crc kubenswrapper[4558]: I0120 18:07:12.915287 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 18:07:12 crc kubenswrapper[4558]: I0120 18:07:12.919620 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-no-nodes-5ljg4"] Jan 20 18:07:12 crc kubenswrapper[4558]: I0120 18:07:12.923432 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dd9sk\" (UniqueName: \"kubernetes.io/projected/7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1-kube-api-access-dd9sk\") pod \"neutron-metadata-edpm-multinodeset-edpm-compute-no-nodes-5ljg4\" (UID: \"7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-no-nodes-5ljg4" Jan 20 18:07:12 crc kubenswrapper[4558]: I0120 18:07:12.923486 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-2\" (UniqueName: \"kubernetes.io/secret/7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1-nova-metadata-neutron-config-2\") pod \"neutron-metadata-edpm-multinodeset-edpm-compute-no-nodes-5ljg4\" (UID: \"7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-no-nodes-5ljg4" Jan 20 18:07:12 crc kubenswrapper[4558]: I0120 18:07:12.923510 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1-ssh-key-edpm-compute-no-nodes\") pod \"neutron-metadata-edpm-multinodeset-edpm-compute-no-nodes-5ljg4\" (UID: \"7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-no-nodes-5ljg4" Jan 20 18:07:12 crc kubenswrapper[4558]: I0120 18:07:12.923629 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-1\" (UniqueName: \"kubernetes.io/secret/7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1-nova-metadata-neutron-config-1\") pod 
\"neutron-metadata-edpm-multinodeset-edpm-compute-no-nodes-5ljg4\" (UID: \"7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-no-nodes-5ljg4" Jan 20 18:07:12 crc kubenswrapper[4558]: I0120 18:07:12.923678 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-multinodeset-edpm-compute-no-nodes-5ljg4\" (UID: \"7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-no-nodes-5ljg4" Jan 20 18:07:12 crc kubenswrapper[4558]: I0120 18:07:12.923698 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-multinodeset-edpm-compute-no-nodes-5ljg4\" (UID: \"7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-no-nodes-5ljg4" Jan 20 18:07:12 crc kubenswrapper[4558]: I0120 18:07:12.923763 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-multinodeset-edpm-compute-no-nodes-5ljg4\" (UID: \"7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-no-nodes-5ljg4" Jan 20 18:07:12 crc kubenswrapper[4558]: I0120 18:07:12.923786 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1-inventory\") pod \"neutron-metadata-edpm-multinodeset-edpm-compute-no-nodes-5ljg4\" (UID: \"7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-no-nodes-5ljg4" Jan 20 18:07:13 crc kubenswrapper[4558]: I0120 18:07:13.025978 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dd9sk\" (UniqueName: \"kubernetes.io/projected/7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1-kube-api-access-dd9sk\") pod \"neutron-metadata-edpm-multinodeset-edpm-compute-no-nodes-5ljg4\" (UID: \"7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-no-nodes-5ljg4" Jan 20 18:07:13 crc kubenswrapper[4558]: I0120 18:07:13.026138 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-2\" (UniqueName: \"kubernetes.io/secret/7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1-nova-metadata-neutron-config-2\") pod \"neutron-metadata-edpm-multinodeset-edpm-compute-no-nodes-5ljg4\" (UID: \"7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-no-nodes-5ljg4" Jan 20 18:07:13 crc kubenswrapper[4558]: I0120 18:07:13.026215 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1-ssh-key-edpm-compute-no-nodes\") pod 
\"neutron-metadata-edpm-multinodeset-edpm-compute-no-nodes-5ljg4\" (UID: \"7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-no-nodes-5ljg4" Jan 20 18:07:13 crc kubenswrapper[4558]: I0120 18:07:13.026257 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-1\" (UniqueName: \"kubernetes.io/secret/7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1-nova-metadata-neutron-config-1\") pod \"neutron-metadata-edpm-multinodeset-edpm-compute-no-nodes-5ljg4\" (UID: \"7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-no-nodes-5ljg4" Jan 20 18:07:13 crc kubenswrapper[4558]: I0120 18:07:13.026339 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-multinodeset-edpm-compute-no-nodes-5ljg4\" (UID: \"7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-no-nodes-5ljg4" Jan 20 18:07:13 crc kubenswrapper[4558]: I0120 18:07:13.026369 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-multinodeset-edpm-compute-no-nodes-5ljg4\" (UID: \"7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-no-nodes-5ljg4" Jan 20 18:07:13 crc kubenswrapper[4558]: I0120 18:07:13.026475 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-multinodeset-edpm-compute-no-nodes-5ljg4\" (UID: \"7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-no-nodes-5ljg4" Jan 20 18:07:13 crc kubenswrapper[4558]: I0120 18:07:13.026504 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1-inventory\") pod \"neutron-metadata-edpm-multinodeset-edpm-compute-no-nodes-5ljg4\" (UID: \"7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-no-nodes-5ljg4" Jan 20 18:07:13 crc kubenswrapper[4558]: I0120 18:07:13.030458 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-2\" (UniqueName: \"kubernetes.io/secret/7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1-nova-metadata-neutron-config-2\") pod \"neutron-metadata-edpm-multinodeset-edpm-compute-no-nodes-5ljg4\" (UID: \"7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-no-nodes-5ljg4" Jan 20 18:07:13 crc kubenswrapper[4558]: I0120 18:07:13.030753 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1-neutron-ovn-metadata-agent-neutron-config-0\") pod 
\"neutron-metadata-edpm-multinodeset-edpm-compute-no-nodes-5ljg4\" (UID: \"7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-no-nodes-5ljg4" Jan 20 18:07:13 crc kubenswrapper[4558]: I0120 18:07:13.031282 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-multinodeset-edpm-compute-no-nodes-5ljg4\" (UID: \"7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-no-nodes-5ljg4" Jan 20 18:07:13 crc kubenswrapper[4558]: I0120 18:07:13.031322 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1-inventory\") pod \"neutron-metadata-edpm-multinodeset-edpm-compute-no-nodes-5ljg4\" (UID: \"7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-no-nodes-5ljg4" Jan 20 18:07:13 crc kubenswrapper[4558]: I0120 18:07:13.031421 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-1\" (UniqueName: \"kubernetes.io/secret/7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1-nova-metadata-neutron-config-1\") pod \"neutron-metadata-edpm-multinodeset-edpm-compute-no-nodes-5ljg4\" (UID: \"7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-no-nodes-5ljg4" Jan 20 18:07:13 crc kubenswrapper[4558]: I0120 18:07:13.031519 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1-ssh-key-edpm-compute-no-nodes\") pod \"neutron-metadata-edpm-multinodeset-edpm-compute-no-nodes-5ljg4\" (UID: \"7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-no-nodes-5ljg4" Jan 20 18:07:13 crc kubenswrapper[4558]: I0120 18:07:13.032322 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-multinodeset-edpm-compute-no-nodes-5ljg4\" (UID: \"7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-no-nodes-5ljg4" Jan 20 18:07:13 crc kubenswrapper[4558]: I0120 18:07:13.041130 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dd9sk\" (UniqueName: \"kubernetes.io/projected/7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1-kube-api-access-dd9sk\") pod \"neutron-metadata-edpm-multinodeset-edpm-compute-no-nodes-5ljg4\" (UID: \"7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1\") " pod="openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-no-nodes-5ljg4" Jan 20 18:07:13 crc kubenswrapper[4558]: I0120 18:07:13.225518 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-no-nodes-5ljg4" Jan 20 18:07:13 crc kubenswrapper[4558]: I0120 18:07:13.633356 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-no-nodes-5ljg4"] Jan 20 18:07:13 crc kubenswrapper[4558]: W0120 18:07:13.636866 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7eb53d33_93c4_4ebc_ae45_a1e0c7f151b1.slice/crio-b38676344e70602f30dd71dea613035f3773ca6a77ad462b0b321602ba657949 WatchSource:0}: Error finding container b38676344e70602f30dd71dea613035f3773ca6a77ad462b0b321602ba657949: Status 404 returned error can't find the container with id b38676344e70602f30dd71dea613035f3773ca6a77ad462b0b321602ba657949 Jan 20 18:07:13 crc kubenswrapper[4558]: I0120 18:07:13.869255 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-no-nodes-5ljg4" event={"ID":"7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1","Type":"ContainerStarted","Data":"b38676344e70602f30dd71dea613035f3773ca6a77ad462b0b321602ba657949"} Jan 20 18:07:14 crc kubenswrapper[4558]: I0120 18:07:14.879589 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-no-nodes-5ljg4" event={"ID":"7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1","Type":"ContainerStarted","Data":"fbbbd3e423e43fc9a81129aadd7c7dad474a4a0d21b4964b9d8b4a1efdd4f7b8"} Jan 20 18:07:14 crc kubenswrapper[4558]: I0120 18:07:14.901371 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-no-nodes-5ljg4" podStartSLOduration=2.340080294 podStartE2EDuration="2.901353799s" podCreationTimestamp="2026-01-20 18:07:12 +0000 UTC" firstStartedPulling="2026-01-20 18:07:13.639410375 +0000 UTC m=+5127.399748342" lastFinishedPulling="2026-01-20 18:07:14.20068388 +0000 UTC m=+5127.961021847" observedRunningTime="2026-01-20 18:07:14.894063892 +0000 UTC m=+5128.654401880" watchObservedRunningTime="2026-01-20 18:07:14.901353799 +0000 UTC m=+5128.661691766" Jan 20 18:07:15 crc kubenswrapper[4558]: I0120 18:07:15.890868 4558 generic.go:334] "Generic (PLEG): container finished" podID="7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1" containerID="fbbbd3e423e43fc9a81129aadd7c7dad474a4a0d21b4964b9d8b4a1efdd4f7b8" exitCode=0 Jan 20 18:07:15 crc kubenswrapper[4558]: I0120 18:07:15.890930 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-no-nodes-5ljg4" event={"ID":"7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1","Type":"ContainerDied","Data":"fbbbd3e423e43fc9a81129aadd7c7dad474a4a0d21b4964b9d8b4a1efdd4f7b8"} Jan 20 18:07:17 crc kubenswrapper[4558]: I0120 18:07:17.149723 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-no-nodes-5ljg4" Jan 20 18:07:17 crc kubenswrapper[4558]: I0120 18:07:17.290867 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-2\" (UniqueName: \"kubernetes.io/secret/7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1-nova-metadata-neutron-config-2\") pod \"7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1\" (UID: \"7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1\") " Jan 20 18:07:17 crc kubenswrapper[4558]: I0120 18:07:17.290911 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1-nova-metadata-neutron-config-0\") pod \"7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1\" (UID: \"7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1\") " Jan 20 18:07:17 crc kubenswrapper[4558]: I0120 18:07:17.290942 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dd9sk\" (UniqueName: \"kubernetes.io/projected/7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1-kube-api-access-dd9sk\") pod \"7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1\" (UID: \"7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1\") " Jan 20 18:07:17 crc kubenswrapper[4558]: I0120 18:07:17.290964 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1-neutron-metadata-combined-ca-bundle\") pod \"7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1\" (UID: \"7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1\") " Jan 20 18:07:17 crc kubenswrapper[4558]: I0120 18:07:17.290995 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1-ssh-key-edpm-compute-no-nodes\") pod \"7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1\" (UID: \"7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1\") " Jan 20 18:07:17 crc kubenswrapper[4558]: I0120 18:07:17.291040 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1-neutron-ovn-metadata-agent-neutron-config-0\") pod \"7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1\" (UID: \"7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1\") " Jan 20 18:07:17 crc kubenswrapper[4558]: I0120 18:07:17.291059 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1-inventory\") pod \"7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1\" (UID: \"7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1\") " Jan 20 18:07:17 crc kubenswrapper[4558]: I0120 18:07:17.291094 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-1\" (UniqueName: \"kubernetes.io/secret/7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1-nova-metadata-neutron-config-1\") pod \"7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1\" (UID: \"7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1\") " Jan 20 18:07:17 crc kubenswrapper[4558]: I0120 18:07:17.297264 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1-kube-api-access-dd9sk" (OuterVolumeSpecName: "kube-api-access-dd9sk") pod "7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1" (UID: "7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1"). 
InnerVolumeSpecName "kube-api-access-dd9sk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:07:17 crc kubenswrapper[4558]: I0120 18:07:17.303317 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1" (UID: "7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:07:17 crc kubenswrapper[4558]: I0120 18:07:17.317019 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1-ssh-key-edpm-compute-no-nodes" (OuterVolumeSpecName: "ssh-key-edpm-compute-no-nodes") pod "7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1" (UID: "7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1"). InnerVolumeSpecName "ssh-key-edpm-compute-no-nodes". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:07:17 crc kubenswrapper[4558]: I0120 18:07:17.320776 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1" (UID: "7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:07:17 crc kubenswrapper[4558]: I0120 18:07:17.320811 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1-nova-metadata-neutron-config-2" (OuterVolumeSpecName: "nova-metadata-neutron-config-2") pod "7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1" (UID: "7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1"). InnerVolumeSpecName "nova-metadata-neutron-config-2". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:07:17 crc kubenswrapper[4558]: I0120 18:07:17.320845 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1" (UID: "7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1"). InnerVolumeSpecName "nova-metadata-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:07:17 crc kubenswrapper[4558]: I0120 18:07:17.320990 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1-nova-metadata-neutron-config-1" (OuterVolumeSpecName: "nova-metadata-neutron-config-1") pod "7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1" (UID: "7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1"). InnerVolumeSpecName "nova-metadata-neutron-config-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:07:17 crc kubenswrapper[4558]: I0120 18:07:17.322872 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1-inventory" (OuterVolumeSpecName: "inventory") pod "7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1" (UID: "7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:07:17 crc kubenswrapper[4558]: I0120 18:07:17.393157 4558 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Jan 20 18:07:17 crc kubenswrapper[4558]: I0120 18:07:17.393206 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:07:17 crc kubenswrapper[4558]: I0120 18:07:17.393224 4558 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-1\" (UniqueName: \"kubernetes.io/secret/7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1-nova-metadata-neutron-config-1\") on node \"crc\" DevicePath \"\"" Jan 20 18:07:17 crc kubenswrapper[4558]: I0120 18:07:17.393238 4558 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-2\" (UniqueName: \"kubernetes.io/secret/7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1-nova-metadata-neutron-config-2\") on node \"crc\" DevicePath \"\"" Jan 20 18:07:17 crc kubenswrapper[4558]: I0120 18:07:17.393248 4558 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Jan 20 18:07:17 crc kubenswrapper[4558]: I0120 18:07:17.393262 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dd9sk\" (UniqueName: \"kubernetes.io/projected/7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1-kube-api-access-dd9sk\") on node \"crc\" DevicePath \"\"" Jan 20 18:07:17 crc kubenswrapper[4558]: I0120 18:07:17.393275 4558 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:07:17 crc kubenswrapper[4558]: I0120 18:07:17.393288 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1-ssh-key-edpm-compute-no-nodes\") on node \"crc\" DevicePath \"\"" Jan 20 18:07:17 crc kubenswrapper[4558]: I0120 18:07:17.913071 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-no-nodes-5ljg4" event={"ID":"7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1","Type":"ContainerDied","Data":"b38676344e70602f30dd71dea613035f3773ca6a77ad462b0b321602ba657949"} Jan 20 18:07:17 crc kubenswrapper[4558]: I0120 18:07:17.913126 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b38676344e70602f30dd71dea613035f3773ca6a77ad462b0b321602ba657949" Jan 20 18:07:17 crc kubenswrapper[4558]: I0120 18:07:17.913126 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-no-nodes-5ljg4" Jan 20 18:07:17 crc kubenswrapper[4558]: I0120 18:07:17.972128 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-ovn-edpm-multinodeset-edpm-compute-no-nodes-5rkzf"] Jan 20 18:07:17 crc kubenswrapper[4558]: E0120 18:07:17.972494 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1" containerName="neutron-metadata-edpm-multinodeset-edpm-compute-no-nodes" Jan 20 18:07:17 crc kubenswrapper[4558]: I0120 18:07:17.972514 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1" containerName="neutron-metadata-edpm-multinodeset-edpm-compute-no-nodes" Jan 20 18:07:17 crc kubenswrapper[4558]: I0120 18:07:17.972701 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1" containerName="neutron-metadata-edpm-multinodeset-edpm-compute-no-nodes" Jan 20 18:07:17 crc kubenswrapper[4558]: I0120 18:07:17.973185 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-ovn-edpm-multinodeset-edpm-compute-no-nodes-5rkzf" Jan 20 18:07:17 crc kubenswrapper[4558]: I0120 18:07:17.974659 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"combined-ca-bundle" Jan 20 18:07:17 crc kubenswrapper[4558]: I0120 18:07:17.974724 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-edpm-compute-no-nodes" Jan 20 18:07:17 crc kubenswrapper[4558]: I0120 18:07:17.975271 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-ovn-agent-neutron-config" Jan 20 18:07:17 crc kubenswrapper[4558]: I0120 18:07:17.975396 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"edpm-compute-no-nodes-dockercfg-tljmj" Jan 20 18:07:17 crc kubenswrapper[4558]: I0120 18:07:17.975597 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 18:07:17 crc kubenswrapper[4558]: I0120 18:07:17.978454 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 18:07:17 crc kubenswrapper[4558]: I0120 18:07:17.985227 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-ovn-edpm-multinodeset-edpm-compute-no-nodes-5rkzf"] Jan 20 18:07:18 crc kubenswrapper[4558]: I0120 18:07:18.103792 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1e66f491-e230-42f2-aab5-40c0ab8f3090-inventory\") pod \"neutron-ovn-edpm-multinodeset-edpm-compute-no-nodes-5rkzf\" (UID: \"1e66f491-e230-42f2-aab5-40c0ab8f3090\") " pod="openstack-kuttl-tests/neutron-ovn-edpm-multinodeset-edpm-compute-no-nodes-5rkzf" Jan 20 18:07:18 crc kubenswrapper[4558]: I0120 18:07:18.103963 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/1e66f491-e230-42f2-aab5-40c0ab8f3090-neutron-ovn-agent-neutron-config-0\") pod \"neutron-ovn-edpm-multinodeset-edpm-compute-no-nodes-5rkzf\" (UID: \"1e66f491-e230-42f2-aab5-40c0ab8f3090\") " 
pod="openstack-kuttl-tests/neutron-ovn-edpm-multinodeset-edpm-compute-no-nodes-5rkzf" Jan 20 18:07:18 crc kubenswrapper[4558]: I0120 18:07:18.104007 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6j954\" (UniqueName: \"kubernetes.io/projected/1e66f491-e230-42f2-aab5-40c0ab8f3090-kube-api-access-6j954\") pod \"neutron-ovn-edpm-multinodeset-edpm-compute-no-nodes-5rkzf\" (UID: \"1e66f491-e230-42f2-aab5-40c0ab8f3090\") " pod="openstack-kuttl-tests/neutron-ovn-edpm-multinodeset-edpm-compute-no-nodes-5rkzf" Jan 20 18:07:18 crc kubenswrapper[4558]: I0120 18:07:18.104049 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e66f491-e230-42f2-aab5-40c0ab8f3090-neutron-ovn-combined-ca-bundle\") pod \"neutron-ovn-edpm-multinodeset-edpm-compute-no-nodes-5rkzf\" (UID: \"1e66f491-e230-42f2-aab5-40c0ab8f3090\") " pod="openstack-kuttl-tests/neutron-ovn-edpm-multinodeset-edpm-compute-no-nodes-5rkzf" Jan 20 18:07:18 crc kubenswrapper[4558]: I0120 18:07:18.104087 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/1e66f491-e230-42f2-aab5-40c0ab8f3090-ssh-key-edpm-compute-no-nodes\") pod \"neutron-ovn-edpm-multinodeset-edpm-compute-no-nodes-5rkzf\" (UID: \"1e66f491-e230-42f2-aab5-40c0ab8f3090\") " pod="openstack-kuttl-tests/neutron-ovn-edpm-multinodeset-edpm-compute-no-nodes-5rkzf" Jan 20 18:07:18 crc kubenswrapper[4558]: I0120 18:07:18.205225 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/1e66f491-e230-42f2-aab5-40c0ab8f3090-neutron-ovn-agent-neutron-config-0\") pod \"neutron-ovn-edpm-multinodeset-edpm-compute-no-nodes-5rkzf\" (UID: \"1e66f491-e230-42f2-aab5-40c0ab8f3090\") " pod="openstack-kuttl-tests/neutron-ovn-edpm-multinodeset-edpm-compute-no-nodes-5rkzf" Jan 20 18:07:18 crc kubenswrapper[4558]: I0120 18:07:18.205266 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6j954\" (UniqueName: \"kubernetes.io/projected/1e66f491-e230-42f2-aab5-40c0ab8f3090-kube-api-access-6j954\") pod \"neutron-ovn-edpm-multinodeset-edpm-compute-no-nodes-5rkzf\" (UID: \"1e66f491-e230-42f2-aab5-40c0ab8f3090\") " pod="openstack-kuttl-tests/neutron-ovn-edpm-multinodeset-edpm-compute-no-nodes-5rkzf" Jan 20 18:07:18 crc kubenswrapper[4558]: I0120 18:07:18.205304 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e66f491-e230-42f2-aab5-40c0ab8f3090-neutron-ovn-combined-ca-bundle\") pod \"neutron-ovn-edpm-multinodeset-edpm-compute-no-nodes-5rkzf\" (UID: \"1e66f491-e230-42f2-aab5-40c0ab8f3090\") " pod="openstack-kuttl-tests/neutron-ovn-edpm-multinodeset-edpm-compute-no-nodes-5rkzf" Jan 20 18:07:18 crc kubenswrapper[4558]: I0120 18:07:18.205334 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/1e66f491-e230-42f2-aab5-40c0ab8f3090-ssh-key-edpm-compute-no-nodes\") pod \"neutron-ovn-edpm-multinodeset-edpm-compute-no-nodes-5rkzf\" (UID: \"1e66f491-e230-42f2-aab5-40c0ab8f3090\") " pod="openstack-kuttl-tests/neutron-ovn-edpm-multinodeset-edpm-compute-no-nodes-5rkzf" Jan 20 18:07:18 crc 
kubenswrapper[4558]: I0120 18:07:18.205372 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1e66f491-e230-42f2-aab5-40c0ab8f3090-inventory\") pod \"neutron-ovn-edpm-multinodeset-edpm-compute-no-nodes-5rkzf\" (UID: \"1e66f491-e230-42f2-aab5-40c0ab8f3090\") " pod="openstack-kuttl-tests/neutron-ovn-edpm-multinodeset-edpm-compute-no-nodes-5rkzf" Jan 20 18:07:18 crc kubenswrapper[4558]: I0120 18:07:18.210964 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/1e66f491-e230-42f2-aab5-40c0ab8f3090-neutron-ovn-agent-neutron-config-0\") pod \"neutron-ovn-edpm-multinodeset-edpm-compute-no-nodes-5rkzf\" (UID: \"1e66f491-e230-42f2-aab5-40c0ab8f3090\") " pod="openstack-kuttl-tests/neutron-ovn-edpm-multinodeset-edpm-compute-no-nodes-5rkzf" Jan 20 18:07:18 crc kubenswrapper[4558]: I0120 18:07:18.211157 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1e66f491-e230-42f2-aab5-40c0ab8f3090-inventory\") pod \"neutron-ovn-edpm-multinodeset-edpm-compute-no-nodes-5rkzf\" (UID: \"1e66f491-e230-42f2-aab5-40c0ab8f3090\") " pod="openstack-kuttl-tests/neutron-ovn-edpm-multinodeset-edpm-compute-no-nodes-5rkzf" Jan 20 18:07:18 crc kubenswrapper[4558]: I0120 18:07:18.211491 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e66f491-e230-42f2-aab5-40c0ab8f3090-neutron-ovn-combined-ca-bundle\") pod \"neutron-ovn-edpm-multinodeset-edpm-compute-no-nodes-5rkzf\" (UID: \"1e66f491-e230-42f2-aab5-40c0ab8f3090\") " pod="openstack-kuttl-tests/neutron-ovn-edpm-multinodeset-edpm-compute-no-nodes-5rkzf" Jan 20 18:07:18 crc kubenswrapper[4558]: I0120 18:07:18.211585 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/1e66f491-e230-42f2-aab5-40c0ab8f3090-ssh-key-edpm-compute-no-nodes\") pod \"neutron-ovn-edpm-multinodeset-edpm-compute-no-nodes-5rkzf\" (UID: \"1e66f491-e230-42f2-aab5-40c0ab8f3090\") " pod="openstack-kuttl-tests/neutron-ovn-edpm-multinodeset-edpm-compute-no-nodes-5rkzf" Jan 20 18:07:18 crc kubenswrapper[4558]: I0120 18:07:18.220772 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6j954\" (UniqueName: \"kubernetes.io/projected/1e66f491-e230-42f2-aab5-40c0ab8f3090-kube-api-access-6j954\") pod \"neutron-ovn-edpm-multinodeset-edpm-compute-no-nodes-5rkzf\" (UID: \"1e66f491-e230-42f2-aab5-40c0ab8f3090\") " pod="openstack-kuttl-tests/neutron-ovn-edpm-multinodeset-edpm-compute-no-nodes-5rkzf" Jan 20 18:07:18 crc kubenswrapper[4558]: I0120 18:07:18.288570 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-ovn-edpm-multinodeset-edpm-compute-no-nodes-5rkzf" Jan 20 18:07:18 crc kubenswrapper[4558]: I0120 18:07:18.674918 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-ovn-edpm-multinodeset-edpm-compute-no-nodes-5rkzf"] Jan 20 18:07:18 crc kubenswrapper[4558]: W0120 18:07:18.678334 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1e66f491_e230_42f2_aab5_40c0ab8f3090.slice/crio-6f727bf43fd18d92420aa3164049f8b1aab3934424fd7bb5c63f1b7587d9094f WatchSource:0}: Error finding container 6f727bf43fd18d92420aa3164049f8b1aab3934424fd7bb5c63f1b7587d9094f: Status 404 returned error can't find the container with id 6f727bf43fd18d92420aa3164049f8b1aab3934424fd7bb5c63f1b7587d9094f Jan 20 18:07:18 crc kubenswrapper[4558]: I0120 18:07:18.930073 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-ovn-edpm-multinodeset-edpm-compute-no-nodes-5rkzf" event={"ID":"1e66f491-e230-42f2-aab5-40c0ab8f3090","Type":"ContainerStarted","Data":"6f727bf43fd18d92420aa3164049f8b1aab3934424fd7bb5c63f1b7587d9094f"} Jan 20 18:07:19 crc kubenswrapper[4558]: I0120 18:07:19.941663 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-ovn-edpm-multinodeset-edpm-compute-no-nodes-5rkzf" event={"ID":"1e66f491-e230-42f2-aab5-40c0ab8f3090","Type":"ContainerStarted","Data":"39db11afea3e6c7fc12ff8f00fef07a479fffc07bbfefb56685639a3dc257c1f"} Jan 20 18:07:19 crc kubenswrapper[4558]: I0120 18:07:19.961118 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/neutron-ovn-edpm-multinodeset-edpm-compute-no-nodes-5rkzf" podStartSLOduration=2.447829124 podStartE2EDuration="2.961100225s" podCreationTimestamp="2026-01-20 18:07:17 +0000 UTC" firstStartedPulling="2026-01-20 18:07:18.68070103 +0000 UTC m=+5132.441038998" lastFinishedPulling="2026-01-20 18:07:19.193972132 +0000 UTC m=+5132.954310099" observedRunningTime="2026-01-20 18:07:19.958104273 +0000 UTC m=+5133.718442240" watchObservedRunningTime="2026-01-20 18:07:19.961100225 +0000 UTC m=+5133.721438193" Jan 20 18:07:20 crc kubenswrapper[4558]: I0120 18:07:20.950839 4558 generic.go:334] "Generic (PLEG): container finished" podID="1e66f491-e230-42f2-aab5-40c0ab8f3090" containerID="39db11afea3e6c7fc12ff8f00fef07a479fffc07bbfefb56685639a3dc257c1f" exitCode=0 Jan 20 18:07:20 crc kubenswrapper[4558]: I0120 18:07:20.950934 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-ovn-edpm-multinodeset-edpm-compute-no-nodes-5rkzf" event={"ID":"1e66f491-e230-42f2-aab5-40c0ab8f3090","Type":"ContainerDied","Data":"39db11afea3e6c7fc12ff8f00fef07a479fffc07bbfefb56685639a3dc257c1f"} Jan 20 18:07:21 crc kubenswrapper[4558]: I0120 18:07:21.567068 4558 scope.go:117] "RemoveContainer" containerID="6b02d31ec11fe5435af988e35303a6f66b5afa5ef952ce5f7cc4b3cd5f4d9497" Jan 20 18:07:21 crc kubenswrapper[4558]: E0120 18:07:21.567339 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:07:22 crc 
kubenswrapper[4558]: I0120 18:07:22.226013 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-ovn-edpm-multinodeset-edpm-compute-no-nodes-5rkzf" Jan 20 18:07:22 crc kubenswrapper[4558]: I0120 18:07:22.369001 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6j954\" (UniqueName: \"kubernetes.io/projected/1e66f491-e230-42f2-aab5-40c0ab8f3090-kube-api-access-6j954\") pod \"1e66f491-e230-42f2-aab5-40c0ab8f3090\" (UID: \"1e66f491-e230-42f2-aab5-40c0ab8f3090\") " Jan 20 18:07:22 crc kubenswrapper[4558]: I0120 18:07:22.369075 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/1e66f491-e230-42f2-aab5-40c0ab8f3090-ssh-key-edpm-compute-no-nodes\") pod \"1e66f491-e230-42f2-aab5-40c0ab8f3090\" (UID: \"1e66f491-e230-42f2-aab5-40c0ab8f3090\") " Jan 20 18:07:22 crc kubenswrapper[4558]: I0120 18:07:22.369183 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/1e66f491-e230-42f2-aab5-40c0ab8f3090-neutron-ovn-agent-neutron-config-0\") pod \"1e66f491-e230-42f2-aab5-40c0ab8f3090\" (UID: \"1e66f491-e230-42f2-aab5-40c0ab8f3090\") " Jan 20 18:07:22 crc kubenswrapper[4558]: I0120 18:07:22.369286 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e66f491-e230-42f2-aab5-40c0ab8f3090-neutron-ovn-combined-ca-bundle\") pod \"1e66f491-e230-42f2-aab5-40c0ab8f3090\" (UID: \"1e66f491-e230-42f2-aab5-40c0ab8f3090\") " Jan 20 18:07:22 crc kubenswrapper[4558]: I0120 18:07:22.369341 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1e66f491-e230-42f2-aab5-40c0ab8f3090-inventory\") pod \"1e66f491-e230-42f2-aab5-40c0ab8f3090\" (UID: \"1e66f491-e230-42f2-aab5-40c0ab8f3090\") " Jan 20 18:07:22 crc kubenswrapper[4558]: I0120 18:07:22.375422 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1e66f491-e230-42f2-aab5-40c0ab8f3090-kube-api-access-6j954" (OuterVolumeSpecName: "kube-api-access-6j954") pod "1e66f491-e230-42f2-aab5-40c0ab8f3090" (UID: "1e66f491-e230-42f2-aab5-40c0ab8f3090"). InnerVolumeSpecName "kube-api-access-6j954". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:07:22 crc kubenswrapper[4558]: I0120 18:07:22.375576 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1e66f491-e230-42f2-aab5-40c0ab8f3090-neutron-ovn-combined-ca-bundle" (OuterVolumeSpecName: "neutron-ovn-combined-ca-bundle") pod "1e66f491-e230-42f2-aab5-40c0ab8f3090" (UID: "1e66f491-e230-42f2-aab5-40c0ab8f3090"). InnerVolumeSpecName "neutron-ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:07:22 crc kubenswrapper[4558]: I0120 18:07:22.390961 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1e66f491-e230-42f2-aab5-40c0ab8f3090-ssh-key-edpm-compute-no-nodes" (OuterVolumeSpecName: "ssh-key-edpm-compute-no-nodes") pod "1e66f491-e230-42f2-aab5-40c0ab8f3090" (UID: "1e66f491-e230-42f2-aab5-40c0ab8f3090"). InnerVolumeSpecName "ssh-key-edpm-compute-no-nodes". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:07:22 crc kubenswrapper[4558]: I0120 18:07:22.390991 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1e66f491-e230-42f2-aab5-40c0ab8f3090-neutron-ovn-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-agent-neutron-config-0") pod "1e66f491-e230-42f2-aab5-40c0ab8f3090" (UID: "1e66f491-e230-42f2-aab5-40c0ab8f3090"). InnerVolumeSpecName "neutron-ovn-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:07:22 crc kubenswrapper[4558]: I0120 18:07:22.391056 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1e66f491-e230-42f2-aab5-40c0ab8f3090-inventory" (OuterVolumeSpecName: "inventory") pod "1e66f491-e230-42f2-aab5-40c0ab8f3090" (UID: "1e66f491-e230-42f2-aab5-40c0ab8f3090"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:07:22 crc kubenswrapper[4558]: I0120 18:07:22.471482 4558 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e66f491-e230-42f2-aab5-40c0ab8f3090-neutron-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:07:22 crc kubenswrapper[4558]: I0120 18:07:22.471589 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1e66f491-e230-42f2-aab5-40c0ab8f3090-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:07:22 crc kubenswrapper[4558]: I0120 18:07:22.471650 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6j954\" (UniqueName: \"kubernetes.io/projected/1e66f491-e230-42f2-aab5-40c0ab8f3090-kube-api-access-6j954\") on node \"crc\" DevicePath \"\"" Jan 20 18:07:22 crc kubenswrapper[4558]: I0120 18:07:22.471709 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/1e66f491-e230-42f2-aab5-40c0ab8f3090-ssh-key-edpm-compute-no-nodes\") on node \"crc\" DevicePath \"\"" Jan 20 18:07:22 crc kubenswrapper[4558]: I0120 18:07:22.471757 4558 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/1e66f491-e230-42f2-aab5-40c0ab8f3090-neutron-ovn-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Jan 20 18:07:22 crc kubenswrapper[4558]: I0120 18:07:22.975260 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-ovn-edpm-multinodeset-edpm-compute-no-nodes-5rkzf" event={"ID":"1e66f491-e230-42f2-aab5-40c0ab8f3090","Type":"ContainerDied","Data":"6f727bf43fd18d92420aa3164049f8b1aab3934424fd7bb5c63f1b7587d9094f"} Jan 20 18:07:22 crc kubenswrapper[4558]: I0120 18:07:22.975309 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6f727bf43fd18d92420aa3164049f8b1aab3934424fd7bb5c63f1b7587d9094f" Jan 20 18:07:22 crc kubenswrapper[4558]: I0120 18:07:22.975377 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-ovn-edpm-multinodeset-edpm-compute-no-nodes-5rkzf" Jan 20 18:07:23 crc kubenswrapper[4558]: I0120 18:07:23.033950 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-sriov-edpm-multinodeset-edpm-compute-no-nodes-t57nm"] Jan 20 18:07:23 crc kubenswrapper[4558]: E0120 18:07:23.034533 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1e66f491-e230-42f2-aab5-40c0ab8f3090" containerName="neutron-ovn-edpm-multinodeset-edpm-compute-no-nodes" Jan 20 18:07:23 crc kubenswrapper[4558]: I0120 18:07:23.034616 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1e66f491-e230-42f2-aab5-40c0ab8f3090" containerName="neutron-ovn-edpm-multinodeset-edpm-compute-no-nodes" Jan 20 18:07:23 crc kubenswrapper[4558]: I0120 18:07:23.034829 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="1e66f491-e230-42f2-aab5-40c0ab8f3090" containerName="neutron-ovn-edpm-multinodeset-edpm-compute-no-nodes" Jan 20 18:07:23 crc kubenswrapper[4558]: I0120 18:07:23.035405 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-sriov-edpm-multinodeset-edpm-compute-no-nodes-t57nm" Jan 20 18:07:23 crc kubenswrapper[4558]: I0120 18:07:23.038217 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"combined-ca-bundle" Jan 20 18:07:23 crc kubenswrapper[4558]: I0120 18:07:23.038399 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-sriov-agent-neutron-config" Jan 20 18:07:23 crc kubenswrapper[4558]: I0120 18:07:23.039076 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"edpm-compute-no-nodes-dockercfg-tljmj" Jan 20 18:07:23 crc kubenswrapper[4558]: I0120 18:07:23.039279 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-edpm-compute-no-nodes" Jan 20 18:07:23 crc kubenswrapper[4558]: I0120 18:07:23.039328 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 18:07:23 crc kubenswrapper[4558]: I0120 18:07:23.040821 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 18:07:23 crc kubenswrapper[4558]: I0120 18:07:23.049254 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-sriov-edpm-multinodeset-edpm-compute-no-nodes-t57nm"] Jan 20 18:07:23 crc kubenswrapper[4558]: I0120 18:07:23.180771 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/8447470c-f397-401f-ad5c-048f3b889af9-neutron-sriov-agent-neutron-config-0\") pod \"neutron-sriov-edpm-multinodeset-edpm-compute-no-nodes-t57nm\" (UID: \"8447470c-f397-401f-ad5c-048f3b889af9\") " pod="openstack-kuttl-tests/neutron-sriov-edpm-multinodeset-edpm-compute-no-nodes-t57nm" Jan 20 18:07:23 crc kubenswrapper[4558]: I0120 18:07:23.180844 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8447470c-f397-401f-ad5c-048f3b889af9-neutron-sriov-combined-ca-bundle\") pod \"neutron-sriov-edpm-multinodeset-edpm-compute-no-nodes-t57nm\" (UID: \"8447470c-f397-401f-ad5c-048f3b889af9\") " 
pod="openstack-kuttl-tests/neutron-sriov-edpm-multinodeset-edpm-compute-no-nodes-t57nm" Jan 20 18:07:23 crc kubenswrapper[4558]: I0120 18:07:23.180877 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/8447470c-f397-401f-ad5c-048f3b889af9-ssh-key-edpm-compute-no-nodes\") pod \"neutron-sriov-edpm-multinodeset-edpm-compute-no-nodes-t57nm\" (UID: \"8447470c-f397-401f-ad5c-048f3b889af9\") " pod="openstack-kuttl-tests/neutron-sriov-edpm-multinodeset-edpm-compute-no-nodes-t57nm" Jan 20 18:07:23 crc kubenswrapper[4558]: I0120 18:07:23.180917 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nv2lx\" (UniqueName: \"kubernetes.io/projected/8447470c-f397-401f-ad5c-048f3b889af9-kube-api-access-nv2lx\") pod \"neutron-sriov-edpm-multinodeset-edpm-compute-no-nodes-t57nm\" (UID: \"8447470c-f397-401f-ad5c-048f3b889af9\") " pod="openstack-kuttl-tests/neutron-sriov-edpm-multinodeset-edpm-compute-no-nodes-t57nm" Jan 20 18:07:23 crc kubenswrapper[4558]: I0120 18:07:23.181049 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8447470c-f397-401f-ad5c-048f3b889af9-inventory\") pod \"neutron-sriov-edpm-multinodeset-edpm-compute-no-nodes-t57nm\" (UID: \"8447470c-f397-401f-ad5c-048f3b889af9\") " pod="openstack-kuttl-tests/neutron-sriov-edpm-multinodeset-edpm-compute-no-nodes-t57nm" Jan 20 18:07:23 crc kubenswrapper[4558]: I0120 18:07:23.282409 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/8447470c-f397-401f-ad5c-048f3b889af9-neutron-sriov-agent-neutron-config-0\") pod \"neutron-sriov-edpm-multinodeset-edpm-compute-no-nodes-t57nm\" (UID: \"8447470c-f397-401f-ad5c-048f3b889af9\") " pod="openstack-kuttl-tests/neutron-sriov-edpm-multinodeset-edpm-compute-no-nodes-t57nm" Jan 20 18:07:23 crc kubenswrapper[4558]: I0120 18:07:23.282478 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8447470c-f397-401f-ad5c-048f3b889af9-neutron-sriov-combined-ca-bundle\") pod \"neutron-sriov-edpm-multinodeset-edpm-compute-no-nodes-t57nm\" (UID: \"8447470c-f397-401f-ad5c-048f3b889af9\") " pod="openstack-kuttl-tests/neutron-sriov-edpm-multinodeset-edpm-compute-no-nodes-t57nm" Jan 20 18:07:23 crc kubenswrapper[4558]: I0120 18:07:23.282794 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/8447470c-f397-401f-ad5c-048f3b889af9-ssh-key-edpm-compute-no-nodes\") pod \"neutron-sriov-edpm-multinodeset-edpm-compute-no-nodes-t57nm\" (UID: \"8447470c-f397-401f-ad5c-048f3b889af9\") " pod="openstack-kuttl-tests/neutron-sriov-edpm-multinodeset-edpm-compute-no-nodes-t57nm" Jan 20 18:07:23 crc kubenswrapper[4558]: I0120 18:07:23.282828 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nv2lx\" (UniqueName: \"kubernetes.io/projected/8447470c-f397-401f-ad5c-048f3b889af9-kube-api-access-nv2lx\") pod \"neutron-sriov-edpm-multinodeset-edpm-compute-no-nodes-t57nm\" (UID: \"8447470c-f397-401f-ad5c-048f3b889af9\") " pod="openstack-kuttl-tests/neutron-sriov-edpm-multinodeset-edpm-compute-no-nodes-t57nm" Jan 20 18:07:23 crc 
kubenswrapper[4558]: I0120 18:07:23.283028 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8447470c-f397-401f-ad5c-048f3b889af9-inventory\") pod \"neutron-sriov-edpm-multinodeset-edpm-compute-no-nodes-t57nm\" (UID: \"8447470c-f397-401f-ad5c-048f3b889af9\") " pod="openstack-kuttl-tests/neutron-sriov-edpm-multinodeset-edpm-compute-no-nodes-t57nm" Jan 20 18:07:23 crc kubenswrapper[4558]: I0120 18:07:23.286907 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/8447470c-f397-401f-ad5c-048f3b889af9-neutron-sriov-agent-neutron-config-0\") pod \"neutron-sriov-edpm-multinodeset-edpm-compute-no-nodes-t57nm\" (UID: \"8447470c-f397-401f-ad5c-048f3b889af9\") " pod="openstack-kuttl-tests/neutron-sriov-edpm-multinodeset-edpm-compute-no-nodes-t57nm" Jan 20 18:07:23 crc kubenswrapper[4558]: I0120 18:07:23.286920 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8447470c-f397-401f-ad5c-048f3b889af9-neutron-sriov-combined-ca-bundle\") pod \"neutron-sriov-edpm-multinodeset-edpm-compute-no-nodes-t57nm\" (UID: \"8447470c-f397-401f-ad5c-048f3b889af9\") " pod="openstack-kuttl-tests/neutron-sriov-edpm-multinodeset-edpm-compute-no-nodes-t57nm" Jan 20 18:07:23 crc kubenswrapper[4558]: I0120 18:07:23.287054 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8447470c-f397-401f-ad5c-048f3b889af9-inventory\") pod \"neutron-sriov-edpm-multinodeset-edpm-compute-no-nodes-t57nm\" (UID: \"8447470c-f397-401f-ad5c-048f3b889af9\") " pod="openstack-kuttl-tests/neutron-sriov-edpm-multinodeset-edpm-compute-no-nodes-t57nm" Jan 20 18:07:23 crc kubenswrapper[4558]: I0120 18:07:23.287589 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/8447470c-f397-401f-ad5c-048f3b889af9-ssh-key-edpm-compute-no-nodes\") pod \"neutron-sriov-edpm-multinodeset-edpm-compute-no-nodes-t57nm\" (UID: \"8447470c-f397-401f-ad5c-048f3b889af9\") " pod="openstack-kuttl-tests/neutron-sriov-edpm-multinodeset-edpm-compute-no-nodes-t57nm" Jan 20 18:07:23 crc kubenswrapper[4558]: I0120 18:07:23.298065 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nv2lx\" (UniqueName: \"kubernetes.io/projected/8447470c-f397-401f-ad5c-048f3b889af9-kube-api-access-nv2lx\") pod \"neutron-sriov-edpm-multinodeset-edpm-compute-no-nodes-t57nm\" (UID: \"8447470c-f397-401f-ad5c-048f3b889af9\") " pod="openstack-kuttl-tests/neutron-sriov-edpm-multinodeset-edpm-compute-no-nodes-t57nm" Jan 20 18:07:23 crc kubenswrapper[4558]: I0120 18:07:23.349625 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-sriov-edpm-multinodeset-edpm-compute-no-nodes-t57nm" Jan 20 18:07:23 crc kubenswrapper[4558]: I0120 18:07:23.741326 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-sriov-edpm-multinodeset-edpm-compute-no-nodes-t57nm"] Jan 20 18:07:23 crc kubenswrapper[4558]: I0120 18:07:23.987095 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-sriov-edpm-multinodeset-edpm-compute-no-nodes-t57nm" event={"ID":"8447470c-f397-401f-ad5c-048f3b889af9","Type":"ContainerStarted","Data":"cdf2d4ffcb2842b962fcee4e91add3dd04827356fb404ec900da0f857b3112ba"} Jan 20 18:07:24 crc kubenswrapper[4558]: I0120 18:07:24.996605 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-sriov-edpm-multinodeset-edpm-compute-no-nodes-t57nm" event={"ID":"8447470c-f397-401f-ad5c-048f3b889af9","Type":"ContainerStarted","Data":"ba04ebadbcc7e6eaa5503d46e3109eb8b44ec6528c1aa862c2caa95105251353"} Jan 20 18:07:25 crc kubenswrapper[4558]: I0120 18:07:25.017107 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/neutron-sriov-edpm-multinodeset-edpm-compute-no-nodes-t57nm" podStartSLOduration=1.477660414 podStartE2EDuration="2.017087965s" podCreationTimestamp="2026-01-20 18:07:23 +0000 UTC" firstStartedPulling="2026-01-20 18:07:23.748597411 +0000 UTC m=+5137.508935378" lastFinishedPulling="2026-01-20 18:07:24.288024962 +0000 UTC m=+5138.048362929" observedRunningTime="2026-01-20 18:07:25.014639581 +0000 UTC m=+5138.774977539" watchObservedRunningTime="2026-01-20 18:07:25.017087965 +0000 UTC m=+5138.777425932" Jan 20 18:07:26 crc kubenswrapper[4558]: I0120 18:07:26.008917 4558 generic.go:334] "Generic (PLEG): container finished" podID="8447470c-f397-401f-ad5c-048f3b889af9" containerID="ba04ebadbcc7e6eaa5503d46e3109eb8b44ec6528c1aa862c2caa95105251353" exitCode=0 Jan 20 18:07:26 crc kubenswrapper[4558]: I0120 18:07:26.009115 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-sriov-edpm-multinodeset-edpm-compute-no-nodes-t57nm" event={"ID":"8447470c-f397-401f-ad5c-048f3b889af9","Type":"ContainerDied","Data":"ba04ebadbcc7e6eaa5503d46e3109eb8b44ec6528c1aa862c2caa95105251353"} Jan 20 18:07:27 crc kubenswrapper[4558]: I0120 18:07:27.298867 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-sriov-edpm-multinodeset-edpm-compute-no-nodes-t57nm" Jan 20 18:07:27 crc kubenswrapper[4558]: I0120 18:07:27.443017 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/8447470c-f397-401f-ad5c-048f3b889af9-neutron-sriov-agent-neutron-config-0\") pod \"8447470c-f397-401f-ad5c-048f3b889af9\" (UID: \"8447470c-f397-401f-ad5c-048f3b889af9\") " Jan 20 18:07:27 crc kubenswrapper[4558]: I0120 18:07:27.443445 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/8447470c-f397-401f-ad5c-048f3b889af9-ssh-key-edpm-compute-no-nodes\") pod \"8447470c-f397-401f-ad5c-048f3b889af9\" (UID: \"8447470c-f397-401f-ad5c-048f3b889af9\") " Jan 20 18:07:27 crc kubenswrapper[4558]: I0120 18:07:27.443475 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8447470c-f397-401f-ad5c-048f3b889af9-inventory\") pod \"8447470c-f397-401f-ad5c-048f3b889af9\" (UID: \"8447470c-f397-401f-ad5c-048f3b889af9\") " Jan 20 18:07:27 crc kubenswrapper[4558]: I0120 18:07:27.443504 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nv2lx\" (UniqueName: \"kubernetes.io/projected/8447470c-f397-401f-ad5c-048f3b889af9-kube-api-access-nv2lx\") pod \"8447470c-f397-401f-ad5c-048f3b889af9\" (UID: \"8447470c-f397-401f-ad5c-048f3b889af9\") " Jan 20 18:07:27 crc kubenswrapper[4558]: I0120 18:07:27.443559 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8447470c-f397-401f-ad5c-048f3b889af9-neutron-sriov-combined-ca-bundle\") pod \"8447470c-f397-401f-ad5c-048f3b889af9\" (UID: \"8447470c-f397-401f-ad5c-048f3b889af9\") " Jan 20 18:07:27 crc kubenswrapper[4558]: I0120 18:07:27.449052 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8447470c-f397-401f-ad5c-048f3b889af9-neutron-sriov-combined-ca-bundle" (OuterVolumeSpecName: "neutron-sriov-combined-ca-bundle") pod "8447470c-f397-401f-ad5c-048f3b889af9" (UID: "8447470c-f397-401f-ad5c-048f3b889af9"). InnerVolumeSpecName "neutron-sriov-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:07:27 crc kubenswrapper[4558]: I0120 18:07:27.450473 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8447470c-f397-401f-ad5c-048f3b889af9-kube-api-access-nv2lx" (OuterVolumeSpecName: "kube-api-access-nv2lx") pod "8447470c-f397-401f-ad5c-048f3b889af9" (UID: "8447470c-f397-401f-ad5c-048f3b889af9"). InnerVolumeSpecName "kube-api-access-nv2lx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:07:27 crc kubenswrapper[4558]: I0120 18:07:27.464518 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8447470c-f397-401f-ad5c-048f3b889af9-ssh-key-edpm-compute-no-nodes" (OuterVolumeSpecName: "ssh-key-edpm-compute-no-nodes") pod "8447470c-f397-401f-ad5c-048f3b889af9" (UID: "8447470c-f397-401f-ad5c-048f3b889af9"). InnerVolumeSpecName "ssh-key-edpm-compute-no-nodes". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:07:27 crc kubenswrapper[4558]: I0120 18:07:27.464939 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8447470c-f397-401f-ad5c-048f3b889af9-neutron-sriov-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-sriov-agent-neutron-config-0") pod "8447470c-f397-401f-ad5c-048f3b889af9" (UID: "8447470c-f397-401f-ad5c-048f3b889af9"). InnerVolumeSpecName "neutron-sriov-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:07:27 crc kubenswrapper[4558]: I0120 18:07:27.465117 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8447470c-f397-401f-ad5c-048f3b889af9-inventory" (OuterVolumeSpecName: "inventory") pod "8447470c-f397-401f-ad5c-048f3b889af9" (UID: "8447470c-f397-401f-ad5c-048f3b889af9"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:07:27 crc kubenswrapper[4558]: I0120 18:07:27.545068 4558 reconciler_common.go:293] "Volume detached for volume \"neutron-sriov-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/8447470c-f397-401f-ad5c-048f3b889af9-neutron-sriov-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Jan 20 18:07:27 crc kubenswrapper[4558]: I0120 18:07:27.545098 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/8447470c-f397-401f-ad5c-048f3b889af9-ssh-key-edpm-compute-no-nodes\") on node \"crc\" DevicePath \"\"" Jan 20 18:07:27 crc kubenswrapper[4558]: I0120 18:07:27.545112 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8447470c-f397-401f-ad5c-048f3b889af9-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:07:27 crc kubenswrapper[4558]: I0120 18:07:27.545135 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nv2lx\" (UniqueName: \"kubernetes.io/projected/8447470c-f397-401f-ad5c-048f3b889af9-kube-api-access-nv2lx\") on node \"crc\" DevicePath \"\"" Jan 20 18:07:27 crc kubenswrapper[4558]: I0120 18:07:27.545147 4558 reconciler_common.go:293] "Volume detached for volume \"neutron-sriov-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8447470c-f397-401f-ad5c-048f3b889af9-neutron-sriov-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:07:28 crc kubenswrapper[4558]: I0120 18:07:28.034849 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-sriov-edpm-multinodeset-edpm-compute-no-nodes-t57nm" event={"ID":"8447470c-f397-401f-ad5c-048f3b889af9","Type":"ContainerDied","Data":"cdf2d4ffcb2842b962fcee4e91add3dd04827356fb404ec900da0f857b3112ba"} Jan 20 18:07:28 crc kubenswrapper[4558]: I0120 18:07:28.034942 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cdf2d4ffcb2842b962fcee4e91add3dd04827356fb404ec900da0f857b3112ba" Jan 20 18:07:28 crc kubenswrapper[4558]: I0120 18:07:28.034968 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-sriov-edpm-multinodeset-edpm-compute-no-nodes-t57nm" Jan 20 18:07:28 crc kubenswrapper[4558]: I0120 18:07:28.099305 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/neutron-dhcp-edpm-multinodeset-edpm-compute-no-nodes-j5j2l"] Jan 20 18:07:28 crc kubenswrapper[4558]: E0120 18:07:28.099656 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8447470c-f397-401f-ad5c-048f3b889af9" containerName="neutron-sriov-edpm-multinodeset-edpm-compute-no-nodes" Jan 20 18:07:28 crc kubenswrapper[4558]: I0120 18:07:28.099675 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8447470c-f397-401f-ad5c-048f3b889af9" containerName="neutron-sriov-edpm-multinodeset-edpm-compute-no-nodes" Jan 20 18:07:28 crc kubenswrapper[4558]: I0120 18:07:28.099826 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8447470c-f397-401f-ad5c-048f3b889af9" containerName="neutron-sriov-edpm-multinodeset-edpm-compute-no-nodes" Jan 20 18:07:28 crc kubenswrapper[4558]: I0120 18:07:28.100397 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-dhcp-edpm-multinodeset-edpm-compute-no-nodes-j5j2l" Jan 20 18:07:28 crc kubenswrapper[4558]: I0120 18:07:28.102874 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 18:07:28 crc kubenswrapper[4558]: I0120 18:07:28.103117 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 18:07:28 crc kubenswrapper[4558]: I0120 18:07:28.103216 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"neutron-dhcp-agent-neutron-config" Jan 20 18:07:28 crc kubenswrapper[4558]: I0120 18:07:28.103343 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"edpm-compute-no-nodes-dockercfg-tljmj" Jan 20 18:07:28 crc kubenswrapper[4558]: I0120 18:07:28.104020 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-edpm-compute-no-nodes" Jan 20 18:07:28 crc kubenswrapper[4558]: I0120 18:07:28.105776 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"combined-ca-bundle" Jan 20 18:07:28 crc kubenswrapper[4558]: I0120 18:07:28.110914 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-dhcp-edpm-multinodeset-edpm-compute-no-nodes-j5j2l"] Jan 20 18:07:28 crc kubenswrapper[4558]: I0120 18:07:28.158540 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z5qlt\" (UniqueName: \"kubernetes.io/projected/f740f062-fc5a-4f8e-a109-d608844f290d-kube-api-access-z5qlt\") pod \"neutron-dhcp-edpm-multinodeset-edpm-compute-no-nodes-j5j2l\" (UID: \"f740f062-fc5a-4f8e-a109-d608844f290d\") " pod="openstack-kuttl-tests/neutron-dhcp-edpm-multinodeset-edpm-compute-no-nodes-j5j2l" Jan 20 18:07:28 crc kubenswrapper[4558]: I0120 18:07:28.158827 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/f740f062-fc5a-4f8e-a109-d608844f290d-neutron-dhcp-agent-neutron-config-0\") pod \"neutron-dhcp-edpm-multinodeset-edpm-compute-no-nodes-j5j2l\" (UID: \"f740f062-fc5a-4f8e-a109-d608844f290d\") " 
pod="openstack-kuttl-tests/neutron-dhcp-edpm-multinodeset-edpm-compute-no-nodes-j5j2l" Jan 20 18:07:28 crc kubenswrapper[4558]: I0120 18:07:28.159040 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f740f062-fc5a-4f8e-a109-d608844f290d-neutron-dhcp-combined-ca-bundle\") pod \"neutron-dhcp-edpm-multinodeset-edpm-compute-no-nodes-j5j2l\" (UID: \"f740f062-fc5a-4f8e-a109-d608844f290d\") " pod="openstack-kuttl-tests/neutron-dhcp-edpm-multinodeset-edpm-compute-no-nodes-j5j2l" Jan 20 18:07:28 crc kubenswrapper[4558]: I0120 18:07:28.159121 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/f740f062-fc5a-4f8e-a109-d608844f290d-ssh-key-edpm-compute-no-nodes\") pod \"neutron-dhcp-edpm-multinodeset-edpm-compute-no-nodes-j5j2l\" (UID: \"f740f062-fc5a-4f8e-a109-d608844f290d\") " pod="openstack-kuttl-tests/neutron-dhcp-edpm-multinodeset-edpm-compute-no-nodes-j5j2l" Jan 20 18:07:28 crc kubenswrapper[4558]: I0120 18:07:28.159189 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f740f062-fc5a-4f8e-a109-d608844f290d-inventory\") pod \"neutron-dhcp-edpm-multinodeset-edpm-compute-no-nodes-j5j2l\" (UID: \"f740f062-fc5a-4f8e-a109-d608844f290d\") " pod="openstack-kuttl-tests/neutron-dhcp-edpm-multinodeset-edpm-compute-no-nodes-j5j2l" Jan 20 18:07:28 crc kubenswrapper[4558]: I0120 18:07:28.260315 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f740f062-fc5a-4f8e-a109-d608844f290d-neutron-dhcp-combined-ca-bundle\") pod \"neutron-dhcp-edpm-multinodeset-edpm-compute-no-nodes-j5j2l\" (UID: \"f740f062-fc5a-4f8e-a109-d608844f290d\") " pod="openstack-kuttl-tests/neutron-dhcp-edpm-multinodeset-edpm-compute-no-nodes-j5j2l" Jan 20 18:07:28 crc kubenswrapper[4558]: I0120 18:07:28.260366 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/f740f062-fc5a-4f8e-a109-d608844f290d-ssh-key-edpm-compute-no-nodes\") pod \"neutron-dhcp-edpm-multinodeset-edpm-compute-no-nodes-j5j2l\" (UID: \"f740f062-fc5a-4f8e-a109-d608844f290d\") " pod="openstack-kuttl-tests/neutron-dhcp-edpm-multinodeset-edpm-compute-no-nodes-j5j2l" Jan 20 18:07:28 crc kubenswrapper[4558]: I0120 18:07:28.260392 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f740f062-fc5a-4f8e-a109-d608844f290d-inventory\") pod \"neutron-dhcp-edpm-multinodeset-edpm-compute-no-nodes-j5j2l\" (UID: \"f740f062-fc5a-4f8e-a109-d608844f290d\") " pod="openstack-kuttl-tests/neutron-dhcp-edpm-multinodeset-edpm-compute-no-nodes-j5j2l" Jan 20 18:07:28 crc kubenswrapper[4558]: I0120 18:07:28.260439 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z5qlt\" (UniqueName: \"kubernetes.io/projected/f740f062-fc5a-4f8e-a109-d608844f290d-kube-api-access-z5qlt\") pod \"neutron-dhcp-edpm-multinodeset-edpm-compute-no-nodes-j5j2l\" (UID: \"f740f062-fc5a-4f8e-a109-d608844f290d\") " pod="openstack-kuttl-tests/neutron-dhcp-edpm-multinodeset-edpm-compute-no-nodes-j5j2l" Jan 20 18:07:28 crc kubenswrapper[4558]: I0120 18:07:28.260526 4558 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/f740f062-fc5a-4f8e-a109-d608844f290d-neutron-dhcp-agent-neutron-config-0\") pod \"neutron-dhcp-edpm-multinodeset-edpm-compute-no-nodes-j5j2l\" (UID: \"f740f062-fc5a-4f8e-a109-d608844f290d\") " pod="openstack-kuttl-tests/neutron-dhcp-edpm-multinodeset-edpm-compute-no-nodes-j5j2l" Jan 20 18:07:28 crc kubenswrapper[4558]: I0120 18:07:28.265970 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f740f062-fc5a-4f8e-a109-d608844f290d-inventory\") pod \"neutron-dhcp-edpm-multinodeset-edpm-compute-no-nodes-j5j2l\" (UID: \"f740f062-fc5a-4f8e-a109-d608844f290d\") " pod="openstack-kuttl-tests/neutron-dhcp-edpm-multinodeset-edpm-compute-no-nodes-j5j2l" Jan 20 18:07:28 crc kubenswrapper[4558]: I0120 18:07:28.265970 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/f740f062-fc5a-4f8e-a109-d608844f290d-ssh-key-edpm-compute-no-nodes\") pod \"neutron-dhcp-edpm-multinodeset-edpm-compute-no-nodes-j5j2l\" (UID: \"f740f062-fc5a-4f8e-a109-d608844f290d\") " pod="openstack-kuttl-tests/neutron-dhcp-edpm-multinodeset-edpm-compute-no-nodes-j5j2l" Jan 20 18:07:28 crc kubenswrapper[4558]: I0120 18:07:28.265970 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f740f062-fc5a-4f8e-a109-d608844f290d-neutron-dhcp-combined-ca-bundle\") pod \"neutron-dhcp-edpm-multinodeset-edpm-compute-no-nodes-j5j2l\" (UID: \"f740f062-fc5a-4f8e-a109-d608844f290d\") " pod="openstack-kuttl-tests/neutron-dhcp-edpm-multinodeset-edpm-compute-no-nodes-j5j2l" Jan 20 18:07:28 crc kubenswrapper[4558]: I0120 18:07:28.266136 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/f740f062-fc5a-4f8e-a109-d608844f290d-neutron-dhcp-agent-neutron-config-0\") pod \"neutron-dhcp-edpm-multinodeset-edpm-compute-no-nodes-j5j2l\" (UID: \"f740f062-fc5a-4f8e-a109-d608844f290d\") " pod="openstack-kuttl-tests/neutron-dhcp-edpm-multinodeset-edpm-compute-no-nodes-j5j2l" Jan 20 18:07:28 crc kubenswrapper[4558]: I0120 18:07:28.275308 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z5qlt\" (UniqueName: \"kubernetes.io/projected/f740f062-fc5a-4f8e-a109-d608844f290d-kube-api-access-z5qlt\") pod \"neutron-dhcp-edpm-multinodeset-edpm-compute-no-nodes-j5j2l\" (UID: \"f740f062-fc5a-4f8e-a109-d608844f290d\") " pod="openstack-kuttl-tests/neutron-dhcp-edpm-multinodeset-edpm-compute-no-nodes-j5j2l" Jan 20 18:07:28 crc kubenswrapper[4558]: I0120 18:07:28.414236 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/neutron-dhcp-edpm-multinodeset-edpm-compute-no-nodes-j5j2l" Jan 20 18:07:28 crc kubenswrapper[4558]: I0120 18:07:28.811547 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/neutron-dhcp-edpm-multinodeset-edpm-compute-no-nodes-j5j2l"] Jan 20 18:07:28 crc kubenswrapper[4558]: W0120 18:07:28.816032 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf740f062_fc5a_4f8e_a109_d608844f290d.slice/crio-a70f435e59569f5e91af42aac7be3d21aa899be284b586fff47058a66cf8a4ed WatchSource:0}: Error finding container a70f435e59569f5e91af42aac7be3d21aa899be284b586fff47058a66cf8a4ed: Status 404 returned error can't find the container with id a70f435e59569f5e91af42aac7be3d21aa899be284b586fff47058a66cf8a4ed Jan 20 18:07:29 crc kubenswrapper[4558]: I0120 18:07:29.047481 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-dhcp-edpm-multinodeset-edpm-compute-no-nodes-j5j2l" event={"ID":"f740f062-fc5a-4f8e-a109-d608844f290d","Type":"ContainerStarted","Data":"a70f435e59569f5e91af42aac7be3d21aa899be284b586fff47058a66cf8a4ed"} Jan 20 18:07:30 crc kubenswrapper[4558]: I0120 18:07:30.061639 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-dhcp-edpm-multinodeset-edpm-compute-no-nodes-j5j2l" event={"ID":"f740f062-fc5a-4f8e-a109-d608844f290d","Type":"ContainerStarted","Data":"fd4e6633d0b1d139f29d4fabd28e07904c377771ba3431b9957079f44f7cc234"} Jan 20 18:07:30 crc kubenswrapper[4558]: I0120 18:07:30.089831 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/neutron-dhcp-edpm-multinodeset-edpm-compute-no-nodes-j5j2l" podStartSLOduration=1.44018285 podStartE2EDuration="2.089808385s" podCreationTimestamp="2026-01-20 18:07:28 +0000 UTC" firstStartedPulling="2026-01-20 18:07:28.819185973 +0000 UTC m=+5142.579523940" lastFinishedPulling="2026-01-20 18:07:29.468811508 +0000 UTC m=+5143.229149475" observedRunningTime="2026-01-20 18:07:30.083554597 +0000 UTC m=+5143.843892564" watchObservedRunningTime="2026-01-20 18:07:30.089808385 +0000 UTC m=+5143.850146352" Jan 20 18:07:31 crc kubenswrapper[4558]: I0120 18:07:31.073231 4558 generic.go:334] "Generic (PLEG): container finished" podID="f740f062-fc5a-4f8e-a109-d608844f290d" containerID="fd4e6633d0b1d139f29d4fabd28e07904c377771ba3431b9957079f44f7cc234" exitCode=0 Jan 20 18:07:31 crc kubenswrapper[4558]: I0120 18:07:31.073336 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-dhcp-edpm-multinodeset-edpm-compute-no-nodes-j5j2l" event={"ID":"f740f062-fc5a-4f8e-a109-d608844f290d","Type":"ContainerDied","Data":"fd4e6633d0b1d139f29d4fabd28e07904c377771ba3431b9957079f44f7cc234"} Jan 20 18:07:31 crc kubenswrapper[4558]: I0120 18:07:31.915337 4558 scope.go:117] "RemoveContainer" containerID="b1e8d55d144061fe99552080982b3674f1edcf3751fb0f212ec977996a16cba9" Jan 20 18:07:31 crc kubenswrapper[4558]: I0120 18:07:31.944243 4558 scope.go:117] "RemoveContainer" containerID="9eb7ac8f0ca68c77c099de96b56d1159e971f0399ad8ba04172b97e40aae2b12" Jan 20 18:07:31 crc kubenswrapper[4558]: I0120 18:07:31.966435 4558 scope.go:117] "RemoveContainer" containerID="b9f095dcb40f0fa598a4805ecd94fad1a657b559a8d086c77cf3682fb4f334a2" Jan 20 18:07:31 crc kubenswrapper[4558]: I0120 18:07:31.991139 4558 scope.go:117] "RemoveContainer" 
containerID="cb9d97b6167536a1e4f73d87f108cd283463df50132925265a657a9ec4c91f10" Jan 20 18:07:32 crc kubenswrapper[4558]: I0120 18:07:32.016273 4558 scope.go:117] "RemoveContainer" containerID="130c862694f4bf857ee52c143d80ecb5bfd0e0e61fd692aff1924680329ca140" Jan 20 18:07:32 crc kubenswrapper[4558]: I0120 18:07:32.047624 4558 scope.go:117] "RemoveContainer" containerID="f66d5e3b991f3f68fa91c7c17d38fd34980830c10782a4b3d415cb3841f21fde" Jan 20 18:07:32 crc kubenswrapper[4558]: I0120 18:07:32.071421 4558 scope.go:117] "RemoveContainer" containerID="8b87b8214b3bb5ab1c7a2b632f239b5c65e546850d535e6893d19fc3331a702a" Jan 20 18:07:32 crc kubenswrapper[4558]: I0120 18:07:32.097156 4558 scope.go:117] "RemoveContainer" containerID="b982130ff786a10b03fd153dfb0a401619acd1328bd5a3ffcd2bf61c96ab54ca" Jan 20 18:07:32 crc kubenswrapper[4558]: I0120 18:07:32.128137 4558 scope.go:117] "RemoveContainer" containerID="12a4698521dc5737817cfa0bf5d12f229dda374c9b5eb85943f9d288ac3776b0" Jan 20 18:07:32 crc kubenswrapper[4558]: I0120 18:07:32.349253 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-dhcp-edpm-multinodeset-edpm-compute-no-nodes-j5j2l" Jan 20 18:07:32 crc kubenswrapper[4558]: I0120 18:07:32.415235 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f740f062-fc5a-4f8e-a109-d608844f290d-inventory\") pod \"f740f062-fc5a-4f8e-a109-d608844f290d\" (UID: \"f740f062-fc5a-4f8e-a109-d608844f290d\") " Jan 20 18:07:32 crc kubenswrapper[4558]: I0120 18:07:32.415327 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/f740f062-fc5a-4f8e-a109-d608844f290d-ssh-key-edpm-compute-no-nodes\") pod \"f740f062-fc5a-4f8e-a109-d608844f290d\" (UID: \"f740f062-fc5a-4f8e-a109-d608844f290d\") " Jan 20 18:07:32 crc kubenswrapper[4558]: I0120 18:07:32.415373 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/f740f062-fc5a-4f8e-a109-d608844f290d-neutron-dhcp-agent-neutron-config-0\") pod \"f740f062-fc5a-4f8e-a109-d608844f290d\" (UID: \"f740f062-fc5a-4f8e-a109-d608844f290d\") " Jan 20 18:07:32 crc kubenswrapper[4558]: I0120 18:07:32.415462 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f740f062-fc5a-4f8e-a109-d608844f290d-neutron-dhcp-combined-ca-bundle\") pod \"f740f062-fc5a-4f8e-a109-d608844f290d\" (UID: \"f740f062-fc5a-4f8e-a109-d608844f290d\") " Jan 20 18:07:32 crc kubenswrapper[4558]: I0120 18:07:32.415532 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z5qlt\" (UniqueName: \"kubernetes.io/projected/f740f062-fc5a-4f8e-a109-d608844f290d-kube-api-access-z5qlt\") pod \"f740f062-fc5a-4f8e-a109-d608844f290d\" (UID: \"f740f062-fc5a-4f8e-a109-d608844f290d\") " Jan 20 18:07:32 crc kubenswrapper[4558]: I0120 18:07:32.421156 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f740f062-fc5a-4f8e-a109-d608844f290d-neutron-dhcp-combined-ca-bundle" (OuterVolumeSpecName: "neutron-dhcp-combined-ca-bundle") pod "f740f062-fc5a-4f8e-a109-d608844f290d" (UID: "f740f062-fc5a-4f8e-a109-d608844f290d"). InnerVolumeSpecName "neutron-dhcp-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:07:32 crc kubenswrapper[4558]: I0120 18:07:32.421374 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f740f062-fc5a-4f8e-a109-d608844f290d-kube-api-access-z5qlt" (OuterVolumeSpecName: "kube-api-access-z5qlt") pod "f740f062-fc5a-4f8e-a109-d608844f290d" (UID: "f740f062-fc5a-4f8e-a109-d608844f290d"). InnerVolumeSpecName "kube-api-access-z5qlt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:07:32 crc kubenswrapper[4558]: I0120 18:07:32.435625 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f740f062-fc5a-4f8e-a109-d608844f290d-neutron-dhcp-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-dhcp-agent-neutron-config-0") pod "f740f062-fc5a-4f8e-a109-d608844f290d" (UID: "f740f062-fc5a-4f8e-a109-d608844f290d"). InnerVolumeSpecName "neutron-dhcp-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:07:32 crc kubenswrapper[4558]: I0120 18:07:32.435930 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f740f062-fc5a-4f8e-a109-d608844f290d-inventory" (OuterVolumeSpecName: "inventory") pod "f740f062-fc5a-4f8e-a109-d608844f290d" (UID: "f740f062-fc5a-4f8e-a109-d608844f290d"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:07:32 crc kubenswrapper[4558]: I0120 18:07:32.437053 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f740f062-fc5a-4f8e-a109-d608844f290d-ssh-key-edpm-compute-no-nodes" (OuterVolumeSpecName: "ssh-key-edpm-compute-no-nodes") pod "f740f062-fc5a-4f8e-a109-d608844f290d" (UID: "f740f062-fc5a-4f8e-a109-d608844f290d"). InnerVolumeSpecName "ssh-key-edpm-compute-no-nodes". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:07:32 crc kubenswrapper[4558]: I0120 18:07:32.517094 4558 reconciler_common.go:293] "Volume detached for volume \"neutron-dhcp-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/f740f062-fc5a-4f8e-a109-d608844f290d-neutron-dhcp-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Jan 20 18:07:32 crc kubenswrapper[4558]: I0120 18:07:32.517123 4558 reconciler_common.go:293] "Volume detached for volume \"neutron-dhcp-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f740f062-fc5a-4f8e-a109-d608844f290d-neutron-dhcp-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:07:32 crc kubenswrapper[4558]: I0120 18:07:32.517138 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z5qlt\" (UniqueName: \"kubernetes.io/projected/f740f062-fc5a-4f8e-a109-d608844f290d-kube-api-access-z5qlt\") on node \"crc\" DevicePath \"\"" Jan 20 18:07:32 crc kubenswrapper[4558]: I0120 18:07:32.517150 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f740f062-fc5a-4f8e-a109-d608844f290d-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:07:32 crc kubenswrapper[4558]: I0120 18:07:32.517160 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/f740f062-fc5a-4f8e-a109-d608844f290d-ssh-key-edpm-compute-no-nodes\") on node \"crc\" DevicePath \"\"" Jan 20 18:07:33 crc kubenswrapper[4558]: I0120 18:07:33.105099 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/neutron-dhcp-edpm-multinodeset-edpm-compute-no-nodes-j5j2l" event={"ID":"f740f062-fc5a-4f8e-a109-d608844f290d","Type":"ContainerDied","Data":"a70f435e59569f5e91af42aac7be3d21aa899be284b586fff47058a66cf8a4ed"} Jan 20 18:07:33 crc kubenswrapper[4558]: I0120 18:07:33.105150 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/neutron-dhcp-edpm-multinodeset-edpm-compute-no-nodes-j5j2l" Jan 20 18:07:33 crc kubenswrapper[4558]: I0120 18:07:33.105188 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a70f435e59569f5e91af42aac7be3d21aa899be284b586fff47058a66cf8a4ed" Jan 20 18:07:33 crc kubenswrapper[4558]: I0120 18:07:33.149239 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/libvirt-edpm-multinodeset-edpm-compute-no-nodes-wx29d"] Jan 20 18:07:33 crc kubenswrapper[4558]: E0120 18:07:33.149936 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f740f062-fc5a-4f8e-a109-d608844f290d" containerName="neutron-dhcp-edpm-multinodeset-edpm-compute-no-nodes" Jan 20 18:07:33 crc kubenswrapper[4558]: I0120 18:07:33.149957 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f740f062-fc5a-4f8e-a109-d608844f290d" containerName="neutron-dhcp-edpm-multinodeset-edpm-compute-no-nodes" Jan 20 18:07:33 crc kubenswrapper[4558]: I0120 18:07:33.150141 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f740f062-fc5a-4f8e-a109-d608844f290d" containerName="neutron-dhcp-edpm-multinodeset-edpm-compute-no-nodes" Jan 20 18:07:33 crc kubenswrapper[4558]: I0120 18:07:33.150731 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/libvirt-edpm-multinodeset-edpm-compute-no-nodes-wx29d" Jan 20 18:07:33 crc kubenswrapper[4558]: I0120 18:07:33.152780 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"edpm-compute-no-nodes-dockercfg-tljmj" Jan 20 18:07:33 crc kubenswrapper[4558]: I0120 18:07:33.152813 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"combined-ca-bundle" Jan 20 18:07:33 crc kubenswrapper[4558]: I0120 18:07:33.152837 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"libvirt-secret" Jan 20 18:07:33 crc kubenswrapper[4558]: I0120 18:07:33.153079 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 18:07:33 crc kubenswrapper[4558]: I0120 18:07:33.154755 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 18:07:33 crc kubenswrapper[4558]: I0120 18:07:33.156185 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-edpm-compute-no-nodes" Jan 20 18:07:33 crc kubenswrapper[4558]: I0120 18:07:33.161575 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/libvirt-edpm-multinodeset-edpm-compute-no-nodes-wx29d"] Jan 20 18:07:33 crc kubenswrapper[4558]: I0120 18:07:33.226594 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/d0d07753-075f-4e29-9c69-0cdc8c2a7824-libvirt-secret-0\") pod \"libvirt-edpm-multinodeset-edpm-compute-no-nodes-wx29d\" (UID: \"d0d07753-075f-4e29-9c69-0cdc8c2a7824\") " pod="openstack-kuttl-tests/libvirt-edpm-multinodeset-edpm-compute-no-nodes-wx29d" Jan 20 18:07:33 crc kubenswrapper[4558]: I0120 18:07:33.226966 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/d0d07753-075f-4e29-9c69-0cdc8c2a7824-ssh-key-edpm-compute-no-nodes\") pod \"libvirt-edpm-multinodeset-edpm-compute-no-nodes-wx29d\" (UID: \"d0d07753-075f-4e29-9c69-0cdc8c2a7824\") " pod="openstack-kuttl-tests/libvirt-edpm-multinodeset-edpm-compute-no-nodes-wx29d" Jan 20 18:07:33 crc kubenswrapper[4558]: I0120 18:07:33.227089 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vqv2j\" (UniqueName: \"kubernetes.io/projected/d0d07753-075f-4e29-9c69-0cdc8c2a7824-kube-api-access-vqv2j\") pod \"libvirt-edpm-multinodeset-edpm-compute-no-nodes-wx29d\" (UID: \"d0d07753-075f-4e29-9c69-0cdc8c2a7824\") " pod="openstack-kuttl-tests/libvirt-edpm-multinodeset-edpm-compute-no-nodes-wx29d" Jan 20 18:07:33 crc kubenswrapper[4558]: I0120 18:07:33.227175 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0d07753-075f-4e29-9c69-0cdc8c2a7824-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-multinodeset-edpm-compute-no-nodes-wx29d\" (UID: \"d0d07753-075f-4e29-9c69-0cdc8c2a7824\") " pod="openstack-kuttl-tests/libvirt-edpm-multinodeset-edpm-compute-no-nodes-wx29d" Jan 20 18:07:33 crc kubenswrapper[4558]: I0120 18:07:33.227266 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" 
(UniqueName: \"kubernetes.io/secret/d0d07753-075f-4e29-9c69-0cdc8c2a7824-inventory\") pod \"libvirt-edpm-multinodeset-edpm-compute-no-nodes-wx29d\" (UID: \"d0d07753-075f-4e29-9c69-0cdc8c2a7824\") " pod="openstack-kuttl-tests/libvirt-edpm-multinodeset-edpm-compute-no-nodes-wx29d" Jan 20 18:07:33 crc kubenswrapper[4558]: I0120 18:07:33.327763 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/d0d07753-075f-4e29-9c69-0cdc8c2a7824-ssh-key-edpm-compute-no-nodes\") pod \"libvirt-edpm-multinodeset-edpm-compute-no-nodes-wx29d\" (UID: \"d0d07753-075f-4e29-9c69-0cdc8c2a7824\") " pod="openstack-kuttl-tests/libvirt-edpm-multinodeset-edpm-compute-no-nodes-wx29d" Jan 20 18:07:33 crc kubenswrapper[4558]: I0120 18:07:33.327817 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vqv2j\" (UniqueName: \"kubernetes.io/projected/d0d07753-075f-4e29-9c69-0cdc8c2a7824-kube-api-access-vqv2j\") pod \"libvirt-edpm-multinodeset-edpm-compute-no-nodes-wx29d\" (UID: \"d0d07753-075f-4e29-9c69-0cdc8c2a7824\") " pod="openstack-kuttl-tests/libvirt-edpm-multinodeset-edpm-compute-no-nodes-wx29d" Jan 20 18:07:33 crc kubenswrapper[4558]: I0120 18:07:33.327840 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0d07753-075f-4e29-9c69-0cdc8c2a7824-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-multinodeset-edpm-compute-no-nodes-wx29d\" (UID: \"d0d07753-075f-4e29-9c69-0cdc8c2a7824\") " pod="openstack-kuttl-tests/libvirt-edpm-multinodeset-edpm-compute-no-nodes-wx29d" Jan 20 18:07:33 crc kubenswrapper[4558]: I0120 18:07:33.327869 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d0d07753-075f-4e29-9c69-0cdc8c2a7824-inventory\") pod \"libvirt-edpm-multinodeset-edpm-compute-no-nodes-wx29d\" (UID: \"d0d07753-075f-4e29-9c69-0cdc8c2a7824\") " pod="openstack-kuttl-tests/libvirt-edpm-multinodeset-edpm-compute-no-nodes-wx29d" Jan 20 18:07:33 crc kubenswrapper[4558]: I0120 18:07:33.327917 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/d0d07753-075f-4e29-9c69-0cdc8c2a7824-libvirt-secret-0\") pod \"libvirt-edpm-multinodeset-edpm-compute-no-nodes-wx29d\" (UID: \"d0d07753-075f-4e29-9c69-0cdc8c2a7824\") " pod="openstack-kuttl-tests/libvirt-edpm-multinodeset-edpm-compute-no-nodes-wx29d" Jan 20 18:07:33 crc kubenswrapper[4558]: I0120 18:07:33.337813 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/d0d07753-075f-4e29-9c69-0cdc8c2a7824-libvirt-secret-0\") pod \"libvirt-edpm-multinodeset-edpm-compute-no-nodes-wx29d\" (UID: \"d0d07753-075f-4e29-9c69-0cdc8c2a7824\") " pod="openstack-kuttl-tests/libvirt-edpm-multinodeset-edpm-compute-no-nodes-wx29d" Jan 20 18:07:33 crc kubenswrapper[4558]: I0120 18:07:33.340631 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/d0d07753-075f-4e29-9c69-0cdc8c2a7824-ssh-key-edpm-compute-no-nodes\") pod \"libvirt-edpm-multinodeset-edpm-compute-no-nodes-wx29d\" (UID: \"d0d07753-075f-4e29-9c69-0cdc8c2a7824\") " pod="openstack-kuttl-tests/libvirt-edpm-multinodeset-edpm-compute-no-nodes-wx29d" Jan 20 18:07:33 crc kubenswrapper[4558]: I0120 
18:07:33.342616 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0d07753-075f-4e29-9c69-0cdc8c2a7824-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-multinodeset-edpm-compute-no-nodes-wx29d\" (UID: \"d0d07753-075f-4e29-9c69-0cdc8c2a7824\") " pod="openstack-kuttl-tests/libvirt-edpm-multinodeset-edpm-compute-no-nodes-wx29d" Jan 20 18:07:33 crc kubenswrapper[4558]: I0120 18:07:33.343627 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d0d07753-075f-4e29-9c69-0cdc8c2a7824-inventory\") pod \"libvirt-edpm-multinodeset-edpm-compute-no-nodes-wx29d\" (UID: \"d0d07753-075f-4e29-9c69-0cdc8c2a7824\") " pod="openstack-kuttl-tests/libvirt-edpm-multinodeset-edpm-compute-no-nodes-wx29d" Jan 20 18:07:33 crc kubenswrapper[4558]: I0120 18:07:33.365822 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vqv2j\" (UniqueName: \"kubernetes.io/projected/d0d07753-075f-4e29-9c69-0cdc8c2a7824-kube-api-access-vqv2j\") pod \"libvirt-edpm-multinodeset-edpm-compute-no-nodes-wx29d\" (UID: \"d0d07753-075f-4e29-9c69-0cdc8c2a7824\") " pod="openstack-kuttl-tests/libvirt-edpm-multinodeset-edpm-compute-no-nodes-wx29d" Jan 20 18:07:33 crc kubenswrapper[4558]: I0120 18:07:33.464080 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/libvirt-edpm-multinodeset-edpm-compute-no-nodes-wx29d" Jan 20 18:07:33 crc kubenswrapper[4558]: W0120 18:07:33.861206 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd0d07753_075f_4e29_9c69_0cdc8c2a7824.slice/crio-afd8215cf51d8f22401f7e26c53170a11ea73b4833172460880c2499f65555b9 WatchSource:0}: Error finding container afd8215cf51d8f22401f7e26c53170a11ea73b4833172460880c2499f65555b9: Status 404 returned error can't find the container with id afd8215cf51d8f22401f7e26c53170a11ea73b4833172460880c2499f65555b9 Jan 20 18:07:33 crc kubenswrapper[4558]: I0120 18:07:33.864682 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/libvirt-edpm-multinodeset-edpm-compute-no-nodes-wx29d"] Jan 20 18:07:34 crc kubenswrapper[4558]: I0120 18:07:34.115559 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/libvirt-edpm-multinodeset-edpm-compute-no-nodes-wx29d" event={"ID":"d0d07753-075f-4e29-9c69-0cdc8c2a7824","Type":"ContainerStarted","Data":"afd8215cf51d8f22401f7e26c53170a11ea73b4833172460880c2499f65555b9"} Jan 20 18:07:34 crc kubenswrapper[4558]: I0120 18:07:34.567221 4558 scope.go:117] "RemoveContainer" containerID="6b02d31ec11fe5435af988e35303a6f66b5afa5ef952ce5f7cc4b3cd5f4d9497" Jan 20 18:07:34 crc kubenswrapper[4558]: E0120 18:07:34.567448 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:07:35 crc kubenswrapper[4558]: I0120 18:07:35.128904 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/libvirt-edpm-multinodeset-edpm-compute-no-nodes-wx29d" 
event={"ID":"d0d07753-075f-4e29-9c69-0cdc8c2a7824","Type":"ContainerStarted","Data":"0df63fae7a1cc2e80032258b3ae2cdc72ec9d33fa9c69df318d943fbe7a5d952"} Jan 20 18:07:35 crc kubenswrapper[4558]: I0120 18:07:35.152779 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/libvirt-edpm-multinodeset-edpm-compute-no-nodes-wx29d" podStartSLOduration=1.620079494 podStartE2EDuration="2.15276081s" podCreationTimestamp="2026-01-20 18:07:33 +0000 UTC" firstStartedPulling="2026-01-20 18:07:33.863778237 +0000 UTC m=+5147.624116204" lastFinishedPulling="2026-01-20 18:07:34.396459554 +0000 UTC m=+5148.156797520" observedRunningTime="2026-01-20 18:07:35.147317736 +0000 UTC m=+5148.907655703" watchObservedRunningTime="2026-01-20 18:07:35.15276081 +0000 UTC m=+5148.913098777" Jan 20 18:07:36 crc kubenswrapper[4558]: I0120 18:07:36.140849 4558 generic.go:334] "Generic (PLEG): container finished" podID="d0d07753-075f-4e29-9c69-0cdc8c2a7824" containerID="0df63fae7a1cc2e80032258b3ae2cdc72ec9d33fa9c69df318d943fbe7a5d952" exitCode=0 Jan 20 18:07:36 crc kubenswrapper[4558]: I0120 18:07:36.140936 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/libvirt-edpm-multinodeset-edpm-compute-no-nodes-wx29d" event={"ID":"d0d07753-075f-4e29-9c69-0cdc8c2a7824","Type":"ContainerDied","Data":"0df63fae7a1cc2e80032258b3ae2cdc72ec9d33fa9c69df318d943fbe7a5d952"} Jan 20 18:07:37 crc kubenswrapper[4558]: I0120 18:07:37.446915 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/libvirt-edpm-multinodeset-edpm-compute-no-nodes-wx29d" Jan 20 18:07:37 crc kubenswrapper[4558]: I0120 18:07:37.491400 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0d07753-075f-4e29-9c69-0cdc8c2a7824-libvirt-combined-ca-bundle\") pod \"d0d07753-075f-4e29-9c69-0cdc8c2a7824\" (UID: \"d0d07753-075f-4e29-9c69-0cdc8c2a7824\") " Jan 20 18:07:37 crc kubenswrapper[4558]: I0120 18:07:37.491467 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d0d07753-075f-4e29-9c69-0cdc8c2a7824-inventory\") pod \"d0d07753-075f-4e29-9c69-0cdc8c2a7824\" (UID: \"d0d07753-075f-4e29-9c69-0cdc8c2a7824\") " Jan 20 18:07:37 crc kubenswrapper[4558]: I0120 18:07:37.491616 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vqv2j\" (UniqueName: \"kubernetes.io/projected/d0d07753-075f-4e29-9c69-0cdc8c2a7824-kube-api-access-vqv2j\") pod \"d0d07753-075f-4e29-9c69-0cdc8c2a7824\" (UID: \"d0d07753-075f-4e29-9c69-0cdc8c2a7824\") " Jan 20 18:07:37 crc kubenswrapper[4558]: I0120 18:07:37.491643 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/d0d07753-075f-4e29-9c69-0cdc8c2a7824-libvirt-secret-0\") pod \"d0d07753-075f-4e29-9c69-0cdc8c2a7824\" (UID: \"d0d07753-075f-4e29-9c69-0cdc8c2a7824\") " Jan 20 18:07:37 crc kubenswrapper[4558]: I0120 18:07:37.491730 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/d0d07753-075f-4e29-9c69-0cdc8c2a7824-ssh-key-edpm-compute-no-nodes\") pod \"d0d07753-075f-4e29-9c69-0cdc8c2a7824\" (UID: \"d0d07753-075f-4e29-9c69-0cdc8c2a7824\") " Jan 20 18:07:37 crc kubenswrapper[4558]: I0120 18:07:37.498181 4558 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d0d07753-075f-4e29-9c69-0cdc8c2a7824-kube-api-access-vqv2j" (OuterVolumeSpecName: "kube-api-access-vqv2j") pod "d0d07753-075f-4e29-9c69-0cdc8c2a7824" (UID: "d0d07753-075f-4e29-9c69-0cdc8c2a7824"). InnerVolumeSpecName "kube-api-access-vqv2j". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:07:37 crc kubenswrapper[4558]: I0120 18:07:37.498370 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d0d07753-075f-4e29-9c69-0cdc8c2a7824-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "d0d07753-075f-4e29-9c69-0cdc8c2a7824" (UID: "d0d07753-075f-4e29-9c69-0cdc8c2a7824"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:07:37 crc kubenswrapper[4558]: I0120 18:07:37.513363 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d0d07753-075f-4e29-9c69-0cdc8c2a7824-ssh-key-edpm-compute-no-nodes" (OuterVolumeSpecName: "ssh-key-edpm-compute-no-nodes") pod "d0d07753-075f-4e29-9c69-0cdc8c2a7824" (UID: "d0d07753-075f-4e29-9c69-0cdc8c2a7824"). InnerVolumeSpecName "ssh-key-edpm-compute-no-nodes". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:07:37 crc kubenswrapper[4558]: I0120 18:07:37.513921 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d0d07753-075f-4e29-9c69-0cdc8c2a7824-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "d0d07753-075f-4e29-9c69-0cdc8c2a7824" (UID: "d0d07753-075f-4e29-9c69-0cdc8c2a7824"). InnerVolumeSpecName "libvirt-secret-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:07:37 crc kubenswrapper[4558]: I0120 18:07:37.517286 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d0d07753-075f-4e29-9c69-0cdc8c2a7824-inventory" (OuterVolumeSpecName: "inventory") pod "d0d07753-075f-4e29-9c69-0cdc8c2a7824" (UID: "d0d07753-075f-4e29-9c69-0cdc8c2a7824"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:07:37 crc kubenswrapper[4558]: I0120 18:07:37.593622 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vqv2j\" (UniqueName: \"kubernetes.io/projected/d0d07753-075f-4e29-9c69-0cdc8c2a7824-kube-api-access-vqv2j\") on node \"crc\" DevicePath \"\"" Jan 20 18:07:37 crc kubenswrapper[4558]: I0120 18:07:37.593661 4558 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/d0d07753-075f-4e29-9c69-0cdc8c2a7824-libvirt-secret-0\") on node \"crc\" DevicePath \"\"" Jan 20 18:07:37 crc kubenswrapper[4558]: I0120 18:07:37.593673 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/d0d07753-075f-4e29-9c69-0cdc8c2a7824-ssh-key-edpm-compute-no-nodes\") on node \"crc\" DevicePath \"\"" Jan 20 18:07:37 crc kubenswrapper[4558]: I0120 18:07:37.593683 4558 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0d07753-075f-4e29-9c69-0cdc8c2a7824-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:07:37 crc kubenswrapper[4558]: I0120 18:07:37.593696 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d0d07753-075f-4e29-9c69-0cdc8c2a7824-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:07:38 crc kubenswrapper[4558]: I0120 18:07:38.163634 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/libvirt-edpm-multinodeset-edpm-compute-no-nodes-wx29d" event={"ID":"d0d07753-075f-4e29-9c69-0cdc8c2a7824","Type":"ContainerDied","Data":"afd8215cf51d8f22401f7e26c53170a11ea73b4833172460880c2499f65555b9"} Jan 20 18:07:38 crc kubenswrapper[4558]: I0120 18:07:38.164065 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="afd8215cf51d8f22401f7e26c53170a11ea73b4833172460880c2499f65555b9" Jan 20 18:07:38 crc kubenswrapper[4558]: I0120 18:07:38.163719 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/libvirt-edpm-multinodeset-edpm-compute-no-nodes-wx29d" Jan 20 18:07:38 crc kubenswrapper[4558]: I0120 18:07:38.219317 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-no-nodes-5qjb4"] Jan 20 18:07:38 crc kubenswrapper[4558]: E0120 18:07:38.219662 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d0d07753-075f-4e29-9c69-0cdc8c2a7824" containerName="libvirt-edpm-multinodeset-edpm-compute-no-nodes" Jan 20 18:07:38 crc kubenswrapper[4558]: I0120 18:07:38.219683 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0d07753-075f-4e29-9c69-0cdc8c2a7824" containerName="libvirt-edpm-multinodeset-edpm-compute-no-nodes" Jan 20 18:07:38 crc kubenswrapper[4558]: I0120 18:07:38.219839 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d0d07753-075f-4e29-9c69-0cdc8c2a7824" containerName="libvirt-edpm-multinodeset-edpm-compute-no-nodes" Jan 20 18:07:38 crc kubenswrapper[4558]: I0120 18:07:38.220367 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-no-nodes-5qjb4" Jan 20 18:07:38 crc kubenswrapper[4558]: I0120 18:07:38.221992 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-migration-ssh-key" Jan 20 18:07:38 crc kubenswrapper[4558]: I0120 18:07:38.222270 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"edpm-compute-no-nodes-dockercfg-tljmj" Jan 20 18:07:38 crc kubenswrapper[4558]: I0120 18:07:38.222491 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-edpm-compute-no-nodes" Jan 20 18:07:38 crc kubenswrapper[4558]: I0120 18:07:38.222796 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 18:07:38 crc kubenswrapper[4558]: I0120 18:07:38.222887 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"combined-ca-bundle" Jan 20 18:07:38 crc kubenswrapper[4558]: I0120 18:07:38.223051 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 18:07:38 crc kubenswrapper[4558]: I0120 18:07:38.223453 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"nova-cell1-compute-config" Jan 20 18:07:38 crc kubenswrapper[4558]: I0120 18:07:38.232910 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-no-nodes-5qjb4"] Jan 20 18:07:38 crc kubenswrapper[4558]: I0120 18:07:38.304737 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/402c8a53-bf8a-4dd1-b245-d870ad87a086-nova-migration-ssh-key-0\") pod \"nova-edpm-multinodeset-edpm-compute-no-nodes-5qjb4\" (UID: \"402c8a53-bf8a-4dd1-b245-d870ad87a086\") " pod="openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-no-nodes-5qjb4" Jan 20 18:07:38 crc kubenswrapper[4558]: I0120 18:07:38.304793 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2vv52\" (UniqueName: \"kubernetes.io/projected/402c8a53-bf8a-4dd1-b245-d870ad87a086-kube-api-access-2vv52\") pod \"nova-edpm-multinodeset-edpm-compute-no-nodes-5qjb4\" (UID: \"402c8a53-bf8a-4dd1-b245-d870ad87a086\") " pod="openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-no-nodes-5qjb4" Jan 20 18:07:38 crc kubenswrapper[4558]: I0120 18:07:38.304817 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/402c8a53-bf8a-4dd1-b245-d870ad87a086-nova-combined-ca-bundle\") pod \"nova-edpm-multinodeset-edpm-compute-no-nodes-5qjb4\" (UID: \"402c8a53-bf8a-4dd1-b245-d870ad87a086\") " pod="openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-no-nodes-5qjb4" Jan 20 18:07:38 crc kubenswrapper[4558]: I0120 18:07:38.304841 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/402c8a53-bf8a-4dd1-b245-d870ad87a086-ssh-key-edpm-compute-no-nodes\") pod \"nova-edpm-multinodeset-edpm-compute-no-nodes-5qjb4\" (UID: \"402c8a53-bf8a-4dd1-b245-d870ad87a086\") " pod="openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-no-nodes-5qjb4" Jan 20 18:07:38 
crc kubenswrapper[4558]: I0120 18:07:38.304868 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/402c8a53-bf8a-4dd1-b245-d870ad87a086-inventory\") pod \"nova-edpm-multinodeset-edpm-compute-no-nodes-5qjb4\" (UID: \"402c8a53-bf8a-4dd1-b245-d870ad87a086\") " pod="openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-no-nodes-5qjb4" Jan 20 18:07:38 crc kubenswrapper[4558]: I0120 18:07:38.304900 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/402c8a53-bf8a-4dd1-b245-d870ad87a086-nova-cell1-compute-config-1\") pod \"nova-edpm-multinodeset-edpm-compute-no-nodes-5qjb4\" (UID: \"402c8a53-bf8a-4dd1-b245-d870ad87a086\") " pod="openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-no-nodes-5qjb4" Jan 20 18:07:38 crc kubenswrapper[4558]: I0120 18:07:38.304978 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/402c8a53-bf8a-4dd1-b245-d870ad87a086-nova-cell1-compute-config-0\") pod \"nova-edpm-multinodeset-edpm-compute-no-nodes-5qjb4\" (UID: \"402c8a53-bf8a-4dd1-b245-d870ad87a086\") " pod="openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-no-nodes-5qjb4" Jan 20 18:07:38 crc kubenswrapper[4558]: I0120 18:07:38.304996 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/402c8a53-bf8a-4dd1-b245-d870ad87a086-nova-migration-ssh-key-1\") pod \"nova-edpm-multinodeset-edpm-compute-no-nodes-5qjb4\" (UID: \"402c8a53-bf8a-4dd1-b245-d870ad87a086\") " pod="openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-no-nodes-5qjb4" Jan 20 18:07:38 crc kubenswrapper[4558]: I0120 18:07:38.407035 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/402c8a53-bf8a-4dd1-b245-d870ad87a086-nova-migration-ssh-key-0\") pod \"nova-edpm-multinodeset-edpm-compute-no-nodes-5qjb4\" (UID: \"402c8a53-bf8a-4dd1-b245-d870ad87a086\") " pod="openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-no-nodes-5qjb4" Jan 20 18:07:38 crc kubenswrapper[4558]: I0120 18:07:38.407090 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2vv52\" (UniqueName: \"kubernetes.io/projected/402c8a53-bf8a-4dd1-b245-d870ad87a086-kube-api-access-2vv52\") pod \"nova-edpm-multinodeset-edpm-compute-no-nodes-5qjb4\" (UID: \"402c8a53-bf8a-4dd1-b245-d870ad87a086\") " pod="openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-no-nodes-5qjb4" Jan 20 18:07:38 crc kubenswrapper[4558]: I0120 18:07:38.407122 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/402c8a53-bf8a-4dd1-b245-d870ad87a086-nova-combined-ca-bundle\") pod \"nova-edpm-multinodeset-edpm-compute-no-nodes-5qjb4\" (UID: \"402c8a53-bf8a-4dd1-b245-d870ad87a086\") " pod="openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-no-nodes-5qjb4" Jan 20 18:07:38 crc kubenswrapper[4558]: I0120 18:07:38.407151 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/402c8a53-bf8a-4dd1-b245-d870ad87a086-ssh-key-edpm-compute-no-nodes\") 
pod \"nova-edpm-multinodeset-edpm-compute-no-nodes-5qjb4\" (UID: \"402c8a53-bf8a-4dd1-b245-d870ad87a086\") " pod="openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-no-nodes-5qjb4" Jan 20 18:07:38 crc kubenswrapper[4558]: I0120 18:07:38.407191 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/402c8a53-bf8a-4dd1-b245-d870ad87a086-inventory\") pod \"nova-edpm-multinodeset-edpm-compute-no-nodes-5qjb4\" (UID: \"402c8a53-bf8a-4dd1-b245-d870ad87a086\") " pod="openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-no-nodes-5qjb4" Jan 20 18:07:38 crc kubenswrapper[4558]: I0120 18:07:38.407213 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/402c8a53-bf8a-4dd1-b245-d870ad87a086-nova-cell1-compute-config-1\") pod \"nova-edpm-multinodeset-edpm-compute-no-nodes-5qjb4\" (UID: \"402c8a53-bf8a-4dd1-b245-d870ad87a086\") " pod="openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-no-nodes-5qjb4" Jan 20 18:07:38 crc kubenswrapper[4558]: I0120 18:07:38.407257 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/402c8a53-bf8a-4dd1-b245-d870ad87a086-nova-cell1-compute-config-0\") pod \"nova-edpm-multinodeset-edpm-compute-no-nodes-5qjb4\" (UID: \"402c8a53-bf8a-4dd1-b245-d870ad87a086\") " pod="openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-no-nodes-5qjb4" Jan 20 18:07:38 crc kubenswrapper[4558]: I0120 18:07:38.407279 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/402c8a53-bf8a-4dd1-b245-d870ad87a086-nova-migration-ssh-key-1\") pod \"nova-edpm-multinodeset-edpm-compute-no-nodes-5qjb4\" (UID: \"402c8a53-bf8a-4dd1-b245-d870ad87a086\") " pod="openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-no-nodes-5qjb4" Jan 20 18:07:38 crc kubenswrapper[4558]: I0120 18:07:38.411300 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/402c8a53-bf8a-4dd1-b245-d870ad87a086-nova-migration-ssh-key-0\") pod \"nova-edpm-multinodeset-edpm-compute-no-nodes-5qjb4\" (UID: \"402c8a53-bf8a-4dd1-b245-d870ad87a086\") " pod="openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-no-nodes-5qjb4" Jan 20 18:07:38 crc kubenswrapper[4558]: I0120 18:07:38.411364 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/402c8a53-bf8a-4dd1-b245-d870ad87a086-inventory\") pod \"nova-edpm-multinodeset-edpm-compute-no-nodes-5qjb4\" (UID: \"402c8a53-bf8a-4dd1-b245-d870ad87a086\") " pod="openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-no-nodes-5qjb4" Jan 20 18:07:38 crc kubenswrapper[4558]: I0120 18:07:38.411746 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/402c8a53-bf8a-4dd1-b245-d870ad87a086-nova-migration-ssh-key-1\") pod \"nova-edpm-multinodeset-edpm-compute-no-nodes-5qjb4\" (UID: \"402c8a53-bf8a-4dd1-b245-d870ad87a086\") " pod="openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-no-nodes-5qjb4" Jan 20 18:07:38 crc kubenswrapper[4558]: I0120 18:07:38.411758 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/402c8a53-bf8a-4dd1-b245-d870ad87a086-nova-combined-ca-bundle\") pod \"nova-edpm-multinodeset-edpm-compute-no-nodes-5qjb4\" (UID: \"402c8a53-bf8a-4dd1-b245-d870ad87a086\") " pod="openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-no-nodes-5qjb4" Jan 20 18:07:38 crc kubenswrapper[4558]: I0120 18:07:38.412422 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/402c8a53-bf8a-4dd1-b245-d870ad87a086-ssh-key-edpm-compute-no-nodes\") pod \"nova-edpm-multinodeset-edpm-compute-no-nodes-5qjb4\" (UID: \"402c8a53-bf8a-4dd1-b245-d870ad87a086\") " pod="openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-no-nodes-5qjb4" Jan 20 18:07:38 crc kubenswrapper[4558]: I0120 18:07:38.412817 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/402c8a53-bf8a-4dd1-b245-d870ad87a086-nova-cell1-compute-config-1\") pod \"nova-edpm-multinodeset-edpm-compute-no-nodes-5qjb4\" (UID: \"402c8a53-bf8a-4dd1-b245-d870ad87a086\") " pod="openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-no-nodes-5qjb4" Jan 20 18:07:38 crc kubenswrapper[4558]: I0120 18:07:38.413608 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/402c8a53-bf8a-4dd1-b245-d870ad87a086-nova-cell1-compute-config-0\") pod \"nova-edpm-multinodeset-edpm-compute-no-nodes-5qjb4\" (UID: \"402c8a53-bf8a-4dd1-b245-d870ad87a086\") " pod="openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-no-nodes-5qjb4" Jan 20 18:07:38 crc kubenswrapper[4558]: I0120 18:07:38.422117 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2vv52\" (UniqueName: \"kubernetes.io/projected/402c8a53-bf8a-4dd1-b245-d870ad87a086-kube-api-access-2vv52\") pod \"nova-edpm-multinodeset-edpm-compute-no-nodes-5qjb4\" (UID: \"402c8a53-bf8a-4dd1-b245-d870ad87a086\") " pod="openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-no-nodes-5qjb4" Jan 20 18:07:38 crc kubenswrapper[4558]: I0120 18:07:38.535695 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-no-nodes-5qjb4" Jan 20 18:07:38 crc kubenswrapper[4558]: I0120 18:07:38.942836 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-no-nodes-5qjb4"] Jan 20 18:07:39 crc kubenswrapper[4558]: I0120 18:07:39.173990 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-no-nodes-5qjb4" event={"ID":"402c8a53-bf8a-4dd1-b245-d870ad87a086","Type":"ContainerStarted","Data":"6704975b66af23a941260731afd4f8628c7cfc3a6b2b988886619177fade297e"} Jan 20 18:07:40 crc kubenswrapper[4558]: I0120 18:07:40.189080 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-no-nodes-5qjb4" event={"ID":"402c8a53-bf8a-4dd1-b245-d870ad87a086","Type":"ContainerStarted","Data":"90c82ce4dee750ed2e69c30f5db645a7b43c50b51022061a3c35865af695b662"} Jan 20 18:07:40 crc kubenswrapper[4558]: I0120 18:07:40.210248 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-no-nodes-5qjb4" podStartSLOduration=1.70342405 podStartE2EDuration="2.210233302s" podCreationTimestamp="2026-01-20 18:07:38 +0000 UTC" firstStartedPulling="2026-01-20 18:07:38.951749552 +0000 UTC m=+5152.712087520" lastFinishedPulling="2026-01-20 18:07:39.458558805 +0000 UTC m=+5153.218896772" observedRunningTime="2026-01-20 18:07:40.207028316 +0000 UTC m=+5153.967366283" watchObservedRunningTime="2026-01-20 18:07:40.210233302 +0000 UTC m=+5153.970571269" Jan 20 18:07:41 crc kubenswrapper[4558]: I0120 18:07:41.202799 4558 generic.go:334] "Generic (PLEG): container finished" podID="402c8a53-bf8a-4dd1-b245-d870ad87a086" containerID="90c82ce4dee750ed2e69c30f5db645a7b43c50b51022061a3c35865af695b662" exitCode=0 Jan 20 18:07:41 crc kubenswrapper[4558]: I0120 18:07:41.202856 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-no-nodes-5qjb4" event={"ID":"402c8a53-bf8a-4dd1-b245-d870ad87a086","Type":"ContainerDied","Data":"90c82ce4dee750ed2e69c30f5db645a7b43c50b51022061a3c35865af695b662"} Jan 20 18:07:42 crc kubenswrapper[4558]: I0120 18:07:42.504995 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-no-nodes-5qjb4" Jan 20 18:07:42 crc kubenswrapper[4558]: I0120 18:07:42.573045 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/402c8a53-bf8a-4dd1-b245-d870ad87a086-ssh-key-edpm-compute-no-nodes\") pod \"402c8a53-bf8a-4dd1-b245-d870ad87a086\" (UID: \"402c8a53-bf8a-4dd1-b245-d870ad87a086\") " Jan 20 18:07:42 crc kubenswrapper[4558]: I0120 18:07:42.573195 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/402c8a53-bf8a-4dd1-b245-d870ad87a086-nova-migration-ssh-key-0\") pod \"402c8a53-bf8a-4dd1-b245-d870ad87a086\" (UID: \"402c8a53-bf8a-4dd1-b245-d870ad87a086\") " Jan 20 18:07:42 crc kubenswrapper[4558]: I0120 18:07:42.573243 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/402c8a53-bf8a-4dd1-b245-d870ad87a086-nova-migration-ssh-key-1\") pod \"402c8a53-bf8a-4dd1-b245-d870ad87a086\" (UID: \"402c8a53-bf8a-4dd1-b245-d870ad87a086\") " Jan 20 18:07:42 crc kubenswrapper[4558]: I0120 18:07:42.573276 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/402c8a53-bf8a-4dd1-b245-d870ad87a086-nova-combined-ca-bundle\") pod \"402c8a53-bf8a-4dd1-b245-d870ad87a086\" (UID: \"402c8a53-bf8a-4dd1-b245-d870ad87a086\") " Jan 20 18:07:42 crc kubenswrapper[4558]: I0120 18:07:42.573298 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/402c8a53-bf8a-4dd1-b245-d870ad87a086-inventory\") pod \"402c8a53-bf8a-4dd1-b245-d870ad87a086\" (UID: \"402c8a53-bf8a-4dd1-b245-d870ad87a086\") " Jan 20 18:07:42 crc kubenswrapper[4558]: I0120 18:07:42.573342 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/402c8a53-bf8a-4dd1-b245-d870ad87a086-nova-cell1-compute-config-0\") pod \"402c8a53-bf8a-4dd1-b245-d870ad87a086\" (UID: \"402c8a53-bf8a-4dd1-b245-d870ad87a086\") " Jan 20 18:07:42 crc kubenswrapper[4558]: I0120 18:07:42.573376 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/402c8a53-bf8a-4dd1-b245-d870ad87a086-nova-cell1-compute-config-1\") pod \"402c8a53-bf8a-4dd1-b245-d870ad87a086\" (UID: \"402c8a53-bf8a-4dd1-b245-d870ad87a086\") " Jan 20 18:07:42 crc kubenswrapper[4558]: I0120 18:07:42.573450 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2vv52\" (UniqueName: \"kubernetes.io/projected/402c8a53-bf8a-4dd1-b245-d870ad87a086-kube-api-access-2vv52\") pod \"402c8a53-bf8a-4dd1-b245-d870ad87a086\" (UID: \"402c8a53-bf8a-4dd1-b245-d870ad87a086\") " Jan 20 18:07:42 crc kubenswrapper[4558]: I0120 18:07:42.580193 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/402c8a53-bf8a-4dd1-b245-d870ad87a086-kube-api-access-2vv52" (OuterVolumeSpecName: "kube-api-access-2vv52") pod "402c8a53-bf8a-4dd1-b245-d870ad87a086" (UID: "402c8a53-bf8a-4dd1-b245-d870ad87a086"). InnerVolumeSpecName "kube-api-access-2vv52". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:07:42 crc kubenswrapper[4558]: I0120 18:07:42.581441 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/402c8a53-bf8a-4dd1-b245-d870ad87a086-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "402c8a53-bf8a-4dd1-b245-d870ad87a086" (UID: "402c8a53-bf8a-4dd1-b245-d870ad87a086"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:07:42 crc kubenswrapper[4558]: I0120 18:07:42.592798 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/402c8a53-bf8a-4dd1-b245-d870ad87a086-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "402c8a53-bf8a-4dd1-b245-d870ad87a086" (UID: "402c8a53-bf8a-4dd1-b245-d870ad87a086"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:07:42 crc kubenswrapper[4558]: I0120 18:07:42.594016 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/402c8a53-bf8a-4dd1-b245-d870ad87a086-inventory" (OuterVolumeSpecName: "inventory") pod "402c8a53-bf8a-4dd1-b245-d870ad87a086" (UID: "402c8a53-bf8a-4dd1-b245-d870ad87a086"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:07:42 crc kubenswrapper[4558]: I0120 18:07:42.594413 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/402c8a53-bf8a-4dd1-b245-d870ad87a086-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "402c8a53-bf8a-4dd1-b245-d870ad87a086" (UID: "402c8a53-bf8a-4dd1-b245-d870ad87a086"). InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:07:42 crc kubenswrapper[4558]: I0120 18:07:42.595282 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/402c8a53-bf8a-4dd1-b245-d870ad87a086-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "402c8a53-bf8a-4dd1-b245-d870ad87a086" (UID: "402c8a53-bf8a-4dd1-b245-d870ad87a086"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:07:42 crc kubenswrapper[4558]: I0120 18:07:42.596356 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/402c8a53-bf8a-4dd1-b245-d870ad87a086-ssh-key-edpm-compute-no-nodes" (OuterVolumeSpecName: "ssh-key-edpm-compute-no-nodes") pod "402c8a53-bf8a-4dd1-b245-d870ad87a086" (UID: "402c8a53-bf8a-4dd1-b245-d870ad87a086"). InnerVolumeSpecName "ssh-key-edpm-compute-no-nodes". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:07:42 crc kubenswrapper[4558]: I0120 18:07:42.597039 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/402c8a53-bf8a-4dd1-b245-d870ad87a086-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "402c8a53-bf8a-4dd1-b245-d870ad87a086" (UID: "402c8a53-bf8a-4dd1-b245-d870ad87a086"). InnerVolumeSpecName "nova-cell1-compute-config-1". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:07:42 crc kubenswrapper[4558]: I0120 18:07:42.675743 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2vv52\" (UniqueName: \"kubernetes.io/projected/402c8a53-bf8a-4dd1-b245-d870ad87a086-kube-api-access-2vv52\") on node \"crc\" DevicePath \"\"" Jan 20 18:07:42 crc kubenswrapper[4558]: I0120 18:07:42.675783 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/402c8a53-bf8a-4dd1-b245-d870ad87a086-ssh-key-edpm-compute-no-nodes\") on node \"crc\" DevicePath \"\"" Jan 20 18:07:42 crc kubenswrapper[4558]: I0120 18:07:42.675800 4558 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/402c8a53-bf8a-4dd1-b245-d870ad87a086-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Jan 20 18:07:42 crc kubenswrapper[4558]: I0120 18:07:42.675908 4558 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/402c8a53-bf8a-4dd1-b245-d870ad87a086-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Jan 20 18:07:42 crc kubenswrapper[4558]: I0120 18:07:42.675924 4558 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/402c8a53-bf8a-4dd1-b245-d870ad87a086-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:07:42 crc kubenswrapper[4558]: I0120 18:07:42.675937 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/402c8a53-bf8a-4dd1-b245-d870ad87a086-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:07:42 crc kubenswrapper[4558]: I0120 18:07:42.675949 4558 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/402c8a53-bf8a-4dd1-b245-d870ad87a086-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Jan 20 18:07:42 crc kubenswrapper[4558]: I0120 18:07:42.675959 4558 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/402c8a53-bf8a-4dd1-b245-d870ad87a086-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Jan 20 18:07:43 crc kubenswrapper[4558]: I0120 18:07:43.227461 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-no-nodes-5qjb4" event={"ID":"402c8a53-bf8a-4dd1-b245-d870ad87a086","Type":"ContainerDied","Data":"6704975b66af23a941260731afd4f8628c7cfc3a6b2b988886619177fade297e"} Jan 20 18:07:43 crc kubenswrapper[4558]: I0120 18:07:43.227516 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6704975b66af23a941260731afd4f8628c7cfc3a6b2b988886619177fade297e" Jan 20 18:07:43 crc kubenswrapper[4558]: I0120 18:07:43.227557 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-no-nodes-5qjb4" Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.372216 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/configure-network-edpm-multinodeset-edpm-compute-no-nodes-6rm52"] Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.378862 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/install-os-edpm-multinodeset-edpm-compute-no-nodes-xq8w6"] Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.387319 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-beta-nodeset4f842"] Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.392955 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/install-os-edpm-multinodeset-edpm-compute-no-nodes-xq8w6"] Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.399218 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-sriov-edpm-multinodeset-edpm-compute-no-nodes-t57nm"] Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.409662 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/configure-network-edpm-multinodeset-edpm-compute-no-nodes-6rm52"] Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.413851 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-lh947"] Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.424060 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-beta-nodeset-lh947"] Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.427219 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-sriov-edpm-multinodeset-edpm-compute-no-nodes-t57nm"] Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.431628 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-beta-nodeset4f842"] Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.439103 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/validate-network-edpm-multinodeset-edpm-compute-no-nodes-k4xfs"] Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.446199 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-ovn-edpm-multinodeset-edpm-compute-no-nodes-5rkzf"] Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.446936 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-no-nodes-ndqgs"] Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.473225 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-dhcp-edpm-multinodeset-edpm-compute-no-nodes-j5j2l"] Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.489255 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/validate-network-edpm-multinodeset-edpm-compute-no-nodes-k4xfs"] Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.502116 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-ovn-edpm-multinodeset-edpm-compute-no-nodes-5rkzf"] Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.507485 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack-kuttl-tests/bootstrap-edpm-multinodeset-edpm-compute-no-nodes-ndqgs"] Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.553203 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/libvirt-edpm-multinodeset-edpm-compute-no-nodes-wx29d"] Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.565108 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/libvirt-edpm-multinodeset-edpm-compute-no-nodes-wx29d"] Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.566039 4558 scope.go:117] "RemoveContainer" containerID="6b02d31ec11fe5435af988e35303a6f66b5afa5ef952ce5f7cc4b3cd5f4d9497" Jan 20 18:07:45 crc kubenswrapper[4558]: E0120 18:07:45.566390 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.571151 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-dhcp-edpm-multinodeset-edpm-compute-no-nodes-j5j2l"] Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.575836 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67"] Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.580906 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-no-nodes-5qjb4"] Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.586359 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-no-nodes-lrxfd"] Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.591346 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/download-cache-edpm-multinodeset-edpm-compute-no-nodes-lrxfd"] Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.596054 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/configure-os-edpm-multinodeset-edpm-compute-no-nodes-t4wsc"] Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.601220 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-no-nodes-5ljg4"] Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.606121 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/run-os-edpm-multinodeset-edpm-compute-no-nodes-8ll6g"] Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.612090 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovn-edpm-multinodeset-edpm-compute-no-nodes-22vhv"] Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.616396 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/install-certs-edpm-multinodeset-edpm-compute-no-nodes-7hr67"] Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.623092 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-edpm-multinodeset-edpm-compute-no-nodes-5qjb4"] Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.629454 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack-kuttl-tests/neutron-metadata-edpm-multinodeset-edpm-compute-no-nodes-5ljg4"] Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.637124 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ovn-edpm-multinodeset-edpm-compute-no-nodes-22vhv"] Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.641217 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/configure-os-edpm-multinodeset-edpm-compute-no-nodes-t4wsc"] Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.643631 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/run-os-edpm-multinodeset-edpm-compute-no-nodes-8ll6g"] Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.646530 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovn-edpm-compute-no-nodes-updated-ovn-cm-edpm-compute-no-n8f225"] Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.650479 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/validate-network-edpm-compute-no-nodes-edpm-compute-no-nodgr7qx"] Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.654422 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/download-cache-edpm-compute-no-nodes-edpm-compute-no-nodesm9d27"] Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.660048 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-dhcp-edpm-compute-no-nodes-edpm-compute-no-nodes-s2hp4"] Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.666819 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/nova-edpm-compute-no-nodes-edpm-compute-no-nodes-5t59k"] Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.671178 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-82r5j"] Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.675320 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-dhcp-edpm-compute-no-nodes-edpm-compute-no-nodes-s2hp4"] Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.679437 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/libvirt-edpm-compute-no-nodes-edpm-compute-no-nodes-bw44r"] Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.683635 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ovn-edpm-compute-no-nodes-updated-ovn-cm-edpm-compute-no-n8f225"] Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.687517 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-metadata-edpm-compute-no-nodes-edpm-compute-no-nodssvgn"] Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.691497 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/run-os-edpm-compute-no-nodes-edpm-compute-no-nodes-lvhc9"] Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.695521 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/custom-svc-edpm-compute-no-nodes-ovrd-edpm-compute-no-nodetmjvn"] Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.699647 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/bootstrap-edpm-compute-no-nodes-edpm-compute-no-nodes-hmt8x"] Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.704269 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack-kuttl-tests/download-cache-edpm-compute-no-nodes-edpm-compute-no-nodesm9d27"] Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.708434 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-82r5j"] Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.712793 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/libvirt-edpm-compute-no-nodes-edpm-compute-no-nodes-bw44r"] Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.716806 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/configure-network-edpm-compute-no-nodes-edpm-compute-no-nokr9g2"] Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.720649 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/bootstrap-edpm-compute-no-nodes-edpm-compute-no-nodes-hmt8x"] Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.724531 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-z2g5q"] Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.728397 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/run-os-edpm-compute-no-nodes-edpm-compute-no-nodes-lvhc9"] Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.732212 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-metadata-edpm-compute-no-nodes-edpm-compute-no-nodssvgn"] Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.736277 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/nova-edpm-compute-no-nodes-edpm-compute-no-nodes-5t59k"] Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.740243 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/install-os-edpm-compute-no-nodes-edpm-compute-no-nodes-qmkd7"] Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.744438 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/validate-network-edpm-compute-no-nodes-edpm-compute-no-nodgr7qx"] Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.748587 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/neutron-sriov-edpm-compute-no-nodes-edpm-compute-no-nodes-ftqwk"] Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.752668 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd"] Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.756692 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/configure-os-edpm-compute-no-nodes-edpm-compute-no-nodes-z7vb5"] Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.760746 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/configure-network-edpm-compute-no-nodes-edpm-compute-no-nokr9g2"] Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.764699 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/custom-svc-edpm-compute-no-nodes-ovrd-edpm-compute-no-nodetmjvn"] Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.768539 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-ovn-edpm-compute-no-nodes-edpm-compute-no-nodes-z2g5q"] Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.772468 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack-kuttl-tests/install-os-edpm-compute-no-nodes-edpm-compute-no-nodes-qmkd7"] Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.776500 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/neutron-sriov-edpm-compute-no-nodes-edpm-compute-no-nodes-ftqwk"] Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.780347 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/configure-os-edpm-compute-no-nodes-edpm-compute-no-nodes-z7vb5"] Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.784213 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/install-certs-edpm-compute-no-nodes-edpm-compute-no-nodes-rjzbd"] Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.788064 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-bmrqf"] Jan 20 18:07:45 crc kubenswrapper[4558]: E0120 18:07:45.788623 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="402c8a53-bf8a-4dd1-b245-d870ad87a086" containerName="nova-edpm-multinodeset-edpm-compute-no-nodes" Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.788650 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="402c8a53-bf8a-4dd1-b245-d870ad87a086" containerName="nova-edpm-multinodeset-edpm-compute-no-nodes" Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.788910 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="402c8a53-bf8a-4dd1-b245-d870ad87a086" containerName="nova-edpm-multinodeset-edpm-compute-no-nodes" Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.790134 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-bmrqf" Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.792073 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-bmrqf"] Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.795808 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-bmrqf"] Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.799639 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-mkxmj"] Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.801412 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-mkxmj" Jan 20 18:07:45 crc kubenswrapper[4558]: E0120 18:07:45.817965 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[config dnsmasq-svc edpm-compute-no-nodes kube-api-access-r8nsd], unattached volumes=[], failed to process volumes=[config dnsmasq-svc edpm-compute-no-nodes kube-api-access-r8nsd]: context canceled" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-bmrqf" podUID="e010402e-1507-46e1-a78f-facf49687d8e" Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.823674 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-mkxmj"] Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.928854 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a6a8563d-683b-444a-9739-fca985cbc165-config\") pod \"dnsmasq-dnsmasq-84b9f45d47-mkxmj\" (UID: \"a6a8563d-683b-444a-9739-fca985cbc165\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-mkxmj" Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.929110 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/e010402e-1507-46e1-a78f-facf49687d8e-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-64864b6d57-bmrqf\" (UID: \"e010402e-1507-46e1-a78f-facf49687d8e\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-bmrqf" Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.929171 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/configmap/e010402e-1507-46e1-a78f-facf49687d8e-edpm-compute-no-nodes\") pod \"dnsmasq-dnsmasq-64864b6d57-bmrqf\" (UID: \"e010402e-1507-46e1-a78f-facf49687d8e\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-bmrqf" Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.929204 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e010402e-1507-46e1-a78f-facf49687d8e-config\") pod \"dnsmasq-dnsmasq-64864b6d57-bmrqf\" (UID: \"e010402e-1507-46e1-a78f-facf49687d8e\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-bmrqf" Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.929851 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r8nsd\" (UniqueName: \"kubernetes.io/projected/e010402e-1507-46e1-a78f-facf49687d8e-kube-api-access-r8nsd\") pod \"dnsmasq-dnsmasq-64864b6d57-bmrqf\" (UID: \"e010402e-1507-46e1-a78f-facf49687d8e\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-bmrqf" Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.929900 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/a6a8563d-683b-444a-9739-fca985cbc165-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-84b9f45d47-mkxmj\" (UID: \"a6a8563d-683b-444a-9739-fca985cbc165\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-mkxmj" Jan 20 18:07:45 crc kubenswrapper[4558]: I0120 18:07:45.929919 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6pl82\" (UniqueName: 
\"kubernetes.io/projected/a6a8563d-683b-444a-9739-fca985cbc165-kube-api-access-6pl82\") pod \"dnsmasq-dnsmasq-84b9f45d47-mkxmj\" (UID: \"a6a8563d-683b-444a-9739-fca985cbc165\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-mkxmj" Jan 20 18:07:46 crc kubenswrapper[4558]: I0120 18:07:46.031268 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/configmap/e010402e-1507-46e1-a78f-facf49687d8e-edpm-compute-no-nodes\") pod \"dnsmasq-dnsmasq-64864b6d57-bmrqf\" (UID: \"e010402e-1507-46e1-a78f-facf49687d8e\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-bmrqf" Jan 20 18:07:46 crc kubenswrapper[4558]: I0120 18:07:46.031309 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e010402e-1507-46e1-a78f-facf49687d8e-config\") pod \"dnsmasq-dnsmasq-64864b6d57-bmrqf\" (UID: \"e010402e-1507-46e1-a78f-facf49687d8e\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-bmrqf" Jan 20 18:07:46 crc kubenswrapper[4558]: I0120 18:07:46.031352 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r8nsd\" (UniqueName: \"kubernetes.io/projected/e010402e-1507-46e1-a78f-facf49687d8e-kube-api-access-r8nsd\") pod \"dnsmasq-dnsmasq-64864b6d57-bmrqf\" (UID: \"e010402e-1507-46e1-a78f-facf49687d8e\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-bmrqf" Jan 20 18:07:46 crc kubenswrapper[4558]: I0120 18:07:46.031373 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/a6a8563d-683b-444a-9739-fca985cbc165-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-84b9f45d47-mkxmj\" (UID: \"a6a8563d-683b-444a-9739-fca985cbc165\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-mkxmj" Jan 20 18:07:46 crc kubenswrapper[4558]: I0120 18:07:46.031392 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6pl82\" (UniqueName: \"kubernetes.io/projected/a6a8563d-683b-444a-9739-fca985cbc165-kube-api-access-6pl82\") pod \"dnsmasq-dnsmasq-84b9f45d47-mkxmj\" (UID: \"a6a8563d-683b-444a-9739-fca985cbc165\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-mkxmj" Jan 20 18:07:46 crc kubenswrapper[4558]: I0120 18:07:46.031415 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a6a8563d-683b-444a-9739-fca985cbc165-config\") pod \"dnsmasq-dnsmasq-84b9f45d47-mkxmj\" (UID: \"a6a8563d-683b-444a-9739-fca985cbc165\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-mkxmj" Jan 20 18:07:46 crc kubenswrapper[4558]: E0120 18:07:46.031424 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/edpm-compute-no-nodes: configmap "edpm-compute-no-nodes" not found Jan 20 18:07:46 crc kubenswrapper[4558]: I0120 18:07:46.031446 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/e010402e-1507-46e1-a78f-facf49687d8e-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-64864b6d57-bmrqf\" (UID: \"e010402e-1507-46e1-a78f-facf49687d8e\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-bmrqf" Jan 20 18:07:46 crc kubenswrapper[4558]: E0120 18:07:46.031474 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/e010402e-1507-46e1-a78f-facf49687d8e-edpm-compute-no-nodes podName:e010402e-1507-46e1-a78f-facf49687d8e 
nodeName:}" failed. No retries permitted until 2026-01-20 18:07:46.531459641 +0000 UTC m=+5160.291797608 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "edpm-compute-no-nodes" (UniqueName: "kubernetes.io/configmap/e010402e-1507-46e1-a78f-facf49687d8e-edpm-compute-no-nodes") pod "dnsmasq-dnsmasq-64864b6d57-bmrqf" (UID: "e010402e-1507-46e1-a78f-facf49687d8e") : configmap "edpm-compute-no-nodes" not found Jan 20 18:07:46 crc kubenswrapper[4558]: I0120 18:07:46.032118 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/e010402e-1507-46e1-a78f-facf49687d8e-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-64864b6d57-bmrqf\" (UID: \"e010402e-1507-46e1-a78f-facf49687d8e\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-bmrqf" Jan 20 18:07:46 crc kubenswrapper[4558]: I0120 18:07:46.032291 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/a6a8563d-683b-444a-9739-fca985cbc165-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-84b9f45d47-mkxmj\" (UID: \"a6a8563d-683b-444a-9739-fca985cbc165\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-mkxmj" Jan 20 18:07:46 crc kubenswrapper[4558]: I0120 18:07:46.032623 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a6a8563d-683b-444a-9739-fca985cbc165-config\") pod \"dnsmasq-dnsmasq-84b9f45d47-mkxmj\" (UID: \"a6a8563d-683b-444a-9739-fca985cbc165\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-mkxmj" Jan 20 18:07:46 crc kubenswrapper[4558]: I0120 18:07:46.032721 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e010402e-1507-46e1-a78f-facf49687d8e-config\") pod \"dnsmasq-dnsmasq-64864b6d57-bmrqf\" (UID: \"e010402e-1507-46e1-a78f-facf49687d8e\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-bmrqf" Jan 20 18:07:46 crc kubenswrapper[4558]: I0120 18:07:46.050570 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6pl82\" (UniqueName: \"kubernetes.io/projected/a6a8563d-683b-444a-9739-fca985cbc165-kube-api-access-6pl82\") pod \"dnsmasq-dnsmasq-84b9f45d47-mkxmj\" (UID: \"a6a8563d-683b-444a-9739-fca985cbc165\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-mkxmj" Jan 20 18:07:46 crc kubenswrapper[4558]: I0120 18:07:46.050631 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r8nsd\" (UniqueName: \"kubernetes.io/projected/e010402e-1507-46e1-a78f-facf49687d8e-kube-api-access-r8nsd\") pod \"dnsmasq-dnsmasq-64864b6d57-bmrqf\" (UID: \"e010402e-1507-46e1-a78f-facf49687d8e\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-bmrqf" Jan 20 18:07:46 crc kubenswrapper[4558]: I0120 18:07:46.127308 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-mkxmj" Jan 20 18:07:46 crc kubenswrapper[4558]: I0120 18:07:46.256840 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-bmrqf" Jan 20 18:07:46 crc kubenswrapper[4558]: I0120 18:07:46.268350 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-bmrqf" Jan 20 18:07:46 crc kubenswrapper[4558]: I0120 18:07:46.437313 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/e010402e-1507-46e1-a78f-facf49687d8e-dnsmasq-svc\") pod \"e010402e-1507-46e1-a78f-facf49687d8e\" (UID: \"e010402e-1507-46e1-a78f-facf49687d8e\") " Jan 20 18:07:46 crc kubenswrapper[4558]: I0120 18:07:46.437395 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r8nsd\" (UniqueName: \"kubernetes.io/projected/e010402e-1507-46e1-a78f-facf49687d8e-kube-api-access-r8nsd\") pod \"e010402e-1507-46e1-a78f-facf49687d8e\" (UID: \"e010402e-1507-46e1-a78f-facf49687d8e\") " Jan 20 18:07:46 crc kubenswrapper[4558]: I0120 18:07:46.437435 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e010402e-1507-46e1-a78f-facf49687d8e-config\") pod \"e010402e-1507-46e1-a78f-facf49687d8e\" (UID: \"e010402e-1507-46e1-a78f-facf49687d8e\") " Jan 20 18:07:46 crc kubenswrapper[4558]: I0120 18:07:46.438192 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e010402e-1507-46e1-a78f-facf49687d8e-config" (OuterVolumeSpecName: "config") pod "e010402e-1507-46e1-a78f-facf49687d8e" (UID: "e010402e-1507-46e1-a78f-facf49687d8e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:07:46 crc kubenswrapper[4558]: I0120 18:07:46.438234 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e010402e-1507-46e1-a78f-facf49687d8e-dnsmasq-svc" (OuterVolumeSpecName: "dnsmasq-svc") pod "e010402e-1507-46e1-a78f-facf49687d8e" (UID: "e010402e-1507-46e1-a78f-facf49687d8e"). InnerVolumeSpecName "dnsmasq-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:07:46 crc kubenswrapper[4558]: I0120 18:07:46.438285 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e010402e-1507-46e1-a78f-facf49687d8e-config\") on node \"crc\" DevicePath \"\"" Jan 20 18:07:46 crc kubenswrapper[4558]: I0120 18:07:46.444391 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e010402e-1507-46e1-a78f-facf49687d8e-kube-api-access-r8nsd" (OuterVolumeSpecName: "kube-api-access-r8nsd") pod "e010402e-1507-46e1-a78f-facf49687d8e" (UID: "e010402e-1507-46e1-a78f-facf49687d8e"). InnerVolumeSpecName "kube-api-access-r8nsd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:07:46 crc kubenswrapper[4558]: I0120 18:07:46.514891 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-mkxmj"] Jan 20 18:07:46 crc kubenswrapper[4558]: I0120 18:07:46.540490 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/configmap/e010402e-1507-46e1-a78f-facf49687d8e-edpm-compute-no-nodes\") pod \"dnsmasq-dnsmasq-64864b6d57-bmrqf\" (UID: \"e010402e-1507-46e1-a78f-facf49687d8e\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-bmrqf" Jan 20 18:07:46 crc kubenswrapper[4558]: E0120 18:07:46.540621 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/edpm-compute-no-nodes: configmap "edpm-compute-no-nodes" not found Jan 20 18:07:46 crc kubenswrapper[4558]: E0120 18:07:46.540703 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/e010402e-1507-46e1-a78f-facf49687d8e-edpm-compute-no-nodes podName:e010402e-1507-46e1-a78f-facf49687d8e nodeName:}" failed. No retries permitted until 2026-01-20 18:07:47.54068739 +0000 UTC m=+5161.301025357 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "edpm-compute-no-nodes" (UniqueName: "kubernetes.io/configmap/e010402e-1507-46e1-a78f-facf49687d8e-edpm-compute-no-nodes") pod "dnsmasq-dnsmasq-64864b6d57-bmrqf" (UID: "e010402e-1507-46e1-a78f-facf49687d8e") : configmap "edpm-compute-no-nodes" not found Jan 20 18:07:46 crc kubenswrapper[4558]: I0120 18:07:46.540746 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r8nsd\" (UniqueName: \"kubernetes.io/projected/e010402e-1507-46e1-a78f-facf49687d8e-kube-api-access-r8nsd\") on node \"crc\" DevicePath \"\"" Jan 20 18:07:46 crc kubenswrapper[4558]: I0120 18:07:46.540764 4558 reconciler_common.go:293] "Volume detached for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/e010402e-1507-46e1-a78f-facf49687d8e-dnsmasq-svc\") on node \"crc\" DevicePath \"\"" Jan 20 18:07:46 crc kubenswrapper[4558]: I0120 18:07:46.574595 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1897fe4b-a1b9-4dc9-b07d-cadf99938fab" path="/var/lib/kubelet/pods/1897fe4b-a1b9-4dc9-b07d-cadf99938fab/volumes" Jan 20 18:07:46 crc kubenswrapper[4558]: I0120 18:07:46.575219 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1e66f491-e230-42f2-aab5-40c0ab8f3090" path="/var/lib/kubelet/pods/1e66f491-e230-42f2-aab5-40c0ab8f3090/volumes" Jan 20 18:07:46 crc kubenswrapper[4558]: I0120 18:07:46.575661 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="21734838-c50a-446f-bece-4a50f5a897d4" path="/var/lib/kubelet/pods/21734838-c50a-446f-bece-4a50f5a897d4/volumes" Jan 20 18:07:46 crc kubenswrapper[4558]: I0120 18:07:46.576122 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="224d64cb-74ea-4d58-8e45-c537cd02101e" path="/var/lib/kubelet/pods/224d64cb-74ea-4d58-8e45-c537cd02101e/volumes" Jan 20 18:07:46 crc kubenswrapper[4558]: I0120 18:07:46.577062 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="231da886-60dd-4772-a429-3e2fc309990d" path="/var/lib/kubelet/pods/231da886-60dd-4772-a429-3e2fc309990d/volumes" Jan 20 18:07:46 crc kubenswrapper[4558]: I0120 18:07:46.578124 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="26fe7558-d4f3-4699-b772-138db97df828" 
path="/var/lib/kubelet/pods/26fe7558-d4f3-4699-b772-138db97df828/volumes" Jan 20 18:07:46 crc kubenswrapper[4558]: I0120 18:07:46.578596 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3f1f313a-0685-4de7-b389-fc57f2b52d7f" path="/var/lib/kubelet/pods/3f1f313a-0685-4de7-b389-fc57f2b52d7f/volumes" Jan 20 18:07:46 crc kubenswrapper[4558]: I0120 18:07:46.579588 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="402c8a53-bf8a-4dd1-b245-d870ad87a086" path="/var/lib/kubelet/pods/402c8a53-bf8a-4dd1-b245-d870ad87a086/volumes" Jan 20 18:07:46 crc kubenswrapper[4558]: I0120 18:07:46.580048 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="438cb630-c06a-4838-a105-cc05955aafbe" path="/var/lib/kubelet/pods/438cb630-c06a-4838-a105-cc05955aafbe/volumes" Jan 20 18:07:46 crc kubenswrapper[4558]: I0120 18:07:46.581841 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="46eabd98-9646-4e4d-97b7-96ebad66a340" path="/var/lib/kubelet/pods/46eabd98-9646-4e4d-97b7-96ebad66a340/volumes" Jan 20 18:07:46 crc kubenswrapper[4558]: I0120 18:07:46.582724 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4a10dbb7-8193-4f7c-89b2-889ab7842c98" path="/var/lib/kubelet/pods/4a10dbb7-8193-4f7c-89b2-889ab7842c98/volumes" Jan 20 18:07:46 crc kubenswrapper[4558]: I0120 18:07:46.583206 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5ac3bfae-05eb-4f64-ab7f-a48397124f99" path="/var/lib/kubelet/pods/5ac3bfae-05eb-4f64-ab7f-a48397124f99/volumes" Jan 20 18:07:46 crc kubenswrapper[4558]: I0120 18:07:46.583648 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="62c94a76-dced-48dd-9d0b-add9b0a76a81" path="/var/lib/kubelet/pods/62c94a76-dced-48dd-9d0b-add9b0a76a81/volumes" Jan 20 18:07:46 crc kubenswrapper[4558]: I0120 18:07:46.584228 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7783d091-b00b-4836-b2e5-b67ed88fba86" path="/var/lib/kubelet/pods/7783d091-b00b-4836-b2e5-b67ed88fba86/volumes" Jan 20 18:07:46 crc kubenswrapper[4558]: I0120 18:07:46.585125 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1" path="/var/lib/kubelet/pods/7eb53d33-93c4-4ebc-ae45-a1e0c7f151b1/volumes" Jan 20 18:07:46 crc kubenswrapper[4558]: I0120 18:07:46.585673 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8447470c-f397-401f-ad5c-048f3b889af9" path="/var/lib/kubelet/pods/8447470c-f397-401f-ad5c-048f3b889af9/volumes" Jan 20 18:07:46 crc kubenswrapper[4558]: I0120 18:07:46.586280 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="901b7beb-82dc-41fb-90ef-90b10ac3bcff" path="/var/lib/kubelet/pods/901b7beb-82dc-41fb-90ef-90b10ac3bcff/volumes" Jan 20 18:07:46 crc kubenswrapper[4558]: I0120 18:07:46.587230 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="936a9e47-9698-4bf5-ac34-023aa7b95132" path="/var/lib/kubelet/pods/936a9e47-9698-4bf5-ac34-023aa7b95132/volumes" Jan 20 18:07:46 crc kubenswrapper[4558]: I0120 18:07:46.587722 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="955714d3-c5b2-49b4-ae36-97c534bc79ea" path="/var/lib/kubelet/pods/955714d3-c5b2-49b4-ae36-97c534bc79ea/volumes" Jan 20 18:07:46 crc kubenswrapper[4558]: I0120 18:07:46.588189 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="973eb657-4c4d-42a9-a08d-9b29e9868beb" 
path="/var/lib/kubelet/pods/973eb657-4c4d-42a9-a08d-9b29e9868beb/volumes" Jan 20 18:07:46 crc kubenswrapper[4558]: I0120 18:07:46.588625 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="98619227-1a55-4062-9c6f-f0157fe8bde4" path="/var/lib/kubelet/pods/98619227-1a55-4062-9c6f-f0157fe8bde4/volumes" Jan 20 18:07:46 crc kubenswrapper[4558]: I0120 18:07:46.589453 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ab9e8e32-6e48-4af0-ab56-2ec5fe7d986f" path="/var/lib/kubelet/pods/ab9e8e32-6e48-4af0-ab56-2ec5fe7d986f/volumes" Jan 20 18:07:46 crc kubenswrapper[4558]: I0120 18:07:46.589893 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ac9e577e-5aa4-4923-8f3f-a4f1fa6ce515" path="/var/lib/kubelet/pods/ac9e577e-5aa4-4923-8f3f-a4f1fa6ce515/volumes" Jan 20 18:07:46 crc kubenswrapper[4558]: I0120 18:07:46.590499 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b03f4166-30cb-4607-96b9-eeeb2c5c48f0" path="/var/lib/kubelet/pods/b03f4166-30cb-4607-96b9-eeeb2c5c48f0/volumes" Jan 20 18:07:46 crc kubenswrapper[4558]: I0120 18:07:46.591381 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b16dc76f-2b51-48f2-9fe8-ff79228fba59" path="/var/lib/kubelet/pods/b16dc76f-2b51-48f2-9fe8-ff79228fba59/volumes" Jan 20 18:07:46 crc kubenswrapper[4558]: I0120 18:07:46.591834 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c4b70b5c-da1d-40da-8a60-e2b03e2e3a10" path="/var/lib/kubelet/pods/c4b70b5c-da1d-40da-8a60-e2b03e2e3a10/volumes" Jan 20 18:07:46 crc kubenswrapper[4558]: I0120 18:07:46.592696 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c6bad0e2-6a8e-499d-854b-18c87c491d0e" path="/var/lib/kubelet/pods/c6bad0e2-6a8e-499d-854b-18c87c491d0e/volumes" Jan 20 18:07:46 crc kubenswrapper[4558]: I0120 18:07:46.593146 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d0d07753-075f-4e29-9c69-0cdc8c2a7824" path="/var/lib/kubelet/pods/d0d07753-075f-4e29-9c69-0cdc8c2a7824/volumes" Jan 20 18:07:46 crc kubenswrapper[4558]: I0120 18:07:46.593591 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dd325333-0db8-4cec-8224-89aa1c81cfca" path="/var/lib/kubelet/pods/dd325333-0db8-4cec-8224-89aa1c81cfca/volumes" Jan 20 18:07:46 crc kubenswrapper[4558]: I0120 18:07:46.594464 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e53d0e83-ebc6-4441-9c69-94b482e0f2b8" path="/var/lib/kubelet/pods/e53d0e83-ebc6-4441-9c69-94b482e0f2b8/volumes" Jan 20 18:07:46 crc kubenswrapper[4558]: I0120 18:07:46.594944 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e84aff63-9fc4-4604-8b6d-69bd89bf437d" path="/var/lib/kubelet/pods/e84aff63-9fc4-4604-8b6d-69bd89bf437d/volumes" Jan 20 18:07:46 crc kubenswrapper[4558]: I0120 18:07:46.595388 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e980f297-354c-4199-b212-827d9adba246" path="/var/lib/kubelet/pods/e980f297-354c-4199-b212-827d9adba246/volumes" Jan 20 18:07:46 crc kubenswrapper[4558]: I0120 18:07:46.596215 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efb0584b-776b-4a1d-8b88-c4c126e9cb00" path="/var/lib/kubelet/pods/efb0584b-776b-4a1d-8b88-c4c126e9cb00/volumes" Jan 20 18:07:46 crc kubenswrapper[4558]: I0120 18:07:46.596635 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f740f062-fc5a-4f8e-a109-d608844f290d" 
path="/var/lib/kubelet/pods/f740f062-fc5a-4f8e-a109-d608844f290d/volumes" Jan 20 18:07:47 crc kubenswrapper[4558]: I0120 18:07:47.268114 4558 generic.go:334] "Generic (PLEG): container finished" podID="a6a8563d-683b-444a-9739-fca985cbc165" containerID="e1998b979158206525b096e9dc4b6769c81878febfb4ed24f582cee3262f1951" exitCode=0 Jan 20 18:07:47 crc kubenswrapper[4558]: I0120 18:07:47.268228 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-mkxmj" event={"ID":"a6a8563d-683b-444a-9739-fca985cbc165","Type":"ContainerDied","Data":"e1998b979158206525b096e9dc4b6769c81878febfb4ed24f582cee3262f1951"} Jan 20 18:07:47 crc kubenswrapper[4558]: I0120 18:07:47.268318 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-mkxmj" event={"ID":"a6a8563d-683b-444a-9739-fca985cbc165","Type":"ContainerStarted","Data":"2df759568e92b812ff83a7b294d1d47a25245684c912240484688ff1084185a0"} Jan 20 18:07:47 crc kubenswrapper[4558]: I0120 18:07:47.268276 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-bmrqf" Jan 20 18:07:47 crc kubenswrapper[4558]: I0120 18:07:47.552486 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/configmap/e010402e-1507-46e1-a78f-facf49687d8e-edpm-compute-no-nodes\") pod \"dnsmasq-dnsmasq-64864b6d57-bmrqf\" (UID: \"e010402e-1507-46e1-a78f-facf49687d8e\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-bmrqf" Jan 20 18:07:47 crc kubenswrapper[4558]: E0120 18:07:47.552628 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/edpm-compute-no-nodes: configmap "edpm-compute-no-nodes" not found Jan 20 18:07:47 crc kubenswrapper[4558]: E0120 18:07:47.552688 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/e010402e-1507-46e1-a78f-facf49687d8e-edpm-compute-no-nodes podName:e010402e-1507-46e1-a78f-facf49687d8e nodeName:}" failed. No retries permitted until 2026-01-20 18:07:49.552670329 +0000 UTC m=+5163.313008296 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "edpm-compute-no-nodes" (UniqueName: "kubernetes.io/configmap/e010402e-1507-46e1-a78f-facf49687d8e-edpm-compute-no-nodes") pod "dnsmasq-dnsmasq-64864b6d57-bmrqf" (UID: "e010402e-1507-46e1-a78f-facf49687d8e") : configmap "edpm-compute-no-nodes" not found Jan 20 18:07:47 crc kubenswrapper[4558]: I0120 18:07:47.590031 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-bmrqf"] Jan 20 18:07:47 crc kubenswrapper[4558]: I0120 18:07:47.593441 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-bmrqf"] Jan 20 18:07:47 crc kubenswrapper[4558]: I0120 18:07:47.657471 4558 reconciler_common.go:293] "Volume detached for volume \"edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/configmap/e010402e-1507-46e1-a78f-facf49687d8e-edpm-compute-no-nodes\") on node \"crc\" DevicePath \"\"" Jan 20 18:07:48 crc kubenswrapper[4558]: I0120 18:07:48.280489 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-mkxmj" event={"ID":"a6a8563d-683b-444a-9739-fca985cbc165","Type":"ContainerStarted","Data":"ad22d47d6cea203448127e217c0302e1a43a5057f82208f929bc838b64f6be62"} Jan 20 18:07:48 crc kubenswrapper[4558]: I0120 18:07:48.280650 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-mkxmj" Jan 20 18:07:48 crc kubenswrapper[4558]: I0120 18:07:48.301038 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-mkxmj" podStartSLOduration=3.301016428 podStartE2EDuration="3.301016428s" podCreationTimestamp="2026-01-20 18:07:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:07:48.295890551 +0000 UTC m=+5162.056228528" watchObservedRunningTime="2026-01-20 18:07:48.301016428 +0000 UTC m=+5162.061354395" Jan 20 18:07:48 crc kubenswrapper[4558]: I0120 18:07:48.577208 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e010402e-1507-46e1-a78f-facf49687d8e" path="/var/lib/kubelet/pods/e010402e-1507-46e1-a78f-facf49687d8e/volumes" Jan 20 18:07:52 crc kubenswrapper[4558]: I0120 18:07:52.023783 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-dw4hb"] Jan 20 18:07:52 crc kubenswrapper[4558]: I0120 18:07:52.028641 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-dw4hb"] Jan 20 18:07:52 crc kubenswrapper[4558]: I0120 18:07:52.123326 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-v4sjc"] Jan 20 18:07:52 crc kubenswrapper[4558]: I0120 18:07:52.124411 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-v4sjc" Jan 20 18:07:52 crc kubenswrapper[4558]: I0120 18:07:52.126126 4558 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-k9ht8" Jan 20 18:07:52 crc kubenswrapper[4558]: I0120 18:07:52.126253 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Jan 20 18:07:52 crc kubenswrapper[4558]: I0120 18:07:52.127740 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/c6552d96-1292-4991-8189-3bce0797d0f7-crc-storage\") pod \"crc-storage-crc-v4sjc\" (UID: \"c6552d96-1292-4991-8189-3bce0797d0f7\") " pod="crc-storage/crc-storage-crc-v4sjc" Jan 20 18:07:52 crc kubenswrapper[4558]: I0120 18:07:52.127945 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pnlmc\" (UniqueName: \"kubernetes.io/projected/c6552d96-1292-4991-8189-3bce0797d0f7-kube-api-access-pnlmc\") pod \"crc-storage-crc-v4sjc\" (UID: \"c6552d96-1292-4991-8189-3bce0797d0f7\") " pod="crc-storage/crc-storage-crc-v4sjc" Jan 20 18:07:52 crc kubenswrapper[4558]: I0120 18:07:52.128148 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/c6552d96-1292-4991-8189-3bce0797d0f7-node-mnt\") pod \"crc-storage-crc-v4sjc\" (UID: \"c6552d96-1292-4991-8189-3bce0797d0f7\") " pod="crc-storage/crc-storage-crc-v4sjc" Jan 20 18:07:52 crc kubenswrapper[4558]: I0120 18:07:52.130427 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Jan 20 18:07:52 crc kubenswrapper[4558]: I0120 18:07:52.130444 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Jan 20 18:07:52 crc kubenswrapper[4558]: I0120 18:07:52.131529 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-v4sjc"] Jan 20 18:07:52 crc kubenswrapper[4558]: I0120 18:07:52.229793 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/c6552d96-1292-4991-8189-3bce0797d0f7-crc-storage\") pod \"crc-storage-crc-v4sjc\" (UID: \"c6552d96-1292-4991-8189-3bce0797d0f7\") " pod="crc-storage/crc-storage-crc-v4sjc" Jan 20 18:07:52 crc kubenswrapper[4558]: I0120 18:07:52.229851 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pnlmc\" (UniqueName: \"kubernetes.io/projected/c6552d96-1292-4991-8189-3bce0797d0f7-kube-api-access-pnlmc\") pod \"crc-storage-crc-v4sjc\" (UID: \"c6552d96-1292-4991-8189-3bce0797d0f7\") " pod="crc-storage/crc-storage-crc-v4sjc" Jan 20 18:07:52 crc kubenswrapper[4558]: I0120 18:07:52.229906 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/c6552d96-1292-4991-8189-3bce0797d0f7-node-mnt\") pod \"crc-storage-crc-v4sjc\" (UID: \"c6552d96-1292-4991-8189-3bce0797d0f7\") " pod="crc-storage/crc-storage-crc-v4sjc" Jan 20 18:07:52 crc kubenswrapper[4558]: I0120 18:07:52.230217 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/c6552d96-1292-4991-8189-3bce0797d0f7-node-mnt\") pod \"crc-storage-crc-v4sjc\" (UID: \"c6552d96-1292-4991-8189-3bce0797d0f7\") " 
pod="crc-storage/crc-storage-crc-v4sjc" Jan 20 18:07:52 crc kubenswrapper[4558]: I0120 18:07:52.230480 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/c6552d96-1292-4991-8189-3bce0797d0f7-crc-storage\") pod \"crc-storage-crc-v4sjc\" (UID: \"c6552d96-1292-4991-8189-3bce0797d0f7\") " pod="crc-storage/crc-storage-crc-v4sjc" Jan 20 18:07:52 crc kubenswrapper[4558]: I0120 18:07:52.248347 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pnlmc\" (UniqueName: \"kubernetes.io/projected/c6552d96-1292-4991-8189-3bce0797d0f7-kube-api-access-pnlmc\") pod \"crc-storage-crc-v4sjc\" (UID: \"c6552d96-1292-4991-8189-3bce0797d0f7\") " pod="crc-storage/crc-storage-crc-v4sjc" Jan 20 18:07:52 crc kubenswrapper[4558]: I0120 18:07:52.443424 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-v4sjc" Jan 20 18:07:52 crc kubenswrapper[4558]: I0120 18:07:52.574191 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9879d8af-ef37-40f5-8014-28a5eb53c952" path="/var/lib/kubelet/pods/9879d8af-ef37-40f5-8014-28a5eb53c952/volumes" Jan 20 18:07:52 crc kubenswrapper[4558]: I0120 18:07:52.824999 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-v4sjc"] Jan 20 18:07:53 crc kubenswrapper[4558]: I0120 18:07:53.331767 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-v4sjc" event={"ID":"c6552d96-1292-4991-8189-3bce0797d0f7","Type":"ContainerStarted","Data":"03912e1b5b3285f8899e3ee95a02ea90a1ec00410c47b17e53035bfd2a5e85f7"} Jan 20 18:07:54 crc kubenswrapper[4558]: I0120 18:07:54.344966 4558 generic.go:334] "Generic (PLEG): container finished" podID="c6552d96-1292-4991-8189-3bce0797d0f7" containerID="5ed05159451f1f29587c6b9b2f7f4cebe006caa13c327bf5763d8a47e37ee283" exitCode=0 Jan 20 18:07:54 crc kubenswrapper[4558]: I0120 18:07:54.345085 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-v4sjc" event={"ID":"c6552d96-1292-4991-8189-3bce0797d0f7","Type":"ContainerDied","Data":"5ed05159451f1f29587c6b9b2f7f4cebe006caa13c327bf5763d8a47e37ee283"} Jan 20 18:07:55 crc kubenswrapper[4558]: I0120 18:07:55.644970 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-v4sjc" Jan 20 18:07:55 crc kubenswrapper[4558]: I0120 18:07:55.785635 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pnlmc\" (UniqueName: \"kubernetes.io/projected/c6552d96-1292-4991-8189-3bce0797d0f7-kube-api-access-pnlmc\") pod \"c6552d96-1292-4991-8189-3bce0797d0f7\" (UID: \"c6552d96-1292-4991-8189-3bce0797d0f7\") " Jan 20 18:07:55 crc kubenswrapper[4558]: I0120 18:07:55.785804 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/c6552d96-1292-4991-8189-3bce0797d0f7-crc-storage\") pod \"c6552d96-1292-4991-8189-3bce0797d0f7\" (UID: \"c6552d96-1292-4991-8189-3bce0797d0f7\") " Jan 20 18:07:55 crc kubenswrapper[4558]: I0120 18:07:55.785929 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/c6552d96-1292-4991-8189-3bce0797d0f7-node-mnt\") pod \"c6552d96-1292-4991-8189-3bce0797d0f7\" (UID: \"c6552d96-1292-4991-8189-3bce0797d0f7\") " Jan 20 18:07:55 crc kubenswrapper[4558]: I0120 18:07:55.786278 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c6552d96-1292-4991-8189-3bce0797d0f7-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "c6552d96-1292-4991-8189-3bce0797d0f7" (UID: "c6552d96-1292-4991-8189-3bce0797d0f7"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 18:07:55 crc kubenswrapper[4558]: I0120 18:07:55.792354 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c6552d96-1292-4991-8189-3bce0797d0f7-kube-api-access-pnlmc" (OuterVolumeSpecName: "kube-api-access-pnlmc") pod "c6552d96-1292-4991-8189-3bce0797d0f7" (UID: "c6552d96-1292-4991-8189-3bce0797d0f7"). InnerVolumeSpecName "kube-api-access-pnlmc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:07:55 crc kubenswrapper[4558]: I0120 18:07:55.805932 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c6552d96-1292-4991-8189-3bce0797d0f7-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "c6552d96-1292-4991-8189-3bce0797d0f7" (UID: "c6552d96-1292-4991-8189-3bce0797d0f7"). InnerVolumeSpecName "crc-storage". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:07:55 crc kubenswrapper[4558]: I0120 18:07:55.887545 4558 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/c6552d96-1292-4991-8189-3bce0797d0f7-node-mnt\") on node \"crc\" DevicePath \"\"" Jan 20 18:07:55 crc kubenswrapper[4558]: I0120 18:07:55.887586 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pnlmc\" (UniqueName: \"kubernetes.io/projected/c6552d96-1292-4991-8189-3bce0797d0f7-kube-api-access-pnlmc\") on node \"crc\" DevicePath \"\"" Jan 20 18:07:55 crc kubenswrapper[4558]: I0120 18:07:55.887598 4558 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/c6552d96-1292-4991-8189-3bce0797d0f7-crc-storage\") on node \"crc\" DevicePath \"\"" Jan 20 18:07:56 crc kubenswrapper[4558]: I0120 18:07:56.129597 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-mkxmj" Jan 20 18:07:56 crc kubenswrapper[4558]: I0120 18:07:56.176274 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-67886899f9-wnvb9"] Jan 20 18:07:56 crc kubenswrapper[4558]: I0120 18:07:56.176488 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-67886899f9-wnvb9" podUID="47dec77f-e154-411f-88ab-8519ce4c237f" containerName="dnsmasq-dns" containerID="cri-o://e1a02a263499579047379b8549ce290f106baf12c89cff06aedd75bd32d72796" gracePeriod=10 Jan 20 18:07:56 crc kubenswrapper[4558]: I0120 18:07:56.370484 4558 generic.go:334] "Generic (PLEG): container finished" podID="47dec77f-e154-411f-88ab-8519ce4c237f" containerID="e1a02a263499579047379b8549ce290f106baf12c89cff06aedd75bd32d72796" exitCode=0 Jan 20 18:07:56 crc kubenswrapper[4558]: I0120 18:07:56.370677 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-67886899f9-wnvb9" event={"ID":"47dec77f-e154-411f-88ab-8519ce4c237f","Type":"ContainerDied","Data":"e1a02a263499579047379b8549ce290f106baf12c89cff06aedd75bd32d72796"} Jan 20 18:07:56 crc kubenswrapper[4558]: I0120 18:07:56.373188 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-v4sjc" event={"ID":"c6552d96-1292-4991-8189-3bce0797d0f7","Type":"ContainerDied","Data":"03912e1b5b3285f8899e3ee95a02ea90a1ec00410c47b17e53035bfd2a5e85f7"} Jan 20 18:07:56 crc kubenswrapper[4558]: I0120 18:07:56.373217 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="03912e1b5b3285f8899e3ee95a02ea90a1ec00410c47b17e53035bfd2a5e85f7" Jan 20 18:07:56 crc kubenswrapper[4558]: I0120 18:07:56.373285 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-v4sjc" Jan 20 18:07:56 crc kubenswrapper[4558]: I0120 18:07:56.514072 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-67886899f9-wnvb9" Jan 20 18:07:56 crc kubenswrapper[4558]: I0120 18:07:56.700744 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j6drd\" (UniqueName: \"kubernetes.io/projected/47dec77f-e154-411f-88ab-8519ce4c237f-kube-api-access-j6drd\") pod \"47dec77f-e154-411f-88ab-8519ce4c237f\" (UID: \"47dec77f-e154-411f-88ab-8519ce4c237f\") " Jan 20 18:07:56 crc kubenswrapper[4558]: I0120 18:07:56.700798 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/47dec77f-e154-411f-88ab-8519ce4c237f-dnsmasq-svc\") pod \"47dec77f-e154-411f-88ab-8519ce4c237f\" (UID: \"47dec77f-e154-411f-88ab-8519ce4c237f\") " Jan 20 18:07:56 crc kubenswrapper[4558]: I0120 18:07:56.700827 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/47dec77f-e154-411f-88ab-8519ce4c237f-config\") pod \"47dec77f-e154-411f-88ab-8519ce4c237f\" (UID: \"47dec77f-e154-411f-88ab-8519ce4c237f\") " Jan 20 18:07:56 crc kubenswrapper[4558]: I0120 18:07:56.700957 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/configmap/47dec77f-e154-411f-88ab-8519ce4c237f-edpm-compute-no-nodes\") pod \"47dec77f-e154-411f-88ab-8519ce4c237f\" (UID: \"47dec77f-e154-411f-88ab-8519ce4c237f\") " Jan 20 18:07:56 crc kubenswrapper[4558]: I0120 18:07:56.701268 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"edpm-compute-beta-nodeset\" (UniqueName: \"kubernetes.io/configmap/47dec77f-e154-411f-88ab-8519ce4c237f-edpm-compute-beta-nodeset\") pod \"47dec77f-e154-411f-88ab-8519ce4c237f\" (UID: \"47dec77f-e154-411f-88ab-8519ce4c237f\") " Jan 20 18:07:56 crc kubenswrapper[4558]: I0120 18:07:56.706107 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/47dec77f-e154-411f-88ab-8519ce4c237f-kube-api-access-j6drd" (OuterVolumeSpecName: "kube-api-access-j6drd") pod "47dec77f-e154-411f-88ab-8519ce4c237f" (UID: "47dec77f-e154-411f-88ab-8519ce4c237f"). InnerVolumeSpecName "kube-api-access-j6drd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:07:56 crc kubenswrapper[4558]: I0120 18:07:56.729993 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/47dec77f-e154-411f-88ab-8519ce4c237f-edpm-compute-no-nodes" (OuterVolumeSpecName: "edpm-compute-no-nodes") pod "47dec77f-e154-411f-88ab-8519ce4c237f" (UID: "47dec77f-e154-411f-88ab-8519ce4c237f"). InnerVolumeSpecName "edpm-compute-no-nodes". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:07:56 crc kubenswrapper[4558]: I0120 18:07:56.732432 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/47dec77f-e154-411f-88ab-8519ce4c237f-dnsmasq-svc" (OuterVolumeSpecName: "dnsmasq-svc") pod "47dec77f-e154-411f-88ab-8519ce4c237f" (UID: "47dec77f-e154-411f-88ab-8519ce4c237f"). InnerVolumeSpecName "dnsmasq-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:07:56 crc kubenswrapper[4558]: I0120 18:07:56.734325 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/47dec77f-e154-411f-88ab-8519ce4c237f-edpm-compute-beta-nodeset" (OuterVolumeSpecName: "edpm-compute-beta-nodeset") pod "47dec77f-e154-411f-88ab-8519ce4c237f" (UID: "47dec77f-e154-411f-88ab-8519ce4c237f"). InnerVolumeSpecName "edpm-compute-beta-nodeset". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:07:56 crc kubenswrapper[4558]: I0120 18:07:56.734464 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/47dec77f-e154-411f-88ab-8519ce4c237f-config" (OuterVolumeSpecName: "config") pod "47dec77f-e154-411f-88ab-8519ce4c237f" (UID: "47dec77f-e154-411f-88ab-8519ce4c237f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:07:56 crc kubenswrapper[4558]: I0120 18:07:56.804943 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j6drd\" (UniqueName: \"kubernetes.io/projected/47dec77f-e154-411f-88ab-8519ce4c237f-kube-api-access-j6drd\") on node \"crc\" DevicePath \"\"" Jan 20 18:07:56 crc kubenswrapper[4558]: I0120 18:07:56.805047 4558 reconciler_common.go:293] "Volume detached for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/47dec77f-e154-411f-88ab-8519ce4c237f-dnsmasq-svc\") on node \"crc\" DevicePath \"\"" Jan 20 18:07:56 crc kubenswrapper[4558]: I0120 18:07:56.805111 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/47dec77f-e154-411f-88ab-8519ce4c237f-config\") on node \"crc\" DevicePath \"\"" Jan 20 18:07:56 crc kubenswrapper[4558]: I0120 18:07:56.805180 4558 reconciler_common.go:293] "Volume detached for volume \"edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/configmap/47dec77f-e154-411f-88ab-8519ce4c237f-edpm-compute-no-nodes\") on node \"crc\" DevicePath \"\"" Jan 20 18:07:56 crc kubenswrapper[4558]: I0120 18:07:56.805250 4558 reconciler_common.go:293] "Volume detached for volume \"edpm-compute-beta-nodeset\" (UniqueName: \"kubernetes.io/configmap/47dec77f-e154-411f-88ab-8519ce4c237f-edpm-compute-beta-nodeset\") on node \"crc\" DevicePath \"\"" Jan 20 18:07:57 crc kubenswrapper[4558]: I0120 18:07:57.396506 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-67886899f9-wnvb9" event={"ID":"47dec77f-e154-411f-88ab-8519ce4c237f","Type":"ContainerDied","Data":"510f21eee6378db0a51f18ed0ab08e3d1b5c0212d0696a3063c54c2967b5010e"} Jan 20 18:07:57 crc kubenswrapper[4558]: I0120 18:07:57.396770 4558 scope.go:117] "RemoveContainer" containerID="e1a02a263499579047379b8549ce290f106baf12c89cff06aedd75bd32d72796" Jan 20 18:07:57 crc kubenswrapper[4558]: I0120 18:07:57.396669 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-67886899f9-wnvb9" Jan 20 18:07:57 crc kubenswrapper[4558]: I0120 18:07:57.420212 4558 scope.go:117] "RemoveContainer" containerID="94ef870003c23785447a149bbe5c270f08286b93c2e4be50cc2eb7c40a01e920" Jan 20 18:07:57 crc kubenswrapper[4558]: I0120 18:07:57.427915 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-67886899f9-wnvb9"] Jan 20 18:07:57 crc kubenswrapper[4558]: I0120 18:07:57.433934 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-67886899f9-wnvb9"] Jan 20 18:07:58 crc kubenswrapper[4558]: I0120 18:07:58.437774 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-v4sjc"] Jan 20 18:07:58 crc kubenswrapper[4558]: I0120 18:07:58.441839 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-v4sjc"] Jan 20 18:07:58 crc kubenswrapper[4558]: I0120 18:07:58.533475 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-w5fgr"] Jan 20 18:07:58 crc kubenswrapper[4558]: E0120 18:07:58.533799 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47dec77f-e154-411f-88ab-8519ce4c237f" containerName="init" Jan 20 18:07:58 crc kubenswrapper[4558]: I0120 18:07:58.533819 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="47dec77f-e154-411f-88ab-8519ce4c237f" containerName="init" Jan 20 18:07:58 crc kubenswrapper[4558]: E0120 18:07:58.533840 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47dec77f-e154-411f-88ab-8519ce4c237f" containerName="dnsmasq-dns" Jan 20 18:07:58 crc kubenswrapper[4558]: I0120 18:07:58.533847 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="47dec77f-e154-411f-88ab-8519ce4c237f" containerName="dnsmasq-dns" Jan 20 18:07:58 crc kubenswrapper[4558]: E0120 18:07:58.533864 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6552d96-1292-4991-8189-3bce0797d0f7" containerName="storage" Jan 20 18:07:58 crc kubenswrapper[4558]: I0120 18:07:58.533870 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6552d96-1292-4991-8189-3bce0797d0f7" containerName="storage" Jan 20 18:07:58 crc kubenswrapper[4558]: I0120 18:07:58.534016 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="47dec77f-e154-411f-88ab-8519ce4c237f" containerName="dnsmasq-dns" Jan 20 18:07:58 crc kubenswrapper[4558]: I0120 18:07:58.534048 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c6552d96-1292-4991-8189-3bce0797d0f7" containerName="storage" Jan 20 18:07:58 crc kubenswrapper[4558]: I0120 18:07:58.534556 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-w5fgr" Jan 20 18:07:58 crc kubenswrapper[4558]: I0120 18:07:58.537072 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Jan 20 18:07:58 crc kubenswrapper[4558]: I0120 18:07:58.537340 4558 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-k9ht8" Jan 20 18:07:58 crc kubenswrapper[4558]: I0120 18:07:58.537379 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Jan 20 18:07:58 crc kubenswrapper[4558]: I0120 18:07:58.538655 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Jan 20 18:07:58 crc kubenswrapper[4558]: I0120 18:07:58.542632 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-w5fgr"] Jan 20 18:07:58 crc kubenswrapper[4558]: I0120 18:07:58.566473 4558 scope.go:117] "RemoveContainer" containerID="6b02d31ec11fe5435af988e35303a6f66b5afa5ef952ce5f7cc4b3cd5f4d9497" Jan 20 18:07:58 crc kubenswrapper[4558]: E0120 18:07:58.566804 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:07:58 crc kubenswrapper[4558]: I0120 18:07:58.573214 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="47dec77f-e154-411f-88ab-8519ce4c237f" path="/var/lib/kubelet/pods/47dec77f-e154-411f-88ab-8519ce4c237f/volumes" Jan 20 18:07:58 crc kubenswrapper[4558]: I0120 18:07:58.573754 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c6552d96-1292-4991-8189-3bce0797d0f7" path="/var/lib/kubelet/pods/c6552d96-1292-4991-8189-3bce0797d0f7/volumes" Jan 20 18:07:58 crc kubenswrapper[4558]: I0120 18:07:58.630907 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nghrc\" (UniqueName: \"kubernetes.io/projected/bd3b2016-61a9-4860-8f89-df916242b621-kube-api-access-nghrc\") pod \"crc-storage-crc-w5fgr\" (UID: \"bd3b2016-61a9-4860-8f89-df916242b621\") " pod="crc-storage/crc-storage-crc-w5fgr" Jan 20 18:07:58 crc kubenswrapper[4558]: I0120 18:07:58.631523 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/bd3b2016-61a9-4860-8f89-df916242b621-node-mnt\") pod \"crc-storage-crc-w5fgr\" (UID: \"bd3b2016-61a9-4860-8f89-df916242b621\") " pod="crc-storage/crc-storage-crc-w5fgr" Jan 20 18:07:58 crc kubenswrapper[4558]: I0120 18:07:58.631653 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/bd3b2016-61a9-4860-8f89-df916242b621-crc-storage\") pod \"crc-storage-crc-w5fgr\" (UID: \"bd3b2016-61a9-4860-8f89-df916242b621\") " pod="crc-storage/crc-storage-crc-w5fgr" Jan 20 18:07:58 crc kubenswrapper[4558]: I0120 18:07:58.732864 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nghrc\" (UniqueName: \"kubernetes.io/projected/bd3b2016-61a9-4860-8f89-df916242b621-kube-api-access-nghrc\") pod 
\"crc-storage-crc-w5fgr\" (UID: \"bd3b2016-61a9-4860-8f89-df916242b621\") " pod="crc-storage/crc-storage-crc-w5fgr" Jan 20 18:07:58 crc kubenswrapper[4558]: I0120 18:07:58.733157 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/bd3b2016-61a9-4860-8f89-df916242b621-node-mnt\") pod \"crc-storage-crc-w5fgr\" (UID: \"bd3b2016-61a9-4860-8f89-df916242b621\") " pod="crc-storage/crc-storage-crc-w5fgr" Jan 20 18:07:58 crc kubenswrapper[4558]: I0120 18:07:58.733355 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/bd3b2016-61a9-4860-8f89-df916242b621-crc-storage\") pod \"crc-storage-crc-w5fgr\" (UID: \"bd3b2016-61a9-4860-8f89-df916242b621\") " pod="crc-storage/crc-storage-crc-w5fgr" Jan 20 18:07:58 crc kubenswrapper[4558]: I0120 18:07:58.733408 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/bd3b2016-61a9-4860-8f89-df916242b621-node-mnt\") pod \"crc-storage-crc-w5fgr\" (UID: \"bd3b2016-61a9-4860-8f89-df916242b621\") " pod="crc-storage/crc-storage-crc-w5fgr" Jan 20 18:07:58 crc kubenswrapper[4558]: I0120 18:07:58.734073 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/bd3b2016-61a9-4860-8f89-df916242b621-crc-storage\") pod \"crc-storage-crc-w5fgr\" (UID: \"bd3b2016-61a9-4860-8f89-df916242b621\") " pod="crc-storage/crc-storage-crc-w5fgr" Jan 20 18:07:58 crc kubenswrapper[4558]: I0120 18:07:58.749727 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nghrc\" (UniqueName: \"kubernetes.io/projected/bd3b2016-61a9-4860-8f89-df916242b621-kube-api-access-nghrc\") pod \"crc-storage-crc-w5fgr\" (UID: \"bd3b2016-61a9-4860-8f89-df916242b621\") " pod="crc-storage/crc-storage-crc-w5fgr" Jan 20 18:07:58 crc kubenswrapper[4558]: I0120 18:07:58.851424 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-w5fgr" Jan 20 18:07:59 crc kubenswrapper[4558]: I0120 18:07:59.232888 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-w5fgr"] Jan 20 18:07:59 crc kubenswrapper[4558]: I0120 18:07:59.419009 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-w5fgr" event={"ID":"bd3b2016-61a9-4860-8f89-df916242b621","Type":"ContainerStarted","Data":"fe90a8435ff3ed8145f4425a1ce672a421b4db5857f6d837365b92642b2a928c"} Jan 20 18:08:00 crc kubenswrapper[4558]: I0120 18:08:00.431369 4558 generic.go:334] "Generic (PLEG): container finished" podID="bd3b2016-61a9-4860-8f89-df916242b621" containerID="b13c9d83994729b60e9bfa90e3160a1b65ced814b36191c04ca3d9619fcd97dc" exitCode=0 Jan 20 18:08:00 crc kubenswrapper[4558]: I0120 18:08:00.431565 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-w5fgr" event={"ID":"bd3b2016-61a9-4860-8f89-df916242b621","Type":"ContainerDied","Data":"b13c9d83994729b60e9bfa90e3160a1b65ced814b36191c04ca3d9619fcd97dc"} Jan 20 18:08:01 crc kubenswrapper[4558]: I0120 18:08:01.702321 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-w5fgr" Jan 20 18:08:01 crc kubenswrapper[4558]: I0120 18:08:01.876796 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/bd3b2016-61a9-4860-8f89-df916242b621-node-mnt\") pod \"bd3b2016-61a9-4860-8f89-df916242b621\" (UID: \"bd3b2016-61a9-4860-8f89-df916242b621\") " Jan 20 18:08:01 crc kubenswrapper[4558]: I0120 18:08:01.877007 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/bd3b2016-61a9-4860-8f89-df916242b621-crc-storage\") pod \"bd3b2016-61a9-4860-8f89-df916242b621\" (UID: \"bd3b2016-61a9-4860-8f89-df916242b621\") " Jan 20 18:08:01 crc kubenswrapper[4558]: I0120 18:08:01.877268 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nghrc\" (UniqueName: \"kubernetes.io/projected/bd3b2016-61a9-4860-8f89-df916242b621-kube-api-access-nghrc\") pod \"bd3b2016-61a9-4860-8f89-df916242b621\" (UID: \"bd3b2016-61a9-4860-8f89-df916242b621\") " Jan 20 18:08:01 crc kubenswrapper[4558]: I0120 18:08:01.878014 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bd3b2016-61a9-4860-8f89-df916242b621-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "bd3b2016-61a9-4860-8f89-df916242b621" (UID: "bd3b2016-61a9-4860-8f89-df916242b621"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 18:08:01 crc kubenswrapper[4558]: I0120 18:08:01.881395 4558 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/bd3b2016-61a9-4860-8f89-df916242b621-node-mnt\") on node \"crc\" DevicePath \"\"" Jan 20 18:08:01 crc kubenswrapper[4558]: I0120 18:08:01.894379 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd3b2016-61a9-4860-8f89-df916242b621-kube-api-access-nghrc" (OuterVolumeSpecName: "kube-api-access-nghrc") pod "bd3b2016-61a9-4860-8f89-df916242b621" (UID: "bd3b2016-61a9-4860-8f89-df916242b621"). InnerVolumeSpecName "kube-api-access-nghrc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:08:01 crc kubenswrapper[4558]: I0120 18:08:01.897931 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bd3b2016-61a9-4860-8f89-df916242b621-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "bd3b2016-61a9-4860-8f89-df916242b621" (UID: "bd3b2016-61a9-4860-8f89-df916242b621"). InnerVolumeSpecName "crc-storage". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:08:01 crc kubenswrapper[4558]: I0120 18:08:01.983636 4558 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/bd3b2016-61a9-4860-8f89-df916242b621-crc-storage\") on node \"crc\" DevicePath \"\"" Jan 20 18:08:01 crc kubenswrapper[4558]: I0120 18:08:01.983680 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nghrc\" (UniqueName: \"kubernetes.io/projected/bd3b2016-61a9-4860-8f89-df916242b621-kube-api-access-nghrc\") on node \"crc\" DevicePath \"\"" Jan 20 18:08:02 crc kubenswrapper[4558]: I0120 18:08:02.453088 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-w5fgr" event={"ID":"bd3b2016-61a9-4860-8f89-df916242b621","Type":"ContainerDied","Data":"fe90a8435ff3ed8145f4425a1ce672a421b4db5857f6d837365b92642b2a928c"} Jan 20 18:08:02 crc kubenswrapper[4558]: I0120 18:08:02.453145 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fe90a8435ff3ed8145f4425a1ce672a421b4db5857f6d837365b92642b2a928c" Jan 20 18:08:02 crc kubenswrapper[4558]: I0120 18:08:02.453419 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-w5fgr" Jan 20 18:08:04 crc kubenswrapper[4558]: I0120 18:08:04.805055 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-78c7b787f5-cktvd"] Jan 20 18:08:04 crc kubenswrapper[4558]: E0120 18:08:04.805701 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd3b2016-61a9-4860-8f89-df916242b621" containerName="storage" Jan 20 18:08:04 crc kubenswrapper[4558]: I0120 18:08:04.805715 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd3b2016-61a9-4860-8f89-df916242b621" containerName="storage" Jan 20 18:08:04 crc kubenswrapper[4558]: I0120 18:08:04.805873 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="bd3b2016-61a9-4860-8f89-df916242b621" containerName="storage" Jan 20 18:08:04 crc kubenswrapper[4558]: I0120 18:08:04.806591 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-78c7b787f5-cktvd" Jan 20 18:08:04 crc kubenswrapper[4558]: I0120 18:08:04.809258 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-edpm-tls" Jan 20 18:08:04 crc kubenswrapper[4558]: I0120 18:08:04.816854 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-78c7b787f5-cktvd"] Jan 20 18:08:04 crc kubenswrapper[4558]: I0120 18:08:04.876280 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-78c7b787f5-cktvd"] Jan 20 18:08:04 crc kubenswrapper[4558]: E0120 18:08:04.876951 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[config dnsmasq-svc kube-api-access-4rf2j openstack-edpm-tls], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-78c7b787f5-cktvd" podUID="eb7a5c09-9668-4dba-bb86-14d4dbc063f9" Jan 20 18:08:04 crc kubenswrapper[4558]: I0120 18:08:04.902361 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-79cc674687-m284h"] Jan 20 18:08:04 crc kubenswrapper[4558]: I0120 18:08:04.903508 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-79cc674687-m284h" Jan 20 18:08:04 crc kubenswrapper[4558]: I0120 18:08:04.913671 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-79cc674687-m284h"] Jan 20 18:08:04 crc kubenswrapper[4558]: I0120 18:08:04.925330 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4rf2j\" (UniqueName: \"kubernetes.io/projected/eb7a5c09-9668-4dba-bb86-14d4dbc063f9-kube-api-access-4rf2j\") pod \"dnsmasq-dnsmasq-78c7b787f5-cktvd\" (UID: \"eb7a5c09-9668-4dba-bb86-14d4dbc063f9\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-78c7b787f5-cktvd" Jan 20 18:08:04 crc kubenswrapper[4558]: I0120 18:08:04.925368 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eb7a5c09-9668-4dba-bb86-14d4dbc063f9-config\") pod \"dnsmasq-dnsmasq-78c7b787f5-cktvd\" (UID: \"eb7a5c09-9668-4dba-bb86-14d4dbc063f9\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-78c7b787f5-cktvd" Jan 20 18:08:04 crc kubenswrapper[4558]: I0120 18:08:04.925409 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/eb7a5c09-9668-4dba-bb86-14d4dbc063f9-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-78c7b787f5-cktvd\" (UID: \"eb7a5c09-9668-4dba-bb86-14d4dbc063f9\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-78c7b787f5-cktvd" Jan 20 18:08:04 crc kubenswrapper[4558]: I0120 18:08:04.925436 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-tls\" (UniqueName: \"kubernetes.io/configmap/eb7a5c09-9668-4dba-bb86-14d4dbc063f9-openstack-edpm-tls\") pod \"dnsmasq-dnsmasq-78c7b787f5-cktvd\" (UID: \"eb7a5c09-9668-4dba-bb86-14d4dbc063f9\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-78c7b787f5-cktvd" Jan 20 18:08:05 crc kubenswrapper[4558]: I0120 18:08:05.026847 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/86191493-d485-4033-941c-1942d2499f99-config\") pod \"dnsmasq-dnsmasq-79cc674687-m284h\" (UID: \"86191493-d485-4033-941c-1942d2499f99\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-79cc674687-m284h" Jan 20 18:08:05 crc kubenswrapper[4558]: I0120 18:08:05.026908 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-tls\" (UniqueName: \"kubernetes.io/configmap/86191493-d485-4033-941c-1942d2499f99-openstack-edpm-tls\") pod \"dnsmasq-dnsmasq-79cc674687-m284h\" (UID: \"86191493-d485-4033-941c-1942d2499f99\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-79cc674687-m284h" Jan 20 18:08:05 crc kubenswrapper[4558]: I0120 18:08:05.026997 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/86191493-d485-4033-941c-1942d2499f99-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-79cc674687-m284h\" (UID: \"86191493-d485-4033-941c-1942d2499f99\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-79cc674687-m284h" Jan 20 18:08:05 crc kubenswrapper[4558]: I0120 18:08:05.027065 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v8zln\" (UniqueName: \"kubernetes.io/projected/86191493-d485-4033-941c-1942d2499f99-kube-api-access-v8zln\") pod 
\"dnsmasq-dnsmasq-79cc674687-m284h\" (UID: \"86191493-d485-4033-941c-1942d2499f99\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-79cc674687-m284h" Jan 20 18:08:05 crc kubenswrapper[4558]: I0120 18:08:05.027253 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4rf2j\" (UniqueName: \"kubernetes.io/projected/eb7a5c09-9668-4dba-bb86-14d4dbc063f9-kube-api-access-4rf2j\") pod \"dnsmasq-dnsmasq-78c7b787f5-cktvd\" (UID: \"eb7a5c09-9668-4dba-bb86-14d4dbc063f9\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-78c7b787f5-cktvd" Jan 20 18:08:05 crc kubenswrapper[4558]: I0120 18:08:05.027300 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eb7a5c09-9668-4dba-bb86-14d4dbc063f9-config\") pod \"dnsmasq-dnsmasq-78c7b787f5-cktvd\" (UID: \"eb7a5c09-9668-4dba-bb86-14d4dbc063f9\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-78c7b787f5-cktvd" Jan 20 18:08:05 crc kubenswrapper[4558]: I0120 18:08:05.027373 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/eb7a5c09-9668-4dba-bb86-14d4dbc063f9-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-78c7b787f5-cktvd\" (UID: \"eb7a5c09-9668-4dba-bb86-14d4dbc063f9\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-78c7b787f5-cktvd" Jan 20 18:08:05 crc kubenswrapper[4558]: I0120 18:08:05.027433 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-tls\" (UniqueName: \"kubernetes.io/configmap/eb7a5c09-9668-4dba-bb86-14d4dbc063f9-openstack-edpm-tls\") pod \"dnsmasq-dnsmasq-78c7b787f5-cktvd\" (UID: \"eb7a5c09-9668-4dba-bb86-14d4dbc063f9\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-78c7b787f5-cktvd" Jan 20 18:08:05 crc kubenswrapper[4558]: I0120 18:08:05.028400 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eb7a5c09-9668-4dba-bb86-14d4dbc063f9-config\") pod \"dnsmasq-dnsmasq-78c7b787f5-cktvd\" (UID: \"eb7a5c09-9668-4dba-bb86-14d4dbc063f9\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-78c7b787f5-cktvd" Jan 20 18:08:05 crc kubenswrapper[4558]: I0120 18:08:05.028416 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-tls\" (UniqueName: \"kubernetes.io/configmap/eb7a5c09-9668-4dba-bb86-14d4dbc063f9-openstack-edpm-tls\") pod \"dnsmasq-dnsmasq-78c7b787f5-cktvd\" (UID: \"eb7a5c09-9668-4dba-bb86-14d4dbc063f9\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-78c7b787f5-cktvd" Jan 20 18:08:05 crc kubenswrapper[4558]: I0120 18:08:05.028424 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/eb7a5c09-9668-4dba-bb86-14d4dbc063f9-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-78c7b787f5-cktvd\" (UID: \"eb7a5c09-9668-4dba-bb86-14d4dbc063f9\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-78c7b787f5-cktvd" Jan 20 18:08:05 crc kubenswrapper[4558]: I0120 18:08:05.045318 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4rf2j\" (UniqueName: \"kubernetes.io/projected/eb7a5c09-9668-4dba-bb86-14d4dbc063f9-kube-api-access-4rf2j\") pod \"dnsmasq-dnsmasq-78c7b787f5-cktvd\" (UID: \"eb7a5c09-9668-4dba-bb86-14d4dbc063f9\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-78c7b787f5-cktvd" Jan 20 18:08:05 crc kubenswrapper[4558]: I0120 18:08:05.128528 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" 
(UniqueName: \"kubernetes.io/configmap/86191493-d485-4033-941c-1942d2499f99-config\") pod \"dnsmasq-dnsmasq-79cc674687-m284h\" (UID: \"86191493-d485-4033-941c-1942d2499f99\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-79cc674687-m284h" Jan 20 18:08:05 crc kubenswrapper[4558]: I0120 18:08:05.128581 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-tls\" (UniqueName: \"kubernetes.io/configmap/86191493-d485-4033-941c-1942d2499f99-openstack-edpm-tls\") pod \"dnsmasq-dnsmasq-79cc674687-m284h\" (UID: \"86191493-d485-4033-941c-1942d2499f99\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-79cc674687-m284h" Jan 20 18:08:05 crc kubenswrapper[4558]: I0120 18:08:05.128615 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/86191493-d485-4033-941c-1942d2499f99-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-79cc674687-m284h\" (UID: \"86191493-d485-4033-941c-1942d2499f99\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-79cc674687-m284h" Jan 20 18:08:05 crc kubenswrapper[4558]: I0120 18:08:05.128660 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v8zln\" (UniqueName: \"kubernetes.io/projected/86191493-d485-4033-941c-1942d2499f99-kube-api-access-v8zln\") pod \"dnsmasq-dnsmasq-79cc674687-m284h\" (UID: \"86191493-d485-4033-941c-1942d2499f99\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-79cc674687-m284h" Jan 20 18:08:05 crc kubenswrapper[4558]: I0120 18:08:05.129325 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/86191493-d485-4033-941c-1942d2499f99-config\") pod \"dnsmasq-dnsmasq-79cc674687-m284h\" (UID: \"86191493-d485-4033-941c-1942d2499f99\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-79cc674687-m284h" Jan 20 18:08:05 crc kubenswrapper[4558]: I0120 18:08:05.129391 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-tls\" (UniqueName: \"kubernetes.io/configmap/86191493-d485-4033-941c-1942d2499f99-openstack-edpm-tls\") pod \"dnsmasq-dnsmasq-79cc674687-m284h\" (UID: \"86191493-d485-4033-941c-1942d2499f99\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-79cc674687-m284h" Jan 20 18:08:05 crc kubenswrapper[4558]: I0120 18:08:05.129504 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/86191493-d485-4033-941c-1942d2499f99-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-79cc674687-m284h\" (UID: \"86191493-d485-4033-941c-1942d2499f99\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-79cc674687-m284h" Jan 20 18:08:05 crc kubenswrapper[4558]: I0120 18:08:05.142111 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v8zln\" (UniqueName: \"kubernetes.io/projected/86191493-d485-4033-941c-1942d2499f99-kube-api-access-v8zln\") pod \"dnsmasq-dnsmasq-79cc674687-m284h\" (UID: \"86191493-d485-4033-941c-1942d2499f99\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-79cc674687-m284h" Jan 20 18:08:05 crc kubenswrapper[4558]: I0120 18:08:05.217588 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-79cc674687-m284h" Jan 20 18:08:05 crc kubenswrapper[4558]: I0120 18:08:05.480102 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-78c7b787f5-cktvd" Jan 20 18:08:05 crc kubenswrapper[4558]: I0120 18:08:05.491283 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-78c7b787f5-cktvd" Jan 20 18:08:05 crc kubenswrapper[4558]: I0120 18:08:05.536335 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-tls\" (UniqueName: \"kubernetes.io/configmap/eb7a5c09-9668-4dba-bb86-14d4dbc063f9-openstack-edpm-tls\") pod \"eb7a5c09-9668-4dba-bb86-14d4dbc063f9\" (UID: \"eb7a5c09-9668-4dba-bb86-14d4dbc063f9\") " Jan 20 18:08:05 crc kubenswrapper[4558]: I0120 18:08:05.536386 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4rf2j\" (UniqueName: \"kubernetes.io/projected/eb7a5c09-9668-4dba-bb86-14d4dbc063f9-kube-api-access-4rf2j\") pod \"eb7a5c09-9668-4dba-bb86-14d4dbc063f9\" (UID: \"eb7a5c09-9668-4dba-bb86-14d4dbc063f9\") " Jan 20 18:08:05 crc kubenswrapper[4558]: I0120 18:08:05.536470 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/eb7a5c09-9668-4dba-bb86-14d4dbc063f9-dnsmasq-svc\") pod \"eb7a5c09-9668-4dba-bb86-14d4dbc063f9\" (UID: \"eb7a5c09-9668-4dba-bb86-14d4dbc063f9\") " Jan 20 18:08:05 crc kubenswrapper[4558]: I0120 18:08:05.536506 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eb7a5c09-9668-4dba-bb86-14d4dbc063f9-config\") pod \"eb7a5c09-9668-4dba-bb86-14d4dbc063f9\" (UID: \"eb7a5c09-9668-4dba-bb86-14d4dbc063f9\") " Jan 20 18:08:05 crc kubenswrapper[4558]: I0120 18:08:05.536994 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eb7a5c09-9668-4dba-bb86-14d4dbc063f9-openstack-edpm-tls" (OuterVolumeSpecName: "openstack-edpm-tls") pod "eb7a5c09-9668-4dba-bb86-14d4dbc063f9" (UID: "eb7a5c09-9668-4dba-bb86-14d4dbc063f9"). InnerVolumeSpecName "openstack-edpm-tls". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:08:05 crc kubenswrapper[4558]: I0120 18:08:05.537130 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eb7a5c09-9668-4dba-bb86-14d4dbc063f9-config" (OuterVolumeSpecName: "config") pod "eb7a5c09-9668-4dba-bb86-14d4dbc063f9" (UID: "eb7a5c09-9668-4dba-bb86-14d4dbc063f9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:08:05 crc kubenswrapper[4558]: I0120 18:08:05.537640 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eb7a5c09-9668-4dba-bb86-14d4dbc063f9-dnsmasq-svc" (OuterVolumeSpecName: "dnsmasq-svc") pod "eb7a5c09-9668-4dba-bb86-14d4dbc063f9" (UID: "eb7a5c09-9668-4dba-bb86-14d4dbc063f9"). InnerVolumeSpecName "dnsmasq-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:08:05 crc kubenswrapper[4558]: I0120 18:08:05.540021 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eb7a5c09-9668-4dba-bb86-14d4dbc063f9-kube-api-access-4rf2j" (OuterVolumeSpecName: "kube-api-access-4rf2j") pod "eb7a5c09-9668-4dba-bb86-14d4dbc063f9" (UID: "eb7a5c09-9668-4dba-bb86-14d4dbc063f9"). InnerVolumeSpecName "kube-api-access-4rf2j". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:08:05 crc kubenswrapper[4558]: I0120 18:08:05.604404 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-79cc674687-m284h"] Jan 20 18:08:05 crc kubenswrapper[4558]: I0120 18:08:05.637963 4558 reconciler_common.go:293] "Volume detached for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/eb7a5c09-9668-4dba-bb86-14d4dbc063f9-dnsmasq-svc\") on node \"crc\" DevicePath \"\"" Jan 20 18:08:05 crc kubenswrapper[4558]: I0120 18:08:05.638001 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/eb7a5c09-9668-4dba-bb86-14d4dbc063f9-config\") on node \"crc\" DevicePath \"\"" Jan 20 18:08:05 crc kubenswrapper[4558]: I0120 18:08:05.638017 4558 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-tls\" (UniqueName: \"kubernetes.io/configmap/eb7a5c09-9668-4dba-bb86-14d4dbc063f9-openstack-edpm-tls\") on node \"crc\" DevicePath \"\"" Jan 20 18:08:05 crc kubenswrapper[4558]: I0120 18:08:05.638035 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4rf2j\" (UniqueName: \"kubernetes.io/projected/eb7a5c09-9668-4dba-bb86-14d4dbc063f9-kube-api-access-4rf2j\") on node \"crc\" DevicePath \"\"" Jan 20 18:08:06 crc kubenswrapper[4558]: I0120 18:08:06.493398 4558 generic.go:334] "Generic (PLEG): container finished" podID="86191493-d485-4033-941c-1942d2499f99" containerID="8d71452039d14ffa8a2819d7d187e816135404e2a83b38eaa9e01696adf14966" exitCode=0 Jan 20 18:08:06 crc kubenswrapper[4558]: I0120 18:08:06.493526 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-79cc674687-m284h" event={"ID":"86191493-d485-4033-941c-1942d2499f99","Type":"ContainerDied","Data":"8d71452039d14ffa8a2819d7d187e816135404e2a83b38eaa9e01696adf14966"} Jan 20 18:08:06 crc kubenswrapper[4558]: I0120 18:08:06.493748 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-79cc674687-m284h" event={"ID":"86191493-d485-4033-941c-1942d2499f99","Type":"ContainerStarted","Data":"3c68ed8d1528d8b1be0e5bfc9cc772e135eec3b3ee0b1409ba8ee7a25b3c3cec"} Jan 20 18:08:06 crc kubenswrapper[4558]: I0120 18:08:06.493789 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-78c7b787f5-cktvd" Jan 20 18:08:06 crc kubenswrapper[4558]: I0120 18:08:06.589735 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-78c7b787f5-cktvd"] Jan 20 18:08:06 crc kubenswrapper[4558]: I0120 18:08:06.589987 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-78c7b787f5-cktvd"] Jan 20 18:08:07 crc kubenswrapper[4558]: I0120 18:08:07.504154 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-79cc674687-m284h" event={"ID":"86191493-d485-4033-941c-1942d2499f99","Type":"ContainerStarted","Data":"db84b778d8c36e69c27fd70bfe07b24f9d8e00da4abded0a905f303870018bf0"} Jan 20 18:08:07 crc kubenswrapper[4558]: I0120 18:08:07.504338 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-79cc674687-m284h" Jan 20 18:08:07 crc kubenswrapper[4558]: I0120 18:08:07.527377 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-79cc674687-m284h" podStartSLOduration=3.527358635 podStartE2EDuration="3.527358635s" podCreationTimestamp="2026-01-20 18:08:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:08:07.52482427 +0000 UTC m=+5181.285162237" watchObservedRunningTime="2026-01-20 18:08:07.527358635 +0000 UTC m=+5181.287696602" Jan 20 18:08:08 crc kubenswrapper[4558]: I0120 18:08:08.576368 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eb7a5c09-9668-4dba-bb86-14d4dbc063f9" path="/var/lib/kubelet/pods/eb7a5c09-9668-4dba-bb86-14d4dbc063f9/volumes" Jan 20 18:08:12 crc kubenswrapper[4558]: I0120 18:08:12.566592 4558 scope.go:117] "RemoveContainer" containerID="6b02d31ec11fe5435af988e35303a6f66b5afa5ef952ce5f7cc4b3cd5f4d9497" Jan 20 18:08:12 crc kubenswrapper[4558]: E0120 18:08:12.566878 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:08:15 crc kubenswrapper[4558]: I0120 18:08:15.219417 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-79cc674687-m284h" Jan 20 18:08:15 crc kubenswrapper[4558]: I0120 18:08:15.269324 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-mkxmj"] Jan 20 18:08:15 crc kubenswrapper[4558]: I0120 18:08:15.269584 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-mkxmj" podUID="a6a8563d-683b-444a-9739-fca985cbc165" containerName="dnsmasq-dns" containerID="cri-o://ad22d47d6cea203448127e217c0302e1a43a5057f82208f929bc838b64f6be62" gracePeriod=10 Jan 20 18:08:15 crc kubenswrapper[4558]: I0120 18:08:15.585480 4558 generic.go:334] "Generic (PLEG): container finished" podID="a6a8563d-683b-444a-9739-fca985cbc165" containerID="ad22d47d6cea203448127e217c0302e1a43a5057f82208f929bc838b64f6be62" exitCode=0 Jan 20 18:08:15 crc kubenswrapper[4558]: I0120 18:08:15.585559 4558 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-mkxmj" event={"ID":"a6a8563d-683b-444a-9739-fca985cbc165","Type":"ContainerDied","Data":"ad22d47d6cea203448127e217c0302e1a43a5057f82208f929bc838b64f6be62"} Jan 20 18:08:15 crc kubenswrapper[4558]: I0120 18:08:15.636447 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-mkxmj" Jan 20 18:08:15 crc kubenswrapper[4558]: I0120 18:08:15.792634 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6pl82\" (UniqueName: \"kubernetes.io/projected/a6a8563d-683b-444a-9739-fca985cbc165-kube-api-access-6pl82\") pod \"a6a8563d-683b-444a-9739-fca985cbc165\" (UID: \"a6a8563d-683b-444a-9739-fca985cbc165\") " Jan 20 18:08:15 crc kubenswrapper[4558]: I0120 18:08:15.792732 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a6a8563d-683b-444a-9739-fca985cbc165-config\") pod \"a6a8563d-683b-444a-9739-fca985cbc165\" (UID: \"a6a8563d-683b-444a-9739-fca985cbc165\") " Jan 20 18:08:15 crc kubenswrapper[4558]: I0120 18:08:15.792796 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/a6a8563d-683b-444a-9739-fca985cbc165-dnsmasq-svc\") pod \"a6a8563d-683b-444a-9739-fca985cbc165\" (UID: \"a6a8563d-683b-444a-9739-fca985cbc165\") " Jan 20 18:08:15 crc kubenswrapper[4558]: I0120 18:08:15.800918 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a6a8563d-683b-444a-9739-fca985cbc165-kube-api-access-6pl82" (OuterVolumeSpecName: "kube-api-access-6pl82") pod "a6a8563d-683b-444a-9739-fca985cbc165" (UID: "a6a8563d-683b-444a-9739-fca985cbc165"). InnerVolumeSpecName "kube-api-access-6pl82". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:08:15 crc kubenswrapper[4558]: I0120 18:08:15.825478 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a6a8563d-683b-444a-9739-fca985cbc165-config" (OuterVolumeSpecName: "config") pod "a6a8563d-683b-444a-9739-fca985cbc165" (UID: "a6a8563d-683b-444a-9739-fca985cbc165"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:08:15 crc kubenswrapper[4558]: I0120 18:08:15.829659 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a6a8563d-683b-444a-9739-fca985cbc165-dnsmasq-svc" (OuterVolumeSpecName: "dnsmasq-svc") pod "a6a8563d-683b-444a-9739-fca985cbc165" (UID: "a6a8563d-683b-444a-9739-fca985cbc165"). InnerVolumeSpecName "dnsmasq-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:08:15 crc kubenswrapper[4558]: I0120 18:08:15.895219 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6pl82\" (UniqueName: \"kubernetes.io/projected/a6a8563d-683b-444a-9739-fca985cbc165-kube-api-access-6pl82\") on node \"crc\" DevicePath \"\"" Jan 20 18:08:15 crc kubenswrapper[4558]: I0120 18:08:15.895259 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a6a8563d-683b-444a-9739-fca985cbc165-config\") on node \"crc\" DevicePath \"\"" Jan 20 18:08:15 crc kubenswrapper[4558]: I0120 18:08:15.895270 4558 reconciler_common.go:293] "Volume detached for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/a6a8563d-683b-444a-9739-fca985cbc165-dnsmasq-svc\") on node \"crc\" DevicePath \"\"" Jan 20 18:08:16 crc kubenswrapper[4558]: I0120 18:08:16.594600 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-mkxmj" event={"ID":"a6a8563d-683b-444a-9739-fca985cbc165","Type":"ContainerDied","Data":"2df759568e92b812ff83a7b294d1d47a25245684c912240484688ff1084185a0"} Jan 20 18:08:16 crc kubenswrapper[4558]: I0120 18:08:16.594927 4558 scope.go:117] "RemoveContainer" containerID="ad22d47d6cea203448127e217c0302e1a43a5057f82208f929bc838b64f6be62" Jan 20 18:08:16 crc kubenswrapper[4558]: I0120 18:08:16.595030 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-mkxmj" Jan 20 18:08:16 crc kubenswrapper[4558]: I0120 18:08:16.616369 4558 scope.go:117] "RemoveContainer" containerID="e1998b979158206525b096e9dc4b6769c81878febfb4ed24f582cee3262f1951" Jan 20 18:08:16 crc kubenswrapper[4558]: I0120 18:08:16.625062 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-mkxmj"] Jan 20 18:08:16 crc kubenswrapper[4558]: I0120 18:08:16.635730 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-mkxmj"] Jan 20 18:08:17 crc kubenswrapper[4558]: I0120 18:08:17.524475 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/install-certs-ovrd-openstack-edpm-tls-openstack-edpm-tls-bk9cj"] Jan 20 18:08:17 crc kubenswrapper[4558]: E0120 18:08:17.525095 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6a8563d-683b-444a-9739-fca985cbc165" containerName="init" Jan 20 18:08:17 crc kubenswrapper[4558]: I0120 18:08:17.525193 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6a8563d-683b-444a-9739-fca985cbc165" containerName="init" Jan 20 18:08:17 crc kubenswrapper[4558]: E0120 18:08:17.525268 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6a8563d-683b-444a-9739-fca985cbc165" containerName="dnsmasq-dns" Jan 20 18:08:17 crc kubenswrapper[4558]: I0120 18:08:17.525316 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6a8563d-683b-444a-9739-fca985cbc165" containerName="dnsmasq-dns" Jan 20 18:08:17 crc kubenswrapper[4558]: I0120 18:08:17.525516 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a6a8563d-683b-444a-9739-fca985cbc165" containerName="dnsmasq-dns" Jan 20 18:08:17 crc kubenswrapper[4558]: I0120 18:08:17.525996 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/install-certs-ovrd-openstack-edpm-tls-openstack-edpm-tls-bk9cj" Jan 20 18:08:17 crc kubenswrapper[4558]: I0120 18:08:17.528027 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"combined-ca-bundle" Jan 20 18:08:17 crc kubenswrapper[4558]: I0120 18:08:17.528940 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"openstack-edpm-tls-dockercfg-bzbzv" Jan 20 18:08:17 crc kubenswrapper[4558]: I0120 18:08:17.530041 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 18:08:17 crc kubenswrapper[4558]: I0120 18:08:17.530366 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"openstack-edpm-tls-tls-dnsnames-default-certs-0" Jan 20 18:08:17 crc kubenswrapper[4558]: I0120 18:08:17.530635 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-openstack-edpm-tls" Jan 20 18:08:17 crc kubenswrapper[4558]: I0120 18:08:17.530844 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"openstack-edpm-tls-tls-dnsnames-second-certs-0" Jan 20 18:08:17 crc kubenswrapper[4558]: I0120 18:08:17.531037 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 18:08:17 crc kubenswrapper[4558]: I0120 18:08:17.535856 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/install-certs-ovrd-openstack-edpm-tls-openstack-edpm-tls-bk9cj"] Jan 20 18:08:17 crc kubenswrapper[4558]: I0120 18:08:17.619188 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-tls-tls-dnsnames-second-certs-0\" (UniqueName: \"kubernetes.io/projected/4ac8c866-8e21-425c-ac4e-350c32f5c8a7-openstack-edpm-tls-tls-dnsnames-second-certs-0\") pod \"install-certs-ovrd-openstack-edpm-tls-openstack-edpm-tls-bk9cj\" (UID: \"4ac8c866-8e21-425c-ac4e-350c32f5c8a7\") " pod="openstack-kuttl-tests/install-certs-ovrd-openstack-edpm-tls-openstack-edpm-tls-bk9cj" Jan 20 18:08:17 crc kubenswrapper[4558]: I0120 18:08:17.619229 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"install-certs-ovrd-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ac8c866-8e21-425c-ac4e-350c32f5c8a7-install-certs-ovrd-combined-ca-bundle\") pod \"install-certs-ovrd-openstack-edpm-tls-openstack-edpm-tls-bk9cj\" (UID: \"4ac8c866-8e21-425c-ac4e-350c32f5c8a7\") " pod="openstack-kuttl-tests/install-certs-ovrd-openstack-edpm-tls-openstack-edpm-tls-bk9cj" Jan 20 18:08:17 crc kubenswrapper[4558]: I0120 18:08:17.619254 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4ac8c866-8e21-425c-ac4e-350c32f5c8a7-inventory\") pod \"install-certs-ovrd-openstack-edpm-tls-openstack-edpm-tls-bk9cj\" (UID: \"4ac8c866-8e21-425c-ac4e-350c32f5c8a7\") " pod="openstack-kuttl-tests/install-certs-ovrd-openstack-edpm-tls-openstack-edpm-tls-bk9cj" Jan 20 18:08:17 crc kubenswrapper[4558]: I0120 18:08:17.619283 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-tls\" (UniqueName: \"kubernetes.io/secret/4ac8c866-8e21-425c-ac4e-350c32f5c8a7-ssh-key-openstack-edpm-tls\") pod 
\"install-certs-ovrd-openstack-edpm-tls-openstack-edpm-tls-bk9cj\" (UID: \"4ac8c866-8e21-425c-ac4e-350c32f5c8a7\") " pod="openstack-kuttl-tests/install-certs-ovrd-openstack-edpm-tls-openstack-edpm-tls-bk9cj" Jan 20 18:08:17 crc kubenswrapper[4558]: I0120 18:08:17.619327 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-tls-tls-dnsnames-default-certs-0\" (UniqueName: \"kubernetes.io/projected/4ac8c866-8e21-425c-ac4e-350c32f5c8a7-openstack-edpm-tls-tls-dnsnames-default-certs-0\") pod \"install-certs-ovrd-openstack-edpm-tls-openstack-edpm-tls-bk9cj\" (UID: \"4ac8c866-8e21-425c-ac4e-350c32f5c8a7\") " pod="openstack-kuttl-tests/install-certs-ovrd-openstack-edpm-tls-openstack-edpm-tls-bk9cj" Jan 20 18:08:17 crc kubenswrapper[4558]: I0120 18:08:17.619395 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-dnsnames-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ac8c866-8e21-425c-ac4e-350c32f5c8a7-tls-dnsnames-combined-ca-bundle\") pod \"install-certs-ovrd-openstack-edpm-tls-openstack-edpm-tls-bk9cj\" (UID: \"4ac8c866-8e21-425c-ac4e-350c32f5c8a7\") " pod="openstack-kuttl-tests/install-certs-ovrd-openstack-edpm-tls-openstack-edpm-tls-bk9cj" Jan 20 18:08:17 crc kubenswrapper[4558]: I0120 18:08:17.619457 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zmtjm\" (UniqueName: \"kubernetes.io/projected/4ac8c866-8e21-425c-ac4e-350c32f5c8a7-kube-api-access-zmtjm\") pod \"install-certs-ovrd-openstack-edpm-tls-openstack-edpm-tls-bk9cj\" (UID: \"4ac8c866-8e21-425c-ac4e-350c32f5c8a7\") " pod="openstack-kuttl-tests/install-certs-ovrd-openstack-edpm-tls-openstack-edpm-tls-bk9cj" Jan 20 18:08:17 crc kubenswrapper[4558]: I0120 18:08:17.719936 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-tls-tls-dnsnames-second-certs-0\" (UniqueName: \"kubernetes.io/projected/4ac8c866-8e21-425c-ac4e-350c32f5c8a7-openstack-edpm-tls-tls-dnsnames-second-certs-0\") pod \"install-certs-ovrd-openstack-edpm-tls-openstack-edpm-tls-bk9cj\" (UID: \"4ac8c866-8e21-425c-ac4e-350c32f5c8a7\") " pod="openstack-kuttl-tests/install-certs-ovrd-openstack-edpm-tls-openstack-edpm-tls-bk9cj" Jan 20 18:08:17 crc kubenswrapper[4558]: I0120 18:08:17.719985 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"install-certs-ovrd-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ac8c866-8e21-425c-ac4e-350c32f5c8a7-install-certs-ovrd-combined-ca-bundle\") pod \"install-certs-ovrd-openstack-edpm-tls-openstack-edpm-tls-bk9cj\" (UID: \"4ac8c866-8e21-425c-ac4e-350c32f5c8a7\") " pod="openstack-kuttl-tests/install-certs-ovrd-openstack-edpm-tls-openstack-edpm-tls-bk9cj" Jan 20 18:08:17 crc kubenswrapper[4558]: I0120 18:08:17.720013 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4ac8c866-8e21-425c-ac4e-350c32f5c8a7-inventory\") pod \"install-certs-ovrd-openstack-edpm-tls-openstack-edpm-tls-bk9cj\" (UID: \"4ac8c866-8e21-425c-ac4e-350c32f5c8a7\") " pod="openstack-kuttl-tests/install-certs-ovrd-openstack-edpm-tls-openstack-edpm-tls-bk9cj" Jan 20 18:08:17 crc kubenswrapper[4558]: I0120 18:08:17.720050 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-tls\" (UniqueName: 
\"kubernetes.io/secret/4ac8c866-8e21-425c-ac4e-350c32f5c8a7-ssh-key-openstack-edpm-tls\") pod \"install-certs-ovrd-openstack-edpm-tls-openstack-edpm-tls-bk9cj\" (UID: \"4ac8c866-8e21-425c-ac4e-350c32f5c8a7\") " pod="openstack-kuttl-tests/install-certs-ovrd-openstack-edpm-tls-openstack-edpm-tls-bk9cj" Jan 20 18:08:17 crc kubenswrapper[4558]: I0120 18:08:17.720079 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-tls-tls-dnsnames-default-certs-0\" (UniqueName: \"kubernetes.io/projected/4ac8c866-8e21-425c-ac4e-350c32f5c8a7-openstack-edpm-tls-tls-dnsnames-default-certs-0\") pod \"install-certs-ovrd-openstack-edpm-tls-openstack-edpm-tls-bk9cj\" (UID: \"4ac8c866-8e21-425c-ac4e-350c32f5c8a7\") " pod="openstack-kuttl-tests/install-certs-ovrd-openstack-edpm-tls-openstack-edpm-tls-bk9cj" Jan 20 18:08:17 crc kubenswrapper[4558]: I0120 18:08:17.720113 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-dnsnames-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ac8c866-8e21-425c-ac4e-350c32f5c8a7-tls-dnsnames-combined-ca-bundle\") pod \"install-certs-ovrd-openstack-edpm-tls-openstack-edpm-tls-bk9cj\" (UID: \"4ac8c866-8e21-425c-ac4e-350c32f5c8a7\") " pod="openstack-kuttl-tests/install-certs-ovrd-openstack-edpm-tls-openstack-edpm-tls-bk9cj" Jan 20 18:08:17 crc kubenswrapper[4558]: I0120 18:08:17.720147 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zmtjm\" (UniqueName: \"kubernetes.io/projected/4ac8c866-8e21-425c-ac4e-350c32f5c8a7-kube-api-access-zmtjm\") pod \"install-certs-ovrd-openstack-edpm-tls-openstack-edpm-tls-bk9cj\" (UID: \"4ac8c866-8e21-425c-ac4e-350c32f5c8a7\") " pod="openstack-kuttl-tests/install-certs-ovrd-openstack-edpm-tls-openstack-edpm-tls-bk9cj" Jan 20 18:08:17 crc kubenswrapper[4558]: I0120 18:08:17.726705 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4ac8c866-8e21-425c-ac4e-350c32f5c8a7-inventory\") pod \"install-certs-ovrd-openstack-edpm-tls-openstack-edpm-tls-bk9cj\" (UID: \"4ac8c866-8e21-425c-ac4e-350c32f5c8a7\") " pod="openstack-kuttl-tests/install-certs-ovrd-openstack-edpm-tls-openstack-edpm-tls-bk9cj" Jan 20 18:08:17 crc kubenswrapper[4558]: I0120 18:08:17.726783 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-tls-tls-dnsnames-default-certs-0\" (UniqueName: \"kubernetes.io/projected/4ac8c866-8e21-425c-ac4e-350c32f5c8a7-openstack-edpm-tls-tls-dnsnames-default-certs-0\") pod \"install-certs-ovrd-openstack-edpm-tls-openstack-edpm-tls-bk9cj\" (UID: \"4ac8c866-8e21-425c-ac4e-350c32f5c8a7\") " pod="openstack-kuttl-tests/install-certs-ovrd-openstack-edpm-tls-openstack-edpm-tls-bk9cj" Jan 20 18:08:17 crc kubenswrapper[4558]: I0120 18:08:17.726806 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-tls-tls-dnsnames-second-certs-0\" (UniqueName: \"kubernetes.io/projected/4ac8c866-8e21-425c-ac4e-350c32f5c8a7-openstack-edpm-tls-tls-dnsnames-second-certs-0\") pod \"install-certs-ovrd-openstack-edpm-tls-openstack-edpm-tls-bk9cj\" (UID: \"4ac8c866-8e21-425c-ac4e-350c32f5c8a7\") " pod="openstack-kuttl-tests/install-certs-ovrd-openstack-edpm-tls-openstack-edpm-tls-bk9cj" Jan 20 18:08:17 crc kubenswrapper[4558]: I0120 18:08:17.726904 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-tls\" (UniqueName: 
\"kubernetes.io/secret/4ac8c866-8e21-425c-ac4e-350c32f5c8a7-ssh-key-openstack-edpm-tls\") pod \"install-certs-ovrd-openstack-edpm-tls-openstack-edpm-tls-bk9cj\" (UID: \"4ac8c866-8e21-425c-ac4e-350c32f5c8a7\") " pod="openstack-kuttl-tests/install-certs-ovrd-openstack-edpm-tls-openstack-edpm-tls-bk9cj" Jan 20 18:08:17 crc kubenswrapper[4558]: I0120 18:08:17.727031 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"install-certs-ovrd-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ac8c866-8e21-425c-ac4e-350c32f5c8a7-install-certs-ovrd-combined-ca-bundle\") pod \"install-certs-ovrd-openstack-edpm-tls-openstack-edpm-tls-bk9cj\" (UID: \"4ac8c866-8e21-425c-ac4e-350c32f5c8a7\") " pod="openstack-kuttl-tests/install-certs-ovrd-openstack-edpm-tls-openstack-edpm-tls-bk9cj" Jan 20 18:08:17 crc kubenswrapper[4558]: I0120 18:08:17.728459 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-dnsnames-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ac8c866-8e21-425c-ac4e-350c32f5c8a7-tls-dnsnames-combined-ca-bundle\") pod \"install-certs-ovrd-openstack-edpm-tls-openstack-edpm-tls-bk9cj\" (UID: \"4ac8c866-8e21-425c-ac4e-350c32f5c8a7\") " pod="openstack-kuttl-tests/install-certs-ovrd-openstack-edpm-tls-openstack-edpm-tls-bk9cj" Jan 20 18:08:17 crc kubenswrapper[4558]: I0120 18:08:17.735665 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zmtjm\" (UniqueName: \"kubernetes.io/projected/4ac8c866-8e21-425c-ac4e-350c32f5c8a7-kube-api-access-zmtjm\") pod \"install-certs-ovrd-openstack-edpm-tls-openstack-edpm-tls-bk9cj\" (UID: \"4ac8c866-8e21-425c-ac4e-350c32f5c8a7\") " pod="openstack-kuttl-tests/install-certs-ovrd-openstack-edpm-tls-openstack-edpm-tls-bk9cj" Jan 20 18:08:17 crc kubenswrapper[4558]: I0120 18:08:17.841691 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/install-certs-ovrd-openstack-edpm-tls-openstack-edpm-tls-bk9cj" Jan 20 18:08:18 crc kubenswrapper[4558]: I0120 18:08:18.219227 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/install-certs-ovrd-openstack-edpm-tls-openstack-edpm-tls-bk9cj"] Jan 20 18:08:18 crc kubenswrapper[4558]: W0120 18:08:18.223931 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4ac8c866_8e21_425c_ac4e_350c32f5c8a7.slice/crio-4211d953579bb91a079b1a3aac73df8beca5f03d5b75ef9e2b367954ddbde6b3 WatchSource:0}: Error finding container 4211d953579bb91a079b1a3aac73df8beca5f03d5b75ef9e2b367954ddbde6b3: Status 404 returned error can't find the container with id 4211d953579bb91a079b1a3aac73df8beca5f03d5b75ef9e2b367954ddbde6b3 Jan 20 18:08:18 crc kubenswrapper[4558]: I0120 18:08:18.575442 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a6a8563d-683b-444a-9739-fca985cbc165" path="/var/lib/kubelet/pods/a6a8563d-683b-444a-9739-fca985cbc165/volumes" Jan 20 18:08:18 crc kubenswrapper[4558]: I0120 18:08:18.617312 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/install-certs-ovrd-openstack-edpm-tls-openstack-edpm-tls-bk9cj" event={"ID":"4ac8c866-8e21-425c-ac4e-350c32f5c8a7","Type":"ContainerStarted","Data":"4211d953579bb91a079b1a3aac73df8beca5f03d5b75ef9e2b367954ddbde6b3"} Jan 20 18:08:19 crc kubenswrapper[4558]: I0120 18:08:19.628368 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/install-certs-ovrd-openstack-edpm-tls-openstack-edpm-tls-bk9cj" event={"ID":"4ac8c866-8e21-425c-ac4e-350c32f5c8a7","Type":"ContainerStarted","Data":"d9e7d8f26c60d79188cdb6ee5f48b78d1911f3dfef9d85293a3d9cfc2efbd204"} Jan 20 18:08:19 crc kubenswrapper[4558]: I0120 18:08:19.652245 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/install-certs-ovrd-openstack-edpm-tls-openstack-edpm-tls-bk9cj" podStartSLOduration=2.128625977 podStartE2EDuration="2.652227903s" podCreationTimestamp="2026-01-20 18:08:17 +0000 UTC" firstStartedPulling="2026-01-20 18:08:18.226070855 +0000 UTC m=+5191.986408822" lastFinishedPulling="2026-01-20 18:08:18.749672781 +0000 UTC m=+5192.510010748" observedRunningTime="2026-01-20 18:08:19.64334926 +0000 UTC m=+5193.403687227" watchObservedRunningTime="2026-01-20 18:08:19.652227903 +0000 UTC m=+5193.412565871" Jan 20 18:08:21 crc kubenswrapper[4558]: I0120 18:08:21.648422 4558 generic.go:334] "Generic (PLEG): container finished" podID="4ac8c866-8e21-425c-ac4e-350c32f5c8a7" containerID="d9e7d8f26c60d79188cdb6ee5f48b78d1911f3dfef9d85293a3d9cfc2efbd204" exitCode=0 Jan 20 18:08:21 crc kubenswrapper[4558]: I0120 18:08:21.648517 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/install-certs-ovrd-openstack-edpm-tls-openstack-edpm-tls-bk9cj" event={"ID":"4ac8c866-8e21-425c-ac4e-350c32f5c8a7","Type":"ContainerDied","Data":"d9e7d8f26c60d79188cdb6ee5f48b78d1911f3dfef9d85293a3d9cfc2efbd204"} Jan 20 18:08:22 crc kubenswrapper[4558]: I0120 18:08:22.911844 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/install-certs-ovrd-openstack-edpm-tls-openstack-edpm-tls-bk9cj" Jan 20 18:08:22 crc kubenswrapper[4558]: I0120 18:08:22.993459 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zmtjm\" (UniqueName: \"kubernetes.io/projected/4ac8c866-8e21-425c-ac4e-350c32f5c8a7-kube-api-access-zmtjm\") pod \"4ac8c866-8e21-425c-ac4e-350c32f5c8a7\" (UID: \"4ac8c866-8e21-425c-ac4e-350c32f5c8a7\") " Jan 20 18:08:22 crc kubenswrapper[4558]: I0120 18:08:22.993829 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-tls\" (UniqueName: \"kubernetes.io/secret/4ac8c866-8e21-425c-ac4e-350c32f5c8a7-ssh-key-openstack-edpm-tls\") pod \"4ac8c866-8e21-425c-ac4e-350c32f5c8a7\" (UID: \"4ac8c866-8e21-425c-ac4e-350c32f5c8a7\") " Jan 20 18:08:22 crc kubenswrapper[4558]: I0120 18:08:22.993876 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-tls-tls-dnsnames-default-certs-0\" (UniqueName: \"kubernetes.io/projected/4ac8c866-8e21-425c-ac4e-350c32f5c8a7-openstack-edpm-tls-tls-dnsnames-default-certs-0\") pod \"4ac8c866-8e21-425c-ac4e-350c32f5c8a7\" (UID: \"4ac8c866-8e21-425c-ac4e-350c32f5c8a7\") " Jan 20 18:08:22 crc kubenswrapper[4558]: I0120 18:08:22.993910 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4ac8c866-8e21-425c-ac4e-350c32f5c8a7-inventory\") pod \"4ac8c866-8e21-425c-ac4e-350c32f5c8a7\" (UID: \"4ac8c866-8e21-425c-ac4e-350c32f5c8a7\") " Jan 20 18:08:22 crc kubenswrapper[4558]: I0120 18:08:22.993928 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-tls-tls-dnsnames-second-certs-0\" (UniqueName: \"kubernetes.io/projected/4ac8c866-8e21-425c-ac4e-350c32f5c8a7-openstack-edpm-tls-tls-dnsnames-second-certs-0\") pod \"4ac8c866-8e21-425c-ac4e-350c32f5c8a7\" (UID: \"4ac8c866-8e21-425c-ac4e-350c32f5c8a7\") " Jan 20 18:08:22 crc kubenswrapper[4558]: I0120 18:08:22.993955 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"install-certs-ovrd-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ac8c866-8e21-425c-ac4e-350c32f5c8a7-install-certs-ovrd-combined-ca-bundle\") pod \"4ac8c866-8e21-425c-ac4e-350c32f5c8a7\" (UID: \"4ac8c866-8e21-425c-ac4e-350c32f5c8a7\") " Jan 20 18:08:22 crc kubenswrapper[4558]: I0120 18:08:22.993997 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tls-dnsnames-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ac8c866-8e21-425c-ac4e-350c32f5c8a7-tls-dnsnames-combined-ca-bundle\") pod \"4ac8c866-8e21-425c-ac4e-350c32f5c8a7\" (UID: \"4ac8c866-8e21-425c-ac4e-350c32f5c8a7\") " Jan 20 18:08:22 crc kubenswrapper[4558]: I0120 18:08:22.999373 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4ac8c866-8e21-425c-ac4e-350c32f5c8a7-openstack-edpm-tls-tls-dnsnames-second-certs-0" (OuterVolumeSpecName: "openstack-edpm-tls-tls-dnsnames-second-certs-0") pod "4ac8c866-8e21-425c-ac4e-350c32f5c8a7" (UID: "4ac8c866-8e21-425c-ac4e-350c32f5c8a7"). InnerVolumeSpecName "openstack-edpm-tls-tls-dnsnames-second-certs-0". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:08:23 crc kubenswrapper[4558]: I0120 18:08:23.000831 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4ac8c866-8e21-425c-ac4e-350c32f5c8a7-openstack-edpm-tls-tls-dnsnames-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-tls-tls-dnsnames-default-certs-0") pod "4ac8c866-8e21-425c-ac4e-350c32f5c8a7" (UID: "4ac8c866-8e21-425c-ac4e-350c32f5c8a7"). InnerVolumeSpecName "openstack-edpm-tls-tls-dnsnames-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:08:23 crc kubenswrapper[4558]: I0120 18:08:23.000840 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4ac8c866-8e21-425c-ac4e-350c32f5c8a7-kube-api-access-zmtjm" (OuterVolumeSpecName: "kube-api-access-zmtjm") pod "4ac8c866-8e21-425c-ac4e-350c32f5c8a7" (UID: "4ac8c866-8e21-425c-ac4e-350c32f5c8a7"). InnerVolumeSpecName "kube-api-access-zmtjm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:08:23 crc kubenswrapper[4558]: I0120 18:08:23.001087 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4ac8c866-8e21-425c-ac4e-350c32f5c8a7-tls-dnsnames-combined-ca-bundle" (OuterVolumeSpecName: "tls-dnsnames-combined-ca-bundle") pod "4ac8c866-8e21-425c-ac4e-350c32f5c8a7" (UID: "4ac8c866-8e21-425c-ac4e-350c32f5c8a7"). InnerVolumeSpecName "tls-dnsnames-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:08:23 crc kubenswrapper[4558]: I0120 18:08:23.001422 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4ac8c866-8e21-425c-ac4e-350c32f5c8a7-install-certs-ovrd-combined-ca-bundle" (OuterVolumeSpecName: "install-certs-ovrd-combined-ca-bundle") pod "4ac8c866-8e21-425c-ac4e-350c32f5c8a7" (UID: "4ac8c866-8e21-425c-ac4e-350c32f5c8a7"). InnerVolumeSpecName "install-certs-ovrd-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:08:23 crc kubenswrapper[4558]: I0120 18:08:23.014447 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4ac8c866-8e21-425c-ac4e-350c32f5c8a7-inventory" (OuterVolumeSpecName: "inventory") pod "4ac8c866-8e21-425c-ac4e-350c32f5c8a7" (UID: "4ac8c866-8e21-425c-ac4e-350c32f5c8a7"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:08:23 crc kubenswrapper[4558]: I0120 18:08:23.015125 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4ac8c866-8e21-425c-ac4e-350c32f5c8a7-ssh-key-openstack-edpm-tls" (OuterVolumeSpecName: "ssh-key-openstack-edpm-tls") pod "4ac8c866-8e21-425c-ac4e-350c32f5c8a7" (UID: "4ac8c866-8e21-425c-ac4e-350c32f5c8a7"). InnerVolumeSpecName "ssh-key-openstack-edpm-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:08:23 crc kubenswrapper[4558]: I0120 18:08:23.096059 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zmtjm\" (UniqueName: \"kubernetes.io/projected/4ac8c866-8e21-425c-ac4e-350c32f5c8a7-kube-api-access-zmtjm\") on node \"crc\" DevicePath \"\"" Jan 20 18:08:23 crc kubenswrapper[4558]: I0120 18:08:23.096086 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-tls\" (UniqueName: \"kubernetes.io/secret/4ac8c866-8e21-425c-ac4e-350c32f5c8a7-ssh-key-openstack-edpm-tls\") on node \"crc\" DevicePath \"\"" Jan 20 18:08:23 crc kubenswrapper[4558]: I0120 18:08:23.096099 4558 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-tls-tls-dnsnames-default-certs-0\" (UniqueName: \"kubernetes.io/projected/4ac8c866-8e21-425c-ac4e-350c32f5c8a7-openstack-edpm-tls-tls-dnsnames-default-certs-0\") on node \"crc\" DevicePath \"\"" Jan 20 18:08:23 crc kubenswrapper[4558]: I0120 18:08:23.096112 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/4ac8c866-8e21-425c-ac4e-350c32f5c8a7-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:08:23 crc kubenswrapper[4558]: I0120 18:08:23.096122 4558 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-tls-tls-dnsnames-second-certs-0\" (UniqueName: \"kubernetes.io/projected/4ac8c866-8e21-425c-ac4e-350c32f5c8a7-openstack-edpm-tls-tls-dnsnames-second-certs-0\") on node \"crc\" DevicePath \"\"" Jan 20 18:08:23 crc kubenswrapper[4558]: I0120 18:08:23.096133 4558 reconciler_common.go:293] "Volume detached for volume \"install-certs-ovrd-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ac8c866-8e21-425c-ac4e-350c32f5c8a7-install-certs-ovrd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:08:23 crc kubenswrapper[4558]: I0120 18:08:23.096144 4558 reconciler_common.go:293] "Volume detached for volume \"tls-dnsnames-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ac8c866-8e21-425c-ac4e-350c32f5c8a7-tls-dnsnames-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:08:23 crc kubenswrapper[4558]: I0120 18:08:23.667666 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/install-certs-ovrd-openstack-edpm-tls-openstack-edpm-tls-bk9cj" event={"ID":"4ac8c866-8e21-425c-ac4e-350c32f5c8a7","Type":"ContainerDied","Data":"4211d953579bb91a079b1a3aac73df8beca5f03d5b75ef9e2b367954ddbde6b3"} Jan 20 18:08:23 crc kubenswrapper[4558]: I0120 18:08:23.667970 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4211d953579bb91a079b1a3aac73df8beca5f03d5b75ef9e2b367954ddbde6b3" Jan 20 18:08:23 crc kubenswrapper[4558]: I0120 18:08:23.667729 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/install-certs-ovrd-openstack-edpm-tls-openstack-edpm-tls-bk9cj" Jan 20 18:08:23 crc kubenswrapper[4558]: I0120 18:08:23.728109 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/tls-dnsnames-openstack-edpm-tls-openstack-edpm-tls-csjhv"] Jan 20 18:08:23 crc kubenswrapper[4558]: E0120 18:08:23.728485 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ac8c866-8e21-425c-ac4e-350c32f5c8a7" containerName="install-certs-ovrd-openstack-edpm-tls-openstack-edpm-tls" Jan 20 18:08:23 crc kubenswrapper[4558]: I0120 18:08:23.728506 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ac8c866-8e21-425c-ac4e-350c32f5c8a7" containerName="install-certs-ovrd-openstack-edpm-tls-openstack-edpm-tls" Jan 20 18:08:23 crc kubenswrapper[4558]: I0120 18:08:23.728659 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="4ac8c866-8e21-425c-ac4e-350c32f5c8a7" containerName="install-certs-ovrd-openstack-edpm-tls-openstack-edpm-tls" Jan 20 18:08:23 crc kubenswrapper[4558]: I0120 18:08:23.729233 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/tls-dnsnames-openstack-edpm-tls-openstack-edpm-tls-csjhv" Jan 20 18:08:23 crc kubenswrapper[4558]: I0120 18:08:23.731633 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"combined-ca-bundle" Jan 20 18:08:23 crc kubenswrapper[4558]: I0120 18:08:23.731803 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 18:08:23 crc kubenswrapper[4558]: I0120 18:08:23.731983 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 18:08:23 crc kubenswrapper[4558]: I0120 18:08:23.731997 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"openstack-edpm-tls-dockercfg-bzbzv" Jan 20 18:08:23 crc kubenswrapper[4558]: I0120 18:08:23.733642 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-openstack-edpm-tls" Jan 20 18:08:23 crc kubenswrapper[4558]: I0120 18:08:23.738800 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/tls-dnsnames-openstack-edpm-tls-openstack-edpm-tls-csjhv"] Jan 20 18:08:23 crc kubenswrapper[4558]: I0120 18:08:23.805372 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-tls\" (UniqueName: \"kubernetes.io/secret/6cff89d4-4dca-4d8b-98f1-0d94ebb8fad8-ssh-key-openstack-edpm-tls\") pod \"tls-dnsnames-openstack-edpm-tls-openstack-edpm-tls-csjhv\" (UID: \"6cff89d4-4dca-4d8b-98f1-0d94ebb8fad8\") " pod="openstack-kuttl-tests/tls-dnsnames-openstack-edpm-tls-openstack-edpm-tls-csjhv" Jan 20 18:08:23 crc kubenswrapper[4558]: I0120 18:08:23.805473 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6cff89d4-4dca-4d8b-98f1-0d94ebb8fad8-inventory\") pod \"tls-dnsnames-openstack-edpm-tls-openstack-edpm-tls-csjhv\" (UID: \"6cff89d4-4dca-4d8b-98f1-0d94ebb8fad8\") " pod="openstack-kuttl-tests/tls-dnsnames-openstack-edpm-tls-openstack-edpm-tls-csjhv" Jan 20 18:08:23 crc kubenswrapper[4558]: I0120 18:08:23.805567 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"tls-dnsnames-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6cff89d4-4dca-4d8b-98f1-0d94ebb8fad8-tls-dnsnames-combined-ca-bundle\") pod \"tls-dnsnames-openstack-edpm-tls-openstack-edpm-tls-csjhv\" (UID: \"6cff89d4-4dca-4d8b-98f1-0d94ebb8fad8\") " pod="openstack-kuttl-tests/tls-dnsnames-openstack-edpm-tls-openstack-edpm-tls-csjhv" Jan 20 18:08:23 crc kubenswrapper[4558]: I0120 18:08:23.805608 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xjnw6\" (UniqueName: \"kubernetes.io/projected/6cff89d4-4dca-4d8b-98f1-0d94ebb8fad8-kube-api-access-xjnw6\") pod \"tls-dnsnames-openstack-edpm-tls-openstack-edpm-tls-csjhv\" (UID: \"6cff89d4-4dca-4d8b-98f1-0d94ebb8fad8\") " pod="openstack-kuttl-tests/tls-dnsnames-openstack-edpm-tls-openstack-edpm-tls-csjhv" Jan 20 18:08:23 crc kubenswrapper[4558]: I0120 18:08:23.906744 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xjnw6\" (UniqueName: \"kubernetes.io/projected/6cff89d4-4dca-4d8b-98f1-0d94ebb8fad8-kube-api-access-xjnw6\") pod \"tls-dnsnames-openstack-edpm-tls-openstack-edpm-tls-csjhv\" (UID: \"6cff89d4-4dca-4d8b-98f1-0d94ebb8fad8\") " pod="openstack-kuttl-tests/tls-dnsnames-openstack-edpm-tls-openstack-edpm-tls-csjhv" Jan 20 18:08:23 crc kubenswrapper[4558]: I0120 18:08:23.906851 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-tls\" (UniqueName: \"kubernetes.io/secret/6cff89d4-4dca-4d8b-98f1-0d94ebb8fad8-ssh-key-openstack-edpm-tls\") pod \"tls-dnsnames-openstack-edpm-tls-openstack-edpm-tls-csjhv\" (UID: \"6cff89d4-4dca-4d8b-98f1-0d94ebb8fad8\") " pod="openstack-kuttl-tests/tls-dnsnames-openstack-edpm-tls-openstack-edpm-tls-csjhv" Jan 20 18:08:23 crc kubenswrapper[4558]: I0120 18:08:23.906881 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6cff89d4-4dca-4d8b-98f1-0d94ebb8fad8-inventory\") pod \"tls-dnsnames-openstack-edpm-tls-openstack-edpm-tls-csjhv\" (UID: \"6cff89d4-4dca-4d8b-98f1-0d94ebb8fad8\") " pod="openstack-kuttl-tests/tls-dnsnames-openstack-edpm-tls-openstack-edpm-tls-csjhv" Jan 20 18:08:23 crc kubenswrapper[4558]: I0120 18:08:23.906925 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-dnsnames-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6cff89d4-4dca-4d8b-98f1-0d94ebb8fad8-tls-dnsnames-combined-ca-bundle\") pod \"tls-dnsnames-openstack-edpm-tls-openstack-edpm-tls-csjhv\" (UID: \"6cff89d4-4dca-4d8b-98f1-0d94ebb8fad8\") " pod="openstack-kuttl-tests/tls-dnsnames-openstack-edpm-tls-openstack-edpm-tls-csjhv" Jan 20 18:08:23 crc kubenswrapper[4558]: I0120 18:08:23.911124 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6cff89d4-4dca-4d8b-98f1-0d94ebb8fad8-inventory\") pod \"tls-dnsnames-openstack-edpm-tls-openstack-edpm-tls-csjhv\" (UID: \"6cff89d4-4dca-4d8b-98f1-0d94ebb8fad8\") " pod="openstack-kuttl-tests/tls-dnsnames-openstack-edpm-tls-openstack-edpm-tls-csjhv" Jan 20 18:08:23 crc kubenswrapper[4558]: I0120 18:08:23.912038 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-dnsnames-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6cff89d4-4dca-4d8b-98f1-0d94ebb8fad8-tls-dnsnames-combined-ca-bundle\") pod \"tls-dnsnames-openstack-edpm-tls-openstack-edpm-tls-csjhv\" (UID: \"6cff89d4-4dca-4d8b-98f1-0d94ebb8fad8\") " 
pod="openstack-kuttl-tests/tls-dnsnames-openstack-edpm-tls-openstack-edpm-tls-csjhv" Jan 20 18:08:23 crc kubenswrapper[4558]: I0120 18:08:23.912794 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-tls\" (UniqueName: \"kubernetes.io/secret/6cff89d4-4dca-4d8b-98f1-0d94ebb8fad8-ssh-key-openstack-edpm-tls\") pod \"tls-dnsnames-openstack-edpm-tls-openstack-edpm-tls-csjhv\" (UID: \"6cff89d4-4dca-4d8b-98f1-0d94ebb8fad8\") " pod="openstack-kuttl-tests/tls-dnsnames-openstack-edpm-tls-openstack-edpm-tls-csjhv" Jan 20 18:08:23 crc kubenswrapper[4558]: I0120 18:08:23.921669 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xjnw6\" (UniqueName: \"kubernetes.io/projected/6cff89d4-4dca-4d8b-98f1-0d94ebb8fad8-kube-api-access-xjnw6\") pod \"tls-dnsnames-openstack-edpm-tls-openstack-edpm-tls-csjhv\" (UID: \"6cff89d4-4dca-4d8b-98f1-0d94ebb8fad8\") " pod="openstack-kuttl-tests/tls-dnsnames-openstack-edpm-tls-openstack-edpm-tls-csjhv" Jan 20 18:08:24 crc kubenswrapper[4558]: I0120 18:08:24.046438 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/tls-dnsnames-openstack-edpm-tls-openstack-edpm-tls-csjhv" Jan 20 18:08:24 crc kubenswrapper[4558]: I0120 18:08:24.432597 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/tls-dnsnames-openstack-edpm-tls-openstack-edpm-tls-csjhv"] Jan 20 18:08:24 crc kubenswrapper[4558]: I0120 18:08:24.677866 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/tls-dnsnames-openstack-edpm-tls-openstack-edpm-tls-csjhv" event={"ID":"6cff89d4-4dca-4d8b-98f1-0d94ebb8fad8","Type":"ContainerStarted","Data":"04c72825b21d76dba8f5029939b377bf1e38d3ee810e1fa09e4a82926a4ca44f"} Jan 20 18:08:25 crc kubenswrapper[4558]: I0120 18:08:25.566058 4558 scope.go:117] "RemoveContainer" containerID="6b02d31ec11fe5435af988e35303a6f66b5afa5ef952ce5f7cc4b3cd5f4d9497" Jan 20 18:08:25 crc kubenswrapper[4558]: E0120 18:08:25.566580 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:08:25 crc kubenswrapper[4558]: I0120 18:08:25.723502 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/tls-dnsnames-openstack-edpm-tls-openstack-edpm-tls-csjhv" event={"ID":"6cff89d4-4dca-4d8b-98f1-0d94ebb8fad8","Type":"ContainerStarted","Data":"33e3f76ef54188f55f12e6b1007e9b83a0fe891ae572d234e642967bbecc1af7"} Jan 20 18:08:25 crc kubenswrapper[4558]: I0120 18:08:25.737573 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/tls-dnsnames-openstack-edpm-tls-openstack-edpm-tls-csjhv" podStartSLOduration=2.173926522 podStartE2EDuration="2.737559384s" podCreationTimestamp="2026-01-20 18:08:23 +0000 UTC" firstStartedPulling="2026-01-20 18:08:24.438382363 +0000 UTC m=+5198.198720330" lastFinishedPulling="2026-01-20 18:08:25.002015225 +0000 UTC m=+5198.762353192" observedRunningTime="2026-01-20 18:08:25.735535528 +0000 UTC m=+5199.495873496" watchObservedRunningTime="2026-01-20 18:08:25.737559384 +0000 UTC m=+5199.497897351" Jan 20 18:08:27 crc kubenswrapper[4558]: I0120 
18:08:27.743614 4558 generic.go:334] "Generic (PLEG): container finished" podID="6cff89d4-4dca-4d8b-98f1-0d94ebb8fad8" containerID="33e3f76ef54188f55f12e6b1007e9b83a0fe891ae572d234e642967bbecc1af7" exitCode=0 Jan 20 18:08:27 crc kubenswrapper[4558]: I0120 18:08:27.743694 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/tls-dnsnames-openstack-edpm-tls-openstack-edpm-tls-csjhv" event={"ID":"6cff89d4-4dca-4d8b-98f1-0d94ebb8fad8","Type":"ContainerDied","Data":"33e3f76ef54188f55f12e6b1007e9b83a0fe891ae572d234e642967bbecc1af7"} Jan 20 18:08:28 crc kubenswrapper[4558]: I0120 18:08:28.990984 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/tls-dnsnames-openstack-edpm-tls-openstack-edpm-tls-csjhv" Jan 20 18:08:29 crc kubenswrapper[4558]: I0120 18:08:29.185830 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6cff89d4-4dca-4d8b-98f1-0d94ebb8fad8-inventory\") pod \"6cff89d4-4dca-4d8b-98f1-0d94ebb8fad8\" (UID: \"6cff89d4-4dca-4d8b-98f1-0d94ebb8fad8\") " Jan 20 18:08:29 crc kubenswrapper[4558]: I0120 18:08:29.185950 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-tls\" (UniqueName: \"kubernetes.io/secret/6cff89d4-4dca-4d8b-98f1-0d94ebb8fad8-ssh-key-openstack-edpm-tls\") pod \"6cff89d4-4dca-4d8b-98f1-0d94ebb8fad8\" (UID: \"6cff89d4-4dca-4d8b-98f1-0d94ebb8fad8\") " Jan 20 18:08:29 crc kubenswrapper[4558]: I0120 18:08:29.185974 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tls-dnsnames-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6cff89d4-4dca-4d8b-98f1-0d94ebb8fad8-tls-dnsnames-combined-ca-bundle\") pod \"6cff89d4-4dca-4d8b-98f1-0d94ebb8fad8\" (UID: \"6cff89d4-4dca-4d8b-98f1-0d94ebb8fad8\") " Jan 20 18:08:29 crc kubenswrapper[4558]: I0120 18:08:29.186685 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xjnw6\" (UniqueName: \"kubernetes.io/projected/6cff89d4-4dca-4d8b-98f1-0d94ebb8fad8-kube-api-access-xjnw6\") pod \"6cff89d4-4dca-4d8b-98f1-0d94ebb8fad8\" (UID: \"6cff89d4-4dca-4d8b-98f1-0d94ebb8fad8\") " Jan 20 18:08:29 crc kubenswrapper[4558]: I0120 18:08:29.192536 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6cff89d4-4dca-4d8b-98f1-0d94ebb8fad8-tls-dnsnames-combined-ca-bundle" (OuterVolumeSpecName: "tls-dnsnames-combined-ca-bundle") pod "6cff89d4-4dca-4d8b-98f1-0d94ebb8fad8" (UID: "6cff89d4-4dca-4d8b-98f1-0d94ebb8fad8"). InnerVolumeSpecName "tls-dnsnames-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:08:29 crc kubenswrapper[4558]: I0120 18:08:29.192745 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6cff89d4-4dca-4d8b-98f1-0d94ebb8fad8-kube-api-access-xjnw6" (OuterVolumeSpecName: "kube-api-access-xjnw6") pod "6cff89d4-4dca-4d8b-98f1-0d94ebb8fad8" (UID: "6cff89d4-4dca-4d8b-98f1-0d94ebb8fad8"). InnerVolumeSpecName "kube-api-access-xjnw6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:08:29 crc kubenswrapper[4558]: I0120 18:08:29.205355 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6cff89d4-4dca-4d8b-98f1-0d94ebb8fad8-ssh-key-openstack-edpm-tls" (OuterVolumeSpecName: "ssh-key-openstack-edpm-tls") pod "6cff89d4-4dca-4d8b-98f1-0d94ebb8fad8" (UID: "6cff89d4-4dca-4d8b-98f1-0d94ebb8fad8"). InnerVolumeSpecName "ssh-key-openstack-edpm-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:08:29 crc kubenswrapper[4558]: I0120 18:08:29.205763 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6cff89d4-4dca-4d8b-98f1-0d94ebb8fad8-inventory" (OuterVolumeSpecName: "inventory") pod "6cff89d4-4dca-4d8b-98f1-0d94ebb8fad8" (UID: "6cff89d4-4dca-4d8b-98f1-0d94ebb8fad8"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:08:29 crc kubenswrapper[4558]: I0120 18:08:29.288786 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-tls\" (UniqueName: \"kubernetes.io/secret/6cff89d4-4dca-4d8b-98f1-0d94ebb8fad8-ssh-key-openstack-edpm-tls\") on node \"crc\" DevicePath \"\"" Jan 20 18:08:29 crc kubenswrapper[4558]: I0120 18:08:29.288977 4558 reconciler_common.go:293] "Volume detached for volume \"tls-dnsnames-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6cff89d4-4dca-4d8b-98f1-0d94ebb8fad8-tls-dnsnames-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:08:29 crc kubenswrapper[4558]: I0120 18:08:29.289049 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xjnw6\" (UniqueName: \"kubernetes.io/projected/6cff89d4-4dca-4d8b-98f1-0d94ebb8fad8-kube-api-access-xjnw6\") on node \"crc\" DevicePath \"\"" Jan 20 18:08:29 crc kubenswrapper[4558]: I0120 18:08:29.289099 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6cff89d4-4dca-4d8b-98f1-0d94ebb8fad8-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:08:29 crc kubenswrapper[4558]: I0120 18:08:29.762368 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/tls-dnsnames-openstack-edpm-tls-openstack-edpm-tls-csjhv" event={"ID":"6cff89d4-4dca-4d8b-98f1-0d94ebb8fad8","Type":"ContainerDied","Data":"04c72825b21d76dba8f5029939b377bf1e38d3ee810e1fa09e4a82926a4ca44f"} Jan 20 18:08:29 crc kubenswrapper[4558]: I0120 18:08:29.762421 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="04c72825b21d76dba8f5029939b377bf1e38d3ee810e1fa09e4a82926a4ca44f" Jan 20 18:08:29 crc kubenswrapper[4558]: I0120 18:08:29.762430 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/tls-dnsnames-openstack-edpm-tls-openstack-edpm-tls-csjhv" Jan 20 18:08:31 crc kubenswrapper[4558]: I0120 18:08:31.208309 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/install-certs-ovrd-openstack-edpm-tls-ovrd-openstack-edpm-jkd5h"] Jan 20 18:08:31 crc kubenswrapper[4558]: E0120 18:08:31.208854 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6cff89d4-4dca-4d8b-98f1-0d94ebb8fad8" containerName="tls-dnsnames-openstack-edpm-tls-openstack-edpm-tls" Jan 20 18:08:31 crc kubenswrapper[4558]: I0120 18:08:31.208869 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6cff89d4-4dca-4d8b-98f1-0d94ebb8fad8" containerName="tls-dnsnames-openstack-edpm-tls-openstack-edpm-tls" Jan 20 18:08:31 crc kubenswrapper[4558]: I0120 18:08:31.209072 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="6cff89d4-4dca-4d8b-98f1-0d94ebb8fad8" containerName="tls-dnsnames-openstack-edpm-tls-openstack-edpm-tls" Jan 20 18:08:31 crc kubenswrapper[4558]: I0120 18:08:31.209523 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/install-certs-ovrd-openstack-edpm-tls-ovrd-openstack-edpm-jkd5h" Jan 20 18:08:31 crc kubenswrapper[4558]: I0120 18:08:31.211893 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"combined-ca-bundle" Jan 20 18:08:31 crc kubenswrapper[4558]: I0120 18:08:31.212052 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"openstack-edpm-tls-tls-dns-ips-default-certs-0" Jan 20 18:08:31 crc kubenswrapper[4558]: I0120 18:08:31.212194 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"openstack-edpm-tls-dockercfg-bzbzv" Jan 20 18:08:31 crc kubenswrapper[4558]: I0120 18:08:31.212389 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-openstack-edpm-tls" Jan 20 18:08:31 crc kubenswrapper[4558]: I0120 18:08:31.212512 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 18:08:31 crc kubenswrapper[4558]: I0120 18:08:31.213324 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 18:08:31 crc kubenswrapper[4558]: I0120 18:08:31.213472 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"openstack-edpm-tls-custom-tls-dns-default-certs-0" Jan 20 18:08:31 crc kubenswrapper[4558]: I0120 18:08:31.215009 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-tls\" (UniqueName: \"kubernetes.io/secret/e7f7df71-1122-4bcf-bc50-84b25abb680e-ssh-key-openstack-edpm-tls\") pod \"install-certs-ovrd-openstack-edpm-tls-ovrd-openstack-edpm-jkd5h\" (UID: \"e7f7df71-1122-4bcf-bc50-84b25abb680e\") " pod="openstack-kuttl-tests/install-certs-ovrd-openstack-edpm-tls-ovrd-openstack-edpm-jkd5h" Jan 20 18:08:31 crc kubenswrapper[4558]: I0120 18:08:31.215054 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-tls-tls-dns-ips-default-certs-0\" (UniqueName: \"kubernetes.io/projected/e7f7df71-1122-4bcf-bc50-84b25abb680e-openstack-edpm-tls-tls-dns-ips-default-certs-0\") pod \"install-certs-ovrd-openstack-edpm-tls-ovrd-openstack-edpm-jkd5h\" (UID: 
\"e7f7df71-1122-4bcf-bc50-84b25abb680e\") " pod="openstack-kuttl-tests/install-certs-ovrd-openstack-edpm-tls-ovrd-openstack-edpm-jkd5h" Jan 20 18:08:31 crc kubenswrapper[4558]: I0120 18:08:31.215081 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"install-certs-ovrd-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7f7df71-1122-4bcf-bc50-84b25abb680e-install-certs-ovrd-combined-ca-bundle\") pod \"install-certs-ovrd-openstack-edpm-tls-ovrd-openstack-edpm-jkd5h\" (UID: \"e7f7df71-1122-4bcf-bc50-84b25abb680e\") " pod="openstack-kuttl-tests/install-certs-ovrd-openstack-edpm-tls-ovrd-openstack-edpm-jkd5h" Jan 20 18:08:31 crc kubenswrapper[4558]: I0120 18:08:31.215106 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-tls-dns-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7f7df71-1122-4bcf-bc50-84b25abb680e-custom-tls-dns-combined-ca-bundle\") pod \"install-certs-ovrd-openstack-edpm-tls-ovrd-openstack-edpm-jkd5h\" (UID: \"e7f7df71-1122-4bcf-bc50-84b25abb680e\") " pod="openstack-kuttl-tests/install-certs-ovrd-openstack-edpm-tls-ovrd-openstack-edpm-jkd5h" Jan 20 18:08:31 crc kubenswrapper[4558]: I0120 18:08:31.215132 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e7f7df71-1122-4bcf-bc50-84b25abb680e-inventory\") pod \"install-certs-ovrd-openstack-edpm-tls-ovrd-openstack-edpm-jkd5h\" (UID: \"e7f7df71-1122-4bcf-bc50-84b25abb680e\") " pod="openstack-kuttl-tests/install-certs-ovrd-openstack-edpm-tls-ovrd-openstack-edpm-jkd5h" Jan 20 18:08:31 crc kubenswrapper[4558]: I0120 18:08:31.215150 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-dns-ips-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7f7df71-1122-4bcf-bc50-84b25abb680e-tls-dns-ips-combined-ca-bundle\") pod \"install-certs-ovrd-openstack-edpm-tls-ovrd-openstack-edpm-jkd5h\" (UID: \"e7f7df71-1122-4bcf-bc50-84b25abb680e\") " pod="openstack-kuttl-tests/install-certs-ovrd-openstack-edpm-tls-ovrd-openstack-edpm-jkd5h" Jan 20 18:08:31 crc kubenswrapper[4558]: I0120 18:08:31.215192 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-tls-custom-tls-dns-default-certs-0\" (UniqueName: \"kubernetes.io/projected/e7f7df71-1122-4bcf-bc50-84b25abb680e-openstack-edpm-tls-custom-tls-dns-default-certs-0\") pod \"install-certs-ovrd-openstack-edpm-tls-ovrd-openstack-edpm-jkd5h\" (UID: \"e7f7df71-1122-4bcf-bc50-84b25abb680e\") " pod="openstack-kuttl-tests/install-certs-ovrd-openstack-edpm-tls-ovrd-openstack-edpm-jkd5h" Jan 20 18:08:31 crc kubenswrapper[4558]: I0120 18:08:31.215215 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p6gjt\" (UniqueName: \"kubernetes.io/projected/e7f7df71-1122-4bcf-bc50-84b25abb680e-kube-api-access-p6gjt\") pod \"install-certs-ovrd-openstack-edpm-tls-ovrd-openstack-edpm-jkd5h\" (UID: \"e7f7df71-1122-4bcf-bc50-84b25abb680e\") " pod="openstack-kuttl-tests/install-certs-ovrd-openstack-edpm-tls-ovrd-openstack-edpm-jkd5h" Jan 20 18:08:31 crc kubenswrapper[4558]: I0120 18:08:31.224119 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/install-certs-ovrd-openstack-edpm-tls-ovrd-openstack-edpm-jkd5h"] Jan 20 18:08:31 crc kubenswrapper[4558]: I0120 18:08:31.316982 4558 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-tls\" (UniqueName: \"kubernetes.io/secret/e7f7df71-1122-4bcf-bc50-84b25abb680e-ssh-key-openstack-edpm-tls\") pod \"install-certs-ovrd-openstack-edpm-tls-ovrd-openstack-edpm-jkd5h\" (UID: \"e7f7df71-1122-4bcf-bc50-84b25abb680e\") " pod="openstack-kuttl-tests/install-certs-ovrd-openstack-edpm-tls-ovrd-openstack-edpm-jkd5h" Jan 20 18:08:31 crc kubenswrapper[4558]: I0120 18:08:31.317038 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-tls-tls-dns-ips-default-certs-0\" (UniqueName: \"kubernetes.io/projected/e7f7df71-1122-4bcf-bc50-84b25abb680e-openstack-edpm-tls-tls-dns-ips-default-certs-0\") pod \"install-certs-ovrd-openstack-edpm-tls-ovrd-openstack-edpm-jkd5h\" (UID: \"e7f7df71-1122-4bcf-bc50-84b25abb680e\") " pod="openstack-kuttl-tests/install-certs-ovrd-openstack-edpm-tls-ovrd-openstack-edpm-jkd5h" Jan 20 18:08:31 crc kubenswrapper[4558]: I0120 18:08:31.317064 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"install-certs-ovrd-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7f7df71-1122-4bcf-bc50-84b25abb680e-install-certs-ovrd-combined-ca-bundle\") pod \"install-certs-ovrd-openstack-edpm-tls-ovrd-openstack-edpm-jkd5h\" (UID: \"e7f7df71-1122-4bcf-bc50-84b25abb680e\") " pod="openstack-kuttl-tests/install-certs-ovrd-openstack-edpm-tls-ovrd-openstack-edpm-jkd5h" Jan 20 18:08:31 crc kubenswrapper[4558]: I0120 18:08:31.317089 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-tls-dns-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7f7df71-1122-4bcf-bc50-84b25abb680e-custom-tls-dns-combined-ca-bundle\") pod \"install-certs-ovrd-openstack-edpm-tls-ovrd-openstack-edpm-jkd5h\" (UID: \"e7f7df71-1122-4bcf-bc50-84b25abb680e\") " pod="openstack-kuttl-tests/install-certs-ovrd-openstack-edpm-tls-ovrd-openstack-edpm-jkd5h" Jan 20 18:08:31 crc kubenswrapper[4558]: I0120 18:08:31.317114 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e7f7df71-1122-4bcf-bc50-84b25abb680e-inventory\") pod \"install-certs-ovrd-openstack-edpm-tls-ovrd-openstack-edpm-jkd5h\" (UID: \"e7f7df71-1122-4bcf-bc50-84b25abb680e\") " pod="openstack-kuttl-tests/install-certs-ovrd-openstack-edpm-tls-ovrd-openstack-edpm-jkd5h" Jan 20 18:08:31 crc kubenswrapper[4558]: I0120 18:08:31.317132 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-dns-ips-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7f7df71-1122-4bcf-bc50-84b25abb680e-tls-dns-ips-combined-ca-bundle\") pod \"install-certs-ovrd-openstack-edpm-tls-ovrd-openstack-edpm-jkd5h\" (UID: \"e7f7df71-1122-4bcf-bc50-84b25abb680e\") " pod="openstack-kuttl-tests/install-certs-ovrd-openstack-edpm-tls-ovrd-openstack-edpm-jkd5h" Jan 20 18:08:31 crc kubenswrapper[4558]: I0120 18:08:31.317159 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-tls-custom-tls-dns-default-certs-0\" (UniqueName: \"kubernetes.io/projected/e7f7df71-1122-4bcf-bc50-84b25abb680e-openstack-edpm-tls-custom-tls-dns-default-certs-0\") pod \"install-certs-ovrd-openstack-edpm-tls-ovrd-openstack-edpm-jkd5h\" (UID: \"e7f7df71-1122-4bcf-bc50-84b25abb680e\") " pod="openstack-kuttl-tests/install-certs-ovrd-openstack-edpm-tls-ovrd-openstack-edpm-jkd5h" Jan 20 18:08:31 crc kubenswrapper[4558]: I0120 18:08:31.317197 4558 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p6gjt\" (UniqueName: \"kubernetes.io/projected/e7f7df71-1122-4bcf-bc50-84b25abb680e-kube-api-access-p6gjt\") pod \"install-certs-ovrd-openstack-edpm-tls-ovrd-openstack-edpm-jkd5h\" (UID: \"e7f7df71-1122-4bcf-bc50-84b25abb680e\") " pod="openstack-kuttl-tests/install-certs-ovrd-openstack-edpm-tls-ovrd-openstack-edpm-jkd5h" Jan 20 18:08:31 crc kubenswrapper[4558]: I0120 18:08:31.326001 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-dns-ips-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7f7df71-1122-4bcf-bc50-84b25abb680e-tls-dns-ips-combined-ca-bundle\") pod \"install-certs-ovrd-openstack-edpm-tls-ovrd-openstack-edpm-jkd5h\" (UID: \"e7f7df71-1122-4bcf-bc50-84b25abb680e\") " pod="openstack-kuttl-tests/install-certs-ovrd-openstack-edpm-tls-ovrd-openstack-edpm-jkd5h" Jan 20 18:08:31 crc kubenswrapper[4558]: I0120 18:08:31.326682 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-tls-custom-tls-dns-default-certs-0\" (UniqueName: \"kubernetes.io/projected/e7f7df71-1122-4bcf-bc50-84b25abb680e-openstack-edpm-tls-custom-tls-dns-default-certs-0\") pod \"install-certs-ovrd-openstack-edpm-tls-ovrd-openstack-edpm-jkd5h\" (UID: \"e7f7df71-1122-4bcf-bc50-84b25abb680e\") " pod="openstack-kuttl-tests/install-certs-ovrd-openstack-edpm-tls-ovrd-openstack-edpm-jkd5h" Jan 20 18:08:31 crc kubenswrapper[4558]: I0120 18:08:31.326875 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"install-certs-ovrd-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7f7df71-1122-4bcf-bc50-84b25abb680e-install-certs-ovrd-combined-ca-bundle\") pod \"install-certs-ovrd-openstack-edpm-tls-ovrd-openstack-edpm-jkd5h\" (UID: \"e7f7df71-1122-4bcf-bc50-84b25abb680e\") " pod="openstack-kuttl-tests/install-certs-ovrd-openstack-edpm-tls-ovrd-openstack-edpm-jkd5h" Jan 20 18:08:31 crc kubenswrapper[4558]: I0120 18:08:31.326901 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-tls-tls-dns-ips-default-certs-0\" (UniqueName: \"kubernetes.io/projected/e7f7df71-1122-4bcf-bc50-84b25abb680e-openstack-edpm-tls-tls-dns-ips-default-certs-0\") pod \"install-certs-ovrd-openstack-edpm-tls-ovrd-openstack-edpm-jkd5h\" (UID: \"e7f7df71-1122-4bcf-bc50-84b25abb680e\") " pod="openstack-kuttl-tests/install-certs-ovrd-openstack-edpm-tls-ovrd-openstack-edpm-jkd5h" Jan 20 18:08:31 crc kubenswrapper[4558]: I0120 18:08:31.327372 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-tls\" (UniqueName: \"kubernetes.io/secret/e7f7df71-1122-4bcf-bc50-84b25abb680e-ssh-key-openstack-edpm-tls\") pod \"install-certs-ovrd-openstack-edpm-tls-ovrd-openstack-edpm-jkd5h\" (UID: \"e7f7df71-1122-4bcf-bc50-84b25abb680e\") " pod="openstack-kuttl-tests/install-certs-ovrd-openstack-edpm-tls-ovrd-openstack-edpm-jkd5h" Jan 20 18:08:31 crc kubenswrapper[4558]: I0120 18:08:31.328668 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e7f7df71-1122-4bcf-bc50-84b25abb680e-inventory\") pod \"install-certs-ovrd-openstack-edpm-tls-ovrd-openstack-edpm-jkd5h\" (UID: \"e7f7df71-1122-4bcf-bc50-84b25abb680e\") " pod="openstack-kuttl-tests/install-certs-ovrd-openstack-edpm-tls-ovrd-openstack-edpm-jkd5h" Jan 20 18:08:31 crc kubenswrapper[4558]: I0120 18:08:31.329562 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"custom-tls-dns-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7f7df71-1122-4bcf-bc50-84b25abb680e-custom-tls-dns-combined-ca-bundle\") pod \"install-certs-ovrd-openstack-edpm-tls-ovrd-openstack-edpm-jkd5h\" (UID: \"e7f7df71-1122-4bcf-bc50-84b25abb680e\") " pod="openstack-kuttl-tests/install-certs-ovrd-openstack-edpm-tls-ovrd-openstack-edpm-jkd5h" Jan 20 18:08:31 crc kubenswrapper[4558]: I0120 18:08:31.336285 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p6gjt\" (UniqueName: \"kubernetes.io/projected/e7f7df71-1122-4bcf-bc50-84b25abb680e-kube-api-access-p6gjt\") pod \"install-certs-ovrd-openstack-edpm-tls-ovrd-openstack-edpm-jkd5h\" (UID: \"e7f7df71-1122-4bcf-bc50-84b25abb680e\") " pod="openstack-kuttl-tests/install-certs-ovrd-openstack-edpm-tls-ovrd-openstack-edpm-jkd5h" Jan 20 18:08:31 crc kubenswrapper[4558]: I0120 18:08:31.524431 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/install-certs-ovrd-openstack-edpm-tls-ovrd-openstack-edpm-jkd5h" Jan 20 18:08:31 crc kubenswrapper[4558]: I0120 18:08:31.911092 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/install-certs-ovrd-openstack-edpm-tls-ovrd-openstack-edpm-jkd5h"] Jan 20 18:08:32 crc kubenswrapper[4558]: I0120 18:08:32.294697 4558 scope.go:117] "RemoveContainer" containerID="47e77ca48159e1b5a6cd274fb0aa21214e2c821bd50bbe97a5ed961159dd768e" Jan 20 18:08:32 crc kubenswrapper[4558]: I0120 18:08:32.324548 4558 scope.go:117] "RemoveContainer" containerID="7e72c105d94fc259543c84539cc70f2de6cdb6e3c353a725bb352f82454a1602" Jan 20 18:08:32 crc kubenswrapper[4558]: I0120 18:08:32.358259 4558 scope.go:117] "RemoveContainer" containerID="3322f0fcd065a6f620f07166f64f841b34aefae668a3e32059ae25ae20d153d3" Jan 20 18:08:32 crc kubenswrapper[4558]: I0120 18:08:32.380842 4558 scope.go:117] "RemoveContainer" containerID="1a997d470884a39f7eb44c72758d6bc648bf9af5d98e3af36150fd95c134b997" Jan 20 18:08:32 crc kubenswrapper[4558]: I0120 18:08:32.404145 4558 scope.go:117] "RemoveContainer" containerID="71f1c783db8c7f691afadbbc4458f473a50d37f859dca6011229e49700f8765f" Jan 20 18:08:32 crc kubenswrapper[4558]: I0120 18:08:32.432280 4558 scope.go:117] "RemoveContainer" containerID="0a753af56b757f6bc5348351bfd3b389aa51323a3a5c3556062e047b14e49abf" Jan 20 18:08:32 crc kubenswrapper[4558]: I0120 18:08:32.465553 4558 scope.go:117] "RemoveContainer" containerID="129f8671268b133fccb5a9037db7cdd2b2b252cea20ad1db03c7b2261495a37b" Jan 20 18:08:32 crc kubenswrapper[4558]: I0120 18:08:32.488546 4558 scope.go:117] "RemoveContainer" containerID="170fc088791aa1343a57b7094aab97b4039b8fc0f4af19be12926ec07034f0c3" Jan 20 18:08:32 crc kubenswrapper[4558]: I0120 18:08:32.520104 4558 scope.go:117] "RemoveContainer" containerID="2f4911e27581efd12811ebf25498eff366913a3d353db9bc01e191405fa119d3" Jan 20 18:08:32 crc kubenswrapper[4558]: I0120 18:08:32.544208 4558 scope.go:117] "RemoveContainer" containerID="7c8c295aa796a74bd860c38675342562e1092a52b2c7074578717f0739271d19" Jan 20 18:08:32 crc kubenswrapper[4558]: I0120 18:08:32.569418 4558 scope.go:117] "RemoveContainer" containerID="a7d59ab3dd89c04918067b3b577e740c68bc67311390ae44c4fedb735b11dddb" Jan 20 18:08:32 crc kubenswrapper[4558]: I0120 18:08:32.595703 4558 scope.go:117] "RemoveContainer" containerID="566a6eca35e66f52dd932aa03a7f5bd111bb8cc6fd61cc7a80d8c92da9dcf755" Jan 20 18:08:32 crc kubenswrapper[4558]: I0120 18:08:32.616578 4558 scope.go:117] "RemoveContainer" 
containerID="f55b28c638183d096a150ac70c469c05ed01edf6198fb78f20a01896ce7f4508" Jan 20 18:08:32 crc kubenswrapper[4558]: I0120 18:08:32.792135 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/install-certs-ovrd-openstack-edpm-tls-ovrd-openstack-edpm-jkd5h" event={"ID":"e7f7df71-1122-4bcf-bc50-84b25abb680e","Type":"ContainerStarted","Data":"0a652eeb8cc936df965f49bb81a148ca20240793d59d9727a5b1b22b3298a64f"} Jan 20 18:08:32 crc kubenswrapper[4558]: I0120 18:08:32.792194 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/install-certs-ovrd-openstack-edpm-tls-ovrd-openstack-edpm-jkd5h" event={"ID":"e7f7df71-1122-4bcf-bc50-84b25abb680e","Type":"ContainerStarted","Data":"9698ae42ff926ceacca041c1111b85c3ea7e717bf440a9224b9d4925f685ccf9"} Jan 20 18:08:32 crc kubenswrapper[4558]: I0120 18:08:32.813288 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/install-certs-ovrd-openstack-edpm-tls-ovrd-openstack-edpm-jkd5h" podStartSLOduration=1.265923875 podStartE2EDuration="1.813268471s" podCreationTimestamp="2026-01-20 18:08:31 +0000 UTC" firstStartedPulling="2026-01-20 18:08:31.918312455 +0000 UTC m=+5205.678650423" lastFinishedPulling="2026-01-20 18:08:32.465657062 +0000 UTC m=+5206.225995019" observedRunningTime="2026-01-20 18:08:32.806447315 +0000 UTC m=+5206.566785282" watchObservedRunningTime="2026-01-20 18:08:32.813268471 +0000 UTC m=+5206.573606438" Jan 20 18:08:35 crc kubenswrapper[4558]: I0120 18:08:35.832417 4558 generic.go:334] "Generic (PLEG): container finished" podID="e7f7df71-1122-4bcf-bc50-84b25abb680e" containerID="0a652eeb8cc936df965f49bb81a148ca20240793d59d9727a5b1b22b3298a64f" exitCode=0 Jan 20 18:08:35 crc kubenswrapper[4558]: I0120 18:08:35.832522 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/install-certs-ovrd-openstack-edpm-tls-ovrd-openstack-edpm-jkd5h" event={"ID":"e7f7df71-1122-4bcf-bc50-84b25abb680e","Type":"ContainerDied","Data":"0a652eeb8cc936df965f49bb81a148ca20240793d59d9727a5b1b22b3298a64f"} Jan 20 18:08:37 crc kubenswrapper[4558]: I0120 18:08:37.092701 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/install-certs-ovrd-openstack-edpm-tls-ovrd-openstack-edpm-jkd5h" Jan 20 18:08:37 crc kubenswrapper[4558]: I0120 18:08:37.200770 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p6gjt\" (UniqueName: \"kubernetes.io/projected/e7f7df71-1122-4bcf-bc50-84b25abb680e-kube-api-access-p6gjt\") pod \"e7f7df71-1122-4bcf-bc50-84b25abb680e\" (UID: \"e7f7df71-1122-4bcf-bc50-84b25abb680e\") " Jan 20 18:08:37 crc kubenswrapper[4558]: I0120 18:08:37.200827 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tls-dns-ips-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7f7df71-1122-4bcf-bc50-84b25abb680e-tls-dns-ips-combined-ca-bundle\") pod \"e7f7df71-1122-4bcf-bc50-84b25abb680e\" (UID: \"e7f7df71-1122-4bcf-bc50-84b25abb680e\") " Jan 20 18:08:37 crc kubenswrapper[4558]: I0120 18:08:37.200912 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"install-certs-ovrd-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7f7df71-1122-4bcf-bc50-84b25abb680e-install-certs-ovrd-combined-ca-bundle\") pod \"e7f7df71-1122-4bcf-bc50-84b25abb680e\" (UID: \"e7f7df71-1122-4bcf-bc50-84b25abb680e\") " Jan 20 18:08:37 crc kubenswrapper[4558]: I0120 18:08:37.201359 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-tls-tls-dns-ips-default-certs-0\" (UniqueName: \"kubernetes.io/projected/e7f7df71-1122-4bcf-bc50-84b25abb680e-openstack-edpm-tls-tls-dns-ips-default-certs-0\") pod \"e7f7df71-1122-4bcf-bc50-84b25abb680e\" (UID: \"e7f7df71-1122-4bcf-bc50-84b25abb680e\") " Jan 20 18:08:37 crc kubenswrapper[4558]: I0120 18:08:37.202397 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e7f7df71-1122-4bcf-bc50-84b25abb680e-inventory\") pod \"e7f7df71-1122-4bcf-bc50-84b25abb680e\" (UID: \"e7f7df71-1122-4bcf-bc50-84b25abb680e\") " Jan 20 18:08:37 crc kubenswrapper[4558]: I0120 18:08:37.202626 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"custom-tls-dns-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7f7df71-1122-4bcf-bc50-84b25abb680e-custom-tls-dns-combined-ca-bundle\") pod \"e7f7df71-1122-4bcf-bc50-84b25abb680e\" (UID: \"e7f7df71-1122-4bcf-bc50-84b25abb680e\") " Jan 20 18:08:37 crc kubenswrapper[4558]: I0120 18:08:37.203106 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-tls\" (UniqueName: \"kubernetes.io/secret/e7f7df71-1122-4bcf-bc50-84b25abb680e-ssh-key-openstack-edpm-tls\") pod \"e7f7df71-1122-4bcf-bc50-84b25abb680e\" (UID: \"e7f7df71-1122-4bcf-bc50-84b25abb680e\") " Jan 20 18:08:37 crc kubenswrapper[4558]: I0120 18:08:37.203224 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-tls-custom-tls-dns-default-certs-0\" (UniqueName: \"kubernetes.io/projected/e7f7df71-1122-4bcf-bc50-84b25abb680e-openstack-edpm-tls-custom-tls-dns-default-certs-0\") pod \"e7f7df71-1122-4bcf-bc50-84b25abb680e\" (UID: \"e7f7df71-1122-4bcf-bc50-84b25abb680e\") " Jan 20 18:08:37 crc kubenswrapper[4558]: I0120 18:08:37.208675 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7f7df71-1122-4bcf-bc50-84b25abb680e-install-certs-ovrd-combined-ca-bundle" (OuterVolumeSpecName: "install-certs-ovrd-combined-ca-bundle") pod 
"e7f7df71-1122-4bcf-bc50-84b25abb680e" (UID: "e7f7df71-1122-4bcf-bc50-84b25abb680e"). InnerVolumeSpecName "install-certs-ovrd-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:08:37 crc kubenswrapper[4558]: I0120 18:08:37.208848 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7f7df71-1122-4bcf-bc50-84b25abb680e-tls-dns-ips-combined-ca-bundle" (OuterVolumeSpecName: "tls-dns-ips-combined-ca-bundle") pod "e7f7df71-1122-4bcf-bc50-84b25abb680e" (UID: "e7f7df71-1122-4bcf-bc50-84b25abb680e"). InnerVolumeSpecName "tls-dns-ips-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:08:37 crc kubenswrapper[4558]: I0120 18:08:37.208860 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7f7df71-1122-4bcf-bc50-84b25abb680e-openstack-edpm-tls-custom-tls-dns-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-tls-custom-tls-dns-default-certs-0") pod "e7f7df71-1122-4bcf-bc50-84b25abb680e" (UID: "e7f7df71-1122-4bcf-bc50-84b25abb680e"). InnerVolumeSpecName "openstack-edpm-tls-custom-tls-dns-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:08:37 crc kubenswrapper[4558]: I0120 18:08:37.208937 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7f7df71-1122-4bcf-bc50-84b25abb680e-openstack-edpm-tls-tls-dns-ips-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-tls-tls-dns-ips-default-certs-0") pod "e7f7df71-1122-4bcf-bc50-84b25abb680e" (UID: "e7f7df71-1122-4bcf-bc50-84b25abb680e"). InnerVolumeSpecName "openstack-edpm-tls-tls-dns-ips-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:08:37 crc kubenswrapper[4558]: I0120 18:08:37.209495 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7f7df71-1122-4bcf-bc50-84b25abb680e-custom-tls-dns-combined-ca-bundle" (OuterVolumeSpecName: "custom-tls-dns-combined-ca-bundle") pod "e7f7df71-1122-4bcf-bc50-84b25abb680e" (UID: "e7f7df71-1122-4bcf-bc50-84b25abb680e"). InnerVolumeSpecName "custom-tls-dns-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:08:37 crc kubenswrapper[4558]: I0120 18:08:37.210365 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7f7df71-1122-4bcf-bc50-84b25abb680e-kube-api-access-p6gjt" (OuterVolumeSpecName: "kube-api-access-p6gjt") pod "e7f7df71-1122-4bcf-bc50-84b25abb680e" (UID: "e7f7df71-1122-4bcf-bc50-84b25abb680e"). InnerVolumeSpecName "kube-api-access-p6gjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:08:37 crc kubenswrapper[4558]: I0120 18:08:37.223390 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7f7df71-1122-4bcf-bc50-84b25abb680e-inventory" (OuterVolumeSpecName: "inventory") pod "e7f7df71-1122-4bcf-bc50-84b25abb680e" (UID: "e7f7df71-1122-4bcf-bc50-84b25abb680e"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:08:37 crc kubenswrapper[4558]: I0120 18:08:37.226642 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7f7df71-1122-4bcf-bc50-84b25abb680e-ssh-key-openstack-edpm-tls" (OuterVolumeSpecName: "ssh-key-openstack-edpm-tls") pod "e7f7df71-1122-4bcf-bc50-84b25abb680e" (UID: "e7f7df71-1122-4bcf-bc50-84b25abb680e"). 
InnerVolumeSpecName "ssh-key-openstack-edpm-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:08:37 crc kubenswrapper[4558]: I0120 18:08:37.306243 4558 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-tls-tls-dns-ips-default-certs-0\" (UniqueName: \"kubernetes.io/projected/e7f7df71-1122-4bcf-bc50-84b25abb680e-openstack-edpm-tls-tls-dns-ips-default-certs-0\") on node \"crc\" DevicePath \"\"" Jan 20 18:08:37 crc kubenswrapper[4558]: I0120 18:08:37.306287 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e7f7df71-1122-4bcf-bc50-84b25abb680e-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:08:37 crc kubenswrapper[4558]: I0120 18:08:37.306304 4558 reconciler_common.go:293] "Volume detached for volume \"custom-tls-dns-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7f7df71-1122-4bcf-bc50-84b25abb680e-custom-tls-dns-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:08:37 crc kubenswrapper[4558]: I0120 18:08:37.306513 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-tls\" (UniqueName: \"kubernetes.io/secret/e7f7df71-1122-4bcf-bc50-84b25abb680e-ssh-key-openstack-edpm-tls\") on node \"crc\" DevicePath \"\"" Jan 20 18:08:37 crc kubenswrapper[4558]: I0120 18:08:37.306531 4558 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-tls-custom-tls-dns-default-certs-0\" (UniqueName: \"kubernetes.io/projected/e7f7df71-1122-4bcf-bc50-84b25abb680e-openstack-edpm-tls-custom-tls-dns-default-certs-0\") on node \"crc\" DevicePath \"\"" Jan 20 18:08:37 crc kubenswrapper[4558]: I0120 18:08:37.306546 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p6gjt\" (UniqueName: \"kubernetes.io/projected/e7f7df71-1122-4bcf-bc50-84b25abb680e-kube-api-access-p6gjt\") on node \"crc\" DevicePath \"\"" Jan 20 18:08:37 crc kubenswrapper[4558]: I0120 18:08:37.306557 4558 reconciler_common.go:293] "Volume detached for volume \"tls-dns-ips-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7f7df71-1122-4bcf-bc50-84b25abb680e-tls-dns-ips-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:08:37 crc kubenswrapper[4558]: I0120 18:08:37.306568 4558 reconciler_common.go:293] "Volume detached for volume \"install-certs-ovrd-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e7f7df71-1122-4bcf-bc50-84b25abb680e-install-certs-ovrd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:08:37 crc kubenswrapper[4558]: I0120 18:08:37.853438 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/install-certs-ovrd-openstack-edpm-tls-ovrd-openstack-edpm-jkd5h" event={"ID":"e7f7df71-1122-4bcf-bc50-84b25abb680e","Type":"ContainerDied","Data":"9698ae42ff926ceacca041c1111b85c3ea7e717bf440a9224b9d4925f685ccf9"} Jan 20 18:08:37 crc kubenswrapper[4558]: I0120 18:08:37.853837 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9698ae42ff926ceacca041c1111b85c3ea7e717bf440a9224b9d4925f685ccf9" Jan 20 18:08:37 crc kubenswrapper[4558]: I0120 18:08:37.853480 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/install-certs-ovrd-openstack-edpm-tls-ovrd-openstack-edpm-jkd5h" Jan 20 18:08:37 crc kubenswrapper[4558]: I0120 18:08:37.904661 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/tls-dns-ips-openstack-edpm-tls-ovrd-openstack-edpm-tls-4j2nb"] Jan 20 18:08:37 crc kubenswrapper[4558]: E0120 18:08:37.905026 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7f7df71-1122-4bcf-bc50-84b25abb680e" containerName="install-certs-ovrd-openstack-edpm-tls-ovrd-openstack-edpm-tls" Jan 20 18:08:37 crc kubenswrapper[4558]: I0120 18:08:37.905313 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7f7df71-1122-4bcf-bc50-84b25abb680e" containerName="install-certs-ovrd-openstack-edpm-tls-ovrd-openstack-edpm-tls" Jan 20 18:08:37 crc kubenswrapper[4558]: I0120 18:08:37.905825 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e7f7df71-1122-4bcf-bc50-84b25abb680e" containerName="install-certs-ovrd-openstack-edpm-tls-ovrd-openstack-edpm-tls" Jan 20 18:08:37 crc kubenswrapper[4558]: I0120 18:08:37.906356 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/tls-dns-ips-openstack-edpm-tls-ovrd-openstack-edpm-tls-4j2nb" Jan 20 18:08:37 crc kubenswrapper[4558]: I0120 18:08:37.910706 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"openstack-edpm-tls-dockercfg-bzbzv" Jan 20 18:08:37 crc kubenswrapper[4558]: I0120 18:08:37.910743 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-openstack-edpm-tls" Jan 20 18:08:37 crc kubenswrapper[4558]: I0120 18:08:37.910862 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 18:08:37 crc kubenswrapper[4558]: I0120 18:08:37.910961 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 18:08:37 crc kubenswrapper[4558]: I0120 18:08:37.911045 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"combined-ca-bundle" Jan 20 18:08:37 crc kubenswrapper[4558]: I0120 18:08:37.913442 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-57pgf\" (UniqueName: \"kubernetes.io/projected/11d2b4a2-d597-4764-b6f3-7ef906ca8403-kube-api-access-57pgf\") pod \"tls-dns-ips-openstack-edpm-tls-ovrd-openstack-edpm-tls-4j2nb\" (UID: \"11d2b4a2-d597-4764-b6f3-7ef906ca8403\") " pod="openstack-kuttl-tests/tls-dns-ips-openstack-edpm-tls-ovrd-openstack-edpm-tls-4j2nb" Jan 20 18:08:37 crc kubenswrapper[4558]: I0120 18:08:37.913495 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-tls\" (UniqueName: \"kubernetes.io/secret/11d2b4a2-d597-4764-b6f3-7ef906ca8403-ssh-key-openstack-edpm-tls\") pod \"tls-dns-ips-openstack-edpm-tls-ovrd-openstack-edpm-tls-4j2nb\" (UID: \"11d2b4a2-d597-4764-b6f3-7ef906ca8403\") " pod="openstack-kuttl-tests/tls-dns-ips-openstack-edpm-tls-ovrd-openstack-edpm-tls-4j2nb" Jan 20 18:08:37 crc kubenswrapper[4558]: I0120 18:08:37.913558 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-dns-ips-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11d2b4a2-d597-4764-b6f3-7ef906ca8403-tls-dns-ips-combined-ca-bundle\") pod 
\"tls-dns-ips-openstack-edpm-tls-ovrd-openstack-edpm-tls-4j2nb\" (UID: \"11d2b4a2-d597-4764-b6f3-7ef906ca8403\") " pod="openstack-kuttl-tests/tls-dns-ips-openstack-edpm-tls-ovrd-openstack-edpm-tls-4j2nb" Jan 20 18:08:37 crc kubenswrapper[4558]: I0120 18:08:37.913628 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/11d2b4a2-d597-4764-b6f3-7ef906ca8403-inventory\") pod \"tls-dns-ips-openstack-edpm-tls-ovrd-openstack-edpm-tls-4j2nb\" (UID: \"11d2b4a2-d597-4764-b6f3-7ef906ca8403\") " pod="openstack-kuttl-tests/tls-dns-ips-openstack-edpm-tls-ovrd-openstack-edpm-tls-4j2nb" Jan 20 18:08:37 crc kubenswrapper[4558]: I0120 18:08:37.914903 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/tls-dns-ips-openstack-edpm-tls-ovrd-openstack-edpm-tls-4j2nb"] Jan 20 18:08:38 crc kubenswrapper[4558]: I0120 18:08:38.014729 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-dns-ips-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11d2b4a2-d597-4764-b6f3-7ef906ca8403-tls-dns-ips-combined-ca-bundle\") pod \"tls-dns-ips-openstack-edpm-tls-ovrd-openstack-edpm-tls-4j2nb\" (UID: \"11d2b4a2-d597-4764-b6f3-7ef906ca8403\") " pod="openstack-kuttl-tests/tls-dns-ips-openstack-edpm-tls-ovrd-openstack-edpm-tls-4j2nb" Jan 20 18:08:38 crc kubenswrapper[4558]: I0120 18:08:38.014779 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/11d2b4a2-d597-4764-b6f3-7ef906ca8403-inventory\") pod \"tls-dns-ips-openstack-edpm-tls-ovrd-openstack-edpm-tls-4j2nb\" (UID: \"11d2b4a2-d597-4764-b6f3-7ef906ca8403\") " pod="openstack-kuttl-tests/tls-dns-ips-openstack-edpm-tls-ovrd-openstack-edpm-tls-4j2nb" Jan 20 18:08:38 crc kubenswrapper[4558]: I0120 18:08:38.014841 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-57pgf\" (UniqueName: \"kubernetes.io/projected/11d2b4a2-d597-4764-b6f3-7ef906ca8403-kube-api-access-57pgf\") pod \"tls-dns-ips-openstack-edpm-tls-ovrd-openstack-edpm-tls-4j2nb\" (UID: \"11d2b4a2-d597-4764-b6f3-7ef906ca8403\") " pod="openstack-kuttl-tests/tls-dns-ips-openstack-edpm-tls-ovrd-openstack-edpm-tls-4j2nb" Jan 20 18:08:38 crc kubenswrapper[4558]: I0120 18:08:38.014872 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-tls\" (UniqueName: \"kubernetes.io/secret/11d2b4a2-d597-4764-b6f3-7ef906ca8403-ssh-key-openstack-edpm-tls\") pod \"tls-dns-ips-openstack-edpm-tls-ovrd-openstack-edpm-tls-4j2nb\" (UID: \"11d2b4a2-d597-4764-b6f3-7ef906ca8403\") " pod="openstack-kuttl-tests/tls-dns-ips-openstack-edpm-tls-ovrd-openstack-edpm-tls-4j2nb" Jan 20 18:08:38 crc kubenswrapper[4558]: I0120 18:08:38.018385 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-tls\" (UniqueName: \"kubernetes.io/secret/11d2b4a2-d597-4764-b6f3-7ef906ca8403-ssh-key-openstack-edpm-tls\") pod \"tls-dns-ips-openstack-edpm-tls-ovrd-openstack-edpm-tls-4j2nb\" (UID: \"11d2b4a2-d597-4764-b6f3-7ef906ca8403\") " pod="openstack-kuttl-tests/tls-dns-ips-openstack-edpm-tls-ovrd-openstack-edpm-tls-4j2nb" Jan 20 18:08:38 crc kubenswrapper[4558]: I0120 18:08:38.018509 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-dns-ips-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11d2b4a2-d597-4764-b6f3-7ef906ca8403-tls-dns-ips-combined-ca-bundle\") pod 
\"tls-dns-ips-openstack-edpm-tls-ovrd-openstack-edpm-tls-4j2nb\" (UID: \"11d2b4a2-d597-4764-b6f3-7ef906ca8403\") " pod="openstack-kuttl-tests/tls-dns-ips-openstack-edpm-tls-ovrd-openstack-edpm-tls-4j2nb" Jan 20 18:08:38 crc kubenswrapper[4558]: I0120 18:08:38.023611 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/11d2b4a2-d597-4764-b6f3-7ef906ca8403-inventory\") pod \"tls-dns-ips-openstack-edpm-tls-ovrd-openstack-edpm-tls-4j2nb\" (UID: \"11d2b4a2-d597-4764-b6f3-7ef906ca8403\") " pod="openstack-kuttl-tests/tls-dns-ips-openstack-edpm-tls-ovrd-openstack-edpm-tls-4j2nb" Jan 20 18:08:38 crc kubenswrapper[4558]: I0120 18:08:38.033774 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-57pgf\" (UniqueName: \"kubernetes.io/projected/11d2b4a2-d597-4764-b6f3-7ef906ca8403-kube-api-access-57pgf\") pod \"tls-dns-ips-openstack-edpm-tls-ovrd-openstack-edpm-tls-4j2nb\" (UID: \"11d2b4a2-d597-4764-b6f3-7ef906ca8403\") " pod="openstack-kuttl-tests/tls-dns-ips-openstack-edpm-tls-ovrd-openstack-edpm-tls-4j2nb" Jan 20 18:08:38 crc kubenswrapper[4558]: I0120 18:08:38.220444 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/tls-dns-ips-openstack-edpm-tls-ovrd-openstack-edpm-tls-4j2nb" Jan 20 18:08:38 crc kubenswrapper[4558]: I0120 18:08:38.566924 4558 scope.go:117] "RemoveContainer" containerID="6b02d31ec11fe5435af988e35303a6f66b5afa5ef952ce5f7cc4b3cd5f4d9497" Jan 20 18:08:38 crc kubenswrapper[4558]: I0120 18:08:38.613417 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/tls-dns-ips-openstack-edpm-tls-ovrd-openstack-edpm-tls-4j2nb"] Jan 20 18:08:38 crc kubenswrapper[4558]: I0120 18:08:38.865728 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerStarted","Data":"fdbebed64bed48194784e96161b865cfbbfe6287c24bb11eb0fd195beaf71fc7"} Jan 20 18:08:38 crc kubenswrapper[4558]: I0120 18:08:38.868076 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/tls-dns-ips-openstack-edpm-tls-ovrd-openstack-edpm-tls-4j2nb" event={"ID":"11d2b4a2-d597-4764-b6f3-7ef906ca8403","Type":"ContainerStarted","Data":"38f4550201c149dc47a407ab2b3a8b4e7c85209dd822f76b8d90dc1963ccfcd7"} Jan 20 18:08:39 crc kubenswrapper[4558]: I0120 18:08:39.887292 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/tls-dns-ips-openstack-edpm-tls-ovrd-openstack-edpm-tls-4j2nb" event={"ID":"11d2b4a2-d597-4764-b6f3-7ef906ca8403","Type":"ContainerStarted","Data":"fe4a38c14f60d90568e400fb5d73ba13f25589faa6542fe3038d1945adb7a0d7"} Jan 20 18:08:39 crc kubenswrapper[4558]: I0120 18:08:39.906593 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/tls-dns-ips-openstack-edpm-tls-ovrd-openstack-edpm-tls-4j2nb" podStartSLOduration=2.44154218 podStartE2EDuration="2.906579262s" podCreationTimestamp="2026-01-20 18:08:37 +0000 UTC" firstStartedPulling="2026-01-20 18:08:38.626536657 +0000 UTC m=+5212.386874625" lastFinishedPulling="2026-01-20 18:08:39.09157373 +0000 UTC m=+5212.851911707" observedRunningTime="2026-01-20 18:08:39.906076857 +0000 UTC m=+5213.666414824" watchObservedRunningTime="2026-01-20 18:08:39.906579262 +0000 UTC m=+5213.666917218" Jan 20 18:08:41 crc kubenswrapper[4558]: I0120 18:08:41.907666 4558 generic.go:334] "Generic 
(PLEG): container finished" podID="11d2b4a2-d597-4764-b6f3-7ef906ca8403" containerID="fe4a38c14f60d90568e400fb5d73ba13f25589faa6542fe3038d1945adb7a0d7" exitCode=0 Jan 20 18:08:41 crc kubenswrapper[4558]: I0120 18:08:41.907750 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/tls-dns-ips-openstack-edpm-tls-ovrd-openstack-edpm-tls-4j2nb" event={"ID":"11d2b4a2-d597-4764-b6f3-7ef906ca8403","Type":"ContainerDied","Data":"fe4a38c14f60d90568e400fb5d73ba13f25589faa6542fe3038d1945adb7a0d7"} Jan 20 18:08:43 crc kubenswrapper[4558]: I0120 18:08:43.152102 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/tls-dns-ips-openstack-edpm-tls-ovrd-openstack-edpm-tls-4j2nb" Jan 20 18:08:43 crc kubenswrapper[4558]: I0120 18:08:43.288638 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-57pgf\" (UniqueName: \"kubernetes.io/projected/11d2b4a2-d597-4764-b6f3-7ef906ca8403-kube-api-access-57pgf\") pod \"11d2b4a2-d597-4764-b6f3-7ef906ca8403\" (UID: \"11d2b4a2-d597-4764-b6f3-7ef906ca8403\") " Jan 20 18:08:43 crc kubenswrapper[4558]: I0120 18:08:43.288855 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-tls\" (UniqueName: \"kubernetes.io/secret/11d2b4a2-d597-4764-b6f3-7ef906ca8403-ssh-key-openstack-edpm-tls\") pod \"11d2b4a2-d597-4764-b6f3-7ef906ca8403\" (UID: \"11d2b4a2-d597-4764-b6f3-7ef906ca8403\") " Jan 20 18:08:43 crc kubenswrapper[4558]: I0120 18:08:43.288933 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tls-dns-ips-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11d2b4a2-d597-4764-b6f3-7ef906ca8403-tls-dns-ips-combined-ca-bundle\") pod \"11d2b4a2-d597-4764-b6f3-7ef906ca8403\" (UID: \"11d2b4a2-d597-4764-b6f3-7ef906ca8403\") " Jan 20 18:08:43 crc kubenswrapper[4558]: I0120 18:08:43.289215 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/11d2b4a2-d597-4764-b6f3-7ef906ca8403-inventory\") pod \"11d2b4a2-d597-4764-b6f3-7ef906ca8403\" (UID: \"11d2b4a2-d597-4764-b6f3-7ef906ca8403\") " Jan 20 18:08:43 crc kubenswrapper[4558]: I0120 18:08:43.295995 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/11d2b4a2-d597-4764-b6f3-7ef906ca8403-kube-api-access-57pgf" (OuterVolumeSpecName: "kube-api-access-57pgf") pod "11d2b4a2-d597-4764-b6f3-7ef906ca8403" (UID: "11d2b4a2-d597-4764-b6f3-7ef906ca8403"). InnerVolumeSpecName "kube-api-access-57pgf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:08:43 crc kubenswrapper[4558]: I0120 18:08:43.296136 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/11d2b4a2-d597-4764-b6f3-7ef906ca8403-tls-dns-ips-combined-ca-bundle" (OuterVolumeSpecName: "tls-dns-ips-combined-ca-bundle") pod "11d2b4a2-d597-4764-b6f3-7ef906ca8403" (UID: "11d2b4a2-d597-4764-b6f3-7ef906ca8403"). InnerVolumeSpecName "tls-dns-ips-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:08:43 crc kubenswrapper[4558]: E0120 18:08:43.307219 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/11d2b4a2-d597-4764-b6f3-7ef906ca8403-inventory podName:11d2b4a2-d597-4764-b6f3-7ef906ca8403 nodeName:}" failed. No retries permitted until 2026-01-20 18:08:43.807152454 +0000 UTC m=+5217.567490421 (durationBeforeRetry 500ms). 
Error: error cleaning subPath mounts for volume "inventory" (UniqueName: "kubernetes.io/secret/11d2b4a2-d597-4764-b6f3-7ef906ca8403-inventory") pod "11d2b4a2-d597-4764-b6f3-7ef906ca8403" (UID: "11d2b4a2-d597-4764-b6f3-7ef906ca8403") : error deleting /var/lib/kubelet/pods/11d2b4a2-d597-4764-b6f3-7ef906ca8403/volume-subpaths: remove /var/lib/kubelet/pods/11d2b4a2-d597-4764-b6f3-7ef906ca8403/volume-subpaths: no such file or directory Jan 20 18:08:43 crc kubenswrapper[4558]: I0120 18:08:43.309850 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/11d2b4a2-d597-4764-b6f3-7ef906ca8403-ssh-key-openstack-edpm-tls" (OuterVolumeSpecName: "ssh-key-openstack-edpm-tls") pod "11d2b4a2-d597-4764-b6f3-7ef906ca8403" (UID: "11d2b4a2-d597-4764-b6f3-7ef906ca8403"). InnerVolumeSpecName "ssh-key-openstack-edpm-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:08:43 crc kubenswrapper[4558]: I0120 18:08:43.392020 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-tls\" (UniqueName: \"kubernetes.io/secret/11d2b4a2-d597-4764-b6f3-7ef906ca8403-ssh-key-openstack-edpm-tls\") on node \"crc\" DevicePath \"\"" Jan 20 18:08:43 crc kubenswrapper[4558]: I0120 18:08:43.392052 4558 reconciler_common.go:293] "Volume detached for volume \"tls-dns-ips-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11d2b4a2-d597-4764-b6f3-7ef906ca8403-tls-dns-ips-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:08:43 crc kubenswrapper[4558]: I0120 18:08:43.392066 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-57pgf\" (UniqueName: \"kubernetes.io/projected/11d2b4a2-d597-4764-b6f3-7ef906ca8403-kube-api-access-57pgf\") on node \"crc\" DevicePath \"\"" Jan 20 18:08:43 crc kubenswrapper[4558]: I0120 18:08:43.901025 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/11d2b4a2-d597-4764-b6f3-7ef906ca8403-inventory\") pod \"11d2b4a2-d597-4764-b6f3-7ef906ca8403\" (UID: \"11d2b4a2-d597-4764-b6f3-7ef906ca8403\") " Jan 20 18:08:43 crc kubenswrapper[4558]: I0120 18:08:43.905246 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/11d2b4a2-d597-4764-b6f3-7ef906ca8403-inventory" (OuterVolumeSpecName: "inventory") pod "11d2b4a2-d597-4764-b6f3-7ef906ca8403" (UID: "11d2b4a2-d597-4764-b6f3-7ef906ca8403"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:08:43 crc kubenswrapper[4558]: I0120 18:08:43.930345 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/tls-dns-ips-openstack-edpm-tls-ovrd-openstack-edpm-tls-4j2nb" event={"ID":"11d2b4a2-d597-4764-b6f3-7ef906ca8403","Type":"ContainerDied","Data":"38f4550201c149dc47a407ab2b3a8b4e7c85209dd822f76b8d90dc1963ccfcd7"} Jan 20 18:08:43 crc kubenswrapper[4558]: I0120 18:08:43.930413 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/tls-dns-ips-openstack-edpm-tls-ovrd-openstack-edpm-tls-4j2nb" Jan 20 18:08:43 crc kubenswrapper[4558]: I0120 18:08:43.930423 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="38f4550201c149dc47a407ab2b3a8b4e7c85209dd822f76b8d90dc1963ccfcd7" Jan 20 18:08:43 crc kubenswrapper[4558]: I0120 18:08:43.978611 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/custom-tls-dns-openstack-edpm-tls-ovrd-openstack-edpm-tls-mxq2k"] Jan 20 18:08:43 crc kubenswrapper[4558]: E0120 18:08:43.979041 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11d2b4a2-d597-4764-b6f3-7ef906ca8403" containerName="tls-dns-ips-openstack-edpm-tls-ovrd-openstack-edpm-tls" Jan 20 18:08:43 crc kubenswrapper[4558]: I0120 18:08:43.979063 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="11d2b4a2-d597-4764-b6f3-7ef906ca8403" containerName="tls-dns-ips-openstack-edpm-tls-ovrd-openstack-edpm-tls" Jan 20 18:08:43 crc kubenswrapper[4558]: I0120 18:08:43.979252 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="11d2b4a2-d597-4764-b6f3-7ef906ca8403" containerName="tls-dns-ips-openstack-edpm-tls-ovrd-openstack-edpm-tls" Jan 20 18:08:43 crc kubenswrapper[4558]: I0120 18:08:43.979807 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/custom-tls-dns-openstack-edpm-tls-ovrd-openstack-edpm-tls-mxq2k" Jan 20 18:08:43 crc kubenswrapper[4558]: I0120 18:08:43.982291 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"combined-ca-bundle" Jan 20 18:08:43 crc kubenswrapper[4558]: I0120 18:08:43.982511 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 18:08:43 crc kubenswrapper[4558]: I0120 18:08:43.982664 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"openstack-edpm-tls-dockercfg-bzbzv" Jan 20 18:08:43 crc kubenswrapper[4558]: I0120 18:08:43.982787 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 18:08:43 crc kubenswrapper[4558]: I0120 18:08:43.983448 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-openstack-edpm-tls" Jan 20 18:08:43 crc kubenswrapper[4558]: I0120 18:08:43.993407 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/custom-tls-dns-openstack-edpm-tls-ovrd-openstack-edpm-tls-mxq2k"] Jan 20 18:08:44 crc kubenswrapper[4558]: I0120 18:08:44.009458 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/11d2b4a2-d597-4764-b6f3-7ef906ca8403-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:08:44 crc kubenswrapper[4558]: I0120 18:08:44.111124 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l6b7t\" (UniqueName: \"kubernetes.io/projected/b98cccb1-0d46-4be3-b3ce-f63a96e958d5-kube-api-access-l6b7t\") pod \"custom-tls-dns-openstack-edpm-tls-ovrd-openstack-edpm-tls-mxq2k\" (UID: \"b98cccb1-0d46-4be3-b3ce-f63a96e958d5\") " pod="openstack-kuttl-tests/custom-tls-dns-openstack-edpm-tls-ovrd-openstack-edpm-tls-mxq2k" Jan 20 18:08:44 crc kubenswrapper[4558]: I0120 18:08:44.111405 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"custom-tls-dns-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b98cccb1-0d46-4be3-b3ce-f63a96e958d5-custom-tls-dns-combined-ca-bundle\") pod \"custom-tls-dns-openstack-edpm-tls-ovrd-openstack-edpm-tls-mxq2k\" (UID: \"b98cccb1-0d46-4be3-b3ce-f63a96e958d5\") " pod="openstack-kuttl-tests/custom-tls-dns-openstack-edpm-tls-ovrd-openstack-edpm-tls-mxq2k" Jan 20 18:08:44 crc kubenswrapper[4558]: I0120 18:08:44.111580 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b98cccb1-0d46-4be3-b3ce-f63a96e958d5-inventory\") pod \"custom-tls-dns-openstack-edpm-tls-ovrd-openstack-edpm-tls-mxq2k\" (UID: \"b98cccb1-0d46-4be3-b3ce-f63a96e958d5\") " pod="openstack-kuttl-tests/custom-tls-dns-openstack-edpm-tls-ovrd-openstack-edpm-tls-mxq2k" Jan 20 18:08:44 crc kubenswrapper[4558]: I0120 18:08:44.111673 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-tls\" (UniqueName: \"kubernetes.io/secret/b98cccb1-0d46-4be3-b3ce-f63a96e958d5-ssh-key-openstack-edpm-tls\") pod \"custom-tls-dns-openstack-edpm-tls-ovrd-openstack-edpm-tls-mxq2k\" (UID: \"b98cccb1-0d46-4be3-b3ce-f63a96e958d5\") " pod="openstack-kuttl-tests/custom-tls-dns-openstack-edpm-tls-ovrd-openstack-edpm-tls-mxq2k" Jan 20 18:08:44 crc kubenswrapper[4558]: I0120 18:08:44.212789 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-tls\" (UniqueName: \"kubernetes.io/secret/b98cccb1-0d46-4be3-b3ce-f63a96e958d5-ssh-key-openstack-edpm-tls\") pod \"custom-tls-dns-openstack-edpm-tls-ovrd-openstack-edpm-tls-mxq2k\" (UID: \"b98cccb1-0d46-4be3-b3ce-f63a96e958d5\") " pod="openstack-kuttl-tests/custom-tls-dns-openstack-edpm-tls-ovrd-openstack-edpm-tls-mxq2k" Jan 20 18:08:44 crc kubenswrapper[4558]: I0120 18:08:44.212893 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l6b7t\" (UniqueName: \"kubernetes.io/projected/b98cccb1-0d46-4be3-b3ce-f63a96e958d5-kube-api-access-l6b7t\") pod \"custom-tls-dns-openstack-edpm-tls-ovrd-openstack-edpm-tls-mxq2k\" (UID: \"b98cccb1-0d46-4be3-b3ce-f63a96e958d5\") " pod="openstack-kuttl-tests/custom-tls-dns-openstack-edpm-tls-ovrd-openstack-edpm-tls-mxq2k" Jan 20 18:08:44 crc kubenswrapper[4558]: I0120 18:08:44.212950 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-tls-dns-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b98cccb1-0d46-4be3-b3ce-f63a96e958d5-custom-tls-dns-combined-ca-bundle\") pod \"custom-tls-dns-openstack-edpm-tls-ovrd-openstack-edpm-tls-mxq2k\" (UID: \"b98cccb1-0d46-4be3-b3ce-f63a96e958d5\") " pod="openstack-kuttl-tests/custom-tls-dns-openstack-edpm-tls-ovrd-openstack-edpm-tls-mxq2k" Jan 20 18:08:44 crc kubenswrapper[4558]: I0120 18:08:44.213007 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b98cccb1-0d46-4be3-b3ce-f63a96e958d5-inventory\") pod \"custom-tls-dns-openstack-edpm-tls-ovrd-openstack-edpm-tls-mxq2k\" (UID: \"b98cccb1-0d46-4be3-b3ce-f63a96e958d5\") " pod="openstack-kuttl-tests/custom-tls-dns-openstack-edpm-tls-ovrd-openstack-edpm-tls-mxq2k" Jan 20 18:08:44 crc kubenswrapper[4558]: I0120 18:08:44.217498 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-tls-dns-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/b98cccb1-0d46-4be3-b3ce-f63a96e958d5-custom-tls-dns-combined-ca-bundle\") pod \"custom-tls-dns-openstack-edpm-tls-ovrd-openstack-edpm-tls-mxq2k\" (UID: \"b98cccb1-0d46-4be3-b3ce-f63a96e958d5\") " pod="openstack-kuttl-tests/custom-tls-dns-openstack-edpm-tls-ovrd-openstack-edpm-tls-mxq2k" Jan 20 18:08:44 crc kubenswrapper[4558]: I0120 18:08:44.217563 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b98cccb1-0d46-4be3-b3ce-f63a96e958d5-inventory\") pod \"custom-tls-dns-openstack-edpm-tls-ovrd-openstack-edpm-tls-mxq2k\" (UID: \"b98cccb1-0d46-4be3-b3ce-f63a96e958d5\") " pod="openstack-kuttl-tests/custom-tls-dns-openstack-edpm-tls-ovrd-openstack-edpm-tls-mxq2k" Jan 20 18:08:44 crc kubenswrapper[4558]: I0120 18:08:44.217640 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-tls\" (UniqueName: \"kubernetes.io/secret/b98cccb1-0d46-4be3-b3ce-f63a96e958d5-ssh-key-openstack-edpm-tls\") pod \"custom-tls-dns-openstack-edpm-tls-ovrd-openstack-edpm-tls-mxq2k\" (UID: \"b98cccb1-0d46-4be3-b3ce-f63a96e958d5\") " pod="openstack-kuttl-tests/custom-tls-dns-openstack-edpm-tls-ovrd-openstack-edpm-tls-mxq2k" Jan 20 18:08:44 crc kubenswrapper[4558]: I0120 18:08:44.228283 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l6b7t\" (UniqueName: \"kubernetes.io/projected/b98cccb1-0d46-4be3-b3ce-f63a96e958d5-kube-api-access-l6b7t\") pod \"custom-tls-dns-openstack-edpm-tls-ovrd-openstack-edpm-tls-mxq2k\" (UID: \"b98cccb1-0d46-4be3-b3ce-f63a96e958d5\") " pod="openstack-kuttl-tests/custom-tls-dns-openstack-edpm-tls-ovrd-openstack-edpm-tls-mxq2k" Jan 20 18:08:44 crc kubenswrapper[4558]: I0120 18:08:44.295368 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/custom-tls-dns-openstack-edpm-tls-ovrd-openstack-edpm-tls-mxq2k" Jan 20 18:08:44 crc kubenswrapper[4558]: I0120 18:08:44.742831 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/custom-tls-dns-openstack-edpm-tls-ovrd-openstack-edpm-tls-mxq2k"] Jan 20 18:08:44 crc kubenswrapper[4558]: I0120 18:08:44.942869 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/custom-tls-dns-openstack-edpm-tls-ovrd-openstack-edpm-tls-mxq2k" event={"ID":"b98cccb1-0d46-4be3-b3ce-f63a96e958d5","Type":"ContainerStarted","Data":"6f5e6ed90a1a795b614b57b7d91a1e962f0343918cd8485f95288e4309bf93e9"} Jan 20 18:08:45 crc kubenswrapper[4558]: I0120 18:08:45.952695 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/custom-tls-dns-openstack-edpm-tls-ovrd-openstack-edpm-tls-mxq2k" event={"ID":"b98cccb1-0d46-4be3-b3ce-f63a96e958d5","Type":"ContainerStarted","Data":"855a300217926f1ebcb2f57d692b96e2447d32d17ccd7d30ba4aa52782efcb6b"} Jan 20 18:08:45 crc kubenswrapper[4558]: I0120 18:08:45.980974 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/custom-tls-dns-openstack-edpm-tls-ovrd-openstack-edpm-tls-mxq2k" podStartSLOduration=2.481196498 podStartE2EDuration="2.980944513s" podCreationTimestamp="2026-01-20 18:08:43 +0000 UTC" firstStartedPulling="2026-01-20 18:08:44.738948875 +0000 UTC m=+5218.499286832" lastFinishedPulling="2026-01-20 18:08:45.23869688 +0000 UTC m=+5218.999034847" observedRunningTime="2026-01-20 18:08:45.969996078 +0000 UTC m=+5219.730334044" watchObservedRunningTime="2026-01-20 18:08:45.980944513 +0000 UTC m=+5219.741282480" Jan 20 18:08:47 crc kubenswrapper[4558]: I0120 18:08:47.975532 4558 generic.go:334] "Generic (PLEG): container finished" podID="b98cccb1-0d46-4be3-b3ce-f63a96e958d5" containerID="855a300217926f1ebcb2f57d692b96e2447d32d17ccd7d30ba4aa52782efcb6b" exitCode=0 Jan 20 18:08:47 crc kubenswrapper[4558]: I0120 18:08:47.975617 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/custom-tls-dns-openstack-edpm-tls-ovrd-openstack-edpm-tls-mxq2k" event={"ID":"b98cccb1-0d46-4be3-b3ce-f63a96e958d5","Type":"ContainerDied","Data":"855a300217926f1ebcb2f57d692b96e2447d32d17ccd7d30ba4aa52782efcb6b"} Jan 20 18:08:49 crc kubenswrapper[4558]: I0120 18:08:49.218681 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/custom-tls-dns-openstack-edpm-tls-ovrd-openstack-edpm-tls-mxq2k" Jan 20 18:08:49 crc kubenswrapper[4558]: I0120 18:08:49.399354 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"custom-tls-dns-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b98cccb1-0d46-4be3-b3ce-f63a96e958d5-custom-tls-dns-combined-ca-bundle\") pod \"b98cccb1-0d46-4be3-b3ce-f63a96e958d5\" (UID: \"b98cccb1-0d46-4be3-b3ce-f63a96e958d5\") " Jan 20 18:08:49 crc kubenswrapper[4558]: I0120 18:08:49.400106 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l6b7t\" (UniqueName: \"kubernetes.io/projected/b98cccb1-0d46-4be3-b3ce-f63a96e958d5-kube-api-access-l6b7t\") pod \"b98cccb1-0d46-4be3-b3ce-f63a96e958d5\" (UID: \"b98cccb1-0d46-4be3-b3ce-f63a96e958d5\") " Jan 20 18:08:49 crc kubenswrapper[4558]: I0120 18:08:49.400220 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b98cccb1-0d46-4be3-b3ce-f63a96e958d5-inventory\") pod \"b98cccb1-0d46-4be3-b3ce-f63a96e958d5\" (UID: \"b98cccb1-0d46-4be3-b3ce-f63a96e958d5\") " Jan 20 18:08:49 crc kubenswrapper[4558]: I0120 18:08:49.400286 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-tls\" (UniqueName: \"kubernetes.io/secret/b98cccb1-0d46-4be3-b3ce-f63a96e958d5-ssh-key-openstack-edpm-tls\") pod \"b98cccb1-0d46-4be3-b3ce-f63a96e958d5\" (UID: \"b98cccb1-0d46-4be3-b3ce-f63a96e958d5\") " Jan 20 18:08:49 crc kubenswrapper[4558]: I0120 18:08:49.405525 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b98cccb1-0d46-4be3-b3ce-f63a96e958d5-custom-tls-dns-combined-ca-bundle" (OuterVolumeSpecName: "custom-tls-dns-combined-ca-bundle") pod "b98cccb1-0d46-4be3-b3ce-f63a96e958d5" (UID: "b98cccb1-0d46-4be3-b3ce-f63a96e958d5"). InnerVolumeSpecName "custom-tls-dns-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:08:49 crc kubenswrapper[4558]: I0120 18:08:49.406313 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b98cccb1-0d46-4be3-b3ce-f63a96e958d5-kube-api-access-l6b7t" (OuterVolumeSpecName: "kube-api-access-l6b7t") pod "b98cccb1-0d46-4be3-b3ce-f63a96e958d5" (UID: "b98cccb1-0d46-4be3-b3ce-f63a96e958d5"). InnerVolumeSpecName "kube-api-access-l6b7t". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:08:49 crc kubenswrapper[4558]: I0120 18:08:49.421463 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b98cccb1-0d46-4be3-b3ce-f63a96e958d5-inventory" (OuterVolumeSpecName: "inventory") pod "b98cccb1-0d46-4be3-b3ce-f63a96e958d5" (UID: "b98cccb1-0d46-4be3-b3ce-f63a96e958d5"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:08:49 crc kubenswrapper[4558]: I0120 18:08:49.421834 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b98cccb1-0d46-4be3-b3ce-f63a96e958d5-ssh-key-openstack-edpm-tls" (OuterVolumeSpecName: "ssh-key-openstack-edpm-tls") pod "b98cccb1-0d46-4be3-b3ce-f63a96e958d5" (UID: "b98cccb1-0d46-4be3-b3ce-f63a96e958d5"). InnerVolumeSpecName "ssh-key-openstack-edpm-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:08:49 crc kubenswrapper[4558]: I0120 18:08:49.501947 4558 reconciler_common.go:293] "Volume detached for volume \"custom-tls-dns-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b98cccb1-0d46-4be3-b3ce-f63a96e958d5-custom-tls-dns-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:08:49 crc kubenswrapper[4558]: I0120 18:08:49.501980 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l6b7t\" (UniqueName: \"kubernetes.io/projected/b98cccb1-0d46-4be3-b3ce-f63a96e958d5-kube-api-access-l6b7t\") on node \"crc\" DevicePath \"\"" Jan 20 18:08:49 crc kubenswrapper[4558]: I0120 18:08:49.501992 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b98cccb1-0d46-4be3-b3ce-f63a96e958d5-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:08:49 crc kubenswrapper[4558]: I0120 18:08:49.502002 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-tls\" (UniqueName: \"kubernetes.io/secret/b98cccb1-0d46-4be3-b3ce-f63a96e958d5-ssh-key-openstack-edpm-tls\") on node \"crc\" DevicePath \"\"" Jan 20 18:08:49 crc kubenswrapper[4558]: I0120 18:08:49.995036 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/custom-tls-dns-openstack-edpm-tls-ovrd-openstack-edpm-tls-mxq2k" event={"ID":"b98cccb1-0d46-4be3-b3ce-f63a96e958d5","Type":"ContainerDied","Data":"6f5e6ed90a1a795b614b57b7d91a1e962f0343918cd8485f95288e4309bf93e9"} Jan 20 18:08:49 crc kubenswrapper[4558]: I0120 18:08:49.995087 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6f5e6ed90a1a795b614b57b7d91a1e962f0343918cd8485f95288e4309bf93e9" Jan 20 18:08:49 crc kubenswrapper[4558]: I0120 18:08:49.995143 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/custom-tls-dns-openstack-edpm-tls-ovrd-openstack-edpm-tls-mxq2k" Jan 20 18:08:51 crc kubenswrapper[4558]: I0120 18:08:51.304947 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7"] Jan 20 18:08:51 crc kubenswrapper[4558]: E0120 18:08:51.305443 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b98cccb1-0d46-4be3-b3ce-f63a96e958d5" containerName="custom-tls-dns-openstack-edpm-tls-ovrd-openstack-edpm-tls" Jan 20 18:08:51 crc kubenswrapper[4558]: I0120 18:08:51.305456 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b98cccb1-0d46-4be3-b3ce-f63a96e958d5" containerName="custom-tls-dns-openstack-edpm-tls-ovrd-openstack-edpm-tls" Jan 20 18:08:51 crc kubenswrapper[4558]: I0120 18:08:51.305588 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b98cccb1-0d46-4be3-b3ce-f63a96e958d5" containerName="custom-tls-dns-openstack-edpm-tls-ovrd-openstack-edpm-tls" Jan 20 18:08:51 crc kubenswrapper[4558]: I0120 18:08:51.306032 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7" Jan 20 18:08:51 crc kubenswrapper[4558]: I0120 18:08:51.308011 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"openstack-edpm-tls-tls-dns-ips-default-certs-0" Jan 20 18:08:51 crc kubenswrapper[4558]: I0120 18:08:51.308153 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-openstack-edpm-tls" Jan 20 18:08:51 crc kubenswrapper[4558]: I0120 18:08:51.308239 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 18:08:51 crc kubenswrapper[4558]: I0120 18:08:51.308279 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"openstack-edpm-tls-custom-tls-dns-default-certs-0" Jan 20 18:08:51 crc kubenswrapper[4558]: I0120 18:08:51.308490 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"openstack-edpm-tls-dockercfg-bzbzv" Jan 20 18:08:51 crc kubenswrapper[4558]: I0120 18:08:51.308552 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"combined-ca-bundle" Jan 20 18:08:51 crc kubenswrapper[4558]: I0120 18:08:51.310506 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 18:08:51 crc kubenswrapper[4558]: I0120 18:08:51.317292 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7"] Jan 20 18:08:51 crc kubenswrapper[4558]: I0120 18:08:51.424853 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jvdx9\" (UniqueName: \"kubernetes.io/projected/cb720483-bd8a-47cb-978d-ecdcb6765105-kube-api-access-jvdx9\") pod \"install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7\" (UID: \"cb720483-bd8a-47cb-978d-ecdcb6765105\") " pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7" Jan 20 18:08:51 crc kubenswrapper[4558]: I0120 18:08:51.425129 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-tls-tls-dns-ips-default-certs-0\" (UniqueName: \"kubernetes.io/projected/cb720483-bd8a-47cb-978d-ecdcb6765105-openstack-edpm-tls-tls-dns-ips-default-certs-0\") pod \"install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7\" (UID: \"cb720483-bd8a-47cb-978d-ecdcb6765105\") " pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7" Jan 20 18:08:51 crc kubenswrapper[4558]: I0120 18:08:51.425159 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-tls\" (UniqueName: \"kubernetes.io/secret/cb720483-bd8a-47cb-978d-ecdcb6765105-ssh-key-openstack-edpm-tls\") pod \"install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7\" (UID: \"cb720483-bd8a-47cb-978d-ecdcb6765105\") " pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7" Jan 20 18:08:51 crc kubenswrapper[4558]: I0120 18:08:51.425206 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cb720483-bd8a-47cb-978d-ecdcb6765105-inventory\") pod \"install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7\" (UID: 
\"cb720483-bd8a-47cb-978d-ecdcb6765105\") " pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7" Jan 20 18:08:51 crc kubenswrapper[4558]: I0120 18:08:51.425238 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-tls-dns-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb720483-bd8a-47cb-978d-ecdcb6765105-custom-tls-dns-combined-ca-bundle\") pod \"install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7\" (UID: \"cb720483-bd8a-47cb-978d-ecdcb6765105\") " pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7" Jan 20 18:08:51 crc kubenswrapper[4558]: I0120 18:08:51.425266 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-dns-ips-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb720483-bd8a-47cb-978d-ecdcb6765105-tls-dns-ips-combined-ca-bundle\") pod \"install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7\" (UID: \"cb720483-bd8a-47cb-978d-ecdcb6765105\") " pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7" Jan 20 18:08:51 crc kubenswrapper[4558]: I0120 18:08:51.425283 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"install-certs-ovrd-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb720483-bd8a-47cb-978d-ecdcb6765105-install-certs-ovrd-combined-ca-bundle\") pod \"install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7\" (UID: \"cb720483-bd8a-47cb-978d-ecdcb6765105\") " pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7" Jan 20 18:08:51 crc kubenswrapper[4558]: I0120 18:08:51.425331 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-tls-custom-tls-dns-default-certs-0\" (UniqueName: \"kubernetes.io/projected/cb720483-bd8a-47cb-978d-ecdcb6765105-openstack-edpm-tls-custom-tls-dns-default-certs-0\") pod \"install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7\" (UID: \"cb720483-bd8a-47cb-978d-ecdcb6765105\") " pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7" Jan 20 18:08:51 crc kubenswrapper[4558]: I0120 18:08:51.527260 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cb720483-bd8a-47cb-978d-ecdcb6765105-inventory\") pod \"install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7\" (UID: \"cb720483-bd8a-47cb-978d-ecdcb6765105\") " pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7" Jan 20 18:08:51 crc kubenswrapper[4558]: I0120 18:08:51.527358 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-tls-dns-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb720483-bd8a-47cb-978d-ecdcb6765105-custom-tls-dns-combined-ca-bundle\") pod \"install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7\" (UID: \"cb720483-bd8a-47cb-978d-ecdcb6765105\") " pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7" Jan 20 18:08:51 crc kubenswrapper[4558]: I0120 18:08:51.527419 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-dns-ips-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb720483-bd8a-47cb-978d-ecdcb6765105-tls-dns-ips-combined-ca-bundle\") pod \"install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7\" (UID: 
\"cb720483-bd8a-47cb-978d-ecdcb6765105\") " pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7" Jan 20 18:08:51 crc kubenswrapper[4558]: I0120 18:08:51.527452 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"install-certs-ovrd-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb720483-bd8a-47cb-978d-ecdcb6765105-install-certs-ovrd-combined-ca-bundle\") pod \"install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7\" (UID: \"cb720483-bd8a-47cb-978d-ecdcb6765105\") " pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7" Jan 20 18:08:51 crc kubenswrapper[4558]: I0120 18:08:51.527477 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-tls-custom-tls-dns-default-certs-0\" (UniqueName: \"kubernetes.io/projected/cb720483-bd8a-47cb-978d-ecdcb6765105-openstack-edpm-tls-custom-tls-dns-default-certs-0\") pod \"install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7\" (UID: \"cb720483-bd8a-47cb-978d-ecdcb6765105\") " pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7" Jan 20 18:08:51 crc kubenswrapper[4558]: I0120 18:08:51.527664 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jvdx9\" (UniqueName: \"kubernetes.io/projected/cb720483-bd8a-47cb-978d-ecdcb6765105-kube-api-access-jvdx9\") pod \"install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7\" (UID: \"cb720483-bd8a-47cb-978d-ecdcb6765105\") " pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7" Jan 20 18:08:51 crc kubenswrapper[4558]: I0120 18:08:51.527706 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-tls-tls-dns-ips-default-certs-0\" (UniqueName: \"kubernetes.io/projected/cb720483-bd8a-47cb-978d-ecdcb6765105-openstack-edpm-tls-tls-dns-ips-default-certs-0\") pod \"install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7\" (UID: \"cb720483-bd8a-47cb-978d-ecdcb6765105\") " pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7" Jan 20 18:08:51 crc kubenswrapper[4558]: I0120 18:08:51.527754 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-tls\" (UniqueName: \"kubernetes.io/secret/cb720483-bd8a-47cb-978d-ecdcb6765105-ssh-key-openstack-edpm-tls\") pod \"install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7\" (UID: \"cb720483-bd8a-47cb-978d-ecdcb6765105\") " pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7" Jan 20 18:08:51 crc kubenswrapper[4558]: I0120 18:08:51.533395 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"install-certs-ovrd-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb720483-bd8a-47cb-978d-ecdcb6765105-install-certs-ovrd-combined-ca-bundle\") pod \"install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7\" (UID: \"cb720483-bd8a-47cb-978d-ecdcb6765105\") " pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7" Jan 20 18:08:51 crc kubenswrapper[4558]: I0120 18:08:51.533430 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-tls-custom-tls-dns-default-certs-0\" (UniqueName: \"kubernetes.io/projected/cb720483-bd8a-47cb-978d-ecdcb6765105-openstack-edpm-tls-custom-tls-dns-default-certs-0\") pod \"install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7\" (UID: 
\"cb720483-bd8a-47cb-978d-ecdcb6765105\") " pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7" Jan 20 18:08:51 crc kubenswrapper[4558]: I0120 18:08:51.533944 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-tls-tls-dns-ips-default-certs-0\" (UniqueName: \"kubernetes.io/projected/cb720483-bd8a-47cb-978d-ecdcb6765105-openstack-edpm-tls-tls-dns-ips-default-certs-0\") pod \"install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7\" (UID: \"cb720483-bd8a-47cb-978d-ecdcb6765105\") " pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7" Jan 20 18:08:51 crc kubenswrapper[4558]: I0120 18:08:51.534070 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cb720483-bd8a-47cb-978d-ecdcb6765105-inventory\") pod \"install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7\" (UID: \"cb720483-bd8a-47cb-978d-ecdcb6765105\") " pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7" Jan 20 18:08:51 crc kubenswrapper[4558]: I0120 18:08:51.534836 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-dns-ips-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb720483-bd8a-47cb-978d-ecdcb6765105-tls-dns-ips-combined-ca-bundle\") pod \"install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7\" (UID: \"cb720483-bd8a-47cb-978d-ecdcb6765105\") " pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7" Jan 20 18:08:51 crc kubenswrapper[4558]: I0120 18:08:51.535069 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-tls\" (UniqueName: \"kubernetes.io/secret/cb720483-bd8a-47cb-978d-ecdcb6765105-ssh-key-openstack-edpm-tls\") pod \"install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7\" (UID: \"cb720483-bd8a-47cb-978d-ecdcb6765105\") " pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7" Jan 20 18:08:51 crc kubenswrapper[4558]: I0120 18:08:51.535669 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-tls-dns-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb720483-bd8a-47cb-978d-ecdcb6765105-custom-tls-dns-combined-ca-bundle\") pod \"install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7\" (UID: \"cb720483-bd8a-47cb-978d-ecdcb6765105\") " pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7" Jan 20 18:08:51 crc kubenswrapper[4558]: I0120 18:08:51.545020 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jvdx9\" (UniqueName: \"kubernetes.io/projected/cb720483-bd8a-47cb-978d-ecdcb6765105-kube-api-access-jvdx9\") pod \"install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7\" (UID: \"cb720483-bd8a-47cb-978d-ecdcb6765105\") " pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7" Jan 20 18:08:51 crc kubenswrapper[4558]: I0120 18:08:51.619523 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7" Jan 20 18:08:52 crc kubenswrapper[4558]: I0120 18:08:52.007999 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7"] Jan 20 18:08:53 crc kubenswrapper[4558]: I0120 18:08:53.023676 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7" event={"ID":"cb720483-bd8a-47cb-978d-ecdcb6765105","Type":"ContainerStarted","Data":"623eb719746436bc27eddef3e09e0db86eb6f58e32b2675bb5567415add042a7"} Jan 20 18:08:53 crc kubenswrapper[4558]: I0120 18:08:53.024025 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7" event={"ID":"cb720483-bd8a-47cb-978d-ecdcb6765105","Type":"ContainerStarted","Data":"5a98f1548a2feaf136b910d2665944d50a69349b3f97e57edeb4b56dc7013e23"} Jan 20 18:08:53 crc kubenswrapper[4558]: I0120 18:08:53.047944 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7" podStartSLOduration=1.579448459 podStartE2EDuration="2.047928916s" podCreationTimestamp="2026-01-20 18:08:51 +0000 UTC" firstStartedPulling="2026-01-20 18:08:52.014309415 +0000 UTC m=+5225.774647392" lastFinishedPulling="2026-01-20 18:08:52.482789882 +0000 UTC m=+5226.243127849" observedRunningTime="2026-01-20 18:08:53.042667543 +0000 UTC m=+5226.803005510" watchObservedRunningTime="2026-01-20 18:08:53.047928916 +0000 UTC m=+5226.808266882" Jan 20 18:08:55 crc kubenswrapper[4558]: I0120 18:08:55.045132 4558 generic.go:334] "Generic (PLEG): container finished" podID="cb720483-bd8a-47cb-978d-ecdcb6765105" containerID="623eb719746436bc27eddef3e09e0db86eb6f58e32b2675bb5567415add042a7" exitCode=0 Jan 20 18:08:55 crc kubenswrapper[4558]: I0120 18:08:55.045231 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7" event={"ID":"cb720483-bd8a-47cb-978d-ecdcb6765105","Type":"ContainerDied","Data":"623eb719746436bc27eddef3e09e0db86eb6f58e32b2675bb5567415add042a7"} Jan 20 18:08:56 crc kubenswrapper[4558]: I0120 18:08:56.295817 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7" Jan 20 18:08:56 crc kubenswrapper[4558]: I0120 18:08:56.300367 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-tls\" (UniqueName: \"kubernetes.io/secret/cb720483-bd8a-47cb-978d-ecdcb6765105-ssh-key-openstack-edpm-tls\") pod \"cb720483-bd8a-47cb-978d-ecdcb6765105\" (UID: \"cb720483-bd8a-47cb-978d-ecdcb6765105\") " Jan 20 18:08:56 crc kubenswrapper[4558]: I0120 18:08:56.300474 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tls-dns-ips-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb720483-bd8a-47cb-978d-ecdcb6765105-tls-dns-ips-combined-ca-bundle\") pod \"cb720483-bd8a-47cb-978d-ecdcb6765105\" (UID: \"cb720483-bd8a-47cb-978d-ecdcb6765105\") " Jan 20 18:08:56 crc kubenswrapper[4558]: I0120 18:08:56.306356 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cb720483-bd8a-47cb-978d-ecdcb6765105-tls-dns-ips-combined-ca-bundle" (OuterVolumeSpecName: "tls-dns-ips-combined-ca-bundle") pod "cb720483-bd8a-47cb-978d-ecdcb6765105" (UID: "cb720483-bd8a-47cb-978d-ecdcb6765105"). InnerVolumeSpecName "tls-dns-ips-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:08:56 crc kubenswrapper[4558]: I0120 18:08:56.331845 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cb720483-bd8a-47cb-978d-ecdcb6765105-ssh-key-openstack-edpm-tls" (OuterVolumeSpecName: "ssh-key-openstack-edpm-tls") pod "cb720483-bd8a-47cb-978d-ecdcb6765105" (UID: "cb720483-bd8a-47cb-978d-ecdcb6765105"). InnerVolumeSpecName "ssh-key-openstack-edpm-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:08:56 crc kubenswrapper[4558]: I0120 18:08:56.402122 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"install-certs-ovrd-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb720483-bd8a-47cb-978d-ecdcb6765105-install-certs-ovrd-combined-ca-bundle\") pod \"cb720483-bd8a-47cb-978d-ecdcb6765105\" (UID: \"cb720483-bd8a-47cb-978d-ecdcb6765105\") " Jan 20 18:08:56 crc kubenswrapper[4558]: I0120 18:08:56.402207 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"custom-tls-dns-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb720483-bd8a-47cb-978d-ecdcb6765105-custom-tls-dns-combined-ca-bundle\") pod \"cb720483-bd8a-47cb-978d-ecdcb6765105\" (UID: \"cb720483-bd8a-47cb-978d-ecdcb6765105\") " Jan 20 18:08:56 crc kubenswrapper[4558]: I0120 18:08:56.402244 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jvdx9\" (UniqueName: \"kubernetes.io/projected/cb720483-bd8a-47cb-978d-ecdcb6765105-kube-api-access-jvdx9\") pod \"cb720483-bd8a-47cb-978d-ecdcb6765105\" (UID: \"cb720483-bd8a-47cb-978d-ecdcb6765105\") " Jan 20 18:08:56 crc kubenswrapper[4558]: I0120 18:08:56.402281 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-tls-tls-dns-ips-default-certs-0\" (UniqueName: \"kubernetes.io/projected/cb720483-bd8a-47cb-978d-ecdcb6765105-openstack-edpm-tls-tls-dns-ips-default-certs-0\") pod \"cb720483-bd8a-47cb-978d-ecdcb6765105\" (UID: \"cb720483-bd8a-47cb-978d-ecdcb6765105\") " Jan 20 18:08:56 crc kubenswrapper[4558]: I0120 18:08:56.402308 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"openstack-edpm-tls-custom-tls-dns-default-certs-0\" (UniqueName: \"kubernetes.io/projected/cb720483-bd8a-47cb-978d-ecdcb6765105-openstack-edpm-tls-custom-tls-dns-default-certs-0\") pod \"cb720483-bd8a-47cb-978d-ecdcb6765105\" (UID: \"cb720483-bd8a-47cb-978d-ecdcb6765105\") " Jan 20 18:08:56 crc kubenswrapper[4558]: I0120 18:08:56.402339 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cb720483-bd8a-47cb-978d-ecdcb6765105-inventory\") pod \"cb720483-bd8a-47cb-978d-ecdcb6765105\" (UID: \"cb720483-bd8a-47cb-978d-ecdcb6765105\") " Jan 20 18:08:56 crc kubenswrapper[4558]: I0120 18:08:56.402957 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-tls\" (UniqueName: \"kubernetes.io/secret/cb720483-bd8a-47cb-978d-ecdcb6765105-ssh-key-openstack-edpm-tls\") on node \"crc\" DevicePath \"\"" Jan 20 18:08:56 crc kubenswrapper[4558]: I0120 18:08:56.402976 4558 reconciler_common.go:293] "Volume detached for volume \"tls-dns-ips-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb720483-bd8a-47cb-978d-ecdcb6765105-tls-dns-ips-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:08:56 crc kubenswrapper[4558]: I0120 18:08:56.416936 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cb720483-bd8a-47cb-978d-ecdcb6765105-openstack-edpm-tls-custom-tls-dns-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-tls-custom-tls-dns-default-certs-0") pod "cb720483-bd8a-47cb-978d-ecdcb6765105" (UID: "cb720483-bd8a-47cb-978d-ecdcb6765105"). InnerVolumeSpecName "openstack-edpm-tls-custom-tls-dns-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:08:56 crc kubenswrapper[4558]: I0120 18:08:56.417200 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cb720483-bd8a-47cb-978d-ecdcb6765105-install-certs-ovrd-combined-ca-bundle" (OuterVolumeSpecName: "install-certs-ovrd-combined-ca-bundle") pod "cb720483-bd8a-47cb-978d-ecdcb6765105" (UID: "cb720483-bd8a-47cb-978d-ecdcb6765105"). InnerVolumeSpecName "install-certs-ovrd-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:08:56 crc kubenswrapper[4558]: I0120 18:08:56.420298 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cb720483-bd8a-47cb-978d-ecdcb6765105-openstack-edpm-tls-tls-dns-ips-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-tls-tls-dns-ips-default-certs-0") pod "cb720483-bd8a-47cb-978d-ecdcb6765105" (UID: "cb720483-bd8a-47cb-978d-ecdcb6765105"). InnerVolumeSpecName "openstack-edpm-tls-tls-dns-ips-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:08:56 crc kubenswrapper[4558]: I0120 18:08:56.425317 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cb720483-bd8a-47cb-978d-ecdcb6765105-kube-api-access-jvdx9" (OuterVolumeSpecName: "kube-api-access-jvdx9") pod "cb720483-bd8a-47cb-978d-ecdcb6765105" (UID: "cb720483-bd8a-47cb-978d-ecdcb6765105"). InnerVolumeSpecName "kube-api-access-jvdx9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:08:56 crc kubenswrapper[4558]: I0120 18:08:56.428340 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cb720483-bd8a-47cb-978d-ecdcb6765105-custom-tls-dns-combined-ca-bundle" (OuterVolumeSpecName: "custom-tls-dns-combined-ca-bundle") pod "cb720483-bd8a-47cb-978d-ecdcb6765105" (UID: "cb720483-bd8a-47cb-978d-ecdcb6765105"). InnerVolumeSpecName "custom-tls-dns-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:08:56 crc kubenswrapper[4558]: I0120 18:08:56.480277 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cb720483-bd8a-47cb-978d-ecdcb6765105-inventory" (OuterVolumeSpecName: "inventory") pod "cb720483-bd8a-47cb-978d-ecdcb6765105" (UID: "cb720483-bd8a-47cb-978d-ecdcb6765105"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:08:56 crc kubenswrapper[4558]: I0120 18:08:56.504641 4558 reconciler_common.go:293] "Volume detached for volume \"install-certs-ovrd-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb720483-bd8a-47cb-978d-ecdcb6765105-install-certs-ovrd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:08:56 crc kubenswrapper[4558]: I0120 18:08:56.504691 4558 reconciler_common.go:293] "Volume detached for volume \"custom-tls-dns-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb720483-bd8a-47cb-978d-ecdcb6765105-custom-tls-dns-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:08:56 crc kubenswrapper[4558]: I0120 18:08:56.504706 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jvdx9\" (UniqueName: \"kubernetes.io/projected/cb720483-bd8a-47cb-978d-ecdcb6765105-kube-api-access-jvdx9\") on node \"crc\" DevicePath \"\"" Jan 20 18:08:56 crc kubenswrapper[4558]: I0120 18:08:56.504719 4558 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-tls-tls-dns-ips-default-certs-0\" (UniqueName: \"kubernetes.io/projected/cb720483-bd8a-47cb-978d-ecdcb6765105-openstack-edpm-tls-tls-dns-ips-default-certs-0\") on node \"crc\" DevicePath \"\"" Jan 20 18:08:56 crc kubenswrapper[4558]: I0120 18:08:56.504734 4558 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-tls-custom-tls-dns-default-certs-0\" (UniqueName: \"kubernetes.io/projected/cb720483-bd8a-47cb-978d-ecdcb6765105-openstack-edpm-tls-custom-tls-dns-default-certs-0\") on node \"crc\" DevicePath \"\"" Jan 20 18:08:56 crc kubenswrapper[4558]: I0120 18:08:56.504746 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cb720483-bd8a-47cb-978d-ecdcb6765105-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:08:56 crc kubenswrapper[4558]: I0120 18:08:56.792377 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-p6bsn"] Jan 20 18:08:56 crc kubenswrapper[4558]: E0120 18:08:56.792788 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb720483-bd8a-47cb-978d-ecdcb6765105" containerName="install-certs-ovrd-certs-refresh-openstack-edpm-tls" Jan 20 18:08:56 crc kubenswrapper[4558]: I0120 18:08:56.792808 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb720483-bd8a-47cb-978d-ecdcb6765105" containerName="install-certs-ovrd-certs-refresh-openstack-edpm-tls" Jan 20 18:08:56 crc kubenswrapper[4558]: I0120 18:08:56.793029 4558 
memory_manager.go:354] "RemoveStaleState removing state" podUID="cb720483-bd8a-47cb-978d-ecdcb6765105" containerName="install-certs-ovrd-certs-refresh-openstack-edpm-tls" Jan 20 18:08:56 crc kubenswrapper[4558]: I0120 18:08:56.793734 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-p6bsn" Jan 20 18:08:56 crc kubenswrapper[4558]: I0120 18:08:56.807734 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jvdx9\" (UniqueName: \"kubernetes.io/projected/6fb49239-c076-46dd-b4a3-dcff5212b012-kube-api-access-jvdx9\") pod \"install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-p6bsn\" (UID: \"6fb49239-c076-46dd-b4a3-dcff5212b012\") " pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-p6bsn" Jan 20 18:08:56 crc kubenswrapper[4558]: I0120 18:08:56.807837 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-tls-tls-dns-ips-default-certs-0\" (UniqueName: \"kubernetes.io/projected/6fb49239-c076-46dd-b4a3-dcff5212b012-openstack-edpm-tls-tls-dns-ips-default-certs-0\") pod \"install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-p6bsn\" (UID: \"6fb49239-c076-46dd-b4a3-dcff5212b012\") " pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-p6bsn" Jan 20 18:08:56 crc kubenswrapper[4558]: I0120 18:08:56.807931 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-dns-ips-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fb49239-c076-46dd-b4a3-dcff5212b012-tls-dns-ips-combined-ca-bundle\") pod \"install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-p6bsn\" (UID: \"6fb49239-c076-46dd-b4a3-dcff5212b012\") " pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-p6bsn" Jan 20 18:08:56 crc kubenswrapper[4558]: I0120 18:08:56.807965 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-tls-custom-tls-dns-default-certs-0\" (UniqueName: \"kubernetes.io/projected/6fb49239-c076-46dd-b4a3-dcff5212b012-openstack-edpm-tls-custom-tls-dns-default-certs-0\") pod \"install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-p6bsn\" (UID: \"6fb49239-c076-46dd-b4a3-dcff5212b012\") " pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-p6bsn" Jan 20 18:08:56 crc kubenswrapper[4558]: I0120 18:08:56.808038 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-tls-dns-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fb49239-c076-46dd-b4a3-dcff5212b012-custom-tls-dns-combined-ca-bundle\") pod \"install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-p6bsn\" (UID: \"6fb49239-c076-46dd-b4a3-dcff5212b012\") " pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-p6bsn" Jan 20 18:08:56 crc kubenswrapper[4558]: I0120 18:08:56.808080 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6fb49239-c076-46dd-b4a3-dcff5212b012-inventory\") pod \"install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-p6bsn\" (UID: \"6fb49239-c076-46dd-b4a3-dcff5212b012\") " 
pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-p6bsn" Jan 20 18:08:56 crc kubenswrapper[4558]: I0120 18:08:56.808155 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"install-certs-ovrd-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fb49239-c076-46dd-b4a3-dcff5212b012-install-certs-ovrd-combined-ca-bundle\") pod \"install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-p6bsn\" (UID: \"6fb49239-c076-46dd-b4a3-dcff5212b012\") " pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-p6bsn" Jan 20 18:08:56 crc kubenswrapper[4558]: I0120 18:08:56.808351 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-tls\" (UniqueName: \"kubernetes.io/secret/6fb49239-c076-46dd-b4a3-dcff5212b012-ssh-key-openstack-edpm-tls\") pod \"install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-p6bsn\" (UID: \"6fb49239-c076-46dd-b4a3-dcff5212b012\") " pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-p6bsn" Jan 20 18:08:56 crc kubenswrapper[4558]: I0120 18:08:56.842182 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-p6bsn"] Jan 20 18:08:56 crc kubenswrapper[4558]: I0120 18:08:56.910370 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-tls-tls-dns-ips-default-certs-0\" (UniqueName: \"kubernetes.io/projected/6fb49239-c076-46dd-b4a3-dcff5212b012-openstack-edpm-tls-tls-dns-ips-default-certs-0\") pod \"install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-p6bsn\" (UID: \"6fb49239-c076-46dd-b4a3-dcff5212b012\") " pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-p6bsn" Jan 20 18:08:56 crc kubenswrapper[4558]: I0120 18:08:56.910871 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-dns-ips-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fb49239-c076-46dd-b4a3-dcff5212b012-tls-dns-ips-combined-ca-bundle\") pod \"install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-p6bsn\" (UID: \"6fb49239-c076-46dd-b4a3-dcff5212b012\") " pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-p6bsn" Jan 20 18:08:56 crc kubenswrapper[4558]: I0120 18:08:56.910921 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-tls-custom-tls-dns-default-certs-0\" (UniqueName: \"kubernetes.io/projected/6fb49239-c076-46dd-b4a3-dcff5212b012-openstack-edpm-tls-custom-tls-dns-default-certs-0\") pod \"install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-p6bsn\" (UID: \"6fb49239-c076-46dd-b4a3-dcff5212b012\") " pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-p6bsn" Jan 20 18:08:56 crc kubenswrapper[4558]: I0120 18:08:56.911013 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-tls-dns-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fb49239-c076-46dd-b4a3-dcff5212b012-custom-tls-dns-combined-ca-bundle\") pod \"install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-p6bsn\" (UID: \"6fb49239-c076-46dd-b4a3-dcff5212b012\") " pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-p6bsn" Jan 20 18:08:56 crc kubenswrapper[4558]: 
I0120 18:08:56.911054 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6fb49239-c076-46dd-b4a3-dcff5212b012-inventory\") pod \"install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-p6bsn\" (UID: \"6fb49239-c076-46dd-b4a3-dcff5212b012\") " pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-p6bsn" Jan 20 18:08:56 crc kubenswrapper[4558]: I0120 18:08:56.911086 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"install-certs-ovrd-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fb49239-c076-46dd-b4a3-dcff5212b012-install-certs-ovrd-combined-ca-bundle\") pod \"install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-p6bsn\" (UID: \"6fb49239-c076-46dd-b4a3-dcff5212b012\") " pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-p6bsn" Jan 20 18:08:56 crc kubenswrapper[4558]: I0120 18:08:56.911133 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-tls\" (UniqueName: \"kubernetes.io/secret/6fb49239-c076-46dd-b4a3-dcff5212b012-ssh-key-openstack-edpm-tls\") pod \"install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-p6bsn\" (UID: \"6fb49239-c076-46dd-b4a3-dcff5212b012\") " pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-p6bsn" Jan 20 18:08:56 crc kubenswrapper[4558]: I0120 18:08:56.911238 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jvdx9\" (UniqueName: \"kubernetes.io/projected/6fb49239-c076-46dd-b4a3-dcff5212b012-kube-api-access-jvdx9\") pod \"install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-p6bsn\" (UID: \"6fb49239-c076-46dd-b4a3-dcff5212b012\") " pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-p6bsn" Jan 20 18:08:56 crc kubenswrapper[4558]: I0120 18:08:56.915458 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-dns-ips-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fb49239-c076-46dd-b4a3-dcff5212b012-tls-dns-ips-combined-ca-bundle\") pod \"install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-p6bsn\" (UID: \"6fb49239-c076-46dd-b4a3-dcff5212b012\") " pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-p6bsn" Jan 20 18:08:56 crc kubenswrapper[4558]: I0120 18:08:56.915712 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-tls\" (UniqueName: \"kubernetes.io/secret/6fb49239-c076-46dd-b4a3-dcff5212b012-ssh-key-openstack-edpm-tls\") pod \"install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-p6bsn\" (UID: \"6fb49239-c076-46dd-b4a3-dcff5212b012\") " pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-p6bsn" Jan 20 18:08:56 crc kubenswrapper[4558]: I0120 18:08:56.915754 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-tls-dns-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fb49239-c076-46dd-b4a3-dcff5212b012-custom-tls-dns-combined-ca-bundle\") pod \"install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-p6bsn\" (UID: \"6fb49239-c076-46dd-b4a3-dcff5212b012\") " pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-p6bsn" Jan 20 18:08:56 crc kubenswrapper[4558]: I0120 18:08:56.916112 4558 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"install-certs-ovrd-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fb49239-c076-46dd-b4a3-dcff5212b012-install-certs-ovrd-combined-ca-bundle\") pod \"install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-p6bsn\" (UID: \"6fb49239-c076-46dd-b4a3-dcff5212b012\") " pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-p6bsn" Jan 20 18:08:56 crc kubenswrapper[4558]: I0120 18:08:56.916156 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-tls-custom-tls-dns-default-certs-0\" (UniqueName: \"kubernetes.io/projected/6fb49239-c076-46dd-b4a3-dcff5212b012-openstack-edpm-tls-custom-tls-dns-default-certs-0\") pod \"install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-p6bsn\" (UID: \"6fb49239-c076-46dd-b4a3-dcff5212b012\") " pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-p6bsn" Jan 20 18:08:56 crc kubenswrapper[4558]: I0120 18:08:56.916146 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6fb49239-c076-46dd-b4a3-dcff5212b012-inventory\") pod \"install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-p6bsn\" (UID: \"6fb49239-c076-46dd-b4a3-dcff5212b012\") " pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-p6bsn" Jan 20 18:08:56 crc kubenswrapper[4558]: I0120 18:08:56.916882 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-tls-tls-dns-ips-default-certs-0\" (UniqueName: \"kubernetes.io/projected/6fb49239-c076-46dd-b4a3-dcff5212b012-openstack-edpm-tls-tls-dns-ips-default-certs-0\") pod \"install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-p6bsn\" (UID: \"6fb49239-c076-46dd-b4a3-dcff5212b012\") " pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-p6bsn" Jan 20 18:08:56 crc kubenswrapper[4558]: I0120 18:08:56.925032 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jvdx9\" (UniqueName: \"kubernetes.io/projected/6fb49239-c076-46dd-b4a3-dcff5212b012-kube-api-access-jvdx9\") pod \"install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-p6bsn\" (UID: \"6fb49239-c076-46dd-b4a3-dcff5212b012\") " pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-p6bsn" Jan 20 18:08:57 crc kubenswrapper[4558]: I0120 18:08:57.066260 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7" event={"ID":"cb720483-bd8a-47cb-978d-ecdcb6765105","Type":"ContainerDied","Data":"5a98f1548a2feaf136b910d2665944d50a69349b3f97e57edeb4b56dc7013e23"} Jan 20 18:08:57 crc kubenswrapper[4558]: I0120 18:08:57.066309 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5a98f1548a2feaf136b910d2665944d50a69349b3f97e57edeb4b56dc7013e23" Jan 20 18:08:57 crc kubenswrapper[4558]: I0120 18:08:57.066335 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7" Jan 20 18:08:57 crc kubenswrapper[4558]: I0120 18:08:57.112256 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-p6bsn" Jan 20 18:08:57 crc kubenswrapper[4558]: E0120 18:08:57.112389 4558 kubelet_pods.go:538] "Hostname for pod was too long, truncated it" podName="install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-p6bsn" hostnameMaxLen=63 truncatedHostname="install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug" Jan 20 18:08:57 crc kubenswrapper[4558]: I0120 18:08:57.130189 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/tls-dns-ips-certs-refresh-openstack-edpm-tls-cg4jg"] Jan 20 18:08:57 crc kubenswrapper[4558]: I0120 18:08:57.131868 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/tls-dns-ips-certs-refresh-openstack-edpm-tls-cg4jg" Jan 20 18:08:57 crc kubenswrapper[4558]: I0120 18:08:57.138974 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/tls-dns-ips-certs-refresh-openstack-edpm-tls-cg4jg"] Jan 20 18:08:57 crc kubenswrapper[4558]: I0120 18:08:57.213843 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-dns-ips-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aeae9158-dc5c-42e5-8741-1589bb842264-tls-dns-ips-combined-ca-bundle\") pod \"tls-dns-ips-certs-refresh-openstack-edpm-tls-cg4jg\" (UID: \"aeae9158-dc5c-42e5-8741-1589bb842264\") " pod="openstack-kuttl-tests/tls-dns-ips-certs-refresh-openstack-edpm-tls-cg4jg" Jan 20 18:08:57 crc kubenswrapper[4558]: I0120 18:08:57.213895 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aeae9158-dc5c-42e5-8741-1589bb842264-inventory\") pod \"tls-dns-ips-certs-refresh-openstack-edpm-tls-cg4jg\" (UID: \"aeae9158-dc5c-42e5-8741-1589bb842264\") " pod="openstack-kuttl-tests/tls-dns-ips-certs-refresh-openstack-edpm-tls-cg4jg" Jan 20 18:08:57 crc kubenswrapper[4558]: I0120 18:08:57.213923 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-tls\" (UniqueName: \"kubernetes.io/secret/aeae9158-dc5c-42e5-8741-1589bb842264-ssh-key-openstack-edpm-tls\") pod \"tls-dns-ips-certs-refresh-openstack-edpm-tls-cg4jg\" (UID: \"aeae9158-dc5c-42e5-8741-1589bb842264\") " pod="openstack-kuttl-tests/tls-dns-ips-certs-refresh-openstack-edpm-tls-cg4jg" Jan 20 18:08:57 crc kubenswrapper[4558]: I0120 18:08:57.213979 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xg6bx\" (UniqueName: \"kubernetes.io/projected/aeae9158-dc5c-42e5-8741-1589bb842264-kube-api-access-xg6bx\") pod \"tls-dns-ips-certs-refresh-openstack-edpm-tls-cg4jg\" (UID: \"aeae9158-dc5c-42e5-8741-1589bb842264\") " pod="openstack-kuttl-tests/tls-dns-ips-certs-refresh-openstack-edpm-tls-cg4jg" Jan 20 18:08:57 crc kubenswrapper[4558]: I0120 18:08:57.316212 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-dns-ips-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aeae9158-dc5c-42e5-8741-1589bb842264-tls-dns-ips-combined-ca-bundle\") pod \"tls-dns-ips-certs-refresh-openstack-edpm-tls-cg4jg\" (UID: \"aeae9158-dc5c-42e5-8741-1589bb842264\") " pod="openstack-kuttl-tests/tls-dns-ips-certs-refresh-openstack-edpm-tls-cg4jg" Jan 20 18:08:57 crc kubenswrapper[4558]: I0120 18:08:57.316529 4558 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aeae9158-dc5c-42e5-8741-1589bb842264-inventory\") pod \"tls-dns-ips-certs-refresh-openstack-edpm-tls-cg4jg\" (UID: \"aeae9158-dc5c-42e5-8741-1589bb842264\") " pod="openstack-kuttl-tests/tls-dns-ips-certs-refresh-openstack-edpm-tls-cg4jg" Jan 20 18:08:57 crc kubenswrapper[4558]: I0120 18:08:57.316570 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-tls\" (UniqueName: \"kubernetes.io/secret/aeae9158-dc5c-42e5-8741-1589bb842264-ssh-key-openstack-edpm-tls\") pod \"tls-dns-ips-certs-refresh-openstack-edpm-tls-cg4jg\" (UID: \"aeae9158-dc5c-42e5-8741-1589bb842264\") " pod="openstack-kuttl-tests/tls-dns-ips-certs-refresh-openstack-edpm-tls-cg4jg" Jan 20 18:08:57 crc kubenswrapper[4558]: I0120 18:08:57.316617 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xg6bx\" (UniqueName: \"kubernetes.io/projected/aeae9158-dc5c-42e5-8741-1589bb842264-kube-api-access-xg6bx\") pod \"tls-dns-ips-certs-refresh-openstack-edpm-tls-cg4jg\" (UID: \"aeae9158-dc5c-42e5-8741-1589bb842264\") " pod="openstack-kuttl-tests/tls-dns-ips-certs-refresh-openstack-edpm-tls-cg4jg" Jan 20 18:08:57 crc kubenswrapper[4558]: I0120 18:08:57.322215 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-dns-ips-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aeae9158-dc5c-42e5-8741-1589bb842264-tls-dns-ips-combined-ca-bundle\") pod \"tls-dns-ips-certs-refresh-openstack-edpm-tls-cg4jg\" (UID: \"aeae9158-dc5c-42e5-8741-1589bb842264\") " pod="openstack-kuttl-tests/tls-dns-ips-certs-refresh-openstack-edpm-tls-cg4jg" Jan 20 18:08:57 crc kubenswrapper[4558]: I0120 18:08:57.322250 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-tls\" (UniqueName: \"kubernetes.io/secret/aeae9158-dc5c-42e5-8741-1589bb842264-ssh-key-openstack-edpm-tls\") pod \"tls-dns-ips-certs-refresh-openstack-edpm-tls-cg4jg\" (UID: \"aeae9158-dc5c-42e5-8741-1589bb842264\") " pod="openstack-kuttl-tests/tls-dns-ips-certs-refresh-openstack-edpm-tls-cg4jg" Jan 20 18:08:57 crc kubenswrapper[4558]: I0120 18:08:57.322794 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aeae9158-dc5c-42e5-8741-1589bb842264-inventory\") pod \"tls-dns-ips-certs-refresh-openstack-edpm-tls-cg4jg\" (UID: \"aeae9158-dc5c-42e5-8741-1589bb842264\") " pod="openstack-kuttl-tests/tls-dns-ips-certs-refresh-openstack-edpm-tls-cg4jg" Jan 20 18:08:57 crc kubenswrapper[4558]: I0120 18:08:57.335957 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xg6bx\" (UniqueName: \"kubernetes.io/projected/aeae9158-dc5c-42e5-8741-1589bb842264-kube-api-access-xg6bx\") pod \"tls-dns-ips-certs-refresh-openstack-edpm-tls-cg4jg\" (UID: \"aeae9158-dc5c-42e5-8741-1589bb842264\") " pod="openstack-kuttl-tests/tls-dns-ips-certs-refresh-openstack-edpm-tls-cg4jg" Jan 20 18:08:57 crc kubenswrapper[4558]: I0120 18:08:57.449882 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/tls-dns-ips-certs-refresh-openstack-edpm-tls-cg4jg" Jan 20 18:08:57 crc kubenswrapper[4558]: I0120 18:08:57.494370 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-p6bsn"] Jan 20 18:08:57 crc kubenswrapper[4558]: W0120 18:08:57.508385 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6fb49239_c076_46dd_b4a3_dcff5212b012.slice/crio-8f2e79309c89e31bb256f967b4e31df83bec6251b2865a3d03e2971e4af1f7ed WatchSource:0}: Error finding container 8f2e79309c89e31bb256f967b4e31df83bec6251b2865a3d03e2971e4af1f7ed: Status 404 returned error can't find the container with id 8f2e79309c89e31bb256f967b4e31df83bec6251b2865a3d03e2971e4af1f7ed Jan 20 18:08:57 crc kubenswrapper[4558]: E0120 18:08:57.509735 4558 kubelet_pods.go:538] "Hostname for pod was too long, truncated it" podName="install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-p6bsn" hostnameMaxLen=63 truncatedHostname="install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug" Jan 20 18:08:57 crc kubenswrapper[4558]: W0120 18:08:57.863589 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaeae9158_dc5c_42e5_8741_1589bb842264.slice/crio-e44c70f2c7b39d4768cc3711039d0c329589e524e3a2fa8601d5264f7041807d WatchSource:0}: Error finding container e44c70f2c7b39d4768cc3711039d0c329589e524e3a2fa8601d5264f7041807d: Status 404 returned error can't find the container with id e44c70f2c7b39d4768cc3711039d0c329589e524e3a2fa8601d5264f7041807d Jan 20 18:08:57 crc kubenswrapper[4558]: I0120 18:08:57.864064 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/tls-dns-ips-certs-refresh-openstack-edpm-tls-cg4jg"] Jan 20 18:08:58 crc kubenswrapper[4558]: E0120 18:08:58.002206 4558 kubelet_pods.go:538] "Hostname for pod was too long, truncated it" podName="install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-p6bsn" hostnameMaxLen=63 truncatedHostname="install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug" Jan 20 18:08:58 crc kubenswrapper[4558]: I0120 18:08:58.077369 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-p6bsn" event={"ID":"6fb49239-c076-46dd-b4a3-dcff5212b012","Type":"ContainerStarted","Data":"8f2e79309c89e31bb256f967b4e31df83bec6251b2865a3d03e2971e4af1f7ed"} Jan 20 18:08:58 crc kubenswrapper[4558]: I0120 18:08:58.078832 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/tls-dns-ips-certs-refresh-openstack-edpm-tls-cg4jg" event={"ID":"aeae9158-dc5c-42e5-8741-1589bb842264","Type":"ContainerStarted","Data":"e44c70f2c7b39d4768cc3711039d0c329589e524e3a2fa8601d5264f7041807d"} Jan 20 18:08:59 crc kubenswrapper[4558]: I0120 18:08:59.090868 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/tls-dns-ips-certs-refresh-openstack-edpm-tls-cg4jg" event={"ID":"aeae9158-dc5c-42e5-8741-1589bb842264","Type":"ContainerStarted","Data":"79e7dba817d9ddcf1ccde8404bcb21d393c65f2484740830c37c7ba4e867d889"} Jan 20 18:08:59 crc kubenswrapper[4558]: I0120 18:08:59.094934 4558 generic.go:334] "Generic (PLEG): container finished" podID="6fb49239-c076-46dd-b4a3-dcff5212b012" containerID="056f1e582b3403bb364141ad34d8d708eb6772e943e462eb1e4e242b2438c2fc" 
exitCode=0 Jan 20 18:08:59 crc kubenswrapper[4558]: I0120 18:08:59.095066 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-p6bsn" event={"ID":"6fb49239-c076-46dd-b4a3-dcff5212b012","Type":"ContainerDied","Data":"056f1e582b3403bb364141ad34d8d708eb6772e943e462eb1e4e242b2438c2fc"} Jan 20 18:08:59 crc kubenswrapper[4558]: I0120 18:08:59.113252 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/tls-dns-ips-certs-refresh-openstack-edpm-tls-cg4jg" podStartSLOduration=1.579657753 podStartE2EDuration="2.113226057s" podCreationTimestamp="2026-01-20 18:08:57 +0000 UTC" firstStartedPulling="2026-01-20 18:08:57.865909645 +0000 UTC m=+5231.626247612" lastFinishedPulling="2026-01-20 18:08:58.399477948 +0000 UTC m=+5232.159815916" observedRunningTime="2026-01-20 18:08:59.108188475 +0000 UTC m=+5232.868526442" watchObservedRunningTime="2026-01-20 18:08:59.113226057 +0000 UTC m=+5232.873564024" Jan 20 18:08:59 crc kubenswrapper[4558]: I0120 18:08:59.157715 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-p6bsn"] Jan 20 18:08:59 crc kubenswrapper[4558]: I0120 18:08:59.162267 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-p6bsn"] Jan 20 18:08:59 crc kubenswrapper[4558]: I0120 18:08:59.409994 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-wnm77"] Jan 20 18:08:59 crc kubenswrapper[4558]: E0120 18:08:59.410597 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6fb49239-c076-46dd-b4a3-dcff5212b012" containerName="install-certs-ovrd-certs-refresh-openstack-edpm-tls" Jan 20 18:08:59 crc kubenswrapper[4558]: I0120 18:08:59.410685 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6fb49239-c076-46dd-b4a3-dcff5212b012" containerName="install-certs-ovrd-certs-refresh-openstack-edpm-tls" Jan 20 18:08:59 crc kubenswrapper[4558]: I0120 18:08:59.410934 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="6fb49239-c076-46dd-b4a3-dcff5212b012" containerName="install-certs-ovrd-certs-refresh-openstack-edpm-tls" Jan 20 18:08:59 crc kubenswrapper[4558]: I0120 18:08:59.411637 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-wnm77" Jan 20 18:08:59 crc kubenswrapper[4558]: I0120 18:08:59.423926 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-wnm77"] Jan 20 18:08:59 crc kubenswrapper[4558]: I0120 18:08:59.554791 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-tls-custom-tls-dns-default-certs-0\" (UniqueName: \"kubernetes.io/projected/b259b684-2b3d-4594-b2a9-5f77958f1c21-openstack-edpm-tls-custom-tls-dns-default-certs-0\") pod \"install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-wnm77\" (UID: \"b259b684-2b3d-4594-b2a9-5f77958f1c21\") " pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-wnm77" Jan 20 18:08:59 crc kubenswrapper[4558]: I0120 18:08:59.555038 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-tls\" (UniqueName: \"kubernetes.io/secret/b259b684-2b3d-4594-b2a9-5f77958f1c21-ssh-key-openstack-edpm-tls\") pod \"install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-wnm77\" (UID: \"b259b684-2b3d-4594-b2a9-5f77958f1c21\") " pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-wnm77" Jan 20 18:08:59 crc kubenswrapper[4558]: I0120 18:08:59.555180 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-tls-tls-dns-ips-default-certs-0\" (UniqueName: \"kubernetes.io/projected/b259b684-2b3d-4594-b2a9-5f77958f1c21-openstack-edpm-tls-tls-dns-ips-default-certs-0\") pod \"install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-wnm77\" (UID: \"b259b684-2b3d-4594-b2a9-5f77958f1c21\") " pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-wnm77" Jan 20 18:08:59 crc kubenswrapper[4558]: I0120 18:08:59.555284 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"install-certs-ovrd-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b259b684-2b3d-4594-b2a9-5f77958f1c21-install-certs-ovrd-combined-ca-bundle\") pod \"install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-wnm77\" (UID: \"b259b684-2b3d-4594-b2a9-5f77958f1c21\") " pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-wnm77" Jan 20 18:08:59 crc kubenswrapper[4558]: I0120 18:08:59.555400 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-tls-dns-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b259b684-2b3d-4594-b2a9-5f77958f1c21-custom-tls-dns-combined-ca-bundle\") pod \"install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-wnm77\" (UID: \"b259b684-2b3d-4594-b2a9-5f77958f1c21\") " pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-wnm77" Jan 20 18:08:59 crc kubenswrapper[4558]: I0120 18:08:59.555619 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-dns-ips-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b259b684-2b3d-4594-b2a9-5f77958f1c21-tls-dns-ips-combined-ca-bundle\") pod \"install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-wnm77\" (UID: \"b259b684-2b3d-4594-b2a9-5f77958f1c21\") " 
pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-wnm77" Jan 20 18:08:59 crc kubenswrapper[4558]: I0120 18:08:59.555726 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jvdx9\" (UniqueName: \"kubernetes.io/projected/b259b684-2b3d-4594-b2a9-5f77958f1c21-kube-api-access-jvdx9\") pod \"install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-wnm77\" (UID: \"b259b684-2b3d-4594-b2a9-5f77958f1c21\") " pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-wnm77" Jan 20 18:08:59 crc kubenswrapper[4558]: I0120 18:08:59.555836 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b259b684-2b3d-4594-b2a9-5f77958f1c21-inventory\") pod \"install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-wnm77\" (UID: \"b259b684-2b3d-4594-b2a9-5f77958f1c21\") " pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-wnm77" Jan 20 18:08:59 crc kubenswrapper[4558]: I0120 18:08:59.657151 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-tls-custom-tls-dns-default-certs-0\" (UniqueName: \"kubernetes.io/projected/b259b684-2b3d-4594-b2a9-5f77958f1c21-openstack-edpm-tls-custom-tls-dns-default-certs-0\") pod \"install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-wnm77\" (UID: \"b259b684-2b3d-4594-b2a9-5f77958f1c21\") " pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-wnm77" Jan 20 18:08:59 crc kubenswrapper[4558]: I0120 18:08:59.657217 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-tls\" (UniqueName: \"kubernetes.io/secret/b259b684-2b3d-4594-b2a9-5f77958f1c21-ssh-key-openstack-edpm-tls\") pod \"install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-wnm77\" (UID: \"b259b684-2b3d-4594-b2a9-5f77958f1c21\") " pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-wnm77" Jan 20 18:08:59 crc kubenswrapper[4558]: I0120 18:08:59.657253 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-tls-tls-dns-ips-default-certs-0\" (UniqueName: \"kubernetes.io/projected/b259b684-2b3d-4594-b2a9-5f77958f1c21-openstack-edpm-tls-tls-dns-ips-default-certs-0\") pod \"install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-wnm77\" (UID: \"b259b684-2b3d-4594-b2a9-5f77958f1c21\") " pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-wnm77" Jan 20 18:08:59 crc kubenswrapper[4558]: I0120 18:08:59.657281 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"install-certs-ovrd-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b259b684-2b3d-4594-b2a9-5f77958f1c21-install-certs-ovrd-combined-ca-bundle\") pod \"install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-wnm77\" (UID: \"b259b684-2b3d-4594-b2a9-5f77958f1c21\") " pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-wnm77" Jan 20 18:08:59 crc kubenswrapper[4558]: I0120 18:08:59.657326 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-tls-dns-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b259b684-2b3d-4594-b2a9-5f77958f1c21-custom-tls-dns-combined-ca-bundle\") pod 
\"install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-wnm77\" (UID: \"b259b684-2b3d-4594-b2a9-5f77958f1c21\") " pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-wnm77" Jan 20 18:08:59 crc kubenswrapper[4558]: I0120 18:08:59.657348 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-dns-ips-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b259b684-2b3d-4594-b2a9-5f77958f1c21-tls-dns-ips-combined-ca-bundle\") pod \"install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-wnm77\" (UID: \"b259b684-2b3d-4594-b2a9-5f77958f1c21\") " pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-wnm77" Jan 20 18:08:59 crc kubenswrapper[4558]: I0120 18:08:59.657380 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jvdx9\" (UniqueName: \"kubernetes.io/projected/b259b684-2b3d-4594-b2a9-5f77958f1c21-kube-api-access-jvdx9\") pod \"install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-wnm77\" (UID: \"b259b684-2b3d-4594-b2a9-5f77958f1c21\") " pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-wnm77" Jan 20 18:08:59 crc kubenswrapper[4558]: I0120 18:08:59.657405 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b259b684-2b3d-4594-b2a9-5f77958f1c21-inventory\") pod \"install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-wnm77\" (UID: \"b259b684-2b3d-4594-b2a9-5f77958f1c21\") " pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-wnm77" Jan 20 18:08:59 crc kubenswrapper[4558]: I0120 18:08:59.664352 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-tls\" (UniqueName: \"kubernetes.io/secret/b259b684-2b3d-4594-b2a9-5f77958f1c21-ssh-key-openstack-edpm-tls\") pod \"install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-wnm77\" (UID: \"b259b684-2b3d-4594-b2a9-5f77958f1c21\") " pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-wnm77" Jan 20 18:08:59 crc kubenswrapper[4558]: I0120 18:08:59.664590 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"install-certs-ovrd-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b259b684-2b3d-4594-b2a9-5f77958f1c21-install-certs-ovrd-combined-ca-bundle\") pod \"install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-wnm77\" (UID: \"b259b684-2b3d-4594-b2a9-5f77958f1c21\") " pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-wnm77" Jan 20 18:08:59 crc kubenswrapper[4558]: I0120 18:08:59.665031 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-dns-ips-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b259b684-2b3d-4594-b2a9-5f77958f1c21-tls-dns-ips-combined-ca-bundle\") pod \"install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-wnm77\" (UID: \"b259b684-2b3d-4594-b2a9-5f77958f1c21\") " pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-wnm77" Jan 20 18:08:59 crc kubenswrapper[4558]: I0120 18:08:59.666477 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-tls-tls-dns-ips-default-certs-0\" (UniqueName: \"kubernetes.io/projected/b259b684-2b3d-4594-b2a9-5f77958f1c21-openstack-edpm-tls-tls-dns-ips-default-certs-0\") pod 
\"install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-wnm77\" (UID: \"b259b684-2b3d-4594-b2a9-5f77958f1c21\") " pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-wnm77" Jan 20 18:08:59 crc kubenswrapper[4558]: I0120 18:08:59.666700 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-tls-custom-tls-dns-default-certs-0\" (UniqueName: \"kubernetes.io/projected/b259b684-2b3d-4594-b2a9-5f77958f1c21-openstack-edpm-tls-custom-tls-dns-default-certs-0\") pod \"install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-wnm77\" (UID: \"b259b684-2b3d-4594-b2a9-5f77958f1c21\") " pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-wnm77" Jan 20 18:08:59 crc kubenswrapper[4558]: I0120 18:08:59.667828 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b259b684-2b3d-4594-b2a9-5f77958f1c21-inventory\") pod \"install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-wnm77\" (UID: \"b259b684-2b3d-4594-b2a9-5f77958f1c21\") " pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-wnm77" Jan 20 18:08:59 crc kubenswrapper[4558]: I0120 18:08:59.668343 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-tls-dns-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b259b684-2b3d-4594-b2a9-5f77958f1c21-custom-tls-dns-combined-ca-bundle\") pod \"install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-wnm77\" (UID: \"b259b684-2b3d-4594-b2a9-5f77958f1c21\") " pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-wnm77" Jan 20 18:08:59 crc kubenswrapper[4558]: I0120 18:08:59.675007 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jvdx9\" (UniqueName: \"kubernetes.io/projected/b259b684-2b3d-4594-b2a9-5f77958f1c21-kube-api-access-jvdx9\") pod \"install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-wnm77\" (UID: \"b259b684-2b3d-4594-b2a9-5f77958f1c21\") " pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-wnm77" Jan 20 18:08:59 crc kubenswrapper[4558]: I0120 18:08:59.733021 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-wnm77" Jan 20 18:08:59 crc kubenswrapper[4558]: E0120 18:08:59.733127 4558 kubelet_pods.go:538] "Hostname for pod was too long, truncated it" podName="install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-wnm77" hostnameMaxLen=63 truncatedHostname="install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug" Jan 20 18:09:00 crc kubenswrapper[4558]: I0120 18:09:00.123733 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-wnm77"] Jan 20 18:09:00 crc kubenswrapper[4558]: W0120 18:09:00.137532 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb259b684_2b3d_4594_b2a9_5f77958f1c21.slice/crio-90de5d48243856a9ac350b8a73b67b40516a86dc7f5a50e2ac9179fb04311123 WatchSource:0}: Error finding container 90de5d48243856a9ac350b8a73b67b40516a86dc7f5a50e2ac9179fb04311123: Status 404 returned error can't find the container with id 90de5d48243856a9ac350b8a73b67b40516a86dc7f5a50e2ac9179fb04311123 Jan 20 18:09:00 crc kubenswrapper[4558]: E0120 18:09:00.138842 4558 kubelet_pods.go:538] "Hostname for pod was too long, truncated it" podName="install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-wnm77" hostnameMaxLen=63 truncatedHostname="install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug" Jan 20 18:09:00 crc kubenswrapper[4558]: I0120 18:09:00.284450 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-p6bsn" Jan 20 18:09:00 crc kubenswrapper[4558]: I0120 18:09:00.468810 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-tls-tls-dns-ips-default-certs-0\" (UniqueName: \"kubernetes.io/projected/6fb49239-c076-46dd-b4a3-dcff5212b012-openstack-edpm-tls-tls-dns-ips-default-certs-0\") pod \"6fb49239-c076-46dd-b4a3-dcff5212b012\" (UID: \"6fb49239-c076-46dd-b4a3-dcff5212b012\") " Jan 20 18:09:00 crc kubenswrapper[4558]: I0120 18:09:00.468969 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tls-dns-ips-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fb49239-c076-46dd-b4a3-dcff5212b012-tls-dns-ips-combined-ca-bundle\") pod \"6fb49239-c076-46dd-b4a3-dcff5212b012\" (UID: \"6fb49239-c076-46dd-b4a3-dcff5212b012\") " Jan 20 18:09:00 crc kubenswrapper[4558]: I0120 18:09:00.469035 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"custom-tls-dns-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fb49239-c076-46dd-b4a3-dcff5212b012-custom-tls-dns-combined-ca-bundle\") pod \"6fb49239-c076-46dd-b4a3-dcff5212b012\" (UID: \"6fb49239-c076-46dd-b4a3-dcff5212b012\") " Jan 20 18:09:00 crc kubenswrapper[4558]: I0120 18:09:00.469933 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-tls-custom-tls-dns-default-certs-0\" (UniqueName: \"kubernetes.io/projected/6fb49239-c076-46dd-b4a3-dcff5212b012-openstack-edpm-tls-custom-tls-dns-default-certs-0\") pod \"6fb49239-c076-46dd-b4a3-dcff5212b012\" (UID: \"6fb49239-c076-46dd-b4a3-dcff5212b012\") " Jan 20 18:09:00 crc kubenswrapper[4558]: I0120 18:09:00.469999 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-tls\" 
(UniqueName: \"kubernetes.io/secret/6fb49239-c076-46dd-b4a3-dcff5212b012-ssh-key-openstack-edpm-tls\") pod \"6fb49239-c076-46dd-b4a3-dcff5212b012\" (UID: \"6fb49239-c076-46dd-b4a3-dcff5212b012\") " Jan 20 18:09:00 crc kubenswrapper[4558]: I0120 18:09:00.470030 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6fb49239-c076-46dd-b4a3-dcff5212b012-inventory\") pod \"6fb49239-c076-46dd-b4a3-dcff5212b012\" (UID: \"6fb49239-c076-46dd-b4a3-dcff5212b012\") " Jan 20 18:09:00 crc kubenswrapper[4558]: I0120 18:09:00.470067 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"install-certs-ovrd-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fb49239-c076-46dd-b4a3-dcff5212b012-install-certs-ovrd-combined-ca-bundle\") pod \"6fb49239-c076-46dd-b4a3-dcff5212b012\" (UID: \"6fb49239-c076-46dd-b4a3-dcff5212b012\") " Jan 20 18:09:00 crc kubenswrapper[4558]: I0120 18:09:00.470150 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jvdx9\" (UniqueName: \"kubernetes.io/projected/6fb49239-c076-46dd-b4a3-dcff5212b012-kube-api-access-jvdx9\") pod \"6fb49239-c076-46dd-b4a3-dcff5212b012\" (UID: \"6fb49239-c076-46dd-b4a3-dcff5212b012\") " Jan 20 18:09:00 crc kubenswrapper[4558]: I0120 18:09:00.475747 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6fb49239-c076-46dd-b4a3-dcff5212b012-custom-tls-dns-combined-ca-bundle" (OuterVolumeSpecName: "custom-tls-dns-combined-ca-bundle") pod "6fb49239-c076-46dd-b4a3-dcff5212b012" (UID: "6fb49239-c076-46dd-b4a3-dcff5212b012"). InnerVolumeSpecName "custom-tls-dns-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:09:00 crc kubenswrapper[4558]: I0120 18:09:00.476031 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6fb49239-c076-46dd-b4a3-dcff5212b012-install-certs-ovrd-combined-ca-bundle" (OuterVolumeSpecName: "install-certs-ovrd-combined-ca-bundle") pod "6fb49239-c076-46dd-b4a3-dcff5212b012" (UID: "6fb49239-c076-46dd-b4a3-dcff5212b012"). InnerVolumeSpecName "install-certs-ovrd-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:09:00 crc kubenswrapper[4558]: I0120 18:09:00.476367 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6fb49239-c076-46dd-b4a3-dcff5212b012-tls-dns-ips-combined-ca-bundle" (OuterVolumeSpecName: "tls-dns-ips-combined-ca-bundle") pod "6fb49239-c076-46dd-b4a3-dcff5212b012" (UID: "6fb49239-c076-46dd-b4a3-dcff5212b012"). InnerVolumeSpecName "tls-dns-ips-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:09:00 crc kubenswrapper[4558]: I0120 18:09:00.477051 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6fb49239-c076-46dd-b4a3-dcff5212b012-openstack-edpm-tls-tls-dns-ips-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-tls-tls-dns-ips-default-certs-0") pod "6fb49239-c076-46dd-b4a3-dcff5212b012" (UID: "6fb49239-c076-46dd-b4a3-dcff5212b012"). InnerVolumeSpecName "openstack-edpm-tls-tls-dns-ips-default-certs-0". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:09:00 crc kubenswrapper[4558]: I0120 18:09:00.477097 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6fb49239-c076-46dd-b4a3-dcff5212b012-openstack-edpm-tls-custom-tls-dns-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-tls-custom-tls-dns-default-certs-0") pod "6fb49239-c076-46dd-b4a3-dcff5212b012" (UID: "6fb49239-c076-46dd-b4a3-dcff5212b012"). InnerVolumeSpecName "openstack-edpm-tls-custom-tls-dns-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:09:00 crc kubenswrapper[4558]: I0120 18:09:00.477116 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6fb49239-c076-46dd-b4a3-dcff5212b012-kube-api-access-jvdx9" (OuterVolumeSpecName: "kube-api-access-jvdx9") pod "6fb49239-c076-46dd-b4a3-dcff5212b012" (UID: "6fb49239-c076-46dd-b4a3-dcff5212b012"). InnerVolumeSpecName "kube-api-access-jvdx9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:09:00 crc kubenswrapper[4558]: I0120 18:09:00.490320 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6fb49239-c076-46dd-b4a3-dcff5212b012-ssh-key-openstack-edpm-tls" (OuterVolumeSpecName: "ssh-key-openstack-edpm-tls") pod "6fb49239-c076-46dd-b4a3-dcff5212b012" (UID: "6fb49239-c076-46dd-b4a3-dcff5212b012"). InnerVolumeSpecName "ssh-key-openstack-edpm-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:09:00 crc kubenswrapper[4558]: I0120 18:09:00.491351 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6fb49239-c076-46dd-b4a3-dcff5212b012-inventory" (OuterVolumeSpecName: "inventory") pod "6fb49239-c076-46dd-b4a3-dcff5212b012" (UID: "6fb49239-c076-46dd-b4a3-dcff5212b012"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:09:00 crc kubenswrapper[4558]: I0120 18:09:00.571913 4558 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-tls-custom-tls-dns-default-certs-0\" (UniqueName: \"kubernetes.io/projected/6fb49239-c076-46dd-b4a3-dcff5212b012-openstack-edpm-tls-custom-tls-dns-default-certs-0\") on node \"crc\" DevicePath \"\"" Jan 20 18:09:00 crc kubenswrapper[4558]: I0120 18:09:00.571943 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-tls\" (UniqueName: \"kubernetes.io/secret/6fb49239-c076-46dd-b4a3-dcff5212b012-ssh-key-openstack-edpm-tls\") on node \"crc\" DevicePath \"\"" Jan 20 18:09:00 crc kubenswrapper[4558]: I0120 18:09:00.571955 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6fb49239-c076-46dd-b4a3-dcff5212b012-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:09:00 crc kubenswrapper[4558]: I0120 18:09:00.571965 4558 reconciler_common.go:293] "Volume detached for volume \"install-certs-ovrd-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fb49239-c076-46dd-b4a3-dcff5212b012-install-certs-ovrd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:09:00 crc kubenswrapper[4558]: I0120 18:09:00.571977 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jvdx9\" (UniqueName: \"kubernetes.io/projected/6fb49239-c076-46dd-b4a3-dcff5212b012-kube-api-access-jvdx9\") on node \"crc\" DevicePath \"\"" Jan 20 18:09:00 crc kubenswrapper[4558]: I0120 18:09:00.571988 4558 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-tls-tls-dns-ips-default-certs-0\" (UniqueName: \"kubernetes.io/projected/6fb49239-c076-46dd-b4a3-dcff5212b012-openstack-edpm-tls-tls-dns-ips-default-certs-0\") on node \"crc\" DevicePath \"\"" Jan 20 18:09:00 crc kubenswrapper[4558]: I0120 18:09:00.571999 4558 reconciler_common.go:293] "Volume detached for volume \"tls-dns-ips-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fb49239-c076-46dd-b4a3-dcff5212b012-tls-dns-ips-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:09:00 crc kubenswrapper[4558]: I0120 18:09:00.572010 4558 reconciler_common.go:293] "Volume detached for volume \"custom-tls-dns-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6fb49239-c076-46dd-b4a3-dcff5212b012-custom-tls-dns-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:09:00 crc kubenswrapper[4558]: I0120 18:09:00.576284 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6fb49239-c076-46dd-b4a3-dcff5212b012" path="/var/lib/kubelet/pods/6fb49239-c076-46dd-b4a3-dcff5212b012/volumes" Jan 20 18:09:00 crc kubenswrapper[4558]: E0120 18:09:00.744599 4558 kubelet_pods.go:538] "Hostname for pod was too long, truncated it" podName="install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-wnm77" hostnameMaxLen=63 truncatedHostname="install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug" Jan 20 18:09:01 crc kubenswrapper[4558]: I0120 18:09:01.124235 4558 generic.go:334] "Generic (PLEG): container finished" podID="b259b684-2b3d-4594-b2a9-5f77958f1c21" containerID="aafd0ea0f37369de05dd2cb0384673472fdcd5e52d5ea8a13fbfa0ac03ce4bf4" exitCode=0 Jan 20 18:09:01 crc kubenswrapper[4558]: I0120 18:09:01.124301 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-wnm77" 
event={"ID":"b259b684-2b3d-4594-b2a9-5f77958f1c21","Type":"ContainerDied","Data":"aafd0ea0f37369de05dd2cb0384673472fdcd5e52d5ea8a13fbfa0ac03ce4bf4"} Jan 20 18:09:01 crc kubenswrapper[4558]: I0120 18:09:01.125455 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-wnm77" event={"ID":"b259b684-2b3d-4594-b2a9-5f77958f1c21","Type":"ContainerStarted","Data":"90de5d48243856a9ac350b8a73b67b40516a86dc7f5a50e2ac9179fb04311123"} Jan 20 18:09:01 crc kubenswrapper[4558]: I0120 18:09:01.126277 4558 generic.go:334] "Generic (PLEG): container finished" podID="aeae9158-dc5c-42e5-8741-1589bb842264" containerID="79e7dba817d9ddcf1ccde8404bcb21d393c65f2484740830c37c7ba4e867d889" exitCode=0 Jan 20 18:09:01 crc kubenswrapper[4558]: I0120 18:09:01.126329 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/tls-dns-ips-certs-refresh-openstack-edpm-tls-cg4jg" event={"ID":"aeae9158-dc5c-42e5-8741-1589bb842264","Type":"ContainerDied","Data":"79e7dba817d9ddcf1ccde8404bcb21d393c65f2484740830c37c7ba4e867d889"} Jan 20 18:09:01 crc kubenswrapper[4558]: I0120 18:09:01.130356 4558 scope.go:117] "RemoveContainer" containerID="056f1e582b3403bb364141ad34d8d708eb6772e943e462eb1e4e242b2438c2fc" Jan 20 18:09:01 crc kubenswrapper[4558]: I0120 18:09:01.130426 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-p6bsn" Jan 20 18:09:01 crc kubenswrapper[4558]: I0120 18:09:01.170984 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-wnm77"] Jan 20 18:09:01 crc kubenswrapper[4558]: I0120 18:09:01.179192 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-wnm77"] Jan 20 18:09:01 crc kubenswrapper[4558]: I0120 18:09:01.231678 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/install-certs-ovrd-openstack-edpm-tls-ovrd-openstack-edpm-jkd5h"] Jan 20 18:09:01 crc kubenswrapper[4558]: I0120 18:09:01.236020 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/install-certs-ovrd-openstack-edpm-tls-ovrd-openstack-edpm-jkd5h"] Jan 20 18:09:01 crc kubenswrapper[4558]: I0120 18:09:01.239932 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/tls-dnsnames-openstack-edpm-tls-openstack-edpm-tls-csjhv"] Jan 20 18:09:01 crc kubenswrapper[4558]: I0120 18:09:01.243605 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/install-certs-ovrd-openstack-edpm-tls-openstack-edpm-tls-bk9cj"] Jan 20 18:09:01 crc kubenswrapper[4558]: I0120 18:09:01.253833 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/tls-dns-ips-openstack-edpm-tls-ovrd-openstack-edpm-tls-4j2nb"] Jan 20 18:09:01 crc kubenswrapper[4558]: I0120 18:09:01.265964 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/tls-dns-ips-certs-refresh-openstack-edpm-tls-cg4jg"] Jan 20 18:09:01 crc kubenswrapper[4558]: I0120 18:09:01.277717 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/custom-tls-dns-openstack-edpm-tls-ovrd-openstack-edpm-tls-mxq2k"] Jan 20 18:09:01 crc kubenswrapper[4558]: I0120 18:09:01.285373 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack-kuttl-tests/tls-dnsnames-openstack-edpm-tls-openstack-edpm-tls-csjhv"] Jan 20 18:09:01 crc kubenswrapper[4558]: I0120 18:09:01.290728 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/install-certs-ovrd-openstack-edpm-tls-openstack-edpm-tls-bk9cj"] Jan 20 18:09:01 crc kubenswrapper[4558]: I0120 18:09:01.296457 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/tls-dns-ips-openstack-edpm-tls-ovrd-openstack-edpm-tls-4j2nb"] Jan 20 18:09:01 crc kubenswrapper[4558]: I0120 18:09:01.301274 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7"] Jan 20 18:09:01 crc kubenswrapper[4558]: I0120 18:09:01.304652 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/custom-tls-dns-openstack-edpm-tls-ovrd-openstack-edpm-tls-mxq2k"] Jan 20 18:09:01 crc kubenswrapper[4558]: I0120 18:09:01.308507 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7"] Jan 20 18:09:01 crc kubenswrapper[4558]: I0120 18:09:01.319972 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-cnfpf"] Jan 20 18:09:01 crc kubenswrapper[4558]: E0120 18:09:01.320409 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b259b684-2b3d-4594-b2a9-5f77958f1c21" containerName="install-certs-ovrd-certs-refresh-openstack-edpm-tls" Jan 20 18:09:01 crc kubenswrapper[4558]: I0120 18:09:01.320434 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b259b684-2b3d-4594-b2a9-5f77958f1c21" containerName="install-certs-ovrd-certs-refresh-openstack-edpm-tls" Jan 20 18:09:01 crc kubenswrapper[4558]: I0120 18:09:01.320626 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b259b684-2b3d-4594-b2a9-5f77958f1c21" containerName="install-certs-ovrd-certs-refresh-openstack-edpm-tls" Jan 20 18:09:01 crc kubenswrapper[4558]: I0120 18:09:01.321560 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-cnfpf" Jan 20 18:09:01 crc kubenswrapper[4558]: I0120 18:09:01.325693 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-cnfpf"] Jan 20 18:09:01 crc kubenswrapper[4558]: I0120 18:09:01.384483 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/bb55b2c7-9f9d-444a-8378-02a358ba4958-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-84b9f45d47-cnfpf\" (UID: \"bb55b2c7-9f9d-444a-8378-02a358ba4958\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-cnfpf" Jan 20 18:09:01 crc kubenswrapper[4558]: I0120 18:09:01.384558 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bb55b2c7-9f9d-444a-8378-02a358ba4958-config\") pod \"dnsmasq-dnsmasq-84b9f45d47-cnfpf\" (UID: \"bb55b2c7-9f9d-444a-8378-02a358ba4958\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-cnfpf" Jan 20 18:09:01 crc kubenswrapper[4558]: I0120 18:09:01.384602 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q2kmc\" (UniqueName: \"kubernetes.io/projected/bb55b2c7-9f9d-444a-8378-02a358ba4958-kube-api-access-q2kmc\") pod \"dnsmasq-dnsmasq-84b9f45d47-cnfpf\" (UID: \"bb55b2c7-9f9d-444a-8378-02a358ba4958\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-cnfpf" Jan 20 18:09:01 crc kubenswrapper[4558]: I0120 18:09:01.485754 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/bb55b2c7-9f9d-444a-8378-02a358ba4958-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-84b9f45d47-cnfpf\" (UID: \"bb55b2c7-9f9d-444a-8378-02a358ba4958\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-cnfpf" Jan 20 18:09:01 crc kubenswrapper[4558]: I0120 18:09:01.485856 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bb55b2c7-9f9d-444a-8378-02a358ba4958-config\") pod \"dnsmasq-dnsmasq-84b9f45d47-cnfpf\" (UID: \"bb55b2c7-9f9d-444a-8378-02a358ba4958\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-cnfpf" Jan 20 18:09:01 crc kubenswrapper[4558]: I0120 18:09:01.485922 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q2kmc\" (UniqueName: \"kubernetes.io/projected/bb55b2c7-9f9d-444a-8378-02a358ba4958-kube-api-access-q2kmc\") pod \"dnsmasq-dnsmasq-84b9f45d47-cnfpf\" (UID: \"bb55b2c7-9f9d-444a-8378-02a358ba4958\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-cnfpf" Jan 20 18:09:01 crc kubenswrapper[4558]: I0120 18:09:01.486878 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/bb55b2c7-9f9d-444a-8378-02a358ba4958-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-84b9f45d47-cnfpf\" (UID: \"bb55b2c7-9f9d-444a-8378-02a358ba4958\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-cnfpf" Jan 20 18:09:01 crc kubenswrapper[4558]: I0120 18:09:01.486949 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bb55b2c7-9f9d-444a-8378-02a358ba4958-config\") pod \"dnsmasq-dnsmasq-84b9f45d47-cnfpf\" (UID: \"bb55b2c7-9f9d-444a-8378-02a358ba4958\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-cnfpf" Jan 20 
18:09:01 crc kubenswrapper[4558]: I0120 18:09:01.501928 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q2kmc\" (UniqueName: \"kubernetes.io/projected/bb55b2c7-9f9d-444a-8378-02a358ba4958-kube-api-access-q2kmc\") pod \"dnsmasq-dnsmasq-84b9f45d47-cnfpf\" (UID: \"bb55b2c7-9f9d-444a-8378-02a358ba4958\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-cnfpf" Jan 20 18:09:01 crc kubenswrapper[4558]: I0120 18:09:01.643038 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-cnfpf" Jan 20 18:09:02 crc kubenswrapper[4558]: I0120 18:09:02.032060 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-cnfpf"] Jan 20 18:09:02 crc kubenswrapper[4558]: I0120 18:09:02.140406 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-cnfpf" event={"ID":"bb55b2c7-9f9d-444a-8378-02a358ba4958","Type":"ContainerStarted","Data":"f9cafb1d0e0c28bfcee26abf7a3698a02a1ef22597823bf147dcf1040724c800"} Jan 20 18:09:02 crc kubenswrapper[4558]: I0120 18:09:02.461872 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/tls-dns-ips-certs-refresh-openstack-edpm-tls-cg4jg" Jan 20 18:09:02 crc kubenswrapper[4558]: I0120 18:09:02.486526 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-wnm77" Jan 20 18:09:02 crc kubenswrapper[4558]: I0120 18:09:02.499870 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jvdx9\" (UniqueName: \"kubernetes.io/projected/b259b684-2b3d-4594-b2a9-5f77958f1c21-kube-api-access-jvdx9\") pod \"b259b684-2b3d-4594-b2a9-5f77958f1c21\" (UID: \"b259b684-2b3d-4594-b2a9-5f77958f1c21\") " Jan 20 18:09:02 crc kubenswrapper[4558]: I0120 18:09:02.499930 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-tls-custom-tls-dns-default-certs-0\" (UniqueName: \"kubernetes.io/projected/b259b684-2b3d-4594-b2a9-5f77958f1c21-openstack-edpm-tls-custom-tls-dns-default-certs-0\") pod \"b259b684-2b3d-4594-b2a9-5f77958f1c21\" (UID: \"b259b684-2b3d-4594-b2a9-5f77958f1c21\") " Jan 20 18:09:02 crc kubenswrapper[4558]: I0120 18:09:02.499967 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-tls-tls-dns-ips-default-certs-0\" (UniqueName: \"kubernetes.io/projected/b259b684-2b3d-4594-b2a9-5f77958f1c21-openstack-edpm-tls-tls-dns-ips-default-certs-0\") pod \"b259b684-2b3d-4594-b2a9-5f77958f1c21\" (UID: \"b259b684-2b3d-4594-b2a9-5f77958f1c21\") " Jan 20 18:09:02 crc kubenswrapper[4558]: I0120 18:09:02.500002 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-tls\" (UniqueName: \"kubernetes.io/secret/b259b684-2b3d-4594-b2a9-5f77958f1c21-ssh-key-openstack-edpm-tls\") pod \"b259b684-2b3d-4594-b2a9-5f77958f1c21\" (UID: \"b259b684-2b3d-4594-b2a9-5f77958f1c21\") " Jan 20 18:09:02 crc kubenswrapper[4558]: I0120 18:09:02.500024 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"custom-tls-dns-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b259b684-2b3d-4594-b2a9-5f77958f1c21-custom-tls-dns-combined-ca-bundle\") pod \"b259b684-2b3d-4594-b2a9-5f77958f1c21\" (UID: 
\"b259b684-2b3d-4594-b2a9-5f77958f1c21\") " Jan 20 18:09:02 crc kubenswrapper[4558]: I0120 18:09:02.500065 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tls-dns-ips-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b259b684-2b3d-4594-b2a9-5f77958f1c21-tls-dns-ips-combined-ca-bundle\") pod \"b259b684-2b3d-4594-b2a9-5f77958f1c21\" (UID: \"b259b684-2b3d-4594-b2a9-5f77958f1c21\") " Jan 20 18:09:02 crc kubenswrapper[4558]: I0120 18:09:02.500129 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"install-certs-ovrd-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b259b684-2b3d-4594-b2a9-5f77958f1c21-install-certs-ovrd-combined-ca-bundle\") pod \"b259b684-2b3d-4594-b2a9-5f77958f1c21\" (UID: \"b259b684-2b3d-4594-b2a9-5f77958f1c21\") " Jan 20 18:09:02 crc kubenswrapper[4558]: I0120 18:09:02.500148 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b259b684-2b3d-4594-b2a9-5f77958f1c21-inventory\") pod \"b259b684-2b3d-4594-b2a9-5f77958f1c21\" (UID: \"b259b684-2b3d-4594-b2a9-5f77958f1c21\") " Jan 20 18:09:02 crc kubenswrapper[4558]: I0120 18:09:02.500215 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xg6bx\" (UniqueName: \"kubernetes.io/projected/aeae9158-dc5c-42e5-8741-1589bb842264-kube-api-access-xg6bx\") pod \"aeae9158-dc5c-42e5-8741-1589bb842264\" (UID: \"aeae9158-dc5c-42e5-8741-1589bb842264\") " Jan 20 18:09:02 crc kubenswrapper[4558]: I0120 18:09:02.500259 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aeae9158-dc5c-42e5-8741-1589bb842264-inventory\") pod \"aeae9158-dc5c-42e5-8741-1589bb842264\" (UID: \"aeae9158-dc5c-42e5-8741-1589bb842264\") " Jan 20 18:09:02 crc kubenswrapper[4558]: I0120 18:09:02.500280 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-tls\" (UniqueName: \"kubernetes.io/secret/aeae9158-dc5c-42e5-8741-1589bb842264-ssh-key-openstack-edpm-tls\") pod \"aeae9158-dc5c-42e5-8741-1589bb842264\" (UID: \"aeae9158-dc5c-42e5-8741-1589bb842264\") " Jan 20 18:09:02 crc kubenswrapper[4558]: I0120 18:09:02.500310 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tls-dns-ips-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aeae9158-dc5c-42e5-8741-1589bb842264-tls-dns-ips-combined-ca-bundle\") pod \"aeae9158-dc5c-42e5-8741-1589bb842264\" (UID: \"aeae9158-dc5c-42e5-8741-1589bb842264\") " Jan 20 18:09:02 crc kubenswrapper[4558]: I0120 18:09:02.504527 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b259b684-2b3d-4594-b2a9-5f77958f1c21-openstack-edpm-tls-custom-tls-dns-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-tls-custom-tls-dns-default-certs-0") pod "b259b684-2b3d-4594-b2a9-5f77958f1c21" (UID: "b259b684-2b3d-4594-b2a9-5f77958f1c21"). InnerVolumeSpecName "openstack-edpm-tls-custom-tls-dns-default-certs-0". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:09:02 crc kubenswrapper[4558]: I0120 18:09:02.505316 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b259b684-2b3d-4594-b2a9-5f77958f1c21-install-certs-ovrd-combined-ca-bundle" (OuterVolumeSpecName: "install-certs-ovrd-combined-ca-bundle") pod "b259b684-2b3d-4594-b2a9-5f77958f1c21" (UID: "b259b684-2b3d-4594-b2a9-5f77958f1c21"). InnerVolumeSpecName "install-certs-ovrd-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:09:02 crc kubenswrapper[4558]: I0120 18:09:02.505341 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aeae9158-dc5c-42e5-8741-1589bb842264-kube-api-access-xg6bx" (OuterVolumeSpecName: "kube-api-access-xg6bx") pod "aeae9158-dc5c-42e5-8741-1589bb842264" (UID: "aeae9158-dc5c-42e5-8741-1589bb842264"). InnerVolumeSpecName "kube-api-access-xg6bx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:09:02 crc kubenswrapper[4558]: I0120 18:09:02.505347 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b259b684-2b3d-4594-b2a9-5f77958f1c21-custom-tls-dns-combined-ca-bundle" (OuterVolumeSpecName: "custom-tls-dns-combined-ca-bundle") pod "b259b684-2b3d-4594-b2a9-5f77958f1c21" (UID: "b259b684-2b3d-4594-b2a9-5f77958f1c21"). InnerVolumeSpecName "custom-tls-dns-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:09:02 crc kubenswrapper[4558]: I0120 18:09:02.505373 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aeae9158-dc5c-42e5-8741-1589bb842264-tls-dns-ips-combined-ca-bundle" (OuterVolumeSpecName: "tls-dns-ips-combined-ca-bundle") pod "aeae9158-dc5c-42e5-8741-1589bb842264" (UID: "aeae9158-dc5c-42e5-8741-1589bb842264"). InnerVolumeSpecName "tls-dns-ips-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:09:02 crc kubenswrapper[4558]: I0120 18:09:02.505450 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b259b684-2b3d-4594-b2a9-5f77958f1c21-openstack-edpm-tls-tls-dns-ips-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-tls-tls-dns-ips-default-certs-0") pod "b259b684-2b3d-4594-b2a9-5f77958f1c21" (UID: "b259b684-2b3d-4594-b2a9-5f77958f1c21"). InnerVolumeSpecName "openstack-edpm-tls-tls-dns-ips-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:09:02 crc kubenswrapper[4558]: I0120 18:09:02.505969 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b259b684-2b3d-4594-b2a9-5f77958f1c21-kube-api-access-jvdx9" (OuterVolumeSpecName: "kube-api-access-jvdx9") pod "b259b684-2b3d-4594-b2a9-5f77958f1c21" (UID: "b259b684-2b3d-4594-b2a9-5f77958f1c21"). InnerVolumeSpecName "kube-api-access-jvdx9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:09:02 crc kubenswrapper[4558]: I0120 18:09:02.506722 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b259b684-2b3d-4594-b2a9-5f77958f1c21-tls-dns-ips-combined-ca-bundle" (OuterVolumeSpecName: "tls-dns-ips-combined-ca-bundle") pod "b259b684-2b3d-4594-b2a9-5f77958f1c21" (UID: "b259b684-2b3d-4594-b2a9-5f77958f1c21"). InnerVolumeSpecName "tls-dns-ips-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:09:02 crc kubenswrapper[4558]: I0120 18:09:02.520830 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aeae9158-dc5c-42e5-8741-1589bb842264-inventory" (OuterVolumeSpecName: "inventory") pod "aeae9158-dc5c-42e5-8741-1589bb842264" (UID: "aeae9158-dc5c-42e5-8741-1589bb842264"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:09:02 crc kubenswrapper[4558]: I0120 18:09:02.521816 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b259b684-2b3d-4594-b2a9-5f77958f1c21-ssh-key-openstack-edpm-tls" (OuterVolumeSpecName: "ssh-key-openstack-edpm-tls") pod "b259b684-2b3d-4594-b2a9-5f77958f1c21" (UID: "b259b684-2b3d-4594-b2a9-5f77958f1c21"). InnerVolumeSpecName "ssh-key-openstack-edpm-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:09:02 crc kubenswrapper[4558]: I0120 18:09:02.524300 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b259b684-2b3d-4594-b2a9-5f77958f1c21-inventory" (OuterVolumeSpecName: "inventory") pod "b259b684-2b3d-4594-b2a9-5f77958f1c21" (UID: "b259b684-2b3d-4594-b2a9-5f77958f1c21"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:09:02 crc kubenswrapper[4558]: I0120 18:09:02.528485 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aeae9158-dc5c-42e5-8741-1589bb842264-ssh-key-openstack-edpm-tls" (OuterVolumeSpecName: "ssh-key-openstack-edpm-tls") pod "aeae9158-dc5c-42e5-8741-1589bb842264" (UID: "aeae9158-dc5c-42e5-8741-1589bb842264"). InnerVolumeSpecName "ssh-key-openstack-edpm-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:09:02 crc kubenswrapper[4558]: I0120 18:09:02.575120 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="11d2b4a2-d597-4764-b6f3-7ef906ca8403" path="/var/lib/kubelet/pods/11d2b4a2-d597-4764-b6f3-7ef906ca8403/volumes" Jan 20 18:09:02 crc kubenswrapper[4558]: I0120 18:09:02.576405 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4ac8c866-8e21-425c-ac4e-350c32f5c8a7" path="/var/lib/kubelet/pods/4ac8c866-8e21-425c-ac4e-350c32f5c8a7/volumes" Jan 20 18:09:02 crc kubenswrapper[4558]: I0120 18:09:02.577047 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6cff89d4-4dca-4d8b-98f1-0d94ebb8fad8" path="/var/lib/kubelet/pods/6cff89d4-4dca-4d8b-98f1-0d94ebb8fad8/volumes" Jan 20 18:09:02 crc kubenswrapper[4558]: I0120 18:09:02.577659 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b259b684-2b3d-4594-b2a9-5f77958f1c21" path="/var/lib/kubelet/pods/b259b684-2b3d-4594-b2a9-5f77958f1c21/volumes" Jan 20 18:09:02 crc kubenswrapper[4558]: I0120 18:09:02.578258 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b98cccb1-0d46-4be3-b3ce-f63a96e958d5" path="/var/lib/kubelet/pods/b98cccb1-0d46-4be3-b3ce-f63a96e958d5/volumes" Jan 20 18:09:02 crc kubenswrapper[4558]: I0120 18:09:02.579331 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cb720483-bd8a-47cb-978d-ecdcb6765105" path="/var/lib/kubelet/pods/cb720483-bd8a-47cb-978d-ecdcb6765105/volumes" Jan 20 18:09:02 crc kubenswrapper[4558]: I0120 18:09:02.579936 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7f7df71-1122-4bcf-bc50-84b25abb680e" path="/var/lib/kubelet/pods/e7f7df71-1122-4bcf-bc50-84b25abb680e/volumes" Jan 20 18:09:02 crc kubenswrapper[4558]: I0120 18:09:02.602506 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/aeae9158-dc5c-42e5-8741-1589bb842264-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:09:02 crc kubenswrapper[4558]: I0120 18:09:02.602552 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-tls\" (UniqueName: \"kubernetes.io/secret/aeae9158-dc5c-42e5-8741-1589bb842264-ssh-key-openstack-edpm-tls\") on node \"crc\" DevicePath \"\"" Jan 20 18:09:02 crc kubenswrapper[4558]: I0120 18:09:02.602567 4558 reconciler_common.go:293] "Volume detached for volume \"tls-dns-ips-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aeae9158-dc5c-42e5-8741-1589bb842264-tls-dns-ips-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:09:02 crc kubenswrapper[4558]: I0120 18:09:02.602580 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jvdx9\" (UniqueName: \"kubernetes.io/projected/b259b684-2b3d-4594-b2a9-5f77958f1c21-kube-api-access-jvdx9\") on node \"crc\" DevicePath \"\"" Jan 20 18:09:02 crc kubenswrapper[4558]: I0120 18:09:02.602593 4558 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-tls-custom-tls-dns-default-certs-0\" (UniqueName: \"kubernetes.io/projected/b259b684-2b3d-4594-b2a9-5f77958f1c21-openstack-edpm-tls-custom-tls-dns-default-certs-0\") on node \"crc\" DevicePath \"\"" Jan 20 18:09:02 crc kubenswrapper[4558]: I0120 18:09:02.602605 4558 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-tls-tls-dns-ips-default-certs-0\" (UniqueName: 
\"kubernetes.io/projected/b259b684-2b3d-4594-b2a9-5f77958f1c21-openstack-edpm-tls-tls-dns-ips-default-certs-0\") on node \"crc\" DevicePath \"\"" Jan 20 18:09:02 crc kubenswrapper[4558]: I0120 18:09:02.602618 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-tls\" (UniqueName: \"kubernetes.io/secret/b259b684-2b3d-4594-b2a9-5f77958f1c21-ssh-key-openstack-edpm-tls\") on node \"crc\" DevicePath \"\"" Jan 20 18:09:02 crc kubenswrapper[4558]: I0120 18:09:02.602629 4558 reconciler_common.go:293] "Volume detached for volume \"custom-tls-dns-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b259b684-2b3d-4594-b2a9-5f77958f1c21-custom-tls-dns-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:09:02 crc kubenswrapper[4558]: I0120 18:09:02.602642 4558 reconciler_common.go:293] "Volume detached for volume \"tls-dns-ips-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b259b684-2b3d-4594-b2a9-5f77958f1c21-tls-dns-ips-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:09:02 crc kubenswrapper[4558]: I0120 18:09:02.602653 4558 reconciler_common.go:293] "Volume detached for volume \"install-certs-ovrd-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b259b684-2b3d-4594-b2a9-5f77958f1c21-install-certs-ovrd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:09:02 crc kubenswrapper[4558]: I0120 18:09:02.602665 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b259b684-2b3d-4594-b2a9-5f77958f1c21-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:09:02 crc kubenswrapper[4558]: I0120 18:09:02.602679 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xg6bx\" (UniqueName: \"kubernetes.io/projected/aeae9158-dc5c-42e5-8741-1589bb842264-kube-api-access-xg6bx\") on node \"crc\" DevicePath \"\"" Jan 20 18:09:03 crc kubenswrapper[4558]: I0120 18:09:03.154601 4558 generic.go:334] "Generic (PLEG): container finished" podID="bb55b2c7-9f9d-444a-8378-02a358ba4958" containerID="45dd7fb1bc5040e597615af2dd59eed883a5d2bd53d08dab58c0fc2fd03e12c8" exitCode=0 Jan 20 18:09:03 crc kubenswrapper[4558]: I0120 18:09:03.154729 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-cnfpf" event={"ID":"bb55b2c7-9f9d-444a-8378-02a358ba4958","Type":"ContainerDied","Data":"45dd7fb1bc5040e597615af2dd59eed883a5d2bd53d08dab58c0fc2fd03e12c8"} Jan 20 18:09:03 crc kubenswrapper[4558]: I0120 18:09:03.158048 4558 scope.go:117] "RemoveContainer" containerID="aafd0ea0f37369de05dd2cb0384673472fdcd5e52d5ea8a13fbfa0ac03ce4bf4" Jan 20 18:09:03 crc kubenswrapper[4558]: I0120 18:09:03.158209 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/install-certs-ovrd-certs-refresh-openstack-edpm-tls-jxqv7-debug-wnm77" Jan 20 18:09:03 crc kubenswrapper[4558]: I0120 18:09:03.161747 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/tls-dns-ips-certs-refresh-openstack-edpm-tls-cg4jg" event={"ID":"aeae9158-dc5c-42e5-8741-1589bb842264","Type":"ContainerDied","Data":"e44c70f2c7b39d4768cc3711039d0c329589e524e3a2fa8601d5264f7041807d"} Jan 20 18:09:03 crc kubenswrapper[4558]: I0120 18:09:03.161787 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e44c70f2c7b39d4768cc3711039d0c329589e524e3a2fa8601d5264f7041807d" Jan 20 18:09:03 crc kubenswrapper[4558]: I0120 18:09:03.161842 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/tls-dns-ips-certs-refresh-openstack-edpm-tls-cg4jg" Jan 20 18:09:03 crc kubenswrapper[4558]: I0120 18:09:03.284547 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/tls-dns-ips-certs-refresh-openstack-edpm-tls-cg4jg"] Jan 20 18:09:03 crc kubenswrapper[4558]: I0120 18:09:03.290212 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/tls-dns-ips-certs-refresh-openstack-edpm-tls-cg4jg"] Jan 20 18:09:04 crc kubenswrapper[4558]: I0120 18:09:04.182670 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-cnfpf" event={"ID":"bb55b2c7-9f9d-444a-8378-02a358ba4958","Type":"ContainerStarted","Data":"23e9fa21abe062da76f61c27d04e80c109c18f826f16c6b4a83280b385731918"} Jan 20 18:09:04 crc kubenswrapper[4558]: I0120 18:09:04.183036 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-cnfpf" Jan 20 18:09:04 crc kubenswrapper[4558]: I0120 18:09:04.197350 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-cnfpf" podStartSLOduration=3.197332285 podStartE2EDuration="3.197332285s" podCreationTimestamp="2026-01-20 18:09:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:09:04.195811034 +0000 UTC m=+5237.956149001" watchObservedRunningTime="2026-01-20 18:09:04.197332285 +0000 UTC m=+5237.957670251" Jan 20 18:09:04 crc kubenswrapper[4558]: I0120 18:09:04.576598 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aeae9158-dc5c-42e5-8741-1589bb842264" path="/var/lib/kubelet/pods/aeae9158-dc5c-42e5-8741-1589bb842264/volumes" Jan 20 18:09:07 crc kubenswrapper[4558]: I0120 18:09:07.580335 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-w5fgr"] Jan 20 18:09:07 crc kubenswrapper[4558]: I0120 18:09:07.585157 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-w5fgr"] Jan 20 18:09:07 crc kubenswrapper[4558]: I0120 18:09:07.686221 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-5kk2q"] Jan 20 18:09:07 crc kubenswrapper[4558]: E0120 18:09:07.686768 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aeae9158-dc5c-42e5-8741-1589bb842264" containerName="tls-dns-ips-certs-refresh-openstack-edpm-tls" Jan 20 18:09:07 crc kubenswrapper[4558]: I0120 18:09:07.686795 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="aeae9158-dc5c-42e5-8741-1589bb842264" 
containerName="tls-dns-ips-certs-refresh-openstack-edpm-tls" Jan 20 18:09:07 crc kubenswrapper[4558]: I0120 18:09:07.686986 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="aeae9158-dc5c-42e5-8741-1589bb842264" containerName="tls-dns-ips-certs-refresh-openstack-edpm-tls" Jan 20 18:09:07 crc kubenswrapper[4558]: I0120 18:09:07.687711 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-5kk2q" Jan 20 18:09:07 crc kubenswrapper[4558]: I0120 18:09:07.689784 4558 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-k9ht8" Jan 20 18:09:07 crc kubenswrapper[4558]: I0120 18:09:07.689860 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Jan 20 18:09:07 crc kubenswrapper[4558]: I0120 18:09:07.690714 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Jan 20 18:09:07 crc kubenswrapper[4558]: I0120 18:09:07.693753 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-5kk2q"] Jan 20 18:09:07 crc kubenswrapper[4558]: I0120 18:09:07.696082 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Jan 20 18:09:07 crc kubenswrapper[4558]: I0120 18:09:07.878038 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/6f9ed19c-dd84-413c-9b4a-665ffbae2d13-node-mnt\") pod \"crc-storage-crc-5kk2q\" (UID: \"6f9ed19c-dd84-413c-9b4a-665ffbae2d13\") " pod="crc-storage/crc-storage-crc-5kk2q" Jan 20 18:09:07 crc kubenswrapper[4558]: I0120 18:09:07.878676 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vtm54\" (UniqueName: \"kubernetes.io/projected/6f9ed19c-dd84-413c-9b4a-665ffbae2d13-kube-api-access-vtm54\") pod \"crc-storage-crc-5kk2q\" (UID: \"6f9ed19c-dd84-413c-9b4a-665ffbae2d13\") " pod="crc-storage/crc-storage-crc-5kk2q" Jan 20 18:09:07 crc kubenswrapper[4558]: I0120 18:09:07.878720 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/6f9ed19c-dd84-413c-9b4a-665ffbae2d13-crc-storage\") pod \"crc-storage-crc-5kk2q\" (UID: \"6f9ed19c-dd84-413c-9b4a-665ffbae2d13\") " pod="crc-storage/crc-storage-crc-5kk2q" Jan 20 18:09:07 crc kubenswrapper[4558]: I0120 18:09:07.983510 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vtm54\" (UniqueName: \"kubernetes.io/projected/6f9ed19c-dd84-413c-9b4a-665ffbae2d13-kube-api-access-vtm54\") pod \"crc-storage-crc-5kk2q\" (UID: \"6f9ed19c-dd84-413c-9b4a-665ffbae2d13\") " pod="crc-storage/crc-storage-crc-5kk2q" Jan 20 18:09:07 crc kubenswrapper[4558]: I0120 18:09:07.983756 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/6f9ed19c-dd84-413c-9b4a-665ffbae2d13-crc-storage\") pod \"crc-storage-crc-5kk2q\" (UID: \"6f9ed19c-dd84-413c-9b4a-665ffbae2d13\") " pod="crc-storage/crc-storage-crc-5kk2q" Jan 20 18:09:07 crc kubenswrapper[4558]: I0120 18:09:07.984022 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/6f9ed19c-dd84-413c-9b4a-665ffbae2d13-node-mnt\") pod \"crc-storage-crc-5kk2q\" (UID: 
\"6f9ed19c-dd84-413c-9b4a-665ffbae2d13\") " pod="crc-storage/crc-storage-crc-5kk2q" Jan 20 18:09:07 crc kubenswrapper[4558]: I0120 18:09:07.984567 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/6f9ed19c-dd84-413c-9b4a-665ffbae2d13-node-mnt\") pod \"crc-storage-crc-5kk2q\" (UID: \"6f9ed19c-dd84-413c-9b4a-665ffbae2d13\") " pod="crc-storage/crc-storage-crc-5kk2q" Jan 20 18:09:07 crc kubenswrapper[4558]: I0120 18:09:07.984822 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/6f9ed19c-dd84-413c-9b4a-665ffbae2d13-crc-storage\") pod \"crc-storage-crc-5kk2q\" (UID: \"6f9ed19c-dd84-413c-9b4a-665ffbae2d13\") " pod="crc-storage/crc-storage-crc-5kk2q" Jan 20 18:09:08 crc kubenswrapper[4558]: I0120 18:09:08.001143 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vtm54\" (UniqueName: \"kubernetes.io/projected/6f9ed19c-dd84-413c-9b4a-665ffbae2d13-kube-api-access-vtm54\") pod \"crc-storage-crc-5kk2q\" (UID: \"6f9ed19c-dd84-413c-9b4a-665ffbae2d13\") " pod="crc-storage/crc-storage-crc-5kk2q" Jan 20 18:09:08 crc kubenswrapper[4558]: I0120 18:09:08.007727 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-5kk2q" Jan 20 18:09:08 crc kubenswrapper[4558]: I0120 18:09:08.391241 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-5kk2q"] Jan 20 18:09:08 crc kubenswrapper[4558]: I0120 18:09:08.574470 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd3b2016-61a9-4860-8f89-df916242b621" path="/var/lib/kubelet/pods/bd3b2016-61a9-4860-8f89-df916242b621/volumes" Jan 20 18:09:09 crc kubenswrapper[4558]: I0120 18:09:09.236396 4558 generic.go:334] "Generic (PLEG): container finished" podID="6f9ed19c-dd84-413c-9b4a-665ffbae2d13" containerID="0dcbbf8e86c4848e3976176c51bcf2b7b6b8fea99643dd8fc7dc65a40d20806a" exitCode=0 Jan 20 18:09:09 crc kubenswrapper[4558]: I0120 18:09:09.236501 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-5kk2q" event={"ID":"6f9ed19c-dd84-413c-9b4a-665ffbae2d13","Type":"ContainerDied","Data":"0dcbbf8e86c4848e3976176c51bcf2b7b6b8fea99643dd8fc7dc65a40d20806a"} Jan 20 18:09:09 crc kubenswrapper[4558]: I0120 18:09:09.236738 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-5kk2q" event={"ID":"6f9ed19c-dd84-413c-9b4a-665ffbae2d13","Type":"ContainerStarted","Data":"6e8f60d0665c5971850744fa7edbfa9b84eeb58fe92bf302bbefa86c6667e217"} Jan 20 18:09:10 crc kubenswrapper[4558]: I0120 18:09:10.500204 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-5kk2q" Jan 20 18:09:10 crc kubenswrapper[4558]: I0120 18:09:10.538204 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vtm54\" (UniqueName: \"kubernetes.io/projected/6f9ed19c-dd84-413c-9b4a-665ffbae2d13-kube-api-access-vtm54\") pod \"6f9ed19c-dd84-413c-9b4a-665ffbae2d13\" (UID: \"6f9ed19c-dd84-413c-9b4a-665ffbae2d13\") " Jan 20 18:09:10 crc kubenswrapper[4558]: I0120 18:09:10.538271 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/6f9ed19c-dd84-413c-9b4a-665ffbae2d13-crc-storage\") pod \"6f9ed19c-dd84-413c-9b4a-665ffbae2d13\" (UID: \"6f9ed19c-dd84-413c-9b4a-665ffbae2d13\") " Jan 20 18:09:10 crc kubenswrapper[4558]: I0120 18:09:10.538343 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/6f9ed19c-dd84-413c-9b4a-665ffbae2d13-node-mnt\") pod \"6f9ed19c-dd84-413c-9b4a-665ffbae2d13\" (UID: \"6f9ed19c-dd84-413c-9b4a-665ffbae2d13\") " Jan 20 18:09:10 crc kubenswrapper[4558]: I0120 18:09:10.538556 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6f9ed19c-dd84-413c-9b4a-665ffbae2d13-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "6f9ed19c-dd84-413c-9b4a-665ffbae2d13" (UID: "6f9ed19c-dd84-413c-9b4a-665ffbae2d13"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 18:09:10 crc kubenswrapper[4558]: I0120 18:09:10.543399 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6f9ed19c-dd84-413c-9b4a-665ffbae2d13-kube-api-access-vtm54" (OuterVolumeSpecName: "kube-api-access-vtm54") pod "6f9ed19c-dd84-413c-9b4a-665ffbae2d13" (UID: "6f9ed19c-dd84-413c-9b4a-665ffbae2d13"). InnerVolumeSpecName "kube-api-access-vtm54". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:09:10 crc kubenswrapper[4558]: I0120 18:09:10.557098 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6f9ed19c-dd84-413c-9b4a-665ffbae2d13-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "6f9ed19c-dd84-413c-9b4a-665ffbae2d13" (UID: "6f9ed19c-dd84-413c-9b4a-665ffbae2d13"). InnerVolumeSpecName "crc-storage". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:09:10 crc kubenswrapper[4558]: I0120 18:09:10.640697 4558 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/6f9ed19c-dd84-413c-9b4a-665ffbae2d13-crc-storage\") on node \"crc\" DevicePath \"\"" Jan 20 18:09:10 crc kubenswrapper[4558]: I0120 18:09:10.640735 4558 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/6f9ed19c-dd84-413c-9b4a-665ffbae2d13-node-mnt\") on node \"crc\" DevicePath \"\"" Jan 20 18:09:10 crc kubenswrapper[4558]: I0120 18:09:10.640747 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vtm54\" (UniqueName: \"kubernetes.io/projected/6f9ed19c-dd84-413c-9b4a-665ffbae2d13-kube-api-access-vtm54\") on node \"crc\" DevicePath \"\"" Jan 20 18:09:11 crc kubenswrapper[4558]: I0120 18:09:11.261736 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-5kk2q" event={"ID":"6f9ed19c-dd84-413c-9b4a-665ffbae2d13","Type":"ContainerDied","Data":"6e8f60d0665c5971850744fa7edbfa9b84eeb58fe92bf302bbefa86c6667e217"} Jan 20 18:09:11 crc kubenswrapper[4558]: I0120 18:09:11.261777 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6e8f60d0665c5971850744fa7edbfa9b84eeb58fe92bf302bbefa86c6667e217" Jan 20 18:09:11 crc kubenswrapper[4558]: I0120 18:09:11.261834 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-5kk2q" Jan 20 18:09:11 crc kubenswrapper[4558]: I0120 18:09:11.644332 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-cnfpf" Jan 20 18:09:11 crc kubenswrapper[4558]: I0120 18:09:11.689521 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-79cc674687-m284h"] Jan 20 18:09:11 crc kubenswrapper[4558]: I0120 18:09:11.689726 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-79cc674687-m284h" podUID="86191493-d485-4033-941c-1942d2499f99" containerName="dnsmasq-dns" containerID="cri-o://db84b778d8c36e69c27fd70bfe07b24f9d8e00da4abded0a905f303870018bf0" gracePeriod=10 Jan 20 18:09:12 crc kubenswrapper[4558]: I0120 18:09:12.063959 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-79cc674687-m284h" Jan 20 18:09:12 crc kubenswrapper[4558]: I0120 18:09:12.159017 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/86191493-d485-4033-941c-1942d2499f99-config\") pod \"86191493-d485-4033-941c-1942d2499f99\" (UID: \"86191493-d485-4033-941c-1942d2499f99\") " Jan 20 18:09:12 crc kubenswrapper[4558]: I0120 18:09:12.159092 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/86191493-d485-4033-941c-1942d2499f99-dnsmasq-svc\") pod \"86191493-d485-4033-941c-1942d2499f99\" (UID: \"86191493-d485-4033-941c-1942d2499f99\") " Jan 20 18:09:12 crc kubenswrapper[4558]: I0120 18:09:12.159138 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v8zln\" (UniqueName: \"kubernetes.io/projected/86191493-d485-4033-941c-1942d2499f99-kube-api-access-v8zln\") pod \"86191493-d485-4033-941c-1942d2499f99\" (UID: \"86191493-d485-4033-941c-1942d2499f99\") " Jan 20 18:09:12 crc kubenswrapper[4558]: I0120 18:09:12.159212 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-tls\" (UniqueName: \"kubernetes.io/configmap/86191493-d485-4033-941c-1942d2499f99-openstack-edpm-tls\") pod \"86191493-d485-4033-941c-1942d2499f99\" (UID: \"86191493-d485-4033-941c-1942d2499f99\") " Jan 20 18:09:12 crc kubenswrapper[4558]: I0120 18:09:12.166867 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/86191493-d485-4033-941c-1942d2499f99-kube-api-access-v8zln" (OuterVolumeSpecName: "kube-api-access-v8zln") pod "86191493-d485-4033-941c-1942d2499f99" (UID: "86191493-d485-4033-941c-1942d2499f99"). InnerVolumeSpecName "kube-api-access-v8zln". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:09:12 crc kubenswrapper[4558]: I0120 18:09:12.189375 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/86191493-d485-4033-941c-1942d2499f99-dnsmasq-svc" (OuterVolumeSpecName: "dnsmasq-svc") pod "86191493-d485-4033-941c-1942d2499f99" (UID: "86191493-d485-4033-941c-1942d2499f99"). InnerVolumeSpecName "dnsmasq-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:09:12 crc kubenswrapper[4558]: I0120 18:09:12.191949 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/86191493-d485-4033-941c-1942d2499f99-openstack-edpm-tls" (OuterVolumeSpecName: "openstack-edpm-tls") pod "86191493-d485-4033-941c-1942d2499f99" (UID: "86191493-d485-4033-941c-1942d2499f99"). InnerVolumeSpecName "openstack-edpm-tls". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:09:12 crc kubenswrapper[4558]: I0120 18:09:12.196121 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/86191493-d485-4033-941c-1942d2499f99-config" (OuterVolumeSpecName: "config") pod "86191493-d485-4033-941c-1942d2499f99" (UID: "86191493-d485-4033-941c-1942d2499f99"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:09:12 crc kubenswrapper[4558]: I0120 18:09:12.260254 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v8zln\" (UniqueName: \"kubernetes.io/projected/86191493-d485-4033-941c-1942d2499f99-kube-api-access-v8zln\") on node \"crc\" DevicePath \"\"" Jan 20 18:09:12 crc kubenswrapper[4558]: I0120 18:09:12.260288 4558 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-tls\" (UniqueName: \"kubernetes.io/configmap/86191493-d485-4033-941c-1942d2499f99-openstack-edpm-tls\") on node \"crc\" DevicePath \"\"" Jan 20 18:09:12 crc kubenswrapper[4558]: I0120 18:09:12.260300 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/86191493-d485-4033-941c-1942d2499f99-config\") on node \"crc\" DevicePath \"\"" Jan 20 18:09:12 crc kubenswrapper[4558]: I0120 18:09:12.260309 4558 reconciler_common.go:293] "Volume detached for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/86191493-d485-4033-941c-1942d2499f99-dnsmasq-svc\") on node \"crc\" DevicePath \"\"" Jan 20 18:09:12 crc kubenswrapper[4558]: I0120 18:09:12.270772 4558 generic.go:334] "Generic (PLEG): container finished" podID="86191493-d485-4033-941c-1942d2499f99" containerID="db84b778d8c36e69c27fd70bfe07b24f9d8e00da4abded0a905f303870018bf0" exitCode=0 Jan 20 18:09:12 crc kubenswrapper[4558]: I0120 18:09:12.270814 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-79cc674687-m284h" event={"ID":"86191493-d485-4033-941c-1942d2499f99","Type":"ContainerDied","Data":"db84b778d8c36e69c27fd70bfe07b24f9d8e00da4abded0a905f303870018bf0"} Jan 20 18:09:12 crc kubenswrapper[4558]: I0120 18:09:12.270837 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-79cc674687-m284h" Jan 20 18:09:12 crc kubenswrapper[4558]: I0120 18:09:12.270858 4558 scope.go:117] "RemoveContainer" containerID="db84b778d8c36e69c27fd70bfe07b24f9d8e00da4abded0a905f303870018bf0" Jan 20 18:09:12 crc kubenswrapper[4558]: I0120 18:09:12.270845 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-79cc674687-m284h" event={"ID":"86191493-d485-4033-941c-1942d2499f99","Type":"ContainerDied","Data":"3c68ed8d1528d8b1be0e5bfc9cc772e135eec3b3ee0b1409ba8ee7a25b3c3cec"} Jan 20 18:09:12 crc kubenswrapper[4558]: I0120 18:09:12.291690 4558 scope.go:117] "RemoveContainer" containerID="8d71452039d14ffa8a2819d7d187e816135404e2a83b38eaa9e01696adf14966" Jan 20 18:09:12 crc kubenswrapper[4558]: I0120 18:09:12.302356 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-79cc674687-m284h"] Jan 20 18:09:12 crc kubenswrapper[4558]: I0120 18:09:12.307434 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-79cc674687-m284h"] Jan 20 18:09:12 crc kubenswrapper[4558]: I0120 18:09:12.324914 4558 scope.go:117] "RemoveContainer" containerID="db84b778d8c36e69c27fd70bfe07b24f9d8e00da4abded0a905f303870018bf0" Jan 20 18:09:12 crc kubenswrapper[4558]: E0120 18:09:12.325333 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"db84b778d8c36e69c27fd70bfe07b24f9d8e00da4abded0a905f303870018bf0\": container with ID starting with db84b778d8c36e69c27fd70bfe07b24f9d8e00da4abded0a905f303870018bf0 not found: ID does not exist" containerID="db84b778d8c36e69c27fd70bfe07b24f9d8e00da4abded0a905f303870018bf0" Jan 20 18:09:12 crc kubenswrapper[4558]: I0120 18:09:12.325374 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"db84b778d8c36e69c27fd70bfe07b24f9d8e00da4abded0a905f303870018bf0"} err="failed to get container status \"db84b778d8c36e69c27fd70bfe07b24f9d8e00da4abded0a905f303870018bf0\": rpc error: code = NotFound desc = could not find container \"db84b778d8c36e69c27fd70bfe07b24f9d8e00da4abded0a905f303870018bf0\": container with ID starting with db84b778d8c36e69c27fd70bfe07b24f9d8e00da4abded0a905f303870018bf0 not found: ID does not exist" Jan 20 18:09:12 crc kubenswrapper[4558]: I0120 18:09:12.325399 4558 scope.go:117] "RemoveContainer" containerID="8d71452039d14ffa8a2819d7d187e816135404e2a83b38eaa9e01696adf14966" Jan 20 18:09:12 crc kubenswrapper[4558]: E0120 18:09:12.325735 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8d71452039d14ffa8a2819d7d187e816135404e2a83b38eaa9e01696adf14966\": container with ID starting with 8d71452039d14ffa8a2819d7d187e816135404e2a83b38eaa9e01696adf14966 not found: ID does not exist" containerID="8d71452039d14ffa8a2819d7d187e816135404e2a83b38eaa9e01696adf14966" Jan 20 18:09:12 crc kubenswrapper[4558]: I0120 18:09:12.325829 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8d71452039d14ffa8a2819d7d187e816135404e2a83b38eaa9e01696adf14966"} err="failed to get container status \"8d71452039d14ffa8a2819d7d187e816135404e2a83b38eaa9e01696adf14966\": rpc error: code = NotFound desc = could not find container \"8d71452039d14ffa8a2819d7d187e816135404e2a83b38eaa9e01696adf14966\": container with ID starting with 
8d71452039d14ffa8a2819d7d187e816135404e2a83b38eaa9e01696adf14966 not found: ID does not exist" Jan 20 18:09:12 crc kubenswrapper[4558]: I0120 18:09:12.574584 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="86191493-d485-4033-941c-1942d2499f99" path="/var/lib/kubelet/pods/86191493-d485-4033-941c-1942d2499f99/volumes" Jan 20 18:09:13 crc kubenswrapper[4558]: I0120 18:09:13.263465 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-5kk2q"] Jan 20 18:09:13 crc kubenswrapper[4558]: I0120 18:09:13.269601 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-5kk2q"] Jan 20 18:09:13 crc kubenswrapper[4558]: I0120 18:09:13.375620 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-2cwnp"] Jan 20 18:09:13 crc kubenswrapper[4558]: E0120 18:09:13.375966 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86191493-d485-4033-941c-1942d2499f99" containerName="dnsmasq-dns" Jan 20 18:09:13 crc kubenswrapper[4558]: I0120 18:09:13.375983 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="86191493-d485-4033-941c-1942d2499f99" containerName="dnsmasq-dns" Jan 20 18:09:13 crc kubenswrapper[4558]: E0120 18:09:13.375995 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86191493-d485-4033-941c-1942d2499f99" containerName="init" Jan 20 18:09:13 crc kubenswrapper[4558]: I0120 18:09:13.376001 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="86191493-d485-4033-941c-1942d2499f99" containerName="init" Jan 20 18:09:13 crc kubenswrapper[4558]: E0120 18:09:13.376012 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f9ed19c-dd84-413c-9b4a-665ffbae2d13" containerName="storage" Jan 20 18:09:13 crc kubenswrapper[4558]: I0120 18:09:13.376018 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f9ed19c-dd84-413c-9b4a-665ffbae2d13" containerName="storage" Jan 20 18:09:13 crc kubenswrapper[4558]: I0120 18:09:13.376147 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="6f9ed19c-dd84-413c-9b4a-665ffbae2d13" containerName="storage" Jan 20 18:09:13 crc kubenswrapper[4558]: I0120 18:09:13.376190 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="86191493-d485-4033-941c-1942d2499f99" containerName="dnsmasq-dns" Jan 20 18:09:13 crc kubenswrapper[4558]: I0120 18:09:13.376701 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-2cwnp" Jan 20 18:09:13 crc kubenswrapper[4558]: I0120 18:09:13.378109 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Jan 20 18:09:13 crc kubenswrapper[4558]: I0120 18:09:13.378589 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Jan 20 18:09:13 crc kubenswrapper[4558]: I0120 18:09:13.379497 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Jan 20 18:09:13 crc kubenswrapper[4558]: I0120 18:09:13.383072 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-2cwnp"] Jan 20 18:09:13 crc kubenswrapper[4558]: I0120 18:09:13.383191 4558 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-k9ht8" Jan 20 18:09:13 crc kubenswrapper[4558]: I0120 18:09:13.476589 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8cj98\" (UniqueName: \"kubernetes.io/projected/c05e348b-ff2f-41d2-89b8-763acfa5b7b5-kube-api-access-8cj98\") pod \"crc-storage-crc-2cwnp\" (UID: \"c05e348b-ff2f-41d2-89b8-763acfa5b7b5\") " pod="crc-storage/crc-storage-crc-2cwnp" Jan 20 18:09:13 crc kubenswrapper[4558]: I0120 18:09:13.476757 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/c05e348b-ff2f-41d2-89b8-763acfa5b7b5-node-mnt\") pod \"crc-storage-crc-2cwnp\" (UID: \"c05e348b-ff2f-41d2-89b8-763acfa5b7b5\") " pod="crc-storage/crc-storage-crc-2cwnp" Jan 20 18:09:13 crc kubenswrapper[4558]: I0120 18:09:13.476800 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/c05e348b-ff2f-41d2-89b8-763acfa5b7b5-crc-storage\") pod \"crc-storage-crc-2cwnp\" (UID: \"c05e348b-ff2f-41d2-89b8-763acfa5b7b5\") " pod="crc-storage/crc-storage-crc-2cwnp" Jan 20 18:09:13 crc kubenswrapper[4558]: I0120 18:09:13.577536 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/c05e348b-ff2f-41d2-89b8-763acfa5b7b5-node-mnt\") pod \"crc-storage-crc-2cwnp\" (UID: \"c05e348b-ff2f-41d2-89b8-763acfa5b7b5\") " pod="crc-storage/crc-storage-crc-2cwnp" Jan 20 18:09:13 crc kubenswrapper[4558]: I0120 18:09:13.577602 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/c05e348b-ff2f-41d2-89b8-763acfa5b7b5-crc-storage\") pod \"crc-storage-crc-2cwnp\" (UID: \"c05e348b-ff2f-41d2-89b8-763acfa5b7b5\") " pod="crc-storage/crc-storage-crc-2cwnp" Jan 20 18:09:13 crc kubenswrapper[4558]: I0120 18:09:13.577681 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8cj98\" (UniqueName: \"kubernetes.io/projected/c05e348b-ff2f-41d2-89b8-763acfa5b7b5-kube-api-access-8cj98\") pod \"crc-storage-crc-2cwnp\" (UID: \"c05e348b-ff2f-41d2-89b8-763acfa5b7b5\") " pod="crc-storage/crc-storage-crc-2cwnp" Jan 20 18:09:13 crc kubenswrapper[4558]: I0120 18:09:13.577933 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/c05e348b-ff2f-41d2-89b8-763acfa5b7b5-node-mnt\") pod \"crc-storage-crc-2cwnp\" (UID: \"c05e348b-ff2f-41d2-89b8-763acfa5b7b5\") " 
pod="crc-storage/crc-storage-crc-2cwnp" Jan 20 18:09:13 crc kubenswrapper[4558]: I0120 18:09:13.578384 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/c05e348b-ff2f-41d2-89b8-763acfa5b7b5-crc-storage\") pod \"crc-storage-crc-2cwnp\" (UID: \"c05e348b-ff2f-41d2-89b8-763acfa5b7b5\") " pod="crc-storage/crc-storage-crc-2cwnp" Jan 20 18:09:13 crc kubenswrapper[4558]: I0120 18:09:13.594332 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8cj98\" (UniqueName: \"kubernetes.io/projected/c05e348b-ff2f-41d2-89b8-763acfa5b7b5-kube-api-access-8cj98\") pod \"crc-storage-crc-2cwnp\" (UID: \"c05e348b-ff2f-41d2-89b8-763acfa5b7b5\") " pod="crc-storage/crc-storage-crc-2cwnp" Jan 20 18:09:13 crc kubenswrapper[4558]: I0120 18:09:13.692197 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-2cwnp" Jan 20 18:09:14 crc kubenswrapper[4558]: I0120 18:09:14.093981 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-2cwnp"] Jan 20 18:09:14 crc kubenswrapper[4558]: I0120 18:09:14.293995 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-2cwnp" event={"ID":"c05e348b-ff2f-41d2-89b8-763acfa5b7b5","Type":"ContainerStarted","Data":"f85de9b68c897e8e3cd39851e5dc100c99d46f7a815b3cd97dca9a1f3a489e24"} Jan 20 18:09:14 crc kubenswrapper[4558]: I0120 18:09:14.574968 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6f9ed19c-dd84-413c-9b4a-665ffbae2d13" path="/var/lib/kubelet/pods/6f9ed19c-dd84-413c-9b4a-665ffbae2d13/volumes" Jan 20 18:09:15 crc kubenswrapper[4558]: I0120 18:09:15.308795 4558 generic.go:334] "Generic (PLEG): container finished" podID="c05e348b-ff2f-41d2-89b8-763acfa5b7b5" containerID="bce9460f3bc5575a7f63631bdcf0321bf4450bb60aa3376a029bb8eca3000348" exitCode=0 Jan 20 18:09:15 crc kubenswrapper[4558]: I0120 18:09:15.308914 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-2cwnp" event={"ID":"c05e348b-ff2f-41d2-89b8-763acfa5b7b5","Type":"ContainerDied","Data":"bce9460f3bc5575a7f63631bdcf0321bf4450bb60aa3376a029bb8eca3000348"} Jan 20 18:09:16 crc kubenswrapper[4558]: I0120 18:09:16.539855 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-2cwnp" Jan 20 18:09:16 crc kubenswrapper[4558]: I0120 18:09:16.618818 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/c05e348b-ff2f-41d2-89b8-763acfa5b7b5-node-mnt\") pod \"c05e348b-ff2f-41d2-89b8-763acfa5b7b5\" (UID: \"c05e348b-ff2f-41d2-89b8-763acfa5b7b5\") " Jan 20 18:09:16 crc kubenswrapper[4558]: I0120 18:09:16.618878 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8cj98\" (UniqueName: \"kubernetes.io/projected/c05e348b-ff2f-41d2-89b8-763acfa5b7b5-kube-api-access-8cj98\") pod \"c05e348b-ff2f-41d2-89b8-763acfa5b7b5\" (UID: \"c05e348b-ff2f-41d2-89b8-763acfa5b7b5\") " Jan 20 18:09:16 crc kubenswrapper[4558]: I0120 18:09:16.618961 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c05e348b-ff2f-41d2-89b8-763acfa5b7b5-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "c05e348b-ff2f-41d2-89b8-763acfa5b7b5" (UID: "c05e348b-ff2f-41d2-89b8-763acfa5b7b5"). InnerVolumeSpecName "node-mnt". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 18:09:16 crc kubenswrapper[4558]: I0120 18:09:16.619041 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/c05e348b-ff2f-41d2-89b8-763acfa5b7b5-crc-storage\") pod \"c05e348b-ff2f-41d2-89b8-763acfa5b7b5\" (UID: \"c05e348b-ff2f-41d2-89b8-763acfa5b7b5\") " Jan 20 18:09:16 crc kubenswrapper[4558]: I0120 18:09:16.620433 4558 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/c05e348b-ff2f-41d2-89b8-763acfa5b7b5-node-mnt\") on node \"crc\" DevicePath \"\"" Jan 20 18:09:16 crc kubenswrapper[4558]: I0120 18:09:16.625312 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c05e348b-ff2f-41d2-89b8-763acfa5b7b5-kube-api-access-8cj98" (OuterVolumeSpecName: "kube-api-access-8cj98") pod "c05e348b-ff2f-41d2-89b8-763acfa5b7b5" (UID: "c05e348b-ff2f-41d2-89b8-763acfa5b7b5"). InnerVolumeSpecName "kube-api-access-8cj98". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:09:16 crc kubenswrapper[4558]: I0120 18:09:16.637322 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c05e348b-ff2f-41d2-89b8-763acfa5b7b5-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "c05e348b-ff2f-41d2-89b8-763acfa5b7b5" (UID: "c05e348b-ff2f-41d2-89b8-763acfa5b7b5"). InnerVolumeSpecName "crc-storage". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:09:16 crc kubenswrapper[4558]: I0120 18:09:16.721545 4558 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/c05e348b-ff2f-41d2-89b8-763acfa5b7b5-crc-storage\") on node \"crc\" DevicePath \"\"" Jan 20 18:09:16 crc kubenswrapper[4558]: I0120 18:09:16.721575 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8cj98\" (UniqueName: \"kubernetes.io/projected/c05e348b-ff2f-41d2-89b8-763acfa5b7b5-kube-api-access-8cj98\") on node \"crc\" DevicePath \"\"" Jan 20 18:09:17 crc kubenswrapper[4558]: I0120 18:09:17.334955 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-2cwnp" event={"ID":"c05e348b-ff2f-41d2-89b8-763acfa5b7b5","Type":"ContainerDied","Data":"f85de9b68c897e8e3cd39851e5dc100c99d46f7a815b3cd97dca9a1f3a489e24"} Jan 20 18:09:17 crc kubenswrapper[4558]: I0120 18:09:17.335235 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f85de9b68c897e8e3cd39851e5dc100c99d46f7a815b3cd97dca9a1f3a489e24" Jan 20 18:09:17 crc kubenswrapper[4558]: I0120 18:09:17.335298 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-2cwnp" Jan 20 18:09:19 crc kubenswrapper[4558]: I0120 18:09:19.547981 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-76cd9645f5-4nltx"] Jan 20 18:09:19 crc kubenswrapper[4558]: E0120 18:09:19.548345 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c05e348b-ff2f-41d2-89b8-763acfa5b7b5" containerName="storage" Jan 20 18:09:19 crc kubenswrapper[4558]: I0120 18:09:19.548359 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c05e348b-ff2f-41d2-89b8-763acfa5b7b5" containerName="storage" Jan 20 18:09:19 crc kubenswrapper[4558]: I0120 18:09:19.548565 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c05e348b-ff2f-41d2-89b8-763acfa5b7b5" containerName="storage" Jan 20 18:09:19 crc kubenswrapper[4558]: I0120 18:09:19.549395 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-76cd9645f5-4nltx" Jan 20 18:09:19 crc kubenswrapper[4558]: I0120 18:09:19.551681 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"edpm-extramounts" Jan 20 18:09:19 crc kubenswrapper[4558]: I0120 18:09:19.556323 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-76cd9645f5-4nltx"] Jan 20 18:09:19 crc kubenswrapper[4558]: I0120 18:09:19.659091 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/2875a937-0d66-4a2f-8284-15c3d04fb18b-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-76cd9645f5-4nltx\" (UID: \"2875a937-0d66-4a2f-8284-15c3d04fb18b\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-76cd9645f5-4nltx" Jan 20 18:09:19 crc kubenswrapper[4558]: I0120 18:09:19.659411 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2875a937-0d66-4a2f-8284-15c3d04fb18b-config\") pod \"dnsmasq-dnsmasq-76cd9645f5-4nltx\" (UID: \"2875a937-0d66-4a2f-8284-15c3d04fb18b\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-76cd9645f5-4nltx" Jan 20 18:09:19 crc kubenswrapper[4558]: I0120 18:09:19.659606 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kkbng\" (UniqueName: \"kubernetes.io/projected/2875a937-0d66-4a2f-8284-15c3d04fb18b-kube-api-access-kkbng\") pod \"dnsmasq-dnsmasq-76cd9645f5-4nltx\" (UID: \"2875a937-0d66-4a2f-8284-15c3d04fb18b\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-76cd9645f5-4nltx" Jan 20 18:09:19 crc kubenswrapper[4558]: I0120 18:09:19.660248 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"edpm-extramounts\" (UniqueName: \"kubernetes.io/configmap/2875a937-0d66-4a2f-8284-15c3d04fb18b-edpm-extramounts\") pod \"dnsmasq-dnsmasq-76cd9645f5-4nltx\" (UID: \"2875a937-0d66-4a2f-8284-15c3d04fb18b\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-76cd9645f5-4nltx" Jan 20 18:09:19 crc kubenswrapper[4558]: I0120 18:09:19.761367 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kkbng\" (UniqueName: \"kubernetes.io/projected/2875a937-0d66-4a2f-8284-15c3d04fb18b-kube-api-access-kkbng\") pod \"dnsmasq-dnsmasq-76cd9645f5-4nltx\" (UID: \"2875a937-0d66-4a2f-8284-15c3d04fb18b\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-76cd9645f5-4nltx" Jan 20 18:09:19 crc kubenswrapper[4558]: I0120 
18:09:19.761562 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"edpm-extramounts\" (UniqueName: \"kubernetes.io/configmap/2875a937-0d66-4a2f-8284-15c3d04fb18b-edpm-extramounts\") pod \"dnsmasq-dnsmasq-76cd9645f5-4nltx\" (UID: \"2875a937-0d66-4a2f-8284-15c3d04fb18b\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-76cd9645f5-4nltx" Jan 20 18:09:19 crc kubenswrapper[4558]: I0120 18:09:19.761748 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/2875a937-0d66-4a2f-8284-15c3d04fb18b-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-76cd9645f5-4nltx\" (UID: \"2875a937-0d66-4a2f-8284-15c3d04fb18b\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-76cd9645f5-4nltx" Jan 20 18:09:19 crc kubenswrapper[4558]: I0120 18:09:19.761860 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2875a937-0d66-4a2f-8284-15c3d04fb18b-config\") pod \"dnsmasq-dnsmasq-76cd9645f5-4nltx\" (UID: \"2875a937-0d66-4a2f-8284-15c3d04fb18b\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-76cd9645f5-4nltx" Jan 20 18:09:19 crc kubenswrapper[4558]: I0120 18:09:19.762823 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/2875a937-0d66-4a2f-8284-15c3d04fb18b-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-76cd9645f5-4nltx\" (UID: \"2875a937-0d66-4a2f-8284-15c3d04fb18b\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-76cd9645f5-4nltx" Jan 20 18:09:19 crc kubenswrapper[4558]: I0120 18:09:19.762834 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2875a937-0d66-4a2f-8284-15c3d04fb18b-config\") pod \"dnsmasq-dnsmasq-76cd9645f5-4nltx\" (UID: \"2875a937-0d66-4a2f-8284-15c3d04fb18b\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-76cd9645f5-4nltx" Jan 20 18:09:19 crc kubenswrapper[4558]: I0120 18:09:19.762974 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"edpm-extramounts\" (UniqueName: \"kubernetes.io/configmap/2875a937-0d66-4a2f-8284-15c3d04fb18b-edpm-extramounts\") pod \"dnsmasq-dnsmasq-76cd9645f5-4nltx\" (UID: \"2875a937-0d66-4a2f-8284-15c3d04fb18b\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-76cd9645f5-4nltx" Jan 20 18:09:19 crc kubenswrapper[4558]: I0120 18:09:19.778030 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kkbng\" (UniqueName: \"kubernetes.io/projected/2875a937-0d66-4a2f-8284-15c3d04fb18b-kube-api-access-kkbng\") pod \"dnsmasq-dnsmasq-76cd9645f5-4nltx\" (UID: \"2875a937-0d66-4a2f-8284-15c3d04fb18b\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-76cd9645f5-4nltx" Jan 20 18:09:19 crc kubenswrapper[4558]: I0120 18:09:19.863405 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-76cd9645f5-4nltx" Jan 20 18:09:20 crc kubenswrapper[4558]: I0120 18:09:20.245961 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-76cd9645f5-4nltx"] Jan 20 18:09:20 crc kubenswrapper[4558]: W0120 18:09:20.249639 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2875a937_0d66_4a2f_8284_15c3d04fb18b.slice/crio-e924dead23b4b43824318eaf2cdad0d92527263ad13dbf0631913794bc283d03 WatchSource:0}: Error finding container e924dead23b4b43824318eaf2cdad0d92527263ad13dbf0631913794bc283d03: Status 404 returned error can't find the container with id e924dead23b4b43824318eaf2cdad0d92527263ad13dbf0631913794bc283d03 Jan 20 18:09:20 crc kubenswrapper[4558]: I0120 18:09:20.356389 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-76cd9645f5-4nltx" event={"ID":"2875a937-0d66-4a2f-8284-15c3d04fb18b","Type":"ContainerStarted","Data":"e924dead23b4b43824318eaf2cdad0d92527263ad13dbf0631913794bc283d03"} Jan 20 18:09:21 crc kubenswrapper[4558]: I0120 18:09:21.366261 4558 generic.go:334] "Generic (PLEG): container finished" podID="2875a937-0d66-4a2f-8284-15c3d04fb18b" containerID="f312b42b96e28e625be99141e336dac5e2c77a900a5fd9fe3469d727d8d185e3" exitCode=0 Jan 20 18:09:21 crc kubenswrapper[4558]: I0120 18:09:21.366308 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-76cd9645f5-4nltx" event={"ID":"2875a937-0d66-4a2f-8284-15c3d04fb18b","Type":"ContainerDied","Data":"f312b42b96e28e625be99141e336dac5e2c77a900a5fd9fe3469d727d8d185e3"} Jan 20 18:09:22 crc kubenswrapper[4558]: I0120 18:09:22.376536 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-76cd9645f5-4nltx" event={"ID":"2875a937-0d66-4a2f-8284-15c3d04fb18b","Type":"ContainerStarted","Data":"e7b368f493a8a85c2a0e97578c74841e25761e54551addd120780deda7caa493"} Jan 20 18:09:22 crc kubenswrapper[4558]: I0120 18:09:22.376962 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-76cd9645f5-4nltx" Jan 20 18:09:22 crc kubenswrapper[4558]: I0120 18:09:22.391302 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-76cd9645f5-4nltx" podStartSLOduration=3.391283978 podStartE2EDuration="3.391283978s" podCreationTimestamp="2026-01-20 18:09:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:09:22.389282255 +0000 UTC m=+5256.149620222" watchObservedRunningTime="2026-01-20 18:09:22.391283978 +0000 UTC m=+5256.151621945" Jan 20 18:09:29 crc kubenswrapper[4558]: I0120 18:09:29.864613 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-76cd9645f5-4nltx" Jan 20 18:09:29 crc kubenswrapper[4558]: I0120 18:09:29.906559 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-cnfpf"] Jan 20 18:09:29 crc kubenswrapper[4558]: I0120 18:09:29.906775 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-cnfpf" podUID="bb55b2c7-9f9d-444a-8378-02a358ba4958" containerName="dnsmasq-dns" 
containerID="cri-o://23e9fa21abe062da76f61c27d04e80c109c18f826f16c6b4a83280b385731918" gracePeriod=10 Jan 20 18:09:30 crc kubenswrapper[4558]: I0120 18:09:30.253118 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-cnfpf" Jan 20 18:09:30 crc kubenswrapper[4558]: I0120 18:09:30.307471 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q2kmc\" (UniqueName: \"kubernetes.io/projected/bb55b2c7-9f9d-444a-8378-02a358ba4958-kube-api-access-q2kmc\") pod \"bb55b2c7-9f9d-444a-8378-02a358ba4958\" (UID: \"bb55b2c7-9f9d-444a-8378-02a358ba4958\") " Jan 20 18:09:30 crc kubenswrapper[4558]: I0120 18:09:30.314232 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bb55b2c7-9f9d-444a-8378-02a358ba4958-kube-api-access-q2kmc" (OuterVolumeSpecName: "kube-api-access-q2kmc") pod "bb55b2c7-9f9d-444a-8378-02a358ba4958" (UID: "bb55b2c7-9f9d-444a-8378-02a358ba4958"). InnerVolumeSpecName "kube-api-access-q2kmc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:09:30 crc kubenswrapper[4558]: I0120 18:09:30.408602 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/bb55b2c7-9f9d-444a-8378-02a358ba4958-dnsmasq-svc\") pod \"bb55b2c7-9f9d-444a-8378-02a358ba4958\" (UID: \"bb55b2c7-9f9d-444a-8378-02a358ba4958\") " Jan 20 18:09:30 crc kubenswrapper[4558]: I0120 18:09:30.408675 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bb55b2c7-9f9d-444a-8378-02a358ba4958-config\") pod \"bb55b2c7-9f9d-444a-8378-02a358ba4958\" (UID: \"bb55b2c7-9f9d-444a-8378-02a358ba4958\") " Jan 20 18:09:30 crc kubenswrapper[4558]: I0120 18:09:30.408901 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q2kmc\" (UniqueName: \"kubernetes.io/projected/bb55b2c7-9f9d-444a-8378-02a358ba4958-kube-api-access-q2kmc\") on node \"crc\" DevicePath \"\"" Jan 20 18:09:30 crc kubenswrapper[4558]: I0120 18:09:30.435898 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bb55b2c7-9f9d-444a-8378-02a358ba4958-dnsmasq-svc" (OuterVolumeSpecName: "dnsmasq-svc") pod "bb55b2c7-9f9d-444a-8378-02a358ba4958" (UID: "bb55b2c7-9f9d-444a-8378-02a358ba4958"). InnerVolumeSpecName "dnsmasq-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:09:30 crc kubenswrapper[4558]: I0120 18:09:30.437721 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bb55b2c7-9f9d-444a-8378-02a358ba4958-config" (OuterVolumeSpecName: "config") pod "bb55b2c7-9f9d-444a-8378-02a358ba4958" (UID: "bb55b2c7-9f9d-444a-8378-02a358ba4958"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:09:30 crc kubenswrapper[4558]: I0120 18:09:30.441849 4558 generic.go:334] "Generic (PLEG): container finished" podID="bb55b2c7-9f9d-444a-8378-02a358ba4958" containerID="23e9fa21abe062da76f61c27d04e80c109c18f826f16c6b4a83280b385731918" exitCode=0 Jan 20 18:09:30 crc kubenswrapper[4558]: I0120 18:09:30.441905 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-cnfpf" event={"ID":"bb55b2c7-9f9d-444a-8378-02a358ba4958","Type":"ContainerDied","Data":"23e9fa21abe062da76f61c27d04e80c109c18f826f16c6b4a83280b385731918"} Jan 20 18:09:30 crc kubenswrapper[4558]: I0120 18:09:30.441936 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-cnfpf" event={"ID":"bb55b2c7-9f9d-444a-8378-02a358ba4958","Type":"ContainerDied","Data":"f9cafb1d0e0c28bfcee26abf7a3698a02a1ef22597823bf147dcf1040724c800"} Jan 20 18:09:30 crc kubenswrapper[4558]: I0120 18:09:30.441955 4558 scope.go:117] "RemoveContainer" containerID="23e9fa21abe062da76f61c27d04e80c109c18f826f16c6b4a83280b385731918" Jan 20 18:09:30 crc kubenswrapper[4558]: I0120 18:09:30.442111 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-cnfpf" Jan 20 18:09:30 crc kubenswrapper[4558]: I0120 18:09:30.465700 4558 scope.go:117] "RemoveContainer" containerID="45dd7fb1bc5040e597615af2dd59eed883a5d2bd53d08dab58c0fc2fd03e12c8" Jan 20 18:09:30 crc kubenswrapper[4558]: I0120 18:09:30.468736 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-cnfpf"] Jan 20 18:09:30 crc kubenswrapper[4558]: I0120 18:09:30.474009 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-cnfpf"] Jan 20 18:09:30 crc kubenswrapper[4558]: I0120 18:09:30.488124 4558 scope.go:117] "RemoveContainer" containerID="23e9fa21abe062da76f61c27d04e80c109c18f826f16c6b4a83280b385731918" Jan 20 18:09:30 crc kubenswrapper[4558]: E0120 18:09:30.488567 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"23e9fa21abe062da76f61c27d04e80c109c18f826f16c6b4a83280b385731918\": container with ID starting with 23e9fa21abe062da76f61c27d04e80c109c18f826f16c6b4a83280b385731918 not found: ID does not exist" containerID="23e9fa21abe062da76f61c27d04e80c109c18f826f16c6b4a83280b385731918" Jan 20 18:09:30 crc kubenswrapper[4558]: I0120 18:09:30.488634 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"23e9fa21abe062da76f61c27d04e80c109c18f826f16c6b4a83280b385731918"} err="failed to get container status \"23e9fa21abe062da76f61c27d04e80c109c18f826f16c6b4a83280b385731918\": rpc error: code = NotFound desc = could not find container \"23e9fa21abe062da76f61c27d04e80c109c18f826f16c6b4a83280b385731918\": container with ID starting with 23e9fa21abe062da76f61c27d04e80c109c18f826f16c6b4a83280b385731918 not found: ID does not exist" Jan 20 18:09:30 crc kubenswrapper[4558]: I0120 18:09:30.488679 4558 scope.go:117] "RemoveContainer" containerID="45dd7fb1bc5040e597615af2dd59eed883a5d2bd53d08dab58c0fc2fd03e12c8" Jan 20 18:09:30 crc kubenswrapper[4558]: E0120 18:09:30.489087 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"45dd7fb1bc5040e597615af2dd59eed883a5d2bd53d08dab58c0fc2fd03e12c8\": container with ID starting with 45dd7fb1bc5040e597615af2dd59eed883a5d2bd53d08dab58c0fc2fd03e12c8 not found: ID does not exist" containerID="45dd7fb1bc5040e597615af2dd59eed883a5d2bd53d08dab58c0fc2fd03e12c8" Jan 20 18:09:30 crc kubenswrapper[4558]: I0120 18:09:30.489191 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"45dd7fb1bc5040e597615af2dd59eed883a5d2bd53d08dab58c0fc2fd03e12c8"} err="failed to get container status \"45dd7fb1bc5040e597615af2dd59eed883a5d2bd53d08dab58c0fc2fd03e12c8\": rpc error: code = NotFound desc = could not find container \"45dd7fb1bc5040e597615af2dd59eed883a5d2bd53d08dab58c0fc2fd03e12c8\": container with ID starting with 45dd7fb1bc5040e597615af2dd59eed883a5d2bd53d08dab58c0fc2fd03e12c8 not found: ID does not exist" Jan 20 18:09:30 crc kubenswrapper[4558]: I0120 18:09:30.510248 4558 reconciler_common.go:293] "Volume detached for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/bb55b2c7-9f9d-444a-8378-02a358ba4958-dnsmasq-svc\") on node \"crc\" DevicePath \"\"" Jan 20 18:09:30 crc kubenswrapper[4558]: I0120 18:09:30.510280 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bb55b2c7-9f9d-444a-8378-02a358ba4958-config\") on node \"crc\" DevicePath \"\"" Jan 20 18:09:30 crc kubenswrapper[4558]: I0120 18:09:30.574499 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bb55b2c7-9f9d-444a-8378-02a358ba4958" path="/var/lib/kubelet/pods/bb55b2c7-9f9d-444a-8378-02a358ba4958/volumes" Jan 20 18:09:32 crc kubenswrapper[4558]: I0120 18:09:32.875322 4558 scope.go:117] "RemoveContainer" containerID="5cafd2c19011fe93c117ac03c2453a8fd0a8f11382645004596df4482604d641" Jan 20 18:09:32 crc kubenswrapper[4558]: I0120 18:09:32.902138 4558 scope.go:117] "RemoveContainer" containerID="bd1569896700f9b7af284848f74e4dfdba0e2cb1c7e54f9f4ac7ce817d50ebac" Jan 20 18:09:32 crc kubenswrapper[4558]: I0120 18:09:32.926022 4558 scope.go:117] "RemoveContainer" containerID="ab4aac561b9cf5e22720dd49c2034c427640d4636cf082503d78cb49e91bf3ab" Jan 20 18:09:32 crc kubenswrapper[4558]: I0120 18:09:32.949374 4558 scope.go:117] "RemoveContainer" containerID="5bf40472223623fb14809cdce4ee579c75362a851d306e78c2354777d1677c4c" Jan 20 18:09:32 crc kubenswrapper[4558]: I0120 18:09:32.971064 4558 scope.go:117] "RemoveContainer" containerID="658800ecf53ee1c56db45161afaf9b0aa0d93eb5e57b5b87faae5600e01895ec" Jan 20 18:09:32 crc kubenswrapper[4558]: I0120 18:09:32.987762 4558 scope.go:117] "RemoveContainer" containerID="e03cc394c485d82132b145aeda9118ad39cd87b0221b7049886130d0c947ec41" Jan 20 18:09:33 crc kubenswrapper[4558]: I0120 18:09:33.003192 4558 scope.go:117] "RemoveContainer" containerID="689c9cb422cdae5161aa35080cc015b394f770bc490f68a53ed82912791d7313" Jan 20 18:09:34 crc kubenswrapper[4558]: I0120 18:09:34.652307 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-dk5nb"] Jan 20 18:09:34 crc kubenswrapper[4558]: E0120 18:09:34.652589 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb55b2c7-9f9d-444a-8378-02a358ba4958" containerName="dnsmasq-dns" Jan 20 18:09:34 crc kubenswrapper[4558]: I0120 18:09:34.652602 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb55b2c7-9f9d-444a-8378-02a358ba4958" containerName="dnsmasq-dns" Jan 20 18:09:34 crc kubenswrapper[4558]: E0120 18:09:34.652627 4558 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="bb55b2c7-9f9d-444a-8378-02a358ba4958" containerName="init" Jan 20 18:09:34 crc kubenswrapper[4558]: I0120 18:09:34.652633 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb55b2c7-9f9d-444a-8378-02a358ba4958" containerName="init" Jan 20 18:09:34 crc kubenswrapper[4558]: I0120 18:09:34.652755 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="bb55b2c7-9f9d-444a-8378-02a358ba4958" containerName="dnsmasq-dns" Jan 20 18:09:34 crc kubenswrapper[4558]: I0120 18:09:34.653390 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-dk5nb" Jan 20 18:09:34 crc kubenswrapper[4558]: I0120 18:09:34.661008 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-dk5nb"] Jan 20 18:09:34 crc kubenswrapper[4558]: I0120 18:09:34.765221 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c9pvp\" (UniqueName: \"kubernetes.io/projected/3a51fa49-3ed3-4b8e-ba75-2fa83f5edcbd-kube-api-access-c9pvp\") pod \"dnsmasq-dnsmasq-84b9f45d47-dk5nb\" (UID: \"3a51fa49-3ed3-4b8e-ba75-2fa83f5edcbd\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-dk5nb" Jan 20 18:09:34 crc kubenswrapper[4558]: I0120 18:09:34.765751 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/3a51fa49-3ed3-4b8e-ba75-2fa83f5edcbd-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-84b9f45d47-dk5nb\" (UID: \"3a51fa49-3ed3-4b8e-ba75-2fa83f5edcbd\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-dk5nb" Jan 20 18:09:34 crc kubenswrapper[4558]: I0120 18:09:34.765867 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3a51fa49-3ed3-4b8e-ba75-2fa83f5edcbd-config\") pod \"dnsmasq-dnsmasq-84b9f45d47-dk5nb\" (UID: \"3a51fa49-3ed3-4b8e-ba75-2fa83f5edcbd\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-dk5nb" Jan 20 18:09:34 crc kubenswrapper[4558]: I0120 18:09:34.867804 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/3a51fa49-3ed3-4b8e-ba75-2fa83f5edcbd-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-84b9f45d47-dk5nb\" (UID: \"3a51fa49-3ed3-4b8e-ba75-2fa83f5edcbd\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-dk5nb" Jan 20 18:09:34 crc kubenswrapper[4558]: I0120 18:09:34.867918 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3a51fa49-3ed3-4b8e-ba75-2fa83f5edcbd-config\") pod \"dnsmasq-dnsmasq-84b9f45d47-dk5nb\" (UID: \"3a51fa49-3ed3-4b8e-ba75-2fa83f5edcbd\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-dk5nb" Jan 20 18:09:34 crc kubenswrapper[4558]: I0120 18:09:34.868032 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c9pvp\" (UniqueName: \"kubernetes.io/projected/3a51fa49-3ed3-4b8e-ba75-2fa83f5edcbd-kube-api-access-c9pvp\") pod \"dnsmasq-dnsmasq-84b9f45d47-dk5nb\" (UID: \"3a51fa49-3ed3-4b8e-ba75-2fa83f5edcbd\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-dk5nb" Jan 20 18:09:34 crc kubenswrapper[4558]: I0120 18:09:34.868841 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dnsmasq-svc\" (UniqueName: 
\"kubernetes.io/configmap/3a51fa49-3ed3-4b8e-ba75-2fa83f5edcbd-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-84b9f45d47-dk5nb\" (UID: \"3a51fa49-3ed3-4b8e-ba75-2fa83f5edcbd\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-dk5nb" Jan 20 18:09:34 crc kubenswrapper[4558]: I0120 18:09:34.868871 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3a51fa49-3ed3-4b8e-ba75-2fa83f5edcbd-config\") pod \"dnsmasq-dnsmasq-84b9f45d47-dk5nb\" (UID: \"3a51fa49-3ed3-4b8e-ba75-2fa83f5edcbd\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-dk5nb" Jan 20 18:09:34 crc kubenswrapper[4558]: I0120 18:09:34.887528 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c9pvp\" (UniqueName: \"kubernetes.io/projected/3a51fa49-3ed3-4b8e-ba75-2fa83f5edcbd-kube-api-access-c9pvp\") pod \"dnsmasq-dnsmasq-84b9f45d47-dk5nb\" (UID: \"3a51fa49-3ed3-4b8e-ba75-2fa83f5edcbd\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-dk5nb" Jan 20 18:09:34 crc kubenswrapper[4558]: I0120 18:09:34.969236 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-dk5nb" Jan 20 18:09:35 crc kubenswrapper[4558]: I0120 18:09:35.362684 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-dk5nb"] Jan 20 18:09:35 crc kubenswrapper[4558]: I0120 18:09:35.482468 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-dk5nb" event={"ID":"3a51fa49-3ed3-4b8e-ba75-2fa83f5edcbd","Type":"ContainerStarted","Data":"dd1528fe842294d5cded7f8bf6e5b324b03043597c193a11cc6104d0fecda0af"} Jan 20 18:09:36 crc kubenswrapper[4558]: I0120 18:09:36.493460 4558 generic.go:334] "Generic (PLEG): container finished" podID="3a51fa49-3ed3-4b8e-ba75-2fa83f5edcbd" containerID="794a063720b0483269dc27756f88e21b3da791506ad1daebf1a349255226d627" exitCode=0 Jan 20 18:09:36 crc kubenswrapper[4558]: I0120 18:09:36.493583 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-dk5nb" event={"ID":"3a51fa49-3ed3-4b8e-ba75-2fa83f5edcbd","Type":"ContainerDied","Data":"794a063720b0483269dc27756f88e21b3da791506ad1daebf1a349255226d627"} Jan 20 18:09:37 crc kubenswrapper[4558]: I0120 18:09:37.505265 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-dk5nb" event={"ID":"3a51fa49-3ed3-4b8e-ba75-2fa83f5edcbd","Type":"ContainerStarted","Data":"b090553c9ad2f835c8c00cd860013f3e064b6a1b9e9e43aef5b5a4c88d6d1599"} Jan 20 18:09:37 crc kubenswrapper[4558]: I0120 18:09:37.505734 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-dk5nb" Jan 20 18:09:37 crc kubenswrapper[4558]: I0120 18:09:37.528744 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-dk5nb" podStartSLOduration=3.528724607 podStartE2EDuration="3.528724607s" podCreationTimestamp="2026-01-20 18:09:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:09:37.5193601 +0000 UTC m=+5271.279698066" watchObservedRunningTime="2026-01-20 18:09:37.528724607 +0000 UTC m=+5271.289062574" Jan 20 18:09:41 crc kubenswrapper[4558]: I0120 18:09:41.372993 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["crc-storage/crc-storage-crc-2cwnp"] Jan 20 18:09:41 crc kubenswrapper[4558]: I0120 18:09:41.376572 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-2cwnp"] Jan 20 18:09:41 crc kubenswrapper[4558]: I0120 18:09:41.500863 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-4lzfd"] Jan 20 18:09:41 crc kubenswrapper[4558]: I0120 18:09:41.502636 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-4lzfd" Jan 20 18:09:41 crc kubenswrapper[4558]: I0120 18:09:41.504941 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Jan 20 18:09:41 crc kubenswrapper[4558]: I0120 18:09:41.505271 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Jan 20 18:09:41 crc kubenswrapper[4558]: I0120 18:09:41.505350 4558 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-k9ht8" Jan 20 18:09:41 crc kubenswrapper[4558]: I0120 18:09:41.509135 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Jan 20 18:09:41 crc kubenswrapper[4558]: I0120 18:09:41.509476 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-4lzfd"] Jan 20 18:09:41 crc kubenswrapper[4558]: I0120 18:09:41.667979 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/f69e8d4c-fa57-423f-87f6-824a614af614-crc-storage\") pod \"crc-storage-crc-4lzfd\" (UID: \"f69e8d4c-fa57-423f-87f6-824a614af614\") " pod="crc-storage/crc-storage-crc-4lzfd" Jan 20 18:09:41 crc kubenswrapper[4558]: I0120 18:09:41.668135 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/f69e8d4c-fa57-423f-87f6-824a614af614-node-mnt\") pod \"crc-storage-crc-4lzfd\" (UID: \"f69e8d4c-fa57-423f-87f6-824a614af614\") " pod="crc-storage/crc-storage-crc-4lzfd" Jan 20 18:09:41 crc kubenswrapper[4558]: I0120 18:09:41.668406 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-72xmd\" (UniqueName: \"kubernetes.io/projected/f69e8d4c-fa57-423f-87f6-824a614af614-kube-api-access-72xmd\") pod \"crc-storage-crc-4lzfd\" (UID: \"f69e8d4c-fa57-423f-87f6-824a614af614\") " pod="crc-storage/crc-storage-crc-4lzfd" Jan 20 18:09:41 crc kubenswrapper[4558]: I0120 18:09:41.769561 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/f69e8d4c-fa57-423f-87f6-824a614af614-crc-storage\") pod \"crc-storage-crc-4lzfd\" (UID: \"f69e8d4c-fa57-423f-87f6-824a614af614\") " pod="crc-storage/crc-storage-crc-4lzfd" Jan 20 18:09:41 crc kubenswrapper[4558]: I0120 18:09:41.769679 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/f69e8d4c-fa57-423f-87f6-824a614af614-node-mnt\") pod \"crc-storage-crc-4lzfd\" (UID: \"f69e8d4c-fa57-423f-87f6-824a614af614\") " pod="crc-storage/crc-storage-crc-4lzfd" Jan 20 18:09:41 crc kubenswrapper[4558]: I0120 18:09:41.769751 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-72xmd\" (UniqueName: 
\"kubernetes.io/projected/f69e8d4c-fa57-423f-87f6-824a614af614-kube-api-access-72xmd\") pod \"crc-storage-crc-4lzfd\" (UID: \"f69e8d4c-fa57-423f-87f6-824a614af614\") " pod="crc-storage/crc-storage-crc-4lzfd" Jan 20 18:09:41 crc kubenswrapper[4558]: I0120 18:09:41.770139 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/f69e8d4c-fa57-423f-87f6-824a614af614-node-mnt\") pod \"crc-storage-crc-4lzfd\" (UID: \"f69e8d4c-fa57-423f-87f6-824a614af614\") " pod="crc-storage/crc-storage-crc-4lzfd" Jan 20 18:09:41 crc kubenswrapper[4558]: I0120 18:09:41.770439 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/f69e8d4c-fa57-423f-87f6-824a614af614-crc-storage\") pod \"crc-storage-crc-4lzfd\" (UID: \"f69e8d4c-fa57-423f-87f6-824a614af614\") " pod="crc-storage/crc-storage-crc-4lzfd" Jan 20 18:09:41 crc kubenswrapper[4558]: I0120 18:09:41.788350 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-72xmd\" (UniqueName: \"kubernetes.io/projected/f69e8d4c-fa57-423f-87f6-824a614af614-kube-api-access-72xmd\") pod \"crc-storage-crc-4lzfd\" (UID: \"f69e8d4c-fa57-423f-87f6-824a614af614\") " pod="crc-storage/crc-storage-crc-4lzfd" Jan 20 18:09:41 crc kubenswrapper[4558]: I0120 18:09:41.825663 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-4lzfd" Jan 20 18:09:42 crc kubenswrapper[4558]: I0120 18:09:42.210763 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-4lzfd"] Jan 20 18:09:42 crc kubenswrapper[4558]: I0120 18:09:42.546620 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-4lzfd" event={"ID":"f69e8d4c-fa57-423f-87f6-824a614af614","Type":"ContainerStarted","Data":"92af5e45b1a65d68f8a112be94429676ee8db2036550f2027122de480e4d0204"} Jan 20 18:09:42 crc kubenswrapper[4558]: I0120 18:09:42.572992 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c05e348b-ff2f-41d2-89b8-763acfa5b7b5" path="/var/lib/kubelet/pods/c05e348b-ff2f-41d2-89b8-763acfa5b7b5/volumes" Jan 20 18:09:43 crc kubenswrapper[4558]: I0120 18:09:43.559662 4558 generic.go:334] "Generic (PLEG): container finished" podID="f69e8d4c-fa57-423f-87f6-824a614af614" containerID="3d787a67513a01467bd03013a799bf24e5990a92f4c25bae72c6fb5e4a72cc1a" exitCode=0 Jan 20 18:09:43 crc kubenswrapper[4558]: I0120 18:09:43.559759 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-4lzfd" event={"ID":"f69e8d4c-fa57-423f-87f6-824a614af614","Type":"ContainerDied","Data":"3d787a67513a01467bd03013a799bf24e5990a92f4c25bae72c6fb5e4a72cc1a"} Jan 20 18:09:44 crc kubenswrapper[4558]: I0120 18:09:44.826621 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-4lzfd" Jan 20 18:09:44 crc kubenswrapper[4558]: I0120 18:09:44.971329 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-dk5nb" Jan 20 18:09:45 crc kubenswrapper[4558]: I0120 18:09:45.017303 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/f69e8d4c-fa57-423f-87f6-824a614af614-crc-storage\") pod \"f69e8d4c-fa57-423f-87f6-824a614af614\" (UID: \"f69e8d4c-fa57-423f-87f6-824a614af614\") " Jan 20 18:09:45 crc kubenswrapper[4558]: I0120 18:09:45.017383 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/f69e8d4c-fa57-423f-87f6-824a614af614-node-mnt\") pod \"f69e8d4c-fa57-423f-87f6-824a614af614\" (UID: \"f69e8d4c-fa57-423f-87f6-824a614af614\") " Jan 20 18:09:45 crc kubenswrapper[4558]: I0120 18:09:45.017439 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f69e8d4c-fa57-423f-87f6-824a614af614-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "f69e8d4c-fa57-423f-87f6-824a614af614" (UID: "f69e8d4c-fa57-423f-87f6-824a614af614"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 18:09:45 crc kubenswrapper[4558]: I0120 18:09:45.017814 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-72xmd\" (UniqueName: \"kubernetes.io/projected/f69e8d4c-fa57-423f-87f6-824a614af614-kube-api-access-72xmd\") pod \"f69e8d4c-fa57-423f-87f6-824a614af614\" (UID: \"f69e8d4c-fa57-423f-87f6-824a614af614\") " Jan 20 18:09:45 crc kubenswrapper[4558]: I0120 18:09:45.018718 4558 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/f69e8d4c-fa57-423f-87f6-824a614af614-node-mnt\") on node \"crc\" DevicePath \"\"" Jan 20 18:09:45 crc kubenswrapper[4558]: I0120 18:09:45.023755 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-76cd9645f5-4nltx"] Jan 20 18:09:45 crc kubenswrapper[4558]: I0120 18:09:45.024721 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-76cd9645f5-4nltx" podUID="2875a937-0d66-4a2f-8284-15c3d04fb18b" containerName="dnsmasq-dns" containerID="cri-o://e7b368f493a8a85c2a0e97578c74841e25761e54551addd120780deda7caa493" gracePeriod=10 Jan 20 18:09:45 crc kubenswrapper[4558]: I0120 18:09:45.026586 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f69e8d4c-fa57-423f-87f6-824a614af614-kube-api-access-72xmd" (OuterVolumeSpecName: "kube-api-access-72xmd") pod "f69e8d4c-fa57-423f-87f6-824a614af614" (UID: "f69e8d4c-fa57-423f-87f6-824a614af614"). InnerVolumeSpecName "kube-api-access-72xmd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:09:45 crc kubenswrapper[4558]: I0120 18:09:45.038297 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f69e8d4c-fa57-423f-87f6-824a614af614-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "f69e8d4c-fa57-423f-87f6-824a614af614" (UID: "f69e8d4c-fa57-423f-87f6-824a614af614"). InnerVolumeSpecName "crc-storage". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:09:45 crc kubenswrapper[4558]: I0120 18:09:45.119671 4558 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/f69e8d4c-fa57-423f-87f6-824a614af614-crc-storage\") on node \"crc\" DevicePath \"\"" Jan 20 18:09:45 crc kubenswrapper[4558]: I0120 18:09:45.119704 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-72xmd\" (UniqueName: \"kubernetes.io/projected/f69e8d4c-fa57-423f-87f6-824a614af614-kube-api-access-72xmd\") on node \"crc\" DevicePath \"\"" Jan 20 18:09:45 crc kubenswrapper[4558]: I0120 18:09:45.346397 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-76cd9645f5-4nltx" Jan 20 18:09:45 crc kubenswrapper[4558]: I0120 18:09:45.423957 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2875a937-0d66-4a2f-8284-15c3d04fb18b-config\") pod \"2875a937-0d66-4a2f-8284-15c3d04fb18b\" (UID: \"2875a937-0d66-4a2f-8284-15c3d04fb18b\") " Jan 20 18:09:45 crc kubenswrapper[4558]: I0120 18:09:45.424077 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/2875a937-0d66-4a2f-8284-15c3d04fb18b-dnsmasq-svc\") pod \"2875a937-0d66-4a2f-8284-15c3d04fb18b\" (UID: \"2875a937-0d66-4a2f-8284-15c3d04fb18b\") " Jan 20 18:09:45 crc kubenswrapper[4558]: I0120 18:09:45.424238 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kkbng\" (UniqueName: \"kubernetes.io/projected/2875a937-0d66-4a2f-8284-15c3d04fb18b-kube-api-access-kkbng\") pod \"2875a937-0d66-4a2f-8284-15c3d04fb18b\" (UID: \"2875a937-0d66-4a2f-8284-15c3d04fb18b\") " Jan 20 18:09:45 crc kubenswrapper[4558]: I0120 18:09:45.424332 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"edpm-extramounts\" (UniqueName: \"kubernetes.io/configmap/2875a937-0d66-4a2f-8284-15c3d04fb18b-edpm-extramounts\") pod \"2875a937-0d66-4a2f-8284-15c3d04fb18b\" (UID: \"2875a937-0d66-4a2f-8284-15c3d04fb18b\") " Jan 20 18:09:45 crc kubenswrapper[4558]: I0120 18:09:45.428940 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2875a937-0d66-4a2f-8284-15c3d04fb18b-kube-api-access-kkbng" (OuterVolumeSpecName: "kube-api-access-kkbng") pod "2875a937-0d66-4a2f-8284-15c3d04fb18b" (UID: "2875a937-0d66-4a2f-8284-15c3d04fb18b"). InnerVolumeSpecName "kube-api-access-kkbng". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:09:45 crc kubenswrapper[4558]: I0120 18:09:45.455663 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2875a937-0d66-4a2f-8284-15c3d04fb18b-dnsmasq-svc" (OuterVolumeSpecName: "dnsmasq-svc") pod "2875a937-0d66-4a2f-8284-15c3d04fb18b" (UID: "2875a937-0d66-4a2f-8284-15c3d04fb18b"). InnerVolumeSpecName "dnsmasq-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:09:45 crc kubenswrapper[4558]: I0120 18:09:45.455674 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2875a937-0d66-4a2f-8284-15c3d04fb18b-config" (OuterVolumeSpecName: "config") pod "2875a937-0d66-4a2f-8284-15c3d04fb18b" (UID: "2875a937-0d66-4a2f-8284-15c3d04fb18b"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:09:45 crc kubenswrapper[4558]: I0120 18:09:45.456703 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2875a937-0d66-4a2f-8284-15c3d04fb18b-edpm-extramounts" (OuterVolumeSpecName: "edpm-extramounts") pod "2875a937-0d66-4a2f-8284-15c3d04fb18b" (UID: "2875a937-0d66-4a2f-8284-15c3d04fb18b"). InnerVolumeSpecName "edpm-extramounts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:09:45 crc kubenswrapper[4558]: I0120 18:09:45.525905 4558 reconciler_common.go:293] "Volume detached for volume \"edpm-extramounts\" (UniqueName: \"kubernetes.io/configmap/2875a937-0d66-4a2f-8284-15c3d04fb18b-edpm-extramounts\") on node \"crc\" DevicePath \"\"" Jan 20 18:09:45 crc kubenswrapper[4558]: I0120 18:09:45.525938 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2875a937-0d66-4a2f-8284-15c3d04fb18b-config\") on node \"crc\" DevicePath \"\"" Jan 20 18:09:45 crc kubenswrapper[4558]: I0120 18:09:45.525950 4558 reconciler_common.go:293] "Volume detached for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/2875a937-0d66-4a2f-8284-15c3d04fb18b-dnsmasq-svc\") on node \"crc\" DevicePath \"\"" Jan 20 18:09:45 crc kubenswrapper[4558]: I0120 18:09:45.525964 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kkbng\" (UniqueName: \"kubernetes.io/projected/2875a937-0d66-4a2f-8284-15c3d04fb18b-kube-api-access-kkbng\") on node \"crc\" DevicePath \"\"" Jan 20 18:09:45 crc kubenswrapper[4558]: I0120 18:09:45.578714 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-4lzfd" Jan 20 18:09:45 crc kubenswrapper[4558]: I0120 18:09:45.578709 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-4lzfd" event={"ID":"f69e8d4c-fa57-423f-87f6-824a614af614","Type":"ContainerDied","Data":"92af5e45b1a65d68f8a112be94429676ee8db2036550f2027122de480e4d0204"} Jan 20 18:09:45 crc kubenswrapper[4558]: I0120 18:09:45.578865 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="92af5e45b1a65d68f8a112be94429676ee8db2036550f2027122de480e4d0204" Jan 20 18:09:45 crc kubenswrapper[4558]: I0120 18:09:45.581560 4558 generic.go:334] "Generic (PLEG): container finished" podID="2875a937-0d66-4a2f-8284-15c3d04fb18b" containerID="e7b368f493a8a85c2a0e97578c74841e25761e54551addd120780deda7caa493" exitCode=0 Jan 20 18:09:45 crc kubenswrapper[4558]: I0120 18:09:45.581626 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-76cd9645f5-4nltx" event={"ID":"2875a937-0d66-4a2f-8284-15c3d04fb18b","Type":"ContainerDied","Data":"e7b368f493a8a85c2a0e97578c74841e25761e54551addd120780deda7caa493"} Jan 20 18:09:45 crc kubenswrapper[4558]: I0120 18:09:45.581671 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-76cd9645f5-4nltx" Jan 20 18:09:45 crc kubenswrapper[4558]: I0120 18:09:45.581684 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-76cd9645f5-4nltx" event={"ID":"2875a937-0d66-4a2f-8284-15c3d04fb18b","Type":"ContainerDied","Data":"e924dead23b4b43824318eaf2cdad0d92527263ad13dbf0631913794bc283d03"} Jan 20 18:09:45 crc kubenswrapper[4558]: I0120 18:09:45.581714 4558 scope.go:117] "RemoveContainer" containerID="e7b368f493a8a85c2a0e97578c74841e25761e54551addd120780deda7caa493" Jan 20 18:09:45 crc kubenswrapper[4558]: I0120 18:09:45.604405 4558 scope.go:117] "RemoveContainer" containerID="f312b42b96e28e625be99141e336dac5e2c77a900a5fd9fe3469d727d8d185e3" Jan 20 18:09:45 crc kubenswrapper[4558]: I0120 18:09:45.621976 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-76cd9645f5-4nltx"] Jan 20 18:09:45 crc kubenswrapper[4558]: I0120 18:09:45.623960 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-76cd9645f5-4nltx"] Jan 20 18:09:45 crc kubenswrapper[4558]: I0120 18:09:45.640757 4558 scope.go:117] "RemoveContainer" containerID="e7b368f493a8a85c2a0e97578c74841e25761e54551addd120780deda7caa493" Jan 20 18:09:45 crc kubenswrapper[4558]: E0120 18:09:45.641311 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e7b368f493a8a85c2a0e97578c74841e25761e54551addd120780deda7caa493\": container with ID starting with e7b368f493a8a85c2a0e97578c74841e25761e54551addd120780deda7caa493 not found: ID does not exist" containerID="e7b368f493a8a85c2a0e97578c74841e25761e54551addd120780deda7caa493" Jan 20 18:09:45 crc kubenswrapper[4558]: I0120 18:09:45.641367 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e7b368f493a8a85c2a0e97578c74841e25761e54551addd120780deda7caa493"} err="failed to get container status \"e7b368f493a8a85c2a0e97578c74841e25761e54551addd120780deda7caa493\": rpc error: code = NotFound desc = could not find container \"e7b368f493a8a85c2a0e97578c74841e25761e54551addd120780deda7caa493\": container with ID starting with e7b368f493a8a85c2a0e97578c74841e25761e54551addd120780deda7caa493 not found: ID does not exist" Jan 20 18:09:45 crc kubenswrapper[4558]: I0120 18:09:45.641404 4558 scope.go:117] "RemoveContainer" containerID="f312b42b96e28e625be99141e336dac5e2c77a900a5fd9fe3469d727d8d185e3" Jan 20 18:09:45 crc kubenswrapper[4558]: E0120 18:09:45.641829 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f312b42b96e28e625be99141e336dac5e2c77a900a5fd9fe3469d727d8d185e3\": container with ID starting with f312b42b96e28e625be99141e336dac5e2c77a900a5fd9fe3469d727d8d185e3 not found: ID does not exist" containerID="f312b42b96e28e625be99141e336dac5e2c77a900a5fd9fe3469d727d8d185e3" Jan 20 18:09:45 crc kubenswrapper[4558]: I0120 18:09:45.641930 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f312b42b96e28e625be99141e336dac5e2c77a900a5fd9fe3469d727d8d185e3"} err="failed to get container status \"f312b42b96e28e625be99141e336dac5e2c77a900a5fd9fe3469d727d8d185e3\": rpc error: code = NotFound desc = could not find container \"f312b42b96e28e625be99141e336dac5e2c77a900a5fd9fe3469d727d8d185e3\": container with ID starting with 
f312b42b96e28e625be99141e336dac5e2c77a900a5fd9fe3469d727d8d185e3 not found: ID does not exist" Jan 20 18:09:46 crc kubenswrapper[4558]: I0120 18:09:46.575852 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2875a937-0d66-4a2f-8284-15c3d04fb18b" path="/var/lib/kubelet/pods/2875a937-0d66-4a2f-8284-15c3d04fb18b/volumes" Jan 20 18:09:47 crc kubenswrapper[4558]: I0120 18:09:47.753237 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-4lzfd"] Jan 20 18:09:47 crc kubenswrapper[4558]: I0120 18:09:47.759585 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-4lzfd"] Jan 20 18:09:47 crc kubenswrapper[4558]: I0120 18:09:47.850292 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-hlkmf"] Jan 20 18:09:47 crc kubenswrapper[4558]: E0120 18:09:47.850744 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f69e8d4c-fa57-423f-87f6-824a614af614" containerName="storage" Jan 20 18:09:47 crc kubenswrapper[4558]: I0120 18:09:47.850766 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f69e8d4c-fa57-423f-87f6-824a614af614" containerName="storage" Jan 20 18:09:47 crc kubenswrapper[4558]: E0120 18:09:47.850799 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2875a937-0d66-4a2f-8284-15c3d04fb18b" containerName="init" Jan 20 18:09:47 crc kubenswrapper[4558]: I0120 18:09:47.850806 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="2875a937-0d66-4a2f-8284-15c3d04fb18b" containerName="init" Jan 20 18:09:47 crc kubenswrapper[4558]: E0120 18:09:47.850827 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2875a937-0d66-4a2f-8284-15c3d04fb18b" containerName="dnsmasq-dns" Jan 20 18:09:47 crc kubenswrapper[4558]: I0120 18:09:47.850833 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="2875a937-0d66-4a2f-8284-15c3d04fb18b" containerName="dnsmasq-dns" Jan 20 18:09:47 crc kubenswrapper[4558]: I0120 18:09:47.851053 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f69e8d4c-fa57-423f-87f6-824a614af614" containerName="storage" Jan 20 18:09:47 crc kubenswrapper[4558]: I0120 18:09:47.851075 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="2875a937-0d66-4a2f-8284-15c3d04fb18b" containerName="dnsmasq-dns" Jan 20 18:09:47 crc kubenswrapper[4558]: I0120 18:09:47.851793 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-hlkmf" Jan 20 18:09:47 crc kubenswrapper[4558]: I0120 18:09:47.853867 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Jan 20 18:09:47 crc kubenswrapper[4558]: I0120 18:09:47.854070 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Jan 20 18:09:47 crc kubenswrapper[4558]: I0120 18:09:47.854468 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Jan 20 18:09:47 crc kubenswrapper[4558]: I0120 18:09:47.854473 4558 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-k9ht8" Jan 20 18:09:47 crc kubenswrapper[4558]: I0120 18:09:47.859010 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/0cff9a6b-f955-41c2-9326-79d04336e59a-crc-storage\") pod \"crc-storage-crc-hlkmf\" (UID: \"0cff9a6b-f955-41c2-9326-79d04336e59a\") " pod="crc-storage/crc-storage-crc-hlkmf" Jan 20 18:09:47 crc kubenswrapper[4558]: I0120 18:09:47.859085 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tckbk\" (UniqueName: \"kubernetes.io/projected/0cff9a6b-f955-41c2-9326-79d04336e59a-kube-api-access-tckbk\") pod \"crc-storage-crc-hlkmf\" (UID: \"0cff9a6b-f955-41c2-9326-79d04336e59a\") " pod="crc-storage/crc-storage-crc-hlkmf" Jan 20 18:09:47 crc kubenswrapper[4558]: I0120 18:09:47.859354 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/0cff9a6b-f955-41c2-9326-79d04336e59a-node-mnt\") pod \"crc-storage-crc-hlkmf\" (UID: \"0cff9a6b-f955-41c2-9326-79d04336e59a\") " pod="crc-storage/crc-storage-crc-hlkmf" Jan 20 18:09:47 crc kubenswrapper[4558]: I0120 18:09:47.864789 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-hlkmf"] Jan 20 18:09:47 crc kubenswrapper[4558]: I0120 18:09:47.960027 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/0cff9a6b-f955-41c2-9326-79d04336e59a-node-mnt\") pod \"crc-storage-crc-hlkmf\" (UID: \"0cff9a6b-f955-41c2-9326-79d04336e59a\") " pod="crc-storage/crc-storage-crc-hlkmf" Jan 20 18:09:47 crc kubenswrapper[4558]: I0120 18:09:47.960117 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/0cff9a6b-f955-41c2-9326-79d04336e59a-crc-storage\") pod \"crc-storage-crc-hlkmf\" (UID: \"0cff9a6b-f955-41c2-9326-79d04336e59a\") " pod="crc-storage/crc-storage-crc-hlkmf" Jan 20 18:09:47 crc kubenswrapper[4558]: I0120 18:09:47.960355 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/0cff9a6b-f955-41c2-9326-79d04336e59a-node-mnt\") pod \"crc-storage-crc-hlkmf\" (UID: \"0cff9a6b-f955-41c2-9326-79d04336e59a\") " pod="crc-storage/crc-storage-crc-hlkmf" Jan 20 18:09:47 crc kubenswrapper[4558]: I0120 18:09:47.960588 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tckbk\" (UniqueName: \"kubernetes.io/projected/0cff9a6b-f955-41c2-9326-79d04336e59a-kube-api-access-tckbk\") pod \"crc-storage-crc-hlkmf\" (UID: \"0cff9a6b-f955-41c2-9326-79d04336e59a\") " 
pod="crc-storage/crc-storage-crc-hlkmf" Jan 20 18:09:47 crc kubenswrapper[4558]: I0120 18:09:47.961499 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/0cff9a6b-f955-41c2-9326-79d04336e59a-crc-storage\") pod \"crc-storage-crc-hlkmf\" (UID: \"0cff9a6b-f955-41c2-9326-79d04336e59a\") " pod="crc-storage/crc-storage-crc-hlkmf" Jan 20 18:09:47 crc kubenswrapper[4558]: I0120 18:09:47.976375 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tckbk\" (UniqueName: \"kubernetes.io/projected/0cff9a6b-f955-41c2-9326-79d04336e59a-kube-api-access-tckbk\") pod \"crc-storage-crc-hlkmf\" (UID: \"0cff9a6b-f955-41c2-9326-79d04336e59a\") " pod="crc-storage/crc-storage-crc-hlkmf" Jan 20 18:09:48 crc kubenswrapper[4558]: I0120 18:09:48.170385 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-hlkmf" Jan 20 18:09:48 crc kubenswrapper[4558]: I0120 18:09:48.377937 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-hlkmf"] Jan 20 18:09:48 crc kubenswrapper[4558]: I0120 18:09:48.575017 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f69e8d4c-fa57-423f-87f6-824a614af614" path="/var/lib/kubelet/pods/f69e8d4c-fa57-423f-87f6-824a614af614/volumes" Jan 20 18:09:48 crc kubenswrapper[4558]: I0120 18:09:48.610552 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-hlkmf" event={"ID":"0cff9a6b-f955-41c2-9326-79d04336e59a","Type":"ContainerStarted","Data":"660f98907214db6818c99f9ed7e02a2aa26d23a5df234601aaafca7c8024b73f"} Jan 20 18:09:49 crc kubenswrapper[4558]: I0120 18:09:49.619883 4558 generic.go:334] "Generic (PLEG): container finished" podID="0cff9a6b-f955-41c2-9326-79d04336e59a" containerID="2c136bf4f1fc73828df55445d8c8192f0b65b0db901cbd4618de6f84b06105f3" exitCode=0 Jan 20 18:09:49 crc kubenswrapper[4558]: I0120 18:09:49.620035 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-hlkmf" event={"ID":"0cff9a6b-f955-41c2-9326-79d04336e59a","Type":"ContainerDied","Data":"2c136bf4f1fc73828df55445d8c8192f0b65b0db901cbd4618de6f84b06105f3"} Jan 20 18:09:50 crc kubenswrapper[4558]: I0120 18:09:50.847512 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-hlkmf" Jan 20 18:09:51 crc kubenswrapper[4558]: I0120 18:09:51.004311 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/0cff9a6b-f955-41c2-9326-79d04336e59a-node-mnt\") pod \"0cff9a6b-f955-41c2-9326-79d04336e59a\" (UID: \"0cff9a6b-f955-41c2-9326-79d04336e59a\") " Jan 20 18:09:51 crc kubenswrapper[4558]: I0120 18:09:51.004439 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0cff9a6b-f955-41c2-9326-79d04336e59a-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "0cff9a6b-f955-41c2-9326-79d04336e59a" (UID: "0cff9a6b-f955-41c2-9326-79d04336e59a"). InnerVolumeSpecName "node-mnt". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 18:09:51 crc kubenswrapper[4558]: I0120 18:09:51.004706 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/0cff9a6b-f955-41c2-9326-79d04336e59a-crc-storage\") pod \"0cff9a6b-f955-41c2-9326-79d04336e59a\" (UID: \"0cff9a6b-f955-41c2-9326-79d04336e59a\") " Jan 20 18:09:51 crc kubenswrapper[4558]: I0120 18:09:51.004780 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tckbk\" (UniqueName: \"kubernetes.io/projected/0cff9a6b-f955-41c2-9326-79d04336e59a-kube-api-access-tckbk\") pod \"0cff9a6b-f955-41c2-9326-79d04336e59a\" (UID: \"0cff9a6b-f955-41c2-9326-79d04336e59a\") " Jan 20 18:09:51 crc kubenswrapper[4558]: I0120 18:09:51.005140 4558 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/0cff9a6b-f955-41c2-9326-79d04336e59a-node-mnt\") on node \"crc\" DevicePath \"\"" Jan 20 18:09:51 crc kubenswrapper[4558]: I0120 18:09:51.009281 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0cff9a6b-f955-41c2-9326-79d04336e59a-kube-api-access-tckbk" (OuterVolumeSpecName: "kube-api-access-tckbk") pod "0cff9a6b-f955-41c2-9326-79d04336e59a" (UID: "0cff9a6b-f955-41c2-9326-79d04336e59a"). InnerVolumeSpecName "kube-api-access-tckbk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:09:51 crc kubenswrapper[4558]: I0120 18:09:51.020350 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0cff9a6b-f955-41c2-9326-79d04336e59a-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "0cff9a6b-f955-41c2-9326-79d04336e59a" (UID: "0cff9a6b-f955-41c2-9326-79d04336e59a"). InnerVolumeSpecName "crc-storage". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:09:51 crc kubenswrapper[4558]: I0120 18:09:51.106435 4558 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/0cff9a6b-f955-41c2-9326-79d04336e59a-crc-storage\") on node \"crc\" DevicePath \"\"" Jan 20 18:09:51 crc kubenswrapper[4558]: I0120 18:09:51.106466 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tckbk\" (UniqueName: \"kubernetes.io/projected/0cff9a6b-f955-41c2-9326-79d04336e59a-kube-api-access-tckbk\") on node \"crc\" DevicePath \"\"" Jan 20 18:09:51 crc kubenswrapper[4558]: I0120 18:09:51.636913 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-hlkmf" event={"ID":"0cff9a6b-f955-41c2-9326-79d04336e59a","Type":"ContainerDied","Data":"660f98907214db6818c99f9ed7e02a2aa26d23a5df234601aaafca7c8024b73f"} Jan 20 18:09:51 crc kubenswrapper[4558]: I0120 18:09:51.636951 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="660f98907214db6818c99f9ed7e02a2aa26d23a5df234601aaafca7c8024b73f" Jan 20 18:09:51 crc kubenswrapper[4558]: I0120 18:09:51.637002 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-hlkmf" Jan 20 18:09:53 crc kubenswrapper[4558]: I0120 18:09:53.786649 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-58854494b5-8cdrf"] Jan 20 18:09:53 crc kubenswrapper[4558]: E0120 18:09:53.787130 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0cff9a6b-f955-41c2-9326-79d04336e59a" containerName="storage" Jan 20 18:09:53 crc kubenswrapper[4558]: I0120 18:09:53.787143 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0cff9a6b-f955-41c2-9326-79d04336e59a" containerName="storage" Jan 20 18:09:53 crc kubenswrapper[4558]: I0120 18:09:53.787433 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="0cff9a6b-f955-41c2-9326-79d04336e59a" containerName="storage" Jan 20 18:09:53 crc kubenswrapper[4558]: I0120 18:09:53.788060 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-58854494b5-8cdrf" Jan 20 18:09:53 crc kubenswrapper[4558]: I0120 18:09:53.789822 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-edpm-multinode" Jan 20 18:09:53 crc kubenswrapper[4558]: I0120 18:09:53.796281 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-58854494b5-8cdrf"] Jan 20 18:09:53 crc kubenswrapper[4558]: I0120 18:09:53.839531 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/dc21ff3e-2d6f-4b52-aff7-25db4937bea6-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-58854494b5-8cdrf\" (UID: \"dc21ff3e-2d6f-4b52-aff7-25db4937bea6\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-58854494b5-8cdrf" Jan 20 18:09:53 crc kubenswrapper[4558]: I0120 18:09:53.839707 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dc21ff3e-2d6f-4b52-aff7-25db4937bea6-config\") pod \"dnsmasq-dnsmasq-58854494b5-8cdrf\" (UID: \"dc21ff3e-2d6f-4b52-aff7-25db4937bea6\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-58854494b5-8cdrf" Jan 20 18:09:53 crc kubenswrapper[4558]: I0120 18:09:53.839754 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z4bsk\" (UniqueName: \"kubernetes.io/projected/dc21ff3e-2d6f-4b52-aff7-25db4937bea6-kube-api-access-z4bsk\") pod \"dnsmasq-dnsmasq-58854494b5-8cdrf\" (UID: \"dc21ff3e-2d6f-4b52-aff7-25db4937bea6\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-58854494b5-8cdrf" Jan 20 18:09:53 crc kubenswrapper[4558]: I0120 18:09:53.839790 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-multinode\" (UniqueName: \"kubernetes.io/configmap/dc21ff3e-2d6f-4b52-aff7-25db4937bea6-openstack-edpm-multinode\") pod \"dnsmasq-dnsmasq-58854494b5-8cdrf\" (UID: \"dc21ff3e-2d6f-4b52-aff7-25db4937bea6\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-58854494b5-8cdrf" Jan 20 18:09:53 crc kubenswrapper[4558]: I0120 18:09:53.941181 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dc21ff3e-2d6f-4b52-aff7-25db4937bea6-config\") pod \"dnsmasq-dnsmasq-58854494b5-8cdrf\" (UID: \"dc21ff3e-2d6f-4b52-aff7-25db4937bea6\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-58854494b5-8cdrf" Jan 20 18:09:53 crc kubenswrapper[4558]: I0120 
18:09:53.941230 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z4bsk\" (UniqueName: \"kubernetes.io/projected/dc21ff3e-2d6f-4b52-aff7-25db4937bea6-kube-api-access-z4bsk\") pod \"dnsmasq-dnsmasq-58854494b5-8cdrf\" (UID: \"dc21ff3e-2d6f-4b52-aff7-25db4937bea6\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-58854494b5-8cdrf" Jan 20 18:09:53 crc kubenswrapper[4558]: I0120 18:09:53.941266 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-multinode\" (UniqueName: \"kubernetes.io/configmap/dc21ff3e-2d6f-4b52-aff7-25db4937bea6-openstack-edpm-multinode\") pod \"dnsmasq-dnsmasq-58854494b5-8cdrf\" (UID: \"dc21ff3e-2d6f-4b52-aff7-25db4937bea6\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-58854494b5-8cdrf" Jan 20 18:09:53 crc kubenswrapper[4558]: I0120 18:09:53.941319 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/dc21ff3e-2d6f-4b52-aff7-25db4937bea6-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-58854494b5-8cdrf\" (UID: \"dc21ff3e-2d6f-4b52-aff7-25db4937bea6\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-58854494b5-8cdrf" Jan 20 18:09:53 crc kubenswrapper[4558]: I0120 18:09:53.942088 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/dc21ff3e-2d6f-4b52-aff7-25db4937bea6-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-58854494b5-8cdrf\" (UID: \"dc21ff3e-2d6f-4b52-aff7-25db4937bea6\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-58854494b5-8cdrf" Jan 20 18:09:53 crc kubenswrapper[4558]: I0120 18:09:53.942130 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-multinode\" (UniqueName: \"kubernetes.io/configmap/dc21ff3e-2d6f-4b52-aff7-25db4937bea6-openstack-edpm-multinode\") pod \"dnsmasq-dnsmasq-58854494b5-8cdrf\" (UID: \"dc21ff3e-2d6f-4b52-aff7-25db4937bea6\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-58854494b5-8cdrf" Jan 20 18:09:53 crc kubenswrapper[4558]: I0120 18:09:53.942134 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dc21ff3e-2d6f-4b52-aff7-25db4937bea6-config\") pod \"dnsmasq-dnsmasq-58854494b5-8cdrf\" (UID: \"dc21ff3e-2d6f-4b52-aff7-25db4937bea6\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-58854494b5-8cdrf" Jan 20 18:09:53 crc kubenswrapper[4558]: I0120 18:09:53.957085 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z4bsk\" (UniqueName: \"kubernetes.io/projected/dc21ff3e-2d6f-4b52-aff7-25db4937bea6-kube-api-access-z4bsk\") pod \"dnsmasq-dnsmasq-58854494b5-8cdrf\" (UID: \"dc21ff3e-2d6f-4b52-aff7-25db4937bea6\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-58854494b5-8cdrf" Jan 20 18:09:54 crc kubenswrapper[4558]: I0120 18:09:54.103005 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-58854494b5-8cdrf" Jan 20 18:09:54 crc kubenswrapper[4558]: I0120 18:09:54.487260 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-58854494b5-8cdrf"] Jan 20 18:09:54 crc kubenswrapper[4558]: I0120 18:09:54.662249 4558 generic.go:334] "Generic (PLEG): container finished" podID="dc21ff3e-2d6f-4b52-aff7-25db4937bea6" containerID="5331eabdc3f1c776e6d2173468268d41767dbaea4e12973cedd48e4c20dbf984" exitCode=0 Jan 20 18:09:54 crc kubenswrapper[4558]: I0120 18:09:54.662297 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-58854494b5-8cdrf" event={"ID":"dc21ff3e-2d6f-4b52-aff7-25db4937bea6","Type":"ContainerDied","Data":"5331eabdc3f1c776e6d2173468268d41767dbaea4e12973cedd48e4c20dbf984"} Jan 20 18:09:54 crc kubenswrapper[4558]: I0120 18:09:54.662524 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-58854494b5-8cdrf" event={"ID":"dc21ff3e-2d6f-4b52-aff7-25db4937bea6","Type":"ContainerStarted","Data":"bc9b80b600a905713a84d9caeed8450941391593d6384bb756760c98a606f709"} Jan 20 18:09:55 crc kubenswrapper[4558]: I0120 18:09:55.671946 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-58854494b5-8cdrf" event={"ID":"dc21ff3e-2d6f-4b52-aff7-25db4937bea6","Type":"ContainerStarted","Data":"6d4da5ea489121df042ad5de2999ecc10efd718364311f2f2c27398d667f0d83"} Jan 20 18:09:55 crc kubenswrapper[4558]: I0120 18:09:55.672071 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-58854494b5-8cdrf" Jan 20 18:09:55 crc kubenswrapper[4558]: I0120 18:09:55.689466 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-58854494b5-8cdrf" podStartSLOduration=2.689451715 podStartE2EDuration="2.689451715s" podCreationTimestamp="2026-01-20 18:09:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:09:55.683734646 +0000 UTC m=+5289.444072612" watchObservedRunningTime="2026-01-20 18:09:55.689451715 +0000 UTC m=+5289.449789681" Jan 20 18:09:59 crc kubenswrapper[4558]: I0120 18:09:59.104409 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-58854494b5-8cdrf" Jan 20 18:09:59 crc kubenswrapper[4558]: I0120 18:09:59.147977 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-dk5nb"] Jan 20 18:09:59 crc kubenswrapper[4558]: I0120 18:09:59.148192 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-dk5nb" podUID="3a51fa49-3ed3-4b8e-ba75-2fa83f5edcbd" containerName="dnsmasq-dns" containerID="cri-o://b090553c9ad2f835c8c00cd860013f3e064b6a1b9e9e43aef5b5a4c88d6d1599" gracePeriod=10 Jan 20 18:09:59 crc kubenswrapper[4558]: I0120 18:09:59.239735 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-59887957c5-cml4d"] Jan 20 18:09:59 crc kubenswrapper[4558]: I0120 18:09:59.244194 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-59887957c5-cml4d" Jan 20 18:09:59 crc kubenswrapper[4558]: I0120 18:09:59.245848 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-59887957c5-cml4d"] Jan 20 18:09:59 crc kubenswrapper[4558]: I0120 18:09:59.316661 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/b423fbfc-09c9-4a39-bfe4-52c733a9c9e0-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-59887957c5-cml4d\" (UID: \"b423fbfc-09c9-4a39-bfe4-52c733a9c9e0\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-59887957c5-cml4d" Jan 20 18:09:59 crc kubenswrapper[4558]: I0120 18:09:59.316722 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-multinode\" (UniqueName: \"kubernetes.io/configmap/b423fbfc-09c9-4a39-bfe4-52c733a9c9e0-openstack-edpm-multinode\") pod \"dnsmasq-dnsmasq-59887957c5-cml4d\" (UID: \"b423fbfc-09c9-4a39-bfe4-52c733a9c9e0\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-59887957c5-cml4d" Jan 20 18:09:59 crc kubenswrapper[4558]: I0120 18:09:59.316746 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cl4f4\" (UniqueName: \"kubernetes.io/projected/b423fbfc-09c9-4a39-bfe4-52c733a9c9e0-kube-api-access-cl4f4\") pod \"dnsmasq-dnsmasq-59887957c5-cml4d\" (UID: \"b423fbfc-09c9-4a39-bfe4-52c733a9c9e0\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-59887957c5-cml4d" Jan 20 18:09:59 crc kubenswrapper[4558]: I0120 18:09:59.316781 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b423fbfc-09c9-4a39-bfe4-52c733a9c9e0-config\") pod \"dnsmasq-dnsmasq-59887957c5-cml4d\" (UID: \"b423fbfc-09c9-4a39-bfe4-52c733a9c9e0\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-59887957c5-cml4d" Jan 20 18:09:59 crc kubenswrapper[4558]: I0120 18:09:59.418709 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/b423fbfc-09c9-4a39-bfe4-52c733a9c9e0-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-59887957c5-cml4d\" (UID: \"b423fbfc-09c9-4a39-bfe4-52c733a9c9e0\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-59887957c5-cml4d" Jan 20 18:09:59 crc kubenswrapper[4558]: I0120 18:09:59.419127 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-multinode\" (UniqueName: \"kubernetes.io/configmap/b423fbfc-09c9-4a39-bfe4-52c733a9c9e0-openstack-edpm-multinode\") pod \"dnsmasq-dnsmasq-59887957c5-cml4d\" (UID: \"b423fbfc-09c9-4a39-bfe4-52c733a9c9e0\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-59887957c5-cml4d" Jan 20 18:09:59 crc kubenswrapper[4558]: I0120 18:09:59.419156 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cl4f4\" (UniqueName: \"kubernetes.io/projected/b423fbfc-09c9-4a39-bfe4-52c733a9c9e0-kube-api-access-cl4f4\") pod \"dnsmasq-dnsmasq-59887957c5-cml4d\" (UID: \"b423fbfc-09c9-4a39-bfe4-52c733a9c9e0\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-59887957c5-cml4d" Jan 20 18:09:59 crc kubenswrapper[4558]: I0120 18:09:59.419215 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b423fbfc-09c9-4a39-bfe4-52c733a9c9e0-config\") pod \"dnsmasq-dnsmasq-59887957c5-cml4d\" (UID: 
\"b423fbfc-09c9-4a39-bfe4-52c733a9c9e0\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-59887957c5-cml4d" Jan 20 18:09:59 crc kubenswrapper[4558]: I0120 18:09:59.419876 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/b423fbfc-09c9-4a39-bfe4-52c733a9c9e0-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-59887957c5-cml4d\" (UID: \"b423fbfc-09c9-4a39-bfe4-52c733a9c9e0\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-59887957c5-cml4d" Jan 20 18:09:59 crc kubenswrapper[4558]: I0120 18:09:59.420274 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b423fbfc-09c9-4a39-bfe4-52c733a9c9e0-config\") pod \"dnsmasq-dnsmasq-59887957c5-cml4d\" (UID: \"b423fbfc-09c9-4a39-bfe4-52c733a9c9e0\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-59887957c5-cml4d" Jan 20 18:09:59 crc kubenswrapper[4558]: I0120 18:09:59.420646 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-multinode\" (UniqueName: \"kubernetes.io/configmap/b423fbfc-09c9-4a39-bfe4-52c733a9c9e0-openstack-edpm-multinode\") pod \"dnsmasq-dnsmasq-59887957c5-cml4d\" (UID: \"b423fbfc-09c9-4a39-bfe4-52c733a9c9e0\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-59887957c5-cml4d" Jan 20 18:09:59 crc kubenswrapper[4558]: I0120 18:09:59.437342 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cl4f4\" (UniqueName: \"kubernetes.io/projected/b423fbfc-09c9-4a39-bfe4-52c733a9c9e0-kube-api-access-cl4f4\") pod \"dnsmasq-dnsmasq-59887957c5-cml4d\" (UID: \"b423fbfc-09c9-4a39-bfe4-52c733a9c9e0\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-59887957c5-cml4d" Jan 20 18:09:59 crc kubenswrapper[4558]: I0120 18:09:59.509255 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-dk5nb" Jan 20 18:09:59 crc kubenswrapper[4558]: I0120 18:09:59.559155 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-59887957c5-cml4d" Jan 20 18:09:59 crc kubenswrapper[4558]: I0120 18:09:59.622920 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/3a51fa49-3ed3-4b8e-ba75-2fa83f5edcbd-dnsmasq-svc\") pod \"3a51fa49-3ed3-4b8e-ba75-2fa83f5edcbd\" (UID: \"3a51fa49-3ed3-4b8e-ba75-2fa83f5edcbd\") " Jan 20 18:09:59 crc kubenswrapper[4558]: I0120 18:09:59.623051 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3a51fa49-3ed3-4b8e-ba75-2fa83f5edcbd-config\") pod \"3a51fa49-3ed3-4b8e-ba75-2fa83f5edcbd\" (UID: \"3a51fa49-3ed3-4b8e-ba75-2fa83f5edcbd\") " Jan 20 18:09:59 crc kubenswrapper[4558]: I0120 18:09:59.623150 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c9pvp\" (UniqueName: \"kubernetes.io/projected/3a51fa49-3ed3-4b8e-ba75-2fa83f5edcbd-kube-api-access-c9pvp\") pod \"3a51fa49-3ed3-4b8e-ba75-2fa83f5edcbd\" (UID: \"3a51fa49-3ed3-4b8e-ba75-2fa83f5edcbd\") " Jan 20 18:09:59 crc kubenswrapper[4558]: I0120 18:09:59.626210 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3a51fa49-3ed3-4b8e-ba75-2fa83f5edcbd-kube-api-access-c9pvp" (OuterVolumeSpecName: "kube-api-access-c9pvp") pod "3a51fa49-3ed3-4b8e-ba75-2fa83f5edcbd" (UID: "3a51fa49-3ed3-4b8e-ba75-2fa83f5edcbd"). 
InnerVolumeSpecName "kube-api-access-c9pvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:09:59 crc kubenswrapper[4558]: I0120 18:09:59.681643 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3a51fa49-3ed3-4b8e-ba75-2fa83f5edcbd-dnsmasq-svc" (OuterVolumeSpecName: "dnsmasq-svc") pod "3a51fa49-3ed3-4b8e-ba75-2fa83f5edcbd" (UID: "3a51fa49-3ed3-4b8e-ba75-2fa83f5edcbd"). InnerVolumeSpecName "dnsmasq-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:09:59 crc kubenswrapper[4558]: I0120 18:09:59.684678 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3a51fa49-3ed3-4b8e-ba75-2fa83f5edcbd-config" (OuterVolumeSpecName: "config") pod "3a51fa49-3ed3-4b8e-ba75-2fa83f5edcbd" (UID: "3a51fa49-3ed3-4b8e-ba75-2fa83f5edcbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:09:59 crc kubenswrapper[4558]: I0120 18:09:59.725259 4558 generic.go:334] "Generic (PLEG): container finished" podID="3a51fa49-3ed3-4b8e-ba75-2fa83f5edcbd" containerID="b090553c9ad2f835c8c00cd860013f3e064b6a1b9e9e43aef5b5a4c88d6d1599" exitCode=0 Jan 20 18:09:59 crc kubenswrapper[4558]: I0120 18:09:59.725306 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-dk5nb" event={"ID":"3a51fa49-3ed3-4b8e-ba75-2fa83f5edcbd","Type":"ContainerDied","Data":"b090553c9ad2f835c8c00cd860013f3e064b6a1b9e9e43aef5b5a4c88d6d1599"} Jan 20 18:09:59 crc kubenswrapper[4558]: I0120 18:09:59.725345 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-dk5nb" event={"ID":"3a51fa49-3ed3-4b8e-ba75-2fa83f5edcbd","Type":"ContainerDied","Data":"dd1528fe842294d5cded7f8bf6e5b324b03043597c193a11cc6104d0fecda0af"} Jan 20 18:09:59 crc kubenswrapper[4558]: I0120 18:09:59.725364 4558 scope.go:117] "RemoveContainer" containerID="b090553c9ad2f835c8c00cd860013f3e064b6a1b9e9e43aef5b5a4c88d6d1599" Jan 20 18:09:59 crc kubenswrapper[4558]: I0120 18:09:59.725363 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-dk5nb" Jan 20 18:09:59 crc kubenswrapper[4558]: I0120 18:09:59.726443 4558 reconciler_common.go:293] "Volume detached for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/3a51fa49-3ed3-4b8e-ba75-2fa83f5edcbd-dnsmasq-svc\") on node \"crc\" DevicePath \"\"" Jan 20 18:09:59 crc kubenswrapper[4558]: I0120 18:09:59.726475 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3a51fa49-3ed3-4b8e-ba75-2fa83f5edcbd-config\") on node \"crc\" DevicePath \"\"" Jan 20 18:09:59 crc kubenswrapper[4558]: I0120 18:09:59.726492 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c9pvp\" (UniqueName: \"kubernetes.io/projected/3a51fa49-3ed3-4b8e-ba75-2fa83f5edcbd-kube-api-access-c9pvp\") on node \"crc\" DevicePath \"\"" Jan 20 18:09:59 crc kubenswrapper[4558]: I0120 18:09:59.753077 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-dk5nb"] Jan 20 18:09:59 crc kubenswrapper[4558]: I0120 18:09:59.758242 4558 scope.go:117] "RemoveContainer" containerID="794a063720b0483269dc27756f88e21b3da791506ad1daebf1a349255226d627" Jan 20 18:09:59 crc kubenswrapper[4558]: I0120 18:09:59.758734 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-dk5nb"] Jan 20 18:09:59 crc kubenswrapper[4558]: I0120 18:09:59.787251 4558 scope.go:117] "RemoveContainer" containerID="b090553c9ad2f835c8c00cd860013f3e064b6a1b9e9e43aef5b5a4c88d6d1599" Jan 20 18:09:59 crc kubenswrapper[4558]: E0120 18:09:59.787706 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b090553c9ad2f835c8c00cd860013f3e064b6a1b9e9e43aef5b5a4c88d6d1599\": container with ID starting with b090553c9ad2f835c8c00cd860013f3e064b6a1b9e9e43aef5b5a4c88d6d1599 not found: ID does not exist" containerID="b090553c9ad2f835c8c00cd860013f3e064b6a1b9e9e43aef5b5a4c88d6d1599" Jan 20 18:09:59 crc kubenswrapper[4558]: I0120 18:09:59.787749 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b090553c9ad2f835c8c00cd860013f3e064b6a1b9e9e43aef5b5a4c88d6d1599"} err="failed to get container status \"b090553c9ad2f835c8c00cd860013f3e064b6a1b9e9e43aef5b5a4c88d6d1599\": rpc error: code = NotFound desc = could not find container \"b090553c9ad2f835c8c00cd860013f3e064b6a1b9e9e43aef5b5a4c88d6d1599\": container with ID starting with b090553c9ad2f835c8c00cd860013f3e064b6a1b9e9e43aef5b5a4c88d6d1599 not found: ID does not exist" Jan 20 18:09:59 crc kubenswrapper[4558]: I0120 18:09:59.787776 4558 scope.go:117] "RemoveContainer" containerID="794a063720b0483269dc27756f88e21b3da791506ad1daebf1a349255226d627" Jan 20 18:09:59 crc kubenswrapper[4558]: E0120 18:09:59.788067 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"794a063720b0483269dc27756f88e21b3da791506ad1daebf1a349255226d627\": container with ID starting with 794a063720b0483269dc27756f88e21b3da791506ad1daebf1a349255226d627 not found: ID does not exist" containerID="794a063720b0483269dc27756f88e21b3da791506ad1daebf1a349255226d627" Jan 20 18:09:59 crc kubenswrapper[4558]: I0120 18:09:59.788094 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"794a063720b0483269dc27756f88e21b3da791506ad1daebf1a349255226d627"} err="failed to get 
container status \"794a063720b0483269dc27756f88e21b3da791506ad1daebf1a349255226d627\": rpc error: code = NotFound desc = could not find container \"794a063720b0483269dc27756f88e21b3da791506ad1daebf1a349255226d627\": container with ID starting with 794a063720b0483269dc27756f88e21b3da791506ad1daebf1a349255226d627 not found: ID does not exist" Jan 20 18:09:59 crc kubenswrapper[4558]: I0120 18:09:59.956147 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-59887957c5-cml4d"] Jan 20 18:10:00 crc kubenswrapper[4558]: I0120 18:10:00.581054 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3a51fa49-3ed3-4b8e-ba75-2fa83f5edcbd" path="/var/lib/kubelet/pods/3a51fa49-3ed3-4b8e-ba75-2fa83f5edcbd/volumes" Jan 20 18:10:00 crc kubenswrapper[4558]: I0120 18:10:00.747440 4558 generic.go:334] "Generic (PLEG): container finished" podID="b423fbfc-09c9-4a39-bfe4-52c733a9c9e0" containerID="307d2b16f5bcff2bcc1b364dcfbd74fa8f93fdbc2af2a5441f005e7cb9ba5303" exitCode=0 Jan 20 18:10:00 crc kubenswrapper[4558]: I0120 18:10:00.747555 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-59887957c5-cml4d" event={"ID":"b423fbfc-09c9-4a39-bfe4-52c733a9c9e0","Type":"ContainerDied","Data":"307d2b16f5bcff2bcc1b364dcfbd74fa8f93fdbc2af2a5441f005e7cb9ba5303"} Jan 20 18:10:00 crc kubenswrapper[4558]: I0120 18:10:00.747595 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-59887957c5-cml4d" event={"ID":"b423fbfc-09c9-4a39-bfe4-52c733a9c9e0","Type":"ContainerStarted","Data":"6855650a2d3cdfd01273b68bafa94d667e4aa0be0282decba28d08d18a89611c"} Jan 20 18:10:01 crc kubenswrapper[4558]: I0120 18:10:01.759995 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-59887957c5-cml4d" event={"ID":"b423fbfc-09c9-4a39-bfe4-52c733a9c9e0","Type":"ContainerStarted","Data":"2a3c692dbbf0d3e455b76722c242e0826e2349ebb3e7c8269f71da18ad124ef4"} Jan 20 18:10:02 crc kubenswrapper[4558]: I0120 18:10:02.766338 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-59887957c5-cml4d" Jan 20 18:10:09 crc kubenswrapper[4558]: I0120 18:10:09.560356 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-59887957c5-cml4d" Jan 20 18:10:09 crc kubenswrapper[4558]: I0120 18:10:09.580335 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-59887957c5-cml4d" podStartSLOduration=10.580317145 podStartE2EDuration="10.580317145s" podCreationTimestamp="2026-01-20 18:09:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:10:01.779997818 +0000 UTC m=+5295.540335896" watchObservedRunningTime="2026-01-20 18:10:09.580317145 +0000 UTC m=+5303.340655101" Jan 20 18:10:09 crc kubenswrapper[4558]: I0120 18:10:09.614696 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-58854494b5-8cdrf"] Jan 20 18:10:09 crc kubenswrapper[4558]: I0120 18:10:09.614922 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-58854494b5-8cdrf" podUID="dc21ff3e-2d6f-4b52-aff7-25db4937bea6" containerName="dnsmasq-dns" containerID="cri-o://6d4da5ea489121df042ad5de2999ecc10efd718364311f2f2c27398d667f0d83" gracePeriod=10 
Jan 20 18:10:09 crc kubenswrapper[4558]: I0120 18:10:09.828232 4558 generic.go:334] "Generic (PLEG): container finished" podID="dc21ff3e-2d6f-4b52-aff7-25db4937bea6" containerID="6d4da5ea489121df042ad5de2999ecc10efd718364311f2f2c27398d667f0d83" exitCode=0 Jan 20 18:10:09 crc kubenswrapper[4558]: I0120 18:10:09.828283 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-58854494b5-8cdrf" event={"ID":"dc21ff3e-2d6f-4b52-aff7-25db4937bea6","Type":"ContainerDied","Data":"6d4da5ea489121df042ad5de2999ecc10efd718364311f2f2c27398d667f0d83"} Jan 20 18:10:09 crc kubenswrapper[4558]: I0120 18:10:09.974386 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-58854494b5-8cdrf" Jan 20 18:10:10 crc kubenswrapper[4558]: I0120 18:10:10.124919 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-5d55f47b6c-fvphz"] Jan 20 18:10:10 crc kubenswrapper[4558]: E0120 18:10:10.125234 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc21ff3e-2d6f-4b52-aff7-25db4937bea6" containerName="init" Jan 20 18:10:10 crc kubenswrapper[4558]: I0120 18:10:10.125252 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc21ff3e-2d6f-4b52-aff7-25db4937bea6" containerName="init" Jan 20 18:10:10 crc kubenswrapper[4558]: E0120 18:10:10.125273 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc21ff3e-2d6f-4b52-aff7-25db4937bea6" containerName="dnsmasq-dns" Jan 20 18:10:10 crc kubenswrapper[4558]: I0120 18:10:10.125280 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc21ff3e-2d6f-4b52-aff7-25db4937bea6" containerName="dnsmasq-dns" Jan 20 18:10:10 crc kubenswrapper[4558]: E0120 18:10:10.125299 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a51fa49-3ed3-4b8e-ba75-2fa83f5edcbd" containerName="init" Jan 20 18:10:10 crc kubenswrapper[4558]: I0120 18:10:10.125304 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a51fa49-3ed3-4b8e-ba75-2fa83f5edcbd" containerName="init" Jan 20 18:10:10 crc kubenswrapper[4558]: E0120 18:10:10.125314 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a51fa49-3ed3-4b8e-ba75-2fa83f5edcbd" containerName="dnsmasq-dns" Jan 20 18:10:10 crc kubenswrapper[4558]: I0120 18:10:10.125320 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a51fa49-3ed3-4b8e-ba75-2fa83f5edcbd" containerName="dnsmasq-dns" Jan 20 18:10:10 crc kubenswrapper[4558]: I0120 18:10:10.125464 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc21ff3e-2d6f-4b52-aff7-25db4937bea6" containerName="dnsmasq-dns" Jan 20 18:10:10 crc kubenswrapper[4558]: I0120 18:10:10.125478 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="3a51fa49-3ed3-4b8e-ba75-2fa83f5edcbd" containerName="dnsmasq-dns" Jan 20 18:10:10 crc kubenswrapper[4558]: I0120 18:10:10.126123 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-5d55f47b6c-fvphz" Jan 20 18:10:10 crc kubenswrapper[4558]: I0120 18:10:10.135753 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-5d55f47b6c-fvphz"] Jan 20 18:10:10 crc kubenswrapper[4558]: I0120 18:10:10.167398 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/dc21ff3e-2d6f-4b52-aff7-25db4937bea6-dnsmasq-svc\") pod \"dc21ff3e-2d6f-4b52-aff7-25db4937bea6\" (UID: \"dc21ff3e-2d6f-4b52-aff7-25db4937bea6\") " Jan 20 18:10:10 crc kubenswrapper[4558]: I0120 18:10:10.167665 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z4bsk\" (UniqueName: \"kubernetes.io/projected/dc21ff3e-2d6f-4b52-aff7-25db4937bea6-kube-api-access-z4bsk\") pod \"dc21ff3e-2d6f-4b52-aff7-25db4937bea6\" (UID: \"dc21ff3e-2d6f-4b52-aff7-25db4937bea6\") " Jan 20 18:10:10 crc kubenswrapper[4558]: I0120 18:10:10.167752 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-multinode\" (UniqueName: \"kubernetes.io/configmap/dc21ff3e-2d6f-4b52-aff7-25db4937bea6-openstack-edpm-multinode\") pod \"dc21ff3e-2d6f-4b52-aff7-25db4937bea6\" (UID: \"dc21ff3e-2d6f-4b52-aff7-25db4937bea6\") " Jan 20 18:10:10 crc kubenswrapper[4558]: I0120 18:10:10.167876 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dc21ff3e-2d6f-4b52-aff7-25db4937bea6-config\") pod \"dc21ff3e-2d6f-4b52-aff7-25db4937bea6\" (UID: \"dc21ff3e-2d6f-4b52-aff7-25db4937bea6\") " Jan 20 18:10:10 crc kubenswrapper[4558]: I0120 18:10:10.168387 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-multinode\" (UniqueName: \"kubernetes.io/configmap/4bb9fb0f-afd6-4edc-af2e-aa967469a7a1-openstack-edpm-multinode\") pod \"dnsmasq-dnsmasq-5d55f47b6c-fvphz\" (UID: \"4bb9fb0f-afd6-4edc-af2e-aa967469a7a1\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-5d55f47b6c-fvphz" Jan 20 18:10:10 crc kubenswrapper[4558]: I0120 18:10:10.168427 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4bb9fb0f-afd6-4edc-af2e-aa967469a7a1-config\") pod \"dnsmasq-dnsmasq-5d55f47b6c-fvphz\" (UID: \"4bb9fb0f-afd6-4edc-af2e-aa967469a7a1\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-5d55f47b6c-fvphz" Jan 20 18:10:10 crc kubenswrapper[4558]: I0120 18:10:10.168542 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/4bb9fb0f-afd6-4edc-af2e-aa967469a7a1-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-5d55f47b6c-fvphz\" (UID: \"4bb9fb0f-afd6-4edc-af2e-aa967469a7a1\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-5d55f47b6c-fvphz" Jan 20 18:10:10 crc kubenswrapper[4558]: I0120 18:10:10.168634 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mqgrw\" (UniqueName: \"kubernetes.io/projected/4bb9fb0f-afd6-4edc-af2e-aa967469a7a1-kube-api-access-mqgrw\") pod \"dnsmasq-dnsmasq-5d55f47b6c-fvphz\" (UID: \"4bb9fb0f-afd6-4edc-af2e-aa967469a7a1\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-5d55f47b6c-fvphz" Jan 20 18:10:10 crc kubenswrapper[4558]: I0120 18:10:10.174999 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for 
volume "kubernetes.io/projected/dc21ff3e-2d6f-4b52-aff7-25db4937bea6-kube-api-access-z4bsk" (OuterVolumeSpecName: "kube-api-access-z4bsk") pod "dc21ff3e-2d6f-4b52-aff7-25db4937bea6" (UID: "dc21ff3e-2d6f-4b52-aff7-25db4937bea6"). InnerVolumeSpecName "kube-api-access-z4bsk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:10:10 crc kubenswrapper[4558]: I0120 18:10:10.197763 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dc21ff3e-2d6f-4b52-aff7-25db4937bea6-dnsmasq-svc" (OuterVolumeSpecName: "dnsmasq-svc") pod "dc21ff3e-2d6f-4b52-aff7-25db4937bea6" (UID: "dc21ff3e-2d6f-4b52-aff7-25db4937bea6"). InnerVolumeSpecName "dnsmasq-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:10:10 crc kubenswrapper[4558]: I0120 18:10:10.198093 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dc21ff3e-2d6f-4b52-aff7-25db4937bea6-config" (OuterVolumeSpecName: "config") pod "dc21ff3e-2d6f-4b52-aff7-25db4937bea6" (UID: "dc21ff3e-2d6f-4b52-aff7-25db4937bea6"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:10:10 crc kubenswrapper[4558]: I0120 18:10:10.214129 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dc21ff3e-2d6f-4b52-aff7-25db4937bea6-openstack-edpm-multinode" (OuterVolumeSpecName: "openstack-edpm-multinode") pod "dc21ff3e-2d6f-4b52-aff7-25db4937bea6" (UID: "dc21ff3e-2d6f-4b52-aff7-25db4937bea6"). InnerVolumeSpecName "openstack-edpm-multinode". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:10:10 crc kubenswrapper[4558]: I0120 18:10:10.270132 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/4bb9fb0f-afd6-4edc-af2e-aa967469a7a1-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-5d55f47b6c-fvphz\" (UID: \"4bb9fb0f-afd6-4edc-af2e-aa967469a7a1\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-5d55f47b6c-fvphz" Jan 20 18:10:10 crc kubenswrapper[4558]: I0120 18:10:10.270217 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mqgrw\" (UniqueName: \"kubernetes.io/projected/4bb9fb0f-afd6-4edc-af2e-aa967469a7a1-kube-api-access-mqgrw\") pod \"dnsmasq-dnsmasq-5d55f47b6c-fvphz\" (UID: \"4bb9fb0f-afd6-4edc-af2e-aa967469a7a1\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-5d55f47b6c-fvphz" Jan 20 18:10:10 crc kubenswrapper[4558]: I0120 18:10:10.270298 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-multinode\" (UniqueName: \"kubernetes.io/configmap/4bb9fb0f-afd6-4edc-af2e-aa967469a7a1-openstack-edpm-multinode\") pod \"dnsmasq-dnsmasq-5d55f47b6c-fvphz\" (UID: \"4bb9fb0f-afd6-4edc-af2e-aa967469a7a1\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-5d55f47b6c-fvphz" Jan 20 18:10:10 crc kubenswrapper[4558]: I0120 18:10:10.270328 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4bb9fb0f-afd6-4edc-af2e-aa967469a7a1-config\") pod \"dnsmasq-dnsmasq-5d55f47b6c-fvphz\" (UID: \"4bb9fb0f-afd6-4edc-af2e-aa967469a7a1\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-5d55f47b6c-fvphz" Jan 20 18:10:10 crc kubenswrapper[4558]: I0120 18:10:10.270388 4558 reconciler_common.go:293] "Volume detached for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/dc21ff3e-2d6f-4b52-aff7-25db4937bea6-dnsmasq-svc\") on node \"crc\" 
DevicePath \"\"" Jan 20 18:10:10 crc kubenswrapper[4558]: I0120 18:10:10.270400 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z4bsk\" (UniqueName: \"kubernetes.io/projected/dc21ff3e-2d6f-4b52-aff7-25db4937bea6-kube-api-access-z4bsk\") on node \"crc\" DevicePath \"\"" Jan 20 18:10:10 crc kubenswrapper[4558]: I0120 18:10:10.270412 4558 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-multinode\" (UniqueName: \"kubernetes.io/configmap/dc21ff3e-2d6f-4b52-aff7-25db4937bea6-openstack-edpm-multinode\") on node \"crc\" DevicePath \"\"" Jan 20 18:10:10 crc kubenswrapper[4558]: I0120 18:10:10.270424 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dc21ff3e-2d6f-4b52-aff7-25db4937bea6-config\") on node \"crc\" DevicePath \"\"" Jan 20 18:10:10 crc kubenswrapper[4558]: I0120 18:10:10.271017 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/4bb9fb0f-afd6-4edc-af2e-aa967469a7a1-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-5d55f47b6c-fvphz\" (UID: \"4bb9fb0f-afd6-4edc-af2e-aa967469a7a1\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-5d55f47b6c-fvphz" Jan 20 18:10:10 crc kubenswrapper[4558]: I0120 18:10:10.271257 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4bb9fb0f-afd6-4edc-af2e-aa967469a7a1-config\") pod \"dnsmasq-dnsmasq-5d55f47b6c-fvphz\" (UID: \"4bb9fb0f-afd6-4edc-af2e-aa967469a7a1\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-5d55f47b6c-fvphz" Jan 20 18:10:10 crc kubenswrapper[4558]: I0120 18:10:10.271324 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-multinode\" (UniqueName: \"kubernetes.io/configmap/4bb9fb0f-afd6-4edc-af2e-aa967469a7a1-openstack-edpm-multinode\") pod \"dnsmasq-dnsmasq-5d55f47b6c-fvphz\" (UID: \"4bb9fb0f-afd6-4edc-af2e-aa967469a7a1\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-5d55f47b6c-fvphz" Jan 20 18:10:10 crc kubenswrapper[4558]: I0120 18:10:10.285954 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mqgrw\" (UniqueName: \"kubernetes.io/projected/4bb9fb0f-afd6-4edc-af2e-aa967469a7a1-kube-api-access-mqgrw\") pod \"dnsmasq-dnsmasq-5d55f47b6c-fvphz\" (UID: \"4bb9fb0f-afd6-4edc-af2e-aa967469a7a1\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-5d55f47b6c-fvphz" Jan 20 18:10:10 crc kubenswrapper[4558]: I0120 18:10:10.442204 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-5d55f47b6c-fvphz" Jan 20 18:10:10 crc kubenswrapper[4558]: I0120 18:10:10.830712 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-5d55f47b6c-fvphz"] Jan 20 18:10:10 crc kubenswrapper[4558]: W0120 18:10:10.833632 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4bb9fb0f_afd6_4edc_af2e_aa967469a7a1.slice/crio-5716ce0c4ff55f2ae9f1e63f06de899e28c933679957c3c208b99397860b2be0 WatchSource:0}: Error finding container 5716ce0c4ff55f2ae9f1e63f06de899e28c933679957c3c208b99397860b2be0: Status 404 returned error can't find the container with id 5716ce0c4ff55f2ae9f1e63f06de899e28c933679957c3c208b99397860b2be0 Jan 20 18:10:10 crc kubenswrapper[4558]: I0120 18:10:10.840373 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-58854494b5-8cdrf" event={"ID":"dc21ff3e-2d6f-4b52-aff7-25db4937bea6","Type":"ContainerDied","Data":"bc9b80b600a905713a84d9caeed8450941391593d6384bb756760c98a606f709"} Jan 20 18:10:10 crc kubenswrapper[4558]: I0120 18:10:10.840438 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-58854494b5-8cdrf" Jan 20 18:10:10 crc kubenswrapper[4558]: I0120 18:10:10.840465 4558 scope.go:117] "RemoveContainer" containerID="6d4da5ea489121df042ad5de2999ecc10efd718364311f2f2c27398d667f0d83" Jan 20 18:10:10 crc kubenswrapper[4558]: I0120 18:10:10.922300 4558 scope.go:117] "RemoveContainer" containerID="5331eabdc3f1c776e6d2173468268d41767dbaea4e12973cedd48e4c20dbf984" Jan 20 18:10:10 crc kubenswrapper[4558]: I0120 18:10:10.952219 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-58854494b5-8cdrf"] Jan 20 18:10:10 crc kubenswrapper[4558]: I0120 18:10:10.959300 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-58854494b5-8cdrf"] Jan 20 18:10:11 crc kubenswrapper[4558]: I0120 18:10:11.850810 4558 generic.go:334] "Generic (PLEG): container finished" podID="4bb9fb0f-afd6-4edc-af2e-aa967469a7a1" containerID="d9743af7c9f73d7276e8ca33fbcde1589d7af9645a6833c5edfd19df6dec3031" exitCode=0 Jan 20 18:10:11 crc kubenswrapper[4558]: I0120 18:10:11.851005 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-5d55f47b6c-fvphz" event={"ID":"4bb9fb0f-afd6-4edc-af2e-aa967469a7a1","Type":"ContainerDied","Data":"d9743af7c9f73d7276e8ca33fbcde1589d7af9645a6833c5edfd19df6dec3031"} Jan 20 18:10:11 crc kubenswrapper[4558]: I0120 18:10:11.851327 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-5d55f47b6c-fvphz" event={"ID":"4bb9fb0f-afd6-4edc-af2e-aa967469a7a1","Type":"ContainerStarted","Data":"5716ce0c4ff55f2ae9f1e63f06de899e28c933679957c3c208b99397860b2be0"} Jan 20 18:10:12 crc kubenswrapper[4558]: I0120 18:10:12.575774 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dc21ff3e-2d6f-4b52-aff7-25db4937bea6" path="/var/lib/kubelet/pods/dc21ff3e-2d6f-4b52-aff7-25db4937bea6/volumes" Jan 20 18:10:12 crc kubenswrapper[4558]: I0120 18:10:12.861555 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-5d55f47b6c-fvphz" event={"ID":"4bb9fb0f-afd6-4edc-af2e-aa967469a7a1","Type":"ContainerStarted","Data":"2c40ec210f462f6e681c3d856258d487e8b971cde7c7b07ba9dc5eb2b43e2390"} 
Jan 20 18:10:12 crc kubenswrapper[4558]: I0120 18:10:12.861695 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-5d55f47b6c-fvphz" Jan 20 18:10:12 crc kubenswrapper[4558]: I0120 18:10:12.876310 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-5d55f47b6c-fvphz" podStartSLOduration=2.876292011 podStartE2EDuration="2.876292011s" podCreationTimestamp="2026-01-20 18:10:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:10:12.873479022 +0000 UTC m=+5306.633816989" watchObservedRunningTime="2026-01-20 18:10:12.876292011 +0000 UTC m=+5306.636629979" Jan 20 18:10:20 crc kubenswrapper[4558]: I0120 18:10:20.444212 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-5d55f47b6c-fvphz" Jan 20 18:10:20 crc kubenswrapper[4558]: I0120 18:10:20.523967 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-59887957c5-cml4d"] Jan 20 18:10:20 crc kubenswrapper[4558]: I0120 18:10:20.524342 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-59887957c5-cml4d" podUID="b423fbfc-09c9-4a39-bfe4-52c733a9c9e0" containerName="dnsmasq-dns" containerID="cri-o://2a3c692dbbf0d3e455b76722c242e0826e2349ebb3e7c8269f71da18ad124ef4" gracePeriod=10 Jan 20 18:10:20 crc kubenswrapper[4558]: I0120 18:10:20.734459 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-964c896d7-mnk2h"] Jan 20 18:10:20 crc kubenswrapper[4558]: I0120 18:10:20.735415 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-964c896d7-mnk2h" Jan 20 18:10:20 crc kubenswrapper[4558]: I0120 18:10:20.751048 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-964c896d7-mnk2h"] Jan 20 18:10:20 crc kubenswrapper[4558]: I0120 18:10:20.826305 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7b7bv\" (UniqueName: \"kubernetes.io/projected/e56a243a-71b7-471f-b519-57917fa67e7b-kube-api-access-7b7bv\") pod \"dnsmasq-dnsmasq-964c896d7-mnk2h\" (UID: \"e56a243a-71b7-471f-b519-57917fa67e7b\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-964c896d7-mnk2h" Jan 20 18:10:20 crc kubenswrapper[4558]: I0120 18:10:20.826377 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/e56a243a-71b7-471f-b519-57917fa67e7b-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-964c896d7-mnk2h\" (UID: \"e56a243a-71b7-471f-b519-57917fa67e7b\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-964c896d7-mnk2h" Jan 20 18:10:20 crc kubenswrapper[4558]: I0120 18:10:20.826620 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e56a243a-71b7-471f-b519-57917fa67e7b-config\") pod \"dnsmasq-dnsmasq-964c896d7-mnk2h\" (UID: \"e56a243a-71b7-471f-b519-57917fa67e7b\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-964c896d7-mnk2h" Jan 20 18:10:20 crc kubenswrapper[4558]: I0120 18:10:20.826670 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-multinode\" (UniqueName: \"kubernetes.io/configmap/e56a243a-71b7-471f-b519-57917fa67e7b-openstack-edpm-multinode\") pod \"dnsmasq-dnsmasq-964c896d7-mnk2h\" (UID: \"e56a243a-71b7-471f-b519-57917fa67e7b\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-964c896d7-mnk2h" Jan 20 18:10:20 crc kubenswrapper[4558]: I0120 18:10:20.928027 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7b7bv\" (UniqueName: \"kubernetes.io/projected/e56a243a-71b7-471f-b519-57917fa67e7b-kube-api-access-7b7bv\") pod \"dnsmasq-dnsmasq-964c896d7-mnk2h\" (UID: \"e56a243a-71b7-471f-b519-57917fa67e7b\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-964c896d7-mnk2h" Jan 20 18:10:20 crc kubenswrapper[4558]: I0120 18:10:20.928138 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/e56a243a-71b7-471f-b519-57917fa67e7b-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-964c896d7-mnk2h\" (UID: \"e56a243a-71b7-471f-b519-57917fa67e7b\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-964c896d7-mnk2h" Jan 20 18:10:20 crc kubenswrapper[4558]: I0120 18:10:20.928275 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e56a243a-71b7-471f-b519-57917fa67e7b-config\") pod \"dnsmasq-dnsmasq-964c896d7-mnk2h\" (UID: \"e56a243a-71b7-471f-b519-57917fa67e7b\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-964c896d7-mnk2h" Jan 20 18:10:20 crc kubenswrapper[4558]: I0120 18:10:20.928302 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-multinode\" (UniqueName: \"kubernetes.io/configmap/e56a243a-71b7-471f-b519-57917fa67e7b-openstack-edpm-multinode\") pod \"dnsmasq-dnsmasq-964c896d7-mnk2h\" (UID: 
\"e56a243a-71b7-471f-b519-57917fa67e7b\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-964c896d7-mnk2h" Jan 20 18:10:20 crc kubenswrapper[4558]: I0120 18:10:20.929287 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e56a243a-71b7-471f-b519-57917fa67e7b-config\") pod \"dnsmasq-dnsmasq-964c896d7-mnk2h\" (UID: \"e56a243a-71b7-471f-b519-57917fa67e7b\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-964c896d7-mnk2h" Jan 20 18:10:20 crc kubenswrapper[4558]: I0120 18:10:20.929501 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-multinode\" (UniqueName: \"kubernetes.io/configmap/e56a243a-71b7-471f-b519-57917fa67e7b-openstack-edpm-multinode\") pod \"dnsmasq-dnsmasq-964c896d7-mnk2h\" (UID: \"e56a243a-71b7-471f-b519-57917fa67e7b\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-964c896d7-mnk2h" Jan 20 18:10:20 crc kubenswrapper[4558]: I0120 18:10:20.929529 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/e56a243a-71b7-471f-b519-57917fa67e7b-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-964c896d7-mnk2h\" (UID: \"e56a243a-71b7-471f-b519-57917fa67e7b\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-964c896d7-mnk2h" Jan 20 18:10:20 crc kubenswrapper[4558]: I0120 18:10:20.931104 4558 generic.go:334] "Generic (PLEG): container finished" podID="b423fbfc-09c9-4a39-bfe4-52c733a9c9e0" containerID="2a3c692dbbf0d3e455b76722c242e0826e2349ebb3e7c8269f71da18ad124ef4" exitCode=0 Jan 20 18:10:20 crc kubenswrapper[4558]: I0120 18:10:20.931147 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-59887957c5-cml4d" event={"ID":"b423fbfc-09c9-4a39-bfe4-52c733a9c9e0","Type":"ContainerDied","Data":"2a3c692dbbf0d3e455b76722c242e0826e2349ebb3e7c8269f71da18ad124ef4"} Jan 20 18:10:20 crc kubenswrapper[4558]: I0120 18:10:20.946245 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7b7bv\" (UniqueName: \"kubernetes.io/projected/e56a243a-71b7-471f-b519-57917fa67e7b-kube-api-access-7b7bv\") pod \"dnsmasq-dnsmasq-964c896d7-mnk2h\" (UID: \"e56a243a-71b7-471f-b519-57917fa67e7b\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-964c896d7-mnk2h" Jan 20 18:10:20 crc kubenswrapper[4558]: I0120 18:10:20.998267 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-59887957c5-cml4d" Jan 20 18:10:21 crc kubenswrapper[4558]: I0120 18:10:21.029725 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b423fbfc-09c9-4a39-bfe4-52c733a9c9e0-config\") pod \"b423fbfc-09c9-4a39-bfe4-52c733a9c9e0\" (UID: \"b423fbfc-09c9-4a39-bfe4-52c733a9c9e0\") " Jan 20 18:10:21 crc kubenswrapper[4558]: I0120 18:10:21.029984 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cl4f4\" (UniqueName: \"kubernetes.io/projected/b423fbfc-09c9-4a39-bfe4-52c733a9c9e0-kube-api-access-cl4f4\") pod \"b423fbfc-09c9-4a39-bfe4-52c733a9c9e0\" (UID: \"b423fbfc-09c9-4a39-bfe4-52c733a9c9e0\") " Jan 20 18:10:21 crc kubenswrapper[4558]: I0120 18:10:21.030044 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/b423fbfc-09c9-4a39-bfe4-52c733a9c9e0-dnsmasq-svc\") pod \"b423fbfc-09c9-4a39-bfe4-52c733a9c9e0\" (UID: \"b423fbfc-09c9-4a39-bfe4-52c733a9c9e0\") " Jan 20 18:10:21 crc kubenswrapper[4558]: I0120 18:10:21.030091 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-multinode\" (UniqueName: \"kubernetes.io/configmap/b423fbfc-09c9-4a39-bfe4-52c733a9c9e0-openstack-edpm-multinode\") pod \"b423fbfc-09c9-4a39-bfe4-52c733a9c9e0\" (UID: \"b423fbfc-09c9-4a39-bfe4-52c733a9c9e0\") " Jan 20 18:10:21 crc kubenswrapper[4558]: I0120 18:10:21.039112 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b423fbfc-09c9-4a39-bfe4-52c733a9c9e0-kube-api-access-cl4f4" (OuterVolumeSpecName: "kube-api-access-cl4f4") pod "b423fbfc-09c9-4a39-bfe4-52c733a9c9e0" (UID: "b423fbfc-09c9-4a39-bfe4-52c733a9c9e0"). InnerVolumeSpecName "kube-api-access-cl4f4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:10:21 crc kubenswrapper[4558]: I0120 18:10:21.056959 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-964c896d7-mnk2h" Jan 20 18:10:21 crc kubenswrapper[4558]: I0120 18:10:21.060659 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b423fbfc-09c9-4a39-bfe4-52c733a9c9e0-dnsmasq-svc" (OuterVolumeSpecName: "dnsmasq-svc") pod "b423fbfc-09c9-4a39-bfe4-52c733a9c9e0" (UID: "b423fbfc-09c9-4a39-bfe4-52c733a9c9e0"). InnerVolumeSpecName "dnsmasq-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:10:21 crc kubenswrapper[4558]: I0120 18:10:21.061048 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b423fbfc-09c9-4a39-bfe4-52c733a9c9e0-config" (OuterVolumeSpecName: "config") pod "b423fbfc-09c9-4a39-bfe4-52c733a9c9e0" (UID: "b423fbfc-09c9-4a39-bfe4-52c733a9c9e0"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:10:21 crc kubenswrapper[4558]: I0120 18:10:21.062839 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b423fbfc-09c9-4a39-bfe4-52c733a9c9e0-openstack-edpm-multinode" (OuterVolumeSpecName: "openstack-edpm-multinode") pod "b423fbfc-09c9-4a39-bfe4-52c733a9c9e0" (UID: "b423fbfc-09c9-4a39-bfe4-52c733a9c9e0"). InnerVolumeSpecName "openstack-edpm-multinode". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:10:21 crc kubenswrapper[4558]: I0120 18:10:21.133112 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b423fbfc-09c9-4a39-bfe4-52c733a9c9e0-config\") on node \"crc\" DevicePath \"\"" Jan 20 18:10:21 crc kubenswrapper[4558]: I0120 18:10:21.133144 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cl4f4\" (UniqueName: \"kubernetes.io/projected/b423fbfc-09c9-4a39-bfe4-52c733a9c9e0-kube-api-access-cl4f4\") on node \"crc\" DevicePath \"\"" Jan 20 18:10:21 crc kubenswrapper[4558]: I0120 18:10:21.133155 4558 reconciler_common.go:293] "Volume detached for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/b423fbfc-09c9-4a39-bfe4-52c733a9c9e0-dnsmasq-svc\") on node \"crc\" DevicePath \"\"" Jan 20 18:10:21 crc kubenswrapper[4558]: I0120 18:10:21.133178 4558 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-multinode\" (UniqueName: \"kubernetes.io/configmap/b423fbfc-09c9-4a39-bfe4-52c733a9c9e0-openstack-edpm-multinode\") on node \"crc\" DevicePath \"\"" Jan 20 18:10:21 crc kubenswrapper[4558]: I0120 18:10:21.429784 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-964c896d7-mnk2h"] Jan 20 18:10:21 crc kubenswrapper[4558]: W0120 18:10:21.434621 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode56a243a_71b7_471f_b519_57917fa67e7b.slice/crio-bd7fb419e18dfc12fdd5d364c048f12418331774c10f59264194e509027b1e4b WatchSource:0}: Error finding container bd7fb419e18dfc12fdd5d364c048f12418331774c10f59264194e509027b1e4b: Status 404 returned error can't find the container with id bd7fb419e18dfc12fdd5d364c048f12418331774c10f59264194e509027b1e4b Jan 20 18:10:21 crc kubenswrapper[4558]: I0120 18:10:21.942182 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-59887957c5-cml4d" event={"ID":"b423fbfc-09c9-4a39-bfe4-52c733a9c9e0","Type":"ContainerDied","Data":"6855650a2d3cdfd01273b68bafa94d667e4aa0be0282decba28d08d18a89611c"} Jan 20 18:10:21 crc kubenswrapper[4558]: I0120 18:10:21.942244 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-59887957c5-cml4d" Jan 20 18:10:21 crc kubenswrapper[4558]: I0120 18:10:21.943334 4558 scope.go:117] "RemoveContainer" containerID="2a3c692dbbf0d3e455b76722c242e0826e2349ebb3e7c8269f71da18ad124ef4" Jan 20 18:10:21 crc kubenswrapper[4558]: I0120 18:10:21.944107 4558 generic.go:334] "Generic (PLEG): container finished" podID="e56a243a-71b7-471f-b519-57917fa67e7b" containerID="2cce52590ef71014c25bc35352899fb8a67a23cb987bf4aa292c690ea776bb17" exitCode=0 Jan 20 18:10:21 crc kubenswrapper[4558]: I0120 18:10:21.944188 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-964c896d7-mnk2h" event={"ID":"e56a243a-71b7-471f-b519-57917fa67e7b","Type":"ContainerDied","Data":"2cce52590ef71014c25bc35352899fb8a67a23cb987bf4aa292c690ea776bb17"} Jan 20 18:10:21 crc kubenswrapper[4558]: I0120 18:10:21.944303 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-964c896d7-mnk2h" event={"ID":"e56a243a-71b7-471f-b519-57917fa67e7b","Type":"ContainerStarted","Data":"bd7fb419e18dfc12fdd5d364c048f12418331774c10f59264194e509027b1e4b"} Jan 20 18:10:21 crc kubenswrapper[4558]: I0120 18:10:21.975352 4558 scope.go:117] "RemoveContainer" containerID="307d2b16f5bcff2bcc1b364dcfbd74fa8f93fdbc2af2a5441f005e7cb9ba5303" Jan 20 18:10:22 crc kubenswrapper[4558]: I0120 18:10:22.057232 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-59887957c5-cml4d"] Jan 20 18:10:22 crc kubenswrapper[4558]: I0120 18:10:22.062736 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-59887957c5-cml4d"] Jan 20 18:10:22 crc kubenswrapper[4558]: I0120 18:10:22.585636 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b423fbfc-09c9-4a39-bfe4-52c733a9c9e0" path="/var/lib/kubelet/pods/b423fbfc-09c9-4a39-bfe4-52c733a9c9e0/volumes" Jan 20 18:10:22 crc kubenswrapper[4558]: I0120 18:10:22.957152 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-964c896d7-mnk2h" event={"ID":"e56a243a-71b7-471f-b519-57917fa67e7b","Type":"ContainerStarted","Data":"9788485e2fec27e60d7a4c77df74bbae64ee23a3a8c66f281b42423c151060ca"} Jan 20 18:10:22 crc kubenswrapper[4558]: I0120 18:10:22.957296 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-964c896d7-mnk2h" Jan 20 18:10:22 crc kubenswrapper[4558]: I0120 18:10:22.973810 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-964c896d7-mnk2h" podStartSLOduration=2.973791884 podStartE2EDuration="2.973791884s" podCreationTimestamp="2026-01-20 18:10:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:10:22.97022085 +0000 UTC m=+5316.730558816" watchObservedRunningTime="2026-01-20 18:10:22.973791884 +0000 UTC m=+5316.734129850" Jan 20 18:10:31 crc kubenswrapper[4558]: I0120 18:10:31.059420 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-964c896d7-mnk2h" Jan 20 18:10:31 crc kubenswrapper[4558]: I0120 18:10:31.106413 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-5d55f47b6c-fvphz"] Jan 20 18:10:31 crc kubenswrapper[4558]: I0120 18:10:31.106677 4558 kuberuntime_container.go:808] 
"Killing container with a grace period" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-5d55f47b6c-fvphz" podUID="4bb9fb0f-afd6-4edc-af2e-aa967469a7a1" containerName="dnsmasq-dns" containerID="cri-o://2c40ec210f462f6e681c3d856258d487e8b971cde7c7b07ba9dc5eb2b43e2390" gracePeriod=10 Jan 20 18:10:31 crc kubenswrapper[4558]: I0120 18:10:31.460136 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-rwjb8"] Jan 20 18:10:31 crc kubenswrapper[4558]: E0120 18:10:31.460423 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b423fbfc-09c9-4a39-bfe4-52c733a9c9e0" containerName="init" Jan 20 18:10:31 crc kubenswrapper[4558]: I0120 18:10:31.460441 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b423fbfc-09c9-4a39-bfe4-52c733a9c9e0" containerName="init" Jan 20 18:10:31 crc kubenswrapper[4558]: E0120 18:10:31.460468 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b423fbfc-09c9-4a39-bfe4-52c733a9c9e0" containerName="dnsmasq-dns" Jan 20 18:10:31 crc kubenswrapper[4558]: I0120 18:10:31.460474 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b423fbfc-09c9-4a39-bfe4-52c733a9c9e0" containerName="dnsmasq-dns" Jan 20 18:10:31 crc kubenswrapper[4558]: I0120 18:10:31.460610 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b423fbfc-09c9-4a39-bfe4-52c733a9c9e0" containerName="dnsmasq-dns" Jan 20 18:10:31 crc kubenswrapper[4558]: I0120 18:10:31.461303 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-rwjb8" Jan 20 18:10:31 crc kubenswrapper[4558]: I0120 18:10:31.479352 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-rwjb8"] Jan 20 18:10:31 crc kubenswrapper[4558]: I0120 18:10:31.480341 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-5d55f47b6c-fvphz" Jan 20 18:10:31 crc kubenswrapper[4558]: I0120 18:10:31.504203 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4bb9fb0f-afd6-4edc-af2e-aa967469a7a1-config\") pod \"4bb9fb0f-afd6-4edc-af2e-aa967469a7a1\" (UID: \"4bb9fb0f-afd6-4edc-af2e-aa967469a7a1\") " Jan 20 18:10:31 crc kubenswrapper[4558]: I0120 18:10:31.504272 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-multinode\" (UniqueName: \"kubernetes.io/configmap/4bb9fb0f-afd6-4edc-af2e-aa967469a7a1-openstack-edpm-multinode\") pod \"4bb9fb0f-afd6-4edc-af2e-aa967469a7a1\" (UID: \"4bb9fb0f-afd6-4edc-af2e-aa967469a7a1\") " Jan 20 18:10:31 crc kubenswrapper[4558]: I0120 18:10:31.504476 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/7ca67738-2068-4c2f-beeb-60751657efef-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-84b9f45d47-rwjb8\" (UID: \"7ca67738-2068-4c2f-beeb-60751657efef\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-rwjb8" Jan 20 18:10:31 crc kubenswrapper[4558]: I0120 18:10:31.504556 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7ca67738-2068-4c2f-beeb-60751657efef-config\") pod \"dnsmasq-dnsmasq-84b9f45d47-rwjb8\" (UID: \"7ca67738-2068-4c2f-beeb-60751657efef\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-rwjb8" Jan 20 18:10:31 crc kubenswrapper[4558]: I0120 18:10:31.504583 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-znhcd\" (UniqueName: \"kubernetes.io/projected/7ca67738-2068-4c2f-beeb-60751657efef-kube-api-access-znhcd\") pod \"dnsmasq-dnsmasq-84b9f45d47-rwjb8\" (UID: \"7ca67738-2068-4c2f-beeb-60751657efef\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-rwjb8" Jan 20 18:10:31 crc kubenswrapper[4558]: I0120 18:10:31.587851 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb9fb0f-afd6-4edc-af2e-aa967469a7a1-openstack-edpm-multinode" (OuterVolumeSpecName: "openstack-edpm-multinode") pod "4bb9fb0f-afd6-4edc-af2e-aa967469a7a1" (UID: "4bb9fb0f-afd6-4edc-af2e-aa967469a7a1"). InnerVolumeSpecName "openstack-edpm-multinode". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:10:31 crc kubenswrapper[4558]: I0120 18:10:31.603597 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb9fb0f-afd6-4edc-af2e-aa967469a7a1-config" (OuterVolumeSpecName: "config") pod "4bb9fb0f-afd6-4edc-af2e-aa967469a7a1" (UID: "4bb9fb0f-afd6-4edc-af2e-aa967469a7a1"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:10:31 crc kubenswrapper[4558]: I0120 18:10:31.605577 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mqgrw\" (UniqueName: \"kubernetes.io/projected/4bb9fb0f-afd6-4edc-af2e-aa967469a7a1-kube-api-access-mqgrw\") pod \"4bb9fb0f-afd6-4edc-af2e-aa967469a7a1\" (UID: \"4bb9fb0f-afd6-4edc-af2e-aa967469a7a1\") " Jan 20 18:10:31 crc kubenswrapper[4558]: I0120 18:10:31.605635 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/4bb9fb0f-afd6-4edc-af2e-aa967469a7a1-dnsmasq-svc\") pod \"4bb9fb0f-afd6-4edc-af2e-aa967469a7a1\" (UID: \"4bb9fb0f-afd6-4edc-af2e-aa967469a7a1\") " Jan 20 18:10:31 crc kubenswrapper[4558]: I0120 18:10:31.605926 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7ca67738-2068-4c2f-beeb-60751657efef-config\") pod \"dnsmasq-dnsmasq-84b9f45d47-rwjb8\" (UID: \"7ca67738-2068-4c2f-beeb-60751657efef\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-rwjb8" Jan 20 18:10:31 crc kubenswrapper[4558]: I0120 18:10:31.605955 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-znhcd\" (UniqueName: \"kubernetes.io/projected/7ca67738-2068-4c2f-beeb-60751657efef-kube-api-access-znhcd\") pod \"dnsmasq-dnsmasq-84b9f45d47-rwjb8\" (UID: \"7ca67738-2068-4c2f-beeb-60751657efef\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-rwjb8" Jan 20 18:10:31 crc kubenswrapper[4558]: I0120 18:10:31.606019 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/7ca67738-2068-4c2f-beeb-60751657efef-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-84b9f45d47-rwjb8\" (UID: \"7ca67738-2068-4c2f-beeb-60751657efef\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-rwjb8" Jan 20 18:10:31 crc kubenswrapper[4558]: I0120 18:10:31.606092 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4bb9fb0f-afd6-4edc-af2e-aa967469a7a1-config\") on node \"crc\" DevicePath \"\"" Jan 20 18:10:31 crc kubenswrapper[4558]: I0120 18:10:31.606106 4558 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-multinode\" (UniqueName: \"kubernetes.io/configmap/4bb9fb0f-afd6-4edc-af2e-aa967469a7a1-openstack-edpm-multinode\") on node \"crc\" DevicePath \"\"" Jan 20 18:10:31 crc kubenswrapper[4558]: I0120 18:10:31.607924 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7ca67738-2068-4c2f-beeb-60751657efef-config\") pod \"dnsmasq-dnsmasq-84b9f45d47-rwjb8\" (UID: \"7ca67738-2068-4c2f-beeb-60751657efef\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-rwjb8" Jan 20 18:10:31 crc kubenswrapper[4558]: I0120 18:10:31.617325 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb9fb0f-afd6-4edc-af2e-aa967469a7a1-kube-api-access-mqgrw" (OuterVolumeSpecName: "kube-api-access-mqgrw") pod "4bb9fb0f-afd6-4edc-af2e-aa967469a7a1" (UID: "4bb9fb0f-afd6-4edc-af2e-aa967469a7a1"). InnerVolumeSpecName "kube-api-access-mqgrw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:10:31 crc kubenswrapper[4558]: I0120 18:10:31.617987 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/7ca67738-2068-4c2f-beeb-60751657efef-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-84b9f45d47-rwjb8\" (UID: \"7ca67738-2068-4c2f-beeb-60751657efef\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-rwjb8" Jan 20 18:10:31 crc kubenswrapper[4558]: I0120 18:10:31.641946 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-znhcd\" (UniqueName: \"kubernetes.io/projected/7ca67738-2068-4c2f-beeb-60751657efef-kube-api-access-znhcd\") pod \"dnsmasq-dnsmasq-84b9f45d47-rwjb8\" (UID: \"7ca67738-2068-4c2f-beeb-60751657efef\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-rwjb8" Jan 20 18:10:31 crc kubenswrapper[4558]: I0120 18:10:31.652561 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb9fb0f-afd6-4edc-af2e-aa967469a7a1-dnsmasq-svc" (OuterVolumeSpecName: "dnsmasq-svc") pod "4bb9fb0f-afd6-4edc-af2e-aa967469a7a1" (UID: "4bb9fb0f-afd6-4edc-af2e-aa967469a7a1"). InnerVolumeSpecName "dnsmasq-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:10:31 crc kubenswrapper[4558]: I0120 18:10:31.707292 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mqgrw\" (UniqueName: \"kubernetes.io/projected/4bb9fb0f-afd6-4edc-af2e-aa967469a7a1-kube-api-access-mqgrw\") on node \"crc\" DevicePath \"\"" Jan 20 18:10:31 crc kubenswrapper[4558]: I0120 18:10:31.707318 4558 reconciler_common.go:293] "Volume detached for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/4bb9fb0f-afd6-4edc-af2e-aa967469a7a1-dnsmasq-svc\") on node \"crc\" DevicePath \"\"" Jan 20 18:10:31 crc kubenswrapper[4558]: I0120 18:10:31.792928 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-rwjb8" Jan 20 18:10:32 crc kubenswrapper[4558]: I0120 18:10:32.043225 4558 generic.go:334] "Generic (PLEG): container finished" podID="4bb9fb0f-afd6-4edc-af2e-aa967469a7a1" containerID="2c40ec210f462f6e681c3d856258d487e8b971cde7c7b07ba9dc5eb2b43e2390" exitCode=0 Jan 20 18:10:32 crc kubenswrapper[4558]: I0120 18:10:32.043281 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-5d55f47b6c-fvphz" event={"ID":"4bb9fb0f-afd6-4edc-af2e-aa967469a7a1","Type":"ContainerDied","Data":"2c40ec210f462f6e681c3d856258d487e8b971cde7c7b07ba9dc5eb2b43e2390"} Jan 20 18:10:32 crc kubenswrapper[4558]: I0120 18:10:32.043589 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-5d55f47b6c-fvphz" event={"ID":"4bb9fb0f-afd6-4edc-af2e-aa967469a7a1","Type":"ContainerDied","Data":"5716ce0c4ff55f2ae9f1e63f06de899e28c933679957c3c208b99397860b2be0"} Jan 20 18:10:32 crc kubenswrapper[4558]: I0120 18:10:32.043625 4558 scope.go:117] "RemoveContainer" containerID="2c40ec210f462f6e681c3d856258d487e8b971cde7c7b07ba9dc5eb2b43e2390" Jan 20 18:10:32 crc kubenswrapper[4558]: I0120 18:10:32.043320 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-5d55f47b6c-fvphz" Jan 20 18:10:32 crc kubenswrapper[4558]: I0120 18:10:32.073130 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-5d55f47b6c-fvphz"] Jan 20 18:10:32 crc kubenswrapper[4558]: I0120 18:10:32.076029 4558 scope.go:117] "RemoveContainer" containerID="d9743af7c9f73d7276e8ca33fbcde1589d7af9645a6833c5edfd19df6dec3031" Jan 20 18:10:32 crc kubenswrapper[4558]: I0120 18:10:32.077414 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-5d55f47b6c-fvphz"] Jan 20 18:10:32 crc kubenswrapper[4558]: I0120 18:10:32.110032 4558 scope.go:117] "RemoveContainer" containerID="2c40ec210f462f6e681c3d856258d487e8b971cde7c7b07ba9dc5eb2b43e2390" Jan 20 18:10:32 crc kubenswrapper[4558]: E0120 18:10:32.110472 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2c40ec210f462f6e681c3d856258d487e8b971cde7c7b07ba9dc5eb2b43e2390\": container with ID starting with 2c40ec210f462f6e681c3d856258d487e8b971cde7c7b07ba9dc5eb2b43e2390 not found: ID does not exist" containerID="2c40ec210f462f6e681c3d856258d487e8b971cde7c7b07ba9dc5eb2b43e2390" Jan 20 18:10:32 crc kubenswrapper[4558]: I0120 18:10:32.110525 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2c40ec210f462f6e681c3d856258d487e8b971cde7c7b07ba9dc5eb2b43e2390"} err="failed to get container status \"2c40ec210f462f6e681c3d856258d487e8b971cde7c7b07ba9dc5eb2b43e2390\": rpc error: code = NotFound desc = could not find container \"2c40ec210f462f6e681c3d856258d487e8b971cde7c7b07ba9dc5eb2b43e2390\": container with ID starting with 2c40ec210f462f6e681c3d856258d487e8b971cde7c7b07ba9dc5eb2b43e2390 not found: ID does not exist" Jan 20 18:10:32 crc kubenswrapper[4558]: I0120 18:10:32.110548 4558 scope.go:117] "RemoveContainer" containerID="d9743af7c9f73d7276e8ca33fbcde1589d7af9645a6833c5edfd19df6dec3031" Jan 20 18:10:32 crc kubenswrapper[4558]: E0120 18:10:32.110968 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d9743af7c9f73d7276e8ca33fbcde1589d7af9645a6833c5edfd19df6dec3031\": container with ID starting with d9743af7c9f73d7276e8ca33fbcde1589d7af9645a6833c5edfd19df6dec3031 not found: ID does not exist" containerID="d9743af7c9f73d7276e8ca33fbcde1589d7af9645a6833c5edfd19df6dec3031" Jan 20 18:10:32 crc kubenswrapper[4558]: I0120 18:10:32.110999 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d9743af7c9f73d7276e8ca33fbcde1589d7af9645a6833c5edfd19df6dec3031"} err="failed to get container status \"d9743af7c9f73d7276e8ca33fbcde1589d7af9645a6833c5edfd19df6dec3031\": rpc error: code = NotFound desc = could not find container \"d9743af7c9f73d7276e8ca33fbcde1589d7af9645a6833c5edfd19df6dec3031\": container with ID starting with d9743af7c9f73d7276e8ca33fbcde1589d7af9645a6833c5edfd19df6dec3031 not found: ID does not exist" Jan 20 18:10:32 crc kubenswrapper[4558]: I0120 18:10:32.183548 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-rwjb8"] Jan 20 18:10:32 crc kubenswrapper[4558]: I0120 18:10:32.575020 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb9fb0f-afd6-4edc-af2e-aa967469a7a1" path="/var/lib/kubelet/pods/4bb9fb0f-afd6-4edc-af2e-aa967469a7a1/volumes" Jan 20 18:10:33 crc 
kubenswrapper[4558]: I0120 18:10:33.055372 4558 generic.go:334] "Generic (PLEG): container finished" podID="7ca67738-2068-4c2f-beeb-60751657efef" containerID="5102cc42ead7c78a1e402c9fc2c904273cf19126d28bbe80fc61a00a8ba6fc03" exitCode=0 Jan 20 18:10:33 crc kubenswrapper[4558]: I0120 18:10:33.055438 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-rwjb8" event={"ID":"7ca67738-2068-4c2f-beeb-60751657efef","Type":"ContainerDied","Data":"5102cc42ead7c78a1e402c9fc2c904273cf19126d28bbe80fc61a00a8ba6fc03"} Jan 20 18:10:33 crc kubenswrapper[4558]: I0120 18:10:33.055519 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-rwjb8" event={"ID":"7ca67738-2068-4c2f-beeb-60751657efef","Type":"ContainerStarted","Data":"41792560071babf587f67cfb1c29aeb1df680bead48e72fb8223ff8a6e2a7e62"} Jan 20 18:10:33 crc kubenswrapper[4558]: I0120 18:10:33.168469 4558 scope.go:117] "RemoveContainer" containerID="ad46975a195c5062c19fee47d527fae8580c0225ca40e53e7198e0dd9aa58a65" Jan 20 18:10:33 crc kubenswrapper[4558]: I0120 18:10:33.195724 4558 scope.go:117] "RemoveContainer" containerID="cb81cdcb32f844625692122a4816f3b04fc8f25dd62b7b1032d238a783481362" Jan 20 18:10:33 crc kubenswrapper[4558]: I0120 18:10:33.219760 4558 scope.go:117] "RemoveContainer" containerID="01b210cb61c80d24600fe03b823f4355f9b8b34f0d57df6d5dc8f492716ad30c" Jan 20 18:10:33 crc kubenswrapper[4558]: I0120 18:10:33.252496 4558 scope.go:117] "RemoveContainer" containerID="56b1bad1c26f54ce418c79aa5b1f06a6259bc82bb730f7f529d2783a2c39c82c" Jan 20 18:10:33 crc kubenswrapper[4558]: I0120 18:10:33.292558 4558 scope.go:117] "RemoveContainer" containerID="ec9467e4d7a0341c215effcc90ce66f3cb78441df4ad8336af7f57524447d2d4" Jan 20 18:10:33 crc kubenswrapper[4558]: I0120 18:10:33.313259 4558 scope.go:117] "RemoveContainer" containerID="1185e47915505c6785f0d83431115dfd450327f88c169d9a61b3f3becbcc4354" Jan 20 18:10:33 crc kubenswrapper[4558]: I0120 18:10:33.333651 4558 scope.go:117] "RemoveContainer" containerID="91cf12afee15a6239013ae4102301de64ba2bfeb10aa3fdc172976b1b986c0ca" Jan 20 18:10:34 crc kubenswrapper[4558]: I0120 18:10:34.076647 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-rwjb8" event={"ID":"7ca67738-2068-4c2f-beeb-60751657efef","Type":"ContainerStarted","Data":"b8c9f852e28de0d1a4c775128c08cb0b2bd3e2b52c3fc03212de7e7fefe37ca3"} Jan 20 18:10:34 crc kubenswrapper[4558]: I0120 18:10:34.077313 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-rwjb8" Jan 20 18:10:34 crc kubenswrapper[4558]: I0120 18:10:34.094472 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-rwjb8" podStartSLOduration=3.094457038 podStartE2EDuration="3.094457038s" podCreationTimestamp="2026-01-20 18:10:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:10:34.093443291 +0000 UTC m=+5327.853781259" watchObservedRunningTime="2026-01-20 18:10:34.094457038 +0000 UTC m=+5327.854795006" Jan 20 18:10:37 crc kubenswrapper[4558]: I0120 18:10:37.947189 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-hlkmf"] Jan 20 18:10:37 crc kubenswrapper[4558]: I0120 18:10:37.953642 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["crc-storage/crc-storage-crc-hlkmf"] Jan 20 18:10:38 crc kubenswrapper[4558]: I0120 18:10:38.062759 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-ms4mx"] Jan 20 18:10:38 crc kubenswrapper[4558]: E0120 18:10:38.063124 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4bb9fb0f-afd6-4edc-af2e-aa967469a7a1" containerName="init" Jan 20 18:10:38 crc kubenswrapper[4558]: I0120 18:10:38.063145 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4bb9fb0f-afd6-4edc-af2e-aa967469a7a1" containerName="init" Jan 20 18:10:38 crc kubenswrapper[4558]: E0120 18:10:38.063187 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4bb9fb0f-afd6-4edc-af2e-aa967469a7a1" containerName="dnsmasq-dns" Jan 20 18:10:38 crc kubenswrapper[4558]: I0120 18:10:38.063196 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4bb9fb0f-afd6-4edc-af2e-aa967469a7a1" containerName="dnsmasq-dns" Jan 20 18:10:38 crc kubenswrapper[4558]: I0120 18:10:38.063359 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="4bb9fb0f-afd6-4edc-af2e-aa967469a7a1" containerName="dnsmasq-dns" Jan 20 18:10:38 crc kubenswrapper[4558]: I0120 18:10:38.063944 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-ms4mx" Jan 20 18:10:38 crc kubenswrapper[4558]: I0120 18:10:38.065999 4558 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-k9ht8" Jan 20 18:10:38 crc kubenswrapper[4558]: I0120 18:10:38.066219 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Jan 20 18:10:38 crc kubenswrapper[4558]: I0120 18:10:38.066371 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Jan 20 18:10:38 crc kubenswrapper[4558]: I0120 18:10:38.066520 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Jan 20 18:10:38 crc kubenswrapper[4558]: I0120 18:10:38.071076 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-ms4mx"] Jan 20 18:10:38 crc kubenswrapper[4558]: I0120 18:10:38.198449 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/ab4d389d-9378-428d-9798-b13f1e79a18f-crc-storage\") pod \"crc-storage-crc-ms4mx\" (UID: \"ab4d389d-9378-428d-9798-b13f1e79a18f\") " pod="crc-storage/crc-storage-crc-ms4mx" Jan 20 18:10:38 crc kubenswrapper[4558]: I0120 18:10:38.198554 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/ab4d389d-9378-428d-9798-b13f1e79a18f-node-mnt\") pod \"crc-storage-crc-ms4mx\" (UID: \"ab4d389d-9378-428d-9798-b13f1e79a18f\") " pod="crc-storage/crc-storage-crc-ms4mx" Jan 20 18:10:38 crc kubenswrapper[4558]: I0120 18:10:38.198805 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dns9v\" (UniqueName: \"kubernetes.io/projected/ab4d389d-9378-428d-9798-b13f1e79a18f-kube-api-access-dns9v\") pod \"crc-storage-crc-ms4mx\" (UID: \"ab4d389d-9378-428d-9798-b13f1e79a18f\") " pod="crc-storage/crc-storage-crc-ms4mx" Jan 20 18:10:38 crc kubenswrapper[4558]: I0120 18:10:38.300079 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dns9v\" (UniqueName: 
\"kubernetes.io/projected/ab4d389d-9378-428d-9798-b13f1e79a18f-kube-api-access-dns9v\") pod \"crc-storage-crc-ms4mx\" (UID: \"ab4d389d-9378-428d-9798-b13f1e79a18f\") " pod="crc-storage/crc-storage-crc-ms4mx" Jan 20 18:10:38 crc kubenswrapper[4558]: I0120 18:10:38.300182 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/ab4d389d-9378-428d-9798-b13f1e79a18f-crc-storage\") pod \"crc-storage-crc-ms4mx\" (UID: \"ab4d389d-9378-428d-9798-b13f1e79a18f\") " pod="crc-storage/crc-storage-crc-ms4mx" Jan 20 18:10:38 crc kubenswrapper[4558]: I0120 18:10:38.300218 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/ab4d389d-9378-428d-9798-b13f1e79a18f-node-mnt\") pod \"crc-storage-crc-ms4mx\" (UID: \"ab4d389d-9378-428d-9798-b13f1e79a18f\") " pod="crc-storage/crc-storage-crc-ms4mx" Jan 20 18:10:38 crc kubenswrapper[4558]: I0120 18:10:38.300535 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/ab4d389d-9378-428d-9798-b13f1e79a18f-node-mnt\") pod \"crc-storage-crc-ms4mx\" (UID: \"ab4d389d-9378-428d-9798-b13f1e79a18f\") " pod="crc-storage/crc-storage-crc-ms4mx" Jan 20 18:10:38 crc kubenswrapper[4558]: I0120 18:10:38.300994 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/ab4d389d-9378-428d-9798-b13f1e79a18f-crc-storage\") pod \"crc-storage-crc-ms4mx\" (UID: \"ab4d389d-9378-428d-9798-b13f1e79a18f\") " pod="crc-storage/crc-storage-crc-ms4mx" Jan 20 18:10:38 crc kubenswrapper[4558]: I0120 18:10:38.319946 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dns9v\" (UniqueName: \"kubernetes.io/projected/ab4d389d-9378-428d-9798-b13f1e79a18f-kube-api-access-dns9v\") pod \"crc-storage-crc-ms4mx\" (UID: \"ab4d389d-9378-428d-9798-b13f1e79a18f\") " pod="crc-storage/crc-storage-crc-ms4mx" Jan 20 18:10:38 crc kubenswrapper[4558]: I0120 18:10:38.381367 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-ms4mx" Jan 20 18:10:38 crc kubenswrapper[4558]: I0120 18:10:38.575545 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0cff9a6b-f955-41c2-9326-79d04336e59a" path="/var/lib/kubelet/pods/0cff9a6b-f955-41c2-9326-79d04336e59a/volumes" Jan 20 18:10:39 crc kubenswrapper[4558]: I0120 18:10:39.207457 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-ms4mx"] Jan 20 18:10:39 crc kubenswrapper[4558]: I0120 18:10:39.218270 4558 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 20 18:10:40 crc kubenswrapper[4558]: I0120 18:10:40.137081 4558 generic.go:334] "Generic (PLEG): container finished" podID="ab4d389d-9378-428d-9798-b13f1e79a18f" containerID="7892fbaf8b021f5bdd6f5aadb0da02b4b0bef32ad452d5c9ed7e87377da2a546" exitCode=0 Jan 20 18:10:40 crc kubenswrapper[4558]: I0120 18:10:40.137127 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-ms4mx" event={"ID":"ab4d389d-9378-428d-9798-b13f1e79a18f","Type":"ContainerDied","Data":"7892fbaf8b021f5bdd6f5aadb0da02b4b0bef32ad452d5c9ed7e87377da2a546"} Jan 20 18:10:40 crc kubenswrapper[4558]: I0120 18:10:40.137388 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-ms4mx" event={"ID":"ab4d389d-9378-428d-9798-b13f1e79a18f","Type":"ContainerStarted","Data":"887650f57862dc904d8c31212d6b28c1786846c7b4b84a8eafa9dacac5bf35ae"} Jan 20 18:10:41 crc kubenswrapper[4558]: I0120 18:10:41.407232 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-ms4mx" Jan 20 18:10:41 crc kubenswrapper[4558]: I0120 18:10:41.546844 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/ab4d389d-9378-428d-9798-b13f1e79a18f-node-mnt\") pod \"ab4d389d-9378-428d-9798-b13f1e79a18f\" (UID: \"ab4d389d-9378-428d-9798-b13f1e79a18f\") " Jan 20 18:10:41 crc kubenswrapper[4558]: I0120 18:10:41.546964 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ab4d389d-9378-428d-9798-b13f1e79a18f-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "ab4d389d-9378-428d-9798-b13f1e79a18f" (UID: "ab4d389d-9378-428d-9798-b13f1e79a18f"). InnerVolumeSpecName "node-mnt". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 18:10:41 crc kubenswrapper[4558]: I0120 18:10:41.547018 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dns9v\" (UniqueName: \"kubernetes.io/projected/ab4d389d-9378-428d-9798-b13f1e79a18f-kube-api-access-dns9v\") pod \"ab4d389d-9378-428d-9798-b13f1e79a18f\" (UID: \"ab4d389d-9378-428d-9798-b13f1e79a18f\") " Jan 20 18:10:41 crc kubenswrapper[4558]: I0120 18:10:41.547175 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/ab4d389d-9378-428d-9798-b13f1e79a18f-crc-storage\") pod \"ab4d389d-9378-428d-9798-b13f1e79a18f\" (UID: \"ab4d389d-9378-428d-9798-b13f1e79a18f\") " Jan 20 18:10:41 crc kubenswrapper[4558]: I0120 18:10:41.547536 4558 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/ab4d389d-9378-428d-9798-b13f1e79a18f-node-mnt\") on node \"crc\" DevicePath \"\"" Jan 20 18:10:41 crc kubenswrapper[4558]: I0120 18:10:41.553570 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab4d389d-9378-428d-9798-b13f1e79a18f-kube-api-access-dns9v" (OuterVolumeSpecName: "kube-api-access-dns9v") pod "ab4d389d-9378-428d-9798-b13f1e79a18f" (UID: "ab4d389d-9378-428d-9798-b13f1e79a18f"). InnerVolumeSpecName "kube-api-access-dns9v". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:10:41 crc kubenswrapper[4558]: I0120 18:10:41.565960 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ab4d389d-9378-428d-9798-b13f1e79a18f-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "ab4d389d-9378-428d-9798-b13f1e79a18f" (UID: "ab4d389d-9378-428d-9798-b13f1e79a18f"). InnerVolumeSpecName "crc-storage". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:10:41 crc kubenswrapper[4558]: I0120 18:10:41.649564 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dns9v\" (UniqueName: \"kubernetes.io/projected/ab4d389d-9378-428d-9798-b13f1e79a18f-kube-api-access-dns9v\") on node \"crc\" DevicePath \"\"" Jan 20 18:10:41 crc kubenswrapper[4558]: I0120 18:10:41.649598 4558 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/ab4d389d-9378-428d-9798-b13f1e79a18f-crc-storage\") on node \"crc\" DevicePath \"\"" Jan 20 18:10:41 crc kubenswrapper[4558]: I0120 18:10:41.794472 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-rwjb8" Jan 20 18:10:41 crc kubenswrapper[4558]: I0120 18:10:41.845079 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-964c896d7-mnk2h"] Jan 20 18:10:41 crc kubenswrapper[4558]: I0120 18:10:41.845718 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-964c896d7-mnk2h" podUID="e56a243a-71b7-471f-b519-57917fa67e7b" containerName="dnsmasq-dns" containerID="cri-o://9788485e2fec27e60d7a4c77df74bbae64ee23a3a8c66f281b42423c151060ca" gracePeriod=10 Jan 20 18:10:42 crc kubenswrapper[4558]: I0120 18:10:42.160364 4558 generic.go:334] "Generic (PLEG): container finished" podID="e56a243a-71b7-471f-b519-57917fa67e7b" containerID="9788485e2fec27e60d7a4c77df74bbae64ee23a3a8c66f281b42423c151060ca" exitCode=0 Jan 20 18:10:42 crc kubenswrapper[4558]: I0120 18:10:42.160473 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-964c896d7-mnk2h" event={"ID":"e56a243a-71b7-471f-b519-57917fa67e7b","Type":"ContainerDied","Data":"9788485e2fec27e60d7a4c77df74bbae64ee23a3a8c66f281b42423c151060ca"} Jan 20 18:10:42 crc kubenswrapper[4558]: I0120 18:10:42.161994 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-ms4mx" event={"ID":"ab4d389d-9378-428d-9798-b13f1e79a18f","Type":"ContainerDied","Data":"887650f57862dc904d8c31212d6b28c1786846c7b4b84a8eafa9dacac5bf35ae"} Jan 20 18:10:42 crc kubenswrapper[4558]: I0120 18:10:42.162018 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="887650f57862dc904d8c31212d6b28c1786846c7b4b84a8eafa9dacac5bf35ae" Jan 20 18:10:42 crc kubenswrapper[4558]: I0120 18:10:42.162078 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-ms4mx" Jan 20 18:10:42 crc kubenswrapper[4558]: I0120 18:10:42.172483 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-964c896d7-mnk2h" Jan 20 18:10:42 crc kubenswrapper[4558]: I0120 18:10:42.362026 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7b7bv\" (UniqueName: \"kubernetes.io/projected/e56a243a-71b7-471f-b519-57917fa67e7b-kube-api-access-7b7bv\") pod \"e56a243a-71b7-471f-b519-57917fa67e7b\" (UID: \"e56a243a-71b7-471f-b519-57917fa67e7b\") " Jan 20 18:10:42 crc kubenswrapper[4558]: I0120 18:10:42.362125 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/e56a243a-71b7-471f-b519-57917fa67e7b-dnsmasq-svc\") pod \"e56a243a-71b7-471f-b519-57917fa67e7b\" (UID: \"e56a243a-71b7-471f-b519-57917fa67e7b\") " Jan 20 18:10:42 crc kubenswrapper[4558]: I0120 18:10:42.362248 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-multinode\" (UniqueName: \"kubernetes.io/configmap/e56a243a-71b7-471f-b519-57917fa67e7b-openstack-edpm-multinode\") pod \"e56a243a-71b7-471f-b519-57917fa67e7b\" (UID: \"e56a243a-71b7-471f-b519-57917fa67e7b\") " Jan 20 18:10:42 crc kubenswrapper[4558]: I0120 18:10:42.362282 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e56a243a-71b7-471f-b519-57917fa67e7b-config\") pod \"e56a243a-71b7-471f-b519-57917fa67e7b\" (UID: \"e56a243a-71b7-471f-b519-57917fa67e7b\") " Jan 20 18:10:42 crc kubenswrapper[4558]: I0120 18:10:42.367662 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e56a243a-71b7-471f-b519-57917fa67e7b-kube-api-access-7b7bv" (OuterVolumeSpecName: "kube-api-access-7b7bv") pod "e56a243a-71b7-471f-b519-57917fa67e7b" (UID: "e56a243a-71b7-471f-b519-57917fa67e7b"). InnerVolumeSpecName "kube-api-access-7b7bv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:10:42 crc kubenswrapper[4558]: I0120 18:10:42.392350 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e56a243a-71b7-471f-b519-57917fa67e7b-config" (OuterVolumeSpecName: "config") pod "e56a243a-71b7-471f-b519-57917fa67e7b" (UID: "e56a243a-71b7-471f-b519-57917fa67e7b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:10:42 crc kubenswrapper[4558]: I0120 18:10:42.395779 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e56a243a-71b7-471f-b519-57917fa67e7b-dnsmasq-svc" (OuterVolumeSpecName: "dnsmasq-svc") pod "e56a243a-71b7-471f-b519-57917fa67e7b" (UID: "e56a243a-71b7-471f-b519-57917fa67e7b"). InnerVolumeSpecName "dnsmasq-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:10:42 crc kubenswrapper[4558]: I0120 18:10:42.398366 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e56a243a-71b7-471f-b519-57917fa67e7b-openstack-edpm-multinode" (OuterVolumeSpecName: "openstack-edpm-multinode") pod "e56a243a-71b7-471f-b519-57917fa67e7b" (UID: "e56a243a-71b7-471f-b519-57917fa67e7b"). InnerVolumeSpecName "openstack-edpm-multinode". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:10:42 crc kubenswrapper[4558]: I0120 18:10:42.464138 4558 reconciler_common.go:293] "Volume detached for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/e56a243a-71b7-471f-b519-57917fa67e7b-dnsmasq-svc\") on node \"crc\" DevicePath \"\"" Jan 20 18:10:42 crc kubenswrapper[4558]: I0120 18:10:42.464193 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e56a243a-71b7-471f-b519-57917fa67e7b-config\") on node \"crc\" DevicePath \"\"" Jan 20 18:10:42 crc kubenswrapper[4558]: I0120 18:10:42.464210 4558 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-multinode\" (UniqueName: \"kubernetes.io/configmap/e56a243a-71b7-471f-b519-57917fa67e7b-openstack-edpm-multinode\") on node \"crc\" DevicePath \"\"" Jan 20 18:10:42 crc kubenswrapper[4558]: I0120 18:10:42.464228 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7b7bv\" (UniqueName: \"kubernetes.io/projected/e56a243a-71b7-471f-b519-57917fa67e7b-kube-api-access-7b7bv\") on node \"crc\" DevicePath \"\"" Jan 20 18:10:43 crc kubenswrapper[4558]: I0120 18:10:43.173623 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-964c896d7-mnk2h" event={"ID":"e56a243a-71b7-471f-b519-57917fa67e7b","Type":"ContainerDied","Data":"bd7fb419e18dfc12fdd5d364c048f12418331774c10f59264194e509027b1e4b"} Jan 20 18:10:43 crc kubenswrapper[4558]: I0120 18:10:43.173726 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-964c896d7-mnk2h" Jan 20 18:10:43 crc kubenswrapper[4558]: I0120 18:10:43.173984 4558 scope.go:117] "RemoveContainer" containerID="9788485e2fec27e60d7a4c77df74bbae64ee23a3a8c66f281b42423c151060ca" Jan 20 18:10:43 crc kubenswrapper[4558]: I0120 18:10:43.194395 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-964c896d7-mnk2h"] Jan 20 18:10:43 crc kubenswrapper[4558]: I0120 18:10:43.195240 4558 scope.go:117] "RemoveContainer" containerID="2cce52590ef71014c25bc35352899fb8a67a23cb987bf4aa292c690ea776bb17" Jan 20 18:10:43 crc kubenswrapper[4558]: I0120 18:10:43.198511 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-964c896d7-mnk2h"] Jan 20 18:10:44 crc kubenswrapper[4558]: I0120 18:10:44.369643 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-ms4mx"] Jan 20 18:10:44 crc kubenswrapper[4558]: I0120 18:10:44.373678 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-ms4mx"] Jan 20 18:10:44 crc kubenswrapper[4558]: I0120 18:10:44.485722 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-gt7mt"] Jan 20 18:10:44 crc kubenswrapper[4558]: E0120 18:10:44.486027 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e56a243a-71b7-471f-b519-57917fa67e7b" containerName="init" Jan 20 18:10:44 crc kubenswrapper[4558]: I0120 18:10:44.486042 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e56a243a-71b7-471f-b519-57917fa67e7b" containerName="init" Jan 20 18:10:44 crc kubenswrapper[4558]: E0120 18:10:44.486058 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e56a243a-71b7-471f-b519-57917fa67e7b" containerName="dnsmasq-dns" Jan 20 18:10:44 crc kubenswrapper[4558]: I0120 18:10:44.486063 4558 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="e56a243a-71b7-471f-b519-57917fa67e7b" containerName="dnsmasq-dns" Jan 20 18:10:44 crc kubenswrapper[4558]: E0120 18:10:44.486072 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab4d389d-9378-428d-9798-b13f1e79a18f" containerName="storage" Jan 20 18:10:44 crc kubenswrapper[4558]: I0120 18:10:44.486078 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab4d389d-9378-428d-9798-b13f1e79a18f" containerName="storage" Jan 20 18:10:44 crc kubenswrapper[4558]: I0120 18:10:44.486209 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e56a243a-71b7-471f-b519-57917fa67e7b" containerName="dnsmasq-dns" Jan 20 18:10:44 crc kubenswrapper[4558]: I0120 18:10:44.486233 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab4d389d-9378-428d-9798-b13f1e79a18f" containerName="storage" Jan 20 18:10:44 crc kubenswrapper[4558]: I0120 18:10:44.486681 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-gt7mt" Jan 20 18:10:44 crc kubenswrapper[4558]: I0120 18:10:44.488524 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Jan 20 18:10:44 crc kubenswrapper[4558]: I0120 18:10:44.488669 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Jan 20 18:10:44 crc kubenswrapper[4558]: I0120 18:10:44.489095 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Jan 20 18:10:44 crc kubenswrapper[4558]: I0120 18:10:44.489151 4558 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-k9ht8" Jan 20 18:10:44 crc kubenswrapper[4558]: I0120 18:10:44.492407 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gqk65\" (UniqueName: \"kubernetes.io/projected/e8de84db-3651-48e8-97df-4c84bdcdc0aa-kube-api-access-gqk65\") pod \"crc-storage-crc-gt7mt\" (UID: \"e8de84db-3651-48e8-97df-4c84bdcdc0aa\") " pod="crc-storage/crc-storage-crc-gt7mt" Jan 20 18:10:44 crc kubenswrapper[4558]: I0120 18:10:44.492514 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/e8de84db-3651-48e8-97df-4c84bdcdc0aa-crc-storage\") pod \"crc-storage-crc-gt7mt\" (UID: \"e8de84db-3651-48e8-97df-4c84bdcdc0aa\") " pod="crc-storage/crc-storage-crc-gt7mt" Jan 20 18:10:44 crc kubenswrapper[4558]: I0120 18:10:44.492707 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/e8de84db-3651-48e8-97df-4c84bdcdc0aa-node-mnt\") pod \"crc-storage-crc-gt7mt\" (UID: \"e8de84db-3651-48e8-97df-4c84bdcdc0aa\") " pod="crc-storage/crc-storage-crc-gt7mt" Jan 20 18:10:44 crc kubenswrapper[4558]: I0120 18:10:44.494363 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-gt7mt"] Jan 20 18:10:44 crc kubenswrapper[4558]: I0120 18:10:44.573547 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ab4d389d-9378-428d-9798-b13f1e79a18f" path="/var/lib/kubelet/pods/ab4d389d-9378-428d-9798-b13f1e79a18f/volumes" Jan 20 18:10:44 crc kubenswrapper[4558]: I0120 18:10:44.574241 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e56a243a-71b7-471f-b519-57917fa67e7b" path="/var/lib/kubelet/pods/e56a243a-71b7-471f-b519-57917fa67e7b/volumes" Jan 20 
18:10:44 crc kubenswrapper[4558]: I0120 18:10:44.593993 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gqk65\" (UniqueName: \"kubernetes.io/projected/e8de84db-3651-48e8-97df-4c84bdcdc0aa-kube-api-access-gqk65\") pod \"crc-storage-crc-gt7mt\" (UID: \"e8de84db-3651-48e8-97df-4c84bdcdc0aa\") " pod="crc-storage/crc-storage-crc-gt7mt" Jan 20 18:10:44 crc kubenswrapper[4558]: I0120 18:10:44.594044 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/e8de84db-3651-48e8-97df-4c84bdcdc0aa-crc-storage\") pod \"crc-storage-crc-gt7mt\" (UID: \"e8de84db-3651-48e8-97df-4c84bdcdc0aa\") " pod="crc-storage/crc-storage-crc-gt7mt" Jan 20 18:10:44 crc kubenswrapper[4558]: I0120 18:10:44.594103 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/e8de84db-3651-48e8-97df-4c84bdcdc0aa-node-mnt\") pod \"crc-storage-crc-gt7mt\" (UID: \"e8de84db-3651-48e8-97df-4c84bdcdc0aa\") " pod="crc-storage/crc-storage-crc-gt7mt" Jan 20 18:10:44 crc kubenswrapper[4558]: I0120 18:10:44.594433 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/e8de84db-3651-48e8-97df-4c84bdcdc0aa-node-mnt\") pod \"crc-storage-crc-gt7mt\" (UID: \"e8de84db-3651-48e8-97df-4c84bdcdc0aa\") " pod="crc-storage/crc-storage-crc-gt7mt" Jan 20 18:10:44 crc kubenswrapper[4558]: I0120 18:10:44.595274 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/e8de84db-3651-48e8-97df-4c84bdcdc0aa-crc-storage\") pod \"crc-storage-crc-gt7mt\" (UID: \"e8de84db-3651-48e8-97df-4c84bdcdc0aa\") " pod="crc-storage/crc-storage-crc-gt7mt" Jan 20 18:10:44 crc kubenswrapper[4558]: I0120 18:10:44.611241 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gqk65\" (UniqueName: \"kubernetes.io/projected/e8de84db-3651-48e8-97df-4c84bdcdc0aa-kube-api-access-gqk65\") pod \"crc-storage-crc-gt7mt\" (UID: \"e8de84db-3651-48e8-97df-4c84bdcdc0aa\") " pod="crc-storage/crc-storage-crc-gt7mt" Jan 20 18:10:44 crc kubenswrapper[4558]: I0120 18:10:44.799590 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-gt7mt" Jan 20 18:10:45 crc kubenswrapper[4558]: I0120 18:10:45.174892 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-gt7mt"] Jan 20 18:10:45 crc kubenswrapper[4558]: W0120 18:10:45.177959 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode8de84db_3651_48e8_97df_4c84bdcdc0aa.slice/crio-a3a01879e212fc4bb563601f96d220adf5505b07792e011f5ebda1df19b7203d WatchSource:0}: Error finding container a3a01879e212fc4bb563601f96d220adf5505b07792e011f5ebda1df19b7203d: Status 404 returned error can't find the container with id a3a01879e212fc4bb563601f96d220adf5505b07792e011f5ebda1df19b7203d Jan 20 18:10:45 crc kubenswrapper[4558]: I0120 18:10:45.191976 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-gt7mt" event={"ID":"e8de84db-3651-48e8-97df-4c84bdcdc0aa","Type":"ContainerStarted","Data":"a3a01879e212fc4bb563601f96d220adf5505b07792e011f5ebda1df19b7203d"} Jan 20 18:10:46 crc kubenswrapper[4558]: I0120 18:10:46.204519 4558 generic.go:334] "Generic (PLEG): container finished" podID="e8de84db-3651-48e8-97df-4c84bdcdc0aa" containerID="facfc46d046a836828f83df5cf26fe5cc67ca5798724e57eb89ab4ffd84463bc" exitCode=0 Jan 20 18:10:46 crc kubenswrapper[4558]: I0120 18:10:46.204626 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-gt7mt" event={"ID":"e8de84db-3651-48e8-97df-4c84bdcdc0aa","Type":"ContainerDied","Data":"facfc46d046a836828f83df5cf26fe5cc67ca5798724e57eb89ab4ffd84463bc"} Jan 20 18:10:47 crc kubenswrapper[4558]: I0120 18:10:47.441455 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-gt7mt" Jan 20 18:10:47 crc kubenswrapper[4558]: I0120 18:10:47.638113 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/e8de84db-3651-48e8-97df-4c84bdcdc0aa-node-mnt\") pod \"e8de84db-3651-48e8-97df-4c84bdcdc0aa\" (UID: \"e8de84db-3651-48e8-97df-4c84bdcdc0aa\") " Jan 20 18:10:47 crc kubenswrapper[4558]: I0120 18:10:47.638238 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gqk65\" (UniqueName: \"kubernetes.io/projected/e8de84db-3651-48e8-97df-4c84bdcdc0aa-kube-api-access-gqk65\") pod \"e8de84db-3651-48e8-97df-4c84bdcdc0aa\" (UID: \"e8de84db-3651-48e8-97df-4c84bdcdc0aa\") " Jan 20 18:10:47 crc kubenswrapper[4558]: I0120 18:10:47.638276 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/e8de84db-3651-48e8-97df-4c84bdcdc0aa-crc-storage\") pod \"e8de84db-3651-48e8-97df-4c84bdcdc0aa\" (UID: \"e8de84db-3651-48e8-97df-4c84bdcdc0aa\") " Jan 20 18:10:47 crc kubenswrapper[4558]: I0120 18:10:47.638363 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e8de84db-3651-48e8-97df-4c84bdcdc0aa-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "e8de84db-3651-48e8-97df-4c84bdcdc0aa" (UID: "e8de84db-3651-48e8-97df-4c84bdcdc0aa"). InnerVolumeSpecName "node-mnt". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 18:10:47 crc kubenswrapper[4558]: I0120 18:10:47.638586 4558 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/e8de84db-3651-48e8-97df-4c84bdcdc0aa-node-mnt\") on node \"crc\" DevicePath \"\"" Jan 20 18:10:47 crc kubenswrapper[4558]: I0120 18:10:47.644626 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e8de84db-3651-48e8-97df-4c84bdcdc0aa-kube-api-access-gqk65" (OuterVolumeSpecName: "kube-api-access-gqk65") pod "e8de84db-3651-48e8-97df-4c84bdcdc0aa" (UID: "e8de84db-3651-48e8-97df-4c84bdcdc0aa"). InnerVolumeSpecName "kube-api-access-gqk65". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:10:47 crc kubenswrapper[4558]: I0120 18:10:47.656530 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e8de84db-3651-48e8-97df-4c84bdcdc0aa-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "e8de84db-3651-48e8-97df-4c84bdcdc0aa" (UID: "e8de84db-3651-48e8-97df-4c84bdcdc0aa"). InnerVolumeSpecName "crc-storage". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:10:47 crc kubenswrapper[4558]: I0120 18:10:47.739120 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gqk65\" (UniqueName: \"kubernetes.io/projected/e8de84db-3651-48e8-97df-4c84bdcdc0aa-kube-api-access-gqk65\") on node \"crc\" DevicePath \"\"" Jan 20 18:10:47 crc kubenswrapper[4558]: I0120 18:10:47.739149 4558 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/e8de84db-3651-48e8-97df-4c84bdcdc0aa-crc-storage\") on node \"crc\" DevicePath \"\"" Jan 20 18:10:48 crc kubenswrapper[4558]: I0120 18:10:48.226292 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-gt7mt" event={"ID":"e8de84db-3651-48e8-97df-4c84bdcdc0aa","Type":"ContainerDied","Data":"a3a01879e212fc4bb563601f96d220adf5505b07792e011f5ebda1df19b7203d"} Jan 20 18:10:48 crc kubenswrapper[4558]: I0120 18:10:48.226360 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a3a01879e212fc4bb563601f96d220adf5505b07792e011f5ebda1df19b7203d" Jan 20 18:10:48 crc kubenswrapper[4558]: I0120 18:10:48.226358 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-gt7mt" Jan 20 18:10:50 crc kubenswrapper[4558]: I0120 18:10:50.452064 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-cfjrb"] Jan 20 18:10:50 crc kubenswrapper[4558]: E0120 18:10:50.452856 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e8de84db-3651-48e8-97df-4c84bdcdc0aa" containerName="storage" Jan 20 18:10:50 crc kubenswrapper[4558]: I0120 18:10:50.452870 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e8de84db-3651-48e8-97df-4c84bdcdc0aa" containerName="storage" Jan 20 18:10:50 crc kubenswrapper[4558]: I0120 18:10:50.453033 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e8de84db-3651-48e8-97df-4c84bdcdc0aa" containerName="storage" Jan 20 18:10:50 crc kubenswrapper[4558]: I0120 18:10:50.453696 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-cfjrb" Jan 20 18:10:50 crc kubenswrapper[4558]: I0120 18:10:50.455616 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"edpm-compute-no-nodes" Jan 20 18:10:50 crc kubenswrapper[4558]: I0120 18:10:50.465579 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-cfjrb"] Jan 20 18:10:50 crc kubenswrapper[4558]: I0120 18:10:50.577662 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b8372fe9-62b1-418d-be4a-234c7416d7e1-config\") pod \"dnsmasq-dnsmasq-64864b6d57-cfjrb\" (UID: \"b8372fe9-62b1-418d-be4a-234c7416d7e1\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-cfjrb" Jan 20 18:10:50 crc kubenswrapper[4558]: I0120 18:10:50.577789 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-74bvd\" (UniqueName: \"kubernetes.io/projected/b8372fe9-62b1-418d-be4a-234c7416d7e1-kube-api-access-74bvd\") pod \"dnsmasq-dnsmasq-64864b6d57-cfjrb\" (UID: \"b8372fe9-62b1-418d-be4a-234c7416d7e1\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-cfjrb" Jan 20 18:10:50 crc kubenswrapper[4558]: I0120 18:10:50.577836 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/configmap/b8372fe9-62b1-418d-be4a-234c7416d7e1-edpm-compute-no-nodes\") pod \"dnsmasq-dnsmasq-64864b6d57-cfjrb\" (UID: \"b8372fe9-62b1-418d-be4a-234c7416d7e1\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-cfjrb" Jan 20 18:10:50 crc kubenswrapper[4558]: I0120 18:10:50.577857 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/b8372fe9-62b1-418d-be4a-234c7416d7e1-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-64864b6d57-cfjrb\" (UID: \"b8372fe9-62b1-418d-be4a-234c7416d7e1\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-cfjrb" Jan 20 18:10:50 crc kubenswrapper[4558]: I0120 18:10:50.679450 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-74bvd\" (UniqueName: \"kubernetes.io/projected/b8372fe9-62b1-418d-be4a-234c7416d7e1-kube-api-access-74bvd\") pod \"dnsmasq-dnsmasq-64864b6d57-cfjrb\" (UID: \"b8372fe9-62b1-418d-be4a-234c7416d7e1\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-cfjrb" Jan 20 18:10:50 crc kubenswrapper[4558]: I0120 18:10:50.679526 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/configmap/b8372fe9-62b1-418d-be4a-234c7416d7e1-edpm-compute-no-nodes\") pod \"dnsmasq-dnsmasq-64864b6d57-cfjrb\" (UID: \"b8372fe9-62b1-418d-be4a-234c7416d7e1\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-cfjrb" Jan 20 18:10:50 crc kubenswrapper[4558]: I0120 18:10:50.679562 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/b8372fe9-62b1-418d-be4a-234c7416d7e1-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-64864b6d57-cfjrb\" (UID: \"b8372fe9-62b1-418d-be4a-234c7416d7e1\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-cfjrb" Jan 20 18:10:50 crc kubenswrapper[4558]: I0120 18:10:50.679636 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"config\" (UniqueName: \"kubernetes.io/configmap/b8372fe9-62b1-418d-be4a-234c7416d7e1-config\") pod \"dnsmasq-dnsmasq-64864b6d57-cfjrb\" (UID: \"b8372fe9-62b1-418d-be4a-234c7416d7e1\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-cfjrb" Jan 20 18:10:50 crc kubenswrapper[4558]: I0120 18:10:50.680599 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/configmap/b8372fe9-62b1-418d-be4a-234c7416d7e1-edpm-compute-no-nodes\") pod \"dnsmasq-dnsmasq-64864b6d57-cfjrb\" (UID: \"b8372fe9-62b1-418d-be4a-234c7416d7e1\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-cfjrb" Jan 20 18:10:50 crc kubenswrapper[4558]: I0120 18:10:50.680628 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b8372fe9-62b1-418d-be4a-234c7416d7e1-config\") pod \"dnsmasq-dnsmasq-64864b6d57-cfjrb\" (UID: \"b8372fe9-62b1-418d-be4a-234c7416d7e1\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-cfjrb" Jan 20 18:10:50 crc kubenswrapper[4558]: I0120 18:10:50.680770 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/b8372fe9-62b1-418d-be4a-234c7416d7e1-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-64864b6d57-cfjrb\" (UID: \"b8372fe9-62b1-418d-be4a-234c7416d7e1\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-cfjrb" Jan 20 18:10:50 crc kubenswrapper[4558]: I0120 18:10:50.699813 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-74bvd\" (UniqueName: \"kubernetes.io/projected/b8372fe9-62b1-418d-be4a-234c7416d7e1-kube-api-access-74bvd\") pod \"dnsmasq-dnsmasq-64864b6d57-cfjrb\" (UID: \"b8372fe9-62b1-418d-be4a-234c7416d7e1\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-cfjrb" Jan 20 18:10:50 crc kubenswrapper[4558]: I0120 18:10:50.768732 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-cfjrb" Jan 20 18:10:51 crc kubenswrapper[4558]: I0120 18:10:51.178671 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-cfjrb"] Jan 20 18:10:51 crc kubenswrapper[4558]: W0120 18:10:51.183578 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb8372fe9_62b1_418d_be4a_234c7416d7e1.slice/crio-dfb33f9cc859bdc355b7d199ded9066441ddfdfde41fc9de591c33b125154cf0 WatchSource:0}: Error finding container dfb33f9cc859bdc355b7d199ded9066441ddfdfde41fc9de591c33b125154cf0: Status 404 returned error can't find the container with id dfb33f9cc859bdc355b7d199ded9066441ddfdfde41fc9de591c33b125154cf0 Jan 20 18:10:51 crc kubenswrapper[4558]: I0120 18:10:51.253113 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-cfjrb" event={"ID":"b8372fe9-62b1-418d-be4a-234c7416d7e1","Type":"ContainerStarted","Data":"dfb33f9cc859bdc355b7d199ded9066441ddfdfde41fc9de591c33b125154cf0"} Jan 20 18:10:52 crc kubenswrapper[4558]: I0120 18:10:52.263626 4558 generic.go:334] "Generic (PLEG): container finished" podID="b8372fe9-62b1-418d-be4a-234c7416d7e1" containerID="bebbff466df8f1cdf15dc8767371ebae6f6d39e5c8bb00294a2906586d9583b0" exitCode=0 Jan 20 18:10:52 crc kubenswrapper[4558]: I0120 18:10:52.263738 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-cfjrb" event={"ID":"b8372fe9-62b1-418d-be4a-234c7416d7e1","Type":"ContainerDied","Data":"bebbff466df8f1cdf15dc8767371ebae6f6d39e5c8bb00294a2906586d9583b0"} Jan 20 18:10:53 crc kubenswrapper[4558]: I0120 18:10:53.275508 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-cfjrb" event={"ID":"b8372fe9-62b1-418d-be4a-234c7416d7e1","Type":"ContainerStarted","Data":"1dac112ee2b711300350d0ec2c657690990bce3b265688f23b6f9b2573bc2710"} Jan 20 18:10:53 crc kubenswrapper[4558]: I0120 18:10:53.276442 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-cfjrb" Jan 20 18:10:53 crc kubenswrapper[4558]: I0120 18:10:53.300372 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-cfjrb" podStartSLOduration=3.300358069 podStartE2EDuration="3.300358069s" podCreationTimestamp="2026-01-20 18:10:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:10:53.294836007 +0000 UTC m=+5347.055173974" watchObservedRunningTime="2026-01-20 18:10:53.300358069 +0000 UTC m=+5347.060696037" Jan 20 18:10:57 crc kubenswrapper[4558]: I0120 18:10:57.329875 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 18:10:57 crc kubenswrapper[4558]: I0120 18:10:57.330649 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: 
connect: connection refused" Jan 20 18:11:00 crc kubenswrapper[4558]: I0120 18:11:00.770338 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-cfjrb" Jan 20 18:11:00 crc kubenswrapper[4558]: I0120 18:11:00.808449 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-rwjb8"] Jan 20 18:11:00 crc kubenswrapper[4558]: I0120 18:11:00.809210 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-rwjb8" podUID="7ca67738-2068-4c2f-beeb-60751657efef" containerName="dnsmasq-dns" containerID="cri-o://b8c9f852e28de0d1a4c775128c08cb0b2bd3e2b52c3fc03212de7e7fefe37ca3" gracePeriod=10 Jan 20 18:11:01 crc kubenswrapper[4558]: I0120 18:11:01.168996 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-rwjb8" Jan 20 18:11:01 crc kubenswrapper[4558]: I0120 18:11:01.325094 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-znhcd\" (UniqueName: \"kubernetes.io/projected/7ca67738-2068-4c2f-beeb-60751657efef-kube-api-access-znhcd\") pod \"7ca67738-2068-4c2f-beeb-60751657efef\" (UID: \"7ca67738-2068-4c2f-beeb-60751657efef\") " Jan 20 18:11:01 crc kubenswrapper[4558]: I0120 18:11:01.325546 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/7ca67738-2068-4c2f-beeb-60751657efef-dnsmasq-svc\") pod \"7ca67738-2068-4c2f-beeb-60751657efef\" (UID: \"7ca67738-2068-4c2f-beeb-60751657efef\") " Jan 20 18:11:01 crc kubenswrapper[4558]: I0120 18:11:01.325747 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7ca67738-2068-4c2f-beeb-60751657efef-config\") pod \"7ca67738-2068-4c2f-beeb-60751657efef\" (UID: \"7ca67738-2068-4c2f-beeb-60751657efef\") " Jan 20 18:11:01 crc kubenswrapper[4558]: I0120 18:11:01.332198 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7ca67738-2068-4c2f-beeb-60751657efef-kube-api-access-znhcd" (OuterVolumeSpecName: "kube-api-access-znhcd") pod "7ca67738-2068-4c2f-beeb-60751657efef" (UID: "7ca67738-2068-4c2f-beeb-60751657efef"). InnerVolumeSpecName "kube-api-access-znhcd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:11:01 crc kubenswrapper[4558]: I0120 18:11:01.349147 4558 generic.go:334] "Generic (PLEG): container finished" podID="7ca67738-2068-4c2f-beeb-60751657efef" containerID="b8c9f852e28de0d1a4c775128c08cb0b2bd3e2b52c3fc03212de7e7fefe37ca3" exitCode=0 Jan 20 18:11:01 crc kubenswrapper[4558]: I0120 18:11:01.349246 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-rwjb8" Jan 20 18:11:01 crc kubenswrapper[4558]: I0120 18:11:01.349329 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-rwjb8" event={"ID":"7ca67738-2068-4c2f-beeb-60751657efef","Type":"ContainerDied","Data":"b8c9f852e28de0d1a4c775128c08cb0b2bd3e2b52c3fc03212de7e7fefe37ca3"} Jan 20 18:11:01 crc kubenswrapper[4558]: I0120 18:11:01.349449 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-rwjb8" event={"ID":"7ca67738-2068-4c2f-beeb-60751657efef","Type":"ContainerDied","Data":"41792560071babf587f67cfb1c29aeb1df680bead48e72fb8223ff8a6e2a7e62"} Jan 20 18:11:01 crc kubenswrapper[4558]: I0120 18:11:01.349525 4558 scope.go:117] "RemoveContainer" containerID="b8c9f852e28de0d1a4c775128c08cb0b2bd3e2b52c3fc03212de7e7fefe37ca3" Jan 20 18:11:01 crc kubenswrapper[4558]: I0120 18:11:01.360401 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7ca67738-2068-4c2f-beeb-60751657efef-dnsmasq-svc" (OuterVolumeSpecName: "dnsmasq-svc") pod "7ca67738-2068-4c2f-beeb-60751657efef" (UID: "7ca67738-2068-4c2f-beeb-60751657efef"). InnerVolumeSpecName "dnsmasq-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:11:01 crc kubenswrapper[4558]: I0120 18:11:01.360810 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7ca67738-2068-4c2f-beeb-60751657efef-config" (OuterVolumeSpecName: "config") pod "7ca67738-2068-4c2f-beeb-60751657efef" (UID: "7ca67738-2068-4c2f-beeb-60751657efef"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:11:01 crc kubenswrapper[4558]: I0120 18:11:01.383963 4558 scope.go:117] "RemoveContainer" containerID="5102cc42ead7c78a1e402c9fc2c904273cf19126d28bbe80fc61a00a8ba6fc03" Jan 20 18:11:01 crc kubenswrapper[4558]: I0120 18:11:01.402919 4558 scope.go:117] "RemoveContainer" containerID="b8c9f852e28de0d1a4c775128c08cb0b2bd3e2b52c3fc03212de7e7fefe37ca3" Jan 20 18:11:01 crc kubenswrapper[4558]: E0120 18:11:01.403321 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b8c9f852e28de0d1a4c775128c08cb0b2bd3e2b52c3fc03212de7e7fefe37ca3\": container with ID starting with b8c9f852e28de0d1a4c775128c08cb0b2bd3e2b52c3fc03212de7e7fefe37ca3 not found: ID does not exist" containerID="b8c9f852e28de0d1a4c775128c08cb0b2bd3e2b52c3fc03212de7e7fefe37ca3" Jan 20 18:11:01 crc kubenswrapper[4558]: I0120 18:11:01.403420 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b8c9f852e28de0d1a4c775128c08cb0b2bd3e2b52c3fc03212de7e7fefe37ca3"} err="failed to get container status \"b8c9f852e28de0d1a4c775128c08cb0b2bd3e2b52c3fc03212de7e7fefe37ca3\": rpc error: code = NotFound desc = could not find container \"b8c9f852e28de0d1a4c775128c08cb0b2bd3e2b52c3fc03212de7e7fefe37ca3\": container with ID starting with b8c9f852e28de0d1a4c775128c08cb0b2bd3e2b52c3fc03212de7e7fefe37ca3 not found: ID does not exist" Jan 20 18:11:01 crc kubenswrapper[4558]: I0120 18:11:01.403503 4558 scope.go:117] "RemoveContainer" containerID="5102cc42ead7c78a1e402c9fc2c904273cf19126d28bbe80fc61a00a8ba6fc03" Jan 20 18:11:01 crc kubenswrapper[4558]: E0120 18:11:01.404012 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find 
container \"5102cc42ead7c78a1e402c9fc2c904273cf19126d28bbe80fc61a00a8ba6fc03\": container with ID starting with 5102cc42ead7c78a1e402c9fc2c904273cf19126d28bbe80fc61a00a8ba6fc03 not found: ID does not exist" containerID="5102cc42ead7c78a1e402c9fc2c904273cf19126d28bbe80fc61a00a8ba6fc03" Jan 20 18:11:01 crc kubenswrapper[4558]: I0120 18:11:01.404056 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5102cc42ead7c78a1e402c9fc2c904273cf19126d28bbe80fc61a00a8ba6fc03"} err="failed to get container status \"5102cc42ead7c78a1e402c9fc2c904273cf19126d28bbe80fc61a00a8ba6fc03\": rpc error: code = NotFound desc = could not find container \"5102cc42ead7c78a1e402c9fc2c904273cf19126d28bbe80fc61a00a8ba6fc03\": container with ID starting with 5102cc42ead7c78a1e402c9fc2c904273cf19126d28bbe80fc61a00a8ba6fc03 not found: ID does not exist" Jan 20 18:11:01 crc kubenswrapper[4558]: I0120 18:11:01.428627 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-znhcd\" (UniqueName: \"kubernetes.io/projected/7ca67738-2068-4c2f-beeb-60751657efef-kube-api-access-znhcd\") on node \"crc\" DevicePath \"\"" Jan 20 18:11:01 crc kubenswrapper[4558]: I0120 18:11:01.428665 4558 reconciler_common.go:293] "Volume detached for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/7ca67738-2068-4c2f-beeb-60751657efef-dnsmasq-svc\") on node \"crc\" DevicePath \"\"" Jan 20 18:11:01 crc kubenswrapper[4558]: I0120 18:11:01.428680 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7ca67738-2068-4c2f-beeb-60751657efef-config\") on node \"crc\" DevicePath \"\"" Jan 20 18:11:01 crc kubenswrapper[4558]: I0120 18:11:01.677088 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-rwjb8"] Jan 20 18:11:01 crc kubenswrapper[4558]: I0120 18:11:01.684227 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-rwjb8"] Jan 20 18:11:02 crc kubenswrapper[4558]: I0120 18:11:02.575628 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7ca67738-2068-4c2f-beeb-60751657efef" path="/var/lib/kubelet/pods/7ca67738-2068-4c2f-beeb-60751657efef/volumes" Jan 20 18:11:05 crc kubenswrapper[4558]: I0120 18:11:05.410212 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59"] Jan 20 18:11:05 crc kubenswrapper[4558]: E0120 18:11:05.410790 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ca67738-2068-4c2f-beeb-60751657efef" containerName="init" Jan 20 18:11:05 crc kubenswrapper[4558]: I0120 18:11:05.410803 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ca67738-2068-4c2f-beeb-60751657efef" containerName="init" Jan 20 18:11:05 crc kubenswrapper[4558]: E0120 18:11:05.410822 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ca67738-2068-4c2f-beeb-60751657efef" containerName="dnsmasq-dns" Jan 20 18:11:05 crc kubenswrapper[4558]: I0120 18:11:05.410828 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ca67738-2068-4c2f-beeb-60751657efef" containerName="dnsmasq-dns" Jan 20 18:11:05 crc kubenswrapper[4558]: I0120 18:11:05.411000 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="7ca67738-2068-4c2f-beeb-60751657efef" containerName="dnsmasq-dns" Jan 20 18:11:05 crc kubenswrapper[4558]: I0120 18:11:05.411468 4558 util.go:30] "No sandbox for 
pod can be found. Need to start a new one" pod="openstack-kuttl-tests/kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59" Jan 20 18:11:05 crc kubenswrapper[4558]: I0120 18:11:05.413590 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-edpm-compute-no-nodes" Jan 20 18:11:05 crc kubenswrapper[4558]: I0120 18:11:05.413743 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"kuttl-service-cm-2" Jan 20 18:11:05 crc kubenswrapper[4558]: I0120 18:11:05.413919 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 18:11:05 crc kubenswrapper[4558]: I0120 18:11:05.414116 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"edpm-compute-no-nodes-dockercfg-dlxmk" Jan 20 18:11:05 crc kubenswrapper[4558]: I0120 18:11:05.414255 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 18:11:05 crc kubenswrapper[4558]: I0120 18:11:05.414409 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"combined-ca-bundle" Jan 20 18:11:05 crc kubenswrapper[4558]: I0120 18:11:05.414522 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"kuttl-service-cm-1" Jan 20 18:11:05 crc kubenswrapper[4558]: I0120 18:11:05.418231 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"kuttl-service-cm-0" Jan 20 18:11:05 crc kubenswrapper[4558]: I0120 18:11:05.424398 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59"] Jan 20 18:11:05 crc kubenswrapper[4558]: I0120 18:11:05.583798 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kuttl-service-cm-0-0\" (UniqueName: \"kubernetes.io/configmap/9b20dd6f-c3af-49f2-bcc0-b13bf183b34e-kuttl-service-cm-0-0\") pod \"kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59\" (UID: \"9b20dd6f-c3af-49f2-bcc0-b13bf183b34e\") " pod="openstack-kuttl-tests/kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59" Jan 20 18:11:05 crc kubenswrapper[4558]: I0120 18:11:05.583873 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jxz4l\" (UniqueName: \"kubernetes.io/projected/9b20dd6f-c3af-49f2-bcc0-b13bf183b34e-kube-api-access-jxz4l\") pod \"kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59\" (UID: \"9b20dd6f-c3af-49f2-bcc0-b13bf183b34e\") " pod="openstack-kuttl-tests/kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59" Jan 20 18:11:05 crc kubenswrapper[4558]: I0120 18:11:05.583946 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kuttl-service-cm-1-2\" (UniqueName: \"kubernetes.io/configmap/9b20dd6f-c3af-49f2-bcc0-b13bf183b34e-kuttl-service-cm-1-2\") pod \"kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59\" (UID: \"9b20dd6f-c3af-49f2-bcc0-b13bf183b34e\") " pod="openstack-kuttl-tests/kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59" Jan 20 18:11:05 crc kubenswrapper[4558]: I0120 18:11:05.583976 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kuttl-service-cm-1-1\" (UniqueName: 
\"kubernetes.io/configmap/9b20dd6f-c3af-49f2-bcc0-b13bf183b34e-kuttl-service-cm-1-1\") pod \"kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59\" (UID: \"9b20dd6f-c3af-49f2-bcc0-b13bf183b34e\") " pod="openstack-kuttl-tests/kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59" Jan 20 18:11:05 crc kubenswrapper[4558]: I0120 18:11:05.584008 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9b20dd6f-c3af-49f2-bcc0-b13bf183b34e-inventory\") pod \"kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59\" (UID: \"9b20dd6f-c3af-49f2-bcc0-b13bf183b34e\") " pod="openstack-kuttl-tests/kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59" Jan 20 18:11:05 crc kubenswrapper[4558]: I0120 18:11:05.584032 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/9b20dd6f-c3af-49f2-bcc0-b13bf183b34e-ssh-key-edpm-compute-no-nodes\") pod \"kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59\" (UID: \"9b20dd6f-c3af-49f2-bcc0-b13bf183b34e\") " pod="openstack-kuttl-tests/kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59" Jan 20 18:11:05 crc kubenswrapper[4558]: I0120 18:11:05.584176 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kuttl-service-cm-0-1\" (UniqueName: \"kubernetes.io/configmap/9b20dd6f-c3af-49f2-bcc0-b13bf183b34e-kuttl-service-cm-0-1\") pod \"kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59\" (UID: \"9b20dd6f-c3af-49f2-bcc0-b13bf183b34e\") " pod="openstack-kuttl-tests/kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59" Jan 20 18:11:05 crc kubenswrapper[4558]: I0120 18:11:05.584237 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kuttl-service-cm-0-2\" (UniqueName: \"kubernetes.io/configmap/9b20dd6f-c3af-49f2-bcc0-b13bf183b34e-kuttl-service-cm-0-2\") pod \"kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59\" (UID: \"9b20dd6f-c3af-49f2-bcc0-b13bf183b34e\") " pod="openstack-kuttl-tests/kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59" Jan 20 18:11:05 crc kubenswrapper[4558]: I0120 18:11:05.584257 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kuttl-service-cm-2-0\" (UniqueName: \"kubernetes.io/configmap/9b20dd6f-c3af-49f2-bcc0-b13bf183b34e-kuttl-service-cm-2-0\") pod \"kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59\" (UID: \"9b20dd6f-c3af-49f2-bcc0-b13bf183b34e\") " pod="openstack-kuttl-tests/kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59" Jan 20 18:11:05 crc kubenswrapper[4558]: I0120 18:11:05.584290 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kuttl-service-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b20dd6f-c3af-49f2-bcc0-b13bf183b34e-kuttl-service-combined-ca-bundle\") pod \"kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59\" (UID: \"9b20dd6f-c3af-49f2-bcc0-b13bf183b34e\") " pod="openstack-kuttl-tests/kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59" Jan 20 18:11:05 crc kubenswrapper[4558]: I0120 18:11:05.584312 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kuttl-service-cm-1-0\" (UniqueName: 
\"kubernetes.io/configmap/9b20dd6f-c3af-49f2-bcc0-b13bf183b34e-kuttl-service-cm-1-0\") pod \"kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59\" (UID: \"9b20dd6f-c3af-49f2-bcc0-b13bf183b34e\") " pod="openstack-kuttl-tests/kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59" Jan 20 18:11:05 crc kubenswrapper[4558]: I0120 18:11:05.686051 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kuttl-service-cm-0-1\" (UniqueName: \"kubernetes.io/configmap/9b20dd6f-c3af-49f2-bcc0-b13bf183b34e-kuttl-service-cm-0-1\") pod \"kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59\" (UID: \"9b20dd6f-c3af-49f2-bcc0-b13bf183b34e\") " pod="openstack-kuttl-tests/kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59" Jan 20 18:11:05 crc kubenswrapper[4558]: I0120 18:11:05.686115 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kuttl-service-cm-0-2\" (UniqueName: \"kubernetes.io/configmap/9b20dd6f-c3af-49f2-bcc0-b13bf183b34e-kuttl-service-cm-0-2\") pod \"kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59\" (UID: \"9b20dd6f-c3af-49f2-bcc0-b13bf183b34e\") " pod="openstack-kuttl-tests/kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59" Jan 20 18:11:05 crc kubenswrapper[4558]: I0120 18:11:05.686138 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kuttl-service-cm-2-0\" (UniqueName: \"kubernetes.io/configmap/9b20dd6f-c3af-49f2-bcc0-b13bf183b34e-kuttl-service-cm-2-0\") pod \"kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59\" (UID: \"9b20dd6f-c3af-49f2-bcc0-b13bf183b34e\") " pod="openstack-kuttl-tests/kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59" Jan 20 18:11:05 crc kubenswrapper[4558]: I0120 18:11:05.686206 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kuttl-service-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b20dd6f-c3af-49f2-bcc0-b13bf183b34e-kuttl-service-combined-ca-bundle\") pod \"kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59\" (UID: \"9b20dd6f-c3af-49f2-bcc0-b13bf183b34e\") " pod="openstack-kuttl-tests/kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59" Jan 20 18:11:05 crc kubenswrapper[4558]: I0120 18:11:05.686234 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kuttl-service-cm-1-0\" (UniqueName: \"kubernetes.io/configmap/9b20dd6f-c3af-49f2-bcc0-b13bf183b34e-kuttl-service-cm-1-0\") pod \"kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59\" (UID: \"9b20dd6f-c3af-49f2-bcc0-b13bf183b34e\") " pod="openstack-kuttl-tests/kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59" Jan 20 18:11:05 crc kubenswrapper[4558]: I0120 18:11:05.686268 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kuttl-service-cm-0-0\" (UniqueName: \"kubernetes.io/configmap/9b20dd6f-c3af-49f2-bcc0-b13bf183b34e-kuttl-service-cm-0-0\") pod \"kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59\" (UID: \"9b20dd6f-c3af-49f2-bcc0-b13bf183b34e\") " pod="openstack-kuttl-tests/kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59" Jan 20 18:11:05 crc kubenswrapper[4558]: I0120 18:11:05.686332 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jxz4l\" (UniqueName: \"kubernetes.io/projected/9b20dd6f-c3af-49f2-bcc0-b13bf183b34e-kube-api-access-jxz4l\") pod 
\"kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59\" (UID: \"9b20dd6f-c3af-49f2-bcc0-b13bf183b34e\") " pod="openstack-kuttl-tests/kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59" Jan 20 18:11:05 crc kubenswrapper[4558]: I0120 18:11:05.686408 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kuttl-service-cm-1-2\" (UniqueName: \"kubernetes.io/configmap/9b20dd6f-c3af-49f2-bcc0-b13bf183b34e-kuttl-service-cm-1-2\") pod \"kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59\" (UID: \"9b20dd6f-c3af-49f2-bcc0-b13bf183b34e\") " pod="openstack-kuttl-tests/kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59" Jan 20 18:11:05 crc kubenswrapper[4558]: I0120 18:11:05.686436 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kuttl-service-cm-1-1\" (UniqueName: \"kubernetes.io/configmap/9b20dd6f-c3af-49f2-bcc0-b13bf183b34e-kuttl-service-cm-1-1\") pod \"kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59\" (UID: \"9b20dd6f-c3af-49f2-bcc0-b13bf183b34e\") " pod="openstack-kuttl-tests/kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59" Jan 20 18:11:05 crc kubenswrapper[4558]: I0120 18:11:05.686476 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9b20dd6f-c3af-49f2-bcc0-b13bf183b34e-inventory\") pod \"kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59\" (UID: \"9b20dd6f-c3af-49f2-bcc0-b13bf183b34e\") " pod="openstack-kuttl-tests/kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59" Jan 20 18:11:05 crc kubenswrapper[4558]: I0120 18:11:05.686500 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/9b20dd6f-c3af-49f2-bcc0-b13bf183b34e-ssh-key-edpm-compute-no-nodes\") pod \"kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59\" (UID: \"9b20dd6f-c3af-49f2-bcc0-b13bf183b34e\") " pod="openstack-kuttl-tests/kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59" Jan 20 18:11:05 crc kubenswrapper[4558]: I0120 18:11:05.687177 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kuttl-service-cm-0-1\" (UniqueName: \"kubernetes.io/configmap/9b20dd6f-c3af-49f2-bcc0-b13bf183b34e-kuttl-service-cm-0-1\") pod \"kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59\" (UID: \"9b20dd6f-c3af-49f2-bcc0-b13bf183b34e\") " pod="openstack-kuttl-tests/kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59" Jan 20 18:11:05 crc kubenswrapper[4558]: I0120 18:11:05.688098 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kuttl-service-cm-0-2\" (UniqueName: \"kubernetes.io/configmap/9b20dd6f-c3af-49f2-bcc0-b13bf183b34e-kuttl-service-cm-0-2\") pod \"kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59\" (UID: \"9b20dd6f-c3af-49f2-bcc0-b13bf183b34e\") " pod="openstack-kuttl-tests/kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59" Jan 20 18:11:05 crc kubenswrapper[4558]: I0120 18:11:05.688558 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kuttl-service-cm-0-0\" (UniqueName: \"kubernetes.io/configmap/9b20dd6f-c3af-49f2-bcc0-b13bf183b34e-kuttl-service-cm-0-0\") pod \"kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59\" (UID: \"9b20dd6f-c3af-49f2-bcc0-b13bf183b34e\") " 
pod="openstack-kuttl-tests/kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59" Jan 20 18:11:05 crc kubenswrapper[4558]: I0120 18:11:05.688723 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kuttl-service-cm-1-1\" (UniqueName: \"kubernetes.io/configmap/9b20dd6f-c3af-49f2-bcc0-b13bf183b34e-kuttl-service-cm-1-1\") pod \"kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59\" (UID: \"9b20dd6f-c3af-49f2-bcc0-b13bf183b34e\") " pod="openstack-kuttl-tests/kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59" Jan 20 18:11:05 crc kubenswrapper[4558]: I0120 18:11:05.688744 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kuttl-service-cm-1-2\" (UniqueName: \"kubernetes.io/configmap/9b20dd6f-c3af-49f2-bcc0-b13bf183b34e-kuttl-service-cm-1-2\") pod \"kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59\" (UID: \"9b20dd6f-c3af-49f2-bcc0-b13bf183b34e\") " pod="openstack-kuttl-tests/kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59" Jan 20 18:11:05 crc kubenswrapper[4558]: I0120 18:11:05.688959 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kuttl-service-cm-1-0\" (UniqueName: \"kubernetes.io/configmap/9b20dd6f-c3af-49f2-bcc0-b13bf183b34e-kuttl-service-cm-1-0\") pod \"kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59\" (UID: \"9b20dd6f-c3af-49f2-bcc0-b13bf183b34e\") " pod="openstack-kuttl-tests/kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59" Jan 20 18:11:05 crc kubenswrapper[4558]: I0120 18:11:05.689040 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kuttl-service-cm-2-0\" (UniqueName: \"kubernetes.io/configmap/9b20dd6f-c3af-49f2-bcc0-b13bf183b34e-kuttl-service-cm-2-0\") pod \"kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59\" (UID: \"9b20dd6f-c3af-49f2-bcc0-b13bf183b34e\") " pod="openstack-kuttl-tests/kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59" Jan 20 18:11:05 crc kubenswrapper[4558]: I0120 18:11:05.693416 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/9b20dd6f-c3af-49f2-bcc0-b13bf183b34e-ssh-key-edpm-compute-no-nodes\") pod \"kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59\" (UID: \"9b20dd6f-c3af-49f2-bcc0-b13bf183b34e\") " pod="openstack-kuttl-tests/kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59" Jan 20 18:11:05 crc kubenswrapper[4558]: I0120 18:11:05.693561 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kuttl-service-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b20dd6f-c3af-49f2-bcc0-b13bf183b34e-kuttl-service-combined-ca-bundle\") pod \"kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59\" (UID: \"9b20dd6f-c3af-49f2-bcc0-b13bf183b34e\") " pod="openstack-kuttl-tests/kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59" Jan 20 18:11:05 crc kubenswrapper[4558]: I0120 18:11:05.693884 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9b20dd6f-c3af-49f2-bcc0-b13bf183b34e-inventory\") pod \"kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59\" (UID: \"9b20dd6f-c3af-49f2-bcc0-b13bf183b34e\") " pod="openstack-kuttl-tests/kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59" Jan 20 18:11:05 crc kubenswrapper[4558]: I0120 18:11:05.704550 4558 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"kube-api-access-jxz4l\" (UniqueName: \"kubernetes.io/projected/9b20dd6f-c3af-49f2-bcc0-b13bf183b34e-kube-api-access-jxz4l\") pod \"kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59\" (UID: \"9b20dd6f-c3af-49f2-bcc0-b13bf183b34e\") " pod="openstack-kuttl-tests/kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59" Jan 20 18:11:05 crc kubenswrapper[4558]: I0120 18:11:05.727991 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59" Jan 20 18:11:06 crc kubenswrapper[4558]: I0120 18:11:06.127971 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59"] Jan 20 18:11:06 crc kubenswrapper[4558]: W0120 18:11:06.131399 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9b20dd6f_c3af_49f2_bcc0_b13bf183b34e.slice/crio-b5a3af83db95a9f52059965e565a0e9640ebd2dd88e65af7fa5446fd956ae98e WatchSource:0}: Error finding container b5a3af83db95a9f52059965e565a0e9640ebd2dd88e65af7fa5446fd956ae98e: Status 404 returned error can't find the container with id b5a3af83db95a9f52059965e565a0e9640ebd2dd88e65af7fa5446fd956ae98e Jan 20 18:11:06 crc kubenswrapper[4558]: I0120 18:11:06.398725 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59" event={"ID":"9b20dd6f-c3af-49f2-bcc0-b13bf183b34e","Type":"ContainerStarted","Data":"b5a3af83db95a9f52059965e565a0e9640ebd2dd88e65af7fa5446fd956ae98e"} Jan 20 18:11:07 crc kubenswrapper[4558]: I0120 18:11:07.409406 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59" event={"ID":"9b20dd6f-c3af-49f2-bcc0-b13bf183b34e","Type":"ContainerStarted","Data":"3c01c9129dba8e4eb6c418cd9370953b2dc3f6c1833be57cd391ffb18f2feaf5"} Jan 20 18:11:07 crc kubenswrapper[4558]: I0120 18:11:07.445440 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59" podStartSLOduration=1.659150307 podStartE2EDuration="2.44542515s" podCreationTimestamp="2026-01-20 18:11:05 +0000 UTC" firstStartedPulling="2026-01-20 18:11:06.133977238 +0000 UTC m=+5359.894315205" lastFinishedPulling="2026-01-20 18:11:06.92025209 +0000 UTC m=+5360.680590048" observedRunningTime="2026-01-20 18:11:07.440781098 +0000 UTC m=+5361.201119065" watchObservedRunningTime="2026-01-20 18:11:07.44542515 +0000 UTC m=+5361.205763117" Jan 20 18:11:08 crc kubenswrapper[4558]: I0120 18:11:08.488356 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59"] Jan 20 18:11:08 crc kubenswrapper[4558]: I0120 18:11:08.533632 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-7kvrx"] Jan 20 18:11:08 crc kubenswrapper[4558]: I0120 18:11:08.535183 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-7kvrx" Jan 20 18:11:08 crc kubenswrapper[4558]: I0120 18:11:08.545890 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-7kvrx"] Jan 20 18:11:08 crc kubenswrapper[4558]: I0120 18:11:08.548729 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/3737f1f8-bbaa-464b-825a-2d6080e8bc3c-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-84b9f45d47-7kvrx\" (UID: \"3737f1f8-bbaa-464b-825a-2d6080e8bc3c\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-7kvrx" Jan 20 18:11:08 crc kubenswrapper[4558]: I0120 18:11:08.548806 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3737f1f8-bbaa-464b-825a-2d6080e8bc3c-config\") pod \"dnsmasq-dnsmasq-84b9f45d47-7kvrx\" (UID: \"3737f1f8-bbaa-464b-825a-2d6080e8bc3c\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-7kvrx" Jan 20 18:11:08 crc kubenswrapper[4558]: I0120 18:11:08.548884 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-stmjg\" (UniqueName: \"kubernetes.io/projected/3737f1f8-bbaa-464b-825a-2d6080e8bc3c-kube-api-access-stmjg\") pod \"dnsmasq-dnsmasq-84b9f45d47-7kvrx\" (UID: \"3737f1f8-bbaa-464b-825a-2d6080e8bc3c\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-7kvrx" Jan 20 18:11:08 crc kubenswrapper[4558]: I0120 18:11:08.650361 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/3737f1f8-bbaa-464b-825a-2d6080e8bc3c-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-84b9f45d47-7kvrx\" (UID: \"3737f1f8-bbaa-464b-825a-2d6080e8bc3c\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-7kvrx" Jan 20 18:11:08 crc kubenswrapper[4558]: I0120 18:11:08.650753 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3737f1f8-bbaa-464b-825a-2d6080e8bc3c-config\") pod \"dnsmasq-dnsmasq-84b9f45d47-7kvrx\" (UID: \"3737f1f8-bbaa-464b-825a-2d6080e8bc3c\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-7kvrx" Jan 20 18:11:08 crc kubenswrapper[4558]: I0120 18:11:08.650802 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-stmjg\" (UniqueName: \"kubernetes.io/projected/3737f1f8-bbaa-464b-825a-2d6080e8bc3c-kube-api-access-stmjg\") pod \"dnsmasq-dnsmasq-84b9f45d47-7kvrx\" (UID: \"3737f1f8-bbaa-464b-825a-2d6080e8bc3c\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-7kvrx" Jan 20 18:11:08 crc kubenswrapper[4558]: I0120 18:11:08.651751 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3737f1f8-bbaa-464b-825a-2d6080e8bc3c-config\") pod \"dnsmasq-dnsmasq-84b9f45d47-7kvrx\" (UID: \"3737f1f8-bbaa-464b-825a-2d6080e8bc3c\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-7kvrx" Jan 20 18:11:08 crc kubenswrapper[4558]: I0120 18:11:08.652008 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/3737f1f8-bbaa-464b-825a-2d6080e8bc3c-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-84b9f45d47-7kvrx\" (UID: \"3737f1f8-bbaa-464b-825a-2d6080e8bc3c\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-7kvrx" Jan 20 
18:11:08 crc kubenswrapper[4558]: I0120 18:11:08.678037 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-stmjg\" (UniqueName: \"kubernetes.io/projected/3737f1f8-bbaa-464b-825a-2d6080e8bc3c-kube-api-access-stmjg\") pod \"dnsmasq-dnsmasq-84b9f45d47-7kvrx\" (UID: \"3737f1f8-bbaa-464b-825a-2d6080e8bc3c\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-7kvrx" Jan 20 18:11:08 crc kubenswrapper[4558]: I0120 18:11:08.854052 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-7kvrx" Jan 20 18:11:09 crc kubenswrapper[4558]: I0120 18:11:09.263830 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-7kvrx"] Jan 20 18:11:09 crc kubenswrapper[4558]: I0120 18:11:09.429782 4558 generic.go:334] "Generic (PLEG): container finished" podID="3737f1f8-bbaa-464b-825a-2d6080e8bc3c" containerID="1c9be81a1d6c6c1e1d522a23cd23d731473da8b07de763e31630d3e338acb1bf" exitCode=0 Jan 20 18:11:09 crc kubenswrapper[4558]: I0120 18:11:09.429859 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-7kvrx" event={"ID":"3737f1f8-bbaa-464b-825a-2d6080e8bc3c","Type":"ContainerDied","Data":"1c9be81a1d6c6c1e1d522a23cd23d731473da8b07de763e31630d3e338acb1bf"} Jan 20 18:11:09 crc kubenswrapper[4558]: I0120 18:11:09.430229 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-7kvrx" event={"ID":"3737f1f8-bbaa-464b-825a-2d6080e8bc3c","Type":"ContainerStarted","Data":"d5238d925061d4943fa81def51dee7f0111626d012d966358afbdde112bdc855"} Jan 20 18:11:09 crc kubenswrapper[4558]: I0120 18:11:09.430341 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59" podUID="9b20dd6f-c3af-49f2-bcc0-b13bf183b34e" containerName="kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes" containerID="cri-o://3c01c9129dba8e4eb6c418cd9370953b2dc3f6c1833be57cd391ffb18f2feaf5" gracePeriod=30 Jan 20 18:11:10 crc kubenswrapper[4558]: I0120 18:11:10.441744 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-7kvrx" event={"ID":"3737f1f8-bbaa-464b-825a-2d6080e8bc3c","Type":"ContainerStarted","Data":"9b96d50f333d399bfe2cd143ea78aadb1c68e344d0f3c4b9e8933659e5f38e12"} Jan 20 18:11:10 crc kubenswrapper[4558]: I0120 18:11:10.442189 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-7kvrx" Jan 20 18:11:10 crc kubenswrapper[4558]: I0120 18:11:10.462499 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-7kvrx" podStartSLOduration=2.462479441 podStartE2EDuration="2.462479441s" podCreationTimestamp="2026-01-20 18:11:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:11:10.459976404 +0000 UTC m=+5364.220314371" watchObservedRunningTime="2026-01-20 18:11:10.462479441 +0000 UTC m=+5364.222817408" Jan 20 18:11:15 crc kubenswrapper[4558]: I0120 18:11:15.006513 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-gt7mt"] Jan 20 18:11:15 crc kubenswrapper[4558]: I0120 18:11:15.010782 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["crc-storage/crc-storage-crc-gt7mt"] Jan 20 18:11:15 crc kubenswrapper[4558]: I0120 18:11:15.122419 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-q7mjn"] Jan 20 18:11:15 crc kubenswrapper[4558]: I0120 18:11:15.124021 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-q7mjn" Jan 20 18:11:15 crc kubenswrapper[4558]: I0120 18:11:15.128395 4558 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-k9ht8" Jan 20 18:11:15 crc kubenswrapper[4558]: I0120 18:11:15.129599 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Jan 20 18:11:15 crc kubenswrapper[4558]: I0120 18:11:15.129802 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Jan 20 18:11:15 crc kubenswrapper[4558]: I0120 18:11:15.130686 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Jan 20 18:11:15 crc kubenswrapper[4558]: I0120 18:11:15.133428 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-q7mjn"] Jan 20 18:11:15 crc kubenswrapper[4558]: I0120 18:11:15.157694 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kxdp4\" (UniqueName: \"kubernetes.io/projected/87a2a57d-1ee8-4dc9-8e14-e136a5f849b4-kube-api-access-kxdp4\") pod \"crc-storage-crc-q7mjn\" (UID: \"87a2a57d-1ee8-4dc9-8e14-e136a5f849b4\") " pod="crc-storage/crc-storage-crc-q7mjn" Jan 20 18:11:15 crc kubenswrapper[4558]: I0120 18:11:15.157808 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/87a2a57d-1ee8-4dc9-8e14-e136a5f849b4-node-mnt\") pod \"crc-storage-crc-q7mjn\" (UID: \"87a2a57d-1ee8-4dc9-8e14-e136a5f849b4\") " pod="crc-storage/crc-storage-crc-q7mjn" Jan 20 18:11:15 crc kubenswrapper[4558]: I0120 18:11:15.157851 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/87a2a57d-1ee8-4dc9-8e14-e136a5f849b4-crc-storage\") pod \"crc-storage-crc-q7mjn\" (UID: \"87a2a57d-1ee8-4dc9-8e14-e136a5f849b4\") " pod="crc-storage/crc-storage-crc-q7mjn" Jan 20 18:11:15 crc kubenswrapper[4558]: I0120 18:11:15.259579 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/87a2a57d-1ee8-4dc9-8e14-e136a5f849b4-crc-storage\") pod \"crc-storage-crc-q7mjn\" (UID: \"87a2a57d-1ee8-4dc9-8e14-e136a5f849b4\") " pod="crc-storage/crc-storage-crc-q7mjn" Jan 20 18:11:15 crc kubenswrapper[4558]: I0120 18:11:15.259700 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kxdp4\" (UniqueName: \"kubernetes.io/projected/87a2a57d-1ee8-4dc9-8e14-e136a5f849b4-kube-api-access-kxdp4\") pod \"crc-storage-crc-q7mjn\" (UID: \"87a2a57d-1ee8-4dc9-8e14-e136a5f849b4\") " pod="crc-storage/crc-storage-crc-q7mjn" Jan 20 18:11:15 crc kubenswrapper[4558]: I0120 18:11:15.259759 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/87a2a57d-1ee8-4dc9-8e14-e136a5f849b4-node-mnt\") pod \"crc-storage-crc-q7mjn\" (UID: \"87a2a57d-1ee8-4dc9-8e14-e136a5f849b4\") " pod="crc-storage/crc-storage-crc-q7mjn" Jan 20 18:11:15 crc 
kubenswrapper[4558]: I0120 18:11:15.260037 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/87a2a57d-1ee8-4dc9-8e14-e136a5f849b4-node-mnt\") pod \"crc-storage-crc-q7mjn\" (UID: \"87a2a57d-1ee8-4dc9-8e14-e136a5f849b4\") " pod="crc-storage/crc-storage-crc-q7mjn" Jan 20 18:11:15 crc kubenswrapper[4558]: I0120 18:11:15.260382 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/87a2a57d-1ee8-4dc9-8e14-e136a5f849b4-crc-storage\") pod \"crc-storage-crc-q7mjn\" (UID: \"87a2a57d-1ee8-4dc9-8e14-e136a5f849b4\") " pod="crc-storage/crc-storage-crc-q7mjn" Jan 20 18:11:15 crc kubenswrapper[4558]: I0120 18:11:15.277441 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kxdp4\" (UniqueName: \"kubernetes.io/projected/87a2a57d-1ee8-4dc9-8e14-e136a5f849b4-kube-api-access-kxdp4\") pod \"crc-storage-crc-q7mjn\" (UID: \"87a2a57d-1ee8-4dc9-8e14-e136a5f849b4\") " pod="crc-storage/crc-storage-crc-q7mjn" Jan 20 18:11:15 crc kubenswrapper[4558]: I0120 18:11:15.445711 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-q7mjn" Jan 20 18:11:15 crc kubenswrapper[4558]: I0120 18:11:15.819979 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-q7mjn"] Jan 20 18:11:16 crc kubenswrapper[4558]: I0120 18:11:16.506125 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-q7mjn" event={"ID":"87a2a57d-1ee8-4dc9-8e14-e136a5f849b4","Type":"ContainerStarted","Data":"ab706373fa7f7b9f5d1319932c3c5f8e65e5ba834bf7b5a2620b7b79add518c2"} Jan 20 18:11:16 crc kubenswrapper[4558]: I0120 18:11:16.577656 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e8de84db-3651-48e8-97df-4c84bdcdc0aa" path="/var/lib/kubelet/pods/e8de84db-3651-48e8-97df-4c84bdcdc0aa/volumes" Jan 20 18:11:17 crc kubenswrapper[4558]: I0120 18:11:17.514876 4558 generic.go:334] "Generic (PLEG): container finished" podID="87a2a57d-1ee8-4dc9-8e14-e136a5f849b4" containerID="3d1d1b07c7f18dbf75745ca312859d024aefcf96fa2be134dbdf2d603d158e21" exitCode=0 Jan 20 18:11:17 crc kubenswrapper[4558]: I0120 18:11:17.514932 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-q7mjn" event={"ID":"87a2a57d-1ee8-4dc9-8e14-e136a5f849b4","Type":"ContainerDied","Data":"3d1d1b07c7f18dbf75745ca312859d024aefcf96fa2be134dbdf2d603d158e21"} Jan 20 18:11:17 crc kubenswrapper[4558]: I0120 18:11:17.518736 4558 generic.go:334] "Generic (PLEG): container finished" podID="9b20dd6f-c3af-49f2-bcc0-b13bf183b34e" containerID="3c01c9129dba8e4eb6c418cd9370953b2dc3f6c1833be57cd391ffb18f2feaf5" exitCode=254 Jan 20 18:11:17 crc kubenswrapper[4558]: I0120 18:11:17.518775 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59" event={"ID":"9b20dd6f-c3af-49f2-bcc0-b13bf183b34e","Type":"ContainerDied","Data":"3c01c9129dba8e4eb6c418cd9370953b2dc3f6c1833be57cd391ffb18f2feaf5"} Jan 20 18:11:17 crc kubenswrapper[4558]: I0120 18:11:17.692943 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59" Jan 20 18:11:17 crc kubenswrapper[4558]: I0120 18:11:17.698685 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kuttl-service-cm-0-1\" (UniqueName: \"kubernetes.io/configmap/9b20dd6f-c3af-49f2-bcc0-b13bf183b34e-kuttl-service-cm-0-1\") pod \"9b20dd6f-c3af-49f2-bcc0-b13bf183b34e\" (UID: \"9b20dd6f-c3af-49f2-bcc0-b13bf183b34e\") " Jan 20 18:11:17 crc kubenswrapper[4558]: I0120 18:11:17.699045 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kuttl-service-cm-1-2\" (UniqueName: \"kubernetes.io/configmap/9b20dd6f-c3af-49f2-bcc0-b13bf183b34e-kuttl-service-cm-1-2\") pod \"9b20dd6f-c3af-49f2-bcc0-b13bf183b34e\" (UID: \"9b20dd6f-c3af-49f2-bcc0-b13bf183b34e\") " Jan 20 18:11:17 crc kubenswrapper[4558]: I0120 18:11:17.728053 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9b20dd6f-c3af-49f2-bcc0-b13bf183b34e-kuttl-service-cm-1-2" (OuterVolumeSpecName: "kuttl-service-cm-1-2") pod "9b20dd6f-c3af-49f2-bcc0-b13bf183b34e" (UID: "9b20dd6f-c3af-49f2-bcc0-b13bf183b34e"). InnerVolumeSpecName "kuttl-service-cm-1-2". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:11:17 crc kubenswrapper[4558]: I0120 18:11:17.730758 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9b20dd6f-c3af-49f2-bcc0-b13bf183b34e-kuttl-service-cm-0-1" (OuterVolumeSpecName: "kuttl-service-cm-0-1") pod "9b20dd6f-c3af-49f2-bcc0-b13bf183b34e" (UID: "9b20dd6f-c3af-49f2-bcc0-b13bf183b34e"). InnerVolumeSpecName "kuttl-service-cm-0-1". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:11:17 crc kubenswrapper[4558]: I0120 18:11:17.801388 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kuttl-service-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b20dd6f-c3af-49f2-bcc0-b13bf183b34e-kuttl-service-combined-ca-bundle\") pod \"9b20dd6f-c3af-49f2-bcc0-b13bf183b34e\" (UID: \"9b20dd6f-c3af-49f2-bcc0-b13bf183b34e\") " Jan 20 18:11:17 crc kubenswrapper[4558]: I0120 18:11:17.801434 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kuttl-service-cm-0-0\" (UniqueName: \"kubernetes.io/configmap/9b20dd6f-c3af-49f2-bcc0-b13bf183b34e-kuttl-service-cm-0-0\") pod \"9b20dd6f-c3af-49f2-bcc0-b13bf183b34e\" (UID: \"9b20dd6f-c3af-49f2-bcc0-b13bf183b34e\") " Jan 20 18:11:17 crc kubenswrapper[4558]: I0120 18:11:17.801473 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jxz4l\" (UniqueName: \"kubernetes.io/projected/9b20dd6f-c3af-49f2-bcc0-b13bf183b34e-kube-api-access-jxz4l\") pod \"9b20dd6f-c3af-49f2-bcc0-b13bf183b34e\" (UID: \"9b20dd6f-c3af-49f2-bcc0-b13bf183b34e\") " Jan 20 18:11:17 crc kubenswrapper[4558]: I0120 18:11:17.801494 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kuttl-service-cm-1-1\" (UniqueName: \"kubernetes.io/configmap/9b20dd6f-c3af-49f2-bcc0-b13bf183b34e-kuttl-service-cm-1-1\") pod \"9b20dd6f-c3af-49f2-bcc0-b13bf183b34e\" (UID: \"9b20dd6f-c3af-49f2-bcc0-b13bf183b34e\") " Jan 20 18:11:17 crc kubenswrapper[4558]: I0120 18:11:17.801518 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kuttl-service-cm-2-0\" (UniqueName: \"kubernetes.io/configmap/9b20dd6f-c3af-49f2-bcc0-b13bf183b34e-kuttl-service-cm-2-0\") pod 
\"9b20dd6f-c3af-49f2-bcc0-b13bf183b34e\" (UID: \"9b20dd6f-c3af-49f2-bcc0-b13bf183b34e\") " Jan 20 18:11:17 crc kubenswrapper[4558]: I0120 18:11:17.801550 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kuttl-service-cm-0-2\" (UniqueName: \"kubernetes.io/configmap/9b20dd6f-c3af-49f2-bcc0-b13bf183b34e-kuttl-service-cm-0-2\") pod \"9b20dd6f-c3af-49f2-bcc0-b13bf183b34e\" (UID: \"9b20dd6f-c3af-49f2-bcc0-b13bf183b34e\") " Jan 20 18:11:17 crc kubenswrapper[4558]: I0120 18:11:17.801595 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9b20dd6f-c3af-49f2-bcc0-b13bf183b34e-inventory\") pod \"9b20dd6f-c3af-49f2-bcc0-b13bf183b34e\" (UID: \"9b20dd6f-c3af-49f2-bcc0-b13bf183b34e\") " Jan 20 18:11:17 crc kubenswrapper[4558]: I0120 18:11:17.801619 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/9b20dd6f-c3af-49f2-bcc0-b13bf183b34e-ssh-key-edpm-compute-no-nodes\") pod \"9b20dd6f-c3af-49f2-bcc0-b13bf183b34e\" (UID: \"9b20dd6f-c3af-49f2-bcc0-b13bf183b34e\") " Jan 20 18:11:17 crc kubenswrapper[4558]: I0120 18:11:17.801720 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kuttl-service-cm-1-0\" (UniqueName: \"kubernetes.io/configmap/9b20dd6f-c3af-49f2-bcc0-b13bf183b34e-kuttl-service-cm-1-0\") pod \"9b20dd6f-c3af-49f2-bcc0-b13bf183b34e\" (UID: \"9b20dd6f-c3af-49f2-bcc0-b13bf183b34e\") " Jan 20 18:11:17 crc kubenswrapper[4558]: I0120 18:11:17.802097 4558 reconciler_common.go:293] "Volume detached for volume \"kuttl-service-cm-0-1\" (UniqueName: \"kubernetes.io/configmap/9b20dd6f-c3af-49f2-bcc0-b13bf183b34e-kuttl-service-cm-0-1\") on node \"crc\" DevicePath \"\"" Jan 20 18:11:17 crc kubenswrapper[4558]: I0120 18:11:17.802113 4558 reconciler_common.go:293] "Volume detached for volume \"kuttl-service-cm-1-2\" (UniqueName: \"kubernetes.io/configmap/9b20dd6f-c3af-49f2-bcc0-b13bf183b34e-kuttl-service-cm-1-2\") on node \"crc\" DevicePath \"\"" Jan 20 18:11:17 crc kubenswrapper[4558]: I0120 18:11:17.805832 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9b20dd6f-c3af-49f2-bcc0-b13bf183b34e-kube-api-access-jxz4l" (OuterVolumeSpecName: "kube-api-access-jxz4l") pod "9b20dd6f-c3af-49f2-bcc0-b13bf183b34e" (UID: "9b20dd6f-c3af-49f2-bcc0-b13bf183b34e"). InnerVolumeSpecName "kube-api-access-jxz4l". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:11:17 crc kubenswrapper[4558]: I0120 18:11:17.807379 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9b20dd6f-c3af-49f2-bcc0-b13bf183b34e-kuttl-service-combined-ca-bundle" (OuterVolumeSpecName: "kuttl-service-combined-ca-bundle") pod "9b20dd6f-c3af-49f2-bcc0-b13bf183b34e" (UID: "9b20dd6f-c3af-49f2-bcc0-b13bf183b34e"). InnerVolumeSpecName "kuttl-service-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:11:17 crc kubenswrapper[4558]: I0120 18:11:17.819535 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9b20dd6f-c3af-49f2-bcc0-b13bf183b34e-kuttl-service-cm-2-0" (OuterVolumeSpecName: "kuttl-service-cm-2-0") pod "9b20dd6f-c3af-49f2-bcc0-b13bf183b34e" (UID: "9b20dd6f-c3af-49f2-bcc0-b13bf183b34e"). InnerVolumeSpecName "kuttl-service-cm-2-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:11:17 crc kubenswrapper[4558]: I0120 18:11:17.819714 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9b20dd6f-c3af-49f2-bcc0-b13bf183b34e-kuttl-service-cm-1-0" (OuterVolumeSpecName: "kuttl-service-cm-1-0") pod "9b20dd6f-c3af-49f2-bcc0-b13bf183b34e" (UID: "9b20dd6f-c3af-49f2-bcc0-b13bf183b34e"). InnerVolumeSpecName "kuttl-service-cm-1-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:11:17 crc kubenswrapper[4558]: I0120 18:11:17.821786 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9b20dd6f-c3af-49f2-bcc0-b13bf183b34e-kuttl-service-cm-0-2" (OuterVolumeSpecName: "kuttl-service-cm-0-2") pod "9b20dd6f-c3af-49f2-bcc0-b13bf183b34e" (UID: "9b20dd6f-c3af-49f2-bcc0-b13bf183b34e"). InnerVolumeSpecName "kuttl-service-cm-0-2". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:11:17 crc kubenswrapper[4558]: I0120 18:11:17.822317 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9b20dd6f-c3af-49f2-bcc0-b13bf183b34e-kuttl-service-cm-0-0" (OuterVolumeSpecName: "kuttl-service-cm-0-0") pod "9b20dd6f-c3af-49f2-bcc0-b13bf183b34e" (UID: "9b20dd6f-c3af-49f2-bcc0-b13bf183b34e"). InnerVolumeSpecName "kuttl-service-cm-0-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:11:17 crc kubenswrapper[4558]: I0120 18:11:17.825242 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9b20dd6f-c3af-49f2-bcc0-b13bf183b34e-ssh-key-edpm-compute-no-nodes" (OuterVolumeSpecName: "ssh-key-edpm-compute-no-nodes") pod "9b20dd6f-c3af-49f2-bcc0-b13bf183b34e" (UID: "9b20dd6f-c3af-49f2-bcc0-b13bf183b34e"). InnerVolumeSpecName "ssh-key-edpm-compute-no-nodes". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:11:17 crc kubenswrapper[4558]: I0120 18:11:17.829981 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9b20dd6f-c3af-49f2-bcc0-b13bf183b34e-inventory" (OuterVolumeSpecName: "inventory") pod "9b20dd6f-c3af-49f2-bcc0-b13bf183b34e" (UID: "9b20dd6f-c3af-49f2-bcc0-b13bf183b34e"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:11:17 crc kubenswrapper[4558]: I0120 18:11:17.831579 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9b20dd6f-c3af-49f2-bcc0-b13bf183b34e-kuttl-service-cm-1-1" (OuterVolumeSpecName: "kuttl-service-cm-1-1") pod "9b20dd6f-c3af-49f2-bcc0-b13bf183b34e" (UID: "9b20dd6f-c3af-49f2-bcc0-b13bf183b34e"). InnerVolumeSpecName "kuttl-service-cm-1-1". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:11:17 crc kubenswrapper[4558]: I0120 18:11:17.903858 4558 reconciler_common.go:293] "Volume detached for volume \"kuttl-service-cm-1-0\" (UniqueName: \"kubernetes.io/configmap/9b20dd6f-c3af-49f2-bcc0-b13bf183b34e-kuttl-service-cm-1-0\") on node \"crc\" DevicePath \"\"" Jan 20 18:11:17 crc kubenswrapper[4558]: I0120 18:11:17.903890 4558 reconciler_common.go:293] "Volume detached for volume \"kuttl-service-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b20dd6f-c3af-49f2-bcc0-b13bf183b34e-kuttl-service-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:11:17 crc kubenswrapper[4558]: I0120 18:11:17.903910 4558 reconciler_common.go:293] "Volume detached for volume \"kuttl-service-cm-0-0\" (UniqueName: \"kubernetes.io/configmap/9b20dd6f-c3af-49f2-bcc0-b13bf183b34e-kuttl-service-cm-0-0\") on node \"crc\" DevicePath \"\"" Jan 20 18:11:17 crc kubenswrapper[4558]: I0120 18:11:17.903926 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jxz4l\" (UniqueName: \"kubernetes.io/projected/9b20dd6f-c3af-49f2-bcc0-b13bf183b34e-kube-api-access-jxz4l\") on node \"crc\" DevicePath \"\"" Jan 20 18:11:17 crc kubenswrapper[4558]: I0120 18:11:17.903937 4558 reconciler_common.go:293] "Volume detached for volume \"kuttl-service-cm-1-1\" (UniqueName: \"kubernetes.io/configmap/9b20dd6f-c3af-49f2-bcc0-b13bf183b34e-kuttl-service-cm-1-1\") on node \"crc\" DevicePath \"\"" Jan 20 18:11:17 crc kubenswrapper[4558]: I0120 18:11:17.903947 4558 reconciler_common.go:293] "Volume detached for volume \"kuttl-service-cm-2-0\" (UniqueName: \"kubernetes.io/configmap/9b20dd6f-c3af-49f2-bcc0-b13bf183b34e-kuttl-service-cm-2-0\") on node \"crc\" DevicePath \"\"" Jan 20 18:11:17 crc kubenswrapper[4558]: I0120 18:11:17.903957 4558 reconciler_common.go:293] "Volume detached for volume \"kuttl-service-cm-0-2\" (UniqueName: \"kubernetes.io/configmap/9b20dd6f-c3af-49f2-bcc0-b13bf183b34e-kuttl-service-cm-0-2\") on node \"crc\" DevicePath \"\"" Jan 20 18:11:17 crc kubenswrapper[4558]: I0120 18:11:17.903966 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9b20dd6f-c3af-49f2-bcc0-b13bf183b34e-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:11:17 crc kubenswrapper[4558]: I0120 18:11:17.903975 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/9b20dd6f-c3af-49f2-bcc0-b13bf183b34e-ssh-key-edpm-compute-no-nodes\") on node \"crc\" DevicePath \"\"" Jan 20 18:11:18 crc kubenswrapper[4558]: I0120 18:11:18.529199 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59" Jan 20 18:11:18 crc kubenswrapper[4558]: I0120 18:11:18.529181 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59" event={"ID":"9b20dd6f-c3af-49f2-bcc0-b13bf183b34e","Type":"ContainerDied","Data":"b5a3af83db95a9f52059965e565a0e9640ebd2dd88e65af7fa5446fd956ae98e"} Jan 20 18:11:18 crc kubenswrapper[4558]: I0120 18:11:18.529298 4558 scope.go:117] "RemoveContainer" containerID="3c01c9129dba8e4eb6c418cd9370953b2dc3f6c1833be57cd391ffb18f2feaf5" Jan 20 18:11:18 crc kubenswrapper[4558]: I0120 18:11:18.558021 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59"] Jan 20 18:11:18 crc kubenswrapper[4558]: I0120 18:11:18.564145 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes-jlc59"] Jan 20 18:11:18 crc kubenswrapper[4558]: I0120 18:11:18.574860 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9b20dd6f-c3af-49f2-bcc0-b13bf183b34e" path="/var/lib/kubelet/pods/9b20dd6f-c3af-49f2-bcc0-b13bf183b34e/volumes" Jan 20 18:11:18 crc kubenswrapper[4558]: I0120 18:11:18.785963 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-q7mjn" Jan 20 18:11:18 crc kubenswrapper[4558]: I0120 18:11:18.855622 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-7kvrx" Jan 20 18:11:18 crc kubenswrapper[4558]: I0120 18:11:18.900183 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-cfjrb"] Jan 20 18:11:18 crc kubenswrapper[4558]: I0120 18:11:18.900436 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-cfjrb" podUID="b8372fe9-62b1-418d-be4a-234c7416d7e1" containerName="dnsmasq-dns" containerID="cri-o://1dac112ee2b711300350d0ec2c657690990bce3b265688f23b6f9b2573bc2710" gracePeriod=10 Jan 20 18:11:18 crc kubenswrapper[4558]: I0120 18:11:18.920021 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kxdp4\" (UniqueName: \"kubernetes.io/projected/87a2a57d-1ee8-4dc9-8e14-e136a5f849b4-kube-api-access-kxdp4\") pod \"87a2a57d-1ee8-4dc9-8e14-e136a5f849b4\" (UID: \"87a2a57d-1ee8-4dc9-8e14-e136a5f849b4\") " Jan 20 18:11:18 crc kubenswrapper[4558]: I0120 18:11:18.920149 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/87a2a57d-1ee8-4dc9-8e14-e136a5f849b4-crc-storage\") pod \"87a2a57d-1ee8-4dc9-8e14-e136a5f849b4\" (UID: \"87a2a57d-1ee8-4dc9-8e14-e136a5f849b4\") " Jan 20 18:11:18 crc kubenswrapper[4558]: I0120 18:11:18.920213 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/87a2a57d-1ee8-4dc9-8e14-e136a5f849b4-node-mnt\") pod \"87a2a57d-1ee8-4dc9-8e14-e136a5f849b4\" (UID: \"87a2a57d-1ee8-4dc9-8e14-e136a5f849b4\") " Jan 20 18:11:18 crc kubenswrapper[4558]: I0120 18:11:18.920258 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/87a2a57d-1ee8-4dc9-8e14-e136a5f849b4-node-mnt" 
(OuterVolumeSpecName: "node-mnt") pod "87a2a57d-1ee8-4dc9-8e14-e136a5f849b4" (UID: "87a2a57d-1ee8-4dc9-8e14-e136a5f849b4"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 18:11:18 crc kubenswrapper[4558]: I0120 18:11:18.920874 4558 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/87a2a57d-1ee8-4dc9-8e14-e136a5f849b4-node-mnt\") on node \"crc\" DevicePath \"\"" Jan 20 18:11:18 crc kubenswrapper[4558]: I0120 18:11:18.929368 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87a2a57d-1ee8-4dc9-8e14-e136a5f849b4-kube-api-access-kxdp4" (OuterVolumeSpecName: "kube-api-access-kxdp4") pod "87a2a57d-1ee8-4dc9-8e14-e136a5f849b4" (UID: "87a2a57d-1ee8-4dc9-8e14-e136a5f849b4"). InnerVolumeSpecName "kube-api-access-kxdp4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:11:18 crc kubenswrapper[4558]: I0120 18:11:18.940340 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87a2a57d-1ee8-4dc9-8e14-e136a5f849b4-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "87a2a57d-1ee8-4dc9-8e14-e136a5f849b4" (UID: "87a2a57d-1ee8-4dc9-8e14-e136a5f849b4"). InnerVolumeSpecName "crc-storage". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:11:19 crc kubenswrapper[4558]: I0120 18:11:19.022849 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kxdp4\" (UniqueName: \"kubernetes.io/projected/87a2a57d-1ee8-4dc9-8e14-e136a5f849b4-kube-api-access-kxdp4\") on node \"crc\" DevicePath \"\"" Jan 20 18:11:19 crc kubenswrapper[4558]: I0120 18:11:19.022916 4558 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/87a2a57d-1ee8-4dc9-8e14-e136a5f849b4-crc-storage\") on node \"crc\" DevicePath \"\"" Jan 20 18:11:19 crc kubenswrapper[4558]: I0120 18:11:19.213425 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-cfjrb" Jan 20 18:11:19 crc kubenswrapper[4558]: I0120 18:11:19.224612 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b8372fe9-62b1-418d-be4a-234c7416d7e1-config\") pod \"b8372fe9-62b1-418d-be4a-234c7416d7e1\" (UID: \"b8372fe9-62b1-418d-be4a-234c7416d7e1\") " Jan 20 18:11:19 crc kubenswrapper[4558]: I0120 18:11:19.224735 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/b8372fe9-62b1-418d-be4a-234c7416d7e1-dnsmasq-svc\") pod \"b8372fe9-62b1-418d-be4a-234c7416d7e1\" (UID: \"b8372fe9-62b1-418d-be4a-234c7416d7e1\") " Jan 20 18:11:19 crc kubenswrapper[4558]: I0120 18:11:19.224787 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/configmap/b8372fe9-62b1-418d-be4a-234c7416d7e1-edpm-compute-no-nodes\") pod \"b8372fe9-62b1-418d-be4a-234c7416d7e1\" (UID: \"b8372fe9-62b1-418d-be4a-234c7416d7e1\") " Jan 20 18:11:19 crc kubenswrapper[4558]: I0120 18:11:19.224969 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-74bvd\" (UniqueName: \"kubernetes.io/projected/b8372fe9-62b1-418d-be4a-234c7416d7e1-kube-api-access-74bvd\") pod \"b8372fe9-62b1-418d-be4a-234c7416d7e1\" (UID: \"b8372fe9-62b1-418d-be4a-234c7416d7e1\") " Jan 20 18:11:19 crc kubenswrapper[4558]: I0120 18:11:19.230741 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b8372fe9-62b1-418d-be4a-234c7416d7e1-kube-api-access-74bvd" (OuterVolumeSpecName: "kube-api-access-74bvd") pod "b8372fe9-62b1-418d-be4a-234c7416d7e1" (UID: "b8372fe9-62b1-418d-be4a-234c7416d7e1"). InnerVolumeSpecName "kube-api-access-74bvd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:11:19 crc kubenswrapper[4558]: I0120 18:11:19.261854 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b8372fe9-62b1-418d-be4a-234c7416d7e1-config" (OuterVolumeSpecName: "config") pod "b8372fe9-62b1-418d-be4a-234c7416d7e1" (UID: "b8372fe9-62b1-418d-be4a-234c7416d7e1"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:11:19 crc kubenswrapper[4558]: I0120 18:11:19.266082 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b8372fe9-62b1-418d-be4a-234c7416d7e1-edpm-compute-no-nodes" (OuterVolumeSpecName: "edpm-compute-no-nodes") pod "b8372fe9-62b1-418d-be4a-234c7416d7e1" (UID: "b8372fe9-62b1-418d-be4a-234c7416d7e1"). InnerVolumeSpecName "edpm-compute-no-nodes". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:11:19 crc kubenswrapper[4558]: I0120 18:11:19.270699 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b8372fe9-62b1-418d-be4a-234c7416d7e1-dnsmasq-svc" (OuterVolumeSpecName: "dnsmasq-svc") pod "b8372fe9-62b1-418d-be4a-234c7416d7e1" (UID: "b8372fe9-62b1-418d-be4a-234c7416d7e1"). InnerVolumeSpecName "dnsmasq-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:11:19 crc kubenswrapper[4558]: I0120 18:11:19.327698 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b8372fe9-62b1-418d-be4a-234c7416d7e1-config\") on node \"crc\" DevicePath \"\"" Jan 20 18:11:19 crc kubenswrapper[4558]: I0120 18:11:19.327735 4558 reconciler_common.go:293] "Volume detached for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/b8372fe9-62b1-418d-be4a-234c7416d7e1-dnsmasq-svc\") on node \"crc\" DevicePath \"\"" Jan 20 18:11:19 crc kubenswrapper[4558]: I0120 18:11:19.327752 4558 reconciler_common.go:293] "Volume detached for volume \"edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/configmap/b8372fe9-62b1-418d-be4a-234c7416d7e1-edpm-compute-no-nodes\") on node \"crc\" DevicePath \"\"" Jan 20 18:11:19 crc kubenswrapper[4558]: I0120 18:11:19.327782 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-74bvd\" (UniqueName: \"kubernetes.io/projected/b8372fe9-62b1-418d-be4a-234c7416d7e1-kube-api-access-74bvd\") on node \"crc\" DevicePath \"\"" Jan 20 18:11:19 crc kubenswrapper[4558]: I0120 18:11:19.555480 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-q7mjn" Jan 20 18:11:19 crc kubenswrapper[4558]: I0120 18:11:19.555500 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-q7mjn" event={"ID":"87a2a57d-1ee8-4dc9-8e14-e136a5f849b4","Type":"ContainerDied","Data":"ab706373fa7f7b9f5d1319932c3c5f8e65e5ba834bf7b5a2620b7b79add518c2"} Jan 20 18:11:19 crc kubenswrapper[4558]: I0120 18:11:19.555623 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ab706373fa7f7b9f5d1319932c3c5f8e65e5ba834bf7b5a2620b7b79add518c2" Jan 20 18:11:19 crc kubenswrapper[4558]: I0120 18:11:19.558590 4558 generic.go:334] "Generic (PLEG): container finished" podID="b8372fe9-62b1-418d-be4a-234c7416d7e1" containerID="1dac112ee2b711300350d0ec2c657690990bce3b265688f23b6f9b2573bc2710" exitCode=0 Jan 20 18:11:19 crc kubenswrapper[4558]: I0120 18:11:19.558652 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-cfjrb" Jan 20 18:11:19 crc kubenswrapper[4558]: I0120 18:11:19.558649 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-cfjrb" event={"ID":"b8372fe9-62b1-418d-be4a-234c7416d7e1","Type":"ContainerDied","Data":"1dac112ee2b711300350d0ec2c657690990bce3b265688f23b6f9b2573bc2710"} Jan 20 18:11:19 crc kubenswrapper[4558]: I0120 18:11:19.558772 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-cfjrb" event={"ID":"b8372fe9-62b1-418d-be4a-234c7416d7e1","Type":"ContainerDied","Data":"dfb33f9cc859bdc355b7d199ded9066441ddfdfde41fc9de591c33b125154cf0"} Jan 20 18:11:19 crc kubenswrapper[4558]: I0120 18:11:19.558801 4558 scope.go:117] "RemoveContainer" containerID="1dac112ee2b711300350d0ec2c657690990bce3b265688f23b6f9b2573bc2710" Jan 20 18:11:19 crc kubenswrapper[4558]: I0120 18:11:19.584536 4558 scope.go:117] "RemoveContainer" containerID="bebbff466df8f1cdf15dc8767371ebae6f6d39e5c8bb00294a2906586d9583b0" Jan 20 18:11:19 crc kubenswrapper[4558]: I0120 18:11:19.597245 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-cfjrb"] Jan 20 18:11:19 crc kubenswrapper[4558]: I0120 18:11:19.603368 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-cfjrb"] Jan 20 18:11:19 crc kubenswrapper[4558]: I0120 18:11:19.614661 4558 scope.go:117] "RemoveContainer" containerID="1dac112ee2b711300350d0ec2c657690990bce3b265688f23b6f9b2573bc2710" Jan 20 18:11:19 crc kubenswrapper[4558]: E0120 18:11:19.615081 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1dac112ee2b711300350d0ec2c657690990bce3b265688f23b6f9b2573bc2710\": container with ID starting with 1dac112ee2b711300350d0ec2c657690990bce3b265688f23b6f9b2573bc2710 not found: ID does not exist" containerID="1dac112ee2b711300350d0ec2c657690990bce3b265688f23b6f9b2573bc2710" Jan 20 18:11:19 crc kubenswrapper[4558]: I0120 18:11:19.615137 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1dac112ee2b711300350d0ec2c657690990bce3b265688f23b6f9b2573bc2710"} err="failed to get container status \"1dac112ee2b711300350d0ec2c657690990bce3b265688f23b6f9b2573bc2710\": rpc error: code = NotFound desc = could not find container \"1dac112ee2b711300350d0ec2c657690990bce3b265688f23b6f9b2573bc2710\": container with ID starting with 1dac112ee2b711300350d0ec2c657690990bce3b265688f23b6f9b2573bc2710 not found: ID does not exist" Jan 20 18:11:19 crc kubenswrapper[4558]: I0120 18:11:19.615199 4558 scope.go:117] "RemoveContainer" containerID="bebbff466df8f1cdf15dc8767371ebae6f6d39e5c8bb00294a2906586d9583b0" Jan 20 18:11:19 crc kubenswrapper[4558]: E0120 18:11:19.615494 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bebbff466df8f1cdf15dc8767371ebae6f6d39e5c8bb00294a2906586d9583b0\": container with ID starting with bebbff466df8f1cdf15dc8767371ebae6f6d39e5c8bb00294a2906586d9583b0 not found: ID does not exist" containerID="bebbff466df8f1cdf15dc8767371ebae6f6d39e5c8bb00294a2906586d9583b0" Jan 20 18:11:19 crc kubenswrapper[4558]: I0120 18:11:19.615529 4558 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"bebbff466df8f1cdf15dc8767371ebae6f6d39e5c8bb00294a2906586d9583b0"} err="failed to get container status \"bebbff466df8f1cdf15dc8767371ebae6f6d39e5c8bb00294a2906586d9583b0\": rpc error: code = NotFound desc = could not find container \"bebbff466df8f1cdf15dc8767371ebae6f6d39e5c8bb00294a2906586d9583b0\": container with ID starting with bebbff466df8f1cdf15dc8767371ebae6f6d39e5c8bb00294a2906586d9583b0 not found: ID does not exist" Jan 20 18:11:20 crc kubenswrapper[4558]: I0120 18:11:20.572337 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b8372fe9-62b1-418d-be4a-234c7416d7e1" path="/var/lib/kubelet/pods/b8372fe9-62b1-418d-be4a-234c7416d7e1/volumes" Jan 20 18:11:21 crc kubenswrapper[4558]: I0120 18:11:21.596227 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-q7mjn"] Jan 20 18:11:21 crc kubenswrapper[4558]: I0120 18:11:21.602530 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-q7mjn"] Jan 20 18:11:21 crc kubenswrapper[4558]: I0120 18:11:21.702916 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-8t8cr"] Jan 20 18:11:21 crc kubenswrapper[4558]: E0120 18:11:21.703282 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8372fe9-62b1-418d-be4a-234c7416d7e1" containerName="init" Jan 20 18:11:21 crc kubenswrapper[4558]: I0120 18:11:21.703303 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8372fe9-62b1-418d-be4a-234c7416d7e1" containerName="init" Jan 20 18:11:21 crc kubenswrapper[4558]: E0120 18:11:21.703325 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="87a2a57d-1ee8-4dc9-8e14-e136a5f849b4" containerName="storage" Jan 20 18:11:21 crc kubenswrapper[4558]: I0120 18:11:21.703331 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="87a2a57d-1ee8-4dc9-8e14-e136a5f849b4" containerName="storage" Jan 20 18:11:21 crc kubenswrapper[4558]: E0120 18:11:21.703351 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8372fe9-62b1-418d-be4a-234c7416d7e1" containerName="dnsmasq-dns" Jan 20 18:11:21 crc kubenswrapper[4558]: I0120 18:11:21.703358 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8372fe9-62b1-418d-be4a-234c7416d7e1" containerName="dnsmasq-dns" Jan 20 18:11:21 crc kubenswrapper[4558]: E0120 18:11:21.703378 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b20dd6f-c3af-49f2-bcc0-b13bf183b34e" containerName="kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes" Jan 20 18:11:21 crc kubenswrapper[4558]: I0120 18:11:21.703386 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b20dd6f-c3af-49f2-bcc0-b13bf183b34e" containerName="kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes" Jan 20 18:11:21 crc kubenswrapper[4558]: I0120 18:11:21.703567 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="87a2a57d-1ee8-4dc9-8e14-e136a5f849b4" containerName="storage" Jan 20 18:11:21 crc kubenswrapper[4558]: I0120 18:11:21.703586 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b8372fe9-62b1-418d-be4a-234c7416d7e1" containerName="dnsmasq-dns" Jan 20 18:11:21 crc kubenswrapper[4558]: I0120 18:11:21.703598 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="9b20dd6f-c3af-49f2-bcc0-b13bf183b34e" containerName="kuttl-service-edpm-compute-no-nodes-edpm-compute-no-nodes" Jan 20 18:11:21 crc kubenswrapper[4558]: I0120 18:11:21.704105 4558 util.go:30] "No sandbox for pod 
can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-8t8cr" Jan 20 18:11:21 crc kubenswrapper[4558]: I0120 18:11:21.705646 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Jan 20 18:11:21 crc kubenswrapper[4558]: I0120 18:11:21.706105 4558 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-k9ht8" Jan 20 18:11:21 crc kubenswrapper[4558]: I0120 18:11:21.706373 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Jan 20 18:11:21 crc kubenswrapper[4558]: I0120 18:11:21.707135 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Jan 20 18:11:21 crc kubenswrapper[4558]: I0120 18:11:21.711347 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-8t8cr"] Jan 20 18:11:21 crc kubenswrapper[4558]: I0120 18:11:21.857567 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/223e5b07-c7fa-4dcf-ab10-62ca9ed78541-crc-storage\") pod \"crc-storage-crc-8t8cr\" (UID: \"223e5b07-c7fa-4dcf-ab10-62ca9ed78541\") " pod="crc-storage/crc-storage-crc-8t8cr" Jan 20 18:11:21 crc kubenswrapper[4558]: I0120 18:11:21.857761 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p587v\" (UniqueName: \"kubernetes.io/projected/223e5b07-c7fa-4dcf-ab10-62ca9ed78541-kube-api-access-p587v\") pod \"crc-storage-crc-8t8cr\" (UID: \"223e5b07-c7fa-4dcf-ab10-62ca9ed78541\") " pod="crc-storage/crc-storage-crc-8t8cr" Jan 20 18:11:21 crc kubenswrapper[4558]: I0120 18:11:21.857915 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/223e5b07-c7fa-4dcf-ab10-62ca9ed78541-node-mnt\") pod \"crc-storage-crc-8t8cr\" (UID: \"223e5b07-c7fa-4dcf-ab10-62ca9ed78541\") " pod="crc-storage/crc-storage-crc-8t8cr" Jan 20 18:11:21 crc kubenswrapper[4558]: I0120 18:11:21.958969 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/223e5b07-c7fa-4dcf-ab10-62ca9ed78541-crc-storage\") pod \"crc-storage-crc-8t8cr\" (UID: \"223e5b07-c7fa-4dcf-ab10-62ca9ed78541\") " pod="crc-storage/crc-storage-crc-8t8cr" Jan 20 18:11:21 crc kubenswrapper[4558]: I0120 18:11:21.959100 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p587v\" (UniqueName: \"kubernetes.io/projected/223e5b07-c7fa-4dcf-ab10-62ca9ed78541-kube-api-access-p587v\") pod \"crc-storage-crc-8t8cr\" (UID: \"223e5b07-c7fa-4dcf-ab10-62ca9ed78541\") " pod="crc-storage/crc-storage-crc-8t8cr" Jan 20 18:11:21 crc kubenswrapper[4558]: I0120 18:11:21.959513 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/223e5b07-c7fa-4dcf-ab10-62ca9ed78541-node-mnt\") pod \"crc-storage-crc-8t8cr\" (UID: \"223e5b07-c7fa-4dcf-ab10-62ca9ed78541\") " pod="crc-storage/crc-storage-crc-8t8cr" Jan 20 18:11:21 crc kubenswrapper[4558]: I0120 18:11:21.959784 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/223e5b07-c7fa-4dcf-ab10-62ca9ed78541-crc-storage\") pod \"crc-storage-crc-8t8cr\" (UID: \"223e5b07-c7fa-4dcf-ab10-62ca9ed78541\") " 
pod="crc-storage/crc-storage-crc-8t8cr" Jan 20 18:11:21 crc kubenswrapper[4558]: I0120 18:11:21.959804 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/223e5b07-c7fa-4dcf-ab10-62ca9ed78541-node-mnt\") pod \"crc-storage-crc-8t8cr\" (UID: \"223e5b07-c7fa-4dcf-ab10-62ca9ed78541\") " pod="crc-storage/crc-storage-crc-8t8cr" Jan 20 18:11:21 crc kubenswrapper[4558]: I0120 18:11:21.978116 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p587v\" (UniqueName: \"kubernetes.io/projected/223e5b07-c7fa-4dcf-ab10-62ca9ed78541-kube-api-access-p587v\") pod \"crc-storage-crc-8t8cr\" (UID: \"223e5b07-c7fa-4dcf-ab10-62ca9ed78541\") " pod="crc-storage/crc-storage-crc-8t8cr" Jan 20 18:11:22 crc kubenswrapper[4558]: I0120 18:11:22.021869 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-8t8cr" Jan 20 18:11:22 crc kubenswrapper[4558]: I0120 18:11:22.440379 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-8t8cr"] Jan 20 18:11:22 crc kubenswrapper[4558]: W0120 18:11:22.443200 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod223e5b07_c7fa_4dcf_ab10_62ca9ed78541.slice/crio-14c4f6b5e7583f0d18d9d6df9f5995a45e84c01cfd89e3e07e85c9e0447bb57d WatchSource:0}: Error finding container 14c4f6b5e7583f0d18d9d6df9f5995a45e84c01cfd89e3e07e85c9e0447bb57d: Status 404 returned error can't find the container with id 14c4f6b5e7583f0d18d9d6df9f5995a45e84c01cfd89e3e07e85c9e0447bb57d Jan 20 18:11:22 crc kubenswrapper[4558]: I0120 18:11:22.575732 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87a2a57d-1ee8-4dc9-8e14-e136a5f849b4" path="/var/lib/kubelet/pods/87a2a57d-1ee8-4dc9-8e14-e136a5f849b4/volumes" Jan 20 18:11:22 crc kubenswrapper[4558]: I0120 18:11:22.582301 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-8t8cr" event={"ID":"223e5b07-c7fa-4dcf-ab10-62ca9ed78541","Type":"ContainerStarted","Data":"14c4f6b5e7583f0d18d9d6df9f5995a45e84c01cfd89e3e07e85c9e0447bb57d"} Jan 20 18:11:23 crc kubenswrapper[4558]: I0120 18:11:23.596072 4558 generic.go:334] "Generic (PLEG): container finished" podID="223e5b07-c7fa-4dcf-ab10-62ca9ed78541" containerID="b9e698a81357dab17b52a01c935e1efa7fc99133ba3cb959933757a8c6e559df" exitCode=0 Jan 20 18:11:23 crc kubenswrapper[4558]: I0120 18:11:23.596220 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-8t8cr" event={"ID":"223e5b07-c7fa-4dcf-ab10-62ca9ed78541","Type":"ContainerDied","Data":"b9e698a81357dab17b52a01c935e1efa7fc99133ba3cb959933757a8c6e559df"} Jan 20 18:11:24 crc kubenswrapper[4558]: I0120 18:11:24.837949 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-8t8cr" Jan 20 18:11:25 crc kubenswrapper[4558]: I0120 18:11:25.002453 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p587v\" (UniqueName: \"kubernetes.io/projected/223e5b07-c7fa-4dcf-ab10-62ca9ed78541-kube-api-access-p587v\") pod \"223e5b07-c7fa-4dcf-ab10-62ca9ed78541\" (UID: \"223e5b07-c7fa-4dcf-ab10-62ca9ed78541\") " Jan 20 18:11:25 crc kubenswrapper[4558]: I0120 18:11:25.002775 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/223e5b07-c7fa-4dcf-ab10-62ca9ed78541-crc-storage\") pod \"223e5b07-c7fa-4dcf-ab10-62ca9ed78541\" (UID: \"223e5b07-c7fa-4dcf-ab10-62ca9ed78541\") " Jan 20 18:11:25 crc kubenswrapper[4558]: I0120 18:11:25.002844 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/223e5b07-c7fa-4dcf-ab10-62ca9ed78541-node-mnt\") pod \"223e5b07-c7fa-4dcf-ab10-62ca9ed78541\" (UID: \"223e5b07-c7fa-4dcf-ab10-62ca9ed78541\") " Jan 20 18:11:25 crc kubenswrapper[4558]: I0120 18:11:25.003300 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/223e5b07-c7fa-4dcf-ab10-62ca9ed78541-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "223e5b07-c7fa-4dcf-ab10-62ca9ed78541" (UID: "223e5b07-c7fa-4dcf-ab10-62ca9ed78541"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 18:11:25 crc kubenswrapper[4558]: I0120 18:11:25.003687 4558 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/223e5b07-c7fa-4dcf-ab10-62ca9ed78541-node-mnt\") on node \"crc\" DevicePath \"\"" Jan 20 18:11:25 crc kubenswrapper[4558]: I0120 18:11:25.008544 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/223e5b07-c7fa-4dcf-ab10-62ca9ed78541-kube-api-access-p587v" (OuterVolumeSpecName: "kube-api-access-p587v") pod "223e5b07-c7fa-4dcf-ab10-62ca9ed78541" (UID: "223e5b07-c7fa-4dcf-ab10-62ca9ed78541"). InnerVolumeSpecName "kube-api-access-p587v". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:11:25 crc kubenswrapper[4558]: I0120 18:11:25.022565 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/223e5b07-c7fa-4dcf-ab10-62ca9ed78541-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "223e5b07-c7fa-4dcf-ab10-62ca9ed78541" (UID: "223e5b07-c7fa-4dcf-ab10-62ca9ed78541"). InnerVolumeSpecName "crc-storage". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:11:25 crc kubenswrapper[4558]: I0120 18:11:25.104975 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p587v\" (UniqueName: \"kubernetes.io/projected/223e5b07-c7fa-4dcf-ab10-62ca9ed78541-kube-api-access-p587v\") on node \"crc\" DevicePath \"\"" Jan 20 18:11:25 crc kubenswrapper[4558]: I0120 18:11:25.104999 4558 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/223e5b07-c7fa-4dcf-ab10-62ca9ed78541-crc-storage\") on node \"crc\" DevicePath \"\"" Jan 20 18:11:25 crc kubenswrapper[4558]: I0120 18:11:25.614999 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-8t8cr" event={"ID":"223e5b07-c7fa-4dcf-ab10-62ca9ed78541","Type":"ContainerDied","Data":"14c4f6b5e7583f0d18d9d6df9f5995a45e84c01cfd89e3e07e85c9e0447bb57d"} Jan 20 18:11:25 crc kubenswrapper[4558]: I0120 18:11:25.615386 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="14c4f6b5e7583f0d18d9d6df9f5995a45e84c01cfd89e3e07e85c9e0447bb57d" Jan 20 18:11:25 crc kubenswrapper[4558]: I0120 18:11:25.615053 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-8t8cr" Jan 20 18:11:27 crc kubenswrapper[4558]: I0120 18:11:27.330660 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 18:11:27 crc kubenswrapper[4558]: I0120 18:11:27.330742 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 18:11:27 crc kubenswrapper[4558]: I0120 18:11:27.839049 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-6fdb84cf7c-qm5q8"] Jan 20 18:11:27 crc kubenswrapper[4558]: E0120 18:11:27.839471 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="223e5b07-c7fa-4dcf-ab10-62ca9ed78541" containerName="storage" Jan 20 18:11:27 crc kubenswrapper[4558]: I0120 18:11:27.839495 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="223e5b07-c7fa-4dcf-ab10-62ca9ed78541" containerName="storage" Jan 20 18:11:27 crc kubenswrapper[4558]: I0120 18:11:27.839709 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="223e5b07-c7fa-4dcf-ab10-62ca9ed78541" containerName="storage" Jan 20 18:11:27 crc kubenswrapper[4558]: I0120 18:11:27.840569 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6fdb84cf7c-qm5q8" Jan 20 18:11:27 crc kubenswrapper[4558]: I0120 18:11:27.842603 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"edpm-no-nodes-custom-svc" Jan 20 18:11:27 crc kubenswrapper[4558]: I0120 18:11:27.854052 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-6fdb84cf7c-qm5q8"] Jan 20 18:11:27 crc kubenswrapper[4558]: I0120 18:11:27.946730 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h9g74\" (UniqueName: \"kubernetes.io/projected/9e263ba7-d72f-4914-8be8-4ae100d5985a-kube-api-access-h9g74\") pod \"dnsmasq-dnsmasq-6fdb84cf7c-qm5q8\" (UID: \"9e263ba7-d72f-4914-8be8-4ae100d5985a\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6fdb84cf7c-qm5q8" Jan 20 18:11:27 crc kubenswrapper[4558]: I0120 18:11:27.946870 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/9e263ba7-d72f-4914-8be8-4ae100d5985a-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-6fdb84cf7c-qm5q8\" (UID: \"9e263ba7-d72f-4914-8be8-4ae100d5985a\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6fdb84cf7c-qm5q8" Jan 20 18:11:27 crc kubenswrapper[4558]: I0120 18:11:27.946925 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"edpm-no-nodes-custom-svc\" (UniqueName: \"kubernetes.io/configmap/9e263ba7-d72f-4914-8be8-4ae100d5985a-edpm-no-nodes-custom-svc\") pod \"dnsmasq-dnsmasq-6fdb84cf7c-qm5q8\" (UID: \"9e263ba7-d72f-4914-8be8-4ae100d5985a\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6fdb84cf7c-qm5q8" Jan 20 18:11:27 crc kubenswrapper[4558]: I0120 18:11:27.946983 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9e263ba7-d72f-4914-8be8-4ae100d5985a-config\") pod \"dnsmasq-dnsmasq-6fdb84cf7c-qm5q8\" (UID: \"9e263ba7-d72f-4914-8be8-4ae100d5985a\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6fdb84cf7c-qm5q8" Jan 20 18:11:28 crc kubenswrapper[4558]: I0120 18:11:28.048211 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h9g74\" (UniqueName: \"kubernetes.io/projected/9e263ba7-d72f-4914-8be8-4ae100d5985a-kube-api-access-h9g74\") pod \"dnsmasq-dnsmasq-6fdb84cf7c-qm5q8\" (UID: \"9e263ba7-d72f-4914-8be8-4ae100d5985a\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6fdb84cf7c-qm5q8" Jan 20 18:11:28 crc kubenswrapper[4558]: I0120 18:11:28.048271 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/9e263ba7-d72f-4914-8be8-4ae100d5985a-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-6fdb84cf7c-qm5q8\" (UID: \"9e263ba7-d72f-4914-8be8-4ae100d5985a\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6fdb84cf7c-qm5q8" Jan 20 18:11:28 crc kubenswrapper[4558]: I0120 18:11:28.048297 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"edpm-no-nodes-custom-svc\" (UniqueName: \"kubernetes.io/configmap/9e263ba7-d72f-4914-8be8-4ae100d5985a-edpm-no-nodes-custom-svc\") pod \"dnsmasq-dnsmasq-6fdb84cf7c-qm5q8\" (UID: \"9e263ba7-d72f-4914-8be8-4ae100d5985a\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6fdb84cf7c-qm5q8" Jan 20 18:11:28 crc kubenswrapper[4558]: I0120 18:11:28.048326 4558 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9e263ba7-d72f-4914-8be8-4ae100d5985a-config\") pod \"dnsmasq-dnsmasq-6fdb84cf7c-qm5q8\" (UID: \"9e263ba7-d72f-4914-8be8-4ae100d5985a\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6fdb84cf7c-qm5q8" Jan 20 18:11:28 crc kubenswrapper[4558]: I0120 18:11:28.049251 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9e263ba7-d72f-4914-8be8-4ae100d5985a-config\") pod \"dnsmasq-dnsmasq-6fdb84cf7c-qm5q8\" (UID: \"9e263ba7-d72f-4914-8be8-4ae100d5985a\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6fdb84cf7c-qm5q8" Jan 20 18:11:28 crc kubenswrapper[4558]: I0120 18:11:28.049459 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/9e263ba7-d72f-4914-8be8-4ae100d5985a-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-6fdb84cf7c-qm5q8\" (UID: \"9e263ba7-d72f-4914-8be8-4ae100d5985a\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6fdb84cf7c-qm5q8" Jan 20 18:11:28 crc kubenswrapper[4558]: I0120 18:11:28.049584 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"edpm-no-nodes-custom-svc\" (UniqueName: \"kubernetes.io/configmap/9e263ba7-d72f-4914-8be8-4ae100d5985a-edpm-no-nodes-custom-svc\") pod \"dnsmasq-dnsmasq-6fdb84cf7c-qm5q8\" (UID: \"9e263ba7-d72f-4914-8be8-4ae100d5985a\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6fdb84cf7c-qm5q8" Jan 20 18:11:28 crc kubenswrapper[4558]: I0120 18:11:28.065963 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h9g74\" (UniqueName: \"kubernetes.io/projected/9e263ba7-d72f-4914-8be8-4ae100d5985a-kube-api-access-h9g74\") pod \"dnsmasq-dnsmasq-6fdb84cf7c-qm5q8\" (UID: \"9e263ba7-d72f-4914-8be8-4ae100d5985a\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6fdb84cf7c-qm5q8" Jan 20 18:11:28 crc kubenswrapper[4558]: I0120 18:11:28.154686 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6fdb84cf7c-qm5q8" Jan 20 18:11:28 crc kubenswrapper[4558]: I0120 18:11:28.551559 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-6fdb84cf7c-qm5q8"] Jan 20 18:11:28 crc kubenswrapper[4558]: W0120 18:11:28.553583 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9e263ba7_d72f_4914_8be8_4ae100d5985a.slice/crio-e6fdad7da1a994715d2a6fa30fcb59edb7f9f870c7ea58c56667940db1f162d7 WatchSource:0}: Error finding container e6fdad7da1a994715d2a6fa30fcb59edb7f9f870c7ea58c56667940db1f162d7: Status 404 returned error can't find the container with id e6fdad7da1a994715d2a6fa30fcb59edb7f9f870c7ea58c56667940db1f162d7 Jan 20 18:11:28 crc kubenswrapper[4558]: I0120 18:11:28.641673 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6fdb84cf7c-qm5q8" event={"ID":"9e263ba7-d72f-4914-8be8-4ae100d5985a","Type":"ContainerStarted","Data":"e6fdad7da1a994715d2a6fa30fcb59edb7f9f870c7ea58c56667940db1f162d7"} Jan 20 18:11:29 crc kubenswrapper[4558]: I0120 18:11:29.654067 4558 generic.go:334] "Generic (PLEG): container finished" podID="9e263ba7-d72f-4914-8be8-4ae100d5985a" containerID="5b39759fd56ee416c42e990df16d89302f4e39a2c6db7be3c7ce7b71f25c6336" exitCode=0 Jan 20 18:11:29 crc kubenswrapper[4558]: I0120 18:11:29.654111 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6fdb84cf7c-qm5q8" event={"ID":"9e263ba7-d72f-4914-8be8-4ae100d5985a","Type":"ContainerDied","Data":"5b39759fd56ee416c42e990df16d89302f4e39a2c6db7be3c7ce7b71f25c6336"} Jan 20 18:11:30 crc kubenswrapper[4558]: I0120 18:11:30.663793 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6fdb84cf7c-qm5q8" event={"ID":"9e263ba7-d72f-4914-8be8-4ae100d5985a","Type":"ContainerStarted","Data":"77f44480c225d662c0e88c45687296c932630dd932c418005031192e81b85448"} Jan 20 18:11:30 crc kubenswrapper[4558]: I0120 18:11:30.664558 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6fdb84cf7c-qm5q8" Jan 20 18:11:30 crc kubenswrapper[4558]: I0120 18:11:30.682766 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6fdb84cf7c-qm5q8" podStartSLOduration=3.682747074 podStartE2EDuration="3.682747074s" podCreationTimestamp="2026-01-20 18:11:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:11:30.678534974 +0000 UTC m=+5384.438872942" watchObservedRunningTime="2026-01-20 18:11:30.682747074 +0000 UTC m=+5384.443085041" Jan 20 18:11:33 crc kubenswrapper[4558]: I0120 18:11:33.509100 4558 scope.go:117] "RemoveContainer" containerID="1821ba1d87f40c241193772646ccf4c847153889128ed710a76f71ee8bc72037" Jan 20 18:11:33 crc kubenswrapper[4558]: I0120 18:11:33.533127 4558 scope.go:117] "RemoveContainer" containerID="69b6158d5b7b70a3e9028f7e9027602fde3cab51c171d50a19460a849f1bc9df" Jan 20 18:11:33 crc kubenswrapper[4558]: I0120 18:11:33.557103 4558 scope.go:117] "RemoveContainer" containerID="264d5eb52b0f8b6b9eb56b3a0c431e49acbf761a45324cb457decc2d43d91e79" Jan 20 18:11:33 crc kubenswrapper[4558]: I0120 18:11:33.580944 4558 scope.go:117] "RemoveContainer" containerID="d04be593f095fca90b98ce9ff32afc8b0eba6d0b7dab93ce693dbf4f1dea98e7" Jan 20 
18:11:33 crc kubenswrapper[4558]: I0120 18:11:33.600028 4558 scope.go:117] "RemoveContainer" containerID="895d0ce06ac44b93eb9c9c2c176abc2e5341b42281a2cbfffa4e072ddbe5ca99" Jan 20 18:11:33 crc kubenswrapper[4558]: I0120 18:11:33.620860 4558 scope.go:117] "RemoveContainer" containerID="b4696041322202470e8a3951ad333ac41c5429c650b672e321ea7213c3833688" Jan 20 18:11:33 crc kubenswrapper[4558]: I0120 18:11:33.642023 4558 scope.go:117] "RemoveContainer" containerID="3aefef1f6fb33a2f41fd9e6a604a0537ae82ad6b586f21de3356bbe2136617bd" Jan 20 18:11:33 crc kubenswrapper[4558]: I0120 18:11:33.660014 4558 scope.go:117] "RemoveContainer" containerID="bd147f3029d01aebcf7375d84d676da29d4589fa9d6a96becd9f8ed0e2cc18ee" Jan 20 18:11:33 crc kubenswrapper[4558]: I0120 18:11:33.683590 4558 scope.go:117] "RemoveContainer" containerID="f27df2605198d2649641178381f3331ced93d1b1a1951424947c4cd32d08d215" Jan 20 18:11:33 crc kubenswrapper[4558]: I0120 18:11:33.705793 4558 scope.go:117] "RemoveContainer" containerID="5a2c0130729c72b0a7cfc472b088a1862bb308a64d24b8cdda68f8113a394887" Jan 20 18:11:33 crc kubenswrapper[4558]: I0120 18:11:33.725262 4558 scope.go:117] "RemoveContainer" containerID="5b4e3afbb4029a0e08acb0dfd1e242d44b8ef4811e92f285fec0fa23cb526282" Jan 20 18:11:33 crc kubenswrapper[4558]: I0120 18:11:33.742772 4558 scope.go:117] "RemoveContainer" containerID="e1a51d79bc486e13aed05e565704ae5ce3ed66310a43a712dce635529f2dc42a" Jan 20 18:11:38 crc kubenswrapper[4558]: I0120 18:11:38.156455 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6fdb84cf7c-qm5q8" Jan 20 18:11:38 crc kubenswrapper[4558]: I0120 18:11:38.199131 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-7kvrx"] Jan 20 18:11:38 crc kubenswrapper[4558]: I0120 18:11:38.199415 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-7kvrx" podUID="3737f1f8-bbaa-464b-825a-2d6080e8bc3c" containerName="dnsmasq-dns" containerID="cri-o://9b96d50f333d399bfe2cd143ea78aadb1c68e344d0f3c4b9e8933659e5f38e12" gracePeriod=10 Jan 20 18:11:38 crc kubenswrapper[4558]: I0120 18:11:38.546659 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-7kvrx" Jan 20 18:11:38 crc kubenswrapper[4558]: I0120 18:11:38.691398 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3737f1f8-bbaa-464b-825a-2d6080e8bc3c-config\") pod \"3737f1f8-bbaa-464b-825a-2d6080e8bc3c\" (UID: \"3737f1f8-bbaa-464b-825a-2d6080e8bc3c\") " Jan 20 18:11:38 crc kubenswrapper[4558]: I0120 18:11:38.691523 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/3737f1f8-bbaa-464b-825a-2d6080e8bc3c-dnsmasq-svc\") pod \"3737f1f8-bbaa-464b-825a-2d6080e8bc3c\" (UID: \"3737f1f8-bbaa-464b-825a-2d6080e8bc3c\") " Jan 20 18:11:38 crc kubenswrapper[4558]: I0120 18:11:38.691568 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-stmjg\" (UniqueName: \"kubernetes.io/projected/3737f1f8-bbaa-464b-825a-2d6080e8bc3c-kube-api-access-stmjg\") pod \"3737f1f8-bbaa-464b-825a-2d6080e8bc3c\" (UID: \"3737f1f8-bbaa-464b-825a-2d6080e8bc3c\") " Jan 20 18:11:38 crc kubenswrapper[4558]: I0120 18:11:38.696717 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3737f1f8-bbaa-464b-825a-2d6080e8bc3c-kube-api-access-stmjg" (OuterVolumeSpecName: "kube-api-access-stmjg") pod "3737f1f8-bbaa-464b-825a-2d6080e8bc3c" (UID: "3737f1f8-bbaa-464b-825a-2d6080e8bc3c"). InnerVolumeSpecName "kube-api-access-stmjg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:11:38 crc kubenswrapper[4558]: I0120 18:11:38.720931 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3737f1f8-bbaa-464b-825a-2d6080e8bc3c-dnsmasq-svc" (OuterVolumeSpecName: "dnsmasq-svc") pod "3737f1f8-bbaa-464b-825a-2d6080e8bc3c" (UID: "3737f1f8-bbaa-464b-825a-2d6080e8bc3c"). InnerVolumeSpecName "dnsmasq-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:11:38 crc kubenswrapper[4558]: I0120 18:11:38.723126 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3737f1f8-bbaa-464b-825a-2d6080e8bc3c-config" (OuterVolumeSpecName: "config") pod "3737f1f8-bbaa-464b-825a-2d6080e8bc3c" (UID: "3737f1f8-bbaa-464b-825a-2d6080e8bc3c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:11:38 crc kubenswrapper[4558]: I0120 18:11:38.741587 4558 generic.go:334] "Generic (PLEG): container finished" podID="3737f1f8-bbaa-464b-825a-2d6080e8bc3c" containerID="9b96d50f333d399bfe2cd143ea78aadb1c68e344d0f3c4b9e8933659e5f38e12" exitCode=0 Jan 20 18:11:38 crc kubenswrapper[4558]: I0120 18:11:38.741647 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-7kvrx" event={"ID":"3737f1f8-bbaa-464b-825a-2d6080e8bc3c","Type":"ContainerDied","Data":"9b96d50f333d399bfe2cd143ea78aadb1c68e344d0f3c4b9e8933659e5f38e12"} Jan 20 18:11:38 crc kubenswrapper[4558]: I0120 18:11:38.741706 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-7kvrx" event={"ID":"3737f1f8-bbaa-464b-825a-2d6080e8bc3c","Type":"ContainerDied","Data":"d5238d925061d4943fa81def51dee7f0111626d012d966358afbdde112bdc855"} Jan 20 18:11:38 crc kubenswrapper[4558]: I0120 18:11:38.741721 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-7kvrx" Jan 20 18:11:38 crc kubenswrapper[4558]: I0120 18:11:38.741734 4558 scope.go:117] "RemoveContainer" containerID="9b96d50f333d399bfe2cd143ea78aadb1c68e344d0f3c4b9e8933659e5f38e12" Jan 20 18:11:38 crc kubenswrapper[4558]: I0120 18:11:38.765077 4558 scope.go:117] "RemoveContainer" containerID="1c9be81a1d6c6c1e1d522a23cd23d731473da8b07de763e31630d3e338acb1bf" Jan 20 18:11:38 crc kubenswrapper[4558]: I0120 18:11:38.770446 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-7kvrx"] Jan 20 18:11:38 crc kubenswrapper[4558]: I0120 18:11:38.775015 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-7kvrx"] Jan 20 18:11:38 crc kubenswrapper[4558]: I0120 18:11:38.794497 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3737f1f8-bbaa-464b-825a-2d6080e8bc3c-config\") on node \"crc\" DevicePath \"\"" Jan 20 18:11:38 crc kubenswrapper[4558]: I0120 18:11:38.794532 4558 reconciler_common.go:293] "Volume detached for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/3737f1f8-bbaa-464b-825a-2d6080e8bc3c-dnsmasq-svc\") on node \"crc\" DevicePath \"\"" Jan 20 18:11:38 crc kubenswrapper[4558]: I0120 18:11:38.794554 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-stmjg\" (UniqueName: \"kubernetes.io/projected/3737f1f8-bbaa-464b-825a-2d6080e8bc3c-kube-api-access-stmjg\") on node \"crc\" DevicePath \"\"" Jan 20 18:11:38 crc kubenswrapper[4558]: I0120 18:11:38.795350 4558 scope.go:117] "RemoveContainer" containerID="9b96d50f333d399bfe2cd143ea78aadb1c68e344d0f3c4b9e8933659e5f38e12" Jan 20 18:11:38 crc kubenswrapper[4558]: E0120 18:11:38.795831 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9b96d50f333d399bfe2cd143ea78aadb1c68e344d0f3c4b9e8933659e5f38e12\": container with ID starting with 9b96d50f333d399bfe2cd143ea78aadb1c68e344d0f3c4b9e8933659e5f38e12 not found: ID does not exist" containerID="9b96d50f333d399bfe2cd143ea78aadb1c68e344d0f3c4b9e8933659e5f38e12" Jan 20 18:11:38 crc kubenswrapper[4558]: I0120 18:11:38.795876 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9b96d50f333d399bfe2cd143ea78aadb1c68e344d0f3c4b9e8933659e5f38e12"} err="failed to get container status \"9b96d50f333d399bfe2cd143ea78aadb1c68e344d0f3c4b9e8933659e5f38e12\": rpc error: code = NotFound desc = could not find container \"9b96d50f333d399bfe2cd143ea78aadb1c68e344d0f3c4b9e8933659e5f38e12\": container with ID starting with 9b96d50f333d399bfe2cd143ea78aadb1c68e344d0f3c4b9e8933659e5f38e12 not found: ID does not exist" Jan 20 18:11:38 crc kubenswrapper[4558]: I0120 18:11:38.795916 4558 scope.go:117] "RemoveContainer" containerID="1c9be81a1d6c6c1e1d522a23cd23d731473da8b07de763e31630d3e338acb1bf" Jan 20 18:11:38 crc kubenswrapper[4558]: E0120 18:11:38.796392 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1c9be81a1d6c6c1e1d522a23cd23d731473da8b07de763e31630d3e338acb1bf\": container with ID starting with 1c9be81a1d6c6c1e1d522a23cd23d731473da8b07de763e31630d3e338acb1bf not found: ID does not exist" containerID="1c9be81a1d6c6c1e1d522a23cd23d731473da8b07de763e31630d3e338acb1bf" Jan 20 18:11:38 crc kubenswrapper[4558]: I0120 18:11:38.796487 4558 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1c9be81a1d6c6c1e1d522a23cd23d731473da8b07de763e31630d3e338acb1bf"} err="failed to get container status \"1c9be81a1d6c6c1e1d522a23cd23d731473da8b07de763e31630d3e338acb1bf\": rpc error: code = NotFound desc = could not find container \"1c9be81a1d6c6c1e1d522a23cd23d731473da8b07de763e31630d3e338acb1bf\": container with ID starting with 1c9be81a1d6c6c1e1d522a23cd23d731473da8b07de763e31630d3e338acb1bf not found: ID does not exist" Jan 20 18:11:40 crc kubenswrapper[4558]: I0120 18:11:40.574386 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3737f1f8-bbaa-464b-825a-2d6080e8bc3c" path="/var/lib/kubelet/pods/3737f1f8-bbaa-464b-825a-2d6080e8bc3c/volumes" Jan 20 18:11:42 crc kubenswrapper[4558]: I0120 18:11:42.799036 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/custom-img-svc-edpm-compute-no-nodes-edpm-no-nodes-custom-npnmn"] Jan 20 18:11:42 crc kubenswrapper[4558]: E0120 18:11:42.799636 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3737f1f8-bbaa-464b-825a-2d6080e8bc3c" containerName="init" Jan 20 18:11:42 crc kubenswrapper[4558]: I0120 18:11:42.799648 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="3737f1f8-bbaa-464b-825a-2d6080e8bc3c" containerName="init" Jan 20 18:11:42 crc kubenswrapper[4558]: E0120 18:11:42.799656 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3737f1f8-bbaa-464b-825a-2d6080e8bc3c" containerName="dnsmasq-dns" Jan 20 18:11:42 crc kubenswrapper[4558]: I0120 18:11:42.799662 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="3737f1f8-bbaa-464b-825a-2d6080e8bc3c" containerName="dnsmasq-dns" Jan 20 18:11:42 crc kubenswrapper[4558]: I0120 18:11:42.799805 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="3737f1f8-bbaa-464b-825a-2d6080e8bc3c" containerName="dnsmasq-dns" Jan 20 18:11:42 crc kubenswrapper[4558]: I0120 18:11:42.800308 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/custom-img-svc-edpm-compute-no-nodes-edpm-no-nodes-custom-npnmn" Jan 20 18:11:42 crc kubenswrapper[4558]: I0120 18:11:42.802613 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"combined-ca-bundle" Jan 20 18:11:42 crc kubenswrapper[4558]: I0120 18:11:42.803833 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 18:11:42 crc kubenswrapper[4558]: I0120 18:11:42.804060 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 18:11:42 crc kubenswrapper[4558]: I0120 18:11:42.804086 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"edpm-no-nodes-custom-svc-dockercfg-5rjbh" Jan 20 18:11:42 crc kubenswrapper[4558]: I0120 18:11:42.804280 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-edpm-no-nodes-custom-svc" Jan 20 18:11:42 crc kubenswrapper[4558]: I0120 18:11:42.811185 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/custom-img-svc-edpm-compute-no-nodes-edpm-no-nodes-custom-npnmn"] Jan 20 18:11:42 crc kubenswrapper[4558]: I0120 18:11:42.901238 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/custom-img-svc-edpm-compute-no-nodes-edpm-no-nodes-custom-npnmn"] Jan 20 18:11:42 crc kubenswrapper[4558]: E0120 18:11:42.901851 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[custom-img-svc-combined-ca-bundle inventory kube-api-access-24sp7 ssh-key-edpm-no-nodes-custom-svc], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack-kuttl-tests/custom-img-svc-edpm-compute-no-nodes-edpm-no-nodes-custom-npnmn" podUID="c030d6fd-fcd4-4ad3-b4a5-f76d3836a3c9" Jan 20 18:11:42 crc kubenswrapper[4558]: I0120 18:11:42.948083 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-24sp7\" (UniqueName: \"kubernetes.io/projected/c030d6fd-fcd4-4ad3-b4a5-f76d3836a3c9-kube-api-access-24sp7\") pod \"custom-img-svc-edpm-compute-no-nodes-edpm-no-nodes-custom-npnmn\" (UID: \"c030d6fd-fcd4-4ad3-b4a5-f76d3836a3c9\") " pod="openstack-kuttl-tests/custom-img-svc-edpm-compute-no-nodes-edpm-no-nodes-custom-npnmn" Jan 20 18:11:42 crc kubenswrapper[4558]: I0120 18:11:42.948482 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-hztxx"] Jan 20 18:11:42 crc kubenswrapper[4558]: I0120 18:11:42.948489 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-edpm-no-nodes-custom-svc\" (UniqueName: \"kubernetes.io/secret/c030d6fd-fcd4-4ad3-b4a5-f76d3836a3c9-ssh-key-edpm-no-nodes-custom-svc\") pod \"custom-img-svc-edpm-compute-no-nodes-edpm-no-nodes-custom-npnmn\" (UID: \"c030d6fd-fcd4-4ad3-b4a5-f76d3836a3c9\") " pod="openstack-kuttl-tests/custom-img-svc-edpm-compute-no-nodes-edpm-no-nodes-custom-npnmn" Jan 20 18:11:42 crc kubenswrapper[4558]: I0120 18:11:42.948669 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c030d6fd-fcd4-4ad3-b4a5-f76d3836a3c9-inventory\") pod \"custom-img-svc-edpm-compute-no-nodes-edpm-no-nodes-custom-npnmn\" (UID: \"c030d6fd-fcd4-4ad3-b4a5-f76d3836a3c9\") " 
pod="openstack-kuttl-tests/custom-img-svc-edpm-compute-no-nodes-edpm-no-nodes-custom-npnmn" Jan 20 18:11:42 crc kubenswrapper[4558]: I0120 18:11:42.948723 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"custom-img-svc-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c030d6fd-fcd4-4ad3-b4a5-f76d3836a3c9-custom-img-svc-combined-ca-bundle\") pod \"custom-img-svc-edpm-compute-no-nodes-edpm-no-nodes-custom-npnmn\" (UID: \"c030d6fd-fcd4-4ad3-b4a5-f76d3836a3c9\") " pod="openstack-kuttl-tests/custom-img-svc-edpm-compute-no-nodes-edpm-no-nodes-custom-npnmn" Jan 20 18:11:42 crc kubenswrapper[4558]: I0120 18:11:42.949632 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-hztxx" Jan 20 18:11:42 crc kubenswrapper[4558]: I0120 18:11:42.958748 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-hztxx"] Jan 20 18:11:43 crc kubenswrapper[4558]: I0120 18:11:43.050312 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/831e8e0b-2fab-4b9b-a076-a47a7780e560-config\") pod \"dnsmasq-dnsmasq-84b9f45d47-hztxx\" (UID: \"831e8e0b-2fab-4b9b-a076-a47a7780e560\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-hztxx" Jan 20 18:11:43 crc kubenswrapper[4558]: I0120 18:11:43.050373 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-edpm-no-nodes-custom-svc\" (UniqueName: \"kubernetes.io/secret/c030d6fd-fcd4-4ad3-b4a5-f76d3836a3c9-ssh-key-edpm-no-nodes-custom-svc\") pod \"custom-img-svc-edpm-compute-no-nodes-edpm-no-nodes-custom-npnmn\" (UID: \"c030d6fd-fcd4-4ad3-b4a5-f76d3836a3c9\") " pod="openstack-kuttl-tests/custom-img-svc-edpm-compute-no-nodes-edpm-no-nodes-custom-npnmn" Jan 20 18:11:43 crc kubenswrapper[4558]: I0120 18:11:43.050406 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c030d6fd-fcd4-4ad3-b4a5-f76d3836a3c9-inventory\") pod \"custom-img-svc-edpm-compute-no-nodes-edpm-no-nodes-custom-npnmn\" (UID: \"c030d6fd-fcd4-4ad3-b4a5-f76d3836a3c9\") " pod="openstack-kuttl-tests/custom-img-svc-edpm-compute-no-nodes-edpm-no-nodes-custom-npnmn" Jan 20 18:11:43 crc kubenswrapper[4558]: E0120 18:11:43.050517 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/dataplanenodeset-edpm-no-nodes-custom-svc: secret "dataplanenodeset-edpm-no-nodes-custom-svc" not found Jan 20 18:11:43 crc kubenswrapper[4558]: I0120 18:11:43.050571 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"custom-img-svc-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c030d6fd-fcd4-4ad3-b4a5-f76d3836a3c9-custom-img-svc-combined-ca-bundle\") pod \"custom-img-svc-edpm-compute-no-nodes-edpm-no-nodes-custom-npnmn\" (UID: \"c030d6fd-fcd4-4ad3-b4a5-f76d3836a3c9\") " pod="openstack-kuttl-tests/custom-img-svc-edpm-compute-no-nodes-edpm-no-nodes-custom-npnmn" Jan 20 18:11:43 crc kubenswrapper[4558]: E0120 18:11:43.050585 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c030d6fd-fcd4-4ad3-b4a5-f76d3836a3c9-inventory podName:c030d6fd-fcd4-4ad3-b4a5-f76d3836a3c9 nodeName:}" failed. No retries permitted until 2026-01-20 18:11:43.550566678 +0000 UTC m=+5397.310904646 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "inventory" (UniqueName: "kubernetes.io/secret/c030d6fd-fcd4-4ad3-b4a5-f76d3836a3c9-inventory") pod "custom-img-svc-edpm-compute-no-nodes-edpm-no-nodes-custom-npnmn" (UID: "c030d6fd-fcd4-4ad3-b4a5-f76d3836a3c9") : secret "dataplanenodeset-edpm-no-nodes-custom-svc" not found Jan 20 18:11:43 crc kubenswrapper[4558]: I0120 18:11:43.050673 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/831e8e0b-2fab-4b9b-a076-a47a7780e560-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-84b9f45d47-hztxx\" (UID: \"831e8e0b-2fab-4b9b-a076-a47a7780e560\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-hztxx" Jan 20 18:11:43 crc kubenswrapper[4558]: I0120 18:11:43.050767 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-24sp7\" (UniqueName: \"kubernetes.io/projected/c030d6fd-fcd4-4ad3-b4a5-f76d3836a3c9-kube-api-access-24sp7\") pod \"custom-img-svc-edpm-compute-no-nodes-edpm-no-nodes-custom-npnmn\" (UID: \"c030d6fd-fcd4-4ad3-b4a5-f76d3836a3c9\") " pod="openstack-kuttl-tests/custom-img-svc-edpm-compute-no-nodes-edpm-no-nodes-custom-npnmn" Jan 20 18:11:43 crc kubenswrapper[4558]: I0120 18:11:43.050950 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bxbg5\" (UniqueName: \"kubernetes.io/projected/831e8e0b-2fab-4b9b-a076-a47a7780e560-kube-api-access-bxbg5\") pod \"dnsmasq-dnsmasq-84b9f45d47-hztxx\" (UID: \"831e8e0b-2fab-4b9b-a076-a47a7780e560\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-hztxx" Jan 20 18:11:43 crc kubenswrapper[4558]: E0120 18:11:43.053812 4558 projected.go:194] Error preparing data for projected volume kube-api-access-24sp7 for pod openstack-kuttl-tests/custom-img-svc-edpm-compute-no-nodes-edpm-no-nodes-custom-npnmn: failed to fetch token: serviceaccounts "edpm-no-nodes-custom-svc" not found Jan 20 18:11:43 crc kubenswrapper[4558]: E0120 18:11:43.053916 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/c030d6fd-fcd4-4ad3-b4a5-f76d3836a3c9-kube-api-access-24sp7 podName:c030d6fd-fcd4-4ad3-b4a5-f76d3836a3c9 nodeName:}" failed. No retries permitted until 2026-01-20 18:11:43.553888665 +0000 UTC m=+5397.314226631 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-24sp7" (UniqueName: "kubernetes.io/projected/c030d6fd-fcd4-4ad3-b4a5-f76d3836a3c9-kube-api-access-24sp7") pod "custom-img-svc-edpm-compute-no-nodes-edpm-no-nodes-custom-npnmn" (UID: "c030d6fd-fcd4-4ad3-b4a5-f76d3836a3c9") : failed to fetch token: serviceaccounts "edpm-no-nodes-custom-svc" not found Jan 20 18:11:43 crc kubenswrapper[4558]: I0120 18:11:43.057249 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"custom-img-svc-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c030d6fd-fcd4-4ad3-b4a5-f76d3836a3c9-custom-img-svc-combined-ca-bundle\") pod \"custom-img-svc-edpm-compute-no-nodes-edpm-no-nodes-custom-npnmn\" (UID: \"c030d6fd-fcd4-4ad3-b4a5-f76d3836a3c9\") " pod="openstack-kuttl-tests/custom-img-svc-edpm-compute-no-nodes-edpm-no-nodes-custom-npnmn" Jan 20 18:11:43 crc kubenswrapper[4558]: I0120 18:11:43.057259 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-edpm-no-nodes-custom-svc\" (UniqueName: \"kubernetes.io/secret/c030d6fd-fcd4-4ad3-b4a5-f76d3836a3c9-ssh-key-edpm-no-nodes-custom-svc\") pod \"custom-img-svc-edpm-compute-no-nodes-edpm-no-nodes-custom-npnmn\" (UID: \"c030d6fd-fcd4-4ad3-b4a5-f76d3836a3c9\") " pod="openstack-kuttl-tests/custom-img-svc-edpm-compute-no-nodes-edpm-no-nodes-custom-npnmn" Jan 20 18:11:43 crc kubenswrapper[4558]: I0120 18:11:43.152815 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/831e8e0b-2fab-4b9b-a076-a47a7780e560-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-84b9f45d47-hztxx\" (UID: \"831e8e0b-2fab-4b9b-a076-a47a7780e560\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-hztxx" Jan 20 18:11:43 crc kubenswrapper[4558]: I0120 18:11:43.152885 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bxbg5\" (UniqueName: \"kubernetes.io/projected/831e8e0b-2fab-4b9b-a076-a47a7780e560-kube-api-access-bxbg5\") pod \"dnsmasq-dnsmasq-84b9f45d47-hztxx\" (UID: \"831e8e0b-2fab-4b9b-a076-a47a7780e560\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-hztxx" Jan 20 18:11:43 crc kubenswrapper[4558]: I0120 18:11:43.152951 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/831e8e0b-2fab-4b9b-a076-a47a7780e560-config\") pod \"dnsmasq-dnsmasq-84b9f45d47-hztxx\" (UID: \"831e8e0b-2fab-4b9b-a076-a47a7780e560\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-hztxx" Jan 20 18:11:43 crc kubenswrapper[4558]: I0120 18:11:43.153642 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/831e8e0b-2fab-4b9b-a076-a47a7780e560-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-84b9f45d47-hztxx\" (UID: \"831e8e0b-2fab-4b9b-a076-a47a7780e560\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-hztxx" Jan 20 18:11:43 crc kubenswrapper[4558]: I0120 18:11:43.153655 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/831e8e0b-2fab-4b9b-a076-a47a7780e560-config\") pod \"dnsmasq-dnsmasq-84b9f45d47-hztxx\" (UID: \"831e8e0b-2fab-4b9b-a076-a47a7780e560\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-hztxx" Jan 20 18:11:43 crc kubenswrapper[4558]: I0120 18:11:43.168649 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bxbg5\" (UniqueName: 
\"kubernetes.io/projected/831e8e0b-2fab-4b9b-a076-a47a7780e560-kube-api-access-bxbg5\") pod \"dnsmasq-dnsmasq-84b9f45d47-hztxx\" (UID: \"831e8e0b-2fab-4b9b-a076-a47a7780e560\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-hztxx" Jan 20 18:11:43 crc kubenswrapper[4558]: I0120 18:11:43.262280 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-hztxx" Jan 20 18:11:43 crc kubenswrapper[4558]: I0120 18:11:43.560753 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c030d6fd-fcd4-4ad3-b4a5-f76d3836a3c9-inventory\") pod \"custom-img-svc-edpm-compute-no-nodes-edpm-no-nodes-custom-npnmn\" (UID: \"c030d6fd-fcd4-4ad3-b4a5-f76d3836a3c9\") " pod="openstack-kuttl-tests/custom-img-svc-edpm-compute-no-nodes-edpm-no-nodes-custom-npnmn" Jan 20 18:11:43 crc kubenswrapper[4558]: I0120 18:11:43.561114 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-24sp7\" (UniqueName: \"kubernetes.io/projected/c030d6fd-fcd4-4ad3-b4a5-f76d3836a3c9-kube-api-access-24sp7\") pod \"custom-img-svc-edpm-compute-no-nodes-edpm-no-nodes-custom-npnmn\" (UID: \"c030d6fd-fcd4-4ad3-b4a5-f76d3836a3c9\") " pod="openstack-kuttl-tests/custom-img-svc-edpm-compute-no-nodes-edpm-no-nodes-custom-npnmn" Jan 20 18:11:43 crc kubenswrapper[4558]: E0120 18:11:43.560960 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/dataplanenodeset-edpm-no-nodes-custom-svc: secret "dataplanenodeset-edpm-no-nodes-custom-svc" not found Jan 20 18:11:43 crc kubenswrapper[4558]: E0120 18:11:43.561222 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c030d6fd-fcd4-4ad3-b4a5-f76d3836a3c9-inventory podName:c030d6fd-fcd4-4ad3-b4a5-f76d3836a3c9 nodeName:}" failed. No retries permitted until 2026-01-20 18:11:44.561199409 +0000 UTC m=+5398.321537376 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "inventory" (UniqueName: "kubernetes.io/secret/c030d6fd-fcd4-4ad3-b4a5-f76d3836a3c9-inventory") pod "custom-img-svc-edpm-compute-no-nodes-edpm-no-nodes-custom-npnmn" (UID: "c030d6fd-fcd4-4ad3-b4a5-f76d3836a3c9") : secret "dataplanenodeset-edpm-no-nodes-custom-svc" not found Jan 20 18:11:43 crc kubenswrapper[4558]: E0120 18:11:43.564760 4558 projected.go:194] Error preparing data for projected volume kube-api-access-24sp7 for pod openstack-kuttl-tests/custom-img-svc-edpm-compute-no-nodes-edpm-no-nodes-custom-npnmn: failed to fetch token: serviceaccounts "edpm-no-nodes-custom-svc" not found Jan 20 18:11:43 crc kubenswrapper[4558]: E0120 18:11:43.564859 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/c030d6fd-fcd4-4ad3-b4a5-f76d3836a3c9-kube-api-access-24sp7 podName:c030d6fd-fcd4-4ad3-b4a5-f76d3836a3c9 nodeName:}" failed. No retries permitted until 2026-01-20 18:11:44.564840736 +0000 UTC m=+5398.325178703 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-24sp7" (UniqueName: "kubernetes.io/projected/c030d6fd-fcd4-4ad3-b4a5-f76d3836a3c9-kube-api-access-24sp7") pod "custom-img-svc-edpm-compute-no-nodes-edpm-no-nodes-custom-npnmn" (UID: "c030d6fd-fcd4-4ad3-b4a5-f76d3836a3c9") : failed to fetch token: serviceaccounts "edpm-no-nodes-custom-svc" not found Jan 20 18:11:43 crc kubenswrapper[4558]: I0120 18:11:43.645303 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-hztxx"] Jan 20 18:11:43 crc kubenswrapper[4558]: W0120 18:11:43.648211 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod831e8e0b_2fab_4b9b_a076_a47a7780e560.slice/crio-2c9a1e19db6375cfe9f8e766e43823395ee8f09b3a256d96571c7b0addf6cc8a WatchSource:0}: Error finding container 2c9a1e19db6375cfe9f8e766e43823395ee8f09b3a256d96571c7b0addf6cc8a: Status 404 returned error can't find the container with id 2c9a1e19db6375cfe9f8e766e43823395ee8f09b3a256d96571c7b0addf6cc8a Jan 20 18:11:43 crc kubenswrapper[4558]: I0120 18:11:43.792819 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-hztxx" event={"ID":"831e8e0b-2fab-4b9b-a076-a47a7780e560","Type":"ContainerStarted","Data":"20831afd2d3aa0b94005b2ad3e79fbd9964ee8d9e0a88db2273b49f07cb7f64a"} Jan 20 18:11:43 crc kubenswrapper[4558]: I0120 18:11:43.793122 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-hztxx" event={"ID":"831e8e0b-2fab-4b9b-a076-a47a7780e560","Type":"ContainerStarted","Data":"2c9a1e19db6375cfe9f8e766e43823395ee8f09b3a256d96571c7b0addf6cc8a"} Jan 20 18:11:43 crc kubenswrapper[4558]: I0120 18:11:43.793213 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/custom-img-svc-edpm-compute-no-nodes-edpm-no-nodes-custom-npnmn" Jan 20 18:11:43 crc kubenswrapper[4558]: I0120 18:11:43.853289 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/custom-img-svc-edpm-compute-no-nodes-edpm-no-nodes-custom-npnmn" Jan 20 18:11:43 crc kubenswrapper[4558]: I0120 18:11:43.969370 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"custom-img-svc-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c030d6fd-fcd4-4ad3-b4a5-f76d3836a3c9-custom-img-svc-combined-ca-bundle\") pod \"c030d6fd-fcd4-4ad3-b4a5-f76d3836a3c9\" (UID: \"c030d6fd-fcd4-4ad3-b4a5-f76d3836a3c9\") " Jan 20 18:11:43 crc kubenswrapper[4558]: I0120 18:11:43.969462 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-no-nodes-custom-svc\" (UniqueName: \"kubernetes.io/secret/c030d6fd-fcd4-4ad3-b4a5-f76d3836a3c9-ssh-key-edpm-no-nodes-custom-svc\") pod \"c030d6fd-fcd4-4ad3-b4a5-f76d3836a3c9\" (UID: \"c030d6fd-fcd4-4ad3-b4a5-f76d3836a3c9\") " Jan 20 18:11:43 crc kubenswrapper[4558]: I0120 18:11:43.974392 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c030d6fd-fcd4-4ad3-b4a5-f76d3836a3c9-ssh-key-edpm-no-nodes-custom-svc" (OuterVolumeSpecName: "ssh-key-edpm-no-nodes-custom-svc") pod "c030d6fd-fcd4-4ad3-b4a5-f76d3836a3c9" (UID: "c030d6fd-fcd4-4ad3-b4a5-f76d3836a3c9"). InnerVolumeSpecName "ssh-key-edpm-no-nodes-custom-svc". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:11:43 crc kubenswrapper[4558]: I0120 18:11:43.974481 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c030d6fd-fcd4-4ad3-b4a5-f76d3836a3c9-custom-img-svc-combined-ca-bundle" (OuterVolumeSpecName: "custom-img-svc-combined-ca-bundle") pod "c030d6fd-fcd4-4ad3-b4a5-f76d3836a3c9" (UID: "c030d6fd-fcd4-4ad3-b4a5-f76d3836a3c9"). InnerVolumeSpecName "custom-img-svc-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:11:44 crc kubenswrapper[4558]: I0120 18:11:44.071328 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-edpm-no-nodes-custom-svc\" (UniqueName: \"kubernetes.io/secret/c030d6fd-fcd4-4ad3-b4a5-f76d3836a3c9-ssh-key-edpm-no-nodes-custom-svc\") on node \"crc\" DevicePath \"\"" Jan 20 18:11:44 crc kubenswrapper[4558]: I0120 18:11:44.071362 4558 reconciler_common.go:293] "Volume detached for volume \"custom-img-svc-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c030d6fd-fcd4-4ad3-b4a5-f76d3836a3c9-custom-img-svc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:11:44 crc kubenswrapper[4558]: I0120 18:11:44.580866 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c030d6fd-fcd4-4ad3-b4a5-f76d3836a3c9-inventory\") pod \"custom-img-svc-edpm-compute-no-nodes-edpm-no-nodes-custom-npnmn\" (UID: \"c030d6fd-fcd4-4ad3-b4a5-f76d3836a3c9\") " pod="openstack-kuttl-tests/custom-img-svc-edpm-compute-no-nodes-edpm-no-nodes-custom-npnmn" Jan 20 18:11:44 crc kubenswrapper[4558]: I0120 18:11:44.580960 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-24sp7\" (UniqueName: \"kubernetes.io/projected/c030d6fd-fcd4-4ad3-b4a5-f76d3836a3c9-kube-api-access-24sp7\") pod \"custom-img-svc-edpm-compute-no-nodes-edpm-no-nodes-custom-npnmn\" (UID: \"c030d6fd-fcd4-4ad3-b4a5-f76d3836a3c9\") " pod="openstack-kuttl-tests/custom-img-svc-edpm-compute-no-nodes-edpm-no-nodes-custom-npnmn" Jan 20 18:11:44 crc kubenswrapper[4558]: E0120 18:11:44.581039 4558 secret.go:188] Couldn't get secret openstack-kuttl-tests/dataplanenodeset-edpm-no-nodes-custom-svc: secret "dataplanenodeset-edpm-no-nodes-custom-svc" not found Jan 20 18:11:44 crc kubenswrapper[4558]: E0120 18:11:44.581115 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c030d6fd-fcd4-4ad3-b4a5-f76d3836a3c9-inventory podName:c030d6fd-fcd4-4ad3-b4a5-f76d3836a3c9 nodeName:}" failed. No retries permitted until 2026-01-20 18:11:46.581096769 +0000 UTC m=+5400.341434736 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "inventory" (UniqueName: "kubernetes.io/secret/c030d6fd-fcd4-4ad3-b4a5-f76d3836a3c9-inventory") pod "custom-img-svc-edpm-compute-no-nodes-edpm-no-nodes-custom-npnmn" (UID: "c030d6fd-fcd4-4ad3-b4a5-f76d3836a3c9") : secret "dataplanenodeset-edpm-no-nodes-custom-svc" not found Jan 20 18:11:44 crc kubenswrapper[4558]: E0120 18:11:44.583686 4558 projected.go:194] Error preparing data for projected volume kube-api-access-24sp7 for pod openstack-kuttl-tests/custom-img-svc-edpm-compute-no-nodes-edpm-no-nodes-custom-npnmn: failed to fetch token: serviceaccounts "edpm-no-nodes-custom-svc" not found Jan 20 18:11:44 crc kubenswrapper[4558]: E0120 18:11:44.584042 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/c030d6fd-fcd4-4ad3-b4a5-f76d3836a3c9-kube-api-access-24sp7 podName:c030d6fd-fcd4-4ad3-b4a5-f76d3836a3c9 nodeName:}" failed. No retries permitted until 2026-01-20 18:11:46.584032428 +0000 UTC m=+5400.344370395 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-24sp7" (UniqueName: "kubernetes.io/projected/c030d6fd-fcd4-4ad3-b4a5-f76d3836a3c9-kube-api-access-24sp7") pod "custom-img-svc-edpm-compute-no-nodes-edpm-no-nodes-custom-npnmn" (UID: "c030d6fd-fcd4-4ad3-b4a5-f76d3836a3c9") : failed to fetch token: serviceaccounts "edpm-no-nodes-custom-svc" not found Jan 20 18:11:44 crc kubenswrapper[4558]: I0120 18:11:44.801764 4558 generic.go:334] "Generic (PLEG): container finished" podID="831e8e0b-2fab-4b9b-a076-a47a7780e560" containerID="20831afd2d3aa0b94005b2ad3e79fbd9964ee8d9e0a88db2273b49f07cb7f64a" exitCode=0 Jan 20 18:11:44 crc kubenswrapper[4558]: I0120 18:11:44.801844 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/custom-img-svc-edpm-compute-no-nodes-edpm-no-nodes-custom-npnmn" Jan 20 18:11:44 crc kubenswrapper[4558]: I0120 18:11:44.801943 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-hztxx" event={"ID":"831e8e0b-2fab-4b9b-a076-a47a7780e560","Type":"ContainerDied","Data":"20831afd2d3aa0b94005b2ad3e79fbd9964ee8d9e0a88db2273b49f07cb7f64a"} Jan 20 18:11:44 crc kubenswrapper[4558]: I0120 18:11:44.852891 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/custom-img-svc-edpm-compute-no-nodes-edpm-no-nodes-custom-npnmn"] Jan 20 18:11:44 crc kubenswrapper[4558]: I0120 18:11:44.856984 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/custom-img-svc-edpm-compute-no-nodes-edpm-no-nodes-custom-npnmn"] Jan 20 18:11:44 crc kubenswrapper[4558]: I0120 18:11:44.900508 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-24sp7\" (UniqueName: \"kubernetes.io/projected/c030d6fd-fcd4-4ad3-b4a5-f76d3836a3c9-kube-api-access-24sp7\") on node \"crc\" DevicePath \"\"" Jan 20 18:11:44 crc kubenswrapper[4558]: I0120 18:11:44.900534 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c030d6fd-fcd4-4ad3-b4a5-f76d3836a3c9-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:11:45 crc kubenswrapper[4558]: I0120 18:11:45.810813 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-hztxx" event={"ID":"831e8e0b-2fab-4b9b-a076-a47a7780e560","Type":"ContainerStarted","Data":"a34546c7d1dc7b19128ce04d64666cc8ba946a0a6a9126710375a99ba48803f7"} Jan 20 18:11:45 crc kubenswrapper[4558]: I0120 18:11:45.811185 
4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-hztxx" Jan 20 18:11:45 crc kubenswrapper[4558]: I0120 18:11:45.825637 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-hztxx" podStartSLOduration=3.825619247 podStartE2EDuration="3.825619247s" podCreationTimestamp="2026-01-20 18:11:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:11:45.822815035 +0000 UTC m=+5399.583153002" watchObservedRunningTime="2026-01-20 18:11:45.825619247 +0000 UTC m=+5399.585957214" Jan 20 18:11:46 crc kubenswrapper[4558]: I0120 18:11:46.575522 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c030d6fd-fcd4-4ad3-b4a5-f76d3836a3c9" path="/var/lib/kubelet/pods/c030d6fd-fcd4-4ad3-b4a5-f76d3836a3c9/volumes" Jan 20 18:11:49 crc kubenswrapper[4558]: I0120 18:11:49.161493 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-8t8cr"] Jan 20 18:11:49 crc kubenswrapper[4558]: I0120 18:11:49.165515 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-8t8cr"] Jan 20 18:11:49 crc kubenswrapper[4558]: I0120 18:11:49.269920 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-mqf8g"] Jan 20 18:11:49 crc kubenswrapper[4558]: I0120 18:11:49.270880 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-mqf8g" Jan 20 18:11:49 crc kubenswrapper[4558]: I0120 18:11:49.277512 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Jan 20 18:11:49 crc kubenswrapper[4558]: I0120 18:11:49.277966 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Jan 20 18:11:49 crc kubenswrapper[4558]: I0120 18:11:49.278099 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Jan 20 18:11:49 crc kubenswrapper[4558]: I0120 18:11:49.279663 4558 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-k9ht8" Jan 20 18:11:49 crc kubenswrapper[4558]: I0120 18:11:49.284738 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-mqf8g"] Jan 20 18:11:49 crc kubenswrapper[4558]: I0120 18:11:49.465395 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/9cf10d69-faf3-4190-aebd-be5a55950f2c-node-mnt\") pod \"crc-storage-crc-mqf8g\" (UID: \"9cf10d69-faf3-4190-aebd-be5a55950f2c\") " pod="crc-storage/crc-storage-crc-mqf8g" Jan 20 18:11:49 crc kubenswrapper[4558]: I0120 18:11:49.465470 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-krhwg\" (UniqueName: \"kubernetes.io/projected/9cf10d69-faf3-4190-aebd-be5a55950f2c-kube-api-access-krhwg\") pod \"crc-storage-crc-mqf8g\" (UID: \"9cf10d69-faf3-4190-aebd-be5a55950f2c\") " pod="crc-storage/crc-storage-crc-mqf8g" Jan 20 18:11:49 crc kubenswrapper[4558]: I0120 18:11:49.466031 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/9cf10d69-faf3-4190-aebd-be5a55950f2c-crc-storage\") pod \"crc-storage-crc-mqf8g\" (UID: 
\"9cf10d69-faf3-4190-aebd-be5a55950f2c\") " pod="crc-storage/crc-storage-crc-mqf8g" Jan 20 18:11:49 crc kubenswrapper[4558]: I0120 18:11:49.567991 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/9cf10d69-faf3-4190-aebd-be5a55950f2c-node-mnt\") pod \"crc-storage-crc-mqf8g\" (UID: \"9cf10d69-faf3-4190-aebd-be5a55950f2c\") " pod="crc-storage/crc-storage-crc-mqf8g" Jan 20 18:11:49 crc kubenswrapper[4558]: I0120 18:11:49.568081 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-krhwg\" (UniqueName: \"kubernetes.io/projected/9cf10d69-faf3-4190-aebd-be5a55950f2c-kube-api-access-krhwg\") pod \"crc-storage-crc-mqf8g\" (UID: \"9cf10d69-faf3-4190-aebd-be5a55950f2c\") " pod="crc-storage/crc-storage-crc-mqf8g" Jan 20 18:11:49 crc kubenswrapper[4558]: I0120 18:11:49.568122 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/9cf10d69-faf3-4190-aebd-be5a55950f2c-crc-storage\") pod \"crc-storage-crc-mqf8g\" (UID: \"9cf10d69-faf3-4190-aebd-be5a55950f2c\") " pod="crc-storage/crc-storage-crc-mqf8g" Jan 20 18:11:49 crc kubenswrapper[4558]: I0120 18:11:49.568376 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/9cf10d69-faf3-4190-aebd-be5a55950f2c-node-mnt\") pod \"crc-storage-crc-mqf8g\" (UID: \"9cf10d69-faf3-4190-aebd-be5a55950f2c\") " pod="crc-storage/crc-storage-crc-mqf8g" Jan 20 18:11:49 crc kubenswrapper[4558]: I0120 18:11:49.569488 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/9cf10d69-faf3-4190-aebd-be5a55950f2c-crc-storage\") pod \"crc-storage-crc-mqf8g\" (UID: \"9cf10d69-faf3-4190-aebd-be5a55950f2c\") " pod="crc-storage/crc-storage-crc-mqf8g" Jan 20 18:11:49 crc kubenswrapper[4558]: I0120 18:11:49.586689 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-krhwg\" (UniqueName: \"kubernetes.io/projected/9cf10d69-faf3-4190-aebd-be5a55950f2c-kube-api-access-krhwg\") pod \"crc-storage-crc-mqf8g\" (UID: \"9cf10d69-faf3-4190-aebd-be5a55950f2c\") " pod="crc-storage/crc-storage-crc-mqf8g" Jan 20 18:11:49 crc kubenswrapper[4558]: I0120 18:11:49.587681 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-mqf8g" Jan 20 18:11:49 crc kubenswrapper[4558]: I0120 18:11:49.972859 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-mqf8g"] Jan 20 18:11:50 crc kubenswrapper[4558]: I0120 18:11:50.575798 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="223e5b07-c7fa-4dcf-ab10-62ca9ed78541" path="/var/lib/kubelet/pods/223e5b07-c7fa-4dcf-ab10-62ca9ed78541/volumes" Jan 20 18:11:50 crc kubenswrapper[4558]: I0120 18:11:50.857607 4558 generic.go:334] "Generic (PLEG): container finished" podID="9cf10d69-faf3-4190-aebd-be5a55950f2c" containerID="21d4ed8c33a665f6312c036258ad016d79469f47f0de29d2d5704295388e7bfe" exitCode=0 Jan 20 18:11:50 crc kubenswrapper[4558]: I0120 18:11:50.857661 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-mqf8g" event={"ID":"9cf10d69-faf3-4190-aebd-be5a55950f2c","Type":"ContainerDied","Data":"21d4ed8c33a665f6312c036258ad016d79469f47f0de29d2d5704295388e7bfe"} Jan 20 18:11:50 crc kubenswrapper[4558]: I0120 18:11:50.857693 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-mqf8g" event={"ID":"9cf10d69-faf3-4190-aebd-be5a55950f2c","Type":"ContainerStarted","Data":"6e6a7f129f226535593c04fe15ffe86d5910656d9b5d01badf9594639c962ee0"} Jan 20 18:11:52 crc kubenswrapper[4558]: I0120 18:11:52.106719 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-mqf8g" Jan 20 18:11:52 crc kubenswrapper[4558]: I0120 18:11:52.205734 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-krhwg\" (UniqueName: \"kubernetes.io/projected/9cf10d69-faf3-4190-aebd-be5a55950f2c-kube-api-access-krhwg\") pod \"9cf10d69-faf3-4190-aebd-be5a55950f2c\" (UID: \"9cf10d69-faf3-4190-aebd-be5a55950f2c\") " Jan 20 18:11:52 crc kubenswrapper[4558]: I0120 18:11:52.205832 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/9cf10d69-faf3-4190-aebd-be5a55950f2c-node-mnt\") pod \"9cf10d69-faf3-4190-aebd-be5a55950f2c\" (UID: \"9cf10d69-faf3-4190-aebd-be5a55950f2c\") " Jan 20 18:11:52 crc kubenswrapper[4558]: I0120 18:11:52.205927 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/9cf10d69-faf3-4190-aebd-be5a55950f2c-crc-storage\") pod \"9cf10d69-faf3-4190-aebd-be5a55950f2c\" (UID: \"9cf10d69-faf3-4190-aebd-be5a55950f2c\") " Jan 20 18:11:52 crc kubenswrapper[4558]: I0120 18:11:52.205976 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9cf10d69-faf3-4190-aebd-be5a55950f2c-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "9cf10d69-faf3-4190-aebd-be5a55950f2c" (UID: "9cf10d69-faf3-4190-aebd-be5a55950f2c"). InnerVolumeSpecName "node-mnt". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 18:11:52 crc kubenswrapper[4558]: I0120 18:11:52.206232 4558 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/9cf10d69-faf3-4190-aebd-be5a55950f2c-node-mnt\") on node \"crc\" DevicePath \"\"" Jan 20 18:11:52 crc kubenswrapper[4558]: I0120 18:11:52.212135 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9cf10d69-faf3-4190-aebd-be5a55950f2c-kube-api-access-krhwg" (OuterVolumeSpecName: "kube-api-access-krhwg") pod "9cf10d69-faf3-4190-aebd-be5a55950f2c" (UID: "9cf10d69-faf3-4190-aebd-be5a55950f2c"). InnerVolumeSpecName "kube-api-access-krhwg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:11:52 crc kubenswrapper[4558]: I0120 18:11:52.224232 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9cf10d69-faf3-4190-aebd-be5a55950f2c-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "9cf10d69-faf3-4190-aebd-be5a55950f2c" (UID: "9cf10d69-faf3-4190-aebd-be5a55950f2c"). InnerVolumeSpecName "crc-storage". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:11:52 crc kubenswrapper[4558]: I0120 18:11:52.306773 4558 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/9cf10d69-faf3-4190-aebd-be5a55950f2c-crc-storage\") on node \"crc\" DevicePath \"\"" Jan 20 18:11:52 crc kubenswrapper[4558]: I0120 18:11:52.306803 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-krhwg\" (UniqueName: \"kubernetes.io/projected/9cf10d69-faf3-4190-aebd-be5a55950f2c-kube-api-access-krhwg\") on node \"crc\" DevicePath \"\"" Jan 20 18:11:52 crc kubenswrapper[4558]: I0120 18:11:52.874460 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-mqf8g" event={"ID":"9cf10d69-faf3-4190-aebd-be5a55950f2c","Type":"ContainerDied","Data":"6e6a7f129f226535593c04fe15ffe86d5910656d9b5d01badf9594639c962ee0"} Jan 20 18:11:52 crc kubenswrapper[4558]: I0120 18:11:52.874501 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-mqf8g" Jan 20 18:11:52 crc kubenswrapper[4558]: I0120 18:11:52.874526 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6e6a7f129f226535593c04fe15ffe86d5910656d9b5d01badf9594639c962ee0" Jan 20 18:11:53 crc kubenswrapper[4558]: I0120 18:11:53.264147 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-hztxx" Jan 20 18:11:53 crc kubenswrapper[4558]: I0120 18:11:53.302599 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-6fdb84cf7c-qm5q8"] Jan 20 18:11:53 crc kubenswrapper[4558]: I0120 18:11:53.302838 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6fdb84cf7c-qm5q8" podUID="9e263ba7-d72f-4914-8be8-4ae100d5985a" containerName="dnsmasq-dns" containerID="cri-o://77f44480c225d662c0e88c45687296c932630dd932c418005031192e81b85448" gracePeriod=10 Jan 20 18:11:53 crc kubenswrapper[4558]: I0120 18:11:53.655946 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6fdb84cf7c-qm5q8" Jan 20 18:11:53 crc kubenswrapper[4558]: I0120 18:11:53.828544 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9e263ba7-d72f-4914-8be8-4ae100d5985a-config\") pod \"9e263ba7-d72f-4914-8be8-4ae100d5985a\" (UID: \"9e263ba7-d72f-4914-8be8-4ae100d5985a\") " Jan 20 18:11:53 crc kubenswrapper[4558]: I0120 18:11:53.828612 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/9e263ba7-d72f-4914-8be8-4ae100d5985a-dnsmasq-svc\") pod \"9e263ba7-d72f-4914-8be8-4ae100d5985a\" (UID: \"9e263ba7-d72f-4914-8be8-4ae100d5985a\") " Jan 20 18:11:53 crc kubenswrapper[4558]: I0120 18:11:53.828678 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"edpm-no-nodes-custom-svc\" (UniqueName: \"kubernetes.io/configmap/9e263ba7-d72f-4914-8be8-4ae100d5985a-edpm-no-nodes-custom-svc\") pod \"9e263ba7-d72f-4914-8be8-4ae100d5985a\" (UID: \"9e263ba7-d72f-4914-8be8-4ae100d5985a\") " Jan 20 18:11:53 crc kubenswrapper[4558]: I0120 18:11:53.828970 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h9g74\" (UniqueName: \"kubernetes.io/projected/9e263ba7-d72f-4914-8be8-4ae100d5985a-kube-api-access-h9g74\") pod \"9e263ba7-d72f-4914-8be8-4ae100d5985a\" (UID: \"9e263ba7-d72f-4914-8be8-4ae100d5985a\") " Jan 20 18:11:53 crc kubenswrapper[4558]: I0120 18:11:53.843695 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9e263ba7-d72f-4914-8be8-4ae100d5985a-kube-api-access-h9g74" (OuterVolumeSpecName: "kube-api-access-h9g74") pod "9e263ba7-d72f-4914-8be8-4ae100d5985a" (UID: "9e263ba7-d72f-4914-8be8-4ae100d5985a"). InnerVolumeSpecName "kube-api-access-h9g74". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:11:53 crc kubenswrapper[4558]: I0120 18:11:53.861064 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9e263ba7-d72f-4914-8be8-4ae100d5985a-dnsmasq-svc" (OuterVolumeSpecName: "dnsmasq-svc") pod "9e263ba7-d72f-4914-8be8-4ae100d5985a" (UID: "9e263ba7-d72f-4914-8be8-4ae100d5985a"). InnerVolumeSpecName "dnsmasq-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:11:53 crc kubenswrapper[4558]: I0120 18:11:53.861273 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9e263ba7-d72f-4914-8be8-4ae100d5985a-edpm-no-nodes-custom-svc" (OuterVolumeSpecName: "edpm-no-nodes-custom-svc") pod "9e263ba7-d72f-4914-8be8-4ae100d5985a" (UID: "9e263ba7-d72f-4914-8be8-4ae100d5985a"). InnerVolumeSpecName "edpm-no-nodes-custom-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:11:53 crc kubenswrapper[4558]: I0120 18:11:53.862068 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9e263ba7-d72f-4914-8be8-4ae100d5985a-config" (OuterVolumeSpecName: "config") pod "9e263ba7-d72f-4914-8be8-4ae100d5985a" (UID: "9e263ba7-d72f-4914-8be8-4ae100d5985a"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:11:53 crc kubenswrapper[4558]: I0120 18:11:53.883915 4558 generic.go:334] "Generic (PLEG): container finished" podID="9e263ba7-d72f-4914-8be8-4ae100d5985a" containerID="77f44480c225d662c0e88c45687296c932630dd932c418005031192e81b85448" exitCode=0 Jan 20 18:11:53 crc kubenswrapper[4558]: I0120 18:11:53.883964 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6fdb84cf7c-qm5q8" event={"ID":"9e263ba7-d72f-4914-8be8-4ae100d5985a","Type":"ContainerDied","Data":"77f44480c225d662c0e88c45687296c932630dd932c418005031192e81b85448"} Jan 20 18:11:53 crc kubenswrapper[4558]: I0120 18:11:53.883996 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6fdb84cf7c-qm5q8" event={"ID":"9e263ba7-d72f-4914-8be8-4ae100d5985a","Type":"ContainerDied","Data":"e6fdad7da1a994715d2a6fa30fcb59edb7f9f870c7ea58c56667940db1f162d7"} Jan 20 18:11:53 crc kubenswrapper[4558]: I0120 18:11:53.884014 4558 scope.go:117] "RemoveContainer" containerID="77f44480c225d662c0e88c45687296c932630dd932c418005031192e81b85448" Jan 20 18:11:53 crc kubenswrapper[4558]: I0120 18:11:53.884066 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-6fdb84cf7c-qm5q8" Jan 20 18:11:53 crc kubenswrapper[4558]: I0120 18:11:53.900324 4558 scope.go:117] "RemoveContainer" containerID="5b39759fd56ee416c42e990df16d89302f4e39a2c6db7be3c7ce7b71f25c6336" Jan 20 18:11:53 crc kubenswrapper[4558]: I0120 18:11:53.911479 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-6fdb84cf7c-qm5q8"] Jan 20 18:11:53 crc kubenswrapper[4558]: I0120 18:11:53.915889 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-6fdb84cf7c-qm5q8"] Jan 20 18:11:53 crc kubenswrapper[4558]: I0120 18:11:53.932468 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9e263ba7-d72f-4914-8be8-4ae100d5985a-config\") on node \"crc\" DevicePath \"\"" Jan 20 18:11:53 crc kubenswrapper[4558]: I0120 18:11:53.932492 4558 reconciler_common.go:293] "Volume detached for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/9e263ba7-d72f-4914-8be8-4ae100d5985a-dnsmasq-svc\") on node \"crc\" DevicePath \"\"" Jan 20 18:11:53 crc kubenswrapper[4558]: I0120 18:11:53.932509 4558 reconciler_common.go:293] "Volume detached for volume \"edpm-no-nodes-custom-svc\" (UniqueName: \"kubernetes.io/configmap/9e263ba7-d72f-4914-8be8-4ae100d5985a-edpm-no-nodes-custom-svc\") on node \"crc\" DevicePath \"\"" Jan 20 18:11:53 crc kubenswrapper[4558]: I0120 18:11:53.932522 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h9g74\" (UniqueName: \"kubernetes.io/projected/9e263ba7-d72f-4914-8be8-4ae100d5985a-kube-api-access-h9g74\") on node \"crc\" DevicePath \"\"" Jan 20 18:11:53 crc kubenswrapper[4558]: I0120 18:11:53.935872 4558 scope.go:117] "RemoveContainer" containerID="77f44480c225d662c0e88c45687296c932630dd932c418005031192e81b85448" Jan 20 18:11:53 crc kubenswrapper[4558]: E0120 18:11:53.936364 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"77f44480c225d662c0e88c45687296c932630dd932c418005031192e81b85448\": container with ID starting with 77f44480c225d662c0e88c45687296c932630dd932c418005031192e81b85448 not found: ID does not exist" 
containerID="77f44480c225d662c0e88c45687296c932630dd932c418005031192e81b85448" Jan 20 18:11:53 crc kubenswrapper[4558]: I0120 18:11:53.936430 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"77f44480c225d662c0e88c45687296c932630dd932c418005031192e81b85448"} err="failed to get container status \"77f44480c225d662c0e88c45687296c932630dd932c418005031192e81b85448\": rpc error: code = NotFound desc = could not find container \"77f44480c225d662c0e88c45687296c932630dd932c418005031192e81b85448\": container with ID starting with 77f44480c225d662c0e88c45687296c932630dd932c418005031192e81b85448 not found: ID does not exist" Jan 20 18:11:53 crc kubenswrapper[4558]: I0120 18:11:53.936465 4558 scope.go:117] "RemoveContainer" containerID="5b39759fd56ee416c42e990df16d89302f4e39a2c6db7be3c7ce7b71f25c6336" Jan 20 18:11:53 crc kubenswrapper[4558]: E0120 18:11:53.936811 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5b39759fd56ee416c42e990df16d89302f4e39a2c6db7be3c7ce7b71f25c6336\": container with ID starting with 5b39759fd56ee416c42e990df16d89302f4e39a2c6db7be3c7ce7b71f25c6336 not found: ID does not exist" containerID="5b39759fd56ee416c42e990df16d89302f4e39a2c6db7be3c7ce7b71f25c6336" Jan 20 18:11:53 crc kubenswrapper[4558]: I0120 18:11:53.936865 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5b39759fd56ee416c42e990df16d89302f4e39a2c6db7be3c7ce7b71f25c6336"} err="failed to get container status \"5b39759fd56ee416c42e990df16d89302f4e39a2c6db7be3c7ce7b71f25c6336\": rpc error: code = NotFound desc = could not find container \"5b39759fd56ee416c42e990df16d89302f4e39a2c6db7be3c7ce7b71f25c6336\": container with ID starting with 5b39759fd56ee416c42e990df16d89302f4e39a2c6db7be3c7ce7b71f25c6336 not found: ID does not exist" Jan 20 18:11:54 crc kubenswrapper[4558]: I0120 18:11:54.574701 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9e263ba7-d72f-4914-8be8-4ae100d5985a" path="/var/lib/kubelet/pods/9e263ba7-d72f-4914-8be8-4ae100d5985a/volumes" Jan 20 18:11:55 crc kubenswrapper[4558]: I0120 18:11:55.041443 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["crc-storage/crc-storage-crc-mqf8g"] Jan 20 18:11:55 crc kubenswrapper[4558]: I0120 18:11:55.045279 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["crc-storage/crc-storage-crc-mqf8g"] Jan 20 18:11:55 crc kubenswrapper[4558]: I0120 18:11:55.156736 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["crc-storage/crc-storage-crc-28ghw"] Jan 20 18:11:55 crc kubenswrapper[4558]: E0120 18:11:55.157111 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e263ba7-d72f-4914-8be8-4ae100d5985a" containerName="init" Jan 20 18:11:55 crc kubenswrapper[4558]: I0120 18:11:55.157132 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e263ba7-d72f-4914-8be8-4ae100d5985a" containerName="init" Jan 20 18:11:55 crc kubenswrapper[4558]: E0120 18:11:55.157193 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e263ba7-d72f-4914-8be8-4ae100d5985a" containerName="dnsmasq-dns" Jan 20 18:11:55 crc kubenswrapper[4558]: I0120 18:11:55.157201 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e263ba7-d72f-4914-8be8-4ae100d5985a" containerName="dnsmasq-dns" Jan 20 18:11:55 crc kubenswrapper[4558]: E0120 18:11:55.157221 4558 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="9cf10d69-faf3-4190-aebd-be5a55950f2c" containerName="storage" Jan 20 18:11:55 crc kubenswrapper[4558]: I0120 18:11:55.157229 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9cf10d69-faf3-4190-aebd-be5a55950f2c" containerName="storage" Jan 20 18:11:55 crc kubenswrapper[4558]: I0120 18:11:55.157418 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e263ba7-d72f-4914-8be8-4ae100d5985a" containerName="dnsmasq-dns" Jan 20 18:11:55 crc kubenswrapper[4558]: I0120 18:11:55.157439 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="9cf10d69-faf3-4190-aebd-be5a55950f2c" containerName="storage" Jan 20 18:11:55 crc kubenswrapper[4558]: I0120 18:11:55.158062 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-28ghw" Jan 20 18:11:55 crc kubenswrapper[4558]: I0120 18:11:55.162946 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"crc-storage" Jan 20 18:11:55 crc kubenswrapper[4558]: I0120 18:11:55.163090 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"openshift-service-ca.crt" Jan 20 18:11:55 crc kubenswrapper[4558]: I0120 18:11:55.163160 4558 reflector.go:368] Caches populated for *v1.Secret from object-"crc-storage"/"crc-storage-dockercfg-k9ht8" Jan 20 18:11:55 crc kubenswrapper[4558]: I0120 18:11:55.163299 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"crc-storage"/"kube-root-ca.crt" Jan 20 18:11:55 crc kubenswrapper[4558]: I0120 18:11:55.167410 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-28ghw"] Jan 20 18:11:55 crc kubenswrapper[4558]: I0120 18:11:55.254269 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/48737c33-de97-4513-9fda-ef3cb7e17235-node-mnt\") pod \"crc-storage-crc-28ghw\" (UID: \"48737c33-de97-4513-9fda-ef3cb7e17235\") " pod="crc-storage/crc-storage-crc-28ghw" Jan 20 18:11:55 crc kubenswrapper[4558]: I0120 18:11:55.254461 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/48737c33-de97-4513-9fda-ef3cb7e17235-crc-storage\") pod \"crc-storage-crc-28ghw\" (UID: \"48737c33-de97-4513-9fda-ef3cb7e17235\") " pod="crc-storage/crc-storage-crc-28ghw" Jan 20 18:11:55 crc kubenswrapper[4558]: I0120 18:11:55.254509 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hvxm4\" (UniqueName: \"kubernetes.io/projected/48737c33-de97-4513-9fda-ef3cb7e17235-kube-api-access-hvxm4\") pod \"crc-storage-crc-28ghw\" (UID: \"48737c33-de97-4513-9fda-ef3cb7e17235\") " pod="crc-storage/crc-storage-crc-28ghw" Jan 20 18:11:55 crc kubenswrapper[4558]: I0120 18:11:55.356871 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/48737c33-de97-4513-9fda-ef3cb7e17235-crc-storage\") pod \"crc-storage-crc-28ghw\" (UID: \"48737c33-de97-4513-9fda-ef3cb7e17235\") " pod="crc-storage/crc-storage-crc-28ghw" Jan 20 18:11:55 crc kubenswrapper[4558]: I0120 18:11:55.357082 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hvxm4\" (UniqueName: \"kubernetes.io/projected/48737c33-de97-4513-9fda-ef3cb7e17235-kube-api-access-hvxm4\") pod \"crc-storage-crc-28ghw\" (UID: 
\"48737c33-de97-4513-9fda-ef3cb7e17235\") " pod="crc-storage/crc-storage-crc-28ghw" Jan 20 18:11:55 crc kubenswrapper[4558]: I0120 18:11:55.357241 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/48737c33-de97-4513-9fda-ef3cb7e17235-node-mnt\") pod \"crc-storage-crc-28ghw\" (UID: \"48737c33-de97-4513-9fda-ef3cb7e17235\") " pod="crc-storage/crc-storage-crc-28ghw" Jan 20 18:11:55 crc kubenswrapper[4558]: I0120 18:11:55.357539 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/48737c33-de97-4513-9fda-ef3cb7e17235-node-mnt\") pod \"crc-storage-crc-28ghw\" (UID: \"48737c33-de97-4513-9fda-ef3cb7e17235\") " pod="crc-storage/crc-storage-crc-28ghw" Jan 20 18:11:55 crc kubenswrapper[4558]: I0120 18:11:55.358110 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/48737c33-de97-4513-9fda-ef3cb7e17235-crc-storage\") pod \"crc-storage-crc-28ghw\" (UID: \"48737c33-de97-4513-9fda-ef3cb7e17235\") " pod="crc-storage/crc-storage-crc-28ghw" Jan 20 18:11:55 crc kubenswrapper[4558]: I0120 18:11:55.374299 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hvxm4\" (UniqueName: \"kubernetes.io/projected/48737c33-de97-4513-9fda-ef3cb7e17235-kube-api-access-hvxm4\") pod \"crc-storage-crc-28ghw\" (UID: \"48737c33-de97-4513-9fda-ef3cb7e17235\") " pod="crc-storage/crc-storage-crc-28ghw" Jan 20 18:11:55 crc kubenswrapper[4558]: I0120 18:11:55.475889 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="crc-storage/crc-storage-crc-28ghw" Jan 20 18:11:55 crc kubenswrapper[4558]: I0120 18:11:55.849607 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["crc-storage/crc-storage-crc-28ghw"] Jan 20 18:11:55 crc kubenswrapper[4558]: I0120 18:11:55.905420 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-28ghw" event={"ID":"48737c33-de97-4513-9fda-ef3cb7e17235","Type":"ContainerStarted","Data":"fc54732be546b63e48ee38e903df3d74fc77653b67c7c34f322d240853479e6f"} Jan 20 18:11:56 crc kubenswrapper[4558]: I0120 18:11:56.576553 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9cf10d69-faf3-4190-aebd-be5a55950f2c" path="/var/lib/kubelet/pods/9cf10d69-faf3-4190-aebd-be5a55950f2c/volumes" Jan 20 18:11:56 crc kubenswrapper[4558]: I0120 18:11:56.916975 4558 generic.go:334] "Generic (PLEG): container finished" podID="48737c33-de97-4513-9fda-ef3cb7e17235" containerID="288f1a12f0714f7ecfbcff8f3ff7ef4e737a927dd6674579623fa0b57234bfb7" exitCode=0 Jan 20 18:11:56 crc kubenswrapper[4558]: I0120 18:11:56.917038 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-28ghw" event={"ID":"48737c33-de97-4513-9fda-ef3cb7e17235","Type":"ContainerDied","Data":"288f1a12f0714f7ecfbcff8f3ff7ef4e737a927dd6674579623fa0b57234bfb7"} Jan 20 18:11:57 crc kubenswrapper[4558]: I0120 18:11:57.330134 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 18:11:57 crc kubenswrapper[4558]: I0120 18:11:57.330220 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" 
podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 18:11:57 crc kubenswrapper[4558]: I0120 18:11:57.330275 4558 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" Jan 20 18:11:57 crc kubenswrapper[4558]: I0120 18:11:57.330980 4558 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"fdbebed64bed48194784e96161b865cfbbfe6287c24bb11eb0fd195beaf71fc7"} pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 20 18:11:57 crc kubenswrapper[4558]: I0120 18:11:57.331039 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" containerID="cri-o://fdbebed64bed48194784e96161b865cfbbfe6287c24bb11eb0fd195beaf71fc7" gracePeriod=600 Jan 20 18:11:57 crc kubenswrapper[4558]: I0120 18:11:57.929203 4558 generic.go:334] "Generic (PLEG): container finished" podID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerID="fdbebed64bed48194784e96161b865cfbbfe6287c24bb11eb0fd195beaf71fc7" exitCode=0 Jan 20 18:11:57 crc kubenswrapper[4558]: I0120 18:11:57.929268 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerDied","Data":"fdbebed64bed48194784e96161b865cfbbfe6287c24bb11eb0fd195beaf71fc7"} Jan 20 18:11:57 crc kubenswrapper[4558]: I0120 18:11:57.929553 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerStarted","Data":"27314e1a8af2e8df8ecd2e7a06cbb630afbfecd8ca2338a1f199cf92f982f581"} Jan 20 18:11:57 crc kubenswrapper[4558]: I0120 18:11:57.929583 4558 scope.go:117] "RemoveContainer" containerID="6b02d31ec11fe5435af988e35303a6f66b5afa5ef952ce5f7cc4b3cd5f4d9497" Jan 20 18:11:58 crc kubenswrapper[4558]: I0120 18:11:58.153671 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-28ghw" Jan 20 18:11:58 crc kubenswrapper[4558]: I0120 18:11:58.302307 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/48737c33-de97-4513-9fda-ef3cb7e17235-node-mnt\") pod \"48737c33-de97-4513-9fda-ef3cb7e17235\" (UID: \"48737c33-de97-4513-9fda-ef3cb7e17235\") " Jan 20 18:11:58 crc kubenswrapper[4558]: I0120 18:11:58.302375 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/48737c33-de97-4513-9fda-ef3cb7e17235-crc-storage\") pod \"48737c33-de97-4513-9fda-ef3cb7e17235\" (UID: \"48737c33-de97-4513-9fda-ef3cb7e17235\") " Jan 20 18:11:58 crc kubenswrapper[4558]: I0120 18:11:58.302421 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/48737c33-de97-4513-9fda-ef3cb7e17235-node-mnt" (OuterVolumeSpecName: "node-mnt") pod "48737c33-de97-4513-9fda-ef3cb7e17235" (UID: "48737c33-de97-4513-9fda-ef3cb7e17235"). InnerVolumeSpecName "node-mnt". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 20 18:11:58 crc kubenswrapper[4558]: I0120 18:11:58.302445 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hvxm4\" (UniqueName: \"kubernetes.io/projected/48737c33-de97-4513-9fda-ef3cb7e17235-kube-api-access-hvxm4\") pod \"48737c33-de97-4513-9fda-ef3cb7e17235\" (UID: \"48737c33-de97-4513-9fda-ef3cb7e17235\") " Jan 20 18:11:58 crc kubenswrapper[4558]: I0120 18:11:58.303137 4558 reconciler_common.go:293] "Volume detached for volume \"node-mnt\" (UniqueName: \"kubernetes.io/host-path/48737c33-de97-4513-9fda-ef3cb7e17235-node-mnt\") on node \"crc\" DevicePath \"\"" Jan 20 18:11:58 crc kubenswrapper[4558]: I0120 18:11:58.308234 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/48737c33-de97-4513-9fda-ef3cb7e17235-kube-api-access-hvxm4" (OuterVolumeSpecName: "kube-api-access-hvxm4") pod "48737c33-de97-4513-9fda-ef3cb7e17235" (UID: "48737c33-de97-4513-9fda-ef3cb7e17235"). InnerVolumeSpecName "kube-api-access-hvxm4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:11:58 crc kubenswrapper[4558]: I0120 18:11:58.321414 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/48737c33-de97-4513-9fda-ef3cb7e17235-crc-storage" (OuterVolumeSpecName: "crc-storage") pod "48737c33-de97-4513-9fda-ef3cb7e17235" (UID: "48737c33-de97-4513-9fda-ef3cb7e17235"). InnerVolumeSpecName "crc-storage". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:11:58 crc kubenswrapper[4558]: I0120 18:11:58.405584 4558 reconciler_common.go:293] "Volume detached for volume \"crc-storage\" (UniqueName: \"kubernetes.io/configmap/48737c33-de97-4513-9fda-ef3cb7e17235-crc-storage\") on node \"crc\" DevicePath \"\"" Jan 20 18:11:58 crc kubenswrapper[4558]: I0120 18:11:58.405620 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hvxm4\" (UniqueName: \"kubernetes.io/projected/48737c33-de97-4513-9fda-ef3cb7e17235-kube-api-access-hvxm4\") on node \"crc\" DevicePath \"\"" Jan 20 18:11:58 crc kubenswrapper[4558]: I0120 18:11:58.940078 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="crc-storage/crc-storage-crc-28ghw" Jan 20 18:11:58 crc kubenswrapper[4558]: I0120 18:11:58.940263 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="crc-storage/crc-storage-crc-28ghw" event={"ID":"48737c33-de97-4513-9fda-ef3cb7e17235","Type":"ContainerDied","Data":"fc54732be546b63e48ee38e903df3d74fc77653b67c7c34f322d240853479e6f"} Jan 20 18:11:58 crc kubenswrapper[4558]: I0120 18:11:58.940536 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fc54732be546b63e48ee38e903df3d74fc77653b67c7c34f322d240853479e6f" Jan 20 18:12:00 crc kubenswrapper[4558]: I0120 18:12:00.943449 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-7zmjn"] Jan 20 18:12:00 crc kubenswrapper[4558]: E0120 18:12:00.944066 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48737c33-de97-4513-9fda-ef3cb7e17235" containerName="storage" Jan 20 18:12:00 crc kubenswrapper[4558]: I0120 18:12:00.944078 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="48737c33-de97-4513-9fda-ef3cb7e17235" containerName="storage" Jan 20 18:12:00 crc kubenswrapper[4558]: I0120 18:12:00.944255 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="48737c33-de97-4513-9fda-ef3cb7e17235" containerName="storage" Jan 20 18:12:00 crc kubenswrapper[4558]: I0120 18:12:00.944940 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-7zmjn" Jan 20 18:12:00 crc kubenswrapper[4558]: I0120 18:12:00.946459 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"edpm-compute-no-nodes" Jan 20 18:12:00 crc kubenswrapper[4558]: I0120 18:12:00.955051 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-7zmjn"] Jan 20 18:12:01 crc kubenswrapper[4558]: I0120 18:12:01.041384 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/configmap/94d7f76b-8cf8-4bf5-9844-526d8b79fffc-edpm-compute-no-nodes\") pod \"dnsmasq-dnsmasq-64864b6d57-7zmjn\" (UID: \"94d7f76b-8cf8-4bf5-9844-526d8b79fffc\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-7zmjn" Jan 20 18:12:01 crc kubenswrapper[4558]: I0120 18:12:01.041571 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/94d7f76b-8cf8-4bf5-9844-526d8b79fffc-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-64864b6d57-7zmjn\" (UID: \"94d7f76b-8cf8-4bf5-9844-526d8b79fffc\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-7zmjn" Jan 20 18:12:01 crc kubenswrapper[4558]: I0120 18:12:01.041611 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m2kqn\" (UniqueName: \"kubernetes.io/projected/94d7f76b-8cf8-4bf5-9844-526d8b79fffc-kube-api-access-m2kqn\") pod \"dnsmasq-dnsmasq-64864b6d57-7zmjn\" (UID: \"94d7f76b-8cf8-4bf5-9844-526d8b79fffc\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-7zmjn" Jan 20 18:12:01 crc kubenswrapper[4558]: I0120 18:12:01.041687 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/94d7f76b-8cf8-4bf5-9844-526d8b79fffc-config\") pod \"dnsmasq-dnsmasq-64864b6d57-7zmjn\" (UID: 
\"94d7f76b-8cf8-4bf5-9844-526d8b79fffc\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-7zmjn" Jan 20 18:12:01 crc kubenswrapper[4558]: I0120 18:12:01.143265 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/94d7f76b-8cf8-4bf5-9844-526d8b79fffc-config\") pod \"dnsmasq-dnsmasq-64864b6d57-7zmjn\" (UID: \"94d7f76b-8cf8-4bf5-9844-526d8b79fffc\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-7zmjn" Jan 20 18:12:01 crc kubenswrapper[4558]: I0120 18:12:01.143371 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/configmap/94d7f76b-8cf8-4bf5-9844-526d8b79fffc-edpm-compute-no-nodes\") pod \"dnsmasq-dnsmasq-64864b6d57-7zmjn\" (UID: \"94d7f76b-8cf8-4bf5-9844-526d8b79fffc\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-7zmjn" Jan 20 18:12:01 crc kubenswrapper[4558]: I0120 18:12:01.143529 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/94d7f76b-8cf8-4bf5-9844-526d8b79fffc-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-64864b6d57-7zmjn\" (UID: \"94d7f76b-8cf8-4bf5-9844-526d8b79fffc\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-7zmjn" Jan 20 18:12:01 crc kubenswrapper[4558]: I0120 18:12:01.143571 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m2kqn\" (UniqueName: \"kubernetes.io/projected/94d7f76b-8cf8-4bf5-9844-526d8b79fffc-kube-api-access-m2kqn\") pod \"dnsmasq-dnsmasq-64864b6d57-7zmjn\" (UID: \"94d7f76b-8cf8-4bf5-9844-526d8b79fffc\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-7zmjn" Jan 20 18:12:01 crc kubenswrapper[4558]: I0120 18:12:01.144764 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/configmap/94d7f76b-8cf8-4bf5-9844-526d8b79fffc-edpm-compute-no-nodes\") pod \"dnsmasq-dnsmasq-64864b6d57-7zmjn\" (UID: \"94d7f76b-8cf8-4bf5-9844-526d8b79fffc\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-7zmjn" Jan 20 18:12:01 crc kubenswrapper[4558]: I0120 18:12:01.144826 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/94d7f76b-8cf8-4bf5-9844-526d8b79fffc-config\") pod \"dnsmasq-dnsmasq-64864b6d57-7zmjn\" (UID: \"94d7f76b-8cf8-4bf5-9844-526d8b79fffc\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-7zmjn" Jan 20 18:12:01 crc kubenswrapper[4558]: I0120 18:12:01.145141 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/94d7f76b-8cf8-4bf5-9844-526d8b79fffc-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-64864b6d57-7zmjn\" (UID: \"94d7f76b-8cf8-4bf5-9844-526d8b79fffc\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-7zmjn" Jan 20 18:12:01 crc kubenswrapper[4558]: I0120 18:12:01.162208 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m2kqn\" (UniqueName: \"kubernetes.io/projected/94d7f76b-8cf8-4bf5-9844-526d8b79fffc-kube-api-access-m2kqn\") pod \"dnsmasq-dnsmasq-64864b6d57-7zmjn\" (UID: \"94d7f76b-8cf8-4bf5-9844-526d8b79fffc\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-7zmjn" Jan 20 18:12:01 crc kubenswrapper[4558]: I0120 18:12:01.270431 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-7zmjn" Jan 20 18:12:01 crc kubenswrapper[4558]: I0120 18:12:01.651801 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-7zmjn"] Jan 20 18:12:01 crc kubenswrapper[4558]: W0120 18:12:01.662281 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod94d7f76b_8cf8_4bf5_9844_526d8b79fffc.slice/crio-bed0a5643dc7bf1c9b36f9edcaa8de569b9e8a4154de37e1356843582f44345f WatchSource:0}: Error finding container bed0a5643dc7bf1c9b36f9edcaa8de569b9e8a4154de37e1356843582f44345f: Status 404 returned error can't find the container with id bed0a5643dc7bf1c9b36f9edcaa8de569b9e8a4154de37e1356843582f44345f Jan 20 18:12:01 crc kubenswrapper[4558]: I0120 18:12:01.985599 4558 generic.go:334] "Generic (PLEG): container finished" podID="94d7f76b-8cf8-4bf5-9844-526d8b79fffc" containerID="a619ea6cff0a3ab1da333a12817ece7ad694a4ca9fed5f3649a9a16f785ecc11" exitCode=0 Jan 20 18:12:01 crc kubenswrapper[4558]: I0120 18:12:01.985657 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-7zmjn" event={"ID":"94d7f76b-8cf8-4bf5-9844-526d8b79fffc","Type":"ContainerDied","Data":"a619ea6cff0a3ab1da333a12817ece7ad694a4ca9fed5f3649a9a16f785ecc11"} Jan 20 18:12:01 crc kubenswrapper[4558]: I0120 18:12:01.985716 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-7zmjn" event={"ID":"94d7f76b-8cf8-4bf5-9844-526d8b79fffc","Type":"ContainerStarted","Data":"bed0a5643dc7bf1c9b36f9edcaa8de569b9e8a4154de37e1356843582f44345f"} Jan 20 18:12:03 crc kubenswrapper[4558]: I0120 18:12:03.023869 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-7zmjn" event={"ID":"94d7f76b-8cf8-4bf5-9844-526d8b79fffc","Type":"ContainerStarted","Data":"3d38a10fc945687b4bd468e97671352cb6a302116bad58cee0f7b303a04292a8"} Jan 20 18:12:03 crc kubenswrapper[4558]: I0120 18:12:03.024217 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-7zmjn" Jan 20 18:12:03 crc kubenswrapper[4558]: I0120 18:12:03.044303 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-7zmjn" podStartSLOduration=3.04428303 podStartE2EDuration="3.04428303s" podCreationTimestamp="2026-01-20 18:12:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:12:03.038972435 +0000 UTC m=+5416.799310402" watchObservedRunningTime="2026-01-20 18:12:03.04428303 +0000 UTC m=+5416.804620997" Jan 20 18:12:11 crc kubenswrapper[4558]: I0120 18:12:11.271343 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-7zmjn" Jan 20 18:12:11 crc kubenswrapper[4558]: I0120 18:12:11.317654 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-hztxx"] Jan 20 18:12:11 crc kubenswrapper[4558]: I0120 18:12:11.317876 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-hztxx" podUID="831e8e0b-2fab-4b9b-a076-a47a7780e560" containerName="dnsmasq-dns" 
containerID="cri-o://a34546c7d1dc7b19128ce04d64666cc8ba946a0a6a9126710375a99ba48803f7" gracePeriod=10 Jan 20 18:12:11 crc kubenswrapper[4558]: I0120 18:12:11.680019 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-hztxx" Jan 20 18:12:11 crc kubenswrapper[4558]: I0120 18:12:11.817293 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/831e8e0b-2fab-4b9b-a076-a47a7780e560-config\") pod \"831e8e0b-2fab-4b9b-a076-a47a7780e560\" (UID: \"831e8e0b-2fab-4b9b-a076-a47a7780e560\") " Jan 20 18:12:11 crc kubenswrapper[4558]: I0120 18:12:11.817492 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bxbg5\" (UniqueName: \"kubernetes.io/projected/831e8e0b-2fab-4b9b-a076-a47a7780e560-kube-api-access-bxbg5\") pod \"831e8e0b-2fab-4b9b-a076-a47a7780e560\" (UID: \"831e8e0b-2fab-4b9b-a076-a47a7780e560\") " Jan 20 18:12:11 crc kubenswrapper[4558]: I0120 18:12:11.817535 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/831e8e0b-2fab-4b9b-a076-a47a7780e560-dnsmasq-svc\") pod \"831e8e0b-2fab-4b9b-a076-a47a7780e560\" (UID: \"831e8e0b-2fab-4b9b-a076-a47a7780e560\") " Jan 20 18:12:11 crc kubenswrapper[4558]: I0120 18:12:11.823463 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/831e8e0b-2fab-4b9b-a076-a47a7780e560-kube-api-access-bxbg5" (OuterVolumeSpecName: "kube-api-access-bxbg5") pod "831e8e0b-2fab-4b9b-a076-a47a7780e560" (UID: "831e8e0b-2fab-4b9b-a076-a47a7780e560"). InnerVolumeSpecName "kube-api-access-bxbg5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:12:11 crc kubenswrapper[4558]: I0120 18:12:11.848530 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/831e8e0b-2fab-4b9b-a076-a47a7780e560-dnsmasq-svc" (OuterVolumeSpecName: "dnsmasq-svc") pod "831e8e0b-2fab-4b9b-a076-a47a7780e560" (UID: "831e8e0b-2fab-4b9b-a076-a47a7780e560"). InnerVolumeSpecName "dnsmasq-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:12:11 crc kubenswrapper[4558]: I0120 18:12:11.850268 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/831e8e0b-2fab-4b9b-a076-a47a7780e560-config" (OuterVolumeSpecName: "config") pod "831e8e0b-2fab-4b9b-a076-a47a7780e560" (UID: "831e8e0b-2fab-4b9b-a076-a47a7780e560"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:12:11 crc kubenswrapper[4558]: I0120 18:12:11.919592 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bxbg5\" (UniqueName: \"kubernetes.io/projected/831e8e0b-2fab-4b9b-a076-a47a7780e560-kube-api-access-bxbg5\") on node \"crc\" DevicePath \"\"" Jan 20 18:12:11 crc kubenswrapper[4558]: I0120 18:12:11.919618 4558 reconciler_common.go:293] "Volume detached for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/831e8e0b-2fab-4b9b-a076-a47a7780e560-dnsmasq-svc\") on node \"crc\" DevicePath \"\"" Jan 20 18:12:11 crc kubenswrapper[4558]: I0120 18:12:11.919631 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/831e8e0b-2fab-4b9b-a076-a47a7780e560-config\") on node \"crc\" DevicePath \"\"" Jan 20 18:12:12 crc kubenswrapper[4558]: I0120 18:12:12.106234 4558 generic.go:334] "Generic (PLEG): container finished" podID="831e8e0b-2fab-4b9b-a076-a47a7780e560" containerID="a34546c7d1dc7b19128ce04d64666cc8ba946a0a6a9126710375a99ba48803f7" exitCode=0 Jan 20 18:12:12 crc kubenswrapper[4558]: I0120 18:12:12.106282 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-hztxx" event={"ID":"831e8e0b-2fab-4b9b-a076-a47a7780e560","Type":"ContainerDied","Data":"a34546c7d1dc7b19128ce04d64666cc8ba946a0a6a9126710375a99ba48803f7"} Jan 20 18:12:12 crc kubenswrapper[4558]: I0120 18:12:12.106316 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-hztxx" event={"ID":"831e8e0b-2fab-4b9b-a076-a47a7780e560","Type":"ContainerDied","Data":"2c9a1e19db6375cfe9f8e766e43823395ee8f09b3a256d96571c7b0addf6cc8a"} Jan 20 18:12:12 crc kubenswrapper[4558]: I0120 18:12:12.106338 4558 scope.go:117] "RemoveContainer" containerID="a34546c7d1dc7b19128ce04d64666cc8ba946a0a6a9126710375a99ba48803f7" Jan 20 18:12:12 crc kubenswrapper[4558]: I0120 18:12:12.106372 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-hztxx" Jan 20 18:12:12 crc kubenswrapper[4558]: I0120 18:12:12.130529 4558 scope.go:117] "RemoveContainer" containerID="20831afd2d3aa0b94005b2ad3e79fbd9964ee8d9e0a88db2273b49f07cb7f64a" Jan 20 18:12:12 crc kubenswrapper[4558]: I0120 18:12:12.135375 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-hztxx"] Jan 20 18:12:12 crc kubenswrapper[4558]: I0120 18:12:12.139377 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-hztxx"] Jan 20 18:12:12 crc kubenswrapper[4558]: I0120 18:12:12.149561 4558 scope.go:117] "RemoveContainer" containerID="a34546c7d1dc7b19128ce04d64666cc8ba946a0a6a9126710375a99ba48803f7" Jan 20 18:12:12 crc kubenswrapper[4558]: E0120 18:12:12.149945 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a34546c7d1dc7b19128ce04d64666cc8ba946a0a6a9126710375a99ba48803f7\": container with ID starting with a34546c7d1dc7b19128ce04d64666cc8ba946a0a6a9126710375a99ba48803f7 not found: ID does not exist" containerID="a34546c7d1dc7b19128ce04d64666cc8ba946a0a6a9126710375a99ba48803f7" Jan 20 18:12:12 crc kubenswrapper[4558]: I0120 18:12:12.150010 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a34546c7d1dc7b19128ce04d64666cc8ba946a0a6a9126710375a99ba48803f7"} err="failed to get container status \"a34546c7d1dc7b19128ce04d64666cc8ba946a0a6a9126710375a99ba48803f7\": rpc error: code = NotFound desc = could not find container \"a34546c7d1dc7b19128ce04d64666cc8ba946a0a6a9126710375a99ba48803f7\": container with ID starting with a34546c7d1dc7b19128ce04d64666cc8ba946a0a6a9126710375a99ba48803f7 not found: ID does not exist" Jan 20 18:12:12 crc kubenswrapper[4558]: I0120 18:12:12.150060 4558 scope.go:117] "RemoveContainer" containerID="20831afd2d3aa0b94005b2ad3e79fbd9964ee8d9e0a88db2273b49f07cb7f64a" Jan 20 18:12:12 crc kubenswrapper[4558]: E0120 18:12:12.150595 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"20831afd2d3aa0b94005b2ad3e79fbd9964ee8d9e0a88db2273b49f07cb7f64a\": container with ID starting with 20831afd2d3aa0b94005b2ad3e79fbd9964ee8d9e0a88db2273b49f07cb7f64a not found: ID does not exist" containerID="20831afd2d3aa0b94005b2ad3e79fbd9964ee8d9e0a88db2273b49f07cb7f64a" Jan 20 18:12:12 crc kubenswrapper[4558]: I0120 18:12:12.150626 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"20831afd2d3aa0b94005b2ad3e79fbd9964ee8d9e0a88db2273b49f07cb7f64a"} err="failed to get container status \"20831afd2d3aa0b94005b2ad3e79fbd9964ee8d9e0a88db2273b49f07cb7f64a\": rpc error: code = NotFound desc = could not find container \"20831afd2d3aa0b94005b2ad3e79fbd9964ee8d9e0a88db2273b49f07cb7f64a\": container with ID starting with 20831afd2d3aa0b94005b2ad3e79fbd9964ee8d9e0a88db2273b49f07cb7f64a not found: ID does not exist" Jan 20 18:12:12 crc kubenswrapper[4558]: I0120 18:12:12.576047 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="831e8e0b-2fab-4b9b-a076-a47a7780e560" path="/var/lib/kubelet/pods/831e8e0b-2fab-4b9b-a076-a47a7780e560/volumes" Jan 20 18:12:15 crc kubenswrapper[4558]: I0120 18:12:15.932695 4558 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesbpnpf"] Jan 20 18:12:15 crc kubenswrapper[4558]: E0120 18:12:15.933614 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="831e8e0b-2fab-4b9b-a076-a47a7780e560" containerName="init" Jan 20 18:12:15 crc kubenswrapper[4558]: I0120 18:12:15.933627 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="831e8e0b-2fab-4b9b-a076-a47a7780e560" containerName="init" Jan 20 18:12:15 crc kubenswrapper[4558]: E0120 18:12:15.933639 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="831e8e0b-2fab-4b9b-a076-a47a7780e560" containerName="dnsmasq-dns" Jan 20 18:12:15 crc kubenswrapper[4558]: I0120 18:12:15.933644 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="831e8e0b-2fab-4b9b-a076-a47a7780e560" containerName="dnsmasq-dns" Jan 20 18:12:15 crc kubenswrapper[4558]: I0120 18:12:15.933765 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="831e8e0b-2fab-4b9b-a076-a47a7780e560" containerName="dnsmasq-dns" Jan 20 18:12:15 crc kubenswrapper[4558]: I0120 18:12:15.934265 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesbpnpf" Jan 20 18:12:15 crc kubenswrapper[4558]: I0120 18:12:15.935833 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-edpm-compute-no-nodes" Jan 20 18:12:15 crc kubenswrapper[4558]: I0120 18:12:15.935917 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 18:12:15 crc kubenswrapper[4558]: I0120 18:12:15.935925 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 18:12:15 crc kubenswrapper[4558]: I0120 18:12:15.936422 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"combined-ca-bundle" Jan 20 18:12:15 crc kubenswrapper[4558]: I0120 18:12:15.937809 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"edpm-compute-no-nodes-dockercfg-58wrc" Jan 20 18:12:15 crc kubenswrapper[4558]: I0120 18:12:15.938386 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesbpnpf"] Jan 20 18:12:16 crc kubenswrapper[4558]: I0120 18:12:16.071130 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/499f663d-2a7c-4896-98bb-1e2b7a7b6543-ssh-key-edpm-compute-no-nodes\") pod \"failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesbpnpf\" (UID: \"499f663d-2a7c-4896-98bb-1e2b7a7b6543\") " pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesbpnpf" Jan 20 18:12:16 crc kubenswrapper[4558]: I0120 18:12:16.071190 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/499f663d-2a7c-4896-98bb-1e2b7a7b6543-inventory\") pod \"failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesbpnpf\" (UID: \"499f663d-2a7c-4896-98bb-1e2b7a7b6543\") " pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesbpnpf" Jan 20 18:12:16 crc kubenswrapper[4558]: I0120 18:12:16.071217 4558 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"failed-service-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/499f663d-2a7c-4896-98bb-1e2b7a7b6543-failed-service-combined-ca-bundle\") pod \"failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesbpnpf\" (UID: \"499f663d-2a7c-4896-98bb-1e2b7a7b6543\") " pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesbpnpf" Jan 20 18:12:16 crc kubenswrapper[4558]: I0120 18:12:16.071246 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mvknd\" (UniqueName: \"kubernetes.io/projected/499f663d-2a7c-4896-98bb-1e2b7a7b6543-kube-api-access-mvknd\") pod \"failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesbpnpf\" (UID: \"499f663d-2a7c-4896-98bb-1e2b7a7b6543\") " pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesbpnpf" Jan 20 18:12:16 crc kubenswrapper[4558]: I0120 18:12:16.172783 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/499f663d-2a7c-4896-98bb-1e2b7a7b6543-ssh-key-edpm-compute-no-nodes\") pod \"failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesbpnpf\" (UID: \"499f663d-2a7c-4896-98bb-1e2b7a7b6543\") " pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesbpnpf" Jan 20 18:12:16 crc kubenswrapper[4558]: I0120 18:12:16.172837 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/499f663d-2a7c-4896-98bb-1e2b7a7b6543-inventory\") pod \"failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesbpnpf\" (UID: \"499f663d-2a7c-4896-98bb-1e2b7a7b6543\") " pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesbpnpf" Jan 20 18:12:16 crc kubenswrapper[4558]: I0120 18:12:16.172865 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"failed-service-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/499f663d-2a7c-4896-98bb-1e2b7a7b6543-failed-service-combined-ca-bundle\") pod \"failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesbpnpf\" (UID: \"499f663d-2a7c-4896-98bb-1e2b7a7b6543\") " pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesbpnpf" Jan 20 18:12:16 crc kubenswrapper[4558]: I0120 18:12:16.172897 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mvknd\" (UniqueName: \"kubernetes.io/projected/499f663d-2a7c-4896-98bb-1e2b7a7b6543-kube-api-access-mvknd\") pod \"failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesbpnpf\" (UID: \"499f663d-2a7c-4896-98bb-1e2b7a7b6543\") " pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesbpnpf" Jan 20 18:12:16 crc kubenswrapper[4558]: I0120 18:12:16.179300 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/499f663d-2a7c-4896-98bb-1e2b7a7b6543-ssh-key-edpm-compute-no-nodes\") pod \"failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesbpnpf\" (UID: \"499f663d-2a7c-4896-98bb-1e2b7a7b6543\") " pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesbpnpf" Jan 20 18:12:16 crc kubenswrapper[4558]: I0120 18:12:16.179350 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"failed-service-combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/499f663d-2a7c-4896-98bb-1e2b7a7b6543-failed-service-combined-ca-bundle\") pod \"failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesbpnpf\" (UID: \"499f663d-2a7c-4896-98bb-1e2b7a7b6543\") " pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesbpnpf" Jan 20 18:12:16 crc kubenswrapper[4558]: I0120 18:12:16.181996 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/499f663d-2a7c-4896-98bb-1e2b7a7b6543-inventory\") pod \"failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesbpnpf\" (UID: \"499f663d-2a7c-4896-98bb-1e2b7a7b6543\") " pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesbpnpf" Jan 20 18:12:16 crc kubenswrapper[4558]: I0120 18:12:16.188535 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mvknd\" (UniqueName: \"kubernetes.io/projected/499f663d-2a7c-4896-98bb-1e2b7a7b6543-kube-api-access-mvknd\") pod \"failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesbpnpf\" (UID: \"499f663d-2a7c-4896-98bb-1e2b7a7b6543\") " pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesbpnpf" Jan 20 18:12:16 crc kubenswrapper[4558]: I0120 18:12:16.250448 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesbpnpf" Jan 20 18:12:16 crc kubenswrapper[4558]: I0120 18:12:16.642481 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesbpnpf"] Jan 20 18:12:16 crc kubenswrapper[4558]: W0120 18:12:16.652686 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod499f663d_2a7c_4896_98bb_1e2b7a7b6543.slice/crio-bb18668db1c7234445e9b3225140038bc8a72563d6e12a47a00ceedfec878e28 WatchSource:0}: Error finding container bb18668db1c7234445e9b3225140038bc8a72563d6e12a47a00ceedfec878e28: Status 404 returned error can't find the container with id bb18668db1c7234445e9b3225140038bc8a72563d6e12a47a00ceedfec878e28 Jan 20 18:12:17 crc kubenswrapper[4558]: I0120 18:12:17.158761 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesbpnpf" event={"ID":"499f663d-2a7c-4896-98bb-1e2b7a7b6543","Type":"ContainerStarted","Data":"bb18668db1c7234445e9b3225140038bc8a72563d6e12a47a00ceedfec878e28"} Jan 20 18:12:18 crc kubenswrapper[4558]: I0120 18:12:18.168805 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesbpnpf" event={"ID":"499f663d-2a7c-4896-98bb-1e2b7a7b6543","Type":"ContainerStarted","Data":"3c20ec4df8e9e6fdacf8f828e052680dd511bff57d9a705f1b292155372a87d6"} Jan 20 18:12:18 crc kubenswrapper[4558]: I0120 18:12:18.183980 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesbpnpf" podStartSLOduration=2.570568216 podStartE2EDuration="3.183954776s" podCreationTimestamp="2026-01-20 18:12:15 +0000 UTC" firstStartedPulling="2026-01-20 18:12:16.657286491 +0000 UTC m=+5430.417624458" lastFinishedPulling="2026-01-20 18:12:17.27067305 +0000 UTC m=+5431.031011018" observedRunningTime="2026-01-20 18:12:18.182841072 +0000 UTC m=+5431.943179030" 
watchObservedRunningTime="2026-01-20 18:12:18.183954776 +0000 UTC m=+5431.944292744" Jan 20 18:12:19 crc kubenswrapper[4558]: I0120 18:12:19.180319 4558 generic.go:334] "Generic (PLEG): container finished" podID="499f663d-2a7c-4896-98bb-1e2b7a7b6543" containerID="3c20ec4df8e9e6fdacf8f828e052680dd511bff57d9a705f1b292155372a87d6" exitCode=2 Jan 20 18:12:19 crc kubenswrapper[4558]: I0120 18:12:19.180450 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesbpnpf" event={"ID":"499f663d-2a7c-4896-98bb-1e2b7a7b6543","Type":"ContainerDied","Data":"3c20ec4df8e9e6fdacf8f828e052680dd511bff57d9a705f1b292155372a87d6"} Jan 20 18:12:20 crc kubenswrapper[4558]: I0120 18:12:20.435516 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesbpnpf" Jan 20 18:12:20 crc kubenswrapper[4558]: I0120 18:12:20.541030 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"failed-service-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/499f663d-2a7c-4896-98bb-1e2b7a7b6543-failed-service-combined-ca-bundle\") pod \"499f663d-2a7c-4896-98bb-1e2b7a7b6543\" (UID: \"499f663d-2a7c-4896-98bb-1e2b7a7b6543\") " Jan 20 18:12:20 crc kubenswrapper[4558]: I0120 18:12:20.541117 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/499f663d-2a7c-4896-98bb-1e2b7a7b6543-inventory\") pod \"499f663d-2a7c-4896-98bb-1e2b7a7b6543\" (UID: \"499f663d-2a7c-4896-98bb-1e2b7a7b6543\") " Jan 20 18:12:20 crc kubenswrapper[4558]: I0120 18:12:20.541182 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mvknd\" (UniqueName: \"kubernetes.io/projected/499f663d-2a7c-4896-98bb-1e2b7a7b6543-kube-api-access-mvknd\") pod \"499f663d-2a7c-4896-98bb-1e2b7a7b6543\" (UID: \"499f663d-2a7c-4896-98bb-1e2b7a7b6543\") " Jan 20 18:12:20 crc kubenswrapper[4558]: I0120 18:12:20.541258 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/499f663d-2a7c-4896-98bb-1e2b7a7b6543-ssh-key-edpm-compute-no-nodes\") pod \"499f663d-2a7c-4896-98bb-1e2b7a7b6543\" (UID: \"499f663d-2a7c-4896-98bb-1e2b7a7b6543\") " Jan 20 18:12:20 crc kubenswrapper[4558]: I0120 18:12:20.546263 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/499f663d-2a7c-4896-98bb-1e2b7a7b6543-failed-service-combined-ca-bundle" (OuterVolumeSpecName: "failed-service-combined-ca-bundle") pod "499f663d-2a7c-4896-98bb-1e2b7a7b6543" (UID: "499f663d-2a7c-4896-98bb-1e2b7a7b6543"). InnerVolumeSpecName "failed-service-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:12:20 crc kubenswrapper[4558]: I0120 18:12:20.546390 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/499f663d-2a7c-4896-98bb-1e2b7a7b6543-kube-api-access-mvknd" (OuterVolumeSpecName: "kube-api-access-mvknd") pod "499f663d-2a7c-4896-98bb-1e2b7a7b6543" (UID: "499f663d-2a7c-4896-98bb-1e2b7a7b6543"). InnerVolumeSpecName "kube-api-access-mvknd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:12:20 crc kubenswrapper[4558]: I0120 18:12:20.558663 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/499f663d-2a7c-4896-98bb-1e2b7a7b6543-inventory" (OuterVolumeSpecName: "inventory") pod "499f663d-2a7c-4896-98bb-1e2b7a7b6543" (UID: "499f663d-2a7c-4896-98bb-1e2b7a7b6543"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:12:20 crc kubenswrapper[4558]: I0120 18:12:20.559325 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/499f663d-2a7c-4896-98bb-1e2b7a7b6543-ssh-key-edpm-compute-no-nodes" (OuterVolumeSpecName: "ssh-key-edpm-compute-no-nodes") pod "499f663d-2a7c-4896-98bb-1e2b7a7b6543" (UID: "499f663d-2a7c-4896-98bb-1e2b7a7b6543"). InnerVolumeSpecName "ssh-key-edpm-compute-no-nodes". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:12:20 crc kubenswrapper[4558]: I0120 18:12:20.643100 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/499f663d-2a7c-4896-98bb-1e2b7a7b6543-ssh-key-edpm-compute-no-nodes\") on node \"crc\" DevicePath \"\"" Jan 20 18:12:20 crc kubenswrapper[4558]: I0120 18:12:20.643135 4558 reconciler_common.go:293] "Volume detached for volume \"failed-service-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/499f663d-2a7c-4896-98bb-1e2b7a7b6543-failed-service-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:12:20 crc kubenswrapper[4558]: I0120 18:12:20.643149 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/499f663d-2a7c-4896-98bb-1e2b7a7b6543-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:12:20 crc kubenswrapper[4558]: I0120 18:12:20.643179 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mvknd\" (UniqueName: \"kubernetes.io/projected/499f663d-2a7c-4896-98bb-1e2b7a7b6543-kube-api-access-mvknd\") on node \"crc\" DevicePath \"\"" Jan 20 18:12:21 crc kubenswrapper[4558]: I0120 18:12:21.199642 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesbpnpf" event={"ID":"499f663d-2a7c-4896-98bb-1e2b7a7b6543","Type":"ContainerDied","Data":"bb18668db1c7234445e9b3225140038bc8a72563d6e12a47a00ceedfec878e28"} Jan 20 18:12:21 crc kubenswrapper[4558]: I0120 18:12:21.199706 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bb18668db1c7234445e9b3225140038bc8a72563d6e12a47a00ceedfec878e28" Jan 20 18:12:21 crc kubenswrapper[4558]: I0120 18:12:21.199718 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesbpnpf" Jan 20 18:12:28 crc kubenswrapper[4558]: I0120 18:12:28.018881 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodessnx52"] Jan 20 18:12:28 crc kubenswrapper[4558]: E0120 18:12:28.019652 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="499f663d-2a7c-4896-98bb-1e2b7a7b6543" containerName="failed-service-edpm-compute-no-nodes-edpm-compute-no-nodes" Jan 20 18:12:28 crc kubenswrapper[4558]: I0120 18:12:28.019665 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="499f663d-2a7c-4896-98bb-1e2b7a7b6543" containerName="failed-service-edpm-compute-no-nodes-edpm-compute-no-nodes" Jan 20 18:12:28 crc kubenswrapper[4558]: I0120 18:12:28.019817 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="499f663d-2a7c-4896-98bb-1e2b7a7b6543" containerName="failed-service-edpm-compute-no-nodes-edpm-compute-no-nodes" Jan 20 18:12:28 crc kubenswrapper[4558]: I0120 18:12:28.020280 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodessnx52" Jan 20 18:12:28 crc kubenswrapper[4558]: I0120 18:12:28.022349 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-edpm-compute-no-nodes" Jan 20 18:12:28 crc kubenswrapper[4558]: I0120 18:12:28.022427 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"edpm-compute-no-nodes-dockercfg-58wrc" Jan 20 18:12:28 crc kubenswrapper[4558]: I0120 18:12:28.022600 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 18:12:28 crc kubenswrapper[4558]: I0120 18:12:28.022615 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"combined-ca-bundle" Jan 20 18:12:28 crc kubenswrapper[4558]: I0120 18:12:28.022699 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 18:12:28 crc kubenswrapper[4558]: I0120 18:12:28.032121 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodessnx52"] Jan 20 18:12:28 crc kubenswrapper[4558]: I0120 18:12:28.045354 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/91000729-c813-424c-8f04-1fac4466f6e3-inventory\") pod \"failed-service-edpm-compute-no-nodes-edpm-compute-no-nodessnx52\" (UID: \"91000729-c813-424c-8f04-1fac4466f6e3\") " pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodessnx52" Jan 20 18:12:28 crc kubenswrapper[4558]: I0120 18:12:28.045455 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/91000729-c813-424c-8f04-1fac4466f6e3-ssh-key-edpm-compute-no-nodes\") pod \"failed-service-edpm-compute-no-nodes-edpm-compute-no-nodessnx52\" (UID: \"91000729-c813-424c-8f04-1fac4466f6e3\") " pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodessnx52" Jan 20 18:12:28 crc kubenswrapper[4558]: I0120 18:12:28.045491 4558 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q5zwp\" (UniqueName: \"kubernetes.io/projected/91000729-c813-424c-8f04-1fac4466f6e3-kube-api-access-q5zwp\") pod \"failed-service-edpm-compute-no-nodes-edpm-compute-no-nodessnx52\" (UID: \"91000729-c813-424c-8f04-1fac4466f6e3\") " pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodessnx52" Jan 20 18:12:28 crc kubenswrapper[4558]: I0120 18:12:28.045535 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"failed-service-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91000729-c813-424c-8f04-1fac4466f6e3-failed-service-combined-ca-bundle\") pod \"failed-service-edpm-compute-no-nodes-edpm-compute-no-nodessnx52\" (UID: \"91000729-c813-424c-8f04-1fac4466f6e3\") " pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodessnx52" Jan 20 18:12:28 crc kubenswrapper[4558]: I0120 18:12:28.146187 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"failed-service-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91000729-c813-424c-8f04-1fac4466f6e3-failed-service-combined-ca-bundle\") pod \"failed-service-edpm-compute-no-nodes-edpm-compute-no-nodessnx52\" (UID: \"91000729-c813-424c-8f04-1fac4466f6e3\") " pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodessnx52" Jan 20 18:12:28 crc kubenswrapper[4558]: I0120 18:12:28.146238 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/91000729-c813-424c-8f04-1fac4466f6e3-inventory\") pod \"failed-service-edpm-compute-no-nodes-edpm-compute-no-nodessnx52\" (UID: \"91000729-c813-424c-8f04-1fac4466f6e3\") " pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodessnx52" Jan 20 18:12:28 crc kubenswrapper[4558]: I0120 18:12:28.146321 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/91000729-c813-424c-8f04-1fac4466f6e3-ssh-key-edpm-compute-no-nodes\") pod \"failed-service-edpm-compute-no-nodes-edpm-compute-no-nodessnx52\" (UID: \"91000729-c813-424c-8f04-1fac4466f6e3\") " pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodessnx52" Jan 20 18:12:28 crc kubenswrapper[4558]: I0120 18:12:28.146389 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q5zwp\" (UniqueName: \"kubernetes.io/projected/91000729-c813-424c-8f04-1fac4466f6e3-kube-api-access-q5zwp\") pod \"failed-service-edpm-compute-no-nodes-edpm-compute-no-nodessnx52\" (UID: \"91000729-c813-424c-8f04-1fac4466f6e3\") " pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodessnx52" Jan 20 18:12:28 crc kubenswrapper[4558]: I0120 18:12:28.152498 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"failed-service-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91000729-c813-424c-8f04-1fac4466f6e3-failed-service-combined-ca-bundle\") pod \"failed-service-edpm-compute-no-nodes-edpm-compute-no-nodessnx52\" (UID: \"91000729-c813-424c-8f04-1fac4466f6e3\") " pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodessnx52" Jan 20 18:12:28 crc kubenswrapper[4558]: I0120 18:12:28.153041 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-edpm-compute-no-nodes\" 
(UniqueName: \"kubernetes.io/secret/91000729-c813-424c-8f04-1fac4466f6e3-ssh-key-edpm-compute-no-nodes\") pod \"failed-service-edpm-compute-no-nodes-edpm-compute-no-nodessnx52\" (UID: \"91000729-c813-424c-8f04-1fac4466f6e3\") " pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodessnx52" Jan 20 18:12:28 crc kubenswrapper[4558]: I0120 18:12:28.153048 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/91000729-c813-424c-8f04-1fac4466f6e3-inventory\") pod \"failed-service-edpm-compute-no-nodes-edpm-compute-no-nodessnx52\" (UID: \"91000729-c813-424c-8f04-1fac4466f6e3\") " pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodessnx52" Jan 20 18:12:28 crc kubenswrapper[4558]: I0120 18:12:28.163191 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q5zwp\" (UniqueName: \"kubernetes.io/projected/91000729-c813-424c-8f04-1fac4466f6e3-kube-api-access-q5zwp\") pod \"failed-service-edpm-compute-no-nodes-edpm-compute-no-nodessnx52\" (UID: \"91000729-c813-424c-8f04-1fac4466f6e3\") " pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodessnx52" Jan 20 18:12:28 crc kubenswrapper[4558]: I0120 18:12:28.337007 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodessnx52" Jan 20 18:12:28 crc kubenswrapper[4558]: I0120 18:12:28.736685 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodessnx52"] Jan 20 18:12:29 crc kubenswrapper[4558]: I0120 18:12:29.265044 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodessnx52" event={"ID":"91000729-c813-424c-8f04-1fac4466f6e3","Type":"ContainerStarted","Data":"d89955278f785f3a6e09fdbf3b4df35723c42e38b096e5e6df580d4f128ebe84"} Jan 20 18:12:30 crc kubenswrapper[4558]: I0120 18:12:30.275276 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodessnx52" event={"ID":"91000729-c813-424c-8f04-1fac4466f6e3","Type":"ContainerStarted","Data":"15fd0c1ca89ec4d06cb87710c8914dd6911bb232f989412db7afa6da8cf18209"} Jan 20 18:12:31 crc kubenswrapper[4558]: I0120 18:12:31.284114 4558 generic.go:334] "Generic (PLEG): container finished" podID="91000729-c813-424c-8f04-1fac4466f6e3" containerID="15fd0c1ca89ec4d06cb87710c8914dd6911bb232f989412db7afa6da8cf18209" exitCode=2 Jan 20 18:12:31 crc kubenswrapper[4558]: I0120 18:12:31.284182 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodessnx52" event={"ID":"91000729-c813-424c-8f04-1fac4466f6e3","Type":"ContainerDied","Data":"15fd0c1ca89ec4d06cb87710c8914dd6911bb232f989412db7afa6da8cf18209"} Jan 20 18:12:32 crc kubenswrapper[4558]: I0120 18:12:32.554787 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodessnx52" Jan 20 18:12:32 crc kubenswrapper[4558]: I0120 18:12:32.717358 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/91000729-c813-424c-8f04-1fac4466f6e3-ssh-key-edpm-compute-no-nodes\") pod \"91000729-c813-424c-8f04-1fac4466f6e3\" (UID: \"91000729-c813-424c-8f04-1fac4466f6e3\") " Jan 20 18:12:32 crc kubenswrapper[4558]: I0120 18:12:32.717430 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q5zwp\" (UniqueName: \"kubernetes.io/projected/91000729-c813-424c-8f04-1fac4466f6e3-kube-api-access-q5zwp\") pod \"91000729-c813-424c-8f04-1fac4466f6e3\" (UID: \"91000729-c813-424c-8f04-1fac4466f6e3\") " Jan 20 18:12:32 crc kubenswrapper[4558]: I0120 18:12:32.717473 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"failed-service-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91000729-c813-424c-8f04-1fac4466f6e3-failed-service-combined-ca-bundle\") pod \"91000729-c813-424c-8f04-1fac4466f6e3\" (UID: \"91000729-c813-424c-8f04-1fac4466f6e3\") " Jan 20 18:12:32 crc kubenswrapper[4558]: I0120 18:12:32.717541 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/91000729-c813-424c-8f04-1fac4466f6e3-inventory\") pod \"91000729-c813-424c-8f04-1fac4466f6e3\" (UID: \"91000729-c813-424c-8f04-1fac4466f6e3\") " Jan 20 18:12:32 crc kubenswrapper[4558]: I0120 18:12:32.723956 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91000729-c813-424c-8f04-1fac4466f6e3-failed-service-combined-ca-bundle" (OuterVolumeSpecName: "failed-service-combined-ca-bundle") pod "91000729-c813-424c-8f04-1fac4466f6e3" (UID: "91000729-c813-424c-8f04-1fac4466f6e3"). InnerVolumeSpecName "failed-service-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:12:32 crc kubenswrapper[4558]: I0120 18:12:32.724352 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/91000729-c813-424c-8f04-1fac4466f6e3-kube-api-access-q5zwp" (OuterVolumeSpecName: "kube-api-access-q5zwp") pod "91000729-c813-424c-8f04-1fac4466f6e3" (UID: "91000729-c813-424c-8f04-1fac4466f6e3"). InnerVolumeSpecName "kube-api-access-q5zwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:12:32 crc kubenswrapper[4558]: I0120 18:12:32.739839 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91000729-c813-424c-8f04-1fac4466f6e3-inventory" (OuterVolumeSpecName: "inventory") pod "91000729-c813-424c-8f04-1fac4466f6e3" (UID: "91000729-c813-424c-8f04-1fac4466f6e3"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:12:32 crc kubenswrapper[4558]: I0120 18:12:32.740289 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/91000729-c813-424c-8f04-1fac4466f6e3-ssh-key-edpm-compute-no-nodes" (OuterVolumeSpecName: "ssh-key-edpm-compute-no-nodes") pod "91000729-c813-424c-8f04-1fac4466f6e3" (UID: "91000729-c813-424c-8f04-1fac4466f6e3"). InnerVolumeSpecName "ssh-key-edpm-compute-no-nodes". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:12:32 crc kubenswrapper[4558]: I0120 18:12:32.819356 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/91000729-c813-424c-8f04-1fac4466f6e3-ssh-key-edpm-compute-no-nodes\") on node \"crc\" DevicePath \"\"" Jan 20 18:12:32 crc kubenswrapper[4558]: I0120 18:12:32.819400 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q5zwp\" (UniqueName: \"kubernetes.io/projected/91000729-c813-424c-8f04-1fac4466f6e3-kube-api-access-q5zwp\") on node \"crc\" DevicePath \"\"" Jan 20 18:12:32 crc kubenswrapper[4558]: I0120 18:12:32.819413 4558 reconciler_common.go:293] "Volume detached for volume \"failed-service-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/91000729-c813-424c-8f04-1fac4466f6e3-failed-service-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:12:32 crc kubenswrapper[4558]: I0120 18:12:32.819428 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/91000729-c813-424c-8f04-1fac4466f6e3-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:12:33 crc kubenswrapper[4558]: I0120 18:12:33.302363 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodessnx52" event={"ID":"91000729-c813-424c-8f04-1fac4466f6e3","Type":"ContainerDied","Data":"d89955278f785f3a6e09fdbf3b4df35723c42e38b096e5e6df580d4f128ebe84"} Jan 20 18:12:33 crc kubenswrapper[4558]: I0120 18:12:33.302404 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodessnx52" Jan 20 18:12:33 crc kubenswrapper[4558]: I0120 18:12:33.302406 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d89955278f785f3a6e09fdbf3b4df35723c42e38b096e5e6df580d4f128ebe84" Jan 20 18:12:33 crc kubenswrapper[4558]: I0120 18:12:33.934763 4558 scope.go:117] "RemoveContainer" containerID="48e898b9396791e356c286a06a348c31e78843284731ff15b0be127e3cd6f6cc" Jan 20 18:12:33 crc kubenswrapper[4558]: I0120 18:12:33.959651 4558 scope.go:117] "RemoveContainer" containerID="d8a8562f0ad946b83659daace7ac1118fa02810695cdf4d2a3584b817bbaff84" Jan 20 18:12:33 crc kubenswrapper[4558]: I0120 18:12:33.988005 4558 scope.go:117] "RemoveContainer" containerID="3c6a200a4e82ee7b54601eab3dc4e80432bfa9c68c2b524c193e9a8aa3493ae8" Jan 20 18:12:34 crc kubenswrapper[4558]: I0120 18:12:34.017943 4558 scope.go:117] "RemoveContainer" containerID="fd7d81dd40dc80373f8ddc8ea35dc5e972b9a241dd9309b33bf870102020f026" Jan 20 18:12:34 crc kubenswrapper[4558]: I0120 18:12:34.051203 4558 scope.go:117] "RemoveContainer" containerID="6fa574d56a21423d1f991b5e4446614a49a0945522c92b50da02bef52461ba54" Jan 20 18:12:34 crc kubenswrapper[4558]: I0120 18:12:34.073774 4558 scope.go:117] "RemoveContainer" containerID="89048fbe36b5d5a3b2ebe0e98c9e8143e7dcd0bde06fa2aee6817d55e83ca0c4" Jan 20 18:12:50 crc kubenswrapper[4558]: I0120 18:12:50.028927 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodespmzkz"] Jan 20 18:12:50 crc kubenswrapper[4558]: E0120 18:12:50.030239 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91000729-c813-424c-8f04-1fac4466f6e3" containerName="failed-service-edpm-compute-no-nodes-edpm-compute-no-nodes" Jan 20 18:12:50 crc 
kubenswrapper[4558]: I0120 18:12:50.030261 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="91000729-c813-424c-8f04-1fac4466f6e3" containerName="failed-service-edpm-compute-no-nodes-edpm-compute-no-nodes" Jan 20 18:12:50 crc kubenswrapper[4558]: I0120 18:12:50.030437 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="91000729-c813-424c-8f04-1fac4466f6e3" containerName="failed-service-edpm-compute-no-nodes-edpm-compute-no-nodes" Jan 20 18:12:50 crc kubenswrapper[4558]: I0120 18:12:50.032023 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodespmzkz" Jan 20 18:12:50 crc kubenswrapper[4558]: I0120 18:12:50.037494 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-edpm-compute-no-nodes" Jan 20 18:12:50 crc kubenswrapper[4558]: I0120 18:12:50.037794 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"edpm-compute-no-nodes-dockercfg-58wrc" Jan 20 18:12:50 crc kubenswrapper[4558]: I0120 18:12:50.038018 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"combined-ca-bundle" Jan 20 18:12:50 crc kubenswrapper[4558]: I0120 18:12:50.038091 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 18:12:50 crc kubenswrapper[4558]: I0120 18:12:50.038223 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 18:12:50 crc kubenswrapper[4558]: I0120 18:12:50.043584 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodespmzkz"] Jan 20 18:12:50 crc kubenswrapper[4558]: I0120 18:12:50.078481 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0e3f3ff4-4e9f-4a89-9c0a-918a211bf2e6-inventory\") pod \"failed-service-edpm-compute-no-nodes-edpm-compute-no-nodespmzkz\" (UID: \"0e3f3ff4-4e9f-4a89-9c0a-918a211bf2e6\") " pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodespmzkz" Jan 20 18:12:50 crc kubenswrapper[4558]: I0120 18:12:50.078629 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rv242\" (UniqueName: \"kubernetes.io/projected/0e3f3ff4-4e9f-4a89-9c0a-918a211bf2e6-kube-api-access-rv242\") pod \"failed-service-edpm-compute-no-nodes-edpm-compute-no-nodespmzkz\" (UID: \"0e3f3ff4-4e9f-4a89-9c0a-918a211bf2e6\") " pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodespmzkz" Jan 20 18:12:50 crc kubenswrapper[4558]: I0120 18:12:50.078785 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"failed-service-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e3f3ff4-4e9f-4a89-9c0a-918a211bf2e6-failed-service-combined-ca-bundle\") pod \"failed-service-edpm-compute-no-nodes-edpm-compute-no-nodespmzkz\" (UID: \"0e3f3ff4-4e9f-4a89-9c0a-918a211bf2e6\") " pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodespmzkz" Jan 20 18:12:50 crc kubenswrapper[4558]: I0120 18:12:50.078945 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-edpm-compute-no-nodes\" 
(UniqueName: \"kubernetes.io/secret/0e3f3ff4-4e9f-4a89-9c0a-918a211bf2e6-ssh-key-edpm-compute-no-nodes\") pod \"failed-service-edpm-compute-no-nodes-edpm-compute-no-nodespmzkz\" (UID: \"0e3f3ff4-4e9f-4a89-9c0a-918a211bf2e6\") " pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodespmzkz" Jan 20 18:12:50 crc kubenswrapper[4558]: I0120 18:12:50.180058 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rv242\" (UniqueName: \"kubernetes.io/projected/0e3f3ff4-4e9f-4a89-9c0a-918a211bf2e6-kube-api-access-rv242\") pod \"failed-service-edpm-compute-no-nodes-edpm-compute-no-nodespmzkz\" (UID: \"0e3f3ff4-4e9f-4a89-9c0a-918a211bf2e6\") " pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodespmzkz" Jan 20 18:12:50 crc kubenswrapper[4558]: I0120 18:12:50.180110 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"failed-service-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e3f3ff4-4e9f-4a89-9c0a-918a211bf2e6-failed-service-combined-ca-bundle\") pod \"failed-service-edpm-compute-no-nodes-edpm-compute-no-nodespmzkz\" (UID: \"0e3f3ff4-4e9f-4a89-9c0a-918a211bf2e6\") " pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodespmzkz" Jan 20 18:12:50 crc kubenswrapper[4558]: I0120 18:12:50.180205 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/0e3f3ff4-4e9f-4a89-9c0a-918a211bf2e6-ssh-key-edpm-compute-no-nodes\") pod \"failed-service-edpm-compute-no-nodes-edpm-compute-no-nodespmzkz\" (UID: \"0e3f3ff4-4e9f-4a89-9c0a-918a211bf2e6\") " pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodespmzkz" Jan 20 18:12:50 crc kubenswrapper[4558]: I0120 18:12:50.180262 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0e3f3ff4-4e9f-4a89-9c0a-918a211bf2e6-inventory\") pod \"failed-service-edpm-compute-no-nodes-edpm-compute-no-nodespmzkz\" (UID: \"0e3f3ff4-4e9f-4a89-9c0a-918a211bf2e6\") " pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodespmzkz" Jan 20 18:12:50 crc kubenswrapper[4558]: I0120 18:12:50.187745 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/0e3f3ff4-4e9f-4a89-9c0a-918a211bf2e6-ssh-key-edpm-compute-no-nodes\") pod \"failed-service-edpm-compute-no-nodes-edpm-compute-no-nodespmzkz\" (UID: \"0e3f3ff4-4e9f-4a89-9c0a-918a211bf2e6\") " pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodespmzkz" Jan 20 18:12:50 crc kubenswrapper[4558]: I0120 18:12:50.187901 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"failed-service-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e3f3ff4-4e9f-4a89-9c0a-918a211bf2e6-failed-service-combined-ca-bundle\") pod \"failed-service-edpm-compute-no-nodes-edpm-compute-no-nodespmzkz\" (UID: \"0e3f3ff4-4e9f-4a89-9c0a-918a211bf2e6\") " pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodespmzkz" Jan 20 18:12:50 crc kubenswrapper[4558]: I0120 18:12:50.188146 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0e3f3ff4-4e9f-4a89-9c0a-918a211bf2e6-inventory\") pod 
\"failed-service-edpm-compute-no-nodes-edpm-compute-no-nodespmzkz\" (UID: \"0e3f3ff4-4e9f-4a89-9c0a-918a211bf2e6\") " pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodespmzkz" Jan 20 18:12:50 crc kubenswrapper[4558]: I0120 18:12:50.196106 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rv242\" (UniqueName: \"kubernetes.io/projected/0e3f3ff4-4e9f-4a89-9c0a-918a211bf2e6-kube-api-access-rv242\") pod \"failed-service-edpm-compute-no-nodes-edpm-compute-no-nodespmzkz\" (UID: \"0e3f3ff4-4e9f-4a89-9c0a-918a211bf2e6\") " pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodespmzkz" Jan 20 18:12:50 crc kubenswrapper[4558]: I0120 18:12:50.347537 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodespmzkz" Jan 20 18:12:50 crc kubenswrapper[4558]: I0120 18:12:50.751394 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodespmzkz"] Jan 20 18:12:51 crc kubenswrapper[4558]: I0120 18:12:51.466677 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodespmzkz" event={"ID":"0e3f3ff4-4e9f-4a89-9c0a-918a211bf2e6","Type":"ContainerStarted","Data":"570cef9cc766ea8f32fb7f11568377f6dd881a55ef510f764fd31da27ec82c41"} Jan 20 18:12:52 crc kubenswrapper[4558]: I0120 18:12:52.477464 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodespmzkz" event={"ID":"0e3f3ff4-4e9f-4a89-9c0a-918a211bf2e6","Type":"ContainerStarted","Data":"e053d239b73c5b870a9b515ba93f0c9e7c6de2c8e95b8fb1374ec803d1a01ee7"} Jan 20 18:12:52 crc kubenswrapper[4558]: I0120 18:12:52.493919 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodespmzkz" podStartSLOduration=1.833772775 podStartE2EDuration="2.493887896s" podCreationTimestamp="2026-01-20 18:12:50 +0000 UTC" firstStartedPulling="2026-01-20 18:12:50.754102928 +0000 UTC m=+5464.514440895" lastFinishedPulling="2026-01-20 18:12:51.414218049 +0000 UTC m=+5465.174556016" observedRunningTime="2026-01-20 18:12:52.489186086 +0000 UTC m=+5466.249524043" watchObservedRunningTime="2026-01-20 18:12:52.493887896 +0000 UTC m=+5466.254225863" Jan 20 18:12:53 crc kubenswrapper[4558]: I0120 18:12:53.490158 4558 generic.go:334] "Generic (PLEG): container finished" podID="0e3f3ff4-4e9f-4a89-9c0a-918a211bf2e6" containerID="e053d239b73c5b870a9b515ba93f0c9e7c6de2c8e95b8fb1374ec803d1a01ee7" exitCode=2 Jan 20 18:12:53 crc kubenswrapper[4558]: I0120 18:12:53.490241 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodespmzkz" event={"ID":"0e3f3ff4-4e9f-4a89-9c0a-918a211bf2e6","Type":"ContainerDied","Data":"e053d239b73c5b870a9b515ba93f0c9e7c6de2c8e95b8fb1374ec803d1a01ee7"} Jan 20 18:12:54 crc kubenswrapper[4558]: I0120 18:12:54.722888 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodespmzkz" Jan 20 18:12:54 crc kubenswrapper[4558]: I0120 18:12:54.845861 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"failed-service-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e3f3ff4-4e9f-4a89-9c0a-918a211bf2e6-failed-service-combined-ca-bundle\") pod \"0e3f3ff4-4e9f-4a89-9c0a-918a211bf2e6\" (UID: \"0e3f3ff4-4e9f-4a89-9c0a-918a211bf2e6\") " Jan 20 18:12:54 crc kubenswrapper[4558]: I0120 18:12:54.845945 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rv242\" (UniqueName: \"kubernetes.io/projected/0e3f3ff4-4e9f-4a89-9c0a-918a211bf2e6-kube-api-access-rv242\") pod \"0e3f3ff4-4e9f-4a89-9c0a-918a211bf2e6\" (UID: \"0e3f3ff4-4e9f-4a89-9c0a-918a211bf2e6\") " Jan 20 18:12:54 crc kubenswrapper[4558]: I0120 18:12:54.845986 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0e3f3ff4-4e9f-4a89-9c0a-918a211bf2e6-inventory\") pod \"0e3f3ff4-4e9f-4a89-9c0a-918a211bf2e6\" (UID: \"0e3f3ff4-4e9f-4a89-9c0a-918a211bf2e6\") " Jan 20 18:12:54 crc kubenswrapper[4558]: I0120 18:12:54.846011 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/0e3f3ff4-4e9f-4a89-9c0a-918a211bf2e6-ssh-key-edpm-compute-no-nodes\") pod \"0e3f3ff4-4e9f-4a89-9c0a-918a211bf2e6\" (UID: \"0e3f3ff4-4e9f-4a89-9c0a-918a211bf2e6\") " Jan 20 18:12:54 crc kubenswrapper[4558]: I0120 18:12:54.851482 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0e3f3ff4-4e9f-4a89-9c0a-918a211bf2e6-kube-api-access-rv242" (OuterVolumeSpecName: "kube-api-access-rv242") pod "0e3f3ff4-4e9f-4a89-9c0a-918a211bf2e6" (UID: "0e3f3ff4-4e9f-4a89-9c0a-918a211bf2e6"). InnerVolumeSpecName "kube-api-access-rv242". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:12:54 crc kubenswrapper[4558]: I0120 18:12:54.851807 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0e3f3ff4-4e9f-4a89-9c0a-918a211bf2e6-failed-service-combined-ca-bundle" (OuterVolumeSpecName: "failed-service-combined-ca-bundle") pod "0e3f3ff4-4e9f-4a89-9c0a-918a211bf2e6" (UID: "0e3f3ff4-4e9f-4a89-9c0a-918a211bf2e6"). InnerVolumeSpecName "failed-service-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:12:54 crc kubenswrapper[4558]: I0120 18:12:54.864439 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0e3f3ff4-4e9f-4a89-9c0a-918a211bf2e6-ssh-key-edpm-compute-no-nodes" (OuterVolumeSpecName: "ssh-key-edpm-compute-no-nodes") pod "0e3f3ff4-4e9f-4a89-9c0a-918a211bf2e6" (UID: "0e3f3ff4-4e9f-4a89-9c0a-918a211bf2e6"). InnerVolumeSpecName "ssh-key-edpm-compute-no-nodes". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:12:54 crc kubenswrapper[4558]: I0120 18:12:54.865061 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0e3f3ff4-4e9f-4a89-9c0a-918a211bf2e6-inventory" (OuterVolumeSpecName: "inventory") pod "0e3f3ff4-4e9f-4a89-9c0a-918a211bf2e6" (UID: "0e3f3ff4-4e9f-4a89-9c0a-918a211bf2e6"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:12:54 crc kubenswrapper[4558]: I0120 18:12:54.947924 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0e3f3ff4-4e9f-4a89-9c0a-918a211bf2e6-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:12:54 crc kubenswrapper[4558]: I0120 18:12:54.947958 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/0e3f3ff4-4e9f-4a89-9c0a-918a211bf2e6-ssh-key-edpm-compute-no-nodes\") on node \"crc\" DevicePath \"\"" Jan 20 18:12:54 crc kubenswrapper[4558]: I0120 18:12:54.947970 4558 reconciler_common.go:293] "Volume detached for volume \"failed-service-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0e3f3ff4-4e9f-4a89-9c0a-918a211bf2e6-failed-service-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:12:54 crc kubenswrapper[4558]: I0120 18:12:54.947985 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rv242\" (UniqueName: \"kubernetes.io/projected/0e3f3ff4-4e9f-4a89-9c0a-918a211bf2e6-kube-api-access-rv242\") on node \"crc\" DevicePath \"\"" Jan 20 18:12:55 crc kubenswrapper[4558]: I0120 18:12:55.506846 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodespmzkz" event={"ID":"0e3f3ff4-4e9f-4a89-9c0a-918a211bf2e6","Type":"ContainerDied","Data":"570cef9cc766ea8f32fb7f11568377f6dd881a55ef510f764fd31da27ec82c41"} Jan 20 18:12:55 crc kubenswrapper[4558]: I0120 18:12:55.506904 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="570cef9cc766ea8f32fb7f11568377f6dd881a55ef510f764fd31da27ec82c41" Jan 20 18:12:55 crc kubenswrapper[4558]: I0120 18:12:55.506904 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodespmzkz" Jan 20 18:13:32 crc kubenswrapper[4558]: I0120 18:13:32.026696 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesvszbf"] Jan 20 18:13:32 crc kubenswrapper[4558]: E0120 18:13:32.027533 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e3f3ff4-4e9f-4a89-9c0a-918a211bf2e6" containerName="failed-service-edpm-compute-no-nodes-edpm-compute-no-nodes" Jan 20 18:13:32 crc kubenswrapper[4558]: I0120 18:13:32.027547 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e3f3ff4-4e9f-4a89-9c0a-918a211bf2e6" containerName="failed-service-edpm-compute-no-nodes-edpm-compute-no-nodes" Jan 20 18:13:32 crc kubenswrapper[4558]: I0120 18:13:32.027696 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e3f3ff4-4e9f-4a89-9c0a-918a211bf2e6" containerName="failed-service-edpm-compute-no-nodes-edpm-compute-no-nodes" Jan 20 18:13:32 crc kubenswrapper[4558]: I0120 18:13:32.028175 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesvszbf" Jan 20 18:13:32 crc kubenswrapper[4558]: I0120 18:13:32.030714 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplanenodeset-edpm-compute-no-nodes" Jan 20 18:13:32 crc kubenswrapper[4558]: I0120 18:13:32.030890 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"dataplane-ansible-ssh-private-key-secret" Jan 20 18:13:32 crc kubenswrapper[4558]: I0120 18:13:32.031433 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"combined-ca-bundle" Jan 20 18:13:32 crc kubenswrapper[4558]: I0120 18:13:32.031566 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-kuttl-tests"/"openstack-aee-default-env" Jan 20 18:13:32 crc kubenswrapper[4558]: I0120 18:13:32.031779 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-kuttl-tests"/"edpm-compute-no-nodes-dockercfg-58wrc" Jan 20 18:13:32 crc kubenswrapper[4558]: I0120 18:13:32.033371 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesvszbf"] Jan 20 18:13:32 crc kubenswrapper[4558]: I0120 18:13:32.060192 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/c863333a-b3eb-47c4-a968-587f5a131266-ssh-key-edpm-compute-no-nodes\") pod \"failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesvszbf\" (UID: \"c863333a-b3eb-47c4-a968-587f5a131266\") " pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesvszbf" Jan 20 18:13:32 crc kubenswrapper[4558]: I0120 18:13:32.060229 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bbvtq\" (UniqueName: \"kubernetes.io/projected/c863333a-b3eb-47c4-a968-587f5a131266-kube-api-access-bbvtq\") pod \"failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesvszbf\" (UID: \"c863333a-b3eb-47c4-a968-587f5a131266\") " pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesvszbf" Jan 20 18:13:32 crc kubenswrapper[4558]: I0120 18:13:32.060253 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c863333a-b3eb-47c4-a968-587f5a131266-inventory\") pod \"failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesvszbf\" (UID: \"c863333a-b3eb-47c4-a968-587f5a131266\") " pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesvszbf" Jan 20 18:13:32 crc kubenswrapper[4558]: I0120 18:13:32.060285 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"failed-service-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c863333a-b3eb-47c4-a968-587f5a131266-failed-service-combined-ca-bundle\") pod \"failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesvszbf\" (UID: \"c863333a-b3eb-47c4-a968-587f5a131266\") " pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesvszbf" Jan 20 18:13:32 crc kubenswrapper[4558]: I0120 18:13:32.162154 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: 
\"kubernetes.io/secret/c863333a-b3eb-47c4-a968-587f5a131266-ssh-key-edpm-compute-no-nodes\") pod \"failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesvszbf\" (UID: \"c863333a-b3eb-47c4-a968-587f5a131266\") " pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesvszbf" Jan 20 18:13:32 crc kubenswrapper[4558]: I0120 18:13:32.162235 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bbvtq\" (UniqueName: \"kubernetes.io/projected/c863333a-b3eb-47c4-a968-587f5a131266-kube-api-access-bbvtq\") pod \"failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesvszbf\" (UID: \"c863333a-b3eb-47c4-a968-587f5a131266\") " pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesvszbf" Jan 20 18:13:32 crc kubenswrapper[4558]: I0120 18:13:32.162273 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c863333a-b3eb-47c4-a968-587f5a131266-inventory\") pod \"failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesvszbf\" (UID: \"c863333a-b3eb-47c4-a968-587f5a131266\") " pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesvszbf" Jan 20 18:13:32 crc kubenswrapper[4558]: I0120 18:13:32.162325 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"failed-service-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c863333a-b3eb-47c4-a968-587f5a131266-failed-service-combined-ca-bundle\") pod \"failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesvszbf\" (UID: \"c863333a-b3eb-47c4-a968-587f5a131266\") " pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesvszbf" Jan 20 18:13:32 crc kubenswrapper[4558]: I0120 18:13:32.168552 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/c863333a-b3eb-47c4-a968-587f5a131266-ssh-key-edpm-compute-no-nodes\") pod \"failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesvszbf\" (UID: \"c863333a-b3eb-47c4-a968-587f5a131266\") " pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesvszbf" Jan 20 18:13:32 crc kubenswrapper[4558]: I0120 18:13:32.169527 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"failed-service-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c863333a-b3eb-47c4-a968-587f5a131266-failed-service-combined-ca-bundle\") pod \"failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesvszbf\" (UID: \"c863333a-b3eb-47c4-a968-587f5a131266\") " pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesvszbf" Jan 20 18:13:32 crc kubenswrapper[4558]: I0120 18:13:32.169638 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c863333a-b3eb-47c4-a968-587f5a131266-inventory\") pod \"failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesvszbf\" (UID: \"c863333a-b3eb-47c4-a968-587f5a131266\") " pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesvszbf" Jan 20 18:13:32 crc kubenswrapper[4558]: I0120 18:13:32.179115 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bbvtq\" (UniqueName: \"kubernetes.io/projected/c863333a-b3eb-47c4-a968-587f5a131266-kube-api-access-bbvtq\") pod \"failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesvszbf\" (UID: 
\"c863333a-b3eb-47c4-a968-587f5a131266\") " pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesvszbf" Jan 20 18:13:32 crc kubenswrapper[4558]: I0120 18:13:32.345374 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesvszbf" Jan 20 18:13:32 crc kubenswrapper[4558]: I0120 18:13:32.734193 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesvszbf"] Jan 20 18:13:32 crc kubenswrapper[4558]: I0120 18:13:32.847758 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesvszbf" event={"ID":"c863333a-b3eb-47c4-a968-587f5a131266","Type":"ContainerStarted","Data":"527a1b5ad654092cf509e8e2e41c9d023c10d1beeb9df3b22b12ea32e741321c"} Jan 20 18:13:33 crc kubenswrapper[4558]: I0120 18:13:33.856135 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesvszbf" event={"ID":"c863333a-b3eb-47c4-a968-587f5a131266","Type":"ContainerStarted","Data":"46f1035e9e52378a66569151ff15c3b3b2d4046ac1c97fbe5a83a6310941040a"} Jan 20 18:13:33 crc kubenswrapper[4558]: I0120 18:13:33.872207 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesvszbf" podStartSLOduration=1.332968933 podStartE2EDuration="1.872189532s" podCreationTimestamp="2026-01-20 18:13:32 +0000 UTC" firstStartedPulling="2026-01-20 18:13:32.739238624 +0000 UTC m=+5506.499576591" lastFinishedPulling="2026-01-20 18:13:33.278459222 +0000 UTC m=+5507.038797190" observedRunningTime="2026-01-20 18:13:33.871589524 +0000 UTC m=+5507.631927491" watchObservedRunningTime="2026-01-20 18:13:33.872189532 +0000 UTC m=+5507.632527489" Jan 20 18:13:34 crc kubenswrapper[4558]: I0120 18:13:34.205091 4558 scope.go:117] "RemoveContainer" containerID="7ded989e29f2ec4208dd35a489afec217e575c855f2404f510c9c3689f89afa3" Jan 20 18:13:34 crc kubenswrapper[4558]: I0120 18:13:34.237318 4558 scope.go:117] "RemoveContainer" containerID="c53a5a80f62744434ae4cb90eb9c46ceac977fddfec911746dba64fed01b7d9c" Jan 20 18:13:34 crc kubenswrapper[4558]: I0120 18:13:34.275475 4558 scope.go:117] "RemoveContainer" containerID="fd4e6633d0b1d139f29d4fabd28e07904c377771ba3431b9957079f44f7cc234" Jan 20 18:13:34 crc kubenswrapper[4558]: I0120 18:13:34.309161 4558 scope.go:117] "RemoveContainer" containerID="bf4d3811eee2d6c2a76a04c8bc4886d5e4064262d451154bc849330373e28b8c" Jan 20 18:13:34 crc kubenswrapper[4558]: I0120 18:13:34.333014 4558 scope.go:117] "RemoveContainer" containerID="5143fa3eb96cd97462024f03bea00ab9e6ac1a72f72377f2c6a7b950245e7d13" Jan 20 18:13:34 crc kubenswrapper[4558]: I0120 18:13:34.355895 4558 scope.go:117] "RemoveContainer" containerID="fbbbd3e423e43fc9a81129aadd7c7dad474a4a0d21b4964b9d8b4a1efdd4f7b8" Jan 20 18:13:34 crc kubenswrapper[4558]: I0120 18:13:34.377907 4558 scope.go:117] "RemoveContainer" containerID="899816b732eb18cc306c370d7540b48a90ca9cb17541084636e709a22ad32b62" Jan 20 18:13:34 crc kubenswrapper[4558]: I0120 18:13:34.401875 4558 scope.go:117] "RemoveContainer" containerID="39db11afea3e6c7fc12ff8f00fef07a479fffc07bbfefb56685639a3dc257c1f" Jan 20 18:13:34 crc kubenswrapper[4558]: I0120 18:13:34.427790 4558 scope.go:117] "RemoveContainer" 
containerID="ba04ebadbcc7e6eaa5503d46e3109eb8b44ec6528c1aa862c2caa95105251353" Jan 20 18:13:34 crc kubenswrapper[4558]: I0120 18:13:34.450737 4558 scope.go:117] "RemoveContainer" containerID="2185d2eb9908585da26e7d011d3e7ff0d23cc38c3ae717c53700ba11888b4945" Jan 20 18:13:34 crc kubenswrapper[4558]: I0120 18:13:34.482468 4558 scope.go:117] "RemoveContainer" containerID="e38d8d3187d68b2c810f9407993637c90f2e49f2057cd2d9bd1041ee58f14219" Jan 20 18:13:35 crc kubenswrapper[4558]: I0120 18:13:35.877625 4558 generic.go:334] "Generic (PLEG): container finished" podID="c863333a-b3eb-47c4-a968-587f5a131266" containerID="46f1035e9e52378a66569151ff15c3b3b2d4046ac1c97fbe5a83a6310941040a" exitCode=2 Jan 20 18:13:35 crc kubenswrapper[4558]: I0120 18:13:35.877678 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesvszbf" event={"ID":"c863333a-b3eb-47c4-a968-587f5a131266","Type":"ContainerDied","Data":"46f1035e9e52378a66569151ff15c3b3b2d4046ac1c97fbe5a83a6310941040a"} Jan 20 18:13:37 crc kubenswrapper[4558]: I0120 18:13:37.125868 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesvszbf" Jan 20 18:13:37 crc kubenswrapper[4558]: I0120 18:13:37.240239 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"failed-service-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c863333a-b3eb-47c4-a968-587f5a131266-failed-service-combined-ca-bundle\") pod \"c863333a-b3eb-47c4-a968-587f5a131266\" (UID: \"c863333a-b3eb-47c4-a968-587f5a131266\") " Jan 20 18:13:37 crc kubenswrapper[4558]: I0120 18:13:37.240486 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/c863333a-b3eb-47c4-a968-587f5a131266-ssh-key-edpm-compute-no-nodes\") pod \"c863333a-b3eb-47c4-a968-587f5a131266\" (UID: \"c863333a-b3eb-47c4-a968-587f5a131266\") " Jan 20 18:13:37 crc kubenswrapper[4558]: I0120 18:13:37.240531 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c863333a-b3eb-47c4-a968-587f5a131266-inventory\") pod \"c863333a-b3eb-47c4-a968-587f5a131266\" (UID: \"c863333a-b3eb-47c4-a968-587f5a131266\") " Jan 20 18:13:37 crc kubenswrapper[4558]: I0120 18:13:37.240624 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bbvtq\" (UniqueName: \"kubernetes.io/projected/c863333a-b3eb-47c4-a968-587f5a131266-kube-api-access-bbvtq\") pod \"c863333a-b3eb-47c4-a968-587f5a131266\" (UID: \"c863333a-b3eb-47c4-a968-587f5a131266\") " Jan 20 18:13:37 crc kubenswrapper[4558]: I0120 18:13:37.247734 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c863333a-b3eb-47c4-a968-587f5a131266-kube-api-access-bbvtq" (OuterVolumeSpecName: "kube-api-access-bbvtq") pod "c863333a-b3eb-47c4-a968-587f5a131266" (UID: "c863333a-b3eb-47c4-a968-587f5a131266"). InnerVolumeSpecName "kube-api-access-bbvtq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:13:37 crc kubenswrapper[4558]: I0120 18:13:37.247849 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c863333a-b3eb-47c4-a968-587f5a131266-failed-service-combined-ca-bundle" (OuterVolumeSpecName: "failed-service-combined-ca-bundle") pod "c863333a-b3eb-47c4-a968-587f5a131266" (UID: "c863333a-b3eb-47c4-a968-587f5a131266"). InnerVolumeSpecName "failed-service-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:13:37 crc kubenswrapper[4558]: I0120 18:13:37.263126 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c863333a-b3eb-47c4-a968-587f5a131266-inventory" (OuterVolumeSpecName: "inventory") pod "c863333a-b3eb-47c4-a968-587f5a131266" (UID: "c863333a-b3eb-47c4-a968-587f5a131266"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:13:37 crc kubenswrapper[4558]: I0120 18:13:37.263575 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c863333a-b3eb-47c4-a968-587f5a131266-ssh-key-edpm-compute-no-nodes" (OuterVolumeSpecName: "ssh-key-edpm-compute-no-nodes") pod "c863333a-b3eb-47c4-a968-587f5a131266" (UID: "c863333a-b3eb-47c4-a968-587f5a131266"). InnerVolumeSpecName "ssh-key-edpm-compute-no-nodes". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:13:37 crc kubenswrapper[4558]: I0120 18:13:37.342399 4558 reconciler_common.go:293] "Volume detached for volume \"failed-service-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c863333a-b3eb-47c4-a968-587f5a131266-failed-service-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:13:37 crc kubenswrapper[4558]: I0120 18:13:37.342433 4558 reconciler_common.go:293] "Volume detached for volume \"ssh-key-edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/secret/c863333a-b3eb-47c4-a968-587f5a131266-ssh-key-edpm-compute-no-nodes\") on node \"crc\" DevicePath \"\"" Jan 20 18:13:37 crc kubenswrapper[4558]: I0120 18:13:37.342448 4558 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c863333a-b3eb-47c4-a968-587f5a131266-inventory\") on node \"crc\" DevicePath \"\"" Jan 20 18:13:37 crc kubenswrapper[4558]: I0120 18:13:37.342461 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bbvtq\" (UniqueName: \"kubernetes.io/projected/c863333a-b3eb-47c4-a968-587f5a131266-kube-api-access-bbvtq\") on node \"crc\" DevicePath \"\"" Jan 20 18:13:37 crc kubenswrapper[4558]: I0120 18:13:37.898765 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesvszbf" event={"ID":"c863333a-b3eb-47c4-a968-587f5a131266","Type":"ContainerDied","Data":"527a1b5ad654092cf509e8e2e41c9d023c10d1beeb9df3b22b12ea32e741321c"} Jan 20 18:13:37 crc kubenswrapper[4558]: I0120 18:13:37.898826 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesvszbf" Jan 20 18:13:37 crc kubenswrapper[4558]: I0120 18:13:37.898828 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="527a1b5ad654092cf509e8e2e41c9d023c10d1beeb9df3b22b12ea32e741321c" Jan 20 18:13:38 crc kubenswrapper[4558]: I0120 18:13:38.361194 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesbpnpf"] Jan 20 18:13:38 crc kubenswrapper[4558]: I0120 18:13:38.365244 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodessnx52"] Jan 20 18:13:38 crc kubenswrapper[4558]: I0120 18:13:38.368708 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesvszbf"] Jan 20 18:13:38 crc kubenswrapper[4558]: I0120 18:13:38.376282 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesbpnpf"] Jan 20 18:13:38 crc kubenswrapper[4558]: I0120 18:13:38.379737 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodespmzkz"] Jan 20 18:13:38 crc kubenswrapper[4558]: I0120 18:13:38.398361 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodessnx52"] Jan 20 18:13:38 crc kubenswrapper[4558]: I0120 18:13:38.407518 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodesvszbf"] Jan 20 18:13:38 crc kubenswrapper[4558]: I0120 18:13:38.413080 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/failed-service-edpm-compute-no-nodes-edpm-compute-no-nodespmzkz"] Jan 20 18:13:38 crc kubenswrapper[4558]: I0120 18:13:38.458477 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-68c5z"] Jan 20 18:13:38 crc kubenswrapper[4558]: E0120 18:13:38.458798 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c863333a-b3eb-47c4-a968-587f5a131266" containerName="failed-service-edpm-compute-no-nodes-edpm-compute-no-nodes" Jan 20 18:13:38 crc kubenswrapper[4558]: I0120 18:13:38.458815 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c863333a-b3eb-47c4-a968-587f5a131266" containerName="failed-service-edpm-compute-no-nodes-edpm-compute-no-nodes" Jan 20 18:13:38 crc kubenswrapper[4558]: I0120 18:13:38.458963 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c863333a-b3eb-47c4-a968-587f5a131266" containerName="failed-service-edpm-compute-no-nodes-edpm-compute-no-nodes" Jan 20 18:13:38 crc kubenswrapper[4558]: I0120 18:13:38.459757 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-68c5z" Jan 20 18:13:38 crc kubenswrapper[4558]: I0120 18:13:38.471801 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-68c5z"] Jan 20 18:13:38 crc kubenswrapper[4558]: I0120 18:13:38.576256 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0e3f3ff4-4e9f-4a89-9c0a-918a211bf2e6" path="/var/lib/kubelet/pods/0e3f3ff4-4e9f-4a89-9c0a-918a211bf2e6/volumes" Jan 20 18:13:38 crc kubenswrapper[4558]: I0120 18:13:38.576989 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="499f663d-2a7c-4896-98bb-1e2b7a7b6543" path="/var/lib/kubelet/pods/499f663d-2a7c-4896-98bb-1e2b7a7b6543/volumes" Jan 20 18:13:38 crc kubenswrapper[4558]: I0120 18:13:38.577560 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="91000729-c813-424c-8f04-1fac4466f6e3" path="/var/lib/kubelet/pods/91000729-c813-424c-8f04-1fac4466f6e3/volumes" Jan 20 18:13:38 crc kubenswrapper[4558]: I0120 18:13:38.578090 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c863333a-b3eb-47c4-a968-587f5a131266" path="/var/lib/kubelet/pods/c863333a-b3eb-47c4-a968-587f5a131266/volumes" Jan 20 18:13:38 crc kubenswrapper[4558]: I0120 18:13:38.658236 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r96cd\" (UniqueName: \"kubernetes.io/projected/87f866b8-e393-493f-8b10-4758c516f4d7-kube-api-access-r96cd\") pod \"dnsmasq-dnsmasq-84b9f45d47-68c5z\" (UID: \"87f866b8-e393-493f-8b10-4758c516f4d7\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-68c5z" Jan 20 18:13:38 crc kubenswrapper[4558]: I0120 18:13:38.658315 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87f866b8-e393-493f-8b10-4758c516f4d7-config\") pod \"dnsmasq-dnsmasq-84b9f45d47-68c5z\" (UID: \"87f866b8-e393-493f-8b10-4758c516f4d7\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-68c5z" Jan 20 18:13:38 crc kubenswrapper[4558]: I0120 18:13:38.658355 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/87f866b8-e393-493f-8b10-4758c516f4d7-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-84b9f45d47-68c5z\" (UID: \"87f866b8-e393-493f-8b10-4758c516f4d7\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-68c5z" Jan 20 18:13:38 crc kubenswrapper[4558]: I0120 18:13:38.759425 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87f866b8-e393-493f-8b10-4758c516f4d7-config\") pod \"dnsmasq-dnsmasq-84b9f45d47-68c5z\" (UID: \"87f866b8-e393-493f-8b10-4758c516f4d7\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-68c5z" Jan 20 18:13:38 crc kubenswrapper[4558]: I0120 18:13:38.759494 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/87f866b8-e393-493f-8b10-4758c516f4d7-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-84b9f45d47-68c5z\" (UID: \"87f866b8-e393-493f-8b10-4758c516f4d7\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-68c5z" Jan 20 18:13:38 crc kubenswrapper[4558]: I0120 18:13:38.759598 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r96cd\" (UniqueName: 
\"kubernetes.io/projected/87f866b8-e393-493f-8b10-4758c516f4d7-kube-api-access-r96cd\") pod \"dnsmasq-dnsmasq-84b9f45d47-68c5z\" (UID: \"87f866b8-e393-493f-8b10-4758c516f4d7\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-68c5z" Jan 20 18:13:38 crc kubenswrapper[4558]: E0120 18:13:38.759763 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/edpm-compute-no-nodes: configmap "edpm-compute-no-nodes" not found Jan 20 18:13:38 crc kubenswrapper[4558]: E0120 18:13:38.759834 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/94d7f76b-8cf8-4bf5-9844-526d8b79fffc-edpm-compute-no-nodes podName:94d7f76b-8cf8-4bf5-9844-526d8b79fffc nodeName:}" failed. No retries permitted until 2026-01-20 18:13:39.259816575 +0000 UTC m=+5513.020154553 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "edpm-compute-no-nodes" (UniqueName: "kubernetes.io/configmap/94d7f76b-8cf8-4bf5-9844-526d8b79fffc-edpm-compute-no-nodes") pod "dnsmasq-dnsmasq-64864b6d57-7zmjn" (UID: "94d7f76b-8cf8-4bf5-9844-526d8b79fffc") : configmap "edpm-compute-no-nodes" not found Jan 20 18:13:38 crc kubenswrapper[4558]: I0120 18:13:38.760394 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87f866b8-e393-493f-8b10-4758c516f4d7-config\") pod \"dnsmasq-dnsmasq-84b9f45d47-68c5z\" (UID: \"87f866b8-e393-493f-8b10-4758c516f4d7\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-68c5z" Jan 20 18:13:38 crc kubenswrapper[4558]: I0120 18:13:38.760700 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/87f866b8-e393-493f-8b10-4758c516f4d7-dnsmasq-svc\") pod \"dnsmasq-dnsmasq-84b9f45d47-68c5z\" (UID: \"87f866b8-e393-493f-8b10-4758c516f4d7\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-68c5z" Jan 20 18:13:38 crc kubenswrapper[4558]: I0120 18:13:38.774956 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r96cd\" (UniqueName: \"kubernetes.io/projected/87f866b8-e393-493f-8b10-4758c516f4d7-kube-api-access-r96cd\") pod \"dnsmasq-dnsmasq-84b9f45d47-68c5z\" (UID: \"87f866b8-e393-493f-8b10-4758c516f4d7\") " pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-68c5z" Jan 20 18:13:38 crc kubenswrapper[4558]: I0120 18:13:38.785702 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-68c5z" Jan 20 18:13:39 crc kubenswrapper[4558]: I0120 18:13:39.167766 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-68c5z"] Jan 20 18:13:39 crc kubenswrapper[4558]: E0120 18:13:39.267508 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/edpm-compute-no-nodes: configmap "edpm-compute-no-nodes" not found Jan 20 18:13:39 crc kubenswrapper[4558]: E0120 18:13:39.267712 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/94d7f76b-8cf8-4bf5-9844-526d8b79fffc-edpm-compute-no-nodes podName:94d7f76b-8cf8-4bf5-9844-526d8b79fffc nodeName:}" failed. No retries permitted until 2026-01-20 18:13:40.267694963 +0000 UTC m=+5514.028032930 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "edpm-compute-no-nodes" (UniqueName: "kubernetes.io/configmap/94d7f76b-8cf8-4bf5-9844-526d8b79fffc-edpm-compute-no-nodes") pod "dnsmasq-dnsmasq-64864b6d57-7zmjn" (UID: "94d7f76b-8cf8-4bf5-9844-526d8b79fffc") : configmap "edpm-compute-no-nodes" not found Jan 20 18:13:39 crc kubenswrapper[4558]: I0120 18:13:39.919126 4558 generic.go:334] "Generic (PLEG): container finished" podID="87f866b8-e393-493f-8b10-4758c516f4d7" containerID="a83b0a78254a79524a41d898416107972cb91955cdd99e1f27c16b67c1e99007" exitCode=0 Jan 20 18:13:39 crc kubenswrapper[4558]: I0120 18:13:39.919234 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-68c5z" event={"ID":"87f866b8-e393-493f-8b10-4758c516f4d7","Type":"ContainerDied","Data":"a83b0a78254a79524a41d898416107972cb91955cdd99e1f27c16b67c1e99007"} Jan 20 18:13:39 crc kubenswrapper[4558]: I0120 18:13:39.919303 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-68c5z" event={"ID":"87f866b8-e393-493f-8b10-4758c516f4d7","Type":"ContainerStarted","Data":"45561c79a86c1ba9bab9713acc86522edb589136c662fa96d5f99acc6fc12d37"} Jan 20 18:13:40 crc kubenswrapper[4558]: E0120 18:13:40.285625 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/edpm-compute-no-nodes: configmap "edpm-compute-no-nodes" not found Jan 20 18:13:40 crc kubenswrapper[4558]: E0120 18:13:40.286132 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/94d7f76b-8cf8-4bf5-9844-526d8b79fffc-edpm-compute-no-nodes podName:94d7f76b-8cf8-4bf5-9844-526d8b79fffc nodeName:}" failed. No retries permitted until 2026-01-20 18:13:42.286095118 +0000 UTC m=+5516.046433086 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "edpm-compute-no-nodes" (UniqueName: "kubernetes.io/configmap/94d7f76b-8cf8-4bf5-9844-526d8b79fffc-edpm-compute-no-nodes") pod "dnsmasq-dnsmasq-64864b6d57-7zmjn" (UID: "94d7f76b-8cf8-4bf5-9844-526d8b79fffc") : configmap "edpm-compute-no-nodes" not found Jan 20 18:13:40 crc kubenswrapper[4558]: I0120 18:13:40.565820 4558 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." 
pod="openstack-operators/manila-operator-controller-manager-864f6b75bf-4jj8h" secret="" err="secret \"manila-operator-controller-manager-dockercfg-skvck\" not found" Jan 20 18:13:40 crc kubenswrapper[4558]: I0120 18:13:40.928746 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-68c5z" event={"ID":"87f866b8-e393-493f-8b10-4758c516f4d7","Type":"ContainerStarted","Data":"55f4abcf0e9e92d451637382ec3a5f925ec9795d226b0135c159b42ff8005c6e"} Jan 20 18:13:40 crc kubenswrapper[4558]: I0120 18:13:40.928910 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-68c5z" Jan 20 18:13:40 crc kubenswrapper[4558]: I0120 18:13:40.967409 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-68c5z" podStartSLOduration=2.967380464 podStartE2EDuration="2.967380464s" podCreationTimestamp="2026-01-20 18:13:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:13:40.962116176 +0000 UTC m=+5514.722454143" watchObservedRunningTime="2026-01-20 18:13:40.967380464 +0000 UTC m=+5514.727718431" Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.076142 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/swift-operator-controller-manager-85dd56d4cc-cvlzf"] Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.076395 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/swift-operator-controller-manager-85dd56d4cc-cvlzf" podUID="56c68b3c-13ac-4e77-a8a5-bd99d83d5667" containerName="manager" containerID="cri-o://51ecd2e724af29b0f6a51dc85e4ad8a754870e77bf87c5ae416f003c008d7caf" gracePeriod=10 Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.085642 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/designate-operator-controller-manager-9f958b845-q8wlk"] Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.085846 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/designate-operator-controller-manager-9f958b845-q8wlk" podUID="54c6bd49-d022-40c6-b547-58d3eb5ba7e2" containerName="manager" containerID="cri-o://ecb3d0f3bf1d477d3fea81ea29ae6dbfb39eff4f2f6120f21ae3ef0a4efba324" gracePeriod=10 Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.095452 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-7fc9b76cf6-9ld4x"] Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.095623 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/octavia-operator-controller-manager-7fc9b76cf6-9ld4x" podUID="aec45707-2c02-43bb-b7d9-a24b906cadca" containerName="manager" containerID="cri-o://9b5a1aa8966e4b09819fa1a3e8844f0277e5fd276937d6ef5896353e424ac13b" gracePeriod=10 Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.105678 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/nova-operator-controller-manager-65849867d6-js8vs"] Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.105873 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/nova-operator-controller-manager-65849867d6-js8vs" podUID="42eb153b-1142-4dc6-9e34-b4cc38e49701" containerName="manager" 
containerID="cri-o://ad003af07a804bd7051853e53266eefbbf5253d38becc3b1787e7568e72d2f2c" gracePeriod=10 Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.112398 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/glance-operator-controller-manager-c6994669c-78pts"] Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.113606 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/glance-operator-controller-manager-c6994669c-78pts" podUID="e645ac52-b990-4b3f-a282-59ce60cacaff" containerName="manager" containerID="cri-o://d569da36da81b9b9b27be2b8f5c9a33c02392ee1c64ee171850ebd6b90d34bc4" gracePeriod=10 Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.127954 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-64cd966744-925vf"] Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.128182 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/watcher-operator-controller-manager-64cd966744-925vf" podUID="8dea3161-8db9-4778-b59c-066f876ff6e2" containerName="manager" containerID="cri-o://e6cde12ec6613c6ca86331ad5562eff59e938f8143553a59aa1e652328de6c2b" gracePeriod=10 Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.135234 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/placement-operator-controller-manager-686df47fcb-tsdgq"] Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.135468 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/placement-operator-controller-manager-686df47fcb-tsdgq" podUID="0292902b-f0ca-4abc-b220-4e3268243db5" containerName="manager" containerID="cri-o://63231d660c87582107f72247293a009f16107c11a93e6a345163da171fa24b3e" gracePeriod=10 Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.148200 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-qwwmt"] Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.148556 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-qwwmt" podUID="af89563a-d3e9-4424-8fde-a0a77b21b7e8" containerName="operator" containerID="cri-o://5a0b4e0b6774ce77a765f98980c1f9ce2e4138aebc6117795de6f7f8346a3737" gracePeriod=10 Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.151589 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-cb4666565-6689s"] Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.151838 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/neutron-operator-controller-manager-cb4666565-6689s" podUID="f1a6abd3-1c13-40ca-8e20-344fd40bc348" containerName="manager" containerID="cri-o://0e2eb3da46b7bd583c44ebc8b13f7d1c4b0aa944896d7eec0b5487a1c1e21756" gracePeriod=10 Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.159260 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/test-operator-controller-manager-7cd8bc9dbb-9hmt6"] Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.159373 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/test-operator-controller-manager-7cd8bc9dbb-9hmt6" podUID="fafbffd7-66c2-42ab-97c2-034a1fd6fd7c" containerName="manager" 
containerID="cri-o://2ff3e4d2ad64773a61f739b0fa6f4ece97c0c40fd69baf1df699ce574d95ed13" gracePeriod=10 Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.166348 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/infra-operator-controller-manager-77c48c7859-4f7km"] Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.166496 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/infra-operator-controller-manager-77c48c7859-4f7km" podUID="2e9e43c0-674a-4769-b44b-7eb6ee1a69d5" containerName="manager" containerID="cri-o://5fea48a489b7236322aa035fd26c8bc64999234427695faf363f012948d76b1b" gracePeriod=10 Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.173223 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7ddb5c749-jrgz7"] Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.173404 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/barbican-operator-controller-manager-7ddb5c749-jrgz7" podUID="1c12cb14-b8f1-4939-84d9-6a6b851f095a" containerName="manager" containerID="cri-o://bd95c9802be7aa3b09b45d31c56df20489fe579fe93a21fade47b46985baa4cc" gracePeriod=10 Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.186761 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-767fdc4f47-8mw4r"] Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.187275 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/keystone-operator-controller-manager-767fdc4f47-8mw4r" podUID="115bb848-a715-4eac-8993-63842460dd3d" containerName="manager" containerID="cri-o://f1506f0ee5ae0e37dc7c3f1bab9db3a2151ee378ef978f06310c406e3b62a13b" gracePeriod=10 Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.193080 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-75bfd788c8-wflcw"] Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.193213 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-controller-manager-75bfd788c8-wflcw" podUID="deea5510-f570-48ac-b23f-bba261ef3951" containerName="manager" containerID="cri-o://64bcb4cba85895bee43f773af00ef7e07c4d7bd80d07b323ae48eeb3ec59dd5a" gracePeriod=10 Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.197335 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/heat-operator-controller-manager-594c8c9d5d-k28w4"] Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.197468 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-k28w4" podUID="88bca883-e84c-4223-80de-4c389d44f9fe" containerName="manager" containerID="cri-o://3b80173f89c66eaddb2369263d220ac89d3df4fc48524e79192762bb9e4aed17" gracePeriod=10 Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.201069 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-78757b4889-t99dx"] Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.201303 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/ironic-operator-controller-manager-78757b4889-t99dx" podUID="e7054184-22f8-43b2-b75f-0534d5bb467f" containerName="manager" 
containerID="cri-o://50d9b3907efdf86bfbc435a6c7984f8ffc06ca5383c5f947cc9ccf10c522de7b" gracePeriod=10 Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.204875 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-55db956ddc-6j4g5"] Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.209002 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/ovn-operator-controller-manager-55db956ddc-6j4g5" podUID="1998e4dc-a1d4-405e-b0dd-546e1d5fed6f" containerName="manager" containerID="cri-o://96b14ccca580d6f73044405aa0fb79aa275da3630e3f85445651826254e89bab" gracePeriod=10 Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.217945 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-5f8f495fcf-ldhxq"] Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.218069 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/telemetry-operator-controller-manager-5f8f495fcf-ldhxq" podUID="da27b90a-7f65-4d58-92f1-d46b0d92fd79" containerName="manager" containerID="cri-o://c4f4ac4e6be0e8cf007dc4d69369b47f2f03fdf263af1c11172de431224ca4f9" gracePeriod=10 Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.254860 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-5b9875986dl6bbk"] Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.255311 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5b9875986dl6bbk" podUID="cbbd3f62-64b6-4cc2-8c3b-b21d317c0624" containerName="manager" containerID="cri-o://14786d8dc9d059a3c99ac1f27b72a5111fcedcf4c96caa245bb691b274f41a26" gracePeriod=10 Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.279257 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-77d5c5b54f-gvd5f"] Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.279525 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-gvd5f" podUID="31cd4c27-d472-4406-886a-7222315c465d" containerName="manager" containerID="cri-o://5ef1d53f579e55a1d5029b72751834ff89a20ca0419624b102fe6231908e54b1" gracePeriod=10 Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.301453 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-c87fff755-plg7f"] Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.301717 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/mariadb-operator-controller-manager-c87fff755-plg7f" podUID="dba009f4-faa1-4d6b-bc19-c00eedd6c7c7" containerName="manager" containerID="cri-o://b21d03a1bce545072a0f5d40d609ff0a56267d13add877890bddf1f91228ba7f" gracePeriod=10 Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.333303 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-9b68f5989-mdfnr"] Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.333624 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/cinder-operator-controller-manager-9b68f5989-mdfnr" podUID="973549d4-6a26-4166-8be0-b0dfb7c5aec6" containerName="manager" 
containerID="cri-o://6aa6ab1b79913f16a8c37d31d4f54307176b8cfa305a050b83890240a717629c" gracePeriod=10 Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.385061 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/manila-operator-controller-manager-864f6b75bf-4jj8h"] Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.385361 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/manila-operator-controller-manager-864f6b75bf-4jj8h" podUID="6e337f3c-6900-4314-adbb-aec361ccb7cc" containerName="manager" containerID="cri-o://45317194cb1488747739e7ccc74659e1093db6bf0596f5b55836038289216605" gracePeriod=10 Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.398342 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-pt7m4"] Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.398577 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-pt7m4" podUID="666aae5d-ee4c-47d1-8cf7-0aa2cb8c813d" containerName="registry-server" containerID="cri-o://0fc1bcd802de9cc79769c26cb5ab84e85afa3e5dc41c20e72994c498aca1b4c0" gracePeriod=30 Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.404428 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zd9qc"] Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.404568 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zd9qc" podUID="04ab3f66-5485-4b6f-9293-ed76b2695d50" containerName="catalog-operator" containerID="cri-o://5458e46e56f7d65eab79c3c48391c11132277e97ba84585b2d13259aa9368eb2" gracePeriod=30 Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.413454 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-controller-init-6d4d7d8545-4v6kg"] Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.413762 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/7f8269a825e737cb1f2e67fcbeccb826d8bfc6ea337cf3db10b8143e2end5bq"] Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.413847 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-controller-init-6d4d7d8545-4v6kg" podUID="a491bcd2-847b-41eb-addd-54e625a9e6e1" containerName="operator" containerID="cri-o://2ef63481a765808418e8fe2316f84a64976e170f6b42473a8954a04e0fa1bbe7" gracePeriod=10 Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.417474 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/7f8269a825e737cb1f2e67fcbeccb826d8bfc6ea337cf3db10b8143e2end5bq"] Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.434965 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ljsnj"] Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.436050 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ljsnj" Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.446649 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ljsnj"] Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.530796 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-csfkt\" (UniqueName: \"kubernetes.io/projected/e10e41c7-a461-4506-9e95-51007725cc98-kube-api-access-csfkt\") pod \"catalog-operator-68c6474976-ljsnj\" (UID: \"e10e41c7-a461-4506-9e95-51007725cc98\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ljsnj" Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.530985 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/e10e41c7-a461-4506-9e95-51007725cc98-srv-cert\") pod \"catalog-operator-68c6474976-ljsnj\" (UID: \"e10e41c7-a461-4506-9e95-51007725cc98\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ljsnj" Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.531015 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/e10e41c7-a461-4506-9e95-51007725cc98-profile-collector-cert\") pod \"catalog-operator-68c6474976-ljsnj\" (UID: \"e10e41c7-a461-4506-9e95-51007725cc98\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ljsnj" Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.639570 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-csfkt\" (UniqueName: \"kubernetes.io/projected/e10e41c7-a461-4506-9e95-51007725cc98-kube-api-access-csfkt\") pod \"catalog-operator-68c6474976-ljsnj\" (UID: \"e10e41c7-a461-4506-9e95-51007725cc98\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ljsnj" Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.639675 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/e10e41c7-a461-4506-9e95-51007725cc98-srv-cert\") pod \"catalog-operator-68c6474976-ljsnj\" (UID: \"e10e41c7-a461-4506-9e95-51007725cc98\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ljsnj" Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.639701 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/e10e41c7-a461-4506-9e95-51007725cc98-profile-collector-cert\") pod \"catalog-operator-68c6474976-ljsnj\" (UID: \"e10e41c7-a461-4506-9e95-51007725cc98\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ljsnj" Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.646974 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/e10e41c7-a461-4506-9e95-51007725cc98-srv-cert\") pod \"catalog-operator-68c6474976-ljsnj\" (UID: \"e10e41c7-a461-4506-9e95-51007725cc98\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ljsnj" Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.646976 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: 
\"kubernetes.io/secret/e10e41c7-a461-4506-9e95-51007725cc98-profile-collector-cert\") pod \"catalog-operator-68c6474976-ljsnj\" (UID: \"e10e41c7-a461-4506-9e95-51007725cc98\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ljsnj" Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.655363 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-csfkt\" (UniqueName: \"kubernetes.io/projected/e10e41c7-a461-4506-9e95-51007725cc98-kube-api-access-csfkt\") pod \"catalog-operator-68c6474976-ljsnj\" (UID: \"e10e41c7-a461-4506-9e95-51007725cc98\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ljsnj" Jan 20 18:13:41 crc kubenswrapper[4558]: E0120 18:13:41.680746 4558 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod04ab3f66_5485_4b6f_9293_ed76b2695d50.slice/crio-5458e46e56f7d65eab79c3c48391c11132277e97ba84585b2d13259aa9368eb2.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda491bcd2_847b_41eb_addd_54e625a9e6e1.slice/crio-conmon-2ef63481a765808418e8fe2316f84a64976e170f6b42473a8954a04e0fa1bbe7.scope\": RecentStats: unable to find data in memory cache]" Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.942926 4558 generic.go:334] "Generic (PLEG): container finished" podID="666aae5d-ee4c-47d1-8cf7-0aa2cb8c813d" containerID="0fc1bcd802de9cc79769c26cb5ab84e85afa3e5dc41c20e72994c498aca1b4c0" exitCode=0 Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.943034 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-pt7m4" event={"ID":"666aae5d-ee4c-47d1-8cf7-0aa2cb8c813d","Type":"ContainerDied","Data":"0fc1bcd802de9cc79769c26cb5ab84e85afa3e5dc41c20e72994c498aca1b4c0"} Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.945328 4558 generic.go:334] "Generic (PLEG): container finished" podID="1c12cb14-b8f1-4939-84d9-6a6b851f095a" containerID="bd95c9802be7aa3b09b45d31c56df20489fe579fe93a21fade47b46985baa4cc" exitCode=0 Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.945400 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7ddb5c749-jrgz7" event={"ID":"1c12cb14-b8f1-4939-84d9-6a6b851f095a","Type":"ContainerDied","Data":"bd95c9802be7aa3b09b45d31c56df20489fe579fe93a21fade47b46985baa4cc"} Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.945440 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7ddb5c749-jrgz7" event={"ID":"1c12cb14-b8f1-4939-84d9-6a6b851f095a","Type":"ContainerDied","Data":"67d4063bebd7d0fadaacf160cdd38e5b88088d0299d04b5f060250c8e827bf92"} Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.945457 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="67d4063bebd7d0fadaacf160cdd38e5b88088d0299d04b5f060250c8e827bf92" Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.946996 4558 generic.go:334] "Generic (PLEG): container finished" podID="cbbd3f62-64b6-4cc2-8c3b-b21d317c0624" containerID="14786d8dc9d059a3c99ac1f27b72a5111fcedcf4c96caa245bb691b274f41a26" exitCode=0 Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.947069 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/openstack-baremetal-operator-controller-manager-5b9875986dl6bbk" event={"ID":"cbbd3f62-64b6-4cc2-8c3b-b21d317c0624","Type":"ContainerDied","Data":"14786d8dc9d059a3c99ac1f27b72a5111fcedcf4c96caa245bb691b274f41a26"} Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.948797 4558 generic.go:334] "Generic (PLEG): container finished" podID="8dea3161-8db9-4778-b59c-066f876ff6e2" containerID="e6cde12ec6613c6ca86331ad5562eff59e938f8143553a59aa1e652328de6c2b" exitCode=0 Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.948869 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-64cd966744-925vf" event={"ID":"8dea3161-8db9-4778-b59c-066f876ff6e2","Type":"ContainerDied","Data":"e6cde12ec6613c6ca86331ad5562eff59e938f8143553a59aa1e652328de6c2b"} Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.948905 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-64cd966744-925vf" event={"ID":"8dea3161-8db9-4778-b59c-066f876ff6e2","Type":"ContainerDied","Data":"39933de3fbfb847d356269e4650840d03a83b8f8833b334f119f985f25844f20"} Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.948929 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="39933de3fbfb847d356269e4650840d03a83b8f8833b334f119f985f25844f20" Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.950133 4558 generic.go:334] "Generic (PLEG): container finished" podID="dba009f4-faa1-4d6b-bc19-c00eedd6c7c7" containerID="b21d03a1bce545072a0f5d40d609ff0a56267d13add877890bddf1f91228ba7f" exitCode=0 Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.950191 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-c87fff755-plg7f" event={"ID":"dba009f4-faa1-4d6b-bc19-c00eedd6c7c7","Type":"ContainerDied","Data":"b21d03a1bce545072a0f5d40d609ff0a56267d13add877890bddf1f91228ba7f"} Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.951329 4558 generic.go:334] "Generic (PLEG): container finished" podID="88bca883-e84c-4223-80de-4c389d44f9fe" containerID="3b80173f89c66eaddb2369263d220ac89d3df4fc48524e79192762bb9e4aed17" exitCode=0 Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.951369 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-k28w4" event={"ID":"88bca883-e84c-4223-80de-4c389d44f9fe","Type":"ContainerDied","Data":"3b80173f89c66eaddb2369263d220ac89d3df4fc48524e79192762bb9e4aed17"} Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.953475 4558 generic.go:334] "Generic (PLEG): container finished" podID="04ab3f66-5485-4b6f-9293-ed76b2695d50" containerID="5458e46e56f7d65eab79c3c48391c11132277e97ba84585b2d13259aa9368eb2" exitCode=0 Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.953550 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zd9qc" event={"ID":"04ab3f66-5485-4b6f-9293-ed76b2695d50","Type":"ContainerDied","Data":"5458e46e56f7d65eab79c3c48391c11132277e97ba84585b2d13259aa9368eb2"} Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.955366 4558 generic.go:334] "Generic (PLEG): container finished" podID="31cd4c27-d472-4406-886a-7222315c465d" containerID="5ef1d53f579e55a1d5029b72751834ff89a20ca0419624b102fe6231908e54b1" exitCode=0 Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.955463 4558 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-gvd5f" event={"ID":"31cd4c27-d472-4406-886a-7222315c465d","Type":"ContainerDied","Data":"5ef1d53f579e55a1d5029b72751834ff89a20ca0419624b102fe6231908e54b1"} Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.956832 4558 generic.go:334] "Generic (PLEG): container finished" podID="deea5510-f570-48ac-b23f-bba261ef3951" containerID="64bcb4cba85895bee43f773af00ef7e07c4d7bd80d07b323ae48eeb3ec59dd5a" exitCode=0 Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.956928 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-75bfd788c8-wflcw" event={"ID":"deea5510-f570-48ac-b23f-bba261ef3951","Type":"ContainerDied","Data":"64bcb4cba85895bee43f773af00ef7e07c4d7bd80d07b323ae48eeb3ec59dd5a"} Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.956961 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-75bfd788c8-wflcw" event={"ID":"deea5510-f570-48ac-b23f-bba261ef3951","Type":"ContainerDied","Data":"77f052a321b1fe1b30248d395ed5ac53d1857594bddd143973d267c4b4bcefc9"} Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.956974 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="77f052a321b1fe1b30248d395ed5ac53d1857594bddd143973d267c4b4bcefc9" Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.958682 4558 generic.go:334] "Generic (PLEG): container finished" podID="aec45707-2c02-43bb-b7d9-a24b906cadca" containerID="9b5a1aa8966e4b09819fa1a3e8844f0277e5fd276937d6ef5896353e424ac13b" exitCode=0 Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.958776 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-7fc9b76cf6-9ld4x" event={"ID":"aec45707-2c02-43bb-b7d9-a24b906cadca","Type":"ContainerDied","Data":"9b5a1aa8966e4b09819fa1a3e8844f0277e5fd276937d6ef5896353e424ac13b"} Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.958813 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-7fc9b76cf6-9ld4x" event={"ID":"aec45707-2c02-43bb-b7d9-a24b906cadca","Type":"ContainerDied","Data":"a78147227d827b6853f6a956387bcca6c3b48e18eec67855bdb5107d778025ef"} Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.958830 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a78147227d827b6853f6a956387bcca6c3b48e18eec67855bdb5107d778025ef" Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.960059 4558 generic.go:334] "Generic (PLEG): container finished" podID="54c6bd49-d022-40c6-b547-58d3eb5ba7e2" containerID="ecb3d0f3bf1d477d3fea81ea29ae6dbfb39eff4f2f6120f21ae3ef0a4efba324" exitCode=0 Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.960137 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-9f958b845-q8wlk" event={"ID":"54c6bd49-d022-40c6-b547-58d3eb5ba7e2","Type":"ContainerDied","Data":"ecb3d0f3bf1d477d3fea81ea29ae6dbfb39eff4f2f6120f21ae3ef0a4efba324"} Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.960185 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-9f958b845-q8wlk" event={"ID":"54c6bd49-d022-40c6-b547-58d3eb5ba7e2","Type":"ContainerDied","Data":"1d2622698d065c909f6269ea676cdd70f3bf297ed4f93892642244f06a392f55"} Jan 20 
18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.960204 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1d2622698d065c909f6269ea676cdd70f3bf297ed4f93892642244f06a392f55" Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.961286 4558 generic.go:334] "Generic (PLEG): container finished" podID="973549d4-6a26-4166-8be0-b0dfb7c5aec6" containerID="6aa6ab1b79913f16a8c37d31d4f54307176b8cfa305a050b83890240a717629c" exitCode=0 Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.961358 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-9b68f5989-mdfnr" event={"ID":"973549d4-6a26-4166-8be0-b0dfb7c5aec6","Type":"ContainerDied","Data":"6aa6ab1b79913f16a8c37d31d4f54307176b8cfa305a050b83890240a717629c"} Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.962631 4558 generic.go:334] "Generic (PLEG): container finished" podID="a491bcd2-847b-41eb-addd-54e625a9e6e1" containerID="2ef63481a765808418e8fe2316f84a64976e170f6b42473a8954a04e0fa1bbe7" exitCode=0 Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.962702 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-init-6d4d7d8545-4v6kg" event={"ID":"a491bcd2-847b-41eb-addd-54e625a9e6e1","Type":"ContainerDied","Data":"2ef63481a765808418e8fe2316f84a64976e170f6b42473a8954a04e0fa1bbe7"} Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.966517 4558 generic.go:334] "Generic (PLEG): container finished" podID="e7054184-22f8-43b2-b75f-0534d5bb467f" containerID="50d9b3907efdf86bfbc435a6c7984f8ffc06ca5383c5f947cc9ccf10c522de7b" exitCode=0 Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.966607 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-78757b4889-t99dx" event={"ID":"e7054184-22f8-43b2-b75f-0534d5bb467f","Type":"ContainerDied","Data":"50d9b3907efdf86bfbc435a6c7984f8ffc06ca5383c5f947cc9ccf10c522de7b"} Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.967793 4558 generic.go:334] "Generic (PLEG): container finished" podID="115bb848-a715-4eac-8993-63842460dd3d" containerID="f1506f0ee5ae0e37dc7c3f1bab9db3a2151ee378ef978f06310c406e3b62a13b" exitCode=0 Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.967897 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-767fdc4f47-8mw4r" event={"ID":"115bb848-a715-4eac-8993-63842460dd3d","Type":"ContainerDied","Data":"f1506f0ee5ae0e37dc7c3f1bab9db3a2151ee378ef978f06310c406e3b62a13b"} Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.971829 4558 generic.go:334] "Generic (PLEG): container finished" podID="fafbffd7-66c2-42ab-97c2-034a1fd6fd7c" containerID="2ff3e4d2ad64773a61f739b0fa6f4ece97c0c40fd69baf1df699ce574d95ed13" exitCode=0 Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.971869 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-7cd8bc9dbb-9hmt6" event={"ID":"fafbffd7-66c2-42ab-97c2-034a1fd6fd7c","Type":"ContainerDied","Data":"2ff3e4d2ad64773a61f739b0fa6f4ece97c0c40fd69baf1df699ce574d95ed13"} Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.972889 4558 generic.go:334] "Generic (PLEG): container finished" podID="1998e4dc-a1d4-405e-b0dd-546e1d5fed6f" containerID="96b14ccca580d6f73044405aa0fb79aa275da3630e3f85445651826254e89bab" exitCode=0 Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.972939 4558 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-55db956ddc-6j4g5" event={"ID":"1998e4dc-a1d4-405e-b0dd-546e1d5fed6f","Type":"ContainerDied","Data":"96b14ccca580d6f73044405aa0fb79aa275da3630e3f85445651826254e89bab"} Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.973868 4558 generic.go:334] "Generic (PLEG): container finished" podID="af89563a-d3e9-4424-8fde-a0a77b21b7e8" containerID="5a0b4e0b6774ce77a765f98980c1f9ce2e4138aebc6117795de6f7f8346a3737" exitCode=0 Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.973907 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-qwwmt" event={"ID":"af89563a-d3e9-4424-8fde-a0a77b21b7e8","Type":"ContainerDied","Data":"5a0b4e0b6774ce77a765f98980c1f9ce2e4138aebc6117795de6f7f8346a3737"} Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.973939 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-qwwmt" event={"ID":"af89563a-d3e9-4424-8fde-a0a77b21b7e8","Type":"ContainerDied","Data":"95e192423190c4b02745003348fc10127faf0848aab43a357cad7726917e1dae"} Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.973952 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="95e192423190c4b02745003348fc10127faf0848aab43a357cad7726917e1dae" Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.974877 4558 generic.go:334] "Generic (PLEG): container finished" podID="da27b90a-7f65-4d58-92f1-d46b0d92fd79" containerID="c4f4ac4e6be0e8cf007dc4d69369b47f2f03fdf263af1c11172de431224ca4f9" exitCode=0 Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.974937 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-5f8f495fcf-ldhxq" event={"ID":"da27b90a-7f65-4d58-92f1-d46b0d92fd79","Type":"ContainerDied","Data":"c4f4ac4e6be0e8cf007dc4d69369b47f2f03fdf263af1c11172de431224ca4f9"} Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.984331 4558 generic.go:334] "Generic (PLEG): container finished" podID="42eb153b-1142-4dc6-9e34-b4cc38e49701" containerID="ad003af07a804bd7051853e53266eefbbf5253d38becc3b1787e7568e72d2f2c" exitCode=0 Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.984565 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-65849867d6-js8vs" event={"ID":"42eb153b-1142-4dc6-9e34-b4cc38e49701","Type":"ContainerDied","Data":"ad003af07a804bd7051853e53266eefbbf5253d38becc3b1787e7568e72d2f2c"} Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.984664 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-65849867d6-js8vs" event={"ID":"42eb153b-1142-4dc6-9e34-b4cc38e49701","Type":"ContainerDied","Data":"0355fea3cd6f04ab4c1fb797c9af673336ce1d3cbdc25889bdc0212ac8bd9e76"} Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.984814 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0355fea3cd6f04ab4c1fb797c9af673336ce1d3cbdc25889bdc0212ac8bd9e76" Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.989471 4558 generic.go:334] "Generic (PLEG): container finished" podID="0292902b-f0ca-4abc-b220-4e3268243db5" containerID="63231d660c87582107f72247293a009f16107c11a93e6a345163da171fa24b3e" exitCode=0 Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.989554 4558 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-686df47fcb-tsdgq" event={"ID":"0292902b-f0ca-4abc-b220-4e3268243db5","Type":"ContainerDied","Data":"63231d660c87582107f72247293a009f16107c11a93e6a345163da171fa24b3e"} Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.989620 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-686df47fcb-tsdgq" event={"ID":"0292902b-f0ca-4abc-b220-4e3268243db5","Type":"ContainerDied","Data":"0bd39ffb8d125a629ba0fa7a8715c8589b3898686eb6047476712f2e5529161f"} Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.989653 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0bd39ffb8d125a629ba0fa7a8715c8589b3898686eb6047476712f2e5529161f" Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.992507 4558 generic.go:334] "Generic (PLEG): container finished" podID="e645ac52-b990-4b3f-a282-59ce60cacaff" containerID="d569da36da81b9b9b27be2b8f5c9a33c02392ee1c64ee171850ebd6b90d34bc4" exitCode=0 Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.992573 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-c6994669c-78pts" event={"ID":"e645ac52-b990-4b3f-a282-59ce60cacaff","Type":"ContainerDied","Data":"d569da36da81b9b9b27be2b8f5c9a33c02392ee1c64ee171850ebd6b90d34bc4"} Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.994356 4558 generic.go:334] "Generic (PLEG): container finished" podID="2e9e43c0-674a-4769-b44b-7eb6ee1a69d5" containerID="5fea48a489b7236322aa035fd26c8bc64999234427695faf363f012948d76b1b" exitCode=0 Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.994420 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-77c48c7859-4f7km" event={"ID":"2e9e43c0-674a-4769-b44b-7eb6ee1a69d5","Type":"ContainerDied","Data":"5fea48a489b7236322aa035fd26c8bc64999234427695faf363f012948d76b1b"} Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.995835 4558 generic.go:334] "Generic (PLEG): container finished" podID="f1a6abd3-1c13-40ca-8e20-344fd40bc348" containerID="0e2eb3da46b7bd583c44ebc8b13f7d1c4b0aa944896d7eec0b5487a1c1e21756" exitCode=0 Jan 20 18:13:41 crc kubenswrapper[4558]: I0120 18:13:41.995897 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-cb4666565-6689s" event={"ID":"f1a6abd3-1c13-40ca-8e20-344fd40bc348","Type":"ContainerDied","Data":"0e2eb3da46b7bd583c44ebc8b13f7d1c4b0aa944896d7eec0b5487a1c1e21756"} Jan 20 18:13:42 crc kubenswrapper[4558]: I0120 18:13:41.997248 4558 generic.go:334] "Generic (PLEG): container finished" podID="56c68b3c-13ac-4e77-a8a5-bd99d83d5667" containerID="51ecd2e724af29b0f6a51dc85e4ad8a754870e77bf87c5ae416f003c008d7caf" exitCode=0 Jan 20 18:13:42 crc kubenswrapper[4558]: I0120 18:13:41.997360 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-85dd56d4cc-cvlzf" event={"ID":"56c68b3c-13ac-4e77-a8a5-bd99d83d5667","Type":"ContainerDied","Data":"51ecd2e724af29b0f6a51dc85e4ad8a754870e77bf87c5ae416f003c008d7caf"} Jan 20 18:13:42 crc kubenswrapper[4558]: I0120 18:13:41.997428 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-85dd56d4cc-cvlzf" 
event={"ID":"56c68b3c-13ac-4e77-a8a5-bd99d83d5667","Type":"ContainerDied","Data":"ec9c70089d261c58fc10859ab6e45e365d0e82bb520f8ec6f1aaa63937d5d418"} Jan 20 18:13:42 crc kubenswrapper[4558]: I0120 18:13:41.997442 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ec9c70089d261c58fc10859ab6e45e365d0e82bb520f8ec6f1aaa63937d5d418" Jan 20 18:13:42 crc kubenswrapper[4558]: I0120 18:13:41.998968 4558 generic.go:334] "Generic (PLEG): container finished" podID="6e337f3c-6900-4314-adbb-aec361ccb7cc" containerID="45317194cb1488747739e7ccc74659e1093db6bf0596f5b55836038289216605" exitCode=0 Jan 20 18:13:42 crc kubenswrapper[4558]: I0120 18:13:41.999055 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-864f6b75bf-4jj8h" event={"ID":"6e337f3c-6900-4314-adbb-aec361ccb7cc","Type":"ContainerDied","Data":"45317194cb1488747739e7ccc74659e1093db6bf0596f5b55836038289216605"} Jan 20 18:13:42 crc kubenswrapper[4558]: E0120 18:13:42.352195 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/edpm-compute-no-nodes: configmap "edpm-compute-no-nodes" not found Jan 20 18:13:42 crc kubenswrapper[4558]: E0120 18:13:42.352288 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/94d7f76b-8cf8-4bf5-9844-526d8b79fffc-edpm-compute-no-nodes podName:94d7f76b-8cf8-4bf5-9844-526d8b79fffc nodeName:}" failed. No retries permitted until 2026-01-20 18:13:46.352264951 +0000 UTC m=+5520.112602918 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "edpm-compute-no-nodes" (UniqueName: "kubernetes.io/configmap/94d7f76b-8cf8-4bf5-9844-526d8b79fffc-edpm-compute-no-nodes") pod "dnsmasq-dnsmasq-64864b6d57-7zmjn" (UID: "94d7f76b-8cf8-4bf5-9844-526d8b79fffc") : configmap "edpm-compute-no-nodes" not found Jan 20 18:13:42 crc kubenswrapper[4558]: I0120 18:13:42.578323 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cb58f5ea-4fc9-4dfd-9ca3-93a04769d2da" path="/var/lib/kubelet/pods/cb58f5ea-4fc9-4dfd-9ca3-93a04769d2da/volumes" Jan 20 18:13:42 crc kubenswrapper[4558]: I0120 18:13:42.953241 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ljsnj" Jan 20 18:13:42 crc kubenswrapper[4558]: I0120 18:13:42.959001 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-9f958b845-q8wlk" Jan 20 18:13:42 crc kubenswrapper[4558]: I0120 18:13:42.965851 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-75bfd788c8-wflcw" Jan 20 18:13:42 crc kubenswrapper[4558]: I0120 18:13:42.971196 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-qwwmt" Jan 20 18:13:42 crc kubenswrapper[4558]: I0120 18:13:42.980420 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-65849867d6-js8vs" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.009672 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-55db956ddc-6j4g5" event={"ID":"1998e4dc-a1d4-405e-b0dd-546e1d5fed6f","Type":"ContainerDied","Data":"6527f64ed52e3ab43a1eb08a98213cc0550684dd7dd545297a441c95f1ebba4a"} Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.009733 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6527f64ed52e3ab43a1eb08a98213cc0550684dd7dd545297a441c95f1ebba4a" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.015525 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zd9qc" event={"ID":"04ab3f66-5485-4b6f-9293-ed76b2695d50","Type":"ContainerDied","Data":"fc848064a8e34f306dacbe53cfc5ac397d6489a3ad20433bb5c67d23d7dccbd4"} Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.015549 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fc848064a8e34f306dacbe53cfc5ac397d6489a3ad20433bb5c67d23d7dccbd4" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.018595 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-pt7m4" event={"ID":"666aae5d-ee4c-47d1-8cf7-0aa2cb8c813d","Type":"ContainerDied","Data":"5ef4294ca664514991957dd1796480ea4ca0ba14c6896d3b9c89bd88d43388e5"} Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.018646 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5ef4294ca664514991957dd1796480ea4ca0ba14c6896d3b9c89bd88d43388e5" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.020107 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5b9875986dl6bbk" event={"ID":"cbbd3f62-64b6-4cc2-8c3b-b21d317c0624","Type":"ContainerDied","Data":"500e7a715909470495e104983b0e9b743961d3cf052eba13bc5362bb72398884"} Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.020150 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="500e7a715909470495e104983b0e9b743961d3cf052eba13bc5362bb72398884" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.022999 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-k28w4" event={"ID":"88bca883-e84c-4223-80de-4c389d44f9fe","Type":"ContainerDied","Data":"73315ccc650e02a7ee82add71d227bbb21cae3bc59495806b28f8083bed4bd15"} Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.023025 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="73315ccc650e02a7ee82add71d227bbb21cae3bc59495806b28f8083bed4bd15" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.029068 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-9b68f5989-mdfnr" event={"ID":"973549d4-6a26-4166-8be0-b0dfb7c5aec6","Type":"ContainerDied","Data":"d5fe121897b5174786c6780029aabcd803d789144d4124c7ce2486f21b7ab7ff"} Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.029105 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d5fe121897b5174786c6780029aabcd803d789144d4124c7ce2486f21b7ab7ff" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.033061 4558 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-5f8f495fcf-ldhxq" event={"ID":"da27b90a-7f65-4d58-92f1-d46b0d92fd79","Type":"ContainerDied","Data":"d1faba4ee13a7bafb0b75d449fbc83fa7cb71005eb76b1ecd5bb12da92401fdd"} Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.033082 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d1faba4ee13a7bafb0b75d449fbc83fa7cb71005eb76b1ecd5bb12da92401fdd" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.033660 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-686df47fcb-tsdgq" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.034503 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-init-6d4d7d8545-4v6kg" event={"ID":"a491bcd2-847b-41eb-addd-54e625a9e6e1","Type":"ContainerDied","Data":"2b8abeab32c6e045c040ac133acde37b745dca669228d4ba2a5e925998835139"} Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.034530 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2b8abeab32c6e045c040ac133acde37b745dca669228d4ba2a5e925998835139" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.035790 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-78757b4889-t99dx" event={"ID":"e7054184-22f8-43b2-b75f-0534d5bb467f","Type":"ContainerDied","Data":"fffa13f3c9140100511254269a251a0171968943c09935e7c3f8c50dc0d6f823"} Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.035813 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fffa13f3c9140100511254269a251a0171968943c09935e7c3f8c50dc0d6f823" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.039784 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-7cd8bc9dbb-9hmt6" event={"ID":"fafbffd7-66c2-42ab-97c2-034a1fd6fd7c","Type":"ContainerDied","Data":"a769665ebdc91c1821d741cf2a08f672ea2daed89255a87c446061803b18b4e1"} Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.039823 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a769665ebdc91c1821d741cf2a08f672ea2daed89255a87c446061803b18b4e1" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.042784 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7ddb5c749-jrgz7" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.043370 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-gvd5f" event={"ID":"31cd4c27-d472-4406-886a-7222315c465d","Type":"ContainerDied","Data":"4e4469f7470b01017b7ae12b8d06566a27bf4f549e470c52d36752cc814826b0"} Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.043403 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4e4469f7470b01017b7ae12b8d06566a27bf4f549e470c52d36752cc814826b0" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.055300 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-c6994669c-78pts" event={"ID":"e645ac52-b990-4b3f-a282-59ce60cacaff","Type":"ContainerDied","Data":"27056ae81fe7826da1d6fa9f6f1d8e788a8eb91d8889eda00b46f36834734ebc"} Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.055319 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="27056ae81fe7826da1d6fa9f6f1d8e788a8eb91d8889eda00b46f36834734ebc" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.055523 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-7fc9b76cf6-9ld4x" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.056647 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-c87fff755-plg7f" event={"ID":"dba009f4-faa1-4d6b-bc19-c00eedd6c7c7","Type":"ContainerDied","Data":"4b45e48a7b0a0cdd1b20556b57b0790aacc8fc228006c34bc740ab81af9229d7"} Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.056664 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4b45e48a7b0a0cdd1b20556b57b0790aacc8fc228006c34bc740ab81af9229d7" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.058449 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-767fdc4f47-8mw4r" event={"ID":"115bb848-a715-4eac-8993-63842460dd3d","Type":"ContainerDied","Data":"c938c07e7c4489351287def83b4a4be818302d0370e36a5423e6dd9a1d8f4159"} Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.058468 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c938c07e7c4489351287def83b4a4be818302d0370e36a5423e6dd9a1d8f4159" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.061518 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xl9zj\" (UniqueName: \"kubernetes.io/projected/54c6bd49-d022-40c6-b547-58d3eb5ba7e2-kube-api-access-xl9zj\") pod \"54c6bd49-d022-40c6-b547-58d3eb5ba7e2\" (UID: \"54c6bd49-d022-40c6-b547-58d3eb5ba7e2\") " Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.061651 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8rxpt\" (UniqueName: \"kubernetes.io/projected/0292902b-f0ca-4abc-b220-4e3268243db5-kube-api-access-8rxpt\") pod \"0292902b-f0ca-4abc-b220-4e3268243db5\" (UID: \"0292902b-f0ca-4abc-b220-4e3268243db5\") " Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.061712 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7sxlc\" (UniqueName: 
\"kubernetes.io/projected/deea5510-f570-48ac-b23f-bba261ef3951-kube-api-access-7sxlc\") pod \"deea5510-f570-48ac-b23f-bba261ef3951\" (UID: \"deea5510-f570-48ac-b23f-bba261ef3951\") " Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.061774 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xm2kt\" (UniqueName: \"kubernetes.io/projected/af89563a-d3e9-4424-8fde-a0a77b21b7e8-kube-api-access-xm2kt\") pod \"af89563a-d3e9-4424-8fde-a0a77b21b7e8\" (UID: \"af89563a-d3e9-4424-8fde-a0a77b21b7e8\") " Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.061807 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xs2mv\" (UniqueName: \"kubernetes.io/projected/1c12cb14-b8f1-4939-84d9-6a6b851f095a-kube-api-access-xs2mv\") pod \"1c12cb14-b8f1-4939-84d9-6a6b851f095a\" (UID: \"1c12cb14-b8f1-4939-84d9-6a6b851f095a\") " Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.061835 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/deea5510-f570-48ac-b23f-bba261ef3951-webhook-certs\") pod \"deea5510-f570-48ac-b23f-bba261ef3951\" (UID: \"deea5510-f570-48ac-b23f-bba261ef3951\") " Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.061854 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/deea5510-f570-48ac-b23f-bba261ef3951-metrics-certs\") pod \"deea5510-f570-48ac-b23f-bba261ef3951\" (UID: \"deea5510-f570-48ac-b23f-bba261ef3951\") " Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.061872 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6qfl9\" (UniqueName: \"kubernetes.io/projected/42eb153b-1142-4dc6-9e34-b4cc38e49701-kube-api-access-6qfl9\") pod \"42eb153b-1142-4dc6-9e34-b4cc38e49701\" (UID: \"42eb153b-1142-4dc6-9e34-b4cc38e49701\") " Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.065364 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-85dd56d4cc-cvlzf" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.069059 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-864f6b75bf-4jj8h" event={"ID":"6e337f3c-6900-4314-adbb-aec361ccb7cc","Type":"ContainerDied","Data":"799f8d0787755ffca063fe9cd0642def285cace0fcb26e687d0fa1a649496264"} Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.069082 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="799f8d0787755ffca063fe9cd0642def285cace0fcb26e687d0fa1a649496264" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.074304 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/54c6bd49-d022-40c6-b547-58d3eb5ba7e2-kube-api-access-xl9zj" (OuterVolumeSpecName: "kube-api-access-xl9zj") pod "54c6bd49-d022-40c6-b547-58d3eb5ba7e2" (UID: "54c6bd49-d022-40c6-b547-58d3eb5ba7e2"). InnerVolumeSpecName "kube-api-access-xl9zj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.073349 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-64cd966744-925vf" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.076115 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/deea5510-f570-48ac-b23f-bba261ef3951-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "deea5510-f570-48ac-b23f-bba261ef3951" (UID: "deea5510-f570-48ac-b23f-bba261ef3951"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.076590 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1c12cb14-b8f1-4939-84d9-6a6b851f095a-kube-api-access-xs2mv" (OuterVolumeSpecName: "kube-api-access-xs2mv") pod "1c12cb14-b8f1-4939-84d9-6a6b851f095a" (UID: "1c12cb14-b8f1-4939-84d9-6a6b851f095a"). InnerVolumeSpecName "kube-api-access-xs2mv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.089478 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/deea5510-f570-48ac-b23f-bba261ef3951-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "deea5510-f570-48ac-b23f-bba261ef3951" (UID: "deea5510-f570-48ac-b23f-bba261ef3951"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.089753 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/42eb153b-1142-4dc6-9e34-b4cc38e49701-kube-api-access-6qfl9" (OuterVolumeSpecName: "kube-api-access-6qfl9") pod "42eb153b-1142-4dc6-9e34-b4cc38e49701" (UID: "42eb153b-1142-4dc6-9e34-b4cc38e49701"). InnerVolumeSpecName "kube-api-access-6qfl9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.089766 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/deea5510-f570-48ac-b23f-bba261ef3951-kube-api-access-7sxlc" (OuterVolumeSpecName: "kube-api-access-7sxlc") pod "deea5510-f570-48ac-b23f-bba261ef3951" (UID: "deea5510-f570-48ac-b23f-bba261ef3951"). InnerVolumeSpecName "kube-api-access-7sxlc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.089836 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0292902b-f0ca-4abc-b220-4e3268243db5-kube-api-access-8rxpt" (OuterVolumeSpecName: "kube-api-access-8rxpt") pod "0292902b-f0ca-4abc-b220-4e3268243db5" (UID: "0292902b-f0ca-4abc-b220-4e3268243db5"). InnerVolumeSpecName "kube-api-access-8rxpt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.090031 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-77c48c7859-4f7km" event={"ID":"2e9e43c0-674a-4769-b44b-7eb6ee1a69d5","Type":"ContainerDied","Data":"7830ae1810760294cff9779d3b1f563fd4dd2a7667783c5e89fa7f597c46168e"} Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.090065 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7830ae1810760294cff9779d3b1f563fd4dd2a7667783c5e89fa7f597c46168e" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.090100 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/af89563a-d3e9-4424-8fde-a0a77b21b7e8-kube-api-access-xm2kt" (OuterVolumeSpecName: "kube-api-access-xm2kt") pod "af89563a-d3e9-4424-8fde-a0a77b21b7e8" (UID: "af89563a-d3e9-4424-8fde-a0a77b21b7e8"). InnerVolumeSpecName "kube-api-access-xm2kt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.091644 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-cb4666565-6689s" event={"ID":"f1a6abd3-1c13-40ca-8e20-344fd40bc348","Type":"ContainerDied","Data":"2668f4beeb53773d859e9276acc935b99cc710342eafa7af9f148e1a8c4e4023"} Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.091672 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2668f4beeb53773d859e9276acc935b99cc710342eafa7af9f148e1a8c4e4023" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.091689 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-qwwmt" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.091722 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-65849867d6-js8vs" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.091887 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-75bfd788c8-wflcw" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.091926 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-9f958b845-q8wlk" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.100943 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-78757b4889-t99dx" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.106520 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-5f8f495fcf-ldhxq" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.116784 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-init-6d4d7d8545-4v6kg" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.126458 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-cb4666565-6689s" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.135336 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-pt7m4" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.141526 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-55db956ddc-6j4g5" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.152229 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-77c48c7859-4f7km" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.155985 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-c6994669c-78pts" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.165640 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-7cd8bc9dbb-9hmt6" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.169956 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-75bfd788c8-wflcw"] Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.171093 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jwfxn\" (UniqueName: \"kubernetes.io/projected/1998e4dc-a1d4-405e-b0dd-546e1d5fed6f-kube-api-access-jwfxn\") pod \"1998e4dc-a1d4-405e-b0dd-546e1d5fed6f\" (UID: \"1998e4dc-a1d4-405e-b0dd-546e1d5fed6f\") " Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.171474 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qnxsc\" (UniqueName: \"kubernetes.io/projected/da27b90a-7f65-4d58-92f1-d46b0d92fd79-kube-api-access-qnxsc\") pod \"da27b90a-7f65-4d58-92f1-d46b0d92fd79\" (UID: \"da27b90a-7f65-4d58-92f1-d46b0d92fd79\") " Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.171703 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4wqzs\" (UniqueName: \"kubernetes.io/projected/2e9e43c0-674a-4769-b44b-7eb6ee1a69d5-kube-api-access-4wqzs\") pod \"2e9e43c0-674a-4769-b44b-7eb6ee1a69d5\" (UID: \"2e9e43c0-674a-4769-b44b-7eb6ee1a69d5\") " Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.171838 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k9ktz\" (UniqueName: \"kubernetes.io/projected/aec45707-2c02-43bb-b7d9-a24b906cadca-kube-api-access-k9ktz\") pod \"aec45707-2c02-43bb-b7d9-a24b906cadca\" (UID: \"aec45707-2c02-43bb-b7d9-a24b906cadca\") " Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.171981 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2z8zq\" (UniqueName: \"kubernetes.io/projected/8dea3161-8db9-4778-b59c-066f876ff6e2-kube-api-access-2z8zq\") pod \"8dea3161-8db9-4778-b59c-066f876ff6e2\" (UID: \"8dea3161-8db9-4778-b59c-066f876ff6e2\") " Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.176188 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-drmnl\" (UniqueName: \"kubernetes.io/projected/666aae5d-ee4c-47d1-8cf7-0aa2cb8c813d-kube-api-access-drmnl\") pod \"666aae5d-ee4c-47d1-8cf7-0aa2cb8c813d\" (UID: \"666aae5d-ee4c-47d1-8cf7-0aa2cb8c813d\") " Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.176295 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lncvc\" 
(UniqueName: \"kubernetes.io/projected/a491bcd2-847b-41eb-addd-54e625a9e6e1-kube-api-access-lncvc\") pod \"a491bcd2-847b-41eb-addd-54e625a9e6e1\" (UID: \"a491bcd2-847b-41eb-addd-54e625a9e6e1\") " Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.176596 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2e9e43c0-674a-4769-b44b-7eb6ee1a69d5-cert\") pod \"2e9e43c0-674a-4769-b44b-7eb6ee1a69d5\" (UID: \"2e9e43c0-674a-4769-b44b-7eb6ee1a69d5\") " Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.176683 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-68gw2\" (UniqueName: \"kubernetes.io/projected/e7054184-22f8-43b2-b75f-0534d5bb467f-kube-api-access-68gw2\") pod \"e7054184-22f8-43b2-b75f-0534d5bb467f\" (UID: \"e7054184-22f8-43b2-b75f-0534d5bb467f\") " Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.176834 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n7lck\" (UniqueName: \"kubernetes.io/projected/f1a6abd3-1c13-40ca-8e20-344fd40bc348-kube-api-access-n7lck\") pod \"f1a6abd3-1c13-40ca-8e20-344fd40bc348\" (UID: \"f1a6abd3-1c13-40ca-8e20-344fd40bc348\") " Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.176981 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-99gtz\" (UniqueName: \"kubernetes.io/projected/56c68b3c-13ac-4e77-a8a5-bd99d83d5667-kube-api-access-99gtz\") pod \"56c68b3c-13ac-4e77-a8a5-bd99d83d5667\" (UID: \"56c68b3c-13ac-4e77-a8a5-bd99d83d5667\") " Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.177877 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8rxpt\" (UniqueName: \"kubernetes.io/projected/0292902b-f0ca-4abc-b220-4e3268243db5-kube-api-access-8rxpt\") on node \"crc\" DevicePath \"\"" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.178783 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7sxlc\" (UniqueName: \"kubernetes.io/projected/deea5510-f570-48ac-b23f-bba261ef3951-kube-api-access-7sxlc\") on node \"crc\" DevicePath \"\"" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.180646 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xm2kt\" (UniqueName: \"kubernetes.io/projected/af89563a-d3e9-4424-8fde-a0a77b21b7e8-kube-api-access-xm2kt\") on node \"crc\" DevicePath \"\"" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.180681 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xs2mv\" (UniqueName: \"kubernetes.io/projected/1c12cb14-b8f1-4939-84d9-6a6b851f095a-kube-api-access-xs2mv\") on node \"crc\" DevicePath \"\"" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.180713 4558 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/deea5510-f570-48ac-b23f-bba261ef3951-webhook-certs\") on node \"crc\" DevicePath \"\"" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.180724 4558 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/deea5510-f570-48ac-b23f-bba261ef3951-metrics-certs\") on node \"crc\" DevicePath \"\"" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.180736 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6qfl9\" (UniqueName: \"kubernetes.io/projected/42eb153b-1142-4dc6-9e34-b4cc38e49701-kube-api-access-6qfl9\") on node \"crc\" 
DevicePath \"\"" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.180747 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xl9zj\" (UniqueName: \"kubernetes.io/projected/54c6bd49-d022-40c6-b547-58d3eb5ba7e2-kube-api-access-xl9zj\") on node \"crc\" DevicePath \"\"" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.172617 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-9b68f5989-mdfnr" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.176879 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1998e4dc-a1d4-405e-b0dd-546e1d5fed6f-kube-api-access-jwfxn" (OuterVolumeSpecName: "kube-api-access-jwfxn") pod "1998e4dc-a1d4-405e-b0dd-546e1d5fed6f" (UID: "1998e4dc-a1d4-405e-b0dd-546e1d5fed6f"). InnerVolumeSpecName "kube-api-access-jwfxn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.182753 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/666aae5d-ee4c-47d1-8cf7-0aa2cb8c813d-kube-api-access-drmnl" (OuterVolumeSpecName: "kube-api-access-drmnl") pod "666aae5d-ee4c-47d1-8cf7-0aa2cb8c813d" (UID: "666aae5d-ee4c-47d1-8cf7-0aa2cb8c813d"). InnerVolumeSpecName "kube-api-access-drmnl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.182943 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/da27b90a-7f65-4d58-92f1-d46b0d92fd79-kube-api-access-qnxsc" (OuterVolumeSpecName: "kube-api-access-qnxsc") pod "da27b90a-7f65-4d58-92f1-d46b0d92fd79" (UID: "da27b90a-7f65-4d58-92f1-d46b0d92fd79"). InnerVolumeSpecName "kube-api-access-qnxsc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.182964 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aec45707-2c02-43bb-b7d9-a24b906cadca-kube-api-access-k9ktz" (OuterVolumeSpecName: "kube-api-access-k9ktz") pod "aec45707-2c02-43bb-b7d9-a24b906cadca" (UID: "aec45707-2c02-43bb-b7d9-a24b906cadca"). InnerVolumeSpecName "kube-api-access-k9ktz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.183882 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2e9e43c0-674a-4769-b44b-7eb6ee1a69d5-cert" (OuterVolumeSpecName: "cert") pod "2e9e43c0-674a-4769-b44b-7eb6ee1a69d5" (UID: "2e9e43c0-674a-4769-b44b-7eb6ee1a69d5"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.184517 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8dea3161-8db9-4778-b59c-066f876ff6e2-kube-api-access-2z8zq" (OuterVolumeSpecName: "kube-api-access-2z8zq") pod "8dea3161-8db9-4778-b59c-066f876ff6e2" (UID: "8dea3161-8db9-4778-b59c-066f876ff6e2"). InnerVolumeSpecName "kube-api-access-2z8zq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.187713 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a491bcd2-847b-41eb-addd-54e625a9e6e1-kube-api-access-lncvc" (OuterVolumeSpecName: "kube-api-access-lncvc") pod "a491bcd2-847b-41eb-addd-54e625a9e6e1" (UID: "a491bcd2-847b-41eb-addd-54e625a9e6e1"). InnerVolumeSpecName "kube-api-access-lncvc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.188295 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-k28w4" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.189088 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7054184-22f8-43b2-b75f-0534d5bb467f-kube-api-access-68gw2" (OuterVolumeSpecName: "kube-api-access-68gw2") pod "e7054184-22f8-43b2-b75f-0534d5bb467f" (UID: "e7054184-22f8-43b2-b75f-0534d5bb467f"). InnerVolumeSpecName "kube-api-access-68gw2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.194877 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/56c68b3c-13ac-4e77-a8a5-bd99d83d5667-kube-api-access-99gtz" (OuterVolumeSpecName: "kube-api-access-99gtz") pod "56c68b3c-13ac-4e77-a8a5-bd99d83d5667" (UID: "56c68b3c-13ac-4e77-a8a5-bd99d83d5667"). InnerVolumeSpecName "kube-api-access-99gtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.195377 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zd9qc" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.195698 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f1a6abd3-1c13-40ca-8e20-344fd40bc348-kube-api-access-n7lck" (OuterVolumeSpecName: "kube-api-access-n7lck") pod "f1a6abd3-1c13-40ca-8e20-344fd40bc348" (UID: "f1a6abd3-1c13-40ca-8e20-344fd40bc348"). InnerVolumeSpecName "kube-api-access-n7lck". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.203617 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-75bfd788c8-wflcw"] Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.204429 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2e9e43c0-674a-4769-b44b-7eb6ee1a69d5-kube-api-access-4wqzs" (OuterVolumeSpecName: "kube-api-access-4wqzs") pod "2e9e43c0-674a-4769-b44b-7eb6ee1a69d5" (UID: "2e9e43c0-674a-4769-b44b-7eb6ee1a69d5"). InnerVolumeSpecName "kube-api-access-4wqzs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.206893 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-864f6b75bf-4jj8h" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.217220 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5b9875986dl6bbk" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.223361 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-767fdc4f47-8mw4r" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.228934 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-c87fff755-plg7f" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.236560 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-qwwmt"] Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.240337 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-qwwmt"] Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.245765 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/designate-operator-controller-manager-9f958b845-q8wlk"] Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.249260 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/designate-operator-controller-manager-9f958b845-q8wlk"] Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.261002 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/nova-operator-controller-manager-65849867d6-js8vs"] Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.267438 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/nova-operator-controller-manager-65849867d6-js8vs"] Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.280026 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-gvd5f" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.282975 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xwdml\" (UniqueName: \"kubernetes.io/projected/115bb848-a715-4eac-8993-63842460dd3d-kube-api-access-xwdml\") pod \"115bb848-a715-4eac-8993-63842460dd3d\" (UID: \"115bb848-a715-4eac-8993-63842460dd3d\") " Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.283027 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5vndv\" (UniqueName: \"kubernetes.io/projected/cbbd3f62-64b6-4cc2-8c3b-b21d317c0624-kube-api-access-5vndv\") pod \"cbbd3f62-64b6-4cc2-8c3b-b21d317c0624\" (UID: \"cbbd3f62-64b6-4cc2-8c3b-b21d317c0624\") " Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.283097 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/04ab3f66-5485-4b6f-9293-ed76b2695d50-srv-cert\") pod \"04ab3f66-5485-4b6f-9293-ed76b2695d50\" (UID: \"04ab3f66-5485-4b6f-9293-ed76b2695d50\") " Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.283154 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/04ab3f66-5485-4b6f-9293-ed76b2695d50-profile-collector-cert\") pod \"04ab3f66-5485-4b6f-9293-ed76b2695d50\" (UID: \"04ab3f66-5485-4b6f-9293-ed76b2695d50\") " Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.283193 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cbbd3f62-64b6-4cc2-8c3b-b21d317c0624-cert\") pod \"cbbd3f62-64b6-4cc2-8c3b-b21d317c0624\" (UID: \"cbbd3f62-64b6-4cc2-8c3b-b21d317c0624\") " Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.283243 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p92dc\" (UniqueName: \"kubernetes.io/projected/973549d4-6a26-4166-8be0-b0dfb7c5aec6-kube-api-access-p92dc\") pod \"973549d4-6a26-4166-8be0-b0dfb7c5aec6\" (UID: \"973549d4-6a26-4166-8be0-b0dfb7c5aec6\") " Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.283276 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hvbdx\" (UniqueName: \"kubernetes.io/projected/dba009f4-faa1-4d6b-bc19-c00eedd6c7c7-kube-api-access-hvbdx\") pod \"dba009f4-faa1-4d6b-bc19-c00eedd6c7c7\" (UID: \"dba009f4-faa1-4d6b-bc19-c00eedd6c7c7\") " Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.283321 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x6bm9\" (UniqueName: \"kubernetes.io/projected/88bca883-e84c-4223-80de-4c389d44f9fe-kube-api-access-x6bm9\") pod \"88bca883-e84c-4223-80de-4c389d44f9fe\" (UID: \"88bca883-e84c-4223-80de-4c389d44f9fe\") " Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.283354 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wwj4x\" (UniqueName: \"kubernetes.io/projected/04ab3f66-5485-4b6f-9293-ed76b2695d50-kube-api-access-wwj4x\") pod \"04ab3f66-5485-4b6f-9293-ed76b2695d50\" (UID: \"04ab3f66-5485-4b6f-9293-ed76b2695d50\") " Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.283374 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r2qgm\" (UniqueName: 
\"kubernetes.io/projected/6e337f3c-6900-4314-adbb-aec361ccb7cc-kube-api-access-r2qgm\") pod \"6e337f3c-6900-4314-adbb-aec361ccb7cc\" (UID: \"6e337f3c-6900-4314-adbb-aec361ccb7cc\") " Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.283401 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v5qjm\" (UniqueName: \"kubernetes.io/projected/fafbffd7-66c2-42ab-97c2-034a1fd6fd7c-kube-api-access-v5qjm\") pod \"fafbffd7-66c2-42ab-97c2-034a1fd6fd7c\" (UID: \"fafbffd7-66c2-42ab-97c2-034a1fd6fd7c\") " Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.283443 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v7ch6\" (UniqueName: \"kubernetes.io/projected/e645ac52-b990-4b3f-a282-59ce60cacaff-kube-api-access-v7ch6\") pod \"e645ac52-b990-4b3f-a282-59ce60cacaff\" (UID: \"e645ac52-b990-4b3f-a282-59ce60cacaff\") " Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.283877 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qnxsc\" (UniqueName: \"kubernetes.io/projected/da27b90a-7f65-4d58-92f1-d46b0d92fd79-kube-api-access-qnxsc\") on node \"crc\" DevicePath \"\"" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.283892 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4wqzs\" (UniqueName: \"kubernetes.io/projected/2e9e43c0-674a-4769-b44b-7eb6ee1a69d5-kube-api-access-4wqzs\") on node \"crc\" DevicePath \"\"" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.283902 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k9ktz\" (UniqueName: \"kubernetes.io/projected/aec45707-2c02-43bb-b7d9-a24b906cadca-kube-api-access-k9ktz\") on node \"crc\" DevicePath \"\"" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.283919 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2z8zq\" (UniqueName: \"kubernetes.io/projected/8dea3161-8db9-4778-b59c-066f876ff6e2-kube-api-access-2z8zq\") on node \"crc\" DevicePath \"\"" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.283928 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-drmnl\" (UniqueName: \"kubernetes.io/projected/666aae5d-ee4c-47d1-8cf7-0aa2cb8c813d-kube-api-access-drmnl\") on node \"crc\" DevicePath \"\"" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.283937 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lncvc\" (UniqueName: \"kubernetes.io/projected/a491bcd2-847b-41eb-addd-54e625a9e6e1-kube-api-access-lncvc\") on node \"crc\" DevicePath \"\"" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.283948 4558 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/2e9e43c0-674a-4769-b44b-7eb6ee1a69d5-cert\") on node \"crc\" DevicePath \"\"" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.283957 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-68gw2\" (UniqueName: \"kubernetes.io/projected/e7054184-22f8-43b2-b75f-0534d5bb467f-kube-api-access-68gw2\") on node \"crc\" DevicePath \"\"" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.283966 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n7lck\" (UniqueName: \"kubernetes.io/projected/f1a6abd3-1c13-40ca-8e20-344fd40bc348-kube-api-access-n7lck\") on node \"crc\" DevicePath \"\"" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.283975 4558 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-99gtz\" (UniqueName: \"kubernetes.io/projected/56c68b3c-13ac-4e77-a8a5-bd99d83d5667-kube-api-access-99gtz\") on node \"crc\" DevicePath \"\"" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.283986 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jwfxn\" (UniqueName: \"kubernetes.io/projected/1998e4dc-a1d4-405e-b0dd-546e1d5fed6f-kube-api-access-jwfxn\") on node \"crc\" DevicePath \"\"" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.297581 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/88bca883-e84c-4223-80de-4c389d44f9fe-kube-api-access-x6bm9" (OuterVolumeSpecName: "kube-api-access-x6bm9") pod "88bca883-e84c-4223-80de-4c389d44f9fe" (UID: "88bca883-e84c-4223-80de-4c389d44f9fe"). InnerVolumeSpecName "kube-api-access-x6bm9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.309352 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cbbd3f62-64b6-4cc2-8c3b-b21d317c0624-cert" (OuterVolumeSpecName: "cert") pod "cbbd3f62-64b6-4cc2-8c3b-b21d317c0624" (UID: "cbbd3f62-64b6-4cc2-8c3b-b21d317c0624"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.309540 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e645ac52-b990-4b3f-a282-59ce60cacaff-kube-api-access-v7ch6" (OuterVolumeSpecName: "kube-api-access-v7ch6") pod "e645ac52-b990-4b3f-a282-59ce60cacaff" (UID: "e645ac52-b990-4b3f-a282-59ce60cacaff"). InnerVolumeSpecName "kube-api-access-v7ch6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.314349 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cbbd3f62-64b6-4cc2-8c3b-b21d317c0624-kube-api-access-5vndv" (OuterVolumeSpecName: "kube-api-access-5vndv") pod "cbbd3f62-64b6-4cc2-8c3b-b21d317c0624" (UID: "cbbd3f62-64b6-4cc2-8c3b-b21d317c0624"). InnerVolumeSpecName "kube-api-access-5vndv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.321363 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/115bb848-a715-4eac-8993-63842460dd3d-kube-api-access-xwdml" (OuterVolumeSpecName: "kube-api-access-xwdml") pod "115bb848-a715-4eac-8993-63842460dd3d" (UID: "115bb848-a715-4eac-8993-63842460dd3d"). InnerVolumeSpecName "kube-api-access-xwdml". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.322317 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dba009f4-faa1-4d6b-bc19-c00eedd6c7c7-kube-api-access-hvbdx" (OuterVolumeSpecName: "kube-api-access-hvbdx") pod "dba009f4-faa1-4d6b-bc19-c00eedd6c7c7" (UID: "dba009f4-faa1-4d6b-bc19-c00eedd6c7c7"). InnerVolumeSpecName "kube-api-access-hvbdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.323550 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/04ab3f66-5485-4b6f-9293-ed76b2695d50-kube-api-access-wwj4x" (OuterVolumeSpecName: "kube-api-access-wwj4x") pod "04ab3f66-5485-4b6f-9293-ed76b2695d50" (UID: "04ab3f66-5485-4b6f-9293-ed76b2695d50"). 
InnerVolumeSpecName "kube-api-access-wwj4x". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.325609 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6e337f3c-6900-4314-adbb-aec361ccb7cc-kube-api-access-r2qgm" (OuterVolumeSpecName: "kube-api-access-r2qgm") pod "6e337f3c-6900-4314-adbb-aec361ccb7cc" (UID: "6e337f3c-6900-4314-adbb-aec361ccb7cc"). InnerVolumeSpecName "kube-api-access-r2qgm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.326976 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04ab3f66-5485-4b6f-9293-ed76b2695d50-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "04ab3f66-5485-4b6f-9293-ed76b2695d50" (UID: "04ab3f66-5485-4b6f-9293-ed76b2695d50"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.339733 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fafbffd7-66c2-42ab-97c2-034a1fd6fd7c-kube-api-access-v5qjm" (OuterVolumeSpecName: "kube-api-access-v5qjm") pod "fafbffd7-66c2-42ab-97c2-034a1fd6fd7c" (UID: "fafbffd7-66c2-42ab-97c2-034a1fd6fd7c"). InnerVolumeSpecName "kube-api-access-v5qjm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.344584 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/973549d4-6a26-4166-8be0-b0dfb7c5aec6-kube-api-access-p92dc" (OuterVolumeSpecName: "kube-api-access-p92dc") pod "973549d4-6a26-4166-8be0-b0dfb7c5aec6" (UID: "973549d4-6a26-4166-8be0-b0dfb7c5aec6"). InnerVolumeSpecName "kube-api-access-p92dc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.358370 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04ab3f66-5485-4b6f-9293-ed76b2695d50-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "04ab3f66-5485-4b6f-9293-ed76b2695d50" (UID: "04ab3f66-5485-4b6f-9293-ed76b2695d50"). InnerVolumeSpecName "profile-collector-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.385758 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c4z7x\" (UniqueName: \"kubernetes.io/projected/31cd4c27-d472-4406-886a-7222315c465d-kube-api-access-c4z7x\") pod \"31cd4c27-d472-4406-886a-7222315c465d\" (UID: \"31cd4c27-d472-4406-886a-7222315c465d\") " Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.386308 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xwdml\" (UniqueName: \"kubernetes.io/projected/115bb848-a715-4eac-8993-63842460dd3d-kube-api-access-xwdml\") on node \"crc\" DevicePath \"\"" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.386329 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5vndv\" (UniqueName: \"kubernetes.io/projected/cbbd3f62-64b6-4cc2-8c3b-b21d317c0624-kube-api-access-5vndv\") on node \"crc\" DevicePath \"\"" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.386340 4558 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/04ab3f66-5485-4b6f-9293-ed76b2695d50-srv-cert\") on node \"crc\" DevicePath \"\"" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.386352 4558 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/04ab3f66-5485-4b6f-9293-ed76b2695d50-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.386361 4558 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cbbd3f62-64b6-4cc2-8c3b-b21d317c0624-cert\") on node \"crc\" DevicePath \"\"" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.386370 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p92dc\" (UniqueName: \"kubernetes.io/projected/973549d4-6a26-4166-8be0-b0dfb7c5aec6-kube-api-access-p92dc\") on node \"crc\" DevicePath \"\"" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.386379 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hvbdx\" (UniqueName: \"kubernetes.io/projected/dba009f4-faa1-4d6b-bc19-c00eedd6c7c7-kube-api-access-hvbdx\") on node \"crc\" DevicePath \"\"" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.386388 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x6bm9\" (UniqueName: \"kubernetes.io/projected/88bca883-e84c-4223-80de-4c389d44f9fe-kube-api-access-x6bm9\") on node \"crc\" DevicePath \"\"" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.386397 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wwj4x\" (UniqueName: \"kubernetes.io/projected/04ab3f66-5485-4b6f-9293-ed76b2695d50-kube-api-access-wwj4x\") on node \"crc\" DevicePath \"\"" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.386406 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r2qgm\" (UniqueName: \"kubernetes.io/projected/6e337f3c-6900-4314-adbb-aec361ccb7cc-kube-api-access-r2qgm\") on node \"crc\" DevicePath \"\"" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.386416 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v5qjm\" (UniqueName: \"kubernetes.io/projected/fafbffd7-66c2-42ab-97c2-034a1fd6fd7c-kube-api-access-v5qjm\") on node \"crc\" DevicePath \"\"" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.386424 4558 reconciler_common.go:293] 
"Volume detached for volume \"kube-api-access-v7ch6\" (UniqueName: \"kubernetes.io/projected/e645ac52-b990-4b3f-a282-59ce60cacaff-kube-api-access-v7ch6\") on node \"crc\" DevicePath \"\"" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.406297 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31cd4c27-d472-4406-886a-7222315c465d-kube-api-access-c4z7x" (OuterVolumeSpecName: "kube-api-access-c4z7x") pod "31cd4c27-d472-4406-886a-7222315c465d" (UID: "31cd4c27-d472-4406-886a-7222315c465d"). InnerVolumeSpecName "kube-api-access-c4z7x". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.462592 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ljsnj"] Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.501568 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c4z7x\" (UniqueName: \"kubernetes.io/projected/31cd4c27-d472-4406-886a-7222315c465d-kube-api-access-c4z7x\") on node \"crc\" DevicePath \"\"" Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.907245 4558 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-zd9qc container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.37:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Jan 20 18:13:43 crc kubenswrapper[4558]: I0120 18:13:43.907324 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zd9qc" podUID="04ab3f66-5485-4b6f-9293-ed76b2695d50" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.37:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.102993 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-5f8f495fcf-ldhxq" Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.103039 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-78757b4889-t99dx" Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.103039 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-c6994669c-78pts" Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.103057 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7ddb5c749-jrgz7" Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.103086 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ljsnj" event={"ID":"e10e41c7-a461-4506-9e95-51007725cc98","Type":"ContainerStarted","Data":"a738bc47a5d43458f4326ca5f6a5039f4805013f14136c0b3a17c298d68ead82"} Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.103132 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ljsnj" event={"ID":"e10e41c7-a461-4506-9e95-51007725cc98","Type":"ContainerStarted","Data":"3ab7c7dba0d6ae2408a57ad66d67bc344c4aeaa13caaf50130d3d79906b99513"} Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.103193 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-7cd8bc9dbb-9hmt6" Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.103222 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-cb4666565-6689s" Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.103245 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-pt7m4" Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.103264 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-init-6d4d7d8545-4v6kg" Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.103285 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-55db956ddc-6j4g5" Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.103299 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-686df47fcb-tsdgq" Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.103336 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-77c48c7859-4f7km" Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.103338 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-85dd56d4cc-cvlzf" Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.103359 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zd9qc" Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.103385 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-9b68f5989-mdfnr" Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.103391 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-c87fff755-plg7f" Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.103416 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-864f6b75bf-4jj8h" Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.103449 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-767fdc4f47-8mw4r" Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.103001 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-7fc9b76cf6-9ld4x" Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.103471 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-594c8c9d5d-k28w4" Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.103597 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-64cd966744-925vf" Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.103645 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-5b9875986dl6bbk" Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.103658 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-77d5c5b54f-gvd5f" Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.186635 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ljsnj" podStartSLOduration=3.186614794 podStartE2EDuration="3.186614794s" podCreationTimestamp="2026-01-20 18:13:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:13:44.185870455 +0000 UTC m=+5517.946208421" watchObservedRunningTime="2026-01-20 18:13:44.186614794 +0000 UTC m=+5517.946952761" Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.297602 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7ddb5c749-jrgz7"] Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.302248 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7ddb5c749-jrgz7"] Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.305774 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/glance-operator-controller-manager-c6994669c-78pts"] Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.309642 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/glance-operator-controller-manager-c6994669c-78pts"] Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.315331 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-cb4666565-6689s"] Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.318510 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-cb4666565-6689s"] Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.350486 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-5f8f495fcf-ldhxq"] Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.365560 4558 kubelet.go:2431] "SyncLoop REMOVE" 
source="api" pods=["openstack-operators/telemetry-operator-controller-manager-5f8f495fcf-ldhxq"] Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.374148 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/test-operator-controller-manager-7cd8bc9dbb-9hmt6"] Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.379353 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/test-operator-controller-manager-7cd8bc9dbb-9hmt6"] Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.385105 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/infra-operator-controller-manager-77c48c7859-4f7km"] Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.388727 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/infra-operator-controller-manager-77c48c7859-4f7km"] Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.392570 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-55db956ddc-6j4g5"] Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.397753 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-55db956ddc-6j4g5"] Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.406267 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-78757b4889-t99dx"] Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.409360 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-78757b4889-t99dx"] Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.413886 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-pt7m4"] Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.419189 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-pt7m4"] Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.422622 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-77d5c5b54f-gvd5f"] Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.426091 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-77d5c5b54f-gvd5f"] Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.429713 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-5b9875986dl6bbk"] Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.434313 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-5b9875986dl6bbk"] Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.438927 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-767fdc4f47-8mw4r"] Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.444947 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-767fdc4f47-8mw4r"] Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.448681 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/placement-operator-controller-manager-686df47fcb-tsdgq"] Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.452316 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack-operators/placement-operator-controller-manager-686df47fcb-tsdgq"] Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.455589 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-9b68f5989-mdfnr"] Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.461974 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-9b68f5989-mdfnr"] Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.466195 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-c87fff755-plg7f"] Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.469502 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-c87fff755-plg7f"] Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.472881 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/swift-operator-controller-manager-85dd56d4cc-cvlzf"] Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.476201 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/swift-operator-controller-manager-85dd56d4cc-cvlzf"] Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.479749 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-7fc9b76cf6-9ld4x"] Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.483363 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-7fc9b76cf6-9ld4x"] Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.487065 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/manila-operator-controller-manager-864f6b75bf-4jj8h"] Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.490491 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/manila-operator-controller-manager-864f6b75bf-4jj8h"] Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.509934 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-controller-init-6d4d7d8545-4v6kg"] Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.515506 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-controller-init-6d4d7d8545-4v6kg"] Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.518765 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-64cd966744-925vf"] Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.521771 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-64cd966744-925vf"] Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.526551 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zd9qc"] Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.530231 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-zd9qc"] Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.533894 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/heat-operator-controller-manager-594c8c9d5d-k28w4"] Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.537245 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack-operators/heat-operator-controller-manager-594c8c9d5d-k28w4"] Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.573028 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0292902b-f0ca-4abc-b220-4e3268243db5" path="/var/lib/kubelet/pods/0292902b-f0ca-4abc-b220-4e3268243db5/volumes" Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.574020 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="04ab3f66-5485-4b6f-9293-ed76b2695d50" path="/var/lib/kubelet/pods/04ab3f66-5485-4b6f-9293-ed76b2695d50/volumes" Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.575325 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="115bb848-a715-4eac-8993-63842460dd3d" path="/var/lib/kubelet/pods/115bb848-a715-4eac-8993-63842460dd3d/volumes" Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.575724 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1998e4dc-a1d4-405e-b0dd-546e1d5fed6f" path="/var/lib/kubelet/pods/1998e4dc-a1d4-405e-b0dd-546e1d5fed6f/volumes" Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.576616 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1c12cb14-b8f1-4939-84d9-6a6b851f095a" path="/var/lib/kubelet/pods/1c12cb14-b8f1-4939-84d9-6a6b851f095a/volumes" Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.577061 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2e9e43c0-674a-4769-b44b-7eb6ee1a69d5" path="/var/lib/kubelet/pods/2e9e43c0-674a-4769-b44b-7eb6ee1a69d5/volumes" Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.577462 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31cd4c27-d472-4406-886a-7222315c465d" path="/var/lib/kubelet/pods/31cd4c27-d472-4406-886a-7222315c465d/volumes" Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.577846 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="42eb153b-1142-4dc6-9e34-b4cc38e49701" path="/var/lib/kubelet/pods/42eb153b-1142-4dc6-9e34-b4cc38e49701/volumes" Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.579197 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="54c6bd49-d022-40c6-b547-58d3eb5ba7e2" path="/var/lib/kubelet/pods/54c6bd49-d022-40c6-b547-58d3eb5ba7e2/volumes" Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.579592 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="56c68b3c-13ac-4e77-a8a5-bd99d83d5667" path="/var/lib/kubelet/pods/56c68b3c-13ac-4e77-a8a5-bd99d83d5667/volumes" Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.580202 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="666aae5d-ee4c-47d1-8cf7-0aa2cb8c813d" path="/var/lib/kubelet/pods/666aae5d-ee4c-47d1-8cf7-0aa2cb8c813d/volumes" Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.581458 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6e337f3c-6900-4314-adbb-aec361ccb7cc" path="/var/lib/kubelet/pods/6e337f3c-6900-4314-adbb-aec361ccb7cc/volumes" Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.581839 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="88bca883-e84c-4223-80de-4c389d44f9fe" path="/var/lib/kubelet/pods/88bca883-e84c-4223-80de-4c389d44f9fe/volumes" Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.582454 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8dea3161-8db9-4778-b59c-066f876ff6e2" 
path="/var/lib/kubelet/pods/8dea3161-8db9-4778-b59c-066f876ff6e2/volumes" Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.583006 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="973549d4-6a26-4166-8be0-b0dfb7c5aec6" path="/var/lib/kubelet/pods/973549d4-6a26-4166-8be0-b0dfb7c5aec6/volumes" Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.583972 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a491bcd2-847b-41eb-addd-54e625a9e6e1" path="/var/lib/kubelet/pods/a491bcd2-847b-41eb-addd-54e625a9e6e1/volumes" Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.585297 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aec45707-2c02-43bb-b7d9-a24b906cadca" path="/var/lib/kubelet/pods/aec45707-2c02-43bb-b7d9-a24b906cadca/volumes" Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.585939 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="af89563a-d3e9-4424-8fde-a0a77b21b7e8" path="/var/lib/kubelet/pods/af89563a-d3e9-4424-8fde-a0a77b21b7e8/volumes" Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.586361 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cbbd3f62-64b6-4cc2-8c3b-b21d317c0624" path="/var/lib/kubelet/pods/cbbd3f62-64b6-4cc2-8c3b-b21d317c0624/volumes" Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.587139 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="da27b90a-7f65-4d58-92f1-d46b0d92fd79" path="/var/lib/kubelet/pods/da27b90a-7f65-4d58-92f1-d46b0d92fd79/volumes" Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.587948 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dba009f4-faa1-4d6b-bc19-c00eedd6c7c7" path="/var/lib/kubelet/pods/dba009f4-faa1-4d6b-bc19-c00eedd6c7c7/volumes" Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.588530 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="deea5510-f570-48ac-b23f-bba261ef3951" path="/var/lib/kubelet/pods/deea5510-f570-48ac-b23f-bba261ef3951/volumes" Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.589462 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e645ac52-b990-4b3f-a282-59ce60cacaff" path="/var/lib/kubelet/pods/e645ac52-b990-4b3f-a282-59ce60cacaff/volumes" Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.589815 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7054184-22f8-43b2-b75f-0534d5bb467f" path="/var/lib/kubelet/pods/e7054184-22f8-43b2-b75f-0534d5bb467f/volumes" Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.590193 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f1a6abd3-1c13-40ca-8e20-344fd40bc348" path="/var/lib/kubelet/pods/f1a6abd3-1c13-40ca-8e20-344fd40bc348/volumes" Jan 20 18:13:44 crc kubenswrapper[4558]: I0120 18:13:44.590827 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fafbffd7-66c2-42ab-97c2-034a1fd6fd7c" path="/var/lib/kubelet/pods/fafbffd7-66c2-42ab-97c2-034a1fd6fd7c/volumes" Jan 20 18:13:45 crc kubenswrapper[4558]: I0120 18:13:45.110496 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ljsnj" Jan 20 18:13:45 crc kubenswrapper[4558]: I0120 18:13:45.114415 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-ljsnj" Jan 20 18:13:46 crc kubenswrapper[4558]: E0120 
18:13:46.446005 4558 configmap.go:193] Couldn't get configMap openstack-kuttl-tests/edpm-compute-no-nodes: configmap "edpm-compute-no-nodes" not found Jan 20 18:13:46 crc kubenswrapper[4558]: E0120 18:13:46.446450 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/94d7f76b-8cf8-4bf5-9844-526d8b79fffc-edpm-compute-no-nodes podName:94d7f76b-8cf8-4bf5-9844-526d8b79fffc nodeName:}" failed. No retries permitted until 2026-01-20 18:13:54.44643097 +0000 UTC m=+5528.206768937 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "edpm-compute-no-nodes" (UniqueName: "kubernetes.io/configmap/94d7f76b-8cf8-4bf5-9844-526d8b79fffc-edpm-compute-no-nodes") pod "dnsmasq-dnsmasq-64864b6d57-7zmjn" (UID: "94d7f76b-8cf8-4bf5-9844-526d8b79fffc") : configmap "edpm-compute-no-nodes" not found Jan 20 18:13:47 crc kubenswrapper[4558]: I0120 18:13:47.953961 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-vtkrz"] Jan 20 18:13:47 crc kubenswrapper[4558]: E0120 18:13:47.954572 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31cd4c27-d472-4406-886a-7222315c465d" containerName="manager" Jan 20 18:13:47 crc kubenswrapper[4558]: I0120 18:13:47.954588 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="31cd4c27-d472-4406-886a-7222315c465d" containerName="manager" Jan 20 18:13:47 crc kubenswrapper[4558]: E0120 18:13:47.954601 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e337f3c-6900-4314-adbb-aec361ccb7cc" containerName="manager" Jan 20 18:13:47 crc kubenswrapper[4558]: I0120 18:13:47.954607 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e337f3c-6900-4314-adbb-aec361ccb7cc" containerName="manager" Jan 20 18:13:47 crc kubenswrapper[4558]: E0120 18:13:47.954616 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0292902b-f0ca-4abc-b220-4e3268243db5" containerName="manager" Jan 20 18:13:47 crc kubenswrapper[4558]: I0120 18:13:47.954622 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0292902b-f0ca-4abc-b220-4e3268243db5" containerName="manager" Jan 20 18:13:47 crc kubenswrapper[4558]: E0120 18:13:47.954641 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fafbffd7-66c2-42ab-97c2-034a1fd6fd7c" containerName="manager" Jan 20 18:13:47 crc kubenswrapper[4558]: I0120 18:13:47.954646 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="fafbffd7-66c2-42ab-97c2-034a1fd6fd7c" containerName="manager" Jan 20 18:13:47 crc kubenswrapper[4558]: E0120 18:13:47.954656 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af89563a-d3e9-4424-8fde-a0a77b21b7e8" containerName="operator" Jan 20 18:13:47 crc kubenswrapper[4558]: I0120 18:13:47.954662 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="af89563a-d3e9-4424-8fde-a0a77b21b7e8" containerName="operator" Jan 20 18:13:47 crc kubenswrapper[4558]: E0120 18:13:47.954670 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a491bcd2-847b-41eb-addd-54e625a9e6e1" containerName="operator" Jan 20 18:13:47 crc kubenswrapper[4558]: I0120 18:13:47.954676 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a491bcd2-847b-41eb-addd-54e625a9e6e1" containerName="operator" Jan 20 18:13:47 crc kubenswrapper[4558]: E0120 18:13:47.954685 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aec45707-2c02-43bb-b7d9-a24b906cadca" containerName="manager" Jan 20 18:13:47 crc kubenswrapper[4558]: I0120 18:13:47.954691 
4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="aec45707-2c02-43bb-b7d9-a24b906cadca" containerName="manager" Jan 20 18:13:47 crc kubenswrapper[4558]: E0120 18:13:47.954700 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e7054184-22f8-43b2-b75f-0534d5bb467f" containerName="manager" Jan 20 18:13:47 crc kubenswrapper[4558]: I0120 18:13:47.954707 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e7054184-22f8-43b2-b75f-0534d5bb467f" containerName="manager" Jan 20 18:13:47 crc kubenswrapper[4558]: E0120 18:13:47.954721 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="115bb848-a715-4eac-8993-63842460dd3d" containerName="manager" Jan 20 18:13:47 crc kubenswrapper[4558]: I0120 18:13:47.954727 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="115bb848-a715-4eac-8993-63842460dd3d" containerName="manager" Jan 20 18:13:47 crc kubenswrapper[4558]: E0120 18:13:47.954735 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="973549d4-6a26-4166-8be0-b0dfb7c5aec6" containerName="manager" Jan 20 18:13:47 crc kubenswrapper[4558]: I0120 18:13:47.954741 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="973549d4-6a26-4166-8be0-b0dfb7c5aec6" containerName="manager" Jan 20 18:13:47 crc kubenswrapper[4558]: E0120 18:13:47.954748 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8dea3161-8db9-4778-b59c-066f876ff6e2" containerName="manager" Jan 20 18:13:47 crc kubenswrapper[4558]: I0120 18:13:47.954755 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8dea3161-8db9-4778-b59c-066f876ff6e2" containerName="manager" Jan 20 18:13:47 crc kubenswrapper[4558]: E0120 18:13:47.954763 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1a6abd3-1c13-40ca-8e20-344fd40bc348" containerName="manager" Jan 20 18:13:47 crc kubenswrapper[4558]: I0120 18:13:47.954769 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1a6abd3-1c13-40ca-8e20-344fd40bc348" containerName="manager" Jan 20 18:13:47 crc kubenswrapper[4558]: E0120 18:13:47.954779 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e9e43c0-674a-4769-b44b-7eb6ee1a69d5" containerName="manager" Jan 20 18:13:47 crc kubenswrapper[4558]: I0120 18:13:47.954784 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e9e43c0-674a-4769-b44b-7eb6ee1a69d5" containerName="manager" Jan 20 18:13:47 crc kubenswrapper[4558]: E0120 18:13:47.954794 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cbbd3f62-64b6-4cc2-8c3b-b21d317c0624" containerName="manager" Jan 20 18:13:47 crc kubenswrapper[4558]: I0120 18:13:47.954800 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="cbbd3f62-64b6-4cc2-8c3b-b21d317c0624" containerName="manager" Jan 20 18:13:47 crc kubenswrapper[4558]: E0120 18:13:47.954810 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56c68b3c-13ac-4e77-a8a5-bd99d83d5667" containerName="manager" Jan 20 18:13:47 crc kubenswrapper[4558]: I0120 18:13:47.954817 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="56c68b3c-13ac-4e77-a8a5-bd99d83d5667" containerName="manager" Jan 20 18:13:47 crc kubenswrapper[4558]: E0120 18:13:47.954828 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e645ac52-b990-4b3f-a282-59ce60cacaff" containerName="manager" Jan 20 18:13:47 crc kubenswrapper[4558]: I0120 18:13:47.954837 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e645ac52-b990-4b3f-a282-59ce60cacaff" containerName="manager" Jan 
20 18:13:47 crc kubenswrapper[4558]: E0120 18:13:47.954848 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c12cb14-b8f1-4939-84d9-6a6b851f095a" containerName="manager" Jan 20 18:13:47 crc kubenswrapper[4558]: I0120 18:13:47.954853 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c12cb14-b8f1-4939-84d9-6a6b851f095a" containerName="manager" Jan 20 18:13:47 crc kubenswrapper[4558]: E0120 18:13:47.954860 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="deea5510-f570-48ac-b23f-bba261ef3951" containerName="manager" Jan 20 18:13:47 crc kubenswrapper[4558]: I0120 18:13:47.954867 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="deea5510-f570-48ac-b23f-bba261ef3951" containerName="manager" Jan 20 18:13:47 crc kubenswrapper[4558]: E0120 18:13:47.954875 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="666aae5d-ee4c-47d1-8cf7-0aa2cb8c813d" containerName="registry-server" Jan 20 18:13:47 crc kubenswrapper[4558]: I0120 18:13:47.954881 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="666aae5d-ee4c-47d1-8cf7-0aa2cb8c813d" containerName="registry-server" Jan 20 18:13:47 crc kubenswrapper[4558]: E0120 18:13:47.954890 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1998e4dc-a1d4-405e-b0dd-546e1d5fed6f" containerName="manager" Jan 20 18:13:47 crc kubenswrapper[4558]: I0120 18:13:47.954896 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1998e4dc-a1d4-405e-b0dd-546e1d5fed6f" containerName="manager" Jan 20 18:13:47 crc kubenswrapper[4558]: E0120 18:13:47.954908 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da27b90a-7f65-4d58-92f1-d46b0d92fd79" containerName="manager" Jan 20 18:13:47 crc kubenswrapper[4558]: I0120 18:13:47.954925 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="da27b90a-7f65-4d58-92f1-d46b0d92fd79" containerName="manager" Jan 20 18:13:47 crc kubenswrapper[4558]: E0120 18:13:47.954933 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="42eb153b-1142-4dc6-9e34-b4cc38e49701" containerName="manager" Jan 20 18:13:47 crc kubenswrapper[4558]: I0120 18:13:47.954939 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="42eb153b-1142-4dc6-9e34-b4cc38e49701" containerName="manager" Jan 20 18:13:47 crc kubenswrapper[4558]: E0120 18:13:47.954947 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="88bca883-e84c-4223-80de-4c389d44f9fe" containerName="manager" Jan 20 18:13:47 crc kubenswrapper[4558]: I0120 18:13:47.954954 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="88bca883-e84c-4223-80de-4c389d44f9fe" containerName="manager" Jan 20 18:13:47 crc kubenswrapper[4558]: E0120 18:13:47.954962 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54c6bd49-d022-40c6-b547-58d3eb5ba7e2" containerName="manager" Jan 20 18:13:47 crc kubenswrapper[4558]: I0120 18:13:47.954968 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="54c6bd49-d022-40c6-b547-58d3eb5ba7e2" containerName="manager" Jan 20 18:13:47 crc kubenswrapper[4558]: E0120 18:13:47.954981 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dba009f4-faa1-4d6b-bc19-c00eedd6c7c7" containerName="manager" Jan 20 18:13:47 crc kubenswrapper[4558]: I0120 18:13:47.954986 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="dba009f4-faa1-4d6b-bc19-c00eedd6c7c7" containerName="manager" Jan 20 18:13:47 crc kubenswrapper[4558]: E0120 18:13:47.954992 4558 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="04ab3f66-5485-4b6f-9293-ed76b2695d50" containerName="catalog-operator" Jan 20 18:13:47 crc kubenswrapper[4558]: I0120 18:13:47.954997 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="04ab3f66-5485-4b6f-9293-ed76b2695d50" containerName="catalog-operator" Jan 20 18:13:47 crc kubenswrapper[4558]: I0120 18:13:47.955126 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="0292902b-f0ca-4abc-b220-4e3268243db5" containerName="manager" Jan 20 18:13:47 crc kubenswrapper[4558]: I0120 18:13:47.955139 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e337f3c-6900-4314-adbb-aec361ccb7cc" containerName="manager" Jan 20 18:13:47 crc kubenswrapper[4558]: I0120 18:13:47.955148 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="56c68b3c-13ac-4e77-a8a5-bd99d83d5667" containerName="manager" Jan 20 18:13:47 crc kubenswrapper[4558]: I0120 18:13:47.955158 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="88bca883-e84c-4223-80de-4c389d44f9fe" containerName="manager" Jan 20 18:13:47 crc kubenswrapper[4558]: I0120 18:13:47.955186 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="cbbd3f62-64b6-4cc2-8c3b-b21d317c0624" containerName="manager" Jan 20 18:13:47 crc kubenswrapper[4558]: I0120 18:13:47.955196 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="aec45707-2c02-43bb-b7d9-a24b906cadca" containerName="manager" Jan 20 18:13:47 crc kubenswrapper[4558]: I0120 18:13:47.955206 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="31cd4c27-d472-4406-886a-7222315c465d" containerName="manager" Jan 20 18:13:47 crc kubenswrapper[4558]: I0120 18:13:47.955212 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e7054184-22f8-43b2-b75f-0534d5bb467f" containerName="manager" Jan 20 18:13:47 crc kubenswrapper[4558]: I0120 18:13:47.955221 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="2e9e43c0-674a-4769-b44b-7eb6ee1a69d5" containerName="manager" Jan 20 18:13:47 crc kubenswrapper[4558]: I0120 18:13:47.955228 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f1a6abd3-1c13-40ca-8e20-344fd40bc348" containerName="manager" Jan 20 18:13:47 crc kubenswrapper[4558]: I0120 18:13:47.955235 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="04ab3f66-5485-4b6f-9293-ed76b2695d50" containerName="catalog-operator" Jan 20 18:13:47 crc kubenswrapper[4558]: I0120 18:13:47.955243 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="54c6bd49-d022-40c6-b547-58d3eb5ba7e2" containerName="manager" Jan 20 18:13:47 crc kubenswrapper[4558]: I0120 18:13:47.955250 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="1998e4dc-a1d4-405e-b0dd-546e1d5fed6f" containerName="manager" Jan 20 18:13:47 crc kubenswrapper[4558]: I0120 18:13:47.955261 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="666aae5d-ee4c-47d1-8cf7-0aa2cb8c813d" containerName="registry-server" Jan 20 18:13:47 crc kubenswrapper[4558]: I0120 18:13:47.955268 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a491bcd2-847b-41eb-addd-54e625a9e6e1" containerName="operator" Jan 20 18:13:47 crc kubenswrapper[4558]: I0120 18:13:47.955275 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8dea3161-8db9-4778-b59c-066f876ff6e2" containerName="manager" Jan 20 18:13:47 crc kubenswrapper[4558]: I0120 18:13:47.955281 4558 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="af89563a-d3e9-4424-8fde-a0a77b21b7e8" containerName="operator" Jan 20 18:13:47 crc kubenswrapper[4558]: I0120 18:13:47.955289 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="deea5510-f570-48ac-b23f-bba261ef3951" containerName="manager" Jan 20 18:13:47 crc kubenswrapper[4558]: I0120 18:13:47.955298 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="42eb153b-1142-4dc6-9e34-b4cc38e49701" containerName="manager" Jan 20 18:13:47 crc kubenswrapper[4558]: I0120 18:13:47.955305 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e645ac52-b990-4b3f-a282-59ce60cacaff" containerName="manager" Jan 20 18:13:47 crc kubenswrapper[4558]: I0120 18:13:47.955317 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c12cb14-b8f1-4939-84d9-6a6b851f095a" containerName="manager" Jan 20 18:13:47 crc kubenswrapper[4558]: I0120 18:13:47.955326 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="dba009f4-faa1-4d6b-bc19-c00eedd6c7c7" containerName="manager" Jan 20 18:13:47 crc kubenswrapper[4558]: I0120 18:13:47.955335 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="115bb848-a715-4eac-8993-63842460dd3d" containerName="manager" Jan 20 18:13:47 crc kubenswrapper[4558]: I0120 18:13:47.955345 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="973549d4-6a26-4166-8be0-b0dfb7c5aec6" containerName="manager" Jan 20 18:13:47 crc kubenswrapper[4558]: I0120 18:13:47.955352 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="fafbffd7-66c2-42ab-97c2-034a1fd6fd7c" containerName="manager" Jan 20 18:13:47 crc kubenswrapper[4558]: I0120 18:13:47.955361 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="da27b90a-7f65-4d58-92f1-d46b0d92fd79" containerName="manager" Jan 20 18:13:47 crc kubenswrapper[4558]: I0120 18:13:47.956360 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-vtkrz" Jan 20 18:13:47 crc kubenswrapper[4558]: I0120 18:13:47.964700 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vtkrz"] Jan 20 18:13:48 crc kubenswrapper[4558]: I0120 18:13:48.072226 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9e48d51-6057-4527-bb88-2cce07ed36ee-utilities\") pod \"certified-operators-vtkrz\" (UID: \"d9e48d51-6057-4527-bb88-2cce07ed36ee\") " pod="openshift-marketplace/certified-operators-vtkrz" Jan 20 18:13:48 crc kubenswrapper[4558]: I0120 18:13:48.072503 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9e48d51-6057-4527-bb88-2cce07ed36ee-catalog-content\") pod \"certified-operators-vtkrz\" (UID: \"d9e48d51-6057-4527-bb88-2cce07ed36ee\") " pod="openshift-marketplace/certified-operators-vtkrz" Jan 20 18:13:48 crc kubenswrapper[4558]: I0120 18:13:48.072617 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-llvcx\" (UniqueName: \"kubernetes.io/projected/d9e48d51-6057-4527-bb88-2cce07ed36ee-kube-api-access-llvcx\") pod \"certified-operators-vtkrz\" (UID: \"d9e48d51-6057-4527-bb88-2cce07ed36ee\") " pod="openshift-marketplace/certified-operators-vtkrz" Jan 20 18:13:48 crc kubenswrapper[4558]: I0120 18:13:48.174706 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9e48d51-6057-4527-bb88-2cce07ed36ee-utilities\") pod \"certified-operators-vtkrz\" (UID: \"d9e48d51-6057-4527-bb88-2cce07ed36ee\") " pod="openshift-marketplace/certified-operators-vtkrz" Jan 20 18:13:48 crc kubenswrapper[4558]: I0120 18:13:48.174864 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9e48d51-6057-4527-bb88-2cce07ed36ee-catalog-content\") pod \"certified-operators-vtkrz\" (UID: \"d9e48d51-6057-4527-bb88-2cce07ed36ee\") " pod="openshift-marketplace/certified-operators-vtkrz" Jan 20 18:13:48 crc kubenswrapper[4558]: I0120 18:13:48.174952 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-llvcx\" (UniqueName: \"kubernetes.io/projected/d9e48d51-6057-4527-bb88-2cce07ed36ee-kube-api-access-llvcx\") pod \"certified-operators-vtkrz\" (UID: \"d9e48d51-6057-4527-bb88-2cce07ed36ee\") " pod="openshift-marketplace/certified-operators-vtkrz" Jan 20 18:13:48 crc kubenswrapper[4558]: I0120 18:13:48.175183 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9e48d51-6057-4527-bb88-2cce07ed36ee-utilities\") pod \"certified-operators-vtkrz\" (UID: \"d9e48d51-6057-4527-bb88-2cce07ed36ee\") " pod="openshift-marketplace/certified-operators-vtkrz" Jan 20 18:13:48 crc kubenswrapper[4558]: I0120 18:13:48.175255 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9e48d51-6057-4527-bb88-2cce07ed36ee-catalog-content\") pod \"certified-operators-vtkrz\" (UID: \"d9e48d51-6057-4527-bb88-2cce07ed36ee\") " pod="openshift-marketplace/certified-operators-vtkrz" Jan 20 18:13:48 crc kubenswrapper[4558]: I0120 18:13:48.194958 4558 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-llvcx\" (UniqueName: \"kubernetes.io/projected/d9e48d51-6057-4527-bb88-2cce07ed36ee-kube-api-access-llvcx\") pod \"certified-operators-vtkrz\" (UID: \"d9e48d51-6057-4527-bb88-2cce07ed36ee\") " pod="openshift-marketplace/certified-operators-vtkrz" Jan 20 18:13:48 crc kubenswrapper[4558]: I0120 18:13:48.276302 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vtkrz" Jan 20 18:13:48 crc kubenswrapper[4558]: I0120 18:13:48.683396 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vtkrz"] Jan 20 18:13:48 crc kubenswrapper[4558]: I0120 18:13:48.786362 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-84b9f45d47-68c5z" Jan 20 18:13:48 crc kubenswrapper[4558]: I0120 18:13:48.842138 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-7zmjn"] Jan 20 18:13:48 crc kubenswrapper[4558]: I0120 18:13:48.842378 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-7zmjn" podUID="94d7f76b-8cf8-4bf5-9844-526d8b79fffc" containerName="dnsmasq-dns" containerID="cri-o://3d38a10fc945687b4bd468e97671352cb6a302116bad58cee0f7b303a04292a8" gracePeriod=10 Jan 20 18:13:49 crc kubenswrapper[4558]: I0120 18:13:49.133637 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-7zmjn" Jan 20 18:13:49 crc kubenswrapper[4558]: I0120 18:13:49.144613 4558 generic.go:334] "Generic (PLEG): container finished" podID="94d7f76b-8cf8-4bf5-9844-526d8b79fffc" containerID="3d38a10fc945687b4bd468e97671352cb6a302116bad58cee0f7b303a04292a8" exitCode=0 Jan 20 18:13:49 crc kubenswrapper[4558]: I0120 18:13:49.144666 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-7zmjn" event={"ID":"94d7f76b-8cf8-4bf5-9844-526d8b79fffc","Type":"ContainerDied","Data":"3d38a10fc945687b4bd468e97671352cb6a302116bad58cee0f7b303a04292a8"} Jan 20 18:13:49 crc kubenswrapper[4558]: I0120 18:13:49.144695 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-7zmjn" event={"ID":"94d7f76b-8cf8-4bf5-9844-526d8b79fffc","Type":"ContainerDied","Data":"bed0a5643dc7bf1c9b36f9edcaa8de569b9e8a4154de37e1356843582f44345f"} Jan 20 18:13:49 crc kubenswrapper[4558]: I0120 18:13:49.144711 4558 scope.go:117] "RemoveContainer" containerID="3d38a10fc945687b4bd468e97671352cb6a302116bad58cee0f7b303a04292a8" Jan 20 18:13:49 crc kubenswrapper[4558]: I0120 18:13:49.144815 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-7zmjn" Jan 20 18:13:49 crc kubenswrapper[4558]: I0120 18:13:49.146642 4558 generic.go:334] "Generic (PLEG): container finished" podID="d9e48d51-6057-4527-bb88-2cce07ed36ee" containerID="3f0915996b59606d107a3f4c1c832d7d5d402a9b498b89f7f1984fc95ea4064d" exitCode=0 Jan 20 18:13:49 crc kubenswrapper[4558]: I0120 18:13:49.146666 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vtkrz" event={"ID":"d9e48d51-6057-4527-bb88-2cce07ed36ee","Type":"ContainerDied","Data":"3f0915996b59606d107a3f4c1c832d7d5d402a9b498b89f7f1984fc95ea4064d"} Jan 20 18:13:49 crc kubenswrapper[4558]: I0120 18:13:49.146681 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vtkrz" event={"ID":"d9e48d51-6057-4527-bb88-2cce07ed36ee","Type":"ContainerStarted","Data":"db95476352db6a83363bcc2b8ff8325e22a218aebcb868c554d59763f58bbbb4"} Jan 20 18:13:49 crc kubenswrapper[4558]: I0120 18:13:49.169619 4558 scope.go:117] "RemoveContainer" containerID="a619ea6cff0a3ab1da333a12817ece7ad694a4ca9fed5f3649a9a16f785ecc11" Jan 20 18:13:49 crc kubenswrapper[4558]: I0120 18:13:49.199613 4558 scope.go:117] "RemoveContainer" containerID="3d38a10fc945687b4bd468e97671352cb6a302116bad58cee0f7b303a04292a8" Jan 20 18:13:49 crc kubenswrapper[4558]: E0120 18:13:49.200710 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3d38a10fc945687b4bd468e97671352cb6a302116bad58cee0f7b303a04292a8\": container with ID starting with 3d38a10fc945687b4bd468e97671352cb6a302116bad58cee0f7b303a04292a8 not found: ID does not exist" containerID="3d38a10fc945687b4bd468e97671352cb6a302116bad58cee0f7b303a04292a8" Jan 20 18:13:49 crc kubenswrapper[4558]: I0120 18:13:49.200745 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3d38a10fc945687b4bd468e97671352cb6a302116bad58cee0f7b303a04292a8"} err="failed to get container status \"3d38a10fc945687b4bd468e97671352cb6a302116bad58cee0f7b303a04292a8\": rpc error: code = NotFound desc = could not find container \"3d38a10fc945687b4bd468e97671352cb6a302116bad58cee0f7b303a04292a8\": container with ID starting with 3d38a10fc945687b4bd468e97671352cb6a302116bad58cee0f7b303a04292a8 not found: ID does not exist" Jan 20 18:13:49 crc kubenswrapper[4558]: I0120 18:13:49.200768 4558 scope.go:117] "RemoveContainer" containerID="a619ea6cff0a3ab1da333a12817ece7ad694a4ca9fed5f3649a9a16f785ecc11" Jan 20 18:13:49 crc kubenswrapper[4558]: E0120 18:13:49.201004 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a619ea6cff0a3ab1da333a12817ece7ad694a4ca9fed5f3649a9a16f785ecc11\": container with ID starting with a619ea6cff0a3ab1da333a12817ece7ad694a4ca9fed5f3649a9a16f785ecc11 not found: ID does not exist" containerID="a619ea6cff0a3ab1da333a12817ece7ad694a4ca9fed5f3649a9a16f785ecc11" Jan 20 18:13:49 crc kubenswrapper[4558]: I0120 18:13:49.201025 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a619ea6cff0a3ab1da333a12817ece7ad694a4ca9fed5f3649a9a16f785ecc11"} err="failed to get container status \"a619ea6cff0a3ab1da333a12817ece7ad694a4ca9fed5f3649a9a16f785ecc11\": rpc error: code = NotFound desc = could not find container \"a619ea6cff0a3ab1da333a12817ece7ad694a4ca9fed5f3649a9a16f785ecc11\": container with ID starting with 
a619ea6cff0a3ab1da333a12817ece7ad694a4ca9fed5f3649a9a16f785ecc11 not found: ID does not exist" Jan 20 18:13:49 crc kubenswrapper[4558]: I0120 18:13:49.202420 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/configmap/94d7f76b-8cf8-4bf5-9844-526d8b79fffc-edpm-compute-no-nodes\") pod \"94d7f76b-8cf8-4bf5-9844-526d8b79fffc\" (UID: \"94d7f76b-8cf8-4bf5-9844-526d8b79fffc\") " Jan 20 18:13:49 crc kubenswrapper[4558]: I0120 18:13:49.202493 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m2kqn\" (UniqueName: \"kubernetes.io/projected/94d7f76b-8cf8-4bf5-9844-526d8b79fffc-kube-api-access-m2kqn\") pod \"94d7f76b-8cf8-4bf5-9844-526d8b79fffc\" (UID: \"94d7f76b-8cf8-4bf5-9844-526d8b79fffc\") " Jan 20 18:13:49 crc kubenswrapper[4558]: I0120 18:13:49.202597 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/94d7f76b-8cf8-4bf5-9844-526d8b79fffc-config\") pod \"94d7f76b-8cf8-4bf5-9844-526d8b79fffc\" (UID: \"94d7f76b-8cf8-4bf5-9844-526d8b79fffc\") " Jan 20 18:13:49 crc kubenswrapper[4558]: I0120 18:13:49.202643 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/94d7f76b-8cf8-4bf5-9844-526d8b79fffc-dnsmasq-svc\") pod \"94d7f76b-8cf8-4bf5-9844-526d8b79fffc\" (UID: \"94d7f76b-8cf8-4bf5-9844-526d8b79fffc\") " Jan 20 18:13:49 crc kubenswrapper[4558]: I0120 18:13:49.214056 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/94d7f76b-8cf8-4bf5-9844-526d8b79fffc-kube-api-access-m2kqn" (OuterVolumeSpecName: "kube-api-access-m2kqn") pod "94d7f76b-8cf8-4bf5-9844-526d8b79fffc" (UID: "94d7f76b-8cf8-4bf5-9844-526d8b79fffc"). InnerVolumeSpecName "kube-api-access-m2kqn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:13:49 crc kubenswrapper[4558]: I0120 18:13:49.244526 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/94d7f76b-8cf8-4bf5-9844-526d8b79fffc-dnsmasq-svc" (OuterVolumeSpecName: "dnsmasq-svc") pod "94d7f76b-8cf8-4bf5-9844-526d8b79fffc" (UID: "94d7f76b-8cf8-4bf5-9844-526d8b79fffc"). InnerVolumeSpecName "dnsmasq-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:13:49 crc kubenswrapper[4558]: I0120 18:13:49.246992 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/94d7f76b-8cf8-4bf5-9844-526d8b79fffc-config" (OuterVolumeSpecName: "config") pod "94d7f76b-8cf8-4bf5-9844-526d8b79fffc" (UID: "94d7f76b-8cf8-4bf5-9844-526d8b79fffc"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:13:49 crc kubenswrapper[4558]: I0120 18:13:49.254332 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/94d7f76b-8cf8-4bf5-9844-526d8b79fffc-edpm-compute-no-nodes" (OuterVolumeSpecName: "edpm-compute-no-nodes") pod "94d7f76b-8cf8-4bf5-9844-526d8b79fffc" (UID: "94d7f76b-8cf8-4bf5-9844-526d8b79fffc"). InnerVolumeSpecName "edpm-compute-no-nodes". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:13:49 crc kubenswrapper[4558]: I0120 18:13:49.304725 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m2kqn\" (UniqueName: \"kubernetes.io/projected/94d7f76b-8cf8-4bf5-9844-526d8b79fffc-kube-api-access-m2kqn\") on node \"crc\" DevicePath \"\"" Jan 20 18:13:49 crc kubenswrapper[4558]: I0120 18:13:49.304767 4558 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/94d7f76b-8cf8-4bf5-9844-526d8b79fffc-config\") on node \"crc\" DevicePath \"\"" Jan 20 18:13:49 crc kubenswrapper[4558]: I0120 18:13:49.304778 4558 reconciler_common.go:293] "Volume detached for volume \"dnsmasq-svc\" (UniqueName: \"kubernetes.io/configmap/94d7f76b-8cf8-4bf5-9844-526d8b79fffc-dnsmasq-svc\") on node \"crc\" DevicePath \"\"" Jan 20 18:13:49 crc kubenswrapper[4558]: I0120 18:13:49.304792 4558 reconciler_common.go:293] "Volume detached for volume \"edpm-compute-no-nodes\" (UniqueName: \"kubernetes.io/configmap/94d7f76b-8cf8-4bf5-9844-526d8b79fffc-edpm-compute-no-nodes\") on node \"crc\" DevicePath \"\"" Jan 20 18:13:49 crc kubenswrapper[4558]: I0120 18:13:49.480358 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-7zmjn"] Jan 20 18:13:49 crc kubenswrapper[4558]: I0120 18:13:49.484236 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-kuttl-tests/dnsmasq-dnsmasq-64864b6d57-7zmjn"] Jan 20 18:13:50 crc kubenswrapper[4558]: I0120 18:13:50.156819 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vtkrz" event={"ID":"d9e48d51-6057-4527-bb88-2cce07ed36ee","Type":"ContainerStarted","Data":"111124109f883f00bcbcf3f0c239a2be81becea8031f83ff05f850fcd878aab0"} Jan 20 18:13:50 crc kubenswrapper[4558]: I0120 18:13:50.578785 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="94d7f76b-8cf8-4bf5-9844-526d8b79fffc" path="/var/lib/kubelet/pods/94d7f76b-8cf8-4bf5-9844-526d8b79fffc/volumes" Jan 20 18:13:51 crc kubenswrapper[4558]: I0120 18:13:51.168323 4558 generic.go:334] "Generic (PLEG): container finished" podID="d9e48d51-6057-4527-bb88-2cce07ed36ee" containerID="111124109f883f00bcbcf3f0c239a2be81becea8031f83ff05f850fcd878aab0" exitCode=0 Jan 20 18:13:51 crc kubenswrapper[4558]: I0120 18:13:51.168400 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vtkrz" event={"ID":"d9e48d51-6057-4527-bb88-2cce07ed36ee","Type":"ContainerDied","Data":"111124109f883f00bcbcf3f0c239a2be81becea8031f83ff05f850fcd878aab0"} Jan 20 18:13:52 crc kubenswrapper[4558]: I0120 18:13:52.183080 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vtkrz" event={"ID":"d9e48d51-6057-4527-bb88-2cce07ed36ee","Type":"ContainerStarted","Data":"ed9d07b6d7feda61004678103fcaf18acf9a416fc02189faaad534779cbc56c2"} Jan 20 18:13:52 crc kubenswrapper[4558]: I0120 18:13:52.203654 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-vtkrz" podStartSLOduration=2.689344523 podStartE2EDuration="5.203637738s" podCreationTimestamp="2026-01-20 18:13:47 +0000 UTC" firstStartedPulling="2026-01-20 18:13:49.147974081 +0000 UTC m=+5522.908312048" lastFinishedPulling="2026-01-20 18:13:51.662267297 +0000 UTC m=+5525.422605263" observedRunningTime="2026-01-20 18:13:52.201879482 +0000 UTC m=+5525.962217449" 
watchObservedRunningTime="2026-01-20 18:13:52.203637738 +0000 UTC m=+5525.963975705" Jan 20 18:13:53 crc kubenswrapper[4558]: I0120 18:13:53.352359 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-89nwm"] Jan 20 18:13:53 crc kubenswrapper[4558]: E0120 18:13:53.353298 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94d7f76b-8cf8-4bf5-9844-526d8b79fffc" containerName="dnsmasq-dns" Jan 20 18:13:53 crc kubenswrapper[4558]: I0120 18:13:53.353327 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="94d7f76b-8cf8-4bf5-9844-526d8b79fffc" containerName="dnsmasq-dns" Jan 20 18:13:53 crc kubenswrapper[4558]: E0120 18:13:53.353354 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94d7f76b-8cf8-4bf5-9844-526d8b79fffc" containerName="init" Jan 20 18:13:53 crc kubenswrapper[4558]: I0120 18:13:53.353361 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="94d7f76b-8cf8-4bf5-9844-526d8b79fffc" containerName="init" Jan 20 18:13:53 crc kubenswrapper[4558]: I0120 18:13:53.353502 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="94d7f76b-8cf8-4bf5-9844-526d8b79fffc" containerName="dnsmasq-dns" Jan 20 18:13:53 crc kubenswrapper[4558]: I0120 18:13:53.354585 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-89nwm" Jan 20 18:13:53 crc kubenswrapper[4558]: I0120 18:13:53.361911 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-89nwm"] Jan 20 18:13:53 crc kubenswrapper[4558]: I0120 18:13:53.466763 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jdc64\" (UniqueName: \"kubernetes.io/projected/48bc0f62-7673-4a1a-9d34-6394dbbb3a3a-kube-api-access-jdc64\") pod \"redhat-operators-89nwm\" (UID: \"48bc0f62-7673-4a1a-9d34-6394dbbb3a3a\") " pod="openshift-marketplace/redhat-operators-89nwm" Jan 20 18:13:53 crc kubenswrapper[4558]: I0120 18:13:53.467203 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/48bc0f62-7673-4a1a-9d34-6394dbbb3a3a-catalog-content\") pod \"redhat-operators-89nwm\" (UID: \"48bc0f62-7673-4a1a-9d34-6394dbbb3a3a\") " pod="openshift-marketplace/redhat-operators-89nwm" Jan 20 18:13:53 crc kubenswrapper[4558]: I0120 18:13:53.467285 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/48bc0f62-7673-4a1a-9d34-6394dbbb3a3a-utilities\") pod \"redhat-operators-89nwm\" (UID: \"48bc0f62-7673-4a1a-9d34-6394dbbb3a3a\") " pod="openshift-marketplace/redhat-operators-89nwm" Jan 20 18:13:53 crc kubenswrapper[4558]: I0120 18:13:53.568682 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jdc64\" (UniqueName: \"kubernetes.io/projected/48bc0f62-7673-4a1a-9d34-6394dbbb3a3a-kube-api-access-jdc64\") pod \"redhat-operators-89nwm\" (UID: \"48bc0f62-7673-4a1a-9d34-6394dbbb3a3a\") " pod="openshift-marketplace/redhat-operators-89nwm" Jan 20 18:13:53 crc kubenswrapper[4558]: I0120 18:13:53.568752 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/48bc0f62-7673-4a1a-9d34-6394dbbb3a3a-catalog-content\") pod \"redhat-operators-89nwm\" (UID: \"48bc0f62-7673-4a1a-9d34-6394dbbb3a3a\") " 
pod="openshift-marketplace/redhat-operators-89nwm" Jan 20 18:13:53 crc kubenswrapper[4558]: I0120 18:13:53.568824 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/48bc0f62-7673-4a1a-9d34-6394dbbb3a3a-utilities\") pod \"redhat-operators-89nwm\" (UID: \"48bc0f62-7673-4a1a-9d34-6394dbbb3a3a\") " pod="openshift-marketplace/redhat-operators-89nwm" Jan 20 18:13:53 crc kubenswrapper[4558]: I0120 18:13:53.569323 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/48bc0f62-7673-4a1a-9d34-6394dbbb3a3a-catalog-content\") pod \"redhat-operators-89nwm\" (UID: \"48bc0f62-7673-4a1a-9d34-6394dbbb3a3a\") " pod="openshift-marketplace/redhat-operators-89nwm" Jan 20 18:13:53 crc kubenswrapper[4558]: I0120 18:13:53.569483 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/48bc0f62-7673-4a1a-9d34-6394dbbb3a3a-utilities\") pod \"redhat-operators-89nwm\" (UID: \"48bc0f62-7673-4a1a-9d34-6394dbbb3a3a\") " pod="openshift-marketplace/redhat-operators-89nwm" Jan 20 18:13:53 crc kubenswrapper[4558]: I0120 18:13:53.587489 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jdc64\" (UniqueName: \"kubernetes.io/projected/48bc0f62-7673-4a1a-9d34-6394dbbb3a3a-kube-api-access-jdc64\") pod \"redhat-operators-89nwm\" (UID: \"48bc0f62-7673-4a1a-9d34-6394dbbb3a3a\") " pod="openshift-marketplace/redhat-operators-89nwm" Jan 20 18:13:53 crc kubenswrapper[4558]: I0120 18:13:53.671574 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-89nwm" Jan 20 18:13:54 crc kubenswrapper[4558]: I0120 18:13:54.082872 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-89nwm"] Jan 20 18:13:54 crc kubenswrapper[4558]: W0120 18:13:54.088607 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod48bc0f62_7673_4a1a_9d34_6394dbbb3a3a.slice/crio-a19e3a209b187fc6a0e87c569bbd1e033f7e4e6f3a42768af70499e0894dde6b WatchSource:0}: Error finding container a19e3a209b187fc6a0e87c569bbd1e033f7e4e6f3a42768af70499e0894dde6b: Status 404 returned error can't find the container with id a19e3a209b187fc6a0e87c569bbd1e033f7e4e6f3a42768af70499e0894dde6b Jan 20 18:13:54 crc kubenswrapper[4558]: I0120 18:13:54.202538 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-89nwm" event={"ID":"48bc0f62-7673-4a1a-9d34-6394dbbb3a3a","Type":"ContainerStarted","Data":"a19e3a209b187fc6a0e87c569bbd1e033f7e4e6f3a42768af70499e0894dde6b"} Jan 20 18:13:55 crc kubenswrapper[4558]: I0120 18:13:55.210382 4558 generic.go:334] "Generic (PLEG): container finished" podID="48bc0f62-7673-4a1a-9d34-6394dbbb3a3a" containerID="3d5fd6bc2c6d4fb8375a09d04ed5c6e150ab0920c0b548c4fb492237ba12f14c" exitCode=0 Jan 20 18:13:55 crc kubenswrapper[4558]: I0120 18:13:55.210471 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-89nwm" event={"ID":"48bc0f62-7673-4a1a-9d34-6394dbbb3a3a","Type":"ContainerDied","Data":"3d5fd6bc2c6d4fb8375a09d04ed5c6e150ab0920c0b548c4fb492237ba12f14c"} Jan 20 18:13:56 crc kubenswrapper[4558]: I0120 18:13:56.220115 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-89nwm" 
event={"ID":"48bc0f62-7673-4a1a-9d34-6394dbbb3a3a","Type":"ContainerStarted","Data":"2c18903410a2ca245a4ac146f3601533da28b64fd68adcb17b0943df021707c8"} Jan 20 18:13:57 crc kubenswrapper[4558]: I0120 18:13:57.251730 4558 generic.go:334] "Generic (PLEG): container finished" podID="48bc0f62-7673-4a1a-9d34-6394dbbb3a3a" containerID="2c18903410a2ca245a4ac146f3601533da28b64fd68adcb17b0943df021707c8" exitCode=0 Jan 20 18:13:57 crc kubenswrapper[4558]: I0120 18:13:57.251781 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-89nwm" event={"ID":"48bc0f62-7673-4a1a-9d34-6394dbbb3a3a","Type":"ContainerDied","Data":"2c18903410a2ca245a4ac146f3601533da28b64fd68adcb17b0943df021707c8"} Jan 20 18:13:57 crc kubenswrapper[4558]: I0120 18:13:57.330467 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 18:13:57 crc kubenswrapper[4558]: I0120 18:13:57.330546 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 18:13:58 crc kubenswrapper[4558]: I0120 18:13:58.267843 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-89nwm" event={"ID":"48bc0f62-7673-4a1a-9d34-6394dbbb3a3a","Type":"ContainerStarted","Data":"8656a1d139f4ed5b8aa20c1666549dcee12b90978e5d1a1280da06a9e0229930"} Jan 20 18:13:58 crc kubenswrapper[4558]: I0120 18:13:58.277150 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-vtkrz" Jan 20 18:13:58 crc kubenswrapper[4558]: I0120 18:13:58.277239 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-vtkrz" Jan 20 18:13:58 crc kubenswrapper[4558]: I0120 18:13:58.287757 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-89nwm" podStartSLOduration=2.762053674 podStartE2EDuration="5.287743737s" podCreationTimestamp="2026-01-20 18:13:53 +0000 UTC" firstStartedPulling="2026-01-20 18:13:55.212025695 +0000 UTC m=+5528.972363662" lastFinishedPulling="2026-01-20 18:13:57.737715769 +0000 UTC m=+5531.498053725" observedRunningTime="2026-01-20 18:13:58.282354916 +0000 UTC m=+5532.042692873" watchObservedRunningTime="2026-01-20 18:13:58.287743737 +0000 UTC m=+5532.048081705" Jan 20 18:13:58 crc kubenswrapper[4558]: I0120 18:13:58.318214 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-vtkrz" Jan 20 18:13:59 crc kubenswrapper[4558]: I0120 18:13:59.315924 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-vtkrz" Jan 20 18:14:03 crc kubenswrapper[4558]: I0120 18:14:03.671735 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-89nwm" Jan 20 18:14:03 crc kubenswrapper[4558]: I0120 18:14:03.672098 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-marketplace/redhat-operators-89nwm" Jan 20 18:14:03 crc kubenswrapper[4558]: I0120 18:14:03.711281 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-89nwm" Jan 20 18:14:04 crc kubenswrapper[4558]: I0120 18:14:04.145854 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-vtkrz"] Jan 20 18:14:04 crc kubenswrapper[4558]: I0120 18:14:04.146401 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-vtkrz" podUID="d9e48d51-6057-4527-bb88-2cce07ed36ee" containerName="registry-server" containerID="cri-o://ed9d07b6d7feda61004678103fcaf18acf9a416fc02189faaad534779cbc56c2" gracePeriod=2 Jan 20 18:14:04 crc kubenswrapper[4558]: I0120 18:14:04.317099 4558 generic.go:334] "Generic (PLEG): container finished" podID="d9e48d51-6057-4527-bb88-2cce07ed36ee" containerID="ed9d07b6d7feda61004678103fcaf18acf9a416fc02189faaad534779cbc56c2" exitCode=0 Jan 20 18:14:04 crc kubenswrapper[4558]: I0120 18:14:04.317145 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vtkrz" event={"ID":"d9e48d51-6057-4527-bb88-2cce07ed36ee","Type":"ContainerDied","Data":"ed9d07b6d7feda61004678103fcaf18acf9a416fc02189faaad534779cbc56c2"} Jan 20 18:14:04 crc kubenswrapper[4558]: I0120 18:14:04.359119 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-89nwm" Jan 20 18:14:04 crc kubenswrapper[4558]: I0120 18:14:04.511312 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vtkrz" Jan 20 18:14:04 crc kubenswrapper[4558]: I0120 18:14:04.631811 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9e48d51-6057-4527-bb88-2cce07ed36ee-utilities\") pod \"d9e48d51-6057-4527-bb88-2cce07ed36ee\" (UID: \"d9e48d51-6057-4527-bb88-2cce07ed36ee\") " Jan 20 18:14:04 crc kubenswrapper[4558]: I0120 18:14:04.631889 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-llvcx\" (UniqueName: \"kubernetes.io/projected/d9e48d51-6057-4527-bb88-2cce07ed36ee-kube-api-access-llvcx\") pod \"d9e48d51-6057-4527-bb88-2cce07ed36ee\" (UID: \"d9e48d51-6057-4527-bb88-2cce07ed36ee\") " Jan 20 18:14:04 crc kubenswrapper[4558]: I0120 18:14:04.631945 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9e48d51-6057-4527-bb88-2cce07ed36ee-catalog-content\") pod \"d9e48d51-6057-4527-bb88-2cce07ed36ee\" (UID: \"d9e48d51-6057-4527-bb88-2cce07ed36ee\") " Jan 20 18:14:04 crc kubenswrapper[4558]: I0120 18:14:04.632718 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d9e48d51-6057-4527-bb88-2cce07ed36ee-utilities" (OuterVolumeSpecName: "utilities") pod "d9e48d51-6057-4527-bb88-2cce07ed36ee" (UID: "d9e48d51-6057-4527-bb88-2cce07ed36ee"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:14:04 crc kubenswrapper[4558]: I0120 18:14:04.633231 4558 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9e48d51-6057-4527-bb88-2cce07ed36ee-utilities\") on node \"crc\" DevicePath \"\"" Jan 20 18:14:04 crc kubenswrapper[4558]: I0120 18:14:04.638250 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d9e48d51-6057-4527-bb88-2cce07ed36ee-kube-api-access-llvcx" (OuterVolumeSpecName: "kube-api-access-llvcx") pod "d9e48d51-6057-4527-bb88-2cce07ed36ee" (UID: "d9e48d51-6057-4527-bb88-2cce07ed36ee"). InnerVolumeSpecName "kube-api-access-llvcx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:14:04 crc kubenswrapper[4558]: I0120 18:14:04.672149 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d9e48d51-6057-4527-bb88-2cce07ed36ee-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d9e48d51-6057-4527-bb88-2cce07ed36ee" (UID: "d9e48d51-6057-4527-bb88-2cce07ed36ee"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:14:04 crc kubenswrapper[4558]: I0120 18:14:04.734848 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-llvcx\" (UniqueName: \"kubernetes.io/projected/d9e48d51-6057-4527-bb88-2cce07ed36ee-kube-api-access-llvcx\") on node \"crc\" DevicePath \"\"" Jan 20 18:14:04 crc kubenswrapper[4558]: I0120 18:14:04.734885 4558 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9e48d51-6057-4527-bb88-2cce07ed36ee-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 20 18:14:05 crc kubenswrapper[4558]: I0120 18:14:05.329325 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vtkrz" event={"ID":"d9e48d51-6057-4527-bb88-2cce07ed36ee","Type":"ContainerDied","Data":"db95476352db6a83363bcc2b8ff8325e22a218aebcb868c554d59763f58bbbb4"} Jan 20 18:14:05 crc kubenswrapper[4558]: I0120 18:14:05.329742 4558 scope.go:117] "RemoveContainer" containerID="ed9d07b6d7feda61004678103fcaf18acf9a416fc02189faaad534779cbc56c2" Jan 20 18:14:05 crc kubenswrapper[4558]: I0120 18:14:05.329381 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-vtkrz" Jan 20 18:14:05 crc kubenswrapper[4558]: I0120 18:14:05.353622 4558 scope.go:117] "RemoveContainer" containerID="111124109f883f00bcbcf3f0c239a2be81becea8031f83ff05f850fcd878aab0" Jan 20 18:14:05 crc kubenswrapper[4558]: I0120 18:14:05.355461 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-vtkrz"] Jan 20 18:14:05 crc kubenswrapper[4558]: I0120 18:14:05.359217 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-vtkrz"] Jan 20 18:14:05 crc kubenswrapper[4558]: I0120 18:14:05.368950 4558 scope.go:117] "RemoveContainer" containerID="3f0915996b59606d107a3f4c1c832d7d5d402a9b498b89f7f1984fc95ea4064d" Jan 20 18:14:06 crc kubenswrapper[4558]: I0120 18:14:06.574737 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d9e48d51-6057-4527-bb88-2cce07ed36ee" path="/var/lib/kubelet/pods/d9e48d51-6057-4527-bb88-2cce07ed36ee/volumes" Jan 20 18:14:08 crc kubenswrapper[4558]: I0120 18:14:08.743585 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-89nwm"] Jan 20 18:14:08 crc kubenswrapper[4558]: I0120 18:14:08.743843 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-89nwm" podUID="48bc0f62-7673-4a1a-9d34-6394dbbb3a3a" containerName="registry-server" containerID="cri-o://8656a1d139f4ed5b8aa20c1666549dcee12b90978e5d1a1280da06a9e0229930" gracePeriod=2 Jan 20 18:14:09 crc kubenswrapper[4558]: I0120 18:14:09.102001 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-89nwm" Jan 20 18:14:09 crc kubenswrapper[4558]: I0120 18:14:09.203753 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jdc64\" (UniqueName: \"kubernetes.io/projected/48bc0f62-7673-4a1a-9d34-6394dbbb3a3a-kube-api-access-jdc64\") pod \"48bc0f62-7673-4a1a-9d34-6394dbbb3a3a\" (UID: \"48bc0f62-7673-4a1a-9d34-6394dbbb3a3a\") " Jan 20 18:14:09 crc kubenswrapper[4558]: I0120 18:14:09.204145 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/48bc0f62-7673-4a1a-9d34-6394dbbb3a3a-utilities\") pod \"48bc0f62-7673-4a1a-9d34-6394dbbb3a3a\" (UID: \"48bc0f62-7673-4a1a-9d34-6394dbbb3a3a\") " Jan 20 18:14:09 crc kubenswrapper[4558]: I0120 18:14:09.204295 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/48bc0f62-7673-4a1a-9d34-6394dbbb3a3a-catalog-content\") pod \"48bc0f62-7673-4a1a-9d34-6394dbbb3a3a\" (UID: \"48bc0f62-7673-4a1a-9d34-6394dbbb3a3a\") " Jan 20 18:14:09 crc kubenswrapper[4558]: I0120 18:14:09.204902 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/48bc0f62-7673-4a1a-9d34-6394dbbb3a3a-utilities" (OuterVolumeSpecName: "utilities") pod "48bc0f62-7673-4a1a-9d34-6394dbbb3a3a" (UID: "48bc0f62-7673-4a1a-9d34-6394dbbb3a3a"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:14:09 crc kubenswrapper[4558]: I0120 18:14:09.210416 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/48bc0f62-7673-4a1a-9d34-6394dbbb3a3a-kube-api-access-jdc64" (OuterVolumeSpecName: "kube-api-access-jdc64") pod "48bc0f62-7673-4a1a-9d34-6394dbbb3a3a" (UID: "48bc0f62-7673-4a1a-9d34-6394dbbb3a3a"). InnerVolumeSpecName "kube-api-access-jdc64". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:14:09 crc kubenswrapper[4558]: I0120 18:14:09.305382 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/48bc0f62-7673-4a1a-9d34-6394dbbb3a3a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "48bc0f62-7673-4a1a-9d34-6394dbbb3a3a" (UID: "48bc0f62-7673-4a1a-9d34-6394dbbb3a3a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:14:09 crc kubenswrapper[4558]: I0120 18:14:09.305955 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/48bc0f62-7673-4a1a-9d34-6394dbbb3a3a-catalog-content\") pod \"48bc0f62-7673-4a1a-9d34-6394dbbb3a3a\" (UID: \"48bc0f62-7673-4a1a-9d34-6394dbbb3a3a\") " Jan 20 18:14:09 crc kubenswrapper[4558]: W0120 18:14:09.306060 4558 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/48bc0f62-7673-4a1a-9d34-6394dbbb3a3a/volumes/kubernetes.io~empty-dir/catalog-content Jan 20 18:14:09 crc kubenswrapper[4558]: I0120 18:14:09.306082 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/48bc0f62-7673-4a1a-9d34-6394dbbb3a3a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "48bc0f62-7673-4a1a-9d34-6394dbbb3a3a" (UID: "48bc0f62-7673-4a1a-9d34-6394dbbb3a3a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:14:09 crc kubenswrapper[4558]: I0120 18:14:09.306439 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jdc64\" (UniqueName: \"kubernetes.io/projected/48bc0f62-7673-4a1a-9d34-6394dbbb3a3a-kube-api-access-jdc64\") on node \"crc\" DevicePath \"\"" Jan 20 18:14:09 crc kubenswrapper[4558]: I0120 18:14:09.306466 4558 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/48bc0f62-7673-4a1a-9d34-6394dbbb3a3a-utilities\") on node \"crc\" DevicePath \"\"" Jan 20 18:14:09 crc kubenswrapper[4558]: I0120 18:14:09.306479 4558 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/48bc0f62-7673-4a1a-9d34-6394dbbb3a3a-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 20 18:14:09 crc kubenswrapper[4558]: I0120 18:14:09.379574 4558 generic.go:334] "Generic (PLEG): container finished" podID="48bc0f62-7673-4a1a-9d34-6394dbbb3a3a" containerID="8656a1d139f4ed5b8aa20c1666549dcee12b90978e5d1a1280da06a9e0229930" exitCode=0 Jan 20 18:14:09 crc kubenswrapper[4558]: I0120 18:14:09.379638 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-89nwm" event={"ID":"48bc0f62-7673-4a1a-9d34-6394dbbb3a3a","Type":"ContainerDied","Data":"8656a1d139f4ed5b8aa20c1666549dcee12b90978e5d1a1280da06a9e0229930"} Jan 20 18:14:09 crc kubenswrapper[4558]: I0120 18:14:09.379674 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-89nwm" Jan 20 18:14:09 crc kubenswrapper[4558]: I0120 18:14:09.379702 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-89nwm" event={"ID":"48bc0f62-7673-4a1a-9d34-6394dbbb3a3a","Type":"ContainerDied","Data":"a19e3a209b187fc6a0e87c569bbd1e033f7e4e6f3a42768af70499e0894dde6b"} Jan 20 18:14:09 crc kubenswrapper[4558]: I0120 18:14:09.379724 4558 scope.go:117] "RemoveContainer" containerID="8656a1d139f4ed5b8aa20c1666549dcee12b90978e5d1a1280da06a9e0229930" Jan 20 18:14:09 crc kubenswrapper[4558]: I0120 18:14:09.399289 4558 scope.go:117] "RemoveContainer" containerID="2c18903410a2ca245a4ac146f3601533da28b64fd68adcb17b0943df021707c8" Jan 20 18:14:09 crc kubenswrapper[4558]: I0120 18:14:09.412386 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-89nwm"] Jan 20 18:14:09 crc kubenswrapper[4558]: I0120 18:14:09.416510 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-89nwm"] Jan 20 18:14:09 crc kubenswrapper[4558]: I0120 18:14:09.436297 4558 scope.go:117] "RemoveContainer" containerID="3d5fd6bc2c6d4fb8375a09d04ed5c6e150ab0920c0b548c4fb492237ba12f14c" Jan 20 18:14:09 crc kubenswrapper[4558]: I0120 18:14:09.452307 4558 scope.go:117] "RemoveContainer" containerID="8656a1d139f4ed5b8aa20c1666549dcee12b90978e5d1a1280da06a9e0229930" Jan 20 18:14:09 crc kubenswrapper[4558]: E0120 18:14:09.452757 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8656a1d139f4ed5b8aa20c1666549dcee12b90978e5d1a1280da06a9e0229930\": container with ID starting with 8656a1d139f4ed5b8aa20c1666549dcee12b90978e5d1a1280da06a9e0229930 not found: ID does not exist" containerID="8656a1d139f4ed5b8aa20c1666549dcee12b90978e5d1a1280da06a9e0229930" Jan 20 18:14:09 crc kubenswrapper[4558]: I0120 18:14:09.452792 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8656a1d139f4ed5b8aa20c1666549dcee12b90978e5d1a1280da06a9e0229930"} err="failed to get container status \"8656a1d139f4ed5b8aa20c1666549dcee12b90978e5d1a1280da06a9e0229930\": rpc error: code = NotFound desc = could not find container \"8656a1d139f4ed5b8aa20c1666549dcee12b90978e5d1a1280da06a9e0229930\": container with ID starting with 8656a1d139f4ed5b8aa20c1666549dcee12b90978e5d1a1280da06a9e0229930 not found: ID does not exist" Jan 20 18:14:09 crc kubenswrapper[4558]: I0120 18:14:09.452838 4558 scope.go:117] "RemoveContainer" containerID="2c18903410a2ca245a4ac146f3601533da28b64fd68adcb17b0943df021707c8" Jan 20 18:14:09 crc kubenswrapper[4558]: E0120 18:14:09.453236 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2c18903410a2ca245a4ac146f3601533da28b64fd68adcb17b0943df021707c8\": container with ID starting with 2c18903410a2ca245a4ac146f3601533da28b64fd68adcb17b0943df021707c8 not found: ID does not exist" containerID="2c18903410a2ca245a4ac146f3601533da28b64fd68adcb17b0943df021707c8" Jan 20 18:14:09 crc kubenswrapper[4558]: I0120 18:14:09.453279 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2c18903410a2ca245a4ac146f3601533da28b64fd68adcb17b0943df021707c8"} err="failed to get container status \"2c18903410a2ca245a4ac146f3601533da28b64fd68adcb17b0943df021707c8\": rpc error: code = NotFound desc = could not find container 
\"2c18903410a2ca245a4ac146f3601533da28b64fd68adcb17b0943df021707c8\": container with ID starting with 2c18903410a2ca245a4ac146f3601533da28b64fd68adcb17b0943df021707c8 not found: ID does not exist" Jan 20 18:14:09 crc kubenswrapper[4558]: I0120 18:14:09.453310 4558 scope.go:117] "RemoveContainer" containerID="3d5fd6bc2c6d4fb8375a09d04ed5c6e150ab0920c0b548c4fb492237ba12f14c" Jan 20 18:14:09 crc kubenswrapper[4558]: E0120 18:14:09.453553 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3d5fd6bc2c6d4fb8375a09d04ed5c6e150ab0920c0b548c4fb492237ba12f14c\": container with ID starting with 3d5fd6bc2c6d4fb8375a09d04ed5c6e150ab0920c0b548c4fb492237ba12f14c not found: ID does not exist" containerID="3d5fd6bc2c6d4fb8375a09d04ed5c6e150ab0920c0b548c4fb492237ba12f14c" Jan 20 18:14:09 crc kubenswrapper[4558]: I0120 18:14:09.453578 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3d5fd6bc2c6d4fb8375a09d04ed5c6e150ab0920c0b548c4fb492237ba12f14c"} err="failed to get container status \"3d5fd6bc2c6d4fb8375a09d04ed5c6e150ab0920c0b548c4fb492237ba12f14c\": rpc error: code = NotFound desc = could not find container \"3d5fd6bc2c6d4fb8375a09d04ed5c6e150ab0920c0b548c4fb492237ba12f14c\": container with ID starting with 3d5fd6bc2c6d4fb8375a09d04ed5c6e150ab0920c0b548c4fb492237ba12f14c not found: ID does not exist" Jan 20 18:14:10 crc kubenswrapper[4558]: I0120 18:14:10.574647 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="48bc0f62-7673-4a1a-9d34-6394dbbb3a3a" path="/var/lib/kubelet/pods/48bc0f62-7673-4a1a-9d34-6394dbbb3a3a/volumes" Jan 20 18:14:21 crc kubenswrapper[4558]: I0120 18:14:21.635046 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-index-9glms"] Jan 20 18:14:21 crc kubenswrapper[4558]: E0120 18:14:21.636577 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9e48d51-6057-4527-bb88-2cce07ed36ee" containerName="extract-content" Jan 20 18:14:21 crc kubenswrapper[4558]: I0120 18:14:21.636591 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9e48d51-6057-4527-bb88-2cce07ed36ee" containerName="extract-content" Jan 20 18:14:21 crc kubenswrapper[4558]: E0120 18:14:21.636606 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48bc0f62-7673-4a1a-9d34-6394dbbb3a3a" containerName="extract-content" Jan 20 18:14:21 crc kubenswrapper[4558]: I0120 18:14:21.636612 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="48bc0f62-7673-4a1a-9d34-6394dbbb3a3a" containerName="extract-content" Jan 20 18:14:21 crc kubenswrapper[4558]: E0120 18:14:21.636629 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48bc0f62-7673-4a1a-9d34-6394dbbb3a3a" containerName="registry-server" Jan 20 18:14:21 crc kubenswrapper[4558]: I0120 18:14:21.636635 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="48bc0f62-7673-4a1a-9d34-6394dbbb3a3a" containerName="registry-server" Jan 20 18:14:21 crc kubenswrapper[4558]: E0120 18:14:21.636651 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48bc0f62-7673-4a1a-9d34-6394dbbb3a3a" containerName="extract-utilities" Jan 20 18:14:21 crc kubenswrapper[4558]: I0120 18:14:21.636657 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="48bc0f62-7673-4a1a-9d34-6394dbbb3a3a" containerName="extract-utilities" Jan 20 18:14:21 crc kubenswrapper[4558]: E0120 18:14:21.636669 4558 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="d9e48d51-6057-4527-bb88-2cce07ed36ee" containerName="extract-utilities" Jan 20 18:14:21 crc kubenswrapper[4558]: I0120 18:14:21.636675 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9e48d51-6057-4527-bb88-2cce07ed36ee" containerName="extract-utilities" Jan 20 18:14:21 crc kubenswrapper[4558]: E0120 18:14:21.636686 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9e48d51-6057-4527-bb88-2cce07ed36ee" containerName="registry-server" Jan 20 18:14:21 crc kubenswrapper[4558]: I0120 18:14:21.636691 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9e48d51-6057-4527-bb88-2cce07ed36ee" containerName="registry-server" Jan 20 18:14:21 crc kubenswrapper[4558]: I0120 18:14:21.636801 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="48bc0f62-7673-4a1a-9d34-6394dbbb3a3a" containerName="registry-server" Jan 20 18:14:21 crc kubenswrapper[4558]: I0120 18:14:21.636811 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d9e48d51-6057-4527-bb88-2cce07ed36ee" containerName="registry-server" Jan 20 18:14:21 crc kubenswrapper[4558]: I0120 18:14:21.637560 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-index-9glms" Jan 20 18:14:21 crc kubenswrapper[4558]: I0120 18:14:21.640279 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Jan 20 18:14:21 crc kubenswrapper[4558]: I0120 18:14:21.642850 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-index-dockercfg-26mth" Jan 20 18:14:21 crc kubenswrapper[4558]: I0120 18:14:21.647366 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-index-9glms"] Jan 20 18:14:21 crc kubenswrapper[4558]: I0120 18:14:21.647661 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Jan 20 18:14:21 crc kubenswrapper[4558]: I0120 18:14:21.795626 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j2dn4\" (UniqueName: \"kubernetes.io/projected/ca607b46-4184-4af0-b4b5-e58600e59ea7-kube-api-access-j2dn4\") pod \"mariadb-operator-index-9glms\" (UID: \"ca607b46-4184-4af0-b4b5-e58600e59ea7\") " pod="openstack-operators/mariadb-operator-index-9glms" Jan 20 18:14:21 crc kubenswrapper[4558]: I0120 18:14:21.896858 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j2dn4\" (UniqueName: \"kubernetes.io/projected/ca607b46-4184-4af0-b4b5-e58600e59ea7-kube-api-access-j2dn4\") pod \"mariadb-operator-index-9glms\" (UID: \"ca607b46-4184-4af0-b4b5-e58600e59ea7\") " pod="openstack-operators/mariadb-operator-index-9glms" Jan 20 18:14:21 crc kubenswrapper[4558]: I0120 18:14:21.914376 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j2dn4\" (UniqueName: \"kubernetes.io/projected/ca607b46-4184-4af0-b4b5-e58600e59ea7-kube-api-access-j2dn4\") pod \"mariadb-operator-index-9glms\" (UID: \"ca607b46-4184-4af0-b4b5-e58600e59ea7\") " pod="openstack-operators/mariadb-operator-index-9glms" Jan 20 18:14:21 crc kubenswrapper[4558]: I0120 18:14:21.954754 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-index-9glms" Jan 20 18:14:22 crc kubenswrapper[4558]: I0120 18:14:22.016615 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/mariadb-operator-index-9glms"] Jan 20 18:14:22 crc kubenswrapper[4558]: I0120 18:14:22.153805 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/mariadb-operator-index-9glms"] Jan 20 18:14:22 crc kubenswrapper[4558]: I0120 18:14:22.421926 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-index-d7sg5"] Jan 20 18:14:22 crc kubenswrapper[4558]: I0120 18:14:22.423145 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-index-d7sg5" Jan 20 18:14:22 crc kubenswrapper[4558]: I0120 18:14:22.431295 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-index-d7sg5"] Jan 20 18:14:22 crc kubenswrapper[4558]: I0120 18:14:22.502660 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-9glms" event={"ID":"ca607b46-4184-4af0-b4b5-e58600e59ea7","Type":"ContainerStarted","Data":"98ca333bcacd690734dd749506318d9b1043be70e1967f5cc8c75dd57bddae8f"} Jan 20 18:14:22 crc kubenswrapper[4558]: I0120 18:14:22.508925 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gx29x\" (UniqueName: \"kubernetes.io/projected/1be05cde-4e68-4010-9fbf-2c04288df80b-kube-api-access-gx29x\") pod \"mariadb-operator-index-d7sg5\" (UID: \"1be05cde-4e68-4010-9fbf-2c04288df80b\") " pod="openstack-operators/mariadb-operator-index-d7sg5" Jan 20 18:14:22 crc kubenswrapper[4558]: I0120 18:14:22.610862 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gx29x\" (UniqueName: \"kubernetes.io/projected/1be05cde-4e68-4010-9fbf-2c04288df80b-kube-api-access-gx29x\") pod \"mariadb-operator-index-d7sg5\" (UID: \"1be05cde-4e68-4010-9fbf-2c04288df80b\") " pod="openstack-operators/mariadb-operator-index-d7sg5" Jan 20 18:14:22 crc kubenswrapper[4558]: I0120 18:14:22.627816 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gx29x\" (UniqueName: \"kubernetes.io/projected/1be05cde-4e68-4010-9fbf-2c04288df80b-kube-api-access-gx29x\") pod \"mariadb-operator-index-d7sg5\" (UID: \"1be05cde-4e68-4010-9fbf-2c04288df80b\") " pod="openstack-operators/mariadb-operator-index-d7sg5" Jan 20 18:14:22 crc kubenswrapper[4558]: I0120 18:14:22.745900 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-index-d7sg5" Jan 20 18:14:22 crc kubenswrapper[4558]: I0120 18:14:22.957450 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-index-d7sg5"] Jan 20 18:14:23 crc kubenswrapper[4558]: I0120 18:14:23.512926 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-d7sg5" event={"ID":"1be05cde-4e68-4010-9fbf-2c04288df80b","Type":"ContainerStarted","Data":"037f3aa995c10b2fac55d3556af30dd35a730aa100d11451c528ad23b5314675"} Jan 20 18:14:24 crc kubenswrapper[4558]: I0120 18:14:24.521552 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-d7sg5" event={"ID":"1be05cde-4e68-4010-9fbf-2c04288df80b","Type":"ContainerStarted","Data":"1e08fcfa4a1135613dedad048f0451ce11150e9b615614e993f8192e894401e2"} Jan 20 18:14:24 crc kubenswrapper[4558]: I0120 18:14:24.523778 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-9glms" event={"ID":"ca607b46-4184-4af0-b4b5-e58600e59ea7","Type":"ContainerStarted","Data":"70e6d5c2a1405cdd1fed2db0b8dfb094608d2eded333af670230a82492c3cae8"} Jan 20 18:14:24 crc kubenswrapper[4558]: I0120 18:14:24.523857 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/mariadb-operator-index-9glms" podUID="ca607b46-4184-4af0-b4b5-e58600e59ea7" containerName="registry-server" containerID="cri-o://70e6d5c2a1405cdd1fed2db0b8dfb094608d2eded333af670230a82492c3cae8" gracePeriod=2 Jan 20 18:14:24 crc kubenswrapper[4558]: I0120 18:14:24.539061 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-index-d7sg5" podStartSLOduration=1.34597831 podStartE2EDuration="2.539042688s" podCreationTimestamp="2026-01-20 18:14:22 +0000 UTC" firstStartedPulling="2026-01-20 18:14:22.957094383 +0000 UTC m=+5556.717432349" lastFinishedPulling="2026-01-20 18:14:24.15015876 +0000 UTC m=+5557.910496727" observedRunningTime="2026-01-20 18:14:24.535280465 +0000 UTC m=+5558.295618432" watchObservedRunningTime="2026-01-20 18:14:24.539042688 +0000 UTC m=+5558.299380656" Jan 20 18:14:24 crc kubenswrapper[4558]: I0120 18:14:24.549078 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-index-9glms" podStartSLOduration=1.594769473 podStartE2EDuration="3.549060264s" podCreationTimestamp="2026-01-20 18:14:21 +0000 UTC" firstStartedPulling="2026-01-20 18:14:22.194186006 +0000 UTC m=+5555.954523973" lastFinishedPulling="2026-01-20 18:14:24.148476797 +0000 UTC m=+5557.908814764" observedRunningTime="2026-01-20 18:14:24.545834409 +0000 UTC m=+5558.306172375" watchObservedRunningTime="2026-01-20 18:14:24.549060264 +0000 UTC m=+5558.309398231" Jan 20 18:14:24 crc kubenswrapper[4558]: I0120 18:14:24.857383 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-index-9glms" Jan 20 18:14:24 crc kubenswrapper[4558]: I0120 18:14:24.949389 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j2dn4\" (UniqueName: \"kubernetes.io/projected/ca607b46-4184-4af0-b4b5-e58600e59ea7-kube-api-access-j2dn4\") pod \"ca607b46-4184-4af0-b4b5-e58600e59ea7\" (UID: \"ca607b46-4184-4af0-b4b5-e58600e59ea7\") " Jan 20 18:14:24 crc kubenswrapper[4558]: I0120 18:14:24.955199 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ca607b46-4184-4af0-b4b5-e58600e59ea7-kube-api-access-j2dn4" (OuterVolumeSpecName: "kube-api-access-j2dn4") pod "ca607b46-4184-4af0-b4b5-e58600e59ea7" (UID: "ca607b46-4184-4af0-b4b5-e58600e59ea7"). InnerVolumeSpecName "kube-api-access-j2dn4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:14:25 crc kubenswrapper[4558]: I0120 18:14:25.051139 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j2dn4\" (UniqueName: \"kubernetes.io/projected/ca607b46-4184-4af0-b4b5-e58600e59ea7-kube-api-access-j2dn4\") on node \"crc\" DevicePath \"\"" Jan 20 18:14:25 crc kubenswrapper[4558]: I0120 18:14:25.532897 4558 generic.go:334] "Generic (PLEG): container finished" podID="ca607b46-4184-4af0-b4b5-e58600e59ea7" containerID="70e6d5c2a1405cdd1fed2db0b8dfb094608d2eded333af670230a82492c3cae8" exitCode=0 Jan 20 18:14:25 crc kubenswrapper[4558]: I0120 18:14:25.532989 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-index-9glms" Jan 20 18:14:25 crc kubenswrapper[4558]: I0120 18:14:25.533004 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-9glms" event={"ID":"ca607b46-4184-4af0-b4b5-e58600e59ea7","Type":"ContainerDied","Data":"70e6d5c2a1405cdd1fed2db0b8dfb094608d2eded333af670230a82492c3cae8"} Jan 20 18:14:25 crc kubenswrapper[4558]: I0120 18:14:25.533073 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-9glms" event={"ID":"ca607b46-4184-4af0-b4b5-e58600e59ea7","Type":"ContainerDied","Data":"98ca333bcacd690734dd749506318d9b1043be70e1967f5cc8c75dd57bddae8f"} Jan 20 18:14:25 crc kubenswrapper[4558]: I0120 18:14:25.533102 4558 scope.go:117] "RemoveContainer" containerID="70e6d5c2a1405cdd1fed2db0b8dfb094608d2eded333af670230a82492c3cae8" Jan 20 18:14:25 crc kubenswrapper[4558]: I0120 18:14:25.556972 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/mariadb-operator-index-9glms"] Jan 20 18:14:25 crc kubenswrapper[4558]: I0120 18:14:25.558407 4558 scope.go:117] "RemoveContainer" containerID="70e6d5c2a1405cdd1fed2db0b8dfb094608d2eded333af670230a82492c3cae8" Jan 20 18:14:25 crc kubenswrapper[4558]: E0120 18:14:25.558982 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"70e6d5c2a1405cdd1fed2db0b8dfb094608d2eded333af670230a82492c3cae8\": container with ID starting with 70e6d5c2a1405cdd1fed2db0b8dfb094608d2eded333af670230a82492c3cae8 not found: ID does not exist" containerID="70e6d5c2a1405cdd1fed2db0b8dfb094608d2eded333af670230a82492c3cae8" Jan 20 18:14:25 crc kubenswrapper[4558]: I0120 18:14:25.559020 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"70e6d5c2a1405cdd1fed2db0b8dfb094608d2eded333af670230a82492c3cae8"} err="failed to get container 
status \"70e6d5c2a1405cdd1fed2db0b8dfb094608d2eded333af670230a82492c3cae8\": rpc error: code = NotFound desc = could not find container \"70e6d5c2a1405cdd1fed2db0b8dfb094608d2eded333af670230a82492c3cae8\": container with ID starting with 70e6d5c2a1405cdd1fed2db0b8dfb094608d2eded333af670230a82492c3cae8 not found: ID does not exist" Jan 20 18:14:25 crc kubenswrapper[4558]: I0120 18:14:25.565729 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/mariadb-operator-index-9glms"] Jan 20 18:14:26 crc kubenswrapper[4558]: I0120 18:14:26.571972 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ca607b46-4184-4af0-b4b5-e58600e59ea7" path="/var/lib/kubelet/pods/ca607b46-4184-4af0-b4b5-e58600e59ea7/volumes" Jan 20 18:14:27 crc kubenswrapper[4558]: I0120 18:14:27.330040 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 18:14:27 crc kubenswrapper[4558]: I0120 18:14:27.330098 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 18:14:32 crc kubenswrapper[4558]: I0120 18:14:32.746730 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/mariadb-operator-index-d7sg5" Jan 20 18:14:32 crc kubenswrapper[4558]: I0120 18:14:32.747088 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-index-d7sg5" Jan 20 18:14:32 crc kubenswrapper[4558]: I0120 18:14:32.778609 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/mariadb-operator-index-d7sg5" Jan 20 18:14:33 crc kubenswrapper[4558]: I0120 18:14:33.621387 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-index-d7sg5" Jan 20 18:14:34 crc kubenswrapper[4558]: I0120 18:14:34.650862 4558 scope.go:117] "RemoveContainer" containerID="f1506f0ee5ae0e37dc7c3f1bab9db3a2151ee378ef978f06310c406e3b62a13b" Jan 20 18:14:34 crc kubenswrapper[4558]: I0120 18:14:34.681085 4558 scope.go:117] "RemoveContainer" containerID="ecb3d0f3bf1d477d3fea81ea29ae6dbfb39eff4f2f6120f21ae3ef0a4efba324" Jan 20 18:14:34 crc kubenswrapper[4558]: I0120 18:14:34.700005 4558 scope.go:117] "RemoveContainer" containerID="52089be45c0734a738c9d20c13735ae48fb1812f3e7d83fd3be74d42aa4a1e37" Jan 20 18:14:34 crc kubenswrapper[4558]: I0120 18:14:34.720239 4558 scope.go:117] "RemoveContainer" containerID="2ff3e4d2ad64773a61f739b0fa6f4ece97c0c40fd69baf1df699ce574d95ed13" Jan 20 18:14:34 crc kubenswrapper[4558]: I0120 18:14:34.739656 4558 scope.go:117] "RemoveContainer" containerID="5ed05159451f1f29587c6b9b2f7f4cebe006caa13c327bf5763d8a47e37ee283" Jan 20 18:14:34 crc kubenswrapper[4558]: I0120 18:14:34.757671 4558 scope.go:117] "RemoveContainer" containerID="0e2eb3da46b7bd583c44ebc8b13f7d1c4b0aa944896d7eec0b5487a1c1e21756" Jan 20 18:14:34 crc kubenswrapper[4558]: I0120 18:14:34.775326 4558 scope.go:117] "RemoveContainer" containerID="3b80173f89c66eaddb2369263d220ac89d3df4fc48524e79192762bb9e4aed17" Jan 20 18:14:34 crc 
kubenswrapper[4558]: I0120 18:14:34.792949 4558 scope.go:117] "RemoveContainer" containerID="0fc1bcd802de9cc79769c26cb5ab84e85afa3e5dc41c20e72994c498aca1b4c0" Jan 20 18:14:34 crc kubenswrapper[4558]: I0120 18:14:34.807592 4558 scope.go:117] "RemoveContainer" containerID="5fea48a489b7236322aa035fd26c8bc64999234427695faf363f012948d76b1b" Jan 20 18:14:34 crc kubenswrapper[4558]: I0120 18:14:34.826749 4558 scope.go:117] "RemoveContainer" containerID="aae98456e24c83d36df09930721945ddc3c5c2f923ce2b243599e54208df3542" Jan 20 18:14:34 crc kubenswrapper[4558]: I0120 18:14:34.844711 4558 scope.go:117] "RemoveContainer" containerID="0df63fae7a1cc2e80032258b3ae2cdc72ec9d33fa9c69df318d943fbe7a5d952" Jan 20 18:14:34 crc kubenswrapper[4558]: I0120 18:14:34.869209 4558 scope.go:117] "RemoveContainer" containerID="90c82ce4dee750ed2e69c30f5db645a7b43c50b51022061a3c35865af695b662" Jan 20 18:14:34 crc kubenswrapper[4558]: I0120 18:14:34.890718 4558 scope.go:117] "RemoveContainer" containerID="b21d03a1bce545072a0f5d40d609ff0a56267d13add877890bddf1f91228ba7f" Jan 20 18:14:34 crc kubenswrapper[4558]: I0120 18:14:34.906558 4558 scope.go:117] "RemoveContainer" containerID="5a0b4e0b6774ce77a765f98980c1f9ce2e4138aebc6117795de6f7f8346a3737" Jan 20 18:14:34 crc kubenswrapper[4558]: I0120 18:14:34.924206 4558 scope.go:117] "RemoveContainer" containerID="6aa6ab1b79913f16a8c37d31d4f54307176b8cfa305a050b83890240a717629c" Jan 20 18:14:34 crc kubenswrapper[4558]: I0120 18:14:34.947291 4558 scope.go:117] "RemoveContainer" containerID="d9e7d8f26c60d79188cdb6ee5f48b78d1911f3dfef9d85293a3d9cfc2efbd204" Jan 20 18:14:34 crc kubenswrapper[4558]: I0120 18:14:34.969257 4558 scope.go:117] "RemoveContainer" containerID="b13c9d83994729b60e9bfa90e3160a1b65ced814b36191c04ca3d9619fcd97dc" Jan 20 18:14:34 crc kubenswrapper[4558]: I0120 18:14:34.985748 4558 scope.go:117] "RemoveContainer" containerID="14786d8dc9d059a3c99ac1f27b72a5111fcedcf4c96caa245bb691b274f41a26" Jan 20 18:14:34 crc kubenswrapper[4558]: I0120 18:14:34.999871 4558 scope.go:117] "RemoveContainer" containerID="d569da36da81b9b9b27be2b8f5c9a33c02392ee1c64ee171850ebd6b90d34bc4" Jan 20 18:14:35 crc kubenswrapper[4558]: I0120 18:14:35.014903 4558 scope.go:117] "RemoveContainer" containerID="45317194cb1488747739e7ccc74659e1093db6bf0596f5b55836038289216605" Jan 20 18:14:35 crc kubenswrapper[4558]: I0120 18:14:35.028426 4558 scope.go:117] "RemoveContainer" containerID="ad003af07a804bd7051853e53266eefbbf5253d38becc3b1787e7568e72d2f2c" Jan 20 18:14:35 crc kubenswrapper[4558]: I0120 18:14:35.045415 4558 scope.go:117] "RemoveContainer" containerID="5ef1d53f579e55a1d5029b72751834ff89a20ca0419624b102fe6231908e54b1" Jan 20 18:14:35 crc kubenswrapper[4558]: I0120 18:14:35.061228 4558 scope.go:117] "RemoveContainer" containerID="fa8486dccca9e1fc6947a83a66596efd783eb166c15f19dff4450bd119503c33" Jan 20 18:14:35 crc kubenswrapper[4558]: I0120 18:14:35.076397 4558 scope.go:117] "RemoveContainer" containerID="33e3f76ef54188f55f12e6b1007e9b83a0fe891ae572d234e642967bbecc1af7" Jan 20 18:14:35 crc kubenswrapper[4558]: I0120 18:14:35.094996 4558 scope.go:117] "RemoveContainer" containerID="51ecd2e724af29b0f6a51dc85e4ad8a754870e77bf87c5ae416f003c008d7caf" Jan 20 18:14:35 crc kubenswrapper[4558]: I0120 18:14:35.112206 4558 scope.go:117] "RemoveContainer" containerID="e6cde12ec6613c6ca86331ad5562eff59e938f8143553a59aa1e652328de6c2b" Jan 20 18:14:35 crc kubenswrapper[4558]: I0120 18:14:35.126487 4558 scope.go:117] "RemoveContainer" 
containerID="c4f4ac4e6be0e8cf007dc4d69369b47f2f03fdf263af1c11172de431224ca4f9" Jan 20 18:14:35 crc kubenswrapper[4558]: I0120 18:14:35.146783 4558 scope.go:117] "RemoveContainer" containerID="5458e46e56f7d65eab79c3c48391c11132277e97ba84585b2d13259aa9368eb2" Jan 20 18:14:35 crc kubenswrapper[4558]: I0120 18:14:35.164971 4558 scope.go:117] "RemoveContainer" containerID="64bcb4cba85895bee43f773af00ef7e07c4d7bd80d07b323ae48eeb3ec59dd5a" Jan 20 18:14:35 crc kubenswrapper[4558]: I0120 18:14:35.184786 4558 scope.go:117] "RemoveContainer" containerID="bd95c9802be7aa3b09b45d31c56df20489fe579fe93a21fade47b46985baa4cc" Jan 20 18:14:35 crc kubenswrapper[4558]: I0120 18:14:35.199417 4558 scope.go:117] "RemoveContainer" containerID="96b14ccca580d6f73044405aa0fb79aa275da3630e3f85445651826254e89bab" Jan 20 18:14:35 crc kubenswrapper[4558]: I0120 18:14:35.213907 4558 scope.go:117] "RemoveContainer" containerID="9b5a1aa8966e4b09819fa1a3e8844f0277e5fd276937d6ef5896353e424ac13b" Jan 20 18:14:35 crc kubenswrapper[4558]: I0120 18:14:35.229964 4558 scope.go:117] "RemoveContainer" containerID="2ef63481a765808418e8fe2316f84a64976e170f6b42473a8954a04e0fa1bbe7" Jan 20 18:14:35 crc kubenswrapper[4558]: I0120 18:14:35.243736 4558 scope.go:117] "RemoveContainer" containerID="50d9b3907efdf86bfbc435a6c7984f8ffc06ca5383c5f947cc9ccf10c522de7b" Jan 20 18:14:35 crc kubenswrapper[4558]: I0120 18:14:35.258889 4558 scope.go:117] "RemoveContainer" containerID="0a652eeb8cc936df965f49bb81a148ca20240793d59d9727a5b1b22b3298a64f" Jan 20 18:14:35 crc kubenswrapper[4558]: I0120 18:14:35.278808 4558 scope.go:117] "RemoveContainer" containerID="63231d660c87582107f72247293a009f16107c11a93e6a345163da171fa24b3e" Jan 20 18:14:39 crc kubenswrapper[4558]: I0120 18:14:39.456951 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720brl7tf"] Jan 20 18:14:39 crc kubenswrapper[4558]: E0120 18:14:39.457394 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca607b46-4184-4af0-b4b5-e58600e59ea7" containerName="registry-server" Jan 20 18:14:39 crc kubenswrapper[4558]: I0120 18:14:39.457425 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca607b46-4184-4af0-b4b5-e58600e59ea7" containerName="registry-server" Jan 20 18:14:39 crc kubenswrapper[4558]: I0120 18:14:39.457592 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ca607b46-4184-4af0-b4b5-e58600e59ea7" containerName="registry-server" Jan 20 18:14:39 crc kubenswrapper[4558]: I0120 18:14:39.458774 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720brl7tf" Jan 20 18:14:39 crc kubenswrapper[4558]: I0120 18:14:39.461891 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-7xp5w" Jan 20 18:14:39 crc kubenswrapper[4558]: I0120 18:14:39.465145 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720brl7tf"] Jan 20 18:14:39 crc kubenswrapper[4558]: I0120 18:14:39.541155 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lmcp2\" (UniqueName: \"kubernetes.io/projected/4d06e4b6-901f-4a66-ad62-400734d17967-kube-api-access-lmcp2\") pod \"a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720brl7tf\" (UID: \"4d06e4b6-901f-4a66-ad62-400734d17967\") " pod="openstack-operators/a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720brl7tf" Jan 20 18:14:39 crc kubenswrapper[4558]: I0120 18:14:39.541329 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4d06e4b6-901f-4a66-ad62-400734d17967-util\") pod \"a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720brl7tf\" (UID: \"4d06e4b6-901f-4a66-ad62-400734d17967\") " pod="openstack-operators/a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720brl7tf" Jan 20 18:14:39 crc kubenswrapper[4558]: I0120 18:14:39.541473 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4d06e4b6-901f-4a66-ad62-400734d17967-bundle\") pod \"a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720brl7tf\" (UID: \"4d06e4b6-901f-4a66-ad62-400734d17967\") " pod="openstack-operators/a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720brl7tf" Jan 20 18:14:39 crc kubenswrapper[4558]: I0120 18:14:39.642476 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4d06e4b6-901f-4a66-ad62-400734d17967-util\") pod \"a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720brl7tf\" (UID: \"4d06e4b6-901f-4a66-ad62-400734d17967\") " pod="openstack-operators/a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720brl7tf" Jan 20 18:14:39 crc kubenswrapper[4558]: I0120 18:14:39.642566 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4d06e4b6-901f-4a66-ad62-400734d17967-bundle\") pod \"a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720brl7tf\" (UID: \"4d06e4b6-901f-4a66-ad62-400734d17967\") " pod="openstack-operators/a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720brl7tf" Jan 20 18:14:39 crc kubenswrapper[4558]: I0120 18:14:39.642618 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lmcp2\" (UniqueName: \"kubernetes.io/projected/4d06e4b6-901f-4a66-ad62-400734d17967-kube-api-access-lmcp2\") pod \"a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720brl7tf\" (UID: \"4d06e4b6-901f-4a66-ad62-400734d17967\") " pod="openstack-operators/a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720brl7tf" Jan 20 18:14:39 crc kubenswrapper[4558]: I0120 18:14:39.643152 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/4d06e4b6-901f-4a66-ad62-400734d17967-util\") pod \"a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720brl7tf\" (UID: \"4d06e4b6-901f-4a66-ad62-400734d17967\") " pod="openstack-operators/a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720brl7tf" Jan 20 18:14:39 crc kubenswrapper[4558]: I0120 18:14:39.643242 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4d06e4b6-901f-4a66-ad62-400734d17967-bundle\") pod \"a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720brl7tf\" (UID: \"4d06e4b6-901f-4a66-ad62-400734d17967\") " pod="openstack-operators/a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720brl7tf" Jan 20 18:14:39 crc kubenswrapper[4558]: I0120 18:14:39.662728 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lmcp2\" (UniqueName: \"kubernetes.io/projected/4d06e4b6-901f-4a66-ad62-400734d17967-kube-api-access-lmcp2\") pod \"a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720brl7tf\" (UID: \"4d06e4b6-901f-4a66-ad62-400734d17967\") " pod="openstack-operators/a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720brl7tf" Jan 20 18:14:39 crc kubenswrapper[4558]: I0120 18:14:39.780677 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720brl7tf" Jan 20 18:14:39 crc kubenswrapper[4558]: I0120 18:14:39.964788 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720brl7tf"] Jan 20 18:14:39 crc kubenswrapper[4558]: W0120 18:14:39.967494 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4d06e4b6_901f_4a66_ad62_400734d17967.slice/crio-483d1a9e229cd698a556dec74d92ada33c0b3addef92a967ac453ceba3fc9155 WatchSource:0}: Error finding container 483d1a9e229cd698a556dec74d92ada33c0b3addef92a967ac453ceba3fc9155: Status 404 returned error can't find the container with id 483d1a9e229cd698a556dec74d92ada33c0b3addef92a967ac453ceba3fc9155 Jan 20 18:14:40 crc kubenswrapper[4558]: I0120 18:14:40.653316 4558 generic.go:334] "Generic (PLEG): container finished" podID="4d06e4b6-901f-4a66-ad62-400734d17967" containerID="900efa0bec51e667e70b835c7d93bb351d3142d533d8120cb23e3f20fe6201a6" exitCode=0 Jan 20 18:14:40 crc kubenswrapper[4558]: I0120 18:14:40.653369 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720brl7tf" event={"ID":"4d06e4b6-901f-4a66-ad62-400734d17967","Type":"ContainerDied","Data":"900efa0bec51e667e70b835c7d93bb351d3142d533d8120cb23e3f20fe6201a6"} Jan 20 18:14:40 crc kubenswrapper[4558]: I0120 18:14:40.653431 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720brl7tf" event={"ID":"4d06e4b6-901f-4a66-ad62-400734d17967","Type":"ContainerStarted","Data":"483d1a9e229cd698a556dec74d92ada33c0b3addef92a967ac453ceba3fc9155"} Jan 20 18:14:42 crc kubenswrapper[4558]: I0120 18:14:42.669667 4558 generic.go:334] "Generic (PLEG): container finished" podID="4d06e4b6-901f-4a66-ad62-400734d17967" containerID="4f3b711c12d98bb4804e9912d09576b6818cfe804969e34a1f356abdcf1cb124" exitCode=0 Jan 20 18:14:42 crc kubenswrapper[4558]: I0120 18:14:42.669899 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720brl7tf" event={"ID":"4d06e4b6-901f-4a66-ad62-400734d17967","Type":"ContainerDied","Data":"4f3b711c12d98bb4804e9912d09576b6818cfe804969e34a1f356abdcf1cb124"} Jan 20 18:14:43 crc kubenswrapper[4558]: I0120 18:14:43.680108 4558 generic.go:334] "Generic (PLEG): container finished" podID="4d06e4b6-901f-4a66-ad62-400734d17967" containerID="f3a9e257ed5c765d05f4fa03766918f5b9e3b36814df0c3a6368a2e78c7a1036" exitCode=0 Jan 20 18:14:43 crc kubenswrapper[4558]: I0120 18:14:43.680188 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720brl7tf" event={"ID":"4d06e4b6-901f-4a66-ad62-400734d17967","Type":"ContainerDied","Data":"f3a9e257ed5c765d05f4fa03766918f5b9e3b36814df0c3a6368a2e78c7a1036"} Jan 20 18:14:44 crc kubenswrapper[4558]: I0120 18:14:44.889988 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720brl7tf" Jan 20 18:14:45 crc kubenswrapper[4558]: I0120 18:14:45.014396 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4d06e4b6-901f-4a66-ad62-400734d17967-bundle\") pod \"4d06e4b6-901f-4a66-ad62-400734d17967\" (UID: \"4d06e4b6-901f-4a66-ad62-400734d17967\") " Jan 20 18:14:45 crc kubenswrapper[4558]: I0120 18:14:45.014458 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lmcp2\" (UniqueName: \"kubernetes.io/projected/4d06e4b6-901f-4a66-ad62-400734d17967-kube-api-access-lmcp2\") pod \"4d06e4b6-901f-4a66-ad62-400734d17967\" (UID: \"4d06e4b6-901f-4a66-ad62-400734d17967\") " Jan 20 18:14:45 crc kubenswrapper[4558]: I0120 18:14:45.014578 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4d06e4b6-901f-4a66-ad62-400734d17967-util\") pod \"4d06e4b6-901f-4a66-ad62-400734d17967\" (UID: \"4d06e4b6-901f-4a66-ad62-400734d17967\") " Jan 20 18:14:45 crc kubenswrapper[4558]: I0120 18:14:45.016211 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4d06e4b6-901f-4a66-ad62-400734d17967-bundle" (OuterVolumeSpecName: "bundle") pod "4d06e4b6-901f-4a66-ad62-400734d17967" (UID: "4d06e4b6-901f-4a66-ad62-400734d17967"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:14:45 crc kubenswrapper[4558]: I0120 18:14:45.021570 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4d06e4b6-901f-4a66-ad62-400734d17967-kube-api-access-lmcp2" (OuterVolumeSpecName: "kube-api-access-lmcp2") pod "4d06e4b6-901f-4a66-ad62-400734d17967" (UID: "4d06e4b6-901f-4a66-ad62-400734d17967"). InnerVolumeSpecName "kube-api-access-lmcp2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:14:45 crc kubenswrapper[4558]: I0120 18:14:45.027284 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4d06e4b6-901f-4a66-ad62-400734d17967-util" (OuterVolumeSpecName: "util") pod "4d06e4b6-901f-4a66-ad62-400734d17967" (UID: "4d06e4b6-901f-4a66-ad62-400734d17967"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:14:45 crc kubenswrapper[4558]: I0120 18:14:45.117133 4558 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4d06e4b6-901f-4a66-ad62-400734d17967-util\") on node \"crc\" DevicePath \"\"" Jan 20 18:14:45 crc kubenswrapper[4558]: I0120 18:14:45.117196 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lmcp2\" (UniqueName: \"kubernetes.io/projected/4d06e4b6-901f-4a66-ad62-400734d17967-kube-api-access-lmcp2\") on node \"crc\" DevicePath \"\"" Jan 20 18:14:45 crc kubenswrapper[4558]: I0120 18:14:45.117211 4558 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4d06e4b6-901f-4a66-ad62-400734d17967-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:14:45 crc kubenswrapper[4558]: I0120 18:14:45.704578 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720brl7tf" event={"ID":"4d06e4b6-901f-4a66-ad62-400734d17967","Type":"ContainerDied","Data":"483d1a9e229cd698a556dec74d92ada33c0b3addef92a967ac453ceba3fc9155"} Jan 20 18:14:45 crc kubenswrapper[4558]: I0120 18:14:45.704634 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="483d1a9e229cd698a556dec74d92ada33c0b3addef92a967ac453ceba3fc9155" Jan 20 18:14:45 crc kubenswrapper[4558]: I0120 18:14:45.704725 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720brl7tf" Jan 20 18:14:57 crc kubenswrapper[4558]: I0120 18:14:57.330498 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 18:14:57 crc kubenswrapper[4558]: I0120 18:14:57.330957 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 18:14:57 crc kubenswrapper[4558]: I0120 18:14:57.331021 4558 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" Jan 20 18:14:57 crc kubenswrapper[4558]: I0120 18:14:57.331997 4558 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"27314e1a8af2e8df8ecd2e7a06cbb630afbfecd8ca2338a1f199cf92f982f581"} pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 20 18:14:57 crc kubenswrapper[4558]: I0120 18:14:57.332078 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" containerID="cri-o://27314e1a8af2e8df8ecd2e7a06cbb630afbfecd8ca2338a1f199cf92f982f581" gracePeriod=600 Jan 20 18:14:57 crc kubenswrapper[4558]: E0120 18:14:57.460426 4558 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:14:57 crc kubenswrapper[4558]: I0120 18:14:57.789139 4558 generic.go:334] "Generic (PLEG): container finished" podID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerID="27314e1a8af2e8df8ecd2e7a06cbb630afbfecd8ca2338a1f199cf92f982f581" exitCode=0 Jan 20 18:14:57 crc kubenswrapper[4558]: I0120 18:14:57.789206 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerDied","Data":"27314e1a8af2e8df8ecd2e7a06cbb630afbfecd8ca2338a1f199cf92f982f581"} Jan 20 18:14:57 crc kubenswrapper[4558]: I0120 18:14:57.789272 4558 scope.go:117] "RemoveContainer" containerID="fdbebed64bed48194784e96161b865cfbbfe6287c24bb11eb0fd195beaf71fc7" Jan 20 18:14:57 crc kubenswrapper[4558]: I0120 18:14:57.789845 4558 scope.go:117] "RemoveContainer" containerID="27314e1a8af2e8df8ecd2e7a06cbb630afbfecd8ca2338a1f199cf92f982f581" Jan 20 18:14:57 crc kubenswrapper[4558]: E0120 18:14:57.790150 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:14:58 crc kubenswrapper[4558]: I0120 18:14:58.729044 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-bf56cd8fd-kclzq"] Jan 20 18:14:58 crc kubenswrapper[4558]: E0120 18:14:58.729555 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d06e4b6-901f-4a66-ad62-400734d17967" containerName="extract" Jan 20 18:14:58 crc kubenswrapper[4558]: I0120 18:14:58.729568 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d06e4b6-901f-4a66-ad62-400734d17967" containerName="extract" Jan 20 18:14:58 crc kubenswrapper[4558]: E0120 18:14:58.729582 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d06e4b6-901f-4a66-ad62-400734d17967" containerName="util" Jan 20 18:14:58 crc kubenswrapper[4558]: I0120 18:14:58.729588 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d06e4b6-901f-4a66-ad62-400734d17967" containerName="util" Jan 20 18:14:58 crc kubenswrapper[4558]: E0120 18:14:58.729598 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d06e4b6-901f-4a66-ad62-400734d17967" containerName="pull" Jan 20 18:14:58 crc kubenswrapper[4558]: I0120 18:14:58.729604 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d06e4b6-901f-4a66-ad62-400734d17967" containerName="pull" Jan 20 18:14:58 crc kubenswrapper[4558]: I0120 18:14:58.729703 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="4d06e4b6-901f-4a66-ad62-400734d17967" containerName="extract" Jan 20 18:14:58 crc kubenswrapper[4558]: I0120 18:14:58.730115 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-bf56cd8fd-kclzq" Jan 20 18:14:58 crc kubenswrapper[4558]: I0120 18:14:58.733118 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Jan 20 18:14:58 crc kubenswrapper[4558]: I0120 18:14:58.733323 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-service-cert" Jan 20 18:14:58 crc kubenswrapper[4558]: I0120 18:14:58.733462 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-j49m2" Jan 20 18:14:58 crc kubenswrapper[4558]: I0120 18:14:58.751773 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-bf56cd8fd-kclzq"] Jan 20 18:14:58 crc kubenswrapper[4558]: I0120 18:14:58.799570 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/25c7c232-6d31-4269-a2c2-e8fd3c518c47-apiservice-cert\") pod \"mariadb-operator-controller-manager-bf56cd8fd-kclzq\" (UID: \"25c7c232-6d31-4269-a2c2-e8fd3c518c47\") " pod="openstack-operators/mariadb-operator-controller-manager-bf56cd8fd-kclzq" Jan 20 18:14:58 crc kubenswrapper[4558]: I0120 18:14:58.799613 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/25c7c232-6d31-4269-a2c2-e8fd3c518c47-webhook-cert\") pod \"mariadb-operator-controller-manager-bf56cd8fd-kclzq\" (UID: \"25c7c232-6d31-4269-a2c2-e8fd3c518c47\") " pod="openstack-operators/mariadb-operator-controller-manager-bf56cd8fd-kclzq" Jan 20 18:14:58 crc kubenswrapper[4558]: I0120 18:14:58.799634 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dn2p8\" (UniqueName: \"kubernetes.io/projected/25c7c232-6d31-4269-a2c2-e8fd3c518c47-kube-api-access-dn2p8\") pod \"mariadb-operator-controller-manager-bf56cd8fd-kclzq\" (UID: \"25c7c232-6d31-4269-a2c2-e8fd3c518c47\") " pod="openstack-operators/mariadb-operator-controller-manager-bf56cd8fd-kclzq" Jan 20 18:14:58 crc kubenswrapper[4558]: I0120 18:14:58.900960 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/25c7c232-6d31-4269-a2c2-e8fd3c518c47-apiservice-cert\") pod \"mariadb-operator-controller-manager-bf56cd8fd-kclzq\" (UID: \"25c7c232-6d31-4269-a2c2-e8fd3c518c47\") " pod="openstack-operators/mariadb-operator-controller-manager-bf56cd8fd-kclzq" Jan 20 18:14:58 crc kubenswrapper[4558]: I0120 18:14:58.901022 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/25c7c232-6d31-4269-a2c2-e8fd3c518c47-webhook-cert\") pod \"mariadb-operator-controller-manager-bf56cd8fd-kclzq\" (UID: \"25c7c232-6d31-4269-a2c2-e8fd3c518c47\") " pod="openstack-operators/mariadb-operator-controller-manager-bf56cd8fd-kclzq" Jan 20 18:14:58 crc kubenswrapper[4558]: I0120 18:14:58.901051 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dn2p8\" (UniqueName: \"kubernetes.io/projected/25c7c232-6d31-4269-a2c2-e8fd3c518c47-kube-api-access-dn2p8\") pod \"mariadb-operator-controller-manager-bf56cd8fd-kclzq\" (UID: \"25c7c232-6d31-4269-a2c2-e8fd3c518c47\") " 
pod="openstack-operators/mariadb-operator-controller-manager-bf56cd8fd-kclzq" Jan 20 18:14:58 crc kubenswrapper[4558]: I0120 18:14:58.907329 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/25c7c232-6d31-4269-a2c2-e8fd3c518c47-webhook-cert\") pod \"mariadb-operator-controller-manager-bf56cd8fd-kclzq\" (UID: \"25c7c232-6d31-4269-a2c2-e8fd3c518c47\") " pod="openstack-operators/mariadb-operator-controller-manager-bf56cd8fd-kclzq" Jan 20 18:14:58 crc kubenswrapper[4558]: I0120 18:14:58.908075 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/25c7c232-6d31-4269-a2c2-e8fd3c518c47-apiservice-cert\") pod \"mariadb-operator-controller-manager-bf56cd8fd-kclzq\" (UID: \"25c7c232-6d31-4269-a2c2-e8fd3c518c47\") " pod="openstack-operators/mariadb-operator-controller-manager-bf56cd8fd-kclzq" Jan 20 18:14:58 crc kubenswrapper[4558]: I0120 18:14:58.914972 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dn2p8\" (UniqueName: \"kubernetes.io/projected/25c7c232-6d31-4269-a2c2-e8fd3c518c47-kube-api-access-dn2p8\") pod \"mariadb-operator-controller-manager-bf56cd8fd-kclzq\" (UID: \"25c7c232-6d31-4269-a2c2-e8fd3c518c47\") " pod="openstack-operators/mariadb-operator-controller-manager-bf56cd8fd-kclzq" Jan 20 18:14:59 crc kubenswrapper[4558]: I0120 18:14:59.047408 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-bf56cd8fd-kclzq" Jan 20 18:14:59 crc kubenswrapper[4558]: I0120 18:14:59.444840 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-bf56cd8fd-kclzq"] Jan 20 18:14:59 crc kubenswrapper[4558]: I0120 18:14:59.806516 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-bf56cd8fd-kclzq" event={"ID":"25c7c232-6d31-4269-a2c2-e8fd3c518c47","Type":"ContainerStarted","Data":"577d0fcc4ecb9a02415f8f18ed70fe9eb367dc9860ce1dd4b8b3ecbf01c0a560"} Jan 20 18:15:00 crc kubenswrapper[4558]: I0120 18:15:00.122373 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29482215-r8bkj"] Jan 20 18:15:00 crc kubenswrapper[4558]: I0120 18:15:00.123177 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29482215-r8bkj" Jan 20 18:15:00 crc kubenswrapper[4558]: I0120 18:15:00.124601 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 20 18:15:00 crc kubenswrapper[4558]: I0120 18:15:00.126983 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 20 18:15:00 crc kubenswrapper[4558]: I0120 18:15:00.130130 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29482215-r8bkj"] Jan 20 18:15:00 crc kubenswrapper[4558]: I0120 18:15:00.220850 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pgfd9\" (UniqueName: \"kubernetes.io/projected/cfc4c95c-9901-47d1-af58-db23e6350202-kube-api-access-pgfd9\") pod \"collect-profiles-29482215-r8bkj\" (UID: \"cfc4c95c-9901-47d1-af58-db23e6350202\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482215-r8bkj" Jan 20 18:15:00 crc kubenswrapper[4558]: I0120 18:15:00.220914 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/cfc4c95c-9901-47d1-af58-db23e6350202-secret-volume\") pod \"collect-profiles-29482215-r8bkj\" (UID: \"cfc4c95c-9901-47d1-af58-db23e6350202\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482215-r8bkj" Jan 20 18:15:00 crc kubenswrapper[4558]: I0120 18:15:00.221083 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/cfc4c95c-9901-47d1-af58-db23e6350202-config-volume\") pod \"collect-profiles-29482215-r8bkj\" (UID: \"cfc4c95c-9901-47d1-af58-db23e6350202\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482215-r8bkj" Jan 20 18:15:00 crc kubenswrapper[4558]: I0120 18:15:00.322810 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pgfd9\" (UniqueName: \"kubernetes.io/projected/cfc4c95c-9901-47d1-af58-db23e6350202-kube-api-access-pgfd9\") pod \"collect-profiles-29482215-r8bkj\" (UID: \"cfc4c95c-9901-47d1-af58-db23e6350202\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482215-r8bkj" Jan 20 18:15:00 crc kubenswrapper[4558]: I0120 18:15:00.322896 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/cfc4c95c-9901-47d1-af58-db23e6350202-secret-volume\") pod \"collect-profiles-29482215-r8bkj\" (UID: \"cfc4c95c-9901-47d1-af58-db23e6350202\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482215-r8bkj" Jan 20 18:15:00 crc kubenswrapper[4558]: I0120 18:15:00.322947 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/cfc4c95c-9901-47d1-af58-db23e6350202-config-volume\") pod \"collect-profiles-29482215-r8bkj\" (UID: \"cfc4c95c-9901-47d1-af58-db23e6350202\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482215-r8bkj" Jan 20 18:15:00 crc kubenswrapper[4558]: I0120 18:15:00.324186 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/cfc4c95c-9901-47d1-af58-db23e6350202-config-volume\") pod 
\"collect-profiles-29482215-r8bkj\" (UID: \"cfc4c95c-9901-47d1-af58-db23e6350202\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482215-r8bkj" Jan 20 18:15:00 crc kubenswrapper[4558]: I0120 18:15:00.328708 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/cfc4c95c-9901-47d1-af58-db23e6350202-secret-volume\") pod \"collect-profiles-29482215-r8bkj\" (UID: \"cfc4c95c-9901-47d1-af58-db23e6350202\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482215-r8bkj" Jan 20 18:15:00 crc kubenswrapper[4558]: I0120 18:15:00.345431 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pgfd9\" (UniqueName: \"kubernetes.io/projected/cfc4c95c-9901-47d1-af58-db23e6350202-kube-api-access-pgfd9\") pod \"collect-profiles-29482215-r8bkj\" (UID: \"cfc4c95c-9901-47d1-af58-db23e6350202\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482215-r8bkj" Jan 20 18:15:00 crc kubenswrapper[4558]: I0120 18:15:00.442769 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29482215-r8bkj" Jan 20 18:15:00 crc kubenswrapper[4558]: I0120 18:15:00.877262 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29482215-r8bkj"] Jan 20 18:15:00 crc kubenswrapper[4558]: W0120 18:15:00.879507 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcfc4c95c_9901_47d1_af58_db23e6350202.slice/crio-ae0f9b18b39ce581c3802472af57a772ac7174dc9fddb333c3057d33b7084f1a WatchSource:0}: Error finding container ae0f9b18b39ce581c3802472af57a772ac7174dc9fddb333c3057d33b7084f1a: Status 404 returned error can't find the container with id ae0f9b18b39ce581c3802472af57a772ac7174dc9fddb333c3057d33b7084f1a Jan 20 18:15:01 crc kubenswrapper[4558]: I0120 18:15:01.846420 4558 generic.go:334] "Generic (PLEG): container finished" podID="cfc4c95c-9901-47d1-af58-db23e6350202" containerID="f0ab58e4952452d7ee1b43f6dac5ef59c7f4f4ce94f157921ec816ea03694353" exitCode=0 Jan 20 18:15:01 crc kubenswrapper[4558]: I0120 18:15:01.846480 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29482215-r8bkj" event={"ID":"cfc4c95c-9901-47d1-af58-db23e6350202","Type":"ContainerDied","Data":"f0ab58e4952452d7ee1b43f6dac5ef59c7f4f4ce94f157921ec816ea03694353"} Jan 20 18:15:01 crc kubenswrapper[4558]: I0120 18:15:01.846513 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29482215-r8bkj" event={"ID":"cfc4c95c-9901-47d1-af58-db23e6350202","Type":"ContainerStarted","Data":"ae0f9b18b39ce581c3802472af57a772ac7174dc9fddb333c3057d33b7084f1a"} Jan 20 18:15:03 crc kubenswrapper[4558]: I0120 18:15:03.106563 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29482215-r8bkj" Jan 20 18:15:03 crc kubenswrapper[4558]: I0120 18:15:03.171210 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pgfd9\" (UniqueName: \"kubernetes.io/projected/cfc4c95c-9901-47d1-af58-db23e6350202-kube-api-access-pgfd9\") pod \"cfc4c95c-9901-47d1-af58-db23e6350202\" (UID: \"cfc4c95c-9901-47d1-af58-db23e6350202\") " Jan 20 18:15:03 crc kubenswrapper[4558]: I0120 18:15:03.171273 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/cfc4c95c-9901-47d1-af58-db23e6350202-secret-volume\") pod \"cfc4c95c-9901-47d1-af58-db23e6350202\" (UID: \"cfc4c95c-9901-47d1-af58-db23e6350202\") " Jan 20 18:15:03 crc kubenswrapper[4558]: I0120 18:15:03.171381 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/cfc4c95c-9901-47d1-af58-db23e6350202-config-volume\") pod \"cfc4c95c-9901-47d1-af58-db23e6350202\" (UID: \"cfc4c95c-9901-47d1-af58-db23e6350202\") " Jan 20 18:15:03 crc kubenswrapper[4558]: I0120 18:15:03.172542 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cfc4c95c-9901-47d1-af58-db23e6350202-config-volume" (OuterVolumeSpecName: "config-volume") pod "cfc4c95c-9901-47d1-af58-db23e6350202" (UID: "cfc4c95c-9901-47d1-af58-db23e6350202"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:15:03 crc kubenswrapper[4558]: I0120 18:15:03.178475 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cfc4c95c-9901-47d1-af58-db23e6350202-kube-api-access-pgfd9" (OuterVolumeSpecName: "kube-api-access-pgfd9") pod "cfc4c95c-9901-47d1-af58-db23e6350202" (UID: "cfc4c95c-9901-47d1-af58-db23e6350202"). InnerVolumeSpecName "kube-api-access-pgfd9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:15:03 crc kubenswrapper[4558]: I0120 18:15:03.178660 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cfc4c95c-9901-47d1-af58-db23e6350202-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "cfc4c95c-9901-47d1-af58-db23e6350202" (UID: "cfc4c95c-9901-47d1-af58-db23e6350202"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:15:03 crc kubenswrapper[4558]: I0120 18:15:03.272777 4558 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/cfc4c95c-9901-47d1-af58-db23e6350202-config-volume\") on node \"crc\" DevicePath \"\"" Jan 20 18:15:03 crc kubenswrapper[4558]: I0120 18:15:03.272813 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pgfd9\" (UniqueName: \"kubernetes.io/projected/cfc4c95c-9901-47d1-af58-db23e6350202-kube-api-access-pgfd9\") on node \"crc\" DevicePath \"\"" Jan 20 18:15:03 crc kubenswrapper[4558]: I0120 18:15:03.272826 4558 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/cfc4c95c-9901-47d1-af58-db23e6350202-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 20 18:15:03 crc kubenswrapper[4558]: I0120 18:15:03.862008 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29482215-r8bkj" event={"ID":"cfc4c95c-9901-47d1-af58-db23e6350202","Type":"ContainerDied","Data":"ae0f9b18b39ce581c3802472af57a772ac7174dc9fddb333c3057d33b7084f1a"} Jan 20 18:15:03 crc kubenswrapper[4558]: I0120 18:15:03.862444 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ae0f9b18b39ce581c3802472af57a772ac7174dc9fddb333c3057d33b7084f1a" Jan 20 18:15:03 crc kubenswrapper[4558]: I0120 18:15:03.862086 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29482215-r8bkj" Jan 20 18:15:04 crc kubenswrapper[4558]: I0120 18:15:04.177540 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29482170-8mhr7"] Jan 20 18:15:04 crc kubenswrapper[4558]: I0120 18:15:04.181182 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29482170-8mhr7"] Jan 20 18:15:04 crc kubenswrapper[4558]: I0120 18:15:04.590940 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c3f46696-aea2-4531-9e00-687b70338139" path="/var/lib/kubelet/pods/c3f46696-aea2-4531-9e00-687b70338139/volumes" Jan 20 18:15:05 crc kubenswrapper[4558]: I0120 18:15:05.877586 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-bf56cd8fd-kclzq" event={"ID":"25c7c232-6d31-4269-a2c2-e8fd3c518c47","Type":"ContainerStarted","Data":"d6ed4311e95b7fd12f2078100b9bf4ffbdbac97da3cf5889bb2ec94e474034fd"} Jan 20 18:15:05 crc kubenswrapper[4558]: I0120 18:15:05.877881 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-bf56cd8fd-kclzq" Jan 20 18:15:05 crc kubenswrapper[4558]: I0120 18:15:05.895006 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-bf56cd8fd-kclzq" podStartSLOduration=2.519855552 podStartE2EDuration="7.894975652s" podCreationTimestamp="2026-01-20 18:14:58 +0000 UTC" firstStartedPulling="2026-01-20 18:14:59.45639435 +0000 UTC m=+5593.216732318" lastFinishedPulling="2026-01-20 18:15:04.831514451 +0000 UTC m=+5598.591852418" observedRunningTime="2026-01-20 18:15:05.891077974 +0000 UTC m=+5599.651415940" watchObservedRunningTime="2026-01-20 18:15:05.894975652 +0000 UTC m=+5599.655313609" Jan 20 18:15:09 crc kubenswrapper[4558]: I0120 
18:15:09.054285 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-bf56cd8fd-kclzq" Jan 20 18:15:10 crc kubenswrapper[4558]: I0120 18:15:10.566611 4558 scope.go:117] "RemoveContainer" containerID="27314e1a8af2e8df8ecd2e7a06cbb630afbfecd8ca2338a1f199cf92f982f581" Jan 20 18:15:10 crc kubenswrapper[4558]: E0120 18:15:10.567087 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:15:11 crc kubenswrapper[4558]: I0120 18:15:11.090055 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-index-7gcsw"] Jan 20 18:15:11 crc kubenswrapper[4558]: E0120 18:15:11.090389 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cfc4c95c-9901-47d1-af58-db23e6350202" containerName="collect-profiles" Jan 20 18:15:11 crc kubenswrapper[4558]: I0120 18:15:11.090404 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="cfc4c95c-9901-47d1-af58-db23e6350202" containerName="collect-profiles" Jan 20 18:15:11 crc kubenswrapper[4558]: I0120 18:15:11.090526 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="cfc4c95c-9901-47d1-af58-db23e6350202" containerName="collect-profiles" Jan 20 18:15:11 crc kubenswrapper[4558]: I0120 18:15:11.091026 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-index-7gcsw" Jan 20 18:15:11 crc kubenswrapper[4558]: I0120 18:15:11.096128 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-index-dockercfg-dwtsn" Jan 20 18:15:11 crc kubenswrapper[4558]: I0120 18:15:11.103423 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-index-7gcsw"] Jan 20 18:15:11 crc kubenswrapper[4558]: I0120 18:15:11.276768 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6jjzj\" (UniqueName: \"kubernetes.io/projected/addb5163-4e3e-4034-a741-0dde47458e80-kube-api-access-6jjzj\") pod \"infra-operator-index-7gcsw\" (UID: \"addb5163-4e3e-4034-a741-0dde47458e80\") " pod="openstack-operators/infra-operator-index-7gcsw" Jan 20 18:15:11 crc kubenswrapper[4558]: I0120 18:15:11.379028 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6jjzj\" (UniqueName: \"kubernetes.io/projected/addb5163-4e3e-4034-a741-0dde47458e80-kube-api-access-6jjzj\") pod \"infra-operator-index-7gcsw\" (UID: \"addb5163-4e3e-4034-a741-0dde47458e80\") " pod="openstack-operators/infra-operator-index-7gcsw" Jan 20 18:15:11 crc kubenswrapper[4558]: I0120 18:15:11.398987 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6jjzj\" (UniqueName: \"kubernetes.io/projected/addb5163-4e3e-4034-a741-0dde47458e80-kube-api-access-6jjzj\") pod \"infra-operator-index-7gcsw\" (UID: \"addb5163-4e3e-4034-a741-0dde47458e80\") " pod="openstack-operators/infra-operator-index-7gcsw" Jan 20 18:15:11 crc kubenswrapper[4558]: I0120 18:15:11.415076 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-index-7gcsw" Jan 20 18:15:11 crc kubenswrapper[4558]: I0120 18:15:11.813289 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-index-7gcsw"] Jan 20 18:15:11 crc kubenswrapper[4558]: I0120 18:15:11.920839 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-7gcsw" event={"ID":"addb5163-4e3e-4034-a741-0dde47458e80","Type":"ContainerStarted","Data":"149602ee13fa9d3bb949dbbe8490d99a50ab6cbab84f25e883530f0e8fbbb67a"} Jan 20 18:15:13 crc kubenswrapper[4558]: I0120 18:15:13.935881 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-7gcsw" event={"ID":"addb5163-4e3e-4034-a741-0dde47458e80","Type":"ContainerStarted","Data":"7178aebe584a22a96dedaf3c4f7a419120025ca1b8d3358032a3fb99441015bc"} Jan 20 18:15:13 crc kubenswrapper[4558]: I0120 18:15:13.952099 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-index-7gcsw" podStartSLOduration=1.576019163 podStartE2EDuration="2.952077054s" podCreationTimestamp="2026-01-20 18:15:11 +0000 UTC" firstStartedPulling="2026-01-20 18:15:11.820245321 +0000 UTC m=+5605.580583277" lastFinishedPulling="2026-01-20 18:15:13.196303201 +0000 UTC m=+5606.956641168" observedRunningTime="2026-01-20 18:15:13.948874032 +0000 UTC m=+5607.709211998" watchObservedRunningTime="2026-01-20 18:15:13.952077054 +0000 UTC m=+5607.712415020" Jan 20 18:15:15 crc kubenswrapper[4558]: I0120 18:15:15.083485 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/infra-operator-index-7gcsw"] Jan 20 18:15:15 crc kubenswrapper[4558]: I0120 18:15:15.686292 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-index-p6pgw"] Jan 20 18:15:15 crc kubenswrapper[4558]: I0120 18:15:15.687431 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-index-p6pgw" Jan 20 18:15:15 crc kubenswrapper[4558]: I0120 18:15:15.692487 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-index-p6pgw"] Jan 20 18:15:15 crc kubenswrapper[4558]: I0120 18:15:15.843159 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-clwxv\" (UniqueName: \"kubernetes.io/projected/86dc910c-5dc4-4de3-b279-df310dea30f8-kube-api-access-clwxv\") pod \"infra-operator-index-p6pgw\" (UID: \"86dc910c-5dc4-4de3-b279-df310dea30f8\") " pod="openstack-operators/infra-operator-index-p6pgw" Jan 20 18:15:15 crc kubenswrapper[4558]: I0120 18:15:15.945082 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-clwxv\" (UniqueName: \"kubernetes.io/projected/86dc910c-5dc4-4de3-b279-df310dea30f8-kube-api-access-clwxv\") pod \"infra-operator-index-p6pgw\" (UID: \"86dc910c-5dc4-4de3-b279-df310dea30f8\") " pod="openstack-operators/infra-operator-index-p6pgw" Jan 20 18:15:15 crc kubenswrapper[4558]: I0120 18:15:15.952102 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/infra-operator-index-7gcsw" podUID="addb5163-4e3e-4034-a741-0dde47458e80" containerName="registry-server" containerID="cri-o://7178aebe584a22a96dedaf3c4f7a419120025ca1b8d3358032a3fb99441015bc" gracePeriod=2 Jan 20 18:15:15 crc kubenswrapper[4558]: I0120 18:15:15.965494 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-clwxv\" (UniqueName: \"kubernetes.io/projected/86dc910c-5dc4-4de3-b279-df310dea30f8-kube-api-access-clwxv\") pod \"infra-operator-index-p6pgw\" (UID: \"86dc910c-5dc4-4de3-b279-df310dea30f8\") " pod="openstack-operators/infra-operator-index-p6pgw" Jan 20 18:15:16 crc kubenswrapper[4558]: I0120 18:15:16.003583 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-index-p6pgw" Jan 20 18:15:16 crc kubenswrapper[4558]: I0120 18:15:16.353673 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-index-7gcsw" Jan 20 18:15:16 crc kubenswrapper[4558]: I0120 18:15:16.379179 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-index-p6pgw"] Jan 20 18:15:16 crc kubenswrapper[4558]: W0120 18:15:16.393003 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod86dc910c_5dc4_4de3_b279_df310dea30f8.slice/crio-167bf43226b8bcd9d16e2e2bdf70d0a54a3d961c4ab45378cc1095bf508d2ed6 WatchSource:0}: Error finding container 167bf43226b8bcd9d16e2e2bdf70d0a54a3d961c4ab45378cc1095bf508d2ed6: Status 404 returned error can't find the container with id 167bf43226b8bcd9d16e2e2bdf70d0a54a3d961c4ab45378cc1095bf508d2ed6 Jan 20 18:15:16 crc kubenswrapper[4558]: I0120 18:15:16.553144 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6jjzj\" (UniqueName: \"kubernetes.io/projected/addb5163-4e3e-4034-a741-0dde47458e80-kube-api-access-6jjzj\") pod \"addb5163-4e3e-4034-a741-0dde47458e80\" (UID: \"addb5163-4e3e-4034-a741-0dde47458e80\") " Jan 20 18:15:16 crc kubenswrapper[4558]: I0120 18:15:16.560235 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/addb5163-4e3e-4034-a741-0dde47458e80-kube-api-access-6jjzj" (OuterVolumeSpecName: "kube-api-access-6jjzj") pod "addb5163-4e3e-4034-a741-0dde47458e80" (UID: "addb5163-4e3e-4034-a741-0dde47458e80"). InnerVolumeSpecName "kube-api-access-6jjzj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:15:16 crc kubenswrapper[4558]: I0120 18:15:16.655466 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6jjzj\" (UniqueName: \"kubernetes.io/projected/addb5163-4e3e-4034-a741-0dde47458e80-kube-api-access-6jjzj\") on node \"crc\" DevicePath \"\"" Jan 20 18:15:16 crc kubenswrapper[4558]: I0120 18:15:16.962086 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-p6pgw" event={"ID":"86dc910c-5dc4-4de3-b279-df310dea30f8","Type":"ContainerStarted","Data":"167bf43226b8bcd9d16e2e2bdf70d0a54a3d961c4ab45378cc1095bf508d2ed6"} Jan 20 18:15:16 crc kubenswrapper[4558]: I0120 18:15:16.963710 4558 generic.go:334] "Generic (PLEG): container finished" podID="addb5163-4e3e-4034-a741-0dde47458e80" containerID="7178aebe584a22a96dedaf3c4f7a419120025ca1b8d3358032a3fb99441015bc" exitCode=0 Jan 20 18:15:16 crc kubenswrapper[4558]: I0120 18:15:16.963817 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-7gcsw" event={"ID":"addb5163-4e3e-4034-a741-0dde47458e80","Type":"ContainerDied","Data":"7178aebe584a22a96dedaf3c4f7a419120025ca1b8d3358032a3fb99441015bc"} Jan 20 18:15:16 crc kubenswrapper[4558]: I0120 18:15:16.963941 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-7gcsw" event={"ID":"addb5163-4e3e-4034-a741-0dde47458e80","Type":"ContainerDied","Data":"149602ee13fa9d3bb949dbbe8490d99a50ab6cbab84f25e883530f0e8fbbb67a"} Jan 20 18:15:16 crc kubenswrapper[4558]: I0120 18:15:16.964199 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-index-7gcsw" Jan 20 18:15:16 crc kubenswrapper[4558]: I0120 18:15:16.965058 4558 scope.go:117] "RemoveContainer" containerID="7178aebe584a22a96dedaf3c4f7a419120025ca1b8d3358032a3fb99441015bc" Jan 20 18:15:17 crc kubenswrapper[4558]: I0120 18:15:17.011735 4558 scope.go:117] "RemoveContainer" containerID="7178aebe584a22a96dedaf3c4f7a419120025ca1b8d3358032a3fb99441015bc" Jan 20 18:15:17 crc kubenswrapper[4558]: E0120 18:15:17.012264 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7178aebe584a22a96dedaf3c4f7a419120025ca1b8d3358032a3fb99441015bc\": container with ID starting with 7178aebe584a22a96dedaf3c4f7a419120025ca1b8d3358032a3fb99441015bc not found: ID does not exist" containerID="7178aebe584a22a96dedaf3c4f7a419120025ca1b8d3358032a3fb99441015bc" Jan 20 18:15:17 crc kubenswrapper[4558]: I0120 18:15:17.012354 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7178aebe584a22a96dedaf3c4f7a419120025ca1b8d3358032a3fb99441015bc"} err="failed to get container status \"7178aebe584a22a96dedaf3c4f7a419120025ca1b8d3358032a3fb99441015bc\": rpc error: code = NotFound desc = could not find container \"7178aebe584a22a96dedaf3c4f7a419120025ca1b8d3358032a3fb99441015bc\": container with ID starting with 7178aebe584a22a96dedaf3c4f7a419120025ca1b8d3358032a3fb99441015bc not found: ID does not exist" Jan 20 18:15:17 crc kubenswrapper[4558]: I0120 18:15:17.031042 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/infra-operator-index-7gcsw"] Jan 20 18:15:17 crc kubenswrapper[4558]: I0120 18:15:17.037702 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/infra-operator-index-7gcsw"] Jan 20 18:15:17 crc kubenswrapper[4558]: I0120 18:15:17.974532 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-p6pgw" event={"ID":"86dc910c-5dc4-4de3-b279-df310dea30f8","Type":"ContainerStarted","Data":"b165a030a23ca666b365058d159549a088505333a43dd0b39980f6a4d53c03cd"} Jan 20 18:15:18 crc kubenswrapper[4558]: I0120 18:15:18.000434 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-index-p6pgw" podStartSLOduration=2.46032519 podStartE2EDuration="3.000412599s" podCreationTimestamp="2026-01-20 18:15:15 +0000 UTC" firstStartedPulling="2026-01-20 18:15:16.397217074 +0000 UTC m=+5610.157555041" lastFinishedPulling="2026-01-20 18:15:16.937304483 +0000 UTC m=+5610.697642450" observedRunningTime="2026-01-20 18:15:17.999799377 +0000 UTC m=+5611.760137344" watchObservedRunningTime="2026-01-20 18:15:18.000412599 +0000 UTC m=+5611.760750567" Jan 20 18:15:18 crc kubenswrapper[4558]: I0120 18:15:18.574182 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="addb5163-4e3e-4034-a741-0dde47458e80" path="/var/lib/kubelet/pods/addb5163-4e3e-4034-a741-0dde47458e80/volumes" Jan 20 18:15:25 crc kubenswrapper[4558]: I0120 18:15:25.566646 4558 scope.go:117] "RemoveContainer" containerID="27314e1a8af2e8df8ecd2e7a06cbb630afbfecd8ca2338a1f199cf92f982f581" Jan 20 18:15:25 crc kubenswrapper[4558]: E0120 18:15:25.567604 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:15:26 crc kubenswrapper[4558]: I0120 18:15:26.004485 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/infra-operator-index-p6pgw" Jan 20 18:15:26 crc kubenswrapper[4558]: I0120 18:15:26.004899 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-index-p6pgw" Jan 20 18:15:26 crc kubenswrapper[4558]: I0120 18:15:26.038603 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/infra-operator-index-p6pgw" Jan 20 18:15:26 crc kubenswrapper[4558]: I0120 18:15:26.067455 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-index-p6pgw" Jan 20 18:15:28 crc kubenswrapper[4558]: I0120 18:15:28.327739 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1jzwfw"] Jan 20 18:15:28 crc kubenswrapper[4558]: E0120 18:15:28.328059 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="addb5163-4e3e-4034-a741-0dde47458e80" containerName="registry-server" Jan 20 18:15:28 crc kubenswrapper[4558]: I0120 18:15:28.328075 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="addb5163-4e3e-4034-a741-0dde47458e80" containerName="registry-server" Jan 20 18:15:28 crc kubenswrapper[4558]: I0120 18:15:28.328234 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="addb5163-4e3e-4034-a741-0dde47458e80" containerName="registry-server" Jan 20 18:15:28 crc kubenswrapper[4558]: I0120 18:15:28.329198 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1jzwfw" Jan 20 18:15:28 crc kubenswrapper[4558]: I0120 18:15:28.331583 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-7xp5w" Jan 20 18:15:28 crc kubenswrapper[4558]: I0120 18:15:28.339707 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1jzwfw"] Jan 20 18:15:28 crc kubenswrapper[4558]: I0120 18:15:28.523545 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h9lbr\" (UniqueName: \"kubernetes.io/projected/a057b440-fc08-4618-be07-956ccbe46af7-kube-api-access-h9lbr\") pod \"ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1jzwfw\" (UID: \"a057b440-fc08-4618-be07-956ccbe46af7\") " pod="openstack-operators/ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1jzwfw" Jan 20 18:15:28 crc kubenswrapper[4558]: I0120 18:15:28.523608 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a057b440-fc08-4618-be07-956ccbe46af7-bundle\") pod \"ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1jzwfw\" (UID: \"a057b440-fc08-4618-be07-956ccbe46af7\") " pod="openstack-operators/ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1jzwfw" Jan 20 18:15:28 crc kubenswrapper[4558]: I0120 18:15:28.523806 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a057b440-fc08-4618-be07-956ccbe46af7-util\") pod \"ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1jzwfw\" (UID: \"a057b440-fc08-4618-be07-956ccbe46af7\") " pod="openstack-operators/ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1jzwfw" Jan 20 18:15:28 crc kubenswrapper[4558]: I0120 18:15:28.624757 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a057b440-fc08-4618-be07-956ccbe46af7-bundle\") pod \"ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1jzwfw\" (UID: \"a057b440-fc08-4618-be07-956ccbe46af7\") " pod="openstack-operators/ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1jzwfw" Jan 20 18:15:28 crc kubenswrapper[4558]: I0120 18:15:28.624835 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a057b440-fc08-4618-be07-956ccbe46af7-util\") pod \"ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1jzwfw\" (UID: \"a057b440-fc08-4618-be07-956ccbe46af7\") " pod="openstack-operators/ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1jzwfw" Jan 20 18:15:28 crc kubenswrapper[4558]: I0120 18:15:28.624883 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h9lbr\" (UniqueName: \"kubernetes.io/projected/a057b440-fc08-4618-be07-956ccbe46af7-kube-api-access-h9lbr\") pod \"ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1jzwfw\" (UID: \"a057b440-fc08-4618-be07-956ccbe46af7\") " pod="openstack-operators/ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1jzwfw" Jan 20 18:15:28 crc kubenswrapper[4558]: I0120 18:15:28.625365 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/a057b440-fc08-4618-be07-956ccbe46af7-bundle\") pod \"ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1jzwfw\" (UID: \"a057b440-fc08-4618-be07-956ccbe46af7\") " pod="openstack-operators/ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1jzwfw" Jan 20 18:15:28 crc kubenswrapper[4558]: I0120 18:15:28.625410 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a057b440-fc08-4618-be07-956ccbe46af7-util\") pod \"ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1jzwfw\" (UID: \"a057b440-fc08-4618-be07-956ccbe46af7\") " pod="openstack-operators/ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1jzwfw" Jan 20 18:15:28 crc kubenswrapper[4558]: I0120 18:15:28.642313 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h9lbr\" (UniqueName: \"kubernetes.io/projected/a057b440-fc08-4618-be07-956ccbe46af7-kube-api-access-h9lbr\") pod \"ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1jzwfw\" (UID: \"a057b440-fc08-4618-be07-956ccbe46af7\") " pod="openstack-operators/ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1jzwfw" Jan 20 18:15:28 crc kubenswrapper[4558]: I0120 18:15:28.647485 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1jzwfw" Jan 20 18:15:29 crc kubenswrapper[4558]: I0120 18:15:29.024290 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1jzwfw"] Jan 20 18:15:29 crc kubenswrapper[4558]: I0120 18:15:29.064135 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1jzwfw" event={"ID":"a057b440-fc08-4618-be07-956ccbe46af7","Type":"ContainerStarted","Data":"7c725f1b4b8f30ee2f1cf465715a920fbdebe4b8ebab8fc17a98c4b6c003ec53"} Jan 20 18:15:30 crc kubenswrapper[4558]: I0120 18:15:30.075369 4558 generic.go:334] "Generic (PLEG): container finished" podID="a057b440-fc08-4618-be07-956ccbe46af7" containerID="820b4ece32ea6d6110e74102cffa88348736382f40c35b30730a04d00124a5e6" exitCode=0 Jan 20 18:15:30 crc kubenswrapper[4558]: I0120 18:15:30.075472 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1jzwfw" event={"ID":"a057b440-fc08-4618-be07-956ccbe46af7","Type":"ContainerDied","Data":"820b4ece32ea6d6110e74102cffa88348736382f40c35b30730a04d00124a5e6"} Jan 20 18:15:31 crc kubenswrapper[4558]: I0120 18:15:31.089591 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1jzwfw" event={"ID":"a057b440-fc08-4618-be07-956ccbe46af7","Type":"ContainerStarted","Data":"27aedd69e69221e2dae01ed6d942442903d22db26f9982c1188f891b75e3d04f"} Jan 20 18:15:32 crc kubenswrapper[4558]: I0120 18:15:32.098465 4558 generic.go:334] "Generic (PLEG): container finished" podID="a057b440-fc08-4618-be07-956ccbe46af7" containerID="27aedd69e69221e2dae01ed6d942442903d22db26f9982c1188f891b75e3d04f" exitCode=0 Jan 20 18:15:32 crc kubenswrapper[4558]: I0120 18:15:32.098515 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1jzwfw" 
event={"ID":"a057b440-fc08-4618-be07-956ccbe46af7","Type":"ContainerDied","Data":"27aedd69e69221e2dae01ed6d942442903d22db26f9982c1188f891b75e3d04f"} Jan 20 18:15:33 crc kubenswrapper[4558]: I0120 18:15:33.109880 4558 generic.go:334] "Generic (PLEG): container finished" podID="a057b440-fc08-4618-be07-956ccbe46af7" containerID="6f1d1f59580a316c78b0687a18091ff56e74e966d8f0876c4a46411a485ba0d5" exitCode=0 Jan 20 18:15:33 crc kubenswrapper[4558]: I0120 18:15:33.109977 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1jzwfw" event={"ID":"a057b440-fc08-4618-be07-956ccbe46af7","Type":"ContainerDied","Data":"6f1d1f59580a316c78b0687a18091ff56e74e966d8f0876c4a46411a485ba0d5"} Jan 20 18:15:34 crc kubenswrapper[4558]: I0120 18:15:34.340146 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1jzwfw" Jan 20 18:15:34 crc kubenswrapper[4558]: I0120 18:15:34.508549 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h9lbr\" (UniqueName: \"kubernetes.io/projected/a057b440-fc08-4618-be07-956ccbe46af7-kube-api-access-h9lbr\") pod \"a057b440-fc08-4618-be07-956ccbe46af7\" (UID: \"a057b440-fc08-4618-be07-956ccbe46af7\") " Jan 20 18:15:34 crc kubenswrapper[4558]: I0120 18:15:34.508696 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a057b440-fc08-4618-be07-956ccbe46af7-bundle\") pod \"a057b440-fc08-4618-be07-956ccbe46af7\" (UID: \"a057b440-fc08-4618-be07-956ccbe46af7\") " Jan 20 18:15:34 crc kubenswrapper[4558]: I0120 18:15:34.508721 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a057b440-fc08-4618-be07-956ccbe46af7-util\") pod \"a057b440-fc08-4618-be07-956ccbe46af7\" (UID: \"a057b440-fc08-4618-be07-956ccbe46af7\") " Jan 20 18:15:34 crc kubenswrapper[4558]: I0120 18:15:34.510421 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a057b440-fc08-4618-be07-956ccbe46af7-bundle" (OuterVolumeSpecName: "bundle") pod "a057b440-fc08-4618-be07-956ccbe46af7" (UID: "a057b440-fc08-4618-be07-956ccbe46af7"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:15:34 crc kubenswrapper[4558]: I0120 18:15:34.514314 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a057b440-fc08-4618-be07-956ccbe46af7-kube-api-access-h9lbr" (OuterVolumeSpecName: "kube-api-access-h9lbr") pod "a057b440-fc08-4618-be07-956ccbe46af7" (UID: "a057b440-fc08-4618-be07-956ccbe46af7"). InnerVolumeSpecName "kube-api-access-h9lbr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:15:34 crc kubenswrapper[4558]: I0120 18:15:34.611093 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h9lbr\" (UniqueName: \"kubernetes.io/projected/a057b440-fc08-4618-be07-956ccbe46af7-kube-api-access-h9lbr\") on node \"crc\" DevicePath \"\"" Jan 20 18:15:34 crc kubenswrapper[4558]: I0120 18:15:34.611141 4558 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a057b440-fc08-4618-be07-956ccbe46af7-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:15:34 crc kubenswrapper[4558]: I0120 18:15:34.713571 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a057b440-fc08-4618-be07-956ccbe46af7-util" (OuterVolumeSpecName: "util") pod "a057b440-fc08-4618-be07-956ccbe46af7" (UID: "a057b440-fc08-4618-be07-956ccbe46af7"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:15:34 crc kubenswrapper[4558]: I0120 18:15:34.813907 4558 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a057b440-fc08-4618-be07-956ccbe46af7-util\") on node \"crc\" DevicePath \"\"" Jan 20 18:15:35 crc kubenswrapper[4558]: I0120 18:15:35.126593 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1jzwfw" event={"ID":"a057b440-fc08-4618-be07-956ccbe46af7","Type":"ContainerDied","Data":"7c725f1b4b8f30ee2f1cf465715a920fbdebe4b8ebab8fc17a98c4b6c003ec53"} Jan 20 18:15:35 crc kubenswrapper[4558]: I0120 18:15:35.126647 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7c725f1b4b8f30ee2f1cf465715a920fbdebe4b8ebab8fc17a98c4b6c003ec53" Jan 20 18:15:35 crc kubenswrapper[4558]: I0120 18:15:35.126687 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1jzwfw" Jan 20 18:15:35 crc kubenswrapper[4558]: I0120 18:15:35.644597 4558 scope.go:117] "RemoveContainer" containerID="79e7dba817d9ddcf1ccde8404bcb21d393c65f2484740830c37c7ba4e867d889" Jan 20 18:15:35 crc kubenswrapper[4558]: I0120 18:15:35.672504 4558 scope.go:117] "RemoveContainer" containerID="10177ca11b34dc2db0f8a49d2aa24a6836504e49e9461d83bf7e65d004666d86" Jan 20 18:15:35 crc kubenswrapper[4558]: I0120 18:15:35.690531 4558 scope.go:117] "RemoveContainer" containerID="bce9460f3bc5575a7f63631bdcf0321bf4450bb60aa3376a029bb8eca3000348" Jan 20 18:15:35 crc kubenswrapper[4558]: I0120 18:15:35.710778 4558 scope.go:117] "RemoveContainer" containerID="623eb719746436bc27eddef3e09e0db86eb6f58e32b2675bb5567415add042a7" Jan 20 18:15:35 crc kubenswrapper[4558]: I0120 18:15:35.739207 4558 scope.go:117] "RemoveContainer" containerID="fe4a38c14f60d90568e400fb5d73ba13f25589faa6542fe3038d1945adb7a0d7" Jan 20 18:15:35 crc kubenswrapper[4558]: I0120 18:15:35.761361 4558 scope.go:117] "RemoveContainer" containerID="855a300217926f1ebcb2f57d692b96e2447d32d17ccd7d30ba4aa52782efcb6b" Jan 20 18:15:35 crc kubenswrapper[4558]: I0120 18:15:35.780904 4558 scope.go:117] "RemoveContainer" containerID="0dcbbf8e86c4848e3976176c51bcf2b7b6b8fea99643dd8fc7dc65a40d20806a" Jan 20 18:15:37 crc kubenswrapper[4558]: I0120 18:15:37.989832 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["barbican-kuttl-tests/openstack-galera-0"] Jan 20 18:15:37 crc kubenswrapper[4558]: E0120 18:15:37.990341 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a057b440-fc08-4618-be07-956ccbe46af7" containerName="util" Jan 20 18:15:37 crc kubenswrapper[4558]: I0120 18:15:37.990353 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a057b440-fc08-4618-be07-956ccbe46af7" containerName="util" Jan 20 18:15:37 crc kubenswrapper[4558]: E0120 18:15:37.990361 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a057b440-fc08-4618-be07-956ccbe46af7" containerName="pull" Jan 20 18:15:37 crc kubenswrapper[4558]: I0120 18:15:37.990368 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a057b440-fc08-4618-be07-956ccbe46af7" containerName="pull" Jan 20 18:15:37 crc kubenswrapper[4558]: E0120 18:15:37.990393 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a057b440-fc08-4618-be07-956ccbe46af7" containerName="extract" Jan 20 18:15:37 crc kubenswrapper[4558]: I0120 18:15:37.990398 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a057b440-fc08-4618-be07-956ccbe46af7" containerName="extract" Jan 20 18:15:37 crc kubenswrapper[4558]: I0120 18:15:37.990492 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a057b440-fc08-4618-be07-956ccbe46af7" containerName="extract" Jan 20 18:15:37 crc kubenswrapper[4558]: I0120 18:15:37.991057 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="barbican-kuttl-tests/openstack-galera-0" Jan 20 18:15:37 crc kubenswrapper[4558]: I0120 18:15:37.993456 4558 reflector.go:368] Caches populated for *v1.Secret from object-"barbican-kuttl-tests"/"galera-openstack-dockercfg-66mvm" Jan 20 18:15:37 crc kubenswrapper[4558]: I0120 18:15:37.994149 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"barbican-kuttl-tests"/"openshift-service-ca.crt" Jan 20 18:15:37 crc kubenswrapper[4558]: I0120 18:15:37.994282 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"barbican-kuttl-tests"/"openstack-scripts" Jan 20 18:15:37 crc kubenswrapper[4558]: I0120 18:15:37.994638 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"barbican-kuttl-tests"/"kube-root-ca.crt" Jan 20 18:15:37 crc kubenswrapper[4558]: I0120 18:15:37.994639 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"barbican-kuttl-tests"/"openstack-config-data" Jan 20 18:15:37 crc kubenswrapper[4558]: I0120 18:15:37.999245 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["barbican-kuttl-tests/openstack-galera-0"] Jan 20 18:15:38 crc kubenswrapper[4558]: I0120 18:15:38.004391 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["barbican-kuttl-tests/openstack-galera-2"] Jan 20 18:15:38 crc kubenswrapper[4558]: I0120 18:15:38.005670 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="barbican-kuttl-tests/openstack-galera-2" Jan 20 18:15:38 crc kubenswrapper[4558]: I0120 18:15:38.007622 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["barbican-kuttl-tests/openstack-galera-1"] Jan 20 18:15:38 crc kubenswrapper[4558]: I0120 18:15:38.021287 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="barbican-kuttl-tests/openstack-galera-1" Jan 20 18:15:38 crc kubenswrapper[4558]: I0120 18:15:38.034045 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["barbican-kuttl-tests/openstack-galera-2"] Jan 20 18:15:38 crc kubenswrapper[4558]: I0120 18:15:38.044091 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["barbican-kuttl-tests/openstack-galera-1"] Jan 20 18:15:38 crc kubenswrapper[4558]: I0120 18:15:38.165931 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/595cc6da-b53a-4a23-8e31-ed1dd616eb3e-config-data-generated\") pod \"openstack-galera-2\" (UID: \"595cc6da-b53a-4a23-8e31-ed1dd616eb3e\") " pod="barbican-kuttl-tests/openstack-galera-2" Jan 20 18:15:38 crc kubenswrapper[4558]: I0120 18:15:38.165982 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"openstack-galera-2\" (UID: \"595cc6da-b53a-4a23-8e31-ed1dd616eb3e\") " pod="barbican-kuttl-tests/openstack-galera-2" Jan 20 18:15:38 crc kubenswrapper[4558]: I0120 18:15:38.166131 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/53d3a76d-e277-4efa-adbf-85d8e328caf6-config-data-default\") pod \"openstack-galera-1\" (UID: \"53d3a76d-e277-4efa-adbf-85d8e328caf6\") " pod="barbican-kuttl-tests/openstack-galera-1" Jan 20 18:15:38 crc kubenswrapper[4558]: I0120 18:15:38.166180 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" 
(UniqueName: \"kubernetes.io/configmap/595cc6da-b53a-4a23-8e31-ed1dd616eb3e-config-data-default\") pod \"openstack-galera-2\" (UID: \"595cc6da-b53a-4a23-8e31-ed1dd616eb3e\") " pod="barbican-kuttl-tests/openstack-galera-2" Jan 20 18:15:38 crc kubenswrapper[4558]: I0120 18:15:38.166222 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"openstack-galera-0\" (UID: \"4a5a6576-dc07-4211-8b81-a53986f07041\") " pod="barbican-kuttl-tests/openstack-galera-0" Jan 20 18:15:38 crc kubenswrapper[4558]: I0120 18:15:38.166276 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wkrp4\" (UniqueName: \"kubernetes.io/projected/4a5a6576-dc07-4211-8b81-a53986f07041-kube-api-access-wkrp4\") pod \"openstack-galera-0\" (UID: \"4a5a6576-dc07-4211-8b81-a53986f07041\") " pod="barbican-kuttl-tests/openstack-galera-0" Jan 20 18:15:38 crc kubenswrapper[4558]: I0120 18:15:38.166306 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/53d3a76d-e277-4efa-adbf-85d8e328caf6-config-data-generated\") pod \"openstack-galera-1\" (UID: \"53d3a76d-e277-4efa-adbf-85d8e328caf6\") " pod="barbican-kuttl-tests/openstack-galera-1" Jan 20 18:15:38 crc kubenswrapper[4558]: I0120 18:15:38.166339 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/595cc6da-b53a-4a23-8e31-ed1dd616eb3e-kolla-config\") pod \"openstack-galera-2\" (UID: \"595cc6da-b53a-4a23-8e31-ed1dd616eb3e\") " pod="barbican-kuttl-tests/openstack-galera-2" Jan 20 18:15:38 crc kubenswrapper[4558]: I0120 18:15:38.166359 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/53d3a76d-e277-4efa-adbf-85d8e328caf6-operator-scripts\") pod \"openstack-galera-1\" (UID: \"53d3a76d-e277-4efa-adbf-85d8e328caf6\") " pod="barbican-kuttl-tests/openstack-galera-1" Jan 20 18:15:38 crc kubenswrapper[4558]: I0120 18:15:38.166378 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/4a5a6576-dc07-4211-8b81-a53986f07041-config-data-generated\") pod \"openstack-galera-0\" (UID: \"4a5a6576-dc07-4211-8b81-a53986f07041\") " pod="barbican-kuttl-tests/openstack-galera-0" Jan 20 18:15:38 crc kubenswrapper[4558]: I0120 18:15:38.166408 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zz8nz\" (UniqueName: \"kubernetes.io/projected/53d3a76d-e277-4efa-adbf-85d8e328caf6-kube-api-access-zz8nz\") pod \"openstack-galera-1\" (UID: \"53d3a76d-e277-4efa-adbf-85d8e328caf6\") " pod="barbican-kuttl-tests/openstack-galera-1" Jan 20 18:15:38 crc kubenswrapper[4558]: I0120 18:15:38.166423 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/4a5a6576-dc07-4211-8b81-a53986f07041-kolla-config\") pod \"openstack-galera-0\" (UID: \"4a5a6576-dc07-4211-8b81-a53986f07041\") " pod="barbican-kuttl-tests/openstack-galera-0" Jan 20 18:15:38 crc kubenswrapper[4558]: I0120 18:15:38.166449 4558 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/595cc6da-b53a-4a23-8e31-ed1dd616eb3e-operator-scripts\") pod \"openstack-galera-2\" (UID: \"595cc6da-b53a-4a23-8e31-ed1dd616eb3e\") " pod="barbican-kuttl-tests/openstack-galera-2" Jan 20 18:15:38 crc kubenswrapper[4558]: I0120 18:15:38.166526 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"openstack-galera-1\" (UID: \"53d3a76d-e277-4efa-adbf-85d8e328caf6\") " pod="barbican-kuttl-tests/openstack-galera-1" Jan 20 18:15:38 crc kubenswrapper[4558]: I0120 18:15:38.166580 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/4a5a6576-dc07-4211-8b81-a53986f07041-config-data-default\") pod \"openstack-galera-0\" (UID: \"4a5a6576-dc07-4211-8b81-a53986f07041\") " pod="barbican-kuttl-tests/openstack-galera-0" Jan 20 18:15:38 crc kubenswrapper[4558]: I0120 18:15:38.166617 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4a5a6576-dc07-4211-8b81-a53986f07041-operator-scripts\") pod \"openstack-galera-0\" (UID: \"4a5a6576-dc07-4211-8b81-a53986f07041\") " pod="barbican-kuttl-tests/openstack-galera-0" Jan 20 18:15:38 crc kubenswrapper[4558]: I0120 18:15:38.166665 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/53d3a76d-e277-4efa-adbf-85d8e328caf6-kolla-config\") pod \"openstack-galera-1\" (UID: \"53d3a76d-e277-4efa-adbf-85d8e328caf6\") " pod="barbican-kuttl-tests/openstack-galera-1" Jan 20 18:15:38 crc kubenswrapper[4558]: I0120 18:15:38.166709 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mskhp\" (UniqueName: \"kubernetes.io/projected/595cc6da-b53a-4a23-8e31-ed1dd616eb3e-kube-api-access-mskhp\") pod \"openstack-galera-2\" (UID: \"595cc6da-b53a-4a23-8e31-ed1dd616eb3e\") " pod="barbican-kuttl-tests/openstack-galera-2" Jan 20 18:15:38 crc kubenswrapper[4558]: I0120 18:15:38.267786 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/4a5a6576-dc07-4211-8b81-a53986f07041-config-data-default\") pod \"openstack-galera-0\" (UID: \"4a5a6576-dc07-4211-8b81-a53986f07041\") " pod="barbican-kuttl-tests/openstack-galera-0" Jan 20 18:15:38 crc kubenswrapper[4558]: I0120 18:15:38.267837 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4a5a6576-dc07-4211-8b81-a53986f07041-operator-scripts\") pod \"openstack-galera-0\" (UID: \"4a5a6576-dc07-4211-8b81-a53986f07041\") " pod="barbican-kuttl-tests/openstack-galera-0" Jan 20 18:15:38 crc kubenswrapper[4558]: I0120 18:15:38.267876 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/53d3a76d-e277-4efa-adbf-85d8e328caf6-kolla-config\") pod \"openstack-galera-1\" (UID: \"53d3a76d-e277-4efa-adbf-85d8e328caf6\") " pod="barbican-kuttl-tests/openstack-galera-1" Jan 20 18:15:38 crc kubenswrapper[4558]: I0120 18:15:38.267906 4558 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-mskhp\" (UniqueName: \"kubernetes.io/projected/595cc6da-b53a-4a23-8e31-ed1dd616eb3e-kube-api-access-mskhp\") pod \"openstack-galera-2\" (UID: \"595cc6da-b53a-4a23-8e31-ed1dd616eb3e\") " pod="barbican-kuttl-tests/openstack-galera-2" Jan 20 18:15:38 crc kubenswrapper[4558]: I0120 18:15:38.267948 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/595cc6da-b53a-4a23-8e31-ed1dd616eb3e-config-data-generated\") pod \"openstack-galera-2\" (UID: \"595cc6da-b53a-4a23-8e31-ed1dd616eb3e\") " pod="barbican-kuttl-tests/openstack-galera-2" Jan 20 18:15:38 crc kubenswrapper[4558]: I0120 18:15:38.267966 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"openstack-galera-2\" (UID: \"595cc6da-b53a-4a23-8e31-ed1dd616eb3e\") " pod="barbican-kuttl-tests/openstack-galera-2" Jan 20 18:15:38 crc kubenswrapper[4558]: I0120 18:15:38.267988 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/53d3a76d-e277-4efa-adbf-85d8e328caf6-config-data-default\") pod \"openstack-galera-1\" (UID: \"53d3a76d-e277-4efa-adbf-85d8e328caf6\") " pod="barbican-kuttl-tests/openstack-galera-1" Jan 20 18:15:38 crc kubenswrapper[4558]: I0120 18:15:38.268408 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/595cc6da-b53a-4a23-8e31-ed1dd616eb3e-config-data-default\") pod \"openstack-galera-2\" (UID: \"595cc6da-b53a-4a23-8e31-ed1dd616eb3e\") " pod="barbican-kuttl-tests/openstack-galera-2" Jan 20 18:15:38 crc kubenswrapper[4558]: I0120 18:15:38.268337 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"openstack-galera-2\" (UID: \"595cc6da-b53a-4a23-8e31-ed1dd616eb3e\") device mount path \"/mnt/openstack/pv13\"" pod="barbican-kuttl-tests/openstack-galera-2" Jan 20 18:15:38 crc kubenswrapper[4558]: I0120 18:15:38.268557 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"openstack-galera-0\" (UID: \"4a5a6576-dc07-4211-8b81-a53986f07041\") " pod="barbican-kuttl-tests/openstack-galera-0" Jan 20 18:15:38 crc kubenswrapper[4558]: I0120 18:15:38.268622 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wkrp4\" (UniqueName: \"kubernetes.io/projected/4a5a6576-dc07-4211-8b81-a53986f07041-kube-api-access-wkrp4\") pod \"openstack-galera-0\" (UID: \"4a5a6576-dc07-4211-8b81-a53986f07041\") " pod="barbican-kuttl-tests/openstack-galera-0" Jan 20 18:15:38 crc kubenswrapper[4558]: I0120 18:15:38.268660 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/53d3a76d-e277-4efa-adbf-85d8e328caf6-config-data-generated\") pod \"openstack-galera-1\" (UID: \"53d3a76d-e277-4efa-adbf-85d8e328caf6\") " pod="barbican-kuttl-tests/openstack-galera-1" Jan 20 18:15:38 crc kubenswrapper[4558]: I0120 18:15:38.268695 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: 
\"kubernetes.io/configmap/595cc6da-b53a-4a23-8e31-ed1dd616eb3e-kolla-config\") pod \"openstack-galera-2\" (UID: \"595cc6da-b53a-4a23-8e31-ed1dd616eb3e\") " pod="barbican-kuttl-tests/openstack-galera-2" Jan 20 18:15:38 crc kubenswrapper[4558]: I0120 18:15:38.268557 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/595cc6da-b53a-4a23-8e31-ed1dd616eb3e-config-data-generated\") pod \"openstack-galera-2\" (UID: \"595cc6da-b53a-4a23-8e31-ed1dd616eb3e\") " pod="barbican-kuttl-tests/openstack-galera-2" Jan 20 18:15:38 crc kubenswrapper[4558]: I0120 18:15:38.268716 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/53d3a76d-e277-4efa-adbf-85d8e328caf6-operator-scripts\") pod \"openstack-galera-1\" (UID: \"53d3a76d-e277-4efa-adbf-85d8e328caf6\") " pod="barbican-kuttl-tests/openstack-galera-1" Jan 20 18:15:38 crc kubenswrapper[4558]: I0120 18:15:38.268808 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/4a5a6576-dc07-4211-8b81-a53986f07041-config-data-generated\") pod \"openstack-galera-0\" (UID: \"4a5a6576-dc07-4211-8b81-a53986f07041\") " pod="barbican-kuttl-tests/openstack-galera-0" Jan 20 18:15:38 crc kubenswrapper[4558]: I0120 18:15:38.268839 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/4a5a6576-dc07-4211-8b81-a53986f07041-config-data-default\") pod \"openstack-galera-0\" (UID: \"4a5a6576-dc07-4211-8b81-a53986f07041\") " pod="barbican-kuttl-tests/openstack-galera-0" Jan 20 18:15:38 crc kubenswrapper[4558]: I0120 18:15:38.268864 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zz8nz\" (UniqueName: \"kubernetes.io/projected/53d3a76d-e277-4efa-adbf-85d8e328caf6-kube-api-access-zz8nz\") pod \"openstack-galera-1\" (UID: \"53d3a76d-e277-4efa-adbf-85d8e328caf6\") " pod="barbican-kuttl-tests/openstack-galera-1" Jan 20 18:15:38 crc kubenswrapper[4558]: I0120 18:15:38.268885 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/4a5a6576-dc07-4211-8b81-a53986f07041-kolla-config\") pod \"openstack-galera-0\" (UID: \"4a5a6576-dc07-4211-8b81-a53986f07041\") " pod="barbican-kuttl-tests/openstack-galera-0" Jan 20 18:15:38 crc kubenswrapper[4558]: I0120 18:15:38.268946 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/595cc6da-b53a-4a23-8e31-ed1dd616eb3e-operator-scripts\") pod \"openstack-galera-2\" (UID: \"595cc6da-b53a-4a23-8e31-ed1dd616eb3e\") " pod="barbican-kuttl-tests/openstack-galera-2" Jan 20 18:15:38 crc kubenswrapper[4558]: I0120 18:15:38.268991 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"openstack-galera-1\" (UID: \"53d3a76d-e277-4efa-adbf-85d8e328caf6\") " pod="barbican-kuttl-tests/openstack-galera-1" Jan 20 18:15:38 crc kubenswrapper[4558]: I0120 18:15:38.269076 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/53d3a76d-e277-4efa-adbf-85d8e328caf6-config-data-generated\") pod \"openstack-galera-1\" (UID: 
\"53d3a76d-e277-4efa-adbf-85d8e328caf6\") " pod="barbican-kuttl-tests/openstack-galera-1" Jan 20 18:15:38 crc kubenswrapper[4558]: I0120 18:15:38.269208 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"openstack-galera-1\" (UID: \"53d3a76d-e277-4efa-adbf-85d8e328caf6\") device mount path \"/mnt/openstack/pv14\"" pod="barbican-kuttl-tests/openstack-galera-1" Jan 20 18:15:38 crc kubenswrapper[4558]: I0120 18:15:38.269246 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/4a5a6576-dc07-4211-8b81-a53986f07041-config-data-generated\") pod \"openstack-galera-0\" (UID: \"4a5a6576-dc07-4211-8b81-a53986f07041\") " pod="barbican-kuttl-tests/openstack-galera-0" Jan 20 18:15:38 crc kubenswrapper[4558]: I0120 18:15:38.269519 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/595cc6da-b53a-4a23-8e31-ed1dd616eb3e-config-data-default\") pod \"openstack-galera-2\" (UID: \"595cc6da-b53a-4a23-8e31-ed1dd616eb3e\") " pod="barbican-kuttl-tests/openstack-galera-2" Jan 20 18:15:38 crc kubenswrapper[4558]: I0120 18:15:38.269604 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/53d3a76d-e277-4efa-adbf-85d8e328caf6-config-data-default\") pod \"openstack-galera-1\" (UID: \"53d3a76d-e277-4efa-adbf-85d8e328caf6\") " pod="barbican-kuttl-tests/openstack-galera-1" Jan 20 18:15:38 crc kubenswrapper[4558]: I0120 18:15:38.269693 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/595cc6da-b53a-4a23-8e31-ed1dd616eb3e-kolla-config\") pod \"openstack-galera-2\" (UID: \"595cc6da-b53a-4a23-8e31-ed1dd616eb3e\") " pod="barbican-kuttl-tests/openstack-galera-2" Jan 20 18:15:38 crc kubenswrapper[4558]: I0120 18:15:38.270027 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/53d3a76d-e277-4efa-adbf-85d8e328caf6-kolla-config\") pod \"openstack-galera-1\" (UID: \"53d3a76d-e277-4efa-adbf-85d8e328caf6\") " pod="barbican-kuttl-tests/openstack-galera-1" Jan 20 18:15:38 crc kubenswrapper[4558]: I0120 18:15:38.270450 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/4a5a6576-dc07-4211-8b81-a53986f07041-kolla-config\") pod \"openstack-galera-0\" (UID: \"4a5a6576-dc07-4211-8b81-a53986f07041\") " pod="barbican-kuttl-tests/openstack-galera-0" Jan 20 18:15:38 crc kubenswrapper[4558]: I0120 18:15:38.270665 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/53d3a76d-e277-4efa-adbf-85d8e328caf6-operator-scripts\") pod \"openstack-galera-1\" (UID: \"53d3a76d-e277-4efa-adbf-85d8e328caf6\") " pod="barbican-kuttl-tests/openstack-galera-1" Jan 20 18:15:38 crc kubenswrapper[4558]: I0120 18:15:38.270668 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4a5a6576-dc07-4211-8b81-a53986f07041-operator-scripts\") pod \"openstack-galera-0\" (UID: \"4a5a6576-dc07-4211-8b81-a53986f07041\") " pod="barbican-kuttl-tests/openstack-galera-0" Jan 20 18:15:38 crc kubenswrapper[4558]: I0120 18:15:38.270780 
4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/595cc6da-b53a-4a23-8e31-ed1dd616eb3e-operator-scripts\") pod \"openstack-galera-2\" (UID: \"595cc6da-b53a-4a23-8e31-ed1dd616eb3e\") " pod="barbican-kuttl-tests/openstack-galera-2" Jan 20 18:15:38 crc kubenswrapper[4558]: I0120 18:15:38.276208 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"openstack-galera-0\" (UID: \"4a5a6576-dc07-4211-8b81-a53986f07041\") device mount path \"/mnt/openstack/pv08\"" pod="barbican-kuttl-tests/openstack-galera-0" Jan 20 18:15:38 crc kubenswrapper[4558]: I0120 18:15:38.288820 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"openstack-galera-1\" (UID: \"53d3a76d-e277-4efa-adbf-85d8e328caf6\") " pod="barbican-kuttl-tests/openstack-galera-1" Jan 20 18:15:38 crc kubenswrapper[4558]: I0120 18:15:38.290836 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mskhp\" (UniqueName: \"kubernetes.io/projected/595cc6da-b53a-4a23-8e31-ed1dd616eb3e-kube-api-access-mskhp\") pod \"openstack-galera-2\" (UID: \"595cc6da-b53a-4a23-8e31-ed1dd616eb3e\") " pod="barbican-kuttl-tests/openstack-galera-2" Jan 20 18:15:38 crc kubenswrapper[4558]: I0120 18:15:38.291647 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zz8nz\" (UniqueName: \"kubernetes.io/projected/53d3a76d-e277-4efa-adbf-85d8e328caf6-kube-api-access-zz8nz\") pod \"openstack-galera-1\" (UID: \"53d3a76d-e277-4efa-adbf-85d8e328caf6\") " pod="barbican-kuttl-tests/openstack-galera-1" Jan 20 18:15:38 crc kubenswrapper[4558]: I0120 18:15:38.293798 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wkrp4\" (UniqueName: \"kubernetes.io/projected/4a5a6576-dc07-4211-8b81-a53986f07041-kube-api-access-wkrp4\") pod \"openstack-galera-0\" (UID: \"4a5a6576-dc07-4211-8b81-a53986f07041\") " pod="barbican-kuttl-tests/openstack-galera-0" Jan 20 18:15:38 crc kubenswrapper[4558]: I0120 18:15:38.296847 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"openstack-galera-2\" (UID: \"595cc6da-b53a-4a23-8e31-ed1dd616eb3e\") " pod="barbican-kuttl-tests/openstack-galera-2" Jan 20 18:15:38 crc kubenswrapper[4558]: I0120 18:15:38.298246 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"openstack-galera-0\" (UID: \"4a5a6576-dc07-4211-8b81-a53986f07041\") " pod="barbican-kuttl-tests/openstack-galera-0" Jan 20 18:15:38 crc kubenswrapper[4558]: I0120 18:15:38.304909 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="barbican-kuttl-tests/openstack-galera-0" Jan 20 18:15:38 crc kubenswrapper[4558]: I0120 18:15:38.318282 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="barbican-kuttl-tests/openstack-galera-2" Jan 20 18:15:38 crc kubenswrapper[4558]: I0120 18:15:38.336492 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="barbican-kuttl-tests/openstack-galera-1" Jan 20 18:15:38 crc kubenswrapper[4558]: I0120 18:15:38.743377 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["barbican-kuttl-tests/openstack-galera-2"] Jan 20 18:15:38 crc kubenswrapper[4558]: I0120 18:15:38.747242 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["barbican-kuttl-tests/openstack-galera-0"] Jan 20 18:15:38 crc kubenswrapper[4558]: W0120 18:15:38.750887 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod595cc6da_b53a_4a23_8e31_ed1dd616eb3e.slice/crio-3093f70dc9c1a03ac227878901aee8bdc9d47bad2f397360ac43a3d9c5a866d9 WatchSource:0}: Error finding container 3093f70dc9c1a03ac227878901aee8bdc9d47bad2f397360ac43a3d9c5a866d9: Status 404 returned error can't find the container with id 3093f70dc9c1a03ac227878901aee8bdc9d47bad2f397360ac43a3d9c5a866d9 Jan 20 18:15:38 crc kubenswrapper[4558]: W0120 18:15:38.753483 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4a5a6576_dc07_4211_8b81_a53986f07041.slice/crio-8b403e7aac6fad80392710a5653fb1a7326e1b73b8cbd6e255dd7fb336d09369 WatchSource:0}: Error finding container 8b403e7aac6fad80392710a5653fb1a7326e1b73b8cbd6e255dd7fb336d09369: Status 404 returned error can't find the container with id 8b403e7aac6fad80392710a5653fb1a7326e1b73b8cbd6e255dd7fb336d09369 Jan 20 18:15:38 crc kubenswrapper[4558]: I0120 18:15:38.819575 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["barbican-kuttl-tests/openstack-galera-1"] Jan 20 18:15:38 crc kubenswrapper[4558]: W0120 18:15:38.834989 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod53d3a76d_e277_4efa_adbf_85d8e328caf6.slice/crio-100b3b09bff8ebaebe8a5c3b9e64e918ca9bac4d7b4bbcd0716598fe2007aef5 WatchSource:0}: Error finding container 100b3b09bff8ebaebe8a5c3b9e64e918ca9bac4d7b4bbcd0716598fe2007aef5: Status 404 returned error can't find the container with id 100b3b09bff8ebaebe8a5c3b9e64e918ca9bac4d7b4bbcd0716598fe2007aef5 Jan 20 18:15:39 crc kubenswrapper[4558]: I0120 18:15:39.156985 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/openstack-galera-1" event={"ID":"53d3a76d-e277-4efa-adbf-85d8e328caf6","Type":"ContainerStarted","Data":"0b3454ba088a6c47241d9de340b7da9aaccc0d047daeeba29e2c1542ce3bca33"} Jan 20 18:15:39 crc kubenswrapper[4558]: I0120 18:15:39.157332 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/openstack-galera-1" event={"ID":"53d3a76d-e277-4efa-adbf-85d8e328caf6","Type":"ContainerStarted","Data":"100b3b09bff8ebaebe8a5c3b9e64e918ca9bac4d7b4bbcd0716598fe2007aef5"} Jan 20 18:15:39 crc kubenswrapper[4558]: I0120 18:15:39.158680 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/openstack-galera-2" event={"ID":"595cc6da-b53a-4a23-8e31-ed1dd616eb3e","Type":"ContainerStarted","Data":"3fc58048255359620235831ab1838523070305b6564cf243ce6325cf4e7d0b6c"} Jan 20 18:15:39 crc kubenswrapper[4558]: I0120 18:15:39.158713 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/openstack-galera-2" event={"ID":"595cc6da-b53a-4a23-8e31-ed1dd616eb3e","Type":"ContainerStarted","Data":"3093f70dc9c1a03ac227878901aee8bdc9d47bad2f397360ac43a3d9c5a866d9"} Jan 20 18:15:39 crc kubenswrapper[4558]: I0120 18:15:39.160593 4558 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="barbican-kuttl-tests/openstack-galera-0" event={"ID":"4a5a6576-dc07-4211-8b81-a53986f07041","Type":"ContainerStarted","Data":"8bd822892dec859a58d3d65e60a9ffce0c8d0b2f0f2210180163d24bee85092f"} Jan 20 18:15:39 crc kubenswrapper[4558]: I0120 18:15:39.160622 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/openstack-galera-0" event={"ID":"4a5a6576-dc07-4211-8b81-a53986f07041","Type":"ContainerStarted","Data":"8b403e7aac6fad80392710a5653fb1a7326e1b73b8cbd6e255dd7fb336d09369"} Jan 20 18:15:40 crc kubenswrapper[4558]: I0120 18:15:40.566522 4558 scope.go:117] "RemoveContainer" containerID="27314e1a8af2e8df8ecd2e7a06cbb630afbfecd8ca2338a1f199cf92f982f581" Jan 20 18:15:40 crc kubenswrapper[4558]: E0120 18:15:40.567808 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:15:42 crc kubenswrapper[4558]: I0120 18:15:42.180751 4558 generic.go:334] "Generic (PLEG): container finished" podID="4a5a6576-dc07-4211-8b81-a53986f07041" containerID="8bd822892dec859a58d3d65e60a9ffce0c8d0b2f0f2210180163d24bee85092f" exitCode=0 Jan 20 18:15:42 crc kubenswrapper[4558]: I0120 18:15:42.180840 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/openstack-galera-0" event={"ID":"4a5a6576-dc07-4211-8b81-a53986f07041","Type":"ContainerDied","Data":"8bd822892dec859a58d3d65e60a9ffce0c8d0b2f0f2210180163d24bee85092f"} Jan 20 18:15:42 crc kubenswrapper[4558]: I0120 18:15:42.182504 4558 generic.go:334] "Generic (PLEG): container finished" podID="53d3a76d-e277-4efa-adbf-85d8e328caf6" containerID="0b3454ba088a6c47241d9de340b7da9aaccc0d047daeeba29e2c1542ce3bca33" exitCode=0 Jan 20 18:15:42 crc kubenswrapper[4558]: I0120 18:15:42.182560 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/openstack-galera-1" event={"ID":"53d3a76d-e277-4efa-adbf-85d8e328caf6","Type":"ContainerDied","Data":"0b3454ba088a6c47241d9de340b7da9aaccc0d047daeeba29e2c1542ce3bca33"} Jan 20 18:15:42 crc kubenswrapper[4558]: I0120 18:15:42.184873 4558 generic.go:334] "Generic (PLEG): container finished" podID="595cc6da-b53a-4a23-8e31-ed1dd616eb3e" containerID="3fc58048255359620235831ab1838523070305b6564cf243ce6325cf4e7d0b6c" exitCode=0 Jan 20 18:15:42 crc kubenswrapper[4558]: I0120 18:15:42.184913 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/openstack-galera-2" event={"ID":"595cc6da-b53a-4a23-8e31-ed1dd616eb3e","Type":"ContainerDied","Data":"3fc58048255359620235831ab1838523070305b6564cf243ce6325cf4e7d0b6c"} Jan 20 18:15:43 crc kubenswrapper[4558]: I0120 18:15:43.195897 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/openstack-galera-0" event={"ID":"4a5a6576-dc07-4211-8b81-a53986f07041","Type":"ContainerStarted","Data":"41fbe186961e909cadffeef9e30338ab8190687a2776b303ae2843d2d4c3c9ff"} Jan 20 18:15:43 crc kubenswrapper[4558]: I0120 18:15:43.199275 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/openstack-galera-1" event={"ID":"53d3a76d-e277-4efa-adbf-85d8e328caf6","Type":"ContainerStarted","Data":"036a2449dee37ea79784360d016ed57d4f72b39b1c9f0b0aba175eff45b9a123"} Jan 20 
18:15:43 crc kubenswrapper[4558]: I0120 18:15:43.201597 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/openstack-galera-2" event={"ID":"595cc6da-b53a-4a23-8e31-ed1dd616eb3e","Type":"ContainerStarted","Data":"573646459e230f5f4e9d2d63271a8542132e09cb607880437338494fcdcd9be5"} Jan 20 18:15:43 crc kubenswrapper[4558]: I0120 18:15:43.219438 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="barbican-kuttl-tests/openstack-galera-0" podStartSLOduration=7.219418897 podStartE2EDuration="7.219418897s" podCreationTimestamp="2026-01-20 18:15:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:15:43.21287892 +0000 UTC m=+5636.973216887" watchObservedRunningTime="2026-01-20 18:15:43.219418897 +0000 UTC m=+5636.979756864" Jan 20 18:15:43 crc kubenswrapper[4558]: I0120 18:15:43.233881 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="barbican-kuttl-tests/openstack-galera-1" podStartSLOduration=7.233866602 podStartE2EDuration="7.233866602s" podCreationTimestamp="2026-01-20 18:15:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:15:43.228953363 +0000 UTC m=+5636.989291331" watchObservedRunningTime="2026-01-20 18:15:43.233866602 +0000 UTC m=+5636.994204569" Jan 20 18:15:43 crc kubenswrapper[4558]: I0120 18:15:43.248348 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="barbican-kuttl-tests/openstack-galera-2" podStartSLOduration=7.248316502 podStartE2EDuration="7.248316502s" podCreationTimestamp="2026-01-20 18:15:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:15:43.243384278 +0000 UTC m=+5637.003722245" watchObservedRunningTime="2026-01-20 18:15:43.248316502 +0000 UTC m=+5637.008654468" Jan 20 18:15:45 crc kubenswrapper[4558]: I0120 18:15:45.493863 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-85749d967b-zkbfg"] Jan 20 18:15:45 crc kubenswrapper[4558]: I0120 18:15:45.495366 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-85749d967b-zkbfg" Jan 20 18:15:45 crc kubenswrapper[4558]: I0120 18:15:45.499767 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-service-cert" Jan 20 18:15:45 crc kubenswrapper[4558]: I0120 18:15:45.501083 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-hx8bl" Jan 20 18:15:45 crc kubenswrapper[4558]: I0120 18:15:45.515125 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-85749d967b-zkbfg"] Jan 20 18:15:45 crc kubenswrapper[4558]: I0120 18:15:45.580917 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f8lx4\" (UniqueName: \"kubernetes.io/projected/2679558a-dc38-4dfb-a1f6-6b04731720b4-kube-api-access-f8lx4\") pod \"infra-operator-controller-manager-85749d967b-zkbfg\" (UID: \"2679558a-dc38-4dfb-a1f6-6b04731720b4\") " pod="openstack-operators/infra-operator-controller-manager-85749d967b-zkbfg" Jan 20 18:15:45 crc kubenswrapper[4558]: I0120 18:15:45.580971 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/2679558a-dc38-4dfb-a1f6-6b04731720b4-apiservice-cert\") pod \"infra-operator-controller-manager-85749d967b-zkbfg\" (UID: \"2679558a-dc38-4dfb-a1f6-6b04731720b4\") " pod="openstack-operators/infra-operator-controller-manager-85749d967b-zkbfg" Jan 20 18:15:45 crc kubenswrapper[4558]: I0120 18:15:45.581010 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/2679558a-dc38-4dfb-a1f6-6b04731720b4-webhook-cert\") pod \"infra-operator-controller-manager-85749d967b-zkbfg\" (UID: \"2679558a-dc38-4dfb-a1f6-6b04731720b4\") " pod="openstack-operators/infra-operator-controller-manager-85749d967b-zkbfg" Jan 20 18:15:45 crc kubenswrapper[4558]: I0120 18:15:45.683570 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/2679558a-dc38-4dfb-a1f6-6b04731720b4-webhook-cert\") pod \"infra-operator-controller-manager-85749d967b-zkbfg\" (UID: \"2679558a-dc38-4dfb-a1f6-6b04731720b4\") " pod="openstack-operators/infra-operator-controller-manager-85749d967b-zkbfg" Jan 20 18:15:45 crc kubenswrapper[4558]: I0120 18:15:45.683713 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f8lx4\" (UniqueName: \"kubernetes.io/projected/2679558a-dc38-4dfb-a1f6-6b04731720b4-kube-api-access-f8lx4\") pod \"infra-operator-controller-manager-85749d967b-zkbfg\" (UID: \"2679558a-dc38-4dfb-a1f6-6b04731720b4\") " pod="openstack-operators/infra-operator-controller-manager-85749d967b-zkbfg" Jan 20 18:15:45 crc kubenswrapper[4558]: I0120 18:15:45.683764 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/2679558a-dc38-4dfb-a1f6-6b04731720b4-apiservice-cert\") pod \"infra-operator-controller-manager-85749d967b-zkbfg\" (UID: \"2679558a-dc38-4dfb-a1f6-6b04731720b4\") " pod="openstack-operators/infra-operator-controller-manager-85749d967b-zkbfg" Jan 20 18:15:45 crc kubenswrapper[4558]: I0120 18:15:45.691266 4558 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/2679558a-dc38-4dfb-a1f6-6b04731720b4-webhook-cert\") pod \"infra-operator-controller-manager-85749d967b-zkbfg\" (UID: \"2679558a-dc38-4dfb-a1f6-6b04731720b4\") " pod="openstack-operators/infra-operator-controller-manager-85749d967b-zkbfg" Jan 20 18:15:45 crc kubenswrapper[4558]: I0120 18:15:45.691750 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/2679558a-dc38-4dfb-a1f6-6b04731720b4-apiservice-cert\") pod \"infra-operator-controller-manager-85749d967b-zkbfg\" (UID: \"2679558a-dc38-4dfb-a1f6-6b04731720b4\") " pod="openstack-operators/infra-operator-controller-manager-85749d967b-zkbfg" Jan 20 18:15:45 crc kubenswrapper[4558]: I0120 18:15:45.701323 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f8lx4\" (UniqueName: \"kubernetes.io/projected/2679558a-dc38-4dfb-a1f6-6b04731720b4-kube-api-access-f8lx4\") pod \"infra-operator-controller-manager-85749d967b-zkbfg\" (UID: \"2679558a-dc38-4dfb-a1f6-6b04731720b4\") " pod="openstack-operators/infra-operator-controller-manager-85749d967b-zkbfg" Jan 20 18:15:45 crc kubenswrapper[4558]: I0120 18:15:45.811944 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-85749d967b-zkbfg" Jan 20 18:15:46 crc kubenswrapper[4558]: I0120 18:15:46.001337 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-85749d967b-zkbfg"] Jan 20 18:15:46 crc kubenswrapper[4558]: W0120 18:15:46.011145 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2679558a_dc38_4dfb_a1f6_6b04731720b4.slice/crio-79de71d65573aef4d5b1e399dac4e75540f1de6c8881adc3f60d3f4f15966092 WatchSource:0}: Error finding container 79de71d65573aef4d5b1e399dac4e75540f1de6c8881adc3f60d3f4f15966092: Status 404 returned error can't find the container with id 79de71d65573aef4d5b1e399dac4e75540f1de6c8881adc3f60d3f4f15966092 Jan 20 18:15:46 crc kubenswrapper[4558]: I0120 18:15:46.019548 4558 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 20 18:15:46 crc kubenswrapper[4558]: I0120 18:15:46.226240 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-85749d967b-zkbfg" event={"ID":"2679558a-dc38-4dfb-a1f6-6b04731720b4","Type":"ContainerStarted","Data":"79de71d65573aef4d5b1e399dac4e75540f1de6c8881adc3f60d3f4f15966092"} Jan 20 18:15:48 crc kubenswrapper[4558]: I0120 18:15:48.305186 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="barbican-kuttl-tests/openstack-galera-0" Jan 20 18:15:48 crc kubenswrapper[4558]: I0120 18:15:48.305655 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="barbican-kuttl-tests/openstack-galera-0" Jan 20 18:15:48 crc kubenswrapper[4558]: I0120 18:15:48.319312 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="barbican-kuttl-tests/openstack-galera-2" Jan 20 18:15:48 crc kubenswrapper[4558]: I0120 18:15:48.319653 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="barbican-kuttl-tests/openstack-galera-2" Jan 20 18:15:48 crc kubenswrapper[4558]: I0120 18:15:48.336867 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="barbican-kuttl-tests/openstack-galera-1" 
Jan 20 18:15:48 crc kubenswrapper[4558]: I0120 18:15:48.336914 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="barbican-kuttl-tests/openstack-galera-1" Jan 20 18:15:49 crc kubenswrapper[4558]: E0120 18:15:49.739890 4558 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 192.168.25.8:50400->192.168.25.8:43883: write tcp 192.168.25.8:50400->192.168.25.8:43883: write: broken pipe Jan 20 18:15:50 crc kubenswrapper[4558]: I0120 18:15:50.260317 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-85749d967b-zkbfg" event={"ID":"2679558a-dc38-4dfb-a1f6-6b04731720b4","Type":"ContainerStarted","Data":"c7cbd5d8e0b18cf8dd45a3d65c56f7a53afda715d9aa103c60fd08bd0002f00e"} Jan 20 18:15:50 crc kubenswrapper[4558]: I0120 18:15:50.260759 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-85749d967b-zkbfg" Jan 20 18:15:50 crc kubenswrapper[4558]: I0120 18:15:50.281783 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-85749d967b-zkbfg" podStartSLOduration=1.26337461 podStartE2EDuration="5.281758209s" podCreationTimestamp="2026-01-20 18:15:45 +0000 UTC" firstStartedPulling="2026-01-20 18:15:46.019185804 +0000 UTC m=+5639.779523771" lastFinishedPulling="2026-01-20 18:15:50.037569403 +0000 UTC m=+5643.797907370" observedRunningTime="2026-01-20 18:15:50.276494963 +0000 UTC m=+5644.036832930" watchObservedRunningTime="2026-01-20 18:15:50.281758209 +0000 UTC m=+5644.042096165" Jan 20 18:15:51 crc kubenswrapper[4558]: I0120 18:15:51.566010 4558 scope.go:117] "RemoveContainer" containerID="27314e1a8af2e8df8ecd2e7a06cbb630afbfecd8ca2338a1f199cf92f982f581" Jan 20 18:15:51 crc kubenswrapper[4558]: E0120 18:15:51.566735 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:15:52 crc kubenswrapper[4558]: I0120 18:15:52.384479 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="barbican-kuttl-tests/openstack-galera-2" Jan 20 18:15:52 crc kubenswrapper[4558]: I0120 18:15:52.459143 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="barbican-kuttl-tests/openstack-galera-2" Jan 20 18:15:55 crc kubenswrapper[4558]: I0120 18:15:55.816279 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-85749d967b-zkbfg" Jan 20 18:15:57 crc kubenswrapper[4558]: I0120 18:15:57.056115 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["barbican-kuttl-tests/root-account-create-update-hzbrt"] Jan 20 18:15:57 crc kubenswrapper[4558]: I0120 18:15:57.058614 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="barbican-kuttl-tests/root-account-create-update-hzbrt" Jan 20 18:15:57 crc kubenswrapper[4558]: I0120 18:15:57.061191 4558 reflector.go:368] Caches populated for *v1.Secret from object-"barbican-kuttl-tests"/"openstack-mariadb-root-db-secret" Jan 20 18:15:57 crc kubenswrapper[4558]: I0120 18:15:57.065037 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["barbican-kuttl-tests/root-account-create-update-hzbrt"] Jan 20 18:15:57 crc kubenswrapper[4558]: I0120 18:15:57.184568 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7ml9m\" (UniqueName: \"kubernetes.io/projected/e765fff7-ce82-48c9-934c-6a40d0db6f02-kube-api-access-7ml9m\") pod \"root-account-create-update-hzbrt\" (UID: \"e765fff7-ce82-48c9-934c-6a40d0db6f02\") " pod="barbican-kuttl-tests/root-account-create-update-hzbrt" Jan 20 18:15:57 crc kubenswrapper[4558]: I0120 18:15:57.184717 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e765fff7-ce82-48c9-934c-6a40d0db6f02-operator-scripts\") pod \"root-account-create-update-hzbrt\" (UID: \"e765fff7-ce82-48c9-934c-6a40d0db6f02\") " pod="barbican-kuttl-tests/root-account-create-update-hzbrt" Jan 20 18:15:57 crc kubenswrapper[4558]: I0120 18:15:57.286478 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7ml9m\" (UniqueName: \"kubernetes.io/projected/e765fff7-ce82-48c9-934c-6a40d0db6f02-kube-api-access-7ml9m\") pod \"root-account-create-update-hzbrt\" (UID: \"e765fff7-ce82-48c9-934c-6a40d0db6f02\") " pod="barbican-kuttl-tests/root-account-create-update-hzbrt" Jan 20 18:15:57 crc kubenswrapper[4558]: I0120 18:15:57.286618 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e765fff7-ce82-48c9-934c-6a40d0db6f02-operator-scripts\") pod \"root-account-create-update-hzbrt\" (UID: \"e765fff7-ce82-48c9-934c-6a40d0db6f02\") " pod="barbican-kuttl-tests/root-account-create-update-hzbrt" Jan 20 18:15:57 crc kubenswrapper[4558]: I0120 18:15:57.287823 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e765fff7-ce82-48c9-934c-6a40d0db6f02-operator-scripts\") pod \"root-account-create-update-hzbrt\" (UID: \"e765fff7-ce82-48c9-934c-6a40d0db6f02\") " pod="barbican-kuttl-tests/root-account-create-update-hzbrt" Jan 20 18:15:57 crc kubenswrapper[4558]: I0120 18:15:57.305179 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7ml9m\" (UniqueName: \"kubernetes.io/projected/e765fff7-ce82-48c9-934c-6a40d0db6f02-kube-api-access-7ml9m\") pod \"root-account-create-update-hzbrt\" (UID: \"e765fff7-ce82-48c9-934c-6a40d0db6f02\") " pod="barbican-kuttl-tests/root-account-create-update-hzbrt" Jan 20 18:15:57 crc kubenswrapper[4558]: I0120 18:15:57.381266 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="barbican-kuttl-tests/root-account-create-update-hzbrt" Jan 20 18:15:57 crc kubenswrapper[4558]: I0120 18:15:57.765225 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["barbican-kuttl-tests/root-account-create-update-hzbrt"] Jan 20 18:15:58 crc kubenswrapper[4558]: I0120 18:15:58.329613 4558 generic.go:334] "Generic (PLEG): container finished" podID="e765fff7-ce82-48c9-934c-6a40d0db6f02" containerID="20ddc216403130197b28523f5d035146f0978efa76f7866277142574a8bf1030" exitCode=0 Jan 20 18:15:58 crc kubenswrapper[4558]: I0120 18:15:58.329759 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/root-account-create-update-hzbrt" event={"ID":"e765fff7-ce82-48c9-934c-6a40d0db6f02","Type":"ContainerDied","Data":"20ddc216403130197b28523f5d035146f0978efa76f7866277142574a8bf1030"} Jan 20 18:15:58 crc kubenswrapper[4558]: I0120 18:15:58.330044 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/root-account-create-update-hzbrt" event={"ID":"e765fff7-ce82-48c9-934c-6a40d0db6f02","Type":"ContainerStarted","Data":"276e3a8f535c5f8b83bbddecfdb86456d45c419cb1a46abfb0650bf05c8960d1"} Jan 20 18:15:58 crc kubenswrapper[4558]: I0120 18:15:58.389544 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="barbican-kuttl-tests/openstack-galera-2" podUID="595cc6da-b53a-4a23-8e31-ed1dd616eb3e" containerName="galera" probeResult="failure" output=< Jan 20 18:15:58 crc kubenswrapper[4558]: wsrep_local_state_comment (Donor/Desynced) differs from Synced Jan 20 18:15:58 crc kubenswrapper[4558]: > Jan 20 18:15:59 crc kubenswrapper[4558]: I0120 18:15:59.591236 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="barbican-kuttl-tests/root-account-create-update-hzbrt" Jan 20 18:15:59 crc kubenswrapper[4558]: I0120 18:15:59.723636 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7ml9m\" (UniqueName: \"kubernetes.io/projected/e765fff7-ce82-48c9-934c-6a40d0db6f02-kube-api-access-7ml9m\") pod \"e765fff7-ce82-48c9-934c-6a40d0db6f02\" (UID: \"e765fff7-ce82-48c9-934c-6a40d0db6f02\") " Jan 20 18:15:59 crc kubenswrapper[4558]: I0120 18:15:59.723862 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e765fff7-ce82-48c9-934c-6a40d0db6f02-operator-scripts\") pod \"e765fff7-ce82-48c9-934c-6a40d0db6f02\" (UID: \"e765fff7-ce82-48c9-934c-6a40d0db6f02\") " Jan 20 18:15:59 crc kubenswrapper[4558]: I0120 18:15:59.724815 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e765fff7-ce82-48c9-934c-6a40d0db6f02-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e765fff7-ce82-48c9-934c-6a40d0db6f02" (UID: "e765fff7-ce82-48c9-934c-6a40d0db6f02"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:15:59 crc kubenswrapper[4558]: I0120 18:15:59.730863 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e765fff7-ce82-48c9-934c-6a40d0db6f02-kube-api-access-7ml9m" (OuterVolumeSpecName: "kube-api-access-7ml9m") pod "e765fff7-ce82-48c9-934c-6a40d0db6f02" (UID: "e765fff7-ce82-48c9-934c-6a40d0db6f02"). InnerVolumeSpecName "kube-api-access-7ml9m". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:15:59 crc kubenswrapper[4558]: I0120 18:15:59.825831 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e765fff7-ce82-48c9-934c-6a40d0db6f02-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 18:15:59 crc kubenswrapper[4558]: I0120 18:15:59.826110 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7ml9m\" (UniqueName: \"kubernetes.io/projected/e765fff7-ce82-48c9-934c-6a40d0db6f02-kube-api-access-7ml9m\") on node \"crc\" DevicePath \"\"" Jan 20 18:15:59 crc kubenswrapper[4558]: I0120 18:15:59.891409 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-hc69l"] Jan 20 18:15:59 crc kubenswrapper[4558]: E0120 18:15:59.891665 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e765fff7-ce82-48c9-934c-6a40d0db6f02" containerName="mariadb-account-create-update" Jan 20 18:15:59 crc kubenswrapper[4558]: I0120 18:15:59.891680 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e765fff7-ce82-48c9-934c-6a40d0db6f02" containerName="mariadb-account-create-update" Jan 20 18:15:59 crc kubenswrapper[4558]: I0120 18:15:59.891794 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e765fff7-ce82-48c9-934c-6a40d0db6f02" containerName="mariadb-account-create-update" Jan 20 18:15:59 crc kubenswrapper[4558]: I0120 18:15:59.892231 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-hc69l" Jan 20 18:15:59 crc kubenswrapper[4558]: I0120 18:15:59.895974 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-index-dockercfg-zdqpq" Jan 20 18:15:59 crc kubenswrapper[4558]: I0120 18:15:59.910072 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-hc69l"] Jan 20 18:16:00 crc kubenswrapper[4558]: I0120 18:16:00.030356 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8npf4\" (UniqueName: \"kubernetes.io/projected/3ab16698-4434-44a4-a088-f72b20e45702-kube-api-access-8npf4\") pod \"rabbitmq-cluster-operator-index-hc69l\" (UID: \"3ab16698-4434-44a4-a088-f72b20e45702\") " pod="openstack-operators/rabbitmq-cluster-operator-index-hc69l" Jan 20 18:16:00 crc kubenswrapper[4558]: I0120 18:16:00.131799 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8npf4\" (UniqueName: \"kubernetes.io/projected/3ab16698-4434-44a4-a088-f72b20e45702-kube-api-access-8npf4\") pod \"rabbitmq-cluster-operator-index-hc69l\" (UID: \"3ab16698-4434-44a4-a088-f72b20e45702\") " pod="openstack-operators/rabbitmq-cluster-operator-index-hc69l" Jan 20 18:16:00 crc kubenswrapper[4558]: I0120 18:16:00.146871 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8npf4\" (UniqueName: \"kubernetes.io/projected/3ab16698-4434-44a4-a088-f72b20e45702-kube-api-access-8npf4\") pod \"rabbitmq-cluster-operator-index-hc69l\" (UID: \"3ab16698-4434-44a4-a088-f72b20e45702\") " pod="openstack-operators/rabbitmq-cluster-operator-index-hc69l" Jan 20 18:16:00 crc kubenswrapper[4558]: I0120 18:16:00.211100 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-hc69l" Jan 20 18:16:00 crc kubenswrapper[4558]: I0120 18:16:00.347445 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/root-account-create-update-hzbrt" event={"ID":"e765fff7-ce82-48c9-934c-6a40d0db6f02","Type":"ContainerDied","Data":"276e3a8f535c5f8b83bbddecfdb86456d45c419cb1a46abfb0650bf05c8960d1"} Jan 20 18:16:00 crc kubenswrapper[4558]: I0120 18:16:00.347495 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="276e3a8f535c5f8b83bbddecfdb86456d45c419cb1a46abfb0650bf05c8960d1" Jan 20 18:16:00 crc kubenswrapper[4558]: I0120 18:16:00.347567 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="barbican-kuttl-tests/root-account-create-update-hzbrt" Jan 20 18:16:00 crc kubenswrapper[4558]: I0120 18:16:00.597114 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-hc69l"] Jan 20 18:16:00 crc kubenswrapper[4558]: W0120 18:16:00.601622 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3ab16698_4434_44a4_a088_f72b20e45702.slice/crio-3486f5a2c60e3bac2ff8a7c22903b79f2557bd00d1e6f79bd3d34b392fd77753 WatchSource:0}: Error finding container 3486f5a2c60e3bac2ff8a7c22903b79f2557bd00d1e6f79bd3d34b392fd77753: Status 404 returned error can't find the container with id 3486f5a2c60e3bac2ff8a7c22903b79f2557bd00d1e6f79bd3d34b392fd77753 Jan 20 18:16:01 crc kubenswrapper[4558]: I0120 18:16:01.357234 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-hc69l" event={"ID":"3ab16698-4434-44a4-a088-f72b20e45702","Type":"ContainerStarted","Data":"3486f5a2c60e3bac2ff8a7c22903b79f2557bd00d1e6f79bd3d34b392fd77753"} Jan 20 18:16:04 crc kubenswrapper[4558]: I0120 18:16:04.416766 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="barbican-kuttl-tests/openstack-galera-1" Jan 20 18:16:04 crc kubenswrapper[4558]: I0120 18:16:04.537074 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="barbican-kuttl-tests/openstack-galera-1" Jan 20 18:16:04 crc kubenswrapper[4558]: I0120 18:16:04.589733 4558 scope.go:117] "RemoveContainer" containerID="27314e1a8af2e8df8ecd2e7a06cbb630afbfecd8ca2338a1f199cf92f982f581" Jan 20 18:16:04 crc kubenswrapper[4558]: E0120 18:16:04.590418 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:16:05 crc kubenswrapper[4558]: I0120 18:16:05.404245 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-hc69l" event={"ID":"3ab16698-4434-44a4-a088-f72b20e45702","Type":"ContainerStarted","Data":"ac10494f58bebd5d096fb319a8cf050dd8255213b0e5c1fa3be6830e02f048c8"} Jan 20 18:16:05 crc kubenswrapper[4558]: I0120 18:16:05.420154 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-index-hc69l" podStartSLOduration=2.067906887 podStartE2EDuration="6.420129757s" 
podCreationTimestamp="2026-01-20 18:15:59 +0000 UTC" firstStartedPulling="2026-01-20 18:16:00.603208078 +0000 UTC m=+5654.363546045" lastFinishedPulling="2026-01-20 18:16:04.955430949 +0000 UTC m=+5658.715768915" observedRunningTime="2026-01-20 18:16:05.418712192 +0000 UTC m=+5659.179050158" watchObservedRunningTime="2026-01-20 18:16:05.420129757 +0000 UTC m=+5659.180467725" Jan 20 18:16:07 crc kubenswrapper[4558]: I0120 18:16:07.758557 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="barbican-kuttl-tests/openstack-galera-0" Jan 20 18:16:07 crc kubenswrapper[4558]: I0120 18:16:07.821134 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="barbican-kuttl-tests/openstack-galera-0" Jan 20 18:16:10 crc kubenswrapper[4558]: I0120 18:16:10.212098 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/rabbitmq-cluster-operator-index-hc69l" Jan 20 18:16:10 crc kubenswrapper[4558]: I0120 18:16:10.212597 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/rabbitmq-cluster-operator-index-hc69l" Jan 20 18:16:10 crc kubenswrapper[4558]: I0120 18:16:10.241441 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/rabbitmq-cluster-operator-index-hc69l" Jan 20 18:16:10 crc kubenswrapper[4558]: I0120 18:16:10.461206 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/rabbitmq-cluster-operator-index-hc69l" Jan 20 18:16:11 crc kubenswrapper[4558]: I0120 18:16:11.931394 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590wqlht"] Jan 20 18:16:11 crc kubenswrapper[4558]: I0120 18:16:11.933533 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590wqlht" Jan 20 18:16:11 crc kubenswrapper[4558]: I0120 18:16:11.936424 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-7xp5w" Jan 20 18:16:11 crc kubenswrapper[4558]: I0120 18:16:11.940857 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590wqlht"] Jan 20 18:16:12 crc kubenswrapper[4558]: I0120 18:16:12.031184 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/389aeea6-5bb0-4ac9-a5fd-5d6cbfa36a83-util\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590wqlht\" (UID: \"389aeea6-5bb0-4ac9-a5fd-5d6cbfa36a83\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590wqlht" Jan 20 18:16:12 crc kubenswrapper[4558]: I0120 18:16:12.031238 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/389aeea6-5bb0-4ac9-a5fd-5d6cbfa36a83-bundle\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590wqlht\" (UID: \"389aeea6-5bb0-4ac9-a5fd-5d6cbfa36a83\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590wqlht" Jan 20 18:16:12 crc kubenswrapper[4558]: I0120 18:16:12.031277 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ttpjh\" (UniqueName: \"kubernetes.io/projected/389aeea6-5bb0-4ac9-a5fd-5d6cbfa36a83-kube-api-access-ttpjh\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590wqlht\" (UID: \"389aeea6-5bb0-4ac9-a5fd-5d6cbfa36a83\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590wqlht" Jan 20 18:16:12 crc kubenswrapper[4558]: I0120 18:16:12.132695 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ttpjh\" (UniqueName: \"kubernetes.io/projected/389aeea6-5bb0-4ac9-a5fd-5d6cbfa36a83-kube-api-access-ttpjh\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590wqlht\" (UID: \"389aeea6-5bb0-4ac9-a5fd-5d6cbfa36a83\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590wqlht" Jan 20 18:16:12 crc kubenswrapper[4558]: I0120 18:16:12.132971 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/389aeea6-5bb0-4ac9-a5fd-5d6cbfa36a83-util\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590wqlht\" (UID: \"389aeea6-5bb0-4ac9-a5fd-5d6cbfa36a83\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590wqlht" Jan 20 18:16:12 crc kubenswrapper[4558]: I0120 18:16:12.133082 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/389aeea6-5bb0-4ac9-a5fd-5d6cbfa36a83-bundle\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590wqlht\" (UID: \"389aeea6-5bb0-4ac9-a5fd-5d6cbfa36a83\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590wqlht" Jan 20 18:16:12 crc kubenswrapper[4558]: I0120 18:16:12.133502 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/389aeea6-5bb0-4ac9-a5fd-5d6cbfa36a83-util\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590wqlht\" (UID: \"389aeea6-5bb0-4ac9-a5fd-5d6cbfa36a83\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590wqlht" Jan 20 18:16:12 crc kubenswrapper[4558]: I0120 18:16:12.133546 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/389aeea6-5bb0-4ac9-a5fd-5d6cbfa36a83-bundle\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590wqlht\" (UID: \"389aeea6-5bb0-4ac9-a5fd-5d6cbfa36a83\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590wqlht" Jan 20 18:16:12 crc kubenswrapper[4558]: I0120 18:16:12.150426 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ttpjh\" (UniqueName: \"kubernetes.io/projected/389aeea6-5bb0-4ac9-a5fd-5d6cbfa36a83-kube-api-access-ttpjh\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590wqlht\" (UID: \"389aeea6-5bb0-4ac9-a5fd-5d6cbfa36a83\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590wqlht" Jan 20 18:16:12 crc kubenswrapper[4558]: I0120 18:16:12.253051 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590wqlht" Jan 20 18:16:12 crc kubenswrapper[4558]: I0120 18:16:12.662438 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590wqlht"] Jan 20 18:16:13 crc kubenswrapper[4558]: I0120 18:16:13.464263 4558 generic.go:334] "Generic (PLEG): container finished" podID="389aeea6-5bb0-4ac9-a5fd-5d6cbfa36a83" containerID="4e5cca7d6418c16cfbb8637979fa7981489de21daffbfa5063ee0da6502a654e" exitCode=0 Jan 20 18:16:13 crc kubenswrapper[4558]: I0120 18:16:13.464349 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590wqlht" event={"ID":"389aeea6-5bb0-4ac9-a5fd-5d6cbfa36a83","Type":"ContainerDied","Data":"4e5cca7d6418c16cfbb8637979fa7981489de21daffbfa5063ee0da6502a654e"} Jan 20 18:16:13 crc kubenswrapper[4558]: I0120 18:16:13.465324 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590wqlht" event={"ID":"389aeea6-5bb0-4ac9-a5fd-5d6cbfa36a83","Type":"ContainerStarted","Data":"17c15c5ec8c5e128876f625dfe1ff3023e610ab1a75a94298a9f7920e8b97896"} Jan 20 18:16:14 crc kubenswrapper[4558]: I0120 18:16:14.476025 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590wqlht" event={"ID":"389aeea6-5bb0-4ac9-a5fd-5d6cbfa36a83","Type":"ContainerStarted","Data":"ecb534e6b6284b3f99c71543f0e35f255e3cc778837439424250508baa4e46eb"} Jan 20 18:16:15 crc kubenswrapper[4558]: I0120 18:16:15.493416 4558 generic.go:334] "Generic (PLEG): container finished" podID="389aeea6-5bb0-4ac9-a5fd-5d6cbfa36a83" containerID="ecb534e6b6284b3f99c71543f0e35f255e3cc778837439424250508baa4e46eb" exitCode=0 Jan 20 18:16:15 crc kubenswrapper[4558]: I0120 18:16:15.493517 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590wqlht" 
event={"ID":"389aeea6-5bb0-4ac9-a5fd-5d6cbfa36a83","Type":"ContainerDied","Data":"ecb534e6b6284b3f99c71543f0e35f255e3cc778837439424250508baa4e46eb"} Jan 20 18:16:15 crc kubenswrapper[4558]: I0120 18:16:15.565762 4558 scope.go:117] "RemoveContainer" containerID="27314e1a8af2e8df8ecd2e7a06cbb630afbfecd8ca2338a1f199cf92f982f581" Jan 20 18:16:15 crc kubenswrapper[4558]: E0120 18:16:15.566002 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:16:16 crc kubenswrapper[4558]: I0120 18:16:16.504602 4558 generic.go:334] "Generic (PLEG): container finished" podID="389aeea6-5bb0-4ac9-a5fd-5d6cbfa36a83" containerID="2a5c0d00bf04165646b76275ec6ac7b673fad8372a5fef102ec5358b18bceffc" exitCode=0 Jan 20 18:16:16 crc kubenswrapper[4558]: I0120 18:16:16.504722 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590wqlht" event={"ID":"389aeea6-5bb0-4ac9-a5fd-5d6cbfa36a83","Type":"ContainerDied","Data":"2a5c0d00bf04165646b76275ec6ac7b673fad8372a5fef102ec5358b18bceffc"} Jan 20 18:16:17 crc kubenswrapper[4558]: I0120 18:16:17.787353 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590wqlht" Jan 20 18:16:17 crc kubenswrapper[4558]: I0120 18:16:17.923079 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/389aeea6-5bb0-4ac9-a5fd-5d6cbfa36a83-bundle\") pod \"389aeea6-5bb0-4ac9-a5fd-5d6cbfa36a83\" (UID: \"389aeea6-5bb0-4ac9-a5fd-5d6cbfa36a83\") " Jan 20 18:16:17 crc kubenswrapper[4558]: I0120 18:16:17.923251 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/389aeea6-5bb0-4ac9-a5fd-5d6cbfa36a83-util\") pod \"389aeea6-5bb0-4ac9-a5fd-5d6cbfa36a83\" (UID: \"389aeea6-5bb0-4ac9-a5fd-5d6cbfa36a83\") " Jan 20 18:16:17 crc kubenswrapper[4558]: I0120 18:16:17.923357 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ttpjh\" (UniqueName: \"kubernetes.io/projected/389aeea6-5bb0-4ac9-a5fd-5d6cbfa36a83-kube-api-access-ttpjh\") pod \"389aeea6-5bb0-4ac9-a5fd-5d6cbfa36a83\" (UID: \"389aeea6-5bb0-4ac9-a5fd-5d6cbfa36a83\") " Jan 20 18:16:17 crc kubenswrapper[4558]: I0120 18:16:17.924309 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/389aeea6-5bb0-4ac9-a5fd-5d6cbfa36a83-bundle" (OuterVolumeSpecName: "bundle") pod "389aeea6-5bb0-4ac9-a5fd-5d6cbfa36a83" (UID: "389aeea6-5bb0-4ac9-a5fd-5d6cbfa36a83"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:16:17 crc kubenswrapper[4558]: I0120 18:16:17.929799 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/389aeea6-5bb0-4ac9-a5fd-5d6cbfa36a83-kube-api-access-ttpjh" (OuterVolumeSpecName: "kube-api-access-ttpjh") pod "389aeea6-5bb0-4ac9-a5fd-5d6cbfa36a83" (UID: "389aeea6-5bb0-4ac9-a5fd-5d6cbfa36a83"). InnerVolumeSpecName "kube-api-access-ttpjh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:16:17 crc kubenswrapper[4558]: I0120 18:16:17.935989 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/389aeea6-5bb0-4ac9-a5fd-5d6cbfa36a83-util" (OuterVolumeSpecName: "util") pod "389aeea6-5bb0-4ac9-a5fd-5d6cbfa36a83" (UID: "389aeea6-5bb0-4ac9-a5fd-5d6cbfa36a83"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:16:18 crc kubenswrapper[4558]: I0120 18:16:18.025504 4558 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/389aeea6-5bb0-4ac9-a5fd-5d6cbfa36a83-util\") on node \"crc\" DevicePath \"\"" Jan 20 18:16:18 crc kubenswrapper[4558]: I0120 18:16:18.025550 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ttpjh\" (UniqueName: \"kubernetes.io/projected/389aeea6-5bb0-4ac9-a5fd-5d6cbfa36a83-kube-api-access-ttpjh\") on node \"crc\" DevicePath \"\"" Jan 20 18:16:18 crc kubenswrapper[4558]: I0120 18:16:18.025564 4558 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/389aeea6-5bb0-4ac9-a5fd-5d6cbfa36a83-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:16:18 crc kubenswrapper[4558]: I0120 18:16:18.525000 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590wqlht" event={"ID":"389aeea6-5bb0-4ac9-a5fd-5d6cbfa36a83","Type":"ContainerDied","Data":"17c15c5ec8c5e128876f625dfe1ff3023e610ab1a75a94298a9f7920e8b97896"} Jan 20 18:16:18 crc kubenswrapper[4558]: I0120 18:16:18.525053 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="17c15c5ec8c5e128876f625dfe1ff3023e610ab1a75a94298a9f7920e8b97896" Jan 20 18:16:18 crc kubenswrapper[4558]: I0120 18:16:18.525072 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590wqlht" Jan 20 18:16:24 crc kubenswrapper[4558]: I0120 18:16:24.145627 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["barbican-kuttl-tests/memcached-0"] Jan 20 18:16:24 crc kubenswrapper[4558]: E0120 18:16:24.146819 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="389aeea6-5bb0-4ac9-a5fd-5d6cbfa36a83" containerName="util" Jan 20 18:16:24 crc kubenswrapper[4558]: I0120 18:16:24.146835 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="389aeea6-5bb0-4ac9-a5fd-5d6cbfa36a83" containerName="util" Jan 20 18:16:24 crc kubenswrapper[4558]: E0120 18:16:24.146854 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="389aeea6-5bb0-4ac9-a5fd-5d6cbfa36a83" containerName="pull" Jan 20 18:16:24 crc kubenswrapper[4558]: I0120 18:16:24.146860 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="389aeea6-5bb0-4ac9-a5fd-5d6cbfa36a83" containerName="pull" Jan 20 18:16:24 crc kubenswrapper[4558]: E0120 18:16:24.146868 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="389aeea6-5bb0-4ac9-a5fd-5d6cbfa36a83" containerName="extract" Jan 20 18:16:24 crc kubenswrapper[4558]: I0120 18:16:24.146875 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="389aeea6-5bb0-4ac9-a5fd-5d6cbfa36a83" containerName="extract" Jan 20 18:16:24 crc kubenswrapper[4558]: I0120 18:16:24.147003 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="389aeea6-5bb0-4ac9-a5fd-5d6cbfa36a83" containerName="extract" Jan 20 18:16:24 crc kubenswrapper[4558]: I0120 18:16:24.147548 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="barbican-kuttl-tests/memcached-0" Jan 20 18:16:24 crc kubenswrapper[4558]: I0120 18:16:24.149645 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"barbican-kuttl-tests"/"memcached-config-data" Jan 20 18:16:24 crc kubenswrapper[4558]: I0120 18:16:24.149666 4558 reflector.go:368] Caches populated for *v1.Secret from object-"barbican-kuttl-tests"/"memcached-memcached-dockercfg-cv2q7" Jan 20 18:16:24 crc kubenswrapper[4558]: I0120 18:16:24.157601 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["barbican-kuttl-tests/memcached-0"] Jan 20 18:16:24 crc kubenswrapper[4558]: I0120 18:16:24.324220 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/472c8b48-909b-484c-85cb-fb38c6b77c0f-config-data\") pod \"memcached-0\" (UID: \"472c8b48-909b-484c-85cb-fb38c6b77c0f\") " pod="barbican-kuttl-tests/memcached-0" Jan 20 18:16:24 crc kubenswrapper[4558]: I0120 18:16:24.324307 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bhfnf\" (UniqueName: \"kubernetes.io/projected/472c8b48-909b-484c-85cb-fb38c6b77c0f-kube-api-access-bhfnf\") pod \"memcached-0\" (UID: \"472c8b48-909b-484c-85cb-fb38c6b77c0f\") " pod="barbican-kuttl-tests/memcached-0" Jan 20 18:16:24 crc kubenswrapper[4558]: I0120 18:16:24.324362 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/472c8b48-909b-484c-85cb-fb38c6b77c0f-kolla-config\") pod \"memcached-0\" (UID: \"472c8b48-909b-484c-85cb-fb38c6b77c0f\") " pod="barbican-kuttl-tests/memcached-0" Jan 20 18:16:24 crc kubenswrapper[4558]: I0120 18:16:24.425820 4558 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/472c8b48-909b-484c-85cb-fb38c6b77c0f-kolla-config\") pod \"memcached-0\" (UID: \"472c8b48-909b-484c-85cb-fb38c6b77c0f\") " pod="barbican-kuttl-tests/memcached-0" Jan 20 18:16:24 crc kubenswrapper[4558]: I0120 18:16:24.425913 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/472c8b48-909b-484c-85cb-fb38c6b77c0f-config-data\") pod \"memcached-0\" (UID: \"472c8b48-909b-484c-85cb-fb38c6b77c0f\") " pod="barbican-kuttl-tests/memcached-0" Jan 20 18:16:24 crc kubenswrapper[4558]: I0120 18:16:24.425976 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bhfnf\" (UniqueName: \"kubernetes.io/projected/472c8b48-909b-484c-85cb-fb38c6b77c0f-kube-api-access-bhfnf\") pod \"memcached-0\" (UID: \"472c8b48-909b-484c-85cb-fb38c6b77c0f\") " pod="barbican-kuttl-tests/memcached-0" Jan 20 18:16:24 crc kubenswrapper[4558]: I0120 18:16:24.426887 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/472c8b48-909b-484c-85cb-fb38c6b77c0f-kolla-config\") pod \"memcached-0\" (UID: \"472c8b48-909b-484c-85cb-fb38c6b77c0f\") " pod="barbican-kuttl-tests/memcached-0" Jan 20 18:16:24 crc kubenswrapper[4558]: I0120 18:16:24.426945 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/472c8b48-909b-484c-85cb-fb38c6b77c0f-config-data\") pod \"memcached-0\" (UID: \"472c8b48-909b-484c-85cb-fb38c6b77c0f\") " pod="barbican-kuttl-tests/memcached-0" Jan 20 18:16:24 crc kubenswrapper[4558]: I0120 18:16:24.445812 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bhfnf\" (UniqueName: \"kubernetes.io/projected/472c8b48-909b-484c-85cb-fb38c6b77c0f-kube-api-access-bhfnf\") pod \"memcached-0\" (UID: \"472c8b48-909b-484c-85cb-fb38c6b77c0f\") " pod="barbican-kuttl-tests/memcached-0" Jan 20 18:16:24 crc kubenswrapper[4558]: I0120 18:16:24.469564 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="barbican-kuttl-tests/memcached-0" Jan 20 18:16:25 crc kubenswrapper[4558]: I0120 18:16:25.000085 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["barbican-kuttl-tests/memcached-0"] Jan 20 18:16:25 crc kubenswrapper[4558]: I0120 18:16:25.604001 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/memcached-0" event={"ID":"472c8b48-909b-484c-85cb-fb38c6b77c0f","Type":"ContainerStarted","Data":"85362494850e98037e72c19cc9afcd67b5d5c74c8d3e9df8766d4575cb7ea45f"} Jan 20 18:16:25 crc kubenswrapper[4558]: I0120 18:16:25.604295 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/memcached-0" event={"ID":"472c8b48-909b-484c-85cb-fb38c6b77c0f","Type":"ContainerStarted","Data":"5875e5122fb2d3c0bf59a2d7954a0a04fb750b82c6583dbfe965229423df8aaf"} Jan 20 18:16:25 crc kubenswrapper[4558]: I0120 18:16:25.604310 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="barbican-kuttl-tests/memcached-0" Jan 20 18:16:25 crc kubenswrapper[4558]: I0120 18:16:25.615909 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="barbican-kuttl-tests/memcached-0" podStartSLOduration=1.615892817 podStartE2EDuration="1.615892817s" podCreationTimestamp="2026-01-20 18:16:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:16:25.615054331 +0000 UTC m=+5679.375392298" watchObservedRunningTime="2026-01-20 18:16:25.615892817 +0000 UTC m=+5679.376230775" Jan 20 18:16:26 crc kubenswrapper[4558]: I0120 18:16:26.793826 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-779fc9694b-q9nr2"] Jan 20 18:16:26 crc kubenswrapper[4558]: I0120 18:16:26.794726 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-q9nr2" Jan 20 18:16:26 crc kubenswrapper[4558]: I0120 18:16:26.797874 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-dockercfg-4kshj" Jan 20 18:16:26 crc kubenswrapper[4558]: I0120 18:16:26.801378 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-779fc9694b-q9nr2"] Jan 20 18:16:26 crc kubenswrapper[4558]: I0120 18:16:26.966876 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qnbgm\" (UniqueName: \"kubernetes.io/projected/4eb71fc6-5027-4724-9f33-24803c80be89-kube-api-access-qnbgm\") pod \"rabbitmq-cluster-operator-779fc9694b-q9nr2\" (UID: \"4eb71fc6-5027-4724-9f33-24803c80be89\") " pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-q9nr2" Jan 20 18:16:27 crc kubenswrapper[4558]: I0120 18:16:27.068606 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qnbgm\" (UniqueName: \"kubernetes.io/projected/4eb71fc6-5027-4724-9f33-24803c80be89-kube-api-access-qnbgm\") pod \"rabbitmq-cluster-operator-779fc9694b-q9nr2\" (UID: \"4eb71fc6-5027-4724-9f33-24803c80be89\") " pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-q9nr2" Jan 20 18:16:27 crc kubenswrapper[4558]: I0120 18:16:27.088219 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qnbgm\" (UniqueName: \"kubernetes.io/projected/4eb71fc6-5027-4724-9f33-24803c80be89-kube-api-access-qnbgm\") pod \"rabbitmq-cluster-operator-779fc9694b-q9nr2\" (UID: \"4eb71fc6-5027-4724-9f33-24803c80be89\") " pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-q9nr2" Jan 20 18:16:27 crc kubenswrapper[4558]: I0120 18:16:27.109791 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-q9nr2" Jan 20 18:16:27 crc kubenswrapper[4558]: I0120 18:16:27.520625 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-779fc9694b-q9nr2"] Jan 20 18:16:27 crc kubenswrapper[4558]: I0120 18:16:27.618458 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-q9nr2" event={"ID":"4eb71fc6-5027-4724-9f33-24803c80be89","Type":"ContainerStarted","Data":"40174a9b4ca6560c5a6e0ee358f18a6b723d79fc4e065d8ce2d1ad3502d23f05"} Jan 20 18:16:28 crc kubenswrapper[4558]: I0120 18:16:28.626991 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-q9nr2" event={"ID":"4eb71fc6-5027-4724-9f33-24803c80be89","Type":"ContainerStarted","Data":"ab774e969d548c14158bd4f1015af2a1c44551bebcd6dc0277853d961054dffb"} Jan 20 18:16:28 crc kubenswrapper[4558]: I0120 18:16:28.643676 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-q9nr2" podStartSLOduration=2.6436565549999997 podStartE2EDuration="2.643656555s" podCreationTimestamp="2026-01-20 18:16:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:16:28.63968061 +0000 UTC m=+5682.400018577" watchObservedRunningTime="2026-01-20 18:16:28.643656555 +0000 UTC m=+5682.403994523" Jan 20 18:16:30 crc kubenswrapper[4558]: I0120 18:16:30.566221 4558 scope.go:117] "RemoveContainer" containerID="27314e1a8af2e8df8ecd2e7a06cbb630afbfecd8ca2338a1f199cf92f982f581" Jan 20 18:16:30 crc kubenswrapper[4558]: E0120 18:16:30.566602 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:16:34 crc kubenswrapper[4558]: I0120 18:16:34.471336 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="barbican-kuttl-tests/memcached-0" Jan 20 18:16:34 crc kubenswrapper[4558]: I0120 18:16:34.693259 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-index-vck5c"] Jan 20 18:16:34 crc kubenswrapper[4558]: I0120 18:16:34.694545 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-index-vck5c" Jan 20 18:16:34 crc kubenswrapper[4558]: I0120 18:16:34.698280 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-index-dockercfg-h4bdp" Jan 20 18:16:34 crc kubenswrapper[4558]: I0120 18:16:34.698938 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2tb82\" (UniqueName: \"kubernetes.io/projected/c4024146-8e18-468e-b896-300502b433ee-kube-api-access-2tb82\") pod \"keystone-operator-index-vck5c\" (UID: \"c4024146-8e18-468e-b896-300502b433ee\") " pod="openstack-operators/keystone-operator-index-vck5c" Jan 20 18:16:34 crc kubenswrapper[4558]: I0120 18:16:34.706219 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-index-vck5c"] Jan 20 18:16:34 crc kubenswrapper[4558]: I0120 18:16:34.800864 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2tb82\" (UniqueName: \"kubernetes.io/projected/c4024146-8e18-468e-b896-300502b433ee-kube-api-access-2tb82\") pod \"keystone-operator-index-vck5c\" (UID: \"c4024146-8e18-468e-b896-300502b433ee\") " pod="openstack-operators/keystone-operator-index-vck5c" Jan 20 18:16:34 crc kubenswrapper[4558]: I0120 18:16:34.819696 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2tb82\" (UniqueName: \"kubernetes.io/projected/c4024146-8e18-468e-b896-300502b433ee-kube-api-access-2tb82\") pod \"keystone-operator-index-vck5c\" (UID: \"c4024146-8e18-468e-b896-300502b433ee\") " pod="openstack-operators/keystone-operator-index-vck5c" Jan 20 18:16:35 crc kubenswrapper[4558]: I0120 18:16:35.017592 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-index-vck5c" Jan 20 18:16:35 crc kubenswrapper[4558]: I0120 18:16:35.409619 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-index-vck5c"] Jan 20 18:16:35 crc kubenswrapper[4558]: W0120 18:16:35.416306 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc4024146_8e18_468e_b896_300502b433ee.slice/crio-6793d532d04f64156afa6173bd51e886485a371ce73936c784695e63469175b7 WatchSource:0}: Error finding container 6793d532d04f64156afa6173bd51e886485a371ce73936c784695e63469175b7: Status 404 returned error can't find the container with id 6793d532d04f64156afa6173bd51e886485a371ce73936c784695e63469175b7 Jan 20 18:16:35 crc kubenswrapper[4558]: I0120 18:16:35.685782 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-index-vck5c" event={"ID":"c4024146-8e18-468e-b896-300502b433ee","Type":"ContainerStarted","Data":"6793d532d04f64156afa6173bd51e886485a371ce73936c784695e63469175b7"} Jan 20 18:16:35 crc kubenswrapper[4558]: I0120 18:16:35.894871 4558 scope.go:117] "RemoveContainer" containerID="3d787a67513a01467bd03013a799bf24e5990a92f4c25bae72c6fb5e4a72cc1a" Jan 20 18:16:35 crc kubenswrapper[4558]: I0120 18:16:35.917349 4558 scope.go:117] "RemoveContainer" containerID="2c136bf4f1fc73828df55445d8c8192f0b65b0db901cbd4618de6f84b06105f3" Jan 20 18:16:36 crc kubenswrapper[4558]: I0120 18:16:36.696952 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-index-vck5c" event={"ID":"c4024146-8e18-468e-b896-300502b433ee","Type":"ContainerStarted","Data":"e620e29b143a3e833483094798c90910b75ccd8ffc0b356ddcf5c9bf51c497f0"} Jan 20 18:16:36 crc kubenswrapper[4558]: I0120 18:16:36.714040 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-index-vck5c" podStartSLOduration=1.8903271560000001 podStartE2EDuration="2.714017328s" podCreationTimestamp="2026-01-20 18:16:34 +0000 UTC" firstStartedPulling="2026-01-20 18:16:35.41882773 +0000 UTC m=+5689.179165698" lastFinishedPulling="2026-01-20 18:16:36.242517903 +0000 UTC m=+5690.002855870" observedRunningTime="2026-01-20 18:16:36.708787515 +0000 UTC m=+5690.469125483" watchObservedRunningTime="2026-01-20 18:16:36.714017328 +0000 UTC m=+5690.474355296" Jan 20 18:16:43 crc kubenswrapper[4558]: I0120 18:16:43.566139 4558 scope.go:117] "RemoveContainer" containerID="27314e1a8af2e8df8ecd2e7a06cbb630afbfecd8ca2338a1f199cf92f982f581" Jan 20 18:16:43 crc kubenswrapper[4558]: E0120 18:16:43.566422 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:16:45 crc kubenswrapper[4558]: I0120 18:16:45.018515 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/keystone-operator-index-vck5c" Jan 20 18:16:45 crc kubenswrapper[4558]: I0120 18:16:45.018856 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-index-vck5c" Jan 20 18:16:45 crc kubenswrapper[4558]: I0120 
18:16:45.046097 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/keystone-operator-index-vck5c" Jan 20 18:16:45 crc kubenswrapper[4558]: I0120 18:16:45.803467 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-index-vck5c" Jan 20 18:16:54 crc kubenswrapper[4558]: I0120 18:16:54.115449 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a0698tzfm"] Jan 20 18:16:54 crc kubenswrapper[4558]: I0120 18:16:54.117243 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a0698tzfm" Jan 20 18:16:54 crc kubenswrapper[4558]: I0120 18:16:54.121638 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-7xp5w" Jan 20 18:16:54 crc kubenswrapper[4558]: I0120 18:16:54.126729 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a0698tzfm"] Jan 20 18:16:54 crc kubenswrapper[4558]: I0120 18:16:54.291402 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/efc9936d-e76c-4750-914d-a0dde72ac9e2-bundle\") pod \"34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a0698tzfm\" (UID: \"efc9936d-e76c-4750-914d-a0dde72ac9e2\") " pod="openstack-operators/34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a0698tzfm" Jan 20 18:16:54 crc kubenswrapper[4558]: I0120 18:16:54.291475 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vzw74\" (UniqueName: \"kubernetes.io/projected/efc9936d-e76c-4750-914d-a0dde72ac9e2-kube-api-access-vzw74\") pod \"34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a0698tzfm\" (UID: \"efc9936d-e76c-4750-914d-a0dde72ac9e2\") " pod="openstack-operators/34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a0698tzfm" Jan 20 18:16:54 crc kubenswrapper[4558]: I0120 18:16:54.291516 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/efc9936d-e76c-4750-914d-a0dde72ac9e2-util\") pod \"34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a0698tzfm\" (UID: \"efc9936d-e76c-4750-914d-a0dde72ac9e2\") " pod="openstack-operators/34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a0698tzfm" Jan 20 18:16:54 crc kubenswrapper[4558]: I0120 18:16:54.393490 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vzw74\" (UniqueName: \"kubernetes.io/projected/efc9936d-e76c-4750-914d-a0dde72ac9e2-kube-api-access-vzw74\") pod \"34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a0698tzfm\" (UID: \"efc9936d-e76c-4750-914d-a0dde72ac9e2\") " pod="openstack-operators/34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a0698tzfm" Jan 20 18:16:54 crc kubenswrapper[4558]: I0120 18:16:54.393550 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/efc9936d-e76c-4750-914d-a0dde72ac9e2-util\") pod \"34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a0698tzfm\" (UID: \"efc9936d-e76c-4750-914d-a0dde72ac9e2\") " pod="openstack-operators/34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a0698tzfm" Jan 20 18:16:54 crc 
kubenswrapper[4558]: I0120 18:16:54.393646 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/efc9936d-e76c-4750-914d-a0dde72ac9e2-bundle\") pod \"34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a0698tzfm\" (UID: \"efc9936d-e76c-4750-914d-a0dde72ac9e2\") " pod="openstack-operators/34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a0698tzfm" Jan 20 18:16:54 crc kubenswrapper[4558]: I0120 18:16:54.394232 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/efc9936d-e76c-4750-914d-a0dde72ac9e2-util\") pod \"34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a0698tzfm\" (UID: \"efc9936d-e76c-4750-914d-a0dde72ac9e2\") " pod="openstack-operators/34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a0698tzfm" Jan 20 18:16:54 crc kubenswrapper[4558]: I0120 18:16:54.394265 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/efc9936d-e76c-4750-914d-a0dde72ac9e2-bundle\") pod \"34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a0698tzfm\" (UID: \"efc9936d-e76c-4750-914d-a0dde72ac9e2\") " pod="openstack-operators/34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a0698tzfm" Jan 20 18:16:54 crc kubenswrapper[4558]: I0120 18:16:54.411808 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vzw74\" (UniqueName: \"kubernetes.io/projected/efc9936d-e76c-4750-914d-a0dde72ac9e2-kube-api-access-vzw74\") pod \"34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a0698tzfm\" (UID: \"efc9936d-e76c-4750-914d-a0dde72ac9e2\") " pod="openstack-operators/34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a0698tzfm" Jan 20 18:16:54 crc kubenswrapper[4558]: I0120 18:16:54.441326 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a0698tzfm" Jan 20 18:16:54 crc kubenswrapper[4558]: I0120 18:16:54.827389 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a0698tzfm"] Jan 20 18:16:54 crc kubenswrapper[4558]: I0120 18:16:54.849272 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a0698tzfm" event={"ID":"efc9936d-e76c-4750-914d-a0dde72ac9e2","Type":"ContainerStarted","Data":"ff21029e1c0d4736deb2d58faffc189a2eb8ca30c46143797a4e559df0e7df3e"} Jan 20 18:16:55 crc kubenswrapper[4558]: I0120 18:16:55.858849 4558 generic.go:334] "Generic (PLEG): container finished" podID="efc9936d-e76c-4750-914d-a0dde72ac9e2" containerID="386368ef23e7a031c203ecbc5d8d17951778eb2a241a99697a6d4075102c41a6" exitCode=0 Jan 20 18:16:55 crc kubenswrapper[4558]: I0120 18:16:55.858957 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a0698tzfm" event={"ID":"efc9936d-e76c-4750-914d-a0dde72ac9e2","Type":"ContainerDied","Data":"386368ef23e7a031c203ecbc5d8d17951778eb2a241a99697a6d4075102c41a6"} Jan 20 18:16:57 crc kubenswrapper[4558]: I0120 18:16:57.566311 4558 scope.go:117] "RemoveContainer" containerID="27314e1a8af2e8df8ecd2e7a06cbb630afbfecd8ca2338a1f199cf92f982f581" Jan 20 18:16:57 crc kubenswrapper[4558]: E0120 18:16:57.566797 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:16:57 crc kubenswrapper[4558]: I0120 18:16:57.879644 4558 generic.go:334] "Generic (PLEG): container finished" podID="efc9936d-e76c-4750-914d-a0dde72ac9e2" containerID="a6c3d46d0b6a5aafa34043f11633d1c1d3dce61bb8d70b54ad40c2ea8a834c24" exitCode=0 Jan 20 18:16:57 crc kubenswrapper[4558]: I0120 18:16:57.879690 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a0698tzfm" event={"ID":"efc9936d-e76c-4750-914d-a0dde72ac9e2","Type":"ContainerDied","Data":"a6c3d46d0b6a5aafa34043f11633d1c1d3dce61bb8d70b54ad40c2ea8a834c24"} Jan 20 18:16:58 crc kubenswrapper[4558]: I0120 18:16:58.889912 4558 generic.go:334] "Generic (PLEG): container finished" podID="efc9936d-e76c-4750-914d-a0dde72ac9e2" containerID="fce5c1d3d2c928b886827a18ddf9a4a929eedad628229ef3cad9bf2beeaec1a4" exitCode=0 Jan 20 18:16:58 crc kubenswrapper[4558]: I0120 18:16:58.890017 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a0698tzfm" event={"ID":"efc9936d-e76c-4750-914d-a0dde72ac9e2","Type":"ContainerDied","Data":"fce5c1d3d2c928b886827a18ddf9a4a929eedad628229ef3cad9bf2beeaec1a4"} Jan 20 18:17:00 crc kubenswrapper[4558]: I0120 18:17:00.132971 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a0698tzfm" Jan 20 18:17:00 crc kubenswrapper[4558]: I0120 18:17:00.277279 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vzw74\" (UniqueName: \"kubernetes.io/projected/efc9936d-e76c-4750-914d-a0dde72ac9e2-kube-api-access-vzw74\") pod \"efc9936d-e76c-4750-914d-a0dde72ac9e2\" (UID: \"efc9936d-e76c-4750-914d-a0dde72ac9e2\") " Jan 20 18:17:00 crc kubenswrapper[4558]: I0120 18:17:00.277681 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/efc9936d-e76c-4750-914d-a0dde72ac9e2-bundle\") pod \"efc9936d-e76c-4750-914d-a0dde72ac9e2\" (UID: \"efc9936d-e76c-4750-914d-a0dde72ac9e2\") " Jan 20 18:17:00 crc kubenswrapper[4558]: I0120 18:17:00.277796 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/efc9936d-e76c-4750-914d-a0dde72ac9e2-util\") pod \"efc9936d-e76c-4750-914d-a0dde72ac9e2\" (UID: \"efc9936d-e76c-4750-914d-a0dde72ac9e2\") " Jan 20 18:17:00 crc kubenswrapper[4558]: I0120 18:17:00.278610 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/efc9936d-e76c-4750-914d-a0dde72ac9e2-bundle" (OuterVolumeSpecName: "bundle") pod "efc9936d-e76c-4750-914d-a0dde72ac9e2" (UID: "efc9936d-e76c-4750-914d-a0dde72ac9e2"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:17:00 crc kubenswrapper[4558]: I0120 18:17:00.284044 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efc9936d-e76c-4750-914d-a0dde72ac9e2-kube-api-access-vzw74" (OuterVolumeSpecName: "kube-api-access-vzw74") pod "efc9936d-e76c-4750-914d-a0dde72ac9e2" (UID: "efc9936d-e76c-4750-914d-a0dde72ac9e2"). InnerVolumeSpecName "kube-api-access-vzw74". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:17:00 crc kubenswrapper[4558]: I0120 18:17:00.290875 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/efc9936d-e76c-4750-914d-a0dde72ac9e2-util" (OuterVolumeSpecName: "util") pod "efc9936d-e76c-4750-914d-a0dde72ac9e2" (UID: "efc9936d-e76c-4750-914d-a0dde72ac9e2"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:17:00 crc kubenswrapper[4558]: I0120 18:17:00.381091 4558 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/efc9936d-e76c-4750-914d-a0dde72ac9e2-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:17:00 crc kubenswrapper[4558]: I0120 18:17:00.381127 4558 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/efc9936d-e76c-4750-914d-a0dde72ac9e2-util\") on node \"crc\" DevicePath \"\"" Jan 20 18:17:00 crc kubenswrapper[4558]: I0120 18:17:00.381144 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vzw74\" (UniqueName: \"kubernetes.io/projected/efc9936d-e76c-4750-914d-a0dde72ac9e2-kube-api-access-vzw74\") on node \"crc\" DevicePath \"\"" Jan 20 18:17:00 crc kubenswrapper[4558]: I0120 18:17:00.805655 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["barbican-kuttl-tests/rabbitmq-server-0"] Jan 20 18:17:00 crc kubenswrapper[4558]: E0120 18:17:00.806095 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="efc9936d-e76c-4750-914d-a0dde72ac9e2" containerName="pull" Jan 20 18:17:00 crc kubenswrapper[4558]: I0120 18:17:00.806131 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="efc9936d-e76c-4750-914d-a0dde72ac9e2" containerName="pull" Jan 20 18:17:00 crc kubenswrapper[4558]: E0120 18:17:00.806156 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="efc9936d-e76c-4750-914d-a0dde72ac9e2" containerName="extract" Jan 20 18:17:00 crc kubenswrapper[4558]: I0120 18:17:00.806184 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="efc9936d-e76c-4750-914d-a0dde72ac9e2" containerName="extract" Jan 20 18:17:00 crc kubenswrapper[4558]: E0120 18:17:00.806201 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="efc9936d-e76c-4750-914d-a0dde72ac9e2" containerName="util" Jan 20 18:17:00 crc kubenswrapper[4558]: I0120 18:17:00.806208 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="efc9936d-e76c-4750-914d-a0dde72ac9e2" containerName="util" Jan 20 18:17:00 crc kubenswrapper[4558]: I0120 18:17:00.806429 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="efc9936d-e76c-4750-914d-a0dde72ac9e2" containerName="extract" Jan 20 18:17:00 crc kubenswrapper[4558]: I0120 18:17:00.807485 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="barbican-kuttl-tests/rabbitmq-server-0" Jan 20 18:17:00 crc kubenswrapper[4558]: I0120 18:17:00.809355 4558 reflector.go:368] Caches populated for *v1.Secret from object-"barbican-kuttl-tests"/"rabbitmq-server-dockercfg-msvq9" Jan 20 18:17:00 crc kubenswrapper[4558]: I0120 18:17:00.809665 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"barbican-kuttl-tests"/"rabbitmq-plugins-conf" Jan 20 18:17:00 crc kubenswrapper[4558]: I0120 18:17:00.810812 4558 reflector.go:368] Caches populated for *v1.Secret from object-"barbican-kuttl-tests"/"rabbitmq-erlang-cookie" Jan 20 18:17:00 crc kubenswrapper[4558]: I0120 18:17:00.814325 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"barbican-kuttl-tests"/"rabbitmq-server-conf" Jan 20 18:17:00 crc kubenswrapper[4558]: I0120 18:17:00.814480 4558 reflector.go:368] Caches populated for *v1.Secret from object-"barbican-kuttl-tests"/"rabbitmq-default-user" Jan 20 18:17:00 crc kubenswrapper[4558]: I0120 18:17:00.820616 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["barbican-kuttl-tests/rabbitmq-server-0"] Jan 20 18:17:00 crc kubenswrapper[4558]: I0120 18:17:00.912523 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a0698tzfm" event={"ID":"efc9936d-e76c-4750-914d-a0dde72ac9e2","Type":"ContainerDied","Data":"ff21029e1c0d4736deb2d58faffc189a2eb8ca30c46143797a4e559df0e7df3e"} Jan 20 18:17:00 crc kubenswrapper[4558]: I0120 18:17:00.912575 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ff21029e1c0d4736deb2d58faffc189a2eb8ca30c46143797a4e559df0e7df3e" Jan 20 18:17:00 crc kubenswrapper[4558]: I0120 18:17:00.912607 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a0698tzfm" Jan 20 18:17:00 crc kubenswrapper[4558]: I0120 18:17:00.989606 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/de78cf98-2998-46d1-add9-6a03d25c8b2b-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"de78cf98-2998-46d1-add9-6a03d25c8b2b\") " pod="barbican-kuttl-tests/rabbitmq-server-0" Jan 20 18:17:00 crc kubenswrapper[4558]: I0120 18:17:00.989658 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/de78cf98-2998-46d1-add9-6a03d25c8b2b-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"de78cf98-2998-46d1-add9-6a03d25c8b2b\") " pod="barbican-kuttl-tests/rabbitmq-server-0" Jan 20 18:17:00 crc kubenswrapper[4558]: I0120 18:17:00.989688 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/de78cf98-2998-46d1-add9-6a03d25c8b2b-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"de78cf98-2998-46d1-add9-6a03d25c8b2b\") " pod="barbican-kuttl-tests/rabbitmq-server-0" Jan 20 18:17:00 crc kubenswrapper[4558]: I0120 18:17:00.989721 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/de78cf98-2998-46d1-add9-6a03d25c8b2b-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"de78cf98-2998-46d1-add9-6a03d25c8b2b\") " pod="barbican-kuttl-tests/rabbitmq-server-0" Jan 20 18:17:00 crc kubenswrapper[4558]: I0120 18:17:00.989763 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vmjz5\" (UniqueName: \"kubernetes.io/projected/de78cf98-2998-46d1-add9-6a03d25c8b2b-kube-api-access-vmjz5\") pod \"rabbitmq-server-0\" (UID: \"de78cf98-2998-46d1-add9-6a03d25c8b2b\") " pod="barbican-kuttl-tests/rabbitmq-server-0" Jan 20 18:17:00 crc kubenswrapper[4558]: I0120 18:17:00.989804 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-734ec1d9-3fee-4c55-b0f8-0f4799cbfde9\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-734ec1d9-3fee-4c55-b0f8-0f4799cbfde9\") pod \"rabbitmq-server-0\" (UID: \"de78cf98-2998-46d1-add9-6a03d25c8b2b\") " pod="barbican-kuttl-tests/rabbitmq-server-0" Jan 20 18:17:00 crc kubenswrapper[4558]: I0120 18:17:00.989826 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/de78cf98-2998-46d1-add9-6a03d25c8b2b-pod-info\") pod \"rabbitmq-server-0\" (UID: \"de78cf98-2998-46d1-add9-6a03d25c8b2b\") " pod="barbican-kuttl-tests/rabbitmq-server-0" Jan 20 18:17:00 crc kubenswrapper[4558]: I0120 18:17:00.989851 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/de78cf98-2998-46d1-add9-6a03d25c8b2b-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"de78cf98-2998-46d1-add9-6a03d25c8b2b\") " pod="barbican-kuttl-tests/rabbitmq-server-0" Jan 20 18:17:01 crc kubenswrapper[4558]: I0120 18:17:01.092015 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: 
\"kubernetes.io/empty-dir/de78cf98-2998-46d1-add9-6a03d25c8b2b-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"de78cf98-2998-46d1-add9-6a03d25c8b2b\") " pod="barbican-kuttl-tests/rabbitmq-server-0" Jan 20 18:17:01 crc kubenswrapper[4558]: I0120 18:17:01.092193 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/de78cf98-2998-46d1-add9-6a03d25c8b2b-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"de78cf98-2998-46d1-add9-6a03d25c8b2b\") " pod="barbican-kuttl-tests/rabbitmq-server-0" Jan 20 18:17:01 crc kubenswrapper[4558]: I0120 18:17:01.092296 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/de78cf98-2998-46d1-add9-6a03d25c8b2b-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"de78cf98-2998-46d1-add9-6a03d25c8b2b\") " pod="barbican-kuttl-tests/rabbitmq-server-0" Jan 20 18:17:01 crc kubenswrapper[4558]: I0120 18:17:01.092413 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/de78cf98-2998-46d1-add9-6a03d25c8b2b-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"de78cf98-2998-46d1-add9-6a03d25c8b2b\") " pod="barbican-kuttl-tests/rabbitmq-server-0" Jan 20 18:17:01 crc kubenswrapper[4558]: I0120 18:17:01.092555 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vmjz5\" (UniqueName: \"kubernetes.io/projected/de78cf98-2998-46d1-add9-6a03d25c8b2b-kube-api-access-vmjz5\") pod \"rabbitmq-server-0\" (UID: \"de78cf98-2998-46d1-add9-6a03d25c8b2b\") " pod="barbican-kuttl-tests/rabbitmq-server-0" Jan 20 18:17:01 crc kubenswrapper[4558]: I0120 18:17:01.092623 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/de78cf98-2998-46d1-add9-6a03d25c8b2b-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"de78cf98-2998-46d1-add9-6a03d25c8b2b\") " pod="barbican-kuttl-tests/rabbitmq-server-0" Jan 20 18:17:01 crc kubenswrapper[4558]: I0120 18:17:01.092758 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-734ec1d9-3fee-4c55-b0f8-0f4799cbfde9\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-734ec1d9-3fee-4c55-b0f8-0f4799cbfde9\") pod \"rabbitmq-server-0\" (UID: \"de78cf98-2998-46d1-add9-6a03d25c8b2b\") " pod="barbican-kuttl-tests/rabbitmq-server-0" Jan 20 18:17:01 crc kubenswrapper[4558]: I0120 18:17:01.092845 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/de78cf98-2998-46d1-add9-6a03d25c8b2b-pod-info\") pod \"rabbitmq-server-0\" (UID: \"de78cf98-2998-46d1-add9-6a03d25c8b2b\") " pod="barbican-kuttl-tests/rabbitmq-server-0" Jan 20 18:17:01 crc kubenswrapper[4558]: I0120 18:17:01.092939 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/de78cf98-2998-46d1-add9-6a03d25c8b2b-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"de78cf98-2998-46d1-add9-6a03d25c8b2b\") " pod="barbican-kuttl-tests/rabbitmq-server-0" Jan 20 18:17:01 crc kubenswrapper[4558]: I0120 18:17:01.092964 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: 
\"kubernetes.io/empty-dir/de78cf98-2998-46d1-add9-6a03d25c8b2b-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"de78cf98-2998-46d1-add9-6a03d25c8b2b\") " pod="barbican-kuttl-tests/rabbitmq-server-0" Jan 20 18:17:01 crc kubenswrapper[4558]: I0120 18:17:01.093236 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/de78cf98-2998-46d1-add9-6a03d25c8b2b-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"de78cf98-2998-46d1-add9-6a03d25c8b2b\") " pod="barbican-kuttl-tests/rabbitmq-server-0" Jan 20 18:17:01 crc kubenswrapper[4558]: I0120 18:17:01.097201 4558 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Jan 20 18:17:01 crc kubenswrapper[4558]: I0120 18:17:01.097235 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-734ec1d9-3fee-4c55-b0f8-0f4799cbfde9\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-734ec1d9-3fee-4c55-b0f8-0f4799cbfde9\") pod \"rabbitmq-server-0\" (UID: \"de78cf98-2998-46d1-add9-6a03d25c8b2b\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/93b49ccd54f9e741c0ea9d544a5714a3d9a076b50cc5688099468ffbea9f22db/globalmount\"" pod="barbican-kuttl-tests/rabbitmq-server-0" Jan 20 18:17:01 crc kubenswrapper[4558]: I0120 18:17:01.099797 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/de78cf98-2998-46d1-add9-6a03d25c8b2b-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"de78cf98-2998-46d1-add9-6a03d25c8b2b\") " pod="barbican-kuttl-tests/rabbitmq-server-0" Jan 20 18:17:01 crc kubenswrapper[4558]: I0120 18:17:01.099799 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/de78cf98-2998-46d1-add9-6a03d25c8b2b-pod-info\") pod \"rabbitmq-server-0\" (UID: \"de78cf98-2998-46d1-add9-6a03d25c8b2b\") " pod="barbican-kuttl-tests/rabbitmq-server-0" Jan 20 18:17:01 crc kubenswrapper[4558]: I0120 18:17:01.100072 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/de78cf98-2998-46d1-add9-6a03d25c8b2b-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"de78cf98-2998-46d1-add9-6a03d25c8b2b\") " pod="barbican-kuttl-tests/rabbitmq-server-0" Jan 20 18:17:01 crc kubenswrapper[4558]: I0120 18:17:01.108064 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vmjz5\" (UniqueName: \"kubernetes.io/projected/de78cf98-2998-46d1-add9-6a03d25c8b2b-kube-api-access-vmjz5\") pod \"rabbitmq-server-0\" (UID: \"de78cf98-2998-46d1-add9-6a03d25c8b2b\") " pod="barbican-kuttl-tests/rabbitmq-server-0" Jan 20 18:17:01 crc kubenswrapper[4558]: I0120 18:17:01.122263 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-734ec1d9-3fee-4c55-b0f8-0f4799cbfde9\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-734ec1d9-3fee-4c55-b0f8-0f4799cbfde9\") pod \"rabbitmq-server-0\" (UID: \"de78cf98-2998-46d1-add9-6a03d25c8b2b\") " pod="barbican-kuttl-tests/rabbitmq-server-0" Jan 20 18:17:01 crc kubenswrapper[4558]: I0120 18:17:01.125663 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="barbican-kuttl-tests/rabbitmq-server-0" Jan 20 18:17:01 crc kubenswrapper[4558]: I0120 18:17:01.526903 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["barbican-kuttl-tests/rabbitmq-server-0"] Jan 20 18:17:01 crc kubenswrapper[4558]: I0120 18:17:01.921485 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/rabbitmq-server-0" event={"ID":"de78cf98-2998-46d1-add9-6a03d25c8b2b","Type":"ContainerStarted","Data":"cfd8134f2d9a0f249583b231d6f72b7b1c993c311427bb9b2b2f1adab07aa3b3"} Jan 20 18:17:07 crc kubenswrapper[4558]: I0120 18:17:07.983623 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/rabbitmq-server-0" event={"ID":"de78cf98-2998-46d1-add9-6a03d25c8b2b","Type":"ContainerStarted","Data":"c1eb613ee08b66a59123d64dc4d5f56391ec1093d27dcbda2d2d7860f97cba52"} Jan 20 18:17:11 crc kubenswrapper[4558]: I0120 18:17:11.566595 4558 scope.go:117] "RemoveContainer" containerID="27314e1a8af2e8df8ecd2e7a06cbb630afbfecd8ca2338a1f199cf92f982f581" Jan 20 18:17:11 crc kubenswrapper[4558]: E0120 18:17:11.567942 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:17:12 crc kubenswrapper[4558]: I0120 18:17:12.292225 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-5ffd575849-prl45"] Jan 20 18:17:12 crc kubenswrapper[4558]: I0120 18:17:12.293308 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-5ffd575849-prl45" Jan 20 18:17:12 crc kubenswrapper[4558]: I0120 18:17:12.294864 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-9x7n4" Jan 20 18:17:12 crc kubenswrapper[4558]: I0120 18:17:12.294898 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-service-cert" Jan 20 18:17:12 crc kubenswrapper[4558]: I0120 18:17:12.309883 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-5ffd575849-prl45"] Jan 20 18:17:12 crc kubenswrapper[4558]: I0120 18:17:12.382555 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/0ddbae85-a332-464a-bd00-6c7f4fa4d7fd-apiservice-cert\") pod \"keystone-operator-controller-manager-5ffd575849-prl45\" (UID: \"0ddbae85-a332-464a-bd00-6c7f4fa4d7fd\") " pod="openstack-operators/keystone-operator-controller-manager-5ffd575849-prl45" Jan 20 18:17:12 crc kubenswrapper[4558]: I0120 18:17:12.382770 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rttxx\" (UniqueName: \"kubernetes.io/projected/0ddbae85-a332-464a-bd00-6c7f4fa4d7fd-kube-api-access-rttxx\") pod \"keystone-operator-controller-manager-5ffd575849-prl45\" (UID: \"0ddbae85-a332-464a-bd00-6c7f4fa4d7fd\") " pod="openstack-operators/keystone-operator-controller-manager-5ffd575849-prl45" Jan 20 18:17:12 crc kubenswrapper[4558]: I0120 18:17:12.382879 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/0ddbae85-a332-464a-bd00-6c7f4fa4d7fd-webhook-cert\") pod \"keystone-operator-controller-manager-5ffd575849-prl45\" (UID: \"0ddbae85-a332-464a-bd00-6c7f4fa4d7fd\") " pod="openstack-operators/keystone-operator-controller-manager-5ffd575849-prl45" Jan 20 18:17:12 crc kubenswrapper[4558]: I0120 18:17:12.483357 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/0ddbae85-a332-464a-bd00-6c7f4fa4d7fd-apiservice-cert\") pod \"keystone-operator-controller-manager-5ffd575849-prl45\" (UID: \"0ddbae85-a332-464a-bd00-6c7f4fa4d7fd\") " pod="openstack-operators/keystone-operator-controller-manager-5ffd575849-prl45" Jan 20 18:17:12 crc kubenswrapper[4558]: I0120 18:17:12.483427 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rttxx\" (UniqueName: \"kubernetes.io/projected/0ddbae85-a332-464a-bd00-6c7f4fa4d7fd-kube-api-access-rttxx\") pod \"keystone-operator-controller-manager-5ffd575849-prl45\" (UID: \"0ddbae85-a332-464a-bd00-6c7f4fa4d7fd\") " pod="openstack-operators/keystone-operator-controller-manager-5ffd575849-prl45" Jan 20 18:17:12 crc kubenswrapper[4558]: I0120 18:17:12.483478 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/0ddbae85-a332-464a-bd00-6c7f4fa4d7fd-webhook-cert\") pod \"keystone-operator-controller-manager-5ffd575849-prl45\" (UID: \"0ddbae85-a332-464a-bd00-6c7f4fa4d7fd\") " pod="openstack-operators/keystone-operator-controller-manager-5ffd575849-prl45" Jan 20 18:17:12 crc kubenswrapper[4558]: I0120 18:17:12.489348 4558 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/0ddbae85-a332-464a-bd00-6c7f4fa4d7fd-apiservice-cert\") pod \"keystone-operator-controller-manager-5ffd575849-prl45\" (UID: \"0ddbae85-a332-464a-bd00-6c7f4fa4d7fd\") " pod="openstack-operators/keystone-operator-controller-manager-5ffd575849-prl45" Jan 20 18:17:12 crc kubenswrapper[4558]: I0120 18:17:12.489657 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/0ddbae85-a332-464a-bd00-6c7f4fa4d7fd-webhook-cert\") pod \"keystone-operator-controller-manager-5ffd575849-prl45\" (UID: \"0ddbae85-a332-464a-bd00-6c7f4fa4d7fd\") " pod="openstack-operators/keystone-operator-controller-manager-5ffd575849-prl45" Jan 20 18:17:12 crc kubenswrapper[4558]: I0120 18:17:12.500300 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rttxx\" (UniqueName: \"kubernetes.io/projected/0ddbae85-a332-464a-bd00-6c7f4fa4d7fd-kube-api-access-rttxx\") pod \"keystone-operator-controller-manager-5ffd575849-prl45\" (UID: \"0ddbae85-a332-464a-bd00-6c7f4fa4d7fd\") " pod="openstack-operators/keystone-operator-controller-manager-5ffd575849-prl45" Jan 20 18:17:12 crc kubenswrapper[4558]: I0120 18:17:12.611607 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-5ffd575849-prl45" Jan 20 18:17:13 crc kubenswrapper[4558]: I0120 18:17:13.001841 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-5ffd575849-prl45"] Jan 20 18:17:13 crc kubenswrapper[4558]: W0120 18:17:13.004820 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0ddbae85_a332_464a_bd00_6c7f4fa4d7fd.slice/crio-8f9268bd5c2b042af875a42e0a442a41a6282e9de5181d0db699f42712a125ac WatchSource:0}: Error finding container 8f9268bd5c2b042af875a42e0a442a41a6282e9de5181d0db699f42712a125ac: Status 404 returned error can't find the container with id 8f9268bd5c2b042af875a42e0a442a41a6282e9de5181d0db699f42712a125ac Jan 20 18:17:13 crc kubenswrapper[4558]: I0120 18:17:13.022538 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-5ffd575849-prl45" event={"ID":"0ddbae85-a332-464a-bd00-6c7f4fa4d7fd","Type":"ContainerStarted","Data":"8f9268bd5c2b042af875a42e0a442a41a6282e9de5181d0db699f42712a125ac"} Jan 20 18:17:17 crc kubenswrapper[4558]: I0120 18:17:17.054366 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-5ffd575849-prl45" event={"ID":"0ddbae85-a332-464a-bd00-6c7f4fa4d7fd","Type":"ContainerStarted","Data":"2c0cf13207f9da18231641c153e6b2314767254ac9a5a084cd7ffdc00f84651c"} Jan 20 18:17:17 crc kubenswrapper[4558]: I0120 18:17:17.054985 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-5ffd575849-prl45" Jan 20 18:17:17 crc kubenswrapper[4558]: I0120 18:17:17.084675 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-5ffd575849-prl45" podStartSLOduration=1.331125457 podStartE2EDuration="5.084652389s" podCreationTimestamp="2026-01-20 18:17:12 +0000 UTC" firstStartedPulling="2026-01-20 18:17:13.007258896 +0000 UTC m=+5726.767596864" 
lastFinishedPulling="2026-01-20 18:17:16.760785829 +0000 UTC m=+5730.521123796" observedRunningTime="2026-01-20 18:17:17.07910616 +0000 UTC m=+5730.839444127" watchObservedRunningTime="2026-01-20 18:17:17.084652389 +0000 UTC m=+5730.844990356" Jan 20 18:17:22 crc kubenswrapper[4558]: I0120 18:17:22.566311 4558 scope.go:117] "RemoveContainer" containerID="27314e1a8af2e8df8ecd2e7a06cbb630afbfecd8ca2338a1f199cf92f982f581" Jan 20 18:17:22 crc kubenswrapper[4558]: E0120 18:17:22.566887 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:17:22 crc kubenswrapper[4558]: I0120 18:17:22.617726 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-5ffd575849-prl45" Jan 20 18:17:27 crc kubenswrapper[4558]: I0120 18:17:27.495521 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-index-xz7tx"] Jan 20 18:17:27 crc kubenswrapper[4558]: I0120 18:17:27.497496 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-index-xz7tx" Jan 20 18:17:27 crc kubenswrapper[4558]: I0120 18:17:27.500152 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-index-dockercfg-h8868" Jan 20 18:17:27 crc kubenswrapper[4558]: I0120 18:17:27.504317 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-index-xz7tx"] Jan 20 18:17:27 crc kubenswrapper[4558]: I0120 18:17:27.536447 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x6b24\" (UniqueName: \"kubernetes.io/projected/c2d67ae8-8d4b-4242-8c31-ad0ca9c3975c-kube-api-access-x6b24\") pod \"barbican-operator-index-xz7tx\" (UID: \"c2d67ae8-8d4b-4242-8c31-ad0ca9c3975c\") " pod="openstack-operators/barbican-operator-index-xz7tx" Jan 20 18:17:27 crc kubenswrapper[4558]: I0120 18:17:27.638120 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x6b24\" (UniqueName: \"kubernetes.io/projected/c2d67ae8-8d4b-4242-8c31-ad0ca9c3975c-kube-api-access-x6b24\") pod \"barbican-operator-index-xz7tx\" (UID: \"c2d67ae8-8d4b-4242-8c31-ad0ca9c3975c\") " pod="openstack-operators/barbican-operator-index-xz7tx" Jan 20 18:17:27 crc kubenswrapper[4558]: I0120 18:17:27.656842 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x6b24\" (UniqueName: \"kubernetes.io/projected/c2d67ae8-8d4b-4242-8c31-ad0ca9c3975c-kube-api-access-x6b24\") pod \"barbican-operator-index-xz7tx\" (UID: \"c2d67ae8-8d4b-4242-8c31-ad0ca9c3975c\") " pod="openstack-operators/barbican-operator-index-xz7tx" Jan 20 18:17:27 crc kubenswrapper[4558]: I0120 18:17:27.814209 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/barbican-operator-index-xz7tx" Jan 20 18:17:28 crc kubenswrapper[4558]: I0120 18:17:28.231716 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-index-xz7tx"] Jan 20 18:17:28 crc kubenswrapper[4558]: W0120 18:17:28.237659 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc2d67ae8_8d4b_4242_8c31_ad0ca9c3975c.slice/crio-d9c211d1c8d1874664302f4f2aeac3fcbcaf26d7007131daa12e0822b7a238c3 WatchSource:0}: Error finding container d9c211d1c8d1874664302f4f2aeac3fcbcaf26d7007131daa12e0822b7a238c3: Status 404 returned error can't find the container with id d9c211d1c8d1874664302f4f2aeac3fcbcaf26d7007131daa12e0822b7a238c3 Jan 20 18:17:29 crc kubenswrapper[4558]: I0120 18:17:29.146798 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-index-xz7tx" event={"ID":"c2d67ae8-8d4b-4242-8c31-ad0ca9c3975c","Type":"ContainerStarted","Data":"d9c211d1c8d1874664302f4f2aeac3fcbcaf26d7007131daa12e0822b7a238c3"} Jan 20 18:17:31 crc kubenswrapper[4558]: I0120 18:17:31.163727 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-index-xz7tx" event={"ID":"c2d67ae8-8d4b-4242-8c31-ad0ca9c3975c","Type":"ContainerStarted","Data":"aa7b9479c303c79db2c7368a0eee3398df9fea0ba6bc5a7a991e89fc14bba725"} Jan 20 18:17:31 crc kubenswrapper[4558]: I0120 18:17:31.179382 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-index-xz7tx" podStartSLOduration=2.252410726 podStartE2EDuration="4.179365149s" podCreationTimestamp="2026-01-20 18:17:27 +0000 UTC" firstStartedPulling="2026-01-20 18:17:28.240390597 +0000 UTC m=+5742.000728564" lastFinishedPulling="2026-01-20 18:17:30.16734502 +0000 UTC m=+5743.927682987" observedRunningTime="2026-01-20 18:17:31.176809945 +0000 UTC m=+5744.937147912" watchObservedRunningTime="2026-01-20 18:17:31.179365149 +0000 UTC m=+5744.939703116" Jan 20 18:17:33 crc kubenswrapper[4558]: I0120 18:17:33.566427 4558 scope.go:117] "RemoveContainer" containerID="27314e1a8af2e8df8ecd2e7a06cbb630afbfecd8ca2338a1f199cf92f982f581" Jan 20 18:17:33 crc kubenswrapper[4558]: E0120 18:17:33.566623 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:17:35 crc kubenswrapper[4558]: I0120 18:17:35.977687 4558 scope.go:117] "RemoveContainer" containerID="3d1d1b07c7f18dbf75745ca312859d024aefcf96fa2be134dbdf2d603d158e21" Jan 20 18:17:36 crc kubenswrapper[4558]: I0120 18:17:36.000658 4558 scope.go:117] "RemoveContainer" containerID="7892fbaf8b021f5bdd6f5aadb0da02b4b0bef32ad452d5c9ed7e87377da2a546" Jan 20 18:17:36 crc kubenswrapper[4558]: I0120 18:17:36.026616 4558 scope.go:117] "RemoveContainer" containerID="b9e698a81357dab17b52a01c935e1efa7fc99133ba3cb959933757a8c6e559df" Jan 20 18:17:36 crc kubenswrapper[4558]: I0120 18:17:36.044777 4558 scope.go:117] "RemoveContainer" containerID="facfc46d046a836828f83df5cf26fe5cc67ca5798724e57eb89ab4ffd84463bc" Jan 20 18:17:37 crc kubenswrapper[4558]: I0120 18:17:37.815066 4558 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-index-xz7tx" Jan 20 18:17:37 crc kubenswrapper[4558]: I0120 18:17:37.815434 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/barbican-operator-index-xz7tx" Jan 20 18:17:37 crc kubenswrapper[4558]: I0120 18:17:37.846267 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/barbican-operator-index-xz7tx" Jan 20 18:17:38 crc kubenswrapper[4558]: I0120 18:17:38.252338 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-index-xz7tx" Jan 20 18:17:39 crc kubenswrapper[4558]: I0120 18:17:39.232506 4558 generic.go:334] "Generic (PLEG): container finished" podID="de78cf98-2998-46d1-add9-6a03d25c8b2b" containerID="c1eb613ee08b66a59123d64dc4d5f56391ec1093d27dcbda2d2d7860f97cba52" exitCode=0 Jan 20 18:17:39 crc kubenswrapper[4558]: I0120 18:17:39.233056 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/rabbitmq-server-0" event={"ID":"de78cf98-2998-46d1-add9-6a03d25c8b2b","Type":"ContainerDied","Data":"c1eb613ee08b66a59123d64dc4d5f56391ec1093d27dcbda2d2d7860f97cba52"} Jan 20 18:17:40 crc kubenswrapper[4558]: I0120 18:17:40.243494 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/rabbitmq-server-0" event={"ID":"de78cf98-2998-46d1-add9-6a03d25c8b2b","Type":"ContainerStarted","Data":"68f810afb3a6be27dcb3f76000a3cfe636263e629fef26755689b4fb0096b465"} Jan 20 18:17:40 crc kubenswrapper[4558]: I0120 18:17:40.243713 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="barbican-kuttl-tests/rabbitmq-server-0" Jan 20 18:17:40 crc kubenswrapper[4558]: I0120 18:17:40.265140 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="barbican-kuttl-tests/rabbitmq-server-0" podStartSLOduration=35.779446867 podStartE2EDuration="41.265127799s" podCreationTimestamp="2026-01-20 18:16:59 +0000 UTC" firstStartedPulling="2026-01-20 18:17:01.530855017 +0000 UTC m=+5715.291192985" lastFinishedPulling="2026-01-20 18:17:07.016535949 +0000 UTC m=+5720.776873917" observedRunningTime="2026-01-20 18:17:40.262105256 +0000 UTC m=+5754.022443223" watchObservedRunningTime="2026-01-20 18:17:40.265127799 +0000 UTC m=+5754.025465766" Jan 20 18:17:40 crc kubenswrapper[4558]: I0120 18:17:40.408563 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/efb8278422048e49f60447fe08bd0467bd977c487c5bfd9acd08760cddhnw5w"] Jan 20 18:17:40 crc kubenswrapper[4558]: I0120 18:17:40.410955 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/efb8278422048e49f60447fe08bd0467bd977c487c5bfd9acd08760cddhnw5w" Jan 20 18:17:40 crc kubenswrapper[4558]: I0120 18:17:40.413413 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-7xp5w" Jan 20 18:17:40 crc kubenswrapper[4558]: I0120 18:17:40.418764 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/efb8278422048e49f60447fe08bd0467bd977c487c5bfd9acd08760cddhnw5w"] Jan 20 18:17:40 crc kubenswrapper[4558]: I0120 18:17:40.531233 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1ed4e8c4-ba06-4fc9-b258-b51a3db829b5-bundle\") pod \"efb8278422048e49f60447fe08bd0467bd977c487c5bfd9acd08760cddhnw5w\" (UID: \"1ed4e8c4-ba06-4fc9-b258-b51a3db829b5\") " pod="openstack-operators/efb8278422048e49f60447fe08bd0467bd977c487c5bfd9acd08760cddhnw5w" Jan 20 18:17:40 crc kubenswrapper[4558]: I0120 18:17:40.531328 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7vj85\" (UniqueName: \"kubernetes.io/projected/1ed4e8c4-ba06-4fc9-b258-b51a3db829b5-kube-api-access-7vj85\") pod \"efb8278422048e49f60447fe08bd0467bd977c487c5bfd9acd08760cddhnw5w\" (UID: \"1ed4e8c4-ba06-4fc9-b258-b51a3db829b5\") " pod="openstack-operators/efb8278422048e49f60447fe08bd0467bd977c487c5bfd9acd08760cddhnw5w" Jan 20 18:17:40 crc kubenswrapper[4558]: I0120 18:17:40.531362 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1ed4e8c4-ba06-4fc9-b258-b51a3db829b5-util\") pod \"efb8278422048e49f60447fe08bd0467bd977c487c5bfd9acd08760cddhnw5w\" (UID: \"1ed4e8c4-ba06-4fc9-b258-b51a3db829b5\") " pod="openstack-operators/efb8278422048e49f60447fe08bd0467bd977c487c5bfd9acd08760cddhnw5w" Jan 20 18:17:40 crc kubenswrapper[4558]: I0120 18:17:40.632528 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1ed4e8c4-ba06-4fc9-b258-b51a3db829b5-util\") pod \"efb8278422048e49f60447fe08bd0467bd977c487c5bfd9acd08760cddhnw5w\" (UID: \"1ed4e8c4-ba06-4fc9-b258-b51a3db829b5\") " pod="openstack-operators/efb8278422048e49f60447fe08bd0467bd977c487c5bfd9acd08760cddhnw5w" Jan 20 18:17:40 crc kubenswrapper[4558]: I0120 18:17:40.632606 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1ed4e8c4-ba06-4fc9-b258-b51a3db829b5-bundle\") pod \"efb8278422048e49f60447fe08bd0467bd977c487c5bfd9acd08760cddhnw5w\" (UID: \"1ed4e8c4-ba06-4fc9-b258-b51a3db829b5\") " pod="openstack-operators/efb8278422048e49f60447fe08bd0467bd977c487c5bfd9acd08760cddhnw5w" Jan 20 18:17:40 crc kubenswrapper[4558]: I0120 18:17:40.632672 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7vj85\" (UniqueName: \"kubernetes.io/projected/1ed4e8c4-ba06-4fc9-b258-b51a3db829b5-kube-api-access-7vj85\") pod \"efb8278422048e49f60447fe08bd0467bd977c487c5bfd9acd08760cddhnw5w\" (UID: \"1ed4e8c4-ba06-4fc9-b258-b51a3db829b5\") " pod="openstack-operators/efb8278422048e49f60447fe08bd0467bd977c487c5bfd9acd08760cddhnw5w" Jan 20 18:17:40 crc kubenswrapper[4558]: I0120 18:17:40.633475 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/1ed4e8c4-ba06-4fc9-b258-b51a3db829b5-util\") pod \"efb8278422048e49f60447fe08bd0467bd977c487c5bfd9acd08760cddhnw5w\" (UID: \"1ed4e8c4-ba06-4fc9-b258-b51a3db829b5\") " pod="openstack-operators/efb8278422048e49f60447fe08bd0467bd977c487c5bfd9acd08760cddhnw5w" Jan 20 18:17:40 crc kubenswrapper[4558]: I0120 18:17:40.633523 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1ed4e8c4-ba06-4fc9-b258-b51a3db829b5-bundle\") pod \"efb8278422048e49f60447fe08bd0467bd977c487c5bfd9acd08760cddhnw5w\" (UID: \"1ed4e8c4-ba06-4fc9-b258-b51a3db829b5\") " pod="openstack-operators/efb8278422048e49f60447fe08bd0467bd977c487c5bfd9acd08760cddhnw5w" Jan 20 18:17:40 crc kubenswrapper[4558]: I0120 18:17:40.648834 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7vj85\" (UniqueName: \"kubernetes.io/projected/1ed4e8c4-ba06-4fc9-b258-b51a3db829b5-kube-api-access-7vj85\") pod \"efb8278422048e49f60447fe08bd0467bd977c487c5bfd9acd08760cddhnw5w\" (UID: \"1ed4e8c4-ba06-4fc9-b258-b51a3db829b5\") " pod="openstack-operators/efb8278422048e49f60447fe08bd0467bd977c487c5bfd9acd08760cddhnw5w" Jan 20 18:17:40 crc kubenswrapper[4558]: I0120 18:17:40.733688 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/efb8278422048e49f60447fe08bd0467bd977c487c5bfd9acd08760cddhnw5w" Jan 20 18:17:41 crc kubenswrapper[4558]: I0120 18:17:41.140553 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/efb8278422048e49f60447fe08bd0467bd977c487c5bfd9acd08760cddhnw5w"] Jan 20 18:17:41 crc kubenswrapper[4558]: I0120 18:17:41.251961 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/efb8278422048e49f60447fe08bd0467bd977c487c5bfd9acd08760cddhnw5w" event={"ID":"1ed4e8c4-ba06-4fc9-b258-b51a3db829b5","Type":"ContainerStarted","Data":"6a1d5e41fd9aad79c80f35c28dd52566458de3c4914f46008b592fbcbd1c29e9"} Jan 20 18:17:42 crc kubenswrapper[4558]: I0120 18:17:42.263132 4558 generic.go:334] "Generic (PLEG): container finished" podID="1ed4e8c4-ba06-4fc9-b258-b51a3db829b5" containerID="9c4d3b29cae3d7861d6b056fbb277f7758301ec5f2b733a3980c07a861b22b92" exitCode=0 Jan 20 18:17:42 crc kubenswrapper[4558]: I0120 18:17:42.263227 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/efb8278422048e49f60447fe08bd0467bd977c487c5bfd9acd08760cddhnw5w" event={"ID":"1ed4e8c4-ba06-4fc9-b258-b51a3db829b5","Type":"ContainerDied","Data":"9c4d3b29cae3d7861d6b056fbb277f7758301ec5f2b733a3980c07a861b22b92"} Jan 20 18:17:44 crc kubenswrapper[4558]: I0120 18:17:44.283739 4558 generic.go:334] "Generic (PLEG): container finished" podID="1ed4e8c4-ba06-4fc9-b258-b51a3db829b5" containerID="86c707878b5dff38defced84a4c52fcd28333c47f7429e3f87b4f498f28dbff2" exitCode=0 Jan 20 18:17:44 crc kubenswrapper[4558]: I0120 18:17:44.283808 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/efb8278422048e49f60447fe08bd0467bd977c487c5bfd9acd08760cddhnw5w" event={"ID":"1ed4e8c4-ba06-4fc9-b258-b51a3db829b5","Type":"ContainerDied","Data":"86c707878b5dff38defced84a4c52fcd28333c47f7429e3f87b4f498f28dbff2"} Jan 20 18:17:45 crc kubenswrapper[4558]: I0120 18:17:45.294113 4558 generic.go:334] "Generic (PLEG): container finished" podID="1ed4e8c4-ba06-4fc9-b258-b51a3db829b5" containerID="721d66ec74e9f91df5b1b7645acd3d7d60752a9f1d484831f0187d9bac7dcb84" exitCode=0 Jan 20 18:17:45 crc kubenswrapper[4558]: I0120 18:17:45.294209 4558 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/efb8278422048e49f60447fe08bd0467bd977c487c5bfd9acd08760cddhnw5w" event={"ID":"1ed4e8c4-ba06-4fc9-b258-b51a3db829b5","Type":"ContainerDied","Data":"721d66ec74e9f91df5b1b7645acd3d7d60752a9f1d484831f0187d9bac7dcb84"} Jan 20 18:17:46 crc kubenswrapper[4558]: I0120 18:17:46.678548 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/efb8278422048e49f60447fe08bd0467bd977c487c5bfd9acd08760cddhnw5w" Jan 20 18:17:46 crc kubenswrapper[4558]: I0120 18:17:46.837733 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7vj85\" (UniqueName: \"kubernetes.io/projected/1ed4e8c4-ba06-4fc9-b258-b51a3db829b5-kube-api-access-7vj85\") pod \"1ed4e8c4-ba06-4fc9-b258-b51a3db829b5\" (UID: \"1ed4e8c4-ba06-4fc9-b258-b51a3db829b5\") " Jan 20 18:17:46 crc kubenswrapper[4558]: I0120 18:17:46.837825 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1ed4e8c4-ba06-4fc9-b258-b51a3db829b5-util\") pod \"1ed4e8c4-ba06-4fc9-b258-b51a3db829b5\" (UID: \"1ed4e8c4-ba06-4fc9-b258-b51a3db829b5\") " Jan 20 18:17:46 crc kubenswrapper[4558]: I0120 18:17:46.837968 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1ed4e8c4-ba06-4fc9-b258-b51a3db829b5-bundle\") pod \"1ed4e8c4-ba06-4fc9-b258-b51a3db829b5\" (UID: \"1ed4e8c4-ba06-4fc9-b258-b51a3db829b5\") " Jan 20 18:17:46 crc kubenswrapper[4558]: I0120 18:17:46.838768 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1ed4e8c4-ba06-4fc9-b258-b51a3db829b5-bundle" (OuterVolumeSpecName: "bundle") pod "1ed4e8c4-ba06-4fc9-b258-b51a3db829b5" (UID: "1ed4e8c4-ba06-4fc9-b258-b51a3db829b5"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:17:46 crc kubenswrapper[4558]: I0120 18:17:46.844008 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1ed4e8c4-ba06-4fc9-b258-b51a3db829b5-kube-api-access-7vj85" (OuterVolumeSpecName: "kube-api-access-7vj85") pod "1ed4e8c4-ba06-4fc9-b258-b51a3db829b5" (UID: "1ed4e8c4-ba06-4fc9-b258-b51a3db829b5"). InnerVolumeSpecName "kube-api-access-7vj85". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:17:46 crc kubenswrapper[4558]: I0120 18:17:46.847236 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1ed4e8c4-ba06-4fc9-b258-b51a3db829b5-util" (OuterVolumeSpecName: "util") pod "1ed4e8c4-ba06-4fc9-b258-b51a3db829b5" (UID: "1ed4e8c4-ba06-4fc9-b258-b51a3db829b5"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:17:46 crc kubenswrapper[4558]: I0120 18:17:46.939714 4558 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1ed4e8c4-ba06-4fc9-b258-b51a3db829b5-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:17:46 crc kubenswrapper[4558]: I0120 18:17:46.939745 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7vj85\" (UniqueName: \"kubernetes.io/projected/1ed4e8c4-ba06-4fc9-b258-b51a3db829b5-kube-api-access-7vj85\") on node \"crc\" DevicePath \"\"" Jan 20 18:17:46 crc kubenswrapper[4558]: I0120 18:17:46.939757 4558 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1ed4e8c4-ba06-4fc9-b258-b51a3db829b5-util\") on node \"crc\" DevicePath \"\"" Jan 20 18:17:47 crc kubenswrapper[4558]: I0120 18:17:47.314305 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/efb8278422048e49f60447fe08bd0467bd977c487c5bfd9acd08760cddhnw5w" event={"ID":"1ed4e8c4-ba06-4fc9-b258-b51a3db829b5","Type":"ContainerDied","Data":"6a1d5e41fd9aad79c80f35c28dd52566458de3c4914f46008b592fbcbd1c29e9"} Jan 20 18:17:47 crc kubenswrapper[4558]: I0120 18:17:47.314358 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6a1d5e41fd9aad79c80f35c28dd52566458de3c4914f46008b592fbcbd1c29e9" Jan 20 18:17:47 crc kubenswrapper[4558]: I0120 18:17:47.314379 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/efb8278422048e49f60447fe08bd0467bd977c487c5bfd9acd08760cddhnw5w" Jan 20 18:17:47 crc kubenswrapper[4558]: I0120 18:17:47.565458 4558 scope.go:117] "RemoveContainer" containerID="27314e1a8af2e8df8ecd2e7a06cbb630afbfecd8ca2338a1f199cf92f982f581" Jan 20 18:17:47 crc kubenswrapper[4558]: E0120 18:17:47.565928 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:17:48 crc kubenswrapper[4558]: I0120 18:17:48.590658 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["barbican-kuttl-tests/keystone-db-create-k8vsm"] Jan 20 18:17:48 crc kubenswrapper[4558]: E0120 18:17:48.591495 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ed4e8c4-ba06-4fc9-b258-b51a3db829b5" containerName="extract" Jan 20 18:17:48 crc kubenswrapper[4558]: I0120 18:17:48.591510 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ed4e8c4-ba06-4fc9-b258-b51a3db829b5" containerName="extract" Jan 20 18:17:48 crc kubenswrapper[4558]: E0120 18:17:48.591551 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ed4e8c4-ba06-4fc9-b258-b51a3db829b5" containerName="util" Jan 20 18:17:48 crc kubenswrapper[4558]: I0120 18:17:48.591559 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ed4e8c4-ba06-4fc9-b258-b51a3db829b5" containerName="util" Jan 20 18:17:48 crc kubenswrapper[4558]: E0120 18:17:48.591579 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ed4e8c4-ba06-4fc9-b258-b51a3db829b5" containerName="pull" Jan 20 18:17:48 crc kubenswrapper[4558]: I0120 18:17:48.591585 4558 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="1ed4e8c4-ba06-4fc9-b258-b51a3db829b5" containerName="pull" Jan 20 18:17:48 crc kubenswrapper[4558]: I0120 18:17:48.591812 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="1ed4e8c4-ba06-4fc9-b258-b51a3db829b5" containerName="extract" Jan 20 18:17:48 crc kubenswrapper[4558]: I0120 18:17:48.592555 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="barbican-kuttl-tests/keystone-db-create-k8vsm" Jan 20 18:17:48 crc kubenswrapper[4558]: I0120 18:17:48.594804 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["barbican-kuttl-tests/keystone-52d6-account-create-update-6n2pb"] Jan 20 18:17:48 crc kubenswrapper[4558]: I0120 18:17:48.595803 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="barbican-kuttl-tests/keystone-52d6-account-create-update-6n2pb" Jan 20 18:17:48 crc kubenswrapper[4558]: I0120 18:17:48.597339 4558 reflector.go:368] Caches populated for *v1.Secret from object-"barbican-kuttl-tests"/"keystone-db-secret" Jan 20 18:17:48 crc kubenswrapper[4558]: I0120 18:17:48.598683 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["barbican-kuttl-tests/keystone-db-create-k8vsm"] Jan 20 18:17:48 crc kubenswrapper[4558]: I0120 18:17:48.610186 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["barbican-kuttl-tests/keystone-52d6-account-create-update-6n2pb"] Jan 20 18:17:48 crc kubenswrapper[4558]: I0120 18:17:48.769734 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6c4ef399-fd08-45f3-af23-5e9faba4b549-operator-scripts\") pod \"keystone-db-create-k8vsm\" (UID: \"6c4ef399-fd08-45f3-af23-5e9faba4b549\") " pod="barbican-kuttl-tests/keystone-db-create-k8vsm" Jan 20 18:17:48 crc kubenswrapper[4558]: I0120 18:17:48.770118 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2rtx9\" (UniqueName: \"kubernetes.io/projected/6c4ef399-fd08-45f3-af23-5e9faba4b549-kube-api-access-2rtx9\") pod \"keystone-db-create-k8vsm\" (UID: \"6c4ef399-fd08-45f3-af23-5e9faba4b549\") " pod="barbican-kuttl-tests/keystone-db-create-k8vsm" Jan 20 18:17:48 crc kubenswrapper[4558]: I0120 18:17:48.770273 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2psnk\" (UniqueName: \"kubernetes.io/projected/06bc8238-a6fe-464c-a4ac-a62928c141c8-kube-api-access-2psnk\") pod \"keystone-52d6-account-create-update-6n2pb\" (UID: \"06bc8238-a6fe-464c-a4ac-a62928c141c8\") " pod="barbican-kuttl-tests/keystone-52d6-account-create-update-6n2pb" Jan 20 18:17:48 crc kubenswrapper[4558]: I0120 18:17:48.770421 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/06bc8238-a6fe-464c-a4ac-a62928c141c8-operator-scripts\") pod \"keystone-52d6-account-create-update-6n2pb\" (UID: \"06bc8238-a6fe-464c-a4ac-a62928c141c8\") " pod="barbican-kuttl-tests/keystone-52d6-account-create-update-6n2pb" Jan 20 18:17:48 crc kubenswrapper[4558]: I0120 18:17:48.871864 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2psnk\" (UniqueName: \"kubernetes.io/projected/06bc8238-a6fe-464c-a4ac-a62928c141c8-kube-api-access-2psnk\") pod \"keystone-52d6-account-create-update-6n2pb\" (UID: \"06bc8238-a6fe-464c-a4ac-a62928c141c8\") " 
pod="barbican-kuttl-tests/keystone-52d6-account-create-update-6n2pb" Jan 20 18:17:48 crc kubenswrapper[4558]: I0120 18:17:48.871960 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/06bc8238-a6fe-464c-a4ac-a62928c141c8-operator-scripts\") pod \"keystone-52d6-account-create-update-6n2pb\" (UID: \"06bc8238-a6fe-464c-a4ac-a62928c141c8\") " pod="barbican-kuttl-tests/keystone-52d6-account-create-update-6n2pb" Jan 20 18:17:48 crc kubenswrapper[4558]: I0120 18:17:48.872026 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6c4ef399-fd08-45f3-af23-5e9faba4b549-operator-scripts\") pod \"keystone-db-create-k8vsm\" (UID: \"6c4ef399-fd08-45f3-af23-5e9faba4b549\") " pod="barbican-kuttl-tests/keystone-db-create-k8vsm" Jan 20 18:17:48 crc kubenswrapper[4558]: I0120 18:17:48.872059 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2rtx9\" (UniqueName: \"kubernetes.io/projected/6c4ef399-fd08-45f3-af23-5e9faba4b549-kube-api-access-2rtx9\") pod \"keystone-db-create-k8vsm\" (UID: \"6c4ef399-fd08-45f3-af23-5e9faba4b549\") " pod="barbican-kuttl-tests/keystone-db-create-k8vsm" Jan 20 18:17:48 crc kubenswrapper[4558]: I0120 18:17:48.873216 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/06bc8238-a6fe-464c-a4ac-a62928c141c8-operator-scripts\") pod \"keystone-52d6-account-create-update-6n2pb\" (UID: \"06bc8238-a6fe-464c-a4ac-a62928c141c8\") " pod="barbican-kuttl-tests/keystone-52d6-account-create-update-6n2pb" Jan 20 18:17:48 crc kubenswrapper[4558]: I0120 18:17:48.873226 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6c4ef399-fd08-45f3-af23-5e9faba4b549-operator-scripts\") pod \"keystone-db-create-k8vsm\" (UID: \"6c4ef399-fd08-45f3-af23-5e9faba4b549\") " pod="barbican-kuttl-tests/keystone-db-create-k8vsm" Jan 20 18:17:48 crc kubenswrapper[4558]: I0120 18:17:48.889078 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2rtx9\" (UniqueName: \"kubernetes.io/projected/6c4ef399-fd08-45f3-af23-5e9faba4b549-kube-api-access-2rtx9\") pod \"keystone-db-create-k8vsm\" (UID: \"6c4ef399-fd08-45f3-af23-5e9faba4b549\") " pod="barbican-kuttl-tests/keystone-db-create-k8vsm" Jan 20 18:17:48 crc kubenswrapper[4558]: I0120 18:17:48.889549 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2psnk\" (UniqueName: \"kubernetes.io/projected/06bc8238-a6fe-464c-a4ac-a62928c141c8-kube-api-access-2psnk\") pod \"keystone-52d6-account-create-update-6n2pb\" (UID: \"06bc8238-a6fe-464c-a4ac-a62928c141c8\") " pod="barbican-kuttl-tests/keystone-52d6-account-create-update-6n2pb" Jan 20 18:17:48 crc kubenswrapper[4558]: I0120 18:17:48.909527 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="barbican-kuttl-tests/keystone-db-create-k8vsm" Jan 20 18:17:48 crc kubenswrapper[4558]: I0120 18:17:48.916561 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="barbican-kuttl-tests/keystone-52d6-account-create-update-6n2pb" Jan 20 18:17:49 crc kubenswrapper[4558]: I0120 18:17:49.338477 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["barbican-kuttl-tests/keystone-db-create-k8vsm"] Jan 20 18:17:49 crc kubenswrapper[4558]: W0120 18:17:49.344551 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6c4ef399_fd08_45f3_af23_5e9faba4b549.slice/crio-197b11b370b2eb16c5358c4a1cae12010ebfe9a2e22d19788da8192107fb0ca2 WatchSource:0}: Error finding container 197b11b370b2eb16c5358c4a1cae12010ebfe9a2e22d19788da8192107fb0ca2: Status 404 returned error can't find the container with id 197b11b370b2eb16c5358c4a1cae12010ebfe9a2e22d19788da8192107fb0ca2 Jan 20 18:17:49 crc kubenswrapper[4558]: W0120 18:17:49.398688 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod06bc8238_a6fe_464c_a4ac_a62928c141c8.slice/crio-589f2c3e9d98ffe023f0283834231bcccf6a37963156d88a859d6f70ec133234 WatchSource:0}: Error finding container 589f2c3e9d98ffe023f0283834231bcccf6a37963156d88a859d6f70ec133234: Status 404 returned error can't find the container with id 589f2c3e9d98ffe023f0283834231bcccf6a37963156d88a859d6f70ec133234 Jan 20 18:17:49 crc kubenswrapper[4558]: I0120 18:17:49.400095 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["barbican-kuttl-tests/keystone-52d6-account-create-update-6n2pb"] Jan 20 18:17:50 crc kubenswrapper[4558]: I0120 18:17:50.337114 4558 generic.go:334] "Generic (PLEG): container finished" podID="06bc8238-a6fe-464c-a4ac-a62928c141c8" containerID="8ad08df2841919c9374ed6c5a408a76eddcfe44ffe1422ebdd27adddca8c14d0" exitCode=0 Jan 20 18:17:50 crc kubenswrapper[4558]: I0120 18:17:50.337220 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/keystone-52d6-account-create-update-6n2pb" event={"ID":"06bc8238-a6fe-464c-a4ac-a62928c141c8","Type":"ContainerDied","Data":"8ad08df2841919c9374ed6c5a408a76eddcfe44ffe1422ebdd27adddca8c14d0"} Jan 20 18:17:50 crc kubenswrapper[4558]: I0120 18:17:50.337504 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/keystone-52d6-account-create-update-6n2pb" event={"ID":"06bc8238-a6fe-464c-a4ac-a62928c141c8","Type":"ContainerStarted","Data":"589f2c3e9d98ffe023f0283834231bcccf6a37963156d88a859d6f70ec133234"} Jan 20 18:17:50 crc kubenswrapper[4558]: I0120 18:17:50.339095 4558 generic.go:334] "Generic (PLEG): container finished" podID="6c4ef399-fd08-45f3-af23-5e9faba4b549" containerID="c72ca9b1161063f8a418b411934e48c62908e9c3cc9232ef76b56ef1ef6fa60f" exitCode=0 Jan 20 18:17:50 crc kubenswrapper[4558]: I0120 18:17:50.339125 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/keystone-db-create-k8vsm" event={"ID":"6c4ef399-fd08-45f3-af23-5e9faba4b549","Type":"ContainerDied","Data":"c72ca9b1161063f8a418b411934e48c62908e9c3cc9232ef76b56ef1ef6fa60f"} Jan 20 18:17:50 crc kubenswrapper[4558]: I0120 18:17:50.339183 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/keystone-db-create-k8vsm" event={"ID":"6c4ef399-fd08-45f3-af23-5e9faba4b549","Type":"ContainerStarted","Data":"197b11b370b2eb16c5358c4a1cae12010ebfe9a2e22d19788da8192107fb0ca2"} Jan 20 18:17:51 crc kubenswrapper[4558]: I0120 18:17:51.130357 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="barbican-kuttl-tests/rabbitmq-server-0" Jan 20 
18:17:51 crc kubenswrapper[4558]: I0120 18:17:51.646184 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="barbican-kuttl-tests/keystone-52d6-account-create-update-6n2pb" Jan 20 18:17:51 crc kubenswrapper[4558]: I0120 18:17:51.705976 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="barbican-kuttl-tests/keystone-db-create-k8vsm" Jan 20 18:17:51 crc kubenswrapper[4558]: I0120 18:17:51.817641 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2psnk\" (UniqueName: \"kubernetes.io/projected/06bc8238-a6fe-464c-a4ac-a62928c141c8-kube-api-access-2psnk\") pod \"06bc8238-a6fe-464c-a4ac-a62928c141c8\" (UID: \"06bc8238-a6fe-464c-a4ac-a62928c141c8\") " Jan 20 18:17:51 crc kubenswrapper[4558]: I0120 18:17:51.817807 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2rtx9\" (UniqueName: \"kubernetes.io/projected/6c4ef399-fd08-45f3-af23-5e9faba4b549-kube-api-access-2rtx9\") pod \"6c4ef399-fd08-45f3-af23-5e9faba4b549\" (UID: \"6c4ef399-fd08-45f3-af23-5e9faba4b549\") " Jan 20 18:17:51 crc kubenswrapper[4558]: I0120 18:17:51.817914 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6c4ef399-fd08-45f3-af23-5e9faba4b549-operator-scripts\") pod \"6c4ef399-fd08-45f3-af23-5e9faba4b549\" (UID: \"6c4ef399-fd08-45f3-af23-5e9faba4b549\") " Jan 20 18:17:51 crc kubenswrapper[4558]: I0120 18:17:51.817963 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/06bc8238-a6fe-464c-a4ac-a62928c141c8-operator-scripts\") pod \"06bc8238-a6fe-464c-a4ac-a62928c141c8\" (UID: \"06bc8238-a6fe-464c-a4ac-a62928c141c8\") " Jan 20 18:17:51 crc kubenswrapper[4558]: I0120 18:17:51.818407 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6c4ef399-fd08-45f3-af23-5e9faba4b549-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "6c4ef399-fd08-45f3-af23-5e9faba4b549" (UID: "6c4ef399-fd08-45f3-af23-5e9faba4b549"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:17:51 crc kubenswrapper[4558]: I0120 18:17:51.818443 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/06bc8238-a6fe-464c-a4ac-a62928c141c8-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "06bc8238-a6fe-464c-a4ac-a62928c141c8" (UID: "06bc8238-a6fe-464c-a4ac-a62928c141c8"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:17:51 crc kubenswrapper[4558]: I0120 18:17:51.824435 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/06bc8238-a6fe-464c-a4ac-a62928c141c8-kube-api-access-2psnk" (OuterVolumeSpecName: "kube-api-access-2psnk") pod "06bc8238-a6fe-464c-a4ac-a62928c141c8" (UID: "06bc8238-a6fe-464c-a4ac-a62928c141c8"). InnerVolumeSpecName "kube-api-access-2psnk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:17:51 crc kubenswrapper[4558]: I0120 18:17:51.825056 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6c4ef399-fd08-45f3-af23-5e9faba4b549-kube-api-access-2rtx9" (OuterVolumeSpecName: "kube-api-access-2rtx9") pod "6c4ef399-fd08-45f3-af23-5e9faba4b549" (UID: "6c4ef399-fd08-45f3-af23-5e9faba4b549"). InnerVolumeSpecName "kube-api-access-2rtx9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:17:51 crc kubenswrapper[4558]: I0120 18:17:51.920064 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6c4ef399-fd08-45f3-af23-5e9faba4b549-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 18:17:51 crc kubenswrapper[4558]: I0120 18:17:51.920099 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/06bc8238-a6fe-464c-a4ac-a62928c141c8-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 18:17:51 crc kubenswrapper[4558]: I0120 18:17:51.920115 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2psnk\" (UniqueName: \"kubernetes.io/projected/06bc8238-a6fe-464c-a4ac-a62928c141c8-kube-api-access-2psnk\") on node \"crc\" DevicePath \"\"" Jan 20 18:17:51 crc kubenswrapper[4558]: I0120 18:17:51.920127 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2rtx9\" (UniqueName: \"kubernetes.io/projected/6c4ef399-fd08-45f3-af23-5e9faba4b549-kube-api-access-2rtx9\") on node \"crc\" DevicePath \"\"" Jan 20 18:17:52 crc kubenswrapper[4558]: I0120 18:17:52.355571 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/keystone-52d6-account-create-update-6n2pb" event={"ID":"06bc8238-a6fe-464c-a4ac-a62928c141c8","Type":"ContainerDied","Data":"589f2c3e9d98ffe023f0283834231bcccf6a37963156d88a859d6f70ec133234"} Jan 20 18:17:52 crc kubenswrapper[4558]: I0120 18:17:52.355614 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="barbican-kuttl-tests/keystone-52d6-account-create-update-6n2pb" Jan 20 18:17:52 crc kubenswrapper[4558]: I0120 18:17:52.355625 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="589f2c3e9d98ffe023f0283834231bcccf6a37963156d88a859d6f70ec133234" Jan 20 18:17:52 crc kubenswrapper[4558]: I0120 18:17:52.357719 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/keystone-db-create-k8vsm" event={"ID":"6c4ef399-fd08-45f3-af23-5e9faba4b549","Type":"ContainerDied","Data":"197b11b370b2eb16c5358c4a1cae12010ebfe9a2e22d19788da8192107fb0ca2"} Jan 20 18:17:52 crc kubenswrapper[4558]: I0120 18:17:52.357809 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="197b11b370b2eb16c5358c4a1cae12010ebfe9a2e22d19788da8192107fb0ca2" Jan 20 18:17:52 crc kubenswrapper[4558]: I0120 18:17:52.357818 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="barbican-kuttl-tests/keystone-db-create-k8vsm" Jan 20 18:17:54 crc kubenswrapper[4558]: I0120 18:17:54.166356 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["barbican-kuttl-tests/keystone-db-sync-d4kq6"] Jan 20 18:17:54 crc kubenswrapper[4558]: E0120 18:17:54.167095 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c4ef399-fd08-45f3-af23-5e9faba4b549" containerName="mariadb-database-create" Jan 20 18:17:54 crc kubenswrapper[4558]: I0120 18:17:54.167109 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c4ef399-fd08-45f3-af23-5e9faba4b549" containerName="mariadb-database-create" Jan 20 18:17:54 crc kubenswrapper[4558]: E0120 18:17:54.167124 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06bc8238-a6fe-464c-a4ac-a62928c141c8" containerName="mariadb-account-create-update" Jan 20 18:17:54 crc kubenswrapper[4558]: I0120 18:17:54.167130 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="06bc8238-a6fe-464c-a4ac-a62928c141c8" containerName="mariadb-account-create-update" Jan 20 18:17:54 crc kubenswrapper[4558]: I0120 18:17:54.167307 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c4ef399-fd08-45f3-af23-5e9faba4b549" containerName="mariadb-database-create" Jan 20 18:17:54 crc kubenswrapper[4558]: I0120 18:17:54.167325 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="06bc8238-a6fe-464c-a4ac-a62928c141c8" containerName="mariadb-account-create-update" Jan 20 18:17:54 crc kubenswrapper[4558]: I0120 18:17:54.167863 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="barbican-kuttl-tests/keystone-db-sync-d4kq6" Jan 20 18:17:54 crc kubenswrapper[4558]: I0120 18:17:54.170116 4558 reflector.go:368] Caches populated for *v1.Secret from object-"barbican-kuttl-tests"/"keystone" Jan 20 18:17:54 crc kubenswrapper[4558]: I0120 18:17:54.170546 4558 reflector.go:368] Caches populated for *v1.Secret from object-"barbican-kuttl-tests"/"keystone-keystone-dockercfg-brg9r" Jan 20 18:17:54 crc kubenswrapper[4558]: I0120 18:17:54.170680 4558 reflector.go:368] Caches populated for *v1.Secret from object-"barbican-kuttl-tests"/"keystone-scripts" Jan 20 18:17:54 crc kubenswrapper[4558]: I0120 18:17:54.170916 4558 reflector.go:368] Caches populated for *v1.Secret from object-"barbican-kuttl-tests"/"keystone-config-data" Jan 20 18:17:54 crc kubenswrapper[4558]: I0120 18:17:54.178256 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["barbican-kuttl-tests/keystone-db-sync-d4kq6"] Jan 20 18:17:54 crc kubenswrapper[4558]: I0120 18:17:54.360981 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2637e631-04ad-4da0-82ce-cbbbaf50ad0b-config-data\") pod \"keystone-db-sync-d4kq6\" (UID: \"2637e631-04ad-4da0-82ce-cbbbaf50ad0b\") " pod="barbican-kuttl-tests/keystone-db-sync-d4kq6" Jan 20 18:17:54 crc kubenswrapper[4558]: I0120 18:17:54.361053 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xjpzk\" (UniqueName: \"kubernetes.io/projected/2637e631-04ad-4da0-82ce-cbbbaf50ad0b-kube-api-access-xjpzk\") pod \"keystone-db-sync-d4kq6\" (UID: \"2637e631-04ad-4da0-82ce-cbbbaf50ad0b\") " pod="barbican-kuttl-tests/keystone-db-sync-d4kq6" Jan 20 18:17:54 crc kubenswrapper[4558]: I0120 18:17:54.462989 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" 
(UniqueName: \"kubernetes.io/secret/2637e631-04ad-4da0-82ce-cbbbaf50ad0b-config-data\") pod \"keystone-db-sync-d4kq6\" (UID: \"2637e631-04ad-4da0-82ce-cbbbaf50ad0b\") " pod="barbican-kuttl-tests/keystone-db-sync-d4kq6" Jan 20 18:17:54 crc kubenswrapper[4558]: I0120 18:17:54.463075 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xjpzk\" (UniqueName: \"kubernetes.io/projected/2637e631-04ad-4da0-82ce-cbbbaf50ad0b-kube-api-access-xjpzk\") pod \"keystone-db-sync-d4kq6\" (UID: \"2637e631-04ad-4da0-82ce-cbbbaf50ad0b\") " pod="barbican-kuttl-tests/keystone-db-sync-d4kq6" Jan 20 18:17:54 crc kubenswrapper[4558]: I0120 18:17:54.469363 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2637e631-04ad-4da0-82ce-cbbbaf50ad0b-config-data\") pod \"keystone-db-sync-d4kq6\" (UID: \"2637e631-04ad-4da0-82ce-cbbbaf50ad0b\") " pod="barbican-kuttl-tests/keystone-db-sync-d4kq6" Jan 20 18:17:54 crc kubenswrapper[4558]: I0120 18:17:54.478024 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xjpzk\" (UniqueName: \"kubernetes.io/projected/2637e631-04ad-4da0-82ce-cbbbaf50ad0b-kube-api-access-xjpzk\") pod \"keystone-db-sync-d4kq6\" (UID: \"2637e631-04ad-4da0-82ce-cbbbaf50ad0b\") " pod="barbican-kuttl-tests/keystone-db-sync-d4kq6" Jan 20 18:17:54 crc kubenswrapper[4558]: I0120 18:17:54.487097 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="barbican-kuttl-tests/keystone-db-sync-d4kq6" Jan 20 18:17:54 crc kubenswrapper[4558]: I0120 18:17:54.892462 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["barbican-kuttl-tests/keystone-db-sync-d4kq6"] Jan 20 18:17:55 crc kubenswrapper[4558]: I0120 18:17:55.382470 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/keystone-db-sync-d4kq6" event={"ID":"2637e631-04ad-4da0-82ce-cbbbaf50ad0b","Type":"ContainerStarted","Data":"567c1166d1795bcb0f363877401ee8881e1b4eb50601ebfc1d35d5fdbff364f4"} Jan 20 18:17:56 crc kubenswrapper[4558]: I0120 18:17:56.392112 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/keystone-db-sync-d4kq6" event={"ID":"2637e631-04ad-4da0-82ce-cbbbaf50ad0b","Type":"ContainerStarted","Data":"8fd31eed549f531993d83f4067d71c52895d4572a31caa0756de4e05c0bc8b3a"} Jan 20 18:17:56 crc kubenswrapper[4558]: I0120 18:17:56.417241 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="barbican-kuttl-tests/keystone-db-sync-d4kq6" podStartSLOduration=1.7342975379999999 podStartE2EDuration="2.417217919s" podCreationTimestamp="2026-01-20 18:17:54 +0000 UTC" firstStartedPulling="2026-01-20 18:17:54.900989473 +0000 UTC m=+5768.661327440" lastFinishedPulling="2026-01-20 18:17:55.583909854 +0000 UTC m=+5769.344247821" observedRunningTime="2026-01-20 18:17:56.413849386 +0000 UTC m=+5770.174187353" watchObservedRunningTime="2026-01-20 18:17:56.417217919 +0000 UTC m=+5770.177555876" Jan 20 18:17:58 crc kubenswrapper[4558]: I0120 18:17:58.409218 4558 generic.go:334] "Generic (PLEG): container finished" podID="2637e631-04ad-4da0-82ce-cbbbaf50ad0b" containerID="8fd31eed549f531993d83f4067d71c52895d4572a31caa0756de4e05c0bc8b3a" exitCode=0 Jan 20 18:17:58 crc kubenswrapper[4558]: I0120 18:17:58.409302 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/keystone-db-sync-d4kq6" 
event={"ID":"2637e631-04ad-4da0-82ce-cbbbaf50ad0b","Type":"ContainerDied","Data":"8fd31eed549f531993d83f4067d71c52895d4572a31caa0756de4e05c0bc8b3a"} Jan 20 18:17:58 crc kubenswrapper[4558]: I0120 18:17:58.572675 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-5db5bc648b-tc7cd"] Jan 20 18:17:58 crc kubenswrapper[4558]: I0120 18:17:58.573501 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-5db5bc648b-tc7cd" Jan 20 18:17:58 crc kubenswrapper[4558]: I0120 18:17:58.575457 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-service-cert" Jan 20 18:17:58 crc kubenswrapper[4558]: I0120 18:17:58.580419 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-xz4qp" Jan 20 18:17:58 crc kubenswrapper[4558]: I0120 18:17:58.589877 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-5db5bc648b-tc7cd"] Jan 20 18:17:58 crc kubenswrapper[4558]: I0120 18:17:58.637413 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/eaed2fe0-4c9e-4d02-ba03-b6f55fffad21-webhook-cert\") pod \"barbican-operator-controller-manager-5db5bc648b-tc7cd\" (UID: \"eaed2fe0-4c9e-4d02-ba03-b6f55fffad21\") " pod="openstack-operators/barbican-operator-controller-manager-5db5bc648b-tc7cd" Jan 20 18:17:58 crc kubenswrapper[4558]: I0120 18:17:58.637591 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cj9qr\" (UniqueName: \"kubernetes.io/projected/eaed2fe0-4c9e-4d02-ba03-b6f55fffad21-kube-api-access-cj9qr\") pod \"barbican-operator-controller-manager-5db5bc648b-tc7cd\" (UID: \"eaed2fe0-4c9e-4d02-ba03-b6f55fffad21\") " pod="openstack-operators/barbican-operator-controller-manager-5db5bc648b-tc7cd" Jan 20 18:17:58 crc kubenswrapper[4558]: I0120 18:17:58.637629 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/eaed2fe0-4c9e-4d02-ba03-b6f55fffad21-apiservice-cert\") pod \"barbican-operator-controller-manager-5db5bc648b-tc7cd\" (UID: \"eaed2fe0-4c9e-4d02-ba03-b6f55fffad21\") " pod="openstack-operators/barbican-operator-controller-manager-5db5bc648b-tc7cd" Jan 20 18:17:58 crc kubenswrapper[4558]: I0120 18:17:58.739369 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cj9qr\" (UniqueName: \"kubernetes.io/projected/eaed2fe0-4c9e-4d02-ba03-b6f55fffad21-kube-api-access-cj9qr\") pod \"barbican-operator-controller-manager-5db5bc648b-tc7cd\" (UID: \"eaed2fe0-4c9e-4d02-ba03-b6f55fffad21\") " pod="openstack-operators/barbican-operator-controller-manager-5db5bc648b-tc7cd" Jan 20 18:17:58 crc kubenswrapper[4558]: I0120 18:17:58.739427 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/eaed2fe0-4c9e-4d02-ba03-b6f55fffad21-apiservice-cert\") pod \"barbican-operator-controller-manager-5db5bc648b-tc7cd\" (UID: \"eaed2fe0-4c9e-4d02-ba03-b6f55fffad21\") " pod="openstack-operators/barbican-operator-controller-manager-5db5bc648b-tc7cd" Jan 20 18:17:58 crc kubenswrapper[4558]: I0120 
18:17:58.739545 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/eaed2fe0-4c9e-4d02-ba03-b6f55fffad21-webhook-cert\") pod \"barbican-operator-controller-manager-5db5bc648b-tc7cd\" (UID: \"eaed2fe0-4c9e-4d02-ba03-b6f55fffad21\") " pod="openstack-operators/barbican-operator-controller-manager-5db5bc648b-tc7cd" Jan 20 18:17:58 crc kubenswrapper[4558]: I0120 18:17:58.745885 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/eaed2fe0-4c9e-4d02-ba03-b6f55fffad21-apiservice-cert\") pod \"barbican-operator-controller-manager-5db5bc648b-tc7cd\" (UID: \"eaed2fe0-4c9e-4d02-ba03-b6f55fffad21\") " pod="openstack-operators/barbican-operator-controller-manager-5db5bc648b-tc7cd" Jan 20 18:17:58 crc kubenswrapper[4558]: I0120 18:17:58.748774 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/eaed2fe0-4c9e-4d02-ba03-b6f55fffad21-webhook-cert\") pod \"barbican-operator-controller-manager-5db5bc648b-tc7cd\" (UID: \"eaed2fe0-4c9e-4d02-ba03-b6f55fffad21\") " pod="openstack-operators/barbican-operator-controller-manager-5db5bc648b-tc7cd" Jan 20 18:17:58 crc kubenswrapper[4558]: I0120 18:17:58.760453 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cj9qr\" (UniqueName: \"kubernetes.io/projected/eaed2fe0-4c9e-4d02-ba03-b6f55fffad21-kube-api-access-cj9qr\") pod \"barbican-operator-controller-manager-5db5bc648b-tc7cd\" (UID: \"eaed2fe0-4c9e-4d02-ba03-b6f55fffad21\") " pod="openstack-operators/barbican-operator-controller-manager-5db5bc648b-tc7cd" Jan 20 18:17:58 crc kubenswrapper[4558]: I0120 18:17:58.917062 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-5db5bc648b-tc7cd" Jan 20 18:17:59 crc kubenswrapper[4558]: I0120 18:17:59.311227 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-5db5bc648b-tc7cd"] Jan 20 18:17:59 crc kubenswrapper[4558]: W0120 18:17:59.315015 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podeaed2fe0_4c9e_4d02_ba03_b6f55fffad21.slice/crio-a46ec77eb14d7f366467e2e555266298d69b73caa2527e2e59727247ea34da92 WatchSource:0}: Error finding container a46ec77eb14d7f366467e2e555266298d69b73caa2527e2e59727247ea34da92: Status 404 returned error can't find the container with id a46ec77eb14d7f366467e2e555266298d69b73caa2527e2e59727247ea34da92 Jan 20 18:17:59 crc kubenswrapper[4558]: I0120 18:17:59.417620 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-5db5bc648b-tc7cd" event={"ID":"eaed2fe0-4c9e-4d02-ba03-b6f55fffad21","Type":"ContainerStarted","Data":"a46ec77eb14d7f366467e2e555266298d69b73caa2527e2e59727247ea34da92"} Jan 20 18:17:59 crc kubenswrapper[4558]: I0120 18:17:59.698455 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="barbican-kuttl-tests/keystone-db-sync-d4kq6" Jan 20 18:17:59 crc kubenswrapper[4558]: I0120 18:17:59.755523 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2637e631-04ad-4da0-82ce-cbbbaf50ad0b-config-data\") pod \"2637e631-04ad-4da0-82ce-cbbbaf50ad0b\" (UID: \"2637e631-04ad-4da0-82ce-cbbbaf50ad0b\") " Jan 20 18:17:59 crc kubenswrapper[4558]: I0120 18:17:59.755574 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xjpzk\" (UniqueName: \"kubernetes.io/projected/2637e631-04ad-4da0-82ce-cbbbaf50ad0b-kube-api-access-xjpzk\") pod \"2637e631-04ad-4da0-82ce-cbbbaf50ad0b\" (UID: \"2637e631-04ad-4da0-82ce-cbbbaf50ad0b\") " Jan 20 18:17:59 crc kubenswrapper[4558]: I0120 18:17:59.760403 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2637e631-04ad-4da0-82ce-cbbbaf50ad0b-kube-api-access-xjpzk" (OuterVolumeSpecName: "kube-api-access-xjpzk") pod "2637e631-04ad-4da0-82ce-cbbbaf50ad0b" (UID: "2637e631-04ad-4da0-82ce-cbbbaf50ad0b"). InnerVolumeSpecName "kube-api-access-xjpzk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:17:59 crc kubenswrapper[4558]: I0120 18:17:59.786069 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2637e631-04ad-4da0-82ce-cbbbaf50ad0b-config-data" (OuterVolumeSpecName: "config-data") pod "2637e631-04ad-4da0-82ce-cbbbaf50ad0b" (UID: "2637e631-04ad-4da0-82ce-cbbbaf50ad0b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:17:59 crc kubenswrapper[4558]: I0120 18:17:59.857083 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2637e631-04ad-4da0-82ce-cbbbaf50ad0b-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 18:17:59 crc kubenswrapper[4558]: I0120 18:17:59.857119 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xjpzk\" (UniqueName: \"kubernetes.io/projected/2637e631-04ad-4da0-82ce-cbbbaf50ad0b-kube-api-access-xjpzk\") on node \"crc\" DevicePath \"\"" Jan 20 18:18:00 crc kubenswrapper[4558]: I0120 18:18:00.427539 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/keystone-db-sync-d4kq6" event={"ID":"2637e631-04ad-4da0-82ce-cbbbaf50ad0b","Type":"ContainerDied","Data":"567c1166d1795bcb0f363877401ee8881e1b4eb50601ebfc1d35d5fdbff364f4"} Jan 20 18:18:00 crc kubenswrapper[4558]: I0120 18:18:00.427588 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="567c1166d1795bcb0f363877401ee8881e1b4eb50601ebfc1d35d5fdbff364f4" Jan 20 18:18:00 crc kubenswrapper[4558]: I0120 18:18:00.428175 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="barbican-kuttl-tests/keystone-db-sync-d4kq6" Jan 20 18:18:00 crc kubenswrapper[4558]: I0120 18:18:00.610958 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["barbican-kuttl-tests/keystone-bootstrap-rl77j"] Jan 20 18:18:00 crc kubenswrapper[4558]: E0120 18:18:00.611299 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2637e631-04ad-4da0-82ce-cbbbaf50ad0b" containerName="keystone-db-sync" Jan 20 18:18:00 crc kubenswrapper[4558]: I0120 18:18:00.611312 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="2637e631-04ad-4da0-82ce-cbbbaf50ad0b" containerName="keystone-db-sync" Jan 20 18:18:00 crc kubenswrapper[4558]: I0120 18:18:00.611444 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="2637e631-04ad-4da0-82ce-cbbbaf50ad0b" containerName="keystone-db-sync" Jan 20 18:18:00 crc kubenswrapper[4558]: I0120 18:18:00.611962 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="barbican-kuttl-tests/keystone-bootstrap-rl77j" Jan 20 18:18:00 crc kubenswrapper[4558]: I0120 18:18:00.615058 4558 reflector.go:368] Caches populated for *v1.Secret from object-"barbican-kuttl-tests"/"keystone" Jan 20 18:18:00 crc kubenswrapper[4558]: I0120 18:18:00.615158 4558 reflector.go:368] Caches populated for *v1.Secret from object-"barbican-kuttl-tests"/"osp-secret" Jan 20 18:18:00 crc kubenswrapper[4558]: I0120 18:18:00.615066 4558 reflector.go:368] Caches populated for *v1.Secret from object-"barbican-kuttl-tests"/"keystone-scripts" Jan 20 18:18:00 crc kubenswrapper[4558]: I0120 18:18:00.615369 4558 reflector.go:368] Caches populated for *v1.Secret from object-"barbican-kuttl-tests"/"keystone-keystone-dockercfg-brg9r" Jan 20 18:18:00 crc kubenswrapper[4558]: I0120 18:18:00.617112 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["barbican-kuttl-tests/keystone-bootstrap-rl77j"] Jan 20 18:18:00 crc kubenswrapper[4558]: I0120 18:18:00.617924 4558 reflector.go:368] Caches populated for *v1.Secret from object-"barbican-kuttl-tests"/"keystone-config-data" Jan 20 18:18:00 crc kubenswrapper[4558]: I0120 18:18:00.773277 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aba8a877-976e-42f7-9a09-a6d491c94db9-scripts\") pod \"keystone-bootstrap-rl77j\" (UID: \"aba8a877-976e-42f7-9a09-a6d491c94db9\") " pod="barbican-kuttl-tests/keystone-bootstrap-rl77j" Jan 20 18:18:00 crc kubenswrapper[4558]: I0120 18:18:00.773829 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/aba8a877-976e-42f7-9a09-a6d491c94db9-credential-keys\") pod \"keystone-bootstrap-rl77j\" (UID: \"aba8a877-976e-42f7-9a09-a6d491c94db9\") " pod="barbican-kuttl-tests/keystone-bootstrap-rl77j" Jan 20 18:18:00 crc kubenswrapper[4558]: I0120 18:18:00.773959 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/aba8a877-976e-42f7-9a09-a6d491c94db9-fernet-keys\") pod \"keystone-bootstrap-rl77j\" (UID: \"aba8a877-976e-42f7-9a09-a6d491c94db9\") " pod="barbican-kuttl-tests/keystone-bootstrap-rl77j" Jan 20 18:18:00 crc kubenswrapper[4558]: I0120 18:18:00.774005 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aba8a877-976e-42f7-9a09-a6d491c94db9-config-data\") pod 
\"keystone-bootstrap-rl77j\" (UID: \"aba8a877-976e-42f7-9a09-a6d491c94db9\") " pod="barbican-kuttl-tests/keystone-bootstrap-rl77j" Jan 20 18:18:00 crc kubenswrapper[4558]: I0120 18:18:00.774052 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g8t4h\" (UniqueName: \"kubernetes.io/projected/aba8a877-976e-42f7-9a09-a6d491c94db9-kube-api-access-g8t4h\") pod \"keystone-bootstrap-rl77j\" (UID: \"aba8a877-976e-42f7-9a09-a6d491c94db9\") " pod="barbican-kuttl-tests/keystone-bootstrap-rl77j" Jan 20 18:18:00 crc kubenswrapper[4558]: I0120 18:18:00.875478 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aba8a877-976e-42f7-9a09-a6d491c94db9-scripts\") pod \"keystone-bootstrap-rl77j\" (UID: \"aba8a877-976e-42f7-9a09-a6d491c94db9\") " pod="barbican-kuttl-tests/keystone-bootstrap-rl77j" Jan 20 18:18:00 crc kubenswrapper[4558]: I0120 18:18:00.875563 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/aba8a877-976e-42f7-9a09-a6d491c94db9-credential-keys\") pod \"keystone-bootstrap-rl77j\" (UID: \"aba8a877-976e-42f7-9a09-a6d491c94db9\") " pod="barbican-kuttl-tests/keystone-bootstrap-rl77j" Jan 20 18:18:00 crc kubenswrapper[4558]: I0120 18:18:00.875622 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/aba8a877-976e-42f7-9a09-a6d491c94db9-fernet-keys\") pod \"keystone-bootstrap-rl77j\" (UID: \"aba8a877-976e-42f7-9a09-a6d491c94db9\") " pod="barbican-kuttl-tests/keystone-bootstrap-rl77j" Jan 20 18:18:00 crc kubenswrapper[4558]: I0120 18:18:00.875665 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aba8a877-976e-42f7-9a09-a6d491c94db9-config-data\") pod \"keystone-bootstrap-rl77j\" (UID: \"aba8a877-976e-42f7-9a09-a6d491c94db9\") " pod="barbican-kuttl-tests/keystone-bootstrap-rl77j" Jan 20 18:18:00 crc kubenswrapper[4558]: I0120 18:18:00.875723 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g8t4h\" (UniqueName: \"kubernetes.io/projected/aba8a877-976e-42f7-9a09-a6d491c94db9-kube-api-access-g8t4h\") pod \"keystone-bootstrap-rl77j\" (UID: \"aba8a877-976e-42f7-9a09-a6d491c94db9\") " pod="barbican-kuttl-tests/keystone-bootstrap-rl77j" Jan 20 18:18:00 crc kubenswrapper[4558]: I0120 18:18:00.880275 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/aba8a877-976e-42f7-9a09-a6d491c94db9-credential-keys\") pod \"keystone-bootstrap-rl77j\" (UID: \"aba8a877-976e-42f7-9a09-a6d491c94db9\") " pod="barbican-kuttl-tests/keystone-bootstrap-rl77j" Jan 20 18:18:00 crc kubenswrapper[4558]: I0120 18:18:00.880394 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aba8a877-976e-42f7-9a09-a6d491c94db9-config-data\") pod \"keystone-bootstrap-rl77j\" (UID: \"aba8a877-976e-42f7-9a09-a6d491c94db9\") " pod="barbican-kuttl-tests/keystone-bootstrap-rl77j" Jan 20 18:18:00 crc kubenswrapper[4558]: I0120 18:18:00.880741 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/aba8a877-976e-42f7-9a09-a6d491c94db9-fernet-keys\") pod \"keystone-bootstrap-rl77j\" (UID: 
\"aba8a877-976e-42f7-9a09-a6d491c94db9\") " pod="barbican-kuttl-tests/keystone-bootstrap-rl77j" Jan 20 18:18:00 crc kubenswrapper[4558]: I0120 18:18:00.881742 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aba8a877-976e-42f7-9a09-a6d491c94db9-scripts\") pod \"keystone-bootstrap-rl77j\" (UID: \"aba8a877-976e-42f7-9a09-a6d491c94db9\") " pod="barbican-kuttl-tests/keystone-bootstrap-rl77j" Jan 20 18:18:00 crc kubenswrapper[4558]: I0120 18:18:00.891328 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g8t4h\" (UniqueName: \"kubernetes.io/projected/aba8a877-976e-42f7-9a09-a6d491c94db9-kube-api-access-g8t4h\") pod \"keystone-bootstrap-rl77j\" (UID: \"aba8a877-976e-42f7-9a09-a6d491c94db9\") " pod="barbican-kuttl-tests/keystone-bootstrap-rl77j" Jan 20 18:18:00 crc kubenswrapper[4558]: I0120 18:18:00.928775 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="barbican-kuttl-tests/keystone-bootstrap-rl77j" Jan 20 18:18:01 crc kubenswrapper[4558]: I0120 18:18:01.314839 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["barbican-kuttl-tests/keystone-bootstrap-rl77j"] Jan 20 18:18:01 crc kubenswrapper[4558]: W0120 18:18:01.315089 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaba8a877_976e_42f7_9a09_a6d491c94db9.slice/crio-070096224b2bb6e212442099afcfdecc82ac676f4061b4323c445d1b79b940bd WatchSource:0}: Error finding container 070096224b2bb6e212442099afcfdecc82ac676f4061b4323c445d1b79b940bd: Status 404 returned error can't find the container with id 070096224b2bb6e212442099afcfdecc82ac676f4061b4323c445d1b79b940bd Jan 20 18:18:01 crc kubenswrapper[4558]: I0120 18:18:01.441617 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/keystone-bootstrap-rl77j" event={"ID":"aba8a877-976e-42f7-9a09-a6d491c94db9","Type":"ContainerStarted","Data":"070096224b2bb6e212442099afcfdecc82ac676f4061b4323c445d1b79b940bd"} Jan 20 18:18:01 crc kubenswrapper[4558]: I0120 18:18:01.458447 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="barbican-kuttl-tests/keystone-bootstrap-rl77j" podStartSLOduration=1.458420703 podStartE2EDuration="1.458420703s" podCreationTimestamp="2026-01-20 18:18:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:18:01.455682755 +0000 UTC m=+5775.216020722" watchObservedRunningTime="2026-01-20 18:18:01.458420703 +0000 UTC m=+5775.218758670" Jan 20 18:18:02 crc kubenswrapper[4558]: I0120 18:18:02.454728 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/keystone-bootstrap-rl77j" event={"ID":"aba8a877-976e-42f7-9a09-a6d491c94db9","Type":"ContainerStarted","Data":"5be5d179a0de79ea40097ce2e9526a627b476ff2431d7bdf6a239736855da624"} Jan 20 18:18:02 crc kubenswrapper[4558]: I0120 18:18:02.566389 4558 scope.go:117] "RemoveContainer" containerID="27314e1a8af2e8df8ecd2e7a06cbb630afbfecd8ca2338a1f199cf92f982f581" Jan 20 18:18:02 crc kubenswrapper[4558]: E0120 18:18:02.566743 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:18:04 crc kubenswrapper[4558]: I0120 18:18:04.469218 4558 generic.go:334] "Generic (PLEG): container finished" podID="aba8a877-976e-42f7-9a09-a6d491c94db9" containerID="5be5d179a0de79ea40097ce2e9526a627b476ff2431d7bdf6a239736855da624" exitCode=0 Jan 20 18:18:04 crc kubenswrapper[4558]: I0120 18:18:04.469260 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/keystone-bootstrap-rl77j" event={"ID":"aba8a877-976e-42f7-9a09-a6d491c94db9","Type":"ContainerDied","Data":"5be5d179a0de79ea40097ce2e9526a627b476ff2431d7bdf6a239736855da624"} Jan 20 18:18:05 crc kubenswrapper[4558]: I0120 18:18:05.477508 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-5db5bc648b-tc7cd" event={"ID":"eaed2fe0-4c9e-4d02-ba03-b6f55fffad21","Type":"ContainerStarted","Data":"62c474691fd3e1fa9a0d366b3db77d1cb8baef6b35fb90f8736ed4b835ffd09d"} Jan 20 18:18:05 crc kubenswrapper[4558]: I0120 18:18:05.496531 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-5db5bc648b-tc7cd" podStartSLOduration=2.377910206 podStartE2EDuration="7.496513069s" podCreationTimestamp="2026-01-20 18:17:58 +0000 UTC" firstStartedPulling="2026-01-20 18:17:59.317464223 +0000 UTC m=+5773.077802191" lastFinishedPulling="2026-01-20 18:18:04.436067087 +0000 UTC m=+5778.196405054" observedRunningTime="2026-01-20 18:18:05.491034218 +0000 UTC m=+5779.251372185" watchObservedRunningTime="2026-01-20 18:18:05.496513069 +0000 UTC m=+5779.256851036" Jan 20 18:18:05 crc kubenswrapper[4558]: I0120 18:18:05.775567 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="barbican-kuttl-tests/keystone-bootstrap-rl77j" Jan 20 18:18:05 crc kubenswrapper[4558]: I0120 18:18:05.860031 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aba8a877-976e-42f7-9a09-a6d491c94db9-config-data\") pod \"aba8a877-976e-42f7-9a09-a6d491c94db9\" (UID: \"aba8a877-976e-42f7-9a09-a6d491c94db9\") " Jan 20 18:18:05 crc kubenswrapper[4558]: I0120 18:18:05.860125 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/aba8a877-976e-42f7-9a09-a6d491c94db9-credential-keys\") pod \"aba8a877-976e-42f7-9a09-a6d491c94db9\" (UID: \"aba8a877-976e-42f7-9a09-a6d491c94db9\") " Jan 20 18:18:05 crc kubenswrapper[4558]: I0120 18:18:05.860198 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/aba8a877-976e-42f7-9a09-a6d491c94db9-fernet-keys\") pod \"aba8a877-976e-42f7-9a09-a6d491c94db9\" (UID: \"aba8a877-976e-42f7-9a09-a6d491c94db9\") " Jan 20 18:18:05 crc kubenswrapper[4558]: I0120 18:18:05.860237 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g8t4h\" (UniqueName: \"kubernetes.io/projected/aba8a877-976e-42f7-9a09-a6d491c94db9-kube-api-access-g8t4h\") pod \"aba8a877-976e-42f7-9a09-a6d491c94db9\" (UID: \"aba8a877-976e-42f7-9a09-a6d491c94db9\") " Jan 20 18:18:05 crc kubenswrapper[4558]: I0120 18:18:05.860305 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aba8a877-976e-42f7-9a09-a6d491c94db9-scripts\") pod \"aba8a877-976e-42f7-9a09-a6d491c94db9\" (UID: \"aba8a877-976e-42f7-9a09-a6d491c94db9\") " Jan 20 18:18:05 crc kubenswrapper[4558]: I0120 18:18:05.866703 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aba8a877-976e-42f7-9a09-a6d491c94db9-scripts" (OuterVolumeSpecName: "scripts") pod "aba8a877-976e-42f7-9a09-a6d491c94db9" (UID: "aba8a877-976e-42f7-9a09-a6d491c94db9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:18:05 crc kubenswrapper[4558]: I0120 18:18:05.866836 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aba8a877-976e-42f7-9a09-a6d491c94db9-kube-api-access-g8t4h" (OuterVolumeSpecName: "kube-api-access-g8t4h") pod "aba8a877-976e-42f7-9a09-a6d491c94db9" (UID: "aba8a877-976e-42f7-9a09-a6d491c94db9"). InnerVolumeSpecName "kube-api-access-g8t4h". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:18:05 crc kubenswrapper[4558]: I0120 18:18:05.867258 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aba8a877-976e-42f7-9a09-a6d491c94db9-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "aba8a877-976e-42f7-9a09-a6d491c94db9" (UID: "aba8a877-976e-42f7-9a09-a6d491c94db9"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:18:05 crc kubenswrapper[4558]: I0120 18:18:05.869375 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aba8a877-976e-42f7-9a09-a6d491c94db9-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "aba8a877-976e-42f7-9a09-a6d491c94db9" (UID: "aba8a877-976e-42f7-9a09-a6d491c94db9"). InnerVolumeSpecName "credential-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:18:05 crc kubenswrapper[4558]: I0120 18:18:05.881874 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aba8a877-976e-42f7-9a09-a6d491c94db9-config-data" (OuterVolumeSpecName: "config-data") pod "aba8a877-976e-42f7-9a09-a6d491c94db9" (UID: "aba8a877-976e-42f7-9a09-a6d491c94db9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:18:05 crc kubenswrapper[4558]: I0120 18:18:05.962075 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aba8a877-976e-42f7-9a09-a6d491c94db9-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 18:18:05 crc kubenswrapper[4558]: I0120 18:18:05.962115 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aba8a877-976e-42f7-9a09-a6d491c94db9-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 18:18:05 crc kubenswrapper[4558]: I0120 18:18:05.962129 4558 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/aba8a877-976e-42f7-9a09-a6d491c94db9-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 20 18:18:05 crc kubenswrapper[4558]: I0120 18:18:05.962141 4558 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/aba8a877-976e-42f7-9a09-a6d491c94db9-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 20 18:18:05 crc kubenswrapper[4558]: I0120 18:18:05.962155 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g8t4h\" (UniqueName: \"kubernetes.io/projected/aba8a877-976e-42f7-9a09-a6d491c94db9-kube-api-access-g8t4h\") on node \"crc\" DevicePath \"\"" Jan 20 18:18:06 crc kubenswrapper[4558]: I0120 18:18:06.486806 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="barbican-kuttl-tests/keystone-bootstrap-rl77j" Jan 20 18:18:06 crc kubenswrapper[4558]: I0120 18:18:06.486800 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/keystone-bootstrap-rl77j" event={"ID":"aba8a877-976e-42f7-9a09-a6d491c94db9","Type":"ContainerDied","Data":"070096224b2bb6e212442099afcfdecc82ac676f4061b4323c445d1b79b940bd"} Jan 20 18:18:06 crc kubenswrapper[4558]: I0120 18:18:06.486876 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="070096224b2bb6e212442099afcfdecc82ac676f4061b4323c445d1b79b940bd" Jan 20 18:18:06 crc kubenswrapper[4558]: I0120 18:18:06.487078 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-5db5bc648b-tc7cd" Jan 20 18:18:06 crc kubenswrapper[4558]: I0120 18:18:06.550575 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["barbican-kuttl-tests/keystone-694b59f8cc-6bxr2"] Jan 20 18:18:06 crc kubenswrapper[4558]: E0120 18:18:06.550861 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aba8a877-976e-42f7-9a09-a6d491c94db9" containerName="keystone-bootstrap" Jan 20 18:18:06 crc kubenswrapper[4558]: I0120 18:18:06.550879 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="aba8a877-976e-42f7-9a09-a6d491c94db9" containerName="keystone-bootstrap" Jan 20 18:18:06 crc kubenswrapper[4558]: I0120 18:18:06.551023 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="aba8a877-976e-42f7-9a09-a6d491c94db9" containerName="keystone-bootstrap" Jan 20 18:18:06 crc kubenswrapper[4558]: I0120 18:18:06.551482 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="barbican-kuttl-tests/keystone-694b59f8cc-6bxr2" Jan 20 18:18:06 crc kubenswrapper[4558]: I0120 18:18:06.553260 4558 reflector.go:368] Caches populated for *v1.Secret from object-"barbican-kuttl-tests"/"keystone-scripts" Jan 20 18:18:06 crc kubenswrapper[4558]: I0120 18:18:06.553745 4558 reflector.go:368] Caches populated for *v1.Secret from object-"barbican-kuttl-tests"/"keystone" Jan 20 18:18:06 crc kubenswrapper[4558]: I0120 18:18:06.554196 4558 reflector.go:368] Caches populated for *v1.Secret from object-"barbican-kuttl-tests"/"keystone-config-data" Jan 20 18:18:06 crc kubenswrapper[4558]: I0120 18:18:06.554484 4558 reflector.go:368] Caches populated for *v1.Secret from object-"barbican-kuttl-tests"/"keystone-keystone-dockercfg-brg9r" Jan 20 18:18:06 crc kubenswrapper[4558]: I0120 18:18:06.561204 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["barbican-kuttl-tests/keystone-694b59f8cc-6bxr2"] Jan 20 18:18:06 crc kubenswrapper[4558]: I0120 18:18:06.673295 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9c946696-d624-4baf-824d-da41544b5240-scripts\") pod \"keystone-694b59f8cc-6bxr2\" (UID: \"9c946696-d624-4baf-824d-da41544b5240\") " pod="barbican-kuttl-tests/keystone-694b59f8cc-6bxr2" Jan 20 18:18:06 crc kubenswrapper[4558]: I0120 18:18:06.673418 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/9c946696-d624-4baf-824d-da41544b5240-credential-keys\") pod \"keystone-694b59f8cc-6bxr2\" (UID: \"9c946696-d624-4baf-824d-da41544b5240\") " pod="barbican-kuttl-tests/keystone-694b59f8cc-6bxr2" Jan 20 18:18:06 crc kubenswrapper[4558]: I0120 18:18:06.673700 4558 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c946696-d624-4baf-824d-da41544b5240-config-data\") pod \"keystone-694b59f8cc-6bxr2\" (UID: \"9c946696-d624-4baf-824d-da41544b5240\") " pod="barbican-kuttl-tests/keystone-694b59f8cc-6bxr2" Jan 20 18:18:06 crc kubenswrapper[4558]: I0120 18:18:06.673865 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9c946696-d624-4baf-824d-da41544b5240-fernet-keys\") pod \"keystone-694b59f8cc-6bxr2\" (UID: \"9c946696-d624-4baf-824d-da41544b5240\") " pod="barbican-kuttl-tests/keystone-694b59f8cc-6bxr2" Jan 20 18:18:06 crc kubenswrapper[4558]: I0120 18:18:06.674050 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bz4xw\" (UniqueName: \"kubernetes.io/projected/9c946696-d624-4baf-824d-da41544b5240-kube-api-access-bz4xw\") pod \"keystone-694b59f8cc-6bxr2\" (UID: \"9c946696-d624-4baf-824d-da41544b5240\") " pod="barbican-kuttl-tests/keystone-694b59f8cc-6bxr2" Jan 20 18:18:06 crc kubenswrapper[4558]: I0120 18:18:06.775903 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9c946696-d624-4baf-824d-da41544b5240-fernet-keys\") pod \"keystone-694b59f8cc-6bxr2\" (UID: \"9c946696-d624-4baf-824d-da41544b5240\") " pod="barbican-kuttl-tests/keystone-694b59f8cc-6bxr2" Jan 20 18:18:06 crc kubenswrapper[4558]: I0120 18:18:06.776036 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bz4xw\" (UniqueName: \"kubernetes.io/projected/9c946696-d624-4baf-824d-da41544b5240-kube-api-access-bz4xw\") pod \"keystone-694b59f8cc-6bxr2\" (UID: \"9c946696-d624-4baf-824d-da41544b5240\") " pod="barbican-kuttl-tests/keystone-694b59f8cc-6bxr2" Jan 20 18:18:06 crc kubenswrapper[4558]: I0120 18:18:06.776081 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9c946696-d624-4baf-824d-da41544b5240-scripts\") pod \"keystone-694b59f8cc-6bxr2\" (UID: \"9c946696-d624-4baf-824d-da41544b5240\") " pod="barbican-kuttl-tests/keystone-694b59f8cc-6bxr2" Jan 20 18:18:06 crc kubenswrapper[4558]: I0120 18:18:06.776121 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/9c946696-d624-4baf-824d-da41544b5240-credential-keys\") pod \"keystone-694b59f8cc-6bxr2\" (UID: \"9c946696-d624-4baf-824d-da41544b5240\") " pod="barbican-kuttl-tests/keystone-694b59f8cc-6bxr2" Jan 20 18:18:06 crc kubenswrapper[4558]: I0120 18:18:06.776211 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c946696-d624-4baf-824d-da41544b5240-config-data\") pod \"keystone-694b59f8cc-6bxr2\" (UID: \"9c946696-d624-4baf-824d-da41544b5240\") " pod="barbican-kuttl-tests/keystone-694b59f8cc-6bxr2" Jan 20 18:18:06 crc kubenswrapper[4558]: I0120 18:18:06.781328 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c946696-d624-4baf-824d-da41544b5240-config-data\") pod \"keystone-694b59f8cc-6bxr2\" (UID: \"9c946696-d624-4baf-824d-da41544b5240\") " pod="barbican-kuttl-tests/keystone-694b59f8cc-6bxr2" Jan 20 18:18:06 crc kubenswrapper[4558]: I0120 18:18:06.782356 
4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/9c946696-d624-4baf-824d-da41544b5240-credential-keys\") pod \"keystone-694b59f8cc-6bxr2\" (UID: \"9c946696-d624-4baf-824d-da41544b5240\") " pod="barbican-kuttl-tests/keystone-694b59f8cc-6bxr2" Jan 20 18:18:06 crc kubenswrapper[4558]: I0120 18:18:06.782516 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9c946696-d624-4baf-824d-da41544b5240-scripts\") pod \"keystone-694b59f8cc-6bxr2\" (UID: \"9c946696-d624-4baf-824d-da41544b5240\") " pod="barbican-kuttl-tests/keystone-694b59f8cc-6bxr2" Jan 20 18:18:06 crc kubenswrapper[4558]: I0120 18:18:06.782905 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9c946696-d624-4baf-824d-da41544b5240-fernet-keys\") pod \"keystone-694b59f8cc-6bxr2\" (UID: \"9c946696-d624-4baf-824d-da41544b5240\") " pod="barbican-kuttl-tests/keystone-694b59f8cc-6bxr2" Jan 20 18:18:06 crc kubenswrapper[4558]: I0120 18:18:06.790259 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bz4xw\" (UniqueName: \"kubernetes.io/projected/9c946696-d624-4baf-824d-da41544b5240-kube-api-access-bz4xw\") pod \"keystone-694b59f8cc-6bxr2\" (UID: \"9c946696-d624-4baf-824d-da41544b5240\") " pod="barbican-kuttl-tests/keystone-694b59f8cc-6bxr2" Jan 20 18:18:06 crc kubenswrapper[4558]: I0120 18:18:06.865473 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="barbican-kuttl-tests/keystone-694b59f8cc-6bxr2" Jan 20 18:18:07 crc kubenswrapper[4558]: I0120 18:18:07.265952 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["barbican-kuttl-tests/keystone-694b59f8cc-6bxr2"] Jan 20 18:18:07 crc kubenswrapper[4558]: I0120 18:18:07.496719 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/keystone-694b59f8cc-6bxr2" event={"ID":"9c946696-d624-4baf-824d-da41544b5240","Type":"ContainerStarted","Data":"cd7ed0d3b2b2fe1e487a0b13c301a12eef74b758ee8fd6ae67b04dbe57f98f49"} Jan 20 18:18:07 crc kubenswrapper[4558]: I0120 18:18:07.497291 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="barbican-kuttl-tests/keystone-694b59f8cc-6bxr2" Jan 20 18:18:07 crc kubenswrapper[4558]: I0120 18:18:07.497334 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/keystone-694b59f8cc-6bxr2" event={"ID":"9c946696-d624-4baf-824d-da41544b5240","Type":"ContainerStarted","Data":"1bfc1f27aac439037c7c45f3f556abb8b1ab8e6d830e19c3214c7a2cfe089a80"} Jan 20 18:18:07 crc kubenswrapper[4558]: I0120 18:18:07.516276 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="barbican-kuttl-tests/keystone-694b59f8cc-6bxr2" podStartSLOduration=1.516254335 podStartE2EDuration="1.516254335s" podCreationTimestamp="2026-01-20 18:18:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:18:07.510258631 +0000 UTC m=+5781.270596598" watchObservedRunningTime="2026-01-20 18:18:07.516254335 +0000 UTC m=+5781.276592301" Jan 20 18:18:17 crc kubenswrapper[4558]: I0120 18:18:17.566318 4558 scope.go:117] "RemoveContainer" containerID="27314e1a8af2e8df8ecd2e7a06cbb630afbfecd8ca2338a1f199cf92f982f581" Jan 20 18:18:17 crc kubenswrapper[4558]: E0120 18:18:17.567406 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to 
\"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:18:18 crc kubenswrapper[4558]: I0120 18:18:18.922723 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-5db5bc648b-tc7cd" Jan 20 18:18:32 crc kubenswrapper[4558]: I0120 18:18:32.565952 4558 scope.go:117] "RemoveContainer" containerID="27314e1a8af2e8df8ecd2e7a06cbb630afbfecd8ca2338a1f199cf92f982f581" Jan 20 18:18:32 crc kubenswrapper[4558]: E0120 18:18:32.566999 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:18:36 crc kubenswrapper[4558]: I0120 18:18:36.123122 4558 scope.go:117] "RemoveContainer" containerID="3c20ec4df8e9e6fdacf8f828e052680dd511bff57d9a705f1b292155372a87d6" Jan 20 18:18:36 crc kubenswrapper[4558]: I0120 18:18:36.151199 4558 scope.go:117] "RemoveContainer" containerID="21d4ed8c33a665f6312c036258ad016d79469f47f0de29d2d5704295388e7bfe" Jan 20 18:18:36 crc kubenswrapper[4558]: I0120 18:18:36.175579 4558 scope.go:117] "RemoveContainer" containerID="15fd0c1ca89ec4d06cb87710c8914dd6911bb232f989412db7afa6da8cf18209" Jan 20 18:18:36 crc kubenswrapper[4558]: I0120 18:18:36.635374 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["barbican-kuttl-tests/barbican-db-create-bnkm2"] Jan 20 18:18:36 crc kubenswrapper[4558]: I0120 18:18:36.636449 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="barbican-kuttl-tests/barbican-db-create-bnkm2" Jan 20 18:18:36 crc kubenswrapper[4558]: I0120 18:18:36.640420 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["barbican-kuttl-tests/barbican-1f0e-account-create-update-swv56"] Jan 20 18:18:36 crc kubenswrapper[4558]: I0120 18:18:36.641713 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="barbican-kuttl-tests/barbican-1f0e-account-create-update-swv56" Jan 20 18:18:36 crc kubenswrapper[4558]: I0120 18:18:36.644446 4558 reflector.go:368] Caches populated for *v1.Secret from object-"barbican-kuttl-tests"/"barbican-db-secret" Jan 20 18:18:36 crc kubenswrapper[4558]: I0120 18:18:36.644943 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["barbican-kuttl-tests/barbican-db-create-bnkm2"] Jan 20 18:18:36 crc kubenswrapper[4558]: I0120 18:18:36.651976 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["barbican-kuttl-tests/barbican-1f0e-account-create-update-swv56"] Jan 20 18:18:36 crc kubenswrapper[4558]: I0120 18:18:36.748772 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6pftf\" (UniqueName: \"kubernetes.io/projected/da3a3927-0645-403e-a972-3a1644222ed5-kube-api-access-6pftf\") pod \"barbican-1f0e-account-create-update-swv56\" (UID: \"da3a3927-0645-403e-a972-3a1644222ed5\") " pod="barbican-kuttl-tests/barbican-1f0e-account-create-update-swv56" Jan 20 18:18:36 crc kubenswrapper[4558]: I0120 18:18:36.748905 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/99aca88d-e31b-4cf3-8532-ff2b3ecc42c6-operator-scripts\") pod \"barbican-db-create-bnkm2\" (UID: \"99aca88d-e31b-4cf3-8532-ff2b3ecc42c6\") " pod="barbican-kuttl-tests/barbican-db-create-bnkm2" Jan 20 18:18:36 crc kubenswrapper[4558]: I0120 18:18:36.749011 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hlkvt\" (UniqueName: \"kubernetes.io/projected/99aca88d-e31b-4cf3-8532-ff2b3ecc42c6-kube-api-access-hlkvt\") pod \"barbican-db-create-bnkm2\" (UID: \"99aca88d-e31b-4cf3-8532-ff2b3ecc42c6\") " pod="barbican-kuttl-tests/barbican-db-create-bnkm2" Jan 20 18:18:36 crc kubenswrapper[4558]: I0120 18:18:36.749137 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/da3a3927-0645-403e-a972-3a1644222ed5-operator-scripts\") pod \"barbican-1f0e-account-create-update-swv56\" (UID: \"da3a3927-0645-403e-a972-3a1644222ed5\") " pod="barbican-kuttl-tests/barbican-1f0e-account-create-update-swv56" Jan 20 18:18:36 crc kubenswrapper[4558]: I0120 18:18:36.851195 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/99aca88d-e31b-4cf3-8532-ff2b3ecc42c6-operator-scripts\") pod \"barbican-db-create-bnkm2\" (UID: \"99aca88d-e31b-4cf3-8532-ff2b3ecc42c6\") " pod="barbican-kuttl-tests/barbican-db-create-bnkm2" Jan 20 18:18:36 crc kubenswrapper[4558]: I0120 18:18:36.851305 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hlkvt\" (UniqueName: \"kubernetes.io/projected/99aca88d-e31b-4cf3-8532-ff2b3ecc42c6-kube-api-access-hlkvt\") pod \"barbican-db-create-bnkm2\" (UID: \"99aca88d-e31b-4cf3-8532-ff2b3ecc42c6\") " pod="barbican-kuttl-tests/barbican-db-create-bnkm2" Jan 20 18:18:36 crc kubenswrapper[4558]: I0120 18:18:36.851346 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/da3a3927-0645-403e-a972-3a1644222ed5-operator-scripts\") pod \"barbican-1f0e-account-create-update-swv56\" (UID: \"da3a3927-0645-403e-a972-3a1644222ed5\") " 
pod="barbican-kuttl-tests/barbican-1f0e-account-create-update-swv56" Jan 20 18:18:36 crc kubenswrapper[4558]: I0120 18:18:36.851566 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6pftf\" (UniqueName: \"kubernetes.io/projected/da3a3927-0645-403e-a972-3a1644222ed5-kube-api-access-6pftf\") pod \"barbican-1f0e-account-create-update-swv56\" (UID: \"da3a3927-0645-403e-a972-3a1644222ed5\") " pod="barbican-kuttl-tests/barbican-1f0e-account-create-update-swv56" Jan 20 18:18:36 crc kubenswrapper[4558]: I0120 18:18:36.852125 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/99aca88d-e31b-4cf3-8532-ff2b3ecc42c6-operator-scripts\") pod \"barbican-db-create-bnkm2\" (UID: \"99aca88d-e31b-4cf3-8532-ff2b3ecc42c6\") " pod="barbican-kuttl-tests/barbican-db-create-bnkm2" Jan 20 18:18:36 crc kubenswrapper[4558]: I0120 18:18:36.852486 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/da3a3927-0645-403e-a972-3a1644222ed5-operator-scripts\") pod \"barbican-1f0e-account-create-update-swv56\" (UID: \"da3a3927-0645-403e-a972-3a1644222ed5\") " pod="barbican-kuttl-tests/barbican-1f0e-account-create-update-swv56" Jan 20 18:18:36 crc kubenswrapper[4558]: I0120 18:18:36.871428 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hlkvt\" (UniqueName: \"kubernetes.io/projected/99aca88d-e31b-4cf3-8532-ff2b3ecc42c6-kube-api-access-hlkvt\") pod \"barbican-db-create-bnkm2\" (UID: \"99aca88d-e31b-4cf3-8532-ff2b3ecc42c6\") " pod="barbican-kuttl-tests/barbican-db-create-bnkm2" Jan 20 18:18:36 crc kubenswrapper[4558]: I0120 18:18:36.871490 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6pftf\" (UniqueName: \"kubernetes.io/projected/da3a3927-0645-403e-a972-3a1644222ed5-kube-api-access-6pftf\") pod \"barbican-1f0e-account-create-update-swv56\" (UID: \"da3a3927-0645-403e-a972-3a1644222ed5\") " pod="barbican-kuttl-tests/barbican-1f0e-account-create-update-swv56" Jan 20 18:18:36 crc kubenswrapper[4558]: I0120 18:18:36.952100 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="barbican-kuttl-tests/barbican-db-create-bnkm2" Jan 20 18:18:36 crc kubenswrapper[4558]: I0120 18:18:36.962690 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="barbican-kuttl-tests/barbican-1f0e-account-create-update-swv56" Jan 20 18:18:37 crc kubenswrapper[4558]: I0120 18:18:37.363660 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["barbican-kuttl-tests/barbican-db-create-bnkm2"] Jan 20 18:18:37 crc kubenswrapper[4558]: I0120 18:18:37.453214 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["barbican-kuttl-tests/barbican-1f0e-account-create-update-swv56"] Jan 20 18:18:37 crc kubenswrapper[4558]: W0120 18:18:37.457092 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podda3a3927_0645_403e_a972_3a1644222ed5.slice/crio-3629c066a2dbc70989f73fb4b5d56c0933ece7108d00149000100cbd7f8456f4 WatchSource:0}: Error finding container 3629c066a2dbc70989f73fb4b5d56c0933ece7108d00149000100cbd7f8456f4: Status 404 returned error can't find the container with id 3629c066a2dbc70989f73fb4b5d56c0933ece7108d00149000100cbd7f8456f4 Jan 20 18:18:37 crc kubenswrapper[4558]: I0120 18:18:37.774779 4558 generic.go:334] "Generic (PLEG): container finished" podID="99aca88d-e31b-4cf3-8532-ff2b3ecc42c6" containerID="30b13d5d64bdd04a5b3eba65ef7d6ad8a6f88512fd66ccbff631c3b25c339616" exitCode=0 Jan 20 18:18:37 crc kubenswrapper[4558]: I0120 18:18:37.774894 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican-db-create-bnkm2" event={"ID":"99aca88d-e31b-4cf3-8532-ff2b3ecc42c6","Type":"ContainerDied","Data":"30b13d5d64bdd04a5b3eba65ef7d6ad8a6f88512fd66ccbff631c3b25c339616"} Jan 20 18:18:37 crc kubenswrapper[4558]: I0120 18:18:37.775364 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican-db-create-bnkm2" event={"ID":"99aca88d-e31b-4cf3-8532-ff2b3ecc42c6","Type":"ContainerStarted","Data":"65e66fee62b8bab9fd7d7ae7d991a15340c84e985a386756947cf2ca1e8c0317"} Jan 20 18:18:37 crc kubenswrapper[4558]: I0120 18:18:37.776895 4558 generic.go:334] "Generic (PLEG): container finished" podID="da3a3927-0645-403e-a972-3a1644222ed5" containerID="f654bb222b97f28d64921290dc753644336762502caf7dcbd69972a94743e052" exitCode=0 Jan 20 18:18:37 crc kubenswrapper[4558]: I0120 18:18:37.776951 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican-1f0e-account-create-update-swv56" event={"ID":"da3a3927-0645-403e-a972-3a1644222ed5","Type":"ContainerDied","Data":"f654bb222b97f28d64921290dc753644336762502caf7dcbd69972a94743e052"} Jan 20 18:18:37 crc kubenswrapper[4558]: I0120 18:18:37.777048 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican-1f0e-account-create-update-swv56" event={"ID":"da3a3927-0645-403e-a972-3a1644222ed5","Type":"ContainerStarted","Data":"3629c066a2dbc70989f73fb4b5d56c0933ece7108d00149000100cbd7f8456f4"} Jan 20 18:18:38 crc kubenswrapper[4558]: I0120 18:18:38.188432 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="barbican-kuttl-tests/keystone-694b59f8cc-6bxr2" Jan 20 18:18:39 crc kubenswrapper[4558]: I0120 18:18:39.055249 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="barbican-kuttl-tests/barbican-1f0e-account-create-update-swv56" Jan 20 18:18:39 crc kubenswrapper[4558]: I0120 18:18:39.061006 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="barbican-kuttl-tests/barbican-db-create-bnkm2" Jan 20 18:18:39 crc kubenswrapper[4558]: I0120 18:18:39.193691 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hlkvt\" (UniqueName: \"kubernetes.io/projected/99aca88d-e31b-4cf3-8532-ff2b3ecc42c6-kube-api-access-hlkvt\") pod \"99aca88d-e31b-4cf3-8532-ff2b3ecc42c6\" (UID: \"99aca88d-e31b-4cf3-8532-ff2b3ecc42c6\") " Jan 20 18:18:39 crc kubenswrapper[4558]: I0120 18:18:39.193750 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/99aca88d-e31b-4cf3-8532-ff2b3ecc42c6-operator-scripts\") pod \"99aca88d-e31b-4cf3-8532-ff2b3ecc42c6\" (UID: \"99aca88d-e31b-4cf3-8532-ff2b3ecc42c6\") " Jan 20 18:18:39 crc kubenswrapper[4558]: I0120 18:18:39.193860 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/da3a3927-0645-403e-a972-3a1644222ed5-operator-scripts\") pod \"da3a3927-0645-403e-a972-3a1644222ed5\" (UID: \"da3a3927-0645-403e-a972-3a1644222ed5\") " Jan 20 18:18:39 crc kubenswrapper[4558]: I0120 18:18:39.193993 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6pftf\" (UniqueName: \"kubernetes.io/projected/da3a3927-0645-403e-a972-3a1644222ed5-kube-api-access-6pftf\") pod \"da3a3927-0645-403e-a972-3a1644222ed5\" (UID: \"da3a3927-0645-403e-a972-3a1644222ed5\") " Jan 20 18:18:39 crc kubenswrapper[4558]: I0120 18:18:39.194508 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/da3a3927-0645-403e-a972-3a1644222ed5-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "da3a3927-0645-403e-a972-3a1644222ed5" (UID: "da3a3927-0645-403e-a972-3a1644222ed5"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:18:39 crc kubenswrapper[4558]: I0120 18:18:39.194508 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/99aca88d-e31b-4cf3-8532-ff2b3ecc42c6-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "99aca88d-e31b-4cf3-8532-ff2b3ecc42c6" (UID: "99aca88d-e31b-4cf3-8532-ff2b3ecc42c6"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:18:39 crc kubenswrapper[4558]: I0120 18:18:39.200675 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/da3a3927-0645-403e-a972-3a1644222ed5-kube-api-access-6pftf" (OuterVolumeSpecName: "kube-api-access-6pftf") pod "da3a3927-0645-403e-a972-3a1644222ed5" (UID: "da3a3927-0645-403e-a972-3a1644222ed5"). InnerVolumeSpecName "kube-api-access-6pftf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:18:39 crc kubenswrapper[4558]: I0120 18:18:39.201030 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/99aca88d-e31b-4cf3-8532-ff2b3ecc42c6-kube-api-access-hlkvt" (OuterVolumeSpecName: "kube-api-access-hlkvt") pod "99aca88d-e31b-4cf3-8532-ff2b3ecc42c6" (UID: "99aca88d-e31b-4cf3-8532-ff2b3ecc42c6"). InnerVolumeSpecName "kube-api-access-hlkvt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:18:39 crc kubenswrapper[4558]: I0120 18:18:39.296019 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6pftf\" (UniqueName: \"kubernetes.io/projected/da3a3927-0645-403e-a972-3a1644222ed5-kube-api-access-6pftf\") on node \"crc\" DevicePath \"\"" Jan 20 18:18:39 crc kubenswrapper[4558]: I0120 18:18:39.296073 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hlkvt\" (UniqueName: \"kubernetes.io/projected/99aca88d-e31b-4cf3-8532-ff2b3ecc42c6-kube-api-access-hlkvt\") on node \"crc\" DevicePath \"\"" Jan 20 18:18:39 crc kubenswrapper[4558]: I0120 18:18:39.296089 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/99aca88d-e31b-4cf3-8532-ff2b3ecc42c6-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 18:18:39 crc kubenswrapper[4558]: I0120 18:18:39.296099 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/da3a3927-0645-403e-a972-3a1644222ed5-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 18:18:39 crc kubenswrapper[4558]: I0120 18:18:39.797039 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="barbican-kuttl-tests/barbican-1f0e-account-create-update-swv56" Jan 20 18:18:39 crc kubenswrapper[4558]: I0120 18:18:39.797013 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican-1f0e-account-create-update-swv56" event={"ID":"da3a3927-0645-403e-a972-3a1644222ed5","Type":"ContainerDied","Data":"3629c066a2dbc70989f73fb4b5d56c0933ece7108d00149000100cbd7f8456f4"} Jan 20 18:18:39 crc kubenswrapper[4558]: I0120 18:18:39.797221 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3629c066a2dbc70989f73fb4b5d56c0933ece7108d00149000100cbd7f8456f4" Jan 20 18:18:39 crc kubenswrapper[4558]: I0120 18:18:39.798692 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican-db-create-bnkm2" event={"ID":"99aca88d-e31b-4cf3-8532-ff2b3ecc42c6","Type":"ContainerDied","Data":"65e66fee62b8bab9fd7d7ae7d991a15340c84e985a386756947cf2ca1e8c0317"} Jan 20 18:18:39 crc kubenswrapper[4558]: I0120 18:18:39.798748 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="65e66fee62b8bab9fd7d7ae7d991a15340c84e985a386756947cf2ca1e8c0317" Jan 20 18:18:39 crc kubenswrapper[4558]: I0120 18:18:39.798769 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="barbican-kuttl-tests/barbican-db-create-bnkm2" Jan 20 18:18:41 crc kubenswrapper[4558]: I0120 18:18:41.928096 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["barbican-kuttl-tests/barbican-db-sync-q6bzf"] Jan 20 18:18:41 crc kubenswrapper[4558]: E0120 18:18:41.928714 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99aca88d-e31b-4cf3-8532-ff2b3ecc42c6" containerName="mariadb-database-create" Jan 20 18:18:41 crc kubenswrapper[4558]: I0120 18:18:41.928731 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="99aca88d-e31b-4cf3-8532-ff2b3ecc42c6" containerName="mariadb-database-create" Jan 20 18:18:41 crc kubenswrapper[4558]: E0120 18:18:41.928741 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da3a3927-0645-403e-a972-3a1644222ed5" containerName="mariadb-account-create-update" Jan 20 18:18:41 crc kubenswrapper[4558]: I0120 18:18:41.928748 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="da3a3927-0645-403e-a972-3a1644222ed5" containerName="mariadb-account-create-update" Jan 20 18:18:41 crc kubenswrapper[4558]: I0120 18:18:41.928902 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="da3a3927-0645-403e-a972-3a1644222ed5" containerName="mariadb-account-create-update" Jan 20 18:18:41 crc kubenswrapper[4558]: I0120 18:18:41.928925 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="99aca88d-e31b-4cf3-8532-ff2b3ecc42c6" containerName="mariadb-database-create" Jan 20 18:18:41 crc kubenswrapper[4558]: I0120 18:18:41.929522 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="barbican-kuttl-tests/barbican-db-sync-q6bzf" Jan 20 18:18:41 crc kubenswrapper[4558]: I0120 18:18:41.931865 4558 reflector.go:368] Caches populated for *v1.Secret from object-"barbican-kuttl-tests"/"barbican-barbican-dockercfg-cwkfj" Jan 20 18:18:41 crc kubenswrapper[4558]: I0120 18:18:41.931978 4558 reflector.go:368] Caches populated for *v1.Secret from object-"barbican-kuttl-tests"/"barbican-config-data" Jan 20 18:18:41 crc kubenswrapper[4558]: I0120 18:18:41.937074 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["barbican-kuttl-tests/barbican-db-sync-q6bzf"] Jan 20 18:18:42 crc kubenswrapper[4558]: I0120 18:18:42.037301 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-svx94\" (UniqueName: \"kubernetes.io/projected/19e31edd-327b-4861-8446-12b3de5b9312-kube-api-access-svx94\") pod \"barbican-db-sync-q6bzf\" (UID: \"19e31edd-327b-4861-8446-12b3de5b9312\") " pod="barbican-kuttl-tests/barbican-db-sync-q6bzf" Jan 20 18:18:42 crc kubenswrapper[4558]: I0120 18:18:42.037427 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/19e31edd-327b-4861-8446-12b3de5b9312-db-sync-config-data\") pod \"barbican-db-sync-q6bzf\" (UID: \"19e31edd-327b-4861-8446-12b3de5b9312\") " pod="barbican-kuttl-tests/barbican-db-sync-q6bzf" Jan 20 18:18:42 crc kubenswrapper[4558]: I0120 18:18:42.139382 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-svx94\" (UniqueName: \"kubernetes.io/projected/19e31edd-327b-4861-8446-12b3de5b9312-kube-api-access-svx94\") pod \"barbican-db-sync-q6bzf\" (UID: \"19e31edd-327b-4861-8446-12b3de5b9312\") " pod="barbican-kuttl-tests/barbican-db-sync-q6bzf" Jan 20 18:18:42 crc kubenswrapper[4558]: I0120 18:18:42.139452 4558 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/19e31edd-327b-4861-8446-12b3de5b9312-db-sync-config-data\") pod \"barbican-db-sync-q6bzf\" (UID: \"19e31edd-327b-4861-8446-12b3de5b9312\") " pod="barbican-kuttl-tests/barbican-db-sync-q6bzf" Jan 20 18:18:42 crc kubenswrapper[4558]: I0120 18:18:42.146624 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/19e31edd-327b-4861-8446-12b3de5b9312-db-sync-config-data\") pod \"barbican-db-sync-q6bzf\" (UID: \"19e31edd-327b-4861-8446-12b3de5b9312\") " pod="barbican-kuttl-tests/barbican-db-sync-q6bzf" Jan 20 18:18:42 crc kubenswrapper[4558]: I0120 18:18:42.153191 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-svx94\" (UniqueName: \"kubernetes.io/projected/19e31edd-327b-4861-8446-12b3de5b9312-kube-api-access-svx94\") pod \"barbican-db-sync-q6bzf\" (UID: \"19e31edd-327b-4861-8446-12b3de5b9312\") " pod="barbican-kuttl-tests/barbican-db-sync-q6bzf" Jan 20 18:18:42 crc kubenswrapper[4558]: I0120 18:18:42.245465 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="barbican-kuttl-tests/barbican-db-sync-q6bzf" Jan 20 18:18:42 crc kubenswrapper[4558]: I0120 18:18:42.630417 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["barbican-kuttl-tests/barbican-db-sync-q6bzf"] Jan 20 18:18:42 crc kubenswrapper[4558]: W0120 18:18:42.636096 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod19e31edd_327b_4861_8446_12b3de5b9312.slice/crio-19d79a6725c4b99781c652e17f19666da9c098ec57abb06a95778429342e67d5 WatchSource:0}: Error finding container 19d79a6725c4b99781c652e17f19666da9c098ec57abb06a95778429342e67d5: Status 404 returned error can't find the container with id 19d79a6725c4b99781c652e17f19666da9c098ec57abb06a95778429342e67d5 Jan 20 18:18:42 crc kubenswrapper[4558]: I0120 18:18:42.827485 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican-db-sync-q6bzf" event={"ID":"19e31edd-327b-4861-8446-12b3de5b9312","Type":"ContainerStarted","Data":"19d79a6725c4b99781c652e17f19666da9c098ec57abb06a95778429342e67d5"} Jan 20 18:18:43 crc kubenswrapper[4558]: I0120 18:18:43.837126 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican-db-sync-q6bzf" event={"ID":"19e31edd-327b-4861-8446-12b3de5b9312","Type":"ContainerStarted","Data":"40cf157630bb938b379ccc598d8db3cbcc30d08e930588e8be748226d603132b"} Jan 20 18:18:43 crc kubenswrapper[4558]: I0120 18:18:43.856351 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="barbican-kuttl-tests/barbican-db-sync-q6bzf" podStartSLOduration=2.156181789 podStartE2EDuration="2.856331944s" podCreationTimestamp="2026-01-20 18:18:41 +0000 UTC" firstStartedPulling="2026-01-20 18:18:42.638470682 +0000 UTC m=+5816.398808649" lastFinishedPulling="2026-01-20 18:18:43.338620838 +0000 UTC m=+5817.098958804" observedRunningTime="2026-01-20 18:18:43.853038781 +0000 UTC m=+5817.613376749" watchObservedRunningTime="2026-01-20 18:18:43.856331944 +0000 UTC m=+5817.616669910" Jan 20 18:18:44 crc kubenswrapper[4558]: I0120 18:18:44.847154 4558 generic.go:334] "Generic (PLEG): container finished" podID="19e31edd-327b-4861-8446-12b3de5b9312" containerID="40cf157630bb938b379ccc598d8db3cbcc30d08e930588e8be748226d603132b" exitCode=0 Jan 20 18:18:44 crc 
kubenswrapper[4558]: I0120 18:18:44.847224 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican-db-sync-q6bzf" event={"ID":"19e31edd-327b-4861-8446-12b3de5b9312","Type":"ContainerDied","Data":"40cf157630bb938b379ccc598d8db3cbcc30d08e930588e8be748226d603132b"} Jan 20 18:18:46 crc kubenswrapper[4558]: I0120 18:18:46.067515 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="barbican-kuttl-tests/barbican-db-sync-q6bzf" Jan 20 18:18:46 crc kubenswrapper[4558]: I0120 18:18:46.212304 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-svx94\" (UniqueName: \"kubernetes.io/projected/19e31edd-327b-4861-8446-12b3de5b9312-kube-api-access-svx94\") pod \"19e31edd-327b-4861-8446-12b3de5b9312\" (UID: \"19e31edd-327b-4861-8446-12b3de5b9312\") " Jan 20 18:18:46 crc kubenswrapper[4558]: I0120 18:18:46.212468 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/19e31edd-327b-4861-8446-12b3de5b9312-db-sync-config-data\") pod \"19e31edd-327b-4861-8446-12b3de5b9312\" (UID: \"19e31edd-327b-4861-8446-12b3de5b9312\") " Jan 20 18:18:46 crc kubenswrapper[4558]: I0120 18:18:46.218872 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/19e31edd-327b-4861-8446-12b3de5b9312-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "19e31edd-327b-4861-8446-12b3de5b9312" (UID: "19e31edd-327b-4861-8446-12b3de5b9312"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:18:46 crc kubenswrapper[4558]: I0120 18:18:46.219242 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/19e31edd-327b-4861-8446-12b3de5b9312-kube-api-access-svx94" (OuterVolumeSpecName: "kube-api-access-svx94") pod "19e31edd-327b-4861-8446-12b3de5b9312" (UID: "19e31edd-327b-4861-8446-12b3de5b9312"). InnerVolumeSpecName "kube-api-access-svx94". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:18:46 crc kubenswrapper[4558]: I0120 18:18:46.315292 4558 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/19e31edd-327b-4861-8446-12b3de5b9312-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 18:18:46 crc kubenswrapper[4558]: I0120 18:18:46.315587 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-svx94\" (UniqueName: \"kubernetes.io/projected/19e31edd-327b-4861-8446-12b3de5b9312-kube-api-access-svx94\") on node \"crc\" DevicePath \"\"" Jan 20 18:18:46 crc kubenswrapper[4558]: I0120 18:18:46.570441 4558 scope.go:117] "RemoveContainer" containerID="27314e1a8af2e8df8ecd2e7a06cbb630afbfecd8ca2338a1f199cf92f982f581" Jan 20 18:18:46 crc kubenswrapper[4558]: E0120 18:18:46.570762 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:18:46 crc kubenswrapper[4558]: I0120 18:18:46.864640 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican-db-sync-q6bzf" event={"ID":"19e31edd-327b-4861-8446-12b3de5b9312","Type":"ContainerDied","Data":"19d79a6725c4b99781c652e17f19666da9c098ec57abb06a95778429342e67d5"} Jan 20 18:18:46 crc kubenswrapper[4558]: I0120 18:18:46.864678 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="barbican-kuttl-tests/barbican-db-sync-q6bzf" Jan 20 18:18:46 crc kubenswrapper[4558]: I0120 18:18:46.864699 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="19d79a6725c4b99781c652e17f19666da9c098ec57abb06a95778429342e67d5" Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.143807 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["barbican-kuttl-tests/barbican-worker-65bbb948f9-jg95w"] Jan 20 18:18:47 crc kubenswrapper[4558]: E0120 18:18:47.144147 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19e31edd-327b-4861-8446-12b3de5b9312" containerName="barbican-db-sync" Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.144180 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="19e31edd-327b-4861-8446-12b3de5b9312" containerName="barbican-db-sync" Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.144370 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="19e31edd-327b-4861-8446-12b3de5b9312" containerName="barbican-db-sync" Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.145182 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="barbican-kuttl-tests/barbican-worker-65bbb948f9-jg95w" Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.147063 4558 reflector.go:368] Caches populated for *v1.Secret from object-"barbican-kuttl-tests"/"barbican-barbican-dockercfg-cwkfj" Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.147177 4558 reflector.go:368] Caches populated for *v1.Secret from object-"barbican-kuttl-tests"/"barbican-worker-config-data" Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.147358 4558 reflector.go:368] Caches populated for *v1.Secret from object-"barbican-kuttl-tests"/"barbican-config-data" Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.159535 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["barbican-kuttl-tests/barbican-keystone-listener-6f9c66d74d-h9tvl"] Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.160997 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="barbican-kuttl-tests/barbican-keystone-listener-6f9c66d74d-h9tvl" Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.162552 4558 reflector.go:368] Caches populated for *v1.Secret from object-"barbican-kuttl-tests"/"barbican-keystone-listener-config-data" Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.171693 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["barbican-kuttl-tests/barbican-worker-65bbb948f9-jg95w"] Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.177507 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["barbican-kuttl-tests/barbican-keystone-listener-6f9c66d74d-h9tvl"] Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.331302 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f75c87f-8fff-45ac-ae2a-6d7ff585eb52-config-data\") pod \"barbican-worker-65bbb948f9-jg95w\" (UID: \"3f75c87f-8fff-45ac-ae2a-6d7ff585eb52\") " pod="barbican-kuttl-tests/barbican-worker-65bbb948f9-jg95w" Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.331377 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c3fa2e0-19e5-4ed3-8f78-fda7d9c0aa4f-config-data\") pod \"barbican-keystone-listener-6f9c66d74d-h9tvl\" (UID: \"6c3fa2e0-19e5-4ed3-8f78-fda7d9c0aa4f\") " pod="barbican-kuttl-tests/barbican-keystone-listener-6f9c66d74d-h9tvl" Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.331415 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3f75c87f-8fff-45ac-ae2a-6d7ff585eb52-config-data-custom\") pod \"barbican-worker-65bbb948f9-jg95w\" (UID: \"3f75c87f-8fff-45ac-ae2a-6d7ff585eb52\") " pod="barbican-kuttl-tests/barbican-worker-65bbb948f9-jg95w" Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.331448 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3f75c87f-8fff-45ac-ae2a-6d7ff585eb52-logs\") pod \"barbican-worker-65bbb948f9-jg95w\" (UID: \"3f75c87f-8fff-45ac-ae2a-6d7ff585eb52\") " pod="barbican-kuttl-tests/barbican-worker-65bbb948f9-jg95w" Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.331562 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/6c3fa2e0-19e5-4ed3-8f78-fda7d9c0aa4f-config-data-custom\") pod \"barbican-keystone-listener-6f9c66d74d-h9tvl\" (UID: \"6c3fa2e0-19e5-4ed3-8f78-fda7d9c0aa4f\") " pod="barbican-kuttl-tests/barbican-keystone-listener-6f9c66d74d-h9tvl" Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.331680 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fjq5b\" (UniqueName: \"kubernetes.io/projected/6c3fa2e0-19e5-4ed3-8f78-fda7d9c0aa4f-kube-api-access-fjq5b\") pod \"barbican-keystone-listener-6f9c66d74d-h9tvl\" (UID: \"6c3fa2e0-19e5-4ed3-8f78-fda7d9c0aa4f\") " pod="barbican-kuttl-tests/barbican-keystone-listener-6f9c66d74d-h9tvl" Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.331755 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6c3fa2e0-19e5-4ed3-8f78-fda7d9c0aa4f-logs\") pod \"barbican-keystone-listener-6f9c66d74d-h9tvl\" (UID: \"6c3fa2e0-19e5-4ed3-8f78-fda7d9c0aa4f\") " pod="barbican-kuttl-tests/barbican-keystone-listener-6f9c66d74d-h9tvl" Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.331887 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hhsn9\" (UniqueName: \"kubernetes.io/projected/3f75c87f-8fff-45ac-ae2a-6d7ff585eb52-kube-api-access-hhsn9\") pod \"barbican-worker-65bbb948f9-jg95w\" (UID: \"3f75c87f-8fff-45ac-ae2a-6d7ff585eb52\") " pod="barbican-kuttl-tests/barbican-worker-65bbb948f9-jg95w" Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.335781 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["barbican-kuttl-tests/barbican-api-7d686cb9b8-z9zrp"] Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.337043 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="barbican-kuttl-tests/barbican-api-7d686cb9b8-z9zrp" Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.339206 4558 reflector.go:368] Caches populated for *v1.Secret from object-"barbican-kuttl-tests"/"barbican-api-config-data" Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.350323 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["barbican-kuttl-tests/barbican-api-7d686cb9b8-z9zrp"] Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.434550 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f75c87f-8fff-45ac-ae2a-6d7ff585eb52-config-data\") pod \"barbican-worker-65bbb948f9-jg95w\" (UID: \"3f75c87f-8fff-45ac-ae2a-6d7ff585eb52\") " pod="barbican-kuttl-tests/barbican-worker-65bbb948f9-jg95w" Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.434625 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/65f781b0-c276-41cd-911f-9c964612d70d-logs\") pod \"barbican-api-7d686cb9b8-z9zrp\" (UID: \"65f781b0-c276-41cd-911f-9c964612d70d\") " pod="barbican-kuttl-tests/barbican-api-7d686cb9b8-z9zrp" Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.434709 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c3fa2e0-19e5-4ed3-8f78-fda7d9c0aa4f-config-data\") pod \"barbican-keystone-listener-6f9c66d74d-h9tvl\" (UID: \"6c3fa2e0-19e5-4ed3-8f78-fda7d9c0aa4f\") " pod="barbican-kuttl-tests/barbican-keystone-listener-6f9c66d74d-h9tvl" Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.434759 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3f75c87f-8fff-45ac-ae2a-6d7ff585eb52-config-data-custom\") pod \"barbican-worker-65bbb948f9-jg95w\" (UID: \"3f75c87f-8fff-45ac-ae2a-6d7ff585eb52\") " pod="barbican-kuttl-tests/barbican-worker-65bbb948f9-jg95w" Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.434797 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65f781b0-c276-41cd-911f-9c964612d70d-config-data\") pod \"barbican-api-7d686cb9b8-z9zrp\" (UID: \"65f781b0-c276-41cd-911f-9c964612d70d\") " pod="barbican-kuttl-tests/barbican-api-7d686cb9b8-z9zrp" Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.434836 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3f75c87f-8fff-45ac-ae2a-6d7ff585eb52-logs\") pod \"barbican-worker-65bbb948f9-jg95w\" (UID: \"3f75c87f-8fff-45ac-ae2a-6d7ff585eb52\") " pod="barbican-kuttl-tests/barbican-worker-65bbb948f9-jg95w" Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.434919 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6c3fa2e0-19e5-4ed3-8f78-fda7d9c0aa4f-config-data-custom\") pod \"barbican-keystone-listener-6f9c66d74d-h9tvl\" (UID: \"6c3fa2e0-19e5-4ed3-8f78-fda7d9c0aa4f\") " pod="barbican-kuttl-tests/barbican-keystone-listener-6f9c66d74d-h9tvl" Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.435013 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fjq5b\" (UniqueName: 
\"kubernetes.io/projected/6c3fa2e0-19e5-4ed3-8f78-fda7d9c0aa4f-kube-api-access-fjq5b\") pod \"barbican-keystone-listener-6f9c66d74d-h9tvl\" (UID: \"6c3fa2e0-19e5-4ed3-8f78-fda7d9c0aa4f\") " pod="barbican-kuttl-tests/barbican-keystone-listener-6f9c66d74d-h9tvl" Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.435071 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vwxst\" (UniqueName: \"kubernetes.io/projected/65f781b0-c276-41cd-911f-9c964612d70d-kube-api-access-vwxst\") pod \"barbican-api-7d686cb9b8-z9zrp\" (UID: \"65f781b0-c276-41cd-911f-9c964612d70d\") " pod="barbican-kuttl-tests/barbican-api-7d686cb9b8-z9zrp" Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.435132 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6c3fa2e0-19e5-4ed3-8f78-fda7d9c0aa4f-logs\") pod \"barbican-keystone-listener-6f9c66d74d-h9tvl\" (UID: \"6c3fa2e0-19e5-4ed3-8f78-fda7d9c0aa4f\") " pod="barbican-kuttl-tests/barbican-keystone-listener-6f9c66d74d-h9tvl" Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.435278 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hhsn9\" (UniqueName: \"kubernetes.io/projected/3f75c87f-8fff-45ac-ae2a-6d7ff585eb52-kube-api-access-hhsn9\") pod \"barbican-worker-65bbb948f9-jg95w\" (UID: \"3f75c87f-8fff-45ac-ae2a-6d7ff585eb52\") " pod="barbican-kuttl-tests/barbican-worker-65bbb948f9-jg95w" Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.435336 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/65f781b0-c276-41cd-911f-9c964612d70d-config-data-custom\") pod \"barbican-api-7d686cb9b8-z9zrp\" (UID: \"65f781b0-c276-41cd-911f-9c964612d70d\") " pod="barbican-kuttl-tests/barbican-api-7d686cb9b8-z9zrp" Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.435880 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6c3fa2e0-19e5-4ed3-8f78-fda7d9c0aa4f-logs\") pod \"barbican-keystone-listener-6f9c66d74d-h9tvl\" (UID: \"6c3fa2e0-19e5-4ed3-8f78-fda7d9c0aa4f\") " pod="barbican-kuttl-tests/barbican-keystone-listener-6f9c66d74d-h9tvl" Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.435897 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3f75c87f-8fff-45ac-ae2a-6d7ff585eb52-logs\") pod \"barbican-worker-65bbb948f9-jg95w\" (UID: \"3f75c87f-8fff-45ac-ae2a-6d7ff585eb52\") " pod="barbican-kuttl-tests/barbican-worker-65bbb948f9-jg95w" Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.441503 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3f75c87f-8fff-45ac-ae2a-6d7ff585eb52-config-data-custom\") pod \"barbican-worker-65bbb948f9-jg95w\" (UID: \"3f75c87f-8fff-45ac-ae2a-6d7ff585eb52\") " pod="barbican-kuttl-tests/barbican-worker-65bbb948f9-jg95w" Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.442050 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c3fa2e0-19e5-4ed3-8f78-fda7d9c0aa4f-config-data\") pod \"barbican-keystone-listener-6f9c66d74d-h9tvl\" (UID: \"6c3fa2e0-19e5-4ed3-8f78-fda7d9c0aa4f\") " pod="barbican-kuttl-tests/barbican-keystone-listener-6f9c66d74d-h9tvl" 
Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.443087 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6c3fa2e0-19e5-4ed3-8f78-fda7d9c0aa4f-config-data-custom\") pod \"barbican-keystone-listener-6f9c66d74d-h9tvl\" (UID: \"6c3fa2e0-19e5-4ed3-8f78-fda7d9c0aa4f\") " pod="barbican-kuttl-tests/barbican-keystone-listener-6f9c66d74d-h9tvl" Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.443431 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f75c87f-8fff-45ac-ae2a-6d7ff585eb52-config-data\") pod \"barbican-worker-65bbb948f9-jg95w\" (UID: \"3f75c87f-8fff-45ac-ae2a-6d7ff585eb52\") " pod="barbican-kuttl-tests/barbican-worker-65bbb948f9-jg95w" Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.452989 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fjq5b\" (UniqueName: \"kubernetes.io/projected/6c3fa2e0-19e5-4ed3-8f78-fda7d9c0aa4f-kube-api-access-fjq5b\") pod \"barbican-keystone-listener-6f9c66d74d-h9tvl\" (UID: \"6c3fa2e0-19e5-4ed3-8f78-fda7d9c0aa4f\") " pod="barbican-kuttl-tests/barbican-keystone-listener-6f9c66d74d-h9tvl" Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.453616 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hhsn9\" (UniqueName: \"kubernetes.io/projected/3f75c87f-8fff-45ac-ae2a-6d7ff585eb52-kube-api-access-hhsn9\") pod \"barbican-worker-65bbb948f9-jg95w\" (UID: \"3f75c87f-8fff-45ac-ae2a-6d7ff585eb52\") " pod="barbican-kuttl-tests/barbican-worker-65bbb948f9-jg95w" Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.460122 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="barbican-kuttl-tests/barbican-worker-65bbb948f9-jg95w" Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.474366 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="barbican-kuttl-tests/barbican-keystone-listener-6f9c66d74d-h9tvl" Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.538007 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65f781b0-c276-41cd-911f-9c964612d70d-config-data\") pod \"barbican-api-7d686cb9b8-z9zrp\" (UID: \"65f781b0-c276-41cd-911f-9c964612d70d\") " pod="barbican-kuttl-tests/barbican-api-7d686cb9b8-z9zrp" Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.538405 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vwxst\" (UniqueName: \"kubernetes.io/projected/65f781b0-c276-41cd-911f-9c964612d70d-kube-api-access-vwxst\") pod \"barbican-api-7d686cb9b8-z9zrp\" (UID: \"65f781b0-c276-41cd-911f-9c964612d70d\") " pod="barbican-kuttl-tests/barbican-api-7d686cb9b8-z9zrp" Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.538589 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/65f781b0-c276-41cd-911f-9c964612d70d-config-data-custom\") pod \"barbican-api-7d686cb9b8-z9zrp\" (UID: \"65f781b0-c276-41cd-911f-9c964612d70d\") " pod="barbican-kuttl-tests/barbican-api-7d686cb9b8-z9zrp" Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.538720 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/65f781b0-c276-41cd-911f-9c964612d70d-logs\") pod \"barbican-api-7d686cb9b8-z9zrp\" (UID: \"65f781b0-c276-41cd-911f-9c964612d70d\") " pod="barbican-kuttl-tests/barbican-api-7d686cb9b8-z9zrp" Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.539363 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/65f781b0-c276-41cd-911f-9c964612d70d-logs\") pod \"barbican-api-7d686cb9b8-z9zrp\" (UID: \"65f781b0-c276-41cd-911f-9c964612d70d\") " pod="barbican-kuttl-tests/barbican-api-7d686cb9b8-z9zrp" Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.543283 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65f781b0-c276-41cd-911f-9c964612d70d-config-data\") pod \"barbican-api-7d686cb9b8-z9zrp\" (UID: \"65f781b0-c276-41cd-911f-9c964612d70d\") " pod="barbican-kuttl-tests/barbican-api-7d686cb9b8-z9zrp" Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.545662 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/65f781b0-c276-41cd-911f-9c964612d70d-config-data-custom\") pod \"barbican-api-7d686cb9b8-z9zrp\" (UID: \"65f781b0-c276-41cd-911f-9c964612d70d\") " pod="barbican-kuttl-tests/barbican-api-7d686cb9b8-z9zrp" Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.554833 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vwxst\" (UniqueName: \"kubernetes.io/projected/65f781b0-c276-41cd-911f-9c964612d70d-kube-api-access-vwxst\") pod \"barbican-api-7d686cb9b8-z9zrp\" (UID: \"65f781b0-c276-41cd-911f-9c964612d70d\") " pod="barbican-kuttl-tests/barbican-api-7d686cb9b8-z9zrp" Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.652628 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="barbican-kuttl-tests/barbican-api-7d686cb9b8-z9zrp" Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.680810 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["barbican-kuttl-tests/barbican-api-7d686cb9b8-r56cp"] Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.682214 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="barbican-kuttl-tests/barbican-api-7d686cb9b8-r56cp" Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.705960 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["barbican-kuttl-tests/barbican-api-7d686cb9b8-r56cp"] Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.766697 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["barbican-kuttl-tests/barbican-keystone-listener-6f9c66d74d-wwr54"] Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.768397 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="barbican-kuttl-tests/barbican-keystone-listener-6f9c66d74d-wwr54" Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.780197 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["barbican-kuttl-tests/barbican-keystone-listener-6f9c66d74d-h9tvl"] Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.787276 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["barbican-kuttl-tests/barbican-keystone-listener-6f9c66d74d-wwr54"] Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.847629 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9fce1875-76a1-485d-a6f1-dd503156e750-logs\") pod \"barbican-api-7d686cb9b8-r56cp\" (UID: \"9fce1875-76a1-485d-a6f1-dd503156e750\") " pod="barbican-kuttl-tests/barbican-api-7d686cb9b8-r56cp" Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.847700 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9fce1875-76a1-485d-a6f1-dd503156e750-config-data-custom\") pod \"barbican-api-7d686cb9b8-r56cp\" (UID: \"9fce1875-76a1-485d-a6f1-dd503156e750\") " pod="barbican-kuttl-tests/barbican-api-7d686cb9b8-r56cp" Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.847752 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h4g7f\" (UniqueName: \"kubernetes.io/projected/9fce1875-76a1-485d-a6f1-dd503156e750-kube-api-access-h4g7f\") pod \"barbican-api-7d686cb9b8-r56cp\" (UID: \"9fce1875-76a1-485d-a6f1-dd503156e750\") " pod="barbican-kuttl-tests/barbican-api-7d686cb9b8-r56cp" Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.847841 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9fce1875-76a1-485d-a6f1-dd503156e750-config-data\") pod \"barbican-api-7d686cb9b8-r56cp\" (UID: \"9fce1875-76a1-485d-a6f1-dd503156e750\") " pod="barbican-kuttl-tests/barbican-api-7d686cb9b8-r56cp" Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.882102 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican-keystone-listener-6f9c66d74d-h9tvl" event={"ID":"6c3fa2e0-19e5-4ed3-8f78-fda7d9c0aa4f","Type":"ContainerStarted","Data":"4e1fde413f09656ada43e6653d239ed10188d97252178b4943607ada5eecaa4f"} Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.911827 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["barbican-kuttl-tests/barbican-worker-65bbb948f9-jg95w"] Jan 20 18:18:47 crc kubenswrapper[4558]: W0120 18:18:47.918125 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3f75c87f_8fff_45ac_ae2a_6d7ff585eb52.slice/crio-d1a9ea9f13739b0f97f9113e97af7ed24db5c27b6ce7707a9d0111164730ad38 WatchSource:0}: Error finding container d1a9ea9f13739b0f97f9113e97af7ed24db5c27b6ce7707a9d0111164730ad38: Status 404 returned error can't find the container with id d1a9ea9f13739b0f97f9113e97af7ed24db5c27b6ce7707a9d0111164730ad38 Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.949771 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h4g7f\" (UniqueName: \"kubernetes.io/projected/9fce1875-76a1-485d-a6f1-dd503156e750-kube-api-access-h4g7f\") pod \"barbican-api-7d686cb9b8-r56cp\" (UID: \"9fce1875-76a1-485d-a6f1-dd503156e750\") " pod="barbican-kuttl-tests/barbican-api-7d686cb9b8-r56cp" Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.949887 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7b676\" (UniqueName: \"kubernetes.io/projected/0b59d155-7695-4f4e-b6bb-602ea491cb2d-kube-api-access-7b676\") pod \"barbican-keystone-listener-6f9c66d74d-wwr54\" (UID: \"0b59d155-7695-4f4e-b6bb-602ea491cb2d\") " pod="barbican-kuttl-tests/barbican-keystone-listener-6f9c66d74d-wwr54" Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.949951 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0b59d155-7695-4f4e-b6bb-602ea491cb2d-config-data\") pod \"barbican-keystone-listener-6f9c66d74d-wwr54\" (UID: \"0b59d155-7695-4f4e-b6bb-602ea491cb2d\") " pod="barbican-kuttl-tests/barbican-keystone-listener-6f9c66d74d-wwr54" Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.949975 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0b59d155-7695-4f4e-b6bb-602ea491cb2d-config-data-custom\") pod \"barbican-keystone-listener-6f9c66d74d-wwr54\" (UID: \"0b59d155-7695-4f4e-b6bb-602ea491cb2d\") " pod="barbican-kuttl-tests/barbican-keystone-listener-6f9c66d74d-wwr54" Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.950069 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9fce1875-76a1-485d-a6f1-dd503156e750-config-data\") pod \"barbican-api-7d686cb9b8-r56cp\" (UID: \"9fce1875-76a1-485d-a6f1-dd503156e750\") " pod="barbican-kuttl-tests/barbican-api-7d686cb9b8-r56cp" Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.950136 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9fce1875-76a1-485d-a6f1-dd503156e750-logs\") pod \"barbican-api-7d686cb9b8-r56cp\" (UID: \"9fce1875-76a1-485d-a6f1-dd503156e750\") " pod="barbican-kuttl-tests/barbican-api-7d686cb9b8-r56cp" Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.950159 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0b59d155-7695-4f4e-b6bb-602ea491cb2d-logs\") pod \"barbican-keystone-listener-6f9c66d74d-wwr54\" (UID: \"0b59d155-7695-4f4e-b6bb-602ea491cb2d\") " 
pod="barbican-kuttl-tests/barbican-keystone-listener-6f9c66d74d-wwr54" Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.950196 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9fce1875-76a1-485d-a6f1-dd503156e750-config-data-custom\") pod \"barbican-api-7d686cb9b8-r56cp\" (UID: \"9fce1875-76a1-485d-a6f1-dd503156e750\") " pod="barbican-kuttl-tests/barbican-api-7d686cb9b8-r56cp" Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.951534 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9fce1875-76a1-485d-a6f1-dd503156e750-logs\") pod \"barbican-api-7d686cb9b8-r56cp\" (UID: \"9fce1875-76a1-485d-a6f1-dd503156e750\") " pod="barbican-kuttl-tests/barbican-api-7d686cb9b8-r56cp" Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.960151 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9fce1875-76a1-485d-a6f1-dd503156e750-config-data\") pod \"barbican-api-7d686cb9b8-r56cp\" (UID: \"9fce1875-76a1-485d-a6f1-dd503156e750\") " pod="barbican-kuttl-tests/barbican-api-7d686cb9b8-r56cp" Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.960686 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9fce1875-76a1-485d-a6f1-dd503156e750-config-data-custom\") pod \"barbican-api-7d686cb9b8-r56cp\" (UID: \"9fce1875-76a1-485d-a6f1-dd503156e750\") " pod="barbican-kuttl-tests/barbican-api-7d686cb9b8-r56cp" Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.962822 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["barbican-kuttl-tests/barbican-worker-65bbb948f9-6kjvs"] Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.964592 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="barbican-kuttl-tests/barbican-worker-65bbb948f9-6kjvs" Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.966769 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h4g7f\" (UniqueName: \"kubernetes.io/projected/9fce1875-76a1-485d-a6f1-dd503156e750-kube-api-access-h4g7f\") pod \"barbican-api-7d686cb9b8-r56cp\" (UID: \"9fce1875-76a1-485d-a6f1-dd503156e750\") " pod="barbican-kuttl-tests/barbican-api-7d686cb9b8-r56cp" Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.984529 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["barbican-kuttl-tests/barbican-worker-65bbb948f9-6kjvs"] Jan 20 18:18:47 crc kubenswrapper[4558]: I0120 18:18:47.998321 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="barbican-kuttl-tests/barbican-api-7d686cb9b8-r56cp" Jan 20 18:18:48 crc kubenswrapper[4558]: I0120 18:18:48.051778 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0b59d155-7695-4f4e-b6bb-602ea491cb2d-config-data\") pod \"barbican-keystone-listener-6f9c66d74d-wwr54\" (UID: \"0b59d155-7695-4f4e-b6bb-602ea491cb2d\") " pod="barbican-kuttl-tests/barbican-keystone-listener-6f9c66d74d-wwr54" Jan 20 18:18:48 crc kubenswrapper[4558]: I0120 18:18:48.051823 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0b59d155-7695-4f4e-b6bb-602ea491cb2d-config-data-custom\") pod \"barbican-keystone-listener-6f9c66d74d-wwr54\" (UID: \"0b59d155-7695-4f4e-b6bb-602ea491cb2d\") " pod="barbican-kuttl-tests/barbican-keystone-listener-6f9c66d74d-wwr54" Jan 20 18:18:48 crc kubenswrapper[4558]: I0120 18:18:48.051975 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0b59d155-7695-4f4e-b6bb-602ea491cb2d-logs\") pod \"barbican-keystone-listener-6f9c66d74d-wwr54\" (UID: \"0b59d155-7695-4f4e-b6bb-602ea491cb2d\") " pod="barbican-kuttl-tests/barbican-keystone-listener-6f9c66d74d-wwr54" Jan 20 18:18:48 crc kubenswrapper[4558]: I0120 18:18:48.052095 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7b676\" (UniqueName: \"kubernetes.io/projected/0b59d155-7695-4f4e-b6bb-602ea491cb2d-kube-api-access-7b676\") pod \"barbican-keystone-listener-6f9c66d74d-wwr54\" (UID: \"0b59d155-7695-4f4e-b6bb-602ea491cb2d\") " pod="barbican-kuttl-tests/barbican-keystone-listener-6f9c66d74d-wwr54" Jan 20 18:18:48 crc kubenswrapper[4558]: I0120 18:18:48.052451 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0b59d155-7695-4f4e-b6bb-602ea491cb2d-logs\") pod \"barbican-keystone-listener-6f9c66d74d-wwr54\" (UID: \"0b59d155-7695-4f4e-b6bb-602ea491cb2d\") " pod="barbican-kuttl-tests/barbican-keystone-listener-6f9c66d74d-wwr54" Jan 20 18:18:48 crc kubenswrapper[4558]: I0120 18:18:48.056761 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0b59d155-7695-4f4e-b6bb-602ea491cb2d-config-data\") pod \"barbican-keystone-listener-6f9c66d74d-wwr54\" (UID: \"0b59d155-7695-4f4e-b6bb-602ea491cb2d\") " pod="barbican-kuttl-tests/barbican-keystone-listener-6f9c66d74d-wwr54" Jan 20 18:18:48 crc kubenswrapper[4558]: I0120 18:18:48.057548 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0b59d155-7695-4f4e-b6bb-602ea491cb2d-config-data-custom\") pod \"barbican-keystone-listener-6f9c66d74d-wwr54\" (UID: \"0b59d155-7695-4f4e-b6bb-602ea491cb2d\") " pod="barbican-kuttl-tests/barbican-keystone-listener-6f9c66d74d-wwr54" Jan 20 18:18:48 crc kubenswrapper[4558]: I0120 18:18:48.068718 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7b676\" (UniqueName: \"kubernetes.io/projected/0b59d155-7695-4f4e-b6bb-602ea491cb2d-kube-api-access-7b676\") pod \"barbican-keystone-listener-6f9c66d74d-wwr54\" (UID: \"0b59d155-7695-4f4e-b6bb-602ea491cb2d\") " pod="barbican-kuttl-tests/barbican-keystone-listener-6f9c66d74d-wwr54" Jan 20 18:18:48 crc kubenswrapper[4558]: I0120 18:18:48.088300 4558 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="barbican-kuttl-tests/barbican-keystone-listener-6f9c66d74d-wwr54" Jan 20 18:18:48 crc kubenswrapper[4558]: I0120 18:18:48.143668 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["barbican-kuttl-tests/barbican-api-7d686cb9b8-z9zrp"] Jan 20 18:18:48 crc kubenswrapper[4558]: I0120 18:18:48.153922 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h4zvz\" (UniqueName: \"kubernetes.io/projected/afc237c4-c25c-4dde-91f5-23accf56f5a9-kube-api-access-h4zvz\") pod \"barbican-worker-65bbb948f9-6kjvs\" (UID: \"afc237c4-c25c-4dde-91f5-23accf56f5a9\") " pod="barbican-kuttl-tests/barbican-worker-65bbb948f9-6kjvs" Jan 20 18:18:48 crc kubenswrapper[4558]: I0120 18:18:48.154032 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/afc237c4-c25c-4dde-91f5-23accf56f5a9-config-data\") pod \"barbican-worker-65bbb948f9-6kjvs\" (UID: \"afc237c4-c25c-4dde-91f5-23accf56f5a9\") " pod="barbican-kuttl-tests/barbican-worker-65bbb948f9-6kjvs" Jan 20 18:18:48 crc kubenswrapper[4558]: I0120 18:18:48.154089 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/afc237c4-c25c-4dde-91f5-23accf56f5a9-config-data-custom\") pod \"barbican-worker-65bbb948f9-6kjvs\" (UID: \"afc237c4-c25c-4dde-91f5-23accf56f5a9\") " pod="barbican-kuttl-tests/barbican-worker-65bbb948f9-6kjvs" Jan 20 18:18:48 crc kubenswrapper[4558]: I0120 18:18:48.154151 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/afc237c4-c25c-4dde-91f5-23accf56f5a9-logs\") pod \"barbican-worker-65bbb948f9-6kjvs\" (UID: \"afc237c4-c25c-4dde-91f5-23accf56f5a9\") " pod="barbican-kuttl-tests/barbican-worker-65bbb948f9-6kjvs" Jan 20 18:18:48 crc kubenswrapper[4558]: I0120 18:18:48.255754 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/afc237c4-c25c-4dde-91f5-23accf56f5a9-logs\") pod \"barbican-worker-65bbb948f9-6kjvs\" (UID: \"afc237c4-c25c-4dde-91f5-23accf56f5a9\") " pod="barbican-kuttl-tests/barbican-worker-65bbb948f9-6kjvs" Jan 20 18:18:48 crc kubenswrapper[4558]: I0120 18:18:48.255810 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h4zvz\" (UniqueName: \"kubernetes.io/projected/afc237c4-c25c-4dde-91f5-23accf56f5a9-kube-api-access-h4zvz\") pod \"barbican-worker-65bbb948f9-6kjvs\" (UID: \"afc237c4-c25c-4dde-91f5-23accf56f5a9\") " pod="barbican-kuttl-tests/barbican-worker-65bbb948f9-6kjvs" Jan 20 18:18:48 crc kubenswrapper[4558]: I0120 18:18:48.255896 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/afc237c4-c25c-4dde-91f5-23accf56f5a9-config-data\") pod \"barbican-worker-65bbb948f9-6kjvs\" (UID: \"afc237c4-c25c-4dde-91f5-23accf56f5a9\") " pod="barbican-kuttl-tests/barbican-worker-65bbb948f9-6kjvs" Jan 20 18:18:48 crc kubenswrapper[4558]: I0120 18:18:48.255957 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/afc237c4-c25c-4dde-91f5-23accf56f5a9-config-data-custom\") pod \"barbican-worker-65bbb948f9-6kjvs\" (UID: 
\"afc237c4-c25c-4dde-91f5-23accf56f5a9\") " pod="barbican-kuttl-tests/barbican-worker-65bbb948f9-6kjvs" Jan 20 18:18:48 crc kubenswrapper[4558]: I0120 18:18:48.256924 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/afc237c4-c25c-4dde-91f5-23accf56f5a9-logs\") pod \"barbican-worker-65bbb948f9-6kjvs\" (UID: \"afc237c4-c25c-4dde-91f5-23accf56f5a9\") " pod="barbican-kuttl-tests/barbican-worker-65bbb948f9-6kjvs" Jan 20 18:18:48 crc kubenswrapper[4558]: I0120 18:18:48.259117 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/afc237c4-c25c-4dde-91f5-23accf56f5a9-config-data-custom\") pod \"barbican-worker-65bbb948f9-6kjvs\" (UID: \"afc237c4-c25c-4dde-91f5-23accf56f5a9\") " pod="barbican-kuttl-tests/barbican-worker-65bbb948f9-6kjvs" Jan 20 18:18:48 crc kubenswrapper[4558]: I0120 18:18:48.260148 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/afc237c4-c25c-4dde-91f5-23accf56f5a9-config-data\") pod \"barbican-worker-65bbb948f9-6kjvs\" (UID: \"afc237c4-c25c-4dde-91f5-23accf56f5a9\") " pod="barbican-kuttl-tests/barbican-worker-65bbb948f9-6kjvs" Jan 20 18:18:48 crc kubenswrapper[4558]: I0120 18:18:48.271247 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h4zvz\" (UniqueName: \"kubernetes.io/projected/afc237c4-c25c-4dde-91f5-23accf56f5a9-kube-api-access-h4zvz\") pod \"barbican-worker-65bbb948f9-6kjvs\" (UID: \"afc237c4-c25c-4dde-91f5-23accf56f5a9\") " pod="barbican-kuttl-tests/barbican-worker-65bbb948f9-6kjvs" Jan 20 18:18:48 crc kubenswrapper[4558]: I0120 18:18:48.278515 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="barbican-kuttl-tests/barbican-worker-65bbb948f9-6kjvs" Jan 20 18:18:48 crc kubenswrapper[4558]: I0120 18:18:48.390385 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["barbican-kuttl-tests/barbican-api-7d686cb9b8-r56cp"] Jan 20 18:18:48 crc kubenswrapper[4558]: I0120 18:18:48.474211 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["barbican-kuttl-tests/barbican-keystone-listener-6f9c66d74d-wwr54"] Jan 20 18:18:48 crc kubenswrapper[4558]: W0120 18:18:48.489358 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0b59d155_7695_4f4e_b6bb_602ea491cb2d.slice/crio-e802234d4a804f0c5d2fbbbbcabfde57a43361590e14d13c7fa38a49249bb2a7 WatchSource:0}: Error finding container e802234d4a804f0c5d2fbbbbcabfde57a43361590e14d13c7fa38a49249bb2a7: Status 404 returned error can't find the container with id e802234d4a804f0c5d2fbbbbcabfde57a43361590e14d13c7fa38a49249bb2a7 Jan 20 18:18:48 crc kubenswrapper[4558]: I0120 18:18:48.501634 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["barbican-kuttl-tests/barbican-worker-65bbb948f9-6kjvs"] Jan 20 18:18:48 crc kubenswrapper[4558]: W0120 18:18:48.511000 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podafc237c4_c25c_4dde_91f5_23accf56f5a9.slice/crio-469fd79107cfc61d0386c83dbeb86bc7d68679bdd646bc122889d4d081e35769 WatchSource:0}: Error finding container 469fd79107cfc61d0386c83dbeb86bc7d68679bdd646bc122889d4d081e35769: Status 404 returned error can't find the container with id 469fd79107cfc61d0386c83dbeb86bc7d68679bdd646bc122889d4d081e35769 Jan 20 18:18:48 crc kubenswrapper[4558]: I0120 18:18:48.894705 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican-api-7d686cb9b8-z9zrp" event={"ID":"65f781b0-c276-41cd-911f-9c964612d70d","Type":"ContainerStarted","Data":"360ccb7b28a8926116edd2dc0fff31e313aa72884ad5ca03c021f606c938c63d"} Jan 20 18:18:48 crc kubenswrapper[4558]: I0120 18:18:48.894792 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican-api-7d686cb9b8-z9zrp" event={"ID":"65f781b0-c276-41cd-911f-9c964612d70d","Type":"ContainerStarted","Data":"1f6a18382756ee048f8366d1db72717a6b8e361e2b7c7050d003e430c11f0923"} Jan 20 18:18:48 crc kubenswrapper[4558]: I0120 18:18:48.894812 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican-api-7d686cb9b8-z9zrp" event={"ID":"65f781b0-c276-41cd-911f-9c964612d70d","Type":"ContainerStarted","Data":"bdc0cf10d6bb99533295de0de5ffc0b373a88bbfe7911df4426238d198e5c706"} Jan 20 18:18:48 crc kubenswrapper[4558]: I0120 18:18:48.894907 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="barbican-kuttl-tests/barbican-api-7d686cb9b8-z9zrp" Jan 20 18:18:48 crc kubenswrapper[4558]: I0120 18:18:48.896699 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican-keystone-listener-6f9c66d74d-wwr54" event={"ID":"0b59d155-7695-4f4e-b6bb-602ea491cb2d","Type":"ContainerStarted","Data":"e802234d4a804f0c5d2fbbbbcabfde57a43361590e14d13c7fa38a49249bb2a7"} Jan 20 18:18:48 crc kubenswrapper[4558]: I0120 18:18:48.900826 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican-worker-65bbb948f9-jg95w" event={"ID":"3f75c87f-8fff-45ac-ae2a-6d7ff585eb52","Type":"ContainerStarted","Data":"40ffad0a2494fb759a68da86800f733040c8e2db7f259ca696af71da7fffb661"} 
Jan 20 18:18:48 crc kubenswrapper[4558]: I0120 18:18:48.900908 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican-worker-65bbb948f9-jg95w" event={"ID":"3f75c87f-8fff-45ac-ae2a-6d7ff585eb52","Type":"ContainerStarted","Data":"d1a9ea9f13739b0f97f9113e97af7ed24db5c27b6ce7707a9d0111164730ad38"} Jan 20 18:18:48 crc kubenswrapper[4558]: I0120 18:18:48.904559 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican-api-7d686cb9b8-r56cp" event={"ID":"9fce1875-76a1-485d-a6f1-dd503156e750","Type":"ContainerStarted","Data":"71fc369dd9331845f5486b4eb5e2f40365320e5ff59f4aad10e67e2f7af937e3"} Jan 20 18:18:48 crc kubenswrapper[4558]: I0120 18:18:48.904593 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican-api-7d686cb9b8-r56cp" event={"ID":"9fce1875-76a1-485d-a6f1-dd503156e750","Type":"ContainerStarted","Data":"47e9d29a4aed9c610949db6f612a9990144180ca22ace7a08ea7f838800c8a22"} Jan 20 18:18:48 crc kubenswrapper[4558]: I0120 18:18:48.904606 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican-api-7d686cb9b8-r56cp" event={"ID":"9fce1875-76a1-485d-a6f1-dd503156e750","Type":"ContainerStarted","Data":"1b7c563bea99ed0704a3a51e75635b71041b6f664a14216e4e7af65e805cab1f"} Jan 20 18:18:48 crc kubenswrapper[4558]: I0120 18:18:48.904819 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="barbican-kuttl-tests/barbican-api-7d686cb9b8-r56cp" Jan 20 18:18:48 crc kubenswrapper[4558]: I0120 18:18:48.904883 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="barbican-kuttl-tests/barbican-api-7d686cb9b8-r56cp" Jan 20 18:18:48 crc kubenswrapper[4558]: I0120 18:18:48.913958 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican-worker-65bbb948f9-6kjvs" event={"ID":"afc237c4-c25c-4dde-91f5-23accf56f5a9","Type":"ContainerStarted","Data":"469fd79107cfc61d0386c83dbeb86bc7d68679bdd646bc122889d4d081e35769"} Jan 20 18:18:48 crc kubenswrapper[4558]: I0120 18:18:48.918921 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="barbican-kuttl-tests/barbican-api-7d686cb9b8-z9zrp" podStartSLOduration=1.918904136 podStartE2EDuration="1.918904136s" podCreationTimestamp="2026-01-20 18:18:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:18:48.916261688 +0000 UTC m=+5822.676599655" watchObservedRunningTime="2026-01-20 18:18:48.918904136 +0000 UTC m=+5822.679242103" Jan 20 18:18:48 crc kubenswrapper[4558]: I0120 18:18:48.942335 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="barbican-kuttl-tests/barbican-api-7d686cb9b8-r56cp" podStartSLOduration=1.9423128109999999 podStartE2EDuration="1.942312811s" podCreationTimestamp="2026-01-20 18:18:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:18:48.93841427 +0000 UTC m=+5822.698752237" watchObservedRunningTime="2026-01-20 18:18:48.942312811 +0000 UTC m=+5822.702650777" Jan 20 18:18:48 crc kubenswrapper[4558]: I0120 18:18:48.990172 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["barbican-kuttl-tests/barbican-api-7d686cb9b8-r56cp"] Jan 20 18:18:49 crc kubenswrapper[4558]: I0120 18:18:49.181315 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["barbican-kuttl-tests/barbican-keystone-listener-6f9c66d74d-h9tvl"] 
Jan 20 18:18:49 crc kubenswrapper[4558]: I0120 18:18:49.294195 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["barbican-kuttl-tests/barbican-worker-65bbb948f9-jg95w"] Jan 20 18:18:49 crc kubenswrapper[4558]: I0120 18:18:49.922385 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican-worker-65bbb948f9-6kjvs" event={"ID":"afc237c4-c25c-4dde-91f5-23accf56f5a9","Type":"ContainerStarted","Data":"1d759ec87bb962d402368d8716821b4cdcc5886d59e1f98752dc517a66f23913"} Jan 20 18:18:49 crc kubenswrapper[4558]: I0120 18:18:49.922609 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican-worker-65bbb948f9-6kjvs" event={"ID":"afc237c4-c25c-4dde-91f5-23accf56f5a9","Type":"ContainerStarted","Data":"9afe78cb8a1da2bb5b08c69b68f397924a7031558028c3dfc119c19cd15d5437"} Jan 20 18:18:49 crc kubenswrapper[4558]: I0120 18:18:49.925070 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican-keystone-listener-6f9c66d74d-wwr54" event={"ID":"0b59d155-7695-4f4e-b6bb-602ea491cb2d","Type":"ContainerStarted","Data":"f84a9271aeceb1980a056bfe780df5b8ee246aea786cbdc53c89fc3a34a5e68c"} Jan 20 18:18:49 crc kubenswrapper[4558]: I0120 18:18:49.925100 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican-keystone-listener-6f9c66d74d-wwr54" event={"ID":"0b59d155-7695-4f4e-b6bb-602ea491cb2d","Type":"ContainerStarted","Data":"db0da8cda5443858ce613edc9d8b9076f173aa1bb3746580df94d5ec5f14ccc2"} Jan 20 18:18:49 crc kubenswrapper[4558]: I0120 18:18:49.930222 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican-worker-65bbb948f9-jg95w" event={"ID":"3f75c87f-8fff-45ac-ae2a-6d7ff585eb52","Type":"ContainerStarted","Data":"e47a5e5509e4731b79aa7169915e1554d46a0023265e61269bb4ed67baaa3c14"} Jan 20 18:18:49 crc kubenswrapper[4558]: I0120 18:18:49.933033 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican-keystone-listener-6f9c66d74d-h9tvl" event={"ID":"6c3fa2e0-19e5-4ed3-8f78-fda7d9c0aa4f","Type":"ContainerStarted","Data":"57e799d2ff050019df5545bcaa75a294f385f74f7f397659129f9db6489312e0"} Jan 20 18:18:49 crc kubenswrapper[4558]: I0120 18:18:49.933063 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican-keystone-listener-6f9c66d74d-h9tvl" event={"ID":"6c3fa2e0-19e5-4ed3-8f78-fda7d9c0aa4f","Type":"ContainerStarted","Data":"49531a534ec7383a9c7baf6296f5dda1b911df495b2c01f24a463815a298eadb"} Jan 20 18:18:49 crc kubenswrapper[4558]: I0120 18:18:49.933087 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="barbican-kuttl-tests/barbican-api-7d686cb9b8-z9zrp" Jan 20 18:18:49 crc kubenswrapper[4558]: I0120 18:18:49.959393 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="barbican-kuttl-tests/barbican-worker-65bbb948f9-6kjvs" podStartSLOduration=2.354324626 podStartE2EDuration="2.959381163s" podCreationTimestamp="2026-01-20 18:18:47 +0000 UTC" firstStartedPulling="2026-01-20 18:18:48.513144938 +0000 UTC m=+5822.273482905" lastFinishedPulling="2026-01-20 18:18:49.118201485 +0000 UTC m=+5822.878539442" observedRunningTime="2026-01-20 18:18:49.937450587 +0000 UTC m=+5823.697788555" watchObservedRunningTime="2026-01-20 18:18:49.959381163 +0000 UTC m=+5823.719719130" Jan 20 18:18:49 crc kubenswrapper[4558]: I0120 18:18:49.975053 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="barbican-kuttl-tests/barbican-keystone-listener-6f9c66d74d-h9tvl" podStartSLOduration=1.955985399 podStartE2EDuration="2.97504237s" podCreationTimestamp="2026-01-20 18:18:47 +0000 UTC" firstStartedPulling="2026-01-20 18:18:47.782754493 +0000 UTC m=+5821.543092460" lastFinishedPulling="2026-01-20 18:18:48.801811463 +0000 UTC m=+5822.562149431" observedRunningTime="2026-01-20 18:18:49.97049015 +0000 UTC m=+5823.730828117" watchObservedRunningTime="2026-01-20 18:18:49.97504237 +0000 UTC m=+5823.735380337" Jan 20 18:18:49 crc kubenswrapper[4558]: I0120 18:18:49.990105 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="barbican-kuttl-tests/barbican-worker-65bbb948f9-jg95w" podStartSLOduration=2.245637416 podStartE2EDuration="2.990094902s" podCreationTimestamp="2026-01-20 18:18:47 +0000 UTC" firstStartedPulling="2026-01-20 18:18:47.919690757 +0000 UTC m=+5821.680028724" lastFinishedPulling="2026-01-20 18:18:48.664148242 +0000 UTC m=+5822.424486210" observedRunningTime="2026-01-20 18:18:49.984638362 +0000 UTC m=+5823.744976329" watchObservedRunningTime="2026-01-20 18:18:49.990094902 +0000 UTC m=+5823.750432868" Jan 20 18:18:50 crc kubenswrapper[4558]: I0120 18:18:50.002041 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="barbican-kuttl-tests/barbican-keystone-listener-6f9c66d74d-wwr54" podStartSLOduration=2.4543485560000002 podStartE2EDuration="3.002022259s" podCreationTimestamp="2026-01-20 18:18:47 +0000 UTC" firstStartedPulling="2026-01-20 18:18:48.495220455 +0000 UTC m=+5822.255558423" lastFinishedPulling="2026-01-20 18:18:49.042894159 +0000 UTC m=+5822.803232126" observedRunningTime="2026-01-20 18:18:49.999468136 +0000 UTC m=+5823.759806104" watchObservedRunningTime="2026-01-20 18:18:50.002022259 +0000 UTC m=+5823.762360226" Jan 20 18:18:50 crc kubenswrapper[4558]: I0120 18:18:50.433206 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["barbican-kuttl-tests/barbican-api-7d686cb9b8-z9zrp"] Jan 20 18:18:50 crc kubenswrapper[4558]: I0120 18:18:50.694069 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["barbican-kuttl-tests/barbican-keystone-listener-6f9c66d74d-wwr54"] Jan 20 18:18:50 crc kubenswrapper[4558]: I0120 18:18:50.856732 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["barbican-kuttl-tests/barbican-worker-65bbb948f9-6kjvs"] Jan 20 18:18:50 crc kubenswrapper[4558]: I0120 18:18:50.938368 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="barbican-kuttl-tests/barbican-keystone-listener-6f9c66d74d-h9tvl" podUID="6c3fa2e0-19e5-4ed3-8f78-fda7d9c0aa4f" containerName="barbican-keystone-listener-log" containerID="cri-o://49531a534ec7383a9c7baf6296f5dda1b911df495b2c01f24a463815a298eadb" gracePeriod=30 Jan 20 18:18:50 crc kubenswrapper[4558]: I0120 18:18:50.938425 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="barbican-kuttl-tests/barbican-keystone-listener-6f9c66d74d-h9tvl" podUID="6c3fa2e0-19e5-4ed3-8f78-fda7d9c0aa4f" containerName="barbican-keystone-listener" containerID="cri-o://57e799d2ff050019df5545bcaa75a294f385f74f7f397659129f9db6489312e0" gracePeriod=30 Jan 20 18:18:50 crc kubenswrapper[4558]: I0120 18:18:50.939878 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="barbican-kuttl-tests/barbican-api-7d686cb9b8-r56cp" podUID="9fce1875-76a1-485d-a6f1-dd503156e750" containerName="barbican-api-log" containerID="cri-o://47e9d29a4aed9c610949db6f612a9990144180ca22ace7a08ea7f838800c8a22" gracePeriod=30 Jan 20 18:18:50 
crc kubenswrapper[4558]: I0120 18:18:50.940026 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="barbican-kuttl-tests/barbican-api-7d686cb9b8-r56cp" podUID="9fce1875-76a1-485d-a6f1-dd503156e750" containerName="barbican-api" containerID="cri-o://71fc369dd9331845f5486b4eb5e2f40365320e5ff59f4aad10e67e2f7af937e3" gracePeriod=30 Jan 20 18:18:50 crc kubenswrapper[4558]: I0120 18:18:50.940070 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="barbican-kuttl-tests/barbican-worker-65bbb948f9-jg95w" podUID="3f75c87f-8fff-45ac-ae2a-6d7ff585eb52" containerName="barbican-worker-log" containerID="cri-o://40ffad0a2494fb759a68da86800f733040c8e2db7f259ca696af71da7fffb661" gracePeriod=30 Jan 20 18:18:50 crc kubenswrapper[4558]: I0120 18:18:50.940123 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="barbican-kuttl-tests/barbican-worker-65bbb948f9-jg95w" podUID="3f75c87f-8fff-45ac-ae2a-6d7ff585eb52" containerName="barbican-worker" containerID="cri-o://e47a5e5509e4731b79aa7169915e1554d46a0023265e61269bb4ed67baaa3c14" gracePeriod=30 Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:51.439434 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="barbican-kuttl-tests/barbican-api-7d686cb9b8-r56cp" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:51.487587 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="barbican-kuttl-tests/barbican-worker-65bbb948f9-jg95w" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:51.521677 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3f75c87f-8fff-45ac-ae2a-6d7ff585eb52-logs\") pod \"3f75c87f-8fff-45ac-ae2a-6d7ff585eb52\" (UID: \"3f75c87f-8fff-45ac-ae2a-6d7ff585eb52\") " Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:51.521719 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9fce1875-76a1-485d-a6f1-dd503156e750-logs\") pod \"9fce1875-76a1-485d-a6f1-dd503156e750\" (UID: \"9fce1875-76a1-485d-a6f1-dd503156e750\") " Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:51.521742 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9fce1875-76a1-485d-a6f1-dd503156e750-config-data\") pod \"9fce1875-76a1-485d-a6f1-dd503156e750\" (UID: \"9fce1875-76a1-485d-a6f1-dd503156e750\") " Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:51.521778 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h4g7f\" (UniqueName: \"kubernetes.io/projected/9fce1875-76a1-485d-a6f1-dd503156e750-kube-api-access-h4g7f\") pod \"9fce1875-76a1-485d-a6f1-dd503156e750\" (UID: \"9fce1875-76a1-485d-a6f1-dd503156e750\") " Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:51.521809 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3f75c87f-8fff-45ac-ae2a-6d7ff585eb52-config-data-custom\") pod \"3f75c87f-8fff-45ac-ae2a-6d7ff585eb52\" (UID: \"3f75c87f-8fff-45ac-ae2a-6d7ff585eb52\") " Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:51.521852 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9fce1875-76a1-485d-a6f1-dd503156e750-config-data-custom\") pod 
\"9fce1875-76a1-485d-a6f1-dd503156e750\" (UID: \"9fce1875-76a1-485d-a6f1-dd503156e750\") " Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:51.521897 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hhsn9\" (UniqueName: \"kubernetes.io/projected/3f75c87f-8fff-45ac-ae2a-6d7ff585eb52-kube-api-access-hhsn9\") pod \"3f75c87f-8fff-45ac-ae2a-6d7ff585eb52\" (UID: \"3f75c87f-8fff-45ac-ae2a-6d7ff585eb52\") " Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:51.521948 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f75c87f-8fff-45ac-ae2a-6d7ff585eb52-config-data\") pod \"3f75c87f-8fff-45ac-ae2a-6d7ff585eb52\" (UID: \"3f75c87f-8fff-45ac-ae2a-6d7ff585eb52\") " Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:51.522155 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3f75c87f-8fff-45ac-ae2a-6d7ff585eb52-logs" (OuterVolumeSpecName: "logs") pod "3f75c87f-8fff-45ac-ae2a-6d7ff585eb52" (UID: "3f75c87f-8fff-45ac-ae2a-6d7ff585eb52"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:51.522181 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9fce1875-76a1-485d-a6f1-dd503156e750-logs" (OuterVolumeSpecName: "logs") pod "9fce1875-76a1-485d-a6f1-dd503156e750" (UID: "9fce1875-76a1-485d-a6f1-dd503156e750"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:51.527750 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f75c87f-8fff-45ac-ae2a-6d7ff585eb52-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "3f75c87f-8fff-45ac-ae2a-6d7ff585eb52" (UID: "3f75c87f-8fff-45ac-ae2a-6d7ff585eb52"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:51.527922 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3f75c87f-8fff-45ac-ae2a-6d7ff585eb52-kube-api-access-hhsn9" (OuterVolumeSpecName: "kube-api-access-hhsn9") pod "3f75c87f-8fff-45ac-ae2a-6d7ff585eb52" (UID: "3f75c87f-8fff-45ac-ae2a-6d7ff585eb52"). InnerVolumeSpecName "kube-api-access-hhsn9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:51.527990 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9fce1875-76a1-485d-a6f1-dd503156e750-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "9fce1875-76a1-485d-a6f1-dd503156e750" (UID: "9fce1875-76a1-485d-a6f1-dd503156e750"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:51.528275 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9fce1875-76a1-485d-a6f1-dd503156e750-kube-api-access-h4g7f" (OuterVolumeSpecName: "kube-api-access-h4g7f") pod "9fce1875-76a1-485d-a6f1-dd503156e750" (UID: "9fce1875-76a1-485d-a6f1-dd503156e750"). InnerVolumeSpecName "kube-api-access-h4g7f". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:51.546441 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="barbican-kuttl-tests/barbican-keystone-listener-6f9c66d74d-h9tvl" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:51.555950 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9fce1875-76a1-485d-a6f1-dd503156e750-config-data" (OuterVolumeSpecName: "config-data") pod "9fce1875-76a1-485d-a6f1-dd503156e750" (UID: "9fce1875-76a1-485d-a6f1-dd503156e750"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:51.560452 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f75c87f-8fff-45ac-ae2a-6d7ff585eb52-config-data" (OuterVolumeSpecName: "config-data") pod "3f75c87f-8fff-45ac-ae2a-6d7ff585eb52" (UID: "3f75c87f-8fff-45ac-ae2a-6d7ff585eb52"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:51.623475 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c3fa2e0-19e5-4ed3-8f78-fda7d9c0aa4f-config-data\") pod \"6c3fa2e0-19e5-4ed3-8f78-fda7d9c0aa4f\" (UID: \"6c3fa2e0-19e5-4ed3-8f78-fda7d9c0aa4f\") " Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:51.623641 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fjq5b\" (UniqueName: \"kubernetes.io/projected/6c3fa2e0-19e5-4ed3-8f78-fda7d9c0aa4f-kube-api-access-fjq5b\") pod \"6c3fa2e0-19e5-4ed3-8f78-fda7d9c0aa4f\" (UID: \"6c3fa2e0-19e5-4ed3-8f78-fda7d9c0aa4f\") " Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:51.623681 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6c3fa2e0-19e5-4ed3-8f78-fda7d9c0aa4f-config-data-custom\") pod \"6c3fa2e0-19e5-4ed3-8f78-fda7d9c0aa4f\" (UID: \"6c3fa2e0-19e5-4ed3-8f78-fda7d9c0aa4f\") " Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:51.623750 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6c3fa2e0-19e5-4ed3-8f78-fda7d9c0aa4f-logs\") pod \"6c3fa2e0-19e5-4ed3-8f78-fda7d9c0aa4f\" (UID: \"6c3fa2e0-19e5-4ed3-8f78-fda7d9c0aa4f\") " Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:51.624409 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hhsn9\" (UniqueName: \"kubernetes.io/projected/3f75c87f-8fff-45ac-ae2a-6d7ff585eb52-kube-api-access-hhsn9\") on node \"crc\" DevicePath \"\"" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:51.624426 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3f75c87f-8fff-45ac-ae2a-6d7ff585eb52-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:51.624438 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3f75c87f-8fff-45ac-ae2a-6d7ff585eb52-logs\") on node \"crc\" DevicePath \"\"" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:51.624449 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9fce1875-76a1-485d-a6f1-dd503156e750-logs\") on node \"crc\" 
DevicePath \"\"" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:51.624459 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9fce1875-76a1-485d-a6f1-dd503156e750-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:51.624492 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h4g7f\" (UniqueName: \"kubernetes.io/projected/9fce1875-76a1-485d-a6f1-dd503156e750-kube-api-access-h4g7f\") on node \"crc\" DevicePath \"\"" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:51.624503 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3f75c87f-8fff-45ac-ae2a-6d7ff585eb52-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:51.624513 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9fce1875-76a1-485d-a6f1-dd503156e750-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:51.626813 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6c3fa2e0-19e5-4ed3-8f78-fda7d9c0aa4f-logs" (OuterVolumeSpecName: "logs") pod "6c3fa2e0-19e5-4ed3-8f78-fda7d9c0aa4f" (UID: "6c3fa2e0-19e5-4ed3-8f78-fda7d9c0aa4f"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:51.626922 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6c3fa2e0-19e5-4ed3-8f78-fda7d9c0aa4f-kube-api-access-fjq5b" (OuterVolumeSpecName: "kube-api-access-fjq5b") pod "6c3fa2e0-19e5-4ed3-8f78-fda7d9c0aa4f" (UID: "6c3fa2e0-19e5-4ed3-8f78-fda7d9c0aa4f"). InnerVolumeSpecName "kube-api-access-fjq5b". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:51.628471 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c3fa2e0-19e5-4ed3-8f78-fda7d9c0aa4f-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "6c3fa2e0-19e5-4ed3-8f78-fda7d9c0aa4f" (UID: "6c3fa2e0-19e5-4ed3-8f78-fda7d9c0aa4f"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:51.668972 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c3fa2e0-19e5-4ed3-8f78-fda7d9c0aa4f-config-data" (OuterVolumeSpecName: "config-data") pod "6c3fa2e0-19e5-4ed3-8f78-fda7d9c0aa4f" (UID: "6c3fa2e0-19e5-4ed3-8f78-fda7d9c0aa4f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:51.727053 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fjq5b\" (UniqueName: \"kubernetes.io/projected/6c3fa2e0-19e5-4ed3-8f78-fda7d9c0aa4f-kube-api-access-fjq5b\") on node \"crc\" DevicePath \"\"" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:51.727083 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6c3fa2e0-19e5-4ed3-8f78-fda7d9c0aa4f-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:51.727095 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6c3fa2e0-19e5-4ed3-8f78-fda7d9c0aa4f-logs\") on node \"crc\" DevicePath \"\"" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:51.727108 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c3fa2e0-19e5-4ed3-8f78-fda7d9c0aa4f-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:51.947024 4558 generic.go:334] "Generic (PLEG): container finished" podID="9fce1875-76a1-485d-a6f1-dd503156e750" containerID="71fc369dd9331845f5486b4eb5e2f40365320e5ff59f4aad10e67e2f7af937e3" exitCode=0 Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:51.947055 4558 generic.go:334] "Generic (PLEG): container finished" podID="9fce1875-76a1-485d-a6f1-dd503156e750" containerID="47e9d29a4aed9c610949db6f612a9990144180ca22ace7a08ea7f838800c8a22" exitCode=143 Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:51.947091 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="barbican-kuttl-tests/barbican-api-7d686cb9b8-r56cp" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:51.947147 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican-api-7d686cb9b8-r56cp" event={"ID":"9fce1875-76a1-485d-a6f1-dd503156e750","Type":"ContainerDied","Data":"71fc369dd9331845f5486b4eb5e2f40365320e5ff59f4aad10e67e2f7af937e3"} Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:51.947236 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican-api-7d686cb9b8-r56cp" event={"ID":"9fce1875-76a1-485d-a6f1-dd503156e750","Type":"ContainerDied","Data":"47e9d29a4aed9c610949db6f612a9990144180ca22ace7a08ea7f838800c8a22"} Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:51.947250 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican-api-7d686cb9b8-r56cp" event={"ID":"9fce1875-76a1-485d-a6f1-dd503156e750","Type":"ContainerDied","Data":"1b7c563bea99ed0704a3a51e75635b71041b6f664a14216e4e7af65e805cab1f"} Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:51.947268 4558 scope.go:117] "RemoveContainer" containerID="71fc369dd9331845f5486b4eb5e2f40365320e5ff59f4aad10e67e2f7af937e3" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:51.951244 4558 generic.go:334] "Generic (PLEG): container finished" podID="6c3fa2e0-19e5-4ed3-8f78-fda7d9c0aa4f" containerID="57e799d2ff050019df5545bcaa75a294f385f74f7f397659129f9db6489312e0" exitCode=0 Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:51.951262 4558 generic.go:334] "Generic (PLEG): container finished" podID="6c3fa2e0-19e5-4ed3-8f78-fda7d9c0aa4f" containerID="49531a534ec7383a9c7baf6296f5dda1b911df495b2c01f24a463815a298eadb" exitCode=143 Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 
18:18:51.951301 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican-keystone-listener-6f9c66d74d-h9tvl" event={"ID":"6c3fa2e0-19e5-4ed3-8f78-fda7d9c0aa4f","Type":"ContainerDied","Data":"57e799d2ff050019df5545bcaa75a294f385f74f7f397659129f9db6489312e0"} Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:51.951322 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican-keystone-listener-6f9c66d74d-h9tvl" event={"ID":"6c3fa2e0-19e5-4ed3-8f78-fda7d9c0aa4f","Type":"ContainerDied","Data":"49531a534ec7383a9c7baf6296f5dda1b911df495b2c01f24a463815a298eadb"} Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:51.951333 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican-keystone-listener-6f9c66d74d-h9tvl" event={"ID":"6c3fa2e0-19e5-4ed3-8f78-fda7d9c0aa4f","Type":"ContainerDied","Data":"4e1fde413f09656ada43e6653d239ed10188d97252178b4943607ada5eecaa4f"} Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:51.951409 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="barbican-kuttl-tests/barbican-keystone-listener-6f9c66d74d-h9tvl" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:51.953441 4558 generic.go:334] "Generic (PLEG): container finished" podID="3f75c87f-8fff-45ac-ae2a-6d7ff585eb52" containerID="e47a5e5509e4731b79aa7169915e1554d46a0023265e61269bb4ed67baaa3c14" exitCode=0 Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:51.953463 4558 generic.go:334] "Generic (PLEG): container finished" podID="3f75c87f-8fff-45ac-ae2a-6d7ff585eb52" containerID="40ffad0a2494fb759a68da86800f733040c8e2db7f259ca696af71da7fffb661" exitCode=143 Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:51.953648 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="barbican-kuttl-tests/barbican-api-7d686cb9b8-z9zrp" podUID="65f781b0-c276-41cd-911f-9c964612d70d" containerName="barbican-api-log" containerID="cri-o://1f6a18382756ee048f8366d1db72717a6b8e361e2b7c7050d003e430c11f0923" gracePeriod=30 Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:51.953911 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="barbican-kuttl-tests/barbican-worker-65bbb948f9-jg95w" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:51.955332 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican-worker-65bbb948f9-jg95w" event={"ID":"3f75c87f-8fff-45ac-ae2a-6d7ff585eb52","Type":"ContainerDied","Data":"e47a5e5509e4731b79aa7169915e1554d46a0023265e61269bb4ed67baaa3c14"} Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:51.955424 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican-worker-65bbb948f9-jg95w" event={"ID":"3f75c87f-8fff-45ac-ae2a-6d7ff585eb52","Type":"ContainerDied","Data":"40ffad0a2494fb759a68da86800f733040c8e2db7f259ca696af71da7fffb661"} Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:51.955441 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican-worker-65bbb948f9-jg95w" event={"ID":"3f75c87f-8fff-45ac-ae2a-6d7ff585eb52","Type":"ContainerDied","Data":"d1a9ea9f13739b0f97f9113e97af7ed24db5c27b6ce7707a9d0111164730ad38"} Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:51.955513 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="barbican-kuttl-tests/barbican-api-7d686cb9b8-z9zrp" podUID="65f781b0-c276-41cd-911f-9c964612d70d" containerName="barbican-api" containerID="cri-o://360ccb7b28a8926116edd2dc0fff31e313aa72884ad5ca03c021f606c938c63d" gracePeriod=30 Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:51.956091 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="barbican-kuttl-tests/barbican-keystone-listener-6f9c66d74d-wwr54" podUID="0b59d155-7695-4f4e-b6bb-602ea491cb2d" containerName="barbican-keystone-listener-log" containerID="cri-o://db0da8cda5443858ce613edc9d8b9076f173aa1bb3746580df94d5ec5f14ccc2" gracePeriod=30 Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:51.956261 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="barbican-kuttl-tests/barbican-worker-65bbb948f9-6kjvs" podUID="afc237c4-c25c-4dde-91f5-23accf56f5a9" containerName="barbican-worker-log" containerID="cri-o://9afe78cb8a1da2bb5b08c69b68f397924a7031558028c3dfc119c19cd15d5437" gracePeriod=30 Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:51.956306 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="barbican-kuttl-tests/barbican-keystone-listener-6f9c66d74d-wwr54" podUID="0b59d155-7695-4f4e-b6bb-602ea491cb2d" containerName="barbican-keystone-listener" containerID="cri-o://f84a9271aeceb1980a056bfe780df5b8ee246aea786cbdc53c89fc3a34a5e68c" gracePeriod=30 Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:51.956346 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="barbican-kuttl-tests/barbican-worker-65bbb948f9-6kjvs" podUID="afc237c4-c25c-4dde-91f5-23accf56f5a9" containerName="barbican-worker" containerID="cri-o://1d759ec87bb962d402368d8716821b4cdcc5886d59e1f98752dc517a66f23913" gracePeriod=30 Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:51.975033 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["barbican-kuttl-tests/barbican-db-sync-q6bzf"] Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:51.987281 4558 scope.go:117] "RemoveContainer" containerID="47e9d29a4aed9c610949db6f612a9990144180ca22ace7a08ea7f838800c8a22" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:51.997267 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["barbican-kuttl-tests/barbican-db-sync-q6bzf"] Jan 20 18:18:52 crc 
kubenswrapper[4558]: I0120 18:18:52.014058 4558 scope.go:117] "RemoveContainer" containerID="71fc369dd9331845f5486b4eb5e2f40365320e5ff59f4aad10e67e2f7af937e3" Jan 20 18:18:52 crc kubenswrapper[4558]: E0120 18:18:52.016630 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"71fc369dd9331845f5486b4eb5e2f40365320e5ff59f4aad10e67e2f7af937e3\": container with ID starting with 71fc369dd9331845f5486b4eb5e2f40365320e5ff59f4aad10e67e2f7af937e3 not found: ID does not exist" containerID="71fc369dd9331845f5486b4eb5e2f40365320e5ff59f4aad10e67e2f7af937e3" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.016660 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"71fc369dd9331845f5486b4eb5e2f40365320e5ff59f4aad10e67e2f7af937e3"} err="failed to get container status \"71fc369dd9331845f5486b4eb5e2f40365320e5ff59f4aad10e67e2f7af937e3\": rpc error: code = NotFound desc = could not find container \"71fc369dd9331845f5486b4eb5e2f40365320e5ff59f4aad10e67e2f7af937e3\": container with ID starting with 71fc369dd9331845f5486b4eb5e2f40365320e5ff59f4aad10e67e2f7af937e3 not found: ID does not exist" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.016680 4558 scope.go:117] "RemoveContainer" containerID="47e9d29a4aed9c610949db6f612a9990144180ca22ace7a08ea7f838800c8a22" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.017226 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["barbican-kuttl-tests/barbican1f0e-account-delete-5srcg"] Jan 20 18:18:52 crc kubenswrapper[4558]: E0120 18:18:52.017581 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f75c87f-8fff-45ac-ae2a-6d7ff585eb52" containerName="barbican-worker-log" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.017594 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f75c87f-8fff-45ac-ae2a-6d7ff585eb52" containerName="barbican-worker-log" Jan 20 18:18:52 crc kubenswrapper[4558]: E0120 18:18:52.017619 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c3fa2e0-19e5-4ed3-8f78-fda7d9c0aa4f" containerName="barbican-keystone-listener-log" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.017626 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c3fa2e0-19e5-4ed3-8f78-fda7d9c0aa4f" containerName="barbican-keystone-listener-log" Jan 20 18:18:52 crc kubenswrapper[4558]: E0120 18:18:52.017636 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9fce1875-76a1-485d-a6f1-dd503156e750" containerName="barbican-api-log" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.017646 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9fce1875-76a1-485d-a6f1-dd503156e750" containerName="barbican-api-log" Jan 20 18:18:52 crc kubenswrapper[4558]: E0120 18:18:52.017656 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c3fa2e0-19e5-4ed3-8f78-fda7d9c0aa4f" containerName="barbican-keystone-listener" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.017662 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c3fa2e0-19e5-4ed3-8f78-fda7d9c0aa4f" containerName="barbican-keystone-listener" Jan 20 18:18:52 crc kubenswrapper[4558]: E0120 18:18:52.017682 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f75c87f-8fff-45ac-ae2a-6d7ff585eb52" containerName="barbican-worker" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.017688 4558 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="3f75c87f-8fff-45ac-ae2a-6d7ff585eb52" containerName="barbican-worker" Jan 20 18:18:52 crc kubenswrapper[4558]: E0120 18:18:52.017701 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9fce1875-76a1-485d-a6f1-dd503156e750" containerName="barbican-api" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.017707 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9fce1875-76a1-485d-a6f1-dd503156e750" containerName="barbican-api" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.017841 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="9fce1875-76a1-485d-a6f1-dd503156e750" containerName="barbican-api" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.017850 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="3f75c87f-8fff-45ac-ae2a-6d7ff585eb52" containerName="barbican-worker" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.017860 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="9fce1875-76a1-485d-a6f1-dd503156e750" containerName="barbican-api-log" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.017870 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c3fa2e0-19e5-4ed3-8f78-fda7d9c0aa4f" containerName="barbican-keystone-listener" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.017886 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c3fa2e0-19e5-4ed3-8f78-fda7d9c0aa4f" containerName="barbican-keystone-listener-log" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.017896 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="3f75c87f-8fff-45ac-ae2a-6d7ff585eb52" containerName="barbican-worker-log" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.018472 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="barbican-kuttl-tests/barbican1f0e-account-delete-5srcg" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.029888 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["barbican-kuttl-tests/barbican-worker-65bbb948f9-jg95w"] Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.031318 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3e312336-756e-4803-9db5-f0b0e31eb6ac-operator-scripts\") pod \"barbican1f0e-account-delete-5srcg\" (UID: \"3e312336-756e-4803-9db5-f0b0e31eb6ac\") " pod="barbican-kuttl-tests/barbican1f0e-account-delete-5srcg" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.031411 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rd6ks\" (UniqueName: \"kubernetes.io/projected/3e312336-756e-4803-9db5-f0b0e31eb6ac-kube-api-access-rd6ks\") pod \"barbican1f0e-account-delete-5srcg\" (UID: \"3e312336-756e-4803-9db5-f0b0e31eb6ac\") " pod="barbican-kuttl-tests/barbican1f0e-account-delete-5srcg" Jan 20 18:18:52 crc kubenswrapper[4558]: E0120 18:18:52.031815 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"47e9d29a4aed9c610949db6f612a9990144180ca22ace7a08ea7f838800c8a22\": container with ID starting with 47e9d29a4aed9c610949db6f612a9990144180ca22ace7a08ea7f838800c8a22 not found: ID does not exist" containerID="47e9d29a4aed9c610949db6f612a9990144180ca22ace7a08ea7f838800c8a22" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.033484 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"47e9d29a4aed9c610949db6f612a9990144180ca22ace7a08ea7f838800c8a22"} err="failed to get container status \"47e9d29a4aed9c610949db6f612a9990144180ca22ace7a08ea7f838800c8a22\": rpc error: code = NotFound desc = could not find container \"47e9d29a4aed9c610949db6f612a9990144180ca22ace7a08ea7f838800c8a22\": container with ID starting with 47e9d29a4aed9c610949db6f612a9990144180ca22ace7a08ea7f838800c8a22 not found: ID does not exist" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.033524 4558 scope.go:117] "RemoveContainer" containerID="71fc369dd9331845f5486b4eb5e2f40365320e5ff59f4aad10e67e2f7af937e3" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.034312 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"71fc369dd9331845f5486b4eb5e2f40365320e5ff59f4aad10e67e2f7af937e3"} err="failed to get container status \"71fc369dd9331845f5486b4eb5e2f40365320e5ff59f4aad10e67e2f7af937e3\": rpc error: code = NotFound desc = could not find container \"71fc369dd9331845f5486b4eb5e2f40365320e5ff59f4aad10e67e2f7af937e3\": container with ID starting with 71fc369dd9331845f5486b4eb5e2f40365320e5ff59f4aad10e67e2f7af937e3 not found: ID does not exist" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.034347 4558 scope.go:117] "RemoveContainer" containerID="47e9d29a4aed9c610949db6f612a9990144180ca22ace7a08ea7f838800c8a22" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.034918 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"47e9d29a4aed9c610949db6f612a9990144180ca22ace7a08ea7f838800c8a22"} err="failed to get container status \"47e9d29a4aed9c610949db6f612a9990144180ca22ace7a08ea7f838800c8a22\": rpc error: code = NotFound desc = could not find container 
\"47e9d29a4aed9c610949db6f612a9990144180ca22ace7a08ea7f838800c8a22\": container with ID starting with 47e9d29a4aed9c610949db6f612a9990144180ca22ace7a08ea7f838800c8a22 not found: ID does not exist" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.034949 4558 scope.go:117] "RemoveContainer" containerID="57e799d2ff050019df5545bcaa75a294f385f74f7f397659129f9db6489312e0" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.036605 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["barbican-kuttl-tests/barbican-worker-65bbb948f9-jg95w"] Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.041840 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["barbican-kuttl-tests/barbican1f0e-account-delete-5srcg"] Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.048196 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["barbican-kuttl-tests/barbican-api-7d686cb9b8-r56cp"] Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.050612 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["barbican-kuttl-tests/barbican-api-7d686cb9b8-r56cp"] Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.056368 4558 scope.go:117] "RemoveContainer" containerID="49531a534ec7383a9c7baf6296f5dda1b911df495b2c01f24a463815a298eadb" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.077448 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["barbican-kuttl-tests/barbican-keystone-listener-6f9c66d74d-h9tvl"] Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.090511 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["barbican-kuttl-tests/barbican-keystone-listener-6f9c66d74d-h9tvl"] Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.121351 4558 scope.go:117] "RemoveContainer" containerID="57e799d2ff050019df5545bcaa75a294f385f74f7f397659129f9db6489312e0" Jan 20 18:18:52 crc kubenswrapper[4558]: E0120 18:18:52.122573 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"57e799d2ff050019df5545bcaa75a294f385f74f7f397659129f9db6489312e0\": container with ID starting with 57e799d2ff050019df5545bcaa75a294f385f74f7f397659129f9db6489312e0 not found: ID does not exist" containerID="57e799d2ff050019df5545bcaa75a294f385f74f7f397659129f9db6489312e0" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.122638 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"57e799d2ff050019df5545bcaa75a294f385f74f7f397659129f9db6489312e0"} err="failed to get container status \"57e799d2ff050019df5545bcaa75a294f385f74f7f397659129f9db6489312e0\": rpc error: code = NotFound desc = could not find container \"57e799d2ff050019df5545bcaa75a294f385f74f7f397659129f9db6489312e0\": container with ID starting with 57e799d2ff050019df5545bcaa75a294f385f74f7f397659129f9db6489312e0 not found: ID does not exist" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.122673 4558 scope.go:117] "RemoveContainer" containerID="49531a534ec7383a9c7baf6296f5dda1b911df495b2c01f24a463815a298eadb" Jan 20 18:18:52 crc kubenswrapper[4558]: E0120 18:18:52.123024 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"49531a534ec7383a9c7baf6296f5dda1b911df495b2c01f24a463815a298eadb\": container with ID starting with 49531a534ec7383a9c7baf6296f5dda1b911df495b2c01f24a463815a298eadb not found: ID does not exist" containerID="49531a534ec7383a9c7baf6296f5dda1b911df495b2c01f24a463815a298eadb" Jan 20 18:18:52 crc 
kubenswrapper[4558]: I0120 18:18:52.123063 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"49531a534ec7383a9c7baf6296f5dda1b911df495b2c01f24a463815a298eadb"} err="failed to get container status \"49531a534ec7383a9c7baf6296f5dda1b911df495b2c01f24a463815a298eadb\": rpc error: code = NotFound desc = could not find container \"49531a534ec7383a9c7baf6296f5dda1b911df495b2c01f24a463815a298eadb\": container with ID starting with 49531a534ec7383a9c7baf6296f5dda1b911df495b2c01f24a463815a298eadb not found: ID does not exist" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.123082 4558 scope.go:117] "RemoveContainer" containerID="57e799d2ff050019df5545bcaa75a294f385f74f7f397659129f9db6489312e0" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.124113 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"57e799d2ff050019df5545bcaa75a294f385f74f7f397659129f9db6489312e0"} err="failed to get container status \"57e799d2ff050019df5545bcaa75a294f385f74f7f397659129f9db6489312e0\": rpc error: code = NotFound desc = could not find container \"57e799d2ff050019df5545bcaa75a294f385f74f7f397659129f9db6489312e0\": container with ID starting with 57e799d2ff050019df5545bcaa75a294f385f74f7f397659129f9db6489312e0 not found: ID does not exist" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.124142 4558 scope.go:117] "RemoveContainer" containerID="49531a534ec7383a9c7baf6296f5dda1b911df495b2c01f24a463815a298eadb" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.127419 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"49531a534ec7383a9c7baf6296f5dda1b911df495b2c01f24a463815a298eadb"} err="failed to get container status \"49531a534ec7383a9c7baf6296f5dda1b911df495b2c01f24a463815a298eadb\": rpc error: code = NotFound desc = could not find container \"49531a534ec7383a9c7baf6296f5dda1b911df495b2c01f24a463815a298eadb\": container with ID starting with 49531a534ec7383a9c7baf6296f5dda1b911df495b2c01f24a463815a298eadb not found: ID does not exist" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.127450 4558 scope.go:117] "RemoveContainer" containerID="e47a5e5509e4731b79aa7169915e1554d46a0023265e61269bb4ed67baaa3c14" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.132834 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3e312336-756e-4803-9db5-f0b0e31eb6ac-operator-scripts\") pod \"barbican1f0e-account-delete-5srcg\" (UID: \"3e312336-756e-4803-9db5-f0b0e31eb6ac\") " pod="barbican-kuttl-tests/barbican1f0e-account-delete-5srcg" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.132895 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rd6ks\" (UniqueName: \"kubernetes.io/projected/3e312336-756e-4803-9db5-f0b0e31eb6ac-kube-api-access-rd6ks\") pod \"barbican1f0e-account-delete-5srcg\" (UID: \"3e312336-756e-4803-9db5-f0b0e31eb6ac\") " pod="barbican-kuttl-tests/barbican1f0e-account-delete-5srcg" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.135174 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3e312336-756e-4803-9db5-f0b0e31eb6ac-operator-scripts\") pod \"barbican1f0e-account-delete-5srcg\" (UID: \"3e312336-756e-4803-9db5-f0b0e31eb6ac\") " pod="barbican-kuttl-tests/barbican1f0e-account-delete-5srcg" Jan 20 
18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.147165 4558 scope.go:117] "RemoveContainer" containerID="40ffad0a2494fb759a68da86800f733040c8e2db7f259ca696af71da7fffb661" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.150851 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rd6ks\" (UniqueName: \"kubernetes.io/projected/3e312336-756e-4803-9db5-f0b0e31eb6ac-kube-api-access-rd6ks\") pod \"barbican1f0e-account-delete-5srcg\" (UID: \"3e312336-756e-4803-9db5-f0b0e31eb6ac\") " pod="barbican-kuttl-tests/barbican1f0e-account-delete-5srcg" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.183001 4558 scope.go:117] "RemoveContainer" containerID="e47a5e5509e4731b79aa7169915e1554d46a0023265e61269bb4ed67baaa3c14" Jan 20 18:18:52 crc kubenswrapper[4558]: E0120 18:18:52.186532 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e47a5e5509e4731b79aa7169915e1554d46a0023265e61269bb4ed67baaa3c14\": container with ID starting with e47a5e5509e4731b79aa7169915e1554d46a0023265e61269bb4ed67baaa3c14 not found: ID does not exist" containerID="e47a5e5509e4731b79aa7169915e1554d46a0023265e61269bb4ed67baaa3c14" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.186566 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e47a5e5509e4731b79aa7169915e1554d46a0023265e61269bb4ed67baaa3c14"} err="failed to get container status \"e47a5e5509e4731b79aa7169915e1554d46a0023265e61269bb4ed67baaa3c14\": rpc error: code = NotFound desc = could not find container \"e47a5e5509e4731b79aa7169915e1554d46a0023265e61269bb4ed67baaa3c14\": container with ID starting with e47a5e5509e4731b79aa7169915e1554d46a0023265e61269bb4ed67baaa3c14 not found: ID does not exist" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.186587 4558 scope.go:117] "RemoveContainer" containerID="40ffad0a2494fb759a68da86800f733040c8e2db7f259ca696af71da7fffb661" Jan 20 18:18:52 crc kubenswrapper[4558]: E0120 18:18:52.186874 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"40ffad0a2494fb759a68da86800f733040c8e2db7f259ca696af71da7fffb661\": container with ID starting with 40ffad0a2494fb759a68da86800f733040c8e2db7f259ca696af71da7fffb661 not found: ID does not exist" containerID="40ffad0a2494fb759a68da86800f733040c8e2db7f259ca696af71da7fffb661" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.186907 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"40ffad0a2494fb759a68da86800f733040c8e2db7f259ca696af71da7fffb661"} err="failed to get container status \"40ffad0a2494fb759a68da86800f733040c8e2db7f259ca696af71da7fffb661\": rpc error: code = NotFound desc = could not find container \"40ffad0a2494fb759a68da86800f733040c8e2db7f259ca696af71da7fffb661\": container with ID starting with 40ffad0a2494fb759a68da86800f733040c8e2db7f259ca696af71da7fffb661 not found: ID does not exist" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.186923 4558 scope.go:117] "RemoveContainer" containerID="e47a5e5509e4731b79aa7169915e1554d46a0023265e61269bb4ed67baaa3c14" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.187746 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e47a5e5509e4731b79aa7169915e1554d46a0023265e61269bb4ed67baaa3c14"} err="failed to get container status 
\"e47a5e5509e4731b79aa7169915e1554d46a0023265e61269bb4ed67baaa3c14\": rpc error: code = NotFound desc = could not find container \"e47a5e5509e4731b79aa7169915e1554d46a0023265e61269bb4ed67baaa3c14\": container with ID starting with e47a5e5509e4731b79aa7169915e1554d46a0023265e61269bb4ed67baaa3c14 not found: ID does not exist" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.187768 4558 scope.go:117] "RemoveContainer" containerID="40ffad0a2494fb759a68da86800f733040c8e2db7f259ca696af71da7fffb661" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.188074 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"40ffad0a2494fb759a68da86800f733040c8e2db7f259ca696af71da7fffb661"} err="failed to get container status \"40ffad0a2494fb759a68da86800f733040c8e2db7f259ca696af71da7fffb661\": rpc error: code = NotFound desc = could not find container \"40ffad0a2494fb759a68da86800f733040c8e2db7f259ca696af71da7fffb661\": container with ID starting with 40ffad0a2494fb759a68da86800f733040c8e2db7f259ca696af71da7fffb661 not found: ID does not exist" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.345497 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="barbican-kuttl-tests/barbican1f0e-account-delete-5srcg" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.482151 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="barbican-kuttl-tests/barbican-api-7d686cb9b8-z9zrp" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.573963 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="19e31edd-327b-4861-8446-12b3de5b9312" path="/var/lib/kubelet/pods/19e31edd-327b-4861-8446-12b3de5b9312/volumes" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.574522 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3f75c87f-8fff-45ac-ae2a-6d7ff585eb52" path="/var/lib/kubelet/pods/3f75c87f-8fff-45ac-ae2a-6d7ff585eb52/volumes" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.575102 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6c3fa2e0-19e5-4ed3-8f78-fda7d9c0aa4f" path="/var/lib/kubelet/pods/6c3fa2e0-19e5-4ed3-8f78-fda7d9c0aa4f/volumes" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.576135 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9fce1875-76a1-485d-a6f1-dd503156e750" path="/var/lib/kubelet/pods/9fce1875-76a1-485d-a6f1-dd503156e750/volumes" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.640504 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vwxst\" (UniqueName: \"kubernetes.io/projected/65f781b0-c276-41cd-911f-9c964612d70d-kube-api-access-vwxst\") pod \"65f781b0-c276-41cd-911f-9c964612d70d\" (UID: \"65f781b0-c276-41cd-911f-9c964612d70d\") " Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.640928 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/65f781b0-c276-41cd-911f-9c964612d70d-config-data-custom\") pod \"65f781b0-c276-41cd-911f-9c964612d70d\" (UID: \"65f781b0-c276-41cd-911f-9c964612d70d\") " Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.641394 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/65f781b0-c276-41cd-911f-9c964612d70d-logs\") pod \"65f781b0-c276-41cd-911f-9c964612d70d\" (UID: 
\"65f781b0-c276-41cd-911f-9c964612d70d\") " Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.641579 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65f781b0-c276-41cd-911f-9c964612d70d-config-data\") pod \"65f781b0-c276-41cd-911f-9c964612d70d\" (UID: \"65f781b0-c276-41cd-911f-9c964612d70d\") " Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.641874 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/65f781b0-c276-41cd-911f-9c964612d70d-logs" (OuterVolumeSpecName: "logs") pod "65f781b0-c276-41cd-911f-9c964612d70d" (UID: "65f781b0-c276-41cd-911f-9c964612d70d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.642468 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/65f781b0-c276-41cd-911f-9c964612d70d-logs\") on node \"crc\" DevicePath \"\"" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.644626 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65f781b0-c276-41cd-911f-9c964612d70d-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "65f781b0-c276-41cd-911f-9c964612d70d" (UID: "65f781b0-c276-41cd-911f-9c964612d70d"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.644653 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/65f781b0-c276-41cd-911f-9c964612d70d-kube-api-access-vwxst" (OuterVolumeSpecName: "kube-api-access-vwxst") pod "65f781b0-c276-41cd-911f-9c964612d70d" (UID: "65f781b0-c276-41cd-911f-9c964612d70d"). InnerVolumeSpecName "kube-api-access-vwxst". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.677479 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65f781b0-c276-41cd-911f-9c964612d70d-config-data" (OuterVolumeSpecName: "config-data") pod "65f781b0-c276-41cd-911f-9c964612d70d" (UID: "65f781b0-c276-41cd-911f-9c964612d70d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.744615 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65f781b0-c276-41cd-911f-9c964612d70d-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.744646 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vwxst\" (UniqueName: \"kubernetes.io/projected/65f781b0-c276-41cd-911f-9c964612d70d-kube-api-access-vwxst\") on node \"crc\" DevicePath \"\"" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.744663 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/65f781b0-c276-41cd-911f-9c964612d70d-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.766256 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["barbican-kuttl-tests/barbican1f0e-account-delete-5srcg"] Jan 20 18:18:52 crc kubenswrapper[4558]: W0120 18:18:52.772544 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3e312336_756e_4803_9db5_f0b0e31eb6ac.slice/crio-18063842f6f9888eb29bda3e1aa6714fa5031ea86173a25f3ba177efeacd70f1 WatchSource:0}: Error finding container 18063842f6f9888eb29bda3e1aa6714fa5031ea86173a25f3ba177efeacd70f1: Status 404 returned error can't find the container with id 18063842f6f9888eb29bda3e1aa6714fa5031ea86173a25f3ba177efeacd70f1 Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.948229 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="barbican-kuttl-tests/barbican-keystone-listener-6f9c66d74d-wwr54" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.968586 4558 generic.go:334] "Generic (PLEG): container finished" podID="afc237c4-c25c-4dde-91f5-23accf56f5a9" containerID="1d759ec87bb962d402368d8716821b4cdcc5886d59e1f98752dc517a66f23913" exitCode=0 Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.968637 4558 generic.go:334] "Generic (PLEG): container finished" podID="afc237c4-c25c-4dde-91f5-23accf56f5a9" containerID="9afe78cb8a1da2bb5b08c69b68f397924a7031558028c3dfc119c19cd15d5437" exitCode=143 Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.968708 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican-worker-65bbb948f9-6kjvs" event={"ID":"afc237c4-c25c-4dde-91f5-23accf56f5a9","Type":"ContainerDied","Data":"1d759ec87bb962d402368d8716821b4cdcc5886d59e1f98752dc517a66f23913"} Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.968759 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican-worker-65bbb948f9-6kjvs" event={"ID":"afc237c4-c25c-4dde-91f5-23accf56f5a9","Type":"ContainerDied","Data":"9afe78cb8a1da2bb5b08c69b68f397924a7031558028c3dfc119c19cd15d5437"} Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.972313 4558 generic.go:334] "Generic (PLEG): container finished" podID="65f781b0-c276-41cd-911f-9c964612d70d" containerID="360ccb7b28a8926116edd2dc0fff31e313aa72884ad5ca03c021f606c938c63d" exitCode=0 Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.972343 4558 generic.go:334] "Generic (PLEG): container finished" podID="65f781b0-c276-41cd-911f-9c964612d70d" containerID="1f6a18382756ee048f8366d1db72717a6b8e361e2b7c7050d003e430c11f0923" exitCode=143 Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.972389 4558 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican-api-7d686cb9b8-z9zrp" event={"ID":"65f781b0-c276-41cd-911f-9c964612d70d","Type":"ContainerDied","Data":"360ccb7b28a8926116edd2dc0fff31e313aa72884ad5ca03c021f606c938c63d"} Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.972419 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican-api-7d686cb9b8-z9zrp" event={"ID":"65f781b0-c276-41cd-911f-9c964612d70d","Type":"ContainerDied","Data":"1f6a18382756ee048f8366d1db72717a6b8e361e2b7c7050d003e430c11f0923"} Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.972430 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican-api-7d686cb9b8-z9zrp" event={"ID":"65f781b0-c276-41cd-911f-9c964612d70d","Type":"ContainerDied","Data":"bdc0cf10d6bb99533295de0de5ffc0b373a88bbfe7911df4426238d198e5c706"} Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.972445 4558 scope.go:117] "RemoveContainer" containerID="360ccb7b28a8926116edd2dc0fff31e313aa72884ad5ca03c021f606c938c63d" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.972537 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="barbican-kuttl-tests/barbican-api-7d686cb9b8-z9zrp" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.980409 4558 generic.go:334] "Generic (PLEG): container finished" podID="0b59d155-7695-4f4e-b6bb-602ea491cb2d" containerID="f84a9271aeceb1980a056bfe780df5b8ee246aea786cbdc53c89fc3a34a5e68c" exitCode=0 Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.980437 4558 generic.go:334] "Generic (PLEG): container finished" podID="0b59d155-7695-4f4e-b6bb-602ea491cb2d" containerID="db0da8cda5443858ce613edc9d8b9076f173aa1bb3746580df94d5ec5f14ccc2" exitCode=143 Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.980485 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican-keystone-listener-6f9c66d74d-wwr54" event={"ID":"0b59d155-7695-4f4e-b6bb-602ea491cb2d","Type":"ContainerDied","Data":"f84a9271aeceb1980a056bfe780df5b8ee246aea786cbdc53c89fc3a34a5e68c"} Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.980510 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican-keystone-listener-6f9c66d74d-wwr54" event={"ID":"0b59d155-7695-4f4e-b6bb-602ea491cb2d","Type":"ContainerDied","Data":"db0da8cda5443858ce613edc9d8b9076f173aa1bb3746580df94d5ec5f14ccc2"} Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.980520 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican-keystone-listener-6f9c66d74d-wwr54" event={"ID":"0b59d155-7695-4f4e-b6bb-602ea491cb2d","Type":"ContainerDied","Data":"e802234d4a804f0c5d2fbbbbcabfde57a43361590e14d13c7fa38a49249bb2a7"} Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.980570 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="barbican-kuttl-tests/barbican-keystone-listener-6f9c66d74d-wwr54" Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.988821 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican1f0e-account-delete-5srcg" event={"ID":"3e312336-756e-4803-9db5-f0b0e31eb6ac","Type":"ContainerStarted","Data":"732989bca0f83f5be83d330c5fb2375467ff6a8a0816a9188481958969606838"} Jan 20 18:18:52 crc kubenswrapper[4558]: I0120 18:18:52.988919 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican1f0e-account-delete-5srcg" event={"ID":"3e312336-756e-4803-9db5-f0b0e31eb6ac","Type":"ContainerStarted","Data":"18063842f6f9888eb29bda3e1aa6714fa5031ea86173a25f3ba177efeacd70f1"} Jan 20 18:18:53 crc kubenswrapper[4558]: I0120 18:18:53.000217 4558 scope.go:117] "RemoveContainer" containerID="1f6a18382756ee048f8366d1db72717a6b8e361e2b7c7050d003e430c11f0923" Jan 20 18:18:53 crc kubenswrapper[4558]: I0120 18:18:53.004445 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["barbican-kuttl-tests/barbican-api-7d686cb9b8-z9zrp"] Jan 20 18:18:53 crc kubenswrapper[4558]: I0120 18:18:53.008013 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["barbican-kuttl-tests/barbican-api-7d686cb9b8-z9zrp"] Jan 20 18:18:53 crc kubenswrapper[4558]: I0120 18:18:53.018540 4558 scope.go:117] "RemoveContainer" containerID="360ccb7b28a8926116edd2dc0fff31e313aa72884ad5ca03c021f606c938c63d" Jan 20 18:18:53 crc kubenswrapper[4558]: E0120 18:18:53.019485 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"360ccb7b28a8926116edd2dc0fff31e313aa72884ad5ca03c021f606c938c63d\": container with ID starting with 360ccb7b28a8926116edd2dc0fff31e313aa72884ad5ca03c021f606c938c63d not found: ID does not exist" containerID="360ccb7b28a8926116edd2dc0fff31e313aa72884ad5ca03c021f606c938c63d" Jan 20 18:18:53 crc kubenswrapper[4558]: I0120 18:18:53.019577 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"360ccb7b28a8926116edd2dc0fff31e313aa72884ad5ca03c021f606c938c63d"} err="failed to get container status \"360ccb7b28a8926116edd2dc0fff31e313aa72884ad5ca03c021f606c938c63d\": rpc error: code = NotFound desc = could not find container \"360ccb7b28a8926116edd2dc0fff31e313aa72884ad5ca03c021f606c938c63d\": container with ID starting with 360ccb7b28a8926116edd2dc0fff31e313aa72884ad5ca03c021f606c938c63d not found: ID does not exist" Jan 20 18:18:53 crc kubenswrapper[4558]: I0120 18:18:53.019648 4558 scope.go:117] "RemoveContainer" containerID="1f6a18382756ee048f8366d1db72717a6b8e361e2b7c7050d003e430c11f0923" Jan 20 18:18:53 crc kubenswrapper[4558]: E0120 18:18:53.019957 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1f6a18382756ee048f8366d1db72717a6b8e361e2b7c7050d003e430c11f0923\": container with ID starting with 1f6a18382756ee048f8366d1db72717a6b8e361e2b7c7050d003e430c11f0923 not found: ID does not exist" containerID="1f6a18382756ee048f8366d1db72717a6b8e361e2b7c7050d003e430c11f0923" Jan 20 18:18:53 crc kubenswrapper[4558]: I0120 18:18:53.019985 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1f6a18382756ee048f8366d1db72717a6b8e361e2b7c7050d003e430c11f0923"} err="failed to get container status \"1f6a18382756ee048f8366d1db72717a6b8e361e2b7c7050d003e430c11f0923\": rpc error: code = NotFound desc = could not 
find container \"1f6a18382756ee048f8366d1db72717a6b8e361e2b7c7050d003e430c11f0923\": container with ID starting with 1f6a18382756ee048f8366d1db72717a6b8e361e2b7c7050d003e430c11f0923 not found: ID does not exist" Jan 20 18:18:53 crc kubenswrapper[4558]: I0120 18:18:53.020005 4558 scope.go:117] "RemoveContainer" containerID="360ccb7b28a8926116edd2dc0fff31e313aa72884ad5ca03c021f606c938c63d" Jan 20 18:18:53 crc kubenswrapper[4558]: I0120 18:18:53.020218 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"360ccb7b28a8926116edd2dc0fff31e313aa72884ad5ca03c021f606c938c63d"} err="failed to get container status \"360ccb7b28a8926116edd2dc0fff31e313aa72884ad5ca03c021f606c938c63d\": rpc error: code = NotFound desc = could not find container \"360ccb7b28a8926116edd2dc0fff31e313aa72884ad5ca03c021f606c938c63d\": container with ID starting with 360ccb7b28a8926116edd2dc0fff31e313aa72884ad5ca03c021f606c938c63d not found: ID does not exist" Jan 20 18:18:53 crc kubenswrapper[4558]: I0120 18:18:53.020237 4558 scope.go:117] "RemoveContainer" containerID="1f6a18382756ee048f8366d1db72717a6b8e361e2b7c7050d003e430c11f0923" Jan 20 18:18:53 crc kubenswrapper[4558]: I0120 18:18:53.020465 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1f6a18382756ee048f8366d1db72717a6b8e361e2b7c7050d003e430c11f0923"} err="failed to get container status \"1f6a18382756ee048f8366d1db72717a6b8e361e2b7c7050d003e430c11f0923\": rpc error: code = NotFound desc = could not find container \"1f6a18382756ee048f8366d1db72717a6b8e361e2b7c7050d003e430c11f0923\": container with ID starting with 1f6a18382756ee048f8366d1db72717a6b8e361e2b7c7050d003e430c11f0923 not found: ID does not exist" Jan 20 18:18:53 crc kubenswrapper[4558]: I0120 18:18:53.020483 4558 scope.go:117] "RemoveContainer" containerID="f84a9271aeceb1980a056bfe780df5b8ee246aea786cbdc53c89fc3a34a5e68c" Jan 20 18:18:53 crc kubenswrapper[4558]: I0120 18:18:53.021577 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="barbican-kuttl-tests/barbican1f0e-account-delete-5srcg" podStartSLOduration=2.021565721 podStartE2EDuration="2.021565721s" podCreationTimestamp="2026-01-20 18:18:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:18:53.018500798 +0000 UTC m=+5826.778838765" watchObservedRunningTime="2026-01-20 18:18:53.021565721 +0000 UTC m=+5826.781903688" Jan 20 18:18:53 crc kubenswrapper[4558]: I0120 18:18:53.034536 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="barbican-kuttl-tests/barbican-worker-65bbb948f9-6kjvs" Jan 20 18:18:53 crc kubenswrapper[4558]: I0120 18:18:53.034923 4558 scope.go:117] "RemoveContainer" containerID="db0da8cda5443858ce613edc9d8b9076f173aa1bb3746580df94d5ec5f14ccc2" Jan 20 18:18:53 crc kubenswrapper[4558]: I0120 18:18:53.059443 4558 scope.go:117] "RemoveContainer" containerID="f84a9271aeceb1980a056bfe780df5b8ee246aea786cbdc53c89fc3a34a5e68c" Jan 20 18:18:53 crc kubenswrapper[4558]: E0120 18:18:53.060077 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f84a9271aeceb1980a056bfe780df5b8ee246aea786cbdc53c89fc3a34a5e68c\": container with ID starting with f84a9271aeceb1980a056bfe780df5b8ee246aea786cbdc53c89fc3a34a5e68c not found: ID does not exist" containerID="f84a9271aeceb1980a056bfe780df5b8ee246aea786cbdc53c89fc3a34a5e68c" Jan 20 18:18:53 crc kubenswrapper[4558]: I0120 18:18:53.060109 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f84a9271aeceb1980a056bfe780df5b8ee246aea786cbdc53c89fc3a34a5e68c"} err="failed to get container status \"f84a9271aeceb1980a056bfe780df5b8ee246aea786cbdc53c89fc3a34a5e68c\": rpc error: code = NotFound desc = could not find container \"f84a9271aeceb1980a056bfe780df5b8ee246aea786cbdc53c89fc3a34a5e68c\": container with ID starting with f84a9271aeceb1980a056bfe780df5b8ee246aea786cbdc53c89fc3a34a5e68c not found: ID does not exist" Jan 20 18:18:53 crc kubenswrapper[4558]: I0120 18:18:53.060131 4558 scope.go:117] "RemoveContainer" containerID="db0da8cda5443858ce613edc9d8b9076f173aa1bb3746580df94d5ec5f14ccc2" Jan 20 18:18:53 crc kubenswrapper[4558]: E0120 18:18:53.060544 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"db0da8cda5443858ce613edc9d8b9076f173aa1bb3746580df94d5ec5f14ccc2\": container with ID starting with db0da8cda5443858ce613edc9d8b9076f173aa1bb3746580df94d5ec5f14ccc2 not found: ID does not exist" containerID="db0da8cda5443858ce613edc9d8b9076f173aa1bb3746580df94d5ec5f14ccc2" Jan 20 18:18:53 crc kubenswrapper[4558]: I0120 18:18:53.060594 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"db0da8cda5443858ce613edc9d8b9076f173aa1bb3746580df94d5ec5f14ccc2"} err="failed to get container status \"db0da8cda5443858ce613edc9d8b9076f173aa1bb3746580df94d5ec5f14ccc2\": rpc error: code = NotFound desc = could not find container \"db0da8cda5443858ce613edc9d8b9076f173aa1bb3746580df94d5ec5f14ccc2\": container with ID starting with db0da8cda5443858ce613edc9d8b9076f173aa1bb3746580df94d5ec5f14ccc2 not found: ID does not exist" Jan 20 18:18:53 crc kubenswrapper[4558]: I0120 18:18:53.060616 4558 scope.go:117] "RemoveContainer" containerID="f84a9271aeceb1980a056bfe780df5b8ee246aea786cbdc53c89fc3a34a5e68c" Jan 20 18:18:53 crc kubenswrapper[4558]: I0120 18:18:53.062384 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f84a9271aeceb1980a056bfe780df5b8ee246aea786cbdc53c89fc3a34a5e68c"} err="failed to get container status \"f84a9271aeceb1980a056bfe780df5b8ee246aea786cbdc53c89fc3a34a5e68c\": rpc error: code = NotFound desc = could not find container \"f84a9271aeceb1980a056bfe780df5b8ee246aea786cbdc53c89fc3a34a5e68c\": container with ID starting with f84a9271aeceb1980a056bfe780df5b8ee246aea786cbdc53c89fc3a34a5e68c not found: ID does not exist" Jan 20 18:18:53 crc kubenswrapper[4558]: 
I0120 18:18:53.062423 4558 scope.go:117] "RemoveContainer" containerID="db0da8cda5443858ce613edc9d8b9076f173aa1bb3746580df94d5ec5f14ccc2" Jan 20 18:18:53 crc kubenswrapper[4558]: I0120 18:18:53.070848 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"db0da8cda5443858ce613edc9d8b9076f173aa1bb3746580df94d5ec5f14ccc2"} err="failed to get container status \"db0da8cda5443858ce613edc9d8b9076f173aa1bb3746580df94d5ec5f14ccc2\": rpc error: code = NotFound desc = could not find container \"db0da8cda5443858ce613edc9d8b9076f173aa1bb3746580df94d5ec5f14ccc2\": container with ID starting with db0da8cda5443858ce613edc9d8b9076f173aa1bb3746580df94d5ec5f14ccc2 not found: ID does not exist" Jan 20 18:18:53 crc kubenswrapper[4558]: I0120 18:18:53.151082 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0b59d155-7695-4f4e-b6bb-602ea491cb2d-config-data\") pod \"0b59d155-7695-4f4e-b6bb-602ea491cb2d\" (UID: \"0b59d155-7695-4f4e-b6bb-602ea491cb2d\") " Jan 20 18:18:53 crc kubenswrapper[4558]: I0120 18:18:53.151130 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0b59d155-7695-4f4e-b6bb-602ea491cb2d-logs\") pod \"0b59d155-7695-4f4e-b6bb-602ea491cb2d\" (UID: \"0b59d155-7695-4f4e-b6bb-602ea491cb2d\") " Jan 20 18:18:53 crc kubenswrapper[4558]: I0120 18:18:53.151182 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7b676\" (UniqueName: \"kubernetes.io/projected/0b59d155-7695-4f4e-b6bb-602ea491cb2d-kube-api-access-7b676\") pod \"0b59d155-7695-4f4e-b6bb-602ea491cb2d\" (UID: \"0b59d155-7695-4f4e-b6bb-602ea491cb2d\") " Jan 20 18:18:53 crc kubenswrapper[4558]: I0120 18:18:53.151224 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0b59d155-7695-4f4e-b6bb-602ea491cb2d-config-data-custom\") pod \"0b59d155-7695-4f4e-b6bb-602ea491cb2d\" (UID: \"0b59d155-7695-4f4e-b6bb-602ea491cb2d\") " Jan 20 18:18:53 crc kubenswrapper[4558]: I0120 18:18:53.151251 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/afc237c4-c25c-4dde-91f5-23accf56f5a9-config-data-custom\") pod \"afc237c4-c25c-4dde-91f5-23accf56f5a9\" (UID: \"afc237c4-c25c-4dde-91f5-23accf56f5a9\") " Jan 20 18:18:53 crc kubenswrapper[4558]: I0120 18:18:53.151286 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h4zvz\" (UniqueName: \"kubernetes.io/projected/afc237c4-c25c-4dde-91f5-23accf56f5a9-kube-api-access-h4zvz\") pod \"afc237c4-c25c-4dde-91f5-23accf56f5a9\" (UID: \"afc237c4-c25c-4dde-91f5-23accf56f5a9\") " Jan 20 18:18:53 crc kubenswrapper[4558]: I0120 18:18:53.151313 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/afc237c4-c25c-4dde-91f5-23accf56f5a9-logs\") pod \"afc237c4-c25c-4dde-91f5-23accf56f5a9\" (UID: \"afc237c4-c25c-4dde-91f5-23accf56f5a9\") " Jan 20 18:18:53 crc kubenswrapper[4558]: I0120 18:18:53.151341 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/afc237c4-c25c-4dde-91f5-23accf56f5a9-config-data\") pod \"afc237c4-c25c-4dde-91f5-23accf56f5a9\" (UID: \"afc237c4-c25c-4dde-91f5-23accf56f5a9\") " Jan 20 
18:18:53 crc kubenswrapper[4558]: I0120 18:18:53.153331 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0b59d155-7695-4f4e-b6bb-602ea491cb2d-logs" (OuterVolumeSpecName: "logs") pod "0b59d155-7695-4f4e-b6bb-602ea491cb2d" (UID: "0b59d155-7695-4f4e-b6bb-602ea491cb2d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:18:53 crc kubenswrapper[4558]: I0120 18:18:53.154118 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/afc237c4-c25c-4dde-91f5-23accf56f5a9-logs" (OuterVolumeSpecName: "logs") pod "afc237c4-c25c-4dde-91f5-23accf56f5a9" (UID: "afc237c4-c25c-4dde-91f5-23accf56f5a9"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:18:53 crc kubenswrapper[4558]: I0120 18:18:53.156786 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/afc237c4-c25c-4dde-91f5-23accf56f5a9-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "afc237c4-c25c-4dde-91f5-23accf56f5a9" (UID: "afc237c4-c25c-4dde-91f5-23accf56f5a9"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:18:53 crc kubenswrapper[4558]: I0120 18:18:53.157100 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b59d155-7695-4f4e-b6bb-602ea491cb2d-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "0b59d155-7695-4f4e-b6bb-602ea491cb2d" (UID: "0b59d155-7695-4f4e-b6bb-602ea491cb2d"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:18:53 crc kubenswrapper[4558]: I0120 18:18:53.157984 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/afc237c4-c25c-4dde-91f5-23accf56f5a9-kube-api-access-h4zvz" (OuterVolumeSpecName: "kube-api-access-h4zvz") pod "afc237c4-c25c-4dde-91f5-23accf56f5a9" (UID: "afc237c4-c25c-4dde-91f5-23accf56f5a9"). InnerVolumeSpecName "kube-api-access-h4zvz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:18:53 crc kubenswrapper[4558]: I0120 18:18:53.158304 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b59d155-7695-4f4e-b6bb-602ea491cb2d-kube-api-access-7b676" (OuterVolumeSpecName: "kube-api-access-7b676") pod "0b59d155-7695-4f4e-b6bb-602ea491cb2d" (UID: "0b59d155-7695-4f4e-b6bb-602ea491cb2d"). InnerVolumeSpecName "kube-api-access-7b676". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:18:53 crc kubenswrapper[4558]: I0120 18:18:53.180022 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/afc237c4-c25c-4dde-91f5-23accf56f5a9-config-data" (OuterVolumeSpecName: "config-data") pod "afc237c4-c25c-4dde-91f5-23accf56f5a9" (UID: "afc237c4-c25c-4dde-91f5-23accf56f5a9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:18:53 crc kubenswrapper[4558]: I0120 18:18:53.181425 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b59d155-7695-4f4e-b6bb-602ea491cb2d-config-data" (OuterVolumeSpecName: "config-data") pod "0b59d155-7695-4f4e-b6bb-602ea491cb2d" (UID: "0b59d155-7695-4f4e-b6bb-602ea491cb2d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:18:53 crc kubenswrapper[4558]: I0120 18:18:53.252895 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0b59d155-7695-4f4e-b6bb-602ea491cb2d-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 18:18:53 crc kubenswrapper[4558]: I0120 18:18:53.252932 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0b59d155-7695-4f4e-b6bb-602ea491cb2d-logs\") on node \"crc\" DevicePath \"\"" Jan 20 18:18:53 crc kubenswrapper[4558]: I0120 18:18:53.252965 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7b676\" (UniqueName: \"kubernetes.io/projected/0b59d155-7695-4f4e-b6bb-602ea491cb2d-kube-api-access-7b676\") on node \"crc\" DevicePath \"\"" Jan 20 18:18:53 crc kubenswrapper[4558]: I0120 18:18:53.252984 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0b59d155-7695-4f4e-b6bb-602ea491cb2d-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 18:18:53 crc kubenswrapper[4558]: I0120 18:18:53.252995 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/afc237c4-c25c-4dde-91f5-23accf56f5a9-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 18:18:53 crc kubenswrapper[4558]: I0120 18:18:53.253008 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h4zvz\" (UniqueName: \"kubernetes.io/projected/afc237c4-c25c-4dde-91f5-23accf56f5a9-kube-api-access-h4zvz\") on node \"crc\" DevicePath \"\"" Jan 20 18:18:53 crc kubenswrapper[4558]: I0120 18:18:53.253016 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/afc237c4-c25c-4dde-91f5-23accf56f5a9-logs\") on node \"crc\" DevicePath \"\"" Jan 20 18:18:53 crc kubenswrapper[4558]: I0120 18:18:53.253026 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/afc237c4-c25c-4dde-91f5-23accf56f5a9-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 18:18:53 crc kubenswrapper[4558]: I0120 18:18:53.309017 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["barbican-kuttl-tests/barbican-keystone-listener-6f9c66d74d-wwr54"] Jan 20 18:18:53 crc kubenswrapper[4558]: I0120 18:18:53.313518 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["barbican-kuttl-tests/barbican-keystone-listener-6f9c66d74d-wwr54"] Jan 20 18:18:54 crc kubenswrapper[4558]: I0120 18:18:54.008047 4558 generic.go:334] "Generic (PLEG): container finished" podID="3e312336-756e-4803-9db5-f0b0e31eb6ac" containerID="732989bca0f83f5be83d330c5fb2375467ff6a8a0816a9188481958969606838" exitCode=0 Jan 20 18:18:54 crc kubenswrapper[4558]: I0120 18:18:54.008213 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican1f0e-account-delete-5srcg" event={"ID":"3e312336-756e-4803-9db5-f0b0e31eb6ac","Type":"ContainerDied","Data":"732989bca0f83f5be83d330c5fb2375467ff6a8a0816a9188481958969606838"} Jan 20 18:18:54 crc kubenswrapper[4558]: I0120 18:18:54.011929 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican-worker-65bbb948f9-6kjvs" event={"ID":"afc237c4-c25c-4dde-91f5-23accf56f5a9","Type":"ContainerDied","Data":"469fd79107cfc61d0386c83dbeb86bc7d68679bdd646bc122889d4d081e35769"} Jan 20 18:18:54 crc kubenswrapper[4558]: I0120 18:18:54.011982 4558 util.go:48] "No 
ready sandbox for pod can be found. Need to start a new one" pod="barbican-kuttl-tests/barbican-worker-65bbb948f9-6kjvs" Jan 20 18:18:54 crc kubenswrapper[4558]: I0120 18:18:54.012002 4558 scope.go:117] "RemoveContainer" containerID="1d759ec87bb962d402368d8716821b4cdcc5886d59e1f98752dc517a66f23913" Jan 20 18:18:54 crc kubenswrapper[4558]: I0120 18:18:54.034540 4558 scope.go:117] "RemoveContainer" containerID="9afe78cb8a1da2bb5b08c69b68f397924a7031558028c3dfc119c19cd15d5437" Jan 20 18:18:54 crc kubenswrapper[4558]: I0120 18:18:54.038375 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["barbican-kuttl-tests/barbican-worker-65bbb948f9-6kjvs"] Jan 20 18:18:54 crc kubenswrapper[4558]: I0120 18:18:54.044237 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["barbican-kuttl-tests/barbican-worker-65bbb948f9-6kjvs"] Jan 20 18:18:54 crc kubenswrapper[4558]: I0120 18:18:54.575239 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b59d155-7695-4f4e-b6bb-602ea491cb2d" path="/var/lib/kubelet/pods/0b59d155-7695-4f4e-b6bb-602ea491cb2d/volumes" Jan 20 18:18:54 crc kubenswrapper[4558]: I0120 18:18:54.575959 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="65f781b0-c276-41cd-911f-9c964612d70d" path="/var/lib/kubelet/pods/65f781b0-c276-41cd-911f-9c964612d70d/volumes" Jan 20 18:18:54 crc kubenswrapper[4558]: I0120 18:18:54.576637 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="afc237c4-c25c-4dde-91f5-23accf56f5a9" path="/var/lib/kubelet/pods/afc237c4-c25c-4dde-91f5-23accf56f5a9/volumes" Jan 20 18:18:55 crc kubenswrapper[4558]: I0120 18:18:55.282040 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="barbican-kuttl-tests/barbican1f0e-account-delete-5srcg" Jan 20 18:18:55 crc kubenswrapper[4558]: I0120 18:18:55.283918 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3e312336-756e-4803-9db5-f0b0e31eb6ac-operator-scripts\") pod \"3e312336-756e-4803-9db5-f0b0e31eb6ac\" (UID: \"3e312336-756e-4803-9db5-f0b0e31eb6ac\") " Jan 20 18:18:55 crc kubenswrapper[4558]: I0120 18:18:55.284101 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rd6ks\" (UniqueName: \"kubernetes.io/projected/3e312336-756e-4803-9db5-f0b0e31eb6ac-kube-api-access-rd6ks\") pod \"3e312336-756e-4803-9db5-f0b0e31eb6ac\" (UID: \"3e312336-756e-4803-9db5-f0b0e31eb6ac\") " Jan 20 18:18:55 crc kubenswrapper[4558]: I0120 18:18:55.284437 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3e312336-756e-4803-9db5-f0b0e31eb6ac-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "3e312336-756e-4803-9db5-f0b0e31eb6ac" (UID: "3e312336-756e-4803-9db5-f0b0e31eb6ac"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:18:55 crc kubenswrapper[4558]: I0120 18:18:55.284700 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3e312336-756e-4803-9db5-f0b0e31eb6ac-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 18:18:55 crc kubenswrapper[4558]: I0120 18:18:55.290182 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e312336-756e-4803-9db5-f0b0e31eb6ac-kube-api-access-rd6ks" (OuterVolumeSpecName: "kube-api-access-rd6ks") pod "3e312336-756e-4803-9db5-f0b0e31eb6ac" (UID: "3e312336-756e-4803-9db5-f0b0e31eb6ac"). InnerVolumeSpecName "kube-api-access-rd6ks". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:18:55 crc kubenswrapper[4558]: I0120 18:18:55.386441 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rd6ks\" (UniqueName: \"kubernetes.io/projected/3e312336-756e-4803-9db5-f0b0e31eb6ac-kube-api-access-rd6ks\") on node \"crc\" DevicePath \"\"" Jan 20 18:18:56 crc kubenswrapper[4558]: I0120 18:18:56.033395 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican1f0e-account-delete-5srcg" event={"ID":"3e312336-756e-4803-9db5-f0b0e31eb6ac","Type":"ContainerDied","Data":"18063842f6f9888eb29bda3e1aa6714fa5031ea86173a25f3ba177efeacd70f1"} Jan 20 18:18:56 crc kubenswrapper[4558]: I0120 18:18:56.033647 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="18063842f6f9888eb29bda3e1aa6714fa5031ea86173a25f3ba177efeacd70f1" Jan 20 18:18:56 crc kubenswrapper[4558]: I0120 18:18:56.033507 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="barbican-kuttl-tests/barbican1f0e-account-delete-5srcg" Jan 20 18:18:57 crc kubenswrapper[4558]: I0120 18:18:57.080260 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["barbican-kuttl-tests/barbican-db-create-bnkm2"] Jan 20 18:18:57 crc kubenswrapper[4558]: I0120 18:18:57.108251 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["barbican-kuttl-tests/barbican-db-create-bnkm2"] Jan 20 18:18:57 crc kubenswrapper[4558]: I0120 18:18:57.118215 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["barbican-kuttl-tests/barbican-1f0e-account-create-update-swv56"] Jan 20 18:18:57 crc kubenswrapper[4558]: I0120 18:18:57.133252 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["barbican-kuttl-tests/barbican-1f0e-account-create-update-swv56"] Jan 20 18:18:57 crc kubenswrapper[4558]: I0120 18:18:57.139443 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["barbican-kuttl-tests/barbican1f0e-account-delete-5srcg"] Jan 20 18:18:57 crc kubenswrapper[4558]: I0120 18:18:57.152766 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["barbican-kuttl-tests/barbican1f0e-account-delete-5srcg"] Jan 20 18:18:57 crc kubenswrapper[4558]: I0120 18:18:57.249199 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["barbican-kuttl-tests/barbican-db-create-5hlft"] Jan 20 18:18:57 crc kubenswrapper[4558]: E0120 18:18:57.249857 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b59d155-7695-4f4e-b6bb-602ea491cb2d" containerName="barbican-keystone-listener" Jan 20 18:18:57 crc kubenswrapper[4558]: I0120 18:18:57.249954 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b59d155-7695-4f4e-b6bb-602ea491cb2d" containerName="barbican-keystone-listener" Jan 20 18:18:57 crc kubenswrapper[4558]: E0120 18:18:57.250015 
4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b59d155-7695-4f4e-b6bb-602ea491cb2d" containerName="barbican-keystone-listener-log" Jan 20 18:18:57 crc kubenswrapper[4558]: I0120 18:18:57.250059 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b59d155-7695-4f4e-b6bb-602ea491cb2d" containerName="barbican-keystone-listener-log" Jan 20 18:18:57 crc kubenswrapper[4558]: E0120 18:18:57.250116 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e312336-756e-4803-9db5-f0b0e31eb6ac" containerName="mariadb-account-delete" Jan 20 18:18:57 crc kubenswrapper[4558]: I0120 18:18:57.250157 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e312336-756e-4803-9db5-f0b0e31eb6ac" containerName="mariadb-account-delete" Jan 20 18:18:57 crc kubenswrapper[4558]: E0120 18:18:57.250241 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65f781b0-c276-41cd-911f-9c964612d70d" containerName="barbican-api" Jan 20 18:18:57 crc kubenswrapper[4558]: I0120 18:18:57.250286 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="65f781b0-c276-41cd-911f-9c964612d70d" containerName="barbican-api" Jan 20 18:18:57 crc kubenswrapper[4558]: E0120 18:18:57.250331 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="afc237c4-c25c-4dde-91f5-23accf56f5a9" containerName="barbican-worker" Jan 20 18:18:57 crc kubenswrapper[4558]: I0120 18:18:57.250370 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="afc237c4-c25c-4dde-91f5-23accf56f5a9" containerName="barbican-worker" Jan 20 18:18:57 crc kubenswrapper[4558]: E0120 18:18:57.250420 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="afc237c4-c25c-4dde-91f5-23accf56f5a9" containerName="barbican-worker-log" Jan 20 18:18:57 crc kubenswrapper[4558]: I0120 18:18:57.250459 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="afc237c4-c25c-4dde-91f5-23accf56f5a9" containerName="barbican-worker-log" Jan 20 18:18:57 crc kubenswrapper[4558]: E0120 18:18:57.250504 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65f781b0-c276-41cd-911f-9c964612d70d" containerName="barbican-api-log" Jan 20 18:18:57 crc kubenswrapper[4558]: I0120 18:18:57.250544 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="65f781b0-c276-41cd-911f-9c964612d70d" containerName="barbican-api-log" Jan 20 18:18:57 crc kubenswrapper[4558]: I0120 18:18:57.250750 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="65f781b0-c276-41cd-911f-9c964612d70d" containerName="barbican-api-log" Jan 20 18:18:57 crc kubenswrapper[4558]: I0120 18:18:57.250802 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="0b59d155-7695-4f4e-b6bb-602ea491cb2d" containerName="barbican-keystone-listener-log" Jan 20 18:18:57 crc kubenswrapper[4558]: I0120 18:18:57.250847 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="afc237c4-c25c-4dde-91f5-23accf56f5a9" containerName="barbican-worker" Jan 20 18:18:57 crc kubenswrapper[4558]: I0120 18:18:57.250885 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="0b59d155-7695-4f4e-b6bb-602ea491cb2d" containerName="barbican-keystone-listener" Jan 20 18:18:57 crc kubenswrapper[4558]: I0120 18:18:57.250946 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e312336-756e-4803-9db5-f0b0e31eb6ac" containerName="mariadb-account-delete" Jan 20 18:18:57 crc kubenswrapper[4558]: I0120 18:18:57.250995 4558 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="65f781b0-c276-41cd-911f-9c964612d70d" containerName="barbican-api" Jan 20 18:18:57 crc kubenswrapper[4558]: I0120 18:18:57.251037 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="afc237c4-c25c-4dde-91f5-23accf56f5a9" containerName="barbican-worker-log" Jan 20 18:18:57 crc kubenswrapper[4558]: I0120 18:18:57.251719 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="barbican-kuttl-tests/barbican-db-create-5hlft" Jan 20 18:18:57 crc kubenswrapper[4558]: I0120 18:18:57.262493 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["barbican-kuttl-tests/barbican-0a40-account-create-update-rs2rx"] Jan 20 18:18:57 crc kubenswrapper[4558]: I0120 18:18:57.263338 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="barbican-kuttl-tests/barbican-0a40-account-create-update-rs2rx" Jan 20 18:18:57 crc kubenswrapper[4558]: I0120 18:18:57.266083 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["barbican-kuttl-tests/barbican-db-create-5hlft"] Jan 20 18:18:57 crc kubenswrapper[4558]: I0120 18:18:57.269705 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["barbican-kuttl-tests/barbican-0a40-account-create-update-rs2rx"] Jan 20 18:18:57 crc kubenswrapper[4558]: I0120 18:18:57.271571 4558 reflector.go:368] Caches populated for *v1.Secret from object-"barbican-kuttl-tests"/"barbican-db-secret" Jan 20 18:18:57 crc kubenswrapper[4558]: I0120 18:18:57.416417 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7c30e311-5eff-45a8-ad20-e455cacd93f4-operator-scripts\") pod \"barbican-db-create-5hlft\" (UID: \"7c30e311-5eff-45a8-ad20-e455cacd93f4\") " pod="barbican-kuttl-tests/barbican-db-create-5hlft" Jan 20 18:18:57 crc kubenswrapper[4558]: I0120 18:18:57.416532 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a6c1c2bc-172b-4618-ae21-b22152ffef1f-operator-scripts\") pod \"barbican-0a40-account-create-update-rs2rx\" (UID: \"a6c1c2bc-172b-4618-ae21-b22152ffef1f\") " pod="barbican-kuttl-tests/barbican-0a40-account-create-update-rs2rx" Jan 20 18:18:57 crc kubenswrapper[4558]: I0120 18:18:57.416599 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nl6zb\" (UniqueName: \"kubernetes.io/projected/a6c1c2bc-172b-4618-ae21-b22152ffef1f-kube-api-access-nl6zb\") pod \"barbican-0a40-account-create-update-rs2rx\" (UID: \"a6c1c2bc-172b-4618-ae21-b22152ffef1f\") " pod="barbican-kuttl-tests/barbican-0a40-account-create-update-rs2rx" Jan 20 18:18:57 crc kubenswrapper[4558]: I0120 18:18:57.416655 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lvw5x\" (UniqueName: \"kubernetes.io/projected/7c30e311-5eff-45a8-ad20-e455cacd93f4-kube-api-access-lvw5x\") pod \"barbican-db-create-5hlft\" (UID: \"7c30e311-5eff-45a8-ad20-e455cacd93f4\") " pod="barbican-kuttl-tests/barbican-db-create-5hlft" Jan 20 18:18:57 crc kubenswrapper[4558]: I0120 18:18:57.517330 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lvw5x\" (UniqueName: \"kubernetes.io/projected/7c30e311-5eff-45a8-ad20-e455cacd93f4-kube-api-access-lvw5x\") pod \"barbican-db-create-5hlft\" (UID: \"7c30e311-5eff-45a8-ad20-e455cacd93f4\") " pod="barbican-kuttl-tests/barbican-db-create-5hlft" 
Jan 20 18:18:57 crc kubenswrapper[4558]: I0120 18:18:57.517400 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7c30e311-5eff-45a8-ad20-e455cacd93f4-operator-scripts\") pod \"barbican-db-create-5hlft\" (UID: \"7c30e311-5eff-45a8-ad20-e455cacd93f4\") " pod="barbican-kuttl-tests/barbican-db-create-5hlft" Jan 20 18:18:57 crc kubenswrapper[4558]: I0120 18:18:57.517477 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a6c1c2bc-172b-4618-ae21-b22152ffef1f-operator-scripts\") pod \"barbican-0a40-account-create-update-rs2rx\" (UID: \"a6c1c2bc-172b-4618-ae21-b22152ffef1f\") " pod="barbican-kuttl-tests/barbican-0a40-account-create-update-rs2rx" Jan 20 18:18:57 crc kubenswrapper[4558]: I0120 18:18:57.517562 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nl6zb\" (UniqueName: \"kubernetes.io/projected/a6c1c2bc-172b-4618-ae21-b22152ffef1f-kube-api-access-nl6zb\") pod \"barbican-0a40-account-create-update-rs2rx\" (UID: \"a6c1c2bc-172b-4618-ae21-b22152ffef1f\") " pod="barbican-kuttl-tests/barbican-0a40-account-create-update-rs2rx" Jan 20 18:18:57 crc kubenswrapper[4558]: I0120 18:18:57.518441 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7c30e311-5eff-45a8-ad20-e455cacd93f4-operator-scripts\") pod \"barbican-db-create-5hlft\" (UID: \"7c30e311-5eff-45a8-ad20-e455cacd93f4\") " pod="barbican-kuttl-tests/barbican-db-create-5hlft" Jan 20 18:18:57 crc kubenswrapper[4558]: I0120 18:18:57.518445 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a6c1c2bc-172b-4618-ae21-b22152ffef1f-operator-scripts\") pod \"barbican-0a40-account-create-update-rs2rx\" (UID: \"a6c1c2bc-172b-4618-ae21-b22152ffef1f\") " pod="barbican-kuttl-tests/barbican-0a40-account-create-update-rs2rx" Jan 20 18:18:57 crc kubenswrapper[4558]: I0120 18:18:57.535338 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lvw5x\" (UniqueName: \"kubernetes.io/projected/7c30e311-5eff-45a8-ad20-e455cacd93f4-kube-api-access-lvw5x\") pod \"barbican-db-create-5hlft\" (UID: \"7c30e311-5eff-45a8-ad20-e455cacd93f4\") " pod="barbican-kuttl-tests/barbican-db-create-5hlft" Jan 20 18:18:57 crc kubenswrapper[4558]: I0120 18:18:57.535925 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nl6zb\" (UniqueName: \"kubernetes.io/projected/a6c1c2bc-172b-4618-ae21-b22152ffef1f-kube-api-access-nl6zb\") pod \"barbican-0a40-account-create-update-rs2rx\" (UID: \"a6c1c2bc-172b-4618-ae21-b22152ffef1f\") " pod="barbican-kuttl-tests/barbican-0a40-account-create-update-rs2rx" Jan 20 18:18:57 crc kubenswrapper[4558]: I0120 18:18:57.566784 4558 scope.go:117] "RemoveContainer" containerID="27314e1a8af2e8df8ecd2e7a06cbb630afbfecd8ca2338a1f199cf92f982f581" Jan 20 18:18:57 crc kubenswrapper[4558]: E0120 18:18:57.567403 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" 
podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:18:57 crc kubenswrapper[4558]: I0120 18:18:57.577993 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="barbican-kuttl-tests/barbican-db-create-5hlft" Jan 20 18:18:57 crc kubenswrapper[4558]: I0120 18:18:57.585110 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="barbican-kuttl-tests/barbican-0a40-account-create-update-rs2rx" Jan 20 18:18:57 crc kubenswrapper[4558]: I0120 18:18:57.987587 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["barbican-kuttl-tests/barbican-db-create-5hlft"] Jan 20 18:18:58 crc kubenswrapper[4558]: I0120 18:18:58.041863 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["barbican-kuttl-tests/barbican-0a40-account-create-update-rs2rx"] Jan 20 18:18:58 crc kubenswrapper[4558]: W0120 18:18:58.048024 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda6c1c2bc_172b_4618_ae21_b22152ffef1f.slice/crio-5a2cca3549c47af301e2cac39f81c2e684494ff2db07572ea3079a10d6fda39c WatchSource:0}: Error finding container 5a2cca3549c47af301e2cac39f81c2e684494ff2db07572ea3079a10d6fda39c: Status 404 returned error can't find the container with id 5a2cca3549c47af301e2cac39f81c2e684494ff2db07572ea3079a10d6fda39c Jan 20 18:18:58 crc kubenswrapper[4558]: I0120 18:18:58.048945 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican-db-create-5hlft" event={"ID":"7c30e311-5eff-45a8-ad20-e455cacd93f4","Type":"ContainerStarted","Data":"91174e2e5388c5e7ba9eefcdcdf9a805f1b62ba9ba462935067d49f995f88afd"} Jan 20 18:18:58 crc kubenswrapper[4558]: I0120 18:18:58.578438 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3e312336-756e-4803-9db5-f0b0e31eb6ac" path="/var/lib/kubelet/pods/3e312336-756e-4803-9db5-f0b0e31eb6ac/volumes" Jan 20 18:18:58 crc kubenswrapper[4558]: I0120 18:18:58.579009 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="99aca88d-e31b-4cf3-8532-ff2b3ecc42c6" path="/var/lib/kubelet/pods/99aca88d-e31b-4cf3-8532-ff2b3ecc42c6/volumes" Jan 20 18:18:58 crc kubenswrapper[4558]: I0120 18:18:58.579540 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="da3a3927-0645-403e-a972-3a1644222ed5" path="/var/lib/kubelet/pods/da3a3927-0645-403e-a972-3a1644222ed5/volumes" Jan 20 18:18:59 crc kubenswrapper[4558]: I0120 18:18:59.059076 4558 generic.go:334] "Generic (PLEG): container finished" podID="7c30e311-5eff-45a8-ad20-e455cacd93f4" containerID="64d5902721ad60e46859783918d2179ac0a6d631aebd5574c3b527e6050cf780" exitCode=0 Jan 20 18:18:59 crc kubenswrapper[4558]: I0120 18:18:59.059425 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican-db-create-5hlft" event={"ID":"7c30e311-5eff-45a8-ad20-e455cacd93f4","Type":"ContainerDied","Data":"64d5902721ad60e46859783918d2179ac0a6d631aebd5574c3b527e6050cf780"} Jan 20 18:18:59 crc kubenswrapper[4558]: I0120 18:18:59.061794 4558 generic.go:334] "Generic (PLEG): container finished" podID="a6c1c2bc-172b-4618-ae21-b22152ffef1f" containerID="6538b3f3df0d13d62bac2b1cc9919ad3639cd0163b9aa1ca9f045d5309d9733b" exitCode=0 Jan 20 18:18:59 crc kubenswrapper[4558]: I0120 18:18:59.061913 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican-0a40-account-create-update-rs2rx" 
event={"ID":"a6c1c2bc-172b-4618-ae21-b22152ffef1f","Type":"ContainerDied","Data":"6538b3f3df0d13d62bac2b1cc9919ad3639cd0163b9aa1ca9f045d5309d9733b"} Jan 20 18:18:59 crc kubenswrapper[4558]: I0120 18:18:59.061995 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican-0a40-account-create-update-rs2rx" event={"ID":"a6c1c2bc-172b-4618-ae21-b22152ffef1f","Type":"ContainerStarted","Data":"5a2cca3549c47af301e2cac39f81c2e684494ff2db07572ea3079a10d6fda39c"} Jan 20 18:19:00 crc kubenswrapper[4558]: I0120 18:19:00.377703 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="barbican-kuttl-tests/barbican-0a40-account-create-update-rs2rx" Jan 20 18:19:00 crc kubenswrapper[4558]: I0120 18:19:00.437508 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="barbican-kuttl-tests/barbican-db-create-5hlft" Jan 20 18:19:00 crc kubenswrapper[4558]: I0120 18:19:00.468678 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7c30e311-5eff-45a8-ad20-e455cacd93f4-operator-scripts\") pod \"7c30e311-5eff-45a8-ad20-e455cacd93f4\" (UID: \"7c30e311-5eff-45a8-ad20-e455cacd93f4\") " Jan 20 18:19:00 crc kubenswrapper[4558]: I0120 18:19:00.468730 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lvw5x\" (UniqueName: \"kubernetes.io/projected/7c30e311-5eff-45a8-ad20-e455cacd93f4-kube-api-access-lvw5x\") pod \"7c30e311-5eff-45a8-ad20-e455cacd93f4\" (UID: \"7c30e311-5eff-45a8-ad20-e455cacd93f4\") " Jan 20 18:19:00 crc kubenswrapper[4558]: I0120 18:19:00.468774 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nl6zb\" (UniqueName: \"kubernetes.io/projected/a6c1c2bc-172b-4618-ae21-b22152ffef1f-kube-api-access-nl6zb\") pod \"a6c1c2bc-172b-4618-ae21-b22152ffef1f\" (UID: \"a6c1c2bc-172b-4618-ae21-b22152ffef1f\") " Jan 20 18:19:00 crc kubenswrapper[4558]: I0120 18:19:00.468806 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a6c1c2bc-172b-4618-ae21-b22152ffef1f-operator-scripts\") pod \"a6c1c2bc-172b-4618-ae21-b22152ffef1f\" (UID: \"a6c1c2bc-172b-4618-ae21-b22152ffef1f\") " Jan 20 18:19:00 crc kubenswrapper[4558]: I0120 18:19:00.469144 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7c30e311-5eff-45a8-ad20-e455cacd93f4-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7c30e311-5eff-45a8-ad20-e455cacd93f4" (UID: "7c30e311-5eff-45a8-ad20-e455cacd93f4"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:19:00 crc kubenswrapper[4558]: I0120 18:19:00.469407 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a6c1c2bc-172b-4618-ae21-b22152ffef1f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a6c1c2bc-172b-4618-ae21-b22152ffef1f" (UID: "a6c1c2bc-172b-4618-ae21-b22152ffef1f"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:19:00 crc kubenswrapper[4558]: I0120 18:19:00.474386 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7c30e311-5eff-45a8-ad20-e455cacd93f4-kube-api-access-lvw5x" (OuterVolumeSpecName: "kube-api-access-lvw5x") pod "7c30e311-5eff-45a8-ad20-e455cacd93f4" (UID: "7c30e311-5eff-45a8-ad20-e455cacd93f4"). InnerVolumeSpecName "kube-api-access-lvw5x". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:19:00 crc kubenswrapper[4558]: I0120 18:19:00.474533 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a6c1c2bc-172b-4618-ae21-b22152ffef1f-kube-api-access-nl6zb" (OuterVolumeSpecName: "kube-api-access-nl6zb") pod "a6c1c2bc-172b-4618-ae21-b22152ffef1f" (UID: "a6c1c2bc-172b-4618-ae21-b22152ffef1f"). InnerVolumeSpecName "kube-api-access-nl6zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:19:00 crc kubenswrapper[4558]: I0120 18:19:00.570060 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nl6zb\" (UniqueName: \"kubernetes.io/projected/a6c1c2bc-172b-4618-ae21-b22152ffef1f-kube-api-access-nl6zb\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:00 crc kubenswrapper[4558]: I0120 18:19:00.570101 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a6c1c2bc-172b-4618-ae21-b22152ffef1f-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:00 crc kubenswrapper[4558]: I0120 18:19:00.570116 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7c30e311-5eff-45a8-ad20-e455cacd93f4-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:00 crc kubenswrapper[4558]: I0120 18:19:00.570127 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lvw5x\" (UniqueName: \"kubernetes.io/projected/7c30e311-5eff-45a8-ad20-e455cacd93f4-kube-api-access-lvw5x\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:01 crc kubenswrapper[4558]: I0120 18:19:01.083083 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican-0a40-account-create-update-rs2rx" event={"ID":"a6c1c2bc-172b-4618-ae21-b22152ffef1f","Type":"ContainerDied","Data":"5a2cca3549c47af301e2cac39f81c2e684494ff2db07572ea3079a10d6fda39c"} Jan 20 18:19:01 crc kubenswrapper[4558]: I0120 18:19:01.083718 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5a2cca3549c47af301e2cac39f81c2e684494ff2db07572ea3079a10d6fda39c" Jan 20 18:19:01 crc kubenswrapper[4558]: I0120 18:19:01.083112 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="barbican-kuttl-tests/barbican-0a40-account-create-update-rs2rx" Jan 20 18:19:01 crc kubenswrapper[4558]: I0120 18:19:01.085947 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican-db-create-5hlft" event={"ID":"7c30e311-5eff-45a8-ad20-e455cacd93f4","Type":"ContainerDied","Data":"91174e2e5388c5e7ba9eefcdcdf9a805f1b62ba9ba462935067d49f995f88afd"} Jan 20 18:19:01 crc kubenswrapper[4558]: I0120 18:19:01.085999 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="91174e2e5388c5e7ba9eefcdcdf9a805f1b62ba9ba462935067d49f995f88afd" Jan 20 18:19:01 crc kubenswrapper[4558]: I0120 18:19:01.086011 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="barbican-kuttl-tests/barbican-db-create-5hlft" Jan 20 18:19:02 crc kubenswrapper[4558]: I0120 18:19:02.529260 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["barbican-kuttl-tests/barbican-db-sync-8hnjv"] Jan 20 18:19:02 crc kubenswrapper[4558]: E0120 18:19:02.529742 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6c1c2bc-172b-4618-ae21-b22152ffef1f" containerName="mariadb-account-create-update" Jan 20 18:19:02 crc kubenswrapper[4558]: I0120 18:19:02.529763 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6c1c2bc-172b-4618-ae21-b22152ffef1f" containerName="mariadb-account-create-update" Jan 20 18:19:02 crc kubenswrapper[4558]: E0120 18:19:02.529773 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c30e311-5eff-45a8-ad20-e455cacd93f4" containerName="mariadb-database-create" Jan 20 18:19:02 crc kubenswrapper[4558]: I0120 18:19:02.529779 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c30e311-5eff-45a8-ad20-e455cacd93f4" containerName="mariadb-database-create" Jan 20 18:19:02 crc kubenswrapper[4558]: I0120 18:19:02.529960 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="7c30e311-5eff-45a8-ad20-e455cacd93f4" containerName="mariadb-database-create" Jan 20 18:19:02 crc kubenswrapper[4558]: I0120 18:19:02.529985 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a6c1c2bc-172b-4618-ae21-b22152ffef1f" containerName="mariadb-account-create-update" Jan 20 18:19:02 crc kubenswrapper[4558]: I0120 18:19:02.530826 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="barbican-kuttl-tests/barbican-db-sync-8hnjv" Jan 20 18:19:02 crc kubenswrapper[4558]: I0120 18:19:02.534264 4558 reflector.go:368] Caches populated for *v1.Secret from object-"barbican-kuttl-tests"/"barbican-config-data" Jan 20 18:19:02 crc kubenswrapper[4558]: I0120 18:19:02.534563 4558 reflector.go:368] Caches populated for *v1.Secret from object-"barbican-kuttl-tests"/"combined-ca-bundle" Jan 20 18:19:02 crc kubenswrapper[4558]: I0120 18:19:02.534743 4558 reflector.go:368] Caches populated for *v1.Secret from object-"barbican-kuttl-tests"/"barbican-barbican-dockercfg-ng4pq" Jan 20 18:19:02 crc kubenswrapper[4558]: I0120 18:19:02.541121 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["barbican-kuttl-tests/barbican-db-sync-8hnjv"] Jan 20 18:19:02 crc kubenswrapper[4558]: I0120 18:19:02.602131 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/dfad60e4-fc8b-498d-9376-840b433c50de-db-sync-config-data\") pod \"barbican-db-sync-8hnjv\" (UID: \"dfad60e4-fc8b-498d-9376-840b433c50de\") " pod="barbican-kuttl-tests/barbican-db-sync-8hnjv" Jan 20 18:19:02 crc kubenswrapper[4558]: I0120 18:19:02.602194 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g4tjv\" (UniqueName: \"kubernetes.io/projected/dfad60e4-fc8b-498d-9376-840b433c50de-kube-api-access-g4tjv\") pod \"barbican-db-sync-8hnjv\" (UID: \"dfad60e4-fc8b-498d-9376-840b433c50de\") " pod="barbican-kuttl-tests/barbican-db-sync-8hnjv" Jan 20 18:19:02 crc kubenswrapper[4558]: I0120 18:19:02.602477 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dfad60e4-fc8b-498d-9376-840b433c50de-combined-ca-bundle\") pod \"barbican-db-sync-8hnjv\" 
(UID: \"dfad60e4-fc8b-498d-9376-840b433c50de\") " pod="barbican-kuttl-tests/barbican-db-sync-8hnjv" Jan 20 18:19:02 crc kubenswrapper[4558]: I0120 18:19:02.703183 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dfad60e4-fc8b-498d-9376-840b433c50de-combined-ca-bundle\") pod \"barbican-db-sync-8hnjv\" (UID: \"dfad60e4-fc8b-498d-9376-840b433c50de\") " pod="barbican-kuttl-tests/barbican-db-sync-8hnjv" Jan 20 18:19:02 crc kubenswrapper[4558]: I0120 18:19:02.703290 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/dfad60e4-fc8b-498d-9376-840b433c50de-db-sync-config-data\") pod \"barbican-db-sync-8hnjv\" (UID: \"dfad60e4-fc8b-498d-9376-840b433c50de\") " pod="barbican-kuttl-tests/barbican-db-sync-8hnjv" Jan 20 18:19:02 crc kubenswrapper[4558]: I0120 18:19:02.703317 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g4tjv\" (UniqueName: \"kubernetes.io/projected/dfad60e4-fc8b-498d-9376-840b433c50de-kube-api-access-g4tjv\") pod \"barbican-db-sync-8hnjv\" (UID: \"dfad60e4-fc8b-498d-9376-840b433c50de\") " pod="barbican-kuttl-tests/barbican-db-sync-8hnjv" Jan 20 18:19:02 crc kubenswrapper[4558]: I0120 18:19:02.711198 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dfad60e4-fc8b-498d-9376-840b433c50de-combined-ca-bundle\") pod \"barbican-db-sync-8hnjv\" (UID: \"dfad60e4-fc8b-498d-9376-840b433c50de\") " pod="barbican-kuttl-tests/barbican-db-sync-8hnjv" Jan 20 18:19:02 crc kubenswrapper[4558]: I0120 18:19:02.711563 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/dfad60e4-fc8b-498d-9376-840b433c50de-db-sync-config-data\") pod \"barbican-db-sync-8hnjv\" (UID: \"dfad60e4-fc8b-498d-9376-840b433c50de\") " pod="barbican-kuttl-tests/barbican-db-sync-8hnjv" Jan 20 18:19:02 crc kubenswrapper[4558]: I0120 18:19:02.718523 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g4tjv\" (UniqueName: \"kubernetes.io/projected/dfad60e4-fc8b-498d-9376-840b433c50de-kube-api-access-g4tjv\") pod \"barbican-db-sync-8hnjv\" (UID: \"dfad60e4-fc8b-498d-9376-840b433c50de\") " pod="barbican-kuttl-tests/barbican-db-sync-8hnjv" Jan 20 18:19:02 crc kubenswrapper[4558]: I0120 18:19:02.850108 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="barbican-kuttl-tests/barbican-db-sync-8hnjv" Jan 20 18:19:03 crc kubenswrapper[4558]: I0120 18:19:03.236664 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["barbican-kuttl-tests/barbican-db-sync-8hnjv"] Jan 20 18:19:04 crc kubenswrapper[4558]: I0120 18:19:04.112760 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican-db-sync-8hnjv" event={"ID":"dfad60e4-fc8b-498d-9376-840b433c50de","Type":"ContainerStarted","Data":"7f2b1b0d5def68793489498d2bf511951f72742de22aa59f6962dcab8c4ef0d1"} Jan 20 18:19:04 crc kubenswrapper[4558]: I0120 18:19:04.113200 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican-db-sync-8hnjv" event={"ID":"dfad60e4-fc8b-498d-9376-840b433c50de","Type":"ContainerStarted","Data":"93913e16b2886a26b4a38596962d579ae27c89a9e2991c9f4c1d121c4a61e0a8"} Jan 20 18:19:04 crc kubenswrapper[4558]: I0120 18:19:04.141465 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="barbican-kuttl-tests/barbican-db-sync-8hnjv" podStartSLOduration=2.141442994 podStartE2EDuration="2.141442994s" podCreationTimestamp="2026-01-20 18:19:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:19:04.140323358 +0000 UTC m=+5837.900661325" watchObservedRunningTime="2026-01-20 18:19:04.141442994 +0000 UTC m=+5837.901780960" Jan 20 18:19:05 crc kubenswrapper[4558]: I0120 18:19:05.124473 4558 generic.go:334] "Generic (PLEG): container finished" podID="dfad60e4-fc8b-498d-9376-840b433c50de" containerID="7f2b1b0d5def68793489498d2bf511951f72742de22aa59f6962dcab8c4ef0d1" exitCode=0 Jan 20 18:19:05 crc kubenswrapper[4558]: I0120 18:19:05.124596 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican-db-sync-8hnjv" event={"ID":"dfad60e4-fc8b-498d-9376-840b433c50de","Type":"ContainerDied","Data":"7f2b1b0d5def68793489498d2bf511951f72742de22aa59f6962dcab8c4ef0d1"} Jan 20 18:19:06 crc kubenswrapper[4558]: I0120 18:19:06.354926 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="barbican-kuttl-tests/barbican-db-sync-8hnjv" Jan 20 18:19:06 crc kubenswrapper[4558]: I0120 18:19:06.458980 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/dfad60e4-fc8b-498d-9376-840b433c50de-db-sync-config-data\") pod \"dfad60e4-fc8b-498d-9376-840b433c50de\" (UID: \"dfad60e4-fc8b-498d-9376-840b433c50de\") " Jan 20 18:19:06 crc kubenswrapper[4558]: I0120 18:19:06.459194 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g4tjv\" (UniqueName: \"kubernetes.io/projected/dfad60e4-fc8b-498d-9376-840b433c50de-kube-api-access-g4tjv\") pod \"dfad60e4-fc8b-498d-9376-840b433c50de\" (UID: \"dfad60e4-fc8b-498d-9376-840b433c50de\") " Jan 20 18:19:06 crc kubenswrapper[4558]: I0120 18:19:06.459305 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dfad60e4-fc8b-498d-9376-840b433c50de-combined-ca-bundle\") pod \"dfad60e4-fc8b-498d-9376-840b433c50de\" (UID: \"dfad60e4-fc8b-498d-9376-840b433c50de\") " Jan 20 18:19:06 crc kubenswrapper[4558]: I0120 18:19:06.464208 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dfad60e4-fc8b-498d-9376-840b433c50de-kube-api-access-g4tjv" (OuterVolumeSpecName: "kube-api-access-g4tjv") pod "dfad60e4-fc8b-498d-9376-840b433c50de" (UID: "dfad60e4-fc8b-498d-9376-840b433c50de"). InnerVolumeSpecName "kube-api-access-g4tjv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:19:06 crc kubenswrapper[4558]: I0120 18:19:06.464433 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dfad60e4-fc8b-498d-9376-840b433c50de-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "dfad60e4-fc8b-498d-9376-840b433c50de" (UID: "dfad60e4-fc8b-498d-9376-840b433c50de"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:19:06 crc kubenswrapper[4558]: I0120 18:19:06.477313 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dfad60e4-fc8b-498d-9376-840b433c50de-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dfad60e4-fc8b-498d-9376-840b433c50de" (UID: "dfad60e4-fc8b-498d-9376-840b433c50de"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:19:06 crc kubenswrapper[4558]: I0120 18:19:06.561003 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dfad60e4-fc8b-498d-9376-840b433c50de-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:06 crc kubenswrapper[4558]: I0120 18:19:06.561043 4558 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/dfad60e4-fc8b-498d-9376-840b433c50de-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:06 crc kubenswrapper[4558]: I0120 18:19:06.561055 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g4tjv\" (UniqueName: \"kubernetes.io/projected/dfad60e4-fc8b-498d-9376-840b433c50de-kube-api-access-g4tjv\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.138232 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican-db-sync-8hnjv" event={"ID":"dfad60e4-fc8b-498d-9376-840b433c50de","Type":"ContainerDied","Data":"93913e16b2886a26b4a38596962d579ae27c89a9e2991c9f4c1d121c4a61e0a8"} Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.138583 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="93913e16b2886a26b4a38596962d579ae27c89a9e2991c9f4c1d121c4a61e0a8" Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.138286 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="barbican-kuttl-tests/barbican-db-sync-8hnjv" Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.564762 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["barbican-kuttl-tests/barbican-worker-5c5dccb9c-s6lqt"] Jan 20 18:19:07 crc kubenswrapper[4558]: E0120 18:19:07.565157 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dfad60e4-fc8b-498d-9376-840b433c50de" containerName="barbican-db-sync" Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.565190 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="dfad60e4-fc8b-498d-9376-840b433c50de" containerName="barbican-db-sync" Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.565371 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="dfad60e4-fc8b-498d-9376-840b433c50de" containerName="barbican-db-sync" Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.566364 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="barbican-kuttl-tests/barbican-worker-5c5dccb9c-s6lqt" Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.568283 4558 reflector.go:368] Caches populated for *v1.Secret from object-"barbican-kuttl-tests"/"barbican-barbican-dockercfg-ng4pq" Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.569221 4558 reflector.go:368] Caches populated for *v1.Secret from object-"barbican-kuttl-tests"/"barbican-worker-config-data" Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.569353 4558 reflector.go:368] Caches populated for *v1.Secret from object-"barbican-kuttl-tests"/"combined-ca-bundle" Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.569670 4558 reflector.go:368] Caches populated for *v1.Secret from object-"barbican-kuttl-tests"/"barbican-config-data" Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.571629 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["barbican-kuttl-tests/barbican-keystone-listener-5f989b5ff4-h8dzk"] Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.572866 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="barbican-kuttl-tests/barbican-keystone-listener-5f989b5ff4-h8dzk" Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.576239 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/03970e5f-267e-4cfa-a88e-3518d540a4b8-logs\") pod \"barbican-keystone-listener-5f989b5ff4-h8dzk\" (UID: \"03970e5f-267e-4cfa-a88e-3518d540a4b8\") " pod="barbican-kuttl-tests/barbican-keystone-listener-5f989b5ff4-h8dzk" Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.576319 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b5de5a6-1bc7-455d-9055-5d939864f701-combined-ca-bundle\") pod \"barbican-worker-5c5dccb9c-s6lqt\" (UID: \"4b5de5a6-1bc7-455d-9055-5d939864f701\") " pod="barbican-kuttl-tests/barbican-worker-5c5dccb9c-s6lqt" Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.576353 4558 reflector.go:368] Caches populated for *v1.Secret from object-"barbican-kuttl-tests"/"barbican-keystone-listener-config-data" Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.576387 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03970e5f-267e-4cfa-a88e-3518d540a4b8-combined-ca-bundle\") pod \"barbican-keystone-listener-5f989b5ff4-h8dzk\" (UID: \"03970e5f-267e-4cfa-a88e-3518d540a4b8\") " pod="barbican-kuttl-tests/barbican-keystone-listener-5f989b5ff4-h8dzk" Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.576447 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7jr9t\" (UniqueName: \"kubernetes.io/projected/4b5de5a6-1bc7-455d-9055-5d939864f701-kube-api-access-7jr9t\") pod \"barbican-worker-5c5dccb9c-s6lqt\" (UID: \"4b5de5a6-1bc7-455d-9055-5d939864f701\") " pod="barbican-kuttl-tests/barbican-worker-5c5dccb9c-s6lqt" Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.576466 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4b5de5a6-1bc7-455d-9055-5d939864f701-logs\") pod \"barbican-worker-5c5dccb9c-s6lqt\" (UID: \"4b5de5a6-1bc7-455d-9055-5d939864f701\") " pod="barbican-kuttl-tests/barbican-worker-5c5dccb9c-s6lqt" 
Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.576550 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2rwfm\" (UniqueName: \"kubernetes.io/projected/03970e5f-267e-4cfa-a88e-3518d540a4b8-kube-api-access-2rwfm\") pod \"barbican-keystone-listener-5f989b5ff4-h8dzk\" (UID: \"03970e5f-267e-4cfa-a88e-3518d540a4b8\") " pod="barbican-kuttl-tests/barbican-keystone-listener-5f989b5ff4-h8dzk" Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.576573 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4b5de5a6-1bc7-455d-9055-5d939864f701-config-data-custom\") pod \"barbican-worker-5c5dccb9c-s6lqt\" (UID: \"4b5de5a6-1bc7-455d-9055-5d939864f701\") " pod="barbican-kuttl-tests/barbican-worker-5c5dccb9c-s6lqt" Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.576590 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03970e5f-267e-4cfa-a88e-3518d540a4b8-config-data\") pod \"barbican-keystone-listener-5f989b5ff4-h8dzk\" (UID: \"03970e5f-267e-4cfa-a88e-3518d540a4b8\") " pod="barbican-kuttl-tests/barbican-keystone-listener-5f989b5ff4-h8dzk" Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.576663 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b5de5a6-1bc7-455d-9055-5d939864f701-config-data\") pod \"barbican-worker-5c5dccb9c-s6lqt\" (UID: \"4b5de5a6-1bc7-455d-9055-5d939864f701\") " pod="barbican-kuttl-tests/barbican-worker-5c5dccb9c-s6lqt" Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.576784 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/03970e5f-267e-4cfa-a88e-3518d540a4b8-config-data-custom\") pod \"barbican-keystone-listener-5f989b5ff4-h8dzk\" (UID: \"03970e5f-267e-4cfa-a88e-3518d540a4b8\") " pod="barbican-kuttl-tests/barbican-keystone-listener-5f989b5ff4-h8dzk" Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.579594 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["barbican-kuttl-tests/barbican-worker-5c5dccb9c-s6lqt"] Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.584230 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["barbican-kuttl-tests/barbican-keystone-listener-5f989b5ff4-h8dzk"] Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.641737 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["barbican-kuttl-tests/barbican-api-587cdccff4-jgkrc"] Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.642840 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="barbican-kuttl-tests/barbican-api-587cdccff4-jgkrc" Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.644206 4558 reflector.go:368] Caches populated for *v1.Secret from object-"barbican-kuttl-tests"/"cert-barbican-public-svc" Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.644489 4558 reflector.go:368] Caches populated for *v1.Secret from object-"barbican-kuttl-tests"/"cert-barbican-internal-svc" Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.646763 4558 reflector.go:368] Caches populated for *v1.Secret from object-"barbican-kuttl-tests"/"barbican-api-config-data" Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.657025 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["barbican-kuttl-tests/barbican-api-587cdccff4-jgkrc"] Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.677848 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/03970e5f-267e-4cfa-a88e-3518d540a4b8-logs\") pod \"barbican-keystone-listener-5f989b5ff4-h8dzk\" (UID: \"03970e5f-267e-4cfa-a88e-3518d540a4b8\") " pod="barbican-kuttl-tests/barbican-keystone-listener-5f989b5ff4-h8dzk" Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.678013 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b5de5a6-1bc7-455d-9055-5d939864f701-combined-ca-bundle\") pod \"barbican-worker-5c5dccb9c-s6lqt\" (UID: \"4b5de5a6-1bc7-455d-9055-5d939864f701\") " pod="barbican-kuttl-tests/barbican-worker-5c5dccb9c-s6lqt" Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.678498 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/03970e5f-267e-4cfa-a88e-3518d540a4b8-logs\") pod \"barbican-keystone-listener-5f989b5ff4-h8dzk\" (UID: \"03970e5f-267e-4cfa-a88e-3518d540a4b8\") " pod="barbican-kuttl-tests/barbican-keystone-listener-5f989b5ff4-h8dzk" Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.678512 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03970e5f-267e-4cfa-a88e-3518d540a4b8-combined-ca-bundle\") pod \"barbican-keystone-listener-5f989b5ff4-h8dzk\" (UID: \"03970e5f-267e-4cfa-a88e-3518d540a4b8\") " pod="barbican-kuttl-tests/barbican-keystone-listener-5f989b5ff4-h8dzk" Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.678708 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7jr9t\" (UniqueName: \"kubernetes.io/projected/4b5de5a6-1bc7-455d-9055-5d939864f701-kube-api-access-7jr9t\") pod \"barbican-worker-5c5dccb9c-s6lqt\" (UID: \"4b5de5a6-1bc7-455d-9055-5d939864f701\") " pod="barbican-kuttl-tests/barbican-worker-5c5dccb9c-s6lqt" Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.678780 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4b5de5a6-1bc7-455d-9055-5d939864f701-logs\") pod \"barbican-worker-5c5dccb9c-s6lqt\" (UID: \"4b5de5a6-1bc7-455d-9055-5d939864f701\") " pod="barbican-kuttl-tests/barbican-worker-5c5dccb9c-s6lqt" Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.678876 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2rwfm\" (UniqueName: \"kubernetes.io/projected/03970e5f-267e-4cfa-a88e-3518d540a4b8-kube-api-access-2rwfm\") pod 
\"barbican-keystone-listener-5f989b5ff4-h8dzk\" (UID: \"03970e5f-267e-4cfa-a88e-3518d540a4b8\") " pod="barbican-kuttl-tests/barbican-keystone-listener-5f989b5ff4-h8dzk" Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.678946 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4b5de5a6-1bc7-455d-9055-5d939864f701-config-data-custom\") pod \"barbican-worker-5c5dccb9c-s6lqt\" (UID: \"4b5de5a6-1bc7-455d-9055-5d939864f701\") " pod="barbican-kuttl-tests/barbican-worker-5c5dccb9c-s6lqt" Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.679008 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03970e5f-267e-4cfa-a88e-3518d540a4b8-config-data\") pod \"barbican-keystone-listener-5f989b5ff4-h8dzk\" (UID: \"03970e5f-267e-4cfa-a88e-3518d540a4b8\") " pod="barbican-kuttl-tests/barbican-keystone-listener-5f989b5ff4-h8dzk" Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.679107 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b5de5a6-1bc7-455d-9055-5d939864f701-config-data\") pod \"barbican-worker-5c5dccb9c-s6lqt\" (UID: \"4b5de5a6-1bc7-455d-9055-5d939864f701\") " pod="barbican-kuttl-tests/barbican-worker-5c5dccb9c-s6lqt" Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.679199 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/03970e5f-267e-4cfa-a88e-3518d540a4b8-config-data-custom\") pod \"barbican-keystone-listener-5f989b5ff4-h8dzk\" (UID: \"03970e5f-267e-4cfa-a88e-3518d540a4b8\") " pod="barbican-kuttl-tests/barbican-keystone-listener-5f989b5ff4-h8dzk" Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.679464 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4b5de5a6-1bc7-455d-9055-5d939864f701-logs\") pod \"barbican-worker-5c5dccb9c-s6lqt\" (UID: \"4b5de5a6-1bc7-455d-9055-5d939864f701\") " pod="barbican-kuttl-tests/barbican-worker-5c5dccb9c-s6lqt" Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.683064 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03970e5f-267e-4cfa-a88e-3518d540a4b8-combined-ca-bundle\") pod \"barbican-keystone-listener-5f989b5ff4-h8dzk\" (UID: \"03970e5f-267e-4cfa-a88e-3518d540a4b8\") " pod="barbican-kuttl-tests/barbican-keystone-listener-5f989b5ff4-h8dzk" Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.683954 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03970e5f-267e-4cfa-a88e-3518d540a4b8-config-data\") pod \"barbican-keystone-listener-5f989b5ff4-h8dzk\" (UID: \"03970e5f-267e-4cfa-a88e-3518d540a4b8\") " pod="barbican-kuttl-tests/barbican-keystone-listener-5f989b5ff4-h8dzk" Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.684252 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b5de5a6-1bc7-455d-9055-5d939864f701-combined-ca-bundle\") pod \"barbican-worker-5c5dccb9c-s6lqt\" (UID: \"4b5de5a6-1bc7-455d-9055-5d939864f701\") " pod="barbican-kuttl-tests/barbican-worker-5c5dccb9c-s6lqt" Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.685472 4558 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b5de5a6-1bc7-455d-9055-5d939864f701-config-data\") pod \"barbican-worker-5c5dccb9c-s6lqt\" (UID: \"4b5de5a6-1bc7-455d-9055-5d939864f701\") " pod="barbican-kuttl-tests/barbican-worker-5c5dccb9c-s6lqt" Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.686133 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/03970e5f-267e-4cfa-a88e-3518d540a4b8-config-data-custom\") pod \"barbican-keystone-listener-5f989b5ff4-h8dzk\" (UID: \"03970e5f-267e-4cfa-a88e-3518d540a4b8\") " pod="barbican-kuttl-tests/barbican-keystone-listener-5f989b5ff4-h8dzk" Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.689718 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4b5de5a6-1bc7-455d-9055-5d939864f701-config-data-custom\") pod \"barbican-worker-5c5dccb9c-s6lqt\" (UID: \"4b5de5a6-1bc7-455d-9055-5d939864f701\") " pod="barbican-kuttl-tests/barbican-worker-5c5dccb9c-s6lqt" Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.694624 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2rwfm\" (UniqueName: \"kubernetes.io/projected/03970e5f-267e-4cfa-a88e-3518d540a4b8-kube-api-access-2rwfm\") pod \"barbican-keystone-listener-5f989b5ff4-h8dzk\" (UID: \"03970e5f-267e-4cfa-a88e-3518d540a4b8\") " pod="barbican-kuttl-tests/barbican-keystone-listener-5f989b5ff4-h8dzk" Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.695119 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7jr9t\" (UniqueName: \"kubernetes.io/projected/4b5de5a6-1bc7-455d-9055-5d939864f701-kube-api-access-7jr9t\") pod \"barbican-worker-5c5dccb9c-s6lqt\" (UID: \"4b5de5a6-1bc7-455d-9055-5d939864f701\") " pod="barbican-kuttl-tests/barbican-worker-5c5dccb9c-s6lqt" Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.780186 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4n2m8\" (UniqueName: \"kubernetes.io/projected/8d489296-e372-494e-842e-4449e34e042e-kube-api-access-4n2m8\") pod \"barbican-api-587cdccff4-jgkrc\" (UID: \"8d489296-e372-494e-842e-4449e34e042e\") " pod="barbican-kuttl-tests/barbican-api-587cdccff4-jgkrc" Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.780427 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d489296-e372-494e-842e-4449e34e042e-config-data\") pod \"barbican-api-587cdccff4-jgkrc\" (UID: \"8d489296-e372-494e-842e-4449e34e042e\") " pod="barbican-kuttl-tests/barbican-api-587cdccff4-jgkrc" Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.780530 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8d489296-e372-494e-842e-4449e34e042e-public-tls-certs\") pod \"barbican-api-587cdccff4-jgkrc\" (UID: \"8d489296-e372-494e-842e-4449e34e042e\") " pod="barbican-kuttl-tests/barbican-api-587cdccff4-jgkrc" Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.780631 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8d489296-e372-494e-842e-4449e34e042e-logs\") pod \"barbican-api-587cdccff4-jgkrc\" (UID: 
\"8d489296-e372-494e-842e-4449e34e042e\") " pod="barbican-kuttl-tests/barbican-api-587cdccff4-jgkrc" Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.780720 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8d489296-e372-494e-842e-4449e34e042e-config-data-custom\") pod \"barbican-api-587cdccff4-jgkrc\" (UID: \"8d489296-e372-494e-842e-4449e34e042e\") " pod="barbican-kuttl-tests/barbican-api-587cdccff4-jgkrc" Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.780794 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8d489296-e372-494e-842e-4449e34e042e-internal-tls-certs\") pod \"barbican-api-587cdccff4-jgkrc\" (UID: \"8d489296-e372-494e-842e-4449e34e042e\") " pod="barbican-kuttl-tests/barbican-api-587cdccff4-jgkrc" Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.780864 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d489296-e372-494e-842e-4449e34e042e-combined-ca-bundle\") pod \"barbican-api-587cdccff4-jgkrc\" (UID: \"8d489296-e372-494e-842e-4449e34e042e\") " pod="barbican-kuttl-tests/barbican-api-587cdccff4-jgkrc" Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.881627 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8d489296-e372-494e-842e-4449e34e042e-internal-tls-certs\") pod \"barbican-api-587cdccff4-jgkrc\" (UID: \"8d489296-e372-494e-842e-4449e34e042e\") " pod="barbican-kuttl-tests/barbican-api-587cdccff4-jgkrc" Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.881691 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d489296-e372-494e-842e-4449e34e042e-combined-ca-bundle\") pod \"barbican-api-587cdccff4-jgkrc\" (UID: \"8d489296-e372-494e-842e-4449e34e042e\") " pod="barbican-kuttl-tests/barbican-api-587cdccff4-jgkrc" Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.881743 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4n2m8\" (UniqueName: \"kubernetes.io/projected/8d489296-e372-494e-842e-4449e34e042e-kube-api-access-4n2m8\") pod \"barbican-api-587cdccff4-jgkrc\" (UID: \"8d489296-e372-494e-842e-4449e34e042e\") " pod="barbican-kuttl-tests/barbican-api-587cdccff4-jgkrc" Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.881804 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d489296-e372-494e-842e-4449e34e042e-config-data\") pod \"barbican-api-587cdccff4-jgkrc\" (UID: \"8d489296-e372-494e-842e-4449e34e042e\") " pod="barbican-kuttl-tests/barbican-api-587cdccff4-jgkrc" Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.881857 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8d489296-e372-494e-842e-4449e34e042e-public-tls-certs\") pod \"barbican-api-587cdccff4-jgkrc\" (UID: \"8d489296-e372-494e-842e-4449e34e042e\") " pod="barbican-kuttl-tests/barbican-api-587cdccff4-jgkrc" Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.882150 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="barbican-kuttl-tests/barbican-worker-5c5dccb9c-s6lqt" Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.882359 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8d489296-e372-494e-842e-4449e34e042e-logs\") pod \"barbican-api-587cdccff4-jgkrc\" (UID: \"8d489296-e372-494e-842e-4449e34e042e\") " pod="barbican-kuttl-tests/barbican-api-587cdccff4-jgkrc" Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.882419 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8d489296-e372-494e-842e-4449e34e042e-logs\") pod \"barbican-api-587cdccff4-jgkrc\" (UID: \"8d489296-e372-494e-842e-4449e34e042e\") " pod="barbican-kuttl-tests/barbican-api-587cdccff4-jgkrc" Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.882652 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8d489296-e372-494e-842e-4449e34e042e-config-data-custom\") pod \"barbican-api-587cdccff4-jgkrc\" (UID: \"8d489296-e372-494e-842e-4449e34e042e\") " pod="barbican-kuttl-tests/barbican-api-587cdccff4-jgkrc" Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.887030 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8d489296-e372-494e-842e-4449e34e042e-config-data-custom\") pod \"barbican-api-587cdccff4-jgkrc\" (UID: \"8d489296-e372-494e-842e-4449e34e042e\") " pod="barbican-kuttl-tests/barbican-api-587cdccff4-jgkrc" Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.887212 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="barbican-kuttl-tests/barbican-keystone-listener-5f989b5ff4-h8dzk" Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.887889 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8d489296-e372-494e-842e-4449e34e042e-internal-tls-certs\") pod \"barbican-api-587cdccff4-jgkrc\" (UID: \"8d489296-e372-494e-842e-4449e34e042e\") " pod="barbican-kuttl-tests/barbican-api-587cdccff4-jgkrc" Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.888382 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d489296-e372-494e-842e-4449e34e042e-combined-ca-bundle\") pod \"barbican-api-587cdccff4-jgkrc\" (UID: \"8d489296-e372-494e-842e-4449e34e042e\") " pod="barbican-kuttl-tests/barbican-api-587cdccff4-jgkrc" Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.888719 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d489296-e372-494e-842e-4449e34e042e-config-data\") pod \"barbican-api-587cdccff4-jgkrc\" (UID: \"8d489296-e372-494e-842e-4449e34e042e\") " pod="barbican-kuttl-tests/barbican-api-587cdccff4-jgkrc" Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.891683 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8d489296-e372-494e-842e-4449e34e042e-public-tls-certs\") pod \"barbican-api-587cdccff4-jgkrc\" (UID: \"8d489296-e372-494e-842e-4449e34e042e\") " pod="barbican-kuttl-tests/barbican-api-587cdccff4-jgkrc" Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.908249 4558 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-4n2m8\" (UniqueName: \"kubernetes.io/projected/8d489296-e372-494e-842e-4449e34e042e-kube-api-access-4n2m8\") pod \"barbican-api-587cdccff4-jgkrc\" (UID: \"8d489296-e372-494e-842e-4449e34e042e\") " pod="barbican-kuttl-tests/barbican-api-587cdccff4-jgkrc" Jan 20 18:19:07 crc kubenswrapper[4558]: I0120 18:19:07.961050 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="barbican-kuttl-tests/barbican-api-587cdccff4-jgkrc" Jan 20 18:19:08 crc kubenswrapper[4558]: I0120 18:19:08.232692 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["barbican-kuttl-tests/barbican-db-sync-8hnjv"] Jan 20 18:19:08 crc kubenswrapper[4558]: I0120 18:19:08.237110 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["barbican-kuttl-tests/barbican-db-sync-8hnjv"] Jan 20 18:19:08 crc kubenswrapper[4558]: I0120 18:19:08.261224 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["barbican-kuttl-tests/barbican-keystone-listener-5f989b5ff4-h8dzk"] Jan 20 18:19:08 crc kubenswrapper[4558]: I0120 18:19:08.299453 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["barbican-kuttl-tests/barbican-worker-5c5dccb9c-s6lqt"] Jan 20 18:19:08 crc kubenswrapper[4558]: I0120 18:19:08.320793 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["barbican-kuttl-tests/barbican0a40-account-delete-frf72"] Jan 20 18:19:08 crc kubenswrapper[4558]: I0120 18:19:08.322986 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="barbican-kuttl-tests/barbican0a40-account-delete-frf72" Jan 20 18:19:08 crc kubenswrapper[4558]: I0120 18:19:08.330555 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["barbican-kuttl-tests/barbican0a40-account-delete-frf72"] Jan 20 18:19:08 crc kubenswrapper[4558]: I0120 18:19:08.337440 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["barbican-kuttl-tests/barbican-api-587cdccff4-jgkrc"] Jan 20 18:19:08 crc kubenswrapper[4558]: I0120 18:19:08.342154 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["barbican-kuttl-tests/barbican-keystone-listener-5f989b5ff4-h8dzk"] Jan 20 18:19:08 crc kubenswrapper[4558]: I0120 18:19:08.354689 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["barbican-kuttl-tests/barbican-worker-5c5dccb9c-s6lqt"] Jan 20 18:19:08 crc kubenswrapper[4558]: I0120 18:19:08.400349 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/30f50240-927f-4955-a922-79e1a3866910-operator-scripts\") pod \"barbican0a40-account-delete-frf72\" (UID: \"30f50240-927f-4955-a922-79e1a3866910\") " pod="barbican-kuttl-tests/barbican0a40-account-delete-frf72" Jan 20 18:19:08 crc kubenswrapper[4558]: I0120 18:19:08.400417 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zbbv7\" (UniqueName: \"kubernetes.io/projected/30f50240-927f-4955-a922-79e1a3866910-kube-api-access-zbbv7\") pod \"barbican0a40-account-delete-frf72\" (UID: \"30f50240-927f-4955-a922-79e1a3866910\") " pod="barbican-kuttl-tests/barbican0a40-account-delete-frf72" Jan 20 18:19:08 crc kubenswrapper[4558]: I0120 18:19:08.447792 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["barbican-kuttl-tests/barbican-api-587cdccff4-jgkrc"] Jan 20 18:19:08 crc kubenswrapper[4558]: I0120 18:19:08.502964 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/30f50240-927f-4955-a922-79e1a3866910-operator-scripts\") pod \"barbican0a40-account-delete-frf72\" (UID: \"30f50240-927f-4955-a922-79e1a3866910\") " pod="barbican-kuttl-tests/barbican0a40-account-delete-frf72" Jan 20 18:19:08 crc kubenswrapper[4558]: I0120 18:19:08.503666 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/30f50240-927f-4955-a922-79e1a3866910-operator-scripts\") pod \"barbican0a40-account-delete-frf72\" (UID: \"30f50240-927f-4955-a922-79e1a3866910\") " pod="barbican-kuttl-tests/barbican0a40-account-delete-frf72" Jan 20 18:19:08 crc kubenswrapper[4558]: I0120 18:19:08.503757 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zbbv7\" (UniqueName: \"kubernetes.io/projected/30f50240-927f-4955-a922-79e1a3866910-kube-api-access-zbbv7\") pod \"barbican0a40-account-delete-frf72\" (UID: \"30f50240-927f-4955-a922-79e1a3866910\") " pod="barbican-kuttl-tests/barbican0a40-account-delete-frf72" Jan 20 18:19:08 crc kubenswrapper[4558]: I0120 18:19:08.526240 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zbbv7\" (UniqueName: \"kubernetes.io/projected/30f50240-927f-4955-a922-79e1a3866910-kube-api-access-zbbv7\") pod \"barbican0a40-account-delete-frf72\" (UID: \"30f50240-927f-4955-a922-79e1a3866910\") " pod="barbican-kuttl-tests/barbican0a40-account-delete-frf72" Jan 20 18:19:08 crc kubenswrapper[4558]: I0120 18:19:08.577348 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dfad60e4-fc8b-498d-9376-840b433c50de" path="/var/lib/kubelet/pods/dfad60e4-fc8b-498d-9376-840b433c50de/volumes" Jan 20 18:19:08 crc kubenswrapper[4558]: I0120 18:19:08.650911 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="barbican-kuttl-tests/barbican0a40-account-delete-frf72" Jan 20 18:19:09 crc kubenswrapper[4558]: I0120 18:19:09.096882 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["barbican-kuttl-tests/barbican0a40-account-delete-frf72"] Jan 20 18:19:09 crc kubenswrapper[4558]: W0120 18:19:09.100460 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod30f50240_927f_4955_a922_79e1a3866910.slice/crio-dd55da37597ab23ae015a4bb3d59867d7ced3602d89ba409ac02dec5452f99c4 WatchSource:0}: Error finding container dd55da37597ab23ae015a4bb3d59867d7ced3602d89ba409ac02dec5452f99c4: Status 404 returned error can't find the container with id dd55da37597ab23ae015a4bb3d59867d7ced3602d89ba409ac02dec5452f99c4 Jan 20 18:19:09 crc kubenswrapper[4558]: I0120 18:19:09.157114 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican-keystone-listener-5f989b5ff4-h8dzk" event={"ID":"03970e5f-267e-4cfa-a88e-3518d540a4b8","Type":"ContainerStarted","Data":"f7abfeddb8dbbc0d572cde28d6668afae411cf491466284e187e2b0e066a5982"} Jan 20 18:19:09 crc kubenswrapper[4558]: I0120 18:19:09.157190 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican-keystone-listener-5f989b5ff4-h8dzk" event={"ID":"03970e5f-267e-4cfa-a88e-3518d540a4b8","Type":"ContainerStarted","Data":"2c632375c3ea33903dc586f3c44722f957c9d8ee7733b635d682181a15433dfe"} Jan 20 18:19:09 crc kubenswrapper[4558]: I0120 18:19:09.157204 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican-keystone-listener-5f989b5ff4-h8dzk" event={"ID":"03970e5f-267e-4cfa-a88e-3518d540a4b8","Type":"ContainerStarted","Data":"25c5e1e7fe8134cd29ac44f2e3b6dc35bacba5e17618399acfe753158b44278f"} Jan 20 18:19:09 crc kubenswrapper[4558]: I0120 18:19:09.157196 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="barbican-kuttl-tests/barbican-keystone-listener-5f989b5ff4-h8dzk" podUID="03970e5f-267e-4cfa-a88e-3518d540a4b8" containerName="barbican-keystone-listener-log" containerID="cri-o://2c632375c3ea33903dc586f3c44722f957c9d8ee7733b635d682181a15433dfe" gracePeriod=30 Jan 20 18:19:09 crc kubenswrapper[4558]: I0120 18:19:09.157243 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="barbican-kuttl-tests/barbican-keystone-listener-5f989b5ff4-h8dzk" podUID="03970e5f-267e-4cfa-a88e-3518d540a4b8" containerName="barbican-keystone-listener" containerID="cri-o://f7abfeddb8dbbc0d572cde28d6668afae411cf491466284e187e2b0e066a5982" gracePeriod=30 Jan 20 18:19:09 crc kubenswrapper[4558]: I0120 18:19:09.162694 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican-worker-5c5dccb9c-s6lqt" event={"ID":"4b5de5a6-1bc7-455d-9055-5d939864f701","Type":"ContainerStarted","Data":"eb28146f8d0cc07e4845816d7561c40205a9fbabf663175fb24decfff9c47971"} Jan 20 18:19:09 crc kubenswrapper[4558]: I0120 18:19:09.162749 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican-worker-5c5dccb9c-s6lqt" event={"ID":"4b5de5a6-1bc7-455d-9055-5d939864f701","Type":"ContainerStarted","Data":"1050d20563b112182d93ccfef336028ff2a4eae5ce1ea346b2626acd791bb43d"} Jan 20 18:19:09 crc kubenswrapper[4558]: I0120 18:19:09.162763 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican-worker-5c5dccb9c-s6lqt" 
event={"ID":"4b5de5a6-1bc7-455d-9055-5d939864f701","Type":"ContainerStarted","Data":"0f66dd1b07fb43696644954f0deb411dae55d3263a9265164781f6590c5bf58b"} Jan 20 18:19:09 crc kubenswrapper[4558]: I0120 18:19:09.162779 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="barbican-kuttl-tests/barbican-worker-5c5dccb9c-s6lqt" podUID="4b5de5a6-1bc7-455d-9055-5d939864f701" containerName="barbican-worker-log" containerID="cri-o://1050d20563b112182d93ccfef336028ff2a4eae5ce1ea346b2626acd791bb43d" gracePeriod=30 Jan 20 18:19:09 crc kubenswrapper[4558]: I0120 18:19:09.162804 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="barbican-kuttl-tests/barbican-worker-5c5dccb9c-s6lqt" podUID="4b5de5a6-1bc7-455d-9055-5d939864f701" containerName="barbican-worker" containerID="cri-o://eb28146f8d0cc07e4845816d7561c40205a9fbabf663175fb24decfff9c47971" gracePeriod=30 Jan 20 18:19:09 crc kubenswrapper[4558]: I0120 18:19:09.164684 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican-api-587cdccff4-jgkrc" event={"ID":"8d489296-e372-494e-842e-4449e34e042e","Type":"ContainerStarted","Data":"c3fd47d82a720ca36ee67f71c741d3f93487b469c3e5eaeb41a232f8926cede9"} Jan 20 18:19:09 crc kubenswrapper[4558]: I0120 18:19:09.164714 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican-api-587cdccff4-jgkrc" event={"ID":"8d489296-e372-494e-842e-4449e34e042e","Type":"ContainerStarted","Data":"7d2560016309b5cd26034bf35f69400748490fd1155a54cf913cc9ab881d4ba0"} Jan 20 18:19:09 crc kubenswrapper[4558]: I0120 18:19:09.164728 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican-api-587cdccff4-jgkrc" event={"ID":"8d489296-e372-494e-842e-4449e34e042e","Type":"ContainerStarted","Data":"5a7f98d1fe4336749d89d4a5e94557a818d63906ad4528464de28342bd790af6"} Jan 20 18:19:09 crc kubenswrapper[4558]: I0120 18:19:09.164783 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="barbican-kuttl-tests/barbican-api-587cdccff4-jgkrc" podUID="8d489296-e372-494e-842e-4449e34e042e" containerName="barbican-api-log" containerID="cri-o://7d2560016309b5cd26034bf35f69400748490fd1155a54cf913cc9ab881d4ba0" gracePeriod=30 Jan 20 18:19:09 crc kubenswrapper[4558]: I0120 18:19:09.164845 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="barbican-kuttl-tests/barbican-api-587cdccff4-jgkrc" Jan 20 18:19:09 crc kubenswrapper[4558]: I0120 18:19:09.164832 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="barbican-kuttl-tests/barbican-api-587cdccff4-jgkrc" podUID="8d489296-e372-494e-842e-4449e34e042e" containerName="barbican-api" containerID="cri-o://c3fd47d82a720ca36ee67f71c741d3f93487b469c3e5eaeb41a232f8926cede9" gracePeriod=30 Jan 20 18:19:09 crc kubenswrapper[4558]: I0120 18:19:09.165900 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican0a40-account-delete-frf72" event={"ID":"30f50240-927f-4955-a922-79e1a3866910","Type":"ContainerStarted","Data":"dd55da37597ab23ae015a4bb3d59867d7ced3602d89ba409ac02dec5452f99c4"} Jan 20 18:19:09 crc kubenswrapper[4558]: I0120 18:19:09.181547 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="barbican-kuttl-tests/barbican-keystone-listener-5f989b5ff4-h8dzk" podStartSLOduration=2.181536191 podStartE2EDuration="2.181536191s" podCreationTimestamp="2026-01-20 18:19:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:19:09.172086543 +0000 UTC m=+5842.932424510" watchObservedRunningTime="2026-01-20 18:19:09.181536191 +0000 UTC m=+5842.941874158" Jan 20 18:19:09 crc kubenswrapper[4558]: I0120 18:19:09.193472 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="barbican-kuttl-tests/barbican-worker-5c5dccb9c-s6lqt" podStartSLOduration=2.19345465 podStartE2EDuration="2.19345465s" podCreationTimestamp="2026-01-20 18:19:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:19:09.191320627 +0000 UTC m=+5842.951658595" watchObservedRunningTime="2026-01-20 18:19:09.19345465 +0000 UTC m=+5842.953792617" Jan 20 18:19:09 crc kubenswrapper[4558]: I0120 18:19:09.604634 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="barbican-kuttl-tests/barbican-api-587cdccff4-jgkrc" Jan 20 18:19:09 crc kubenswrapper[4558]: I0120 18:19:09.623696 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8d489296-e372-494e-842e-4449e34e042e-internal-tls-certs\") pod \"8d489296-e372-494e-842e-4449e34e042e\" (UID: \"8d489296-e372-494e-842e-4449e34e042e\") " Jan 20 18:19:09 crc kubenswrapper[4558]: I0120 18:19:09.623780 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d489296-e372-494e-842e-4449e34e042e-combined-ca-bundle\") pod \"8d489296-e372-494e-842e-4449e34e042e\" (UID: \"8d489296-e372-494e-842e-4449e34e042e\") " Jan 20 18:19:09 crc kubenswrapper[4558]: I0120 18:19:09.623805 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4n2m8\" (UniqueName: \"kubernetes.io/projected/8d489296-e372-494e-842e-4449e34e042e-kube-api-access-4n2m8\") pod \"8d489296-e372-494e-842e-4449e34e042e\" (UID: \"8d489296-e372-494e-842e-4449e34e042e\") " Jan 20 18:19:09 crc kubenswrapper[4558]: I0120 18:19:09.623908 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8d489296-e372-494e-842e-4449e34e042e-public-tls-certs\") pod \"8d489296-e372-494e-842e-4449e34e042e\" (UID: \"8d489296-e372-494e-842e-4449e34e042e\") " Jan 20 18:19:09 crc kubenswrapper[4558]: I0120 18:19:09.623932 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d489296-e372-494e-842e-4449e34e042e-config-data\") pod \"8d489296-e372-494e-842e-4449e34e042e\" (UID: \"8d489296-e372-494e-842e-4449e34e042e\") " Jan 20 18:19:09 crc kubenswrapper[4558]: I0120 18:19:09.624006 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8d489296-e372-494e-842e-4449e34e042e-logs\") pod \"8d489296-e372-494e-842e-4449e34e042e\" (UID: \"8d489296-e372-494e-842e-4449e34e042e\") " Jan 20 18:19:09 crc kubenswrapper[4558]: I0120 18:19:09.624028 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8d489296-e372-494e-842e-4449e34e042e-config-data-custom\") pod \"8d489296-e372-494e-842e-4449e34e042e\" (UID: \"8d489296-e372-494e-842e-4449e34e042e\") " Jan 20 18:19:09 crc kubenswrapper[4558]: I0120 18:19:09.624855 4558 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8d489296-e372-494e-842e-4449e34e042e-logs" (OuterVolumeSpecName: "logs") pod "8d489296-e372-494e-842e-4449e34e042e" (UID: "8d489296-e372-494e-842e-4449e34e042e"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:19:09 crc kubenswrapper[4558]: I0120 18:19:09.626095 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8d489296-e372-494e-842e-4449e34e042e-logs\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:09 crc kubenswrapper[4558]: I0120 18:19:09.629354 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d489296-e372-494e-842e-4449e34e042e-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "8d489296-e372-494e-842e-4449e34e042e" (UID: "8d489296-e372-494e-842e-4449e34e042e"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:19:09 crc kubenswrapper[4558]: I0120 18:19:09.630080 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8d489296-e372-494e-842e-4449e34e042e-kube-api-access-4n2m8" (OuterVolumeSpecName: "kube-api-access-4n2m8") pod "8d489296-e372-494e-842e-4449e34e042e" (UID: "8d489296-e372-494e-842e-4449e34e042e"). InnerVolumeSpecName "kube-api-access-4n2m8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:19:09 crc kubenswrapper[4558]: I0120 18:19:09.643369 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d489296-e372-494e-842e-4449e34e042e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8d489296-e372-494e-842e-4449e34e042e" (UID: "8d489296-e372-494e-842e-4449e34e042e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:19:09 crc kubenswrapper[4558]: I0120 18:19:09.654387 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d489296-e372-494e-842e-4449e34e042e-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "8d489296-e372-494e-842e-4449e34e042e" (UID: "8d489296-e372-494e-842e-4449e34e042e"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:19:09 crc kubenswrapper[4558]: I0120 18:19:09.659961 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d489296-e372-494e-842e-4449e34e042e-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "8d489296-e372-494e-842e-4449e34e042e" (UID: "8d489296-e372-494e-842e-4449e34e042e"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:19:09 crc kubenswrapper[4558]: I0120 18:19:09.661673 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8d489296-e372-494e-842e-4449e34e042e-config-data" (OuterVolumeSpecName: "config-data") pod "8d489296-e372-494e-842e-4449e34e042e" (UID: "8d489296-e372-494e-842e-4449e34e042e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:19:09 crc kubenswrapper[4558]: I0120 18:19:09.727851 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8d489296-e372-494e-842e-4449e34e042e-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:09 crc kubenswrapper[4558]: I0120 18:19:09.727894 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d489296-e372-494e-842e-4449e34e042e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:09 crc kubenswrapper[4558]: I0120 18:19:09.727911 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4n2m8\" (UniqueName: \"kubernetes.io/projected/8d489296-e372-494e-842e-4449e34e042e-kube-api-access-4n2m8\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:09 crc kubenswrapper[4558]: I0120 18:19:09.727926 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8d489296-e372-494e-842e-4449e34e042e-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:09 crc kubenswrapper[4558]: I0120 18:19:09.727945 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8d489296-e372-494e-842e-4449e34e042e-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:09 crc kubenswrapper[4558]: I0120 18:19:09.727956 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8d489296-e372-494e-842e-4449e34e042e-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:10 crc kubenswrapper[4558]: I0120 18:19:10.180398 4558 generic.go:334] "Generic (PLEG): container finished" podID="03970e5f-267e-4cfa-a88e-3518d540a4b8" containerID="f7abfeddb8dbbc0d572cde28d6668afae411cf491466284e187e2b0e066a5982" exitCode=1 Jan 20 18:19:10 crc kubenswrapper[4558]: I0120 18:19:10.180730 4558 generic.go:334] "Generic (PLEG): container finished" podID="03970e5f-267e-4cfa-a88e-3518d540a4b8" containerID="2c632375c3ea33903dc586f3c44722f957c9d8ee7733b635d682181a15433dfe" exitCode=143 Jan 20 18:19:10 crc kubenswrapper[4558]: I0120 18:19:10.180522 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican-keystone-listener-5f989b5ff4-h8dzk" event={"ID":"03970e5f-267e-4cfa-a88e-3518d540a4b8","Type":"ContainerDied","Data":"f7abfeddb8dbbc0d572cde28d6668afae411cf491466284e187e2b0e066a5982"} Jan 20 18:19:10 crc kubenswrapper[4558]: I0120 18:19:10.180816 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican-keystone-listener-5f989b5ff4-h8dzk" event={"ID":"03970e5f-267e-4cfa-a88e-3518d540a4b8","Type":"ContainerDied","Data":"2c632375c3ea33903dc586f3c44722f957c9d8ee7733b635d682181a15433dfe"} Jan 20 18:19:10 crc kubenswrapper[4558]: I0120 18:19:10.182869 4558 generic.go:334] "Generic (PLEG): container finished" podID="4b5de5a6-1bc7-455d-9055-5d939864f701" containerID="eb28146f8d0cc07e4845816d7561c40205a9fbabf663175fb24decfff9c47971" exitCode=1 Jan 20 18:19:10 crc kubenswrapper[4558]: I0120 18:19:10.182891 4558 generic.go:334] "Generic (PLEG): container finished" podID="4b5de5a6-1bc7-455d-9055-5d939864f701" containerID="1050d20563b112182d93ccfef336028ff2a4eae5ce1ea346b2626acd791bb43d" exitCode=143 Jan 20 18:19:10 crc kubenswrapper[4558]: I0120 18:19:10.182945 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="barbican-kuttl-tests/barbican-worker-5c5dccb9c-s6lqt" event={"ID":"4b5de5a6-1bc7-455d-9055-5d939864f701","Type":"ContainerDied","Data":"eb28146f8d0cc07e4845816d7561c40205a9fbabf663175fb24decfff9c47971"} Jan 20 18:19:10 crc kubenswrapper[4558]: I0120 18:19:10.182966 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican-worker-5c5dccb9c-s6lqt" event={"ID":"4b5de5a6-1bc7-455d-9055-5d939864f701","Type":"ContainerDied","Data":"1050d20563b112182d93ccfef336028ff2a4eae5ce1ea346b2626acd791bb43d"} Jan 20 18:19:10 crc kubenswrapper[4558]: I0120 18:19:10.185208 4558 generic.go:334] "Generic (PLEG): container finished" podID="8d489296-e372-494e-842e-4449e34e042e" containerID="c3fd47d82a720ca36ee67f71c741d3f93487b469c3e5eaeb41a232f8926cede9" exitCode=0 Jan 20 18:19:10 crc kubenswrapper[4558]: I0120 18:19:10.185227 4558 generic.go:334] "Generic (PLEG): container finished" podID="8d489296-e372-494e-842e-4449e34e042e" containerID="7d2560016309b5cd26034bf35f69400748490fd1155a54cf913cc9ab881d4ba0" exitCode=143 Jan 20 18:19:10 crc kubenswrapper[4558]: I0120 18:19:10.185288 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican-api-587cdccff4-jgkrc" event={"ID":"8d489296-e372-494e-842e-4449e34e042e","Type":"ContainerDied","Data":"c3fd47d82a720ca36ee67f71c741d3f93487b469c3e5eaeb41a232f8926cede9"} Jan 20 18:19:10 crc kubenswrapper[4558]: I0120 18:19:10.185374 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican-api-587cdccff4-jgkrc" event={"ID":"8d489296-e372-494e-842e-4449e34e042e","Type":"ContainerDied","Data":"7d2560016309b5cd26034bf35f69400748490fd1155a54cf913cc9ab881d4ba0"} Jan 20 18:19:10 crc kubenswrapper[4558]: I0120 18:19:10.185314 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="barbican-kuttl-tests/barbican-api-587cdccff4-jgkrc" Jan 20 18:19:10 crc kubenswrapper[4558]: I0120 18:19:10.185396 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican-api-587cdccff4-jgkrc" event={"ID":"8d489296-e372-494e-842e-4449e34e042e","Type":"ContainerDied","Data":"5a7f98d1fe4336749d89d4a5e94557a818d63906ad4528464de28342bd790af6"} Jan 20 18:19:10 crc kubenswrapper[4558]: I0120 18:19:10.185431 4558 scope.go:117] "RemoveContainer" containerID="c3fd47d82a720ca36ee67f71c741d3f93487b469c3e5eaeb41a232f8926cede9" Jan 20 18:19:10 crc kubenswrapper[4558]: I0120 18:19:10.187588 4558 generic.go:334] "Generic (PLEG): container finished" podID="30f50240-927f-4955-a922-79e1a3866910" containerID="e683c32b7cb9370e6a26cdd7f5ae9fef1c98049813d8a405bcfb2ea1326729ab" exitCode=0 Jan 20 18:19:10 crc kubenswrapper[4558]: I0120 18:19:10.187632 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican0a40-account-delete-frf72" event={"ID":"30f50240-927f-4955-a922-79e1a3866910","Type":"ContainerDied","Data":"e683c32b7cb9370e6a26cdd7f5ae9fef1c98049813d8a405bcfb2ea1326729ab"} Jan 20 18:19:10 crc kubenswrapper[4558]: I0120 18:19:10.218703 4558 scope.go:117] "RemoveContainer" containerID="7d2560016309b5cd26034bf35f69400748490fd1155a54cf913cc9ab881d4ba0" Jan 20 18:19:10 crc kubenswrapper[4558]: I0120 18:19:10.236372 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["barbican-kuttl-tests/barbican-api-587cdccff4-jgkrc"] Jan 20 18:19:10 crc kubenswrapper[4558]: I0120 18:19:10.242959 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["barbican-kuttl-tests/barbican-api-587cdccff4-jgkrc"] Jan 20 18:19:10 crc kubenswrapper[4558]: I0120 18:19:10.272794 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="barbican-kuttl-tests/barbican-keystone-listener-5f989b5ff4-h8dzk" Jan 20 18:19:10 crc kubenswrapper[4558]: I0120 18:19:10.274731 4558 scope.go:117] "RemoveContainer" containerID="c3fd47d82a720ca36ee67f71c741d3f93487b469c3e5eaeb41a232f8926cede9" Jan 20 18:19:10 crc kubenswrapper[4558]: E0120 18:19:10.275142 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c3fd47d82a720ca36ee67f71c741d3f93487b469c3e5eaeb41a232f8926cede9\": container with ID starting with c3fd47d82a720ca36ee67f71c741d3f93487b469c3e5eaeb41a232f8926cede9 not found: ID does not exist" containerID="c3fd47d82a720ca36ee67f71c741d3f93487b469c3e5eaeb41a232f8926cede9" Jan 20 18:19:10 crc kubenswrapper[4558]: I0120 18:19:10.275260 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c3fd47d82a720ca36ee67f71c741d3f93487b469c3e5eaeb41a232f8926cede9"} err="failed to get container status \"c3fd47d82a720ca36ee67f71c741d3f93487b469c3e5eaeb41a232f8926cede9\": rpc error: code = NotFound desc = could not find container \"c3fd47d82a720ca36ee67f71c741d3f93487b469c3e5eaeb41a232f8926cede9\": container with ID starting with c3fd47d82a720ca36ee67f71c741d3f93487b469c3e5eaeb41a232f8926cede9 not found: ID does not exist" Jan 20 18:19:10 crc kubenswrapper[4558]: I0120 18:19:10.275344 4558 scope.go:117] "RemoveContainer" containerID="7d2560016309b5cd26034bf35f69400748490fd1155a54cf913cc9ab881d4ba0" Jan 20 18:19:10 crc kubenswrapper[4558]: E0120 18:19:10.275721 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7d2560016309b5cd26034bf35f69400748490fd1155a54cf913cc9ab881d4ba0\": container with ID starting with 7d2560016309b5cd26034bf35f69400748490fd1155a54cf913cc9ab881d4ba0 not found: ID does not exist" containerID="7d2560016309b5cd26034bf35f69400748490fd1155a54cf913cc9ab881d4ba0" Jan 20 18:19:10 crc kubenswrapper[4558]: I0120 18:19:10.275758 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7d2560016309b5cd26034bf35f69400748490fd1155a54cf913cc9ab881d4ba0"} err="failed to get container status \"7d2560016309b5cd26034bf35f69400748490fd1155a54cf913cc9ab881d4ba0\": rpc error: code = NotFound desc = could not find container \"7d2560016309b5cd26034bf35f69400748490fd1155a54cf913cc9ab881d4ba0\": container with ID starting with 7d2560016309b5cd26034bf35f69400748490fd1155a54cf913cc9ab881d4ba0 not found: ID does not exist" Jan 20 18:19:10 crc kubenswrapper[4558]: I0120 18:19:10.275783 4558 scope.go:117] "RemoveContainer" containerID="c3fd47d82a720ca36ee67f71c741d3f93487b469c3e5eaeb41a232f8926cede9" Jan 20 18:19:10 crc kubenswrapper[4558]: I0120 18:19:10.276140 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c3fd47d82a720ca36ee67f71c741d3f93487b469c3e5eaeb41a232f8926cede9"} err="failed to get container status \"c3fd47d82a720ca36ee67f71c741d3f93487b469c3e5eaeb41a232f8926cede9\": rpc error: code = NotFound desc = could not find container \"c3fd47d82a720ca36ee67f71c741d3f93487b469c3e5eaeb41a232f8926cede9\": container with ID starting with c3fd47d82a720ca36ee67f71c741d3f93487b469c3e5eaeb41a232f8926cede9 not found: ID does not exist" Jan 20 18:19:10 crc kubenswrapper[4558]: I0120 18:19:10.276235 4558 scope.go:117] "RemoveContainer" containerID="7d2560016309b5cd26034bf35f69400748490fd1155a54cf913cc9ab881d4ba0" Jan 20 18:19:10 crc 
kubenswrapper[4558]: I0120 18:19:10.276585 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7d2560016309b5cd26034bf35f69400748490fd1155a54cf913cc9ab881d4ba0"} err="failed to get container status \"7d2560016309b5cd26034bf35f69400748490fd1155a54cf913cc9ab881d4ba0\": rpc error: code = NotFound desc = could not find container \"7d2560016309b5cd26034bf35f69400748490fd1155a54cf913cc9ab881d4ba0\": container with ID starting with 7d2560016309b5cd26034bf35f69400748490fd1155a54cf913cc9ab881d4ba0 not found: ID does not exist" Jan 20 18:19:10 crc kubenswrapper[4558]: I0120 18:19:10.277508 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="barbican-kuttl-tests/barbican-worker-5c5dccb9c-s6lqt" Jan 20 18:19:10 crc kubenswrapper[4558]: I0120 18:19:10.334382 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7jr9t\" (UniqueName: \"kubernetes.io/projected/4b5de5a6-1bc7-455d-9055-5d939864f701-kube-api-access-7jr9t\") pod \"4b5de5a6-1bc7-455d-9055-5d939864f701\" (UID: \"4b5de5a6-1bc7-455d-9055-5d939864f701\") " Jan 20 18:19:10 crc kubenswrapper[4558]: I0120 18:19:10.334616 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b5de5a6-1bc7-455d-9055-5d939864f701-config-data\") pod \"4b5de5a6-1bc7-455d-9055-5d939864f701\" (UID: \"4b5de5a6-1bc7-455d-9055-5d939864f701\") " Jan 20 18:19:10 crc kubenswrapper[4558]: I0120 18:19:10.334660 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4b5de5a6-1bc7-455d-9055-5d939864f701-config-data-custom\") pod \"4b5de5a6-1bc7-455d-9055-5d939864f701\" (UID: \"4b5de5a6-1bc7-455d-9055-5d939864f701\") " Jan 20 18:19:10 crc kubenswrapper[4558]: I0120 18:19:10.334683 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b5de5a6-1bc7-455d-9055-5d939864f701-combined-ca-bundle\") pod \"4b5de5a6-1bc7-455d-9055-5d939864f701\" (UID: \"4b5de5a6-1bc7-455d-9055-5d939864f701\") " Jan 20 18:19:10 crc kubenswrapper[4558]: I0120 18:19:10.334801 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4b5de5a6-1bc7-455d-9055-5d939864f701-logs\") pod \"4b5de5a6-1bc7-455d-9055-5d939864f701\" (UID: \"4b5de5a6-1bc7-455d-9055-5d939864f701\") " Jan 20 18:19:10 crc kubenswrapper[4558]: I0120 18:19:10.334840 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03970e5f-267e-4cfa-a88e-3518d540a4b8-combined-ca-bundle\") pod \"03970e5f-267e-4cfa-a88e-3518d540a4b8\" (UID: \"03970e5f-267e-4cfa-a88e-3518d540a4b8\") " Jan 20 18:19:10 crc kubenswrapper[4558]: I0120 18:19:10.334862 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2rwfm\" (UniqueName: \"kubernetes.io/projected/03970e5f-267e-4cfa-a88e-3518d540a4b8-kube-api-access-2rwfm\") pod \"03970e5f-267e-4cfa-a88e-3518d540a4b8\" (UID: \"03970e5f-267e-4cfa-a88e-3518d540a4b8\") " Jan 20 18:19:10 crc kubenswrapper[4558]: I0120 18:19:10.334962 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03970e5f-267e-4cfa-a88e-3518d540a4b8-config-data\") pod 
\"03970e5f-267e-4cfa-a88e-3518d540a4b8\" (UID: \"03970e5f-267e-4cfa-a88e-3518d540a4b8\") " Jan 20 18:19:10 crc kubenswrapper[4558]: I0120 18:19:10.334990 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/03970e5f-267e-4cfa-a88e-3518d540a4b8-logs\") pod \"03970e5f-267e-4cfa-a88e-3518d540a4b8\" (UID: \"03970e5f-267e-4cfa-a88e-3518d540a4b8\") " Jan 20 18:19:10 crc kubenswrapper[4558]: I0120 18:19:10.335031 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/03970e5f-267e-4cfa-a88e-3518d540a4b8-config-data-custom\") pod \"03970e5f-267e-4cfa-a88e-3518d540a4b8\" (UID: \"03970e5f-267e-4cfa-a88e-3518d540a4b8\") " Jan 20 18:19:10 crc kubenswrapper[4558]: I0120 18:19:10.335880 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/03970e5f-267e-4cfa-a88e-3518d540a4b8-logs" (OuterVolumeSpecName: "logs") pod "03970e5f-267e-4cfa-a88e-3518d540a4b8" (UID: "03970e5f-267e-4cfa-a88e-3518d540a4b8"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:19:10 crc kubenswrapper[4558]: I0120 18:19:10.335958 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4b5de5a6-1bc7-455d-9055-5d939864f701-logs" (OuterVolumeSpecName: "logs") pod "4b5de5a6-1bc7-455d-9055-5d939864f701" (UID: "4b5de5a6-1bc7-455d-9055-5d939864f701"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:19:10 crc kubenswrapper[4558]: I0120 18:19:10.336293 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/03970e5f-267e-4cfa-a88e-3518d540a4b8-logs\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:10 crc kubenswrapper[4558]: I0120 18:19:10.336318 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4b5de5a6-1bc7-455d-9055-5d939864f701-logs\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:10 crc kubenswrapper[4558]: I0120 18:19:10.338508 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4b5de5a6-1bc7-455d-9055-5d939864f701-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "4b5de5a6-1bc7-455d-9055-5d939864f701" (UID: "4b5de5a6-1bc7-455d-9055-5d939864f701"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:19:10 crc kubenswrapper[4558]: I0120 18:19:10.338761 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4b5de5a6-1bc7-455d-9055-5d939864f701-kube-api-access-7jr9t" (OuterVolumeSpecName: "kube-api-access-7jr9t") pod "4b5de5a6-1bc7-455d-9055-5d939864f701" (UID: "4b5de5a6-1bc7-455d-9055-5d939864f701"). InnerVolumeSpecName "kube-api-access-7jr9t". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:19:10 crc kubenswrapper[4558]: I0120 18:19:10.339043 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/03970e5f-267e-4cfa-a88e-3518d540a4b8-kube-api-access-2rwfm" (OuterVolumeSpecName: "kube-api-access-2rwfm") pod "03970e5f-267e-4cfa-a88e-3518d540a4b8" (UID: "03970e5f-267e-4cfa-a88e-3518d540a4b8"). InnerVolumeSpecName "kube-api-access-2rwfm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:19:10 crc kubenswrapper[4558]: I0120 18:19:10.339376 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/03970e5f-267e-4cfa-a88e-3518d540a4b8-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "03970e5f-267e-4cfa-a88e-3518d540a4b8" (UID: "03970e5f-267e-4cfa-a88e-3518d540a4b8"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:19:10 crc kubenswrapper[4558]: I0120 18:19:10.352694 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4b5de5a6-1bc7-455d-9055-5d939864f701-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4b5de5a6-1bc7-455d-9055-5d939864f701" (UID: "4b5de5a6-1bc7-455d-9055-5d939864f701"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:19:10 crc kubenswrapper[4558]: I0120 18:19:10.357929 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/03970e5f-267e-4cfa-a88e-3518d540a4b8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "03970e5f-267e-4cfa-a88e-3518d540a4b8" (UID: "03970e5f-267e-4cfa-a88e-3518d540a4b8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:19:10 crc kubenswrapper[4558]: I0120 18:19:10.368541 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4b5de5a6-1bc7-455d-9055-5d939864f701-config-data" (OuterVolumeSpecName: "config-data") pod "4b5de5a6-1bc7-455d-9055-5d939864f701" (UID: "4b5de5a6-1bc7-455d-9055-5d939864f701"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:19:10 crc kubenswrapper[4558]: I0120 18:19:10.371958 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/03970e5f-267e-4cfa-a88e-3518d540a4b8-config-data" (OuterVolumeSpecName: "config-data") pod "03970e5f-267e-4cfa-a88e-3518d540a4b8" (UID: "03970e5f-267e-4cfa-a88e-3518d540a4b8"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:19:10 crc kubenswrapper[4558]: I0120 18:19:10.437728 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4b5de5a6-1bc7-455d-9055-5d939864f701-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:10 crc kubenswrapper[4558]: I0120 18:19:10.437825 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4b5de5a6-1bc7-455d-9055-5d939864f701-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:10 crc kubenswrapper[4558]: I0120 18:19:10.437883 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4b5de5a6-1bc7-455d-9055-5d939864f701-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:10 crc kubenswrapper[4558]: I0120 18:19:10.437957 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03970e5f-267e-4cfa-a88e-3518d540a4b8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:10 crc kubenswrapper[4558]: I0120 18:19:10.438013 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2rwfm\" (UniqueName: \"kubernetes.io/projected/03970e5f-267e-4cfa-a88e-3518d540a4b8-kube-api-access-2rwfm\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:10 crc kubenswrapper[4558]: I0120 18:19:10.438058 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/03970e5f-267e-4cfa-a88e-3518d540a4b8-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:10 crc kubenswrapper[4558]: I0120 18:19:10.438103 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/03970e5f-267e-4cfa-a88e-3518d540a4b8-config-data-custom\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:10 crc kubenswrapper[4558]: I0120 18:19:10.438147 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7jr9t\" (UniqueName: \"kubernetes.io/projected/4b5de5a6-1bc7-455d-9055-5d939864f701-kube-api-access-7jr9t\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:10 crc kubenswrapper[4558]: I0120 18:19:10.565816 4558 scope.go:117] "RemoveContainer" containerID="27314e1a8af2e8df8ecd2e7a06cbb630afbfecd8ca2338a1f199cf92f982f581" Jan 20 18:19:10 crc kubenswrapper[4558]: E0120 18:19:10.566141 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:19:10 crc kubenswrapper[4558]: I0120 18:19:10.575876 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8d489296-e372-494e-842e-4449e34e042e" path="/var/lib/kubelet/pods/8d489296-e372-494e-842e-4449e34e042e/volumes" Jan 20 18:19:11 crc kubenswrapper[4558]: I0120 18:19:11.202785 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="barbican-kuttl-tests/barbican-keystone-listener-5f989b5ff4-h8dzk" Jan 20 18:19:11 crc kubenswrapper[4558]: I0120 18:19:11.202777 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican-keystone-listener-5f989b5ff4-h8dzk" event={"ID":"03970e5f-267e-4cfa-a88e-3518d540a4b8","Type":"ContainerDied","Data":"25c5e1e7fe8134cd29ac44f2e3b6dc35bacba5e17618399acfe753158b44278f"} Jan 20 18:19:11 crc kubenswrapper[4558]: I0120 18:19:11.203356 4558 scope.go:117] "RemoveContainer" containerID="f7abfeddb8dbbc0d572cde28d6668afae411cf491466284e187e2b0e066a5982" Jan 20 18:19:11 crc kubenswrapper[4558]: I0120 18:19:11.207432 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican-worker-5c5dccb9c-s6lqt" event={"ID":"4b5de5a6-1bc7-455d-9055-5d939864f701","Type":"ContainerDied","Data":"0f66dd1b07fb43696644954f0deb411dae55d3263a9265164781f6590c5bf58b"} Jan 20 18:19:11 crc kubenswrapper[4558]: I0120 18:19:11.207501 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="barbican-kuttl-tests/barbican-worker-5c5dccb9c-s6lqt" Jan 20 18:19:11 crc kubenswrapper[4558]: I0120 18:19:11.233508 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["barbican-kuttl-tests/barbican-keystone-listener-5f989b5ff4-h8dzk"] Jan 20 18:19:11 crc kubenswrapper[4558]: I0120 18:19:11.240892 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["barbican-kuttl-tests/barbican-keystone-listener-5f989b5ff4-h8dzk"] Jan 20 18:19:11 crc kubenswrapper[4558]: I0120 18:19:11.242930 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["barbican-kuttl-tests/barbican-worker-5c5dccb9c-s6lqt"] Jan 20 18:19:11 crc kubenswrapper[4558]: I0120 18:19:11.245526 4558 scope.go:117] "RemoveContainer" containerID="2c632375c3ea33903dc586f3c44722f957c9d8ee7733b635d682181a15433dfe" Jan 20 18:19:11 crc kubenswrapper[4558]: I0120 18:19:11.247636 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["barbican-kuttl-tests/barbican-worker-5c5dccb9c-s6lqt"] Jan 20 18:19:11 crc kubenswrapper[4558]: I0120 18:19:11.262332 4558 scope.go:117] "RemoveContainer" containerID="eb28146f8d0cc07e4845816d7561c40205a9fbabf663175fb24decfff9c47971" Jan 20 18:19:11 crc kubenswrapper[4558]: I0120 18:19:11.279201 4558 scope.go:117] "RemoveContainer" containerID="1050d20563b112182d93ccfef336028ff2a4eae5ce1ea346b2626acd791bb43d" Jan 20 18:19:11 crc kubenswrapper[4558]: I0120 18:19:11.466271 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="barbican-kuttl-tests/barbican0a40-account-delete-frf72" Jan 20 18:19:11 crc kubenswrapper[4558]: I0120 18:19:11.555674 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zbbv7\" (UniqueName: \"kubernetes.io/projected/30f50240-927f-4955-a922-79e1a3866910-kube-api-access-zbbv7\") pod \"30f50240-927f-4955-a922-79e1a3866910\" (UID: \"30f50240-927f-4955-a922-79e1a3866910\") " Jan 20 18:19:11 crc kubenswrapper[4558]: I0120 18:19:11.555755 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/30f50240-927f-4955-a922-79e1a3866910-operator-scripts\") pod \"30f50240-927f-4955-a922-79e1a3866910\" (UID: \"30f50240-927f-4955-a922-79e1a3866910\") " Jan 20 18:19:11 crc kubenswrapper[4558]: I0120 18:19:11.556648 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/30f50240-927f-4955-a922-79e1a3866910-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "30f50240-927f-4955-a922-79e1a3866910" (UID: "30f50240-927f-4955-a922-79e1a3866910"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:19:11 crc kubenswrapper[4558]: I0120 18:19:11.561008 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/30f50240-927f-4955-a922-79e1a3866910-kube-api-access-zbbv7" (OuterVolumeSpecName: "kube-api-access-zbbv7") pod "30f50240-927f-4955-a922-79e1a3866910" (UID: "30f50240-927f-4955-a922-79e1a3866910"). InnerVolumeSpecName "kube-api-access-zbbv7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:19:11 crc kubenswrapper[4558]: I0120 18:19:11.658297 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zbbv7\" (UniqueName: \"kubernetes.io/projected/30f50240-927f-4955-a922-79e1a3866910-kube-api-access-zbbv7\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:11 crc kubenswrapper[4558]: I0120 18:19:11.658332 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/30f50240-927f-4955-a922-79e1a3866910-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:12 crc kubenswrapper[4558]: I0120 18:19:12.217626 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/barbican0a40-account-delete-frf72" event={"ID":"30f50240-927f-4955-a922-79e1a3866910","Type":"ContainerDied","Data":"dd55da37597ab23ae015a4bb3d59867d7ced3602d89ba409ac02dec5452f99c4"} Jan 20 18:19:12 crc kubenswrapper[4558]: I0120 18:19:12.218020 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dd55da37597ab23ae015a4bb3d59867d7ced3602d89ba409ac02dec5452f99c4" Jan 20 18:19:12 crc kubenswrapper[4558]: I0120 18:19:12.217693 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="barbican-kuttl-tests/barbican0a40-account-delete-frf72" Jan 20 18:19:12 crc kubenswrapper[4558]: I0120 18:19:12.576325 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="03970e5f-267e-4cfa-a88e-3518d540a4b8" path="/var/lib/kubelet/pods/03970e5f-267e-4cfa-a88e-3518d540a4b8/volumes" Jan 20 18:19:12 crc kubenswrapper[4558]: I0120 18:19:12.576953 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4b5de5a6-1bc7-455d-9055-5d939864f701" path="/var/lib/kubelet/pods/4b5de5a6-1bc7-455d-9055-5d939864f701/volumes" Jan 20 18:19:13 crc kubenswrapper[4558]: I0120 18:19:13.292793 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["barbican-kuttl-tests/barbican-db-create-5hlft"] Jan 20 18:19:13 crc kubenswrapper[4558]: I0120 18:19:13.297005 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["barbican-kuttl-tests/barbican-db-create-5hlft"] Jan 20 18:19:13 crc kubenswrapper[4558]: I0120 18:19:13.305197 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["barbican-kuttl-tests/barbican0a40-account-delete-frf72"] Jan 20 18:19:13 crc kubenswrapper[4558]: I0120 18:19:13.310193 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["barbican-kuttl-tests/barbican-0a40-account-create-update-rs2rx"] Jan 20 18:19:13 crc kubenswrapper[4558]: I0120 18:19:13.313987 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["barbican-kuttl-tests/barbican-0a40-account-create-update-rs2rx"] Jan 20 18:19:13 crc kubenswrapper[4558]: I0120 18:19:13.317776 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["barbican-kuttl-tests/barbican0a40-account-delete-frf72"] Jan 20 18:19:14 crc kubenswrapper[4558]: I0120 18:19:14.576395 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="30f50240-927f-4955-a922-79e1a3866910" path="/var/lib/kubelet/pods/30f50240-927f-4955-a922-79e1a3866910/volumes" Jan 20 18:19:14 crc kubenswrapper[4558]: I0120 18:19:14.578113 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7c30e311-5eff-45a8-ad20-e455cacd93f4" path="/var/lib/kubelet/pods/7c30e311-5eff-45a8-ad20-e455cacd93f4/volumes" Jan 20 18:19:14 crc kubenswrapper[4558]: I0120 18:19:14.578714 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a6c1c2bc-172b-4618-ae21-b22152ffef1f" path="/var/lib/kubelet/pods/a6c1c2bc-172b-4618-ae21-b22152ffef1f/volumes" Jan 20 18:19:15 crc kubenswrapper[4558]: I0120 18:19:15.772948 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["barbican-kuttl-tests/keystone-bootstrap-rl77j"] Jan 20 18:19:15 crc kubenswrapper[4558]: I0120 18:19:15.778844 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["barbican-kuttl-tests/keystone-db-sync-d4kq6"] Jan 20 18:19:15 crc kubenswrapper[4558]: I0120 18:19:15.784149 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["barbican-kuttl-tests/keystone-db-sync-d4kq6"] Jan 20 18:19:15 crc kubenswrapper[4558]: I0120 18:19:15.788565 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["barbican-kuttl-tests/keystone-bootstrap-rl77j"] Jan 20 18:19:15 crc kubenswrapper[4558]: I0120 18:19:15.792691 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["barbican-kuttl-tests/keystone-694b59f8cc-6bxr2"] Jan 20 18:19:15 crc kubenswrapper[4558]: I0120 18:19:15.792890 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="barbican-kuttl-tests/keystone-694b59f8cc-6bxr2" podUID="9c946696-d624-4baf-824d-da41544b5240" containerName="keystone-api" 
containerID="cri-o://cd7ed0d3b2b2fe1e487a0b13c301a12eef74b758ee8fd6ae67b04dbe57f98f49" gracePeriod=30 Jan 20 18:19:15 crc kubenswrapper[4558]: I0120 18:19:15.832023 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["barbican-kuttl-tests/keystone52d6-account-delete-pzkqm"] Jan 20 18:19:15 crc kubenswrapper[4558]: E0120 18:19:15.832385 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d489296-e372-494e-842e-4449e34e042e" containerName="barbican-api-log" Jan 20 18:19:15 crc kubenswrapper[4558]: I0120 18:19:15.832399 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d489296-e372-494e-842e-4449e34e042e" containerName="barbican-api-log" Jan 20 18:19:15 crc kubenswrapper[4558]: E0120 18:19:15.832412 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03970e5f-267e-4cfa-a88e-3518d540a4b8" containerName="barbican-keystone-listener-log" Jan 20 18:19:15 crc kubenswrapper[4558]: I0120 18:19:15.832417 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="03970e5f-267e-4cfa-a88e-3518d540a4b8" containerName="barbican-keystone-listener-log" Jan 20 18:19:15 crc kubenswrapper[4558]: E0120 18:19:15.832426 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30f50240-927f-4955-a922-79e1a3866910" containerName="mariadb-account-delete" Jan 20 18:19:15 crc kubenswrapper[4558]: I0120 18:19:15.832432 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="30f50240-927f-4955-a922-79e1a3866910" containerName="mariadb-account-delete" Jan 20 18:19:15 crc kubenswrapper[4558]: E0120 18:19:15.832447 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03970e5f-267e-4cfa-a88e-3518d540a4b8" containerName="barbican-keystone-listener" Jan 20 18:19:15 crc kubenswrapper[4558]: I0120 18:19:15.832452 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="03970e5f-267e-4cfa-a88e-3518d540a4b8" containerName="barbican-keystone-listener" Jan 20 18:19:15 crc kubenswrapper[4558]: E0120 18:19:15.832469 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b5de5a6-1bc7-455d-9055-5d939864f701" containerName="barbican-worker" Jan 20 18:19:15 crc kubenswrapper[4558]: I0120 18:19:15.832474 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b5de5a6-1bc7-455d-9055-5d939864f701" containerName="barbican-worker" Jan 20 18:19:15 crc kubenswrapper[4558]: E0120 18:19:15.832481 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d489296-e372-494e-842e-4449e34e042e" containerName="barbican-api" Jan 20 18:19:15 crc kubenswrapper[4558]: I0120 18:19:15.832486 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d489296-e372-494e-842e-4449e34e042e" containerName="barbican-api" Jan 20 18:19:15 crc kubenswrapper[4558]: E0120 18:19:15.832493 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b5de5a6-1bc7-455d-9055-5d939864f701" containerName="barbican-worker-log" Jan 20 18:19:15 crc kubenswrapper[4558]: I0120 18:19:15.832498 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b5de5a6-1bc7-455d-9055-5d939864f701" containerName="barbican-worker-log" Jan 20 18:19:15 crc kubenswrapper[4558]: I0120 18:19:15.832609 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="30f50240-927f-4955-a922-79e1a3866910" containerName="mariadb-account-delete" Jan 20 18:19:15 crc kubenswrapper[4558]: I0120 18:19:15.832624 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8d489296-e372-494e-842e-4449e34e042e" containerName="barbican-api-log" Jan 20 
18:19:15 crc kubenswrapper[4558]: I0120 18:19:15.832633 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="4b5de5a6-1bc7-455d-9055-5d939864f701" containerName="barbican-worker" Jan 20 18:19:15 crc kubenswrapper[4558]: I0120 18:19:15.832643 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="4b5de5a6-1bc7-455d-9055-5d939864f701" containerName="barbican-worker-log" Jan 20 18:19:15 crc kubenswrapper[4558]: I0120 18:19:15.832652 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8d489296-e372-494e-842e-4449e34e042e" containerName="barbican-api" Jan 20 18:19:15 crc kubenswrapper[4558]: I0120 18:19:15.832662 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="03970e5f-267e-4cfa-a88e-3518d540a4b8" containerName="barbican-keystone-listener-log" Jan 20 18:19:15 crc kubenswrapper[4558]: I0120 18:19:15.832671 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="03970e5f-267e-4cfa-a88e-3518d540a4b8" containerName="barbican-keystone-listener" Jan 20 18:19:15 crc kubenswrapper[4558]: I0120 18:19:15.833145 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="barbican-kuttl-tests/keystone52d6-account-delete-pzkqm" Jan 20 18:19:15 crc kubenswrapper[4558]: I0120 18:19:15.839289 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["barbican-kuttl-tests/keystone52d6-account-delete-pzkqm"] Jan 20 18:19:15 crc kubenswrapper[4558]: I0120 18:19:15.918735 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4kkpm\" (UniqueName: \"kubernetes.io/projected/c9734bdf-87f9-4375-b720-ee2523a59f0e-kube-api-access-4kkpm\") pod \"keystone52d6-account-delete-pzkqm\" (UID: \"c9734bdf-87f9-4375-b720-ee2523a59f0e\") " pod="barbican-kuttl-tests/keystone52d6-account-delete-pzkqm" Jan 20 18:19:15 crc kubenswrapper[4558]: I0120 18:19:15.919210 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c9734bdf-87f9-4375-b720-ee2523a59f0e-operator-scripts\") pod \"keystone52d6-account-delete-pzkqm\" (UID: \"c9734bdf-87f9-4375-b720-ee2523a59f0e\") " pod="barbican-kuttl-tests/keystone52d6-account-delete-pzkqm" Jan 20 18:19:16 crc kubenswrapper[4558]: I0120 18:19:16.020119 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c9734bdf-87f9-4375-b720-ee2523a59f0e-operator-scripts\") pod \"keystone52d6-account-delete-pzkqm\" (UID: \"c9734bdf-87f9-4375-b720-ee2523a59f0e\") " pod="barbican-kuttl-tests/keystone52d6-account-delete-pzkqm" Jan 20 18:19:16 crc kubenswrapper[4558]: I0120 18:19:16.020274 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4kkpm\" (UniqueName: \"kubernetes.io/projected/c9734bdf-87f9-4375-b720-ee2523a59f0e-kube-api-access-4kkpm\") pod \"keystone52d6-account-delete-pzkqm\" (UID: \"c9734bdf-87f9-4375-b720-ee2523a59f0e\") " pod="barbican-kuttl-tests/keystone52d6-account-delete-pzkqm" Jan 20 18:19:16 crc kubenswrapper[4558]: I0120 18:19:16.021087 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c9734bdf-87f9-4375-b720-ee2523a59f0e-operator-scripts\") pod \"keystone52d6-account-delete-pzkqm\" (UID: \"c9734bdf-87f9-4375-b720-ee2523a59f0e\") " pod="barbican-kuttl-tests/keystone52d6-account-delete-pzkqm" Jan 20 18:19:16 crc 
kubenswrapper[4558]: E0120 18:19:16.036530 4558 upgradeaware.go:441] Error proxying data from backend to client: writeto tcp 192.168.25.8:49660->192.168.25.8:43883: read tcp 192.168.25.8:49660->192.168.25.8:43883: read: connection reset by peer Jan 20 18:19:16 crc kubenswrapper[4558]: I0120 18:19:16.039795 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4kkpm\" (UniqueName: \"kubernetes.io/projected/c9734bdf-87f9-4375-b720-ee2523a59f0e-kube-api-access-4kkpm\") pod \"keystone52d6-account-delete-pzkqm\" (UID: \"c9734bdf-87f9-4375-b720-ee2523a59f0e\") " pod="barbican-kuttl-tests/keystone52d6-account-delete-pzkqm" Jan 20 18:19:16 crc kubenswrapper[4558]: I0120 18:19:16.149859 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="barbican-kuttl-tests/keystone52d6-account-delete-pzkqm" Jan 20 18:19:16 crc kubenswrapper[4558]: I0120 18:19:16.212225 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["barbican-kuttl-tests/root-account-create-update-hzbrt"] Jan 20 18:19:16 crc kubenswrapper[4558]: I0120 18:19:16.226759 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["barbican-kuttl-tests/root-account-create-update-hzbrt"] Jan 20 18:19:16 crc kubenswrapper[4558]: I0120 18:19:16.242751 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["barbican-kuttl-tests/root-account-create-update-x2lnc"] Jan 20 18:19:16 crc kubenswrapper[4558]: I0120 18:19:16.243629 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="barbican-kuttl-tests/root-account-create-update-x2lnc" Jan 20 18:19:16 crc kubenswrapper[4558]: I0120 18:19:16.254751 4558 reflector.go:368] Caches populated for *v1.Secret from object-"barbican-kuttl-tests"/"openstack-mariadb-root-db-secret" Jan 20 18:19:16 crc kubenswrapper[4558]: I0120 18:19:16.271889 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["barbican-kuttl-tests/root-account-create-update-x2lnc"] Jan 20 18:19:16 crc kubenswrapper[4558]: I0120 18:19:16.291500 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["barbican-kuttl-tests/openstack-galera-2"] Jan 20 18:19:16 crc kubenswrapper[4558]: I0120 18:19:16.295827 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["barbican-kuttl-tests/openstack-galera-0"] Jan 20 18:19:16 crc kubenswrapper[4558]: I0120 18:19:16.300432 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["barbican-kuttl-tests/openstack-galera-1"] Jan 20 18:19:16 crc kubenswrapper[4558]: I0120 18:19:16.315634 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["barbican-kuttl-tests/root-account-create-update-x2lnc"] Jan 20 18:19:16 crc kubenswrapper[4558]: E0120 18:19:16.316182 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[kube-api-access-wcfnt operator-scripts], unattached volumes=[], failed to process volumes=[kube-api-access-wcfnt operator-scripts]: context canceled" pod="barbican-kuttl-tests/root-account-create-update-x2lnc" podUID="4efc26b9-0860-4b93-835a-9af3cb4d9a99" Jan 20 18:19:16 crc kubenswrapper[4558]: I0120 18:19:16.408018 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="barbican-kuttl-tests/openstack-galera-2" podUID="595cc6da-b53a-4a23-8e31-ed1dd616eb3e" containerName="galera" containerID="cri-o://573646459e230f5f4e9d2d63271a8542132e09cb607880437338494fcdcd9be5" gracePeriod=30 Jan 20 18:19:16 crc kubenswrapper[4558]: I0120 18:19:16.427455 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4efc26b9-0860-4b93-835a-9af3cb4d9a99-operator-scripts\") pod \"root-account-create-update-x2lnc\" (UID: \"4efc26b9-0860-4b93-835a-9af3cb4d9a99\") " pod="barbican-kuttl-tests/root-account-create-update-x2lnc" Jan 20 18:19:16 crc kubenswrapper[4558]: I0120 18:19:16.428456 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wcfnt\" (UniqueName: \"kubernetes.io/projected/4efc26b9-0860-4b93-835a-9af3cb4d9a99-kube-api-access-wcfnt\") pod \"root-account-create-update-x2lnc\" (UID: \"4efc26b9-0860-4b93-835a-9af3cb4d9a99\") " pod="barbican-kuttl-tests/root-account-create-update-x2lnc" Jan 20 18:19:16 crc kubenswrapper[4558]: I0120 18:19:16.530366 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4efc26b9-0860-4b93-835a-9af3cb4d9a99-operator-scripts\") pod \"root-account-create-update-x2lnc\" (UID: \"4efc26b9-0860-4b93-835a-9af3cb4d9a99\") " pod="barbican-kuttl-tests/root-account-create-update-x2lnc" Jan 20 18:19:16 crc kubenswrapper[4558]: I0120 18:19:16.530446 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wcfnt\" (UniqueName: \"kubernetes.io/projected/4efc26b9-0860-4b93-835a-9af3cb4d9a99-kube-api-access-wcfnt\") pod \"root-account-create-update-x2lnc\" (UID: \"4efc26b9-0860-4b93-835a-9af3cb4d9a99\") " pod="barbican-kuttl-tests/root-account-create-update-x2lnc" Jan 20 18:19:16 crc kubenswrapper[4558]: E0120 18:19:16.530502 4558 configmap.go:193] Couldn't get configMap barbican-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 18:19:16 crc kubenswrapper[4558]: E0120 18:19:16.530588 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/4efc26b9-0860-4b93-835a-9af3cb4d9a99-operator-scripts podName:4efc26b9-0860-4b93-835a-9af3cb4d9a99 nodeName:}" failed. No retries permitted until 2026-01-20 18:19:17.030571383 +0000 UTC m=+5850.790909350 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/4efc26b9-0860-4b93-835a-9af3cb4d9a99-operator-scripts") pod "root-account-create-update-x2lnc" (UID: "4efc26b9-0860-4b93-835a-9af3cb4d9a99") : configmap "openstack-scripts" not found Jan 20 18:19:16 crc kubenswrapper[4558]: E0120 18:19:16.534287 4558 projected.go:194] Error preparing data for projected volume kube-api-access-wcfnt for pod barbican-kuttl-tests/root-account-create-update-x2lnc: failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 18:19:16 crc kubenswrapper[4558]: E0120 18:19:16.534372 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/4efc26b9-0860-4b93-835a-9af3cb4d9a99-kube-api-access-wcfnt podName:4efc26b9-0860-4b93-835a-9af3cb4d9a99 nodeName:}" failed. No retries permitted until 2026-01-20 18:19:17.034353924 +0000 UTC m=+5850.794691892 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-wcfnt" (UniqueName: "kubernetes.io/projected/4efc26b9-0860-4b93-835a-9af3cb4d9a99-kube-api-access-wcfnt") pod "root-account-create-update-x2lnc" (UID: "4efc26b9-0860-4b93-835a-9af3cb4d9a99") : failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 18:19:16 crc kubenswrapper[4558]: I0120 18:19:16.576762 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2637e631-04ad-4da0-82ce-cbbbaf50ad0b" path="/var/lib/kubelet/pods/2637e631-04ad-4da0-82ce-cbbbaf50ad0b/volumes" Jan 20 18:19:16 crc kubenswrapper[4558]: I0120 18:19:16.577712 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aba8a877-976e-42f7-9a09-a6d491c94db9" path="/var/lib/kubelet/pods/aba8a877-976e-42f7-9a09-a6d491c94db9/volumes" Jan 20 18:19:16 crc kubenswrapper[4558]: I0120 18:19:16.578417 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e765fff7-ce82-48c9-934c-6a40d0db6f02" path="/var/lib/kubelet/pods/e765fff7-ce82-48c9-934c-6a40d0db6f02/volumes" Jan 20 18:19:16 crc kubenswrapper[4558]: I0120 18:19:16.612589 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["barbican-kuttl-tests/keystone52d6-account-delete-pzkqm"] Jan 20 18:19:16 crc kubenswrapper[4558]: I0120 18:19:16.790846 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["barbican-kuttl-tests/memcached-0"] Jan 20 18:19:16 crc kubenswrapper[4558]: I0120 18:19:16.791320 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="barbican-kuttl-tests/memcached-0" podUID="472c8b48-909b-484c-85cb-fb38c6b77c0f" containerName="memcached" containerID="cri-o://85362494850e98037e72c19cc9afcd67b5d5c74c8d3e9df8766d4575cb7ea45f" gracePeriod=30 Jan 20 18:19:16 crc kubenswrapper[4558]: E0120 18:19:16.956742 4558 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod595cc6da_b53a_4a23_8e31_ed1dd616eb3e.slice/crio-conmon-573646459e230f5f4e9d2d63271a8542132e09cb607880437338494fcdcd9be5.scope\": RecentStats: unable to find data in memory cache]" Jan 20 18:19:17 crc kubenswrapper[4558]: I0120 18:19:17.038354 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4efc26b9-0860-4b93-835a-9af3cb4d9a99-operator-scripts\") pod \"root-account-create-update-x2lnc\" (UID: \"4efc26b9-0860-4b93-835a-9af3cb4d9a99\") " pod="barbican-kuttl-tests/root-account-create-update-x2lnc" Jan 20 18:19:17 crc kubenswrapper[4558]: I0120 18:19:17.038417 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wcfnt\" (UniqueName: \"kubernetes.io/projected/4efc26b9-0860-4b93-835a-9af3cb4d9a99-kube-api-access-wcfnt\") pod \"root-account-create-update-x2lnc\" (UID: \"4efc26b9-0860-4b93-835a-9af3cb4d9a99\") " pod="barbican-kuttl-tests/root-account-create-update-x2lnc" Jan 20 18:19:17 crc kubenswrapper[4558]: E0120 18:19:17.038532 4558 configmap.go:193] Couldn't get configMap barbican-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 18:19:17 crc kubenswrapper[4558]: E0120 18:19:17.038628 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/4efc26b9-0860-4b93-835a-9af3cb4d9a99-operator-scripts podName:4efc26b9-0860-4b93-835a-9af3cb4d9a99 nodeName:}" failed. 
No retries permitted until 2026-01-20 18:19:18.038609139 +0000 UTC m=+5851.798947106 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/4efc26b9-0860-4b93-835a-9af3cb4d9a99-operator-scripts") pod "root-account-create-update-x2lnc" (UID: "4efc26b9-0860-4b93-835a-9af3cb4d9a99") : configmap "openstack-scripts" not found Jan 20 18:19:17 crc kubenswrapper[4558]: E0120 18:19:17.041251 4558 projected.go:194] Error preparing data for projected volume kube-api-access-wcfnt for pod barbican-kuttl-tests/root-account-create-update-x2lnc: failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 18:19:17 crc kubenswrapper[4558]: E0120 18:19:17.041321 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/4efc26b9-0860-4b93-835a-9af3cb4d9a99-kube-api-access-wcfnt podName:4efc26b9-0860-4b93-835a-9af3cb4d9a99 nodeName:}" failed. No retries permitted until 2026-01-20 18:19:18.041306071 +0000 UTC m=+5851.801644038 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-wcfnt" (UniqueName: "kubernetes.io/projected/4efc26b9-0860-4b93-835a-9af3cb4d9a99-kube-api-access-wcfnt") pod "root-account-create-update-x2lnc" (UID: "4efc26b9-0860-4b93-835a-9af3cb4d9a99") : failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 18:19:17 crc kubenswrapper[4558]: I0120 18:19:17.146099 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="barbican-kuttl-tests/openstack-galera-2" Jan 20 18:19:17 crc kubenswrapper[4558]: I0120 18:19:17.233123 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["barbican-kuttl-tests/rabbitmq-server-0"] Jan 20 18:19:17 crc kubenswrapper[4558]: I0120 18:19:17.287134 4558 generic.go:334] "Generic (PLEG): container finished" podID="595cc6da-b53a-4a23-8e31-ed1dd616eb3e" containerID="573646459e230f5f4e9d2d63271a8542132e09cb607880437338494fcdcd9be5" exitCode=0 Jan 20 18:19:17 crc kubenswrapper[4558]: I0120 18:19:17.287222 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/openstack-galera-2" event={"ID":"595cc6da-b53a-4a23-8e31-ed1dd616eb3e","Type":"ContainerDied","Data":"573646459e230f5f4e9d2d63271a8542132e09cb607880437338494fcdcd9be5"} Jan 20 18:19:17 crc kubenswrapper[4558]: I0120 18:19:17.287255 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/openstack-galera-2" event={"ID":"595cc6da-b53a-4a23-8e31-ed1dd616eb3e","Type":"ContainerDied","Data":"3093f70dc9c1a03ac227878901aee8bdc9d47bad2f397360ac43a3d9c5a866d9"} Jan 20 18:19:17 crc kubenswrapper[4558]: I0120 18:19:17.287276 4558 scope.go:117] "RemoveContainer" containerID="573646459e230f5f4e9d2d63271a8542132e09cb607880437338494fcdcd9be5" Jan 20 18:19:17 crc kubenswrapper[4558]: I0120 18:19:17.287393 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="barbican-kuttl-tests/openstack-galera-2" Jan 20 18:19:17 crc kubenswrapper[4558]: I0120 18:19:17.289531 4558 generic.go:334] "Generic (PLEG): container finished" podID="c9734bdf-87f9-4375-b720-ee2523a59f0e" containerID="3102dc3c5f6eb71baab1e508ae43c97370b800235e1b9d008bd41f1a0bf99bb1" exitCode=1 Jan 20 18:19:17 crc kubenswrapper[4558]: I0120 18:19:17.289583 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/keystone52d6-account-delete-pzkqm" event={"ID":"c9734bdf-87f9-4375-b720-ee2523a59f0e","Type":"ContainerDied","Data":"3102dc3c5f6eb71baab1e508ae43c97370b800235e1b9d008bd41f1a0bf99bb1"} Jan 20 18:19:17 crc kubenswrapper[4558]: I0120 18:19:17.289612 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/keystone52d6-account-delete-pzkqm" event={"ID":"c9734bdf-87f9-4375-b720-ee2523a59f0e","Type":"ContainerStarted","Data":"feb30eea360793f16f810676ab1f226a5f48a89e3f3ae78ae7a02c648be2ba29"} Jan 20 18:19:17 crc kubenswrapper[4558]: I0120 18:19:17.289636 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="barbican-kuttl-tests/root-account-create-update-x2lnc" Jan 20 18:19:17 crc kubenswrapper[4558]: I0120 18:19:17.290278 4558 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="barbican-kuttl-tests/keystone52d6-account-delete-pzkqm" secret="" err="secret \"galera-openstack-dockercfg-66mvm\" not found" Jan 20 18:19:17 crc kubenswrapper[4558]: I0120 18:19:17.290321 4558 scope.go:117] "RemoveContainer" containerID="3102dc3c5f6eb71baab1e508ae43c97370b800235e1b9d008bd41f1a0bf99bb1" Jan 20 18:19:17 crc kubenswrapper[4558]: I0120 18:19:17.299403 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="barbican-kuttl-tests/root-account-create-update-x2lnc" Jan 20 18:19:17 crc kubenswrapper[4558]: I0120 18:19:17.333006 4558 scope.go:117] "RemoveContainer" containerID="3fc58048255359620235831ab1838523070305b6564cf243ce6325cf4e7d0b6c" Jan 20 18:19:17 crc kubenswrapper[4558]: I0120 18:19:17.342892 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/595cc6da-b53a-4a23-8e31-ed1dd616eb3e-kolla-config\") pod \"595cc6da-b53a-4a23-8e31-ed1dd616eb3e\" (UID: \"595cc6da-b53a-4a23-8e31-ed1dd616eb3e\") " Jan 20 18:19:17 crc kubenswrapper[4558]: I0120 18:19:17.342950 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") pod \"595cc6da-b53a-4a23-8e31-ed1dd616eb3e\" (UID: \"595cc6da-b53a-4a23-8e31-ed1dd616eb3e\") " Jan 20 18:19:17 crc kubenswrapper[4558]: I0120 18:19:17.342984 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/595cc6da-b53a-4a23-8e31-ed1dd616eb3e-config-data-default\") pod \"595cc6da-b53a-4a23-8e31-ed1dd616eb3e\" (UID: \"595cc6da-b53a-4a23-8e31-ed1dd616eb3e\") " Jan 20 18:19:17 crc kubenswrapper[4558]: I0120 18:19:17.343589 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mskhp\" (UniqueName: \"kubernetes.io/projected/595cc6da-b53a-4a23-8e31-ed1dd616eb3e-kube-api-access-mskhp\") pod \"595cc6da-b53a-4a23-8e31-ed1dd616eb3e\" (UID: \"595cc6da-b53a-4a23-8e31-ed1dd616eb3e\") " Jan 20 18:19:17 crc kubenswrapper[4558]: I0120 18:19:17.343532 4558 operation_generator.go:803] UnmountVolume.TearDown 
succeeded for volume "kubernetes.io/configmap/595cc6da-b53a-4a23-8e31-ed1dd616eb3e-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "595cc6da-b53a-4a23-8e31-ed1dd616eb3e" (UID: "595cc6da-b53a-4a23-8e31-ed1dd616eb3e"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:19:17 crc kubenswrapper[4558]: I0120 18:19:17.343589 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/595cc6da-b53a-4a23-8e31-ed1dd616eb3e-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "595cc6da-b53a-4a23-8e31-ed1dd616eb3e" (UID: "595cc6da-b53a-4a23-8e31-ed1dd616eb3e"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:19:17 crc kubenswrapper[4558]: I0120 18:19:17.343652 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/595cc6da-b53a-4a23-8e31-ed1dd616eb3e-config-data-generated\") pod \"595cc6da-b53a-4a23-8e31-ed1dd616eb3e\" (UID: \"595cc6da-b53a-4a23-8e31-ed1dd616eb3e\") " Jan 20 18:19:17 crc kubenswrapper[4558]: I0120 18:19:17.344099 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/595cc6da-b53a-4a23-8e31-ed1dd616eb3e-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "595cc6da-b53a-4a23-8e31-ed1dd616eb3e" (UID: "595cc6da-b53a-4a23-8e31-ed1dd616eb3e"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:19:17 crc kubenswrapper[4558]: I0120 18:19:17.344249 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/595cc6da-b53a-4a23-8e31-ed1dd616eb3e-operator-scripts\") pod \"595cc6da-b53a-4a23-8e31-ed1dd616eb3e\" (UID: \"595cc6da-b53a-4a23-8e31-ed1dd616eb3e\") " Jan 20 18:19:17 crc kubenswrapper[4558]: I0120 18:19:17.344783 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/595cc6da-b53a-4a23-8e31-ed1dd616eb3e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "595cc6da-b53a-4a23-8e31-ed1dd616eb3e" (UID: "595cc6da-b53a-4a23-8e31-ed1dd616eb3e"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:19:17 crc kubenswrapper[4558]: I0120 18:19:17.345934 4558 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/595cc6da-b53a-4a23-8e31-ed1dd616eb3e-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:17 crc kubenswrapper[4558]: I0120 18:19:17.345961 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/595cc6da-b53a-4a23-8e31-ed1dd616eb3e-config-data-default\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:17 crc kubenswrapper[4558]: I0120 18:19:17.345971 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/595cc6da-b53a-4a23-8e31-ed1dd616eb3e-config-data-generated\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:17 crc kubenswrapper[4558]: I0120 18:19:17.345981 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/595cc6da-b53a-4a23-8e31-ed1dd616eb3e-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:17 crc kubenswrapper[4558]: I0120 18:19:17.347781 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/595cc6da-b53a-4a23-8e31-ed1dd616eb3e-kube-api-access-mskhp" (OuterVolumeSpecName: "kube-api-access-mskhp") pod "595cc6da-b53a-4a23-8e31-ed1dd616eb3e" (UID: "595cc6da-b53a-4a23-8e31-ed1dd616eb3e"). InnerVolumeSpecName "kube-api-access-mskhp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:19:17 crc kubenswrapper[4558]: I0120 18:19:17.352018 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage13-crc" (OuterVolumeSpecName: "mysql-db") pod "595cc6da-b53a-4a23-8e31-ed1dd616eb3e" (UID: "595cc6da-b53a-4a23-8e31-ed1dd616eb3e"). InnerVolumeSpecName "local-storage13-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 18:19:17 crc kubenswrapper[4558]: I0120 18:19:17.390124 4558 scope.go:117] "RemoveContainer" containerID="573646459e230f5f4e9d2d63271a8542132e09cb607880437338494fcdcd9be5" Jan 20 18:19:17 crc kubenswrapper[4558]: E0120 18:19:17.390582 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"573646459e230f5f4e9d2d63271a8542132e09cb607880437338494fcdcd9be5\": container with ID starting with 573646459e230f5f4e9d2d63271a8542132e09cb607880437338494fcdcd9be5 not found: ID does not exist" containerID="573646459e230f5f4e9d2d63271a8542132e09cb607880437338494fcdcd9be5" Jan 20 18:19:17 crc kubenswrapper[4558]: I0120 18:19:17.390620 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"573646459e230f5f4e9d2d63271a8542132e09cb607880437338494fcdcd9be5"} err="failed to get container status \"573646459e230f5f4e9d2d63271a8542132e09cb607880437338494fcdcd9be5\": rpc error: code = NotFound desc = could not find container \"573646459e230f5f4e9d2d63271a8542132e09cb607880437338494fcdcd9be5\": container with ID starting with 573646459e230f5f4e9d2d63271a8542132e09cb607880437338494fcdcd9be5 not found: ID does not exist" Jan 20 18:19:17 crc kubenswrapper[4558]: I0120 18:19:17.390645 4558 scope.go:117] "RemoveContainer" containerID="3fc58048255359620235831ab1838523070305b6564cf243ce6325cf4e7d0b6c" Jan 20 18:19:17 crc kubenswrapper[4558]: E0120 18:19:17.390948 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3fc58048255359620235831ab1838523070305b6564cf243ce6325cf4e7d0b6c\": container with ID starting with 3fc58048255359620235831ab1838523070305b6564cf243ce6325cf4e7d0b6c not found: ID does not exist" containerID="3fc58048255359620235831ab1838523070305b6564cf243ce6325cf4e7d0b6c" Jan 20 18:19:17 crc kubenswrapper[4558]: I0120 18:19:17.390971 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3fc58048255359620235831ab1838523070305b6564cf243ce6325cf4e7d0b6c"} err="failed to get container status \"3fc58048255359620235831ab1838523070305b6564cf243ce6325cf4e7d0b6c\": rpc error: code = NotFound desc = could not find container \"3fc58048255359620235831ab1838523070305b6564cf243ce6325cf4e7d0b6c\": container with ID starting with 3fc58048255359620235831ab1838523070305b6564cf243ce6325cf4e7d0b6c not found: ID does not exist" Jan 20 18:19:17 crc kubenswrapper[4558]: I0120 18:19:17.447558 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") on node \"crc\" " Jan 20 18:19:17 crc kubenswrapper[4558]: I0120 18:19:17.447614 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mskhp\" (UniqueName: \"kubernetes.io/projected/595cc6da-b53a-4a23-8e31-ed1dd616eb3e-kube-api-access-mskhp\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:17 crc kubenswrapper[4558]: E0120 18:19:17.447643 4558 configmap.go:193] Couldn't get configMap barbican-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 18:19:17 crc kubenswrapper[4558]: E0120 18:19:17.447704 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/c9734bdf-87f9-4375-b720-ee2523a59f0e-operator-scripts podName:c9734bdf-87f9-4375-b720-ee2523a59f0e nodeName:}" failed. 
No retries permitted until 2026-01-20 18:19:17.947690675 +0000 UTC m=+5851.708028642 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/c9734bdf-87f9-4375-b720-ee2523a59f0e-operator-scripts") pod "keystone52d6-account-delete-pzkqm" (UID: "c9734bdf-87f9-4375-b720-ee2523a59f0e") : configmap "openstack-scripts" not found Jan 20 18:19:17 crc kubenswrapper[4558]: I0120 18:19:17.458653 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage13-crc" (UniqueName: "kubernetes.io/local-volume/local-storage13-crc") on node "crc" Jan 20 18:19:17 crc kubenswrapper[4558]: I0120 18:19:17.548608 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage13-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage13-crc\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:17 crc kubenswrapper[4558]: I0120 18:19:17.581249 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["barbican-kuttl-tests/rabbitmq-server-0"] Jan 20 18:19:17 crc kubenswrapper[4558]: I0120 18:19:17.615767 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["barbican-kuttl-tests/openstack-galera-2"] Jan 20 18:19:17 crc kubenswrapper[4558]: I0120 18:19:17.619143 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["barbican-kuttl-tests/openstack-galera-2"] Jan 20 18:19:17 crc kubenswrapper[4558]: I0120 18:19:17.620765 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="barbican-kuttl-tests/rabbitmq-server-0" podUID="de78cf98-2998-46d1-add9-6a03d25c8b2b" containerName="rabbitmq" containerID="cri-o://68f810afb3a6be27dcb3f76000a3cfe636263e629fef26755689b4fb0096b465" gracePeriod=604800 Jan 20 18:19:17 crc kubenswrapper[4558]: E0120 18:19:17.955805 4558 configmap.go:193] Couldn't get configMap barbican-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 18:19:17 crc kubenswrapper[4558]: E0120 18:19:17.955957 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/c9734bdf-87f9-4375-b720-ee2523a59f0e-operator-scripts podName:c9734bdf-87f9-4375-b720-ee2523a59f0e nodeName:}" failed. No retries permitted until 2026-01-20 18:19:18.955931132 +0000 UTC m=+5852.716269100 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/c9734bdf-87f9-4375-b720-ee2523a59f0e-operator-scripts") pod "keystone52d6-account-delete-pzkqm" (UID: "c9734bdf-87f9-4375-b720-ee2523a59f0e") : configmap "openstack-scripts" not found Jan 20 18:19:18 crc kubenswrapper[4558]: I0120 18:19:18.045131 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="barbican-kuttl-tests/memcached-0" Jan 20 18:19:18 crc kubenswrapper[4558]: I0120 18:19:18.058218 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4efc26b9-0860-4b93-835a-9af3cb4d9a99-operator-scripts\") pod \"root-account-create-update-x2lnc\" (UID: \"4efc26b9-0860-4b93-835a-9af3cb4d9a99\") " pod="barbican-kuttl-tests/root-account-create-update-x2lnc" Jan 20 18:19:18 crc kubenswrapper[4558]: I0120 18:19:18.058300 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wcfnt\" (UniqueName: \"kubernetes.io/projected/4efc26b9-0860-4b93-835a-9af3cb4d9a99-kube-api-access-wcfnt\") pod \"root-account-create-update-x2lnc\" (UID: \"4efc26b9-0860-4b93-835a-9af3cb4d9a99\") " pod="barbican-kuttl-tests/root-account-create-update-x2lnc" Jan 20 18:19:18 crc kubenswrapper[4558]: E0120 18:19:18.058595 4558 configmap.go:193] Couldn't get configMap barbican-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 18:19:18 crc kubenswrapper[4558]: E0120 18:19:18.058724 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/4efc26b9-0860-4b93-835a-9af3cb4d9a99-operator-scripts podName:4efc26b9-0860-4b93-835a-9af3cb4d9a99 nodeName:}" failed. No retries permitted until 2026-01-20 18:19:20.058689855 +0000 UTC m=+5853.819027822 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/4efc26b9-0860-4b93-835a-9af3cb4d9a99-operator-scripts") pod "root-account-create-update-x2lnc" (UID: "4efc26b9-0860-4b93-835a-9af3cb4d9a99") : configmap "openstack-scripts" not found Jan 20 18:19:18 crc kubenswrapper[4558]: E0120 18:19:18.064282 4558 projected.go:194] Error preparing data for projected volume kube-api-access-wcfnt for pod barbican-kuttl-tests/root-account-create-update-x2lnc: failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 18:19:18 crc kubenswrapper[4558]: E0120 18:19:18.064381 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/4efc26b9-0860-4b93-835a-9af3cb4d9a99-kube-api-access-wcfnt podName:4efc26b9-0860-4b93-835a-9af3cb4d9a99 nodeName:}" failed. No retries permitted until 2026-01-20 18:19:20.064358282 +0000 UTC m=+5853.824696250 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-wcfnt" (UniqueName: "kubernetes.io/projected/4efc26b9-0860-4b93-835a-9af3cb4d9a99-kube-api-access-wcfnt") pod "root-account-create-update-x2lnc" (UID: "4efc26b9-0860-4b93-835a-9af3cb4d9a99") : failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 18:19:18 crc kubenswrapper[4558]: I0120 18:19:18.159518 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/472c8b48-909b-484c-85cb-fb38c6b77c0f-kolla-config\") pod \"472c8b48-909b-484c-85cb-fb38c6b77c0f\" (UID: \"472c8b48-909b-484c-85cb-fb38c6b77c0f\") " Jan 20 18:19:18 crc kubenswrapper[4558]: I0120 18:19:18.159648 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/472c8b48-909b-484c-85cb-fb38c6b77c0f-config-data\") pod \"472c8b48-909b-484c-85cb-fb38c6b77c0f\" (UID: \"472c8b48-909b-484c-85cb-fb38c6b77c0f\") " Jan 20 18:19:18 crc kubenswrapper[4558]: I0120 18:19:18.159822 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bhfnf\" (UniqueName: \"kubernetes.io/projected/472c8b48-909b-484c-85cb-fb38c6b77c0f-kube-api-access-bhfnf\") pod \"472c8b48-909b-484c-85cb-fb38c6b77c0f\" (UID: \"472c8b48-909b-484c-85cb-fb38c6b77c0f\") " Jan 20 18:19:18 crc kubenswrapper[4558]: I0120 18:19:18.160377 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/472c8b48-909b-484c-85cb-fb38c6b77c0f-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "472c8b48-909b-484c-85cb-fb38c6b77c0f" (UID: "472c8b48-909b-484c-85cb-fb38c6b77c0f"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:19:18 crc kubenswrapper[4558]: I0120 18:19:18.160413 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/472c8b48-909b-484c-85cb-fb38c6b77c0f-config-data" (OuterVolumeSpecName: "config-data") pod "472c8b48-909b-484c-85cb-fb38c6b77c0f" (UID: "472c8b48-909b-484c-85cb-fb38c6b77c0f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:19:18 crc kubenswrapper[4558]: I0120 18:19:18.160543 4558 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/472c8b48-909b-484c-85cb-fb38c6b77c0f-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:18 crc kubenswrapper[4558]: I0120 18:19:18.160564 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/472c8b48-909b-484c-85cb-fb38c6b77c0f-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:18 crc kubenswrapper[4558]: I0120 18:19:18.167289 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/472c8b48-909b-484c-85cb-fb38c6b77c0f-kube-api-access-bhfnf" (OuterVolumeSpecName: "kube-api-access-bhfnf") pod "472c8b48-909b-484c-85cb-fb38c6b77c0f" (UID: "472c8b48-909b-484c-85cb-fb38c6b77c0f"). InnerVolumeSpecName "kube-api-access-bhfnf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:19:18 crc kubenswrapper[4558]: I0120 18:19:18.262147 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bhfnf\" (UniqueName: \"kubernetes.io/projected/472c8b48-909b-484c-85cb-fb38c6b77c0f-kube-api-access-bhfnf\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:18 crc kubenswrapper[4558]: I0120 18:19:18.298898 4558 generic.go:334] "Generic (PLEG): container finished" podID="c9734bdf-87f9-4375-b720-ee2523a59f0e" containerID="0eac08332bbf921f40f5dce3c14b3cfc09f11b9804dec4cfa44e689fc68f4346" exitCode=1 Jan 20 18:19:18 crc kubenswrapper[4558]: I0120 18:19:18.298973 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/keystone52d6-account-delete-pzkqm" event={"ID":"c9734bdf-87f9-4375-b720-ee2523a59f0e","Type":"ContainerDied","Data":"0eac08332bbf921f40f5dce3c14b3cfc09f11b9804dec4cfa44e689fc68f4346"} Jan 20 18:19:18 crc kubenswrapper[4558]: I0120 18:19:18.299012 4558 scope.go:117] "RemoveContainer" containerID="3102dc3c5f6eb71baab1e508ae43c97370b800235e1b9d008bd41f1a0bf99bb1" Jan 20 18:19:18 crc kubenswrapper[4558]: I0120 18:19:18.299537 4558 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="barbican-kuttl-tests/keystone52d6-account-delete-pzkqm" secret="" err="secret \"galera-openstack-dockercfg-66mvm\" not found" Jan 20 18:19:18 crc kubenswrapper[4558]: I0120 18:19:18.299588 4558 scope.go:117] "RemoveContainer" containerID="0eac08332bbf921f40f5dce3c14b3cfc09f11b9804dec4cfa44e689fc68f4346" Jan 20 18:19:18 crc kubenswrapper[4558]: E0120 18:19:18.299886 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-delete\" with CrashLoopBackOff: \"back-off 10s restarting failed container=mariadb-account-delete pod=keystone52d6-account-delete-pzkqm_barbican-kuttl-tests(c9734bdf-87f9-4375-b720-ee2523a59f0e)\"" pod="barbican-kuttl-tests/keystone52d6-account-delete-pzkqm" podUID="c9734bdf-87f9-4375-b720-ee2523a59f0e" Jan 20 18:19:18 crc kubenswrapper[4558]: I0120 18:19:18.301517 4558 generic.go:334] "Generic (PLEG): container finished" podID="472c8b48-909b-484c-85cb-fb38c6b77c0f" containerID="85362494850e98037e72c19cc9afcd67b5d5c74c8d3e9df8766d4575cb7ea45f" exitCode=0 Jan 20 18:19:18 crc kubenswrapper[4558]: I0120 18:19:18.301550 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/memcached-0" event={"ID":"472c8b48-909b-484c-85cb-fb38c6b77c0f","Type":"ContainerDied","Data":"85362494850e98037e72c19cc9afcd67b5d5c74c8d3e9df8766d4575cb7ea45f"} Jan 20 18:19:18 crc kubenswrapper[4558]: I0120 18:19:18.301574 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="barbican-kuttl-tests/root-account-create-update-x2lnc" Jan 20 18:19:18 crc kubenswrapper[4558]: I0120 18:19:18.301582 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/memcached-0" event={"ID":"472c8b48-909b-484c-85cb-fb38c6b77c0f","Type":"ContainerDied","Data":"5875e5122fb2d3c0bf59a2d7954a0a04fb750b82c6583dbfe965229423df8aaf"} Jan 20 18:19:18 crc kubenswrapper[4558]: I0120 18:19:18.301571 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="barbican-kuttl-tests/memcached-0" Jan 20 18:19:18 crc kubenswrapper[4558]: I0120 18:19:18.358413 4558 scope.go:117] "RemoveContainer" containerID="85362494850e98037e72c19cc9afcd67b5d5c74c8d3e9df8766d4575cb7ea45f" Jan 20 18:19:18 crc kubenswrapper[4558]: I0120 18:19:18.391755 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["barbican-kuttl-tests/memcached-0"] Jan 20 18:19:18 crc kubenswrapper[4558]: I0120 18:19:18.400093 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["barbican-kuttl-tests/memcached-0"] Jan 20 18:19:18 crc kubenswrapper[4558]: I0120 18:19:18.409307 4558 scope.go:117] "RemoveContainer" containerID="85362494850e98037e72c19cc9afcd67b5d5c74c8d3e9df8766d4575cb7ea45f" Jan 20 18:19:18 crc kubenswrapper[4558]: E0120 18:19:18.410123 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"85362494850e98037e72c19cc9afcd67b5d5c74c8d3e9df8766d4575cb7ea45f\": container with ID starting with 85362494850e98037e72c19cc9afcd67b5d5c74c8d3e9df8766d4575cb7ea45f not found: ID does not exist" containerID="85362494850e98037e72c19cc9afcd67b5d5c74c8d3e9df8766d4575cb7ea45f" Jan 20 18:19:18 crc kubenswrapper[4558]: I0120 18:19:18.410182 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"85362494850e98037e72c19cc9afcd67b5d5c74c8d3e9df8766d4575cb7ea45f"} err="failed to get container status \"85362494850e98037e72c19cc9afcd67b5d5c74c8d3e9df8766d4575cb7ea45f\": rpc error: code = NotFound desc = could not find container \"85362494850e98037e72c19cc9afcd67b5d5c74c8d3e9df8766d4575cb7ea45f\": container with ID starting with 85362494850e98037e72c19cc9afcd67b5d5c74c8d3e9df8766d4575cb7ea45f not found: ID does not exist" Jan 20 18:19:18 crc kubenswrapper[4558]: I0120 18:19:18.459217 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["barbican-kuttl-tests/root-account-create-update-x2lnc"] Jan 20 18:19:18 crc kubenswrapper[4558]: I0120 18:19:18.463216 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["barbican-kuttl-tests/root-account-create-update-x2lnc"] Jan 20 18:19:18 crc kubenswrapper[4558]: I0120 18:19:18.477596 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-5db5bc648b-tc7cd"] Jan 20 18:19:18 crc kubenswrapper[4558]: I0120 18:19:18.477787 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/barbican-operator-controller-manager-5db5bc648b-tc7cd" podUID="eaed2fe0-4c9e-4d02-ba03-b6f55fffad21" containerName="manager" containerID="cri-o://62c474691fd3e1fa9a0d366b3db77d1cb8baef6b35fb90f8736ed4b835ffd09d" gracePeriod=10 Jan 20 18:19:18 crc kubenswrapper[4558]: I0120 18:19:18.568421 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4efc26b9-0860-4b93-835a-9af3cb4d9a99-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:18 crc kubenswrapper[4558]: I0120 18:19:18.568452 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wcfnt\" (UniqueName: \"kubernetes.io/projected/4efc26b9-0860-4b93-835a-9af3cb4d9a99-kube-api-access-wcfnt\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:18 crc kubenswrapper[4558]: I0120 18:19:18.574342 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="472c8b48-909b-484c-85cb-fb38c6b77c0f" path="/var/lib/kubelet/pods/472c8b48-909b-484c-85cb-fb38c6b77c0f/volumes" Jan 20 
18:19:18 crc kubenswrapper[4558]: I0120 18:19:18.574716 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4efc26b9-0860-4b93-835a-9af3cb4d9a99" path="/var/lib/kubelet/pods/4efc26b9-0860-4b93-835a-9af3cb4d9a99/volumes" Jan 20 18:19:18 crc kubenswrapper[4558]: I0120 18:19:18.575077 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="595cc6da-b53a-4a23-8e31-ed1dd616eb3e" path="/var/lib/kubelet/pods/595cc6da-b53a-4a23-8e31-ed1dd616eb3e/volumes" Jan 20 18:19:18 crc kubenswrapper[4558]: I0120 18:19:18.669205 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/barbican-operator-index-xz7tx"] Jan 20 18:19:18 crc kubenswrapper[4558]: I0120 18:19:18.669999 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/barbican-operator-index-xz7tx" podUID="c2d67ae8-8d4b-4242-8c31-ad0ca9c3975c" containerName="registry-server" containerID="cri-o://aa7b9479c303c79db2c7368a0eee3398df9fea0ba6bc5a7a991e89fc14bba725" gracePeriod=30 Jan 20 18:19:18 crc kubenswrapper[4558]: I0120 18:19:18.676305 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/efb8278422048e49f60447fe08bd0467bd977c487c5bfd9acd08760cddhnw5w"] Jan 20 18:19:18 crc kubenswrapper[4558]: I0120 18:19:18.696425 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/efb8278422048e49f60447fe08bd0467bd977c487c5bfd9acd08760cddhnw5w"] Jan 20 18:19:18 crc kubenswrapper[4558]: I0120 18:19:18.821604 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="barbican-kuttl-tests/openstack-galera-1" podUID="53d3a76d-e277-4efa-adbf-85d8e328caf6" containerName="galera" containerID="cri-o://036a2449dee37ea79784360d016ed57d4f72b39b1c9f0b0aba175eff45b9a123" gracePeriod=28 Jan 20 18:19:18 crc kubenswrapper[4558]: E0120 18:19:18.979089 4558 configmap.go:193] Couldn't get configMap barbican-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 18:19:18 crc kubenswrapper[4558]: E0120 18:19:18.979249 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/c9734bdf-87f9-4375-b720-ee2523a59f0e-operator-scripts podName:c9734bdf-87f9-4375-b720-ee2523a59f0e nodeName:}" failed. No retries permitted until 2026-01-20 18:19:20.979226934 +0000 UTC m=+5854.739564901 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/c9734bdf-87f9-4375-b720-ee2523a59f0e-operator-scripts") pod "keystone52d6-account-delete-pzkqm" (UID: "c9734bdf-87f9-4375-b720-ee2523a59f0e") : configmap "openstack-scripts" not found Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.061513 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-5db5bc648b-tc7cd" Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.150075 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="barbican-kuttl-tests/rabbitmq-server-0" Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.180765 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/eaed2fe0-4c9e-4d02-ba03-b6f55fffad21-webhook-cert\") pod \"eaed2fe0-4c9e-4d02-ba03-b6f55fffad21\" (UID: \"eaed2fe0-4c9e-4d02-ba03-b6f55fffad21\") " Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.180911 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/eaed2fe0-4c9e-4d02-ba03-b6f55fffad21-apiservice-cert\") pod \"eaed2fe0-4c9e-4d02-ba03-b6f55fffad21\" (UID: \"eaed2fe0-4c9e-4d02-ba03-b6f55fffad21\") " Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.180961 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cj9qr\" (UniqueName: \"kubernetes.io/projected/eaed2fe0-4c9e-4d02-ba03-b6f55fffad21-kube-api-access-cj9qr\") pod \"eaed2fe0-4c9e-4d02-ba03-b6f55fffad21\" (UID: \"eaed2fe0-4c9e-4d02-ba03-b6f55fffad21\") " Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.191754 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eaed2fe0-4c9e-4d02-ba03-b6f55fffad21-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "eaed2fe0-4c9e-4d02-ba03-b6f55fffad21" (UID: "eaed2fe0-4c9e-4d02-ba03-b6f55fffad21"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.191899 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eaed2fe0-4c9e-4d02-ba03-b6f55fffad21-kube-api-access-cj9qr" (OuterVolumeSpecName: "kube-api-access-cj9qr") pod "eaed2fe0-4c9e-4d02-ba03-b6f55fffad21" (UID: "eaed2fe0-4c9e-4d02-ba03-b6f55fffad21"). InnerVolumeSpecName "kube-api-access-cj9qr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.192039 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eaed2fe0-4c9e-4d02-ba03-b6f55fffad21-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "eaed2fe0-4c9e-4d02-ba03-b6f55fffad21" (UID: "eaed2fe0-4c9e-4d02-ba03-b6f55fffad21"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.256361 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/barbican-operator-index-xz7tx" Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.282365 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-734ec1d9-3fee-4c55-b0f8-0f4799cbfde9\") pod \"de78cf98-2998-46d1-add9-6a03d25c8b2b\" (UID: \"de78cf98-2998-46d1-add9-6a03d25c8b2b\") " Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.282473 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/de78cf98-2998-46d1-add9-6a03d25c8b2b-plugins-conf\") pod \"de78cf98-2998-46d1-add9-6a03d25c8b2b\" (UID: \"de78cf98-2998-46d1-add9-6a03d25c8b2b\") " Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.282505 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/de78cf98-2998-46d1-add9-6a03d25c8b2b-rabbitmq-erlang-cookie\") pod \"de78cf98-2998-46d1-add9-6a03d25c8b2b\" (UID: \"de78cf98-2998-46d1-add9-6a03d25c8b2b\") " Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.282534 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/de78cf98-2998-46d1-add9-6a03d25c8b2b-rabbitmq-plugins\") pod \"de78cf98-2998-46d1-add9-6a03d25c8b2b\" (UID: \"de78cf98-2998-46d1-add9-6a03d25c8b2b\") " Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.282583 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vmjz5\" (UniqueName: \"kubernetes.io/projected/de78cf98-2998-46d1-add9-6a03d25c8b2b-kube-api-access-vmjz5\") pod \"de78cf98-2998-46d1-add9-6a03d25c8b2b\" (UID: \"de78cf98-2998-46d1-add9-6a03d25c8b2b\") " Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.282670 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/de78cf98-2998-46d1-add9-6a03d25c8b2b-pod-info\") pod \"de78cf98-2998-46d1-add9-6a03d25c8b2b\" (UID: \"de78cf98-2998-46d1-add9-6a03d25c8b2b\") " Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.282729 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/de78cf98-2998-46d1-add9-6a03d25c8b2b-erlang-cookie-secret\") pod \"de78cf98-2998-46d1-add9-6a03d25c8b2b\" (UID: \"de78cf98-2998-46d1-add9-6a03d25c8b2b\") " Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.282751 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/de78cf98-2998-46d1-add9-6a03d25c8b2b-rabbitmq-confd\") pod \"de78cf98-2998-46d1-add9-6a03d25c8b2b\" (UID: \"de78cf98-2998-46d1-add9-6a03d25c8b2b\") " Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.283091 4558 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/eaed2fe0-4c9e-4d02-ba03-b6f55fffad21-webhook-cert\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.283110 4558 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/eaed2fe0-4c9e-4d02-ba03-b6f55fffad21-apiservice-cert\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.283121 4558 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cj9qr\" (UniqueName: \"kubernetes.io/projected/eaed2fe0-4c9e-4d02-ba03-b6f55fffad21-kube-api-access-cj9qr\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.285654 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/de78cf98-2998-46d1-add9-6a03d25c8b2b-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "de78cf98-2998-46d1-add9-6a03d25c8b2b" (UID: "de78cf98-2998-46d1-add9-6a03d25c8b2b"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.288631 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/de78cf98-2998-46d1-add9-6a03d25c8b2b-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "de78cf98-2998-46d1-add9-6a03d25c8b2b" (UID: "de78cf98-2998-46d1-add9-6a03d25c8b2b"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.289075 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/de78cf98-2998-46d1-add9-6a03d25c8b2b-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "de78cf98-2998-46d1-add9-6a03d25c8b2b" (UID: "de78cf98-2998-46d1-add9-6a03d25c8b2b"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.292922 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/de78cf98-2998-46d1-add9-6a03d25c8b2b-kube-api-access-vmjz5" (OuterVolumeSpecName: "kube-api-access-vmjz5") pod "de78cf98-2998-46d1-add9-6a03d25c8b2b" (UID: "de78cf98-2998-46d1-add9-6a03d25c8b2b"). InnerVolumeSpecName "kube-api-access-vmjz5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.295340 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/de78cf98-2998-46d1-add9-6a03d25c8b2b-pod-info" (OuterVolumeSpecName: "pod-info") pod "de78cf98-2998-46d1-add9-6a03d25c8b2b" (UID: "de78cf98-2998-46d1-add9-6a03d25c8b2b"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.297221 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/de78cf98-2998-46d1-add9-6a03d25c8b2b-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "de78cf98-2998-46d1-add9-6a03d25c8b2b" (UID: "de78cf98-2998-46d1-add9-6a03d25c8b2b"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.306709 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-734ec1d9-3fee-4c55-b0f8-0f4799cbfde9" (OuterVolumeSpecName: "persistence") pod "de78cf98-2998-46d1-add9-6a03d25c8b2b" (UID: "de78cf98-2998-46d1-add9-6a03d25c8b2b"). InnerVolumeSpecName "pvc-734ec1d9-3fee-4c55-b0f8-0f4799cbfde9". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.315669 4558 generic.go:334] "Generic (PLEG): container finished" podID="9c946696-d624-4baf-824d-da41544b5240" containerID="cd7ed0d3b2b2fe1e487a0b13c301a12eef74b758ee8fd6ae67b04dbe57f98f49" exitCode=0 Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.315742 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/keystone-694b59f8cc-6bxr2" event={"ID":"9c946696-d624-4baf-824d-da41544b5240","Type":"ContainerDied","Data":"cd7ed0d3b2b2fe1e487a0b13c301a12eef74b758ee8fd6ae67b04dbe57f98f49"} Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.320729 4558 generic.go:334] "Generic (PLEG): container finished" podID="eaed2fe0-4c9e-4d02-ba03-b6f55fffad21" containerID="62c474691fd3e1fa9a0d366b3db77d1cb8baef6b35fb90f8736ed4b835ffd09d" exitCode=0 Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.320815 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-5db5bc648b-tc7cd" event={"ID":"eaed2fe0-4c9e-4d02-ba03-b6f55fffad21","Type":"ContainerDied","Data":"62c474691fd3e1fa9a0d366b3db77d1cb8baef6b35fb90f8736ed4b835ffd09d"} Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.320852 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-5db5bc648b-tc7cd" event={"ID":"eaed2fe0-4c9e-4d02-ba03-b6f55fffad21","Type":"ContainerDied","Data":"a46ec77eb14d7f366467e2e555266298d69b73caa2527e2e59727247ea34da92"} Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.320873 4558 scope.go:117] "RemoveContainer" containerID="62c474691fd3e1fa9a0d366b3db77d1cb8baef6b35fb90f8736ed4b835ffd09d" Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.321028 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-5db5bc648b-tc7cd" Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.326986 4558 generic.go:334] "Generic (PLEG): container finished" podID="de78cf98-2998-46d1-add9-6a03d25c8b2b" containerID="68f810afb3a6be27dcb3f76000a3cfe636263e629fef26755689b4fb0096b465" exitCode=0 Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.327062 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/rabbitmq-server-0" event={"ID":"de78cf98-2998-46d1-add9-6a03d25c8b2b","Type":"ContainerDied","Data":"68f810afb3a6be27dcb3f76000a3cfe636263e629fef26755689b4fb0096b465"} Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.327092 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/rabbitmq-server-0" event={"ID":"de78cf98-2998-46d1-add9-6a03d25c8b2b","Type":"ContainerDied","Data":"cfd8134f2d9a0f249583b231d6f72b7b1c993c311427bb9b2b2f1adab07aa3b3"} Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.327750 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="barbican-kuttl-tests/rabbitmq-server-0" Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.334474 4558 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." 
pod="barbican-kuttl-tests/keystone52d6-account-delete-pzkqm" secret="" err="secret \"galera-openstack-dockercfg-66mvm\" not found" Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.334516 4558 scope.go:117] "RemoveContainer" containerID="0eac08332bbf921f40f5dce3c14b3cfc09f11b9804dec4cfa44e689fc68f4346" Jan 20 18:19:19 crc kubenswrapper[4558]: E0120 18:19:19.334913 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-delete\" with CrashLoopBackOff: \"back-off 10s restarting failed container=mariadb-account-delete pod=keystone52d6-account-delete-pzkqm_barbican-kuttl-tests(c9734bdf-87f9-4375-b720-ee2523a59f0e)\"" pod="barbican-kuttl-tests/keystone52d6-account-delete-pzkqm" podUID="c9734bdf-87f9-4375-b720-ee2523a59f0e" Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.341233 4558 generic.go:334] "Generic (PLEG): container finished" podID="c2d67ae8-8d4b-4242-8c31-ad0ca9c3975c" containerID="aa7b9479c303c79db2c7368a0eee3398df9fea0ba6bc5a7a991e89fc14bba725" exitCode=0 Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.341266 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-index-xz7tx" Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.341275 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="barbican-kuttl-tests/keystone-694b59f8cc-6bxr2" Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.341291 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-index-xz7tx" event={"ID":"c2d67ae8-8d4b-4242-8c31-ad0ca9c3975c","Type":"ContainerDied","Data":"aa7b9479c303c79db2c7368a0eee3398df9fea0ba6bc5a7a991e89fc14bba725"} Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.341323 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-index-xz7tx" event={"ID":"c2d67ae8-8d4b-4242-8c31-ad0ca9c3975c","Type":"ContainerDied","Data":"d9c211d1c8d1874664302f4f2aeac3fcbcaf26d7007131daa12e0822b7a238c3"} Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.350698 4558 scope.go:117] "RemoveContainer" containerID="62c474691fd3e1fa9a0d366b3db77d1cb8baef6b35fb90f8736ed4b835ffd09d" Jan 20 18:19:19 crc kubenswrapper[4558]: E0120 18:19:19.351335 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"62c474691fd3e1fa9a0d366b3db77d1cb8baef6b35fb90f8736ed4b835ffd09d\": container with ID starting with 62c474691fd3e1fa9a0d366b3db77d1cb8baef6b35fb90f8736ed4b835ffd09d not found: ID does not exist" containerID="62c474691fd3e1fa9a0d366b3db77d1cb8baef6b35fb90f8736ed4b835ffd09d" Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.351373 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"62c474691fd3e1fa9a0d366b3db77d1cb8baef6b35fb90f8736ed4b835ffd09d"} err="failed to get container status \"62c474691fd3e1fa9a0d366b3db77d1cb8baef6b35fb90f8736ed4b835ffd09d\": rpc error: code = NotFound desc = could not find container \"62c474691fd3e1fa9a0d366b3db77d1cb8baef6b35fb90f8736ed4b835ffd09d\": container with ID starting with 62c474691fd3e1fa9a0d366b3db77d1cb8baef6b35fb90f8736ed4b835ffd09d not found: ID does not exist" Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.351396 4558 scope.go:117] "RemoveContainer" containerID="68f810afb3a6be27dcb3f76000a3cfe636263e629fef26755689b4fb0096b465" Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 
18:19:19.356477 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/de78cf98-2998-46d1-add9-6a03d25c8b2b-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "de78cf98-2998-46d1-add9-6a03d25c8b2b" (UID: "de78cf98-2998-46d1-add9-6a03d25c8b2b"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.372610 4558 scope.go:117] "RemoveContainer" containerID="c1eb613ee08b66a59123d64dc4d5f56391ec1093d27dcbda2d2d7860f97cba52" Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.384001 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x6b24\" (UniqueName: \"kubernetes.io/projected/c2d67ae8-8d4b-4242-8c31-ad0ca9c3975c-kube-api-access-x6b24\") pod \"c2d67ae8-8d4b-4242-8c31-ad0ca9c3975c\" (UID: \"c2d67ae8-8d4b-4242-8c31-ad0ca9c3975c\") " Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.384534 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-5db5bc648b-tc7cd"] Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.384799 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-734ec1d9-3fee-4c55-b0f8-0f4799cbfde9\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-734ec1d9-3fee-4c55-b0f8-0f4799cbfde9\") on node \"crc\" " Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.384827 4558 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/de78cf98-2998-46d1-add9-6a03d25c8b2b-plugins-conf\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.384840 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/de78cf98-2998-46d1-add9-6a03d25c8b2b-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.384854 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/de78cf98-2998-46d1-add9-6a03d25c8b2b-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.384864 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vmjz5\" (UniqueName: \"kubernetes.io/projected/de78cf98-2998-46d1-add9-6a03d25c8b2b-kube-api-access-vmjz5\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.384873 4558 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/de78cf98-2998-46d1-add9-6a03d25c8b2b-pod-info\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.384881 4558 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/de78cf98-2998-46d1-add9-6a03d25c8b2b-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.384889 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/de78cf98-2998-46d1-add9-6a03d25c8b2b-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.387124 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/projected/c2d67ae8-8d4b-4242-8c31-ad0ca9c3975c-kube-api-access-x6b24" (OuterVolumeSpecName: "kube-api-access-x6b24") pod "c2d67ae8-8d4b-4242-8c31-ad0ca9c3975c" (UID: "c2d67ae8-8d4b-4242-8c31-ad0ca9c3975c"). InnerVolumeSpecName "kube-api-access-x6b24". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.387751 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-5db5bc648b-tc7cd"] Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.392255 4558 scope.go:117] "RemoveContainer" containerID="68f810afb3a6be27dcb3f76000a3cfe636263e629fef26755689b4fb0096b465" Jan 20 18:19:19 crc kubenswrapper[4558]: E0120 18:19:19.392515 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"68f810afb3a6be27dcb3f76000a3cfe636263e629fef26755689b4fb0096b465\": container with ID starting with 68f810afb3a6be27dcb3f76000a3cfe636263e629fef26755689b4fb0096b465 not found: ID does not exist" containerID="68f810afb3a6be27dcb3f76000a3cfe636263e629fef26755689b4fb0096b465" Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.392566 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"68f810afb3a6be27dcb3f76000a3cfe636263e629fef26755689b4fb0096b465"} err="failed to get container status \"68f810afb3a6be27dcb3f76000a3cfe636263e629fef26755689b4fb0096b465\": rpc error: code = NotFound desc = could not find container \"68f810afb3a6be27dcb3f76000a3cfe636263e629fef26755689b4fb0096b465\": container with ID starting with 68f810afb3a6be27dcb3f76000a3cfe636263e629fef26755689b4fb0096b465 not found: ID does not exist" Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.392588 4558 scope.go:117] "RemoveContainer" containerID="c1eb613ee08b66a59123d64dc4d5f56391ec1093d27dcbda2d2d7860f97cba52" Jan 20 18:19:19 crc kubenswrapper[4558]: E0120 18:19:19.394505 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c1eb613ee08b66a59123d64dc4d5f56391ec1093d27dcbda2d2d7860f97cba52\": container with ID starting with c1eb613ee08b66a59123d64dc4d5f56391ec1093d27dcbda2d2d7860f97cba52 not found: ID does not exist" containerID="c1eb613ee08b66a59123d64dc4d5f56391ec1093d27dcbda2d2d7860f97cba52" Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.394529 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c1eb613ee08b66a59123d64dc4d5f56391ec1093d27dcbda2d2d7860f97cba52"} err="failed to get container status \"c1eb613ee08b66a59123d64dc4d5f56391ec1093d27dcbda2d2d7860f97cba52\": rpc error: code = NotFound desc = could not find container \"c1eb613ee08b66a59123d64dc4d5f56391ec1093d27dcbda2d2d7860f97cba52\": container with ID starting with c1eb613ee08b66a59123d64dc4d5f56391ec1093d27dcbda2d2d7860f97cba52 not found: ID does not exist" Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.394543 4558 scope.go:117] "RemoveContainer" containerID="aa7b9479c303c79db2c7368a0eee3398df9fea0ba6bc5a7a991e89fc14bba725" Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.416735 4558 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... 
Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.417081 4558 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-734ec1d9-3fee-4c55-b0f8-0f4799cbfde9" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-734ec1d9-3fee-4c55-b0f8-0f4799cbfde9") on node "crc" Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.424421 4558 scope.go:117] "RemoveContainer" containerID="aa7b9479c303c79db2c7368a0eee3398df9fea0ba6bc5a7a991e89fc14bba725" Jan 20 18:19:19 crc kubenswrapper[4558]: E0120 18:19:19.425519 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aa7b9479c303c79db2c7368a0eee3398df9fea0ba6bc5a7a991e89fc14bba725\": container with ID starting with aa7b9479c303c79db2c7368a0eee3398df9fea0ba6bc5a7a991e89fc14bba725 not found: ID does not exist" containerID="aa7b9479c303c79db2c7368a0eee3398df9fea0ba6bc5a7a991e89fc14bba725" Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.425544 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aa7b9479c303c79db2c7368a0eee3398df9fea0ba6bc5a7a991e89fc14bba725"} err="failed to get container status \"aa7b9479c303c79db2c7368a0eee3398df9fea0ba6bc5a7a991e89fc14bba725\": rpc error: code = NotFound desc = could not find container \"aa7b9479c303c79db2c7368a0eee3398df9fea0ba6bc5a7a991e89fc14bba725\": container with ID starting with aa7b9479c303c79db2c7368a0eee3398df9fea0ba6bc5a7a991e89fc14bba725 not found: ID does not exist" Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.485638 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c946696-d624-4baf-824d-da41544b5240-config-data\") pod \"9c946696-d624-4baf-824d-da41544b5240\" (UID: \"9c946696-d624-4baf-824d-da41544b5240\") " Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.485713 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/9c946696-d624-4baf-824d-da41544b5240-credential-keys\") pod \"9c946696-d624-4baf-824d-da41544b5240\" (UID: \"9c946696-d624-4baf-824d-da41544b5240\") " Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.485792 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9c946696-d624-4baf-824d-da41544b5240-scripts\") pod \"9c946696-d624-4baf-824d-da41544b5240\" (UID: \"9c946696-d624-4baf-824d-da41544b5240\") " Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.485836 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9c946696-d624-4baf-824d-da41544b5240-fernet-keys\") pod \"9c946696-d624-4baf-824d-da41544b5240\" (UID: \"9c946696-d624-4baf-824d-da41544b5240\") " Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.485863 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bz4xw\" (UniqueName: \"kubernetes.io/projected/9c946696-d624-4baf-824d-da41544b5240-kube-api-access-bz4xw\") pod \"9c946696-d624-4baf-824d-da41544b5240\" (UID: \"9c946696-d624-4baf-824d-da41544b5240\") " Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.486247 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x6b24\" (UniqueName: \"kubernetes.io/projected/c2d67ae8-8d4b-4242-8c31-ad0ca9c3975c-kube-api-access-x6b24\") on node \"crc\" 
DevicePath \"\"" Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.486265 4558 reconciler_common.go:293] "Volume detached for volume \"pvc-734ec1d9-3fee-4c55-b0f8-0f4799cbfde9\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-734ec1d9-3fee-4c55-b0f8-0f4799cbfde9\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.491277 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9c946696-d624-4baf-824d-da41544b5240-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "9c946696-d624-4baf-824d-da41544b5240" (UID: "9c946696-d624-4baf-824d-da41544b5240"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.491298 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9c946696-d624-4baf-824d-da41544b5240-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "9c946696-d624-4baf-824d-da41544b5240" (UID: "9c946696-d624-4baf-824d-da41544b5240"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.491347 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9c946696-d624-4baf-824d-da41544b5240-kube-api-access-bz4xw" (OuterVolumeSpecName: "kube-api-access-bz4xw") pod "9c946696-d624-4baf-824d-da41544b5240" (UID: "9c946696-d624-4baf-824d-da41544b5240"). InnerVolumeSpecName "kube-api-access-bz4xw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.491348 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9c946696-d624-4baf-824d-da41544b5240-scripts" (OuterVolumeSpecName: "scripts") pod "9c946696-d624-4baf-824d-da41544b5240" (UID: "9c946696-d624-4baf-824d-da41544b5240"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.502984 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9c946696-d624-4baf-824d-da41544b5240-config-data" (OuterVolumeSpecName: "config-data") pod "9c946696-d624-4baf-824d-da41544b5240" (UID: "9c946696-d624-4baf-824d-da41544b5240"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.587774 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9c946696-d624-4baf-824d-da41544b5240-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.587805 4558 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/9c946696-d624-4baf-824d-da41544b5240-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.587826 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9c946696-d624-4baf-824d-da41544b5240-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.587849 4558 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9c946696-d624-4baf-824d-da41544b5240-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.587862 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bz4xw\" (UniqueName: \"kubernetes.io/projected/9c946696-d624-4baf-824d-da41544b5240-kube-api-access-bz4xw\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.659702 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["barbican-kuttl-tests/rabbitmq-server-0"] Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.669922 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["barbican-kuttl-tests/rabbitmq-server-0"] Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.674819 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/barbican-operator-index-xz7tx"] Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.678888 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/barbican-operator-index-xz7tx"] Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.793985 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="barbican-kuttl-tests/openstack-galera-1" podUID="53d3a76d-e277-4efa-adbf-85d8e328caf6" containerName="galera" probeResult="failure" output="command timed out" Jan 20 18:19:19 crc kubenswrapper[4558]: I0120 18:19:19.918911 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-operators/barbican-operator-controller-manager-5db5bc648b-tc7cd" podUID="eaed2fe0-4c9e-4d02-ba03-b6f55fffad21" containerName="manager" probeResult="failure" output="Get \"http://10.217.1.92:8081/readyz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 20 18:19:20 crc kubenswrapper[4558]: I0120 18:19:20.361987 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/keystone-694b59f8cc-6bxr2" event={"ID":"9c946696-d624-4baf-824d-da41544b5240","Type":"ContainerDied","Data":"1bfc1f27aac439037c7c45f3f556abb8b1ab8e6d830e19c3214c7a2cfe089a80"} Jan 20 18:19:20 crc kubenswrapper[4558]: I0120 18:19:20.362057 4558 scope.go:117] "RemoveContainer" containerID="cd7ed0d3b2b2fe1e487a0b13c301a12eef74b758ee8fd6ae67b04dbe57f98f49" Jan 20 18:19:20 crc kubenswrapper[4558]: I0120 18:19:20.362219 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="barbican-kuttl-tests/keystone-694b59f8cc-6bxr2" Jan 20 18:19:20 crc kubenswrapper[4558]: I0120 18:19:20.397039 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["barbican-kuttl-tests/keystone-694b59f8cc-6bxr2"] Jan 20 18:19:20 crc kubenswrapper[4558]: I0120 18:19:20.407107 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["barbican-kuttl-tests/keystone-694b59f8cc-6bxr2"] Jan 20 18:19:20 crc kubenswrapper[4558]: I0120 18:19:20.574935 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1ed4e8c4-ba06-4fc9-b258-b51a3db829b5" path="/var/lib/kubelet/pods/1ed4e8c4-ba06-4fc9-b258-b51a3db829b5/volumes" Jan 20 18:19:20 crc kubenswrapper[4558]: I0120 18:19:20.575698 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9c946696-d624-4baf-824d-da41544b5240" path="/var/lib/kubelet/pods/9c946696-d624-4baf-824d-da41544b5240/volumes" Jan 20 18:19:20 crc kubenswrapper[4558]: I0120 18:19:20.576093 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c2d67ae8-8d4b-4242-8c31-ad0ca9c3975c" path="/var/lib/kubelet/pods/c2d67ae8-8d4b-4242-8c31-ad0ca9c3975c/volumes" Jan 20 18:19:20 crc kubenswrapper[4558]: I0120 18:19:20.577082 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="de78cf98-2998-46d1-add9-6a03d25c8b2b" path="/var/lib/kubelet/pods/de78cf98-2998-46d1-add9-6a03d25c8b2b/volumes" Jan 20 18:19:20 crc kubenswrapper[4558]: I0120 18:19:20.577785 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eaed2fe0-4c9e-4d02-ba03-b6f55fffad21" path="/var/lib/kubelet/pods/eaed2fe0-4c9e-4d02-ba03-b6f55fffad21/volumes" Jan 20 18:19:20 crc kubenswrapper[4558]: I0120 18:19:20.620607 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="barbican-kuttl-tests/openstack-galera-1" Jan 20 18:19:20 crc kubenswrapper[4558]: I0120 18:19:20.702771 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="barbican-kuttl-tests/openstack-galera-0" podUID="4a5a6576-dc07-4211-8b81-a53986f07041" containerName="galera" containerID="cri-o://41fbe186961e909cadffeef9e30338ab8190687a2776b303ae2843d2d4c3c9ff" gracePeriod=26 Jan 20 18:19:20 crc kubenswrapper[4558]: I0120 18:19:20.804422 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zz8nz\" (UniqueName: \"kubernetes.io/projected/53d3a76d-e277-4efa-adbf-85d8e328caf6-kube-api-access-zz8nz\") pod \"53d3a76d-e277-4efa-adbf-85d8e328caf6\" (UID: \"53d3a76d-e277-4efa-adbf-85d8e328caf6\") " Jan 20 18:19:20 crc kubenswrapper[4558]: I0120 18:19:20.804485 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/53d3a76d-e277-4efa-adbf-85d8e328caf6-config-data-generated\") pod \"53d3a76d-e277-4efa-adbf-85d8e328caf6\" (UID: \"53d3a76d-e277-4efa-adbf-85d8e328caf6\") " Jan 20 18:19:20 crc kubenswrapper[4558]: I0120 18:19:20.804515 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") pod \"53d3a76d-e277-4efa-adbf-85d8e328caf6\" (UID: \"53d3a76d-e277-4efa-adbf-85d8e328caf6\") " Jan 20 18:19:20 crc kubenswrapper[4558]: I0120 18:19:20.804557 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/53d3a76d-e277-4efa-adbf-85d8e328caf6-operator-scripts\") pod \"53d3a76d-e277-4efa-adbf-85d8e328caf6\" (UID: \"53d3a76d-e277-4efa-adbf-85d8e328caf6\") " Jan 20 18:19:20 crc kubenswrapper[4558]: I0120 18:19:20.804596 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/53d3a76d-e277-4efa-adbf-85d8e328caf6-kolla-config\") pod \"53d3a76d-e277-4efa-adbf-85d8e328caf6\" (UID: \"53d3a76d-e277-4efa-adbf-85d8e328caf6\") " Jan 20 18:19:20 crc kubenswrapper[4558]: I0120 18:19:20.804621 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/53d3a76d-e277-4efa-adbf-85d8e328caf6-config-data-default\") pod \"53d3a76d-e277-4efa-adbf-85d8e328caf6\" (UID: \"53d3a76d-e277-4efa-adbf-85d8e328caf6\") " Jan 20 18:19:20 crc kubenswrapper[4558]: I0120 18:19:20.805084 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/53d3a76d-e277-4efa-adbf-85d8e328caf6-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "53d3a76d-e277-4efa-adbf-85d8e328caf6" (UID: "53d3a76d-e277-4efa-adbf-85d8e328caf6"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:19:20 crc kubenswrapper[4558]: I0120 18:19:20.805279 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/53d3a76d-e277-4efa-adbf-85d8e328caf6-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "53d3a76d-e277-4efa-adbf-85d8e328caf6" (UID: "53d3a76d-e277-4efa-adbf-85d8e328caf6"). InnerVolumeSpecName "kolla-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:19:20 crc kubenswrapper[4558]: I0120 18:19:20.805460 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/53d3a76d-e277-4efa-adbf-85d8e328caf6-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "53d3a76d-e277-4efa-adbf-85d8e328caf6" (UID: "53d3a76d-e277-4efa-adbf-85d8e328caf6"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:19:20 crc kubenswrapper[4558]: I0120 18:19:20.805649 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/53d3a76d-e277-4efa-adbf-85d8e328caf6-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "53d3a76d-e277-4efa-adbf-85d8e328caf6" (UID: "53d3a76d-e277-4efa-adbf-85d8e328caf6"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:19:20 crc kubenswrapper[4558]: I0120 18:19:20.821751 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/53d3a76d-e277-4efa-adbf-85d8e328caf6-kube-api-access-zz8nz" (OuterVolumeSpecName: "kube-api-access-zz8nz") pod "53d3a76d-e277-4efa-adbf-85d8e328caf6" (UID: "53d3a76d-e277-4efa-adbf-85d8e328caf6"). InnerVolumeSpecName "kube-api-access-zz8nz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:19:20 crc kubenswrapper[4558]: I0120 18:19:20.827581 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage14-crc" (OuterVolumeSpecName: "mysql-db") pod "53d3a76d-e277-4efa-adbf-85d8e328caf6" (UID: "53d3a76d-e277-4efa-adbf-85d8e328caf6"). InnerVolumeSpecName "local-storage14-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 18:19:20 crc kubenswrapper[4558]: I0120 18:19:20.858187 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["barbican-kuttl-tests/keystone-db-create-k8vsm"] Jan 20 18:19:20 crc kubenswrapper[4558]: I0120 18:19:20.862305 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["barbican-kuttl-tests/keystone-db-create-k8vsm"] Jan 20 18:19:20 crc kubenswrapper[4558]: I0120 18:19:20.869664 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["barbican-kuttl-tests/keystone52d6-account-delete-pzkqm"] Jan 20 18:19:20 crc kubenswrapper[4558]: I0120 18:19:20.873746 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["barbican-kuttl-tests/keystone-52d6-account-create-update-6n2pb"] Jan 20 18:19:20 crc kubenswrapper[4558]: I0120 18:19:20.876692 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["barbican-kuttl-tests/keystone-52d6-account-create-update-6n2pb"] Jan 20 18:19:20 crc kubenswrapper[4558]: I0120 18:19:20.906453 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/53d3a76d-e277-4efa-adbf-85d8e328caf6-config-data-default\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:20 crc kubenswrapper[4558]: I0120 18:19:20.906490 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zz8nz\" (UniqueName: \"kubernetes.io/projected/53d3a76d-e277-4efa-adbf-85d8e328caf6-kube-api-access-zz8nz\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:20 crc kubenswrapper[4558]: I0120 18:19:20.906502 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/53d3a76d-e277-4efa-adbf-85d8e328caf6-config-data-generated\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:20 crc kubenswrapper[4558]: I0120 18:19:20.906526 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") on node \"crc\" " Jan 20 18:19:20 crc kubenswrapper[4558]: I0120 18:19:20.906537 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/53d3a76d-e277-4efa-adbf-85d8e328caf6-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:20 crc kubenswrapper[4558]: I0120 18:19:20.906548 4558 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/53d3a76d-e277-4efa-adbf-85d8e328caf6-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:20 crc kubenswrapper[4558]: I0120 18:19:20.925416 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage14-crc" (UniqueName: "kubernetes.io/local-volume/local-storage14-crc") on node "crc" Jan 20 18:19:21 crc kubenswrapper[4558]: I0120 18:19:21.007923 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage14-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage14-crc\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:21 crc kubenswrapper[4558]: E0120 18:19:21.008022 4558 configmap.go:193] Couldn't get configMap barbican-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 18:19:21 crc kubenswrapper[4558]: E0120 18:19:21.008097 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/c9734bdf-87f9-4375-b720-ee2523a59f0e-operator-scripts podName:c9734bdf-87f9-4375-b720-ee2523a59f0e nodeName:}" failed. 
No retries permitted until 2026-01-20 18:19:25.008074921 +0000 UTC m=+5858.768412889 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/c9734bdf-87f9-4375-b720-ee2523a59f0e-operator-scripts") pod "keystone52d6-account-delete-pzkqm" (UID: "c9734bdf-87f9-4375-b720-ee2523a59f0e") : configmap "openstack-scripts" not found Jan 20 18:19:21 crc kubenswrapper[4558]: I0120 18:19:21.180493 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="barbican-kuttl-tests/keystone52d6-account-delete-pzkqm" Jan 20 18:19:21 crc kubenswrapper[4558]: I0120 18:19:21.295287 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="barbican-kuttl-tests/openstack-galera-0" Jan 20 18:19:21 crc kubenswrapper[4558]: I0120 18:19:21.313748 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4kkpm\" (UniqueName: \"kubernetes.io/projected/c9734bdf-87f9-4375-b720-ee2523a59f0e-kube-api-access-4kkpm\") pod \"c9734bdf-87f9-4375-b720-ee2523a59f0e\" (UID: \"c9734bdf-87f9-4375-b720-ee2523a59f0e\") " Jan 20 18:19:21 crc kubenswrapper[4558]: I0120 18:19:21.313980 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c9734bdf-87f9-4375-b720-ee2523a59f0e-operator-scripts\") pod \"c9734bdf-87f9-4375-b720-ee2523a59f0e\" (UID: \"c9734bdf-87f9-4375-b720-ee2523a59f0e\") " Jan 20 18:19:21 crc kubenswrapper[4558]: I0120 18:19:21.314587 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c9734bdf-87f9-4375-b720-ee2523a59f0e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c9734bdf-87f9-4375-b720-ee2523a59f0e" (UID: "c9734bdf-87f9-4375-b720-ee2523a59f0e"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:19:21 crc kubenswrapper[4558]: I0120 18:19:21.314765 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c9734bdf-87f9-4375-b720-ee2523a59f0e-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:21 crc kubenswrapper[4558]: I0120 18:19:21.320393 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c9734bdf-87f9-4375-b720-ee2523a59f0e-kube-api-access-4kkpm" (OuterVolumeSpecName: "kube-api-access-4kkpm") pod "c9734bdf-87f9-4375-b720-ee2523a59f0e" (UID: "c9734bdf-87f9-4375-b720-ee2523a59f0e"). InnerVolumeSpecName "kube-api-access-4kkpm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:19:21 crc kubenswrapper[4558]: I0120 18:19:21.379293 4558 generic.go:334] "Generic (PLEG): container finished" podID="53d3a76d-e277-4efa-adbf-85d8e328caf6" containerID="036a2449dee37ea79784360d016ed57d4f72b39b1c9f0b0aba175eff45b9a123" exitCode=0 Jan 20 18:19:21 crc kubenswrapper[4558]: I0120 18:19:21.379379 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/openstack-galera-1" event={"ID":"53d3a76d-e277-4efa-adbf-85d8e328caf6","Type":"ContainerDied","Data":"036a2449dee37ea79784360d016ed57d4f72b39b1c9f0b0aba175eff45b9a123"} Jan 20 18:19:21 crc kubenswrapper[4558]: I0120 18:19:21.379388 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="barbican-kuttl-tests/openstack-galera-1" Jan 20 18:19:21 crc kubenswrapper[4558]: I0120 18:19:21.379421 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/openstack-galera-1" event={"ID":"53d3a76d-e277-4efa-adbf-85d8e328caf6","Type":"ContainerDied","Data":"100b3b09bff8ebaebe8a5c3b9e64e918ca9bac4d7b4bbcd0716598fe2007aef5"} Jan 20 18:19:21 crc kubenswrapper[4558]: I0120 18:19:21.379452 4558 scope.go:117] "RemoveContainer" containerID="036a2449dee37ea79784360d016ed57d4f72b39b1c9f0b0aba175eff45b9a123" Jan 20 18:19:21 crc kubenswrapper[4558]: I0120 18:19:21.382621 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="barbican-kuttl-tests/keystone52d6-account-delete-pzkqm" Jan 20 18:19:21 crc kubenswrapper[4558]: I0120 18:19:21.382779 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/keystone52d6-account-delete-pzkqm" event={"ID":"c9734bdf-87f9-4375-b720-ee2523a59f0e","Type":"ContainerDied","Data":"feb30eea360793f16f810676ab1f226a5f48a89e3f3ae78ae7a02c648be2ba29"} Jan 20 18:19:21 crc kubenswrapper[4558]: I0120 18:19:21.387674 4558 generic.go:334] "Generic (PLEG): container finished" podID="4a5a6576-dc07-4211-8b81-a53986f07041" containerID="41fbe186961e909cadffeef9e30338ab8190687a2776b303ae2843d2d4c3c9ff" exitCode=0 Jan 20 18:19:21 crc kubenswrapper[4558]: I0120 18:19:21.387715 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/openstack-galera-0" event={"ID":"4a5a6576-dc07-4211-8b81-a53986f07041","Type":"ContainerDied","Data":"41fbe186961e909cadffeef9e30338ab8190687a2776b303ae2843d2d4c3c9ff"} Jan 20 18:19:21 crc kubenswrapper[4558]: I0120 18:19:21.387743 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="barbican-kuttl-tests/openstack-galera-0" event={"ID":"4a5a6576-dc07-4211-8b81-a53986f07041","Type":"ContainerDied","Data":"8b403e7aac6fad80392710a5653fb1a7326e1b73b8cbd6e255dd7fb336d09369"} Jan 20 18:19:21 crc kubenswrapper[4558]: I0120 18:19:21.387804 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="barbican-kuttl-tests/openstack-galera-0" Jan 20 18:19:21 crc kubenswrapper[4558]: I0120 18:19:21.410474 4558 scope.go:117] "RemoveContainer" containerID="0b3454ba088a6c47241d9de340b7da9aaccc0d047daeeba29e2c1542ce3bca33" Jan 20 18:19:21 crc kubenswrapper[4558]: I0120 18:19:21.419072 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/4a5a6576-dc07-4211-8b81-a53986f07041-config-data-default\") pod \"4a5a6576-dc07-4211-8b81-a53986f07041\" (UID: \"4a5a6576-dc07-4211-8b81-a53986f07041\") " Jan 20 18:19:21 crc kubenswrapper[4558]: I0120 18:19:21.419194 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/4a5a6576-dc07-4211-8b81-a53986f07041-config-data-generated\") pod \"4a5a6576-dc07-4211-8b81-a53986f07041\" (UID: \"4a5a6576-dc07-4211-8b81-a53986f07041\") " Jan 20 18:19:21 crc kubenswrapper[4558]: I0120 18:19:21.419244 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/4a5a6576-dc07-4211-8b81-a53986f07041-kolla-config\") pod \"4a5a6576-dc07-4211-8b81-a53986f07041\" (UID: \"4a5a6576-dc07-4211-8b81-a53986f07041\") " Jan 20 18:19:21 crc kubenswrapper[4558]: I0120 18:19:21.419343 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4a5a6576-dc07-4211-8b81-a53986f07041-operator-scripts\") pod \"4a5a6576-dc07-4211-8b81-a53986f07041\" (UID: \"4a5a6576-dc07-4211-8b81-a53986f07041\") " Jan 20 18:19:21 crc kubenswrapper[4558]: I0120 18:19:21.419415 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"4a5a6576-dc07-4211-8b81-a53986f07041\" (UID: \"4a5a6576-dc07-4211-8b81-a53986f07041\") " Jan 20 18:19:21 crc kubenswrapper[4558]: I0120 18:19:21.419452 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wkrp4\" (UniqueName: \"kubernetes.io/projected/4a5a6576-dc07-4211-8b81-a53986f07041-kube-api-access-wkrp4\") pod \"4a5a6576-dc07-4211-8b81-a53986f07041\" (UID: \"4a5a6576-dc07-4211-8b81-a53986f07041\") " Jan 20 18:19:21 crc kubenswrapper[4558]: I0120 18:19:21.420127 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4kkpm\" (UniqueName: \"kubernetes.io/projected/c9734bdf-87f9-4375-b720-ee2523a59f0e-kube-api-access-4kkpm\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:21 crc kubenswrapper[4558]: I0120 18:19:21.421186 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4a5a6576-dc07-4211-8b81-a53986f07041-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "4a5a6576-dc07-4211-8b81-a53986f07041" (UID: "4a5a6576-dc07-4211-8b81-a53986f07041"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:19:21 crc kubenswrapper[4558]: I0120 18:19:21.421389 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4a5a6576-dc07-4211-8b81-a53986f07041-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "4a5a6576-dc07-4211-8b81-a53986f07041" (UID: "4a5a6576-dc07-4211-8b81-a53986f07041"). InnerVolumeSpecName "kolla-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:19:21 crc kubenswrapper[4558]: I0120 18:19:21.421612 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4a5a6576-dc07-4211-8b81-a53986f07041-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "4a5a6576-dc07-4211-8b81-a53986f07041" (UID: "4a5a6576-dc07-4211-8b81-a53986f07041"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:19:21 crc kubenswrapper[4558]: I0120 18:19:21.421711 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4a5a6576-dc07-4211-8b81-a53986f07041-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "4a5a6576-dc07-4211-8b81-a53986f07041" (UID: "4a5a6576-dc07-4211-8b81-a53986f07041"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:19:21 crc kubenswrapper[4558]: I0120 18:19:21.423570 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4a5a6576-dc07-4211-8b81-a53986f07041-kube-api-access-wkrp4" (OuterVolumeSpecName: "kube-api-access-wkrp4") pod "4a5a6576-dc07-4211-8b81-a53986f07041" (UID: "4a5a6576-dc07-4211-8b81-a53986f07041"). InnerVolumeSpecName "kube-api-access-wkrp4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:19:21 crc kubenswrapper[4558]: I0120 18:19:21.425900 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["barbican-kuttl-tests/openstack-galera-1"] Jan 20 18:19:21 crc kubenswrapper[4558]: I0120 18:19:21.429313 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "mysql-db") pod "4a5a6576-dc07-4211-8b81-a53986f07041" (UID: "4a5a6576-dc07-4211-8b81-a53986f07041"). InnerVolumeSpecName "local-storage08-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 18:19:21 crc kubenswrapper[4558]: I0120 18:19:21.433351 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["barbican-kuttl-tests/openstack-galera-1"] Jan 20 18:19:21 crc kubenswrapper[4558]: I0120 18:19:21.435325 4558 scope.go:117] "RemoveContainer" containerID="036a2449dee37ea79784360d016ed57d4f72b39b1c9f0b0aba175eff45b9a123" Jan 20 18:19:21 crc kubenswrapper[4558]: E0120 18:19:21.435730 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"036a2449dee37ea79784360d016ed57d4f72b39b1c9f0b0aba175eff45b9a123\": container with ID starting with 036a2449dee37ea79784360d016ed57d4f72b39b1c9f0b0aba175eff45b9a123 not found: ID does not exist" containerID="036a2449dee37ea79784360d016ed57d4f72b39b1c9f0b0aba175eff45b9a123" Jan 20 18:19:21 crc kubenswrapper[4558]: I0120 18:19:21.435771 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"036a2449dee37ea79784360d016ed57d4f72b39b1c9f0b0aba175eff45b9a123"} err="failed to get container status \"036a2449dee37ea79784360d016ed57d4f72b39b1c9f0b0aba175eff45b9a123\": rpc error: code = NotFound desc = could not find container \"036a2449dee37ea79784360d016ed57d4f72b39b1c9f0b0aba175eff45b9a123\": container with ID starting with 036a2449dee37ea79784360d016ed57d4f72b39b1c9f0b0aba175eff45b9a123 not found: ID does not exist" Jan 20 18:19:21 crc kubenswrapper[4558]: I0120 18:19:21.435802 4558 scope.go:117] "RemoveContainer" containerID="0b3454ba088a6c47241d9de340b7da9aaccc0d047daeeba29e2c1542ce3bca33" Jan 20 18:19:21 crc kubenswrapper[4558]: E0120 18:19:21.436261 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0b3454ba088a6c47241d9de340b7da9aaccc0d047daeeba29e2c1542ce3bca33\": container with ID starting with 0b3454ba088a6c47241d9de340b7da9aaccc0d047daeeba29e2c1542ce3bca33 not found: ID does not exist" containerID="0b3454ba088a6c47241d9de340b7da9aaccc0d047daeeba29e2c1542ce3bca33" Jan 20 18:19:21 crc kubenswrapper[4558]: I0120 18:19:21.436313 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0b3454ba088a6c47241d9de340b7da9aaccc0d047daeeba29e2c1542ce3bca33"} err="failed to get container status \"0b3454ba088a6c47241d9de340b7da9aaccc0d047daeeba29e2c1542ce3bca33\": rpc error: code = NotFound desc = could not find container \"0b3454ba088a6c47241d9de340b7da9aaccc0d047daeeba29e2c1542ce3bca33\": container with ID starting with 0b3454ba088a6c47241d9de340b7da9aaccc0d047daeeba29e2c1542ce3bca33 not found: ID does not exist" Jan 20 18:19:21 crc kubenswrapper[4558]: I0120 18:19:21.436349 4558 scope.go:117] "RemoveContainer" containerID="0eac08332bbf921f40f5dce3c14b3cfc09f11b9804dec4cfa44e689fc68f4346" Jan 20 18:19:21 crc kubenswrapper[4558]: I0120 18:19:21.439965 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["barbican-kuttl-tests/keystone52d6-account-delete-pzkqm"] Jan 20 18:19:21 crc kubenswrapper[4558]: I0120 18:19:21.444687 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["barbican-kuttl-tests/keystone52d6-account-delete-pzkqm"] Jan 20 18:19:21 crc kubenswrapper[4558]: I0120 18:19:21.456902 4558 scope.go:117] "RemoveContainer" containerID="41fbe186961e909cadffeef9e30338ab8190687a2776b303ae2843d2d4c3c9ff" Jan 20 18:19:21 crc kubenswrapper[4558]: I0120 18:19:21.475191 4558 scope.go:117] "RemoveContainer" 
containerID="8bd822892dec859a58d3d65e60a9ffce0c8d0b2f0f2210180163d24bee85092f" Jan 20 18:19:21 crc kubenswrapper[4558]: I0120 18:19:21.492954 4558 scope.go:117] "RemoveContainer" containerID="41fbe186961e909cadffeef9e30338ab8190687a2776b303ae2843d2d4c3c9ff" Jan 20 18:19:21 crc kubenswrapper[4558]: E0120 18:19:21.493280 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"41fbe186961e909cadffeef9e30338ab8190687a2776b303ae2843d2d4c3c9ff\": container with ID starting with 41fbe186961e909cadffeef9e30338ab8190687a2776b303ae2843d2d4c3c9ff not found: ID does not exist" containerID="41fbe186961e909cadffeef9e30338ab8190687a2776b303ae2843d2d4c3c9ff" Jan 20 18:19:21 crc kubenswrapper[4558]: I0120 18:19:21.493316 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"41fbe186961e909cadffeef9e30338ab8190687a2776b303ae2843d2d4c3c9ff"} err="failed to get container status \"41fbe186961e909cadffeef9e30338ab8190687a2776b303ae2843d2d4c3c9ff\": rpc error: code = NotFound desc = could not find container \"41fbe186961e909cadffeef9e30338ab8190687a2776b303ae2843d2d4c3c9ff\": container with ID starting with 41fbe186961e909cadffeef9e30338ab8190687a2776b303ae2843d2d4c3c9ff not found: ID does not exist" Jan 20 18:19:21 crc kubenswrapper[4558]: I0120 18:19:21.493343 4558 scope.go:117] "RemoveContainer" containerID="8bd822892dec859a58d3d65e60a9ffce0c8d0b2f0f2210180163d24bee85092f" Jan 20 18:19:21 crc kubenswrapper[4558]: E0120 18:19:21.493679 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8bd822892dec859a58d3d65e60a9ffce0c8d0b2f0f2210180163d24bee85092f\": container with ID starting with 8bd822892dec859a58d3d65e60a9ffce0c8d0b2f0f2210180163d24bee85092f not found: ID does not exist" containerID="8bd822892dec859a58d3d65e60a9ffce0c8d0b2f0f2210180163d24bee85092f" Jan 20 18:19:21 crc kubenswrapper[4558]: I0120 18:19:21.493700 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8bd822892dec859a58d3d65e60a9ffce0c8d0b2f0f2210180163d24bee85092f"} err="failed to get container status \"8bd822892dec859a58d3d65e60a9ffce0c8d0b2f0f2210180163d24bee85092f\": rpc error: code = NotFound desc = could not find container \"8bd822892dec859a58d3d65e60a9ffce0c8d0b2f0f2210180163d24bee85092f\": container with ID starting with 8bd822892dec859a58d3d65e60a9ffce0c8d0b2f0f2210180163d24bee85092f not found: ID does not exist" Jan 20 18:19:21 crc kubenswrapper[4558]: I0120 18:19:21.521976 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/4a5a6576-dc07-4211-8b81-a53986f07041-config-data-default\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:21 crc kubenswrapper[4558]: I0120 18:19:21.522010 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/4a5a6576-dc07-4211-8b81-a53986f07041-config-data-generated\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:21 crc kubenswrapper[4558]: I0120 18:19:21.522022 4558 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/4a5a6576-dc07-4211-8b81-a53986f07041-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:21 crc kubenswrapper[4558]: I0120 18:19:21.522036 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/4a5a6576-dc07-4211-8b81-a53986f07041-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:21 crc kubenswrapper[4558]: I0120 18:19:21.522073 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Jan 20 18:19:21 crc kubenswrapper[4558]: I0120 18:19:21.522084 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wkrp4\" (UniqueName: \"kubernetes.io/projected/4a5a6576-dc07-4211-8b81-a53986f07041-kube-api-access-wkrp4\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:21 crc kubenswrapper[4558]: I0120 18:19:21.531986 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc" Jan 20 18:19:21 crc kubenswrapper[4558]: I0120 18:19:21.623264 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:21 crc kubenswrapper[4558]: I0120 18:19:21.721532 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["barbican-kuttl-tests/openstack-galera-0"] Jan 20 18:19:21 crc kubenswrapper[4558]: I0120 18:19:21.725947 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["barbican-kuttl-tests/openstack-galera-0"] Jan 20 18:19:22 crc kubenswrapper[4558]: I0120 18:19:22.329637 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-5ffd575849-prl45"] Jan 20 18:19:22 crc kubenswrapper[4558]: I0120 18:19:22.329993 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/keystone-operator-controller-manager-5ffd575849-prl45" podUID="0ddbae85-a332-464a-bd00-6c7f4fa4d7fd" containerName="manager" containerID="cri-o://2c0cf13207f9da18231641c153e6b2314767254ac9a5a084cd7ffdc00f84651c" gracePeriod=10 Jan 20 18:19:22 crc kubenswrapper[4558]: I0120 18:19:22.511653 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/keystone-operator-index-vck5c"] Jan 20 18:19:22 crc kubenswrapper[4558]: I0120 18:19:22.512502 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/keystone-operator-index-vck5c" podUID="c4024146-8e18-468e-b896-300502b433ee" containerName="registry-server" containerID="cri-o://e620e29b143a3e833483094798c90910b75ccd8ffc0b356ddcf5c9bf51c497f0" gracePeriod=30 Jan 20 18:19:22 crc kubenswrapper[4558]: I0120 18:19:22.561215 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a0698tzfm"] Jan 20 18:19:22 crc kubenswrapper[4558]: I0120 18:19:22.562416 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a0698tzfm"] Jan 20 18:19:22 crc kubenswrapper[4558]: I0120 18:19:22.566565 4558 scope.go:117] "RemoveContainer" containerID="27314e1a8af2e8df8ecd2e7a06cbb630afbfecd8ca2338a1f199cf92f982f581" Jan 20 18:19:22 crc kubenswrapper[4558]: E0120 18:19:22.566826 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:19:22 crc kubenswrapper[4558]: I0120 18:19:22.574869 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="06bc8238-a6fe-464c-a4ac-a62928c141c8" path="/var/lib/kubelet/pods/06bc8238-a6fe-464c-a4ac-a62928c141c8/volumes" Jan 20 18:19:22 crc kubenswrapper[4558]: I0120 18:19:22.575920 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4a5a6576-dc07-4211-8b81-a53986f07041" path="/var/lib/kubelet/pods/4a5a6576-dc07-4211-8b81-a53986f07041/volumes" Jan 20 18:19:22 crc kubenswrapper[4558]: I0120 18:19:22.576558 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="53d3a76d-e277-4efa-adbf-85d8e328caf6" path="/var/lib/kubelet/pods/53d3a76d-e277-4efa-adbf-85d8e328caf6/volumes" Jan 20 18:19:22 crc kubenswrapper[4558]: I0120 18:19:22.578683 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6c4ef399-fd08-45f3-af23-5e9faba4b549" path="/var/lib/kubelet/pods/6c4ef399-fd08-45f3-af23-5e9faba4b549/volumes" Jan 20 18:19:22 crc kubenswrapper[4558]: I0120 18:19:22.579152 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c9734bdf-87f9-4375-b720-ee2523a59f0e" path="/var/lib/kubelet/pods/c9734bdf-87f9-4375-b720-ee2523a59f0e/volumes" Jan 20 18:19:22 crc kubenswrapper[4558]: I0120 18:19:22.579645 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efc9936d-e76c-4750-914d-a0dde72ac9e2" path="/var/lib/kubelet/pods/efc9936d-e76c-4750-914d-a0dde72ac9e2/volumes" Jan 20 18:19:22 crc kubenswrapper[4558]: I0120 18:19:22.788375 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-5ffd575849-prl45" Jan 20 18:19:22 crc kubenswrapper[4558]: I0120 18:19:22.907976 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-index-vck5c" Jan 20 18:19:22 crc kubenswrapper[4558]: I0120 18:19:22.946572 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/0ddbae85-a332-464a-bd00-6c7f4fa4d7fd-webhook-cert\") pod \"0ddbae85-a332-464a-bd00-6c7f4fa4d7fd\" (UID: \"0ddbae85-a332-464a-bd00-6c7f4fa4d7fd\") " Jan 20 18:19:22 crc kubenswrapper[4558]: I0120 18:19:22.946614 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/0ddbae85-a332-464a-bd00-6c7f4fa4d7fd-apiservice-cert\") pod \"0ddbae85-a332-464a-bd00-6c7f4fa4d7fd\" (UID: \"0ddbae85-a332-464a-bd00-6c7f4fa4d7fd\") " Jan 20 18:19:22 crc kubenswrapper[4558]: I0120 18:19:22.946814 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rttxx\" (UniqueName: \"kubernetes.io/projected/0ddbae85-a332-464a-bd00-6c7f4fa4d7fd-kube-api-access-rttxx\") pod \"0ddbae85-a332-464a-bd00-6c7f4fa4d7fd\" (UID: \"0ddbae85-a332-464a-bd00-6c7f4fa4d7fd\") " Jan 20 18:19:22 crc kubenswrapper[4558]: I0120 18:19:22.953852 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0ddbae85-a332-464a-bd00-6c7f4fa4d7fd-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "0ddbae85-a332-464a-bd00-6c7f4fa4d7fd" (UID: "0ddbae85-a332-464a-bd00-6c7f4fa4d7fd"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:19:22 crc kubenswrapper[4558]: I0120 18:19:22.954304 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0ddbae85-a332-464a-bd00-6c7f4fa4d7fd-kube-api-access-rttxx" (OuterVolumeSpecName: "kube-api-access-rttxx") pod "0ddbae85-a332-464a-bd00-6c7f4fa4d7fd" (UID: "0ddbae85-a332-464a-bd00-6c7f4fa4d7fd"). InnerVolumeSpecName "kube-api-access-rttxx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:19:22 crc kubenswrapper[4558]: I0120 18:19:22.964569 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0ddbae85-a332-464a-bd00-6c7f4fa4d7fd-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "0ddbae85-a332-464a-bd00-6c7f4fa4d7fd" (UID: "0ddbae85-a332-464a-bd00-6c7f4fa4d7fd"). InnerVolumeSpecName "webhook-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:19:23 crc kubenswrapper[4558]: I0120 18:19:23.047835 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2tb82\" (UniqueName: \"kubernetes.io/projected/c4024146-8e18-468e-b896-300502b433ee-kube-api-access-2tb82\") pod \"c4024146-8e18-468e-b896-300502b433ee\" (UID: \"c4024146-8e18-468e-b896-300502b433ee\") " Jan 20 18:19:23 crc kubenswrapper[4558]: I0120 18:19:23.048312 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rttxx\" (UniqueName: \"kubernetes.io/projected/0ddbae85-a332-464a-bd00-6c7f4fa4d7fd-kube-api-access-rttxx\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:23 crc kubenswrapper[4558]: I0120 18:19:23.048334 4558 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/0ddbae85-a332-464a-bd00-6c7f4fa4d7fd-webhook-cert\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:23 crc kubenswrapper[4558]: I0120 18:19:23.048344 4558 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/0ddbae85-a332-464a-bd00-6c7f4fa4d7fd-apiservice-cert\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:23 crc kubenswrapper[4558]: I0120 18:19:23.051290 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c4024146-8e18-468e-b896-300502b433ee-kube-api-access-2tb82" (OuterVolumeSpecName: "kube-api-access-2tb82") pod "c4024146-8e18-468e-b896-300502b433ee" (UID: "c4024146-8e18-468e-b896-300502b433ee"). InnerVolumeSpecName "kube-api-access-2tb82". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:19:23 crc kubenswrapper[4558]: I0120 18:19:23.151229 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2tb82\" (UniqueName: \"kubernetes.io/projected/c4024146-8e18-468e-b896-300502b433ee-kube-api-access-2tb82\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:23 crc kubenswrapper[4558]: I0120 18:19:23.411094 4558 generic.go:334] "Generic (PLEG): container finished" podID="0ddbae85-a332-464a-bd00-6c7f4fa4d7fd" containerID="2c0cf13207f9da18231641c153e6b2314767254ac9a5a084cd7ffdc00f84651c" exitCode=0 Jan 20 18:19:23 crc kubenswrapper[4558]: I0120 18:19:23.411180 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-5ffd575849-prl45" Jan 20 18:19:23 crc kubenswrapper[4558]: I0120 18:19:23.411204 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-5ffd575849-prl45" event={"ID":"0ddbae85-a332-464a-bd00-6c7f4fa4d7fd","Type":"ContainerDied","Data":"2c0cf13207f9da18231641c153e6b2314767254ac9a5a084cd7ffdc00f84651c"} Jan 20 18:19:23 crc kubenswrapper[4558]: I0120 18:19:23.412178 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-5ffd575849-prl45" event={"ID":"0ddbae85-a332-464a-bd00-6c7f4fa4d7fd","Type":"ContainerDied","Data":"8f9268bd5c2b042af875a42e0a442a41a6282e9de5181d0db699f42712a125ac"} Jan 20 18:19:23 crc kubenswrapper[4558]: I0120 18:19:23.412220 4558 scope.go:117] "RemoveContainer" containerID="2c0cf13207f9da18231641c153e6b2314767254ac9a5a084cd7ffdc00f84651c" Jan 20 18:19:23 crc kubenswrapper[4558]: I0120 18:19:23.413356 4558 generic.go:334] "Generic (PLEG): container finished" podID="c4024146-8e18-468e-b896-300502b433ee" containerID="e620e29b143a3e833483094798c90910b75ccd8ffc0b356ddcf5c9bf51c497f0" exitCode=0 Jan 20 18:19:23 crc kubenswrapper[4558]: I0120 18:19:23.413397 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-index-vck5c" Jan 20 18:19:23 crc kubenswrapper[4558]: I0120 18:19:23.413400 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-index-vck5c" event={"ID":"c4024146-8e18-468e-b896-300502b433ee","Type":"ContainerDied","Data":"e620e29b143a3e833483094798c90910b75ccd8ffc0b356ddcf5c9bf51c497f0"} Jan 20 18:19:23 crc kubenswrapper[4558]: I0120 18:19:23.413525 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-index-vck5c" event={"ID":"c4024146-8e18-468e-b896-300502b433ee","Type":"ContainerDied","Data":"6793d532d04f64156afa6173bd51e886485a371ce73936c784695e63469175b7"} Jan 20 18:19:23 crc kubenswrapper[4558]: I0120 18:19:23.430538 4558 scope.go:117] "RemoveContainer" containerID="2c0cf13207f9da18231641c153e6b2314767254ac9a5a084cd7ffdc00f84651c" Jan 20 18:19:23 crc kubenswrapper[4558]: E0120 18:19:23.430973 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2c0cf13207f9da18231641c153e6b2314767254ac9a5a084cd7ffdc00f84651c\": container with ID starting with 2c0cf13207f9da18231641c153e6b2314767254ac9a5a084cd7ffdc00f84651c not found: ID does not exist" containerID="2c0cf13207f9da18231641c153e6b2314767254ac9a5a084cd7ffdc00f84651c" Jan 20 18:19:23 crc kubenswrapper[4558]: I0120 18:19:23.431034 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2c0cf13207f9da18231641c153e6b2314767254ac9a5a084cd7ffdc00f84651c"} err="failed to get container status \"2c0cf13207f9da18231641c153e6b2314767254ac9a5a084cd7ffdc00f84651c\": rpc error: code = NotFound desc = could not find container \"2c0cf13207f9da18231641c153e6b2314767254ac9a5a084cd7ffdc00f84651c\": container with ID starting with 2c0cf13207f9da18231641c153e6b2314767254ac9a5a084cd7ffdc00f84651c not found: ID does not exist" Jan 20 18:19:23 crc kubenswrapper[4558]: I0120 18:19:23.431085 4558 scope.go:117] "RemoveContainer" containerID="e620e29b143a3e833483094798c90910b75ccd8ffc0b356ddcf5c9bf51c497f0" Jan 20 18:19:23 crc kubenswrapper[4558]: I0120 
18:19:23.448269 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-5ffd575849-prl45"] Jan 20 18:19:23 crc kubenswrapper[4558]: I0120 18:19:23.452311 4558 scope.go:117] "RemoveContainer" containerID="e620e29b143a3e833483094798c90910b75ccd8ffc0b356ddcf5c9bf51c497f0" Jan 20 18:19:23 crc kubenswrapper[4558]: E0120 18:19:23.452626 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e620e29b143a3e833483094798c90910b75ccd8ffc0b356ddcf5c9bf51c497f0\": container with ID starting with e620e29b143a3e833483094798c90910b75ccd8ffc0b356ddcf5c9bf51c497f0 not found: ID does not exist" containerID="e620e29b143a3e833483094798c90910b75ccd8ffc0b356ddcf5c9bf51c497f0" Jan 20 18:19:23 crc kubenswrapper[4558]: I0120 18:19:23.452648 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e620e29b143a3e833483094798c90910b75ccd8ffc0b356ddcf5c9bf51c497f0"} err="failed to get container status \"e620e29b143a3e833483094798c90910b75ccd8ffc0b356ddcf5c9bf51c497f0\": rpc error: code = NotFound desc = could not find container \"e620e29b143a3e833483094798c90910b75ccd8ffc0b356ddcf5c9bf51c497f0\": container with ID starting with e620e29b143a3e833483094798c90910b75ccd8ffc0b356ddcf5c9bf51c497f0 not found: ID does not exist" Jan 20 18:19:23 crc kubenswrapper[4558]: I0120 18:19:23.459125 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-5ffd575849-prl45"] Jan 20 18:19:23 crc kubenswrapper[4558]: I0120 18:19:23.473005 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/keystone-operator-index-vck5c"] Jan 20 18:19:23 crc kubenswrapper[4558]: I0120 18:19:23.474045 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/keystone-operator-index-vck5c"] Jan 20 18:19:23 crc kubenswrapper[4558]: I0120 18:19:23.547021 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-779fc9694b-q9nr2"] Jan 20 18:19:23 crc kubenswrapper[4558]: I0120 18:19:23.547340 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-q9nr2" podUID="4eb71fc6-5027-4724-9f33-24803c80be89" containerName="operator" containerID="cri-o://ab774e969d548c14158bd4f1015af2a1c44551bebcd6dc0277853d961054dffb" gracePeriod=10 Jan 20 18:19:23 crc kubenswrapper[4558]: I0120 18:19:23.613273 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-operators/keystone-operator-controller-manager-5ffd575849-prl45" podUID="0ddbae85-a332-464a-bd00-6c7f4fa4d7fd" containerName="manager" probeResult="failure" output="Get \"http://10.217.1.86:8081/readyz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Jan 20 18:19:23 crc kubenswrapper[4558]: I0120 18:19:23.759751 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-hc69l"] Jan 20 18:19:23 crc kubenswrapper[4558]: I0120 18:19:23.759975 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/rabbitmq-cluster-operator-index-hc69l" podUID="3ab16698-4434-44a4-a088-f72b20e45702" containerName="registry-server" containerID="cri-o://ac10494f58bebd5d096fb319a8cf050dd8255213b0e5c1fa3be6830e02f048c8" gracePeriod=30 Jan 20 18:19:23 crc kubenswrapper[4558]: I0120 18:19:23.785373 4558 
kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590wqlht"] Jan 20 18:19:23 crc kubenswrapper[4558]: I0120 18:19:23.788994 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590wqlht"] Jan 20 18:19:23 crc kubenswrapper[4558]: I0120 18:19:23.914338 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-q9nr2" Jan 20 18:19:24 crc kubenswrapper[4558]: I0120 18:19:24.066925 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qnbgm\" (UniqueName: \"kubernetes.io/projected/4eb71fc6-5027-4724-9f33-24803c80be89-kube-api-access-qnbgm\") pod \"4eb71fc6-5027-4724-9f33-24803c80be89\" (UID: \"4eb71fc6-5027-4724-9f33-24803c80be89\") " Jan 20 18:19:24 crc kubenswrapper[4558]: I0120 18:19:24.071448 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4eb71fc6-5027-4724-9f33-24803c80be89-kube-api-access-qnbgm" (OuterVolumeSpecName: "kube-api-access-qnbgm") pod "4eb71fc6-5027-4724-9f33-24803c80be89" (UID: "4eb71fc6-5027-4724-9f33-24803c80be89"). InnerVolumeSpecName "kube-api-access-qnbgm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:19:24 crc kubenswrapper[4558]: I0120 18:19:24.169905 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qnbgm\" (UniqueName: \"kubernetes.io/projected/4eb71fc6-5027-4724-9f33-24803c80be89-kube-api-access-qnbgm\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:24 crc kubenswrapper[4558]: I0120 18:19:24.173671 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-hc69l" Jan 20 18:19:24 crc kubenswrapper[4558]: I0120 18:19:24.271155 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8npf4\" (UniqueName: \"kubernetes.io/projected/3ab16698-4434-44a4-a088-f72b20e45702-kube-api-access-8npf4\") pod \"3ab16698-4434-44a4-a088-f72b20e45702\" (UID: \"3ab16698-4434-44a4-a088-f72b20e45702\") " Jan 20 18:19:24 crc kubenswrapper[4558]: I0120 18:19:24.280572 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab16698-4434-44a4-a088-f72b20e45702-kube-api-access-8npf4" (OuterVolumeSpecName: "kube-api-access-8npf4") pod "3ab16698-4434-44a4-a088-f72b20e45702" (UID: "3ab16698-4434-44a4-a088-f72b20e45702"). InnerVolumeSpecName "kube-api-access-8npf4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:19:24 crc kubenswrapper[4558]: I0120 18:19:24.373883 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8npf4\" (UniqueName: \"kubernetes.io/projected/3ab16698-4434-44a4-a088-f72b20e45702-kube-api-access-8npf4\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:24 crc kubenswrapper[4558]: I0120 18:19:24.424288 4558 generic.go:334] "Generic (PLEG): container finished" podID="4eb71fc6-5027-4724-9f33-24803c80be89" containerID="ab774e969d548c14158bd4f1015af2a1c44551bebcd6dc0277853d961054dffb" exitCode=0 Jan 20 18:19:24 crc kubenswrapper[4558]: I0120 18:19:24.424379 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-q9nr2" event={"ID":"4eb71fc6-5027-4724-9f33-24803c80be89","Type":"ContainerDied","Data":"ab774e969d548c14158bd4f1015af2a1c44551bebcd6dc0277853d961054dffb"} Jan 20 18:19:24 crc kubenswrapper[4558]: I0120 18:19:24.424422 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-q9nr2" event={"ID":"4eb71fc6-5027-4724-9f33-24803c80be89","Type":"ContainerDied","Data":"40174a9b4ca6560c5a6e0ee358f18a6b723d79fc4e065d8ce2d1ad3502d23f05"} Jan 20 18:19:24 crc kubenswrapper[4558]: I0120 18:19:24.424445 4558 scope.go:117] "RemoveContainer" containerID="ab774e969d548c14158bd4f1015af2a1c44551bebcd6dc0277853d961054dffb" Jan 20 18:19:24 crc kubenswrapper[4558]: I0120 18:19:24.424380 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-q9nr2" Jan 20 18:19:24 crc kubenswrapper[4558]: I0120 18:19:24.426865 4558 generic.go:334] "Generic (PLEG): container finished" podID="3ab16698-4434-44a4-a088-f72b20e45702" containerID="ac10494f58bebd5d096fb319a8cf050dd8255213b0e5c1fa3be6830e02f048c8" exitCode=0 Jan 20 18:19:24 crc kubenswrapper[4558]: I0120 18:19:24.426926 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-hc69l" event={"ID":"3ab16698-4434-44a4-a088-f72b20e45702","Type":"ContainerDied","Data":"ac10494f58bebd5d096fb319a8cf050dd8255213b0e5c1fa3be6830e02f048c8"} Jan 20 18:19:24 crc kubenswrapper[4558]: I0120 18:19:24.426954 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-hc69l" event={"ID":"3ab16698-4434-44a4-a088-f72b20e45702","Type":"ContainerDied","Data":"3486f5a2c60e3bac2ff8a7c22903b79f2557bd00d1e6f79bd3d34b392fd77753"} Jan 20 18:19:24 crc kubenswrapper[4558]: I0120 18:19:24.426980 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-hc69l" Jan 20 18:19:24 crc kubenswrapper[4558]: I0120 18:19:24.449053 4558 scope.go:117] "RemoveContainer" containerID="ab774e969d548c14158bd4f1015af2a1c44551bebcd6dc0277853d961054dffb" Jan 20 18:19:24 crc kubenswrapper[4558]: E0120 18:19:24.449611 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ab774e969d548c14158bd4f1015af2a1c44551bebcd6dc0277853d961054dffb\": container with ID starting with ab774e969d548c14158bd4f1015af2a1c44551bebcd6dc0277853d961054dffb not found: ID does not exist" containerID="ab774e969d548c14158bd4f1015af2a1c44551bebcd6dc0277853d961054dffb" Jan 20 18:19:24 crc kubenswrapper[4558]: I0120 18:19:24.449645 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ab774e969d548c14158bd4f1015af2a1c44551bebcd6dc0277853d961054dffb"} err="failed to get container status \"ab774e969d548c14158bd4f1015af2a1c44551bebcd6dc0277853d961054dffb\": rpc error: code = NotFound desc = could not find container \"ab774e969d548c14158bd4f1015af2a1c44551bebcd6dc0277853d961054dffb\": container with ID starting with ab774e969d548c14158bd4f1015af2a1c44551bebcd6dc0277853d961054dffb not found: ID does not exist" Jan 20 18:19:24 crc kubenswrapper[4558]: I0120 18:19:24.449676 4558 scope.go:117] "RemoveContainer" containerID="ac10494f58bebd5d096fb319a8cf050dd8255213b0e5c1fa3be6830e02f048c8" Jan 20 18:19:24 crc kubenswrapper[4558]: I0120 18:19:24.461435 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-779fc9694b-q9nr2"] Jan 20 18:19:24 crc kubenswrapper[4558]: I0120 18:19:24.465310 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-779fc9694b-q9nr2"] Jan 20 18:19:24 crc kubenswrapper[4558]: I0120 18:19:24.472217 4558 scope.go:117] "RemoveContainer" containerID="ac10494f58bebd5d096fb319a8cf050dd8255213b0e5c1fa3be6830e02f048c8" Jan 20 18:19:24 crc kubenswrapper[4558]: E0120 18:19:24.472555 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ac10494f58bebd5d096fb319a8cf050dd8255213b0e5c1fa3be6830e02f048c8\": container with ID starting with ac10494f58bebd5d096fb319a8cf050dd8255213b0e5c1fa3be6830e02f048c8 not found: ID does not exist" containerID="ac10494f58bebd5d096fb319a8cf050dd8255213b0e5c1fa3be6830e02f048c8" Jan 20 18:19:24 crc kubenswrapper[4558]: I0120 18:19:24.472590 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ac10494f58bebd5d096fb319a8cf050dd8255213b0e5c1fa3be6830e02f048c8"} err="failed to get container status \"ac10494f58bebd5d096fb319a8cf050dd8255213b0e5c1fa3be6830e02f048c8\": rpc error: code = NotFound desc = could not find container \"ac10494f58bebd5d096fb319a8cf050dd8255213b0e5c1fa3be6830e02f048c8\": container with ID starting with ac10494f58bebd5d096fb319a8cf050dd8255213b0e5c1fa3be6830e02f048c8 not found: ID does not exist" Jan 20 18:19:24 crc kubenswrapper[4558]: I0120 18:19:24.474249 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-hc69l"] Jan 20 18:19:24 crc kubenswrapper[4558]: I0120 18:19:24.482689 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-hc69l"] Jan 20 18:19:24 crc kubenswrapper[4558]: I0120 18:19:24.575872 4558 
kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0ddbae85-a332-464a-bd00-6c7f4fa4d7fd" path="/var/lib/kubelet/pods/0ddbae85-a332-464a-bd00-6c7f4fa4d7fd/volumes" Jan 20 18:19:24 crc kubenswrapper[4558]: I0120 18:19:24.576460 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="389aeea6-5bb0-4ac9-a5fd-5d6cbfa36a83" path="/var/lib/kubelet/pods/389aeea6-5bb0-4ac9-a5fd-5d6cbfa36a83/volumes" Jan 20 18:19:24 crc kubenswrapper[4558]: I0120 18:19:24.577061 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab16698-4434-44a4-a088-f72b20e45702" path="/var/lib/kubelet/pods/3ab16698-4434-44a4-a088-f72b20e45702/volumes" Jan 20 18:19:24 crc kubenswrapper[4558]: I0120 18:19:24.578044 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4eb71fc6-5027-4724-9f33-24803c80be89" path="/var/lib/kubelet/pods/4eb71fc6-5027-4724-9f33-24803c80be89/volumes" Jan 20 18:19:24 crc kubenswrapper[4558]: I0120 18:19:24.578488 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c4024146-8e18-468e-b896-300502b433ee" path="/var/lib/kubelet/pods/c4024146-8e18-468e-b896-300502b433ee/volumes" Jan 20 18:19:30 crc kubenswrapper[4558]: I0120 18:19:30.473619 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/infra-operator-controller-manager-85749d967b-zkbfg"] Jan 20 18:19:30 crc kubenswrapper[4558]: I0120 18:19:30.474116 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/infra-operator-controller-manager-85749d967b-zkbfg" podUID="2679558a-dc38-4dfb-a1f6-6b04731720b4" containerName="manager" containerID="cri-o://c7cbd5d8e0b18cf8dd45a3d65c56f7a53afda715d9aa103c60fd08bd0002f00e" gracePeriod=10 Jan 20 18:19:30 crc kubenswrapper[4558]: I0120 18:19:30.663096 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/infra-operator-index-p6pgw"] Jan 20 18:19:30 crc kubenswrapper[4558]: I0120 18:19:30.663317 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/infra-operator-index-p6pgw" podUID="86dc910c-5dc4-4de3-b279-df310dea30f8" containerName="registry-server" containerID="cri-o://b165a030a23ca666b365058d159549a088505333a43dd0b39980f6a4d53c03cd" gracePeriod=30 Jan 20 18:19:30 crc kubenswrapper[4558]: I0120 18:19:30.692949 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1jzwfw"] Jan 20 18:19:30 crc kubenswrapper[4558]: I0120 18:19:30.696406 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1jzwfw"] Jan 20 18:19:30 crc kubenswrapper[4558]: I0120 18:19:30.926703 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-85749d967b-zkbfg" Jan 20 18:19:31 crc kubenswrapper[4558]: I0120 18:19:31.024894 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-index-p6pgw" Jan 20 18:19:31 crc kubenswrapper[4558]: I0120 18:19:31.078495 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/2679558a-dc38-4dfb-a1f6-6b04731720b4-apiservice-cert\") pod \"2679558a-dc38-4dfb-a1f6-6b04731720b4\" (UID: \"2679558a-dc38-4dfb-a1f6-6b04731720b4\") " Jan 20 18:19:31 crc kubenswrapper[4558]: I0120 18:19:31.078683 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/2679558a-dc38-4dfb-a1f6-6b04731720b4-webhook-cert\") pod \"2679558a-dc38-4dfb-a1f6-6b04731720b4\" (UID: \"2679558a-dc38-4dfb-a1f6-6b04731720b4\") " Jan 20 18:19:31 crc kubenswrapper[4558]: I0120 18:19:31.078752 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f8lx4\" (UniqueName: \"kubernetes.io/projected/2679558a-dc38-4dfb-a1f6-6b04731720b4-kube-api-access-f8lx4\") pod \"2679558a-dc38-4dfb-a1f6-6b04731720b4\" (UID: \"2679558a-dc38-4dfb-a1f6-6b04731720b4\") " Jan 20 18:19:31 crc kubenswrapper[4558]: I0120 18:19:31.084372 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2679558a-dc38-4dfb-a1f6-6b04731720b4-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "2679558a-dc38-4dfb-a1f6-6b04731720b4" (UID: "2679558a-dc38-4dfb-a1f6-6b04731720b4"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:19:31 crc kubenswrapper[4558]: I0120 18:19:31.084393 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2679558a-dc38-4dfb-a1f6-6b04731720b4-kube-api-access-f8lx4" (OuterVolumeSpecName: "kube-api-access-f8lx4") pod "2679558a-dc38-4dfb-a1f6-6b04731720b4" (UID: "2679558a-dc38-4dfb-a1f6-6b04731720b4"). InnerVolumeSpecName "kube-api-access-f8lx4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:19:31 crc kubenswrapper[4558]: I0120 18:19:31.084548 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2679558a-dc38-4dfb-a1f6-6b04731720b4-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "2679558a-dc38-4dfb-a1f6-6b04731720b4" (UID: "2679558a-dc38-4dfb-a1f6-6b04731720b4"). InnerVolumeSpecName "webhook-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:19:31 crc kubenswrapper[4558]: I0120 18:19:31.180206 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-clwxv\" (UniqueName: \"kubernetes.io/projected/86dc910c-5dc4-4de3-b279-df310dea30f8-kube-api-access-clwxv\") pod \"86dc910c-5dc4-4de3-b279-df310dea30f8\" (UID: \"86dc910c-5dc4-4de3-b279-df310dea30f8\") " Jan 20 18:19:31 crc kubenswrapper[4558]: I0120 18:19:31.181088 4558 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/2679558a-dc38-4dfb-a1f6-6b04731720b4-apiservice-cert\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:31 crc kubenswrapper[4558]: I0120 18:19:31.181109 4558 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/2679558a-dc38-4dfb-a1f6-6b04731720b4-webhook-cert\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:31 crc kubenswrapper[4558]: I0120 18:19:31.181120 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f8lx4\" (UniqueName: \"kubernetes.io/projected/2679558a-dc38-4dfb-a1f6-6b04731720b4-kube-api-access-f8lx4\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:31 crc kubenswrapper[4558]: I0120 18:19:31.182999 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/86dc910c-5dc4-4de3-b279-df310dea30f8-kube-api-access-clwxv" (OuterVolumeSpecName: "kube-api-access-clwxv") pod "86dc910c-5dc4-4de3-b279-df310dea30f8" (UID: "86dc910c-5dc4-4de3-b279-df310dea30f8"). InnerVolumeSpecName "kube-api-access-clwxv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:19:31 crc kubenswrapper[4558]: I0120 18:19:31.282743 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-clwxv\" (UniqueName: \"kubernetes.io/projected/86dc910c-5dc4-4de3-b279-df310dea30f8-kube-api-access-clwxv\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:31 crc kubenswrapper[4558]: I0120 18:19:31.516865 4558 generic.go:334] "Generic (PLEG): container finished" podID="2679558a-dc38-4dfb-a1f6-6b04731720b4" containerID="c7cbd5d8e0b18cf8dd45a3d65c56f7a53afda715d9aa103c60fd08bd0002f00e" exitCode=0 Jan 20 18:19:31 crc kubenswrapper[4558]: I0120 18:19:31.516928 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-85749d967b-zkbfg" Jan 20 18:19:31 crc kubenswrapper[4558]: I0120 18:19:31.517660 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-85749d967b-zkbfg" event={"ID":"2679558a-dc38-4dfb-a1f6-6b04731720b4","Type":"ContainerDied","Data":"c7cbd5d8e0b18cf8dd45a3d65c56f7a53afda715d9aa103c60fd08bd0002f00e"} Jan 20 18:19:31 crc kubenswrapper[4558]: I0120 18:19:31.517761 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-85749d967b-zkbfg" event={"ID":"2679558a-dc38-4dfb-a1f6-6b04731720b4","Type":"ContainerDied","Data":"79de71d65573aef4d5b1e399dac4e75540f1de6c8881adc3f60d3f4f15966092"} Jan 20 18:19:31 crc kubenswrapper[4558]: I0120 18:19:31.517865 4558 scope.go:117] "RemoveContainer" containerID="c7cbd5d8e0b18cf8dd45a3d65c56f7a53afda715d9aa103c60fd08bd0002f00e" Jan 20 18:19:31 crc kubenswrapper[4558]: I0120 18:19:31.518432 4558 generic.go:334] "Generic (PLEG): container finished" podID="86dc910c-5dc4-4de3-b279-df310dea30f8" containerID="b165a030a23ca666b365058d159549a088505333a43dd0b39980f6a4d53c03cd" exitCode=0 Jan 20 18:19:31 crc kubenswrapper[4558]: I0120 18:19:31.518475 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-p6pgw" event={"ID":"86dc910c-5dc4-4de3-b279-df310dea30f8","Type":"ContainerDied","Data":"b165a030a23ca666b365058d159549a088505333a43dd0b39980f6a4d53c03cd"} Jan 20 18:19:31 crc kubenswrapper[4558]: I0120 18:19:31.518507 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-p6pgw" event={"ID":"86dc910c-5dc4-4de3-b279-df310dea30f8","Type":"ContainerDied","Data":"167bf43226b8bcd9d16e2e2bdf70d0a54a3d961c4ab45378cc1095bf508d2ed6"} Jan 20 18:19:31 crc kubenswrapper[4558]: I0120 18:19:31.518561 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-index-p6pgw" Jan 20 18:19:31 crc kubenswrapper[4558]: I0120 18:19:31.534776 4558 scope.go:117] "RemoveContainer" containerID="c7cbd5d8e0b18cf8dd45a3d65c56f7a53afda715d9aa103c60fd08bd0002f00e" Jan 20 18:19:31 crc kubenswrapper[4558]: E0120 18:19:31.535179 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c7cbd5d8e0b18cf8dd45a3d65c56f7a53afda715d9aa103c60fd08bd0002f00e\": container with ID starting with c7cbd5d8e0b18cf8dd45a3d65c56f7a53afda715d9aa103c60fd08bd0002f00e not found: ID does not exist" containerID="c7cbd5d8e0b18cf8dd45a3d65c56f7a53afda715d9aa103c60fd08bd0002f00e" Jan 20 18:19:31 crc kubenswrapper[4558]: I0120 18:19:31.535215 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c7cbd5d8e0b18cf8dd45a3d65c56f7a53afda715d9aa103c60fd08bd0002f00e"} err="failed to get container status \"c7cbd5d8e0b18cf8dd45a3d65c56f7a53afda715d9aa103c60fd08bd0002f00e\": rpc error: code = NotFound desc = could not find container \"c7cbd5d8e0b18cf8dd45a3d65c56f7a53afda715d9aa103c60fd08bd0002f00e\": container with ID starting with c7cbd5d8e0b18cf8dd45a3d65c56f7a53afda715d9aa103c60fd08bd0002f00e not found: ID does not exist" Jan 20 18:19:31 crc kubenswrapper[4558]: I0120 18:19:31.535237 4558 scope.go:117] "RemoveContainer" containerID="b165a030a23ca666b365058d159549a088505333a43dd0b39980f6a4d53c03cd" Jan 20 18:19:31 crc kubenswrapper[4558]: I0120 18:19:31.547913 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/infra-operator-controller-manager-85749d967b-zkbfg"] Jan 20 18:19:31 crc kubenswrapper[4558]: I0120 18:19:31.550156 4558 scope.go:117] "RemoveContainer" containerID="b165a030a23ca666b365058d159549a088505333a43dd0b39980f6a4d53c03cd" Jan 20 18:19:31 crc kubenswrapper[4558]: E0120 18:19:31.551811 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b165a030a23ca666b365058d159549a088505333a43dd0b39980f6a4d53c03cd\": container with ID starting with b165a030a23ca666b365058d159549a088505333a43dd0b39980f6a4d53c03cd not found: ID does not exist" containerID="b165a030a23ca666b365058d159549a088505333a43dd0b39980f6a4d53c03cd" Jan 20 18:19:31 crc kubenswrapper[4558]: I0120 18:19:31.551841 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b165a030a23ca666b365058d159549a088505333a43dd0b39980f6a4d53c03cd"} err="failed to get container status \"b165a030a23ca666b365058d159549a088505333a43dd0b39980f6a4d53c03cd\": rpc error: code = NotFound desc = could not find container \"b165a030a23ca666b365058d159549a088505333a43dd0b39980f6a4d53c03cd\": container with ID starting with b165a030a23ca666b365058d159549a088505333a43dd0b39980f6a4d53c03cd not found: ID does not exist" Jan 20 18:19:31 crc kubenswrapper[4558]: I0120 18:19:31.555518 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/infra-operator-controller-manager-85749d967b-zkbfg"] Jan 20 18:19:31 crc kubenswrapper[4558]: I0120 18:19:31.575532 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/infra-operator-index-p6pgw"] Jan 20 18:19:31 crc kubenswrapper[4558]: I0120 18:19:31.577796 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/infra-operator-index-p6pgw"] Jan 20 18:19:31 crc kubenswrapper[4558]: I0120 18:19:31.821636 4558 kubelet.go:2437] 
"SyncLoop DELETE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-bf56cd8fd-kclzq"] Jan 20 18:19:31 crc kubenswrapper[4558]: I0120 18:19:31.821882 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/mariadb-operator-controller-manager-bf56cd8fd-kclzq" podUID="25c7c232-6d31-4269-a2c2-e8fd3c518c47" containerName="manager" containerID="cri-o://d6ed4311e95b7fd12f2078100b9bf4ffbdbac97da3cf5889bb2ec94e474034fd" gracePeriod=10 Jan 20 18:19:32 crc kubenswrapper[4558]: I0120 18:19:32.000705 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/mariadb-operator-index-d7sg5"] Jan 20 18:19:32 crc kubenswrapper[4558]: I0120 18:19:32.000924 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/mariadb-operator-index-d7sg5" podUID="1be05cde-4e68-4010-9fbf-2c04288df80b" containerName="registry-server" containerID="cri-o://1e08fcfa4a1135613dedad048f0451ce11150e9b615614e993f8192e894401e2" gracePeriod=30 Jan 20 18:19:32 crc kubenswrapper[4558]: I0120 18:19:32.022856 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720brl7tf"] Jan 20 18:19:32 crc kubenswrapper[4558]: I0120 18:19:32.025917 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720brl7tf"] Jan 20 18:19:32 crc kubenswrapper[4558]: I0120 18:19:32.264422 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-bf56cd8fd-kclzq" Jan 20 18:19:32 crc kubenswrapper[4558]: I0120 18:19:32.376002 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-index-d7sg5" Jan 20 18:19:32 crc kubenswrapper[4558]: I0120 18:19:32.401605 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/25c7c232-6d31-4269-a2c2-e8fd3c518c47-apiservice-cert\") pod \"25c7c232-6d31-4269-a2c2-e8fd3c518c47\" (UID: \"25c7c232-6d31-4269-a2c2-e8fd3c518c47\") " Jan 20 18:19:32 crc kubenswrapper[4558]: I0120 18:19:32.401769 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/25c7c232-6d31-4269-a2c2-e8fd3c518c47-webhook-cert\") pod \"25c7c232-6d31-4269-a2c2-e8fd3c518c47\" (UID: \"25c7c232-6d31-4269-a2c2-e8fd3c518c47\") " Jan 20 18:19:32 crc kubenswrapper[4558]: I0120 18:19:32.401899 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dn2p8\" (UniqueName: \"kubernetes.io/projected/25c7c232-6d31-4269-a2c2-e8fd3c518c47-kube-api-access-dn2p8\") pod \"25c7c232-6d31-4269-a2c2-e8fd3c518c47\" (UID: \"25c7c232-6d31-4269-a2c2-e8fd3c518c47\") " Jan 20 18:19:32 crc kubenswrapper[4558]: I0120 18:19:32.409364 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25c7c232-6d31-4269-a2c2-e8fd3c518c47-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "25c7c232-6d31-4269-a2c2-e8fd3c518c47" (UID: "25c7c232-6d31-4269-a2c2-e8fd3c518c47"). InnerVolumeSpecName "webhook-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:19:32 crc kubenswrapper[4558]: I0120 18:19:32.409412 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25c7c232-6d31-4269-a2c2-e8fd3c518c47-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "25c7c232-6d31-4269-a2c2-e8fd3c518c47" (UID: "25c7c232-6d31-4269-a2c2-e8fd3c518c47"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:19:32 crc kubenswrapper[4558]: I0120 18:19:32.409553 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25c7c232-6d31-4269-a2c2-e8fd3c518c47-kube-api-access-dn2p8" (OuterVolumeSpecName: "kube-api-access-dn2p8") pod "25c7c232-6d31-4269-a2c2-e8fd3c518c47" (UID: "25c7c232-6d31-4269-a2c2-e8fd3c518c47"). InnerVolumeSpecName "kube-api-access-dn2p8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:19:32 crc kubenswrapper[4558]: I0120 18:19:32.503303 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gx29x\" (UniqueName: \"kubernetes.io/projected/1be05cde-4e68-4010-9fbf-2c04288df80b-kube-api-access-gx29x\") pod \"1be05cde-4e68-4010-9fbf-2c04288df80b\" (UID: \"1be05cde-4e68-4010-9fbf-2c04288df80b\") " Jan 20 18:19:32 crc kubenswrapper[4558]: I0120 18:19:32.503849 4558 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/25c7c232-6d31-4269-a2c2-e8fd3c518c47-apiservice-cert\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:32 crc kubenswrapper[4558]: I0120 18:19:32.503951 4558 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/25c7c232-6d31-4269-a2c2-e8fd3c518c47-webhook-cert\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:32 crc kubenswrapper[4558]: I0120 18:19:32.504025 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dn2p8\" (UniqueName: \"kubernetes.io/projected/25c7c232-6d31-4269-a2c2-e8fd3c518c47-kube-api-access-dn2p8\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:32 crc kubenswrapper[4558]: I0120 18:19:32.506464 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1be05cde-4e68-4010-9fbf-2c04288df80b-kube-api-access-gx29x" (OuterVolumeSpecName: "kube-api-access-gx29x") pod "1be05cde-4e68-4010-9fbf-2c04288df80b" (UID: "1be05cde-4e68-4010-9fbf-2c04288df80b"). InnerVolumeSpecName "kube-api-access-gx29x". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:19:32 crc kubenswrapper[4558]: I0120 18:19:32.531222 4558 generic.go:334] "Generic (PLEG): container finished" podID="1be05cde-4e68-4010-9fbf-2c04288df80b" containerID="1e08fcfa4a1135613dedad048f0451ce11150e9b615614e993f8192e894401e2" exitCode=0 Jan 20 18:19:32 crc kubenswrapper[4558]: I0120 18:19:32.531297 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-d7sg5" event={"ID":"1be05cde-4e68-4010-9fbf-2c04288df80b","Type":"ContainerDied","Data":"1e08fcfa4a1135613dedad048f0451ce11150e9b615614e993f8192e894401e2"} Jan 20 18:19:32 crc kubenswrapper[4558]: I0120 18:19:32.531394 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-d7sg5" event={"ID":"1be05cde-4e68-4010-9fbf-2c04288df80b","Type":"ContainerDied","Data":"037f3aa995c10b2fac55d3556af30dd35a730aa100d11451c528ad23b5314675"} Jan 20 18:19:32 crc kubenswrapper[4558]: I0120 18:19:32.531424 4558 scope.go:117] "RemoveContainer" containerID="1e08fcfa4a1135613dedad048f0451ce11150e9b615614e993f8192e894401e2" Jan 20 18:19:32 crc kubenswrapper[4558]: I0120 18:19:32.531335 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-index-d7sg5" Jan 20 18:19:32 crc kubenswrapper[4558]: I0120 18:19:32.535303 4558 generic.go:334] "Generic (PLEG): container finished" podID="25c7c232-6d31-4269-a2c2-e8fd3c518c47" containerID="d6ed4311e95b7fd12f2078100b9bf4ffbdbac97da3cf5889bb2ec94e474034fd" exitCode=0 Jan 20 18:19:32 crc kubenswrapper[4558]: I0120 18:19:32.535352 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-bf56cd8fd-kclzq" event={"ID":"25c7c232-6d31-4269-a2c2-e8fd3c518c47","Type":"ContainerDied","Data":"d6ed4311e95b7fd12f2078100b9bf4ffbdbac97da3cf5889bb2ec94e474034fd"} Jan 20 18:19:32 crc kubenswrapper[4558]: I0120 18:19:32.535384 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-bf56cd8fd-kclzq" event={"ID":"25c7c232-6d31-4269-a2c2-e8fd3c518c47","Type":"ContainerDied","Data":"577d0fcc4ecb9a02415f8f18ed70fe9eb367dc9860ce1dd4b8b3ecbf01c0a560"} Jan 20 18:19:32 crc kubenswrapper[4558]: I0120 18:19:32.535452 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-bf56cd8fd-kclzq" Jan 20 18:19:32 crc kubenswrapper[4558]: I0120 18:19:32.552452 4558 scope.go:117] "RemoveContainer" containerID="1e08fcfa4a1135613dedad048f0451ce11150e9b615614e993f8192e894401e2" Jan 20 18:19:32 crc kubenswrapper[4558]: E0120 18:19:32.553147 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1e08fcfa4a1135613dedad048f0451ce11150e9b615614e993f8192e894401e2\": container with ID starting with 1e08fcfa4a1135613dedad048f0451ce11150e9b615614e993f8192e894401e2 not found: ID does not exist" containerID="1e08fcfa4a1135613dedad048f0451ce11150e9b615614e993f8192e894401e2" Jan 20 18:19:32 crc kubenswrapper[4558]: I0120 18:19:32.553257 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1e08fcfa4a1135613dedad048f0451ce11150e9b615614e993f8192e894401e2"} err="failed to get container status \"1e08fcfa4a1135613dedad048f0451ce11150e9b615614e993f8192e894401e2\": rpc error: code = NotFound desc = could not find container \"1e08fcfa4a1135613dedad048f0451ce11150e9b615614e993f8192e894401e2\": container with ID starting with 1e08fcfa4a1135613dedad048f0451ce11150e9b615614e993f8192e894401e2 not found: ID does not exist" Jan 20 18:19:32 crc kubenswrapper[4558]: I0120 18:19:32.553291 4558 scope.go:117] "RemoveContainer" containerID="d6ed4311e95b7fd12f2078100b9bf4ffbdbac97da3cf5889bb2ec94e474034fd" Jan 20 18:19:32 crc kubenswrapper[4558]: I0120 18:19:32.564309 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/mariadb-operator-index-d7sg5"] Jan 20 18:19:32 crc kubenswrapper[4558]: I0120 18:19:32.577258 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2679558a-dc38-4dfb-a1f6-6b04731720b4" path="/var/lib/kubelet/pods/2679558a-dc38-4dfb-a1f6-6b04731720b4/volumes" Jan 20 18:19:32 crc kubenswrapper[4558]: I0120 18:19:32.577921 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4d06e4b6-901f-4a66-ad62-400734d17967" path="/var/lib/kubelet/pods/4d06e4b6-901f-4a66-ad62-400734d17967/volumes" Jan 20 18:19:32 crc kubenswrapper[4558]: I0120 18:19:32.578653 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="86dc910c-5dc4-4de3-b279-df310dea30f8" path="/var/lib/kubelet/pods/86dc910c-5dc4-4de3-b279-df310dea30f8/volumes" Jan 20 18:19:32 crc kubenswrapper[4558]: I0120 18:19:32.579825 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a057b440-fc08-4618-be07-956ccbe46af7" path="/var/lib/kubelet/pods/a057b440-fc08-4618-be07-956ccbe46af7/volumes" Jan 20 18:19:32 crc kubenswrapper[4558]: I0120 18:19:32.580578 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/mariadb-operator-index-d7sg5"] Jan 20 18:19:32 crc kubenswrapper[4558]: I0120 18:19:32.580628 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-bf56cd8fd-kclzq"] Jan 20 18:19:32 crc kubenswrapper[4558]: I0120 18:19:32.580645 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-bf56cd8fd-kclzq"] Jan 20 18:19:32 crc kubenswrapper[4558]: I0120 18:19:32.583628 4558 scope.go:117] "RemoveContainer" containerID="d6ed4311e95b7fd12f2078100b9bf4ffbdbac97da3cf5889bb2ec94e474034fd" Jan 20 18:19:32 crc kubenswrapper[4558]: E0120 18:19:32.583949 4558 log.go:32] "ContainerStatus from runtime 
service failed" err="rpc error: code = NotFound desc = could not find container \"d6ed4311e95b7fd12f2078100b9bf4ffbdbac97da3cf5889bb2ec94e474034fd\": container with ID starting with d6ed4311e95b7fd12f2078100b9bf4ffbdbac97da3cf5889bb2ec94e474034fd not found: ID does not exist" containerID="d6ed4311e95b7fd12f2078100b9bf4ffbdbac97da3cf5889bb2ec94e474034fd" Jan 20 18:19:32 crc kubenswrapper[4558]: I0120 18:19:32.583978 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d6ed4311e95b7fd12f2078100b9bf4ffbdbac97da3cf5889bb2ec94e474034fd"} err="failed to get container status \"d6ed4311e95b7fd12f2078100b9bf4ffbdbac97da3cf5889bb2ec94e474034fd\": rpc error: code = NotFound desc = could not find container \"d6ed4311e95b7fd12f2078100b9bf4ffbdbac97da3cf5889bb2ec94e474034fd\": container with ID starting with d6ed4311e95b7fd12f2078100b9bf4ffbdbac97da3cf5889bb2ec94e474034fd not found: ID does not exist" Jan 20 18:19:32 crc kubenswrapper[4558]: I0120 18:19:32.609186 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gx29x\" (UniqueName: \"kubernetes.io/projected/1be05cde-4e68-4010-9fbf-2c04288df80b-kube-api-access-gx29x\") on node \"crc\" DevicePath \"\"" Jan 20 18:19:33 crc kubenswrapper[4558]: I0120 18:19:33.566269 4558 scope.go:117] "RemoveContainer" containerID="27314e1a8af2e8df8ecd2e7a06cbb630afbfecd8ca2338a1f199cf92f982f581" Jan 20 18:19:33 crc kubenswrapper[4558]: E0120 18:19:33.566457 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:19:34 crc kubenswrapper[4558]: I0120 18:19:34.573879 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1be05cde-4e68-4010-9fbf-2c04288df80b" path="/var/lib/kubelet/pods/1be05cde-4e68-4010-9fbf-2c04288df80b/volumes" Jan 20 18:19:34 crc kubenswrapper[4558]: I0120 18:19:34.574879 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25c7c232-6d31-4269-a2c2-e8fd3c518c47" path="/var/lib/kubelet/pods/25c7c232-6d31-4269-a2c2-e8fd3c518c47/volumes" Jan 20 18:19:36 crc kubenswrapper[4558]: I0120 18:19:36.253975 4558 scope.go:117] "RemoveContainer" containerID="e053d239b73c5b870a9b515ba93f0c9e7c6de2c8e95b8fb1374ec803d1a01ee7" Jan 20 18:19:36 crc kubenswrapper[4558]: I0120 18:19:36.287818 4558 scope.go:117] "RemoveContainer" containerID="46f1035e9e52378a66569151ff15c3b3b2d4046ac1c97fbe5a83a6310941040a" Jan 20 18:19:45 crc kubenswrapper[4558]: I0120 18:19:45.565844 4558 scope.go:117] "RemoveContainer" containerID="27314e1a8af2e8df8ecd2e7a06cbb630afbfecd8ca2338a1f199cf92f982f581" Jan 20 18:19:45 crc kubenswrapper[4558]: E0120 18:19:45.567288 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:20:00 crc kubenswrapper[4558]: I0120 18:20:00.565785 4558 scope.go:117] "RemoveContainer" 
containerID="27314e1a8af2e8df8ecd2e7a06cbb630afbfecd8ca2338a1f199cf92f982f581" Jan 20 18:20:00 crc kubenswrapper[4558]: I0120 18:20:00.763101 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerStarted","Data":"ed78f04bb9083c901150aea9b4470bb3023518f531b71da44ae6842f9fba65fa"} Jan 20 18:20:13 crc kubenswrapper[4558]: I0120 18:20:13.843295 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-index-cdc54"] Jan 20 18:20:13 crc kubenswrapper[4558]: E0120 18:20:13.843979 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a5a6576-dc07-4211-8b81-a53986f07041" containerName="galera" Jan 20 18:20:13 crc kubenswrapper[4558]: I0120 18:20:13.843992 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a5a6576-dc07-4211-8b81-a53986f07041" containerName="galera" Jan 20 18:20:13 crc kubenswrapper[4558]: E0120 18:20:13.844003 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ddbae85-a332-464a-bd00-6c7f4fa4d7fd" containerName="manager" Jan 20 18:20:13 crc kubenswrapper[4558]: I0120 18:20:13.844009 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ddbae85-a332-464a-bd00-6c7f4fa4d7fd" containerName="manager" Jan 20 18:20:13 crc kubenswrapper[4558]: E0120 18:20:13.844019 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eaed2fe0-4c9e-4d02-ba03-b6f55fffad21" containerName="manager" Jan 20 18:20:13 crc kubenswrapper[4558]: I0120 18:20:13.844025 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="eaed2fe0-4c9e-4d02-ba03-b6f55fffad21" containerName="manager" Jan 20 18:20:13 crc kubenswrapper[4558]: E0120 18:20:13.844036 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53d3a76d-e277-4efa-adbf-85d8e328caf6" containerName="galera" Jan 20 18:20:13 crc kubenswrapper[4558]: I0120 18:20:13.844042 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="53d3a76d-e277-4efa-adbf-85d8e328caf6" containerName="galera" Jan 20 18:20:13 crc kubenswrapper[4558]: E0120 18:20:13.844048 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c946696-d624-4baf-824d-da41544b5240" containerName="keystone-api" Jan 20 18:20:13 crc kubenswrapper[4558]: I0120 18:20:13.844053 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c946696-d624-4baf-824d-da41544b5240" containerName="keystone-api" Jan 20 18:20:13 crc kubenswrapper[4558]: E0120 18:20:13.844062 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4eb71fc6-5027-4724-9f33-24803c80be89" containerName="operator" Jan 20 18:20:13 crc kubenswrapper[4558]: I0120 18:20:13.844067 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4eb71fc6-5027-4724-9f33-24803c80be89" containerName="operator" Jan 20 18:20:13 crc kubenswrapper[4558]: E0120 18:20:13.844078 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1be05cde-4e68-4010-9fbf-2c04288df80b" containerName="registry-server" Jan 20 18:20:13 crc kubenswrapper[4558]: I0120 18:20:13.844083 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1be05cde-4e68-4010-9fbf-2c04288df80b" containerName="registry-server" Jan 20 18:20:13 crc kubenswrapper[4558]: E0120 18:20:13.844092 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86dc910c-5dc4-4de3-b279-df310dea30f8" containerName="registry-server" Jan 20 18:20:13 crc kubenswrapper[4558]: I0120 18:20:13.844098 4558 
state_mem.go:107] "Deleted CPUSet assignment" podUID="86dc910c-5dc4-4de3-b279-df310dea30f8" containerName="registry-server" Jan 20 18:20:13 crc kubenswrapper[4558]: E0120 18:20:13.844106 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c9734bdf-87f9-4375-b720-ee2523a59f0e" containerName="mariadb-account-delete" Jan 20 18:20:13 crc kubenswrapper[4558]: I0120 18:20:13.844110 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c9734bdf-87f9-4375-b720-ee2523a59f0e" containerName="mariadb-account-delete" Jan 20 18:20:13 crc kubenswrapper[4558]: E0120 18:20:13.844119 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25c7c232-6d31-4269-a2c2-e8fd3c518c47" containerName="manager" Jan 20 18:20:13 crc kubenswrapper[4558]: I0120 18:20:13.844124 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="25c7c232-6d31-4269-a2c2-e8fd3c518c47" containerName="manager" Jan 20 18:20:13 crc kubenswrapper[4558]: E0120 18:20:13.844132 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="472c8b48-909b-484c-85cb-fb38c6b77c0f" containerName="memcached" Jan 20 18:20:13 crc kubenswrapper[4558]: I0120 18:20:13.844137 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="472c8b48-909b-484c-85cb-fb38c6b77c0f" containerName="memcached" Jan 20 18:20:13 crc kubenswrapper[4558]: E0120 18:20:13.844143 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a5a6576-dc07-4211-8b81-a53986f07041" containerName="mysql-bootstrap" Jan 20 18:20:13 crc kubenswrapper[4558]: I0120 18:20:13.844149 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a5a6576-dc07-4211-8b81-a53986f07041" containerName="mysql-bootstrap" Jan 20 18:20:13 crc kubenswrapper[4558]: E0120 18:20:13.844158 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de78cf98-2998-46d1-add9-6a03d25c8b2b" containerName="rabbitmq" Jan 20 18:20:13 crc kubenswrapper[4558]: I0120 18:20:13.844178 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="de78cf98-2998-46d1-add9-6a03d25c8b2b" containerName="rabbitmq" Jan 20 18:20:13 crc kubenswrapper[4558]: E0120 18:20:13.844191 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2679558a-dc38-4dfb-a1f6-6b04731720b4" containerName="manager" Jan 20 18:20:13 crc kubenswrapper[4558]: I0120 18:20:13.844197 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="2679558a-dc38-4dfb-a1f6-6b04731720b4" containerName="manager" Jan 20 18:20:13 crc kubenswrapper[4558]: E0120 18:20:13.844206 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4024146-8e18-468e-b896-300502b433ee" containerName="registry-server" Jan 20 18:20:13 crc kubenswrapper[4558]: I0120 18:20:13.844211 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4024146-8e18-468e-b896-300502b433ee" containerName="registry-server" Jan 20 18:20:13 crc kubenswrapper[4558]: E0120 18:20:13.844219 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2d67ae8-8d4b-4242-8c31-ad0ca9c3975c" containerName="registry-server" Jan 20 18:20:13 crc kubenswrapper[4558]: I0120 18:20:13.844225 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2d67ae8-8d4b-4242-8c31-ad0ca9c3975c" containerName="registry-server" Jan 20 18:20:13 crc kubenswrapper[4558]: E0120 18:20:13.844231 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="595cc6da-b53a-4a23-8e31-ed1dd616eb3e" containerName="galera" Jan 20 18:20:13 crc kubenswrapper[4558]: I0120 18:20:13.844235 4558 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="595cc6da-b53a-4a23-8e31-ed1dd616eb3e" containerName="galera" Jan 20 18:20:13 crc kubenswrapper[4558]: E0120 18:20:13.844245 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ab16698-4434-44a4-a088-f72b20e45702" containerName="registry-server" Jan 20 18:20:13 crc kubenswrapper[4558]: I0120 18:20:13.844250 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ab16698-4434-44a4-a088-f72b20e45702" containerName="registry-server" Jan 20 18:20:13 crc kubenswrapper[4558]: E0120 18:20:13.844258 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="595cc6da-b53a-4a23-8e31-ed1dd616eb3e" containerName="mysql-bootstrap" Jan 20 18:20:13 crc kubenswrapper[4558]: I0120 18:20:13.844263 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="595cc6da-b53a-4a23-8e31-ed1dd616eb3e" containerName="mysql-bootstrap" Jan 20 18:20:13 crc kubenswrapper[4558]: E0120 18:20:13.844271 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c9734bdf-87f9-4375-b720-ee2523a59f0e" containerName="mariadb-account-delete" Jan 20 18:20:13 crc kubenswrapper[4558]: I0120 18:20:13.844277 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c9734bdf-87f9-4375-b720-ee2523a59f0e" containerName="mariadb-account-delete" Jan 20 18:20:13 crc kubenswrapper[4558]: E0120 18:20:13.844287 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de78cf98-2998-46d1-add9-6a03d25c8b2b" containerName="setup-container" Jan 20 18:20:13 crc kubenswrapper[4558]: I0120 18:20:13.844292 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="de78cf98-2998-46d1-add9-6a03d25c8b2b" containerName="setup-container" Jan 20 18:20:13 crc kubenswrapper[4558]: E0120 18:20:13.844299 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53d3a76d-e277-4efa-adbf-85d8e328caf6" containerName="mysql-bootstrap" Jan 20 18:20:13 crc kubenswrapper[4558]: I0120 18:20:13.844304 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="53d3a76d-e277-4efa-adbf-85d8e328caf6" containerName="mysql-bootstrap" Jan 20 18:20:13 crc kubenswrapper[4558]: I0120 18:20:13.844393 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c4024146-8e18-468e-b896-300502b433ee" containerName="registry-server" Jan 20 18:20:13 crc kubenswrapper[4558]: I0120 18:20:13.844402 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="2679558a-dc38-4dfb-a1f6-6b04731720b4" containerName="manager" Jan 20 18:20:13 crc kubenswrapper[4558]: I0120 18:20:13.844409 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="53d3a76d-e277-4efa-adbf-85d8e328caf6" containerName="galera" Jan 20 18:20:13 crc kubenswrapper[4558]: I0120 18:20:13.844418 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="0ddbae85-a332-464a-bd00-6c7f4fa4d7fd" containerName="manager" Jan 20 18:20:13 crc kubenswrapper[4558]: I0120 18:20:13.844426 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c9734bdf-87f9-4375-b720-ee2523a59f0e" containerName="mariadb-account-delete" Jan 20 18:20:13 crc kubenswrapper[4558]: I0120 18:20:13.844434 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="eaed2fe0-4c9e-4d02-ba03-b6f55fffad21" containerName="manager" Jan 20 18:20:13 crc kubenswrapper[4558]: I0120 18:20:13.844442 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="25c7c232-6d31-4269-a2c2-e8fd3c518c47" containerName="manager" Jan 20 18:20:13 crc kubenswrapper[4558]: I0120 18:20:13.844447 4558 
memory_manager.go:354] "RemoveStaleState removing state" podUID="595cc6da-b53a-4a23-8e31-ed1dd616eb3e" containerName="galera" Jan 20 18:20:13 crc kubenswrapper[4558]: I0120 18:20:13.844457 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c946696-d624-4baf-824d-da41544b5240" containerName="keystone-api" Jan 20 18:20:13 crc kubenswrapper[4558]: I0120 18:20:13.844463 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="86dc910c-5dc4-4de3-b279-df310dea30f8" containerName="registry-server" Jan 20 18:20:13 crc kubenswrapper[4558]: I0120 18:20:13.844470 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a5a6576-dc07-4211-8b81-a53986f07041" containerName="galera" Jan 20 18:20:13 crc kubenswrapper[4558]: I0120 18:20:13.844475 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="3ab16698-4434-44a4-a088-f72b20e45702" containerName="registry-server" Jan 20 18:20:13 crc kubenswrapper[4558]: I0120 18:20:13.844481 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="4eb71fc6-5027-4724-9f33-24803c80be89" containerName="operator" Jan 20 18:20:13 crc kubenswrapper[4558]: I0120 18:20:13.844490 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="de78cf98-2998-46d1-add9-6a03d25c8b2b" containerName="rabbitmq" Jan 20 18:20:13 crc kubenswrapper[4558]: I0120 18:20:13.844502 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="1be05cde-4e68-4010-9fbf-2c04288df80b" containerName="registry-server" Jan 20 18:20:13 crc kubenswrapper[4558]: I0120 18:20:13.844509 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c2d67ae8-8d4b-4242-8c31-ad0ca9c3975c" containerName="registry-server" Jan 20 18:20:13 crc kubenswrapper[4558]: I0120 18:20:13.844517 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="472c8b48-909b-484c-85cb-fb38c6b77c0f" containerName="memcached" Jan 20 18:20:13 crc kubenswrapper[4558]: I0120 18:20:13.844898 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-index-cdc54" Jan 20 18:20:13 crc kubenswrapper[4558]: I0120 18:20:13.848412 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Jan 20 18:20:13 crc kubenswrapper[4558]: I0120 18:20:13.848927 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-index-dockercfg-2c4s5" Jan 20 18:20:13 crc kubenswrapper[4558]: I0120 18:20:13.848986 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Jan 20 18:20:13 crc kubenswrapper[4558]: I0120 18:20:13.854834 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-index-cdc54"] Jan 20 18:20:13 crc kubenswrapper[4558]: I0120 18:20:13.989699 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7wwwl\" (UniqueName: \"kubernetes.io/projected/9a79ee84-c642-4aeb-86e5-da222559cad3-kube-api-access-7wwwl\") pod \"mariadb-operator-index-cdc54\" (UID: \"9a79ee84-c642-4aeb-86e5-da222559cad3\") " pod="openstack-operators/mariadb-operator-index-cdc54" Jan 20 18:20:14 crc kubenswrapper[4558]: I0120 18:20:14.090951 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7wwwl\" (UniqueName: \"kubernetes.io/projected/9a79ee84-c642-4aeb-86e5-da222559cad3-kube-api-access-7wwwl\") pod \"mariadb-operator-index-cdc54\" (UID: \"9a79ee84-c642-4aeb-86e5-da222559cad3\") " pod="openstack-operators/mariadb-operator-index-cdc54" Jan 20 18:20:14 crc kubenswrapper[4558]: I0120 18:20:14.109869 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7wwwl\" (UniqueName: \"kubernetes.io/projected/9a79ee84-c642-4aeb-86e5-da222559cad3-kube-api-access-7wwwl\") pod \"mariadb-operator-index-cdc54\" (UID: \"9a79ee84-c642-4aeb-86e5-da222559cad3\") " pod="openstack-operators/mariadb-operator-index-cdc54" Jan 20 18:20:14 crc kubenswrapper[4558]: I0120 18:20:14.161898 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-index-cdc54" Jan 20 18:20:14 crc kubenswrapper[4558]: I0120 18:20:14.226059 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/mariadb-operator-index-cdc54"] Jan 20 18:20:14 crc kubenswrapper[4558]: I0120 18:20:14.563195 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/mariadb-operator-index-cdc54"] Jan 20 18:20:14 crc kubenswrapper[4558]: I0120 18:20:14.631957 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-index-pbh4r"] Jan 20 18:20:14 crc kubenswrapper[4558]: I0120 18:20:14.632503 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c9734bdf-87f9-4375-b720-ee2523a59f0e" containerName="mariadb-account-delete" Jan 20 18:20:14 crc kubenswrapper[4558]: I0120 18:20:14.633207 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-index-pbh4r" Jan 20 18:20:14 crc kubenswrapper[4558]: I0120 18:20:14.640411 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-index-pbh4r"] Jan 20 18:20:14 crc kubenswrapper[4558]: I0120 18:20:14.802066 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bmhgd\" (UniqueName: \"kubernetes.io/projected/eff4955d-0ba2-4419-9091-dd993572c188-kube-api-access-bmhgd\") pod \"mariadb-operator-index-pbh4r\" (UID: \"eff4955d-0ba2-4419-9091-dd993572c188\") " pod="openstack-operators/mariadb-operator-index-pbh4r" Jan 20 18:20:14 crc kubenswrapper[4558]: I0120 18:20:14.862736 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-cdc54" event={"ID":"9a79ee84-c642-4aeb-86e5-da222559cad3","Type":"ContainerStarted","Data":"49408c7985211f645d76632530cda4d34734afff27954c253eda2ca60cdfeada"} Jan 20 18:20:14 crc kubenswrapper[4558]: I0120 18:20:14.904535 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bmhgd\" (UniqueName: \"kubernetes.io/projected/eff4955d-0ba2-4419-9091-dd993572c188-kube-api-access-bmhgd\") pod \"mariadb-operator-index-pbh4r\" (UID: \"eff4955d-0ba2-4419-9091-dd993572c188\") " pod="openstack-operators/mariadb-operator-index-pbh4r" Jan 20 18:20:14 crc kubenswrapper[4558]: I0120 18:20:14.923525 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bmhgd\" (UniqueName: \"kubernetes.io/projected/eff4955d-0ba2-4419-9091-dd993572c188-kube-api-access-bmhgd\") pod \"mariadb-operator-index-pbh4r\" (UID: \"eff4955d-0ba2-4419-9091-dd993572c188\") " pod="openstack-operators/mariadb-operator-index-pbh4r" Jan 20 18:20:14 crc kubenswrapper[4558]: I0120 18:20:14.956429 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-index-pbh4r" Jan 20 18:20:15 crc kubenswrapper[4558]: I0120 18:20:15.339210 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-index-pbh4r"] Jan 20 18:20:15 crc kubenswrapper[4558]: W0120 18:20:15.355604 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podeff4955d_0ba2_4419_9091_dd993572c188.slice/crio-e3f0762a70f482a7cb7c5ba0090597aa172152eb45e1669bdd2b9bf5f9312e27 WatchSource:0}: Error finding container e3f0762a70f482a7cb7c5ba0090597aa172152eb45e1669bdd2b9bf5f9312e27: Status 404 returned error can't find the container with id e3f0762a70f482a7cb7c5ba0090597aa172152eb45e1669bdd2b9bf5f9312e27 Jan 20 18:20:15 crc kubenswrapper[4558]: I0120 18:20:15.874219 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-cdc54" event={"ID":"9a79ee84-c642-4aeb-86e5-da222559cad3","Type":"ContainerStarted","Data":"127c093eb9eecd0b28f7062c4348093259acc1ab5149c2ea04e2ab6352434e58"} Jan 20 18:20:15 crc kubenswrapper[4558]: I0120 18:20:15.874346 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/mariadb-operator-index-cdc54" podUID="9a79ee84-c642-4aeb-86e5-da222559cad3" containerName="registry-server" containerID="cri-o://127c093eb9eecd0b28f7062c4348093259acc1ab5149c2ea04e2ab6352434e58" gracePeriod=2 Jan 20 18:20:15 crc kubenswrapper[4558]: I0120 18:20:15.876301 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-pbh4r" event={"ID":"eff4955d-0ba2-4419-9091-dd993572c188","Type":"ContainerStarted","Data":"e3f0762a70f482a7cb7c5ba0090597aa172152eb45e1669bdd2b9bf5f9312e27"} Jan 20 18:20:15 crc kubenswrapper[4558]: I0120 18:20:15.894202 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-index-cdc54" podStartSLOduration=2.20728366 podStartE2EDuration="2.89418259s" podCreationTimestamp="2026-01-20 18:20:13 +0000 UTC" firstStartedPulling="2026-01-20 18:20:14.572747464 +0000 UTC m=+5908.333085431" lastFinishedPulling="2026-01-20 18:20:15.259646394 +0000 UTC m=+5909.019984361" observedRunningTime="2026-01-20 18:20:15.887417078 +0000 UTC m=+5909.647755056" watchObservedRunningTime="2026-01-20 18:20:15.89418259 +0000 UTC m=+5909.654520557" Jan 20 18:20:16 crc kubenswrapper[4558]: I0120 18:20:16.328899 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-index-cdc54" Jan 20 18:20:16 crc kubenswrapper[4558]: I0120 18:20:16.431520 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7wwwl\" (UniqueName: \"kubernetes.io/projected/9a79ee84-c642-4aeb-86e5-da222559cad3-kube-api-access-7wwwl\") pod \"9a79ee84-c642-4aeb-86e5-da222559cad3\" (UID: \"9a79ee84-c642-4aeb-86e5-da222559cad3\") " Jan 20 18:20:16 crc kubenswrapper[4558]: I0120 18:20:16.436638 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9a79ee84-c642-4aeb-86e5-da222559cad3-kube-api-access-7wwwl" (OuterVolumeSpecName: "kube-api-access-7wwwl") pod "9a79ee84-c642-4aeb-86e5-da222559cad3" (UID: "9a79ee84-c642-4aeb-86e5-da222559cad3"). InnerVolumeSpecName "kube-api-access-7wwwl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:20:16 crc kubenswrapper[4558]: I0120 18:20:16.533664 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7wwwl\" (UniqueName: \"kubernetes.io/projected/9a79ee84-c642-4aeb-86e5-da222559cad3-kube-api-access-7wwwl\") on node \"crc\" DevicePath \"\"" Jan 20 18:20:16 crc kubenswrapper[4558]: I0120 18:20:16.886151 4558 generic.go:334] "Generic (PLEG): container finished" podID="9a79ee84-c642-4aeb-86e5-da222559cad3" containerID="127c093eb9eecd0b28f7062c4348093259acc1ab5149c2ea04e2ab6352434e58" exitCode=0 Jan 20 18:20:16 crc kubenswrapper[4558]: I0120 18:20:16.886236 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-index-cdc54" Jan 20 18:20:16 crc kubenswrapper[4558]: I0120 18:20:16.886269 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-cdc54" event={"ID":"9a79ee84-c642-4aeb-86e5-da222559cad3","Type":"ContainerDied","Data":"127c093eb9eecd0b28f7062c4348093259acc1ab5149c2ea04e2ab6352434e58"} Jan 20 18:20:16 crc kubenswrapper[4558]: I0120 18:20:16.886315 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-cdc54" event={"ID":"9a79ee84-c642-4aeb-86e5-da222559cad3","Type":"ContainerDied","Data":"49408c7985211f645d76632530cda4d34734afff27954c253eda2ca60cdfeada"} Jan 20 18:20:16 crc kubenswrapper[4558]: I0120 18:20:16.886343 4558 scope.go:117] "RemoveContainer" containerID="127c093eb9eecd0b28f7062c4348093259acc1ab5149c2ea04e2ab6352434e58" Jan 20 18:20:16 crc kubenswrapper[4558]: I0120 18:20:16.889084 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-pbh4r" event={"ID":"eff4955d-0ba2-4419-9091-dd993572c188","Type":"ContainerStarted","Data":"eb7dea73309f3fd70529e505b3a7e84e6a34f2bcebf008857bf8b2bd822984ab"} Jan 20 18:20:16 crc kubenswrapper[4558]: I0120 18:20:16.908680 4558 scope.go:117] "RemoveContainer" containerID="127c093eb9eecd0b28f7062c4348093259acc1ab5149c2ea04e2ab6352434e58" Jan 20 18:20:16 crc kubenswrapper[4558]: E0120 18:20:16.909246 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"127c093eb9eecd0b28f7062c4348093259acc1ab5149c2ea04e2ab6352434e58\": container with ID starting with 127c093eb9eecd0b28f7062c4348093259acc1ab5149c2ea04e2ab6352434e58 not found: ID does not exist" containerID="127c093eb9eecd0b28f7062c4348093259acc1ab5149c2ea04e2ab6352434e58" Jan 20 18:20:16 crc kubenswrapper[4558]: I0120 18:20:16.909283 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"127c093eb9eecd0b28f7062c4348093259acc1ab5149c2ea04e2ab6352434e58"} err="failed to get container status \"127c093eb9eecd0b28f7062c4348093259acc1ab5149c2ea04e2ab6352434e58\": rpc error: code = NotFound desc = could not find container \"127c093eb9eecd0b28f7062c4348093259acc1ab5149c2ea04e2ab6352434e58\": container with ID starting with 127c093eb9eecd0b28f7062c4348093259acc1ab5149c2ea04e2ab6352434e58 not found: ID does not exist" Jan 20 18:20:16 crc kubenswrapper[4558]: I0120 18:20:16.917024 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-index-pbh4r" podStartSLOduration=2.327016275 podStartE2EDuration="2.91699982s" podCreationTimestamp="2026-01-20 18:20:14 +0000 UTC" firstStartedPulling="2026-01-20 18:20:15.35791326 +0000 UTC 
m=+5909.118251228" lastFinishedPulling="2026-01-20 18:20:15.947896806 +0000 UTC m=+5909.708234773" observedRunningTime="2026-01-20 18:20:16.909992876 +0000 UTC m=+5910.670330843" watchObservedRunningTime="2026-01-20 18:20:16.91699982 +0000 UTC m=+5910.677337787" Jan 20 18:20:16 crc kubenswrapper[4558]: I0120 18:20:16.923515 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/mariadb-operator-index-cdc54"] Jan 20 18:20:16 crc kubenswrapper[4558]: I0120 18:20:16.927701 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/mariadb-operator-index-cdc54"] Jan 20 18:20:18 crc kubenswrapper[4558]: I0120 18:20:18.574407 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9a79ee84-c642-4aeb-86e5-da222559cad3" path="/var/lib/kubelet/pods/9a79ee84-c642-4aeb-86e5-da222559cad3/volumes" Jan 20 18:20:24 crc kubenswrapper[4558]: I0120 18:20:24.957316 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-index-pbh4r" Jan 20 18:20:24 crc kubenswrapper[4558]: I0120 18:20:24.958148 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/mariadb-operator-index-pbh4r" Jan 20 18:20:24 crc kubenswrapper[4558]: I0120 18:20:24.984578 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/mariadb-operator-index-pbh4r" Jan 20 18:20:25 crc kubenswrapper[4558]: I0120 18:20:25.992429 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-index-pbh4r" Jan 20 18:20:31 crc kubenswrapper[4558]: I0120 18:20:31.260672 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720bhxkps"] Jan 20 18:20:31 crc kubenswrapper[4558]: E0120 18:20:31.261255 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a79ee84-c642-4aeb-86e5-da222559cad3" containerName="registry-server" Jan 20 18:20:31 crc kubenswrapper[4558]: I0120 18:20:31.261270 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a79ee84-c642-4aeb-86e5-da222559cad3" containerName="registry-server" Jan 20 18:20:31 crc kubenswrapper[4558]: I0120 18:20:31.261390 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="9a79ee84-c642-4aeb-86e5-da222559cad3" containerName="registry-server" Jan 20 18:20:31 crc kubenswrapper[4558]: I0120 18:20:31.262308 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720bhxkps" Jan 20 18:20:31 crc kubenswrapper[4558]: I0120 18:20:31.264074 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-7xp5w" Jan 20 18:20:31 crc kubenswrapper[4558]: I0120 18:20:31.269967 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720bhxkps"] Jan 20 18:20:31 crc kubenswrapper[4558]: I0120 18:20:31.334648 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l675g\" (UniqueName: \"kubernetes.io/projected/ee676b7f-b767-4c0c-b131-367637d0d7da-kube-api-access-l675g\") pod \"a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720bhxkps\" (UID: \"ee676b7f-b767-4c0c-b131-367637d0d7da\") " pod="openstack-operators/a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720bhxkps" Jan 20 18:20:31 crc kubenswrapper[4558]: I0120 18:20:31.334825 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ee676b7f-b767-4c0c-b131-367637d0d7da-bundle\") pod \"a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720bhxkps\" (UID: \"ee676b7f-b767-4c0c-b131-367637d0d7da\") " pod="openstack-operators/a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720bhxkps" Jan 20 18:20:31 crc kubenswrapper[4558]: I0120 18:20:31.334889 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ee676b7f-b767-4c0c-b131-367637d0d7da-util\") pod \"a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720bhxkps\" (UID: \"ee676b7f-b767-4c0c-b131-367637d0d7da\") " pod="openstack-operators/a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720bhxkps" Jan 20 18:20:31 crc kubenswrapper[4558]: I0120 18:20:31.436975 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l675g\" (UniqueName: \"kubernetes.io/projected/ee676b7f-b767-4c0c-b131-367637d0d7da-kube-api-access-l675g\") pod \"a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720bhxkps\" (UID: \"ee676b7f-b767-4c0c-b131-367637d0d7da\") " pod="openstack-operators/a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720bhxkps" Jan 20 18:20:31 crc kubenswrapper[4558]: I0120 18:20:31.437157 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ee676b7f-b767-4c0c-b131-367637d0d7da-bundle\") pod \"a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720bhxkps\" (UID: \"ee676b7f-b767-4c0c-b131-367637d0d7da\") " pod="openstack-operators/a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720bhxkps" Jan 20 18:20:31 crc kubenswrapper[4558]: I0120 18:20:31.437239 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ee676b7f-b767-4c0c-b131-367637d0d7da-util\") pod \"a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720bhxkps\" (UID: \"ee676b7f-b767-4c0c-b131-367637d0d7da\") " pod="openstack-operators/a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720bhxkps" Jan 20 18:20:31 crc kubenswrapper[4558]: I0120 18:20:31.437858 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/ee676b7f-b767-4c0c-b131-367637d0d7da-bundle\") pod \"a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720bhxkps\" (UID: \"ee676b7f-b767-4c0c-b131-367637d0d7da\") " pod="openstack-operators/a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720bhxkps" Jan 20 18:20:31 crc kubenswrapper[4558]: I0120 18:20:31.437899 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ee676b7f-b767-4c0c-b131-367637d0d7da-util\") pod \"a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720bhxkps\" (UID: \"ee676b7f-b767-4c0c-b131-367637d0d7da\") " pod="openstack-operators/a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720bhxkps" Jan 20 18:20:31 crc kubenswrapper[4558]: I0120 18:20:31.458254 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l675g\" (UniqueName: \"kubernetes.io/projected/ee676b7f-b767-4c0c-b131-367637d0d7da-kube-api-access-l675g\") pod \"a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720bhxkps\" (UID: \"ee676b7f-b767-4c0c-b131-367637d0d7da\") " pod="openstack-operators/a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720bhxkps" Jan 20 18:20:31 crc kubenswrapper[4558]: I0120 18:20:31.587264 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720bhxkps" Jan 20 18:20:31 crc kubenswrapper[4558]: I0120 18:20:31.975793 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720bhxkps"] Jan 20 18:20:31 crc kubenswrapper[4558]: W0120 18:20:31.980432 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podee676b7f_b767_4c0c_b131_367637d0d7da.slice/crio-091eac017589e65fb7ddf3cd43c70c3fa6351e1d07f7f91d534a1957c27483b7 WatchSource:0}: Error finding container 091eac017589e65fb7ddf3cd43c70c3fa6351e1d07f7f91d534a1957c27483b7: Status 404 returned error can't find the container with id 091eac017589e65fb7ddf3cd43c70c3fa6351e1d07f7f91d534a1957c27483b7 Jan 20 18:20:32 crc kubenswrapper[4558]: I0120 18:20:32.021092 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720bhxkps" event={"ID":"ee676b7f-b767-4c0c-b131-367637d0d7da","Type":"ContainerStarted","Data":"091eac017589e65fb7ddf3cd43c70c3fa6351e1d07f7f91d534a1957c27483b7"} Jan 20 18:20:33 crc kubenswrapper[4558]: I0120 18:20:33.030298 4558 generic.go:334] "Generic (PLEG): container finished" podID="ee676b7f-b767-4c0c-b131-367637d0d7da" containerID="06d841c360d31f6671b5eda177973450a54a9f90fd709b0b8b7da92429406463" exitCode=0 Jan 20 18:20:33 crc kubenswrapper[4558]: I0120 18:20:33.030368 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720bhxkps" event={"ID":"ee676b7f-b767-4c0c-b131-367637d0d7da","Type":"ContainerDied","Data":"06d841c360d31f6671b5eda177973450a54a9f90fd709b0b8b7da92429406463"} Jan 20 18:20:34 crc kubenswrapper[4558]: I0120 18:20:34.041855 4558 generic.go:334] "Generic (PLEG): container finished" podID="ee676b7f-b767-4c0c-b131-367637d0d7da" containerID="2e29da524f592a697143c38bb0d212b939a7c38b4c08a93de96922c759e5eebc" exitCode=0 Jan 20 18:20:34 crc kubenswrapper[4558]: I0120 18:20:34.041975 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720bhxkps" event={"ID":"ee676b7f-b767-4c0c-b131-367637d0d7da","Type":"ContainerDied","Data":"2e29da524f592a697143c38bb0d212b939a7c38b4c08a93de96922c759e5eebc"} Jan 20 18:20:35 crc kubenswrapper[4558]: I0120 18:20:35.053876 4558 generic.go:334] "Generic (PLEG): container finished" podID="ee676b7f-b767-4c0c-b131-367637d0d7da" containerID="0a5c8461fb756193f036809020d399e4876cde6f4c0616cc6e432a0d968f3d8d" exitCode=0 Jan 20 18:20:35 crc kubenswrapper[4558]: I0120 18:20:35.053922 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720bhxkps" event={"ID":"ee676b7f-b767-4c0c-b131-367637d0d7da","Type":"ContainerDied","Data":"0a5c8461fb756193f036809020d399e4876cde6f4c0616cc6e432a0d968f3d8d"} Jan 20 18:20:36 crc kubenswrapper[4558]: I0120 18:20:36.283430 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720bhxkps" Jan 20 18:20:36 crc kubenswrapper[4558]: I0120 18:20:36.420549 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l675g\" (UniqueName: \"kubernetes.io/projected/ee676b7f-b767-4c0c-b131-367637d0d7da-kube-api-access-l675g\") pod \"ee676b7f-b767-4c0c-b131-367637d0d7da\" (UID: \"ee676b7f-b767-4c0c-b131-367637d0d7da\") " Jan 20 18:20:36 crc kubenswrapper[4558]: I0120 18:20:36.420615 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ee676b7f-b767-4c0c-b131-367637d0d7da-util\") pod \"ee676b7f-b767-4c0c-b131-367637d0d7da\" (UID: \"ee676b7f-b767-4c0c-b131-367637d0d7da\") " Jan 20 18:20:36 crc kubenswrapper[4558]: I0120 18:20:36.420638 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ee676b7f-b767-4c0c-b131-367637d0d7da-bundle\") pod \"ee676b7f-b767-4c0c-b131-367637d0d7da\" (UID: \"ee676b7f-b767-4c0c-b131-367637d0d7da\") " Jan 20 18:20:36 crc kubenswrapper[4558]: I0120 18:20:36.421746 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ee676b7f-b767-4c0c-b131-367637d0d7da-bundle" (OuterVolumeSpecName: "bundle") pod "ee676b7f-b767-4c0c-b131-367637d0d7da" (UID: "ee676b7f-b767-4c0c-b131-367637d0d7da"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:20:36 crc kubenswrapper[4558]: I0120 18:20:36.427199 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ee676b7f-b767-4c0c-b131-367637d0d7da-kube-api-access-l675g" (OuterVolumeSpecName: "kube-api-access-l675g") pod "ee676b7f-b767-4c0c-b131-367637d0d7da" (UID: "ee676b7f-b767-4c0c-b131-367637d0d7da"). InnerVolumeSpecName "kube-api-access-l675g". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:20:36 crc kubenswrapper[4558]: I0120 18:20:36.432522 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ee676b7f-b767-4c0c-b131-367637d0d7da-util" (OuterVolumeSpecName: "util") pod "ee676b7f-b767-4c0c-b131-367637d0d7da" (UID: "ee676b7f-b767-4c0c-b131-367637d0d7da"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:20:36 crc kubenswrapper[4558]: I0120 18:20:36.523018 4558 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/ee676b7f-b767-4c0c-b131-367637d0d7da-util\") on node \"crc\" DevicePath \"\"" Jan 20 18:20:36 crc kubenswrapper[4558]: I0120 18:20:36.523055 4558 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/ee676b7f-b767-4c0c-b131-367637d0d7da-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:20:36 crc kubenswrapper[4558]: I0120 18:20:36.523070 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l675g\" (UniqueName: \"kubernetes.io/projected/ee676b7f-b767-4c0c-b131-367637d0d7da-kube-api-access-l675g\") on node \"crc\" DevicePath \"\"" Jan 20 18:20:37 crc kubenswrapper[4558]: I0120 18:20:37.072561 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720bhxkps" event={"ID":"ee676b7f-b767-4c0c-b131-367637d0d7da","Type":"ContainerDied","Data":"091eac017589e65fb7ddf3cd43c70c3fa6351e1d07f7f91d534a1957c27483b7"} Jan 20 18:20:37 crc kubenswrapper[4558]: I0120 18:20:37.073013 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="091eac017589e65fb7ddf3cd43c70c3fa6351e1d07f7f91d534a1957c27483b7" Jan 20 18:20:37 crc kubenswrapper[4558]: I0120 18:20:37.072635 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720bhxkps" Jan 20 18:20:48 crc kubenswrapper[4558]: I0120 18:20:48.588082 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-5688b8d95d-l7tf9"] Jan 20 18:20:48 crc kubenswrapper[4558]: E0120 18:20:48.588909 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee676b7f-b767-4c0c-b131-367637d0d7da" containerName="pull" Jan 20 18:20:48 crc kubenswrapper[4558]: I0120 18:20:48.588924 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee676b7f-b767-4c0c-b131-367637d0d7da" containerName="pull" Jan 20 18:20:48 crc kubenswrapper[4558]: E0120 18:20:48.588938 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee676b7f-b767-4c0c-b131-367637d0d7da" containerName="extract" Jan 20 18:20:48 crc kubenswrapper[4558]: I0120 18:20:48.588954 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee676b7f-b767-4c0c-b131-367637d0d7da" containerName="extract" Jan 20 18:20:48 crc kubenswrapper[4558]: E0120 18:20:48.588966 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee676b7f-b767-4c0c-b131-367637d0d7da" containerName="util" Jan 20 18:20:48 crc kubenswrapper[4558]: I0120 18:20:48.588972 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee676b7f-b767-4c0c-b131-367637d0d7da" containerName="util" Jan 20 18:20:48 crc kubenswrapper[4558]: I0120 18:20:48.589113 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ee676b7f-b767-4c0c-b131-367637d0d7da" containerName="extract" Jan 20 18:20:48 crc kubenswrapper[4558]: I0120 18:20:48.589585 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-5688b8d95d-l7tf9" Jan 20 18:20:48 crc kubenswrapper[4558]: I0120 18:20:48.591898 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Jan 20 18:20:48 crc kubenswrapper[4558]: I0120 18:20:48.592314 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-service-cert" Jan 20 18:20:48 crc kubenswrapper[4558]: I0120 18:20:48.592405 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-rrs4z" Jan 20 18:20:48 crc kubenswrapper[4558]: I0120 18:20:48.602248 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-5688b8d95d-l7tf9"] Jan 20 18:20:48 crc kubenswrapper[4558]: I0120 18:20:48.614171 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/f5670489-0e74-40ed-840e-af9195e82cb8-apiservice-cert\") pod \"mariadb-operator-controller-manager-5688b8d95d-l7tf9\" (UID: \"f5670489-0e74-40ed-840e-af9195e82cb8\") " pod="openstack-operators/mariadb-operator-controller-manager-5688b8d95d-l7tf9" Jan 20 18:20:48 crc kubenswrapper[4558]: I0120 18:20:48.614263 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7g6gt\" (UniqueName: \"kubernetes.io/projected/f5670489-0e74-40ed-840e-af9195e82cb8-kube-api-access-7g6gt\") pod \"mariadb-operator-controller-manager-5688b8d95d-l7tf9\" (UID: \"f5670489-0e74-40ed-840e-af9195e82cb8\") " pod="openstack-operators/mariadb-operator-controller-manager-5688b8d95d-l7tf9" Jan 20 18:20:48 crc kubenswrapper[4558]: I0120 18:20:48.614301 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/f5670489-0e74-40ed-840e-af9195e82cb8-webhook-cert\") pod \"mariadb-operator-controller-manager-5688b8d95d-l7tf9\" (UID: \"f5670489-0e74-40ed-840e-af9195e82cb8\") " pod="openstack-operators/mariadb-operator-controller-manager-5688b8d95d-l7tf9" Jan 20 18:20:48 crc kubenswrapper[4558]: I0120 18:20:48.715686 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/f5670489-0e74-40ed-840e-af9195e82cb8-apiservice-cert\") pod \"mariadb-operator-controller-manager-5688b8d95d-l7tf9\" (UID: \"f5670489-0e74-40ed-840e-af9195e82cb8\") " pod="openstack-operators/mariadb-operator-controller-manager-5688b8d95d-l7tf9" Jan 20 18:20:48 crc kubenswrapper[4558]: I0120 18:20:48.715808 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7g6gt\" (UniqueName: \"kubernetes.io/projected/f5670489-0e74-40ed-840e-af9195e82cb8-kube-api-access-7g6gt\") pod \"mariadb-operator-controller-manager-5688b8d95d-l7tf9\" (UID: \"f5670489-0e74-40ed-840e-af9195e82cb8\") " pod="openstack-operators/mariadb-operator-controller-manager-5688b8d95d-l7tf9" Jan 20 18:20:48 crc kubenswrapper[4558]: I0120 18:20:48.715848 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/f5670489-0e74-40ed-840e-af9195e82cb8-webhook-cert\") pod \"mariadb-operator-controller-manager-5688b8d95d-l7tf9\" (UID: \"f5670489-0e74-40ed-840e-af9195e82cb8\") 
" pod="openstack-operators/mariadb-operator-controller-manager-5688b8d95d-l7tf9" Jan 20 18:20:48 crc kubenswrapper[4558]: I0120 18:20:48.721706 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/f5670489-0e74-40ed-840e-af9195e82cb8-webhook-cert\") pod \"mariadb-operator-controller-manager-5688b8d95d-l7tf9\" (UID: \"f5670489-0e74-40ed-840e-af9195e82cb8\") " pod="openstack-operators/mariadb-operator-controller-manager-5688b8d95d-l7tf9" Jan 20 18:20:48 crc kubenswrapper[4558]: I0120 18:20:48.721760 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/f5670489-0e74-40ed-840e-af9195e82cb8-apiservice-cert\") pod \"mariadb-operator-controller-manager-5688b8d95d-l7tf9\" (UID: \"f5670489-0e74-40ed-840e-af9195e82cb8\") " pod="openstack-operators/mariadb-operator-controller-manager-5688b8d95d-l7tf9" Jan 20 18:20:48 crc kubenswrapper[4558]: I0120 18:20:48.737271 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7g6gt\" (UniqueName: \"kubernetes.io/projected/f5670489-0e74-40ed-840e-af9195e82cb8-kube-api-access-7g6gt\") pod \"mariadb-operator-controller-manager-5688b8d95d-l7tf9\" (UID: \"f5670489-0e74-40ed-840e-af9195e82cb8\") " pod="openstack-operators/mariadb-operator-controller-manager-5688b8d95d-l7tf9" Jan 20 18:20:48 crc kubenswrapper[4558]: I0120 18:20:48.907357 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-5688b8d95d-l7tf9" Jan 20 18:20:49 crc kubenswrapper[4558]: I0120 18:20:49.315386 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-5688b8d95d-l7tf9"] Jan 20 18:20:49 crc kubenswrapper[4558]: W0120 18:20:49.323053 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf5670489_0e74_40ed_840e_af9195e82cb8.slice/crio-4836e41cff3b5bf57951f09c0e48340606015e490f5b67705589fbd181a33212 WatchSource:0}: Error finding container 4836e41cff3b5bf57951f09c0e48340606015e490f5b67705589fbd181a33212: Status 404 returned error can't find the container with id 4836e41cff3b5bf57951f09c0e48340606015e490f5b67705589fbd181a33212 Jan 20 18:20:50 crc kubenswrapper[4558]: I0120 18:20:50.164942 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-5688b8d95d-l7tf9" event={"ID":"f5670489-0e74-40ed-840e-af9195e82cb8","Type":"ContainerStarted","Data":"eb59b04404dcdff3fada47ce0a239839d36bf3a15fe82bdfc183a2501e1fdbc0"} Jan 20 18:20:50 crc kubenswrapper[4558]: I0120 18:20:50.165426 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-5688b8d95d-l7tf9" Jan 20 18:20:50 crc kubenswrapper[4558]: I0120 18:20:50.165444 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-5688b8d95d-l7tf9" event={"ID":"f5670489-0e74-40ed-840e-af9195e82cb8","Type":"ContainerStarted","Data":"4836e41cff3b5bf57951f09c0e48340606015e490f5b67705589fbd181a33212"} Jan 20 18:20:50 crc kubenswrapper[4558]: I0120 18:20:50.197587 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-5688b8d95d-l7tf9" podStartSLOduration=2.197570839 podStartE2EDuration="2.197570839s" 
podCreationTimestamp="2026-01-20 18:20:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:20:50.179224392 +0000 UTC m=+5943.939562360" watchObservedRunningTime="2026-01-20 18:20:50.197570839 +0000 UTC m=+5943.957908796" Jan 20 18:20:58 crc kubenswrapper[4558]: I0120 18:20:58.912440 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-5688b8d95d-l7tf9" Jan 20 18:21:06 crc kubenswrapper[4558]: I0120 18:21:06.231017 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-index-cfhl6"] Jan 20 18:21:06 crc kubenswrapper[4558]: I0120 18:21:06.232725 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-index-cfhl6" Jan 20 18:21:06 crc kubenswrapper[4558]: I0120 18:21:06.234920 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-index-dockercfg-vlqk7" Jan 20 18:21:06 crc kubenswrapper[4558]: I0120 18:21:06.242334 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-index-cfhl6"] Jan 20 18:21:06 crc kubenswrapper[4558]: I0120 18:21:06.369776 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jvdd4\" (UniqueName: \"kubernetes.io/projected/22bb144c-7b65-4d05-b1f6-e15ccb620de3-kube-api-access-jvdd4\") pod \"infra-operator-index-cfhl6\" (UID: \"22bb144c-7b65-4d05-b1f6-e15ccb620de3\") " pod="openstack-operators/infra-operator-index-cfhl6" Jan 20 18:21:06 crc kubenswrapper[4558]: I0120 18:21:06.472719 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jvdd4\" (UniqueName: \"kubernetes.io/projected/22bb144c-7b65-4d05-b1f6-e15ccb620de3-kube-api-access-jvdd4\") pod \"infra-operator-index-cfhl6\" (UID: \"22bb144c-7b65-4d05-b1f6-e15ccb620de3\") " pod="openstack-operators/infra-operator-index-cfhl6" Jan 20 18:21:06 crc kubenswrapper[4558]: I0120 18:21:06.491685 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jvdd4\" (UniqueName: \"kubernetes.io/projected/22bb144c-7b65-4d05-b1f6-e15ccb620de3-kube-api-access-jvdd4\") pod \"infra-operator-index-cfhl6\" (UID: \"22bb144c-7b65-4d05-b1f6-e15ccb620de3\") " pod="openstack-operators/infra-operator-index-cfhl6" Jan 20 18:21:06 crc kubenswrapper[4558]: I0120 18:21:06.553119 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-index-cfhl6" Jan 20 18:21:06 crc kubenswrapper[4558]: I0120 18:21:06.921125 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-index-cfhl6"] Jan 20 18:21:06 crc kubenswrapper[4558]: I0120 18:21:06.928501 4558 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 20 18:21:07 crc kubenswrapper[4558]: I0120 18:21:07.302273 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-cfhl6" event={"ID":"22bb144c-7b65-4d05-b1f6-e15ccb620de3","Type":"ContainerStarted","Data":"e3d1ac71d21397dc9351e328596cf8e98310b704f2595924c691c5cb5e7e80dd"} Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.309596 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-cfhl6" event={"ID":"22bb144c-7b65-4d05-b1f6-e15ccb620de3","Type":"ContainerStarted","Data":"99131e87ee51e530c20a29665eee45804eb9dd2e282408b09120f7146b017aca"} Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.329656 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/openstack-galera-0"] Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.330622 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/openstack-galera-0" Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.333826 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"keystone-kuttl-tests"/"openshift-service-ca.crt" Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.335339 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"keystone-kuttl-tests"/"openstack-scripts" Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.335571 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"galera-openstack-dockercfg-vbp4b" Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.335632 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"keystone-kuttl-tests"/"openstack-config-data" Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.335915 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"keystone-kuttl-tests"/"kube-root-ca.crt" Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.340627 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-index-cfhl6" podStartSLOduration=1.672170524 podStartE2EDuration="2.340607563s" podCreationTimestamp="2026-01-20 18:21:06 +0000 UTC" firstStartedPulling="2026-01-20 18:21:06.928296618 +0000 UTC m=+5960.688634584" lastFinishedPulling="2026-01-20 18:21:07.596733656 +0000 UTC m=+5961.357071623" observedRunningTime="2026-01-20 18:21:08.335328247 +0000 UTC m=+5962.095666214" watchObservedRunningTime="2026-01-20 18:21:08.340607563 +0000 UTC m=+5962.100945530" Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.346923 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/openstack-galera-1"] Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.348050 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/openstack-galera-1" Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.356041 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/openstack-galera-0"] Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.359238 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/openstack-galera-1"] Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.362119 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/openstack-galera-2"] Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.363132 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/openstack-galera-2" Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.386052 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/openstack-galera-2"] Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.508175 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/160b737f-7377-4d3a-8fff-3b96b9a614bc-operator-scripts\") pod \"openstack-galera-1\" (UID: \"160b737f-7377-4d3a-8fff-3b96b9a614bc\") " pod="keystone-kuttl-tests/openstack-galera-1" Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.508228 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"openstack-galera-1\" (UID: \"160b737f-7377-4d3a-8fff-3b96b9a614bc\") " pod="keystone-kuttl-tests/openstack-galera-1" Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.508261 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/3b3e4088-de47-4e50-b0a9-ad92c99d033d-config-data-default\") pod \"openstack-galera-2\" (UID: \"3b3e4088-de47-4e50-b0a9-ad92c99d033d\") " pod="keystone-kuttl-tests/openstack-galera-2" Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.508300 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2njxt\" (UniqueName: \"kubernetes.io/projected/87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8-kube-api-access-2njxt\") pod \"openstack-galera-0\" (UID: \"87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8\") " pod="keystone-kuttl-tests/openstack-galera-0" Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.508365 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8-config-data-default\") pod \"openstack-galera-0\" (UID: \"87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8\") " pod="keystone-kuttl-tests/openstack-galera-0" Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.508387 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/3b3e4088-de47-4e50-b0a9-ad92c99d033d-config-data-generated\") pod \"openstack-galera-2\" (UID: \"3b3e4088-de47-4e50-b0a9-ad92c99d033d\") " pod="keystone-kuttl-tests/openstack-galera-2" Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.508409 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/3b3e4088-de47-4e50-b0a9-ad92c99d033d-operator-scripts\") pod \"openstack-galera-2\" (UID: \"3b3e4088-de47-4e50-b0a9-ad92c99d033d\") " pod="keystone-kuttl-tests/openstack-galera-2" Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.508429 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/160b737f-7377-4d3a-8fff-3b96b9a614bc-kolla-config\") pod \"openstack-galera-1\" (UID: \"160b737f-7377-4d3a-8fff-3b96b9a614bc\") " pod="keystone-kuttl-tests/openstack-galera-1" Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.508455 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nfflj\" (UniqueName: \"kubernetes.io/projected/3b3e4088-de47-4e50-b0a9-ad92c99d033d-kube-api-access-nfflj\") pod \"openstack-galera-2\" (UID: \"3b3e4088-de47-4e50-b0a9-ad92c99d033d\") " pod="keystone-kuttl-tests/openstack-galera-2" Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.508479 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/3b3e4088-de47-4e50-b0a9-ad92c99d033d-kolla-config\") pod \"openstack-galera-2\" (UID: \"3b3e4088-de47-4e50-b0a9-ad92c99d033d\") " pod="keystone-kuttl-tests/openstack-galera-2" Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.508501 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/160b737f-7377-4d3a-8fff-3b96b9a614bc-config-data-generated\") pod \"openstack-galera-1\" (UID: \"160b737f-7377-4d3a-8fff-3b96b9a614bc\") " pod="keystone-kuttl-tests/openstack-galera-1" Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.508522 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-2\" (UID: \"3b3e4088-de47-4e50-b0a9-ad92c99d033d\") " pod="keystone-kuttl-tests/openstack-galera-2" Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.508544 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tk2l7\" (UniqueName: \"kubernetes.io/projected/160b737f-7377-4d3a-8fff-3b96b9a614bc-kube-api-access-tk2l7\") pod \"openstack-galera-1\" (UID: \"160b737f-7377-4d3a-8fff-3b96b9a614bc\") " pod="keystone-kuttl-tests/openstack-galera-1" Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.508573 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8-kolla-config\") pod \"openstack-galera-0\" (UID: \"87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8\") " pod="keystone-kuttl-tests/openstack-galera-0" Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.508593 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-galera-0\" (UID: \"87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8\") " pod="keystone-kuttl-tests/openstack-galera-0" Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.508612 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8-operator-scripts\") pod \"openstack-galera-0\" (UID: \"87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8\") " pod="keystone-kuttl-tests/openstack-galera-0" Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.508630 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/160b737f-7377-4d3a-8fff-3b96b9a614bc-config-data-default\") pod \"openstack-galera-1\" (UID: \"160b737f-7377-4d3a-8fff-3b96b9a614bc\") " pod="keystone-kuttl-tests/openstack-galera-1" Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.508661 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8-config-data-generated\") pod \"openstack-galera-0\" (UID: \"87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8\") " pod="keystone-kuttl-tests/openstack-galera-0" Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.610147 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-2\" (UID: \"3b3e4088-de47-4e50-b0a9-ad92c99d033d\") " pod="keystone-kuttl-tests/openstack-galera-2" Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.610264 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tk2l7\" (UniqueName: \"kubernetes.io/projected/160b737f-7377-4d3a-8fff-3b96b9a614bc-kube-api-access-tk2l7\") pod \"openstack-galera-1\" (UID: \"160b737f-7377-4d3a-8fff-3b96b9a614bc\") " pod="keystone-kuttl-tests/openstack-galera-1" Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.610310 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8-kolla-config\") pod \"openstack-galera-0\" (UID: \"87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8\") " pod="keystone-kuttl-tests/openstack-galera-0" Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.610342 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-galera-0\" (UID: \"87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8\") " pod="keystone-kuttl-tests/openstack-galera-0" Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.610372 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8-operator-scripts\") pod \"openstack-galera-0\" (UID: \"87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8\") " pod="keystone-kuttl-tests/openstack-galera-0" Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.610397 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/160b737f-7377-4d3a-8fff-3b96b9a614bc-config-data-default\") pod \"openstack-galera-1\" (UID: \"160b737f-7377-4d3a-8fff-3b96b9a614bc\") " pod="keystone-kuttl-tests/openstack-galera-1" Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.610464 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: 
\"kubernetes.io/empty-dir/87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8-config-data-generated\") pod \"openstack-galera-0\" (UID: \"87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8\") " pod="keystone-kuttl-tests/openstack-galera-0" Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.610501 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/160b737f-7377-4d3a-8fff-3b96b9a614bc-operator-scripts\") pod \"openstack-galera-1\" (UID: \"160b737f-7377-4d3a-8fff-3b96b9a614bc\") " pod="keystone-kuttl-tests/openstack-galera-1" Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.610535 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"openstack-galera-1\" (UID: \"160b737f-7377-4d3a-8fff-3b96b9a614bc\") " pod="keystone-kuttl-tests/openstack-galera-1" Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.610573 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/3b3e4088-de47-4e50-b0a9-ad92c99d033d-config-data-default\") pod \"openstack-galera-2\" (UID: \"3b3e4088-de47-4e50-b0a9-ad92c99d033d\") " pod="keystone-kuttl-tests/openstack-galera-2" Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.610597 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2njxt\" (UniqueName: \"kubernetes.io/projected/87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8-kube-api-access-2njxt\") pod \"openstack-galera-0\" (UID: \"87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8\") " pod="keystone-kuttl-tests/openstack-galera-0" Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.610676 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"openstack-galera-1\" (UID: \"160b737f-7377-4d3a-8fff-3b96b9a614bc\") device mount path \"/mnt/openstack/pv15\"" pod="keystone-kuttl-tests/openstack-galera-1" Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.610587 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-galera-0\" (UID: \"87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8\") device mount path \"/mnt/openstack/pv10\"" pod="keystone-kuttl-tests/openstack-galera-0" Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.611110 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8-config-data-default\") pod \"openstack-galera-0\" (UID: \"87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8\") " pod="keystone-kuttl-tests/openstack-galera-0" Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.611196 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/3b3e4088-de47-4e50-b0a9-ad92c99d033d-config-data-generated\") pod \"openstack-galera-2\" (UID: \"3b3e4088-de47-4e50-b0a9-ad92c99d033d\") " pod="keystone-kuttl-tests/openstack-galera-2" Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.611265 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod 
\"openstack-galera-2\" (UID: \"3b3e4088-de47-4e50-b0a9-ad92c99d033d\") device mount path \"/mnt/openstack/pv01\"" pod="keystone-kuttl-tests/openstack-galera-2" Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.611472 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8-kolla-config\") pod \"openstack-galera-0\" (UID: \"87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8\") " pod="keystone-kuttl-tests/openstack-galera-0" Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.611606 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3b3e4088-de47-4e50-b0a9-ad92c99d033d-operator-scripts\") pod \"openstack-galera-2\" (UID: \"3b3e4088-de47-4e50-b0a9-ad92c99d033d\") " pod="keystone-kuttl-tests/openstack-galera-2" Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.611822 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/3b3e4088-de47-4e50-b0a9-ad92c99d033d-config-data-default\") pod \"openstack-galera-2\" (UID: \"3b3e4088-de47-4e50-b0a9-ad92c99d033d\") " pod="keystone-kuttl-tests/openstack-galera-2" Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.612013 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/160b737f-7377-4d3a-8fff-3b96b9a614bc-config-data-default\") pod \"openstack-galera-1\" (UID: \"160b737f-7377-4d3a-8fff-3b96b9a614bc\") " pod="keystone-kuttl-tests/openstack-galera-1" Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.612258 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/3b3e4088-de47-4e50-b0a9-ad92c99d033d-config-data-generated\") pod \"openstack-galera-2\" (UID: \"3b3e4088-de47-4e50-b0a9-ad92c99d033d\") " pod="keystone-kuttl-tests/openstack-galera-2" Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.612598 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8-config-data-default\") pod \"openstack-galera-0\" (UID: \"87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8\") " pod="keystone-kuttl-tests/openstack-galera-0" Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.612736 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8-operator-scripts\") pod \"openstack-galera-0\" (UID: \"87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8\") " pod="keystone-kuttl-tests/openstack-galera-0" Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.612883 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/160b737f-7377-4d3a-8fff-3b96b9a614bc-operator-scripts\") pod \"openstack-galera-1\" (UID: \"160b737f-7377-4d3a-8fff-3b96b9a614bc\") " pod="keystone-kuttl-tests/openstack-galera-1" Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.613530 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3b3e4088-de47-4e50-b0a9-ad92c99d033d-operator-scripts\") pod \"openstack-galera-2\" (UID: \"3b3e4088-de47-4e50-b0a9-ad92c99d033d\") " 
pod="keystone-kuttl-tests/openstack-galera-2" Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.613577 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/160b737f-7377-4d3a-8fff-3b96b9a614bc-kolla-config\") pod \"openstack-galera-1\" (UID: \"160b737f-7377-4d3a-8fff-3b96b9a614bc\") " pod="keystone-kuttl-tests/openstack-galera-1" Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.613660 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nfflj\" (UniqueName: \"kubernetes.io/projected/3b3e4088-de47-4e50-b0a9-ad92c99d033d-kube-api-access-nfflj\") pod \"openstack-galera-2\" (UID: \"3b3e4088-de47-4e50-b0a9-ad92c99d033d\") " pod="keystone-kuttl-tests/openstack-galera-2" Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.613699 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/3b3e4088-de47-4e50-b0a9-ad92c99d033d-kolla-config\") pod \"openstack-galera-2\" (UID: \"3b3e4088-de47-4e50-b0a9-ad92c99d033d\") " pod="keystone-kuttl-tests/openstack-galera-2" Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.613725 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/160b737f-7377-4d3a-8fff-3b96b9a614bc-config-data-generated\") pod \"openstack-galera-1\" (UID: \"160b737f-7377-4d3a-8fff-3b96b9a614bc\") " pod="keystone-kuttl-tests/openstack-galera-1" Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.614033 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/160b737f-7377-4d3a-8fff-3b96b9a614bc-kolla-config\") pod \"openstack-galera-1\" (UID: \"160b737f-7377-4d3a-8fff-3b96b9a614bc\") " pod="keystone-kuttl-tests/openstack-galera-1" Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.614289 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/160b737f-7377-4d3a-8fff-3b96b9a614bc-config-data-generated\") pod \"openstack-galera-1\" (UID: \"160b737f-7377-4d3a-8fff-3b96b9a614bc\") " pod="keystone-kuttl-tests/openstack-galera-1" Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.615790 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/3b3e4088-de47-4e50-b0a9-ad92c99d033d-kolla-config\") pod \"openstack-galera-2\" (UID: \"3b3e4088-de47-4e50-b0a9-ad92c99d033d\") " pod="keystone-kuttl-tests/openstack-galera-2" Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.616471 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8-config-data-generated\") pod \"openstack-galera-0\" (UID: \"87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8\") " pod="keystone-kuttl-tests/openstack-galera-0" Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.630233 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2njxt\" (UniqueName: \"kubernetes.io/projected/87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8-kube-api-access-2njxt\") pod \"openstack-galera-0\" (UID: \"87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8\") " pod="keystone-kuttl-tests/openstack-galera-0" Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.631488 4558 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-2\" (UID: \"3b3e4088-de47-4e50-b0a9-ad92c99d033d\") " pod="keystone-kuttl-tests/openstack-galera-2" Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.631515 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"openstack-galera-1\" (UID: \"160b737f-7377-4d3a-8fff-3b96b9a614bc\") " pod="keystone-kuttl-tests/openstack-galera-1" Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.635658 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nfflj\" (UniqueName: \"kubernetes.io/projected/3b3e4088-de47-4e50-b0a9-ad92c99d033d-kube-api-access-nfflj\") pod \"openstack-galera-2\" (UID: \"3b3e4088-de47-4e50-b0a9-ad92c99d033d\") " pod="keystone-kuttl-tests/openstack-galera-2" Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.637499 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"openstack-galera-0\" (UID: \"87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8\") " pod="keystone-kuttl-tests/openstack-galera-0" Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.637718 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tk2l7\" (UniqueName: \"kubernetes.io/projected/160b737f-7377-4d3a-8fff-3b96b9a614bc-kube-api-access-tk2l7\") pod \"openstack-galera-1\" (UID: \"160b737f-7377-4d3a-8fff-3b96b9a614bc\") " pod="keystone-kuttl-tests/openstack-galera-1" Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.647842 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/openstack-galera-0" Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.659768 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/openstack-galera-1" Jan 20 18:21:08 crc kubenswrapper[4558]: I0120 18:21:08.676681 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/openstack-galera-2" Jan 20 18:21:09 crc kubenswrapper[4558]: I0120 18:21:09.063840 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/openstack-galera-0"] Jan 20 18:21:09 crc kubenswrapper[4558]: I0120 18:21:09.066770 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/openstack-galera-1"] Jan 20 18:21:09 crc kubenswrapper[4558]: W0120 18:21:09.078187 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod87dd79f4_9a5b_4370_9ef8_3101a2b8bfb8.slice/crio-b5d628c5a3128b8dc2c86705ca969b89f85b278eb2da4cd64a23530e19cf6d40 WatchSource:0}: Error finding container b5d628c5a3128b8dc2c86705ca969b89f85b278eb2da4cd64a23530e19cf6d40: Status 404 returned error can't find the container with id b5d628c5a3128b8dc2c86705ca969b89f85b278eb2da4cd64a23530e19cf6d40 Jan 20 18:21:09 crc kubenswrapper[4558]: I0120 18:21:09.147224 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/openstack-galera-2"] Jan 20 18:21:09 crc kubenswrapper[4558]: W0120 18:21:09.158256 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3b3e4088_de47_4e50_b0a9_ad92c99d033d.slice/crio-f07ec8c764fc8deac7b0bdb653f8a713f381c5149034a4b0641f08249a852777 WatchSource:0}: Error finding container f07ec8c764fc8deac7b0bdb653f8a713f381c5149034a4b0641f08249a852777: Status 404 returned error can't find the container with id f07ec8c764fc8deac7b0bdb653f8a713f381c5149034a4b0641f08249a852777 Jan 20 18:21:09 crc kubenswrapper[4558]: I0120 18:21:09.319818 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/openstack-galera-2" event={"ID":"3b3e4088-de47-4e50-b0a9-ad92c99d033d","Type":"ContainerStarted","Data":"1980894cfff612e1c62e2fca2a7a9c198c73c0713bf7e0fc70065c51ec322466"} Jan 20 18:21:09 crc kubenswrapper[4558]: I0120 18:21:09.319898 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/openstack-galera-2" event={"ID":"3b3e4088-de47-4e50-b0a9-ad92c99d033d","Type":"ContainerStarted","Data":"f07ec8c764fc8deac7b0bdb653f8a713f381c5149034a4b0641f08249a852777"} Jan 20 18:21:09 crc kubenswrapper[4558]: I0120 18:21:09.322914 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/openstack-galera-0" event={"ID":"87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8","Type":"ContainerStarted","Data":"90a03121d50c44895e361b832a6554b41e7a21defd42de4a323ee9fc644fed96"} Jan 20 18:21:09 crc kubenswrapper[4558]: I0120 18:21:09.322986 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/openstack-galera-0" event={"ID":"87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8","Type":"ContainerStarted","Data":"b5d628c5a3128b8dc2c86705ca969b89f85b278eb2da4cd64a23530e19cf6d40"} Jan 20 18:21:09 crc kubenswrapper[4558]: I0120 18:21:09.327054 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/openstack-galera-1" event={"ID":"160b737f-7377-4d3a-8fff-3b96b9a614bc","Type":"ContainerStarted","Data":"6e984aac6ee9677cdf1c592c71b4863c21b8bd0d4ace94da03b714c88dfeedb7"} Jan 20 18:21:09 crc kubenswrapper[4558]: I0120 18:21:09.327081 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/openstack-galera-1" event={"ID":"160b737f-7377-4d3a-8fff-3b96b9a614bc","Type":"ContainerStarted","Data":"be203ee3845f9a077f56d6585dc60586c09f9fb58acf1dd477b7caeef6a2cd4c"} Jan 20 
18:21:10 crc kubenswrapper[4558]: I0120 18:21:10.626236 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/infra-operator-index-cfhl6"] Jan 20 18:21:10 crc kubenswrapper[4558]: I0120 18:21:10.626642 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/infra-operator-index-cfhl6" podUID="22bb144c-7b65-4d05-b1f6-e15ccb620de3" containerName="registry-server" containerID="cri-o://99131e87ee51e530c20a29665eee45804eb9dd2e282408b09120f7146b017aca" gracePeriod=2 Jan 20 18:21:10 crc kubenswrapper[4558]: I0120 18:21:10.965273 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-index-cfhl6" Jan 20 18:21:11 crc kubenswrapper[4558]: I0120 18:21:11.057068 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jvdd4\" (UniqueName: \"kubernetes.io/projected/22bb144c-7b65-4d05-b1f6-e15ccb620de3-kube-api-access-jvdd4\") pod \"22bb144c-7b65-4d05-b1f6-e15ccb620de3\" (UID: \"22bb144c-7b65-4d05-b1f6-e15ccb620de3\") " Jan 20 18:21:11 crc kubenswrapper[4558]: I0120 18:21:11.064373 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22bb144c-7b65-4d05-b1f6-e15ccb620de3-kube-api-access-jvdd4" (OuterVolumeSpecName: "kube-api-access-jvdd4") pod "22bb144c-7b65-4d05-b1f6-e15ccb620de3" (UID: "22bb144c-7b65-4d05-b1f6-e15ccb620de3"). InnerVolumeSpecName "kube-api-access-jvdd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:21:11 crc kubenswrapper[4558]: I0120 18:21:11.158772 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jvdd4\" (UniqueName: \"kubernetes.io/projected/22bb144c-7b65-4d05-b1f6-e15ccb620de3-kube-api-access-jvdd4\") on node \"crc\" DevicePath \"\"" Jan 20 18:21:11 crc kubenswrapper[4558]: I0120 18:21:11.231114 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-index-6sn8v"] Jan 20 18:21:11 crc kubenswrapper[4558]: E0120 18:21:11.231418 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22bb144c-7b65-4d05-b1f6-e15ccb620de3" containerName="registry-server" Jan 20 18:21:11 crc kubenswrapper[4558]: I0120 18:21:11.231433 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="22bb144c-7b65-4d05-b1f6-e15ccb620de3" containerName="registry-server" Jan 20 18:21:11 crc kubenswrapper[4558]: I0120 18:21:11.231538 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="22bb144c-7b65-4d05-b1f6-e15ccb620de3" containerName="registry-server" Jan 20 18:21:11 crc kubenswrapper[4558]: I0120 18:21:11.232013 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-index-6sn8v" Jan 20 18:21:11 crc kubenswrapper[4558]: I0120 18:21:11.239142 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-index-6sn8v"] Jan 20 18:21:11 crc kubenswrapper[4558]: I0120 18:21:11.338571 4558 generic.go:334] "Generic (PLEG): container finished" podID="22bb144c-7b65-4d05-b1f6-e15ccb620de3" containerID="99131e87ee51e530c20a29665eee45804eb9dd2e282408b09120f7146b017aca" exitCode=0 Jan 20 18:21:11 crc kubenswrapper[4558]: I0120 18:21:11.338616 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-cfhl6" event={"ID":"22bb144c-7b65-4d05-b1f6-e15ccb620de3","Type":"ContainerDied","Data":"99131e87ee51e530c20a29665eee45804eb9dd2e282408b09120f7146b017aca"} Jan 20 18:21:11 crc kubenswrapper[4558]: I0120 18:21:11.338630 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-index-cfhl6" Jan 20 18:21:11 crc kubenswrapper[4558]: I0120 18:21:11.338641 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-cfhl6" event={"ID":"22bb144c-7b65-4d05-b1f6-e15ccb620de3","Type":"ContainerDied","Data":"e3d1ac71d21397dc9351e328596cf8e98310b704f2595924c691c5cb5e7e80dd"} Jan 20 18:21:11 crc kubenswrapper[4558]: I0120 18:21:11.338672 4558 scope.go:117] "RemoveContainer" containerID="99131e87ee51e530c20a29665eee45804eb9dd2e282408b09120f7146b017aca" Jan 20 18:21:11 crc kubenswrapper[4558]: I0120 18:21:11.353442 4558 scope.go:117] "RemoveContainer" containerID="99131e87ee51e530c20a29665eee45804eb9dd2e282408b09120f7146b017aca" Jan 20 18:21:11 crc kubenswrapper[4558]: E0120 18:21:11.354240 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"99131e87ee51e530c20a29665eee45804eb9dd2e282408b09120f7146b017aca\": container with ID starting with 99131e87ee51e530c20a29665eee45804eb9dd2e282408b09120f7146b017aca not found: ID does not exist" containerID="99131e87ee51e530c20a29665eee45804eb9dd2e282408b09120f7146b017aca" Jan 20 18:21:11 crc kubenswrapper[4558]: I0120 18:21:11.354276 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"99131e87ee51e530c20a29665eee45804eb9dd2e282408b09120f7146b017aca"} err="failed to get container status \"99131e87ee51e530c20a29665eee45804eb9dd2e282408b09120f7146b017aca\": rpc error: code = NotFound desc = could not find container \"99131e87ee51e530c20a29665eee45804eb9dd2e282408b09120f7146b017aca\": container with ID starting with 99131e87ee51e530c20a29665eee45804eb9dd2e282408b09120f7146b017aca not found: ID does not exist" Jan 20 18:21:11 crc kubenswrapper[4558]: I0120 18:21:11.362231 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-brggs\" (UniqueName: \"kubernetes.io/projected/324917cd-6180-463d-af83-512250591e1c-kube-api-access-brggs\") pod \"infra-operator-index-6sn8v\" (UID: \"324917cd-6180-463d-af83-512250591e1c\") " pod="openstack-operators/infra-operator-index-6sn8v" Jan 20 18:21:11 crc kubenswrapper[4558]: I0120 18:21:11.363730 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/infra-operator-index-cfhl6"] Jan 20 18:21:11 crc kubenswrapper[4558]: I0120 18:21:11.367114 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/infra-operator-index-cfhl6"] Jan 20 18:21:11 crc 
kubenswrapper[4558]: I0120 18:21:11.463721 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-brggs\" (UniqueName: \"kubernetes.io/projected/324917cd-6180-463d-af83-512250591e1c-kube-api-access-brggs\") pod \"infra-operator-index-6sn8v\" (UID: \"324917cd-6180-463d-af83-512250591e1c\") " pod="openstack-operators/infra-operator-index-6sn8v" Jan 20 18:21:11 crc kubenswrapper[4558]: I0120 18:21:11.478441 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-brggs\" (UniqueName: \"kubernetes.io/projected/324917cd-6180-463d-af83-512250591e1c-kube-api-access-brggs\") pod \"infra-operator-index-6sn8v\" (UID: \"324917cd-6180-463d-af83-512250591e1c\") " pod="openstack-operators/infra-operator-index-6sn8v" Jan 20 18:21:11 crc kubenswrapper[4558]: I0120 18:21:11.554025 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-index-6sn8v" Jan 20 18:21:11 crc kubenswrapper[4558]: I0120 18:21:11.923002 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-index-6sn8v"] Jan 20 18:21:11 crc kubenswrapper[4558]: W0120 18:21:11.926984 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod324917cd_6180_463d_af83_512250591e1c.slice/crio-4bf78178f5d89dc44cd1c790fde0c188a3d6e3c45580a48dbdf2fec9a527ecbd WatchSource:0}: Error finding container 4bf78178f5d89dc44cd1c790fde0c188a3d6e3c45580a48dbdf2fec9a527ecbd: Status 404 returned error can't find the container with id 4bf78178f5d89dc44cd1c790fde0c188a3d6e3c45580a48dbdf2fec9a527ecbd Jan 20 18:21:12 crc kubenswrapper[4558]: I0120 18:21:12.346337 4558 generic.go:334] "Generic (PLEG): container finished" podID="160b737f-7377-4d3a-8fff-3b96b9a614bc" containerID="6e984aac6ee9677cdf1c592c71b4863c21b8bd0d4ace94da03b714c88dfeedb7" exitCode=0 Jan 20 18:21:12 crc kubenswrapper[4558]: I0120 18:21:12.346438 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/openstack-galera-1" event={"ID":"160b737f-7377-4d3a-8fff-3b96b9a614bc","Type":"ContainerDied","Data":"6e984aac6ee9677cdf1c592c71b4863c21b8bd0d4ace94da03b714c88dfeedb7"} Jan 20 18:21:12 crc kubenswrapper[4558]: I0120 18:21:12.348373 4558 generic.go:334] "Generic (PLEG): container finished" podID="3b3e4088-de47-4e50-b0a9-ad92c99d033d" containerID="1980894cfff612e1c62e2fca2a7a9c198c73c0713bf7e0fc70065c51ec322466" exitCode=0 Jan 20 18:21:12 crc kubenswrapper[4558]: I0120 18:21:12.348434 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/openstack-galera-2" event={"ID":"3b3e4088-de47-4e50-b0a9-ad92c99d033d","Type":"ContainerDied","Data":"1980894cfff612e1c62e2fca2a7a9c198c73c0713bf7e0fc70065c51ec322466"} Jan 20 18:21:12 crc kubenswrapper[4558]: I0120 18:21:12.350242 4558 generic.go:334] "Generic (PLEG): container finished" podID="87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8" containerID="90a03121d50c44895e361b832a6554b41e7a21defd42de4a323ee9fc644fed96" exitCode=0 Jan 20 18:21:12 crc kubenswrapper[4558]: I0120 18:21:12.350291 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/openstack-galera-0" event={"ID":"87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8","Type":"ContainerDied","Data":"90a03121d50c44895e361b832a6554b41e7a21defd42de4a323ee9fc644fed96"} Jan 20 18:21:12 crc kubenswrapper[4558]: I0120 18:21:12.352059 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/infra-operator-index-6sn8v" event={"ID":"324917cd-6180-463d-af83-512250591e1c","Type":"ContainerStarted","Data":"4bf78178f5d89dc44cd1c790fde0c188a3d6e3c45580a48dbdf2fec9a527ecbd"} Jan 20 18:21:12 crc kubenswrapper[4558]: I0120 18:21:12.573794 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22bb144c-7b65-4d05-b1f6-e15ccb620de3" path="/var/lib/kubelet/pods/22bb144c-7b65-4d05-b1f6-e15ccb620de3/volumes" Jan 20 18:21:13 crc kubenswrapper[4558]: I0120 18:21:13.370770 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-6sn8v" event={"ID":"324917cd-6180-463d-af83-512250591e1c","Type":"ContainerStarted","Data":"259ca3d664b42c5958cc4f36dd51a7880220954c652b939bd270f91d31865d5c"} Jan 20 18:21:13 crc kubenswrapper[4558]: I0120 18:21:13.373517 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/openstack-galera-1" event={"ID":"160b737f-7377-4d3a-8fff-3b96b9a614bc","Type":"ContainerStarted","Data":"c389d8005807225cc21db8265ae5fc2539848a0e1a797f0b1a89dda2965c682f"} Jan 20 18:21:13 crc kubenswrapper[4558]: I0120 18:21:13.375176 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/openstack-galera-2" event={"ID":"3b3e4088-de47-4e50-b0a9-ad92c99d033d","Type":"ContainerStarted","Data":"0231ad6abf0789ab8e253f5c60247d91419898315c1515e5abe79e42a48e7dfc"} Jan 20 18:21:13 crc kubenswrapper[4558]: I0120 18:21:13.376923 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/openstack-galera-0" event={"ID":"87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8","Type":"ContainerStarted","Data":"7411bf3442e75080999942d3466d7ac7fa0fd8befd581f71569639739b51492a"} Jan 20 18:21:13 crc kubenswrapper[4558]: I0120 18:21:13.386329 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-index-6sn8v" podStartSLOduration=1.6621147 podStartE2EDuration="2.386310397s" podCreationTimestamp="2026-01-20 18:21:11 +0000 UTC" firstStartedPulling="2026-01-20 18:21:11.929293985 +0000 UTC m=+5965.689631952" lastFinishedPulling="2026-01-20 18:21:12.653489683 +0000 UTC m=+5966.413827649" observedRunningTime="2026-01-20 18:21:13.382852085 +0000 UTC m=+5967.143190052" watchObservedRunningTime="2026-01-20 18:21:13.386310397 +0000 UTC m=+5967.146648365" Jan 20 18:21:13 crc kubenswrapper[4558]: I0120 18:21:13.398038 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="keystone-kuttl-tests/openstack-galera-2" podStartSLOduration=6.398020916 podStartE2EDuration="6.398020916s" podCreationTimestamp="2026-01-20 18:21:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:21:13.39695926 +0000 UTC m=+5967.157297227" watchObservedRunningTime="2026-01-20 18:21:13.398020916 +0000 UTC m=+5967.158358883" Jan 20 18:21:13 crc kubenswrapper[4558]: I0120 18:21:13.415479 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="keystone-kuttl-tests/openstack-galera-0" podStartSLOduration=6.415464053 podStartE2EDuration="6.415464053s" podCreationTimestamp="2026-01-20 18:21:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:21:13.412469053 +0000 UTC m=+5967.172807020" watchObservedRunningTime="2026-01-20 18:21:13.415464053 +0000 UTC m=+5967.175802020" Jan 20 18:21:13 crc kubenswrapper[4558]: I0120 
18:21:13.427324 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="keystone-kuttl-tests/openstack-galera-1" podStartSLOduration=6.427309205 podStartE2EDuration="6.427309205s" podCreationTimestamp="2026-01-20 18:21:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:21:13.42405154 +0000 UTC m=+5967.184389507" watchObservedRunningTime="2026-01-20 18:21:13.427309205 +0000 UTC m=+5967.187647172" Jan 20 18:21:18 crc kubenswrapper[4558]: I0120 18:21:18.648002 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="keystone-kuttl-tests/openstack-galera-0" Jan 20 18:21:18 crc kubenswrapper[4558]: I0120 18:21:18.648662 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="keystone-kuttl-tests/openstack-galera-0" Jan 20 18:21:18 crc kubenswrapper[4558]: I0120 18:21:18.660574 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="keystone-kuttl-tests/openstack-galera-1" Jan 20 18:21:18 crc kubenswrapper[4558]: I0120 18:21:18.660808 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="keystone-kuttl-tests/openstack-galera-1" Jan 20 18:21:18 crc kubenswrapper[4558]: I0120 18:21:18.676822 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="keystone-kuttl-tests/openstack-galera-2" Jan 20 18:21:18 crc kubenswrapper[4558]: I0120 18:21:18.676853 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="keystone-kuttl-tests/openstack-galera-2" Jan 20 18:21:18 crc kubenswrapper[4558]: I0120 18:21:18.914633 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="keystone-kuttl-tests/openstack-galera-2" Jan 20 18:21:19 crc kubenswrapper[4558]: I0120 18:21:19.482955 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="keystone-kuttl-tests/openstack-galera-2" Jan 20 18:21:21 crc kubenswrapper[4558]: I0120 18:21:21.554643 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/infra-operator-index-6sn8v" Jan 20 18:21:21 crc kubenswrapper[4558]: I0120 18:21:21.555007 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-index-6sn8v" Jan 20 18:21:21 crc kubenswrapper[4558]: I0120 18:21:21.579810 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/infra-operator-index-6sn8v" Jan 20 18:21:22 crc kubenswrapper[4558]: I0120 18:21:22.469242 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-index-6sn8v" Jan 20 18:21:24 crc kubenswrapper[4558]: I0120 18:21:24.874779 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1xvbdj"] Jan 20 18:21:24 crc kubenswrapper[4558]: I0120 18:21:24.877145 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1xvbdj" Jan 20 18:21:24 crc kubenswrapper[4558]: I0120 18:21:24.879559 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-7xp5w" Jan 20 18:21:24 crc kubenswrapper[4558]: I0120 18:21:24.887462 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1xvbdj"] Jan 20 18:21:24 crc kubenswrapper[4558]: I0120 18:21:24.946526 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/c118d975-3ef3-47f2-bc99-32e6e55d1f19-util\") pod \"ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1xvbdj\" (UID: \"c118d975-3ef3-47f2-bc99-32e6e55d1f19\") " pod="openstack-operators/ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1xvbdj" Jan 20 18:21:24 crc kubenswrapper[4558]: I0120 18:21:24.946578 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/c118d975-3ef3-47f2-bc99-32e6e55d1f19-bundle\") pod \"ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1xvbdj\" (UID: \"c118d975-3ef3-47f2-bc99-32e6e55d1f19\") " pod="openstack-operators/ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1xvbdj" Jan 20 18:21:24 crc kubenswrapper[4558]: I0120 18:21:24.946632 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xs8tp\" (UniqueName: \"kubernetes.io/projected/c118d975-3ef3-47f2-bc99-32e6e55d1f19-kube-api-access-xs8tp\") pod \"ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1xvbdj\" (UID: \"c118d975-3ef3-47f2-bc99-32e6e55d1f19\") " pod="openstack-operators/ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1xvbdj" Jan 20 18:21:25 crc kubenswrapper[4558]: I0120 18:21:25.047843 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/c118d975-3ef3-47f2-bc99-32e6e55d1f19-bundle\") pod \"ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1xvbdj\" (UID: \"c118d975-3ef3-47f2-bc99-32e6e55d1f19\") " pod="openstack-operators/ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1xvbdj" Jan 20 18:21:25 crc kubenswrapper[4558]: I0120 18:21:25.047920 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xs8tp\" (UniqueName: \"kubernetes.io/projected/c118d975-3ef3-47f2-bc99-32e6e55d1f19-kube-api-access-xs8tp\") pod \"ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1xvbdj\" (UID: \"c118d975-3ef3-47f2-bc99-32e6e55d1f19\") " pod="openstack-operators/ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1xvbdj" Jan 20 18:21:25 crc kubenswrapper[4558]: I0120 18:21:25.048005 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/c118d975-3ef3-47f2-bc99-32e6e55d1f19-util\") pod \"ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1xvbdj\" (UID: \"c118d975-3ef3-47f2-bc99-32e6e55d1f19\") " pod="openstack-operators/ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1xvbdj" Jan 20 18:21:25 crc kubenswrapper[4558]: I0120 18:21:25.048414 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/c118d975-3ef3-47f2-bc99-32e6e55d1f19-util\") pod \"ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1xvbdj\" (UID: \"c118d975-3ef3-47f2-bc99-32e6e55d1f19\") " pod="openstack-operators/ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1xvbdj" Jan 20 18:21:25 crc kubenswrapper[4558]: I0120 18:21:25.048416 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/c118d975-3ef3-47f2-bc99-32e6e55d1f19-bundle\") pod \"ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1xvbdj\" (UID: \"c118d975-3ef3-47f2-bc99-32e6e55d1f19\") " pod="openstack-operators/ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1xvbdj" Jan 20 18:21:25 crc kubenswrapper[4558]: I0120 18:21:25.066613 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xs8tp\" (UniqueName: \"kubernetes.io/projected/c118d975-3ef3-47f2-bc99-32e6e55d1f19-kube-api-access-xs8tp\") pod \"ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1xvbdj\" (UID: \"c118d975-3ef3-47f2-bc99-32e6e55d1f19\") " pod="openstack-operators/ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1xvbdj" Jan 20 18:21:25 crc kubenswrapper[4558]: I0120 18:21:25.202939 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1xvbdj" Jan 20 18:21:25 crc kubenswrapper[4558]: I0120 18:21:25.597649 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1xvbdj"] Jan 20 18:21:25 crc kubenswrapper[4558]: W0120 18:21:25.604909 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc118d975_3ef3_47f2_bc99_32e6e55d1f19.slice/crio-916e04e272d919560b23cec2230f06af5b475198e82e209acafe5eb3e8271c5a WatchSource:0}: Error finding container 916e04e272d919560b23cec2230f06af5b475198e82e209acafe5eb3e8271c5a: Status 404 returned error can't find the container with id 916e04e272d919560b23cec2230f06af5b475198e82e209acafe5eb3e8271c5a Jan 20 18:21:26 crc kubenswrapper[4558]: I0120 18:21:26.473570 4558 generic.go:334] "Generic (PLEG): container finished" podID="c118d975-3ef3-47f2-bc99-32e6e55d1f19" containerID="7d0dbcca03ee91ec21f2caeaee699b7e50531520e6a7fb08ecc6d21752a4d41f" exitCode=0 Jan 20 18:21:26 crc kubenswrapper[4558]: I0120 18:21:26.473637 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1xvbdj" event={"ID":"c118d975-3ef3-47f2-bc99-32e6e55d1f19","Type":"ContainerDied","Data":"7d0dbcca03ee91ec21f2caeaee699b7e50531520e6a7fb08ecc6d21752a4d41f"} Jan 20 18:21:26 crc kubenswrapper[4558]: I0120 18:21:26.473936 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1xvbdj" event={"ID":"c118d975-3ef3-47f2-bc99-32e6e55d1f19","Type":"ContainerStarted","Data":"916e04e272d919560b23cec2230f06af5b475198e82e209acafe5eb3e8271c5a"} Jan 20 18:21:27 crc kubenswrapper[4558]: I0120 18:21:27.426250 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/root-account-create-update-vgq2z"] Jan 20 18:21:27 crc kubenswrapper[4558]: I0120 18:21:27.427354 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/root-account-create-update-vgq2z" Jan 20 18:21:27 crc kubenswrapper[4558]: I0120 18:21:27.429149 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"openstack-mariadb-root-db-secret" Jan 20 18:21:27 crc kubenswrapper[4558]: I0120 18:21:27.432813 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/root-account-create-update-vgq2z"] Jan 20 18:21:27 crc kubenswrapper[4558]: I0120 18:21:27.481819 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1xvbdj" event={"ID":"c118d975-3ef3-47f2-bc99-32e6e55d1f19","Type":"ContainerStarted","Data":"a64d1244baf80a996a2de72c19922e9ca8daf41bc6171fe988000114c74ab21a"} Jan 20 18:21:27 crc kubenswrapper[4558]: I0120 18:21:27.590184 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/363cbfda-7c1f-4556-a7bd-cec95198f89d-operator-scripts\") pod \"root-account-create-update-vgq2z\" (UID: \"363cbfda-7c1f-4556-a7bd-cec95198f89d\") " pod="keystone-kuttl-tests/root-account-create-update-vgq2z" Jan 20 18:21:27 crc kubenswrapper[4558]: I0120 18:21:27.590235 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-72brr\" (UniqueName: \"kubernetes.io/projected/363cbfda-7c1f-4556-a7bd-cec95198f89d-kube-api-access-72brr\") pod \"root-account-create-update-vgq2z\" (UID: \"363cbfda-7c1f-4556-a7bd-cec95198f89d\") " pod="keystone-kuttl-tests/root-account-create-update-vgq2z" Jan 20 18:21:27 crc kubenswrapper[4558]: I0120 18:21:27.692334 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/363cbfda-7c1f-4556-a7bd-cec95198f89d-operator-scripts\") pod \"root-account-create-update-vgq2z\" (UID: \"363cbfda-7c1f-4556-a7bd-cec95198f89d\") " pod="keystone-kuttl-tests/root-account-create-update-vgq2z" Jan 20 18:21:27 crc kubenswrapper[4558]: I0120 18:21:27.692379 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-72brr\" (UniqueName: \"kubernetes.io/projected/363cbfda-7c1f-4556-a7bd-cec95198f89d-kube-api-access-72brr\") pod \"root-account-create-update-vgq2z\" (UID: \"363cbfda-7c1f-4556-a7bd-cec95198f89d\") " pod="keystone-kuttl-tests/root-account-create-update-vgq2z" Jan 20 18:21:27 crc kubenswrapper[4558]: I0120 18:21:27.693039 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/363cbfda-7c1f-4556-a7bd-cec95198f89d-operator-scripts\") pod \"root-account-create-update-vgq2z\" (UID: \"363cbfda-7c1f-4556-a7bd-cec95198f89d\") " pod="keystone-kuttl-tests/root-account-create-update-vgq2z" Jan 20 18:21:27 crc kubenswrapper[4558]: I0120 18:21:27.710244 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-72brr\" (UniqueName: \"kubernetes.io/projected/363cbfda-7c1f-4556-a7bd-cec95198f89d-kube-api-access-72brr\") pod \"root-account-create-update-vgq2z\" (UID: \"363cbfda-7c1f-4556-a7bd-cec95198f89d\") " pod="keystone-kuttl-tests/root-account-create-update-vgq2z" Jan 20 18:21:27 crc kubenswrapper[4558]: I0120 18:21:27.749288 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/root-account-create-update-vgq2z" Jan 20 18:21:28 crc kubenswrapper[4558]: I0120 18:21:28.133150 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/root-account-create-update-vgq2z"] Jan 20 18:21:28 crc kubenswrapper[4558]: W0120 18:21:28.139072 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod363cbfda_7c1f_4556_a7bd_cec95198f89d.slice/crio-4916ba6c7b1b70d8df1a96f32cef1c17f83f49c2c1d92479293130c1ad387f6a WatchSource:0}: Error finding container 4916ba6c7b1b70d8df1a96f32cef1c17f83f49c2c1d92479293130c1ad387f6a: Status 404 returned error can't find the container with id 4916ba6c7b1b70d8df1a96f32cef1c17f83f49c2c1d92479293130c1ad387f6a Jan 20 18:21:28 crc kubenswrapper[4558]: I0120 18:21:28.492721 4558 generic.go:334] "Generic (PLEG): container finished" podID="c118d975-3ef3-47f2-bc99-32e6e55d1f19" containerID="a64d1244baf80a996a2de72c19922e9ca8daf41bc6171fe988000114c74ab21a" exitCode=0 Jan 20 18:21:28 crc kubenswrapper[4558]: I0120 18:21:28.492755 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1xvbdj" event={"ID":"c118d975-3ef3-47f2-bc99-32e6e55d1f19","Type":"ContainerDied","Data":"a64d1244baf80a996a2de72c19922e9ca8daf41bc6171fe988000114c74ab21a"} Jan 20 18:21:28 crc kubenswrapper[4558]: I0120 18:21:28.495755 4558 generic.go:334] "Generic (PLEG): container finished" podID="363cbfda-7c1f-4556-a7bd-cec95198f89d" containerID="e99229ece4762d87cf6f69056a1380e8f97a8831ded4a19b000a28f82d4b6241" exitCode=0 Jan 20 18:21:28 crc kubenswrapper[4558]: I0120 18:21:28.495814 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/root-account-create-update-vgq2z" event={"ID":"363cbfda-7c1f-4556-a7bd-cec95198f89d","Type":"ContainerDied","Data":"e99229ece4762d87cf6f69056a1380e8f97a8831ded4a19b000a28f82d4b6241"} Jan 20 18:21:28 crc kubenswrapper[4558]: I0120 18:21:28.495844 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/root-account-create-update-vgq2z" event={"ID":"363cbfda-7c1f-4556-a7bd-cec95198f89d","Type":"ContainerStarted","Data":"4916ba6c7b1b70d8df1a96f32cef1c17f83f49c2c1d92479293130c1ad387f6a"} Jan 20 18:21:29 crc kubenswrapper[4558]: I0120 18:21:29.125156 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="keystone-kuttl-tests/openstack-galera-1" Jan 20 18:21:29 crc kubenswrapper[4558]: I0120 18:21:29.184982 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="keystone-kuttl-tests/openstack-galera-1" Jan 20 18:21:29 crc kubenswrapper[4558]: I0120 18:21:29.507230 4558 generic.go:334] "Generic (PLEG): container finished" podID="c118d975-3ef3-47f2-bc99-32e6e55d1f19" containerID="5db8654eac950361871a8d5fdc2ebce705c3d9091c0510295e7562add18a39a6" exitCode=0 Jan 20 18:21:29 crc kubenswrapper[4558]: I0120 18:21:29.507344 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1xvbdj" event={"ID":"c118d975-3ef3-47f2-bc99-32e6e55d1f19","Type":"ContainerDied","Data":"5db8654eac950361871a8d5fdc2ebce705c3d9091c0510295e7562add18a39a6"} Jan 20 18:21:29 crc kubenswrapper[4558]: I0120 18:21:29.727664 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/root-account-create-update-vgq2z" Jan 20 18:21:29 crc kubenswrapper[4558]: I0120 18:21:29.842507 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-72brr\" (UniqueName: \"kubernetes.io/projected/363cbfda-7c1f-4556-a7bd-cec95198f89d-kube-api-access-72brr\") pod \"363cbfda-7c1f-4556-a7bd-cec95198f89d\" (UID: \"363cbfda-7c1f-4556-a7bd-cec95198f89d\") " Jan 20 18:21:29 crc kubenswrapper[4558]: I0120 18:21:29.842595 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/363cbfda-7c1f-4556-a7bd-cec95198f89d-operator-scripts\") pod \"363cbfda-7c1f-4556-a7bd-cec95198f89d\" (UID: \"363cbfda-7c1f-4556-a7bd-cec95198f89d\") " Jan 20 18:21:29 crc kubenswrapper[4558]: I0120 18:21:29.843149 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/363cbfda-7c1f-4556-a7bd-cec95198f89d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "363cbfda-7c1f-4556-a7bd-cec95198f89d" (UID: "363cbfda-7c1f-4556-a7bd-cec95198f89d"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:21:29 crc kubenswrapper[4558]: I0120 18:21:29.843426 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/363cbfda-7c1f-4556-a7bd-cec95198f89d-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 18:21:29 crc kubenswrapper[4558]: I0120 18:21:29.849943 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/363cbfda-7c1f-4556-a7bd-cec95198f89d-kube-api-access-72brr" (OuterVolumeSpecName: "kube-api-access-72brr") pod "363cbfda-7c1f-4556-a7bd-cec95198f89d" (UID: "363cbfda-7c1f-4556-a7bd-cec95198f89d"). InnerVolumeSpecName "kube-api-access-72brr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:21:29 crc kubenswrapper[4558]: I0120 18:21:29.945690 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-72brr\" (UniqueName: \"kubernetes.io/projected/363cbfda-7c1f-4556-a7bd-cec95198f89d-kube-api-access-72brr\") on node \"crc\" DevicePath \"\"" Jan 20 18:21:30 crc kubenswrapper[4558]: I0120 18:21:30.518260 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/root-account-create-update-vgq2z" event={"ID":"363cbfda-7c1f-4556-a7bd-cec95198f89d","Type":"ContainerDied","Data":"4916ba6c7b1b70d8df1a96f32cef1c17f83f49c2c1d92479293130c1ad387f6a"} Jan 20 18:21:30 crc kubenswrapper[4558]: I0120 18:21:30.518314 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4916ba6c7b1b70d8df1a96f32cef1c17f83f49c2c1d92479293130c1ad387f6a" Jan 20 18:21:30 crc kubenswrapper[4558]: I0120 18:21:30.518312 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/root-account-create-update-vgq2z" Jan 20 18:21:30 crc kubenswrapper[4558]: I0120 18:21:30.754454 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1xvbdj" Jan 20 18:21:30 crc kubenswrapper[4558]: I0120 18:21:30.857577 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/c118d975-3ef3-47f2-bc99-32e6e55d1f19-bundle\") pod \"c118d975-3ef3-47f2-bc99-32e6e55d1f19\" (UID: \"c118d975-3ef3-47f2-bc99-32e6e55d1f19\") " Jan 20 18:21:30 crc kubenswrapper[4558]: I0120 18:21:30.858044 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/c118d975-3ef3-47f2-bc99-32e6e55d1f19-util\") pod \"c118d975-3ef3-47f2-bc99-32e6e55d1f19\" (UID: \"c118d975-3ef3-47f2-bc99-32e6e55d1f19\") " Jan 20 18:21:30 crc kubenswrapper[4558]: I0120 18:21:30.858085 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xs8tp\" (UniqueName: \"kubernetes.io/projected/c118d975-3ef3-47f2-bc99-32e6e55d1f19-kube-api-access-xs8tp\") pod \"c118d975-3ef3-47f2-bc99-32e6e55d1f19\" (UID: \"c118d975-3ef3-47f2-bc99-32e6e55d1f19\") " Jan 20 18:21:30 crc kubenswrapper[4558]: I0120 18:21:30.859599 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c118d975-3ef3-47f2-bc99-32e6e55d1f19-bundle" (OuterVolumeSpecName: "bundle") pod "c118d975-3ef3-47f2-bc99-32e6e55d1f19" (UID: "c118d975-3ef3-47f2-bc99-32e6e55d1f19"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:21:30 crc kubenswrapper[4558]: I0120 18:21:30.862132 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c118d975-3ef3-47f2-bc99-32e6e55d1f19-kube-api-access-xs8tp" (OuterVolumeSpecName: "kube-api-access-xs8tp") pod "c118d975-3ef3-47f2-bc99-32e6e55d1f19" (UID: "c118d975-3ef3-47f2-bc99-32e6e55d1f19"). InnerVolumeSpecName "kube-api-access-xs8tp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:21:30 crc kubenswrapper[4558]: I0120 18:21:30.867581 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c118d975-3ef3-47f2-bc99-32e6e55d1f19-util" (OuterVolumeSpecName: "util") pod "c118d975-3ef3-47f2-bc99-32e6e55d1f19" (UID: "c118d975-3ef3-47f2-bc99-32e6e55d1f19"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:21:30 crc kubenswrapper[4558]: I0120 18:21:30.959937 4558 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/c118d975-3ef3-47f2-bc99-32e6e55d1f19-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:21:30 crc kubenswrapper[4558]: I0120 18:21:30.960095 4558 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/c118d975-3ef3-47f2-bc99-32e6e55d1f19-util\") on node \"crc\" DevicePath \"\"" Jan 20 18:21:30 crc kubenswrapper[4558]: I0120 18:21:30.960155 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xs8tp\" (UniqueName: \"kubernetes.io/projected/c118d975-3ef3-47f2-bc99-32e6e55d1f19-kube-api-access-xs8tp\") on node \"crc\" DevicePath \"\"" Jan 20 18:21:31 crc kubenswrapper[4558]: I0120 18:21:31.529513 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1xvbdj" event={"ID":"c118d975-3ef3-47f2-bc99-32e6e55d1f19","Type":"ContainerDied","Data":"916e04e272d919560b23cec2230f06af5b475198e82e209acafe5eb3e8271c5a"} Jan 20 18:21:31 crc kubenswrapper[4558]: I0120 18:21:31.529574 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="916e04e272d919560b23cec2230f06af5b475198e82e209acafe5eb3e8271c5a" Jan 20 18:21:31 crc kubenswrapper[4558]: I0120 18:21:31.529614 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1xvbdj" Jan 20 18:21:31 crc kubenswrapper[4558]: I0120 18:21:31.700300 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="keystone-kuttl-tests/openstack-galera-0" Jan 20 18:21:31 crc kubenswrapper[4558]: I0120 18:21:31.760828 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="keystone-kuttl-tests/openstack-galera-0" Jan 20 18:21:36 crc kubenswrapper[4558]: I0120 18:21:36.638693 4558 scope.go:117] "RemoveContainer" containerID="27aedd69e69221e2dae01ed6d942442903d22db26f9982c1188f891b75e3d04f" Jan 20 18:21:36 crc kubenswrapper[4558]: I0120 18:21:36.663035 4558 scope.go:117] "RemoveContainer" containerID="f3a9e257ed5c765d05f4fa03766918f5b9e3b36814df0c3a6368a2e78c7a1036" Jan 20 18:21:36 crc kubenswrapper[4558]: I0120 18:21:36.683205 4558 scope.go:117] "RemoveContainer" containerID="900efa0bec51e667e70b835c7d93bb351d3142d533d8120cb23e3f20fe6201a6" Jan 20 18:21:36 crc kubenswrapper[4558]: I0120 18:21:36.701245 4558 scope.go:117] "RemoveContainer" containerID="820b4ece32ea6d6110e74102cffa88348736382f40c35b30730a04d00124a5e6" Jan 20 18:21:36 crc kubenswrapper[4558]: I0120 18:21:36.718924 4558 scope.go:117] "RemoveContainer" containerID="6f1d1f59580a316c78b0687a18091ff56e74e966d8f0876c4a46411a485ba0d5" Jan 20 18:21:36 crc kubenswrapper[4558]: I0120 18:21:36.735591 4558 scope.go:117] "RemoveContainer" containerID="4f3b711c12d98bb4804e9912d09576b6818cfe804969e34a1f356abdcf1cb124" Jan 20 18:21:54 crc kubenswrapper[4558]: I0120 18:21:54.339710 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-747bd6f68-87w9d"] Jan 20 18:21:54 crc kubenswrapper[4558]: E0120 18:21:54.340560 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c118d975-3ef3-47f2-bc99-32e6e55d1f19" containerName="util" Jan 20 18:21:54 crc kubenswrapper[4558]: I0120 18:21:54.340576 4558 
state_mem.go:107] "Deleted CPUSet assignment" podUID="c118d975-3ef3-47f2-bc99-32e6e55d1f19" containerName="util" Jan 20 18:21:54 crc kubenswrapper[4558]: E0120 18:21:54.340592 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="363cbfda-7c1f-4556-a7bd-cec95198f89d" containerName="mariadb-account-create-update" Jan 20 18:21:54 crc kubenswrapper[4558]: I0120 18:21:54.340598 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="363cbfda-7c1f-4556-a7bd-cec95198f89d" containerName="mariadb-account-create-update" Jan 20 18:21:54 crc kubenswrapper[4558]: E0120 18:21:54.340615 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c118d975-3ef3-47f2-bc99-32e6e55d1f19" containerName="pull" Jan 20 18:21:54 crc kubenswrapper[4558]: I0120 18:21:54.340622 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c118d975-3ef3-47f2-bc99-32e6e55d1f19" containerName="pull" Jan 20 18:21:54 crc kubenswrapper[4558]: E0120 18:21:54.340634 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c118d975-3ef3-47f2-bc99-32e6e55d1f19" containerName="extract" Jan 20 18:21:54 crc kubenswrapper[4558]: I0120 18:21:54.340640 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c118d975-3ef3-47f2-bc99-32e6e55d1f19" containerName="extract" Jan 20 18:21:54 crc kubenswrapper[4558]: I0120 18:21:54.340763 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c118d975-3ef3-47f2-bc99-32e6e55d1f19" containerName="extract" Jan 20 18:21:54 crc kubenswrapper[4558]: I0120 18:21:54.340785 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="363cbfda-7c1f-4556-a7bd-cec95198f89d" containerName="mariadb-account-create-update" Jan 20 18:21:54 crc kubenswrapper[4558]: I0120 18:21:54.341305 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-747bd6f68-87w9d" Jan 20 18:21:54 crc kubenswrapper[4558]: I0120 18:21:54.345570 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-service-cert" Jan 20 18:21:54 crc kubenswrapper[4558]: I0120 18:21:54.345837 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-twqgh" Jan 20 18:21:54 crc kubenswrapper[4558]: I0120 18:21:54.366414 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-747bd6f68-87w9d"] Jan 20 18:21:54 crc kubenswrapper[4558]: I0120 18:21:54.411665 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/bb3e1e1b-cfd6-4430-88b7-9a6cdf812bf4-webhook-cert\") pod \"infra-operator-controller-manager-747bd6f68-87w9d\" (UID: \"bb3e1e1b-cfd6-4430-88b7-9a6cdf812bf4\") " pod="openstack-operators/infra-operator-controller-manager-747bd6f68-87w9d" Jan 20 18:21:54 crc kubenswrapper[4558]: I0120 18:21:54.411844 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n86v8\" (UniqueName: \"kubernetes.io/projected/bb3e1e1b-cfd6-4430-88b7-9a6cdf812bf4-kube-api-access-n86v8\") pod \"infra-operator-controller-manager-747bd6f68-87w9d\" (UID: \"bb3e1e1b-cfd6-4430-88b7-9a6cdf812bf4\") " pod="openstack-operators/infra-operator-controller-manager-747bd6f68-87w9d" Jan 20 18:21:54 crc kubenswrapper[4558]: I0120 18:21:54.411909 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/bb3e1e1b-cfd6-4430-88b7-9a6cdf812bf4-apiservice-cert\") pod \"infra-operator-controller-manager-747bd6f68-87w9d\" (UID: \"bb3e1e1b-cfd6-4430-88b7-9a6cdf812bf4\") " pod="openstack-operators/infra-operator-controller-manager-747bd6f68-87w9d" Jan 20 18:21:54 crc kubenswrapper[4558]: I0120 18:21:54.513460 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n86v8\" (UniqueName: \"kubernetes.io/projected/bb3e1e1b-cfd6-4430-88b7-9a6cdf812bf4-kube-api-access-n86v8\") pod \"infra-operator-controller-manager-747bd6f68-87w9d\" (UID: \"bb3e1e1b-cfd6-4430-88b7-9a6cdf812bf4\") " pod="openstack-operators/infra-operator-controller-manager-747bd6f68-87w9d" Jan 20 18:21:54 crc kubenswrapper[4558]: I0120 18:21:54.513537 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/bb3e1e1b-cfd6-4430-88b7-9a6cdf812bf4-apiservice-cert\") pod \"infra-operator-controller-manager-747bd6f68-87w9d\" (UID: \"bb3e1e1b-cfd6-4430-88b7-9a6cdf812bf4\") " pod="openstack-operators/infra-operator-controller-manager-747bd6f68-87w9d" Jan 20 18:21:54 crc kubenswrapper[4558]: I0120 18:21:54.513601 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/bb3e1e1b-cfd6-4430-88b7-9a6cdf812bf4-webhook-cert\") pod \"infra-operator-controller-manager-747bd6f68-87w9d\" (UID: \"bb3e1e1b-cfd6-4430-88b7-9a6cdf812bf4\") " pod="openstack-operators/infra-operator-controller-manager-747bd6f68-87w9d" Jan 20 18:21:54 crc kubenswrapper[4558]: I0120 18:21:54.521567 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"webhook-cert\" (UniqueName: \"kubernetes.io/secret/bb3e1e1b-cfd6-4430-88b7-9a6cdf812bf4-webhook-cert\") pod \"infra-operator-controller-manager-747bd6f68-87w9d\" (UID: \"bb3e1e1b-cfd6-4430-88b7-9a6cdf812bf4\") " pod="openstack-operators/infra-operator-controller-manager-747bd6f68-87w9d" Jan 20 18:21:54 crc kubenswrapper[4558]: I0120 18:21:54.521598 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/bb3e1e1b-cfd6-4430-88b7-9a6cdf812bf4-apiservice-cert\") pod \"infra-operator-controller-manager-747bd6f68-87w9d\" (UID: \"bb3e1e1b-cfd6-4430-88b7-9a6cdf812bf4\") " pod="openstack-operators/infra-operator-controller-manager-747bd6f68-87w9d" Jan 20 18:21:54 crc kubenswrapper[4558]: I0120 18:21:54.528147 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n86v8\" (UniqueName: \"kubernetes.io/projected/bb3e1e1b-cfd6-4430-88b7-9a6cdf812bf4-kube-api-access-n86v8\") pod \"infra-operator-controller-manager-747bd6f68-87w9d\" (UID: \"bb3e1e1b-cfd6-4430-88b7-9a6cdf812bf4\") " pod="openstack-operators/infra-operator-controller-manager-747bd6f68-87w9d" Jan 20 18:21:54 crc kubenswrapper[4558]: I0120 18:21:54.657747 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-747bd6f68-87w9d" Jan 20 18:21:55 crc kubenswrapper[4558]: I0120 18:21:55.039346 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-747bd6f68-87w9d"] Jan 20 18:21:55 crc kubenswrapper[4558]: W0120 18:21:55.046344 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbb3e1e1b_cfd6_4430_88b7_9a6cdf812bf4.slice/crio-e050002eda81c70183ee7b2b555edc0358400c06d0a42c5e88926a9c7ae90e62 WatchSource:0}: Error finding container e050002eda81c70183ee7b2b555edc0358400c06d0a42c5e88926a9c7ae90e62: Status 404 returned error can't find the container with id e050002eda81c70183ee7b2b555edc0358400c06d0a42c5e88926a9c7ae90e62 Jan 20 18:21:55 crc kubenswrapper[4558]: I0120 18:21:55.703767 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-747bd6f68-87w9d" event={"ID":"bb3e1e1b-cfd6-4430-88b7-9a6cdf812bf4","Type":"ContainerStarted","Data":"a92298b518d964f2280c292db6da40b46ef8eee322589502a500c90ff3e91a5c"} Jan 20 18:21:55 crc kubenswrapper[4558]: I0120 18:21:55.704118 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-747bd6f68-87w9d" Jan 20 18:21:55 crc kubenswrapper[4558]: I0120 18:21:55.704132 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-747bd6f68-87w9d" event={"ID":"bb3e1e1b-cfd6-4430-88b7-9a6cdf812bf4","Type":"ContainerStarted","Data":"e050002eda81c70183ee7b2b555edc0358400c06d0a42c5e88926a9c7ae90e62"} Jan 20 18:21:55 crc kubenswrapper[4558]: I0120 18:21:55.734908 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-747bd6f68-87w9d" podStartSLOduration=1.734890503 podStartE2EDuration="1.734890503s" podCreationTimestamp="2026-01-20 18:21:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:21:55.72928764 +0000 UTC m=+6009.489625606" 
watchObservedRunningTime="2026-01-20 18:21:55.734890503 +0000 UTC m=+6009.495228471" Jan 20 18:22:04 crc kubenswrapper[4558]: I0120 18:22:04.665107 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-747bd6f68-87w9d" Jan 20 18:22:07 crc kubenswrapper[4558]: I0120 18:22:07.289355 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-jgskb"] Jan 20 18:22:07 crc kubenswrapper[4558]: I0120 18:22:07.290319 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-jgskb" Jan 20 18:22:07 crc kubenswrapper[4558]: I0120 18:22:07.292542 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-index-dockercfg-p4ccr" Jan 20 18:22:07 crc kubenswrapper[4558]: I0120 18:22:07.294580 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fh5gm\" (UniqueName: \"kubernetes.io/projected/97002d7e-fbc1-473b-a351-524b1263a867-kube-api-access-fh5gm\") pod \"rabbitmq-cluster-operator-index-jgskb\" (UID: \"97002d7e-fbc1-473b-a351-524b1263a867\") " pod="openstack-operators/rabbitmq-cluster-operator-index-jgskb" Jan 20 18:22:07 crc kubenswrapper[4558]: I0120 18:22:07.298756 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-jgskb"] Jan 20 18:22:07 crc kubenswrapper[4558]: I0120 18:22:07.396038 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fh5gm\" (UniqueName: \"kubernetes.io/projected/97002d7e-fbc1-473b-a351-524b1263a867-kube-api-access-fh5gm\") pod \"rabbitmq-cluster-operator-index-jgskb\" (UID: \"97002d7e-fbc1-473b-a351-524b1263a867\") " pod="openstack-operators/rabbitmq-cluster-operator-index-jgskb" Jan 20 18:22:07 crc kubenswrapper[4558]: I0120 18:22:07.414239 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fh5gm\" (UniqueName: \"kubernetes.io/projected/97002d7e-fbc1-473b-a351-524b1263a867-kube-api-access-fh5gm\") pod \"rabbitmq-cluster-operator-index-jgskb\" (UID: \"97002d7e-fbc1-473b-a351-524b1263a867\") " pod="openstack-operators/rabbitmq-cluster-operator-index-jgskb" Jan 20 18:22:07 crc kubenswrapper[4558]: I0120 18:22:07.610902 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-jgskb" Jan 20 18:22:08 crc kubenswrapper[4558]: I0120 18:22:08.000682 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-jgskb"] Jan 20 18:22:08 crc kubenswrapper[4558]: W0120 18:22:08.006573 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod97002d7e_fbc1_473b_a351_524b1263a867.slice/crio-b1b059e1a6574be8bc702ceba7c7357357dacb71750b8c04d279a366ce09d53f WatchSource:0}: Error finding container b1b059e1a6574be8bc702ceba7c7357357dacb71750b8c04d279a366ce09d53f: Status 404 returned error can't find the container with id b1b059e1a6574be8bc702ceba7c7357357dacb71750b8c04d279a366ce09d53f Jan 20 18:22:08 crc kubenswrapper[4558]: I0120 18:22:08.807638 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-jgskb" event={"ID":"97002d7e-fbc1-473b-a351-524b1263a867","Type":"ContainerStarted","Data":"b1b059e1a6574be8bc702ceba7c7357357dacb71750b8c04d279a366ce09d53f"} Jan 20 18:22:09 crc kubenswrapper[4558]: I0120 18:22:09.816591 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-jgskb" event={"ID":"97002d7e-fbc1-473b-a351-524b1263a867","Type":"ContainerStarted","Data":"125dc07aaa5910698d304474de2c084492a7ea16256a2e90bdbfc2fe13fe7315"} Jan 20 18:22:09 crc kubenswrapper[4558]: I0120 18:22:09.836514 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-index-jgskb" podStartSLOduration=1.955923441 podStartE2EDuration="2.836493161s" podCreationTimestamp="2026-01-20 18:22:07 +0000 UTC" firstStartedPulling="2026-01-20 18:22:08.009336923 +0000 UTC m=+6021.769674891" lastFinishedPulling="2026-01-20 18:22:08.889906634 +0000 UTC m=+6022.650244611" observedRunningTime="2026-01-20 18:22:09.830615541 +0000 UTC m=+6023.590953508" watchObservedRunningTime="2026-01-20 18:22:09.836493161 +0000 UTC m=+6023.596831129" Jan 20 18:22:11 crc kubenswrapper[4558]: I0120 18:22:11.488219 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-jgskb"] Jan 20 18:22:11 crc kubenswrapper[4558]: I0120 18:22:11.828679 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/rabbitmq-cluster-operator-index-jgskb" podUID="97002d7e-fbc1-473b-a351-524b1263a867" containerName="registry-server" containerID="cri-o://125dc07aaa5910698d304474de2c084492a7ea16256a2e90bdbfc2fe13fe7315" gracePeriod=2 Jan 20 18:22:12 crc kubenswrapper[4558]: I0120 18:22:12.089599 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-pt2sm"] Jan 20 18:22:12 crc kubenswrapper[4558]: I0120 18:22:12.091589 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-pt2sm" Jan 20 18:22:12 crc kubenswrapper[4558]: I0120 18:22:12.096571 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-pt2sm"] Jan 20 18:22:12 crc kubenswrapper[4558]: I0120 18:22:12.163905 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p6m76\" (UniqueName: \"kubernetes.io/projected/4e175dfb-0ef0-492f-800c-8214356c0573-kube-api-access-p6m76\") pod \"rabbitmq-cluster-operator-index-pt2sm\" (UID: \"4e175dfb-0ef0-492f-800c-8214356c0573\") " pod="openstack-operators/rabbitmq-cluster-operator-index-pt2sm" Jan 20 18:22:12 crc kubenswrapper[4558]: I0120 18:22:12.179417 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-jgskb" Jan 20 18:22:12 crc kubenswrapper[4558]: I0120 18:22:12.265377 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p6m76\" (UniqueName: \"kubernetes.io/projected/4e175dfb-0ef0-492f-800c-8214356c0573-kube-api-access-p6m76\") pod \"rabbitmq-cluster-operator-index-pt2sm\" (UID: \"4e175dfb-0ef0-492f-800c-8214356c0573\") " pod="openstack-operators/rabbitmq-cluster-operator-index-pt2sm" Jan 20 18:22:12 crc kubenswrapper[4558]: I0120 18:22:12.286275 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p6m76\" (UniqueName: \"kubernetes.io/projected/4e175dfb-0ef0-492f-800c-8214356c0573-kube-api-access-p6m76\") pod \"rabbitmq-cluster-operator-index-pt2sm\" (UID: \"4e175dfb-0ef0-492f-800c-8214356c0573\") " pod="openstack-operators/rabbitmq-cluster-operator-index-pt2sm" Jan 20 18:22:12 crc kubenswrapper[4558]: I0120 18:22:12.366883 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fh5gm\" (UniqueName: \"kubernetes.io/projected/97002d7e-fbc1-473b-a351-524b1263a867-kube-api-access-fh5gm\") pod \"97002d7e-fbc1-473b-a351-524b1263a867\" (UID: \"97002d7e-fbc1-473b-a351-524b1263a867\") " Jan 20 18:22:12 crc kubenswrapper[4558]: I0120 18:22:12.371932 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/97002d7e-fbc1-473b-a351-524b1263a867-kube-api-access-fh5gm" (OuterVolumeSpecName: "kube-api-access-fh5gm") pod "97002d7e-fbc1-473b-a351-524b1263a867" (UID: "97002d7e-fbc1-473b-a351-524b1263a867"). InnerVolumeSpecName "kube-api-access-fh5gm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:22:12 crc kubenswrapper[4558]: I0120 18:22:12.410682 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-pt2sm" Jan 20 18:22:12 crc kubenswrapper[4558]: I0120 18:22:12.469269 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fh5gm\" (UniqueName: \"kubernetes.io/projected/97002d7e-fbc1-473b-a351-524b1263a867-kube-api-access-fh5gm\") on node \"crc\" DevicePath \"\"" Jan 20 18:22:12 crc kubenswrapper[4558]: I0120 18:22:12.790038 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-pt2sm"] Jan 20 18:22:12 crc kubenswrapper[4558]: I0120 18:22:12.838152 4558 generic.go:334] "Generic (PLEG): container finished" podID="97002d7e-fbc1-473b-a351-524b1263a867" containerID="125dc07aaa5910698d304474de2c084492a7ea16256a2e90bdbfc2fe13fe7315" exitCode=0 Jan 20 18:22:12 crc kubenswrapper[4558]: I0120 18:22:12.838237 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-jgskb" Jan 20 18:22:12 crc kubenswrapper[4558]: I0120 18:22:12.838230 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-jgskb" event={"ID":"97002d7e-fbc1-473b-a351-524b1263a867","Type":"ContainerDied","Data":"125dc07aaa5910698d304474de2c084492a7ea16256a2e90bdbfc2fe13fe7315"} Jan 20 18:22:12 crc kubenswrapper[4558]: I0120 18:22:12.838306 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-jgskb" event={"ID":"97002d7e-fbc1-473b-a351-524b1263a867","Type":"ContainerDied","Data":"b1b059e1a6574be8bc702ceba7c7357357dacb71750b8c04d279a366ce09d53f"} Jan 20 18:22:12 crc kubenswrapper[4558]: I0120 18:22:12.838365 4558 scope.go:117] "RemoveContainer" containerID="125dc07aaa5910698d304474de2c084492a7ea16256a2e90bdbfc2fe13fe7315" Jan 20 18:22:12 crc kubenswrapper[4558]: I0120 18:22:12.840234 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-pt2sm" event={"ID":"4e175dfb-0ef0-492f-800c-8214356c0573","Type":"ContainerStarted","Data":"4ca45ec9b6dd20d740311e96f8a8ed0926b0261ceb9eb0a9b7e6c86a90402b25"} Jan 20 18:22:12 crc kubenswrapper[4558]: I0120 18:22:12.858099 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-jgskb"] Jan 20 18:22:12 crc kubenswrapper[4558]: I0120 18:22:12.866220 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-jgskb"] Jan 20 18:22:12 crc kubenswrapper[4558]: I0120 18:22:12.873228 4558 scope.go:117] "RemoveContainer" containerID="125dc07aaa5910698d304474de2c084492a7ea16256a2e90bdbfc2fe13fe7315" Jan 20 18:22:12 crc kubenswrapper[4558]: E0120 18:22:12.873657 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"125dc07aaa5910698d304474de2c084492a7ea16256a2e90bdbfc2fe13fe7315\": container with ID starting with 125dc07aaa5910698d304474de2c084492a7ea16256a2e90bdbfc2fe13fe7315 not found: ID does not exist" containerID="125dc07aaa5910698d304474de2c084492a7ea16256a2e90bdbfc2fe13fe7315" Jan 20 18:22:12 crc kubenswrapper[4558]: I0120 18:22:12.873702 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"125dc07aaa5910698d304474de2c084492a7ea16256a2e90bdbfc2fe13fe7315"} err="failed to get container status \"125dc07aaa5910698d304474de2c084492a7ea16256a2e90bdbfc2fe13fe7315\": rpc error: 
code = NotFound desc = could not find container \"125dc07aaa5910698d304474de2c084492a7ea16256a2e90bdbfc2fe13fe7315\": container with ID starting with 125dc07aaa5910698d304474de2c084492a7ea16256a2e90bdbfc2fe13fe7315 not found: ID does not exist" Jan 20 18:22:13 crc kubenswrapper[4558]: I0120 18:22:13.066455 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/memcached-0"] Jan 20 18:22:13 crc kubenswrapper[4558]: E0120 18:22:13.066724 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97002d7e-fbc1-473b-a351-524b1263a867" containerName="registry-server" Jan 20 18:22:13 crc kubenswrapper[4558]: I0120 18:22:13.066737 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="97002d7e-fbc1-473b-a351-524b1263a867" containerName="registry-server" Jan 20 18:22:13 crc kubenswrapper[4558]: I0120 18:22:13.066853 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="97002d7e-fbc1-473b-a351-524b1263a867" containerName="registry-server" Jan 20 18:22:13 crc kubenswrapper[4558]: I0120 18:22:13.067340 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/memcached-0" Jan 20 18:22:13 crc kubenswrapper[4558]: I0120 18:22:13.069057 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"keystone-kuttl-tests"/"memcached-config-data" Jan 20 18:22:13 crc kubenswrapper[4558]: I0120 18:22:13.069332 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"memcached-memcached-dockercfg-9bkkz" Jan 20 18:22:13 crc kubenswrapper[4558]: I0120 18:22:13.074663 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/memcached-0"] Jan 20 18:22:13 crc kubenswrapper[4558]: I0120 18:22:13.078055 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c70e04a6-d794-4290-ac24-1fecf80b3d41-kolla-config\") pod \"memcached-0\" (UID: \"c70e04a6-d794-4290-ac24-1fecf80b3d41\") " pod="keystone-kuttl-tests/memcached-0" Jan 20 18:22:13 crc kubenswrapper[4558]: I0120 18:22:13.078225 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c70e04a6-d794-4290-ac24-1fecf80b3d41-config-data\") pod \"memcached-0\" (UID: \"c70e04a6-d794-4290-ac24-1fecf80b3d41\") " pod="keystone-kuttl-tests/memcached-0" Jan 20 18:22:13 crc kubenswrapper[4558]: I0120 18:22:13.078353 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zvdrp\" (UniqueName: \"kubernetes.io/projected/c70e04a6-d794-4290-ac24-1fecf80b3d41-kube-api-access-zvdrp\") pod \"memcached-0\" (UID: \"c70e04a6-d794-4290-ac24-1fecf80b3d41\") " pod="keystone-kuttl-tests/memcached-0" Jan 20 18:22:13 crc kubenswrapper[4558]: I0120 18:22:13.180608 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c70e04a6-d794-4290-ac24-1fecf80b3d41-kolla-config\") pod \"memcached-0\" (UID: \"c70e04a6-d794-4290-ac24-1fecf80b3d41\") " pod="keystone-kuttl-tests/memcached-0" Jan 20 18:22:13 crc kubenswrapper[4558]: I0120 18:22:13.181024 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c70e04a6-d794-4290-ac24-1fecf80b3d41-config-data\") pod \"memcached-0\" (UID: \"c70e04a6-d794-4290-ac24-1fecf80b3d41\") " 
pod="keystone-kuttl-tests/memcached-0" Jan 20 18:22:13 crc kubenswrapper[4558]: I0120 18:22:13.181195 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zvdrp\" (UniqueName: \"kubernetes.io/projected/c70e04a6-d794-4290-ac24-1fecf80b3d41-kube-api-access-zvdrp\") pod \"memcached-0\" (UID: \"c70e04a6-d794-4290-ac24-1fecf80b3d41\") " pod="keystone-kuttl-tests/memcached-0" Jan 20 18:22:13 crc kubenswrapper[4558]: I0120 18:22:13.181472 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c70e04a6-d794-4290-ac24-1fecf80b3d41-kolla-config\") pod \"memcached-0\" (UID: \"c70e04a6-d794-4290-ac24-1fecf80b3d41\") " pod="keystone-kuttl-tests/memcached-0" Jan 20 18:22:13 crc kubenswrapper[4558]: I0120 18:22:13.181737 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c70e04a6-d794-4290-ac24-1fecf80b3d41-config-data\") pod \"memcached-0\" (UID: \"c70e04a6-d794-4290-ac24-1fecf80b3d41\") " pod="keystone-kuttl-tests/memcached-0" Jan 20 18:22:13 crc kubenswrapper[4558]: I0120 18:22:13.197873 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zvdrp\" (UniqueName: \"kubernetes.io/projected/c70e04a6-d794-4290-ac24-1fecf80b3d41-kube-api-access-zvdrp\") pod \"memcached-0\" (UID: \"c70e04a6-d794-4290-ac24-1fecf80b3d41\") " pod="keystone-kuttl-tests/memcached-0" Jan 20 18:22:13 crc kubenswrapper[4558]: I0120 18:22:13.383094 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/memcached-0" Jan 20 18:22:13 crc kubenswrapper[4558]: I0120 18:22:13.776798 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/memcached-0"] Jan 20 18:22:13 crc kubenswrapper[4558]: I0120 18:22:13.850862 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/memcached-0" event={"ID":"c70e04a6-d794-4290-ac24-1fecf80b3d41","Type":"ContainerStarted","Data":"43a35e62b1a6a8f712dd08fe080b7c31f6b8b61f8b607dd09fd6eef006192181"} Jan 20 18:22:13 crc kubenswrapper[4558]: I0120 18:22:13.856658 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-pt2sm" event={"ID":"4e175dfb-0ef0-492f-800c-8214356c0573","Type":"ContainerStarted","Data":"63761e2dc61ed042265b21210335a7fa7edc18776026efc2301859ca8e2dbf42"} Jan 20 18:22:13 crc kubenswrapper[4558]: I0120 18:22:13.883635 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-index-pt2sm" podStartSLOduration=1.333908171 podStartE2EDuration="1.88362184s" podCreationTimestamp="2026-01-20 18:22:12 +0000 UTC" firstStartedPulling="2026-01-20 18:22:12.798892158 +0000 UTC m=+6026.559230125" lastFinishedPulling="2026-01-20 18:22:13.348605827 +0000 UTC m=+6027.108943794" observedRunningTime="2026-01-20 18:22:13.874390382 +0000 UTC m=+6027.634728349" watchObservedRunningTime="2026-01-20 18:22:13.88362184 +0000 UTC m=+6027.643959807" Jan 20 18:22:14 crc kubenswrapper[4558]: I0120 18:22:14.575560 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="97002d7e-fbc1-473b-a351-524b1263a867" path="/var/lib/kubelet/pods/97002d7e-fbc1-473b-a351-524b1263a867/volumes" Jan 20 18:22:14 crc kubenswrapper[4558]: I0120 18:22:14.865726 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/memcached-0" 
event={"ID":"c70e04a6-d794-4290-ac24-1fecf80b3d41","Type":"ContainerStarted","Data":"f6c7ab6691baa097b9066d0f32d65f848d6362b59f85d8766a3bec60d4457114"} Jan 20 18:22:14 crc kubenswrapper[4558]: I0120 18:22:14.865916 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="keystone-kuttl-tests/memcached-0" Jan 20 18:22:14 crc kubenswrapper[4558]: I0120 18:22:14.882912 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="keystone-kuttl-tests/memcached-0" podStartSLOduration=1.882898247 podStartE2EDuration="1.882898247s" podCreationTimestamp="2026-01-20 18:22:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:22:14.88167632 +0000 UTC m=+6028.642014287" watchObservedRunningTime="2026-01-20 18:22:14.882898247 +0000 UTC m=+6028.643236214" Jan 20 18:22:18 crc kubenswrapper[4558]: I0120 18:22:18.385125 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="keystone-kuttl-tests/memcached-0" Jan 20 18:22:22 crc kubenswrapper[4558]: I0120 18:22:22.411182 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/rabbitmq-cluster-operator-index-pt2sm" Jan 20 18:22:22 crc kubenswrapper[4558]: I0120 18:22:22.411619 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/rabbitmq-cluster-operator-index-pt2sm" Jan 20 18:22:22 crc kubenswrapper[4558]: I0120 18:22:22.441230 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/rabbitmq-cluster-operator-index-pt2sm" Jan 20 18:22:22 crc kubenswrapper[4558]: I0120 18:22:22.961002 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/rabbitmq-cluster-operator-index-pt2sm" Jan 20 18:22:25 crc kubenswrapper[4558]: I0120 18:22:25.542721 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590xcj9f"] Jan 20 18:22:25 crc kubenswrapper[4558]: I0120 18:22:25.544296 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590xcj9f" Jan 20 18:22:25 crc kubenswrapper[4558]: I0120 18:22:25.546122 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-7xp5w" Jan 20 18:22:25 crc kubenswrapper[4558]: I0120 18:22:25.563052 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590xcj9f"] Jan 20 18:22:25 crc kubenswrapper[4558]: I0120 18:22:25.669490 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/25c79886-6ce6-4b8d-b2a1-4b2ae7d9a686-bundle\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590xcj9f\" (UID: \"25c79886-6ce6-4b8d-b2a1-4b2ae7d9a686\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590xcj9f" Jan 20 18:22:25 crc kubenswrapper[4558]: I0120 18:22:25.669705 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/25c79886-6ce6-4b8d-b2a1-4b2ae7d9a686-util\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590xcj9f\" (UID: \"25c79886-6ce6-4b8d-b2a1-4b2ae7d9a686\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590xcj9f" Jan 20 18:22:25 crc kubenswrapper[4558]: I0120 18:22:25.669783 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qbkln\" (UniqueName: \"kubernetes.io/projected/25c79886-6ce6-4b8d-b2a1-4b2ae7d9a686-kube-api-access-qbkln\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590xcj9f\" (UID: \"25c79886-6ce6-4b8d-b2a1-4b2ae7d9a686\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590xcj9f" Jan 20 18:22:25 crc kubenswrapper[4558]: I0120 18:22:25.771041 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qbkln\" (UniqueName: \"kubernetes.io/projected/25c79886-6ce6-4b8d-b2a1-4b2ae7d9a686-kube-api-access-qbkln\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590xcj9f\" (UID: \"25c79886-6ce6-4b8d-b2a1-4b2ae7d9a686\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590xcj9f" Jan 20 18:22:25 crc kubenswrapper[4558]: I0120 18:22:25.771138 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/25c79886-6ce6-4b8d-b2a1-4b2ae7d9a686-bundle\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590xcj9f\" (UID: \"25c79886-6ce6-4b8d-b2a1-4b2ae7d9a686\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590xcj9f" Jan 20 18:22:25 crc kubenswrapper[4558]: I0120 18:22:25.771208 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/25c79886-6ce6-4b8d-b2a1-4b2ae7d9a686-util\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590xcj9f\" (UID: \"25c79886-6ce6-4b8d-b2a1-4b2ae7d9a686\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590xcj9f" Jan 20 18:22:25 crc kubenswrapper[4558]: I0120 18:22:25.771659 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/25c79886-6ce6-4b8d-b2a1-4b2ae7d9a686-util\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590xcj9f\" (UID: \"25c79886-6ce6-4b8d-b2a1-4b2ae7d9a686\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590xcj9f" Jan 20 18:22:25 crc kubenswrapper[4558]: I0120 18:22:25.771765 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/25c79886-6ce6-4b8d-b2a1-4b2ae7d9a686-bundle\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590xcj9f\" (UID: \"25c79886-6ce6-4b8d-b2a1-4b2ae7d9a686\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590xcj9f" Jan 20 18:22:25 crc kubenswrapper[4558]: I0120 18:22:25.803153 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qbkln\" (UniqueName: \"kubernetes.io/projected/25c79886-6ce6-4b8d-b2a1-4b2ae7d9a686-kube-api-access-qbkln\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590xcj9f\" (UID: \"25c79886-6ce6-4b8d-b2a1-4b2ae7d9a686\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590xcj9f" Jan 20 18:22:25 crc kubenswrapper[4558]: I0120 18:22:25.864612 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590xcj9f" Jan 20 18:22:26 crc kubenswrapper[4558]: I0120 18:22:26.244014 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590xcj9f"] Jan 20 18:22:26 crc kubenswrapper[4558]: W0120 18:22:26.247712 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod25c79886_6ce6_4b8d_b2a1_4b2ae7d9a686.slice/crio-efd307f072ff773cac4c559f5454ce19a45a4c9e24d17fc1e08a58256a4e1ecd WatchSource:0}: Error finding container efd307f072ff773cac4c559f5454ce19a45a4c9e24d17fc1e08a58256a4e1ecd: Status 404 returned error can't find the container with id efd307f072ff773cac4c559f5454ce19a45a4c9e24d17fc1e08a58256a4e1ecd Jan 20 18:22:26 crc kubenswrapper[4558]: I0120 18:22:26.961141 4558 generic.go:334] "Generic (PLEG): container finished" podID="25c79886-6ce6-4b8d-b2a1-4b2ae7d9a686" containerID="4a3ec481e5c67206b7856e248ae59f098c0a8c38bf85b643d55b84d90dfc42d0" exitCode=0 Jan 20 18:22:26 crc kubenswrapper[4558]: I0120 18:22:26.961209 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590xcj9f" event={"ID":"25c79886-6ce6-4b8d-b2a1-4b2ae7d9a686","Type":"ContainerDied","Data":"4a3ec481e5c67206b7856e248ae59f098c0a8c38bf85b643d55b84d90dfc42d0"} Jan 20 18:22:26 crc kubenswrapper[4558]: I0120 18:22:26.961265 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590xcj9f" event={"ID":"25c79886-6ce6-4b8d-b2a1-4b2ae7d9a686","Type":"ContainerStarted","Data":"efd307f072ff773cac4c559f5454ce19a45a4c9e24d17fc1e08a58256a4e1ecd"} Jan 20 18:22:27 crc kubenswrapper[4558]: I0120 18:22:27.330096 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 18:22:27 crc kubenswrapper[4558]: I0120 
18:22:27.330201 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 18:22:27 crc kubenswrapper[4558]: I0120 18:22:27.973503 4558 generic.go:334] "Generic (PLEG): container finished" podID="25c79886-6ce6-4b8d-b2a1-4b2ae7d9a686" containerID="d9887ff06ad881226c9a4aa8c0ee5b08ec17cfbe8e8b1d95f62b993e378587bc" exitCode=0 Jan 20 18:22:27 crc kubenswrapper[4558]: I0120 18:22:27.973621 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590xcj9f" event={"ID":"25c79886-6ce6-4b8d-b2a1-4b2ae7d9a686","Type":"ContainerDied","Data":"d9887ff06ad881226c9a4aa8c0ee5b08ec17cfbe8e8b1d95f62b993e378587bc"} Jan 20 18:22:28 crc kubenswrapper[4558]: I0120 18:22:28.985000 4558 generic.go:334] "Generic (PLEG): container finished" podID="25c79886-6ce6-4b8d-b2a1-4b2ae7d9a686" containerID="fa21287492bfb32f5e9b259744d62f18dca24a5edf9250c51898064aace7be93" exitCode=0 Jan 20 18:22:28 crc kubenswrapper[4558]: I0120 18:22:28.985062 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590xcj9f" event={"ID":"25c79886-6ce6-4b8d-b2a1-4b2ae7d9a686","Type":"ContainerDied","Data":"fa21287492bfb32f5e9b259744d62f18dca24a5edf9250c51898064aace7be93"} Jan 20 18:22:30 crc kubenswrapper[4558]: I0120 18:22:30.255606 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590xcj9f" Jan 20 18:22:30 crc kubenswrapper[4558]: I0120 18:22:30.453364 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/25c79886-6ce6-4b8d-b2a1-4b2ae7d9a686-util\") pod \"25c79886-6ce6-4b8d-b2a1-4b2ae7d9a686\" (UID: \"25c79886-6ce6-4b8d-b2a1-4b2ae7d9a686\") " Jan 20 18:22:30 crc kubenswrapper[4558]: I0120 18:22:30.453548 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qbkln\" (UniqueName: \"kubernetes.io/projected/25c79886-6ce6-4b8d-b2a1-4b2ae7d9a686-kube-api-access-qbkln\") pod \"25c79886-6ce6-4b8d-b2a1-4b2ae7d9a686\" (UID: \"25c79886-6ce6-4b8d-b2a1-4b2ae7d9a686\") " Jan 20 18:22:30 crc kubenswrapper[4558]: I0120 18:22:30.453619 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/25c79886-6ce6-4b8d-b2a1-4b2ae7d9a686-bundle\") pod \"25c79886-6ce6-4b8d-b2a1-4b2ae7d9a686\" (UID: \"25c79886-6ce6-4b8d-b2a1-4b2ae7d9a686\") " Jan 20 18:22:30 crc kubenswrapper[4558]: I0120 18:22:30.454255 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/25c79886-6ce6-4b8d-b2a1-4b2ae7d9a686-bundle" (OuterVolumeSpecName: "bundle") pod "25c79886-6ce6-4b8d-b2a1-4b2ae7d9a686" (UID: "25c79886-6ce6-4b8d-b2a1-4b2ae7d9a686"). InnerVolumeSpecName "bundle". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:22:30 crc kubenswrapper[4558]: I0120 18:22:30.461504 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25c79886-6ce6-4b8d-b2a1-4b2ae7d9a686-kube-api-access-qbkln" (OuterVolumeSpecName: "kube-api-access-qbkln") pod "25c79886-6ce6-4b8d-b2a1-4b2ae7d9a686" (UID: "25c79886-6ce6-4b8d-b2a1-4b2ae7d9a686"). InnerVolumeSpecName "kube-api-access-qbkln". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:22:30 crc kubenswrapper[4558]: I0120 18:22:30.465052 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/25c79886-6ce6-4b8d-b2a1-4b2ae7d9a686-util" (OuterVolumeSpecName: "util") pod "25c79886-6ce6-4b8d-b2a1-4b2ae7d9a686" (UID: "25c79886-6ce6-4b8d-b2a1-4b2ae7d9a686"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:22:30 crc kubenswrapper[4558]: I0120 18:22:30.555529 4558 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/25c79886-6ce6-4b8d-b2a1-4b2ae7d9a686-util\") on node \"crc\" DevicePath \"\"" Jan 20 18:22:30 crc kubenswrapper[4558]: I0120 18:22:30.555572 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qbkln\" (UniqueName: \"kubernetes.io/projected/25c79886-6ce6-4b8d-b2a1-4b2ae7d9a686-kube-api-access-qbkln\") on node \"crc\" DevicePath \"\"" Jan 20 18:22:30 crc kubenswrapper[4558]: I0120 18:22:30.555587 4558 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/25c79886-6ce6-4b8d-b2a1-4b2ae7d9a686-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:22:31 crc kubenswrapper[4558]: I0120 18:22:31.006900 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590xcj9f" event={"ID":"25c79886-6ce6-4b8d-b2a1-4b2ae7d9a686","Type":"ContainerDied","Data":"efd307f072ff773cac4c559f5454ce19a45a4c9e24d17fc1e08a58256a4e1ecd"} Jan 20 18:22:31 crc kubenswrapper[4558]: I0120 18:22:31.006946 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="efd307f072ff773cac4c559f5454ce19a45a4c9e24d17fc1e08a58256a4e1ecd" Jan 20 18:22:31 crc kubenswrapper[4558]: I0120 18:22:31.006978 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590xcj9f" Jan 20 18:22:36 crc kubenswrapper[4558]: I0120 18:22:36.809910 4558 scope.go:117] "RemoveContainer" containerID="2a5c0d00bf04165646b76275ec6ac7b673fad8372a5fef102ec5358b18bceffc" Jan 20 18:22:36 crc kubenswrapper[4558]: I0120 18:22:36.830423 4558 scope.go:117] "RemoveContainer" containerID="4e5cca7d6418c16cfbb8637979fa7981489de21daffbfa5063ee0da6502a654e" Jan 20 18:22:36 crc kubenswrapper[4558]: I0120 18:22:36.856049 4558 scope.go:117] "RemoveContainer" containerID="ecb534e6b6284b3f99c71543f0e35f255e3cc778837439424250508baa4e46eb" Jan 20 18:22:36 crc kubenswrapper[4558]: I0120 18:22:36.874731 4558 scope.go:117] "RemoveContainer" containerID="20ddc216403130197b28523f5d035146f0978efa76f7866277142574a8bf1030" Jan 20 18:22:37 crc kubenswrapper[4558]: I0120 18:22:37.637610 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-779fc9694b-slk8n"] Jan 20 18:22:37 crc kubenswrapper[4558]: E0120 18:22:37.637922 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25c79886-6ce6-4b8d-b2a1-4b2ae7d9a686" containerName="extract" Jan 20 18:22:37 crc kubenswrapper[4558]: I0120 18:22:37.637941 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="25c79886-6ce6-4b8d-b2a1-4b2ae7d9a686" containerName="extract" Jan 20 18:22:37 crc kubenswrapper[4558]: E0120 18:22:37.637978 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25c79886-6ce6-4b8d-b2a1-4b2ae7d9a686" containerName="util" Jan 20 18:22:37 crc kubenswrapper[4558]: I0120 18:22:37.637984 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="25c79886-6ce6-4b8d-b2a1-4b2ae7d9a686" containerName="util" Jan 20 18:22:37 crc kubenswrapper[4558]: E0120 18:22:37.637998 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25c79886-6ce6-4b8d-b2a1-4b2ae7d9a686" containerName="pull" Jan 20 18:22:37 crc kubenswrapper[4558]: I0120 18:22:37.638003 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="25c79886-6ce6-4b8d-b2a1-4b2ae7d9a686" containerName="pull" Jan 20 18:22:37 crc kubenswrapper[4558]: I0120 18:22:37.638127 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="25c79886-6ce6-4b8d-b2a1-4b2ae7d9a686" containerName="extract" Jan 20 18:22:37 crc kubenswrapper[4558]: I0120 18:22:37.638629 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-slk8n" Jan 20 18:22:37 crc kubenswrapper[4558]: I0120 18:22:37.646323 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-dockercfg-cjvdx" Jan 20 18:22:37 crc kubenswrapper[4558]: I0120 18:22:37.656488 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-779fc9694b-slk8n"] Jan 20 18:22:37 crc kubenswrapper[4558]: I0120 18:22:37.671121 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nvw92\" (UniqueName: \"kubernetes.io/projected/55572588-adf6-4e8b-9bb4-f6a7ea3e5b20-kube-api-access-nvw92\") pod \"rabbitmq-cluster-operator-779fc9694b-slk8n\" (UID: \"55572588-adf6-4e8b-9bb4-f6a7ea3e5b20\") " pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-slk8n" Jan 20 18:22:37 crc kubenswrapper[4558]: I0120 18:22:37.772792 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nvw92\" (UniqueName: \"kubernetes.io/projected/55572588-adf6-4e8b-9bb4-f6a7ea3e5b20-kube-api-access-nvw92\") pod \"rabbitmq-cluster-operator-779fc9694b-slk8n\" (UID: \"55572588-adf6-4e8b-9bb4-f6a7ea3e5b20\") " pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-slk8n" Jan 20 18:22:37 crc kubenswrapper[4558]: I0120 18:22:37.802762 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nvw92\" (UniqueName: \"kubernetes.io/projected/55572588-adf6-4e8b-9bb4-f6a7ea3e5b20-kube-api-access-nvw92\") pod \"rabbitmq-cluster-operator-779fc9694b-slk8n\" (UID: \"55572588-adf6-4e8b-9bb4-f6a7ea3e5b20\") " pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-slk8n" Jan 20 18:22:37 crc kubenswrapper[4558]: I0120 18:22:37.953452 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-slk8n" Jan 20 18:22:38 crc kubenswrapper[4558]: I0120 18:22:38.350436 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-779fc9694b-slk8n"] Jan 20 18:22:39 crc kubenswrapper[4558]: I0120 18:22:39.068871 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-slk8n" event={"ID":"55572588-adf6-4e8b-9bb4-f6a7ea3e5b20","Type":"ContainerStarted","Data":"72e477c181eb6b52f76e97a2dea9cd3b48d28509a743930b7e1d03ca393c5a9a"} Jan 20 18:22:39 crc kubenswrapper[4558]: I0120 18:22:39.069347 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-slk8n" event={"ID":"55572588-adf6-4e8b-9bb4-f6a7ea3e5b20","Type":"ContainerStarted","Data":"0e6fbc018ca3d32643a5f5495068ec4e56be7d1595030279b394ba8087eddd90"} Jan 20 18:22:39 crc kubenswrapper[4558]: I0120 18:22:39.092757 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-slk8n" podStartSLOduration=2.092718576 podStartE2EDuration="2.092718576s" podCreationTimestamp="2026-01-20 18:22:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:22:39.084702414 +0000 UTC m=+6052.845040401" watchObservedRunningTime="2026-01-20 18:22:39.092718576 +0000 UTC m=+6052.853056544" Jan 20 18:22:42 crc kubenswrapper[4558]: I0120 18:22:42.692757 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-index-zwc6g"] Jan 20 18:22:42 crc kubenswrapper[4558]: I0120 18:22:42.694785 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-index-zwc6g" Jan 20 18:22:42 crc kubenswrapper[4558]: I0120 18:22:42.697831 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-index-dockercfg-b6rlj" Jan 20 18:22:42 crc kubenswrapper[4558]: I0120 18:22:42.708537 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-index-zwc6g"] Jan 20 18:22:42 crc kubenswrapper[4558]: I0120 18:22:42.750463 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qr7ll\" (UniqueName: \"kubernetes.io/projected/3fa5954a-a8ea-4d9f-85b9-ccf8e36865a2-kube-api-access-qr7ll\") pod \"keystone-operator-index-zwc6g\" (UID: \"3fa5954a-a8ea-4d9f-85b9-ccf8e36865a2\") " pod="openstack-operators/keystone-operator-index-zwc6g" Jan 20 18:22:42 crc kubenswrapper[4558]: I0120 18:22:42.852180 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qr7ll\" (UniqueName: \"kubernetes.io/projected/3fa5954a-a8ea-4d9f-85b9-ccf8e36865a2-kube-api-access-qr7ll\") pod \"keystone-operator-index-zwc6g\" (UID: \"3fa5954a-a8ea-4d9f-85b9-ccf8e36865a2\") " pod="openstack-operators/keystone-operator-index-zwc6g" Jan 20 18:22:42 crc kubenswrapper[4558]: I0120 18:22:42.871616 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qr7ll\" (UniqueName: \"kubernetes.io/projected/3fa5954a-a8ea-4d9f-85b9-ccf8e36865a2-kube-api-access-qr7ll\") pod \"keystone-operator-index-zwc6g\" (UID: \"3fa5954a-a8ea-4d9f-85b9-ccf8e36865a2\") " pod="openstack-operators/keystone-operator-index-zwc6g" Jan 20 18:22:43 crc kubenswrapper[4558]: I0120 18:22:43.017077 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-index-zwc6g" Jan 20 18:22:43 crc kubenswrapper[4558]: I0120 18:22:43.401535 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-index-zwc6g"] Jan 20 18:22:43 crc kubenswrapper[4558]: W0120 18:22:43.406285 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3fa5954a_a8ea_4d9f_85b9_ccf8e36865a2.slice/crio-8202a181a066579a1f100565259340e2fa9cb762c594a8310a320e581b35eda8 WatchSource:0}: Error finding container 8202a181a066579a1f100565259340e2fa9cb762c594a8310a320e581b35eda8: Status 404 returned error can't find the container with id 8202a181a066579a1f100565259340e2fa9cb762c594a8310a320e581b35eda8 Jan 20 18:22:44 crc kubenswrapper[4558]: I0120 18:22:44.122697 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-index-zwc6g" event={"ID":"3fa5954a-a8ea-4d9f-85b9-ccf8e36865a2","Type":"ContainerStarted","Data":"8202a181a066579a1f100565259340e2fa9cb762c594a8310a320e581b35eda8"} Jan 20 18:22:45 crc kubenswrapper[4558]: I0120 18:22:45.133974 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-index-zwc6g" event={"ID":"3fa5954a-a8ea-4d9f-85b9-ccf8e36865a2","Type":"ContainerStarted","Data":"679ca985e780fd8183cd32b5d3aab8580a06199168b3843be49f4a1eccf55842"} Jan 20 18:22:45 crc kubenswrapper[4558]: I0120 18:22:45.149539 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-index-zwc6g" podStartSLOduration=2.524958526 podStartE2EDuration="3.149516661s" podCreationTimestamp="2026-01-20 18:22:42 +0000 UTC" firstStartedPulling="2026-01-20 18:22:43.409201746 +0000 UTC m=+6057.169539713" lastFinishedPulling="2026-01-20 18:22:44.033759881 +0000 UTC m=+6057.794097848" observedRunningTime="2026-01-20 18:22:45.146410701 +0000 UTC m=+6058.906748668" watchObservedRunningTime="2026-01-20 18:22:45.149516661 +0000 UTC m=+6058.909854628" Jan 20 18:22:53 crc kubenswrapper[4558]: I0120 18:22:53.017427 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/keystone-operator-index-zwc6g" Jan 20 18:22:53 crc kubenswrapper[4558]: I0120 18:22:53.018046 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-index-zwc6g" Jan 20 18:22:53 crc kubenswrapper[4558]: I0120 18:22:53.045229 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/keystone-operator-index-zwc6g" Jan 20 18:22:53 crc kubenswrapper[4558]: I0120 18:22:53.221524 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-index-zwc6g" Jan 20 18:22:55 crc kubenswrapper[4558]: I0120 18:22:55.723198 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a069q2k9b"] Jan 20 18:22:55 crc kubenswrapper[4558]: I0120 18:22:55.724781 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a069q2k9b" Jan 20 18:22:55 crc kubenswrapper[4558]: I0120 18:22:55.726431 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-7xp5w" Jan 20 18:22:55 crc kubenswrapper[4558]: I0120 18:22:55.729458 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a069q2k9b"] Jan 20 18:22:55 crc kubenswrapper[4558]: I0120 18:22:55.843336 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kbjf2\" (UniqueName: \"kubernetes.io/projected/89cbbf9e-cc1a-41c9-b39a-16efc1464749-kube-api-access-kbjf2\") pod \"34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a069q2k9b\" (UID: \"89cbbf9e-cc1a-41c9-b39a-16efc1464749\") " pod="openstack-operators/34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a069q2k9b" Jan 20 18:22:55 crc kubenswrapper[4558]: I0120 18:22:55.843382 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/89cbbf9e-cc1a-41c9-b39a-16efc1464749-bundle\") pod \"34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a069q2k9b\" (UID: \"89cbbf9e-cc1a-41c9-b39a-16efc1464749\") " pod="openstack-operators/34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a069q2k9b" Jan 20 18:22:55 crc kubenswrapper[4558]: I0120 18:22:55.843572 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/89cbbf9e-cc1a-41c9-b39a-16efc1464749-util\") pod \"34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a069q2k9b\" (UID: \"89cbbf9e-cc1a-41c9-b39a-16efc1464749\") " pod="openstack-operators/34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a069q2k9b" Jan 20 18:22:55 crc kubenswrapper[4558]: I0120 18:22:55.944669 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kbjf2\" (UniqueName: \"kubernetes.io/projected/89cbbf9e-cc1a-41c9-b39a-16efc1464749-kube-api-access-kbjf2\") pod \"34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a069q2k9b\" (UID: \"89cbbf9e-cc1a-41c9-b39a-16efc1464749\") " pod="openstack-operators/34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a069q2k9b" Jan 20 18:22:55 crc kubenswrapper[4558]: I0120 18:22:55.944718 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/89cbbf9e-cc1a-41c9-b39a-16efc1464749-bundle\") pod \"34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a069q2k9b\" (UID: \"89cbbf9e-cc1a-41c9-b39a-16efc1464749\") " pod="openstack-operators/34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a069q2k9b" Jan 20 18:22:55 crc kubenswrapper[4558]: I0120 18:22:55.944784 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/89cbbf9e-cc1a-41c9-b39a-16efc1464749-util\") pod \"34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a069q2k9b\" (UID: \"89cbbf9e-cc1a-41c9-b39a-16efc1464749\") " pod="openstack-operators/34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a069q2k9b" Jan 20 18:22:55 crc kubenswrapper[4558]: I0120 18:22:55.945262 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/89cbbf9e-cc1a-41c9-b39a-16efc1464749-bundle\") pod \"34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a069q2k9b\" (UID: \"89cbbf9e-cc1a-41c9-b39a-16efc1464749\") " pod="openstack-operators/34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a069q2k9b" Jan 20 18:22:55 crc kubenswrapper[4558]: I0120 18:22:55.945304 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/89cbbf9e-cc1a-41c9-b39a-16efc1464749-util\") pod \"34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a069q2k9b\" (UID: \"89cbbf9e-cc1a-41c9-b39a-16efc1464749\") " pod="openstack-operators/34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a069q2k9b" Jan 20 18:22:55 crc kubenswrapper[4558]: I0120 18:22:55.962891 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kbjf2\" (UniqueName: \"kubernetes.io/projected/89cbbf9e-cc1a-41c9-b39a-16efc1464749-kube-api-access-kbjf2\") pod \"34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a069q2k9b\" (UID: \"89cbbf9e-cc1a-41c9-b39a-16efc1464749\") " pod="openstack-operators/34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a069q2k9b" Jan 20 18:22:56 crc kubenswrapper[4558]: I0120 18:22:56.045549 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a069q2k9b" Jan 20 18:22:56 crc kubenswrapper[4558]: I0120 18:22:56.434505 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a069q2k9b"] Jan 20 18:22:56 crc kubenswrapper[4558]: W0120 18:22:56.439421 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod89cbbf9e_cc1a_41c9_b39a_16efc1464749.slice/crio-3fa7e297032d0883dbf644f2ec31c41f4903b8edacf010e319253ad91578d061 WatchSource:0}: Error finding container 3fa7e297032d0883dbf644f2ec31c41f4903b8edacf010e319253ad91578d061: Status 404 returned error can't find the container with id 3fa7e297032d0883dbf644f2ec31c41f4903b8edacf010e319253ad91578d061 Jan 20 18:22:57 crc kubenswrapper[4558]: I0120 18:22:57.233996 4558 generic.go:334] "Generic (PLEG): container finished" podID="89cbbf9e-cc1a-41c9-b39a-16efc1464749" containerID="2f88680eaf60259826ab0c83065520226713ec29255c97c5b37980455d9912b6" exitCode=0 Jan 20 18:22:57 crc kubenswrapper[4558]: I0120 18:22:57.234077 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a069q2k9b" event={"ID":"89cbbf9e-cc1a-41c9-b39a-16efc1464749","Type":"ContainerDied","Data":"2f88680eaf60259826ab0c83065520226713ec29255c97c5b37980455d9912b6"} Jan 20 18:22:57 crc kubenswrapper[4558]: I0120 18:22:57.234214 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a069q2k9b" event={"ID":"89cbbf9e-cc1a-41c9-b39a-16efc1464749","Type":"ContainerStarted","Data":"3fa7e297032d0883dbf644f2ec31c41f4903b8edacf010e319253ad91578d061"} Jan 20 18:22:57 crc kubenswrapper[4558]: I0120 18:22:57.330354 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 18:22:57 crc kubenswrapper[4558]: I0120 
18:22:57.330632 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 18:22:58 crc kubenswrapper[4558]: I0120 18:22:58.245866 4558 generic.go:334] "Generic (PLEG): container finished" podID="89cbbf9e-cc1a-41c9-b39a-16efc1464749" containerID="fb3b484fdacf4047640d92be4fe8ac29f980071033dab0c8b3c5bce28a7156a6" exitCode=0 Jan 20 18:22:58 crc kubenswrapper[4558]: I0120 18:22:58.245920 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a069q2k9b" event={"ID":"89cbbf9e-cc1a-41c9-b39a-16efc1464749","Type":"ContainerDied","Data":"fb3b484fdacf4047640d92be4fe8ac29f980071033dab0c8b3c5bce28a7156a6"} Jan 20 18:22:59 crc kubenswrapper[4558]: I0120 18:22:59.010776 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/rabbitmq-server-0"] Jan 20 18:22:59 crc kubenswrapper[4558]: I0120 18:22:59.011809 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/rabbitmq-server-0" Jan 20 18:22:59 crc kubenswrapper[4558]: I0120 18:22:59.014454 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"rabbitmq-default-user" Jan 20 18:22:59 crc kubenswrapper[4558]: I0120 18:22:59.014552 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"keystone-kuttl-tests"/"rabbitmq-server-conf" Jan 20 18:22:59 crc kubenswrapper[4558]: I0120 18:22:59.014628 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"rabbitmq-erlang-cookie" Jan 20 18:22:59 crc kubenswrapper[4558]: I0120 18:22:59.014750 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"rabbitmq-server-dockercfg-s9qc5" Jan 20 18:22:59 crc kubenswrapper[4558]: I0120 18:22:59.017122 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"keystone-kuttl-tests"/"rabbitmq-plugins-conf" Jan 20 18:22:59 crc kubenswrapper[4558]: I0120 18:22:59.021893 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/rabbitmq-server-0"] Jan 20 18:22:59 crc kubenswrapper[4558]: I0120 18:22:59.193992 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jfgbz\" (UniqueName: \"kubernetes.io/projected/2275b006-a890-400e-83b6-1c46bf67f62e-kube-api-access-jfgbz\") pod \"rabbitmq-server-0\" (UID: \"2275b006-a890-400e-83b6-1c46bf67f62e\") " pod="keystone-kuttl-tests/rabbitmq-server-0" Jan 20 18:22:59 crc kubenswrapper[4558]: I0120 18:22:59.194043 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/2275b006-a890-400e-83b6-1c46bf67f62e-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"2275b006-a890-400e-83b6-1c46bf67f62e\") " pod="keystone-kuttl-tests/rabbitmq-server-0" Jan 20 18:22:59 crc kubenswrapper[4558]: I0120 18:22:59.194079 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/2275b006-a890-400e-83b6-1c46bf67f62e-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"2275b006-a890-400e-83b6-1c46bf67f62e\") " 
pod="keystone-kuttl-tests/rabbitmq-server-0" Jan 20 18:22:59 crc kubenswrapper[4558]: I0120 18:22:59.194155 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/2275b006-a890-400e-83b6-1c46bf67f62e-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"2275b006-a890-400e-83b6-1c46bf67f62e\") " pod="keystone-kuttl-tests/rabbitmq-server-0" Jan 20 18:22:59 crc kubenswrapper[4558]: I0120 18:22:59.194211 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/2275b006-a890-400e-83b6-1c46bf67f62e-pod-info\") pod \"rabbitmq-server-0\" (UID: \"2275b006-a890-400e-83b6-1c46bf67f62e\") " pod="keystone-kuttl-tests/rabbitmq-server-0" Jan 20 18:22:59 crc kubenswrapper[4558]: I0120 18:22:59.194347 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-ce1c6728-05f2-432f-a7e7-15926523339d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ce1c6728-05f2-432f-a7e7-15926523339d\") pod \"rabbitmq-server-0\" (UID: \"2275b006-a890-400e-83b6-1c46bf67f62e\") " pod="keystone-kuttl-tests/rabbitmq-server-0" Jan 20 18:22:59 crc kubenswrapper[4558]: I0120 18:22:59.194461 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/2275b006-a890-400e-83b6-1c46bf67f62e-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"2275b006-a890-400e-83b6-1c46bf67f62e\") " pod="keystone-kuttl-tests/rabbitmq-server-0" Jan 20 18:22:59 crc kubenswrapper[4558]: I0120 18:22:59.194530 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/2275b006-a890-400e-83b6-1c46bf67f62e-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"2275b006-a890-400e-83b6-1c46bf67f62e\") " pod="keystone-kuttl-tests/rabbitmq-server-0" Jan 20 18:22:59 crc kubenswrapper[4558]: I0120 18:22:59.255839 4558 generic.go:334] "Generic (PLEG): container finished" podID="89cbbf9e-cc1a-41c9-b39a-16efc1464749" containerID="83714c81f77d105070d2595369995b6be30f90fb7765400aef38416b6a88de94" exitCode=0 Jan 20 18:22:59 crc kubenswrapper[4558]: I0120 18:22:59.255874 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a069q2k9b" event={"ID":"89cbbf9e-cc1a-41c9-b39a-16efc1464749","Type":"ContainerDied","Data":"83714c81f77d105070d2595369995b6be30f90fb7765400aef38416b6a88de94"} Jan 20 18:22:59 crc kubenswrapper[4558]: I0120 18:22:59.296289 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/2275b006-a890-400e-83b6-1c46bf67f62e-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"2275b006-a890-400e-83b6-1c46bf67f62e\") " pod="keystone-kuttl-tests/rabbitmq-server-0" Jan 20 18:22:59 crc kubenswrapper[4558]: I0120 18:22:59.296351 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/2275b006-a890-400e-83b6-1c46bf67f62e-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"2275b006-a890-400e-83b6-1c46bf67f62e\") " pod="keystone-kuttl-tests/rabbitmq-server-0" Jan 20 18:22:59 crc kubenswrapper[4558]: I0120 18:22:59.296401 4558 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-jfgbz\" (UniqueName: \"kubernetes.io/projected/2275b006-a890-400e-83b6-1c46bf67f62e-kube-api-access-jfgbz\") pod \"rabbitmq-server-0\" (UID: \"2275b006-a890-400e-83b6-1c46bf67f62e\") " pod="keystone-kuttl-tests/rabbitmq-server-0" Jan 20 18:22:59 crc kubenswrapper[4558]: I0120 18:22:59.296428 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/2275b006-a890-400e-83b6-1c46bf67f62e-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"2275b006-a890-400e-83b6-1c46bf67f62e\") " pod="keystone-kuttl-tests/rabbitmq-server-0" Jan 20 18:22:59 crc kubenswrapper[4558]: I0120 18:22:59.296462 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/2275b006-a890-400e-83b6-1c46bf67f62e-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"2275b006-a890-400e-83b6-1c46bf67f62e\") " pod="keystone-kuttl-tests/rabbitmq-server-0" Jan 20 18:22:59 crc kubenswrapper[4558]: I0120 18:22:59.296486 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/2275b006-a890-400e-83b6-1c46bf67f62e-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"2275b006-a890-400e-83b6-1c46bf67f62e\") " pod="keystone-kuttl-tests/rabbitmq-server-0" Jan 20 18:22:59 crc kubenswrapper[4558]: I0120 18:22:59.296505 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/2275b006-a890-400e-83b6-1c46bf67f62e-pod-info\") pod \"rabbitmq-server-0\" (UID: \"2275b006-a890-400e-83b6-1c46bf67f62e\") " pod="keystone-kuttl-tests/rabbitmq-server-0" Jan 20 18:22:59 crc kubenswrapper[4558]: I0120 18:22:59.296531 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-ce1c6728-05f2-432f-a7e7-15926523339d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ce1c6728-05f2-432f-a7e7-15926523339d\") pod \"rabbitmq-server-0\" (UID: \"2275b006-a890-400e-83b6-1c46bf67f62e\") " pod="keystone-kuttl-tests/rabbitmq-server-0" Jan 20 18:22:59 crc kubenswrapper[4558]: I0120 18:22:59.297211 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/2275b006-a890-400e-83b6-1c46bf67f62e-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"2275b006-a890-400e-83b6-1c46bf67f62e\") " pod="keystone-kuttl-tests/rabbitmq-server-0" Jan 20 18:22:59 crc kubenswrapper[4558]: I0120 18:22:59.297217 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/2275b006-a890-400e-83b6-1c46bf67f62e-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"2275b006-a890-400e-83b6-1c46bf67f62e\") " pod="keystone-kuttl-tests/rabbitmq-server-0" Jan 20 18:22:59 crc kubenswrapper[4558]: I0120 18:22:59.297979 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/2275b006-a890-400e-83b6-1c46bf67f62e-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"2275b006-a890-400e-83b6-1c46bf67f62e\") " pod="keystone-kuttl-tests/rabbitmq-server-0" Jan 20 18:22:59 crc kubenswrapper[4558]: I0120 18:22:59.299763 4558 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. 
Skipping MountDevice... Jan 20 18:22:59 crc kubenswrapper[4558]: I0120 18:22:59.299802 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-ce1c6728-05f2-432f-a7e7-15926523339d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ce1c6728-05f2-432f-a7e7-15926523339d\") pod \"rabbitmq-server-0\" (UID: \"2275b006-a890-400e-83b6-1c46bf67f62e\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/7dd7670e1f3c8bf48d892f6f7f34aca8efabf556f53571930c3e3e8c871583d6/globalmount\"" pod="keystone-kuttl-tests/rabbitmq-server-0" Jan 20 18:22:59 crc kubenswrapper[4558]: I0120 18:22:59.302724 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/2275b006-a890-400e-83b6-1c46bf67f62e-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"2275b006-a890-400e-83b6-1c46bf67f62e\") " pod="keystone-kuttl-tests/rabbitmq-server-0" Jan 20 18:22:59 crc kubenswrapper[4558]: I0120 18:22:59.302838 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/2275b006-a890-400e-83b6-1c46bf67f62e-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"2275b006-a890-400e-83b6-1c46bf67f62e\") " pod="keystone-kuttl-tests/rabbitmq-server-0" Jan 20 18:22:59 crc kubenswrapper[4558]: I0120 18:22:59.303043 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/2275b006-a890-400e-83b6-1c46bf67f62e-pod-info\") pod \"rabbitmq-server-0\" (UID: \"2275b006-a890-400e-83b6-1c46bf67f62e\") " pod="keystone-kuttl-tests/rabbitmq-server-0" Jan 20 18:22:59 crc kubenswrapper[4558]: I0120 18:22:59.311641 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jfgbz\" (UniqueName: \"kubernetes.io/projected/2275b006-a890-400e-83b6-1c46bf67f62e-kube-api-access-jfgbz\") pod \"rabbitmq-server-0\" (UID: \"2275b006-a890-400e-83b6-1c46bf67f62e\") " pod="keystone-kuttl-tests/rabbitmq-server-0" Jan 20 18:22:59 crc kubenswrapper[4558]: I0120 18:22:59.321371 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-ce1c6728-05f2-432f-a7e7-15926523339d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ce1c6728-05f2-432f-a7e7-15926523339d\") pod \"rabbitmq-server-0\" (UID: \"2275b006-a890-400e-83b6-1c46bf67f62e\") " pod="keystone-kuttl-tests/rabbitmq-server-0" Jan 20 18:22:59 crc kubenswrapper[4558]: I0120 18:22:59.328838 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/rabbitmq-server-0" Jan 20 18:22:59 crc kubenswrapper[4558]: I0120 18:22:59.707458 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/rabbitmq-server-0"] Jan 20 18:23:00 crc kubenswrapper[4558]: I0120 18:23:00.263011 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/rabbitmq-server-0" event={"ID":"2275b006-a890-400e-83b6-1c46bf67f62e","Type":"ContainerStarted","Data":"be65202215aaffce0ad3e263130c74f870299cf034d08bff14117c0fc0cc4439"} Jan 20 18:23:00 crc kubenswrapper[4558]: I0120 18:23:00.505045 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a069q2k9b" Jan 20 18:23:00 crc kubenswrapper[4558]: I0120 18:23:00.617725 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/89cbbf9e-cc1a-41c9-b39a-16efc1464749-bundle\") pod \"89cbbf9e-cc1a-41c9-b39a-16efc1464749\" (UID: \"89cbbf9e-cc1a-41c9-b39a-16efc1464749\") " Jan 20 18:23:00 crc kubenswrapper[4558]: I0120 18:23:00.618000 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/89cbbf9e-cc1a-41c9-b39a-16efc1464749-util\") pod \"89cbbf9e-cc1a-41c9-b39a-16efc1464749\" (UID: \"89cbbf9e-cc1a-41c9-b39a-16efc1464749\") " Jan 20 18:23:00 crc kubenswrapper[4558]: I0120 18:23:00.618187 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kbjf2\" (UniqueName: \"kubernetes.io/projected/89cbbf9e-cc1a-41c9-b39a-16efc1464749-kube-api-access-kbjf2\") pod \"89cbbf9e-cc1a-41c9-b39a-16efc1464749\" (UID: \"89cbbf9e-cc1a-41c9-b39a-16efc1464749\") " Jan 20 18:23:00 crc kubenswrapper[4558]: I0120 18:23:00.618799 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/89cbbf9e-cc1a-41c9-b39a-16efc1464749-bundle" (OuterVolumeSpecName: "bundle") pod "89cbbf9e-cc1a-41c9-b39a-16efc1464749" (UID: "89cbbf9e-cc1a-41c9-b39a-16efc1464749"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:23:00 crc kubenswrapper[4558]: I0120 18:23:00.622771 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/89cbbf9e-cc1a-41c9-b39a-16efc1464749-kube-api-access-kbjf2" (OuterVolumeSpecName: "kube-api-access-kbjf2") pod "89cbbf9e-cc1a-41c9-b39a-16efc1464749" (UID: "89cbbf9e-cc1a-41c9-b39a-16efc1464749"). InnerVolumeSpecName "kube-api-access-kbjf2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:23:00 crc kubenswrapper[4558]: I0120 18:23:00.629718 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/89cbbf9e-cc1a-41c9-b39a-16efc1464749-util" (OuterVolumeSpecName: "util") pod "89cbbf9e-cc1a-41c9-b39a-16efc1464749" (UID: "89cbbf9e-cc1a-41c9-b39a-16efc1464749"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:23:00 crc kubenswrapper[4558]: I0120 18:23:00.719795 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kbjf2\" (UniqueName: \"kubernetes.io/projected/89cbbf9e-cc1a-41c9-b39a-16efc1464749-kube-api-access-kbjf2\") on node \"crc\" DevicePath \"\"" Jan 20 18:23:00 crc kubenswrapper[4558]: I0120 18:23:00.719834 4558 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/89cbbf9e-cc1a-41c9-b39a-16efc1464749-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:23:00 crc kubenswrapper[4558]: I0120 18:23:00.719843 4558 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/89cbbf9e-cc1a-41c9-b39a-16efc1464749-util\") on node \"crc\" DevicePath \"\"" Jan 20 18:23:01 crc kubenswrapper[4558]: I0120 18:23:01.272798 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/rabbitmq-server-0" event={"ID":"2275b006-a890-400e-83b6-1c46bf67f62e","Type":"ContainerStarted","Data":"d9c022000639f60b589dd52328b89c62f22fa3c2d49f87fd4714cd3c99bfb24a"} Jan 20 18:23:01 crc kubenswrapper[4558]: I0120 18:23:01.275352 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a069q2k9b" event={"ID":"89cbbf9e-cc1a-41c9-b39a-16efc1464749","Type":"ContainerDied","Data":"3fa7e297032d0883dbf644f2ec31c41f4903b8edacf010e319253ad91578d061"} Jan 20 18:23:01 crc kubenswrapper[4558]: I0120 18:23:01.275411 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3fa7e297032d0883dbf644f2ec31c41f4903b8edacf010e319253ad91578d061" Jan 20 18:23:01 crc kubenswrapper[4558]: I0120 18:23:01.275376 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a069q2k9b" Jan 20 18:23:08 crc kubenswrapper[4558]: I0120 18:23:08.029304 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-6c45489b98-b6947"] Jan 20 18:23:08 crc kubenswrapper[4558]: E0120 18:23:08.030116 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89cbbf9e-cc1a-41c9-b39a-16efc1464749" containerName="util" Jan 20 18:23:08 crc kubenswrapper[4558]: I0120 18:23:08.030129 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="89cbbf9e-cc1a-41c9-b39a-16efc1464749" containerName="util" Jan 20 18:23:08 crc kubenswrapper[4558]: E0120 18:23:08.030154 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89cbbf9e-cc1a-41c9-b39a-16efc1464749" containerName="pull" Jan 20 18:23:08 crc kubenswrapper[4558]: I0120 18:23:08.030175 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="89cbbf9e-cc1a-41c9-b39a-16efc1464749" containerName="pull" Jan 20 18:23:08 crc kubenswrapper[4558]: E0120 18:23:08.030200 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89cbbf9e-cc1a-41c9-b39a-16efc1464749" containerName="extract" Jan 20 18:23:08 crc kubenswrapper[4558]: I0120 18:23:08.030206 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="89cbbf9e-cc1a-41c9-b39a-16efc1464749" containerName="extract" Jan 20 18:23:08 crc kubenswrapper[4558]: I0120 18:23:08.030352 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="89cbbf9e-cc1a-41c9-b39a-16efc1464749" containerName="extract" Jan 20 18:23:08 crc kubenswrapper[4558]: I0120 18:23:08.030832 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-6c45489b98-b6947" Jan 20 18:23:08 crc kubenswrapper[4558]: I0120 18:23:08.039651 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-service-cert" Jan 20 18:23:08 crc kubenswrapper[4558]: I0120 18:23:08.039712 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-ndgks" Jan 20 18:23:08 crc kubenswrapper[4558]: I0120 18:23:08.045486 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-6c45489b98-b6947"] Jan 20 18:23:08 crc kubenswrapper[4558]: I0120 18:23:08.130099 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cg24c\" (UniqueName: \"kubernetes.io/projected/7e1d554f-8358-4ab1-ac14-764882a73ed2-kube-api-access-cg24c\") pod \"keystone-operator-controller-manager-6c45489b98-b6947\" (UID: \"7e1d554f-8358-4ab1-ac14-764882a73ed2\") " pod="openstack-operators/keystone-operator-controller-manager-6c45489b98-b6947" Jan 20 18:23:08 crc kubenswrapper[4558]: I0120 18:23:08.130150 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/7e1d554f-8358-4ab1-ac14-764882a73ed2-webhook-cert\") pod \"keystone-operator-controller-manager-6c45489b98-b6947\" (UID: \"7e1d554f-8358-4ab1-ac14-764882a73ed2\") " pod="openstack-operators/keystone-operator-controller-manager-6c45489b98-b6947" Jan 20 18:23:08 crc kubenswrapper[4558]: I0120 18:23:08.130487 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/7e1d554f-8358-4ab1-ac14-764882a73ed2-apiservice-cert\") pod \"keystone-operator-controller-manager-6c45489b98-b6947\" (UID: \"7e1d554f-8358-4ab1-ac14-764882a73ed2\") " pod="openstack-operators/keystone-operator-controller-manager-6c45489b98-b6947" Jan 20 18:23:08 crc kubenswrapper[4558]: I0120 18:23:08.232358 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/7e1d554f-8358-4ab1-ac14-764882a73ed2-apiservice-cert\") pod \"keystone-operator-controller-manager-6c45489b98-b6947\" (UID: \"7e1d554f-8358-4ab1-ac14-764882a73ed2\") " pod="openstack-operators/keystone-operator-controller-manager-6c45489b98-b6947" Jan 20 18:23:08 crc kubenswrapper[4558]: I0120 18:23:08.232488 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cg24c\" (UniqueName: \"kubernetes.io/projected/7e1d554f-8358-4ab1-ac14-764882a73ed2-kube-api-access-cg24c\") pod \"keystone-operator-controller-manager-6c45489b98-b6947\" (UID: \"7e1d554f-8358-4ab1-ac14-764882a73ed2\") " pod="openstack-operators/keystone-operator-controller-manager-6c45489b98-b6947" Jan 20 18:23:08 crc kubenswrapper[4558]: I0120 18:23:08.232517 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/7e1d554f-8358-4ab1-ac14-764882a73ed2-webhook-cert\") pod \"keystone-operator-controller-manager-6c45489b98-b6947\" (UID: \"7e1d554f-8358-4ab1-ac14-764882a73ed2\") " pod="openstack-operators/keystone-operator-controller-manager-6c45489b98-b6947" Jan 20 18:23:08 crc kubenswrapper[4558]: I0120 18:23:08.239199 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/7e1d554f-8358-4ab1-ac14-764882a73ed2-webhook-cert\") pod \"keystone-operator-controller-manager-6c45489b98-b6947\" (UID: \"7e1d554f-8358-4ab1-ac14-764882a73ed2\") " pod="openstack-operators/keystone-operator-controller-manager-6c45489b98-b6947" Jan 20 18:23:08 crc kubenswrapper[4558]: I0120 18:23:08.239229 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/7e1d554f-8358-4ab1-ac14-764882a73ed2-apiservice-cert\") pod \"keystone-operator-controller-manager-6c45489b98-b6947\" (UID: \"7e1d554f-8358-4ab1-ac14-764882a73ed2\") " pod="openstack-operators/keystone-operator-controller-manager-6c45489b98-b6947" Jan 20 18:23:08 crc kubenswrapper[4558]: I0120 18:23:08.249041 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cg24c\" (UniqueName: \"kubernetes.io/projected/7e1d554f-8358-4ab1-ac14-764882a73ed2-kube-api-access-cg24c\") pod \"keystone-operator-controller-manager-6c45489b98-b6947\" (UID: \"7e1d554f-8358-4ab1-ac14-764882a73ed2\") " pod="openstack-operators/keystone-operator-controller-manager-6c45489b98-b6947" Jan 20 18:23:08 crc kubenswrapper[4558]: I0120 18:23:08.348176 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-6c45489b98-b6947" Jan 20 18:23:08 crc kubenswrapper[4558]: I0120 18:23:08.747583 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-6c45489b98-b6947"] Jan 20 18:23:08 crc kubenswrapper[4558]: W0120 18:23:08.757357 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7e1d554f_8358_4ab1_ac14_764882a73ed2.slice/crio-9c4bdbc8ad238470d90442956d64d72149f20345568f45f2cd24317dbaa60891 WatchSource:0}: Error finding container 9c4bdbc8ad238470d90442956d64d72149f20345568f45f2cd24317dbaa60891: Status 404 returned error can't find the container with id 9c4bdbc8ad238470d90442956d64d72149f20345568f45f2cd24317dbaa60891 Jan 20 18:23:09 crc kubenswrapper[4558]: I0120 18:23:09.345917 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-6c45489b98-b6947" event={"ID":"7e1d554f-8358-4ab1-ac14-764882a73ed2","Type":"ContainerStarted","Data":"71c9945d04185339d99c62f8d4838a72ee37c8a2480fef4aa780330f9be75810"} Jan 20 18:23:09 crc kubenswrapper[4558]: I0120 18:23:09.346458 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-6c45489b98-b6947" Jan 20 18:23:09 crc kubenswrapper[4558]: I0120 18:23:09.346470 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-6c45489b98-b6947" event={"ID":"7e1d554f-8358-4ab1-ac14-764882a73ed2","Type":"ContainerStarted","Data":"9c4bdbc8ad238470d90442956d64d72149f20345568f45f2cd24317dbaa60891"} Jan 20 18:23:09 crc kubenswrapper[4558]: I0120 18:23:09.365751 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-6c45489b98-b6947" podStartSLOduration=1.3657359900000001 podStartE2EDuration="1.36573599s" podCreationTimestamp="2026-01-20 18:23:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:23:09.362027586 +0000 UTC m=+6083.122365544" watchObservedRunningTime="2026-01-20 18:23:09.36573599 +0000 UTC m=+6083.126073957" Jan 20 18:23:18 crc kubenswrapper[4558]: I0120 18:23:18.353184 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-6c45489b98-b6947" Jan 20 18:23:27 crc kubenswrapper[4558]: I0120 18:23:27.181549 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/keystone-db-create-fxkdq"] Jan 20 18:23:27 crc kubenswrapper[4558]: I0120 18:23:27.183391 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-db-create-fxkdq" Jan 20 18:23:27 crc kubenswrapper[4558]: I0120 18:23:27.190595 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/keystone-f525-account-create-update-22vbg"] Jan 20 18:23:27 crc kubenswrapper[4558]: I0120 18:23:27.191628 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-f525-account-create-update-22vbg" Jan 20 18:23:27 crc kubenswrapper[4558]: I0120 18:23:27.194591 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-db-secret" Jan 20 18:23:27 crc kubenswrapper[4558]: I0120 18:23:27.196704 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-db-create-fxkdq"] Jan 20 18:23:27 crc kubenswrapper[4558]: I0120 18:23:27.201919 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-f525-account-create-update-22vbg"] Jan 20 18:23:27 crc kubenswrapper[4558]: I0120 18:23:27.330158 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 18:23:27 crc kubenswrapper[4558]: I0120 18:23:27.330279 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 18:23:27 crc kubenswrapper[4558]: I0120 18:23:27.330389 4558 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" Jan 20 18:23:27 crc kubenswrapper[4558]: I0120 18:23:27.331541 4558 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ed78f04bb9083c901150aea9b4470bb3023518f531b71da44ae6842f9fba65fa"} pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 20 18:23:27 crc kubenswrapper[4558]: I0120 18:23:27.331614 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" containerID="cri-o://ed78f04bb9083c901150aea9b4470bb3023518f531b71da44ae6842f9fba65fa" gracePeriod=600 Jan 20 18:23:27 crc kubenswrapper[4558]: I0120 18:23:27.332863 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d21eac78-1353-4b39-8819-1b37405e07e5-operator-scripts\") pod \"keystone-db-create-fxkdq\" (UID: \"d21eac78-1353-4b39-8819-1b37405e07e5\") " pod="keystone-kuttl-tests/keystone-db-create-fxkdq" Jan 20 18:23:27 crc kubenswrapper[4558]: I0120 18:23:27.333213 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c6bf4\" (UniqueName: \"kubernetes.io/projected/53f69d5f-7460-49ee-bb1b-97a5bbda4d75-kube-api-access-c6bf4\") pod \"keystone-f525-account-create-update-22vbg\" (UID: \"53f69d5f-7460-49ee-bb1b-97a5bbda4d75\") " pod="keystone-kuttl-tests/keystone-f525-account-create-update-22vbg" Jan 20 18:23:27 crc kubenswrapper[4558]: I0120 18:23:27.333332 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xvp7x\" (UniqueName: 
\"kubernetes.io/projected/d21eac78-1353-4b39-8819-1b37405e07e5-kube-api-access-xvp7x\") pod \"keystone-db-create-fxkdq\" (UID: \"d21eac78-1353-4b39-8819-1b37405e07e5\") " pod="keystone-kuttl-tests/keystone-db-create-fxkdq" Jan 20 18:23:27 crc kubenswrapper[4558]: I0120 18:23:27.333416 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/53f69d5f-7460-49ee-bb1b-97a5bbda4d75-operator-scripts\") pod \"keystone-f525-account-create-update-22vbg\" (UID: \"53f69d5f-7460-49ee-bb1b-97a5bbda4d75\") " pod="keystone-kuttl-tests/keystone-f525-account-create-update-22vbg" Jan 20 18:23:27 crc kubenswrapper[4558]: I0120 18:23:27.435692 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c6bf4\" (UniqueName: \"kubernetes.io/projected/53f69d5f-7460-49ee-bb1b-97a5bbda4d75-kube-api-access-c6bf4\") pod \"keystone-f525-account-create-update-22vbg\" (UID: \"53f69d5f-7460-49ee-bb1b-97a5bbda4d75\") " pod="keystone-kuttl-tests/keystone-f525-account-create-update-22vbg" Jan 20 18:23:27 crc kubenswrapper[4558]: I0120 18:23:27.435939 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xvp7x\" (UniqueName: \"kubernetes.io/projected/d21eac78-1353-4b39-8819-1b37405e07e5-kube-api-access-xvp7x\") pod \"keystone-db-create-fxkdq\" (UID: \"d21eac78-1353-4b39-8819-1b37405e07e5\") " pod="keystone-kuttl-tests/keystone-db-create-fxkdq" Jan 20 18:23:27 crc kubenswrapper[4558]: I0120 18:23:27.436004 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/53f69d5f-7460-49ee-bb1b-97a5bbda4d75-operator-scripts\") pod \"keystone-f525-account-create-update-22vbg\" (UID: \"53f69d5f-7460-49ee-bb1b-97a5bbda4d75\") " pod="keystone-kuttl-tests/keystone-f525-account-create-update-22vbg" Jan 20 18:23:27 crc kubenswrapper[4558]: I0120 18:23:27.436074 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d21eac78-1353-4b39-8819-1b37405e07e5-operator-scripts\") pod \"keystone-db-create-fxkdq\" (UID: \"d21eac78-1353-4b39-8819-1b37405e07e5\") " pod="keystone-kuttl-tests/keystone-db-create-fxkdq" Jan 20 18:23:27 crc kubenswrapper[4558]: I0120 18:23:27.436951 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/53f69d5f-7460-49ee-bb1b-97a5bbda4d75-operator-scripts\") pod \"keystone-f525-account-create-update-22vbg\" (UID: \"53f69d5f-7460-49ee-bb1b-97a5bbda4d75\") " pod="keystone-kuttl-tests/keystone-f525-account-create-update-22vbg" Jan 20 18:23:27 crc kubenswrapper[4558]: I0120 18:23:27.436998 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d21eac78-1353-4b39-8819-1b37405e07e5-operator-scripts\") pod \"keystone-db-create-fxkdq\" (UID: \"d21eac78-1353-4b39-8819-1b37405e07e5\") " pod="keystone-kuttl-tests/keystone-db-create-fxkdq" Jan 20 18:23:27 crc kubenswrapper[4558]: I0120 18:23:27.453554 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xvp7x\" (UniqueName: \"kubernetes.io/projected/d21eac78-1353-4b39-8819-1b37405e07e5-kube-api-access-xvp7x\") pod \"keystone-db-create-fxkdq\" (UID: \"d21eac78-1353-4b39-8819-1b37405e07e5\") " pod="keystone-kuttl-tests/keystone-db-create-fxkdq" 
Jan 20 18:23:27 crc kubenswrapper[4558]: I0120 18:23:27.453625 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c6bf4\" (UniqueName: \"kubernetes.io/projected/53f69d5f-7460-49ee-bb1b-97a5bbda4d75-kube-api-access-c6bf4\") pod \"keystone-f525-account-create-update-22vbg\" (UID: \"53f69d5f-7460-49ee-bb1b-97a5bbda4d75\") " pod="keystone-kuttl-tests/keystone-f525-account-create-update-22vbg" Jan 20 18:23:27 crc kubenswrapper[4558]: I0120 18:23:27.502745 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-db-create-fxkdq" Jan 20 18:23:27 crc kubenswrapper[4558]: I0120 18:23:27.504420 4558 generic.go:334] "Generic (PLEG): container finished" podID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerID="ed78f04bb9083c901150aea9b4470bb3023518f531b71da44ae6842f9fba65fa" exitCode=0 Jan 20 18:23:27 crc kubenswrapper[4558]: I0120 18:23:27.504481 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerDied","Data":"ed78f04bb9083c901150aea9b4470bb3023518f531b71da44ae6842f9fba65fa"} Jan 20 18:23:27 crc kubenswrapper[4558]: I0120 18:23:27.504529 4558 scope.go:117] "RemoveContainer" containerID="27314e1a8af2e8df8ecd2e7a06cbb630afbfecd8ca2338a1f199cf92f982f581" Jan 20 18:23:27 crc kubenswrapper[4558]: I0120 18:23:27.509454 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-f525-account-create-update-22vbg" Jan 20 18:23:27 crc kubenswrapper[4558]: I0120 18:23:27.908815 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-db-create-fxkdq"] Jan 20 18:23:27 crc kubenswrapper[4558]: I0120 18:23:27.970403 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-f525-account-create-update-22vbg"] Jan 20 18:23:27 crc kubenswrapper[4558]: W0120 18:23:27.971991 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod53f69d5f_7460_49ee_bb1b_97a5bbda4d75.slice/crio-606ab8f702acdfbf0fae7396b26f4e32b404245258d04204f964e9032727cebc WatchSource:0}: Error finding container 606ab8f702acdfbf0fae7396b26f4e32b404245258d04204f964e9032727cebc: Status 404 returned error can't find the container with id 606ab8f702acdfbf0fae7396b26f4e32b404245258d04204f964e9032727cebc Jan 20 18:23:28 crc kubenswrapper[4558]: I0120 18:23:28.515340 4558 generic.go:334] "Generic (PLEG): container finished" podID="d21eac78-1353-4b39-8819-1b37405e07e5" containerID="67d36da5c123616fc1171727d20cabb6425cd135b3ab044d957ff405bc749a08" exitCode=0 Jan 20 18:23:28 crc kubenswrapper[4558]: I0120 18:23:28.515454 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-db-create-fxkdq" event={"ID":"d21eac78-1353-4b39-8819-1b37405e07e5","Type":"ContainerDied","Data":"67d36da5c123616fc1171727d20cabb6425cd135b3ab044d957ff405bc749a08"} Jan 20 18:23:28 crc kubenswrapper[4558]: I0120 18:23:28.515783 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-db-create-fxkdq" event={"ID":"d21eac78-1353-4b39-8819-1b37405e07e5","Type":"ContainerStarted","Data":"a0ac03ec8095c4ba89d01a8a8894b42a19f0baa0ef7056ac3eef30d8f1f8a35d"} Jan 20 18:23:28 crc kubenswrapper[4558]: I0120 18:23:28.518258 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerStarted","Data":"814c7f4e012758db904c630a7832be54bd493eba5362670666ad7cbcf498af98"} Jan 20 18:23:28 crc kubenswrapper[4558]: I0120 18:23:28.520445 4558 generic.go:334] "Generic (PLEG): container finished" podID="53f69d5f-7460-49ee-bb1b-97a5bbda4d75" containerID="4ecbc6be3564d8338b4185ca1fdb642003ffcb46d70cc9b885b85f2aaccb1a94" exitCode=0 Jan 20 18:23:28 crc kubenswrapper[4558]: I0120 18:23:28.520498 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-f525-account-create-update-22vbg" event={"ID":"53f69d5f-7460-49ee-bb1b-97a5bbda4d75","Type":"ContainerDied","Data":"4ecbc6be3564d8338b4185ca1fdb642003ffcb46d70cc9b885b85f2aaccb1a94"} Jan 20 18:23:28 crc kubenswrapper[4558]: I0120 18:23:28.520527 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-f525-account-create-update-22vbg" event={"ID":"53f69d5f-7460-49ee-bb1b-97a5bbda4d75","Type":"ContainerStarted","Data":"606ab8f702acdfbf0fae7396b26f4e32b404245258d04204f964e9032727cebc"} Jan 20 18:23:29 crc kubenswrapper[4558]: I0120 18:23:29.852529 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-db-create-fxkdq" Jan 20 18:23:29 crc kubenswrapper[4558]: I0120 18:23:29.910932 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-f525-account-create-update-22vbg" Jan 20 18:23:29 crc kubenswrapper[4558]: I0120 18:23:29.980787 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xvp7x\" (UniqueName: \"kubernetes.io/projected/d21eac78-1353-4b39-8819-1b37405e07e5-kube-api-access-xvp7x\") pod \"d21eac78-1353-4b39-8819-1b37405e07e5\" (UID: \"d21eac78-1353-4b39-8819-1b37405e07e5\") " Jan 20 18:23:29 crc kubenswrapper[4558]: I0120 18:23:29.980938 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d21eac78-1353-4b39-8819-1b37405e07e5-operator-scripts\") pod \"d21eac78-1353-4b39-8819-1b37405e07e5\" (UID: \"d21eac78-1353-4b39-8819-1b37405e07e5\") " Jan 20 18:23:29 crc kubenswrapper[4558]: I0120 18:23:29.981787 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d21eac78-1353-4b39-8819-1b37405e07e5-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d21eac78-1353-4b39-8819-1b37405e07e5" (UID: "d21eac78-1353-4b39-8819-1b37405e07e5"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:23:29 crc kubenswrapper[4558]: I0120 18:23:29.988801 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d21eac78-1353-4b39-8819-1b37405e07e5-kube-api-access-xvp7x" (OuterVolumeSpecName: "kube-api-access-xvp7x") pod "d21eac78-1353-4b39-8819-1b37405e07e5" (UID: "d21eac78-1353-4b39-8819-1b37405e07e5"). InnerVolumeSpecName "kube-api-access-xvp7x". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:23:30 crc kubenswrapper[4558]: I0120 18:23:30.083630 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/53f69d5f-7460-49ee-bb1b-97a5bbda4d75-operator-scripts\") pod \"53f69d5f-7460-49ee-bb1b-97a5bbda4d75\" (UID: \"53f69d5f-7460-49ee-bb1b-97a5bbda4d75\") " Jan 20 18:23:30 crc kubenswrapper[4558]: I0120 18:23:30.083697 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c6bf4\" (UniqueName: \"kubernetes.io/projected/53f69d5f-7460-49ee-bb1b-97a5bbda4d75-kube-api-access-c6bf4\") pod \"53f69d5f-7460-49ee-bb1b-97a5bbda4d75\" (UID: \"53f69d5f-7460-49ee-bb1b-97a5bbda4d75\") " Jan 20 18:23:30 crc kubenswrapper[4558]: I0120 18:23:30.084177 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/53f69d5f-7460-49ee-bb1b-97a5bbda4d75-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "53f69d5f-7460-49ee-bb1b-97a5bbda4d75" (UID: "53f69d5f-7460-49ee-bb1b-97a5bbda4d75"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:23:30 crc kubenswrapper[4558]: I0120 18:23:30.084636 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xvp7x\" (UniqueName: \"kubernetes.io/projected/d21eac78-1353-4b39-8819-1b37405e07e5-kube-api-access-xvp7x\") on node \"crc\" DevicePath \"\"" Jan 20 18:23:30 crc kubenswrapper[4558]: I0120 18:23:30.084660 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d21eac78-1353-4b39-8819-1b37405e07e5-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 18:23:30 crc kubenswrapper[4558]: I0120 18:23:30.084671 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/53f69d5f-7460-49ee-bb1b-97a5bbda4d75-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 18:23:30 crc kubenswrapper[4558]: I0120 18:23:30.088052 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/53f69d5f-7460-49ee-bb1b-97a5bbda4d75-kube-api-access-c6bf4" (OuterVolumeSpecName: "kube-api-access-c6bf4") pod "53f69d5f-7460-49ee-bb1b-97a5bbda4d75" (UID: "53f69d5f-7460-49ee-bb1b-97a5bbda4d75"). InnerVolumeSpecName "kube-api-access-c6bf4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:23:30 crc kubenswrapper[4558]: I0120 18:23:30.186557 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c6bf4\" (UniqueName: \"kubernetes.io/projected/53f69d5f-7460-49ee-bb1b-97a5bbda4d75-kube-api-access-c6bf4\") on node \"crc\" DevicePath \"\"" Jan 20 18:23:30 crc kubenswrapper[4558]: I0120 18:23:30.540844 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-f525-account-create-update-22vbg" event={"ID":"53f69d5f-7460-49ee-bb1b-97a5bbda4d75","Type":"ContainerDied","Data":"606ab8f702acdfbf0fae7396b26f4e32b404245258d04204f964e9032727cebc"} Jan 20 18:23:30 crc kubenswrapper[4558]: I0120 18:23:30.540923 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="606ab8f702acdfbf0fae7396b26f4e32b404245258d04204f964e9032727cebc" Jan 20 18:23:30 crc kubenswrapper[4558]: I0120 18:23:30.540874 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-f525-account-create-update-22vbg" Jan 20 18:23:30 crc kubenswrapper[4558]: I0120 18:23:30.542880 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-db-create-fxkdq" event={"ID":"d21eac78-1353-4b39-8819-1b37405e07e5","Type":"ContainerDied","Data":"a0ac03ec8095c4ba89d01a8a8894b42a19f0baa0ef7056ac3eef30d8f1f8a35d"} Jan 20 18:23:30 crc kubenswrapper[4558]: I0120 18:23:30.542940 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a0ac03ec8095c4ba89d01a8a8894b42a19f0baa0ef7056ac3eef30d8f1f8a35d" Jan 20 18:23:30 crc kubenswrapper[4558]: I0120 18:23:30.542896 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-db-create-fxkdq" Jan 20 18:23:32 crc kubenswrapper[4558]: I0120 18:23:32.500093 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-nv4fq"] Jan 20 18:23:32 crc kubenswrapper[4558]: E0120 18:23:32.500809 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53f69d5f-7460-49ee-bb1b-97a5bbda4d75" containerName="mariadb-account-create-update" Jan 20 18:23:32 crc kubenswrapper[4558]: I0120 18:23:32.500823 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="53f69d5f-7460-49ee-bb1b-97a5bbda4d75" containerName="mariadb-account-create-update" Jan 20 18:23:32 crc kubenswrapper[4558]: E0120 18:23:32.500842 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d21eac78-1353-4b39-8819-1b37405e07e5" containerName="mariadb-database-create" Jan 20 18:23:32 crc kubenswrapper[4558]: I0120 18:23:32.500848 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d21eac78-1353-4b39-8819-1b37405e07e5" containerName="mariadb-database-create" Jan 20 18:23:32 crc kubenswrapper[4558]: I0120 18:23:32.500989 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="53f69d5f-7460-49ee-bb1b-97a5bbda4d75" containerName="mariadb-account-create-update" Jan 20 18:23:32 crc kubenswrapper[4558]: I0120 18:23:32.501001 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d21eac78-1353-4b39-8819-1b37405e07e5" containerName="mariadb-database-create" Jan 20 18:23:32 crc kubenswrapper[4558]: I0120 18:23:32.502002 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nv4fq" Jan 20 18:23:32 crc kubenswrapper[4558]: I0120 18:23:32.516200 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-nv4fq"] Jan 20 18:23:32 crc kubenswrapper[4558]: I0120 18:23:32.557618 4558 generic.go:334] "Generic (PLEG): container finished" podID="2275b006-a890-400e-83b6-1c46bf67f62e" containerID="d9c022000639f60b589dd52328b89c62f22fa3c2d49f87fd4714cd3c99bfb24a" exitCode=0 Jan 20 18:23:32 crc kubenswrapper[4558]: I0120 18:23:32.557668 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/rabbitmq-server-0" event={"ID":"2275b006-a890-400e-83b6-1c46bf67f62e","Type":"ContainerDied","Data":"d9c022000639f60b589dd52328b89c62f22fa3c2d49f87fd4714cd3c99bfb24a"} Jan 20 18:23:32 crc kubenswrapper[4558]: I0120 18:23:32.620698 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1bc5cffe-452d-44a9-bd2c-d1f1d3c4c414-catalog-content\") pod \"redhat-marketplace-nv4fq\" (UID: \"1bc5cffe-452d-44a9-bd2c-d1f1d3c4c414\") " pod="openshift-marketplace/redhat-marketplace-nv4fq" Jan 20 18:23:32 crc kubenswrapper[4558]: I0120 18:23:32.620797 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c52jn\" (UniqueName: \"kubernetes.io/projected/1bc5cffe-452d-44a9-bd2c-d1f1d3c4c414-kube-api-access-c52jn\") pod \"redhat-marketplace-nv4fq\" (UID: \"1bc5cffe-452d-44a9-bd2c-d1f1d3c4c414\") " pod="openshift-marketplace/redhat-marketplace-nv4fq" Jan 20 18:23:32 crc kubenswrapper[4558]: I0120 18:23:32.620893 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1bc5cffe-452d-44a9-bd2c-d1f1d3c4c414-utilities\") pod \"redhat-marketplace-nv4fq\" (UID: \"1bc5cffe-452d-44a9-bd2c-d1f1d3c4c414\") " pod="openshift-marketplace/redhat-marketplace-nv4fq" Jan 20 18:23:32 crc kubenswrapper[4558]: I0120 18:23:32.722130 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1bc5cffe-452d-44a9-bd2c-d1f1d3c4c414-catalog-content\") pod \"redhat-marketplace-nv4fq\" (UID: \"1bc5cffe-452d-44a9-bd2c-d1f1d3c4c414\") " pod="openshift-marketplace/redhat-marketplace-nv4fq" Jan 20 18:23:32 crc kubenswrapper[4558]: I0120 18:23:32.722213 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c52jn\" (UniqueName: \"kubernetes.io/projected/1bc5cffe-452d-44a9-bd2c-d1f1d3c4c414-kube-api-access-c52jn\") pod \"redhat-marketplace-nv4fq\" (UID: \"1bc5cffe-452d-44a9-bd2c-d1f1d3c4c414\") " pod="openshift-marketplace/redhat-marketplace-nv4fq" Jan 20 18:23:32 crc kubenswrapper[4558]: I0120 18:23:32.722397 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1bc5cffe-452d-44a9-bd2c-d1f1d3c4c414-utilities\") pod \"redhat-marketplace-nv4fq\" (UID: \"1bc5cffe-452d-44a9-bd2c-d1f1d3c4c414\") " pod="openshift-marketplace/redhat-marketplace-nv4fq" Jan 20 18:23:32 crc kubenswrapper[4558]: I0120 18:23:32.722635 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1bc5cffe-452d-44a9-bd2c-d1f1d3c4c414-catalog-content\") pod \"redhat-marketplace-nv4fq\" (UID: 
\"1bc5cffe-452d-44a9-bd2c-d1f1d3c4c414\") " pod="openshift-marketplace/redhat-marketplace-nv4fq" Jan 20 18:23:32 crc kubenswrapper[4558]: I0120 18:23:32.723279 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1bc5cffe-452d-44a9-bd2c-d1f1d3c4c414-utilities\") pod \"redhat-marketplace-nv4fq\" (UID: \"1bc5cffe-452d-44a9-bd2c-d1f1d3c4c414\") " pod="openshift-marketplace/redhat-marketplace-nv4fq" Jan 20 18:23:32 crc kubenswrapper[4558]: I0120 18:23:32.740793 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c52jn\" (UniqueName: \"kubernetes.io/projected/1bc5cffe-452d-44a9-bd2c-d1f1d3c4c414-kube-api-access-c52jn\") pod \"redhat-marketplace-nv4fq\" (UID: \"1bc5cffe-452d-44a9-bd2c-d1f1d3c4c414\") " pod="openshift-marketplace/redhat-marketplace-nv4fq" Jan 20 18:23:32 crc kubenswrapper[4558]: I0120 18:23:32.832512 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nv4fq" Jan 20 18:23:33 crc kubenswrapper[4558]: I0120 18:23:33.224668 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-nv4fq"] Jan 20 18:23:33 crc kubenswrapper[4558]: W0120 18:23:33.234182 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1bc5cffe_452d_44a9_bd2c_d1f1d3c4c414.slice/crio-b429ea2b220d12f38528a97744a8948e56336ad6e27e8119b827c4d9b3dae26d WatchSource:0}: Error finding container b429ea2b220d12f38528a97744a8948e56336ad6e27e8119b827c4d9b3dae26d: Status 404 returned error can't find the container with id b429ea2b220d12f38528a97744a8948e56336ad6e27e8119b827c4d9b3dae26d Jan 20 18:23:33 crc kubenswrapper[4558]: I0120 18:23:33.568716 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/rabbitmq-server-0" event={"ID":"2275b006-a890-400e-83b6-1c46bf67f62e","Type":"ContainerStarted","Data":"45e5c44ef4c79d694b34e00a259e1223a9488d8427afb7c95c4ae868a72e5bd9"} Jan 20 18:23:33 crc kubenswrapper[4558]: I0120 18:23:33.569714 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="keystone-kuttl-tests/rabbitmq-server-0" Jan 20 18:23:33 crc kubenswrapper[4558]: I0120 18:23:33.572192 4558 generic.go:334] "Generic (PLEG): container finished" podID="1bc5cffe-452d-44a9-bd2c-d1f1d3c4c414" containerID="4278b2e5201e74fdd1c5115f9bec5e037b68a0ee981132cc27e971546cefbfa3" exitCode=0 Jan 20 18:23:33 crc kubenswrapper[4558]: I0120 18:23:33.572259 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nv4fq" event={"ID":"1bc5cffe-452d-44a9-bd2c-d1f1d3c4c414","Type":"ContainerDied","Data":"4278b2e5201e74fdd1c5115f9bec5e037b68a0ee981132cc27e971546cefbfa3"} Jan 20 18:23:33 crc kubenswrapper[4558]: I0120 18:23:33.572307 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nv4fq" event={"ID":"1bc5cffe-452d-44a9-bd2c-d1f1d3c4c414","Type":"ContainerStarted","Data":"b429ea2b220d12f38528a97744a8948e56336ad6e27e8119b827c4d9b3dae26d"} Jan 20 18:23:33 crc kubenswrapper[4558]: I0120 18:23:33.594515 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="keystone-kuttl-tests/rabbitmq-server-0" podStartSLOduration=36.594498751 podStartE2EDuration="36.594498751s" podCreationTimestamp="2026-01-20 18:22:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 
00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:23:33.587660123 +0000 UTC m=+6107.347998090" watchObservedRunningTime="2026-01-20 18:23:33.594498751 +0000 UTC m=+6107.354836718" Jan 20 18:23:34 crc kubenswrapper[4558]: I0120 18:23:34.585809 4558 generic.go:334] "Generic (PLEG): container finished" podID="1bc5cffe-452d-44a9-bd2c-d1f1d3c4c414" containerID="52067ab2b0d018faeb7a702bbc64d46bca5d61d6ca4f6a3710a1e20d11c41a3b" exitCode=0 Jan 20 18:23:34 crc kubenswrapper[4558]: I0120 18:23:34.585854 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nv4fq" event={"ID":"1bc5cffe-452d-44a9-bd2c-d1f1d3c4c414","Type":"ContainerDied","Data":"52067ab2b0d018faeb7a702bbc64d46bca5d61d6ca4f6a3710a1e20d11c41a3b"} Jan 20 18:23:35 crc kubenswrapper[4558]: I0120 18:23:35.600432 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nv4fq" event={"ID":"1bc5cffe-452d-44a9-bd2c-d1f1d3c4c414","Type":"ContainerStarted","Data":"d6561464b19dfb14514f538b9ecf092e43cfcc8958addde40ced35d550312a86"} Jan 20 18:23:35 crc kubenswrapper[4558]: I0120 18:23:35.623621 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-nv4fq" podStartSLOduration=2.140186198 podStartE2EDuration="3.623598532s" podCreationTimestamp="2026-01-20 18:23:32 +0000 UTC" firstStartedPulling="2026-01-20 18:23:33.574085128 +0000 UTC m=+6107.334423095" lastFinishedPulling="2026-01-20 18:23:35.057497461 +0000 UTC m=+6108.817835429" observedRunningTime="2026-01-20 18:23:35.616774812 +0000 UTC m=+6109.377112779" watchObservedRunningTime="2026-01-20 18:23:35.623598532 +0000 UTC m=+6109.383936500" Jan 20 18:23:36 crc kubenswrapper[4558]: I0120 18:23:36.946966 4558 scope.go:117] "RemoveContainer" containerID="a6c3d46d0b6a5aafa34043f11633d1c1d3dce61bb8d70b54ad40c2ea8a834c24" Jan 20 18:23:36 crc kubenswrapper[4558]: I0120 18:23:36.965780 4558 scope.go:117] "RemoveContainer" containerID="fce5c1d3d2c928b886827a18ddf9a4a929eedad628229ef3cad9bf2beeaec1a4" Jan 20 18:23:36 crc kubenswrapper[4558]: I0120 18:23:36.990436 4558 scope.go:117] "RemoveContainer" containerID="386368ef23e7a031c203ecbc5d8d17951778eb2a241a99697a6d4075102c41a6" Jan 20 18:23:42 crc kubenswrapper[4558]: I0120 18:23:42.833350 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-nv4fq" Jan 20 18:23:42 crc kubenswrapper[4558]: I0120 18:23:42.833765 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-nv4fq" Jan 20 18:23:42 crc kubenswrapper[4558]: I0120 18:23:42.873733 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-nv4fq" Jan 20 18:23:43 crc kubenswrapper[4558]: I0120 18:23:43.699182 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-nv4fq" Jan 20 18:23:44 crc kubenswrapper[4558]: I0120 18:23:44.486839 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-nv4fq"] Jan 20 18:23:45 crc kubenswrapper[4558]: I0120 18:23:45.675057 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-nv4fq" podUID="1bc5cffe-452d-44a9-bd2c-d1f1d3c4c414" containerName="registry-server" containerID="cri-o://d6561464b19dfb14514f538b9ecf092e43cfcc8958addde40ced35d550312a86" gracePeriod=2 
Jan 20 18:23:46 crc kubenswrapper[4558]: I0120 18:23:46.092590 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nv4fq" Jan 20 18:23:46 crc kubenswrapper[4558]: I0120 18:23:46.147050 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1bc5cffe-452d-44a9-bd2c-d1f1d3c4c414-catalog-content\") pod \"1bc5cffe-452d-44a9-bd2c-d1f1d3c4c414\" (UID: \"1bc5cffe-452d-44a9-bd2c-d1f1d3c4c414\") " Jan 20 18:23:46 crc kubenswrapper[4558]: I0120 18:23:46.147129 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c52jn\" (UniqueName: \"kubernetes.io/projected/1bc5cffe-452d-44a9-bd2c-d1f1d3c4c414-kube-api-access-c52jn\") pod \"1bc5cffe-452d-44a9-bd2c-d1f1d3c4c414\" (UID: \"1bc5cffe-452d-44a9-bd2c-d1f1d3c4c414\") " Jan 20 18:23:46 crc kubenswrapper[4558]: I0120 18:23:46.147159 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1bc5cffe-452d-44a9-bd2c-d1f1d3c4c414-utilities\") pod \"1bc5cffe-452d-44a9-bd2c-d1f1d3c4c414\" (UID: \"1bc5cffe-452d-44a9-bd2c-d1f1d3c4c414\") " Jan 20 18:23:46 crc kubenswrapper[4558]: I0120 18:23:46.148085 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1bc5cffe-452d-44a9-bd2c-d1f1d3c4c414-utilities" (OuterVolumeSpecName: "utilities") pod "1bc5cffe-452d-44a9-bd2c-d1f1d3c4c414" (UID: "1bc5cffe-452d-44a9-bd2c-d1f1d3c4c414"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:23:46 crc kubenswrapper[4558]: I0120 18:23:46.154580 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bc5cffe-452d-44a9-bd2c-d1f1d3c4c414-kube-api-access-c52jn" (OuterVolumeSpecName: "kube-api-access-c52jn") pod "1bc5cffe-452d-44a9-bd2c-d1f1d3c4c414" (UID: "1bc5cffe-452d-44a9-bd2c-d1f1d3c4c414"). InnerVolumeSpecName "kube-api-access-c52jn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:23:46 crc kubenswrapper[4558]: I0120 18:23:46.166878 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1bc5cffe-452d-44a9-bd2c-d1f1d3c4c414-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1bc5cffe-452d-44a9-bd2c-d1f1d3c4c414" (UID: "1bc5cffe-452d-44a9-bd2c-d1f1d3c4c414"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:23:46 crc kubenswrapper[4558]: I0120 18:23:46.248450 4558 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1bc5cffe-452d-44a9-bd2c-d1f1d3c4c414-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 20 18:23:46 crc kubenswrapper[4558]: I0120 18:23:46.248487 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c52jn\" (UniqueName: \"kubernetes.io/projected/1bc5cffe-452d-44a9-bd2c-d1f1d3c4c414-kube-api-access-c52jn\") on node \"crc\" DevicePath \"\"" Jan 20 18:23:46 crc kubenswrapper[4558]: I0120 18:23:46.248499 4558 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1bc5cffe-452d-44a9-bd2c-d1f1d3c4c414-utilities\") on node \"crc\" DevicePath \"\"" Jan 20 18:23:46 crc kubenswrapper[4558]: I0120 18:23:46.688037 4558 generic.go:334] "Generic (PLEG): container finished" podID="1bc5cffe-452d-44a9-bd2c-d1f1d3c4c414" containerID="d6561464b19dfb14514f538b9ecf092e43cfcc8958addde40ced35d550312a86" exitCode=0 Jan 20 18:23:46 crc kubenswrapper[4558]: I0120 18:23:46.688098 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nv4fq" event={"ID":"1bc5cffe-452d-44a9-bd2c-d1f1d3c4c414","Type":"ContainerDied","Data":"d6561464b19dfb14514f538b9ecf092e43cfcc8958addde40ced35d550312a86"} Jan 20 18:23:46 crc kubenswrapper[4558]: I0120 18:23:46.688121 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nv4fq" Jan 20 18:23:46 crc kubenswrapper[4558]: I0120 18:23:46.688139 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nv4fq" event={"ID":"1bc5cffe-452d-44a9-bd2c-d1f1d3c4c414","Type":"ContainerDied","Data":"b429ea2b220d12f38528a97744a8948e56336ad6e27e8119b827c4d9b3dae26d"} Jan 20 18:23:46 crc kubenswrapper[4558]: I0120 18:23:46.688176 4558 scope.go:117] "RemoveContainer" containerID="d6561464b19dfb14514f538b9ecf092e43cfcc8958addde40ced35d550312a86" Jan 20 18:23:46 crc kubenswrapper[4558]: I0120 18:23:46.712508 4558 scope.go:117] "RemoveContainer" containerID="52067ab2b0d018faeb7a702bbc64d46bca5d61d6ca4f6a3710a1e20d11c41a3b" Jan 20 18:23:46 crc kubenswrapper[4558]: I0120 18:23:46.722673 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-nv4fq"] Jan 20 18:23:46 crc kubenswrapper[4558]: I0120 18:23:46.730459 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-nv4fq"] Jan 20 18:23:46 crc kubenswrapper[4558]: I0120 18:23:46.731106 4558 scope.go:117] "RemoveContainer" containerID="4278b2e5201e74fdd1c5115f9bec5e037b68a0ee981132cc27e971546cefbfa3" Jan 20 18:23:46 crc kubenswrapper[4558]: I0120 18:23:46.757058 4558 scope.go:117] "RemoveContainer" containerID="d6561464b19dfb14514f538b9ecf092e43cfcc8958addde40ced35d550312a86" Jan 20 18:23:46 crc kubenswrapper[4558]: E0120 18:23:46.757788 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d6561464b19dfb14514f538b9ecf092e43cfcc8958addde40ced35d550312a86\": container with ID starting with d6561464b19dfb14514f538b9ecf092e43cfcc8958addde40ced35d550312a86 not found: ID does not exist" containerID="d6561464b19dfb14514f538b9ecf092e43cfcc8958addde40ced35d550312a86" Jan 20 18:23:46 crc kubenswrapper[4558]: I0120 18:23:46.757861 4558 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d6561464b19dfb14514f538b9ecf092e43cfcc8958addde40ced35d550312a86"} err="failed to get container status \"d6561464b19dfb14514f538b9ecf092e43cfcc8958addde40ced35d550312a86\": rpc error: code = NotFound desc = could not find container \"d6561464b19dfb14514f538b9ecf092e43cfcc8958addde40ced35d550312a86\": container with ID starting with d6561464b19dfb14514f538b9ecf092e43cfcc8958addde40ced35d550312a86 not found: ID does not exist" Jan 20 18:23:46 crc kubenswrapper[4558]: I0120 18:23:46.757904 4558 scope.go:117] "RemoveContainer" containerID="52067ab2b0d018faeb7a702bbc64d46bca5d61d6ca4f6a3710a1e20d11c41a3b" Jan 20 18:23:46 crc kubenswrapper[4558]: E0120 18:23:46.758593 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"52067ab2b0d018faeb7a702bbc64d46bca5d61d6ca4f6a3710a1e20d11c41a3b\": container with ID starting with 52067ab2b0d018faeb7a702bbc64d46bca5d61d6ca4f6a3710a1e20d11c41a3b not found: ID does not exist" containerID="52067ab2b0d018faeb7a702bbc64d46bca5d61d6ca4f6a3710a1e20d11c41a3b" Jan 20 18:23:46 crc kubenswrapper[4558]: I0120 18:23:46.758632 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"52067ab2b0d018faeb7a702bbc64d46bca5d61d6ca4f6a3710a1e20d11c41a3b"} err="failed to get container status \"52067ab2b0d018faeb7a702bbc64d46bca5d61d6ca4f6a3710a1e20d11c41a3b\": rpc error: code = NotFound desc = could not find container \"52067ab2b0d018faeb7a702bbc64d46bca5d61d6ca4f6a3710a1e20d11c41a3b\": container with ID starting with 52067ab2b0d018faeb7a702bbc64d46bca5d61d6ca4f6a3710a1e20d11c41a3b not found: ID does not exist" Jan 20 18:23:46 crc kubenswrapper[4558]: I0120 18:23:46.758654 4558 scope.go:117] "RemoveContainer" containerID="4278b2e5201e74fdd1c5115f9bec5e037b68a0ee981132cc27e971546cefbfa3" Jan 20 18:23:46 crc kubenswrapper[4558]: E0120 18:23:46.758922 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4278b2e5201e74fdd1c5115f9bec5e037b68a0ee981132cc27e971546cefbfa3\": container with ID starting with 4278b2e5201e74fdd1c5115f9bec5e037b68a0ee981132cc27e971546cefbfa3 not found: ID does not exist" containerID="4278b2e5201e74fdd1c5115f9bec5e037b68a0ee981132cc27e971546cefbfa3" Jan 20 18:23:46 crc kubenswrapper[4558]: I0120 18:23:46.758975 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4278b2e5201e74fdd1c5115f9bec5e037b68a0ee981132cc27e971546cefbfa3"} err="failed to get container status \"4278b2e5201e74fdd1c5115f9bec5e037b68a0ee981132cc27e971546cefbfa3\": rpc error: code = NotFound desc = could not find container \"4278b2e5201e74fdd1c5115f9bec5e037b68a0ee981132cc27e971546cefbfa3\": container with ID starting with 4278b2e5201e74fdd1c5115f9bec5e037b68a0ee981132cc27e971546cefbfa3 not found: ID does not exist" Jan 20 18:23:48 crc kubenswrapper[4558]: I0120 18:23:48.575019 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bc5cffe-452d-44a9-bd2c-d1f1d3c4c414" path="/var/lib/kubelet/pods/1bc5cffe-452d-44a9-bd2c-d1f1d3c4c414/volumes" Jan 20 18:23:49 crc kubenswrapper[4558]: I0120 18:23:49.332710 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="keystone-kuttl-tests/rabbitmq-server-0" Jan 20 18:23:49 crc kubenswrapper[4558]: I0120 18:23:49.853801 4558 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["keystone-kuttl-tests/keystone-db-sync-hztjt"] Jan 20 18:23:49 crc kubenswrapper[4558]: E0120 18:23:49.854091 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1bc5cffe-452d-44a9-bd2c-d1f1d3c4c414" containerName="registry-server" Jan 20 18:23:49 crc kubenswrapper[4558]: I0120 18:23:49.854106 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1bc5cffe-452d-44a9-bd2c-d1f1d3c4c414" containerName="registry-server" Jan 20 18:23:49 crc kubenswrapper[4558]: E0120 18:23:49.854131 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1bc5cffe-452d-44a9-bd2c-d1f1d3c4c414" containerName="extract-content" Jan 20 18:23:49 crc kubenswrapper[4558]: I0120 18:23:49.854137 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1bc5cffe-452d-44a9-bd2c-d1f1d3c4c414" containerName="extract-content" Jan 20 18:23:49 crc kubenswrapper[4558]: E0120 18:23:49.854150 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1bc5cffe-452d-44a9-bd2c-d1f1d3c4c414" containerName="extract-utilities" Jan 20 18:23:49 crc kubenswrapper[4558]: I0120 18:23:49.854157 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1bc5cffe-452d-44a9-bd2c-d1f1d3c4c414" containerName="extract-utilities" Jan 20 18:23:49 crc kubenswrapper[4558]: I0120 18:23:49.854302 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="1bc5cffe-452d-44a9-bd2c-d1f1d3c4c414" containerName="registry-server" Jan 20 18:23:49 crc kubenswrapper[4558]: I0120 18:23:49.854798 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-db-sync-hztjt" Jan 20 18:23:49 crc kubenswrapper[4558]: I0120 18:23:49.856217 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-scripts" Jan 20 18:23:49 crc kubenswrapper[4558]: I0120 18:23:49.857733 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-config-data" Jan 20 18:23:49 crc kubenswrapper[4558]: I0120 18:23:49.860632 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-db-sync-hztjt"] Jan 20 18:23:49 crc kubenswrapper[4558]: I0120 18:23:49.861714 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone" Jan 20 18:23:49 crc kubenswrapper[4558]: I0120 18:23:49.867018 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-keystone-dockercfg-xx9fm" Jan 20 18:23:50 crc kubenswrapper[4558]: I0120 18:23:50.001970 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e61fa5e1-c612-4bfb-9585-e282477c4819-config-data\") pod \"keystone-db-sync-hztjt\" (UID: \"e61fa5e1-c612-4bfb-9585-e282477c4819\") " pod="keystone-kuttl-tests/keystone-db-sync-hztjt" Jan 20 18:23:50 crc kubenswrapper[4558]: I0120 18:23:50.002560 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dfjhs\" (UniqueName: \"kubernetes.io/projected/e61fa5e1-c612-4bfb-9585-e282477c4819-kube-api-access-dfjhs\") pod \"keystone-db-sync-hztjt\" (UID: \"e61fa5e1-c612-4bfb-9585-e282477c4819\") " pod="keystone-kuttl-tests/keystone-db-sync-hztjt" Jan 20 18:23:50 crc kubenswrapper[4558]: I0120 18:23:50.105135 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dfjhs\" (UniqueName: 
\"kubernetes.io/projected/e61fa5e1-c612-4bfb-9585-e282477c4819-kube-api-access-dfjhs\") pod \"keystone-db-sync-hztjt\" (UID: \"e61fa5e1-c612-4bfb-9585-e282477c4819\") " pod="keystone-kuttl-tests/keystone-db-sync-hztjt" Jan 20 18:23:50 crc kubenswrapper[4558]: I0120 18:23:50.105262 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e61fa5e1-c612-4bfb-9585-e282477c4819-config-data\") pod \"keystone-db-sync-hztjt\" (UID: \"e61fa5e1-c612-4bfb-9585-e282477c4819\") " pod="keystone-kuttl-tests/keystone-db-sync-hztjt" Jan 20 18:23:50 crc kubenswrapper[4558]: I0120 18:23:50.112864 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e61fa5e1-c612-4bfb-9585-e282477c4819-config-data\") pod \"keystone-db-sync-hztjt\" (UID: \"e61fa5e1-c612-4bfb-9585-e282477c4819\") " pod="keystone-kuttl-tests/keystone-db-sync-hztjt" Jan 20 18:23:50 crc kubenswrapper[4558]: I0120 18:23:50.121875 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dfjhs\" (UniqueName: \"kubernetes.io/projected/e61fa5e1-c612-4bfb-9585-e282477c4819-kube-api-access-dfjhs\") pod \"keystone-db-sync-hztjt\" (UID: \"e61fa5e1-c612-4bfb-9585-e282477c4819\") " pod="keystone-kuttl-tests/keystone-db-sync-hztjt" Jan 20 18:23:50 crc kubenswrapper[4558]: I0120 18:23:50.172564 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-db-sync-hztjt" Jan 20 18:23:50 crc kubenswrapper[4558]: I0120 18:23:50.586355 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-db-sync-hztjt"] Jan 20 18:23:50 crc kubenswrapper[4558]: I0120 18:23:50.719264 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-db-sync-hztjt" event={"ID":"e61fa5e1-c612-4bfb-9585-e282477c4819","Type":"ContainerStarted","Data":"fb58c0194a8ac62cf3eb6870e16946219af99491571263c86ef45391a2d3982b"} Jan 20 18:23:50 crc kubenswrapper[4558]: I0120 18:23:50.719575 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-db-sync-hztjt" event={"ID":"e61fa5e1-c612-4bfb-9585-e282477c4819","Type":"ContainerStarted","Data":"69ae84f53ebc908b6189912de54c0bd609de2b465ec5951bf3b3fed10b915c78"} Jan 20 18:23:50 crc kubenswrapper[4558]: I0120 18:23:50.738422 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="keystone-kuttl-tests/keystone-db-sync-hztjt" podStartSLOduration=1.738394772 podStartE2EDuration="1.738394772s" podCreationTimestamp="2026-01-20 18:23:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:23:50.732487426 +0000 UTC m=+6124.492825392" watchObservedRunningTime="2026-01-20 18:23:50.738394772 +0000 UTC m=+6124.498732739" Jan 20 18:23:52 crc kubenswrapper[4558]: I0120 18:23:52.739763 4558 generic.go:334] "Generic (PLEG): container finished" podID="e61fa5e1-c612-4bfb-9585-e282477c4819" containerID="fb58c0194a8ac62cf3eb6870e16946219af99491571263c86ef45391a2d3982b" exitCode=0 Jan 20 18:23:52 crc kubenswrapper[4558]: I0120 18:23:52.739860 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-db-sync-hztjt" event={"ID":"e61fa5e1-c612-4bfb-9585-e282477c4819","Type":"ContainerDied","Data":"fb58c0194a8ac62cf3eb6870e16946219af99491571263c86ef45391a2d3982b"} Jan 20 18:23:53 crc kubenswrapper[4558]: 
I0120 18:23:53.986271 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-db-sync-hztjt" Jan 20 18:23:54 crc kubenswrapper[4558]: I0120 18:23:54.172790 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dfjhs\" (UniqueName: \"kubernetes.io/projected/e61fa5e1-c612-4bfb-9585-e282477c4819-kube-api-access-dfjhs\") pod \"e61fa5e1-c612-4bfb-9585-e282477c4819\" (UID: \"e61fa5e1-c612-4bfb-9585-e282477c4819\") " Jan 20 18:23:54 crc kubenswrapper[4558]: I0120 18:23:54.172875 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e61fa5e1-c612-4bfb-9585-e282477c4819-config-data\") pod \"e61fa5e1-c612-4bfb-9585-e282477c4819\" (UID: \"e61fa5e1-c612-4bfb-9585-e282477c4819\") " Jan 20 18:23:54 crc kubenswrapper[4558]: I0120 18:23:54.179125 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e61fa5e1-c612-4bfb-9585-e282477c4819-kube-api-access-dfjhs" (OuterVolumeSpecName: "kube-api-access-dfjhs") pod "e61fa5e1-c612-4bfb-9585-e282477c4819" (UID: "e61fa5e1-c612-4bfb-9585-e282477c4819"). InnerVolumeSpecName "kube-api-access-dfjhs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:23:54 crc kubenswrapper[4558]: I0120 18:23:54.204451 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e61fa5e1-c612-4bfb-9585-e282477c4819-config-data" (OuterVolumeSpecName: "config-data") pod "e61fa5e1-c612-4bfb-9585-e282477c4819" (UID: "e61fa5e1-c612-4bfb-9585-e282477c4819"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:23:54 crc kubenswrapper[4558]: I0120 18:23:54.276468 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dfjhs\" (UniqueName: \"kubernetes.io/projected/e61fa5e1-c612-4bfb-9585-e282477c4819-kube-api-access-dfjhs\") on node \"crc\" DevicePath \"\"" Jan 20 18:23:54 crc kubenswrapper[4558]: I0120 18:23:54.276515 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e61fa5e1-c612-4bfb-9585-e282477c4819-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 18:23:54 crc kubenswrapper[4558]: I0120 18:23:54.758372 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-db-sync-hztjt" event={"ID":"e61fa5e1-c612-4bfb-9585-e282477c4819","Type":"ContainerDied","Data":"69ae84f53ebc908b6189912de54c0bd609de2b465ec5951bf3b3fed10b915c78"} Jan 20 18:23:54 crc kubenswrapper[4558]: I0120 18:23:54.758662 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="69ae84f53ebc908b6189912de54c0bd609de2b465ec5951bf3b3fed10b915c78" Jan 20 18:23:54 crc kubenswrapper[4558]: I0120 18:23:54.758426 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-db-sync-hztjt" Jan 20 18:23:54 crc kubenswrapper[4558]: I0120 18:23:54.962881 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/keystone-bootstrap-tlsz9"] Jan 20 18:23:54 crc kubenswrapper[4558]: E0120 18:23:54.963602 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e61fa5e1-c612-4bfb-9585-e282477c4819" containerName="keystone-db-sync" Jan 20 18:23:54 crc kubenswrapper[4558]: I0120 18:23:54.963641 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e61fa5e1-c612-4bfb-9585-e282477c4819" containerName="keystone-db-sync" Jan 20 18:23:54 crc kubenswrapper[4558]: I0120 18:23:54.964260 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e61fa5e1-c612-4bfb-9585-e282477c4819" containerName="keystone-db-sync" Jan 20 18:23:54 crc kubenswrapper[4558]: I0120 18:23:54.964980 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-bootstrap-tlsz9" Jan 20 18:23:54 crc kubenswrapper[4558]: I0120 18:23:54.968251 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-keystone-dockercfg-xx9fm" Jan 20 18:23:54 crc kubenswrapper[4558]: I0120 18:23:54.968428 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"osp-secret" Jan 20 18:23:54 crc kubenswrapper[4558]: I0120 18:23:54.968582 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-scripts" Jan 20 18:23:54 crc kubenswrapper[4558]: I0120 18:23:54.968677 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-config-data" Jan 20 18:23:54 crc kubenswrapper[4558]: I0120 18:23:54.968897 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone" Jan 20 18:23:54 crc kubenswrapper[4558]: I0120 18:23:54.983191 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-bootstrap-tlsz9"] Jan 20 18:23:55 crc kubenswrapper[4558]: I0120 18:23:55.091707 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ec4269b4-bbd4-418c-87ff-431233da9bd3-credential-keys\") pod \"keystone-bootstrap-tlsz9\" (UID: \"ec4269b4-bbd4-418c-87ff-431233da9bd3\") " pod="keystone-kuttl-tests/keystone-bootstrap-tlsz9" Jan 20 18:23:55 crc kubenswrapper[4558]: I0120 18:23:55.091777 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2h7xj\" (UniqueName: \"kubernetes.io/projected/ec4269b4-bbd4-418c-87ff-431233da9bd3-kube-api-access-2h7xj\") pod \"keystone-bootstrap-tlsz9\" (UID: \"ec4269b4-bbd4-418c-87ff-431233da9bd3\") " pod="keystone-kuttl-tests/keystone-bootstrap-tlsz9" Jan 20 18:23:55 crc kubenswrapper[4558]: I0120 18:23:55.091861 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ec4269b4-bbd4-418c-87ff-431233da9bd3-scripts\") pod \"keystone-bootstrap-tlsz9\" (UID: \"ec4269b4-bbd4-418c-87ff-431233da9bd3\") " pod="keystone-kuttl-tests/keystone-bootstrap-tlsz9" Jan 20 18:23:55 crc kubenswrapper[4558]: I0120 18:23:55.091947 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/ec4269b4-bbd4-418c-87ff-431233da9bd3-config-data\") pod \"keystone-bootstrap-tlsz9\" (UID: \"ec4269b4-bbd4-418c-87ff-431233da9bd3\") " pod="keystone-kuttl-tests/keystone-bootstrap-tlsz9" Jan 20 18:23:55 crc kubenswrapper[4558]: I0120 18:23:55.092030 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ec4269b4-bbd4-418c-87ff-431233da9bd3-fernet-keys\") pod \"keystone-bootstrap-tlsz9\" (UID: \"ec4269b4-bbd4-418c-87ff-431233da9bd3\") " pod="keystone-kuttl-tests/keystone-bootstrap-tlsz9" Jan 20 18:23:55 crc kubenswrapper[4558]: I0120 18:23:55.194177 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ec4269b4-bbd4-418c-87ff-431233da9bd3-credential-keys\") pod \"keystone-bootstrap-tlsz9\" (UID: \"ec4269b4-bbd4-418c-87ff-431233da9bd3\") " pod="keystone-kuttl-tests/keystone-bootstrap-tlsz9" Jan 20 18:23:55 crc kubenswrapper[4558]: I0120 18:23:55.194251 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2h7xj\" (UniqueName: \"kubernetes.io/projected/ec4269b4-bbd4-418c-87ff-431233da9bd3-kube-api-access-2h7xj\") pod \"keystone-bootstrap-tlsz9\" (UID: \"ec4269b4-bbd4-418c-87ff-431233da9bd3\") " pod="keystone-kuttl-tests/keystone-bootstrap-tlsz9" Jan 20 18:23:55 crc kubenswrapper[4558]: I0120 18:23:55.194297 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ec4269b4-bbd4-418c-87ff-431233da9bd3-scripts\") pod \"keystone-bootstrap-tlsz9\" (UID: \"ec4269b4-bbd4-418c-87ff-431233da9bd3\") " pod="keystone-kuttl-tests/keystone-bootstrap-tlsz9" Jan 20 18:23:55 crc kubenswrapper[4558]: I0120 18:23:55.194325 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec4269b4-bbd4-418c-87ff-431233da9bd3-config-data\") pod \"keystone-bootstrap-tlsz9\" (UID: \"ec4269b4-bbd4-418c-87ff-431233da9bd3\") " pod="keystone-kuttl-tests/keystone-bootstrap-tlsz9" Jan 20 18:23:55 crc kubenswrapper[4558]: I0120 18:23:55.194369 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ec4269b4-bbd4-418c-87ff-431233da9bd3-fernet-keys\") pod \"keystone-bootstrap-tlsz9\" (UID: \"ec4269b4-bbd4-418c-87ff-431233da9bd3\") " pod="keystone-kuttl-tests/keystone-bootstrap-tlsz9" Jan 20 18:23:55 crc kubenswrapper[4558]: I0120 18:23:55.199572 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ec4269b4-bbd4-418c-87ff-431233da9bd3-scripts\") pod \"keystone-bootstrap-tlsz9\" (UID: \"ec4269b4-bbd4-418c-87ff-431233da9bd3\") " pod="keystone-kuttl-tests/keystone-bootstrap-tlsz9" Jan 20 18:23:55 crc kubenswrapper[4558]: I0120 18:23:55.199669 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ec4269b4-bbd4-418c-87ff-431233da9bd3-credential-keys\") pod \"keystone-bootstrap-tlsz9\" (UID: \"ec4269b4-bbd4-418c-87ff-431233da9bd3\") " pod="keystone-kuttl-tests/keystone-bootstrap-tlsz9" Jan 20 18:23:55 crc kubenswrapper[4558]: I0120 18:23:55.200156 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec4269b4-bbd4-418c-87ff-431233da9bd3-config-data\") pod 
\"keystone-bootstrap-tlsz9\" (UID: \"ec4269b4-bbd4-418c-87ff-431233da9bd3\") " pod="keystone-kuttl-tests/keystone-bootstrap-tlsz9" Jan 20 18:23:55 crc kubenswrapper[4558]: I0120 18:23:55.201563 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ec4269b4-bbd4-418c-87ff-431233da9bd3-fernet-keys\") pod \"keystone-bootstrap-tlsz9\" (UID: \"ec4269b4-bbd4-418c-87ff-431233da9bd3\") " pod="keystone-kuttl-tests/keystone-bootstrap-tlsz9" Jan 20 18:23:55 crc kubenswrapper[4558]: I0120 18:23:55.210031 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2h7xj\" (UniqueName: \"kubernetes.io/projected/ec4269b4-bbd4-418c-87ff-431233da9bd3-kube-api-access-2h7xj\") pod \"keystone-bootstrap-tlsz9\" (UID: \"ec4269b4-bbd4-418c-87ff-431233da9bd3\") " pod="keystone-kuttl-tests/keystone-bootstrap-tlsz9" Jan 20 18:23:55 crc kubenswrapper[4558]: I0120 18:23:55.291621 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-bootstrap-tlsz9" Jan 20 18:23:55 crc kubenswrapper[4558]: I0120 18:23:55.659923 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-bootstrap-tlsz9"] Jan 20 18:23:55 crc kubenswrapper[4558]: W0120 18:23:55.663494 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podec4269b4_bbd4_418c_87ff_431233da9bd3.slice/crio-8748a53b94debbda28ac72dcdf379193872dea9cc7fd08afbe14d39b51068c70 WatchSource:0}: Error finding container 8748a53b94debbda28ac72dcdf379193872dea9cc7fd08afbe14d39b51068c70: Status 404 returned error can't find the container with id 8748a53b94debbda28ac72dcdf379193872dea9cc7fd08afbe14d39b51068c70 Jan 20 18:23:55 crc kubenswrapper[4558]: I0120 18:23:55.769089 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-bootstrap-tlsz9" event={"ID":"ec4269b4-bbd4-418c-87ff-431233da9bd3","Type":"ContainerStarted","Data":"8748a53b94debbda28ac72dcdf379193872dea9cc7fd08afbe14d39b51068c70"} Jan 20 18:23:56 crc kubenswrapper[4558]: I0120 18:23:56.780005 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-bootstrap-tlsz9" event={"ID":"ec4269b4-bbd4-418c-87ff-431233da9bd3","Type":"ContainerStarted","Data":"99682bfe9586bece779160cb0aa0bf66a5f5625a029c5947d715ab1d390c16e9"} Jan 20 18:23:56 crc kubenswrapper[4558]: I0120 18:23:56.808595 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="keystone-kuttl-tests/keystone-bootstrap-tlsz9" podStartSLOduration=2.808568416 podStartE2EDuration="2.808568416s" podCreationTimestamp="2026-01-20 18:23:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:23:56.799569235 +0000 UTC m=+6130.559907202" watchObservedRunningTime="2026-01-20 18:23:56.808568416 +0000 UTC m=+6130.568906384" Jan 20 18:23:58 crc kubenswrapper[4558]: I0120 18:23:58.811916 4558 generic.go:334] "Generic (PLEG): container finished" podID="ec4269b4-bbd4-418c-87ff-431233da9bd3" containerID="99682bfe9586bece779160cb0aa0bf66a5f5625a029c5947d715ab1d390c16e9" exitCode=0 Jan 20 18:23:58 crc kubenswrapper[4558]: I0120 18:23:58.812033 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-bootstrap-tlsz9" 
event={"ID":"ec4269b4-bbd4-418c-87ff-431233da9bd3","Type":"ContainerDied","Data":"99682bfe9586bece779160cb0aa0bf66a5f5625a029c5947d715ab1d390c16e9"} Jan 20 18:24:00 crc kubenswrapper[4558]: I0120 18:24:00.079662 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-bootstrap-tlsz9" Jan 20 18:24:00 crc kubenswrapper[4558]: I0120 18:24:00.184692 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ec4269b4-bbd4-418c-87ff-431233da9bd3-credential-keys\") pod \"ec4269b4-bbd4-418c-87ff-431233da9bd3\" (UID: \"ec4269b4-bbd4-418c-87ff-431233da9bd3\") " Jan 20 18:24:00 crc kubenswrapper[4558]: I0120 18:24:00.185341 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ec4269b4-bbd4-418c-87ff-431233da9bd3-fernet-keys\") pod \"ec4269b4-bbd4-418c-87ff-431233da9bd3\" (UID: \"ec4269b4-bbd4-418c-87ff-431233da9bd3\") " Jan 20 18:24:00 crc kubenswrapper[4558]: I0120 18:24:00.185485 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec4269b4-bbd4-418c-87ff-431233da9bd3-config-data\") pod \"ec4269b4-bbd4-418c-87ff-431233da9bd3\" (UID: \"ec4269b4-bbd4-418c-87ff-431233da9bd3\") " Jan 20 18:24:00 crc kubenswrapper[4558]: I0120 18:24:00.185657 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ec4269b4-bbd4-418c-87ff-431233da9bd3-scripts\") pod \"ec4269b4-bbd4-418c-87ff-431233da9bd3\" (UID: \"ec4269b4-bbd4-418c-87ff-431233da9bd3\") " Jan 20 18:24:00 crc kubenswrapper[4558]: I0120 18:24:00.186286 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2h7xj\" (UniqueName: \"kubernetes.io/projected/ec4269b4-bbd4-418c-87ff-431233da9bd3-kube-api-access-2h7xj\") pod \"ec4269b4-bbd4-418c-87ff-431233da9bd3\" (UID: \"ec4269b4-bbd4-418c-87ff-431233da9bd3\") " Jan 20 18:24:00 crc kubenswrapper[4558]: I0120 18:24:00.191956 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec4269b4-bbd4-418c-87ff-431233da9bd3-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "ec4269b4-bbd4-418c-87ff-431233da9bd3" (UID: "ec4269b4-bbd4-418c-87ff-431233da9bd3"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:24:00 crc kubenswrapper[4558]: I0120 18:24:00.192262 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ec4269b4-bbd4-418c-87ff-431233da9bd3-kube-api-access-2h7xj" (OuterVolumeSpecName: "kube-api-access-2h7xj") pod "ec4269b4-bbd4-418c-87ff-431233da9bd3" (UID: "ec4269b4-bbd4-418c-87ff-431233da9bd3"). InnerVolumeSpecName "kube-api-access-2h7xj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:24:00 crc kubenswrapper[4558]: I0120 18:24:00.192375 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec4269b4-bbd4-418c-87ff-431233da9bd3-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "ec4269b4-bbd4-418c-87ff-431233da9bd3" (UID: "ec4269b4-bbd4-418c-87ff-431233da9bd3"). InnerVolumeSpecName "fernet-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:24:00 crc kubenswrapper[4558]: I0120 18:24:00.192854 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec4269b4-bbd4-418c-87ff-431233da9bd3-scripts" (OuterVolumeSpecName: "scripts") pod "ec4269b4-bbd4-418c-87ff-431233da9bd3" (UID: "ec4269b4-bbd4-418c-87ff-431233da9bd3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:24:00 crc kubenswrapper[4558]: I0120 18:24:00.205987 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ec4269b4-bbd4-418c-87ff-431233da9bd3-config-data" (OuterVolumeSpecName: "config-data") pod "ec4269b4-bbd4-418c-87ff-431233da9bd3" (UID: "ec4269b4-bbd4-418c-87ff-431233da9bd3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:24:00 crc kubenswrapper[4558]: I0120 18:24:00.288701 4558 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ec4269b4-bbd4-418c-87ff-431233da9bd3-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 20 18:24:00 crc kubenswrapper[4558]: I0120 18:24:00.288866 4558 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ec4269b4-bbd4-418c-87ff-431233da9bd3-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 20 18:24:00 crc kubenswrapper[4558]: I0120 18:24:00.288943 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ec4269b4-bbd4-418c-87ff-431233da9bd3-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 18:24:00 crc kubenswrapper[4558]: I0120 18:24:00.289018 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ec4269b4-bbd4-418c-87ff-431233da9bd3-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 18:24:00 crc kubenswrapper[4558]: I0120 18:24:00.289094 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2h7xj\" (UniqueName: \"kubernetes.io/projected/ec4269b4-bbd4-418c-87ff-431233da9bd3-kube-api-access-2h7xj\") on node \"crc\" DevicePath \"\"" Jan 20 18:24:00 crc kubenswrapper[4558]: I0120 18:24:00.829764 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-bootstrap-tlsz9" event={"ID":"ec4269b4-bbd4-418c-87ff-431233da9bd3","Type":"ContainerDied","Data":"8748a53b94debbda28ac72dcdf379193872dea9cc7fd08afbe14d39b51068c70"} Jan 20 18:24:00 crc kubenswrapper[4558]: I0120 18:24:00.830120 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8748a53b94debbda28ac72dcdf379193872dea9cc7fd08afbe14d39b51068c70" Jan 20 18:24:00 crc kubenswrapper[4558]: I0120 18:24:00.829812 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-bootstrap-tlsz9" Jan 20 18:24:00 crc kubenswrapper[4558]: I0120 18:24:00.920418 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/keystone-854b758bb9-sp8gl"] Jan 20 18:24:00 crc kubenswrapper[4558]: E0120 18:24:00.920805 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec4269b4-bbd4-418c-87ff-431233da9bd3" containerName="keystone-bootstrap" Jan 20 18:24:00 crc kubenswrapper[4558]: I0120 18:24:00.920824 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec4269b4-bbd4-418c-87ff-431233da9bd3" containerName="keystone-bootstrap" Jan 20 18:24:00 crc kubenswrapper[4558]: I0120 18:24:00.920996 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec4269b4-bbd4-418c-87ff-431233da9bd3" containerName="keystone-bootstrap" Jan 20 18:24:00 crc kubenswrapper[4558]: I0120 18:24:00.921718 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-854b758bb9-sp8gl" Jan 20 18:24:00 crc kubenswrapper[4558]: I0120 18:24:00.927240 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone" Jan 20 18:24:00 crc kubenswrapper[4558]: I0120 18:24:00.927378 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-scripts" Jan 20 18:24:00 crc kubenswrapper[4558]: I0120 18:24:00.927442 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-config-data" Jan 20 18:24:00 crc kubenswrapper[4558]: I0120 18:24:00.930860 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-keystone-dockercfg-xx9fm" Jan 20 18:24:00 crc kubenswrapper[4558]: I0120 18:24:00.936797 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-854b758bb9-sp8gl"] Jan 20 18:24:01 crc kubenswrapper[4558]: I0120 18:24:01.103267 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l4gzm\" (UniqueName: \"kubernetes.io/projected/fd7cc4fe-552b-4a27-9aad-01f7faf6e242-kube-api-access-l4gzm\") pod \"keystone-854b758bb9-sp8gl\" (UID: \"fd7cc4fe-552b-4a27-9aad-01f7faf6e242\") " pod="keystone-kuttl-tests/keystone-854b758bb9-sp8gl" Jan 20 18:24:01 crc kubenswrapper[4558]: I0120 18:24:01.103381 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fd7cc4fe-552b-4a27-9aad-01f7faf6e242-scripts\") pod \"keystone-854b758bb9-sp8gl\" (UID: \"fd7cc4fe-552b-4a27-9aad-01f7faf6e242\") " pod="keystone-kuttl-tests/keystone-854b758bb9-sp8gl" Jan 20 18:24:01 crc kubenswrapper[4558]: I0120 18:24:01.103456 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/fd7cc4fe-552b-4a27-9aad-01f7faf6e242-fernet-keys\") pod \"keystone-854b758bb9-sp8gl\" (UID: \"fd7cc4fe-552b-4a27-9aad-01f7faf6e242\") " pod="keystone-kuttl-tests/keystone-854b758bb9-sp8gl" Jan 20 18:24:01 crc kubenswrapper[4558]: I0120 18:24:01.103495 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd7cc4fe-552b-4a27-9aad-01f7faf6e242-config-data\") pod \"keystone-854b758bb9-sp8gl\" (UID: \"fd7cc4fe-552b-4a27-9aad-01f7faf6e242\") " pod="keystone-kuttl-tests/keystone-854b758bb9-sp8gl" Jan 
20 18:24:01 crc kubenswrapper[4558]: I0120 18:24:01.103535 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/fd7cc4fe-552b-4a27-9aad-01f7faf6e242-credential-keys\") pod \"keystone-854b758bb9-sp8gl\" (UID: \"fd7cc4fe-552b-4a27-9aad-01f7faf6e242\") " pod="keystone-kuttl-tests/keystone-854b758bb9-sp8gl" Jan 20 18:24:01 crc kubenswrapper[4558]: I0120 18:24:01.205427 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/fd7cc4fe-552b-4a27-9aad-01f7faf6e242-fernet-keys\") pod \"keystone-854b758bb9-sp8gl\" (UID: \"fd7cc4fe-552b-4a27-9aad-01f7faf6e242\") " pod="keystone-kuttl-tests/keystone-854b758bb9-sp8gl" Jan 20 18:24:01 crc kubenswrapper[4558]: I0120 18:24:01.205495 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd7cc4fe-552b-4a27-9aad-01f7faf6e242-config-data\") pod \"keystone-854b758bb9-sp8gl\" (UID: \"fd7cc4fe-552b-4a27-9aad-01f7faf6e242\") " pod="keystone-kuttl-tests/keystone-854b758bb9-sp8gl" Jan 20 18:24:01 crc kubenswrapper[4558]: I0120 18:24:01.205540 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/fd7cc4fe-552b-4a27-9aad-01f7faf6e242-credential-keys\") pod \"keystone-854b758bb9-sp8gl\" (UID: \"fd7cc4fe-552b-4a27-9aad-01f7faf6e242\") " pod="keystone-kuttl-tests/keystone-854b758bb9-sp8gl" Jan 20 18:24:01 crc kubenswrapper[4558]: I0120 18:24:01.205603 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l4gzm\" (UniqueName: \"kubernetes.io/projected/fd7cc4fe-552b-4a27-9aad-01f7faf6e242-kube-api-access-l4gzm\") pod \"keystone-854b758bb9-sp8gl\" (UID: \"fd7cc4fe-552b-4a27-9aad-01f7faf6e242\") " pod="keystone-kuttl-tests/keystone-854b758bb9-sp8gl" Jan 20 18:24:01 crc kubenswrapper[4558]: I0120 18:24:01.205656 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fd7cc4fe-552b-4a27-9aad-01f7faf6e242-scripts\") pod \"keystone-854b758bb9-sp8gl\" (UID: \"fd7cc4fe-552b-4a27-9aad-01f7faf6e242\") " pod="keystone-kuttl-tests/keystone-854b758bb9-sp8gl" Jan 20 18:24:01 crc kubenswrapper[4558]: I0120 18:24:01.210058 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fd7cc4fe-552b-4a27-9aad-01f7faf6e242-scripts\") pod \"keystone-854b758bb9-sp8gl\" (UID: \"fd7cc4fe-552b-4a27-9aad-01f7faf6e242\") " pod="keystone-kuttl-tests/keystone-854b758bb9-sp8gl" Jan 20 18:24:01 crc kubenswrapper[4558]: I0120 18:24:01.210414 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/fd7cc4fe-552b-4a27-9aad-01f7faf6e242-fernet-keys\") pod \"keystone-854b758bb9-sp8gl\" (UID: \"fd7cc4fe-552b-4a27-9aad-01f7faf6e242\") " pod="keystone-kuttl-tests/keystone-854b758bb9-sp8gl" Jan 20 18:24:01 crc kubenswrapper[4558]: I0120 18:24:01.210617 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd7cc4fe-552b-4a27-9aad-01f7faf6e242-config-data\") pod \"keystone-854b758bb9-sp8gl\" (UID: \"fd7cc4fe-552b-4a27-9aad-01f7faf6e242\") " pod="keystone-kuttl-tests/keystone-854b758bb9-sp8gl" Jan 20 18:24:01 crc kubenswrapper[4558]: I0120 18:24:01.215225 4558 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/fd7cc4fe-552b-4a27-9aad-01f7faf6e242-credential-keys\") pod \"keystone-854b758bb9-sp8gl\" (UID: \"fd7cc4fe-552b-4a27-9aad-01f7faf6e242\") " pod="keystone-kuttl-tests/keystone-854b758bb9-sp8gl" Jan 20 18:24:01 crc kubenswrapper[4558]: I0120 18:24:01.221225 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l4gzm\" (UniqueName: \"kubernetes.io/projected/fd7cc4fe-552b-4a27-9aad-01f7faf6e242-kube-api-access-l4gzm\") pod \"keystone-854b758bb9-sp8gl\" (UID: \"fd7cc4fe-552b-4a27-9aad-01f7faf6e242\") " pod="keystone-kuttl-tests/keystone-854b758bb9-sp8gl" Jan 20 18:24:01 crc kubenswrapper[4558]: I0120 18:24:01.239112 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-854b758bb9-sp8gl" Jan 20 18:24:01 crc kubenswrapper[4558]: I0120 18:24:01.635100 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-854b758bb9-sp8gl"] Jan 20 18:24:01 crc kubenswrapper[4558]: I0120 18:24:01.839887 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-854b758bb9-sp8gl" event={"ID":"fd7cc4fe-552b-4a27-9aad-01f7faf6e242","Type":"ContainerStarted","Data":"46c6825e4ed9114b6fe8a0bd679e22ff5913b36401fbad5bf6957a23e9151d17"} Jan 20 18:24:01 crc kubenswrapper[4558]: I0120 18:24:01.840215 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-854b758bb9-sp8gl" event={"ID":"fd7cc4fe-552b-4a27-9aad-01f7faf6e242","Type":"ContainerStarted","Data":"97fe77964c831da1b43394169c9984db869be8e9f53823ba21051e1e41e0b35a"} Jan 20 18:24:01 crc kubenswrapper[4558]: I0120 18:24:01.840238 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="keystone-kuttl-tests/keystone-854b758bb9-sp8gl" Jan 20 18:24:01 crc kubenswrapper[4558]: I0120 18:24:01.861700 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="keystone-kuttl-tests/keystone-854b758bb9-sp8gl" podStartSLOduration=1.8616801010000001 podStartE2EDuration="1.861680101s" podCreationTimestamp="2026-01-20 18:24:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:24:01.858008598 +0000 UTC m=+6135.618346565" watchObservedRunningTime="2026-01-20 18:24:01.861680101 +0000 UTC m=+6135.622018068" Jan 20 18:24:03 crc kubenswrapper[4558]: I0120 18:24:03.095828 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-5lzr7"] Jan 20 18:24:03 crc kubenswrapper[4558]: I0120 18:24:03.097916 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-5lzr7" Jan 20 18:24:03 crc kubenswrapper[4558]: I0120 18:24:03.116822 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-5lzr7"] Jan 20 18:24:03 crc kubenswrapper[4558]: I0120 18:24:03.139380 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2427w\" (UniqueName: \"kubernetes.io/projected/08693c79-e329-4cec-94ed-29153d017c38-kube-api-access-2427w\") pod \"community-operators-5lzr7\" (UID: \"08693c79-e329-4cec-94ed-29153d017c38\") " pod="openshift-marketplace/community-operators-5lzr7" Jan 20 18:24:03 crc kubenswrapper[4558]: I0120 18:24:03.139506 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08693c79-e329-4cec-94ed-29153d017c38-catalog-content\") pod \"community-operators-5lzr7\" (UID: \"08693c79-e329-4cec-94ed-29153d017c38\") " pod="openshift-marketplace/community-operators-5lzr7" Jan 20 18:24:03 crc kubenswrapper[4558]: I0120 18:24:03.139580 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08693c79-e329-4cec-94ed-29153d017c38-utilities\") pod \"community-operators-5lzr7\" (UID: \"08693c79-e329-4cec-94ed-29153d017c38\") " pod="openshift-marketplace/community-operators-5lzr7" Jan 20 18:24:03 crc kubenswrapper[4558]: I0120 18:24:03.241211 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08693c79-e329-4cec-94ed-29153d017c38-catalog-content\") pod \"community-operators-5lzr7\" (UID: \"08693c79-e329-4cec-94ed-29153d017c38\") " pod="openshift-marketplace/community-operators-5lzr7" Jan 20 18:24:03 crc kubenswrapper[4558]: I0120 18:24:03.241273 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08693c79-e329-4cec-94ed-29153d017c38-utilities\") pod \"community-operators-5lzr7\" (UID: \"08693c79-e329-4cec-94ed-29153d017c38\") " pod="openshift-marketplace/community-operators-5lzr7" Jan 20 18:24:03 crc kubenswrapper[4558]: I0120 18:24:03.241340 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2427w\" (UniqueName: \"kubernetes.io/projected/08693c79-e329-4cec-94ed-29153d017c38-kube-api-access-2427w\") pod \"community-operators-5lzr7\" (UID: \"08693c79-e329-4cec-94ed-29153d017c38\") " pod="openshift-marketplace/community-operators-5lzr7" Jan 20 18:24:03 crc kubenswrapper[4558]: I0120 18:24:03.241780 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08693c79-e329-4cec-94ed-29153d017c38-catalog-content\") pod \"community-operators-5lzr7\" (UID: \"08693c79-e329-4cec-94ed-29153d017c38\") " pod="openshift-marketplace/community-operators-5lzr7" Jan 20 18:24:03 crc kubenswrapper[4558]: I0120 18:24:03.241824 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08693c79-e329-4cec-94ed-29153d017c38-utilities\") pod \"community-operators-5lzr7\" (UID: \"08693c79-e329-4cec-94ed-29153d017c38\") " pod="openshift-marketplace/community-operators-5lzr7" Jan 20 18:24:03 crc kubenswrapper[4558]: I0120 18:24:03.260113 4558 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-2427w\" (UniqueName: \"kubernetes.io/projected/08693c79-e329-4cec-94ed-29153d017c38-kube-api-access-2427w\") pod \"community-operators-5lzr7\" (UID: \"08693c79-e329-4cec-94ed-29153d017c38\") " pod="openshift-marketplace/community-operators-5lzr7" Jan 20 18:24:03 crc kubenswrapper[4558]: I0120 18:24:03.432874 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5lzr7" Jan 20 18:24:03 crc kubenswrapper[4558]: I0120 18:24:03.846402 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-5lzr7"] Jan 20 18:24:03 crc kubenswrapper[4558]: W0120 18:24:03.852955 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod08693c79_e329_4cec_94ed_29153d017c38.slice/crio-7364556bcbbaa288b63a366cf5d7ed8eb10f568324df7ea58ee2d23080d75068 WatchSource:0}: Error finding container 7364556bcbbaa288b63a366cf5d7ed8eb10f568324df7ea58ee2d23080d75068: Status 404 returned error can't find the container with id 7364556bcbbaa288b63a366cf5d7ed8eb10f568324df7ea58ee2d23080d75068 Jan 20 18:24:04 crc kubenswrapper[4558]: I0120 18:24:04.873533 4558 generic.go:334] "Generic (PLEG): container finished" podID="08693c79-e329-4cec-94ed-29153d017c38" containerID="08740ad511f8f5ee33775648be1a08becf6481298696e6434583c34a5e7f1aea" exitCode=0 Jan 20 18:24:04 crc kubenswrapper[4558]: I0120 18:24:04.874157 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5lzr7" event={"ID":"08693c79-e329-4cec-94ed-29153d017c38","Type":"ContainerDied","Data":"08740ad511f8f5ee33775648be1a08becf6481298696e6434583c34a5e7f1aea"} Jan 20 18:24:04 crc kubenswrapper[4558]: I0120 18:24:04.874233 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5lzr7" event={"ID":"08693c79-e329-4cec-94ed-29153d017c38","Type":"ContainerStarted","Data":"7364556bcbbaa288b63a366cf5d7ed8eb10f568324df7ea58ee2d23080d75068"} Jan 20 18:24:05 crc kubenswrapper[4558]: I0120 18:24:05.884949 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5lzr7" event={"ID":"08693c79-e329-4cec-94ed-29153d017c38","Type":"ContainerStarted","Data":"ee8f9569b6d269210fb6cfa472690a57fdb4892305a0981a64183412c4bdfb0b"} Jan 20 18:24:06 crc kubenswrapper[4558]: I0120 18:24:06.895746 4558 generic.go:334] "Generic (PLEG): container finished" podID="08693c79-e329-4cec-94ed-29153d017c38" containerID="ee8f9569b6d269210fb6cfa472690a57fdb4892305a0981a64183412c4bdfb0b" exitCode=0 Jan 20 18:24:06 crc kubenswrapper[4558]: I0120 18:24:06.895842 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5lzr7" event={"ID":"08693c79-e329-4cec-94ed-29153d017c38","Type":"ContainerDied","Data":"ee8f9569b6d269210fb6cfa472690a57fdb4892305a0981a64183412c4bdfb0b"} Jan 20 18:24:07 crc kubenswrapper[4558]: I0120 18:24:07.907355 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5lzr7" event={"ID":"08693c79-e329-4cec-94ed-29153d017c38","Type":"ContainerStarted","Data":"7d2ec72f9cf6a9c98fb109ee4c8ed12af2a53df2d8f92fcc1b2b5a823361fbc9"} Jan 20 18:24:07 crc kubenswrapper[4558]: I0120 18:24:07.923030 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-5lzr7" 
podStartSLOduration=2.433460244 podStartE2EDuration="4.922998675s" podCreationTimestamp="2026-01-20 18:24:03 +0000 UTC" firstStartedPulling="2026-01-20 18:24:04.876975856 +0000 UTC m=+6138.637313822" lastFinishedPulling="2026-01-20 18:24:07.366514287 +0000 UTC m=+6141.126852253" observedRunningTime="2026-01-20 18:24:07.921415819 +0000 UTC m=+6141.681753786" watchObservedRunningTime="2026-01-20 18:24:07.922998675 +0000 UTC m=+6141.683336642" Jan 20 18:24:13 crc kubenswrapper[4558]: I0120 18:24:13.433992 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-5lzr7" Jan 20 18:24:13 crc kubenswrapper[4558]: I0120 18:24:13.434648 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-5lzr7" Jan 20 18:24:13 crc kubenswrapper[4558]: I0120 18:24:13.471647 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-5lzr7" Jan 20 18:24:13 crc kubenswrapper[4558]: I0120 18:24:13.989049 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-5lzr7" Jan 20 18:24:15 crc kubenswrapper[4558]: I0120 18:24:15.885985 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-5lzr7"] Jan 20 18:24:15 crc kubenswrapper[4558]: I0120 18:24:15.961693 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-5lzr7" podUID="08693c79-e329-4cec-94ed-29153d017c38" containerName="registry-server" containerID="cri-o://7d2ec72f9cf6a9c98fb109ee4c8ed12af2a53df2d8f92fcc1b2b5a823361fbc9" gracePeriod=2 Jan 20 18:24:16 crc kubenswrapper[4558]: I0120 18:24:16.326574 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5lzr7" Jan 20 18:24:16 crc kubenswrapper[4558]: I0120 18:24:16.442156 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08693c79-e329-4cec-94ed-29153d017c38-catalog-content\") pod \"08693c79-e329-4cec-94ed-29153d017c38\" (UID: \"08693c79-e329-4cec-94ed-29153d017c38\") " Jan 20 18:24:16 crc kubenswrapper[4558]: I0120 18:24:16.442268 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08693c79-e329-4cec-94ed-29153d017c38-utilities\") pod \"08693c79-e329-4cec-94ed-29153d017c38\" (UID: \"08693c79-e329-4cec-94ed-29153d017c38\") " Jan 20 18:24:16 crc kubenswrapper[4558]: I0120 18:24:16.442302 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2427w\" (UniqueName: \"kubernetes.io/projected/08693c79-e329-4cec-94ed-29153d017c38-kube-api-access-2427w\") pod \"08693c79-e329-4cec-94ed-29153d017c38\" (UID: \"08693c79-e329-4cec-94ed-29153d017c38\") " Jan 20 18:24:16 crc kubenswrapper[4558]: I0120 18:24:16.443140 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/08693c79-e329-4cec-94ed-29153d017c38-utilities" (OuterVolumeSpecName: "utilities") pod "08693c79-e329-4cec-94ed-29153d017c38" (UID: "08693c79-e329-4cec-94ed-29153d017c38"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:24:16 crc kubenswrapper[4558]: I0120 18:24:16.448629 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/08693c79-e329-4cec-94ed-29153d017c38-kube-api-access-2427w" (OuterVolumeSpecName: "kube-api-access-2427w") pod "08693c79-e329-4cec-94ed-29153d017c38" (UID: "08693c79-e329-4cec-94ed-29153d017c38"). InnerVolumeSpecName "kube-api-access-2427w". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:24:16 crc kubenswrapper[4558]: I0120 18:24:16.487545 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/08693c79-e329-4cec-94ed-29153d017c38-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "08693c79-e329-4cec-94ed-29153d017c38" (UID: "08693c79-e329-4cec-94ed-29153d017c38"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:24:16 crc kubenswrapper[4558]: I0120 18:24:16.543113 4558 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08693c79-e329-4cec-94ed-29153d017c38-utilities\") on node \"crc\" DevicePath \"\"" Jan 20 18:24:16 crc kubenswrapper[4558]: I0120 18:24:16.543147 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2427w\" (UniqueName: \"kubernetes.io/projected/08693c79-e329-4cec-94ed-29153d017c38-kube-api-access-2427w\") on node \"crc\" DevicePath \"\"" Jan 20 18:24:16 crc kubenswrapper[4558]: I0120 18:24:16.543178 4558 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08693c79-e329-4cec-94ed-29153d017c38-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 20 18:24:16 crc kubenswrapper[4558]: I0120 18:24:16.972565 4558 generic.go:334] "Generic (PLEG): container finished" podID="08693c79-e329-4cec-94ed-29153d017c38" containerID="7d2ec72f9cf6a9c98fb109ee4c8ed12af2a53df2d8f92fcc1b2b5a823361fbc9" exitCode=0 Jan 20 18:24:16 crc kubenswrapper[4558]: I0120 18:24:16.973011 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5lzr7" event={"ID":"08693c79-e329-4cec-94ed-29153d017c38","Type":"ContainerDied","Data":"7d2ec72f9cf6a9c98fb109ee4c8ed12af2a53df2d8f92fcc1b2b5a823361fbc9"} Jan 20 18:24:16 crc kubenswrapper[4558]: I0120 18:24:16.973056 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5lzr7" event={"ID":"08693c79-e329-4cec-94ed-29153d017c38","Type":"ContainerDied","Data":"7364556bcbbaa288b63a366cf5d7ed8eb10f568324df7ea58ee2d23080d75068"} Jan 20 18:24:16 crc kubenswrapper[4558]: I0120 18:24:16.973078 4558 scope.go:117] "RemoveContainer" containerID="7d2ec72f9cf6a9c98fb109ee4c8ed12af2a53df2d8f92fcc1b2b5a823361fbc9" Jan 20 18:24:16 crc kubenswrapper[4558]: I0120 18:24:16.973262 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-5lzr7" Jan 20 18:24:16 crc kubenswrapper[4558]: I0120 18:24:16.991076 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-5lzr7"] Jan 20 18:24:16 crc kubenswrapper[4558]: I0120 18:24:16.992747 4558 scope.go:117] "RemoveContainer" containerID="ee8f9569b6d269210fb6cfa472690a57fdb4892305a0981a64183412c4bdfb0b" Jan 20 18:24:16 crc kubenswrapper[4558]: I0120 18:24:16.995549 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-5lzr7"] Jan 20 18:24:17 crc kubenswrapper[4558]: I0120 18:24:17.009803 4558 scope.go:117] "RemoveContainer" containerID="08740ad511f8f5ee33775648be1a08becf6481298696e6434583c34a5e7f1aea" Jan 20 18:24:17 crc kubenswrapper[4558]: I0120 18:24:17.028014 4558 scope.go:117] "RemoveContainer" containerID="7d2ec72f9cf6a9c98fb109ee4c8ed12af2a53df2d8f92fcc1b2b5a823361fbc9" Jan 20 18:24:17 crc kubenswrapper[4558]: E0120 18:24:17.028325 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7d2ec72f9cf6a9c98fb109ee4c8ed12af2a53df2d8f92fcc1b2b5a823361fbc9\": container with ID starting with 7d2ec72f9cf6a9c98fb109ee4c8ed12af2a53df2d8f92fcc1b2b5a823361fbc9 not found: ID does not exist" containerID="7d2ec72f9cf6a9c98fb109ee4c8ed12af2a53df2d8f92fcc1b2b5a823361fbc9" Jan 20 18:24:17 crc kubenswrapper[4558]: I0120 18:24:17.028360 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7d2ec72f9cf6a9c98fb109ee4c8ed12af2a53df2d8f92fcc1b2b5a823361fbc9"} err="failed to get container status \"7d2ec72f9cf6a9c98fb109ee4c8ed12af2a53df2d8f92fcc1b2b5a823361fbc9\": rpc error: code = NotFound desc = could not find container \"7d2ec72f9cf6a9c98fb109ee4c8ed12af2a53df2d8f92fcc1b2b5a823361fbc9\": container with ID starting with 7d2ec72f9cf6a9c98fb109ee4c8ed12af2a53df2d8f92fcc1b2b5a823361fbc9 not found: ID does not exist" Jan 20 18:24:17 crc kubenswrapper[4558]: I0120 18:24:17.028387 4558 scope.go:117] "RemoveContainer" containerID="ee8f9569b6d269210fb6cfa472690a57fdb4892305a0981a64183412c4bdfb0b" Jan 20 18:24:17 crc kubenswrapper[4558]: E0120 18:24:17.028649 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ee8f9569b6d269210fb6cfa472690a57fdb4892305a0981a64183412c4bdfb0b\": container with ID starting with ee8f9569b6d269210fb6cfa472690a57fdb4892305a0981a64183412c4bdfb0b not found: ID does not exist" containerID="ee8f9569b6d269210fb6cfa472690a57fdb4892305a0981a64183412c4bdfb0b" Jan 20 18:24:17 crc kubenswrapper[4558]: I0120 18:24:17.028686 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ee8f9569b6d269210fb6cfa472690a57fdb4892305a0981a64183412c4bdfb0b"} err="failed to get container status \"ee8f9569b6d269210fb6cfa472690a57fdb4892305a0981a64183412c4bdfb0b\": rpc error: code = NotFound desc = could not find container \"ee8f9569b6d269210fb6cfa472690a57fdb4892305a0981a64183412c4bdfb0b\": container with ID starting with ee8f9569b6d269210fb6cfa472690a57fdb4892305a0981a64183412c4bdfb0b not found: ID does not exist" Jan 20 18:24:17 crc kubenswrapper[4558]: I0120 18:24:17.028706 4558 scope.go:117] "RemoveContainer" containerID="08740ad511f8f5ee33775648be1a08becf6481298696e6434583c34a5e7f1aea" Jan 20 18:24:17 crc kubenswrapper[4558]: E0120 18:24:17.028994 4558 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"08740ad511f8f5ee33775648be1a08becf6481298696e6434583c34a5e7f1aea\": container with ID starting with 08740ad511f8f5ee33775648be1a08becf6481298696e6434583c34a5e7f1aea not found: ID does not exist" containerID="08740ad511f8f5ee33775648be1a08becf6481298696e6434583c34a5e7f1aea" Jan 20 18:24:17 crc kubenswrapper[4558]: I0120 18:24:17.029017 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"08740ad511f8f5ee33775648be1a08becf6481298696e6434583c34a5e7f1aea"} err="failed to get container status \"08740ad511f8f5ee33775648be1a08becf6481298696e6434583c34a5e7f1aea\": rpc error: code = NotFound desc = could not find container \"08740ad511f8f5ee33775648be1a08becf6481298696e6434583c34a5e7f1aea\": container with ID starting with 08740ad511f8f5ee33775648be1a08becf6481298696e6434583c34a5e7f1aea not found: ID does not exist" Jan 20 18:24:18 crc kubenswrapper[4558]: I0120 18:24:18.576590 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="08693c79-e329-4cec-94ed-29153d017c38" path="/var/lib/kubelet/pods/08693c79-e329-4cec-94ed-29153d017c38/volumes" Jan 20 18:24:32 crc kubenswrapper[4558]: I0120 18:24:32.576075 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="keystone-kuttl-tests/keystone-854b758bb9-sp8gl" Jan 20 18:24:33 crc kubenswrapper[4558]: E0120 18:24:33.652685 4558 log.go:32] "Failed when writing line to log file" err="http2: stream closed" path="/var/log/pods/keystone-kuttl-tests_keystone-854b758bb9-sp8gl_fd7cc4fe-552b-4a27-9aad-01f7faf6e242/keystone-api/0.log" line={} Jan 20 18:24:33 crc kubenswrapper[4558]: I0120 18:24:33.928146 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/keystone-68589875c8-6jnsh"] Jan 20 18:24:33 crc kubenswrapper[4558]: E0120 18:24:33.928425 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08693c79-e329-4cec-94ed-29153d017c38" containerName="extract-utilities" Jan 20 18:24:33 crc kubenswrapper[4558]: I0120 18:24:33.928438 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="08693c79-e329-4cec-94ed-29153d017c38" containerName="extract-utilities" Jan 20 18:24:33 crc kubenswrapper[4558]: E0120 18:24:33.928455 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08693c79-e329-4cec-94ed-29153d017c38" containerName="extract-content" Jan 20 18:24:33 crc kubenswrapper[4558]: I0120 18:24:33.928462 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="08693c79-e329-4cec-94ed-29153d017c38" containerName="extract-content" Jan 20 18:24:33 crc kubenswrapper[4558]: E0120 18:24:33.928485 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08693c79-e329-4cec-94ed-29153d017c38" containerName="registry-server" Jan 20 18:24:33 crc kubenswrapper[4558]: I0120 18:24:33.928492 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="08693c79-e329-4cec-94ed-29153d017c38" containerName="registry-server" Jan 20 18:24:33 crc kubenswrapper[4558]: I0120 18:24:33.928617 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="08693c79-e329-4cec-94ed-29153d017c38" containerName="registry-server" Jan 20 18:24:33 crc kubenswrapper[4558]: I0120 18:24:33.929111 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-68589875c8-6jnsh" Jan 20 18:24:33 crc kubenswrapper[4558]: I0120 18:24:33.938886 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-68589875c8-6jnsh"] Jan 20 18:24:33 crc kubenswrapper[4558]: E0120 18:24:33.977310 4558 log.go:32] "Failed when writing line to log file" err="http2: stream closed" path="/var/log/pods/keystone-kuttl-tests_keystone-854b758bb9-sp8gl_fd7cc4fe-552b-4a27-9aad-01f7faf6e242/keystone-api/0.log" line={} Jan 20 18:24:33 crc kubenswrapper[4558]: I0120 18:24:33.977779 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/df9f765b-26aa-46aa-b92a-24773f37c847-scripts\") pod \"keystone-68589875c8-6jnsh\" (UID: \"df9f765b-26aa-46aa-b92a-24773f37c847\") " pod="keystone-kuttl-tests/keystone-68589875c8-6jnsh" Jan 20 18:24:33 crc kubenswrapper[4558]: I0120 18:24:33.977868 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/df9f765b-26aa-46aa-b92a-24773f37c847-credential-keys\") pod \"keystone-68589875c8-6jnsh\" (UID: \"df9f765b-26aa-46aa-b92a-24773f37c847\") " pod="keystone-kuttl-tests/keystone-68589875c8-6jnsh" Jan 20 18:24:33 crc kubenswrapper[4558]: I0120 18:24:33.978019 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/df9f765b-26aa-46aa-b92a-24773f37c847-fernet-keys\") pod \"keystone-68589875c8-6jnsh\" (UID: \"df9f765b-26aa-46aa-b92a-24773f37c847\") " pod="keystone-kuttl-tests/keystone-68589875c8-6jnsh" Jan 20 18:24:33 crc kubenswrapper[4558]: I0120 18:24:33.978114 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df9f765b-26aa-46aa-b92a-24773f37c847-config-data\") pod \"keystone-68589875c8-6jnsh\" (UID: \"df9f765b-26aa-46aa-b92a-24773f37c847\") " pod="keystone-kuttl-tests/keystone-68589875c8-6jnsh" Jan 20 18:24:33 crc kubenswrapper[4558]: I0120 18:24:33.978383 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fhggj\" (UniqueName: \"kubernetes.io/projected/df9f765b-26aa-46aa-b92a-24773f37c847-kube-api-access-fhggj\") pod \"keystone-68589875c8-6jnsh\" (UID: \"df9f765b-26aa-46aa-b92a-24773f37c847\") " pod="keystone-kuttl-tests/keystone-68589875c8-6jnsh" Jan 20 18:24:34 crc kubenswrapper[4558]: I0120 18:24:34.079531 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/df9f765b-26aa-46aa-b92a-24773f37c847-scripts\") pod \"keystone-68589875c8-6jnsh\" (UID: \"df9f765b-26aa-46aa-b92a-24773f37c847\") " pod="keystone-kuttl-tests/keystone-68589875c8-6jnsh" Jan 20 18:24:34 crc kubenswrapper[4558]: I0120 18:24:34.079590 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/df9f765b-26aa-46aa-b92a-24773f37c847-credential-keys\") pod \"keystone-68589875c8-6jnsh\" (UID: \"df9f765b-26aa-46aa-b92a-24773f37c847\") " pod="keystone-kuttl-tests/keystone-68589875c8-6jnsh" Jan 20 18:24:34 crc kubenswrapper[4558]: I0120 18:24:34.079638 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: 
\"kubernetes.io/secret/df9f765b-26aa-46aa-b92a-24773f37c847-fernet-keys\") pod \"keystone-68589875c8-6jnsh\" (UID: \"df9f765b-26aa-46aa-b92a-24773f37c847\") " pod="keystone-kuttl-tests/keystone-68589875c8-6jnsh" Jan 20 18:24:34 crc kubenswrapper[4558]: I0120 18:24:34.079668 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df9f765b-26aa-46aa-b92a-24773f37c847-config-data\") pod \"keystone-68589875c8-6jnsh\" (UID: \"df9f765b-26aa-46aa-b92a-24773f37c847\") " pod="keystone-kuttl-tests/keystone-68589875c8-6jnsh" Jan 20 18:24:34 crc kubenswrapper[4558]: I0120 18:24:34.079717 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fhggj\" (UniqueName: \"kubernetes.io/projected/df9f765b-26aa-46aa-b92a-24773f37c847-kube-api-access-fhggj\") pod \"keystone-68589875c8-6jnsh\" (UID: \"df9f765b-26aa-46aa-b92a-24773f37c847\") " pod="keystone-kuttl-tests/keystone-68589875c8-6jnsh" Jan 20 18:24:34 crc kubenswrapper[4558]: I0120 18:24:34.086624 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/df9f765b-26aa-46aa-b92a-24773f37c847-scripts\") pod \"keystone-68589875c8-6jnsh\" (UID: \"df9f765b-26aa-46aa-b92a-24773f37c847\") " pod="keystone-kuttl-tests/keystone-68589875c8-6jnsh" Jan 20 18:24:34 crc kubenswrapper[4558]: I0120 18:24:34.086917 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/df9f765b-26aa-46aa-b92a-24773f37c847-fernet-keys\") pod \"keystone-68589875c8-6jnsh\" (UID: \"df9f765b-26aa-46aa-b92a-24773f37c847\") " pod="keystone-kuttl-tests/keystone-68589875c8-6jnsh" Jan 20 18:24:34 crc kubenswrapper[4558]: I0120 18:24:34.087186 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/df9f765b-26aa-46aa-b92a-24773f37c847-credential-keys\") pod \"keystone-68589875c8-6jnsh\" (UID: \"df9f765b-26aa-46aa-b92a-24773f37c847\") " pod="keystone-kuttl-tests/keystone-68589875c8-6jnsh" Jan 20 18:24:34 crc kubenswrapper[4558]: I0120 18:24:34.087696 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df9f765b-26aa-46aa-b92a-24773f37c847-config-data\") pod \"keystone-68589875c8-6jnsh\" (UID: \"df9f765b-26aa-46aa-b92a-24773f37c847\") " pod="keystone-kuttl-tests/keystone-68589875c8-6jnsh" Jan 20 18:24:34 crc kubenswrapper[4558]: I0120 18:24:34.097400 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fhggj\" (UniqueName: \"kubernetes.io/projected/df9f765b-26aa-46aa-b92a-24773f37c847-kube-api-access-fhggj\") pod \"keystone-68589875c8-6jnsh\" (UID: \"df9f765b-26aa-46aa-b92a-24773f37c847\") " pod="keystone-kuttl-tests/keystone-68589875c8-6jnsh" Jan 20 18:24:34 crc kubenswrapper[4558]: I0120 18:24:34.245499 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-68589875c8-6jnsh" Jan 20 18:24:34 crc kubenswrapper[4558]: I0120 18:24:34.638992 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-68589875c8-6jnsh"] Jan 20 18:24:35 crc kubenswrapper[4558]: I0120 18:24:35.128839 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-68589875c8-6jnsh" event={"ID":"df9f765b-26aa-46aa-b92a-24773f37c847","Type":"ContainerStarted","Data":"58e7899795036c00e0bc28a1b4464bfbaea9e317026b3819251791fa190bebc0"} Jan 20 18:24:35 crc kubenswrapper[4558]: I0120 18:24:35.129369 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="keystone-kuttl-tests/keystone-68589875c8-6jnsh" Jan 20 18:24:35 crc kubenswrapper[4558]: I0120 18:24:35.129393 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-68589875c8-6jnsh" event={"ID":"df9f765b-26aa-46aa-b92a-24773f37c847","Type":"ContainerStarted","Data":"acf69c4fd9e7f0f4ef682ae9301ee80733258f147c5a3cd1dd0bdfa19505eb68"} Jan 20 18:24:35 crc kubenswrapper[4558]: I0120 18:24:35.148197 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="keystone-kuttl-tests/keystone-68589875c8-6jnsh" podStartSLOduration=2.148155522 podStartE2EDuration="2.148155522s" podCreationTimestamp="2026-01-20 18:24:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:24:35.141065441 +0000 UTC m=+6168.901403408" watchObservedRunningTime="2026-01-20 18:24:35.148155522 +0000 UTC m=+6168.908493490" Jan 20 18:24:35 crc kubenswrapper[4558]: I0120 18:24:35.261640 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone-bootstrap-tlsz9"] Jan 20 18:24:35 crc kubenswrapper[4558]: I0120 18:24:35.267478 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone-db-sync-hztjt"] Jan 20 18:24:35 crc kubenswrapper[4558]: I0120 18:24:35.272135 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/keystone-bootstrap-tlsz9"] Jan 20 18:24:35 crc kubenswrapper[4558]: I0120 18:24:35.276235 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone-854b758bb9-sp8gl"] Jan 20 18:24:35 crc kubenswrapper[4558]: I0120 18:24:35.276480 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="keystone-kuttl-tests/keystone-854b758bb9-sp8gl" podUID="fd7cc4fe-552b-4a27-9aad-01f7faf6e242" containerName="keystone-api" containerID="cri-o://46c6825e4ed9114b6fe8a0bd679e22ff5913b36401fbad5bf6957a23e9151d17" gracePeriod=30 Jan 20 18:24:35 crc kubenswrapper[4558]: I0120 18:24:35.280083 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/keystone-db-sync-hztjt"] Jan 20 18:24:35 crc kubenswrapper[4558]: I0120 18:24:35.291055 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone-68589875c8-6jnsh"] Jan 20 18:24:35 crc kubenswrapper[4558]: I0120 18:24:35.323749 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/keystonef525-account-delete-9bwz2"] Jan 20 18:24:35 crc kubenswrapper[4558]: I0120 18:24:35.325125 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystonef525-account-delete-9bwz2" Jan 20 18:24:35 crc kubenswrapper[4558]: I0120 18:24:35.333682 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystonef525-account-delete-9bwz2"] Jan 20 18:24:35 crc kubenswrapper[4558]: I0120 18:24:35.403129 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/521f418f-ad7f-4c73-a9f1-e3bfd8f83d9d-operator-scripts\") pod \"keystonef525-account-delete-9bwz2\" (UID: \"521f418f-ad7f-4c73-a9f1-e3bfd8f83d9d\") " pod="keystone-kuttl-tests/keystonef525-account-delete-9bwz2" Jan 20 18:24:35 crc kubenswrapper[4558]: I0120 18:24:35.403203 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l5qf2\" (UniqueName: \"kubernetes.io/projected/521f418f-ad7f-4c73-a9f1-e3bfd8f83d9d-kube-api-access-l5qf2\") pod \"keystonef525-account-delete-9bwz2\" (UID: \"521f418f-ad7f-4c73-a9f1-e3bfd8f83d9d\") " pod="keystone-kuttl-tests/keystonef525-account-delete-9bwz2" Jan 20 18:24:35 crc kubenswrapper[4558]: I0120 18:24:35.504339 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/521f418f-ad7f-4c73-a9f1-e3bfd8f83d9d-operator-scripts\") pod \"keystonef525-account-delete-9bwz2\" (UID: \"521f418f-ad7f-4c73-a9f1-e3bfd8f83d9d\") " pod="keystone-kuttl-tests/keystonef525-account-delete-9bwz2" Jan 20 18:24:35 crc kubenswrapper[4558]: I0120 18:24:35.504395 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l5qf2\" (UniqueName: \"kubernetes.io/projected/521f418f-ad7f-4c73-a9f1-e3bfd8f83d9d-kube-api-access-l5qf2\") pod \"keystonef525-account-delete-9bwz2\" (UID: \"521f418f-ad7f-4c73-a9f1-e3bfd8f83d9d\") " pod="keystone-kuttl-tests/keystonef525-account-delete-9bwz2" Jan 20 18:24:35 crc kubenswrapper[4558]: I0120 18:24:35.505314 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/521f418f-ad7f-4c73-a9f1-e3bfd8f83d9d-operator-scripts\") pod \"keystonef525-account-delete-9bwz2\" (UID: \"521f418f-ad7f-4c73-a9f1-e3bfd8f83d9d\") " pod="keystone-kuttl-tests/keystonef525-account-delete-9bwz2" Jan 20 18:24:35 crc kubenswrapper[4558]: I0120 18:24:35.522911 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l5qf2\" (UniqueName: \"kubernetes.io/projected/521f418f-ad7f-4c73-a9f1-e3bfd8f83d9d-kube-api-access-l5qf2\") pod \"keystonef525-account-delete-9bwz2\" (UID: \"521f418f-ad7f-4c73-a9f1-e3bfd8f83d9d\") " pod="keystone-kuttl-tests/keystonef525-account-delete-9bwz2" Jan 20 18:24:35 crc kubenswrapper[4558]: I0120 18:24:35.640161 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystonef525-account-delete-9bwz2" Jan 20 18:24:36 crc kubenswrapper[4558]: I0120 18:24:36.027343 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystonef525-account-delete-9bwz2"] Jan 20 18:24:36 crc kubenswrapper[4558]: W0120 18:24:36.031195 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod521f418f_ad7f_4c73_a9f1_e3bfd8f83d9d.slice/crio-6c4de3b8a448af8f21fafe4816eeeb66eb024cbf4792ea2dc41aa7748f592d08 WatchSource:0}: Error finding container 6c4de3b8a448af8f21fafe4816eeeb66eb024cbf4792ea2dc41aa7748f592d08: Status 404 returned error can't find the container with id 6c4de3b8a448af8f21fafe4816eeeb66eb024cbf4792ea2dc41aa7748f592d08 Jan 20 18:24:36 crc kubenswrapper[4558]: I0120 18:24:36.144900 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystonef525-account-delete-9bwz2" event={"ID":"521f418f-ad7f-4c73-a9f1-e3bfd8f83d9d","Type":"ContainerStarted","Data":"6c4de3b8a448af8f21fafe4816eeeb66eb024cbf4792ea2dc41aa7748f592d08"} Jan 20 18:24:36 crc kubenswrapper[4558]: I0120 18:24:36.145327 4558 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="keystone-kuttl-tests/keystone-68589875c8-6jnsh" secret="" err="secret \"keystone-keystone-dockercfg-xx9fm\" not found" Jan 20 18:24:36 crc kubenswrapper[4558]: E0120 18:24:36.317907 4558 secret.go:188] Couldn't get secret keystone-kuttl-tests/keystone: secret "keystone" not found Jan 20 18:24:36 crc kubenswrapper[4558]: E0120 18:24:36.318057 4558 secret.go:188] Couldn't get secret keystone-kuttl-tests/keystone-config-data: secret "keystone-config-data" not found Jan 20 18:24:36 crc kubenswrapper[4558]: E0120 18:24:36.317988 4558 secret.go:188] Couldn't get secret keystone-kuttl-tests/keystone: secret "keystone" not found Jan 20 18:24:36 crc kubenswrapper[4558]: E0120 18:24:36.318253 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/df9f765b-26aa-46aa-b92a-24773f37c847-credential-keys podName:df9f765b-26aa-46aa-b92a-24773f37c847 nodeName:}" failed. No retries permitted until 2026-01-20 18:24:36.818072747 +0000 UTC m=+6170.578410714 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "credential-keys" (UniqueName: "kubernetes.io/secret/df9f765b-26aa-46aa-b92a-24773f37c847-credential-keys") pod "keystone-68589875c8-6jnsh" (UID: "df9f765b-26aa-46aa-b92a-24773f37c847") : secret "keystone" not found Jan 20 18:24:36 crc kubenswrapper[4558]: E0120 18:24:36.318338 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/df9f765b-26aa-46aa-b92a-24773f37c847-config-data podName:df9f765b-26aa-46aa-b92a-24773f37c847 nodeName:}" failed. No retries permitted until 2026-01-20 18:24:36.818326033 +0000 UTC m=+6170.578664000 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/df9f765b-26aa-46aa-b92a-24773f37c847-config-data") pod "keystone-68589875c8-6jnsh" (UID: "df9f765b-26aa-46aa-b92a-24773f37c847") : secret "keystone-config-data" not found Jan 20 18:24:36 crc kubenswrapper[4558]: E0120 18:24:36.318462 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/df9f765b-26aa-46aa-b92a-24773f37c847-fernet-keys podName:df9f765b-26aa-46aa-b92a-24773f37c847 nodeName:}" failed. 
No retries permitted until 2026-01-20 18:24:36.818452361 +0000 UTC m=+6170.578790327 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "fernet-keys" (UniqueName: "kubernetes.io/secret/df9f765b-26aa-46aa-b92a-24773f37c847-fernet-keys") pod "keystone-68589875c8-6jnsh" (UID: "df9f765b-26aa-46aa-b92a-24773f37c847") : secret "keystone" not found Jan 20 18:24:36 crc kubenswrapper[4558]: E0120 18:24:36.318562 4558 secret.go:188] Couldn't get secret keystone-kuttl-tests/keystone-scripts: secret "keystone-scripts" not found Jan 20 18:24:36 crc kubenswrapper[4558]: E0120 18:24:36.318616 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/df9f765b-26aa-46aa-b92a-24773f37c847-scripts podName:df9f765b-26aa-46aa-b92a-24773f37c847 nodeName:}" failed. No retries permitted until 2026-01-20 18:24:36.818594888 +0000 UTC m=+6170.578932855 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "scripts" (UniqueName: "kubernetes.io/secret/df9f765b-26aa-46aa-b92a-24773f37c847-scripts") pod "keystone-68589875c8-6jnsh" (UID: "df9f765b-26aa-46aa-b92a-24773f37c847") : secret "keystone-scripts" not found Jan 20 18:24:36 crc kubenswrapper[4558]: I0120 18:24:36.575068 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e61fa5e1-c612-4bfb-9585-e282477c4819" path="/var/lib/kubelet/pods/e61fa5e1-c612-4bfb-9585-e282477c4819/volumes" Jan 20 18:24:36 crc kubenswrapper[4558]: I0120 18:24:36.576100 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ec4269b4-bbd4-418c-87ff-431233da9bd3" path="/var/lib/kubelet/pods/ec4269b4-bbd4-418c-87ff-431233da9bd3/volumes" Jan 20 18:24:36 crc kubenswrapper[4558]: E0120 18:24:36.825495 4558 secret.go:188] Couldn't get secret keystone-kuttl-tests/keystone-config-data: secret "keystone-config-data" not found Jan 20 18:24:36 crc kubenswrapper[4558]: E0120 18:24:36.825784 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/df9f765b-26aa-46aa-b92a-24773f37c847-config-data podName:df9f765b-26aa-46aa-b92a-24773f37c847 nodeName:}" failed. No retries permitted until 2026-01-20 18:24:37.825764774 +0000 UTC m=+6171.586102741 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/df9f765b-26aa-46aa-b92a-24773f37c847-config-data") pod "keystone-68589875c8-6jnsh" (UID: "df9f765b-26aa-46aa-b92a-24773f37c847") : secret "keystone-config-data" not found Jan 20 18:24:36 crc kubenswrapper[4558]: E0120 18:24:36.825516 4558 secret.go:188] Couldn't get secret keystone-kuttl-tests/keystone: secret "keystone" not found Jan 20 18:24:36 crc kubenswrapper[4558]: E0120 18:24:36.826011 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/df9f765b-26aa-46aa-b92a-24773f37c847-credential-keys podName:df9f765b-26aa-46aa-b92a-24773f37c847 nodeName:}" failed. No retries permitted until 2026-01-20 18:24:37.825999184 +0000 UTC m=+6171.586337152 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "credential-keys" (UniqueName: "kubernetes.io/secret/df9f765b-26aa-46aa-b92a-24773f37c847-credential-keys") pod "keystone-68589875c8-6jnsh" (UID: "df9f765b-26aa-46aa-b92a-24773f37c847") : secret "keystone" not found Jan 20 18:24:36 crc kubenswrapper[4558]: E0120 18:24:36.825529 4558 secret.go:188] Couldn't get secret keystone-kuttl-tests/keystone-scripts: secret "keystone-scripts" not found Jan 20 18:24:36 crc kubenswrapper[4558]: E0120 18:24:36.825562 4558 secret.go:188] Couldn't get secret keystone-kuttl-tests/keystone: secret "keystone" not found Jan 20 18:24:36 crc kubenswrapper[4558]: E0120 18:24:36.826251 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/df9f765b-26aa-46aa-b92a-24773f37c847-scripts podName:df9f765b-26aa-46aa-b92a-24773f37c847 nodeName:}" failed. No retries permitted until 2026-01-20 18:24:37.82624139 +0000 UTC m=+6171.586579357 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "scripts" (UniqueName: "kubernetes.io/secret/df9f765b-26aa-46aa-b92a-24773f37c847-scripts") pod "keystone-68589875c8-6jnsh" (UID: "df9f765b-26aa-46aa-b92a-24773f37c847") : secret "keystone-scripts" not found Jan 20 18:24:36 crc kubenswrapper[4558]: E0120 18:24:36.826344 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/df9f765b-26aa-46aa-b92a-24773f37c847-fernet-keys podName:df9f765b-26aa-46aa-b92a-24773f37c847 nodeName:}" failed. No retries permitted until 2026-01-20 18:24:37.82632162 +0000 UTC m=+6171.586659597 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "fernet-keys" (UniqueName: "kubernetes.io/secret/df9f765b-26aa-46aa-b92a-24773f37c847-fernet-keys") pod "keystone-68589875c8-6jnsh" (UID: "df9f765b-26aa-46aa-b92a-24773f37c847") : secret "keystone" not found Jan 20 18:24:37 crc kubenswrapper[4558]: I0120 18:24:37.042600 4558 scope.go:117] "RemoveContainer" containerID="721d66ec74e9f91df5b1b7645acd3d7d60752a9f1d484831f0187d9bac7dcb84" Jan 20 18:24:37 crc kubenswrapper[4558]: I0120 18:24:37.061265 4558 scope.go:117] "RemoveContainer" containerID="8fd31eed549f531993d83f4067d71c52895d4572a31caa0756de4e05c0bc8b3a" Jan 20 18:24:37 crc kubenswrapper[4558]: I0120 18:24:37.097312 4558 scope.go:117] "RemoveContainer" containerID="5be5d179a0de79ea40097ce2e9526a627b476ff2431d7bdf6a239736855da624" Jan 20 18:24:37 crc kubenswrapper[4558]: I0120 18:24:37.131716 4558 scope.go:117] "RemoveContainer" containerID="c72ca9b1161063f8a418b411934e48c62908e9c3cc9232ef76b56ef1ef6fa60f" Jan 20 18:24:37 crc kubenswrapper[4558]: I0120 18:24:37.147668 4558 scope.go:117] "RemoveContainer" containerID="86c707878b5dff38defced84a4c52fcd28333c47f7429e3f87b4f498f28dbff2" Jan 20 18:24:37 crc kubenswrapper[4558]: I0120 18:24:37.153407 4558 generic.go:334] "Generic (PLEG): container finished" podID="521f418f-ad7f-4c73-a9f1-e3bfd8f83d9d" containerID="83506ee757a3b02cbab3e73d36a3f772b6e90226c69410c7ed54237f0ced2477" exitCode=0 Jan 20 18:24:37 crc kubenswrapper[4558]: I0120 18:24:37.153473 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystonef525-account-delete-9bwz2" event={"ID":"521f418f-ad7f-4c73-a9f1-e3bfd8f83d9d","Type":"ContainerDied","Data":"83506ee757a3b02cbab3e73d36a3f772b6e90226c69410c7ed54237f0ced2477"} Jan 20 18:24:37 crc kubenswrapper[4558]: I0120 18:24:37.159695 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="keystone-kuttl-tests/keystone-68589875c8-6jnsh" 
podUID="df9f765b-26aa-46aa-b92a-24773f37c847" containerName="keystone-api" containerID="cri-o://58e7899795036c00e0bc28a1b4464bfbaea9e317026b3819251791fa190bebc0" gracePeriod=30 Jan 20 18:24:37 crc kubenswrapper[4558]: I0120 18:24:37.176335 4558 scope.go:117] "RemoveContainer" containerID="8ad08df2841919c9374ed6c5a408a76eddcfe44ffe1422ebdd27adddca8c14d0" Jan 20 18:24:37 crc kubenswrapper[4558]: I0120 18:24:37.194187 4558 scope.go:117] "RemoveContainer" containerID="9c4d3b29cae3d7861d6b056fbb277f7758301ec5f2b733a3980c07a861b22b92" Jan 20 18:24:37 crc kubenswrapper[4558]: I0120 18:24:37.486233 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-68589875c8-6jnsh" Jan 20 18:24:37 crc kubenswrapper[4558]: I0120 18:24:37.536472 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/df9f765b-26aa-46aa-b92a-24773f37c847-scripts\") pod \"df9f765b-26aa-46aa-b92a-24773f37c847\" (UID: \"df9f765b-26aa-46aa-b92a-24773f37c847\") " Jan 20 18:24:37 crc kubenswrapper[4558]: I0120 18:24:37.536541 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/df9f765b-26aa-46aa-b92a-24773f37c847-credential-keys\") pod \"df9f765b-26aa-46aa-b92a-24773f37c847\" (UID: \"df9f765b-26aa-46aa-b92a-24773f37c847\") " Jan 20 18:24:37 crc kubenswrapper[4558]: I0120 18:24:37.536627 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df9f765b-26aa-46aa-b92a-24773f37c847-config-data\") pod \"df9f765b-26aa-46aa-b92a-24773f37c847\" (UID: \"df9f765b-26aa-46aa-b92a-24773f37c847\") " Jan 20 18:24:37 crc kubenswrapper[4558]: I0120 18:24:37.536681 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fhggj\" (UniqueName: \"kubernetes.io/projected/df9f765b-26aa-46aa-b92a-24773f37c847-kube-api-access-fhggj\") pod \"df9f765b-26aa-46aa-b92a-24773f37c847\" (UID: \"df9f765b-26aa-46aa-b92a-24773f37c847\") " Jan 20 18:24:37 crc kubenswrapper[4558]: I0120 18:24:37.536728 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/df9f765b-26aa-46aa-b92a-24773f37c847-fernet-keys\") pod \"df9f765b-26aa-46aa-b92a-24773f37c847\" (UID: \"df9f765b-26aa-46aa-b92a-24773f37c847\") " Jan 20 18:24:37 crc kubenswrapper[4558]: I0120 18:24:37.541949 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/df9f765b-26aa-46aa-b92a-24773f37c847-kube-api-access-fhggj" (OuterVolumeSpecName: "kube-api-access-fhggj") pod "df9f765b-26aa-46aa-b92a-24773f37c847" (UID: "df9f765b-26aa-46aa-b92a-24773f37c847"). InnerVolumeSpecName "kube-api-access-fhggj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:24:37 crc kubenswrapper[4558]: I0120 18:24:37.542037 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/df9f765b-26aa-46aa-b92a-24773f37c847-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "df9f765b-26aa-46aa-b92a-24773f37c847" (UID: "df9f765b-26aa-46aa-b92a-24773f37c847"). InnerVolumeSpecName "credential-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:24:37 crc kubenswrapper[4558]: I0120 18:24:37.542296 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/df9f765b-26aa-46aa-b92a-24773f37c847-scripts" (OuterVolumeSpecName: "scripts") pod "df9f765b-26aa-46aa-b92a-24773f37c847" (UID: "df9f765b-26aa-46aa-b92a-24773f37c847"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:24:37 crc kubenswrapper[4558]: I0120 18:24:37.542723 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/df9f765b-26aa-46aa-b92a-24773f37c847-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "df9f765b-26aa-46aa-b92a-24773f37c847" (UID: "df9f765b-26aa-46aa-b92a-24773f37c847"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:24:37 crc kubenswrapper[4558]: I0120 18:24:37.555833 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/df9f765b-26aa-46aa-b92a-24773f37c847-config-data" (OuterVolumeSpecName: "config-data") pod "df9f765b-26aa-46aa-b92a-24773f37c847" (UID: "df9f765b-26aa-46aa-b92a-24773f37c847"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:24:37 crc kubenswrapper[4558]: I0120 18:24:37.638018 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/df9f765b-26aa-46aa-b92a-24773f37c847-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 18:24:37 crc kubenswrapper[4558]: I0120 18:24:37.638052 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fhggj\" (UniqueName: \"kubernetes.io/projected/df9f765b-26aa-46aa-b92a-24773f37c847-kube-api-access-fhggj\") on node \"crc\" DevicePath \"\"" Jan 20 18:24:37 crc kubenswrapper[4558]: I0120 18:24:37.638064 4558 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/df9f765b-26aa-46aa-b92a-24773f37c847-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 20 18:24:37 crc kubenswrapper[4558]: I0120 18:24:37.638077 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/df9f765b-26aa-46aa-b92a-24773f37c847-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 18:24:37 crc kubenswrapper[4558]: I0120 18:24:37.638090 4558 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/df9f765b-26aa-46aa-b92a-24773f37c847-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 20 18:24:38 crc kubenswrapper[4558]: I0120 18:24:38.169418 4558 generic.go:334] "Generic (PLEG): container finished" podID="df9f765b-26aa-46aa-b92a-24773f37c847" containerID="58e7899795036c00e0bc28a1b4464bfbaea9e317026b3819251791fa190bebc0" exitCode=0 Jan 20 18:24:38 crc kubenswrapper[4558]: I0120 18:24:38.169633 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-68589875c8-6jnsh" Jan 20 18:24:38 crc kubenswrapper[4558]: I0120 18:24:38.170275 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-68589875c8-6jnsh" event={"ID":"df9f765b-26aa-46aa-b92a-24773f37c847","Type":"ContainerDied","Data":"58e7899795036c00e0bc28a1b4464bfbaea9e317026b3819251791fa190bebc0"} Jan 20 18:24:38 crc kubenswrapper[4558]: I0120 18:24:38.170583 4558 scope.go:117] "RemoveContainer" containerID="58e7899795036c00e0bc28a1b4464bfbaea9e317026b3819251791fa190bebc0" Jan 20 18:24:38 crc kubenswrapper[4558]: I0120 18:24:38.170447 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-68589875c8-6jnsh" event={"ID":"df9f765b-26aa-46aa-b92a-24773f37c847","Type":"ContainerDied","Data":"acf69c4fd9e7f0f4ef682ae9301ee80733258f147c5a3cd1dd0bdfa19505eb68"} Jan 20 18:24:38 crc kubenswrapper[4558]: I0120 18:24:38.195678 4558 scope.go:117] "RemoveContainer" containerID="58e7899795036c00e0bc28a1b4464bfbaea9e317026b3819251791fa190bebc0" Jan 20 18:24:38 crc kubenswrapper[4558]: E0120 18:24:38.196221 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"58e7899795036c00e0bc28a1b4464bfbaea9e317026b3819251791fa190bebc0\": container with ID starting with 58e7899795036c00e0bc28a1b4464bfbaea9e317026b3819251791fa190bebc0 not found: ID does not exist" containerID="58e7899795036c00e0bc28a1b4464bfbaea9e317026b3819251791fa190bebc0" Jan 20 18:24:38 crc kubenswrapper[4558]: I0120 18:24:38.196258 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"58e7899795036c00e0bc28a1b4464bfbaea9e317026b3819251791fa190bebc0"} err="failed to get container status \"58e7899795036c00e0bc28a1b4464bfbaea9e317026b3819251791fa190bebc0\": rpc error: code = NotFound desc = could not find container \"58e7899795036c00e0bc28a1b4464bfbaea9e317026b3819251791fa190bebc0\": container with ID starting with 58e7899795036c00e0bc28a1b4464bfbaea9e317026b3819251791fa190bebc0 not found: ID does not exist" Jan 20 18:24:38 crc kubenswrapper[4558]: I0120 18:24:38.199132 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone-68589875c8-6jnsh"] Jan 20 18:24:38 crc kubenswrapper[4558]: I0120 18:24:38.203046 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/keystone-68589875c8-6jnsh"] Jan 20 18:24:38 crc kubenswrapper[4558]: I0120 18:24:38.453920 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystonef525-account-delete-9bwz2" Jan 20 18:24:38 crc kubenswrapper[4558]: I0120 18:24:38.549926 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l5qf2\" (UniqueName: \"kubernetes.io/projected/521f418f-ad7f-4c73-a9f1-e3bfd8f83d9d-kube-api-access-l5qf2\") pod \"521f418f-ad7f-4c73-a9f1-e3bfd8f83d9d\" (UID: \"521f418f-ad7f-4c73-a9f1-e3bfd8f83d9d\") " Jan 20 18:24:38 crc kubenswrapper[4558]: I0120 18:24:38.555266 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/521f418f-ad7f-4c73-a9f1-e3bfd8f83d9d-kube-api-access-l5qf2" (OuterVolumeSpecName: "kube-api-access-l5qf2") pod "521f418f-ad7f-4c73-a9f1-e3bfd8f83d9d" (UID: "521f418f-ad7f-4c73-a9f1-e3bfd8f83d9d"). InnerVolumeSpecName "kube-api-access-l5qf2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:24:38 crc kubenswrapper[4558]: I0120 18:24:38.622769 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="df9f765b-26aa-46aa-b92a-24773f37c847" path="/var/lib/kubelet/pods/df9f765b-26aa-46aa-b92a-24773f37c847/volumes" Jan 20 18:24:38 crc kubenswrapper[4558]: I0120 18:24:38.651339 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/521f418f-ad7f-4c73-a9f1-e3bfd8f83d9d-operator-scripts\") pod \"521f418f-ad7f-4c73-a9f1-e3bfd8f83d9d\" (UID: \"521f418f-ad7f-4c73-a9f1-e3bfd8f83d9d\") " Jan 20 18:24:38 crc kubenswrapper[4558]: I0120 18:24:38.651687 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l5qf2\" (UniqueName: \"kubernetes.io/projected/521f418f-ad7f-4c73-a9f1-e3bfd8f83d9d-kube-api-access-l5qf2\") on node \"crc\" DevicePath \"\"" Jan 20 18:24:38 crc kubenswrapper[4558]: I0120 18:24:38.651884 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/521f418f-ad7f-4c73-a9f1-e3bfd8f83d9d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "521f418f-ad7f-4c73-a9f1-e3bfd8f83d9d" (UID: "521f418f-ad7f-4c73-a9f1-e3bfd8f83d9d"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:24:38 crc kubenswrapper[4558]: I0120 18:24:38.665295 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-854b758bb9-sp8gl" Jan 20 18:24:38 crc kubenswrapper[4558]: I0120 18:24:38.753155 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/fd7cc4fe-552b-4a27-9aad-01f7faf6e242-credential-keys\") pod \"fd7cc4fe-552b-4a27-9aad-01f7faf6e242\" (UID: \"fd7cc4fe-552b-4a27-9aad-01f7faf6e242\") " Jan 20 18:24:38 crc kubenswrapper[4558]: I0120 18:24:38.753251 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd7cc4fe-552b-4a27-9aad-01f7faf6e242-config-data\") pod \"fd7cc4fe-552b-4a27-9aad-01f7faf6e242\" (UID: \"fd7cc4fe-552b-4a27-9aad-01f7faf6e242\") " Jan 20 18:24:38 crc kubenswrapper[4558]: I0120 18:24:38.753318 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l4gzm\" (UniqueName: \"kubernetes.io/projected/fd7cc4fe-552b-4a27-9aad-01f7faf6e242-kube-api-access-l4gzm\") pod \"fd7cc4fe-552b-4a27-9aad-01f7faf6e242\" (UID: \"fd7cc4fe-552b-4a27-9aad-01f7faf6e242\") " Jan 20 18:24:38 crc kubenswrapper[4558]: I0120 18:24:38.753446 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/fd7cc4fe-552b-4a27-9aad-01f7faf6e242-fernet-keys\") pod \"fd7cc4fe-552b-4a27-9aad-01f7faf6e242\" (UID: \"fd7cc4fe-552b-4a27-9aad-01f7faf6e242\") " Jan 20 18:24:38 crc kubenswrapper[4558]: I0120 18:24:38.753501 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fd7cc4fe-552b-4a27-9aad-01f7faf6e242-scripts\") pod \"fd7cc4fe-552b-4a27-9aad-01f7faf6e242\" (UID: \"fd7cc4fe-552b-4a27-9aad-01f7faf6e242\") " Jan 20 18:24:38 crc kubenswrapper[4558]: I0120 18:24:38.753953 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/521f418f-ad7f-4c73-a9f1-e3bfd8f83d9d-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 18:24:38 crc kubenswrapper[4558]: I0120 18:24:38.761263 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd7cc4fe-552b-4a27-9aad-01f7faf6e242-scripts" (OuterVolumeSpecName: "scripts") pod "fd7cc4fe-552b-4a27-9aad-01f7faf6e242" (UID: "fd7cc4fe-552b-4a27-9aad-01f7faf6e242"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:24:38 crc kubenswrapper[4558]: I0120 18:24:38.761359 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fd7cc4fe-552b-4a27-9aad-01f7faf6e242-kube-api-access-l4gzm" (OuterVolumeSpecName: "kube-api-access-l4gzm") pod "fd7cc4fe-552b-4a27-9aad-01f7faf6e242" (UID: "fd7cc4fe-552b-4a27-9aad-01f7faf6e242"). InnerVolumeSpecName "kube-api-access-l4gzm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:24:38 crc kubenswrapper[4558]: I0120 18:24:38.783299 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd7cc4fe-552b-4a27-9aad-01f7faf6e242-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "fd7cc4fe-552b-4a27-9aad-01f7faf6e242" (UID: "fd7cc4fe-552b-4a27-9aad-01f7faf6e242"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:24:38 crc kubenswrapper[4558]: I0120 18:24:38.787272 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd7cc4fe-552b-4a27-9aad-01f7faf6e242-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "fd7cc4fe-552b-4a27-9aad-01f7faf6e242" (UID: "fd7cc4fe-552b-4a27-9aad-01f7faf6e242"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:24:38 crc kubenswrapper[4558]: I0120 18:24:38.857045 4558 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/fd7cc4fe-552b-4a27-9aad-01f7faf6e242-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 20 18:24:38 crc kubenswrapper[4558]: I0120 18:24:38.857078 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fd7cc4fe-552b-4a27-9aad-01f7faf6e242-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 18:24:38 crc kubenswrapper[4558]: I0120 18:24:38.857090 4558 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/fd7cc4fe-552b-4a27-9aad-01f7faf6e242-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 20 18:24:38 crc kubenswrapper[4558]: I0120 18:24:38.857104 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l4gzm\" (UniqueName: \"kubernetes.io/projected/fd7cc4fe-552b-4a27-9aad-01f7faf6e242-kube-api-access-l4gzm\") on node \"crc\" DevicePath \"\"" Jan 20 18:24:38 crc kubenswrapper[4558]: I0120 18:24:38.918061 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fd7cc4fe-552b-4a27-9aad-01f7faf6e242-config-data" (OuterVolumeSpecName: "config-data") pod "fd7cc4fe-552b-4a27-9aad-01f7faf6e242" (UID: "fd7cc4fe-552b-4a27-9aad-01f7faf6e242"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:24:38 crc kubenswrapper[4558]: I0120 18:24:38.958798 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fd7cc4fe-552b-4a27-9aad-01f7faf6e242-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 18:24:39 crc kubenswrapper[4558]: I0120 18:24:39.180994 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystonef525-account-delete-9bwz2" event={"ID":"521f418f-ad7f-4c73-a9f1-e3bfd8f83d9d","Type":"ContainerDied","Data":"6c4de3b8a448af8f21fafe4816eeeb66eb024cbf4792ea2dc41aa7748f592d08"} Jan 20 18:24:39 crc kubenswrapper[4558]: I0120 18:24:39.181931 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6c4de3b8a448af8f21fafe4816eeeb66eb024cbf4792ea2dc41aa7748f592d08" Jan 20 18:24:39 crc kubenswrapper[4558]: I0120 18:24:39.181021 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystonef525-account-delete-9bwz2" Jan 20 18:24:39 crc kubenswrapper[4558]: I0120 18:24:39.184245 4558 generic.go:334] "Generic (PLEG): container finished" podID="fd7cc4fe-552b-4a27-9aad-01f7faf6e242" containerID="46c6825e4ed9114b6fe8a0bd679e22ff5913b36401fbad5bf6957a23e9151d17" exitCode=0 Jan 20 18:24:39 crc kubenswrapper[4558]: I0120 18:24:39.184287 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-854b758bb9-sp8gl" Jan 20 18:24:39 crc kubenswrapper[4558]: I0120 18:24:39.184310 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-854b758bb9-sp8gl" event={"ID":"fd7cc4fe-552b-4a27-9aad-01f7faf6e242","Type":"ContainerDied","Data":"46c6825e4ed9114b6fe8a0bd679e22ff5913b36401fbad5bf6957a23e9151d17"} Jan 20 18:24:39 crc kubenswrapper[4558]: I0120 18:24:39.184679 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-854b758bb9-sp8gl" event={"ID":"fd7cc4fe-552b-4a27-9aad-01f7faf6e242","Type":"ContainerDied","Data":"97fe77964c831da1b43394169c9984db869be8e9f53823ba21051e1e41e0b35a"} Jan 20 18:24:39 crc kubenswrapper[4558]: I0120 18:24:39.184756 4558 scope.go:117] "RemoveContainer" containerID="46c6825e4ed9114b6fe8a0bd679e22ff5913b36401fbad5bf6957a23e9151d17" Jan 20 18:24:39 crc kubenswrapper[4558]: I0120 18:24:39.205639 4558 scope.go:117] "RemoveContainer" containerID="46c6825e4ed9114b6fe8a0bd679e22ff5913b36401fbad5bf6957a23e9151d17" Jan 20 18:24:39 crc kubenswrapper[4558]: E0120 18:24:39.206074 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"46c6825e4ed9114b6fe8a0bd679e22ff5913b36401fbad5bf6957a23e9151d17\": container with ID starting with 46c6825e4ed9114b6fe8a0bd679e22ff5913b36401fbad5bf6957a23e9151d17 not found: ID does not exist" containerID="46c6825e4ed9114b6fe8a0bd679e22ff5913b36401fbad5bf6957a23e9151d17" Jan 20 18:24:39 crc kubenswrapper[4558]: I0120 18:24:39.206118 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"46c6825e4ed9114b6fe8a0bd679e22ff5913b36401fbad5bf6957a23e9151d17"} err="failed to get container status \"46c6825e4ed9114b6fe8a0bd679e22ff5913b36401fbad5bf6957a23e9151d17\": rpc error: code = NotFound desc = could not find container \"46c6825e4ed9114b6fe8a0bd679e22ff5913b36401fbad5bf6957a23e9151d17\": container with ID starting with 46c6825e4ed9114b6fe8a0bd679e22ff5913b36401fbad5bf6957a23e9151d17 not found: 
ID does not exist" Jan 20 18:24:39 crc kubenswrapper[4558]: I0120 18:24:39.215553 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone-854b758bb9-sp8gl"] Jan 20 18:24:39 crc kubenswrapper[4558]: I0120 18:24:39.219083 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/keystone-854b758bb9-sp8gl"] Jan 20 18:24:40 crc kubenswrapper[4558]: I0120 18:24:40.347989 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone-db-create-fxkdq"] Jan 20 18:24:40 crc kubenswrapper[4558]: I0120 18:24:40.352214 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/keystone-db-create-fxkdq"] Jan 20 18:24:40 crc kubenswrapper[4558]: I0120 18:24:40.363637 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystonef525-account-delete-9bwz2"] Jan 20 18:24:40 crc kubenswrapper[4558]: I0120 18:24:40.367703 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/keystonef525-account-delete-9bwz2"] Jan 20 18:24:40 crc kubenswrapper[4558]: I0120 18:24:40.371543 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone-f525-account-create-update-22vbg"] Jan 20 18:24:40 crc kubenswrapper[4558]: I0120 18:24:40.375347 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/keystone-f525-account-create-update-22vbg"] Jan 20 18:24:40 crc kubenswrapper[4558]: I0120 18:24:40.433980 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/keystone-db-create-cjffv"] Jan 20 18:24:40 crc kubenswrapper[4558]: E0120 18:24:40.434390 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd7cc4fe-552b-4a27-9aad-01f7faf6e242" containerName="keystone-api" Jan 20 18:24:40 crc kubenswrapper[4558]: I0120 18:24:40.434409 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd7cc4fe-552b-4a27-9aad-01f7faf6e242" containerName="keystone-api" Jan 20 18:24:40 crc kubenswrapper[4558]: E0120 18:24:40.434461 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="521f418f-ad7f-4c73-a9f1-e3bfd8f83d9d" containerName="mariadb-account-delete" Jan 20 18:24:40 crc kubenswrapper[4558]: I0120 18:24:40.434472 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="521f418f-ad7f-4c73-a9f1-e3bfd8f83d9d" containerName="mariadb-account-delete" Jan 20 18:24:40 crc kubenswrapper[4558]: E0120 18:24:40.434482 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df9f765b-26aa-46aa-b92a-24773f37c847" containerName="keystone-api" Jan 20 18:24:40 crc kubenswrapper[4558]: I0120 18:24:40.434489 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="df9f765b-26aa-46aa-b92a-24773f37c847" containerName="keystone-api" Jan 20 18:24:40 crc kubenswrapper[4558]: I0120 18:24:40.434633 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="521f418f-ad7f-4c73-a9f1-e3bfd8f83d9d" containerName="mariadb-account-delete" Jan 20 18:24:40 crc kubenswrapper[4558]: I0120 18:24:40.434663 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="df9f765b-26aa-46aa-b92a-24773f37c847" containerName="keystone-api" Jan 20 18:24:40 crc kubenswrapper[4558]: I0120 18:24:40.434672 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd7cc4fe-552b-4a27-9aad-01f7faf6e242" containerName="keystone-api" Jan 20 18:24:40 crc kubenswrapper[4558]: I0120 18:24:40.435307 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-db-create-cjffv" Jan 20 18:24:40 crc kubenswrapper[4558]: I0120 18:24:40.444086 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-db-create-cjffv"] Jan 20 18:24:40 crc kubenswrapper[4558]: I0120 18:24:40.545413 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/keystone-fb15-account-create-update-n4k6h"] Jan 20 18:24:40 crc kubenswrapper[4558]: I0120 18:24:40.546776 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-fb15-account-create-update-n4k6h" Jan 20 18:24:40 crc kubenswrapper[4558]: I0120 18:24:40.548400 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-db-secret" Jan 20 18:24:40 crc kubenswrapper[4558]: I0120 18:24:40.549677 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-fb15-account-create-update-n4k6h"] Jan 20 18:24:40 crc kubenswrapper[4558]: I0120 18:24:40.575251 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="521f418f-ad7f-4c73-a9f1-e3bfd8f83d9d" path="/var/lib/kubelet/pods/521f418f-ad7f-4c73-a9f1-e3bfd8f83d9d/volumes" Jan 20 18:24:40 crc kubenswrapper[4558]: I0120 18:24:40.575803 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="53f69d5f-7460-49ee-bb1b-97a5bbda4d75" path="/var/lib/kubelet/pods/53f69d5f-7460-49ee-bb1b-97a5bbda4d75/volumes" Jan 20 18:24:40 crc kubenswrapper[4558]: I0120 18:24:40.576311 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d21eac78-1353-4b39-8819-1b37405e07e5" path="/var/lib/kubelet/pods/d21eac78-1353-4b39-8819-1b37405e07e5/volumes" Jan 20 18:24:40 crc kubenswrapper[4558]: I0120 18:24:40.576822 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fd7cc4fe-552b-4a27-9aad-01f7faf6e242" path="/var/lib/kubelet/pods/fd7cc4fe-552b-4a27-9aad-01f7faf6e242/volumes" Jan 20 18:24:40 crc kubenswrapper[4558]: I0120 18:24:40.581097 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k7lhb\" (UniqueName: \"kubernetes.io/projected/85e74f43-ec59-4b0f-b181-816b7517c590-kube-api-access-k7lhb\") pod \"keystone-fb15-account-create-update-n4k6h\" (UID: \"85e74f43-ec59-4b0f-b181-816b7517c590\") " pod="keystone-kuttl-tests/keystone-fb15-account-create-update-n4k6h" Jan 20 18:24:40 crc kubenswrapper[4558]: I0120 18:24:40.581243 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ljhsp\" (UniqueName: \"kubernetes.io/projected/4976d6a4-a2bb-4173-a5d7-cf308e164bcc-kube-api-access-ljhsp\") pod \"keystone-db-create-cjffv\" (UID: \"4976d6a4-a2bb-4173-a5d7-cf308e164bcc\") " pod="keystone-kuttl-tests/keystone-db-create-cjffv" Jan 20 18:24:40 crc kubenswrapper[4558]: I0120 18:24:40.581299 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4976d6a4-a2bb-4173-a5d7-cf308e164bcc-operator-scripts\") pod \"keystone-db-create-cjffv\" (UID: \"4976d6a4-a2bb-4173-a5d7-cf308e164bcc\") " pod="keystone-kuttl-tests/keystone-db-create-cjffv" Jan 20 18:24:40 crc kubenswrapper[4558]: I0120 18:24:40.581887 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/85e74f43-ec59-4b0f-b181-816b7517c590-operator-scripts\") pod \"keystone-fb15-account-create-update-n4k6h\" (UID: \"85e74f43-ec59-4b0f-b181-816b7517c590\") " pod="keystone-kuttl-tests/keystone-fb15-account-create-update-n4k6h" Jan 20 18:24:40 crc kubenswrapper[4558]: I0120 18:24:40.683444 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k7lhb\" (UniqueName: \"kubernetes.io/projected/85e74f43-ec59-4b0f-b181-816b7517c590-kube-api-access-k7lhb\") pod \"keystone-fb15-account-create-update-n4k6h\" (UID: \"85e74f43-ec59-4b0f-b181-816b7517c590\") " pod="keystone-kuttl-tests/keystone-fb15-account-create-update-n4k6h" Jan 20 18:24:40 crc kubenswrapper[4558]: I0120 18:24:40.683541 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ljhsp\" (UniqueName: \"kubernetes.io/projected/4976d6a4-a2bb-4173-a5d7-cf308e164bcc-kube-api-access-ljhsp\") pod \"keystone-db-create-cjffv\" (UID: \"4976d6a4-a2bb-4173-a5d7-cf308e164bcc\") " pod="keystone-kuttl-tests/keystone-db-create-cjffv" Jan 20 18:24:40 crc kubenswrapper[4558]: I0120 18:24:40.683576 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4976d6a4-a2bb-4173-a5d7-cf308e164bcc-operator-scripts\") pod \"keystone-db-create-cjffv\" (UID: \"4976d6a4-a2bb-4173-a5d7-cf308e164bcc\") " pod="keystone-kuttl-tests/keystone-db-create-cjffv" Jan 20 18:24:40 crc kubenswrapper[4558]: I0120 18:24:40.683629 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/85e74f43-ec59-4b0f-b181-816b7517c590-operator-scripts\") pod \"keystone-fb15-account-create-update-n4k6h\" (UID: \"85e74f43-ec59-4b0f-b181-816b7517c590\") " pod="keystone-kuttl-tests/keystone-fb15-account-create-update-n4k6h" Jan 20 18:24:40 crc kubenswrapper[4558]: I0120 18:24:40.684815 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/85e74f43-ec59-4b0f-b181-816b7517c590-operator-scripts\") pod \"keystone-fb15-account-create-update-n4k6h\" (UID: \"85e74f43-ec59-4b0f-b181-816b7517c590\") " pod="keystone-kuttl-tests/keystone-fb15-account-create-update-n4k6h" Jan 20 18:24:40 crc kubenswrapper[4558]: I0120 18:24:40.685194 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4976d6a4-a2bb-4173-a5d7-cf308e164bcc-operator-scripts\") pod \"keystone-db-create-cjffv\" (UID: \"4976d6a4-a2bb-4173-a5d7-cf308e164bcc\") " pod="keystone-kuttl-tests/keystone-db-create-cjffv" Jan 20 18:24:40 crc kubenswrapper[4558]: I0120 18:24:40.699022 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k7lhb\" (UniqueName: \"kubernetes.io/projected/85e74f43-ec59-4b0f-b181-816b7517c590-kube-api-access-k7lhb\") pod \"keystone-fb15-account-create-update-n4k6h\" (UID: \"85e74f43-ec59-4b0f-b181-816b7517c590\") " pod="keystone-kuttl-tests/keystone-fb15-account-create-update-n4k6h" Jan 20 18:24:40 crc kubenswrapper[4558]: I0120 18:24:40.699268 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ljhsp\" (UniqueName: \"kubernetes.io/projected/4976d6a4-a2bb-4173-a5d7-cf308e164bcc-kube-api-access-ljhsp\") pod \"keystone-db-create-cjffv\" (UID: \"4976d6a4-a2bb-4173-a5d7-cf308e164bcc\") " 
pod="keystone-kuttl-tests/keystone-db-create-cjffv" Jan 20 18:24:40 crc kubenswrapper[4558]: I0120 18:24:40.752701 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-db-create-cjffv" Jan 20 18:24:40 crc kubenswrapper[4558]: I0120 18:24:40.861336 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-fb15-account-create-update-n4k6h" Jan 20 18:24:41 crc kubenswrapper[4558]: I0120 18:24:41.111874 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-db-create-cjffv"] Jan 20 18:24:41 crc kubenswrapper[4558]: W0120 18:24:41.116133 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4976d6a4_a2bb_4173_a5d7_cf308e164bcc.slice/crio-1c6dab19842a0294ea20248b6fdc6871fb4fa8201b6e4e5af783657912b05039 WatchSource:0}: Error finding container 1c6dab19842a0294ea20248b6fdc6871fb4fa8201b6e4e5af783657912b05039: Status 404 returned error can't find the container with id 1c6dab19842a0294ea20248b6fdc6871fb4fa8201b6e4e5af783657912b05039 Jan 20 18:24:41 crc kubenswrapper[4558]: I0120 18:24:41.202998 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-db-create-cjffv" event={"ID":"4976d6a4-a2bb-4173-a5d7-cf308e164bcc","Type":"ContainerStarted","Data":"1c6dab19842a0294ea20248b6fdc6871fb4fa8201b6e4e5af783657912b05039"} Jan 20 18:24:41 crc kubenswrapper[4558]: I0120 18:24:41.245519 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-fb15-account-create-update-n4k6h"] Jan 20 18:24:41 crc kubenswrapper[4558]: W0120 18:24:41.251143 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod85e74f43_ec59_4b0f_b181_816b7517c590.slice/crio-03bda18da1125f4d53999b140a606abc753a0e19356be2db35ec33267f9228cf WatchSource:0}: Error finding container 03bda18da1125f4d53999b140a606abc753a0e19356be2db35ec33267f9228cf: Status 404 returned error can't find the container with id 03bda18da1125f4d53999b140a606abc753a0e19356be2db35ec33267f9228cf Jan 20 18:24:42 crc kubenswrapper[4558]: I0120 18:24:42.213447 4558 generic.go:334] "Generic (PLEG): container finished" podID="4976d6a4-a2bb-4173-a5d7-cf308e164bcc" containerID="80a1987b638b799ff8812714b3baa10fda55c3b0f1fd62dfa51756501b7f37f0" exitCode=0 Jan 20 18:24:42 crc kubenswrapper[4558]: I0120 18:24:42.213563 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-db-create-cjffv" event={"ID":"4976d6a4-a2bb-4173-a5d7-cf308e164bcc","Type":"ContainerDied","Data":"80a1987b638b799ff8812714b3baa10fda55c3b0f1fd62dfa51756501b7f37f0"} Jan 20 18:24:42 crc kubenswrapper[4558]: I0120 18:24:42.216086 4558 generic.go:334] "Generic (PLEG): container finished" podID="85e74f43-ec59-4b0f-b181-816b7517c590" containerID="b5bbe3e9a3db04e677faf3f7f7b95346a19fa660a82a2814feb3eb5cd2d96520" exitCode=0 Jan 20 18:24:42 crc kubenswrapper[4558]: I0120 18:24:42.216151 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-fb15-account-create-update-n4k6h" event={"ID":"85e74f43-ec59-4b0f-b181-816b7517c590","Type":"ContainerDied","Data":"b5bbe3e9a3db04e677faf3f7f7b95346a19fa660a82a2814feb3eb5cd2d96520"} Jan 20 18:24:42 crc kubenswrapper[4558]: I0120 18:24:42.216206 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="keystone-kuttl-tests/keystone-fb15-account-create-update-n4k6h" event={"ID":"85e74f43-ec59-4b0f-b181-816b7517c590","Type":"ContainerStarted","Data":"03bda18da1125f4d53999b140a606abc753a0e19356be2db35ec33267f9228cf"} Jan 20 18:24:43 crc kubenswrapper[4558]: I0120 18:24:43.494250 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-fb15-account-create-update-n4k6h" Jan 20 18:24:43 crc kubenswrapper[4558]: I0120 18:24:43.498111 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-db-create-cjffv" Jan 20 18:24:43 crc kubenswrapper[4558]: I0120 18:24:43.538959 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k7lhb\" (UniqueName: \"kubernetes.io/projected/85e74f43-ec59-4b0f-b181-816b7517c590-kube-api-access-k7lhb\") pod \"85e74f43-ec59-4b0f-b181-816b7517c590\" (UID: \"85e74f43-ec59-4b0f-b181-816b7517c590\") " Jan 20 18:24:43 crc kubenswrapper[4558]: I0120 18:24:43.539021 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ljhsp\" (UniqueName: \"kubernetes.io/projected/4976d6a4-a2bb-4173-a5d7-cf308e164bcc-kube-api-access-ljhsp\") pod \"4976d6a4-a2bb-4173-a5d7-cf308e164bcc\" (UID: \"4976d6a4-a2bb-4173-a5d7-cf308e164bcc\") " Jan 20 18:24:43 crc kubenswrapper[4558]: I0120 18:24:43.539073 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/85e74f43-ec59-4b0f-b181-816b7517c590-operator-scripts\") pod \"85e74f43-ec59-4b0f-b181-816b7517c590\" (UID: \"85e74f43-ec59-4b0f-b181-816b7517c590\") " Jan 20 18:24:43 crc kubenswrapper[4558]: I0120 18:24:43.539100 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4976d6a4-a2bb-4173-a5d7-cf308e164bcc-operator-scripts\") pod \"4976d6a4-a2bb-4173-a5d7-cf308e164bcc\" (UID: \"4976d6a4-a2bb-4173-a5d7-cf308e164bcc\") " Jan 20 18:24:43 crc kubenswrapper[4558]: I0120 18:24:43.539845 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/85e74f43-ec59-4b0f-b181-816b7517c590-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "85e74f43-ec59-4b0f-b181-816b7517c590" (UID: "85e74f43-ec59-4b0f-b181-816b7517c590"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:24:43 crc kubenswrapper[4558]: I0120 18:24:43.539897 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4976d6a4-a2bb-4173-a5d7-cf308e164bcc-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "4976d6a4-a2bb-4173-a5d7-cf308e164bcc" (UID: "4976d6a4-a2bb-4173-a5d7-cf308e164bcc"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:24:43 crc kubenswrapper[4558]: I0120 18:24:43.544370 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4976d6a4-a2bb-4173-a5d7-cf308e164bcc-kube-api-access-ljhsp" (OuterVolumeSpecName: "kube-api-access-ljhsp") pod "4976d6a4-a2bb-4173-a5d7-cf308e164bcc" (UID: "4976d6a4-a2bb-4173-a5d7-cf308e164bcc"). InnerVolumeSpecName "kube-api-access-ljhsp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:24:43 crc kubenswrapper[4558]: I0120 18:24:43.544489 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/85e74f43-ec59-4b0f-b181-816b7517c590-kube-api-access-k7lhb" (OuterVolumeSpecName: "kube-api-access-k7lhb") pod "85e74f43-ec59-4b0f-b181-816b7517c590" (UID: "85e74f43-ec59-4b0f-b181-816b7517c590"). InnerVolumeSpecName "kube-api-access-k7lhb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:24:43 crc kubenswrapper[4558]: I0120 18:24:43.641309 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k7lhb\" (UniqueName: \"kubernetes.io/projected/85e74f43-ec59-4b0f-b181-816b7517c590-kube-api-access-k7lhb\") on node \"crc\" DevicePath \"\"" Jan 20 18:24:43 crc kubenswrapper[4558]: I0120 18:24:43.641344 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ljhsp\" (UniqueName: \"kubernetes.io/projected/4976d6a4-a2bb-4173-a5d7-cf308e164bcc-kube-api-access-ljhsp\") on node \"crc\" DevicePath \"\"" Jan 20 18:24:43 crc kubenswrapper[4558]: I0120 18:24:43.641359 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/85e74f43-ec59-4b0f-b181-816b7517c590-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 18:24:43 crc kubenswrapper[4558]: I0120 18:24:43.641371 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4976d6a4-a2bb-4173-a5d7-cf308e164bcc-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 18:24:44 crc kubenswrapper[4558]: I0120 18:24:44.241521 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-fb15-account-create-update-n4k6h" event={"ID":"85e74f43-ec59-4b0f-b181-816b7517c590","Type":"ContainerDied","Data":"03bda18da1125f4d53999b140a606abc753a0e19356be2db35ec33267f9228cf"} Jan 20 18:24:44 crc kubenswrapper[4558]: I0120 18:24:44.241825 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="03bda18da1125f4d53999b140a606abc753a0e19356be2db35ec33267f9228cf" Jan 20 18:24:44 crc kubenswrapper[4558]: I0120 18:24:44.241535 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-fb15-account-create-update-n4k6h" Jan 20 18:24:44 crc kubenswrapper[4558]: I0120 18:24:44.243745 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-db-create-cjffv" event={"ID":"4976d6a4-a2bb-4173-a5d7-cf308e164bcc","Type":"ContainerDied","Data":"1c6dab19842a0294ea20248b6fdc6871fb4fa8201b6e4e5af783657912b05039"} Jan 20 18:24:44 crc kubenswrapper[4558]: I0120 18:24:44.243783 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1c6dab19842a0294ea20248b6fdc6871fb4fa8201b6e4e5af783657912b05039" Jan 20 18:24:44 crc kubenswrapper[4558]: I0120 18:24:44.243869 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-db-create-cjffv" Jan 20 18:24:46 crc kubenswrapper[4558]: I0120 18:24:46.089808 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/keystone-db-sync-x5n46"] Jan 20 18:24:46 crc kubenswrapper[4558]: E0120 18:24:46.090135 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4976d6a4-a2bb-4173-a5d7-cf308e164bcc" containerName="mariadb-database-create" Jan 20 18:24:46 crc kubenswrapper[4558]: I0120 18:24:46.090150 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4976d6a4-a2bb-4173-a5d7-cf308e164bcc" containerName="mariadb-database-create" Jan 20 18:24:46 crc kubenswrapper[4558]: E0120 18:24:46.090184 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85e74f43-ec59-4b0f-b181-816b7517c590" containerName="mariadb-account-create-update" Jan 20 18:24:46 crc kubenswrapper[4558]: I0120 18:24:46.090192 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="85e74f43-ec59-4b0f-b181-816b7517c590" containerName="mariadb-account-create-update" Jan 20 18:24:46 crc kubenswrapper[4558]: I0120 18:24:46.090330 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="85e74f43-ec59-4b0f-b181-816b7517c590" containerName="mariadb-account-create-update" Jan 20 18:24:46 crc kubenswrapper[4558]: I0120 18:24:46.090341 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="4976d6a4-a2bb-4173-a5d7-cf308e164bcc" containerName="mariadb-database-create" Jan 20 18:24:46 crc kubenswrapper[4558]: I0120 18:24:46.090780 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-db-sync-x5n46" Jan 20 18:24:46 crc kubenswrapper[4558]: I0120 18:24:46.093712 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-scripts" Jan 20 18:24:46 crc kubenswrapper[4558]: I0120 18:24:46.095280 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-config-data" Jan 20 18:24:46 crc kubenswrapper[4558]: I0120 18:24:46.096310 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone" Jan 20 18:24:46 crc kubenswrapper[4558]: I0120 18:24:46.096701 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-keystone-dockercfg-zbxlt" Jan 20 18:24:46 crc kubenswrapper[4558]: I0120 18:24:46.097891 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-db-sync-x5n46"] Jan 20 18:24:46 crc kubenswrapper[4558]: I0120 18:24:46.172655 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zs4tc\" (UniqueName: \"kubernetes.io/projected/54cb4d58-1111-48fd-a8f1-c19f467594f6-kube-api-access-zs4tc\") pod \"keystone-db-sync-x5n46\" (UID: \"54cb4d58-1111-48fd-a8f1-c19f467594f6\") " pod="keystone-kuttl-tests/keystone-db-sync-x5n46" Jan 20 18:24:46 crc kubenswrapper[4558]: I0120 18:24:46.172756 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54cb4d58-1111-48fd-a8f1-c19f467594f6-config-data\") pod \"keystone-db-sync-x5n46\" (UID: \"54cb4d58-1111-48fd-a8f1-c19f467594f6\") " pod="keystone-kuttl-tests/keystone-db-sync-x5n46" Jan 20 18:24:46 crc kubenswrapper[4558]: I0120 18:24:46.274448 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-zs4tc\" (UniqueName: \"kubernetes.io/projected/54cb4d58-1111-48fd-a8f1-c19f467594f6-kube-api-access-zs4tc\") pod \"keystone-db-sync-x5n46\" (UID: \"54cb4d58-1111-48fd-a8f1-c19f467594f6\") " pod="keystone-kuttl-tests/keystone-db-sync-x5n46" Jan 20 18:24:46 crc kubenswrapper[4558]: I0120 18:24:46.274527 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54cb4d58-1111-48fd-a8f1-c19f467594f6-config-data\") pod \"keystone-db-sync-x5n46\" (UID: \"54cb4d58-1111-48fd-a8f1-c19f467594f6\") " pod="keystone-kuttl-tests/keystone-db-sync-x5n46" Jan 20 18:24:46 crc kubenswrapper[4558]: I0120 18:24:46.280606 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54cb4d58-1111-48fd-a8f1-c19f467594f6-config-data\") pod \"keystone-db-sync-x5n46\" (UID: \"54cb4d58-1111-48fd-a8f1-c19f467594f6\") " pod="keystone-kuttl-tests/keystone-db-sync-x5n46" Jan 20 18:24:46 crc kubenswrapper[4558]: I0120 18:24:46.288872 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zs4tc\" (UniqueName: \"kubernetes.io/projected/54cb4d58-1111-48fd-a8f1-c19f467594f6-kube-api-access-zs4tc\") pod \"keystone-db-sync-x5n46\" (UID: \"54cb4d58-1111-48fd-a8f1-c19f467594f6\") " pod="keystone-kuttl-tests/keystone-db-sync-x5n46" Jan 20 18:24:46 crc kubenswrapper[4558]: I0120 18:24:46.406283 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-db-sync-x5n46" Jan 20 18:24:46 crc kubenswrapper[4558]: I0120 18:24:46.800672 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-db-sync-x5n46"] Jan 20 18:24:47 crc kubenswrapper[4558]: I0120 18:24:47.272147 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-db-sync-x5n46" event={"ID":"54cb4d58-1111-48fd-a8f1-c19f467594f6","Type":"ContainerStarted","Data":"ac65b430bfda96b78e8d4fbb1460402bb0f2329655e1e23dfc82f32a90b27394"} Jan 20 18:24:47 crc kubenswrapper[4558]: I0120 18:24:47.272521 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-db-sync-x5n46" event={"ID":"54cb4d58-1111-48fd-a8f1-c19f467594f6","Type":"ContainerStarted","Data":"96bf9688419779428194278ae51f2320048d4298f5083638c8e946c7ae7088eb"} Jan 20 18:24:47 crc kubenswrapper[4558]: I0120 18:24:47.292067 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="keystone-kuttl-tests/keystone-db-sync-x5n46" podStartSLOduration=1.292051442 podStartE2EDuration="1.292051442s" podCreationTimestamp="2026-01-20 18:24:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:24:47.289648644 +0000 UTC m=+6181.049986611" watchObservedRunningTime="2026-01-20 18:24:47.292051442 +0000 UTC m=+6181.052389409" Jan 20 18:24:48 crc kubenswrapper[4558]: I0120 18:24:48.282712 4558 generic.go:334] "Generic (PLEG): container finished" podID="54cb4d58-1111-48fd-a8f1-c19f467594f6" containerID="ac65b430bfda96b78e8d4fbb1460402bb0f2329655e1e23dfc82f32a90b27394" exitCode=0 Jan 20 18:24:48 crc kubenswrapper[4558]: I0120 18:24:48.282774 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-db-sync-x5n46" event={"ID":"54cb4d58-1111-48fd-a8f1-c19f467594f6","Type":"ContainerDied","Data":"ac65b430bfda96b78e8d4fbb1460402bb0f2329655e1e23dfc82f32a90b27394"} Jan 20 
18:24:49 crc kubenswrapper[4558]: I0120 18:24:49.518689 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-db-sync-x5n46" Jan 20 18:24:49 crc kubenswrapper[4558]: I0120 18:24:49.623855 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zs4tc\" (UniqueName: \"kubernetes.io/projected/54cb4d58-1111-48fd-a8f1-c19f467594f6-kube-api-access-zs4tc\") pod \"54cb4d58-1111-48fd-a8f1-c19f467594f6\" (UID: \"54cb4d58-1111-48fd-a8f1-c19f467594f6\") " Jan 20 18:24:49 crc kubenswrapper[4558]: I0120 18:24:49.623918 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54cb4d58-1111-48fd-a8f1-c19f467594f6-config-data\") pod \"54cb4d58-1111-48fd-a8f1-c19f467594f6\" (UID: \"54cb4d58-1111-48fd-a8f1-c19f467594f6\") " Jan 20 18:24:49 crc kubenswrapper[4558]: I0120 18:24:49.630519 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/54cb4d58-1111-48fd-a8f1-c19f467594f6-kube-api-access-zs4tc" (OuterVolumeSpecName: "kube-api-access-zs4tc") pod "54cb4d58-1111-48fd-a8f1-c19f467594f6" (UID: "54cb4d58-1111-48fd-a8f1-c19f467594f6"). InnerVolumeSpecName "kube-api-access-zs4tc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:24:49 crc kubenswrapper[4558]: I0120 18:24:49.654384 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54cb4d58-1111-48fd-a8f1-c19f467594f6-config-data" (OuterVolumeSpecName: "config-data") pod "54cb4d58-1111-48fd-a8f1-c19f467594f6" (UID: "54cb4d58-1111-48fd-a8f1-c19f467594f6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:24:49 crc kubenswrapper[4558]: I0120 18:24:49.725604 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zs4tc\" (UniqueName: \"kubernetes.io/projected/54cb4d58-1111-48fd-a8f1-c19f467594f6-kube-api-access-zs4tc\") on node \"crc\" DevicePath \"\"" Jan 20 18:24:49 crc kubenswrapper[4558]: I0120 18:24:49.725630 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54cb4d58-1111-48fd-a8f1-c19f467594f6-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 18:24:50 crc kubenswrapper[4558]: I0120 18:24:50.301060 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-db-sync-x5n46" event={"ID":"54cb4d58-1111-48fd-a8f1-c19f467594f6","Type":"ContainerDied","Data":"96bf9688419779428194278ae51f2320048d4298f5083638c8e946c7ae7088eb"} Jan 20 18:24:50 crc kubenswrapper[4558]: I0120 18:24:50.301108 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="96bf9688419779428194278ae51f2320048d4298f5083638c8e946c7ae7088eb" Jan 20 18:24:50 crc kubenswrapper[4558]: I0120 18:24:50.301116 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-db-sync-x5n46" Jan 20 18:24:50 crc kubenswrapper[4558]: I0120 18:24:50.455229 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/keystone-bootstrap-hcqbp"] Jan 20 18:24:50 crc kubenswrapper[4558]: E0120 18:24:50.455545 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54cb4d58-1111-48fd-a8f1-c19f467594f6" containerName="keystone-db-sync" Jan 20 18:24:50 crc kubenswrapper[4558]: I0120 18:24:50.455563 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="54cb4d58-1111-48fd-a8f1-c19f467594f6" containerName="keystone-db-sync" Jan 20 18:24:50 crc kubenswrapper[4558]: I0120 18:24:50.455698 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="54cb4d58-1111-48fd-a8f1-c19f467594f6" containerName="keystone-db-sync" Jan 20 18:24:50 crc kubenswrapper[4558]: I0120 18:24:50.456216 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-bootstrap-hcqbp" Jan 20 18:24:50 crc kubenswrapper[4558]: I0120 18:24:50.458488 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone" Jan 20 18:24:50 crc kubenswrapper[4558]: I0120 18:24:50.458576 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-scripts" Jan 20 18:24:50 crc kubenswrapper[4558]: I0120 18:24:50.459075 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-config-data" Jan 20 18:24:50 crc kubenswrapper[4558]: I0120 18:24:50.459089 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"osp-secret" Jan 20 18:24:50 crc kubenswrapper[4558]: I0120 18:24:50.459890 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-keystone-dockercfg-zbxlt" Jan 20 18:24:50 crc kubenswrapper[4558]: I0120 18:24:50.460766 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-bootstrap-hcqbp"] Jan 20 18:24:50 crc kubenswrapper[4558]: I0120 18:24:50.537838 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d99f5d43-54f5-48da-ac98-337f5a8a9964-credential-keys\") pod \"keystone-bootstrap-hcqbp\" (UID: \"d99f5d43-54f5-48da-ac98-337f5a8a9964\") " pod="keystone-kuttl-tests/keystone-bootstrap-hcqbp" Jan 20 18:24:50 crc kubenswrapper[4558]: I0120 18:24:50.538048 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d99f5d43-54f5-48da-ac98-337f5a8a9964-scripts\") pod \"keystone-bootstrap-hcqbp\" (UID: \"d99f5d43-54f5-48da-ac98-337f5a8a9964\") " pod="keystone-kuttl-tests/keystone-bootstrap-hcqbp" Jan 20 18:24:50 crc kubenswrapper[4558]: I0120 18:24:50.538126 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d99f5d43-54f5-48da-ac98-337f5a8a9964-fernet-keys\") pod \"keystone-bootstrap-hcqbp\" (UID: \"d99f5d43-54f5-48da-ac98-337f5a8a9964\") " pod="keystone-kuttl-tests/keystone-bootstrap-hcqbp" Jan 20 18:24:50 crc kubenswrapper[4558]: I0120 18:24:50.538154 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d99f5d43-54f5-48da-ac98-337f5a8a9964-config-data\") pod 
\"keystone-bootstrap-hcqbp\" (UID: \"d99f5d43-54f5-48da-ac98-337f5a8a9964\") " pod="keystone-kuttl-tests/keystone-bootstrap-hcqbp" Jan 20 18:24:50 crc kubenswrapper[4558]: I0120 18:24:50.538220 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bmj2s\" (UniqueName: \"kubernetes.io/projected/d99f5d43-54f5-48da-ac98-337f5a8a9964-kube-api-access-bmj2s\") pod \"keystone-bootstrap-hcqbp\" (UID: \"d99f5d43-54f5-48da-ac98-337f5a8a9964\") " pod="keystone-kuttl-tests/keystone-bootstrap-hcqbp" Jan 20 18:24:50 crc kubenswrapper[4558]: I0120 18:24:50.639075 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d99f5d43-54f5-48da-ac98-337f5a8a9964-credential-keys\") pod \"keystone-bootstrap-hcqbp\" (UID: \"d99f5d43-54f5-48da-ac98-337f5a8a9964\") " pod="keystone-kuttl-tests/keystone-bootstrap-hcqbp" Jan 20 18:24:50 crc kubenswrapper[4558]: I0120 18:24:50.639150 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d99f5d43-54f5-48da-ac98-337f5a8a9964-scripts\") pod \"keystone-bootstrap-hcqbp\" (UID: \"d99f5d43-54f5-48da-ac98-337f5a8a9964\") " pod="keystone-kuttl-tests/keystone-bootstrap-hcqbp" Jan 20 18:24:50 crc kubenswrapper[4558]: I0120 18:24:50.639206 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d99f5d43-54f5-48da-ac98-337f5a8a9964-fernet-keys\") pod \"keystone-bootstrap-hcqbp\" (UID: \"d99f5d43-54f5-48da-ac98-337f5a8a9964\") " pod="keystone-kuttl-tests/keystone-bootstrap-hcqbp" Jan 20 18:24:50 crc kubenswrapper[4558]: I0120 18:24:50.639225 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d99f5d43-54f5-48da-ac98-337f5a8a9964-config-data\") pod \"keystone-bootstrap-hcqbp\" (UID: \"d99f5d43-54f5-48da-ac98-337f5a8a9964\") " pod="keystone-kuttl-tests/keystone-bootstrap-hcqbp" Jan 20 18:24:50 crc kubenswrapper[4558]: I0120 18:24:50.639245 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bmj2s\" (UniqueName: \"kubernetes.io/projected/d99f5d43-54f5-48da-ac98-337f5a8a9964-kube-api-access-bmj2s\") pod \"keystone-bootstrap-hcqbp\" (UID: \"d99f5d43-54f5-48da-ac98-337f5a8a9964\") " pod="keystone-kuttl-tests/keystone-bootstrap-hcqbp" Jan 20 18:24:50 crc kubenswrapper[4558]: I0120 18:24:50.643380 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d99f5d43-54f5-48da-ac98-337f5a8a9964-scripts\") pod \"keystone-bootstrap-hcqbp\" (UID: \"d99f5d43-54f5-48da-ac98-337f5a8a9964\") " pod="keystone-kuttl-tests/keystone-bootstrap-hcqbp" Jan 20 18:24:50 crc kubenswrapper[4558]: I0120 18:24:50.643704 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d99f5d43-54f5-48da-ac98-337f5a8a9964-config-data\") pod \"keystone-bootstrap-hcqbp\" (UID: \"d99f5d43-54f5-48da-ac98-337f5a8a9964\") " pod="keystone-kuttl-tests/keystone-bootstrap-hcqbp" Jan 20 18:24:50 crc kubenswrapper[4558]: I0120 18:24:50.643937 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d99f5d43-54f5-48da-ac98-337f5a8a9964-fernet-keys\") pod \"keystone-bootstrap-hcqbp\" (UID: 
\"d99f5d43-54f5-48da-ac98-337f5a8a9964\") " pod="keystone-kuttl-tests/keystone-bootstrap-hcqbp" Jan 20 18:24:50 crc kubenswrapper[4558]: I0120 18:24:50.644084 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d99f5d43-54f5-48da-ac98-337f5a8a9964-credential-keys\") pod \"keystone-bootstrap-hcqbp\" (UID: \"d99f5d43-54f5-48da-ac98-337f5a8a9964\") " pod="keystone-kuttl-tests/keystone-bootstrap-hcqbp" Jan 20 18:24:50 crc kubenswrapper[4558]: I0120 18:24:50.652284 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bmj2s\" (UniqueName: \"kubernetes.io/projected/d99f5d43-54f5-48da-ac98-337f5a8a9964-kube-api-access-bmj2s\") pod \"keystone-bootstrap-hcqbp\" (UID: \"d99f5d43-54f5-48da-ac98-337f5a8a9964\") " pod="keystone-kuttl-tests/keystone-bootstrap-hcqbp" Jan 20 18:24:50 crc kubenswrapper[4558]: I0120 18:24:50.769413 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-bootstrap-hcqbp" Jan 20 18:24:51 crc kubenswrapper[4558]: I0120 18:24:51.138550 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-bootstrap-hcqbp"] Jan 20 18:24:51 crc kubenswrapper[4558]: W0120 18:24:51.143425 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd99f5d43_54f5_48da_ac98_337f5a8a9964.slice/crio-0fc320187c195e334b63341999fd0a41de532c302a7c0d19650c6e9b29dcb34f WatchSource:0}: Error finding container 0fc320187c195e334b63341999fd0a41de532c302a7c0d19650c6e9b29dcb34f: Status 404 returned error can't find the container with id 0fc320187c195e334b63341999fd0a41de532c302a7c0d19650c6e9b29dcb34f Jan 20 18:24:51 crc kubenswrapper[4558]: I0120 18:24:51.308824 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-bootstrap-hcqbp" event={"ID":"d99f5d43-54f5-48da-ac98-337f5a8a9964","Type":"ContainerStarted","Data":"06be374ebfeaff166d93b6ec35fe42760322347bc5246e2622a424a5602f3c2b"} Jan 20 18:24:51 crc kubenswrapper[4558]: I0120 18:24:51.309106 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-bootstrap-hcqbp" event={"ID":"d99f5d43-54f5-48da-ac98-337f5a8a9964","Type":"ContainerStarted","Data":"0fc320187c195e334b63341999fd0a41de532c302a7c0d19650c6e9b29dcb34f"} Jan 20 18:24:51 crc kubenswrapper[4558]: I0120 18:24:51.326543 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="keystone-kuttl-tests/keystone-bootstrap-hcqbp" podStartSLOduration=1.326507842 podStartE2EDuration="1.326507842s" podCreationTimestamp="2026-01-20 18:24:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:24:51.32438495 +0000 UTC m=+6185.084722917" watchObservedRunningTime="2026-01-20 18:24:51.326507842 +0000 UTC m=+6185.086845809" Jan 20 18:24:54 crc kubenswrapper[4558]: I0120 18:24:54.332880 4558 generic.go:334] "Generic (PLEG): container finished" podID="d99f5d43-54f5-48da-ac98-337f5a8a9964" containerID="06be374ebfeaff166d93b6ec35fe42760322347bc5246e2622a424a5602f3c2b" exitCode=0 Jan 20 18:24:54 crc kubenswrapper[4558]: I0120 18:24:54.333002 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-bootstrap-hcqbp" 
event={"ID":"d99f5d43-54f5-48da-ac98-337f5a8a9964","Type":"ContainerDied","Data":"06be374ebfeaff166d93b6ec35fe42760322347bc5246e2622a424a5602f3c2b"} Jan 20 18:24:55 crc kubenswrapper[4558]: I0120 18:24:55.597717 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-bootstrap-hcqbp" Jan 20 18:24:55 crc kubenswrapper[4558]: I0120 18:24:55.609102 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d99f5d43-54f5-48da-ac98-337f5a8a9964-config-data\") pod \"d99f5d43-54f5-48da-ac98-337f5a8a9964\" (UID: \"d99f5d43-54f5-48da-ac98-337f5a8a9964\") " Jan 20 18:24:55 crc kubenswrapper[4558]: I0120 18:24:55.609246 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bmj2s\" (UniqueName: \"kubernetes.io/projected/d99f5d43-54f5-48da-ac98-337f5a8a9964-kube-api-access-bmj2s\") pod \"d99f5d43-54f5-48da-ac98-337f5a8a9964\" (UID: \"d99f5d43-54f5-48da-ac98-337f5a8a9964\") " Jan 20 18:24:55 crc kubenswrapper[4558]: I0120 18:24:55.609292 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d99f5d43-54f5-48da-ac98-337f5a8a9964-credential-keys\") pod \"d99f5d43-54f5-48da-ac98-337f5a8a9964\" (UID: \"d99f5d43-54f5-48da-ac98-337f5a8a9964\") " Jan 20 18:24:55 crc kubenswrapper[4558]: I0120 18:24:55.609317 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d99f5d43-54f5-48da-ac98-337f5a8a9964-scripts\") pod \"d99f5d43-54f5-48da-ac98-337f5a8a9964\" (UID: \"d99f5d43-54f5-48da-ac98-337f5a8a9964\") " Jan 20 18:24:55 crc kubenswrapper[4558]: I0120 18:24:55.609370 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d99f5d43-54f5-48da-ac98-337f5a8a9964-fernet-keys\") pod \"d99f5d43-54f5-48da-ac98-337f5a8a9964\" (UID: \"d99f5d43-54f5-48da-ac98-337f5a8a9964\") " Jan 20 18:24:55 crc kubenswrapper[4558]: I0120 18:24:55.616438 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d99f5d43-54f5-48da-ac98-337f5a8a9964-kube-api-access-bmj2s" (OuterVolumeSpecName: "kube-api-access-bmj2s") pod "d99f5d43-54f5-48da-ac98-337f5a8a9964" (UID: "d99f5d43-54f5-48da-ac98-337f5a8a9964"). InnerVolumeSpecName "kube-api-access-bmj2s". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:24:55 crc kubenswrapper[4558]: I0120 18:24:55.619066 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d99f5d43-54f5-48da-ac98-337f5a8a9964-scripts" (OuterVolumeSpecName: "scripts") pod "d99f5d43-54f5-48da-ac98-337f5a8a9964" (UID: "d99f5d43-54f5-48da-ac98-337f5a8a9964"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:24:55 crc kubenswrapper[4558]: I0120 18:24:55.619135 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d99f5d43-54f5-48da-ac98-337f5a8a9964-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "d99f5d43-54f5-48da-ac98-337f5a8a9964" (UID: "d99f5d43-54f5-48da-ac98-337f5a8a9964"). InnerVolumeSpecName "credential-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:24:55 crc kubenswrapper[4558]: I0120 18:24:55.619242 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d99f5d43-54f5-48da-ac98-337f5a8a9964-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "d99f5d43-54f5-48da-ac98-337f5a8a9964" (UID: "d99f5d43-54f5-48da-ac98-337f5a8a9964"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:24:55 crc kubenswrapper[4558]: I0120 18:24:55.628123 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d99f5d43-54f5-48da-ac98-337f5a8a9964-config-data" (OuterVolumeSpecName: "config-data") pod "d99f5d43-54f5-48da-ac98-337f5a8a9964" (UID: "d99f5d43-54f5-48da-ac98-337f5a8a9964"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:24:55 crc kubenswrapper[4558]: I0120 18:24:55.710949 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d99f5d43-54f5-48da-ac98-337f5a8a9964-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 18:24:55 crc kubenswrapper[4558]: I0120 18:24:55.710985 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bmj2s\" (UniqueName: \"kubernetes.io/projected/d99f5d43-54f5-48da-ac98-337f5a8a9964-kube-api-access-bmj2s\") on node \"crc\" DevicePath \"\"" Jan 20 18:24:55 crc kubenswrapper[4558]: I0120 18:24:55.711000 4558 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d99f5d43-54f5-48da-ac98-337f5a8a9964-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 20 18:24:55 crc kubenswrapper[4558]: I0120 18:24:55.711011 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d99f5d43-54f5-48da-ac98-337f5a8a9964-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 18:24:55 crc kubenswrapper[4558]: I0120 18:24:55.711021 4558 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d99f5d43-54f5-48da-ac98-337f5a8a9964-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 20 18:24:56 crc kubenswrapper[4558]: I0120 18:24:56.350482 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-bootstrap-hcqbp" event={"ID":"d99f5d43-54f5-48da-ac98-337f5a8a9964","Type":"ContainerDied","Data":"0fc320187c195e334b63341999fd0a41de532c302a7c0d19650c6e9b29dcb34f"} Jan 20 18:24:56 crc kubenswrapper[4558]: I0120 18:24:56.350531 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0fc320187c195e334b63341999fd0a41de532c302a7c0d19650c6e9b29dcb34f" Jan 20 18:24:56 crc kubenswrapper[4558]: I0120 18:24:56.350573 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-bootstrap-hcqbp" Jan 20 18:24:56 crc kubenswrapper[4558]: I0120 18:24:56.415739 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/keystone-5d5bd56788-rvvzp"] Jan 20 18:24:56 crc kubenswrapper[4558]: E0120 18:24:56.416084 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d99f5d43-54f5-48da-ac98-337f5a8a9964" containerName="keystone-bootstrap" Jan 20 18:24:56 crc kubenswrapper[4558]: I0120 18:24:56.416102 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d99f5d43-54f5-48da-ac98-337f5a8a9964" containerName="keystone-bootstrap" Jan 20 18:24:56 crc kubenswrapper[4558]: I0120 18:24:56.416242 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d99f5d43-54f5-48da-ac98-337f5a8a9964" containerName="keystone-bootstrap" Jan 20 18:24:56 crc kubenswrapper[4558]: I0120 18:24:56.416743 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-5d5bd56788-rvvzp" Jan 20 18:24:56 crc kubenswrapper[4558]: I0120 18:24:56.419353 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-keystone-dockercfg-zbxlt" Jan 20 18:24:56 crc kubenswrapper[4558]: I0120 18:24:56.419554 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-scripts" Jan 20 18:24:56 crc kubenswrapper[4558]: I0120 18:24:56.426331 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d18169f1-a4d2-4189-9625-7024b03c64d3-fernet-keys\") pod \"keystone-5d5bd56788-rvvzp\" (UID: \"d18169f1-a4d2-4189-9625-7024b03c64d3\") " pod="keystone-kuttl-tests/keystone-5d5bd56788-rvvzp" Jan 20 18:24:56 crc kubenswrapper[4558]: I0120 18:24:56.426358 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d18169f1-a4d2-4189-9625-7024b03c64d3-scripts\") pod \"keystone-5d5bd56788-rvvzp\" (UID: \"d18169f1-a4d2-4189-9625-7024b03c64d3\") " pod="keystone-kuttl-tests/keystone-5d5bd56788-rvvzp" Jan 20 18:24:56 crc kubenswrapper[4558]: I0120 18:24:56.426398 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n2p8n\" (UniqueName: \"kubernetes.io/projected/d18169f1-a4d2-4189-9625-7024b03c64d3-kube-api-access-n2p8n\") pod \"keystone-5d5bd56788-rvvzp\" (UID: \"d18169f1-a4d2-4189-9625-7024b03c64d3\") " pod="keystone-kuttl-tests/keystone-5d5bd56788-rvvzp" Jan 20 18:24:56 crc kubenswrapper[4558]: I0120 18:24:56.426588 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone" Jan 20 18:24:56 crc kubenswrapper[4558]: I0120 18:24:56.426614 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d18169f1-a4d2-4189-9625-7024b03c64d3-credential-keys\") pod \"keystone-5d5bd56788-rvvzp\" (UID: \"d18169f1-a4d2-4189-9625-7024b03c64d3\") " pod="keystone-kuttl-tests/keystone-5d5bd56788-rvvzp" Jan 20 18:24:56 crc kubenswrapper[4558]: I0120 18:24:56.426679 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-config-data" Jan 20 18:24:56 crc kubenswrapper[4558]: I0120 18:24:56.426727 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/d18169f1-a4d2-4189-9625-7024b03c64d3-config-data\") pod \"keystone-5d5bd56788-rvvzp\" (UID: \"d18169f1-a4d2-4189-9625-7024b03c64d3\") " pod="keystone-kuttl-tests/keystone-5d5bd56788-rvvzp" Jan 20 18:24:56 crc kubenswrapper[4558]: I0120 18:24:56.432606 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-5d5bd56788-rvvzp"] Jan 20 18:24:56 crc kubenswrapper[4558]: I0120 18:24:56.527889 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n2p8n\" (UniqueName: \"kubernetes.io/projected/d18169f1-a4d2-4189-9625-7024b03c64d3-kube-api-access-n2p8n\") pod \"keystone-5d5bd56788-rvvzp\" (UID: \"d18169f1-a4d2-4189-9625-7024b03c64d3\") " pod="keystone-kuttl-tests/keystone-5d5bd56788-rvvzp" Jan 20 18:24:56 crc kubenswrapper[4558]: I0120 18:24:56.527981 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d18169f1-a4d2-4189-9625-7024b03c64d3-credential-keys\") pod \"keystone-5d5bd56788-rvvzp\" (UID: \"d18169f1-a4d2-4189-9625-7024b03c64d3\") " pod="keystone-kuttl-tests/keystone-5d5bd56788-rvvzp" Jan 20 18:24:56 crc kubenswrapper[4558]: I0120 18:24:56.528029 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d18169f1-a4d2-4189-9625-7024b03c64d3-config-data\") pod \"keystone-5d5bd56788-rvvzp\" (UID: \"d18169f1-a4d2-4189-9625-7024b03c64d3\") " pod="keystone-kuttl-tests/keystone-5d5bd56788-rvvzp" Jan 20 18:24:56 crc kubenswrapper[4558]: I0120 18:24:56.529265 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d18169f1-a4d2-4189-9625-7024b03c64d3-scripts\") pod \"keystone-5d5bd56788-rvvzp\" (UID: \"d18169f1-a4d2-4189-9625-7024b03c64d3\") " pod="keystone-kuttl-tests/keystone-5d5bd56788-rvvzp" Jan 20 18:24:56 crc kubenswrapper[4558]: I0120 18:24:56.529502 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d18169f1-a4d2-4189-9625-7024b03c64d3-fernet-keys\") pod \"keystone-5d5bd56788-rvvzp\" (UID: \"d18169f1-a4d2-4189-9625-7024b03c64d3\") " pod="keystone-kuttl-tests/keystone-5d5bd56788-rvvzp" Jan 20 18:24:56 crc kubenswrapper[4558]: I0120 18:24:56.535013 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d18169f1-a4d2-4189-9625-7024b03c64d3-credential-keys\") pod \"keystone-5d5bd56788-rvvzp\" (UID: \"d18169f1-a4d2-4189-9625-7024b03c64d3\") " pod="keystone-kuttl-tests/keystone-5d5bd56788-rvvzp" Jan 20 18:24:56 crc kubenswrapper[4558]: I0120 18:24:56.535035 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d18169f1-a4d2-4189-9625-7024b03c64d3-scripts\") pod \"keystone-5d5bd56788-rvvzp\" (UID: \"d18169f1-a4d2-4189-9625-7024b03c64d3\") " pod="keystone-kuttl-tests/keystone-5d5bd56788-rvvzp" Jan 20 18:24:56 crc kubenswrapper[4558]: I0120 18:24:56.535150 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d18169f1-a4d2-4189-9625-7024b03c64d3-fernet-keys\") pod \"keystone-5d5bd56788-rvvzp\" (UID: \"d18169f1-a4d2-4189-9625-7024b03c64d3\") " pod="keystone-kuttl-tests/keystone-5d5bd56788-rvvzp" Jan 20 18:24:56 crc kubenswrapper[4558]: I0120 18:24:56.535529 
4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d18169f1-a4d2-4189-9625-7024b03c64d3-config-data\") pod \"keystone-5d5bd56788-rvvzp\" (UID: \"d18169f1-a4d2-4189-9625-7024b03c64d3\") " pod="keystone-kuttl-tests/keystone-5d5bd56788-rvvzp" Jan 20 18:24:56 crc kubenswrapper[4558]: I0120 18:24:56.542383 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n2p8n\" (UniqueName: \"kubernetes.io/projected/d18169f1-a4d2-4189-9625-7024b03c64d3-kube-api-access-n2p8n\") pod \"keystone-5d5bd56788-rvvzp\" (UID: \"d18169f1-a4d2-4189-9625-7024b03c64d3\") " pod="keystone-kuttl-tests/keystone-5d5bd56788-rvvzp" Jan 20 18:24:56 crc kubenswrapper[4558]: I0120 18:24:56.734536 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-5d5bd56788-rvvzp" Jan 20 18:24:57 crc kubenswrapper[4558]: I0120 18:24:57.126697 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-5d5bd56788-rvvzp"] Jan 20 18:24:57 crc kubenswrapper[4558]: I0120 18:24:57.360953 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-5d5bd56788-rvvzp" event={"ID":"d18169f1-a4d2-4189-9625-7024b03c64d3","Type":"ContainerStarted","Data":"eca4d76757011d6cdfda2d1177ecd5e231c28e5a23800ae10da031e65eb65d40"} Jan 20 18:24:57 crc kubenswrapper[4558]: I0120 18:24:57.361299 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-5d5bd56788-rvvzp" event={"ID":"d18169f1-a4d2-4189-9625-7024b03c64d3","Type":"ContainerStarted","Data":"ffd467659d91836acbddd27f310b3f3453d94c51566fddbf001fa60068baa3d4"} Jan 20 18:24:57 crc kubenswrapper[4558]: I0120 18:24:57.361793 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="keystone-kuttl-tests/keystone-5d5bd56788-rvvzp" Jan 20 18:24:57 crc kubenswrapper[4558]: I0120 18:24:57.380639 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="keystone-kuttl-tests/keystone-5d5bd56788-rvvzp" podStartSLOduration=1.380620707 podStartE2EDuration="1.380620707s" podCreationTimestamp="2026-01-20 18:24:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:24:57.376877008 +0000 UTC m=+6191.137214975" watchObservedRunningTime="2026-01-20 18:24:57.380620707 +0000 UTC m=+6191.140958673" Jan 20 18:25:06 crc kubenswrapper[4558]: I0120 18:25:06.030475 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-9sxzt"] Jan 20 18:25:06 crc kubenswrapper[4558]: I0120 18:25:06.032594 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-9sxzt" Jan 20 18:25:06 crc kubenswrapper[4558]: I0120 18:25:06.040437 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-9sxzt"] Jan 20 18:25:06 crc kubenswrapper[4558]: I0120 18:25:06.150345 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ced048ca-f8cb-4606-95fb-f8dac2c0ac10-catalog-content\") pod \"certified-operators-9sxzt\" (UID: \"ced048ca-f8cb-4606-95fb-f8dac2c0ac10\") " pod="openshift-marketplace/certified-operators-9sxzt" Jan 20 18:25:06 crc kubenswrapper[4558]: I0120 18:25:06.150472 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ced048ca-f8cb-4606-95fb-f8dac2c0ac10-utilities\") pod \"certified-operators-9sxzt\" (UID: \"ced048ca-f8cb-4606-95fb-f8dac2c0ac10\") " pod="openshift-marketplace/certified-operators-9sxzt" Jan 20 18:25:06 crc kubenswrapper[4558]: I0120 18:25:06.150639 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wk982\" (UniqueName: \"kubernetes.io/projected/ced048ca-f8cb-4606-95fb-f8dac2c0ac10-kube-api-access-wk982\") pod \"certified-operators-9sxzt\" (UID: \"ced048ca-f8cb-4606-95fb-f8dac2c0ac10\") " pod="openshift-marketplace/certified-operators-9sxzt" Jan 20 18:25:06 crc kubenswrapper[4558]: I0120 18:25:06.252279 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ced048ca-f8cb-4606-95fb-f8dac2c0ac10-utilities\") pod \"certified-operators-9sxzt\" (UID: \"ced048ca-f8cb-4606-95fb-f8dac2c0ac10\") " pod="openshift-marketplace/certified-operators-9sxzt" Jan 20 18:25:06 crc kubenswrapper[4558]: I0120 18:25:06.252366 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wk982\" (UniqueName: \"kubernetes.io/projected/ced048ca-f8cb-4606-95fb-f8dac2c0ac10-kube-api-access-wk982\") pod \"certified-operators-9sxzt\" (UID: \"ced048ca-f8cb-4606-95fb-f8dac2c0ac10\") " pod="openshift-marketplace/certified-operators-9sxzt" Jan 20 18:25:06 crc kubenswrapper[4558]: I0120 18:25:06.252437 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ced048ca-f8cb-4606-95fb-f8dac2c0ac10-catalog-content\") pod \"certified-operators-9sxzt\" (UID: \"ced048ca-f8cb-4606-95fb-f8dac2c0ac10\") " pod="openshift-marketplace/certified-operators-9sxzt" Jan 20 18:25:06 crc kubenswrapper[4558]: I0120 18:25:06.252907 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ced048ca-f8cb-4606-95fb-f8dac2c0ac10-utilities\") pod \"certified-operators-9sxzt\" (UID: \"ced048ca-f8cb-4606-95fb-f8dac2c0ac10\") " pod="openshift-marketplace/certified-operators-9sxzt" Jan 20 18:25:06 crc kubenswrapper[4558]: I0120 18:25:06.252945 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ced048ca-f8cb-4606-95fb-f8dac2c0ac10-catalog-content\") pod \"certified-operators-9sxzt\" (UID: \"ced048ca-f8cb-4606-95fb-f8dac2c0ac10\") " pod="openshift-marketplace/certified-operators-9sxzt" Jan 20 18:25:06 crc kubenswrapper[4558]: I0120 18:25:06.270201 4558 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-wk982\" (UniqueName: \"kubernetes.io/projected/ced048ca-f8cb-4606-95fb-f8dac2c0ac10-kube-api-access-wk982\") pod \"certified-operators-9sxzt\" (UID: \"ced048ca-f8cb-4606-95fb-f8dac2c0ac10\") " pod="openshift-marketplace/certified-operators-9sxzt" Jan 20 18:25:06 crc kubenswrapper[4558]: I0120 18:25:06.351961 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9sxzt" Jan 20 18:25:06 crc kubenswrapper[4558]: I0120 18:25:06.782334 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-9sxzt"] Jan 20 18:25:07 crc kubenswrapper[4558]: I0120 18:25:07.446368 4558 generic.go:334] "Generic (PLEG): container finished" podID="ced048ca-f8cb-4606-95fb-f8dac2c0ac10" containerID="ae37f65a0137762b4b2cbaa64e3821a358e50dadecf87dc84cad4497f7d19f5b" exitCode=0 Jan 20 18:25:07 crc kubenswrapper[4558]: I0120 18:25:07.446426 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9sxzt" event={"ID":"ced048ca-f8cb-4606-95fb-f8dac2c0ac10","Type":"ContainerDied","Data":"ae37f65a0137762b4b2cbaa64e3821a358e50dadecf87dc84cad4497f7d19f5b"} Jan 20 18:25:07 crc kubenswrapper[4558]: I0120 18:25:07.446462 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9sxzt" event={"ID":"ced048ca-f8cb-4606-95fb-f8dac2c0ac10","Type":"ContainerStarted","Data":"da89e1b1d38ebf9400fea237fd98a10cee629cf7035eac9829a6954b0533f5e7"} Jan 20 18:25:08 crc kubenswrapper[4558]: I0120 18:25:08.456212 4558 generic.go:334] "Generic (PLEG): container finished" podID="ced048ca-f8cb-4606-95fb-f8dac2c0ac10" containerID="a8851eeece6d386ff6dce9afbd9ef62c164f854e6b1e0d23b52ed39dfbbc009c" exitCode=0 Jan 20 18:25:08 crc kubenswrapper[4558]: I0120 18:25:08.456308 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9sxzt" event={"ID":"ced048ca-f8cb-4606-95fb-f8dac2c0ac10","Type":"ContainerDied","Data":"a8851eeece6d386ff6dce9afbd9ef62c164f854e6b1e0d23b52ed39dfbbc009c"} Jan 20 18:25:09 crc kubenswrapper[4558]: I0120 18:25:09.468701 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9sxzt" event={"ID":"ced048ca-f8cb-4606-95fb-f8dac2c0ac10","Type":"ContainerStarted","Data":"1bfce41cc0a1d8987024a83772c4a058cb78deb812890dad1c4b340668475703"} Jan 20 18:25:09 crc kubenswrapper[4558]: I0120 18:25:09.491321 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-9sxzt" podStartSLOduration=2.020559044 podStartE2EDuration="3.491302527s" podCreationTimestamp="2026-01-20 18:25:06 +0000 UTC" firstStartedPulling="2026-01-20 18:25:07.448112392 +0000 UTC m=+6201.208450359" lastFinishedPulling="2026-01-20 18:25:08.918855875 +0000 UTC m=+6202.679193842" observedRunningTime="2026-01-20 18:25:09.485385322 +0000 UTC m=+6203.245723278" watchObservedRunningTime="2026-01-20 18:25:09.491302527 +0000 UTC m=+6203.251640494" Jan 20 18:25:16 crc kubenswrapper[4558]: I0120 18:25:16.352851 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-9sxzt" Jan 20 18:25:16 crc kubenswrapper[4558]: I0120 18:25:16.353350 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-9sxzt" Jan 20 18:25:16 crc kubenswrapper[4558]: I0120 18:25:16.385451 4558 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-9sxzt" Jan 20 18:25:16 crc kubenswrapper[4558]: I0120 18:25:16.550049 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-9sxzt" Jan 20 18:25:16 crc kubenswrapper[4558]: I0120 18:25:16.611962 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-9sxzt"] Jan 20 18:25:18 crc kubenswrapper[4558]: I0120 18:25:18.535524 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-9sxzt" podUID="ced048ca-f8cb-4606-95fb-f8dac2c0ac10" containerName="registry-server" containerID="cri-o://1bfce41cc0a1d8987024a83772c4a058cb78deb812890dad1c4b340668475703" gracePeriod=2 Jan 20 18:25:18 crc kubenswrapper[4558]: I0120 18:25:18.899408 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9sxzt" Jan 20 18:25:19 crc kubenswrapper[4558]: I0120 18:25:19.057150 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ced048ca-f8cb-4606-95fb-f8dac2c0ac10-utilities\") pod \"ced048ca-f8cb-4606-95fb-f8dac2c0ac10\" (UID: \"ced048ca-f8cb-4606-95fb-f8dac2c0ac10\") " Jan 20 18:25:19 crc kubenswrapper[4558]: I0120 18:25:19.057983 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ced048ca-f8cb-4606-95fb-f8dac2c0ac10-utilities" (OuterVolumeSpecName: "utilities") pod "ced048ca-f8cb-4606-95fb-f8dac2c0ac10" (UID: "ced048ca-f8cb-4606-95fb-f8dac2c0ac10"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:25:19 crc kubenswrapper[4558]: I0120 18:25:19.058111 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ced048ca-f8cb-4606-95fb-f8dac2c0ac10-catalog-content\") pod \"ced048ca-f8cb-4606-95fb-f8dac2c0ac10\" (UID: \"ced048ca-f8cb-4606-95fb-f8dac2c0ac10\") " Jan 20 18:25:19 crc kubenswrapper[4558]: I0120 18:25:19.058181 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wk982\" (UniqueName: \"kubernetes.io/projected/ced048ca-f8cb-4606-95fb-f8dac2c0ac10-kube-api-access-wk982\") pod \"ced048ca-f8cb-4606-95fb-f8dac2c0ac10\" (UID: \"ced048ca-f8cb-4606-95fb-f8dac2c0ac10\") " Jan 20 18:25:19 crc kubenswrapper[4558]: I0120 18:25:19.059646 4558 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ced048ca-f8cb-4606-95fb-f8dac2c0ac10-utilities\") on node \"crc\" DevicePath \"\"" Jan 20 18:25:19 crc kubenswrapper[4558]: I0120 18:25:19.065562 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ced048ca-f8cb-4606-95fb-f8dac2c0ac10-kube-api-access-wk982" (OuterVolumeSpecName: "kube-api-access-wk982") pod "ced048ca-f8cb-4606-95fb-f8dac2c0ac10" (UID: "ced048ca-f8cb-4606-95fb-f8dac2c0ac10"). InnerVolumeSpecName "kube-api-access-wk982". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:25:19 crc kubenswrapper[4558]: I0120 18:25:19.102102 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ced048ca-f8cb-4606-95fb-f8dac2c0ac10-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ced048ca-f8cb-4606-95fb-f8dac2c0ac10" (UID: "ced048ca-f8cb-4606-95fb-f8dac2c0ac10"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:25:19 crc kubenswrapper[4558]: I0120 18:25:19.161477 4558 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ced048ca-f8cb-4606-95fb-f8dac2c0ac10-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 20 18:25:19 crc kubenswrapper[4558]: I0120 18:25:19.161522 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wk982\" (UniqueName: \"kubernetes.io/projected/ced048ca-f8cb-4606-95fb-f8dac2c0ac10-kube-api-access-wk982\") on node \"crc\" DevicePath \"\"" Jan 20 18:25:19 crc kubenswrapper[4558]: I0120 18:25:19.547636 4558 generic.go:334] "Generic (PLEG): container finished" podID="ced048ca-f8cb-4606-95fb-f8dac2c0ac10" containerID="1bfce41cc0a1d8987024a83772c4a058cb78deb812890dad1c4b340668475703" exitCode=0 Jan 20 18:25:19 crc kubenswrapper[4558]: I0120 18:25:19.547755 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9sxzt" Jan 20 18:25:19 crc kubenswrapper[4558]: I0120 18:25:19.547771 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9sxzt" event={"ID":"ced048ca-f8cb-4606-95fb-f8dac2c0ac10","Type":"ContainerDied","Data":"1bfce41cc0a1d8987024a83772c4a058cb78deb812890dad1c4b340668475703"} Jan 20 18:25:19 crc kubenswrapper[4558]: I0120 18:25:19.548224 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9sxzt" event={"ID":"ced048ca-f8cb-4606-95fb-f8dac2c0ac10","Type":"ContainerDied","Data":"da89e1b1d38ebf9400fea237fd98a10cee629cf7035eac9829a6954b0533f5e7"} Jan 20 18:25:19 crc kubenswrapper[4558]: I0120 18:25:19.548258 4558 scope.go:117] "RemoveContainer" containerID="1bfce41cc0a1d8987024a83772c4a058cb78deb812890dad1c4b340668475703" Jan 20 18:25:19 crc kubenswrapper[4558]: I0120 18:25:19.571366 4558 scope.go:117] "RemoveContainer" containerID="a8851eeece6d386ff6dce9afbd9ef62c164f854e6b1e0d23b52ed39dfbbc009c" Jan 20 18:25:19 crc kubenswrapper[4558]: I0120 18:25:19.588057 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-9sxzt"] Jan 20 18:25:19 crc kubenswrapper[4558]: I0120 18:25:19.592882 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-9sxzt"] Jan 20 18:25:19 crc kubenswrapper[4558]: I0120 18:25:19.604950 4558 scope.go:117] "RemoveContainer" containerID="ae37f65a0137762b4b2cbaa64e3821a358e50dadecf87dc84cad4497f7d19f5b" Jan 20 18:25:19 crc kubenswrapper[4558]: I0120 18:25:19.620695 4558 scope.go:117] "RemoveContainer" containerID="1bfce41cc0a1d8987024a83772c4a058cb78deb812890dad1c4b340668475703" Jan 20 18:25:19 crc kubenswrapper[4558]: E0120 18:25:19.621106 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1bfce41cc0a1d8987024a83772c4a058cb78deb812890dad1c4b340668475703\": container with ID starting with 
1bfce41cc0a1d8987024a83772c4a058cb78deb812890dad1c4b340668475703 not found: ID does not exist" containerID="1bfce41cc0a1d8987024a83772c4a058cb78deb812890dad1c4b340668475703" Jan 20 18:25:19 crc kubenswrapper[4558]: I0120 18:25:19.621157 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1bfce41cc0a1d8987024a83772c4a058cb78deb812890dad1c4b340668475703"} err="failed to get container status \"1bfce41cc0a1d8987024a83772c4a058cb78deb812890dad1c4b340668475703\": rpc error: code = NotFound desc = could not find container \"1bfce41cc0a1d8987024a83772c4a058cb78deb812890dad1c4b340668475703\": container with ID starting with 1bfce41cc0a1d8987024a83772c4a058cb78deb812890dad1c4b340668475703 not found: ID does not exist" Jan 20 18:25:19 crc kubenswrapper[4558]: I0120 18:25:19.621222 4558 scope.go:117] "RemoveContainer" containerID="a8851eeece6d386ff6dce9afbd9ef62c164f854e6b1e0d23b52ed39dfbbc009c" Jan 20 18:25:19 crc kubenswrapper[4558]: E0120 18:25:19.621542 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a8851eeece6d386ff6dce9afbd9ef62c164f854e6b1e0d23b52ed39dfbbc009c\": container with ID starting with a8851eeece6d386ff6dce9afbd9ef62c164f854e6b1e0d23b52ed39dfbbc009c not found: ID does not exist" containerID="a8851eeece6d386ff6dce9afbd9ef62c164f854e6b1e0d23b52ed39dfbbc009c" Jan 20 18:25:19 crc kubenswrapper[4558]: I0120 18:25:19.621581 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a8851eeece6d386ff6dce9afbd9ef62c164f854e6b1e0d23b52ed39dfbbc009c"} err="failed to get container status \"a8851eeece6d386ff6dce9afbd9ef62c164f854e6b1e0d23b52ed39dfbbc009c\": rpc error: code = NotFound desc = could not find container \"a8851eeece6d386ff6dce9afbd9ef62c164f854e6b1e0d23b52ed39dfbbc009c\": container with ID starting with a8851eeece6d386ff6dce9afbd9ef62c164f854e6b1e0d23b52ed39dfbbc009c not found: ID does not exist" Jan 20 18:25:19 crc kubenswrapper[4558]: I0120 18:25:19.621611 4558 scope.go:117] "RemoveContainer" containerID="ae37f65a0137762b4b2cbaa64e3821a358e50dadecf87dc84cad4497f7d19f5b" Jan 20 18:25:19 crc kubenswrapper[4558]: E0120 18:25:19.621860 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ae37f65a0137762b4b2cbaa64e3821a358e50dadecf87dc84cad4497f7d19f5b\": container with ID starting with ae37f65a0137762b4b2cbaa64e3821a358e50dadecf87dc84cad4497f7d19f5b not found: ID does not exist" containerID="ae37f65a0137762b4b2cbaa64e3821a358e50dadecf87dc84cad4497f7d19f5b" Jan 20 18:25:19 crc kubenswrapper[4558]: I0120 18:25:19.621892 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ae37f65a0137762b4b2cbaa64e3821a358e50dadecf87dc84cad4497f7d19f5b"} err="failed to get container status \"ae37f65a0137762b4b2cbaa64e3821a358e50dadecf87dc84cad4497f7d19f5b\": rpc error: code = NotFound desc = could not find container \"ae37f65a0137762b4b2cbaa64e3821a358e50dadecf87dc84cad4497f7d19f5b\": container with ID starting with ae37f65a0137762b4b2cbaa64e3821a358e50dadecf87dc84cad4497f7d19f5b not found: ID does not exist" Jan 20 18:25:20 crc kubenswrapper[4558]: I0120 18:25:20.576844 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ced048ca-f8cb-4606-95fb-f8dac2c0ac10" path="/var/lib/kubelet/pods/ced048ca-f8cb-4606-95fb-f8dac2c0ac10/volumes" Jan 20 18:25:27 crc kubenswrapper[4558]: I0120 18:25:27.330578 
4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 18:25:27 crc kubenswrapper[4558]: I0120 18:25:27.330902 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 18:25:28 crc kubenswrapper[4558]: I0120 18:25:28.028266 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="keystone-kuttl-tests/keystone-5d5bd56788-rvvzp" Jan 20 18:25:29 crc kubenswrapper[4558]: I0120 18:25:29.218615 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/openstackclient"] Jan 20 18:25:29 crc kubenswrapper[4558]: E0120 18:25:29.218936 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ced048ca-f8cb-4606-95fb-f8dac2c0ac10" containerName="registry-server" Jan 20 18:25:29 crc kubenswrapper[4558]: I0120 18:25:29.218952 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ced048ca-f8cb-4606-95fb-f8dac2c0ac10" containerName="registry-server" Jan 20 18:25:29 crc kubenswrapper[4558]: E0120 18:25:29.218975 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ced048ca-f8cb-4606-95fb-f8dac2c0ac10" containerName="extract-content" Jan 20 18:25:29 crc kubenswrapper[4558]: I0120 18:25:29.218982 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ced048ca-f8cb-4606-95fb-f8dac2c0ac10" containerName="extract-content" Jan 20 18:25:29 crc kubenswrapper[4558]: E0120 18:25:29.218990 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ced048ca-f8cb-4606-95fb-f8dac2c0ac10" containerName="extract-utilities" Jan 20 18:25:29 crc kubenswrapper[4558]: I0120 18:25:29.218996 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ced048ca-f8cb-4606-95fb-f8dac2c0ac10" containerName="extract-utilities" Jan 20 18:25:29 crc kubenswrapper[4558]: I0120 18:25:29.219143 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ced048ca-f8cb-4606-95fb-f8dac2c0ac10" containerName="registry-server" Jan 20 18:25:29 crc kubenswrapper[4558]: I0120 18:25:29.219693 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/openstackclient" Jan 20 18:25:29 crc kubenswrapper[4558]: I0120 18:25:29.224205 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"openstack-config-secret" Jan 20 18:25:29 crc kubenswrapper[4558]: I0120 18:25:29.224244 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"default-dockercfg-bkm59" Jan 20 18:25:29 crc kubenswrapper[4558]: I0120 18:25:29.224306 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"keystone-kuttl-tests"/"openstack-config" Jan 20 18:25:29 crc kubenswrapper[4558]: I0120 18:25:29.228536 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/openstackclient"] Jan 20 18:25:29 crc kubenswrapper[4558]: I0120 18:25:29.418331 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mdv8r\" (UniqueName: \"kubernetes.io/projected/d4e83135-f3b4-43f8-90be-1a650701041b-kube-api-access-mdv8r\") pod \"openstackclient\" (UID: \"d4e83135-f3b4-43f8-90be-1a650701041b\") " pod="keystone-kuttl-tests/openstackclient" Jan 20 18:25:29 crc kubenswrapper[4558]: I0120 18:25:29.418487 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/d4e83135-f3b4-43f8-90be-1a650701041b-openstack-config-secret\") pod \"openstackclient\" (UID: \"d4e83135-f3b4-43f8-90be-1a650701041b\") " pod="keystone-kuttl-tests/openstackclient" Jan 20 18:25:29 crc kubenswrapper[4558]: I0120 18:25:29.418533 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/d4e83135-f3b4-43f8-90be-1a650701041b-openstack-config\") pod \"openstackclient\" (UID: \"d4e83135-f3b4-43f8-90be-1a650701041b\") " pod="keystone-kuttl-tests/openstackclient" Jan 20 18:25:29 crc kubenswrapper[4558]: I0120 18:25:29.520022 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mdv8r\" (UniqueName: \"kubernetes.io/projected/d4e83135-f3b4-43f8-90be-1a650701041b-kube-api-access-mdv8r\") pod \"openstackclient\" (UID: \"d4e83135-f3b4-43f8-90be-1a650701041b\") " pod="keystone-kuttl-tests/openstackclient" Jan 20 18:25:29 crc kubenswrapper[4558]: I0120 18:25:29.520136 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/d4e83135-f3b4-43f8-90be-1a650701041b-openstack-config-secret\") pod \"openstackclient\" (UID: \"d4e83135-f3b4-43f8-90be-1a650701041b\") " pod="keystone-kuttl-tests/openstackclient" Jan 20 18:25:29 crc kubenswrapper[4558]: I0120 18:25:29.520181 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/d4e83135-f3b4-43f8-90be-1a650701041b-openstack-config\") pod \"openstackclient\" (UID: \"d4e83135-f3b4-43f8-90be-1a650701041b\") " pod="keystone-kuttl-tests/openstackclient" Jan 20 18:25:29 crc kubenswrapper[4558]: I0120 18:25:29.521117 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/d4e83135-f3b4-43f8-90be-1a650701041b-openstack-config\") pod \"openstackclient\" (UID: \"d4e83135-f3b4-43f8-90be-1a650701041b\") " pod="keystone-kuttl-tests/openstackclient" Jan 20 18:25:29 crc kubenswrapper[4558]: I0120 
18:25:29.527227 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/d4e83135-f3b4-43f8-90be-1a650701041b-openstack-config-secret\") pod \"openstackclient\" (UID: \"d4e83135-f3b4-43f8-90be-1a650701041b\") " pod="keystone-kuttl-tests/openstackclient" Jan 20 18:25:29 crc kubenswrapper[4558]: I0120 18:25:29.534593 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mdv8r\" (UniqueName: \"kubernetes.io/projected/d4e83135-f3b4-43f8-90be-1a650701041b-kube-api-access-mdv8r\") pod \"openstackclient\" (UID: \"d4e83135-f3b4-43f8-90be-1a650701041b\") " pod="keystone-kuttl-tests/openstackclient" Jan 20 18:25:29 crc kubenswrapper[4558]: I0120 18:25:29.536409 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/openstackclient" Jan 20 18:25:29 crc kubenswrapper[4558]: I0120 18:25:29.906333 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/openstackclient"] Jan 20 18:25:30 crc kubenswrapper[4558]: I0120 18:25:30.633871 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/openstackclient" event={"ID":"d4e83135-f3b4-43f8-90be-1a650701041b","Type":"ContainerStarted","Data":"1122e6fe3eec40c296471ea3919f7bed1e9ee53385d916757231ac0e6c21cbea"} Jan 20 18:25:30 crc kubenswrapper[4558]: I0120 18:25:30.634225 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/openstackclient" event={"ID":"d4e83135-f3b4-43f8-90be-1a650701041b","Type":"ContainerStarted","Data":"c4c8d46f0f078b0579ef4320048044d6c3280c97ec6976798f4f271ab2c77635"} Jan 20 18:25:30 crc kubenswrapper[4558]: I0120 18:25:30.648858 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="keystone-kuttl-tests/openstackclient" podStartSLOduration=1.067875659 podStartE2EDuration="1.648831611s" podCreationTimestamp="2026-01-20 18:25:29 +0000 UTC" firstStartedPulling="2026-01-20 18:25:29.910559285 +0000 UTC m=+6223.670897252" lastFinishedPulling="2026-01-20 18:25:30.491515237 +0000 UTC m=+6224.251853204" observedRunningTime="2026-01-20 18:25:30.646826281 +0000 UTC m=+6224.407164247" watchObservedRunningTime="2026-01-20 18:25:30.648831611 +0000 UTC m=+6224.409169578" Jan 20 18:25:37 crc kubenswrapper[4558]: I0120 18:25:37.303958 4558 scope.go:117] "RemoveContainer" containerID="f654bb222b97f28d64921290dc753644336762502caf7dcbd69972a94743e052" Jan 20 18:25:37 crc kubenswrapper[4558]: I0120 18:25:37.329108 4558 scope.go:117] "RemoveContainer" containerID="732989bca0f83f5be83d330c5fb2375467ff6a8a0816a9188481958969606838" Jan 20 18:25:37 crc kubenswrapper[4558]: I0120 18:25:37.355919 4558 scope.go:117] "RemoveContainer" containerID="6538b3f3df0d13d62bac2b1cc9919ad3639cd0163b9aa1ca9f045d5309d9733b" Jan 20 18:25:37 crc kubenswrapper[4558]: I0120 18:25:37.375522 4558 scope.go:117] "RemoveContainer" containerID="64d5902721ad60e46859783918d2179ac0a6d631aebd5574c3b527e6050cf780" Jan 20 18:25:37 crc kubenswrapper[4558]: I0120 18:25:37.392533 4558 scope.go:117] "RemoveContainer" containerID="e683c32b7cb9370e6a26cdd7f5ae9fef1c98049813d8a405bcfb2ea1326729ab" Jan 20 18:25:37 crc kubenswrapper[4558]: I0120 18:25:37.410791 4558 scope.go:117] "RemoveContainer" containerID="7f2b1b0d5def68793489498d2bf511951f72742de22aa59f6962dcab8c4ef0d1" Jan 20 18:25:37 crc kubenswrapper[4558]: I0120 18:25:37.439652 4558 scope.go:117] "RemoveContainer" 
containerID="40cf157630bb938b379ccc598d8db3cbcc30d08e930588e8be748226d603132b" Jan 20 18:25:37 crc kubenswrapper[4558]: I0120 18:25:37.460999 4558 scope.go:117] "RemoveContainer" containerID="30b13d5d64bdd04a5b3eba65ef7d6ad8a6f88512fd66ccbff631c3b25c339616" Jan 20 18:25:57 crc kubenswrapper[4558]: I0120 18:25:57.330853 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 18:25:57 crc kubenswrapper[4558]: I0120 18:25:57.331785 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 18:26:27 crc kubenswrapper[4558]: I0120 18:26:27.329633 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 18:26:27 crc kubenswrapper[4558]: I0120 18:26:27.330078 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 18:26:27 crc kubenswrapper[4558]: I0120 18:26:27.330125 4558 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" Jan 20 18:26:27 crc kubenswrapper[4558]: I0120 18:26:27.330765 4558 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"814c7f4e012758db904c630a7832be54bd493eba5362670666ad7cbcf498af98"} pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 20 18:26:27 crc kubenswrapper[4558]: I0120 18:26:27.330815 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" containerID="cri-o://814c7f4e012758db904c630a7832be54bd493eba5362670666ad7cbcf498af98" gracePeriod=600 Jan 20 18:26:27 crc kubenswrapper[4558]: E0120 18:26:27.448795 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:26:28 crc kubenswrapper[4558]: I0120 18:26:28.084093 4558 generic.go:334] "Generic (PLEG): container finished" podID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerID="814c7f4e012758db904c630a7832be54bd493eba5362670666ad7cbcf498af98" exitCode=0 Jan 20 18:26:28 crc 
kubenswrapper[4558]: I0120 18:26:28.084153 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerDied","Data":"814c7f4e012758db904c630a7832be54bd493eba5362670666ad7cbcf498af98"} Jan 20 18:26:28 crc kubenswrapper[4558]: I0120 18:26:28.084225 4558 scope.go:117] "RemoveContainer" containerID="ed78f04bb9083c901150aea9b4470bb3023518f531b71da44ae6842f9fba65fa" Jan 20 18:26:28 crc kubenswrapper[4558]: I0120 18:26:28.084677 4558 scope.go:117] "RemoveContainer" containerID="814c7f4e012758db904c630a7832be54bd493eba5362670666ad7cbcf498af98" Jan 20 18:26:28 crc kubenswrapper[4558]: E0120 18:26:28.084953 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:26:41 crc kubenswrapper[4558]: I0120 18:26:41.565867 4558 scope.go:117] "RemoveContainer" containerID="814c7f4e012758db904c630a7832be54bd493eba5362670666ad7cbcf498af98" Jan 20 18:26:41 crc kubenswrapper[4558]: E0120 18:26:41.566672 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:26:52 crc kubenswrapper[4558]: I0120 18:26:52.567135 4558 scope.go:117] "RemoveContainer" containerID="814c7f4e012758db904c630a7832be54bd493eba5362670666ad7cbcf498af98" Jan 20 18:26:52 crc kubenswrapper[4558]: E0120 18:26:52.568112 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:27:05 crc kubenswrapper[4558]: I0120 18:27:05.566098 4558 scope.go:117] "RemoveContainer" containerID="814c7f4e012758db904c630a7832be54bd493eba5362670666ad7cbcf498af98" Jan 20 18:27:05 crc kubenswrapper[4558]: E0120 18:27:05.567104 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:27:17 crc kubenswrapper[4558]: I0120 18:27:17.565997 4558 scope.go:117] "RemoveContainer" containerID="814c7f4e012758db904c630a7832be54bd493eba5362670666ad7cbcf498af98" Jan 20 18:27:17 crc kubenswrapper[4558]: E0120 18:27:17.567113 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:27:32 crc kubenswrapper[4558]: I0120 18:27:32.566526 4558 scope.go:117] "RemoveContainer" containerID="814c7f4e012758db904c630a7832be54bd493eba5362670666ad7cbcf498af98" Jan 20 18:27:32 crc kubenswrapper[4558]: E0120 18:27:32.567583 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:27:43 crc kubenswrapper[4558]: I0120 18:27:43.566212 4558 scope.go:117] "RemoveContainer" containerID="814c7f4e012758db904c630a7832be54bd493eba5362670666ad7cbcf498af98" Jan 20 18:27:43 crc kubenswrapper[4558]: E0120 18:27:43.567095 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:27:56 crc kubenswrapper[4558]: I0120 18:27:56.570017 4558 scope.go:117] "RemoveContainer" containerID="814c7f4e012758db904c630a7832be54bd493eba5362670666ad7cbcf498af98" Jan 20 18:27:56 crc kubenswrapper[4558]: E0120 18:27:56.570955 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:28:11 crc kubenswrapper[4558]: I0120 18:28:11.566591 4558 scope.go:117] "RemoveContainer" containerID="814c7f4e012758db904c630a7832be54bd493eba5362670666ad7cbcf498af98" Jan 20 18:28:11 crc kubenswrapper[4558]: E0120 18:28:11.567584 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:28:21 crc kubenswrapper[4558]: I0120 18:28:21.161783 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-bqzcj"] Jan 20 18:28:21 crc kubenswrapper[4558]: I0120 18:28:21.163528 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-bqzcj" Jan 20 18:28:21 crc kubenswrapper[4558]: I0120 18:28:21.176112 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-bqzcj"] Jan 20 18:28:21 crc kubenswrapper[4558]: I0120 18:28:21.176559 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vb9mq\" (UniqueName: \"kubernetes.io/projected/70b8a0e5-67f1-4791-92b8-aba3cc0b7472-kube-api-access-vb9mq\") pod \"redhat-operators-bqzcj\" (UID: \"70b8a0e5-67f1-4791-92b8-aba3cc0b7472\") " pod="openshift-marketplace/redhat-operators-bqzcj" Jan 20 18:28:21 crc kubenswrapper[4558]: I0120 18:28:21.176728 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/70b8a0e5-67f1-4791-92b8-aba3cc0b7472-utilities\") pod \"redhat-operators-bqzcj\" (UID: \"70b8a0e5-67f1-4791-92b8-aba3cc0b7472\") " pod="openshift-marketplace/redhat-operators-bqzcj" Jan 20 18:28:21 crc kubenswrapper[4558]: I0120 18:28:21.176758 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/70b8a0e5-67f1-4791-92b8-aba3cc0b7472-catalog-content\") pod \"redhat-operators-bqzcj\" (UID: \"70b8a0e5-67f1-4791-92b8-aba3cc0b7472\") " pod="openshift-marketplace/redhat-operators-bqzcj" Jan 20 18:28:21 crc kubenswrapper[4558]: I0120 18:28:21.278410 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/70b8a0e5-67f1-4791-92b8-aba3cc0b7472-utilities\") pod \"redhat-operators-bqzcj\" (UID: \"70b8a0e5-67f1-4791-92b8-aba3cc0b7472\") " pod="openshift-marketplace/redhat-operators-bqzcj" Jan 20 18:28:21 crc kubenswrapper[4558]: I0120 18:28:21.278468 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/70b8a0e5-67f1-4791-92b8-aba3cc0b7472-catalog-content\") pod \"redhat-operators-bqzcj\" (UID: \"70b8a0e5-67f1-4791-92b8-aba3cc0b7472\") " pod="openshift-marketplace/redhat-operators-bqzcj" Jan 20 18:28:21 crc kubenswrapper[4558]: I0120 18:28:21.278651 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vb9mq\" (UniqueName: \"kubernetes.io/projected/70b8a0e5-67f1-4791-92b8-aba3cc0b7472-kube-api-access-vb9mq\") pod \"redhat-operators-bqzcj\" (UID: \"70b8a0e5-67f1-4791-92b8-aba3cc0b7472\") " pod="openshift-marketplace/redhat-operators-bqzcj" Jan 20 18:28:21 crc kubenswrapper[4558]: I0120 18:28:21.278870 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/70b8a0e5-67f1-4791-92b8-aba3cc0b7472-utilities\") pod \"redhat-operators-bqzcj\" (UID: \"70b8a0e5-67f1-4791-92b8-aba3cc0b7472\") " pod="openshift-marketplace/redhat-operators-bqzcj" Jan 20 18:28:21 crc kubenswrapper[4558]: I0120 18:28:21.278985 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/70b8a0e5-67f1-4791-92b8-aba3cc0b7472-catalog-content\") pod \"redhat-operators-bqzcj\" (UID: \"70b8a0e5-67f1-4791-92b8-aba3cc0b7472\") " pod="openshift-marketplace/redhat-operators-bqzcj" Jan 20 18:28:21 crc kubenswrapper[4558]: I0120 18:28:21.294285 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-vb9mq\" (UniqueName: \"kubernetes.io/projected/70b8a0e5-67f1-4791-92b8-aba3cc0b7472-kube-api-access-vb9mq\") pod \"redhat-operators-bqzcj\" (UID: \"70b8a0e5-67f1-4791-92b8-aba3cc0b7472\") " pod="openshift-marketplace/redhat-operators-bqzcj" Jan 20 18:28:21 crc kubenswrapper[4558]: I0120 18:28:21.479383 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-bqzcj" Jan 20 18:28:21 crc kubenswrapper[4558]: I0120 18:28:21.863595 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-bqzcj"] Jan 20 18:28:21 crc kubenswrapper[4558]: W0120 18:28:21.866716 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod70b8a0e5_67f1_4791_92b8_aba3cc0b7472.slice/crio-bc1519049c5eae2e2cadb3f0378cdb0ce4718b015f79dadf91c8419e6f3d7915 WatchSource:0}: Error finding container bc1519049c5eae2e2cadb3f0378cdb0ce4718b015f79dadf91c8419e6f3d7915: Status 404 returned error can't find the container with id bc1519049c5eae2e2cadb3f0378cdb0ce4718b015f79dadf91c8419e6f3d7915 Jan 20 18:28:21 crc kubenswrapper[4558]: I0120 18:28:21.991595 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bqzcj" event={"ID":"70b8a0e5-67f1-4791-92b8-aba3cc0b7472","Type":"ContainerStarted","Data":"033c15cf417608cde2c8d59a6cae60f9ee597970abc4935ba4bdb21a1b8750ca"} Jan 20 18:28:21 crc kubenswrapper[4558]: I0120 18:28:21.991649 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bqzcj" event={"ID":"70b8a0e5-67f1-4791-92b8-aba3cc0b7472","Type":"ContainerStarted","Data":"bc1519049c5eae2e2cadb3f0378cdb0ce4718b015f79dadf91c8419e6f3d7915"} Jan 20 18:28:23 crc kubenswrapper[4558]: I0120 18:28:23.000825 4558 generic.go:334] "Generic (PLEG): container finished" podID="70b8a0e5-67f1-4791-92b8-aba3cc0b7472" containerID="033c15cf417608cde2c8d59a6cae60f9ee597970abc4935ba4bdb21a1b8750ca" exitCode=0 Jan 20 18:28:23 crc kubenswrapper[4558]: I0120 18:28:23.000886 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bqzcj" event={"ID":"70b8a0e5-67f1-4791-92b8-aba3cc0b7472","Type":"ContainerDied","Data":"033c15cf417608cde2c8d59a6cae60f9ee597970abc4935ba4bdb21a1b8750ca"} Jan 20 18:28:23 crc kubenswrapper[4558]: I0120 18:28:23.003624 4558 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 20 18:28:23 crc kubenswrapper[4558]: I0120 18:28:23.567457 4558 scope.go:117] "RemoveContainer" containerID="814c7f4e012758db904c630a7832be54bd493eba5362670666ad7cbcf498af98" Jan 20 18:28:23 crc kubenswrapper[4558]: E0120 18:28:23.567824 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:28:24 crc kubenswrapper[4558]: I0120 18:28:24.010698 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bqzcj" event={"ID":"70b8a0e5-67f1-4791-92b8-aba3cc0b7472","Type":"ContainerStarted","Data":"cb4698c4e0eb79a5bf83f5d568ec00fa5f61d7fb5a5eba9c3b709b692c2593ba"} Jan 20 18:28:25 crc 
kubenswrapper[4558]: I0120 18:28:25.019133 4558 generic.go:334] "Generic (PLEG): container finished" podID="70b8a0e5-67f1-4791-92b8-aba3cc0b7472" containerID="cb4698c4e0eb79a5bf83f5d568ec00fa5f61d7fb5a5eba9c3b709b692c2593ba" exitCode=0 Jan 20 18:28:25 crc kubenswrapper[4558]: I0120 18:28:25.019192 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bqzcj" event={"ID":"70b8a0e5-67f1-4791-92b8-aba3cc0b7472","Type":"ContainerDied","Data":"cb4698c4e0eb79a5bf83f5d568ec00fa5f61d7fb5a5eba9c3b709b692c2593ba"} Jan 20 18:28:26 crc kubenswrapper[4558]: I0120 18:28:26.030766 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bqzcj" event={"ID":"70b8a0e5-67f1-4791-92b8-aba3cc0b7472","Type":"ContainerStarted","Data":"e1902f231787f20addc8168c163dbc36f77add9e1d2533b650e7dcf7bf34af0c"} Jan 20 18:28:26 crc kubenswrapper[4558]: I0120 18:28:26.065875 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-bqzcj" podStartSLOduration=2.539775985 podStartE2EDuration="5.065851114s" podCreationTimestamp="2026-01-20 18:28:21 +0000 UTC" firstStartedPulling="2026-01-20 18:28:23.00339699 +0000 UTC m=+6396.763734957" lastFinishedPulling="2026-01-20 18:28:25.529472118 +0000 UTC m=+6399.289810086" observedRunningTime="2026-01-20 18:28:26.060884956 +0000 UTC m=+6399.821222914" watchObservedRunningTime="2026-01-20 18:28:26.065851114 +0000 UTC m=+6399.826189081" Jan 20 18:28:31 crc kubenswrapper[4558]: I0120 18:28:31.480414 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-bqzcj" Jan 20 18:28:31 crc kubenswrapper[4558]: I0120 18:28:31.480870 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-bqzcj" Jan 20 18:28:31 crc kubenswrapper[4558]: I0120 18:28:31.522352 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-bqzcj" Jan 20 18:28:32 crc kubenswrapper[4558]: I0120 18:28:32.112041 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-bqzcj" Jan 20 18:28:33 crc kubenswrapper[4558]: I0120 18:28:33.152020 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-bqzcj"] Jan 20 18:28:34 crc kubenswrapper[4558]: I0120 18:28:34.092132 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-bqzcj" podUID="70b8a0e5-67f1-4791-92b8-aba3cc0b7472" containerName="registry-server" containerID="cri-o://e1902f231787f20addc8168c163dbc36f77add9e1d2533b650e7dcf7bf34af0c" gracePeriod=2 Jan 20 18:28:34 crc kubenswrapper[4558]: I0120 18:28:34.462092 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-bqzcj" Jan 20 18:28:34 crc kubenswrapper[4558]: I0120 18:28:34.591055 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/70b8a0e5-67f1-4791-92b8-aba3cc0b7472-catalog-content\") pod \"70b8a0e5-67f1-4791-92b8-aba3cc0b7472\" (UID: \"70b8a0e5-67f1-4791-92b8-aba3cc0b7472\") " Jan 20 18:28:34 crc kubenswrapper[4558]: I0120 18:28:34.591187 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/70b8a0e5-67f1-4791-92b8-aba3cc0b7472-utilities\") pod \"70b8a0e5-67f1-4791-92b8-aba3cc0b7472\" (UID: \"70b8a0e5-67f1-4791-92b8-aba3cc0b7472\") " Jan 20 18:28:34 crc kubenswrapper[4558]: I0120 18:28:34.591376 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vb9mq\" (UniqueName: \"kubernetes.io/projected/70b8a0e5-67f1-4791-92b8-aba3cc0b7472-kube-api-access-vb9mq\") pod \"70b8a0e5-67f1-4791-92b8-aba3cc0b7472\" (UID: \"70b8a0e5-67f1-4791-92b8-aba3cc0b7472\") " Jan 20 18:28:34 crc kubenswrapper[4558]: I0120 18:28:34.592450 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/70b8a0e5-67f1-4791-92b8-aba3cc0b7472-utilities" (OuterVolumeSpecName: "utilities") pod "70b8a0e5-67f1-4791-92b8-aba3cc0b7472" (UID: "70b8a0e5-67f1-4791-92b8-aba3cc0b7472"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:28:34 crc kubenswrapper[4558]: I0120 18:28:34.592684 4558 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/70b8a0e5-67f1-4791-92b8-aba3cc0b7472-utilities\") on node \"crc\" DevicePath \"\"" Jan 20 18:28:34 crc kubenswrapper[4558]: I0120 18:28:34.598752 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/70b8a0e5-67f1-4791-92b8-aba3cc0b7472-kube-api-access-vb9mq" (OuterVolumeSpecName: "kube-api-access-vb9mq") pod "70b8a0e5-67f1-4791-92b8-aba3cc0b7472" (UID: "70b8a0e5-67f1-4791-92b8-aba3cc0b7472"). InnerVolumeSpecName "kube-api-access-vb9mq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:28:34 crc kubenswrapper[4558]: I0120 18:28:34.694465 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vb9mq\" (UniqueName: \"kubernetes.io/projected/70b8a0e5-67f1-4791-92b8-aba3cc0b7472-kube-api-access-vb9mq\") on node \"crc\" DevicePath \"\"" Jan 20 18:28:35 crc kubenswrapper[4558]: I0120 18:28:35.101388 4558 generic.go:334] "Generic (PLEG): container finished" podID="70b8a0e5-67f1-4791-92b8-aba3cc0b7472" containerID="e1902f231787f20addc8168c163dbc36f77add9e1d2533b650e7dcf7bf34af0c" exitCode=0 Jan 20 18:28:35 crc kubenswrapper[4558]: I0120 18:28:35.101482 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bqzcj" event={"ID":"70b8a0e5-67f1-4791-92b8-aba3cc0b7472","Type":"ContainerDied","Data":"e1902f231787f20addc8168c163dbc36f77add9e1d2533b650e7dcf7bf34af0c"} Jan 20 18:28:35 crc kubenswrapper[4558]: I0120 18:28:35.101510 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-bqzcj" Jan 20 18:28:35 crc kubenswrapper[4558]: I0120 18:28:35.101532 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bqzcj" event={"ID":"70b8a0e5-67f1-4791-92b8-aba3cc0b7472","Type":"ContainerDied","Data":"bc1519049c5eae2e2cadb3f0378cdb0ce4718b015f79dadf91c8419e6f3d7915"} Jan 20 18:28:35 crc kubenswrapper[4558]: I0120 18:28:35.101556 4558 scope.go:117] "RemoveContainer" containerID="e1902f231787f20addc8168c163dbc36f77add9e1d2533b650e7dcf7bf34af0c" Jan 20 18:28:35 crc kubenswrapper[4558]: I0120 18:28:35.121054 4558 scope.go:117] "RemoveContainer" containerID="cb4698c4e0eb79a5bf83f5d568ec00fa5f61d7fb5a5eba9c3b709b692c2593ba" Jan 20 18:28:35 crc kubenswrapper[4558]: I0120 18:28:35.140100 4558 scope.go:117] "RemoveContainer" containerID="033c15cf417608cde2c8d59a6cae60f9ee597970abc4935ba4bdb21a1b8750ca" Jan 20 18:28:35 crc kubenswrapper[4558]: I0120 18:28:35.164531 4558 scope.go:117] "RemoveContainer" containerID="e1902f231787f20addc8168c163dbc36f77add9e1d2533b650e7dcf7bf34af0c" Jan 20 18:28:35 crc kubenswrapper[4558]: E0120 18:28:35.164849 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e1902f231787f20addc8168c163dbc36f77add9e1d2533b650e7dcf7bf34af0c\": container with ID starting with e1902f231787f20addc8168c163dbc36f77add9e1d2533b650e7dcf7bf34af0c not found: ID does not exist" containerID="e1902f231787f20addc8168c163dbc36f77add9e1d2533b650e7dcf7bf34af0c" Jan 20 18:28:35 crc kubenswrapper[4558]: I0120 18:28:35.164891 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e1902f231787f20addc8168c163dbc36f77add9e1d2533b650e7dcf7bf34af0c"} err="failed to get container status \"e1902f231787f20addc8168c163dbc36f77add9e1d2533b650e7dcf7bf34af0c\": rpc error: code = NotFound desc = could not find container \"e1902f231787f20addc8168c163dbc36f77add9e1d2533b650e7dcf7bf34af0c\": container with ID starting with e1902f231787f20addc8168c163dbc36f77add9e1d2533b650e7dcf7bf34af0c not found: ID does not exist" Jan 20 18:28:35 crc kubenswrapper[4558]: I0120 18:28:35.164921 4558 scope.go:117] "RemoveContainer" containerID="cb4698c4e0eb79a5bf83f5d568ec00fa5f61d7fb5a5eba9c3b709b692c2593ba" Jan 20 18:28:35 crc kubenswrapper[4558]: E0120 18:28:35.165224 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cb4698c4e0eb79a5bf83f5d568ec00fa5f61d7fb5a5eba9c3b709b692c2593ba\": container with ID starting with cb4698c4e0eb79a5bf83f5d568ec00fa5f61d7fb5a5eba9c3b709b692c2593ba not found: ID does not exist" containerID="cb4698c4e0eb79a5bf83f5d568ec00fa5f61d7fb5a5eba9c3b709b692c2593ba" Jan 20 18:28:35 crc kubenswrapper[4558]: I0120 18:28:35.165272 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cb4698c4e0eb79a5bf83f5d568ec00fa5f61d7fb5a5eba9c3b709b692c2593ba"} err="failed to get container status \"cb4698c4e0eb79a5bf83f5d568ec00fa5f61d7fb5a5eba9c3b709b692c2593ba\": rpc error: code = NotFound desc = could not find container \"cb4698c4e0eb79a5bf83f5d568ec00fa5f61d7fb5a5eba9c3b709b692c2593ba\": container with ID starting with cb4698c4e0eb79a5bf83f5d568ec00fa5f61d7fb5a5eba9c3b709b692c2593ba not found: ID does not exist" Jan 20 18:28:35 crc kubenswrapper[4558]: I0120 18:28:35.165304 4558 scope.go:117] "RemoveContainer" 
containerID="033c15cf417608cde2c8d59a6cae60f9ee597970abc4935ba4bdb21a1b8750ca" Jan 20 18:28:35 crc kubenswrapper[4558]: E0120 18:28:35.165538 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"033c15cf417608cde2c8d59a6cae60f9ee597970abc4935ba4bdb21a1b8750ca\": container with ID starting with 033c15cf417608cde2c8d59a6cae60f9ee597970abc4935ba4bdb21a1b8750ca not found: ID does not exist" containerID="033c15cf417608cde2c8d59a6cae60f9ee597970abc4935ba4bdb21a1b8750ca" Jan 20 18:28:35 crc kubenswrapper[4558]: I0120 18:28:35.165564 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"033c15cf417608cde2c8d59a6cae60f9ee597970abc4935ba4bdb21a1b8750ca"} err="failed to get container status \"033c15cf417608cde2c8d59a6cae60f9ee597970abc4935ba4bdb21a1b8750ca\": rpc error: code = NotFound desc = could not find container \"033c15cf417608cde2c8d59a6cae60f9ee597970abc4935ba4bdb21a1b8750ca\": container with ID starting with 033c15cf417608cde2c8d59a6cae60f9ee597970abc4935ba4bdb21a1b8750ca not found: ID does not exist" Jan 20 18:28:35 crc kubenswrapper[4558]: I0120 18:28:35.580959 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/70b8a0e5-67f1-4791-92b8-aba3cc0b7472-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "70b8a0e5-67f1-4791-92b8-aba3cc0b7472" (UID: "70b8a0e5-67f1-4791-92b8-aba3cc0b7472"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:28:35 crc kubenswrapper[4558]: I0120 18:28:35.607944 4558 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/70b8a0e5-67f1-4791-92b8-aba3cc0b7472-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 20 18:28:35 crc kubenswrapper[4558]: I0120 18:28:35.733370 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-bqzcj"] Jan 20 18:28:35 crc kubenswrapper[4558]: I0120 18:28:35.738371 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-bqzcj"] Jan 20 18:28:36 crc kubenswrapper[4558]: I0120 18:28:36.572320 4558 scope.go:117] "RemoveContainer" containerID="814c7f4e012758db904c630a7832be54bd493eba5362670666ad7cbcf498af98" Jan 20 18:28:36 crc kubenswrapper[4558]: E0120 18:28:36.572671 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:28:36 crc kubenswrapper[4558]: I0120 18:28:36.576525 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="70b8a0e5-67f1-4791-92b8-aba3cc0b7472" path="/var/lib/kubelet/pods/70b8a0e5-67f1-4791-92b8-aba3cc0b7472/volumes" Jan 20 18:28:50 crc kubenswrapper[4558]: I0120 18:28:50.566843 4558 scope.go:117] "RemoveContainer" containerID="814c7f4e012758db904c630a7832be54bd493eba5362670666ad7cbcf498af98" Jan 20 18:28:50 crc kubenswrapper[4558]: E0120 18:28:50.567920 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:29:05 crc kubenswrapper[4558]: I0120 18:29:05.566362 4558 scope.go:117] "RemoveContainer" containerID="814c7f4e012758db904c630a7832be54bd493eba5362670666ad7cbcf498af98" Jan 20 18:29:05 crc kubenswrapper[4558]: E0120 18:29:05.567188 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:29:16 crc kubenswrapper[4558]: I0120 18:29:16.574242 4558 scope.go:117] "RemoveContainer" containerID="814c7f4e012758db904c630a7832be54bd493eba5362670666ad7cbcf498af98" Jan 20 18:29:16 crc kubenswrapper[4558]: E0120 18:29:16.575425 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:29:30 crc kubenswrapper[4558]: I0120 18:29:30.566620 4558 scope.go:117] "RemoveContainer" containerID="814c7f4e012758db904c630a7832be54bd493eba5362670666ad7cbcf498af98" Jan 20 18:29:30 crc kubenswrapper[4558]: E0120 18:29:30.567668 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:29:37 crc kubenswrapper[4558]: I0120 18:29:37.657235 4558 scope.go:117] "RemoveContainer" containerID="4ecbc6be3564d8338b4185ca1fdb642003ffcb46d70cc9b885b85f2aaccb1a94" Jan 20 18:29:37 crc kubenswrapper[4558]: I0120 18:29:37.680157 4558 scope.go:117] "RemoveContainer" containerID="67d36da5c123616fc1171727d20cabb6425cd135b3ab044d957ff405bc749a08" Jan 20 18:29:42 crc kubenswrapper[4558]: I0120 18:29:42.566401 4558 scope.go:117] "RemoveContainer" containerID="814c7f4e012758db904c630a7832be54bd493eba5362670666ad7cbcf498af98" Jan 20 18:29:42 crc kubenswrapper[4558]: E0120 18:29:42.567020 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:29:54 crc kubenswrapper[4558]: I0120 18:29:54.566115 4558 scope.go:117] "RemoveContainer" containerID="814c7f4e012758db904c630a7832be54bd493eba5362670666ad7cbcf498af98" Jan 20 18:29:54 crc kubenswrapper[4558]: E0120 18:29:54.567108 4558 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:30:00 crc kubenswrapper[4558]: I0120 18:30:00.153867 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29482230-l98wg"] Jan 20 18:30:00 crc kubenswrapper[4558]: E0120 18:30:00.154718 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70b8a0e5-67f1-4791-92b8-aba3cc0b7472" containerName="extract-content" Jan 20 18:30:00 crc kubenswrapper[4558]: I0120 18:30:00.154733 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="70b8a0e5-67f1-4791-92b8-aba3cc0b7472" containerName="extract-content" Jan 20 18:30:00 crc kubenswrapper[4558]: E0120 18:30:00.154751 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70b8a0e5-67f1-4791-92b8-aba3cc0b7472" containerName="extract-utilities" Jan 20 18:30:00 crc kubenswrapper[4558]: I0120 18:30:00.154759 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="70b8a0e5-67f1-4791-92b8-aba3cc0b7472" containerName="extract-utilities" Jan 20 18:30:00 crc kubenswrapper[4558]: E0120 18:30:00.154769 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70b8a0e5-67f1-4791-92b8-aba3cc0b7472" containerName="registry-server" Jan 20 18:30:00 crc kubenswrapper[4558]: I0120 18:30:00.154778 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="70b8a0e5-67f1-4791-92b8-aba3cc0b7472" containerName="registry-server" Jan 20 18:30:00 crc kubenswrapper[4558]: I0120 18:30:00.154932 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="70b8a0e5-67f1-4791-92b8-aba3cc0b7472" containerName="registry-server" Jan 20 18:30:00 crc kubenswrapper[4558]: I0120 18:30:00.155652 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29482230-l98wg" Jan 20 18:30:00 crc kubenswrapper[4558]: I0120 18:30:00.158208 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 20 18:30:00 crc kubenswrapper[4558]: I0120 18:30:00.158684 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 20 18:30:00 crc kubenswrapper[4558]: I0120 18:30:00.166295 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29482230-l98wg"] Jan 20 18:30:00 crc kubenswrapper[4558]: I0120 18:30:00.319591 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tfs4z\" (UniqueName: \"kubernetes.io/projected/c85e90ed-1d98-4b04-b918-512a6cc63d69-kube-api-access-tfs4z\") pod \"collect-profiles-29482230-l98wg\" (UID: \"c85e90ed-1d98-4b04-b918-512a6cc63d69\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482230-l98wg" Jan 20 18:30:00 crc kubenswrapper[4558]: I0120 18:30:00.319759 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c85e90ed-1d98-4b04-b918-512a6cc63d69-config-volume\") pod \"collect-profiles-29482230-l98wg\" (UID: \"c85e90ed-1d98-4b04-b918-512a6cc63d69\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482230-l98wg" Jan 20 18:30:00 crc kubenswrapper[4558]: I0120 18:30:00.319802 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c85e90ed-1d98-4b04-b918-512a6cc63d69-secret-volume\") pod \"collect-profiles-29482230-l98wg\" (UID: \"c85e90ed-1d98-4b04-b918-512a6cc63d69\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482230-l98wg" Jan 20 18:30:00 crc kubenswrapper[4558]: I0120 18:30:00.421387 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c85e90ed-1d98-4b04-b918-512a6cc63d69-config-volume\") pod \"collect-profiles-29482230-l98wg\" (UID: \"c85e90ed-1d98-4b04-b918-512a6cc63d69\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482230-l98wg" Jan 20 18:30:00 crc kubenswrapper[4558]: I0120 18:30:00.421442 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c85e90ed-1d98-4b04-b918-512a6cc63d69-secret-volume\") pod \"collect-profiles-29482230-l98wg\" (UID: \"c85e90ed-1d98-4b04-b918-512a6cc63d69\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482230-l98wg" Jan 20 18:30:00 crc kubenswrapper[4558]: I0120 18:30:00.421551 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tfs4z\" (UniqueName: \"kubernetes.io/projected/c85e90ed-1d98-4b04-b918-512a6cc63d69-kube-api-access-tfs4z\") pod \"collect-profiles-29482230-l98wg\" (UID: \"c85e90ed-1d98-4b04-b918-512a6cc63d69\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482230-l98wg" Jan 20 18:30:00 crc kubenswrapper[4558]: I0120 18:30:00.422399 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c85e90ed-1d98-4b04-b918-512a6cc63d69-config-volume\") pod 
\"collect-profiles-29482230-l98wg\" (UID: \"c85e90ed-1d98-4b04-b918-512a6cc63d69\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482230-l98wg" Jan 20 18:30:00 crc kubenswrapper[4558]: I0120 18:30:00.428348 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c85e90ed-1d98-4b04-b918-512a6cc63d69-secret-volume\") pod \"collect-profiles-29482230-l98wg\" (UID: \"c85e90ed-1d98-4b04-b918-512a6cc63d69\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482230-l98wg" Jan 20 18:30:00 crc kubenswrapper[4558]: I0120 18:30:00.437461 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tfs4z\" (UniqueName: \"kubernetes.io/projected/c85e90ed-1d98-4b04-b918-512a6cc63d69-kube-api-access-tfs4z\") pod \"collect-profiles-29482230-l98wg\" (UID: \"c85e90ed-1d98-4b04-b918-512a6cc63d69\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482230-l98wg" Jan 20 18:30:00 crc kubenswrapper[4558]: I0120 18:30:00.485140 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29482230-l98wg" Jan 20 18:30:00 crc kubenswrapper[4558]: I0120 18:30:00.868714 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29482230-l98wg"] Jan 20 18:30:01 crc kubenswrapper[4558]: I0120 18:30:01.809099 4558 generic.go:334] "Generic (PLEG): container finished" podID="c85e90ed-1d98-4b04-b918-512a6cc63d69" containerID="b99b8db89b917abd367dfe197c944dbb4fede5d28a200025cdeb7c1ed39d65d0" exitCode=0 Jan 20 18:30:01 crc kubenswrapper[4558]: I0120 18:30:01.809227 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29482230-l98wg" event={"ID":"c85e90ed-1d98-4b04-b918-512a6cc63d69","Type":"ContainerDied","Data":"b99b8db89b917abd367dfe197c944dbb4fede5d28a200025cdeb7c1ed39d65d0"} Jan 20 18:30:01 crc kubenswrapper[4558]: I0120 18:30:01.809499 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29482230-l98wg" event={"ID":"c85e90ed-1d98-4b04-b918-512a6cc63d69","Type":"ContainerStarted","Data":"78e606e434be2fb09ad2c1415c9c8da58a0e038dee9d8f48e7f42f6a63d92f62"} Jan 20 18:30:03 crc kubenswrapper[4558]: I0120 18:30:03.054519 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29482230-l98wg" Jan 20 18:30:03 crc kubenswrapper[4558]: I0120 18:30:03.172982 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tfs4z\" (UniqueName: \"kubernetes.io/projected/c85e90ed-1d98-4b04-b918-512a6cc63d69-kube-api-access-tfs4z\") pod \"c85e90ed-1d98-4b04-b918-512a6cc63d69\" (UID: \"c85e90ed-1d98-4b04-b918-512a6cc63d69\") " Jan 20 18:30:03 crc kubenswrapper[4558]: I0120 18:30:03.173038 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c85e90ed-1d98-4b04-b918-512a6cc63d69-secret-volume\") pod \"c85e90ed-1d98-4b04-b918-512a6cc63d69\" (UID: \"c85e90ed-1d98-4b04-b918-512a6cc63d69\") " Jan 20 18:30:03 crc kubenswrapper[4558]: I0120 18:30:03.173132 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c85e90ed-1d98-4b04-b918-512a6cc63d69-config-volume\") pod \"c85e90ed-1d98-4b04-b918-512a6cc63d69\" (UID: \"c85e90ed-1d98-4b04-b918-512a6cc63d69\") " Jan 20 18:30:03 crc kubenswrapper[4558]: I0120 18:30:03.174002 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c85e90ed-1d98-4b04-b918-512a6cc63d69-config-volume" (OuterVolumeSpecName: "config-volume") pod "c85e90ed-1d98-4b04-b918-512a6cc63d69" (UID: "c85e90ed-1d98-4b04-b918-512a6cc63d69"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:30:03 crc kubenswrapper[4558]: I0120 18:30:03.180003 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c85e90ed-1d98-4b04-b918-512a6cc63d69-kube-api-access-tfs4z" (OuterVolumeSpecName: "kube-api-access-tfs4z") pod "c85e90ed-1d98-4b04-b918-512a6cc63d69" (UID: "c85e90ed-1d98-4b04-b918-512a6cc63d69"). InnerVolumeSpecName "kube-api-access-tfs4z". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:30:03 crc kubenswrapper[4558]: I0120 18:30:03.180048 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c85e90ed-1d98-4b04-b918-512a6cc63d69-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "c85e90ed-1d98-4b04-b918-512a6cc63d69" (UID: "c85e90ed-1d98-4b04-b918-512a6cc63d69"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:30:03 crc kubenswrapper[4558]: I0120 18:30:03.275307 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tfs4z\" (UniqueName: \"kubernetes.io/projected/c85e90ed-1d98-4b04-b918-512a6cc63d69-kube-api-access-tfs4z\") on node \"crc\" DevicePath \"\"" Jan 20 18:30:03 crc kubenswrapper[4558]: I0120 18:30:03.275347 4558 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c85e90ed-1d98-4b04-b918-512a6cc63d69-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 20 18:30:03 crc kubenswrapper[4558]: I0120 18:30:03.275362 4558 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c85e90ed-1d98-4b04-b918-512a6cc63d69-config-volume\") on node \"crc\" DevicePath \"\"" Jan 20 18:30:03 crc kubenswrapper[4558]: I0120 18:30:03.826183 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29482230-l98wg" event={"ID":"c85e90ed-1d98-4b04-b918-512a6cc63d69","Type":"ContainerDied","Data":"78e606e434be2fb09ad2c1415c9c8da58a0e038dee9d8f48e7f42f6a63d92f62"} Jan 20 18:30:03 crc kubenswrapper[4558]: I0120 18:30:03.826234 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="78e606e434be2fb09ad2c1415c9c8da58a0e038dee9d8f48e7f42f6a63d92f62" Jan 20 18:30:03 crc kubenswrapper[4558]: I0120 18:30:03.826307 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29482230-l98wg" Jan 20 18:30:04 crc kubenswrapper[4558]: I0120 18:30:04.119132 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29482185-zcxm2"] Jan 20 18:30:04 crc kubenswrapper[4558]: I0120 18:30:04.126542 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29482185-zcxm2"] Jan 20 18:30:04 crc kubenswrapper[4558]: I0120 18:30:04.575504 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="39ad7d98-b1fa-4d75-bbbb-75b74a26170c" path="/var/lib/kubelet/pods/39ad7d98-b1fa-4d75-bbbb-75b74a26170c/volumes" Jan 20 18:30:07 crc kubenswrapper[4558]: I0120 18:30:07.565787 4558 scope.go:117] "RemoveContainer" containerID="814c7f4e012758db904c630a7832be54bd493eba5362670666ad7cbcf498af98" Jan 20 18:30:07 crc kubenswrapper[4558]: E0120 18:30:07.566434 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:30:19 crc kubenswrapper[4558]: I0120 18:30:19.566241 4558 scope.go:117] "RemoveContainer" containerID="814c7f4e012758db904c630a7832be54bd493eba5362670666ad7cbcf498af98" Jan 20 18:30:19 crc kubenswrapper[4558]: E0120 18:30:19.567310 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:30:34 crc kubenswrapper[4558]: I0120 18:30:34.567406 4558 scope.go:117] "RemoveContainer" containerID="814c7f4e012758db904c630a7832be54bd493eba5362670666ad7cbcf498af98" Jan 20 18:30:34 crc kubenswrapper[4558]: E0120 18:30:34.568490 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:30:37 crc kubenswrapper[4558]: I0120 18:30:37.740894 4558 scope.go:117] "RemoveContainer" containerID="fb58c0194a8ac62cf3eb6870e16946219af99491571263c86ef45391a2d3982b" Jan 20 18:30:37 crc kubenswrapper[4558]: I0120 18:30:37.775405 4558 scope.go:117] "RemoveContainer" containerID="83506ee757a3b02cbab3e73d36a3f772b6e90226c69410c7ed54237f0ced2477" Jan 20 18:30:37 crc kubenswrapper[4558]: I0120 18:30:37.792200 4558 scope.go:117] "RemoveContainer" containerID="99682bfe9586bece779160cb0aa0bf66a5f5625a029c5947d715ab1d390c16e9" Jan 20 18:30:37 crc kubenswrapper[4558]: I0120 18:30:37.826336 4558 scope.go:117] "RemoveContainer" containerID="9f1fd3eb3fe362feea3c236618713670db2e56630ed7a68938966713c5f2b528" Jan 20 18:30:49 crc kubenswrapper[4558]: I0120 18:30:49.566059 4558 scope.go:117] "RemoveContainer" containerID="814c7f4e012758db904c630a7832be54bd493eba5362670666ad7cbcf498af98" Jan 20 18:30:49 crc kubenswrapper[4558]: E0120 18:30:49.566975 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:31:02 crc kubenswrapper[4558]: I0120 18:31:02.566737 4558 scope.go:117] "RemoveContainer" containerID="814c7f4e012758db904c630a7832be54bd493eba5362670666ad7cbcf498af98" Jan 20 18:31:02 crc kubenswrapper[4558]: E0120 18:31:02.567544 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:31:14 crc kubenswrapper[4558]: I0120 18:31:14.565917 4558 scope.go:117] "RemoveContainer" containerID="814c7f4e012758db904c630a7832be54bd493eba5362670666ad7cbcf498af98" Jan 20 18:31:14 crc kubenswrapper[4558]: E0120 18:31:14.566841 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:31:25 crc 
kubenswrapper[4558]: I0120 18:31:25.565345 4558 scope.go:117] "RemoveContainer" containerID="814c7f4e012758db904c630a7832be54bd493eba5362670666ad7cbcf498af98" Jan 20 18:31:25 crc kubenswrapper[4558]: E0120 18:31:25.567386 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:31:30 crc kubenswrapper[4558]: I0120 18:31:30.051814 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/root-account-create-update-vgq2z"] Jan 20 18:31:30 crc kubenswrapper[4558]: I0120 18:31:30.057974 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/root-account-create-update-vgq2z"] Jan 20 18:31:30 crc kubenswrapper[4558]: I0120 18:31:30.574271 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="363cbfda-7c1f-4556-a7bd-cec95198f89d" path="/var/lib/kubelet/pods/363cbfda-7c1f-4556-a7bd-cec95198f89d/volumes" Jan 20 18:31:37 crc kubenswrapper[4558]: I0120 18:31:37.909867 4558 scope.go:117] "RemoveContainer" containerID="e99229ece4762d87cf6f69056a1380e8f97a8831ded4a19b000a28f82d4b6241" Jan 20 18:31:39 crc kubenswrapper[4558]: I0120 18:31:39.566501 4558 scope.go:117] "RemoveContainer" containerID="814c7f4e012758db904c630a7832be54bd493eba5362670666ad7cbcf498af98" Jan 20 18:31:40 crc kubenswrapper[4558]: I0120 18:31:40.594462 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerStarted","Data":"5858873a8a511c0bf9fb428796875a79a6b6fce6a3367ceb7e77c36d25d6f8a7"} Jan 20 18:33:57 crc kubenswrapper[4558]: I0120 18:33:57.330317 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 18:33:57 crc kubenswrapper[4558]: I0120 18:33:57.330838 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 18:34:27 crc kubenswrapper[4558]: I0120 18:34:27.330503 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 18:34:27 crc kubenswrapper[4558]: I0120 18:34:27.330908 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 18:34:44 crc kubenswrapper[4558]: I0120 18:34:44.036505 4558 kubelet.go:2437] "SyncLoop DELETE" 
source="api" pods=["keystone-kuttl-tests/keystone-fb15-account-create-update-n4k6h"] Jan 20 18:34:44 crc kubenswrapper[4558]: I0120 18:34:44.040762 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone-db-create-cjffv"] Jan 20 18:34:44 crc kubenswrapper[4558]: I0120 18:34:44.044339 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/keystone-fb15-account-create-update-n4k6h"] Jan 20 18:34:44 crc kubenswrapper[4558]: I0120 18:34:44.047878 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/keystone-db-create-cjffv"] Jan 20 18:34:44 crc kubenswrapper[4558]: I0120 18:34:44.575544 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4976d6a4-a2bb-4173-a5d7-cf308e164bcc" path="/var/lib/kubelet/pods/4976d6a4-a2bb-4173-a5d7-cf308e164bcc/volumes" Jan 20 18:34:44 crc kubenswrapper[4558]: I0120 18:34:44.576126 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="85e74f43-ec59-4b0f-b181-816b7517c590" path="/var/lib/kubelet/pods/85e74f43-ec59-4b0f-b181-816b7517c590/volumes" Jan 20 18:34:50 crc kubenswrapper[4558]: I0120 18:34:50.032777 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone-db-sync-x5n46"] Jan 20 18:34:50 crc kubenswrapper[4558]: I0120 18:34:50.037277 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/keystone-db-sync-x5n46"] Jan 20 18:34:50 crc kubenswrapper[4558]: I0120 18:34:50.578204 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="54cb4d58-1111-48fd-a8f1-c19f467594f6" path="/var/lib/kubelet/pods/54cb4d58-1111-48fd-a8f1-c19f467594f6/volumes" Jan 20 18:34:56 crc kubenswrapper[4558]: I0120 18:34:56.025388 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone-bootstrap-hcqbp"] Jan 20 18:34:56 crc kubenswrapper[4558]: I0120 18:34:56.030621 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/keystone-bootstrap-hcqbp"] Jan 20 18:34:56 crc kubenswrapper[4558]: I0120 18:34:56.575141 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d99f5d43-54f5-48da-ac98-337f5a8a9964" path="/var/lib/kubelet/pods/d99f5d43-54f5-48da-ac98-337f5a8a9964/volumes" Jan 20 18:34:57 crc kubenswrapper[4558]: I0120 18:34:57.329759 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 18:34:57 crc kubenswrapper[4558]: I0120 18:34:57.329847 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 18:34:57 crc kubenswrapper[4558]: I0120 18:34:57.329917 4558 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" Jan 20 18:34:57 crc kubenswrapper[4558]: I0120 18:34:57.330986 4558 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"5858873a8a511c0bf9fb428796875a79a6b6fce6a3367ceb7e77c36d25d6f8a7"} 
pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 20 18:34:57 crc kubenswrapper[4558]: I0120 18:34:57.331064 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" containerID="cri-o://5858873a8a511c0bf9fb428796875a79a6b6fce6a3367ceb7e77c36d25d6f8a7" gracePeriod=600 Jan 20 18:34:58 crc kubenswrapper[4558]: I0120 18:34:58.207412 4558 generic.go:334] "Generic (PLEG): container finished" podID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerID="5858873a8a511c0bf9fb428796875a79a6b6fce6a3367ceb7e77c36d25d6f8a7" exitCode=0 Jan 20 18:34:58 crc kubenswrapper[4558]: I0120 18:34:58.207482 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerDied","Data":"5858873a8a511c0bf9fb428796875a79a6b6fce6a3367ceb7e77c36d25d6f8a7"} Jan 20 18:34:58 crc kubenswrapper[4558]: I0120 18:34:58.208013 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerStarted","Data":"77ce04c6daee7d4f9b776bd1c80b448f9c4750d5cb5c7dd182ff28963a4bcf99"} Jan 20 18:34:58 crc kubenswrapper[4558]: I0120 18:34:58.208050 4558 scope.go:117] "RemoveContainer" containerID="814c7f4e012758db904c630a7832be54bd493eba5362670666ad7cbcf498af98" Jan 20 18:35:00 crc kubenswrapper[4558]: I0120 18:35:00.920838 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-9kd4g"] Jan 20 18:35:00 crc kubenswrapper[4558]: E0120 18:35:00.921631 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c85e90ed-1d98-4b04-b918-512a6cc63d69" containerName="collect-profiles" Jan 20 18:35:00 crc kubenswrapper[4558]: I0120 18:35:00.921647 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c85e90ed-1d98-4b04-b918-512a6cc63d69" containerName="collect-profiles" Jan 20 18:35:00 crc kubenswrapper[4558]: I0120 18:35:00.921815 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c85e90ed-1d98-4b04-b918-512a6cc63d69" containerName="collect-profiles" Jan 20 18:35:00 crc kubenswrapper[4558]: I0120 18:35:00.928810 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9kd4g" Jan 20 18:35:00 crc kubenswrapper[4558]: I0120 18:35:00.939487 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-9kd4g"] Jan 20 18:35:01 crc kubenswrapper[4558]: I0120 18:35:01.019135 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-99dsm\" (UniqueName: \"kubernetes.io/projected/7a066b4e-2534-4743-b11a-a917109ac2d9-kube-api-access-99dsm\") pod \"redhat-marketplace-9kd4g\" (UID: \"7a066b4e-2534-4743-b11a-a917109ac2d9\") " pod="openshift-marketplace/redhat-marketplace-9kd4g" Jan 20 18:35:01 crc kubenswrapper[4558]: I0120 18:35:01.019409 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7a066b4e-2534-4743-b11a-a917109ac2d9-catalog-content\") pod \"redhat-marketplace-9kd4g\" (UID: \"7a066b4e-2534-4743-b11a-a917109ac2d9\") " pod="openshift-marketplace/redhat-marketplace-9kd4g" Jan 20 18:35:01 crc kubenswrapper[4558]: I0120 18:35:01.019495 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7a066b4e-2534-4743-b11a-a917109ac2d9-utilities\") pod \"redhat-marketplace-9kd4g\" (UID: \"7a066b4e-2534-4743-b11a-a917109ac2d9\") " pod="openshift-marketplace/redhat-marketplace-9kd4g" Jan 20 18:35:01 crc kubenswrapper[4558]: I0120 18:35:01.121107 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-99dsm\" (UniqueName: \"kubernetes.io/projected/7a066b4e-2534-4743-b11a-a917109ac2d9-kube-api-access-99dsm\") pod \"redhat-marketplace-9kd4g\" (UID: \"7a066b4e-2534-4743-b11a-a917109ac2d9\") " pod="openshift-marketplace/redhat-marketplace-9kd4g" Jan 20 18:35:01 crc kubenswrapper[4558]: I0120 18:35:01.121182 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7a066b4e-2534-4743-b11a-a917109ac2d9-catalog-content\") pod \"redhat-marketplace-9kd4g\" (UID: \"7a066b4e-2534-4743-b11a-a917109ac2d9\") " pod="openshift-marketplace/redhat-marketplace-9kd4g" Jan 20 18:35:01 crc kubenswrapper[4558]: I0120 18:35:01.121203 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7a066b4e-2534-4743-b11a-a917109ac2d9-utilities\") pod \"redhat-marketplace-9kd4g\" (UID: \"7a066b4e-2534-4743-b11a-a917109ac2d9\") " pod="openshift-marketplace/redhat-marketplace-9kd4g" Jan 20 18:35:01 crc kubenswrapper[4558]: I0120 18:35:01.121680 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7a066b4e-2534-4743-b11a-a917109ac2d9-utilities\") pod \"redhat-marketplace-9kd4g\" (UID: \"7a066b4e-2534-4743-b11a-a917109ac2d9\") " pod="openshift-marketplace/redhat-marketplace-9kd4g" Jan 20 18:35:01 crc kubenswrapper[4558]: I0120 18:35:01.121864 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7a066b4e-2534-4743-b11a-a917109ac2d9-catalog-content\") pod \"redhat-marketplace-9kd4g\" (UID: \"7a066b4e-2534-4743-b11a-a917109ac2d9\") " pod="openshift-marketplace/redhat-marketplace-9kd4g" Jan 20 18:35:01 crc kubenswrapper[4558]: I0120 18:35:01.138087 4558 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-99dsm\" (UniqueName: \"kubernetes.io/projected/7a066b4e-2534-4743-b11a-a917109ac2d9-kube-api-access-99dsm\") pod \"redhat-marketplace-9kd4g\" (UID: \"7a066b4e-2534-4743-b11a-a917109ac2d9\") " pod="openshift-marketplace/redhat-marketplace-9kd4g" Jan 20 18:35:01 crc kubenswrapper[4558]: I0120 18:35:01.255839 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9kd4g" Jan 20 18:35:01 crc kubenswrapper[4558]: I0120 18:35:01.696319 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-9kd4g"] Jan 20 18:35:02 crc kubenswrapper[4558]: I0120 18:35:02.242568 4558 generic.go:334] "Generic (PLEG): container finished" podID="7a066b4e-2534-4743-b11a-a917109ac2d9" containerID="911abb0733456169eca37da50ace216165ad42d6a13c530800b15867dd062394" exitCode=0 Jan 20 18:35:02 crc kubenswrapper[4558]: I0120 18:35:02.242685 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9kd4g" event={"ID":"7a066b4e-2534-4743-b11a-a917109ac2d9","Type":"ContainerDied","Data":"911abb0733456169eca37da50ace216165ad42d6a13c530800b15867dd062394"} Jan 20 18:35:02 crc kubenswrapper[4558]: I0120 18:35:02.242907 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9kd4g" event={"ID":"7a066b4e-2534-4743-b11a-a917109ac2d9","Type":"ContainerStarted","Data":"df709493a96b30a77d7a63699dcd89c4db63b727df23418c99cb26368bfe3c6b"} Jan 20 18:35:02 crc kubenswrapper[4558]: I0120 18:35:02.245061 4558 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 20 18:35:03 crc kubenswrapper[4558]: I0120 18:35:03.251493 4558 generic.go:334] "Generic (PLEG): container finished" podID="7a066b4e-2534-4743-b11a-a917109ac2d9" containerID="f9011919ece39caca235a6430886680ae6923b849b9f3d8005fb1b053a73392d" exitCode=0 Jan 20 18:35:03 crc kubenswrapper[4558]: I0120 18:35:03.251601 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9kd4g" event={"ID":"7a066b4e-2534-4743-b11a-a917109ac2d9","Type":"ContainerDied","Data":"f9011919ece39caca235a6430886680ae6923b849b9f3d8005fb1b053a73392d"} Jan 20 18:35:04 crc kubenswrapper[4558]: I0120 18:35:04.262520 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9kd4g" event={"ID":"7a066b4e-2534-4743-b11a-a917109ac2d9","Type":"ContainerStarted","Data":"bb8bb08b896d4baf6ea6eff65a41278f59c6dd67c141478518d669dfacda9166"} Jan 20 18:35:04 crc kubenswrapper[4558]: I0120 18:35:04.282921 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-9kd4g" podStartSLOduration=2.77324768 podStartE2EDuration="4.282898535s" podCreationTimestamp="2026-01-20 18:35:00 +0000 UTC" firstStartedPulling="2026-01-20 18:35:02.244716815 +0000 UTC m=+6796.005054782" lastFinishedPulling="2026-01-20 18:35:03.75436767 +0000 UTC m=+6797.514705637" observedRunningTime="2026-01-20 18:35:04.280610913 +0000 UTC m=+6798.040948880" watchObservedRunningTime="2026-01-20 18:35:04.282898535 +0000 UTC m=+6798.043236502" Jan 20 18:35:07 crc kubenswrapper[4558]: I0120 18:35:07.308312 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-cvdnh"] Jan 20 18:35:07 crc kubenswrapper[4558]: I0120 18:35:07.313912 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-cvdnh" Jan 20 18:35:07 crc kubenswrapper[4558]: I0120 18:35:07.320706 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-cvdnh"] Jan 20 18:35:07 crc kubenswrapper[4558]: I0120 18:35:07.422187 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4dca0647-d787-4515-acc0-068305555705-utilities\") pod \"community-operators-cvdnh\" (UID: \"4dca0647-d787-4515-acc0-068305555705\") " pod="openshift-marketplace/community-operators-cvdnh" Jan 20 18:35:07 crc kubenswrapper[4558]: I0120 18:35:07.422341 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4dca0647-d787-4515-acc0-068305555705-catalog-content\") pod \"community-operators-cvdnh\" (UID: \"4dca0647-d787-4515-acc0-068305555705\") " pod="openshift-marketplace/community-operators-cvdnh" Jan 20 18:35:07 crc kubenswrapper[4558]: I0120 18:35:07.422471 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2mt8c\" (UniqueName: \"kubernetes.io/projected/4dca0647-d787-4515-acc0-068305555705-kube-api-access-2mt8c\") pod \"community-operators-cvdnh\" (UID: \"4dca0647-d787-4515-acc0-068305555705\") " pod="openshift-marketplace/community-operators-cvdnh" Jan 20 18:35:07 crc kubenswrapper[4558]: I0120 18:35:07.524968 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4dca0647-d787-4515-acc0-068305555705-utilities\") pod \"community-operators-cvdnh\" (UID: \"4dca0647-d787-4515-acc0-068305555705\") " pod="openshift-marketplace/community-operators-cvdnh" Jan 20 18:35:07 crc kubenswrapper[4558]: I0120 18:35:07.525042 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4dca0647-d787-4515-acc0-068305555705-catalog-content\") pod \"community-operators-cvdnh\" (UID: \"4dca0647-d787-4515-acc0-068305555705\") " pod="openshift-marketplace/community-operators-cvdnh" Jan 20 18:35:07 crc kubenswrapper[4558]: I0120 18:35:07.525140 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2mt8c\" (UniqueName: \"kubernetes.io/projected/4dca0647-d787-4515-acc0-068305555705-kube-api-access-2mt8c\") pod \"community-operators-cvdnh\" (UID: \"4dca0647-d787-4515-acc0-068305555705\") " pod="openshift-marketplace/community-operators-cvdnh" Jan 20 18:35:07 crc kubenswrapper[4558]: I0120 18:35:07.525770 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4dca0647-d787-4515-acc0-068305555705-utilities\") pod \"community-operators-cvdnh\" (UID: \"4dca0647-d787-4515-acc0-068305555705\") " pod="openshift-marketplace/community-operators-cvdnh" Jan 20 18:35:07 crc kubenswrapper[4558]: I0120 18:35:07.525849 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4dca0647-d787-4515-acc0-068305555705-catalog-content\") pod \"community-operators-cvdnh\" (UID: \"4dca0647-d787-4515-acc0-068305555705\") " pod="openshift-marketplace/community-operators-cvdnh" Jan 20 18:35:07 crc kubenswrapper[4558]: I0120 18:35:07.544084 4558 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-2mt8c\" (UniqueName: \"kubernetes.io/projected/4dca0647-d787-4515-acc0-068305555705-kube-api-access-2mt8c\") pod \"community-operators-cvdnh\" (UID: \"4dca0647-d787-4515-acc0-068305555705\") " pod="openshift-marketplace/community-operators-cvdnh" Jan 20 18:35:07 crc kubenswrapper[4558]: I0120 18:35:07.630440 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-cvdnh" Jan 20 18:35:08 crc kubenswrapper[4558]: I0120 18:35:08.071792 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-cvdnh"] Jan 20 18:35:08 crc kubenswrapper[4558]: I0120 18:35:08.305385 4558 generic.go:334] "Generic (PLEG): container finished" podID="4dca0647-d787-4515-acc0-068305555705" containerID="65257fc875644202c8af5615c987754cd8c463d05cbbf658917b0950f5153835" exitCode=0 Jan 20 18:35:08 crc kubenswrapper[4558]: I0120 18:35:08.305449 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cvdnh" event={"ID":"4dca0647-d787-4515-acc0-068305555705","Type":"ContainerDied","Data":"65257fc875644202c8af5615c987754cd8c463d05cbbf658917b0950f5153835"} Jan 20 18:35:08 crc kubenswrapper[4558]: I0120 18:35:08.305479 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cvdnh" event={"ID":"4dca0647-d787-4515-acc0-068305555705","Type":"ContainerStarted","Data":"ec0e41d8a5ac3775570d9436c7a7520038a06a49ac497dfb99ffbd61e2fff030"} Jan 20 18:35:09 crc kubenswrapper[4558]: I0120 18:35:09.325851 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cvdnh" event={"ID":"4dca0647-d787-4515-acc0-068305555705","Type":"ContainerStarted","Data":"1c2b7b0acfa40fe079e34a34418bec76a4e5b199e8032d01135a12bda35fedb3"} Jan 20 18:35:10 crc kubenswrapper[4558]: I0120 18:35:10.335293 4558 generic.go:334] "Generic (PLEG): container finished" podID="4dca0647-d787-4515-acc0-068305555705" containerID="1c2b7b0acfa40fe079e34a34418bec76a4e5b199e8032d01135a12bda35fedb3" exitCode=0 Jan 20 18:35:10 crc kubenswrapper[4558]: I0120 18:35:10.335351 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cvdnh" event={"ID":"4dca0647-d787-4515-acc0-068305555705","Type":"ContainerDied","Data":"1c2b7b0acfa40fe079e34a34418bec76a4e5b199e8032d01135a12bda35fedb3"} Jan 20 18:35:10 crc kubenswrapper[4558]: I0120 18:35:10.335444 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cvdnh" event={"ID":"4dca0647-d787-4515-acc0-068305555705","Type":"ContainerStarted","Data":"3935a5473d5b2a22b7e2f8ee8bbb890beb1dbfa414a53f7f85074db62691dc0b"} Jan 20 18:35:10 crc kubenswrapper[4558]: I0120 18:35:10.356959 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-cvdnh" podStartSLOduration=1.725553379 podStartE2EDuration="3.356941859s" podCreationTimestamp="2026-01-20 18:35:07 +0000 UTC" firstStartedPulling="2026-01-20 18:35:08.306952579 +0000 UTC m=+6802.067290536" lastFinishedPulling="2026-01-20 18:35:09.938341049 +0000 UTC m=+6803.698679016" observedRunningTime="2026-01-20 18:35:10.355380163 +0000 UTC m=+6804.115718130" watchObservedRunningTime="2026-01-20 18:35:10.356941859 +0000 UTC m=+6804.117279826" Jan 20 18:35:11 crc kubenswrapper[4558]: I0120 18:35:11.256766 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openshift-marketplace/redhat-marketplace-9kd4g" Jan 20 18:35:11 crc kubenswrapper[4558]: I0120 18:35:11.257192 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-9kd4g" Jan 20 18:35:11 crc kubenswrapper[4558]: I0120 18:35:11.293260 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-9kd4g" Jan 20 18:35:11 crc kubenswrapper[4558]: I0120 18:35:11.379301 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-9kd4g" Jan 20 18:35:13 crc kubenswrapper[4558]: I0120 18:35:13.493359 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-9kd4g"] Jan 20 18:35:13 crc kubenswrapper[4558]: I0120 18:35:13.494036 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-9kd4g" podUID="7a066b4e-2534-4743-b11a-a917109ac2d9" containerName="registry-server" containerID="cri-o://bb8bb08b896d4baf6ea6eff65a41278f59c6dd67c141478518d669dfacda9166" gracePeriod=2 Jan 20 18:35:13 crc kubenswrapper[4558]: I0120 18:35:13.838398 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9kd4g" Jan 20 18:35:13 crc kubenswrapper[4558]: I0120 18:35:13.913719 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7a066b4e-2534-4743-b11a-a917109ac2d9-utilities\") pod \"7a066b4e-2534-4743-b11a-a917109ac2d9\" (UID: \"7a066b4e-2534-4743-b11a-a917109ac2d9\") " Jan 20 18:35:13 crc kubenswrapper[4558]: I0120 18:35:13.913918 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7a066b4e-2534-4743-b11a-a917109ac2d9-catalog-content\") pod \"7a066b4e-2534-4743-b11a-a917109ac2d9\" (UID: \"7a066b4e-2534-4743-b11a-a917109ac2d9\") " Jan 20 18:35:13 crc kubenswrapper[4558]: I0120 18:35:13.913964 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-99dsm\" (UniqueName: \"kubernetes.io/projected/7a066b4e-2534-4743-b11a-a917109ac2d9-kube-api-access-99dsm\") pod \"7a066b4e-2534-4743-b11a-a917109ac2d9\" (UID: \"7a066b4e-2534-4743-b11a-a917109ac2d9\") " Jan 20 18:35:13 crc kubenswrapper[4558]: I0120 18:35:13.914633 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7a066b4e-2534-4743-b11a-a917109ac2d9-utilities" (OuterVolumeSpecName: "utilities") pod "7a066b4e-2534-4743-b11a-a917109ac2d9" (UID: "7a066b4e-2534-4743-b11a-a917109ac2d9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:35:13 crc kubenswrapper[4558]: I0120 18:35:13.919814 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7a066b4e-2534-4743-b11a-a917109ac2d9-kube-api-access-99dsm" (OuterVolumeSpecName: "kube-api-access-99dsm") pod "7a066b4e-2534-4743-b11a-a917109ac2d9" (UID: "7a066b4e-2534-4743-b11a-a917109ac2d9"). InnerVolumeSpecName "kube-api-access-99dsm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:35:13 crc kubenswrapper[4558]: I0120 18:35:13.934609 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7a066b4e-2534-4743-b11a-a917109ac2d9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7a066b4e-2534-4743-b11a-a917109ac2d9" (UID: "7a066b4e-2534-4743-b11a-a917109ac2d9"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:35:14 crc kubenswrapper[4558]: I0120 18:35:14.016594 4558 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7a066b4e-2534-4743-b11a-a917109ac2d9-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 20 18:35:14 crc kubenswrapper[4558]: I0120 18:35:14.016643 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-99dsm\" (UniqueName: \"kubernetes.io/projected/7a066b4e-2534-4743-b11a-a917109ac2d9-kube-api-access-99dsm\") on node \"crc\" DevicePath \"\"" Jan 20 18:35:14 crc kubenswrapper[4558]: I0120 18:35:14.016659 4558 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7a066b4e-2534-4743-b11a-a917109ac2d9-utilities\") on node \"crc\" DevicePath \"\"" Jan 20 18:35:14 crc kubenswrapper[4558]: I0120 18:35:14.372847 4558 generic.go:334] "Generic (PLEG): container finished" podID="7a066b4e-2534-4743-b11a-a917109ac2d9" containerID="bb8bb08b896d4baf6ea6eff65a41278f59c6dd67c141478518d669dfacda9166" exitCode=0 Jan 20 18:35:14 crc kubenswrapper[4558]: I0120 18:35:14.372922 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9kd4g" event={"ID":"7a066b4e-2534-4743-b11a-a917109ac2d9","Type":"ContainerDied","Data":"bb8bb08b896d4baf6ea6eff65a41278f59c6dd67c141478518d669dfacda9166"} Jan 20 18:35:14 crc kubenswrapper[4558]: I0120 18:35:14.372939 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9kd4g" Jan 20 18:35:14 crc kubenswrapper[4558]: I0120 18:35:14.372982 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9kd4g" event={"ID":"7a066b4e-2534-4743-b11a-a917109ac2d9","Type":"ContainerDied","Data":"df709493a96b30a77d7a63699dcd89c4db63b727df23418c99cb26368bfe3c6b"} Jan 20 18:35:14 crc kubenswrapper[4558]: I0120 18:35:14.373010 4558 scope.go:117] "RemoveContainer" containerID="bb8bb08b896d4baf6ea6eff65a41278f59c6dd67c141478518d669dfacda9166" Jan 20 18:35:14 crc kubenswrapper[4558]: I0120 18:35:14.391904 4558 scope.go:117] "RemoveContainer" containerID="f9011919ece39caca235a6430886680ae6923b849b9f3d8005fb1b053a73392d" Jan 20 18:35:14 crc kubenswrapper[4558]: I0120 18:35:14.414422 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-9kd4g"] Jan 20 18:35:14 crc kubenswrapper[4558]: I0120 18:35:14.416627 4558 scope.go:117] "RemoveContainer" containerID="911abb0733456169eca37da50ace216165ad42d6a13c530800b15867dd062394" Jan 20 18:35:14 crc kubenswrapper[4558]: I0120 18:35:14.418889 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-9kd4g"] Jan 20 18:35:14 crc kubenswrapper[4558]: I0120 18:35:14.439393 4558 scope.go:117] "RemoveContainer" containerID="bb8bb08b896d4baf6ea6eff65a41278f59c6dd67c141478518d669dfacda9166" Jan 20 18:35:14 crc kubenswrapper[4558]: E0120 18:35:14.439870 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bb8bb08b896d4baf6ea6eff65a41278f59c6dd67c141478518d669dfacda9166\": container with ID starting with bb8bb08b896d4baf6ea6eff65a41278f59c6dd67c141478518d669dfacda9166 not found: ID does not exist" containerID="bb8bb08b896d4baf6ea6eff65a41278f59c6dd67c141478518d669dfacda9166" Jan 20 18:35:14 crc kubenswrapper[4558]: I0120 18:35:14.439915 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bb8bb08b896d4baf6ea6eff65a41278f59c6dd67c141478518d669dfacda9166"} err="failed to get container status \"bb8bb08b896d4baf6ea6eff65a41278f59c6dd67c141478518d669dfacda9166\": rpc error: code = NotFound desc = could not find container \"bb8bb08b896d4baf6ea6eff65a41278f59c6dd67c141478518d669dfacda9166\": container with ID starting with bb8bb08b896d4baf6ea6eff65a41278f59c6dd67c141478518d669dfacda9166 not found: ID does not exist" Jan 20 18:35:14 crc kubenswrapper[4558]: I0120 18:35:14.439943 4558 scope.go:117] "RemoveContainer" containerID="f9011919ece39caca235a6430886680ae6923b849b9f3d8005fb1b053a73392d" Jan 20 18:35:14 crc kubenswrapper[4558]: E0120 18:35:14.440404 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f9011919ece39caca235a6430886680ae6923b849b9f3d8005fb1b053a73392d\": container with ID starting with f9011919ece39caca235a6430886680ae6923b849b9f3d8005fb1b053a73392d not found: ID does not exist" containerID="f9011919ece39caca235a6430886680ae6923b849b9f3d8005fb1b053a73392d" Jan 20 18:35:14 crc kubenswrapper[4558]: I0120 18:35:14.440447 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f9011919ece39caca235a6430886680ae6923b849b9f3d8005fb1b053a73392d"} err="failed to get container status \"f9011919ece39caca235a6430886680ae6923b849b9f3d8005fb1b053a73392d\": rpc error: code = NotFound desc = could not find 
container \"f9011919ece39caca235a6430886680ae6923b849b9f3d8005fb1b053a73392d\": container with ID starting with f9011919ece39caca235a6430886680ae6923b849b9f3d8005fb1b053a73392d not found: ID does not exist" Jan 20 18:35:14 crc kubenswrapper[4558]: I0120 18:35:14.440474 4558 scope.go:117] "RemoveContainer" containerID="911abb0733456169eca37da50ace216165ad42d6a13c530800b15867dd062394" Jan 20 18:35:14 crc kubenswrapper[4558]: E0120 18:35:14.440738 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"911abb0733456169eca37da50ace216165ad42d6a13c530800b15867dd062394\": container with ID starting with 911abb0733456169eca37da50ace216165ad42d6a13c530800b15867dd062394 not found: ID does not exist" containerID="911abb0733456169eca37da50ace216165ad42d6a13c530800b15867dd062394" Jan 20 18:35:14 crc kubenswrapper[4558]: I0120 18:35:14.440764 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"911abb0733456169eca37da50ace216165ad42d6a13c530800b15867dd062394"} err="failed to get container status \"911abb0733456169eca37da50ace216165ad42d6a13c530800b15867dd062394\": rpc error: code = NotFound desc = could not find container \"911abb0733456169eca37da50ace216165ad42d6a13c530800b15867dd062394\": container with ID starting with 911abb0733456169eca37da50ace216165ad42d6a13c530800b15867dd062394 not found: ID does not exist" Jan 20 18:35:14 crc kubenswrapper[4558]: I0120 18:35:14.574320 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7a066b4e-2534-4743-b11a-a917109ac2d9" path="/var/lib/kubelet/pods/7a066b4e-2534-4743-b11a-a917109ac2d9/volumes" Jan 20 18:35:17 crc kubenswrapper[4558]: I0120 18:35:17.631255 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-cvdnh" Jan 20 18:35:17 crc kubenswrapper[4558]: I0120 18:35:17.631615 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-cvdnh" Jan 20 18:35:17 crc kubenswrapper[4558]: I0120 18:35:17.666485 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-cvdnh" Jan 20 18:35:18 crc kubenswrapper[4558]: I0120 18:35:18.431299 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-cvdnh" Jan 20 18:35:18 crc kubenswrapper[4558]: I0120 18:35:18.467082 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-cvdnh"] Jan 20 18:35:20 crc kubenswrapper[4558]: I0120 18:35:20.414678 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-cvdnh" podUID="4dca0647-d787-4515-acc0-068305555705" containerName="registry-server" containerID="cri-o://3935a5473d5b2a22b7e2f8ee8bbb890beb1dbfa414a53f7f85074db62691dc0b" gracePeriod=2 Jan 20 18:35:20 crc kubenswrapper[4558]: I0120 18:35:20.753769 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-cvdnh" Jan 20 18:35:20 crc kubenswrapper[4558]: I0120 18:35:20.931643 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2mt8c\" (UniqueName: \"kubernetes.io/projected/4dca0647-d787-4515-acc0-068305555705-kube-api-access-2mt8c\") pod \"4dca0647-d787-4515-acc0-068305555705\" (UID: \"4dca0647-d787-4515-acc0-068305555705\") " Jan 20 18:35:20 crc kubenswrapper[4558]: I0120 18:35:20.932039 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4dca0647-d787-4515-acc0-068305555705-catalog-content\") pod \"4dca0647-d787-4515-acc0-068305555705\" (UID: \"4dca0647-d787-4515-acc0-068305555705\") " Jan 20 18:35:20 crc kubenswrapper[4558]: I0120 18:35:20.932198 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4dca0647-d787-4515-acc0-068305555705-utilities\") pod \"4dca0647-d787-4515-acc0-068305555705\" (UID: \"4dca0647-d787-4515-acc0-068305555705\") " Jan 20 18:35:20 crc kubenswrapper[4558]: I0120 18:35:20.932918 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4dca0647-d787-4515-acc0-068305555705-utilities" (OuterVolumeSpecName: "utilities") pod "4dca0647-d787-4515-acc0-068305555705" (UID: "4dca0647-d787-4515-acc0-068305555705"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:35:20 crc kubenswrapper[4558]: I0120 18:35:20.936854 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4dca0647-d787-4515-acc0-068305555705-kube-api-access-2mt8c" (OuterVolumeSpecName: "kube-api-access-2mt8c") pod "4dca0647-d787-4515-acc0-068305555705" (UID: "4dca0647-d787-4515-acc0-068305555705"). InnerVolumeSpecName "kube-api-access-2mt8c". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:35:20 crc kubenswrapper[4558]: I0120 18:35:20.974485 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4dca0647-d787-4515-acc0-068305555705-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4dca0647-d787-4515-acc0-068305555705" (UID: "4dca0647-d787-4515-acc0-068305555705"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:35:21 crc kubenswrapper[4558]: I0120 18:35:21.035224 4558 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4dca0647-d787-4515-acc0-068305555705-utilities\") on node \"crc\" DevicePath \"\"" Jan 20 18:35:21 crc kubenswrapper[4558]: I0120 18:35:21.035261 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2mt8c\" (UniqueName: \"kubernetes.io/projected/4dca0647-d787-4515-acc0-068305555705-kube-api-access-2mt8c\") on node \"crc\" DevicePath \"\"" Jan 20 18:35:21 crc kubenswrapper[4558]: I0120 18:35:21.035279 4558 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4dca0647-d787-4515-acc0-068305555705-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 20 18:35:21 crc kubenswrapper[4558]: I0120 18:35:21.424124 4558 generic.go:334] "Generic (PLEG): container finished" podID="4dca0647-d787-4515-acc0-068305555705" containerID="3935a5473d5b2a22b7e2f8ee8bbb890beb1dbfa414a53f7f85074db62691dc0b" exitCode=0 Jan 20 18:35:21 crc kubenswrapper[4558]: I0120 18:35:21.424194 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cvdnh" event={"ID":"4dca0647-d787-4515-acc0-068305555705","Type":"ContainerDied","Data":"3935a5473d5b2a22b7e2f8ee8bbb890beb1dbfa414a53f7f85074db62691dc0b"} Jan 20 18:35:21 crc kubenswrapper[4558]: I0120 18:35:21.424235 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cvdnh" event={"ID":"4dca0647-d787-4515-acc0-068305555705","Type":"ContainerDied","Data":"ec0e41d8a5ac3775570d9436c7a7520038a06a49ac497dfb99ffbd61e2fff030"} Jan 20 18:35:21 crc kubenswrapper[4558]: I0120 18:35:21.424257 4558 scope.go:117] "RemoveContainer" containerID="3935a5473d5b2a22b7e2f8ee8bbb890beb1dbfa414a53f7f85074db62691dc0b" Jan 20 18:35:21 crc kubenswrapper[4558]: I0120 18:35:21.424431 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-cvdnh" Jan 20 18:35:21 crc kubenswrapper[4558]: I0120 18:35:21.448807 4558 scope.go:117] "RemoveContainer" containerID="1c2b7b0acfa40fe079e34a34418bec76a4e5b199e8032d01135a12bda35fedb3" Jan 20 18:35:21 crc kubenswrapper[4558]: I0120 18:35:21.459794 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-cvdnh"] Jan 20 18:35:21 crc kubenswrapper[4558]: I0120 18:35:21.463892 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-cvdnh"] Jan 20 18:35:21 crc kubenswrapper[4558]: I0120 18:35:21.487786 4558 scope.go:117] "RemoveContainer" containerID="65257fc875644202c8af5615c987754cd8c463d05cbbf658917b0950f5153835" Jan 20 18:35:21 crc kubenswrapper[4558]: I0120 18:35:21.502416 4558 scope.go:117] "RemoveContainer" containerID="3935a5473d5b2a22b7e2f8ee8bbb890beb1dbfa414a53f7f85074db62691dc0b" Jan 20 18:35:21 crc kubenswrapper[4558]: E0120 18:35:21.502844 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3935a5473d5b2a22b7e2f8ee8bbb890beb1dbfa414a53f7f85074db62691dc0b\": container with ID starting with 3935a5473d5b2a22b7e2f8ee8bbb890beb1dbfa414a53f7f85074db62691dc0b not found: ID does not exist" containerID="3935a5473d5b2a22b7e2f8ee8bbb890beb1dbfa414a53f7f85074db62691dc0b" Jan 20 18:35:21 crc kubenswrapper[4558]: I0120 18:35:21.502883 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3935a5473d5b2a22b7e2f8ee8bbb890beb1dbfa414a53f7f85074db62691dc0b"} err="failed to get container status \"3935a5473d5b2a22b7e2f8ee8bbb890beb1dbfa414a53f7f85074db62691dc0b\": rpc error: code = NotFound desc = could not find container \"3935a5473d5b2a22b7e2f8ee8bbb890beb1dbfa414a53f7f85074db62691dc0b\": container with ID starting with 3935a5473d5b2a22b7e2f8ee8bbb890beb1dbfa414a53f7f85074db62691dc0b not found: ID does not exist" Jan 20 18:35:21 crc kubenswrapper[4558]: I0120 18:35:21.502913 4558 scope.go:117] "RemoveContainer" containerID="1c2b7b0acfa40fe079e34a34418bec76a4e5b199e8032d01135a12bda35fedb3" Jan 20 18:35:21 crc kubenswrapper[4558]: E0120 18:35:21.503396 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1c2b7b0acfa40fe079e34a34418bec76a4e5b199e8032d01135a12bda35fedb3\": container with ID starting with 1c2b7b0acfa40fe079e34a34418bec76a4e5b199e8032d01135a12bda35fedb3 not found: ID does not exist" containerID="1c2b7b0acfa40fe079e34a34418bec76a4e5b199e8032d01135a12bda35fedb3" Jan 20 18:35:21 crc kubenswrapper[4558]: I0120 18:35:21.503419 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1c2b7b0acfa40fe079e34a34418bec76a4e5b199e8032d01135a12bda35fedb3"} err="failed to get container status \"1c2b7b0acfa40fe079e34a34418bec76a4e5b199e8032d01135a12bda35fedb3\": rpc error: code = NotFound desc = could not find container \"1c2b7b0acfa40fe079e34a34418bec76a4e5b199e8032d01135a12bda35fedb3\": container with ID starting with 1c2b7b0acfa40fe079e34a34418bec76a4e5b199e8032d01135a12bda35fedb3 not found: ID does not exist" Jan 20 18:35:21 crc kubenswrapper[4558]: I0120 18:35:21.503433 4558 scope.go:117] "RemoveContainer" containerID="65257fc875644202c8af5615c987754cd8c463d05cbbf658917b0950f5153835" Jan 20 18:35:21 crc kubenswrapper[4558]: E0120 18:35:21.503715 4558 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"65257fc875644202c8af5615c987754cd8c463d05cbbf658917b0950f5153835\": container with ID starting with 65257fc875644202c8af5615c987754cd8c463d05cbbf658917b0950f5153835 not found: ID does not exist" containerID="65257fc875644202c8af5615c987754cd8c463d05cbbf658917b0950f5153835" Jan 20 18:35:21 crc kubenswrapper[4558]: I0120 18:35:21.503741 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"65257fc875644202c8af5615c987754cd8c463d05cbbf658917b0950f5153835"} err="failed to get container status \"65257fc875644202c8af5615c987754cd8c463d05cbbf658917b0950f5153835\": rpc error: code = NotFound desc = could not find container \"65257fc875644202c8af5615c987754cd8c463d05cbbf658917b0950f5153835\": container with ID starting with 65257fc875644202c8af5615c987754cd8c463d05cbbf658917b0950f5153835 not found: ID does not exist" Jan 20 18:35:22 crc kubenswrapper[4558]: I0120 18:35:22.575938 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4dca0647-d787-4515-acc0-068305555705" path="/var/lib/kubelet/pods/4dca0647-d787-4515-acc0-068305555705/volumes" Jan 20 18:35:28 crc kubenswrapper[4558]: I0120 18:35:28.046481 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-vvvhz"] Jan 20 18:35:28 crc kubenswrapper[4558]: E0120 18:35:28.047037 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a066b4e-2534-4743-b11a-a917109ac2d9" containerName="extract-utilities" Jan 20 18:35:28 crc kubenswrapper[4558]: I0120 18:35:28.047050 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a066b4e-2534-4743-b11a-a917109ac2d9" containerName="extract-utilities" Jan 20 18:35:28 crc kubenswrapper[4558]: E0120 18:35:28.047072 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a066b4e-2534-4743-b11a-a917109ac2d9" containerName="registry-server" Jan 20 18:35:28 crc kubenswrapper[4558]: I0120 18:35:28.047078 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a066b4e-2534-4743-b11a-a917109ac2d9" containerName="registry-server" Jan 20 18:35:28 crc kubenswrapper[4558]: E0120 18:35:28.047086 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4dca0647-d787-4515-acc0-068305555705" containerName="extract-content" Jan 20 18:35:28 crc kubenswrapper[4558]: I0120 18:35:28.047091 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4dca0647-d787-4515-acc0-068305555705" containerName="extract-content" Jan 20 18:35:28 crc kubenswrapper[4558]: E0120 18:35:28.047109 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4dca0647-d787-4515-acc0-068305555705" containerName="registry-server" Jan 20 18:35:28 crc kubenswrapper[4558]: I0120 18:35:28.047115 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4dca0647-d787-4515-acc0-068305555705" containerName="registry-server" Jan 20 18:35:28 crc kubenswrapper[4558]: E0120 18:35:28.047129 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a066b4e-2534-4743-b11a-a917109ac2d9" containerName="extract-content" Jan 20 18:35:28 crc kubenswrapper[4558]: I0120 18:35:28.047135 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a066b4e-2534-4743-b11a-a917109ac2d9" containerName="extract-content" Jan 20 18:35:28 crc kubenswrapper[4558]: E0120 18:35:28.047147 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4dca0647-d787-4515-acc0-068305555705" 
containerName="extract-utilities" Jan 20 18:35:28 crc kubenswrapper[4558]: I0120 18:35:28.047152 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4dca0647-d787-4515-acc0-068305555705" containerName="extract-utilities" Jan 20 18:35:28 crc kubenswrapper[4558]: I0120 18:35:28.047273 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a066b4e-2534-4743-b11a-a917109ac2d9" containerName="registry-server" Jan 20 18:35:28 crc kubenswrapper[4558]: I0120 18:35:28.047286 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="4dca0647-d787-4515-acc0-068305555705" containerName="registry-server" Jan 20 18:35:28 crc kubenswrapper[4558]: I0120 18:35:28.048179 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vvvhz" Jan 20 18:35:28 crc kubenswrapper[4558]: I0120 18:35:28.067227 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vvvhz"] Jan 20 18:35:28 crc kubenswrapper[4558]: I0120 18:35:28.137628 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4762a957-4883-48b5-a7d1-0110cd355637-utilities\") pod \"certified-operators-vvvhz\" (UID: \"4762a957-4883-48b5-a7d1-0110cd355637\") " pod="openshift-marketplace/certified-operators-vvvhz" Jan 20 18:35:28 crc kubenswrapper[4558]: I0120 18:35:28.137700 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-69vw7\" (UniqueName: \"kubernetes.io/projected/4762a957-4883-48b5-a7d1-0110cd355637-kube-api-access-69vw7\") pod \"certified-operators-vvvhz\" (UID: \"4762a957-4883-48b5-a7d1-0110cd355637\") " pod="openshift-marketplace/certified-operators-vvvhz" Jan 20 18:35:28 crc kubenswrapper[4558]: I0120 18:35:28.137783 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4762a957-4883-48b5-a7d1-0110cd355637-catalog-content\") pod \"certified-operators-vvvhz\" (UID: \"4762a957-4883-48b5-a7d1-0110cd355637\") " pod="openshift-marketplace/certified-operators-vvvhz" Jan 20 18:35:28 crc kubenswrapper[4558]: I0120 18:35:28.240107 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4762a957-4883-48b5-a7d1-0110cd355637-utilities\") pod \"certified-operators-vvvhz\" (UID: \"4762a957-4883-48b5-a7d1-0110cd355637\") " pod="openshift-marketplace/certified-operators-vvvhz" Jan 20 18:35:28 crc kubenswrapper[4558]: I0120 18:35:28.240174 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-69vw7\" (UniqueName: \"kubernetes.io/projected/4762a957-4883-48b5-a7d1-0110cd355637-kube-api-access-69vw7\") pod \"certified-operators-vvvhz\" (UID: \"4762a957-4883-48b5-a7d1-0110cd355637\") " pod="openshift-marketplace/certified-operators-vvvhz" Jan 20 18:35:28 crc kubenswrapper[4558]: I0120 18:35:28.240216 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4762a957-4883-48b5-a7d1-0110cd355637-catalog-content\") pod \"certified-operators-vvvhz\" (UID: \"4762a957-4883-48b5-a7d1-0110cd355637\") " pod="openshift-marketplace/certified-operators-vvvhz" Jan 20 18:35:28 crc kubenswrapper[4558]: I0120 18:35:28.240582 4558 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4762a957-4883-48b5-a7d1-0110cd355637-utilities\") pod \"certified-operators-vvvhz\" (UID: \"4762a957-4883-48b5-a7d1-0110cd355637\") " pod="openshift-marketplace/certified-operators-vvvhz" Jan 20 18:35:28 crc kubenswrapper[4558]: I0120 18:35:28.240598 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4762a957-4883-48b5-a7d1-0110cd355637-catalog-content\") pod \"certified-operators-vvvhz\" (UID: \"4762a957-4883-48b5-a7d1-0110cd355637\") " pod="openshift-marketplace/certified-operators-vvvhz" Jan 20 18:35:28 crc kubenswrapper[4558]: I0120 18:35:28.268437 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-69vw7\" (UniqueName: \"kubernetes.io/projected/4762a957-4883-48b5-a7d1-0110cd355637-kube-api-access-69vw7\") pod \"certified-operators-vvvhz\" (UID: \"4762a957-4883-48b5-a7d1-0110cd355637\") " pod="openshift-marketplace/certified-operators-vvvhz" Jan 20 18:35:28 crc kubenswrapper[4558]: I0120 18:35:28.365531 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vvvhz" Jan 20 18:35:29 crc kubenswrapper[4558]: I0120 18:35:29.211701 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vvvhz"] Jan 20 18:35:29 crc kubenswrapper[4558]: I0120 18:35:29.509593 4558 generic.go:334] "Generic (PLEG): container finished" podID="4762a957-4883-48b5-a7d1-0110cd355637" containerID="12031135cb640306745bc1c315621a15d8202dfd49c260a04a571d95f9d82983" exitCode=0 Jan 20 18:35:29 crc kubenswrapper[4558]: I0120 18:35:29.509654 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vvvhz" event={"ID":"4762a957-4883-48b5-a7d1-0110cd355637","Type":"ContainerDied","Data":"12031135cb640306745bc1c315621a15d8202dfd49c260a04a571d95f9d82983"} Jan 20 18:35:29 crc kubenswrapper[4558]: I0120 18:35:29.509692 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vvvhz" event={"ID":"4762a957-4883-48b5-a7d1-0110cd355637","Type":"ContainerStarted","Data":"bfd6df254554eb3f542c6f35a2daab83cccbf94a6f7ec11318ea1ed469432e11"} Jan 20 18:35:30 crc kubenswrapper[4558]: I0120 18:35:30.520793 4558 generic.go:334] "Generic (PLEG): container finished" podID="4762a957-4883-48b5-a7d1-0110cd355637" containerID="b9f975af0d87e56031510c7f8a48c5cd1a9073d22e39f06d3ddc217d4524b7b3" exitCode=0 Jan 20 18:35:30 crc kubenswrapper[4558]: I0120 18:35:30.520851 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vvvhz" event={"ID":"4762a957-4883-48b5-a7d1-0110cd355637","Type":"ContainerDied","Data":"b9f975af0d87e56031510c7f8a48c5cd1a9073d22e39f06d3ddc217d4524b7b3"} Jan 20 18:35:31 crc kubenswrapper[4558]: I0120 18:35:31.532631 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vvvhz" event={"ID":"4762a957-4883-48b5-a7d1-0110cd355637","Type":"ContainerStarted","Data":"a98c6a8de5a802f2c24a792589b6395416e80bce05fd9b7189c303166710721e"} Jan 20 18:35:31 crc kubenswrapper[4558]: I0120 18:35:31.568059 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-vvvhz" podStartSLOduration=2.020882225 podStartE2EDuration="3.568022098s" podCreationTimestamp="2026-01-20 18:35:28 +0000 UTC" 
firstStartedPulling="2026-01-20 18:35:29.512883997 +0000 UTC m=+6823.273221964" lastFinishedPulling="2026-01-20 18:35:31.06002387 +0000 UTC m=+6824.820361837" observedRunningTime="2026-01-20 18:35:31.562223375 +0000 UTC m=+6825.322561332" watchObservedRunningTime="2026-01-20 18:35:31.568022098 +0000 UTC m=+6825.328360065" Jan 20 18:35:36 crc kubenswrapper[4558]: I0120 18:35:36.638094 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/openstackclient"] Jan 20 18:35:36 crc kubenswrapper[4558]: I0120 18:35:36.639131 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="keystone-kuttl-tests/openstackclient" podUID="d4e83135-f3b4-43f8-90be-1a650701041b" containerName="openstackclient" containerID="cri-o://1122e6fe3eec40c296471ea3919f7bed1e9ee53385d916757231ac0e6c21cbea" gracePeriod=30 Jan 20 18:35:37 crc kubenswrapper[4558]: I0120 18:35:37.001533 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/openstackclient" Jan 20 18:35:37 crc kubenswrapper[4558]: I0120 18:35:37.191905 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/d4e83135-f3b4-43f8-90be-1a650701041b-openstack-config\") pod \"d4e83135-f3b4-43f8-90be-1a650701041b\" (UID: \"d4e83135-f3b4-43f8-90be-1a650701041b\") " Jan 20 18:35:37 crc kubenswrapper[4558]: I0120 18:35:37.192131 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/d4e83135-f3b4-43f8-90be-1a650701041b-openstack-config-secret\") pod \"d4e83135-f3b4-43f8-90be-1a650701041b\" (UID: \"d4e83135-f3b4-43f8-90be-1a650701041b\") " Jan 20 18:35:37 crc kubenswrapper[4558]: I0120 18:35:37.192296 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mdv8r\" (UniqueName: \"kubernetes.io/projected/d4e83135-f3b4-43f8-90be-1a650701041b-kube-api-access-mdv8r\") pod \"d4e83135-f3b4-43f8-90be-1a650701041b\" (UID: \"d4e83135-f3b4-43f8-90be-1a650701041b\") " Jan 20 18:35:37 crc kubenswrapper[4558]: I0120 18:35:37.199135 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d4e83135-f3b4-43f8-90be-1a650701041b-kube-api-access-mdv8r" (OuterVolumeSpecName: "kube-api-access-mdv8r") pod "d4e83135-f3b4-43f8-90be-1a650701041b" (UID: "d4e83135-f3b4-43f8-90be-1a650701041b"). InnerVolumeSpecName "kube-api-access-mdv8r". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:35:37 crc kubenswrapper[4558]: I0120 18:35:37.211782 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d4e83135-f3b4-43f8-90be-1a650701041b-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "d4e83135-f3b4-43f8-90be-1a650701041b" (UID: "d4e83135-f3b4-43f8-90be-1a650701041b"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:35:37 crc kubenswrapper[4558]: I0120 18:35:37.213443 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4e83135-f3b4-43f8-90be-1a650701041b-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "d4e83135-f3b4-43f8-90be-1a650701041b" (UID: "d4e83135-f3b4-43f8-90be-1a650701041b"). InnerVolumeSpecName "openstack-config-secret". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:35:37 crc kubenswrapper[4558]: I0120 18:35:37.295343 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mdv8r\" (UniqueName: \"kubernetes.io/projected/d4e83135-f3b4-43f8-90be-1a650701041b-kube-api-access-mdv8r\") on node \"crc\" DevicePath \"\"" Jan 20 18:35:37 crc kubenswrapper[4558]: I0120 18:35:37.295383 4558 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/d4e83135-f3b4-43f8-90be-1a650701041b-openstack-config\") on node \"crc\" DevicePath \"\"" Jan 20 18:35:37 crc kubenswrapper[4558]: I0120 18:35:37.295394 4558 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/d4e83135-f3b4-43f8-90be-1a650701041b-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Jan 20 18:35:37 crc kubenswrapper[4558]: I0120 18:35:37.581251 4558 generic.go:334] "Generic (PLEG): container finished" podID="d4e83135-f3b4-43f8-90be-1a650701041b" containerID="1122e6fe3eec40c296471ea3919f7bed1e9ee53385d916757231ac0e6c21cbea" exitCode=143 Jan 20 18:35:37 crc kubenswrapper[4558]: I0120 18:35:37.581297 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/openstackclient" event={"ID":"d4e83135-f3b4-43f8-90be-1a650701041b","Type":"ContainerDied","Data":"1122e6fe3eec40c296471ea3919f7bed1e9ee53385d916757231ac0e6c21cbea"} Jan 20 18:35:37 crc kubenswrapper[4558]: I0120 18:35:37.581336 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/openstackclient" event={"ID":"d4e83135-f3b4-43f8-90be-1a650701041b","Type":"ContainerDied","Data":"c4c8d46f0f078b0579ef4320048044d6c3280c97ec6976798f4f271ab2c77635"} Jan 20 18:35:37 crc kubenswrapper[4558]: I0120 18:35:37.581354 4558 scope.go:117] "RemoveContainer" containerID="1122e6fe3eec40c296471ea3919f7bed1e9ee53385d916757231ac0e6c21cbea" Jan 20 18:35:37 crc kubenswrapper[4558]: I0120 18:35:37.581539 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/openstackclient" Jan 20 18:35:37 crc kubenswrapper[4558]: I0120 18:35:37.600843 4558 scope.go:117] "RemoveContainer" containerID="1122e6fe3eec40c296471ea3919f7bed1e9ee53385d916757231ac0e6c21cbea" Jan 20 18:35:37 crc kubenswrapper[4558]: E0120 18:35:37.601708 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1122e6fe3eec40c296471ea3919f7bed1e9ee53385d916757231ac0e6c21cbea\": container with ID starting with 1122e6fe3eec40c296471ea3919f7bed1e9ee53385d916757231ac0e6c21cbea not found: ID does not exist" containerID="1122e6fe3eec40c296471ea3919f7bed1e9ee53385d916757231ac0e6c21cbea" Jan 20 18:35:37 crc kubenswrapper[4558]: I0120 18:35:37.601744 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1122e6fe3eec40c296471ea3919f7bed1e9ee53385d916757231ac0e6c21cbea"} err="failed to get container status \"1122e6fe3eec40c296471ea3919f7bed1e9ee53385d916757231ac0e6c21cbea\": rpc error: code = NotFound desc = could not find container \"1122e6fe3eec40c296471ea3919f7bed1e9ee53385d916757231ac0e6c21cbea\": container with ID starting with 1122e6fe3eec40c296471ea3919f7bed1e9ee53385d916757231ac0e6c21cbea not found: ID does not exist" Jan 20 18:35:37 crc kubenswrapper[4558]: I0120 18:35:37.605653 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/openstackclient"] Jan 20 18:35:37 crc kubenswrapper[4558]: I0120 18:35:37.610054 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/openstackclient"] Jan 20 18:35:37 crc kubenswrapper[4558]: I0120 18:35:37.725798 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone-5d5bd56788-rvvzp"] Jan 20 18:35:37 crc kubenswrapper[4558]: I0120 18:35:37.726799 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="keystone-kuttl-tests/keystone-5d5bd56788-rvvzp" podUID="d18169f1-a4d2-4189-9625-7024b03c64d3" containerName="keystone-api" containerID="cri-o://eca4d76757011d6cdfda2d1177ecd5e231c28e5a23800ae10da031e65eb65d40" gracePeriod=30 Jan 20 18:35:37 crc kubenswrapper[4558]: I0120 18:35:37.790112 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/keystonefb15-account-delete-2s29l"] Jan 20 18:35:37 crc kubenswrapper[4558]: E0120 18:35:37.790474 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4e83135-f3b4-43f8-90be-1a650701041b" containerName="openstackclient" Jan 20 18:35:37 crc kubenswrapper[4558]: I0120 18:35:37.790490 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4e83135-f3b4-43f8-90be-1a650701041b" containerName="openstackclient" Jan 20 18:35:37 crc kubenswrapper[4558]: I0120 18:35:37.790637 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4e83135-f3b4-43f8-90be-1a650701041b" containerName="openstackclient" Jan 20 18:35:37 crc kubenswrapper[4558]: I0120 18:35:37.798105 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystonefb15-account-delete-2s29l" Jan 20 18:35:37 crc kubenswrapper[4558]: I0120 18:35:37.798400 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystonefb15-account-delete-2s29l"] Jan 20 18:35:37 crc kubenswrapper[4558]: I0120 18:35:37.810177 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ca0974c0-e5f7-4a18-8c9e-fb4131e4270b-operator-scripts\") pod \"keystonefb15-account-delete-2s29l\" (UID: \"ca0974c0-e5f7-4a18-8c9e-fb4131e4270b\") " pod="keystone-kuttl-tests/keystonefb15-account-delete-2s29l" Jan 20 18:35:37 crc kubenswrapper[4558]: I0120 18:35:37.810347 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jclwq\" (UniqueName: \"kubernetes.io/projected/ca0974c0-e5f7-4a18-8c9e-fb4131e4270b-kube-api-access-jclwq\") pod \"keystonefb15-account-delete-2s29l\" (UID: \"ca0974c0-e5f7-4a18-8c9e-fb4131e4270b\") " pod="keystone-kuttl-tests/keystonefb15-account-delete-2s29l" Jan 20 18:35:37 crc kubenswrapper[4558]: I0120 18:35:37.915624 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jclwq\" (UniqueName: \"kubernetes.io/projected/ca0974c0-e5f7-4a18-8c9e-fb4131e4270b-kube-api-access-jclwq\") pod \"keystonefb15-account-delete-2s29l\" (UID: \"ca0974c0-e5f7-4a18-8c9e-fb4131e4270b\") " pod="keystone-kuttl-tests/keystonefb15-account-delete-2s29l" Jan 20 18:35:37 crc kubenswrapper[4558]: I0120 18:35:37.915917 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ca0974c0-e5f7-4a18-8c9e-fb4131e4270b-operator-scripts\") pod \"keystonefb15-account-delete-2s29l\" (UID: \"ca0974c0-e5f7-4a18-8c9e-fb4131e4270b\") " pod="keystone-kuttl-tests/keystonefb15-account-delete-2s29l" Jan 20 18:35:37 crc kubenswrapper[4558]: I0120 18:35:37.916636 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ca0974c0-e5f7-4a18-8c9e-fb4131e4270b-operator-scripts\") pod \"keystonefb15-account-delete-2s29l\" (UID: \"ca0974c0-e5f7-4a18-8c9e-fb4131e4270b\") " pod="keystone-kuttl-tests/keystonefb15-account-delete-2s29l" Jan 20 18:35:37 crc kubenswrapper[4558]: I0120 18:35:37.931259 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jclwq\" (UniqueName: \"kubernetes.io/projected/ca0974c0-e5f7-4a18-8c9e-fb4131e4270b-kube-api-access-jclwq\") pod \"keystonefb15-account-delete-2s29l\" (UID: \"ca0974c0-e5f7-4a18-8c9e-fb4131e4270b\") " pod="keystone-kuttl-tests/keystonefb15-account-delete-2s29l" Jan 20 18:35:37 crc kubenswrapper[4558]: I0120 18:35:37.994881 4558 scope.go:117] "RemoveContainer" containerID="ac65b430bfda96b78e8d4fbb1460402bb0f2329655e1e23dfc82f32a90b27394" Jan 20 18:35:38 crc kubenswrapper[4558]: I0120 18:35:38.024531 4558 scope.go:117] "RemoveContainer" containerID="06be374ebfeaff166d93b6ec35fe42760322347bc5246e2622a424a5602f3c2b" Jan 20 18:35:38 crc kubenswrapper[4558]: I0120 18:35:38.056886 4558 scope.go:117] "RemoveContainer" containerID="80a1987b638b799ff8812714b3baa10fda55c3b0f1fd62dfa51756501b7f37f0" Jan 20 18:35:38 crc kubenswrapper[4558]: I0120 18:35:38.075671 4558 scope.go:117] "RemoveContainer" containerID="b5bbe3e9a3db04e677faf3f7f7b95346a19fa660a82a2814feb3eb5cd2d96520" Jan 20 18:35:38 crc 
kubenswrapper[4558]: I0120 18:35:38.115203 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystonefb15-account-delete-2s29l" Jan 20 18:35:38 crc kubenswrapper[4558]: I0120 18:35:38.366426 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-vvvhz" Jan 20 18:35:38 crc kubenswrapper[4558]: I0120 18:35:38.366782 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-vvvhz" Jan 20 18:35:38 crc kubenswrapper[4558]: I0120 18:35:38.405307 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-vvvhz" Jan 20 18:35:38 crc kubenswrapper[4558]: I0120 18:35:38.507349 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystonefb15-account-delete-2s29l"] Jan 20 18:35:38 crc kubenswrapper[4558]: I0120 18:35:38.573447 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d4e83135-f3b4-43f8-90be-1a650701041b" path="/var/lib/kubelet/pods/d4e83135-f3b4-43f8-90be-1a650701041b/volumes" Jan 20 18:35:38 crc kubenswrapper[4558]: I0120 18:35:38.587311 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystonefb15-account-delete-2s29l" event={"ID":"ca0974c0-e5f7-4a18-8c9e-fb4131e4270b","Type":"ContainerStarted","Data":"b715894e035f43384c773be1ecc87323bdd6e7949fde66e490931018a1e5c370"} Jan 20 18:35:38 crc kubenswrapper[4558]: I0120 18:35:38.616359 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-vvvhz" Jan 20 18:35:38 crc kubenswrapper[4558]: I0120 18:35:38.668256 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-vvvhz"] Jan 20 18:35:39 crc kubenswrapper[4558]: I0120 18:35:39.598127 4558 generic.go:334] "Generic (PLEG): container finished" podID="ca0974c0-e5f7-4a18-8c9e-fb4131e4270b" containerID="581d77ede432386c7e0d9c4608223b26803351bad0061d13f5e060384b5ffaac" exitCode=0 Jan 20 18:35:39 crc kubenswrapper[4558]: I0120 18:35:39.598247 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystonefb15-account-delete-2s29l" event={"ID":"ca0974c0-e5f7-4a18-8c9e-fb4131e4270b","Type":"ContainerDied","Data":"581d77ede432386c7e0d9c4608223b26803351bad0061d13f5e060384b5ffaac"} Jan 20 18:35:40 crc kubenswrapper[4558]: I0120 18:35:40.607283 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-vvvhz" podUID="4762a957-4883-48b5-a7d1-0110cd355637" containerName="registry-server" containerID="cri-o://a98c6a8de5a802f2c24a792589b6395416e80bce05fd9b7189c303166710721e" gracePeriod=2 Jan 20 18:35:40 crc kubenswrapper[4558]: I0120 18:35:40.905544 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystonefb15-account-delete-2s29l" Jan 20 18:35:40 crc kubenswrapper[4558]: I0120 18:35:40.964324 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ca0974c0-e5f7-4a18-8c9e-fb4131e4270b-operator-scripts\") pod \"ca0974c0-e5f7-4a18-8c9e-fb4131e4270b\" (UID: \"ca0974c0-e5f7-4a18-8c9e-fb4131e4270b\") " Jan 20 18:35:40 crc kubenswrapper[4558]: I0120 18:35:40.964397 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jclwq\" (UniqueName: \"kubernetes.io/projected/ca0974c0-e5f7-4a18-8c9e-fb4131e4270b-kube-api-access-jclwq\") pod \"ca0974c0-e5f7-4a18-8c9e-fb4131e4270b\" (UID: \"ca0974c0-e5f7-4a18-8c9e-fb4131e4270b\") " Jan 20 18:35:40 crc kubenswrapper[4558]: I0120 18:35:40.965481 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ca0974c0-e5f7-4a18-8c9e-fb4131e4270b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ca0974c0-e5f7-4a18-8c9e-fb4131e4270b" (UID: "ca0974c0-e5f7-4a18-8c9e-fb4131e4270b"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:35:40 crc kubenswrapper[4558]: I0120 18:35:40.970869 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ca0974c0-e5f7-4a18-8c9e-fb4131e4270b-kube-api-access-jclwq" (OuterVolumeSpecName: "kube-api-access-jclwq") pod "ca0974c0-e5f7-4a18-8c9e-fb4131e4270b" (UID: "ca0974c0-e5f7-4a18-8c9e-fb4131e4270b"). InnerVolumeSpecName "kube-api-access-jclwq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:35:41 crc kubenswrapper[4558]: I0120 18:35:41.067137 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ca0974c0-e5f7-4a18-8c9e-fb4131e4270b-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 18:35:41 crc kubenswrapper[4558]: I0120 18:35:41.067193 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jclwq\" (UniqueName: \"kubernetes.io/projected/ca0974c0-e5f7-4a18-8c9e-fb4131e4270b-kube-api-access-jclwq\") on node \"crc\" DevicePath \"\"" Jan 20 18:35:41 crc kubenswrapper[4558]: I0120 18:35:41.087676 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-vvvhz" Jan 20 18:35:41 crc kubenswrapper[4558]: I0120 18:35:41.167712 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-69vw7\" (UniqueName: \"kubernetes.io/projected/4762a957-4883-48b5-a7d1-0110cd355637-kube-api-access-69vw7\") pod \"4762a957-4883-48b5-a7d1-0110cd355637\" (UID: \"4762a957-4883-48b5-a7d1-0110cd355637\") " Jan 20 18:35:41 crc kubenswrapper[4558]: I0120 18:35:41.167805 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4762a957-4883-48b5-a7d1-0110cd355637-utilities\") pod \"4762a957-4883-48b5-a7d1-0110cd355637\" (UID: \"4762a957-4883-48b5-a7d1-0110cd355637\") " Jan 20 18:35:41 crc kubenswrapper[4558]: I0120 18:35:41.167839 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4762a957-4883-48b5-a7d1-0110cd355637-catalog-content\") pod \"4762a957-4883-48b5-a7d1-0110cd355637\" (UID: \"4762a957-4883-48b5-a7d1-0110cd355637\") " Jan 20 18:35:41 crc kubenswrapper[4558]: I0120 18:35:41.169416 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4762a957-4883-48b5-a7d1-0110cd355637-utilities" (OuterVolumeSpecName: "utilities") pod "4762a957-4883-48b5-a7d1-0110cd355637" (UID: "4762a957-4883-48b5-a7d1-0110cd355637"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:35:41 crc kubenswrapper[4558]: I0120 18:35:41.171803 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4762a957-4883-48b5-a7d1-0110cd355637-kube-api-access-69vw7" (OuterVolumeSpecName: "kube-api-access-69vw7") pod "4762a957-4883-48b5-a7d1-0110cd355637" (UID: "4762a957-4883-48b5-a7d1-0110cd355637"). InnerVolumeSpecName "kube-api-access-69vw7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:35:41 crc kubenswrapper[4558]: I0120 18:35:41.203826 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4762a957-4883-48b5-a7d1-0110cd355637-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4762a957-4883-48b5-a7d1-0110cd355637" (UID: "4762a957-4883-48b5-a7d1-0110cd355637"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:35:41 crc kubenswrapper[4558]: I0120 18:35:41.271236 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-69vw7\" (UniqueName: \"kubernetes.io/projected/4762a957-4883-48b5-a7d1-0110cd355637-kube-api-access-69vw7\") on node \"crc\" DevicePath \"\"" Jan 20 18:35:41 crc kubenswrapper[4558]: I0120 18:35:41.271276 4558 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4762a957-4883-48b5-a7d1-0110cd355637-utilities\") on node \"crc\" DevicePath \"\"" Jan 20 18:35:41 crc kubenswrapper[4558]: I0120 18:35:41.271292 4558 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4762a957-4883-48b5-a7d1-0110cd355637-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 20 18:35:41 crc kubenswrapper[4558]: I0120 18:35:41.619509 4558 generic.go:334] "Generic (PLEG): container finished" podID="4762a957-4883-48b5-a7d1-0110cd355637" containerID="a98c6a8de5a802f2c24a792589b6395416e80bce05fd9b7189c303166710721e" exitCode=0 Jan 20 18:35:41 crc kubenswrapper[4558]: I0120 18:35:41.619581 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vvvhz" Jan 20 18:35:41 crc kubenswrapper[4558]: I0120 18:35:41.619574 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vvvhz" event={"ID":"4762a957-4883-48b5-a7d1-0110cd355637","Type":"ContainerDied","Data":"a98c6a8de5a802f2c24a792589b6395416e80bce05fd9b7189c303166710721e"} Jan 20 18:35:41 crc kubenswrapper[4558]: I0120 18:35:41.619832 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vvvhz" event={"ID":"4762a957-4883-48b5-a7d1-0110cd355637","Type":"ContainerDied","Data":"bfd6df254554eb3f542c6f35a2daab83cccbf94a6f7ec11318ea1ed469432e11"} Jan 20 18:35:41 crc kubenswrapper[4558]: I0120 18:35:41.619887 4558 scope.go:117] "RemoveContainer" containerID="a98c6a8de5a802f2c24a792589b6395416e80bce05fd9b7189c303166710721e" Jan 20 18:35:41 crc kubenswrapper[4558]: I0120 18:35:41.622790 4558 generic.go:334] "Generic (PLEG): container finished" podID="d18169f1-a4d2-4189-9625-7024b03c64d3" containerID="eca4d76757011d6cdfda2d1177ecd5e231c28e5a23800ae10da031e65eb65d40" exitCode=0 Jan 20 18:35:41 crc kubenswrapper[4558]: I0120 18:35:41.622862 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-5d5bd56788-rvvzp" event={"ID":"d18169f1-a4d2-4189-9625-7024b03c64d3","Type":"ContainerDied","Data":"eca4d76757011d6cdfda2d1177ecd5e231c28e5a23800ae10da031e65eb65d40"} Jan 20 18:35:41 crc kubenswrapper[4558]: I0120 18:35:41.624791 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystonefb15-account-delete-2s29l" event={"ID":"ca0974c0-e5f7-4a18-8c9e-fb4131e4270b","Type":"ContainerDied","Data":"b715894e035f43384c773be1ecc87323bdd6e7949fde66e490931018a1e5c370"} Jan 20 18:35:41 crc kubenswrapper[4558]: I0120 18:35:41.624828 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b715894e035f43384c773be1ecc87323bdd6e7949fde66e490931018a1e5c370" Jan 20 18:35:41 crc kubenswrapper[4558]: I0120 18:35:41.624894 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystonefb15-account-delete-2s29l" Jan 20 18:35:41 crc kubenswrapper[4558]: I0120 18:35:41.641290 4558 scope.go:117] "RemoveContainer" containerID="b9f975af0d87e56031510c7f8a48c5cd1a9073d22e39f06d3ddc217d4524b7b3" Jan 20 18:35:41 crc kubenswrapper[4558]: I0120 18:35:41.654648 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-vvvhz"] Jan 20 18:35:41 crc kubenswrapper[4558]: I0120 18:35:41.657332 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-vvvhz"] Jan 20 18:35:41 crc kubenswrapper[4558]: I0120 18:35:41.683424 4558 scope.go:117] "RemoveContainer" containerID="12031135cb640306745bc1c315621a15d8202dfd49c260a04a571d95f9d82983" Jan 20 18:35:41 crc kubenswrapper[4558]: I0120 18:35:41.683771 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-5d5bd56788-rvvzp" Jan 20 18:35:41 crc kubenswrapper[4558]: I0120 18:35:41.701519 4558 scope.go:117] "RemoveContainer" containerID="a98c6a8de5a802f2c24a792589b6395416e80bce05fd9b7189c303166710721e" Jan 20 18:35:41 crc kubenswrapper[4558]: E0120 18:35:41.702276 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a98c6a8de5a802f2c24a792589b6395416e80bce05fd9b7189c303166710721e\": container with ID starting with a98c6a8de5a802f2c24a792589b6395416e80bce05fd9b7189c303166710721e not found: ID does not exist" containerID="a98c6a8de5a802f2c24a792589b6395416e80bce05fd9b7189c303166710721e" Jan 20 18:35:41 crc kubenswrapper[4558]: I0120 18:35:41.702321 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a98c6a8de5a802f2c24a792589b6395416e80bce05fd9b7189c303166710721e"} err="failed to get container status \"a98c6a8de5a802f2c24a792589b6395416e80bce05fd9b7189c303166710721e\": rpc error: code = NotFound desc = could not find container \"a98c6a8de5a802f2c24a792589b6395416e80bce05fd9b7189c303166710721e\": container with ID starting with a98c6a8de5a802f2c24a792589b6395416e80bce05fd9b7189c303166710721e not found: ID does not exist" Jan 20 18:35:41 crc kubenswrapper[4558]: I0120 18:35:41.702345 4558 scope.go:117] "RemoveContainer" containerID="b9f975af0d87e56031510c7f8a48c5cd1a9073d22e39f06d3ddc217d4524b7b3" Jan 20 18:35:41 crc kubenswrapper[4558]: E0120 18:35:41.703880 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b9f975af0d87e56031510c7f8a48c5cd1a9073d22e39f06d3ddc217d4524b7b3\": container with ID starting with b9f975af0d87e56031510c7f8a48c5cd1a9073d22e39f06d3ddc217d4524b7b3 not found: ID does not exist" containerID="b9f975af0d87e56031510c7f8a48c5cd1a9073d22e39f06d3ddc217d4524b7b3" Jan 20 18:35:41 crc kubenswrapper[4558]: I0120 18:35:41.703935 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b9f975af0d87e56031510c7f8a48c5cd1a9073d22e39f06d3ddc217d4524b7b3"} err="failed to get container status \"b9f975af0d87e56031510c7f8a48c5cd1a9073d22e39f06d3ddc217d4524b7b3\": rpc error: code = NotFound desc = could not find container \"b9f975af0d87e56031510c7f8a48c5cd1a9073d22e39f06d3ddc217d4524b7b3\": container with ID starting with b9f975af0d87e56031510c7f8a48c5cd1a9073d22e39f06d3ddc217d4524b7b3 not found: ID does not exist" Jan 20 18:35:41 crc kubenswrapper[4558]: I0120 18:35:41.703975 4558 scope.go:117] 
"RemoveContainer" containerID="12031135cb640306745bc1c315621a15d8202dfd49c260a04a571d95f9d82983" Jan 20 18:35:41 crc kubenswrapper[4558]: E0120 18:35:41.704437 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"12031135cb640306745bc1c315621a15d8202dfd49c260a04a571d95f9d82983\": container with ID starting with 12031135cb640306745bc1c315621a15d8202dfd49c260a04a571d95f9d82983 not found: ID does not exist" containerID="12031135cb640306745bc1c315621a15d8202dfd49c260a04a571d95f9d82983" Jan 20 18:35:41 crc kubenswrapper[4558]: I0120 18:35:41.704470 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"12031135cb640306745bc1c315621a15d8202dfd49c260a04a571d95f9d82983"} err="failed to get container status \"12031135cb640306745bc1c315621a15d8202dfd49c260a04a571d95f9d82983\": rpc error: code = NotFound desc = could not find container \"12031135cb640306745bc1c315621a15d8202dfd49c260a04a571d95f9d82983\": container with ID starting with 12031135cb640306745bc1c315621a15d8202dfd49c260a04a571d95f9d82983 not found: ID does not exist" Jan 20 18:35:41 crc kubenswrapper[4558]: I0120 18:35:41.776446 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d18169f1-a4d2-4189-9625-7024b03c64d3-scripts\") pod \"d18169f1-a4d2-4189-9625-7024b03c64d3\" (UID: \"d18169f1-a4d2-4189-9625-7024b03c64d3\") " Jan 20 18:35:41 crc kubenswrapper[4558]: I0120 18:35:41.776775 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n2p8n\" (UniqueName: \"kubernetes.io/projected/d18169f1-a4d2-4189-9625-7024b03c64d3-kube-api-access-n2p8n\") pod \"d18169f1-a4d2-4189-9625-7024b03c64d3\" (UID: \"d18169f1-a4d2-4189-9625-7024b03c64d3\") " Jan 20 18:35:41 crc kubenswrapper[4558]: I0120 18:35:41.776824 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d18169f1-a4d2-4189-9625-7024b03c64d3-credential-keys\") pod \"d18169f1-a4d2-4189-9625-7024b03c64d3\" (UID: \"d18169f1-a4d2-4189-9625-7024b03c64d3\") " Jan 20 18:35:41 crc kubenswrapper[4558]: I0120 18:35:41.776867 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d18169f1-a4d2-4189-9625-7024b03c64d3-config-data\") pod \"d18169f1-a4d2-4189-9625-7024b03c64d3\" (UID: \"d18169f1-a4d2-4189-9625-7024b03c64d3\") " Jan 20 18:35:41 crc kubenswrapper[4558]: I0120 18:35:41.776927 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d18169f1-a4d2-4189-9625-7024b03c64d3-fernet-keys\") pod \"d18169f1-a4d2-4189-9625-7024b03c64d3\" (UID: \"d18169f1-a4d2-4189-9625-7024b03c64d3\") " Jan 20 18:35:41 crc kubenswrapper[4558]: I0120 18:35:41.793270 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d18169f1-a4d2-4189-9625-7024b03c64d3-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "d18169f1-a4d2-4189-9625-7024b03c64d3" (UID: "d18169f1-a4d2-4189-9625-7024b03c64d3"). InnerVolumeSpecName "fernet-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:35:41 crc kubenswrapper[4558]: I0120 18:35:41.801401 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d18169f1-a4d2-4189-9625-7024b03c64d3-kube-api-access-n2p8n" (OuterVolumeSpecName: "kube-api-access-n2p8n") pod "d18169f1-a4d2-4189-9625-7024b03c64d3" (UID: "d18169f1-a4d2-4189-9625-7024b03c64d3"). InnerVolumeSpecName "kube-api-access-n2p8n". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:35:41 crc kubenswrapper[4558]: I0120 18:35:41.801503 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d18169f1-a4d2-4189-9625-7024b03c64d3-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "d18169f1-a4d2-4189-9625-7024b03c64d3" (UID: "d18169f1-a4d2-4189-9625-7024b03c64d3"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:35:41 crc kubenswrapper[4558]: I0120 18:35:41.808255 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d18169f1-a4d2-4189-9625-7024b03c64d3-scripts" (OuterVolumeSpecName: "scripts") pod "d18169f1-a4d2-4189-9625-7024b03c64d3" (UID: "d18169f1-a4d2-4189-9625-7024b03c64d3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:35:41 crc kubenswrapper[4558]: I0120 18:35:41.820322 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d18169f1-a4d2-4189-9625-7024b03c64d3-config-data" (OuterVolumeSpecName: "config-data") pod "d18169f1-a4d2-4189-9625-7024b03c64d3" (UID: "d18169f1-a4d2-4189-9625-7024b03c64d3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:35:41 crc kubenswrapper[4558]: I0120 18:35:41.879042 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d18169f1-a4d2-4189-9625-7024b03c64d3-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 18:35:41 crc kubenswrapper[4558]: I0120 18:35:41.879082 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n2p8n\" (UniqueName: \"kubernetes.io/projected/d18169f1-a4d2-4189-9625-7024b03c64d3-kube-api-access-n2p8n\") on node \"crc\" DevicePath \"\"" Jan 20 18:35:41 crc kubenswrapper[4558]: I0120 18:35:41.879095 4558 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d18169f1-a4d2-4189-9625-7024b03c64d3-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 20 18:35:41 crc kubenswrapper[4558]: I0120 18:35:41.879105 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d18169f1-a4d2-4189-9625-7024b03c64d3-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 18:35:41 crc kubenswrapper[4558]: I0120 18:35:41.879116 4558 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d18169f1-a4d2-4189-9625-7024b03c64d3-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 20 18:35:42 crc kubenswrapper[4558]: I0120 18:35:42.576442 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4762a957-4883-48b5-a7d1-0110cd355637" path="/var/lib/kubelet/pods/4762a957-4883-48b5-a7d1-0110cd355637/volumes" Jan 20 18:35:42 crc kubenswrapper[4558]: I0120 18:35:42.637291 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-5d5bd56788-rvvzp" 
event={"ID":"d18169f1-a4d2-4189-9625-7024b03c64d3","Type":"ContainerDied","Data":"ffd467659d91836acbddd27f310b3f3453d94c51566fddbf001fa60068baa3d4"} Jan 20 18:35:42 crc kubenswrapper[4558]: I0120 18:35:42.637379 4558 scope.go:117] "RemoveContainer" containerID="eca4d76757011d6cdfda2d1177ecd5e231c28e5a23800ae10da031e65eb65d40" Jan 20 18:35:42 crc kubenswrapper[4558]: I0120 18:35:42.637397 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-5d5bd56788-rvvzp" Jan 20 18:35:42 crc kubenswrapper[4558]: I0120 18:35:42.659145 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone-5d5bd56788-rvvzp"] Jan 20 18:35:42 crc kubenswrapper[4558]: I0120 18:35:42.663229 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/keystone-5d5bd56788-rvvzp"] Jan 20 18:35:42 crc kubenswrapper[4558]: I0120 18:35:42.820372 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystonefb15-account-delete-2s29l"] Jan 20 18:35:42 crc kubenswrapper[4558]: I0120 18:35:42.823934 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/keystonefb15-account-delete-2s29l"] Jan 20 18:35:43 crc kubenswrapper[4558]: I0120 18:35:43.044299 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/keystone-db-create-bfkdh"] Jan 20 18:35:43 crc kubenswrapper[4558]: E0120 18:35:43.044967 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca0974c0-e5f7-4a18-8c9e-fb4131e4270b" containerName="mariadb-account-delete" Jan 20 18:35:43 crc kubenswrapper[4558]: I0120 18:35:43.044988 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca0974c0-e5f7-4a18-8c9e-fb4131e4270b" containerName="mariadb-account-delete" Jan 20 18:35:43 crc kubenswrapper[4558]: E0120 18:35:43.045028 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d18169f1-a4d2-4189-9625-7024b03c64d3" containerName="keystone-api" Jan 20 18:35:43 crc kubenswrapper[4558]: I0120 18:35:43.045102 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d18169f1-a4d2-4189-9625-7024b03c64d3" containerName="keystone-api" Jan 20 18:35:43 crc kubenswrapper[4558]: E0120 18:35:43.045136 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4762a957-4883-48b5-a7d1-0110cd355637" containerName="extract-content" Jan 20 18:35:43 crc kubenswrapper[4558]: I0120 18:35:43.045144 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4762a957-4883-48b5-a7d1-0110cd355637" containerName="extract-content" Jan 20 18:35:43 crc kubenswrapper[4558]: E0120 18:35:43.045175 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4762a957-4883-48b5-a7d1-0110cd355637" containerName="extract-utilities" Jan 20 18:35:43 crc kubenswrapper[4558]: I0120 18:35:43.045183 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4762a957-4883-48b5-a7d1-0110cd355637" containerName="extract-utilities" Jan 20 18:35:43 crc kubenswrapper[4558]: E0120 18:35:43.045210 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4762a957-4883-48b5-a7d1-0110cd355637" containerName="registry-server" Jan 20 18:35:43 crc kubenswrapper[4558]: I0120 18:35:43.045218 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4762a957-4883-48b5-a7d1-0110cd355637" containerName="registry-server" Jan 20 18:35:43 crc kubenswrapper[4558]: I0120 18:35:43.045425 4558 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="d18169f1-a4d2-4189-9625-7024b03c64d3" containerName="keystone-api" Jan 20 18:35:43 crc kubenswrapper[4558]: I0120 18:35:43.045440 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="4762a957-4883-48b5-a7d1-0110cd355637" containerName="registry-server" Jan 20 18:35:43 crc kubenswrapper[4558]: I0120 18:35:43.045458 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ca0974c0-e5f7-4a18-8c9e-fb4131e4270b" containerName="mariadb-account-delete" Jan 20 18:35:43 crc kubenswrapper[4558]: I0120 18:35:43.046398 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-db-create-bfkdh" Jan 20 18:35:43 crc kubenswrapper[4558]: I0120 18:35:43.051428 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/keystone-607b-account-create-update-b2gfm"] Jan 20 18:35:43 crc kubenswrapper[4558]: I0120 18:35:43.052241 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-607b-account-create-update-b2gfm" Jan 20 18:35:43 crc kubenswrapper[4558]: I0120 18:35:43.053855 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-db-secret" Jan 20 18:35:43 crc kubenswrapper[4558]: I0120 18:35:43.055836 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-db-create-bfkdh"] Jan 20 18:35:43 crc kubenswrapper[4558]: I0120 18:35:43.065956 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-607b-account-create-update-b2gfm"] Jan 20 18:35:43 crc kubenswrapper[4558]: I0120 18:35:43.096229 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/98d248b3-2b4b-4707-9cf6-ee31015ed622-operator-scripts\") pod \"keystone-db-create-bfkdh\" (UID: \"98d248b3-2b4b-4707-9cf6-ee31015ed622\") " pod="keystone-kuttl-tests/keystone-db-create-bfkdh" Jan 20 18:35:43 crc kubenswrapper[4558]: I0120 18:35:43.096281 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/07c70dfc-1b41-44ce-8bb1-015cc6dbedae-operator-scripts\") pod \"keystone-607b-account-create-update-b2gfm\" (UID: \"07c70dfc-1b41-44ce-8bb1-015cc6dbedae\") " pod="keystone-kuttl-tests/keystone-607b-account-create-update-b2gfm" Jan 20 18:35:43 crc kubenswrapper[4558]: I0120 18:35:43.096325 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gw7hs\" (UniqueName: \"kubernetes.io/projected/98d248b3-2b4b-4707-9cf6-ee31015ed622-kube-api-access-gw7hs\") pod \"keystone-db-create-bfkdh\" (UID: \"98d248b3-2b4b-4707-9cf6-ee31015ed622\") " pod="keystone-kuttl-tests/keystone-db-create-bfkdh" Jan 20 18:35:43 crc kubenswrapper[4558]: I0120 18:35:43.096386 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ncxk6\" (UniqueName: \"kubernetes.io/projected/07c70dfc-1b41-44ce-8bb1-015cc6dbedae-kube-api-access-ncxk6\") pod \"keystone-607b-account-create-update-b2gfm\" (UID: \"07c70dfc-1b41-44ce-8bb1-015cc6dbedae\") " pod="keystone-kuttl-tests/keystone-607b-account-create-update-b2gfm" Jan 20 18:35:43 crc kubenswrapper[4558]: I0120 18:35:43.197610 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ncxk6\" (UniqueName: 
\"kubernetes.io/projected/07c70dfc-1b41-44ce-8bb1-015cc6dbedae-kube-api-access-ncxk6\") pod \"keystone-607b-account-create-update-b2gfm\" (UID: \"07c70dfc-1b41-44ce-8bb1-015cc6dbedae\") " pod="keystone-kuttl-tests/keystone-607b-account-create-update-b2gfm" Jan 20 18:35:43 crc kubenswrapper[4558]: I0120 18:35:43.198149 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/98d248b3-2b4b-4707-9cf6-ee31015ed622-operator-scripts\") pod \"keystone-db-create-bfkdh\" (UID: \"98d248b3-2b4b-4707-9cf6-ee31015ed622\") " pod="keystone-kuttl-tests/keystone-db-create-bfkdh" Jan 20 18:35:43 crc kubenswrapper[4558]: I0120 18:35:43.198977 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/07c70dfc-1b41-44ce-8bb1-015cc6dbedae-operator-scripts\") pod \"keystone-607b-account-create-update-b2gfm\" (UID: \"07c70dfc-1b41-44ce-8bb1-015cc6dbedae\") " pod="keystone-kuttl-tests/keystone-607b-account-create-update-b2gfm" Jan 20 18:35:43 crc kubenswrapper[4558]: I0120 18:35:43.199138 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/98d248b3-2b4b-4707-9cf6-ee31015ed622-operator-scripts\") pod \"keystone-db-create-bfkdh\" (UID: \"98d248b3-2b4b-4707-9cf6-ee31015ed622\") " pod="keystone-kuttl-tests/keystone-db-create-bfkdh" Jan 20 18:35:43 crc kubenswrapper[4558]: I0120 18:35:43.199227 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gw7hs\" (UniqueName: \"kubernetes.io/projected/98d248b3-2b4b-4707-9cf6-ee31015ed622-kube-api-access-gw7hs\") pod \"keystone-db-create-bfkdh\" (UID: \"98d248b3-2b4b-4707-9cf6-ee31015ed622\") " pod="keystone-kuttl-tests/keystone-db-create-bfkdh" Jan 20 18:35:43 crc kubenswrapper[4558]: I0120 18:35:43.200062 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/07c70dfc-1b41-44ce-8bb1-015cc6dbedae-operator-scripts\") pod \"keystone-607b-account-create-update-b2gfm\" (UID: \"07c70dfc-1b41-44ce-8bb1-015cc6dbedae\") " pod="keystone-kuttl-tests/keystone-607b-account-create-update-b2gfm" Jan 20 18:35:43 crc kubenswrapper[4558]: I0120 18:35:43.213673 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ncxk6\" (UniqueName: \"kubernetes.io/projected/07c70dfc-1b41-44ce-8bb1-015cc6dbedae-kube-api-access-ncxk6\") pod \"keystone-607b-account-create-update-b2gfm\" (UID: \"07c70dfc-1b41-44ce-8bb1-015cc6dbedae\") " pod="keystone-kuttl-tests/keystone-607b-account-create-update-b2gfm" Jan 20 18:35:43 crc kubenswrapper[4558]: I0120 18:35:43.214432 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gw7hs\" (UniqueName: \"kubernetes.io/projected/98d248b3-2b4b-4707-9cf6-ee31015ed622-kube-api-access-gw7hs\") pod \"keystone-db-create-bfkdh\" (UID: \"98d248b3-2b4b-4707-9cf6-ee31015ed622\") " pod="keystone-kuttl-tests/keystone-db-create-bfkdh" Jan 20 18:35:43 crc kubenswrapper[4558]: I0120 18:35:43.366696 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-db-create-bfkdh" Jan 20 18:35:43 crc kubenswrapper[4558]: I0120 18:35:43.375196 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-607b-account-create-update-b2gfm" Jan 20 18:35:43 crc kubenswrapper[4558]: I0120 18:35:43.789317 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-db-create-bfkdh"] Jan 20 18:35:43 crc kubenswrapper[4558]: I0120 18:35:43.838771 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-607b-account-create-update-b2gfm"] Jan 20 18:35:43 crc kubenswrapper[4558]: W0120 18:35:43.849442 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod07c70dfc_1b41_44ce_8bb1_015cc6dbedae.slice/crio-c5ec06e94442ed2f4bc8980668564a5d17f92a72a881c5cf40dffaa5d4a88d9d WatchSource:0}: Error finding container c5ec06e94442ed2f4bc8980668564a5d17f92a72a881c5cf40dffaa5d4a88d9d: Status 404 returned error can't find the container with id c5ec06e94442ed2f4bc8980668564a5d17f92a72a881c5cf40dffaa5d4a88d9d Jan 20 18:35:44 crc kubenswrapper[4558]: I0120 18:35:44.576409 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ca0974c0-e5f7-4a18-8c9e-fb4131e4270b" path="/var/lib/kubelet/pods/ca0974c0-e5f7-4a18-8c9e-fb4131e4270b/volumes" Jan 20 18:35:44 crc kubenswrapper[4558]: I0120 18:35:44.577478 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d18169f1-a4d2-4189-9625-7024b03c64d3" path="/var/lib/kubelet/pods/d18169f1-a4d2-4189-9625-7024b03c64d3/volumes" Jan 20 18:35:44 crc kubenswrapper[4558]: I0120 18:35:44.656423 4558 generic.go:334] "Generic (PLEG): container finished" podID="07c70dfc-1b41-44ce-8bb1-015cc6dbedae" containerID="5357156624c9b8928b4edfdcde4f5857b679e588775e8e81a2e717f589382ea8" exitCode=0 Jan 20 18:35:44 crc kubenswrapper[4558]: I0120 18:35:44.656492 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-607b-account-create-update-b2gfm" event={"ID":"07c70dfc-1b41-44ce-8bb1-015cc6dbedae","Type":"ContainerDied","Data":"5357156624c9b8928b4edfdcde4f5857b679e588775e8e81a2e717f589382ea8"} Jan 20 18:35:44 crc kubenswrapper[4558]: I0120 18:35:44.656520 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-607b-account-create-update-b2gfm" event={"ID":"07c70dfc-1b41-44ce-8bb1-015cc6dbedae","Type":"ContainerStarted","Data":"c5ec06e94442ed2f4bc8980668564a5d17f92a72a881c5cf40dffaa5d4a88d9d"} Jan 20 18:35:44 crc kubenswrapper[4558]: I0120 18:35:44.658457 4558 generic.go:334] "Generic (PLEG): container finished" podID="98d248b3-2b4b-4707-9cf6-ee31015ed622" containerID="68b7c6f3a2ab1f2c3b25b09a9c942e3afc3ce1b84d8f4fea490855481f14444e" exitCode=0 Jan 20 18:35:44 crc kubenswrapper[4558]: I0120 18:35:44.658487 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-db-create-bfkdh" event={"ID":"98d248b3-2b4b-4707-9cf6-ee31015ed622","Type":"ContainerDied","Data":"68b7c6f3a2ab1f2c3b25b09a9c942e3afc3ce1b84d8f4fea490855481f14444e"} Jan 20 18:35:44 crc kubenswrapper[4558]: I0120 18:35:44.658509 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-db-create-bfkdh" event={"ID":"98d248b3-2b4b-4707-9cf6-ee31015ed622","Type":"ContainerStarted","Data":"7a1c763afa0737bf3d2dfae864ed9a08a6f104c386a13369eca219438da788b9"} Jan 20 18:35:45 crc kubenswrapper[4558]: I0120 18:35:45.976834 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-607b-account-create-update-b2gfm" Jan 20 18:35:45 crc kubenswrapper[4558]: I0120 18:35:45.982056 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-db-create-bfkdh" Jan 20 18:35:46 crc kubenswrapper[4558]: I0120 18:35:46.144668 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/98d248b3-2b4b-4707-9cf6-ee31015ed622-operator-scripts\") pod \"98d248b3-2b4b-4707-9cf6-ee31015ed622\" (UID: \"98d248b3-2b4b-4707-9cf6-ee31015ed622\") " Jan 20 18:35:46 crc kubenswrapper[4558]: I0120 18:35:46.144940 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/07c70dfc-1b41-44ce-8bb1-015cc6dbedae-operator-scripts\") pod \"07c70dfc-1b41-44ce-8bb1-015cc6dbedae\" (UID: \"07c70dfc-1b41-44ce-8bb1-015cc6dbedae\") " Jan 20 18:35:46 crc kubenswrapper[4558]: I0120 18:35:46.145044 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gw7hs\" (UniqueName: \"kubernetes.io/projected/98d248b3-2b4b-4707-9cf6-ee31015ed622-kube-api-access-gw7hs\") pod \"98d248b3-2b4b-4707-9cf6-ee31015ed622\" (UID: \"98d248b3-2b4b-4707-9cf6-ee31015ed622\") " Jan 20 18:35:46 crc kubenswrapper[4558]: I0120 18:35:46.145723 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/07c70dfc-1b41-44ce-8bb1-015cc6dbedae-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "07c70dfc-1b41-44ce-8bb1-015cc6dbedae" (UID: "07c70dfc-1b41-44ce-8bb1-015cc6dbedae"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:35:46 crc kubenswrapper[4558]: I0120 18:35:46.145847 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ncxk6\" (UniqueName: \"kubernetes.io/projected/07c70dfc-1b41-44ce-8bb1-015cc6dbedae-kube-api-access-ncxk6\") pod \"07c70dfc-1b41-44ce-8bb1-015cc6dbedae\" (UID: \"07c70dfc-1b41-44ce-8bb1-015cc6dbedae\") " Jan 20 18:35:46 crc kubenswrapper[4558]: I0120 18:35:46.146153 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/98d248b3-2b4b-4707-9cf6-ee31015ed622-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "98d248b3-2b4b-4707-9cf6-ee31015ed622" (UID: "98d248b3-2b4b-4707-9cf6-ee31015ed622"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:35:46 crc kubenswrapper[4558]: I0120 18:35:46.146763 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/07c70dfc-1b41-44ce-8bb1-015cc6dbedae-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 18:35:46 crc kubenswrapper[4558]: I0120 18:35:46.146833 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/98d248b3-2b4b-4707-9cf6-ee31015ed622-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 18:35:46 crc kubenswrapper[4558]: I0120 18:35:46.152870 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/07c70dfc-1b41-44ce-8bb1-015cc6dbedae-kube-api-access-ncxk6" (OuterVolumeSpecName: "kube-api-access-ncxk6") pod "07c70dfc-1b41-44ce-8bb1-015cc6dbedae" (UID: "07c70dfc-1b41-44ce-8bb1-015cc6dbedae"). 
InnerVolumeSpecName "kube-api-access-ncxk6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:35:46 crc kubenswrapper[4558]: I0120 18:35:46.153268 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/98d248b3-2b4b-4707-9cf6-ee31015ed622-kube-api-access-gw7hs" (OuterVolumeSpecName: "kube-api-access-gw7hs") pod "98d248b3-2b4b-4707-9cf6-ee31015ed622" (UID: "98d248b3-2b4b-4707-9cf6-ee31015ed622"). InnerVolumeSpecName "kube-api-access-gw7hs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:35:46 crc kubenswrapper[4558]: I0120 18:35:46.248845 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gw7hs\" (UniqueName: \"kubernetes.io/projected/98d248b3-2b4b-4707-9cf6-ee31015ed622-kube-api-access-gw7hs\") on node \"crc\" DevicePath \"\"" Jan 20 18:35:46 crc kubenswrapper[4558]: I0120 18:35:46.248877 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ncxk6\" (UniqueName: \"kubernetes.io/projected/07c70dfc-1b41-44ce-8bb1-015cc6dbedae-kube-api-access-ncxk6\") on node \"crc\" DevicePath \"\"" Jan 20 18:35:46 crc kubenswrapper[4558]: I0120 18:35:46.674742 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-607b-account-create-update-b2gfm" event={"ID":"07c70dfc-1b41-44ce-8bb1-015cc6dbedae","Type":"ContainerDied","Data":"c5ec06e94442ed2f4bc8980668564a5d17f92a72a881c5cf40dffaa5d4a88d9d"} Jan 20 18:35:46 crc kubenswrapper[4558]: I0120 18:35:46.675114 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c5ec06e94442ed2f4bc8980668564a5d17f92a72a881c5cf40dffaa5d4a88d9d" Jan 20 18:35:46 crc kubenswrapper[4558]: I0120 18:35:46.674776 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-607b-account-create-update-b2gfm" Jan 20 18:35:46 crc kubenswrapper[4558]: I0120 18:35:46.678334 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-db-create-bfkdh" event={"ID":"98d248b3-2b4b-4707-9cf6-ee31015ed622","Type":"ContainerDied","Data":"7a1c763afa0737bf3d2dfae864ed9a08a6f104c386a13369eca219438da788b9"} Jan 20 18:35:46 crc kubenswrapper[4558]: I0120 18:35:46.678416 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-db-create-bfkdh" Jan 20 18:35:46 crc kubenswrapper[4558]: I0120 18:35:46.678870 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7a1c763afa0737bf3d2dfae864ed9a08a6f104c386a13369eca219438da788b9" Jan 20 18:35:48 crc kubenswrapper[4558]: I0120 18:35:48.621185 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/keystone-db-sync-2vbxw"] Jan 20 18:35:48 crc kubenswrapper[4558]: E0120 18:35:48.622240 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="98d248b3-2b4b-4707-9cf6-ee31015ed622" containerName="mariadb-database-create" Jan 20 18:35:48 crc kubenswrapper[4558]: I0120 18:35:48.622329 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="98d248b3-2b4b-4707-9cf6-ee31015ed622" containerName="mariadb-database-create" Jan 20 18:35:48 crc kubenswrapper[4558]: E0120 18:35:48.622391 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="07c70dfc-1b41-44ce-8bb1-015cc6dbedae" containerName="mariadb-account-create-update" Jan 20 18:35:48 crc kubenswrapper[4558]: I0120 18:35:48.622433 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="07c70dfc-1b41-44ce-8bb1-015cc6dbedae" containerName="mariadb-account-create-update" Jan 20 18:35:48 crc kubenswrapper[4558]: I0120 18:35:48.622578 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="07c70dfc-1b41-44ce-8bb1-015cc6dbedae" containerName="mariadb-account-create-update" Jan 20 18:35:48 crc kubenswrapper[4558]: I0120 18:35:48.622639 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="98d248b3-2b4b-4707-9cf6-ee31015ed622" containerName="mariadb-database-create" Jan 20 18:35:48 crc kubenswrapper[4558]: I0120 18:35:48.623129 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-db-sync-2vbxw" Jan 20 18:35:48 crc kubenswrapper[4558]: I0120 18:35:48.625640 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone" Jan 20 18:35:48 crc kubenswrapper[4558]: I0120 18:35:48.625643 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-scripts" Jan 20 18:35:48 crc kubenswrapper[4558]: I0120 18:35:48.625876 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-keystone-dockercfg-4l4ns" Jan 20 18:35:48 crc kubenswrapper[4558]: I0120 18:35:48.626206 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-config-data" Jan 20 18:35:48 crc kubenswrapper[4558]: I0120 18:35:48.642111 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-db-sync-2vbxw"] Jan 20 18:35:48 crc kubenswrapper[4558]: I0120 18:35:48.679813 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/68318110-42ee-48a8-ad03-e2f75f291a09-config-data\") pod \"keystone-db-sync-2vbxw\" (UID: \"68318110-42ee-48a8-ad03-e2f75f291a09\") " pod="keystone-kuttl-tests/keystone-db-sync-2vbxw" Jan 20 18:35:48 crc kubenswrapper[4558]: I0120 18:35:48.679862 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jgpfw\" (UniqueName: \"kubernetes.io/projected/68318110-42ee-48a8-ad03-e2f75f291a09-kube-api-access-jgpfw\") pod \"keystone-db-sync-2vbxw\" (UID: \"68318110-42ee-48a8-ad03-e2f75f291a09\") " pod="keystone-kuttl-tests/keystone-db-sync-2vbxw" Jan 20 18:35:48 crc kubenswrapper[4558]: I0120 18:35:48.781875 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/68318110-42ee-48a8-ad03-e2f75f291a09-config-data\") pod \"keystone-db-sync-2vbxw\" (UID: \"68318110-42ee-48a8-ad03-e2f75f291a09\") " pod="keystone-kuttl-tests/keystone-db-sync-2vbxw" Jan 20 18:35:48 crc kubenswrapper[4558]: I0120 18:35:48.781952 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jgpfw\" (UniqueName: \"kubernetes.io/projected/68318110-42ee-48a8-ad03-e2f75f291a09-kube-api-access-jgpfw\") pod \"keystone-db-sync-2vbxw\" (UID: \"68318110-42ee-48a8-ad03-e2f75f291a09\") " pod="keystone-kuttl-tests/keystone-db-sync-2vbxw" Jan 20 18:35:48 crc kubenswrapper[4558]: I0120 18:35:48.788534 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/68318110-42ee-48a8-ad03-e2f75f291a09-config-data\") pod \"keystone-db-sync-2vbxw\" (UID: \"68318110-42ee-48a8-ad03-e2f75f291a09\") " pod="keystone-kuttl-tests/keystone-db-sync-2vbxw" Jan 20 18:35:48 crc kubenswrapper[4558]: I0120 18:35:48.798148 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jgpfw\" (UniqueName: \"kubernetes.io/projected/68318110-42ee-48a8-ad03-e2f75f291a09-kube-api-access-jgpfw\") pod \"keystone-db-sync-2vbxw\" (UID: \"68318110-42ee-48a8-ad03-e2f75f291a09\") " pod="keystone-kuttl-tests/keystone-db-sync-2vbxw" Jan 20 18:35:48 crc kubenswrapper[4558]: I0120 18:35:48.937730 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-db-sync-2vbxw" Jan 20 18:35:49 crc kubenswrapper[4558]: I0120 18:35:49.329495 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-db-sync-2vbxw"] Jan 20 18:35:49 crc kubenswrapper[4558]: I0120 18:35:49.706067 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-db-sync-2vbxw" event={"ID":"68318110-42ee-48a8-ad03-e2f75f291a09","Type":"ContainerStarted","Data":"0d467d3ac757db96cef873308e3402f38b5140e5c7a2dd056d737c63b44610bb"} Jan 20 18:35:49 crc kubenswrapper[4558]: I0120 18:35:49.706132 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-db-sync-2vbxw" event={"ID":"68318110-42ee-48a8-ad03-e2f75f291a09","Type":"ContainerStarted","Data":"c56b972acdbb23f41462f45fe2fb1e614639b8b5c5d4bd9faafe5c89d36f10bd"} Jan 20 18:35:49 crc kubenswrapper[4558]: I0120 18:35:49.723863 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="keystone-kuttl-tests/keystone-db-sync-2vbxw" podStartSLOduration=1.723840473 podStartE2EDuration="1.723840473s" podCreationTimestamp="2026-01-20 18:35:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:35:49.71728121 +0000 UTC m=+6843.477619177" watchObservedRunningTime="2026-01-20 18:35:49.723840473 +0000 UTC m=+6843.484178429" Jan 20 18:35:51 crc kubenswrapper[4558]: I0120 18:35:51.722942 4558 generic.go:334] "Generic (PLEG): container finished" podID="68318110-42ee-48a8-ad03-e2f75f291a09" containerID="0d467d3ac757db96cef873308e3402f38b5140e5c7a2dd056d737c63b44610bb" exitCode=0 Jan 20 18:35:51 crc kubenswrapper[4558]: I0120 18:35:51.723041 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-db-sync-2vbxw" event={"ID":"68318110-42ee-48a8-ad03-e2f75f291a09","Type":"ContainerDied","Data":"0d467d3ac757db96cef873308e3402f38b5140e5c7a2dd056d737c63b44610bb"} Jan 20 18:35:52 crc kubenswrapper[4558]: I0120 18:35:52.959464 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-db-sync-2vbxw" Jan 20 18:35:53 crc kubenswrapper[4558]: I0120 18:35:53.148727 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/68318110-42ee-48a8-ad03-e2f75f291a09-config-data\") pod \"68318110-42ee-48a8-ad03-e2f75f291a09\" (UID: \"68318110-42ee-48a8-ad03-e2f75f291a09\") " Jan 20 18:35:53 crc kubenswrapper[4558]: I0120 18:35:53.149093 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jgpfw\" (UniqueName: \"kubernetes.io/projected/68318110-42ee-48a8-ad03-e2f75f291a09-kube-api-access-jgpfw\") pod \"68318110-42ee-48a8-ad03-e2f75f291a09\" (UID: \"68318110-42ee-48a8-ad03-e2f75f291a09\") " Jan 20 18:35:53 crc kubenswrapper[4558]: I0120 18:35:53.155670 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/68318110-42ee-48a8-ad03-e2f75f291a09-kube-api-access-jgpfw" (OuterVolumeSpecName: "kube-api-access-jgpfw") pod "68318110-42ee-48a8-ad03-e2f75f291a09" (UID: "68318110-42ee-48a8-ad03-e2f75f291a09"). InnerVolumeSpecName "kube-api-access-jgpfw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:35:53 crc kubenswrapper[4558]: I0120 18:35:53.182296 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/68318110-42ee-48a8-ad03-e2f75f291a09-config-data" (OuterVolumeSpecName: "config-data") pod "68318110-42ee-48a8-ad03-e2f75f291a09" (UID: "68318110-42ee-48a8-ad03-e2f75f291a09"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:35:53 crc kubenswrapper[4558]: I0120 18:35:53.250850 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/68318110-42ee-48a8-ad03-e2f75f291a09-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 18:35:53 crc kubenswrapper[4558]: I0120 18:35:53.250892 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jgpfw\" (UniqueName: \"kubernetes.io/projected/68318110-42ee-48a8-ad03-e2f75f291a09-kube-api-access-jgpfw\") on node \"crc\" DevicePath \"\"" Jan 20 18:35:53 crc kubenswrapper[4558]: I0120 18:35:53.740846 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-db-sync-2vbxw" event={"ID":"68318110-42ee-48a8-ad03-e2f75f291a09","Type":"ContainerDied","Data":"c56b972acdbb23f41462f45fe2fb1e614639b8b5c5d4bd9faafe5c89d36f10bd"} Jan 20 18:35:53 crc kubenswrapper[4558]: I0120 18:35:53.740893 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-db-sync-2vbxw" Jan 20 18:35:53 crc kubenswrapper[4558]: I0120 18:35:53.740901 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c56b972acdbb23f41462f45fe2fb1e614639b8b5c5d4bd9faafe5c89d36f10bd" Jan 20 18:35:53 crc kubenswrapper[4558]: I0120 18:35:53.899489 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/keystone-bootstrap-57fzg"] Jan 20 18:35:53 crc kubenswrapper[4558]: E0120 18:35:53.900276 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68318110-42ee-48a8-ad03-e2f75f291a09" containerName="keystone-db-sync" Jan 20 18:35:53 crc kubenswrapper[4558]: I0120 18:35:53.900291 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="68318110-42ee-48a8-ad03-e2f75f291a09" containerName="keystone-db-sync" Jan 20 18:35:53 crc kubenswrapper[4558]: I0120 18:35:53.900462 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="68318110-42ee-48a8-ad03-e2f75f291a09" containerName="keystone-db-sync" Jan 20 18:35:53 crc kubenswrapper[4558]: I0120 18:35:53.901078 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-bootstrap-57fzg" Jan 20 18:35:53 crc kubenswrapper[4558]: I0120 18:35:53.903238 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone" Jan 20 18:35:53 crc kubenswrapper[4558]: I0120 18:35:53.903451 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"osp-secret" Jan 20 18:35:53 crc kubenswrapper[4558]: I0120 18:35:53.903959 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-config-data" Jan 20 18:35:53 crc kubenswrapper[4558]: I0120 18:35:53.904182 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-keystone-dockercfg-4l4ns" Jan 20 18:35:53 crc kubenswrapper[4558]: I0120 18:35:53.904270 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-scripts" Jan 20 18:35:53 crc kubenswrapper[4558]: I0120 18:35:53.909660 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-bootstrap-57fzg"] Jan 20 18:35:53 crc kubenswrapper[4558]: I0120 18:35:53.961372 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tn267\" (UniqueName: \"kubernetes.io/projected/79a95494-e161-4141-8121-92209c479a12-kube-api-access-tn267\") pod \"keystone-bootstrap-57fzg\" (UID: \"79a95494-e161-4141-8121-92209c479a12\") " pod="keystone-kuttl-tests/keystone-bootstrap-57fzg" Jan 20 18:35:53 crc kubenswrapper[4558]: I0120 18:35:53.961428 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/79a95494-e161-4141-8121-92209c479a12-scripts\") pod \"keystone-bootstrap-57fzg\" (UID: \"79a95494-e161-4141-8121-92209c479a12\") " pod="keystone-kuttl-tests/keystone-bootstrap-57fzg" Jan 20 18:35:53 crc kubenswrapper[4558]: I0120 18:35:53.961495 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/79a95494-e161-4141-8121-92209c479a12-config-data\") pod \"keystone-bootstrap-57fzg\" (UID: \"79a95494-e161-4141-8121-92209c479a12\") " pod="keystone-kuttl-tests/keystone-bootstrap-57fzg" Jan 20 18:35:53 crc kubenswrapper[4558]: I0120 18:35:53.961522 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/79a95494-e161-4141-8121-92209c479a12-credential-keys\") pod \"keystone-bootstrap-57fzg\" (UID: \"79a95494-e161-4141-8121-92209c479a12\") " pod="keystone-kuttl-tests/keystone-bootstrap-57fzg" Jan 20 18:35:53 crc kubenswrapper[4558]: I0120 18:35:53.961947 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/79a95494-e161-4141-8121-92209c479a12-fernet-keys\") pod \"keystone-bootstrap-57fzg\" (UID: \"79a95494-e161-4141-8121-92209c479a12\") " pod="keystone-kuttl-tests/keystone-bootstrap-57fzg" Jan 20 18:35:54 crc kubenswrapper[4558]: I0120 18:35:54.063426 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/79a95494-e161-4141-8121-92209c479a12-fernet-keys\") pod \"keystone-bootstrap-57fzg\" (UID: \"79a95494-e161-4141-8121-92209c479a12\") " pod="keystone-kuttl-tests/keystone-bootstrap-57fzg" Jan 20 18:35:54 crc 
kubenswrapper[4558]: I0120 18:35:54.063485 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tn267\" (UniqueName: \"kubernetes.io/projected/79a95494-e161-4141-8121-92209c479a12-kube-api-access-tn267\") pod \"keystone-bootstrap-57fzg\" (UID: \"79a95494-e161-4141-8121-92209c479a12\") " pod="keystone-kuttl-tests/keystone-bootstrap-57fzg" Jan 20 18:35:54 crc kubenswrapper[4558]: I0120 18:35:54.063517 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/79a95494-e161-4141-8121-92209c479a12-scripts\") pod \"keystone-bootstrap-57fzg\" (UID: \"79a95494-e161-4141-8121-92209c479a12\") " pod="keystone-kuttl-tests/keystone-bootstrap-57fzg" Jan 20 18:35:54 crc kubenswrapper[4558]: I0120 18:35:54.063565 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/79a95494-e161-4141-8121-92209c479a12-config-data\") pod \"keystone-bootstrap-57fzg\" (UID: \"79a95494-e161-4141-8121-92209c479a12\") " pod="keystone-kuttl-tests/keystone-bootstrap-57fzg" Jan 20 18:35:54 crc kubenswrapper[4558]: I0120 18:35:54.063602 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/79a95494-e161-4141-8121-92209c479a12-credential-keys\") pod \"keystone-bootstrap-57fzg\" (UID: \"79a95494-e161-4141-8121-92209c479a12\") " pod="keystone-kuttl-tests/keystone-bootstrap-57fzg" Jan 20 18:35:54 crc kubenswrapper[4558]: I0120 18:35:54.069490 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/79a95494-e161-4141-8121-92209c479a12-fernet-keys\") pod \"keystone-bootstrap-57fzg\" (UID: \"79a95494-e161-4141-8121-92209c479a12\") " pod="keystone-kuttl-tests/keystone-bootstrap-57fzg" Jan 20 18:35:54 crc kubenswrapper[4558]: I0120 18:35:54.069530 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/79a95494-e161-4141-8121-92209c479a12-config-data\") pod \"keystone-bootstrap-57fzg\" (UID: \"79a95494-e161-4141-8121-92209c479a12\") " pod="keystone-kuttl-tests/keystone-bootstrap-57fzg" Jan 20 18:35:54 crc kubenswrapper[4558]: I0120 18:35:54.069935 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/79a95494-e161-4141-8121-92209c479a12-credential-keys\") pod \"keystone-bootstrap-57fzg\" (UID: \"79a95494-e161-4141-8121-92209c479a12\") " pod="keystone-kuttl-tests/keystone-bootstrap-57fzg" Jan 20 18:35:54 crc kubenswrapper[4558]: I0120 18:35:54.070180 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/79a95494-e161-4141-8121-92209c479a12-scripts\") pod \"keystone-bootstrap-57fzg\" (UID: \"79a95494-e161-4141-8121-92209c479a12\") " pod="keystone-kuttl-tests/keystone-bootstrap-57fzg" Jan 20 18:35:54 crc kubenswrapper[4558]: I0120 18:35:54.079594 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tn267\" (UniqueName: \"kubernetes.io/projected/79a95494-e161-4141-8121-92209c479a12-kube-api-access-tn267\") pod \"keystone-bootstrap-57fzg\" (UID: \"79a95494-e161-4141-8121-92209c479a12\") " pod="keystone-kuttl-tests/keystone-bootstrap-57fzg" Jan 20 18:35:54 crc kubenswrapper[4558]: I0120 18:35:54.218333 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-bootstrap-57fzg" Jan 20 18:35:54 crc kubenswrapper[4558]: I0120 18:35:54.621737 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-bootstrap-57fzg"] Jan 20 18:35:54 crc kubenswrapper[4558]: I0120 18:35:54.757555 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-bootstrap-57fzg" event={"ID":"79a95494-e161-4141-8121-92209c479a12","Type":"ContainerStarted","Data":"69aa1c27bf9faa48f4b636ea9af7e73bd1699fceede086f9f9810e8d775e1626"} Jan 20 18:35:54 crc kubenswrapper[4558]: I0120 18:35:54.757624 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-bootstrap-57fzg" event={"ID":"79a95494-e161-4141-8121-92209c479a12","Type":"ContainerStarted","Data":"157e5e24577564119577c675e3e1cefdda7cf0bd6e47c8b5d7eaa35b7fda68c3"} Jan 20 18:35:54 crc kubenswrapper[4558]: I0120 18:35:54.782981 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="keystone-kuttl-tests/keystone-bootstrap-57fzg" podStartSLOduration=1.7829662499999999 podStartE2EDuration="1.78296625s" podCreationTimestamp="2026-01-20 18:35:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:35:54.774344048 +0000 UTC m=+6848.534682015" watchObservedRunningTime="2026-01-20 18:35:54.78296625 +0000 UTC m=+6848.543304217" Jan 20 18:35:57 crc kubenswrapper[4558]: I0120 18:35:57.780207 4558 generic.go:334] "Generic (PLEG): container finished" podID="79a95494-e161-4141-8121-92209c479a12" containerID="69aa1c27bf9faa48f4b636ea9af7e73bd1699fceede086f9f9810e8d775e1626" exitCode=0 Jan 20 18:35:57 crc kubenswrapper[4558]: I0120 18:35:57.780315 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-bootstrap-57fzg" event={"ID":"79a95494-e161-4141-8121-92209c479a12","Type":"ContainerDied","Data":"69aa1c27bf9faa48f4b636ea9af7e73bd1699fceede086f9f9810e8d775e1626"} Jan 20 18:35:59 crc kubenswrapper[4558]: I0120 18:35:59.026152 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-bootstrap-57fzg" Jan 20 18:35:59 crc kubenswrapper[4558]: I0120 18:35:59.043092 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tn267\" (UniqueName: \"kubernetes.io/projected/79a95494-e161-4141-8121-92209c479a12-kube-api-access-tn267\") pod \"79a95494-e161-4141-8121-92209c479a12\" (UID: \"79a95494-e161-4141-8121-92209c479a12\") " Jan 20 18:35:59 crc kubenswrapper[4558]: I0120 18:35:59.043147 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/79a95494-e161-4141-8121-92209c479a12-credential-keys\") pod \"79a95494-e161-4141-8121-92209c479a12\" (UID: \"79a95494-e161-4141-8121-92209c479a12\") " Jan 20 18:35:59 crc kubenswrapper[4558]: I0120 18:35:59.043262 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/79a95494-e161-4141-8121-92209c479a12-config-data\") pod \"79a95494-e161-4141-8121-92209c479a12\" (UID: \"79a95494-e161-4141-8121-92209c479a12\") " Jan 20 18:35:59 crc kubenswrapper[4558]: I0120 18:35:59.043378 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/79a95494-e161-4141-8121-92209c479a12-fernet-keys\") pod \"79a95494-e161-4141-8121-92209c479a12\" (UID: \"79a95494-e161-4141-8121-92209c479a12\") " Jan 20 18:35:59 crc kubenswrapper[4558]: I0120 18:35:59.043404 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/79a95494-e161-4141-8121-92209c479a12-scripts\") pod \"79a95494-e161-4141-8121-92209c479a12\" (UID: \"79a95494-e161-4141-8121-92209c479a12\") " Jan 20 18:35:59 crc kubenswrapper[4558]: I0120 18:35:59.049149 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/79a95494-e161-4141-8121-92209c479a12-kube-api-access-tn267" (OuterVolumeSpecName: "kube-api-access-tn267") pod "79a95494-e161-4141-8121-92209c479a12" (UID: "79a95494-e161-4141-8121-92209c479a12"). InnerVolumeSpecName "kube-api-access-tn267". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:35:59 crc kubenswrapper[4558]: I0120 18:35:59.050215 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/79a95494-e161-4141-8121-92209c479a12-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "79a95494-e161-4141-8121-92209c479a12" (UID: "79a95494-e161-4141-8121-92209c479a12"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:35:59 crc kubenswrapper[4558]: I0120 18:35:59.050251 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/79a95494-e161-4141-8121-92209c479a12-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "79a95494-e161-4141-8121-92209c479a12" (UID: "79a95494-e161-4141-8121-92209c479a12"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:35:59 crc kubenswrapper[4558]: I0120 18:35:59.050740 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/79a95494-e161-4141-8121-92209c479a12-scripts" (OuterVolumeSpecName: "scripts") pod "79a95494-e161-4141-8121-92209c479a12" (UID: "79a95494-e161-4141-8121-92209c479a12"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:35:59 crc kubenswrapper[4558]: I0120 18:35:59.061344 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/79a95494-e161-4141-8121-92209c479a12-config-data" (OuterVolumeSpecName: "config-data") pod "79a95494-e161-4141-8121-92209c479a12" (UID: "79a95494-e161-4141-8121-92209c479a12"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:35:59 crc kubenswrapper[4558]: I0120 18:35:59.144262 4558 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/79a95494-e161-4141-8121-92209c479a12-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 20 18:35:59 crc kubenswrapper[4558]: I0120 18:35:59.144283 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/79a95494-e161-4141-8121-92209c479a12-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 18:35:59 crc kubenswrapper[4558]: I0120 18:35:59.144295 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tn267\" (UniqueName: \"kubernetes.io/projected/79a95494-e161-4141-8121-92209c479a12-kube-api-access-tn267\") on node \"crc\" DevicePath \"\"" Jan 20 18:35:59 crc kubenswrapper[4558]: I0120 18:35:59.144307 4558 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/79a95494-e161-4141-8121-92209c479a12-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 20 18:35:59 crc kubenswrapper[4558]: I0120 18:35:59.144317 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/79a95494-e161-4141-8121-92209c479a12-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 18:35:59 crc kubenswrapper[4558]: I0120 18:35:59.798598 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-bootstrap-57fzg" event={"ID":"79a95494-e161-4141-8121-92209c479a12","Type":"ContainerDied","Data":"157e5e24577564119577c675e3e1cefdda7cf0bd6e47c8b5d7eaa35b7fda68c3"} Jan 20 18:35:59 crc kubenswrapper[4558]: I0120 18:35:59.798667 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="157e5e24577564119577c675e3e1cefdda7cf0bd6e47c8b5d7eaa35b7fda68c3" Jan 20 18:35:59 crc kubenswrapper[4558]: I0120 18:35:59.798701 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-bootstrap-57fzg" Jan 20 18:35:59 crc kubenswrapper[4558]: I0120 18:35:59.869732 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/keystone-5799488558-xt6s4"] Jan 20 18:35:59 crc kubenswrapper[4558]: E0120 18:35:59.885189 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79a95494-e161-4141-8121-92209c479a12" containerName="keystone-bootstrap" Jan 20 18:35:59 crc kubenswrapper[4558]: I0120 18:35:59.885374 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="79a95494-e161-4141-8121-92209c479a12" containerName="keystone-bootstrap" Jan 20 18:35:59 crc kubenswrapper[4558]: I0120 18:35:59.885868 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="79a95494-e161-4141-8121-92209c479a12" containerName="keystone-bootstrap" Jan 20 18:35:59 crc kubenswrapper[4558]: I0120 18:35:59.888958 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-5799488558-xt6s4" Jan 20 18:35:59 crc kubenswrapper[4558]: I0120 18:35:59.891184 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-5799488558-xt6s4"] Jan 20 18:35:59 crc kubenswrapper[4558]: I0120 18:35:59.891455 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-scripts" Jan 20 18:35:59 crc kubenswrapper[4558]: I0120 18:35:59.891656 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-keystone-dockercfg-4l4ns" Jan 20 18:35:59 crc kubenswrapper[4558]: I0120 18:35:59.891714 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-config-data" Jan 20 18:35:59 crc kubenswrapper[4558]: I0120 18:35:59.892718 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone" Jan 20 18:36:00 crc kubenswrapper[4558]: I0120 18:36:00.056657 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47df1033-766b-4ae5-8521-14d9db3b7046-config-data\") pod \"keystone-5799488558-xt6s4\" (UID: \"47df1033-766b-4ae5-8521-14d9db3b7046\") " pod="keystone-kuttl-tests/keystone-5799488558-xt6s4" Jan 20 18:36:00 crc kubenswrapper[4558]: I0120 18:36:00.056796 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/47df1033-766b-4ae5-8521-14d9db3b7046-scripts\") pod \"keystone-5799488558-xt6s4\" (UID: \"47df1033-766b-4ae5-8521-14d9db3b7046\") " pod="keystone-kuttl-tests/keystone-5799488558-xt6s4" Jan 20 18:36:00 crc kubenswrapper[4558]: I0120 18:36:00.056834 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/47df1033-766b-4ae5-8521-14d9db3b7046-credential-keys\") pod \"keystone-5799488558-xt6s4\" (UID: \"47df1033-766b-4ae5-8521-14d9db3b7046\") " pod="keystone-kuttl-tests/keystone-5799488558-xt6s4" Jan 20 18:36:00 crc kubenswrapper[4558]: I0120 18:36:00.056907 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/47df1033-766b-4ae5-8521-14d9db3b7046-fernet-keys\") pod \"keystone-5799488558-xt6s4\" (UID: \"47df1033-766b-4ae5-8521-14d9db3b7046\") " pod="keystone-kuttl-tests/keystone-5799488558-xt6s4" Jan 20 18:36:00 crc kubenswrapper[4558]: I0120 18:36:00.057014 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-79l4q\" (UniqueName: \"kubernetes.io/projected/47df1033-766b-4ae5-8521-14d9db3b7046-kube-api-access-79l4q\") pod \"keystone-5799488558-xt6s4\" (UID: \"47df1033-766b-4ae5-8521-14d9db3b7046\") " pod="keystone-kuttl-tests/keystone-5799488558-xt6s4" Jan 20 18:36:00 crc kubenswrapper[4558]: I0120 18:36:00.158684 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-79l4q\" (UniqueName: \"kubernetes.io/projected/47df1033-766b-4ae5-8521-14d9db3b7046-kube-api-access-79l4q\") pod \"keystone-5799488558-xt6s4\" (UID: \"47df1033-766b-4ae5-8521-14d9db3b7046\") " pod="keystone-kuttl-tests/keystone-5799488558-xt6s4" Jan 20 18:36:00 crc kubenswrapper[4558]: I0120 18:36:00.158742 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/47df1033-766b-4ae5-8521-14d9db3b7046-config-data\") pod \"keystone-5799488558-xt6s4\" (UID: \"47df1033-766b-4ae5-8521-14d9db3b7046\") " pod="keystone-kuttl-tests/keystone-5799488558-xt6s4" Jan 20 18:36:00 crc kubenswrapper[4558]: I0120 18:36:00.158818 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/47df1033-766b-4ae5-8521-14d9db3b7046-scripts\") pod \"keystone-5799488558-xt6s4\" (UID: \"47df1033-766b-4ae5-8521-14d9db3b7046\") " pod="keystone-kuttl-tests/keystone-5799488558-xt6s4" Jan 20 18:36:00 crc kubenswrapper[4558]: I0120 18:36:00.158839 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/47df1033-766b-4ae5-8521-14d9db3b7046-credential-keys\") pod \"keystone-5799488558-xt6s4\" (UID: \"47df1033-766b-4ae5-8521-14d9db3b7046\") " pod="keystone-kuttl-tests/keystone-5799488558-xt6s4" Jan 20 18:36:00 crc kubenswrapper[4558]: I0120 18:36:00.158863 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/47df1033-766b-4ae5-8521-14d9db3b7046-fernet-keys\") pod \"keystone-5799488558-xt6s4\" (UID: \"47df1033-766b-4ae5-8521-14d9db3b7046\") " pod="keystone-kuttl-tests/keystone-5799488558-xt6s4" Jan 20 18:36:00 crc kubenswrapper[4558]: I0120 18:36:00.162322 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/47df1033-766b-4ae5-8521-14d9db3b7046-scripts\") pod \"keystone-5799488558-xt6s4\" (UID: \"47df1033-766b-4ae5-8521-14d9db3b7046\") " pod="keystone-kuttl-tests/keystone-5799488558-xt6s4" Jan 20 18:36:00 crc kubenswrapper[4558]: I0120 18:36:00.162868 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/47df1033-766b-4ae5-8521-14d9db3b7046-credential-keys\") pod \"keystone-5799488558-xt6s4\" (UID: \"47df1033-766b-4ae5-8521-14d9db3b7046\") " pod="keystone-kuttl-tests/keystone-5799488558-xt6s4" Jan 20 18:36:00 crc kubenswrapper[4558]: I0120 18:36:00.163147 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/47df1033-766b-4ae5-8521-14d9db3b7046-fernet-keys\") pod \"keystone-5799488558-xt6s4\" (UID: \"47df1033-766b-4ae5-8521-14d9db3b7046\") " pod="keystone-kuttl-tests/keystone-5799488558-xt6s4" Jan 20 18:36:00 crc kubenswrapper[4558]: I0120 18:36:00.163197 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47df1033-766b-4ae5-8521-14d9db3b7046-config-data\") pod \"keystone-5799488558-xt6s4\" (UID: \"47df1033-766b-4ae5-8521-14d9db3b7046\") " pod="keystone-kuttl-tests/keystone-5799488558-xt6s4" Jan 20 18:36:00 crc kubenswrapper[4558]: I0120 18:36:00.171532 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-79l4q\" (UniqueName: \"kubernetes.io/projected/47df1033-766b-4ae5-8521-14d9db3b7046-kube-api-access-79l4q\") pod \"keystone-5799488558-xt6s4\" (UID: \"47df1033-766b-4ae5-8521-14d9db3b7046\") " pod="keystone-kuttl-tests/keystone-5799488558-xt6s4" Jan 20 18:36:00 crc kubenswrapper[4558]: I0120 18:36:00.219469 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-5799488558-xt6s4" Jan 20 18:36:00 crc kubenswrapper[4558]: I0120 18:36:00.607105 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-5799488558-xt6s4"] Jan 20 18:36:00 crc kubenswrapper[4558]: W0120 18:36:00.613198 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod47df1033_766b_4ae5_8521_14d9db3b7046.slice/crio-08f7831cc00b235a3ebd2b7a2f41b347b3565a4377643b18fd40a20929b17b7b WatchSource:0}: Error finding container 08f7831cc00b235a3ebd2b7a2f41b347b3565a4377643b18fd40a20929b17b7b: Status 404 returned error can't find the container with id 08f7831cc00b235a3ebd2b7a2f41b347b3565a4377643b18fd40a20929b17b7b Jan 20 18:36:00 crc kubenswrapper[4558]: I0120 18:36:00.809972 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-5799488558-xt6s4" event={"ID":"47df1033-766b-4ae5-8521-14d9db3b7046","Type":"ContainerStarted","Data":"96aa78614b787ee0909ad8010f06ddf447cf59190c03d561fa2bf9ce4c3207e1"} Jan 20 18:36:00 crc kubenswrapper[4558]: I0120 18:36:00.810435 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="keystone-kuttl-tests/keystone-5799488558-xt6s4" Jan 20 18:36:00 crc kubenswrapper[4558]: I0120 18:36:00.810457 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-5799488558-xt6s4" event={"ID":"47df1033-766b-4ae5-8521-14d9db3b7046","Type":"ContainerStarted","Data":"08f7831cc00b235a3ebd2b7a2f41b347b3565a4377643b18fd40a20929b17b7b"} Jan 20 18:36:00 crc kubenswrapper[4558]: I0120 18:36:00.830824 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="keystone-kuttl-tests/keystone-5799488558-xt6s4" podStartSLOduration=1.830811486 podStartE2EDuration="1.830811486s" podCreationTimestamp="2026-01-20 18:35:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:36:00.826099176 +0000 UTC m=+6854.586437143" watchObservedRunningTime="2026-01-20 18:36:00.830811486 +0000 UTC m=+6854.591149453" Jan 20 18:36:31 crc kubenswrapper[4558]: I0120 18:36:31.533494 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="keystone-kuttl-tests/keystone-5799488558-xt6s4" Jan 20 18:36:32 crc kubenswrapper[4558]: I0120 18:36:32.774681 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/keystone-5799488558-hpdsd"] Jan 20 18:36:32 crc kubenswrapper[4558]: I0120 18:36:32.776386 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-5799488558-hpdsd" Jan 20 18:36:32 crc kubenswrapper[4558]: I0120 18:36:32.779956 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/keystone-5799488558-6tz7z"] Jan 20 18:36:32 crc kubenswrapper[4558]: I0120 18:36:32.781643 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-5799488558-6tz7z" Jan 20 18:36:32 crc kubenswrapper[4558]: I0120 18:36:32.791413 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-5799488558-hpdsd"] Jan 20 18:36:32 crc kubenswrapper[4558]: I0120 18:36:32.797971 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-5799488558-6tz7z"] Jan 20 18:36:32 crc kubenswrapper[4558]: I0120 18:36:32.952595 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8fb89c00-c708-4224-89e4-dbbc1fb06742-scripts\") pod \"keystone-5799488558-hpdsd\" (UID: \"8fb89c00-c708-4224-89e4-dbbc1fb06742\") " pod="keystone-kuttl-tests/keystone-5799488558-hpdsd" Jan 20 18:36:32 crc kubenswrapper[4558]: I0120 18:36:32.952655 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8fb89c00-c708-4224-89e4-dbbc1fb06742-config-data\") pod \"keystone-5799488558-hpdsd\" (UID: \"8fb89c00-c708-4224-89e4-dbbc1fb06742\") " pod="keystone-kuttl-tests/keystone-5799488558-hpdsd" Jan 20 18:36:32 crc kubenswrapper[4558]: I0120 18:36:32.952701 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/82f068bd-1ad6-4a63-85d0-8819c5391a5f-config-data\") pod \"keystone-5799488558-6tz7z\" (UID: \"82f068bd-1ad6-4a63-85d0-8819c5391a5f\") " pod="keystone-kuttl-tests/keystone-5799488558-6tz7z" Jan 20 18:36:32 crc kubenswrapper[4558]: I0120 18:36:32.952746 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/82f068bd-1ad6-4a63-85d0-8819c5391a5f-scripts\") pod \"keystone-5799488558-6tz7z\" (UID: \"82f068bd-1ad6-4a63-85d0-8819c5391a5f\") " pod="keystone-kuttl-tests/keystone-5799488558-6tz7z" Jan 20 18:36:32 crc kubenswrapper[4558]: I0120 18:36:32.952772 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/82f068bd-1ad6-4a63-85d0-8819c5391a5f-credential-keys\") pod \"keystone-5799488558-6tz7z\" (UID: \"82f068bd-1ad6-4a63-85d0-8819c5391a5f\") " pod="keystone-kuttl-tests/keystone-5799488558-6tz7z" Jan 20 18:36:32 crc kubenswrapper[4558]: I0120 18:36:32.952812 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gh6b7\" (UniqueName: \"kubernetes.io/projected/82f068bd-1ad6-4a63-85d0-8819c5391a5f-kube-api-access-gh6b7\") pod \"keystone-5799488558-6tz7z\" (UID: \"82f068bd-1ad6-4a63-85d0-8819c5391a5f\") " pod="keystone-kuttl-tests/keystone-5799488558-6tz7z" Jan 20 18:36:32 crc kubenswrapper[4558]: I0120 18:36:32.952854 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8fb89c00-c708-4224-89e4-dbbc1fb06742-fernet-keys\") pod \"keystone-5799488558-hpdsd\" (UID: \"8fb89c00-c708-4224-89e4-dbbc1fb06742\") " pod="keystone-kuttl-tests/keystone-5799488558-hpdsd" Jan 20 18:36:32 crc kubenswrapper[4558]: I0120 18:36:32.952949 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/82f068bd-1ad6-4a63-85d0-8819c5391a5f-fernet-keys\") pod 
\"keystone-5799488558-6tz7z\" (UID: \"82f068bd-1ad6-4a63-85d0-8819c5391a5f\") " pod="keystone-kuttl-tests/keystone-5799488558-6tz7z" Jan 20 18:36:32 crc kubenswrapper[4558]: I0120 18:36:32.953018 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8fb89c00-c708-4224-89e4-dbbc1fb06742-credential-keys\") pod \"keystone-5799488558-hpdsd\" (UID: \"8fb89c00-c708-4224-89e4-dbbc1fb06742\") " pod="keystone-kuttl-tests/keystone-5799488558-hpdsd" Jan 20 18:36:32 crc kubenswrapper[4558]: I0120 18:36:32.953090 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lk9fn\" (UniqueName: \"kubernetes.io/projected/8fb89c00-c708-4224-89e4-dbbc1fb06742-kube-api-access-lk9fn\") pod \"keystone-5799488558-hpdsd\" (UID: \"8fb89c00-c708-4224-89e4-dbbc1fb06742\") " pod="keystone-kuttl-tests/keystone-5799488558-hpdsd" Jan 20 18:36:33 crc kubenswrapper[4558]: I0120 18:36:33.054627 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8fb89c00-c708-4224-89e4-dbbc1fb06742-fernet-keys\") pod \"keystone-5799488558-hpdsd\" (UID: \"8fb89c00-c708-4224-89e4-dbbc1fb06742\") " pod="keystone-kuttl-tests/keystone-5799488558-hpdsd" Jan 20 18:36:33 crc kubenswrapper[4558]: I0120 18:36:33.054688 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/82f068bd-1ad6-4a63-85d0-8819c5391a5f-fernet-keys\") pod \"keystone-5799488558-6tz7z\" (UID: \"82f068bd-1ad6-4a63-85d0-8819c5391a5f\") " pod="keystone-kuttl-tests/keystone-5799488558-6tz7z" Jan 20 18:36:33 crc kubenswrapper[4558]: I0120 18:36:33.054750 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8fb89c00-c708-4224-89e4-dbbc1fb06742-credential-keys\") pod \"keystone-5799488558-hpdsd\" (UID: \"8fb89c00-c708-4224-89e4-dbbc1fb06742\") " pod="keystone-kuttl-tests/keystone-5799488558-hpdsd" Jan 20 18:36:33 crc kubenswrapper[4558]: I0120 18:36:33.054800 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lk9fn\" (UniqueName: \"kubernetes.io/projected/8fb89c00-c708-4224-89e4-dbbc1fb06742-kube-api-access-lk9fn\") pod \"keystone-5799488558-hpdsd\" (UID: \"8fb89c00-c708-4224-89e4-dbbc1fb06742\") " pod="keystone-kuttl-tests/keystone-5799488558-hpdsd" Jan 20 18:36:33 crc kubenswrapper[4558]: I0120 18:36:33.054839 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8fb89c00-c708-4224-89e4-dbbc1fb06742-scripts\") pod \"keystone-5799488558-hpdsd\" (UID: \"8fb89c00-c708-4224-89e4-dbbc1fb06742\") " pod="keystone-kuttl-tests/keystone-5799488558-hpdsd" Jan 20 18:36:33 crc kubenswrapper[4558]: I0120 18:36:33.054864 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8fb89c00-c708-4224-89e4-dbbc1fb06742-config-data\") pod \"keystone-5799488558-hpdsd\" (UID: \"8fb89c00-c708-4224-89e4-dbbc1fb06742\") " pod="keystone-kuttl-tests/keystone-5799488558-hpdsd" Jan 20 18:36:33 crc kubenswrapper[4558]: I0120 18:36:33.054896 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/82f068bd-1ad6-4a63-85d0-8819c5391a5f-config-data\") 
pod \"keystone-5799488558-6tz7z\" (UID: \"82f068bd-1ad6-4a63-85d0-8819c5391a5f\") " pod="keystone-kuttl-tests/keystone-5799488558-6tz7z" Jan 20 18:36:33 crc kubenswrapper[4558]: I0120 18:36:33.054932 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/82f068bd-1ad6-4a63-85d0-8819c5391a5f-scripts\") pod \"keystone-5799488558-6tz7z\" (UID: \"82f068bd-1ad6-4a63-85d0-8819c5391a5f\") " pod="keystone-kuttl-tests/keystone-5799488558-6tz7z" Jan 20 18:36:33 crc kubenswrapper[4558]: I0120 18:36:33.054958 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/82f068bd-1ad6-4a63-85d0-8819c5391a5f-credential-keys\") pod \"keystone-5799488558-6tz7z\" (UID: \"82f068bd-1ad6-4a63-85d0-8819c5391a5f\") " pod="keystone-kuttl-tests/keystone-5799488558-6tz7z" Jan 20 18:36:33 crc kubenswrapper[4558]: I0120 18:36:33.054986 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gh6b7\" (UniqueName: \"kubernetes.io/projected/82f068bd-1ad6-4a63-85d0-8819c5391a5f-kube-api-access-gh6b7\") pod \"keystone-5799488558-6tz7z\" (UID: \"82f068bd-1ad6-4a63-85d0-8819c5391a5f\") " pod="keystone-kuttl-tests/keystone-5799488558-6tz7z" Jan 20 18:36:33 crc kubenswrapper[4558]: I0120 18:36:33.061996 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8fb89c00-c708-4224-89e4-dbbc1fb06742-scripts\") pod \"keystone-5799488558-hpdsd\" (UID: \"8fb89c00-c708-4224-89e4-dbbc1fb06742\") " pod="keystone-kuttl-tests/keystone-5799488558-hpdsd" Jan 20 18:36:33 crc kubenswrapper[4558]: I0120 18:36:33.062071 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/82f068bd-1ad6-4a63-85d0-8819c5391a5f-credential-keys\") pod \"keystone-5799488558-6tz7z\" (UID: \"82f068bd-1ad6-4a63-85d0-8819c5391a5f\") " pod="keystone-kuttl-tests/keystone-5799488558-6tz7z" Jan 20 18:36:33 crc kubenswrapper[4558]: I0120 18:36:33.062306 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/82f068bd-1ad6-4a63-85d0-8819c5391a5f-config-data\") pod \"keystone-5799488558-6tz7z\" (UID: \"82f068bd-1ad6-4a63-85d0-8819c5391a5f\") " pod="keystone-kuttl-tests/keystone-5799488558-6tz7z" Jan 20 18:36:33 crc kubenswrapper[4558]: I0120 18:36:33.062961 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8fb89c00-c708-4224-89e4-dbbc1fb06742-credential-keys\") pod \"keystone-5799488558-hpdsd\" (UID: \"8fb89c00-c708-4224-89e4-dbbc1fb06742\") " pod="keystone-kuttl-tests/keystone-5799488558-hpdsd" Jan 20 18:36:33 crc kubenswrapper[4558]: I0120 18:36:33.063434 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/82f068bd-1ad6-4a63-85d0-8819c5391a5f-scripts\") pod \"keystone-5799488558-6tz7z\" (UID: \"82f068bd-1ad6-4a63-85d0-8819c5391a5f\") " pod="keystone-kuttl-tests/keystone-5799488558-6tz7z" Jan 20 18:36:33 crc kubenswrapper[4558]: I0120 18:36:33.064482 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8fb89c00-c708-4224-89e4-dbbc1fb06742-fernet-keys\") pod \"keystone-5799488558-hpdsd\" (UID: \"8fb89c00-c708-4224-89e4-dbbc1fb06742\") " 
pod="keystone-kuttl-tests/keystone-5799488558-hpdsd" Jan 20 18:36:33 crc kubenswrapper[4558]: I0120 18:36:33.064699 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8fb89c00-c708-4224-89e4-dbbc1fb06742-config-data\") pod \"keystone-5799488558-hpdsd\" (UID: \"8fb89c00-c708-4224-89e4-dbbc1fb06742\") " pod="keystone-kuttl-tests/keystone-5799488558-hpdsd" Jan 20 18:36:33 crc kubenswrapper[4558]: I0120 18:36:33.065193 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/82f068bd-1ad6-4a63-85d0-8819c5391a5f-fernet-keys\") pod \"keystone-5799488558-6tz7z\" (UID: \"82f068bd-1ad6-4a63-85d0-8819c5391a5f\") " pod="keystone-kuttl-tests/keystone-5799488558-6tz7z" Jan 20 18:36:33 crc kubenswrapper[4558]: I0120 18:36:33.069812 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lk9fn\" (UniqueName: \"kubernetes.io/projected/8fb89c00-c708-4224-89e4-dbbc1fb06742-kube-api-access-lk9fn\") pod \"keystone-5799488558-hpdsd\" (UID: \"8fb89c00-c708-4224-89e4-dbbc1fb06742\") " pod="keystone-kuttl-tests/keystone-5799488558-hpdsd" Jan 20 18:36:33 crc kubenswrapper[4558]: I0120 18:36:33.071878 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gh6b7\" (UniqueName: \"kubernetes.io/projected/82f068bd-1ad6-4a63-85d0-8819c5391a5f-kube-api-access-gh6b7\") pod \"keystone-5799488558-6tz7z\" (UID: \"82f068bd-1ad6-4a63-85d0-8819c5391a5f\") " pod="keystone-kuttl-tests/keystone-5799488558-6tz7z" Jan 20 18:36:33 crc kubenswrapper[4558]: I0120 18:36:33.100655 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-5799488558-hpdsd" Jan 20 18:36:33 crc kubenswrapper[4558]: I0120 18:36:33.109421 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-5799488558-6tz7z" Jan 20 18:36:33 crc kubenswrapper[4558]: I0120 18:36:33.326136 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-5799488558-6tz7z"] Jan 20 18:36:33 crc kubenswrapper[4558]: I0120 18:36:33.498158 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-5799488558-hpdsd"] Jan 20 18:36:33 crc kubenswrapper[4558]: W0120 18:36:33.503473 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8fb89c00_c708_4224_89e4_dbbc1fb06742.slice/crio-6b5506ea895692e63345f9274697d6dca37744c5738277c1475e535790af90f3 WatchSource:0}: Error finding container 6b5506ea895692e63345f9274697d6dca37744c5738277c1475e535790af90f3: Status 404 returned error can't find the container with id 6b5506ea895692e63345f9274697d6dca37744c5738277c1475e535790af90f3 Jan 20 18:36:34 crc kubenswrapper[4558]: I0120 18:36:34.062016 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-5799488558-6tz7z" event={"ID":"82f068bd-1ad6-4a63-85d0-8819c5391a5f","Type":"ContainerStarted","Data":"fc431d7a7221a30b66c622b03b93905034e264232e4f2973eab309fc5a3a48ec"} Jan 20 18:36:34 crc kubenswrapper[4558]: I0120 18:36:34.062539 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-5799488558-6tz7z" event={"ID":"82f068bd-1ad6-4a63-85d0-8819c5391a5f","Type":"ContainerStarted","Data":"4bf089c5e8b20814c6830af6ed8224f1104015cd428258b4c7ecd6a4f422854a"} Jan 20 18:36:34 crc kubenswrapper[4558]: I0120 18:36:34.062564 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="keystone-kuttl-tests/keystone-5799488558-6tz7z" Jan 20 18:36:34 crc kubenswrapper[4558]: I0120 18:36:34.064377 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-5799488558-hpdsd" event={"ID":"8fb89c00-c708-4224-89e4-dbbc1fb06742","Type":"ContainerStarted","Data":"b413b14fa137dd40e372017d8bc85eb1c85de7302534150950a49976ee39206f"} Jan 20 18:36:34 crc kubenswrapper[4558]: I0120 18:36:34.064405 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-5799488558-hpdsd" event={"ID":"8fb89c00-c708-4224-89e4-dbbc1fb06742","Type":"ContainerStarted","Data":"6b5506ea895692e63345f9274697d6dca37744c5738277c1475e535790af90f3"} Jan 20 18:36:34 crc kubenswrapper[4558]: I0120 18:36:34.064620 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="keystone-kuttl-tests/keystone-5799488558-hpdsd" Jan 20 18:36:34 crc kubenswrapper[4558]: I0120 18:36:34.086331 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="keystone-kuttl-tests/keystone-5799488558-6tz7z" podStartSLOduration=2.086313761 podStartE2EDuration="2.086313761s" podCreationTimestamp="2026-01-20 18:36:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:36:34.078477256 +0000 UTC m=+6887.838815223" watchObservedRunningTime="2026-01-20 18:36:34.086313761 +0000 UTC m=+6887.846651728" Jan 20 18:36:34 crc kubenswrapper[4558]: I0120 18:36:34.097989 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="keystone-kuttl-tests/keystone-5799488558-hpdsd" podStartSLOduration=2.09795513 podStartE2EDuration="2.09795513s" podCreationTimestamp="2026-01-20 18:36:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 
UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:36:34.092625448 +0000 UTC m=+6887.852963415" watchObservedRunningTime="2026-01-20 18:36:34.09795513 +0000 UTC m=+6887.858293097" Jan 20 18:36:57 crc kubenswrapper[4558]: I0120 18:36:57.330556 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 18:36:57 crc kubenswrapper[4558]: I0120 18:36:57.332122 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 18:37:04 crc kubenswrapper[4558]: I0120 18:37:04.369755 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="keystone-kuttl-tests/keystone-5799488558-hpdsd" Jan 20 18:37:04 crc kubenswrapper[4558]: I0120 18:37:04.379557 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="keystone-kuttl-tests/keystone-5799488558-6tz7z" Jan 20 18:37:05 crc kubenswrapper[4558]: I0120 18:37:05.389628 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone-5799488558-hpdsd"] Jan 20 18:37:05 crc kubenswrapper[4558]: I0120 18:37:05.390311 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="keystone-kuttl-tests/keystone-5799488558-hpdsd" podUID="8fb89c00-c708-4224-89e4-dbbc1fb06742" containerName="keystone-api" containerID="cri-o://b413b14fa137dd40e372017d8bc85eb1c85de7302534150950a49976ee39206f" gracePeriod=30 Jan 20 18:37:05 crc kubenswrapper[4558]: I0120 18:37:05.401175 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone-5799488558-6tz7z"] Jan 20 18:37:05 crc kubenswrapper[4558]: I0120 18:37:05.401396 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="keystone-kuttl-tests/keystone-5799488558-6tz7z" podUID="82f068bd-1ad6-4a63-85d0-8819c5391a5f" containerName="keystone-api" containerID="cri-o://fc431d7a7221a30b66c622b03b93905034e264232e4f2973eab309fc5a3a48ec" gracePeriod=30 Jan 20 18:37:06 crc kubenswrapper[4558]: I0120 18:37:06.537889 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone-5799488558-xt6s4"] Jan 20 18:37:06 crc kubenswrapper[4558]: I0120 18:37:06.538149 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="keystone-kuttl-tests/keystone-5799488558-xt6s4" podUID="47df1033-766b-4ae5-8521-14d9db3b7046" containerName="keystone-api" containerID="cri-o://96aa78614b787ee0909ad8010f06ddf447cf59190c03d561fa2bf9ce4c3207e1" gracePeriod=30 Jan 20 18:37:08 crc kubenswrapper[4558]: I0120 18:37:08.910580 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-5799488558-hpdsd" Jan 20 18:37:08 crc kubenswrapper[4558]: I0120 18:37:08.913243 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-5799488558-6tz7z" Jan 20 18:37:09 crc kubenswrapper[4558]: I0120 18:37:09.010682 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/82f068bd-1ad6-4a63-85d0-8819c5391a5f-scripts\") pod \"82f068bd-1ad6-4a63-85d0-8819c5391a5f\" (UID: \"82f068bd-1ad6-4a63-85d0-8819c5391a5f\") " Jan 20 18:37:09 crc kubenswrapper[4558]: I0120 18:37:09.010775 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gh6b7\" (UniqueName: \"kubernetes.io/projected/82f068bd-1ad6-4a63-85d0-8819c5391a5f-kube-api-access-gh6b7\") pod \"82f068bd-1ad6-4a63-85d0-8819c5391a5f\" (UID: \"82f068bd-1ad6-4a63-85d0-8819c5391a5f\") " Jan 20 18:37:09 crc kubenswrapper[4558]: I0120 18:37:09.010826 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/82f068bd-1ad6-4a63-85d0-8819c5391a5f-credential-keys\") pod \"82f068bd-1ad6-4a63-85d0-8819c5391a5f\" (UID: \"82f068bd-1ad6-4a63-85d0-8819c5391a5f\") " Jan 20 18:37:09 crc kubenswrapper[4558]: I0120 18:37:09.010858 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/82f068bd-1ad6-4a63-85d0-8819c5391a5f-fernet-keys\") pod \"82f068bd-1ad6-4a63-85d0-8819c5391a5f\" (UID: \"82f068bd-1ad6-4a63-85d0-8819c5391a5f\") " Jan 20 18:37:09 crc kubenswrapper[4558]: I0120 18:37:09.010895 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8fb89c00-c708-4224-89e4-dbbc1fb06742-config-data\") pod \"8fb89c00-c708-4224-89e4-dbbc1fb06742\" (UID: \"8fb89c00-c708-4224-89e4-dbbc1fb06742\") " Jan 20 18:37:09 crc kubenswrapper[4558]: I0120 18:37:09.010970 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lk9fn\" (UniqueName: \"kubernetes.io/projected/8fb89c00-c708-4224-89e4-dbbc1fb06742-kube-api-access-lk9fn\") pod \"8fb89c00-c708-4224-89e4-dbbc1fb06742\" (UID: \"8fb89c00-c708-4224-89e4-dbbc1fb06742\") " Jan 20 18:37:09 crc kubenswrapper[4558]: I0120 18:37:09.011033 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8fb89c00-c708-4224-89e4-dbbc1fb06742-fernet-keys\") pod \"8fb89c00-c708-4224-89e4-dbbc1fb06742\" (UID: \"8fb89c00-c708-4224-89e4-dbbc1fb06742\") " Jan 20 18:37:09 crc kubenswrapper[4558]: I0120 18:37:09.011066 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8fb89c00-c708-4224-89e4-dbbc1fb06742-scripts\") pod \"8fb89c00-c708-4224-89e4-dbbc1fb06742\" (UID: \"8fb89c00-c708-4224-89e4-dbbc1fb06742\") " Jan 20 18:37:09 crc kubenswrapper[4558]: I0120 18:37:09.011089 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/82f068bd-1ad6-4a63-85d0-8819c5391a5f-config-data\") pod \"82f068bd-1ad6-4a63-85d0-8819c5391a5f\" (UID: \"82f068bd-1ad6-4a63-85d0-8819c5391a5f\") " Jan 20 18:37:09 crc kubenswrapper[4558]: I0120 18:37:09.011139 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8fb89c00-c708-4224-89e4-dbbc1fb06742-credential-keys\") pod \"8fb89c00-c708-4224-89e4-dbbc1fb06742\" (UID: 
\"8fb89c00-c708-4224-89e4-dbbc1fb06742\") " Jan 20 18:37:09 crc kubenswrapper[4558]: I0120 18:37:09.016912 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/82f068bd-1ad6-4a63-85d0-8819c5391a5f-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "82f068bd-1ad6-4a63-85d0-8819c5391a5f" (UID: "82f068bd-1ad6-4a63-85d0-8819c5391a5f"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:37:09 crc kubenswrapper[4558]: I0120 18:37:09.017881 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/82f068bd-1ad6-4a63-85d0-8819c5391a5f-kube-api-access-gh6b7" (OuterVolumeSpecName: "kube-api-access-gh6b7") pod "82f068bd-1ad6-4a63-85d0-8819c5391a5f" (UID: "82f068bd-1ad6-4a63-85d0-8819c5391a5f"). InnerVolumeSpecName "kube-api-access-gh6b7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:37:09 crc kubenswrapper[4558]: I0120 18:37:09.017967 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/82f068bd-1ad6-4a63-85d0-8819c5391a5f-scripts" (OuterVolumeSpecName: "scripts") pod "82f068bd-1ad6-4a63-85d0-8819c5391a5f" (UID: "82f068bd-1ad6-4a63-85d0-8819c5391a5f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:37:09 crc kubenswrapper[4558]: I0120 18:37:09.018104 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8fb89c00-c708-4224-89e4-dbbc1fb06742-scripts" (OuterVolumeSpecName: "scripts") pod "8fb89c00-c708-4224-89e4-dbbc1fb06742" (UID: "8fb89c00-c708-4224-89e4-dbbc1fb06742"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:37:09 crc kubenswrapper[4558]: I0120 18:37:09.018288 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8fb89c00-c708-4224-89e4-dbbc1fb06742-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "8fb89c00-c708-4224-89e4-dbbc1fb06742" (UID: "8fb89c00-c708-4224-89e4-dbbc1fb06742"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:37:09 crc kubenswrapper[4558]: I0120 18:37:09.018651 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/82f068bd-1ad6-4a63-85d0-8819c5391a5f-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "82f068bd-1ad6-4a63-85d0-8819c5391a5f" (UID: "82f068bd-1ad6-4a63-85d0-8819c5391a5f"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:37:09 crc kubenswrapper[4558]: I0120 18:37:09.018657 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8fb89c00-c708-4224-89e4-dbbc1fb06742-kube-api-access-lk9fn" (OuterVolumeSpecName: "kube-api-access-lk9fn") pod "8fb89c00-c708-4224-89e4-dbbc1fb06742" (UID: "8fb89c00-c708-4224-89e4-dbbc1fb06742"). InnerVolumeSpecName "kube-api-access-lk9fn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:37:09 crc kubenswrapper[4558]: I0120 18:37:09.019531 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8fb89c00-c708-4224-89e4-dbbc1fb06742-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "8fb89c00-c708-4224-89e4-dbbc1fb06742" (UID: "8fb89c00-c708-4224-89e4-dbbc1fb06742"). InnerVolumeSpecName "fernet-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:37:09 crc kubenswrapper[4558]: I0120 18:37:09.030175 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8fb89c00-c708-4224-89e4-dbbc1fb06742-config-data" (OuterVolumeSpecName: "config-data") pod "8fb89c00-c708-4224-89e4-dbbc1fb06742" (UID: "8fb89c00-c708-4224-89e4-dbbc1fb06742"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:37:09 crc kubenswrapper[4558]: I0120 18:37:09.030783 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/82f068bd-1ad6-4a63-85d0-8819c5391a5f-config-data" (OuterVolumeSpecName: "config-data") pod "82f068bd-1ad6-4a63-85d0-8819c5391a5f" (UID: "82f068bd-1ad6-4a63-85d0-8819c5391a5f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:37:09 crc kubenswrapper[4558]: I0120 18:37:09.113314 4558 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8fb89c00-c708-4224-89e4-dbbc1fb06742-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 20 18:37:09 crc kubenswrapper[4558]: I0120 18:37:09.113351 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8fb89c00-c708-4224-89e4-dbbc1fb06742-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 18:37:09 crc kubenswrapper[4558]: I0120 18:37:09.113362 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/82f068bd-1ad6-4a63-85d0-8819c5391a5f-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 18:37:09 crc kubenswrapper[4558]: I0120 18:37:09.113374 4558 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8fb89c00-c708-4224-89e4-dbbc1fb06742-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 20 18:37:09 crc kubenswrapper[4558]: I0120 18:37:09.113384 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/82f068bd-1ad6-4a63-85d0-8819c5391a5f-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 18:37:09 crc kubenswrapper[4558]: I0120 18:37:09.113401 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gh6b7\" (UniqueName: \"kubernetes.io/projected/82f068bd-1ad6-4a63-85d0-8819c5391a5f-kube-api-access-gh6b7\") on node \"crc\" DevicePath \"\"" Jan 20 18:37:09 crc kubenswrapper[4558]: I0120 18:37:09.113411 4558 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/82f068bd-1ad6-4a63-85d0-8819c5391a5f-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 20 18:37:09 crc kubenswrapper[4558]: I0120 18:37:09.113422 4558 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/82f068bd-1ad6-4a63-85d0-8819c5391a5f-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 20 18:37:09 crc kubenswrapper[4558]: I0120 18:37:09.113430 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8fb89c00-c708-4224-89e4-dbbc1fb06742-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 18:37:09 crc kubenswrapper[4558]: I0120 18:37:09.113440 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lk9fn\" (UniqueName: \"kubernetes.io/projected/8fb89c00-c708-4224-89e4-dbbc1fb06742-kube-api-access-lk9fn\") on node \"crc\" DevicePath \"\"" Jan 20 
18:37:09 crc kubenswrapper[4558]: I0120 18:37:09.352511 4558 generic.go:334] "Generic (PLEG): container finished" podID="82f068bd-1ad6-4a63-85d0-8819c5391a5f" containerID="fc431d7a7221a30b66c622b03b93905034e264232e4f2973eab309fc5a3a48ec" exitCode=0 Jan 20 18:37:09 crc kubenswrapper[4558]: I0120 18:37:09.352565 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-5799488558-6tz7z" Jan 20 18:37:09 crc kubenswrapper[4558]: I0120 18:37:09.352599 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-5799488558-6tz7z" event={"ID":"82f068bd-1ad6-4a63-85d0-8819c5391a5f","Type":"ContainerDied","Data":"fc431d7a7221a30b66c622b03b93905034e264232e4f2973eab309fc5a3a48ec"} Jan 20 18:37:09 crc kubenswrapper[4558]: I0120 18:37:09.352631 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-5799488558-6tz7z" event={"ID":"82f068bd-1ad6-4a63-85d0-8819c5391a5f","Type":"ContainerDied","Data":"4bf089c5e8b20814c6830af6ed8224f1104015cd428258b4c7ecd6a4f422854a"} Jan 20 18:37:09 crc kubenswrapper[4558]: I0120 18:37:09.352653 4558 scope.go:117] "RemoveContainer" containerID="fc431d7a7221a30b66c622b03b93905034e264232e4f2973eab309fc5a3a48ec" Jan 20 18:37:09 crc kubenswrapper[4558]: I0120 18:37:09.354597 4558 generic.go:334] "Generic (PLEG): container finished" podID="8fb89c00-c708-4224-89e4-dbbc1fb06742" containerID="b413b14fa137dd40e372017d8bc85eb1c85de7302534150950a49976ee39206f" exitCode=0 Jan 20 18:37:09 crc kubenswrapper[4558]: I0120 18:37:09.354626 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-5799488558-hpdsd" event={"ID":"8fb89c00-c708-4224-89e4-dbbc1fb06742","Type":"ContainerDied","Data":"b413b14fa137dd40e372017d8bc85eb1c85de7302534150950a49976ee39206f"} Jan 20 18:37:09 crc kubenswrapper[4558]: I0120 18:37:09.354639 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-5799488558-hpdsd" Jan 20 18:37:09 crc kubenswrapper[4558]: I0120 18:37:09.354645 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-5799488558-hpdsd" event={"ID":"8fb89c00-c708-4224-89e4-dbbc1fb06742","Type":"ContainerDied","Data":"6b5506ea895692e63345f9274697d6dca37744c5738277c1475e535790af90f3"} Jan 20 18:37:09 crc kubenswrapper[4558]: I0120 18:37:09.377214 4558 scope.go:117] "RemoveContainer" containerID="fc431d7a7221a30b66c622b03b93905034e264232e4f2973eab309fc5a3a48ec" Jan 20 18:37:09 crc kubenswrapper[4558]: E0120 18:37:09.377935 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fc431d7a7221a30b66c622b03b93905034e264232e4f2973eab309fc5a3a48ec\": container with ID starting with fc431d7a7221a30b66c622b03b93905034e264232e4f2973eab309fc5a3a48ec not found: ID does not exist" containerID="fc431d7a7221a30b66c622b03b93905034e264232e4f2973eab309fc5a3a48ec" Jan 20 18:37:09 crc kubenswrapper[4558]: I0120 18:37:09.377984 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fc431d7a7221a30b66c622b03b93905034e264232e4f2973eab309fc5a3a48ec"} err="failed to get container status \"fc431d7a7221a30b66c622b03b93905034e264232e4f2973eab309fc5a3a48ec\": rpc error: code = NotFound desc = could not find container \"fc431d7a7221a30b66c622b03b93905034e264232e4f2973eab309fc5a3a48ec\": container with ID starting with fc431d7a7221a30b66c622b03b93905034e264232e4f2973eab309fc5a3a48ec not found: ID does not exist" Jan 20 18:37:09 crc kubenswrapper[4558]: I0120 18:37:09.378037 4558 scope.go:117] "RemoveContainer" containerID="b413b14fa137dd40e372017d8bc85eb1c85de7302534150950a49976ee39206f" Jan 20 18:37:09 crc kubenswrapper[4558]: I0120 18:37:09.388041 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone-5799488558-6tz7z"] Jan 20 18:37:09 crc kubenswrapper[4558]: I0120 18:37:09.393435 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/keystone-5799488558-6tz7z"] Jan 20 18:37:09 crc kubenswrapper[4558]: I0120 18:37:09.397464 4558 scope.go:117] "RemoveContainer" containerID="b413b14fa137dd40e372017d8bc85eb1c85de7302534150950a49976ee39206f" Jan 20 18:37:09 crc kubenswrapper[4558]: E0120 18:37:09.397866 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b413b14fa137dd40e372017d8bc85eb1c85de7302534150950a49976ee39206f\": container with ID starting with b413b14fa137dd40e372017d8bc85eb1c85de7302534150950a49976ee39206f not found: ID does not exist" containerID="b413b14fa137dd40e372017d8bc85eb1c85de7302534150950a49976ee39206f" Jan 20 18:37:09 crc kubenswrapper[4558]: I0120 18:37:09.397909 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b413b14fa137dd40e372017d8bc85eb1c85de7302534150950a49976ee39206f"} err="failed to get container status \"b413b14fa137dd40e372017d8bc85eb1c85de7302534150950a49976ee39206f\": rpc error: code = NotFound desc = could not find container \"b413b14fa137dd40e372017d8bc85eb1c85de7302534150950a49976ee39206f\": container with ID starting with b413b14fa137dd40e372017d8bc85eb1c85de7302534150950a49976ee39206f not found: ID does not exist" Jan 20 18:37:09 crc kubenswrapper[4558]: I0120 18:37:09.398763 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["keystone-kuttl-tests/keystone-5799488558-hpdsd"] Jan 20 18:37:09 crc kubenswrapper[4558]: I0120 18:37:09.402510 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/keystone-5799488558-hpdsd"] Jan 20 18:37:09 crc kubenswrapper[4558]: I0120 18:37:09.901949 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-5799488558-xt6s4" Jan 20 18:37:10 crc kubenswrapper[4558]: I0120 18:37:10.026791 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-79l4q\" (UniqueName: \"kubernetes.io/projected/47df1033-766b-4ae5-8521-14d9db3b7046-kube-api-access-79l4q\") pod \"47df1033-766b-4ae5-8521-14d9db3b7046\" (UID: \"47df1033-766b-4ae5-8521-14d9db3b7046\") " Jan 20 18:37:10 crc kubenswrapper[4558]: I0120 18:37:10.026872 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47df1033-766b-4ae5-8521-14d9db3b7046-config-data\") pod \"47df1033-766b-4ae5-8521-14d9db3b7046\" (UID: \"47df1033-766b-4ae5-8521-14d9db3b7046\") " Jan 20 18:37:10 crc kubenswrapper[4558]: I0120 18:37:10.027081 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/47df1033-766b-4ae5-8521-14d9db3b7046-fernet-keys\") pod \"47df1033-766b-4ae5-8521-14d9db3b7046\" (UID: \"47df1033-766b-4ae5-8521-14d9db3b7046\") " Jan 20 18:37:10 crc kubenswrapper[4558]: I0120 18:37:10.027137 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/47df1033-766b-4ae5-8521-14d9db3b7046-scripts\") pod \"47df1033-766b-4ae5-8521-14d9db3b7046\" (UID: \"47df1033-766b-4ae5-8521-14d9db3b7046\") " Jan 20 18:37:10 crc kubenswrapper[4558]: I0120 18:37:10.027228 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/47df1033-766b-4ae5-8521-14d9db3b7046-credential-keys\") pod \"47df1033-766b-4ae5-8521-14d9db3b7046\" (UID: \"47df1033-766b-4ae5-8521-14d9db3b7046\") " Jan 20 18:37:10 crc kubenswrapper[4558]: I0120 18:37:10.031837 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/47df1033-766b-4ae5-8521-14d9db3b7046-scripts" (OuterVolumeSpecName: "scripts") pod "47df1033-766b-4ae5-8521-14d9db3b7046" (UID: "47df1033-766b-4ae5-8521-14d9db3b7046"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:37:10 crc kubenswrapper[4558]: I0120 18:37:10.032504 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/47df1033-766b-4ae5-8521-14d9db3b7046-kube-api-access-79l4q" (OuterVolumeSpecName: "kube-api-access-79l4q") pod "47df1033-766b-4ae5-8521-14d9db3b7046" (UID: "47df1033-766b-4ae5-8521-14d9db3b7046"). InnerVolumeSpecName "kube-api-access-79l4q". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:37:10 crc kubenswrapper[4558]: I0120 18:37:10.032910 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/47df1033-766b-4ae5-8521-14d9db3b7046-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "47df1033-766b-4ae5-8521-14d9db3b7046" (UID: "47df1033-766b-4ae5-8521-14d9db3b7046"). InnerVolumeSpecName "credential-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:37:10 crc kubenswrapper[4558]: I0120 18:37:10.032999 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/47df1033-766b-4ae5-8521-14d9db3b7046-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "47df1033-766b-4ae5-8521-14d9db3b7046" (UID: "47df1033-766b-4ae5-8521-14d9db3b7046"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:37:10 crc kubenswrapper[4558]: I0120 18:37:10.046040 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/47df1033-766b-4ae5-8521-14d9db3b7046-config-data" (OuterVolumeSpecName: "config-data") pod "47df1033-766b-4ae5-8521-14d9db3b7046" (UID: "47df1033-766b-4ae5-8521-14d9db3b7046"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:37:10 crc kubenswrapper[4558]: I0120 18:37:10.129488 4558 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/47df1033-766b-4ae5-8521-14d9db3b7046-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 20 18:37:10 crc kubenswrapper[4558]: I0120 18:37:10.129529 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/47df1033-766b-4ae5-8521-14d9db3b7046-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 18:37:10 crc kubenswrapper[4558]: I0120 18:37:10.129541 4558 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/47df1033-766b-4ae5-8521-14d9db3b7046-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 20 18:37:10 crc kubenswrapper[4558]: I0120 18:37:10.129557 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-79l4q\" (UniqueName: \"kubernetes.io/projected/47df1033-766b-4ae5-8521-14d9db3b7046-kube-api-access-79l4q\") on node \"crc\" DevicePath \"\"" Jan 20 18:37:10 crc kubenswrapper[4558]: I0120 18:37:10.129566 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47df1033-766b-4ae5-8521-14d9db3b7046-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 18:37:10 crc kubenswrapper[4558]: I0120 18:37:10.365690 4558 generic.go:334] "Generic (PLEG): container finished" podID="47df1033-766b-4ae5-8521-14d9db3b7046" containerID="96aa78614b787ee0909ad8010f06ddf447cf59190c03d561fa2bf9ce4c3207e1" exitCode=0 Jan 20 18:37:10 crc kubenswrapper[4558]: I0120 18:37:10.365755 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-5799488558-xt6s4" event={"ID":"47df1033-766b-4ae5-8521-14d9db3b7046","Type":"ContainerDied","Data":"96aa78614b787ee0909ad8010f06ddf447cf59190c03d561fa2bf9ce4c3207e1"} Jan 20 18:37:10 crc kubenswrapper[4558]: I0120 18:37:10.365826 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-5799488558-xt6s4" event={"ID":"47df1033-766b-4ae5-8521-14d9db3b7046","Type":"ContainerDied","Data":"08f7831cc00b235a3ebd2b7a2f41b347b3565a4377643b18fd40a20929b17b7b"} Jan 20 18:37:10 crc kubenswrapper[4558]: I0120 18:37:10.365776 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-5799488558-xt6s4" Jan 20 18:37:10 crc kubenswrapper[4558]: I0120 18:37:10.365881 4558 scope.go:117] "RemoveContainer" containerID="96aa78614b787ee0909ad8010f06ddf447cf59190c03d561fa2bf9ce4c3207e1" Jan 20 18:37:10 crc kubenswrapper[4558]: I0120 18:37:10.391823 4558 scope.go:117] "RemoveContainer" containerID="96aa78614b787ee0909ad8010f06ddf447cf59190c03d561fa2bf9ce4c3207e1" Jan 20 18:37:10 crc kubenswrapper[4558]: E0120 18:37:10.392536 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"96aa78614b787ee0909ad8010f06ddf447cf59190c03d561fa2bf9ce4c3207e1\": container with ID starting with 96aa78614b787ee0909ad8010f06ddf447cf59190c03d561fa2bf9ce4c3207e1 not found: ID does not exist" containerID="96aa78614b787ee0909ad8010f06ddf447cf59190c03d561fa2bf9ce4c3207e1" Jan 20 18:37:10 crc kubenswrapper[4558]: I0120 18:37:10.392583 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"96aa78614b787ee0909ad8010f06ddf447cf59190c03d561fa2bf9ce4c3207e1"} err="failed to get container status \"96aa78614b787ee0909ad8010f06ddf447cf59190c03d561fa2bf9ce4c3207e1\": rpc error: code = NotFound desc = could not find container \"96aa78614b787ee0909ad8010f06ddf447cf59190c03d561fa2bf9ce4c3207e1\": container with ID starting with 96aa78614b787ee0909ad8010f06ddf447cf59190c03d561fa2bf9ce4c3207e1 not found: ID does not exist" Jan 20 18:37:10 crc kubenswrapper[4558]: I0120 18:37:10.399355 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone-5799488558-xt6s4"] Jan 20 18:37:10 crc kubenswrapper[4558]: I0120 18:37:10.403308 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/keystone-5799488558-xt6s4"] Jan 20 18:37:10 crc kubenswrapper[4558]: I0120 18:37:10.577285 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="47df1033-766b-4ae5-8521-14d9db3b7046" path="/var/lib/kubelet/pods/47df1033-766b-4ae5-8521-14d9db3b7046/volumes" Jan 20 18:37:10 crc kubenswrapper[4558]: I0120 18:37:10.578150 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="82f068bd-1ad6-4a63-85d0-8819c5391a5f" path="/var/lib/kubelet/pods/82f068bd-1ad6-4a63-85d0-8819c5391a5f/volumes" Jan 20 18:37:10 crc kubenswrapper[4558]: I0120 18:37:10.578649 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8fb89c00-c708-4224-89e4-dbbc1fb06742" path="/var/lib/kubelet/pods/8fb89c00-c708-4224-89e4-dbbc1fb06742/volumes" Jan 20 18:37:10 crc kubenswrapper[4558]: I0120 18:37:10.665265 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone-db-sync-2vbxw"] Jan 20 18:37:10 crc kubenswrapper[4558]: I0120 18:37:10.666468 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/keystone-db-sync-2vbxw"] Jan 20 18:37:10 crc kubenswrapper[4558]: I0120 18:37:10.670305 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone-bootstrap-57fzg"] Jan 20 18:37:10 crc kubenswrapper[4558]: I0120 18:37:10.675762 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/keystone-bootstrap-57fzg"] Jan 20 18:37:10 crc kubenswrapper[4558]: I0120 18:37:10.726685 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/keystone607b-account-delete-wfvtg"] Jan 20 18:37:10 crc kubenswrapper[4558]: E0120 18:37:10.726997 4558 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="82f068bd-1ad6-4a63-85d0-8819c5391a5f" containerName="keystone-api" Jan 20 18:37:10 crc kubenswrapper[4558]: I0120 18:37:10.727018 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="82f068bd-1ad6-4a63-85d0-8819c5391a5f" containerName="keystone-api" Jan 20 18:37:10 crc kubenswrapper[4558]: E0120 18:37:10.727036 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8fb89c00-c708-4224-89e4-dbbc1fb06742" containerName="keystone-api" Jan 20 18:37:10 crc kubenswrapper[4558]: I0120 18:37:10.727054 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8fb89c00-c708-4224-89e4-dbbc1fb06742" containerName="keystone-api" Jan 20 18:37:10 crc kubenswrapper[4558]: E0120 18:37:10.727080 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47df1033-766b-4ae5-8521-14d9db3b7046" containerName="keystone-api" Jan 20 18:37:10 crc kubenswrapper[4558]: I0120 18:37:10.727088 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="47df1033-766b-4ae5-8521-14d9db3b7046" containerName="keystone-api" Jan 20 18:37:10 crc kubenswrapper[4558]: I0120 18:37:10.727218 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="47df1033-766b-4ae5-8521-14d9db3b7046" containerName="keystone-api" Jan 20 18:37:10 crc kubenswrapper[4558]: I0120 18:37:10.727239 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8fb89c00-c708-4224-89e4-dbbc1fb06742" containerName="keystone-api" Jan 20 18:37:10 crc kubenswrapper[4558]: I0120 18:37:10.727248 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="82f068bd-1ad6-4a63-85d0-8819c5391a5f" containerName="keystone-api" Jan 20 18:37:10 crc kubenswrapper[4558]: I0120 18:37:10.727746 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone607b-account-delete-wfvtg" Jan 20 18:37:10 crc kubenswrapper[4558]: I0120 18:37:10.731198 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone607b-account-delete-wfvtg"] Jan 20 18:37:10 crc kubenswrapper[4558]: I0120 18:37:10.840533 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d63a001f-9e68-4aa5-b376-3b602ce11306-operator-scripts\") pod \"keystone607b-account-delete-wfvtg\" (UID: \"d63a001f-9e68-4aa5-b376-3b602ce11306\") " pod="keystone-kuttl-tests/keystone607b-account-delete-wfvtg" Jan 20 18:37:10 crc kubenswrapper[4558]: I0120 18:37:10.840653 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6l9n2\" (UniqueName: \"kubernetes.io/projected/d63a001f-9e68-4aa5-b376-3b602ce11306-kube-api-access-6l9n2\") pod \"keystone607b-account-delete-wfvtg\" (UID: \"d63a001f-9e68-4aa5-b376-3b602ce11306\") " pod="keystone-kuttl-tests/keystone607b-account-delete-wfvtg" Jan 20 18:37:10 crc kubenswrapper[4558]: I0120 18:37:10.941869 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d63a001f-9e68-4aa5-b376-3b602ce11306-operator-scripts\") pod \"keystone607b-account-delete-wfvtg\" (UID: \"d63a001f-9e68-4aa5-b376-3b602ce11306\") " pod="keystone-kuttl-tests/keystone607b-account-delete-wfvtg" Jan 20 18:37:10 crc kubenswrapper[4558]: I0120 18:37:10.941949 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6l9n2\" (UniqueName: \"kubernetes.io/projected/d63a001f-9e68-4aa5-b376-3b602ce11306-kube-api-access-6l9n2\") pod \"keystone607b-account-delete-wfvtg\" (UID: \"d63a001f-9e68-4aa5-b376-3b602ce11306\") " pod="keystone-kuttl-tests/keystone607b-account-delete-wfvtg" Jan 20 18:37:10 crc kubenswrapper[4558]: I0120 18:37:10.942708 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d63a001f-9e68-4aa5-b376-3b602ce11306-operator-scripts\") pod \"keystone607b-account-delete-wfvtg\" (UID: \"d63a001f-9e68-4aa5-b376-3b602ce11306\") " pod="keystone-kuttl-tests/keystone607b-account-delete-wfvtg" Jan 20 18:37:10 crc kubenswrapper[4558]: I0120 18:37:10.956014 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6l9n2\" (UniqueName: \"kubernetes.io/projected/d63a001f-9e68-4aa5-b376-3b602ce11306-kube-api-access-6l9n2\") pod \"keystone607b-account-delete-wfvtg\" (UID: \"d63a001f-9e68-4aa5-b376-3b602ce11306\") " pod="keystone-kuttl-tests/keystone607b-account-delete-wfvtg" Jan 20 18:37:11 crc kubenswrapper[4558]: I0120 18:37:11.042819 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone607b-account-delete-wfvtg" Jan 20 18:37:11 crc kubenswrapper[4558]: I0120 18:37:11.413994 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone607b-account-delete-wfvtg"] Jan 20 18:37:12 crc kubenswrapper[4558]: I0120 18:37:12.389278 4558 generic.go:334] "Generic (PLEG): container finished" podID="d63a001f-9e68-4aa5-b376-3b602ce11306" containerID="f5b5140697a9176a9492b0ba7d37f9a6763522f323fc00bb34d4f8d0e8e5fd36" exitCode=0 Jan 20 18:37:12 crc kubenswrapper[4558]: I0120 18:37:12.389387 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone607b-account-delete-wfvtg" event={"ID":"d63a001f-9e68-4aa5-b376-3b602ce11306","Type":"ContainerDied","Data":"f5b5140697a9176a9492b0ba7d37f9a6763522f323fc00bb34d4f8d0e8e5fd36"} Jan 20 18:37:12 crc kubenswrapper[4558]: I0120 18:37:12.390552 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone607b-account-delete-wfvtg" event={"ID":"d63a001f-9e68-4aa5-b376-3b602ce11306","Type":"ContainerStarted","Data":"77c20c6a102da913d0103541f37abc730083e7b214877c54e73ade752ee38aaf"} Jan 20 18:37:12 crc kubenswrapper[4558]: I0120 18:37:12.574542 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="68318110-42ee-48a8-ad03-e2f75f291a09" path="/var/lib/kubelet/pods/68318110-42ee-48a8-ad03-e2f75f291a09/volumes" Jan 20 18:37:12 crc kubenswrapper[4558]: I0120 18:37:12.575058 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="79a95494-e161-4141-8121-92209c479a12" path="/var/lib/kubelet/pods/79a95494-e161-4141-8121-92209c479a12/volumes" Jan 20 18:37:13 crc kubenswrapper[4558]: I0120 18:37:13.651583 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone607b-account-delete-wfvtg" Jan 20 18:37:13 crc kubenswrapper[4558]: I0120 18:37:13.681516 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6l9n2\" (UniqueName: \"kubernetes.io/projected/d63a001f-9e68-4aa5-b376-3b602ce11306-kube-api-access-6l9n2\") pod \"d63a001f-9e68-4aa5-b376-3b602ce11306\" (UID: \"d63a001f-9e68-4aa5-b376-3b602ce11306\") " Jan 20 18:37:13 crc kubenswrapper[4558]: I0120 18:37:13.687082 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d63a001f-9e68-4aa5-b376-3b602ce11306-kube-api-access-6l9n2" (OuterVolumeSpecName: "kube-api-access-6l9n2") pod "d63a001f-9e68-4aa5-b376-3b602ce11306" (UID: "d63a001f-9e68-4aa5-b376-3b602ce11306"). InnerVolumeSpecName "kube-api-access-6l9n2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:37:13 crc kubenswrapper[4558]: I0120 18:37:13.783094 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d63a001f-9e68-4aa5-b376-3b602ce11306-operator-scripts\") pod \"d63a001f-9e68-4aa5-b376-3b602ce11306\" (UID: \"d63a001f-9e68-4aa5-b376-3b602ce11306\") " Jan 20 18:37:13 crc kubenswrapper[4558]: I0120 18:37:13.783992 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d63a001f-9e68-4aa5-b376-3b602ce11306-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d63a001f-9e68-4aa5-b376-3b602ce11306" (UID: "d63a001f-9e68-4aa5-b376-3b602ce11306"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:37:13 crc kubenswrapper[4558]: I0120 18:37:13.784229 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6l9n2\" (UniqueName: \"kubernetes.io/projected/d63a001f-9e68-4aa5-b376-3b602ce11306-kube-api-access-6l9n2\") on node \"crc\" DevicePath \"\"" Jan 20 18:37:13 crc kubenswrapper[4558]: I0120 18:37:13.886892 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d63a001f-9e68-4aa5-b376-3b602ce11306-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 18:37:14 crc kubenswrapper[4558]: I0120 18:37:14.407971 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone607b-account-delete-wfvtg" event={"ID":"d63a001f-9e68-4aa5-b376-3b602ce11306","Type":"ContainerDied","Data":"77c20c6a102da913d0103541f37abc730083e7b214877c54e73ade752ee38aaf"} Jan 20 18:37:14 crc kubenswrapper[4558]: I0120 18:37:14.408022 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="77c20c6a102da913d0103541f37abc730083e7b214877c54e73ade752ee38aaf" Jan 20 18:37:14 crc kubenswrapper[4558]: I0120 18:37:14.408064 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone607b-account-delete-wfvtg" Jan 20 18:37:15 crc kubenswrapper[4558]: I0120 18:37:15.753564 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone-db-create-bfkdh"] Jan 20 18:37:15 crc kubenswrapper[4558]: I0120 18:37:15.761995 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/keystone-db-create-bfkdh"] Jan 20 18:37:15 crc kubenswrapper[4558]: I0120 18:37:15.767764 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone-607b-account-create-update-b2gfm"] Jan 20 18:37:15 crc kubenswrapper[4558]: I0120 18:37:15.771910 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone607b-account-delete-wfvtg"] Jan 20 18:37:15 crc kubenswrapper[4558]: I0120 18:37:15.778096 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/keystone-607b-account-create-update-b2gfm"] Jan 20 18:37:15 crc kubenswrapper[4558]: I0120 18:37:15.785394 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/keystone607b-account-delete-wfvtg"] Jan 20 18:37:15 crc kubenswrapper[4558]: I0120 18:37:15.841622 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/keystone-db-create-mbtxd"] Jan 20 18:37:15 crc kubenswrapper[4558]: E0120 18:37:15.841906 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d63a001f-9e68-4aa5-b376-3b602ce11306" containerName="mariadb-account-delete" Jan 20 18:37:15 crc kubenswrapper[4558]: I0120 18:37:15.841922 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d63a001f-9e68-4aa5-b376-3b602ce11306" containerName="mariadb-account-delete" Jan 20 18:37:15 crc kubenswrapper[4558]: I0120 18:37:15.842100 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d63a001f-9e68-4aa5-b376-3b602ce11306" containerName="mariadb-account-delete" Jan 20 18:37:15 crc kubenswrapper[4558]: I0120 18:37:15.842638 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-db-create-mbtxd" Jan 20 18:37:15 crc kubenswrapper[4558]: I0120 18:37:15.847986 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-db-create-mbtxd"] Jan 20 18:37:15 crc kubenswrapper[4558]: I0120 18:37:15.921562 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6xnlt\" (UniqueName: \"kubernetes.io/projected/a933e619-6193-4e4e-966d-1d51435ddcc2-kube-api-access-6xnlt\") pod \"keystone-db-create-mbtxd\" (UID: \"a933e619-6193-4e4e-966d-1d51435ddcc2\") " pod="keystone-kuttl-tests/keystone-db-create-mbtxd" Jan 20 18:37:15 crc kubenswrapper[4558]: I0120 18:37:15.921852 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a933e619-6193-4e4e-966d-1d51435ddcc2-operator-scripts\") pod \"keystone-db-create-mbtxd\" (UID: \"a933e619-6193-4e4e-966d-1d51435ddcc2\") " pod="keystone-kuttl-tests/keystone-db-create-mbtxd" Jan 20 18:37:15 crc kubenswrapper[4558]: I0120 18:37:15.950501 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/keystone-66d9-account-create-update-tp4cf"] Jan 20 18:37:15 crc kubenswrapper[4558]: I0120 18:37:15.951865 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-66d9-account-create-update-tp4cf" Jan 20 18:37:15 crc kubenswrapper[4558]: I0120 18:37:15.954363 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-db-secret" Jan 20 18:37:15 crc kubenswrapper[4558]: I0120 18:37:15.956107 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-66d9-account-create-update-tp4cf"] Jan 20 18:37:16 crc kubenswrapper[4558]: I0120 18:37:16.023871 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/36aa4806-4150-414e-9348-da78e951a8c5-operator-scripts\") pod \"keystone-66d9-account-create-update-tp4cf\" (UID: \"36aa4806-4150-414e-9348-da78e951a8c5\") " pod="keystone-kuttl-tests/keystone-66d9-account-create-update-tp4cf" Jan 20 18:37:16 crc kubenswrapper[4558]: I0120 18:37:16.023942 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rqdlt\" (UniqueName: \"kubernetes.io/projected/36aa4806-4150-414e-9348-da78e951a8c5-kube-api-access-rqdlt\") pod \"keystone-66d9-account-create-update-tp4cf\" (UID: \"36aa4806-4150-414e-9348-da78e951a8c5\") " pod="keystone-kuttl-tests/keystone-66d9-account-create-update-tp4cf" Jan 20 18:37:16 crc kubenswrapper[4558]: I0120 18:37:16.024084 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a933e619-6193-4e4e-966d-1d51435ddcc2-operator-scripts\") pod \"keystone-db-create-mbtxd\" (UID: \"a933e619-6193-4e4e-966d-1d51435ddcc2\") " pod="keystone-kuttl-tests/keystone-db-create-mbtxd" Jan 20 18:37:16 crc kubenswrapper[4558]: I0120 18:37:16.024224 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6xnlt\" (UniqueName: \"kubernetes.io/projected/a933e619-6193-4e4e-966d-1d51435ddcc2-kube-api-access-6xnlt\") pod \"keystone-db-create-mbtxd\" (UID: \"a933e619-6193-4e4e-966d-1d51435ddcc2\") " pod="keystone-kuttl-tests/keystone-db-create-mbtxd" 
Jan 20 18:37:16 crc kubenswrapper[4558]: I0120 18:37:16.025357 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a933e619-6193-4e4e-966d-1d51435ddcc2-operator-scripts\") pod \"keystone-db-create-mbtxd\" (UID: \"a933e619-6193-4e4e-966d-1d51435ddcc2\") " pod="keystone-kuttl-tests/keystone-db-create-mbtxd" Jan 20 18:37:16 crc kubenswrapper[4558]: I0120 18:37:16.041841 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6xnlt\" (UniqueName: \"kubernetes.io/projected/a933e619-6193-4e4e-966d-1d51435ddcc2-kube-api-access-6xnlt\") pod \"keystone-db-create-mbtxd\" (UID: \"a933e619-6193-4e4e-966d-1d51435ddcc2\") " pod="keystone-kuttl-tests/keystone-db-create-mbtxd" Jan 20 18:37:16 crc kubenswrapper[4558]: I0120 18:37:16.126683 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/36aa4806-4150-414e-9348-da78e951a8c5-operator-scripts\") pod \"keystone-66d9-account-create-update-tp4cf\" (UID: \"36aa4806-4150-414e-9348-da78e951a8c5\") " pod="keystone-kuttl-tests/keystone-66d9-account-create-update-tp4cf" Jan 20 18:37:16 crc kubenswrapper[4558]: I0120 18:37:16.126753 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rqdlt\" (UniqueName: \"kubernetes.io/projected/36aa4806-4150-414e-9348-da78e951a8c5-kube-api-access-rqdlt\") pod \"keystone-66d9-account-create-update-tp4cf\" (UID: \"36aa4806-4150-414e-9348-da78e951a8c5\") " pod="keystone-kuttl-tests/keystone-66d9-account-create-update-tp4cf" Jan 20 18:37:16 crc kubenswrapper[4558]: I0120 18:37:16.127537 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/36aa4806-4150-414e-9348-da78e951a8c5-operator-scripts\") pod \"keystone-66d9-account-create-update-tp4cf\" (UID: \"36aa4806-4150-414e-9348-da78e951a8c5\") " pod="keystone-kuttl-tests/keystone-66d9-account-create-update-tp4cf" Jan 20 18:37:16 crc kubenswrapper[4558]: I0120 18:37:16.142473 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rqdlt\" (UniqueName: \"kubernetes.io/projected/36aa4806-4150-414e-9348-da78e951a8c5-kube-api-access-rqdlt\") pod \"keystone-66d9-account-create-update-tp4cf\" (UID: \"36aa4806-4150-414e-9348-da78e951a8c5\") " pod="keystone-kuttl-tests/keystone-66d9-account-create-update-tp4cf" Jan 20 18:37:16 crc kubenswrapper[4558]: I0120 18:37:16.156744 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-db-create-mbtxd" Jan 20 18:37:16 crc kubenswrapper[4558]: I0120 18:37:16.267601 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-66d9-account-create-update-tp4cf" Jan 20 18:37:16 crc kubenswrapper[4558]: I0120 18:37:16.576213 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="07c70dfc-1b41-44ce-8bb1-015cc6dbedae" path="/var/lib/kubelet/pods/07c70dfc-1b41-44ce-8bb1-015cc6dbedae/volumes" Jan 20 18:37:16 crc kubenswrapper[4558]: I0120 18:37:16.577061 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="98d248b3-2b4b-4707-9cf6-ee31015ed622" path="/var/lib/kubelet/pods/98d248b3-2b4b-4707-9cf6-ee31015ed622/volumes" Jan 20 18:37:16 crc kubenswrapper[4558]: I0120 18:37:16.577519 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d63a001f-9e68-4aa5-b376-3b602ce11306" path="/var/lib/kubelet/pods/d63a001f-9e68-4aa5-b376-3b602ce11306/volumes" Jan 20 18:37:16 crc kubenswrapper[4558]: I0120 18:37:16.586570 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-db-create-mbtxd"] Jan 20 18:37:16 crc kubenswrapper[4558]: I0120 18:37:16.635239 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-66d9-account-create-update-tp4cf"] Jan 20 18:37:16 crc kubenswrapper[4558]: W0120 18:37:16.643153 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod36aa4806_4150_414e_9348_da78e951a8c5.slice/crio-1896fc123e4b81e46829a9b7bc31f434f0b32854e26aea39c9038fb04a99cbae WatchSource:0}: Error finding container 1896fc123e4b81e46829a9b7bc31f434f0b32854e26aea39c9038fb04a99cbae: Status 404 returned error can't find the container with id 1896fc123e4b81e46829a9b7bc31f434f0b32854e26aea39c9038fb04a99cbae Jan 20 18:37:17 crc kubenswrapper[4558]: I0120 18:37:17.435908 4558 generic.go:334] "Generic (PLEG): container finished" podID="36aa4806-4150-414e-9348-da78e951a8c5" containerID="f93f4a4a8ecbebc7a1ebd135ed58a49348e98e9e2c4ced98dee0206760d3814a" exitCode=0 Jan 20 18:37:17 crc kubenswrapper[4558]: I0120 18:37:17.436017 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-66d9-account-create-update-tp4cf" event={"ID":"36aa4806-4150-414e-9348-da78e951a8c5","Type":"ContainerDied","Data":"f93f4a4a8ecbebc7a1ebd135ed58a49348e98e9e2c4ced98dee0206760d3814a"} Jan 20 18:37:17 crc kubenswrapper[4558]: I0120 18:37:17.436465 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-66d9-account-create-update-tp4cf" event={"ID":"36aa4806-4150-414e-9348-da78e951a8c5","Type":"ContainerStarted","Data":"1896fc123e4b81e46829a9b7bc31f434f0b32854e26aea39c9038fb04a99cbae"} Jan 20 18:37:17 crc kubenswrapper[4558]: I0120 18:37:17.438471 4558 generic.go:334] "Generic (PLEG): container finished" podID="a933e619-6193-4e4e-966d-1d51435ddcc2" containerID="f5faa572a4cebb633298a178a235ff4593fc7536a08431df274c15d41c57e10e" exitCode=0 Jan 20 18:37:17 crc kubenswrapper[4558]: I0120 18:37:17.438529 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-db-create-mbtxd" event={"ID":"a933e619-6193-4e4e-966d-1d51435ddcc2","Type":"ContainerDied","Data":"f5faa572a4cebb633298a178a235ff4593fc7536a08431df274c15d41c57e10e"} Jan 20 18:37:17 crc kubenswrapper[4558]: I0120 18:37:17.438558 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-db-create-mbtxd" 
event={"ID":"a933e619-6193-4e4e-966d-1d51435ddcc2","Type":"ContainerStarted","Data":"25676b8b931ae593e6f9659dc21b3d626fd9480e6837658117ed22ad9d07e6eb"} Jan 20 18:37:18 crc kubenswrapper[4558]: I0120 18:37:18.775265 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-db-create-mbtxd" Jan 20 18:37:18 crc kubenswrapper[4558]: I0120 18:37:18.779331 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-66d9-account-create-update-tp4cf" Jan 20 18:37:18 crc kubenswrapper[4558]: I0120 18:37:18.873549 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6xnlt\" (UniqueName: \"kubernetes.io/projected/a933e619-6193-4e4e-966d-1d51435ddcc2-kube-api-access-6xnlt\") pod \"a933e619-6193-4e4e-966d-1d51435ddcc2\" (UID: \"a933e619-6193-4e4e-966d-1d51435ddcc2\") " Jan 20 18:37:18 crc kubenswrapper[4558]: I0120 18:37:18.873723 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a933e619-6193-4e4e-966d-1d51435ddcc2-operator-scripts\") pod \"a933e619-6193-4e4e-966d-1d51435ddcc2\" (UID: \"a933e619-6193-4e4e-966d-1d51435ddcc2\") " Jan 20 18:37:18 crc kubenswrapper[4558]: I0120 18:37:18.873895 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rqdlt\" (UniqueName: \"kubernetes.io/projected/36aa4806-4150-414e-9348-da78e951a8c5-kube-api-access-rqdlt\") pod \"36aa4806-4150-414e-9348-da78e951a8c5\" (UID: \"36aa4806-4150-414e-9348-da78e951a8c5\") " Jan 20 18:37:18 crc kubenswrapper[4558]: I0120 18:37:18.873951 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/36aa4806-4150-414e-9348-da78e951a8c5-operator-scripts\") pod \"36aa4806-4150-414e-9348-da78e951a8c5\" (UID: \"36aa4806-4150-414e-9348-da78e951a8c5\") " Jan 20 18:37:18 crc kubenswrapper[4558]: I0120 18:37:18.874650 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a933e619-6193-4e4e-966d-1d51435ddcc2-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a933e619-6193-4e4e-966d-1d51435ddcc2" (UID: "a933e619-6193-4e4e-966d-1d51435ddcc2"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:37:18 crc kubenswrapper[4558]: I0120 18:37:18.874772 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/36aa4806-4150-414e-9348-da78e951a8c5-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "36aa4806-4150-414e-9348-da78e951a8c5" (UID: "36aa4806-4150-414e-9348-da78e951a8c5"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:37:18 crc kubenswrapper[4558]: I0120 18:37:18.880399 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/36aa4806-4150-414e-9348-da78e951a8c5-kube-api-access-rqdlt" (OuterVolumeSpecName: "kube-api-access-rqdlt") pod "36aa4806-4150-414e-9348-da78e951a8c5" (UID: "36aa4806-4150-414e-9348-da78e951a8c5"). InnerVolumeSpecName "kube-api-access-rqdlt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:37:18 crc kubenswrapper[4558]: I0120 18:37:18.881906 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a933e619-6193-4e4e-966d-1d51435ddcc2-kube-api-access-6xnlt" (OuterVolumeSpecName: "kube-api-access-6xnlt") pod "a933e619-6193-4e4e-966d-1d51435ddcc2" (UID: "a933e619-6193-4e4e-966d-1d51435ddcc2"). InnerVolumeSpecName "kube-api-access-6xnlt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:37:18 crc kubenswrapper[4558]: I0120 18:37:18.975113 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a933e619-6193-4e4e-966d-1d51435ddcc2-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 18:37:18 crc kubenswrapper[4558]: I0120 18:37:18.975149 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rqdlt\" (UniqueName: \"kubernetes.io/projected/36aa4806-4150-414e-9348-da78e951a8c5-kube-api-access-rqdlt\") on node \"crc\" DevicePath \"\"" Jan 20 18:37:18 crc kubenswrapper[4558]: I0120 18:37:18.975175 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/36aa4806-4150-414e-9348-da78e951a8c5-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 18:37:18 crc kubenswrapper[4558]: I0120 18:37:18.975186 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6xnlt\" (UniqueName: \"kubernetes.io/projected/a933e619-6193-4e4e-966d-1d51435ddcc2-kube-api-access-6xnlt\") on node \"crc\" DevicePath \"\"" Jan 20 18:37:19 crc kubenswrapper[4558]: I0120 18:37:19.453374 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-66d9-account-create-update-tp4cf" event={"ID":"36aa4806-4150-414e-9348-da78e951a8c5","Type":"ContainerDied","Data":"1896fc123e4b81e46829a9b7bc31f434f0b32854e26aea39c9038fb04a99cbae"} Jan 20 18:37:19 crc kubenswrapper[4558]: I0120 18:37:19.453788 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1896fc123e4b81e46829a9b7bc31f434f0b32854e26aea39c9038fb04a99cbae" Jan 20 18:37:19 crc kubenswrapper[4558]: I0120 18:37:19.453416 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-66d9-account-create-update-tp4cf" Jan 20 18:37:19 crc kubenswrapper[4558]: I0120 18:37:19.455733 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-db-create-mbtxd" event={"ID":"a933e619-6193-4e4e-966d-1d51435ddcc2","Type":"ContainerDied","Data":"25676b8b931ae593e6f9659dc21b3d626fd9480e6837658117ed22ad9d07e6eb"} Jan 20 18:37:19 crc kubenswrapper[4558]: I0120 18:37:19.455763 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="25676b8b931ae593e6f9659dc21b3d626fd9480e6837658117ed22ad9d07e6eb" Jan 20 18:37:19 crc kubenswrapper[4558]: I0120 18:37:19.455779 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-db-create-mbtxd" Jan 20 18:37:21 crc kubenswrapper[4558]: I0120 18:37:21.489816 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/keystone-db-sync-glpng"] Jan 20 18:37:21 crc kubenswrapper[4558]: E0120 18:37:21.490131 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36aa4806-4150-414e-9348-da78e951a8c5" containerName="mariadb-account-create-update" Jan 20 18:37:21 crc kubenswrapper[4558]: I0120 18:37:21.490145 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="36aa4806-4150-414e-9348-da78e951a8c5" containerName="mariadb-account-create-update" Jan 20 18:37:21 crc kubenswrapper[4558]: E0120 18:37:21.490157 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a933e619-6193-4e4e-966d-1d51435ddcc2" containerName="mariadb-database-create" Jan 20 18:37:21 crc kubenswrapper[4558]: I0120 18:37:21.490181 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a933e619-6193-4e4e-966d-1d51435ddcc2" containerName="mariadb-database-create" Jan 20 18:37:21 crc kubenswrapper[4558]: I0120 18:37:21.490314 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="36aa4806-4150-414e-9348-da78e951a8c5" containerName="mariadb-account-create-update" Jan 20 18:37:21 crc kubenswrapper[4558]: I0120 18:37:21.490329 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a933e619-6193-4e4e-966d-1d51435ddcc2" containerName="mariadb-database-create" Jan 20 18:37:21 crc kubenswrapper[4558]: I0120 18:37:21.490779 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-db-sync-glpng" Jan 20 18:37:21 crc kubenswrapper[4558]: I0120 18:37:21.492220 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone" Jan 20 18:37:21 crc kubenswrapper[4558]: I0120 18:37:21.492403 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-config-data" Jan 20 18:37:21 crc kubenswrapper[4558]: I0120 18:37:21.492500 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-scripts" Jan 20 18:37:21 crc kubenswrapper[4558]: I0120 18:37:21.492758 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"combined-ca-bundle" Jan 20 18:37:21 crc kubenswrapper[4558]: I0120 18:37:21.492912 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-keystone-dockercfg-jgbhp" Jan 20 18:37:21 crc kubenswrapper[4558]: I0120 18:37:21.498130 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-db-sync-glpng"] Jan 20 18:37:21 crc kubenswrapper[4558]: I0120 18:37:21.510861 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/291ca66d-5015-4468-8500-4200eba611b1-combined-ca-bundle\") pod \"keystone-db-sync-glpng\" (UID: \"291ca66d-5015-4468-8500-4200eba611b1\") " pod="keystone-kuttl-tests/keystone-db-sync-glpng" Jan 20 18:37:21 crc kubenswrapper[4558]: I0120 18:37:21.510920 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mnsqk\" (UniqueName: \"kubernetes.io/projected/291ca66d-5015-4468-8500-4200eba611b1-kube-api-access-mnsqk\") pod \"keystone-db-sync-glpng\" (UID: \"291ca66d-5015-4468-8500-4200eba611b1\") " 
pod="keystone-kuttl-tests/keystone-db-sync-glpng" Jan 20 18:37:21 crc kubenswrapper[4558]: I0120 18:37:21.511059 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/291ca66d-5015-4468-8500-4200eba611b1-config-data\") pod \"keystone-db-sync-glpng\" (UID: \"291ca66d-5015-4468-8500-4200eba611b1\") " pod="keystone-kuttl-tests/keystone-db-sync-glpng" Jan 20 18:37:21 crc kubenswrapper[4558]: I0120 18:37:21.612942 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mnsqk\" (UniqueName: \"kubernetes.io/projected/291ca66d-5015-4468-8500-4200eba611b1-kube-api-access-mnsqk\") pod \"keystone-db-sync-glpng\" (UID: \"291ca66d-5015-4468-8500-4200eba611b1\") " pod="keystone-kuttl-tests/keystone-db-sync-glpng" Jan 20 18:37:21 crc kubenswrapper[4558]: I0120 18:37:21.613931 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/291ca66d-5015-4468-8500-4200eba611b1-config-data\") pod \"keystone-db-sync-glpng\" (UID: \"291ca66d-5015-4468-8500-4200eba611b1\") " pod="keystone-kuttl-tests/keystone-db-sync-glpng" Jan 20 18:37:21 crc kubenswrapper[4558]: I0120 18:37:21.614627 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/291ca66d-5015-4468-8500-4200eba611b1-combined-ca-bundle\") pod \"keystone-db-sync-glpng\" (UID: \"291ca66d-5015-4468-8500-4200eba611b1\") " pod="keystone-kuttl-tests/keystone-db-sync-glpng" Jan 20 18:37:21 crc kubenswrapper[4558]: I0120 18:37:21.619962 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/291ca66d-5015-4468-8500-4200eba611b1-combined-ca-bundle\") pod \"keystone-db-sync-glpng\" (UID: \"291ca66d-5015-4468-8500-4200eba611b1\") " pod="keystone-kuttl-tests/keystone-db-sync-glpng" Jan 20 18:37:21 crc kubenswrapper[4558]: I0120 18:37:21.620365 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/291ca66d-5015-4468-8500-4200eba611b1-config-data\") pod \"keystone-db-sync-glpng\" (UID: \"291ca66d-5015-4468-8500-4200eba611b1\") " pod="keystone-kuttl-tests/keystone-db-sync-glpng" Jan 20 18:37:21 crc kubenswrapper[4558]: I0120 18:37:21.628527 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mnsqk\" (UniqueName: \"kubernetes.io/projected/291ca66d-5015-4468-8500-4200eba611b1-kube-api-access-mnsqk\") pod \"keystone-db-sync-glpng\" (UID: \"291ca66d-5015-4468-8500-4200eba611b1\") " pod="keystone-kuttl-tests/keystone-db-sync-glpng" Jan 20 18:37:21 crc kubenswrapper[4558]: I0120 18:37:21.808607 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-db-sync-glpng" Jan 20 18:37:22 crc kubenswrapper[4558]: I0120 18:37:22.191286 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-db-sync-glpng"] Jan 20 18:37:22 crc kubenswrapper[4558]: I0120 18:37:22.476340 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-db-sync-glpng" event={"ID":"291ca66d-5015-4468-8500-4200eba611b1","Type":"ContainerStarted","Data":"6b475d151a2bacbf44781d76b94d59aa41fea570c61419ae8e87b3b9378ab84e"} Jan 20 18:37:22 crc kubenswrapper[4558]: I0120 18:37:22.476390 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-db-sync-glpng" event={"ID":"291ca66d-5015-4468-8500-4200eba611b1","Type":"ContainerStarted","Data":"777c3eb736bfc34eb6c1d6cf37f607c3f6c4a6726b77d27ba9277358603ad2ff"} Jan 20 18:37:22 crc kubenswrapper[4558]: I0120 18:37:22.496003 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="keystone-kuttl-tests/keystone-db-sync-glpng" podStartSLOduration=1.495991627 podStartE2EDuration="1.495991627s" podCreationTimestamp="2026-01-20 18:37:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:37:22.490400244 +0000 UTC m=+6936.250738211" watchObservedRunningTime="2026-01-20 18:37:22.495991627 +0000 UTC m=+6936.256329594" Jan 20 18:37:24 crc kubenswrapper[4558]: I0120 18:37:24.495376 4558 generic.go:334] "Generic (PLEG): container finished" podID="291ca66d-5015-4468-8500-4200eba611b1" containerID="6b475d151a2bacbf44781d76b94d59aa41fea570c61419ae8e87b3b9378ab84e" exitCode=0 Jan 20 18:37:24 crc kubenswrapper[4558]: I0120 18:37:24.495461 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-db-sync-glpng" event={"ID":"291ca66d-5015-4468-8500-4200eba611b1","Type":"ContainerDied","Data":"6b475d151a2bacbf44781d76b94d59aa41fea570c61419ae8e87b3b9378ab84e"} Jan 20 18:37:25 crc kubenswrapper[4558]: I0120 18:37:25.730483 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-db-sync-glpng" Jan 20 18:37:25 crc kubenswrapper[4558]: I0120 18:37:25.766056 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnsqk\" (UniqueName: \"kubernetes.io/projected/291ca66d-5015-4468-8500-4200eba611b1-kube-api-access-mnsqk\") pod \"291ca66d-5015-4468-8500-4200eba611b1\" (UID: \"291ca66d-5015-4468-8500-4200eba611b1\") " Jan 20 18:37:25 crc kubenswrapper[4558]: I0120 18:37:25.766277 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/291ca66d-5015-4468-8500-4200eba611b1-combined-ca-bundle\") pod \"291ca66d-5015-4468-8500-4200eba611b1\" (UID: \"291ca66d-5015-4468-8500-4200eba611b1\") " Jan 20 18:37:25 crc kubenswrapper[4558]: I0120 18:37:25.766356 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/291ca66d-5015-4468-8500-4200eba611b1-config-data\") pod \"291ca66d-5015-4468-8500-4200eba611b1\" (UID: \"291ca66d-5015-4468-8500-4200eba611b1\") " Jan 20 18:37:25 crc kubenswrapper[4558]: I0120 18:37:25.772225 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/291ca66d-5015-4468-8500-4200eba611b1-kube-api-access-mnsqk" (OuterVolumeSpecName: "kube-api-access-mnsqk") pod "291ca66d-5015-4468-8500-4200eba611b1" (UID: "291ca66d-5015-4468-8500-4200eba611b1"). InnerVolumeSpecName "kube-api-access-mnsqk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:37:25 crc kubenswrapper[4558]: I0120 18:37:25.784402 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/291ca66d-5015-4468-8500-4200eba611b1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "291ca66d-5015-4468-8500-4200eba611b1" (UID: "291ca66d-5015-4468-8500-4200eba611b1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:37:25 crc kubenswrapper[4558]: I0120 18:37:25.795328 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/291ca66d-5015-4468-8500-4200eba611b1-config-data" (OuterVolumeSpecName: "config-data") pod "291ca66d-5015-4468-8500-4200eba611b1" (UID: "291ca66d-5015-4468-8500-4200eba611b1"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:37:25 crc kubenswrapper[4558]: I0120 18:37:25.869089 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnsqk\" (UniqueName: \"kubernetes.io/projected/291ca66d-5015-4468-8500-4200eba611b1-kube-api-access-mnsqk\") on node \"crc\" DevicePath \"\"" Jan 20 18:37:25 crc kubenswrapper[4558]: I0120 18:37:25.869138 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/291ca66d-5015-4468-8500-4200eba611b1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:37:25 crc kubenswrapper[4558]: I0120 18:37:25.869153 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/291ca66d-5015-4468-8500-4200eba611b1-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 18:37:26 crc kubenswrapper[4558]: I0120 18:37:26.515358 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-db-sync-glpng" event={"ID":"291ca66d-5015-4468-8500-4200eba611b1","Type":"ContainerDied","Data":"777c3eb736bfc34eb6c1d6cf37f607c3f6c4a6726b77d27ba9277358603ad2ff"} Jan 20 18:37:26 crc kubenswrapper[4558]: I0120 18:37:26.515423 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-db-sync-glpng" Jan 20 18:37:26 crc kubenswrapper[4558]: I0120 18:37:26.515443 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="777c3eb736bfc34eb6c1d6cf37f607c3f6c4a6726b77d27ba9277358603ad2ff" Jan 20 18:37:26 crc kubenswrapper[4558]: I0120 18:37:26.641153 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/keystone-bootstrap-c5wbh"] Jan 20 18:37:26 crc kubenswrapper[4558]: E0120 18:37:26.641805 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="291ca66d-5015-4468-8500-4200eba611b1" containerName="keystone-db-sync" Jan 20 18:37:26 crc kubenswrapper[4558]: I0120 18:37:26.641826 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="291ca66d-5015-4468-8500-4200eba611b1" containerName="keystone-db-sync" Jan 20 18:37:26 crc kubenswrapper[4558]: I0120 18:37:26.642031 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="291ca66d-5015-4468-8500-4200eba611b1" containerName="keystone-db-sync" Jan 20 18:37:26 crc kubenswrapper[4558]: I0120 18:37:26.642616 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-bootstrap-c5wbh" Jan 20 18:37:26 crc kubenswrapper[4558]: I0120 18:37:26.647798 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone" Jan 20 18:37:26 crc kubenswrapper[4558]: I0120 18:37:26.647822 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"combined-ca-bundle" Jan 20 18:37:26 crc kubenswrapper[4558]: I0120 18:37:26.647945 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"osp-secret" Jan 20 18:37:26 crc kubenswrapper[4558]: I0120 18:37:26.647948 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-scripts" Jan 20 18:37:26 crc kubenswrapper[4558]: I0120 18:37:26.648257 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-config-data" Jan 20 18:37:26 crc kubenswrapper[4558]: I0120 18:37:26.648267 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-keystone-dockercfg-jgbhp" Jan 20 18:37:26 crc kubenswrapper[4558]: I0120 18:37:26.649449 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-bootstrap-c5wbh"] Jan 20 18:37:26 crc kubenswrapper[4558]: I0120 18:37:26.679743 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j5h4m\" (UniqueName: \"kubernetes.io/projected/e0964c7e-1163-4945-b518-da920e3c261b-kube-api-access-j5h4m\") pod \"keystone-bootstrap-c5wbh\" (UID: \"e0964c7e-1163-4945-b518-da920e3c261b\") " pod="keystone-kuttl-tests/keystone-bootstrap-c5wbh" Jan 20 18:37:26 crc kubenswrapper[4558]: I0120 18:37:26.679896 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e0964c7e-1163-4945-b518-da920e3c261b-fernet-keys\") pod \"keystone-bootstrap-c5wbh\" (UID: \"e0964c7e-1163-4945-b518-da920e3c261b\") " pod="keystone-kuttl-tests/keystone-bootstrap-c5wbh" Jan 20 18:37:26 crc kubenswrapper[4558]: I0120 18:37:26.680005 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e0964c7e-1163-4945-b518-da920e3c261b-scripts\") pod \"keystone-bootstrap-c5wbh\" (UID: \"e0964c7e-1163-4945-b518-da920e3c261b\") " pod="keystone-kuttl-tests/keystone-bootstrap-c5wbh" Jan 20 18:37:26 crc kubenswrapper[4558]: I0120 18:37:26.680101 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0964c7e-1163-4945-b518-da920e3c261b-combined-ca-bundle\") pod \"keystone-bootstrap-c5wbh\" (UID: \"e0964c7e-1163-4945-b518-da920e3c261b\") " pod="keystone-kuttl-tests/keystone-bootstrap-c5wbh" Jan 20 18:37:26 crc kubenswrapper[4558]: I0120 18:37:26.680241 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0964c7e-1163-4945-b518-da920e3c261b-config-data\") pod \"keystone-bootstrap-c5wbh\" (UID: \"e0964c7e-1163-4945-b518-da920e3c261b\") " pod="keystone-kuttl-tests/keystone-bootstrap-c5wbh" Jan 20 18:37:26 crc kubenswrapper[4558]: I0120 18:37:26.680430 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: 
\"kubernetes.io/secret/e0964c7e-1163-4945-b518-da920e3c261b-credential-keys\") pod \"keystone-bootstrap-c5wbh\" (UID: \"e0964c7e-1163-4945-b518-da920e3c261b\") " pod="keystone-kuttl-tests/keystone-bootstrap-c5wbh" Jan 20 18:37:26 crc kubenswrapper[4558]: I0120 18:37:26.781638 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e0964c7e-1163-4945-b518-da920e3c261b-credential-keys\") pod \"keystone-bootstrap-c5wbh\" (UID: \"e0964c7e-1163-4945-b518-da920e3c261b\") " pod="keystone-kuttl-tests/keystone-bootstrap-c5wbh" Jan 20 18:37:26 crc kubenswrapper[4558]: I0120 18:37:26.781701 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j5h4m\" (UniqueName: \"kubernetes.io/projected/e0964c7e-1163-4945-b518-da920e3c261b-kube-api-access-j5h4m\") pod \"keystone-bootstrap-c5wbh\" (UID: \"e0964c7e-1163-4945-b518-da920e3c261b\") " pod="keystone-kuttl-tests/keystone-bootstrap-c5wbh" Jan 20 18:37:26 crc kubenswrapper[4558]: I0120 18:37:26.781754 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e0964c7e-1163-4945-b518-da920e3c261b-fernet-keys\") pod \"keystone-bootstrap-c5wbh\" (UID: \"e0964c7e-1163-4945-b518-da920e3c261b\") " pod="keystone-kuttl-tests/keystone-bootstrap-c5wbh" Jan 20 18:37:26 crc kubenswrapper[4558]: I0120 18:37:26.781790 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e0964c7e-1163-4945-b518-da920e3c261b-scripts\") pod \"keystone-bootstrap-c5wbh\" (UID: \"e0964c7e-1163-4945-b518-da920e3c261b\") " pod="keystone-kuttl-tests/keystone-bootstrap-c5wbh" Jan 20 18:37:26 crc kubenswrapper[4558]: I0120 18:37:26.781821 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0964c7e-1163-4945-b518-da920e3c261b-combined-ca-bundle\") pod \"keystone-bootstrap-c5wbh\" (UID: \"e0964c7e-1163-4945-b518-da920e3c261b\") " pod="keystone-kuttl-tests/keystone-bootstrap-c5wbh" Jan 20 18:37:26 crc kubenswrapper[4558]: I0120 18:37:26.781857 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0964c7e-1163-4945-b518-da920e3c261b-config-data\") pod \"keystone-bootstrap-c5wbh\" (UID: \"e0964c7e-1163-4945-b518-da920e3c261b\") " pod="keystone-kuttl-tests/keystone-bootstrap-c5wbh" Jan 20 18:37:26 crc kubenswrapper[4558]: I0120 18:37:26.787022 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0964c7e-1163-4945-b518-da920e3c261b-combined-ca-bundle\") pod \"keystone-bootstrap-c5wbh\" (UID: \"e0964c7e-1163-4945-b518-da920e3c261b\") " pod="keystone-kuttl-tests/keystone-bootstrap-c5wbh" Jan 20 18:37:26 crc kubenswrapper[4558]: I0120 18:37:26.787127 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0964c7e-1163-4945-b518-da920e3c261b-config-data\") pod \"keystone-bootstrap-c5wbh\" (UID: \"e0964c7e-1163-4945-b518-da920e3c261b\") " pod="keystone-kuttl-tests/keystone-bootstrap-c5wbh" Jan 20 18:37:26 crc kubenswrapper[4558]: I0120 18:37:26.787222 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e0964c7e-1163-4945-b518-da920e3c261b-scripts\") pod 
\"keystone-bootstrap-c5wbh\" (UID: \"e0964c7e-1163-4945-b518-da920e3c261b\") " pod="keystone-kuttl-tests/keystone-bootstrap-c5wbh" Jan 20 18:37:26 crc kubenswrapper[4558]: I0120 18:37:26.787563 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e0964c7e-1163-4945-b518-da920e3c261b-fernet-keys\") pod \"keystone-bootstrap-c5wbh\" (UID: \"e0964c7e-1163-4945-b518-da920e3c261b\") " pod="keystone-kuttl-tests/keystone-bootstrap-c5wbh" Jan 20 18:37:26 crc kubenswrapper[4558]: I0120 18:37:26.788356 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e0964c7e-1163-4945-b518-da920e3c261b-credential-keys\") pod \"keystone-bootstrap-c5wbh\" (UID: \"e0964c7e-1163-4945-b518-da920e3c261b\") " pod="keystone-kuttl-tests/keystone-bootstrap-c5wbh" Jan 20 18:37:26 crc kubenswrapper[4558]: I0120 18:37:26.797378 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j5h4m\" (UniqueName: \"kubernetes.io/projected/e0964c7e-1163-4945-b518-da920e3c261b-kube-api-access-j5h4m\") pod \"keystone-bootstrap-c5wbh\" (UID: \"e0964c7e-1163-4945-b518-da920e3c261b\") " pod="keystone-kuttl-tests/keystone-bootstrap-c5wbh" Jan 20 18:37:26 crc kubenswrapper[4558]: I0120 18:37:26.956373 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-bootstrap-c5wbh" Jan 20 18:37:27 crc kubenswrapper[4558]: I0120 18:37:27.330070 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 18:37:27 crc kubenswrapper[4558]: I0120 18:37:27.330421 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 18:37:27 crc kubenswrapper[4558]: I0120 18:37:27.350393 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-bootstrap-c5wbh"] Jan 20 18:37:27 crc kubenswrapper[4558]: I0120 18:37:27.526296 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-bootstrap-c5wbh" event={"ID":"e0964c7e-1163-4945-b518-da920e3c261b","Type":"ContainerStarted","Data":"4a9ef972cfa2208336857bf3a949814659a8cacf751d30fe2b6e80dd43173198"} Jan 20 18:37:27 crc kubenswrapper[4558]: I0120 18:37:27.526361 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-bootstrap-c5wbh" event={"ID":"e0964c7e-1163-4945-b518-da920e3c261b","Type":"ContainerStarted","Data":"1acd0904513fa3bb95c14231ce3a9e37c930381e51890626e11ac7816c1a0c17"} Jan 20 18:37:27 crc kubenswrapper[4558]: I0120 18:37:27.545397 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="keystone-kuttl-tests/keystone-bootstrap-c5wbh" podStartSLOduration=1.545365417 podStartE2EDuration="1.545365417s" podCreationTimestamp="2026-01-20 18:37:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:37:27.540467809 +0000 UTC m=+6941.300805776" 
watchObservedRunningTime="2026-01-20 18:37:27.545365417 +0000 UTC m=+6941.305703384" Jan 20 18:37:30 crc kubenswrapper[4558]: I0120 18:37:30.552116 4558 generic.go:334] "Generic (PLEG): container finished" podID="e0964c7e-1163-4945-b518-da920e3c261b" containerID="4a9ef972cfa2208336857bf3a949814659a8cacf751d30fe2b6e80dd43173198" exitCode=0 Jan 20 18:37:30 crc kubenswrapper[4558]: I0120 18:37:30.552184 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-bootstrap-c5wbh" event={"ID":"e0964c7e-1163-4945-b518-da920e3c261b","Type":"ContainerDied","Data":"4a9ef972cfa2208336857bf3a949814659a8cacf751d30fe2b6e80dd43173198"} Jan 20 18:37:31 crc kubenswrapper[4558]: I0120 18:37:31.810939 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-bootstrap-c5wbh" Jan 20 18:37:31 crc kubenswrapper[4558]: I0120 18:37:31.956006 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j5h4m\" (UniqueName: \"kubernetes.io/projected/e0964c7e-1163-4945-b518-da920e3c261b-kube-api-access-j5h4m\") pod \"e0964c7e-1163-4945-b518-da920e3c261b\" (UID: \"e0964c7e-1163-4945-b518-da920e3c261b\") " Jan 20 18:37:31 crc kubenswrapper[4558]: I0120 18:37:31.956114 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e0964c7e-1163-4945-b518-da920e3c261b-fernet-keys\") pod \"e0964c7e-1163-4945-b518-da920e3c261b\" (UID: \"e0964c7e-1163-4945-b518-da920e3c261b\") " Jan 20 18:37:31 crc kubenswrapper[4558]: I0120 18:37:31.956315 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e0964c7e-1163-4945-b518-da920e3c261b-credential-keys\") pod \"e0964c7e-1163-4945-b518-da920e3c261b\" (UID: \"e0964c7e-1163-4945-b518-da920e3c261b\") " Jan 20 18:37:31 crc kubenswrapper[4558]: I0120 18:37:31.956375 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e0964c7e-1163-4945-b518-da920e3c261b-scripts\") pod \"e0964c7e-1163-4945-b518-da920e3c261b\" (UID: \"e0964c7e-1163-4945-b518-da920e3c261b\") " Jan 20 18:37:31 crc kubenswrapper[4558]: I0120 18:37:31.956431 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0964c7e-1163-4945-b518-da920e3c261b-config-data\") pod \"e0964c7e-1163-4945-b518-da920e3c261b\" (UID: \"e0964c7e-1163-4945-b518-da920e3c261b\") " Jan 20 18:37:31 crc kubenswrapper[4558]: I0120 18:37:31.956476 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0964c7e-1163-4945-b518-da920e3c261b-combined-ca-bundle\") pod \"e0964c7e-1163-4945-b518-da920e3c261b\" (UID: \"e0964c7e-1163-4945-b518-da920e3c261b\") " Jan 20 18:37:31 crc kubenswrapper[4558]: I0120 18:37:31.963968 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e0964c7e-1163-4945-b518-da920e3c261b-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "e0964c7e-1163-4945-b518-da920e3c261b" (UID: "e0964c7e-1163-4945-b518-da920e3c261b"). InnerVolumeSpecName "credential-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:37:31 crc kubenswrapper[4558]: I0120 18:37:31.964517 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e0964c7e-1163-4945-b518-da920e3c261b-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "e0964c7e-1163-4945-b518-da920e3c261b" (UID: "e0964c7e-1163-4945-b518-da920e3c261b"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:37:31 crc kubenswrapper[4558]: I0120 18:37:31.965322 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e0964c7e-1163-4945-b518-da920e3c261b-kube-api-access-j5h4m" (OuterVolumeSpecName: "kube-api-access-j5h4m") pod "e0964c7e-1163-4945-b518-da920e3c261b" (UID: "e0964c7e-1163-4945-b518-da920e3c261b"). InnerVolumeSpecName "kube-api-access-j5h4m". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:37:31 crc kubenswrapper[4558]: I0120 18:37:31.969076 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e0964c7e-1163-4945-b518-da920e3c261b-scripts" (OuterVolumeSpecName: "scripts") pod "e0964c7e-1163-4945-b518-da920e3c261b" (UID: "e0964c7e-1163-4945-b518-da920e3c261b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:37:31 crc kubenswrapper[4558]: E0120 18:37:31.977434 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e0964c7e-1163-4945-b518-da920e3c261b-config-data podName:e0964c7e-1163-4945-b518-da920e3c261b nodeName:}" failed. No retries permitted until 2026-01-20 18:37:32.477409831 +0000 UTC m=+6946.237747798 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "config-data" (UniqueName: "kubernetes.io/secret/e0964c7e-1163-4945-b518-da920e3c261b-config-data") pod "e0964c7e-1163-4945-b518-da920e3c261b" (UID: "e0964c7e-1163-4945-b518-da920e3c261b") : error deleting /var/lib/kubelet/pods/e0964c7e-1163-4945-b518-da920e3c261b/volume-subpaths: remove /var/lib/kubelet/pods/e0964c7e-1163-4945-b518-da920e3c261b/volume-subpaths: no such file or directory Jan 20 18:37:31 crc kubenswrapper[4558]: I0120 18:37:31.980312 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e0964c7e-1163-4945-b518-da920e3c261b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e0964c7e-1163-4945-b518-da920e3c261b" (UID: "e0964c7e-1163-4945-b518-da920e3c261b"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:37:32 crc kubenswrapper[4558]: I0120 18:37:32.058608 4558 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/e0964c7e-1163-4945-b518-da920e3c261b-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 20 18:37:32 crc kubenswrapper[4558]: I0120 18:37:32.058643 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e0964c7e-1163-4945-b518-da920e3c261b-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 18:37:32 crc kubenswrapper[4558]: I0120 18:37:32.058661 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e0964c7e-1163-4945-b518-da920e3c261b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:37:32 crc kubenswrapper[4558]: I0120 18:37:32.058681 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j5h4m\" (UniqueName: \"kubernetes.io/projected/e0964c7e-1163-4945-b518-da920e3c261b-kube-api-access-j5h4m\") on node \"crc\" DevicePath \"\"" Jan 20 18:37:32 crc kubenswrapper[4558]: I0120 18:37:32.058698 4558 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/e0964c7e-1163-4945-b518-da920e3c261b-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 20 18:37:32 crc kubenswrapper[4558]: I0120 18:37:32.568103 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-bootstrap-c5wbh" Jan 20 18:37:32 crc kubenswrapper[4558]: I0120 18:37:32.568697 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0964c7e-1163-4945-b518-da920e3c261b-config-data\") pod \"e0964c7e-1163-4945-b518-da920e3c261b\" (UID: \"e0964c7e-1163-4945-b518-da920e3c261b\") " Jan 20 18:37:32 crc kubenswrapper[4558]: I0120 18:37:32.573086 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e0964c7e-1163-4945-b518-da920e3c261b-config-data" (OuterVolumeSpecName: "config-data") pod "e0964c7e-1163-4945-b518-da920e3c261b" (UID: "e0964c7e-1163-4945-b518-da920e3c261b"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:37:32 crc kubenswrapper[4558]: I0120 18:37:32.591629 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-bootstrap-c5wbh" event={"ID":"e0964c7e-1163-4945-b518-da920e3c261b","Type":"ContainerDied","Data":"1acd0904513fa3bb95c14231ce3a9e37c930381e51890626e11ac7816c1a0c17"} Jan 20 18:37:32 crc kubenswrapper[4558]: I0120 18:37:32.591680 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1acd0904513fa3bb95c14231ce3a9e37c930381e51890626e11ac7816c1a0c17" Jan 20 18:37:32 crc kubenswrapper[4558]: I0120 18:37:32.634139 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/keystone-5b89fc9467-l9h86"] Jan 20 18:37:32 crc kubenswrapper[4558]: E0120 18:37:32.634469 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0964c7e-1163-4945-b518-da920e3c261b" containerName="keystone-bootstrap" Jan 20 18:37:32 crc kubenswrapper[4558]: I0120 18:37:32.634489 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0964c7e-1163-4945-b518-da920e3c261b" containerName="keystone-bootstrap" Jan 20 18:37:32 crc kubenswrapper[4558]: I0120 18:37:32.634597 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0964c7e-1163-4945-b518-da920e3c261b" containerName="keystone-bootstrap" Jan 20 18:37:32 crc kubenswrapper[4558]: I0120 18:37:32.635081 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-5b89fc9467-l9h86" Jan 20 18:37:32 crc kubenswrapper[4558]: I0120 18:37:32.637878 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"cert-keystone-public-svc" Jan 20 18:37:32 crc kubenswrapper[4558]: I0120 18:37:32.644591 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-5b89fc9467-l9h86"] Jan 20 18:37:32 crc kubenswrapper[4558]: I0120 18:37:32.646226 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"cert-keystone-internal-svc" Jan 20 18:37:32 crc kubenswrapper[4558]: I0120 18:37:32.670303 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc638cdc-a544-4dd2-b7d0-dfebe96125cc-combined-ca-bundle\") pod \"keystone-5b89fc9467-l9h86\" (UID: \"dc638cdc-a544-4dd2-b7d0-dfebe96125cc\") " pod="keystone-kuttl-tests/keystone-5b89fc9467-l9h86" Jan 20 18:37:32 crc kubenswrapper[4558]: I0120 18:37:32.670340 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/dc638cdc-a544-4dd2-b7d0-dfebe96125cc-public-tls-certs\") pod \"keystone-5b89fc9467-l9h86\" (UID: \"dc638cdc-a544-4dd2-b7d0-dfebe96125cc\") " pod="keystone-kuttl-tests/keystone-5b89fc9467-l9h86" Jan 20 18:37:32 crc kubenswrapper[4558]: I0120 18:37:32.670366 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/dc638cdc-a544-4dd2-b7d0-dfebe96125cc-credential-keys\") pod \"keystone-5b89fc9467-l9h86\" (UID: \"dc638cdc-a544-4dd2-b7d0-dfebe96125cc\") " pod="keystone-kuttl-tests/keystone-5b89fc9467-l9h86" Jan 20 18:37:32 crc kubenswrapper[4558]: I0120 18:37:32.670386 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q8t8f\" (UniqueName: 
\"kubernetes.io/projected/dc638cdc-a544-4dd2-b7d0-dfebe96125cc-kube-api-access-q8t8f\") pod \"keystone-5b89fc9467-l9h86\" (UID: \"dc638cdc-a544-4dd2-b7d0-dfebe96125cc\") " pod="keystone-kuttl-tests/keystone-5b89fc9467-l9h86" Jan 20 18:37:32 crc kubenswrapper[4558]: I0120 18:37:32.670429 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dc638cdc-a544-4dd2-b7d0-dfebe96125cc-scripts\") pod \"keystone-5b89fc9467-l9h86\" (UID: \"dc638cdc-a544-4dd2-b7d0-dfebe96125cc\") " pod="keystone-kuttl-tests/keystone-5b89fc9467-l9h86" Jan 20 18:37:32 crc kubenswrapper[4558]: I0120 18:37:32.670543 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/dc638cdc-a544-4dd2-b7d0-dfebe96125cc-fernet-keys\") pod \"keystone-5b89fc9467-l9h86\" (UID: \"dc638cdc-a544-4dd2-b7d0-dfebe96125cc\") " pod="keystone-kuttl-tests/keystone-5b89fc9467-l9h86" Jan 20 18:37:32 crc kubenswrapper[4558]: I0120 18:37:32.670592 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc638cdc-a544-4dd2-b7d0-dfebe96125cc-config-data\") pod \"keystone-5b89fc9467-l9h86\" (UID: \"dc638cdc-a544-4dd2-b7d0-dfebe96125cc\") " pod="keystone-kuttl-tests/keystone-5b89fc9467-l9h86" Jan 20 18:37:32 crc kubenswrapper[4558]: I0120 18:37:32.670616 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dc638cdc-a544-4dd2-b7d0-dfebe96125cc-internal-tls-certs\") pod \"keystone-5b89fc9467-l9h86\" (UID: \"dc638cdc-a544-4dd2-b7d0-dfebe96125cc\") " pod="keystone-kuttl-tests/keystone-5b89fc9467-l9h86" Jan 20 18:37:32 crc kubenswrapper[4558]: I0120 18:37:32.670654 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e0964c7e-1163-4945-b518-da920e3c261b-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 18:37:32 crc kubenswrapper[4558]: I0120 18:37:32.772307 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/dc638cdc-a544-4dd2-b7d0-dfebe96125cc-fernet-keys\") pod \"keystone-5b89fc9467-l9h86\" (UID: \"dc638cdc-a544-4dd2-b7d0-dfebe96125cc\") " pod="keystone-kuttl-tests/keystone-5b89fc9467-l9h86" Jan 20 18:37:32 crc kubenswrapper[4558]: I0120 18:37:32.772393 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc638cdc-a544-4dd2-b7d0-dfebe96125cc-config-data\") pod \"keystone-5b89fc9467-l9h86\" (UID: \"dc638cdc-a544-4dd2-b7d0-dfebe96125cc\") " pod="keystone-kuttl-tests/keystone-5b89fc9467-l9h86" Jan 20 18:37:32 crc kubenswrapper[4558]: I0120 18:37:32.772424 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dc638cdc-a544-4dd2-b7d0-dfebe96125cc-internal-tls-certs\") pod \"keystone-5b89fc9467-l9h86\" (UID: \"dc638cdc-a544-4dd2-b7d0-dfebe96125cc\") " pod="keystone-kuttl-tests/keystone-5b89fc9467-l9h86" Jan 20 18:37:32 crc kubenswrapper[4558]: I0120 18:37:32.772442 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc638cdc-a544-4dd2-b7d0-dfebe96125cc-combined-ca-bundle\") pod 
\"keystone-5b89fc9467-l9h86\" (UID: \"dc638cdc-a544-4dd2-b7d0-dfebe96125cc\") " pod="keystone-kuttl-tests/keystone-5b89fc9467-l9h86" Jan 20 18:37:32 crc kubenswrapper[4558]: I0120 18:37:32.772462 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/dc638cdc-a544-4dd2-b7d0-dfebe96125cc-public-tls-certs\") pod \"keystone-5b89fc9467-l9h86\" (UID: \"dc638cdc-a544-4dd2-b7d0-dfebe96125cc\") " pod="keystone-kuttl-tests/keystone-5b89fc9467-l9h86" Jan 20 18:37:32 crc kubenswrapper[4558]: I0120 18:37:32.772485 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/dc638cdc-a544-4dd2-b7d0-dfebe96125cc-credential-keys\") pod \"keystone-5b89fc9467-l9h86\" (UID: \"dc638cdc-a544-4dd2-b7d0-dfebe96125cc\") " pod="keystone-kuttl-tests/keystone-5b89fc9467-l9h86" Jan 20 18:37:32 crc kubenswrapper[4558]: I0120 18:37:32.772513 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q8t8f\" (UniqueName: \"kubernetes.io/projected/dc638cdc-a544-4dd2-b7d0-dfebe96125cc-kube-api-access-q8t8f\") pod \"keystone-5b89fc9467-l9h86\" (UID: \"dc638cdc-a544-4dd2-b7d0-dfebe96125cc\") " pod="keystone-kuttl-tests/keystone-5b89fc9467-l9h86" Jan 20 18:37:32 crc kubenswrapper[4558]: I0120 18:37:32.772567 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dc638cdc-a544-4dd2-b7d0-dfebe96125cc-scripts\") pod \"keystone-5b89fc9467-l9h86\" (UID: \"dc638cdc-a544-4dd2-b7d0-dfebe96125cc\") " pod="keystone-kuttl-tests/keystone-5b89fc9467-l9h86" Jan 20 18:37:32 crc kubenswrapper[4558]: I0120 18:37:32.778635 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc638cdc-a544-4dd2-b7d0-dfebe96125cc-combined-ca-bundle\") pod \"keystone-5b89fc9467-l9h86\" (UID: \"dc638cdc-a544-4dd2-b7d0-dfebe96125cc\") " pod="keystone-kuttl-tests/keystone-5b89fc9467-l9h86" Jan 20 18:37:32 crc kubenswrapper[4558]: I0120 18:37:32.778644 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/dc638cdc-a544-4dd2-b7d0-dfebe96125cc-credential-keys\") pod \"keystone-5b89fc9467-l9h86\" (UID: \"dc638cdc-a544-4dd2-b7d0-dfebe96125cc\") " pod="keystone-kuttl-tests/keystone-5b89fc9467-l9h86" Jan 20 18:37:32 crc kubenswrapper[4558]: I0120 18:37:32.778683 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/dc638cdc-a544-4dd2-b7d0-dfebe96125cc-public-tls-certs\") pod \"keystone-5b89fc9467-l9h86\" (UID: \"dc638cdc-a544-4dd2-b7d0-dfebe96125cc\") " pod="keystone-kuttl-tests/keystone-5b89fc9467-l9h86" Jan 20 18:37:32 crc kubenswrapper[4558]: I0120 18:37:32.778820 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dc638cdc-a544-4dd2-b7d0-dfebe96125cc-scripts\") pod \"keystone-5b89fc9467-l9h86\" (UID: \"dc638cdc-a544-4dd2-b7d0-dfebe96125cc\") " pod="keystone-kuttl-tests/keystone-5b89fc9467-l9h86" Jan 20 18:37:32 crc kubenswrapper[4558]: I0120 18:37:32.779529 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dc638cdc-a544-4dd2-b7d0-dfebe96125cc-internal-tls-certs\") pod \"keystone-5b89fc9467-l9h86\" (UID: 
\"dc638cdc-a544-4dd2-b7d0-dfebe96125cc\") " pod="keystone-kuttl-tests/keystone-5b89fc9467-l9h86" Jan 20 18:37:32 crc kubenswrapper[4558]: I0120 18:37:32.779531 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/dc638cdc-a544-4dd2-b7d0-dfebe96125cc-fernet-keys\") pod \"keystone-5b89fc9467-l9h86\" (UID: \"dc638cdc-a544-4dd2-b7d0-dfebe96125cc\") " pod="keystone-kuttl-tests/keystone-5b89fc9467-l9h86" Jan 20 18:37:32 crc kubenswrapper[4558]: I0120 18:37:32.779992 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc638cdc-a544-4dd2-b7d0-dfebe96125cc-config-data\") pod \"keystone-5b89fc9467-l9h86\" (UID: \"dc638cdc-a544-4dd2-b7d0-dfebe96125cc\") " pod="keystone-kuttl-tests/keystone-5b89fc9467-l9h86" Jan 20 18:37:32 crc kubenswrapper[4558]: I0120 18:37:32.786874 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q8t8f\" (UniqueName: \"kubernetes.io/projected/dc638cdc-a544-4dd2-b7d0-dfebe96125cc-kube-api-access-q8t8f\") pod \"keystone-5b89fc9467-l9h86\" (UID: \"dc638cdc-a544-4dd2-b7d0-dfebe96125cc\") " pod="keystone-kuttl-tests/keystone-5b89fc9467-l9h86" Jan 20 18:37:32 crc kubenswrapper[4558]: I0120 18:37:32.949489 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-5b89fc9467-l9h86" Jan 20 18:37:33 crc kubenswrapper[4558]: I0120 18:37:33.368520 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-5b89fc9467-l9h86"] Jan 20 18:37:33 crc kubenswrapper[4558]: I0120 18:37:33.575950 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-5b89fc9467-l9h86" event={"ID":"dc638cdc-a544-4dd2-b7d0-dfebe96125cc","Type":"ContainerStarted","Data":"81628dd3740401490a8ba03aed6f712d717b6d0ca934f70ba7b6c2f25fcb6b1e"} Jan 20 18:37:33 crc kubenswrapper[4558]: I0120 18:37:33.575991 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-5b89fc9467-l9h86" event={"ID":"dc638cdc-a544-4dd2-b7d0-dfebe96125cc","Type":"ContainerStarted","Data":"54450e8f51615043670d93072ff8fb6b2f140aaef7fb5707876e532c919eb7ed"} Jan 20 18:37:33 crc kubenswrapper[4558]: I0120 18:37:33.576091 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="keystone-kuttl-tests/keystone-5b89fc9467-l9h86" Jan 20 18:37:33 crc kubenswrapper[4558]: I0120 18:37:33.610716 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="keystone-kuttl-tests/keystone-5b89fc9467-l9h86" podStartSLOduration=1.6106738489999999 podStartE2EDuration="1.610673849s" podCreationTimestamp="2026-01-20 18:37:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:37:33.59627738 +0000 UTC m=+6947.356615347" watchObservedRunningTime="2026-01-20 18:37:33.610673849 +0000 UTC m=+6947.371011816" Jan 20 18:37:57 crc kubenswrapper[4558]: I0120 18:37:57.330093 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 18:37:57 crc kubenswrapper[4558]: I0120 18:37:57.330922 4558 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 18:37:57 crc kubenswrapper[4558]: I0120 18:37:57.330976 4558 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" Jan 20 18:37:57 crc kubenswrapper[4558]: I0120 18:37:57.331802 4558 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"77ce04c6daee7d4f9b776bd1c80b448f9c4750d5cb5c7dd182ff28963a4bcf99"} pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 20 18:37:57 crc kubenswrapper[4558]: I0120 18:37:57.331893 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" containerID="cri-o://77ce04c6daee7d4f9b776bd1c80b448f9c4750d5cb5c7dd182ff28963a4bcf99" gracePeriod=600 Jan 20 18:37:57 crc kubenswrapper[4558]: E0120 18:37:57.451890 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:37:57 crc kubenswrapper[4558]: I0120 18:37:57.754783 4558 generic.go:334] "Generic (PLEG): container finished" podID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerID="77ce04c6daee7d4f9b776bd1c80b448f9c4750d5cb5c7dd182ff28963a4bcf99" exitCode=0 Jan 20 18:37:57 crc kubenswrapper[4558]: I0120 18:37:57.754860 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerDied","Data":"77ce04c6daee7d4f9b776bd1c80b448f9c4750d5cb5c7dd182ff28963a4bcf99"} Jan 20 18:37:57 crc kubenswrapper[4558]: I0120 18:37:57.754976 4558 scope.go:117] "RemoveContainer" containerID="5858873a8a511c0bf9fb428796875a79a6b6fce6a3367ceb7e77c36d25d6f8a7" Jan 20 18:37:57 crc kubenswrapper[4558]: I0120 18:37:57.755608 4558 scope.go:117] "RemoveContainer" containerID="77ce04c6daee7d4f9b776bd1c80b448f9c4750d5cb5c7dd182ff28963a4bcf99" Jan 20 18:37:57 crc kubenswrapper[4558]: E0120 18:37:57.755935 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:38:04 crc kubenswrapper[4558]: I0120 18:38:04.261675 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="keystone-kuttl-tests/keystone-5b89fc9467-l9h86" Jan 20 18:38:04 crc kubenswrapper[4558]: I0120 18:38:04.765181 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["keystone-kuttl-tests/keystone-bootstrap-c5wbh"] Jan 20 18:38:04 crc kubenswrapper[4558]: I0120 18:38:04.769204 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone-db-sync-glpng"] Jan 20 18:38:04 crc kubenswrapper[4558]: I0120 18:38:04.772776 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone-5b89fc9467-l9h86"] Jan 20 18:38:04 crc kubenswrapper[4558]: I0120 18:38:04.776771 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/keystone-db-sync-glpng"] Jan 20 18:38:04 crc kubenswrapper[4558]: I0120 18:38:04.812219 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/keystone-bootstrap-c5wbh"] Jan 20 18:38:04 crc kubenswrapper[4558]: I0120 18:38:04.812328 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="keystone-kuttl-tests/keystone-5b89fc9467-l9h86" podUID="dc638cdc-a544-4dd2-b7d0-dfebe96125cc" containerName="keystone-api" containerID="cri-o://81628dd3740401490a8ba03aed6f712d717b6d0ca934f70ba7b6c2f25fcb6b1e" gracePeriod=30 Jan 20 18:38:04 crc kubenswrapper[4558]: I0120 18:38:04.853246 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/keystone66d9-account-delete-7ffb2"] Jan 20 18:38:04 crc kubenswrapper[4558]: I0120 18:38:04.854467 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone66d9-account-delete-7ffb2" Jan 20 18:38:04 crc kubenswrapper[4558]: I0120 18:38:04.862544 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone66d9-account-delete-7ffb2"] Jan 20 18:38:04 crc kubenswrapper[4558]: I0120 18:38:04.908235 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9jht6\" (UniqueName: \"kubernetes.io/projected/e707bfce-e265-4454-940b-e792ca8d2bf3-kube-api-access-9jht6\") pod \"keystone66d9-account-delete-7ffb2\" (UID: \"e707bfce-e265-4454-940b-e792ca8d2bf3\") " pod="keystone-kuttl-tests/keystone66d9-account-delete-7ffb2" Jan 20 18:38:04 crc kubenswrapper[4558]: I0120 18:38:04.908382 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e707bfce-e265-4454-940b-e792ca8d2bf3-operator-scripts\") pod \"keystone66d9-account-delete-7ffb2\" (UID: \"e707bfce-e265-4454-940b-e792ca8d2bf3\") " pod="keystone-kuttl-tests/keystone66d9-account-delete-7ffb2" Jan 20 18:38:05 crc kubenswrapper[4558]: I0120 18:38:05.009525 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9jht6\" (UniqueName: \"kubernetes.io/projected/e707bfce-e265-4454-940b-e792ca8d2bf3-kube-api-access-9jht6\") pod \"keystone66d9-account-delete-7ffb2\" (UID: \"e707bfce-e265-4454-940b-e792ca8d2bf3\") " pod="keystone-kuttl-tests/keystone66d9-account-delete-7ffb2" Jan 20 18:38:05 crc kubenswrapper[4558]: I0120 18:38:05.009600 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e707bfce-e265-4454-940b-e792ca8d2bf3-operator-scripts\") pod \"keystone66d9-account-delete-7ffb2\" (UID: \"e707bfce-e265-4454-940b-e792ca8d2bf3\") " pod="keystone-kuttl-tests/keystone66d9-account-delete-7ffb2" Jan 20 18:38:05 crc kubenswrapper[4558]: I0120 18:38:05.010743 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/e707bfce-e265-4454-940b-e792ca8d2bf3-operator-scripts\") pod \"keystone66d9-account-delete-7ffb2\" (UID: \"e707bfce-e265-4454-940b-e792ca8d2bf3\") " pod="keystone-kuttl-tests/keystone66d9-account-delete-7ffb2" Jan 20 18:38:05 crc kubenswrapper[4558]: I0120 18:38:05.025623 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9jht6\" (UniqueName: \"kubernetes.io/projected/e707bfce-e265-4454-940b-e792ca8d2bf3-kube-api-access-9jht6\") pod \"keystone66d9-account-delete-7ffb2\" (UID: \"e707bfce-e265-4454-940b-e792ca8d2bf3\") " pod="keystone-kuttl-tests/keystone66d9-account-delete-7ffb2" Jan 20 18:38:05 crc kubenswrapper[4558]: I0120 18:38:05.174950 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone66d9-account-delete-7ffb2" Jan 20 18:38:05 crc kubenswrapper[4558]: I0120 18:38:05.571782 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone66d9-account-delete-7ffb2"] Jan 20 18:38:05 crc kubenswrapper[4558]: I0120 18:38:05.821736 4558 generic.go:334] "Generic (PLEG): container finished" podID="e707bfce-e265-4454-940b-e792ca8d2bf3" containerID="cad33bf620f35d37cf9a366f1fb9f1d9d5156fcc69a7f378e57e7860accf8797" exitCode=0 Jan 20 18:38:05 crc kubenswrapper[4558]: I0120 18:38:05.821799 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone66d9-account-delete-7ffb2" event={"ID":"e707bfce-e265-4454-940b-e792ca8d2bf3","Type":"ContainerDied","Data":"cad33bf620f35d37cf9a366f1fb9f1d9d5156fcc69a7f378e57e7860accf8797"} Jan 20 18:38:05 crc kubenswrapper[4558]: I0120 18:38:05.822104 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone66d9-account-delete-7ffb2" event={"ID":"e707bfce-e265-4454-940b-e792ca8d2bf3","Type":"ContainerStarted","Data":"946932a30a61cec6d863fa8f60a147163d77e6a51b56418a84ff01233e581fdc"} Jan 20 18:38:06 crc kubenswrapper[4558]: I0120 18:38:06.577823 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="291ca66d-5015-4468-8500-4200eba611b1" path="/var/lib/kubelet/pods/291ca66d-5015-4468-8500-4200eba611b1/volumes" Jan 20 18:38:06 crc kubenswrapper[4558]: I0120 18:38:06.578477 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e0964c7e-1163-4945-b518-da920e3c261b" path="/var/lib/kubelet/pods/e0964c7e-1163-4945-b518-da920e3c261b/volumes" Jan 20 18:38:07 crc kubenswrapper[4558]: I0120 18:38:07.053485 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone66d9-account-delete-7ffb2" Jan 20 18:38:07 crc kubenswrapper[4558]: I0120 18:38:07.241297 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9jht6\" (UniqueName: \"kubernetes.io/projected/e707bfce-e265-4454-940b-e792ca8d2bf3-kube-api-access-9jht6\") pod \"e707bfce-e265-4454-940b-e792ca8d2bf3\" (UID: \"e707bfce-e265-4454-940b-e792ca8d2bf3\") " Jan 20 18:38:07 crc kubenswrapper[4558]: I0120 18:38:07.241421 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e707bfce-e265-4454-940b-e792ca8d2bf3-operator-scripts\") pod \"e707bfce-e265-4454-940b-e792ca8d2bf3\" (UID: \"e707bfce-e265-4454-940b-e792ca8d2bf3\") " Jan 20 18:38:07 crc kubenswrapper[4558]: I0120 18:38:07.241964 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e707bfce-e265-4454-940b-e792ca8d2bf3-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e707bfce-e265-4454-940b-e792ca8d2bf3" (UID: "e707bfce-e265-4454-940b-e792ca8d2bf3"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:38:07 crc kubenswrapper[4558]: I0120 18:38:07.242199 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e707bfce-e265-4454-940b-e792ca8d2bf3-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 18:38:07 crc kubenswrapper[4558]: I0120 18:38:07.247765 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e707bfce-e265-4454-940b-e792ca8d2bf3-kube-api-access-9jht6" (OuterVolumeSpecName: "kube-api-access-9jht6") pod "e707bfce-e265-4454-940b-e792ca8d2bf3" (UID: "e707bfce-e265-4454-940b-e792ca8d2bf3"). InnerVolumeSpecName "kube-api-access-9jht6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:38:07 crc kubenswrapper[4558]: I0120 18:38:07.343993 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9jht6\" (UniqueName: \"kubernetes.io/projected/e707bfce-e265-4454-940b-e792ca8d2bf3-kube-api-access-9jht6\") on node \"crc\" DevicePath \"\"" Jan 20 18:38:07 crc kubenswrapper[4558]: I0120 18:38:07.842023 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone66d9-account-delete-7ffb2" event={"ID":"e707bfce-e265-4454-940b-e792ca8d2bf3","Type":"ContainerDied","Data":"946932a30a61cec6d863fa8f60a147163d77e6a51b56418a84ff01233e581fdc"} Jan 20 18:38:07 crc kubenswrapper[4558]: I0120 18:38:07.842092 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="946932a30a61cec6d863fa8f60a147163d77e6a51b56418a84ff01233e581fdc" Jan 20 18:38:07 crc kubenswrapper[4558]: I0120 18:38:07.842112 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone66d9-account-delete-7ffb2" Jan 20 18:38:08 crc kubenswrapper[4558]: I0120 18:38:08.169157 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-5b89fc9467-l9h86" Jan 20 18:38:08 crc kubenswrapper[4558]: I0120 18:38:08.359101 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dc638cdc-a544-4dd2-b7d0-dfebe96125cc-internal-tls-certs\") pod \"dc638cdc-a544-4dd2-b7d0-dfebe96125cc\" (UID: \"dc638cdc-a544-4dd2-b7d0-dfebe96125cc\") " Jan 20 18:38:08 crc kubenswrapper[4558]: I0120 18:38:08.359414 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/dc638cdc-a544-4dd2-b7d0-dfebe96125cc-public-tls-certs\") pod \"dc638cdc-a544-4dd2-b7d0-dfebe96125cc\" (UID: \"dc638cdc-a544-4dd2-b7d0-dfebe96125cc\") " Jan 20 18:38:08 crc kubenswrapper[4558]: I0120 18:38:08.359476 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/dc638cdc-a544-4dd2-b7d0-dfebe96125cc-credential-keys\") pod \"dc638cdc-a544-4dd2-b7d0-dfebe96125cc\" (UID: \"dc638cdc-a544-4dd2-b7d0-dfebe96125cc\") " Jan 20 18:38:08 crc kubenswrapper[4558]: I0120 18:38:08.359501 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/dc638cdc-a544-4dd2-b7d0-dfebe96125cc-fernet-keys\") pod \"dc638cdc-a544-4dd2-b7d0-dfebe96125cc\" (UID: \"dc638cdc-a544-4dd2-b7d0-dfebe96125cc\") " Jan 20 18:38:08 crc kubenswrapper[4558]: I0120 18:38:08.359539 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dc638cdc-a544-4dd2-b7d0-dfebe96125cc-scripts\") pod \"dc638cdc-a544-4dd2-b7d0-dfebe96125cc\" (UID: \"dc638cdc-a544-4dd2-b7d0-dfebe96125cc\") " Jan 20 18:38:08 crc kubenswrapper[4558]: I0120 18:38:08.359574 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc638cdc-a544-4dd2-b7d0-dfebe96125cc-combined-ca-bundle\") pod \"dc638cdc-a544-4dd2-b7d0-dfebe96125cc\" (UID: \"dc638cdc-a544-4dd2-b7d0-dfebe96125cc\") " Jan 20 18:38:08 crc kubenswrapper[4558]: I0120 18:38:08.359599 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc638cdc-a544-4dd2-b7d0-dfebe96125cc-config-data\") pod \"dc638cdc-a544-4dd2-b7d0-dfebe96125cc\" (UID: \"dc638cdc-a544-4dd2-b7d0-dfebe96125cc\") " Jan 20 18:38:08 crc kubenswrapper[4558]: I0120 18:38:08.359621 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q8t8f\" (UniqueName: \"kubernetes.io/projected/dc638cdc-a544-4dd2-b7d0-dfebe96125cc-kube-api-access-q8t8f\") pod \"dc638cdc-a544-4dd2-b7d0-dfebe96125cc\" (UID: \"dc638cdc-a544-4dd2-b7d0-dfebe96125cc\") " Jan 20 18:38:08 crc kubenswrapper[4558]: I0120 18:38:08.364555 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dc638cdc-a544-4dd2-b7d0-dfebe96125cc-kube-api-access-q8t8f" (OuterVolumeSpecName: "kube-api-access-q8t8f") pod "dc638cdc-a544-4dd2-b7d0-dfebe96125cc" (UID: "dc638cdc-a544-4dd2-b7d0-dfebe96125cc"). InnerVolumeSpecName "kube-api-access-q8t8f". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:38:08 crc kubenswrapper[4558]: I0120 18:38:08.364679 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc638cdc-a544-4dd2-b7d0-dfebe96125cc-scripts" (OuterVolumeSpecName: "scripts") pod "dc638cdc-a544-4dd2-b7d0-dfebe96125cc" (UID: "dc638cdc-a544-4dd2-b7d0-dfebe96125cc"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:38:08 crc kubenswrapper[4558]: I0120 18:38:08.367383 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc638cdc-a544-4dd2-b7d0-dfebe96125cc-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "dc638cdc-a544-4dd2-b7d0-dfebe96125cc" (UID: "dc638cdc-a544-4dd2-b7d0-dfebe96125cc"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:38:08 crc kubenswrapper[4558]: I0120 18:38:08.368952 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc638cdc-a544-4dd2-b7d0-dfebe96125cc-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "dc638cdc-a544-4dd2-b7d0-dfebe96125cc" (UID: "dc638cdc-a544-4dd2-b7d0-dfebe96125cc"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:38:08 crc kubenswrapper[4558]: I0120 18:38:08.380903 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc638cdc-a544-4dd2-b7d0-dfebe96125cc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dc638cdc-a544-4dd2-b7d0-dfebe96125cc" (UID: "dc638cdc-a544-4dd2-b7d0-dfebe96125cc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:38:08 crc kubenswrapper[4558]: I0120 18:38:08.393588 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc638cdc-a544-4dd2-b7d0-dfebe96125cc-config-data" (OuterVolumeSpecName: "config-data") pod "dc638cdc-a544-4dd2-b7d0-dfebe96125cc" (UID: "dc638cdc-a544-4dd2-b7d0-dfebe96125cc"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:38:08 crc kubenswrapper[4558]: I0120 18:38:08.395319 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc638cdc-a544-4dd2-b7d0-dfebe96125cc-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "dc638cdc-a544-4dd2-b7d0-dfebe96125cc" (UID: "dc638cdc-a544-4dd2-b7d0-dfebe96125cc"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:38:08 crc kubenswrapper[4558]: I0120 18:38:08.399555 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dc638cdc-a544-4dd2-b7d0-dfebe96125cc-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "dc638cdc-a544-4dd2-b7d0-dfebe96125cc" (UID: "dc638cdc-a544-4dd2-b7d0-dfebe96125cc"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:38:08 crc kubenswrapper[4558]: I0120 18:38:08.461003 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q8t8f\" (UniqueName: \"kubernetes.io/projected/dc638cdc-a544-4dd2-b7d0-dfebe96125cc-kube-api-access-q8t8f\") on node \"crc\" DevicePath \"\"" Jan 20 18:38:08 crc kubenswrapper[4558]: I0120 18:38:08.461041 4558 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dc638cdc-a544-4dd2-b7d0-dfebe96125cc-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 18:38:08 crc kubenswrapper[4558]: I0120 18:38:08.461061 4558 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/dc638cdc-a544-4dd2-b7d0-dfebe96125cc-public-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 18:38:08 crc kubenswrapper[4558]: I0120 18:38:08.461072 4558 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/dc638cdc-a544-4dd2-b7d0-dfebe96125cc-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 20 18:38:08 crc kubenswrapper[4558]: I0120 18:38:08.461082 4558 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/dc638cdc-a544-4dd2-b7d0-dfebe96125cc-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 20 18:38:08 crc kubenswrapper[4558]: I0120 18:38:08.461090 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dc638cdc-a544-4dd2-b7d0-dfebe96125cc-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 18:38:08 crc kubenswrapper[4558]: I0120 18:38:08.461099 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dc638cdc-a544-4dd2-b7d0-dfebe96125cc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:38:08 crc kubenswrapper[4558]: I0120 18:38:08.461107 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dc638cdc-a544-4dd2-b7d0-dfebe96125cc-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 18:38:08 crc kubenswrapper[4558]: I0120 18:38:08.850710 4558 generic.go:334] "Generic (PLEG): container finished" podID="dc638cdc-a544-4dd2-b7d0-dfebe96125cc" containerID="81628dd3740401490a8ba03aed6f712d717b6d0ca934f70ba7b6c2f25fcb6b1e" exitCode=0 Jan 20 18:38:08 crc kubenswrapper[4558]: I0120 18:38:08.850772 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-5b89fc9467-l9h86" event={"ID":"dc638cdc-a544-4dd2-b7d0-dfebe96125cc","Type":"ContainerDied","Data":"81628dd3740401490a8ba03aed6f712d717b6d0ca934f70ba7b6c2f25fcb6b1e"} Jan 20 18:38:08 crc kubenswrapper[4558]: I0120 18:38:08.850819 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-5b89fc9467-l9h86" event={"ID":"dc638cdc-a544-4dd2-b7d0-dfebe96125cc","Type":"ContainerDied","Data":"54450e8f51615043670d93072ff8fb6b2f140aaef7fb5707876e532c919eb7ed"} Jan 20 18:38:08 crc kubenswrapper[4558]: I0120 18:38:08.850845 4558 scope.go:117] "RemoveContainer" containerID="81628dd3740401490a8ba03aed6f712d717b6d0ca934f70ba7b6c2f25fcb6b1e" Jan 20 18:38:08 crc kubenswrapper[4558]: I0120 18:38:08.852243 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-5b89fc9467-l9h86" Jan 20 18:38:08 crc kubenswrapper[4558]: I0120 18:38:08.873299 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone-5b89fc9467-l9h86"] Jan 20 18:38:08 crc kubenswrapper[4558]: I0120 18:38:08.875611 4558 scope.go:117] "RemoveContainer" containerID="81628dd3740401490a8ba03aed6f712d717b6d0ca934f70ba7b6c2f25fcb6b1e" Jan 20 18:38:08 crc kubenswrapper[4558]: E0120 18:38:08.876032 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"81628dd3740401490a8ba03aed6f712d717b6d0ca934f70ba7b6c2f25fcb6b1e\": container with ID starting with 81628dd3740401490a8ba03aed6f712d717b6d0ca934f70ba7b6c2f25fcb6b1e not found: ID does not exist" containerID="81628dd3740401490a8ba03aed6f712d717b6d0ca934f70ba7b6c2f25fcb6b1e" Jan 20 18:38:08 crc kubenswrapper[4558]: I0120 18:38:08.876081 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"81628dd3740401490a8ba03aed6f712d717b6d0ca934f70ba7b6c2f25fcb6b1e"} err="failed to get container status \"81628dd3740401490a8ba03aed6f712d717b6d0ca934f70ba7b6c2f25fcb6b1e\": rpc error: code = NotFound desc = could not find container \"81628dd3740401490a8ba03aed6f712d717b6d0ca934f70ba7b6c2f25fcb6b1e\": container with ID starting with 81628dd3740401490a8ba03aed6f712d717b6d0ca934f70ba7b6c2f25fcb6b1e not found: ID does not exist" Jan 20 18:38:08 crc kubenswrapper[4558]: I0120 18:38:08.879738 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/keystone-5b89fc9467-l9h86"] Jan 20 18:38:09 crc kubenswrapper[4558]: I0120 18:38:09.847555 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone-db-create-mbtxd"] Jan 20 18:38:09 crc kubenswrapper[4558]: I0120 18:38:09.855668 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/keystone-db-create-mbtxd"] Jan 20 18:38:09 crc kubenswrapper[4558]: I0120 18:38:09.861734 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone-66d9-account-create-update-tp4cf"] Jan 20 18:38:09 crc kubenswrapper[4558]: I0120 18:38:09.866072 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone66d9-account-delete-7ffb2"] Jan 20 18:38:09 crc kubenswrapper[4558]: I0120 18:38:09.870140 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/keystone-66d9-account-create-update-tp4cf"] Jan 20 18:38:09 crc kubenswrapper[4558]: I0120 18:38:09.874143 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/keystone66d9-account-delete-7ffb2"] Jan 20 18:38:10 crc kubenswrapper[4558]: I0120 18:38:10.045668 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/keystone-db-create-6twzj"] Jan 20 18:38:10 crc kubenswrapper[4558]: E0120 18:38:10.045949 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e707bfce-e265-4454-940b-e792ca8d2bf3" containerName="mariadb-account-delete" Jan 20 18:38:10 crc kubenswrapper[4558]: I0120 18:38:10.045966 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e707bfce-e265-4454-940b-e792ca8d2bf3" containerName="mariadb-account-delete" Jan 20 18:38:10 crc kubenswrapper[4558]: E0120 18:38:10.045978 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc638cdc-a544-4dd2-b7d0-dfebe96125cc" containerName="keystone-api" Jan 20 18:38:10 crc 
kubenswrapper[4558]: I0120 18:38:10.045984 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc638cdc-a544-4dd2-b7d0-dfebe96125cc" containerName="keystone-api" Jan 20 18:38:10 crc kubenswrapper[4558]: I0120 18:38:10.046132 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e707bfce-e265-4454-940b-e792ca8d2bf3" containerName="mariadb-account-delete" Jan 20 18:38:10 crc kubenswrapper[4558]: I0120 18:38:10.046226 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc638cdc-a544-4dd2-b7d0-dfebe96125cc" containerName="keystone-api" Jan 20 18:38:10 crc kubenswrapper[4558]: I0120 18:38:10.046701 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-db-create-6twzj" Jan 20 18:38:10 crc kubenswrapper[4558]: I0120 18:38:10.054880 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-db-create-6twzj"] Jan 20 18:38:10 crc kubenswrapper[4558]: I0120 18:38:10.058980 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/keystone-ea52-account-create-update-7q2rz"] Jan 20 18:38:10 crc kubenswrapper[4558]: I0120 18:38:10.060059 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-ea52-account-create-update-7q2rz" Jan 20 18:38:10 crc kubenswrapper[4558]: I0120 18:38:10.062409 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-db-secret" Jan 20 18:38:10 crc kubenswrapper[4558]: I0120 18:38:10.075264 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-ea52-account-create-update-7q2rz"] Jan 20 18:38:10 crc kubenswrapper[4558]: I0120 18:38:10.088265 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lxdv7\" (UniqueName: \"kubernetes.io/projected/6142a71f-3e1f-4769-9197-0edff9d8461c-kube-api-access-lxdv7\") pod \"keystone-ea52-account-create-update-7q2rz\" (UID: \"6142a71f-3e1f-4769-9197-0edff9d8461c\") " pod="keystone-kuttl-tests/keystone-ea52-account-create-update-7q2rz" Jan 20 18:38:10 crc kubenswrapper[4558]: I0120 18:38:10.088325 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6142a71f-3e1f-4769-9197-0edff9d8461c-operator-scripts\") pod \"keystone-ea52-account-create-update-7q2rz\" (UID: \"6142a71f-3e1f-4769-9197-0edff9d8461c\") " pod="keystone-kuttl-tests/keystone-ea52-account-create-update-7q2rz" Jan 20 18:38:10 crc kubenswrapper[4558]: I0120 18:38:10.088384 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r4vcj\" (UniqueName: \"kubernetes.io/projected/625f8646-a3ab-4a12-9c99-9a85be87df11-kube-api-access-r4vcj\") pod \"keystone-db-create-6twzj\" (UID: \"625f8646-a3ab-4a12-9c99-9a85be87df11\") " pod="keystone-kuttl-tests/keystone-db-create-6twzj" Jan 20 18:38:10 crc kubenswrapper[4558]: I0120 18:38:10.088426 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/625f8646-a3ab-4a12-9c99-9a85be87df11-operator-scripts\") pod \"keystone-db-create-6twzj\" (UID: \"625f8646-a3ab-4a12-9c99-9a85be87df11\") " pod="keystone-kuttl-tests/keystone-db-create-6twzj" Jan 20 18:38:10 crc kubenswrapper[4558]: I0120 18:38:10.189724 4558 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-lxdv7\" (UniqueName: \"kubernetes.io/projected/6142a71f-3e1f-4769-9197-0edff9d8461c-kube-api-access-lxdv7\") pod \"keystone-ea52-account-create-update-7q2rz\" (UID: \"6142a71f-3e1f-4769-9197-0edff9d8461c\") " pod="keystone-kuttl-tests/keystone-ea52-account-create-update-7q2rz" Jan 20 18:38:10 crc kubenswrapper[4558]: I0120 18:38:10.189805 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6142a71f-3e1f-4769-9197-0edff9d8461c-operator-scripts\") pod \"keystone-ea52-account-create-update-7q2rz\" (UID: \"6142a71f-3e1f-4769-9197-0edff9d8461c\") " pod="keystone-kuttl-tests/keystone-ea52-account-create-update-7q2rz" Jan 20 18:38:10 crc kubenswrapper[4558]: I0120 18:38:10.189849 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r4vcj\" (UniqueName: \"kubernetes.io/projected/625f8646-a3ab-4a12-9c99-9a85be87df11-kube-api-access-r4vcj\") pod \"keystone-db-create-6twzj\" (UID: \"625f8646-a3ab-4a12-9c99-9a85be87df11\") " pod="keystone-kuttl-tests/keystone-db-create-6twzj" Jan 20 18:38:10 crc kubenswrapper[4558]: I0120 18:38:10.189895 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/625f8646-a3ab-4a12-9c99-9a85be87df11-operator-scripts\") pod \"keystone-db-create-6twzj\" (UID: \"625f8646-a3ab-4a12-9c99-9a85be87df11\") " pod="keystone-kuttl-tests/keystone-db-create-6twzj" Jan 20 18:38:10 crc kubenswrapper[4558]: I0120 18:38:10.190820 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/625f8646-a3ab-4a12-9c99-9a85be87df11-operator-scripts\") pod \"keystone-db-create-6twzj\" (UID: \"625f8646-a3ab-4a12-9c99-9a85be87df11\") " pod="keystone-kuttl-tests/keystone-db-create-6twzj" Jan 20 18:38:10 crc kubenswrapper[4558]: I0120 18:38:10.190846 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6142a71f-3e1f-4769-9197-0edff9d8461c-operator-scripts\") pod \"keystone-ea52-account-create-update-7q2rz\" (UID: \"6142a71f-3e1f-4769-9197-0edff9d8461c\") " pod="keystone-kuttl-tests/keystone-ea52-account-create-update-7q2rz" Jan 20 18:38:10 crc kubenswrapper[4558]: I0120 18:38:10.205711 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r4vcj\" (UniqueName: \"kubernetes.io/projected/625f8646-a3ab-4a12-9c99-9a85be87df11-kube-api-access-r4vcj\") pod \"keystone-db-create-6twzj\" (UID: \"625f8646-a3ab-4a12-9c99-9a85be87df11\") " pod="keystone-kuttl-tests/keystone-db-create-6twzj" Jan 20 18:38:10 crc kubenswrapper[4558]: I0120 18:38:10.206227 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lxdv7\" (UniqueName: \"kubernetes.io/projected/6142a71f-3e1f-4769-9197-0edff9d8461c-kube-api-access-lxdv7\") pod \"keystone-ea52-account-create-update-7q2rz\" (UID: \"6142a71f-3e1f-4769-9197-0edff9d8461c\") " pod="keystone-kuttl-tests/keystone-ea52-account-create-update-7q2rz" Jan 20 18:38:10 crc kubenswrapper[4558]: I0120 18:38:10.362098 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-db-create-6twzj" Jan 20 18:38:10 crc kubenswrapper[4558]: I0120 18:38:10.371437 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-ea52-account-create-update-7q2rz" Jan 20 18:38:10 crc kubenswrapper[4558]: I0120 18:38:10.566693 4558 scope.go:117] "RemoveContainer" containerID="77ce04c6daee7d4f9b776bd1c80b448f9c4750d5cb5c7dd182ff28963a4bcf99" Jan 20 18:38:10 crc kubenswrapper[4558]: E0120 18:38:10.567469 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:38:10 crc kubenswrapper[4558]: I0120 18:38:10.576705 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="36aa4806-4150-414e-9348-da78e951a8c5" path="/var/lib/kubelet/pods/36aa4806-4150-414e-9348-da78e951a8c5/volumes" Jan 20 18:38:10 crc kubenswrapper[4558]: I0120 18:38:10.577475 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a933e619-6193-4e4e-966d-1d51435ddcc2" path="/var/lib/kubelet/pods/a933e619-6193-4e4e-966d-1d51435ddcc2/volumes" Jan 20 18:38:10 crc kubenswrapper[4558]: I0120 18:38:10.577978 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dc638cdc-a544-4dd2-b7d0-dfebe96125cc" path="/var/lib/kubelet/pods/dc638cdc-a544-4dd2-b7d0-dfebe96125cc/volumes" Jan 20 18:38:10 crc kubenswrapper[4558]: I0120 18:38:10.578507 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e707bfce-e265-4454-940b-e792ca8d2bf3" path="/var/lib/kubelet/pods/e707bfce-e265-4454-940b-e792ca8d2bf3/volumes" Jan 20 18:38:10 crc kubenswrapper[4558]: I0120 18:38:10.784199 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-ea52-account-create-update-7q2rz"] Jan 20 18:38:10 crc kubenswrapper[4558]: I0120 18:38:10.811044 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-db-create-6twzj"] Jan 20 18:38:10 crc kubenswrapper[4558]: W0120 18:38:10.814153 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod625f8646_a3ab_4a12_9c99_9a85be87df11.slice/crio-01da9922194e03ca6b577aef744de918186a0ddc700c7dd6530854b535e8317b WatchSource:0}: Error finding container 01da9922194e03ca6b577aef744de918186a0ddc700c7dd6530854b535e8317b: Status 404 returned error can't find the container with id 01da9922194e03ca6b577aef744de918186a0ddc700c7dd6530854b535e8317b Jan 20 18:38:10 crc kubenswrapper[4558]: I0120 18:38:10.868977 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-ea52-account-create-update-7q2rz" event={"ID":"6142a71f-3e1f-4769-9197-0edff9d8461c","Type":"ContainerStarted","Data":"41fee0dda990c282f52db43e6112208df8631907b0d2ec6dd64cd66436f9d2f2"} Jan 20 18:38:10 crc kubenswrapper[4558]: I0120 18:38:10.870192 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-db-create-6twzj" event={"ID":"625f8646-a3ab-4a12-9c99-9a85be87df11","Type":"ContainerStarted","Data":"01da9922194e03ca6b577aef744de918186a0ddc700c7dd6530854b535e8317b"} Jan 20 18:38:11 crc kubenswrapper[4558]: I0120 18:38:11.879911 4558 generic.go:334] "Generic (PLEG): container finished" podID="625f8646-a3ab-4a12-9c99-9a85be87df11" 
containerID="d8cee8b135a2bc566a472b4aebed535eb36e6c617239e0be350015f5b2432920" exitCode=0 Jan 20 18:38:11 crc kubenswrapper[4558]: I0120 18:38:11.879980 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-db-create-6twzj" event={"ID":"625f8646-a3ab-4a12-9c99-9a85be87df11","Type":"ContainerDied","Data":"d8cee8b135a2bc566a472b4aebed535eb36e6c617239e0be350015f5b2432920"} Jan 20 18:38:11 crc kubenswrapper[4558]: I0120 18:38:11.881979 4558 generic.go:334] "Generic (PLEG): container finished" podID="6142a71f-3e1f-4769-9197-0edff9d8461c" containerID="ef95d5a546be1c7edb92582deb686820cb6a2afe9e3436fb02d6af327255f35b" exitCode=0 Jan 20 18:38:11 crc kubenswrapper[4558]: I0120 18:38:11.882013 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-ea52-account-create-update-7q2rz" event={"ID":"6142a71f-3e1f-4769-9197-0edff9d8461c","Type":"ContainerDied","Data":"ef95d5a546be1c7edb92582deb686820cb6a2afe9e3436fb02d6af327255f35b"} Jan 20 18:38:13 crc kubenswrapper[4558]: I0120 18:38:13.157517 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-ea52-account-create-update-7q2rz" Jan 20 18:38:13 crc kubenswrapper[4558]: I0120 18:38:13.223721 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-db-create-6twzj" Jan 20 18:38:13 crc kubenswrapper[4558]: I0120 18:38:13.244033 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6142a71f-3e1f-4769-9197-0edff9d8461c-operator-scripts\") pod \"6142a71f-3e1f-4769-9197-0edff9d8461c\" (UID: \"6142a71f-3e1f-4769-9197-0edff9d8461c\") " Jan 20 18:38:13 crc kubenswrapper[4558]: I0120 18:38:13.244246 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r4vcj\" (UniqueName: \"kubernetes.io/projected/625f8646-a3ab-4a12-9c99-9a85be87df11-kube-api-access-r4vcj\") pod \"625f8646-a3ab-4a12-9c99-9a85be87df11\" (UID: \"625f8646-a3ab-4a12-9c99-9a85be87df11\") " Jan 20 18:38:13 crc kubenswrapper[4558]: I0120 18:38:13.244316 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lxdv7\" (UniqueName: \"kubernetes.io/projected/6142a71f-3e1f-4769-9197-0edff9d8461c-kube-api-access-lxdv7\") pod \"6142a71f-3e1f-4769-9197-0edff9d8461c\" (UID: \"6142a71f-3e1f-4769-9197-0edff9d8461c\") " Jan 20 18:38:13 crc kubenswrapper[4558]: I0120 18:38:13.244426 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/625f8646-a3ab-4a12-9c99-9a85be87df11-operator-scripts\") pod \"625f8646-a3ab-4a12-9c99-9a85be87df11\" (UID: \"625f8646-a3ab-4a12-9c99-9a85be87df11\") " Jan 20 18:38:13 crc kubenswrapper[4558]: I0120 18:38:13.244712 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6142a71f-3e1f-4769-9197-0edff9d8461c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "6142a71f-3e1f-4769-9197-0edff9d8461c" (UID: "6142a71f-3e1f-4769-9197-0edff9d8461c"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:38:13 crc kubenswrapper[4558]: I0120 18:38:13.244960 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/625f8646-a3ab-4a12-9c99-9a85be87df11-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "625f8646-a3ab-4a12-9c99-9a85be87df11" (UID: "625f8646-a3ab-4a12-9c99-9a85be87df11"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:38:13 crc kubenswrapper[4558]: I0120 18:38:13.245329 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/625f8646-a3ab-4a12-9c99-9a85be87df11-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 18:38:13 crc kubenswrapper[4558]: I0120 18:38:13.245379 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6142a71f-3e1f-4769-9197-0edff9d8461c-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 18:38:13 crc kubenswrapper[4558]: I0120 18:38:13.249715 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6142a71f-3e1f-4769-9197-0edff9d8461c-kube-api-access-lxdv7" (OuterVolumeSpecName: "kube-api-access-lxdv7") pod "6142a71f-3e1f-4769-9197-0edff9d8461c" (UID: "6142a71f-3e1f-4769-9197-0edff9d8461c"). InnerVolumeSpecName "kube-api-access-lxdv7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:38:13 crc kubenswrapper[4558]: I0120 18:38:13.249753 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/625f8646-a3ab-4a12-9c99-9a85be87df11-kube-api-access-r4vcj" (OuterVolumeSpecName: "kube-api-access-r4vcj") pod "625f8646-a3ab-4a12-9c99-9a85be87df11" (UID: "625f8646-a3ab-4a12-9c99-9a85be87df11"). InnerVolumeSpecName "kube-api-access-r4vcj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:38:13 crc kubenswrapper[4558]: I0120 18:38:13.347276 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r4vcj\" (UniqueName: \"kubernetes.io/projected/625f8646-a3ab-4a12-9c99-9a85be87df11-kube-api-access-r4vcj\") on node \"crc\" DevicePath \"\"" Jan 20 18:38:13 crc kubenswrapper[4558]: I0120 18:38:13.347314 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lxdv7\" (UniqueName: \"kubernetes.io/projected/6142a71f-3e1f-4769-9197-0edff9d8461c-kube-api-access-lxdv7\") on node \"crc\" DevicePath \"\"" Jan 20 18:38:13 crc kubenswrapper[4558]: I0120 18:38:13.897612 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-ea52-account-create-update-7q2rz" Jan 20 18:38:13 crc kubenswrapper[4558]: I0120 18:38:13.898261 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-ea52-account-create-update-7q2rz" event={"ID":"6142a71f-3e1f-4769-9197-0edff9d8461c","Type":"ContainerDied","Data":"41fee0dda990c282f52db43e6112208df8631907b0d2ec6dd64cd66436f9d2f2"} Jan 20 18:38:13 crc kubenswrapper[4558]: I0120 18:38:13.898312 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="41fee0dda990c282f52db43e6112208df8631907b0d2ec6dd64cd66436f9d2f2" Jan 20 18:38:13 crc kubenswrapper[4558]: I0120 18:38:13.899754 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-db-create-6twzj" event={"ID":"625f8646-a3ab-4a12-9c99-9a85be87df11","Type":"ContainerDied","Data":"01da9922194e03ca6b577aef744de918186a0ddc700c7dd6530854b535e8317b"} Jan 20 18:38:13 crc kubenswrapper[4558]: I0120 18:38:13.899779 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="01da9922194e03ca6b577aef744de918186a0ddc700c7dd6530854b535e8317b" Jan 20 18:38:13 crc kubenswrapper[4558]: I0120 18:38:13.899795 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-db-create-6twzj" Jan 20 18:38:15 crc kubenswrapper[4558]: I0120 18:38:15.607751 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/keystone-db-sync-hrkhg"] Jan 20 18:38:15 crc kubenswrapper[4558]: E0120 18:38:15.608358 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6142a71f-3e1f-4769-9197-0edff9d8461c" containerName="mariadb-account-create-update" Jan 20 18:38:15 crc kubenswrapper[4558]: I0120 18:38:15.608373 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6142a71f-3e1f-4769-9197-0edff9d8461c" containerName="mariadb-account-create-update" Jan 20 18:38:15 crc kubenswrapper[4558]: E0120 18:38:15.608404 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="625f8646-a3ab-4a12-9c99-9a85be87df11" containerName="mariadb-database-create" Jan 20 18:38:15 crc kubenswrapper[4558]: I0120 18:38:15.608411 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="625f8646-a3ab-4a12-9c99-9a85be87df11" containerName="mariadb-database-create" Jan 20 18:38:15 crc kubenswrapper[4558]: I0120 18:38:15.608542 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="625f8646-a3ab-4a12-9c99-9a85be87df11" containerName="mariadb-database-create" Jan 20 18:38:15 crc kubenswrapper[4558]: I0120 18:38:15.608563 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="6142a71f-3e1f-4769-9197-0edff9d8461c" containerName="mariadb-account-create-update" Jan 20 18:38:15 crc kubenswrapper[4558]: I0120 18:38:15.609046 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-db-sync-hrkhg" Jan 20 18:38:15 crc kubenswrapper[4558]: I0120 18:38:15.611904 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone" Jan 20 18:38:15 crc kubenswrapper[4558]: I0120 18:38:15.613494 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-keystone-dockercfg-m7p6n" Jan 20 18:38:15 crc kubenswrapper[4558]: I0120 18:38:15.613999 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-config-data" Jan 20 18:38:15 crc kubenswrapper[4558]: I0120 18:38:15.614068 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-scripts" Jan 20 18:38:15 crc kubenswrapper[4558]: I0120 18:38:15.618926 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-db-sync-hrkhg"] Jan 20 18:38:15 crc kubenswrapper[4558]: I0120 18:38:15.679878 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9ae74282-903a-42d6-87be-deb438d77576-config-data\") pod \"keystone-db-sync-hrkhg\" (UID: \"9ae74282-903a-42d6-87be-deb438d77576\") " pod="keystone-kuttl-tests/keystone-db-sync-hrkhg" Jan 20 18:38:15 crc kubenswrapper[4558]: I0120 18:38:15.679960 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c5bkb\" (UniqueName: \"kubernetes.io/projected/9ae74282-903a-42d6-87be-deb438d77576-kube-api-access-c5bkb\") pod \"keystone-db-sync-hrkhg\" (UID: \"9ae74282-903a-42d6-87be-deb438d77576\") " pod="keystone-kuttl-tests/keystone-db-sync-hrkhg" Jan 20 18:38:15 crc kubenswrapper[4558]: I0120 18:38:15.781514 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c5bkb\" (UniqueName: \"kubernetes.io/projected/9ae74282-903a-42d6-87be-deb438d77576-kube-api-access-c5bkb\") pod \"keystone-db-sync-hrkhg\" (UID: \"9ae74282-903a-42d6-87be-deb438d77576\") " pod="keystone-kuttl-tests/keystone-db-sync-hrkhg" Jan 20 18:38:15 crc kubenswrapper[4558]: I0120 18:38:15.781794 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9ae74282-903a-42d6-87be-deb438d77576-config-data\") pod \"keystone-db-sync-hrkhg\" (UID: \"9ae74282-903a-42d6-87be-deb438d77576\") " pod="keystone-kuttl-tests/keystone-db-sync-hrkhg" Jan 20 18:38:15 crc kubenswrapper[4558]: I0120 18:38:15.791089 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9ae74282-903a-42d6-87be-deb438d77576-config-data\") pod \"keystone-db-sync-hrkhg\" (UID: \"9ae74282-903a-42d6-87be-deb438d77576\") " pod="keystone-kuttl-tests/keystone-db-sync-hrkhg" Jan 20 18:38:15 crc kubenswrapper[4558]: I0120 18:38:15.796245 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c5bkb\" (UniqueName: \"kubernetes.io/projected/9ae74282-903a-42d6-87be-deb438d77576-kube-api-access-c5bkb\") pod \"keystone-db-sync-hrkhg\" (UID: \"9ae74282-903a-42d6-87be-deb438d77576\") " pod="keystone-kuttl-tests/keystone-db-sync-hrkhg" Jan 20 18:38:15 crc kubenswrapper[4558]: I0120 18:38:15.927694 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-db-sync-hrkhg" Jan 20 18:38:16 crc kubenswrapper[4558]: I0120 18:38:16.312025 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-db-sync-hrkhg"] Jan 20 18:38:16 crc kubenswrapper[4558]: I0120 18:38:16.934657 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-db-sync-hrkhg" event={"ID":"9ae74282-903a-42d6-87be-deb438d77576","Type":"ContainerStarted","Data":"b0d65e25768ceb3be84b2eff7f44c20dbc21e5f7fb58429a4d05b2f5522dce96"} Jan 20 18:38:16 crc kubenswrapper[4558]: I0120 18:38:16.935026 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-db-sync-hrkhg" event={"ID":"9ae74282-903a-42d6-87be-deb438d77576","Type":"ContainerStarted","Data":"8be7612c92ed7e961921199b754abbf8816ffd71d22cce2faea6ae478541d47b"} Jan 20 18:38:16 crc kubenswrapper[4558]: I0120 18:38:16.955289 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="keystone-kuttl-tests/keystone-db-sync-hrkhg" podStartSLOduration=1.9552650520000001 podStartE2EDuration="1.955265052s" podCreationTimestamp="2026-01-20 18:38:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:38:16.949116482 +0000 UTC m=+6990.709454450" watchObservedRunningTime="2026-01-20 18:38:16.955265052 +0000 UTC m=+6990.715603020" Jan 20 18:38:17 crc kubenswrapper[4558]: I0120 18:38:17.945723 4558 generic.go:334] "Generic (PLEG): container finished" podID="9ae74282-903a-42d6-87be-deb438d77576" containerID="b0d65e25768ceb3be84b2eff7f44c20dbc21e5f7fb58429a4d05b2f5522dce96" exitCode=0 Jan 20 18:38:17 crc kubenswrapper[4558]: I0120 18:38:17.945782 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-db-sync-hrkhg" event={"ID":"9ae74282-903a-42d6-87be-deb438d77576","Type":"ContainerDied","Data":"b0d65e25768ceb3be84b2eff7f44c20dbc21e5f7fb58429a4d05b2f5522dce96"} Jan 20 18:38:19 crc kubenswrapper[4558]: I0120 18:38:19.183576 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-db-sync-hrkhg" Jan 20 18:38:19 crc kubenswrapper[4558]: I0120 18:38:19.328800 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c5bkb\" (UniqueName: \"kubernetes.io/projected/9ae74282-903a-42d6-87be-deb438d77576-kube-api-access-c5bkb\") pod \"9ae74282-903a-42d6-87be-deb438d77576\" (UID: \"9ae74282-903a-42d6-87be-deb438d77576\") " Jan 20 18:38:19 crc kubenswrapper[4558]: I0120 18:38:19.328872 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9ae74282-903a-42d6-87be-deb438d77576-config-data\") pod \"9ae74282-903a-42d6-87be-deb438d77576\" (UID: \"9ae74282-903a-42d6-87be-deb438d77576\") " Jan 20 18:38:19 crc kubenswrapper[4558]: I0120 18:38:19.335840 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9ae74282-903a-42d6-87be-deb438d77576-kube-api-access-c5bkb" (OuterVolumeSpecName: "kube-api-access-c5bkb") pod "9ae74282-903a-42d6-87be-deb438d77576" (UID: "9ae74282-903a-42d6-87be-deb438d77576"). InnerVolumeSpecName "kube-api-access-c5bkb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:38:19 crc kubenswrapper[4558]: I0120 18:38:19.363112 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9ae74282-903a-42d6-87be-deb438d77576-config-data" (OuterVolumeSpecName: "config-data") pod "9ae74282-903a-42d6-87be-deb438d77576" (UID: "9ae74282-903a-42d6-87be-deb438d77576"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:38:19 crc kubenswrapper[4558]: I0120 18:38:19.430912 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c5bkb\" (UniqueName: \"kubernetes.io/projected/9ae74282-903a-42d6-87be-deb438d77576-kube-api-access-c5bkb\") on node \"crc\" DevicePath \"\"" Jan 20 18:38:19 crc kubenswrapper[4558]: I0120 18:38:19.430947 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9ae74282-903a-42d6-87be-deb438d77576-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 18:38:19 crc kubenswrapper[4558]: I0120 18:38:19.968258 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-db-sync-hrkhg" event={"ID":"9ae74282-903a-42d6-87be-deb438d77576","Type":"ContainerDied","Data":"8be7612c92ed7e961921199b754abbf8816ffd71d22cce2faea6ae478541d47b"} Jan 20 18:38:19 crc kubenswrapper[4558]: I0120 18:38:19.968313 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8be7612c92ed7e961921199b754abbf8816ffd71d22cce2faea6ae478541d47b" Jan 20 18:38:19 crc kubenswrapper[4558]: I0120 18:38:19.968355 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-db-sync-hrkhg" Jan 20 18:38:20 crc kubenswrapper[4558]: I0120 18:38:20.072362 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/keystone-bootstrap-tw56s"] Jan 20 18:38:20 crc kubenswrapper[4558]: E0120 18:38:20.072625 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ae74282-903a-42d6-87be-deb438d77576" containerName="keystone-db-sync" Jan 20 18:38:20 crc kubenswrapper[4558]: I0120 18:38:20.072641 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ae74282-903a-42d6-87be-deb438d77576" containerName="keystone-db-sync" Jan 20 18:38:20 crc kubenswrapper[4558]: I0120 18:38:20.072776 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="9ae74282-903a-42d6-87be-deb438d77576" containerName="keystone-db-sync" Jan 20 18:38:20 crc kubenswrapper[4558]: I0120 18:38:20.073224 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-bootstrap-tw56s" Jan 20 18:38:20 crc kubenswrapper[4558]: I0120 18:38:20.075721 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-scripts" Jan 20 18:38:20 crc kubenswrapper[4558]: I0120 18:38:20.076702 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"osp-secret" Jan 20 18:38:20 crc kubenswrapper[4558]: I0120 18:38:20.077029 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone" Jan 20 18:38:20 crc kubenswrapper[4558]: I0120 18:38:20.077200 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-config-data" Jan 20 18:38:20 crc kubenswrapper[4558]: I0120 18:38:20.077735 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-keystone-dockercfg-m7p6n" Jan 20 18:38:20 crc kubenswrapper[4558]: I0120 18:38:20.088727 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-bootstrap-tw56s"] Jan 20 18:38:20 crc kubenswrapper[4558]: I0120 18:38:20.139462 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4466d9fe-a853-466d-9897-1364d0a39e4e-scripts\") pod \"keystone-bootstrap-tw56s\" (UID: \"4466d9fe-a853-466d-9897-1364d0a39e4e\") " pod="keystone-kuttl-tests/keystone-bootstrap-tw56s" Jan 20 18:38:20 crc kubenswrapper[4558]: I0120 18:38:20.139499 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4466d9fe-a853-466d-9897-1364d0a39e4e-credential-keys\") pod \"keystone-bootstrap-tw56s\" (UID: \"4466d9fe-a853-466d-9897-1364d0a39e4e\") " pod="keystone-kuttl-tests/keystone-bootstrap-tw56s" Jan 20 18:38:20 crc kubenswrapper[4558]: I0120 18:38:20.139524 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4466d9fe-a853-466d-9897-1364d0a39e4e-config-data\") pod \"keystone-bootstrap-tw56s\" (UID: \"4466d9fe-a853-466d-9897-1364d0a39e4e\") " pod="keystone-kuttl-tests/keystone-bootstrap-tw56s" Jan 20 18:38:20 crc kubenswrapper[4558]: I0120 18:38:20.139544 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f79r6\" (UniqueName: \"kubernetes.io/projected/4466d9fe-a853-466d-9897-1364d0a39e4e-kube-api-access-f79r6\") pod \"keystone-bootstrap-tw56s\" (UID: \"4466d9fe-a853-466d-9897-1364d0a39e4e\") " pod="keystone-kuttl-tests/keystone-bootstrap-tw56s" Jan 20 18:38:20 crc kubenswrapper[4558]: I0120 18:38:20.139748 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4466d9fe-a853-466d-9897-1364d0a39e4e-fernet-keys\") pod \"keystone-bootstrap-tw56s\" (UID: \"4466d9fe-a853-466d-9897-1364d0a39e4e\") " pod="keystone-kuttl-tests/keystone-bootstrap-tw56s" Jan 20 18:38:20 crc kubenswrapper[4558]: I0120 18:38:20.241237 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4466d9fe-a853-466d-9897-1364d0a39e4e-fernet-keys\") pod \"keystone-bootstrap-tw56s\" (UID: \"4466d9fe-a853-466d-9897-1364d0a39e4e\") " pod="keystone-kuttl-tests/keystone-bootstrap-tw56s" Jan 20 18:38:20 crc 
kubenswrapper[4558]: I0120 18:38:20.241319 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4466d9fe-a853-466d-9897-1364d0a39e4e-scripts\") pod \"keystone-bootstrap-tw56s\" (UID: \"4466d9fe-a853-466d-9897-1364d0a39e4e\") " pod="keystone-kuttl-tests/keystone-bootstrap-tw56s" Jan 20 18:38:20 crc kubenswrapper[4558]: I0120 18:38:20.241350 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4466d9fe-a853-466d-9897-1364d0a39e4e-credential-keys\") pod \"keystone-bootstrap-tw56s\" (UID: \"4466d9fe-a853-466d-9897-1364d0a39e4e\") " pod="keystone-kuttl-tests/keystone-bootstrap-tw56s" Jan 20 18:38:20 crc kubenswrapper[4558]: I0120 18:38:20.241379 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4466d9fe-a853-466d-9897-1364d0a39e4e-config-data\") pod \"keystone-bootstrap-tw56s\" (UID: \"4466d9fe-a853-466d-9897-1364d0a39e4e\") " pod="keystone-kuttl-tests/keystone-bootstrap-tw56s" Jan 20 18:38:20 crc kubenswrapper[4558]: I0120 18:38:20.241412 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f79r6\" (UniqueName: \"kubernetes.io/projected/4466d9fe-a853-466d-9897-1364d0a39e4e-kube-api-access-f79r6\") pod \"keystone-bootstrap-tw56s\" (UID: \"4466d9fe-a853-466d-9897-1364d0a39e4e\") " pod="keystone-kuttl-tests/keystone-bootstrap-tw56s" Jan 20 18:38:20 crc kubenswrapper[4558]: I0120 18:38:20.246517 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4466d9fe-a853-466d-9897-1364d0a39e4e-credential-keys\") pod \"keystone-bootstrap-tw56s\" (UID: \"4466d9fe-a853-466d-9897-1364d0a39e4e\") " pod="keystone-kuttl-tests/keystone-bootstrap-tw56s" Jan 20 18:38:20 crc kubenswrapper[4558]: I0120 18:38:20.246639 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4466d9fe-a853-466d-9897-1364d0a39e4e-fernet-keys\") pod \"keystone-bootstrap-tw56s\" (UID: \"4466d9fe-a853-466d-9897-1364d0a39e4e\") " pod="keystone-kuttl-tests/keystone-bootstrap-tw56s" Jan 20 18:38:20 crc kubenswrapper[4558]: I0120 18:38:20.247330 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4466d9fe-a853-466d-9897-1364d0a39e4e-scripts\") pod \"keystone-bootstrap-tw56s\" (UID: \"4466d9fe-a853-466d-9897-1364d0a39e4e\") " pod="keystone-kuttl-tests/keystone-bootstrap-tw56s" Jan 20 18:38:20 crc kubenswrapper[4558]: I0120 18:38:20.247499 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4466d9fe-a853-466d-9897-1364d0a39e4e-config-data\") pod \"keystone-bootstrap-tw56s\" (UID: \"4466d9fe-a853-466d-9897-1364d0a39e4e\") " pod="keystone-kuttl-tests/keystone-bootstrap-tw56s" Jan 20 18:38:20 crc kubenswrapper[4558]: I0120 18:38:20.256732 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f79r6\" (UniqueName: \"kubernetes.io/projected/4466d9fe-a853-466d-9897-1364d0a39e4e-kube-api-access-f79r6\") pod \"keystone-bootstrap-tw56s\" (UID: \"4466d9fe-a853-466d-9897-1364d0a39e4e\") " pod="keystone-kuttl-tests/keystone-bootstrap-tw56s" Jan 20 18:38:20 crc kubenswrapper[4558]: I0120 18:38:20.387772 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-bootstrap-tw56s" Jan 20 18:38:20 crc kubenswrapper[4558]: I0120 18:38:20.759082 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-bootstrap-tw56s"] Jan 20 18:38:20 crc kubenswrapper[4558]: W0120 18:38:20.762632 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4466d9fe_a853_466d_9897_1364d0a39e4e.slice/crio-a2089dff513b7b395073a938f7974fc80b461b0f9503338d93b69a45531f9fc1 WatchSource:0}: Error finding container a2089dff513b7b395073a938f7974fc80b461b0f9503338d93b69a45531f9fc1: Status 404 returned error can't find the container with id a2089dff513b7b395073a938f7974fc80b461b0f9503338d93b69a45531f9fc1 Jan 20 18:38:20 crc kubenswrapper[4558]: I0120 18:38:20.976705 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-bootstrap-tw56s" event={"ID":"4466d9fe-a853-466d-9897-1364d0a39e4e","Type":"ContainerStarted","Data":"92bbb9680ae90195d91760cb423f1150305d8dab5908c8ac0b5f009223396a4c"} Jan 20 18:38:20 crc kubenswrapper[4558]: I0120 18:38:20.976997 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-bootstrap-tw56s" event={"ID":"4466d9fe-a853-466d-9897-1364d0a39e4e","Type":"ContainerStarted","Data":"a2089dff513b7b395073a938f7974fc80b461b0f9503338d93b69a45531f9fc1"} Jan 20 18:38:20 crc kubenswrapper[4558]: I0120 18:38:20.990909 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="keystone-kuttl-tests/keystone-bootstrap-tw56s" podStartSLOduration=0.990890916 podStartE2EDuration="990.890916ms" podCreationTimestamp="2026-01-20 18:38:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:38:20.989864527 +0000 UTC m=+6994.750202494" watchObservedRunningTime="2026-01-20 18:38:20.990890916 +0000 UTC m=+6994.751228883" Jan 20 18:38:24 crc kubenswrapper[4558]: I0120 18:38:24.010321 4558 generic.go:334] "Generic (PLEG): container finished" podID="4466d9fe-a853-466d-9897-1364d0a39e4e" containerID="92bbb9680ae90195d91760cb423f1150305d8dab5908c8ac0b5f009223396a4c" exitCode=0 Jan 20 18:38:24 crc kubenswrapper[4558]: I0120 18:38:24.010373 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-bootstrap-tw56s" event={"ID":"4466d9fe-a853-466d-9897-1364d0a39e4e","Type":"ContainerDied","Data":"92bbb9680ae90195d91760cb423f1150305d8dab5908c8ac0b5f009223396a4c"} Jan 20 18:38:25 crc kubenswrapper[4558]: I0120 18:38:25.277203 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-bootstrap-tw56s" Jan 20 18:38:25 crc kubenswrapper[4558]: I0120 18:38:25.328221 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4466d9fe-a853-466d-9897-1364d0a39e4e-scripts\") pod \"4466d9fe-a853-466d-9897-1364d0a39e4e\" (UID: \"4466d9fe-a853-466d-9897-1364d0a39e4e\") " Jan 20 18:38:25 crc kubenswrapper[4558]: I0120 18:38:25.328274 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4466d9fe-a853-466d-9897-1364d0a39e4e-fernet-keys\") pod \"4466d9fe-a853-466d-9897-1364d0a39e4e\" (UID: \"4466d9fe-a853-466d-9897-1364d0a39e4e\") " Jan 20 18:38:25 crc kubenswrapper[4558]: I0120 18:38:25.328295 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4466d9fe-a853-466d-9897-1364d0a39e4e-credential-keys\") pod \"4466d9fe-a853-466d-9897-1364d0a39e4e\" (UID: \"4466d9fe-a853-466d-9897-1364d0a39e4e\") " Jan 20 18:38:25 crc kubenswrapper[4558]: I0120 18:38:25.328342 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f79r6\" (UniqueName: \"kubernetes.io/projected/4466d9fe-a853-466d-9897-1364d0a39e4e-kube-api-access-f79r6\") pod \"4466d9fe-a853-466d-9897-1364d0a39e4e\" (UID: \"4466d9fe-a853-466d-9897-1364d0a39e4e\") " Jan 20 18:38:25 crc kubenswrapper[4558]: I0120 18:38:25.328404 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4466d9fe-a853-466d-9897-1364d0a39e4e-config-data\") pod \"4466d9fe-a853-466d-9897-1364d0a39e4e\" (UID: \"4466d9fe-a853-466d-9897-1364d0a39e4e\") " Jan 20 18:38:25 crc kubenswrapper[4558]: I0120 18:38:25.334210 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4466d9fe-a853-466d-9897-1364d0a39e4e-kube-api-access-f79r6" (OuterVolumeSpecName: "kube-api-access-f79r6") pod "4466d9fe-a853-466d-9897-1364d0a39e4e" (UID: "4466d9fe-a853-466d-9897-1364d0a39e4e"). InnerVolumeSpecName "kube-api-access-f79r6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:38:25 crc kubenswrapper[4558]: I0120 18:38:25.334877 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4466d9fe-a853-466d-9897-1364d0a39e4e-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "4466d9fe-a853-466d-9897-1364d0a39e4e" (UID: "4466d9fe-a853-466d-9897-1364d0a39e4e"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:38:25 crc kubenswrapper[4558]: I0120 18:38:25.335226 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4466d9fe-a853-466d-9897-1364d0a39e4e-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "4466d9fe-a853-466d-9897-1364d0a39e4e" (UID: "4466d9fe-a853-466d-9897-1364d0a39e4e"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:38:25 crc kubenswrapper[4558]: I0120 18:38:25.337253 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4466d9fe-a853-466d-9897-1364d0a39e4e-scripts" (OuterVolumeSpecName: "scripts") pod "4466d9fe-a853-466d-9897-1364d0a39e4e" (UID: "4466d9fe-a853-466d-9897-1364d0a39e4e"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:38:25 crc kubenswrapper[4558]: I0120 18:38:25.346286 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4466d9fe-a853-466d-9897-1364d0a39e4e-config-data" (OuterVolumeSpecName: "config-data") pod "4466d9fe-a853-466d-9897-1364d0a39e4e" (UID: "4466d9fe-a853-466d-9897-1364d0a39e4e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:38:25 crc kubenswrapper[4558]: I0120 18:38:25.430361 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4466d9fe-a853-466d-9897-1364d0a39e4e-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 18:38:25 crc kubenswrapper[4558]: I0120 18:38:25.430402 4558 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4466d9fe-a853-466d-9897-1364d0a39e4e-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 20 18:38:25 crc kubenswrapper[4558]: I0120 18:38:25.430418 4558 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4466d9fe-a853-466d-9897-1364d0a39e4e-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 20 18:38:25 crc kubenswrapper[4558]: I0120 18:38:25.430438 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f79r6\" (UniqueName: \"kubernetes.io/projected/4466d9fe-a853-466d-9897-1364d0a39e4e-kube-api-access-f79r6\") on node \"crc\" DevicePath \"\"" Jan 20 18:38:25 crc kubenswrapper[4558]: I0120 18:38:25.430461 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4466d9fe-a853-466d-9897-1364d0a39e4e-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 18:38:25 crc kubenswrapper[4558]: I0120 18:38:25.566411 4558 scope.go:117] "RemoveContainer" containerID="77ce04c6daee7d4f9b776bd1c80b448f9c4750d5cb5c7dd182ff28963a4bcf99" Jan 20 18:38:25 crc kubenswrapper[4558]: E0120 18:38:25.566823 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:38:26 crc kubenswrapper[4558]: I0120 18:38:26.029452 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-bootstrap-tw56s" event={"ID":"4466d9fe-a853-466d-9897-1364d0a39e4e","Type":"ContainerDied","Data":"a2089dff513b7b395073a938f7974fc80b461b0f9503338d93b69a45531f9fc1"} Jan 20 18:38:26 crc kubenswrapper[4558]: I0120 18:38:26.029518 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a2089dff513b7b395073a938f7974fc80b461b0f9503338d93b69a45531f9fc1" Jan 20 18:38:26 crc kubenswrapper[4558]: I0120 18:38:26.029566 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-bootstrap-tw56s" Jan 20 18:38:26 crc kubenswrapper[4558]: I0120 18:38:26.127626 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/keystone-546dffd48c-6kpwd"] Jan 20 18:38:26 crc kubenswrapper[4558]: E0120 18:38:26.127981 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4466d9fe-a853-466d-9897-1364d0a39e4e" containerName="keystone-bootstrap" Jan 20 18:38:26 crc kubenswrapper[4558]: I0120 18:38:26.128002 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4466d9fe-a853-466d-9897-1364d0a39e4e" containerName="keystone-bootstrap" Jan 20 18:38:26 crc kubenswrapper[4558]: I0120 18:38:26.128255 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="4466d9fe-a853-466d-9897-1364d0a39e4e" containerName="keystone-bootstrap" Jan 20 18:38:26 crc kubenswrapper[4558]: I0120 18:38:26.128956 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-546dffd48c-6kpwd" Jan 20 18:38:26 crc kubenswrapper[4558]: I0120 18:38:26.130555 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-keystone-dockercfg-m7p6n" Jan 20 18:38:26 crc kubenswrapper[4558]: I0120 18:38:26.132749 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-config-data" Jan 20 18:38:26 crc kubenswrapper[4558]: I0120 18:38:26.133127 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone" Jan 20 18:38:26 crc kubenswrapper[4558]: I0120 18:38:26.133929 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-scripts" Jan 20 18:38:26 crc kubenswrapper[4558]: I0120 18:38:26.134813 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-546dffd48c-6kpwd"] Jan 20 18:38:26 crc kubenswrapper[4558]: I0120 18:38:26.140368 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/1d200484-d79e-42ba-ba93-c4976713e5ca-credential-keys\") pod \"keystone-546dffd48c-6kpwd\" (UID: \"1d200484-d79e-42ba-ba93-c4976713e5ca\") " pod="keystone-kuttl-tests/keystone-546dffd48c-6kpwd" Jan 20 18:38:26 crc kubenswrapper[4558]: I0120 18:38:26.140437 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1d200484-d79e-42ba-ba93-c4976713e5ca-scripts\") pod \"keystone-546dffd48c-6kpwd\" (UID: \"1d200484-d79e-42ba-ba93-c4976713e5ca\") " pod="keystone-kuttl-tests/keystone-546dffd48c-6kpwd" Jan 20 18:38:26 crc kubenswrapper[4558]: I0120 18:38:26.140509 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1d200484-d79e-42ba-ba93-c4976713e5ca-fernet-keys\") pod \"keystone-546dffd48c-6kpwd\" (UID: \"1d200484-d79e-42ba-ba93-c4976713e5ca\") " pod="keystone-kuttl-tests/keystone-546dffd48c-6kpwd" Jan 20 18:38:26 crc kubenswrapper[4558]: I0120 18:38:26.140529 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d200484-d79e-42ba-ba93-c4976713e5ca-config-data\") pod \"keystone-546dffd48c-6kpwd\" (UID: \"1d200484-d79e-42ba-ba93-c4976713e5ca\") " pod="keystone-kuttl-tests/keystone-546dffd48c-6kpwd" Jan 20 18:38:26 
crc kubenswrapper[4558]: I0120 18:38:26.140966 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fqwpc\" (UniqueName: \"kubernetes.io/projected/1d200484-d79e-42ba-ba93-c4976713e5ca-kube-api-access-fqwpc\") pod \"keystone-546dffd48c-6kpwd\" (UID: \"1d200484-d79e-42ba-ba93-c4976713e5ca\") " pod="keystone-kuttl-tests/keystone-546dffd48c-6kpwd" Jan 20 18:38:26 crc kubenswrapper[4558]: I0120 18:38:26.242942 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d200484-d79e-42ba-ba93-c4976713e5ca-config-data\") pod \"keystone-546dffd48c-6kpwd\" (UID: \"1d200484-d79e-42ba-ba93-c4976713e5ca\") " pod="keystone-kuttl-tests/keystone-546dffd48c-6kpwd" Jan 20 18:38:26 crc kubenswrapper[4558]: I0120 18:38:26.243010 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fqwpc\" (UniqueName: \"kubernetes.io/projected/1d200484-d79e-42ba-ba93-c4976713e5ca-kube-api-access-fqwpc\") pod \"keystone-546dffd48c-6kpwd\" (UID: \"1d200484-d79e-42ba-ba93-c4976713e5ca\") " pod="keystone-kuttl-tests/keystone-546dffd48c-6kpwd" Jan 20 18:38:26 crc kubenswrapper[4558]: I0120 18:38:26.243147 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/1d200484-d79e-42ba-ba93-c4976713e5ca-credential-keys\") pod \"keystone-546dffd48c-6kpwd\" (UID: \"1d200484-d79e-42ba-ba93-c4976713e5ca\") " pod="keystone-kuttl-tests/keystone-546dffd48c-6kpwd" Jan 20 18:38:26 crc kubenswrapper[4558]: I0120 18:38:26.243237 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1d200484-d79e-42ba-ba93-c4976713e5ca-scripts\") pod \"keystone-546dffd48c-6kpwd\" (UID: \"1d200484-d79e-42ba-ba93-c4976713e5ca\") " pod="keystone-kuttl-tests/keystone-546dffd48c-6kpwd" Jan 20 18:38:26 crc kubenswrapper[4558]: I0120 18:38:26.243267 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1d200484-d79e-42ba-ba93-c4976713e5ca-fernet-keys\") pod \"keystone-546dffd48c-6kpwd\" (UID: \"1d200484-d79e-42ba-ba93-c4976713e5ca\") " pod="keystone-kuttl-tests/keystone-546dffd48c-6kpwd" Jan 20 18:38:26 crc kubenswrapper[4558]: I0120 18:38:26.249855 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1d200484-d79e-42ba-ba93-c4976713e5ca-scripts\") pod \"keystone-546dffd48c-6kpwd\" (UID: \"1d200484-d79e-42ba-ba93-c4976713e5ca\") " pod="keystone-kuttl-tests/keystone-546dffd48c-6kpwd" Jan 20 18:38:26 crc kubenswrapper[4558]: I0120 18:38:26.250279 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/1d200484-d79e-42ba-ba93-c4976713e5ca-credential-keys\") pod \"keystone-546dffd48c-6kpwd\" (UID: \"1d200484-d79e-42ba-ba93-c4976713e5ca\") " pod="keystone-kuttl-tests/keystone-546dffd48c-6kpwd" Jan 20 18:38:26 crc kubenswrapper[4558]: I0120 18:38:26.250847 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1d200484-d79e-42ba-ba93-c4976713e5ca-fernet-keys\") pod \"keystone-546dffd48c-6kpwd\" (UID: \"1d200484-d79e-42ba-ba93-c4976713e5ca\") " pod="keystone-kuttl-tests/keystone-546dffd48c-6kpwd" Jan 20 18:38:26 crc kubenswrapper[4558]: I0120 18:38:26.251383 
4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d200484-d79e-42ba-ba93-c4976713e5ca-config-data\") pod \"keystone-546dffd48c-6kpwd\" (UID: \"1d200484-d79e-42ba-ba93-c4976713e5ca\") " pod="keystone-kuttl-tests/keystone-546dffd48c-6kpwd" Jan 20 18:38:26 crc kubenswrapper[4558]: I0120 18:38:26.260126 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fqwpc\" (UniqueName: \"kubernetes.io/projected/1d200484-d79e-42ba-ba93-c4976713e5ca-kube-api-access-fqwpc\") pod \"keystone-546dffd48c-6kpwd\" (UID: \"1d200484-d79e-42ba-ba93-c4976713e5ca\") " pod="keystone-kuttl-tests/keystone-546dffd48c-6kpwd" Jan 20 18:38:26 crc kubenswrapper[4558]: I0120 18:38:26.446116 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-546dffd48c-6kpwd" Jan 20 18:38:26 crc kubenswrapper[4558]: I0120 18:38:26.834250 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-546dffd48c-6kpwd"] Jan 20 18:38:26 crc kubenswrapper[4558]: W0120 18:38:26.838730 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1d200484_d79e_42ba_ba93_c4976713e5ca.slice/crio-8e4c059b2c3af03c4130fc150938ac1310a34d5903d54ed48dbb94d2d5d928cf WatchSource:0}: Error finding container 8e4c059b2c3af03c4130fc150938ac1310a34d5903d54ed48dbb94d2d5d928cf: Status 404 returned error can't find the container with id 8e4c059b2c3af03c4130fc150938ac1310a34d5903d54ed48dbb94d2d5d928cf Jan 20 18:38:27 crc kubenswrapper[4558]: I0120 18:38:27.039633 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-546dffd48c-6kpwd" event={"ID":"1d200484-d79e-42ba-ba93-c4976713e5ca","Type":"ContainerStarted","Data":"3dff4d322e7efcaa76620170d37df00addbb3d2be925f99983a291aa5b26b457"} Jan 20 18:38:27 crc kubenswrapper[4558]: I0120 18:38:27.039698 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-546dffd48c-6kpwd" event={"ID":"1d200484-d79e-42ba-ba93-c4976713e5ca","Type":"ContainerStarted","Data":"8e4c059b2c3af03c4130fc150938ac1310a34d5903d54ed48dbb94d2d5d928cf"} Jan 20 18:38:27 crc kubenswrapper[4558]: I0120 18:38:27.039817 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="keystone-kuttl-tests/keystone-546dffd48c-6kpwd" Jan 20 18:38:27 crc kubenswrapper[4558]: I0120 18:38:27.059489 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="keystone-kuttl-tests/keystone-546dffd48c-6kpwd" podStartSLOduration=1.059470669 podStartE2EDuration="1.059470669s" podCreationTimestamp="2026-01-20 18:38:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:38:27.055001947 +0000 UTC m=+7000.815339913" watchObservedRunningTime="2026-01-20 18:38:27.059470669 +0000 UTC m=+7000.819808635" Jan 20 18:38:39 crc kubenswrapper[4558]: I0120 18:38:39.566122 4558 scope.go:117] "RemoveContainer" containerID="77ce04c6daee7d4f9b776bd1c80b448f9c4750d5cb5c7dd182ff28963a4bcf99" Jan 20 18:38:39 crc kubenswrapper[4558]: E0120 18:38:39.567410 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:38:54 crc kubenswrapper[4558]: I0120 18:38:54.566849 4558 scope.go:117] "RemoveContainer" containerID="77ce04c6daee7d4f9b776bd1c80b448f9c4750d5cb5c7dd182ff28963a4bcf99" Jan 20 18:38:54 crc kubenswrapper[4558]: E0120 18:38:54.567855 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:38:57 crc kubenswrapper[4558]: I0120 18:38:57.755974 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="keystone-kuttl-tests/keystone-546dffd48c-6kpwd" Jan 20 18:39:09 crc kubenswrapper[4558]: I0120 18:39:09.565834 4558 scope.go:117] "RemoveContainer" containerID="77ce04c6daee7d4f9b776bd1c80b448f9c4750d5cb5c7dd182ff28963a4bcf99" Jan 20 18:39:09 crc kubenswrapper[4558]: E0120 18:39:09.566764 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:39:12 crc kubenswrapper[4558]: I0120 18:39:12.840536 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone-db-sync-hrkhg"] Jan 20 18:39:12 crc kubenswrapper[4558]: I0120 18:39:12.847521 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/keystone-db-sync-hrkhg"] Jan 20 18:39:12 crc kubenswrapper[4558]: I0120 18:39:12.856242 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone-bootstrap-tw56s"] Jan 20 18:39:12 crc kubenswrapper[4558]: I0120 18:39:12.862139 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/keystone-bootstrap-tw56s"] Jan 20 18:39:12 crc kubenswrapper[4558]: I0120 18:39:12.872711 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone-546dffd48c-6kpwd"] Jan 20 18:39:12 crc kubenswrapper[4558]: I0120 18:39:12.872961 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="keystone-kuttl-tests/keystone-546dffd48c-6kpwd" podUID="1d200484-d79e-42ba-ba93-c4976713e5ca" containerName="keystone-api" containerID="cri-o://3dff4d322e7efcaa76620170d37df00addbb3d2be925f99983a291aa5b26b457" gracePeriod=30 Jan 20 18:39:12 crc kubenswrapper[4558]: I0120 18:39:12.917542 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/keystoneea52-account-delete-z6xd9"] Jan 20 18:39:12 crc kubenswrapper[4558]: I0120 18:39:12.918705 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystoneea52-account-delete-z6xd9" Jan 20 18:39:12 crc kubenswrapper[4558]: I0120 18:39:12.927101 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystoneea52-account-delete-z6xd9"] Jan 20 18:39:13 crc kubenswrapper[4558]: I0120 18:39:13.023644 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c4b6h\" (UniqueName: \"kubernetes.io/projected/477c30c7-03ab-456b-a7ef-a8586f052134-kube-api-access-c4b6h\") pod \"keystoneea52-account-delete-z6xd9\" (UID: \"477c30c7-03ab-456b-a7ef-a8586f052134\") " pod="keystone-kuttl-tests/keystoneea52-account-delete-z6xd9" Jan 20 18:39:13 crc kubenswrapper[4558]: I0120 18:39:13.024033 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/477c30c7-03ab-456b-a7ef-a8586f052134-operator-scripts\") pod \"keystoneea52-account-delete-z6xd9\" (UID: \"477c30c7-03ab-456b-a7ef-a8586f052134\") " pod="keystone-kuttl-tests/keystoneea52-account-delete-z6xd9" Jan 20 18:39:13 crc kubenswrapper[4558]: I0120 18:39:13.126103 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c4b6h\" (UniqueName: \"kubernetes.io/projected/477c30c7-03ab-456b-a7ef-a8586f052134-kube-api-access-c4b6h\") pod \"keystoneea52-account-delete-z6xd9\" (UID: \"477c30c7-03ab-456b-a7ef-a8586f052134\") " pod="keystone-kuttl-tests/keystoneea52-account-delete-z6xd9" Jan 20 18:39:13 crc kubenswrapper[4558]: I0120 18:39:13.126352 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/477c30c7-03ab-456b-a7ef-a8586f052134-operator-scripts\") pod \"keystoneea52-account-delete-z6xd9\" (UID: \"477c30c7-03ab-456b-a7ef-a8586f052134\") " pod="keystone-kuttl-tests/keystoneea52-account-delete-z6xd9" Jan 20 18:39:13 crc kubenswrapper[4558]: I0120 18:39:13.127902 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/477c30c7-03ab-456b-a7ef-a8586f052134-operator-scripts\") pod \"keystoneea52-account-delete-z6xd9\" (UID: \"477c30c7-03ab-456b-a7ef-a8586f052134\") " pod="keystone-kuttl-tests/keystoneea52-account-delete-z6xd9" Jan 20 18:39:13 crc kubenswrapper[4558]: I0120 18:39:13.147947 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c4b6h\" (UniqueName: \"kubernetes.io/projected/477c30c7-03ab-456b-a7ef-a8586f052134-kube-api-access-c4b6h\") pod \"keystoneea52-account-delete-z6xd9\" (UID: \"477c30c7-03ab-456b-a7ef-a8586f052134\") " pod="keystone-kuttl-tests/keystoneea52-account-delete-z6xd9" Jan 20 18:39:13 crc kubenswrapper[4558]: I0120 18:39:13.234303 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystoneea52-account-delete-z6xd9" Jan 20 18:39:13 crc kubenswrapper[4558]: I0120 18:39:13.548811 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystoneea52-account-delete-z6xd9"] Jan 20 18:39:14 crc kubenswrapper[4558]: I0120 18:39:14.412574 4558 generic.go:334] "Generic (PLEG): container finished" podID="477c30c7-03ab-456b-a7ef-a8586f052134" containerID="0787c1784df78a549e1b1832dde7625f1871d92d6d0b7095574c910e6ce85c39" exitCode=0 Jan 20 18:39:14 crc kubenswrapper[4558]: I0120 18:39:14.412638 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystoneea52-account-delete-z6xd9" event={"ID":"477c30c7-03ab-456b-a7ef-a8586f052134","Type":"ContainerDied","Data":"0787c1784df78a549e1b1832dde7625f1871d92d6d0b7095574c910e6ce85c39"} Jan 20 18:39:14 crc kubenswrapper[4558]: I0120 18:39:14.412673 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystoneea52-account-delete-z6xd9" event={"ID":"477c30c7-03ab-456b-a7ef-a8586f052134","Type":"ContainerStarted","Data":"246e47feaa0c7f80a0fa524d18cc7ccaf50432c5dd1dfff65a4a6a40de2044f1"} Jan 20 18:39:14 crc kubenswrapper[4558]: I0120 18:39:14.575349 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4466d9fe-a853-466d-9897-1364d0a39e4e" path="/var/lib/kubelet/pods/4466d9fe-a853-466d-9897-1364d0a39e4e/volumes" Jan 20 18:39:14 crc kubenswrapper[4558]: I0120 18:39:14.575882 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9ae74282-903a-42d6-87be-deb438d77576" path="/var/lib/kubelet/pods/9ae74282-903a-42d6-87be-deb438d77576/volumes" Jan 20 18:39:15 crc kubenswrapper[4558]: I0120 18:39:15.674009 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystoneea52-account-delete-z6xd9" Jan 20 18:39:15 crc kubenswrapper[4558]: I0120 18:39:15.771860 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/477c30c7-03ab-456b-a7ef-a8586f052134-operator-scripts\") pod \"477c30c7-03ab-456b-a7ef-a8586f052134\" (UID: \"477c30c7-03ab-456b-a7ef-a8586f052134\") " Jan 20 18:39:15 crc kubenswrapper[4558]: I0120 18:39:15.772001 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c4b6h\" (UniqueName: \"kubernetes.io/projected/477c30c7-03ab-456b-a7ef-a8586f052134-kube-api-access-c4b6h\") pod \"477c30c7-03ab-456b-a7ef-a8586f052134\" (UID: \"477c30c7-03ab-456b-a7ef-a8586f052134\") " Jan 20 18:39:15 crc kubenswrapper[4558]: I0120 18:39:15.772421 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/477c30c7-03ab-456b-a7ef-a8586f052134-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "477c30c7-03ab-456b-a7ef-a8586f052134" (UID: "477c30c7-03ab-456b-a7ef-a8586f052134"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:39:15 crc kubenswrapper[4558]: I0120 18:39:15.778842 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/477c30c7-03ab-456b-a7ef-a8586f052134-kube-api-access-c4b6h" (OuterVolumeSpecName: "kube-api-access-c4b6h") pod "477c30c7-03ab-456b-a7ef-a8586f052134" (UID: "477c30c7-03ab-456b-a7ef-a8586f052134"). InnerVolumeSpecName "kube-api-access-c4b6h". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:39:15 crc kubenswrapper[4558]: I0120 18:39:15.873700 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/477c30c7-03ab-456b-a7ef-a8586f052134-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 18:39:15 crc kubenswrapper[4558]: I0120 18:39:15.873743 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c4b6h\" (UniqueName: \"kubernetes.io/projected/477c30c7-03ab-456b-a7ef-a8586f052134-kube-api-access-c4b6h\") on node \"crc\" DevicePath \"\"" Jan 20 18:39:16 crc kubenswrapper[4558]: I0120 18:39:16.216697 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-546dffd48c-6kpwd" Jan 20 18:39:16 crc kubenswrapper[4558]: I0120 18:39:16.381870 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d200484-d79e-42ba-ba93-c4976713e5ca-config-data\") pod \"1d200484-d79e-42ba-ba93-c4976713e5ca\" (UID: \"1d200484-d79e-42ba-ba93-c4976713e5ca\") " Jan 20 18:39:16 crc kubenswrapper[4558]: I0120 18:39:16.381951 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1d200484-d79e-42ba-ba93-c4976713e5ca-fernet-keys\") pod \"1d200484-d79e-42ba-ba93-c4976713e5ca\" (UID: \"1d200484-d79e-42ba-ba93-c4976713e5ca\") " Jan 20 18:39:16 crc kubenswrapper[4558]: I0120 18:39:16.382127 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1d200484-d79e-42ba-ba93-c4976713e5ca-scripts\") pod \"1d200484-d79e-42ba-ba93-c4976713e5ca\" (UID: \"1d200484-d79e-42ba-ba93-c4976713e5ca\") " Jan 20 18:39:16 crc kubenswrapper[4558]: I0120 18:39:16.382229 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/1d200484-d79e-42ba-ba93-c4976713e5ca-credential-keys\") pod \"1d200484-d79e-42ba-ba93-c4976713e5ca\" (UID: \"1d200484-d79e-42ba-ba93-c4976713e5ca\") " Jan 20 18:39:16 crc kubenswrapper[4558]: I0120 18:39:16.382311 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqwpc\" (UniqueName: \"kubernetes.io/projected/1d200484-d79e-42ba-ba93-c4976713e5ca-kube-api-access-fqwpc\") pod \"1d200484-d79e-42ba-ba93-c4976713e5ca\" (UID: \"1d200484-d79e-42ba-ba93-c4976713e5ca\") " Jan 20 18:39:16 crc kubenswrapper[4558]: I0120 18:39:16.387459 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d200484-d79e-42ba-ba93-c4976713e5ca-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "1d200484-d79e-42ba-ba93-c4976713e5ca" (UID: "1d200484-d79e-42ba-ba93-c4976713e5ca"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:39:16 crc kubenswrapper[4558]: I0120 18:39:16.387592 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d200484-d79e-42ba-ba93-c4976713e5ca-scripts" (OuterVolumeSpecName: "scripts") pod "1d200484-d79e-42ba-ba93-c4976713e5ca" (UID: "1d200484-d79e-42ba-ba93-c4976713e5ca"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:39:16 crc kubenswrapper[4558]: I0120 18:39:16.387636 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d200484-d79e-42ba-ba93-c4976713e5ca-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "1d200484-d79e-42ba-ba93-c4976713e5ca" (UID: "1d200484-d79e-42ba-ba93-c4976713e5ca"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:39:16 crc kubenswrapper[4558]: I0120 18:39:16.387693 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d200484-d79e-42ba-ba93-c4976713e5ca-kube-api-access-fqwpc" (OuterVolumeSpecName: "kube-api-access-fqwpc") pod "1d200484-d79e-42ba-ba93-c4976713e5ca" (UID: "1d200484-d79e-42ba-ba93-c4976713e5ca"). InnerVolumeSpecName "kube-api-access-fqwpc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:39:16 crc kubenswrapper[4558]: I0120 18:39:16.401362 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d200484-d79e-42ba-ba93-c4976713e5ca-config-data" (OuterVolumeSpecName: "config-data") pod "1d200484-d79e-42ba-ba93-c4976713e5ca" (UID: "1d200484-d79e-42ba-ba93-c4976713e5ca"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:39:16 crc kubenswrapper[4558]: I0120 18:39:16.432191 4558 generic.go:334] "Generic (PLEG): container finished" podID="1d200484-d79e-42ba-ba93-c4976713e5ca" containerID="3dff4d322e7efcaa76620170d37df00addbb3d2be925f99983a291aa5b26b457" exitCode=0 Jan 20 18:39:16 crc kubenswrapper[4558]: I0120 18:39:16.432235 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-546dffd48c-6kpwd" event={"ID":"1d200484-d79e-42ba-ba93-c4976713e5ca","Type":"ContainerDied","Data":"3dff4d322e7efcaa76620170d37df00addbb3d2be925f99983a291aa5b26b457"} Jan 20 18:39:16 crc kubenswrapper[4558]: I0120 18:39:16.432278 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-546dffd48c-6kpwd" event={"ID":"1d200484-d79e-42ba-ba93-c4976713e5ca","Type":"ContainerDied","Data":"8e4c059b2c3af03c4130fc150938ac1310a34d5903d54ed48dbb94d2d5d928cf"} Jan 20 18:39:16 crc kubenswrapper[4558]: I0120 18:39:16.432299 4558 scope.go:117] "RemoveContainer" containerID="3dff4d322e7efcaa76620170d37df00addbb3d2be925f99983a291aa5b26b457" Jan 20 18:39:16 crc kubenswrapper[4558]: I0120 18:39:16.432306 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-546dffd48c-6kpwd" Jan 20 18:39:16 crc kubenswrapper[4558]: I0120 18:39:16.434106 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystoneea52-account-delete-z6xd9" event={"ID":"477c30c7-03ab-456b-a7ef-a8586f052134","Type":"ContainerDied","Data":"246e47feaa0c7f80a0fa524d18cc7ccaf50432c5dd1dfff65a4a6a40de2044f1"} Jan 20 18:39:16 crc kubenswrapper[4558]: I0120 18:39:16.434158 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystoneea52-account-delete-z6xd9" Jan 20 18:39:16 crc kubenswrapper[4558]: I0120 18:39:16.434185 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="246e47feaa0c7f80a0fa524d18cc7ccaf50432c5dd1dfff65a4a6a40de2044f1" Jan 20 18:39:16 crc kubenswrapper[4558]: I0120 18:39:16.454684 4558 scope.go:117] "RemoveContainer" containerID="3dff4d322e7efcaa76620170d37df00addbb3d2be925f99983a291aa5b26b457" Jan 20 18:39:16 crc kubenswrapper[4558]: E0120 18:39:16.455107 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3dff4d322e7efcaa76620170d37df00addbb3d2be925f99983a291aa5b26b457\": container with ID starting with 3dff4d322e7efcaa76620170d37df00addbb3d2be925f99983a291aa5b26b457 not found: ID does not exist" containerID="3dff4d322e7efcaa76620170d37df00addbb3d2be925f99983a291aa5b26b457" Jan 20 18:39:16 crc kubenswrapper[4558]: I0120 18:39:16.455159 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3dff4d322e7efcaa76620170d37df00addbb3d2be925f99983a291aa5b26b457"} err="failed to get container status \"3dff4d322e7efcaa76620170d37df00addbb3d2be925f99983a291aa5b26b457\": rpc error: code = NotFound desc = could not find container \"3dff4d322e7efcaa76620170d37df00addbb3d2be925f99983a291aa5b26b457\": container with ID starting with 3dff4d322e7efcaa76620170d37df00addbb3d2be925f99983a291aa5b26b457 not found: ID does not exist" Jan 20 18:39:16 crc kubenswrapper[4558]: I0120 18:39:16.474535 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone-546dffd48c-6kpwd"] Jan 20 18:39:16 crc kubenswrapper[4558]: I0120 18:39:16.483038 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/keystone-546dffd48c-6kpwd"] Jan 20 18:39:16 crc kubenswrapper[4558]: I0120 18:39:16.484577 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1d200484-d79e-42ba-ba93-c4976713e5ca-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 18:39:16 crc kubenswrapper[4558]: I0120 18:39:16.484617 4558 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/1d200484-d79e-42ba-ba93-c4976713e5ca-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 20 18:39:16 crc kubenswrapper[4558]: I0120 18:39:16.484635 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqwpc\" (UniqueName: \"kubernetes.io/projected/1d200484-d79e-42ba-ba93-c4976713e5ca-kube-api-access-fqwpc\") on node \"crc\" DevicePath \"\"" Jan 20 18:39:16 crc kubenswrapper[4558]: I0120 18:39:16.484651 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1d200484-d79e-42ba-ba93-c4976713e5ca-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 18:39:16 crc kubenswrapper[4558]: I0120 18:39:16.484663 4558 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1d200484-d79e-42ba-ba93-c4976713e5ca-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 20 18:39:16 crc kubenswrapper[4558]: I0120 18:39:16.576400 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d200484-d79e-42ba-ba93-c4976713e5ca" path="/var/lib/kubelet/pods/1d200484-d79e-42ba-ba93-c4976713e5ca/volumes" Jan 20 18:39:17 crc kubenswrapper[4558]: I0120 18:39:17.947466 4558 kubelet.go:2437] "SyncLoop 
DELETE" source="api" pods=["keystone-kuttl-tests/keystone-db-create-6twzj"] Jan 20 18:39:17 crc kubenswrapper[4558]: I0120 18:39:17.954529 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/keystone-db-create-6twzj"] Jan 20 18:39:17 crc kubenswrapper[4558]: I0120 18:39:17.959578 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystoneea52-account-delete-z6xd9"] Jan 20 18:39:17 crc kubenswrapper[4558]: I0120 18:39:17.965031 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone-ea52-account-create-update-7q2rz"] Jan 20 18:39:17 crc kubenswrapper[4558]: I0120 18:39:17.969581 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/keystoneea52-account-delete-z6xd9"] Jan 20 18:39:17 crc kubenswrapper[4558]: I0120 18:39:17.972979 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/keystone-ea52-account-create-update-7q2rz"] Jan 20 18:39:18 crc kubenswrapper[4558]: I0120 18:39:18.574153 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="477c30c7-03ab-456b-a7ef-a8586f052134" path="/var/lib/kubelet/pods/477c30c7-03ab-456b-a7ef-a8586f052134/volumes" Jan 20 18:39:18 crc kubenswrapper[4558]: I0120 18:39:18.574844 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6142a71f-3e1f-4769-9197-0edff9d8461c" path="/var/lib/kubelet/pods/6142a71f-3e1f-4769-9197-0edff9d8461c/volumes" Jan 20 18:39:18 crc kubenswrapper[4558]: I0120 18:39:18.575327 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="625f8646-a3ab-4a12-9c99-9a85be87df11" path="/var/lib/kubelet/pods/625f8646-a3ab-4a12-9c99-9a85be87df11/volumes" Jan 20 18:39:22 crc kubenswrapper[4558]: I0120 18:39:22.638415 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/root-account-create-update-h8c94"] Jan 20 18:39:22 crc kubenswrapper[4558]: E0120 18:39:22.639361 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d200484-d79e-42ba-ba93-c4976713e5ca" containerName="keystone-api" Jan 20 18:39:22 crc kubenswrapper[4558]: I0120 18:39:22.639378 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d200484-d79e-42ba-ba93-c4976713e5ca" containerName="keystone-api" Jan 20 18:39:22 crc kubenswrapper[4558]: E0120 18:39:22.639408 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="477c30c7-03ab-456b-a7ef-a8586f052134" containerName="mariadb-account-delete" Jan 20 18:39:22 crc kubenswrapper[4558]: I0120 18:39:22.639414 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="477c30c7-03ab-456b-a7ef-a8586f052134" containerName="mariadb-account-delete" Jan 20 18:39:22 crc kubenswrapper[4558]: I0120 18:39:22.639585 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="1d200484-d79e-42ba-ba93-c4976713e5ca" containerName="keystone-api" Jan 20 18:39:22 crc kubenswrapper[4558]: I0120 18:39:22.639597 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="477c30c7-03ab-456b-a7ef-a8586f052134" containerName="mariadb-account-delete" Jan 20 18:39:22 crc kubenswrapper[4558]: I0120 18:39:22.640270 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/root-account-create-update-h8c94" Jan 20 18:39:22 crc kubenswrapper[4558]: I0120 18:39:22.642567 4558 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"openstack-mariadb-root-db-secret" Jan 20 18:39:22 crc kubenswrapper[4558]: I0120 18:39:22.654752 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/root-account-create-update-h8c94"] Jan 20 18:39:22 crc kubenswrapper[4558]: I0120 18:39:22.678655 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/openstack-galera-1"] Jan 20 18:39:22 crc kubenswrapper[4558]: I0120 18:39:22.687975 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/openstack-galera-0"] Jan 20 18:39:22 crc kubenswrapper[4558]: I0120 18:39:22.694648 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/openstack-galera-2"] Jan 20 18:39:22 crc kubenswrapper[4558]: I0120 18:39:22.703153 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/root-account-create-update-h8c94"] Jan 20 18:39:22 crc kubenswrapper[4558]: E0120 18:39:22.703794 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[kube-api-access-rn27h operator-scripts], unattached volumes=[], failed to process volumes=[]: context canceled" pod="keystone-kuttl-tests/root-account-create-update-h8c94" podUID="68bb5058-bb38-48b8-94eb-3a7c87a62dd8" Jan 20 18:39:22 crc kubenswrapper[4558]: I0120 18:39:22.770644 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/68bb5058-bb38-48b8-94eb-3a7c87a62dd8-operator-scripts\") pod \"root-account-create-update-h8c94\" (UID: \"68bb5058-bb38-48b8-94eb-3a7c87a62dd8\") " pod="keystone-kuttl-tests/root-account-create-update-h8c94" Jan 20 18:39:22 crc kubenswrapper[4558]: I0120 18:39:22.770742 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rn27h\" (UniqueName: \"kubernetes.io/projected/68bb5058-bb38-48b8-94eb-3a7c87a62dd8-kube-api-access-rn27h\") pod \"root-account-create-update-h8c94\" (UID: \"68bb5058-bb38-48b8-94eb-3a7c87a62dd8\") " pod="keystone-kuttl-tests/root-account-create-update-h8c94" Jan 20 18:39:22 crc kubenswrapper[4558]: I0120 18:39:22.795793 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="keystone-kuttl-tests/openstack-galera-2" podUID="3b3e4088-de47-4e50-b0a9-ad92c99d033d" containerName="galera" containerID="cri-o://0231ad6abf0789ab8e253f5c60247d91419898315c1515e5abe79e42a48e7dfc" gracePeriod=30 Jan 20 18:39:22 crc kubenswrapper[4558]: I0120 18:39:22.872012 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/68bb5058-bb38-48b8-94eb-3a7c87a62dd8-operator-scripts\") pod \"root-account-create-update-h8c94\" (UID: \"68bb5058-bb38-48b8-94eb-3a7c87a62dd8\") " pod="keystone-kuttl-tests/root-account-create-update-h8c94" Jan 20 18:39:22 crc kubenswrapper[4558]: I0120 18:39:22.872072 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rn27h\" (UniqueName: \"kubernetes.io/projected/68bb5058-bb38-48b8-94eb-3a7c87a62dd8-kube-api-access-rn27h\") pod \"root-account-create-update-h8c94\" (UID: \"68bb5058-bb38-48b8-94eb-3a7c87a62dd8\") " pod="keystone-kuttl-tests/root-account-create-update-h8c94" Jan 20 
18:39:22 crc kubenswrapper[4558]: E0120 18:39:22.872209 4558 configmap.go:193] Couldn't get configMap keystone-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 18:39:22 crc kubenswrapper[4558]: E0120 18:39:22.872307 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/68bb5058-bb38-48b8-94eb-3a7c87a62dd8-operator-scripts podName:68bb5058-bb38-48b8-94eb-3a7c87a62dd8 nodeName:}" failed. No retries permitted until 2026-01-20 18:39:23.37228473 +0000 UTC m=+7057.132622697 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/68bb5058-bb38-48b8-94eb-3a7c87a62dd8-operator-scripts") pod "root-account-create-update-h8c94" (UID: "68bb5058-bb38-48b8-94eb-3a7c87a62dd8") : configmap "openstack-scripts" not found Jan 20 18:39:22 crc kubenswrapper[4558]: E0120 18:39:22.875177 4558 projected.go:194] Error preparing data for projected volume kube-api-access-rn27h for pod keystone-kuttl-tests/root-account-create-update-h8c94: failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 18:39:22 crc kubenswrapper[4558]: E0120 18:39:22.875253 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/68bb5058-bb38-48b8-94eb-3a7c87a62dd8-kube-api-access-rn27h podName:68bb5058-bb38-48b8-94eb-3a7c87a62dd8 nodeName:}" failed. No retries permitted until 2026-01-20 18:39:23.375235057 +0000 UTC m=+7057.135573025 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-rn27h" (UniqueName: "kubernetes.io/projected/68bb5058-bb38-48b8-94eb-3a7c87a62dd8-kube-api-access-rn27h") pod "root-account-create-update-h8c94" (UID: "68bb5058-bb38-48b8-94eb-3a7c87a62dd8") : failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 18:39:23 crc kubenswrapper[4558]: I0120 18:39:23.252031 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/memcached-0"] Jan 20 18:39:23 crc kubenswrapper[4558]: I0120 18:39:23.252314 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="keystone-kuttl-tests/memcached-0" podUID="c70e04a6-d794-4290-ac24-1fecf80b3d41" containerName="memcached" containerID="cri-o://f6c7ab6691baa097b9066d0f32d65f848d6362b59f85d8766a3bec60d4457114" gracePeriod=30 Jan 20 18:39:23 crc kubenswrapper[4558]: I0120 18:39:23.381701 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/68bb5058-bb38-48b8-94eb-3a7c87a62dd8-operator-scripts\") pod \"root-account-create-update-h8c94\" (UID: \"68bb5058-bb38-48b8-94eb-3a7c87a62dd8\") " pod="keystone-kuttl-tests/root-account-create-update-h8c94" Jan 20 18:39:23 crc kubenswrapper[4558]: I0120 18:39:23.382002 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rn27h\" (UniqueName: \"kubernetes.io/projected/68bb5058-bb38-48b8-94eb-3a7c87a62dd8-kube-api-access-rn27h\") pod \"root-account-create-update-h8c94\" (UID: \"68bb5058-bb38-48b8-94eb-3a7c87a62dd8\") " pod="keystone-kuttl-tests/root-account-create-update-h8c94" Jan 20 18:39:23 crc kubenswrapper[4558]: E0120 18:39:23.381909 4558 configmap.go:193] Couldn't get configMap keystone-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 18:39:23 crc kubenswrapper[4558]: E0120 18:39:23.382153 4558 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/configmap/68bb5058-bb38-48b8-94eb-3a7c87a62dd8-operator-scripts podName:68bb5058-bb38-48b8-94eb-3a7c87a62dd8 nodeName:}" failed. No retries permitted until 2026-01-20 18:39:24.382131263 +0000 UTC m=+7058.142469231 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/68bb5058-bb38-48b8-94eb-3a7c87a62dd8-operator-scripts") pod "root-account-create-update-h8c94" (UID: "68bb5058-bb38-48b8-94eb-3a7c87a62dd8") : configmap "openstack-scripts" not found Jan 20 18:39:23 crc kubenswrapper[4558]: E0120 18:39:23.386284 4558 projected.go:194] Error preparing data for projected volume kube-api-access-rn27h for pod keystone-kuttl-tests/root-account-create-update-h8c94: failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 18:39:23 crc kubenswrapper[4558]: E0120 18:39:23.386400 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/68bb5058-bb38-48b8-94eb-3a7c87a62dd8-kube-api-access-rn27h podName:68bb5058-bb38-48b8-94eb-3a7c87a62dd8 nodeName:}" failed. No retries permitted until 2026-01-20 18:39:24.386375374 +0000 UTC m=+7058.146713341 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-rn27h" (UniqueName: "kubernetes.io/projected/68bb5058-bb38-48b8-94eb-3a7c87a62dd8-kube-api-access-rn27h") pod "root-account-create-update-h8c94" (UID: "68bb5058-bb38-48b8-94eb-3a7c87a62dd8") : failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 18:39:23 crc kubenswrapper[4558]: I0120 18:39:23.455232 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/openstack-galera-2" Jan 20 18:39:23 crc kubenswrapper[4558]: I0120 18:39:23.488334 4558 generic.go:334] "Generic (PLEG): container finished" podID="3b3e4088-de47-4e50-b0a9-ad92c99d033d" containerID="0231ad6abf0789ab8e253f5c60247d91419898315c1515e5abe79e42a48e7dfc" exitCode=0 Jan 20 18:39:23 crc kubenswrapper[4558]: I0120 18:39:23.488396 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/openstack-galera-2" Jan 20 18:39:23 crc kubenswrapper[4558]: I0120 18:39:23.488406 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/root-account-create-update-h8c94" Jan 20 18:39:23 crc kubenswrapper[4558]: I0120 18:39:23.488430 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/openstack-galera-2" event={"ID":"3b3e4088-de47-4e50-b0a9-ad92c99d033d","Type":"ContainerDied","Data":"0231ad6abf0789ab8e253f5c60247d91419898315c1515e5abe79e42a48e7dfc"} Jan 20 18:39:23 crc kubenswrapper[4558]: I0120 18:39:23.488483 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/openstack-galera-2" event={"ID":"3b3e4088-de47-4e50-b0a9-ad92c99d033d","Type":"ContainerDied","Data":"f07ec8c764fc8deac7b0bdb653f8a713f381c5149034a4b0641f08249a852777"} Jan 20 18:39:23 crc kubenswrapper[4558]: I0120 18:39:23.488506 4558 scope.go:117] "RemoveContainer" containerID="0231ad6abf0789ab8e253f5c60247d91419898315c1515e5abe79e42a48e7dfc" Jan 20 18:39:23 crc kubenswrapper[4558]: I0120 18:39:23.498199 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/root-account-create-update-h8c94" Jan 20 18:39:23 crc kubenswrapper[4558]: I0120 18:39:23.505613 4558 scope.go:117] "RemoveContainer" containerID="1980894cfff612e1c62e2fca2a7a9c198c73c0713bf7e0fc70065c51ec322466" Jan 20 18:39:23 crc kubenswrapper[4558]: I0120 18:39:23.524759 4558 scope.go:117] "RemoveContainer" containerID="0231ad6abf0789ab8e253f5c60247d91419898315c1515e5abe79e42a48e7dfc" Jan 20 18:39:23 crc kubenswrapper[4558]: E0120 18:39:23.527594 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0231ad6abf0789ab8e253f5c60247d91419898315c1515e5abe79e42a48e7dfc\": container with ID starting with 0231ad6abf0789ab8e253f5c60247d91419898315c1515e5abe79e42a48e7dfc not found: ID does not exist" containerID="0231ad6abf0789ab8e253f5c60247d91419898315c1515e5abe79e42a48e7dfc" Jan 20 18:39:23 crc kubenswrapper[4558]: I0120 18:39:23.527630 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0231ad6abf0789ab8e253f5c60247d91419898315c1515e5abe79e42a48e7dfc"} err="failed to get container status \"0231ad6abf0789ab8e253f5c60247d91419898315c1515e5abe79e42a48e7dfc\": rpc error: code = NotFound desc = could not find container \"0231ad6abf0789ab8e253f5c60247d91419898315c1515e5abe79e42a48e7dfc\": container with ID starting with 0231ad6abf0789ab8e253f5c60247d91419898315c1515e5abe79e42a48e7dfc not found: ID does not exist" Jan 20 18:39:23 crc kubenswrapper[4558]: I0120 18:39:23.527656 4558 scope.go:117] "RemoveContainer" containerID="1980894cfff612e1c62e2fca2a7a9c198c73c0713bf7e0fc70065c51ec322466" Jan 20 18:39:23 crc kubenswrapper[4558]: E0120 18:39:23.528020 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1980894cfff612e1c62e2fca2a7a9c198c73c0713bf7e0fc70065c51ec322466\": container with ID starting with 1980894cfff612e1c62e2fca2a7a9c198c73c0713bf7e0fc70065c51ec322466 not found: ID does not exist" containerID="1980894cfff612e1c62e2fca2a7a9c198c73c0713bf7e0fc70065c51ec322466" Jan 20 18:39:23 crc kubenswrapper[4558]: I0120 18:39:23.528076 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1980894cfff612e1c62e2fca2a7a9c198c73c0713bf7e0fc70065c51ec322466"} err="failed to get container status \"1980894cfff612e1c62e2fca2a7a9c198c73c0713bf7e0fc70065c51ec322466\": rpc error: code = NotFound desc = could not find container \"1980894cfff612e1c62e2fca2a7a9c198c73c0713bf7e0fc70065c51ec322466\": container with ID starting with 1980894cfff612e1c62e2fca2a7a9c198c73c0713bf7e0fc70065c51ec322466 not found: ID does not exist" Jan 20 18:39:23 crc kubenswrapper[4558]: I0120 18:39:23.566052 4558 scope.go:117] "RemoveContainer" containerID="77ce04c6daee7d4f9b776bd1c80b448f9c4750d5cb5c7dd182ff28963a4bcf99" Jan 20 18:39:23 crc kubenswrapper[4558]: E0120 18:39:23.566527 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:39:23 crc kubenswrapper[4558]: I0120 18:39:23.586855 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/3b3e4088-de47-4e50-b0a9-ad92c99d033d-config-data-default\") pod \"3b3e4088-de47-4e50-b0a9-ad92c99d033d\" (UID: \"3b3e4088-de47-4e50-b0a9-ad92c99d033d\") " Jan 20 18:39:23 crc kubenswrapper[4558]: I0120 18:39:23.586911 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nfflj\" (UniqueName: \"kubernetes.io/projected/3b3e4088-de47-4e50-b0a9-ad92c99d033d-kube-api-access-nfflj\") pod \"3b3e4088-de47-4e50-b0a9-ad92c99d033d\" (UID: \"3b3e4088-de47-4e50-b0a9-ad92c99d033d\") " Jan 20 18:39:23 crc kubenswrapper[4558]: I0120 18:39:23.586955 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"3b3e4088-de47-4e50-b0a9-ad92c99d033d\" (UID: \"3b3e4088-de47-4e50-b0a9-ad92c99d033d\") " Jan 20 18:39:23 crc kubenswrapper[4558]: I0120 18:39:23.587011 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3b3e4088-de47-4e50-b0a9-ad92c99d033d-operator-scripts\") pod \"3b3e4088-de47-4e50-b0a9-ad92c99d033d\" (UID: \"3b3e4088-de47-4e50-b0a9-ad92c99d033d\") " Jan 20 18:39:23 crc kubenswrapper[4558]: I0120 18:39:23.587096 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/3b3e4088-de47-4e50-b0a9-ad92c99d033d-config-data-generated\") pod \"3b3e4088-de47-4e50-b0a9-ad92c99d033d\" (UID: \"3b3e4088-de47-4e50-b0a9-ad92c99d033d\") " Jan 20 18:39:23 crc kubenswrapper[4558]: I0120 18:39:23.587135 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/3b3e4088-de47-4e50-b0a9-ad92c99d033d-kolla-config\") pod \"3b3e4088-de47-4e50-b0a9-ad92c99d033d\" (UID: \"3b3e4088-de47-4e50-b0a9-ad92c99d033d\") " Jan 20 18:39:23 crc kubenswrapper[4558]: I0120 18:39:23.588056 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3b3e4088-de47-4e50-b0a9-ad92c99d033d-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "3b3e4088-de47-4e50-b0a9-ad92c99d033d" (UID: "3b3e4088-de47-4e50-b0a9-ad92c99d033d"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:39:23 crc kubenswrapper[4558]: I0120 18:39:23.588301 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3b3e4088-de47-4e50-b0a9-ad92c99d033d-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "3b3e4088-de47-4e50-b0a9-ad92c99d033d" (UID: "3b3e4088-de47-4e50-b0a9-ad92c99d033d"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:39:23 crc kubenswrapper[4558]: I0120 18:39:23.588436 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3b3e4088-de47-4e50-b0a9-ad92c99d033d-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "3b3e4088-de47-4e50-b0a9-ad92c99d033d" (UID: "3b3e4088-de47-4e50-b0a9-ad92c99d033d"). InnerVolumeSpecName "kolla-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:39:23 crc kubenswrapper[4558]: I0120 18:39:23.589332 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3b3e4088-de47-4e50-b0a9-ad92c99d033d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "3b3e4088-de47-4e50-b0a9-ad92c99d033d" (UID: "3b3e4088-de47-4e50-b0a9-ad92c99d033d"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:39:23 crc kubenswrapper[4558]: I0120 18:39:23.594112 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3b3e4088-de47-4e50-b0a9-ad92c99d033d-kube-api-access-nfflj" (OuterVolumeSpecName: "kube-api-access-nfflj") pod "3b3e4088-de47-4e50-b0a9-ad92c99d033d" (UID: "3b3e4088-de47-4e50-b0a9-ad92c99d033d"). InnerVolumeSpecName "kube-api-access-nfflj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:39:23 crc kubenswrapper[4558]: I0120 18:39:23.600314 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "mysql-db") pod "3b3e4088-de47-4e50-b0a9-ad92c99d033d" (UID: "3b3e4088-de47-4e50-b0a9-ad92c99d033d"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 18:39:23 crc kubenswrapper[4558]: I0120 18:39:23.689672 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/3b3e4088-de47-4e50-b0a9-ad92c99d033d-config-data-default\") on node \"crc\" DevicePath \"\"" Jan 20 18:39:23 crc kubenswrapper[4558]: I0120 18:39:23.689704 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nfflj\" (UniqueName: \"kubernetes.io/projected/3b3e4088-de47-4e50-b0a9-ad92c99d033d-kube-api-access-nfflj\") on node \"crc\" DevicePath \"\"" Jan 20 18:39:23 crc kubenswrapper[4558]: I0120 18:39:23.689733 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Jan 20 18:39:23 crc kubenswrapper[4558]: I0120 18:39:23.689747 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3b3e4088-de47-4e50-b0a9-ad92c99d033d-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 18:39:23 crc kubenswrapper[4558]: I0120 18:39:23.689758 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/3b3e4088-de47-4e50-b0a9-ad92c99d033d-config-data-generated\") on node \"crc\" DevicePath \"\"" Jan 20 18:39:23 crc kubenswrapper[4558]: I0120 18:39:23.689770 4558 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/3b3e4088-de47-4e50-b0a9-ad92c99d033d-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 20 18:39:23 crc kubenswrapper[4558]: I0120 18:39:23.700467 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc" Jan 20 18:39:23 crc kubenswrapper[4558]: I0120 18:39:23.733805 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/rabbitmq-server-0"] Jan 20 18:39:23 crc kubenswrapper[4558]: I0120 18:39:23.791279 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" 
(UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\"" Jan 20 18:39:23 crc kubenswrapper[4558]: I0120 18:39:23.812687 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/openstack-galera-2"] Jan 20 18:39:23 crc kubenswrapper[4558]: I0120 18:39:23.815786 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/openstack-galera-2"] Jan 20 18:39:24 crc kubenswrapper[4558]: I0120 18:39:24.089726 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/rabbitmq-server-0"] Jan 20 18:39:24 crc kubenswrapper[4558]: I0120 18:39:24.401601 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/68bb5058-bb38-48b8-94eb-3a7c87a62dd8-operator-scripts\") pod \"root-account-create-update-h8c94\" (UID: \"68bb5058-bb38-48b8-94eb-3a7c87a62dd8\") " pod="keystone-kuttl-tests/root-account-create-update-h8c94" Jan 20 18:39:24 crc kubenswrapper[4558]: I0120 18:39:24.401705 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rn27h\" (UniqueName: \"kubernetes.io/projected/68bb5058-bb38-48b8-94eb-3a7c87a62dd8-kube-api-access-rn27h\") pod \"root-account-create-update-h8c94\" (UID: \"68bb5058-bb38-48b8-94eb-3a7c87a62dd8\") " pod="keystone-kuttl-tests/root-account-create-update-h8c94" Jan 20 18:39:24 crc kubenswrapper[4558]: E0120 18:39:24.401888 4558 configmap.go:193] Couldn't get configMap keystone-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 18:39:24 crc kubenswrapper[4558]: E0120 18:39:24.402074 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/68bb5058-bb38-48b8-94eb-3a7c87a62dd8-operator-scripts podName:68bb5058-bb38-48b8-94eb-3a7c87a62dd8 nodeName:}" failed. No retries permitted until 2026-01-20 18:39:26.402014317 +0000 UTC m=+7060.162352285 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/68bb5058-bb38-48b8-94eb-3a7c87a62dd8-operator-scripts") pod "root-account-create-update-h8c94" (UID: "68bb5058-bb38-48b8-94eb-3a7c87a62dd8") : configmap "openstack-scripts" not found Jan 20 18:39:24 crc kubenswrapper[4558]: E0120 18:39:24.405326 4558 projected.go:194] Error preparing data for projected volume kube-api-access-rn27h for pod keystone-kuttl-tests/root-account-create-update-h8c94: failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 18:39:24 crc kubenswrapper[4558]: E0120 18:39:24.405426 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/68bb5058-bb38-48b8-94eb-3a7c87a62dd8-kube-api-access-rn27h podName:68bb5058-bb38-48b8-94eb-3a7c87a62dd8 nodeName:}" failed. No retries permitted until 2026-01-20 18:39:26.40540332 +0000 UTC m=+7060.165741287 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-rn27h" (UniqueName: "kubernetes.io/projected/68bb5058-bb38-48b8-94eb-3a7c87a62dd8-kube-api-access-rn27h") pod "root-account-create-update-h8c94" (UID: "68bb5058-bb38-48b8-94eb-3a7c87a62dd8") : failed to fetch token: serviceaccounts "galera-openstack" not found Jan 20 18:39:24 crc kubenswrapper[4558]: I0120 18:39:24.499750 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/root-account-create-update-h8c94" Jan 20 18:39:24 crc kubenswrapper[4558]: I0120 18:39:24.530032 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="keystone-kuttl-tests/rabbitmq-server-0" podUID="2275b006-a890-400e-83b6-1c46bf67f62e" containerName="rabbitmq" containerID="cri-o://45e5c44ef4c79d694b34e00a259e1223a9488d8427afb7c95c4ae868a72e5bd9" gracePeriod=604800 Jan 20 18:39:24 crc kubenswrapper[4558]: I0120 18:39:24.532266 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/root-account-create-update-h8c94"] Jan 20 18:39:24 crc kubenswrapper[4558]: I0120 18:39:24.536206 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/root-account-create-update-h8c94"] Jan 20 18:39:24 crc kubenswrapper[4558]: I0120 18:39:24.573229 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3b3e4088-de47-4e50-b0a9-ad92c99d033d" path="/var/lib/kubelet/pods/3b3e4088-de47-4e50-b0a9-ad92c99d033d/volumes" Jan 20 18:39:24 crc kubenswrapper[4558]: I0120 18:39:24.573815 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="68bb5058-bb38-48b8-94eb-3a7c87a62dd8" path="/var/lib/kubelet/pods/68bb5058-bb38-48b8-94eb-3a7c87a62dd8/volumes" Jan 20 18:39:24 crc kubenswrapper[4558]: I0120 18:39:24.707804 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rn27h\" (UniqueName: \"kubernetes.io/projected/68bb5058-bb38-48b8-94eb-3a7c87a62dd8-kube-api-access-rn27h\") on node \"crc\" DevicePath \"\"" Jan 20 18:39:24 crc kubenswrapper[4558]: I0120 18:39:24.707861 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/68bb5058-bb38-48b8-94eb-3a7c87a62dd8-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 18:39:24 crc kubenswrapper[4558]: I0120 18:39:24.899300 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-6c45489b98-b6947"] Jan 20 18:39:24 crc kubenswrapper[4558]: I0120 18:39:24.899527 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/keystone-operator-controller-manager-6c45489b98-b6947" podUID="7e1d554f-8358-4ab1-ac14-764882a73ed2" containerName="manager" containerID="cri-o://71c9945d04185339d99c62f8d4838a72ee37c8a2480fef4aa780330f9be75810" gracePeriod=10 Jan 20 18:39:25 crc kubenswrapper[4558]: I0120 18:39:25.039414 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="keystone-kuttl-tests/openstack-galera-1" podUID="160b737f-7377-4d3a-8fff-3b96b9a614bc" containerName="galera" containerID="cri-o://c389d8005807225cc21db8265ae5fc2539848a0e1a797f0b1a89dda2965c682f" gracePeriod=28 Jan 20 18:39:25 crc kubenswrapper[4558]: I0120 18:39:25.042600 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/memcached-0" Jan 20 18:39:25 crc kubenswrapper[4558]: I0120 18:39:25.114750 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c70e04a6-d794-4290-ac24-1fecf80b3d41-config-data\") pod \"c70e04a6-d794-4290-ac24-1fecf80b3d41\" (UID: \"c70e04a6-d794-4290-ac24-1fecf80b3d41\") " Jan 20 18:39:25 crc kubenswrapper[4558]: I0120 18:39:25.114858 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c70e04a6-d794-4290-ac24-1fecf80b3d41-kolla-config\") pod \"c70e04a6-d794-4290-ac24-1fecf80b3d41\" (UID: \"c70e04a6-d794-4290-ac24-1fecf80b3d41\") " Jan 20 18:39:25 crc kubenswrapper[4558]: I0120 18:39:25.114918 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zvdrp\" (UniqueName: \"kubernetes.io/projected/c70e04a6-d794-4290-ac24-1fecf80b3d41-kube-api-access-zvdrp\") pod \"c70e04a6-d794-4290-ac24-1fecf80b3d41\" (UID: \"c70e04a6-d794-4290-ac24-1fecf80b3d41\") " Jan 20 18:39:25 crc kubenswrapper[4558]: I0120 18:39:25.115541 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c70e04a6-d794-4290-ac24-1fecf80b3d41-config-data" (OuterVolumeSpecName: "config-data") pod "c70e04a6-d794-4290-ac24-1fecf80b3d41" (UID: "c70e04a6-d794-4290-ac24-1fecf80b3d41"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:39:25 crc kubenswrapper[4558]: I0120 18:39:25.115570 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c70e04a6-d794-4290-ac24-1fecf80b3d41-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "c70e04a6-d794-4290-ac24-1fecf80b3d41" (UID: "c70e04a6-d794-4290-ac24-1fecf80b3d41"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:39:25 crc kubenswrapper[4558]: I0120 18:39:25.133502 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c70e04a6-d794-4290-ac24-1fecf80b3d41-kube-api-access-zvdrp" (OuterVolumeSpecName: "kube-api-access-zvdrp") pod "c70e04a6-d794-4290-ac24-1fecf80b3d41" (UID: "c70e04a6-d794-4290-ac24-1fecf80b3d41"). InnerVolumeSpecName "kube-api-access-zvdrp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:39:25 crc kubenswrapper[4558]: I0120 18:39:25.139072 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/keystone-operator-index-zwc6g"] Jan 20 18:39:25 crc kubenswrapper[4558]: I0120 18:39:25.143914 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/keystone-operator-index-zwc6g" podUID="3fa5954a-a8ea-4d9f-85b9-ccf8e36865a2" containerName="registry-server" containerID="cri-o://679ca985e780fd8183cd32b5d3aab8580a06199168b3843be49f4a1eccf55842" gracePeriod=30 Jan 20 18:39:25 crc kubenswrapper[4558]: I0120 18:39:25.190786 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a069q2k9b"] Jan 20 18:39:25 crc kubenswrapper[4558]: I0120 18:39:25.225798 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c70e04a6-d794-4290-ac24-1fecf80b3d41-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 18:39:25 crc kubenswrapper[4558]: I0120 18:39:25.225836 4558 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c70e04a6-d794-4290-ac24-1fecf80b3d41-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 20 18:39:25 crc kubenswrapper[4558]: I0120 18:39:25.225852 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zvdrp\" (UniqueName: \"kubernetes.io/projected/c70e04a6-d794-4290-ac24-1fecf80b3d41-kube-api-access-zvdrp\") on node \"crc\" DevicePath \"\"" Jan 20 18:39:25 crc kubenswrapper[4558]: I0120 18:39:25.228749 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a069q2k9b"] Jan 20 18:39:25 crc kubenswrapper[4558]: I0120 18:39:25.465260 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-6c45489b98-b6947" Jan 20 18:39:25 crc kubenswrapper[4558]: I0120 18:39:25.510330 4558 generic.go:334] "Generic (PLEG): container finished" podID="c70e04a6-d794-4290-ac24-1fecf80b3d41" containerID="f6c7ab6691baa097b9066d0f32d65f848d6362b59f85d8766a3bec60d4457114" exitCode=0 Jan 20 18:39:25 crc kubenswrapper[4558]: I0120 18:39:25.510391 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/memcached-0" event={"ID":"c70e04a6-d794-4290-ac24-1fecf80b3d41","Type":"ContainerDied","Data":"f6c7ab6691baa097b9066d0f32d65f848d6362b59f85d8766a3bec60d4457114"} Jan 20 18:39:25 crc kubenswrapper[4558]: I0120 18:39:25.510421 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/memcached-0" event={"ID":"c70e04a6-d794-4290-ac24-1fecf80b3d41","Type":"ContainerDied","Data":"43a35e62b1a6a8f712dd08fe080b7c31f6b8b61f8b607dd09fd6eef006192181"} Jan 20 18:39:25 crc kubenswrapper[4558]: I0120 18:39:25.510439 4558 scope.go:117] "RemoveContainer" containerID="f6c7ab6691baa097b9066d0f32d65f848d6362b59f85d8766a3bec60d4457114" Jan 20 18:39:25 crc kubenswrapper[4558]: I0120 18:39:25.510564 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/memcached-0" Jan 20 18:39:25 crc kubenswrapper[4558]: I0120 18:39:25.515417 4558 generic.go:334] "Generic (PLEG): container finished" podID="3fa5954a-a8ea-4d9f-85b9-ccf8e36865a2" containerID="679ca985e780fd8183cd32b5d3aab8580a06199168b3843be49f4a1eccf55842" exitCode=0 Jan 20 18:39:25 crc kubenswrapper[4558]: I0120 18:39:25.515530 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-index-zwc6g" event={"ID":"3fa5954a-a8ea-4d9f-85b9-ccf8e36865a2","Type":"ContainerDied","Data":"679ca985e780fd8183cd32b5d3aab8580a06199168b3843be49f4a1eccf55842"} Jan 20 18:39:25 crc kubenswrapper[4558]: I0120 18:39:25.529013 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cg24c\" (UniqueName: \"kubernetes.io/projected/7e1d554f-8358-4ab1-ac14-764882a73ed2-kube-api-access-cg24c\") pod \"7e1d554f-8358-4ab1-ac14-764882a73ed2\" (UID: \"7e1d554f-8358-4ab1-ac14-764882a73ed2\") " Jan 20 18:39:25 crc kubenswrapper[4558]: I0120 18:39:25.529090 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/7e1d554f-8358-4ab1-ac14-764882a73ed2-apiservice-cert\") pod \"7e1d554f-8358-4ab1-ac14-764882a73ed2\" (UID: \"7e1d554f-8358-4ab1-ac14-764882a73ed2\") " Jan 20 18:39:25 crc kubenswrapper[4558]: I0120 18:39:25.529118 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/7e1d554f-8358-4ab1-ac14-764882a73ed2-webhook-cert\") pod \"7e1d554f-8358-4ab1-ac14-764882a73ed2\" (UID: \"7e1d554f-8358-4ab1-ac14-764882a73ed2\") " Jan 20 18:39:25 crc kubenswrapper[4558]: I0120 18:39:25.529806 4558 generic.go:334] "Generic (PLEG): container finished" podID="7e1d554f-8358-4ab1-ac14-764882a73ed2" containerID="71c9945d04185339d99c62f8d4838a72ee37c8a2480fef4aa780330f9be75810" exitCode=0 Jan 20 18:39:25 crc kubenswrapper[4558]: I0120 18:39:25.529833 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-6c45489b98-b6947" event={"ID":"7e1d554f-8358-4ab1-ac14-764882a73ed2","Type":"ContainerDied","Data":"71c9945d04185339d99c62f8d4838a72ee37c8a2480fef4aa780330f9be75810"} Jan 20 18:39:25 crc kubenswrapper[4558]: I0120 18:39:25.529852 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-6c45489b98-b6947" event={"ID":"7e1d554f-8358-4ab1-ac14-764882a73ed2","Type":"ContainerDied","Data":"9c4bdbc8ad238470d90442956d64d72149f20345568f45f2cd24317dbaa60891"} Jan 20 18:39:25 crc kubenswrapper[4558]: I0120 18:39:25.529893 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-6c45489b98-b6947" Jan 20 18:39:25 crc kubenswrapper[4558]: I0120 18:39:25.542431 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7e1d554f-8358-4ab1-ac14-764882a73ed2-kube-api-access-cg24c" (OuterVolumeSpecName: "kube-api-access-cg24c") pod "7e1d554f-8358-4ab1-ac14-764882a73ed2" (UID: "7e1d554f-8358-4ab1-ac14-764882a73ed2"). InnerVolumeSpecName "kube-api-access-cg24c". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:39:25 crc kubenswrapper[4558]: I0120 18:39:25.544327 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e1d554f-8358-4ab1-ac14-764882a73ed2-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "7e1d554f-8358-4ab1-ac14-764882a73ed2" (UID: "7e1d554f-8358-4ab1-ac14-764882a73ed2"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:39:25 crc kubenswrapper[4558]: I0120 18:39:25.549046 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e1d554f-8358-4ab1-ac14-764882a73ed2-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "7e1d554f-8358-4ab1-ac14-764882a73ed2" (UID: "7e1d554f-8358-4ab1-ac14-764882a73ed2"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:39:25 crc kubenswrapper[4558]: I0120 18:39:25.573854 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-index-zwc6g" Jan 20 18:39:25 crc kubenswrapper[4558]: I0120 18:39:25.585768 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/memcached-0"] Jan 20 18:39:25 crc kubenswrapper[4558]: I0120 18:39:25.591231 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/memcached-0"] Jan 20 18:39:25 crc kubenswrapper[4558]: I0120 18:39:25.594376 4558 scope.go:117] "RemoveContainer" containerID="f6c7ab6691baa097b9066d0f32d65f848d6362b59f85d8766a3bec60d4457114" Jan 20 18:39:25 crc kubenswrapper[4558]: E0120 18:39:25.597620 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f6c7ab6691baa097b9066d0f32d65f848d6362b59f85d8766a3bec60d4457114\": container with ID starting with f6c7ab6691baa097b9066d0f32d65f848d6362b59f85d8766a3bec60d4457114 not found: ID does not exist" containerID="f6c7ab6691baa097b9066d0f32d65f848d6362b59f85d8766a3bec60d4457114" Jan 20 18:39:25 crc kubenswrapper[4558]: I0120 18:39:25.597711 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f6c7ab6691baa097b9066d0f32d65f848d6362b59f85d8766a3bec60d4457114"} err="failed to get container status \"f6c7ab6691baa097b9066d0f32d65f848d6362b59f85d8766a3bec60d4457114\": rpc error: code = NotFound desc = could not find container \"f6c7ab6691baa097b9066d0f32d65f848d6362b59f85d8766a3bec60d4457114\": container with ID starting with f6c7ab6691baa097b9066d0f32d65f848d6362b59f85d8766a3bec60d4457114 not found: ID does not exist" Jan 20 18:39:25 crc kubenswrapper[4558]: I0120 18:39:25.597825 4558 scope.go:117] "RemoveContainer" containerID="71c9945d04185339d99c62f8d4838a72ee37c8a2480fef4aa780330f9be75810" Jan 20 18:39:25 crc kubenswrapper[4558]: I0120 18:39:25.622371 4558 scope.go:117] "RemoveContainer" containerID="71c9945d04185339d99c62f8d4838a72ee37c8a2480fef4aa780330f9be75810" Jan 20 18:39:25 crc kubenswrapper[4558]: E0120 18:39:25.623387 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"71c9945d04185339d99c62f8d4838a72ee37c8a2480fef4aa780330f9be75810\": container with ID starting with 71c9945d04185339d99c62f8d4838a72ee37c8a2480fef4aa780330f9be75810 not found: ID does not exist" containerID="71c9945d04185339d99c62f8d4838a72ee37c8a2480fef4aa780330f9be75810" Jan 20 18:39:25 crc kubenswrapper[4558]: I0120 
18:39:25.623434 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"71c9945d04185339d99c62f8d4838a72ee37c8a2480fef4aa780330f9be75810"} err="failed to get container status \"71c9945d04185339d99c62f8d4838a72ee37c8a2480fef4aa780330f9be75810\": rpc error: code = NotFound desc = could not find container \"71c9945d04185339d99c62f8d4838a72ee37c8a2480fef4aa780330f9be75810\": container with ID starting with 71c9945d04185339d99c62f8d4838a72ee37c8a2480fef4aa780330f9be75810 not found: ID does not exist" Jan 20 18:39:25 crc kubenswrapper[4558]: I0120 18:39:25.631884 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cg24c\" (UniqueName: \"kubernetes.io/projected/7e1d554f-8358-4ab1-ac14-764882a73ed2-kube-api-access-cg24c\") on node \"crc\" DevicePath \"\"" Jan 20 18:39:25 crc kubenswrapper[4558]: I0120 18:39:25.631917 4558 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/7e1d554f-8358-4ab1-ac14-764882a73ed2-apiservice-cert\") on node \"crc\" DevicePath \"\"" Jan 20 18:39:25 crc kubenswrapper[4558]: I0120 18:39:25.631927 4558 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/7e1d554f-8358-4ab1-ac14-764882a73ed2-webhook-cert\") on node \"crc\" DevicePath \"\"" Jan 20 18:39:25 crc kubenswrapper[4558]: I0120 18:39:25.732727 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qr7ll\" (UniqueName: \"kubernetes.io/projected/3fa5954a-a8ea-4d9f-85b9-ccf8e36865a2-kube-api-access-qr7ll\") pod \"3fa5954a-a8ea-4d9f-85b9-ccf8e36865a2\" (UID: \"3fa5954a-a8ea-4d9f-85b9-ccf8e36865a2\") " Jan 20 18:39:25 crc kubenswrapper[4558]: I0120 18:39:25.736110 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3fa5954a-a8ea-4d9f-85b9-ccf8e36865a2-kube-api-access-qr7ll" (OuterVolumeSpecName: "kube-api-access-qr7ll") pod "3fa5954a-a8ea-4d9f-85b9-ccf8e36865a2" (UID: "3fa5954a-a8ea-4d9f-85b9-ccf8e36865a2"). InnerVolumeSpecName "kube-api-access-qr7ll". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:39:25 crc kubenswrapper[4558]: I0120 18:39:25.845312 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qr7ll\" (UniqueName: \"kubernetes.io/projected/3fa5954a-a8ea-4d9f-85b9-ccf8e36865a2-kube-api-access-qr7ll\") on node \"crc\" DevicePath \"\"" Jan 20 18:39:25 crc kubenswrapper[4558]: I0120 18:39:25.884429 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-6c45489b98-b6947"] Jan 20 18:39:25 crc kubenswrapper[4558]: I0120 18:39:25.891142 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-6c45489b98-b6947"] Jan 20 18:39:25 crc kubenswrapper[4558]: I0120 18:39:25.918052 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/rabbitmq-server-0" Jan 20 18:39:25 crc kubenswrapper[4558]: E0120 18:39:25.920357 4558 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7e1d554f_8358_4ab1_ac14_764882a73ed2.slice\": RecentStats: unable to find data in memory cache]" Jan 20 18:39:26 crc kubenswrapper[4558]: I0120 18:39:26.048095 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/2275b006-a890-400e-83b6-1c46bf67f62e-rabbitmq-confd\") pod \"2275b006-a890-400e-83b6-1c46bf67f62e\" (UID: \"2275b006-a890-400e-83b6-1c46bf67f62e\") " Jan 20 18:39:26 crc kubenswrapper[4558]: I0120 18:39:26.048476 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/2275b006-a890-400e-83b6-1c46bf67f62e-plugins-conf\") pod \"2275b006-a890-400e-83b6-1c46bf67f62e\" (UID: \"2275b006-a890-400e-83b6-1c46bf67f62e\") " Jan 20 18:39:26 crc kubenswrapper[4558]: I0120 18:39:26.048537 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/2275b006-a890-400e-83b6-1c46bf67f62e-erlang-cookie-secret\") pod \"2275b006-a890-400e-83b6-1c46bf67f62e\" (UID: \"2275b006-a890-400e-83b6-1c46bf67f62e\") " Jan 20 18:39:26 crc kubenswrapper[4558]: I0120 18:39:26.048690 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ce1c6728-05f2-432f-a7e7-15926523339d\") pod \"2275b006-a890-400e-83b6-1c46bf67f62e\" (UID: \"2275b006-a890-400e-83b6-1c46bf67f62e\") " Jan 20 18:39:26 crc kubenswrapper[4558]: I0120 18:39:26.048750 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/2275b006-a890-400e-83b6-1c46bf67f62e-rabbitmq-plugins\") pod \"2275b006-a890-400e-83b6-1c46bf67f62e\" (UID: \"2275b006-a890-400e-83b6-1c46bf67f62e\") " Jan 20 18:39:26 crc kubenswrapper[4558]: I0120 18:39:26.048859 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jfgbz\" (UniqueName: \"kubernetes.io/projected/2275b006-a890-400e-83b6-1c46bf67f62e-kube-api-access-jfgbz\") pod \"2275b006-a890-400e-83b6-1c46bf67f62e\" (UID: \"2275b006-a890-400e-83b6-1c46bf67f62e\") " Jan 20 18:39:26 crc kubenswrapper[4558]: I0120 18:39:26.048907 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/2275b006-a890-400e-83b6-1c46bf67f62e-pod-info\") pod \"2275b006-a890-400e-83b6-1c46bf67f62e\" (UID: \"2275b006-a890-400e-83b6-1c46bf67f62e\") " Jan 20 18:39:26 crc kubenswrapper[4558]: I0120 18:39:26.048947 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/2275b006-a890-400e-83b6-1c46bf67f62e-rabbitmq-erlang-cookie\") pod \"2275b006-a890-400e-83b6-1c46bf67f62e\" (UID: \"2275b006-a890-400e-83b6-1c46bf67f62e\") " Jan 20 18:39:26 crc kubenswrapper[4558]: I0120 18:39:26.049058 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2275b006-a890-400e-83b6-1c46bf67f62e-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod 
"2275b006-a890-400e-83b6-1c46bf67f62e" (UID: "2275b006-a890-400e-83b6-1c46bf67f62e"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:39:26 crc kubenswrapper[4558]: I0120 18:39:26.049314 4558 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/2275b006-a890-400e-83b6-1c46bf67f62e-plugins-conf\") on node \"crc\" DevicePath \"\"" Jan 20 18:39:26 crc kubenswrapper[4558]: I0120 18:39:26.049371 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2275b006-a890-400e-83b6-1c46bf67f62e-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "2275b006-a890-400e-83b6-1c46bf67f62e" (UID: "2275b006-a890-400e-83b6-1c46bf67f62e"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:39:26 crc kubenswrapper[4558]: I0120 18:39:26.049683 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2275b006-a890-400e-83b6-1c46bf67f62e-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "2275b006-a890-400e-83b6-1c46bf67f62e" (UID: "2275b006-a890-400e-83b6-1c46bf67f62e"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:39:26 crc kubenswrapper[4558]: I0120 18:39:26.052207 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2275b006-a890-400e-83b6-1c46bf67f62e-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "2275b006-a890-400e-83b6-1c46bf67f62e" (UID: "2275b006-a890-400e-83b6-1c46bf67f62e"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:39:26 crc kubenswrapper[4558]: I0120 18:39:26.052646 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/2275b006-a890-400e-83b6-1c46bf67f62e-pod-info" (OuterVolumeSpecName: "pod-info") pod "2275b006-a890-400e-83b6-1c46bf67f62e" (UID: "2275b006-a890-400e-83b6-1c46bf67f62e"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Jan 20 18:39:26 crc kubenswrapper[4558]: I0120 18:39:26.055698 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2275b006-a890-400e-83b6-1c46bf67f62e-kube-api-access-jfgbz" (OuterVolumeSpecName: "kube-api-access-jfgbz") pod "2275b006-a890-400e-83b6-1c46bf67f62e" (UID: "2275b006-a890-400e-83b6-1c46bf67f62e"). InnerVolumeSpecName "kube-api-access-jfgbz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:39:26 crc kubenswrapper[4558]: I0120 18:39:26.070149 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ce1c6728-05f2-432f-a7e7-15926523339d" (OuterVolumeSpecName: "persistence") pod "2275b006-a890-400e-83b6-1c46bf67f62e" (UID: "2275b006-a890-400e-83b6-1c46bf67f62e"). InnerVolumeSpecName "pvc-ce1c6728-05f2-432f-a7e7-15926523339d". PluginName "kubernetes.io/csi", VolumeGidValue "" Jan 20 18:39:26 crc kubenswrapper[4558]: I0120 18:39:26.098777 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2275b006-a890-400e-83b6-1c46bf67f62e-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "2275b006-a890-400e-83b6-1c46bf67f62e" (UID: "2275b006-a890-400e-83b6-1c46bf67f62e"). 
InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:39:26 crc kubenswrapper[4558]: I0120 18:39:26.150750 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/2275b006-a890-400e-83b6-1c46bf67f62e-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Jan 20 18:39:26 crc kubenswrapper[4558]: I0120 18:39:26.150789 4558 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/2275b006-a890-400e-83b6-1c46bf67f62e-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Jan 20 18:39:26 crc kubenswrapper[4558]: I0120 18:39:26.150832 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-ce1c6728-05f2-432f-a7e7-15926523339d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ce1c6728-05f2-432f-a7e7-15926523339d\") on node \"crc\" " Jan 20 18:39:26 crc kubenswrapper[4558]: I0120 18:39:26.150851 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/2275b006-a890-400e-83b6-1c46bf67f62e-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Jan 20 18:39:26 crc kubenswrapper[4558]: I0120 18:39:26.150864 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jfgbz\" (UniqueName: \"kubernetes.io/projected/2275b006-a890-400e-83b6-1c46bf67f62e-kube-api-access-jfgbz\") on node \"crc\" DevicePath \"\"" Jan 20 18:39:26 crc kubenswrapper[4558]: I0120 18:39:26.150874 4558 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/2275b006-a890-400e-83b6-1c46bf67f62e-pod-info\") on node \"crc\" DevicePath \"\"" Jan 20 18:39:26 crc kubenswrapper[4558]: I0120 18:39:26.150883 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/2275b006-a890-400e-83b6-1c46bf67f62e-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Jan 20 18:39:26 crc kubenswrapper[4558]: I0120 18:39:26.166252 4558 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... Jan 20 18:39:26 crc kubenswrapper[4558]: I0120 18:39:26.166430 4558 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-ce1c6728-05f2-432f-a7e7-15926523339d" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ce1c6728-05f2-432f-a7e7-15926523339d") on node "crc" Jan 20 18:39:26 crc kubenswrapper[4558]: I0120 18:39:26.252309 4558 reconciler_common.go:293] "Volume detached for volume \"pvc-ce1c6728-05f2-432f-a7e7-15926523339d\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ce1c6728-05f2-432f-a7e7-15926523339d\") on node \"crc\" DevicePath \"\"" Jan 20 18:39:26 crc kubenswrapper[4558]: I0120 18:39:26.538248 4558 generic.go:334] "Generic (PLEG): container finished" podID="2275b006-a890-400e-83b6-1c46bf67f62e" containerID="45e5c44ef4c79d694b34e00a259e1223a9488d8427afb7c95c4ae868a72e5bd9" exitCode=0 Jan 20 18:39:26 crc kubenswrapper[4558]: I0120 18:39:26.538311 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/rabbitmq-server-0" Jan 20 18:39:26 crc kubenswrapper[4558]: I0120 18:39:26.538358 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/rabbitmq-server-0" event={"ID":"2275b006-a890-400e-83b6-1c46bf67f62e","Type":"ContainerDied","Data":"45e5c44ef4c79d694b34e00a259e1223a9488d8427afb7c95c4ae868a72e5bd9"} Jan 20 18:39:26 crc kubenswrapper[4558]: I0120 18:39:26.538428 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/rabbitmq-server-0" event={"ID":"2275b006-a890-400e-83b6-1c46bf67f62e","Type":"ContainerDied","Data":"be65202215aaffce0ad3e263130c74f870299cf034d08bff14117c0fc0cc4439"} Jan 20 18:39:26 crc kubenswrapper[4558]: I0120 18:39:26.538450 4558 scope.go:117] "RemoveContainer" containerID="45e5c44ef4c79d694b34e00a259e1223a9488d8427afb7c95c4ae868a72e5bd9" Jan 20 18:39:26 crc kubenswrapper[4558]: I0120 18:39:26.542481 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-index-zwc6g" event={"ID":"3fa5954a-a8ea-4d9f-85b9-ccf8e36865a2","Type":"ContainerDied","Data":"8202a181a066579a1f100565259340e2fa9cb762c594a8310a320e581b35eda8"} Jan 20 18:39:26 crc kubenswrapper[4558]: I0120 18:39:26.542503 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-index-zwc6g" Jan 20 18:39:26 crc kubenswrapper[4558]: I0120 18:39:26.573889 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7e1d554f-8358-4ab1-ac14-764882a73ed2" path="/var/lib/kubelet/pods/7e1d554f-8358-4ab1-ac14-764882a73ed2/volumes" Jan 20 18:39:26 crc kubenswrapper[4558]: I0120 18:39:26.574439 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="89cbbf9e-cc1a-41c9-b39a-16efc1464749" path="/var/lib/kubelet/pods/89cbbf9e-cc1a-41c9-b39a-16efc1464749/volumes" Jan 20 18:39:26 crc kubenswrapper[4558]: I0120 18:39:26.575044 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c70e04a6-d794-4290-ac24-1fecf80b3d41" path="/var/lib/kubelet/pods/c70e04a6-d794-4290-ac24-1fecf80b3d41/volumes" Jan 20 18:39:26 crc kubenswrapper[4558]: I0120 18:39:26.587528 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/rabbitmq-server-0"] Jan 20 18:39:26 crc kubenswrapper[4558]: I0120 18:39:26.594089 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/rabbitmq-server-0"] Jan 20 18:39:26 crc kubenswrapper[4558]: I0120 18:39:26.597665 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/keystone-operator-index-zwc6g"] Jan 20 18:39:26 crc kubenswrapper[4558]: I0120 18:39:26.600824 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/keystone-operator-index-zwc6g"] Jan 20 18:39:26 crc kubenswrapper[4558]: I0120 18:39:26.602407 4558 scope.go:117] "RemoveContainer" containerID="d9c022000639f60b589dd52328b89c62f22fa3c2d49f87fd4714cd3c99bfb24a" Jan 20 18:39:26 crc kubenswrapper[4558]: I0120 18:39:26.622189 4558 scope.go:117] "RemoveContainer" containerID="45e5c44ef4c79d694b34e00a259e1223a9488d8427afb7c95c4ae868a72e5bd9" Jan 20 18:39:26 crc kubenswrapper[4558]: E0120 18:39:26.622704 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"45e5c44ef4c79d694b34e00a259e1223a9488d8427afb7c95c4ae868a72e5bd9\": container with ID starting with 45e5c44ef4c79d694b34e00a259e1223a9488d8427afb7c95c4ae868a72e5bd9 not found: ID 
does not exist" containerID="45e5c44ef4c79d694b34e00a259e1223a9488d8427afb7c95c4ae868a72e5bd9" Jan 20 18:39:26 crc kubenswrapper[4558]: I0120 18:39:26.622750 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"45e5c44ef4c79d694b34e00a259e1223a9488d8427afb7c95c4ae868a72e5bd9"} err="failed to get container status \"45e5c44ef4c79d694b34e00a259e1223a9488d8427afb7c95c4ae868a72e5bd9\": rpc error: code = NotFound desc = could not find container \"45e5c44ef4c79d694b34e00a259e1223a9488d8427afb7c95c4ae868a72e5bd9\": container with ID starting with 45e5c44ef4c79d694b34e00a259e1223a9488d8427afb7c95c4ae868a72e5bd9 not found: ID does not exist" Jan 20 18:39:26 crc kubenswrapper[4558]: I0120 18:39:26.622785 4558 scope.go:117] "RemoveContainer" containerID="d9c022000639f60b589dd52328b89c62f22fa3c2d49f87fd4714cd3c99bfb24a" Jan 20 18:39:26 crc kubenswrapper[4558]: E0120 18:39:26.623099 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d9c022000639f60b589dd52328b89c62f22fa3c2d49f87fd4714cd3c99bfb24a\": container with ID starting with d9c022000639f60b589dd52328b89c62f22fa3c2d49f87fd4714cd3c99bfb24a not found: ID does not exist" containerID="d9c022000639f60b589dd52328b89c62f22fa3c2d49f87fd4714cd3c99bfb24a" Jan 20 18:39:26 crc kubenswrapper[4558]: I0120 18:39:26.623123 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d9c022000639f60b589dd52328b89c62f22fa3c2d49f87fd4714cd3c99bfb24a"} err="failed to get container status \"d9c022000639f60b589dd52328b89c62f22fa3c2d49f87fd4714cd3c99bfb24a\": rpc error: code = NotFound desc = could not find container \"d9c022000639f60b589dd52328b89c62f22fa3c2d49f87fd4714cd3c99bfb24a\": container with ID starting with d9c022000639f60b589dd52328b89c62f22fa3c2d49f87fd4714cd3c99bfb24a not found: ID does not exist" Jan 20 18:39:26 crc kubenswrapper[4558]: I0120 18:39:26.623137 4558 scope.go:117] "RemoveContainer" containerID="679ca985e780fd8183cd32b5d3aab8580a06199168b3843be49f4a1eccf55842" Jan 20 18:39:26 crc kubenswrapper[4558]: I0120 18:39:26.790955 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/openstack-galera-1" Jan 20 18:39:26 crc kubenswrapper[4558]: I0120 18:39:26.961030 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="keystone-kuttl-tests/openstack-galera-0" podUID="87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8" containerName="galera" containerID="cri-o://7411bf3442e75080999942d3466d7ac7fa0fd8befd581f71569639739b51492a" gracePeriod=26 Jan 20 18:39:26 crc kubenswrapper[4558]: I0120 18:39:26.966261 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/160b737f-7377-4d3a-8fff-3b96b9a614bc-config-data-generated\") pod \"160b737f-7377-4d3a-8fff-3b96b9a614bc\" (UID: \"160b737f-7377-4d3a-8fff-3b96b9a614bc\") " Jan 20 18:39:26 crc kubenswrapper[4558]: I0120 18:39:26.966375 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/160b737f-7377-4d3a-8fff-3b96b9a614bc-operator-scripts\") pod \"160b737f-7377-4d3a-8fff-3b96b9a614bc\" (UID: \"160b737f-7377-4d3a-8fff-3b96b9a614bc\") " Jan 20 18:39:26 crc kubenswrapper[4558]: I0120 18:39:26.966497 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/160b737f-7377-4d3a-8fff-3b96b9a614bc-config-data-default\") pod \"160b737f-7377-4d3a-8fff-3b96b9a614bc\" (UID: \"160b737f-7377-4d3a-8fff-3b96b9a614bc\") " Jan 20 18:39:26 crc kubenswrapper[4558]: I0120 18:39:26.966548 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") pod \"160b737f-7377-4d3a-8fff-3b96b9a614bc\" (UID: \"160b737f-7377-4d3a-8fff-3b96b9a614bc\") " Jan 20 18:39:26 crc kubenswrapper[4558]: I0120 18:39:26.966600 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/160b737f-7377-4d3a-8fff-3b96b9a614bc-kolla-config\") pod \"160b737f-7377-4d3a-8fff-3b96b9a614bc\" (UID: \"160b737f-7377-4d3a-8fff-3b96b9a614bc\") " Jan 20 18:39:26 crc kubenswrapper[4558]: I0120 18:39:26.966734 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk2l7\" (UniqueName: \"kubernetes.io/projected/160b737f-7377-4d3a-8fff-3b96b9a614bc-kube-api-access-tk2l7\") pod \"160b737f-7377-4d3a-8fff-3b96b9a614bc\" (UID: \"160b737f-7377-4d3a-8fff-3b96b9a614bc\") " Jan 20 18:39:26 crc kubenswrapper[4558]: I0120 18:39:26.967036 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/160b737f-7377-4d3a-8fff-3b96b9a614bc-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "160b737f-7377-4d3a-8fff-3b96b9a614bc" (UID: "160b737f-7377-4d3a-8fff-3b96b9a614bc"). InnerVolumeSpecName "config-data-generated". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:39:26 crc kubenswrapper[4558]: I0120 18:39:26.967320 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/160b737f-7377-4d3a-8fff-3b96b9a614bc-config-data-generated\") on node \"crc\" DevicePath \"\"" Jan 20 18:39:26 crc kubenswrapper[4558]: I0120 18:39:26.967526 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/160b737f-7377-4d3a-8fff-3b96b9a614bc-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "160b737f-7377-4d3a-8fff-3b96b9a614bc" (UID: "160b737f-7377-4d3a-8fff-3b96b9a614bc"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:39:26 crc kubenswrapper[4558]: I0120 18:39:26.967800 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/160b737f-7377-4d3a-8fff-3b96b9a614bc-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "160b737f-7377-4d3a-8fff-3b96b9a614bc" (UID: "160b737f-7377-4d3a-8fff-3b96b9a614bc"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:39:26 crc kubenswrapper[4558]: I0120 18:39:26.967912 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/160b737f-7377-4d3a-8fff-3b96b9a614bc-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "160b737f-7377-4d3a-8fff-3b96b9a614bc" (UID: "160b737f-7377-4d3a-8fff-3b96b9a614bc"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:39:26 crc kubenswrapper[4558]: I0120 18:39:26.972877 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/160b737f-7377-4d3a-8fff-3b96b9a614bc-kube-api-access-tk2l7" (OuterVolumeSpecName: "kube-api-access-tk2l7") pod "160b737f-7377-4d3a-8fff-3b96b9a614bc" (UID: "160b737f-7377-4d3a-8fff-3b96b9a614bc"). InnerVolumeSpecName "kube-api-access-tk2l7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:39:26 crc kubenswrapper[4558]: I0120 18:39:26.977645 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage15-crc" (OuterVolumeSpecName: "mysql-db") pod "160b737f-7377-4d3a-8fff-3b96b9a614bc" (UID: "160b737f-7377-4d3a-8fff-3b96b9a614bc"). InnerVolumeSpecName "local-storage15-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 18:39:27 crc kubenswrapper[4558]: I0120 18:39:27.069314 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/160b737f-7377-4d3a-8fff-3b96b9a614bc-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 18:39:27 crc kubenswrapper[4558]: I0120 18:39:27.069353 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/160b737f-7377-4d3a-8fff-3b96b9a614bc-config-data-default\") on node \"crc\" DevicePath \"\"" Jan 20 18:39:27 crc kubenswrapper[4558]: I0120 18:39:27.069414 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") on node \"crc\" " Jan 20 18:39:27 crc kubenswrapper[4558]: I0120 18:39:27.069428 4558 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/160b737f-7377-4d3a-8fff-3b96b9a614bc-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 20 18:39:27 crc kubenswrapper[4558]: I0120 18:39:27.069449 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk2l7\" (UniqueName: \"kubernetes.io/projected/160b737f-7377-4d3a-8fff-3b96b9a614bc-kube-api-access-tk2l7\") on node \"crc\" DevicePath \"\"" Jan 20 18:39:27 crc kubenswrapper[4558]: I0120 18:39:27.080334 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage15-crc" (UniqueName: "kubernetes.io/local-volume/local-storage15-crc") on node "crc" Jan 20 18:39:27 crc kubenswrapper[4558]: I0120 18:39:27.171017 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage15-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage15-crc\") on node \"crc\" DevicePath \"\"" Jan 20 18:39:27 crc kubenswrapper[4558]: I0120 18:39:27.506850 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/openstack-galera-0" Jan 20 18:39:27 crc kubenswrapper[4558]: I0120 18:39:27.554752 4558 generic.go:334] "Generic (PLEG): container finished" podID="160b737f-7377-4d3a-8fff-3b96b9a614bc" containerID="c389d8005807225cc21db8265ae5fc2539848a0e1a797f0b1a89dda2965c682f" exitCode=0 Jan 20 18:39:27 crc kubenswrapper[4558]: I0120 18:39:27.554789 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/openstack-galera-1" Jan 20 18:39:27 crc kubenswrapper[4558]: I0120 18:39:27.554864 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/openstack-galera-1" event={"ID":"160b737f-7377-4d3a-8fff-3b96b9a614bc","Type":"ContainerDied","Data":"c389d8005807225cc21db8265ae5fc2539848a0e1a797f0b1a89dda2965c682f"} Jan 20 18:39:27 crc kubenswrapper[4558]: I0120 18:39:27.554933 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/openstack-galera-1" event={"ID":"160b737f-7377-4d3a-8fff-3b96b9a614bc","Type":"ContainerDied","Data":"be203ee3845f9a077f56d6585dc60586c09f9fb58acf1dd477b7caeef6a2cd4c"} Jan 20 18:39:27 crc kubenswrapper[4558]: I0120 18:39:27.554958 4558 scope.go:117] "RemoveContainer" containerID="c389d8005807225cc21db8265ae5fc2539848a0e1a797f0b1a89dda2965c682f" Jan 20 18:39:27 crc kubenswrapper[4558]: I0120 18:39:27.558919 4558 generic.go:334] "Generic (PLEG): container finished" podID="87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8" containerID="7411bf3442e75080999942d3466d7ac7fa0fd8befd581f71569639739b51492a" exitCode=0 Jan 20 18:39:27 crc kubenswrapper[4558]: I0120 18:39:27.558969 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/openstack-galera-0" event={"ID":"87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8","Type":"ContainerDied","Data":"7411bf3442e75080999942d3466d7ac7fa0fd8befd581f71569639739b51492a"} Jan 20 18:39:27 crc kubenswrapper[4558]: I0120 18:39:27.559003 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/openstack-galera-0" event={"ID":"87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8","Type":"ContainerDied","Data":"b5d628c5a3128b8dc2c86705ca969b89f85b278eb2da4cd64a23530e19cf6d40"} Jan 20 18:39:27 crc kubenswrapper[4558]: I0120 18:39:27.559074 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/openstack-galera-0" Jan 20 18:39:27 crc kubenswrapper[4558]: I0120 18:39:27.581930 4558 scope.go:117] "RemoveContainer" containerID="6e984aac6ee9677cdf1c592c71b4863c21b8bd0d4ace94da03b714c88dfeedb7" Jan 20 18:39:27 crc kubenswrapper[4558]: I0120 18:39:27.587274 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/openstack-galera-1"] Jan 20 18:39:27 crc kubenswrapper[4558]: I0120 18:39:27.609756 4558 scope.go:117] "RemoveContainer" containerID="c389d8005807225cc21db8265ae5fc2539848a0e1a797f0b1a89dda2965c682f" Jan 20 18:39:27 crc kubenswrapper[4558]: E0120 18:39:27.610529 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c389d8005807225cc21db8265ae5fc2539848a0e1a797f0b1a89dda2965c682f\": container with ID starting with c389d8005807225cc21db8265ae5fc2539848a0e1a797f0b1a89dda2965c682f not found: ID does not exist" containerID="c389d8005807225cc21db8265ae5fc2539848a0e1a797f0b1a89dda2965c682f" Jan 20 18:39:27 crc kubenswrapper[4558]: I0120 18:39:27.610578 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c389d8005807225cc21db8265ae5fc2539848a0e1a797f0b1a89dda2965c682f"} err="failed to get container status \"c389d8005807225cc21db8265ae5fc2539848a0e1a797f0b1a89dda2965c682f\": rpc error: code = NotFound desc = could not find container \"c389d8005807225cc21db8265ae5fc2539848a0e1a797f0b1a89dda2965c682f\": container with ID starting with c389d8005807225cc21db8265ae5fc2539848a0e1a797f0b1a89dda2965c682f not found: ID does not exist" Jan 20 18:39:27 crc kubenswrapper[4558]: I0120 18:39:27.610607 4558 scope.go:117] "RemoveContainer" containerID="6e984aac6ee9677cdf1c592c71b4863c21b8bd0d4ace94da03b714c88dfeedb7" Jan 20 18:39:27 crc kubenswrapper[4558]: E0120 18:39:27.611210 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6e984aac6ee9677cdf1c592c71b4863c21b8bd0d4ace94da03b714c88dfeedb7\": container with ID starting with 6e984aac6ee9677cdf1c592c71b4863c21b8bd0d4ace94da03b714c88dfeedb7 not found: ID does not exist" containerID="6e984aac6ee9677cdf1c592c71b4863c21b8bd0d4ace94da03b714c88dfeedb7" Jan 20 18:39:27 crc kubenswrapper[4558]: I0120 18:39:27.611242 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6e984aac6ee9677cdf1c592c71b4863c21b8bd0d4ace94da03b714c88dfeedb7"} err="failed to get container status \"6e984aac6ee9677cdf1c592c71b4863c21b8bd0d4ace94da03b714c88dfeedb7\": rpc error: code = NotFound desc = could not find container \"6e984aac6ee9677cdf1c592c71b4863c21b8bd0d4ace94da03b714c88dfeedb7\": container with ID starting with 6e984aac6ee9677cdf1c592c71b4863c21b8bd0d4ace94da03b714c88dfeedb7 not found: ID does not exist" Jan 20 18:39:27 crc kubenswrapper[4558]: I0120 18:39:27.611256 4558 scope.go:117] "RemoveContainer" containerID="7411bf3442e75080999942d3466d7ac7fa0fd8befd581f71569639739b51492a" Jan 20 18:39:27 crc kubenswrapper[4558]: I0120 18:39:27.611350 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/openstack-galera-1"] Jan 20 18:39:27 crc kubenswrapper[4558]: I0120 18:39:27.627840 4558 scope.go:117] "RemoveContainer" containerID="90a03121d50c44895e361b832a6554b41e7a21defd42de4a323ee9fc644fed96" Jan 20 18:39:27 crc kubenswrapper[4558]: I0120 18:39:27.645680 4558 scope.go:117] "RemoveContainer" 
containerID="7411bf3442e75080999942d3466d7ac7fa0fd8befd581f71569639739b51492a" Jan 20 18:39:27 crc kubenswrapper[4558]: E0120 18:39:27.645984 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7411bf3442e75080999942d3466d7ac7fa0fd8befd581f71569639739b51492a\": container with ID starting with 7411bf3442e75080999942d3466d7ac7fa0fd8befd581f71569639739b51492a not found: ID does not exist" containerID="7411bf3442e75080999942d3466d7ac7fa0fd8befd581f71569639739b51492a" Jan 20 18:39:27 crc kubenswrapper[4558]: I0120 18:39:27.646019 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7411bf3442e75080999942d3466d7ac7fa0fd8befd581f71569639739b51492a"} err="failed to get container status \"7411bf3442e75080999942d3466d7ac7fa0fd8befd581f71569639739b51492a\": rpc error: code = NotFound desc = could not find container \"7411bf3442e75080999942d3466d7ac7fa0fd8befd581f71569639739b51492a\": container with ID starting with 7411bf3442e75080999942d3466d7ac7fa0fd8befd581f71569639739b51492a not found: ID does not exist" Jan 20 18:39:27 crc kubenswrapper[4558]: I0120 18:39:27.646036 4558 scope.go:117] "RemoveContainer" containerID="90a03121d50c44895e361b832a6554b41e7a21defd42de4a323ee9fc644fed96" Jan 20 18:39:27 crc kubenswrapper[4558]: E0120 18:39:27.646301 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"90a03121d50c44895e361b832a6554b41e7a21defd42de4a323ee9fc644fed96\": container with ID starting with 90a03121d50c44895e361b832a6554b41e7a21defd42de4a323ee9fc644fed96 not found: ID does not exist" containerID="90a03121d50c44895e361b832a6554b41e7a21defd42de4a323ee9fc644fed96" Jan 20 18:39:27 crc kubenswrapper[4558]: I0120 18:39:27.646335 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"90a03121d50c44895e361b832a6554b41e7a21defd42de4a323ee9fc644fed96"} err="failed to get container status \"90a03121d50c44895e361b832a6554b41e7a21defd42de4a323ee9fc644fed96\": rpc error: code = NotFound desc = could not find container \"90a03121d50c44895e361b832a6554b41e7a21defd42de4a323ee9fc644fed96\": container with ID starting with 90a03121d50c44895e361b832a6554b41e7a21defd42de4a323ee9fc644fed96 not found: ID does not exist" Jan 20 18:39:27 crc kubenswrapper[4558]: I0120 18:39:27.681292 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8-kolla-config\") pod \"87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8\" (UID: \"87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8\") " Jan 20 18:39:27 crc kubenswrapper[4558]: I0120 18:39:27.681404 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8-config-data-generated\") pod \"87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8\" (UID: \"87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8\") " Jan 20 18:39:27 crc kubenswrapper[4558]: I0120 18:39:27.681516 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2njxt\" (UniqueName: \"kubernetes.io/projected/87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8-kube-api-access-2njxt\") pod \"87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8\" (UID: \"87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8\") " Jan 20 18:39:27 crc kubenswrapper[4558]: I0120 18:39:27.681569 4558 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8-operator-scripts\") pod \"87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8\" (UID: \"87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8\") " Jan 20 18:39:27 crc kubenswrapper[4558]: I0120 18:39:27.681648 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8-config-data-default\") pod \"87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8\" (UID: \"87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8\") " Jan 20 18:39:27 crc kubenswrapper[4558]: I0120 18:39:27.681731 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8\" (UID: \"87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8\") " Jan 20 18:39:27 crc kubenswrapper[4558]: I0120 18:39:27.681881 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8" (UID: "87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:39:27 crc kubenswrapper[4558]: I0120 18:39:27.681962 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8" (UID: "87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:39:27 crc kubenswrapper[4558]: I0120 18:39:27.682242 4558 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 20 18:39:27 crc kubenswrapper[4558]: I0120 18:39:27.682292 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8-config-data-generated\") on node \"crc\" DevicePath \"\"" Jan 20 18:39:27 crc kubenswrapper[4558]: I0120 18:39:27.683331 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8" (UID: "87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:39:27 crc kubenswrapper[4558]: I0120 18:39:27.683373 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8" (UID: "87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:39:27 crc kubenswrapper[4558]: I0120 18:39:27.686123 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8-kube-api-access-2njxt" (OuterVolumeSpecName: "kube-api-access-2njxt") pod "87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8" (UID: "87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8"). InnerVolumeSpecName "kube-api-access-2njxt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:39:27 crc kubenswrapper[4558]: I0120 18:39:27.690445 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "mysql-db") pod "87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8" (UID: "87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8"). InnerVolumeSpecName "local-storage10-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 18:39:27 crc kubenswrapper[4558]: I0120 18:39:27.783635 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2njxt\" (UniqueName: \"kubernetes.io/projected/87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8-kube-api-access-2njxt\") on node \"crc\" DevicePath \"\"" Jan 20 18:39:27 crc kubenswrapper[4558]: I0120 18:39:27.783674 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 18:39:27 crc kubenswrapper[4558]: I0120 18:39:27.783685 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8-config-data-default\") on node \"crc\" DevicePath \"\"" Jan 20 18:39:27 crc kubenswrapper[4558]: I0120 18:39:27.783729 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" " Jan 20 18:39:27 crc kubenswrapper[4558]: I0120 18:39:27.793968 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc" Jan 20 18:39:27 crc kubenswrapper[4558]: I0120 18:39:27.886879 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Jan 20 18:39:27 crc kubenswrapper[4558]: I0120 18:39:27.889146 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/openstack-galera-0"] Jan 20 18:39:27 crc kubenswrapper[4558]: I0120 18:39:27.893653 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/openstack-galera-0"] Jan 20 18:39:28 crc kubenswrapper[4558]: I0120 18:39:28.573104 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="160b737f-7377-4d3a-8fff-3b96b9a614bc" path="/var/lib/kubelet/pods/160b737f-7377-4d3a-8fff-3b96b9a614bc/volumes" Jan 20 18:39:28 crc kubenswrapper[4558]: I0120 18:39:28.573893 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2275b006-a890-400e-83b6-1c46bf67f62e" path="/var/lib/kubelet/pods/2275b006-a890-400e-83b6-1c46bf67f62e/volumes" Jan 20 18:39:28 crc kubenswrapper[4558]: I0120 18:39:28.574455 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3fa5954a-a8ea-4d9f-85b9-ccf8e36865a2" 
path="/var/lib/kubelet/pods/3fa5954a-a8ea-4d9f-85b9-ccf8e36865a2/volumes" Jan 20 18:39:28 crc kubenswrapper[4558]: I0120 18:39:28.576213 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8" path="/var/lib/kubelet/pods/87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8/volumes" Jan 20 18:39:29 crc kubenswrapper[4558]: I0120 18:39:29.185019 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/infra-operator-controller-manager-747bd6f68-87w9d"] Jan 20 18:39:29 crc kubenswrapper[4558]: I0120 18:39:29.185295 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/infra-operator-controller-manager-747bd6f68-87w9d" podUID="bb3e1e1b-cfd6-4430-88b7-9a6cdf812bf4" containerName="manager" containerID="cri-o://a92298b518d964f2280c292db6da40b46ef8eee322589502a500c90ff3e91a5c" gracePeriod=10 Jan 20 18:39:29 crc kubenswrapper[4558]: I0120 18:39:29.467878 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/infra-operator-index-6sn8v"] Jan 20 18:39:29 crc kubenswrapper[4558]: I0120 18:39:29.468306 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/infra-operator-index-6sn8v" podUID="324917cd-6180-463d-af83-512250591e1c" containerName="registry-server" containerID="cri-o://259ca3d664b42c5958cc4f36dd51a7880220954c652b939bd270f91d31865d5c" gracePeriod=30 Jan 20 18:39:29 crc kubenswrapper[4558]: I0120 18:39:29.495254 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1xvbdj"] Jan 20 18:39:29 crc kubenswrapper[4558]: I0120 18:39:29.498759 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1xvbdj"] Jan 20 18:39:29 crc kubenswrapper[4558]: I0120 18:39:29.583447 4558 generic.go:334] "Generic (PLEG): container finished" podID="bb3e1e1b-cfd6-4430-88b7-9a6cdf812bf4" containerID="a92298b518d964f2280c292db6da40b46ef8eee322589502a500c90ff3e91a5c" exitCode=0 Jan 20 18:39:29 crc kubenswrapper[4558]: I0120 18:39:29.583493 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-747bd6f68-87w9d" event={"ID":"bb3e1e1b-cfd6-4430-88b7-9a6cdf812bf4","Type":"ContainerDied","Data":"a92298b518d964f2280c292db6da40b46ef8eee322589502a500c90ff3e91a5c"} Jan 20 18:39:29 crc kubenswrapper[4558]: I0120 18:39:29.583519 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-747bd6f68-87w9d" event={"ID":"bb3e1e1b-cfd6-4430-88b7-9a6cdf812bf4","Type":"ContainerDied","Data":"e050002eda81c70183ee7b2b555edc0358400c06d0a42c5e88926a9c7ae90e62"} Jan 20 18:39:29 crc kubenswrapper[4558]: I0120 18:39:29.583532 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e050002eda81c70183ee7b2b555edc0358400c06d0a42c5e88926a9c7ae90e62" Jan 20 18:39:29 crc kubenswrapper[4558]: I0120 18:39:29.584213 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-747bd6f68-87w9d" Jan 20 18:39:29 crc kubenswrapper[4558]: I0120 18:39:29.711296 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/bb3e1e1b-cfd6-4430-88b7-9a6cdf812bf4-apiservice-cert\") pod \"bb3e1e1b-cfd6-4430-88b7-9a6cdf812bf4\" (UID: \"bb3e1e1b-cfd6-4430-88b7-9a6cdf812bf4\") " Jan 20 18:39:29 crc kubenswrapper[4558]: I0120 18:39:29.711369 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n86v8\" (UniqueName: \"kubernetes.io/projected/bb3e1e1b-cfd6-4430-88b7-9a6cdf812bf4-kube-api-access-n86v8\") pod \"bb3e1e1b-cfd6-4430-88b7-9a6cdf812bf4\" (UID: \"bb3e1e1b-cfd6-4430-88b7-9a6cdf812bf4\") " Jan 20 18:39:29 crc kubenswrapper[4558]: I0120 18:39:29.711450 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/bb3e1e1b-cfd6-4430-88b7-9a6cdf812bf4-webhook-cert\") pod \"bb3e1e1b-cfd6-4430-88b7-9a6cdf812bf4\" (UID: \"bb3e1e1b-cfd6-4430-88b7-9a6cdf812bf4\") " Jan 20 18:39:29 crc kubenswrapper[4558]: I0120 18:39:29.716710 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bb3e1e1b-cfd6-4430-88b7-9a6cdf812bf4-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "bb3e1e1b-cfd6-4430-88b7-9a6cdf812bf4" (UID: "bb3e1e1b-cfd6-4430-88b7-9a6cdf812bf4"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:39:29 crc kubenswrapper[4558]: I0120 18:39:29.717288 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bb3e1e1b-cfd6-4430-88b7-9a6cdf812bf4-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "bb3e1e1b-cfd6-4430-88b7-9a6cdf812bf4" (UID: "bb3e1e1b-cfd6-4430-88b7-9a6cdf812bf4"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:39:29 crc kubenswrapper[4558]: I0120 18:39:29.717374 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bb3e1e1b-cfd6-4430-88b7-9a6cdf812bf4-kube-api-access-n86v8" (OuterVolumeSpecName: "kube-api-access-n86v8") pod "bb3e1e1b-cfd6-4430-88b7-9a6cdf812bf4" (UID: "bb3e1e1b-cfd6-4430-88b7-9a6cdf812bf4"). InnerVolumeSpecName "kube-api-access-n86v8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:39:29 crc kubenswrapper[4558]: I0120 18:39:29.814230 4558 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/bb3e1e1b-cfd6-4430-88b7-9a6cdf812bf4-webhook-cert\") on node \"crc\" DevicePath \"\"" Jan 20 18:39:29 crc kubenswrapper[4558]: I0120 18:39:29.814262 4558 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/bb3e1e1b-cfd6-4430-88b7-9a6cdf812bf4-apiservice-cert\") on node \"crc\" DevicePath \"\"" Jan 20 18:39:29 crc kubenswrapper[4558]: I0120 18:39:29.814282 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n86v8\" (UniqueName: \"kubernetes.io/projected/bb3e1e1b-cfd6-4430-88b7-9a6cdf812bf4-kube-api-access-n86v8\") on node \"crc\" DevicePath \"\"" Jan 20 18:39:29 crc kubenswrapper[4558]: I0120 18:39:29.852497 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-index-6sn8v" Jan 20 18:39:30 crc kubenswrapper[4558]: I0120 18:39:30.016606 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-brggs\" (UniqueName: \"kubernetes.io/projected/324917cd-6180-463d-af83-512250591e1c-kube-api-access-brggs\") pod \"324917cd-6180-463d-af83-512250591e1c\" (UID: \"324917cd-6180-463d-af83-512250591e1c\") " Jan 20 18:39:30 crc kubenswrapper[4558]: I0120 18:39:30.021351 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/324917cd-6180-463d-af83-512250591e1c-kube-api-access-brggs" (OuterVolumeSpecName: "kube-api-access-brggs") pod "324917cd-6180-463d-af83-512250591e1c" (UID: "324917cd-6180-463d-af83-512250591e1c"). InnerVolumeSpecName "kube-api-access-brggs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:39:30 crc kubenswrapper[4558]: I0120 18:39:30.119391 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-brggs\" (UniqueName: \"kubernetes.io/projected/324917cd-6180-463d-af83-512250591e1c-kube-api-access-brggs\") on node \"crc\" DevicePath \"\"" Jan 20 18:39:30 crc kubenswrapper[4558]: I0120 18:39:30.576413 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c118d975-3ef3-47f2-bc99-32e6e55d1f19" path="/var/lib/kubelet/pods/c118d975-3ef3-47f2-bc99-32e6e55d1f19/volumes" Jan 20 18:39:30 crc kubenswrapper[4558]: I0120 18:39:30.595148 4558 generic.go:334] "Generic (PLEG): container finished" podID="324917cd-6180-463d-af83-512250591e1c" containerID="259ca3d664b42c5958cc4f36dd51a7880220954c652b939bd270f91d31865d5c" exitCode=0 Jan 20 18:39:30 crc kubenswrapper[4558]: I0120 18:39:30.595279 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-747bd6f68-87w9d" Jan 20 18:39:30 crc kubenswrapper[4558]: I0120 18:39:30.595903 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-index-6sn8v" Jan 20 18:39:30 crc kubenswrapper[4558]: I0120 18:39:30.596092 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-6sn8v" event={"ID":"324917cd-6180-463d-af83-512250591e1c","Type":"ContainerDied","Data":"259ca3d664b42c5958cc4f36dd51a7880220954c652b939bd270f91d31865d5c"} Jan 20 18:39:30 crc kubenswrapper[4558]: I0120 18:39:30.596224 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-6sn8v" event={"ID":"324917cd-6180-463d-af83-512250591e1c","Type":"ContainerDied","Data":"4bf78178f5d89dc44cd1c790fde0c188a3d6e3c45580a48dbdf2fec9a527ecbd"} Jan 20 18:39:30 crc kubenswrapper[4558]: I0120 18:39:30.596269 4558 scope.go:117] "RemoveContainer" containerID="259ca3d664b42c5958cc4f36dd51a7880220954c652b939bd270f91d31865d5c" Jan 20 18:39:30 crc kubenswrapper[4558]: I0120 18:39:30.619075 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/infra-operator-controller-manager-747bd6f68-87w9d"] Jan 20 18:39:30 crc kubenswrapper[4558]: I0120 18:39:30.619735 4558 scope.go:117] "RemoveContainer" containerID="259ca3d664b42c5958cc4f36dd51a7880220954c652b939bd270f91d31865d5c" Jan 20 18:39:30 crc kubenswrapper[4558]: E0120 18:39:30.620380 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"259ca3d664b42c5958cc4f36dd51a7880220954c652b939bd270f91d31865d5c\": container with ID starting with 259ca3d664b42c5958cc4f36dd51a7880220954c652b939bd270f91d31865d5c not found: ID does not exist" containerID="259ca3d664b42c5958cc4f36dd51a7880220954c652b939bd270f91d31865d5c" Jan 20 18:39:30 crc kubenswrapper[4558]: I0120 18:39:30.620418 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"259ca3d664b42c5958cc4f36dd51a7880220954c652b939bd270f91d31865d5c"} err="failed to get container status \"259ca3d664b42c5958cc4f36dd51a7880220954c652b939bd270f91d31865d5c\": rpc error: code = NotFound desc = could not find container \"259ca3d664b42c5958cc4f36dd51a7880220954c652b939bd270f91d31865d5c\": container with ID starting with 259ca3d664b42c5958cc4f36dd51a7880220954c652b939bd270f91d31865d5c not found: ID does not exist" Jan 20 18:39:30 crc kubenswrapper[4558]: I0120 18:39:30.622731 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/infra-operator-controller-manager-747bd6f68-87w9d"] Jan 20 18:39:30 crc kubenswrapper[4558]: I0120 18:39:30.626029 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/infra-operator-index-6sn8v"] Jan 20 18:39:30 crc kubenswrapper[4558]: I0120 18:39:30.630199 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/infra-operator-index-6sn8v"] Jan 20 18:39:31 crc kubenswrapper[4558]: I0120 18:39:31.671736 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-5688b8d95d-l7tf9"] Jan 20 18:39:31 crc kubenswrapper[4558]: I0120 18:39:31.671940 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/mariadb-operator-controller-manager-5688b8d95d-l7tf9" podUID="f5670489-0e74-40ed-840e-af9195e82cb8" containerName="manager" containerID="cri-o://eb59b04404dcdff3fada47ce0a239839d36bf3a15fe82bdfc183a2501e1fdbc0" gracePeriod=10 Jan 20 18:39:31 crc kubenswrapper[4558]: I0120 18:39:31.891944 4558 kubelet.go:2437] "SyncLoop 
DELETE" source="api" pods=["openstack-operators/mariadb-operator-index-pbh4r"] Jan 20 18:39:31 crc kubenswrapper[4558]: I0120 18:39:31.892408 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/mariadb-operator-index-pbh4r" podUID="eff4955d-0ba2-4419-9091-dd993572c188" containerName="registry-server" containerID="cri-o://eb7dea73309f3fd70529e505b3a7e84e6a34f2bcebf008857bf8b2bd822984ab" gracePeriod=30 Jan 20 18:39:31 crc kubenswrapper[4558]: I0120 18:39:31.934080 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720bhxkps"] Jan 20 18:39:31 crc kubenswrapper[4558]: I0120 18:39:31.953754 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720bhxkps"] Jan 20 18:39:32 crc kubenswrapper[4558]: I0120 18:39:32.267815 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-5688b8d95d-l7tf9" Jan 20 18:39:32 crc kubenswrapper[4558]: I0120 18:39:32.364534 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7g6gt\" (UniqueName: \"kubernetes.io/projected/f5670489-0e74-40ed-840e-af9195e82cb8-kube-api-access-7g6gt\") pod \"f5670489-0e74-40ed-840e-af9195e82cb8\" (UID: \"f5670489-0e74-40ed-840e-af9195e82cb8\") " Jan 20 18:39:32 crc kubenswrapper[4558]: I0120 18:39:32.364666 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/f5670489-0e74-40ed-840e-af9195e82cb8-apiservice-cert\") pod \"f5670489-0e74-40ed-840e-af9195e82cb8\" (UID: \"f5670489-0e74-40ed-840e-af9195e82cb8\") " Jan 20 18:39:32 crc kubenswrapper[4558]: I0120 18:39:32.364757 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/f5670489-0e74-40ed-840e-af9195e82cb8-webhook-cert\") pod \"f5670489-0e74-40ed-840e-af9195e82cb8\" (UID: \"f5670489-0e74-40ed-840e-af9195e82cb8\") " Jan 20 18:39:32 crc kubenswrapper[4558]: I0120 18:39:32.369839 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f5670489-0e74-40ed-840e-af9195e82cb8-kube-api-access-7g6gt" (OuterVolumeSpecName: "kube-api-access-7g6gt") pod "f5670489-0e74-40ed-840e-af9195e82cb8" (UID: "f5670489-0e74-40ed-840e-af9195e82cb8"). InnerVolumeSpecName "kube-api-access-7g6gt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:39:32 crc kubenswrapper[4558]: I0120 18:39:32.369977 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5670489-0e74-40ed-840e-af9195e82cb8-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "f5670489-0e74-40ed-840e-af9195e82cb8" (UID: "f5670489-0e74-40ed-840e-af9195e82cb8"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:39:32 crc kubenswrapper[4558]: I0120 18:39:32.370126 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5670489-0e74-40ed-840e-af9195e82cb8-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "f5670489-0e74-40ed-840e-af9195e82cb8" (UID: "f5670489-0e74-40ed-840e-af9195e82cb8"). InnerVolumeSpecName "apiservice-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:39:32 crc kubenswrapper[4558]: I0120 18:39:32.386328 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-index-pbh4r" Jan 20 18:39:32 crc kubenswrapper[4558]: I0120 18:39:32.467231 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bmhgd\" (UniqueName: \"kubernetes.io/projected/eff4955d-0ba2-4419-9091-dd993572c188-kube-api-access-bmhgd\") pod \"eff4955d-0ba2-4419-9091-dd993572c188\" (UID: \"eff4955d-0ba2-4419-9091-dd993572c188\") " Jan 20 18:39:32 crc kubenswrapper[4558]: I0120 18:39:32.467831 4558 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/f5670489-0e74-40ed-840e-af9195e82cb8-apiservice-cert\") on node \"crc\" DevicePath \"\"" Jan 20 18:39:32 crc kubenswrapper[4558]: I0120 18:39:32.467851 4558 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/f5670489-0e74-40ed-840e-af9195e82cb8-webhook-cert\") on node \"crc\" DevicePath \"\"" Jan 20 18:39:32 crc kubenswrapper[4558]: I0120 18:39:32.467870 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7g6gt\" (UniqueName: \"kubernetes.io/projected/f5670489-0e74-40ed-840e-af9195e82cb8-kube-api-access-7g6gt\") on node \"crc\" DevicePath \"\"" Jan 20 18:39:32 crc kubenswrapper[4558]: I0120 18:39:32.469787 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eff4955d-0ba2-4419-9091-dd993572c188-kube-api-access-bmhgd" (OuterVolumeSpecName: "kube-api-access-bmhgd") pod "eff4955d-0ba2-4419-9091-dd993572c188" (UID: "eff4955d-0ba2-4419-9091-dd993572c188"). InnerVolumeSpecName "kube-api-access-bmhgd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:39:32 crc kubenswrapper[4558]: I0120 18:39:32.570438 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bmhgd\" (UniqueName: \"kubernetes.io/projected/eff4955d-0ba2-4419-9091-dd993572c188-kube-api-access-bmhgd\") on node \"crc\" DevicePath \"\"" Jan 20 18:39:32 crc kubenswrapper[4558]: I0120 18:39:32.574551 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="324917cd-6180-463d-af83-512250591e1c" path="/var/lib/kubelet/pods/324917cd-6180-463d-af83-512250591e1c/volumes" Jan 20 18:39:32 crc kubenswrapper[4558]: I0120 18:39:32.575229 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bb3e1e1b-cfd6-4430-88b7-9a6cdf812bf4" path="/var/lib/kubelet/pods/bb3e1e1b-cfd6-4430-88b7-9a6cdf812bf4/volumes" Jan 20 18:39:32 crc kubenswrapper[4558]: I0120 18:39:32.575869 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ee676b7f-b767-4c0c-b131-367637d0d7da" path="/var/lib/kubelet/pods/ee676b7f-b767-4c0c-b131-367637d0d7da/volumes" Jan 20 18:39:32 crc kubenswrapper[4558]: I0120 18:39:32.613792 4558 generic.go:334] "Generic (PLEG): container finished" podID="f5670489-0e74-40ed-840e-af9195e82cb8" containerID="eb59b04404dcdff3fada47ce0a239839d36bf3a15fe82bdfc183a2501e1fdbc0" exitCode=0 Jan 20 18:39:32 crc kubenswrapper[4558]: I0120 18:39:32.613857 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-5688b8d95d-l7tf9" event={"ID":"f5670489-0e74-40ed-840e-af9195e82cb8","Type":"ContainerDied","Data":"eb59b04404dcdff3fada47ce0a239839d36bf3a15fe82bdfc183a2501e1fdbc0"} Jan 20 18:39:32 crc kubenswrapper[4558]: I0120 18:39:32.613886 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-5688b8d95d-l7tf9" event={"ID":"f5670489-0e74-40ed-840e-af9195e82cb8","Type":"ContainerDied","Data":"4836e41cff3b5bf57951f09c0e48340606015e490f5b67705589fbd181a33212"} Jan 20 18:39:32 crc kubenswrapper[4558]: I0120 18:39:32.613906 4558 scope.go:117] "RemoveContainer" containerID="eb59b04404dcdff3fada47ce0a239839d36bf3a15fe82bdfc183a2501e1fdbc0" Jan 20 18:39:32 crc kubenswrapper[4558]: I0120 18:39:32.614020 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-5688b8d95d-l7tf9" Jan 20 18:39:32 crc kubenswrapper[4558]: I0120 18:39:32.616915 4558 generic.go:334] "Generic (PLEG): container finished" podID="eff4955d-0ba2-4419-9091-dd993572c188" containerID="eb7dea73309f3fd70529e505b3a7e84e6a34f2bcebf008857bf8b2bd822984ab" exitCode=0 Jan 20 18:39:32 crc kubenswrapper[4558]: I0120 18:39:32.616943 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-pbh4r" event={"ID":"eff4955d-0ba2-4419-9091-dd993572c188","Type":"ContainerDied","Data":"eb7dea73309f3fd70529e505b3a7e84e6a34f2bcebf008857bf8b2bd822984ab"} Jan 20 18:39:32 crc kubenswrapper[4558]: I0120 18:39:32.617072 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-pbh4r" event={"ID":"eff4955d-0ba2-4419-9091-dd993572c188","Type":"ContainerDied","Data":"e3f0762a70f482a7cb7c5ba0090597aa172152eb45e1669bdd2b9bf5f9312e27"} Jan 20 18:39:32 crc kubenswrapper[4558]: I0120 18:39:32.617023 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-index-pbh4r" Jan 20 18:39:32 crc kubenswrapper[4558]: I0120 18:39:32.631771 4558 scope.go:117] "RemoveContainer" containerID="eb59b04404dcdff3fada47ce0a239839d36bf3a15fe82bdfc183a2501e1fdbc0" Jan 20 18:39:32 crc kubenswrapper[4558]: I0120 18:39:32.632250 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-5688b8d95d-l7tf9"] Jan 20 18:39:32 crc kubenswrapper[4558]: E0120 18:39:32.632500 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eb59b04404dcdff3fada47ce0a239839d36bf3a15fe82bdfc183a2501e1fdbc0\": container with ID starting with eb59b04404dcdff3fada47ce0a239839d36bf3a15fe82bdfc183a2501e1fdbc0 not found: ID does not exist" containerID="eb59b04404dcdff3fada47ce0a239839d36bf3a15fe82bdfc183a2501e1fdbc0" Jan 20 18:39:32 crc kubenswrapper[4558]: I0120 18:39:32.632626 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eb59b04404dcdff3fada47ce0a239839d36bf3a15fe82bdfc183a2501e1fdbc0"} err="failed to get container status \"eb59b04404dcdff3fada47ce0a239839d36bf3a15fe82bdfc183a2501e1fdbc0\": rpc error: code = NotFound desc = could not find container \"eb59b04404dcdff3fada47ce0a239839d36bf3a15fe82bdfc183a2501e1fdbc0\": container with ID starting with eb59b04404dcdff3fada47ce0a239839d36bf3a15fe82bdfc183a2501e1fdbc0 not found: ID does not exist" Jan 20 18:39:32 crc kubenswrapper[4558]: I0120 18:39:32.632731 4558 scope.go:117] "RemoveContainer" containerID="eb7dea73309f3fd70529e505b3a7e84e6a34f2bcebf008857bf8b2bd822984ab" Jan 20 18:39:32 crc kubenswrapper[4558]: I0120 18:39:32.636903 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-5688b8d95d-l7tf9"] Jan 20 18:39:32 crc kubenswrapper[4558]: I0120 18:39:32.641425 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/mariadb-operator-index-pbh4r"] Jan 20 18:39:32 crc kubenswrapper[4558]: I0120 18:39:32.644445 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/mariadb-operator-index-pbh4r"] Jan 20 18:39:32 crc kubenswrapper[4558]: I0120 18:39:32.648913 4558 scope.go:117] "RemoveContainer" containerID="eb7dea73309f3fd70529e505b3a7e84e6a34f2bcebf008857bf8b2bd822984ab" Jan 20 18:39:32 crc kubenswrapper[4558]: E0120 18:39:32.649412 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eb7dea73309f3fd70529e505b3a7e84e6a34f2bcebf008857bf8b2bd822984ab\": container with ID starting with eb7dea73309f3fd70529e505b3a7e84e6a34f2bcebf008857bf8b2bd822984ab not found: ID does not exist" containerID="eb7dea73309f3fd70529e505b3a7e84e6a34f2bcebf008857bf8b2bd822984ab" Jan 20 18:39:32 crc kubenswrapper[4558]: I0120 18:39:32.649494 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eb7dea73309f3fd70529e505b3a7e84e6a34f2bcebf008857bf8b2bd822984ab"} err="failed to get container status \"eb7dea73309f3fd70529e505b3a7e84e6a34f2bcebf008857bf8b2bd822984ab\": rpc error: code = NotFound desc = could not find container \"eb7dea73309f3fd70529e505b3a7e84e6a34f2bcebf008857bf8b2bd822984ab\": container with ID starting with eb7dea73309f3fd70529e505b3a7e84e6a34f2bcebf008857bf8b2bd822984ab not found: ID does not exist" Jan 20 18:39:34 crc kubenswrapper[4558]: I0120 18:39:34.201751 4558 
kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-779fc9694b-slk8n"] Jan 20 18:39:34 crc kubenswrapper[4558]: I0120 18:39:34.202691 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-slk8n" podUID="55572588-adf6-4e8b-9bb4-f6a7ea3e5b20" containerName="operator" containerID="cri-o://72e477c181eb6b52f76e97a2dea9cd3b48d28509a743930b7e1d03ca393c5a9a" gracePeriod=10 Jan 20 18:39:34 crc kubenswrapper[4558]: I0120 18:39:34.484311 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-pt2sm"] Jan 20 18:39:34 crc kubenswrapper[4558]: I0120 18:39:34.484815 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/rabbitmq-cluster-operator-index-pt2sm" podUID="4e175dfb-0ef0-492f-800c-8214356c0573" containerName="registry-server" containerID="cri-o://63761e2dc61ed042265b21210335a7fa7edc18776026efc2301859ca8e2dbf42" gracePeriod=30 Jan 20 18:39:34 crc kubenswrapper[4558]: I0120 18:39:34.513759 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590xcj9f"] Jan 20 18:39:34 crc kubenswrapper[4558]: I0120 18:39:34.518444 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590xcj9f"] Jan 20 18:39:34 crc kubenswrapper[4558]: I0120 18:39:34.575226 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25c79886-6ce6-4b8d-b2a1-4b2ae7d9a686" path="/var/lib/kubelet/pods/25c79886-6ce6-4b8d-b2a1-4b2ae7d9a686/volumes" Jan 20 18:39:34 crc kubenswrapper[4558]: I0120 18:39:34.576055 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eff4955d-0ba2-4419-9091-dd993572c188" path="/var/lib/kubelet/pods/eff4955d-0ba2-4419-9091-dd993572c188/volumes" Jan 20 18:39:34 crc kubenswrapper[4558]: I0120 18:39:34.576546 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f5670489-0e74-40ed-840e-af9195e82cb8" path="/var/lib/kubelet/pods/f5670489-0e74-40ed-840e-af9195e82cb8/volumes" Jan 20 18:39:34 crc kubenswrapper[4558]: I0120 18:39:34.634519 4558 generic.go:334] "Generic (PLEG): container finished" podID="4e175dfb-0ef0-492f-800c-8214356c0573" containerID="63761e2dc61ed042265b21210335a7fa7edc18776026efc2301859ca8e2dbf42" exitCode=0 Jan 20 18:39:34 crc kubenswrapper[4558]: I0120 18:39:34.634600 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-pt2sm" event={"ID":"4e175dfb-0ef0-492f-800c-8214356c0573","Type":"ContainerDied","Data":"63761e2dc61ed042265b21210335a7fa7edc18776026efc2301859ca8e2dbf42"} Jan 20 18:39:34 crc kubenswrapper[4558]: I0120 18:39:34.636336 4558 generic.go:334] "Generic (PLEG): container finished" podID="55572588-adf6-4e8b-9bb4-f6a7ea3e5b20" containerID="72e477c181eb6b52f76e97a2dea9cd3b48d28509a743930b7e1d03ca393c5a9a" exitCode=0 Jan 20 18:39:34 crc kubenswrapper[4558]: I0120 18:39:34.636379 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-slk8n" event={"ID":"55572588-adf6-4e8b-9bb4-f6a7ea3e5b20","Type":"ContainerDied","Data":"72e477c181eb6b52f76e97a2dea9cd3b48d28509a743930b7e1d03ca393c5a9a"} Jan 20 18:39:34 crc kubenswrapper[4558]: I0120 18:39:34.636412 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-slk8n" event={"ID":"55572588-adf6-4e8b-9bb4-f6a7ea3e5b20","Type":"ContainerDied","Data":"0e6fbc018ca3d32643a5f5495068ec4e56be7d1595030279b394ba8087eddd90"} Jan 20 18:39:34 crc kubenswrapper[4558]: I0120 18:39:34.636425 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0e6fbc018ca3d32643a5f5495068ec4e56be7d1595030279b394ba8087eddd90" Jan 20 18:39:34 crc kubenswrapper[4558]: I0120 18:39:34.639608 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-slk8n" Jan 20 18:39:34 crc kubenswrapper[4558]: I0120 18:39:34.805201 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nvw92\" (UniqueName: \"kubernetes.io/projected/55572588-adf6-4e8b-9bb4-f6a7ea3e5b20-kube-api-access-nvw92\") pod \"55572588-adf6-4e8b-9bb4-f6a7ea3e5b20\" (UID: \"55572588-adf6-4e8b-9bb4-f6a7ea3e5b20\") " Jan 20 18:39:34 crc kubenswrapper[4558]: I0120 18:39:34.811761 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/55572588-adf6-4e8b-9bb4-f6a7ea3e5b20-kube-api-access-nvw92" (OuterVolumeSpecName: "kube-api-access-nvw92") pod "55572588-adf6-4e8b-9bb4-f6a7ea3e5b20" (UID: "55572588-adf6-4e8b-9bb4-f6a7ea3e5b20"). InnerVolumeSpecName "kube-api-access-nvw92". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:39:34 crc kubenswrapper[4558]: I0120 18:39:34.829746 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-pt2sm" Jan 20 18:39:34 crc kubenswrapper[4558]: I0120 18:39:34.907930 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nvw92\" (UniqueName: \"kubernetes.io/projected/55572588-adf6-4e8b-9bb4-f6a7ea3e5b20-kube-api-access-nvw92\") on node \"crc\" DevicePath \"\"" Jan 20 18:39:35 crc kubenswrapper[4558]: I0120 18:39:35.009440 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p6m76\" (UniqueName: \"kubernetes.io/projected/4e175dfb-0ef0-492f-800c-8214356c0573-kube-api-access-p6m76\") pod \"4e175dfb-0ef0-492f-800c-8214356c0573\" (UID: \"4e175dfb-0ef0-492f-800c-8214356c0573\") " Jan 20 18:39:35 crc kubenswrapper[4558]: I0120 18:39:35.012831 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e175dfb-0ef0-492f-800c-8214356c0573-kube-api-access-p6m76" (OuterVolumeSpecName: "kube-api-access-p6m76") pod "4e175dfb-0ef0-492f-800c-8214356c0573" (UID: "4e175dfb-0ef0-492f-800c-8214356c0573"). InnerVolumeSpecName "kube-api-access-p6m76". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:39:35 crc kubenswrapper[4558]: I0120 18:39:35.111272 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p6m76\" (UniqueName: \"kubernetes.io/projected/4e175dfb-0ef0-492f-800c-8214356c0573-kube-api-access-p6m76\") on node \"crc\" DevicePath \"\"" Jan 20 18:39:35 crc kubenswrapper[4558]: I0120 18:39:35.649770 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-pt2sm" Jan 20 18:39:35 crc kubenswrapper[4558]: I0120 18:39:35.649806 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-pt2sm" event={"ID":"4e175dfb-0ef0-492f-800c-8214356c0573","Type":"ContainerDied","Data":"4ca45ec9b6dd20d740311e96f8a8ed0926b0261ceb9eb0a9b7e6c86a90402b25"} Jan 20 18:39:35 crc kubenswrapper[4558]: I0120 18:39:35.649882 4558 scope.go:117] "RemoveContainer" containerID="63761e2dc61ed042265b21210335a7fa7edc18776026efc2301859ca8e2dbf42" Jan 20 18:39:35 crc kubenswrapper[4558]: I0120 18:39:35.649772 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-slk8n" Jan 20 18:39:35 crc kubenswrapper[4558]: I0120 18:39:35.685837 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-779fc9694b-slk8n"] Jan 20 18:39:35 crc kubenswrapper[4558]: I0120 18:39:35.689123 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-779fc9694b-slk8n"] Jan 20 18:39:35 crc kubenswrapper[4558]: I0120 18:39:35.693525 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-pt2sm"] Jan 20 18:39:35 crc kubenswrapper[4558]: I0120 18:39:35.726180 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-pt2sm"] Jan 20 18:39:36 crc kubenswrapper[4558]: I0120 18:39:36.578796 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4e175dfb-0ef0-492f-800c-8214356c0573" path="/var/lib/kubelet/pods/4e175dfb-0ef0-492f-800c-8214356c0573/volumes" Jan 20 18:39:36 crc kubenswrapper[4558]: I0120 18:39:36.579439 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="55572588-adf6-4e8b-9bb4-f6a7ea3e5b20" path="/var/lib/kubelet/pods/55572588-adf6-4e8b-9bb4-f6a7ea3e5b20/volumes" Jan 20 18:39:38 crc kubenswrapper[4558]: I0120 18:39:38.308766 4558 scope.go:117] "RemoveContainer" containerID="fb3b484fdacf4047640d92be4fe8ac29f980071033dab0c8b3c5bce28a7156a6" Jan 20 18:39:38 crc kubenswrapper[4558]: I0120 18:39:38.331560 4558 scope.go:117] "RemoveContainer" containerID="2e29da524f592a697143c38bb0d212b939a7c38b4c08a93de96922c759e5eebc" Jan 20 18:39:38 crc kubenswrapper[4558]: I0120 18:39:38.347061 4558 scope.go:117] "RemoveContainer" containerID="2f88680eaf60259826ab0c83065520226713ec29255c97c5b37980455d9912b6" Jan 20 18:39:38 crc kubenswrapper[4558]: I0120 18:39:38.364521 4558 scope.go:117] "RemoveContainer" containerID="fa21287492bfb32f5e9b259744d62f18dca24a5edf9250c51898064aace7be93" Jan 20 18:39:38 crc kubenswrapper[4558]: I0120 18:39:38.381127 4558 scope.go:117] "RemoveContainer" containerID="a64d1244baf80a996a2de72c19922e9ca8daf41bc6171fe988000114c74ab21a" Jan 20 18:39:38 crc kubenswrapper[4558]: I0120 18:39:38.396059 4558 scope.go:117] "RemoveContainer" containerID="0a5c8461fb756193f036809020d399e4876cde6f4c0616cc6e432a0d968f3d8d" Jan 20 18:39:38 crc kubenswrapper[4558]: I0120 18:39:38.410934 4558 scope.go:117] "RemoveContainer" containerID="d9887ff06ad881226c9a4aa8c0ee5b08ec17cfbe8e8b1d95f62b993e378587bc" Jan 20 18:39:38 crc kubenswrapper[4558]: I0120 18:39:38.425366 4558 scope.go:117] "RemoveContainer" containerID="7d0dbcca03ee91ec21f2caeaee699b7e50531520e6a7fb08ecc6d21752a4d41f" Jan 20 18:39:38 crc kubenswrapper[4558]: I0120 18:39:38.439085 4558 
scope.go:117] "RemoveContainer" containerID="5db8654eac950361871a8d5fdc2ebce705c3d9091c0510295e7562add18a39a6" Jan 20 18:39:38 crc kubenswrapper[4558]: I0120 18:39:38.449878 4558 scope.go:117] "RemoveContainer" containerID="83714c81f77d105070d2595369995b6be30f90fb7765400aef38416b6a88de94" Jan 20 18:39:38 crc kubenswrapper[4558]: I0120 18:39:38.462989 4558 scope.go:117] "RemoveContainer" containerID="4a3ec481e5c67206b7856e248ae59f098c0a8c38bf85b643d55b84d90dfc42d0" Jan 20 18:39:38 crc kubenswrapper[4558]: I0120 18:39:38.479189 4558 scope.go:117] "RemoveContainer" containerID="72e477c181eb6b52f76e97a2dea9cd3b48d28509a743930b7e1d03ca393c5a9a" Jan 20 18:39:38 crc kubenswrapper[4558]: I0120 18:39:38.494995 4558 scope.go:117] "RemoveContainer" containerID="06d841c360d31f6671b5eda177973450a54a9f90fd709b0b8b7da92429406463" Jan 20 18:39:38 crc kubenswrapper[4558]: I0120 18:39:38.510724 4558 scope.go:117] "RemoveContainer" containerID="a92298b518d964f2280c292db6da40b46ef8eee322589502a500c90ff3e91a5c" Jan 20 18:39:38 crc kubenswrapper[4558]: I0120 18:39:38.566137 4558 scope.go:117] "RemoveContainer" containerID="77ce04c6daee7d4f9b776bd1c80b448f9c4750d5cb5c7dd182ff28963a4bcf99" Jan 20 18:39:38 crc kubenswrapper[4558]: E0120 18:39:38.566450 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:39:53 crc kubenswrapper[4558]: I0120 18:39:53.566249 4558 scope.go:117] "RemoveContainer" containerID="77ce04c6daee7d4f9b776bd1c80b448f9c4750d5cb5c7dd182ff28963a4bcf99" Jan 20 18:39:53 crc kubenswrapper[4558]: E0120 18:39:53.567200 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:40:08 crc kubenswrapper[4558]: I0120 18:40:08.566484 4558 scope.go:117] "RemoveContainer" containerID="77ce04c6daee7d4f9b776bd1c80b448f9c4750d5cb5c7dd182ff28963a4bcf99" Jan 20 18:40:08 crc kubenswrapper[4558]: E0120 18:40:08.567339 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:40:20 crc kubenswrapper[4558]: I0120 18:40:20.872976 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-index-wf2wm"] Jan 20 18:40:20 crc kubenswrapper[4558]: E0120 18:40:20.873847 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55572588-adf6-4e8b-9bb4-f6a7ea3e5b20" containerName="operator" Jan 20 18:40:20 crc kubenswrapper[4558]: I0120 18:40:20.873865 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="55572588-adf6-4e8b-9bb4-f6a7ea3e5b20" 
containerName="operator" Jan 20 18:40:20 crc kubenswrapper[4558]: E0120 18:40:20.873875 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5670489-0e74-40ed-840e-af9195e82cb8" containerName="manager" Jan 20 18:40:20 crc kubenswrapper[4558]: I0120 18:40:20.873880 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5670489-0e74-40ed-840e-af9195e82cb8" containerName="manager" Jan 20 18:40:20 crc kubenswrapper[4558]: E0120 18:40:20.873888 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8" containerName="mysql-bootstrap" Jan 20 18:40:20 crc kubenswrapper[4558]: I0120 18:40:20.873893 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8" containerName="mysql-bootstrap" Jan 20 18:40:20 crc kubenswrapper[4558]: E0120 18:40:20.873908 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="160b737f-7377-4d3a-8fff-3b96b9a614bc" containerName="galera" Jan 20 18:40:20 crc kubenswrapper[4558]: I0120 18:40:20.873913 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="160b737f-7377-4d3a-8fff-3b96b9a614bc" containerName="galera" Jan 20 18:40:20 crc kubenswrapper[4558]: E0120 18:40:20.873918 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c70e04a6-d794-4290-ac24-1fecf80b3d41" containerName="memcached" Jan 20 18:40:20 crc kubenswrapper[4558]: I0120 18:40:20.873923 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c70e04a6-d794-4290-ac24-1fecf80b3d41" containerName="memcached" Jan 20 18:40:20 crc kubenswrapper[4558]: E0120 18:40:20.873935 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="324917cd-6180-463d-af83-512250591e1c" containerName="registry-server" Jan 20 18:40:20 crc kubenswrapper[4558]: I0120 18:40:20.873941 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="324917cd-6180-463d-af83-512250591e1c" containerName="registry-server" Jan 20 18:40:20 crc kubenswrapper[4558]: E0120 18:40:20.873950 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e1d554f-8358-4ab1-ac14-764882a73ed2" containerName="manager" Jan 20 18:40:20 crc kubenswrapper[4558]: I0120 18:40:20.873955 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e1d554f-8358-4ab1-ac14-764882a73ed2" containerName="manager" Jan 20 18:40:20 crc kubenswrapper[4558]: E0120 18:40:20.873965 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b3e4088-de47-4e50-b0a9-ad92c99d033d" containerName="mysql-bootstrap" Jan 20 18:40:20 crc kubenswrapper[4558]: I0120 18:40:20.873970 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b3e4088-de47-4e50-b0a9-ad92c99d033d" containerName="mysql-bootstrap" Jan 20 18:40:20 crc kubenswrapper[4558]: E0120 18:40:20.873977 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e175dfb-0ef0-492f-800c-8214356c0573" containerName="registry-server" Jan 20 18:40:20 crc kubenswrapper[4558]: I0120 18:40:20.873982 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e175dfb-0ef0-492f-800c-8214356c0573" containerName="registry-server" Jan 20 18:40:20 crc kubenswrapper[4558]: E0120 18:40:20.873990 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2275b006-a890-400e-83b6-1c46bf67f62e" containerName="rabbitmq" Jan 20 18:40:20 crc kubenswrapper[4558]: I0120 18:40:20.873995 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="2275b006-a890-400e-83b6-1c46bf67f62e" containerName="rabbitmq" Jan 20 18:40:20 crc 
kubenswrapper[4558]: E0120 18:40:20.874003 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b3e4088-de47-4e50-b0a9-ad92c99d033d" containerName="galera" Jan 20 18:40:20 crc kubenswrapper[4558]: I0120 18:40:20.874008 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b3e4088-de47-4e50-b0a9-ad92c99d033d" containerName="galera" Jan 20 18:40:20 crc kubenswrapper[4558]: E0120 18:40:20.874016 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2275b006-a890-400e-83b6-1c46bf67f62e" containerName="setup-container" Jan 20 18:40:20 crc kubenswrapper[4558]: I0120 18:40:20.874021 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="2275b006-a890-400e-83b6-1c46bf67f62e" containerName="setup-container" Jan 20 18:40:20 crc kubenswrapper[4558]: E0120 18:40:20.874027 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eff4955d-0ba2-4419-9091-dd993572c188" containerName="registry-server" Jan 20 18:40:20 crc kubenswrapper[4558]: I0120 18:40:20.874033 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="eff4955d-0ba2-4419-9091-dd993572c188" containerName="registry-server" Jan 20 18:40:20 crc kubenswrapper[4558]: E0120 18:40:20.874042 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="160b737f-7377-4d3a-8fff-3b96b9a614bc" containerName="mysql-bootstrap" Jan 20 18:40:20 crc kubenswrapper[4558]: I0120 18:40:20.874050 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="160b737f-7377-4d3a-8fff-3b96b9a614bc" containerName="mysql-bootstrap" Jan 20 18:40:20 crc kubenswrapper[4558]: E0120 18:40:20.874058 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8" containerName="galera" Jan 20 18:40:20 crc kubenswrapper[4558]: I0120 18:40:20.874063 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8" containerName="galera" Jan 20 18:40:20 crc kubenswrapper[4558]: E0120 18:40:20.874083 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3fa5954a-a8ea-4d9f-85b9-ccf8e36865a2" containerName="registry-server" Jan 20 18:40:20 crc kubenswrapper[4558]: I0120 18:40:20.874089 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="3fa5954a-a8ea-4d9f-85b9-ccf8e36865a2" containerName="registry-server" Jan 20 18:40:20 crc kubenswrapper[4558]: E0120 18:40:20.874097 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb3e1e1b-cfd6-4430-88b7-9a6cdf812bf4" containerName="manager" Jan 20 18:40:20 crc kubenswrapper[4558]: I0120 18:40:20.874102 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb3e1e1b-cfd6-4430-88b7-9a6cdf812bf4" containerName="manager" Jan 20 18:40:20 crc kubenswrapper[4558]: I0120 18:40:20.874254 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5670489-0e74-40ed-840e-af9195e82cb8" containerName="manager" Jan 20 18:40:20 crc kubenswrapper[4558]: I0120 18:40:20.874270 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="3b3e4088-de47-4e50-b0a9-ad92c99d033d" containerName="galera" Jan 20 18:40:20 crc kubenswrapper[4558]: I0120 18:40:20.874280 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="3fa5954a-a8ea-4d9f-85b9-ccf8e36865a2" containerName="registry-server" Jan 20 18:40:20 crc kubenswrapper[4558]: I0120 18:40:20.874288 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="7e1d554f-8358-4ab1-ac14-764882a73ed2" containerName="manager" Jan 20 18:40:20 crc kubenswrapper[4558]: I0120 
18:40:20.874296 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="324917cd-6180-463d-af83-512250591e1c" containerName="registry-server" Jan 20 18:40:20 crc kubenswrapper[4558]: I0120 18:40:20.874304 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c70e04a6-d794-4290-ac24-1fecf80b3d41" containerName="memcached" Jan 20 18:40:20 crc kubenswrapper[4558]: I0120 18:40:20.874314 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="87dd79f4-9a5b-4370-9ef8-3101a2b8bfb8" containerName="galera" Jan 20 18:40:20 crc kubenswrapper[4558]: I0120 18:40:20.874320 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="eff4955d-0ba2-4419-9091-dd993572c188" containerName="registry-server" Jan 20 18:40:20 crc kubenswrapper[4558]: I0120 18:40:20.874328 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="bb3e1e1b-cfd6-4430-88b7-9a6cdf812bf4" containerName="manager" Jan 20 18:40:20 crc kubenswrapper[4558]: I0120 18:40:20.874335 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="55572588-adf6-4e8b-9bb4-f6a7ea3e5b20" containerName="operator" Jan 20 18:40:20 crc kubenswrapper[4558]: I0120 18:40:20.874342 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e175dfb-0ef0-492f-800c-8214356c0573" containerName="registry-server" Jan 20 18:40:20 crc kubenswrapper[4558]: I0120 18:40:20.874352 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="160b737f-7377-4d3a-8fff-3b96b9a614bc" containerName="galera" Jan 20 18:40:20 crc kubenswrapper[4558]: I0120 18:40:20.874358 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="2275b006-a890-400e-83b6-1c46bf67f62e" containerName="rabbitmq" Jan 20 18:40:20 crc kubenswrapper[4558]: I0120 18:40:20.874800 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-index-wf2wm" Jan 20 18:40:20 crc kubenswrapper[4558]: I0120 18:40:20.877139 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Jan 20 18:40:20 crc kubenswrapper[4558]: I0120 18:40:20.878299 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-index-dockercfg-95z7z" Jan 20 18:40:20 crc kubenswrapper[4558]: I0120 18:40:20.879438 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Jan 20 18:40:20 crc kubenswrapper[4558]: I0120 18:40:20.885139 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-index-wf2wm"] Jan 20 18:40:21 crc kubenswrapper[4558]: I0120 18:40:21.042995 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j58s2\" (UniqueName: \"kubernetes.io/projected/9dc9812b-eeef-4ab4-9e54-b9dba919edba-kube-api-access-j58s2\") pod \"mariadb-operator-index-wf2wm\" (UID: \"9dc9812b-eeef-4ab4-9e54-b9dba919edba\") " pod="openstack-operators/mariadb-operator-index-wf2wm" Jan 20 18:40:21 crc kubenswrapper[4558]: I0120 18:40:21.145413 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j58s2\" (UniqueName: \"kubernetes.io/projected/9dc9812b-eeef-4ab4-9e54-b9dba919edba-kube-api-access-j58s2\") pod \"mariadb-operator-index-wf2wm\" (UID: \"9dc9812b-eeef-4ab4-9e54-b9dba919edba\") " pod="openstack-operators/mariadb-operator-index-wf2wm" Jan 20 18:40:21 crc kubenswrapper[4558]: I0120 18:40:21.164305 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j58s2\" (UniqueName: \"kubernetes.io/projected/9dc9812b-eeef-4ab4-9e54-b9dba919edba-kube-api-access-j58s2\") pod \"mariadb-operator-index-wf2wm\" (UID: \"9dc9812b-eeef-4ab4-9e54-b9dba919edba\") " pod="openstack-operators/mariadb-operator-index-wf2wm" Jan 20 18:40:21 crc kubenswrapper[4558]: I0120 18:40:21.195186 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-index-wf2wm" Jan 20 18:40:21 crc kubenswrapper[4558]: I0120 18:40:21.255203 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/mariadb-operator-index-wf2wm"] Jan 20 18:40:21 crc kubenswrapper[4558]: I0120 18:40:21.565634 4558 scope.go:117] "RemoveContainer" containerID="77ce04c6daee7d4f9b776bd1c80b448f9c4750d5cb5c7dd182ff28963a4bcf99" Jan 20 18:40:21 crc kubenswrapper[4558]: E0120 18:40:21.566513 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:40:21 crc kubenswrapper[4558]: I0120 18:40:21.577499 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/mariadb-operator-index-wf2wm"] Jan 20 18:40:21 crc kubenswrapper[4558]: I0120 18:40:21.590032 4558 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 20 18:40:21 crc kubenswrapper[4558]: I0120 18:40:21.658525 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-index-pdq8m"] Jan 20 18:40:21 crc kubenswrapper[4558]: I0120 18:40:21.659412 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-index-pdq8m" Jan 20 18:40:21 crc kubenswrapper[4558]: I0120 18:40:21.664658 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-index-pdq8m"] Jan 20 18:40:21 crc kubenswrapper[4558]: I0120 18:40:21.755640 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qfkjk\" (UniqueName: \"kubernetes.io/projected/5995f068-2b84-4ad0-8dc7-a24b00cc8ed8-kube-api-access-qfkjk\") pod \"mariadb-operator-index-pdq8m\" (UID: \"5995f068-2b84-4ad0-8dc7-a24b00cc8ed8\") " pod="openstack-operators/mariadb-operator-index-pdq8m" Jan 20 18:40:21 crc kubenswrapper[4558]: I0120 18:40:21.856913 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qfkjk\" (UniqueName: \"kubernetes.io/projected/5995f068-2b84-4ad0-8dc7-a24b00cc8ed8-kube-api-access-qfkjk\") pod \"mariadb-operator-index-pdq8m\" (UID: \"5995f068-2b84-4ad0-8dc7-a24b00cc8ed8\") " pod="openstack-operators/mariadb-operator-index-pdq8m" Jan 20 18:40:21 crc kubenswrapper[4558]: I0120 18:40:21.873998 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qfkjk\" (UniqueName: \"kubernetes.io/projected/5995f068-2b84-4ad0-8dc7-a24b00cc8ed8-kube-api-access-qfkjk\") pod \"mariadb-operator-index-pdq8m\" (UID: \"5995f068-2b84-4ad0-8dc7-a24b00cc8ed8\") " pod="openstack-operators/mariadb-operator-index-pdq8m" Jan 20 18:40:21 crc kubenswrapper[4558]: I0120 18:40:21.969420 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-wf2wm" event={"ID":"9dc9812b-eeef-4ab4-9e54-b9dba919edba","Type":"ContainerStarted","Data":"424b38b87839d8af1f4892522b8c16ca872630c37cd9ea364fa3a611f46aac0e"} Jan 20 18:40:21 crc kubenswrapper[4558]: I0120 18:40:21.975608 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-index-pdq8m" Jan 20 18:40:22 crc kubenswrapper[4558]: I0120 18:40:22.338545 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-index-pdq8m"] Jan 20 18:40:22 crc kubenswrapper[4558]: I0120 18:40:22.977586 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-wf2wm" event={"ID":"9dc9812b-eeef-4ab4-9e54-b9dba919edba","Type":"ContainerStarted","Data":"ffd8460f710e728dd068e6372c17e852b7135df2af9554dfc726aba31159eef6"} Jan 20 18:40:22 crc kubenswrapper[4558]: I0120 18:40:22.977874 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/mariadb-operator-index-wf2wm" podUID="9dc9812b-eeef-4ab4-9e54-b9dba919edba" containerName="registry-server" containerID="cri-o://ffd8460f710e728dd068e6372c17e852b7135df2af9554dfc726aba31159eef6" gracePeriod=2 Jan 20 18:40:22 crc kubenswrapper[4558]: I0120 18:40:22.979899 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-pdq8m" event={"ID":"5995f068-2b84-4ad0-8dc7-a24b00cc8ed8","Type":"ContainerStarted","Data":"8165927947d739919261f2284c86a80965539b15b6f016ffa6227e3179051eda"} Jan 20 18:40:22 crc kubenswrapper[4558]: I0120 18:40:22.995609 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-index-wf2wm" podStartSLOduration=2.372808184 podStartE2EDuration="2.99558356s" podCreationTimestamp="2026-01-20 18:40:20 +0000 UTC" firstStartedPulling="2026-01-20 18:40:21.58982066 +0000 UTC m=+7115.350158617" lastFinishedPulling="2026-01-20 18:40:22.212596026 +0000 UTC m=+7115.972933993" observedRunningTime="2026-01-20 18:40:22.990853316 +0000 UTC m=+7116.751191283" watchObservedRunningTime="2026-01-20 18:40:22.99558356 +0000 UTC m=+7116.755921527" Jan 20 18:40:23 crc kubenswrapper[4558]: I0120 18:40:23.315347 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-index-wf2wm" Jan 20 18:40:23 crc kubenswrapper[4558]: I0120 18:40:23.479629 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j58s2\" (UniqueName: \"kubernetes.io/projected/9dc9812b-eeef-4ab4-9e54-b9dba919edba-kube-api-access-j58s2\") pod \"9dc9812b-eeef-4ab4-9e54-b9dba919edba\" (UID: \"9dc9812b-eeef-4ab4-9e54-b9dba919edba\") " Jan 20 18:40:23 crc kubenswrapper[4558]: I0120 18:40:23.485260 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9dc9812b-eeef-4ab4-9e54-b9dba919edba-kube-api-access-j58s2" (OuterVolumeSpecName: "kube-api-access-j58s2") pod "9dc9812b-eeef-4ab4-9e54-b9dba919edba" (UID: "9dc9812b-eeef-4ab4-9e54-b9dba919edba"). InnerVolumeSpecName "kube-api-access-j58s2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:40:23 crc kubenswrapper[4558]: I0120 18:40:23.581803 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j58s2\" (UniqueName: \"kubernetes.io/projected/9dc9812b-eeef-4ab4-9e54-b9dba919edba-kube-api-access-j58s2\") on node \"crc\" DevicePath \"\"" Jan 20 18:40:23 crc kubenswrapper[4558]: I0120 18:40:23.988551 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-pdq8m" event={"ID":"5995f068-2b84-4ad0-8dc7-a24b00cc8ed8","Type":"ContainerStarted","Data":"4503d75dc3e2dff19603937b0d4459f8b0693a6a5912e6af723092f8b9b97ad3"} Jan 20 18:40:23 crc kubenswrapper[4558]: I0120 18:40:23.990111 4558 generic.go:334] "Generic (PLEG): container finished" podID="9dc9812b-eeef-4ab4-9e54-b9dba919edba" containerID="ffd8460f710e728dd068e6372c17e852b7135df2af9554dfc726aba31159eef6" exitCode=0 Jan 20 18:40:23 crc kubenswrapper[4558]: I0120 18:40:23.990149 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-index-wf2wm" Jan 20 18:40:23 crc kubenswrapper[4558]: I0120 18:40:23.990192 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-wf2wm" event={"ID":"9dc9812b-eeef-4ab4-9e54-b9dba919edba","Type":"ContainerDied","Data":"ffd8460f710e728dd068e6372c17e852b7135df2af9554dfc726aba31159eef6"} Jan 20 18:40:23 crc kubenswrapper[4558]: I0120 18:40:23.990273 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-wf2wm" event={"ID":"9dc9812b-eeef-4ab4-9e54-b9dba919edba","Type":"ContainerDied","Data":"424b38b87839d8af1f4892522b8c16ca872630c37cd9ea364fa3a611f46aac0e"} Jan 20 18:40:23 crc kubenswrapper[4558]: I0120 18:40:23.990299 4558 scope.go:117] "RemoveContainer" containerID="ffd8460f710e728dd068e6372c17e852b7135df2af9554dfc726aba31159eef6" Jan 20 18:40:24 crc kubenswrapper[4558]: I0120 18:40:24.007645 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-index-pdq8m" podStartSLOduration=2.436039624 podStartE2EDuration="3.007633206s" podCreationTimestamp="2026-01-20 18:40:21 +0000 UTC" firstStartedPulling="2026-01-20 18:40:22.346356398 +0000 UTC m=+7116.106694355" lastFinishedPulling="2026-01-20 18:40:22.91794997 +0000 UTC m=+7116.678287937" observedRunningTime="2026-01-20 18:40:24.002310878 +0000 UTC m=+7117.762648845" watchObservedRunningTime="2026-01-20 18:40:24.007633206 +0000 UTC m=+7117.767971173" Jan 20 18:40:24 crc kubenswrapper[4558]: I0120 18:40:24.008389 4558 scope.go:117] "RemoveContainer" containerID="ffd8460f710e728dd068e6372c17e852b7135df2af9554dfc726aba31159eef6" Jan 20 18:40:24 crc kubenswrapper[4558]: E0120 18:40:24.009023 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ffd8460f710e728dd068e6372c17e852b7135df2af9554dfc726aba31159eef6\": container with ID starting with ffd8460f710e728dd068e6372c17e852b7135df2af9554dfc726aba31159eef6 not found: ID does not exist" containerID="ffd8460f710e728dd068e6372c17e852b7135df2af9554dfc726aba31159eef6" Jan 20 18:40:24 crc kubenswrapper[4558]: I0120 18:40:24.009098 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ffd8460f710e728dd068e6372c17e852b7135df2af9554dfc726aba31159eef6"} err="failed to get container status 
\"ffd8460f710e728dd068e6372c17e852b7135df2af9554dfc726aba31159eef6\": rpc error: code = NotFound desc = could not find container \"ffd8460f710e728dd068e6372c17e852b7135df2af9554dfc726aba31159eef6\": container with ID starting with ffd8460f710e728dd068e6372c17e852b7135df2af9554dfc726aba31159eef6 not found: ID does not exist" Jan 20 18:40:24 crc kubenswrapper[4558]: I0120 18:40:24.017571 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/mariadb-operator-index-wf2wm"] Jan 20 18:40:24 crc kubenswrapper[4558]: I0120 18:40:24.021067 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/mariadb-operator-index-wf2wm"] Jan 20 18:40:24 crc kubenswrapper[4558]: I0120 18:40:24.572654 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9dc9812b-eeef-4ab4-9e54-b9dba919edba" path="/var/lib/kubelet/pods/9dc9812b-eeef-4ab4-9e54-b9dba919edba/volumes" Jan 20 18:40:27 crc kubenswrapper[4558]: I0120 18:40:27.889879 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720bcbg7n"] Jan 20 18:40:27 crc kubenswrapper[4558]: E0120 18:40:27.890426 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9dc9812b-eeef-4ab4-9e54-b9dba919edba" containerName="registry-server" Jan 20 18:40:27 crc kubenswrapper[4558]: I0120 18:40:27.890441 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="9dc9812b-eeef-4ab4-9e54-b9dba919edba" containerName="registry-server" Jan 20 18:40:27 crc kubenswrapper[4558]: I0120 18:40:27.890570 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="9dc9812b-eeef-4ab4-9e54-b9dba919edba" containerName="registry-server" Jan 20 18:40:27 crc kubenswrapper[4558]: I0120 18:40:27.891432 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720bcbg7n" Jan 20 18:40:27 crc kubenswrapper[4558]: I0120 18:40:27.893529 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-7xp5w" Jan 20 18:40:27 crc kubenswrapper[4558]: I0120 18:40:27.897422 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720bcbg7n"] Jan 20 18:40:28 crc kubenswrapper[4558]: I0120 18:40:28.037392 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/12251664-831b-4494-80cb-a15f255704f4-bundle\") pod \"a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720bcbg7n\" (UID: \"12251664-831b-4494-80cb-a15f255704f4\") " pod="openstack-operators/a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720bcbg7n" Jan 20 18:40:28 crc kubenswrapper[4558]: I0120 18:40:28.037828 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/12251664-831b-4494-80cb-a15f255704f4-util\") pod \"a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720bcbg7n\" (UID: \"12251664-831b-4494-80cb-a15f255704f4\") " pod="openstack-operators/a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720bcbg7n" Jan 20 18:40:28 crc kubenswrapper[4558]: I0120 18:40:28.037859 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-prbmr\" (UniqueName: \"kubernetes.io/projected/12251664-831b-4494-80cb-a15f255704f4-kube-api-access-prbmr\") pod \"a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720bcbg7n\" (UID: \"12251664-831b-4494-80cb-a15f255704f4\") " pod="openstack-operators/a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720bcbg7n" Jan 20 18:40:28 crc kubenswrapper[4558]: I0120 18:40:28.138897 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/12251664-831b-4494-80cb-a15f255704f4-util\") pod \"a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720bcbg7n\" (UID: \"12251664-831b-4494-80cb-a15f255704f4\") " pod="openstack-operators/a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720bcbg7n" Jan 20 18:40:28 crc kubenswrapper[4558]: I0120 18:40:28.138937 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-prbmr\" (UniqueName: \"kubernetes.io/projected/12251664-831b-4494-80cb-a15f255704f4-kube-api-access-prbmr\") pod \"a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720bcbg7n\" (UID: \"12251664-831b-4494-80cb-a15f255704f4\") " pod="openstack-operators/a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720bcbg7n" Jan 20 18:40:28 crc kubenswrapper[4558]: I0120 18:40:28.138974 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/12251664-831b-4494-80cb-a15f255704f4-bundle\") pod \"a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720bcbg7n\" (UID: \"12251664-831b-4494-80cb-a15f255704f4\") " pod="openstack-operators/a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720bcbg7n" Jan 20 18:40:28 crc kubenswrapper[4558]: I0120 18:40:28.139412 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/12251664-831b-4494-80cb-a15f255704f4-util\") pod \"a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720bcbg7n\" (UID: \"12251664-831b-4494-80cb-a15f255704f4\") " pod="openstack-operators/a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720bcbg7n" Jan 20 18:40:28 crc kubenswrapper[4558]: I0120 18:40:28.139494 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/12251664-831b-4494-80cb-a15f255704f4-bundle\") pod \"a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720bcbg7n\" (UID: \"12251664-831b-4494-80cb-a15f255704f4\") " pod="openstack-operators/a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720bcbg7n" Jan 20 18:40:28 crc kubenswrapper[4558]: I0120 18:40:28.156230 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-prbmr\" (UniqueName: \"kubernetes.io/projected/12251664-831b-4494-80cb-a15f255704f4-kube-api-access-prbmr\") pod \"a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720bcbg7n\" (UID: \"12251664-831b-4494-80cb-a15f255704f4\") " pod="openstack-operators/a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720bcbg7n" Jan 20 18:40:28 crc kubenswrapper[4558]: I0120 18:40:28.207240 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720bcbg7n" Jan 20 18:40:28 crc kubenswrapper[4558]: I0120 18:40:28.585761 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720bcbg7n"] Jan 20 18:40:29 crc kubenswrapper[4558]: I0120 18:40:29.027009 4558 generic.go:334] "Generic (PLEG): container finished" podID="12251664-831b-4494-80cb-a15f255704f4" containerID="2569ffbff9f349bf3ef902262d977b2504a6cddf9adac5b4e8a4ead9d3b2eaa6" exitCode=0 Jan 20 18:40:29 crc kubenswrapper[4558]: I0120 18:40:29.027059 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720bcbg7n" event={"ID":"12251664-831b-4494-80cb-a15f255704f4","Type":"ContainerDied","Data":"2569ffbff9f349bf3ef902262d977b2504a6cddf9adac5b4e8a4ead9d3b2eaa6"} Jan 20 18:40:29 crc kubenswrapper[4558]: I0120 18:40:29.027101 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720bcbg7n" event={"ID":"12251664-831b-4494-80cb-a15f255704f4","Type":"ContainerStarted","Data":"306bf142c72901069f5dab282fc03b0346349f7e8d49c9fb4552bae8b2a12bc6"} Jan 20 18:40:30 crc kubenswrapper[4558]: I0120 18:40:30.036105 4558 generic.go:334] "Generic (PLEG): container finished" podID="12251664-831b-4494-80cb-a15f255704f4" containerID="c2355e839f369cfd7b4ad021acc9b0854699f1f9fd51d98e91a204a902774df3" exitCode=0 Jan 20 18:40:30 crc kubenswrapper[4558]: I0120 18:40:30.036161 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720bcbg7n" event={"ID":"12251664-831b-4494-80cb-a15f255704f4","Type":"ContainerDied","Data":"c2355e839f369cfd7b4ad021acc9b0854699f1f9fd51d98e91a204a902774df3"} Jan 20 18:40:31 crc kubenswrapper[4558]: I0120 18:40:31.044821 4558 generic.go:334] "Generic (PLEG): container finished" podID="12251664-831b-4494-80cb-a15f255704f4" containerID="8e0fb96310ab3053ec08bfa9592bbc80ed7fdc59dd5d4aba121300c20f91c804" exitCode=0 Jan 20 18:40:31 crc kubenswrapper[4558]: I0120 18:40:31.044874 4558 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720bcbg7n" event={"ID":"12251664-831b-4494-80cb-a15f255704f4","Type":"ContainerDied","Data":"8e0fb96310ab3053ec08bfa9592bbc80ed7fdc59dd5d4aba121300c20f91c804"} Jan 20 18:40:31 crc kubenswrapper[4558]: I0120 18:40:31.976143 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-index-pdq8m" Jan 20 18:40:31 crc kubenswrapper[4558]: I0120 18:40:31.976479 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/mariadb-operator-index-pdq8m" Jan 20 18:40:32 crc kubenswrapper[4558]: I0120 18:40:32.003980 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/mariadb-operator-index-pdq8m" Jan 20 18:40:32 crc kubenswrapper[4558]: I0120 18:40:32.075038 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-index-pdq8m" Jan 20 18:40:32 crc kubenswrapper[4558]: I0120 18:40:32.287388 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720bcbg7n" Jan 20 18:40:32 crc kubenswrapper[4558]: I0120 18:40:32.393811 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/12251664-831b-4494-80cb-a15f255704f4-bundle\") pod \"12251664-831b-4494-80cb-a15f255704f4\" (UID: \"12251664-831b-4494-80cb-a15f255704f4\") " Jan 20 18:40:32 crc kubenswrapper[4558]: I0120 18:40:32.393859 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/12251664-831b-4494-80cb-a15f255704f4-util\") pod \"12251664-831b-4494-80cb-a15f255704f4\" (UID: \"12251664-831b-4494-80cb-a15f255704f4\") " Jan 20 18:40:32 crc kubenswrapper[4558]: I0120 18:40:32.393939 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-prbmr\" (UniqueName: \"kubernetes.io/projected/12251664-831b-4494-80cb-a15f255704f4-kube-api-access-prbmr\") pod \"12251664-831b-4494-80cb-a15f255704f4\" (UID: \"12251664-831b-4494-80cb-a15f255704f4\") " Jan 20 18:40:32 crc kubenswrapper[4558]: I0120 18:40:32.394845 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/12251664-831b-4494-80cb-a15f255704f4-bundle" (OuterVolumeSpecName: "bundle") pod "12251664-831b-4494-80cb-a15f255704f4" (UID: "12251664-831b-4494-80cb-a15f255704f4"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:40:32 crc kubenswrapper[4558]: I0120 18:40:32.402305 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/12251664-831b-4494-80cb-a15f255704f4-kube-api-access-prbmr" (OuterVolumeSpecName: "kube-api-access-prbmr") pod "12251664-831b-4494-80cb-a15f255704f4" (UID: "12251664-831b-4494-80cb-a15f255704f4"). InnerVolumeSpecName "kube-api-access-prbmr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:40:32 crc kubenswrapper[4558]: I0120 18:40:32.406356 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/12251664-831b-4494-80cb-a15f255704f4-util" (OuterVolumeSpecName: "util") pod "12251664-831b-4494-80cb-a15f255704f4" (UID: "12251664-831b-4494-80cb-a15f255704f4"). 
InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:40:32 crc kubenswrapper[4558]: I0120 18:40:32.496139 4558 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/12251664-831b-4494-80cb-a15f255704f4-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:40:32 crc kubenswrapper[4558]: I0120 18:40:32.496194 4558 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/12251664-831b-4494-80cb-a15f255704f4-util\") on node \"crc\" DevicePath \"\"" Jan 20 18:40:32 crc kubenswrapper[4558]: I0120 18:40:32.496209 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-prbmr\" (UniqueName: \"kubernetes.io/projected/12251664-831b-4494-80cb-a15f255704f4-kube-api-access-prbmr\") on node \"crc\" DevicePath \"\"" Jan 20 18:40:33 crc kubenswrapper[4558]: I0120 18:40:33.059881 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720bcbg7n" event={"ID":"12251664-831b-4494-80cb-a15f255704f4","Type":"ContainerDied","Data":"306bf142c72901069f5dab282fc03b0346349f7e8d49c9fb4552bae8b2a12bc6"} Jan 20 18:40:33 crc kubenswrapper[4558]: I0120 18:40:33.059920 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720bcbg7n" Jan 20 18:40:33 crc kubenswrapper[4558]: I0120 18:40:33.059926 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="306bf142c72901069f5dab282fc03b0346349f7e8d49c9fb4552bae8b2a12bc6" Jan 20 18:40:35 crc kubenswrapper[4558]: I0120 18:40:35.566197 4558 scope.go:117] "RemoveContainer" containerID="77ce04c6daee7d4f9b776bd1c80b448f9c4750d5cb5c7dd182ff28963a4bcf99" Jan 20 18:40:35 crc kubenswrapper[4558]: E0120 18:40:35.566806 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:40:46 crc kubenswrapper[4558]: I0120 18:40:46.898586 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-6bf49b779b-px8lt"] Jan 20 18:40:46 crc kubenswrapper[4558]: E0120 18:40:46.899354 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12251664-831b-4494-80cb-a15f255704f4" containerName="pull" Jan 20 18:40:46 crc kubenswrapper[4558]: I0120 18:40:46.899368 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="12251664-831b-4494-80cb-a15f255704f4" containerName="pull" Jan 20 18:40:46 crc kubenswrapper[4558]: E0120 18:40:46.899391 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12251664-831b-4494-80cb-a15f255704f4" containerName="util" Jan 20 18:40:46 crc kubenswrapper[4558]: I0120 18:40:46.899396 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="12251664-831b-4494-80cb-a15f255704f4" containerName="util" Jan 20 18:40:46 crc kubenswrapper[4558]: E0120 18:40:46.899405 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12251664-831b-4494-80cb-a15f255704f4" containerName="extract" Jan 20 18:40:46 crc kubenswrapper[4558]: I0120 18:40:46.899410 
4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="12251664-831b-4494-80cb-a15f255704f4" containerName="extract" Jan 20 18:40:46 crc kubenswrapper[4558]: I0120 18:40:46.899544 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="12251664-831b-4494-80cb-a15f255704f4" containerName="extract" Jan 20 18:40:46 crc kubenswrapper[4558]: I0120 18:40:46.899977 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-6bf49b779b-px8lt" Jan 20 18:40:46 crc kubenswrapper[4558]: I0120 18:40:46.901573 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Jan 20 18:40:46 crc kubenswrapper[4558]: I0120 18:40:46.901669 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-7tlvp" Jan 20 18:40:46 crc kubenswrapper[4558]: I0120 18:40:46.902873 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-service-cert" Jan 20 18:40:46 crc kubenswrapper[4558]: I0120 18:40:46.915839 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-6bf49b779b-px8lt"] Jan 20 18:40:46 crc kubenswrapper[4558]: I0120 18:40:46.989930 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/cd5771a4-2a64-4d8d-9092-843789e8f695-webhook-cert\") pod \"mariadb-operator-controller-manager-6bf49b779b-px8lt\" (UID: \"cd5771a4-2a64-4d8d-9092-843789e8f695\") " pod="openstack-operators/mariadb-operator-controller-manager-6bf49b779b-px8lt" Jan 20 18:40:46 crc kubenswrapper[4558]: I0120 18:40:46.990030 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4m8g4\" (UniqueName: \"kubernetes.io/projected/cd5771a4-2a64-4d8d-9092-843789e8f695-kube-api-access-4m8g4\") pod \"mariadb-operator-controller-manager-6bf49b779b-px8lt\" (UID: \"cd5771a4-2a64-4d8d-9092-843789e8f695\") " pod="openstack-operators/mariadb-operator-controller-manager-6bf49b779b-px8lt" Jan 20 18:40:46 crc kubenswrapper[4558]: I0120 18:40:46.990104 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/cd5771a4-2a64-4d8d-9092-843789e8f695-apiservice-cert\") pod \"mariadb-operator-controller-manager-6bf49b779b-px8lt\" (UID: \"cd5771a4-2a64-4d8d-9092-843789e8f695\") " pod="openstack-operators/mariadb-operator-controller-manager-6bf49b779b-px8lt" Jan 20 18:40:47 crc kubenswrapper[4558]: I0120 18:40:47.091446 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/cd5771a4-2a64-4d8d-9092-843789e8f695-webhook-cert\") pod \"mariadb-operator-controller-manager-6bf49b779b-px8lt\" (UID: \"cd5771a4-2a64-4d8d-9092-843789e8f695\") " pod="openstack-operators/mariadb-operator-controller-manager-6bf49b779b-px8lt" Jan 20 18:40:47 crc kubenswrapper[4558]: I0120 18:40:47.091522 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4m8g4\" (UniqueName: \"kubernetes.io/projected/cd5771a4-2a64-4d8d-9092-843789e8f695-kube-api-access-4m8g4\") pod \"mariadb-operator-controller-manager-6bf49b779b-px8lt\" (UID: \"cd5771a4-2a64-4d8d-9092-843789e8f695\") " 
pod="openstack-operators/mariadb-operator-controller-manager-6bf49b779b-px8lt" Jan 20 18:40:47 crc kubenswrapper[4558]: I0120 18:40:47.091572 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/cd5771a4-2a64-4d8d-9092-843789e8f695-apiservice-cert\") pod \"mariadb-operator-controller-manager-6bf49b779b-px8lt\" (UID: \"cd5771a4-2a64-4d8d-9092-843789e8f695\") " pod="openstack-operators/mariadb-operator-controller-manager-6bf49b779b-px8lt" Jan 20 18:40:47 crc kubenswrapper[4558]: I0120 18:40:47.097054 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/cd5771a4-2a64-4d8d-9092-843789e8f695-apiservice-cert\") pod \"mariadb-operator-controller-manager-6bf49b779b-px8lt\" (UID: \"cd5771a4-2a64-4d8d-9092-843789e8f695\") " pod="openstack-operators/mariadb-operator-controller-manager-6bf49b779b-px8lt" Jan 20 18:40:47 crc kubenswrapper[4558]: I0120 18:40:47.101783 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/cd5771a4-2a64-4d8d-9092-843789e8f695-webhook-cert\") pod \"mariadb-operator-controller-manager-6bf49b779b-px8lt\" (UID: \"cd5771a4-2a64-4d8d-9092-843789e8f695\") " pod="openstack-operators/mariadb-operator-controller-manager-6bf49b779b-px8lt" Jan 20 18:40:47 crc kubenswrapper[4558]: I0120 18:40:47.111145 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4m8g4\" (UniqueName: \"kubernetes.io/projected/cd5771a4-2a64-4d8d-9092-843789e8f695-kube-api-access-4m8g4\") pod \"mariadb-operator-controller-manager-6bf49b779b-px8lt\" (UID: \"cd5771a4-2a64-4d8d-9092-843789e8f695\") " pod="openstack-operators/mariadb-operator-controller-manager-6bf49b779b-px8lt" Jan 20 18:40:47 crc kubenswrapper[4558]: I0120 18:40:47.214128 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-6bf49b779b-px8lt" Jan 20 18:40:47 crc kubenswrapper[4558]: I0120 18:40:47.591365 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-6bf49b779b-px8lt"] Jan 20 18:40:48 crc kubenswrapper[4558]: I0120 18:40:48.163059 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-6bf49b779b-px8lt" event={"ID":"cd5771a4-2a64-4d8d-9092-843789e8f695","Type":"ContainerStarted","Data":"cd323804e98b77e7a0cfe9105bb06ffd913d5af965880592b6f58f12a81ad73a"} Jan 20 18:40:48 crc kubenswrapper[4558]: I0120 18:40:48.163123 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-6bf49b779b-px8lt" event={"ID":"cd5771a4-2a64-4d8d-9092-843789e8f695","Type":"ContainerStarted","Data":"6e634ee9c8be469f047197f7fb8744b0bed8aeaed5fe015d788c90cae0345234"} Jan 20 18:40:48 crc kubenswrapper[4558]: I0120 18:40:48.163284 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-6bf49b779b-px8lt" Jan 20 18:40:48 crc kubenswrapper[4558]: I0120 18:40:48.181361 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-6bf49b779b-px8lt" podStartSLOduration=2.181347 podStartE2EDuration="2.181347s" podCreationTimestamp="2026-01-20 18:40:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:40:48.179541586 +0000 UTC m=+7141.939879554" watchObservedRunningTime="2026-01-20 18:40:48.181347 +0000 UTC m=+7141.941684968" Jan 20 18:40:48 crc kubenswrapper[4558]: I0120 18:40:48.566104 4558 scope.go:117] "RemoveContainer" containerID="77ce04c6daee7d4f9b776bd1c80b448f9c4750d5cb5c7dd182ff28963a4bcf99" Jan 20 18:40:48 crc kubenswrapper[4558]: E0120 18:40:48.566396 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:40:57 crc kubenswrapper[4558]: I0120 18:40:57.229653 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-6bf49b779b-px8lt" Jan 20 18:40:58 crc kubenswrapper[4558]: I0120 18:40:58.152022 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-index-gxzr6"] Jan 20 18:40:58 crc kubenswrapper[4558]: I0120 18:40:58.153370 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-index-gxzr6" Jan 20 18:40:58 crc kubenswrapper[4558]: I0120 18:40:58.155242 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-index-dockercfg-rk2j9" Jan 20 18:40:58 crc kubenswrapper[4558]: I0120 18:40:58.162230 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-index-gxzr6"] Jan 20 18:40:58 crc kubenswrapper[4558]: I0120 18:40:58.250574 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mrgfd\" (UniqueName: \"kubernetes.io/projected/54b73115-d409-40c7-b097-bc4b5e8ec291-kube-api-access-mrgfd\") pod \"infra-operator-index-gxzr6\" (UID: \"54b73115-d409-40c7-b097-bc4b5e8ec291\") " pod="openstack-operators/infra-operator-index-gxzr6" Jan 20 18:40:58 crc kubenswrapper[4558]: I0120 18:40:58.351979 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mrgfd\" (UniqueName: \"kubernetes.io/projected/54b73115-d409-40c7-b097-bc4b5e8ec291-kube-api-access-mrgfd\") pod \"infra-operator-index-gxzr6\" (UID: \"54b73115-d409-40c7-b097-bc4b5e8ec291\") " pod="openstack-operators/infra-operator-index-gxzr6" Jan 20 18:40:58 crc kubenswrapper[4558]: I0120 18:40:58.370987 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mrgfd\" (UniqueName: \"kubernetes.io/projected/54b73115-d409-40c7-b097-bc4b5e8ec291-kube-api-access-mrgfd\") pod \"infra-operator-index-gxzr6\" (UID: \"54b73115-d409-40c7-b097-bc4b5e8ec291\") " pod="openstack-operators/infra-operator-index-gxzr6" Jan 20 18:40:58 crc kubenswrapper[4558]: I0120 18:40:58.480504 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-index-gxzr6" Jan 20 18:40:58 crc kubenswrapper[4558]: I0120 18:40:58.858801 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-index-gxzr6"] Jan 20 18:40:59 crc kubenswrapper[4558]: I0120 18:40:59.236599 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-gxzr6" event={"ID":"54b73115-d409-40c7-b097-bc4b5e8ec291","Type":"ContainerStarted","Data":"6468b318f4298319fa4bbf49f5ad5b2af814f9a6b43a42671fb2e026a66bce24"} Jan 20 18:41:00 crc kubenswrapper[4558]: I0120 18:41:00.249296 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-gxzr6" event={"ID":"54b73115-d409-40c7-b097-bc4b5e8ec291","Type":"ContainerStarted","Data":"48db9aa693782f9ee644a653eff1aedbebb0c0f3f91035bc71e2ac724a9e1732"} Jan 20 18:41:00 crc kubenswrapper[4558]: I0120 18:41:00.272344 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-index-gxzr6" podStartSLOduration=1.646093727 podStartE2EDuration="2.272319842s" podCreationTimestamp="2026-01-20 18:40:58 +0000 UTC" firstStartedPulling="2026-01-20 18:40:58.872439944 +0000 UTC m=+7152.632777910" lastFinishedPulling="2026-01-20 18:40:59.498666058 +0000 UTC m=+7153.259004025" observedRunningTime="2026-01-20 18:41:00.2635749 +0000 UTC m=+7154.023912867" watchObservedRunningTime="2026-01-20 18:41:00.272319842 +0000 UTC m=+7154.032657809" Jan 20 18:41:02 crc kubenswrapper[4558]: I0120 18:41:02.129523 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/infra-operator-index-gxzr6"] Jan 20 18:41:02 crc kubenswrapper[4558]: I0120 18:41:02.259807 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/infra-operator-index-gxzr6" podUID="54b73115-d409-40c7-b097-bc4b5e8ec291" containerName="registry-server" containerID="cri-o://48db9aa693782f9ee644a653eff1aedbebb0c0f3f91035bc71e2ac724a9e1732" gracePeriod=2 Jan 20 18:41:02 crc kubenswrapper[4558]: I0120 18:41:02.573600 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1wpjhp"] Jan 20 18:41:02 crc kubenswrapper[4558]: I0120 18:41:02.574992 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1wpjhp" Jan 20 18:41:02 crc kubenswrapper[4558]: I0120 18:41:02.576620 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-7xp5w" Jan 20 18:41:02 crc kubenswrapper[4558]: I0120 18:41:02.581426 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1wpjhp"] Jan 20 18:41:02 crc kubenswrapper[4558]: I0120 18:41:02.633226 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x2mcr\" (UniqueName: \"kubernetes.io/projected/55e27c4e-389f-4ae0-814b-c8a92814add5-kube-api-access-x2mcr\") pod \"ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1wpjhp\" (UID: \"55e27c4e-389f-4ae0-814b-c8a92814add5\") " pod="openstack-operators/ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1wpjhp" Jan 20 18:41:02 crc kubenswrapper[4558]: I0120 18:41:02.633302 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/55e27c4e-389f-4ae0-814b-c8a92814add5-util\") pod \"ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1wpjhp\" (UID: \"55e27c4e-389f-4ae0-814b-c8a92814add5\") " pod="openstack-operators/ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1wpjhp" Jan 20 18:41:02 crc kubenswrapper[4558]: I0120 18:41:02.633352 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/55e27c4e-389f-4ae0-814b-c8a92814add5-bundle\") pod \"ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1wpjhp\" (UID: \"55e27c4e-389f-4ae0-814b-c8a92814add5\") " pod="openstack-operators/ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1wpjhp" Jan 20 18:41:02 crc kubenswrapper[4558]: I0120 18:41:02.638595 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-index-gxzr6" Jan 20 18:41:02 crc kubenswrapper[4558]: I0120 18:41:02.734500 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mrgfd\" (UniqueName: \"kubernetes.io/projected/54b73115-d409-40c7-b097-bc4b5e8ec291-kube-api-access-mrgfd\") pod \"54b73115-d409-40c7-b097-bc4b5e8ec291\" (UID: \"54b73115-d409-40c7-b097-bc4b5e8ec291\") " Jan 20 18:41:02 crc kubenswrapper[4558]: I0120 18:41:02.734922 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/55e27c4e-389f-4ae0-814b-c8a92814add5-bundle\") pod \"ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1wpjhp\" (UID: \"55e27c4e-389f-4ae0-814b-c8a92814add5\") " pod="openstack-operators/ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1wpjhp" Jan 20 18:41:02 crc kubenswrapper[4558]: I0120 18:41:02.735009 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x2mcr\" (UniqueName: \"kubernetes.io/projected/55e27c4e-389f-4ae0-814b-c8a92814add5-kube-api-access-x2mcr\") pod \"ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1wpjhp\" (UID: \"55e27c4e-389f-4ae0-814b-c8a92814add5\") " pod="openstack-operators/ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1wpjhp" Jan 20 18:41:02 crc kubenswrapper[4558]: I0120 18:41:02.735085 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/55e27c4e-389f-4ae0-814b-c8a92814add5-util\") pod \"ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1wpjhp\" (UID: \"55e27c4e-389f-4ae0-814b-c8a92814add5\") " pod="openstack-operators/ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1wpjhp" Jan 20 18:41:02 crc kubenswrapper[4558]: I0120 18:41:02.735518 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/55e27c4e-389f-4ae0-814b-c8a92814add5-util\") pod \"ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1wpjhp\" (UID: \"55e27c4e-389f-4ae0-814b-c8a92814add5\") " pod="openstack-operators/ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1wpjhp" Jan 20 18:41:02 crc kubenswrapper[4558]: I0120 18:41:02.735674 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/55e27c4e-389f-4ae0-814b-c8a92814add5-bundle\") pod \"ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1wpjhp\" (UID: \"55e27c4e-389f-4ae0-814b-c8a92814add5\") " pod="openstack-operators/ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1wpjhp" Jan 20 18:41:02 crc kubenswrapper[4558]: I0120 18:41:02.742099 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/54b73115-d409-40c7-b097-bc4b5e8ec291-kube-api-access-mrgfd" (OuterVolumeSpecName: "kube-api-access-mrgfd") pod "54b73115-d409-40c7-b097-bc4b5e8ec291" (UID: "54b73115-d409-40c7-b097-bc4b5e8ec291"). InnerVolumeSpecName "kube-api-access-mrgfd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:41:02 crc kubenswrapper[4558]: I0120 18:41:02.749339 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x2mcr\" (UniqueName: \"kubernetes.io/projected/55e27c4e-389f-4ae0-814b-c8a92814add5-kube-api-access-x2mcr\") pod \"ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1wpjhp\" (UID: \"55e27c4e-389f-4ae0-814b-c8a92814add5\") " pod="openstack-operators/ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1wpjhp" Jan 20 18:41:02 crc kubenswrapper[4558]: I0120 18:41:02.836537 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mrgfd\" (UniqueName: \"kubernetes.io/projected/54b73115-d409-40c7-b097-bc4b5e8ec291-kube-api-access-mrgfd\") on node \"crc\" DevicePath \"\"" Jan 20 18:41:02 crc kubenswrapper[4558]: I0120 18:41:02.887989 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1wpjhp" Jan 20 18:41:02 crc kubenswrapper[4558]: I0120 18:41:02.936818 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-index-dqwqb"] Jan 20 18:41:02 crc kubenswrapper[4558]: E0120 18:41:02.937177 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54b73115-d409-40c7-b097-bc4b5e8ec291" containerName="registry-server" Jan 20 18:41:02 crc kubenswrapper[4558]: I0120 18:41:02.937196 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="54b73115-d409-40c7-b097-bc4b5e8ec291" containerName="registry-server" Jan 20 18:41:02 crc kubenswrapper[4558]: I0120 18:41:02.937319 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="54b73115-d409-40c7-b097-bc4b5e8ec291" containerName="registry-server" Jan 20 18:41:02 crc kubenswrapper[4558]: I0120 18:41:02.937783 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-index-dqwqb" Jan 20 18:41:02 crc kubenswrapper[4558]: I0120 18:41:02.942086 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-index-dqwqb"] Jan 20 18:41:03 crc kubenswrapper[4558]: I0120 18:41:03.038568 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pqd6g\" (UniqueName: \"kubernetes.io/projected/d3dc106a-9ff2-49b9-bc47-3d986cec7afa-kube-api-access-pqd6g\") pod \"infra-operator-index-dqwqb\" (UID: \"d3dc106a-9ff2-49b9-bc47-3d986cec7afa\") " pod="openstack-operators/infra-operator-index-dqwqb" Jan 20 18:41:03 crc kubenswrapper[4558]: I0120 18:41:03.077515 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1wpjhp"] Jan 20 18:41:03 crc kubenswrapper[4558]: I0120 18:41:03.140045 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pqd6g\" (UniqueName: \"kubernetes.io/projected/d3dc106a-9ff2-49b9-bc47-3d986cec7afa-kube-api-access-pqd6g\") pod \"infra-operator-index-dqwqb\" (UID: \"d3dc106a-9ff2-49b9-bc47-3d986cec7afa\") " pod="openstack-operators/infra-operator-index-dqwqb" Jan 20 18:41:03 crc kubenswrapper[4558]: I0120 18:41:03.156468 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pqd6g\" (UniqueName: \"kubernetes.io/projected/d3dc106a-9ff2-49b9-bc47-3d986cec7afa-kube-api-access-pqd6g\") pod \"infra-operator-index-dqwqb\" (UID: \"d3dc106a-9ff2-49b9-bc47-3d986cec7afa\") " pod="openstack-operators/infra-operator-index-dqwqb" Jan 20 18:41:03 crc kubenswrapper[4558]: I0120 18:41:03.250370 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-index-dqwqb" Jan 20 18:41:03 crc kubenswrapper[4558]: I0120 18:41:03.267367 4558 generic.go:334] "Generic (PLEG): container finished" podID="54b73115-d409-40c7-b097-bc4b5e8ec291" containerID="48db9aa693782f9ee644a653eff1aedbebb0c0f3f91035bc71e2ac724a9e1732" exitCode=0 Jan 20 18:41:03 crc kubenswrapper[4558]: I0120 18:41:03.267422 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-index-gxzr6" Jan 20 18:41:03 crc kubenswrapper[4558]: I0120 18:41:03.267443 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-gxzr6" event={"ID":"54b73115-d409-40c7-b097-bc4b5e8ec291","Type":"ContainerDied","Data":"48db9aa693782f9ee644a653eff1aedbebb0c0f3f91035bc71e2ac724a9e1732"} Jan 20 18:41:03 crc kubenswrapper[4558]: I0120 18:41:03.267483 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-gxzr6" event={"ID":"54b73115-d409-40c7-b097-bc4b5e8ec291","Type":"ContainerDied","Data":"6468b318f4298319fa4bbf49f5ad5b2af814f9a6b43a42671fb2e026a66bce24"} Jan 20 18:41:03 crc kubenswrapper[4558]: I0120 18:41:03.267505 4558 scope.go:117] "RemoveContainer" containerID="48db9aa693782f9ee644a653eff1aedbebb0c0f3f91035bc71e2ac724a9e1732" Jan 20 18:41:03 crc kubenswrapper[4558]: I0120 18:41:03.269958 4558 generic.go:334] "Generic (PLEG): container finished" podID="55e27c4e-389f-4ae0-814b-c8a92814add5" containerID="1bb11f9c07ec5accf6528934fc37bdfda34aa8c9f39efea0a9349f0f1f35e493" exitCode=0 Jan 20 18:41:03 crc kubenswrapper[4558]: I0120 18:41:03.269986 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1wpjhp" event={"ID":"55e27c4e-389f-4ae0-814b-c8a92814add5","Type":"ContainerDied","Data":"1bb11f9c07ec5accf6528934fc37bdfda34aa8c9f39efea0a9349f0f1f35e493"} Jan 20 18:41:03 crc kubenswrapper[4558]: I0120 18:41:03.270023 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1wpjhp" event={"ID":"55e27c4e-389f-4ae0-814b-c8a92814add5","Type":"ContainerStarted","Data":"84eab21624d9375c3bc476269a5a1f0006fcbb851c9f55af1c69a7e02ad04932"} Jan 20 18:41:03 crc kubenswrapper[4558]: I0120 18:41:03.301560 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/infra-operator-index-gxzr6"] Jan 20 18:41:03 crc kubenswrapper[4558]: I0120 18:41:03.304833 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/infra-operator-index-gxzr6"] Jan 20 18:41:03 crc kubenswrapper[4558]: I0120 18:41:03.305685 4558 scope.go:117] "RemoveContainer" containerID="48db9aa693782f9ee644a653eff1aedbebb0c0f3f91035bc71e2ac724a9e1732" Jan 20 18:41:03 crc kubenswrapper[4558]: E0120 18:41:03.306766 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"48db9aa693782f9ee644a653eff1aedbebb0c0f3f91035bc71e2ac724a9e1732\": container with ID starting with 48db9aa693782f9ee644a653eff1aedbebb0c0f3f91035bc71e2ac724a9e1732 not found: ID does not exist" containerID="48db9aa693782f9ee644a653eff1aedbebb0c0f3f91035bc71e2ac724a9e1732" Jan 20 18:41:03 crc kubenswrapper[4558]: I0120 18:41:03.306814 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"48db9aa693782f9ee644a653eff1aedbebb0c0f3f91035bc71e2ac724a9e1732"} err="failed to get container status \"48db9aa693782f9ee644a653eff1aedbebb0c0f3f91035bc71e2ac724a9e1732\": rpc error: code = NotFound desc = could not find container \"48db9aa693782f9ee644a653eff1aedbebb0c0f3f91035bc71e2ac724a9e1732\": container with ID starting with 48db9aa693782f9ee644a653eff1aedbebb0c0f3f91035bc71e2ac724a9e1732 not found: ID does not exist" Jan 20 18:41:03 crc kubenswrapper[4558]: I0120 18:41:03.565726 4558 scope.go:117] "RemoveContainer" 
containerID="77ce04c6daee7d4f9b776bd1c80b448f9c4750d5cb5c7dd182ff28963a4bcf99" Jan 20 18:41:03 crc kubenswrapper[4558]: E0120 18:41:03.566007 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:41:03 crc kubenswrapper[4558]: I0120 18:41:03.614755 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-index-dqwqb"] Jan 20 18:41:03 crc kubenswrapper[4558]: W0120 18:41:03.625744 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd3dc106a_9ff2_49b9_bc47_3d986cec7afa.slice/crio-9fbb3073069554c0cf9d1a285adb9d606b87cb11b92c5c9c3929d7bb462d3ed2 WatchSource:0}: Error finding container 9fbb3073069554c0cf9d1a285adb9d606b87cb11b92c5c9c3929d7bb462d3ed2: Status 404 returned error can't find the container with id 9fbb3073069554c0cf9d1a285adb9d606b87cb11b92c5c9c3929d7bb462d3ed2 Jan 20 18:41:04 crc kubenswrapper[4558]: I0120 18:41:04.281434 4558 generic.go:334] "Generic (PLEG): container finished" podID="55e27c4e-389f-4ae0-814b-c8a92814add5" containerID="24e7b80a9c4427be34e1b04fd8ee61e8db3324e560f603763318c33836b97cf3" exitCode=0 Jan 20 18:41:04 crc kubenswrapper[4558]: I0120 18:41:04.281507 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1wpjhp" event={"ID":"55e27c4e-389f-4ae0-814b-c8a92814add5","Type":"ContainerDied","Data":"24e7b80a9c4427be34e1b04fd8ee61e8db3324e560f603763318c33836b97cf3"} Jan 20 18:41:04 crc kubenswrapper[4558]: I0120 18:41:04.284328 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-dqwqb" event={"ID":"d3dc106a-9ff2-49b9-bc47-3d986cec7afa","Type":"ContainerStarted","Data":"9fbb3073069554c0cf9d1a285adb9d606b87cb11b92c5c9c3929d7bb462d3ed2"} Jan 20 18:41:04 crc kubenswrapper[4558]: I0120 18:41:04.572682 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="54b73115-d409-40c7-b097-bc4b5e8ec291" path="/var/lib/kubelet/pods/54b73115-d409-40c7-b097-bc4b5e8ec291/volumes" Jan 20 18:41:04 crc kubenswrapper[4558]: I0120 18:41:04.848263 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["horizon-kuttl-tests/openstack-galera-0"] Jan 20 18:41:04 crc kubenswrapper[4558]: I0120 18:41:04.849431 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="horizon-kuttl-tests/openstack-galera-0" Jan 20 18:41:04 crc kubenswrapper[4558]: I0120 18:41:04.851338 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"horizon-kuttl-tests"/"openshift-service-ca.crt" Jan 20 18:41:04 crc kubenswrapper[4558]: I0120 18:41:04.852193 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"horizon-kuttl-tests"/"openstack-scripts" Jan 20 18:41:04 crc kubenswrapper[4558]: I0120 18:41:04.852323 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"horizon-kuttl-tests"/"openstack-config-data" Jan 20 18:41:04 crc kubenswrapper[4558]: I0120 18:41:04.852454 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"horizon-kuttl-tests"/"kube-root-ca.crt" Jan 20 18:41:04 crc kubenswrapper[4558]: I0120 18:41:04.852573 4558 reflector.go:368] Caches populated for *v1.Secret from object-"horizon-kuttl-tests"/"galera-openstack-dockercfg-9kvfp" Jan 20 18:41:04 crc kubenswrapper[4558]: I0120 18:41:04.856362 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["horizon-kuttl-tests/openstack-galera-1"] Jan 20 18:41:04 crc kubenswrapper[4558]: I0120 18:41:04.857516 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/openstack-galera-1" Jan 20 18:41:04 crc kubenswrapper[4558]: I0120 18:41:04.865250 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/c0f454c3-2a4b-4d33-b137-2c92c0773378-config-data-generated\") pod \"openstack-galera-0\" (UID: \"c0f454c3-2a4b-4d33-b137-2c92c0773378\") " pod="horizon-kuttl-tests/openstack-galera-0" Jan 20 18:41:04 crc kubenswrapper[4558]: I0120 18:41:04.865298 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c0f454c3-2a4b-4d33-b137-2c92c0773378-operator-scripts\") pod \"openstack-galera-0\" (UID: \"c0f454c3-2a4b-4d33-b137-2c92c0773378\") " pod="horizon-kuttl-tests/openstack-galera-0" Jan 20 18:41:04 crc kubenswrapper[4558]: I0120 18:41:04.865333 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/c0f454c3-2a4b-4d33-b137-2c92c0773378-config-data-default\") pod \"openstack-galera-0\" (UID: \"c0f454c3-2a4b-4d33-b137-2c92c0773378\") " pod="horizon-kuttl-tests/openstack-galera-0" Jan 20 18:41:04 crc kubenswrapper[4558]: I0120 18:41:04.865353 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qgn84\" (UniqueName: \"kubernetes.io/projected/c0f454c3-2a4b-4d33-b137-2c92c0773378-kube-api-access-qgn84\") pod \"openstack-galera-0\" (UID: \"c0f454c3-2a4b-4d33-b137-2c92c0773378\") " pod="horizon-kuttl-tests/openstack-galera-0" Jan 20 18:41:04 crc kubenswrapper[4558]: I0120 18:41:04.865388 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage20-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage20-crc\") pod \"openstack-galera-0\" (UID: \"c0f454c3-2a4b-4d33-b137-2c92c0773378\") " pod="horizon-kuttl-tests/openstack-galera-0" Jan 20 18:41:04 crc kubenswrapper[4558]: I0120 18:41:04.865417 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: 
\"kubernetes.io/configmap/c0f454c3-2a4b-4d33-b137-2c92c0773378-kolla-config\") pod \"openstack-galera-0\" (UID: \"c0f454c3-2a4b-4d33-b137-2c92c0773378\") " pod="horizon-kuttl-tests/openstack-galera-0" Jan 20 18:41:04 crc kubenswrapper[4558]: I0120 18:41:04.865929 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["horizon-kuttl-tests/openstack-galera-2"] Jan 20 18:41:04 crc kubenswrapper[4558]: I0120 18:41:04.875391 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/openstack-galera-2" Jan 20 18:41:04 crc kubenswrapper[4558]: I0120 18:41:04.893416 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/openstack-galera-0"] Jan 20 18:41:04 crc kubenswrapper[4558]: I0120 18:41:04.901304 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/openstack-galera-2"] Jan 20 18:41:04 crc kubenswrapper[4558]: I0120 18:41:04.906068 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/openstack-galera-1"] Jan 20 18:41:04 crc kubenswrapper[4558]: I0120 18:41:04.967025 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/c0f454c3-2a4b-4d33-b137-2c92c0773378-config-data-generated\") pod \"openstack-galera-0\" (UID: \"c0f454c3-2a4b-4d33-b137-2c92c0773378\") " pod="horizon-kuttl-tests/openstack-galera-0" Jan 20 18:41:04 crc kubenswrapper[4558]: I0120 18:41:04.967113 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c0f454c3-2a4b-4d33-b137-2c92c0773378-operator-scripts\") pod \"openstack-galera-0\" (UID: \"c0f454c3-2a4b-4d33-b137-2c92c0773378\") " pod="horizon-kuttl-tests/openstack-galera-0" Jan 20 18:41:04 crc kubenswrapper[4558]: I0120 18:41:04.967470 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/c0f454c3-2a4b-4d33-b137-2c92c0773378-config-data-default\") pod \"openstack-galera-0\" (UID: \"c0f454c3-2a4b-4d33-b137-2c92c0773378\") " pod="horizon-kuttl-tests/openstack-galera-0" Jan 20 18:41:04 crc kubenswrapper[4558]: I0120 18:41:04.967515 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qgn84\" (UniqueName: \"kubernetes.io/projected/c0f454c3-2a4b-4d33-b137-2c92c0773378-kube-api-access-qgn84\") pod \"openstack-galera-0\" (UID: \"c0f454c3-2a4b-4d33-b137-2c92c0773378\") " pod="horizon-kuttl-tests/openstack-galera-0" Jan 20 18:41:04 crc kubenswrapper[4558]: I0120 18:41:04.967554 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage20-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage20-crc\") pod \"openstack-galera-0\" (UID: \"c0f454c3-2a4b-4d33-b137-2c92c0773378\") " pod="horizon-kuttl-tests/openstack-galera-0" Jan 20 18:41:04 crc kubenswrapper[4558]: I0120 18:41:04.967620 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c0f454c3-2a4b-4d33-b137-2c92c0773378-kolla-config\") pod \"openstack-galera-0\" (UID: \"c0f454c3-2a4b-4d33-b137-2c92c0773378\") " pod="horizon-kuttl-tests/openstack-galera-0" Jan 20 18:41:04 crc kubenswrapper[4558]: I0120 18:41:04.967997 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: 
\"kubernetes.io/empty-dir/c0f454c3-2a4b-4d33-b137-2c92c0773378-config-data-generated\") pod \"openstack-galera-0\" (UID: \"c0f454c3-2a4b-4d33-b137-2c92c0773378\") " pod="horizon-kuttl-tests/openstack-galera-0" Jan 20 18:41:04 crc kubenswrapper[4558]: I0120 18:41:04.968199 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage20-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage20-crc\") pod \"openstack-galera-0\" (UID: \"c0f454c3-2a4b-4d33-b137-2c92c0773378\") device mount path \"/mnt/openstack/pv20\"" pod="horizon-kuttl-tests/openstack-galera-0" Jan 20 18:41:04 crc kubenswrapper[4558]: I0120 18:41:04.968512 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c0f454c3-2a4b-4d33-b137-2c92c0773378-kolla-config\") pod \"openstack-galera-0\" (UID: \"c0f454c3-2a4b-4d33-b137-2c92c0773378\") " pod="horizon-kuttl-tests/openstack-galera-0" Jan 20 18:41:04 crc kubenswrapper[4558]: I0120 18:41:04.968623 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/c0f454c3-2a4b-4d33-b137-2c92c0773378-config-data-default\") pod \"openstack-galera-0\" (UID: \"c0f454c3-2a4b-4d33-b137-2c92c0773378\") " pod="horizon-kuttl-tests/openstack-galera-0" Jan 20 18:41:04 crc kubenswrapper[4558]: I0120 18:41:04.970837 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c0f454c3-2a4b-4d33-b137-2c92c0773378-operator-scripts\") pod \"openstack-galera-0\" (UID: \"c0f454c3-2a4b-4d33-b137-2c92c0773378\") " pod="horizon-kuttl-tests/openstack-galera-0" Jan 20 18:41:04 crc kubenswrapper[4558]: I0120 18:41:04.984950 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qgn84\" (UniqueName: \"kubernetes.io/projected/c0f454c3-2a4b-4d33-b137-2c92c0773378-kube-api-access-qgn84\") pod \"openstack-galera-0\" (UID: \"c0f454c3-2a4b-4d33-b137-2c92c0773378\") " pod="horizon-kuttl-tests/openstack-galera-0" Jan 20 18:41:04 crc kubenswrapper[4558]: I0120 18:41:04.985822 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage20-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage20-crc\") pod \"openstack-galera-0\" (UID: \"c0f454c3-2a4b-4d33-b137-2c92c0773378\") " pod="horizon-kuttl-tests/openstack-galera-0" Jan 20 18:41:05 crc kubenswrapper[4558]: I0120 18:41:05.069111 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/761b8e2c-cee0-49d5-8310-3eaf29af06bc-operator-scripts\") pod \"openstack-galera-2\" (UID: \"761b8e2c-cee0-49d5-8310-3eaf29af06bc\") " pod="horizon-kuttl-tests/openstack-galera-2" Jan 20 18:41:05 crc kubenswrapper[4558]: I0120 18:41:05.069203 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c6bkf\" (UniqueName: \"kubernetes.io/projected/c42137c4-70ef-4a24-85f5-d76f3883620b-kube-api-access-c6bkf\") pod \"openstack-galera-1\" (UID: \"c42137c4-70ef-4a24-85f5-d76f3883620b\") " pod="horizon-kuttl-tests/openstack-galera-1" Jan 20 18:41:05 crc kubenswrapper[4558]: I0120 18:41:05.069241 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h7dzv\" (UniqueName: 
\"kubernetes.io/projected/761b8e2c-cee0-49d5-8310-3eaf29af06bc-kube-api-access-h7dzv\") pod \"openstack-galera-2\" (UID: \"761b8e2c-cee0-49d5-8310-3eaf29af06bc\") " pod="horizon-kuttl-tests/openstack-galera-2" Jan 20 18:41:05 crc kubenswrapper[4558]: I0120 18:41:05.069271 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c42137c4-70ef-4a24-85f5-d76f3883620b-kolla-config\") pod \"openstack-galera-1\" (UID: \"c42137c4-70ef-4a24-85f5-d76f3883620b\") " pod="horizon-kuttl-tests/openstack-galera-1" Jan 20 18:41:05 crc kubenswrapper[4558]: I0120 18:41:05.069407 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-galera-2\" (UID: \"761b8e2c-cee0-49d5-8310-3eaf29af06bc\") " pod="horizon-kuttl-tests/openstack-galera-2" Jan 20 18:41:05 crc kubenswrapper[4558]: I0120 18:41:05.069518 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/c42137c4-70ef-4a24-85f5-d76f3883620b-config-data-generated\") pod \"openstack-galera-1\" (UID: \"c42137c4-70ef-4a24-85f5-d76f3883620b\") " pod="horizon-kuttl-tests/openstack-galera-1" Jan 20 18:41:05 crc kubenswrapper[4558]: I0120 18:41:05.069683 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c42137c4-70ef-4a24-85f5-d76f3883620b-operator-scripts\") pod \"openstack-galera-1\" (UID: \"c42137c4-70ef-4a24-85f5-d76f3883620b\") " pod="horizon-kuttl-tests/openstack-galera-1" Jan 20 18:41:05 crc kubenswrapper[4558]: I0120 18:41:05.069751 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/761b8e2c-cee0-49d5-8310-3eaf29af06bc-config-data-default\") pod \"openstack-galera-2\" (UID: \"761b8e2c-cee0-49d5-8310-3eaf29af06bc\") " pod="horizon-kuttl-tests/openstack-galera-2" Jan 20 18:41:05 crc kubenswrapper[4558]: I0120 18:41:05.070051 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/761b8e2c-cee0-49d5-8310-3eaf29af06bc-config-data-generated\") pod \"openstack-galera-2\" (UID: \"761b8e2c-cee0-49d5-8310-3eaf29af06bc\") " pod="horizon-kuttl-tests/openstack-galera-2" Jan 20 18:41:05 crc kubenswrapper[4558]: I0120 18:41:05.070120 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/c42137c4-70ef-4a24-85f5-d76f3883620b-config-data-default\") pod \"openstack-galera-1\" (UID: \"c42137c4-70ef-4a24-85f5-d76f3883620b\") " pod="horizon-kuttl-tests/openstack-galera-1" Jan 20 18:41:05 crc kubenswrapper[4558]: I0120 18:41:05.070202 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/761b8e2c-cee0-49d5-8310-3eaf29af06bc-kolla-config\") pod \"openstack-galera-2\" (UID: \"761b8e2c-cee0-49d5-8310-3eaf29af06bc\") " pod="horizon-kuttl-tests/openstack-galera-2" Jan 20 18:41:05 crc kubenswrapper[4558]: I0120 18:41:05.070256 4558 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage18-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage18-crc\") pod \"openstack-galera-1\" (UID: \"c42137c4-70ef-4a24-85f5-d76f3883620b\") " pod="horizon-kuttl-tests/openstack-galera-1" Jan 20 18:41:05 crc kubenswrapper[4558]: I0120 18:41:05.164104 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/openstack-galera-0" Jan 20 18:41:05 crc kubenswrapper[4558]: I0120 18:41:05.177660 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage18-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage18-crc\") pod \"openstack-galera-1\" (UID: \"c42137c4-70ef-4a24-85f5-d76f3883620b\") " pod="horizon-kuttl-tests/openstack-galera-1" Jan 20 18:41:05 crc kubenswrapper[4558]: I0120 18:41:05.177745 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/761b8e2c-cee0-49d5-8310-3eaf29af06bc-operator-scripts\") pod \"openstack-galera-2\" (UID: \"761b8e2c-cee0-49d5-8310-3eaf29af06bc\") " pod="horizon-kuttl-tests/openstack-galera-2" Jan 20 18:41:05 crc kubenswrapper[4558]: I0120 18:41:05.177814 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c6bkf\" (UniqueName: \"kubernetes.io/projected/c42137c4-70ef-4a24-85f5-d76f3883620b-kube-api-access-c6bkf\") pod \"openstack-galera-1\" (UID: \"c42137c4-70ef-4a24-85f5-d76f3883620b\") " pod="horizon-kuttl-tests/openstack-galera-1" Jan 20 18:41:05 crc kubenswrapper[4558]: I0120 18:41:05.177990 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h7dzv\" (UniqueName: \"kubernetes.io/projected/761b8e2c-cee0-49d5-8310-3eaf29af06bc-kube-api-access-h7dzv\") pod \"openstack-galera-2\" (UID: \"761b8e2c-cee0-49d5-8310-3eaf29af06bc\") " pod="horizon-kuttl-tests/openstack-galera-2" Jan 20 18:41:05 crc kubenswrapper[4558]: I0120 18:41:05.178057 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c42137c4-70ef-4a24-85f5-d76f3883620b-kolla-config\") pod \"openstack-galera-1\" (UID: \"c42137c4-70ef-4a24-85f5-d76f3883620b\") " pod="horizon-kuttl-tests/openstack-galera-1" Jan 20 18:41:05 crc kubenswrapper[4558]: I0120 18:41:05.178012 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage18-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage18-crc\") pod \"openstack-galera-1\" (UID: \"c42137c4-70ef-4a24-85f5-d76f3883620b\") device mount path \"/mnt/openstack/pv18\"" pod="horizon-kuttl-tests/openstack-galera-1" Jan 20 18:41:05 crc kubenswrapper[4558]: I0120 18:41:05.178376 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-galera-2\" (UID: \"761b8e2c-cee0-49d5-8310-3eaf29af06bc\") device mount path \"/mnt/openstack/pv04\"" pod="horizon-kuttl-tests/openstack-galera-2" Jan 20 18:41:05 crc kubenswrapper[4558]: I0120 18:41:05.178977 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c42137c4-70ef-4a24-85f5-d76f3883620b-kolla-config\") pod \"openstack-galera-1\" (UID: \"c42137c4-70ef-4a24-85f5-d76f3883620b\") " pod="horizon-kuttl-tests/openstack-galera-1" Jan 20 18:41:05 crc 
kubenswrapper[4558]: I0120 18:41:05.179262 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/761b8e2c-cee0-49d5-8310-3eaf29af06bc-operator-scripts\") pod \"openstack-galera-2\" (UID: \"761b8e2c-cee0-49d5-8310-3eaf29af06bc\") " pod="horizon-kuttl-tests/openstack-galera-2" Jan 20 18:41:05 crc kubenswrapper[4558]: I0120 18:41:05.178257 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-galera-2\" (UID: \"761b8e2c-cee0-49d5-8310-3eaf29af06bc\") " pod="horizon-kuttl-tests/openstack-galera-2" Jan 20 18:41:05 crc kubenswrapper[4558]: I0120 18:41:05.179636 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/c42137c4-70ef-4a24-85f5-d76f3883620b-config-data-generated\") pod \"openstack-galera-1\" (UID: \"c42137c4-70ef-4a24-85f5-d76f3883620b\") " pod="horizon-kuttl-tests/openstack-galera-1" Jan 20 18:41:05 crc kubenswrapper[4558]: I0120 18:41:05.179690 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c42137c4-70ef-4a24-85f5-d76f3883620b-operator-scripts\") pod \"openstack-galera-1\" (UID: \"c42137c4-70ef-4a24-85f5-d76f3883620b\") " pod="horizon-kuttl-tests/openstack-galera-1" Jan 20 18:41:05 crc kubenswrapper[4558]: I0120 18:41:05.179723 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/761b8e2c-cee0-49d5-8310-3eaf29af06bc-config-data-default\") pod \"openstack-galera-2\" (UID: \"761b8e2c-cee0-49d5-8310-3eaf29af06bc\") " pod="horizon-kuttl-tests/openstack-galera-2" Jan 20 18:41:05 crc kubenswrapper[4558]: I0120 18:41:05.179790 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/761b8e2c-cee0-49d5-8310-3eaf29af06bc-config-data-generated\") pod \"openstack-galera-2\" (UID: \"761b8e2c-cee0-49d5-8310-3eaf29af06bc\") " pod="horizon-kuttl-tests/openstack-galera-2" Jan 20 18:41:05 crc kubenswrapper[4558]: I0120 18:41:05.179840 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/c42137c4-70ef-4a24-85f5-d76f3883620b-config-data-default\") pod \"openstack-galera-1\" (UID: \"c42137c4-70ef-4a24-85f5-d76f3883620b\") " pod="horizon-kuttl-tests/openstack-galera-1" Jan 20 18:41:05 crc kubenswrapper[4558]: I0120 18:41:05.179880 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/761b8e2c-cee0-49d5-8310-3eaf29af06bc-kolla-config\") pod \"openstack-galera-2\" (UID: \"761b8e2c-cee0-49d5-8310-3eaf29af06bc\") " pod="horizon-kuttl-tests/openstack-galera-2" Jan 20 18:41:05 crc kubenswrapper[4558]: I0120 18:41:05.179905 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/c42137c4-70ef-4a24-85f5-d76f3883620b-config-data-generated\") pod \"openstack-galera-1\" (UID: \"c42137c4-70ef-4a24-85f5-d76f3883620b\") " pod="horizon-kuttl-tests/openstack-galera-1" Jan 20 18:41:05 crc kubenswrapper[4558]: I0120 18:41:05.180709 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"config-data-default\" (UniqueName: \"kubernetes.io/configmap/761b8e2c-cee0-49d5-8310-3eaf29af06bc-config-data-default\") pod \"openstack-galera-2\" (UID: \"761b8e2c-cee0-49d5-8310-3eaf29af06bc\") " pod="horizon-kuttl-tests/openstack-galera-2" Jan 20 18:41:05 crc kubenswrapper[4558]: I0120 18:41:05.181083 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/761b8e2c-cee0-49d5-8310-3eaf29af06bc-config-data-generated\") pod \"openstack-galera-2\" (UID: \"761b8e2c-cee0-49d5-8310-3eaf29af06bc\") " pod="horizon-kuttl-tests/openstack-galera-2" Jan 20 18:41:05 crc kubenswrapper[4558]: I0120 18:41:05.181114 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c42137c4-70ef-4a24-85f5-d76f3883620b-operator-scripts\") pod \"openstack-galera-1\" (UID: \"c42137c4-70ef-4a24-85f5-d76f3883620b\") " pod="horizon-kuttl-tests/openstack-galera-1" Jan 20 18:41:05 crc kubenswrapper[4558]: I0120 18:41:05.181646 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/761b8e2c-cee0-49d5-8310-3eaf29af06bc-kolla-config\") pod \"openstack-galera-2\" (UID: \"761b8e2c-cee0-49d5-8310-3eaf29af06bc\") " pod="horizon-kuttl-tests/openstack-galera-2" Jan 20 18:41:05 crc kubenswrapper[4558]: I0120 18:41:05.181847 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/c42137c4-70ef-4a24-85f5-d76f3883620b-config-data-default\") pod \"openstack-galera-1\" (UID: \"c42137c4-70ef-4a24-85f5-d76f3883620b\") " pod="horizon-kuttl-tests/openstack-galera-1" Jan 20 18:41:05 crc kubenswrapper[4558]: I0120 18:41:05.192384 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h7dzv\" (UniqueName: \"kubernetes.io/projected/761b8e2c-cee0-49d5-8310-3eaf29af06bc-kube-api-access-h7dzv\") pod \"openstack-galera-2\" (UID: \"761b8e2c-cee0-49d5-8310-3eaf29af06bc\") " pod="horizon-kuttl-tests/openstack-galera-2" Jan 20 18:41:05 crc kubenswrapper[4558]: I0120 18:41:05.192522 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c6bkf\" (UniqueName: \"kubernetes.io/projected/c42137c4-70ef-4a24-85f5-d76f3883620b-kube-api-access-c6bkf\") pod \"openstack-galera-1\" (UID: \"c42137c4-70ef-4a24-85f5-d76f3883620b\") " pod="horizon-kuttl-tests/openstack-galera-1" Jan 20 18:41:05 crc kubenswrapper[4558]: I0120 18:41:05.194517 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-galera-2\" (UID: \"761b8e2c-cee0-49d5-8310-3eaf29af06bc\") " pod="horizon-kuttl-tests/openstack-galera-2" Jan 20 18:41:05 crc kubenswrapper[4558]: I0120 18:41:05.196399 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage18-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage18-crc\") pod \"openstack-galera-1\" (UID: \"c42137c4-70ef-4a24-85f5-d76f3883620b\") " pod="horizon-kuttl-tests/openstack-galera-1" Jan 20 18:41:05 crc kubenswrapper[4558]: I0120 18:41:05.299295 4558 generic.go:334] "Generic (PLEG): container finished" podID="55e27c4e-389f-4ae0-814b-c8a92814add5" containerID="228cd3a6fde62665397ae5ffd3353e6f53d4d82fb3ade94a962d5ddfcedc43e3" exitCode=0 Jan 20 18:41:05 crc kubenswrapper[4558]: I0120 18:41:05.299456 4558 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1wpjhp" event={"ID":"55e27c4e-389f-4ae0-814b-c8a92814add5","Type":"ContainerDied","Data":"228cd3a6fde62665397ae5ffd3353e6f53d4d82fb3ade94a962d5ddfcedc43e3"} Jan 20 18:41:05 crc kubenswrapper[4558]: I0120 18:41:05.309139 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-dqwqb" event={"ID":"d3dc106a-9ff2-49b9-bc47-3d986cec7afa","Type":"ContainerStarted","Data":"df0354184c0307e1f1972d3215362617b02dbd1c0c7dc94d36c92202409746db"} Jan 20 18:41:05 crc kubenswrapper[4558]: I0120 18:41:05.344100 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-index-dqwqb" podStartSLOduration=2.784699204 podStartE2EDuration="3.344069184s" podCreationTimestamp="2026-01-20 18:41:02 +0000 UTC" firstStartedPulling="2026-01-20 18:41:03.627868767 +0000 UTC m=+7157.388206733" lastFinishedPulling="2026-01-20 18:41:04.187238746 +0000 UTC m=+7157.947576713" observedRunningTime="2026-01-20 18:41:05.343564736 +0000 UTC m=+7159.103902703" watchObservedRunningTime="2026-01-20 18:41:05.344069184 +0000 UTC m=+7159.104407152" Jan 20 18:41:05 crc kubenswrapper[4558]: I0120 18:41:05.478792 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/openstack-galera-1" Jan 20 18:41:05 crc kubenswrapper[4558]: I0120 18:41:05.488517 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/openstack-galera-2" Jan 20 18:41:05 crc kubenswrapper[4558]: I0120 18:41:05.574017 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/openstack-galera-0"] Jan 20 18:41:05 crc kubenswrapper[4558]: I0120 18:41:05.731288 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/openstack-galera-2"] Jan 20 18:41:05 crc kubenswrapper[4558]: I0120 18:41:05.893890 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/openstack-galera-1"] Jan 20 18:41:05 crc kubenswrapper[4558]: W0120 18:41:05.895626 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc42137c4_70ef_4a24_85f5_d76f3883620b.slice/crio-0bddfd51f0ab571316a3af4ee910d8865776bdcf377b5b7ee6f80a759d92e7f2 WatchSource:0}: Error finding container 0bddfd51f0ab571316a3af4ee910d8865776bdcf377b5b7ee6f80a759d92e7f2: Status 404 returned error can't find the container with id 0bddfd51f0ab571316a3af4ee910d8865776bdcf377b5b7ee6f80a759d92e7f2 Jan 20 18:41:06 crc kubenswrapper[4558]: I0120 18:41:06.316353 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/openstack-galera-2" event={"ID":"761b8e2c-cee0-49d5-8310-3eaf29af06bc","Type":"ContainerStarted","Data":"8f8a09703a8fa88474a892fa8fded2fcadd55d5c2f197e56f808d9b8a4404c19"} Jan 20 18:41:06 crc kubenswrapper[4558]: I0120 18:41:06.316714 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/openstack-galera-2" event={"ID":"761b8e2c-cee0-49d5-8310-3eaf29af06bc","Type":"ContainerStarted","Data":"a260c33c14b9f388143f915fe24279c11e1dea53fe11fdea104e7a57d73f6814"} Jan 20 18:41:06 crc kubenswrapper[4558]: I0120 18:41:06.317805 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/openstack-galera-1" 
event={"ID":"c42137c4-70ef-4a24-85f5-d76f3883620b","Type":"ContainerStarted","Data":"2c0e0fa1bacf4208658bf79dda5c95bedac7a0121b74d1c1e9c398b99613418f"} Jan 20 18:41:06 crc kubenswrapper[4558]: I0120 18:41:06.317829 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/openstack-galera-1" event={"ID":"c42137c4-70ef-4a24-85f5-d76f3883620b","Type":"ContainerStarted","Data":"0bddfd51f0ab571316a3af4ee910d8865776bdcf377b5b7ee6f80a759d92e7f2"} Jan 20 18:41:06 crc kubenswrapper[4558]: I0120 18:41:06.319440 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/openstack-galera-0" event={"ID":"c0f454c3-2a4b-4d33-b137-2c92c0773378","Type":"ContainerStarted","Data":"88d96a94954ba1d41bd6a5777f4e5d4d73c7400a921cbb812764d351796baa44"} Jan 20 18:41:06 crc kubenswrapper[4558]: I0120 18:41:06.319493 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/openstack-galera-0" event={"ID":"c0f454c3-2a4b-4d33-b137-2c92c0773378","Type":"ContainerStarted","Data":"3a09cfd53101e0c5beac39f6249317fdec67eeb3fb89ed4f86b2ce213525bf6d"} Jan 20 18:41:06 crc kubenswrapper[4558]: I0120 18:41:06.654331 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1wpjhp" Jan 20 18:41:06 crc kubenswrapper[4558]: I0120 18:41:06.807830 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2mcr\" (UniqueName: \"kubernetes.io/projected/55e27c4e-389f-4ae0-814b-c8a92814add5-kube-api-access-x2mcr\") pod \"55e27c4e-389f-4ae0-814b-c8a92814add5\" (UID: \"55e27c4e-389f-4ae0-814b-c8a92814add5\") " Jan 20 18:41:06 crc kubenswrapper[4558]: I0120 18:41:06.808288 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/55e27c4e-389f-4ae0-814b-c8a92814add5-util\") pod \"55e27c4e-389f-4ae0-814b-c8a92814add5\" (UID: \"55e27c4e-389f-4ae0-814b-c8a92814add5\") " Jan 20 18:41:06 crc kubenswrapper[4558]: I0120 18:41:06.808375 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/55e27c4e-389f-4ae0-814b-c8a92814add5-bundle\") pod \"55e27c4e-389f-4ae0-814b-c8a92814add5\" (UID: \"55e27c4e-389f-4ae0-814b-c8a92814add5\") " Jan 20 18:41:06 crc kubenswrapper[4558]: I0120 18:41:06.810019 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/55e27c4e-389f-4ae0-814b-c8a92814add5-bundle" (OuterVolumeSpecName: "bundle") pod "55e27c4e-389f-4ae0-814b-c8a92814add5" (UID: "55e27c4e-389f-4ae0-814b-c8a92814add5"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:41:06 crc kubenswrapper[4558]: I0120 18:41:06.813819 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/55e27c4e-389f-4ae0-814b-c8a92814add5-kube-api-access-x2mcr" (OuterVolumeSpecName: "kube-api-access-x2mcr") pod "55e27c4e-389f-4ae0-814b-c8a92814add5" (UID: "55e27c4e-389f-4ae0-814b-c8a92814add5"). InnerVolumeSpecName "kube-api-access-x2mcr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:41:06 crc kubenswrapper[4558]: I0120 18:41:06.821786 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/55e27c4e-389f-4ae0-814b-c8a92814add5-util" (OuterVolumeSpecName: "util") pod "55e27c4e-389f-4ae0-814b-c8a92814add5" (UID: "55e27c4e-389f-4ae0-814b-c8a92814add5"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:41:06 crc kubenswrapper[4558]: I0120 18:41:06.910279 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2mcr\" (UniqueName: \"kubernetes.io/projected/55e27c4e-389f-4ae0-814b-c8a92814add5-kube-api-access-x2mcr\") on node \"crc\" DevicePath \"\"" Jan 20 18:41:06 crc kubenswrapper[4558]: I0120 18:41:06.910311 4558 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/55e27c4e-389f-4ae0-814b-c8a92814add5-util\") on node \"crc\" DevicePath \"\"" Jan 20 18:41:06 crc kubenswrapper[4558]: I0120 18:41:06.910323 4558 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/55e27c4e-389f-4ae0-814b-c8a92814add5-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:41:07 crc kubenswrapper[4558]: I0120 18:41:07.329767 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1wpjhp" event={"ID":"55e27c4e-389f-4ae0-814b-c8a92814add5","Type":"ContainerDied","Data":"84eab21624d9375c3bc476269a5a1f0006fcbb851c9f55af1c69a7e02ad04932"} Jan 20 18:41:07 crc kubenswrapper[4558]: I0120 18:41:07.329840 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="84eab21624d9375c3bc476269a5a1f0006fcbb851c9f55af1c69a7e02ad04932" Jan 20 18:41:07 crc kubenswrapper[4558]: I0120 18:41:07.330474 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1wpjhp" Jan 20 18:41:09 crc kubenswrapper[4558]: I0120 18:41:09.343963 4558 generic.go:334] "Generic (PLEG): container finished" podID="c0f454c3-2a4b-4d33-b137-2c92c0773378" containerID="88d96a94954ba1d41bd6a5777f4e5d4d73c7400a921cbb812764d351796baa44" exitCode=0 Jan 20 18:41:09 crc kubenswrapper[4558]: I0120 18:41:09.344046 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/openstack-galera-0" event={"ID":"c0f454c3-2a4b-4d33-b137-2c92c0773378","Type":"ContainerDied","Data":"88d96a94954ba1d41bd6a5777f4e5d4d73c7400a921cbb812764d351796baa44"} Jan 20 18:41:09 crc kubenswrapper[4558]: I0120 18:41:09.346879 4558 generic.go:334] "Generic (PLEG): container finished" podID="761b8e2c-cee0-49d5-8310-3eaf29af06bc" containerID="8f8a09703a8fa88474a892fa8fded2fcadd55d5c2f197e56f808d9b8a4404c19" exitCode=0 Jan 20 18:41:09 crc kubenswrapper[4558]: I0120 18:41:09.346904 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/openstack-galera-2" event={"ID":"761b8e2c-cee0-49d5-8310-3eaf29af06bc","Type":"ContainerDied","Data":"8f8a09703a8fa88474a892fa8fded2fcadd55d5c2f197e56f808d9b8a4404c19"} Jan 20 18:41:09 crc kubenswrapper[4558]: I0120 18:41:09.348960 4558 generic.go:334] "Generic (PLEG): container finished" podID="c42137c4-70ef-4a24-85f5-d76f3883620b" containerID="2c0e0fa1bacf4208658bf79dda5c95bedac7a0121b74d1c1e9c398b99613418f" exitCode=0 Jan 20 18:41:09 crc kubenswrapper[4558]: I0120 18:41:09.349012 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/openstack-galera-1" event={"ID":"c42137c4-70ef-4a24-85f5-d76f3883620b","Type":"ContainerDied","Data":"2c0e0fa1bacf4208658bf79dda5c95bedac7a0121b74d1c1e9c398b99613418f"} Jan 20 18:41:10 crc kubenswrapper[4558]: I0120 18:41:10.361957 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/openstack-galera-0" event={"ID":"c0f454c3-2a4b-4d33-b137-2c92c0773378","Type":"ContainerStarted","Data":"0d3a126d5497a22d039dda1193d044fde10dd62c9fe892d8e5e517a70a778453"} Jan 20 18:41:10 crc kubenswrapper[4558]: I0120 18:41:10.364041 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/openstack-galera-2" event={"ID":"761b8e2c-cee0-49d5-8310-3eaf29af06bc","Type":"ContainerStarted","Data":"a19305d22ff904d020f81094f35b6533077dd226eabc2094fee643c83ae14064"} Jan 20 18:41:10 crc kubenswrapper[4558]: I0120 18:41:10.378690 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/openstack-galera-1" event={"ID":"c42137c4-70ef-4a24-85f5-d76f3883620b","Type":"ContainerStarted","Data":"f15aced75af8322f8bfc910378537e364f8e7d75fd8f4eb285e818331324d226"} Jan 20 18:41:10 crc kubenswrapper[4558]: I0120 18:41:10.391001 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="horizon-kuttl-tests/openstack-galera-0" podStartSLOduration=7.390991485 podStartE2EDuration="7.390991485s" podCreationTimestamp="2026-01-20 18:41:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:41:10.385293182 +0000 UTC m=+7164.145631148" watchObservedRunningTime="2026-01-20 18:41:10.390991485 +0000 UTC m=+7164.151329452" Jan 20 18:41:10 crc kubenswrapper[4558]: I0120 18:41:10.401019 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="horizon-kuttl-tests/openstack-galera-1" podStartSLOduration=7.401011786 
podStartE2EDuration="7.401011786s" podCreationTimestamp="2026-01-20 18:41:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:41:10.4000511 +0000 UTC m=+7164.160389087" watchObservedRunningTime="2026-01-20 18:41:10.401011786 +0000 UTC m=+7164.161349753" Jan 20 18:41:10 crc kubenswrapper[4558]: I0120 18:41:10.413757 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="horizon-kuttl-tests/openstack-galera-2" podStartSLOduration=7.413742683 podStartE2EDuration="7.413742683s" podCreationTimestamp="2026-01-20 18:41:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:41:10.412836859 +0000 UTC m=+7164.173174827" watchObservedRunningTime="2026-01-20 18:41:10.413742683 +0000 UTC m=+7164.174080650" Jan 20 18:41:13 crc kubenswrapper[4558]: I0120 18:41:13.250564 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/infra-operator-index-dqwqb" Jan 20 18:41:13 crc kubenswrapper[4558]: I0120 18:41:13.251009 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-index-dqwqb" Jan 20 18:41:13 crc kubenswrapper[4558]: I0120 18:41:13.277334 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/infra-operator-index-dqwqb" Jan 20 18:41:13 crc kubenswrapper[4558]: I0120 18:41:13.438026 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-index-dqwqb" Jan 20 18:41:14 crc kubenswrapper[4558]: I0120 18:41:14.566182 4558 scope.go:117] "RemoveContainer" containerID="77ce04c6daee7d4f9b776bd1c80b448f9c4750d5cb5c7dd182ff28963a4bcf99" Jan 20 18:41:14 crc kubenswrapper[4558]: E0120 18:41:14.566395 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:41:15 crc kubenswrapper[4558]: I0120 18:41:15.165235 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="horizon-kuttl-tests/openstack-galera-0" Jan 20 18:41:15 crc kubenswrapper[4558]: I0120 18:41:15.165290 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="horizon-kuttl-tests/openstack-galera-0" Jan 20 18:41:15 crc kubenswrapper[4558]: I0120 18:41:15.479930 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="horizon-kuttl-tests/openstack-galera-1" Jan 20 18:41:15 crc kubenswrapper[4558]: I0120 18:41:15.480318 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="horizon-kuttl-tests/openstack-galera-1" Jan 20 18:41:15 crc kubenswrapper[4558]: I0120 18:41:15.489350 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="horizon-kuttl-tests/openstack-galera-2" Jan 20 18:41:15 crc kubenswrapper[4558]: I0120 18:41:15.489648 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="horizon-kuttl-tests/openstack-galera-2" Jan 20 18:41:19 crc kubenswrapper[4558]: I0120 18:41:19.751323 4558 kubelet.go:2542] "SyncLoop (probe)" 
probe="startup" status="started" pod="horizon-kuttl-tests/openstack-galera-2" Jan 20 18:41:19 crc kubenswrapper[4558]: I0120 18:41:19.813656 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="horizon-kuttl-tests/openstack-galera-2" Jan 20 18:41:20 crc kubenswrapper[4558]: E0120 18:41:20.047896 4558 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 192.168.25.8:58836->192.168.25.8:43883: write tcp 192.168.25.8:58836->192.168.25.8:43883: write: broken pipe Jan 20 18:41:23 crc kubenswrapper[4558]: I0120 18:41:23.925104 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["horizon-kuttl-tests/root-account-create-update-mqt2k"] Jan 20 18:41:23 crc kubenswrapper[4558]: E0120 18:41:23.925703 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55e27c4e-389f-4ae0-814b-c8a92814add5" containerName="extract" Jan 20 18:41:23 crc kubenswrapper[4558]: I0120 18:41:23.925717 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="55e27c4e-389f-4ae0-814b-c8a92814add5" containerName="extract" Jan 20 18:41:23 crc kubenswrapper[4558]: E0120 18:41:23.925740 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55e27c4e-389f-4ae0-814b-c8a92814add5" containerName="util" Jan 20 18:41:23 crc kubenswrapper[4558]: I0120 18:41:23.925745 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="55e27c4e-389f-4ae0-814b-c8a92814add5" containerName="util" Jan 20 18:41:23 crc kubenswrapper[4558]: E0120 18:41:23.925771 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55e27c4e-389f-4ae0-814b-c8a92814add5" containerName="pull" Jan 20 18:41:23 crc kubenswrapper[4558]: I0120 18:41:23.925778 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="55e27c4e-389f-4ae0-814b-c8a92814add5" containerName="pull" Jan 20 18:41:23 crc kubenswrapper[4558]: I0120 18:41:23.925887 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="55e27c4e-389f-4ae0-814b-c8a92814add5" containerName="extract" Jan 20 18:41:23 crc kubenswrapper[4558]: I0120 18:41:23.926369 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="horizon-kuttl-tests/root-account-create-update-mqt2k" Jan 20 18:41:23 crc kubenswrapper[4558]: I0120 18:41:23.928589 4558 reflector.go:368] Caches populated for *v1.Secret from object-"horizon-kuttl-tests"/"openstack-mariadb-root-db-secret" Jan 20 18:41:23 crc kubenswrapper[4558]: I0120 18:41:23.932389 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/root-account-create-update-mqt2k"] Jan 20 18:41:23 crc kubenswrapper[4558]: I0120 18:41:23.965827 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2lrm4\" (UniqueName: \"kubernetes.io/projected/76cacf7d-5f06-48d2-8ba1-95e760fbe91e-kube-api-access-2lrm4\") pod \"root-account-create-update-mqt2k\" (UID: \"76cacf7d-5f06-48d2-8ba1-95e760fbe91e\") " pod="horizon-kuttl-tests/root-account-create-update-mqt2k" Jan 20 18:41:23 crc kubenswrapper[4558]: I0120 18:41:23.966049 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/76cacf7d-5f06-48d2-8ba1-95e760fbe91e-operator-scripts\") pod \"root-account-create-update-mqt2k\" (UID: \"76cacf7d-5f06-48d2-8ba1-95e760fbe91e\") " pod="horizon-kuttl-tests/root-account-create-update-mqt2k" Jan 20 18:41:24 crc kubenswrapper[4558]: I0120 18:41:24.068512 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/76cacf7d-5f06-48d2-8ba1-95e760fbe91e-operator-scripts\") pod \"root-account-create-update-mqt2k\" (UID: \"76cacf7d-5f06-48d2-8ba1-95e760fbe91e\") " pod="horizon-kuttl-tests/root-account-create-update-mqt2k" Jan 20 18:41:24 crc kubenswrapper[4558]: I0120 18:41:24.068715 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2lrm4\" (UniqueName: \"kubernetes.io/projected/76cacf7d-5f06-48d2-8ba1-95e760fbe91e-kube-api-access-2lrm4\") pod \"root-account-create-update-mqt2k\" (UID: \"76cacf7d-5f06-48d2-8ba1-95e760fbe91e\") " pod="horizon-kuttl-tests/root-account-create-update-mqt2k" Jan 20 18:41:24 crc kubenswrapper[4558]: I0120 18:41:24.069531 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/76cacf7d-5f06-48d2-8ba1-95e760fbe91e-operator-scripts\") pod \"root-account-create-update-mqt2k\" (UID: \"76cacf7d-5f06-48d2-8ba1-95e760fbe91e\") " pod="horizon-kuttl-tests/root-account-create-update-mqt2k" Jan 20 18:41:24 crc kubenswrapper[4558]: I0120 18:41:24.086490 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2lrm4\" (UniqueName: \"kubernetes.io/projected/76cacf7d-5f06-48d2-8ba1-95e760fbe91e-kube-api-access-2lrm4\") pod \"root-account-create-update-mqt2k\" (UID: \"76cacf7d-5f06-48d2-8ba1-95e760fbe91e\") " pod="horizon-kuttl-tests/root-account-create-update-mqt2k" Jan 20 18:41:24 crc kubenswrapper[4558]: I0120 18:41:24.244995 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="horizon-kuttl-tests/root-account-create-update-mqt2k" Jan 20 18:41:24 crc kubenswrapper[4558]: I0120 18:41:24.632648 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/root-account-create-update-mqt2k"] Jan 20 18:41:25 crc kubenswrapper[4558]: I0120 18:41:25.497596 4558 generic.go:334] "Generic (PLEG): container finished" podID="76cacf7d-5f06-48d2-8ba1-95e760fbe91e" containerID="d3f06b4a5105b533a4fcf4a0a4aca89122b75111f61a99be6bfa56afb037e949" exitCode=0 Jan 20 18:41:25 crc kubenswrapper[4558]: I0120 18:41:25.497650 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/root-account-create-update-mqt2k" event={"ID":"76cacf7d-5f06-48d2-8ba1-95e760fbe91e","Type":"ContainerDied","Data":"d3f06b4a5105b533a4fcf4a0a4aca89122b75111f61a99be6bfa56afb037e949"} Jan 20 18:41:25 crc kubenswrapper[4558]: I0120 18:41:25.497682 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/root-account-create-update-mqt2k" event={"ID":"76cacf7d-5f06-48d2-8ba1-95e760fbe91e","Type":"ContainerStarted","Data":"52fb7cdb1a66617692429fed0cc6edcb6fa5caf8cbf3a100783b0b6b5f028080"} Jan 20 18:41:25 crc kubenswrapper[4558]: I0120 18:41:25.557673 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="horizon-kuttl-tests/openstack-galera-2" podUID="761b8e2c-cee0-49d5-8310-3eaf29af06bc" containerName="galera" probeResult="failure" output=< Jan 20 18:41:25 crc kubenswrapper[4558]: wsrep_local_state_comment (Donor/Desynced) differs from Synced Jan 20 18:41:25 crc kubenswrapper[4558]: > Jan 20 18:41:26 crc kubenswrapper[4558]: I0120 18:41:26.364785 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="horizon-kuttl-tests/openstack-galera-1" Jan 20 18:41:26 crc kubenswrapper[4558]: I0120 18:41:26.427925 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="horizon-kuttl-tests/openstack-galera-1" Jan 20 18:41:26 crc kubenswrapper[4558]: I0120 18:41:26.570145 4558 scope.go:117] "RemoveContainer" containerID="77ce04c6daee7d4f9b776bd1c80b448f9c4750d5cb5c7dd182ff28963a4bcf99" Jan 20 18:41:26 crc kubenswrapper[4558]: E0120 18:41:26.570926 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:41:26 crc kubenswrapper[4558]: I0120 18:41:26.768042 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="horizon-kuttl-tests/root-account-create-update-mqt2k" Jan 20 18:41:26 crc kubenswrapper[4558]: I0120 18:41:26.912565 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2lrm4\" (UniqueName: \"kubernetes.io/projected/76cacf7d-5f06-48d2-8ba1-95e760fbe91e-kube-api-access-2lrm4\") pod \"76cacf7d-5f06-48d2-8ba1-95e760fbe91e\" (UID: \"76cacf7d-5f06-48d2-8ba1-95e760fbe91e\") " Jan 20 18:41:26 crc kubenswrapper[4558]: I0120 18:41:26.912677 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/76cacf7d-5f06-48d2-8ba1-95e760fbe91e-operator-scripts\") pod \"76cacf7d-5f06-48d2-8ba1-95e760fbe91e\" (UID: \"76cacf7d-5f06-48d2-8ba1-95e760fbe91e\") " Jan 20 18:41:26 crc kubenswrapper[4558]: I0120 18:41:26.913111 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/76cacf7d-5f06-48d2-8ba1-95e760fbe91e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "76cacf7d-5f06-48d2-8ba1-95e760fbe91e" (UID: "76cacf7d-5f06-48d2-8ba1-95e760fbe91e"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:41:26 crc kubenswrapper[4558]: I0120 18:41:26.918888 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/76cacf7d-5f06-48d2-8ba1-95e760fbe91e-kube-api-access-2lrm4" (OuterVolumeSpecName: "kube-api-access-2lrm4") pod "76cacf7d-5f06-48d2-8ba1-95e760fbe91e" (UID: "76cacf7d-5f06-48d2-8ba1-95e760fbe91e"). InnerVolumeSpecName "kube-api-access-2lrm4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:41:27 crc kubenswrapper[4558]: I0120 18:41:27.014939 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2lrm4\" (UniqueName: \"kubernetes.io/projected/76cacf7d-5f06-48d2-8ba1-95e760fbe91e-kube-api-access-2lrm4\") on node \"crc\" DevicePath \"\"" Jan 20 18:41:27 crc kubenswrapper[4558]: I0120 18:41:27.014973 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/76cacf7d-5f06-48d2-8ba1-95e760fbe91e-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 18:41:27 crc kubenswrapper[4558]: I0120 18:41:27.512330 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/root-account-create-update-mqt2k" event={"ID":"76cacf7d-5f06-48d2-8ba1-95e760fbe91e","Type":"ContainerDied","Data":"52fb7cdb1a66617692429fed0cc6edcb6fa5caf8cbf3a100783b0b6b5f028080"} Jan 20 18:41:27 crc kubenswrapper[4558]: I0120 18:41:27.512583 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="52fb7cdb1a66617692429fed0cc6edcb6fa5caf8cbf3a100783b0b6b5f028080" Jan 20 18:41:27 crc kubenswrapper[4558]: I0120 18:41:27.512365 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="horizon-kuttl-tests/root-account-create-update-mqt2k" Jan 20 18:41:27 crc kubenswrapper[4558]: E0120 18:41:27.617801 4558 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod76cacf7d_5f06_48d2_8ba1_95e760fbe91e.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod76cacf7d_5f06_48d2_8ba1_95e760fbe91e.slice/crio-52fb7cdb1a66617692429fed0cc6edcb6fa5caf8cbf3a100783b0b6b5f028080\": RecentStats: unable to find data in memory cache]" Jan 20 18:41:31 crc kubenswrapper[4558]: I0120 18:41:31.535705 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="horizon-kuttl-tests/openstack-galera-0" Jan 20 18:41:31 crc kubenswrapper[4558]: I0120 18:41:31.604796 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="horizon-kuttl-tests/openstack-galera-0" Jan 20 18:41:34 crc kubenswrapper[4558]: I0120 18:41:34.372053 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-797f876cdb-fxjkw"] Jan 20 18:41:34 crc kubenswrapper[4558]: E0120 18:41:34.372671 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76cacf7d-5f06-48d2-8ba1-95e760fbe91e" containerName="mariadb-account-create-update" Jan 20 18:41:34 crc kubenswrapper[4558]: I0120 18:41:34.372686 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="76cacf7d-5f06-48d2-8ba1-95e760fbe91e" containerName="mariadb-account-create-update" Jan 20 18:41:34 crc kubenswrapper[4558]: I0120 18:41:34.372827 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="76cacf7d-5f06-48d2-8ba1-95e760fbe91e" containerName="mariadb-account-create-update" Jan 20 18:41:34 crc kubenswrapper[4558]: I0120 18:41:34.373372 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-797f876cdb-fxjkw" Jan 20 18:41:34 crc kubenswrapper[4558]: I0120 18:41:34.375544 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-service-cert" Jan 20 18:41:34 crc kubenswrapper[4558]: I0120 18:41:34.375784 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-65mx2" Jan 20 18:41:34 crc kubenswrapper[4558]: I0120 18:41:34.394148 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-797f876cdb-fxjkw"] Jan 20 18:41:34 crc kubenswrapper[4558]: I0120 18:41:34.536102 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/623a0db3-27c7-4e16-b902-784517ccad46-apiservice-cert\") pod \"infra-operator-controller-manager-797f876cdb-fxjkw\" (UID: \"623a0db3-27c7-4e16-b902-784517ccad46\") " pod="openstack-operators/infra-operator-controller-manager-797f876cdb-fxjkw" Jan 20 18:41:34 crc kubenswrapper[4558]: I0120 18:41:34.536270 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fctvz\" (UniqueName: \"kubernetes.io/projected/623a0db3-27c7-4e16-b902-784517ccad46-kube-api-access-fctvz\") pod \"infra-operator-controller-manager-797f876cdb-fxjkw\" (UID: \"623a0db3-27c7-4e16-b902-784517ccad46\") " pod="openstack-operators/infra-operator-controller-manager-797f876cdb-fxjkw" Jan 20 18:41:34 crc kubenswrapper[4558]: I0120 18:41:34.536428 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/623a0db3-27c7-4e16-b902-784517ccad46-webhook-cert\") pod \"infra-operator-controller-manager-797f876cdb-fxjkw\" (UID: \"623a0db3-27c7-4e16-b902-784517ccad46\") " pod="openstack-operators/infra-operator-controller-manager-797f876cdb-fxjkw" Jan 20 18:41:34 crc kubenswrapper[4558]: I0120 18:41:34.638003 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fctvz\" (UniqueName: \"kubernetes.io/projected/623a0db3-27c7-4e16-b902-784517ccad46-kube-api-access-fctvz\") pod \"infra-operator-controller-manager-797f876cdb-fxjkw\" (UID: \"623a0db3-27c7-4e16-b902-784517ccad46\") " pod="openstack-operators/infra-operator-controller-manager-797f876cdb-fxjkw" Jan 20 18:41:34 crc kubenswrapper[4558]: I0120 18:41:34.638106 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/623a0db3-27c7-4e16-b902-784517ccad46-webhook-cert\") pod \"infra-operator-controller-manager-797f876cdb-fxjkw\" (UID: \"623a0db3-27c7-4e16-b902-784517ccad46\") " pod="openstack-operators/infra-operator-controller-manager-797f876cdb-fxjkw" Jan 20 18:41:34 crc kubenswrapper[4558]: I0120 18:41:34.638143 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/623a0db3-27c7-4e16-b902-784517ccad46-apiservice-cert\") pod \"infra-operator-controller-manager-797f876cdb-fxjkw\" (UID: \"623a0db3-27c7-4e16-b902-784517ccad46\") " pod="openstack-operators/infra-operator-controller-manager-797f876cdb-fxjkw" Jan 20 18:41:34 crc kubenswrapper[4558]: I0120 18:41:34.649866 4558 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/623a0db3-27c7-4e16-b902-784517ccad46-apiservice-cert\") pod \"infra-operator-controller-manager-797f876cdb-fxjkw\" (UID: \"623a0db3-27c7-4e16-b902-784517ccad46\") " pod="openstack-operators/infra-operator-controller-manager-797f876cdb-fxjkw" Jan 20 18:41:34 crc kubenswrapper[4558]: I0120 18:41:34.649887 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/623a0db3-27c7-4e16-b902-784517ccad46-webhook-cert\") pod \"infra-operator-controller-manager-797f876cdb-fxjkw\" (UID: \"623a0db3-27c7-4e16-b902-784517ccad46\") " pod="openstack-operators/infra-operator-controller-manager-797f876cdb-fxjkw" Jan 20 18:41:34 crc kubenswrapper[4558]: I0120 18:41:34.652572 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fctvz\" (UniqueName: \"kubernetes.io/projected/623a0db3-27c7-4e16-b902-784517ccad46-kube-api-access-fctvz\") pod \"infra-operator-controller-manager-797f876cdb-fxjkw\" (UID: \"623a0db3-27c7-4e16-b902-784517ccad46\") " pod="openstack-operators/infra-operator-controller-manager-797f876cdb-fxjkw" Jan 20 18:41:34 crc kubenswrapper[4558]: I0120 18:41:34.689897 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-797f876cdb-fxjkw" Jan 20 18:41:35 crc kubenswrapper[4558]: I0120 18:41:35.082095 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-797f876cdb-fxjkw"] Jan 20 18:41:35 crc kubenswrapper[4558]: I0120 18:41:35.574839 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-797f876cdb-fxjkw" event={"ID":"623a0db3-27c7-4e16-b902-784517ccad46","Type":"ContainerStarted","Data":"6007c648fde8fe226779756fc6e3041e21ca68284b37695f96aca4e839d68feb"} Jan 20 18:41:35 crc kubenswrapper[4558]: I0120 18:41:35.575276 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-797f876cdb-fxjkw" event={"ID":"623a0db3-27c7-4e16-b902-784517ccad46","Type":"ContainerStarted","Data":"e2c6f714e81df8683527700c111ebf24c52141202e8dd521d7ffbef5626d3e5a"} Jan 20 18:41:35 crc kubenswrapper[4558]: I0120 18:41:35.575303 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-797f876cdb-fxjkw" Jan 20 18:41:35 crc kubenswrapper[4558]: I0120 18:41:35.598737 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-797f876cdb-fxjkw" podStartSLOduration=1.598716282 podStartE2EDuration="1.598716282s" podCreationTimestamp="2026-01-20 18:41:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:41:35.596416428 +0000 UTC m=+7189.356754386" watchObservedRunningTime="2026-01-20 18:41:35.598716282 +0000 UTC m=+7189.359054250" Jan 20 18:41:38 crc kubenswrapper[4558]: I0120 18:41:38.728212 4558 scope.go:117] "RemoveContainer" containerID="581d77ede432386c7e0d9c4608223b26803351bad0061d13f5e060384b5ffaac" Jan 20 18:41:39 crc kubenswrapper[4558]: I0120 18:41:39.566415 4558 scope.go:117] "RemoveContainer" containerID="77ce04c6daee7d4f9b776bd1c80b448f9c4750d5cb5c7dd182ff28963a4bcf99" Jan 20 18:41:39 crc kubenswrapper[4558]: E0120 18:41:39.567027 4558 pod_workers.go:1301] 
"Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:41:44 crc kubenswrapper[4558]: I0120 18:41:44.700007 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-797f876cdb-fxjkw" Jan 20 18:41:47 crc kubenswrapper[4558]: I0120 18:41:47.105793 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-8wgbq"] Jan 20 18:41:47 crc kubenswrapper[4558]: I0120 18:41:47.107146 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-8wgbq" Jan 20 18:41:47 crc kubenswrapper[4558]: I0120 18:41:47.108908 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-index-dockercfg-dp485" Jan 20 18:41:47 crc kubenswrapper[4558]: I0120 18:41:47.116096 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-8wgbq"] Jan 20 18:41:47 crc kubenswrapper[4558]: I0120 18:41:47.225521 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k4lgw\" (UniqueName: \"kubernetes.io/projected/7738de4d-c8ff-41b6-a840-a74722879e73-kube-api-access-k4lgw\") pod \"rabbitmq-cluster-operator-index-8wgbq\" (UID: \"7738de4d-c8ff-41b6-a840-a74722879e73\") " pod="openstack-operators/rabbitmq-cluster-operator-index-8wgbq" Jan 20 18:41:47 crc kubenswrapper[4558]: I0120 18:41:47.326875 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k4lgw\" (UniqueName: \"kubernetes.io/projected/7738de4d-c8ff-41b6-a840-a74722879e73-kube-api-access-k4lgw\") pod \"rabbitmq-cluster-operator-index-8wgbq\" (UID: \"7738de4d-c8ff-41b6-a840-a74722879e73\") " pod="openstack-operators/rabbitmq-cluster-operator-index-8wgbq" Jan 20 18:41:47 crc kubenswrapper[4558]: I0120 18:41:47.344421 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k4lgw\" (UniqueName: \"kubernetes.io/projected/7738de4d-c8ff-41b6-a840-a74722879e73-kube-api-access-k4lgw\") pod \"rabbitmq-cluster-operator-index-8wgbq\" (UID: \"7738de4d-c8ff-41b6-a840-a74722879e73\") " pod="openstack-operators/rabbitmq-cluster-operator-index-8wgbq" Jan 20 18:41:47 crc kubenswrapper[4558]: I0120 18:41:47.429343 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-8wgbq" Jan 20 18:41:47 crc kubenswrapper[4558]: I0120 18:41:47.843236 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-8wgbq"] Jan 20 18:41:47 crc kubenswrapper[4558]: W0120 18:41:47.848589 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7738de4d_c8ff_41b6_a840_a74722879e73.slice/crio-fd8fc0c09808e18c96a48efd6a84a036674c4090b151ba0ce99df3c2945ee1a3 WatchSource:0}: Error finding container fd8fc0c09808e18c96a48efd6a84a036674c4090b151ba0ce99df3c2945ee1a3: Status 404 returned error can't find the container with id fd8fc0c09808e18c96a48efd6a84a036674c4090b151ba0ce99df3c2945ee1a3 Jan 20 18:41:48 crc kubenswrapper[4558]: I0120 18:41:48.665315 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-8wgbq" event={"ID":"7738de4d-c8ff-41b6-a840-a74722879e73","Type":"ContainerStarted","Data":"7bffcb000c99d37123cd59bb8be19cebedd396c27bfdbbf3b6d8e15b8d5ef4e3"} Jan 20 18:41:48 crc kubenswrapper[4558]: I0120 18:41:48.665379 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-8wgbq" event={"ID":"7738de4d-c8ff-41b6-a840-a74722879e73","Type":"ContainerStarted","Data":"fd8fc0c09808e18c96a48efd6a84a036674c4090b151ba0ce99df3c2945ee1a3"} Jan 20 18:41:48 crc kubenswrapper[4558]: I0120 18:41:48.682132 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-index-8wgbq" podStartSLOduration=1.028867085 podStartE2EDuration="1.682114235s" podCreationTimestamp="2026-01-20 18:41:47 +0000 UTC" firstStartedPulling="2026-01-20 18:41:47.851667647 +0000 UTC m=+7201.612005614" lastFinishedPulling="2026-01-20 18:41:48.504914798 +0000 UTC m=+7202.265252764" observedRunningTime="2026-01-20 18:41:48.67783582 +0000 UTC m=+7202.438173788" watchObservedRunningTime="2026-01-20 18:41:48.682114235 +0000 UTC m=+7202.442452202" Jan 20 18:41:50 crc kubenswrapper[4558]: I0120 18:41:50.566318 4558 scope.go:117] "RemoveContainer" containerID="77ce04c6daee7d4f9b776bd1c80b448f9c4750d5cb5c7dd182ff28963a4bcf99" Jan 20 18:41:50 crc kubenswrapper[4558]: E0120 18:41:50.566595 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:41:51 crc kubenswrapper[4558]: I0120 18:41:51.301583 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-8wgbq"] Jan 20 18:41:51 crc kubenswrapper[4558]: I0120 18:41:51.301890 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/rabbitmq-cluster-operator-index-8wgbq" podUID="7738de4d-c8ff-41b6-a840-a74722879e73" containerName="registry-server" containerID="cri-o://7bffcb000c99d37123cd59bb8be19cebedd396c27bfdbbf3b6d8e15b8d5ef4e3" gracePeriod=2 Jan 20 18:41:51 crc kubenswrapper[4558]: I0120 18:41:51.663079 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-8wgbq" Jan 20 18:41:51 crc kubenswrapper[4558]: I0120 18:41:51.690550 4558 generic.go:334] "Generic (PLEG): container finished" podID="7738de4d-c8ff-41b6-a840-a74722879e73" containerID="7bffcb000c99d37123cd59bb8be19cebedd396c27bfdbbf3b6d8e15b8d5ef4e3" exitCode=0 Jan 20 18:41:51 crc kubenswrapper[4558]: I0120 18:41:51.690596 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-8wgbq" event={"ID":"7738de4d-c8ff-41b6-a840-a74722879e73","Type":"ContainerDied","Data":"7bffcb000c99d37123cd59bb8be19cebedd396c27bfdbbf3b6d8e15b8d5ef4e3"} Jan 20 18:41:51 crc kubenswrapper[4558]: I0120 18:41:51.690626 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-8wgbq" event={"ID":"7738de4d-c8ff-41b6-a840-a74722879e73","Type":"ContainerDied","Data":"fd8fc0c09808e18c96a48efd6a84a036674c4090b151ba0ce99df3c2945ee1a3"} Jan 20 18:41:51 crc kubenswrapper[4558]: I0120 18:41:51.690645 4558 scope.go:117] "RemoveContainer" containerID="7bffcb000c99d37123cd59bb8be19cebedd396c27bfdbbf3b6d8e15b8d5ef4e3" Jan 20 18:41:51 crc kubenswrapper[4558]: I0120 18:41:51.690754 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-8wgbq" Jan 20 18:41:51 crc kubenswrapper[4558]: I0120 18:41:51.707480 4558 scope.go:117] "RemoveContainer" containerID="7bffcb000c99d37123cd59bb8be19cebedd396c27bfdbbf3b6d8e15b8d5ef4e3" Jan 20 18:41:51 crc kubenswrapper[4558]: E0120 18:41:51.707813 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7bffcb000c99d37123cd59bb8be19cebedd396c27bfdbbf3b6d8e15b8d5ef4e3\": container with ID starting with 7bffcb000c99d37123cd59bb8be19cebedd396c27bfdbbf3b6d8e15b8d5ef4e3 not found: ID does not exist" containerID="7bffcb000c99d37123cd59bb8be19cebedd396c27bfdbbf3b6d8e15b8d5ef4e3" Jan 20 18:41:51 crc kubenswrapper[4558]: I0120 18:41:51.707850 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7bffcb000c99d37123cd59bb8be19cebedd396c27bfdbbf3b6d8e15b8d5ef4e3"} err="failed to get container status \"7bffcb000c99d37123cd59bb8be19cebedd396c27bfdbbf3b6d8e15b8d5ef4e3\": rpc error: code = NotFound desc = could not find container \"7bffcb000c99d37123cd59bb8be19cebedd396c27bfdbbf3b6d8e15b8d5ef4e3\": container with ID starting with 7bffcb000c99d37123cd59bb8be19cebedd396c27bfdbbf3b6d8e15b8d5ef4e3 not found: ID does not exist" Jan 20 18:41:51 crc kubenswrapper[4558]: I0120 18:41:51.788002 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k4lgw\" (UniqueName: \"kubernetes.io/projected/7738de4d-c8ff-41b6-a840-a74722879e73-kube-api-access-k4lgw\") pod \"7738de4d-c8ff-41b6-a840-a74722879e73\" (UID: \"7738de4d-c8ff-41b6-a840-a74722879e73\") " Jan 20 18:41:51 crc kubenswrapper[4558]: I0120 18:41:51.794427 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7738de4d-c8ff-41b6-a840-a74722879e73-kube-api-access-k4lgw" (OuterVolumeSpecName: "kube-api-access-k4lgw") pod "7738de4d-c8ff-41b6-a840-a74722879e73" (UID: "7738de4d-c8ff-41b6-a840-a74722879e73"). InnerVolumeSpecName "kube-api-access-k4lgw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:41:51 crc kubenswrapper[4558]: I0120 18:41:51.890309 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k4lgw\" (UniqueName: \"kubernetes.io/projected/7738de4d-c8ff-41b6-a840-a74722879e73-kube-api-access-k4lgw\") on node \"crc\" DevicePath \"\"" Jan 20 18:41:51 crc kubenswrapper[4558]: I0120 18:41:51.908822 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-wdg6v"] Jan 20 18:41:51 crc kubenswrapper[4558]: E0120 18:41:51.909207 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7738de4d-c8ff-41b6-a840-a74722879e73" containerName="registry-server" Jan 20 18:41:51 crc kubenswrapper[4558]: I0120 18:41:51.909229 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="7738de4d-c8ff-41b6-a840-a74722879e73" containerName="registry-server" Jan 20 18:41:51 crc kubenswrapper[4558]: I0120 18:41:51.909451 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="7738de4d-c8ff-41b6-a840-a74722879e73" containerName="registry-server" Jan 20 18:41:51 crc kubenswrapper[4558]: I0120 18:41:51.910072 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-wdg6v" Jan 20 18:41:51 crc kubenswrapper[4558]: I0120 18:41:51.923359 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-wdg6v"] Jan 20 18:41:51 crc kubenswrapper[4558]: I0120 18:41:51.994298 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tw8nt\" (UniqueName: \"kubernetes.io/projected/08aa1ffd-1990-4e80-b8b2-ba4cc54c5057-kube-api-access-tw8nt\") pod \"rabbitmq-cluster-operator-index-wdg6v\" (UID: \"08aa1ffd-1990-4e80-b8b2-ba4cc54c5057\") " pod="openstack-operators/rabbitmq-cluster-operator-index-wdg6v" Jan 20 18:41:52 crc kubenswrapper[4558]: I0120 18:41:52.024009 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-8wgbq"] Jan 20 18:41:52 crc kubenswrapper[4558]: I0120 18:41:52.032016 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-8wgbq"] Jan 20 18:41:52 crc kubenswrapper[4558]: I0120 18:41:52.097029 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tw8nt\" (UniqueName: \"kubernetes.io/projected/08aa1ffd-1990-4e80-b8b2-ba4cc54c5057-kube-api-access-tw8nt\") pod \"rabbitmq-cluster-operator-index-wdg6v\" (UID: \"08aa1ffd-1990-4e80-b8b2-ba4cc54c5057\") " pod="openstack-operators/rabbitmq-cluster-operator-index-wdg6v" Jan 20 18:41:52 crc kubenswrapper[4558]: I0120 18:41:52.111491 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tw8nt\" (UniqueName: \"kubernetes.io/projected/08aa1ffd-1990-4e80-b8b2-ba4cc54c5057-kube-api-access-tw8nt\") pod \"rabbitmq-cluster-operator-index-wdg6v\" (UID: \"08aa1ffd-1990-4e80-b8b2-ba4cc54c5057\") " pod="openstack-operators/rabbitmq-cluster-operator-index-wdg6v" Jan 20 18:41:52 crc kubenswrapper[4558]: I0120 18:41:52.226915 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-wdg6v" Jan 20 18:41:52 crc kubenswrapper[4558]: I0120 18:41:52.573227 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7738de4d-c8ff-41b6-a840-a74722879e73" path="/var/lib/kubelet/pods/7738de4d-c8ff-41b6-a840-a74722879e73/volumes" Jan 20 18:41:52 crc kubenswrapper[4558]: I0120 18:41:52.602978 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5907g9xn"] Jan 20 18:41:52 crc kubenswrapper[4558]: I0120 18:41:52.612013 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5907g9xn" Jan 20 18:41:52 crc kubenswrapper[4558]: I0120 18:41:52.635856 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-7xp5w" Jan 20 18:41:52 crc kubenswrapper[4558]: I0120 18:41:52.658215 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5907g9xn"] Jan 20 18:41:52 crc kubenswrapper[4558]: I0120 18:41:52.703206 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-wdg6v"] Jan 20 18:41:52 crc kubenswrapper[4558]: I0120 18:41:52.819741 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b6f21f27-20ad-443e-bd90-3b739863d0ae-bundle\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5907g9xn\" (UID: \"b6f21f27-20ad-443e-bd90-3b739863d0ae\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5907g9xn" Jan 20 18:41:52 crc kubenswrapper[4558]: I0120 18:41:52.819813 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b6f21f27-20ad-443e-bd90-3b739863d0ae-util\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5907g9xn\" (UID: \"b6f21f27-20ad-443e-bd90-3b739863d0ae\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5907g9xn" Jan 20 18:41:52 crc kubenswrapper[4558]: I0120 18:41:52.819898 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9m4lz\" (UniqueName: \"kubernetes.io/projected/b6f21f27-20ad-443e-bd90-3b739863d0ae-kube-api-access-9m4lz\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5907g9xn\" (UID: \"b6f21f27-20ad-443e-bd90-3b739863d0ae\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5907g9xn" Jan 20 18:41:52 crc kubenswrapper[4558]: I0120 18:41:52.921726 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b6f21f27-20ad-443e-bd90-3b739863d0ae-bundle\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5907g9xn\" (UID: \"b6f21f27-20ad-443e-bd90-3b739863d0ae\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5907g9xn" Jan 20 18:41:52 crc kubenswrapper[4558]: I0120 18:41:52.921778 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b6f21f27-20ad-443e-bd90-3b739863d0ae-util\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5907g9xn\" (UID: 
\"b6f21f27-20ad-443e-bd90-3b739863d0ae\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5907g9xn" Jan 20 18:41:52 crc kubenswrapper[4558]: I0120 18:41:52.921828 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9m4lz\" (UniqueName: \"kubernetes.io/projected/b6f21f27-20ad-443e-bd90-3b739863d0ae-kube-api-access-9m4lz\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5907g9xn\" (UID: \"b6f21f27-20ad-443e-bd90-3b739863d0ae\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5907g9xn" Jan 20 18:41:52 crc kubenswrapper[4558]: I0120 18:41:52.922523 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b6f21f27-20ad-443e-bd90-3b739863d0ae-bundle\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5907g9xn\" (UID: \"b6f21f27-20ad-443e-bd90-3b739863d0ae\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5907g9xn" Jan 20 18:41:52 crc kubenswrapper[4558]: I0120 18:41:52.922757 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b6f21f27-20ad-443e-bd90-3b739863d0ae-util\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5907g9xn\" (UID: \"b6f21f27-20ad-443e-bd90-3b739863d0ae\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5907g9xn" Jan 20 18:41:52 crc kubenswrapper[4558]: I0120 18:41:52.938590 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9m4lz\" (UniqueName: \"kubernetes.io/projected/b6f21f27-20ad-443e-bd90-3b739863d0ae-kube-api-access-9m4lz\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5907g9xn\" (UID: \"b6f21f27-20ad-443e-bd90-3b739863d0ae\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5907g9xn" Jan 20 18:41:53 crc kubenswrapper[4558]: I0120 18:41:53.233544 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5907g9xn" Jan 20 18:41:53 crc kubenswrapper[4558]: I0120 18:41:53.491560 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["horizon-kuttl-tests/memcached-0"] Jan 20 18:41:53 crc kubenswrapper[4558]: I0120 18:41:53.492614 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="horizon-kuttl-tests/memcached-0" Jan 20 18:41:53 crc kubenswrapper[4558]: I0120 18:41:53.495855 4558 reflector.go:368] Caches populated for *v1.Secret from object-"horizon-kuttl-tests"/"memcached-memcached-dockercfg-p6pkn" Jan 20 18:41:53 crc kubenswrapper[4558]: I0120 18:41:53.496010 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"horizon-kuttl-tests"/"memcached-config-data" Jan 20 18:41:53 crc kubenswrapper[4558]: I0120 18:41:53.500392 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/memcached-0"] Jan 20 18:41:53 crc kubenswrapper[4558]: I0120 18:41:53.633088 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x457m\" (UniqueName: \"kubernetes.io/projected/54015202-e6bd-4bf5-9b3f-df0dd67d24d9-kube-api-access-x457m\") pod \"memcached-0\" (UID: \"54015202-e6bd-4bf5-9b3f-df0dd67d24d9\") " pod="horizon-kuttl-tests/memcached-0" Jan 20 18:41:53 crc kubenswrapper[4558]: I0120 18:41:53.633230 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/54015202-e6bd-4bf5-9b3f-df0dd67d24d9-kolla-config\") pod \"memcached-0\" (UID: \"54015202-e6bd-4bf5-9b3f-df0dd67d24d9\") " pod="horizon-kuttl-tests/memcached-0" Jan 20 18:41:53 crc kubenswrapper[4558]: I0120 18:41:53.633347 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/54015202-e6bd-4bf5-9b3f-df0dd67d24d9-config-data\") pod \"memcached-0\" (UID: \"54015202-e6bd-4bf5-9b3f-df0dd67d24d9\") " pod="horizon-kuttl-tests/memcached-0" Jan 20 18:41:53 crc kubenswrapper[4558]: I0120 18:41:53.706261 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5907g9xn"] Jan 20 18:41:53 crc kubenswrapper[4558]: W0120 18:41:53.706679 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb6f21f27_20ad_443e_bd90_3b739863d0ae.slice/crio-2cf6638cd6d1353ea5f4ecafcbeb54e15cff2e623994aff11e0dc413ec22173e WatchSource:0}: Error finding container 2cf6638cd6d1353ea5f4ecafcbeb54e15cff2e623994aff11e0dc413ec22173e: Status 404 returned error can't find the container with id 2cf6638cd6d1353ea5f4ecafcbeb54e15cff2e623994aff11e0dc413ec22173e Jan 20 18:41:53 crc kubenswrapper[4558]: I0120 18:41:53.734747 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/54015202-e6bd-4bf5-9b3f-df0dd67d24d9-config-data\") pod \"memcached-0\" (UID: \"54015202-e6bd-4bf5-9b3f-df0dd67d24d9\") " pod="horizon-kuttl-tests/memcached-0" Jan 20 18:41:53 crc kubenswrapper[4558]: I0120 18:41:53.734998 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x457m\" (UniqueName: \"kubernetes.io/projected/54015202-e6bd-4bf5-9b3f-df0dd67d24d9-kube-api-access-x457m\") pod \"memcached-0\" (UID: \"54015202-e6bd-4bf5-9b3f-df0dd67d24d9\") " pod="horizon-kuttl-tests/memcached-0" Jan 20 18:41:53 crc kubenswrapper[4558]: I0120 18:41:53.735218 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/54015202-e6bd-4bf5-9b3f-df0dd67d24d9-kolla-config\") pod \"memcached-0\" (UID: \"54015202-e6bd-4bf5-9b3f-df0dd67d24d9\") " 
pod="horizon-kuttl-tests/memcached-0" Jan 20 18:41:53 crc kubenswrapper[4558]: I0120 18:41:53.736196 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/54015202-e6bd-4bf5-9b3f-df0dd67d24d9-kolla-config\") pod \"memcached-0\" (UID: \"54015202-e6bd-4bf5-9b3f-df0dd67d24d9\") " pod="horizon-kuttl-tests/memcached-0" Jan 20 18:41:53 crc kubenswrapper[4558]: I0120 18:41:53.736204 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-wdg6v" event={"ID":"08aa1ffd-1990-4e80-b8b2-ba4cc54c5057","Type":"ContainerStarted","Data":"d5131bbb9a0d0b9f2dd6dae8531a145a202ec0c639b82f51e6569bdff316cbca"} Jan 20 18:41:53 crc kubenswrapper[4558]: I0120 18:41:53.736444 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-wdg6v" event={"ID":"08aa1ffd-1990-4e80-b8b2-ba4cc54c5057","Type":"ContainerStarted","Data":"5f3bf5c003d5ce2b64185818a5e1bf963154f024700ee32899b4f8cd669d41e4"} Jan 20 18:41:53 crc kubenswrapper[4558]: I0120 18:41:53.736717 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/54015202-e6bd-4bf5-9b3f-df0dd67d24d9-config-data\") pod \"memcached-0\" (UID: \"54015202-e6bd-4bf5-9b3f-df0dd67d24d9\") " pod="horizon-kuttl-tests/memcached-0" Jan 20 18:41:53 crc kubenswrapper[4558]: I0120 18:41:53.738325 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5907g9xn" event={"ID":"b6f21f27-20ad-443e-bd90-3b739863d0ae","Type":"ContainerStarted","Data":"2cf6638cd6d1353ea5f4ecafcbeb54e15cff2e623994aff11e0dc413ec22173e"} Jan 20 18:41:53 crc kubenswrapper[4558]: I0120 18:41:53.754701 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-index-wdg6v" podStartSLOduration=2.189757372 podStartE2EDuration="2.754669583s" podCreationTimestamp="2026-01-20 18:41:51 +0000 UTC" firstStartedPulling="2026-01-20 18:41:52.716430195 +0000 UTC m=+7206.476768162" lastFinishedPulling="2026-01-20 18:41:53.281342405 +0000 UTC m=+7207.041680373" observedRunningTime="2026-01-20 18:41:53.753594934 +0000 UTC m=+7207.513932900" watchObservedRunningTime="2026-01-20 18:41:53.754669583 +0000 UTC m=+7207.515007560" Jan 20 18:41:53 crc kubenswrapper[4558]: I0120 18:41:53.764005 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x457m\" (UniqueName: \"kubernetes.io/projected/54015202-e6bd-4bf5-9b3f-df0dd67d24d9-kube-api-access-x457m\") pod \"memcached-0\" (UID: \"54015202-e6bd-4bf5-9b3f-df0dd67d24d9\") " pod="horizon-kuttl-tests/memcached-0" Jan 20 18:41:53 crc kubenswrapper[4558]: I0120 18:41:53.809807 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="horizon-kuttl-tests/memcached-0" Jan 20 18:41:54 crc kubenswrapper[4558]: I0120 18:41:54.209331 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/memcached-0"] Jan 20 18:41:54 crc kubenswrapper[4558]: I0120 18:41:54.751664 4558 generic.go:334] "Generic (PLEG): container finished" podID="b6f21f27-20ad-443e-bd90-3b739863d0ae" containerID="30e0b5f5b1cb5c59016bfc7ad6bdad93db6731d03d725f58d00f4dc03b92bc5c" exitCode=0 Jan 20 18:41:54 crc kubenswrapper[4558]: I0120 18:41:54.751798 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5907g9xn" event={"ID":"b6f21f27-20ad-443e-bd90-3b739863d0ae","Type":"ContainerDied","Data":"30e0b5f5b1cb5c59016bfc7ad6bdad93db6731d03d725f58d00f4dc03b92bc5c"} Jan 20 18:41:54 crc kubenswrapper[4558]: I0120 18:41:54.755295 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/memcached-0" event={"ID":"54015202-e6bd-4bf5-9b3f-df0dd67d24d9","Type":"ContainerStarted","Data":"d779906ca149e1996729bdbafc2df8ce472e9efb0fb4cdf7c24dbb13fdabb76d"} Jan 20 18:41:54 crc kubenswrapper[4558]: I0120 18:41:54.755349 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/memcached-0" event={"ID":"54015202-e6bd-4bf5-9b3f-df0dd67d24d9","Type":"ContainerStarted","Data":"27eefccfdd833bae089c5b44499a83ae7433de855af4b28b6331370eafdc8357"} Jan 20 18:41:54 crc kubenswrapper[4558]: I0120 18:41:54.755732 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="horizon-kuttl-tests/memcached-0" Jan 20 18:41:54 crc kubenswrapper[4558]: I0120 18:41:54.789909 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="horizon-kuttl-tests/memcached-0" podStartSLOduration=1.789886149 podStartE2EDuration="1.789886149s" podCreationTimestamp="2026-01-20 18:41:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:41:54.782288914 +0000 UTC m=+7208.542626882" watchObservedRunningTime="2026-01-20 18:41:54.789886149 +0000 UTC m=+7208.550224116" Jan 20 18:41:55 crc kubenswrapper[4558]: I0120 18:41:55.766521 4558 generic.go:334] "Generic (PLEG): container finished" podID="b6f21f27-20ad-443e-bd90-3b739863d0ae" containerID="fab2ee8cde5caae0741da173af35897e3844e829215f358aafdf4ce9852ff0d7" exitCode=0 Jan 20 18:41:55 crc kubenswrapper[4558]: I0120 18:41:55.766651 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5907g9xn" event={"ID":"b6f21f27-20ad-443e-bd90-3b739863d0ae","Type":"ContainerDied","Data":"fab2ee8cde5caae0741da173af35897e3844e829215f358aafdf4ce9852ff0d7"} Jan 20 18:41:56 crc kubenswrapper[4558]: I0120 18:41:56.774877 4558 generic.go:334] "Generic (PLEG): container finished" podID="b6f21f27-20ad-443e-bd90-3b739863d0ae" containerID="cbed135122fbd8fe1ca3d82bfc9ad6d6aa915bb48d915e47024000c4256b643d" exitCode=0 Jan 20 18:41:56 crc kubenswrapper[4558]: I0120 18:41:56.774935 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5907g9xn" event={"ID":"b6f21f27-20ad-443e-bd90-3b739863d0ae","Type":"ContainerDied","Data":"cbed135122fbd8fe1ca3d82bfc9ad6d6aa915bb48d915e47024000c4256b643d"} Jan 20 18:41:58 crc kubenswrapper[4558]: I0120 18:41:58.050944 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5907g9xn" Jan 20 18:41:58 crc kubenswrapper[4558]: I0120 18:41:58.209969 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9m4lz\" (UniqueName: \"kubernetes.io/projected/b6f21f27-20ad-443e-bd90-3b739863d0ae-kube-api-access-9m4lz\") pod \"b6f21f27-20ad-443e-bd90-3b739863d0ae\" (UID: \"b6f21f27-20ad-443e-bd90-3b739863d0ae\") " Jan 20 18:41:58 crc kubenswrapper[4558]: I0120 18:41:58.210131 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b6f21f27-20ad-443e-bd90-3b739863d0ae-util\") pod \"b6f21f27-20ad-443e-bd90-3b739863d0ae\" (UID: \"b6f21f27-20ad-443e-bd90-3b739863d0ae\") " Jan 20 18:41:58 crc kubenswrapper[4558]: I0120 18:41:58.210301 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b6f21f27-20ad-443e-bd90-3b739863d0ae-bundle\") pod \"b6f21f27-20ad-443e-bd90-3b739863d0ae\" (UID: \"b6f21f27-20ad-443e-bd90-3b739863d0ae\") " Jan 20 18:41:58 crc kubenswrapper[4558]: I0120 18:41:58.211544 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b6f21f27-20ad-443e-bd90-3b739863d0ae-bundle" (OuterVolumeSpecName: "bundle") pod "b6f21f27-20ad-443e-bd90-3b739863d0ae" (UID: "b6f21f27-20ad-443e-bd90-3b739863d0ae"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:41:58 crc kubenswrapper[4558]: I0120 18:41:58.215729 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6f21f27-20ad-443e-bd90-3b739863d0ae-kube-api-access-9m4lz" (OuterVolumeSpecName: "kube-api-access-9m4lz") pod "b6f21f27-20ad-443e-bd90-3b739863d0ae" (UID: "b6f21f27-20ad-443e-bd90-3b739863d0ae"). InnerVolumeSpecName "kube-api-access-9m4lz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:41:58 crc kubenswrapper[4558]: I0120 18:41:58.221110 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b6f21f27-20ad-443e-bd90-3b739863d0ae-util" (OuterVolumeSpecName: "util") pod "b6f21f27-20ad-443e-bd90-3b739863d0ae" (UID: "b6f21f27-20ad-443e-bd90-3b739863d0ae"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:41:58 crc kubenswrapper[4558]: I0120 18:41:58.312971 4558 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b6f21f27-20ad-443e-bd90-3b739863d0ae-util\") on node \"crc\" DevicePath \"\"" Jan 20 18:41:58 crc kubenswrapper[4558]: I0120 18:41:58.313004 4558 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b6f21f27-20ad-443e-bd90-3b739863d0ae-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:41:58 crc kubenswrapper[4558]: I0120 18:41:58.313020 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9m4lz\" (UniqueName: \"kubernetes.io/projected/b6f21f27-20ad-443e-bd90-3b739863d0ae-kube-api-access-9m4lz\") on node \"crc\" DevicePath \"\"" Jan 20 18:41:58 crc kubenswrapper[4558]: I0120 18:41:58.793212 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5907g9xn" event={"ID":"b6f21f27-20ad-443e-bd90-3b739863d0ae","Type":"ContainerDied","Data":"2cf6638cd6d1353ea5f4ecafcbeb54e15cff2e623994aff11e0dc413ec22173e"} Jan 20 18:41:58 crc kubenswrapper[4558]: I0120 18:41:58.793262 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2cf6638cd6d1353ea5f4ecafcbeb54e15cff2e623994aff11e0dc413ec22173e" Jan 20 18:41:58 crc kubenswrapper[4558]: I0120 18:41:58.793293 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5907g9xn" Jan 20 18:42:02 crc kubenswrapper[4558]: I0120 18:42:02.227203 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/rabbitmq-cluster-operator-index-wdg6v" Jan 20 18:42:02 crc kubenswrapper[4558]: I0120 18:42:02.227256 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/rabbitmq-cluster-operator-index-wdg6v" Jan 20 18:42:02 crc kubenswrapper[4558]: I0120 18:42:02.253785 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/rabbitmq-cluster-operator-index-wdg6v" Jan 20 18:42:02 crc kubenswrapper[4558]: I0120 18:42:02.841565 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/rabbitmq-cluster-operator-index-wdg6v" Jan 20 18:42:03 crc kubenswrapper[4558]: I0120 18:42:03.811515 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="horizon-kuttl-tests/memcached-0" Jan 20 18:42:05 crc kubenswrapper[4558]: I0120 18:42:05.566044 4558 scope.go:117] "RemoveContainer" containerID="77ce04c6daee7d4f9b776bd1c80b448f9c4750d5cb5c7dd182ff28963a4bcf99" Jan 20 18:42:05 crc kubenswrapper[4558]: E0120 18:42:05.566618 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:42:09 crc kubenswrapper[4558]: I0120 18:42:09.006661 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-779fc9694b-5xm4r"] Jan 20 18:42:09 crc kubenswrapper[4558]: E0120 18:42:09.007428 4558 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6f21f27-20ad-443e-bd90-3b739863d0ae" containerName="extract" Jan 20 18:42:09 crc kubenswrapper[4558]: I0120 18:42:09.007443 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6f21f27-20ad-443e-bd90-3b739863d0ae" containerName="extract" Jan 20 18:42:09 crc kubenswrapper[4558]: E0120 18:42:09.007451 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6f21f27-20ad-443e-bd90-3b739863d0ae" containerName="util" Jan 20 18:42:09 crc kubenswrapper[4558]: I0120 18:42:09.007457 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6f21f27-20ad-443e-bd90-3b739863d0ae" containerName="util" Jan 20 18:42:09 crc kubenswrapper[4558]: E0120 18:42:09.007466 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6f21f27-20ad-443e-bd90-3b739863d0ae" containerName="pull" Jan 20 18:42:09 crc kubenswrapper[4558]: I0120 18:42:09.007471 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6f21f27-20ad-443e-bd90-3b739863d0ae" containerName="pull" Jan 20 18:42:09 crc kubenswrapper[4558]: I0120 18:42:09.007597 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b6f21f27-20ad-443e-bd90-3b739863d0ae" containerName="extract" Jan 20 18:42:09 crc kubenswrapper[4558]: I0120 18:42:09.008000 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-5xm4r" Jan 20 18:42:09 crc kubenswrapper[4558]: I0120 18:42:09.011853 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-dockercfg-blmjd" Jan 20 18:42:09 crc kubenswrapper[4558]: I0120 18:42:09.013392 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-779fc9694b-5xm4r"] Jan 20 18:42:09 crc kubenswrapper[4558]: I0120 18:42:09.084444 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jcp9h\" (UniqueName: \"kubernetes.io/projected/d062a00c-aa04-4761-97a1-0a89f1f5cfa1-kube-api-access-jcp9h\") pod \"rabbitmq-cluster-operator-779fc9694b-5xm4r\" (UID: \"d062a00c-aa04-4761-97a1-0a89f1f5cfa1\") " pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-5xm4r" Jan 20 18:42:09 crc kubenswrapper[4558]: I0120 18:42:09.185865 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jcp9h\" (UniqueName: \"kubernetes.io/projected/d062a00c-aa04-4761-97a1-0a89f1f5cfa1-kube-api-access-jcp9h\") pod \"rabbitmq-cluster-operator-779fc9694b-5xm4r\" (UID: \"d062a00c-aa04-4761-97a1-0a89f1f5cfa1\") " pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-5xm4r" Jan 20 18:42:09 crc kubenswrapper[4558]: I0120 18:42:09.204586 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jcp9h\" (UniqueName: \"kubernetes.io/projected/d062a00c-aa04-4761-97a1-0a89f1f5cfa1-kube-api-access-jcp9h\") pod \"rabbitmq-cluster-operator-779fc9694b-5xm4r\" (UID: \"d062a00c-aa04-4761-97a1-0a89f1f5cfa1\") " pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-5xm4r" Jan 20 18:42:09 crc kubenswrapper[4558]: I0120 18:42:09.323813 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-5xm4r" Jan 20 18:42:09 crc kubenswrapper[4558]: I0120 18:42:09.702152 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-779fc9694b-5xm4r"] Jan 20 18:42:09 crc kubenswrapper[4558]: I0120 18:42:09.875352 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-5xm4r" event={"ID":"d062a00c-aa04-4761-97a1-0a89f1f5cfa1","Type":"ContainerStarted","Data":"a6f582be0f85505d0f185819a29a9598ca6f3bfa257e645f94a3c6c3a51e39b8"} Jan 20 18:42:10 crc kubenswrapper[4558]: I0120 18:42:10.884830 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-5xm4r" event={"ID":"d062a00c-aa04-4761-97a1-0a89f1f5cfa1","Type":"ContainerStarted","Data":"906f3f711e2a066b7111ced7541cdfd4472b0e2a493f39337c7b06dd1f76c470"} Jan 20 18:42:10 crc kubenswrapper[4558]: I0120 18:42:10.901305 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-5xm4r" podStartSLOduration=2.901273904 podStartE2EDuration="2.901273904s" podCreationTimestamp="2026-01-20 18:42:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:42:10.897254657 +0000 UTC m=+7224.657592624" watchObservedRunningTime="2026-01-20 18:42:10.901273904 +0000 UTC m=+7224.661611871" Jan 20 18:42:17 crc kubenswrapper[4558]: I0120 18:42:17.565953 4558 scope.go:117] "RemoveContainer" containerID="77ce04c6daee7d4f9b776bd1c80b448f9c4750d5cb5c7dd182ff28963a4bcf99" Jan 20 18:42:17 crc kubenswrapper[4558]: E0120 18:42:17.566940 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:42:17 crc kubenswrapper[4558]: I0120 18:42:17.705878 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-index-hwm7b"] Jan 20 18:42:17 crc kubenswrapper[4558]: I0120 18:42:17.706933 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-index-hwm7b" Jan 20 18:42:17 crc kubenswrapper[4558]: I0120 18:42:17.709153 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-index-dockercfg-t5fmc" Jan 20 18:42:17 crc kubenswrapper[4558]: I0120 18:42:17.719411 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-index-hwm7b"] Jan 20 18:42:17 crc kubenswrapper[4558]: I0120 18:42:17.814667 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qnb8p\" (UniqueName: \"kubernetes.io/projected/8ca9a5ad-b201-42d6-b177-9b299381d1ab-kube-api-access-qnb8p\") pod \"keystone-operator-index-hwm7b\" (UID: \"8ca9a5ad-b201-42d6-b177-9b299381d1ab\") " pod="openstack-operators/keystone-operator-index-hwm7b" Jan 20 18:42:17 crc kubenswrapper[4558]: I0120 18:42:17.915854 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qnb8p\" (UniqueName: \"kubernetes.io/projected/8ca9a5ad-b201-42d6-b177-9b299381d1ab-kube-api-access-qnb8p\") pod \"keystone-operator-index-hwm7b\" (UID: \"8ca9a5ad-b201-42d6-b177-9b299381d1ab\") " pod="openstack-operators/keystone-operator-index-hwm7b" Jan 20 18:42:17 crc kubenswrapper[4558]: I0120 18:42:17.938960 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qnb8p\" (UniqueName: \"kubernetes.io/projected/8ca9a5ad-b201-42d6-b177-9b299381d1ab-kube-api-access-qnb8p\") pod \"keystone-operator-index-hwm7b\" (UID: \"8ca9a5ad-b201-42d6-b177-9b299381d1ab\") " pod="openstack-operators/keystone-operator-index-hwm7b" Jan 20 18:42:18 crc kubenswrapper[4558]: I0120 18:42:18.026469 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-index-hwm7b" Jan 20 18:42:18 crc kubenswrapper[4558]: I0120 18:42:18.416776 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-index-hwm7b"] Jan 20 18:42:18 crc kubenswrapper[4558]: I0120 18:42:18.934646 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-index-hwm7b" event={"ID":"8ca9a5ad-b201-42d6-b177-9b299381d1ab","Type":"ContainerStarted","Data":"239bf120d86507555ffcbad5cc6c7ff4e7157abb56da7186e67abb90a1545f8c"} Jan 20 18:42:19 crc kubenswrapper[4558]: I0120 18:42:19.943825 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-index-hwm7b" event={"ID":"8ca9a5ad-b201-42d6-b177-9b299381d1ab","Type":"ContainerStarted","Data":"a0a9b0d44a6b8ce4ad3ff9f384d6459ab8298ba66d2f8e376454d7a55e3f4780"} Jan 20 18:42:19 crc kubenswrapper[4558]: I0120 18:42:19.961033 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-index-hwm7b" podStartSLOduration=2.216240552 podStartE2EDuration="2.961018733s" podCreationTimestamp="2026-01-20 18:42:17 +0000 UTC" firstStartedPulling="2026-01-20 18:42:18.424101371 +0000 UTC m=+7232.184439338" lastFinishedPulling="2026-01-20 18:42:19.168879552 +0000 UTC m=+7232.929217519" observedRunningTime="2026-01-20 18:42:19.955545833 +0000 UTC m=+7233.715883799" watchObservedRunningTime="2026-01-20 18:42:19.961018733 +0000 UTC m=+7233.721356700" Jan 20 18:42:22 crc kubenswrapper[4558]: I0120 18:42:22.301800 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/keystone-operator-index-hwm7b"] Jan 20 18:42:22 crc kubenswrapper[4558]: I0120 18:42:22.302534 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/keystone-operator-index-hwm7b" podUID="8ca9a5ad-b201-42d6-b177-9b299381d1ab" containerName="registry-server" containerID="cri-o://a0a9b0d44a6b8ce4ad3ff9f384d6459ab8298ba66d2f8e376454d7a55e3f4780" gracePeriod=2 Jan 20 18:42:22 crc kubenswrapper[4558]: I0120 18:42:22.683813 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-index-hwm7b" Jan 20 18:42:22 crc kubenswrapper[4558]: I0120 18:42:22.788120 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qnb8p\" (UniqueName: \"kubernetes.io/projected/8ca9a5ad-b201-42d6-b177-9b299381d1ab-kube-api-access-qnb8p\") pod \"8ca9a5ad-b201-42d6-b177-9b299381d1ab\" (UID: \"8ca9a5ad-b201-42d6-b177-9b299381d1ab\") " Jan 20 18:42:22 crc kubenswrapper[4558]: I0120 18:42:22.795482 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8ca9a5ad-b201-42d6-b177-9b299381d1ab-kube-api-access-qnb8p" (OuterVolumeSpecName: "kube-api-access-qnb8p") pod "8ca9a5ad-b201-42d6-b177-9b299381d1ab" (UID: "8ca9a5ad-b201-42d6-b177-9b299381d1ab"). InnerVolumeSpecName "kube-api-access-qnb8p". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:42:22 crc kubenswrapper[4558]: I0120 18:42:22.889506 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qnb8p\" (UniqueName: \"kubernetes.io/projected/8ca9a5ad-b201-42d6-b177-9b299381d1ab-kube-api-access-qnb8p\") on node \"crc\" DevicePath \"\"" Jan 20 18:42:22 crc kubenswrapper[4558]: I0120 18:42:22.909782 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-index-55crg"] Jan 20 18:42:22 crc kubenswrapper[4558]: E0120 18:42:22.910254 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ca9a5ad-b201-42d6-b177-9b299381d1ab" containerName="registry-server" Jan 20 18:42:22 crc kubenswrapper[4558]: I0120 18:42:22.910275 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ca9a5ad-b201-42d6-b177-9b299381d1ab" containerName="registry-server" Jan 20 18:42:22 crc kubenswrapper[4558]: I0120 18:42:22.910538 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ca9a5ad-b201-42d6-b177-9b299381d1ab" containerName="registry-server" Jan 20 18:42:22 crc kubenswrapper[4558]: I0120 18:42:22.911197 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-index-55crg" Jan 20 18:42:22 crc kubenswrapper[4558]: I0120 18:42:22.919802 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-index-55crg"] Jan 20 18:42:22 crc kubenswrapper[4558]: I0120 18:42:22.974324 4558 generic.go:334] "Generic (PLEG): container finished" podID="8ca9a5ad-b201-42d6-b177-9b299381d1ab" containerID="a0a9b0d44a6b8ce4ad3ff9f384d6459ab8298ba66d2f8e376454d7a55e3f4780" exitCode=0 Jan 20 18:42:22 crc kubenswrapper[4558]: I0120 18:42:22.974404 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-index-hwm7b" event={"ID":"8ca9a5ad-b201-42d6-b177-9b299381d1ab","Type":"ContainerDied","Data":"a0a9b0d44a6b8ce4ad3ff9f384d6459ab8298ba66d2f8e376454d7a55e3f4780"} Jan 20 18:42:22 crc kubenswrapper[4558]: I0120 18:42:22.974464 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-index-hwm7b" event={"ID":"8ca9a5ad-b201-42d6-b177-9b299381d1ab","Type":"ContainerDied","Data":"239bf120d86507555ffcbad5cc6c7ff4e7157abb56da7186e67abb90a1545f8c"} Jan 20 18:42:22 crc kubenswrapper[4558]: I0120 18:42:22.974501 4558 scope.go:117] "RemoveContainer" containerID="a0a9b0d44a6b8ce4ad3ff9f384d6459ab8298ba66d2f8e376454d7a55e3f4780" Jan 20 18:42:22 crc kubenswrapper[4558]: I0120 18:42:22.974418 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-index-hwm7b" Jan 20 18:42:22 crc kubenswrapper[4558]: I0120 18:42:22.993553 4558 scope.go:117] "RemoveContainer" containerID="a0a9b0d44a6b8ce4ad3ff9f384d6459ab8298ba66d2f8e376454d7a55e3f4780" Jan 20 18:42:22 crc kubenswrapper[4558]: E0120 18:42:22.994070 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a0a9b0d44a6b8ce4ad3ff9f384d6459ab8298ba66d2f8e376454d7a55e3f4780\": container with ID starting with a0a9b0d44a6b8ce4ad3ff9f384d6459ab8298ba66d2f8e376454d7a55e3f4780 not found: ID does not exist" containerID="a0a9b0d44a6b8ce4ad3ff9f384d6459ab8298ba66d2f8e376454d7a55e3f4780" Jan 20 18:42:22 crc kubenswrapper[4558]: I0120 18:42:22.994198 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a0a9b0d44a6b8ce4ad3ff9f384d6459ab8298ba66d2f8e376454d7a55e3f4780"} err="failed to get container status \"a0a9b0d44a6b8ce4ad3ff9f384d6459ab8298ba66d2f8e376454d7a55e3f4780\": rpc error: code = NotFound desc = could not find container \"a0a9b0d44a6b8ce4ad3ff9f384d6459ab8298ba66d2f8e376454d7a55e3f4780\": container with ID starting with a0a9b0d44a6b8ce4ad3ff9f384d6459ab8298ba66d2f8e376454d7a55e3f4780 not found: ID does not exist" Jan 20 18:42:23 crc kubenswrapper[4558]: I0120 18:42:23.004096 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/keystone-operator-index-hwm7b"] Jan 20 18:42:23 crc kubenswrapper[4558]: I0120 18:42:23.011738 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/keystone-operator-index-hwm7b"] Jan 20 18:42:23 crc kubenswrapper[4558]: I0120 18:42:23.093396 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pcvbn\" (UniqueName: \"kubernetes.io/projected/b6ca7feb-5a45-46a4-987c-6f58d12c94ae-kube-api-access-pcvbn\") pod \"keystone-operator-index-55crg\" (UID: \"b6ca7feb-5a45-46a4-987c-6f58d12c94ae\") " pod="openstack-operators/keystone-operator-index-55crg" Jan 20 18:42:23 crc kubenswrapper[4558]: I0120 18:42:23.194960 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pcvbn\" (UniqueName: \"kubernetes.io/projected/b6ca7feb-5a45-46a4-987c-6f58d12c94ae-kube-api-access-pcvbn\") pod \"keystone-operator-index-55crg\" (UID: \"b6ca7feb-5a45-46a4-987c-6f58d12c94ae\") " pod="openstack-operators/keystone-operator-index-55crg" Jan 20 18:42:23 crc kubenswrapper[4558]: I0120 18:42:23.211486 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pcvbn\" (UniqueName: \"kubernetes.io/projected/b6ca7feb-5a45-46a4-987c-6f58d12c94ae-kube-api-access-pcvbn\") pod \"keystone-operator-index-55crg\" (UID: \"b6ca7feb-5a45-46a4-987c-6f58d12c94ae\") " pod="openstack-operators/keystone-operator-index-55crg" Jan 20 18:42:23 crc kubenswrapper[4558]: I0120 18:42:23.226128 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-index-55crg" Jan 20 18:42:23 crc kubenswrapper[4558]: I0120 18:42:23.630447 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-index-55crg"] Jan 20 18:42:23 crc kubenswrapper[4558]: I0120 18:42:23.981284 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-index-55crg" event={"ID":"b6ca7feb-5a45-46a4-987c-6f58d12c94ae","Type":"ContainerStarted","Data":"05338ae85df5ff40aedba76d442505104f4fa1f970ae56ed8c7f441656afe8fa"} Jan 20 18:42:24 crc kubenswrapper[4558]: I0120 18:42:24.572795 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8ca9a5ad-b201-42d6-b177-9b299381d1ab" path="/var/lib/kubelet/pods/8ca9a5ad-b201-42d6-b177-9b299381d1ab/volumes" Jan 20 18:42:24 crc kubenswrapper[4558]: I0120 18:42:24.990798 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-index-55crg" event={"ID":"b6ca7feb-5a45-46a4-987c-6f58d12c94ae","Type":"ContainerStarted","Data":"43e7a77c79fbd25ced0635535d552e3444aae2d88de4165f7c8a22e90afa22cb"} Jan 20 18:42:25 crc kubenswrapper[4558]: I0120 18:42:25.007709 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-index-55crg" podStartSLOduration=2.498383835 podStartE2EDuration="3.007685755s" podCreationTimestamp="2026-01-20 18:42:22 +0000 UTC" firstStartedPulling="2026-01-20 18:42:23.639845367 +0000 UTC m=+7237.400183334" lastFinishedPulling="2026-01-20 18:42:24.149147287 +0000 UTC m=+7237.909485254" observedRunningTime="2026-01-20 18:42:25.00188602 +0000 UTC m=+7238.762223987" watchObservedRunningTime="2026-01-20 18:42:25.007685755 +0000 UTC m=+7238.768023722" Jan 20 18:42:30 crc kubenswrapper[4558]: I0120 18:42:30.528385 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["horizon-kuttl-tests/rabbitmq-server-0"] Jan 20 18:42:30 crc kubenswrapper[4558]: I0120 18:42:30.529546 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="horizon-kuttl-tests/rabbitmq-server-0" Jan 20 18:42:30 crc kubenswrapper[4558]: I0120 18:42:30.531935 4558 reflector.go:368] Caches populated for *v1.Secret from object-"horizon-kuttl-tests"/"rabbitmq-server-dockercfg-hdjpv" Jan 20 18:42:30 crc kubenswrapper[4558]: I0120 18:42:30.532412 4558 reflector.go:368] Caches populated for *v1.Secret from object-"horizon-kuttl-tests"/"rabbitmq-default-user" Jan 20 18:42:30 crc kubenswrapper[4558]: I0120 18:42:30.532576 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"horizon-kuttl-tests"/"rabbitmq-server-conf" Jan 20 18:42:30 crc kubenswrapper[4558]: I0120 18:42:30.532761 4558 reflector.go:368] Caches populated for *v1.Secret from object-"horizon-kuttl-tests"/"rabbitmq-erlang-cookie" Jan 20 18:42:30 crc kubenswrapper[4558]: I0120 18:42:30.533437 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"horizon-kuttl-tests"/"rabbitmq-plugins-conf" Jan 20 18:42:30 crc kubenswrapper[4558]: I0120 18:42:30.541155 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/rabbitmq-server-0"] Jan 20 18:42:30 crc kubenswrapper[4558]: I0120 18:42:30.566718 4558 scope.go:117] "RemoveContainer" containerID="77ce04c6daee7d4f9b776bd1c80b448f9c4750d5cb5c7dd182ff28963a4bcf99" Jan 20 18:42:30 crc kubenswrapper[4558]: E0120 18:42:30.567005 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:42:30 crc kubenswrapper[4558]: I0120 18:42:30.719120 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0842b2d6-f024-4981-bc33-7af5f2cb702e-pod-info\") pod \"rabbitmq-server-0\" (UID: \"0842b2d6-f024-4981-bc33-7af5f2cb702e\") " pod="horizon-kuttl-tests/rabbitmq-server-0" Jan 20 18:42:30 crc kubenswrapper[4558]: I0120 18:42:30.719352 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0842b2d6-f024-4981-bc33-7af5f2cb702e-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"0842b2d6-f024-4981-bc33-7af5f2cb702e\") " pod="horizon-kuttl-tests/rabbitmq-server-0" Jan 20 18:42:30 crc kubenswrapper[4558]: I0120 18:42:30.719418 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0842b2d6-f024-4981-bc33-7af5f2cb702e-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"0842b2d6-f024-4981-bc33-7af5f2cb702e\") " pod="horizon-kuttl-tests/rabbitmq-server-0" Jan 20 18:42:30 crc kubenswrapper[4558]: I0120 18:42:30.719445 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4mqvd\" (UniqueName: \"kubernetes.io/projected/0842b2d6-f024-4981-bc33-7af5f2cb702e-kube-api-access-4mqvd\") pod \"rabbitmq-server-0\" (UID: \"0842b2d6-f024-4981-bc33-7af5f2cb702e\") " pod="horizon-kuttl-tests/rabbitmq-server-0" Jan 20 18:42:30 crc kubenswrapper[4558]: I0120 18:42:30.719575 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0842b2d6-f024-4981-bc33-7af5f2cb702e-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"0842b2d6-f024-4981-bc33-7af5f2cb702e\") " pod="horizon-kuttl-tests/rabbitmq-server-0" Jan 20 18:42:30 crc kubenswrapper[4558]: I0120 18:42:30.719630 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0842b2d6-f024-4981-bc33-7af5f2cb702e-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"0842b2d6-f024-4981-bc33-7af5f2cb702e\") " pod="horizon-kuttl-tests/rabbitmq-server-0" Jan 20 18:42:30 crc kubenswrapper[4558]: I0120 18:42:30.719674 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-f25c550e-ed1f-4958-807c-5b0699f4d60c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f25c550e-ed1f-4958-807c-5b0699f4d60c\") pod \"rabbitmq-server-0\" (UID: \"0842b2d6-f024-4981-bc33-7af5f2cb702e\") " pod="horizon-kuttl-tests/rabbitmq-server-0" Jan 20 18:42:30 crc kubenswrapper[4558]: I0120 18:42:30.719710 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0842b2d6-f024-4981-bc33-7af5f2cb702e-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"0842b2d6-f024-4981-bc33-7af5f2cb702e\") " pod="horizon-kuttl-tests/rabbitmq-server-0" Jan 20 18:42:30 crc kubenswrapper[4558]: I0120 18:42:30.820082 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0842b2d6-f024-4981-bc33-7af5f2cb702e-pod-info\") pod \"rabbitmq-server-0\" (UID: \"0842b2d6-f024-4981-bc33-7af5f2cb702e\") " pod="horizon-kuttl-tests/rabbitmq-server-0" Jan 20 18:42:30 crc kubenswrapper[4558]: I0120 18:42:30.820150 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0842b2d6-f024-4981-bc33-7af5f2cb702e-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"0842b2d6-f024-4981-bc33-7af5f2cb702e\") " pod="horizon-kuttl-tests/rabbitmq-server-0" Jan 20 18:42:30 crc kubenswrapper[4558]: I0120 18:42:30.820208 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0842b2d6-f024-4981-bc33-7af5f2cb702e-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"0842b2d6-f024-4981-bc33-7af5f2cb702e\") " pod="horizon-kuttl-tests/rabbitmq-server-0" Jan 20 18:42:30 crc kubenswrapper[4558]: I0120 18:42:30.820231 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4mqvd\" (UniqueName: \"kubernetes.io/projected/0842b2d6-f024-4981-bc33-7af5f2cb702e-kube-api-access-4mqvd\") pod \"rabbitmq-server-0\" (UID: \"0842b2d6-f024-4981-bc33-7af5f2cb702e\") " pod="horizon-kuttl-tests/rabbitmq-server-0" Jan 20 18:42:30 crc kubenswrapper[4558]: I0120 18:42:30.820273 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0842b2d6-f024-4981-bc33-7af5f2cb702e-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"0842b2d6-f024-4981-bc33-7af5f2cb702e\") " pod="horizon-kuttl-tests/rabbitmq-server-0" Jan 20 18:42:30 crc kubenswrapper[4558]: I0120 18:42:30.820304 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0842b2d6-f024-4981-bc33-7af5f2cb702e-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"0842b2d6-f024-4981-bc33-7af5f2cb702e\") " pod="horizon-kuttl-tests/rabbitmq-server-0" Jan 20 18:42:30 crc kubenswrapper[4558]: I0120 18:42:30.820330 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-f25c550e-ed1f-4958-807c-5b0699f4d60c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f25c550e-ed1f-4958-807c-5b0699f4d60c\") pod \"rabbitmq-server-0\" (UID: \"0842b2d6-f024-4981-bc33-7af5f2cb702e\") " pod="horizon-kuttl-tests/rabbitmq-server-0" Jan 20 18:42:30 crc kubenswrapper[4558]: I0120 18:42:30.820348 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0842b2d6-f024-4981-bc33-7af5f2cb702e-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"0842b2d6-f024-4981-bc33-7af5f2cb702e\") " pod="horizon-kuttl-tests/rabbitmq-server-0" Jan 20 18:42:30 crc kubenswrapper[4558]: I0120 18:42:30.821736 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0842b2d6-f024-4981-bc33-7af5f2cb702e-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"0842b2d6-f024-4981-bc33-7af5f2cb702e\") " pod="horizon-kuttl-tests/rabbitmq-server-0" Jan 20 18:42:30 crc kubenswrapper[4558]: I0120 18:42:30.821914 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0842b2d6-f024-4981-bc33-7af5f2cb702e-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"0842b2d6-f024-4981-bc33-7af5f2cb702e\") " pod="horizon-kuttl-tests/rabbitmq-server-0" Jan 20 18:42:30 crc kubenswrapper[4558]: I0120 18:42:30.821962 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0842b2d6-f024-4981-bc33-7af5f2cb702e-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"0842b2d6-f024-4981-bc33-7af5f2cb702e\") " pod="horizon-kuttl-tests/rabbitmq-server-0" Jan 20 18:42:30 crc kubenswrapper[4558]: I0120 18:42:30.826833 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0842b2d6-f024-4981-bc33-7af5f2cb702e-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"0842b2d6-f024-4981-bc33-7af5f2cb702e\") " pod="horizon-kuttl-tests/rabbitmq-server-0" Jan 20 18:42:30 crc kubenswrapper[4558]: I0120 18:42:30.827320 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0842b2d6-f024-4981-bc33-7af5f2cb702e-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"0842b2d6-f024-4981-bc33-7af5f2cb702e\") " pod="horizon-kuttl-tests/rabbitmq-server-0" Jan 20 18:42:30 crc kubenswrapper[4558]: I0120 18:42:30.827741 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0842b2d6-f024-4981-bc33-7af5f2cb702e-pod-info\") pod \"rabbitmq-server-0\" (UID: \"0842b2d6-f024-4981-bc33-7af5f2cb702e\") " pod="horizon-kuttl-tests/rabbitmq-server-0" Jan 20 18:42:30 crc kubenswrapper[4558]: I0120 18:42:30.828033 4558 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Jan 20 18:42:30 crc kubenswrapper[4558]: I0120 18:42:30.828078 4558 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-f25c550e-ed1f-4958-807c-5b0699f4d60c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f25c550e-ed1f-4958-807c-5b0699f4d60c\") pod \"rabbitmq-server-0\" (UID: \"0842b2d6-f024-4981-bc33-7af5f2cb702e\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/ba2826f1650e0ba8d0c68dc4ccafb2cae176fd990ce708869ea62a3cab93ec43/globalmount\"" pod="horizon-kuttl-tests/rabbitmq-server-0" Jan 20 18:42:30 crc kubenswrapper[4558]: I0120 18:42:30.835924 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4mqvd\" (UniqueName: \"kubernetes.io/projected/0842b2d6-f024-4981-bc33-7af5f2cb702e-kube-api-access-4mqvd\") pod \"rabbitmq-server-0\" (UID: \"0842b2d6-f024-4981-bc33-7af5f2cb702e\") " pod="horizon-kuttl-tests/rabbitmq-server-0" Jan 20 18:42:30 crc kubenswrapper[4558]: I0120 18:42:30.851080 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-f25c550e-ed1f-4958-807c-5b0699f4d60c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f25c550e-ed1f-4958-807c-5b0699f4d60c\") pod \"rabbitmq-server-0\" (UID: \"0842b2d6-f024-4981-bc33-7af5f2cb702e\") " pod="horizon-kuttl-tests/rabbitmq-server-0" Jan 20 18:42:31 crc kubenswrapper[4558]: I0120 18:42:31.151268 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/rabbitmq-server-0" Jan 20 18:42:31 crc kubenswrapper[4558]: I0120 18:42:31.557354 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/rabbitmq-server-0"] Jan 20 18:42:31 crc kubenswrapper[4558]: W0120 18:42:31.558614 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0842b2d6_f024_4981_bc33_7af5f2cb702e.slice/crio-08274c29628de65c2989a1624dc308d2b705cc7aafe2197c8e6b554d11c6647c WatchSource:0}: Error finding container 08274c29628de65c2989a1624dc308d2b705cc7aafe2197c8e6b554d11c6647c: Status 404 returned error can't find the container with id 08274c29628de65c2989a1624dc308d2b705cc7aafe2197c8e6b554d11c6647c Jan 20 18:42:32 crc kubenswrapper[4558]: I0120 18:42:32.050544 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/rabbitmq-server-0" event={"ID":"0842b2d6-f024-4981-bc33-7af5f2cb702e","Type":"ContainerStarted","Data":"08274c29628de65c2989a1624dc308d2b705cc7aafe2197c8e6b554d11c6647c"} Jan 20 18:42:33 crc kubenswrapper[4558]: I0120 18:42:33.062918 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/rabbitmq-server-0" event={"ID":"0842b2d6-f024-4981-bc33-7af5f2cb702e","Type":"ContainerStarted","Data":"5a34773916056ec973e86f0eacf4edd81228b6f42f30cb79e9070f0ac481a67e"} Jan 20 18:42:33 crc kubenswrapper[4558]: I0120 18:42:33.226735 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-index-55crg" Jan 20 18:42:33 crc kubenswrapper[4558]: I0120 18:42:33.226786 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/keystone-operator-index-55crg" Jan 20 18:42:33 crc kubenswrapper[4558]: I0120 18:42:33.293772 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/keystone-operator-index-55crg" Jan 20 18:42:34 crc kubenswrapper[4558]: I0120 18:42:34.097779 4558 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-index-55crg" Jan 20 18:42:35 crc kubenswrapper[4558]: I0120 18:42:35.948841 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a069px9rs"] Jan 20 18:42:35 crc kubenswrapper[4558]: I0120 18:42:35.951422 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a069px9rs" Jan 20 18:42:35 crc kubenswrapper[4558]: I0120 18:42:35.954119 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-7xp5w" Jan 20 18:42:35 crc kubenswrapper[4558]: I0120 18:42:35.959986 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a069px9rs"] Jan 20 18:42:36 crc kubenswrapper[4558]: I0120 18:42:36.112601 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d90ff752-1abb-4f9b-91b1-73f56adad0c6-util\") pod \"34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a069px9rs\" (UID: \"d90ff752-1abb-4f9b-91b1-73f56adad0c6\") " pod="openstack-operators/34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a069px9rs" Jan 20 18:42:36 crc kubenswrapper[4558]: I0120 18:42:36.112845 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jrfn7\" (UniqueName: \"kubernetes.io/projected/d90ff752-1abb-4f9b-91b1-73f56adad0c6-kube-api-access-jrfn7\") pod \"34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a069px9rs\" (UID: \"d90ff752-1abb-4f9b-91b1-73f56adad0c6\") " pod="openstack-operators/34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a069px9rs" Jan 20 18:42:36 crc kubenswrapper[4558]: I0120 18:42:36.112979 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d90ff752-1abb-4f9b-91b1-73f56adad0c6-bundle\") pod \"34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a069px9rs\" (UID: \"d90ff752-1abb-4f9b-91b1-73f56adad0c6\") " pod="openstack-operators/34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a069px9rs" Jan 20 18:42:36 crc kubenswrapper[4558]: I0120 18:42:36.214271 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d90ff752-1abb-4f9b-91b1-73f56adad0c6-util\") pod \"34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a069px9rs\" (UID: \"d90ff752-1abb-4f9b-91b1-73f56adad0c6\") " pod="openstack-operators/34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a069px9rs" Jan 20 18:42:36 crc kubenswrapper[4558]: I0120 18:42:36.214357 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jrfn7\" (UniqueName: \"kubernetes.io/projected/d90ff752-1abb-4f9b-91b1-73f56adad0c6-kube-api-access-jrfn7\") pod \"34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a069px9rs\" (UID: \"d90ff752-1abb-4f9b-91b1-73f56adad0c6\") " pod="openstack-operators/34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a069px9rs" Jan 20 18:42:36 crc kubenswrapper[4558]: I0120 18:42:36.214402 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/d90ff752-1abb-4f9b-91b1-73f56adad0c6-bundle\") pod \"34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a069px9rs\" (UID: \"d90ff752-1abb-4f9b-91b1-73f56adad0c6\") " pod="openstack-operators/34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a069px9rs" Jan 20 18:42:36 crc kubenswrapper[4558]: I0120 18:42:36.214977 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d90ff752-1abb-4f9b-91b1-73f56adad0c6-util\") pod \"34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a069px9rs\" (UID: \"d90ff752-1abb-4f9b-91b1-73f56adad0c6\") " pod="openstack-operators/34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a069px9rs" Jan 20 18:42:36 crc kubenswrapper[4558]: I0120 18:42:36.214997 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d90ff752-1abb-4f9b-91b1-73f56adad0c6-bundle\") pod \"34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a069px9rs\" (UID: \"d90ff752-1abb-4f9b-91b1-73f56adad0c6\") " pod="openstack-operators/34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a069px9rs" Jan 20 18:42:36 crc kubenswrapper[4558]: I0120 18:42:36.232389 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jrfn7\" (UniqueName: \"kubernetes.io/projected/d90ff752-1abb-4f9b-91b1-73f56adad0c6-kube-api-access-jrfn7\") pod \"34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a069px9rs\" (UID: \"d90ff752-1abb-4f9b-91b1-73f56adad0c6\") " pod="openstack-operators/34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a069px9rs" Jan 20 18:42:36 crc kubenswrapper[4558]: I0120 18:42:36.276046 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a069px9rs" Jan 20 18:42:36 crc kubenswrapper[4558]: I0120 18:42:36.672556 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a069px9rs"] Jan 20 18:42:37 crc kubenswrapper[4558]: I0120 18:42:37.094495 4558 generic.go:334] "Generic (PLEG): container finished" podID="d90ff752-1abb-4f9b-91b1-73f56adad0c6" containerID="97142091aa6d643d811b188bd0da1649829bee35d0962992a649a3005e47ee4c" exitCode=0 Jan 20 18:42:37 crc kubenswrapper[4558]: I0120 18:42:37.094547 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a069px9rs" event={"ID":"d90ff752-1abb-4f9b-91b1-73f56adad0c6","Type":"ContainerDied","Data":"97142091aa6d643d811b188bd0da1649829bee35d0962992a649a3005e47ee4c"} Jan 20 18:42:37 crc kubenswrapper[4558]: I0120 18:42:37.094581 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a069px9rs" event={"ID":"d90ff752-1abb-4f9b-91b1-73f56adad0c6","Type":"ContainerStarted","Data":"3c926c25ef393c80ed07e20920df8b2c148e6c068db403012e707999f933b22f"} Jan 20 18:42:38 crc kubenswrapper[4558]: I0120 18:42:38.105053 4558 generic.go:334] "Generic (PLEG): container finished" podID="d90ff752-1abb-4f9b-91b1-73f56adad0c6" containerID="9626062908b705c00d0d1f4d94cb7c82140890fa637b50c8deb2205fc671efd7" exitCode=0 Jan 20 18:42:38 crc kubenswrapper[4558]: I0120 18:42:38.105207 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a069px9rs" 
event={"ID":"d90ff752-1abb-4f9b-91b1-73f56adad0c6","Type":"ContainerDied","Data":"9626062908b705c00d0d1f4d94cb7c82140890fa637b50c8deb2205fc671efd7"} Jan 20 18:42:38 crc kubenswrapper[4558]: I0120 18:42:38.798997 4558 scope.go:117] "RemoveContainer" containerID="5357156624c9b8928b4edfdcde4f5857b679e588775e8e81a2e717f589382ea8" Jan 20 18:42:38 crc kubenswrapper[4558]: I0120 18:42:38.817882 4558 scope.go:117] "RemoveContainer" containerID="68b7c6f3a2ab1f2c3b25b09a9c942e3afc3ce1b84d8f4fea490855481f14444e" Jan 20 18:42:38 crc kubenswrapper[4558]: I0120 18:42:38.845237 4558 scope.go:117] "RemoveContainer" containerID="0d467d3ac757db96cef873308e3402f38b5140e5c7a2dd056d737c63b44610bb" Jan 20 18:42:38 crc kubenswrapper[4558]: I0120 18:42:38.888546 4558 scope.go:117] "RemoveContainer" containerID="69aa1c27bf9faa48f4b636ea9af7e73bd1699fceede086f9f9810e8d775e1626" Jan 20 18:42:39 crc kubenswrapper[4558]: I0120 18:42:39.114195 4558 generic.go:334] "Generic (PLEG): container finished" podID="d90ff752-1abb-4f9b-91b1-73f56adad0c6" containerID="8e273e5599d4ea2c4600eb6a4b8b33ea5e7cb5c15d64e2ec21c0e12e22101826" exitCode=0 Jan 20 18:42:39 crc kubenswrapper[4558]: I0120 18:42:39.114258 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a069px9rs" event={"ID":"d90ff752-1abb-4f9b-91b1-73f56adad0c6","Type":"ContainerDied","Data":"8e273e5599d4ea2c4600eb6a4b8b33ea5e7cb5c15d64e2ec21c0e12e22101826"} Jan 20 18:42:40 crc kubenswrapper[4558]: I0120 18:42:40.377263 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a069px9rs" Jan 20 18:42:40 crc kubenswrapper[4558]: I0120 18:42:40.484990 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d90ff752-1abb-4f9b-91b1-73f56adad0c6-util\") pod \"d90ff752-1abb-4f9b-91b1-73f56adad0c6\" (UID: \"d90ff752-1abb-4f9b-91b1-73f56adad0c6\") " Jan 20 18:42:40 crc kubenswrapper[4558]: I0120 18:42:40.485094 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d90ff752-1abb-4f9b-91b1-73f56adad0c6-bundle\") pod \"d90ff752-1abb-4f9b-91b1-73f56adad0c6\" (UID: \"d90ff752-1abb-4f9b-91b1-73f56adad0c6\") " Jan 20 18:42:40 crc kubenswrapper[4558]: I0120 18:42:40.485208 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jrfn7\" (UniqueName: \"kubernetes.io/projected/d90ff752-1abb-4f9b-91b1-73f56adad0c6-kube-api-access-jrfn7\") pod \"d90ff752-1abb-4f9b-91b1-73f56adad0c6\" (UID: \"d90ff752-1abb-4f9b-91b1-73f56adad0c6\") " Jan 20 18:42:40 crc kubenswrapper[4558]: I0120 18:42:40.486142 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d90ff752-1abb-4f9b-91b1-73f56adad0c6-bundle" (OuterVolumeSpecName: "bundle") pod "d90ff752-1abb-4f9b-91b1-73f56adad0c6" (UID: "d90ff752-1abb-4f9b-91b1-73f56adad0c6"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:42:40 crc kubenswrapper[4558]: I0120 18:42:40.490823 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d90ff752-1abb-4f9b-91b1-73f56adad0c6-kube-api-access-jrfn7" (OuterVolumeSpecName: "kube-api-access-jrfn7") pod "d90ff752-1abb-4f9b-91b1-73f56adad0c6" (UID: "d90ff752-1abb-4f9b-91b1-73f56adad0c6"). 
InnerVolumeSpecName "kube-api-access-jrfn7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:42:40 crc kubenswrapper[4558]: I0120 18:42:40.496234 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d90ff752-1abb-4f9b-91b1-73f56adad0c6-util" (OuterVolumeSpecName: "util") pod "d90ff752-1abb-4f9b-91b1-73f56adad0c6" (UID: "d90ff752-1abb-4f9b-91b1-73f56adad0c6"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:42:40 crc kubenswrapper[4558]: I0120 18:42:40.587396 4558 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d90ff752-1abb-4f9b-91b1-73f56adad0c6-util\") on node \"crc\" DevicePath \"\"" Jan 20 18:42:40 crc kubenswrapper[4558]: I0120 18:42:40.587435 4558 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d90ff752-1abb-4f9b-91b1-73f56adad0c6-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:42:40 crc kubenswrapper[4558]: I0120 18:42:40.587452 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jrfn7\" (UniqueName: \"kubernetes.io/projected/d90ff752-1abb-4f9b-91b1-73f56adad0c6-kube-api-access-jrfn7\") on node \"crc\" DevicePath \"\"" Jan 20 18:42:41 crc kubenswrapper[4558]: I0120 18:42:41.142124 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a069px9rs" event={"ID":"d90ff752-1abb-4f9b-91b1-73f56adad0c6","Type":"ContainerDied","Data":"3c926c25ef393c80ed07e20920df8b2c148e6c068db403012e707999f933b22f"} Jan 20 18:42:41 crc kubenswrapper[4558]: I0120 18:42:41.142216 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3c926c25ef393c80ed07e20920df8b2c148e6c068db403012e707999f933b22f" Jan 20 18:42:41 crc kubenswrapper[4558]: I0120 18:42:41.142425 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a069px9rs" Jan 20 18:42:43 crc kubenswrapper[4558]: I0120 18:42:43.566676 4558 scope.go:117] "RemoveContainer" containerID="77ce04c6daee7d4f9b776bd1c80b448f9c4750d5cb5c7dd182ff28963a4bcf99" Jan 20 18:42:43 crc kubenswrapper[4558]: E0120 18:42:43.567220 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:42:49 crc kubenswrapper[4558]: I0120 18:42:49.291744 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-866dbb996d-p69bb"] Jan 20 18:42:49 crc kubenswrapper[4558]: E0120 18:42:49.292384 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d90ff752-1abb-4f9b-91b1-73f56adad0c6" containerName="extract" Jan 20 18:42:49 crc kubenswrapper[4558]: I0120 18:42:49.292400 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d90ff752-1abb-4f9b-91b1-73f56adad0c6" containerName="extract" Jan 20 18:42:49 crc kubenswrapper[4558]: E0120 18:42:49.292420 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d90ff752-1abb-4f9b-91b1-73f56adad0c6" containerName="util" Jan 20 18:42:49 crc kubenswrapper[4558]: I0120 18:42:49.292426 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d90ff752-1abb-4f9b-91b1-73f56adad0c6" containerName="util" Jan 20 18:42:49 crc kubenswrapper[4558]: E0120 18:42:49.292436 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d90ff752-1abb-4f9b-91b1-73f56adad0c6" containerName="pull" Jan 20 18:42:49 crc kubenswrapper[4558]: I0120 18:42:49.292444 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d90ff752-1abb-4f9b-91b1-73f56adad0c6" containerName="pull" Jan 20 18:42:49 crc kubenswrapper[4558]: I0120 18:42:49.292603 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d90ff752-1abb-4f9b-91b1-73f56adad0c6" containerName="extract" Jan 20 18:42:49 crc kubenswrapper[4558]: I0120 18:42:49.293244 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-866dbb996d-p69bb" Jan 20 18:42:49 crc kubenswrapper[4558]: I0120 18:42:49.300745 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-service-cert" Jan 20 18:42:49 crc kubenswrapper[4558]: I0120 18:42:49.307388 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-2k7wv" Jan 20 18:42:49 crc kubenswrapper[4558]: I0120 18:42:49.308996 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-866dbb996d-p69bb"] Jan 20 18:42:49 crc kubenswrapper[4558]: I0120 18:42:49.314423 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/2f0e4759-5bd0-4265-8781-57738bd9fc93-apiservice-cert\") pod \"keystone-operator-controller-manager-866dbb996d-p69bb\" (UID: \"2f0e4759-5bd0-4265-8781-57738bd9fc93\") " pod="openstack-operators/keystone-operator-controller-manager-866dbb996d-p69bb" Jan 20 18:42:49 crc kubenswrapper[4558]: I0120 18:42:49.314543 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/2f0e4759-5bd0-4265-8781-57738bd9fc93-webhook-cert\") pod \"keystone-operator-controller-manager-866dbb996d-p69bb\" (UID: \"2f0e4759-5bd0-4265-8781-57738bd9fc93\") " pod="openstack-operators/keystone-operator-controller-manager-866dbb996d-p69bb" Jan 20 18:42:49 crc kubenswrapper[4558]: I0120 18:42:49.314660 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dnllf\" (UniqueName: \"kubernetes.io/projected/2f0e4759-5bd0-4265-8781-57738bd9fc93-kube-api-access-dnllf\") pod \"keystone-operator-controller-manager-866dbb996d-p69bb\" (UID: \"2f0e4759-5bd0-4265-8781-57738bd9fc93\") " pod="openstack-operators/keystone-operator-controller-manager-866dbb996d-p69bb" Jan 20 18:42:49 crc kubenswrapper[4558]: I0120 18:42:49.415897 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/2f0e4759-5bd0-4265-8781-57738bd9fc93-apiservice-cert\") pod \"keystone-operator-controller-manager-866dbb996d-p69bb\" (UID: \"2f0e4759-5bd0-4265-8781-57738bd9fc93\") " pod="openstack-operators/keystone-operator-controller-manager-866dbb996d-p69bb" Jan 20 18:42:49 crc kubenswrapper[4558]: I0120 18:42:49.415941 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/2f0e4759-5bd0-4265-8781-57738bd9fc93-webhook-cert\") pod \"keystone-operator-controller-manager-866dbb996d-p69bb\" (UID: \"2f0e4759-5bd0-4265-8781-57738bd9fc93\") " pod="openstack-operators/keystone-operator-controller-manager-866dbb996d-p69bb" Jan 20 18:42:49 crc kubenswrapper[4558]: I0120 18:42:49.415972 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dnllf\" (UniqueName: \"kubernetes.io/projected/2f0e4759-5bd0-4265-8781-57738bd9fc93-kube-api-access-dnllf\") pod \"keystone-operator-controller-manager-866dbb996d-p69bb\" (UID: \"2f0e4759-5bd0-4265-8781-57738bd9fc93\") " pod="openstack-operators/keystone-operator-controller-manager-866dbb996d-p69bb" Jan 20 18:42:49 crc kubenswrapper[4558]: I0120 18:42:49.422767 4558 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/2f0e4759-5bd0-4265-8781-57738bd9fc93-webhook-cert\") pod \"keystone-operator-controller-manager-866dbb996d-p69bb\" (UID: \"2f0e4759-5bd0-4265-8781-57738bd9fc93\") " pod="openstack-operators/keystone-operator-controller-manager-866dbb996d-p69bb" Jan 20 18:42:49 crc kubenswrapper[4558]: I0120 18:42:49.430340 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dnllf\" (UniqueName: \"kubernetes.io/projected/2f0e4759-5bd0-4265-8781-57738bd9fc93-kube-api-access-dnllf\") pod \"keystone-operator-controller-manager-866dbb996d-p69bb\" (UID: \"2f0e4759-5bd0-4265-8781-57738bd9fc93\") " pod="openstack-operators/keystone-operator-controller-manager-866dbb996d-p69bb" Jan 20 18:42:49 crc kubenswrapper[4558]: I0120 18:42:49.436030 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/2f0e4759-5bd0-4265-8781-57738bd9fc93-apiservice-cert\") pod \"keystone-operator-controller-manager-866dbb996d-p69bb\" (UID: \"2f0e4759-5bd0-4265-8781-57738bd9fc93\") " pod="openstack-operators/keystone-operator-controller-manager-866dbb996d-p69bb" Jan 20 18:42:49 crc kubenswrapper[4558]: I0120 18:42:49.608412 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-866dbb996d-p69bb" Jan 20 18:42:50 crc kubenswrapper[4558]: I0120 18:42:50.035464 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-866dbb996d-p69bb"] Jan 20 18:42:50 crc kubenswrapper[4558]: W0120 18:42:50.039547 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2f0e4759_5bd0_4265_8781_57738bd9fc93.slice/crio-3c3da68139600eeca6e91a2e2446d476a289877f1db76d34fb555d87f03fcd6c WatchSource:0}: Error finding container 3c3da68139600eeca6e91a2e2446d476a289877f1db76d34fb555d87f03fcd6c: Status 404 returned error can't find the container with id 3c3da68139600eeca6e91a2e2446d476a289877f1db76d34fb555d87f03fcd6c Jan 20 18:42:50 crc kubenswrapper[4558]: I0120 18:42:50.213520 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-866dbb996d-p69bb" event={"ID":"2f0e4759-5bd0-4265-8781-57738bd9fc93","Type":"ContainerStarted","Data":"d8badf873df2955eb71c4385d046c84384befe12e1855d8c531bceeaccd9075b"} Jan 20 18:42:50 crc kubenswrapper[4558]: I0120 18:42:50.213574 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-866dbb996d-p69bb" event={"ID":"2f0e4759-5bd0-4265-8781-57738bd9fc93","Type":"ContainerStarted","Data":"3c3da68139600eeca6e91a2e2446d476a289877f1db76d34fb555d87f03fcd6c"} Jan 20 18:42:50 crc kubenswrapper[4558]: I0120 18:42:50.213680 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-866dbb996d-p69bb" Jan 20 18:42:50 crc kubenswrapper[4558]: I0120 18:42:50.236441 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-866dbb996d-p69bb" podStartSLOduration=1.236427845 podStartE2EDuration="1.236427845s" podCreationTimestamp="2026-01-20 18:42:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 
00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:42:50.231689155 +0000 UTC m=+7263.992027122" watchObservedRunningTime="2026-01-20 18:42:50.236427845 +0000 UTC m=+7263.996765813" Jan 20 18:42:55 crc kubenswrapper[4558]: I0120 18:42:55.911759 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-ldv9l"] Jan 20 18:42:55 crc kubenswrapper[4558]: I0120 18:42:55.913475 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ldv9l" Jan 20 18:42:55 crc kubenswrapper[4558]: I0120 18:42:55.932688 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-ldv9l"] Jan 20 18:42:56 crc kubenswrapper[4558]: I0120 18:42:56.020233 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8e021a2-9ea0-4110-9390-4c17c191cb59-utilities\") pod \"redhat-operators-ldv9l\" (UID: \"a8e021a2-9ea0-4110-9390-4c17c191cb59\") " pod="openshift-marketplace/redhat-operators-ldv9l" Jan 20 18:42:56 crc kubenswrapper[4558]: I0120 18:42:56.020324 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8e021a2-9ea0-4110-9390-4c17c191cb59-catalog-content\") pod \"redhat-operators-ldv9l\" (UID: \"a8e021a2-9ea0-4110-9390-4c17c191cb59\") " pod="openshift-marketplace/redhat-operators-ldv9l" Jan 20 18:42:56 crc kubenswrapper[4558]: I0120 18:42:56.020530 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4dh24\" (UniqueName: \"kubernetes.io/projected/a8e021a2-9ea0-4110-9390-4c17c191cb59-kube-api-access-4dh24\") pod \"redhat-operators-ldv9l\" (UID: \"a8e021a2-9ea0-4110-9390-4c17c191cb59\") " pod="openshift-marketplace/redhat-operators-ldv9l" Jan 20 18:42:56 crc kubenswrapper[4558]: I0120 18:42:56.122454 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8e021a2-9ea0-4110-9390-4c17c191cb59-utilities\") pod \"redhat-operators-ldv9l\" (UID: \"a8e021a2-9ea0-4110-9390-4c17c191cb59\") " pod="openshift-marketplace/redhat-operators-ldv9l" Jan 20 18:42:56 crc kubenswrapper[4558]: I0120 18:42:56.122511 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8e021a2-9ea0-4110-9390-4c17c191cb59-catalog-content\") pod \"redhat-operators-ldv9l\" (UID: \"a8e021a2-9ea0-4110-9390-4c17c191cb59\") " pod="openshift-marketplace/redhat-operators-ldv9l" Jan 20 18:42:56 crc kubenswrapper[4558]: I0120 18:42:56.122554 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4dh24\" (UniqueName: \"kubernetes.io/projected/a8e021a2-9ea0-4110-9390-4c17c191cb59-kube-api-access-4dh24\") pod \"redhat-operators-ldv9l\" (UID: \"a8e021a2-9ea0-4110-9390-4c17c191cb59\") " pod="openshift-marketplace/redhat-operators-ldv9l" Jan 20 18:42:56 crc kubenswrapper[4558]: I0120 18:42:56.123037 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8e021a2-9ea0-4110-9390-4c17c191cb59-utilities\") pod \"redhat-operators-ldv9l\" (UID: \"a8e021a2-9ea0-4110-9390-4c17c191cb59\") " pod="openshift-marketplace/redhat-operators-ldv9l" Jan 20 18:42:56 crc kubenswrapper[4558]: I0120 18:42:56.123080 4558 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8e021a2-9ea0-4110-9390-4c17c191cb59-catalog-content\") pod \"redhat-operators-ldv9l\" (UID: \"a8e021a2-9ea0-4110-9390-4c17c191cb59\") " pod="openshift-marketplace/redhat-operators-ldv9l" Jan 20 18:42:56 crc kubenswrapper[4558]: I0120 18:42:56.139752 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4dh24\" (UniqueName: \"kubernetes.io/projected/a8e021a2-9ea0-4110-9390-4c17c191cb59-kube-api-access-4dh24\") pod \"redhat-operators-ldv9l\" (UID: \"a8e021a2-9ea0-4110-9390-4c17c191cb59\") " pod="openshift-marketplace/redhat-operators-ldv9l" Jan 20 18:42:56 crc kubenswrapper[4558]: I0120 18:42:56.228194 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ldv9l" Jan 20 18:42:56 crc kubenswrapper[4558]: I0120 18:42:56.609380 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-ldv9l"] Jan 20 18:42:57 crc kubenswrapper[4558]: I0120 18:42:57.271361 4558 generic.go:334] "Generic (PLEG): container finished" podID="a8e021a2-9ea0-4110-9390-4c17c191cb59" containerID="479e96f0979ff9c2abee02ac54c47567d4490e7cbeb703b07816d0c050c5dc24" exitCode=0 Jan 20 18:42:57 crc kubenswrapper[4558]: I0120 18:42:57.271404 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ldv9l" event={"ID":"a8e021a2-9ea0-4110-9390-4c17c191cb59","Type":"ContainerDied","Data":"479e96f0979ff9c2abee02ac54c47567d4490e7cbeb703b07816d0c050c5dc24"} Jan 20 18:42:57 crc kubenswrapper[4558]: I0120 18:42:57.271431 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ldv9l" event={"ID":"a8e021a2-9ea0-4110-9390-4c17c191cb59","Type":"ContainerStarted","Data":"f0b14b4af4bb943436b8c2fd57a33e6c53f1e87f186946c670517c5bc24e1040"} Jan 20 18:42:57 crc kubenswrapper[4558]: I0120 18:42:57.565947 4558 scope.go:117] "RemoveContainer" containerID="77ce04c6daee7d4f9b776bd1c80b448f9c4750d5cb5c7dd182ff28963a4bcf99" Jan 20 18:42:58 crc kubenswrapper[4558]: I0120 18:42:58.280582 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ldv9l" event={"ID":"a8e021a2-9ea0-4110-9390-4c17c191cb59","Type":"ContainerStarted","Data":"79d1b41bc8506a97c883e59c8227116ba2524e84ccfd8f34a2ce88fa687e3399"} Jan 20 18:42:58 crc kubenswrapper[4558]: I0120 18:42:58.282882 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerStarted","Data":"46c3b6e0005c86abe846af98397dbd3ec4bce46d6d33d8269c6f5d826617c1cf"} Jan 20 18:42:59 crc kubenswrapper[4558]: I0120 18:42:59.293328 4558 generic.go:334] "Generic (PLEG): container finished" podID="a8e021a2-9ea0-4110-9390-4c17c191cb59" containerID="79d1b41bc8506a97c883e59c8227116ba2524e84ccfd8f34a2ce88fa687e3399" exitCode=0 Jan 20 18:42:59 crc kubenswrapper[4558]: I0120 18:42:59.293426 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ldv9l" event={"ID":"a8e021a2-9ea0-4110-9390-4c17c191cb59","Type":"ContainerDied","Data":"79d1b41bc8506a97c883e59c8227116ba2524e84ccfd8f34a2ce88fa687e3399"} Jan 20 18:42:59 crc kubenswrapper[4558]: I0120 18:42:59.613535 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack-operators/keystone-operator-controller-manager-866dbb996d-p69bb" Jan 20 18:43:00 crc kubenswrapper[4558]: I0120 18:43:00.303117 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ldv9l" event={"ID":"a8e021a2-9ea0-4110-9390-4c17c191cb59","Type":"ContainerStarted","Data":"f245c79e6444caaa31ff6151b372bdfbcd9a23d914fccd48c7c5927ba35a789d"} Jan 20 18:43:00 crc kubenswrapper[4558]: I0120 18:43:00.323262 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-ldv9l" podStartSLOduration=2.723183127 podStartE2EDuration="5.323246428s" podCreationTimestamp="2026-01-20 18:42:55 +0000 UTC" firstStartedPulling="2026-01-20 18:42:57.274264041 +0000 UTC m=+7271.034602009" lastFinishedPulling="2026-01-20 18:42:59.874327343 +0000 UTC m=+7273.634665310" observedRunningTime="2026-01-20 18:43:00.320926506 +0000 UTC m=+7274.081264473" watchObservedRunningTime="2026-01-20 18:43:00.323246428 +0000 UTC m=+7274.083584395" Jan 20 18:43:05 crc kubenswrapper[4558]: I0120 18:43:05.350873 4558 generic.go:334] "Generic (PLEG): container finished" podID="0842b2d6-f024-4981-bc33-7af5f2cb702e" containerID="5a34773916056ec973e86f0eacf4edd81228b6f42f30cb79e9070f0ac481a67e" exitCode=0 Jan 20 18:43:05 crc kubenswrapper[4558]: I0120 18:43:05.350976 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/rabbitmq-server-0" event={"ID":"0842b2d6-f024-4981-bc33-7af5f2cb702e","Type":"ContainerDied","Data":"5a34773916056ec973e86f0eacf4edd81228b6f42f30cb79e9070f0ac481a67e"} Jan 20 18:43:06 crc kubenswrapper[4558]: I0120 18:43:06.229259 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-ldv9l" Jan 20 18:43:06 crc kubenswrapper[4558]: I0120 18:43:06.229894 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-ldv9l" Jan 20 18:43:06 crc kubenswrapper[4558]: I0120 18:43:06.270528 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-ldv9l" Jan 20 18:43:06 crc kubenswrapper[4558]: I0120 18:43:06.361109 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/rabbitmq-server-0" event={"ID":"0842b2d6-f024-4981-bc33-7af5f2cb702e","Type":"ContainerStarted","Data":"2a7e386eb453bb6586227a632c1d243da6244a3191cd8800ff3c511a0ee504e4"} Jan 20 18:43:06 crc kubenswrapper[4558]: I0120 18:43:06.361357 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="horizon-kuttl-tests/rabbitmq-server-0" Jan 20 18:43:06 crc kubenswrapper[4558]: I0120 18:43:06.380195 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="horizon-kuttl-tests/rabbitmq-server-0" podStartSLOduration=37.380178089 podStartE2EDuration="37.380178089s" podCreationTimestamp="2026-01-20 18:42:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:43:06.377448025 +0000 UTC m=+7280.137785992" watchObservedRunningTime="2026-01-20 18:43:06.380178089 +0000 UTC m=+7280.140516055" Jan 20 18:43:06 crc kubenswrapper[4558]: I0120 18:43:06.396646 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-ldv9l" Jan 20 18:43:09 crc kubenswrapper[4558]: I0120 18:43:09.098001 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openshift-marketplace/redhat-operators-ldv9l"] Jan 20 18:43:09 crc kubenswrapper[4558]: I0120 18:43:09.099194 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-ldv9l" podUID="a8e021a2-9ea0-4110-9390-4c17c191cb59" containerName="registry-server" containerID="cri-o://f245c79e6444caaa31ff6151b372bdfbcd9a23d914fccd48c7c5927ba35a789d" gracePeriod=2 Jan 20 18:43:09 crc kubenswrapper[4558]: E0120 18:43:09.240952 4558 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda8e021a2_9ea0_4110_9390_4c17c191cb59.slice/crio-f245c79e6444caaa31ff6151b372bdfbcd9a23d914fccd48c7c5927ba35a789d.scope\": RecentStats: unable to find data in memory cache]" Jan 20 18:43:09 crc kubenswrapper[4558]: I0120 18:43:09.822871 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["horizon-kuttl-tests/keystone-db-create-fbkpc"] Jan 20 18:43:09 crc kubenswrapper[4558]: I0120 18:43:09.823996 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/keystone-db-create-fbkpc" Jan 20 18:43:09 crc kubenswrapper[4558]: I0120 18:43:09.828178 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/keystone-db-create-fbkpc"] Jan 20 18:43:09 crc kubenswrapper[4558]: I0120 18:43:09.948462 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f03b5622-3007-44c2-8e57-6633f6a467e4-operator-scripts\") pod \"keystone-db-create-fbkpc\" (UID: \"f03b5622-3007-44c2-8e57-6633f6a467e4\") " pod="horizon-kuttl-tests/keystone-db-create-fbkpc" Jan 20 18:43:09 crc kubenswrapper[4558]: I0120 18:43:09.948597 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-76tv7\" (UniqueName: \"kubernetes.io/projected/f03b5622-3007-44c2-8e57-6633f6a467e4-kube-api-access-76tv7\") pod \"keystone-db-create-fbkpc\" (UID: \"f03b5622-3007-44c2-8e57-6633f6a467e4\") " pod="horizon-kuttl-tests/keystone-db-create-fbkpc" Jan 20 18:43:09 crc kubenswrapper[4558]: I0120 18:43:09.985068 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["horizon-kuttl-tests/keystone-5ae5-account-create-update-jthgq"] Jan 20 18:43:09 crc kubenswrapper[4558]: I0120 18:43:09.986456 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="horizon-kuttl-tests/keystone-5ae5-account-create-update-jthgq" Jan 20 18:43:09 crc kubenswrapper[4558]: I0120 18:43:09.990301 4558 reflector.go:368] Caches populated for *v1.Secret from object-"horizon-kuttl-tests"/"keystone-db-secret" Jan 20 18:43:09 crc kubenswrapper[4558]: I0120 18:43:09.995874 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/keystone-5ae5-account-create-update-jthgq"] Jan 20 18:43:10 crc kubenswrapper[4558]: I0120 18:43:10.050316 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-76tv7\" (UniqueName: \"kubernetes.io/projected/f03b5622-3007-44c2-8e57-6633f6a467e4-kube-api-access-76tv7\") pod \"keystone-db-create-fbkpc\" (UID: \"f03b5622-3007-44c2-8e57-6633f6a467e4\") " pod="horizon-kuttl-tests/keystone-db-create-fbkpc" Jan 20 18:43:10 crc kubenswrapper[4558]: I0120 18:43:10.050418 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f03b5622-3007-44c2-8e57-6633f6a467e4-operator-scripts\") pod \"keystone-db-create-fbkpc\" (UID: \"f03b5622-3007-44c2-8e57-6633f6a467e4\") " pod="horizon-kuttl-tests/keystone-db-create-fbkpc" Jan 20 18:43:10 crc kubenswrapper[4558]: I0120 18:43:10.051206 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f03b5622-3007-44c2-8e57-6633f6a467e4-operator-scripts\") pod \"keystone-db-create-fbkpc\" (UID: \"f03b5622-3007-44c2-8e57-6633f6a467e4\") " pod="horizon-kuttl-tests/keystone-db-create-fbkpc" Jan 20 18:43:10 crc kubenswrapper[4558]: I0120 18:43:10.075987 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-76tv7\" (UniqueName: \"kubernetes.io/projected/f03b5622-3007-44c2-8e57-6633f6a467e4-kube-api-access-76tv7\") pod \"keystone-db-create-fbkpc\" (UID: \"f03b5622-3007-44c2-8e57-6633f6a467e4\") " pod="horizon-kuttl-tests/keystone-db-create-fbkpc" Jan 20 18:43:10 crc kubenswrapper[4558]: I0120 18:43:10.139293 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="horizon-kuttl-tests/keystone-db-create-fbkpc" Jan 20 18:43:10 crc kubenswrapper[4558]: I0120 18:43:10.153109 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/13ad68d2-8d17-4219-90c0-05ac4f81d820-operator-scripts\") pod \"keystone-5ae5-account-create-update-jthgq\" (UID: \"13ad68d2-8d17-4219-90c0-05ac4f81d820\") " pod="horizon-kuttl-tests/keystone-5ae5-account-create-update-jthgq" Jan 20 18:43:10 crc kubenswrapper[4558]: I0120 18:43:10.153220 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-28msd\" (UniqueName: \"kubernetes.io/projected/13ad68d2-8d17-4219-90c0-05ac4f81d820-kube-api-access-28msd\") pod \"keystone-5ae5-account-create-update-jthgq\" (UID: \"13ad68d2-8d17-4219-90c0-05ac4f81d820\") " pod="horizon-kuttl-tests/keystone-5ae5-account-create-update-jthgq" Jan 20 18:43:10 crc kubenswrapper[4558]: I0120 18:43:10.254681 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/13ad68d2-8d17-4219-90c0-05ac4f81d820-operator-scripts\") pod \"keystone-5ae5-account-create-update-jthgq\" (UID: \"13ad68d2-8d17-4219-90c0-05ac4f81d820\") " pod="horizon-kuttl-tests/keystone-5ae5-account-create-update-jthgq" Jan 20 18:43:10 crc kubenswrapper[4558]: I0120 18:43:10.254826 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-28msd\" (UniqueName: \"kubernetes.io/projected/13ad68d2-8d17-4219-90c0-05ac4f81d820-kube-api-access-28msd\") pod \"keystone-5ae5-account-create-update-jthgq\" (UID: \"13ad68d2-8d17-4219-90c0-05ac4f81d820\") " pod="horizon-kuttl-tests/keystone-5ae5-account-create-update-jthgq" Jan 20 18:43:10 crc kubenswrapper[4558]: I0120 18:43:10.256579 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/13ad68d2-8d17-4219-90c0-05ac4f81d820-operator-scripts\") pod \"keystone-5ae5-account-create-update-jthgq\" (UID: \"13ad68d2-8d17-4219-90c0-05ac4f81d820\") " pod="horizon-kuttl-tests/keystone-5ae5-account-create-update-jthgq" Jan 20 18:43:10 crc kubenswrapper[4558]: I0120 18:43:10.279471 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-28msd\" (UniqueName: \"kubernetes.io/projected/13ad68d2-8d17-4219-90c0-05ac4f81d820-kube-api-access-28msd\") pod \"keystone-5ae5-account-create-update-jthgq\" (UID: \"13ad68d2-8d17-4219-90c0-05ac4f81d820\") " pod="horizon-kuttl-tests/keystone-5ae5-account-create-update-jthgq" Jan 20 18:43:10 crc kubenswrapper[4558]: I0120 18:43:10.341566 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="horizon-kuttl-tests/keystone-5ae5-account-create-update-jthgq" Jan 20 18:43:10 crc kubenswrapper[4558]: I0120 18:43:10.402714 4558 generic.go:334] "Generic (PLEG): container finished" podID="a8e021a2-9ea0-4110-9390-4c17c191cb59" containerID="f245c79e6444caaa31ff6151b372bdfbcd9a23d914fccd48c7c5927ba35a789d" exitCode=0 Jan 20 18:43:10 crc kubenswrapper[4558]: I0120 18:43:10.402763 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ldv9l" event={"ID":"a8e021a2-9ea0-4110-9390-4c17c191cb59","Type":"ContainerDied","Data":"f245c79e6444caaa31ff6151b372bdfbcd9a23d914fccd48c7c5927ba35a789d"} Jan 20 18:43:10 crc kubenswrapper[4558]: I0120 18:43:10.542272 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/keystone-db-create-fbkpc"] Jan 20 18:43:10 crc kubenswrapper[4558]: W0120 18:43:10.549506 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf03b5622_3007_44c2_8e57_6633f6a467e4.slice/crio-c2d9b60a1fd9663b10aa99527cd72b1d275760dd69bb41512fd7ccb51da0d1e1 WatchSource:0}: Error finding container c2d9b60a1fd9663b10aa99527cd72b1d275760dd69bb41512fd7ccb51da0d1e1: Status 404 returned error can't find the container with id c2d9b60a1fd9663b10aa99527cd72b1d275760dd69bb41512fd7ccb51da0d1e1 Jan 20 18:43:10 crc kubenswrapper[4558]: I0120 18:43:10.581814 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ldv9l" Jan 20 18:43:10 crc kubenswrapper[4558]: I0120 18:43:10.662731 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8e021a2-9ea0-4110-9390-4c17c191cb59-utilities\") pod \"a8e021a2-9ea0-4110-9390-4c17c191cb59\" (UID: \"a8e021a2-9ea0-4110-9390-4c17c191cb59\") " Jan 20 18:43:10 crc kubenswrapper[4558]: I0120 18:43:10.663260 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4dh24\" (UniqueName: \"kubernetes.io/projected/a8e021a2-9ea0-4110-9390-4c17c191cb59-kube-api-access-4dh24\") pod \"a8e021a2-9ea0-4110-9390-4c17c191cb59\" (UID: \"a8e021a2-9ea0-4110-9390-4c17c191cb59\") " Jan 20 18:43:10 crc kubenswrapper[4558]: I0120 18:43:10.663622 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8e021a2-9ea0-4110-9390-4c17c191cb59-catalog-content\") pod \"a8e021a2-9ea0-4110-9390-4c17c191cb59\" (UID: \"a8e021a2-9ea0-4110-9390-4c17c191cb59\") " Jan 20 18:43:10 crc kubenswrapper[4558]: I0120 18:43:10.663950 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a8e021a2-9ea0-4110-9390-4c17c191cb59-utilities" (OuterVolumeSpecName: "utilities") pod "a8e021a2-9ea0-4110-9390-4c17c191cb59" (UID: "a8e021a2-9ea0-4110-9390-4c17c191cb59"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:43:10 crc kubenswrapper[4558]: I0120 18:43:10.664769 4558 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a8e021a2-9ea0-4110-9390-4c17c191cb59-utilities\") on node \"crc\" DevicePath \"\"" Jan 20 18:43:10 crc kubenswrapper[4558]: I0120 18:43:10.670216 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a8e021a2-9ea0-4110-9390-4c17c191cb59-kube-api-access-4dh24" (OuterVolumeSpecName: "kube-api-access-4dh24") pod "a8e021a2-9ea0-4110-9390-4c17c191cb59" (UID: "a8e021a2-9ea0-4110-9390-4c17c191cb59"). InnerVolumeSpecName "kube-api-access-4dh24". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:43:10 crc kubenswrapper[4558]: I0120 18:43:10.766089 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4dh24\" (UniqueName: \"kubernetes.io/projected/a8e021a2-9ea0-4110-9390-4c17c191cb59-kube-api-access-4dh24\") on node \"crc\" DevicePath \"\"" Jan 20 18:43:10 crc kubenswrapper[4558]: I0120 18:43:10.769381 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/keystone-5ae5-account-create-update-jthgq"] Jan 20 18:43:10 crc kubenswrapper[4558]: I0120 18:43:10.777847 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a8e021a2-9ea0-4110-9390-4c17c191cb59-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a8e021a2-9ea0-4110-9390-4c17c191cb59" (UID: "a8e021a2-9ea0-4110-9390-4c17c191cb59"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:43:10 crc kubenswrapper[4558]: W0120 18:43:10.784154 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod13ad68d2_8d17_4219_90c0_05ac4f81d820.slice/crio-277b35ed149935319b417ec8d20f27d2cb36cf9e4dc034d02594fc680da38b94 WatchSource:0}: Error finding container 277b35ed149935319b417ec8d20f27d2cb36cf9e4dc034d02594fc680da38b94: Status 404 returned error can't find the container with id 277b35ed149935319b417ec8d20f27d2cb36cf9e4dc034d02594fc680da38b94 Jan 20 18:43:10 crc kubenswrapper[4558]: I0120 18:43:10.868132 4558 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a8e021a2-9ea0-4110-9390-4c17c191cb59-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 20 18:43:11 crc kubenswrapper[4558]: I0120 18:43:11.411221 4558 generic.go:334] "Generic (PLEG): container finished" podID="13ad68d2-8d17-4219-90c0-05ac4f81d820" containerID="30380171873dc7b4ae6fec296abf518f7899299f15631980b87e5b9966a9b40e" exitCode=0 Jan 20 18:43:11 crc kubenswrapper[4558]: I0120 18:43:11.411339 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/keystone-5ae5-account-create-update-jthgq" event={"ID":"13ad68d2-8d17-4219-90c0-05ac4f81d820","Type":"ContainerDied","Data":"30380171873dc7b4ae6fec296abf518f7899299f15631980b87e5b9966a9b40e"} Jan 20 18:43:11 crc kubenswrapper[4558]: I0120 18:43:11.411408 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/keystone-5ae5-account-create-update-jthgq" event={"ID":"13ad68d2-8d17-4219-90c0-05ac4f81d820","Type":"ContainerStarted","Data":"277b35ed149935319b417ec8d20f27d2cb36cf9e4dc034d02594fc680da38b94"} Jan 20 18:43:11 crc kubenswrapper[4558]: I0120 18:43:11.413891 4558 generic.go:334] "Generic (PLEG): container finished" 
podID="f03b5622-3007-44c2-8e57-6633f6a467e4" containerID="52bea5df6af3d3230afdec9d37339147b7f7a04f0233e8325443b5dbf6807e1a" exitCode=0 Jan 20 18:43:11 crc kubenswrapper[4558]: I0120 18:43:11.413979 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/keystone-db-create-fbkpc" event={"ID":"f03b5622-3007-44c2-8e57-6633f6a467e4","Type":"ContainerDied","Data":"52bea5df6af3d3230afdec9d37339147b7f7a04f0233e8325443b5dbf6807e1a"} Jan 20 18:43:11 crc kubenswrapper[4558]: I0120 18:43:11.414023 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/keystone-db-create-fbkpc" event={"ID":"f03b5622-3007-44c2-8e57-6633f6a467e4","Type":"ContainerStarted","Data":"c2d9b60a1fd9663b10aa99527cd72b1d275760dd69bb41512fd7ccb51da0d1e1"} Jan 20 18:43:11 crc kubenswrapper[4558]: I0120 18:43:11.417040 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-ldv9l" event={"ID":"a8e021a2-9ea0-4110-9390-4c17c191cb59","Type":"ContainerDied","Data":"f0b14b4af4bb943436b8c2fd57a33e6c53f1e87f186946c670517c5bc24e1040"} Jan 20 18:43:11 crc kubenswrapper[4558]: I0120 18:43:11.417105 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-ldv9l" Jan 20 18:43:11 crc kubenswrapper[4558]: I0120 18:43:11.417110 4558 scope.go:117] "RemoveContainer" containerID="f245c79e6444caaa31ff6151b372bdfbcd9a23d914fccd48c7c5927ba35a789d" Jan 20 18:43:11 crc kubenswrapper[4558]: I0120 18:43:11.439102 4558 scope.go:117] "RemoveContainer" containerID="79d1b41bc8506a97c883e59c8227116ba2524e84ccfd8f34a2ce88fa687e3399" Jan 20 18:43:11 crc kubenswrapper[4558]: I0120 18:43:11.469159 4558 scope.go:117] "RemoveContainer" containerID="479e96f0979ff9c2abee02ac54c47567d4490e7cbeb703b07816d0c050c5dc24" Jan 20 18:43:11 crc kubenswrapper[4558]: I0120 18:43:11.472152 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-ldv9l"] Jan 20 18:43:11 crc kubenswrapper[4558]: I0120 18:43:11.476361 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-ldv9l"] Jan 20 18:43:12 crc kubenswrapper[4558]: I0120 18:43:12.585754 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a8e021a2-9ea0-4110-9390-4c17c191cb59" path="/var/lib/kubelet/pods/a8e021a2-9ea0-4110-9390-4c17c191cb59/volumes" Jan 20 18:43:12 crc kubenswrapper[4558]: I0120 18:43:12.746085 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/keystone-5ae5-account-create-update-jthgq" Jan 20 18:43:12 crc kubenswrapper[4558]: I0120 18:43:12.749971 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="horizon-kuttl-tests/keystone-db-create-fbkpc" Jan 20 18:43:12 crc kubenswrapper[4558]: I0120 18:43:12.897471 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-76tv7\" (UniqueName: \"kubernetes.io/projected/f03b5622-3007-44c2-8e57-6633f6a467e4-kube-api-access-76tv7\") pod \"f03b5622-3007-44c2-8e57-6633f6a467e4\" (UID: \"f03b5622-3007-44c2-8e57-6633f6a467e4\") " Jan 20 18:43:12 crc kubenswrapper[4558]: I0120 18:43:12.897587 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/13ad68d2-8d17-4219-90c0-05ac4f81d820-operator-scripts\") pod \"13ad68d2-8d17-4219-90c0-05ac4f81d820\" (UID: \"13ad68d2-8d17-4219-90c0-05ac4f81d820\") " Jan 20 18:43:12 crc kubenswrapper[4558]: I0120 18:43:12.897650 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-28msd\" (UniqueName: \"kubernetes.io/projected/13ad68d2-8d17-4219-90c0-05ac4f81d820-kube-api-access-28msd\") pod \"13ad68d2-8d17-4219-90c0-05ac4f81d820\" (UID: \"13ad68d2-8d17-4219-90c0-05ac4f81d820\") " Jan 20 18:43:12 crc kubenswrapper[4558]: I0120 18:43:12.897767 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f03b5622-3007-44c2-8e57-6633f6a467e4-operator-scripts\") pod \"f03b5622-3007-44c2-8e57-6633f6a467e4\" (UID: \"f03b5622-3007-44c2-8e57-6633f6a467e4\") " Jan 20 18:43:12 crc kubenswrapper[4558]: I0120 18:43:12.898397 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/13ad68d2-8d17-4219-90c0-05ac4f81d820-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "13ad68d2-8d17-4219-90c0-05ac4f81d820" (UID: "13ad68d2-8d17-4219-90c0-05ac4f81d820"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:43:12 crc kubenswrapper[4558]: I0120 18:43:12.898411 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f03b5622-3007-44c2-8e57-6633f6a467e4-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f03b5622-3007-44c2-8e57-6633f6a467e4" (UID: "f03b5622-3007-44c2-8e57-6633f6a467e4"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:43:12 crc kubenswrapper[4558]: I0120 18:43:12.904470 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f03b5622-3007-44c2-8e57-6633f6a467e4-kube-api-access-76tv7" (OuterVolumeSpecName: "kube-api-access-76tv7") pod "f03b5622-3007-44c2-8e57-6633f6a467e4" (UID: "f03b5622-3007-44c2-8e57-6633f6a467e4"). InnerVolumeSpecName "kube-api-access-76tv7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:43:12 crc kubenswrapper[4558]: I0120 18:43:12.904653 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/13ad68d2-8d17-4219-90c0-05ac4f81d820-kube-api-access-28msd" (OuterVolumeSpecName: "kube-api-access-28msd") pod "13ad68d2-8d17-4219-90c0-05ac4f81d820" (UID: "13ad68d2-8d17-4219-90c0-05ac4f81d820"). InnerVolumeSpecName "kube-api-access-28msd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:43:13 crc kubenswrapper[4558]: I0120 18:43:13.000476 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-76tv7\" (UniqueName: \"kubernetes.io/projected/f03b5622-3007-44c2-8e57-6633f6a467e4-kube-api-access-76tv7\") on node \"crc\" DevicePath \"\"" Jan 20 18:43:13 crc kubenswrapper[4558]: I0120 18:43:13.000514 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/13ad68d2-8d17-4219-90c0-05ac4f81d820-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 18:43:13 crc kubenswrapper[4558]: I0120 18:43:13.000526 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-28msd\" (UniqueName: \"kubernetes.io/projected/13ad68d2-8d17-4219-90c0-05ac4f81d820-kube-api-access-28msd\") on node \"crc\" DevicePath \"\"" Jan 20 18:43:13 crc kubenswrapper[4558]: I0120 18:43:13.000537 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f03b5622-3007-44c2-8e57-6633f6a467e4-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 18:43:13 crc kubenswrapper[4558]: I0120 18:43:13.439250 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/keystone-5ae5-account-create-update-jthgq" Jan 20 18:43:13 crc kubenswrapper[4558]: I0120 18:43:13.439260 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/keystone-5ae5-account-create-update-jthgq" event={"ID":"13ad68d2-8d17-4219-90c0-05ac4f81d820","Type":"ContainerDied","Data":"277b35ed149935319b417ec8d20f27d2cb36cf9e4dc034d02594fc680da38b94"} Jan 20 18:43:13 crc kubenswrapper[4558]: I0120 18:43:13.439423 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="277b35ed149935319b417ec8d20f27d2cb36cf9e4dc034d02594fc680da38b94" Jan 20 18:43:13 crc kubenswrapper[4558]: I0120 18:43:13.441553 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/keystone-db-create-fbkpc" event={"ID":"f03b5622-3007-44c2-8e57-6633f6a467e4","Type":"ContainerDied","Data":"c2d9b60a1fd9663b10aa99527cd72b1d275760dd69bb41512fd7ccb51da0d1e1"} Jan 20 18:43:13 crc kubenswrapper[4558]: I0120 18:43:13.441610 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c2d9b60a1fd9663b10aa99527cd72b1d275760dd69bb41512fd7ccb51da0d1e1" Jan 20 18:43:13 crc kubenswrapper[4558]: I0120 18:43:13.441613 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="horizon-kuttl-tests/keystone-db-create-fbkpc" Jan 20 18:43:13 crc kubenswrapper[4558]: I0120 18:43:13.509699 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-index-9bkts"] Jan 20 18:43:13 crc kubenswrapper[4558]: E0120 18:43:13.510240 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8e021a2-9ea0-4110-9390-4c17c191cb59" containerName="extract-utilities" Jan 20 18:43:13 crc kubenswrapper[4558]: I0120 18:43:13.510263 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8e021a2-9ea0-4110-9390-4c17c191cb59" containerName="extract-utilities" Jan 20 18:43:13 crc kubenswrapper[4558]: E0120 18:43:13.510283 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8e021a2-9ea0-4110-9390-4c17c191cb59" containerName="extract-content" Jan 20 18:43:13 crc kubenswrapper[4558]: I0120 18:43:13.510292 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8e021a2-9ea0-4110-9390-4c17c191cb59" containerName="extract-content" Jan 20 18:43:13 crc kubenswrapper[4558]: E0120 18:43:13.510303 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8e021a2-9ea0-4110-9390-4c17c191cb59" containerName="registry-server" Jan 20 18:43:13 crc kubenswrapper[4558]: I0120 18:43:13.510311 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8e021a2-9ea0-4110-9390-4c17c191cb59" containerName="registry-server" Jan 20 18:43:13 crc kubenswrapper[4558]: E0120 18:43:13.510344 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13ad68d2-8d17-4219-90c0-05ac4f81d820" containerName="mariadb-account-create-update" Jan 20 18:43:13 crc kubenswrapper[4558]: I0120 18:43:13.510352 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="13ad68d2-8d17-4219-90c0-05ac4f81d820" containerName="mariadb-account-create-update" Jan 20 18:43:13 crc kubenswrapper[4558]: E0120 18:43:13.510382 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f03b5622-3007-44c2-8e57-6633f6a467e4" containerName="mariadb-database-create" Jan 20 18:43:13 crc kubenswrapper[4558]: I0120 18:43:13.510392 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f03b5622-3007-44c2-8e57-6633f6a467e4" containerName="mariadb-database-create" Jan 20 18:43:13 crc kubenswrapper[4558]: I0120 18:43:13.510820 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a8e021a2-9ea0-4110-9390-4c17c191cb59" containerName="registry-server" Jan 20 18:43:13 crc kubenswrapper[4558]: I0120 18:43:13.510852 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="13ad68d2-8d17-4219-90c0-05ac4f81d820" containerName="mariadb-account-create-update" Jan 20 18:43:13 crc kubenswrapper[4558]: I0120 18:43:13.510872 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f03b5622-3007-44c2-8e57-6633f6a467e4" containerName="mariadb-database-create" Jan 20 18:43:13 crc kubenswrapper[4558]: I0120 18:43:13.511581 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-index-9bkts" Jan 20 18:43:13 crc kubenswrapper[4558]: I0120 18:43:13.514618 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-index-dockercfg-pdnq8" Jan 20 18:43:13 crc kubenswrapper[4558]: I0120 18:43:13.519665 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-index-9bkts"] Jan 20 18:43:13 crc kubenswrapper[4558]: I0120 18:43:13.611811 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q5x27\" (UniqueName: \"kubernetes.io/projected/17600d40-7d65-476f-83be-d1cd168c750a-kube-api-access-q5x27\") pod \"horizon-operator-index-9bkts\" (UID: \"17600d40-7d65-476f-83be-d1cd168c750a\") " pod="openstack-operators/horizon-operator-index-9bkts" Jan 20 18:43:13 crc kubenswrapper[4558]: I0120 18:43:13.713985 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q5x27\" (UniqueName: \"kubernetes.io/projected/17600d40-7d65-476f-83be-d1cd168c750a-kube-api-access-q5x27\") pod \"horizon-operator-index-9bkts\" (UID: \"17600d40-7d65-476f-83be-d1cd168c750a\") " pod="openstack-operators/horizon-operator-index-9bkts" Jan 20 18:43:13 crc kubenswrapper[4558]: I0120 18:43:13.728546 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q5x27\" (UniqueName: \"kubernetes.io/projected/17600d40-7d65-476f-83be-d1cd168c750a-kube-api-access-q5x27\") pod \"horizon-operator-index-9bkts\" (UID: \"17600d40-7d65-476f-83be-d1cd168c750a\") " pod="openstack-operators/horizon-operator-index-9bkts" Jan 20 18:43:13 crc kubenswrapper[4558]: I0120 18:43:13.829053 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-index-9bkts" Jan 20 18:43:14 crc kubenswrapper[4558]: I0120 18:43:14.218642 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-index-9bkts"] Jan 20 18:43:14 crc kubenswrapper[4558]: W0120 18:43:14.220938 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod17600d40_7d65_476f_83be_d1cd168c750a.slice/crio-998b1bb2f2e53965c9750eebc81e0a9d297d21b2189bb80d242ff091a59e4647 WatchSource:0}: Error finding container 998b1bb2f2e53965c9750eebc81e0a9d297d21b2189bb80d242ff091a59e4647: Status 404 returned error can't find the container with id 998b1bb2f2e53965c9750eebc81e0a9d297d21b2189bb80d242ff091a59e4647 Jan 20 18:43:14 crc kubenswrapper[4558]: I0120 18:43:14.450064 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-index-9bkts" event={"ID":"17600d40-7d65-476f-83be-d1cd168c750a","Type":"ContainerStarted","Data":"998b1bb2f2e53965c9750eebc81e0a9d297d21b2189bb80d242ff091a59e4647"} Jan 20 18:43:16 crc kubenswrapper[4558]: I0120 18:43:16.472588 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-index-9bkts" event={"ID":"17600d40-7d65-476f-83be-d1cd168c750a","Type":"ContainerStarted","Data":"b3f8c0b28e0e903d26923e943caca5217cc36cae3ca65374fac3b46b1ee43f42"} Jan 20 18:43:16 crc kubenswrapper[4558]: I0120 18:43:16.491373 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-index-9bkts" podStartSLOduration=2.022631419 podStartE2EDuration="3.491352345s" podCreationTimestamp="2026-01-20 18:43:13 +0000 UTC" firstStartedPulling="2026-01-20 18:43:14.223679046 +0000 UTC m=+7287.984017014" lastFinishedPulling="2026-01-20 18:43:15.692399974 +0000 UTC m=+7289.452737940" observedRunningTime="2026-01-20 18:43:16.486543284 +0000 UTC m=+7290.246881251" watchObservedRunningTime="2026-01-20 18:43:16.491352345 +0000 UTC m=+7290.251690312" Jan 20 18:43:21 crc kubenswrapper[4558]: I0120 18:43:21.155397 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="horizon-kuttl-tests/rabbitmq-server-0" Jan 20 18:43:21 crc kubenswrapper[4558]: I0120 18:43:21.701563 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["horizon-kuttl-tests/keystone-db-sync-fv444"] Jan 20 18:43:21 crc kubenswrapper[4558]: I0120 18:43:21.702641 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="horizon-kuttl-tests/keystone-db-sync-fv444" Jan 20 18:43:21 crc kubenswrapper[4558]: I0120 18:43:21.704817 4558 reflector.go:368] Caches populated for *v1.Secret from object-"horizon-kuttl-tests"/"keystone-scripts" Jan 20 18:43:21 crc kubenswrapper[4558]: I0120 18:43:21.705307 4558 reflector.go:368] Caches populated for *v1.Secret from object-"horizon-kuttl-tests"/"keystone-keystone-dockercfg-mjr2v" Jan 20 18:43:21 crc kubenswrapper[4558]: I0120 18:43:21.705603 4558 reflector.go:368] Caches populated for *v1.Secret from object-"horizon-kuttl-tests"/"keystone" Jan 20 18:43:21 crc kubenswrapper[4558]: I0120 18:43:21.708041 4558 reflector.go:368] Caches populated for *v1.Secret from object-"horizon-kuttl-tests"/"keystone-config-data" Jan 20 18:43:21 crc kubenswrapper[4558]: I0120 18:43:21.722649 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/keystone-db-sync-fv444"] Jan 20 18:43:21 crc kubenswrapper[4558]: I0120 18:43:21.840795 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ggnzd\" (UniqueName: \"kubernetes.io/projected/195c8a3c-2f2e-43aa-afe5-a4932ff9dbbf-kube-api-access-ggnzd\") pod \"keystone-db-sync-fv444\" (UID: \"195c8a3c-2f2e-43aa-afe5-a4932ff9dbbf\") " pod="horizon-kuttl-tests/keystone-db-sync-fv444" Jan 20 18:43:21 crc kubenswrapper[4558]: I0120 18:43:21.840888 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/195c8a3c-2f2e-43aa-afe5-a4932ff9dbbf-config-data\") pod \"keystone-db-sync-fv444\" (UID: \"195c8a3c-2f2e-43aa-afe5-a4932ff9dbbf\") " pod="horizon-kuttl-tests/keystone-db-sync-fv444" Jan 20 18:43:21 crc kubenswrapper[4558]: I0120 18:43:21.942779 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/195c8a3c-2f2e-43aa-afe5-a4932ff9dbbf-config-data\") pod \"keystone-db-sync-fv444\" (UID: \"195c8a3c-2f2e-43aa-afe5-a4932ff9dbbf\") " pod="horizon-kuttl-tests/keystone-db-sync-fv444" Jan 20 18:43:21 crc kubenswrapper[4558]: I0120 18:43:21.943150 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ggnzd\" (UniqueName: \"kubernetes.io/projected/195c8a3c-2f2e-43aa-afe5-a4932ff9dbbf-kube-api-access-ggnzd\") pod \"keystone-db-sync-fv444\" (UID: \"195c8a3c-2f2e-43aa-afe5-a4932ff9dbbf\") " pod="horizon-kuttl-tests/keystone-db-sync-fv444" Jan 20 18:43:21 crc kubenswrapper[4558]: I0120 18:43:21.957918 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/195c8a3c-2f2e-43aa-afe5-a4932ff9dbbf-config-data\") pod \"keystone-db-sync-fv444\" (UID: \"195c8a3c-2f2e-43aa-afe5-a4932ff9dbbf\") " pod="horizon-kuttl-tests/keystone-db-sync-fv444" Jan 20 18:43:21 crc kubenswrapper[4558]: I0120 18:43:21.960846 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ggnzd\" (UniqueName: \"kubernetes.io/projected/195c8a3c-2f2e-43aa-afe5-a4932ff9dbbf-kube-api-access-ggnzd\") pod \"keystone-db-sync-fv444\" (UID: \"195c8a3c-2f2e-43aa-afe5-a4932ff9dbbf\") " pod="horizon-kuttl-tests/keystone-db-sync-fv444" Jan 20 18:43:22 crc kubenswrapper[4558]: I0120 18:43:22.021440 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="horizon-kuttl-tests/keystone-db-sync-fv444" Jan 20 18:43:22 crc kubenswrapper[4558]: I0120 18:43:22.417476 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/keystone-db-sync-fv444"] Jan 20 18:43:22 crc kubenswrapper[4558]: W0120 18:43:22.420588 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod195c8a3c_2f2e_43aa_afe5_a4932ff9dbbf.slice/crio-ccda10dbd920873979696f02a5f6f5fecce85d308a2fc92da9892f6101b35aac WatchSource:0}: Error finding container ccda10dbd920873979696f02a5f6f5fecce85d308a2fc92da9892f6101b35aac: Status 404 returned error can't find the container with id ccda10dbd920873979696f02a5f6f5fecce85d308a2fc92da9892f6101b35aac Jan 20 18:43:22 crc kubenswrapper[4558]: I0120 18:43:22.533478 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/keystone-db-sync-fv444" event={"ID":"195c8a3c-2f2e-43aa-afe5-a4932ff9dbbf","Type":"ContainerStarted","Data":"ccda10dbd920873979696f02a5f6f5fecce85d308a2fc92da9892f6101b35aac"} Jan 20 18:43:23 crc kubenswrapper[4558]: I0120 18:43:23.540936 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/keystone-db-sync-fv444" event={"ID":"195c8a3c-2f2e-43aa-afe5-a4932ff9dbbf","Type":"ContainerStarted","Data":"d54e0faf8f75a1568bdfd7fb47cf9ef381e5576856c397db3cfb8744ff9b7938"} Jan 20 18:43:23 crc kubenswrapper[4558]: I0120 18:43:23.561817 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="horizon-kuttl-tests/keystone-db-sync-fv444" podStartSLOduration=2.561804137 podStartE2EDuration="2.561804137s" podCreationTimestamp="2026-01-20 18:43:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:43:23.558244293 +0000 UTC m=+7297.318582260" watchObservedRunningTime="2026-01-20 18:43:23.561804137 +0000 UTC m=+7297.322142104" Jan 20 18:43:23 crc kubenswrapper[4558]: I0120 18:43:23.829633 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-index-9bkts" Jan 20 18:43:23 crc kubenswrapper[4558]: I0120 18:43:23.829698 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/horizon-operator-index-9bkts" Jan 20 18:43:23 crc kubenswrapper[4558]: I0120 18:43:23.909454 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/horizon-operator-index-9bkts" Jan 20 18:43:24 crc kubenswrapper[4558]: I0120 18:43:24.553481 4558 generic.go:334] "Generic (PLEG): container finished" podID="195c8a3c-2f2e-43aa-afe5-a4932ff9dbbf" containerID="d54e0faf8f75a1568bdfd7fb47cf9ef381e5576856c397db3cfb8744ff9b7938" exitCode=0 Jan 20 18:43:24 crc kubenswrapper[4558]: I0120 18:43:24.553582 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/keystone-db-sync-fv444" event={"ID":"195c8a3c-2f2e-43aa-afe5-a4932ff9dbbf","Type":"ContainerDied","Data":"d54e0faf8f75a1568bdfd7fb47cf9ef381e5576856c397db3cfb8744ff9b7938"} Jan 20 18:43:24 crc kubenswrapper[4558]: I0120 18:43:24.583101 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-index-9bkts" Jan 20 18:43:25 crc kubenswrapper[4558]: I0120 18:43:25.795776 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="horizon-kuttl-tests/keystone-db-sync-fv444" Jan 20 18:43:25 crc kubenswrapper[4558]: I0120 18:43:25.913517 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ggnzd\" (UniqueName: \"kubernetes.io/projected/195c8a3c-2f2e-43aa-afe5-a4932ff9dbbf-kube-api-access-ggnzd\") pod \"195c8a3c-2f2e-43aa-afe5-a4932ff9dbbf\" (UID: \"195c8a3c-2f2e-43aa-afe5-a4932ff9dbbf\") " Jan 20 18:43:25 crc kubenswrapper[4558]: I0120 18:43:25.913736 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/195c8a3c-2f2e-43aa-afe5-a4932ff9dbbf-config-data\") pod \"195c8a3c-2f2e-43aa-afe5-a4932ff9dbbf\" (UID: \"195c8a3c-2f2e-43aa-afe5-a4932ff9dbbf\") " Jan 20 18:43:25 crc kubenswrapper[4558]: I0120 18:43:25.920477 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/195c8a3c-2f2e-43aa-afe5-a4932ff9dbbf-kube-api-access-ggnzd" (OuterVolumeSpecName: "kube-api-access-ggnzd") pod "195c8a3c-2f2e-43aa-afe5-a4932ff9dbbf" (UID: "195c8a3c-2f2e-43aa-afe5-a4932ff9dbbf"). InnerVolumeSpecName "kube-api-access-ggnzd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:43:25 crc kubenswrapper[4558]: I0120 18:43:25.949919 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/195c8a3c-2f2e-43aa-afe5-a4932ff9dbbf-config-data" (OuterVolumeSpecName: "config-data") pod "195c8a3c-2f2e-43aa-afe5-a4932ff9dbbf" (UID: "195c8a3c-2f2e-43aa-afe5-a4932ff9dbbf"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:43:25 crc kubenswrapper[4558]: I0120 18:43:25.953901 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/653b3b52ace42a43c00bbe11ec09bdc12d146890fa0ae4874ac941672ahx2xr"] Jan 20 18:43:25 crc kubenswrapper[4558]: E0120 18:43:25.954430 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="195c8a3c-2f2e-43aa-afe5-a4932ff9dbbf" containerName="keystone-db-sync" Jan 20 18:43:25 crc kubenswrapper[4558]: I0120 18:43:25.954505 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="195c8a3c-2f2e-43aa-afe5-a4932ff9dbbf" containerName="keystone-db-sync" Jan 20 18:43:25 crc kubenswrapper[4558]: I0120 18:43:25.954769 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="195c8a3c-2f2e-43aa-afe5-a4932ff9dbbf" containerName="keystone-db-sync" Jan 20 18:43:25 crc kubenswrapper[4558]: I0120 18:43:25.956540 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/653b3b52ace42a43c00bbe11ec09bdc12d146890fa0ae4874ac941672ahx2xr" Jan 20 18:43:25 crc kubenswrapper[4558]: I0120 18:43:25.959571 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-7xp5w" Jan 20 18:43:25 crc kubenswrapper[4558]: I0120 18:43:25.959630 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/653b3b52ace42a43c00bbe11ec09bdc12d146890fa0ae4874ac941672ahx2xr"] Jan 20 18:43:26 crc kubenswrapper[4558]: I0120 18:43:26.016493 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ggnzd\" (UniqueName: \"kubernetes.io/projected/195c8a3c-2f2e-43aa-afe5-a4932ff9dbbf-kube-api-access-ggnzd\") on node \"crc\" DevicePath \"\"" Jan 20 18:43:26 crc kubenswrapper[4558]: I0120 18:43:26.016525 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/195c8a3c-2f2e-43aa-afe5-a4932ff9dbbf-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 18:43:26 crc kubenswrapper[4558]: I0120 18:43:26.119118 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3a7c364b-9972-4300-9e72-ce120d9c5d9a-bundle\") pod \"653b3b52ace42a43c00bbe11ec09bdc12d146890fa0ae4874ac941672ahx2xr\" (UID: \"3a7c364b-9972-4300-9e72-ce120d9c5d9a\") " pod="openstack-operators/653b3b52ace42a43c00bbe11ec09bdc12d146890fa0ae4874ac941672ahx2xr" Jan 20 18:43:26 crc kubenswrapper[4558]: I0120 18:43:26.119268 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3a7c364b-9972-4300-9e72-ce120d9c5d9a-util\") pod \"653b3b52ace42a43c00bbe11ec09bdc12d146890fa0ae4874ac941672ahx2xr\" (UID: \"3a7c364b-9972-4300-9e72-ce120d9c5d9a\") " pod="openstack-operators/653b3b52ace42a43c00bbe11ec09bdc12d146890fa0ae4874ac941672ahx2xr" Jan 20 18:43:26 crc kubenswrapper[4558]: I0120 18:43:26.119720 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g9mp9\" (UniqueName: \"kubernetes.io/projected/3a7c364b-9972-4300-9e72-ce120d9c5d9a-kube-api-access-g9mp9\") pod \"653b3b52ace42a43c00bbe11ec09bdc12d146890fa0ae4874ac941672ahx2xr\" (UID: \"3a7c364b-9972-4300-9e72-ce120d9c5d9a\") " pod="openstack-operators/653b3b52ace42a43c00bbe11ec09bdc12d146890fa0ae4874ac941672ahx2xr" Jan 20 18:43:26 crc kubenswrapper[4558]: I0120 18:43:26.222953 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3a7c364b-9972-4300-9e72-ce120d9c5d9a-bundle\") pod \"653b3b52ace42a43c00bbe11ec09bdc12d146890fa0ae4874ac941672ahx2xr\" (UID: \"3a7c364b-9972-4300-9e72-ce120d9c5d9a\") " pod="openstack-operators/653b3b52ace42a43c00bbe11ec09bdc12d146890fa0ae4874ac941672ahx2xr" Jan 20 18:43:26 crc kubenswrapper[4558]: I0120 18:43:26.223085 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3a7c364b-9972-4300-9e72-ce120d9c5d9a-util\") pod \"653b3b52ace42a43c00bbe11ec09bdc12d146890fa0ae4874ac941672ahx2xr\" (UID: \"3a7c364b-9972-4300-9e72-ce120d9c5d9a\") " pod="openstack-operators/653b3b52ace42a43c00bbe11ec09bdc12d146890fa0ae4874ac941672ahx2xr" Jan 20 18:43:26 crc kubenswrapper[4558]: I0120 18:43:26.223280 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-g9mp9\" (UniqueName: \"kubernetes.io/projected/3a7c364b-9972-4300-9e72-ce120d9c5d9a-kube-api-access-g9mp9\") pod \"653b3b52ace42a43c00bbe11ec09bdc12d146890fa0ae4874ac941672ahx2xr\" (UID: \"3a7c364b-9972-4300-9e72-ce120d9c5d9a\") " pod="openstack-operators/653b3b52ace42a43c00bbe11ec09bdc12d146890fa0ae4874ac941672ahx2xr" Jan 20 18:43:26 crc kubenswrapper[4558]: I0120 18:43:26.223524 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3a7c364b-9972-4300-9e72-ce120d9c5d9a-bundle\") pod \"653b3b52ace42a43c00bbe11ec09bdc12d146890fa0ae4874ac941672ahx2xr\" (UID: \"3a7c364b-9972-4300-9e72-ce120d9c5d9a\") " pod="openstack-operators/653b3b52ace42a43c00bbe11ec09bdc12d146890fa0ae4874ac941672ahx2xr" Jan 20 18:43:26 crc kubenswrapper[4558]: I0120 18:43:26.223593 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3a7c364b-9972-4300-9e72-ce120d9c5d9a-util\") pod \"653b3b52ace42a43c00bbe11ec09bdc12d146890fa0ae4874ac941672ahx2xr\" (UID: \"3a7c364b-9972-4300-9e72-ce120d9c5d9a\") " pod="openstack-operators/653b3b52ace42a43c00bbe11ec09bdc12d146890fa0ae4874ac941672ahx2xr" Jan 20 18:43:26 crc kubenswrapper[4558]: I0120 18:43:26.239982 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g9mp9\" (UniqueName: \"kubernetes.io/projected/3a7c364b-9972-4300-9e72-ce120d9c5d9a-kube-api-access-g9mp9\") pod \"653b3b52ace42a43c00bbe11ec09bdc12d146890fa0ae4874ac941672ahx2xr\" (UID: \"3a7c364b-9972-4300-9e72-ce120d9c5d9a\") " pod="openstack-operators/653b3b52ace42a43c00bbe11ec09bdc12d146890fa0ae4874ac941672ahx2xr" Jan 20 18:43:26 crc kubenswrapper[4558]: I0120 18:43:26.294248 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/653b3b52ace42a43c00bbe11ec09bdc12d146890fa0ae4874ac941672ahx2xr" Jan 20 18:43:26 crc kubenswrapper[4558]: I0120 18:43:26.568599 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="horizon-kuttl-tests/keystone-db-sync-fv444" Jan 20 18:43:26 crc kubenswrapper[4558]: I0120 18:43:26.572685 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/keystone-db-sync-fv444" event={"ID":"195c8a3c-2f2e-43aa-afe5-a4932ff9dbbf","Type":"ContainerDied","Data":"ccda10dbd920873979696f02a5f6f5fecce85d308a2fc92da9892f6101b35aac"} Jan 20 18:43:26 crc kubenswrapper[4558]: I0120 18:43:26.572728 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ccda10dbd920873979696f02a5f6f5fecce85d308a2fc92da9892f6101b35aac" Jan 20 18:43:26 crc kubenswrapper[4558]: I0120 18:43:26.674373 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/653b3b52ace42a43c00bbe11ec09bdc12d146890fa0ae4874ac941672ahx2xr"] Jan 20 18:43:26 crc kubenswrapper[4558]: W0120 18:43:26.679870 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3a7c364b_9972_4300_9e72_ce120d9c5d9a.slice/crio-62a526a416e26a0024495c0caf4490ffa1789c32b2294a5032a24286f1f49077 WatchSource:0}: Error finding container 62a526a416e26a0024495c0caf4490ffa1789c32b2294a5032a24286f1f49077: Status 404 returned error can't find the container with id 62a526a416e26a0024495c0caf4490ffa1789c32b2294a5032a24286f1f49077 Jan 20 18:43:26 crc kubenswrapper[4558]: I0120 18:43:26.749065 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["horizon-kuttl-tests/keystone-bootstrap-jsjkw"] Jan 20 18:43:26 crc kubenswrapper[4558]: I0120 18:43:26.750475 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/keystone-bootstrap-jsjkw" Jan 20 18:43:26 crc kubenswrapper[4558]: I0120 18:43:26.752055 4558 reflector.go:368] Caches populated for *v1.Secret from object-"horizon-kuttl-tests"/"osp-secret" Jan 20 18:43:26 crc kubenswrapper[4558]: I0120 18:43:26.752626 4558 reflector.go:368] Caches populated for *v1.Secret from object-"horizon-kuttl-tests"/"keystone-scripts" Jan 20 18:43:26 crc kubenswrapper[4558]: I0120 18:43:26.753071 4558 reflector.go:368] Caches populated for *v1.Secret from object-"horizon-kuttl-tests"/"keystone" Jan 20 18:43:26 crc kubenswrapper[4558]: I0120 18:43:26.753140 4558 reflector.go:368] Caches populated for *v1.Secret from object-"horizon-kuttl-tests"/"keystone-config-data" Jan 20 18:43:26 crc kubenswrapper[4558]: I0120 18:43:26.753076 4558 reflector.go:368] Caches populated for *v1.Secret from object-"horizon-kuttl-tests"/"keystone-keystone-dockercfg-mjr2v" Jan 20 18:43:26 crc kubenswrapper[4558]: I0120 18:43:26.765924 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/keystone-bootstrap-jsjkw"] Jan 20 18:43:26 crc kubenswrapper[4558]: I0120 18:43:26.936112 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/eb102007-e2b6-44de-bbc2-600f91c4f560-credential-keys\") pod \"keystone-bootstrap-jsjkw\" (UID: \"eb102007-e2b6-44de-bbc2-600f91c4f560\") " pod="horizon-kuttl-tests/keystone-bootstrap-jsjkw" Jan 20 18:43:26 crc kubenswrapper[4558]: I0120 18:43:26.936748 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eb102007-e2b6-44de-bbc2-600f91c4f560-scripts\") pod \"keystone-bootstrap-jsjkw\" (UID: \"eb102007-e2b6-44de-bbc2-600f91c4f560\") " pod="horizon-kuttl-tests/keystone-bootstrap-jsjkw" Jan 20 
18:43:26 crc kubenswrapper[4558]: I0120 18:43:26.936782 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/eb102007-e2b6-44de-bbc2-600f91c4f560-fernet-keys\") pod \"keystone-bootstrap-jsjkw\" (UID: \"eb102007-e2b6-44de-bbc2-600f91c4f560\") " pod="horizon-kuttl-tests/keystone-bootstrap-jsjkw" Jan 20 18:43:26 crc kubenswrapper[4558]: I0120 18:43:26.936884 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb102007-e2b6-44de-bbc2-600f91c4f560-config-data\") pod \"keystone-bootstrap-jsjkw\" (UID: \"eb102007-e2b6-44de-bbc2-600f91c4f560\") " pod="horizon-kuttl-tests/keystone-bootstrap-jsjkw" Jan 20 18:43:26 crc kubenswrapper[4558]: I0120 18:43:26.936990 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cdgl7\" (UniqueName: \"kubernetes.io/projected/eb102007-e2b6-44de-bbc2-600f91c4f560-kube-api-access-cdgl7\") pod \"keystone-bootstrap-jsjkw\" (UID: \"eb102007-e2b6-44de-bbc2-600f91c4f560\") " pod="horizon-kuttl-tests/keystone-bootstrap-jsjkw" Jan 20 18:43:27 crc kubenswrapper[4558]: I0120 18:43:27.039200 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eb102007-e2b6-44de-bbc2-600f91c4f560-scripts\") pod \"keystone-bootstrap-jsjkw\" (UID: \"eb102007-e2b6-44de-bbc2-600f91c4f560\") " pod="horizon-kuttl-tests/keystone-bootstrap-jsjkw" Jan 20 18:43:27 crc kubenswrapper[4558]: I0120 18:43:27.039246 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/eb102007-e2b6-44de-bbc2-600f91c4f560-fernet-keys\") pod \"keystone-bootstrap-jsjkw\" (UID: \"eb102007-e2b6-44de-bbc2-600f91c4f560\") " pod="horizon-kuttl-tests/keystone-bootstrap-jsjkw" Jan 20 18:43:27 crc kubenswrapper[4558]: I0120 18:43:27.039288 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb102007-e2b6-44de-bbc2-600f91c4f560-config-data\") pod \"keystone-bootstrap-jsjkw\" (UID: \"eb102007-e2b6-44de-bbc2-600f91c4f560\") " pod="horizon-kuttl-tests/keystone-bootstrap-jsjkw" Jan 20 18:43:27 crc kubenswrapper[4558]: I0120 18:43:27.039343 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cdgl7\" (UniqueName: \"kubernetes.io/projected/eb102007-e2b6-44de-bbc2-600f91c4f560-kube-api-access-cdgl7\") pod \"keystone-bootstrap-jsjkw\" (UID: \"eb102007-e2b6-44de-bbc2-600f91c4f560\") " pod="horizon-kuttl-tests/keystone-bootstrap-jsjkw" Jan 20 18:43:27 crc kubenswrapper[4558]: I0120 18:43:27.039377 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/eb102007-e2b6-44de-bbc2-600f91c4f560-credential-keys\") pod \"keystone-bootstrap-jsjkw\" (UID: \"eb102007-e2b6-44de-bbc2-600f91c4f560\") " pod="horizon-kuttl-tests/keystone-bootstrap-jsjkw" Jan 20 18:43:27 crc kubenswrapper[4558]: I0120 18:43:27.046001 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/eb102007-e2b6-44de-bbc2-600f91c4f560-fernet-keys\") pod \"keystone-bootstrap-jsjkw\" (UID: \"eb102007-e2b6-44de-bbc2-600f91c4f560\") " pod="horizon-kuttl-tests/keystone-bootstrap-jsjkw" Jan 20 18:43:27 crc 
kubenswrapper[4558]: I0120 18:43:27.046049 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/eb102007-e2b6-44de-bbc2-600f91c4f560-credential-keys\") pod \"keystone-bootstrap-jsjkw\" (UID: \"eb102007-e2b6-44de-bbc2-600f91c4f560\") " pod="horizon-kuttl-tests/keystone-bootstrap-jsjkw" Jan 20 18:43:27 crc kubenswrapper[4558]: I0120 18:43:27.046228 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb102007-e2b6-44de-bbc2-600f91c4f560-config-data\") pod \"keystone-bootstrap-jsjkw\" (UID: \"eb102007-e2b6-44de-bbc2-600f91c4f560\") " pod="horizon-kuttl-tests/keystone-bootstrap-jsjkw" Jan 20 18:43:27 crc kubenswrapper[4558]: I0120 18:43:27.046254 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eb102007-e2b6-44de-bbc2-600f91c4f560-scripts\") pod \"keystone-bootstrap-jsjkw\" (UID: \"eb102007-e2b6-44de-bbc2-600f91c4f560\") " pod="horizon-kuttl-tests/keystone-bootstrap-jsjkw" Jan 20 18:43:27 crc kubenswrapper[4558]: I0120 18:43:27.055838 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cdgl7\" (UniqueName: \"kubernetes.io/projected/eb102007-e2b6-44de-bbc2-600f91c4f560-kube-api-access-cdgl7\") pod \"keystone-bootstrap-jsjkw\" (UID: \"eb102007-e2b6-44de-bbc2-600f91c4f560\") " pod="horizon-kuttl-tests/keystone-bootstrap-jsjkw" Jan 20 18:43:27 crc kubenswrapper[4558]: I0120 18:43:27.101007 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/keystone-bootstrap-jsjkw" Jan 20 18:43:27 crc kubenswrapper[4558]: I0120 18:43:27.482897 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/keystone-bootstrap-jsjkw"] Jan 20 18:43:27 crc kubenswrapper[4558]: W0120 18:43:27.487121 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podeb102007_e2b6_44de_bbc2_600f91c4f560.slice/crio-e05dca4d6465c316ec41c51e6f7b28563420b33fe833e3ad75811b4e2fb9a5ff WatchSource:0}: Error finding container e05dca4d6465c316ec41c51e6f7b28563420b33fe833e3ad75811b4e2fb9a5ff: Status 404 returned error can't find the container with id e05dca4d6465c316ec41c51e6f7b28563420b33fe833e3ad75811b4e2fb9a5ff Jan 20 18:43:27 crc kubenswrapper[4558]: I0120 18:43:27.575905 4558 generic.go:334] "Generic (PLEG): container finished" podID="3a7c364b-9972-4300-9e72-ce120d9c5d9a" containerID="7e5b52f231b342cd34c453344723a866f4c235117492bc415f83e04d18f37e25" exitCode=0 Jan 20 18:43:27 crc kubenswrapper[4558]: I0120 18:43:27.575953 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/653b3b52ace42a43c00bbe11ec09bdc12d146890fa0ae4874ac941672ahx2xr" event={"ID":"3a7c364b-9972-4300-9e72-ce120d9c5d9a","Type":"ContainerDied","Data":"7e5b52f231b342cd34c453344723a866f4c235117492bc415f83e04d18f37e25"} Jan 20 18:43:27 crc kubenswrapper[4558]: I0120 18:43:27.576007 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/653b3b52ace42a43c00bbe11ec09bdc12d146890fa0ae4874ac941672ahx2xr" event={"ID":"3a7c364b-9972-4300-9e72-ce120d9c5d9a","Type":"ContainerStarted","Data":"62a526a416e26a0024495c0caf4490ffa1789c32b2294a5032a24286f1f49077"} Jan 20 18:43:27 crc kubenswrapper[4558]: I0120 18:43:27.577225 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/keystone-bootstrap-jsjkw" 
event={"ID":"eb102007-e2b6-44de-bbc2-600f91c4f560","Type":"ContainerStarted","Data":"e05dca4d6465c316ec41c51e6f7b28563420b33fe833e3ad75811b4e2fb9a5ff"} Jan 20 18:43:28 crc kubenswrapper[4558]: I0120 18:43:28.592965 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/653b3b52ace42a43c00bbe11ec09bdc12d146890fa0ae4874ac941672ahx2xr" event={"ID":"3a7c364b-9972-4300-9e72-ce120d9c5d9a","Type":"ContainerStarted","Data":"41175a9819f276c55db011dad595ab834483c95a3df0be7eda373dd7bf174b5a"} Jan 20 18:43:28 crc kubenswrapper[4558]: I0120 18:43:28.594732 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/keystone-bootstrap-jsjkw" event={"ID":"eb102007-e2b6-44de-bbc2-600f91c4f560","Type":"ContainerStarted","Data":"96270ee0588590f74d43d05f70fefc4015a2d7ef2c13af4116bf8847338dee52"} Jan 20 18:43:28 crc kubenswrapper[4558]: I0120 18:43:28.652979 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="horizon-kuttl-tests/keystone-bootstrap-jsjkw" podStartSLOduration=2.652948546 podStartE2EDuration="2.652948546s" podCreationTimestamp="2026-01-20 18:43:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:43:28.641979702 +0000 UTC m=+7302.402317669" watchObservedRunningTime="2026-01-20 18:43:28.652948546 +0000 UTC m=+7302.413286513" Jan 20 18:43:29 crc kubenswrapper[4558]: I0120 18:43:29.603087 4558 generic.go:334] "Generic (PLEG): container finished" podID="3a7c364b-9972-4300-9e72-ce120d9c5d9a" containerID="41175a9819f276c55db011dad595ab834483c95a3df0be7eda373dd7bf174b5a" exitCode=0 Jan 20 18:43:29 crc kubenswrapper[4558]: I0120 18:43:29.603154 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/653b3b52ace42a43c00bbe11ec09bdc12d146890fa0ae4874ac941672ahx2xr" event={"ID":"3a7c364b-9972-4300-9e72-ce120d9c5d9a","Type":"ContainerDied","Data":"41175a9819f276c55db011dad595ab834483c95a3df0be7eda373dd7bf174b5a"} Jan 20 18:43:30 crc kubenswrapper[4558]: I0120 18:43:30.612066 4558 generic.go:334] "Generic (PLEG): container finished" podID="3a7c364b-9972-4300-9e72-ce120d9c5d9a" containerID="f344bf4154d72c6f0f375558c32584bb199dea07b741de2ee0bf0a9985efffe5" exitCode=0 Jan 20 18:43:30 crc kubenswrapper[4558]: I0120 18:43:30.612186 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/653b3b52ace42a43c00bbe11ec09bdc12d146890fa0ae4874ac941672ahx2xr" event={"ID":"3a7c364b-9972-4300-9e72-ce120d9c5d9a","Type":"ContainerDied","Data":"f344bf4154d72c6f0f375558c32584bb199dea07b741de2ee0bf0a9985efffe5"} Jan 20 18:43:30 crc kubenswrapper[4558]: I0120 18:43:30.613631 4558 generic.go:334] "Generic (PLEG): container finished" podID="eb102007-e2b6-44de-bbc2-600f91c4f560" containerID="96270ee0588590f74d43d05f70fefc4015a2d7ef2c13af4116bf8847338dee52" exitCode=0 Jan 20 18:43:30 crc kubenswrapper[4558]: I0120 18:43:30.613667 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/keystone-bootstrap-jsjkw" event={"ID":"eb102007-e2b6-44de-bbc2-600f91c4f560","Type":"ContainerDied","Data":"96270ee0588590f74d43d05f70fefc4015a2d7ef2c13af4116bf8847338dee52"} Jan 20 18:43:31 crc kubenswrapper[4558]: I0120 18:43:31.944715 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/653b3b52ace42a43c00bbe11ec09bdc12d146890fa0ae4874ac941672ahx2xr" Jan 20 18:43:31 crc kubenswrapper[4558]: I0120 18:43:31.948361 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/keystone-bootstrap-jsjkw" Jan 20 18:43:32 crc kubenswrapper[4558]: I0120 18:43:32.022575 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3a7c364b-9972-4300-9e72-ce120d9c5d9a-bundle\") pod \"3a7c364b-9972-4300-9e72-ce120d9c5d9a\" (UID: \"3a7c364b-9972-4300-9e72-ce120d9c5d9a\") " Jan 20 18:43:32 crc kubenswrapper[4558]: I0120 18:43:32.022880 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g9mp9\" (UniqueName: \"kubernetes.io/projected/3a7c364b-9972-4300-9e72-ce120d9c5d9a-kube-api-access-g9mp9\") pod \"3a7c364b-9972-4300-9e72-ce120d9c5d9a\" (UID: \"3a7c364b-9972-4300-9e72-ce120d9c5d9a\") " Jan 20 18:43:32 crc kubenswrapper[4558]: I0120 18:43:32.022944 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/eb102007-e2b6-44de-bbc2-600f91c4f560-fernet-keys\") pod \"eb102007-e2b6-44de-bbc2-600f91c4f560\" (UID: \"eb102007-e2b6-44de-bbc2-600f91c4f560\") " Jan 20 18:43:32 crc kubenswrapper[4558]: I0120 18:43:32.022988 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/eb102007-e2b6-44de-bbc2-600f91c4f560-credential-keys\") pod \"eb102007-e2b6-44de-bbc2-600f91c4f560\" (UID: \"eb102007-e2b6-44de-bbc2-600f91c4f560\") " Jan 20 18:43:32 crc kubenswrapper[4558]: I0120 18:43:32.023006 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3a7c364b-9972-4300-9e72-ce120d9c5d9a-util\") pod \"3a7c364b-9972-4300-9e72-ce120d9c5d9a\" (UID: \"3a7c364b-9972-4300-9e72-ce120d9c5d9a\") " Jan 20 18:43:32 crc kubenswrapper[4558]: I0120 18:43:32.023051 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eb102007-e2b6-44de-bbc2-600f91c4f560-scripts\") pod \"eb102007-e2b6-44de-bbc2-600f91c4f560\" (UID: \"eb102007-e2b6-44de-bbc2-600f91c4f560\") " Jan 20 18:43:32 crc kubenswrapper[4558]: I0120 18:43:32.023131 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cdgl7\" (UniqueName: \"kubernetes.io/projected/eb102007-e2b6-44de-bbc2-600f91c4f560-kube-api-access-cdgl7\") pod \"eb102007-e2b6-44de-bbc2-600f91c4f560\" (UID: \"eb102007-e2b6-44de-bbc2-600f91c4f560\") " Jan 20 18:43:32 crc kubenswrapper[4558]: I0120 18:43:32.023153 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb102007-e2b6-44de-bbc2-600f91c4f560-config-data\") pod \"eb102007-e2b6-44de-bbc2-600f91c4f560\" (UID: \"eb102007-e2b6-44de-bbc2-600f91c4f560\") " Jan 20 18:43:32 crc kubenswrapper[4558]: I0120 18:43:32.023432 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3a7c364b-9972-4300-9e72-ce120d9c5d9a-bundle" (OuterVolumeSpecName: "bundle") pod "3a7c364b-9972-4300-9e72-ce120d9c5d9a" (UID: "3a7c364b-9972-4300-9e72-ce120d9c5d9a"). InnerVolumeSpecName "bundle". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:43:32 crc kubenswrapper[4558]: I0120 18:43:32.023977 4558 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3a7c364b-9972-4300-9e72-ce120d9c5d9a-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:43:32 crc kubenswrapper[4558]: I0120 18:43:32.027967 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eb102007-e2b6-44de-bbc2-600f91c4f560-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "eb102007-e2b6-44de-bbc2-600f91c4f560" (UID: "eb102007-e2b6-44de-bbc2-600f91c4f560"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:43:32 crc kubenswrapper[4558]: I0120 18:43:32.028112 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eb102007-e2b6-44de-bbc2-600f91c4f560-kube-api-access-cdgl7" (OuterVolumeSpecName: "kube-api-access-cdgl7") pod "eb102007-e2b6-44de-bbc2-600f91c4f560" (UID: "eb102007-e2b6-44de-bbc2-600f91c4f560"). InnerVolumeSpecName "kube-api-access-cdgl7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:43:32 crc kubenswrapper[4558]: I0120 18:43:32.028617 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3a7c364b-9972-4300-9e72-ce120d9c5d9a-kube-api-access-g9mp9" (OuterVolumeSpecName: "kube-api-access-g9mp9") pod "3a7c364b-9972-4300-9e72-ce120d9c5d9a" (UID: "3a7c364b-9972-4300-9e72-ce120d9c5d9a"). InnerVolumeSpecName "kube-api-access-g9mp9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:43:32 crc kubenswrapper[4558]: I0120 18:43:32.033289 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eb102007-e2b6-44de-bbc2-600f91c4f560-scripts" (OuterVolumeSpecName: "scripts") pod "eb102007-e2b6-44de-bbc2-600f91c4f560" (UID: "eb102007-e2b6-44de-bbc2-600f91c4f560"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:43:32 crc kubenswrapper[4558]: I0120 18:43:32.033439 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eb102007-e2b6-44de-bbc2-600f91c4f560-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "eb102007-e2b6-44de-bbc2-600f91c4f560" (UID: "eb102007-e2b6-44de-bbc2-600f91c4f560"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:43:32 crc kubenswrapper[4558]: I0120 18:43:32.035053 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3a7c364b-9972-4300-9e72-ce120d9c5d9a-util" (OuterVolumeSpecName: "util") pod "3a7c364b-9972-4300-9e72-ce120d9c5d9a" (UID: "3a7c364b-9972-4300-9e72-ce120d9c5d9a"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:43:32 crc kubenswrapper[4558]: I0120 18:43:32.039922 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eb102007-e2b6-44de-bbc2-600f91c4f560-config-data" (OuterVolumeSpecName: "config-data") pod "eb102007-e2b6-44de-bbc2-600f91c4f560" (UID: "eb102007-e2b6-44de-bbc2-600f91c4f560"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:43:32 crc kubenswrapper[4558]: I0120 18:43:32.124835 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g9mp9\" (UniqueName: \"kubernetes.io/projected/3a7c364b-9972-4300-9e72-ce120d9c5d9a-kube-api-access-g9mp9\") on node \"crc\" DevicePath \"\"" Jan 20 18:43:32 crc kubenswrapper[4558]: I0120 18:43:32.125239 4558 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/eb102007-e2b6-44de-bbc2-600f91c4f560-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 20 18:43:32 crc kubenswrapper[4558]: I0120 18:43:32.125314 4558 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/eb102007-e2b6-44de-bbc2-600f91c4f560-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 20 18:43:32 crc kubenswrapper[4558]: I0120 18:43:32.125381 4558 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3a7c364b-9972-4300-9e72-ce120d9c5d9a-util\") on node \"crc\" DevicePath \"\"" Jan 20 18:43:32 crc kubenswrapper[4558]: I0120 18:43:32.125436 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eb102007-e2b6-44de-bbc2-600f91c4f560-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 18:43:32 crc kubenswrapper[4558]: I0120 18:43:32.125505 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cdgl7\" (UniqueName: \"kubernetes.io/projected/eb102007-e2b6-44de-bbc2-600f91c4f560-kube-api-access-cdgl7\") on node \"crc\" DevicePath \"\"" Jan 20 18:43:32 crc kubenswrapper[4558]: I0120 18:43:32.125555 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb102007-e2b6-44de-bbc2-600f91c4f560-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 18:43:32 crc kubenswrapper[4558]: I0120 18:43:32.630749 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/keystone-bootstrap-jsjkw" event={"ID":"eb102007-e2b6-44de-bbc2-600f91c4f560","Type":"ContainerDied","Data":"e05dca4d6465c316ec41c51e6f7b28563420b33fe833e3ad75811b4e2fb9a5ff"} Jan 20 18:43:32 crc kubenswrapper[4558]: I0120 18:43:32.630825 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e05dca4d6465c316ec41c51e6f7b28563420b33fe833e3ad75811b4e2fb9a5ff" Jan 20 18:43:32 crc kubenswrapper[4558]: I0120 18:43:32.630785 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/keystone-bootstrap-jsjkw" Jan 20 18:43:32 crc kubenswrapper[4558]: I0120 18:43:32.634276 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/653b3b52ace42a43c00bbe11ec09bdc12d146890fa0ae4874ac941672ahx2xr" event={"ID":"3a7c364b-9972-4300-9e72-ce120d9c5d9a","Type":"ContainerDied","Data":"62a526a416e26a0024495c0caf4490ffa1789c32b2294a5032a24286f1f49077"} Jan 20 18:43:32 crc kubenswrapper[4558]: I0120 18:43:32.634392 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="62a526a416e26a0024495c0caf4490ffa1789c32b2294a5032a24286f1f49077" Jan 20 18:43:32 crc kubenswrapper[4558]: I0120 18:43:32.634411 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/653b3b52ace42a43c00bbe11ec09bdc12d146890fa0ae4874ac941672ahx2xr" Jan 20 18:43:33 crc kubenswrapper[4558]: I0120 18:43:33.038220 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["horizon-kuttl-tests/keystone-f9689d859-jt4kw"] Jan 20 18:43:33 crc kubenswrapper[4558]: E0120 18:43:33.038645 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a7c364b-9972-4300-9e72-ce120d9c5d9a" containerName="extract" Jan 20 18:43:33 crc kubenswrapper[4558]: I0120 18:43:33.038661 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a7c364b-9972-4300-9e72-ce120d9c5d9a" containerName="extract" Jan 20 18:43:33 crc kubenswrapper[4558]: E0120 18:43:33.038693 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a7c364b-9972-4300-9e72-ce120d9c5d9a" containerName="util" Jan 20 18:43:33 crc kubenswrapper[4558]: I0120 18:43:33.038698 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a7c364b-9972-4300-9e72-ce120d9c5d9a" containerName="util" Jan 20 18:43:33 crc kubenswrapper[4558]: E0120 18:43:33.038713 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb102007-e2b6-44de-bbc2-600f91c4f560" containerName="keystone-bootstrap" Jan 20 18:43:33 crc kubenswrapper[4558]: I0120 18:43:33.038720 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb102007-e2b6-44de-bbc2-600f91c4f560" containerName="keystone-bootstrap" Jan 20 18:43:33 crc kubenswrapper[4558]: E0120 18:43:33.038728 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a7c364b-9972-4300-9e72-ce120d9c5d9a" containerName="pull" Jan 20 18:43:33 crc kubenswrapper[4558]: I0120 18:43:33.038734 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a7c364b-9972-4300-9e72-ce120d9c5d9a" containerName="pull" Jan 20 18:43:33 crc kubenswrapper[4558]: I0120 18:43:33.038899 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="3a7c364b-9972-4300-9e72-ce120d9c5d9a" containerName="extract" Jan 20 18:43:33 crc kubenswrapper[4558]: I0120 18:43:33.038911 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="eb102007-e2b6-44de-bbc2-600f91c4f560" containerName="keystone-bootstrap" Jan 20 18:43:33 crc kubenswrapper[4558]: I0120 18:43:33.039542 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="horizon-kuttl-tests/keystone-f9689d859-jt4kw" Jan 20 18:43:33 crc kubenswrapper[4558]: I0120 18:43:33.041819 4558 reflector.go:368] Caches populated for *v1.Secret from object-"horizon-kuttl-tests"/"keystone" Jan 20 18:43:33 crc kubenswrapper[4558]: I0120 18:43:33.042081 4558 reflector.go:368] Caches populated for *v1.Secret from object-"horizon-kuttl-tests"/"keystone-scripts" Jan 20 18:43:33 crc kubenswrapper[4558]: I0120 18:43:33.042130 4558 reflector.go:368] Caches populated for *v1.Secret from object-"horizon-kuttl-tests"/"keystone-keystone-dockercfg-mjr2v" Jan 20 18:43:33 crc kubenswrapper[4558]: I0120 18:43:33.042198 4558 reflector.go:368] Caches populated for *v1.Secret from object-"horizon-kuttl-tests"/"keystone-config-data" Jan 20 18:43:33 crc kubenswrapper[4558]: I0120 18:43:33.049039 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/keystone-f9689d859-jt4kw"] Jan 20 18:43:33 crc kubenswrapper[4558]: I0120 18:43:33.139892 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1e63d3ee-13d4-4b9c-894f-03861181bd49-config-data\") pod \"keystone-f9689d859-jt4kw\" (UID: \"1e63d3ee-13d4-4b9c-894f-03861181bd49\") " pod="horizon-kuttl-tests/keystone-f9689d859-jt4kw" Jan 20 18:43:33 crc kubenswrapper[4558]: I0120 18:43:33.140342 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/1e63d3ee-13d4-4b9c-894f-03861181bd49-credential-keys\") pod \"keystone-f9689d859-jt4kw\" (UID: \"1e63d3ee-13d4-4b9c-894f-03861181bd49\") " pod="horizon-kuttl-tests/keystone-f9689d859-jt4kw" Jan 20 18:43:33 crc kubenswrapper[4558]: I0120 18:43:33.140380 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1e63d3ee-13d4-4b9c-894f-03861181bd49-scripts\") pod \"keystone-f9689d859-jt4kw\" (UID: \"1e63d3ee-13d4-4b9c-894f-03861181bd49\") " pod="horizon-kuttl-tests/keystone-f9689d859-jt4kw" Jan 20 18:43:33 crc kubenswrapper[4558]: I0120 18:43:33.140402 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1e63d3ee-13d4-4b9c-894f-03861181bd49-fernet-keys\") pod \"keystone-f9689d859-jt4kw\" (UID: \"1e63d3ee-13d4-4b9c-894f-03861181bd49\") " pod="horizon-kuttl-tests/keystone-f9689d859-jt4kw" Jan 20 18:43:33 crc kubenswrapper[4558]: I0120 18:43:33.140435 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w2vr2\" (UniqueName: \"kubernetes.io/projected/1e63d3ee-13d4-4b9c-894f-03861181bd49-kube-api-access-w2vr2\") pod \"keystone-f9689d859-jt4kw\" (UID: \"1e63d3ee-13d4-4b9c-894f-03861181bd49\") " pod="horizon-kuttl-tests/keystone-f9689d859-jt4kw" Jan 20 18:43:33 crc kubenswrapper[4558]: I0120 18:43:33.243037 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/1e63d3ee-13d4-4b9c-894f-03861181bd49-credential-keys\") pod \"keystone-f9689d859-jt4kw\" (UID: \"1e63d3ee-13d4-4b9c-894f-03861181bd49\") " pod="horizon-kuttl-tests/keystone-f9689d859-jt4kw" Jan 20 18:43:33 crc kubenswrapper[4558]: I0120 18:43:33.243155 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/1e63d3ee-13d4-4b9c-894f-03861181bd49-scripts\") pod \"keystone-f9689d859-jt4kw\" (UID: \"1e63d3ee-13d4-4b9c-894f-03861181bd49\") " pod="horizon-kuttl-tests/keystone-f9689d859-jt4kw" Jan 20 18:43:33 crc kubenswrapper[4558]: I0120 18:43:33.243229 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1e63d3ee-13d4-4b9c-894f-03861181bd49-fernet-keys\") pod \"keystone-f9689d859-jt4kw\" (UID: \"1e63d3ee-13d4-4b9c-894f-03861181bd49\") " pod="horizon-kuttl-tests/keystone-f9689d859-jt4kw" Jan 20 18:43:33 crc kubenswrapper[4558]: I0120 18:43:33.243284 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w2vr2\" (UniqueName: \"kubernetes.io/projected/1e63d3ee-13d4-4b9c-894f-03861181bd49-kube-api-access-w2vr2\") pod \"keystone-f9689d859-jt4kw\" (UID: \"1e63d3ee-13d4-4b9c-894f-03861181bd49\") " pod="horizon-kuttl-tests/keystone-f9689d859-jt4kw" Jan 20 18:43:33 crc kubenswrapper[4558]: I0120 18:43:33.243443 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1e63d3ee-13d4-4b9c-894f-03861181bd49-config-data\") pod \"keystone-f9689d859-jt4kw\" (UID: \"1e63d3ee-13d4-4b9c-894f-03861181bd49\") " pod="horizon-kuttl-tests/keystone-f9689d859-jt4kw" Jan 20 18:43:33 crc kubenswrapper[4558]: I0120 18:43:33.247375 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/1e63d3ee-13d4-4b9c-894f-03861181bd49-credential-keys\") pod \"keystone-f9689d859-jt4kw\" (UID: \"1e63d3ee-13d4-4b9c-894f-03861181bd49\") " pod="horizon-kuttl-tests/keystone-f9689d859-jt4kw" Jan 20 18:43:33 crc kubenswrapper[4558]: I0120 18:43:33.251140 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1e63d3ee-13d4-4b9c-894f-03861181bd49-scripts\") pod \"keystone-f9689d859-jt4kw\" (UID: \"1e63d3ee-13d4-4b9c-894f-03861181bd49\") " pod="horizon-kuttl-tests/keystone-f9689d859-jt4kw" Jan 20 18:43:33 crc kubenswrapper[4558]: I0120 18:43:33.251340 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1e63d3ee-13d4-4b9c-894f-03861181bd49-config-data\") pod \"keystone-f9689d859-jt4kw\" (UID: \"1e63d3ee-13d4-4b9c-894f-03861181bd49\") " pod="horizon-kuttl-tests/keystone-f9689d859-jt4kw" Jan 20 18:43:33 crc kubenswrapper[4558]: I0120 18:43:33.252300 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1e63d3ee-13d4-4b9c-894f-03861181bd49-fernet-keys\") pod \"keystone-f9689d859-jt4kw\" (UID: \"1e63d3ee-13d4-4b9c-894f-03861181bd49\") " pod="horizon-kuttl-tests/keystone-f9689d859-jt4kw" Jan 20 18:43:33 crc kubenswrapper[4558]: I0120 18:43:33.262919 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w2vr2\" (UniqueName: \"kubernetes.io/projected/1e63d3ee-13d4-4b9c-894f-03861181bd49-kube-api-access-w2vr2\") pod \"keystone-f9689d859-jt4kw\" (UID: \"1e63d3ee-13d4-4b9c-894f-03861181bd49\") " pod="horizon-kuttl-tests/keystone-f9689d859-jt4kw" Jan 20 18:43:33 crc kubenswrapper[4558]: I0120 18:43:33.361737 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="horizon-kuttl-tests/keystone-f9689d859-jt4kw" Jan 20 18:43:33 crc kubenswrapper[4558]: I0120 18:43:33.780714 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/keystone-f9689d859-jt4kw"] Jan 20 18:43:34 crc kubenswrapper[4558]: I0120 18:43:34.655003 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/keystone-f9689d859-jt4kw" event={"ID":"1e63d3ee-13d4-4b9c-894f-03861181bd49","Type":"ContainerStarted","Data":"d3edd9e7128ed1cf8bb955a82c34b19fdad9e82d1fadfef801ecf5a9289cdaf5"} Jan 20 18:43:34 crc kubenswrapper[4558]: I0120 18:43:34.656250 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="horizon-kuttl-tests/keystone-f9689d859-jt4kw" Jan 20 18:43:34 crc kubenswrapper[4558]: I0120 18:43:34.656275 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/keystone-f9689d859-jt4kw" event={"ID":"1e63d3ee-13d4-4b9c-894f-03861181bd49","Type":"ContainerStarted","Data":"cd371d9428762010fa6e6dca6cee90df03d527df20e0e3307e374869aeed9ccf"} Jan 20 18:43:34 crc kubenswrapper[4558]: I0120 18:43:34.671019 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="horizon-kuttl-tests/keystone-f9689d859-jt4kw" podStartSLOduration=1.6709857449999999 podStartE2EDuration="1.670985745s" podCreationTimestamp="2026-01-20 18:43:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:43:34.668397178 +0000 UTC m=+7308.428735146" watchObservedRunningTime="2026-01-20 18:43:34.670985745 +0000 UTC m=+7308.431323713" Jan 20 18:43:38 crc kubenswrapper[4558]: I0120 18:43:38.998017 4558 scope.go:117] "RemoveContainer" containerID="f5b5140697a9176a9492b0ba7d37f9a6763522f323fc00bb34d4f8d0e8e5fd36" Jan 20 18:43:39 crc kubenswrapper[4558]: I0120 18:43:39.017491 4558 scope.go:117] "RemoveContainer" containerID="f93f4a4a8ecbebc7a1ebd135ed58a49348e98e9e2c4ced98dee0206760d3814a" Jan 20 18:43:39 crc kubenswrapper[4558]: I0120 18:43:39.038932 4558 scope.go:117] "RemoveContainer" containerID="4a9ef972cfa2208336857bf3a949814659a8cacf751d30fe2b6e80dd43173198" Jan 20 18:43:39 crc kubenswrapper[4558]: I0120 18:43:39.082029 4558 scope.go:117] "RemoveContainer" containerID="6b475d151a2bacbf44781d76b94d59aa41fea570c61419ae8e87b3b9378ab84e" Jan 20 18:43:39 crc kubenswrapper[4558]: I0120 18:43:39.108830 4558 scope.go:117] "RemoveContainer" containerID="f5faa572a4cebb633298a178a235ff4593fc7536a08431df274c15d41c57e10e" Jan 20 18:43:46 crc kubenswrapper[4558]: I0120 18:43:46.888470 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-788c44577b-c22sm"] Jan 20 18:43:46 crc kubenswrapper[4558]: I0120 18:43:46.889760 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-788c44577b-c22sm" Jan 20 18:43:46 crc kubenswrapper[4558]: I0120 18:43:46.892090 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-xm2ck" Jan 20 18:43:46 crc kubenswrapper[4558]: I0120 18:43:46.895240 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-service-cert" Jan 20 18:43:46 crc kubenswrapper[4558]: I0120 18:43:46.906787 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-788c44577b-c22sm"] Jan 20 18:43:47 crc kubenswrapper[4558]: I0120 18:43:47.069762 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6f1cb567-179f-4bd7-b76f-bcc43d2474f0-apiservice-cert\") pod \"horizon-operator-controller-manager-788c44577b-c22sm\" (UID: \"6f1cb567-179f-4bd7-b76f-bcc43d2474f0\") " pod="openstack-operators/horizon-operator-controller-manager-788c44577b-c22sm" Jan 20 18:43:47 crc kubenswrapper[4558]: I0120 18:43:47.069856 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d5btp\" (UniqueName: \"kubernetes.io/projected/6f1cb567-179f-4bd7-b76f-bcc43d2474f0-kube-api-access-d5btp\") pod \"horizon-operator-controller-manager-788c44577b-c22sm\" (UID: \"6f1cb567-179f-4bd7-b76f-bcc43d2474f0\") " pod="openstack-operators/horizon-operator-controller-manager-788c44577b-c22sm" Jan 20 18:43:47 crc kubenswrapper[4558]: I0120 18:43:47.069985 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6f1cb567-179f-4bd7-b76f-bcc43d2474f0-webhook-cert\") pod \"horizon-operator-controller-manager-788c44577b-c22sm\" (UID: \"6f1cb567-179f-4bd7-b76f-bcc43d2474f0\") " pod="openstack-operators/horizon-operator-controller-manager-788c44577b-c22sm" Jan 20 18:43:47 crc kubenswrapper[4558]: I0120 18:43:47.171388 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6f1cb567-179f-4bd7-b76f-bcc43d2474f0-apiservice-cert\") pod \"horizon-operator-controller-manager-788c44577b-c22sm\" (UID: \"6f1cb567-179f-4bd7-b76f-bcc43d2474f0\") " pod="openstack-operators/horizon-operator-controller-manager-788c44577b-c22sm" Jan 20 18:43:47 crc kubenswrapper[4558]: I0120 18:43:47.171449 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d5btp\" (UniqueName: \"kubernetes.io/projected/6f1cb567-179f-4bd7-b76f-bcc43d2474f0-kube-api-access-d5btp\") pod \"horizon-operator-controller-manager-788c44577b-c22sm\" (UID: \"6f1cb567-179f-4bd7-b76f-bcc43d2474f0\") " pod="openstack-operators/horizon-operator-controller-manager-788c44577b-c22sm" Jan 20 18:43:47 crc kubenswrapper[4558]: I0120 18:43:47.171540 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6f1cb567-179f-4bd7-b76f-bcc43d2474f0-webhook-cert\") pod \"horizon-operator-controller-manager-788c44577b-c22sm\" (UID: \"6f1cb567-179f-4bd7-b76f-bcc43d2474f0\") " pod="openstack-operators/horizon-operator-controller-manager-788c44577b-c22sm" Jan 20 18:43:47 crc kubenswrapper[4558]: I0120 18:43:47.177921 4558 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6f1cb567-179f-4bd7-b76f-bcc43d2474f0-webhook-cert\") pod \"horizon-operator-controller-manager-788c44577b-c22sm\" (UID: \"6f1cb567-179f-4bd7-b76f-bcc43d2474f0\") " pod="openstack-operators/horizon-operator-controller-manager-788c44577b-c22sm" Jan 20 18:43:47 crc kubenswrapper[4558]: I0120 18:43:47.177924 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6f1cb567-179f-4bd7-b76f-bcc43d2474f0-apiservice-cert\") pod \"horizon-operator-controller-manager-788c44577b-c22sm\" (UID: \"6f1cb567-179f-4bd7-b76f-bcc43d2474f0\") " pod="openstack-operators/horizon-operator-controller-manager-788c44577b-c22sm" Jan 20 18:43:47 crc kubenswrapper[4558]: I0120 18:43:47.184354 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d5btp\" (UniqueName: \"kubernetes.io/projected/6f1cb567-179f-4bd7-b76f-bcc43d2474f0-kube-api-access-d5btp\") pod \"horizon-operator-controller-manager-788c44577b-c22sm\" (UID: \"6f1cb567-179f-4bd7-b76f-bcc43d2474f0\") " pod="openstack-operators/horizon-operator-controller-manager-788c44577b-c22sm" Jan 20 18:43:47 crc kubenswrapper[4558]: I0120 18:43:47.205911 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-788c44577b-c22sm" Jan 20 18:43:47 crc kubenswrapper[4558]: I0120 18:43:47.573836 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-788c44577b-c22sm"] Jan 20 18:43:47 crc kubenswrapper[4558]: W0120 18:43:47.576592 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6f1cb567_179f_4bd7_b76f_bcc43d2474f0.slice/crio-6c061ccb4dde6fce3407f24890c2cc9112f424d916e34aee67ae2ad04405e1c5 WatchSource:0}: Error finding container 6c061ccb4dde6fce3407f24890c2cc9112f424d916e34aee67ae2ad04405e1c5: Status 404 returned error can't find the container with id 6c061ccb4dde6fce3407f24890c2cc9112f424d916e34aee67ae2ad04405e1c5 Jan 20 18:43:47 crc kubenswrapper[4558]: I0120 18:43:47.758609 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-788c44577b-c22sm" event={"ID":"6f1cb567-179f-4bd7-b76f-bcc43d2474f0","Type":"ContainerStarted","Data":"25b89c90c78d10da1d5159b0ce7adc9014f1138c61c6218c01bad1e79dfa6e6b"} Jan 20 18:43:47 crc kubenswrapper[4558]: I0120 18:43:47.758659 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-788c44577b-c22sm" event={"ID":"6f1cb567-179f-4bd7-b76f-bcc43d2474f0","Type":"ContainerStarted","Data":"6c061ccb4dde6fce3407f24890c2cc9112f424d916e34aee67ae2ad04405e1c5"} Jan 20 18:43:47 crc kubenswrapper[4558]: I0120 18:43:47.758784 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-788c44577b-c22sm" Jan 20 18:43:47 crc kubenswrapper[4558]: I0120 18:43:47.777957 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-788c44577b-c22sm" podStartSLOduration=1.777936288 podStartE2EDuration="1.777936288s" podCreationTimestamp="2026-01-20 18:43:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 
UTC" observedRunningTime="2026-01-20 18:43:47.772518933 +0000 UTC m=+7321.532856900" watchObservedRunningTime="2026-01-20 18:43:47.777936288 +0000 UTC m=+7321.538274256" Jan 20 18:43:57 crc kubenswrapper[4558]: I0120 18:43:57.213251 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-788c44577b-c22sm" Jan 20 18:44:04 crc kubenswrapper[4558]: I0120 18:44:04.603315 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="horizon-kuttl-tests/keystone-f9689d859-jt4kw" Jan 20 18:44:20 crc kubenswrapper[4558]: I0120 18:44:20.538550 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["horizon-kuttl-tests/horizon-9b986b9c-nz6z9"] Jan 20 18:44:20 crc kubenswrapper[4558]: I0120 18:44:20.540353 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/horizon-9b986b9c-nz6z9" Jan 20 18:44:20 crc kubenswrapper[4558]: I0120 18:44:20.541954 4558 reflector.go:368] Caches populated for *v1.Secret from object-"horizon-kuttl-tests"/"horizon" Jan 20 18:44:20 crc kubenswrapper[4558]: I0120 18:44:20.542040 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"horizon-kuttl-tests"/"horizon-scripts" Jan 20 18:44:20 crc kubenswrapper[4558]: I0120 18:44:20.542637 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"horizon-kuttl-tests"/"horizon-config-data" Jan 20 18:44:20 crc kubenswrapper[4558]: I0120 18:44:20.543259 4558 reflector.go:368] Caches populated for *v1.Secret from object-"horizon-kuttl-tests"/"horizon-horizon-dockercfg-2wq6j" Jan 20 18:44:20 crc kubenswrapper[4558]: I0120 18:44:20.553916 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/horizon-9b986b9c-nz6z9"] Jan 20 18:44:20 crc kubenswrapper[4558]: I0120 18:44:20.593112 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/faa3fd5a-1d25-421a-8f05-046b725fa16c-scripts\") pod \"horizon-9b986b9c-nz6z9\" (UID: \"faa3fd5a-1d25-421a-8f05-046b725fa16c\") " pod="horizon-kuttl-tests/horizon-9b986b9c-nz6z9" Jan 20 18:44:20 crc kubenswrapper[4558]: I0120 18:44:20.594262 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/faa3fd5a-1d25-421a-8f05-046b725fa16c-logs\") pod \"horizon-9b986b9c-nz6z9\" (UID: \"faa3fd5a-1d25-421a-8f05-046b725fa16c\") " pod="horizon-kuttl-tests/horizon-9b986b9c-nz6z9" Jan 20 18:44:20 crc kubenswrapper[4558]: I0120 18:44:20.594422 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/faa3fd5a-1d25-421a-8f05-046b725fa16c-horizon-secret-key\") pod \"horizon-9b986b9c-nz6z9\" (UID: \"faa3fd5a-1d25-421a-8f05-046b725fa16c\") " pod="horizon-kuttl-tests/horizon-9b986b9c-nz6z9" Jan 20 18:44:20 crc kubenswrapper[4558]: I0120 18:44:20.594484 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qt5nx\" (UniqueName: \"kubernetes.io/projected/faa3fd5a-1d25-421a-8f05-046b725fa16c-kube-api-access-qt5nx\") pod \"horizon-9b986b9c-nz6z9\" (UID: \"faa3fd5a-1d25-421a-8f05-046b725fa16c\") " pod="horizon-kuttl-tests/horizon-9b986b9c-nz6z9" Jan 20 18:44:20 crc kubenswrapper[4558]: I0120 18:44:20.594511 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/faa3fd5a-1d25-421a-8f05-046b725fa16c-config-data\") pod \"horizon-9b986b9c-nz6z9\" (UID: \"faa3fd5a-1d25-421a-8f05-046b725fa16c\") " pod="horizon-kuttl-tests/horizon-9b986b9c-nz6z9" Jan 20 18:44:20 crc kubenswrapper[4558]: I0120 18:44:20.599203 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["horizon-kuttl-tests/horizon-598f976c49-c8x57"] Jan 20 18:44:20 crc kubenswrapper[4558]: I0120 18:44:20.602399 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/horizon-598f976c49-c8x57" Jan 20 18:44:20 crc kubenswrapper[4558]: I0120 18:44:20.606681 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/horizon-598f976c49-c8x57"] Jan 20 18:44:20 crc kubenswrapper[4558]: I0120 18:44:20.696416 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/faa3fd5a-1d25-421a-8f05-046b725fa16c-logs\") pod \"horizon-9b986b9c-nz6z9\" (UID: \"faa3fd5a-1d25-421a-8f05-046b725fa16c\") " pod="horizon-kuttl-tests/horizon-9b986b9c-nz6z9" Jan 20 18:44:20 crc kubenswrapper[4558]: I0120 18:44:20.696508 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/faa3fd5a-1d25-421a-8f05-046b725fa16c-horizon-secret-key\") pod \"horizon-9b986b9c-nz6z9\" (UID: \"faa3fd5a-1d25-421a-8f05-046b725fa16c\") " pod="horizon-kuttl-tests/horizon-9b986b9c-nz6z9" Jan 20 18:44:20 crc kubenswrapper[4558]: I0120 18:44:20.696539 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3-scripts\") pod \"horizon-598f976c49-c8x57\" (UID: \"0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3\") " pod="horizon-kuttl-tests/horizon-598f976c49-c8x57" Jan 20 18:44:20 crc kubenswrapper[4558]: I0120 18:44:20.696580 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qt5nx\" (UniqueName: \"kubernetes.io/projected/faa3fd5a-1d25-421a-8f05-046b725fa16c-kube-api-access-qt5nx\") pod \"horizon-9b986b9c-nz6z9\" (UID: \"faa3fd5a-1d25-421a-8f05-046b725fa16c\") " pod="horizon-kuttl-tests/horizon-9b986b9c-nz6z9" Jan 20 18:44:20 crc kubenswrapper[4558]: I0120 18:44:20.696600 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3-config-data\") pod \"horizon-598f976c49-c8x57\" (UID: \"0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3\") " pod="horizon-kuttl-tests/horizon-598f976c49-c8x57" Jan 20 18:44:20 crc kubenswrapper[4558]: I0120 18:44:20.696619 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/faa3fd5a-1d25-421a-8f05-046b725fa16c-config-data\") pod \"horizon-9b986b9c-nz6z9\" (UID: \"faa3fd5a-1d25-421a-8f05-046b725fa16c\") " pod="horizon-kuttl-tests/horizon-9b986b9c-nz6z9" Jan 20 18:44:20 crc kubenswrapper[4558]: I0120 18:44:20.696682 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2hs8f\" (UniqueName: \"kubernetes.io/projected/0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3-kube-api-access-2hs8f\") pod \"horizon-598f976c49-c8x57\" (UID: \"0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3\") " pod="horizon-kuttl-tests/horizon-598f976c49-c8x57" Jan 20 
18:44:20 crc kubenswrapper[4558]: I0120 18:44:20.696713 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/faa3fd5a-1d25-421a-8f05-046b725fa16c-scripts\") pod \"horizon-9b986b9c-nz6z9\" (UID: \"faa3fd5a-1d25-421a-8f05-046b725fa16c\") " pod="horizon-kuttl-tests/horizon-9b986b9c-nz6z9" Jan 20 18:44:20 crc kubenswrapper[4558]: I0120 18:44:20.696741 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3-horizon-secret-key\") pod \"horizon-598f976c49-c8x57\" (UID: \"0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3\") " pod="horizon-kuttl-tests/horizon-598f976c49-c8x57" Jan 20 18:44:20 crc kubenswrapper[4558]: I0120 18:44:20.696761 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3-logs\") pod \"horizon-598f976c49-c8x57\" (UID: \"0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3\") " pod="horizon-kuttl-tests/horizon-598f976c49-c8x57" Jan 20 18:44:20 crc kubenswrapper[4558]: I0120 18:44:20.696850 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/faa3fd5a-1d25-421a-8f05-046b725fa16c-logs\") pod \"horizon-9b986b9c-nz6z9\" (UID: \"faa3fd5a-1d25-421a-8f05-046b725fa16c\") " pod="horizon-kuttl-tests/horizon-9b986b9c-nz6z9" Jan 20 18:44:20 crc kubenswrapper[4558]: I0120 18:44:20.697566 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/faa3fd5a-1d25-421a-8f05-046b725fa16c-scripts\") pod \"horizon-9b986b9c-nz6z9\" (UID: \"faa3fd5a-1d25-421a-8f05-046b725fa16c\") " pod="horizon-kuttl-tests/horizon-9b986b9c-nz6z9" Jan 20 18:44:20 crc kubenswrapper[4558]: I0120 18:44:20.697842 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/faa3fd5a-1d25-421a-8f05-046b725fa16c-config-data\") pod \"horizon-9b986b9c-nz6z9\" (UID: \"faa3fd5a-1d25-421a-8f05-046b725fa16c\") " pod="horizon-kuttl-tests/horizon-9b986b9c-nz6z9" Jan 20 18:44:20 crc kubenswrapper[4558]: I0120 18:44:20.703785 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/faa3fd5a-1d25-421a-8f05-046b725fa16c-horizon-secret-key\") pod \"horizon-9b986b9c-nz6z9\" (UID: \"faa3fd5a-1d25-421a-8f05-046b725fa16c\") " pod="horizon-kuttl-tests/horizon-9b986b9c-nz6z9" Jan 20 18:44:20 crc kubenswrapper[4558]: I0120 18:44:20.715309 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qt5nx\" (UniqueName: \"kubernetes.io/projected/faa3fd5a-1d25-421a-8f05-046b725fa16c-kube-api-access-qt5nx\") pod \"horizon-9b986b9c-nz6z9\" (UID: \"faa3fd5a-1d25-421a-8f05-046b725fa16c\") " pod="horizon-kuttl-tests/horizon-9b986b9c-nz6z9" Jan 20 18:44:20 crc kubenswrapper[4558]: I0120 18:44:20.798445 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3-scripts\") pod \"horizon-598f976c49-c8x57\" (UID: \"0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3\") " pod="horizon-kuttl-tests/horizon-598f976c49-c8x57" Jan 20 18:44:20 crc kubenswrapper[4558]: I0120 18:44:20.798500 4558 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3-config-data\") pod \"horizon-598f976c49-c8x57\" (UID: \"0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3\") " pod="horizon-kuttl-tests/horizon-598f976c49-c8x57" Jan 20 18:44:20 crc kubenswrapper[4558]: I0120 18:44:20.798620 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2hs8f\" (UniqueName: \"kubernetes.io/projected/0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3-kube-api-access-2hs8f\") pod \"horizon-598f976c49-c8x57\" (UID: \"0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3\") " pod="horizon-kuttl-tests/horizon-598f976c49-c8x57" Jan 20 18:44:20 crc kubenswrapper[4558]: I0120 18:44:20.798697 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3-horizon-secret-key\") pod \"horizon-598f976c49-c8x57\" (UID: \"0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3\") " pod="horizon-kuttl-tests/horizon-598f976c49-c8x57" Jan 20 18:44:20 crc kubenswrapper[4558]: I0120 18:44:20.798727 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3-logs\") pod \"horizon-598f976c49-c8x57\" (UID: \"0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3\") " pod="horizon-kuttl-tests/horizon-598f976c49-c8x57" Jan 20 18:44:20 crc kubenswrapper[4558]: I0120 18:44:20.799393 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3-logs\") pod \"horizon-598f976c49-c8x57\" (UID: \"0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3\") " pod="horizon-kuttl-tests/horizon-598f976c49-c8x57" Jan 20 18:44:20 crc kubenswrapper[4558]: I0120 18:44:20.799417 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3-scripts\") pod \"horizon-598f976c49-c8x57\" (UID: \"0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3\") " pod="horizon-kuttl-tests/horizon-598f976c49-c8x57" Jan 20 18:44:20 crc kubenswrapper[4558]: I0120 18:44:20.800056 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3-config-data\") pod \"horizon-598f976c49-c8x57\" (UID: \"0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3\") " pod="horizon-kuttl-tests/horizon-598f976c49-c8x57" Jan 20 18:44:20 crc kubenswrapper[4558]: I0120 18:44:20.801774 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3-horizon-secret-key\") pod \"horizon-598f976c49-c8x57\" (UID: \"0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3\") " pod="horizon-kuttl-tests/horizon-598f976c49-c8x57" Jan 20 18:44:20 crc kubenswrapper[4558]: I0120 18:44:20.810588 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2hs8f\" (UniqueName: \"kubernetes.io/projected/0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3-kube-api-access-2hs8f\") pod \"horizon-598f976c49-c8x57\" (UID: \"0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3\") " pod="horizon-kuttl-tests/horizon-598f976c49-c8x57" Jan 20 18:44:20 crc kubenswrapper[4558]: I0120 18:44:20.859016 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="horizon-kuttl-tests/horizon-9b986b9c-nz6z9" Jan 20 18:44:20 crc kubenswrapper[4558]: I0120 18:44:20.922678 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/horizon-598f976c49-c8x57" Jan 20 18:44:21 crc kubenswrapper[4558]: I0120 18:44:21.244386 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/horizon-9b986b9c-nz6z9"] Jan 20 18:44:21 crc kubenswrapper[4558]: I0120 18:44:21.342849 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/horizon-598f976c49-c8x57"] Jan 20 18:44:21 crc kubenswrapper[4558]: W0120 18:44:21.343515 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0d8e5344_72f5_4f27_bfbf_8b6a00c3d3e3.slice/crio-6234a84329e44fbc491bfd409acee1a4bbbb0db1ffc1af86d7007eef34260864 WatchSource:0}: Error finding container 6234a84329e44fbc491bfd409acee1a4bbbb0db1ffc1af86d7007eef34260864: Status 404 returned error can't find the container with id 6234a84329e44fbc491bfd409acee1a4bbbb0db1ffc1af86d7007eef34260864 Jan 20 18:44:22 crc kubenswrapper[4558]: I0120 18:44:22.042515 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/horizon-598f976c49-c8x57" event={"ID":"0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3","Type":"ContainerStarted","Data":"6234a84329e44fbc491bfd409acee1a4bbbb0db1ffc1af86d7007eef34260864"} Jan 20 18:44:22 crc kubenswrapper[4558]: I0120 18:44:22.043652 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/horizon-9b986b9c-nz6z9" event={"ID":"faa3fd5a-1d25-421a-8f05-046b725fa16c","Type":"ContainerStarted","Data":"8bdd1e52aa7b132cbb6d546bdca1153ce198672d5be580ed9f37573f9fd15d02"} Jan 20 18:44:29 crc kubenswrapper[4558]: I0120 18:44:29.111117 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/horizon-9b986b9c-nz6z9" event={"ID":"faa3fd5a-1d25-421a-8f05-046b725fa16c","Type":"ContainerStarted","Data":"9a4d4756d9f6aaeafc335e3f85143281119d107b836fb1b29b365e5cff54acff"} Jan 20 18:44:29 crc kubenswrapper[4558]: I0120 18:44:29.111722 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/horizon-9b986b9c-nz6z9" event={"ID":"faa3fd5a-1d25-421a-8f05-046b725fa16c","Type":"ContainerStarted","Data":"e70ccb2925fecb215d00942e3ba5962cb6a0f1d9e425c0fd685e7fbd7ddb39b4"} Jan 20 18:44:29 crc kubenswrapper[4558]: I0120 18:44:29.114616 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/horizon-598f976c49-c8x57" event={"ID":"0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3","Type":"ContainerStarted","Data":"468366f51379cc21d36c5d1382767e010720b6ff7948e08b8ec4bcf062faedd1"} Jan 20 18:44:29 crc kubenswrapper[4558]: I0120 18:44:29.114667 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/horizon-598f976c49-c8x57" event={"ID":"0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3","Type":"ContainerStarted","Data":"608c55329d1788390874107f80379fb6c6a092750c643d8249f066787fc32e77"} Jan 20 18:44:29 crc kubenswrapper[4558]: I0120 18:44:29.133471 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="horizon-kuttl-tests/horizon-9b986b9c-nz6z9" podStartSLOduration=1.614632835 podStartE2EDuration="9.133456982s" podCreationTimestamp="2026-01-20 18:44:20 +0000 UTC" firstStartedPulling="2026-01-20 18:44:21.251658906 +0000 UTC m=+7355.011996863" lastFinishedPulling="2026-01-20 18:44:28.770483043 +0000 UTC m=+7362.530821010" 
observedRunningTime="2026-01-20 18:44:29.130648812 +0000 UTC m=+7362.890986778" watchObservedRunningTime="2026-01-20 18:44:29.133456982 +0000 UTC m=+7362.893794949" Jan 20 18:44:29 crc kubenswrapper[4558]: I0120 18:44:29.144715 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="horizon-kuttl-tests/horizon-598f976c49-c8x57" podStartSLOduration=1.728458512 podStartE2EDuration="9.144694891s" podCreationTimestamp="2026-01-20 18:44:20 +0000 UTC" firstStartedPulling="2026-01-20 18:44:21.346146255 +0000 UTC m=+7355.106484222" lastFinishedPulling="2026-01-20 18:44:28.762382643 +0000 UTC m=+7362.522720601" observedRunningTime="2026-01-20 18:44:29.144246769 +0000 UTC m=+7362.904584736" watchObservedRunningTime="2026-01-20 18:44:29.144694891 +0000 UTC m=+7362.905032858" Jan 20 18:44:30 crc kubenswrapper[4558]: I0120 18:44:30.860028 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="horizon-kuttl-tests/horizon-9b986b9c-nz6z9" Jan 20 18:44:30 crc kubenswrapper[4558]: I0120 18:44:30.860360 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="horizon-kuttl-tests/horizon-9b986b9c-nz6z9" Jan 20 18:44:30 crc kubenswrapper[4558]: I0120 18:44:30.923967 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="horizon-kuttl-tests/horizon-598f976c49-c8x57" Jan 20 18:44:30 crc kubenswrapper[4558]: I0120 18:44:30.924056 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="horizon-kuttl-tests/horizon-598f976c49-c8x57" Jan 20 18:44:39 crc kubenswrapper[4558]: I0120 18:44:39.203360 4558 scope.go:117] "RemoveContainer" containerID="b0d65e25768ceb3be84b2eff7f44c20dbc21e5f7fb58429a4d05b2f5522dce96" Jan 20 18:44:39 crc kubenswrapper[4558]: I0120 18:44:39.247332 4558 scope.go:117] "RemoveContainer" containerID="d8cee8b135a2bc566a472b4aebed535eb36e6c617239e0be350015f5b2432920" Jan 20 18:44:39 crc kubenswrapper[4558]: I0120 18:44:39.265637 4558 scope.go:117] "RemoveContainer" containerID="cad33bf620f35d37cf9a366f1fb9f1d9d5156fcc69a7f378e57e7860accf8797" Jan 20 18:44:39 crc kubenswrapper[4558]: I0120 18:44:39.282426 4558 scope.go:117] "RemoveContainer" containerID="ef95d5a546be1c7edb92582deb686820cb6a2afe9e3436fb02d6af327255f35b" Jan 20 18:44:39 crc kubenswrapper[4558]: I0120 18:44:39.305787 4558 scope.go:117] "RemoveContainer" containerID="92bbb9680ae90195d91760cb423f1150305d8dab5908c8ac0b5f009223396a4c" Jan 20 18:44:40 crc kubenswrapper[4558]: I0120 18:44:40.861887 4558 prober.go:107] "Probe failed" probeType="Startup" pod="horizon-kuttl-tests/horizon-9b986b9c-nz6z9" podUID="faa3fd5a-1d25-421a-8f05-046b725fa16c" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.214:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.214:8080: connect: connection refused" Jan 20 18:44:40 crc kubenswrapper[4558]: I0120 18:44:40.924581 4558 prober.go:107] "Probe failed" probeType="Startup" pod="horizon-kuttl-tests/horizon-598f976c49-c8x57" podUID="0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.215:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.215:8080: connect: connection refused" Jan 20 18:44:52 crc kubenswrapper[4558]: I0120 18:44:52.452404 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="horizon-kuttl-tests/horizon-9b986b9c-nz6z9" Jan 20 18:44:52 crc kubenswrapper[4558]: I0120 18:44:52.486903 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="horizon-kuttl-tests/horizon-598f976c49-c8x57" Jan 20 18:44:53 crc kubenswrapper[4558]: I0120 18:44:53.942447 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="horizon-kuttl-tests/horizon-598f976c49-c8x57" Jan 20 18:44:53 crc kubenswrapper[4558]: I0120 18:44:53.943884 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="horizon-kuttl-tests/horizon-9b986b9c-nz6z9" Jan 20 18:44:54 crc kubenswrapper[4558]: I0120 18:44:54.006023 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["horizon-kuttl-tests/horizon-9b986b9c-nz6z9"] Jan 20 18:44:54 crc kubenswrapper[4558]: I0120 18:44:54.356299 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="horizon-kuttl-tests/horizon-9b986b9c-nz6z9" podUID="faa3fd5a-1d25-421a-8f05-046b725fa16c" containerName="horizon-log" containerID="cri-o://e70ccb2925fecb215d00942e3ba5962cb6a0f1d9e425c0fd685e7fbd7ddb39b4" gracePeriod=30 Jan 20 18:44:54 crc kubenswrapper[4558]: I0120 18:44:54.356417 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="horizon-kuttl-tests/horizon-9b986b9c-nz6z9" podUID="faa3fd5a-1d25-421a-8f05-046b725fa16c" containerName="horizon" containerID="cri-o://9a4d4756d9f6aaeafc335e3f85143281119d107b836fb1b29b365e5cff54acff" gracePeriod=30 Jan 20 18:44:56 crc kubenswrapper[4558]: I0120 18:44:56.953049 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["horizon-kuttl-tests/horizon-6f6c5c44d8-lr7hc"] Jan 20 18:44:56 crc kubenswrapper[4558]: I0120 18:44:56.954799 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/horizon-6f6c5c44d8-lr7hc" Jan 20 18:44:56 crc kubenswrapper[4558]: I0120 18:44:56.962550 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"horizon-kuttl-tests"/"horizon-policy" Jan 20 18:44:56 crc kubenswrapper[4558]: I0120 18:44:56.964146 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/horizon-6f6c5c44d8-lr7hc"] Jan 20 18:44:56 crc kubenswrapper[4558]: I0120 18:44:56.985370 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aff60bd8-b19b-421f-b788-f17774b16886-logs\") pod \"horizon-6f6c5c44d8-lr7hc\" (UID: \"aff60bd8-b19b-421f-b788-f17774b16886\") " pod="horizon-kuttl-tests/horizon-6f6c5c44d8-lr7hc" Jan 20 18:44:56 crc kubenswrapper[4558]: I0120 18:44:56.985445 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/aff60bd8-b19b-421f-b788-f17774b16886-scripts\") pod \"horizon-6f6c5c44d8-lr7hc\" (UID: \"aff60bd8-b19b-421f-b788-f17774b16886\") " pod="horizon-kuttl-tests/horizon-6f6c5c44d8-lr7hc" Jan 20 18:44:56 crc kubenswrapper[4558]: I0120 18:44:56.985542 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/aff60bd8-b19b-421f-b788-f17774b16886-horizon-secret-key\") pod \"horizon-6f6c5c44d8-lr7hc\" (UID: \"aff60bd8-b19b-421f-b788-f17774b16886\") " pod="horizon-kuttl-tests/horizon-6f6c5c44d8-lr7hc" Jan 20 18:44:56 crc kubenswrapper[4558]: I0120 18:44:56.985575 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"policy\" (UniqueName: \"kubernetes.io/configmap/aff60bd8-b19b-421f-b788-f17774b16886-policy\") pod \"horizon-6f6c5c44d8-lr7hc\" (UID: 
\"aff60bd8-b19b-421f-b788-f17774b16886\") " pod="horizon-kuttl-tests/horizon-6f6c5c44d8-lr7hc" Jan 20 18:44:56 crc kubenswrapper[4558]: I0120 18:44:56.985764 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/aff60bd8-b19b-421f-b788-f17774b16886-config-data\") pod \"horizon-6f6c5c44d8-lr7hc\" (UID: \"aff60bd8-b19b-421f-b788-f17774b16886\") " pod="horizon-kuttl-tests/horizon-6f6c5c44d8-lr7hc" Jan 20 18:44:56 crc kubenswrapper[4558]: I0120 18:44:56.985814 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vml2k\" (UniqueName: \"kubernetes.io/projected/aff60bd8-b19b-421f-b788-f17774b16886-kube-api-access-vml2k\") pod \"horizon-6f6c5c44d8-lr7hc\" (UID: \"aff60bd8-b19b-421f-b788-f17774b16886\") " pod="horizon-kuttl-tests/horizon-6f6c5c44d8-lr7hc" Jan 20 18:44:57 crc kubenswrapper[4558]: I0120 18:44:57.022061 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["horizon-kuttl-tests/horizon-6f6c5c44d8-lr7hc"] Jan 20 18:44:57 crc kubenswrapper[4558]: E0120 18:44:57.023075 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[config-data horizon-secret-key kube-api-access-vml2k logs policy scripts], unattached volumes=[], failed to process volumes=[]: context canceled" pod="horizon-kuttl-tests/horizon-6f6c5c44d8-lr7hc" podUID="aff60bd8-b19b-421f-b788-f17774b16886" Jan 20 18:44:57 crc kubenswrapper[4558]: I0120 18:44:57.026015 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["horizon-kuttl-tests/horizon-598f976c49-c8x57"] Jan 20 18:44:57 crc kubenswrapper[4558]: I0120 18:44:57.026297 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="horizon-kuttl-tests/horizon-598f976c49-c8x57" podUID="0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3" containerName="horizon-log" containerID="cri-o://608c55329d1788390874107f80379fb6c6a092750c643d8249f066787fc32e77" gracePeriod=30 Jan 20 18:44:57 crc kubenswrapper[4558]: I0120 18:44:57.026362 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="horizon-kuttl-tests/horizon-598f976c49-c8x57" podUID="0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3" containerName="horizon" containerID="cri-o://468366f51379cc21d36c5d1382767e010720b6ff7948e08b8ec4bcf062faedd1" gracePeriod=30 Jan 20 18:44:57 crc kubenswrapper[4558]: I0120 18:44:57.087395 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/aff60bd8-b19b-421f-b788-f17774b16886-config-data\") pod \"horizon-6f6c5c44d8-lr7hc\" (UID: \"aff60bd8-b19b-421f-b788-f17774b16886\") " pod="horizon-kuttl-tests/horizon-6f6c5c44d8-lr7hc" Jan 20 18:44:57 crc kubenswrapper[4558]: I0120 18:44:57.087454 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vml2k\" (UniqueName: \"kubernetes.io/projected/aff60bd8-b19b-421f-b788-f17774b16886-kube-api-access-vml2k\") pod \"horizon-6f6c5c44d8-lr7hc\" (UID: \"aff60bd8-b19b-421f-b788-f17774b16886\") " pod="horizon-kuttl-tests/horizon-6f6c5c44d8-lr7hc" Jan 20 18:44:57 crc kubenswrapper[4558]: I0120 18:44:57.087505 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aff60bd8-b19b-421f-b788-f17774b16886-logs\") pod \"horizon-6f6c5c44d8-lr7hc\" (UID: \"aff60bd8-b19b-421f-b788-f17774b16886\") " pod="horizon-kuttl-tests/horizon-6f6c5c44d8-lr7hc" Jan 20 18:44:57 
crc kubenswrapper[4558]: I0120 18:44:57.087538 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/aff60bd8-b19b-421f-b788-f17774b16886-scripts\") pod \"horizon-6f6c5c44d8-lr7hc\" (UID: \"aff60bd8-b19b-421f-b788-f17774b16886\") " pod="horizon-kuttl-tests/horizon-6f6c5c44d8-lr7hc" Jan 20 18:44:57 crc kubenswrapper[4558]: E0120 18:44:57.087558 4558 configmap.go:193] Couldn't get configMap horizon-kuttl-tests/horizon-config-data: configmap "horizon-config-data" not found Jan 20 18:44:57 crc kubenswrapper[4558]: I0120 18:44:57.087595 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/aff60bd8-b19b-421f-b788-f17774b16886-horizon-secret-key\") pod \"horizon-6f6c5c44d8-lr7hc\" (UID: \"aff60bd8-b19b-421f-b788-f17774b16886\") " pod="horizon-kuttl-tests/horizon-6f6c5c44d8-lr7hc" Jan 20 18:44:57 crc kubenswrapper[4558]: I0120 18:44:57.087620 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"policy\" (UniqueName: \"kubernetes.io/configmap/aff60bd8-b19b-421f-b788-f17774b16886-policy\") pod \"horizon-6f6c5c44d8-lr7hc\" (UID: \"aff60bd8-b19b-421f-b788-f17774b16886\") " pod="horizon-kuttl-tests/horizon-6f6c5c44d8-lr7hc" Jan 20 18:44:57 crc kubenswrapper[4558]: E0120 18:44:57.087651 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/aff60bd8-b19b-421f-b788-f17774b16886-config-data podName:aff60bd8-b19b-421f-b788-f17774b16886 nodeName:}" failed. No retries permitted until 2026-01-20 18:44:57.587631861 +0000 UTC m=+7391.347969829 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/aff60bd8-b19b-421f-b788-f17774b16886-config-data") pod "horizon-6f6c5c44d8-lr7hc" (UID: "aff60bd8-b19b-421f-b788-f17774b16886") : configmap "horizon-config-data" not found Jan 20 18:44:57 crc kubenswrapper[4558]: E0120 18:44:57.087721 4558 configmap.go:193] Couldn't get configMap horizon-kuttl-tests/horizon-scripts: configmap "horizon-scripts" not found Jan 20 18:44:57 crc kubenswrapper[4558]: E0120 18:44:57.087791 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/aff60bd8-b19b-421f-b788-f17774b16886-scripts podName:aff60bd8-b19b-421f-b788-f17774b16886 nodeName:}" failed. No retries permitted until 2026-01-20 18:44:57.587772146 +0000 UTC m=+7391.348110113 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "scripts" (UniqueName: "kubernetes.io/configmap/aff60bd8-b19b-421f-b788-f17774b16886-scripts") pod "horizon-6f6c5c44d8-lr7hc" (UID: "aff60bd8-b19b-421f-b788-f17774b16886") : configmap "horizon-scripts" not found Jan 20 18:44:57 crc kubenswrapper[4558]: E0120 18:44:57.087810 4558 secret.go:188] Couldn't get secret horizon-kuttl-tests/horizon: secret "horizon" not found Jan 20 18:44:57 crc kubenswrapper[4558]: E0120 18:44:57.087875 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/aff60bd8-b19b-421f-b788-f17774b16886-horizon-secret-key podName:aff60bd8-b19b-421f-b788-f17774b16886 nodeName:}" failed. No retries permitted until 2026-01-20 18:44:57.587855021 +0000 UTC m=+7391.348192988 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "horizon-secret-key" (UniqueName: "kubernetes.io/secret/aff60bd8-b19b-421f-b788-f17774b16886-horizon-secret-key") pod "horizon-6f6c5c44d8-lr7hc" (UID: "aff60bd8-b19b-421f-b788-f17774b16886") : secret "horizon" not found Jan 20 18:44:57 crc kubenswrapper[4558]: I0120 18:44:57.088056 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aff60bd8-b19b-421f-b788-f17774b16886-logs\") pod \"horizon-6f6c5c44d8-lr7hc\" (UID: \"aff60bd8-b19b-421f-b788-f17774b16886\") " pod="horizon-kuttl-tests/horizon-6f6c5c44d8-lr7hc" Jan 20 18:44:57 crc kubenswrapper[4558]: I0120 18:44:57.088409 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"policy\" (UniqueName: \"kubernetes.io/configmap/aff60bd8-b19b-421f-b788-f17774b16886-policy\") pod \"horizon-6f6c5c44d8-lr7hc\" (UID: \"aff60bd8-b19b-421f-b788-f17774b16886\") " pod="horizon-kuttl-tests/horizon-6f6c5c44d8-lr7hc" Jan 20 18:44:57 crc kubenswrapper[4558]: E0120 18:44:57.092221 4558 projected.go:194] Error preparing data for projected volume kube-api-access-vml2k for pod horizon-kuttl-tests/horizon-6f6c5c44d8-lr7hc: failed to fetch token: serviceaccounts "horizon-horizon" not found Jan 20 18:44:57 crc kubenswrapper[4558]: E0120 18:44:57.092321 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/aff60bd8-b19b-421f-b788-f17774b16886-kube-api-access-vml2k podName:aff60bd8-b19b-421f-b788-f17774b16886 nodeName:}" failed. No retries permitted until 2026-01-20 18:44:57.592290762 +0000 UTC m=+7391.352628729 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-vml2k" (UniqueName: "kubernetes.io/projected/aff60bd8-b19b-421f-b788-f17774b16886-kube-api-access-vml2k") pod "horizon-6f6c5c44d8-lr7hc" (UID: "aff60bd8-b19b-421f-b788-f17774b16886") : failed to fetch token: serviceaccounts "horizon-horizon" not found Jan 20 18:44:57 crc kubenswrapper[4558]: I0120 18:44:57.330141 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 18:44:57 crc kubenswrapper[4558]: I0120 18:44:57.330231 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 18:44:57 crc kubenswrapper[4558]: I0120 18:44:57.392013 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/horizon-6f6c5c44d8-lr7hc" Jan 20 18:44:57 crc kubenswrapper[4558]: I0120 18:44:57.400295 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="horizon-kuttl-tests/horizon-6f6c5c44d8-lr7hc" Jan 20 18:44:57 crc kubenswrapper[4558]: I0120 18:44:57.595452 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"policy\" (UniqueName: \"kubernetes.io/configmap/aff60bd8-b19b-421f-b788-f17774b16886-policy\") pod \"aff60bd8-b19b-421f-b788-f17774b16886\" (UID: \"aff60bd8-b19b-421f-b788-f17774b16886\") " Jan 20 18:44:57 crc kubenswrapper[4558]: I0120 18:44:57.595702 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aff60bd8-b19b-421f-b788-f17774b16886-logs\") pod \"aff60bd8-b19b-421f-b788-f17774b16886\" (UID: \"aff60bd8-b19b-421f-b788-f17774b16886\") " Jan 20 18:44:57 crc kubenswrapper[4558]: I0120 18:44:57.596220 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/aff60bd8-b19b-421f-b788-f17774b16886-scripts\") pod \"horizon-6f6c5c44d8-lr7hc\" (UID: \"aff60bd8-b19b-421f-b788-f17774b16886\") " pod="horizon-kuttl-tests/horizon-6f6c5c44d8-lr7hc" Jan 20 18:44:57 crc kubenswrapper[4558]: I0120 18:44:57.596296 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/aff60bd8-b19b-421f-b788-f17774b16886-horizon-secret-key\") pod \"horizon-6f6c5c44d8-lr7hc\" (UID: \"aff60bd8-b19b-421f-b788-f17774b16886\") " pod="horizon-kuttl-tests/horizon-6f6c5c44d8-lr7hc" Jan 20 18:44:57 crc kubenswrapper[4558]: I0120 18:44:57.596420 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/aff60bd8-b19b-421f-b788-f17774b16886-config-data\") pod \"horizon-6f6c5c44d8-lr7hc\" (UID: \"aff60bd8-b19b-421f-b788-f17774b16886\") " pod="horizon-kuttl-tests/horizon-6f6c5c44d8-lr7hc" Jan 20 18:44:57 crc kubenswrapper[4558]: I0120 18:44:57.596461 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vml2k\" (UniqueName: \"kubernetes.io/projected/aff60bd8-b19b-421f-b788-f17774b16886-kube-api-access-vml2k\") pod \"horizon-6f6c5c44d8-lr7hc\" (UID: \"aff60bd8-b19b-421f-b788-f17774b16886\") " pod="horizon-kuttl-tests/horizon-6f6c5c44d8-lr7hc" Jan 20 18:44:57 crc kubenswrapper[4558]: I0120 18:44:57.596535 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aff60bd8-b19b-421f-b788-f17774b16886-policy" (OuterVolumeSpecName: "policy") pod "aff60bd8-b19b-421f-b788-f17774b16886" (UID: "aff60bd8-b19b-421f-b788-f17774b16886"). InnerVolumeSpecName "policy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:44:57 crc kubenswrapper[4558]: E0120 18:44:57.596669 4558 configmap.go:193] Couldn't get configMap horizon-kuttl-tests/horizon-scripts: configmap "horizon-scripts" not found Jan 20 18:44:57 crc kubenswrapper[4558]: E0120 18:44:57.596735 4558 secret.go:188] Couldn't get secret horizon-kuttl-tests/horizon: secret "horizon" not found Jan 20 18:44:57 crc kubenswrapper[4558]: E0120 18:44:57.596801 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/aff60bd8-b19b-421f-b788-f17774b16886-scripts podName:aff60bd8-b19b-421f-b788-f17774b16886 nodeName:}" failed. No retries permitted until 2026-01-20 18:44:58.596776947 +0000 UTC m=+7392.357114913 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "scripts" (UniqueName: "kubernetes.io/configmap/aff60bd8-b19b-421f-b788-f17774b16886-scripts") pod "horizon-6f6c5c44d8-lr7hc" (UID: "aff60bd8-b19b-421f-b788-f17774b16886") : configmap "horizon-scripts" not found Jan 20 18:44:57 crc kubenswrapper[4558]: E0120 18:44:57.598055 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/aff60bd8-b19b-421f-b788-f17774b16886-horizon-secret-key podName:aff60bd8-b19b-421f-b788-f17774b16886 nodeName:}" failed. No retries permitted until 2026-01-20 18:44:58.598015004 +0000 UTC m=+7392.358352961 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "horizon-secret-key" (UniqueName: "kubernetes.io/secret/aff60bd8-b19b-421f-b788-f17774b16886-horizon-secret-key") pod "horizon-6f6c5c44d8-lr7hc" (UID: "aff60bd8-b19b-421f-b788-f17774b16886") : secret "horizon" not found Jan 20 18:44:57 crc kubenswrapper[4558]: I0120 18:44:57.596853 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aff60bd8-b19b-421f-b788-f17774b16886-logs" (OuterVolumeSpecName: "logs") pod "aff60bd8-b19b-421f-b788-f17774b16886" (UID: "aff60bd8-b19b-421f-b788-f17774b16886"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:44:57 crc kubenswrapper[4558]: E0120 18:44:57.597335 4558 configmap.go:193] Couldn't get configMap horizon-kuttl-tests/horizon-config-data: configmap "horizon-config-data" not found Jan 20 18:44:57 crc kubenswrapper[4558]: E0120 18:44:57.598238 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/aff60bd8-b19b-421f-b788-f17774b16886-config-data podName:aff60bd8-b19b-421f-b788-f17774b16886 nodeName:}" failed. No retries permitted until 2026-01-20 18:44:58.598218406 +0000 UTC m=+7392.358556634 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/configmap/aff60bd8-b19b-421f-b788-f17774b16886-config-data") pod "horizon-6f6c5c44d8-lr7hc" (UID: "aff60bd8-b19b-421f-b788-f17774b16886") : configmap "horizon-config-data" not found Jan 20 18:44:57 crc kubenswrapper[4558]: E0120 18:44:57.600270 4558 projected.go:194] Error preparing data for projected volume kube-api-access-vml2k for pod horizon-kuttl-tests/horizon-6f6c5c44d8-lr7hc: failed to fetch token: serviceaccounts "horizon-horizon" not found Jan 20 18:44:57 crc kubenswrapper[4558]: E0120 18:44:57.600339 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/aff60bd8-b19b-421f-b788-f17774b16886-kube-api-access-vml2k podName:aff60bd8-b19b-421f-b788-f17774b16886 nodeName:}" failed. No retries permitted until 2026-01-20 18:44:58.600329236 +0000 UTC m=+7392.360667203 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-vml2k" (UniqueName: "kubernetes.io/projected/aff60bd8-b19b-421f-b788-f17774b16886-kube-api-access-vml2k") pod "horizon-6f6c5c44d8-lr7hc" (UID: "aff60bd8-b19b-421f-b788-f17774b16886") : failed to fetch token: serviceaccounts "horizon-horizon" not found Jan 20 18:44:57 crc kubenswrapper[4558]: I0120 18:44:57.699217 4558 reconciler_common.go:293] "Volume detached for volume \"policy\" (UniqueName: \"kubernetes.io/configmap/aff60bd8-b19b-421f-b788-f17774b16886-policy\") on node \"crc\" DevicePath \"\"" Jan 20 18:44:57 crc kubenswrapper[4558]: I0120 18:44:57.699385 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aff60bd8-b19b-421f-b788-f17774b16886-logs\") on node \"crc\" DevicePath \"\"" Jan 20 18:44:58 crc kubenswrapper[4558]: I0120 18:44:58.405768 4558 generic.go:334] "Generic (PLEG): container finished" podID="faa3fd5a-1d25-421a-8f05-046b725fa16c" containerID="9a4d4756d9f6aaeafc335e3f85143281119d107b836fb1b29b365e5cff54acff" exitCode=0 Jan 20 18:44:58 crc kubenswrapper[4558]: I0120 18:44:58.405860 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/horizon-9b986b9c-nz6z9" event={"ID":"faa3fd5a-1d25-421a-8f05-046b725fa16c","Type":"ContainerDied","Data":"9a4d4756d9f6aaeafc335e3f85143281119d107b836fb1b29b365e5cff54acff"} Jan 20 18:44:58 crc kubenswrapper[4558]: I0120 18:44:58.405889 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/horizon-6f6c5c44d8-lr7hc" Jan 20 18:44:58 crc kubenswrapper[4558]: I0120 18:44:58.441515 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["horizon-kuttl-tests/horizon-6f6c5c44d8-lr7hc"] Jan 20 18:44:58 crc kubenswrapper[4558]: I0120 18:44:58.445842 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["horizon-kuttl-tests/horizon-6f6c5c44d8-lr7hc"] Jan 20 18:44:58 crc kubenswrapper[4558]: I0120 18:44:58.512635 4558 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/aff60bd8-b19b-421f-b788-f17774b16886-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Jan 20 18:44:58 crc kubenswrapper[4558]: I0120 18:44:58.512666 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/aff60bd8-b19b-421f-b788-f17774b16886-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 18:44:58 crc kubenswrapper[4558]: I0120 18:44:58.512676 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/aff60bd8-b19b-421f-b788-f17774b16886-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 18:44:58 crc kubenswrapper[4558]: I0120 18:44:58.512690 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vml2k\" (UniqueName: \"kubernetes.io/projected/aff60bd8-b19b-421f-b788-f17774b16886-kube-api-access-vml2k\") on node \"crc\" DevicePath \"\"" Jan 20 18:44:58 crc kubenswrapper[4558]: I0120 18:44:58.585325 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aff60bd8-b19b-421f-b788-f17774b16886" path="/var/lib/kubelet/pods/aff60bd8-b19b-421f-b788-f17774b16886/volumes" Jan 20 18:45:00 crc kubenswrapper[4558]: I0120 18:45:00.153154 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29482245-j4sm9"] Jan 20 18:45:00 crc kubenswrapper[4558]: I0120 18:45:00.156413 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29482245-j4sm9" Jan 20 18:45:00 crc kubenswrapper[4558]: I0120 18:45:00.161269 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 20 18:45:00 crc kubenswrapper[4558]: I0120 18:45:00.161596 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 20 18:45:00 crc kubenswrapper[4558]: I0120 18:45:00.180088 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29482245-j4sm9"] Jan 20 18:45:00 crc kubenswrapper[4558]: I0120 18:45:00.240548 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8smh9\" (UniqueName: \"kubernetes.io/projected/6ab78d9a-8984-4689-b98e-7a25324b9af0-kube-api-access-8smh9\") pod \"collect-profiles-29482245-j4sm9\" (UID: \"6ab78d9a-8984-4689-b98e-7a25324b9af0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482245-j4sm9" Jan 20 18:45:00 crc kubenswrapper[4558]: I0120 18:45:00.240607 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6ab78d9a-8984-4689-b98e-7a25324b9af0-config-volume\") pod \"collect-profiles-29482245-j4sm9\" (UID: \"6ab78d9a-8984-4689-b98e-7a25324b9af0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482245-j4sm9" Jan 20 18:45:00 crc kubenswrapper[4558]: I0120 18:45:00.240658 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6ab78d9a-8984-4689-b98e-7a25324b9af0-secret-volume\") pod \"collect-profiles-29482245-j4sm9\" (UID: \"6ab78d9a-8984-4689-b98e-7a25324b9af0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482245-j4sm9" Jan 20 18:45:00 crc kubenswrapper[4558]: I0120 18:45:00.341942 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8smh9\" (UniqueName: \"kubernetes.io/projected/6ab78d9a-8984-4689-b98e-7a25324b9af0-kube-api-access-8smh9\") pod \"collect-profiles-29482245-j4sm9\" (UID: \"6ab78d9a-8984-4689-b98e-7a25324b9af0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482245-j4sm9" Jan 20 18:45:00 crc kubenswrapper[4558]: I0120 18:45:00.342040 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6ab78d9a-8984-4689-b98e-7a25324b9af0-config-volume\") pod \"collect-profiles-29482245-j4sm9\" (UID: \"6ab78d9a-8984-4689-b98e-7a25324b9af0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482245-j4sm9" Jan 20 18:45:00 crc kubenswrapper[4558]: I0120 18:45:00.342142 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6ab78d9a-8984-4689-b98e-7a25324b9af0-secret-volume\") pod \"collect-profiles-29482245-j4sm9\" (UID: \"6ab78d9a-8984-4689-b98e-7a25324b9af0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482245-j4sm9" Jan 20 18:45:00 crc kubenswrapper[4558]: I0120 18:45:00.343521 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6ab78d9a-8984-4689-b98e-7a25324b9af0-config-volume\") pod 
\"collect-profiles-29482245-j4sm9\" (UID: \"6ab78d9a-8984-4689-b98e-7a25324b9af0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482245-j4sm9" Jan 20 18:45:00 crc kubenswrapper[4558]: I0120 18:45:00.351200 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6ab78d9a-8984-4689-b98e-7a25324b9af0-secret-volume\") pod \"collect-profiles-29482245-j4sm9\" (UID: \"6ab78d9a-8984-4689-b98e-7a25324b9af0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482245-j4sm9" Jan 20 18:45:00 crc kubenswrapper[4558]: I0120 18:45:00.359725 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8smh9\" (UniqueName: \"kubernetes.io/projected/6ab78d9a-8984-4689-b98e-7a25324b9af0-kube-api-access-8smh9\") pod \"collect-profiles-29482245-j4sm9\" (UID: \"6ab78d9a-8984-4689-b98e-7a25324b9af0\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482245-j4sm9" Jan 20 18:45:00 crc kubenswrapper[4558]: I0120 18:45:00.425029 4558 generic.go:334] "Generic (PLEG): container finished" podID="0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3" containerID="468366f51379cc21d36c5d1382767e010720b6ff7948e08b8ec4bcf062faedd1" exitCode=0 Jan 20 18:45:00 crc kubenswrapper[4558]: I0120 18:45:00.425083 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/horizon-598f976c49-c8x57" event={"ID":"0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3","Type":"ContainerDied","Data":"468366f51379cc21d36c5d1382767e010720b6ff7948e08b8ec4bcf062faedd1"} Jan 20 18:45:00 crc kubenswrapper[4558]: I0120 18:45:00.476921 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29482245-j4sm9" Jan 20 18:45:00 crc kubenswrapper[4558]: I0120 18:45:00.859833 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="horizon-kuttl-tests/horizon-9b986b9c-nz6z9" podUID="faa3fd5a-1d25-421a-8f05-046b725fa16c" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.214:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.214:8080: connect: connection refused" Jan 20 18:45:00 crc kubenswrapper[4558]: I0120 18:45:00.881065 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29482245-j4sm9"] Jan 20 18:45:00 crc kubenswrapper[4558]: I0120 18:45:00.924243 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="horizon-kuttl-tests/horizon-598f976c49-c8x57" podUID="0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.215:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.215:8080: connect: connection refused" Jan 20 18:45:01 crc kubenswrapper[4558]: I0120 18:45:01.439596 4558 generic.go:334] "Generic (PLEG): container finished" podID="6ab78d9a-8984-4689-b98e-7a25324b9af0" containerID="52251df8b66ae23a742d2ac33be4bfc639f190b64911d0aeaf6af12caa58df97" exitCode=0 Jan 20 18:45:01 crc kubenswrapper[4558]: I0120 18:45:01.439731 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29482245-j4sm9" event={"ID":"6ab78d9a-8984-4689-b98e-7a25324b9af0","Type":"ContainerDied","Data":"52251df8b66ae23a742d2ac33be4bfc639f190b64911d0aeaf6af12caa58df97"} Jan 20 18:45:01 crc kubenswrapper[4558]: I0120 18:45:01.440082 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-operator-lifecycle-manager/collect-profiles-29482245-j4sm9" event={"ID":"6ab78d9a-8984-4689-b98e-7a25324b9af0","Type":"ContainerStarted","Data":"0e624e14bde0a56368fd86ccc7c9e5f9a2277328aa98688d362f07f4cf4c298e"} Jan 20 18:45:02 crc kubenswrapper[4558]: I0120 18:45:02.704403 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29482245-j4sm9" Jan 20 18:45:02 crc kubenswrapper[4558]: I0120 18:45:02.885190 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6ab78d9a-8984-4689-b98e-7a25324b9af0-config-volume\") pod \"6ab78d9a-8984-4689-b98e-7a25324b9af0\" (UID: \"6ab78d9a-8984-4689-b98e-7a25324b9af0\") " Jan 20 18:45:02 crc kubenswrapper[4558]: I0120 18:45:02.885540 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6ab78d9a-8984-4689-b98e-7a25324b9af0-secret-volume\") pod \"6ab78d9a-8984-4689-b98e-7a25324b9af0\" (UID: \"6ab78d9a-8984-4689-b98e-7a25324b9af0\") " Jan 20 18:45:02 crc kubenswrapper[4558]: I0120 18:45:02.885589 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8smh9\" (UniqueName: \"kubernetes.io/projected/6ab78d9a-8984-4689-b98e-7a25324b9af0-kube-api-access-8smh9\") pod \"6ab78d9a-8984-4689-b98e-7a25324b9af0\" (UID: \"6ab78d9a-8984-4689-b98e-7a25324b9af0\") " Jan 20 18:45:02 crc kubenswrapper[4558]: I0120 18:45:02.886431 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ab78d9a-8984-4689-b98e-7a25324b9af0-config-volume" (OuterVolumeSpecName: "config-volume") pod "6ab78d9a-8984-4689-b98e-7a25324b9af0" (UID: "6ab78d9a-8984-4689-b98e-7a25324b9af0"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:45:02 crc kubenswrapper[4558]: I0120 18:45:02.891718 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ab78d9a-8984-4689-b98e-7a25324b9af0-kube-api-access-8smh9" (OuterVolumeSpecName: "kube-api-access-8smh9") pod "6ab78d9a-8984-4689-b98e-7a25324b9af0" (UID: "6ab78d9a-8984-4689-b98e-7a25324b9af0"). InnerVolumeSpecName "kube-api-access-8smh9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:45:02 crc kubenswrapper[4558]: I0120 18:45:02.892749 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ab78d9a-8984-4689-b98e-7a25324b9af0-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "6ab78d9a-8984-4689-b98e-7a25324b9af0" (UID: "6ab78d9a-8984-4689-b98e-7a25324b9af0"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:45:02 crc kubenswrapper[4558]: I0120 18:45:02.988239 4558 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6ab78d9a-8984-4689-b98e-7a25324b9af0-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 20 18:45:02 crc kubenswrapper[4558]: I0120 18:45:02.988280 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8smh9\" (UniqueName: \"kubernetes.io/projected/6ab78d9a-8984-4689-b98e-7a25324b9af0-kube-api-access-8smh9\") on node \"crc\" DevicePath \"\"" Jan 20 18:45:02 crc kubenswrapper[4558]: I0120 18:45:02.988293 4558 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6ab78d9a-8984-4689-b98e-7a25324b9af0-config-volume\") on node \"crc\" DevicePath \"\"" Jan 20 18:45:03 crc kubenswrapper[4558]: I0120 18:45:03.461783 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29482245-j4sm9" event={"ID":"6ab78d9a-8984-4689-b98e-7a25324b9af0","Type":"ContainerDied","Data":"0e624e14bde0a56368fd86ccc7c9e5f9a2277328aa98688d362f07f4cf4c298e"} Jan 20 18:45:03 crc kubenswrapper[4558]: I0120 18:45:03.462075 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0e624e14bde0a56368fd86ccc7c9e5f9a2277328aa98688d362f07f4cf4c298e" Jan 20 18:45:03 crc kubenswrapper[4558]: I0120 18:45:03.461894 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29482245-j4sm9" Jan 20 18:45:03 crc kubenswrapper[4558]: I0120 18:45:03.772620 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29482200-wl5jw"] Jan 20 18:45:03 crc kubenswrapper[4558]: I0120 18:45:03.781484 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29482200-wl5jw"] Jan 20 18:45:04 crc kubenswrapper[4558]: I0120 18:45:04.575384 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f7c2d02a-545b-4766-bdc3-6273e29a76e3" path="/var/lib/kubelet/pods/f7c2d02a-545b-4766-bdc3-6273e29a76e3/volumes" Jan 20 18:45:07 crc kubenswrapper[4558]: I0120 18:45:07.108999 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-thzm9"] Jan 20 18:45:07 crc kubenswrapper[4558]: E0120 18:45:07.109773 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ab78d9a-8984-4689-b98e-7a25324b9af0" containerName="collect-profiles" Jan 20 18:45:07 crc kubenswrapper[4558]: I0120 18:45:07.109789 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ab78d9a-8984-4689-b98e-7a25324b9af0" containerName="collect-profiles" Jan 20 18:45:07 crc kubenswrapper[4558]: I0120 18:45:07.109957 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="6ab78d9a-8984-4689-b98e-7a25324b9af0" containerName="collect-profiles" Jan 20 18:45:07 crc kubenswrapper[4558]: I0120 18:45:07.111062 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-thzm9" Jan 20 18:45:07 crc kubenswrapper[4558]: I0120 18:45:07.129504 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-thzm9"] Jan 20 18:45:07 crc kubenswrapper[4558]: I0120 18:45:07.253248 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8af16e38-e131-4735-a788-2aca07129693-catalog-content\") pod \"redhat-marketplace-thzm9\" (UID: \"8af16e38-e131-4735-a788-2aca07129693\") " pod="openshift-marketplace/redhat-marketplace-thzm9" Jan 20 18:45:07 crc kubenswrapper[4558]: I0120 18:45:07.253311 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s4bx9\" (UniqueName: \"kubernetes.io/projected/8af16e38-e131-4735-a788-2aca07129693-kube-api-access-s4bx9\") pod \"redhat-marketplace-thzm9\" (UID: \"8af16e38-e131-4735-a788-2aca07129693\") " pod="openshift-marketplace/redhat-marketplace-thzm9" Jan 20 18:45:07 crc kubenswrapper[4558]: I0120 18:45:07.253561 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8af16e38-e131-4735-a788-2aca07129693-utilities\") pod \"redhat-marketplace-thzm9\" (UID: \"8af16e38-e131-4735-a788-2aca07129693\") " pod="openshift-marketplace/redhat-marketplace-thzm9" Jan 20 18:45:07 crc kubenswrapper[4558]: I0120 18:45:07.355261 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8af16e38-e131-4735-a788-2aca07129693-catalog-content\") pod \"redhat-marketplace-thzm9\" (UID: \"8af16e38-e131-4735-a788-2aca07129693\") " pod="openshift-marketplace/redhat-marketplace-thzm9" Jan 20 18:45:07 crc kubenswrapper[4558]: I0120 18:45:07.355315 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s4bx9\" (UniqueName: \"kubernetes.io/projected/8af16e38-e131-4735-a788-2aca07129693-kube-api-access-s4bx9\") pod \"redhat-marketplace-thzm9\" (UID: \"8af16e38-e131-4735-a788-2aca07129693\") " pod="openshift-marketplace/redhat-marketplace-thzm9" Jan 20 18:45:07 crc kubenswrapper[4558]: I0120 18:45:07.355423 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8af16e38-e131-4735-a788-2aca07129693-utilities\") pod \"redhat-marketplace-thzm9\" (UID: \"8af16e38-e131-4735-a788-2aca07129693\") " pod="openshift-marketplace/redhat-marketplace-thzm9" Jan 20 18:45:07 crc kubenswrapper[4558]: I0120 18:45:07.355779 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8af16e38-e131-4735-a788-2aca07129693-catalog-content\") pod \"redhat-marketplace-thzm9\" (UID: \"8af16e38-e131-4735-a788-2aca07129693\") " pod="openshift-marketplace/redhat-marketplace-thzm9" Jan 20 18:45:07 crc kubenswrapper[4558]: I0120 18:45:07.355912 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8af16e38-e131-4735-a788-2aca07129693-utilities\") pod \"redhat-marketplace-thzm9\" (UID: \"8af16e38-e131-4735-a788-2aca07129693\") " pod="openshift-marketplace/redhat-marketplace-thzm9" Jan 20 18:45:07 crc kubenswrapper[4558]: I0120 18:45:07.373241 4558 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-s4bx9\" (UniqueName: \"kubernetes.io/projected/8af16e38-e131-4735-a788-2aca07129693-kube-api-access-s4bx9\") pod \"redhat-marketplace-thzm9\" (UID: \"8af16e38-e131-4735-a788-2aca07129693\") " pod="openshift-marketplace/redhat-marketplace-thzm9" Jan 20 18:45:07 crc kubenswrapper[4558]: I0120 18:45:07.435869 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-thzm9" Jan 20 18:45:07 crc kubenswrapper[4558]: I0120 18:45:07.850932 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-thzm9"] Jan 20 18:45:08 crc kubenswrapper[4558]: I0120 18:45:08.503263 4558 generic.go:334] "Generic (PLEG): container finished" podID="8af16e38-e131-4735-a788-2aca07129693" containerID="1790b872248554115d5d1b56a65246cf19921b08a53b285834bd2da373607b9a" exitCode=0 Jan 20 18:45:08 crc kubenswrapper[4558]: I0120 18:45:08.503311 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-thzm9" event={"ID":"8af16e38-e131-4735-a788-2aca07129693","Type":"ContainerDied","Data":"1790b872248554115d5d1b56a65246cf19921b08a53b285834bd2da373607b9a"} Jan 20 18:45:08 crc kubenswrapper[4558]: I0120 18:45:08.503559 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-thzm9" event={"ID":"8af16e38-e131-4735-a788-2aca07129693","Type":"ContainerStarted","Data":"d4c59fef9a52a6f3267fa81ddfa2e668423df487c5af947424f3e6df4899ec9b"} Jan 20 18:45:09 crc kubenswrapper[4558]: I0120 18:45:09.516042 4558 generic.go:334] "Generic (PLEG): container finished" podID="8af16e38-e131-4735-a788-2aca07129693" containerID="8160e2e3a17b9ff5f8b6a42a231d264223c859a16a99a1e2a89a7c30f00b5ef5" exitCode=0 Jan 20 18:45:09 crc kubenswrapper[4558]: I0120 18:45:09.516129 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-thzm9" event={"ID":"8af16e38-e131-4735-a788-2aca07129693","Type":"ContainerDied","Data":"8160e2e3a17b9ff5f8b6a42a231d264223c859a16a99a1e2a89a7c30f00b5ef5"} Jan 20 18:45:10 crc kubenswrapper[4558]: I0120 18:45:10.530745 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-thzm9" event={"ID":"8af16e38-e131-4735-a788-2aca07129693","Type":"ContainerStarted","Data":"71756f5256bb8a315e366883895ec173d75472bf60e5b8d703dfe41bf6f0452b"} Jan 20 18:45:10 crc kubenswrapper[4558]: I0120 18:45:10.553229 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-thzm9" podStartSLOduration=2.025982988 podStartE2EDuration="3.553203859s" podCreationTimestamp="2026-01-20 18:45:07 +0000 UTC" firstStartedPulling="2026-01-20 18:45:08.504654928 +0000 UTC m=+7402.264992884" lastFinishedPulling="2026-01-20 18:45:10.031875788 +0000 UTC m=+7403.792213755" observedRunningTime="2026-01-20 18:45:10.547675244 +0000 UTC m=+7404.308013212" watchObservedRunningTime="2026-01-20 18:45:10.553203859 +0000 UTC m=+7404.313541827" Jan 20 18:45:10 crc kubenswrapper[4558]: I0120 18:45:10.860304 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="horizon-kuttl-tests/horizon-9b986b9c-nz6z9" podUID="faa3fd5a-1d25-421a-8f05-046b725fa16c" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.214:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.214:8080: connect: connection refused" Jan 20 18:45:10 crc kubenswrapper[4558]: I0120 18:45:10.924413 4558 
prober.go:107] "Probe failed" probeType="Readiness" pod="horizon-kuttl-tests/horizon-598f976c49-c8x57" podUID="0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.215:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.215:8080: connect: connection refused" Jan 20 18:45:17 crc kubenswrapper[4558]: I0120 18:45:17.436510 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-thzm9" Jan 20 18:45:17 crc kubenswrapper[4558]: I0120 18:45:17.437084 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-thzm9" Jan 20 18:45:17 crc kubenswrapper[4558]: I0120 18:45:17.472715 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-thzm9" Jan 20 18:45:17 crc kubenswrapper[4558]: I0120 18:45:17.619895 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-thzm9" Jan 20 18:45:20 crc kubenswrapper[4558]: I0120 18:45:20.860013 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="horizon-kuttl-tests/horizon-9b986b9c-nz6z9" podUID="faa3fd5a-1d25-421a-8f05-046b725fa16c" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.214:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.214:8080: connect: connection refused" Jan 20 18:45:20 crc kubenswrapper[4558]: I0120 18:45:20.860582 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="horizon-kuttl-tests/horizon-9b986b9c-nz6z9" Jan 20 18:45:20 crc kubenswrapper[4558]: I0120 18:45:20.924397 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="horizon-kuttl-tests/horizon-598f976c49-c8x57" podUID="0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3" containerName="horizon" probeResult="failure" output="Get \"http://10.217.1.215:8080/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.215:8080: connect: connection refused" Jan 20 18:45:20 crc kubenswrapper[4558]: I0120 18:45:20.924565 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="horizon-kuttl-tests/horizon-598f976c49-c8x57" Jan 20 18:45:21 crc kubenswrapper[4558]: I0120 18:45:21.907696 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-srbtw"] Jan 20 18:45:21 crc kubenswrapper[4558]: I0120 18:45:21.909049 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-srbtw" Jan 20 18:45:21 crc kubenswrapper[4558]: I0120 18:45:21.918808 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-srbtw"] Jan 20 18:45:22 crc kubenswrapper[4558]: I0120 18:45:22.085895 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bf5d23cd-b290-4b1c-b84a-d93f1817c6ae-utilities\") pod \"community-operators-srbtw\" (UID: \"bf5d23cd-b290-4b1c-b84a-d93f1817c6ae\") " pod="openshift-marketplace/community-operators-srbtw" Jan 20 18:45:22 crc kubenswrapper[4558]: I0120 18:45:22.085990 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-86xcm\" (UniqueName: \"kubernetes.io/projected/bf5d23cd-b290-4b1c-b84a-d93f1817c6ae-kube-api-access-86xcm\") pod \"community-operators-srbtw\" (UID: \"bf5d23cd-b290-4b1c-b84a-d93f1817c6ae\") " pod="openshift-marketplace/community-operators-srbtw" Jan 20 18:45:22 crc kubenswrapper[4558]: I0120 18:45:22.086035 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bf5d23cd-b290-4b1c-b84a-d93f1817c6ae-catalog-content\") pod \"community-operators-srbtw\" (UID: \"bf5d23cd-b290-4b1c-b84a-d93f1817c6ae\") " pod="openshift-marketplace/community-operators-srbtw" Jan 20 18:45:22 crc kubenswrapper[4558]: I0120 18:45:22.099545 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-thzm9"] Jan 20 18:45:22 crc kubenswrapper[4558]: I0120 18:45:22.099783 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-thzm9" podUID="8af16e38-e131-4735-a788-2aca07129693" containerName="registry-server" containerID="cri-o://71756f5256bb8a315e366883895ec173d75472bf60e5b8d703dfe41bf6f0452b" gracePeriod=2 Jan 20 18:45:22 crc kubenswrapper[4558]: I0120 18:45:22.188232 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bf5d23cd-b290-4b1c-b84a-d93f1817c6ae-utilities\") pod \"community-operators-srbtw\" (UID: \"bf5d23cd-b290-4b1c-b84a-d93f1817c6ae\") " pod="openshift-marketplace/community-operators-srbtw" Jan 20 18:45:22 crc kubenswrapper[4558]: I0120 18:45:22.188531 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-86xcm\" (UniqueName: \"kubernetes.io/projected/bf5d23cd-b290-4b1c-b84a-d93f1817c6ae-kube-api-access-86xcm\") pod \"community-operators-srbtw\" (UID: \"bf5d23cd-b290-4b1c-b84a-d93f1817c6ae\") " pod="openshift-marketplace/community-operators-srbtw" Jan 20 18:45:22 crc kubenswrapper[4558]: I0120 18:45:22.189314 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bf5d23cd-b290-4b1c-b84a-d93f1817c6ae-catalog-content\") pod \"community-operators-srbtw\" (UID: \"bf5d23cd-b290-4b1c-b84a-d93f1817c6ae\") " pod="openshift-marketplace/community-operators-srbtw" Jan 20 18:45:22 crc kubenswrapper[4558]: I0120 18:45:22.188864 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bf5d23cd-b290-4b1c-b84a-d93f1817c6ae-utilities\") pod \"community-operators-srbtw\" (UID: \"bf5d23cd-b290-4b1c-b84a-d93f1817c6ae\") 
" pod="openshift-marketplace/community-operators-srbtw" Jan 20 18:45:22 crc kubenswrapper[4558]: I0120 18:45:22.189743 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bf5d23cd-b290-4b1c-b84a-d93f1817c6ae-catalog-content\") pod \"community-operators-srbtw\" (UID: \"bf5d23cd-b290-4b1c-b84a-d93f1817c6ae\") " pod="openshift-marketplace/community-operators-srbtw" Jan 20 18:45:22 crc kubenswrapper[4558]: I0120 18:45:22.216395 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-86xcm\" (UniqueName: \"kubernetes.io/projected/bf5d23cd-b290-4b1c-b84a-d93f1817c6ae-kube-api-access-86xcm\") pod \"community-operators-srbtw\" (UID: \"bf5d23cd-b290-4b1c-b84a-d93f1817c6ae\") " pod="openshift-marketplace/community-operators-srbtw" Jan 20 18:45:22 crc kubenswrapper[4558]: I0120 18:45:22.224690 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-srbtw" Jan 20 18:45:22 crc kubenswrapper[4558]: I0120 18:45:22.472615 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-thzm9" Jan 20 18:45:22 crc kubenswrapper[4558]: I0120 18:45:22.600816 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4bx9\" (UniqueName: \"kubernetes.io/projected/8af16e38-e131-4735-a788-2aca07129693-kube-api-access-s4bx9\") pod \"8af16e38-e131-4735-a788-2aca07129693\" (UID: \"8af16e38-e131-4735-a788-2aca07129693\") " Jan 20 18:45:22 crc kubenswrapper[4558]: I0120 18:45:22.600916 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8af16e38-e131-4735-a788-2aca07129693-catalog-content\") pod \"8af16e38-e131-4735-a788-2aca07129693\" (UID: \"8af16e38-e131-4735-a788-2aca07129693\") " Jan 20 18:45:22 crc kubenswrapper[4558]: I0120 18:45:22.600995 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8af16e38-e131-4735-a788-2aca07129693-utilities\") pod \"8af16e38-e131-4735-a788-2aca07129693\" (UID: \"8af16e38-e131-4735-a788-2aca07129693\") " Jan 20 18:45:22 crc kubenswrapper[4558]: I0120 18:45:22.602156 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8af16e38-e131-4735-a788-2aca07129693-utilities" (OuterVolumeSpecName: "utilities") pod "8af16e38-e131-4735-a788-2aca07129693" (UID: "8af16e38-e131-4735-a788-2aca07129693"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:45:22 crc kubenswrapper[4558]: I0120 18:45:22.605122 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8af16e38-e131-4735-a788-2aca07129693-kube-api-access-s4bx9" (OuterVolumeSpecName: "kube-api-access-s4bx9") pod "8af16e38-e131-4735-a788-2aca07129693" (UID: "8af16e38-e131-4735-a788-2aca07129693"). InnerVolumeSpecName "kube-api-access-s4bx9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:45:22 crc kubenswrapper[4558]: I0120 18:45:22.620437 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8af16e38-e131-4735-a788-2aca07129693-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8af16e38-e131-4735-a788-2aca07129693" (UID: "8af16e38-e131-4735-a788-2aca07129693"). 
InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:45:22 crc kubenswrapper[4558]: I0120 18:45:22.624082 4558 generic.go:334] "Generic (PLEG): container finished" podID="8af16e38-e131-4735-a788-2aca07129693" containerID="71756f5256bb8a315e366883895ec173d75472bf60e5b8d703dfe41bf6f0452b" exitCode=0 Jan 20 18:45:22 crc kubenswrapper[4558]: I0120 18:45:22.624142 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-thzm9" event={"ID":"8af16e38-e131-4735-a788-2aca07129693","Type":"ContainerDied","Data":"71756f5256bb8a315e366883895ec173d75472bf60e5b8d703dfe41bf6f0452b"} Jan 20 18:45:22 crc kubenswrapper[4558]: I0120 18:45:22.624200 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-thzm9" event={"ID":"8af16e38-e131-4735-a788-2aca07129693","Type":"ContainerDied","Data":"d4c59fef9a52a6f3267fa81ddfa2e668423df487c5af947424f3e6df4899ec9b"} Jan 20 18:45:22 crc kubenswrapper[4558]: I0120 18:45:22.624211 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-thzm9" Jan 20 18:45:22 crc kubenswrapper[4558]: I0120 18:45:22.624223 4558 scope.go:117] "RemoveContainer" containerID="71756f5256bb8a315e366883895ec173d75472bf60e5b8d703dfe41bf6f0452b" Jan 20 18:45:22 crc kubenswrapper[4558]: I0120 18:45:22.643791 4558 scope.go:117] "RemoveContainer" containerID="8160e2e3a17b9ff5f8b6a42a231d264223c859a16a99a1e2a89a7c30f00b5ef5" Jan 20 18:45:22 crc kubenswrapper[4558]: I0120 18:45:22.656705 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-thzm9"] Jan 20 18:45:22 crc kubenswrapper[4558]: I0120 18:45:22.661006 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-thzm9"] Jan 20 18:45:22 crc kubenswrapper[4558]: I0120 18:45:22.681795 4558 scope.go:117] "RemoveContainer" containerID="1790b872248554115d5d1b56a65246cf19921b08a53b285834bd2da373607b9a" Jan 20 18:45:22 crc kubenswrapper[4558]: I0120 18:45:22.694849 4558 scope.go:117] "RemoveContainer" containerID="71756f5256bb8a315e366883895ec173d75472bf60e5b8d703dfe41bf6f0452b" Jan 20 18:45:22 crc kubenswrapper[4558]: E0120 18:45:22.695351 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"71756f5256bb8a315e366883895ec173d75472bf60e5b8d703dfe41bf6f0452b\": container with ID starting with 71756f5256bb8a315e366883895ec173d75472bf60e5b8d703dfe41bf6f0452b not found: ID does not exist" containerID="71756f5256bb8a315e366883895ec173d75472bf60e5b8d703dfe41bf6f0452b" Jan 20 18:45:22 crc kubenswrapper[4558]: I0120 18:45:22.695400 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"71756f5256bb8a315e366883895ec173d75472bf60e5b8d703dfe41bf6f0452b"} err="failed to get container status \"71756f5256bb8a315e366883895ec173d75472bf60e5b8d703dfe41bf6f0452b\": rpc error: code = NotFound desc = could not find container \"71756f5256bb8a315e366883895ec173d75472bf60e5b8d703dfe41bf6f0452b\": container with ID starting with 71756f5256bb8a315e366883895ec173d75472bf60e5b8d703dfe41bf6f0452b not found: ID does not exist" Jan 20 18:45:22 crc kubenswrapper[4558]: I0120 18:45:22.695429 4558 scope.go:117] "RemoveContainer" containerID="8160e2e3a17b9ff5f8b6a42a231d264223c859a16a99a1e2a89a7c30f00b5ef5" Jan 20 18:45:22 crc kubenswrapper[4558]: E0120 18:45:22.695777 4558 
log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8160e2e3a17b9ff5f8b6a42a231d264223c859a16a99a1e2a89a7c30f00b5ef5\": container with ID starting with 8160e2e3a17b9ff5f8b6a42a231d264223c859a16a99a1e2a89a7c30f00b5ef5 not found: ID does not exist" containerID="8160e2e3a17b9ff5f8b6a42a231d264223c859a16a99a1e2a89a7c30f00b5ef5" Jan 20 18:45:22 crc kubenswrapper[4558]: I0120 18:45:22.695813 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8160e2e3a17b9ff5f8b6a42a231d264223c859a16a99a1e2a89a7c30f00b5ef5"} err="failed to get container status \"8160e2e3a17b9ff5f8b6a42a231d264223c859a16a99a1e2a89a7c30f00b5ef5\": rpc error: code = NotFound desc = could not find container \"8160e2e3a17b9ff5f8b6a42a231d264223c859a16a99a1e2a89a7c30f00b5ef5\": container with ID starting with 8160e2e3a17b9ff5f8b6a42a231d264223c859a16a99a1e2a89a7c30f00b5ef5 not found: ID does not exist" Jan 20 18:45:22 crc kubenswrapper[4558]: I0120 18:45:22.695838 4558 scope.go:117] "RemoveContainer" containerID="1790b872248554115d5d1b56a65246cf19921b08a53b285834bd2da373607b9a" Jan 20 18:45:22 crc kubenswrapper[4558]: E0120 18:45:22.696092 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1790b872248554115d5d1b56a65246cf19921b08a53b285834bd2da373607b9a\": container with ID starting with 1790b872248554115d5d1b56a65246cf19921b08a53b285834bd2da373607b9a not found: ID does not exist" containerID="1790b872248554115d5d1b56a65246cf19921b08a53b285834bd2da373607b9a" Jan 20 18:45:22 crc kubenswrapper[4558]: I0120 18:45:22.696124 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1790b872248554115d5d1b56a65246cf19921b08a53b285834bd2da373607b9a"} err="failed to get container status \"1790b872248554115d5d1b56a65246cf19921b08a53b285834bd2da373607b9a\": rpc error: code = NotFound desc = could not find container \"1790b872248554115d5d1b56a65246cf19921b08a53b285834bd2da373607b9a\": container with ID starting with 1790b872248554115d5d1b56a65246cf19921b08a53b285834bd2da373607b9a not found: ID does not exist" Jan 20 18:45:22 crc kubenswrapper[4558]: I0120 18:45:22.703484 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4bx9\" (UniqueName: \"kubernetes.io/projected/8af16e38-e131-4735-a788-2aca07129693-kube-api-access-s4bx9\") on node \"crc\" DevicePath \"\"" Jan 20 18:45:22 crc kubenswrapper[4558]: I0120 18:45:22.703512 4558 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8af16e38-e131-4735-a788-2aca07129693-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 20 18:45:22 crc kubenswrapper[4558]: I0120 18:45:22.703524 4558 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8af16e38-e131-4735-a788-2aca07129693-utilities\") on node \"crc\" DevicePath \"\"" Jan 20 18:45:22 crc kubenswrapper[4558]: I0120 18:45:22.742296 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-srbtw"] Jan 20 18:45:22 crc kubenswrapper[4558]: W0120 18:45:22.742420 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbf5d23cd_b290_4b1c_b84a_d93f1817c6ae.slice/crio-1e7b7aad8e4b11c955fcd73c8e7384561861ac115888b5d668f218c48497374e WatchSource:0}: Error finding 
container 1e7b7aad8e4b11c955fcd73c8e7384561861ac115888b5d668f218c48497374e: Status 404 returned error can't find the container with id 1e7b7aad8e4b11c955fcd73c8e7384561861ac115888b5d668f218c48497374e Jan 20 18:45:23 crc kubenswrapper[4558]: I0120 18:45:23.633441 4558 generic.go:334] "Generic (PLEG): container finished" podID="bf5d23cd-b290-4b1c-b84a-d93f1817c6ae" containerID="15232bc6b0abf02f1d5c71f75d58d8cb0c74297b517ba33b66aa243d6c6afee9" exitCode=0 Jan 20 18:45:23 crc kubenswrapper[4558]: I0120 18:45:23.633553 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-srbtw" event={"ID":"bf5d23cd-b290-4b1c-b84a-d93f1817c6ae","Type":"ContainerDied","Data":"15232bc6b0abf02f1d5c71f75d58d8cb0c74297b517ba33b66aa243d6c6afee9"} Jan 20 18:45:23 crc kubenswrapper[4558]: I0120 18:45:23.633825 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-srbtw" event={"ID":"bf5d23cd-b290-4b1c-b84a-d93f1817c6ae","Type":"ContainerStarted","Data":"1e7b7aad8e4b11c955fcd73c8e7384561861ac115888b5d668f218c48497374e"} Jan 20 18:45:23 crc kubenswrapper[4558]: I0120 18:45:23.636620 4558 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 20 18:45:24 crc kubenswrapper[4558]: I0120 18:45:24.576276 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8af16e38-e131-4735-a788-2aca07129693" path="/var/lib/kubelet/pods/8af16e38-e131-4735-a788-2aca07129693/volumes" Jan 20 18:45:24 crc kubenswrapper[4558]: I0120 18:45:24.643624 4558 generic.go:334] "Generic (PLEG): container finished" podID="faa3fd5a-1d25-421a-8f05-046b725fa16c" containerID="e70ccb2925fecb215d00942e3ba5962cb6a0f1d9e425c0fd685e7fbd7ddb39b4" exitCode=137 Jan 20 18:45:24 crc kubenswrapper[4558]: I0120 18:45:24.643703 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/horizon-9b986b9c-nz6z9" event={"ID":"faa3fd5a-1d25-421a-8f05-046b725fa16c","Type":"ContainerDied","Data":"e70ccb2925fecb215d00942e3ba5962cb6a0f1d9e425c0fd685e7fbd7ddb39b4"} Jan 20 18:45:24 crc kubenswrapper[4558]: I0120 18:45:24.645348 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-srbtw" event={"ID":"bf5d23cd-b290-4b1c-b84a-d93f1817c6ae","Type":"ContainerStarted","Data":"b2586c214f5fa9049949c2cadfec26270afd3b8bdd0d56f562450353a7f46d22"} Jan 20 18:45:24 crc kubenswrapper[4558]: I0120 18:45:24.700252 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="horizon-kuttl-tests/horizon-9b986b9c-nz6z9" Jan 20 18:45:24 crc kubenswrapper[4558]: I0120 18:45:24.837651 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/faa3fd5a-1d25-421a-8f05-046b725fa16c-scripts\") pod \"faa3fd5a-1d25-421a-8f05-046b725fa16c\" (UID: \"faa3fd5a-1d25-421a-8f05-046b725fa16c\") " Jan 20 18:45:24 crc kubenswrapper[4558]: I0120 18:45:24.837747 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qt5nx\" (UniqueName: \"kubernetes.io/projected/faa3fd5a-1d25-421a-8f05-046b725fa16c-kube-api-access-qt5nx\") pod \"faa3fd5a-1d25-421a-8f05-046b725fa16c\" (UID: \"faa3fd5a-1d25-421a-8f05-046b725fa16c\") " Jan 20 18:45:24 crc kubenswrapper[4558]: I0120 18:45:24.837795 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/faa3fd5a-1d25-421a-8f05-046b725fa16c-logs\") pod \"faa3fd5a-1d25-421a-8f05-046b725fa16c\" (UID: \"faa3fd5a-1d25-421a-8f05-046b725fa16c\") " Jan 20 18:45:24 crc kubenswrapper[4558]: I0120 18:45:24.837826 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/faa3fd5a-1d25-421a-8f05-046b725fa16c-config-data\") pod \"faa3fd5a-1d25-421a-8f05-046b725fa16c\" (UID: \"faa3fd5a-1d25-421a-8f05-046b725fa16c\") " Jan 20 18:45:24 crc kubenswrapper[4558]: I0120 18:45:24.837957 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/faa3fd5a-1d25-421a-8f05-046b725fa16c-horizon-secret-key\") pod \"faa3fd5a-1d25-421a-8f05-046b725fa16c\" (UID: \"faa3fd5a-1d25-421a-8f05-046b725fa16c\") " Jan 20 18:45:24 crc kubenswrapper[4558]: I0120 18:45:24.838593 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/faa3fd5a-1d25-421a-8f05-046b725fa16c-logs" (OuterVolumeSpecName: "logs") pod "faa3fd5a-1d25-421a-8f05-046b725fa16c" (UID: "faa3fd5a-1d25-421a-8f05-046b725fa16c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:45:24 crc kubenswrapper[4558]: I0120 18:45:24.844275 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/faa3fd5a-1d25-421a-8f05-046b725fa16c-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "faa3fd5a-1d25-421a-8f05-046b725fa16c" (UID: "faa3fd5a-1d25-421a-8f05-046b725fa16c"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:45:24 crc kubenswrapper[4558]: I0120 18:45:24.844716 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/faa3fd5a-1d25-421a-8f05-046b725fa16c-kube-api-access-qt5nx" (OuterVolumeSpecName: "kube-api-access-qt5nx") pod "faa3fd5a-1d25-421a-8f05-046b725fa16c" (UID: "faa3fd5a-1d25-421a-8f05-046b725fa16c"). InnerVolumeSpecName "kube-api-access-qt5nx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:45:24 crc kubenswrapper[4558]: I0120 18:45:24.855528 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/faa3fd5a-1d25-421a-8f05-046b725fa16c-config-data" (OuterVolumeSpecName: "config-data") pod "faa3fd5a-1d25-421a-8f05-046b725fa16c" (UID: "faa3fd5a-1d25-421a-8f05-046b725fa16c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:45:24 crc kubenswrapper[4558]: I0120 18:45:24.855896 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/faa3fd5a-1d25-421a-8f05-046b725fa16c-scripts" (OuterVolumeSpecName: "scripts") pod "faa3fd5a-1d25-421a-8f05-046b725fa16c" (UID: "faa3fd5a-1d25-421a-8f05-046b725fa16c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:45:24 crc kubenswrapper[4558]: I0120 18:45:24.941029 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/faa3fd5a-1d25-421a-8f05-046b725fa16c-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 18:45:24 crc kubenswrapper[4558]: I0120 18:45:24.941070 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qt5nx\" (UniqueName: \"kubernetes.io/projected/faa3fd5a-1d25-421a-8f05-046b725fa16c-kube-api-access-qt5nx\") on node \"crc\" DevicePath \"\"" Jan 20 18:45:24 crc kubenswrapper[4558]: I0120 18:45:24.941083 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/faa3fd5a-1d25-421a-8f05-046b725fa16c-logs\") on node \"crc\" DevicePath \"\"" Jan 20 18:45:24 crc kubenswrapper[4558]: I0120 18:45:24.941094 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/faa3fd5a-1d25-421a-8f05-046b725fa16c-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 18:45:24 crc kubenswrapper[4558]: I0120 18:45:24.941117 4558 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/faa3fd5a-1d25-421a-8f05-046b725fa16c-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Jan 20 18:45:25 crc kubenswrapper[4558]: I0120 18:45:25.656696 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/horizon-9b986b9c-nz6z9" event={"ID":"faa3fd5a-1d25-421a-8f05-046b725fa16c","Type":"ContainerDied","Data":"8bdd1e52aa7b132cbb6d546bdca1153ce198672d5be580ed9f37573f9fd15d02"} Jan 20 18:45:25 crc kubenswrapper[4558]: I0120 18:45:25.656746 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="horizon-kuttl-tests/horizon-9b986b9c-nz6z9" Jan 20 18:45:25 crc kubenswrapper[4558]: I0120 18:45:25.657085 4558 scope.go:117] "RemoveContainer" containerID="9a4d4756d9f6aaeafc335e3f85143281119d107b836fb1b29b365e5cff54acff" Jan 20 18:45:25 crc kubenswrapper[4558]: I0120 18:45:25.658835 4558 generic.go:334] "Generic (PLEG): container finished" podID="bf5d23cd-b290-4b1c-b84a-d93f1817c6ae" containerID="b2586c214f5fa9049949c2cadfec26270afd3b8bdd0d56f562450353a7f46d22" exitCode=0 Jan 20 18:45:25 crc kubenswrapper[4558]: I0120 18:45:25.658884 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-srbtw" event={"ID":"bf5d23cd-b290-4b1c-b84a-d93f1817c6ae","Type":"ContainerDied","Data":"b2586c214f5fa9049949c2cadfec26270afd3b8bdd0d56f562450353a7f46d22"} Jan 20 18:45:25 crc kubenswrapper[4558]: I0120 18:45:25.699885 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["horizon-kuttl-tests/horizon-9b986b9c-nz6z9"] Jan 20 18:45:25 crc kubenswrapper[4558]: I0120 18:45:25.705391 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["horizon-kuttl-tests/horizon-9b986b9c-nz6z9"] Jan 20 18:45:25 crc kubenswrapper[4558]: I0120 18:45:25.802497 4558 scope.go:117] "RemoveContainer" containerID="e70ccb2925fecb215d00942e3ba5962cb6a0f1d9e425c0fd685e7fbd7ddb39b4" Jan 20 18:45:26 crc kubenswrapper[4558]: I0120 18:45:26.574539 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="faa3fd5a-1d25-421a-8f05-046b725fa16c" path="/var/lib/kubelet/pods/faa3fd5a-1d25-421a-8f05-046b725fa16c/volumes" Jan 20 18:45:26 crc kubenswrapper[4558]: I0120 18:45:26.678378 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-srbtw" event={"ID":"bf5d23cd-b290-4b1c-b84a-d93f1817c6ae","Type":"ContainerStarted","Data":"1ad1be4537b145745b4a25db2ec84fd331f11a1b157625db0546f892a8775716"} Jan 20 18:45:26 crc kubenswrapper[4558]: I0120 18:45:26.701061 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-srbtw" podStartSLOduration=3.014920429 podStartE2EDuration="5.701051069s" podCreationTimestamp="2026-01-20 18:45:21 +0000 UTC" firstStartedPulling="2026-01-20 18:45:23.636390081 +0000 UTC m=+7417.396728048" lastFinishedPulling="2026-01-20 18:45:26.322520721 +0000 UTC m=+7420.082858688" observedRunningTime="2026-01-20 18:45:26.694518506 +0000 UTC m=+7420.454856493" watchObservedRunningTime="2026-01-20 18:45:26.701051069 +0000 UTC m=+7420.461389035" Jan 20 18:45:27 crc kubenswrapper[4558]: I0120 18:45:27.327066 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="horizon-kuttl-tests/horizon-598f976c49-c8x57" Jan 20 18:45:27 crc kubenswrapper[4558]: I0120 18:45:27.329812 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 18:45:27 crc kubenswrapper[4558]: I0120 18:45:27.329872 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 18:45:27 crc kubenswrapper[4558]: I0120 18:45:27.480748 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3-horizon-secret-key\") pod \"0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3\" (UID: \"0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3\") " Jan 20 18:45:27 crc kubenswrapper[4558]: I0120 18:45:27.480937 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3-scripts\") pod \"0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3\" (UID: \"0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3\") " Jan 20 18:45:27 crc kubenswrapper[4558]: I0120 18:45:27.480990 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2hs8f\" (UniqueName: \"kubernetes.io/projected/0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3-kube-api-access-2hs8f\") pod \"0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3\" (UID: \"0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3\") " Jan 20 18:45:27 crc kubenswrapper[4558]: I0120 18:45:27.481093 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3-logs\") pod \"0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3\" (UID: \"0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3\") " Jan 20 18:45:27 crc kubenswrapper[4558]: I0120 18:45:27.481179 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3-config-data\") pod \"0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3\" (UID: \"0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3\") " Jan 20 18:45:27 crc kubenswrapper[4558]: I0120 18:45:27.481895 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3-logs" (OuterVolumeSpecName: "logs") pod "0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3" (UID: "0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:45:27 crc kubenswrapper[4558]: I0120 18:45:27.489247 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3" (UID: "0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3"). InnerVolumeSpecName "horizon-secret-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:45:27 crc kubenswrapper[4558]: I0120 18:45:27.489851 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3-kube-api-access-2hs8f" (OuterVolumeSpecName: "kube-api-access-2hs8f") pod "0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3" (UID: "0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3"). InnerVolumeSpecName "kube-api-access-2hs8f". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:45:27 crc kubenswrapper[4558]: I0120 18:45:27.502017 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3-scripts" (OuterVolumeSpecName: "scripts") pod "0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3" (UID: "0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:45:27 crc kubenswrapper[4558]: I0120 18:45:27.502622 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3-config-data" (OuterVolumeSpecName: "config-data") pod "0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3" (UID: "0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:45:27 crc kubenswrapper[4558]: I0120 18:45:27.583653 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3-logs\") on node \"crc\" DevicePath \"\"" Jan 20 18:45:27 crc kubenswrapper[4558]: I0120 18:45:27.583814 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 18:45:27 crc kubenswrapper[4558]: I0120 18:45:27.583877 4558 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Jan 20 18:45:27 crc kubenswrapper[4558]: I0120 18:45:27.583938 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 18:45:27 crc kubenswrapper[4558]: I0120 18:45:27.583994 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2hs8f\" (UniqueName: \"kubernetes.io/projected/0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3-kube-api-access-2hs8f\") on node \"crc\" DevicePath \"\"" Jan 20 18:45:27 crc kubenswrapper[4558]: I0120 18:45:27.691905 4558 generic.go:334] "Generic (PLEG): container finished" podID="0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3" containerID="608c55329d1788390874107f80379fb6c6a092750c643d8249f066787fc32e77" exitCode=137 Jan 20 18:45:27 crc kubenswrapper[4558]: I0120 18:45:27.692009 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="horizon-kuttl-tests/horizon-598f976c49-c8x57" Jan 20 18:45:27 crc kubenswrapper[4558]: I0120 18:45:27.692008 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/horizon-598f976c49-c8x57" event={"ID":"0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3","Type":"ContainerDied","Data":"608c55329d1788390874107f80379fb6c6a092750c643d8249f066787fc32e77"} Jan 20 18:45:27 crc kubenswrapper[4558]: I0120 18:45:27.692205 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/horizon-598f976c49-c8x57" event={"ID":"0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3","Type":"ContainerDied","Data":"6234a84329e44fbc491bfd409acee1a4bbbb0db1ffc1af86d7007eef34260864"} Jan 20 18:45:27 crc kubenswrapper[4558]: I0120 18:45:27.692236 4558 scope.go:117] "RemoveContainer" containerID="468366f51379cc21d36c5d1382767e010720b6ff7948e08b8ec4bcf062faedd1" Jan 20 18:45:27 crc kubenswrapper[4558]: I0120 18:45:27.722951 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["horizon-kuttl-tests/horizon-598f976c49-c8x57"] Jan 20 18:45:27 crc kubenswrapper[4558]: I0120 18:45:27.733235 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["horizon-kuttl-tests/horizon-598f976c49-c8x57"] Jan 20 18:45:27 crc kubenswrapper[4558]: I0120 18:45:27.847582 4558 scope.go:117] "RemoveContainer" containerID="608c55329d1788390874107f80379fb6c6a092750c643d8249f066787fc32e77" Jan 20 18:45:27 crc kubenswrapper[4558]: I0120 18:45:27.864738 4558 scope.go:117] "RemoveContainer" containerID="468366f51379cc21d36c5d1382767e010720b6ff7948e08b8ec4bcf062faedd1" Jan 20 18:45:27 crc kubenswrapper[4558]: E0120 18:45:27.865239 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"468366f51379cc21d36c5d1382767e010720b6ff7948e08b8ec4bcf062faedd1\": container with ID starting with 468366f51379cc21d36c5d1382767e010720b6ff7948e08b8ec4bcf062faedd1 not found: ID does not exist" containerID="468366f51379cc21d36c5d1382767e010720b6ff7948e08b8ec4bcf062faedd1" Jan 20 18:45:27 crc kubenswrapper[4558]: I0120 18:45:27.865286 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"468366f51379cc21d36c5d1382767e010720b6ff7948e08b8ec4bcf062faedd1"} err="failed to get container status \"468366f51379cc21d36c5d1382767e010720b6ff7948e08b8ec4bcf062faedd1\": rpc error: code = NotFound desc = could not find container \"468366f51379cc21d36c5d1382767e010720b6ff7948e08b8ec4bcf062faedd1\": container with ID starting with 468366f51379cc21d36c5d1382767e010720b6ff7948e08b8ec4bcf062faedd1 not found: ID does not exist" Jan 20 18:45:27 crc kubenswrapper[4558]: I0120 18:45:27.865319 4558 scope.go:117] "RemoveContainer" containerID="608c55329d1788390874107f80379fb6c6a092750c643d8249f066787fc32e77" Jan 20 18:45:27 crc kubenswrapper[4558]: E0120 18:45:27.865667 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"608c55329d1788390874107f80379fb6c6a092750c643d8249f066787fc32e77\": container with ID starting with 608c55329d1788390874107f80379fb6c6a092750c643d8249f066787fc32e77 not found: ID does not exist" containerID="608c55329d1788390874107f80379fb6c6a092750c643d8249f066787fc32e77" Jan 20 18:45:27 crc kubenswrapper[4558]: I0120 18:45:27.865701 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"608c55329d1788390874107f80379fb6c6a092750c643d8249f066787fc32e77"} err="failed to get container 
status \"608c55329d1788390874107f80379fb6c6a092750c643d8249f066787fc32e77\": rpc error: code = NotFound desc = could not find container \"608c55329d1788390874107f80379fb6c6a092750c643d8249f066787fc32e77\": container with ID starting with 608c55329d1788390874107f80379fb6c6a092750c643d8249f066787fc32e77 not found: ID does not exist" Jan 20 18:45:28 crc kubenswrapper[4558]: I0120 18:45:28.574156 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3" path="/var/lib/kubelet/pods/0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3/volumes" Jan 20 18:45:28 crc kubenswrapper[4558]: I0120 18:45:28.829465 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["horizon-kuttl-tests/horizon-754c467c7d-w875x"] Jan 20 18:45:28 crc kubenswrapper[4558]: E0120 18:45:28.829912 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="faa3fd5a-1d25-421a-8f05-046b725fa16c" containerName="horizon-log" Jan 20 18:45:28 crc kubenswrapper[4558]: I0120 18:45:28.829927 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="faa3fd5a-1d25-421a-8f05-046b725fa16c" containerName="horizon-log" Jan 20 18:45:28 crc kubenswrapper[4558]: E0120 18:45:28.829985 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3" containerName="horizon" Jan 20 18:45:28 crc kubenswrapper[4558]: I0120 18:45:28.829992 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3" containerName="horizon" Jan 20 18:45:28 crc kubenswrapper[4558]: E0120 18:45:28.830004 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8af16e38-e131-4735-a788-2aca07129693" containerName="extract-content" Jan 20 18:45:28 crc kubenswrapper[4558]: I0120 18:45:28.830011 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8af16e38-e131-4735-a788-2aca07129693" containerName="extract-content" Jan 20 18:45:28 crc kubenswrapper[4558]: E0120 18:45:28.830020 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3" containerName="horizon-log" Jan 20 18:45:28 crc kubenswrapper[4558]: I0120 18:45:28.830025 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3" containerName="horizon-log" Jan 20 18:45:28 crc kubenswrapper[4558]: E0120 18:45:28.830047 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="faa3fd5a-1d25-421a-8f05-046b725fa16c" containerName="horizon" Jan 20 18:45:28 crc kubenswrapper[4558]: I0120 18:45:28.830053 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="faa3fd5a-1d25-421a-8f05-046b725fa16c" containerName="horizon" Jan 20 18:45:28 crc kubenswrapper[4558]: E0120 18:45:28.830059 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8af16e38-e131-4735-a788-2aca07129693" containerName="registry-server" Jan 20 18:45:28 crc kubenswrapper[4558]: I0120 18:45:28.830065 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8af16e38-e131-4735-a788-2aca07129693" containerName="registry-server" Jan 20 18:45:28 crc kubenswrapper[4558]: E0120 18:45:28.830074 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8af16e38-e131-4735-a788-2aca07129693" containerName="extract-utilities" Jan 20 18:45:28 crc kubenswrapper[4558]: I0120 18:45:28.830079 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8af16e38-e131-4735-a788-2aca07129693" containerName="extract-utilities" Jan 20 18:45:28 crc kubenswrapper[4558]: I0120 18:45:28.830268 4558 
memory_manager.go:354] "RemoveStaleState removing state" podUID="0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3" containerName="horizon" Jan 20 18:45:28 crc kubenswrapper[4558]: I0120 18:45:28.830287 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d8e5344-72f5-4f27-bfbf-8b6a00c3d3e3" containerName="horizon-log" Jan 20 18:45:28 crc kubenswrapper[4558]: I0120 18:45:28.830300 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="faa3fd5a-1d25-421a-8f05-046b725fa16c" containerName="horizon" Jan 20 18:45:28 crc kubenswrapper[4558]: I0120 18:45:28.830307 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8af16e38-e131-4735-a788-2aca07129693" containerName="registry-server" Jan 20 18:45:28 crc kubenswrapper[4558]: I0120 18:45:28.830315 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="faa3fd5a-1d25-421a-8f05-046b725fa16c" containerName="horizon-log" Jan 20 18:45:28 crc kubenswrapper[4558]: I0120 18:45:28.831283 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/horizon-754c467c7d-w875x" Jan 20 18:45:28 crc kubenswrapper[4558]: I0120 18:45:28.839332 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/horizon-754c467c7d-w875x"] Jan 20 18:45:28 crc kubenswrapper[4558]: I0120 18:45:28.840496 4558 reflector.go:368] Caches populated for *v1.Secret from object-"horizon-kuttl-tests"/"horizon" Jan 20 18:45:28 crc kubenswrapper[4558]: I0120 18:45:28.840930 4558 reflector.go:368] Caches populated for *v1.Secret from object-"horizon-kuttl-tests"/"combined-ca-bundle" Jan 20 18:45:28 crc kubenswrapper[4558]: I0120 18:45:28.841045 4558 reflector.go:368] Caches populated for *v1.Secret from object-"horizon-kuttl-tests"/"cert-horizon-svc" Jan 20 18:45:28 crc kubenswrapper[4558]: I0120 18:45:28.840963 4558 reflector.go:368] Caches populated for *v1.Secret from object-"horizon-kuttl-tests"/"horizon-horizon-dockercfg-4fv4v" Jan 20 18:45:28 crc kubenswrapper[4558]: I0120 18:45:28.843666 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"horizon-kuttl-tests"/"horizon-scripts" Jan 20 18:45:28 crc kubenswrapper[4558]: I0120 18:45:28.843697 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"horizon-kuttl-tests"/"horizon-config-data" Jan 20 18:45:28 crc kubenswrapper[4558]: I0120 18:45:28.880404 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["horizon-kuttl-tests/horizon-8cd586586-mn6lm"] Jan 20 18:45:28 crc kubenswrapper[4558]: I0120 18:45:28.881672 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="horizon-kuttl-tests/horizon-8cd586586-mn6lm" Jan 20 18:45:28 crc kubenswrapper[4558]: I0120 18:45:28.894333 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/horizon-8cd586586-mn6lm"] Jan 20 18:45:29 crc kubenswrapper[4558]: I0120 18:45:29.007999 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/c4475f51-d8fe-4b28-8795-936c54f1a20f-horizon-secret-key\") pod \"horizon-8cd586586-mn6lm\" (UID: \"c4475f51-d8fe-4b28-8795-936c54f1a20f\") " pod="horizon-kuttl-tests/horizon-8cd586586-mn6lm" Jan 20 18:45:29 crc kubenswrapper[4558]: I0120 18:45:29.008048 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583-logs\") pod \"horizon-754c467c7d-w875x\" (UID: \"95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583\") " pod="horizon-kuttl-tests/horizon-754c467c7d-w875x" Jan 20 18:45:29 crc kubenswrapper[4558]: I0120 18:45:29.008071 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/c4475f51-d8fe-4b28-8795-936c54f1a20f-horizon-tls-certs\") pod \"horizon-8cd586586-mn6lm\" (UID: \"c4475f51-d8fe-4b28-8795-936c54f1a20f\") " pod="horizon-kuttl-tests/horizon-8cd586586-mn6lm" Jan 20 18:45:29 crc kubenswrapper[4558]: I0120 18:45:29.008099 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tgvkt\" (UniqueName: \"kubernetes.io/projected/c4475f51-d8fe-4b28-8795-936c54f1a20f-kube-api-access-tgvkt\") pod \"horizon-8cd586586-mn6lm\" (UID: \"c4475f51-d8fe-4b28-8795-936c54f1a20f\") " pod="horizon-kuttl-tests/horizon-8cd586586-mn6lm" Jan 20 18:45:29 crc kubenswrapper[4558]: I0120 18:45:29.008131 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583-config-data\") pod \"horizon-754c467c7d-w875x\" (UID: \"95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583\") " pod="horizon-kuttl-tests/horizon-754c467c7d-w875x" Jan 20 18:45:29 crc kubenswrapper[4558]: I0120 18:45:29.008250 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583-horizon-tls-certs\") pod \"horizon-754c467c7d-w875x\" (UID: \"95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583\") " pod="horizon-kuttl-tests/horizon-754c467c7d-w875x" Jan 20 18:45:29 crc kubenswrapper[4558]: I0120 18:45:29.008331 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4475f51-d8fe-4b28-8795-936c54f1a20f-combined-ca-bundle\") pod \"horizon-8cd586586-mn6lm\" (UID: \"c4475f51-d8fe-4b28-8795-936c54f1a20f\") " pod="horizon-kuttl-tests/horizon-8cd586586-mn6lm" Jan 20 18:45:29 crc kubenswrapper[4558]: I0120 18:45:29.008370 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c4475f51-d8fe-4b28-8795-936c54f1a20f-scripts\") pod \"horizon-8cd586586-mn6lm\" (UID: \"c4475f51-d8fe-4b28-8795-936c54f1a20f\") " pod="horizon-kuttl-tests/horizon-8cd586586-mn6lm" Jan 20 18:45:29 crc kubenswrapper[4558]: I0120 
18:45:29.008391 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583-scripts\") pod \"horizon-754c467c7d-w875x\" (UID: \"95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583\") " pod="horizon-kuttl-tests/horizon-754c467c7d-w875x" Jan 20 18:45:29 crc kubenswrapper[4558]: I0120 18:45:29.008412 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583-combined-ca-bundle\") pod \"horizon-754c467c7d-w875x\" (UID: \"95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583\") " pod="horizon-kuttl-tests/horizon-754c467c7d-w875x" Jan 20 18:45:29 crc kubenswrapper[4558]: I0120 18:45:29.008438 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583-horizon-secret-key\") pod \"horizon-754c467c7d-w875x\" (UID: \"95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583\") " pod="horizon-kuttl-tests/horizon-754c467c7d-w875x" Jan 20 18:45:29 crc kubenswrapper[4558]: I0120 18:45:29.008455 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c4475f51-d8fe-4b28-8795-936c54f1a20f-config-data\") pod \"horizon-8cd586586-mn6lm\" (UID: \"c4475f51-d8fe-4b28-8795-936c54f1a20f\") " pod="horizon-kuttl-tests/horizon-8cd586586-mn6lm" Jan 20 18:45:29 crc kubenswrapper[4558]: I0120 18:45:29.008471 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c4475f51-d8fe-4b28-8795-936c54f1a20f-logs\") pod \"horizon-8cd586586-mn6lm\" (UID: \"c4475f51-d8fe-4b28-8795-936c54f1a20f\") " pod="horizon-kuttl-tests/horizon-8cd586586-mn6lm" Jan 20 18:45:29 crc kubenswrapper[4558]: I0120 18:45:29.008499 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j9288\" (UniqueName: \"kubernetes.io/projected/95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583-kube-api-access-j9288\") pod \"horizon-754c467c7d-w875x\" (UID: \"95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583\") " pod="horizon-kuttl-tests/horizon-754c467c7d-w875x" Jan 20 18:45:29 crc kubenswrapper[4558]: I0120 18:45:29.109620 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j9288\" (UniqueName: \"kubernetes.io/projected/95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583-kube-api-access-j9288\") pod \"horizon-754c467c7d-w875x\" (UID: \"95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583\") " pod="horizon-kuttl-tests/horizon-754c467c7d-w875x" Jan 20 18:45:29 crc kubenswrapper[4558]: I0120 18:45:29.109703 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/c4475f51-d8fe-4b28-8795-936c54f1a20f-horizon-secret-key\") pod \"horizon-8cd586586-mn6lm\" (UID: \"c4475f51-d8fe-4b28-8795-936c54f1a20f\") " pod="horizon-kuttl-tests/horizon-8cd586586-mn6lm" Jan 20 18:45:29 crc kubenswrapper[4558]: I0120 18:45:29.109730 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583-logs\") pod \"horizon-754c467c7d-w875x\" (UID: \"95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583\") " 
pod="horizon-kuttl-tests/horizon-754c467c7d-w875x" Jan 20 18:45:29 crc kubenswrapper[4558]: I0120 18:45:29.109750 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/c4475f51-d8fe-4b28-8795-936c54f1a20f-horizon-tls-certs\") pod \"horizon-8cd586586-mn6lm\" (UID: \"c4475f51-d8fe-4b28-8795-936c54f1a20f\") " pod="horizon-kuttl-tests/horizon-8cd586586-mn6lm" Jan 20 18:45:29 crc kubenswrapper[4558]: I0120 18:45:29.109772 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tgvkt\" (UniqueName: \"kubernetes.io/projected/c4475f51-d8fe-4b28-8795-936c54f1a20f-kube-api-access-tgvkt\") pod \"horizon-8cd586586-mn6lm\" (UID: \"c4475f51-d8fe-4b28-8795-936c54f1a20f\") " pod="horizon-kuttl-tests/horizon-8cd586586-mn6lm" Jan 20 18:45:29 crc kubenswrapper[4558]: I0120 18:45:29.109798 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583-config-data\") pod \"horizon-754c467c7d-w875x\" (UID: \"95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583\") " pod="horizon-kuttl-tests/horizon-754c467c7d-w875x" Jan 20 18:45:29 crc kubenswrapper[4558]: I0120 18:45:29.109824 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583-horizon-tls-certs\") pod \"horizon-754c467c7d-w875x\" (UID: \"95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583\") " pod="horizon-kuttl-tests/horizon-754c467c7d-w875x" Jan 20 18:45:29 crc kubenswrapper[4558]: I0120 18:45:29.109841 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4475f51-d8fe-4b28-8795-936c54f1a20f-combined-ca-bundle\") pod \"horizon-8cd586586-mn6lm\" (UID: \"c4475f51-d8fe-4b28-8795-936c54f1a20f\") " pod="horizon-kuttl-tests/horizon-8cd586586-mn6lm" Jan 20 18:45:29 crc kubenswrapper[4558]: I0120 18:45:29.109869 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c4475f51-d8fe-4b28-8795-936c54f1a20f-scripts\") pod \"horizon-8cd586586-mn6lm\" (UID: \"c4475f51-d8fe-4b28-8795-936c54f1a20f\") " pod="horizon-kuttl-tests/horizon-8cd586586-mn6lm" Jan 20 18:45:29 crc kubenswrapper[4558]: I0120 18:45:29.109888 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583-scripts\") pod \"horizon-754c467c7d-w875x\" (UID: \"95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583\") " pod="horizon-kuttl-tests/horizon-754c467c7d-w875x" Jan 20 18:45:29 crc kubenswrapper[4558]: I0120 18:45:29.109914 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583-combined-ca-bundle\") pod \"horizon-754c467c7d-w875x\" (UID: \"95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583\") " pod="horizon-kuttl-tests/horizon-754c467c7d-w875x" Jan 20 18:45:29 crc kubenswrapper[4558]: I0120 18:45:29.109936 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583-horizon-secret-key\") pod \"horizon-754c467c7d-w875x\" (UID: \"95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583\") " 
pod="horizon-kuttl-tests/horizon-754c467c7d-w875x" Jan 20 18:45:29 crc kubenswrapper[4558]: I0120 18:45:29.109951 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c4475f51-d8fe-4b28-8795-936c54f1a20f-config-data\") pod \"horizon-8cd586586-mn6lm\" (UID: \"c4475f51-d8fe-4b28-8795-936c54f1a20f\") " pod="horizon-kuttl-tests/horizon-8cd586586-mn6lm" Jan 20 18:45:29 crc kubenswrapper[4558]: I0120 18:45:29.109967 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c4475f51-d8fe-4b28-8795-936c54f1a20f-logs\") pod \"horizon-8cd586586-mn6lm\" (UID: \"c4475f51-d8fe-4b28-8795-936c54f1a20f\") " pod="horizon-kuttl-tests/horizon-8cd586586-mn6lm" Jan 20 18:45:29 crc kubenswrapper[4558]: I0120 18:45:29.110207 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583-logs\") pod \"horizon-754c467c7d-w875x\" (UID: \"95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583\") " pod="horizon-kuttl-tests/horizon-754c467c7d-w875x" Jan 20 18:45:29 crc kubenswrapper[4558]: I0120 18:45:29.111015 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583-scripts\") pod \"horizon-754c467c7d-w875x\" (UID: \"95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583\") " pod="horizon-kuttl-tests/horizon-754c467c7d-w875x" Jan 20 18:45:29 crc kubenswrapper[4558]: I0120 18:45:29.111137 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c4475f51-d8fe-4b28-8795-936c54f1a20f-logs\") pod \"horizon-8cd586586-mn6lm\" (UID: \"c4475f51-d8fe-4b28-8795-936c54f1a20f\") " pod="horizon-kuttl-tests/horizon-8cd586586-mn6lm" Jan 20 18:45:29 crc kubenswrapper[4558]: I0120 18:45:29.111611 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c4475f51-d8fe-4b28-8795-936c54f1a20f-config-data\") pod \"horizon-8cd586586-mn6lm\" (UID: \"c4475f51-d8fe-4b28-8795-936c54f1a20f\") " pod="horizon-kuttl-tests/horizon-8cd586586-mn6lm" Jan 20 18:45:29 crc kubenswrapper[4558]: I0120 18:45:29.112147 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583-config-data\") pod \"horizon-754c467c7d-w875x\" (UID: \"95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583\") " pod="horizon-kuttl-tests/horizon-754c467c7d-w875x" Jan 20 18:45:29 crc kubenswrapper[4558]: I0120 18:45:29.112226 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c4475f51-d8fe-4b28-8795-936c54f1a20f-scripts\") pod \"horizon-8cd586586-mn6lm\" (UID: \"c4475f51-d8fe-4b28-8795-936c54f1a20f\") " pod="horizon-kuttl-tests/horizon-8cd586586-mn6lm" Jan 20 18:45:29 crc kubenswrapper[4558]: I0120 18:45:29.115997 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/c4475f51-d8fe-4b28-8795-936c54f1a20f-horizon-secret-key\") pod \"horizon-8cd586586-mn6lm\" (UID: \"c4475f51-d8fe-4b28-8795-936c54f1a20f\") " pod="horizon-kuttl-tests/horizon-8cd586586-mn6lm" Jan 20 18:45:29 crc kubenswrapper[4558]: I0120 18:45:29.116345 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583-horizon-tls-certs\") pod \"horizon-754c467c7d-w875x\" (UID: \"95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583\") " pod="horizon-kuttl-tests/horizon-754c467c7d-w875x" Jan 20 18:45:29 crc kubenswrapper[4558]: I0120 18:45:29.116465 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4475f51-d8fe-4b28-8795-936c54f1a20f-combined-ca-bundle\") pod \"horizon-8cd586586-mn6lm\" (UID: \"c4475f51-d8fe-4b28-8795-936c54f1a20f\") " pod="horizon-kuttl-tests/horizon-8cd586586-mn6lm" Jan 20 18:45:29 crc kubenswrapper[4558]: I0120 18:45:29.116737 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583-horizon-secret-key\") pod \"horizon-754c467c7d-w875x\" (UID: \"95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583\") " pod="horizon-kuttl-tests/horizon-754c467c7d-w875x" Jan 20 18:45:29 crc kubenswrapper[4558]: I0120 18:45:29.116967 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583-combined-ca-bundle\") pod \"horizon-754c467c7d-w875x\" (UID: \"95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583\") " pod="horizon-kuttl-tests/horizon-754c467c7d-w875x" Jan 20 18:45:29 crc kubenswrapper[4558]: I0120 18:45:29.117270 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/c4475f51-d8fe-4b28-8795-936c54f1a20f-horizon-tls-certs\") pod \"horizon-8cd586586-mn6lm\" (UID: \"c4475f51-d8fe-4b28-8795-936c54f1a20f\") " pod="horizon-kuttl-tests/horizon-8cd586586-mn6lm" Jan 20 18:45:29 crc kubenswrapper[4558]: I0120 18:45:29.124369 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j9288\" (UniqueName: \"kubernetes.io/projected/95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583-kube-api-access-j9288\") pod \"horizon-754c467c7d-w875x\" (UID: \"95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583\") " pod="horizon-kuttl-tests/horizon-754c467c7d-w875x" Jan 20 18:45:29 crc kubenswrapper[4558]: I0120 18:45:29.124922 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tgvkt\" (UniqueName: \"kubernetes.io/projected/c4475f51-d8fe-4b28-8795-936c54f1a20f-kube-api-access-tgvkt\") pod \"horizon-8cd586586-mn6lm\" (UID: \"c4475f51-d8fe-4b28-8795-936c54f1a20f\") " pod="horizon-kuttl-tests/horizon-8cd586586-mn6lm" Jan 20 18:45:29 crc kubenswrapper[4558]: I0120 18:45:29.145683 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/horizon-754c467c7d-w875x" Jan 20 18:45:29 crc kubenswrapper[4558]: I0120 18:45:29.195470 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="horizon-kuttl-tests/horizon-8cd586586-mn6lm" Jan 20 18:45:29 crc kubenswrapper[4558]: I0120 18:45:29.558860 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/horizon-754c467c7d-w875x"] Jan 20 18:45:29 crc kubenswrapper[4558]: I0120 18:45:29.620347 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/horizon-8cd586586-mn6lm"] Jan 20 18:45:29 crc kubenswrapper[4558]: W0120 18:45:29.620666 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc4475f51_d8fe_4b28_8795_936c54f1a20f.slice/crio-b00ab3dddd5e7675463de88d275fc60cf730d1c8210c91e84b8e6f4ef61fc585 WatchSource:0}: Error finding container b00ab3dddd5e7675463de88d275fc60cf730d1c8210c91e84b8e6f4ef61fc585: Status 404 returned error can't find the container with id b00ab3dddd5e7675463de88d275fc60cf730d1c8210c91e84b8e6f4ef61fc585 Jan 20 18:45:29 crc kubenswrapper[4558]: I0120 18:45:29.721768 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/horizon-8cd586586-mn6lm" event={"ID":"c4475f51-d8fe-4b28-8795-936c54f1a20f","Type":"ContainerStarted","Data":"bfb3aa89ed656c3833534a18481937901cb1380920ef0efba84b233acb0543a8"} Jan 20 18:45:29 crc kubenswrapper[4558]: I0120 18:45:29.721833 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/horizon-8cd586586-mn6lm" event={"ID":"c4475f51-d8fe-4b28-8795-936c54f1a20f","Type":"ContainerStarted","Data":"b00ab3dddd5e7675463de88d275fc60cf730d1c8210c91e84b8e6f4ef61fc585"} Jan 20 18:45:29 crc kubenswrapper[4558]: I0120 18:45:29.723892 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/horizon-754c467c7d-w875x" event={"ID":"95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583","Type":"ContainerStarted","Data":"0bfbdd89ca382da7993c2c2e7b6cc68f70c7dd1bd276a7f6949813759ec0b227"} Jan 20 18:45:29 crc kubenswrapper[4558]: I0120 18:45:29.723945 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/horizon-754c467c7d-w875x" event={"ID":"95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583","Type":"ContainerStarted","Data":"e784bb0a54bdb289d8deb2544b73e4dc6164ca51a6d335d3bd0db0c92f53b798"} Jan 20 18:45:30 crc kubenswrapper[4558]: I0120 18:45:30.736705 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/horizon-8cd586586-mn6lm" event={"ID":"c4475f51-d8fe-4b28-8795-936c54f1a20f","Type":"ContainerStarted","Data":"421d081cca941e1c5b614cc3a5bd5b3f957acfa82042d4c171c75b89e2c0ecac"} Jan 20 18:45:30 crc kubenswrapper[4558]: I0120 18:45:30.740734 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/horizon-754c467c7d-w875x" event={"ID":"95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583","Type":"ContainerStarted","Data":"4593653edb6a833b449f93755bf65bc53044826df22199c7ffcfd1fb5bb6b0a6"} Jan 20 18:45:30 crc kubenswrapper[4558]: I0120 18:45:30.774088 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="horizon-kuttl-tests/horizon-8cd586586-mn6lm" podStartSLOduration=2.774058107 podStartE2EDuration="2.774058107s" podCreationTimestamp="2026-01-20 18:45:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:45:30.762770222 +0000 UTC m=+7424.523108189" watchObservedRunningTime="2026-01-20 18:45:30.774058107 +0000 UTC m=+7424.534396074" Jan 20 18:45:32 crc kubenswrapper[4558]: I0120 18:45:32.225518 4558 kubelet.go:2542] "SyncLoop 
(probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-srbtw" Jan 20 18:45:32 crc kubenswrapper[4558]: I0120 18:45:32.225916 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-srbtw" Jan 20 18:45:32 crc kubenswrapper[4558]: I0120 18:45:32.261290 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-srbtw" Jan 20 18:45:32 crc kubenswrapper[4558]: I0120 18:45:32.279909 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="horizon-kuttl-tests/horizon-754c467c7d-w875x" podStartSLOduration=4.279890422 podStartE2EDuration="4.279890422s" podCreationTimestamp="2026-01-20 18:45:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 18:45:30.792185039 +0000 UTC m=+7424.552523007" watchObservedRunningTime="2026-01-20 18:45:32.279890422 +0000 UTC m=+7426.040228378" Jan 20 18:45:32 crc kubenswrapper[4558]: I0120 18:45:32.796880 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-srbtw" Jan 20 18:45:33 crc kubenswrapper[4558]: I0120 18:45:33.703471 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-srbtw"] Jan 20 18:45:34 crc kubenswrapper[4558]: I0120 18:45:34.776237 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-srbtw" podUID="bf5d23cd-b290-4b1c-b84a-d93f1817c6ae" containerName="registry-server" containerID="cri-o://1ad1be4537b145745b4a25db2ec84fd331f11a1b157625db0546f892a8775716" gracePeriod=2 Jan 20 18:45:35 crc kubenswrapper[4558]: I0120 18:45:35.674959 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-srbtw" Jan 20 18:45:35 crc kubenswrapper[4558]: I0120 18:45:35.784073 4558 generic.go:334] "Generic (PLEG): container finished" podID="bf5d23cd-b290-4b1c-b84a-d93f1817c6ae" containerID="1ad1be4537b145745b4a25db2ec84fd331f11a1b157625db0546f892a8775716" exitCode=0 Jan 20 18:45:35 crc kubenswrapper[4558]: I0120 18:45:35.784138 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-srbtw" event={"ID":"bf5d23cd-b290-4b1c-b84a-d93f1817c6ae","Type":"ContainerDied","Data":"1ad1be4537b145745b4a25db2ec84fd331f11a1b157625db0546f892a8775716"} Jan 20 18:45:35 crc kubenswrapper[4558]: I0120 18:45:35.784200 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-srbtw" Jan 20 18:45:35 crc kubenswrapper[4558]: I0120 18:45:35.784214 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-srbtw" event={"ID":"bf5d23cd-b290-4b1c-b84a-d93f1817c6ae","Type":"ContainerDied","Data":"1e7b7aad8e4b11c955fcd73c8e7384561861ac115888b5d668f218c48497374e"} Jan 20 18:45:35 crc kubenswrapper[4558]: I0120 18:45:35.784241 4558 scope.go:117] "RemoveContainer" containerID="1ad1be4537b145745b4a25db2ec84fd331f11a1b157625db0546f892a8775716" Jan 20 18:45:35 crc kubenswrapper[4558]: I0120 18:45:35.802361 4558 scope.go:117] "RemoveContainer" containerID="b2586c214f5fa9049949c2cadfec26270afd3b8bdd0d56f562450353a7f46d22" Jan 20 18:45:35 crc kubenswrapper[4558]: I0120 18:45:35.821848 4558 scope.go:117] "RemoveContainer" containerID="15232bc6b0abf02f1d5c71f75d58d8cb0c74297b517ba33b66aa243d6c6afee9" Jan 20 18:45:35 crc kubenswrapper[4558]: I0120 18:45:35.828280 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-86xcm\" (UniqueName: \"kubernetes.io/projected/bf5d23cd-b290-4b1c-b84a-d93f1817c6ae-kube-api-access-86xcm\") pod \"bf5d23cd-b290-4b1c-b84a-d93f1817c6ae\" (UID: \"bf5d23cd-b290-4b1c-b84a-d93f1817c6ae\") " Jan 20 18:45:35 crc kubenswrapper[4558]: I0120 18:45:35.828332 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bf5d23cd-b290-4b1c-b84a-d93f1817c6ae-catalog-content\") pod \"bf5d23cd-b290-4b1c-b84a-d93f1817c6ae\" (UID: \"bf5d23cd-b290-4b1c-b84a-d93f1817c6ae\") " Jan 20 18:45:35 crc kubenswrapper[4558]: I0120 18:45:35.828509 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bf5d23cd-b290-4b1c-b84a-d93f1817c6ae-utilities\") pod \"bf5d23cd-b290-4b1c-b84a-d93f1817c6ae\" (UID: \"bf5d23cd-b290-4b1c-b84a-d93f1817c6ae\") " Jan 20 18:45:35 crc kubenswrapper[4558]: I0120 18:45:35.829280 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bf5d23cd-b290-4b1c-b84a-d93f1817c6ae-utilities" (OuterVolumeSpecName: "utilities") pod "bf5d23cd-b290-4b1c-b84a-d93f1817c6ae" (UID: "bf5d23cd-b290-4b1c-b84a-d93f1817c6ae"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:45:35 crc kubenswrapper[4558]: I0120 18:45:35.834838 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf5d23cd-b290-4b1c-b84a-d93f1817c6ae-kube-api-access-86xcm" (OuterVolumeSpecName: "kube-api-access-86xcm") pod "bf5d23cd-b290-4b1c-b84a-d93f1817c6ae" (UID: "bf5d23cd-b290-4b1c-b84a-d93f1817c6ae"). InnerVolumeSpecName "kube-api-access-86xcm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:45:35 crc kubenswrapper[4558]: I0120 18:45:35.881524 4558 scope.go:117] "RemoveContainer" containerID="1ad1be4537b145745b4a25db2ec84fd331f11a1b157625db0546f892a8775716" Jan 20 18:45:35 crc kubenswrapper[4558]: E0120 18:45:35.882041 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1ad1be4537b145745b4a25db2ec84fd331f11a1b157625db0546f892a8775716\": container with ID starting with 1ad1be4537b145745b4a25db2ec84fd331f11a1b157625db0546f892a8775716 not found: ID does not exist" containerID="1ad1be4537b145745b4a25db2ec84fd331f11a1b157625db0546f892a8775716" Jan 20 18:45:35 crc kubenswrapper[4558]: I0120 18:45:35.882085 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1ad1be4537b145745b4a25db2ec84fd331f11a1b157625db0546f892a8775716"} err="failed to get container status \"1ad1be4537b145745b4a25db2ec84fd331f11a1b157625db0546f892a8775716\": rpc error: code = NotFound desc = could not find container \"1ad1be4537b145745b4a25db2ec84fd331f11a1b157625db0546f892a8775716\": container with ID starting with 1ad1be4537b145745b4a25db2ec84fd331f11a1b157625db0546f892a8775716 not found: ID does not exist" Jan 20 18:45:35 crc kubenswrapper[4558]: I0120 18:45:35.882125 4558 scope.go:117] "RemoveContainer" containerID="b2586c214f5fa9049949c2cadfec26270afd3b8bdd0d56f562450353a7f46d22" Jan 20 18:45:35 crc kubenswrapper[4558]: I0120 18:45:35.882370 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bf5d23cd-b290-4b1c-b84a-d93f1817c6ae-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bf5d23cd-b290-4b1c-b84a-d93f1817c6ae" (UID: "bf5d23cd-b290-4b1c-b84a-d93f1817c6ae"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:45:35 crc kubenswrapper[4558]: E0120 18:45:35.882513 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b2586c214f5fa9049949c2cadfec26270afd3b8bdd0d56f562450353a7f46d22\": container with ID starting with b2586c214f5fa9049949c2cadfec26270afd3b8bdd0d56f562450353a7f46d22 not found: ID does not exist" containerID="b2586c214f5fa9049949c2cadfec26270afd3b8bdd0d56f562450353a7f46d22" Jan 20 18:45:35 crc kubenswrapper[4558]: I0120 18:45:35.882557 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b2586c214f5fa9049949c2cadfec26270afd3b8bdd0d56f562450353a7f46d22"} err="failed to get container status \"b2586c214f5fa9049949c2cadfec26270afd3b8bdd0d56f562450353a7f46d22\": rpc error: code = NotFound desc = could not find container \"b2586c214f5fa9049949c2cadfec26270afd3b8bdd0d56f562450353a7f46d22\": container with ID starting with b2586c214f5fa9049949c2cadfec26270afd3b8bdd0d56f562450353a7f46d22 not found: ID does not exist" Jan 20 18:45:35 crc kubenswrapper[4558]: I0120 18:45:35.882594 4558 scope.go:117] "RemoveContainer" containerID="15232bc6b0abf02f1d5c71f75d58d8cb0c74297b517ba33b66aa243d6c6afee9" Jan 20 18:45:35 crc kubenswrapper[4558]: E0120 18:45:35.883233 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"15232bc6b0abf02f1d5c71f75d58d8cb0c74297b517ba33b66aa243d6c6afee9\": container with ID starting with 15232bc6b0abf02f1d5c71f75d58d8cb0c74297b517ba33b66aa243d6c6afee9 not found: ID does not exist" containerID="15232bc6b0abf02f1d5c71f75d58d8cb0c74297b517ba33b66aa243d6c6afee9" Jan 20 18:45:35 crc kubenswrapper[4558]: I0120 18:45:35.883260 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"15232bc6b0abf02f1d5c71f75d58d8cb0c74297b517ba33b66aa243d6c6afee9"} err="failed to get container status \"15232bc6b0abf02f1d5c71f75d58d8cb0c74297b517ba33b66aa243d6c6afee9\": rpc error: code = NotFound desc = could not find container \"15232bc6b0abf02f1d5c71f75d58d8cb0c74297b517ba33b66aa243d6c6afee9\": container with ID starting with 15232bc6b0abf02f1d5c71f75d58d8cb0c74297b517ba33b66aa243d6c6afee9 not found: ID does not exist" Jan 20 18:45:35 crc kubenswrapper[4558]: I0120 18:45:35.931110 4558 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bf5d23cd-b290-4b1c-b84a-d93f1817c6ae-utilities\") on node \"crc\" DevicePath \"\"" Jan 20 18:45:35 crc kubenswrapper[4558]: I0120 18:45:35.931149 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-86xcm\" (UniqueName: \"kubernetes.io/projected/bf5d23cd-b290-4b1c-b84a-d93f1817c6ae-kube-api-access-86xcm\") on node \"crc\" DevicePath \"\"" Jan 20 18:45:35 crc kubenswrapper[4558]: I0120 18:45:35.931183 4558 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bf5d23cd-b290-4b1c-b84a-d93f1817c6ae-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 20 18:45:36 crc kubenswrapper[4558]: I0120 18:45:36.116200 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-srbtw"] Jan 20 18:45:36 crc kubenswrapper[4558]: I0120 18:45:36.119837 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-srbtw"] Jan 20 18:45:36 crc kubenswrapper[4558]: I0120 
18:45:36.589074 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf5d23cd-b290-4b1c-b84a-d93f1817c6ae" path="/var/lib/kubelet/pods/bf5d23cd-b290-4b1c-b84a-d93f1817c6ae/volumes" Jan 20 18:45:39 crc kubenswrapper[4558]: I0120 18:45:39.146155 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="horizon-kuttl-tests/horizon-754c467c7d-w875x" Jan 20 18:45:39 crc kubenswrapper[4558]: I0120 18:45:39.147349 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="horizon-kuttl-tests/horizon-754c467c7d-w875x" Jan 20 18:45:39 crc kubenswrapper[4558]: I0120 18:45:39.150190 4558 prober.go:107] "Probe failed" probeType="Startup" pod="horizon-kuttl-tests/horizon-754c467c7d-w875x" podUID="95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583" containerName="horizon" probeResult="failure" output="Get \"https://10.217.1.220:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.220:8443: connect: connection refused" Jan 20 18:45:39 crc kubenswrapper[4558]: I0120 18:45:39.196143 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="horizon-kuttl-tests/horizon-8cd586586-mn6lm" Jan 20 18:45:39 crc kubenswrapper[4558]: I0120 18:45:39.196302 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="horizon-kuttl-tests/horizon-8cd586586-mn6lm" Jan 20 18:45:39 crc kubenswrapper[4558]: I0120 18:45:39.197580 4558 prober.go:107] "Probe failed" probeType="Startup" pod="horizon-kuttl-tests/horizon-8cd586586-mn6lm" podUID="c4475f51-d8fe-4b28-8795-936c54f1a20f" containerName="horizon" probeResult="failure" output="Get \"https://10.217.1.221:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.221:8443: connect: connection refused" Jan 20 18:45:39 crc kubenswrapper[4558]: I0120 18:45:39.406749 4558 scope.go:117] "RemoveContainer" containerID="0787c1784df78a549e1b1832dde7625f1871d92d6d0b7095574c910e6ce85c39" Jan 20 18:45:39 crc kubenswrapper[4558]: I0120 18:45:39.428177 4558 scope.go:117] "RemoveContainer" containerID="eb6fe33e4dda395eccf2487f9595d079173c44e56a22cb3987c1e00b84d32dca" Jan 20 18:45:50 crc kubenswrapper[4558]: I0120 18:45:50.713564 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="horizon-kuttl-tests/horizon-754c467c7d-w875x" Jan 20 18:45:50 crc kubenswrapper[4558]: I0120 18:45:50.805134 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="horizon-kuttl-tests/horizon-8cd586586-mn6lm" Jan 20 18:45:51 crc kubenswrapper[4558]: I0120 18:45:51.910334 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-prw6x"] Jan 20 18:45:51 crc kubenswrapper[4558]: E0120 18:45:51.910931 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf5d23cd-b290-4b1c-b84a-d93f1817c6ae" containerName="extract-utilities" Jan 20 18:45:51 crc kubenswrapper[4558]: I0120 18:45:51.910947 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf5d23cd-b290-4b1c-b84a-d93f1817c6ae" containerName="extract-utilities" Jan 20 18:45:51 crc kubenswrapper[4558]: E0120 18:45:51.910966 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf5d23cd-b290-4b1c-b84a-d93f1817c6ae" containerName="registry-server" Jan 20 18:45:51 crc kubenswrapper[4558]: I0120 18:45:51.910972 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf5d23cd-b290-4b1c-b84a-d93f1817c6ae" containerName="registry-server" Jan 20 18:45:51 crc kubenswrapper[4558]: E0120 18:45:51.910997 4558 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="bf5d23cd-b290-4b1c-b84a-d93f1817c6ae" containerName="extract-content" Jan 20 18:45:51 crc kubenswrapper[4558]: I0120 18:45:51.911009 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf5d23cd-b290-4b1c-b84a-d93f1817c6ae" containerName="extract-content" Jan 20 18:45:51 crc kubenswrapper[4558]: I0120 18:45:51.911287 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="bf5d23cd-b290-4b1c-b84a-d93f1817c6ae" containerName="registry-server" Jan 20 18:45:51 crc kubenswrapper[4558]: I0120 18:45:51.912398 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-prw6x" Jan 20 18:45:51 crc kubenswrapper[4558]: I0120 18:45:51.919973 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-prw6x"] Jan 20 18:45:51 crc kubenswrapper[4558]: I0120 18:45:51.992962 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ca076029-983c-4c56-a821-793daa569460-catalog-content\") pod \"certified-operators-prw6x\" (UID: \"ca076029-983c-4c56-a821-793daa569460\") " pod="openshift-marketplace/certified-operators-prw6x" Jan 20 18:45:51 crc kubenswrapper[4558]: I0120 18:45:51.993079 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qngjm\" (UniqueName: \"kubernetes.io/projected/ca076029-983c-4c56-a821-793daa569460-kube-api-access-qngjm\") pod \"certified-operators-prw6x\" (UID: \"ca076029-983c-4c56-a821-793daa569460\") " pod="openshift-marketplace/certified-operators-prw6x" Jan 20 18:45:51 crc kubenswrapper[4558]: I0120 18:45:51.993139 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ca076029-983c-4c56-a821-793daa569460-utilities\") pod \"certified-operators-prw6x\" (UID: \"ca076029-983c-4c56-a821-793daa569460\") " pod="openshift-marketplace/certified-operators-prw6x" Jan 20 18:45:52 crc kubenswrapper[4558]: I0120 18:45:52.095223 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ca076029-983c-4c56-a821-793daa569460-catalog-content\") pod \"certified-operators-prw6x\" (UID: \"ca076029-983c-4c56-a821-793daa569460\") " pod="openshift-marketplace/certified-operators-prw6x" Jan 20 18:45:52 crc kubenswrapper[4558]: I0120 18:45:52.095303 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qngjm\" (UniqueName: \"kubernetes.io/projected/ca076029-983c-4c56-a821-793daa569460-kube-api-access-qngjm\") pod \"certified-operators-prw6x\" (UID: \"ca076029-983c-4c56-a821-793daa569460\") " pod="openshift-marketplace/certified-operators-prw6x" Jan 20 18:45:52 crc kubenswrapper[4558]: I0120 18:45:52.095347 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ca076029-983c-4c56-a821-793daa569460-utilities\") pod \"certified-operators-prw6x\" (UID: \"ca076029-983c-4c56-a821-793daa569460\") " pod="openshift-marketplace/certified-operators-prw6x" Jan 20 18:45:52 crc kubenswrapper[4558]: I0120 18:45:52.095750 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ca076029-983c-4c56-a821-793daa569460-catalog-content\") pod 
\"certified-operators-prw6x\" (UID: \"ca076029-983c-4c56-a821-793daa569460\") " pod="openshift-marketplace/certified-operators-prw6x" Jan 20 18:45:52 crc kubenswrapper[4558]: I0120 18:45:52.095831 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ca076029-983c-4c56-a821-793daa569460-utilities\") pod \"certified-operators-prw6x\" (UID: \"ca076029-983c-4c56-a821-793daa569460\") " pod="openshift-marketplace/certified-operators-prw6x" Jan 20 18:45:52 crc kubenswrapper[4558]: I0120 18:45:52.124895 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qngjm\" (UniqueName: \"kubernetes.io/projected/ca076029-983c-4c56-a821-793daa569460-kube-api-access-qngjm\") pod \"certified-operators-prw6x\" (UID: \"ca076029-983c-4c56-a821-793daa569460\") " pod="openshift-marketplace/certified-operators-prw6x" Jan 20 18:45:52 crc kubenswrapper[4558]: I0120 18:45:52.203038 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="horizon-kuttl-tests/horizon-754c467c7d-w875x" Jan 20 18:45:52 crc kubenswrapper[4558]: I0120 18:45:52.227388 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-prw6x" Jan 20 18:45:52 crc kubenswrapper[4558]: I0120 18:45:52.299386 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="horizon-kuttl-tests/horizon-8cd586586-mn6lm" Jan 20 18:45:52 crc kubenswrapper[4558]: I0120 18:45:52.351544 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["horizon-kuttl-tests/horizon-754c467c7d-w875x"] Jan 20 18:45:52 crc kubenswrapper[4558]: I0120 18:45:52.701859 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-prw6x"] Jan 20 18:45:52 crc kubenswrapper[4558]: W0120 18:45:52.702159 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podca076029_983c_4c56_a821_793daa569460.slice/crio-f4a10900eda427104c54b05d3691f9411216130bae8fc57189e61b5aaec287e3 WatchSource:0}: Error finding container f4a10900eda427104c54b05d3691f9411216130bae8fc57189e61b5aaec287e3: Status 404 returned error can't find the container with id f4a10900eda427104c54b05d3691f9411216130bae8fc57189e61b5aaec287e3 Jan 20 18:45:52 crc kubenswrapper[4558]: I0120 18:45:52.941419 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["horizon-kuttl-tests/horizon-8cd586586-mn6lm"] Jan 20 18:45:52 crc kubenswrapper[4558]: I0120 18:45:52.943275 4558 generic.go:334] "Generic (PLEG): container finished" podID="ca076029-983c-4c56-a821-793daa569460" containerID="422496854d3e021d78522a9add2a0e6883719f5358064cddec89d4c2c769e1cd" exitCode=0 Jan 20 18:45:52 crc kubenswrapper[4558]: I0120 18:45:52.943481 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-prw6x" event={"ID":"ca076029-983c-4c56-a821-793daa569460","Type":"ContainerDied","Data":"422496854d3e021d78522a9add2a0e6883719f5358064cddec89d4c2c769e1cd"} Jan 20 18:45:52 crc kubenswrapper[4558]: I0120 18:45:52.943547 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-prw6x" event={"ID":"ca076029-983c-4c56-a821-793daa569460","Type":"ContainerStarted","Data":"f4a10900eda427104c54b05d3691f9411216130bae8fc57189e61b5aaec287e3"} Jan 20 18:45:52 crc kubenswrapper[4558]: I0120 18:45:52.943693 4558 kuberuntime_container.go:808] "Killing container with 
a grace period" pod="horizon-kuttl-tests/horizon-8cd586586-mn6lm" podUID="c4475f51-d8fe-4b28-8795-936c54f1a20f" containerName="horizon-log" containerID="cri-o://bfb3aa89ed656c3833534a18481937901cb1380920ef0efba84b233acb0543a8" gracePeriod=30 Jan 20 18:45:52 crc kubenswrapper[4558]: I0120 18:45:52.943724 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="horizon-kuttl-tests/horizon-8cd586586-mn6lm" podUID="c4475f51-d8fe-4b28-8795-936c54f1a20f" containerName="horizon" containerID="cri-o://421d081cca941e1c5b614cc3a5bd5b3f957acfa82042d4c171c75b89e2c0ecac" gracePeriod=30 Jan 20 18:45:52 crc kubenswrapper[4558]: I0120 18:45:52.943783 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="horizon-kuttl-tests/horizon-754c467c7d-w875x" podUID="95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583" containerName="horizon-log" containerID="cri-o://0bfbdd89ca382da7993c2c2e7b6cc68f70c7dd1bd276a7f6949813759ec0b227" gracePeriod=30 Jan 20 18:45:52 crc kubenswrapper[4558]: I0120 18:45:52.943880 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="horizon-kuttl-tests/horizon-754c467c7d-w875x" podUID="95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583" containerName="horizon" containerID="cri-o://4593653edb6a833b449f93755bf65bc53044826df22199c7ffcfd1fb5bb6b0a6" gracePeriod=30 Jan 20 18:45:53 crc kubenswrapper[4558]: I0120 18:45:53.952459 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-prw6x" event={"ID":"ca076029-983c-4c56-a821-793daa569460","Type":"ContainerStarted","Data":"fcdd9b839469c57e206a7206f6cb9aa054231ea8037b147871c77d6db7cb10b6"} Jan 20 18:45:54 crc kubenswrapper[4558]: I0120 18:45:54.959991 4558 generic.go:334] "Generic (PLEG): container finished" podID="ca076029-983c-4c56-a821-793daa569460" containerID="fcdd9b839469c57e206a7206f6cb9aa054231ea8037b147871c77d6db7cb10b6" exitCode=0 Jan 20 18:45:54 crc kubenswrapper[4558]: I0120 18:45:54.960065 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-prw6x" event={"ID":"ca076029-983c-4c56-a821-793daa569460","Type":"ContainerDied","Data":"fcdd9b839469c57e206a7206f6cb9aa054231ea8037b147871c77d6db7cb10b6"} Jan 20 18:45:55 crc kubenswrapper[4558]: I0120 18:45:55.969020 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-prw6x" event={"ID":"ca076029-983c-4c56-a821-793daa569460","Type":"ContainerStarted","Data":"75666cebc67d6862a537b16d27655e49f7632d0ea714758bde246e6dc22ffa56"} Jan 20 18:45:55 crc kubenswrapper[4558]: I0120 18:45:55.992340 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-prw6x" podStartSLOduration=2.490909381 podStartE2EDuration="4.992321164s" podCreationTimestamp="2026-01-20 18:45:51 +0000 UTC" firstStartedPulling="2026-01-20 18:45:52.94731761 +0000 UTC m=+7446.707655576" lastFinishedPulling="2026-01-20 18:45:55.448729392 +0000 UTC m=+7449.209067359" observedRunningTime="2026-01-20 18:45:55.987279455 +0000 UTC m=+7449.747617422" watchObservedRunningTime="2026-01-20 18:45:55.992321164 +0000 UTC m=+7449.752659132" Jan 20 18:45:56 crc kubenswrapper[4558]: I0120 18:45:56.979959 4558 generic.go:334] "Generic (PLEG): container finished" podID="95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583" containerID="4593653edb6a833b449f93755bf65bc53044826df22199c7ffcfd1fb5bb6b0a6" exitCode=0 Jan 20 18:45:56 crc kubenswrapper[4558]: I0120 18:45:56.980067 4558 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="horizon-kuttl-tests/horizon-754c467c7d-w875x" event={"ID":"95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583","Type":"ContainerDied","Data":"4593653edb6a833b449f93755bf65bc53044826df22199c7ffcfd1fb5bb6b0a6"} Jan 20 18:45:56 crc kubenswrapper[4558]: I0120 18:45:56.982668 4558 generic.go:334] "Generic (PLEG): container finished" podID="c4475f51-d8fe-4b28-8795-936c54f1a20f" containerID="421d081cca941e1c5b614cc3a5bd5b3f957acfa82042d4c171c75b89e2c0ecac" exitCode=0 Jan 20 18:45:56 crc kubenswrapper[4558]: I0120 18:45:56.983119 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/horizon-8cd586586-mn6lm" event={"ID":"c4475f51-d8fe-4b28-8795-936c54f1a20f","Type":"ContainerDied","Data":"421d081cca941e1c5b614cc3a5bd5b3f957acfa82042d4c171c75b89e2c0ecac"} Jan 20 18:45:57 crc kubenswrapper[4558]: I0120 18:45:57.330388 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 18:45:57 crc kubenswrapper[4558]: I0120 18:45:57.330491 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 18:45:57 crc kubenswrapper[4558]: I0120 18:45:57.330579 4558 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" Jan 20 18:45:57 crc kubenswrapper[4558]: I0120 18:45:57.331727 4558 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"46c3b6e0005c86abe846af98397dbd3ec4bce46d6d33d8269c6f5d826617c1cf"} pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 20 18:45:57 crc kubenswrapper[4558]: I0120 18:45:57.331815 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" containerID="cri-o://46c3b6e0005c86abe846af98397dbd3ec4bce46d6d33d8269c6f5d826617c1cf" gracePeriod=600 Jan 20 18:45:57 crc kubenswrapper[4558]: I0120 18:45:57.997899 4558 generic.go:334] "Generic (PLEG): container finished" podID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerID="46c3b6e0005c86abe846af98397dbd3ec4bce46d6d33d8269c6f5d826617c1cf" exitCode=0 Jan 20 18:45:57 crc kubenswrapper[4558]: I0120 18:45:57.998000 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerDied","Data":"46c3b6e0005c86abe846af98397dbd3ec4bce46d6d33d8269c6f5d826617c1cf"} Jan 20 18:45:57 crc kubenswrapper[4558]: I0120 18:45:57.998320 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerStarted","Data":"499ec313cf27a56b071eb6186a175031344542ae6ab21658ac5fb0feccc71012"} Jan 20 18:45:57 crc kubenswrapper[4558]: I0120 18:45:57.998353 
4558 scope.go:117] "RemoveContainer" containerID="77ce04c6daee7d4f9b776bd1c80b448f9c4750d5cb5c7dd182ff28963a4bcf99" Jan 20 18:45:59 crc kubenswrapper[4558]: I0120 18:45:59.146879 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="horizon-kuttl-tests/horizon-754c467c7d-w875x" podUID="95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583" containerName="horizon" probeResult="failure" output="Get \"https://10.217.1.220:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.220:8443: connect: connection refused" Jan 20 18:45:59 crc kubenswrapper[4558]: I0120 18:45:59.196950 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="horizon-kuttl-tests/horizon-8cd586586-mn6lm" podUID="c4475f51-d8fe-4b28-8795-936c54f1a20f" containerName="horizon" probeResult="failure" output="Get \"https://10.217.1.221:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.221:8443: connect: connection refused" Jan 20 18:46:02 crc kubenswrapper[4558]: I0120 18:46:02.227771 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-prw6x" Jan 20 18:46:02 crc kubenswrapper[4558]: I0120 18:46:02.228244 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-prw6x" Jan 20 18:46:02 crc kubenswrapper[4558]: I0120 18:46:02.264525 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-prw6x" Jan 20 18:46:03 crc kubenswrapper[4558]: I0120 18:46:03.079405 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-prw6x" Jan 20 18:46:05 crc kubenswrapper[4558]: I0120 18:46:05.500301 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-prw6x"] Jan 20 18:46:05 crc kubenswrapper[4558]: I0120 18:46:05.500871 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-prw6x" podUID="ca076029-983c-4c56-a821-793daa569460" containerName="registry-server" containerID="cri-o://75666cebc67d6862a537b16d27655e49f7632d0ea714758bde246e6dc22ffa56" gracePeriod=2 Jan 20 18:46:05 crc kubenswrapper[4558]: I0120 18:46:05.841738 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-prw6x" Jan 20 18:46:06 crc kubenswrapper[4558]: I0120 18:46:06.023252 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ca076029-983c-4c56-a821-793daa569460-catalog-content\") pod \"ca076029-983c-4c56-a821-793daa569460\" (UID: \"ca076029-983c-4c56-a821-793daa569460\") " Jan 20 18:46:06 crc kubenswrapper[4558]: I0120 18:46:06.023327 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qngjm\" (UniqueName: \"kubernetes.io/projected/ca076029-983c-4c56-a821-793daa569460-kube-api-access-qngjm\") pod \"ca076029-983c-4c56-a821-793daa569460\" (UID: \"ca076029-983c-4c56-a821-793daa569460\") " Jan 20 18:46:06 crc kubenswrapper[4558]: I0120 18:46:06.023436 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ca076029-983c-4c56-a821-793daa569460-utilities\") pod \"ca076029-983c-4c56-a821-793daa569460\" (UID: \"ca076029-983c-4c56-a821-793daa569460\") " Jan 20 18:46:06 crc kubenswrapper[4558]: I0120 18:46:06.024577 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ca076029-983c-4c56-a821-793daa569460-utilities" (OuterVolumeSpecName: "utilities") pod "ca076029-983c-4c56-a821-793daa569460" (UID: "ca076029-983c-4c56-a821-793daa569460"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:46:06 crc kubenswrapper[4558]: I0120 18:46:06.030465 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ca076029-983c-4c56-a821-793daa569460-kube-api-access-qngjm" (OuterVolumeSpecName: "kube-api-access-qngjm") pod "ca076029-983c-4c56-a821-793daa569460" (UID: "ca076029-983c-4c56-a821-793daa569460"). InnerVolumeSpecName "kube-api-access-qngjm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:46:06 crc kubenswrapper[4558]: I0120 18:46:06.058869 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ca076029-983c-4c56-a821-793daa569460-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ca076029-983c-4c56-a821-793daa569460" (UID: "ca076029-983c-4c56-a821-793daa569460"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:46:06 crc kubenswrapper[4558]: I0120 18:46:06.070442 4558 generic.go:334] "Generic (PLEG): container finished" podID="ca076029-983c-4c56-a821-793daa569460" containerID="75666cebc67d6862a537b16d27655e49f7632d0ea714758bde246e6dc22ffa56" exitCode=0 Jan 20 18:46:06 crc kubenswrapper[4558]: I0120 18:46:06.070505 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-prw6x" event={"ID":"ca076029-983c-4c56-a821-793daa569460","Type":"ContainerDied","Data":"75666cebc67d6862a537b16d27655e49f7632d0ea714758bde246e6dc22ffa56"} Jan 20 18:46:06 crc kubenswrapper[4558]: I0120 18:46:06.070544 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-prw6x" Jan 20 18:46:06 crc kubenswrapper[4558]: I0120 18:46:06.070574 4558 scope.go:117] "RemoveContainer" containerID="75666cebc67d6862a537b16d27655e49f7632d0ea714758bde246e6dc22ffa56" Jan 20 18:46:06 crc kubenswrapper[4558]: I0120 18:46:06.070554 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-prw6x" event={"ID":"ca076029-983c-4c56-a821-793daa569460","Type":"ContainerDied","Data":"f4a10900eda427104c54b05d3691f9411216130bae8fc57189e61b5aaec287e3"} Jan 20 18:46:06 crc kubenswrapper[4558]: I0120 18:46:06.095996 4558 scope.go:117] "RemoveContainer" containerID="fcdd9b839469c57e206a7206f6cb9aa054231ea8037b147871c77d6db7cb10b6" Jan 20 18:46:06 crc kubenswrapper[4558]: I0120 18:46:06.102729 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-prw6x"] Jan 20 18:46:06 crc kubenswrapper[4558]: I0120 18:46:06.107303 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-prw6x"] Jan 20 18:46:06 crc kubenswrapper[4558]: I0120 18:46:06.125210 4558 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ca076029-983c-4c56-a821-793daa569460-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 20 18:46:06 crc kubenswrapper[4558]: I0120 18:46:06.125242 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qngjm\" (UniqueName: \"kubernetes.io/projected/ca076029-983c-4c56-a821-793daa569460-kube-api-access-qngjm\") on node \"crc\" DevicePath \"\"" Jan 20 18:46:06 crc kubenswrapper[4558]: I0120 18:46:06.125254 4558 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ca076029-983c-4c56-a821-793daa569460-utilities\") on node \"crc\" DevicePath \"\"" Jan 20 18:46:06 crc kubenswrapper[4558]: I0120 18:46:06.130483 4558 scope.go:117] "RemoveContainer" containerID="422496854d3e021d78522a9add2a0e6883719f5358064cddec89d4c2c769e1cd" Jan 20 18:46:06 crc kubenswrapper[4558]: I0120 18:46:06.146525 4558 scope.go:117] "RemoveContainer" containerID="75666cebc67d6862a537b16d27655e49f7632d0ea714758bde246e6dc22ffa56" Jan 20 18:46:06 crc kubenswrapper[4558]: E0120 18:46:06.146877 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"75666cebc67d6862a537b16d27655e49f7632d0ea714758bde246e6dc22ffa56\": container with ID starting with 75666cebc67d6862a537b16d27655e49f7632d0ea714758bde246e6dc22ffa56 not found: ID does not exist" containerID="75666cebc67d6862a537b16d27655e49f7632d0ea714758bde246e6dc22ffa56" Jan 20 18:46:06 crc kubenswrapper[4558]: I0120 18:46:06.146920 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"75666cebc67d6862a537b16d27655e49f7632d0ea714758bde246e6dc22ffa56"} err="failed to get container status \"75666cebc67d6862a537b16d27655e49f7632d0ea714758bde246e6dc22ffa56\": rpc error: code = NotFound desc = could not find container \"75666cebc67d6862a537b16d27655e49f7632d0ea714758bde246e6dc22ffa56\": container with ID starting with 75666cebc67d6862a537b16d27655e49f7632d0ea714758bde246e6dc22ffa56 not found: ID does not exist" Jan 20 18:46:06 crc kubenswrapper[4558]: I0120 18:46:06.146950 4558 scope.go:117] "RemoveContainer" containerID="fcdd9b839469c57e206a7206f6cb9aa054231ea8037b147871c77d6db7cb10b6" Jan 20 18:46:06 crc kubenswrapper[4558]: 
E0120 18:46:06.147405 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fcdd9b839469c57e206a7206f6cb9aa054231ea8037b147871c77d6db7cb10b6\": container with ID starting with fcdd9b839469c57e206a7206f6cb9aa054231ea8037b147871c77d6db7cb10b6 not found: ID does not exist" containerID="fcdd9b839469c57e206a7206f6cb9aa054231ea8037b147871c77d6db7cb10b6" Jan 20 18:46:06 crc kubenswrapper[4558]: I0120 18:46:06.147454 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fcdd9b839469c57e206a7206f6cb9aa054231ea8037b147871c77d6db7cb10b6"} err="failed to get container status \"fcdd9b839469c57e206a7206f6cb9aa054231ea8037b147871c77d6db7cb10b6\": rpc error: code = NotFound desc = could not find container \"fcdd9b839469c57e206a7206f6cb9aa054231ea8037b147871c77d6db7cb10b6\": container with ID starting with fcdd9b839469c57e206a7206f6cb9aa054231ea8037b147871c77d6db7cb10b6 not found: ID does not exist" Jan 20 18:46:06 crc kubenswrapper[4558]: I0120 18:46:06.147493 4558 scope.go:117] "RemoveContainer" containerID="422496854d3e021d78522a9add2a0e6883719f5358064cddec89d4c2c769e1cd" Jan 20 18:46:06 crc kubenswrapper[4558]: E0120 18:46:06.147823 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"422496854d3e021d78522a9add2a0e6883719f5358064cddec89d4c2c769e1cd\": container with ID starting with 422496854d3e021d78522a9add2a0e6883719f5358064cddec89d4c2c769e1cd not found: ID does not exist" containerID="422496854d3e021d78522a9add2a0e6883719f5358064cddec89d4c2c769e1cd" Jan 20 18:46:06 crc kubenswrapper[4558]: I0120 18:46:06.147904 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"422496854d3e021d78522a9add2a0e6883719f5358064cddec89d4c2c769e1cd"} err="failed to get container status \"422496854d3e021d78522a9add2a0e6883719f5358064cddec89d4c2c769e1cd\": rpc error: code = NotFound desc = could not find container \"422496854d3e021d78522a9add2a0e6883719f5358064cddec89d4c2c769e1cd\": container with ID starting with 422496854d3e021d78522a9add2a0e6883719f5358064cddec89d4c2c769e1cd not found: ID does not exist" Jan 20 18:46:06 crc kubenswrapper[4558]: I0120 18:46:06.578537 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ca076029-983c-4c56-a821-793daa569460" path="/var/lib/kubelet/pods/ca076029-983c-4c56-a821-793daa569460/volumes" Jan 20 18:46:09 crc kubenswrapper[4558]: I0120 18:46:09.146676 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="horizon-kuttl-tests/horizon-754c467c7d-w875x" podUID="95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583" containerName="horizon" probeResult="failure" output="Get \"https://10.217.1.220:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.220:8443: connect: connection refused" Jan 20 18:46:09 crc kubenswrapper[4558]: I0120 18:46:09.196973 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="horizon-kuttl-tests/horizon-8cd586586-mn6lm" podUID="c4475f51-d8fe-4b28-8795-936c54f1a20f" containerName="horizon" probeResult="failure" output="Get \"https://10.217.1.221:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.221:8443: connect: connection refused" Jan 20 18:46:19 crc kubenswrapper[4558]: I0120 18:46:19.146819 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="horizon-kuttl-tests/horizon-754c467c7d-w875x" podUID="95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583" containerName="horizon" 
probeResult="failure" output="Get \"https://10.217.1.220:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.220:8443: connect: connection refused" Jan 20 18:46:19 crc kubenswrapper[4558]: I0120 18:46:19.147651 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="horizon-kuttl-tests/horizon-754c467c7d-w875x" Jan 20 18:46:19 crc kubenswrapper[4558]: I0120 18:46:19.196794 4558 prober.go:107] "Probe failed" probeType="Readiness" pod="horizon-kuttl-tests/horizon-8cd586586-mn6lm" podUID="c4475f51-d8fe-4b28-8795-936c54f1a20f" containerName="horizon" probeResult="failure" output="Get \"https://10.217.1.221:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.1.221:8443: connect: connection refused" Jan 20 18:46:19 crc kubenswrapper[4558]: I0120 18:46:19.196916 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="horizon-kuttl-tests/horizon-8cd586586-mn6lm" Jan 20 18:46:23 crc kubenswrapper[4558]: I0120 18:46:23.207957 4558 generic.go:334] "Generic (PLEG): container finished" podID="95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583" containerID="0bfbdd89ca382da7993c2c2e7b6cc68f70c7dd1bd276a7f6949813759ec0b227" exitCode=137 Jan 20 18:46:23 crc kubenswrapper[4558]: I0120 18:46:23.208042 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/horizon-754c467c7d-w875x" event={"ID":"95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583","Type":"ContainerDied","Data":"0bfbdd89ca382da7993c2c2e7b6cc68f70c7dd1bd276a7f6949813759ec0b227"} Jan 20 18:46:23 crc kubenswrapper[4558]: I0120 18:46:23.214146 4558 generic.go:334] "Generic (PLEG): container finished" podID="c4475f51-d8fe-4b28-8795-936c54f1a20f" containerID="bfb3aa89ed656c3833534a18481937901cb1380920ef0efba84b233acb0543a8" exitCode=137 Jan 20 18:46:23 crc kubenswrapper[4558]: I0120 18:46:23.214188 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/horizon-8cd586586-mn6lm" event={"ID":"c4475f51-d8fe-4b28-8795-936c54f1a20f","Type":"ContainerDied","Data":"bfb3aa89ed656c3833534a18481937901cb1380920ef0efba84b233acb0543a8"} Jan 20 18:46:23 crc kubenswrapper[4558]: I0120 18:46:23.267675 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/horizon-8cd586586-mn6lm" Jan 20 18:46:23 crc kubenswrapper[4558]: I0120 18:46:23.275196 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="horizon-kuttl-tests/horizon-754c467c7d-w875x" Jan 20 18:46:23 crc kubenswrapper[4558]: I0120 18:46:23.300181 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j9288\" (UniqueName: \"kubernetes.io/projected/95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583-kube-api-access-j9288\") pod \"95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583\" (UID: \"95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583\") " Jan 20 18:46:23 crc kubenswrapper[4558]: I0120 18:46:23.300663 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583-horizon-secret-key\") pod \"95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583\" (UID: \"95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583\") " Jan 20 18:46:23 crc kubenswrapper[4558]: I0120 18:46:23.300746 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583-combined-ca-bundle\") pod \"95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583\" (UID: \"95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583\") " Jan 20 18:46:23 crc kubenswrapper[4558]: I0120 18:46:23.300810 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583-config-data\") pod \"95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583\" (UID: \"95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583\") " Jan 20 18:46:23 crc kubenswrapper[4558]: I0120 18:46:23.300838 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/c4475f51-d8fe-4b28-8795-936c54f1a20f-horizon-secret-key\") pod \"c4475f51-d8fe-4b28-8795-936c54f1a20f\" (UID: \"c4475f51-d8fe-4b28-8795-936c54f1a20f\") " Jan 20 18:46:23 crc kubenswrapper[4558]: I0120 18:46:23.300897 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c4475f51-d8fe-4b28-8795-936c54f1a20f-logs\") pod \"c4475f51-d8fe-4b28-8795-936c54f1a20f\" (UID: \"c4475f51-d8fe-4b28-8795-936c54f1a20f\") " Jan 20 18:46:23 crc kubenswrapper[4558]: I0120 18:46:23.300980 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583-horizon-tls-certs\") pod \"95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583\" (UID: \"95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583\") " Jan 20 18:46:23 crc kubenswrapper[4558]: I0120 18:46:23.301032 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583-scripts\") pod \"95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583\" (UID: \"95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583\") " Jan 20 18:46:23 crc kubenswrapper[4558]: I0120 18:46:23.301153 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583-logs\") pod \"95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583\" (UID: \"95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583\") " Jan 20 18:46:23 crc kubenswrapper[4558]: I0120 18:46:23.301251 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c4475f51-d8fe-4b28-8795-936c54f1a20f-scripts\") pod \"c4475f51-d8fe-4b28-8795-936c54f1a20f\" (UID: 
\"c4475f51-d8fe-4b28-8795-936c54f1a20f\") " Jan 20 18:46:23 crc kubenswrapper[4558]: I0120 18:46:23.301304 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c4475f51-d8fe-4b28-8795-936c54f1a20f-config-data\") pod \"c4475f51-d8fe-4b28-8795-936c54f1a20f\" (UID: \"c4475f51-d8fe-4b28-8795-936c54f1a20f\") " Jan 20 18:46:23 crc kubenswrapper[4558]: I0120 18:46:23.301338 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/c4475f51-d8fe-4b28-8795-936c54f1a20f-horizon-tls-certs\") pod \"c4475f51-d8fe-4b28-8795-936c54f1a20f\" (UID: \"c4475f51-d8fe-4b28-8795-936c54f1a20f\") " Jan 20 18:46:23 crc kubenswrapper[4558]: I0120 18:46:23.301431 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4475f51-d8fe-4b28-8795-936c54f1a20f-combined-ca-bundle\") pod \"c4475f51-d8fe-4b28-8795-936c54f1a20f\" (UID: \"c4475f51-d8fe-4b28-8795-936c54f1a20f\") " Jan 20 18:46:23 crc kubenswrapper[4558]: I0120 18:46:23.301512 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tgvkt\" (UniqueName: \"kubernetes.io/projected/c4475f51-d8fe-4b28-8795-936c54f1a20f-kube-api-access-tgvkt\") pod \"c4475f51-d8fe-4b28-8795-936c54f1a20f\" (UID: \"c4475f51-d8fe-4b28-8795-936c54f1a20f\") " Jan 20 18:46:23 crc kubenswrapper[4558]: I0120 18:46:23.302574 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c4475f51-d8fe-4b28-8795-936c54f1a20f-logs" (OuterVolumeSpecName: "logs") pod "c4475f51-d8fe-4b28-8795-936c54f1a20f" (UID: "c4475f51-d8fe-4b28-8795-936c54f1a20f"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:46:23 crc kubenswrapper[4558]: I0120 18:46:23.303207 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c4475f51-d8fe-4b28-8795-936c54f1a20f-logs\") on node \"crc\" DevicePath \"\"" Jan 20 18:46:23 crc kubenswrapper[4558]: I0120 18:46:23.304415 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583-logs" (OuterVolumeSpecName: "logs") pod "95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583" (UID: "95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:46:23 crc kubenswrapper[4558]: I0120 18:46:23.307953 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583-kube-api-access-j9288" (OuterVolumeSpecName: "kube-api-access-j9288") pod "95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583" (UID: "95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583"). InnerVolumeSpecName "kube-api-access-j9288". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:46:23 crc kubenswrapper[4558]: I0120 18:46:23.309952 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583" (UID: "95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583"). InnerVolumeSpecName "horizon-secret-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:46:23 crc kubenswrapper[4558]: I0120 18:46:23.310815 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c4475f51-d8fe-4b28-8795-936c54f1a20f-kube-api-access-tgvkt" (OuterVolumeSpecName: "kube-api-access-tgvkt") pod "c4475f51-d8fe-4b28-8795-936c54f1a20f" (UID: "c4475f51-d8fe-4b28-8795-936c54f1a20f"). InnerVolumeSpecName "kube-api-access-tgvkt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:46:23 crc kubenswrapper[4558]: I0120 18:46:23.311828 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c4475f51-d8fe-4b28-8795-936c54f1a20f-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "c4475f51-d8fe-4b28-8795-936c54f1a20f" (UID: "c4475f51-d8fe-4b28-8795-936c54f1a20f"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:46:23 crc kubenswrapper[4558]: I0120 18:46:23.347731 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583-config-data" (OuterVolumeSpecName: "config-data") pod "95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583" (UID: "95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:46:23 crc kubenswrapper[4558]: I0120 18:46:23.351534 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c4475f51-d8fe-4b28-8795-936c54f1a20f-config-data" (OuterVolumeSpecName: "config-data") pod "c4475f51-d8fe-4b28-8795-936c54f1a20f" (UID: "c4475f51-d8fe-4b28-8795-936c54f1a20f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:46:23 crc kubenswrapper[4558]: I0120 18:46:23.353050 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583" (UID: "95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:46:23 crc kubenswrapper[4558]: I0120 18:46:23.354393 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c4475f51-d8fe-4b28-8795-936c54f1a20f-scripts" (OuterVolumeSpecName: "scripts") pod "c4475f51-d8fe-4b28-8795-936c54f1a20f" (UID: "c4475f51-d8fe-4b28-8795-936c54f1a20f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:46:23 crc kubenswrapper[4558]: I0120 18:46:23.358590 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c4475f51-d8fe-4b28-8795-936c54f1a20f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c4475f51-d8fe-4b28-8795-936c54f1a20f" (UID: "c4475f51-d8fe-4b28-8795-936c54f1a20f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:46:23 crc kubenswrapper[4558]: I0120 18:46:23.361543 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583-scripts" (OuterVolumeSpecName: "scripts") pod "95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583" (UID: "95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:46:23 crc kubenswrapper[4558]: I0120 18:46:23.365154 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c4475f51-d8fe-4b28-8795-936c54f1a20f-horizon-tls-certs" (OuterVolumeSpecName: "horizon-tls-certs") pod "c4475f51-d8fe-4b28-8795-936c54f1a20f" (UID: "c4475f51-d8fe-4b28-8795-936c54f1a20f"). InnerVolumeSpecName "horizon-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:46:23 crc kubenswrapper[4558]: I0120 18:46:23.371826 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583-horizon-tls-certs" (OuterVolumeSpecName: "horizon-tls-certs") pod "95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583" (UID: "95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583"). InnerVolumeSpecName "horizon-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:46:23 crc kubenswrapper[4558]: I0120 18:46:23.404509 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4475f51-d8fe-4b28-8795-936c54f1a20f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:46:23 crc kubenswrapper[4558]: I0120 18:46:23.404542 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tgvkt\" (UniqueName: \"kubernetes.io/projected/c4475f51-d8fe-4b28-8795-936c54f1a20f-kube-api-access-tgvkt\") on node \"crc\" DevicePath \"\"" Jan 20 18:46:23 crc kubenswrapper[4558]: I0120 18:46:23.404554 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j9288\" (UniqueName: \"kubernetes.io/projected/95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583-kube-api-access-j9288\") on node \"crc\" DevicePath \"\"" Jan 20 18:46:23 crc kubenswrapper[4558]: I0120 18:46:23.404564 4558 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Jan 20 18:46:23 crc kubenswrapper[4558]: I0120 18:46:23.404574 4558 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 20 18:46:23 crc kubenswrapper[4558]: I0120 18:46:23.404584 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 18:46:23 crc kubenswrapper[4558]: I0120 18:46:23.404593 4558 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/c4475f51-d8fe-4b28-8795-936c54f1a20f-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Jan 20 18:46:23 crc kubenswrapper[4558]: I0120 18:46:23.404602 4558 reconciler_common.go:293] "Volume detached for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583-horizon-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 18:46:23 crc kubenswrapper[4558]: I0120 18:46:23.404611 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 18:46:23 crc kubenswrapper[4558]: I0120 18:46:23.404620 4558 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583-logs\") on node \"crc\" DevicePath \"\"" Jan 20 18:46:23 crc kubenswrapper[4558]: I0120 18:46:23.404629 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c4475f51-d8fe-4b28-8795-936c54f1a20f-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 18:46:23 crc kubenswrapper[4558]: I0120 18:46:23.404637 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c4475f51-d8fe-4b28-8795-936c54f1a20f-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 18:46:23 crc kubenswrapper[4558]: I0120 18:46:23.404645 4558 reconciler_common.go:293] "Volume detached for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/c4475f51-d8fe-4b28-8795-936c54f1a20f-horizon-tls-certs\") on node \"crc\" DevicePath \"\"" Jan 20 18:46:24 crc kubenswrapper[4558]: I0120 18:46:24.227811 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/horizon-8cd586586-mn6lm" event={"ID":"c4475f51-d8fe-4b28-8795-936c54f1a20f","Type":"ContainerDied","Data":"b00ab3dddd5e7675463de88d275fc60cf730d1c8210c91e84b8e6f4ef61fc585"} Jan 20 18:46:24 crc kubenswrapper[4558]: I0120 18:46:24.227884 4558 scope.go:117] "RemoveContainer" containerID="421d081cca941e1c5b614cc3a5bd5b3f957acfa82042d4c171c75b89e2c0ecac" Jan 20 18:46:24 crc kubenswrapper[4558]: I0120 18:46:24.227903 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/horizon-8cd586586-mn6lm" Jan 20 18:46:24 crc kubenswrapper[4558]: I0120 18:46:24.230244 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/horizon-754c467c7d-w875x" event={"ID":"95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583","Type":"ContainerDied","Data":"e784bb0a54bdb289d8deb2544b73e4dc6164ca51a6d335d3bd0db0c92f53b798"} Jan 20 18:46:24 crc kubenswrapper[4558]: I0120 18:46:24.230362 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="horizon-kuttl-tests/horizon-754c467c7d-w875x" Jan 20 18:46:24 crc kubenswrapper[4558]: I0120 18:46:24.265728 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["horizon-kuttl-tests/horizon-754c467c7d-w875x"] Jan 20 18:46:24 crc kubenswrapper[4558]: I0120 18:46:24.276301 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["horizon-kuttl-tests/horizon-754c467c7d-w875x"] Jan 20 18:46:24 crc kubenswrapper[4558]: I0120 18:46:24.282534 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["horizon-kuttl-tests/horizon-8cd586586-mn6lm"] Jan 20 18:46:24 crc kubenswrapper[4558]: I0120 18:46:24.287038 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["horizon-kuttl-tests/horizon-8cd586586-mn6lm"] Jan 20 18:46:24 crc kubenswrapper[4558]: I0120 18:46:24.380932 4558 scope.go:117] "RemoveContainer" containerID="bfb3aa89ed656c3833534a18481937901cb1380920ef0efba84b233acb0543a8" Jan 20 18:46:24 crc kubenswrapper[4558]: I0120 18:46:24.399760 4558 scope.go:117] "RemoveContainer" containerID="4593653edb6a833b449f93755bf65bc53044826df22199c7ffcfd1fb5bb6b0a6" Jan 20 18:46:24 crc kubenswrapper[4558]: I0120 18:46:24.549877 4558 scope.go:117] "RemoveContainer" containerID="0bfbdd89ca382da7993c2c2e7b6cc68f70c7dd1bd276a7f6949813759ec0b227" Jan 20 18:46:24 crc kubenswrapper[4558]: I0120 18:46:24.574816 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583" path="/var/lib/kubelet/pods/95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583/volumes" Jan 20 18:46:24 crc kubenswrapper[4558]: I0120 18:46:24.575496 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c4475f51-d8fe-4b28-8795-936c54f1a20f" path="/var/lib/kubelet/pods/c4475f51-d8fe-4b28-8795-936c54f1a20f/volumes" Jan 20 18:46:28 crc kubenswrapper[4558]: I0120 18:46:28.991457 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["horizon-kuttl-tests/keystone-db-sync-fv444"] Jan 20 18:46:29 crc kubenswrapper[4558]: I0120 18:46:29.001276 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["horizon-kuttl-tests/keystone-bootstrap-jsjkw"] Jan 20 18:46:29 crc kubenswrapper[4558]: I0120 18:46:29.005733 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["horizon-kuttl-tests/keystone-db-sync-fv444"] Jan 20 18:46:29 crc kubenswrapper[4558]: I0120 18:46:29.011835 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["horizon-kuttl-tests/keystone-bootstrap-jsjkw"] Jan 20 18:46:29 crc kubenswrapper[4558]: I0120 18:46:29.016639 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["horizon-kuttl-tests/keystone-f9689d859-jt4kw"] Jan 20 18:46:29 crc kubenswrapper[4558]: I0120 18:46:29.016902 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="horizon-kuttl-tests/keystone-f9689d859-jt4kw" podUID="1e63d3ee-13d4-4b9c-894f-03861181bd49" containerName="keystone-api" containerID="cri-o://d3edd9e7128ed1cf8bb955a82c34b19fdad9e82d1fadfef801ecf5a9289cdaf5" gracePeriod=30 Jan 20 18:46:29 crc kubenswrapper[4558]: I0120 18:46:29.077652 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["horizon-kuttl-tests/keystone5ae5-account-delete-9thz7"] Jan 20 18:46:29 crc kubenswrapper[4558]: E0120 18:46:29.078289 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583" containerName="horizon" Jan 20 18:46:29 crc kubenswrapper[4558]: I0120 18:46:29.078394 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583" 
containerName="horizon" Jan 20 18:46:29 crc kubenswrapper[4558]: E0120 18:46:29.078449 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca076029-983c-4c56-a821-793daa569460" containerName="extract-content" Jan 20 18:46:29 crc kubenswrapper[4558]: I0120 18:46:29.078504 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca076029-983c-4c56-a821-793daa569460" containerName="extract-content" Jan 20 18:46:29 crc kubenswrapper[4558]: E0120 18:46:29.078570 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca076029-983c-4c56-a821-793daa569460" containerName="registry-server" Jan 20 18:46:29 crc kubenswrapper[4558]: I0120 18:46:29.078613 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca076029-983c-4c56-a821-793daa569460" containerName="registry-server" Jan 20 18:46:29 crc kubenswrapper[4558]: E0120 18:46:29.078663 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4475f51-d8fe-4b28-8795-936c54f1a20f" containerName="horizon" Jan 20 18:46:29 crc kubenswrapper[4558]: I0120 18:46:29.078708 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4475f51-d8fe-4b28-8795-936c54f1a20f" containerName="horizon" Jan 20 18:46:29 crc kubenswrapper[4558]: E0120 18:46:29.078758 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583" containerName="horizon-log" Jan 20 18:46:29 crc kubenswrapper[4558]: I0120 18:46:29.078798 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583" containerName="horizon-log" Jan 20 18:46:29 crc kubenswrapper[4558]: E0120 18:46:29.078847 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4475f51-d8fe-4b28-8795-936c54f1a20f" containerName="horizon-log" Jan 20 18:46:29 crc kubenswrapper[4558]: I0120 18:46:29.078892 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4475f51-d8fe-4b28-8795-936c54f1a20f" containerName="horizon-log" Jan 20 18:46:29 crc kubenswrapper[4558]: E0120 18:46:29.078951 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca076029-983c-4c56-a821-793daa569460" containerName="extract-utilities" Jan 20 18:46:29 crc kubenswrapper[4558]: I0120 18:46:29.078999 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca076029-983c-4c56-a821-793daa569460" containerName="extract-utilities" Jan 20 18:46:29 crc kubenswrapper[4558]: I0120 18:46:29.079227 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ca076029-983c-4c56-a821-793daa569460" containerName="registry-server" Jan 20 18:46:29 crc kubenswrapper[4558]: I0120 18:46:29.079306 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c4475f51-d8fe-4b28-8795-936c54f1a20f" containerName="horizon-log" Jan 20 18:46:29 crc kubenswrapper[4558]: I0120 18:46:29.079362 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583" containerName="horizon" Jan 20 18:46:29 crc kubenswrapper[4558]: I0120 18:46:29.079416 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c4475f51-d8fe-4b28-8795-936c54f1a20f" containerName="horizon" Jan 20 18:46:29 crc kubenswrapper[4558]: I0120 18:46:29.079466 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="95c4bcc4-30e7-43f5-b6bc-ff90d8c5d583" containerName="horizon-log" Jan 20 18:46:29 crc kubenswrapper[4558]: I0120 18:46:29.080096 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="horizon-kuttl-tests/keystone5ae5-account-delete-9thz7" Jan 20 18:46:29 crc kubenswrapper[4558]: I0120 18:46:29.086971 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/keystone5ae5-account-delete-9thz7"] Jan 20 18:46:29 crc kubenswrapper[4558]: I0120 18:46:29.101248 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kxgkc\" (UniqueName: \"kubernetes.io/projected/1a5d2627-1730-47b3-94f9-110dc23d42bc-kube-api-access-kxgkc\") pod \"keystone5ae5-account-delete-9thz7\" (UID: \"1a5d2627-1730-47b3-94f9-110dc23d42bc\") " pod="horizon-kuttl-tests/keystone5ae5-account-delete-9thz7" Jan 20 18:46:29 crc kubenswrapper[4558]: I0120 18:46:29.101314 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1a5d2627-1730-47b3-94f9-110dc23d42bc-operator-scripts\") pod \"keystone5ae5-account-delete-9thz7\" (UID: \"1a5d2627-1730-47b3-94f9-110dc23d42bc\") " pod="horizon-kuttl-tests/keystone5ae5-account-delete-9thz7" Jan 20 18:46:29 crc kubenswrapper[4558]: I0120 18:46:29.203457 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1a5d2627-1730-47b3-94f9-110dc23d42bc-operator-scripts\") pod \"keystone5ae5-account-delete-9thz7\" (UID: \"1a5d2627-1730-47b3-94f9-110dc23d42bc\") " pod="horizon-kuttl-tests/keystone5ae5-account-delete-9thz7" Jan 20 18:46:29 crc kubenswrapper[4558]: I0120 18:46:29.203627 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kxgkc\" (UniqueName: \"kubernetes.io/projected/1a5d2627-1730-47b3-94f9-110dc23d42bc-kube-api-access-kxgkc\") pod \"keystone5ae5-account-delete-9thz7\" (UID: \"1a5d2627-1730-47b3-94f9-110dc23d42bc\") " pod="horizon-kuttl-tests/keystone5ae5-account-delete-9thz7" Jan 20 18:46:29 crc kubenswrapper[4558]: I0120 18:46:29.204271 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1a5d2627-1730-47b3-94f9-110dc23d42bc-operator-scripts\") pod \"keystone5ae5-account-delete-9thz7\" (UID: \"1a5d2627-1730-47b3-94f9-110dc23d42bc\") " pod="horizon-kuttl-tests/keystone5ae5-account-delete-9thz7" Jan 20 18:46:29 crc kubenswrapper[4558]: I0120 18:46:29.220735 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kxgkc\" (UniqueName: \"kubernetes.io/projected/1a5d2627-1730-47b3-94f9-110dc23d42bc-kube-api-access-kxgkc\") pod \"keystone5ae5-account-delete-9thz7\" (UID: \"1a5d2627-1730-47b3-94f9-110dc23d42bc\") " pod="horizon-kuttl-tests/keystone5ae5-account-delete-9thz7" Jan 20 18:46:29 crc kubenswrapper[4558]: I0120 18:46:29.396444 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="horizon-kuttl-tests/keystone5ae5-account-delete-9thz7" Jan 20 18:46:29 crc kubenswrapper[4558]: I0120 18:46:29.448965 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["horizon-kuttl-tests/root-account-create-update-mqt2k"] Jan 20 18:46:29 crc kubenswrapper[4558]: I0120 18:46:29.454461 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["horizon-kuttl-tests/root-account-create-update-mqt2k"] Jan 20 18:46:29 crc kubenswrapper[4558]: I0120 18:46:29.471559 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["horizon-kuttl-tests/openstack-galera-0"] Jan 20 18:46:29 crc kubenswrapper[4558]: I0120 18:46:29.474345 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["horizon-kuttl-tests/openstack-galera-1"] Jan 20 18:46:29 crc kubenswrapper[4558]: I0120 18:46:29.481470 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["horizon-kuttl-tests/openstack-galera-2"] Jan 20 18:46:29 crc kubenswrapper[4558]: I0120 18:46:29.607516 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="horizon-kuttl-tests/openstack-galera-2" podUID="761b8e2c-cee0-49d5-8310-3eaf29af06bc" containerName="galera" containerID="cri-o://a19305d22ff904d020f81094f35b6533077dd226eabc2094fee643c83ae14064" gracePeriod=30 Jan 20 18:46:29 crc kubenswrapper[4558]: I0120 18:46:29.817904 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/keystone5ae5-account-delete-9thz7"] Jan 20 18:46:30 crc kubenswrapper[4558]: I0120 18:46:30.012099 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["horizon-kuttl-tests/memcached-0"] Jan 20 18:46:30 crc kubenswrapper[4558]: I0120 18:46:30.013179 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="horizon-kuttl-tests/memcached-0" podUID="54015202-e6bd-4bf5-9b3f-df0dd67d24d9" containerName="memcached" containerID="cri-o://d779906ca149e1996729bdbafc2df8ce472e9efb0fb4cdf7c24dbb13fdabb76d" gracePeriod=30 Jan 20 18:46:30 crc kubenswrapper[4558]: I0120 18:46:30.287408 4558 generic.go:334] "Generic (PLEG): container finished" podID="761b8e2c-cee0-49d5-8310-3eaf29af06bc" containerID="a19305d22ff904d020f81094f35b6533077dd226eabc2094fee643c83ae14064" exitCode=0 Jan 20 18:46:30 crc kubenswrapper[4558]: I0120 18:46:30.287492 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/openstack-galera-2" event={"ID":"761b8e2c-cee0-49d5-8310-3eaf29af06bc","Type":"ContainerDied","Data":"a19305d22ff904d020f81094f35b6533077dd226eabc2094fee643c83ae14064"} Jan 20 18:46:30 crc kubenswrapper[4558]: I0120 18:46:30.289256 4558 generic.go:334] "Generic (PLEG): container finished" podID="1a5d2627-1730-47b3-94f9-110dc23d42bc" containerID="1e99dd53b07669b62f99ab59dbd94205921b513124ec0821e3dec8fb63542b05" exitCode=1 Jan 20 18:46:30 crc kubenswrapper[4558]: I0120 18:46:30.289284 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/keystone5ae5-account-delete-9thz7" event={"ID":"1a5d2627-1730-47b3-94f9-110dc23d42bc","Type":"ContainerDied","Data":"1e99dd53b07669b62f99ab59dbd94205921b513124ec0821e3dec8fb63542b05"} Jan 20 18:46:30 crc kubenswrapper[4558]: I0120 18:46:30.289303 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/keystone5ae5-account-delete-9thz7" event={"ID":"1a5d2627-1730-47b3-94f9-110dc23d42bc","Type":"ContainerStarted","Data":"5d1f80755d408379afe7bebbd41a659783913d82b70d9b576c031f4d9d1ebd12"} Jan 20 18:46:30 crc kubenswrapper[4558]: I0120 18:46:30.289946 4558 kubelet_pods.go:1007] "Unable to 
retrieve pull secret, the image pull may not succeed." pod="horizon-kuttl-tests/keystone5ae5-account-delete-9thz7" secret="" err="secret \"galera-openstack-dockercfg-9kvfp\" not found" Jan 20 18:46:30 crc kubenswrapper[4558]: I0120 18:46:30.290006 4558 scope.go:117] "RemoveContainer" containerID="1e99dd53b07669b62f99ab59dbd94205921b513124ec0821e3dec8fb63542b05" Jan 20 18:46:30 crc kubenswrapper[4558]: E0120 18:46:30.325592 4558 configmap.go:193] Couldn't get configMap horizon-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 18:46:30 crc kubenswrapper[4558]: E0120 18:46:30.325681 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/1a5d2627-1730-47b3-94f9-110dc23d42bc-operator-scripts podName:1a5d2627-1730-47b3-94f9-110dc23d42bc nodeName:}" failed. No retries permitted until 2026-01-20 18:46:30.825659397 +0000 UTC m=+7484.585997363 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/1a5d2627-1730-47b3-94f9-110dc23d42bc-operator-scripts") pod "keystone5ae5-account-delete-9thz7" (UID: "1a5d2627-1730-47b3-94f9-110dc23d42bc") : configmap "openstack-scripts" not found Jan 20 18:46:30 crc kubenswrapper[4558]: I0120 18:46:30.389972 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/openstack-galera-2" Jan 20 18:46:30 crc kubenswrapper[4558]: I0120 18:46:30.426733 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/761b8e2c-cee0-49d5-8310-3eaf29af06bc-config-data-generated\") pod \"761b8e2c-cee0-49d5-8310-3eaf29af06bc\" (UID: \"761b8e2c-cee0-49d5-8310-3eaf29af06bc\") " Jan 20 18:46:30 crc kubenswrapper[4558]: I0120 18:46:30.426793 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"761b8e2c-cee0-49d5-8310-3eaf29af06bc\" (UID: \"761b8e2c-cee0-49d5-8310-3eaf29af06bc\") " Jan 20 18:46:30 crc kubenswrapper[4558]: I0120 18:46:30.427015 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/761b8e2c-cee0-49d5-8310-3eaf29af06bc-kolla-config\") pod \"761b8e2c-cee0-49d5-8310-3eaf29af06bc\" (UID: \"761b8e2c-cee0-49d5-8310-3eaf29af06bc\") " Jan 20 18:46:30 crc kubenswrapper[4558]: I0120 18:46:30.427081 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h7dzv\" (UniqueName: \"kubernetes.io/projected/761b8e2c-cee0-49d5-8310-3eaf29af06bc-kube-api-access-h7dzv\") pod \"761b8e2c-cee0-49d5-8310-3eaf29af06bc\" (UID: \"761b8e2c-cee0-49d5-8310-3eaf29af06bc\") " Jan 20 18:46:30 crc kubenswrapper[4558]: I0120 18:46:30.427158 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/761b8e2c-cee0-49d5-8310-3eaf29af06bc-operator-scripts\") pod \"761b8e2c-cee0-49d5-8310-3eaf29af06bc\" (UID: \"761b8e2c-cee0-49d5-8310-3eaf29af06bc\") " Jan 20 18:46:30 crc kubenswrapper[4558]: I0120 18:46:30.427212 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/761b8e2c-cee0-49d5-8310-3eaf29af06bc-config-data-default\") pod \"761b8e2c-cee0-49d5-8310-3eaf29af06bc\" (UID: \"761b8e2c-cee0-49d5-8310-3eaf29af06bc\") " Jan 20 18:46:30 
crc kubenswrapper[4558]: I0120 18:46:30.427591 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/761b8e2c-cee0-49d5-8310-3eaf29af06bc-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "761b8e2c-cee0-49d5-8310-3eaf29af06bc" (UID: "761b8e2c-cee0-49d5-8310-3eaf29af06bc"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:46:30 crc kubenswrapper[4558]: I0120 18:46:30.428334 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/761b8e2c-cee0-49d5-8310-3eaf29af06bc-config-data-generated\") on node \"crc\" DevicePath \"\"" Jan 20 18:46:30 crc kubenswrapper[4558]: I0120 18:46:30.428345 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/761b8e2c-cee0-49d5-8310-3eaf29af06bc-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "761b8e2c-cee0-49d5-8310-3eaf29af06bc" (UID: "761b8e2c-cee0-49d5-8310-3eaf29af06bc"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:46:30 crc kubenswrapper[4558]: I0120 18:46:30.429412 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/761b8e2c-cee0-49d5-8310-3eaf29af06bc-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "761b8e2c-cee0-49d5-8310-3eaf29af06bc" (UID: "761b8e2c-cee0-49d5-8310-3eaf29af06bc"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:46:30 crc kubenswrapper[4558]: I0120 18:46:30.430027 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/761b8e2c-cee0-49d5-8310-3eaf29af06bc-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "761b8e2c-cee0-49d5-8310-3eaf29af06bc" (UID: "761b8e2c-cee0-49d5-8310-3eaf29af06bc"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:46:30 crc kubenswrapper[4558]: I0120 18:46:30.435230 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/761b8e2c-cee0-49d5-8310-3eaf29af06bc-kube-api-access-h7dzv" (OuterVolumeSpecName: "kube-api-access-h7dzv") pod "761b8e2c-cee0-49d5-8310-3eaf29af06bc" (UID: "761b8e2c-cee0-49d5-8310-3eaf29af06bc"). InnerVolumeSpecName "kube-api-access-h7dzv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:46:30 crc kubenswrapper[4558]: I0120 18:46:30.444777 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "mysql-db") pod "761b8e2c-cee0-49d5-8310-3eaf29af06bc" (UID: "761b8e2c-cee0-49d5-8310-3eaf29af06bc"). InnerVolumeSpecName "local-storage04-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 18:46:30 crc kubenswrapper[4558]: I0120 18:46:30.476638 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["horizon-kuttl-tests/rabbitmq-server-0"] Jan 20 18:46:30 crc kubenswrapper[4558]: I0120 18:46:30.529904 4558 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/761b8e2c-cee0-49d5-8310-3eaf29af06bc-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 20 18:46:30 crc kubenswrapper[4558]: I0120 18:46:30.529943 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h7dzv\" (UniqueName: \"kubernetes.io/projected/761b8e2c-cee0-49d5-8310-3eaf29af06bc-kube-api-access-h7dzv\") on node \"crc\" DevicePath \"\"" Jan 20 18:46:30 crc kubenswrapper[4558]: I0120 18:46:30.529960 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/761b8e2c-cee0-49d5-8310-3eaf29af06bc-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 18:46:30 crc kubenswrapper[4558]: I0120 18:46:30.529973 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/761b8e2c-cee0-49d5-8310-3eaf29af06bc-config-data-default\") on node \"crc\" DevicePath \"\"" Jan 20 18:46:30 crc kubenswrapper[4558]: I0120 18:46:30.530008 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" " Jan 20 18:46:30 crc kubenswrapper[4558]: I0120 18:46:30.559996 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc" Jan 20 18:46:30 crc kubenswrapper[4558]: I0120 18:46:30.577183 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="195c8a3c-2f2e-43aa-afe5-a4932ff9dbbf" path="/var/lib/kubelet/pods/195c8a3c-2f2e-43aa-afe5-a4932ff9dbbf/volumes" Jan 20 18:46:30 crc kubenswrapper[4558]: I0120 18:46:30.578276 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="76cacf7d-5f06-48d2-8ba1-95e760fbe91e" path="/var/lib/kubelet/pods/76cacf7d-5f06-48d2-8ba1-95e760fbe91e/volumes" Jan 20 18:46:30 crc kubenswrapper[4558]: I0120 18:46:30.578803 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eb102007-e2b6-44de-bbc2-600f91c4f560" path="/var/lib/kubelet/pods/eb102007-e2b6-44de-bbc2-600f91c4f560/volumes" Jan 20 18:46:30 crc kubenswrapper[4558]: I0120 18:46:30.631388 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\"" Jan 20 18:46:30 crc kubenswrapper[4558]: I0120 18:46:30.822647 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["horizon-kuttl-tests/rabbitmq-server-0"] Jan 20 18:46:30 crc kubenswrapper[4558]: E0120 18:46:30.836548 4558 configmap.go:193] Couldn't get configMap horizon-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 18:46:30 crc kubenswrapper[4558]: E0120 18:46:30.836628 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/1a5d2627-1730-47b3-94f9-110dc23d42bc-operator-scripts podName:1a5d2627-1730-47b3-94f9-110dc23d42bc nodeName:}" failed. No retries permitted until 2026-01-20 18:46:31.836610427 +0000 UTC m=+7485.596948393 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/1a5d2627-1730-47b3-94f9-110dc23d42bc-operator-scripts") pod "keystone5ae5-account-delete-9thz7" (UID: "1a5d2627-1730-47b3-94f9-110dc23d42bc") : configmap "openstack-scripts" not found Jan 20 18:46:31 crc kubenswrapper[4558]: I0120 18:46:31.152986 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/memcached-0" Jan 20 18:46:31 crc kubenswrapper[4558]: I0120 18:46:31.301994 4558 generic.go:334] "Generic (PLEG): container finished" podID="1a5d2627-1730-47b3-94f9-110dc23d42bc" containerID="886c152236283bd27bd08065a239574dae960a46795d07c145494c22e35a2dde" exitCode=1 Jan 20 18:46:31 crc kubenswrapper[4558]: I0120 18:46:31.302108 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/keystone5ae5-account-delete-9thz7" event={"ID":"1a5d2627-1730-47b3-94f9-110dc23d42bc","Type":"ContainerDied","Data":"886c152236283bd27bd08065a239574dae960a46795d07c145494c22e35a2dde"} Jan 20 18:46:31 crc kubenswrapper[4558]: I0120 18:46:31.302227 4558 scope.go:117] "RemoveContainer" containerID="1e99dd53b07669b62f99ab59dbd94205921b513124ec0821e3dec8fb63542b05" Jan 20 18:46:31 crc kubenswrapper[4558]: I0120 18:46:31.302971 4558 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="horizon-kuttl-tests/keystone5ae5-account-delete-9thz7" secret="" err="secret \"galera-openstack-dockercfg-9kvfp\" not found" Jan 20 18:46:31 crc kubenswrapper[4558]: I0120 18:46:31.303053 4558 scope.go:117] "RemoveContainer" containerID="886c152236283bd27bd08065a239574dae960a46795d07c145494c22e35a2dde" Jan 20 18:46:31 crc kubenswrapper[4558]: E0120 18:46:31.304368 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-delete\" with CrashLoopBackOff: \"back-off 10s restarting failed container=mariadb-account-delete pod=keystone5ae5-account-delete-9thz7_horizon-kuttl-tests(1a5d2627-1730-47b3-94f9-110dc23d42bc)\"" pod="horizon-kuttl-tests/keystone5ae5-account-delete-9thz7" podUID="1a5d2627-1730-47b3-94f9-110dc23d42bc" Jan 20 18:46:31 crc kubenswrapper[4558]: I0120 18:46:31.306274 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/openstack-galera-2" event={"ID":"761b8e2c-cee0-49d5-8310-3eaf29af06bc","Type":"ContainerDied","Data":"a260c33c14b9f388143f915fe24279c11e1dea53fe11fdea104e7a57d73f6814"} Jan 20 18:46:31 crc kubenswrapper[4558]: I0120 18:46:31.306438 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/openstack-galera-2" Jan 20 18:46:31 crc kubenswrapper[4558]: I0120 18:46:31.310510 4558 generic.go:334] "Generic (PLEG): container finished" podID="54015202-e6bd-4bf5-9b3f-df0dd67d24d9" containerID="d779906ca149e1996729bdbafc2df8ce472e9efb0fb4cdf7c24dbb13fdabb76d" exitCode=0 Jan 20 18:46:31 crc kubenswrapper[4558]: I0120 18:46:31.310618 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="horizon-kuttl-tests/memcached-0" Jan 20 18:46:31 crc kubenswrapper[4558]: I0120 18:46:31.310619 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/memcached-0" event={"ID":"54015202-e6bd-4bf5-9b3f-df0dd67d24d9","Type":"ContainerDied","Data":"d779906ca149e1996729bdbafc2df8ce472e9efb0fb4cdf7c24dbb13fdabb76d"} Jan 20 18:46:31 crc kubenswrapper[4558]: I0120 18:46:31.310696 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/memcached-0" event={"ID":"54015202-e6bd-4bf5-9b3f-df0dd67d24d9","Type":"ContainerDied","Data":"27eefccfdd833bae089c5b44499a83ae7433de855af4b28b6331370eafdc8357"} Jan 20 18:46:31 crc kubenswrapper[4558]: I0120 18:46:31.341522 4558 scope.go:117] "RemoveContainer" containerID="a19305d22ff904d020f81094f35b6533077dd226eabc2094fee643c83ae14064" Jan 20 18:46:31 crc kubenswrapper[4558]: I0120 18:46:31.346466 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/54015202-e6bd-4bf5-9b3f-df0dd67d24d9-config-data\") pod \"54015202-e6bd-4bf5-9b3f-df0dd67d24d9\" (UID: \"54015202-e6bd-4bf5-9b3f-df0dd67d24d9\") " Jan 20 18:46:31 crc kubenswrapper[4558]: I0120 18:46:31.346519 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/54015202-e6bd-4bf5-9b3f-df0dd67d24d9-kolla-config\") pod \"54015202-e6bd-4bf5-9b3f-df0dd67d24d9\" (UID: \"54015202-e6bd-4bf5-9b3f-df0dd67d24d9\") " Jan 20 18:46:31 crc kubenswrapper[4558]: I0120 18:46:31.346626 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x457m\" (UniqueName: \"kubernetes.io/projected/54015202-e6bd-4bf5-9b3f-df0dd67d24d9-kube-api-access-x457m\") pod \"54015202-e6bd-4bf5-9b3f-df0dd67d24d9\" (UID: \"54015202-e6bd-4bf5-9b3f-df0dd67d24d9\") " Jan 20 18:46:31 crc kubenswrapper[4558]: I0120 18:46:31.347038 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["horizon-kuttl-tests/openstack-galera-2"] Jan 20 18:46:31 crc kubenswrapper[4558]: I0120 18:46:31.347144 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/54015202-e6bd-4bf5-9b3f-df0dd67d24d9-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "54015202-e6bd-4bf5-9b3f-df0dd67d24d9" (UID: "54015202-e6bd-4bf5-9b3f-df0dd67d24d9"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:46:31 crc kubenswrapper[4558]: I0120 18:46:31.347159 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/54015202-e6bd-4bf5-9b3f-df0dd67d24d9-config-data" (OuterVolumeSpecName: "config-data") pod "54015202-e6bd-4bf5-9b3f-df0dd67d24d9" (UID: "54015202-e6bd-4bf5-9b3f-df0dd67d24d9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:46:31 crc kubenswrapper[4558]: I0120 18:46:31.350751 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/54015202-e6bd-4bf5-9b3f-df0dd67d24d9-kube-api-access-x457m" (OuterVolumeSpecName: "kube-api-access-x457m") pod "54015202-e6bd-4bf5-9b3f-df0dd67d24d9" (UID: "54015202-e6bd-4bf5-9b3f-df0dd67d24d9"). InnerVolumeSpecName "kube-api-access-x457m". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:46:31 crc kubenswrapper[4558]: I0120 18:46:31.351290 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="horizon-kuttl-tests/rabbitmq-server-0" podUID="0842b2d6-f024-4981-bc33-7af5f2cb702e" containerName="rabbitmq" containerID="cri-o://2a7e386eb453bb6586227a632c1d243da6244a3191cd8800ff3c511a0ee504e4" gracePeriod=604800 Jan 20 18:46:31 crc kubenswrapper[4558]: I0120 18:46:31.354012 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["horizon-kuttl-tests/openstack-galera-2"] Jan 20 18:46:31 crc kubenswrapper[4558]: I0120 18:46:31.359342 4558 scope.go:117] "RemoveContainer" containerID="8f8a09703a8fa88474a892fa8fded2fcadd55d5c2f197e56f808d9b8a4404c19" Jan 20 18:46:31 crc kubenswrapper[4558]: I0120 18:46:31.382034 4558 scope.go:117] "RemoveContainer" containerID="d779906ca149e1996729bdbafc2df8ce472e9efb0fb4cdf7c24dbb13fdabb76d" Jan 20 18:46:31 crc kubenswrapper[4558]: I0120 18:46:31.406589 4558 scope.go:117] "RemoveContainer" containerID="d779906ca149e1996729bdbafc2df8ce472e9efb0fb4cdf7c24dbb13fdabb76d" Jan 20 18:46:31 crc kubenswrapper[4558]: E0120 18:46:31.407105 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d779906ca149e1996729bdbafc2df8ce472e9efb0fb4cdf7c24dbb13fdabb76d\": container with ID starting with d779906ca149e1996729bdbafc2df8ce472e9efb0fb4cdf7c24dbb13fdabb76d not found: ID does not exist" containerID="d779906ca149e1996729bdbafc2df8ce472e9efb0fb4cdf7c24dbb13fdabb76d" Jan 20 18:46:31 crc kubenswrapper[4558]: I0120 18:46:31.407154 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d779906ca149e1996729bdbafc2df8ce472e9efb0fb4cdf7c24dbb13fdabb76d"} err="failed to get container status \"d779906ca149e1996729bdbafc2df8ce472e9efb0fb4cdf7c24dbb13fdabb76d\": rpc error: code = NotFound desc = could not find container \"d779906ca149e1996729bdbafc2df8ce472e9efb0fb4cdf7c24dbb13fdabb76d\": container with ID starting with d779906ca149e1996729bdbafc2df8ce472e9efb0fb4cdf7c24dbb13fdabb76d not found: ID does not exist" Jan 20 18:46:31 crc kubenswrapper[4558]: I0120 18:46:31.448952 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x457m\" (UniqueName: \"kubernetes.io/projected/54015202-e6bd-4bf5-9b3f-df0dd67d24d9-kube-api-access-x457m\") on node \"crc\" DevicePath \"\"" Jan 20 18:46:31 crc kubenswrapper[4558]: I0120 18:46:31.448981 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/54015202-e6bd-4bf5-9b3f-df0dd67d24d9-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 18:46:31 crc kubenswrapper[4558]: I0120 18:46:31.448993 4558 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/54015202-e6bd-4bf5-9b3f-df0dd67d24d9-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 20 18:46:31 crc kubenswrapper[4558]: I0120 18:46:31.621758 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="horizon-kuttl-tests/openstack-galera-1" podUID="c42137c4-70ef-4a24-85f5-d76f3883620b" containerName="galera" containerID="cri-o://f15aced75af8322f8bfc910378537e364f8e7d75fd8f4eb285e818331324d226" gracePeriod=28 Jan 20 18:46:31 crc kubenswrapper[4558]: I0120 18:46:31.638941 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["horizon-kuttl-tests/memcached-0"] Jan 20 18:46:31 crc kubenswrapper[4558]: I0120 
18:46:31.644200 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["horizon-kuttl-tests/memcached-0"] Jan 20 18:46:31 crc kubenswrapper[4558]: E0120 18:46:31.859008 4558 configmap.go:193] Couldn't get configMap horizon-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 18:46:31 crc kubenswrapper[4558]: E0120 18:46:31.859089 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/1a5d2627-1730-47b3-94f9-110dc23d42bc-operator-scripts podName:1a5d2627-1730-47b3-94f9-110dc23d42bc nodeName:}" failed. No retries permitted until 2026-01-20 18:46:33.859071548 +0000 UTC m=+7487.619409515 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/1a5d2627-1730-47b3-94f9-110dc23d42bc-operator-scripts") pod "keystone5ae5-account-delete-9thz7" (UID: "1a5d2627-1730-47b3-94f9-110dc23d42bc") : configmap "openstack-scripts" not found Jan 20 18:46:32 crc kubenswrapper[4558]: I0120 18:46:32.330408 4558 generic.go:334] "Generic (PLEG): container finished" podID="1e63d3ee-13d4-4b9c-894f-03861181bd49" containerID="d3edd9e7128ed1cf8bb955a82c34b19fdad9e82d1fadfef801ecf5a9289cdaf5" exitCode=0 Jan 20 18:46:32 crc kubenswrapper[4558]: I0120 18:46:32.330609 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/keystone-f9689d859-jt4kw" event={"ID":"1e63d3ee-13d4-4b9c-894f-03861181bd49","Type":"ContainerDied","Data":"d3edd9e7128ed1cf8bb955a82c34b19fdad9e82d1fadfef801ecf5a9289cdaf5"} Jan 20 18:46:32 crc kubenswrapper[4558]: I0120 18:46:32.339334 4558 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="horizon-kuttl-tests/keystone5ae5-account-delete-9thz7" secret="" err="secret \"galera-openstack-dockercfg-9kvfp\" not found" Jan 20 18:46:32 crc kubenswrapper[4558]: I0120 18:46:32.339386 4558 scope.go:117] "RemoveContainer" containerID="886c152236283bd27bd08065a239574dae960a46795d07c145494c22e35a2dde" Jan 20 18:46:32 crc kubenswrapper[4558]: E0120 18:46:32.339638 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-delete\" with CrashLoopBackOff: \"back-off 10s restarting failed container=mariadb-account-delete pod=keystone5ae5-account-delete-9thz7_horizon-kuttl-tests(1a5d2627-1730-47b3-94f9-110dc23d42bc)\"" pod="horizon-kuttl-tests/keystone5ae5-account-delete-9thz7" podUID="1a5d2627-1730-47b3-94f9-110dc23d42bc" Jan 20 18:46:32 crc kubenswrapper[4558]: I0120 18:46:32.407766 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="horizon-kuttl-tests/keystone-f9689d859-jt4kw" Jan 20 18:46:32 crc kubenswrapper[4558]: E0120 18:46:32.529029 4558 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0842b2d6_f024_4981_bc33_7af5f2cb702e.slice/crio-conmon-2a7e386eb453bb6586227a632c1d243da6244a3191cd8800ff3c511a0ee504e4.scope\": RecentStats: unable to find data in memory cache]" Jan 20 18:46:32 crc kubenswrapper[4558]: I0120 18:46:32.570469 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/1e63d3ee-13d4-4b9c-894f-03861181bd49-credential-keys\") pod \"1e63d3ee-13d4-4b9c-894f-03861181bd49\" (UID: \"1e63d3ee-13d4-4b9c-894f-03861181bd49\") " Jan 20 18:46:32 crc kubenswrapper[4558]: I0120 18:46:32.570647 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1e63d3ee-13d4-4b9c-894f-03861181bd49-fernet-keys\") pod \"1e63d3ee-13d4-4b9c-894f-03861181bd49\" (UID: \"1e63d3ee-13d4-4b9c-894f-03861181bd49\") " Jan 20 18:46:32 crc kubenswrapper[4558]: I0120 18:46:32.570779 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w2vr2\" (UniqueName: \"kubernetes.io/projected/1e63d3ee-13d4-4b9c-894f-03861181bd49-kube-api-access-w2vr2\") pod \"1e63d3ee-13d4-4b9c-894f-03861181bd49\" (UID: \"1e63d3ee-13d4-4b9c-894f-03861181bd49\") " Jan 20 18:46:32 crc kubenswrapper[4558]: I0120 18:46:32.570946 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1e63d3ee-13d4-4b9c-894f-03861181bd49-scripts\") pod \"1e63d3ee-13d4-4b9c-894f-03861181bd49\" (UID: \"1e63d3ee-13d4-4b9c-894f-03861181bd49\") " Jan 20 18:46:32 crc kubenswrapper[4558]: I0120 18:46:32.572790 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1e63d3ee-13d4-4b9c-894f-03861181bd49-config-data\") pod \"1e63d3ee-13d4-4b9c-894f-03861181bd49\" (UID: \"1e63d3ee-13d4-4b9c-894f-03861181bd49\") " Jan 20 18:46:32 crc kubenswrapper[4558]: I0120 18:46:32.576637 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="54015202-e6bd-4bf5-9b3f-df0dd67d24d9" path="/var/lib/kubelet/pods/54015202-e6bd-4bf5-9b3f-df0dd67d24d9/volumes" Jan 20 18:46:32 crc kubenswrapper[4558]: I0120 18:46:32.577808 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="761b8e2c-cee0-49d5-8310-3eaf29af06bc" path="/var/lib/kubelet/pods/761b8e2c-cee0-49d5-8310-3eaf29af06bc/volumes" Jan 20 18:46:32 crc kubenswrapper[4558]: I0120 18:46:32.579906 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1e63d3ee-13d4-4b9c-894f-03861181bd49-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "1e63d3ee-13d4-4b9c-894f-03861181bd49" (UID: "1e63d3ee-13d4-4b9c-894f-03861181bd49"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:46:32 crc kubenswrapper[4558]: I0120 18:46:32.582039 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1e63d3ee-13d4-4b9c-894f-03861181bd49-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "1e63d3ee-13d4-4b9c-894f-03861181bd49" (UID: "1e63d3ee-13d4-4b9c-894f-03861181bd49"). 
InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:46:32 crc kubenswrapper[4558]: I0120 18:46:32.583900 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1e63d3ee-13d4-4b9c-894f-03861181bd49-scripts" (OuterVolumeSpecName: "scripts") pod "1e63d3ee-13d4-4b9c-894f-03861181bd49" (UID: "1e63d3ee-13d4-4b9c-894f-03861181bd49"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:46:32 crc kubenswrapper[4558]: I0120 18:46:32.584390 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1e63d3ee-13d4-4b9c-894f-03861181bd49-kube-api-access-w2vr2" (OuterVolumeSpecName: "kube-api-access-w2vr2") pod "1e63d3ee-13d4-4b9c-894f-03861181bd49" (UID: "1e63d3ee-13d4-4b9c-894f-03861181bd49"). InnerVolumeSpecName "kube-api-access-w2vr2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:46:32 crc kubenswrapper[4558]: I0120 18:46:32.600744 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1e63d3ee-13d4-4b9c-894f-03861181bd49-config-data" (OuterVolumeSpecName: "config-data") pod "1e63d3ee-13d4-4b9c-894f-03861181bd49" (UID: "1e63d3ee-13d4-4b9c-894f-03861181bd49"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:46:32 crc kubenswrapper[4558]: I0120 18:46:32.676781 4558 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1e63d3ee-13d4-4b9c-894f-03861181bd49-fernet-keys\") on node \"crc\" DevicePath \"\"" Jan 20 18:46:32 crc kubenswrapper[4558]: I0120 18:46:32.676913 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w2vr2\" (UniqueName: \"kubernetes.io/projected/1e63d3ee-13d4-4b9c-894f-03861181bd49-kube-api-access-w2vr2\") on node \"crc\" DevicePath \"\"" Jan 20 18:46:32 crc kubenswrapper[4558]: I0120 18:46:32.676978 4558 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1e63d3ee-13d4-4b9c-894f-03861181bd49-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 18:46:32 crc kubenswrapper[4558]: I0120 18:46:32.677046 4558 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1e63d3ee-13d4-4b9c-894f-03861181bd49-config-data\") on node \"crc\" DevicePath \"\"" Jan 20 18:46:32 crc kubenswrapper[4558]: I0120 18:46:32.677107 4558 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/1e63d3ee-13d4-4b9c-894f-03861181bd49-credential-keys\") on node \"crc\" DevicePath \"\"" Jan 20 18:46:32 crc kubenswrapper[4558]: I0120 18:46:32.719914 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="horizon-kuttl-tests/rabbitmq-server-0" Jan 20 18:46:32 crc kubenswrapper[4558]: I0120 18:46:32.777841 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0842b2d6-f024-4981-bc33-7af5f2cb702e-rabbitmq-plugins\") pod \"0842b2d6-f024-4981-bc33-7af5f2cb702e\" (UID: \"0842b2d6-f024-4981-bc33-7af5f2cb702e\") " Jan 20 18:46:32 crc kubenswrapper[4558]: I0120 18:46:32.777938 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0842b2d6-f024-4981-bc33-7af5f2cb702e-plugins-conf\") pod \"0842b2d6-f024-4981-bc33-7af5f2cb702e\" (UID: \"0842b2d6-f024-4981-bc33-7af5f2cb702e\") " Jan 20 18:46:32 crc kubenswrapper[4558]: I0120 18:46:32.777975 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0842b2d6-f024-4981-bc33-7af5f2cb702e-pod-info\") pod \"0842b2d6-f024-4981-bc33-7af5f2cb702e\" (UID: \"0842b2d6-f024-4981-bc33-7af5f2cb702e\") " Jan 20 18:46:32 crc kubenswrapper[4558]: I0120 18:46:32.778043 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0842b2d6-f024-4981-bc33-7af5f2cb702e-rabbitmq-confd\") pod \"0842b2d6-f024-4981-bc33-7af5f2cb702e\" (UID: \"0842b2d6-f024-4981-bc33-7af5f2cb702e\") " Jan 20 18:46:32 crc kubenswrapper[4558]: I0120 18:46:32.778091 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4mqvd\" (UniqueName: \"kubernetes.io/projected/0842b2d6-f024-4981-bc33-7af5f2cb702e-kube-api-access-4mqvd\") pod \"0842b2d6-f024-4981-bc33-7af5f2cb702e\" (UID: \"0842b2d6-f024-4981-bc33-7af5f2cb702e\") " Jan 20 18:46:32 crc kubenswrapper[4558]: I0120 18:46:32.778128 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0842b2d6-f024-4981-bc33-7af5f2cb702e-erlang-cookie-secret\") pod \"0842b2d6-f024-4981-bc33-7af5f2cb702e\" (UID: \"0842b2d6-f024-4981-bc33-7af5f2cb702e\") " Jan 20 18:46:32 crc kubenswrapper[4558]: I0120 18:46:32.778212 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0842b2d6-f024-4981-bc33-7af5f2cb702e-rabbitmq-erlang-cookie\") pod \"0842b2d6-f024-4981-bc33-7af5f2cb702e\" (UID: \"0842b2d6-f024-4981-bc33-7af5f2cb702e\") " Jan 20 18:46:32 crc kubenswrapper[4558]: I0120 18:46:32.778352 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f25c550e-ed1f-4958-807c-5b0699f4d60c\") pod \"0842b2d6-f024-4981-bc33-7af5f2cb702e\" (UID: \"0842b2d6-f024-4981-bc33-7af5f2cb702e\") " Jan 20 18:46:32 crc kubenswrapper[4558]: I0120 18:46:32.778365 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0842b2d6-f024-4981-bc33-7af5f2cb702e-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "0842b2d6-f024-4981-bc33-7af5f2cb702e" (UID: "0842b2d6-f024-4981-bc33-7af5f2cb702e"). InnerVolumeSpecName "rabbitmq-plugins". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:46:32 crc kubenswrapper[4558]: I0120 18:46:32.778604 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0842b2d6-f024-4981-bc33-7af5f2cb702e-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "0842b2d6-f024-4981-bc33-7af5f2cb702e" (UID: "0842b2d6-f024-4981-bc33-7af5f2cb702e"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:46:32 crc kubenswrapper[4558]: I0120 18:46:32.778967 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0842b2d6-f024-4981-bc33-7af5f2cb702e-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "0842b2d6-f024-4981-bc33-7af5f2cb702e" (UID: "0842b2d6-f024-4981-bc33-7af5f2cb702e"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:46:32 crc kubenswrapper[4558]: I0120 18:46:32.779088 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0842b2d6-f024-4981-bc33-7af5f2cb702e-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Jan 20 18:46:32 crc kubenswrapper[4558]: I0120 18:46:32.779111 4558 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0842b2d6-f024-4981-bc33-7af5f2cb702e-plugins-conf\") on node \"crc\" DevicePath \"\"" Jan 20 18:46:32 crc kubenswrapper[4558]: I0120 18:46:32.781943 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0842b2d6-f024-4981-bc33-7af5f2cb702e-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "0842b2d6-f024-4981-bc33-7af5f2cb702e" (UID: "0842b2d6-f024-4981-bc33-7af5f2cb702e"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:46:32 crc kubenswrapper[4558]: I0120 18:46:32.781942 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/0842b2d6-f024-4981-bc33-7af5f2cb702e-pod-info" (OuterVolumeSpecName: "pod-info") pod "0842b2d6-f024-4981-bc33-7af5f2cb702e" (UID: "0842b2d6-f024-4981-bc33-7af5f2cb702e"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Jan 20 18:46:32 crc kubenswrapper[4558]: I0120 18:46:32.784097 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0842b2d6-f024-4981-bc33-7af5f2cb702e-kube-api-access-4mqvd" (OuterVolumeSpecName: "kube-api-access-4mqvd") pod "0842b2d6-f024-4981-bc33-7af5f2cb702e" (UID: "0842b2d6-f024-4981-bc33-7af5f2cb702e"). InnerVolumeSpecName "kube-api-access-4mqvd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:46:32 crc kubenswrapper[4558]: I0120 18:46:32.789833 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f25c550e-ed1f-4958-807c-5b0699f4d60c" (OuterVolumeSpecName: "persistence") pod "0842b2d6-f024-4981-bc33-7af5f2cb702e" (UID: "0842b2d6-f024-4981-bc33-7af5f2cb702e"). InnerVolumeSpecName "pvc-f25c550e-ed1f-4958-807c-5b0699f4d60c". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Jan 20 18:46:32 crc kubenswrapper[4558]: I0120 18:46:32.824937 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0842b2d6-f024-4981-bc33-7af5f2cb702e-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "0842b2d6-f024-4981-bc33-7af5f2cb702e" (UID: "0842b2d6-f024-4981-bc33-7af5f2cb702e"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:46:32 crc kubenswrapper[4558]: I0120 18:46:32.883200 4558 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0842b2d6-f024-4981-bc33-7af5f2cb702e-pod-info\") on node \"crc\" DevicePath \"\"" Jan 20 18:46:32 crc kubenswrapper[4558]: I0120 18:46:32.883236 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0842b2d6-f024-4981-bc33-7af5f2cb702e-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Jan 20 18:46:32 crc kubenswrapper[4558]: I0120 18:46:32.883269 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4mqvd\" (UniqueName: \"kubernetes.io/projected/0842b2d6-f024-4981-bc33-7af5f2cb702e-kube-api-access-4mqvd\") on node \"crc\" DevicePath \"\"" Jan 20 18:46:32 crc kubenswrapper[4558]: I0120 18:46:32.883288 4558 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0842b2d6-f024-4981-bc33-7af5f2cb702e-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Jan 20 18:46:32 crc kubenswrapper[4558]: I0120 18:46:32.883301 4558 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0842b2d6-f024-4981-bc33-7af5f2cb702e-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Jan 20 18:46:32 crc kubenswrapper[4558]: I0120 18:46:32.883350 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-f25c550e-ed1f-4958-807c-5b0699f4d60c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f25c550e-ed1f-4958-807c-5b0699f4d60c\") on node \"crc\" " Jan 20 18:46:32 crc kubenswrapper[4558]: I0120 18:46:32.905706 4558 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... 
Jan 20 18:46:32 crc kubenswrapper[4558]: I0120 18:46:32.905878 4558 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-f25c550e-ed1f-4958-807c-5b0699f4d60c" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f25c550e-ed1f-4958-807c-5b0699f4d60c") on node "crc" Jan 20 18:46:32 crc kubenswrapper[4558]: I0120 18:46:32.984760 4558 reconciler_common.go:293] "Volume detached for volume \"pvc-f25c550e-ed1f-4958-807c-5b0699f4d60c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f25c550e-ed1f-4958-807c-5b0699f4d60c\") on node \"crc\" DevicePath \"\"" Jan 20 18:46:33 crc kubenswrapper[4558]: I0120 18:46:33.362239 4558 generic.go:334] "Generic (PLEG): container finished" podID="c42137c4-70ef-4a24-85f5-d76f3883620b" containerID="f15aced75af8322f8bfc910378537e364f8e7d75fd8f4eb285e818331324d226" exitCode=0 Jan 20 18:46:33 crc kubenswrapper[4558]: I0120 18:46:33.362390 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/openstack-galera-1" event={"ID":"c42137c4-70ef-4a24-85f5-d76f3883620b","Type":"ContainerDied","Data":"f15aced75af8322f8bfc910378537e364f8e7d75fd8f4eb285e818331324d226"} Jan 20 18:46:33 crc kubenswrapper[4558]: I0120 18:46:33.369894 4558 generic.go:334] "Generic (PLEG): container finished" podID="0842b2d6-f024-4981-bc33-7af5f2cb702e" containerID="2a7e386eb453bb6586227a632c1d243da6244a3191cd8800ff3c511a0ee504e4" exitCode=0 Jan 20 18:46:33 crc kubenswrapper[4558]: I0120 18:46:33.369976 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/rabbitmq-server-0" event={"ID":"0842b2d6-f024-4981-bc33-7af5f2cb702e","Type":"ContainerDied","Data":"2a7e386eb453bb6586227a632c1d243da6244a3191cd8800ff3c511a0ee504e4"} Jan 20 18:46:33 crc kubenswrapper[4558]: I0120 18:46:33.370006 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/rabbitmq-server-0" event={"ID":"0842b2d6-f024-4981-bc33-7af5f2cb702e","Type":"ContainerDied","Data":"08274c29628de65c2989a1624dc308d2b705cc7aafe2197c8e6b554d11c6647c"} Jan 20 18:46:33 crc kubenswrapper[4558]: I0120 18:46:33.370027 4558 scope.go:117] "RemoveContainer" containerID="2a7e386eb453bb6586227a632c1d243da6244a3191cd8800ff3c511a0ee504e4" Jan 20 18:46:33 crc kubenswrapper[4558]: I0120 18:46:33.370223 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/rabbitmq-server-0" Jan 20 18:46:33 crc kubenswrapper[4558]: I0120 18:46:33.384941 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/keystone-f9689d859-jt4kw" event={"ID":"1e63d3ee-13d4-4b9c-894f-03861181bd49","Type":"ContainerDied","Data":"cd371d9428762010fa6e6dca6cee90df03d527df20e0e3307e374869aeed9ccf"} Jan 20 18:46:33 crc kubenswrapper[4558]: I0120 18:46:33.385046 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="horizon-kuttl-tests/keystone-f9689d859-jt4kw" Jan 20 18:46:33 crc kubenswrapper[4558]: I0120 18:46:33.398956 4558 scope.go:117] "RemoveContainer" containerID="5a34773916056ec973e86f0eacf4edd81228b6f42f30cb79e9070f0ac481a67e" Jan 20 18:46:33 crc kubenswrapper[4558]: I0120 18:46:33.403440 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["horizon-kuttl-tests/rabbitmq-server-0"] Jan 20 18:46:33 crc kubenswrapper[4558]: I0120 18:46:33.410222 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["horizon-kuttl-tests/rabbitmq-server-0"] Jan 20 18:46:33 crc kubenswrapper[4558]: I0120 18:46:33.423237 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["horizon-kuttl-tests/keystone-f9689d859-jt4kw"] Jan 20 18:46:33 crc kubenswrapper[4558]: I0120 18:46:33.425919 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["horizon-kuttl-tests/keystone-f9689d859-jt4kw"] Jan 20 18:46:33 crc kubenswrapper[4558]: I0120 18:46:33.450000 4558 scope.go:117] "RemoveContainer" containerID="2a7e386eb453bb6586227a632c1d243da6244a3191cd8800ff3c511a0ee504e4" Jan 20 18:46:33 crc kubenswrapper[4558]: E0120 18:46:33.457114 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2a7e386eb453bb6586227a632c1d243da6244a3191cd8800ff3c511a0ee504e4\": container with ID starting with 2a7e386eb453bb6586227a632c1d243da6244a3191cd8800ff3c511a0ee504e4 not found: ID does not exist" containerID="2a7e386eb453bb6586227a632c1d243da6244a3191cd8800ff3c511a0ee504e4" Jan 20 18:46:33 crc kubenswrapper[4558]: I0120 18:46:33.457177 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2a7e386eb453bb6586227a632c1d243da6244a3191cd8800ff3c511a0ee504e4"} err="failed to get container status \"2a7e386eb453bb6586227a632c1d243da6244a3191cd8800ff3c511a0ee504e4\": rpc error: code = NotFound desc = could not find container \"2a7e386eb453bb6586227a632c1d243da6244a3191cd8800ff3c511a0ee504e4\": container with ID starting with 2a7e386eb453bb6586227a632c1d243da6244a3191cd8800ff3c511a0ee504e4 not found: ID does not exist" Jan 20 18:46:33 crc kubenswrapper[4558]: I0120 18:46:33.457208 4558 scope.go:117] "RemoveContainer" containerID="5a34773916056ec973e86f0eacf4edd81228b6f42f30cb79e9070f0ac481a67e" Jan 20 18:46:33 crc kubenswrapper[4558]: E0120 18:46:33.459799 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5a34773916056ec973e86f0eacf4edd81228b6f42f30cb79e9070f0ac481a67e\": container with ID starting with 5a34773916056ec973e86f0eacf4edd81228b6f42f30cb79e9070f0ac481a67e not found: ID does not exist" containerID="5a34773916056ec973e86f0eacf4edd81228b6f42f30cb79e9070f0ac481a67e" Jan 20 18:46:33 crc kubenswrapper[4558]: I0120 18:46:33.459833 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5a34773916056ec973e86f0eacf4edd81228b6f42f30cb79e9070f0ac481a67e"} err="failed to get container status \"5a34773916056ec973e86f0eacf4edd81228b6f42f30cb79e9070f0ac481a67e\": rpc error: code = NotFound desc = could not find container \"5a34773916056ec973e86f0eacf4edd81228b6f42f30cb79e9070f0ac481a67e\": container with ID starting with 5a34773916056ec973e86f0eacf4edd81228b6f42f30cb79e9070f0ac481a67e not found: ID does not exist" Jan 20 18:46:33 crc kubenswrapper[4558]: I0120 18:46:33.459852 4558 scope.go:117] "RemoveContainer" 
containerID="d3edd9e7128ed1cf8bb955a82c34b19fdad9e82d1fadfef801ecf5a9289cdaf5" Jan 20 18:46:33 crc kubenswrapper[4558]: I0120 18:46:33.501696 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-788c44577b-c22sm"] Jan 20 18:46:33 crc kubenswrapper[4558]: I0120 18:46:33.501882 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/horizon-operator-controller-manager-788c44577b-c22sm" podUID="6f1cb567-179f-4bd7-b76f-bcc43d2474f0" containerName="manager" containerID="cri-o://25b89c90c78d10da1d5159b0ce7adc9014f1138c61c6218c01bad1e79dfa6e6b" gracePeriod=10 Jan 20 18:46:33 crc kubenswrapper[4558]: I0120 18:46:33.637738 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/openstack-galera-1" Jan 20 18:46:33 crc kubenswrapper[4558]: I0120 18:46:33.642720 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="horizon-kuttl-tests/openstack-galera-0" podUID="c0f454c3-2a4b-4d33-b137-2c92c0773378" containerName="galera" containerID="cri-o://0d3a126d5497a22d039dda1193d044fde10dd62c9fe892d8e5e517a70a778453" gracePeriod=26 Jan 20 18:46:33 crc kubenswrapper[4558]: I0120 18:46:33.670615 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/horizon-operator-index-9bkts"] Jan 20 18:46:33 crc kubenswrapper[4558]: I0120 18:46:33.671424 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/horizon-operator-index-9bkts" podUID="17600d40-7d65-476f-83be-d1cd168c750a" containerName="registry-server" containerID="cri-o://b3f8c0b28e0e903d26923e943caca5217cc36cae3ca65374fac3b46b1ee43f42" gracePeriod=30 Jan 20 18:46:33 crc kubenswrapper[4558]: I0120 18:46:33.696895 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage18-crc\") pod \"c42137c4-70ef-4a24-85f5-d76f3883620b\" (UID: \"c42137c4-70ef-4a24-85f5-d76f3883620b\") " Jan 20 18:46:33 crc kubenswrapper[4558]: I0120 18:46:33.696976 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c42137c4-70ef-4a24-85f5-d76f3883620b-kolla-config\") pod \"c42137c4-70ef-4a24-85f5-d76f3883620b\" (UID: \"c42137c4-70ef-4a24-85f5-d76f3883620b\") " Jan 20 18:46:33 crc kubenswrapper[4558]: I0120 18:46:33.697010 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/c42137c4-70ef-4a24-85f5-d76f3883620b-config-data-default\") pod \"c42137c4-70ef-4a24-85f5-d76f3883620b\" (UID: \"c42137c4-70ef-4a24-85f5-d76f3883620b\") " Jan 20 18:46:33 crc kubenswrapper[4558]: I0120 18:46:33.697141 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/c42137c4-70ef-4a24-85f5-d76f3883620b-config-data-generated\") pod \"c42137c4-70ef-4a24-85f5-d76f3883620b\" (UID: \"c42137c4-70ef-4a24-85f5-d76f3883620b\") " Jan 20 18:46:33 crc kubenswrapper[4558]: I0120 18:46:33.697206 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c6bkf\" (UniqueName: \"kubernetes.io/projected/c42137c4-70ef-4a24-85f5-d76f3883620b-kube-api-access-c6bkf\") pod \"c42137c4-70ef-4a24-85f5-d76f3883620b\" (UID: \"c42137c4-70ef-4a24-85f5-d76f3883620b\") " Jan 20 18:46:33 crc 
kubenswrapper[4558]: I0120 18:46:33.697250 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c42137c4-70ef-4a24-85f5-d76f3883620b-operator-scripts\") pod \"c42137c4-70ef-4a24-85f5-d76f3883620b\" (UID: \"c42137c4-70ef-4a24-85f5-d76f3883620b\") " Jan 20 18:46:33 crc kubenswrapper[4558]: I0120 18:46:33.697980 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c42137c4-70ef-4a24-85f5-d76f3883620b-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "c42137c4-70ef-4a24-85f5-d76f3883620b" (UID: "c42137c4-70ef-4a24-85f5-d76f3883620b"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:46:33 crc kubenswrapper[4558]: I0120 18:46:33.698156 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c42137c4-70ef-4a24-85f5-d76f3883620b-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "c42137c4-70ef-4a24-85f5-d76f3883620b" (UID: "c42137c4-70ef-4a24-85f5-d76f3883620b"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:46:33 crc kubenswrapper[4558]: I0120 18:46:33.698238 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c42137c4-70ef-4a24-85f5-d76f3883620b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c42137c4-70ef-4a24-85f5-d76f3883620b" (UID: "c42137c4-70ef-4a24-85f5-d76f3883620b"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:46:33 crc kubenswrapper[4558]: I0120 18:46:33.700459 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c42137c4-70ef-4a24-85f5-d76f3883620b-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "c42137c4-70ef-4a24-85f5-d76f3883620b" (UID: "c42137c4-70ef-4a24-85f5-d76f3883620b"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:46:33 crc kubenswrapper[4558]: I0120 18:46:33.703541 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c42137c4-70ef-4a24-85f5-d76f3883620b-kube-api-access-c6bkf" (OuterVolumeSpecName: "kube-api-access-c6bkf") pod "c42137c4-70ef-4a24-85f5-d76f3883620b" (UID: "c42137c4-70ef-4a24-85f5-d76f3883620b"). InnerVolumeSpecName "kube-api-access-c6bkf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:46:33 crc kubenswrapper[4558]: I0120 18:46:33.703819 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/653b3b52ace42a43c00bbe11ec09bdc12d146890fa0ae4874ac941672ahx2xr"] Jan 20 18:46:33 crc kubenswrapper[4558]: I0120 18:46:33.708533 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage18-crc" (OuterVolumeSpecName: "mysql-db") pod "c42137c4-70ef-4a24-85f5-d76f3883620b" (UID: "c42137c4-70ef-4a24-85f5-d76f3883620b"). InnerVolumeSpecName "local-storage18-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 18:46:33 crc kubenswrapper[4558]: I0120 18:46:33.709772 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/653b3b52ace42a43c00bbe11ec09bdc12d146890fa0ae4874ac941672ahx2xr"] Jan 20 18:46:33 crc kubenswrapper[4558]: I0120 18:46:33.798780 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage18-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage18-crc\") on node \"crc\" " Jan 20 18:46:33 crc kubenswrapper[4558]: I0120 18:46:33.799049 4558 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c42137c4-70ef-4a24-85f5-d76f3883620b-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 20 18:46:33 crc kubenswrapper[4558]: I0120 18:46:33.799061 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/c42137c4-70ef-4a24-85f5-d76f3883620b-config-data-default\") on node \"crc\" DevicePath \"\"" Jan 20 18:46:33 crc kubenswrapper[4558]: I0120 18:46:33.799076 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/c42137c4-70ef-4a24-85f5-d76f3883620b-config-data-generated\") on node \"crc\" DevicePath \"\"" Jan 20 18:46:33 crc kubenswrapper[4558]: I0120 18:46:33.799086 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c6bkf\" (UniqueName: \"kubernetes.io/projected/c42137c4-70ef-4a24-85f5-d76f3883620b-kube-api-access-c6bkf\") on node \"crc\" DevicePath \"\"" Jan 20 18:46:33 crc kubenswrapper[4558]: I0120 18:46:33.799097 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c42137c4-70ef-4a24-85f5-d76f3883620b-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 18:46:33 crc kubenswrapper[4558]: I0120 18:46:33.809831 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage18-crc" (UniqueName: "kubernetes.io/local-volume/local-storage18-crc") on node "crc" Jan 20 18:46:33 crc kubenswrapper[4558]: E0120 18:46:33.831440 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of b3f8c0b28e0e903d26923e943caca5217cc36cae3ca65374fac3b46b1ee43f42 is running failed: container process not found" containerID="b3f8c0b28e0e903d26923e943caca5217cc36cae3ca65374fac3b46b1ee43f42" cmd=["grpc_health_probe","-addr=:50051"] Jan 20 18:46:33 crc kubenswrapper[4558]: E0120 18:46:33.831768 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of b3f8c0b28e0e903d26923e943caca5217cc36cae3ca65374fac3b46b1ee43f42 is running failed: container process not found" containerID="b3f8c0b28e0e903d26923e943caca5217cc36cae3ca65374fac3b46b1ee43f42" cmd=["grpc_health_probe","-addr=:50051"] Jan 20 18:46:33 crc kubenswrapper[4558]: E0120 18:46:33.832404 4558 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of b3f8c0b28e0e903d26923e943caca5217cc36cae3ca65374fac3b46b1ee43f42 is running failed: container process not found" containerID="b3f8c0b28e0e903d26923e943caca5217cc36cae3ca65374fac3b46b1ee43f42" cmd=["grpc_health_probe","-addr=:50051"] Jan 20 18:46:33 crc kubenswrapper[4558]: E0120 18:46:33.832455 4558 
prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of b3f8c0b28e0e903d26923e943caca5217cc36cae3ca65374fac3b46b1ee43f42 is running failed: container process not found" probeType="Readiness" pod="openstack-operators/horizon-operator-index-9bkts" podUID="17600d40-7d65-476f-83be-d1cd168c750a" containerName="registry-server" Jan 20 18:46:33 crc kubenswrapper[4558]: I0120 18:46:33.851469 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-788c44577b-c22sm" Jan 20 18:46:33 crc kubenswrapper[4558]: I0120 18:46:33.900905 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6f1cb567-179f-4bd7-b76f-bcc43d2474f0-apiservice-cert\") pod \"6f1cb567-179f-4bd7-b76f-bcc43d2474f0\" (UID: \"6f1cb567-179f-4bd7-b76f-bcc43d2474f0\") " Jan 20 18:46:33 crc kubenswrapper[4558]: I0120 18:46:33.901182 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d5btp\" (UniqueName: \"kubernetes.io/projected/6f1cb567-179f-4bd7-b76f-bcc43d2474f0-kube-api-access-d5btp\") pod \"6f1cb567-179f-4bd7-b76f-bcc43d2474f0\" (UID: \"6f1cb567-179f-4bd7-b76f-bcc43d2474f0\") " Jan 20 18:46:33 crc kubenswrapper[4558]: I0120 18:46:33.901288 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6f1cb567-179f-4bd7-b76f-bcc43d2474f0-webhook-cert\") pod \"6f1cb567-179f-4bd7-b76f-bcc43d2474f0\" (UID: \"6f1cb567-179f-4bd7-b76f-bcc43d2474f0\") " Jan 20 18:46:33 crc kubenswrapper[4558]: E0120 18:46:33.901986 4558 configmap.go:193] Couldn't get configMap horizon-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Jan 20 18:46:33 crc kubenswrapper[4558]: E0120 18:46:33.902063 4558 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/1a5d2627-1730-47b3-94f9-110dc23d42bc-operator-scripts podName:1a5d2627-1730-47b3-94f9-110dc23d42bc nodeName:}" failed. No retries permitted until 2026-01-20 18:46:37.902043976 +0000 UTC m=+7491.662381942 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/1a5d2627-1730-47b3-94f9-110dc23d42bc-operator-scripts") pod "keystone5ae5-account-delete-9thz7" (UID: "1a5d2627-1730-47b3-94f9-110dc23d42bc") : configmap "openstack-scripts" not found Jan 20 18:46:33 crc kubenswrapper[4558]: I0120 18:46:33.902237 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage18-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage18-crc\") on node \"crc\" DevicePath \"\"" Jan 20 18:46:33 crc kubenswrapper[4558]: I0120 18:46:33.905294 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f1cb567-179f-4bd7-b76f-bcc43d2474f0-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "6f1cb567-179f-4bd7-b76f-bcc43d2474f0" (UID: "6f1cb567-179f-4bd7-b76f-bcc43d2474f0"). InnerVolumeSpecName "apiservice-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:46:33 crc kubenswrapper[4558]: I0120 18:46:33.905994 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f1cb567-179f-4bd7-b76f-bcc43d2474f0-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "6f1cb567-179f-4bd7-b76f-bcc43d2474f0" (UID: "6f1cb567-179f-4bd7-b76f-bcc43d2474f0"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:46:33 crc kubenswrapper[4558]: I0120 18:46:33.906210 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6f1cb567-179f-4bd7-b76f-bcc43d2474f0-kube-api-access-d5btp" (OuterVolumeSpecName: "kube-api-access-d5btp") pod "6f1cb567-179f-4bd7-b76f-bcc43d2474f0" (UID: "6f1cb567-179f-4bd7-b76f-bcc43d2474f0"). InnerVolumeSpecName "kube-api-access-d5btp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.004624 4558 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6f1cb567-179f-4bd7-b76f-bcc43d2474f0-apiservice-cert\") on node \"crc\" DevicePath \"\"" Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.004681 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d5btp\" (UniqueName: \"kubernetes.io/projected/6f1cb567-179f-4bd7-b76f-bcc43d2474f0-kube-api-access-d5btp\") on node \"crc\" DevicePath \"\"" Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.004695 4558 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6f1cb567-179f-4bd7-b76f-bcc43d2474f0-webhook-cert\") on node \"crc\" DevicePath \"\"" Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.079144 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-index-9bkts" Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.094620 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["horizon-kuttl-tests/keystone-db-create-fbkpc"] Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.098525 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["horizon-kuttl-tests/keystone-db-create-fbkpc"] Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.106297 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q5x27\" (UniqueName: \"kubernetes.io/projected/17600d40-7d65-476f-83be-d1cd168c750a-kube-api-access-q5x27\") pod \"17600d40-7d65-476f-83be-d1cd168c750a\" (UID: \"17600d40-7d65-476f-83be-d1cd168c750a\") " Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.109350 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/17600d40-7d65-476f-83be-d1cd168c750a-kube-api-access-q5x27" (OuterVolumeSpecName: "kube-api-access-q5x27") pod "17600d40-7d65-476f-83be-d1cd168c750a" (UID: "17600d40-7d65-476f-83be-d1cd168c750a"). InnerVolumeSpecName "kube-api-access-q5x27". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.113361 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["horizon-kuttl-tests/keystone5ae5-account-delete-9thz7"] Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.116202 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["horizon-kuttl-tests/keystone-5ae5-account-create-update-jthgq"] Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.123266 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["horizon-kuttl-tests/keystone-5ae5-account-create-update-jthgq"] Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.208094 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q5x27\" (UniqueName: \"kubernetes.io/projected/17600d40-7d65-476f-83be-d1cd168c750a-kube-api-access-q5x27\") on node \"crc\" DevicePath \"\"" Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.341956 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/keystone5ae5-account-delete-9thz7" Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.346648 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/openstack-galera-0" Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.396415 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/keystone5ae5-account-delete-9thz7" event={"ID":"1a5d2627-1730-47b3-94f9-110dc23d42bc","Type":"ContainerDied","Data":"5d1f80755d408379afe7bebbd41a659783913d82b70d9b576c031f4d9d1ebd12"} Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.396774 4558 scope.go:117] "RemoveContainer" containerID="886c152236283bd27bd08065a239574dae960a46795d07c145494c22e35a2dde" Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.396621 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/keystone5ae5-account-delete-9thz7" Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.401559 4558 generic.go:334] "Generic (PLEG): container finished" podID="c0f454c3-2a4b-4d33-b137-2c92c0773378" containerID="0d3a126d5497a22d039dda1193d044fde10dd62c9fe892d8e5e517a70a778453" exitCode=0 Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.401628 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/openstack-galera-0" event={"ID":"c0f454c3-2a4b-4d33-b137-2c92c0773378","Type":"ContainerDied","Data":"0d3a126d5497a22d039dda1193d044fde10dd62c9fe892d8e5e517a70a778453"} Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.401656 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/openstack-galera-0" event={"ID":"c0f454c3-2a4b-4d33-b137-2c92c0773378","Type":"ContainerDied","Data":"3a09cfd53101e0c5beac39f6249317fdec67eeb3fb89ed4f86b2ce213525bf6d"} Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.401742 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="horizon-kuttl-tests/openstack-galera-0" Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.409919 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="horizon-kuttl-tests/openstack-galera-1" Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.409923 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="horizon-kuttl-tests/openstack-galera-1" event={"ID":"c42137c4-70ef-4a24-85f5-d76f3883620b","Type":"ContainerDied","Data":"0bddfd51f0ab571316a3af4ee910d8865776bdcf377b5b7ee6f80a759d92e7f2"} Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.410754 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/c0f454c3-2a4b-4d33-b137-2c92c0773378-config-data-generated\") pod \"c0f454c3-2a4b-4d33-b137-2c92c0773378\" (UID: \"c0f454c3-2a4b-4d33-b137-2c92c0773378\") " Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.410785 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c0f454c3-2a4b-4d33-b137-2c92c0773378-operator-scripts\") pod \"c0f454c3-2a4b-4d33-b137-2c92c0773378\" (UID: \"c0f454c3-2a4b-4d33-b137-2c92c0773378\") " Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.410812 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/c0f454c3-2a4b-4d33-b137-2c92c0773378-config-data-default\") pod \"c0f454c3-2a4b-4d33-b137-2c92c0773378\" (UID: \"c0f454c3-2a4b-4d33-b137-2c92c0773378\") " Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.410843 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage20-crc\") pod \"c0f454c3-2a4b-4d33-b137-2c92c0773378\" (UID: \"c0f454c3-2a4b-4d33-b137-2c92c0773378\") " Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.410895 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kxgkc\" (UniqueName: \"kubernetes.io/projected/1a5d2627-1730-47b3-94f9-110dc23d42bc-kube-api-access-kxgkc\") pod \"1a5d2627-1730-47b3-94f9-110dc23d42bc\" (UID: \"1a5d2627-1730-47b3-94f9-110dc23d42bc\") " Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.410922 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c0f454c3-2a4b-4d33-b137-2c92c0773378-kolla-config\") pod \"c0f454c3-2a4b-4d33-b137-2c92c0773378\" (UID: \"c0f454c3-2a4b-4d33-b137-2c92c0773378\") " Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.410953 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1a5d2627-1730-47b3-94f9-110dc23d42bc-operator-scripts\") pod \"1a5d2627-1730-47b3-94f9-110dc23d42bc\" (UID: \"1a5d2627-1730-47b3-94f9-110dc23d42bc\") " Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.410980 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qgn84\" (UniqueName: \"kubernetes.io/projected/c0f454c3-2a4b-4d33-b137-2c92c0773378-kube-api-access-qgn84\") pod \"c0f454c3-2a4b-4d33-b137-2c92c0773378\" (UID: \"c0f454c3-2a4b-4d33-b137-2c92c0773378\") " Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.413665 4558 generic.go:334] "Generic (PLEG): container finished" podID="6f1cb567-179f-4bd7-b76f-bcc43d2474f0" containerID="25b89c90c78d10da1d5159b0ce7adc9014f1138c61c6218c01bad1e79dfa6e6b" exitCode=0 Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.413737 
4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-788c44577b-c22sm" event={"ID":"6f1cb567-179f-4bd7-b76f-bcc43d2474f0","Type":"ContainerDied","Data":"25b89c90c78d10da1d5159b0ce7adc9014f1138c61c6218c01bad1e79dfa6e6b"} Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.413778 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-788c44577b-c22sm" event={"ID":"6f1cb567-179f-4bd7-b76f-bcc43d2474f0","Type":"ContainerDied","Data":"6c061ccb4dde6fce3407f24890c2cc9112f424d916e34aee67ae2ad04405e1c5"} Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.413821 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-788c44577b-c22sm" Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.421656 4558 scope.go:117] "RemoveContainer" containerID="0d3a126d5497a22d039dda1193d044fde10dd62c9fe892d8e5e517a70a778453" Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.422637 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c0f454c3-2a4b-4d33-b137-2c92c0773378-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "c0f454c3-2a4b-4d33-b137-2c92c0773378" (UID: "c0f454c3-2a4b-4d33-b137-2c92c0773378"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.422804 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c0f454c3-2a4b-4d33-b137-2c92c0773378-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c0f454c3-2a4b-4d33-b137-2c92c0773378" (UID: "c0f454c3-2a4b-4d33-b137-2c92c0773378"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.423202 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c0f454c3-2a4b-4d33-b137-2c92c0773378-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "c0f454c3-2a4b-4d33-b137-2c92c0773378" (UID: "c0f454c3-2a4b-4d33-b137-2c92c0773378"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.423215 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a5d2627-1730-47b3-94f9-110dc23d42bc-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "1a5d2627-1730-47b3-94f9-110dc23d42bc" (UID: "1a5d2627-1730-47b3-94f9-110dc23d42bc"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.423589 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c0f454c3-2a4b-4d33-b137-2c92c0773378-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "c0f454c3-2a4b-4d33-b137-2c92c0773378" (UID: "c0f454c3-2a4b-4d33-b137-2c92c0773378"). InnerVolumeSpecName "config-data-default". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.425633 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c0f454c3-2a4b-4d33-b137-2c92c0773378-kube-api-access-qgn84" (OuterVolumeSpecName: "kube-api-access-qgn84") pod "c0f454c3-2a4b-4d33-b137-2c92c0773378" (UID: "c0f454c3-2a4b-4d33-b137-2c92c0773378"). InnerVolumeSpecName "kube-api-access-qgn84". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.429063 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1a5d2627-1730-47b3-94f9-110dc23d42bc-kube-api-access-kxgkc" (OuterVolumeSpecName: "kube-api-access-kxgkc") pod "1a5d2627-1730-47b3-94f9-110dc23d42bc" (UID: "1a5d2627-1730-47b3-94f9-110dc23d42bc"). InnerVolumeSpecName "kube-api-access-kxgkc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.433926 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage20-crc" (OuterVolumeSpecName: "mysql-db") pod "c0f454c3-2a4b-4d33-b137-2c92c0773378" (UID: "c0f454c3-2a4b-4d33-b137-2c92c0773378"). InnerVolumeSpecName "local-storage20-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.441803 4558 generic.go:334] "Generic (PLEG): container finished" podID="17600d40-7d65-476f-83be-d1cd168c750a" containerID="b3f8c0b28e0e903d26923e943caca5217cc36cae3ca65374fac3b46b1ee43f42" exitCode=0 Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.441852 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-index-9bkts" event={"ID":"17600d40-7d65-476f-83be-d1cd168c750a","Type":"ContainerDied","Data":"b3f8c0b28e0e903d26923e943caca5217cc36cae3ca65374fac3b46b1ee43f42"} Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.441890 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-index-9bkts" event={"ID":"17600d40-7d65-476f-83be-d1cd168c750a","Type":"ContainerDied","Data":"998b1bb2f2e53965c9750eebc81e0a9d297d21b2189bb80d242ff091a59e4647"} Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.441953 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-index-9bkts" Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.467915 4558 scope.go:117] "RemoveContainer" containerID="88d96a94954ba1d41bd6a5777f4e5d4d73c7400a921cbb812764d351796baa44" Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.496905 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-788c44577b-c22sm"] Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.499375 4558 scope.go:117] "RemoveContainer" containerID="0d3a126d5497a22d039dda1193d044fde10dd62c9fe892d8e5e517a70a778453" Jan 20 18:46:34 crc kubenswrapper[4558]: E0120 18:46:34.499807 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0d3a126d5497a22d039dda1193d044fde10dd62c9fe892d8e5e517a70a778453\": container with ID starting with 0d3a126d5497a22d039dda1193d044fde10dd62c9fe892d8e5e517a70a778453 not found: ID does not exist" containerID="0d3a126d5497a22d039dda1193d044fde10dd62c9fe892d8e5e517a70a778453" Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.499843 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0d3a126d5497a22d039dda1193d044fde10dd62c9fe892d8e5e517a70a778453"} err="failed to get container status \"0d3a126d5497a22d039dda1193d044fde10dd62c9fe892d8e5e517a70a778453\": rpc error: code = NotFound desc = could not find container \"0d3a126d5497a22d039dda1193d044fde10dd62c9fe892d8e5e517a70a778453\": container with ID starting with 0d3a126d5497a22d039dda1193d044fde10dd62c9fe892d8e5e517a70a778453 not found: ID does not exist" Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.499873 4558 scope.go:117] "RemoveContainer" containerID="88d96a94954ba1d41bd6a5777f4e5d4d73c7400a921cbb812764d351796baa44" Jan 20 18:46:34 crc kubenswrapper[4558]: E0120 18:46:34.500158 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"88d96a94954ba1d41bd6a5777f4e5d4d73c7400a921cbb812764d351796baa44\": container with ID starting with 88d96a94954ba1d41bd6a5777f4e5d4d73c7400a921cbb812764d351796baa44 not found: ID does not exist" containerID="88d96a94954ba1d41bd6a5777f4e5d4d73c7400a921cbb812764d351796baa44" Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.500195 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"88d96a94954ba1d41bd6a5777f4e5d4d73c7400a921cbb812764d351796baa44"} err="failed to get container status \"88d96a94954ba1d41bd6a5777f4e5d4d73c7400a921cbb812764d351796baa44\": rpc error: code = NotFound desc = could not find container \"88d96a94954ba1d41bd6a5777f4e5d4d73c7400a921cbb812764d351796baa44\": container with ID starting with 88d96a94954ba1d41bd6a5777f4e5d4d73c7400a921cbb812764d351796baa44 not found: ID does not exist" Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.500211 4558 scope.go:117] "RemoveContainer" containerID="f15aced75af8322f8bfc910378537e364f8e7d75fd8f4eb285e818331324d226" Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.504587 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-788c44577b-c22sm"] Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.511880 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: 
\"kubernetes.io/empty-dir/c0f454c3-2a4b-4d33-b137-2c92c0773378-config-data-generated\") on node \"crc\" DevicePath \"\"" Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.512012 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c0f454c3-2a4b-4d33-b137-2c92c0773378-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.512067 4558 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/c0f454c3-2a4b-4d33-b137-2c92c0773378-config-data-default\") on node \"crc\" DevicePath \"\"" Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.512141 4558 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage20-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage20-crc\") on node \"crc\" " Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.512231 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kxgkc\" (UniqueName: \"kubernetes.io/projected/1a5d2627-1730-47b3-94f9-110dc23d42bc-kube-api-access-kxgkc\") on node \"crc\" DevicePath \"\"" Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.512281 4558 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/c0f454c3-2a4b-4d33-b137-2c92c0773378-kolla-config\") on node \"crc\" DevicePath \"\"" Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.512343 4558 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1a5d2627-1730-47b3-94f9-110dc23d42bc-operator-scripts\") on node \"crc\" DevicePath \"\"" Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.512414 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qgn84\" (UniqueName: \"kubernetes.io/projected/c0f454c3-2a4b-4d33-b137-2c92c0773378-kube-api-access-qgn84\") on node \"crc\" DevicePath \"\"" Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.516324 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/horizon-operator-index-9bkts"] Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.517382 4558 scope.go:117] "RemoveContainer" containerID="2c0e0fa1bacf4208658bf79dda5c95bedac7a0121b74d1c1e9c398b99613418f" Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.522292 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/horizon-operator-index-9bkts"] Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.524834 4558 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage20-crc" (UniqueName: "kubernetes.io/local-volume/local-storage20-crc") on node "crc" Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.527174 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["horizon-kuttl-tests/openstack-galera-1"] Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.531104 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["horizon-kuttl-tests/openstack-galera-1"] Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.531699 4558 scope.go:117] "RemoveContainer" containerID="25b89c90c78d10da1d5159b0ce7adc9014f1138c61c6218c01bad1e79dfa6e6b" Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.549859 4558 scope.go:117] "RemoveContainer" containerID="25b89c90c78d10da1d5159b0ce7adc9014f1138c61c6218c01bad1e79dfa6e6b" Jan 20 18:46:34 crc kubenswrapper[4558]: E0120 18:46:34.550290 4558 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"25b89c90c78d10da1d5159b0ce7adc9014f1138c61c6218c01bad1e79dfa6e6b\": container with ID starting with 25b89c90c78d10da1d5159b0ce7adc9014f1138c61c6218c01bad1e79dfa6e6b not found: ID does not exist" containerID="25b89c90c78d10da1d5159b0ce7adc9014f1138c61c6218c01bad1e79dfa6e6b" Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.550337 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"25b89c90c78d10da1d5159b0ce7adc9014f1138c61c6218c01bad1e79dfa6e6b"} err="failed to get container status \"25b89c90c78d10da1d5159b0ce7adc9014f1138c61c6218c01bad1e79dfa6e6b\": rpc error: code = NotFound desc = could not find container \"25b89c90c78d10da1d5159b0ce7adc9014f1138c61c6218c01bad1e79dfa6e6b\": container with ID starting with 25b89c90c78d10da1d5159b0ce7adc9014f1138c61c6218c01bad1e79dfa6e6b not found: ID does not exist" Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.550380 4558 scope.go:117] "RemoveContainer" containerID="b3f8c0b28e0e903d26923e943caca5217cc36cae3ca65374fac3b46b1ee43f42" Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.566677 4558 scope.go:117] "RemoveContainer" containerID="b3f8c0b28e0e903d26923e943caca5217cc36cae3ca65374fac3b46b1ee43f42" Jan 20 18:46:34 crc kubenswrapper[4558]: E0120 18:46:34.567276 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b3f8c0b28e0e903d26923e943caca5217cc36cae3ca65374fac3b46b1ee43f42\": container with ID starting with b3f8c0b28e0e903d26923e943caca5217cc36cae3ca65374fac3b46b1ee43f42 not found: ID does not exist" containerID="b3f8c0b28e0e903d26923e943caca5217cc36cae3ca65374fac3b46b1ee43f42" Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.567341 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b3f8c0b28e0e903d26923e943caca5217cc36cae3ca65374fac3b46b1ee43f42"} err="failed to get container status \"b3f8c0b28e0e903d26923e943caca5217cc36cae3ca65374fac3b46b1ee43f42\": rpc error: code = NotFound desc = could not find container \"b3f8c0b28e0e903d26923e943caca5217cc36cae3ca65374fac3b46b1ee43f42\": container with ID starting with b3f8c0b28e0e903d26923e943caca5217cc36cae3ca65374fac3b46b1ee43f42 not found: ID does not exist" Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.583371 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0842b2d6-f024-4981-bc33-7af5f2cb702e" path="/var/lib/kubelet/pods/0842b2d6-f024-4981-bc33-7af5f2cb702e/volumes" Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.583881 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="13ad68d2-8d17-4219-90c0-05ac4f81d820" path="/var/lib/kubelet/pods/13ad68d2-8d17-4219-90c0-05ac4f81d820/volumes" Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.584322 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="17600d40-7d65-476f-83be-d1cd168c750a" path="/var/lib/kubelet/pods/17600d40-7d65-476f-83be-d1cd168c750a/volumes" Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.585588 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1e63d3ee-13d4-4b9c-894f-03861181bd49" path="/var/lib/kubelet/pods/1e63d3ee-13d4-4b9c-894f-03861181bd49/volumes" Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.586155 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="3a7c364b-9972-4300-9e72-ce120d9c5d9a" path="/var/lib/kubelet/pods/3a7c364b-9972-4300-9e72-ce120d9c5d9a/volumes" Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.586688 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6f1cb567-179f-4bd7-b76f-bcc43d2474f0" path="/var/lib/kubelet/pods/6f1cb567-179f-4bd7-b76f-bcc43d2474f0/volumes" Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.592714 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c42137c4-70ef-4a24-85f5-d76f3883620b" path="/var/lib/kubelet/pods/c42137c4-70ef-4a24-85f5-d76f3883620b/volumes" Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.593391 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f03b5622-3007-44c2-8e57-6633f6a467e4" path="/var/lib/kubelet/pods/f03b5622-3007-44c2-8e57-6633f6a467e4/volumes" Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.622079 4558 reconciler_common.go:293] "Volume detached for volume \"local-storage20-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage20-crc\") on node \"crc\" DevicePath \"\"" Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.718935 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["horizon-kuttl-tests/keystone5ae5-account-delete-9thz7"] Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.726961 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["horizon-kuttl-tests/keystone5ae5-account-delete-9thz7"] Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.734789 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["horizon-kuttl-tests/openstack-galera-0"] Jan 20 18:46:34 crc kubenswrapper[4558]: I0120 18:46:34.739442 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["horizon-kuttl-tests/openstack-galera-0"] Jan 20 18:46:36 crc kubenswrapper[4558]: I0120 18:46:36.569353 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-866dbb996d-p69bb"] Jan 20 18:46:36 crc kubenswrapper[4558]: I0120 18:46:36.569833 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/keystone-operator-controller-manager-866dbb996d-p69bb" podUID="2f0e4759-5bd0-4265-8781-57738bd9fc93" containerName="manager" containerID="cri-o://d8badf873df2955eb71c4385d046c84384befe12e1855d8c531bceeaccd9075b" gracePeriod=10 Jan 20 18:46:36 crc kubenswrapper[4558]: I0120 18:46:36.579261 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1a5d2627-1730-47b3-94f9-110dc23d42bc" path="/var/lib/kubelet/pods/1a5d2627-1730-47b3-94f9-110dc23d42bc/volumes" Jan 20 18:46:36 crc kubenswrapper[4558]: I0120 18:46:36.579990 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c0f454c3-2a4b-4d33-b137-2c92c0773378" path="/var/lib/kubelet/pods/c0f454c3-2a4b-4d33-b137-2c92c0773378/volumes" Jan 20 18:46:36 crc kubenswrapper[4558]: I0120 18:46:36.773890 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/keystone-operator-index-55crg"] Jan 20 18:46:36 crc kubenswrapper[4558]: I0120 18:46:36.774331 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/keystone-operator-index-55crg" podUID="b6ca7feb-5a45-46a4-987c-6f58d12c94ae" containerName="registry-server" containerID="cri-o://43e7a77c79fbd25ced0635535d552e3444aae2d88de4165f7c8a22e90afa22cb" gracePeriod=30 Jan 20 18:46:36 crc kubenswrapper[4558]: I0120 18:46:36.798476 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack-operators/34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a069px9rs"] Jan 20 18:46:36 crc kubenswrapper[4558]: I0120 18:46:36.805884 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/34e4e076b11e40e2796f19ad3bfdac5929942b93224fbc520400e0a069px9rs"] Jan 20 18:46:36 crc kubenswrapper[4558]: I0120 18:46:36.966389 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-866dbb996d-p69bb" Jan 20 18:46:37 crc kubenswrapper[4558]: I0120 18:46:37.064893 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dnllf\" (UniqueName: \"kubernetes.io/projected/2f0e4759-5bd0-4265-8781-57738bd9fc93-kube-api-access-dnllf\") pod \"2f0e4759-5bd0-4265-8781-57738bd9fc93\" (UID: \"2f0e4759-5bd0-4265-8781-57738bd9fc93\") " Jan 20 18:46:37 crc kubenswrapper[4558]: I0120 18:46:37.064957 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/2f0e4759-5bd0-4265-8781-57738bd9fc93-webhook-cert\") pod \"2f0e4759-5bd0-4265-8781-57738bd9fc93\" (UID: \"2f0e4759-5bd0-4265-8781-57738bd9fc93\") " Jan 20 18:46:37 crc kubenswrapper[4558]: I0120 18:46:37.065001 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/2f0e4759-5bd0-4265-8781-57738bd9fc93-apiservice-cert\") pod \"2f0e4759-5bd0-4265-8781-57738bd9fc93\" (UID: \"2f0e4759-5bd0-4265-8781-57738bd9fc93\") " Jan 20 18:46:37 crc kubenswrapper[4558]: I0120 18:46:37.072362 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2f0e4759-5bd0-4265-8781-57738bd9fc93-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "2f0e4759-5bd0-4265-8781-57738bd9fc93" (UID: "2f0e4759-5bd0-4265-8781-57738bd9fc93"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:46:37 crc kubenswrapper[4558]: I0120 18:46:37.072569 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2f0e4759-5bd0-4265-8781-57738bd9fc93-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "2f0e4759-5bd0-4265-8781-57738bd9fc93" (UID: "2f0e4759-5bd0-4265-8781-57738bd9fc93"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:46:37 crc kubenswrapper[4558]: I0120 18:46:37.077314 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2f0e4759-5bd0-4265-8781-57738bd9fc93-kube-api-access-dnllf" (OuterVolumeSpecName: "kube-api-access-dnllf") pod "2f0e4759-5bd0-4265-8781-57738bd9fc93" (UID: "2f0e4759-5bd0-4265-8781-57738bd9fc93"). InnerVolumeSpecName "kube-api-access-dnllf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:46:37 crc kubenswrapper[4558]: I0120 18:46:37.087425 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-index-55crg" Jan 20 18:46:37 crc kubenswrapper[4558]: I0120 18:46:37.167029 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcvbn\" (UniqueName: \"kubernetes.io/projected/b6ca7feb-5a45-46a4-987c-6f58d12c94ae-kube-api-access-pcvbn\") pod \"b6ca7feb-5a45-46a4-987c-6f58d12c94ae\" (UID: \"b6ca7feb-5a45-46a4-987c-6f58d12c94ae\") " Jan 20 18:46:37 crc kubenswrapper[4558]: I0120 18:46:37.167434 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dnllf\" (UniqueName: \"kubernetes.io/projected/2f0e4759-5bd0-4265-8781-57738bd9fc93-kube-api-access-dnllf\") on node \"crc\" DevicePath \"\"" Jan 20 18:46:37 crc kubenswrapper[4558]: I0120 18:46:37.167458 4558 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/2f0e4759-5bd0-4265-8781-57738bd9fc93-webhook-cert\") on node \"crc\" DevicePath \"\"" Jan 20 18:46:37 crc kubenswrapper[4558]: I0120 18:46:37.167469 4558 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/2f0e4759-5bd0-4265-8781-57738bd9fc93-apiservice-cert\") on node \"crc\" DevicePath \"\"" Jan 20 18:46:37 crc kubenswrapper[4558]: I0120 18:46:37.169791 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6ca7feb-5a45-46a4-987c-6f58d12c94ae-kube-api-access-pcvbn" (OuterVolumeSpecName: "kube-api-access-pcvbn") pod "b6ca7feb-5a45-46a4-987c-6f58d12c94ae" (UID: "b6ca7feb-5a45-46a4-987c-6f58d12c94ae"). InnerVolumeSpecName "kube-api-access-pcvbn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:46:37 crc kubenswrapper[4558]: I0120 18:46:37.268495 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcvbn\" (UniqueName: \"kubernetes.io/projected/b6ca7feb-5a45-46a4-987c-6f58d12c94ae-kube-api-access-pcvbn\") on node \"crc\" DevicePath \"\"" Jan 20 18:46:37 crc kubenswrapper[4558]: I0120 18:46:37.473241 4558 generic.go:334] "Generic (PLEG): container finished" podID="2f0e4759-5bd0-4265-8781-57738bd9fc93" containerID="d8badf873df2955eb71c4385d046c84384befe12e1855d8c531bceeaccd9075b" exitCode=0 Jan 20 18:46:37 crc kubenswrapper[4558]: I0120 18:46:37.473292 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-866dbb996d-p69bb" event={"ID":"2f0e4759-5bd0-4265-8781-57738bd9fc93","Type":"ContainerDied","Data":"d8badf873df2955eb71c4385d046c84384befe12e1855d8c531bceeaccd9075b"} Jan 20 18:46:37 crc kubenswrapper[4558]: I0120 18:46:37.473316 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-866dbb996d-p69bb" Jan 20 18:46:37 crc kubenswrapper[4558]: I0120 18:46:37.473721 4558 scope.go:117] "RemoveContainer" containerID="d8badf873df2955eb71c4385d046c84384befe12e1855d8c531bceeaccd9075b" Jan 20 18:46:37 crc kubenswrapper[4558]: I0120 18:46:37.473644 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-866dbb996d-p69bb" event={"ID":"2f0e4759-5bd0-4265-8781-57738bd9fc93","Type":"ContainerDied","Data":"3c3da68139600eeca6e91a2e2446d476a289877f1db76d34fb555d87f03fcd6c"} Jan 20 18:46:37 crc kubenswrapper[4558]: I0120 18:46:37.475641 4558 generic.go:334] "Generic (PLEG): container finished" podID="b6ca7feb-5a45-46a4-987c-6f58d12c94ae" containerID="43e7a77c79fbd25ced0635535d552e3444aae2d88de4165f7c8a22e90afa22cb" exitCode=0 Jan 20 18:46:37 crc kubenswrapper[4558]: I0120 18:46:37.475681 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-index-55crg" Jan 20 18:46:37 crc kubenswrapper[4558]: I0120 18:46:37.475704 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-index-55crg" event={"ID":"b6ca7feb-5a45-46a4-987c-6f58d12c94ae","Type":"ContainerDied","Data":"43e7a77c79fbd25ced0635535d552e3444aae2d88de4165f7c8a22e90afa22cb"} Jan 20 18:46:37 crc kubenswrapper[4558]: I0120 18:46:37.475789 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-index-55crg" event={"ID":"b6ca7feb-5a45-46a4-987c-6f58d12c94ae","Type":"ContainerDied","Data":"05338ae85df5ff40aedba76d442505104f4fa1f970ae56ed8c7f441656afe8fa"} Jan 20 18:46:37 crc kubenswrapper[4558]: I0120 18:46:37.490578 4558 scope.go:117] "RemoveContainer" containerID="d8badf873df2955eb71c4385d046c84384befe12e1855d8c531bceeaccd9075b" Jan 20 18:46:37 crc kubenswrapper[4558]: E0120 18:46:37.490956 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d8badf873df2955eb71c4385d046c84384befe12e1855d8c531bceeaccd9075b\": container with ID starting with d8badf873df2955eb71c4385d046c84384befe12e1855d8c531bceeaccd9075b not found: ID does not exist" containerID="d8badf873df2955eb71c4385d046c84384befe12e1855d8c531bceeaccd9075b" Jan 20 18:46:37 crc kubenswrapper[4558]: I0120 18:46:37.490990 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d8badf873df2955eb71c4385d046c84384befe12e1855d8c531bceeaccd9075b"} err="failed to get container status \"d8badf873df2955eb71c4385d046c84384befe12e1855d8c531bceeaccd9075b\": rpc error: code = NotFound desc = could not find container \"d8badf873df2955eb71c4385d046c84384befe12e1855d8c531bceeaccd9075b\": container with ID starting with d8badf873df2955eb71c4385d046c84384befe12e1855d8c531bceeaccd9075b not found: ID does not exist" Jan 20 18:46:37 crc kubenswrapper[4558]: I0120 18:46:37.491012 4558 scope.go:117] "RemoveContainer" containerID="43e7a77c79fbd25ced0635535d552e3444aae2d88de4165f7c8a22e90afa22cb" Jan 20 18:46:37 crc kubenswrapper[4558]: I0120 18:46:37.507374 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-866dbb996d-p69bb"] Jan 20 18:46:37 crc kubenswrapper[4558]: I0120 18:46:37.511804 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-866dbb996d-p69bb"] Jan 20 
18:46:37 crc kubenswrapper[4558]: I0120 18:46:37.512264 4558 scope.go:117] "RemoveContainer" containerID="43e7a77c79fbd25ced0635535d552e3444aae2d88de4165f7c8a22e90afa22cb" Jan 20 18:46:37 crc kubenswrapper[4558]: E0120 18:46:37.512791 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"43e7a77c79fbd25ced0635535d552e3444aae2d88de4165f7c8a22e90afa22cb\": container with ID starting with 43e7a77c79fbd25ced0635535d552e3444aae2d88de4165f7c8a22e90afa22cb not found: ID does not exist" containerID="43e7a77c79fbd25ced0635535d552e3444aae2d88de4165f7c8a22e90afa22cb" Jan 20 18:46:37 crc kubenswrapper[4558]: I0120 18:46:37.512826 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"43e7a77c79fbd25ced0635535d552e3444aae2d88de4165f7c8a22e90afa22cb"} err="failed to get container status \"43e7a77c79fbd25ced0635535d552e3444aae2d88de4165f7c8a22e90afa22cb\": rpc error: code = NotFound desc = could not find container \"43e7a77c79fbd25ced0635535d552e3444aae2d88de4165f7c8a22e90afa22cb\": container with ID starting with 43e7a77c79fbd25ced0635535d552e3444aae2d88de4165f7c8a22e90afa22cb not found: ID does not exist" Jan 20 18:46:37 crc kubenswrapper[4558]: I0120 18:46:37.515089 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/keystone-operator-index-55crg"] Jan 20 18:46:37 crc kubenswrapper[4558]: I0120 18:46:37.518546 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/keystone-operator-index-55crg"] Jan 20 18:46:38 crc kubenswrapper[4558]: I0120 18:46:38.573882 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2f0e4759-5bd0-4265-8781-57738bd9fc93" path="/var/lib/kubelet/pods/2f0e4759-5bd0-4265-8781-57738bd9fc93/volumes" Jan 20 18:46:38 crc kubenswrapper[4558]: I0120 18:46:38.574666 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6ca7feb-5a45-46a4-987c-6f58d12c94ae" path="/var/lib/kubelet/pods/b6ca7feb-5a45-46a4-987c-6f58d12c94ae/volumes" Jan 20 18:46:38 crc kubenswrapper[4558]: I0120 18:46:38.575149 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d90ff752-1abb-4f9b-91b1-73f56adad0c6" path="/var/lib/kubelet/pods/d90ff752-1abb-4f9b-91b1-73f56adad0c6/volumes" Jan 20 18:46:39 crc kubenswrapper[4558]: I0120 18:46:39.023580 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-779fc9694b-5xm4r"] Jan 20 18:46:39 crc kubenswrapper[4558]: I0120 18:46:39.023885 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-5xm4r" podUID="d062a00c-aa04-4761-97a1-0a89f1f5cfa1" containerName="operator" containerID="cri-o://906f3f711e2a066b7111ced7541cdfd4472b0e2a493f39337c7b06dd1f76c470" gracePeriod=10 Jan 20 18:46:39 crc kubenswrapper[4558]: I0120 18:46:39.213974 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-wdg6v"] Jan 20 18:46:39 crc kubenswrapper[4558]: I0120 18:46:39.214231 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/rabbitmq-cluster-operator-index-wdg6v" podUID="08aa1ffd-1990-4e80-b8b2-ba4cc54c5057" containerName="registry-server" containerID="cri-o://d5131bbb9a0d0b9f2dd6dae8531a145a202ec0c639b82f51e6569bdff316cbca" gracePeriod=30 Jan 20 18:46:39 crc kubenswrapper[4558]: I0120 18:46:39.241457 4558 kubelet.go:2437] 
"SyncLoop DELETE" source="api" pods=["openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5907g9xn"] Jan 20 18:46:39 crc kubenswrapper[4558]: I0120 18:46:39.246578 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5907g9xn"] Jan 20 18:46:39 crc kubenswrapper[4558]: I0120 18:46:39.430524 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-5xm4r" Jan 20 18:46:39 crc kubenswrapper[4558]: I0120 18:46:39.495072 4558 generic.go:334] "Generic (PLEG): container finished" podID="08aa1ffd-1990-4e80-b8b2-ba4cc54c5057" containerID="d5131bbb9a0d0b9f2dd6dae8531a145a202ec0c639b82f51e6569bdff316cbca" exitCode=0 Jan 20 18:46:39 crc kubenswrapper[4558]: I0120 18:46:39.495177 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-wdg6v" event={"ID":"08aa1ffd-1990-4e80-b8b2-ba4cc54c5057","Type":"ContainerDied","Data":"d5131bbb9a0d0b9f2dd6dae8531a145a202ec0c639b82f51e6569bdff316cbca"} Jan 20 18:46:39 crc kubenswrapper[4558]: I0120 18:46:39.497342 4558 generic.go:334] "Generic (PLEG): container finished" podID="d062a00c-aa04-4761-97a1-0a89f1f5cfa1" containerID="906f3f711e2a066b7111ced7541cdfd4472b0e2a493f39337c7b06dd1f76c470" exitCode=0 Jan 20 18:46:39 crc kubenswrapper[4558]: I0120 18:46:39.497378 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-5xm4r" event={"ID":"d062a00c-aa04-4761-97a1-0a89f1f5cfa1","Type":"ContainerDied","Data":"906f3f711e2a066b7111ced7541cdfd4472b0e2a493f39337c7b06dd1f76c470"} Jan 20 18:46:39 crc kubenswrapper[4558]: I0120 18:46:39.497414 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-5xm4r" Jan 20 18:46:39 crc kubenswrapper[4558]: I0120 18:46:39.497434 4558 scope.go:117] "RemoveContainer" containerID="906f3f711e2a066b7111ced7541cdfd4472b0e2a493f39337c7b06dd1f76c470" Jan 20 18:46:39 crc kubenswrapper[4558]: I0120 18:46:39.497417 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-5xm4r" event={"ID":"d062a00c-aa04-4761-97a1-0a89f1f5cfa1","Type":"ContainerDied","Data":"a6f582be0f85505d0f185819a29a9598ca6f3bfa257e645f94a3c6c3a51e39b8"} Jan 20 18:46:39 crc kubenswrapper[4558]: I0120 18:46:39.514504 4558 scope.go:117] "RemoveContainer" containerID="906f3f711e2a066b7111ced7541cdfd4472b0e2a493f39337c7b06dd1f76c470" Jan 20 18:46:39 crc kubenswrapper[4558]: E0120 18:46:39.515661 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"906f3f711e2a066b7111ced7541cdfd4472b0e2a493f39337c7b06dd1f76c470\": container with ID starting with 906f3f711e2a066b7111ced7541cdfd4472b0e2a493f39337c7b06dd1f76c470 not found: ID does not exist" containerID="906f3f711e2a066b7111ced7541cdfd4472b0e2a493f39337c7b06dd1f76c470" Jan 20 18:46:39 crc kubenswrapper[4558]: I0120 18:46:39.515701 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"906f3f711e2a066b7111ced7541cdfd4472b0e2a493f39337c7b06dd1f76c470"} err="failed to get container status \"906f3f711e2a066b7111ced7541cdfd4472b0e2a493f39337c7b06dd1f76c470\": rpc error: code = NotFound desc = could not find container \"906f3f711e2a066b7111ced7541cdfd4472b0e2a493f39337c7b06dd1f76c470\": container with ID starting with 906f3f711e2a066b7111ced7541cdfd4472b0e2a493f39337c7b06dd1f76c470 not found: ID does not exist" Jan 20 18:46:39 crc kubenswrapper[4558]: I0120 18:46:39.517802 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jcp9h\" (UniqueName: \"kubernetes.io/projected/d062a00c-aa04-4761-97a1-0a89f1f5cfa1-kube-api-access-jcp9h\") pod \"d062a00c-aa04-4761-97a1-0a89f1f5cfa1\" (UID: \"d062a00c-aa04-4761-97a1-0a89f1f5cfa1\") " Jan 20 18:46:39 crc kubenswrapper[4558]: I0120 18:46:39.524156 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d062a00c-aa04-4761-97a1-0a89f1f5cfa1-kube-api-access-jcp9h" (OuterVolumeSpecName: "kube-api-access-jcp9h") pod "d062a00c-aa04-4761-97a1-0a89f1f5cfa1" (UID: "d062a00c-aa04-4761-97a1-0a89f1f5cfa1"). InnerVolumeSpecName "kube-api-access-jcp9h". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:46:39 crc kubenswrapper[4558]: I0120 18:46:39.547566 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-wdg6v" Jan 20 18:46:39 crc kubenswrapper[4558]: I0120 18:46:39.621003 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tw8nt\" (UniqueName: \"kubernetes.io/projected/08aa1ffd-1990-4e80-b8b2-ba4cc54c5057-kube-api-access-tw8nt\") pod \"08aa1ffd-1990-4e80-b8b2-ba4cc54c5057\" (UID: \"08aa1ffd-1990-4e80-b8b2-ba4cc54c5057\") " Jan 20 18:46:39 crc kubenswrapper[4558]: I0120 18:46:39.621662 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jcp9h\" (UniqueName: \"kubernetes.io/projected/d062a00c-aa04-4761-97a1-0a89f1f5cfa1-kube-api-access-jcp9h\") on node \"crc\" DevicePath \"\"" Jan 20 18:46:39 crc kubenswrapper[4558]: I0120 18:46:39.625407 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/08aa1ffd-1990-4e80-b8b2-ba4cc54c5057-kube-api-access-tw8nt" (OuterVolumeSpecName: "kube-api-access-tw8nt") pod "08aa1ffd-1990-4e80-b8b2-ba4cc54c5057" (UID: "08aa1ffd-1990-4e80-b8b2-ba4cc54c5057"). InnerVolumeSpecName "kube-api-access-tw8nt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:46:39 crc kubenswrapper[4558]: I0120 18:46:39.722440 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tw8nt\" (UniqueName: \"kubernetes.io/projected/08aa1ffd-1990-4e80-b8b2-ba4cc54c5057-kube-api-access-tw8nt\") on node \"crc\" DevicePath \"\"" Jan 20 18:46:39 crc kubenswrapper[4558]: I0120 18:46:39.823740 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-779fc9694b-5xm4r"] Jan 20 18:46:39 crc kubenswrapper[4558]: I0120 18:46:39.830443 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-779fc9694b-5xm4r"] Jan 20 18:46:40 crc kubenswrapper[4558]: I0120 18:46:40.506851 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-wdg6v" event={"ID":"08aa1ffd-1990-4e80-b8b2-ba4cc54c5057","Type":"ContainerDied","Data":"5f3bf5c003d5ce2b64185818a5e1bf963154f024700ee32899b4f8cd669d41e4"} Jan 20 18:46:40 crc kubenswrapper[4558]: I0120 18:46:40.506889 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-wdg6v" Jan 20 18:46:40 crc kubenswrapper[4558]: I0120 18:46:40.506959 4558 scope.go:117] "RemoveContainer" containerID="d5131bbb9a0d0b9f2dd6dae8531a145a202ec0c639b82f51e6569bdff316cbca" Jan 20 18:46:40 crc kubenswrapper[4558]: I0120 18:46:40.536840 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-wdg6v"] Jan 20 18:46:40 crc kubenswrapper[4558]: I0120 18:46:40.543462 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-wdg6v"] Jan 20 18:46:40 crc kubenswrapper[4558]: I0120 18:46:40.579253 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="08aa1ffd-1990-4e80-b8b2-ba4cc54c5057" path="/var/lib/kubelet/pods/08aa1ffd-1990-4e80-b8b2-ba4cc54c5057/volumes" Jan 20 18:46:40 crc kubenswrapper[4558]: I0120 18:46:40.579833 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6f21f27-20ad-443e-bd90-3b739863d0ae" path="/var/lib/kubelet/pods/b6f21f27-20ad-443e-bd90-3b739863d0ae/volumes" Jan 20 18:46:40 crc kubenswrapper[4558]: I0120 18:46:40.580475 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d062a00c-aa04-4761-97a1-0a89f1f5cfa1" path="/var/lib/kubelet/pods/d062a00c-aa04-4761-97a1-0a89f1f5cfa1/volumes" Jan 20 18:46:42 crc kubenswrapper[4558]: I0120 18:46:42.502555 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/infra-operator-controller-manager-797f876cdb-fxjkw"] Jan 20 18:46:42 crc kubenswrapper[4558]: I0120 18:46:42.504017 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/infra-operator-controller-manager-797f876cdb-fxjkw" podUID="623a0db3-27c7-4e16-b902-784517ccad46" containerName="manager" containerID="cri-o://6007c648fde8fe226779756fc6e3041e21ca68284b37695f96aca4e839d68feb" gracePeriod=10 Jan 20 18:46:42 crc kubenswrapper[4558]: I0120 18:46:42.704156 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/infra-operator-index-dqwqb"] Jan 20 18:46:42 crc kubenswrapper[4558]: I0120 18:46:42.704459 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/infra-operator-index-dqwqb" podUID="d3dc106a-9ff2-49b9-bc47-3d986cec7afa" containerName="registry-server" containerID="cri-o://df0354184c0307e1f1972d3215362617b02dbd1c0c7dc94d36c92202409746db" gracePeriod=30 Jan 20 18:46:42 crc kubenswrapper[4558]: I0120 18:46:42.741523 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1wpjhp"] Jan 20 18:46:42 crc kubenswrapper[4558]: I0120 18:46:42.744982 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/ea82580bc5724477f94b47db468c840840d4aaf95efc52f7d04b6353c1wpjhp"] Jan 20 18:46:43 crc kubenswrapper[4558]: I0120 18:46:43.012768 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-797f876cdb-fxjkw" Jan 20 18:46:43 crc kubenswrapper[4558]: I0120 18:46:43.071686 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/623a0db3-27c7-4e16-b902-784517ccad46-apiservice-cert\") pod \"623a0db3-27c7-4e16-b902-784517ccad46\" (UID: \"623a0db3-27c7-4e16-b902-784517ccad46\") " Jan 20 18:46:43 crc kubenswrapper[4558]: I0120 18:46:43.071757 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fctvz\" (UniqueName: \"kubernetes.io/projected/623a0db3-27c7-4e16-b902-784517ccad46-kube-api-access-fctvz\") pod \"623a0db3-27c7-4e16-b902-784517ccad46\" (UID: \"623a0db3-27c7-4e16-b902-784517ccad46\") " Jan 20 18:46:43 crc kubenswrapper[4558]: I0120 18:46:43.071892 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/623a0db3-27c7-4e16-b902-784517ccad46-webhook-cert\") pod \"623a0db3-27c7-4e16-b902-784517ccad46\" (UID: \"623a0db3-27c7-4e16-b902-784517ccad46\") " Jan 20 18:46:43 crc kubenswrapper[4558]: I0120 18:46:43.078452 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/623a0db3-27c7-4e16-b902-784517ccad46-kube-api-access-fctvz" (OuterVolumeSpecName: "kube-api-access-fctvz") pod "623a0db3-27c7-4e16-b902-784517ccad46" (UID: "623a0db3-27c7-4e16-b902-784517ccad46"). InnerVolumeSpecName "kube-api-access-fctvz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:46:43 crc kubenswrapper[4558]: I0120 18:46:43.081094 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/623a0db3-27c7-4e16-b902-784517ccad46-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "623a0db3-27c7-4e16-b902-784517ccad46" (UID: "623a0db3-27c7-4e16-b902-784517ccad46"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:46:43 crc kubenswrapper[4558]: I0120 18:46:43.082478 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/623a0db3-27c7-4e16-b902-784517ccad46-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "623a0db3-27c7-4e16-b902-784517ccad46" (UID: "623a0db3-27c7-4e16-b902-784517ccad46"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:46:43 crc kubenswrapper[4558]: I0120 18:46:43.107666 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-index-dqwqb" Jan 20 18:46:43 crc kubenswrapper[4558]: I0120 18:46:43.173974 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pqd6g\" (UniqueName: \"kubernetes.io/projected/d3dc106a-9ff2-49b9-bc47-3d986cec7afa-kube-api-access-pqd6g\") pod \"d3dc106a-9ff2-49b9-bc47-3d986cec7afa\" (UID: \"d3dc106a-9ff2-49b9-bc47-3d986cec7afa\") " Jan 20 18:46:43 crc kubenswrapper[4558]: I0120 18:46:43.174625 4558 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/623a0db3-27c7-4e16-b902-784517ccad46-apiservice-cert\") on node \"crc\" DevicePath \"\"" Jan 20 18:46:43 crc kubenswrapper[4558]: I0120 18:46:43.174657 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fctvz\" (UniqueName: \"kubernetes.io/projected/623a0db3-27c7-4e16-b902-784517ccad46-kube-api-access-fctvz\") on node \"crc\" DevicePath \"\"" Jan 20 18:46:43 crc kubenswrapper[4558]: I0120 18:46:43.174674 4558 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/623a0db3-27c7-4e16-b902-784517ccad46-webhook-cert\") on node \"crc\" DevicePath \"\"" Jan 20 18:46:43 crc kubenswrapper[4558]: I0120 18:46:43.179206 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d3dc106a-9ff2-49b9-bc47-3d986cec7afa-kube-api-access-pqd6g" (OuterVolumeSpecName: "kube-api-access-pqd6g") pod "d3dc106a-9ff2-49b9-bc47-3d986cec7afa" (UID: "d3dc106a-9ff2-49b9-bc47-3d986cec7afa"). InnerVolumeSpecName "kube-api-access-pqd6g". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:46:43 crc kubenswrapper[4558]: I0120 18:46:43.276860 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pqd6g\" (UniqueName: \"kubernetes.io/projected/d3dc106a-9ff2-49b9-bc47-3d986cec7afa-kube-api-access-pqd6g\") on node \"crc\" DevicePath \"\"" Jan 20 18:46:43 crc kubenswrapper[4558]: I0120 18:46:43.537503 4558 generic.go:334] "Generic (PLEG): container finished" podID="623a0db3-27c7-4e16-b902-784517ccad46" containerID="6007c648fde8fe226779756fc6e3041e21ca68284b37695f96aca4e839d68feb" exitCode=0 Jan 20 18:46:43 crc kubenswrapper[4558]: I0120 18:46:43.537614 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-797f876cdb-fxjkw" event={"ID":"623a0db3-27c7-4e16-b902-784517ccad46","Type":"ContainerDied","Data":"6007c648fde8fe226779756fc6e3041e21ca68284b37695f96aca4e839d68feb"} Jan 20 18:46:43 crc kubenswrapper[4558]: I0120 18:46:43.537688 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-797f876cdb-fxjkw" Jan 20 18:46:43 crc kubenswrapper[4558]: I0120 18:46:43.537989 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-797f876cdb-fxjkw" event={"ID":"623a0db3-27c7-4e16-b902-784517ccad46","Type":"ContainerDied","Data":"e2c6f714e81df8683527700c111ebf24c52141202e8dd521d7ffbef5626d3e5a"} Jan 20 18:46:43 crc kubenswrapper[4558]: I0120 18:46:43.538031 4558 scope.go:117] "RemoveContainer" containerID="6007c648fde8fe226779756fc6e3041e21ca68284b37695f96aca4e839d68feb" Jan 20 18:46:43 crc kubenswrapper[4558]: I0120 18:46:43.540290 4558 generic.go:334] "Generic (PLEG): container finished" podID="d3dc106a-9ff2-49b9-bc47-3d986cec7afa" containerID="df0354184c0307e1f1972d3215362617b02dbd1c0c7dc94d36c92202409746db" exitCode=0 Jan 20 18:46:43 crc kubenswrapper[4558]: I0120 18:46:43.540360 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-dqwqb" event={"ID":"d3dc106a-9ff2-49b9-bc47-3d986cec7afa","Type":"ContainerDied","Data":"df0354184c0307e1f1972d3215362617b02dbd1c0c7dc94d36c92202409746db"} Jan 20 18:46:43 crc kubenswrapper[4558]: I0120 18:46:43.540403 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-dqwqb" event={"ID":"d3dc106a-9ff2-49b9-bc47-3d986cec7afa","Type":"ContainerDied","Data":"9fbb3073069554c0cf9d1a285adb9d606b87cb11b92c5c9c3929d7bb462d3ed2"} Jan 20 18:46:43 crc kubenswrapper[4558]: I0120 18:46:43.540475 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-index-dqwqb" Jan 20 18:46:43 crc kubenswrapper[4558]: I0120 18:46:43.560552 4558 scope.go:117] "RemoveContainer" containerID="6007c648fde8fe226779756fc6e3041e21ca68284b37695f96aca4e839d68feb" Jan 20 18:46:43 crc kubenswrapper[4558]: E0120 18:46:43.561117 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6007c648fde8fe226779756fc6e3041e21ca68284b37695f96aca4e839d68feb\": container with ID starting with 6007c648fde8fe226779756fc6e3041e21ca68284b37695f96aca4e839d68feb not found: ID does not exist" containerID="6007c648fde8fe226779756fc6e3041e21ca68284b37695f96aca4e839d68feb" Jan 20 18:46:43 crc kubenswrapper[4558]: I0120 18:46:43.561230 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6007c648fde8fe226779756fc6e3041e21ca68284b37695f96aca4e839d68feb"} err="failed to get container status \"6007c648fde8fe226779756fc6e3041e21ca68284b37695f96aca4e839d68feb\": rpc error: code = NotFound desc = could not find container \"6007c648fde8fe226779756fc6e3041e21ca68284b37695f96aca4e839d68feb\": container with ID starting with 6007c648fde8fe226779756fc6e3041e21ca68284b37695f96aca4e839d68feb not found: ID does not exist" Jan 20 18:46:43 crc kubenswrapper[4558]: I0120 18:46:43.561309 4558 scope.go:117] "RemoveContainer" containerID="df0354184c0307e1f1972d3215362617b02dbd1c0c7dc94d36c92202409746db" Jan 20 18:46:43 crc kubenswrapper[4558]: I0120 18:46:43.583272 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/infra-operator-index-dqwqb"] Jan 20 18:46:43 crc kubenswrapper[4558]: I0120 18:46:43.590367 4558 scope.go:117] "RemoveContainer" containerID="df0354184c0307e1f1972d3215362617b02dbd1c0c7dc94d36c92202409746db" Jan 20 18:46:43 crc kubenswrapper[4558]: E0120 18:46:43.590672 4558 
log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"df0354184c0307e1f1972d3215362617b02dbd1c0c7dc94d36c92202409746db\": container with ID starting with df0354184c0307e1f1972d3215362617b02dbd1c0c7dc94d36c92202409746db not found: ID does not exist" containerID="df0354184c0307e1f1972d3215362617b02dbd1c0c7dc94d36c92202409746db" Jan 20 18:46:43 crc kubenswrapper[4558]: I0120 18:46:43.590702 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"df0354184c0307e1f1972d3215362617b02dbd1c0c7dc94d36c92202409746db"} err="failed to get container status \"df0354184c0307e1f1972d3215362617b02dbd1c0c7dc94d36c92202409746db\": rpc error: code = NotFound desc = could not find container \"df0354184c0307e1f1972d3215362617b02dbd1c0c7dc94d36c92202409746db\": container with ID starting with df0354184c0307e1f1972d3215362617b02dbd1c0c7dc94d36c92202409746db not found: ID does not exist" Jan 20 18:46:43 crc kubenswrapper[4558]: I0120 18:46:43.591715 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/infra-operator-index-dqwqb"] Jan 20 18:46:43 crc kubenswrapper[4558]: I0120 18:46:43.595413 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/infra-operator-controller-manager-797f876cdb-fxjkw"] Jan 20 18:46:43 crc kubenswrapper[4558]: I0120 18:46:43.598520 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/infra-operator-controller-manager-797f876cdb-fxjkw"] Jan 20 18:46:44 crc kubenswrapper[4558]: I0120 18:46:44.330790 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-6bf49b779b-px8lt"] Jan 20 18:46:44 crc kubenswrapper[4558]: I0120 18:46:44.331028 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/mariadb-operator-controller-manager-6bf49b779b-px8lt" podUID="cd5771a4-2a64-4d8d-9092-843789e8f695" containerName="manager" containerID="cri-o://cd323804e98b77e7a0cfe9105bb06ffd913d5af965880592b6f58f12a81ad73a" gracePeriod=10 Jan 20 18:46:44 crc kubenswrapper[4558]: I0120 18:46:44.504964 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/mariadb-operator-index-pdq8m"] Jan 20 18:46:44 crc kubenswrapper[4558]: I0120 18:46:44.505238 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/mariadb-operator-index-pdq8m" podUID="5995f068-2b84-4ad0-8dc7-a24b00cc8ed8" containerName="registry-server" containerID="cri-o://4503d75dc3e2dff19603937b0d4459f8b0693a6a5912e6af723092f8b9b97ad3" gracePeriod=30 Jan 20 18:46:44 crc kubenswrapper[4558]: I0120 18:46:44.539479 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720bcbg7n"] Jan 20 18:46:44 crc kubenswrapper[4558]: I0120 18:46:44.545824 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/a53044f29d0f89c197912873b7cc34569484f5de61ee55394f4939720bcbg7n"] Jan 20 18:46:44 crc kubenswrapper[4558]: I0120 18:46:44.550180 4558 generic.go:334] "Generic (PLEG): container finished" podID="cd5771a4-2a64-4d8d-9092-843789e8f695" containerID="cd323804e98b77e7a0cfe9105bb06ffd913d5af965880592b6f58f12a81ad73a" exitCode=0 Jan 20 18:46:44 crc kubenswrapper[4558]: I0120 18:46:44.550204 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/mariadb-operator-controller-manager-6bf49b779b-px8lt" event={"ID":"cd5771a4-2a64-4d8d-9092-843789e8f695","Type":"ContainerDied","Data":"cd323804e98b77e7a0cfe9105bb06ffd913d5af965880592b6f58f12a81ad73a"} Jan 20 18:46:44 crc kubenswrapper[4558]: I0120 18:46:44.579195 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="12251664-831b-4494-80cb-a15f255704f4" path="/var/lib/kubelet/pods/12251664-831b-4494-80cb-a15f255704f4/volumes" Jan 20 18:46:44 crc kubenswrapper[4558]: I0120 18:46:44.579827 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="55e27c4e-389f-4ae0-814b-c8a92814add5" path="/var/lib/kubelet/pods/55e27c4e-389f-4ae0-814b-c8a92814add5/volumes" Jan 20 18:46:44 crc kubenswrapper[4558]: I0120 18:46:44.580437 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="623a0db3-27c7-4e16-b902-784517ccad46" path="/var/lib/kubelet/pods/623a0db3-27c7-4e16-b902-784517ccad46/volumes" Jan 20 18:46:44 crc kubenswrapper[4558]: I0120 18:46:44.584326 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d3dc106a-9ff2-49b9-bc47-3d986cec7afa" path="/var/lib/kubelet/pods/d3dc106a-9ff2-49b9-bc47-3d986cec7afa/volumes" Jan 20 18:46:44 crc kubenswrapper[4558]: I0120 18:46:44.705031 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-6bf49b779b-px8lt" Jan 20 18:46:44 crc kubenswrapper[4558]: I0120 18:46:44.801654 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/cd5771a4-2a64-4d8d-9092-843789e8f695-apiservice-cert\") pod \"cd5771a4-2a64-4d8d-9092-843789e8f695\" (UID: \"cd5771a4-2a64-4d8d-9092-843789e8f695\") " Jan 20 18:46:44 crc kubenswrapper[4558]: I0120 18:46:44.801765 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4m8g4\" (UniqueName: \"kubernetes.io/projected/cd5771a4-2a64-4d8d-9092-843789e8f695-kube-api-access-4m8g4\") pod \"cd5771a4-2a64-4d8d-9092-843789e8f695\" (UID: \"cd5771a4-2a64-4d8d-9092-843789e8f695\") " Jan 20 18:46:44 crc kubenswrapper[4558]: I0120 18:46:44.801838 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/cd5771a4-2a64-4d8d-9092-843789e8f695-webhook-cert\") pod \"cd5771a4-2a64-4d8d-9092-843789e8f695\" (UID: \"cd5771a4-2a64-4d8d-9092-843789e8f695\") " Jan 20 18:46:44 crc kubenswrapper[4558]: I0120 18:46:44.810332 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd5771a4-2a64-4d8d-9092-843789e8f695-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "cd5771a4-2a64-4d8d-9092-843789e8f695" (UID: "cd5771a4-2a64-4d8d-9092-843789e8f695"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:46:44 crc kubenswrapper[4558]: I0120 18:46:44.810381 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd5771a4-2a64-4d8d-9092-843789e8f695-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "cd5771a4-2a64-4d8d-9092-843789e8f695" (UID: "cd5771a4-2a64-4d8d-9092-843789e8f695"). InnerVolumeSpecName "apiservice-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 18:46:44 crc kubenswrapper[4558]: I0120 18:46:44.815471 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd5771a4-2a64-4d8d-9092-843789e8f695-kube-api-access-4m8g4" (OuterVolumeSpecName: "kube-api-access-4m8g4") pod "cd5771a4-2a64-4d8d-9092-843789e8f695" (UID: "cd5771a4-2a64-4d8d-9092-843789e8f695"). InnerVolumeSpecName "kube-api-access-4m8g4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:46:44 crc kubenswrapper[4558]: I0120 18:46:44.856545 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-index-pdq8m" Jan 20 18:46:44 crc kubenswrapper[4558]: I0120 18:46:44.903937 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qfkjk\" (UniqueName: \"kubernetes.io/projected/5995f068-2b84-4ad0-8dc7-a24b00cc8ed8-kube-api-access-qfkjk\") pod \"5995f068-2b84-4ad0-8dc7-a24b00cc8ed8\" (UID: \"5995f068-2b84-4ad0-8dc7-a24b00cc8ed8\") " Jan 20 18:46:44 crc kubenswrapper[4558]: I0120 18:46:44.904402 4558 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/cd5771a4-2a64-4d8d-9092-843789e8f695-apiservice-cert\") on node \"crc\" DevicePath \"\"" Jan 20 18:46:44 crc kubenswrapper[4558]: I0120 18:46:44.904426 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4m8g4\" (UniqueName: \"kubernetes.io/projected/cd5771a4-2a64-4d8d-9092-843789e8f695-kube-api-access-4m8g4\") on node \"crc\" DevicePath \"\"" Jan 20 18:46:44 crc kubenswrapper[4558]: I0120 18:46:44.904437 4558 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/cd5771a4-2a64-4d8d-9092-843789e8f695-webhook-cert\") on node \"crc\" DevicePath \"\"" Jan 20 18:46:44 crc kubenswrapper[4558]: I0120 18:46:44.907342 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5995f068-2b84-4ad0-8dc7-a24b00cc8ed8-kube-api-access-qfkjk" (OuterVolumeSpecName: "kube-api-access-qfkjk") pod "5995f068-2b84-4ad0-8dc7-a24b00cc8ed8" (UID: "5995f068-2b84-4ad0-8dc7-a24b00cc8ed8"). InnerVolumeSpecName "kube-api-access-qfkjk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:46:45 crc kubenswrapper[4558]: I0120 18:46:45.005757 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qfkjk\" (UniqueName: \"kubernetes.io/projected/5995f068-2b84-4ad0-8dc7-a24b00cc8ed8-kube-api-access-qfkjk\") on node \"crc\" DevicePath \"\"" Jan 20 18:46:45 crc kubenswrapper[4558]: I0120 18:46:45.563844 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-6bf49b779b-px8lt" event={"ID":"cd5771a4-2a64-4d8d-9092-843789e8f695","Type":"ContainerDied","Data":"6e634ee9c8be469f047197f7fb8744b0bed8aeaed5fe015d788c90cae0345234"} Jan 20 18:46:45 crc kubenswrapper[4558]: I0120 18:46:45.564261 4558 scope.go:117] "RemoveContainer" containerID="cd323804e98b77e7a0cfe9105bb06ffd913d5af965880592b6f58f12a81ad73a" Jan 20 18:46:45 crc kubenswrapper[4558]: I0120 18:46:45.563868 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-6bf49b779b-px8lt" Jan 20 18:46:45 crc kubenswrapper[4558]: I0120 18:46:45.568554 4558 generic.go:334] "Generic (PLEG): container finished" podID="5995f068-2b84-4ad0-8dc7-a24b00cc8ed8" containerID="4503d75dc3e2dff19603937b0d4459f8b0693a6a5912e6af723092f8b9b97ad3" exitCode=0 Jan 20 18:46:45 crc kubenswrapper[4558]: I0120 18:46:45.568589 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-index-pdq8m" Jan 20 18:46:45 crc kubenswrapper[4558]: I0120 18:46:45.568752 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-pdq8m" event={"ID":"5995f068-2b84-4ad0-8dc7-a24b00cc8ed8","Type":"ContainerDied","Data":"4503d75dc3e2dff19603937b0d4459f8b0693a6a5912e6af723092f8b9b97ad3"} Jan 20 18:46:45 crc kubenswrapper[4558]: I0120 18:46:45.568889 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-pdq8m" event={"ID":"5995f068-2b84-4ad0-8dc7-a24b00cc8ed8","Type":"ContainerDied","Data":"8165927947d739919261f2284c86a80965539b15b6f016ffa6227e3179051eda"} Jan 20 18:46:45 crc kubenswrapper[4558]: I0120 18:46:45.589424 4558 scope.go:117] "RemoveContainer" containerID="4503d75dc3e2dff19603937b0d4459f8b0693a6a5912e6af723092f8b9b97ad3" Jan 20 18:46:45 crc kubenswrapper[4558]: I0120 18:46:45.601393 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-6bf49b779b-px8lt"] Jan 20 18:46:45 crc kubenswrapper[4558]: I0120 18:46:45.613938 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-6bf49b779b-px8lt"] Jan 20 18:46:45 crc kubenswrapper[4558]: I0120 18:46:45.616148 4558 scope.go:117] "RemoveContainer" containerID="4503d75dc3e2dff19603937b0d4459f8b0693a6a5912e6af723092f8b9b97ad3" Jan 20 18:46:45 crc kubenswrapper[4558]: E0120 18:46:45.617039 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4503d75dc3e2dff19603937b0d4459f8b0693a6a5912e6af723092f8b9b97ad3\": container with ID starting with 4503d75dc3e2dff19603937b0d4459f8b0693a6a5912e6af723092f8b9b97ad3 not found: ID does not exist" containerID="4503d75dc3e2dff19603937b0d4459f8b0693a6a5912e6af723092f8b9b97ad3" Jan 20 18:46:45 crc kubenswrapper[4558]: I0120 18:46:45.617093 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4503d75dc3e2dff19603937b0d4459f8b0693a6a5912e6af723092f8b9b97ad3"} err="failed to get container status \"4503d75dc3e2dff19603937b0d4459f8b0693a6a5912e6af723092f8b9b97ad3\": rpc error: code = NotFound desc = could not find container \"4503d75dc3e2dff19603937b0d4459f8b0693a6a5912e6af723092f8b9b97ad3\": container with ID starting with 4503d75dc3e2dff19603937b0d4459f8b0693a6a5912e6af723092f8b9b97ad3 not found: ID does not exist" Jan 20 18:46:45 crc kubenswrapper[4558]: I0120 18:46:45.618319 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/mariadb-operator-index-pdq8m"] Jan 20 18:46:45 crc kubenswrapper[4558]: I0120 18:46:45.621287 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/mariadb-operator-index-pdq8m"] Jan 20 18:46:46 crc kubenswrapper[4558]: I0120 18:46:46.576029 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5995f068-2b84-4ad0-8dc7-a24b00cc8ed8" 
path="/var/lib/kubelet/pods/5995f068-2b84-4ad0-8dc7-a24b00cc8ed8/volumes" Jan 20 18:46:46 crc kubenswrapper[4558]: I0120 18:46:46.576557 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd5771a4-2a64-4d8d-9092-843789e8f695" path="/var/lib/kubelet/pods/cd5771a4-2a64-4d8d-9092-843789e8f695/volumes" Jan 20 18:46:58 crc kubenswrapper[4558]: I0120 18:46:58.732338 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-k285q/must-gather-9xxbp"] Jan 20 18:46:58 crc kubenswrapper[4558]: E0120 18:46:58.733981 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5995f068-2b84-4ad0-8dc7-a24b00cc8ed8" containerName="registry-server" Jan 20 18:46:58 crc kubenswrapper[4558]: I0120 18:46:58.734065 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="5995f068-2b84-4ad0-8dc7-a24b00cc8ed8" containerName="registry-server" Jan 20 18:46:58 crc kubenswrapper[4558]: E0120 18:46:58.734119 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="623a0db3-27c7-4e16-b902-784517ccad46" containerName="manager" Jan 20 18:46:58 crc kubenswrapper[4558]: I0120 18:46:58.734224 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="623a0db3-27c7-4e16-b902-784517ccad46" containerName="manager" Jan 20 18:46:58 crc kubenswrapper[4558]: E0120 18:46:58.734290 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0842b2d6-f024-4981-bc33-7af5f2cb702e" containerName="setup-container" Jan 20 18:46:58 crc kubenswrapper[4558]: I0120 18:46:58.734346 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0842b2d6-f024-4981-bc33-7af5f2cb702e" containerName="setup-container" Jan 20 18:46:58 crc kubenswrapper[4558]: E0120 18:46:58.734394 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a5d2627-1730-47b3-94f9-110dc23d42bc" containerName="mariadb-account-delete" Jan 20 18:46:58 crc kubenswrapper[4558]: I0120 18:46:58.734435 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a5d2627-1730-47b3-94f9-110dc23d42bc" containerName="mariadb-account-delete" Jan 20 18:46:58 crc kubenswrapper[4558]: E0120 18:46:58.734479 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="761b8e2c-cee0-49d5-8310-3eaf29af06bc" containerName="galera" Jan 20 18:46:58 crc kubenswrapper[4558]: I0120 18:46:58.734520 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="761b8e2c-cee0-49d5-8310-3eaf29af06bc" containerName="galera" Jan 20 18:46:58 crc kubenswrapper[4558]: E0120 18:46:58.734569 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54015202-e6bd-4bf5-9b3f-df0dd67d24d9" containerName="memcached" Jan 20 18:46:58 crc kubenswrapper[4558]: I0120 18:46:58.734610 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="54015202-e6bd-4bf5-9b3f-df0dd67d24d9" containerName="memcached" Jan 20 18:46:58 crc kubenswrapper[4558]: E0120 18:46:58.734656 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6ca7feb-5a45-46a4-987c-6f58d12c94ae" containerName="registry-server" Jan 20 18:46:58 crc kubenswrapper[4558]: I0120 18:46:58.734696 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6ca7feb-5a45-46a4-987c-6f58d12c94ae" containerName="registry-server" Jan 20 18:46:58 crc kubenswrapper[4558]: E0120 18:46:58.734742 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0842b2d6-f024-4981-bc33-7af5f2cb702e" containerName="rabbitmq" Jan 20 18:46:58 crc kubenswrapper[4558]: I0120 18:46:58.734783 4558 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="0842b2d6-f024-4981-bc33-7af5f2cb702e" containerName="rabbitmq" Jan 20 18:46:58 crc kubenswrapper[4558]: E0120 18:46:58.734831 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d062a00c-aa04-4761-97a1-0a89f1f5cfa1" containerName="operator" Jan 20 18:46:58 crc kubenswrapper[4558]: I0120 18:46:58.734907 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d062a00c-aa04-4761-97a1-0a89f1f5cfa1" containerName="operator" Jan 20 18:46:58 crc kubenswrapper[4558]: E0120 18:46:58.734961 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0f454c3-2a4b-4d33-b137-2c92c0773378" containerName="galera" Jan 20 18:46:58 crc kubenswrapper[4558]: I0120 18:46:58.735002 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0f454c3-2a4b-4d33-b137-2c92c0773378" containerName="galera" Jan 20 18:46:58 crc kubenswrapper[4558]: E0120 18:46:58.735053 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd5771a4-2a64-4d8d-9092-843789e8f695" containerName="manager" Jan 20 18:46:58 crc kubenswrapper[4558]: I0120 18:46:58.735095 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd5771a4-2a64-4d8d-9092-843789e8f695" containerName="manager" Jan 20 18:46:58 crc kubenswrapper[4558]: E0120 18:46:58.735156 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0f454c3-2a4b-4d33-b137-2c92c0773378" containerName="mysql-bootstrap" Jan 20 18:46:58 crc kubenswrapper[4558]: I0120 18:46:58.735218 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0f454c3-2a4b-4d33-b137-2c92c0773378" containerName="mysql-bootstrap" Jan 20 18:46:58 crc kubenswrapper[4558]: E0120 18:46:58.735277 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="761b8e2c-cee0-49d5-8310-3eaf29af06bc" containerName="mysql-bootstrap" Jan 20 18:46:58 crc kubenswrapper[4558]: I0120 18:46:58.735321 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="761b8e2c-cee0-49d5-8310-3eaf29af06bc" containerName="mysql-bootstrap" Jan 20 18:46:58 crc kubenswrapper[4558]: E0120 18:46:58.735365 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08aa1ffd-1990-4e80-b8b2-ba4cc54c5057" containerName="registry-server" Jan 20 18:46:58 crc kubenswrapper[4558]: I0120 18:46:58.735404 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="08aa1ffd-1990-4e80-b8b2-ba4cc54c5057" containerName="registry-server" Jan 20 18:46:58 crc kubenswrapper[4558]: E0120 18:46:58.735451 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f1cb567-179f-4bd7-b76f-bcc43d2474f0" containerName="manager" Jan 20 18:46:58 crc kubenswrapper[4558]: I0120 18:46:58.735492 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f1cb567-179f-4bd7-b76f-bcc43d2474f0" containerName="manager" Jan 20 18:46:58 crc kubenswrapper[4558]: E0120 18:46:58.735537 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c42137c4-70ef-4a24-85f5-d76f3883620b" containerName="galera" Jan 20 18:46:58 crc kubenswrapper[4558]: I0120 18:46:58.735576 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c42137c4-70ef-4a24-85f5-d76f3883620b" containerName="galera" Jan 20 18:46:58 crc kubenswrapper[4558]: E0120 18:46:58.735621 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1e63d3ee-13d4-4b9c-894f-03861181bd49" containerName="keystone-api" Jan 20 18:46:58 crc kubenswrapper[4558]: I0120 18:46:58.735667 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1e63d3ee-13d4-4b9c-894f-03861181bd49" 
containerName="keystone-api" Jan 20 18:46:58 crc kubenswrapper[4558]: E0120 18:46:58.735721 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f0e4759-5bd0-4265-8781-57738bd9fc93" containerName="manager" Jan 20 18:46:58 crc kubenswrapper[4558]: I0120 18:46:58.735762 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f0e4759-5bd0-4265-8781-57738bd9fc93" containerName="manager" Jan 20 18:46:58 crc kubenswrapper[4558]: E0120 18:46:58.735805 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3dc106a-9ff2-49b9-bc47-3d986cec7afa" containerName="registry-server" Jan 20 18:46:58 crc kubenswrapper[4558]: I0120 18:46:58.735844 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3dc106a-9ff2-49b9-bc47-3d986cec7afa" containerName="registry-server" Jan 20 18:46:58 crc kubenswrapper[4558]: E0120 18:46:58.735886 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c42137c4-70ef-4a24-85f5-d76f3883620b" containerName="mysql-bootstrap" Jan 20 18:46:58 crc kubenswrapper[4558]: I0120 18:46:58.735926 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="c42137c4-70ef-4a24-85f5-d76f3883620b" containerName="mysql-bootstrap" Jan 20 18:46:58 crc kubenswrapper[4558]: E0120 18:46:58.735970 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="17600d40-7d65-476f-83be-d1cd168c750a" containerName="registry-server" Jan 20 18:46:58 crc kubenswrapper[4558]: I0120 18:46:58.736015 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="17600d40-7d65-476f-83be-d1cd168c750a" containerName="registry-server" Jan 20 18:46:58 crc kubenswrapper[4558]: I0120 18:46:58.736232 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d3dc106a-9ff2-49b9-bc47-3d986cec7afa" containerName="registry-server" Jan 20 18:46:58 crc kubenswrapper[4558]: I0120 18:46:58.736303 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="761b8e2c-cee0-49d5-8310-3eaf29af06bc" containerName="galera" Jan 20 18:46:58 crc kubenswrapper[4558]: I0120 18:46:58.736352 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="08aa1ffd-1990-4e80-b8b2-ba4cc54c5057" containerName="registry-server" Jan 20 18:46:58 crc kubenswrapper[4558]: I0120 18:46:58.736395 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="1e63d3ee-13d4-4b9c-894f-03861181bd49" containerName="keystone-api" Jan 20 18:46:58 crc kubenswrapper[4558]: I0120 18:46:58.736450 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="b6ca7feb-5a45-46a4-987c-6f58d12c94ae" containerName="registry-server" Jan 20 18:46:58 crc kubenswrapper[4558]: I0120 18:46:58.736501 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd5771a4-2a64-4d8d-9092-843789e8f695" containerName="manager" Jan 20 18:46:58 crc kubenswrapper[4558]: I0120 18:46:58.736549 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d062a00c-aa04-4761-97a1-0a89f1f5cfa1" containerName="operator" Jan 20 18:46:58 crc kubenswrapper[4558]: I0120 18:46:58.736591 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="623a0db3-27c7-4e16-b902-784517ccad46" containerName="manager" Jan 20 18:46:58 crc kubenswrapper[4558]: I0120 18:46:58.736634 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="17600d40-7d65-476f-83be-d1cd168c750a" containerName="registry-server" Jan 20 18:46:58 crc kubenswrapper[4558]: I0120 18:46:58.736677 4558 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="1a5d2627-1730-47b3-94f9-110dc23d42bc" containerName="mariadb-account-delete" Jan 20 18:46:58 crc kubenswrapper[4558]: I0120 18:46:58.736720 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="1a5d2627-1730-47b3-94f9-110dc23d42bc" containerName="mariadb-account-delete" Jan 20 18:46:58 crc kubenswrapper[4558]: I0120 18:46:58.736767 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="5995f068-2b84-4ad0-8dc7-a24b00cc8ed8" containerName="registry-server" Jan 20 18:46:58 crc kubenswrapper[4558]: I0120 18:46:58.736815 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f0e4759-5bd0-4265-8781-57738bd9fc93" containerName="manager" Jan 20 18:46:58 crc kubenswrapper[4558]: I0120 18:46:58.736860 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c42137c4-70ef-4a24-85f5-d76f3883620b" containerName="galera" Jan 20 18:46:58 crc kubenswrapper[4558]: I0120 18:46:58.736900 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="c0f454c3-2a4b-4d33-b137-2c92c0773378" containerName="galera" Jan 20 18:46:58 crc kubenswrapper[4558]: I0120 18:46:58.736941 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="54015202-e6bd-4bf5-9b3f-df0dd67d24d9" containerName="memcached" Jan 20 18:46:58 crc kubenswrapper[4558]: I0120 18:46:58.736983 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="0842b2d6-f024-4981-bc33-7af5f2cb702e" containerName="rabbitmq" Jan 20 18:46:58 crc kubenswrapper[4558]: I0120 18:46:58.737029 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="6f1cb567-179f-4bd7-b76f-bcc43d2474f0" containerName="manager" Jan 20 18:46:58 crc kubenswrapper[4558]: E0120 18:46:58.740206 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a5d2627-1730-47b3-94f9-110dc23d42bc" containerName="mariadb-account-delete" Jan 20 18:46:58 crc kubenswrapper[4558]: I0120 18:46:58.740294 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a5d2627-1730-47b3-94f9-110dc23d42bc" containerName="mariadb-account-delete" Jan 20 18:46:58 crc kubenswrapper[4558]: I0120 18:46:58.741002 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-k285q/must-gather-9xxbp" Jan 20 18:46:58 crc kubenswrapper[4558]: I0120 18:46:58.742761 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-k285q"/"openshift-service-ca.crt" Jan 20 18:46:58 crc kubenswrapper[4558]: I0120 18:46:58.753105 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-k285q"/"kube-root-ca.crt" Jan 20 18:46:58 crc kubenswrapper[4558]: I0120 18:46:58.753677 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-k285q/must-gather-9xxbp"] Jan 20 18:46:58 crc kubenswrapper[4558]: I0120 18:46:58.807838 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7h89w\" (UniqueName: \"kubernetes.io/projected/55c28545-405d-48f2-a2e7-a40a88d556f7-kube-api-access-7h89w\") pod \"must-gather-9xxbp\" (UID: \"55c28545-405d-48f2-a2e7-a40a88d556f7\") " pod="openshift-must-gather-k285q/must-gather-9xxbp" Jan 20 18:46:58 crc kubenswrapper[4558]: I0120 18:46:58.808151 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/55c28545-405d-48f2-a2e7-a40a88d556f7-must-gather-output\") pod \"must-gather-9xxbp\" (UID: \"55c28545-405d-48f2-a2e7-a40a88d556f7\") " pod="openshift-must-gather-k285q/must-gather-9xxbp" Jan 20 18:46:58 crc kubenswrapper[4558]: I0120 18:46:58.910719 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7h89w\" (UniqueName: \"kubernetes.io/projected/55c28545-405d-48f2-a2e7-a40a88d556f7-kube-api-access-7h89w\") pod \"must-gather-9xxbp\" (UID: \"55c28545-405d-48f2-a2e7-a40a88d556f7\") " pod="openshift-must-gather-k285q/must-gather-9xxbp" Jan 20 18:46:58 crc kubenswrapper[4558]: I0120 18:46:58.910853 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/55c28545-405d-48f2-a2e7-a40a88d556f7-must-gather-output\") pod \"must-gather-9xxbp\" (UID: \"55c28545-405d-48f2-a2e7-a40a88d556f7\") " pod="openshift-must-gather-k285q/must-gather-9xxbp" Jan 20 18:46:58 crc kubenswrapper[4558]: I0120 18:46:58.911830 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/55c28545-405d-48f2-a2e7-a40a88d556f7-must-gather-output\") pod \"must-gather-9xxbp\" (UID: \"55c28545-405d-48f2-a2e7-a40a88d556f7\") " pod="openshift-must-gather-k285q/must-gather-9xxbp" Jan 20 18:46:58 crc kubenswrapper[4558]: I0120 18:46:58.936673 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7h89w\" (UniqueName: \"kubernetes.io/projected/55c28545-405d-48f2-a2e7-a40a88d556f7-kube-api-access-7h89w\") pod \"must-gather-9xxbp\" (UID: \"55c28545-405d-48f2-a2e7-a40a88d556f7\") " pod="openshift-must-gather-k285q/must-gather-9xxbp" Jan 20 18:46:59 crc kubenswrapper[4558]: I0120 18:46:59.056415 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-k285q/must-gather-9xxbp" Jan 20 18:46:59 crc kubenswrapper[4558]: I0120 18:46:59.448527 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-k285q/must-gather-9xxbp"] Jan 20 18:46:59 crc kubenswrapper[4558]: I0120 18:46:59.680000 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-k285q/must-gather-9xxbp" event={"ID":"55c28545-405d-48f2-a2e7-a40a88d556f7","Type":"ContainerStarted","Data":"0dd36e101341812775154fd6561391ed3acc7b131cffacb0bd9f9ca5fc5d3e00"} Jan 20 18:47:05 crc kubenswrapper[4558]: I0120 18:47:05.729629 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-k285q/must-gather-9xxbp" event={"ID":"55c28545-405d-48f2-a2e7-a40a88d556f7","Type":"ContainerStarted","Data":"5e35e7c5e0a15939c110f9e46f4713f9645ea0bdf35e77b06eb0960a948dcecd"} Jan 20 18:47:05 crc kubenswrapper[4558]: I0120 18:47:05.730289 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-k285q/must-gather-9xxbp" event={"ID":"55c28545-405d-48f2-a2e7-a40a88d556f7","Type":"ContainerStarted","Data":"e3b13514e6a06f102f1094851c0e00df2e0099c9e370ab6524dc63aeaa972981"} Jan 20 18:47:05 crc kubenswrapper[4558]: I0120 18:47:05.744586 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-k285q/must-gather-9xxbp" podStartSLOduration=2.227597766 podStartE2EDuration="7.744570275s" podCreationTimestamp="2026-01-20 18:46:58 +0000 UTC" firstStartedPulling="2026-01-20 18:46:59.459685638 +0000 UTC m=+7513.220023605" lastFinishedPulling="2026-01-20 18:47:04.976658147 +0000 UTC m=+7518.736996114" observedRunningTime="2026-01-20 18:47:05.74207862 +0000 UTC m=+7519.502416587" watchObservedRunningTime="2026-01-20 18:47:05.744570275 +0000 UTC m=+7519.504908242" Jan 20 18:47:32 crc kubenswrapper[4558]: I0120 18:47:32.528960 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6968d8fdc4-d9p9k_5835ce2a-d074-4a95-aa34-ad3a62f77503/controller/0.log" Jan 20 18:47:32 crc kubenswrapper[4558]: I0120 18:47:32.536107 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6968d8fdc4-d9p9k_5835ce2a-d074-4a95-aa34-ad3a62f77503/kube-rbac-proxy/0.log" Jan 20 18:47:32 crc kubenswrapper[4558]: I0120 18:47:32.556121 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-b75qk_59de8a65-6da7-4086-93d4-d76fa35f7660/controller/0.log" Jan 20 18:47:33 crc kubenswrapper[4558]: I0120 18:47:33.801798 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-b75qk_59de8a65-6da7-4086-93d4-d76fa35f7660/frr/0.log" Jan 20 18:47:33 crc kubenswrapper[4558]: I0120 18:47:33.810265 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-b75qk_59de8a65-6da7-4086-93d4-d76fa35f7660/reloader/0.log" Jan 20 18:47:33 crc kubenswrapper[4558]: I0120 18:47:33.814258 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-b75qk_59de8a65-6da7-4086-93d4-d76fa35f7660/frr-metrics/0.log" Jan 20 18:47:33 crc kubenswrapper[4558]: I0120 18:47:33.822287 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-b75qk_59de8a65-6da7-4086-93d4-d76fa35f7660/kube-rbac-proxy/0.log" Jan 20 18:47:33 crc kubenswrapper[4558]: I0120 18:47:33.830693 4558 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-b75qk_59de8a65-6da7-4086-93d4-d76fa35f7660/kube-rbac-proxy-frr/0.log" Jan 20 18:47:33 crc kubenswrapper[4558]: I0120 18:47:33.835982 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-b75qk_59de8a65-6da7-4086-93d4-d76fa35f7660/cp-frr-files/0.log" Jan 20 18:47:33 crc kubenswrapper[4558]: I0120 18:47:33.842893 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-b75qk_59de8a65-6da7-4086-93d4-d76fa35f7660/cp-reloader/0.log" Jan 20 18:47:33 crc kubenswrapper[4558]: I0120 18:47:33.849115 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-b75qk_59de8a65-6da7-4086-93d4-d76fa35f7660/cp-metrics/0.log" Jan 20 18:47:33 crc kubenswrapper[4558]: I0120 18:47:33.857208 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7df86c4f6c-vf5fl_e7f30f4b-a84e-47b2-b393-c52757e6ca69/frr-k8s-webhook-server/0.log" Jan 20 18:47:33 crc kubenswrapper[4558]: I0120 18:47:33.878701 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-747dffd588-lx9c6_704a513a-fe43-4730-9f48-6c85506e338b/manager/0.log" Jan 20 18:47:33 crc kubenswrapper[4558]: I0120 18:47:33.885914 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-7d7c8846bf-bnzd9_e29496dd-2347-49e8-b4ca-1d071c0dcf2a/webhook-server/0.log" Jan 20 18:47:35 crc kubenswrapper[4558]: I0120 18:47:35.291087 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-w684s_241ae411-7e63-4bb6-a110-b9983a418f9e/speaker/0.log" Jan 20 18:47:35 crc kubenswrapper[4558]: I0120 18:47:35.298551 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-w684s_241ae411-7e63-4bb6-a110-b9983a418f9e/kube-rbac-proxy/0.log" Jan 20 18:47:39 crc kubenswrapper[4558]: I0120 18:47:39.729825 4558 scope.go:117] "RemoveContainer" containerID="1bb11f9c07ec5accf6528934fc37bdfda34aa8c9f39efea0a9349f0f1f35e493" Jan 20 18:47:39 crc kubenswrapper[4558]: I0120 18:47:39.753114 4558 scope.go:117] "RemoveContainer" containerID="228cd3a6fde62665397ae5ffd3353e6f53d4d82fb3ade94a962d5ddfcedc43e3" Jan 20 18:47:39 crc kubenswrapper[4558]: I0120 18:47:39.770464 4558 scope.go:117] "RemoveContainer" containerID="24e7b80a9c4427be34e1b04fd8ee61e8db3324e560f603763318c33836b97cf3" Jan 20 18:47:39 crc kubenswrapper[4558]: I0120 18:47:39.789858 4558 scope.go:117] "RemoveContainer" containerID="2569ffbff9f349bf3ef902262d977b2504a6cddf9adac5b4e8a4ead9d3b2eaa6" Jan 20 18:47:39 crc kubenswrapper[4558]: I0120 18:47:39.807332 4558 scope.go:117] "RemoveContainer" containerID="d3f06b4a5105b533a4fcf4a0a4aca89122b75111f61a99be6bfa56afb037e949" Jan 20 18:47:39 crc kubenswrapper[4558]: I0120 18:47:39.828996 4558 scope.go:117] "RemoveContainer" containerID="c2355e839f369cfd7b4ad021acc9b0854699f1f9fd51d98e91a204a902774df3" Jan 20 18:47:39 crc kubenswrapper[4558]: I0120 18:47:39.845404 4558 scope.go:117] "RemoveContainer" containerID="8e0fb96310ab3053ec08bfa9592bbc80ed7fdc59dd5d4aba121300c20f91c804" Jan 20 18:47:43 crc kubenswrapper[4558]: I0120 18:47:43.027763 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-4w5mk_119fb5fe-2460-4d2d-9db9-452afaa1e93e/control-plane-machine-set-operator/0.log" Jan 20 18:47:43 crc kubenswrapper[4558]: I0120 18:47:43.037116 4558 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-hsls5_350b4e8e-0147-4d0a-b9c0-1cdcdc2312f1/kube-rbac-proxy/0.log" Jan 20 18:47:43 crc kubenswrapper[4558]: I0120 18:47:43.047840 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-hsls5_350b4e8e-0147-4d0a-b9c0-1cdcdc2312f1/machine-api-operator/0.log" Jan 20 18:47:47 crc kubenswrapper[4558]: I0120 18:47:47.745203 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-86cb77c54b-4t55w_857afdf1-c962-4b4a-a79b-15547f6b407c/cert-manager-controller/0.log" Jan 20 18:47:47 crc kubenswrapper[4558]: I0120 18:47:47.803026 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-855d9ccff4-n85r2_dd2168dc-3cb1-45db-b8fd-7e112804ffcd/cert-manager-cainjector/0.log" Jan 20 18:47:47 crc kubenswrapper[4558]: I0120 18:47:47.815024 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-f4fb5df64-s79l7_0aafb007-c951-4a3f-90ee-14897538c76d/cert-manager-webhook/0.log" Jan 20 18:47:51 crc kubenswrapper[4558]: I0120 18:47:51.943344 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7754f76f8b-n5w94_7abfa901-9433-43f3-8f51-1da05d50f84d/nmstate-console-plugin/0.log" Jan 20 18:47:51 crc kubenswrapper[4558]: I0120 18:47:51.960875 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-btt28_f54cd5f9-30bf-494b-8528-4f25f8fa1521/nmstate-handler/0.log" Jan 20 18:47:51 crc kubenswrapper[4558]: I0120 18:47:51.968906 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-54757c584b-l6chb_1b599dc7-9188-4dbc-ad4a-32db989cb635/nmstate-metrics/0.log" Jan 20 18:47:51 crc kubenswrapper[4558]: I0120 18:47:51.979461 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-54757c584b-l6chb_1b599dc7-9188-4dbc-ad4a-32db989cb635/kube-rbac-proxy/0.log" Jan 20 18:47:51 crc kubenswrapper[4558]: I0120 18:47:51.993816 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-646758c888-r2zgd_b1c60395-86ef-4c6c-8432-b2bc357aac2d/nmstate-operator/0.log" Jan 20 18:47:52 crc kubenswrapper[4558]: I0120 18:47:52.006707 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-8474b5b9d8-t4zzd_ecd5abb7-faaf-42b3-8698-71ea80253e9d/nmstate-webhook/0.log" Jan 20 18:47:56 crc kubenswrapper[4558]: I0120 18:47:56.354433 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-68bc856cb9-nfzlw_fef8d546-d905-4701-ac1e-09cd4c4b1ed8/prometheus-operator/0.log" Jan 20 18:47:56 crc kubenswrapper[4558]: I0120 18:47:56.373571 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-558658b7b5-cpc8s_42ac1cad-9697-4a87-974d-f1dfe31a3627/prometheus-operator-admission-webhook/0.log" Jan 20 18:47:56 crc kubenswrapper[4558]: I0120 18:47:56.382393 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-558658b7b5-hhs5k_8c562f2d-0fe1-4a6a-9664-00e2cad8c416/prometheus-operator-admission-webhook/0.log" Jan 20 18:47:56 crc kubenswrapper[4558]: I0120 18:47:56.411261 4558 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-operators_observability-operator-59bdc8b94-rlprt_0c2f4961-5cb9-4460-b8f2-ff20d5bafc08/operator/0.log" Jan 20 18:47:56 crc kubenswrapper[4558]: I0120 18:47:56.419152 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-5bf474d74f-6nh6n_6f4871c7-a64d-4d49-b830-66b6718d608a/perses-operator/0.log" Jan 20 18:47:57 crc kubenswrapper[4558]: I0120 18:47:57.329905 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 18:47:57 crc kubenswrapper[4558]: I0120 18:47:57.330346 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 18:48:01 crc kubenswrapper[4558]: I0120 18:48:01.238116 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6968d8fdc4-d9p9k_5835ce2a-d074-4a95-aa34-ad3a62f77503/controller/0.log" Jan 20 18:48:01 crc kubenswrapper[4558]: I0120 18:48:01.244540 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6968d8fdc4-d9p9k_5835ce2a-d074-4a95-aa34-ad3a62f77503/kube-rbac-proxy/0.log" Jan 20 18:48:01 crc kubenswrapper[4558]: I0120 18:48:01.261132 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-b75qk_59de8a65-6da7-4086-93d4-d76fa35f7660/controller/0.log" Jan 20 18:48:02 crc kubenswrapper[4558]: I0120 18:48:02.512286 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-b75qk_59de8a65-6da7-4086-93d4-d76fa35f7660/frr/0.log" Jan 20 18:48:02 crc kubenswrapper[4558]: I0120 18:48:02.522194 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-b75qk_59de8a65-6da7-4086-93d4-d76fa35f7660/reloader/0.log" Jan 20 18:48:02 crc kubenswrapper[4558]: I0120 18:48:02.528296 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-b75qk_59de8a65-6da7-4086-93d4-d76fa35f7660/frr-metrics/0.log" Jan 20 18:48:02 crc kubenswrapper[4558]: I0120 18:48:02.535628 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-b75qk_59de8a65-6da7-4086-93d4-d76fa35f7660/kube-rbac-proxy/0.log" Jan 20 18:48:02 crc kubenswrapper[4558]: I0120 18:48:02.544105 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-b75qk_59de8a65-6da7-4086-93d4-d76fa35f7660/kube-rbac-proxy-frr/0.log" Jan 20 18:48:02 crc kubenswrapper[4558]: I0120 18:48:02.549923 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-b75qk_59de8a65-6da7-4086-93d4-d76fa35f7660/cp-frr-files/0.log" Jan 20 18:48:02 crc kubenswrapper[4558]: I0120 18:48:02.557569 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-b75qk_59de8a65-6da7-4086-93d4-d76fa35f7660/cp-reloader/0.log" Jan 20 18:48:02 crc kubenswrapper[4558]: I0120 18:48:02.564297 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-b75qk_59de8a65-6da7-4086-93d4-d76fa35f7660/cp-metrics/0.log" Jan 20 18:48:02 crc kubenswrapper[4558]: I0120 18:48:02.576555 4558 
log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7df86c4f6c-vf5fl_e7f30f4b-a84e-47b2-b393-c52757e6ca69/frr-k8s-webhook-server/0.log" Jan 20 18:48:02 crc kubenswrapper[4558]: I0120 18:48:02.611539 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-747dffd588-lx9c6_704a513a-fe43-4730-9f48-6c85506e338b/manager/0.log" Jan 20 18:48:02 crc kubenswrapper[4558]: I0120 18:48:02.620080 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-7d7c8846bf-bnzd9_e29496dd-2347-49e8-b4ca-1d071c0dcf2a/webhook-server/0.log" Jan 20 18:48:03 crc kubenswrapper[4558]: I0120 18:48:03.952459 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-w684s_241ae411-7e63-4bb6-a110-b9983a418f9e/speaker/0.log" Jan 20 18:48:03 crc kubenswrapper[4558]: I0120 18:48:03.968894 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-w684s_241ae411-7e63-4bb6-a110-b9983a418f9e/kube-rbac-proxy/0.log" Jan 20 18:48:20 crc kubenswrapper[4558]: I0120 18:48:20.875100 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_dnsmasq-dnsmasq-84b9f45d47-68c5z_87f866b8-e393-493f-8b10-4758c516f4d7/dnsmasq-dns/0.log" Jan 20 18:48:20 crc kubenswrapper[4558]: I0120 18:48:20.884068 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_dnsmasq-dnsmasq-84b9f45d47-68c5z_87f866b8-e393-493f-8b10-4758c516f4d7/init/0.log" Jan 20 18:48:25 crc kubenswrapper[4558]: I0120 18:48:25.742827 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931ajdtsz_117625c2-239a-43e3-8bfa-69f6b5985a5c/extract/0.log" Jan 20 18:48:25 crc kubenswrapper[4558]: I0120 18:48:25.752796 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931ajdtsz_117625c2-239a-43e3-8bfa-69f6b5985a5c/util/0.log" Jan 20 18:48:25 crc kubenswrapper[4558]: I0120 18:48:25.784335 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931ajdtsz_117625c2-239a-43e3-8bfa-69f6b5985a5c/pull/0.log" Jan 20 18:48:25 crc kubenswrapper[4558]: I0120 18:48:25.792730 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zr95_53937263-221c-4ee7-87fb-d1b03392fd73/extract/0.log" Jan 20 18:48:25 crc kubenswrapper[4558]: I0120 18:48:25.802010 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zr95_53937263-221c-4ee7-87fb-d1b03392fd73/util/0.log" Jan 20 18:48:25 crc kubenswrapper[4558]: I0120 18:48:25.809141 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zr95_53937263-221c-4ee7-87fb-d1b03392fd73/pull/0.log" Jan 20 18:48:25 crc kubenswrapper[4558]: I0120 18:48:25.817071 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713dqrqd_3d57036a-69b1-4789-87cf-8d1cfe930ba4/extract/0.log" Jan 20 18:48:25 crc kubenswrapper[4558]: I0120 18:48:25.824428 4558 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713dqrqd_3d57036a-69b1-4789-87cf-8d1cfe930ba4/util/0.log" Jan 20 18:48:25 crc kubenswrapper[4558]: I0120 18:48:25.830077 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713dqrqd_3d57036a-69b1-4789-87cf-8d1cfe930ba4/pull/0.log" Jan 20 18:48:25 crc kubenswrapper[4558]: I0120 18:48:25.840189 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08jrb5p_a606311e-7c02-48a9-9f83-46f862549670/extract/0.log" Jan 20 18:48:25 crc kubenswrapper[4558]: I0120 18:48:25.845978 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08jrb5p_a606311e-7c02-48a9-9f83-46f862549670/util/0.log" Jan 20 18:48:25 crc kubenswrapper[4558]: I0120 18:48:25.852843 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08jrb5p_a606311e-7c02-48a9-9f83-46f862549670/pull/0.log" Jan 20 18:48:26 crc kubenswrapper[4558]: I0120 18:48:26.436738 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-v2gb7_7d3d2009-8b03-4d74-881b-1dd341ead556/registry-server/0.log" Jan 20 18:48:26 crc kubenswrapper[4558]: I0120 18:48:26.446182 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-v2gb7_7d3d2009-8b03-4d74-881b-1dd341ead556/extract-utilities/0.log" Jan 20 18:48:26 crc kubenswrapper[4558]: I0120 18:48:26.455725 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-v2gb7_7d3d2009-8b03-4d74-881b-1dd341ead556/extract-content/0.log" Jan 20 18:48:27 crc kubenswrapper[4558]: I0120 18:48:27.329443 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 18:48:27 crc kubenswrapper[4558]: I0120 18:48:27.329503 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 18:48:27 crc kubenswrapper[4558]: I0120 18:48:27.858194 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-hgqdq_da87968b-3eb6-4478-bf20-ec2b8d7bbf95/registry-server/0.log" Jan 20 18:48:27 crc kubenswrapper[4558]: I0120 18:48:27.865927 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-hgqdq_da87968b-3eb6-4478-bf20-ec2b8d7bbf95/extract-utilities/0.log" Jan 20 18:48:27 crc kubenswrapper[4558]: I0120 18:48:27.873472 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-hgqdq_da87968b-3eb6-4478-bf20-ec2b8d7bbf95/extract-content/0.log" Jan 20 18:48:27 crc kubenswrapper[4558]: I0120 18:48:27.890450 4558 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-fhnw5_6e227740-1076-4ebf-9fd4-b1ae12cc7beb/marketplace-operator/0.log" Jan 20 18:48:28 crc kubenswrapper[4558]: I0120 18:48:28.171159 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-fnqb8_3e16ce3b-593e-4c3d-a1c0-ab3c557da830/registry-server/0.log" Jan 20 18:48:28 crc kubenswrapper[4558]: I0120 18:48:28.180085 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-fnqb8_3e16ce3b-593e-4c3d-a1c0-ab3c557da830/extract-utilities/0.log" Jan 20 18:48:28 crc kubenswrapper[4558]: I0120 18:48:28.188879 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-fnqb8_3e16ce3b-593e-4c3d-a1c0-ab3c557da830/extract-content/0.log" Jan 20 18:48:29 crc kubenswrapper[4558]: I0120 18:48:29.351397 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-sj9bb_282a24f3-5eac-4fa8-8a40-6260b94e2164/registry-server/0.log" Jan 20 18:48:29 crc kubenswrapper[4558]: I0120 18:48:29.356940 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-sj9bb_282a24f3-5eac-4fa8-8a40-6260b94e2164/extract-utilities/0.log" Jan 20 18:48:29 crc kubenswrapper[4558]: I0120 18:48:29.386666 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-sj9bb_282a24f3-5eac-4fa8-8a40-6260b94e2164/extract-content/0.log" Jan 20 18:48:31 crc kubenswrapper[4558]: I0120 18:48:31.032007 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-68bc856cb9-nfzlw_fef8d546-d905-4701-ac1e-09cd4c4b1ed8/prometheus-operator/0.log" Jan 20 18:48:31 crc kubenswrapper[4558]: I0120 18:48:31.042641 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-558658b7b5-cpc8s_42ac1cad-9697-4a87-974d-f1dfe31a3627/prometheus-operator-admission-webhook/0.log" Jan 20 18:48:31 crc kubenswrapper[4558]: I0120 18:48:31.052857 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-558658b7b5-hhs5k_8c562f2d-0fe1-4a6a-9664-00e2cad8c416/prometheus-operator-admission-webhook/0.log" Jan 20 18:48:31 crc kubenswrapper[4558]: I0120 18:48:31.079132 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-59bdc8b94-rlprt_0c2f4961-5cb9-4460-b8f2-ff20d5bafc08/operator/0.log" Jan 20 18:48:31 crc kubenswrapper[4558]: I0120 18:48:31.089237 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-5bf474d74f-6nh6n_6f4871c7-a64d-4d49-b830-66b6718d608a/perses-operator/0.log" Jan 20 18:48:39 crc kubenswrapper[4558]: I0120 18:48:39.931795 4558 scope.go:117] "RemoveContainer" containerID="8e273e5599d4ea2c4600eb6a4b8b33ea5e7cb5c15d64e2ec21c0e12e22101826" Jan 20 18:48:39 crc kubenswrapper[4558]: I0120 18:48:39.953080 4558 scope.go:117] "RemoveContainer" containerID="30e0b5f5b1cb5c59016bfc7ad6bdad93db6731d03d725f58d00f4dc03b92bc5c" Jan 20 18:48:39 crc kubenswrapper[4558]: I0120 18:48:39.983452 4558 scope.go:117] "RemoveContainer" containerID="9626062908b705c00d0d1f4d94cb7c82140890fa637b50c8deb2205fc671efd7" Jan 20 18:48:40 crc kubenswrapper[4558]: I0120 18:48:40.002504 4558 scope.go:117] "RemoveContainer" 
containerID="cbed135122fbd8fe1ca3d82bfc9ad6d6aa915bb48d915e47024000c4256b643d" Jan 20 18:48:40 crc kubenswrapper[4558]: I0120 18:48:40.022988 4558 scope.go:117] "RemoveContainer" containerID="fab2ee8cde5caae0741da173af35897e3844e829215f358aafdf4ce9852ff0d7" Jan 20 18:48:40 crc kubenswrapper[4558]: I0120 18:48:40.054814 4558 scope.go:117] "RemoveContainer" containerID="97142091aa6d643d811b188bd0da1649829bee35d0962992a649a3005e47ee4c" Jan 20 18:48:57 crc kubenswrapper[4558]: I0120 18:48:57.329763 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 18:48:57 crc kubenswrapper[4558]: I0120 18:48:57.330297 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 18:48:57 crc kubenswrapper[4558]: I0120 18:48:57.330344 4558 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" Jan 20 18:48:57 crc kubenswrapper[4558]: I0120 18:48:57.331037 4558 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"499ec313cf27a56b071eb6186a175031344542ae6ab21658ac5fb0feccc71012"} pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 20 18:48:57 crc kubenswrapper[4558]: I0120 18:48:57.331092 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" containerID="cri-o://499ec313cf27a56b071eb6186a175031344542ae6ab21658ac5fb0feccc71012" gracePeriod=600 Jan 20 18:48:57 crc kubenswrapper[4558]: E0120 18:48:57.474604 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:48:57 crc kubenswrapper[4558]: I0120 18:48:57.503888 4558 generic.go:334] "Generic (PLEG): container finished" podID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerID="499ec313cf27a56b071eb6186a175031344542ae6ab21658ac5fb0feccc71012" exitCode=0 Jan 20 18:48:57 crc kubenswrapper[4558]: I0120 18:48:57.503921 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerDied","Data":"499ec313cf27a56b071eb6186a175031344542ae6ab21658ac5fb0feccc71012"} Jan 20 18:48:57 crc kubenswrapper[4558]: I0120 18:48:57.503969 4558 scope.go:117] "RemoveContainer" containerID="46c3b6e0005c86abe846af98397dbd3ec4bce46d6d33d8269c6f5d826617c1cf" Jan 20 18:48:57 crc kubenswrapper[4558]: I0120 18:48:57.504375 4558 
scope.go:117] "RemoveContainer" containerID="499ec313cf27a56b071eb6186a175031344542ae6ab21658ac5fb0feccc71012" Jan 20 18:48:57 crc kubenswrapper[4558]: E0120 18:48:57.504588 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:49:10 crc kubenswrapper[4558]: I0120 18:49:10.566011 4558 scope.go:117] "RemoveContainer" containerID="499ec313cf27a56b071eb6186a175031344542ae6ab21658ac5fb0feccc71012" Jan 20 18:49:10 crc kubenswrapper[4558]: E0120 18:49:10.566813 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:49:25 crc kubenswrapper[4558]: I0120 18:49:25.566803 4558 scope.go:117] "RemoveContainer" containerID="499ec313cf27a56b071eb6186a175031344542ae6ab21658ac5fb0feccc71012" Jan 20 18:49:25 crc kubenswrapper[4558]: E0120 18:49:25.567582 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:49:38 crc kubenswrapper[4558]: I0120 18:49:38.565914 4558 scope.go:117] "RemoveContainer" containerID="499ec313cf27a56b071eb6186a175031344542ae6ab21658ac5fb0feccc71012" Jan 20 18:49:38 crc kubenswrapper[4558]: E0120 18:49:38.566695 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:49:40 crc kubenswrapper[4558]: I0120 18:49:40.107280 4558 scope.go:117] "RemoveContainer" containerID="52bea5df6af3d3230afdec9d37339147b7f7a04f0233e8325443b5dbf6807e1a" Jan 20 18:49:40 crc kubenswrapper[4558]: I0120 18:49:40.131187 4558 scope.go:117] "RemoveContainer" containerID="96270ee0588590f74d43d05f70fefc4015a2d7ef2c13af4116bf8847338dee52" Jan 20 18:49:40 crc kubenswrapper[4558]: I0120 18:49:40.162455 4558 scope.go:117] "RemoveContainer" containerID="f344bf4154d72c6f0f375558c32584bb199dea07b741de2ee0bf0a9985efffe5" Jan 20 18:49:40 crc kubenswrapper[4558]: I0120 18:49:40.181002 4558 scope.go:117] "RemoveContainer" containerID="41175a9819f276c55db011dad595ab834483c95a3df0be7eda373dd7bf174b5a" Jan 20 18:49:40 crc kubenswrapper[4558]: I0120 18:49:40.195016 4558 scope.go:117] "RemoveContainer" containerID="d54e0faf8f75a1568bdfd7fb47cf9ef381e5576856c397db3cfb8744ff9b7938" Jan 20 18:49:40 crc 
kubenswrapper[4558]: I0120 18:49:40.232465 4558 scope.go:117] "RemoveContainer" containerID="30380171873dc7b4ae6fec296abf518f7899299f15631980b87e5b9966a9b40e" Jan 20 18:49:40 crc kubenswrapper[4558]: I0120 18:49:40.247185 4558 scope.go:117] "RemoveContainer" containerID="7e5b52f231b342cd34c453344723a866f4c235117492bc415f83e04d18f37e25" Jan 20 18:49:49 crc kubenswrapper[4558]: I0120 18:49:49.566077 4558 scope.go:117] "RemoveContainer" containerID="499ec313cf27a56b071eb6186a175031344542ae6ab21658ac5fb0feccc71012" Jan 20 18:49:49 crc kubenswrapper[4558]: E0120 18:49:49.566862 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:50:01 crc kubenswrapper[4558]: I0120 18:50:01.566471 4558 scope.go:117] "RemoveContainer" containerID="499ec313cf27a56b071eb6186a175031344542ae6ab21658ac5fb0feccc71012" Jan 20 18:50:01 crc kubenswrapper[4558]: E0120 18:50:01.567622 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:50:04 crc kubenswrapper[4558]: I0120 18:50:04.023342 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-68bc856cb9-nfzlw_fef8d546-d905-4701-ac1e-09cd4c4b1ed8/prometheus-operator/0.log" Jan 20 18:50:04 crc kubenswrapper[4558]: I0120 18:50:04.031339 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-558658b7b5-cpc8s_42ac1cad-9697-4a87-974d-f1dfe31a3627/prometheus-operator-admission-webhook/0.log" Jan 20 18:50:04 crc kubenswrapper[4558]: I0120 18:50:04.040344 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-558658b7b5-hhs5k_8c562f2d-0fe1-4a6a-9664-00e2cad8c416/prometheus-operator-admission-webhook/0.log" Jan 20 18:50:04 crc kubenswrapper[4558]: I0120 18:50:04.064774 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-59bdc8b94-rlprt_0c2f4961-5cb9-4460-b8f2-ff20d5bafc08/operator/0.log" Jan 20 18:50:04 crc kubenswrapper[4558]: I0120 18:50:04.072203 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-5bf474d74f-6nh6n_6f4871c7-a64d-4d49-b830-66b6718d608a/perses-operator/0.log" Jan 20 18:50:04 crc kubenswrapper[4558]: I0120 18:50:04.441481 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-86cb77c54b-4t55w_857afdf1-c962-4b4a-a79b-15547f6b407c/cert-manager-controller/0.log" Jan 20 18:50:04 crc kubenswrapper[4558]: I0120 18:50:04.514910 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-855d9ccff4-n85r2_dd2168dc-3cb1-45db-b8fd-7e112804ffcd/cert-manager-cainjector/0.log" Jan 20 18:50:04 crc kubenswrapper[4558]: I0120 18:50:04.525379 4558 log.go:25] 
"Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-f4fb5df64-s79l7_0aafb007-c951-4a3f-90ee-14897538c76d/cert-manager-webhook/0.log" Jan 20 18:50:05 crc kubenswrapper[4558]: I0120 18:50:05.188967 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6968d8fdc4-d9p9k_5835ce2a-d074-4a95-aa34-ad3a62f77503/controller/0.log" Jan 20 18:50:05 crc kubenswrapper[4558]: I0120 18:50:05.195939 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6968d8fdc4-d9p9k_5835ce2a-d074-4a95-aa34-ad3a62f77503/kube-rbac-proxy/0.log" Jan 20 18:50:05 crc kubenswrapper[4558]: I0120 18:50:05.219367 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-b75qk_59de8a65-6da7-4086-93d4-d76fa35f7660/controller/0.log" Jan 20 18:50:05 crc kubenswrapper[4558]: I0120 18:50:05.355184 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7754f76f8b-n5w94_7abfa901-9433-43f3-8f51-1da05d50f84d/nmstate-console-plugin/0.log" Jan 20 18:50:05 crc kubenswrapper[4558]: I0120 18:50:05.374020 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-btt28_f54cd5f9-30bf-494b-8528-4f25f8fa1521/nmstate-handler/0.log" Jan 20 18:50:05 crc kubenswrapper[4558]: I0120 18:50:05.383970 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-54757c584b-l6chb_1b599dc7-9188-4dbc-ad4a-32db989cb635/nmstate-metrics/0.log" Jan 20 18:50:05 crc kubenswrapper[4558]: I0120 18:50:05.390952 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-54757c584b-l6chb_1b599dc7-9188-4dbc-ad4a-32db989cb635/kube-rbac-proxy/0.log" Jan 20 18:50:05 crc kubenswrapper[4558]: I0120 18:50:05.409521 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-646758c888-r2zgd_b1c60395-86ef-4c6c-8432-b2bc357aac2d/nmstate-operator/0.log" Jan 20 18:50:05 crc kubenswrapper[4558]: I0120 18:50:05.419098 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-8474b5b9d8-t4zzd_ecd5abb7-faaf-42b3-8698-71ea80253e9d/nmstate-webhook/0.log" Jan 20 18:50:06 crc kubenswrapper[4558]: I0120 18:50:06.205530 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-b75qk_59de8a65-6da7-4086-93d4-d76fa35f7660/frr/0.log" Jan 20 18:50:06 crc kubenswrapper[4558]: I0120 18:50:06.213590 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-b75qk_59de8a65-6da7-4086-93d4-d76fa35f7660/reloader/0.log" Jan 20 18:50:06 crc kubenswrapper[4558]: I0120 18:50:06.220281 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-b75qk_59de8a65-6da7-4086-93d4-d76fa35f7660/frr-metrics/0.log" Jan 20 18:50:06 crc kubenswrapper[4558]: I0120 18:50:06.227623 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-b75qk_59de8a65-6da7-4086-93d4-d76fa35f7660/kube-rbac-proxy/0.log" Jan 20 18:50:06 crc kubenswrapper[4558]: I0120 18:50:06.233279 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-b75qk_59de8a65-6da7-4086-93d4-d76fa35f7660/kube-rbac-proxy-frr/0.log" Jan 20 18:50:06 crc kubenswrapper[4558]: I0120 18:50:06.239957 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-b75qk_59de8a65-6da7-4086-93d4-d76fa35f7660/cp-frr-files/0.log" Jan 20 18:50:06 
crc kubenswrapper[4558]: I0120 18:50:06.245716 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-b75qk_59de8a65-6da7-4086-93d4-d76fa35f7660/cp-reloader/0.log" Jan 20 18:50:06 crc kubenswrapper[4558]: I0120 18:50:06.254475 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-b75qk_59de8a65-6da7-4086-93d4-d76fa35f7660/cp-metrics/0.log" Jan 20 18:50:06 crc kubenswrapper[4558]: I0120 18:50:06.271248 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7df86c4f6c-vf5fl_e7f30f4b-a84e-47b2-b393-c52757e6ca69/frr-k8s-webhook-server/0.log" Jan 20 18:50:06 crc kubenswrapper[4558]: I0120 18:50:06.291959 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-747dffd588-lx9c6_704a513a-fe43-4730-9f48-6c85506e338b/manager/0.log" Jan 20 18:50:06 crc kubenswrapper[4558]: I0120 18:50:06.308652 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-7d7c8846bf-bnzd9_e29496dd-2347-49e8-b4ca-1d071c0dcf2a/webhook-server/0.log" Jan 20 18:50:07 crc kubenswrapper[4558]: I0120 18:50:07.374930 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-w684s_241ae411-7e63-4bb6-a110-b9983a418f9e/speaker/0.log" Jan 20 18:50:07 crc kubenswrapper[4558]: I0120 18:50:07.385762 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-w684s_241ae411-7e63-4bb6-a110-b9983a418f9e/kube-rbac-proxy/0.log" Jan 20 18:50:08 crc kubenswrapper[4558]: I0120 18:50:08.388183 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-86cb77c54b-4t55w_857afdf1-c962-4b4a-a79b-15547f6b407c/cert-manager-controller/0.log" Jan 20 18:50:08 crc kubenswrapper[4558]: I0120 18:50:08.458150 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-855d9ccff4-n85r2_dd2168dc-3cb1-45db-b8fd-7e112804ffcd/cert-manager-cainjector/0.log" Jan 20 18:50:08 crc kubenswrapper[4558]: I0120 18:50:08.469194 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-f4fb5df64-s79l7_0aafb007-c951-4a3f-90ee-14897538c76d/cert-manager-webhook/0.log" Jan 20 18:50:09 crc kubenswrapper[4558]: I0120 18:50:09.024463 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-4w5mk_119fb5fe-2460-4d2d-9db9-452afaa1e93e/control-plane-machine-set-operator/0.log" Jan 20 18:50:09 crc kubenswrapper[4558]: I0120 18:50:09.037243 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-hsls5_350b4e8e-0147-4d0a-b9c0-1cdcdc2312f1/kube-rbac-proxy/0.log" Jan 20 18:50:09 crc kubenswrapper[4558]: I0120 18:50:09.045555 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-hsls5_350b4e8e-0147-4d0a-b9c0-1cdcdc2312f1/machine-api-operator/0.log" Jan 20 18:50:10 crc kubenswrapper[4558]: I0120 18:50:10.252737 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-additional-cni-plugins-f5t7h_0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4/kube-multus-additional-cni-plugins/0.log" Jan 20 18:50:10 crc kubenswrapper[4558]: I0120 18:50:10.259303 4558 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-multus_multus-additional-cni-plugins-f5t7h_0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4/egress-router-binary-copy/0.log" Jan 20 18:50:10 crc kubenswrapper[4558]: I0120 18:50:10.269660 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-additional-cni-plugins-f5t7h_0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4/cni-plugins/0.log" Jan 20 18:50:10 crc kubenswrapper[4558]: I0120 18:50:10.276575 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-additional-cni-plugins-f5t7h_0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4/bond-cni-plugin/0.log" Jan 20 18:50:10 crc kubenswrapper[4558]: I0120 18:50:10.282316 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-additional-cni-plugins-f5t7h_0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4/routeoverride-cni/0.log" Jan 20 18:50:10 crc kubenswrapper[4558]: I0120 18:50:10.288218 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-additional-cni-plugins-f5t7h_0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4/whereabouts-cni-bincopy/0.log" Jan 20 18:50:10 crc kubenswrapper[4558]: I0120 18:50:10.294823 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-additional-cni-plugins-f5t7h_0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4/whereabouts-cni/0.log" Jan 20 18:50:10 crc kubenswrapper[4558]: I0120 18:50:10.349281 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-admission-controller-857f4d67dd-7wfbg_34b7c77f-6a7d-43de-9ee4-bdba78dc8248/multus-admission-controller/0.log" Jan 20 18:50:10 crc kubenswrapper[4558]: I0120 18:50:10.356541 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-admission-controller-857f4d67dd-7wfbg_34b7c77f-6a7d-43de-9ee4-bdba78dc8248/kube-rbac-proxy/0.log" Jan 20 18:50:10 crc kubenswrapper[4558]: I0120 18:50:10.403643 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-jsqvf_bedf08c7-1f93-4931-a7f3-e729e2a137af/kube-multus/2.log" Jan 20 18:50:10 crc kubenswrapper[4558]: I0120 18:50:10.864095 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-jsqvf_bedf08c7-1f93-4931-a7f3-e729e2a137af/kube-multus/3.log" Jan 20 18:50:11 crc kubenswrapper[4558]: I0120 18:50:11.014975 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_network-metrics-daemon-9wrq6_30032328-bd33-4073-9366-e10bc5e2aa77/network-metrics-daemon/0.log" Jan 20 18:50:11 crc kubenswrapper[4558]: I0120 18:50:11.033828 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_network-metrics-daemon-9wrq6_30032328-bd33-4073-9366-e10bc5e2aa77/kube-rbac-proxy/0.log" Jan 20 18:50:14 crc kubenswrapper[4558]: I0120 18:50:14.566762 4558 scope.go:117] "RemoveContainer" containerID="499ec313cf27a56b071eb6186a175031344542ae6ab21658ac5fb0feccc71012" Jan 20 18:50:14 crc kubenswrapper[4558]: E0120 18:50:14.567281 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:50:28 crc kubenswrapper[4558]: I0120 18:50:28.566456 4558 scope.go:117] "RemoveContainer" 
containerID="499ec313cf27a56b071eb6186a175031344542ae6ab21658ac5fb0feccc71012" Jan 20 18:50:28 crc kubenswrapper[4558]: E0120 18:50:28.567421 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:50:40 crc kubenswrapper[4558]: I0120 18:50:40.566279 4558 scope.go:117] "RemoveContainer" containerID="499ec313cf27a56b071eb6186a175031344542ae6ab21658ac5fb0feccc71012" Jan 20 18:50:40 crc kubenswrapper[4558]: E0120 18:50:40.567157 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:50:52 crc kubenswrapper[4558]: I0120 18:50:52.567085 4558 scope.go:117] "RemoveContainer" containerID="499ec313cf27a56b071eb6186a175031344542ae6ab21658ac5fb0feccc71012" Jan 20 18:50:52 crc kubenswrapper[4558]: E0120 18:50:52.568061 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:51:04 crc kubenswrapper[4558]: I0120 18:51:04.565970 4558 scope.go:117] "RemoveContainer" containerID="499ec313cf27a56b071eb6186a175031344542ae6ab21658ac5fb0feccc71012" Jan 20 18:51:04 crc kubenswrapper[4558]: E0120 18:51:04.567089 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:51:18 crc kubenswrapper[4558]: I0120 18:51:18.569563 4558 scope.go:117] "RemoveContainer" containerID="499ec313cf27a56b071eb6186a175031344542ae6ab21658ac5fb0feccc71012" Jan 20 18:51:18 crc kubenswrapper[4558]: E0120 18:51:18.571852 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:51:31 crc kubenswrapper[4558]: I0120 18:51:31.566103 4558 scope.go:117] "RemoveContainer" containerID="499ec313cf27a56b071eb6186a175031344542ae6ab21658ac5fb0feccc71012" Jan 20 18:51:31 crc kubenswrapper[4558]: E0120 18:51:31.567110 4558 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:51:45 crc kubenswrapper[4558]: I0120 18:51:45.566787 4558 scope.go:117] "RemoveContainer" containerID="499ec313cf27a56b071eb6186a175031344542ae6ab21658ac5fb0feccc71012" Jan 20 18:51:45 crc kubenswrapper[4558]: E0120 18:51:45.568015 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:51:58 crc kubenswrapper[4558]: I0120 18:51:58.568373 4558 scope.go:117] "RemoveContainer" containerID="499ec313cf27a56b071eb6186a175031344542ae6ab21658ac5fb0feccc71012" Jan 20 18:51:58 crc kubenswrapper[4558]: E0120 18:51:58.569451 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:52:09 crc kubenswrapper[4558]: I0120 18:52:09.566133 4558 scope.go:117] "RemoveContainer" containerID="499ec313cf27a56b071eb6186a175031344542ae6ab21658ac5fb0feccc71012" Jan 20 18:52:09 crc kubenswrapper[4558]: E0120 18:52:09.566937 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:52:21 crc kubenswrapper[4558]: I0120 18:52:21.566067 4558 scope.go:117] "RemoveContainer" containerID="499ec313cf27a56b071eb6186a175031344542ae6ab21658ac5fb0feccc71012" Jan 20 18:52:21 crc kubenswrapper[4558]: E0120 18:52:21.567889 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:52:36 crc kubenswrapper[4558]: I0120 18:52:36.569751 4558 scope.go:117] "RemoveContainer" containerID="499ec313cf27a56b071eb6186a175031344542ae6ab21658ac5fb0feccc71012" Jan 20 18:52:36 crc kubenswrapper[4558]: E0120 18:52:36.570872 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:52:50 crc kubenswrapper[4558]: I0120 18:52:50.566549 4558 scope.go:117] "RemoveContainer" containerID="499ec313cf27a56b071eb6186a175031344542ae6ab21658ac5fb0feccc71012" Jan 20 18:52:50 crc kubenswrapper[4558]: E0120 18:52:50.567550 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:52:58 crc kubenswrapper[4558]: I0120 18:52:58.598983 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-dfvpb"] Jan 20 18:52:58 crc kubenswrapper[4558]: I0120 18:52:58.601640 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-dfvpb" Jan 20 18:52:58 crc kubenswrapper[4558]: I0120 18:52:58.609912 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-dfvpb"] Jan 20 18:52:58 crc kubenswrapper[4558]: I0120 18:52:58.681847 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a825b673-344a-4233-9889-f3bb55b4f544-catalog-content\") pod \"redhat-operators-dfvpb\" (UID: \"a825b673-344a-4233-9889-f3bb55b4f544\") " pod="openshift-marketplace/redhat-operators-dfvpb" Jan 20 18:52:58 crc kubenswrapper[4558]: I0120 18:52:58.681897 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8gx4k\" (UniqueName: \"kubernetes.io/projected/a825b673-344a-4233-9889-f3bb55b4f544-kube-api-access-8gx4k\") pod \"redhat-operators-dfvpb\" (UID: \"a825b673-344a-4233-9889-f3bb55b4f544\") " pod="openshift-marketplace/redhat-operators-dfvpb" Jan 20 18:52:58 crc kubenswrapper[4558]: I0120 18:52:58.681977 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a825b673-344a-4233-9889-f3bb55b4f544-utilities\") pod \"redhat-operators-dfvpb\" (UID: \"a825b673-344a-4233-9889-f3bb55b4f544\") " pod="openshift-marketplace/redhat-operators-dfvpb" Jan 20 18:52:58 crc kubenswrapper[4558]: I0120 18:52:58.783150 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a825b673-344a-4233-9889-f3bb55b4f544-catalog-content\") pod \"redhat-operators-dfvpb\" (UID: \"a825b673-344a-4233-9889-f3bb55b4f544\") " pod="openshift-marketplace/redhat-operators-dfvpb" Jan 20 18:52:58 crc kubenswrapper[4558]: I0120 18:52:58.783228 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8gx4k\" (UniqueName: \"kubernetes.io/projected/a825b673-344a-4233-9889-f3bb55b4f544-kube-api-access-8gx4k\") pod \"redhat-operators-dfvpb\" (UID: \"a825b673-344a-4233-9889-f3bb55b4f544\") " pod="openshift-marketplace/redhat-operators-dfvpb" Jan 20 18:52:58 crc kubenswrapper[4558]: I0120 18:52:58.783288 4558 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a825b673-344a-4233-9889-f3bb55b4f544-utilities\") pod \"redhat-operators-dfvpb\" (UID: \"a825b673-344a-4233-9889-f3bb55b4f544\") " pod="openshift-marketplace/redhat-operators-dfvpb" Jan 20 18:52:58 crc kubenswrapper[4558]: I0120 18:52:58.783721 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a825b673-344a-4233-9889-f3bb55b4f544-utilities\") pod \"redhat-operators-dfvpb\" (UID: \"a825b673-344a-4233-9889-f3bb55b4f544\") " pod="openshift-marketplace/redhat-operators-dfvpb" Jan 20 18:52:58 crc kubenswrapper[4558]: I0120 18:52:58.783788 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a825b673-344a-4233-9889-f3bb55b4f544-catalog-content\") pod \"redhat-operators-dfvpb\" (UID: \"a825b673-344a-4233-9889-f3bb55b4f544\") " pod="openshift-marketplace/redhat-operators-dfvpb" Jan 20 18:52:58 crc kubenswrapper[4558]: I0120 18:52:58.803720 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8gx4k\" (UniqueName: \"kubernetes.io/projected/a825b673-344a-4233-9889-f3bb55b4f544-kube-api-access-8gx4k\") pod \"redhat-operators-dfvpb\" (UID: \"a825b673-344a-4233-9889-f3bb55b4f544\") " pod="openshift-marketplace/redhat-operators-dfvpb" Jan 20 18:52:58 crc kubenswrapper[4558]: I0120 18:52:58.924039 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-dfvpb" Jan 20 18:52:59 crc kubenswrapper[4558]: I0120 18:52:59.123299 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-dfvpb"] Jan 20 18:53:00 crc kubenswrapper[4558]: I0120 18:53:00.049764 4558 generic.go:334] "Generic (PLEG): container finished" podID="a825b673-344a-4233-9889-f3bb55b4f544" containerID="9d0cddf81e77e2e2cd339c7aa8f8d14071bb5cc2d4aff1de9806a86afab6032b" exitCode=0 Jan 20 18:53:00 crc kubenswrapper[4558]: I0120 18:53:00.049860 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dfvpb" event={"ID":"a825b673-344a-4233-9889-f3bb55b4f544","Type":"ContainerDied","Data":"9d0cddf81e77e2e2cd339c7aa8f8d14071bb5cc2d4aff1de9806a86afab6032b"} Jan 20 18:53:00 crc kubenswrapper[4558]: I0120 18:53:00.050099 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dfvpb" event={"ID":"a825b673-344a-4233-9889-f3bb55b4f544","Type":"ContainerStarted","Data":"293ea0bf1e50da6a48c9b75541825a0526698ad86ea1d9ee7cc7e7734f7ccad1"} Jan 20 18:53:00 crc kubenswrapper[4558]: I0120 18:53:00.051712 4558 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 20 18:53:02 crc kubenswrapper[4558]: I0120 18:53:02.066204 4558 generic.go:334] "Generic (PLEG): container finished" podID="a825b673-344a-4233-9889-f3bb55b4f544" containerID="2a4ea4dc76a535c88f4c5f6f4656cdfbef784f14f7d00e1d600aff3657e5932d" exitCode=0 Jan 20 18:53:02 crc kubenswrapper[4558]: I0120 18:53:02.066535 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dfvpb" event={"ID":"a825b673-344a-4233-9889-f3bb55b4f544","Type":"ContainerDied","Data":"2a4ea4dc76a535c88f4c5f6f4656cdfbef784f14f7d00e1d600aff3657e5932d"} Jan 20 18:53:03 crc kubenswrapper[4558]: I0120 18:53:03.076088 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-operators-dfvpb" event={"ID":"a825b673-344a-4233-9889-f3bb55b4f544","Type":"ContainerStarted","Data":"9f0b8fe7090c12fb91ab56884d3a4ac175b1f1d88bb474c23e8dfb008974ce14"} Jan 20 18:53:03 crc kubenswrapper[4558]: I0120 18:53:03.095524 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-dfvpb" podStartSLOduration=2.605383777 podStartE2EDuration="5.095486947s" podCreationTimestamp="2026-01-20 18:52:58 +0000 UTC" firstStartedPulling="2026-01-20 18:53:00.051484516 +0000 UTC m=+7873.811822473" lastFinishedPulling="2026-01-20 18:53:02.541587676 +0000 UTC m=+7876.301925643" observedRunningTime="2026-01-20 18:53:03.092353967 +0000 UTC m=+7876.852691934" watchObservedRunningTime="2026-01-20 18:53:03.095486947 +0000 UTC m=+7876.855824914" Jan 20 18:53:04 crc kubenswrapper[4558]: I0120 18:53:04.568906 4558 scope.go:117] "RemoveContainer" containerID="499ec313cf27a56b071eb6186a175031344542ae6ab21658ac5fb0feccc71012" Jan 20 18:53:04 crc kubenswrapper[4558]: E0120 18:53:04.569551 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:53:08 crc kubenswrapper[4558]: I0120 18:53:08.925216 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-dfvpb" Jan 20 18:53:08 crc kubenswrapper[4558]: I0120 18:53:08.925840 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-dfvpb" Jan 20 18:53:08 crc kubenswrapper[4558]: I0120 18:53:08.964882 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-dfvpb" Jan 20 18:53:09 crc kubenswrapper[4558]: I0120 18:53:09.157031 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-dfvpb" Jan 20 18:53:09 crc kubenswrapper[4558]: I0120 18:53:09.203007 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-dfvpb"] Jan 20 18:53:11 crc kubenswrapper[4558]: I0120 18:53:11.138799 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-dfvpb" podUID="a825b673-344a-4233-9889-f3bb55b4f544" containerName="registry-server" containerID="cri-o://9f0b8fe7090c12fb91ab56884d3a4ac175b1f1d88bb474c23e8dfb008974ce14" gracePeriod=2 Jan 20 18:53:11 crc kubenswrapper[4558]: I0120 18:53:11.490907 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-dfvpb" Jan 20 18:53:11 crc kubenswrapper[4558]: I0120 18:53:11.545850 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8gx4k\" (UniqueName: \"kubernetes.io/projected/a825b673-344a-4233-9889-f3bb55b4f544-kube-api-access-8gx4k\") pod \"a825b673-344a-4233-9889-f3bb55b4f544\" (UID: \"a825b673-344a-4233-9889-f3bb55b4f544\") " Jan 20 18:53:11 crc kubenswrapper[4558]: I0120 18:53:11.545933 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a825b673-344a-4233-9889-f3bb55b4f544-utilities\") pod \"a825b673-344a-4233-9889-f3bb55b4f544\" (UID: \"a825b673-344a-4233-9889-f3bb55b4f544\") " Jan 20 18:53:11 crc kubenswrapper[4558]: I0120 18:53:11.546010 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a825b673-344a-4233-9889-f3bb55b4f544-catalog-content\") pod \"a825b673-344a-4233-9889-f3bb55b4f544\" (UID: \"a825b673-344a-4233-9889-f3bb55b4f544\") " Jan 20 18:53:11 crc kubenswrapper[4558]: I0120 18:53:11.548714 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a825b673-344a-4233-9889-f3bb55b4f544-utilities" (OuterVolumeSpecName: "utilities") pod "a825b673-344a-4233-9889-f3bb55b4f544" (UID: "a825b673-344a-4233-9889-f3bb55b4f544"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:53:11 crc kubenswrapper[4558]: I0120 18:53:11.553277 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a825b673-344a-4233-9889-f3bb55b4f544-kube-api-access-8gx4k" (OuterVolumeSpecName: "kube-api-access-8gx4k") pod "a825b673-344a-4233-9889-f3bb55b4f544" (UID: "a825b673-344a-4233-9889-f3bb55b4f544"). InnerVolumeSpecName "kube-api-access-8gx4k". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:53:11 crc kubenswrapper[4558]: I0120 18:53:11.648027 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8gx4k\" (UniqueName: \"kubernetes.io/projected/a825b673-344a-4233-9889-f3bb55b4f544-kube-api-access-8gx4k\") on node \"crc\" DevicePath \"\"" Jan 20 18:53:11 crc kubenswrapper[4558]: I0120 18:53:11.648065 4558 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a825b673-344a-4233-9889-f3bb55b4f544-utilities\") on node \"crc\" DevicePath \"\"" Jan 20 18:53:12 crc kubenswrapper[4558]: I0120 18:53:12.146321 4558 generic.go:334] "Generic (PLEG): container finished" podID="a825b673-344a-4233-9889-f3bb55b4f544" containerID="9f0b8fe7090c12fb91ab56884d3a4ac175b1f1d88bb474c23e8dfb008974ce14" exitCode=0 Jan 20 18:53:12 crc kubenswrapper[4558]: I0120 18:53:12.146369 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dfvpb" event={"ID":"a825b673-344a-4233-9889-f3bb55b4f544","Type":"ContainerDied","Data":"9f0b8fe7090c12fb91ab56884d3a4ac175b1f1d88bb474c23e8dfb008974ce14"} Jan 20 18:53:12 crc kubenswrapper[4558]: I0120 18:53:12.146399 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dfvpb" event={"ID":"a825b673-344a-4233-9889-f3bb55b4f544","Type":"ContainerDied","Data":"293ea0bf1e50da6a48c9b75541825a0526698ad86ea1d9ee7cc7e7734f7ccad1"} Jan 20 18:53:12 crc kubenswrapper[4558]: I0120 18:53:12.146421 4558 scope.go:117] "RemoveContainer" containerID="9f0b8fe7090c12fb91ab56884d3a4ac175b1f1d88bb474c23e8dfb008974ce14" Jan 20 18:53:12 crc kubenswrapper[4558]: I0120 18:53:12.146554 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-dfvpb" Jan 20 18:53:12 crc kubenswrapper[4558]: I0120 18:53:12.162994 4558 scope.go:117] "RemoveContainer" containerID="2a4ea4dc76a535c88f4c5f6f4656cdfbef784f14f7d00e1d600aff3657e5932d" Jan 20 18:53:12 crc kubenswrapper[4558]: I0120 18:53:12.177442 4558 scope.go:117] "RemoveContainer" containerID="9d0cddf81e77e2e2cd339c7aa8f8d14071bb5cc2d4aff1de9806a86afab6032b" Jan 20 18:53:12 crc kubenswrapper[4558]: I0120 18:53:12.190520 4558 scope.go:117] "RemoveContainer" containerID="9f0b8fe7090c12fb91ab56884d3a4ac175b1f1d88bb474c23e8dfb008974ce14" Jan 20 18:53:12 crc kubenswrapper[4558]: E0120 18:53:12.190883 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9f0b8fe7090c12fb91ab56884d3a4ac175b1f1d88bb474c23e8dfb008974ce14\": container with ID starting with 9f0b8fe7090c12fb91ab56884d3a4ac175b1f1d88bb474c23e8dfb008974ce14 not found: ID does not exist" containerID="9f0b8fe7090c12fb91ab56884d3a4ac175b1f1d88bb474c23e8dfb008974ce14" Jan 20 18:53:12 crc kubenswrapper[4558]: I0120 18:53:12.190923 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9f0b8fe7090c12fb91ab56884d3a4ac175b1f1d88bb474c23e8dfb008974ce14"} err="failed to get container status \"9f0b8fe7090c12fb91ab56884d3a4ac175b1f1d88bb474c23e8dfb008974ce14\": rpc error: code = NotFound desc = could not find container \"9f0b8fe7090c12fb91ab56884d3a4ac175b1f1d88bb474c23e8dfb008974ce14\": container with ID starting with 9f0b8fe7090c12fb91ab56884d3a4ac175b1f1d88bb474c23e8dfb008974ce14 not found: ID does not exist" Jan 20 18:53:12 crc kubenswrapper[4558]: I0120 18:53:12.190943 4558 scope.go:117] "RemoveContainer" containerID="2a4ea4dc76a535c88f4c5f6f4656cdfbef784f14f7d00e1d600aff3657e5932d" Jan 20 18:53:12 crc kubenswrapper[4558]: E0120 18:53:12.191251 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2a4ea4dc76a535c88f4c5f6f4656cdfbef784f14f7d00e1d600aff3657e5932d\": container with ID starting with 2a4ea4dc76a535c88f4c5f6f4656cdfbef784f14f7d00e1d600aff3657e5932d not found: ID does not exist" containerID="2a4ea4dc76a535c88f4c5f6f4656cdfbef784f14f7d00e1d600aff3657e5932d" Jan 20 18:53:12 crc kubenswrapper[4558]: I0120 18:53:12.191290 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2a4ea4dc76a535c88f4c5f6f4656cdfbef784f14f7d00e1d600aff3657e5932d"} err="failed to get container status \"2a4ea4dc76a535c88f4c5f6f4656cdfbef784f14f7d00e1d600aff3657e5932d\": rpc error: code = NotFound desc = could not find container \"2a4ea4dc76a535c88f4c5f6f4656cdfbef784f14f7d00e1d600aff3657e5932d\": container with ID starting with 2a4ea4dc76a535c88f4c5f6f4656cdfbef784f14f7d00e1d600aff3657e5932d not found: ID does not exist" Jan 20 18:53:12 crc kubenswrapper[4558]: I0120 18:53:12.191338 4558 scope.go:117] "RemoveContainer" containerID="9d0cddf81e77e2e2cd339c7aa8f8d14071bb5cc2d4aff1de9806a86afab6032b" Jan 20 18:53:12 crc kubenswrapper[4558]: E0120 18:53:12.191605 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9d0cddf81e77e2e2cd339c7aa8f8d14071bb5cc2d4aff1de9806a86afab6032b\": container with ID starting with 9d0cddf81e77e2e2cd339c7aa8f8d14071bb5cc2d4aff1de9806a86afab6032b not found: ID does not exist" containerID="9d0cddf81e77e2e2cd339c7aa8f8d14071bb5cc2d4aff1de9806a86afab6032b" 
Jan 20 18:53:12 crc kubenswrapper[4558]: I0120 18:53:12.191653 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9d0cddf81e77e2e2cd339c7aa8f8d14071bb5cc2d4aff1de9806a86afab6032b"} err="failed to get container status \"9d0cddf81e77e2e2cd339c7aa8f8d14071bb5cc2d4aff1de9806a86afab6032b\": rpc error: code = NotFound desc = could not find container \"9d0cddf81e77e2e2cd339c7aa8f8d14071bb5cc2d4aff1de9806a86afab6032b\": container with ID starting with 9d0cddf81e77e2e2cd339c7aa8f8d14071bb5cc2d4aff1de9806a86afab6032b not found: ID does not exist" Jan 20 18:53:12 crc kubenswrapper[4558]: I0120 18:53:12.677537 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a825b673-344a-4233-9889-f3bb55b4f544-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a825b673-344a-4233-9889-f3bb55b4f544" (UID: "a825b673-344a-4233-9889-f3bb55b4f544"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:53:12 crc kubenswrapper[4558]: I0120 18:53:12.761486 4558 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a825b673-344a-4233-9889-f3bb55b4f544-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 20 18:53:12 crc kubenswrapper[4558]: I0120 18:53:12.777892 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-dfvpb"] Jan 20 18:53:12 crc kubenswrapper[4558]: I0120 18:53:12.782479 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-dfvpb"] Jan 20 18:53:14 crc kubenswrapper[4558]: I0120 18:53:14.572599 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a825b673-344a-4233-9889-f3bb55b4f544" path="/var/lib/kubelet/pods/a825b673-344a-4233-9889-f3bb55b4f544/volumes" Jan 20 18:53:18 crc kubenswrapper[4558]: I0120 18:53:18.565609 4558 scope.go:117] "RemoveContainer" containerID="499ec313cf27a56b071eb6186a175031344542ae6ab21658ac5fb0feccc71012" Jan 20 18:53:18 crc kubenswrapper[4558]: E0120 18:53:18.566322 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:53:32 crc kubenswrapper[4558]: I0120 18:53:32.566117 4558 scope.go:117] "RemoveContainer" containerID="499ec313cf27a56b071eb6186a175031344542ae6ab21658ac5fb0feccc71012" Jan 20 18:53:32 crc kubenswrapper[4558]: E0120 18:53:32.567053 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:53:44 crc kubenswrapper[4558]: I0120 18:53:44.567491 4558 scope.go:117] "RemoveContainer" containerID="499ec313cf27a56b071eb6186a175031344542ae6ab21658ac5fb0feccc71012" Jan 20 18:53:44 crc kubenswrapper[4558]: E0120 18:53:44.568091 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to 
\"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:53:55 crc kubenswrapper[4558]: I0120 18:53:55.565762 4558 scope.go:117] "RemoveContainer" containerID="499ec313cf27a56b071eb6186a175031344542ae6ab21658ac5fb0feccc71012" Jan 20 18:53:55 crc kubenswrapper[4558]: E0120 18:53:55.566366 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 18:54:10 crc kubenswrapper[4558]: I0120 18:54:10.565772 4558 scope.go:117] "RemoveContainer" containerID="499ec313cf27a56b071eb6186a175031344542ae6ab21658ac5fb0feccc71012" Jan 20 18:54:11 crc kubenswrapper[4558]: I0120 18:54:11.550572 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerStarted","Data":"8a12e08019cdf90bc021796246b8cb4cd26600e714b44a9fcbd96e1616ed01db"} Jan 20 18:55:30 crc kubenswrapper[4558]: I0120 18:55:30.911325 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-s5w7q"] Jan 20 18:55:30 crc kubenswrapper[4558]: E0120 18:55:30.919213 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a825b673-344a-4233-9889-f3bb55b4f544" containerName="extract-content" Jan 20 18:55:30 crc kubenswrapper[4558]: I0120 18:55:30.919235 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a825b673-344a-4233-9889-f3bb55b4f544" containerName="extract-content" Jan 20 18:55:30 crc kubenswrapper[4558]: E0120 18:55:30.919252 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a825b673-344a-4233-9889-f3bb55b4f544" containerName="registry-server" Jan 20 18:55:30 crc kubenswrapper[4558]: I0120 18:55:30.919258 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a825b673-344a-4233-9889-f3bb55b4f544" containerName="registry-server" Jan 20 18:55:30 crc kubenswrapper[4558]: E0120 18:55:30.919270 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a825b673-344a-4233-9889-f3bb55b4f544" containerName="extract-utilities" Jan 20 18:55:30 crc kubenswrapper[4558]: I0120 18:55:30.919277 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="a825b673-344a-4233-9889-f3bb55b4f544" containerName="extract-utilities" Jan 20 18:55:30 crc kubenswrapper[4558]: I0120 18:55:30.919433 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="a825b673-344a-4233-9889-f3bb55b4f544" containerName="registry-server" Jan 20 18:55:30 crc kubenswrapper[4558]: I0120 18:55:30.920430 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-s5w7q" Jan 20 18:55:30 crc kubenswrapper[4558]: I0120 18:55:30.926492 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-s5w7q"] Jan 20 18:55:31 crc kubenswrapper[4558]: I0120 18:55:31.112947 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/271574bd-cd80-4d95-966e-db377b38f1cc-utilities\") pod \"redhat-marketplace-s5w7q\" (UID: \"271574bd-cd80-4d95-966e-db377b38f1cc\") " pod="openshift-marketplace/redhat-marketplace-s5w7q" Jan 20 18:55:31 crc kubenswrapper[4558]: I0120 18:55:31.113007 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/271574bd-cd80-4d95-966e-db377b38f1cc-catalog-content\") pod \"redhat-marketplace-s5w7q\" (UID: \"271574bd-cd80-4d95-966e-db377b38f1cc\") " pod="openshift-marketplace/redhat-marketplace-s5w7q" Jan 20 18:55:31 crc kubenswrapper[4558]: I0120 18:55:31.113580 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kczld\" (UniqueName: \"kubernetes.io/projected/271574bd-cd80-4d95-966e-db377b38f1cc-kube-api-access-kczld\") pod \"redhat-marketplace-s5w7q\" (UID: \"271574bd-cd80-4d95-966e-db377b38f1cc\") " pod="openshift-marketplace/redhat-marketplace-s5w7q" Jan 20 18:55:31 crc kubenswrapper[4558]: I0120 18:55:31.215858 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/271574bd-cd80-4d95-966e-db377b38f1cc-utilities\") pod \"redhat-marketplace-s5w7q\" (UID: \"271574bd-cd80-4d95-966e-db377b38f1cc\") " pod="openshift-marketplace/redhat-marketplace-s5w7q" Jan 20 18:55:31 crc kubenswrapper[4558]: I0120 18:55:31.215943 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/271574bd-cd80-4d95-966e-db377b38f1cc-catalog-content\") pod \"redhat-marketplace-s5w7q\" (UID: \"271574bd-cd80-4d95-966e-db377b38f1cc\") " pod="openshift-marketplace/redhat-marketplace-s5w7q" Jan 20 18:55:31 crc kubenswrapper[4558]: I0120 18:55:31.215993 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kczld\" (UniqueName: \"kubernetes.io/projected/271574bd-cd80-4d95-966e-db377b38f1cc-kube-api-access-kczld\") pod \"redhat-marketplace-s5w7q\" (UID: \"271574bd-cd80-4d95-966e-db377b38f1cc\") " pod="openshift-marketplace/redhat-marketplace-s5w7q" Jan 20 18:55:31 crc kubenswrapper[4558]: I0120 18:55:31.216540 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/271574bd-cd80-4d95-966e-db377b38f1cc-catalog-content\") pod \"redhat-marketplace-s5w7q\" (UID: \"271574bd-cd80-4d95-966e-db377b38f1cc\") " pod="openshift-marketplace/redhat-marketplace-s5w7q" Jan 20 18:55:31 crc kubenswrapper[4558]: I0120 18:55:31.216625 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/271574bd-cd80-4d95-966e-db377b38f1cc-utilities\") pod \"redhat-marketplace-s5w7q\" (UID: \"271574bd-cd80-4d95-966e-db377b38f1cc\") " pod="openshift-marketplace/redhat-marketplace-s5w7q" Jan 20 18:55:31 crc kubenswrapper[4558]: I0120 18:55:31.237974 4558 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-kczld\" (UniqueName: \"kubernetes.io/projected/271574bd-cd80-4d95-966e-db377b38f1cc-kube-api-access-kczld\") pod \"redhat-marketplace-s5w7q\" (UID: \"271574bd-cd80-4d95-966e-db377b38f1cc\") " pod="openshift-marketplace/redhat-marketplace-s5w7q" Jan 20 18:55:31 crc kubenswrapper[4558]: I0120 18:55:31.239458 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-s5w7q" Jan 20 18:55:31 crc kubenswrapper[4558]: I0120 18:55:31.643807 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-s5w7q"] Jan 20 18:55:32 crc kubenswrapper[4558]: I0120 18:55:32.101844 4558 generic.go:334] "Generic (PLEG): container finished" podID="271574bd-cd80-4d95-966e-db377b38f1cc" containerID="32dbcbfa5ee383c2de241d868808b4d5cd583de3cfeafde34ffcf8dc2e8b7e51" exitCode=0 Jan 20 18:55:32 crc kubenswrapper[4558]: I0120 18:55:32.101905 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-s5w7q" event={"ID":"271574bd-cd80-4d95-966e-db377b38f1cc","Type":"ContainerDied","Data":"32dbcbfa5ee383c2de241d868808b4d5cd583de3cfeafde34ffcf8dc2e8b7e51"} Jan 20 18:55:32 crc kubenswrapper[4558]: I0120 18:55:32.102202 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-s5w7q" event={"ID":"271574bd-cd80-4d95-966e-db377b38f1cc","Type":"ContainerStarted","Data":"85bfe86204f2e89811cbcc4c68d94bfe70424a00116b0b86b8e14a853b7b37b1"} Jan 20 18:55:33 crc kubenswrapper[4558]: I0120 18:55:33.114754 4558 generic.go:334] "Generic (PLEG): container finished" podID="271574bd-cd80-4d95-966e-db377b38f1cc" containerID="38261967f58dc6bd44e8772744bd8fd7224e253a6f229a5b59b46387423c10d0" exitCode=0 Jan 20 18:55:33 crc kubenswrapper[4558]: I0120 18:55:33.115136 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-s5w7q" event={"ID":"271574bd-cd80-4d95-966e-db377b38f1cc","Type":"ContainerDied","Data":"38261967f58dc6bd44e8772744bd8fd7224e253a6f229a5b59b46387423c10d0"} Jan 20 18:55:34 crc kubenswrapper[4558]: I0120 18:55:34.127014 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-s5w7q" event={"ID":"271574bd-cd80-4d95-966e-db377b38f1cc","Type":"ContainerStarted","Data":"385bdd88bd2bb811542bdf1b10b42abc474f0e3b24db5974cda56d930ab65b9b"} Jan 20 18:55:41 crc kubenswrapper[4558]: I0120 18:55:41.239886 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-s5w7q" Jan 20 18:55:41 crc kubenswrapper[4558]: I0120 18:55:41.240615 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-s5w7q" Jan 20 18:55:41 crc kubenswrapper[4558]: I0120 18:55:41.275436 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-s5w7q" Jan 20 18:55:41 crc kubenswrapper[4558]: I0120 18:55:41.296607 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-s5w7q" podStartSLOduration=9.742672477 podStartE2EDuration="11.296592737s" podCreationTimestamp="2026-01-20 18:55:30 +0000 UTC" firstStartedPulling="2026-01-20 18:55:32.103759898 +0000 UTC m=+8025.864097865" lastFinishedPulling="2026-01-20 18:55:33.657680158 +0000 UTC m=+8027.418018125" observedRunningTime="2026-01-20 18:55:34.152121166 +0000 UTC 
m=+8027.912459133" watchObservedRunningTime="2026-01-20 18:55:41.296592737 +0000 UTC m=+8035.056930705" Jan 20 18:55:42 crc kubenswrapper[4558]: I0120 18:55:42.214207 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-s5w7q" Jan 20 18:55:42 crc kubenswrapper[4558]: I0120 18:55:42.261599 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-s5w7q"] Jan 20 18:55:44 crc kubenswrapper[4558]: I0120 18:55:44.197215 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-s5w7q" podUID="271574bd-cd80-4d95-966e-db377b38f1cc" containerName="registry-server" containerID="cri-o://385bdd88bd2bb811542bdf1b10b42abc474f0e3b24db5974cda56d930ab65b9b" gracePeriod=2 Jan 20 18:55:44 crc kubenswrapper[4558]: I0120 18:55:44.524620 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-s5w7q" Jan 20 18:55:44 crc kubenswrapper[4558]: I0120 18:55:44.612832 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/271574bd-cd80-4d95-966e-db377b38f1cc-catalog-content\") pod \"271574bd-cd80-4d95-966e-db377b38f1cc\" (UID: \"271574bd-cd80-4d95-966e-db377b38f1cc\") " Jan 20 18:55:44 crc kubenswrapper[4558]: I0120 18:55:44.612914 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kczld\" (UniqueName: \"kubernetes.io/projected/271574bd-cd80-4d95-966e-db377b38f1cc-kube-api-access-kczld\") pod \"271574bd-cd80-4d95-966e-db377b38f1cc\" (UID: \"271574bd-cd80-4d95-966e-db377b38f1cc\") " Jan 20 18:55:44 crc kubenswrapper[4558]: I0120 18:55:44.612993 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/271574bd-cd80-4d95-966e-db377b38f1cc-utilities\") pod \"271574bd-cd80-4d95-966e-db377b38f1cc\" (UID: \"271574bd-cd80-4d95-966e-db377b38f1cc\") " Jan 20 18:55:44 crc kubenswrapper[4558]: I0120 18:55:44.614846 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/271574bd-cd80-4d95-966e-db377b38f1cc-utilities" (OuterVolumeSpecName: "utilities") pod "271574bd-cd80-4d95-966e-db377b38f1cc" (UID: "271574bd-cd80-4d95-966e-db377b38f1cc"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:55:44 crc kubenswrapper[4558]: I0120 18:55:44.625293 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/271574bd-cd80-4d95-966e-db377b38f1cc-kube-api-access-kczld" (OuterVolumeSpecName: "kube-api-access-kczld") pod "271574bd-cd80-4d95-966e-db377b38f1cc" (UID: "271574bd-cd80-4d95-966e-db377b38f1cc"). InnerVolumeSpecName "kube-api-access-kczld". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:55:44 crc kubenswrapper[4558]: I0120 18:55:44.635454 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/271574bd-cd80-4d95-966e-db377b38f1cc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "271574bd-cd80-4d95-966e-db377b38f1cc" (UID: "271574bd-cd80-4d95-966e-db377b38f1cc"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:55:44 crc kubenswrapper[4558]: I0120 18:55:44.715219 4558 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/271574bd-cd80-4d95-966e-db377b38f1cc-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 20 18:55:44 crc kubenswrapper[4558]: I0120 18:55:44.715260 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kczld\" (UniqueName: \"kubernetes.io/projected/271574bd-cd80-4d95-966e-db377b38f1cc-kube-api-access-kczld\") on node \"crc\" DevicePath \"\"" Jan 20 18:55:44 crc kubenswrapper[4558]: I0120 18:55:44.715274 4558 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/271574bd-cd80-4d95-966e-db377b38f1cc-utilities\") on node \"crc\" DevicePath \"\"" Jan 20 18:55:45 crc kubenswrapper[4558]: I0120 18:55:45.205914 4558 generic.go:334] "Generic (PLEG): container finished" podID="271574bd-cd80-4d95-966e-db377b38f1cc" containerID="385bdd88bd2bb811542bdf1b10b42abc474f0e3b24db5974cda56d930ab65b9b" exitCode=0 Jan 20 18:55:45 crc kubenswrapper[4558]: I0120 18:55:45.205979 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-s5w7q" event={"ID":"271574bd-cd80-4d95-966e-db377b38f1cc","Type":"ContainerDied","Data":"385bdd88bd2bb811542bdf1b10b42abc474f0e3b24db5974cda56d930ab65b9b"} Jan 20 18:55:45 crc kubenswrapper[4558]: I0120 18:55:45.206024 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-s5w7q" Jan 20 18:55:45 crc kubenswrapper[4558]: I0120 18:55:45.206062 4558 scope.go:117] "RemoveContainer" containerID="385bdd88bd2bb811542bdf1b10b42abc474f0e3b24db5974cda56d930ab65b9b" Jan 20 18:55:45 crc kubenswrapper[4558]: I0120 18:55:45.206043 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-s5w7q" event={"ID":"271574bd-cd80-4d95-966e-db377b38f1cc","Type":"ContainerDied","Data":"85bfe86204f2e89811cbcc4c68d94bfe70424a00116b0b86b8e14a853b7b37b1"} Jan 20 18:55:45 crc kubenswrapper[4558]: I0120 18:55:45.245243 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-s5w7q"] Jan 20 18:55:45 crc kubenswrapper[4558]: I0120 18:55:45.252447 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-s5w7q"] Jan 20 18:55:45 crc kubenswrapper[4558]: I0120 18:55:45.274361 4558 scope.go:117] "RemoveContainer" containerID="38261967f58dc6bd44e8772744bd8fd7224e253a6f229a5b59b46387423c10d0" Jan 20 18:55:45 crc kubenswrapper[4558]: I0120 18:55:45.293187 4558 scope.go:117] "RemoveContainer" containerID="32dbcbfa5ee383c2de241d868808b4d5cd583de3cfeafde34ffcf8dc2e8b7e51" Jan 20 18:55:45 crc kubenswrapper[4558]: I0120 18:55:45.313879 4558 scope.go:117] "RemoveContainer" containerID="385bdd88bd2bb811542bdf1b10b42abc474f0e3b24db5974cda56d930ab65b9b" Jan 20 18:55:45 crc kubenswrapper[4558]: E0120 18:55:45.314219 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"385bdd88bd2bb811542bdf1b10b42abc474f0e3b24db5974cda56d930ab65b9b\": container with ID starting with 385bdd88bd2bb811542bdf1b10b42abc474f0e3b24db5974cda56d930ab65b9b not found: ID does not exist" containerID="385bdd88bd2bb811542bdf1b10b42abc474f0e3b24db5974cda56d930ab65b9b" Jan 20 18:55:45 crc kubenswrapper[4558]: I0120 18:55:45.314255 4558 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"385bdd88bd2bb811542bdf1b10b42abc474f0e3b24db5974cda56d930ab65b9b"} err="failed to get container status \"385bdd88bd2bb811542bdf1b10b42abc474f0e3b24db5974cda56d930ab65b9b\": rpc error: code = NotFound desc = could not find container \"385bdd88bd2bb811542bdf1b10b42abc474f0e3b24db5974cda56d930ab65b9b\": container with ID starting with 385bdd88bd2bb811542bdf1b10b42abc474f0e3b24db5974cda56d930ab65b9b not found: ID does not exist" Jan 20 18:55:45 crc kubenswrapper[4558]: I0120 18:55:45.314279 4558 scope.go:117] "RemoveContainer" containerID="38261967f58dc6bd44e8772744bd8fd7224e253a6f229a5b59b46387423c10d0" Jan 20 18:55:45 crc kubenswrapper[4558]: E0120 18:55:45.314544 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"38261967f58dc6bd44e8772744bd8fd7224e253a6f229a5b59b46387423c10d0\": container with ID starting with 38261967f58dc6bd44e8772744bd8fd7224e253a6f229a5b59b46387423c10d0 not found: ID does not exist" containerID="38261967f58dc6bd44e8772744bd8fd7224e253a6f229a5b59b46387423c10d0" Jan 20 18:55:45 crc kubenswrapper[4558]: I0120 18:55:45.314577 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"38261967f58dc6bd44e8772744bd8fd7224e253a6f229a5b59b46387423c10d0"} err="failed to get container status \"38261967f58dc6bd44e8772744bd8fd7224e253a6f229a5b59b46387423c10d0\": rpc error: code = NotFound desc = could not find container \"38261967f58dc6bd44e8772744bd8fd7224e253a6f229a5b59b46387423c10d0\": container with ID starting with 38261967f58dc6bd44e8772744bd8fd7224e253a6f229a5b59b46387423c10d0 not found: ID does not exist" Jan 20 18:55:45 crc kubenswrapper[4558]: I0120 18:55:45.314592 4558 scope.go:117] "RemoveContainer" containerID="32dbcbfa5ee383c2de241d868808b4d5cd583de3cfeafde34ffcf8dc2e8b7e51" Jan 20 18:55:45 crc kubenswrapper[4558]: E0120 18:55:45.314856 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"32dbcbfa5ee383c2de241d868808b4d5cd583de3cfeafde34ffcf8dc2e8b7e51\": container with ID starting with 32dbcbfa5ee383c2de241d868808b4d5cd583de3cfeafde34ffcf8dc2e8b7e51 not found: ID does not exist" containerID="32dbcbfa5ee383c2de241d868808b4d5cd583de3cfeafde34ffcf8dc2e8b7e51" Jan 20 18:55:45 crc kubenswrapper[4558]: I0120 18:55:45.314878 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"32dbcbfa5ee383c2de241d868808b4d5cd583de3cfeafde34ffcf8dc2e8b7e51"} err="failed to get container status \"32dbcbfa5ee383c2de241d868808b4d5cd583de3cfeafde34ffcf8dc2e8b7e51\": rpc error: code = NotFound desc = could not find container \"32dbcbfa5ee383c2de241d868808b4d5cd583de3cfeafde34ffcf8dc2e8b7e51\": container with ID starting with 32dbcbfa5ee383c2de241d868808b4d5cd583de3cfeafde34ffcf8dc2e8b7e51 not found: ID does not exist" Jan 20 18:55:46 crc kubenswrapper[4558]: I0120 18:55:46.588890 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="271574bd-cd80-4d95-966e-db377b38f1cc" path="/var/lib/kubelet/pods/271574bd-cd80-4d95-966e-db377b38f1cc/volumes" Jan 20 18:56:27 crc kubenswrapper[4558]: I0120 18:56:27.329783 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 18:56:27 crc kubenswrapper[4558]: I0120 18:56:27.330620 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 18:56:57 crc kubenswrapper[4558]: I0120 18:56:57.330001 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 18:56:57 crc kubenswrapper[4558]: I0120 18:56:57.330612 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 18:57:25 crc kubenswrapper[4558]: I0120 18:57:25.342641 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-pmdb5"] Jan 20 18:57:25 crc kubenswrapper[4558]: E0120 18:57:25.343535 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="271574bd-cd80-4d95-966e-db377b38f1cc" containerName="extract-content" Jan 20 18:57:25 crc kubenswrapper[4558]: I0120 18:57:25.343548 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="271574bd-cd80-4d95-966e-db377b38f1cc" containerName="extract-content" Jan 20 18:57:25 crc kubenswrapper[4558]: E0120 18:57:25.343560 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="271574bd-cd80-4d95-966e-db377b38f1cc" containerName="extract-utilities" Jan 20 18:57:25 crc kubenswrapper[4558]: I0120 18:57:25.343565 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="271574bd-cd80-4d95-966e-db377b38f1cc" containerName="extract-utilities" Jan 20 18:57:25 crc kubenswrapper[4558]: E0120 18:57:25.343581 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="271574bd-cd80-4d95-966e-db377b38f1cc" containerName="registry-server" Jan 20 18:57:25 crc kubenswrapper[4558]: I0120 18:57:25.343587 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="271574bd-cd80-4d95-966e-db377b38f1cc" containerName="registry-server" Jan 20 18:57:25 crc kubenswrapper[4558]: I0120 18:57:25.343716 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="271574bd-cd80-4d95-966e-db377b38f1cc" containerName="registry-server" Jan 20 18:57:25 crc kubenswrapper[4558]: I0120 18:57:25.344582 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-pmdb5" Jan 20 18:57:25 crc kubenswrapper[4558]: I0120 18:57:25.351783 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-pmdb5"] Jan 20 18:57:25 crc kubenswrapper[4558]: I0120 18:57:25.458391 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-msptf\" (UniqueName: \"kubernetes.io/projected/62f077fc-f52f-460a-afe8-e2a51fb285c4-kube-api-access-msptf\") pod \"certified-operators-pmdb5\" (UID: \"62f077fc-f52f-460a-afe8-e2a51fb285c4\") " pod="openshift-marketplace/certified-operators-pmdb5" Jan 20 18:57:25 crc kubenswrapper[4558]: I0120 18:57:25.458445 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/62f077fc-f52f-460a-afe8-e2a51fb285c4-catalog-content\") pod \"certified-operators-pmdb5\" (UID: \"62f077fc-f52f-460a-afe8-e2a51fb285c4\") " pod="openshift-marketplace/certified-operators-pmdb5" Jan 20 18:57:25 crc kubenswrapper[4558]: I0120 18:57:25.458470 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/62f077fc-f52f-460a-afe8-e2a51fb285c4-utilities\") pod \"certified-operators-pmdb5\" (UID: \"62f077fc-f52f-460a-afe8-e2a51fb285c4\") " pod="openshift-marketplace/certified-operators-pmdb5" Jan 20 18:57:25 crc kubenswrapper[4558]: I0120 18:57:25.561008 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-msptf\" (UniqueName: \"kubernetes.io/projected/62f077fc-f52f-460a-afe8-e2a51fb285c4-kube-api-access-msptf\") pod \"certified-operators-pmdb5\" (UID: \"62f077fc-f52f-460a-afe8-e2a51fb285c4\") " pod="openshift-marketplace/certified-operators-pmdb5" Jan 20 18:57:25 crc kubenswrapper[4558]: I0120 18:57:25.561079 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/62f077fc-f52f-460a-afe8-e2a51fb285c4-catalog-content\") pod \"certified-operators-pmdb5\" (UID: \"62f077fc-f52f-460a-afe8-e2a51fb285c4\") " pod="openshift-marketplace/certified-operators-pmdb5" Jan 20 18:57:25 crc kubenswrapper[4558]: I0120 18:57:25.561115 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/62f077fc-f52f-460a-afe8-e2a51fb285c4-utilities\") pod \"certified-operators-pmdb5\" (UID: \"62f077fc-f52f-460a-afe8-e2a51fb285c4\") " pod="openshift-marketplace/certified-operators-pmdb5" Jan 20 18:57:25 crc kubenswrapper[4558]: I0120 18:57:25.561741 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/62f077fc-f52f-460a-afe8-e2a51fb285c4-utilities\") pod \"certified-operators-pmdb5\" (UID: \"62f077fc-f52f-460a-afe8-e2a51fb285c4\") " pod="openshift-marketplace/certified-operators-pmdb5" Jan 20 18:57:25 crc kubenswrapper[4558]: I0120 18:57:25.562090 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/62f077fc-f52f-460a-afe8-e2a51fb285c4-catalog-content\") pod \"certified-operators-pmdb5\" (UID: \"62f077fc-f52f-460a-afe8-e2a51fb285c4\") " pod="openshift-marketplace/certified-operators-pmdb5" Jan 20 18:57:25 crc kubenswrapper[4558]: I0120 18:57:25.588199 4558 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-msptf\" (UniqueName: \"kubernetes.io/projected/62f077fc-f52f-460a-afe8-e2a51fb285c4-kube-api-access-msptf\") pod \"certified-operators-pmdb5\" (UID: \"62f077fc-f52f-460a-afe8-e2a51fb285c4\") " pod="openshift-marketplace/certified-operators-pmdb5" Jan 20 18:57:25 crc kubenswrapper[4558]: I0120 18:57:25.669652 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-pmdb5" Jan 20 18:57:26 crc kubenswrapper[4558]: I0120 18:57:26.102107 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-pmdb5"] Jan 20 18:57:26 crc kubenswrapper[4558]: I0120 18:57:26.953694 4558 generic.go:334] "Generic (PLEG): container finished" podID="62f077fc-f52f-460a-afe8-e2a51fb285c4" containerID="c1ba3324d28bfc6f98c3f5bfbf196dd3addf6dc0557109ae952905604bc465d7" exitCode=0 Jan 20 18:57:26 crc kubenswrapper[4558]: I0120 18:57:26.953753 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pmdb5" event={"ID":"62f077fc-f52f-460a-afe8-e2a51fb285c4","Type":"ContainerDied","Data":"c1ba3324d28bfc6f98c3f5bfbf196dd3addf6dc0557109ae952905604bc465d7"} Jan 20 18:57:26 crc kubenswrapper[4558]: I0120 18:57:26.953786 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pmdb5" event={"ID":"62f077fc-f52f-460a-afe8-e2a51fb285c4","Type":"ContainerStarted","Data":"4389a01266102d61229ba661ea754d39b96fcd813e163b37e3c23360c673d626"} Jan 20 18:57:27 crc kubenswrapper[4558]: I0120 18:57:27.144138 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-npkcz"] Jan 20 18:57:27 crc kubenswrapper[4558]: I0120 18:57:27.145699 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-npkcz" Jan 20 18:57:27 crc kubenswrapper[4558]: I0120 18:57:27.157388 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-npkcz"] Jan 20 18:57:27 crc kubenswrapper[4558]: I0120 18:57:27.288182 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2dj9t\" (UniqueName: \"kubernetes.io/projected/48f66b59-5414-48f5-b887-25da697bc698-kube-api-access-2dj9t\") pod \"community-operators-npkcz\" (UID: \"48f66b59-5414-48f5-b887-25da697bc698\") " pod="openshift-marketplace/community-operators-npkcz" Jan 20 18:57:27 crc kubenswrapper[4558]: I0120 18:57:27.288229 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/48f66b59-5414-48f5-b887-25da697bc698-catalog-content\") pod \"community-operators-npkcz\" (UID: \"48f66b59-5414-48f5-b887-25da697bc698\") " pod="openshift-marketplace/community-operators-npkcz" Jan 20 18:57:27 crc kubenswrapper[4558]: I0120 18:57:27.288326 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/48f66b59-5414-48f5-b887-25da697bc698-utilities\") pod \"community-operators-npkcz\" (UID: \"48f66b59-5414-48f5-b887-25da697bc698\") " pod="openshift-marketplace/community-operators-npkcz" Jan 20 18:57:27 crc kubenswrapper[4558]: I0120 18:57:27.329706 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 18:57:27 crc kubenswrapper[4558]: I0120 18:57:27.329769 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 18:57:27 crc kubenswrapper[4558]: I0120 18:57:27.329820 4558 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" Jan 20 18:57:27 crc kubenswrapper[4558]: I0120 18:57:27.330589 4558 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"8a12e08019cdf90bc021796246b8cb4cd26600e714b44a9fcbd96e1616ed01db"} pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 20 18:57:27 crc kubenswrapper[4558]: I0120 18:57:27.330655 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" containerID="cri-o://8a12e08019cdf90bc021796246b8cb4cd26600e714b44a9fcbd96e1616ed01db" gracePeriod=600 Jan 20 18:57:27 crc kubenswrapper[4558]: I0120 18:57:27.390202 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2dj9t\" (UniqueName: \"kubernetes.io/projected/48f66b59-5414-48f5-b887-25da697bc698-kube-api-access-2dj9t\") 
pod \"community-operators-npkcz\" (UID: \"48f66b59-5414-48f5-b887-25da697bc698\") " pod="openshift-marketplace/community-operators-npkcz" Jan 20 18:57:27 crc kubenswrapper[4558]: I0120 18:57:27.390274 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/48f66b59-5414-48f5-b887-25da697bc698-catalog-content\") pod \"community-operators-npkcz\" (UID: \"48f66b59-5414-48f5-b887-25da697bc698\") " pod="openshift-marketplace/community-operators-npkcz" Jan 20 18:57:27 crc kubenswrapper[4558]: I0120 18:57:27.390348 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/48f66b59-5414-48f5-b887-25da697bc698-utilities\") pod \"community-operators-npkcz\" (UID: \"48f66b59-5414-48f5-b887-25da697bc698\") " pod="openshift-marketplace/community-operators-npkcz" Jan 20 18:57:27 crc kubenswrapper[4558]: I0120 18:57:27.390896 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/48f66b59-5414-48f5-b887-25da697bc698-catalog-content\") pod \"community-operators-npkcz\" (UID: \"48f66b59-5414-48f5-b887-25da697bc698\") " pod="openshift-marketplace/community-operators-npkcz" Jan 20 18:57:27 crc kubenswrapper[4558]: I0120 18:57:27.390926 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/48f66b59-5414-48f5-b887-25da697bc698-utilities\") pod \"community-operators-npkcz\" (UID: \"48f66b59-5414-48f5-b887-25da697bc698\") " pod="openshift-marketplace/community-operators-npkcz" Jan 20 18:57:27 crc kubenswrapper[4558]: I0120 18:57:27.406348 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2dj9t\" (UniqueName: \"kubernetes.io/projected/48f66b59-5414-48f5-b887-25da697bc698-kube-api-access-2dj9t\") pod \"community-operators-npkcz\" (UID: \"48f66b59-5414-48f5-b887-25da697bc698\") " pod="openshift-marketplace/community-operators-npkcz" Jan 20 18:57:27 crc kubenswrapper[4558]: I0120 18:57:27.466823 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-npkcz" Jan 20 18:57:27 crc kubenswrapper[4558]: I0120 18:57:27.755457 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-npkcz"] Jan 20 18:57:27 crc kubenswrapper[4558]: W0120 18:57:27.762422 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod48f66b59_5414_48f5_b887_25da697bc698.slice/crio-b79500f62787a28926d1fa7cd052c80554741a0b230886f7de3d24cd5f40d322 WatchSource:0}: Error finding container b79500f62787a28926d1fa7cd052c80554741a0b230886f7de3d24cd5f40d322: Status 404 returned error can't find the container with id b79500f62787a28926d1fa7cd052c80554741a0b230886f7de3d24cd5f40d322 Jan 20 18:57:27 crc kubenswrapper[4558]: I0120 18:57:27.961017 4558 generic.go:334] "Generic (PLEG): container finished" podID="48f66b59-5414-48f5-b887-25da697bc698" containerID="346a7170bf6eaa2019f60d25c189f6fcc4feea4f6d5e7497457b34942ea1c1fe" exitCode=0 Jan 20 18:57:27 crc kubenswrapper[4558]: I0120 18:57:27.961216 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-npkcz" event={"ID":"48f66b59-5414-48f5-b887-25da697bc698","Type":"ContainerDied","Data":"346a7170bf6eaa2019f60d25c189f6fcc4feea4f6d5e7497457b34942ea1c1fe"} Jan 20 18:57:27 crc kubenswrapper[4558]: I0120 18:57:27.961374 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-npkcz" event={"ID":"48f66b59-5414-48f5-b887-25da697bc698","Type":"ContainerStarted","Data":"b79500f62787a28926d1fa7cd052c80554741a0b230886f7de3d24cd5f40d322"} Jan 20 18:57:27 crc kubenswrapper[4558]: I0120 18:57:27.966771 4558 generic.go:334] "Generic (PLEG): container finished" podID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerID="8a12e08019cdf90bc021796246b8cb4cd26600e714b44a9fcbd96e1616ed01db" exitCode=0 Jan 20 18:57:27 crc kubenswrapper[4558]: I0120 18:57:27.966860 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerDied","Data":"8a12e08019cdf90bc021796246b8cb4cd26600e714b44a9fcbd96e1616ed01db"} Jan 20 18:57:27 crc kubenswrapper[4558]: I0120 18:57:27.966911 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerStarted","Data":"34f5e56ad45f6257f106e7ec0fead6f98610b65caceb3cfc7626a2add684d678"} Jan 20 18:57:27 crc kubenswrapper[4558]: I0120 18:57:27.966933 4558 scope.go:117] "RemoveContainer" containerID="499ec313cf27a56b071eb6186a175031344542ae6ab21658ac5fb0feccc71012" Jan 20 18:57:27 crc kubenswrapper[4558]: I0120 18:57:27.969624 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pmdb5" event={"ID":"62f077fc-f52f-460a-afe8-e2a51fb285c4","Type":"ContainerStarted","Data":"e4e6ad7b8f2202cf64d7d225de4ce14138cf7aaa3cbd043486bc625f421a0df9"} Jan 20 18:57:28 crc kubenswrapper[4558]: I0120 18:57:28.985147 4558 generic.go:334] "Generic (PLEG): container finished" podID="62f077fc-f52f-460a-afe8-e2a51fb285c4" containerID="e4e6ad7b8f2202cf64d7d225de4ce14138cf7aaa3cbd043486bc625f421a0df9" exitCode=0 Jan 20 18:57:28 crc kubenswrapper[4558]: I0120 18:57:28.985201 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pmdb5" 
event={"ID":"62f077fc-f52f-460a-afe8-e2a51fb285c4","Type":"ContainerDied","Data":"e4e6ad7b8f2202cf64d7d225de4ce14138cf7aaa3cbd043486bc625f421a0df9"} Jan 20 18:57:28 crc kubenswrapper[4558]: I0120 18:57:28.990857 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-npkcz" event={"ID":"48f66b59-5414-48f5-b887-25da697bc698","Type":"ContainerStarted","Data":"4ea13c8f6f8651644d2f6851d3a5a170bee9e98bd5b89b5aaed5fa5e8b21f8ae"} Jan 20 18:57:30 crc kubenswrapper[4558]: I0120 18:57:30.002495 4558 generic.go:334] "Generic (PLEG): container finished" podID="48f66b59-5414-48f5-b887-25da697bc698" containerID="4ea13c8f6f8651644d2f6851d3a5a170bee9e98bd5b89b5aaed5fa5e8b21f8ae" exitCode=0 Jan 20 18:57:30 crc kubenswrapper[4558]: I0120 18:57:30.002687 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-npkcz" event={"ID":"48f66b59-5414-48f5-b887-25da697bc698","Type":"ContainerDied","Data":"4ea13c8f6f8651644d2f6851d3a5a170bee9e98bd5b89b5aaed5fa5e8b21f8ae"} Jan 20 18:57:30 crc kubenswrapper[4558]: I0120 18:57:30.006245 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pmdb5" event={"ID":"62f077fc-f52f-460a-afe8-e2a51fb285c4","Type":"ContainerStarted","Data":"c25e520cd2c70fda479ef4d0b3c789dd7c17ddfdf7ef329fe6f5e1fb7a612f62"} Jan 20 18:57:30 crc kubenswrapper[4558]: I0120 18:57:30.040739 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-pmdb5" podStartSLOduration=2.429687574 podStartE2EDuration="5.040700529s" podCreationTimestamp="2026-01-20 18:57:25 +0000 UTC" firstStartedPulling="2026-01-20 18:57:26.961239057 +0000 UTC m=+8140.721577024" lastFinishedPulling="2026-01-20 18:57:29.572252012 +0000 UTC m=+8143.332589979" observedRunningTime="2026-01-20 18:57:30.039330863 +0000 UTC m=+8143.799668830" watchObservedRunningTime="2026-01-20 18:57:30.040700529 +0000 UTC m=+8143.801038496" Jan 20 18:57:31 crc kubenswrapper[4558]: I0120 18:57:31.016649 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-npkcz" event={"ID":"48f66b59-5414-48f5-b887-25da697bc698","Type":"ContainerStarted","Data":"8efbb175addd96a9fb8a7dbc3727bca76876aef94e09cfdd642b31eb46c80a5c"} Jan 20 18:57:31 crc kubenswrapper[4558]: I0120 18:57:31.034604 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-npkcz" podStartSLOduration=1.488410203 podStartE2EDuration="4.034583236s" podCreationTimestamp="2026-01-20 18:57:27 +0000 UTC" firstStartedPulling="2026-01-20 18:57:27.96327825 +0000 UTC m=+8141.723616217" lastFinishedPulling="2026-01-20 18:57:30.509451283 +0000 UTC m=+8144.269789250" observedRunningTime="2026-01-20 18:57:31.029781218 +0000 UTC m=+8144.790119185" watchObservedRunningTime="2026-01-20 18:57:31.034583236 +0000 UTC m=+8144.794921203" Jan 20 18:57:35 crc kubenswrapper[4558]: I0120 18:57:35.670634 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-pmdb5" Jan 20 18:57:35 crc kubenswrapper[4558]: I0120 18:57:35.671374 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-pmdb5" Jan 20 18:57:35 crc kubenswrapper[4558]: I0120 18:57:35.706365 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-pmdb5" Jan 20 18:57:36 
crc kubenswrapper[4558]: I0120 18:57:36.080328 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-pmdb5" Jan 20 18:57:36 crc kubenswrapper[4558]: I0120 18:57:36.125879 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-pmdb5"] Jan 20 18:57:37 crc kubenswrapper[4558]: I0120 18:57:37.467809 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-npkcz" Jan 20 18:57:37 crc kubenswrapper[4558]: I0120 18:57:37.468967 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-npkcz" Jan 20 18:57:37 crc kubenswrapper[4558]: I0120 18:57:37.502848 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-npkcz" Jan 20 18:57:38 crc kubenswrapper[4558]: I0120 18:57:38.061440 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-pmdb5" podUID="62f077fc-f52f-460a-afe8-e2a51fb285c4" containerName="registry-server" containerID="cri-o://c25e520cd2c70fda479ef4d0b3c789dd7c17ddfdf7ef329fe6f5e1fb7a612f62" gracePeriod=2 Jan 20 18:57:38 crc kubenswrapper[4558]: I0120 18:57:38.100694 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-npkcz" Jan 20 18:57:38 crc kubenswrapper[4558]: I0120 18:57:38.337089 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-npkcz"] Jan 20 18:57:38 crc kubenswrapper[4558]: I0120 18:57:38.436085 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-pmdb5" Jan 20 18:57:38 crc kubenswrapper[4558]: I0120 18:57:38.575227 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/62f077fc-f52f-460a-afe8-e2a51fb285c4-catalog-content\") pod \"62f077fc-f52f-460a-afe8-e2a51fb285c4\" (UID: \"62f077fc-f52f-460a-afe8-e2a51fb285c4\") " Jan 20 18:57:38 crc kubenswrapper[4558]: I0120 18:57:38.575404 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/62f077fc-f52f-460a-afe8-e2a51fb285c4-utilities\") pod \"62f077fc-f52f-460a-afe8-e2a51fb285c4\" (UID: \"62f077fc-f52f-460a-afe8-e2a51fb285c4\") " Jan 20 18:57:38 crc kubenswrapper[4558]: I0120 18:57:38.575472 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-msptf\" (UniqueName: \"kubernetes.io/projected/62f077fc-f52f-460a-afe8-e2a51fb285c4-kube-api-access-msptf\") pod \"62f077fc-f52f-460a-afe8-e2a51fb285c4\" (UID: \"62f077fc-f52f-460a-afe8-e2a51fb285c4\") " Jan 20 18:57:38 crc kubenswrapper[4558]: I0120 18:57:38.578071 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/62f077fc-f52f-460a-afe8-e2a51fb285c4-utilities" (OuterVolumeSpecName: "utilities") pod "62f077fc-f52f-460a-afe8-e2a51fb285c4" (UID: "62f077fc-f52f-460a-afe8-e2a51fb285c4"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:57:38 crc kubenswrapper[4558]: I0120 18:57:38.583765 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/62f077fc-f52f-460a-afe8-e2a51fb285c4-kube-api-access-msptf" (OuterVolumeSpecName: "kube-api-access-msptf") pod "62f077fc-f52f-460a-afe8-e2a51fb285c4" (UID: "62f077fc-f52f-460a-afe8-e2a51fb285c4"). InnerVolumeSpecName "kube-api-access-msptf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:57:38 crc kubenswrapper[4558]: I0120 18:57:38.630013 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/62f077fc-f52f-460a-afe8-e2a51fb285c4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "62f077fc-f52f-460a-afe8-e2a51fb285c4" (UID: "62f077fc-f52f-460a-afe8-e2a51fb285c4"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:57:38 crc kubenswrapper[4558]: I0120 18:57:38.678021 4558 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/62f077fc-f52f-460a-afe8-e2a51fb285c4-utilities\") on node \"crc\" DevicePath \"\"" Jan 20 18:57:38 crc kubenswrapper[4558]: I0120 18:57:38.678065 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-msptf\" (UniqueName: \"kubernetes.io/projected/62f077fc-f52f-460a-afe8-e2a51fb285c4-kube-api-access-msptf\") on node \"crc\" DevicePath \"\"" Jan 20 18:57:38 crc kubenswrapper[4558]: I0120 18:57:38.678085 4558 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/62f077fc-f52f-460a-afe8-e2a51fb285c4-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 20 18:57:39 crc kubenswrapper[4558]: I0120 18:57:39.069092 4558 generic.go:334] "Generic (PLEG): container finished" podID="62f077fc-f52f-460a-afe8-e2a51fb285c4" containerID="c25e520cd2c70fda479ef4d0b3c789dd7c17ddfdf7ef329fe6f5e1fb7a612f62" exitCode=0 Jan 20 18:57:39 crc kubenswrapper[4558]: I0120 18:57:39.069796 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-pmdb5" Jan 20 18:57:39 crc kubenswrapper[4558]: I0120 18:57:39.070951 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pmdb5" event={"ID":"62f077fc-f52f-460a-afe8-e2a51fb285c4","Type":"ContainerDied","Data":"c25e520cd2c70fda479ef4d0b3c789dd7c17ddfdf7ef329fe6f5e1fb7a612f62"} Jan 20 18:57:39 crc kubenswrapper[4558]: I0120 18:57:39.071057 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pmdb5" event={"ID":"62f077fc-f52f-460a-afe8-e2a51fb285c4","Type":"ContainerDied","Data":"4389a01266102d61229ba661ea754d39b96fcd813e163b37e3c23360c673d626"} Jan 20 18:57:39 crc kubenswrapper[4558]: I0120 18:57:39.071088 4558 scope.go:117] "RemoveContainer" containerID="c25e520cd2c70fda479ef4d0b3c789dd7c17ddfdf7ef329fe6f5e1fb7a612f62" Jan 20 18:57:39 crc kubenswrapper[4558]: I0120 18:57:39.089031 4558 scope.go:117] "RemoveContainer" containerID="e4e6ad7b8f2202cf64d7d225de4ce14138cf7aaa3cbd043486bc625f421a0df9" Jan 20 18:57:39 crc kubenswrapper[4558]: I0120 18:57:39.104453 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-pmdb5"] Jan 20 18:57:39 crc kubenswrapper[4558]: I0120 18:57:39.109932 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-pmdb5"] Jan 20 18:57:39 crc kubenswrapper[4558]: I0120 18:57:39.118608 4558 scope.go:117] "RemoveContainer" containerID="c1ba3324d28bfc6f98c3f5bfbf196dd3addf6dc0557109ae952905604bc465d7" Jan 20 18:57:39 crc kubenswrapper[4558]: I0120 18:57:39.133539 4558 scope.go:117] "RemoveContainer" containerID="c25e520cd2c70fda479ef4d0b3c789dd7c17ddfdf7ef329fe6f5e1fb7a612f62" Jan 20 18:57:39 crc kubenswrapper[4558]: E0120 18:57:39.133921 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c25e520cd2c70fda479ef4d0b3c789dd7c17ddfdf7ef329fe6f5e1fb7a612f62\": container with ID starting with c25e520cd2c70fda479ef4d0b3c789dd7c17ddfdf7ef329fe6f5e1fb7a612f62 not found: ID does not exist" containerID="c25e520cd2c70fda479ef4d0b3c789dd7c17ddfdf7ef329fe6f5e1fb7a612f62" Jan 20 18:57:39 crc kubenswrapper[4558]: I0120 18:57:39.133955 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c25e520cd2c70fda479ef4d0b3c789dd7c17ddfdf7ef329fe6f5e1fb7a612f62"} err="failed to get container status \"c25e520cd2c70fda479ef4d0b3c789dd7c17ddfdf7ef329fe6f5e1fb7a612f62\": rpc error: code = NotFound desc = could not find container \"c25e520cd2c70fda479ef4d0b3c789dd7c17ddfdf7ef329fe6f5e1fb7a612f62\": container with ID starting with c25e520cd2c70fda479ef4d0b3c789dd7c17ddfdf7ef329fe6f5e1fb7a612f62 not found: ID does not exist" Jan 20 18:57:39 crc kubenswrapper[4558]: I0120 18:57:39.133979 4558 scope.go:117] "RemoveContainer" containerID="e4e6ad7b8f2202cf64d7d225de4ce14138cf7aaa3cbd043486bc625f421a0df9" Jan 20 18:57:39 crc kubenswrapper[4558]: E0120 18:57:39.134292 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e4e6ad7b8f2202cf64d7d225de4ce14138cf7aaa3cbd043486bc625f421a0df9\": container with ID starting with e4e6ad7b8f2202cf64d7d225de4ce14138cf7aaa3cbd043486bc625f421a0df9 not found: ID does not exist" containerID="e4e6ad7b8f2202cf64d7d225de4ce14138cf7aaa3cbd043486bc625f421a0df9" Jan 20 18:57:39 crc kubenswrapper[4558]: I0120 18:57:39.134315 4558 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e4e6ad7b8f2202cf64d7d225de4ce14138cf7aaa3cbd043486bc625f421a0df9"} err="failed to get container status \"e4e6ad7b8f2202cf64d7d225de4ce14138cf7aaa3cbd043486bc625f421a0df9\": rpc error: code = NotFound desc = could not find container \"e4e6ad7b8f2202cf64d7d225de4ce14138cf7aaa3cbd043486bc625f421a0df9\": container with ID starting with e4e6ad7b8f2202cf64d7d225de4ce14138cf7aaa3cbd043486bc625f421a0df9 not found: ID does not exist" Jan 20 18:57:39 crc kubenswrapper[4558]: I0120 18:57:39.134328 4558 scope.go:117] "RemoveContainer" containerID="c1ba3324d28bfc6f98c3f5bfbf196dd3addf6dc0557109ae952905604bc465d7" Jan 20 18:57:39 crc kubenswrapper[4558]: E0120 18:57:39.134711 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c1ba3324d28bfc6f98c3f5bfbf196dd3addf6dc0557109ae952905604bc465d7\": container with ID starting with c1ba3324d28bfc6f98c3f5bfbf196dd3addf6dc0557109ae952905604bc465d7 not found: ID does not exist" containerID="c1ba3324d28bfc6f98c3f5bfbf196dd3addf6dc0557109ae952905604bc465d7" Jan 20 18:57:39 crc kubenswrapper[4558]: I0120 18:57:39.134733 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c1ba3324d28bfc6f98c3f5bfbf196dd3addf6dc0557109ae952905604bc465d7"} err="failed to get container status \"c1ba3324d28bfc6f98c3f5bfbf196dd3addf6dc0557109ae952905604bc465d7\": rpc error: code = NotFound desc = could not find container \"c1ba3324d28bfc6f98c3f5bfbf196dd3addf6dc0557109ae952905604bc465d7\": container with ID starting with c1ba3324d28bfc6f98c3f5bfbf196dd3addf6dc0557109ae952905604bc465d7 not found: ID does not exist" Jan 20 18:57:40 crc kubenswrapper[4558]: I0120 18:57:40.077214 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-npkcz" podUID="48f66b59-5414-48f5-b887-25da697bc698" containerName="registry-server" containerID="cri-o://8efbb175addd96a9fb8a7dbc3727bca76876aef94e09cfdd642b31eb46c80a5c" gracePeriod=2 Jan 20 18:57:40 crc kubenswrapper[4558]: I0120 18:57:40.423278 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-npkcz" Jan 20 18:57:40 crc kubenswrapper[4558]: I0120 18:57:40.513757 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/48f66b59-5414-48f5-b887-25da697bc698-catalog-content\") pod \"48f66b59-5414-48f5-b887-25da697bc698\" (UID: \"48f66b59-5414-48f5-b887-25da697bc698\") " Jan 20 18:57:40 crc kubenswrapper[4558]: I0120 18:57:40.513976 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/48f66b59-5414-48f5-b887-25da697bc698-utilities\") pod \"48f66b59-5414-48f5-b887-25da697bc698\" (UID: \"48f66b59-5414-48f5-b887-25da697bc698\") " Jan 20 18:57:40 crc kubenswrapper[4558]: I0120 18:57:40.514038 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2dj9t\" (UniqueName: \"kubernetes.io/projected/48f66b59-5414-48f5-b887-25da697bc698-kube-api-access-2dj9t\") pod \"48f66b59-5414-48f5-b887-25da697bc698\" (UID: \"48f66b59-5414-48f5-b887-25da697bc698\") " Jan 20 18:57:40 crc kubenswrapper[4558]: I0120 18:57:40.514675 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/48f66b59-5414-48f5-b887-25da697bc698-utilities" (OuterVolumeSpecName: "utilities") pod "48f66b59-5414-48f5-b887-25da697bc698" (UID: "48f66b59-5414-48f5-b887-25da697bc698"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:57:40 crc kubenswrapper[4558]: I0120 18:57:40.519418 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/48f66b59-5414-48f5-b887-25da697bc698-kube-api-access-2dj9t" (OuterVolumeSpecName: "kube-api-access-2dj9t") pod "48f66b59-5414-48f5-b887-25da697bc698" (UID: "48f66b59-5414-48f5-b887-25da697bc698"). InnerVolumeSpecName "kube-api-access-2dj9t". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 18:57:40 crc kubenswrapper[4558]: I0120 18:57:40.554789 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/48f66b59-5414-48f5-b887-25da697bc698-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "48f66b59-5414-48f5-b887-25da697bc698" (UID: "48f66b59-5414-48f5-b887-25da697bc698"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 18:57:40 crc kubenswrapper[4558]: I0120 18:57:40.574443 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="62f077fc-f52f-460a-afe8-e2a51fb285c4" path="/var/lib/kubelet/pods/62f077fc-f52f-460a-afe8-e2a51fb285c4/volumes" Jan 20 18:57:40 crc kubenswrapper[4558]: I0120 18:57:40.616727 4558 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/48f66b59-5414-48f5-b887-25da697bc698-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 20 18:57:40 crc kubenswrapper[4558]: I0120 18:57:40.616766 4558 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/48f66b59-5414-48f5-b887-25da697bc698-utilities\") on node \"crc\" DevicePath \"\"" Jan 20 18:57:40 crc kubenswrapper[4558]: I0120 18:57:40.616782 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2dj9t\" (UniqueName: \"kubernetes.io/projected/48f66b59-5414-48f5-b887-25da697bc698-kube-api-access-2dj9t\") on node \"crc\" DevicePath \"\"" Jan 20 18:57:41 crc kubenswrapper[4558]: I0120 18:57:41.091291 4558 generic.go:334] "Generic (PLEG): container finished" podID="48f66b59-5414-48f5-b887-25da697bc698" containerID="8efbb175addd96a9fb8a7dbc3727bca76876aef94e09cfdd642b31eb46c80a5c" exitCode=0 Jan 20 18:57:41 crc kubenswrapper[4558]: I0120 18:57:41.091645 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-npkcz" event={"ID":"48f66b59-5414-48f5-b887-25da697bc698","Type":"ContainerDied","Data":"8efbb175addd96a9fb8a7dbc3727bca76876aef94e09cfdd642b31eb46c80a5c"} Jan 20 18:57:41 crc kubenswrapper[4558]: I0120 18:57:41.091698 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-npkcz" event={"ID":"48f66b59-5414-48f5-b887-25da697bc698","Type":"ContainerDied","Data":"b79500f62787a28926d1fa7cd052c80554741a0b230886f7de3d24cd5f40d322"} Jan 20 18:57:41 crc kubenswrapper[4558]: I0120 18:57:41.091728 4558 scope.go:117] "RemoveContainer" containerID="8efbb175addd96a9fb8a7dbc3727bca76876aef94e09cfdd642b31eb46c80a5c" Jan 20 18:57:41 crc kubenswrapper[4558]: I0120 18:57:41.091884 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-npkcz" Jan 20 18:57:41 crc kubenswrapper[4558]: I0120 18:57:41.119296 4558 scope.go:117] "RemoveContainer" containerID="4ea13c8f6f8651644d2f6851d3a5a170bee9e98bd5b89b5aaed5fa5e8b21f8ae" Jan 20 18:57:41 crc kubenswrapper[4558]: I0120 18:57:41.120154 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-npkcz"] Jan 20 18:57:41 crc kubenswrapper[4558]: I0120 18:57:41.124494 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-npkcz"] Jan 20 18:57:41 crc kubenswrapper[4558]: I0120 18:57:41.141111 4558 scope.go:117] "RemoveContainer" containerID="346a7170bf6eaa2019f60d25c189f6fcc4feea4f6d5e7497457b34942ea1c1fe" Jan 20 18:57:41 crc kubenswrapper[4558]: I0120 18:57:41.157391 4558 scope.go:117] "RemoveContainer" containerID="8efbb175addd96a9fb8a7dbc3727bca76876aef94e09cfdd642b31eb46c80a5c" Jan 20 18:57:41 crc kubenswrapper[4558]: E0120 18:57:41.157893 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8efbb175addd96a9fb8a7dbc3727bca76876aef94e09cfdd642b31eb46c80a5c\": container with ID starting with 8efbb175addd96a9fb8a7dbc3727bca76876aef94e09cfdd642b31eb46c80a5c not found: ID does not exist" containerID="8efbb175addd96a9fb8a7dbc3727bca76876aef94e09cfdd642b31eb46c80a5c" Jan 20 18:57:41 crc kubenswrapper[4558]: I0120 18:57:41.157937 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8efbb175addd96a9fb8a7dbc3727bca76876aef94e09cfdd642b31eb46c80a5c"} err="failed to get container status \"8efbb175addd96a9fb8a7dbc3727bca76876aef94e09cfdd642b31eb46c80a5c\": rpc error: code = NotFound desc = could not find container \"8efbb175addd96a9fb8a7dbc3727bca76876aef94e09cfdd642b31eb46c80a5c\": container with ID starting with 8efbb175addd96a9fb8a7dbc3727bca76876aef94e09cfdd642b31eb46c80a5c not found: ID does not exist" Jan 20 18:57:41 crc kubenswrapper[4558]: I0120 18:57:41.157969 4558 scope.go:117] "RemoveContainer" containerID="4ea13c8f6f8651644d2f6851d3a5a170bee9e98bd5b89b5aaed5fa5e8b21f8ae" Jan 20 18:57:41 crc kubenswrapper[4558]: E0120 18:57:41.158378 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4ea13c8f6f8651644d2f6851d3a5a170bee9e98bd5b89b5aaed5fa5e8b21f8ae\": container with ID starting with 4ea13c8f6f8651644d2f6851d3a5a170bee9e98bd5b89b5aaed5fa5e8b21f8ae not found: ID does not exist" containerID="4ea13c8f6f8651644d2f6851d3a5a170bee9e98bd5b89b5aaed5fa5e8b21f8ae" Jan 20 18:57:41 crc kubenswrapper[4558]: I0120 18:57:41.158403 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4ea13c8f6f8651644d2f6851d3a5a170bee9e98bd5b89b5aaed5fa5e8b21f8ae"} err="failed to get container status \"4ea13c8f6f8651644d2f6851d3a5a170bee9e98bd5b89b5aaed5fa5e8b21f8ae\": rpc error: code = NotFound desc = could not find container \"4ea13c8f6f8651644d2f6851d3a5a170bee9e98bd5b89b5aaed5fa5e8b21f8ae\": container with ID starting with 4ea13c8f6f8651644d2f6851d3a5a170bee9e98bd5b89b5aaed5fa5e8b21f8ae not found: ID does not exist" Jan 20 18:57:41 crc kubenswrapper[4558]: I0120 18:57:41.158418 4558 scope.go:117] "RemoveContainer" containerID="346a7170bf6eaa2019f60d25c189f6fcc4feea4f6d5e7497457b34942ea1c1fe" Jan 20 18:57:41 crc kubenswrapper[4558]: E0120 18:57:41.158701 4558 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"346a7170bf6eaa2019f60d25c189f6fcc4feea4f6d5e7497457b34942ea1c1fe\": container with ID starting with 346a7170bf6eaa2019f60d25c189f6fcc4feea4f6d5e7497457b34942ea1c1fe not found: ID does not exist" containerID="346a7170bf6eaa2019f60d25c189f6fcc4feea4f6d5e7497457b34942ea1c1fe" Jan 20 18:57:41 crc kubenswrapper[4558]: I0120 18:57:41.158737 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"346a7170bf6eaa2019f60d25c189f6fcc4feea4f6d5e7497457b34942ea1c1fe"} err="failed to get container status \"346a7170bf6eaa2019f60d25c189f6fcc4feea4f6d5e7497457b34942ea1c1fe\": rpc error: code = NotFound desc = could not find container \"346a7170bf6eaa2019f60d25c189f6fcc4feea4f6d5e7497457b34942ea1c1fe\": container with ID starting with 346a7170bf6eaa2019f60d25c189f6fcc4feea4f6d5e7497457b34942ea1c1fe not found: ID does not exist" Jan 20 18:57:42 crc kubenswrapper[4558]: I0120 18:57:42.573978 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="48f66b59-5414-48f5-b887-25da697bc698" path="/var/lib/kubelet/pods/48f66b59-5414-48f5-b887-25da697bc698/volumes" Jan 20 18:59:27 crc kubenswrapper[4558]: I0120 18:59:27.330487 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 18:59:27 crc kubenswrapper[4558]: I0120 18:59:27.331126 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 18:59:57 crc kubenswrapper[4558]: I0120 18:59:57.330067 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 18:59:57 crc kubenswrapper[4558]: I0120 18:59:57.330644 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 19:00:00 crc kubenswrapper[4558]: I0120 19:00:00.156177 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29482260-422w6"] Jan 20 19:00:00 crc kubenswrapper[4558]: E0120 19:00:00.156450 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48f66b59-5414-48f5-b887-25da697bc698" containerName="registry-server" Jan 20 19:00:00 crc kubenswrapper[4558]: I0120 19:00:00.156470 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="48f66b59-5414-48f5-b887-25da697bc698" containerName="registry-server" Jan 20 19:00:00 crc kubenswrapper[4558]: E0120 19:00:00.156494 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62f077fc-f52f-460a-afe8-e2a51fb285c4" containerName="registry-server" Jan 20 19:00:00 crc kubenswrapper[4558]: I0120 19:00:00.156499 4558 
state_mem.go:107] "Deleted CPUSet assignment" podUID="62f077fc-f52f-460a-afe8-e2a51fb285c4" containerName="registry-server" Jan 20 19:00:00 crc kubenswrapper[4558]: E0120 19:00:00.156506 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48f66b59-5414-48f5-b887-25da697bc698" containerName="extract-utilities" Jan 20 19:00:00 crc kubenswrapper[4558]: I0120 19:00:00.156512 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="48f66b59-5414-48f5-b887-25da697bc698" containerName="extract-utilities" Jan 20 19:00:00 crc kubenswrapper[4558]: E0120 19:00:00.156526 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62f077fc-f52f-460a-afe8-e2a51fb285c4" containerName="extract-content" Jan 20 19:00:00 crc kubenswrapper[4558]: I0120 19:00:00.156533 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="62f077fc-f52f-460a-afe8-e2a51fb285c4" containerName="extract-content" Jan 20 19:00:00 crc kubenswrapper[4558]: E0120 19:00:00.156540 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62f077fc-f52f-460a-afe8-e2a51fb285c4" containerName="extract-utilities" Jan 20 19:00:00 crc kubenswrapper[4558]: I0120 19:00:00.156546 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="62f077fc-f52f-460a-afe8-e2a51fb285c4" containerName="extract-utilities" Jan 20 19:00:00 crc kubenswrapper[4558]: E0120 19:00:00.156562 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48f66b59-5414-48f5-b887-25da697bc698" containerName="extract-content" Jan 20 19:00:00 crc kubenswrapper[4558]: I0120 19:00:00.156568 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="48f66b59-5414-48f5-b887-25da697bc698" containerName="extract-content" Jan 20 19:00:00 crc kubenswrapper[4558]: I0120 19:00:00.156678 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="62f077fc-f52f-460a-afe8-e2a51fb285c4" containerName="registry-server" Jan 20 19:00:00 crc kubenswrapper[4558]: I0120 19:00:00.156698 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="48f66b59-5414-48f5-b887-25da697bc698" containerName="registry-server" Jan 20 19:00:00 crc kubenswrapper[4558]: I0120 19:00:00.157480 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29482260-422w6" Jan 20 19:00:00 crc kubenswrapper[4558]: I0120 19:00:00.161448 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 20 19:00:00 crc kubenswrapper[4558]: I0120 19:00:00.161751 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 20 19:00:00 crc kubenswrapper[4558]: I0120 19:00:00.167879 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29482260-422w6"] Jan 20 19:00:00 crc kubenswrapper[4558]: I0120 19:00:00.267203 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/55dca4f6-b612-43f2-851d-888a552c942b-secret-volume\") pod \"collect-profiles-29482260-422w6\" (UID: \"55dca4f6-b612-43f2-851d-888a552c942b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482260-422w6" Jan 20 19:00:00 crc kubenswrapper[4558]: I0120 19:00:00.267275 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zk6c2\" (UniqueName: \"kubernetes.io/projected/55dca4f6-b612-43f2-851d-888a552c942b-kube-api-access-zk6c2\") pod \"collect-profiles-29482260-422w6\" (UID: \"55dca4f6-b612-43f2-851d-888a552c942b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482260-422w6" Jan 20 19:00:00 crc kubenswrapper[4558]: I0120 19:00:00.267314 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/55dca4f6-b612-43f2-851d-888a552c942b-config-volume\") pod \"collect-profiles-29482260-422w6\" (UID: \"55dca4f6-b612-43f2-851d-888a552c942b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482260-422w6" Jan 20 19:00:00 crc kubenswrapper[4558]: I0120 19:00:00.368739 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/55dca4f6-b612-43f2-851d-888a552c942b-secret-volume\") pod \"collect-profiles-29482260-422w6\" (UID: \"55dca4f6-b612-43f2-851d-888a552c942b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482260-422w6" Jan 20 19:00:00 crc kubenswrapper[4558]: I0120 19:00:00.368825 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zk6c2\" (UniqueName: \"kubernetes.io/projected/55dca4f6-b612-43f2-851d-888a552c942b-kube-api-access-zk6c2\") pod \"collect-profiles-29482260-422w6\" (UID: \"55dca4f6-b612-43f2-851d-888a552c942b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482260-422w6" Jan 20 19:00:00 crc kubenswrapper[4558]: I0120 19:00:00.368862 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/55dca4f6-b612-43f2-851d-888a552c942b-config-volume\") pod \"collect-profiles-29482260-422w6\" (UID: \"55dca4f6-b612-43f2-851d-888a552c942b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482260-422w6" Jan 20 19:00:00 crc kubenswrapper[4558]: I0120 19:00:00.370237 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/55dca4f6-b612-43f2-851d-888a552c942b-config-volume\") pod 
\"collect-profiles-29482260-422w6\" (UID: \"55dca4f6-b612-43f2-851d-888a552c942b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482260-422w6" Jan 20 19:00:00 crc kubenswrapper[4558]: I0120 19:00:00.393202 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/55dca4f6-b612-43f2-851d-888a552c942b-secret-volume\") pod \"collect-profiles-29482260-422w6\" (UID: \"55dca4f6-b612-43f2-851d-888a552c942b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482260-422w6" Jan 20 19:00:00 crc kubenswrapper[4558]: I0120 19:00:00.397019 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zk6c2\" (UniqueName: \"kubernetes.io/projected/55dca4f6-b612-43f2-851d-888a552c942b-kube-api-access-zk6c2\") pod \"collect-profiles-29482260-422w6\" (UID: \"55dca4f6-b612-43f2-851d-888a552c942b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482260-422w6" Jan 20 19:00:00 crc kubenswrapper[4558]: I0120 19:00:00.484721 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29482260-422w6" Jan 20 19:00:00 crc kubenswrapper[4558]: I0120 19:00:00.859330 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29482260-422w6"] Jan 20 19:00:01 crc kubenswrapper[4558]: I0120 19:00:01.061567 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29482260-422w6" event={"ID":"55dca4f6-b612-43f2-851d-888a552c942b","Type":"ContainerStarted","Data":"08cf3cfd4f06cdf84d3de2b0e748f5cbc63bedef105d01ccb1c91389e1b1fc3b"} Jan 20 19:00:01 crc kubenswrapper[4558]: I0120 19:00:01.061643 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29482260-422w6" event={"ID":"55dca4f6-b612-43f2-851d-888a552c942b","Type":"ContainerStarted","Data":"23fba7b0145d10bbbe12a44c4e0b35d2bd156922c4738ad66e6385d439652ab4"} Jan 20 19:00:01 crc kubenswrapper[4558]: I0120 19:00:01.081499 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29482260-422w6" podStartSLOduration=1.08147667 podStartE2EDuration="1.08147667s" podCreationTimestamp="2026-01-20 19:00:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 19:00:01.073932705 +0000 UTC m=+8294.834270673" watchObservedRunningTime="2026-01-20 19:00:01.08147667 +0000 UTC m=+8294.841814638" Jan 20 19:00:02 crc kubenswrapper[4558]: I0120 19:00:02.070766 4558 generic.go:334] "Generic (PLEG): container finished" podID="55dca4f6-b612-43f2-851d-888a552c942b" containerID="08cf3cfd4f06cdf84d3de2b0e748f5cbc63bedef105d01ccb1c91389e1b1fc3b" exitCode=0 Jan 20 19:00:02 crc kubenswrapper[4558]: I0120 19:00:02.070955 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29482260-422w6" event={"ID":"55dca4f6-b612-43f2-851d-888a552c942b","Type":"ContainerDied","Data":"08cf3cfd4f06cdf84d3de2b0e748f5cbc63bedef105d01ccb1c91389e1b1fc3b"} Jan 20 19:00:03 crc kubenswrapper[4558]: I0120 19:00:03.294793 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29482260-422w6" Jan 20 19:00:03 crc kubenswrapper[4558]: I0120 19:00:03.405997 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/55dca4f6-b612-43f2-851d-888a552c942b-config-volume\") pod \"55dca4f6-b612-43f2-851d-888a552c942b\" (UID: \"55dca4f6-b612-43f2-851d-888a552c942b\") " Jan 20 19:00:03 crc kubenswrapper[4558]: I0120 19:00:03.406083 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/55dca4f6-b612-43f2-851d-888a552c942b-secret-volume\") pod \"55dca4f6-b612-43f2-851d-888a552c942b\" (UID: \"55dca4f6-b612-43f2-851d-888a552c942b\") " Jan 20 19:00:03 crc kubenswrapper[4558]: I0120 19:00:03.406140 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zk6c2\" (UniqueName: \"kubernetes.io/projected/55dca4f6-b612-43f2-851d-888a552c942b-kube-api-access-zk6c2\") pod \"55dca4f6-b612-43f2-851d-888a552c942b\" (UID: \"55dca4f6-b612-43f2-851d-888a552c942b\") " Jan 20 19:00:03 crc kubenswrapper[4558]: I0120 19:00:03.406996 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/55dca4f6-b612-43f2-851d-888a552c942b-config-volume" (OuterVolumeSpecName: "config-volume") pod "55dca4f6-b612-43f2-851d-888a552c942b" (UID: "55dca4f6-b612-43f2-851d-888a552c942b"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 19:00:03 crc kubenswrapper[4558]: I0120 19:00:03.411993 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/55dca4f6-b612-43f2-851d-888a552c942b-kube-api-access-zk6c2" (OuterVolumeSpecName: "kube-api-access-zk6c2") pod "55dca4f6-b612-43f2-851d-888a552c942b" (UID: "55dca4f6-b612-43f2-851d-888a552c942b"). InnerVolumeSpecName "kube-api-access-zk6c2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 19:00:03 crc kubenswrapper[4558]: I0120 19:00:03.412077 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/55dca4f6-b612-43f2-851d-888a552c942b-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "55dca4f6-b612-43f2-851d-888a552c942b" (UID: "55dca4f6-b612-43f2-851d-888a552c942b"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 19:00:03 crc kubenswrapper[4558]: I0120 19:00:03.508733 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zk6c2\" (UniqueName: \"kubernetes.io/projected/55dca4f6-b612-43f2-851d-888a552c942b-kube-api-access-zk6c2\") on node \"crc\" DevicePath \"\"" Jan 20 19:00:03 crc kubenswrapper[4558]: I0120 19:00:03.509080 4558 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/55dca4f6-b612-43f2-851d-888a552c942b-config-volume\") on node \"crc\" DevicePath \"\"" Jan 20 19:00:03 crc kubenswrapper[4558]: I0120 19:00:03.509095 4558 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/55dca4f6-b612-43f2-851d-888a552c942b-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 20 19:00:04 crc kubenswrapper[4558]: I0120 19:00:04.090375 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29482260-422w6" event={"ID":"55dca4f6-b612-43f2-851d-888a552c942b","Type":"ContainerDied","Data":"23fba7b0145d10bbbe12a44c4e0b35d2bd156922c4738ad66e6385d439652ab4"} Jan 20 19:00:04 crc kubenswrapper[4558]: I0120 19:00:04.090436 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="23fba7b0145d10bbbe12a44c4e0b35d2bd156922c4738ad66e6385d439652ab4" Jan 20 19:00:04 crc kubenswrapper[4558]: I0120 19:00:04.090512 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29482260-422w6" Jan 20 19:00:04 crc kubenswrapper[4558]: I0120 19:00:04.360721 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29482215-r8bkj"] Jan 20 19:00:04 crc kubenswrapper[4558]: I0120 19:00:04.368983 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29482215-r8bkj"] Jan 20 19:00:04 crc kubenswrapper[4558]: I0120 19:00:04.575356 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cfc4c95c-9901-47d1-af58-db23e6350202" path="/var/lib/kubelet/pods/cfc4c95c-9901-47d1-af58-db23e6350202/volumes" Jan 20 19:00:27 crc kubenswrapper[4558]: I0120 19:00:27.330226 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 19:00:27 crc kubenswrapper[4558]: I0120 19:00:27.330765 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 19:00:27 crc kubenswrapper[4558]: I0120 19:00:27.330822 4558 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" Jan 20 19:00:27 crc kubenswrapper[4558]: I0120 19:00:27.331402 4558 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"34f5e56ad45f6257f106e7ec0fead6f98610b65caceb3cfc7626a2add684d678"} 
pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 20 19:00:27 crc kubenswrapper[4558]: I0120 19:00:27.331465 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" containerID="cri-o://34f5e56ad45f6257f106e7ec0fead6f98610b65caceb3cfc7626a2add684d678" gracePeriod=600 Jan 20 19:00:27 crc kubenswrapper[4558]: E0120 19:00:27.469668 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:00:28 crc kubenswrapper[4558]: I0120 19:00:28.248557 4558 generic.go:334] "Generic (PLEG): container finished" podID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerID="34f5e56ad45f6257f106e7ec0fead6f98610b65caceb3cfc7626a2add684d678" exitCode=0 Jan 20 19:00:28 crc kubenswrapper[4558]: I0120 19:00:28.248643 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerDied","Data":"34f5e56ad45f6257f106e7ec0fead6f98610b65caceb3cfc7626a2add684d678"} Jan 20 19:00:28 crc kubenswrapper[4558]: I0120 19:00:28.248992 4558 scope.go:117] "RemoveContainer" containerID="8a12e08019cdf90bc021796246b8cb4cd26600e714b44a9fcbd96e1616ed01db" Jan 20 19:00:28 crc kubenswrapper[4558]: I0120 19:00:28.249904 4558 scope.go:117] "RemoveContainer" containerID="34f5e56ad45f6257f106e7ec0fead6f98610b65caceb3cfc7626a2add684d678" Jan 20 19:00:28 crc kubenswrapper[4558]: E0120 19:00:28.250456 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:00:38 crc kubenswrapper[4558]: I0120 19:00:38.566517 4558 scope.go:117] "RemoveContainer" containerID="34f5e56ad45f6257f106e7ec0fead6f98610b65caceb3cfc7626a2add684d678" Jan 20 19:00:38 crc kubenswrapper[4558]: E0120 19:00:38.567493 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:00:40 crc kubenswrapper[4558]: I0120 19:00:40.511351 4558 scope.go:117] "RemoveContainer" containerID="f0ab58e4952452d7ee1b43f6dac5ef59c7f4f4ce94f157921ec816ea03694353" Jan 20 19:00:52 crc kubenswrapper[4558]: I0120 19:00:52.566531 4558 scope.go:117] "RemoveContainer" containerID="34f5e56ad45f6257f106e7ec0fead6f98610b65caceb3cfc7626a2add684d678" Jan 20 19:00:52 crc 
kubenswrapper[4558]: E0120 19:00:52.567181 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:01:07 crc kubenswrapper[4558]: I0120 19:01:07.565761 4558 scope.go:117] "RemoveContainer" containerID="34f5e56ad45f6257f106e7ec0fead6f98610b65caceb3cfc7626a2add684d678" Jan 20 19:01:07 crc kubenswrapper[4558]: E0120 19:01:07.566719 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:01:22 crc kubenswrapper[4558]: I0120 19:01:22.566112 4558 scope.go:117] "RemoveContainer" containerID="34f5e56ad45f6257f106e7ec0fead6f98610b65caceb3cfc7626a2add684d678" Jan 20 19:01:22 crc kubenswrapper[4558]: E0120 19:01:22.566800 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:01:37 crc kubenswrapper[4558]: I0120 19:01:37.566148 4558 scope.go:117] "RemoveContainer" containerID="34f5e56ad45f6257f106e7ec0fead6f98610b65caceb3cfc7626a2add684d678" Jan 20 19:01:37 crc kubenswrapper[4558]: E0120 19:01:37.567255 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:01:49 crc kubenswrapper[4558]: I0120 19:01:49.566595 4558 scope.go:117] "RemoveContainer" containerID="34f5e56ad45f6257f106e7ec0fead6f98610b65caceb3cfc7626a2add684d678" Jan 20 19:01:49 crc kubenswrapper[4558]: E0120 19:01:49.567807 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:02:02 crc kubenswrapper[4558]: I0120 19:02:02.565778 4558 scope.go:117] "RemoveContainer" containerID="34f5e56ad45f6257f106e7ec0fead6f98610b65caceb3cfc7626a2add684d678" Jan 20 19:02:02 crc kubenswrapper[4558]: E0120 19:02:02.566567 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: 
\"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:02:13 crc kubenswrapper[4558]: I0120 19:02:13.566296 4558 scope.go:117] "RemoveContainer" containerID="34f5e56ad45f6257f106e7ec0fead6f98610b65caceb3cfc7626a2add684d678" Jan 20 19:02:13 crc kubenswrapper[4558]: E0120 19:02:13.567122 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:02:26 crc kubenswrapper[4558]: I0120 19:02:26.565879 4558 scope.go:117] "RemoveContainer" containerID="34f5e56ad45f6257f106e7ec0fead6f98610b65caceb3cfc7626a2add684d678" Jan 20 19:02:26 crc kubenswrapper[4558]: E0120 19:02:26.567822 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:02:40 crc kubenswrapper[4558]: I0120 19:02:40.566942 4558 scope.go:117] "RemoveContainer" containerID="34f5e56ad45f6257f106e7ec0fead6f98610b65caceb3cfc7626a2add684d678" Jan 20 19:02:40 crc kubenswrapper[4558]: E0120 19:02:40.567736 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:02:54 crc kubenswrapper[4558]: I0120 19:02:54.566716 4558 scope.go:117] "RemoveContainer" containerID="34f5e56ad45f6257f106e7ec0fead6f98610b65caceb3cfc7626a2add684d678" Jan 20 19:02:54 crc kubenswrapper[4558]: E0120 19:02:54.567699 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:03:06 crc kubenswrapper[4558]: I0120 19:03:06.568727 4558 scope.go:117] "RemoveContainer" containerID="34f5e56ad45f6257f106e7ec0fead6f98610b65caceb3cfc7626a2add684d678" Jan 20 19:03:06 crc kubenswrapper[4558]: E0120 19:03:06.569446 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:03:21 crc kubenswrapper[4558]: I0120 19:03:21.565600 4558 scope.go:117] "RemoveContainer" containerID="34f5e56ad45f6257f106e7ec0fead6f98610b65caceb3cfc7626a2add684d678" Jan 20 19:03:21 crc kubenswrapper[4558]: E0120 19:03:21.566222 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:03:35 crc kubenswrapper[4558]: I0120 19:03:35.410949 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-nprlh"] Jan 20 19:03:35 crc kubenswrapper[4558]: E0120 19:03:35.412047 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55dca4f6-b612-43f2-851d-888a552c942b" containerName="collect-profiles" Jan 20 19:03:35 crc kubenswrapper[4558]: I0120 19:03:35.412065 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="55dca4f6-b612-43f2-851d-888a552c942b" containerName="collect-profiles" Jan 20 19:03:35 crc kubenswrapper[4558]: I0120 19:03:35.412283 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="55dca4f6-b612-43f2-851d-888a552c942b" containerName="collect-profiles" Jan 20 19:03:35 crc kubenswrapper[4558]: I0120 19:03:35.413372 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-nprlh" Jan 20 19:03:35 crc kubenswrapper[4558]: I0120 19:03:35.425088 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-nprlh"] Jan 20 19:03:35 crc kubenswrapper[4558]: I0120 19:03:35.465907 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e167bda4-10a1-452e-91ac-8448595c1c24-catalog-content\") pod \"redhat-operators-nprlh\" (UID: \"e167bda4-10a1-452e-91ac-8448595c1c24\") " pod="openshift-marketplace/redhat-operators-nprlh" Jan 20 19:03:35 crc kubenswrapper[4558]: I0120 19:03:35.465959 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e167bda4-10a1-452e-91ac-8448595c1c24-utilities\") pod \"redhat-operators-nprlh\" (UID: \"e167bda4-10a1-452e-91ac-8448595c1c24\") " pod="openshift-marketplace/redhat-operators-nprlh" Jan 20 19:03:35 crc kubenswrapper[4558]: I0120 19:03:35.465981 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wgpv4\" (UniqueName: \"kubernetes.io/projected/e167bda4-10a1-452e-91ac-8448595c1c24-kube-api-access-wgpv4\") pod \"redhat-operators-nprlh\" (UID: \"e167bda4-10a1-452e-91ac-8448595c1c24\") " pod="openshift-marketplace/redhat-operators-nprlh" Jan 20 19:03:35 crc kubenswrapper[4558]: I0120 19:03:35.566484 4558 scope.go:117] "RemoveContainer" containerID="34f5e56ad45f6257f106e7ec0fead6f98610b65caceb3cfc7626a2add684d678" Jan 20 19:03:35 crc kubenswrapper[4558]: I0120 19:03:35.567189 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/e167bda4-10a1-452e-91ac-8448595c1c24-catalog-content\") pod \"redhat-operators-nprlh\" (UID: \"e167bda4-10a1-452e-91ac-8448595c1c24\") " pod="openshift-marketplace/redhat-operators-nprlh" Jan 20 19:03:35 crc kubenswrapper[4558]: I0120 19:03:35.567263 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e167bda4-10a1-452e-91ac-8448595c1c24-utilities\") pod \"redhat-operators-nprlh\" (UID: \"e167bda4-10a1-452e-91ac-8448595c1c24\") " pod="openshift-marketplace/redhat-operators-nprlh" Jan 20 19:03:35 crc kubenswrapper[4558]: I0120 19:03:35.567289 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wgpv4\" (UniqueName: \"kubernetes.io/projected/e167bda4-10a1-452e-91ac-8448595c1c24-kube-api-access-wgpv4\") pod \"redhat-operators-nprlh\" (UID: \"e167bda4-10a1-452e-91ac-8448595c1c24\") " pod="openshift-marketplace/redhat-operators-nprlh" Jan 20 19:03:35 crc kubenswrapper[4558]: I0120 19:03:35.567881 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e167bda4-10a1-452e-91ac-8448595c1c24-catalog-content\") pod \"redhat-operators-nprlh\" (UID: \"e167bda4-10a1-452e-91ac-8448595c1c24\") " pod="openshift-marketplace/redhat-operators-nprlh" Jan 20 19:03:35 crc kubenswrapper[4558]: E0120 19:03:35.582696 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:03:35 crc kubenswrapper[4558]: I0120 19:03:35.567911 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e167bda4-10a1-452e-91ac-8448595c1c24-utilities\") pod \"redhat-operators-nprlh\" (UID: \"e167bda4-10a1-452e-91ac-8448595c1c24\") " pod="openshift-marketplace/redhat-operators-nprlh" Jan 20 19:03:35 crc kubenswrapper[4558]: I0120 19:03:35.601510 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wgpv4\" (UniqueName: \"kubernetes.io/projected/e167bda4-10a1-452e-91ac-8448595c1c24-kube-api-access-wgpv4\") pod \"redhat-operators-nprlh\" (UID: \"e167bda4-10a1-452e-91ac-8448595c1c24\") " pod="openshift-marketplace/redhat-operators-nprlh" Jan 20 19:03:35 crc kubenswrapper[4558]: I0120 19:03:35.732523 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-nprlh" Jan 20 19:03:36 crc kubenswrapper[4558]: I0120 19:03:36.144365 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-nprlh"] Jan 20 19:03:36 crc kubenswrapper[4558]: I0120 19:03:36.653048 4558 generic.go:334] "Generic (PLEG): container finished" podID="e167bda4-10a1-452e-91ac-8448595c1c24" containerID="e1928ffe2df30157e12d07af5e7d04f5889370661af02df2d1cbe5fede7cc55c" exitCode=0 Jan 20 19:03:36 crc kubenswrapper[4558]: I0120 19:03:36.653200 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nprlh" event={"ID":"e167bda4-10a1-452e-91ac-8448595c1c24","Type":"ContainerDied","Data":"e1928ffe2df30157e12d07af5e7d04f5889370661af02df2d1cbe5fede7cc55c"} Jan 20 19:03:36 crc kubenswrapper[4558]: I0120 19:03:36.653459 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nprlh" event={"ID":"e167bda4-10a1-452e-91ac-8448595c1c24","Type":"ContainerStarted","Data":"6faa99218571da3f1361fc405be38af222beccb1a7b6ec838dcae83e8b41851a"} Jan 20 19:03:36 crc kubenswrapper[4558]: I0120 19:03:36.655089 4558 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 20 19:03:38 crc kubenswrapper[4558]: I0120 19:03:38.667485 4558 generic.go:334] "Generic (PLEG): container finished" podID="e167bda4-10a1-452e-91ac-8448595c1c24" containerID="bd4d12cc52b254971e5187f411ce08d3e405ed0cadd8bdaf4552289ed6a122ca" exitCode=0 Jan 20 19:03:38 crc kubenswrapper[4558]: I0120 19:03:38.667856 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nprlh" event={"ID":"e167bda4-10a1-452e-91ac-8448595c1c24","Type":"ContainerDied","Data":"bd4d12cc52b254971e5187f411ce08d3e405ed0cadd8bdaf4552289ed6a122ca"} Jan 20 19:03:39 crc kubenswrapper[4558]: I0120 19:03:39.677394 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nprlh" event={"ID":"e167bda4-10a1-452e-91ac-8448595c1c24","Type":"ContainerStarted","Data":"2424a7c2a9a5f5737819db8fa162e35f1bf8c8841b06cf4c138d863f40ef1e68"} Jan 20 19:03:39 crc kubenswrapper[4558]: I0120 19:03:39.698985 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-nprlh" podStartSLOduration=2.149922621 podStartE2EDuration="4.698967454s" podCreationTimestamp="2026-01-20 19:03:35 +0000 UTC" firstStartedPulling="2026-01-20 19:03:36.654826772 +0000 UTC m=+8510.415164739" lastFinishedPulling="2026-01-20 19:03:39.203871604 +0000 UTC m=+8512.964209572" observedRunningTime="2026-01-20 19:03:39.69503488 +0000 UTC m=+8513.455372847" watchObservedRunningTime="2026-01-20 19:03:39.698967454 +0000 UTC m=+8513.459305421" Jan 20 19:03:45 crc kubenswrapper[4558]: I0120 19:03:45.733439 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-nprlh" Jan 20 19:03:45 crc kubenswrapper[4558]: I0120 19:03:45.733824 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-nprlh" Jan 20 19:03:45 crc kubenswrapper[4558]: I0120 19:03:45.787407 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-nprlh" Jan 20 19:03:46 crc kubenswrapper[4558]: I0120 19:03:46.762460 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-marketplace/redhat-operators-nprlh" Jan 20 19:03:46 crc kubenswrapper[4558]: I0120 19:03:46.805862 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-nprlh"] Jan 20 19:03:48 crc kubenswrapper[4558]: I0120 19:03:48.738492 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-nprlh" podUID="e167bda4-10a1-452e-91ac-8448595c1c24" containerName="registry-server" containerID="cri-o://2424a7c2a9a5f5737819db8fa162e35f1bf8c8841b06cf4c138d863f40ef1e68" gracePeriod=2 Jan 20 19:03:49 crc kubenswrapper[4558]: I0120 19:03:49.566460 4558 scope.go:117] "RemoveContainer" containerID="34f5e56ad45f6257f106e7ec0fead6f98610b65caceb3cfc7626a2add684d678" Jan 20 19:03:49 crc kubenswrapper[4558]: E0120 19:03:49.566905 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:03:49 crc kubenswrapper[4558]: I0120 19:03:49.750931 4558 generic.go:334] "Generic (PLEG): container finished" podID="e167bda4-10a1-452e-91ac-8448595c1c24" containerID="2424a7c2a9a5f5737819db8fa162e35f1bf8c8841b06cf4c138d863f40ef1e68" exitCode=0 Jan 20 19:03:49 crc kubenswrapper[4558]: I0120 19:03:49.750981 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nprlh" event={"ID":"e167bda4-10a1-452e-91ac-8448595c1c24","Type":"ContainerDied","Data":"2424a7c2a9a5f5737819db8fa162e35f1bf8c8841b06cf4c138d863f40ef1e68"} Jan 20 19:03:50 crc kubenswrapper[4558]: I0120 19:03:50.194955 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-nprlh" Jan 20 19:03:50 crc kubenswrapper[4558]: I0120 19:03:50.382041 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e167bda4-10a1-452e-91ac-8448595c1c24-catalog-content\") pod \"e167bda4-10a1-452e-91ac-8448595c1c24\" (UID: \"e167bda4-10a1-452e-91ac-8448595c1c24\") " Jan 20 19:03:50 crc kubenswrapper[4558]: I0120 19:03:50.382229 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wgpv4\" (UniqueName: \"kubernetes.io/projected/e167bda4-10a1-452e-91ac-8448595c1c24-kube-api-access-wgpv4\") pod \"e167bda4-10a1-452e-91ac-8448595c1c24\" (UID: \"e167bda4-10a1-452e-91ac-8448595c1c24\") " Jan 20 19:03:50 crc kubenswrapper[4558]: I0120 19:03:50.382284 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e167bda4-10a1-452e-91ac-8448595c1c24-utilities\") pod \"e167bda4-10a1-452e-91ac-8448595c1c24\" (UID: \"e167bda4-10a1-452e-91ac-8448595c1c24\") " Jan 20 19:03:50 crc kubenswrapper[4558]: I0120 19:03:50.383155 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e167bda4-10a1-452e-91ac-8448595c1c24-utilities" (OuterVolumeSpecName: "utilities") pod "e167bda4-10a1-452e-91ac-8448595c1c24" (UID: "e167bda4-10a1-452e-91ac-8448595c1c24"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 19:03:50 crc kubenswrapper[4558]: I0120 19:03:50.388732 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e167bda4-10a1-452e-91ac-8448595c1c24-kube-api-access-wgpv4" (OuterVolumeSpecName: "kube-api-access-wgpv4") pod "e167bda4-10a1-452e-91ac-8448595c1c24" (UID: "e167bda4-10a1-452e-91ac-8448595c1c24"). InnerVolumeSpecName "kube-api-access-wgpv4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 19:03:50 crc kubenswrapper[4558]: I0120 19:03:50.485467 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wgpv4\" (UniqueName: \"kubernetes.io/projected/e167bda4-10a1-452e-91ac-8448595c1c24-kube-api-access-wgpv4\") on node \"crc\" DevicePath \"\"" Jan 20 19:03:50 crc kubenswrapper[4558]: I0120 19:03:50.485504 4558 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e167bda4-10a1-452e-91ac-8448595c1c24-utilities\") on node \"crc\" DevicePath \"\"" Jan 20 19:03:50 crc kubenswrapper[4558]: I0120 19:03:50.516850 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e167bda4-10a1-452e-91ac-8448595c1c24-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e167bda4-10a1-452e-91ac-8448595c1c24" (UID: "e167bda4-10a1-452e-91ac-8448595c1c24"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 19:03:50 crc kubenswrapper[4558]: I0120 19:03:50.586288 4558 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e167bda4-10a1-452e-91ac-8448595c1c24-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 20 19:03:50 crc kubenswrapper[4558]: I0120 19:03:50.767348 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nprlh" event={"ID":"e167bda4-10a1-452e-91ac-8448595c1c24","Type":"ContainerDied","Data":"6faa99218571da3f1361fc405be38af222beccb1a7b6ec838dcae83e8b41851a"} Jan 20 19:03:50 crc kubenswrapper[4558]: I0120 19:03:50.767468 4558 scope.go:117] "RemoveContainer" containerID="2424a7c2a9a5f5737819db8fa162e35f1bf8c8841b06cf4c138d863f40ef1e68" Jan 20 19:03:50 crc kubenswrapper[4558]: I0120 19:03:50.767744 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-nprlh" Jan 20 19:03:50 crc kubenswrapper[4558]: I0120 19:03:50.788520 4558 scope.go:117] "RemoveContainer" containerID="bd4d12cc52b254971e5187f411ce08d3e405ed0cadd8bdaf4552289ed6a122ca" Jan 20 19:03:50 crc kubenswrapper[4558]: I0120 19:03:50.788916 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-nprlh"] Jan 20 19:03:50 crc kubenswrapper[4558]: I0120 19:03:50.799765 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-nprlh"] Jan 20 19:03:50 crc kubenswrapper[4558]: I0120 19:03:50.805212 4558 scope.go:117] "RemoveContainer" containerID="e1928ffe2df30157e12d07af5e7d04f5889370661af02df2d1cbe5fede7cc55c" Jan 20 19:03:52 crc kubenswrapper[4558]: I0120 19:03:52.595927 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e167bda4-10a1-452e-91ac-8448595c1c24" path="/var/lib/kubelet/pods/e167bda4-10a1-452e-91ac-8448595c1c24/volumes" Jan 20 19:04:03 crc kubenswrapper[4558]: I0120 19:04:03.565570 4558 scope.go:117] "RemoveContainer" containerID="34f5e56ad45f6257f106e7ec0fead6f98610b65caceb3cfc7626a2add684d678" Jan 20 19:04:03 crc kubenswrapper[4558]: E0120 19:04:03.566318 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:04:17 crc kubenswrapper[4558]: I0120 19:04:17.565861 4558 scope.go:117] "RemoveContainer" containerID="34f5e56ad45f6257f106e7ec0fead6f98610b65caceb3cfc7626a2add684d678" Jan 20 19:04:17 crc kubenswrapper[4558]: E0120 19:04:17.566422 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:04:32 crc kubenswrapper[4558]: I0120 19:04:32.566385 4558 scope.go:117] "RemoveContainer" containerID="34f5e56ad45f6257f106e7ec0fead6f98610b65caceb3cfc7626a2add684d678" Jan 20 19:04:32 crc kubenswrapper[4558]: E0120 19:04:32.567313 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:04:44 crc kubenswrapper[4558]: I0120 19:04:44.566257 4558 scope.go:117] "RemoveContainer" containerID="34f5e56ad45f6257f106e7ec0fead6f98610b65caceb3cfc7626a2add684d678" Jan 20 19:04:44 crc kubenswrapper[4558]: E0120 19:04:44.567182 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:04:59 crc kubenswrapper[4558]: I0120 19:04:59.567028 4558 scope.go:117] "RemoveContainer" containerID="34f5e56ad45f6257f106e7ec0fead6f98610b65caceb3cfc7626a2add684d678" Jan 20 19:04:59 crc kubenswrapper[4558]: E0120 19:04:59.568085 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:05:10 crc kubenswrapper[4558]: I0120 19:05:10.565954 4558 scope.go:117] "RemoveContainer" containerID="34f5e56ad45f6257f106e7ec0fead6f98610b65caceb3cfc7626a2add684d678" Jan 20 19:05:10 crc kubenswrapper[4558]: E0120 19:05:10.566906 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:05:24 crc kubenswrapper[4558]: I0120 19:05:24.567310 4558 scope.go:117] "RemoveContainer" containerID="34f5e56ad45f6257f106e7ec0fead6f98610b65caceb3cfc7626a2add684d678" Jan 20 19:05:24 crc kubenswrapper[4558]: E0120 19:05:24.567902 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:05:39 crc kubenswrapper[4558]: I0120 19:05:39.566328 4558 scope.go:117] "RemoveContainer" containerID="34f5e56ad45f6257f106e7ec0fead6f98610b65caceb3cfc7626a2add684d678" Jan 20 19:05:40 crc kubenswrapper[4558]: I0120 19:05:40.525592 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerStarted","Data":"fc482c813b9ae92dec3b3665026210640e14fa0e406eb14c2b13c4577051c155"} Jan 20 19:06:55 crc kubenswrapper[4558]: I0120 19:06:55.434484 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-cr754"] Jan 20 19:06:55 crc kubenswrapper[4558]: E0120 19:06:55.435212 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e167bda4-10a1-452e-91ac-8448595c1c24" containerName="registry-server" Jan 20 19:06:55 crc kubenswrapper[4558]: I0120 19:06:55.435226 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e167bda4-10a1-452e-91ac-8448595c1c24" containerName="registry-server" Jan 20 19:06:55 crc kubenswrapper[4558]: E0120 19:06:55.435236 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e167bda4-10a1-452e-91ac-8448595c1c24" containerName="extract-utilities" Jan 20 19:06:55 crc 
kubenswrapper[4558]: I0120 19:06:55.435243 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e167bda4-10a1-452e-91ac-8448595c1c24" containerName="extract-utilities" Jan 20 19:06:55 crc kubenswrapper[4558]: E0120 19:06:55.435270 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e167bda4-10a1-452e-91ac-8448595c1c24" containerName="extract-content" Jan 20 19:06:55 crc kubenswrapper[4558]: I0120 19:06:55.435276 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="e167bda4-10a1-452e-91ac-8448595c1c24" containerName="extract-content" Jan 20 19:06:55 crc kubenswrapper[4558]: I0120 19:06:55.435404 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="e167bda4-10a1-452e-91ac-8448595c1c24" containerName="registry-server" Jan 20 19:06:55 crc kubenswrapper[4558]: I0120 19:06:55.436209 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-cr754" Jan 20 19:06:55 crc kubenswrapper[4558]: I0120 19:06:55.444301 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-cr754"] Jan 20 19:06:55 crc kubenswrapper[4558]: I0120 19:06:55.605692 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pgdqw\" (UniqueName: \"kubernetes.io/projected/68e1a307-6cef-496e-a9ff-849e257f2a21-kube-api-access-pgdqw\") pod \"redhat-marketplace-cr754\" (UID: \"68e1a307-6cef-496e-a9ff-849e257f2a21\") " pod="openshift-marketplace/redhat-marketplace-cr754" Jan 20 19:06:55 crc kubenswrapper[4558]: I0120 19:06:55.605750 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/68e1a307-6cef-496e-a9ff-849e257f2a21-catalog-content\") pod \"redhat-marketplace-cr754\" (UID: \"68e1a307-6cef-496e-a9ff-849e257f2a21\") " pod="openshift-marketplace/redhat-marketplace-cr754" Jan 20 19:06:55 crc kubenswrapper[4558]: I0120 19:06:55.605777 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/68e1a307-6cef-496e-a9ff-849e257f2a21-utilities\") pod \"redhat-marketplace-cr754\" (UID: \"68e1a307-6cef-496e-a9ff-849e257f2a21\") " pod="openshift-marketplace/redhat-marketplace-cr754" Jan 20 19:06:55 crc kubenswrapper[4558]: I0120 19:06:55.707476 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pgdqw\" (UniqueName: \"kubernetes.io/projected/68e1a307-6cef-496e-a9ff-849e257f2a21-kube-api-access-pgdqw\") pod \"redhat-marketplace-cr754\" (UID: \"68e1a307-6cef-496e-a9ff-849e257f2a21\") " pod="openshift-marketplace/redhat-marketplace-cr754" Jan 20 19:06:55 crc kubenswrapper[4558]: I0120 19:06:55.707544 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/68e1a307-6cef-496e-a9ff-849e257f2a21-catalog-content\") pod \"redhat-marketplace-cr754\" (UID: \"68e1a307-6cef-496e-a9ff-849e257f2a21\") " pod="openshift-marketplace/redhat-marketplace-cr754" Jan 20 19:06:55 crc kubenswrapper[4558]: I0120 19:06:55.707581 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/68e1a307-6cef-496e-a9ff-849e257f2a21-utilities\") pod \"redhat-marketplace-cr754\" (UID: \"68e1a307-6cef-496e-a9ff-849e257f2a21\") " 
pod="openshift-marketplace/redhat-marketplace-cr754" Jan 20 19:06:55 crc kubenswrapper[4558]: I0120 19:06:55.708085 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/68e1a307-6cef-496e-a9ff-849e257f2a21-utilities\") pod \"redhat-marketplace-cr754\" (UID: \"68e1a307-6cef-496e-a9ff-849e257f2a21\") " pod="openshift-marketplace/redhat-marketplace-cr754" Jan 20 19:06:55 crc kubenswrapper[4558]: I0120 19:06:55.709816 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/68e1a307-6cef-496e-a9ff-849e257f2a21-catalog-content\") pod \"redhat-marketplace-cr754\" (UID: \"68e1a307-6cef-496e-a9ff-849e257f2a21\") " pod="openshift-marketplace/redhat-marketplace-cr754" Jan 20 19:06:55 crc kubenswrapper[4558]: I0120 19:06:55.738344 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pgdqw\" (UniqueName: \"kubernetes.io/projected/68e1a307-6cef-496e-a9ff-849e257f2a21-kube-api-access-pgdqw\") pod \"redhat-marketplace-cr754\" (UID: \"68e1a307-6cef-496e-a9ff-849e257f2a21\") " pod="openshift-marketplace/redhat-marketplace-cr754" Jan 20 19:06:55 crc kubenswrapper[4558]: I0120 19:06:55.752962 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-cr754" Jan 20 19:06:55 crc kubenswrapper[4558]: I0120 19:06:55.953493 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-cr754"] Jan 20 19:06:56 crc kubenswrapper[4558]: I0120 19:06:56.043473 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cr754" event={"ID":"68e1a307-6cef-496e-a9ff-849e257f2a21","Type":"ContainerStarted","Data":"10a6951f98fcffd05b35089c69df5408f34c403dcc99872ed19640538b68b599"} Jan 20 19:06:57 crc kubenswrapper[4558]: I0120 19:06:57.050841 4558 generic.go:334] "Generic (PLEG): container finished" podID="68e1a307-6cef-496e-a9ff-849e257f2a21" containerID="70c465811d62ea21eadc94e9d4847b004fffa8905805e800e758364df9da3ee1" exitCode=0 Jan 20 19:06:57 crc kubenswrapper[4558]: I0120 19:06:57.050892 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cr754" event={"ID":"68e1a307-6cef-496e-a9ff-849e257f2a21","Type":"ContainerDied","Data":"70c465811d62ea21eadc94e9d4847b004fffa8905805e800e758364df9da3ee1"} Jan 20 19:06:58 crc kubenswrapper[4558]: I0120 19:06:58.060481 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cr754" event={"ID":"68e1a307-6cef-496e-a9ff-849e257f2a21","Type":"ContainerStarted","Data":"2e1eb4d8d91f6985167366c8160e37d12954172ad5b08bcd9025789f38cf8cc7"} Jan 20 19:06:59 crc kubenswrapper[4558]: I0120 19:06:59.072967 4558 generic.go:334] "Generic (PLEG): container finished" podID="68e1a307-6cef-496e-a9ff-849e257f2a21" containerID="2e1eb4d8d91f6985167366c8160e37d12954172ad5b08bcd9025789f38cf8cc7" exitCode=0 Jan 20 19:06:59 crc kubenswrapper[4558]: I0120 19:06:59.073051 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cr754" event={"ID":"68e1a307-6cef-496e-a9ff-849e257f2a21","Type":"ContainerDied","Data":"2e1eb4d8d91f6985167366c8160e37d12954172ad5b08bcd9025789f38cf8cc7"} Jan 20 19:07:00 crc kubenswrapper[4558]: I0120 19:07:00.083158 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cr754" 
event={"ID":"68e1a307-6cef-496e-a9ff-849e257f2a21","Type":"ContainerStarted","Data":"3ce514ffcb870196f261bf39a5c49a9b34d41cb9538418a330380f63b0cbff71"} Jan 20 19:07:00 crc kubenswrapper[4558]: I0120 19:07:00.103464 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-cr754" podStartSLOduration=2.56902395 podStartE2EDuration="5.103445465s" podCreationTimestamp="2026-01-20 19:06:55 +0000 UTC" firstStartedPulling="2026-01-20 19:06:57.052332376 +0000 UTC m=+8710.812670343" lastFinishedPulling="2026-01-20 19:06:59.586753891 +0000 UTC m=+8713.347091858" observedRunningTime="2026-01-20 19:07:00.101393947 +0000 UTC m=+8713.861731915" watchObservedRunningTime="2026-01-20 19:07:00.103445465 +0000 UTC m=+8713.863783432" Jan 20 19:07:05 crc kubenswrapper[4558]: I0120 19:07:05.753435 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-cr754" Jan 20 19:07:05 crc kubenswrapper[4558]: I0120 19:07:05.754066 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-cr754" Jan 20 19:07:05 crc kubenswrapper[4558]: I0120 19:07:05.792120 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-cr754" Jan 20 19:07:06 crc kubenswrapper[4558]: I0120 19:07:06.154280 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-cr754" Jan 20 19:07:06 crc kubenswrapper[4558]: I0120 19:07:06.196735 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-cr754"] Jan 20 19:07:08 crc kubenswrapper[4558]: I0120 19:07:08.134780 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-cr754" podUID="68e1a307-6cef-496e-a9ff-849e257f2a21" containerName="registry-server" containerID="cri-o://3ce514ffcb870196f261bf39a5c49a9b34d41cb9538418a330380f63b0cbff71" gracePeriod=2 Jan 20 19:07:08 crc kubenswrapper[4558]: I0120 19:07:08.478540 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-cr754" Jan 20 19:07:08 crc kubenswrapper[4558]: I0120 19:07:08.595709 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pgdqw\" (UniqueName: \"kubernetes.io/projected/68e1a307-6cef-496e-a9ff-849e257f2a21-kube-api-access-pgdqw\") pod \"68e1a307-6cef-496e-a9ff-849e257f2a21\" (UID: \"68e1a307-6cef-496e-a9ff-849e257f2a21\") " Jan 20 19:07:08 crc kubenswrapper[4558]: I0120 19:07:08.595784 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/68e1a307-6cef-496e-a9ff-849e257f2a21-catalog-content\") pod \"68e1a307-6cef-496e-a9ff-849e257f2a21\" (UID: \"68e1a307-6cef-496e-a9ff-849e257f2a21\") " Jan 20 19:07:08 crc kubenswrapper[4558]: I0120 19:07:08.595915 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/68e1a307-6cef-496e-a9ff-849e257f2a21-utilities\") pod \"68e1a307-6cef-496e-a9ff-849e257f2a21\" (UID: \"68e1a307-6cef-496e-a9ff-849e257f2a21\") " Jan 20 19:07:08 crc kubenswrapper[4558]: I0120 19:07:08.596764 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/68e1a307-6cef-496e-a9ff-849e257f2a21-utilities" (OuterVolumeSpecName: "utilities") pod "68e1a307-6cef-496e-a9ff-849e257f2a21" (UID: "68e1a307-6cef-496e-a9ff-849e257f2a21"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 19:07:08 crc kubenswrapper[4558]: I0120 19:07:08.601452 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/68e1a307-6cef-496e-a9ff-849e257f2a21-kube-api-access-pgdqw" (OuterVolumeSpecName: "kube-api-access-pgdqw") pod "68e1a307-6cef-496e-a9ff-849e257f2a21" (UID: "68e1a307-6cef-496e-a9ff-849e257f2a21"). InnerVolumeSpecName "kube-api-access-pgdqw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 19:07:08 crc kubenswrapper[4558]: I0120 19:07:08.621977 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/68e1a307-6cef-496e-a9ff-849e257f2a21-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "68e1a307-6cef-496e-a9ff-849e257f2a21" (UID: "68e1a307-6cef-496e-a9ff-849e257f2a21"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 19:07:08 crc kubenswrapper[4558]: I0120 19:07:08.698554 4558 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/68e1a307-6cef-496e-a9ff-849e257f2a21-utilities\") on node \"crc\" DevicePath \"\"" Jan 20 19:07:08 crc kubenswrapper[4558]: I0120 19:07:08.698855 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pgdqw\" (UniqueName: \"kubernetes.io/projected/68e1a307-6cef-496e-a9ff-849e257f2a21-kube-api-access-pgdqw\") on node \"crc\" DevicePath \"\"" Jan 20 19:07:08 crc kubenswrapper[4558]: I0120 19:07:08.698869 4558 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/68e1a307-6cef-496e-a9ff-849e257f2a21-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 20 19:07:09 crc kubenswrapper[4558]: I0120 19:07:09.144309 4558 generic.go:334] "Generic (PLEG): container finished" podID="68e1a307-6cef-496e-a9ff-849e257f2a21" containerID="3ce514ffcb870196f261bf39a5c49a9b34d41cb9538418a330380f63b0cbff71" exitCode=0 Jan 20 19:07:09 crc kubenswrapper[4558]: I0120 19:07:09.144373 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cr754" event={"ID":"68e1a307-6cef-496e-a9ff-849e257f2a21","Type":"ContainerDied","Data":"3ce514ffcb870196f261bf39a5c49a9b34d41cb9538418a330380f63b0cbff71"} Jan 20 19:07:09 crc kubenswrapper[4558]: I0120 19:07:09.144394 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-cr754" Jan 20 19:07:09 crc kubenswrapper[4558]: I0120 19:07:09.144418 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-cr754" event={"ID":"68e1a307-6cef-496e-a9ff-849e257f2a21","Type":"ContainerDied","Data":"10a6951f98fcffd05b35089c69df5408f34c403dcc99872ed19640538b68b599"} Jan 20 19:07:09 crc kubenswrapper[4558]: I0120 19:07:09.144440 4558 scope.go:117] "RemoveContainer" containerID="3ce514ffcb870196f261bf39a5c49a9b34d41cb9538418a330380f63b0cbff71" Jan 20 19:07:09 crc kubenswrapper[4558]: I0120 19:07:09.177711 4558 scope.go:117] "RemoveContainer" containerID="2e1eb4d8d91f6985167366c8160e37d12954172ad5b08bcd9025789f38cf8cc7" Jan 20 19:07:09 crc kubenswrapper[4558]: I0120 19:07:09.180475 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-cr754"] Jan 20 19:07:09 crc kubenswrapper[4558]: I0120 19:07:09.187616 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-cr754"] Jan 20 19:07:09 crc kubenswrapper[4558]: I0120 19:07:09.192890 4558 scope.go:117] "RemoveContainer" containerID="70c465811d62ea21eadc94e9d4847b004fffa8905805e800e758364df9da3ee1" Jan 20 19:07:09 crc kubenswrapper[4558]: I0120 19:07:09.220902 4558 scope.go:117] "RemoveContainer" containerID="3ce514ffcb870196f261bf39a5c49a9b34d41cb9538418a330380f63b0cbff71" Jan 20 19:07:09 crc kubenswrapper[4558]: E0120 19:07:09.221402 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3ce514ffcb870196f261bf39a5c49a9b34d41cb9538418a330380f63b0cbff71\": container with ID starting with 3ce514ffcb870196f261bf39a5c49a9b34d41cb9538418a330380f63b0cbff71 not found: ID does not exist" containerID="3ce514ffcb870196f261bf39a5c49a9b34d41cb9538418a330380f63b0cbff71" Jan 20 19:07:09 crc kubenswrapper[4558]: I0120 19:07:09.221506 4558 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3ce514ffcb870196f261bf39a5c49a9b34d41cb9538418a330380f63b0cbff71"} err="failed to get container status \"3ce514ffcb870196f261bf39a5c49a9b34d41cb9538418a330380f63b0cbff71\": rpc error: code = NotFound desc = could not find container \"3ce514ffcb870196f261bf39a5c49a9b34d41cb9538418a330380f63b0cbff71\": container with ID starting with 3ce514ffcb870196f261bf39a5c49a9b34d41cb9538418a330380f63b0cbff71 not found: ID does not exist" Jan 20 19:07:09 crc kubenswrapper[4558]: I0120 19:07:09.221584 4558 scope.go:117] "RemoveContainer" containerID="2e1eb4d8d91f6985167366c8160e37d12954172ad5b08bcd9025789f38cf8cc7" Jan 20 19:07:09 crc kubenswrapper[4558]: E0120 19:07:09.221921 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2e1eb4d8d91f6985167366c8160e37d12954172ad5b08bcd9025789f38cf8cc7\": container with ID starting with 2e1eb4d8d91f6985167366c8160e37d12954172ad5b08bcd9025789f38cf8cc7 not found: ID does not exist" containerID="2e1eb4d8d91f6985167366c8160e37d12954172ad5b08bcd9025789f38cf8cc7" Jan 20 19:07:09 crc kubenswrapper[4558]: I0120 19:07:09.221949 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2e1eb4d8d91f6985167366c8160e37d12954172ad5b08bcd9025789f38cf8cc7"} err="failed to get container status \"2e1eb4d8d91f6985167366c8160e37d12954172ad5b08bcd9025789f38cf8cc7\": rpc error: code = NotFound desc = could not find container \"2e1eb4d8d91f6985167366c8160e37d12954172ad5b08bcd9025789f38cf8cc7\": container with ID starting with 2e1eb4d8d91f6985167366c8160e37d12954172ad5b08bcd9025789f38cf8cc7 not found: ID does not exist" Jan 20 19:07:09 crc kubenswrapper[4558]: I0120 19:07:09.221969 4558 scope.go:117] "RemoveContainer" containerID="70c465811d62ea21eadc94e9d4847b004fffa8905805e800e758364df9da3ee1" Jan 20 19:07:09 crc kubenswrapper[4558]: E0120 19:07:09.222244 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"70c465811d62ea21eadc94e9d4847b004fffa8905805e800e758364df9da3ee1\": container with ID starting with 70c465811d62ea21eadc94e9d4847b004fffa8905805e800e758364df9da3ee1 not found: ID does not exist" containerID="70c465811d62ea21eadc94e9d4847b004fffa8905805e800e758364df9da3ee1" Jan 20 19:07:09 crc kubenswrapper[4558]: I0120 19:07:09.222273 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"70c465811d62ea21eadc94e9d4847b004fffa8905805e800e758364df9da3ee1"} err="failed to get container status \"70c465811d62ea21eadc94e9d4847b004fffa8905805e800e758364df9da3ee1\": rpc error: code = NotFound desc = could not find container \"70c465811d62ea21eadc94e9d4847b004fffa8905805e800e758364df9da3ee1\": container with ID starting with 70c465811d62ea21eadc94e9d4847b004fffa8905805e800e758364df9da3ee1 not found: ID does not exist" Jan 20 19:07:10 crc kubenswrapper[4558]: I0120 19:07:10.574995 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="68e1a307-6cef-496e-a9ff-849e257f2a21" path="/var/lib/kubelet/pods/68e1a307-6cef-496e-a9ff-849e257f2a21/volumes" Jan 20 19:07:57 crc kubenswrapper[4558]: I0120 19:07:57.329780 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 19:07:57 crc kubenswrapper[4558]: I0120 19:07:57.331952 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 19:08:27 crc kubenswrapper[4558]: I0120 19:08:27.329852 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 19:08:27 crc kubenswrapper[4558]: I0120 19:08:27.330545 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 19:08:27 crc kubenswrapper[4558]: I0120 19:08:27.556065 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-zbtpp"] Jan 20 19:08:27 crc kubenswrapper[4558]: E0120 19:08:27.557278 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68e1a307-6cef-496e-a9ff-849e257f2a21" containerName="extract-content" Jan 20 19:08:27 crc kubenswrapper[4558]: I0120 19:08:27.557305 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="68e1a307-6cef-496e-a9ff-849e257f2a21" containerName="extract-content" Jan 20 19:08:27 crc kubenswrapper[4558]: E0120 19:08:27.557315 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68e1a307-6cef-496e-a9ff-849e257f2a21" containerName="registry-server" Jan 20 19:08:27 crc kubenswrapper[4558]: I0120 19:08:27.557321 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="68e1a307-6cef-496e-a9ff-849e257f2a21" containerName="registry-server" Jan 20 19:08:27 crc kubenswrapper[4558]: E0120 19:08:27.557351 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68e1a307-6cef-496e-a9ff-849e257f2a21" containerName="extract-utilities" Jan 20 19:08:27 crc kubenswrapper[4558]: I0120 19:08:27.557547 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="68e1a307-6cef-496e-a9ff-849e257f2a21" containerName="extract-utilities" Jan 20 19:08:27 crc kubenswrapper[4558]: I0120 19:08:27.557754 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="68e1a307-6cef-496e-a9ff-849e257f2a21" containerName="registry-server" Jan 20 19:08:27 crc kubenswrapper[4558]: I0120 19:08:27.559837 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-zbtpp" Jan 20 19:08:27 crc kubenswrapper[4558]: I0120 19:08:27.561088 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-zbtpp"] Jan 20 19:08:27 crc kubenswrapper[4558]: I0120 19:08:27.661185 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/01945132-03a4-4378-b46e-0466fb2f52a6-utilities\") pod \"certified-operators-zbtpp\" (UID: \"01945132-03a4-4378-b46e-0466fb2f52a6\") " pod="openshift-marketplace/certified-operators-zbtpp" Jan 20 19:08:27 crc kubenswrapper[4558]: I0120 19:08:27.661323 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-56ljn\" (UniqueName: \"kubernetes.io/projected/01945132-03a4-4378-b46e-0466fb2f52a6-kube-api-access-56ljn\") pod \"certified-operators-zbtpp\" (UID: \"01945132-03a4-4378-b46e-0466fb2f52a6\") " pod="openshift-marketplace/certified-operators-zbtpp" Jan 20 19:08:27 crc kubenswrapper[4558]: I0120 19:08:27.661398 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/01945132-03a4-4378-b46e-0466fb2f52a6-catalog-content\") pod \"certified-operators-zbtpp\" (UID: \"01945132-03a4-4378-b46e-0466fb2f52a6\") " pod="openshift-marketplace/certified-operators-zbtpp" Jan 20 19:08:27 crc kubenswrapper[4558]: I0120 19:08:27.762911 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/01945132-03a4-4378-b46e-0466fb2f52a6-catalog-content\") pod \"certified-operators-zbtpp\" (UID: \"01945132-03a4-4378-b46e-0466fb2f52a6\") " pod="openshift-marketplace/certified-operators-zbtpp" Jan 20 19:08:27 crc kubenswrapper[4558]: I0120 19:08:27.763046 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/01945132-03a4-4378-b46e-0466fb2f52a6-utilities\") pod \"certified-operators-zbtpp\" (UID: \"01945132-03a4-4378-b46e-0466fb2f52a6\") " pod="openshift-marketplace/certified-operators-zbtpp" Jan 20 19:08:27 crc kubenswrapper[4558]: I0120 19:08:27.763197 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-56ljn\" (UniqueName: \"kubernetes.io/projected/01945132-03a4-4378-b46e-0466fb2f52a6-kube-api-access-56ljn\") pod \"certified-operators-zbtpp\" (UID: \"01945132-03a4-4378-b46e-0466fb2f52a6\") " pod="openshift-marketplace/certified-operators-zbtpp" Jan 20 19:08:27 crc kubenswrapper[4558]: I0120 19:08:27.764002 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/01945132-03a4-4378-b46e-0466fb2f52a6-catalog-content\") pod \"certified-operators-zbtpp\" (UID: \"01945132-03a4-4378-b46e-0466fb2f52a6\") " pod="openshift-marketplace/certified-operators-zbtpp" Jan 20 19:08:27 crc kubenswrapper[4558]: I0120 19:08:27.764509 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/01945132-03a4-4378-b46e-0466fb2f52a6-utilities\") pod \"certified-operators-zbtpp\" (UID: \"01945132-03a4-4378-b46e-0466fb2f52a6\") " pod="openshift-marketplace/certified-operators-zbtpp" Jan 20 19:08:27 crc kubenswrapper[4558]: I0120 19:08:27.787844 4558 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-56ljn\" (UniqueName: \"kubernetes.io/projected/01945132-03a4-4378-b46e-0466fb2f52a6-kube-api-access-56ljn\") pod \"certified-operators-zbtpp\" (UID: \"01945132-03a4-4378-b46e-0466fb2f52a6\") " pod="openshift-marketplace/certified-operators-zbtpp" Jan 20 19:08:27 crc kubenswrapper[4558]: I0120 19:08:27.875001 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zbtpp" Jan 20 19:08:28 crc kubenswrapper[4558]: I0120 19:08:28.074459 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-zbtpp"] Jan 20 19:08:28 crc kubenswrapper[4558]: I0120 19:08:28.669510 4558 generic.go:334] "Generic (PLEG): container finished" podID="01945132-03a4-4378-b46e-0466fb2f52a6" containerID="f6be32c69a9ed4bb6c195cdb22febc2fa79d77b7e186c96292f91b3dcb88a16d" exitCode=0 Jan 20 19:08:28 crc kubenswrapper[4558]: I0120 19:08:28.669569 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zbtpp" event={"ID":"01945132-03a4-4378-b46e-0466fb2f52a6","Type":"ContainerDied","Data":"f6be32c69a9ed4bb6c195cdb22febc2fa79d77b7e186c96292f91b3dcb88a16d"} Jan 20 19:08:28 crc kubenswrapper[4558]: I0120 19:08:28.669604 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zbtpp" event={"ID":"01945132-03a4-4378-b46e-0466fb2f52a6","Type":"ContainerStarted","Data":"48df6db81723c2e0d3e61c38c2e386c18c0593a7b8938d7e0e643bba6f55f9ab"} Jan 20 19:08:29 crc kubenswrapper[4558]: I0120 19:08:29.680390 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zbtpp" event={"ID":"01945132-03a4-4378-b46e-0466fb2f52a6","Type":"ContainerStarted","Data":"13dd4e7795286a274df3828e85082b9ab31c4d933f8694747bd678ce7f4edb4d"} Jan 20 19:08:30 crc kubenswrapper[4558]: I0120 19:08:30.688683 4558 generic.go:334] "Generic (PLEG): container finished" podID="01945132-03a4-4378-b46e-0466fb2f52a6" containerID="13dd4e7795286a274df3828e85082b9ab31c4d933f8694747bd678ce7f4edb4d" exitCode=0 Jan 20 19:08:30 crc kubenswrapper[4558]: I0120 19:08:30.688739 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zbtpp" event={"ID":"01945132-03a4-4378-b46e-0466fb2f52a6","Type":"ContainerDied","Data":"13dd4e7795286a274df3828e85082b9ab31c4d933f8694747bd678ce7f4edb4d"} Jan 20 19:08:31 crc kubenswrapper[4558]: I0120 19:08:31.699278 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zbtpp" event={"ID":"01945132-03a4-4378-b46e-0466fb2f52a6","Type":"ContainerStarted","Data":"924789e23493797625b8de9bc7e7c2bd1456c9b1b066da43f1367b5591f7d58e"} Jan 20 19:08:31 crc kubenswrapper[4558]: I0120 19:08:31.720792 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-zbtpp" podStartSLOduration=2.204323607 podStartE2EDuration="4.720765451s" podCreationTimestamp="2026-01-20 19:08:27 +0000 UTC" firstStartedPulling="2026-01-20 19:08:28.67121008 +0000 UTC m=+8802.431548047" lastFinishedPulling="2026-01-20 19:08:31.187651923 +0000 UTC m=+8804.947989891" observedRunningTime="2026-01-20 19:08:31.716119417 +0000 UTC m=+8805.476457384" watchObservedRunningTime="2026-01-20 19:08:31.720765451 +0000 UTC m=+8805.481103418" Jan 20 19:08:37 crc kubenswrapper[4558]: I0120 19:08:37.875469 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openshift-marketplace/certified-operators-zbtpp" Jan 20 19:08:37 crc kubenswrapper[4558]: I0120 19:08:37.875857 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-zbtpp" Jan 20 19:08:37 crc kubenswrapper[4558]: I0120 19:08:37.911856 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-zbtpp" Jan 20 19:08:38 crc kubenswrapper[4558]: I0120 19:08:38.796894 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-zbtpp" Jan 20 19:08:38 crc kubenswrapper[4558]: I0120 19:08:38.837653 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-zbtpp"] Jan 20 19:08:40 crc kubenswrapper[4558]: I0120 19:08:40.779050 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-zbtpp" podUID="01945132-03a4-4378-b46e-0466fb2f52a6" containerName="registry-server" containerID="cri-o://924789e23493797625b8de9bc7e7c2bd1456c9b1b066da43f1367b5591f7d58e" gracePeriod=2 Jan 20 19:08:41 crc kubenswrapper[4558]: I0120 19:08:41.119314 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zbtpp" Jan 20 19:08:41 crc kubenswrapper[4558]: I0120 19:08:41.252133 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-56ljn\" (UniqueName: \"kubernetes.io/projected/01945132-03a4-4378-b46e-0466fb2f52a6-kube-api-access-56ljn\") pod \"01945132-03a4-4378-b46e-0466fb2f52a6\" (UID: \"01945132-03a4-4378-b46e-0466fb2f52a6\") " Jan 20 19:08:41 crc kubenswrapper[4558]: I0120 19:08:41.252292 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/01945132-03a4-4378-b46e-0466fb2f52a6-catalog-content\") pod \"01945132-03a4-4378-b46e-0466fb2f52a6\" (UID: \"01945132-03a4-4378-b46e-0466fb2f52a6\") " Jan 20 19:08:41 crc kubenswrapper[4558]: I0120 19:08:41.252337 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/01945132-03a4-4378-b46e-0466fb2f52a6-utilities\") pod \"01945132-03a4-4378-b46e-0466fb2f52a6\" (UID: \"01945132-03a4-4378-b46e-0466fb2f52a6\") " Jan 20 19:08:41 crc kubenswrapper[4558]: I0120 19:08:41.254361 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/01945132-03a4-4378-b46e-0466fb2f52a6-utilities" (OuterVolumeSpecName: "utilities") pod "01945132-03a4-4378-b46e-0466fb2f52a6" (UID: "01945132-03a4-4378-b46e-0466fb2f52a6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 19:08:41 crc kubenswrapper[4558]: I0120 19:08:41.256231 4558 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/01945132-03a4-4378-b46e-0466fb2f52a6-utilities\") on node \"crc\" DevicePath \"\"" Jan 20 19:08:41 crc kubenswrapper[4558]: I0120 19:08:41.260524 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01945132-03a4-4378-b46e-0466fb2f52a6-kube-api-access-56ljn" (OuterVolumeSpecName: "kube-api-access-56ljn") pod "01945132-03a4-4378-b46e-0466fb2f52a6" (UID: "01945132-03a4-4378-b46e-0466fb2f52a6"). InnerVolumeSpecName "kube-api-access-56ljn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 19:08:41 crc kubenswrapper[4558]: I0120 19:08:41.308021 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/01945132-03a4-4378-b46e-0466fb2f52a6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "01945132-03a4-4378-b46e-0466fb2f52a6" (UID: "01945132-03a4-4378-b46e-0466fb2f52a6"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 19:08:41 crc kubenswrapper[4558]: I0120 19:08:41.357857 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-56ljn\" (UniqueName: \"kubernetes.io/projected/01945132-03a4-4378-b46e-0466fb2f52a6-kube-api-access-56ljn\") on node \"crc\" DevicePath \"\"" Jan 20 19:08:41 crc kubenswrapper[4558]: I0120 19:08:41.357891 4558 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/01945132-03a4-4378-b46e-0466fb2f52a6-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 20 19:08:41 crc kubenswrapper[4558]: I0120 19:08:41.787415 4558 generic.go:334] "Generic (PLEG): container finished" podID="01945132-03a4-4378-b46e-0466fb2f52a6" containerID="924789e23493797625b8de9bc7e7c2bd1456c9b1b066da43f1367b5591f7d58e" exitCode=0 Jan 20 19:08:41 crc kubenswrapper[4558]: I0120 19:08:41.787470 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zbtpp" event={"ID":"01945132-03a4-4378-b46e-0466fb2f52a6","Type":"ContainerDied","Data":"924789e23493797625b8de9bc7e7c2bd1456c9b1b066da43f1367b5591f7d58e"} Jan 20 19:08:41 crc kubenswrapper[4558]: I0120 19:08:41.787548 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zbtpp" Jan 20 19:08:41 crc kubenswrapper[4558]: I0120 19:08:41.787572 4558 scope.go:117] "RemoveContainer" containerID="924789e23493797625b8de9bc7e7c2bd1456c9b1b066da43f1367b5591f7d58e" Jan 20 19:08:41 crc kubenswrapper[4558]: I0120 19:08:41.787548 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zbtpp" event={"ID":"01945132-03a4-4378-b46e-0466fb2f52a6","Type":"ContainerDied","Data":"48df6db81723c2e0d3e61c38c2e386c18c0593a7b8938d7e0e643bba6f55f9ab"} Jan 20 19:08:41 crc kubenswrapper[4558]: I0120 19:08:41.806504 4558 scope.go:117] "RemoveContainer" containerID="13dd4e7795286a274df3828e85082b9ab31c4d933f8694747bd678ce7f4edb4d" Jan 20 19:08:41 crc kubenswrapper[4558]: I0120 19:08:41.821296 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-zbtpp"] Jan 20 19:08:41 crc kubenswrapper[4558]: I0120 19:08:41.826800 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-zbtpp"] Jan 20 19:08:41 crc kubenswrapper[4558]: I0120 19:08:41.845741 4558 scope.go:117] "RemoveContainer" containerID="f6be32c69a9ed4bb6c195cdb22febc2fa79d77b7e186c96292f91b3dcb88a16d" Jan 20 19:08:41 crc kubenswrapper[4558]: I0120 19:08:41.856821 4558 scope.go:117] "RemoveContainer" containerID="924789e23493797625b8de9bc7e7c2bd1456c9b1b066da43f1367b5591f7d58e" Jan 20 19:08:41 crc kubenswrapper[4558]: E0120 19:08:41.857211 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"924789e23493797625b8de9bc7e7c2bd1456c9b1b066da43f1367b5591f7d58e\": container with ID starting with 
924789e23493797625b8de9bc7e7c2bd1456c9b1b066da43f1367b5591f7d58e not found: ID does not exist" containerID="924789e23493797625b8de9bc7e7c2bd1456c9b1b066da43f1367b5591f7d58e" Jan 20 19:08:41 crc kubenswrapper[4558]: I0120 19:08:41.857243 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"924789e23493797625b8de9bc7e7c2bd1456c9b1b066da43f1367b5591f7d58e"} err="failed to get container status \"924789e23493797625b8de9bc7e7c2bd1456c9b1b066da43f1367b5591f7d58e\": rpc error: code = NotFound desc = could not find container \"924789e23493797625b8de9bc7e7c2bd1456c9b1b066da43f1367b5591f7d58e\": container with ID starting with 924789e23493797625b8de9bc7e7c2bd1456c9b1b066da43f1367b5591f7d58e not found: ID does not exist" Jan 20 19:08:41 crc kubenswrapper[4558]: I0120 19:08:41.857265 4558 scope.go:117] "RemoveContainer" containerID="13dd4e7795286a274df3828e85082b9ab31c4d933f8694747bd678ce7f4edb4d" Jan 20 19:08:41 crc kubenswrapper[4558]: E0120 19:08:41.857642 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"13dd4e7795286a274df3828e85082b9ab31c4d933f8694747bd678ce7f4edb4d\": container with ID starting with 13dd4e7795286a274df3828e85082b9ab31c4d933f8694747bd678ce7f4edb4d not found: ID does not exist" containerID="13dd4e7795286a274df3828e85082b9ab31c4d933f8694747bd678ce7f4edb4d" Jan 20 19:08:41 crc kubenswrapper[4558]: I0120 19:08:41.857681 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"13dd4e7795286a274df3828e85082b9ab31c4d933f8694747bd678ce7f4edb4d"} err="failed to get container status \"13dd4e7795286a274df3828e85082b9ab31c4d933f8694747bd678ce7f4edb4d\": rpc error: code = NotFound desc = could not find container \"13dd4e7795286a274df3828e85082b9ab31c4d933f8694747bd678ce7f4edb4d\": container with ID starting with 13dd4e7795286a274df3828e85082b9ab31c4d933f8694747bd678ce7f4edb4d not found: ID does not exist" Jan 20 19:08:41 crc kubenswrapper[4558]: I0120 19:08:41.857709 4558 scope.go:117] "RemoveContainer" containerID="f6be32c69a9ed4bb6c195cdb22febc2fa79d77b7e186c96292f91b3dcb88a16d" Jan 20 19:08:41 crc kubenswrapper[4558]: E0120 19:08:41.857981 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f6be32c69a9ed4bb6c195cdb22febc2fa79d77b7e186c96292f91b3dcb88a16d\": container with ID starting with f6be32c69a9ed4bb6c195cdb22febc2fa79d77b7e186c96292f91b3dcb88a16d not found: ID does not exist" containerID="f6be32c69a9ed4bb6c195cdb22febc2fa79d77b7e186c96292f91b3dcb88a16d" Jan 20 19:08:41 crc kubenswrapper[4558]: I0120 19:08:41.858006 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f6be32c69a9ed4bb6c195cdb22febc2fa79d77b7e186c96292f91b3dcb88a16d"} err="failed to get container status \"f6be32c69a9ed4bb6c195cdb22febc2fa79d77b7e186c96292f91b3dcb88a16d\": rpc error: code = NotFound desc = could not find container \"f6be32c69a9ed4bb6c195cdb22febc2fa79d77b7e186c96292f91b3dcb88a16d\": container with ID starting with f6be32c69a9ed4bb6c195cdb22febc2fa79d77b7e186c96292f91b3dcb88a16d not found: ID does not exist" Jan 20 19:08:42 crc kubenswrapper[4558]: I0120 19:08:42.574677 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01945132-03a4-4378-b46e-0466fb2f52a6" path="/var/lib/kubelet/pods/01945132-03a4-4378-b46e-0466fb2f52a6/volumes" Jan 20 19:08:57 crc kubenswrapper[4558]: I0120 19:08:57.330264 
4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 19:08:57 crc kubenswrapper[4558]: I0120 19:08:57.330992 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 19:08:57 crc kubenswrapper[4558]: I0120 19:08:57.331044 4558 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" Jan 20 19:08:57 crc kubenswrapper[4558]: I0120 19:08:57.331585 4558 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"fc482c813b9ae92dec3b3665026210640e14fa0e406eb14c2b13c4577051c155"} pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 20 19:08:57 crc kubenswrapper[4558]: I0120 19:08:57.331644 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" containerID="cri-o://fc482c813b9ae92dec3b3665026210640e14fa0e406eb14c2b13c4577051c155" gracePeriod=600 Jan 20 19:08:57 crc kubenswrapper[4558]: I0120 19:08:57.898573 4558 generic.go:334] "Generic (PLEG): container finished" podID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerID="fc482c813b9ae92dec3b3665026210640e14fa0e406eb14c2b13c4577051c155" exitCode=0 Jan 20 19:08:57 crc kubenswrapper[4558]: I0120 19:08:57.898849 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerDied","Data":"fc482c813b9ae92dec3b3665026210640e14fa0e406eb14c2b13c4577051c155"} Jan 20 19:08:57 crc kubenswrapper[4558]: I0120 19:08:57.898889 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerStarted","Data":"f564c14a07109e541e9fbbbe365d8cc006309f8b6f2a46364e7e327b0d781226"} Jan 20 19:08:57 crc kubenswrapper[4558]: I0120 19:08:57.898909 4558 scope.go:117] "RemoveContainer" containerID="34f5e56ad45f6257f106e7ec0fead6f98610b65caceb3cfc7626a2add684d678" Jan 20 19:09:55 crc kubenswrapper[4558]: I0120 19:09:55.911516 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-46hxk/must-gather-s6gln"] Jan 20 19:09:55 crc kubenswrapper[4558]: E0120 19:09:55.912401 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01945132-03a4-4378-b46e-0466fb2f52a6" containerName="registry-server" Jan 20 19:09:55 crc kubenswrapper[4558]: I0120 19:09:55.912415 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="01945132-03a4-4378-b46e-0466fb2f52a6" containerName="registry-server" Jan 20 19:09:55 crc kubenswrapper[4558]: E0120 19:09:55.912434 4558 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="01945132-03a4-4378-b46e-0466fb2f52a6" containerName="extract-utilities" Jan 20 19:09:55 crc kubenswrapper[4558]: I0120 19:09:55.912440 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="01945132-03a4-4378-b46e-0466fb2f52a6" containerName="extract-utilities" Jan 20 19:09:55 crc kubenswrapper[4558]: E0120 19:09:55.912448 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01945132-03a4-4378-b46e-0466fb2f52a6" containerName="extract-content" Jan 20 19:09:55 crc kubenswrapper[4558]: I0120 19:09:55.912454 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="01945132-03a4-4378-b46e-0466fb2f52a6" containerName="extract-content" Jan 20 19:09:55 crc kubenswrapper[4558]: I0120 19:09:55.912595 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="01945132-03a4-4378-b46e-0466fb2f52a6" containerName="registry-server" Jan 20 19:09:55 crc kubenswrapper[4558]: I0120 19:09:55.914135 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-46hxk/must-gather-s6gln" Jan 20 19:09:55 crc kubenswrapper[4558]: I0120 19:09:55.917306 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-46hxk"/"kube-root-ca.crt" Jan 20 19:09:55 crc kubenswrapper[4558]: I0120 19:09:55.917332 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-46hxk"/"openshift-service-ca.crt" Jan 20 19:09:55 crc kubenswrapper[4558]: I0120 19:09:55.917655 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-46hxk"/"default-dockercfg-gj675" Jan 20 19:09:55 crc kubenswrapper[4558]: I0120 19:09:55.922975 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-46hxk/must-gather-s6gln"] Jan 20 19:09:56 crc kubenswrapper[4558]: I0120 19:09:56.051713 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/4b2413a2-affa-4c7b-b1a6-2a83e8dd4cf2-must-gather-output\") pod \"must-gather-s6gln\" (UID: \"4b2413a2-affa-4c7b-b1a6-2a83e8dd4cf2\") " pod="openshift-must-gather-46hxk/must-gather-s6gln" Jan 20 19:09:56 crc kubenswrapper[4558]: I0120 19:09:56.051802 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lcwtf\" (UniqueName: \"kubernetes.io/projected/4b2413a2-affa-4c7b-b1a6-2a83e8dd4cf2-kube-api-access-lcwtf\") pod \"must-gather-s6gln\" (UID: \"4b2413a2-affa-4c7b-b1a6-2a83e8dd4cf2\") " pod="openshift-must-gather-46hxk/must-gather-s6gln" Jan 20 19:09:56 crc kubenswrapper[4558]: I0120 19:09:56.153611 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/4b2413a2-affa-4c7b-b1a6-2a83e8dd4cf2-must-gather-output\") pod \"must-gather-s6gln\" (UID: \"4b2413a2-affa-4c7b-b1a6-2a83e8dd4cf2\") " pod="openshift-must-gather-46hxk/must-gather-s6gln" Jan 20 19:09:56 crc kubenswrapper[4558]: I0120 19:09:56.153669 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lcwtf\" (UniqueName: \"kubernetes.io/projected/4b2413a2-affa-4c7b-b1a6-2a83e8dd4cf2-kube-api-access-lcwtf\") pod \"must-gather-s6gln\" (UID: \"4b2413a2-affa-4c7b-b1a6-2a83e8dd4cf2\") " pod="openshift-must-gather-46hxk/must-gather-s6gln" Jan 20 19:09:56 crc kubenswrapper[4558]: I0120 19:09:56.154183 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/4b2413a2-affa-4c7b-b1a6-2a83e8dd4cf2-must-gather-output\") pod \"must-gather-s6gln\" (UID: \"4b2413a2-affa-4c7b-b1a6-2a83e8dd4cf2\") " pod="openshift-must-gather-46hxk/must-gather-s6gln" Jan 20 19:09:56 crc kubenswrapper[4558]: I0120 19:09:56.179024 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lcwtf\" (UniqueName: \"kubernetes.io/projected/4b2413a2-affa-4c7b-b1a6-2a83e8dd4cf2-kube-api-access-lcwtf\") pod \"must-gather-s6gln\" (UID: \"4b2413a2-affa-4c7b-b1a6-2a83e8dd4cf2\") " pod="openshift-must-gather-46hxk/must-gather-s6gln" Jan 20 19:09:56 crc kubenswrapper[4558]: I0120 19:09:56.234254 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-46hxk/must-gather-s6gln" Jan 20 19:09:56 crc kubenswrapper[4558]: I0120 19:09:56.620207 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-46hxk/must-gather-s6gln"] Jan 20 19:09:57 crc kubenswrapper[4558]: I0120 19:09:57.372345 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-46hxk/must-gather-s6gln" event={"ID":"4b2413a2-affa-4c7b-b1a6-2a83e8dd4cf2","Type":"ContainerStarted","Data":"be39cada10b994c3ae1352031b62a68826d71305aaf7a6176c122739ce899d32"} Jan 20 19:09:57 crc kubenswrapper[4558]: I0120 19:09:57.374084 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-46hxk/must-gather-s6gln" event={"ID":"4b2413a2-affa-4c7b-b1a6-2a83e8dd4cf2","Type":"ContainerStarted","Data":"dfb2bd6b65962eb44b289e9d80a8db87f2e0919059b7cb889f4b03721098172e"} Jan 20 19:09:57 crc kubenswrapper[4558]: I0120 19:09:57.374233 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-46hxk/must-gather-s6gln" event={"ID":"4b2413a2-affa-4c7b-b1a6-2a83e8dd4cf2","Type":"ContainerStarted","Data":"21d96d74604b66b66196455d1d152622d077c9756611660634b0bc774ec92d33"} Jan 20 19:09:57 crc kubenswrapper[4558]: I0120 19:09:57.396869 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-46hxk/must-gather-s6gln" podStartSLOduration=2.396845018 podStartE2EDuration="2.396845018s" podCreationTimestamp="2026-01-20 19:09:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-20 19:09:57.387185919 +0000 UTC m=+8891.147523886" watchObservedRunningTime="2026-01-20 19:09:57.396845018 +0000 UTC m=+8891.157182985" Jan 20 19:10:24 crc kubenswrapper[4558]: I0120 19:10:24.622525 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6968d8fdc4-d9p9k_5835ce2a-d074-4a95-aa34-ad3a62f77503/controller/0.log" Jan 20 19:10:24 crc kubenswrapper[4558]: I0120 19:10:24.629881 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6968d8fdc4-d9p9k_5835ce2a-d074-4a95-aa34-ad3a62f77503/kube-rbac-proxy/0.log" Jan 20 19:10:24 crc kubenswrapper[4558]: I0120 19:10:24.654222 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-b75qk_59de8a65-6da7-4086-93d4-d76fa35f7660/controller/0.log" Jan 20 19:10:26 crc kubenswrapper[4558]: I0120 19:10:26.066380 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-b75qk_59de8a65-6da7-4086-93d4-d76fa35f7660/frr/0.log" Jan 20 19:10:26 crc kubenswrapper[4558]: I0120 19:10:26.077692 4558 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-b75qk_59de8a65-6da7-4086-93d4-d76fa35f7660/reloader/0.log" Jan 20 19:10:26 crc kubenswrapper[4558]: I0120 19:10:26.082661 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-b75qk_59de8a65-6da7-4086-93d4-d76fa35f7660/frr-metrics/0.log" Jan 20 19:10:26 crc kubenswrapper[4558]: I0120 19:10:26.090660 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-b75qk_59de8a65-6da7-4086-93d4-d76fa35f7660/kube-rbac-proxy/0.log" Jan 20 19:10:26 crc kubenswrapper[4558]: I0120 19:10:26.098353 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-b75qk_59de8a65-6da7-4086-93d4-d76fa35f7660/kube-rbac-proxy-frr/0.log" Jan 20 19:10:26 crc kubenswrapper[4558]: I0120 19:10:26.103683 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-b75qk_59de8a65-6da7-4086-93d4-d76fa35f7660/cp-frr-files/0.log" Jan 20 19:10:26 crc kubenswrapper[4558]: I0120 19:10:26.112692 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-b75qk_59de8a65-6da7-4086-93d4-d76fa35f7660/cp-reloader/0.log" Jan 20 19:10:26 crc kubenswrapper[4558]: I0120 19:10:26.117647 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-b75qk_59de8a65-6da7-4086-93d4-d76fa35f7660/cp-metrics/0.log" Jan 20 19:10:26 crc kubenswrapper[4558]: I0120 19:10:26.127606 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7df86c4f6c-vf5fl_e7f30f4b-a84e-47b2-b393-c52757e6ca69/frr-k8s-webhook-server/0.log" Jan 20 19:10:26 crc kubenswrapper[4558]: I0120 19:10:26.153487 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-747dffd588-lx9c6_704a513a-fe43-4730-9f48-6c85506e338b/manager/0.log" Jan 20 19:10:26 crc kubenswrapper[4558]: I0120 19:10:26.163121 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-7d7c8846bf-bnzd9_e29496dd-2347-49e8-b4ca-1d071c0dcf2a/webhook-server/0.log" Jan 20 19:10:27 crc kubenswrapper[4558]: I0120 19:10:27.473997 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-w684s_241ae411-7e63-4bb6-a110-b9983a418f9e/speaker/0.log" Jan 20 19:10:27 crc kubenswrapper[4558]: I0120 19:10:27.481809 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-w684s_241ae411-7e63-4bb6-a110-b9983a418f9e/kube-rbac-proxy/0.log" Jan 20 19:10:35 crc kubenswrapper[4558]: I0120 19:10:35.529450 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-4w5mk_119fb5fe-2460-4d2d-9db9-452afaa1e93e/control-plane-machine-set-operator/0.log" Jan 20 19:10:35 crc kubenswrapper[4558]: I0120 19:10:35.546859 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-hsls5_350b4e8e-0147-4d0a-b9c0-1cdcdc2312f1/kube-rbac-proxy/0.log" Jan 20 19:10:35 crc kubenswrapper[4558]: I0120 19:10:35.553066 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-hsls5_350b4e8e-0147-4d0a-b9c0-1cdcdc2312f1/machine-api-operator/0.log" Jan 20 19:10:40 crc kubenswrapper[4558]: I0120 19:10:40.457103 4558 log.go:25] "Finished parsing log file" 
path="/var/log/pods/cert-manager_cert-manager-86cb77c54b-4t55w_857afdf1-c962-4b4a-a79b-15547f6b407c/cert-manager-controller/0.log" Jan 20 19:10:40 crc kubenswrapper[4558]: I0120 19:10:40.542051 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-855d9ccff4-n85r2_dd2168dc-3cb1-45db-b8fd-7e112804ffcd/cert-manager-cainjector/0.log" Jan 20 19:10:40 crc kubenswrapper[4558]: I0120 19:10:40.557744 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-f4fb5df64-s79l7_0aafb007-c951-4a3f-90ee-14897538c76d/cert-manager-webhook/0.log" Jan 20 19:10:44 crc kubenswrapper[4558]: I0120 19:10:44.940507 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7754f76f8b-n5w94_7abfa901-9433-43f3-8f51-1da05d50f84d/nmstate-console-plugin/0.log" Jan 20 19:10:44 crc kubenswrapper[4558]: I0120 19:10:44.959231 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-btt28_f54cd5f9-30bf-494b-8528-4f25f8fa1521/nmstate-handler/0.log" Jan 20 19:10:44 crc kubenswrapper[4558]: I0120 19:10:44.968604 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-54757c584b-l6chb_1b599dc7-9188-4dbc-ad4a-32db989cb635/nmstate-metrics/0.log" Jan 20 19:10:44 crc kubenswrapper[4558]: I0120 19:10:44.974526 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-54757c584b-l6chb_1b599dc7-9188-4dbc-ad4a-32db989cb635/kube-rbac-proxy/0.log" Jan 20 19:10:44 crc kubenswrapper[4558]: I0120 19:10:44.987731 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-646758c888-r2zgd_b1c60395-86ef-4c6c-8432-b2bc357aac2d/nmstate-operator/0.log" Jan 20 19:10:44 crc kubenswrapper[4558]: I0120 19:10:44.999556 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-8474b5b9d8-t4zzd_ecd5abb7-faaf-42b3-8698-71ea80253e9d/nmstate-webhook/0.log" Jan 20 19:10:49 crc kubenswrapper[4558]: I0120 19:10:49.729952 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-68bc856cb9-nfzlw_fef8d546-d905-4701-ac1e-09cd4c4b1ed8/prometheus-operator/0.log" Jan 20 19:10:49 crc kubenswrapper[4558]: I0120 19:10:49.737670 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-558658b7b5-cpc8s_42ac1cad-9697-4a87-974d-f1dfe31a3627/prometheus-operator-admission-webhook/0.log" Jan 20 19:10:49 crc kubenswrapper[4558]: I0120 19:10:49.746812 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-558658b7b5-hhs5k_8c562f2d-0fe1-4a6a-9664-00e2cad8c416/prometheus-operator-admission-webhook/0.log" Jan 20 19:10:49 crc kubenswrapper[4558]: I0120 19:10:49.769109 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-59bdc8b94-rlprt_0c2f4961-5cb9-4460-b8f2-ff20d5bafc08/operator/0.log" Jan 20 19:10:49 crc kubenswrapper[4558]: I0120 19:10:49.790010 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-5bf474d74f-6nh6n_6f4871c7-a64d-4d49-b830-66b6718d608a/perses-operator/0.log" Jan 20 19:10:54 crc kubenswrapper[4558]: I0120 19:10:54.726751 4558 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_controller-6968d8fdc4-d9p9k_5835ce2a-d074-4a95-aa34-ad3a62f77503/controller/0.log" Jan 20 19:10:54 crc kubenswrapper[4558]: I0120 19:10:54.733279 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6968d8fdc4-d9p9k_5835ce2a-d074-4a95-aa34-ad3a62f77503/kube-rbac-proxy/0.log" Jan 20 19:10:54 crc kubenswrapper[4558]: I0120 19:10:54.751835 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-b75qk_59de8a65-6da7-4086-93d4-d76fa35f7660/controller/0.log" Jan 20 19:10:55 crc kubenswrapper[4558]: I0120 19:10:55.804472 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-b75qk_59de8a65-6da7-4086-93d4-d76fa35f7660/frr/0.log" Jan 20 19:10:55 crc kubenswrapper[4558]: I0120 19:10:55.822140 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-b75qk_59de8a65-6da7-4086-93d4-d76fa35f7660/reloader/0.log" Jan 20 19:10:55 crc kubenswrapper[4558]: I0120 19:10:55.830428 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-b75qk_59de8a65-6da7-4086-93d4-d76fa35f7660/frr-metrics/0.log" Jan 20 19:10:55 crc kubenswrapper[4558]: I0120 19:10:55.837952 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-b75qk_59de8a65-6da7-4086-93d4-d76fa35f7660/kube-rbac-proxy/0.log" Jan 20 19:10:55 crc kubenswrapper[4558]: I0120 19:10:55.844825 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-b75qk_59de8a65-6da7-4086-93d4-d76fa35f7660/kube-rbac-proxy-frr/0.log" Jan 20 19:10:55 crc kubenswrapper[4558]: I0120 19:10:55.849995 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-b75qk_59de8a65-6da7-4086-93d4-d76fa35f7660/cp-frr-files/0.log" Jan 20 19:10:55 crc kubenswrapper[4558]: I0120 19:10:55.856654 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-b75qk_59de8a65-6da7-4086-93d4-d76fa35f7660/cp-reloader/0.log" Jan 20 19:10:55 crc kubenswrapper[4558]: I0120 19:10:55.862512 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-b75qk_59de8a65-6da7-4086-93d4-d76fa35f7660/cp-metrics/0.log" Jan 20 19:10:55 crc kubenswrapper[4558]: I0120 19:10:55.871094 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7df86c4f6c-vf5fl_e7f30f4b-a84e-47b2-b393-c52757e6ca69/frr-k8s-webhook-server/0.log" Jan 20 19:10:55 crc kubenswrapper[4558]: I0120 19:10:55.896820 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-747dffd588-lx9c6_704a513a-fe43-4730-9f48-6c85506e338b/manager/0.log" Jan 20 19:10:55 crc kubenswrapper[4558]: I0120 19:10:55.904252 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-7d7c8846bf-bnzd9_e29496dd-2347-49e8-b4ca-1d071c0dcf2a/webhook-server/0.log" Jan 20 19:10:57 crc kubenswrapper[4558]: I0120 19:10:57.167582 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-w684s_241ae411-7e63-4bb6-a110-b9983a418f9e/speaker/0.log" Jan 20 19:10:57 crc kubenswrapper[4558]: I0120 19:10:57.179968 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-w684s_241ae411-7e63-4bb6-a110-b9983a418f9e/kube-rbac-proxy/0.log" Jan 20 19:10:57 crc kubenswrapper[4558]: I0120 19:10:57.330035 4558 patch_prober.go:28] interesting 
pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 19:10:57 crc kubenswrapper[4558]: I0120 19:10:57.330135 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 19:11:13 crc kubenswrapper[4558]: I0120 19:11:13.654312 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_dnsmasq-dnsmasq-84b9f45d47-68c5z_87f866b8-e393-493f-8b10-4758c516f4d7/dnsmasq-dns/0.log" Jan 20 19:11:13 crc kubenswrapper[4558]: I0120 19:11:13.663372 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-kuttl-tests_dnsmasq-dnsmasq-84b9f45d47-68c5z_87f866b8-e393-493f-8b10-4758c516f4d7/init/0.log" Jan 20 19:11:19 crc kubenswrapper[4558]: I0120 19:11:19.316282 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931ajdtsz_117625c2-239a-43e3-8bfa-69f6b5985a5c/extract/0.log" Jan 20 19:11:19 crc kubenswrapper[4558]: I0120 19:11:19.327032 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931ajdtsz_117625c2-239a-43e3-8bfa-69f6b5985a5c/util/0.log" Jan 20 19:11:19 crc kubenswrapper[4558]: I0120 19:11:19.353017 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931ajdtsz_117625c2-239a-43e3-8bfa-69f6b5985a5c/pull/0.log" Jan 20 19:11:19 crc kubenswrapper[4558]: I0120 19:11:19.360333 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zr95_53937263-221c-4ee7-87fb-d1b03392fd73/extract/0.log" Jan 20 19:11:19 crc kubenswrapper[4558]: I0120 19:11:19.368912 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zr95_53937263-221c-4ee7-87fb-d1b03392fd73/util/0.log" Jan 20 19:11:19 crc kubenswrapper[4558]: I0120 19:11:19.374887 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dc2zr95_53937263-221c-4ee7-87fb-d1b03392fd73/pull/0.log" Jan 20 19:11:19 crc kubenswrapper[4558]: I0120 19:11:19.384136 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713dqrqd_3d57036a-69b1-4789-87cf-8d1cfe930ba4/extract/0.log" Jan 20 19:11:19 crc kubenswrapper[4558]: I0120 19:11:19.391262 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713dqrqd_3d57036a-69b1-4789-87cf-8d1cfe930ba4/util/0.log" Jan 20 19:11:19 crc kubenswrapper[4558]: I0120 19:11:19.402110 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_53efe8611d43ac2275911d954e05efbbba7920a530aff9253ed1cec713dqrqd_3d57036a-69b1-4789-87cf-8d1cfe930ba4/pull/0.log" Jan 20 19:11:19 crc kubenswrapper[4558]: I0120 19:11:19.410816 4558 
log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08jrb5p_a606311e-7c02-48a9-9f83-46f862549670/extract/0.log" Jan 20 19:11:19 crc kubenswrapper[4558]: I0120 19:11:19.416699 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08jrb5p_a606311e-7c02-48a9-9f83-46f862549670/util/0.log" Jan 20 19:11:19 crc kubenswrapper[4558]: I0120 19:11:19.424471 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08jrb5p_a606311e-7c02-48a9-9f83-46f862549670/pull/0.log" Jan 20 19:11:20 crc kubenswrapper[4558]: I0120 19:11:20.125836 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-v2gb7_7d3d2009-8b03-4d74-881b-1dd341ead556/registry-server/0.log" Jan 20 19:11:20 crc kubenswrapper[4558]: I0120 19:11:20.133501 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-v2gb7_7d3d2009-8b03-4d74-881b-1dd341ead556/extract-utilities/0.log" Jan 20 19:11:20 crc kubenswrapper[4558]: I0120 19:11:20.143905 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-v2gb7_7d3d2009-8b03-4d74-881b-1dd341ead556/extract-content/0.log" Jan 20 19:11:21 crc kubenswrapper[4558]: I0120 19:11:21.574804 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-hgqdq_da87968b-3eb6-4478-bf20-ec2b8d7bbf95/registry-server/0.log" Jan 20 19:11:21 crc kubenswrapper[4558]: I0120 19:11:21.583107 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-hgqdq_da87968b-3eb6-4478-bf20-ec2b8d7bbf95/extract-utilities/0.log" Jan 20 19:11:21 crc kubenswrapper[4558]: I0120 19:11:21.593671 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-hgqdq_da87968b-3eb6-4478-bf20-ec2b8d7bbf95/extract-content/0.log" Jan 20 19:11:21 crc kubenswrapper[4558]: I0120 19:11:21.611599 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-fhnw5_6e227740-1076-4ebf-9fd4-b1ae12cc7beb/marketplace-operator/0.log" Jan 20 19:11:21 crc kubenswrapper[4558]: I0120 19:11:21.890993 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-fnqb8_3e16ce3b-593e-4c3d-a1c0-ab3c557da830/registry-server/0.log" Jan 20 19:11:21 crc kubenswrapper[4558]: I0120 19:11:21.944106 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-fnqb8_3e16ce3b-593e-4c3d-a1c0-ab3c557da830/extract-utilities/0.log" Jan 20 19:11:21 crc kubenswrapper[4558]: I0120 19:11:21.969107 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-fnqb8_3e16ce3b-593e-4c3d-a1c0-ab3c557da830/extract-content/0.log" Jan 20 19:11:23 crc kubenswrapper[4558]: I0120 19:11:23.077030 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-sj9bb_282a24f3-5eac-4fa8-8a40-6260b94e2164/registry-server/0.log" Jan 20 19:11:23 crc kubenswrapper[4558]: I0120 19:11:23.082879 4558 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_redhat-operators-sj9bb_282a24f3-5eac-4fa8-8a40-6260b94e2164/extract-utilities/0.log" Jan 20 19:11:23 crc kubenswrapper[4558]: I0120 19:11:23.090629 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-sj9bb_282a24f3-5eac-4fa8-8a40-6260b94e2164/extract-content/0.log" Jan 20 19:11:25 crc kubenswrapper[4558]: I0120 19:11:25.232865 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-68bc856cb9-nfzlw_fef8d546-d905-4701-ac1e-09cd4c4b1ed8/prometheus-operator/0.log" Jan 20 19:11:25 crc kubenswrapper[4558]: I0120 19:11:25.244650 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-558658b7b5-cpc8s_42ac1cad-9697-4a87-974d-f1dfe31a3627/prometheus-operator-admission-webhook/0.log" Jan 20 19:11:25 crc kubenswrapper[4558]: I0120 19:11:25.257812 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-558658b7b5-hhs5k_8c562f2d-0fe1-4a6a-9664-00e2cad8c416/prometheus-operator-admission-webhook/0.log" Jan 20 19:11:25 crc kubenswrapper[4558]: I0120 19:11:25.285393 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-59bdc8b94-rlprt_0c2f4961-5cb9-4460-b8f2-ff20d5bafc08/operator/0.log" Jan 20 19:11:25 crc kubenswrapper[4558]: I0120 19:11:25.292272 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-5bf474d74f-6nh6n_6f4871c7-a64d-4d49-b830-66b6718d608a/perses-operator/0.log" Jan 20 19:11:27 crc kubenswrapper[4558]: I0120 19:11:27.330193 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 19:11:27 crc kubenswrapper[4558]: I0120 19:11:27.330259 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 19:11:57 crc kubenswrapper[4558]: I0120 19:11:57.329733 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 19:11:57 crc kubenswrapper[4558]: I0120 19:11:57.330243 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 19:11:57 crc kubenswrapper[4558]: I0120 19:11:57.330291 4558 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" Jan 20 19:11:57 crc kubenswrapper[4558]: I0120 19:11:57.330809 4558 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" 
containerStatusID={"Type":"cri-o","ID":"f564c14a07109e541e9fbbbe365d8cc006309f8b6f2a46364e7e327b0d781226"} pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 20 19:11:57 crc kubenswrapper[4558]: I0120 19:11:57.330857 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" containerID="cri-o://f564c14a07109e541e9fbbbe365d8cc006309f8b6f2a46364e7e327b0d781226" gracePeriod=600 Jan 20 19:11:57 crc kubenswrapper[4558]: E0120 19:11:57.464322 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:11:58 crc kubenswrapper[4558]: I0120 19:11:58.188413 4558 generic.go:334] "Generic (PLEG): container finished" podID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerID="f564c14a07109e541e9fbbbe365d8cc006309f8b6f2a46364e7e327b0d781226" exitCode=0 Jan 20 19:11:58 crc kubenswrapper[4558]: I0120 19:11:58.188456 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerDied","Data":"f564c14a07109e541e9fbbbe365d8cc006309f8b6f2a46364e7e327b0d781226"} Jan 20 19:11:58 crc kubenswrapper[4558]: I0120 19:11:58.188499 4558 scope.go:117] "RemoveContainer" containerID="fc482c813b9ae92dec3b3665026210640e14fa0e406eb14c2b13c4577051c155" Jan 20 19:11:58 crc kubenswrapper[4558]: I0120 19:11:58.189045 4558 scope.go:117] "RemoveContainer" containerID="f564c14a07109e541e9fbbbe365d8cc006309f8b6f2a46364e7e327b0d781226" Jan 20 19:11:58 crc kubenswrapper[4558]: E0120 19:11:58.189358 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:11:58 crc kubenswrapper[4558]: I0120 19:11:58.731023 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-chh5l"] Jan 20 19:11:58 crc kubenswrapper[4558]: I0120 19:11:58.732191 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-chh5l" Jan 20 19:11:58 crc kubenswrapper[4558]: I0120 19:11:58.753177 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-chh5l"] Jan 20 19:11:58 crc kubenswrapper[4558]: I0120 19:11:58.789153 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/767cfef2-0b55-4ea9-a1f9-dd029b1b375f-catalog-content\") pod \"community-operators-chh5l\" (UID: \"767cfef2-0b55-4ea9-a1f9-dd029b1b375f\") " pod="openshift-marketplace/community-operators-chh5l" Jan 20 19:11:58 crc kubenswrapper[4558]: I0120 19:11:58.789271 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/767cfef2-0b55-4ea9-a1f9-dd029b1b375f-utilities\") pod \"community-operators-chh5l\" (UID: \"767cfef2-0b55-4ea9-a1f9-dd029b1b375f\") " pod="openshift-marketplace/community-operators-chh5l" Jan 20 19:11:58 crc kubenswrapper[4558]: I0120 19:11:58.789425 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zctvk\" (UniqueName: \"kubernetes.io/projected/767cfef2-0b55-4ea9-a1f9-dd029b1b375f-kube-api-access-zctvk\") pod \"community-operators-chh5l\" (UID: \"767cfef2-0b55-4ea9-a1f9-dd029b1b375f\") " pod="openshift-marketplace/community-operators-chh5l" Jan 20 19:11:58 crc kubenswrapper[4558]: I0120 19:11:58.890458 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zctvk\" (UniqueName: \"kubernetes.io/projected/767cfef2-0b55-4ea9-a1f9-dd029b1b375f-kube-api-access-zctvk\") pod \"community-operators-chh5l\" (UID: \"767cfef2-0b55-4ea9-a1f9-dd029b1b375f\") " pod="openshift-marketplace/community-operators-chh5l" Jan 20 19:11:58 crc kubenswrapper[4558]: I0120 19:11:58.890532 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/767cfef2-0b55-4ea9-a1f9-dd029b1b375f-catalog-content\") pod \"community-operators-chh5l\" (UID: \"767cfef2-0b55-4ea9-a1f9-dd029b1b375f\") " pod="openshift-marketplace/community-operators-chh5l" Jan 20 19:11:58 crc kubenswrapper[4558]: I0120 19:11:58.890568 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/767cfef2-0b55-4ea9-a1f9-dd029b1b375f-utilities\") pod \"community-operators-chh5l\" (UID: \"767cfef2-0b55-4ea9-a1f9-dd029b1b375f\") " pod="openshift-marketplace/community-operators-chh5l" Jan 20 19:11:58 crc kubenswrapper[4558]: I0120 19:11:58.891179 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/767cfef2-0b55-4ea9-a1f9-dd029b1b375f-utilities\") pod \"community-operators-chh5l\" (UID: \"767cfef2-0b55-4ea9-a1f9-dd029b1b375f\") " pod="openshift-marketplace/community-operators-chh5l" Jan 20 19:11:58 crc kubenswrapper[4558]: I0120 19:11:58.891265 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/767cfef2-0b55-4ea9-a1f9-dd029b1b375f-catalog-content\") pod \"community-operators-chh5l\" (UID: \"767cfef2-0b55-4ea9-a1f9-dd029b1b375f\") " pod="openshift-marketplace/community-operators-chh5l" Jan 20 19:11:58 crc kubenswrapper[4558]: I0120 19:11:58.909096 4558 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-zctvk\" (UniqueName: \"kubernetes.io/projected/767cfef2-0b55-4ea9-a1f9-dd029b1b375f-kube-api-access-zctvk\") pod \"community-operators-chh5l\" (UID: \"767cfef2-0b55-4ea9-a1f9-dd029b1b375f\") " pod="openshift-marketplace/community-operators-chh5l" Jan 20 19:11:59 crc kubenswrapper[4558]: I0120 19:11:59.049689 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-chh5l" Jan 20 19:11:59 crc kubenswrapper[4558]: I0120 19:11:59.381635 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-chh5l"] Jan 20 19:12:00 crc kubenswrapper[4558]: I0120 19:12:00.278189 4558 generic.go:334] "Generic (PLEG): container finished" podID="767cfef2-0b55-4ea9-a1f9-dd029b1b375f" containerID="1b9e824d39ba5c714ab27b0efa7b0db803becad673a54fa35cd815018fb4b85c" exitCode=0 Jan 20 19:12:00 crc kubenswrapper[4558]: I0120 19:12:00.278285 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-chh5l" event={"ID":"767cfef2-0b55-4ea9-a1f9-dd029b1b375f","Type":"ContainerDied","Data":"1b9e824d39ba5c714ab27b0efa7b0db803becad673a54fa35cd815018fb4b85c"} Jan 20 19:12:00 crc kubenswrapper[4558]: I0120 19:12:00.278506 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-chh5l" event={"ID":"767cfef2-0b55-4ea9-a1f9-dd029b1b375f","Type":"ContainerStarted","Data":"b1d9e6d7f346041be8ca107a0a7454a362aac5030f99a9aba46bfeea94f88130"} Jan 20 19:12:00 crc kubenswrapper[4558]: I0120 19:12:00.280267 4558 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 20 19:12:01 crc kubenswrapper[4558]: I0120 19:12:01.285521 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-chh5l" event={"ID":"767cfef2-0b55-4ea9-a1f9-dd029b1b375f","Type":"ContainerStarted","Data":"ea5f0ed8d5ba7fe04f32367081a12c74c8abdd30650ae92796adf6e712740bef"} Jan 20 19:12:02 crc kubenswrapper[4558]: I0120 19:12:02.292769 4558 generic.go:334] "Generic (PLEG): container finished" podID="767cfef2-0b55-4ea9-a1f9-dd029b1b375f" containerID="ea5f0ed8d5ba7fe04f32367081a12c74c8abdd30650ae92796adf6e712740bef" exitCode=0 Jan 20 19:12:02 crc kubenswrapper[4558]: I0120 19:12:02.293067 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-chh5l" event={"ID":"767cfef2-0b55-4ea9-a1f9-dd029b1b375f","Type":"ContainerDied","Data":"ea5f0ed8d5ba7fe04f32367081a12c74c8abdd30650ae92796adf6e712740bef"} Jan 20 19:12:03 crc kubenswrapper[4558]: I0120 19:12:03.305072 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-chh5l" event={"ID":"767cfef2-0b55-4ea9-a1f9-dd029b1b375f","Type":"ContainerStarted","Data":"5b373482a2200c4a8ec44959e8e6eed5f1564870c86e594b6c169c63698b30b5"} Jan 20 19:12:03 crc kubenswrapper[4558]: I0120 19:12:03.322095 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-chh5l" podStartSLOduration=2.828020842 podStartE2EDuration="5.322076281s" podCreationTimestamp="2026-01-20 19:11:58 +0000 UTC" firstStartedPulling="2026-01-20 19:12:00.27999902 +0000 UTC m=+9014.040336986" lastFinishedPulling="2026-01-20 19:12:02.774054458 +0000 UTC m=+9016.534392425" observedRunningTime="2026-01-20 19:12:03.320203912 +0000 UTC m=+9017.080541878" watchObservedRunningTime="2026-01-20 
19:12:03.322076281 +0000 UTC m=+9017.082414249" Jan 20 19:12:09 crc kubenswrapper[4558]: I0120 19:12:09.050770 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-chh5l" Jan 20 19:12:09 crc kubenswrapper[4558]: I0120 19:12:09.050844 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-chh5l" Jan 20 19:12:09 crc kubenswrapper[4558]: I0120 19:12:09.088138 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-chh5l" Jan 20 19:12:09 crc kubenswrapper[4558]: I0120 19:12:09.382721 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-chh5l" Jan 20 19:12:09 crc kubenswrapper[4558]: I0120 19:12:09.427273 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-chh5l"] Jan 20 19:12:11 crc kubenswrapper[4558]: I0120 19:12:11.376662 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-chh5l" podUID="767cfef2-0b55-4ea9-a1f9-dd029b1b375f" containerName="registry-server" containerID="cri-o://5b373482a2200c4a8ec44959e8e6eed5f1564870c86e594b6c169c63698b30b5" gracePeriod=2 Jan 20 19:12:11 crc kubenswrapper[4558]: I0120 19:12:11.738429 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-chh5l" Jan 20 19:12:11 crc kubenswrapper[4558]: I0120 19:12:11.862681 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/767cfef2-0b55-4ea9-a1f9-dd029b1b375f-catalog-content\") pod \"767cfef2-0b55-4ea9-a1f9-dd029b1b375f\" (UID: \"767cfef2-0b55-4ea9-a1f9-dd029b1b375f\") " Jan 20 19:12:11 crc kubenswrapper[4558]: I0120 19:12:11.862751 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/767cfef2-0b55-4ea9-a1f9-dd029b1b375f-utilities\") pod \"767cfef2-0b55-4ea9-a1f9-dd029b1b375f\" (UID: \"767cfef2-0b55-4ea9-a1f9-dd029b1b375f\") " Jan 20 19:12:11 crc kubenswrapper[4558]: I0120 19:12:11.862814 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zctvk\" (UniqueName: \"kubernetes.io/projected/767cfef2-0b55-4ea9-a1f9-dd029b1b375f-kube-api-access-zctvk\") pod \"767cfef2-0b55-4ea9-a1f9-dd029b1b375f\" (UID: \"767cfef2-0b55-4ea9-a1f9-dd029b1b375f\") " Jan 20 19:12:11 crc kubenswrapper[4558]: I0120 19:12:11.864946 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/767cfef2-0b55-4ea9-a1f9-dd029b1b375f-utilities" (OuterVolumeSpecName: "utilities") pod "767cfef2-0b55-4ea9-a1f9-dd029b1b375f" (UID: "767cfef2-0b55-4ea9-a1f9-dd029b1b375f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 19:12:11 crc kubenswrapper[4558]: I0120 19:12:11.869925 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/767cfef2-0b55-4ea9-a1f9-dd029b1b375f-kube-api-access-zctvk" (OuterVolumeSpecName: "kube-api-access-zctvk") pod "767cfef2-0b55-4ea9-a1f9-dd029b1b375f" (UID: "767cfef2-0b55-4ea9-a1f9-dd029b1b375f"). InnerVolumeSpecName "kube-api-access-zctvk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 19:12:11 crc kubenswrapper[4558]: I0120 19:12:11.927453 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/767cfef2-0b55-4ea9-a1f9-dd029b1b375f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "767cfef2-0b55-4ea9-a1f9-dd029b1b375f" (UID: "767cfef2-0b55-4ea9-a1f9-dd029b1b375f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 19:12:11 crc kubenswrapper[4558]: I0120 19:12:11.964522 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zctvk\" (UniqueName: \"kubernetes.io/projected/767cfef2-0b55-4ea9-a1f9-dd029b1b375f-kube-api-access-zctvk\") on node \"crc\" DevicePath \"\"" Jan 20 19:12:11 crc kubenswrapper[4558]: I0120 19:12:11.964554 4558 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/767cfef2-0b55-4ea9-a1f9-dd029b1b375f-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 20 19:12:11 crc kubenswrapper[4558]: I0120 19:12:11.964566 4558 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/767cfef2-0b55-4ea9-a1f9-dd029b1b375f-utilities\") on node \"crc\" DevicePath \"\"" Jan 20 19:12:12 crc kubenswrapper[4558]: I0120 19:12:12.380278 4558 generic.go:334] "Generic (PLEG): container finished" podID="767cfef2-0b55-4ea9-a1f9-dd029b1b375f" containerID="5b373482a2200c4a8ec44959e8e6eed5f1564870c86e594b6c169c63698b30b5" exitCode=0 Jan 20 19:12:12 crc kubenswrapper[4558]: I0120 19:12:12.380333 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-chh5l" event={"ID":"767cfef2-0b55-4ea9-a1f9-dd029b1b375f","Type":"ContainerDied","Data":"5b373482a2200c4a8ec44959e8e6eed5f1564870c86e594b6c169c63698b30b5"} Jan 20 19:12:12 crc kubenswrapper[4558]: I0120 19:12:12.380344 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-chh5l" Jan 20 19:12:12 crc kubenswrapper[4558]: I0120 19:12:12.380373 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-chh5l" event={"ID":"767cfef2-0b55-4ea9-a1f9-dd029b1b375f","Type":"ContainerDied","Data":"b1d9e6d7f346041be8ca107a0a7454a362aac5030f99a9aba46bfeea94f88130"} Jan 20 19:12:12 crc kubenswrapper[4558]: I0120 19:12:12.380394 4558 scope.go:117] "RemoveContainer" containerID="5b373482a2200c4a8ec44959e8e6eed5f1564870c86e594b6c169c63698b30b5" Jan 20 19:12:12 crc kubenswrapper[4558]: I0120 19:12:12.398083 4558 scope.go:117] "RemoveContainer" containerID="ea5f0ed8d5ba7fe04f32367081a12c74c8abdd30650ae92796adf6e712740bef" Jan 20 19:12:12 crc kubenswrapper[4558]: I0120 19:12:12.411502 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-chh5l"] Jan 20 19:12:12 crc kubenswrapper[4558]: I0120 19:12:12.412767 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-chh5l"] Jan 20 19:12:12 crc kubenswrapper[4558]: I0120 19:12:12.412999 4558 scope.go:117] "RemoveContainer" containerID="1b9e824d39ba5c714ab27b0efa7b0db803becad673a54fa35cd815018fb4b85c" Jan 20 19:12:12 crc kubenswrapper[4558]: I0120 19:12:12.428858 4558 scope.go:117] "RemoveContainer" containerID="5b373482a2200c4a8ec44959e8e6eed5f1564870c86e594b6c169c63698b30b5" Jan 20 19:12:12 crc kubenswrapper[4558]: E0120 19:12:12.429314 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5b373482a2200c4a8ec44959e8e6eed5f1564870c86e594b6c169c63698b30b5\": container with ID starting with 5b373482a2200c4a8ec44959e8e6eed5f1564870c86e594b6c169c63698b30b5 not found: ID does not exist" containerID="5b373482a2200c4a8ec44959e8e6eed5f1564870c86e594b6c169c63698b30b5" Jan 20 19:12:12 crc kubenswrapper[4558]: I0120 19:12:12.429347 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5b373482a2200c4a8ec44959e8e6eed5f1564870c86e594b6c169c63698b30b5"} err="failed to get container status \"5b373482a2200c4a8ec44959e8e6eed5f1564870c86e594b6c169c63698b30b5\": rpc error: code = NotFound desc = could not find container \"5b373482a2200c4a8ec44959e8e6eed5f1564870c86e594b6c169c63698b30b5\": container with ID starting with 5b373482a2200c4a8ec44959e8e6eed5f1564870c86e594b6c169c63698b30b5 not found: ID does not exist" Jan 20 19:12:12 crc kubenswrapper[4558]: I0120 19:12:12.429369 4558 scope.go:117] "RemoveContainer" containerID="ea5f0ed8d5ba7fe04f32367081a12c74c8abdd30650ae92796adf6e712740bef" Jan 20 19:12:12 crc kubenswrapper[4558]: E0120 19:12:12.429607 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ea5f0ed8d5ba7fe04f32367081a12c74c8abdd30650ae92796adf6e712740bef\": container with ID starting with ea5f0ed8d5ba7fe04f32367081a12c74c8abdd30650ae92796adf6e712740bef not found: ID does not exist" containerID="ea5f0ed8d5ba7fe04f32367081a12c74c8abdd30650ae92796adf6e712740bef" Jan 20 19:12:12 crc kubenswrapper[4558]: I0120 19:12:12.429630 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ea5f0ed8d5ba7fe04f32367081a12c74c8abdd30650ae92796adf6e712740bef"} err="failed to get container status \"ea5f0ed8d5ba7fe04f32367081a12c74c8abdd30650ae92796adf6e712740bef\": rpc error: code = NotFound desc = could not find 
container \"ea5f0ed8d5ba7fe04f32367081a12c74c8abdd30650ae92796adf6e712740bef\": container with ID starting with ea5f0ed8d5ba7fe04f32367081a12c74c8abdd30650ae92796adf6e712740bef not found: ID does not exist" Jan 20 19:12:12 crc kubenswrapper[4558]: I0120 19:12:12.429643 4558 scope.go:117] "RemoveContainer" containerID="1b9e824d39ba5c714ab27b0efa7b0db803becad673a54fa35cd815018fb4b85c" Jan 20 19:12:12 crc kubenswrapper[4558]: E0120 19:12:12.429863 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1b9e824d39ba5c714ab27b0efa7b0db803becad673a54fa35cd815018fb4b85c\": container with ID starting with 1b9e824d39ba5c714ab27b0efa7b0db803becad673a54fa35cd815018fb4b85c not found: ID does not exist" containerID="1b9e824d39ba5c714ab27b0efa7b0db803becad673a54fa35cd815018fb4b85c" Jan 20 19:12:12 crc kubenswrapper[4558]: I0120 19:12:12.429885 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1b9e824d39ba5c714ab27b0efa7b0db803becad673a54fa35cd815018fb4b85c"} err="failed to get container status \"1b9e824d39ba5c714ab27b0efa7b0db803becad673a54fa35cd815018fb4b85c\": rpc error: code = NotFound desc = could not find container \"1b9e824d39ba5c714ab27b0efa7b0db803becad673a54fa35cd815018fb4b85c\": container with ID starting with 1b9e824d39ba5c714ab27b0efa7b0db803becad673a54fa35cd815018fb4b85c not found: ID does not exist" Jan 20 19:12:12 crc kubenswrapper[4558]: I0120 19:12:12.566446 4558 scope.go:117] "RemoveContainer" containerID="f564c14a07109e541e9fbbbe365d8cc006309f8b6f2a46364e7e327b0d781226" Jan 20 19:12:12 crc kubenswrapper[4558]: E0120 19:12:12.566661 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:12:12 crc kubenswrapper[4558]: I0120 19:12:12.573091 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="767cfef2-0b55-4ea9-a1f9-dd029b1b375f" path="/var/lib/kubelet/pods/767cfef2-0b55-4ea9-a1f9-dd029b1b375f/volumes" Jan 20 19:12:27 crc kubenswrapper[4558]: I0120 19:12:27.566345 4558 scope.go:117] "RemoveContainer" containerID="f564c14a07109e541e9fbbbe365d8cc006309f8b6f2a46364e7e327b0d781226" Jan 20 19:12:27 crc kubenswrapper[4558]: E0120 19:12:27.567102 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:12:38 crc kubenswrapper[4558]: I0120 19:12:38.565917 4558 scope.go:117] "RemoveContainer" containerID="f564c14a07109e541e9fbbbe365d8cc006309f8b6f2a46364e7e327b0d781226" Jan 20 19:12:38 crc kubenswrapper[4558]: E0120 19:12:38.566751 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:12:49 crc kubenswrapper[4558]: I0120 19:12:49.565878 4558 scope.go:117] "RemoveContainer" containerID="f564c14a07109e541e9fbbbe365d8cc006309f8b6f2a46364e7e327b0d781226" Jan 20 19:12:49 crc kubenswrapper[4558]: E0120 19:12:49.566769 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:13:01 crc kubenswrapper[4558]: I0120 19:13:01.566519 4558 scope.go:117] "RemoveContainer" containerID="f564c14a07109e541e9fbbbe365d8cc006309f8b6f2a46364e7e327b0d781226" Jan 20 19:13:01 crc kubenswrapper[4558]: E0120 19:13:01.568673 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:13:04 crc kubenswrapper[4558]: I0120 19:13:04.364977 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-68bc856cb9-nfzlw_fef8d546-d905-4701-ac1e-09cd4c4b1ed8/prometheus-operator/0.log" Jan 20 19:13:04 crc kubenswrapper[4558]: I0120 19:13:04.379333 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-558658b7b5-cpc8s_42ac1cad-9697-4a87-974d-f1dfe31a3627/prometheus-operator-admission-webhook/0.log" Jan 20 19:13:04 crc kubenswrapper[4558]: I0120 19:13:04.393204 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-558658b7b5-hhs5k_8c562f2d-0fe1-4a6a-9664-00e2cad8c416/prometheus-operator-admission-webhook/0.log" Jan 20 19:13:04 crc kubenswrapper[4558]: I0120 19:13:04.415483 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-59bdc8b94-rlprt_0c2f4961-5cb9-4460-b8f2-ff20d5bafc08/operator/0.log" Jan 20 19:13:04 crc kubenswrapper[4558]: I0120 19:13:04.422680 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-5bf474d74f-6nh6n_6f4871c7-a64d-4d49-b830-66b6718d608a/perses-operator/0.log" Jan 20 19:13:04 crc kubenswrapper[4558]: I0120 19:13:04.767762 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-86cb77c54b-4t55w_857afdf1-c962-4b4a-a79b-15547f6b407c/cert-manager-controller/0.log" Jan 20 19:13:04 crc kubenswrapper[4558]: I0120 19:13:04.829472 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-855d9ccff4-n85r2_dd2168dc-3cb1-45db-b8fd-7e112804ffcd/cert-manager-cainjector/0.log" Jan 20 19:13:04 crc kubenswrapper[4558]: I0120 19:13:04.837029 4558 log.go:25] "Finished parsing log file" 
path="/var/log/pods/cert-manager_cert-manager-webhook-f4fb5df64-s79l7_0aafb007-c951-4a3f-90ee-14897538c76d/cert-manager-webhook/0.log" Jan 20 19:13:05 crc kubenswrapper[4558]: I0120 19:13:05.603655 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6968d8fdc4-d9p9k_5835ce2a-d074-4a95-aa34-ad3a62f77503/controller/0.log" Jan 20 19:13:05 crc kubenswrapper[4558]: I0120 19:13:05.606321 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7754f76f8b-n5w94_7abfa901-9433-43f3-8f51-1da05d50f84d/nmstate-console-plugin/0.log" Jan 20 19:13:05 crc kubenswrapper[4558]: I0120 19:13:05.609604 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6968d8fdc4-d9p9k_5835ce2a-d074-4a95-aa34-ad3a62f77503/kube-rbac-proxy/0.log" Jan 20 19:13:05 crc kubenswrapper[4558]: I0120 19:13:05.620881 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-btt28_f54cd5f9-30bf-494b-8528-4f25f8fa1521/nmstate-handler/0.log" Jan 20 19:13:05 crc kubenswrapper[4558]: I0120 19:13:05.630794 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-54757c584b-l6chb_1b599dc7-9188-4dbc-ad4a-32db989cb635/nmstate-metrics/0.log" Jan 20 19:13:05 crc kubenswrapper[4558]: I0120 19:13:05.632707 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-b75qk_59de8a65-6da7-4086-93d4-d76fa35f7660/controller/0.log" Jan 20 19:13:05 crc kubenswrapper[4558]: I0120 19:13:05.637187 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-54757c584b-l6chb_1b599dc7-9188-4dbc-ad4a-32db989cb635/kube-rbac-proxy/0.log" Jan 20 19:13:05 crc kubenswrapper[4558]: I0120 19:13:05.655683 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-646758c888-r2zgd_b1c60395-86ef-4c6c-8432-b2bc357aac2d/nmstate-operator/0.log" Jan 20 19:13:05 crc kubenswrapper[4558]: I0120 19:13:05.672895 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-8474b5b9d8-t4zzd_ecd5abb7-faaf-42b3-8698-71ea80253e9d/nmstate-webhook/0.log" Jan 20 19:13:06 crc kubenswrapper[4558]: I0120 19:13:06.909371 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-b75qk_59de8a65-6da7-4086-93d4-d76fa35f7660/frr/0.log" Jan 20 19:13:06 crc kubenswrapper[4558]: I0120 19:13:06.916922 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-b75qk_59de8a65-6da7-4086-93d4-d76fa35f7660/reloader/0.log" Jan 20 19:13:06 crc kubenswrapper[4558]: I0120 19:13:06.922489 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-b75qk_59de8a65-6da7-4086-93d4-d76fa35f7660/frr-metrics/0.log" Jan 20 19:13:06 crc kubenswrapper[4558]: I0120 19:13:06.929255 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-b75qk_59de8a65-6da7-4086-93d4-d76fa35f7660/kube-rbac-proxy/0.log" Jan 20 19:13:06 crc kubenswrapper[4558]: I0120 19:13:06.937518 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-b75qk_59de8a65-6da7-4086-93d4-d76fa35f7660/kube-rbac-proxy-frr/0.log" Jan 20 19:13:06 crc kubenswrapper[4558]: I0120 19:13:06.943870 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-b75qk_59de8a65-6da7-4086-93d4-d76fa35f7660/cp-frr-files/0.log" Jan 20 19:13:06 crc kubenswrapper[4558]: I0120 
19:13:06.949203 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-b75qk_59de8a65-6da7-4086-93d4-d76fa35f7660/cp-reloader/0.log" Jan 20 19:13:06 crc kubenswrapper[4558]: I0120 19:13:06.956887 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-b75qk_59de8a65-6da7-4086-93d4-d76fa35f7660/cp-metrics/0.log" Jan 20 19:13:06 crc kubenswrapper[4558]: I0120 19:13:06.964573 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7df86c4f6c-vf5fl_e7f30f4b-a84e-47b2-b393-c52757e6ca69/frr-k8s-webhook-server/0.log" Jan 20 19:13:06 crc kubenswrapper[4558]: I0120 19:13:06.986289 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-747dffd588-lx9c6_704a513a-fe43-4730-9f48-6c85506e338b/manager/0.log" Jan 20 19:13:06 crc kubenswrapper[4558]: I0120 19:13:06.996757 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-7d7c8846bf-bnzd9_e29496dd-2347-49e8-b4ca-1d071c0dcf2a/webhook-server/0.log" Jan 20 19:13:08 crc kubenswrapper[4558]: I0120 19:13:08.065102 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-w684s_241ae411-7e63-4bb6-a110-b9983a418f9e/speaker/0.log" Jan 20 19:13:08 crc kubenswrapper[4558]: I0120 19:13:08.078495 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-w684s_241ae411-7e63-4bb6-a110-b9983a418f9e/kube-rbac-proxy/0.log" Jan 20 19:13:08 crc kubenswrapper[4558]: I0120 19:13:08.948054 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-86cb77c54b-4t55w_857afdf1-c962-4b4a-a79b-15547f6b407c/cert-manager-controller/0.log" Jan 20 19:13:09 crc kubenswrapper[4558]: I0120 19:13:09.020726 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-855d9ccff4-n85r2_dd2168dc-3cb1-45db-b8fd-7e112804ffcd/cert-manager-cainjector/0.log" Jan 20 19:13:09 crc kubenswrapper[4558]: I0120 19:13:09.031660 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-f4fb5df64-s79l7_0aafb007-c951-4a3f-90ee-14897538c76d/cert-manager-webhook/0.log" Jan 20 19:13:09 crc kubenswrapper[4558]: I0120 19:13:09.625913 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-4w5mk_119fb5fe-2460-4d2d-9db9-452afaa1e93e/control-plane-machine-set-operator/0.log" Jan 20 19:13:09 crc kubenswrapper[4558]: I0120 19:13:09.638227 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-hsls5_350b4e8e-0147-4d0a-b9c0-1cdcdc2312f1/kube-rbac-proxy/0.log" Jan 20 19:13:09 crc kubenswrapper[4558]: I0120 19:13:09.649451 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-hsls5_350b4e8e-0147-4d0a-b9c0-1cdcdc2312f1/machine-api-operator/0.log" Jan 20 19:13:10 crc kubenswrapper[4558]: I0120 19:13:10.796640 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-additional-cni-plugins-f5t7h_0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4/kube-multus-additional-cni-plugins/0.log" Jan 20 19:13:10 crc kubenswrapper[4558]: I0120 19:13:10.803716 4558 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-multus_multus-additional-cni-plugins-f5t7h_0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4/egress-router-binary-copy/0.log" Jan 20 19:13:10 crc kubenswrapper[4558]: I0120 19:13:10.811564 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-additional-cni-plugins-f5t7h_0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4/cni-plugins/0.log" Jan 20 19:13:10 crc kubenswrapper[4558]: I0120 19:13:10.818102 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-additional-cni-plugins-f5t7h_0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4/bond-cni-plugin/0.log" Jan 20 19:13:10 crc kubenswrapper[4558]: I0120 19:13:10.824374 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-additional-cni-plugins-f5t7h_0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4/routeoverride-cni/0.log" Jan 20 19:13:10 crc kubenswrapper[4558]: I0120 19:13:10.832263 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-additional-cni-plugins-f5t7h_0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4/whereabouts-cni-bincopy/0.log" Jan 20 19:13:10 crc kubenswrapper[4558]: I0120 19:13:10.839999 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-additional-cni-plugins-f5t7h_0f90cb04-2e7a-4ee8-83fc-d6c0ee1702a4/whereabouts-cni/0.log" Jan 20 19:13:10 crc kubenswrapper[4558]: I0120 19:13:10.922052 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-admission-controller-857f4d67dd-7wfbg_34b7c77f-6a7d-43de-9ee4-bdba78dc8248/multus-admission-controller/0.log" Jan 20 19:13:10 crc kubenswrapper[4558]: I0120 19:13:10.926799 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-admission-controller-857f4d67dd-7wfbg_34b7c77f-6a7d-43de-9ee4-bdba78dc8248/kube-rbac-proxy/0.log" Jan 20 19:13:10 crc kubenswrapper[4558]: I0120 19:13:10.986012 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-jsqvf_bedf08c7-1f93-4931-a7f3-e729e2a137af/kube-multus/2.log" Jan 20 19:13:11 crc kubenswrapper[4558]: I0120 19:13:11.452217 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-jsqvf_bedf08c7-1f93-4931-a7f3-e729e2a137af/kube-multus/3.log" Jan 20 19:13:11 crc kubenswrapper[4558]: I0120 19:13:11.633643 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_network-metrics-daemon-9wrq6_30032328-bd33-4073-9366-e10bc5e2aa77/network-metrics-daemon/0.log" Jan 20 19:13:11 crc kubenswrapper[4558]: I0120 19:13:11.640402 4558 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_network-metrics-daemon-9wrq6_30032328-bd33-4073-9366-e10bc5e2aa77/kube-rbac-proxy/0.log" Jan 20 19:13:16 crc kubenswrapper[4558]: I0120 19:13:16.573096 4558 scope.go:117] "RemoveContainer" containerID="f564c14a07109e541e9fbbbe365d8cc006309f8b6f2a46364e7e327b0d781226" Jan 20 19:13:16 crc kubenswrapper[4558]: E0120 19:13:16.575451 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:13:30 crc kubenswrapper[4558]: I0120 19:13:30.566690 4558 scope.go:117] "RemoveContainer" 
containerID="f564c14a07109e541e9fbbbe365d8cc006309f8b6f2a46364e7e327b0d781226" Jan 20 19:13:30 crc kubenswrapper[4558]: E0120 19:13:30.567745 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:13:41 crc kubenswrapper[4558]: I0120 19:13:41.566154 4558 scope.go:117] "RemoveContainer" containerID="f564c14a07109e541e9fbbbe365d8cc006309f8b6f2a46364e7e327b0d781226" Jan 20 19:13:41 crc kubenswrapper[4558]: E0120 19:13:41.566891 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:13:53 crc kubenswrapper[4558]: I0120 19:13:53.566308 4558 scope.go:117] "RemoveContainer" containerID="f564c14a07109e541e9fbbbe365d8cc006309f8b6f2a46364e7e327b0d781226" Jan 20 19:13:53 crc kubenswrapper[4558]: E0120 19:13:53.567231 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:14:04 crc kubenswrapper[4558]: I0120 19:14:04.566938 4558 scope.go:117] "RemoveContainer" containerID="f564c14a07109e541e9fbbbe365d8cc006309f8b6f2a46364e7e327b0d781226" Jan 20 19:14:04 crc kubenswrapper[4558]: E0120 19:14:04.567713 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:14:16 crc kubenswrapper[4558]: I0120 19:14:16.569277 4558 scope.go:117] "RemoveContainer" containerID="f564c14a07109e541e9fbbbe365d8cc006309f8b6f2a46364e7e327b0d781226" Jan 20 19:14:16 crc kubenswrapper[4558]: E0120 19:14:16.570274 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:14:30 crc kubenswrapper[4558]: I0120 19:14:30.568854 4558 scope.go:117] "RemoveContainer" containerID="f564c14a07109e541e9fbbbe365d8cc006309f8b6f2a46364e7e327b0d781226" Jan 20 19:14:30 crc kubenswrapper[4558]: E0120 19:14:30.569930 4558 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:14:44 crc kubenswrapper[4558]: I0120 19:14:44.566613 4558 scope.go:117] "RemoveContainer" containerID="f564c14a07109e541e9fbbbe365d8cc006309f8b6f2a46364e7e327b0d781226" Jan 20 19:14:44 crc kubenswrapper[4558]: E0120 19:14:44.567475 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:14:59 crc kubenswrapper[4558]: I0120 19:14:59.565433 4558 scope.go:117] "RemoveContainer" containerID="f564c14a07109e541e9fbbbe365d8cc006309f8b6f2a46364e7e327b0d781226" Jan 20 19:14:59 crc kubenswrapper[4558]: E0120 19:14:59.566250 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:15:00 crc kubenswrapper[4558]: I0120 19:15:00.137975 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29482275-zlntx"] Jan 20 19:15:00 crc kubenswrapper[4558]: E0120 19:15:00.138493 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="767cfef2-0b55-4ea9-a1f9-dd029b1b375f" containerName="registry-server" Jan 20 19:15:00 crc kubenswrapper[4558]: I0120 19:15:00.138513 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="767cfef2-0b55-4ea9-a1f9-dd029b1b375f" containerName="registry-server" Jan 20 19:15:00 crc kubenswrapper[4558]: E0120 19:15:00.138549 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="767cfef2-0b55-4ea9-a1f9-dd029b1b375f" containerName="extract-utilities" Jan 20 19:15:00 crc kubenswrapper[4558]: I0120 19:15:00.138557 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="767cfef2-0b55-4ea9-a1f9-dd029b1b375f" containerName="extract-utilities" Jan 20 19:15:00 crc kubenswrapper[4558]: E0120 19:15:00.138584 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="767cfef2-0b55-4ea9-a1f9-dd029b1b375f" containerName="extract-content" Jan 20 19:15:00 crc kubenswrapper[4558]: I0120 19:15:00.138590 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="767cfef2-0b55-4ea9-a1f9-dd029b1b375f" containerName="extract-content" Jan 20 19:15:00 crc kubenswrapper[4558]: I0120 19:15:00.138912 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="767cfef2-0b55-4ea9-a1f9-dd029b1b375f" containerName="registry-server" Jan 20 19:15:00 crc kubenswrapper[4558]: I0120 19:15:00.139911 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29482275-zlntx" Jan 20 19:15:00 crc kubenswrapper[4558]: I0120 19:15:00.143204 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 20 19:15:00 crc kubenswrapper[4558]: I0120 19:15:00.143390 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 20 19:15:00 crc kubenswrapper[4558]: I0120 19:15:00.157193 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29482275-zlntx"] Jan 20 19:15:00 crc kubenswrapper[4558]: I0120 19:15:00.198354 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/87abf43d-a04a-4f0f-b921-549db3364465-secret-volume\") pod \"collect-profiles-29482275-zlntx\" (UID: \"87abf43d-a04a-4f0f-b921-549db3364465\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482275-zlntx" Jan 20 19:15:00 crc kubenswrapper[4558]: I0120 19:15:00.198462 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87abf43d-a04a-4f0f-b921-549db3364465-config-volume\") pod \"collect-profiles-29482275-zlntx\" (UID: \"87abf43d-a04a-4f0f-b921-549db3364465\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482275-zlntx" Jan 20 19:15:00 crc kubenswrapper[4558]: I0120 19:15:00.198526 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c64l2\" (UniqueName: \"kubernetes.io/projected/87abf43d-a04a-4f0f-b921-549db3364465-kube-api-access-c64l2\") pod \"collect-profiles-29482275-zlntx\" (UID: \"87abf43d-a04a-4f0f-b921-549db3364465\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482275-zlntx" Jan 20 19:15:00 crc kubenswrapper[4558]: I0120 19:15:00.299854 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/87abf43d-a04a-4f0f-b921-549db3364465-secret-volume\") pod \"collect-profiles-29482275-zlntx\" (UID: \"87abf43d-a04a-4f0f-b921-549db3364465\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482275-zlntx" Jan 20 19:15:00 crc kubenswrapper[4558]: I0120 19:15:00.299936 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87abf43d-a04a-4f0f-b921-549db3364465-config-volume\") pod \"collect-profiles-29482275-zlntx\" (UID: \"87abf43d-a04a-4f0f-b921-549db3364465\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482275-zlntx" Jan 20 19:15:00 crc kubenswrapper[4558]: I0120 19:15:00.299985 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c64l2\" (UniqueName: \"kubernetes.io/projected/87abf43d-a04a-4f0f-b921-549db3364465-kube-api-access-c64l2\") pod \"collect-profiles-29482275-zlntx\" (UID: \"87abf43d-a04a-4f0f-b921-549db3364465\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482275-zlntx" Jan 20 19:15:00 crc kubenswrapper[4558]: I0120 19:15:00.301219 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87abf43d-a04a-4f0f-b921-549db3364465-config-volume\") pod 
\"collect-profiles-29482275-zlntx\" (UID: \"87abf43d-a04a-4f0f-b921-549db3364465\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482275-zlntx" Jan 20 19:15:00 crc kubenswrapper[4558]: I0120 19:15:00.307321 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/87abf43d-a04a-4f0f-b921-549db3364465-secret-volume\") pod \"collect-profiles-29482275-zlntx\" (UID: \"87abf43d-a04a-4f0f-b921-549db3364465\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482275-zlntx" Jan 20 19:15:00 crc kubenswrapper[4558]: I0120 19:15:00.316030 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c64l2\" (UniqueName: \"kubernetes.io/projected/87abf43d-a04a-4f0f-b921-549db3364465-kube-api-access-c64l2\") pod \"collect-profiles-29482275-zlntx\" (UID: \"87abf43d-a04a-4f0f-b921-549db3364465\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482275-zlntx" Jan 20 19:15:00 crc kubenswrapper[4558]: I0120 19:15:00.463564 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29482275-zlntx" Jan 20 19:15:00 crc kubenswrapper[4558]: I0120 19:15:00.886862 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29482275-zlntx"] Jan 20 19:15:01 crc kubenswrapper[4558]: I0120 19:15:01.404784 4558 generic.go:334] "Generic (PLEG): container finished" podID="87abf43d-a04a-4f0f-b921-549db3364465" containerID="855242f607674b6692f3baab6b26d85012c451c1db1b4555d9bc570158355556" exitCode=0 Jan 20 19:15:01 crc kubenswrapper[4558]: I0120 19:15:01.404977 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29482275-zlntx" event={"ID":"87abf43d-a04a-4f0f-b921-549db3364465","Type":"ContainerDied","Data":"855242f607674b6692f3baab6b26d85012c451c1db1b4555d9bc570158355556"} Jan 20 19:15:01 crc kubenswrapper[4558]: I0120 19:15:01.405180 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29482275-zlntx" event={"ID":"87abf43d-a04a-4f0f-b921-549db3364465","Type":"ContainerStarted","Data":"a7ccc8f367f9620014528a4708ffd2bccbfdffbd7d838418f526e45ed71b101d"} Jan 20 19:15:02 crc kubenswrapper[4558]: I0120 19:15:02.637308 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29482275-zlntx" Jan 20 19:15:02 crc kubenswrapper[4558]: I0120 19:15:02.832679 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/87abf43d-a04a-4f0f-b921-549db3364465-secret-volume\") pod \"87abf43d-a04a-4f0f-b921-549db3364465\" (UID: \"87abf43d-a04a-4f0f-b921-549db3364465\") " Jan 20 19:15:02 crc kubenswrapper[4558]: I0120 19:15:02.832760 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87abf43d-a04a-4f0f-b921-549db3364465-config-volume\") pod \"87abf43d-a04a-4f0f-b921-549db3364465\" (UID: \"87abf43d-a04a-4f0f-b921-549db3364465\") " Jan 20 19:15:02 crc kubenswrapper[4558]: I0120 19:15:02.832842 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c64l2\" (UniqueName: \"kubernetes.io/projected/87abf43d-a04a-4f0f-b921-549db3364465-kube-api-access-c64l2\") pod \"87abf43d-a04a-4f0f-b921-549db3364465\" (UID: \"87abf43d-a04a-4f0f-b921-549db3364465\") " Jan 20 19:15:02 crc kubenswrapper[4558]: I0120 19:15:02.833678 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87abf43d-a04a-4f0f-b921-549db3364465-config-volume" (OuterVolumeSpecName: "config-volume") pod "87abf43d-a04a-4f0f-b921-549db3364465" (UID: "87abf43d-a04a-4f0f-b921-549db3364465"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 19:15:02 crc kubenswrapper[4558]: I0120 19:15:02.838424 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87abf43d-a04a-4f0f-b921-549db3364465-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "87abf43d-a04a-4f0f-b921-549db3364465" (UID: "87abf43d-a04a-4f0f-b921-549db3364465"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 19:15:02 crc kubenswrapper[4558]: I0120 19:15:02.838570 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87abf43d-a04a-4f0f-b921-549db3364465-kube-api-access-c64l2" (OuterVolumeSpecName: "kube-api-access-c64l2") pod "87abf43d-a04a-4f0f-b921-549db3364465" (UID: "87abf43d-a04a-4f0f-b921-549db3364465"). InnerVolumeSpecName "kube-api-access-c64l2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 19:15:02 crc kubenswrapper[4558]: I0120 19:15:02.934607 4558 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/87abf43d-a04a-4f0f-b921-549db3364465-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 20 19:15:02 crc kubenswrapper[4558]: I0120 19:15:02.934649 4558 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87abf43d-a04a-4f0f-b921-549db3364465-config-volume\") on node \"crc\" DevicePath \"\"" Jan 20 19:15:02 crc kubenswrapper[4558]: I0120 19:15:02.934665 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c64l2\" (UniqueName: \"kubernetes.io/projected/87abf43d-a04a-4f0f-b921-549db3364465-kube-api-access-c64l2\") on node \"crc\" DevicePath \"\"" Jan 20 19:15:03 crc kubenswrapper[4558]: I0120 19:15:03.420530 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29482275-zlntx" event={"ID":"87abf43d-a04a-4f0f-b921-549db3364465","Type":"ContainerDied","Data":"a7ccc8f367f9620014528a4708ffd2bccbfdffbd7d838418f526e45ed71b101d"} Jan 20 19:15:03 crc kubenswrapper[4558]: I0120 19:15:03.420583 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a7ccc8f367f9620014528a4708ffd2bccbfdffbd7d838418f526e45ed71b101d" Jan 20 19:15:03 crc kubenswrapper[4558]: I0120 19:15:03.420627 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29482275-zlntx" Jan 20 19:15:03 crc kubenswrapper[4558]: I0120 19:15:03.701391 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29482230-l98wg"] Jan 20 19:15:03 crc kubenswrapper[4558]: I0120 19:15:03.709046 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29482230-l98wg"] Jan 20 19:15:03 crc kubenswrapper[4558]: I0120 19:15:03.873406 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-5lwhz"] Jan 20 19:15:03 crc kubenswrapper[4558]: E0120 19:15:03.874297 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="87abf43d-a04a-4f0f-b921-549db3364465" containerName="collect-profiles" Jan 20 19:15:03 crc kubenswrapper[4558]: I0120 19:15:03.874319 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="87abf43d-a04a-4f0f-b921-549db3364465" containerName="collect-profiles" Jan 20 19:15:03 crc kubenswrapper[4558]: I0120 19:15:03.874564 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="87abf43d-a04a-4f0f-b921-549db3364465" containerName="collect-profiles" Jan 20 19:15:03 crc kubenswrapper[4558]: I0120 19:15:03.876053 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-5lwhz" Jan 20 19:15:03 crc kubenswrapper[4558]: I0120 19:15:03.890754 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5lwhz"] Jan 20 19:15:04 crc kubenswrapper[4558]: I0120 19:15:04.048282 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nrk4f\" (UniqueName: \"kubernetes.io/projected/d19724d7-cd4f-4a4e-87c8-849cc170ddb0-kube-api-access-nrk4f\") pod \"redhat-operators-5lwhz\" (UID: \"d19724d7-cd4f-4a4e-87c8-849cc170ddb0\") " pod="openshift-marketplace/redhat-operators-5lwhz" Jan 20 19:15:04 crc kubenswrapper[4558]: I0120 19:15:04.048325 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d19724d7-cd4f-4a4e-87c8-849cc170ddb0-utilities\") pod \"redhat-operators-5lwhz\" (UID: \"d19724d7-cd4f-4a4e-87c8-849cc170ddb0\") " pod="openshift-marketplace/redhat-operators-5lwhz" Jan 20 19:15:04 crc kubenswrapper[4558]: I0120 19:15:04.048470 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d19724d7-cd4f-4a4e-87c8-849cc170ddb0-catalog-content\") pod \"redhat-operators-5lwhz\" (UID: \"d19724d7-cd4f-4a4e-87c8-849cc170ddb0\") " pod="openshift-marketplace/redhat-operators-5lwhz" Jan 20 19:15:04 crc kubenswrapper[4558]: I0120 19:15:04.150515 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nrk4f\" (UniqueName: \"kubernetes.io/projected/d19724d7-cd4f-4a4e-87c8-849cc170ddb0-kube-api-access-nrk4f\") pod \"redhat-operators-5lwhz\" (UID: \"d19724d7-cd4f-4a4e-87c8-849cc170ddb0\") " pod="openshift-marketplace/redhat-operators-5lwhz" Jan 20 19:15:04 crc kubenswrapper[4558]: I0120 19:15:04.150615 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d19724d7-cd4f-4a4e-87c8-849cc170ddb0-utilities\") pod \"redhat-operators-5lwhz\" (UID: \"d19724d7-cd4f-4a4e-87c8-849cc170ddb0\") " pod="openshift-marketplace/redhat-operators-5lwhz" Jan 20 19:15:04 crc kubenswrapper[4558]: I0120 19:15:04.150713 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d19724d7-cd4f-4a4e-87c8-849cc170ddb0-catalog-content\") pod \"redhat-operators-5lwhz\" (UID: \"d19724d7-cd4f-4a4e-87c8-849cc170ddb0\") " pod="openshift-marketplace/redhat-operators-5lwhz" Jan 20 19:15:04 crc kubenswrapper[4558]: I0120 19:15:04.151794 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d19724d7-cd4f-4a4e-87c8-849cc170ddb0-utilities\") pod \"redhat-operators-5lwhz\" (UID: \"d19724d7-cd4f-4a4e-87c8-849cc170ddb0\") " pod="openshift-marketplace/redhat-operators-5lwhz" Jan 20 19:15:04 crc kubenswrapper[4558]: I0120 19:15:04.151838 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d19724d7-cd4f-4a4e-87c8-849cc170ddb0-catalog-content\") pod \"redhat-operators-5lwhz\" (UID: \"d19724d7-cd4f-4a4e-87c8-849cc170ddb0\") " pod="openshift-marketplace/redhat-operators-5lwhz" Jan 20 19:15:04 crc kubenswrapper[4558]: I0120 19:15:04.171532 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-nrk4f\" (UniqueName: \"kubernetes.io/projected/d19724d7-cd4f-4a4e-87c8-849cc170ddb0-kube-api-access-nrk4f\") pod \"redhat-operators-5lwhz\" (UID: \"d19724d7-cd4f-4a4e-87c8-849cc170ddb0\") " pod="openshift-marketplace/redhat-operators-5lwhz" Jan 20 19:15:04 crc kubenswrapper[4558]: I0120 19:15:04.198081 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5lwhz" Jan 20 19:15:04 crc kubenswrapper[4558]: I0120 19:15:04.423328 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5lwhz"] Jan 20 19:15:04 crc kubenswrapper[4558]: I0120 19:15:04.434801 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5lwhz" event={"ID":"d19724d7-cd4f-4a4e-87c8-849cc170ddb0","Type":"ContainerStarted","Data":"4a0fe00ab6eeee9a3d09d650369a26fc1ec5930416cb7e0b7547caf2c8be190b"} Jan 20 19:15:04 crc kubenswrapper[4558]: I0120 19:15:04.575831 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c85e90ed-1d98-4b04-b918-512a6cc63d69" path="/var/lib/kubelet/pods/c85e90ed-1d98-4b04-b918-512a6cc63d69/volumes" Jan 20 19:15:05 crc kubenswrapper[4558]: I0120 19:15:05.444532 4558 generic.go:334] "Generic (PLEG): container finished" podID="d19724d7-cd4f-4a4e-87c8-849cc170ddb0" containerID="5ce48ce3d1041d1d794e3e66f12f8aa1b213a124ecd77cf3f10e1d706f940e22" exitCode=0 Jan 20 19:15:05 crc kubenswrapper[4558]: I0120 19:15:05.444639 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5lwhz" event={"ID":"d19724d7-cd4f-4a4e-87c8-849cc170ddb0","Type":"ContainerDied","Data":"5ce48ce3d1041d1d794e3e66f12f8aa1b213a124ecd77cf3f10e1d706f940e22"} Jan 20 19:15:07 crc kubenswrapper[4558]: I0120 19:15:07.462023 4558 generic.go:334] "Generic (PLEG): container finished" podID="d19724d7-cd4f-4a4e-87c8-849cc170ddb0" containerID="de5ad6a212892db5da5b2c945334a08825bd93b2363fa9e64f2e647791f1d8ba" exitCode=0 Jan 20 19:15:07 crc kubenswrapper[4558]: I0120 19:15:07.462246 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5lwhz" event={"ID":"d19724d7-cd4f-4a4e-87c8-849cc170ddb0","Type":"ContainerDied","Data":"de5ad6a212892db5da5b2c945334a08825bd93b2363fa9e64f2e647791f1d8ba"} Jan 20 19:15:08 crc kubenswrapper[4558]: I0120 19:15:08.472772 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5lwhz" event={"ID":"d19724d7-cd4f-4a4e-87c8-849cc170ddb0","Type":"ContainerStarted","Data":"d3a4d371f367bddac5740602db356872600e21644e1f930662e682d530dbc6ce"} Jan 20 19:15:08 crc kubenswrapper[4558]: I0120 19:15:08.498577 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-5lwhz" podStartSLOduration=2.762745493 podStartE2EDuration="5.498561214s" podCreationTimestamp="2026-01-20 19:15:03 +0000 UTC" firstStartedPulling="2026-01-20 19:15:05.447337145 +0000 UTC m=+9199.207675112" lastFinishedPulling="2026-01-20 19:15:08.183152866 +0000 UTC m=+9201.943490833" observedRunningTime="2026-01-20 19:15:08.496988366 +0000 UTC m=+9202.257326334" watchObservedRunningTime="2026-01-20 19:15:08.498561214 +0000 UTC m=+9202.258899181" Jan 20 19:15:11 crc kubenswrapper[4558]: I0120 19:15:11.566307 4558 scope.go:117] "RemoveContainer" containerID="f564c14a07109e541e9fbbbe365d8cc006309f8b6f2a46364e7e327b0d781226" Jan 20 19:15:11 crc kubenswrapper[4558]: E0120 19:15:11.566837 4558 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:15:14 crc kubenswrapper[4558]: I0120 19:15:14.198934 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-5lwhz" Jan 20 19:15:14 crc kubenswrapper[4558]: I0120 19:15:14.199949 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-5lwhz" Jan 20 19:15:14 crc kubenswrapper[4558]: I0120 19:15:14.234918 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-5lwhz" Jan 20 19:15:14 crc kubenswrapper[4558]: I0120 19:15:14.542983 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-5lwhz" Jan 20 19:15:14 crc kubenswrapper[4558]: I0120 19:15:14.586387 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-5lwhz"] Jan 20 19:15:16 crc kubenswrapper[4558]: I0120 19:15:16.524834 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-5lwhz" podUID="d19724d7-cd4f-4a4e-87c8-849cc170ddb0" containerName="registry-server" containerID="cri-o://d3a4d371f367bddac5740602db356872600e21644e1f930662e682d530dbc6ce" gracePeriod=2 Jan 20 19:15:16 crc kubenswrapper[4558]: I0120 19:15:16.880066 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5lwhz" Jan 20 19:15:17 crc kubenswrapper[4558]: I0120 19:15:17.032480 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nrk4f\" (UniqueName: \"kubernetes.io/projected/d19724d7-cd4f-4a4e-87c8-849cc170ddb0-kube-api-access-nrk4f\") pod \"d19724d7-cd4f-4a4e-87c8-849cc170ddb0\" (UID: \"d19724d7-cd4f-4a4e-87c8-849cc170ddb0\") " Jan 20 19:15:17 crc kubenswrapper[4558]: I0120 19:15:17.032609 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d19724d7-cd4f-4a4e-87c8-849cc170ddb0-utilities\") pod \"d19724d7-cd4f-4a4e-87c8-849cc170ddb0\" (UID: \"d19724d7-cd4f-4a4e-87c8-849cc170ddb0\") " Jan 20 19:15:17 crc kubenswrapper[4558]: I0120 19:15:17.032636 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d19724d7-cd4f-4a4e-87c8-849cc170ddb0-catalog-content\") pod \"d19724d7-cd4f-4a4e-87c8-849cc170ddb0\" (UID: \"d19724d7-cd4f-4a4e-87c8-849cc170ddb0\") " Jan 20 19:15:17 crc kubenswrapper[4558]: I0120 19:15:17.033594 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d19724d7-cd4f-4a4e-87c8-849cc170ddb0-utilities" (OuterVolumeSpecName: "utilities") pod "d19724d7-cd4f-4a4e-87c8-849cc170ddb0" (UID: "d19724d7-cd4f-4a4e-87c8-849cc170ddb0"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 19:15:17 crc kubenswrapper[4558]: I0120 19:15:17.060689 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d19724d7-cd4f-4a4e-87c8-849cc170ddb0-kube-api-access-nrk4f" (OuterVolumeSpecName: "kube-api-access-nrk4f") pod "d19724d7-cd4f-4a4e-87c8-849cc170ddb0" (UID: "d19724d7-cd4f-4a4e-87c8-849cc170ddb0"). InnerVolumeSpecName "kube-api-access-nrk4f". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 19:15:17 crc kubenswrapper[4558]: I0120 19:15:17.135087 4558 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d19724d7-cd4f-4a4e-87c8-849cc170ddb0-utilities\") on node \"crc\" DevicePath \"\"" Jan 20 19:15:17 crc kubenswrapper[4558]: I0120 19:15:17.135121 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nrk4f\" (UniqueName: \"kubernetes.io/projected/d19724d7-cd4f-4a4e-87c8-849cc170ddb0-kube-api-access-nrk4f\") on node \"crc\" DevicePath \"\"" Jan 20 19:15:17 crc kubenswrapper[4558]: I0120 19:15:17.537391 4558 generic.go:334] "Generic (PLEG): container finished" podID="d19724d7-cd4f-4a4e-87c8-849cc170ddb0" containerID="d3a4d371f367bddac5740602db356872600e21644e1f930662e682d530dbc6ce" exitCode=0 Jan 20 19:15:17 crc kubenswrapper[4558]: I0120 19:15:17.537447 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5lwhz" event={"ID":"d19724d7-cd4f-4a4e-87c8-849cc170ddb0","Type":"ContainerDied","Data":"d3a4d371f367bddac5740602db356872600e21644e1f930662e682d530dbc6ce"} Jan 20 19:15:17 crc kubenswrapper[4558]: I0120 19:15:17.537511 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5lwhz" Jan 20 19:15:17 crc kubenswrapper[4558]: I0120 19:15:17.537527 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5lwhz" event={"ID":"d19724d7-cd4f-4a4e-87c8-849cc170ddb0","Type":"ContainerDied","Data":"4a0fe00ab6eeee9a3d09d650369a26fc1ec5930416cb7e0b7547caf2c8be190b"} Jan 20 19:15:17 crc kubenswrapper[4558]: I0120 19:15:17.537558 4558 scope.go:117] "RemoveContainer" containerID="d3a4d371f367bddac5740602db356872600e21644e1f930662e682d530dbc6ce" Jan 20 19:15:17 crc kubenswrapper[4558]: I0120 19:15:17.556220 4558 scope.go:117] "RemoveContainer" containerID="de5ad6a212892db5da5b2c945334a08825bd93b2363fa9e64f2e647791f1d8ba" Jan 20 19:15:17 crc kubenswrapper[4558]: I0120 19:15:17.575236 4558 scope.go:117] "RemoveContainer" containerID="5ce48ce3d1041d1d794e3e66f12f8aa1b213a124ecd77cf3f10e1d706f940e22" Jan 20 19:15:17 crc kubenswrapper[4558]: I0120 19:15:17.593317 4558 scope.go:117] "RemoveContainer" containerID="d3a4d371f367bddac5740602db356872600e21644e1f930662e682d530dbc6ce" Jan 20 19:15:17 crc kubenswrapper[4558]: E0120 19:15:17.593865 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d3a4d371f367bddac5740602db356872600e21644e1f930662e682d530dbc6ce\": container with ID starting with d3a4d371f367bddac5740602db356872600e21644e1f930662e682d530dbc6ce not found: ID does not exist" containerID="d3a4d371f367bddac5740602db356872600e21644e1f930662e682d530dbc6ce" Jan 20 19:15:17 crc kubenswrapper[4558]: I0120 19:15:17.593917 4558 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"d3a4d371f367bddac5740602db356872600e21644e1f930662e682d530dbc6ce"} err="failed to get container status \"d3a4d371f367bddac5740602db356872600e21644e1f930662e682d530dbc6ce\": rpc error: code = NotFound desc = could not find container \"d3a4d371f367bddac5740602db356872600e21644e1f930662e682d530dbc6ce\": container with ID starting with d3a4d371f367bddac5740602db356872600e21644e1f930662e682d530dbc6ce not found: ID does not exist" Jan 20 19:15:17 crc kubenswrapper[4558]: I0120 19:15:17.593948 4558 scope.go:117] "RemoveContainer" containerID="de5ad6a212892db5da5b2c945334a08825bd93b2363fa9e64f2e647791f1d8ba" Jan 20 19:15:17 crc kubenswrapper[4558]: E0120 19:15:17.594371 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"de5ad6a212892db5da5b2c945334a08825bd93b2363fa9e64f2e647791f1d8ba\": container with ID starting with de5ad6a212892db5da5b2c945334a08825bd93b2363fa9e64f2e647791f1d8ba not found: ID does not exist" containerID="de5ad6a212892db5da5b2c945334a08825bd93b2363fa9e64f2e647791f1d8ba" Jan 20 19:15:17 crc kubenswrapper[4558]: I0120 19:15:17.594402 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"de5ad6a212892db5da5b2c945334a08825bd93b2363fa9e64f2e647791f1d8ba"} err="failed to get container status \"de5ad6a212892db5da5b2c945334a08825bd93b2363fa9e64f2e647791f1d8ba\": rpc error: code = NotFound desc = could not find container \"de5ad6a212892db5da5b2c945334a08825bd93b2363fa9e64f2e647791f1d8ba\": container with ID starting with de5ad6a212892db5da5b2c945334a08825bd93b2363fa9e64f2e647791f1d8ba not found: ID does not exist" Jan 20 19:15:17 crc kubenswrapper[4558]: I0120 19:15:17.594418 4558 scope.go:117] "RemoveContainer" containerID="5ce48ce3d1041d1d794e3e66f12f8aa1b213a124ecd77cf3f10e1d706f940e22" Jan 20 19:15:17 crc kubenswrapper[4558]: E0120 19:15:17.594867 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5ce48ce3d1041d1d794e3e66f12f8aa1b213a124ecd77cf3f10e1d706f940e22\": container with ID starting with 5ce48ce3d1041d1d794e3e66f12f8aa1b213a124ecd77cf3f10e1d706f940e22 not found: ID does not exist" containerID="5ce48ce3d1041d1d794e3e66f12f8aa1b213a124ecd77cf3f10e1d706f940e22" Jan 20 19:15:17 crc kubenswrapper[4558]: I0120 19:15:17.594897 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5ce48ce3d1041d1d794e3e66f12f8aa1b213a124ecd77cf3f10e1d706f940e22"} err="failed to get container status \"5ce48ce3d1041d1d794e3e66f12f8aa1b213a124ecd77cf3f10e1d706f940e22\": rpc error: code = NotFound desc = could not find container \"5ce48ce3d1041d1d794e3e66f12f8aa1b213a124ecd77cf3f10e1d706f940e22\": container with ID starting with 5ce48ce3d1041d1d794e3e66f12f8aa1b213a124ecd77cf3f10e1d706f940e22 not found: ID does not exist" Jan 20 19:15:18 crc kubenswrapper[4558]: I0120 19:15:18.347191 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d19724d7-cd4f-4a4e-87c8-849cc170ddb0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d19724d7-cd4f-4a4e-87c8-849cc170ddb0" (UID: "d19724d7-cd4f-4a4e-87c8-849cc170ddb0"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 19:15:18 crc kubenswrapper[4558]: I0120 19:15:18.351180 4558 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d19724d7-cd4f-4a4e-87c8-849cc170ddb0-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 20 19:15:18 crc kubenswrapper[4558]: I0120 19:15:18.472422 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-5lwhz"] Jan 20 19:15:18 crc kubenswrapper[4558]: I0120 19:15:18.476745 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-5lwhz"] Jan 20 19:15:18 crc kubenswrapper[4558]: I0120 19:15:18.574671 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d19724d7-cd4f-4a4e-87c8-849cc170ddb0" path="/var/lib/kubelet/pods/d19724d7-cd4f-4a4e-87c8-849cc170ddb0/volumes" Jan 20 19:15:24 crc kubenswrapper[4558]: I0120 19:15:24.568353 4558 scope.go:117] "RemoveContainer" containerID="f564c14a07109e541e9fbbbe365d8cc006309f8b6f2a46364e7e327b0d781226" Jan 20 19:15:24 crc kubenswrapper[4558]: E0120 19:15:24.569040 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:15:36 crc kubenswrapper[4558]: I0120 19:15:36.567850 4558 scope.go:117] "RemoveContainer" containerID="f564c14a07109e541e9fbbbe365d8cc006309f8b6f2a46364e7e327b0d781226" Jan 20 19:15:36 crc kubenswrapper[4558]: E0120 19:15:36.571100 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:15:40 crc kubenswrapper[4558]: I0120 19:15:40.757804 4558 scope.go:117] "RemoveContainer" containerID="b99b8db89b917abd367dfe197c944dbb4fede5d28a200025cdeb7c1ed39d65d0" Jan 20 19:15:50 crc kubenswrapper[4558]: I0120 19:15:50.566310 4558 scope.go:117] "RemoveContainer" containerID="f564c14a07109e541e9fbbbe365d8cc006309f8b6f2a46364e7e327b0d781226" Jan 20 19:15:50 crc kubenswrapper[4558]: E0120 19:15:50.567851 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:16:01 crc kubenswrapper[4558]: I0120 19:16:01.566509 4558 scope.go:117] "RemoveContainer" containerID="f564c14a07109e541e9fbbbe365d8cc006309f8b6f2a46364e7e327b0d781226" Jan 20 19:16:01 crc kubenswrapper[4558]: E0120 19:16:01.567273 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:16:14 crc kubenswrapper[4558]: I0120 19:16:14.566790 4558 scope.go:117] "RemoveContainer" containerID="f564c14a07109e541e9fbbbe365d8cc006309f8b6f2a46364e7e327b0d781226" Jan 20 19:16:14 crc kubenswrapper[4558]: E0120 19:16:14.567820 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:16:29 crc kubenswrapper[4558]: I0120 19:16:29.565835 4558 scope.go:117] "RemoveContainer" containerID="f564c14a07109e541e9fbbbe365d8cc006309f8b6f2a46364e7e327b0d781226" Jan 20 19:16:29 crc kubenswrapper[4558]: E0120 19:16:29.566951 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:16:42 crc kubenswrapper[4558]: I0120 19:16:42.566185 4558 scope.go:117] "RemoveContainer" containerID="f564c14a07109e541e9fbbbe365d8cc006309f8b6f2a46364e7e327b0d781226" Jan 20 19:16:42 crc kubenswrapper[4558]: E0120 19:16:42.567015 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:16:54 crc kubenswrapper[4558]: I0120 19:16:54.566046 4558 scope.go:117] "RemoveContainer" containerID="f564c14a07109e541e9fbbbe365d8cc006309f8b6f2a46364e7e327b0d781226" Jan 20 19:16:54 crc kubenswrapper[4558]: E0120 19:16:54.566947 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:17:09 crc kubenswrapper[4558]: I0120 19:17:09.566617 4558 scope.go:117] "RemoveContainer" containerID="f564c14a07109e541e9fbbbe365d8cc006309f8b6f2a46364e7e327b0d781226" Jan 20 19:17:10 crc kubenswrapper[4558]: I0120 19:17:10.310241 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerStarted","Data":"74a09cf01a9482e3a74e95530f462255ee83e3acfe30e90074e36f1623c24d9f"} Jan 20 19:17:46 crc kubenswrapper[4558]: I0120 19:17:46.607234 4558 kubelet.go:2421] "SyncLoop 
ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-pwzff"] Jan 20 19:17:46 crc kubenswrapper[4558]: E0120 19:17:46.610333 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d19724d7-cd4f-4a4e-87c8-849cc170ddb0" containerName="registry-server" Jan 20 19:17:46 crc kubenswrapper[4558]: I0120 19:17:46.610363 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d19724d7-cd4f-4a4e-87c8-849cc170ddb0" containerName="registry-server" Jan 20 19:17:46 crc kubenswrapper[4558]: E0120 19:17:46.610388 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d19724d7-cd4f-4a4e-87c8-849cc170ddb0" containerName="extract-content" Jan 20 19:17:46 crc kubenswrapper[4558]: I0120 19:17:46.610398 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d19724d7-cd4f-4a4e-87c8-849cc170ddb0" containerName="extract-content" Jan 20 19:17:46 crc kubenswrapper[4558]: E0120 19:17:46.610411 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d19724d7-cd4f-4a4e-87c8-849cc170ddb0" containerName="extract-utilities" Jan 20 19:17:46 crc kubenswrapper[4558]: I0120 19:17:46.610419 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="d19724d7-cd4f-4a4e-87c8-849cc170ddb0" containerName="extract-utilities" Jan 20 19:17:46 crc kubenswrapper[4558]: I0120 19:17:46.610562 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="d19724d7-cd4f-4a4e-87c8-849cc170ddb0" containerName="registry-server" Jan 20 19:17:46 crc kubenswrapper[4558]: I0120 19:17:46.611777 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pwzff" Jan 20 19:17:46 crc kubenswrapper[4558]: I0120 19:17:46.620854 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-pwzff"] Jan 20 19:17:46 crc kubenswrapper[4558]: I0120 19:17:46.636484 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f461abc1-c8e0-4017-b9d4-1f679c364213-catalog-content\") pod \"redhat-marketplace-pwzff\" (UID: \"f461abc1-c8e0-4017-b9d4-1f679c364213\") " pod="openshift-marketplace/redhat-marketplace-pwzff" Jan 20 19:17:46 crc kubenswrapper[4558]: I0120 19:17:46.636578 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xk5cf\" (UniqueName: \"kubernetes.io/projected/f461abc1-c8e0-4017-b9d4-1f679c364213-kube-api-access-xk5cf\") pod \"redhat-marketplace-pwzff\" (UID: \"f461abc1-c8e0-4017-b9d4-1f679c364213\") " pod="openshift-marketplace/redhat-marketplace-pwzff" Jan 20 19:17:46 crc kubenswrapper[4558]: I0120 19:17:46.636637 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f461abc1-c8e0-4017-b9d4-1f679c364213-utilities\") pod \"redhat-marketplace-pwzff\" (UID: \"f461abc1-c8e0-4017-b9d4-1f679c364213\") " pod="openshift-marketplace/redhat-marketplace-pwzff" Jan 20 19:17:46 crc kubenswrapper[4558]: I0120 19:17:46.738759 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f461abc1-c8e0-4017-b9d4-1f679c364213-catalog-content\") pod \"redhat-marketplace-pwzff\" (UID: \"f461abc1-c8e0-4017-b9d4-1f679c364213\") " pod="openshift-marketplace/redhat-marketplace-pwzff" Jan 20 19:17:46 crc kubenswrapper[4558]: I0120 19:17:46.738865 4558 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xk5cf\" (UniqueName: \"kubernetes.io/projected/f461abc1-c8e0-4017-b9d4-1f679c364213-kube-api-access-xk5cf\") pod \"redhat-marketplace-pwzff\" (UID: \"f461abc1-c8e0-4017-b9d4-1f679c364213\") " pod="openshift-marketplace/redhat-marketplace-pwzff" Jan 20 19:17:46 crc kubenswrapper[4558]: I0120 19:17:46.738913 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f461abc1-c8e0-4017-b9d4-1f679c364213-utilities\") pod \"redhat-marketplace-pwzff\" (UID: \"f461abc1-c8e0-4017-b9d4-1f679c364213\") " pod="openshift-marketplace/redhat-marketplace-pwzff" Jan 20 19:17:46 crc kubenswrapper[4558]: I0120 19:17:46.739334 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f461abc1-c8e0-4017-b9d4-1f679c364213-catalog-content\") pod \"redhat-marketplace-pwzff\" (UID: \"f461abc1-c8e0-4017-b9d4-1f679c364213\") " pod="openshift-marketplace/redhat-marketplace-pwzff" Jan 20 19:17:46 crc kubenswrapper[4558]: I0120 19:17:46.739414 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f461abc1-c8e0-4017-b9d4-1f679c364213-utilities\") pod \"redhat-marketplace-pwzff\" (UID: \"f461abc1-c8e0-4017-b9d4-1f679c364213\") " pod="openshift-marketplace/redhat-marketplace-pwzff" Jan 20 19:17:46 crc kubenswrapper[4558]: I0120 19:17:46.757972 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xk5cf\" (UniqueName: \"kubernetes.io/projected/f461abc1-c8e0-4017-b9d4-1f679c364213-kube-api-access-xk5cf\") pod \"redhat-marketplace-pwzff\" (UID: \"f461abc1-c8e0-4017-b9d4-1f679c364213\") " pod="openshift-marketplace/redhat-marketplace-pwzff" Jan 20 19:17:46 crc kubenswrapper[4558]: I0120 19:17:46.929814 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pwzff" Jan 20 19:17:47 crc kubenswrapper[4558]: I0120 19:17:47.479576 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-pwzff"] Jan 20 19:17:47 crc kubenswrapper[4558]: I0120 19:17:47.598202 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pwzff" event={"ID":"f461abc1-c8e0-4017-b9d4-1f679c364213","Type":"ContainerStarted","Data":"0e87672c309c916b2405c96994d81c2867381709380431d0a5cf3ee615472698"} Jan 20 19:17:48 crc kubenswrapper[4558]: I0120 19:17:48.606126 4558 generic.go:334] "Generic (PLEG): container finished" podID="f461abc1-c8e0-4017-b9d4-1f679c364213" containerID="b2e591c0b9f91814b5f5384102a0eae80afe73a8ffa112a4ef51c1c01631d4f8" exitCode=0 Jan 20 19:17:48 crc kubenswrapper[4558]: I0120 19:17:48.606197 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pwzff" event={"ID":"f461abc1-c8e0-4017-b9d4-1f679c364213","Type":"ContainerDied","Data":"b2e591c0b9f91814b5f5384102a0eae80afe73a8ffa112a4ef51c1c01631d4f8"} Jan 20 19:17:48 crc kubenswrapper[4558]: I0120 19:17:48.607885 4558 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 20 19:17:50 crc kubenswrapper[4558]: I0120 19:17:50.619461 4558 generic.go:334] "Generic (PLEG): container finished" podID="f461abc1-c8e0-4017-b9d4-1f679c364213" containerID="e66632e78cc698a1ce1830eb91de7d80de2ad7552aecb45bf4e6f8cc1d0753fe" exitCode=0 Jan 20 19:17:50 crc kubenswrapper[4558]: I0120 19:17:50.619512 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pwzff" event={"ID":"f461abc1-c8e0-4017-b9d4-1f679c364213","Type":"ContainerDied","Data":"e66632e78cc698a1ce1830eb91de7d80de2ad7552aecb45bf4e6f8cc1d0753fe"} Jan 20 19:17:51 crc kubenswrapper[4558]: I0120 19:17:51.626380 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pwzff" event={"ID":"f461abc1-c8e0-4017-b9d4-1f679c364213","Type":"ContainerStarted","Data":"c02804510e99b18528cbb2778b7987ea640b24422970e3f92e10de39dffd3147"} Jan 20 19:17:51 crc kubenswrapper[4558]: I0120 19:17:51.647141 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-pwzff" podStartSLOduration=3.0903725 podStartE2EDuration="5.647124597s" podCreationTimestamp="2026-01-20 19:17:46 +0000 UTC" firstStartedPulling="2026-01-20 19:17:48.607596638 +0000 UTC m=+9362.367934604" lastFinishedPulling="2026-01-20 19:17:51.164348734 +0000 UTC m=+9364.924686701" observedRunningTime="2026-01-20 19:17:51.638707223 +0000 UTC m=+9365.399045190" watchObservedRunningTime="2026-01-20 19:17:51.647124597 +0000 UTC m=+9365.407462564" Jan 20 19:17:56 crc kubenswrapper[4558]: I0120 19:17:56.930995 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-pwzff" Jan 20 19:17:56 crc kubenswrapper[4558]: I0120 19:17:56.931679 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-pwzff" Jan 20 19:17:56 crc kubenswrapper[4558]: I0120 19:17:56.968572 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-pwzff" Jan 20 19:17:57 crc kubenswrapper[4558]: I0120 19:17:57.917911 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-marketplace/redhat-marketplace-pwzff" Jan 20 19:17:57 crc kubenswrapper[4558]: I0120 19:17:57.957743 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-pwzff"] Jan 20 19:17:59 crc kubenswrapper[4558]: I0120 19:17:59.673419 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-pwzff" podUID="f461abc1-c8e0-4017-b9d4-1f679c364213" containerName="registry-server" containerID="cri-o://c02804510e99b18528cbb2778b7987ea640b24422970e3f92e10de39dffd3147" gracePeriod=2 Jan 20 19:18:00 crc kubenswrapper[4558]: I0120 19:18:00.010278 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pwzff" Jan 20 19:18:00 crc kubenswrapper[4558]: I0120 19:18:00.135228 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f461abc1-c8e0-4017-b9d4-1f679c364213-catalog-content\") pod \"f461abc1-c8e0-4017-b9d4-1f679c364213\" (UID: \"f461abc1-c8e0-4017-b9d4-1f679c364213\") " Jan 20 19:18:00 crc kubenswrapper[4558]: I0120 19:18:00.135374 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f461abc1-c8e0-4017-b9d4-1f679c364213-utilities\") pod \"f461abc1-c8e0-4017-b9d4-1f679c364213\" (UID: \"f461abc1-c8e0-4017-b9d4-1f679c364213\") " Jan 20 19:18:00 crc kubenswrapper[4558]: I0120 19:18:00.135449 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xk5cf\" (UniqueName: \"kubernetes.io/projected/f461abc1-c8e0-4017-b9d4-1f679c364213-kube-api-access-xk5cf\") pod \"f461abc1-c8e0-4017-b9d4-1f679c364213\" (UID: \"f461abc1-c8e0-4017-b9d4-1f679c364213\") " Jan 20 19:18:00 crc kubenswrapper[4558]: I0120 19:18:00.142888 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f461abc1-c8e0-4017-b9d4-1f679c364213-utilities" (OuterVolumeSpecName: "utilities") pod "f461abc1-c8e0-4017-b9d4-1f679c364213" (UID: "f461abc1-c8e0-4017-b9d4-1f679c364213"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 19:18:00 crc kubenswrapper[4558]: I0120 19:18:00.153868 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f461abc1-c8e0-4017-b9d4-1f679c364213-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f461abc1-c8e0-4017-b9d4-1f679c364213" (UID: "f461abc1-c8e0-4017-b9d4-1f679c364213"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 19:18:00 crc kubenswrapper[4558]: I0120 19:18:00.166333 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f461abc1-c8e0-4017-b9d4-1f679c364213-kube-api-access-xk5cf" (OuterVolumeSpecName: "kube-api-access-xk5cf") pod "f461abc1-c8e0-4017-b9d4-1f679c364213" (UID: "f461abc1-c8e0-4017-b9d4-1f679c364213"). InnerVolumeSpecName "kube-api-access-xk5cf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 19:18:00 crc kubenswrapper[4558]: I0120 19:18:00.237048 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xk5cf\" (UniqueName: \"kubernetes.io/projected/f461abc1-c8e0-4017-b9d4-1f679c364213-kube-api-access-xk5cf\") on node \"crc\" DevicePath \"\"" Jan 20 19:18:00 crc kubenswrapper[4558]: I0120 19:18:00.237291 4558 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f461abc1-c8e0-4017-b9d4-1f679c364213-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 20 19:18:00 crc kubenswrapper[4558]: I0120 19:18:00.237360 4558 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f461abc1-c8e0-4017-b9d4-1f679c364213-utilities\") on node \"crc\" DevicePath \"\"" Jan 20 19:18:00 crc kubenswrapper[4558]: I0120 19:18:00.680355 4558 generic.go:334] "Generic (PLEG): container finished" podID="f461abc1-c8e0-4017-b9d4-1f679c364213" containerID="c02804510e99b18528cbb2778b7987ea640b24422970e3f92e10de39dffd3147" exitCode=0 Jan 20 19:18:00 crc kubenswrapper[4558]: I0120 19:18:00.680432 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pwzff" Jan 20 19:18:00 crc kubenswrapper[4558]: I0120 19:18:00.680425 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pwzff" event={"ID":"f461abc1-c8e0-4017-b9d4-1f679c364213","Type":"ContainerDied","Data":"c02804510e99b18528cbb2778b7987ea640b24422970e3f92e10de39dffd3147"} Jan 20 19:18:00 crc kubenswrapper[4558]: I0120 19:18:00.680773 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pwzff" event={"ID":"f461abc1-c8e0-4017-b9d4-1f679c364213","Type":"ContainerDied","Data":"0e87672c309c916b2405c96994d81c2867381709380431d0a5cf3ee615472698"} Jan 20 19:18:00 crc kubenswrapper[4558]: I0120 19:18:00.680801 4558 scope.go:117] "RemoveContainer" containerID="c02804510e99b18528cbb2778b7987ea640b24422970e3f92e10de39dffd3147" Jan 20 19:18:00 crc kubenswrapper[4558]: I0120 19:18:00.700204 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-pwzff"] Jan 20 19:18:00 crc kubenswrapper[4558]: I0120 19:18:00.709566 4558 scope.go:117] "RemoveContainer" containerID="e66632e78cc698a1ce1830eb91de7d80de2ad7552aecb45bf4e6f8cc1d0753fe" Jan 20 19:18:00 crc kubenswrapper[4558]: I0120 19:18:00.710878 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-pwzff"] Jan 20 19:18:00 crc kubenswrapper[4558]: I0120 19:18:00.742561 4558 scope.go:117] "RemoveContainer" containerID="b2e591c0b9f91814b5f5384102a0eae80afe73a8ffa112a4ef51c1c01631d4f8" Jan 20 19:18:00 crc kubenswrapper[4558]: I0120 19:18:00.757701 4558 scope.go:117] "RemoveContainer" containerID="c02804510e99b18528cbb2778b7987ea640b24422970e3f92e10de39dffd3147" Jan 20 19:18:00 crc kubenswrapper[4558]: E0120 19:18:00.758041 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c02804510e99b18528cbb2778b7987ea640b24422970e3f92e10de39dffd3147\": container with ID starting with c02804510e99b18528cbb2778b7987ea640b24422970e3f92e10de39dffd3147 not found: ID does not exist" containerID="c02804510e99b18528cbb2778b7987ea640b24422970e3f92e10de39dffd3147" Jan 20 19:18:00 crc kubenswrapper[4558]: I0120 19:18:00.758089 4558 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c02804510e99b18528cbb2778b7987ea640b24422970e3f92e10de39dffd3147"} err="failed to get container status \"c02804510e99b18528cbb2778b7987ea640b24422970e3f92e10de39dffd3147\": rpc error: code = NotFound desc = could not find container \"c02804510e99b18528cbb2778b7987ea640b24422970e3f92e10de39dffd3147\": container with ID starting with c02804510e99b18528cbb2778b7987ea640b24422970e3f92e10de39dffd3147 not found: ID does not exist" Jan 20 19:18:00 crc kubenswrapper[4558]: I0120 19:18:00.758116 4558 scope.go:117] "RemoveContainer" containerID="e66632e78cc698a1ce1830eb91de7d80de2ad7552aecb45bf4e6f8cc1d0753fe" Jan 20 19:18:00 crc kubenswrapper[4558]: E0120 19:18:00.758492 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e66632e78cc698a1ce1830eb91de7d80de2ad7552aecb45bf4e6f8cc1d0753fe\": container with ID starting with e66632e78cc698a1ce1830eb91de7d80de2ad7552aecb45bf4e6f8cc1d0753fe not found: ID does not exist" containerID="e66632e78cc698a1ce1830eb91de7d80de2ad7552aecb45bf4e6f8cc1d0753fe" Jan 20 19:18:00 crc kubenswrapper[4558]: I0120 19:18:00.758522 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e66632e78cc698a1ce1830eb91de7d80de2ad7552aecb45bf4e6f8cc1d0753fe"} err="failed to get container status \"e66632e78cc698a1ce1830eb91de7d80de2ad7552aecb45bf4e6f8cc1d0753fe\": rpc error: code = NotFound desc = could not find container \"e66632e78cc698a1ce1830eb91de7d80de2ad7552aecb45bf4e6f8cc1d0753fe\": container with ID starting with e66632e78cc698a1ce1830eb91de7d80de2ad7552aecb45bf4e6f8cc1d0753fe not found: ID does not exist" Jan 20 19:18:00 crc kubenswrapper[4558]: I0120 19:18:00.758538 4558 scope.go:117] "RemoveContainer" containerID="b2e591c0b9f91814b5f5384102a0eae80afe73a8ffa112a4ef51c1c01631d4f8" Jan 20 19:18:00 crc kubenswrapper[4558]: E0120 19:18:00.758749 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b2e591c0b9f91814b5f5384102a0eae80afe73a8ffa112a4ef51c1c01631d4f8\": container with ID starting with b2e591c0b9f91814b5f5384102a0eae80afe73a8ffa112a4ef51c1c01631d4f8 not found: ID does not exist" containerID="b2e591c0b9f91814b5f5384102a0eae80afe73a8ffa112a4ef51c1c01631d4f8" Jan 20 19:18:00 crc kubenswrapper[4558]: I0120 19:18:00.758770 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b2e591c0b9f91814b5f5384102a0eae80afe73a8ffa112a4ef51c1c01631d4f8"} err="failed to get container status \"b2e591c0b9f91814b5f5384102a0eae80afe73a8ffa112a4ef51c1c01631d4f8\": rpc error: code = NotFound desc = could not find container \"b2e591c0b9f91814b5f5384102a0eae80afe73a8ffa112a4ef51c1c01631d4f8\": container with ID starting with b2e591c0b9f91814b5f5384102a0eae80afe73a8ffa112a4ef51c1c01631d4f8 not found: ID does not exist" Jan 20 19:18:02 crc kubenswrapper[4558]: I0120 19:18:02.573553 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f461abc1-c8e0-4017-b9d4-1f679c364213" path="/var/lib/kubelet/pods/f461abc1-c8e0-4017-b9d4-1f679c364213/volumes" Jan 20 19:19:03 crc kubenswrapper[4558]: I0120 19:19:03.465812 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-5j74t"] Jan 20 19:19:03 crc kubenswrapper[4558]: E0120 19:19:03.467087 4558 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="f461abc1-c8e0-4017-b9d4-1f679c364213" containerName="extract-content" Jan 20 19:19:03 crc kubenswrapper[4558]: I0120 19:19:03.467109 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f461abc1-c8e0-4017-b9d4-1f679c364213" containerName="extract-content" Jan 20 19:19:03 crc kubenswrapper[4558]: E0120 19:19:03.467132 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f461abc1-c8e0-4017-b9d4-1f679c364213" containerName="extract-utilities" Jan 20 19:19:03 crc kubenswrapper[4558]: I0120 19:19:03.467140 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f461abc1-c8e0-4017-b9d4-1f679c364213" containerName="extract-utilities" Jan 20 19:19:03 crc kubenswrapper[4558]: E0120 19:19:03.467158 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f461abc1-c8e0-4017-b9d4-1f679c364213" containerName="registry-server" Jan 20 19:19:03 crc kubenswrapper[4558]: I0120 19:19:03.467182 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="f461abc1-c8e0-4017-b9d4-1f679c364213" containerName="registry-server" Jan 20 19:19:03 crc kubenswrapper[4558]: I0120 19:19:03.467356 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="f461abc1-c8e0-4017-b9d4-1f679c364213" containerName="registry-server" Jan 20 19:19:03 crc kubenswrapper[4558]: I0120 19:19:03.468657 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5j74t" Jan 20 19:19:03 crc kubenswrapper[4558]: I0120 19:19:03.476812 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5j74t"] Jan 20 19:19:03 crc kubenswrapper[4558]: I0120 19:19:03.564609 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d7d6f74-5354-48de-85da-41599c9957a1-catalog-content\") pod \"certified-operators-5j74t\" (UID: \"1d7d6f74-5354-48de-85da-41599c9957a1\") " pod="openshift-marketplace/certified-operators-5j74t" Jan 20 19:19:03 crc kubenswrapper[4558]: I0120 19:19:03.564799 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d7spn\" (UniqueName: \"kubernetes.io/projected/1d7d6f74-5354-48de-85da-41599c9957a1-kube-api-access-d7spn\") pod \"certified-operators-5j74t\" (UID: \"1d7d6f74-5354-48de-85da-41599c9957a1\") " pod="openshift-marketplace/certified-operators-5j74t" Jan 20 19:19:03 crc kubenswrapper[4558]: I0120 19:19:03.564907 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d7d6f74-5354-48de-85da-41599c9957a1-utilities\") pod \"certified-operators-5j74t\" (UID: \"1d7d6f74-5354-48de-85da-41599c9957a1\") " pod="openshift-marketplace/certified-operators-5j74t" Jan 20 19:19:03 crc kubenswrapper[4558]: I0120 19:19:03.666309 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d7spn\" (UniqueName: \"kubernetes.io/projected/1d7d6f74-5354-48de-85da-41599c9957a1-kube-api-access-d7spn\") pod \"certified-operators-5j74t\" (UID: \"1d7d6f74-5354-48de-85da-41599c9957a1\") " pod="openshift-marketplace/certified-operators-5j74t" Jan 20 19:19:03 crc kubenswrapper[4558]: I0120 19:19:03.666396 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d7d6f74-5354-48de-85da-41599c9957a1-utilities\") pod 
\"certified-operators-5j74t\" (UID: \"1d7d6f74-5354-48de-85da-41599c9957a1\") " pod="openshift-marketplace/certified-operators-5j74t" Jan 20 19:19:03 crc kubenswrapper[4558]: I0120 19:19:03.666570 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d7d6f74-5354-48de-85da-41599c9957a1-catalog-content\") pod \"certified-operators-5j74t\" (UID: \"1d7d6f74-5354-48de-85da-41599c9957a1\") " pod="openshift-marketplace/certified-operators-5j74t" Jan 20 19:19:03 crc kubenswrapper[4558]: I0120 19:19:03.667527 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d7d6f74-5354-48de-85da-41599c9957a1-catalog-content\") pod \"certified-operators-5j74t\" (UID: \"1d7d6f74-5354-48de-85da-41599c9957a1\") " pod="openshift-marketplace/certified-operators-5j74t" Jan 20 19:19:03 crc kubenswrapper[4558]: I0120 19:19:03.667847 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d7d6f74-5354-48de-85da-41599c9957a1-utilities\") pod \"certified-operators-5j74t\" (UID: \"1d7d6f74-5354-48de-85da-41599c9957a1\") " pod="openshift-marketplace/certified-operators-5j74t" Jan 20 19:19:03 crc kubenswrapper[4558]: I0120 19:19:03.689319 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d7spn\" (UniqueName: \"kubernetes.io/projected/1d7d6f74-5354-48de-85da-41599c9957a1-kube-api-access-d7spn\") pod \"certified-operators-5j74t\" (UID: \"1d7d6f74-5354-48de-85da-41599c9957a1\") " pod="openshift-marketplace/certified-operators-5j74t" Jan 20 19:19:03 crc kubenswrapper[4558]: I0120 19:19:03.793117 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-5j74t" Jan 20 19:19:04 crc kubenswrapper[4558]: I0120 19:19:04.215763 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5j74t"] Jan 20 19:19:05 crc kubenswrapper[4558]: I0120 19:19:05.152329 4558 generic.go:334] "Generic (PLEG): container finished" podID="1d7d6f74-5354-48de-85da-41599c9957a1" containerID="48278375a6eb085fa0f4da703a0017382e4d20e46675d2b640499ff6fe4dfa0b" exitCode=0 Jan 20 19:19:05 crc kubenswrapper[4558]: I0120 19:19:05.152429 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5j74t" event={"ID":"1d7d6f74-5354-48de-85da-41599c9957a1","Type":"ContainerDied","Data":"48278375a6eb085fa0f4da703a0017382e4d20e46675d2b640499ff6fe4dfa0b"} Jan 20 19:19:05 crc kubenswrapper[4558]: I0120 19:19:05.152783 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5j74t" event={"ID":"1d7d6f74-5354-48de-85da-41599c9957a1","Type":"ContainerStarted","Data":"fc678ee6a11e0e98141636e0095946edb16f1938e44454d0c0f39f191e168e20"} Jan 20 19:19:06 crc kubenswrapper[4558]: I0120 19:19:06.160384 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5j74t" event={"ID":"1d7d6f74-5354-48de-85da-41599c9957a1","Type":"ContainerStarted","Data":"9d269730e7f99674dd9dd398c1afbe72341c069dd57679ed697613d9ddaaf78d"} Jan 20 19:19:07 crc kubenswrapper[4558]: I0120 19:19:07.170862 4558 generic.go:334] "Generic (PLEG): container finished" podID="1d7d6f74-5354-48de-85da-41599c9957a1" containerID="9d269730e7f99674dd9dd398c1afbe72341c069dd57679ed697613d9ddaaf78d" exitCode=0 Jan 20 19:19:07 crc kubenswrapper[4558]: I0120 19:19:07.170926 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5j74t" event={"ID":"1d7d6f74-5354-48de-85da-41599c9957a1","Type":"ContainerDied","Data":"9d269730e7f99674dd9dd398c1afbe72341c069dd57679ed697613d9ddaaf78d"} Jan 20 19:19:08 crc kubenswrapper[4558]: I0120 19:19:08.181265 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5j74t" event={"ID":"1d7d6f74-5354-48de-85da-41599c9957a1","Type":"ContainerStarted","Data":"90a8911e9ff3da51ec37b8458d720f85b391c2fa4701398ad0773bd0907fe46c"} Jan 20 19:19:08 crc kubenswrapper[4558]: I0120 19:19:08.202304 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-5j74t" podStartSLOduration=2.630822272 podStartE2EDuration="5.202280715s" podCreationTimestamp="2026-01-20 19:19:03 +0000 UTC" firstStartedPulling="2026-01-20 19:19:05.154934638 +0000 UTC m=+9438.915272605" lastFinishedPulling="2026-01-20 19:19:07.726393081 +0000 UTC m=+9441.486731048" observedRunningTime="2026-01-20 19:19:08.199980612 +0000 UTC m=+9441.960318568" watchObservedRunningTime="2026-01-20 19:19:08.202280715 +0000 UTC m=+9441.962618673" Jan 20 19:19:13 crc kubenswrapper[4558]: I0120 19:19:13.794043 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-5j74t" Jan 20 19:19:13 crc kubenswrapper[4558]: I0120 19:19:13.794604 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-5j74t" Jan 20 19:19:13 crc kubenswrapper[4558]: I0120 19:19:13.836477 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/certified-operators-5j74t" Jan 20 19:19:14 crc kubenswrapper[4558]: I0120 19:19:14.254480 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-5j74t" Jan 20 19:19:14 crc kubenswrapper[4558]: I0120 19:19:14.298869 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5j74t"] Jan 20 19:19:16 crc kubenswrapper[4558]: I0120 19:19:16.230511 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-5j74t" podUID="1d7d6f74-5354-48de-85da-41599c9957a1" containerName="registry-server" containerID="cri-o://90a8911e9ff3da51ec37b8458d720f85b391c2fa4701398ad0773bd0907fe46c" gracePeriod=2 Jan 20 19:19:16 crc kubenswrapper[4558]: I0120 19:19:16.602127 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5j74t" Jan 20 19:19:16 crc kubenswrapper[4558]: I0120 19:19:16.666444 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d7d6f74-5354-48de-85da-41599c9957a1-utilities\") pod \"1d7d6f74-5354-48de-85da-41599c9957a1\" (UID: \"1d7d6f74-5354-48de-85da-41599c9957a1\") " Jan 20 19:19:16 crc kubenswrapper[4558]: I0120 19:19:16.666613 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d7spn\" (UniqueName: \"kubernetes.io/projected/1d7d6f74-5354-48de-85da-41599c9957a1-kube-api-access-d7spn\") pod \"1d7d6f74-5354-48de-85da-41599c9957a1\" (UID: \"1d7d6f74-5354-48de-85da-41599c9957a1\") " Jan 20 19:19:16 crc kubenswrapper[4558]: I0120 19:19:16.666694 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d7d6f74-5354-48de-85da-41599c9957a1-catalog-content\") pod \"1d7d6f74-5354-48de-85da-41599c9957a1\" (UID: \"1d7d6f74-5354-48de-85da-41599c9957a1\") " Jan 20 19:19:16 crc kubenswrapper[4558]: I0120 19:19:16.667599 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d7d6f74-5354-48de-85da-41599c9957a1-utilities" (OuterVolumeSpecName: "utilities") pod "1d7d6f74-5354-48de-85da-41599c9957a1" (UID: "1d7d6f74-5354-48de-85da-41599c9957a1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 19:19:16 crc kubenswrapper[4558]: I0120 19:19:16.667922 4558 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d7d6f74-5354-48de-85da-41599c9957a1-utilities\") on node \"crc\" DevicePath \"\"" Jan 20 19:19:16 crc kubenswrapper[4558]: I0120 19:19:16.695571 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d7d6f74-5354-48de-85da-41599c9957a1-kube-api-access-d7spn" (OuterVolumeSpecName: "kube-api-access-d7spn") pod "1d7d6f74-5354-48de-85da-41599c9957a1" (UID: "1d7d6f74-5354-48de-85da-41599c9957a1"). InnerVolumeSpecName "kube-api-access-d7spn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 19:19:16 crc kubenswrapper[4558]: I0120 19:19:16.770306 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d7spn\" (UniqueName: \"kubernetes.io/projected/1d7d6f74-5354-48de-85da-41599c9957a1-kube-api-access-d7spn\") on node \"crc\" DevicePath \"\"" Jan 20 19:19:16 crc kubenswrapper[4558]: I0120 19:19:16.805994 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d7d6f74-5354-48de-85da-41599c9957a1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d7d6f74-5354-48de-85da-41599c9957a1" (UID: "1d7d6f74-5354-48de-85da-41599c9957a1"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 19:19:16 crc kubenswrapper[4558]: I0120 19:19:16.871627 4558 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d7d6f74-5354-48de-85da-41599c9957a1-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 20 19:19:17 crc kubenswrapper[4558]: I0120 19:19:17.240869 4558 generic.go:334] "Generic (PLEG): container finished" podID="1d7d6f74-5354-48de-85da-41599c9957a1" containerID="90a8911e9ff3da51ec37b8458d720f85b391c2fa4701398ad0773bd0907fe46c" exitCode=0 Jan 20 19:19:17 crc kubenswrapper[4558]: I0120 19:19:17.240924 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5j74t" event={"ID":"1d7d6f74-5354-48de-85da-41599c9957a1","Type":"ContainerDied","Data":"90a8911e9ff3da51ec37b8458d720f85b391c2fa4701398ad0773bd0907fe46c"} Jan 20 19:19:17 crc kubenswrapper[4558]: I0120 19:19:17.240985 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5j74t" event={"ID":"1d7d6f74-5354-48de-85da-41599c9957a1","Type":"ContainerDied","Data":"fc678ee6a11e0e98141636e0095946edb16f1938e44454d0c0f39f191e168e20"} Jan 20 19:19:17 crc kubenswrapper[4558]: I0120 19:19:17.241010 4558 scope.go:117] "RemoveContainer" containerID="90a8911e9ff3da51ec37b8458d720f85b391c2fa4701398ad0773bd0907fe46c" Jan 20 19:19:17 crc kubenswrapper[4558]: I0120 19:19:17.241206 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-5j74t" Jan 20 19:19:17 crc kubenswrapper[4558]: I0120 19:19:17.268215 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5j74t"] Jan 20 19:19:17 crc kubenswrapper[4558]: I0120 19:19:17.269459 4558 scope.go:117] "RemoveContainer" containerID="9d269730e7f99674dd9dd398c1afbe72341c069dd57679ed697613d9ddaaf78d" Jan 20 19:19:17 crc kubenswrapper[4558]: I0120 19:19:17.271470 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-5j74t"] Jan 20 19:19:17 crc kubenswrapper[4558]: I0120 19:19:17.297656 4558 scope.go:117] "RemoveContainer" containerID="48278375a6eb085fa0f4da703a0017382e4d20e46675d2b640499ff6fe4dfa0b" Jan 20 19:19:17 crc kubenswrapper[4558]: I0120 19:19:17.610960 4558 scope.go:117] "RemoveContainer" containerID="90a8911e9ff3da51ec37b8458d720f85b391c2fa4701398ad0773bd0907fe46c" Jan 20 19:19:17 crc kubenswrapper[4558]: E0120 19:19:17.611767 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"90a8911e9ff3da51ec37b8458d720f85b391c2fa4701398ad0773bd0907fe46c\": container with ID starting with 90a8911e9ff3da51ec37b8458d720f85b391c2fa4701398ad0773bd0907fe46c not found: ID does not exist" containerID="90a8911e9ff3da51ec37b8458d720f85b391c2fa4701398ad0773bd0907fe46c" Jan 20 19:19:17 crc kubenswrapper[4558]: I0120 19:19:17.611845 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"90a8911e9ff3da51ec37b8458d720f85b391c2fa4701398ad0773bd0907fe46c"} err="failed to get container status \"90a8911e9ff3da51ec37b8458d720f85b391c2fa4701398ad0773bd0907fe46c\": rpc error: code = NotFound desc = could not find container \"90a8911e9ff3da51ec37b8458d720f85b391c2fa4701398ad0773bd0907fe46c\": container with ID starting with 90a8911e9ff3da51ec37b8458d720f85b391c2fa4701398ad0773bd0907fe46c not found: ID does not exist" Jan 20 19:19:17 crc kubenswrapper[4558]: I0120 19:19:17.611884 4558 scope.go:117] "RemoveContainer" containerID="9d269730e7f99674dd9dd398c1afbe72341c069dd57679ed697613d9ddaaf78d" Jan 20 19:19:17 crc kubenswrapper[4558]: E0120 19:19:17.612656 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9d269730e7f99674dd9dd398c1afbe72341c069dd57679ed697613d9ddaaf78d\": container with ID starting with 9d269730e7f99674dd9dd398c1afbe72341c069dd57679ed697613d9ddaaf78d not found: ID does not exist" containerID="9d269730e7f99674dd9dd398c1afbe72341c069dd57679ed697613d9ddaaf78d" Jan 20 19:19:17 crc kubenswrapper[4558]: I0120 19:19:17.612727 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9d269730e7f99674dd9dd398c1afbe72341c069dd57679ed697613d9ddaaf78d"} err="failed to get container status \"9d269730e7f99674dd9dd398c1afbe72341c069dd57679ed697613d9ddaaf78d\": rpc error: code = NotFound desc = could not find container \"9d269730e7f99674dd9dd398c1afbe72341c069dd57679ed697613d9ddaaf78d\": container with ID starting with 9d269730e7f99674dd9dd398c1afbe72341c069dd57679ed697613d9ddaaf78d not found: ID does not exist" Jan 20 19:19:17 crc kubenswrapper[4558]: I0120 19:19:17.612770 4558 scope.go:117] "RemoveContainer" containerID="48278375a6eb085fa0f4da703a0017382e4d20e46675d2b640499ff6fe4dfa0b" Jan 20 19:19:17 crc kubenswrapper[4558]: E0120 19:19:17.613551 4558 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"48278375a6eb085fa0f4da703a0017382e4d20e46675d2b640499ff6fe4dfa0b\": container with ID starting with 48278375a6eb085fa0f4da703a0017382e4d20e46675d2b640499ff6fe4dfa0b not found: ID does not exist" containerID="48278375a6eb085fa0f4da703a0017382e4d20e46675d2b640499ff6fe4dfa0b" Jan 20 19:19:17 crc kubenswrapper[4558]: I0120 19:19:17.613609 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"48278375a6eb085fa0f4da703a0017382e4d20e46675d2b640499ff6fe4dfa0b"} err="failed to get container status \"48278375a6eb085fa0f4da703a0017382e4d20e46675d2b640499ff6fe4dfa0b\": rpc error: code = NotFound desc = could not find container \"48278375a6eb085fa0f4da703a0017382e4d20e46675d2b640499ff6fe4dfa0b\": container with ID starting with 48278375a6eb085fa0f4da703a0017382e4d20e46675d2b640499ff6fe4dfa0b not found: ID does not exist" Jan 20 19:19:18 crc kubenswrapper[4558]: I0120 19:19:18.574049 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d7d6f74-5354-48de-85da-41599c9957a1" path="/var/lib/kubelet/pods/1d7d6f74-5354-48de-85da-41599c9957a1/volumes" Jan 20 19:19:27 crc kubenswrapper[4558]: I0120 19:19:27.329803 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 19:19:27 crc kubenswrapper[4558]: I0120 19:19:27.330289 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 19:19:57 crc kubenswrapper[4558]: I0120 19:19:57.329729 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 19:19:57 crc kubenswrapper[4558]: I0120 19:19:57.330231 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 19:20:27 crc kubenswrapper[4558]: I0120 19:20:27.329691 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 19:20:27 crc kubenswrapper[4558]: I0120 19:20:27.330284 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 19:20:27 crc kubenswrapper[4558]: I0120 19:20:27.330336 4558 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" 
status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" Jan 20 19:20:27 crc kubenswrapper[4558]: I0120 19:20:27.331141 4558 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"74a09cf01a9482e3a74e95530f462255ee83e3acfe30e90074e36f1623c24d9f"} pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 20 19:20:27 crc kubenswrapper[4558]: I0120 19:20:27.331217 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" containerID="cri-o://74a09cf01a9482e3a74e95530f462255ee83e3acfe30e90074e36f1623c24d9f" gracePeriod=600 Jan 20 19:20:27 crc kubenswrapper[4558]: I0120 19:20:27.754789 4558 generic.go:334] "Generic (PLEG): container finished" podID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerID="74a09cf01a9482e3a74e95530f462255ee83e3acfe30e90074e36f1623c24d9f" exitCode=0 Jan 20 19:20:27 crc kubenswrapper[4558]: I0120 19:20:27.754874 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerDied","Data":"74a09cf01a9482e3a74e95530f462255ee83e3acfe30e90074e36f1623c24d9f"} Jan 20 19:20:27 crc kubenswrapper[4558]: I0120 19:20:27.755045 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerStarted","Data":"54aafd1463e4e36a97be0dd83a2647d2c23b657acbdf41034d7bf93d4b3d68fd"} Jan 20 19:20:27 crc kubenswrapper[4558]: I0120 19:20:27.755079 4558 scope.go:117] "RemoveContainer" containerID="f564c14a07109e541e9fbbbe365d8cc006309f8b6f2a46364e7e327b0d781226" Jan 20 19:22:19 crc kubenswrapper[4558]: I0120 19:22:19.478326 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-9fpcr"] Jan 20 19:22:19 crc kubenswrapper[4558]: E0120 19:22:19.479386 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d7d6f74-5354-48de-85da-41599c9957a1" containerName="extract-content" Jan 20 19:22:19 crc kubenswrapper[4558]: I0120 19:22:19.479408 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d7d6f74-5354-48de-85da-41599c9957a1" containerName="extract-content" Jan 20 19:22:19 crc kubenswrapper[4558]: E0120 19:22:19.479451 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d7d6f74-5354-48de-85da-41599c9957a1" containerName="extract-utilities" Jan 20 19:22:19 crc kubenswrapper[4558]: I0120 19:22:19.479472 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d7d6f74-5354-48de-85da-41599c9957a1" containerName="extract-utilities" Jan 20 19:22:19 crc kubenswrapper[4558]: E0120 19:22:19.479483 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d7d6f74-5354-48de-85da-41599c9957a1" containerName="registry-server" Jan 20 19:22:19 crc kubenswrapper[4558]: I0120 19:22:19.479489 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d7d6f74-5354-48de-85da-41599c9957a1" containerName="registry-server" Jan 20 19:22:19 crc kubenswrapper[4558]: I0120 19:22:19.479617 4558 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="1d7d6f74-5354-48de-85da-41599c9957a1" containerName="registry-server" Jan 20 19:22:19 crc kubenswrapper[4558]: I0120 19:22:19.480578 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9fpcr" Jan 20 19:22:19 crc kubenswrapper[4558]: I0120 19:22:19.496120 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9fpcr"] Jan 20 19:22:19 crc kubenswrapper[4558]: I0120 19:22:19.517089 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5xwlc\" (UniqueName: \"kubernetes.io/projected/dd2b181d-95d7-4b7d-8fb3-383f1eafaf7b-kube-api-access-5xwlc\") pod \"community-operators-9fpcr\" (UID: \"dd2b181d-95d7-4b7d-8fb3-383f1eafaf7b\") " pod="openshift-marketplace/community-operators-9fpcr" Jan 20 19:22:19 crc kubenswrapper[4558]: I0120 19:22:19.517154 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dd2b181d-95d7-4b7d-8fb3-383f1eafaf7b-catalog-content\") pod \"community-operators-9fpcr\" (UID: \"dd2b181d-95d7-4b7d-8fb3-383f1eafaf7b\") " pod="openshift-marketplace/community-operators-9fpcr" Jan 20 19:22:19 crc kubenswrapper[4558]: I0120 19:22:19.517224 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dd2b181d-95d7-4b7d-8fb3-383f1eafaf7b-utilities\") pod \"community-operators-9fpcr\" (UID: \"dd2b181d-95d7-4b7d-8fb3-383f1eafaf7b\") " pod="openshift-marketplace/community-operators-9fpcr" Jan 20 19:22:19 crc kubenswrapper[4558]: I0120 19:22:19.618265 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5xwlc\" (UniqueName: \"kubernetes.io/projected/dd2b181d-95d7-4b7d-8fb3-383f1eafaf7b-kube-api-access-5xwlc\") pod \"community-operators-9fpcr\" (UID: \"dd2b181d-95d7-4b7d-8fb3-383f1eafaf7b\") " pod="openshift-marketplace/community-operators-9fpcr" Jan 20 19:22:19 crc kubenswrapper[4558]: I0120 19:22:19.618376 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dd2b181d-95d7-4b7d-8fb3-383f1eafaf7b-catalog-content\") pod \"community-operators-9fpcr\" (UID: \"dd2b181d-95d7-4b7d-8fb3-383f1eafaf7b\") " pod="openshift-marketplace/community-operators-9fpcr" Jan 20 19:22:19 crc kubenswrapper[4558]: I0120 19:22:19.618402 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dd2b181d-95d7-4b7d-8fb3-383f1eafaf7b-utilities\") pod \"community-operators-9fpcr\" (UID: \"dd2b181d-95d7-4b7d-8fb3-383f1eafaf7b\") " pod="openshift-marketplace/community-operators-9fpcr" Jan 20 19:22:19 crc kubenswrapper[4558]: I0120 19:22:19.618914 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dd2b181d-95d7-4b7d-8fb3-383f1eafaf7b-utilities\") pod \"community-operators-9fpcr\" (UID: \"dd2b181d-95d7-4b7d-8fb3-383f1eafaf7b\") " pod="openshift-marketplace/community-operators-9fpcr" Jan 20 19:22:19 crc kubenswrapper[4558]: I0120 19:22:19.618921 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dd2b181d-95d7-4b7d-8fb3-383f1eafaf7b-catalog-content\") pod \"community-operators-9fpcr\" (UID: 
\"dd2b181d-95d7-4b7d-8fb3-383f1eafaf7b\") " pod="openshift-marketplace/community-operators-9fpcr" Jan 20 19:22:19 crc kubenswrapper[4558]: I0120 19:22:19.635465 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5xwlc\" (UniqueName: \"kubernetes.io/projected/dd2b181d-95d7-4b7d-8fb3-383f1eafaf7b-kube-api-access-5xwlc\") pod \"community-operators-9fpcr\" (UID: \"dd2b181d-95d7-4b7d-8fb3-383f1eafaf7b\") " pod="openshift-marketplace/community-operators-9fpcr" Jan 20 19:22:19 crc kubenswrapper[4558]: I0120 19:22:19.797981 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9fpcr" Jan 20 19:22:20 crc kubenswrapper[4558]: I0120 19:22:20.051832 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9fpcr"] Jan 20 19:22:20 crc kubenswrapper[4558]: I0120 19:22:20.550486 4558 generic.go:334] "Generic (PLEG): container finished" podID="dd2b181d-95d7-4b7d-8fb3-383f1eafaf7b" containerID="a568c3bf70729fe672f05b8ed428afada2ac99024a67b9bf38d644363632ffc7" exitCode=0 Jan 20 19:22:20 crc kubenswrapper[4558]: I0120 19:22:20.550571 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9fpcr" event={"ID":"dd2b181d-95d7-4b7d-8fb3-383f1eafaf7b","Type":"ContainerDied","Data":"a568c3bf70729fe672f05b8ed428afada2ac99024a67b9bf38d644363632ffc7"} Jan 20 19:22:20 crc kubenswrapper[4558]: I0120 19:22:20.550949 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9fpcr" event={"ID":"dd2b181d-95d7-4b7d-8fb3-383f1eafaf7b","Type":"ContainerStarted","Data":"cc3c08c7316eb40aa8927545be84c55a7af0e5e3f7bcb201fdfffee266d6901f"} Jan 20 19:22:21 crc kubenswrapper[4558]: I0120 19:22:21.561855 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9fpcr" event={"ID":"dd2b181d-95d7-4b7d-8fb3-383f1eafaf7b","Type":"ContainerStarted","Data":"27865ba84e10d5f2679bf07a70dcba761861c8e784d2be9ab20a853574035b8f"} Jan 20 19:22:22 crc kubenswrapper[4558]: I0120 19:22:22.574889 4558 generic.go:334] "Generic (PLEG): container finished" podID="dd2b181d-95d7-4b7d-8fb3-383f1eafaf7b" containerID="27865ba84e10d5f2679bf07a70dcba761861c8e784d2be9ab20a853574035b8f" exitCode=0 Jan 20 19:22:22 crc kubenswrapper[4558]: I0120 19:22:22.575144 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9fpcr" event={"ID":"dd2b181d-95d7-4b7d-8fb3-383f1eafaf7b","Type":"ContainerDied","Data":"27865ba84e10d5f2679bf07a70dcba761861c8e784d2be9ab20a853574035b8f"} Jan 20 19:22:23 crc kubenswrapper[4558]: I0120 19:22:23.585542 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9fpcr" event={"ID":"dd2b181d-95d7-4b7d-8fb3-383f1eafaf7b","Type":"ContainerStarted","Data":"1b2d7d4918f766decf705a5fdafef58bc17fc998b48311755e84007795657f4c"} Jan 20 19:22:23 crc kubenswrapper[4558]: I0120 19:22:23.604437 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-9fpcr" podStartSLOduration=2.07684681 podStartE2EDuration="4.604420704s" podCreationTimestamp="2026-01-20 19:22:19 +0000 UTC" firstStartedPulling="2026-01-20 19:22:20.552230407 +0000 UTC m=+9634.312568374" lastFinishedPulling="2026-01-20 19:22:23.079804291 +0000 UTC m=+9636.840142268" observedRunningTime="2026-01-20 19:22:23.6010627 +0000 UTC m=+9637.361400667" 
watchObservedRunningTime="2026-01-20 19:22:23.604420704 +0000 UTC m=+9637.364758661" Jan 20 19:22:27 crc kubenswrapper[4558]: I0120 19:22:27.330064 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 19:22:27 crc kubenswrapper[4558]: I0120 19:22:27.330464 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 19:22:29 crc kubenswrapper[4558]: I0120 19:22:29.798622 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-9fpcr" Jan 20 19:22:29 crc kubenswrapper[4558]: I0120 19:22:29.798670 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-9fpcr" Jan 20 19:22:29 crc kubenswrapper[4558]: I0120 19:22:29.839274 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-9fpcr" Jan 20 19:22:30 crc kubenswrapper[4558]: I0120 19:22:30.659575 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-9fpcr" Jan 20 19:22:30 crc kubenswrapper[4558]: I0120 19:22:30.717784 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-9fpcr"] Jan 20 19:22:32 crc kubenswrapper[4558]: I0120 19:22:32.646794 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-9fpcr" podUID="dd2b181d-95d7-4b7d-8fb3-383f1eafaf7b" containerName="registry-server" containerID="cri-o://1b2d7d4918f766decf705a5fdafef58bc17fc998b48311755e84007795657f4c" gracePeriod=2 Jan 20 19:22:33 crc kubenswrapper[4558]: I0120 19:22:33.002392 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-9fpcr" Jan 20 19:22:33 crc kubenswrapper[4558]: I0120 19:22:33.114737 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dd2b181d-95d7-4b7d-8fb3-383f1eafaf7b-catalog-content\") pod \"dd2b181d-95d7-4b7d-8fb3-383f1eafaf7b\" (UID: \"dd2b181d-95d7-4b7d-8fb3-383f1eafaf7b\") " Jan 20 19:22:33 crc kubenswrapper[4558]: I0120 19:22:33.114888 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5xwlc\" (UniqueName: \"kubernetes.io/projected/dd2b181d-95d7-4b7d-8fb3-383f1eafaf7b-kube-api-access-5xwlc\") pod \"dd2b181d-95d7-4b7d-8fb3-383f1eafaf7b\" (UID: \"dd2b181d-95d7-4b7d-8fb3-383f1eafaf7b\") " Jan 20 19:22:33 crc kubenswrapper[4558]: I0120 19:22:33.114928 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dd2b181d-95d7-4b7d-8fb3-383f1eafaf7b-utilities\") pod \"dd2b181d-95d7-4b7d-8fb3-383f1eafaf7b\" (UID: \"dd2b181d-95d7-4b7d-8fb3-383f1eafaf7b\") " Jan 20 19:22:33 crc kubenswrapper[4558]: I0120 19:22:33.115879 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dd2b181d-95d7-4b7d-8fb3-383f1eafaf7b-utilities" (OuterVolumeSpecName: "utilities") pod "dd2b181d-95d7-4b7d-8fb3-383f1eafaf7b" (UID: "dd2b181d-95d7-4b7d-8fb3-383f1eafaf7b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 19:22:33 crc kubenswrapper[4558]: I0120 19:22:33.124328 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dd2b181d-95d7-4b7d-8fb3-383f1eafaf7b-kube-api-access-5xwlc" (OuterVolumeSpecName: "kube-api-access-5xwlc") pod "dd2b181d-95d7-4b7d-8fb3-383f1eafaf7b" (UID: "dd2b181d-95d7-4b7d-8fb3-383f1eafaf7b"). InnerVolumeSpecName "kube-api-access-5xwlc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 19:22:33 crc kubenswrapper[4558]: I0120 19:22:33.158133 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dd2b181d-95d7-4b7d-8fb3-383f1eafaf7b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "dd2b181d-95d7-4b7d-8fb3-383f1eafaf7b" (UID: "dd2b181d-95d7-4b7d-8fb3-383f1eafaf7b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 19:22:33 crc kubenswrapper[4558]: I0120 19:22:33.217537 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5xwlc\" (UniqueName: \"kubernetes.io/projected/dd2b181d-95d7-4b7d-8fb3-383f1eafaf7b-kube-api-access-5xwlc\") on node \"crc\" DevicePath \"\"" Jan 20 19:22:33 crc kubenswrapper[4558]: I0120 19:22:33.217576 4558 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dd2b181d-95d7-4b7d-8fb3-383f1eafaf7b-utilities\") on node \"crc\" DevicePath \"\"" Jan 20 19:22:33 crc kubenswrapper[4558]: I0120 19:22:33.217591 4558 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dd2b181d-95d7-4b7d-8fb3-383f1eafaf7b-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 20 19:22:33 crc kubenswrapper[4558]: I0120 19:22:33.653679 4558 generic.go:334] "Generic (PLEG): container finished" podID="dd2b181d-95d7-4b7d-8fb3-383f1eafaf7b" containerID="1b2d7d4918f766decf705a5fdafef58bc17fc998b48311755e84007795657f4c" exitCode=0 Jan 20 19:22:33 crc kubenswrapper[4558]: I0120 19:22:33.653721 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9fpcr" event={"ID":"dd2b181d-95d7-4b7d-8fb3-383f1eafaf7b","Type":"ContainerDied","Data":"1b2d7d4918f766decf705a5fdafef58bc17fc998b48311755e84007795657f4c"} Jan 20 19:22:33 crc kubenswrapper[4558]: I0120 19:22:33.653744 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9fpcr" event={"ID":"dd2b181d-95d7-4b7d-8fb3-383f1eafaf7b","Type":"ContainerDied","Data":"cc3c08c7316eb40aa8927545be84c55a7af0e5e3f7bcb201fdfffee266d6901f"} Jan 20 19:22:33 crc kubenswrapper[4558]: I0120 19:22:33.653762 4558 scope.go:117] "RemoveContainer" containerID="1b2d7d4918f766decf705a5fdafef58bc17fc998b48311755e84007795657f4c" Jan 20 19:22:33 crc kubenswrapper[4558]: I0120 19:22:33.653883 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-9fpcr" Jan 20 19:22:33 crc kubenswrapper[4558]: I0120 19:22:33.675028 4558 scope.go:117] "RemoveContainer" containerID="27865ba84e10d5f2679bf07a70dcba761861c8e784d2be9ab20a853574035b8f" Jan 20 19:22:33 crc kubenswrapper[4558]: I0120 19:22:33.686989 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-9fpcr"] Jan 20 19:22:33 crc kubenswrapper[4558]: I0120 19:22:33.692550 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-9fpcr"] Jan 20 19:22:33 crc kubenswrapper[4558]: I0120 19:22:33.702705 4558 scope.go:117] "RemoveContainer" containerID="a568c3bf70729fe672f05b8ed428afada2ac99024a67b9bf38d644363632ffc7" Jan 20 19:22:33 crc kubenswrapper[4558]: I0120 19:22:33.715009 4558 scope.go:117] "RemoveContainer" containerID="1b2d7d4918f766decf705a5fdafef58bc17fc998b48311755e84007795657f4c" Jan 20 19:22:33 crc kubenswrapper[4558]: E0120 19:22:33.715354 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1b2d7d4918f766decf705a5fdafef58bc17fc998b48311755e84007795657f4c\": container with ID starting with 1b2d7d4918f766decf705a5fdafef58bc17fc998b48311755e84007795657f4c not found: ID does not exist" containerID="1b2d7d4918f766decf705a5fdafef58bc17fc998b48311755e84007795657f4c" Jan 20 19:22:33 crc kubenswrapper[4558]: I0120 19:22:33.715414 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1b2d7d4918f766decf705a5fdafef58bc17fc998b48311755e84007795657f4c"} err="failed to get container status \"1b2d7d4918f766decf705a5fdafef58bc17fc998b48311755e84007795657f4c\": rpc error: code = NotFound desc = could not find container \"1b2d7d4918f766decf705a5fdafef58bc17fc998b48311755e84007795657f4c\": container with ID starting with 1b2d7d4918f766decf705a5fdafef58bc17fc998b48311755e84007795657f4c not found: ID does not exist" Jan 20 19:22:33 crc kubenswrapper[4558]: I0120 19:22:33.715451 4558 scope.go:117] "RemoveContainer" containerID="27865ba84e10d5f2679bf07a70dcba761861c8e784d2be9ab20a853574035b8f" Jan 20 19:22:33 crc kubenswrapper[4558]: E0120 19:22:33.715907 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"27865ba84e10d5f2679bf07a70dcba761861c8e784d2be9ab20a853574035b8f\": container with ID starting with 27865ba84e10d5f2679bf07a70dcba761861c8e784d2be9ab20a853574035b8f not found: ID does not exist" containerID="27865ba84e10d5f2679bf07a70dcba761861c8e784d2be9ab20a853574035b8f" Jan 20 19:22:33 crc kubenswrapper[4558]: I0120 19:22:33.715944 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"27865ba84e10d5f2679bf07a70dcba761861c8e784d2be9ab20a853574035b8f"} err="failed to get container status \"27865ba84e10d5f2679bf07a70dcba761861c8e784d2be9ab20a853574035b8f\": rpc error: code = NotFound desc = could not find container \"27865ba84e10d5f2679bf07a70dcba761861c8e784d2be9ab20a853574035b8f\": container with ID starting with 27865ba84e10d5f2679bf07a70dcba761861c8e784d2be9ab20a853574035b8f not found: ID does not exist" Jan 20 19:22:33 crc kubenswrapper[4558]: I0120 19:22:33.715973 4558 scope.go:117] "RemoveContainer" containerID="a568c3bf70729fe672f05b8ed428afada2ac99024a67b9bf38d644363632ffc7" Jan 20 19:22:33 crc kubenswrapper[4558]: E0120 19:22:33.716552 4558 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"a568c3bf70729fe672f05b8ed428afada2ac99024a67b9bf38d644363632ffc7\": container with ID starting with a568c3bf70729fe672f05b8ed428afada2ac99024a67b9bf38d644363632ffc7 not found: ID does not exist" containerID="a568c3bf70729fe672f05b8ed428afada2ac99024a67b9bf38d644363632ffc7" Jan 20 19:22:33 crc kubenswrapper[4558]: I0120 19:22:33.716590 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a568c3bf70729fe672f05b8ed428afada2ac99024a67b9bf38d644363632ffc7"} err="failed to get container status \"a568c3bf70729fe672f05b8ed428afada2ac99024a67b9bf38d644363632ffc7\": rpc error: code = NotFound desc = could not find container \"a568c3bf70729fe672f05b8ed428afada2ac99024a67b9bf38d644363632ffc7\": container with ID starting with a568c3bf70729fe672f05b8ed428afada2ac99024a67b9bf38d644363632ffc7 not found: ID does not exist" Jan 20 19:22:34 crc kubenswrapper[4558]: I0120 19:22:34.572351 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dd2b181d-95d7-4b7d-8fb3-383f1eafaf7b" path="/var/lib/kubelet/pods/dd2b181d-95d7-4b7d-8fb3-383f1eafaf7b/volumes" Jan 20 19:22:57 crc kubenswrapper[4558]: I0120 19:22:57.329412 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 19:22:57 crc kubenswrapper[4558]: I0120 19:22:57.329829 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 19:23:27 crc kubenswrapper[4558]: I0120 19:23:27.330563 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 19:23:27 crc kubenswrapper[4558]: I0120 19:23:27.332188 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 19:23:27 crc kubenswrapper[4558]: I0120 19:23:27.332390 4558 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" Jan 20 19:23:27 crc kubenswrapper[4558]: I0120 19:23:27.332997 4558 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"54aafd1463e4e36a97be0dd83a2647d2c23b657acbdf41034d7bf93d4b3d68fd"} pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 20 19:23:27 crc kubenswrapper[4558]: I0120 19:23:27.333140 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" 
podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" containerID="cri-o://54aafd1463e4e36a97be0dd83a2647d2c23b657acbdf41034d7bf93d4b3d68fd" gracePeriod=600 Jan 20 19:23:27 crc kubenswrapper[4558]: E0120 19:23:27.454415 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:23:28 crc kubenswrapper[4558]: I0120 19:23:28.006736 4558 generic.go:334] "Generic (PLEG): container finished" podID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerID="54aafd1463e4e36a97be0dd83a2647d2c23b657acbdf41034d7bf93d4b3d68fd" exitCode=0 Jan 20 19:23:28 crc kubenswrapper[4558]: I0120 19:23:28.007015 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerDied","Data":"54aafd1463e4e36a97be0dd83a2647d2c23b657acbdf41034d7bf93d4b3d68fd"} Jan 20 19:23:28 crc kubenswrapper[4558]: I0120 19:23:28.007175 4558 scope.go:117] "RemoveContainer" containerID="74a09cf01a9482e3a74e95530f462255ee83e3acfe30e90074e36f1623c24d9f" Jan 20 19:23:28 crc kubenswrapper[4558]: I0120 19:23:28.007720 4558 scope.go:117] "RemoveContainer" containerID="54aafd1463e4e36a97be0dd83a2647d2c23b657acbdf41034d7bf93d4b3d68fd" Jan 20 19:23:28 crc kubenswrapper[4558]: E0120 19:23:28.008062 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:23:40 crc kubenswrapper[4558]: I0120 19:23:40.565753 4558 scope.go:117] "RemoveContainer" containerID="54aafd1463e4e36a97be0dd83a2647d2c23b657acbdf41034d7bf93d4b3d68fd" Jan 20 19:23:40 crc kubenswrapper[4558]: E0120 19:23:40.566560 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:23:51 crc kubenswrapper[4558]: I0120 19:23:51.566697 4558 scope.go:117] "RemoveContainer" containerID="54aafd1463e4e36a97be0dd83a2647d2c23b657acbdf41034d7bf93d4b3d68fd" Jan 20 19:23:51 crc kubenswrapper[4558]: E0120 19:23:51.568392 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:24:02 crc kubenswrapper[4558]: I0120 19:24:02.566283 4558 scope.go:117] 
"RemoveContainer" containerID="54aafd1463e4e36a97be0dd83a2647d2c23b657acbdf41034d7bf93d4b3d68fd" Jan 20 19:24:02 crc kubenswrapper[4558]: E0120 19:24:02.566991 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:24:17 crc kubenswrapper[4558]: I0120 19:24:17.565992 4558 scope.go:117] "RemoveContainer" containerID="54aafd1463e4e36a97be0dd83a2647d2c23b657acbdf41034d7bf93d4b3d68fd" Jan 20 19:24:17 crc kubenswrapper[4558]: E0120 19:24:17.566733 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:24:28 crc kubenswrapper[4558]: I0120 19:24:28.566451 4558 scope.go:117] "RemoveContainer" containerID="54aafd1463e4e36a97be0dd83a2647d2c23b657acbdf41034d7bf93d4b3d68fd" Jan 20 19:24:28 crc kubenswrapper[4558]: E0120 19:24:28.567443 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:24:41 crc kubenswrapper[4558]: I0120 19:24:41.566417 4558 scope.go:117] "RemoveContainer" containerID="54aafd1463e4e36a97be0dd83a2647d2c23b657acbdf41034d7bf93d4b3d68fd" Jan 20 19:24:41 crc kubenswrapper[4558]: E0120 19:24:41.569011 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:24:54 crc kubenswrapper[4558]: I0120 19:24:54.568149 4558 scope.go:117] "RemoveContainer" containerID="54aafd1463e4e36a97be0dd83a2647d2c23b657acbdf41034d7bf93d4b3d68fd" Jan 20 19:24:54 crc kubenswrapper[4558]: E0120 19:24:54.569290 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:25:06 crc kubenswrapper[4558]: I0120 19:25:06.570507 4558 scope.go:117] "RemoveContainer" containerID="54aafd1463e4e36a97be0dd83a2647d2c23b657acbdf41034d7bf93d4b3d68fd" Jan 20 19:25:06 crc kubenswrapper[4558]: E0120 19:25:06.571645 4558 pod_workers.go:1301] "Error 
syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:25:21 crc kubenswrapper[4558]: I0120 19:25:21.565802 4558 scope.go:117] "RemoveContainer" containerID="54aafd1463e4e36a97be0dd83a2647d2c23b657acbdf41034d7bf93d4b3d68fd" Jan 20 19:25:21 crc kubenswrapper[4558]: E0120 19:25:21.566477 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:25:32 crc kubenswrapper[4558]: I0120 19:25:32.570033 4558 scope.go:117] "RemoveContainer" containerID="54aafd1463e4e36a97be0dd83a2647d2c23b657acbdf41034d7bf93d4b3d68fd" Jan 20 19:25:32 crc kubenswrapper[4558]: E0120 19:25:32.571270 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:25:33 crc kubenswrapper[4558]: I0120 19:25:33.526033 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-m888r"] Jan 20 19:25:33 crc kubenswrapper[4558]: E0120 19:25:33.526357 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd2b181d-95d7-4b7d-8fb3-383f1eafaf7b" containerName="extract-content" Jan 20 19:25:33 crc kubenswrapper[4558]: I0120 19:25:33.526371 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd2b181d-95d7-4b7d-8fb3-383f1eafaf7b" containerName="extract-content" Jan 20 19:25:33 crc kubenswrapper[4558]: E0120 19:25:33.526387 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd2b181d-95d7-4b7d-8fb3-383f1eafaf7b" containerName="extract-utilities" Jan 20 19:25:33 crc kubenswrapper[4558]: I0120 19:25:33.526393 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd2b181d-95d7-4b7d-8fb3-383f1eafaf7b" containerName="extract-utilities" Jan 20 19:25:33 crc kubenswrapper[4558]: E0120 19:25:33.526408 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd2b181d-95d7-4b7d-8fb3-383f1eafaf7b" containerName="registry-server" Jan 20 19:25:33 crc kubenswrapper[4558]: I0120 19:25:33.526413 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd2b181d-95d7-4b7d-8fb3-383f1eafaf7b" containerName="registry-server" Jan 20 19:25:33 crc kubenswrapper[4558]: I0120 19:25:33.526519 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd2b181d-95d7-4b7d-8fb3-383f1eafaf7b" containerName="registry-server" Jan 20 19:25:33 crc kubenswrapper[4558]: I0120 19:25:33.527393 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-m888r" Jan 20 19:25:33 crc kubenswrapper[4558]: I0120 19:25:33.537451 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-m888r"] Jan 20 19:25:33 crc kubenswrapper[4558]: I0120 19:25:33.564500 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-crh2q\" (UniqueName: \"kubernetes.io/projected/54cf4b62-4d17-4270-946e-71a2a9f0d365-kube-api-access-crh2q\") pod \"redhat-operators-m888r\" (UID: \"54cf4b62-4d17-4270-946e-71a2a9f0d365\") " pod="openshift-marketplace/redhat-operators-m888r" Jan 20 19:25:33 crc kubenswrapper[4558]: I0120 19:25:33.564540 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/54cf4b62-4d17-4270-946e-71a2a9f0d365-utilities\") pod \"redhat-operators-m888r\" (UID: \"54cf4b62-4d17-4270-946e-71a2a9f0d365\") " pod="openshift-marketplace/redhat-operators-m888r" Jan 20 19:25:33 crc kubenswrapper[4558]: I0120 19:25:33.564586 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/54cf4b62-4d17-4270-946e-71a2a9f0d365-catalog-content\") pod \"redhat-operators-m888r\" (UID: \"54cf4b62-4d17-4270-946e-71a2a9f0d365\") " pod="openshift-marketplace/redhat-operators-m888r" Jan 20 19:25:33 crc kubenswrapper[4558]: I0120 19:25:33.665724 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-crh2q\" (UniqueName: \"kubernetes.io/projected/54cf4b62-4d17-4270-946e-71a2a9f0d365-kube-api-access-crh2q\") pod \"redhat-operators-m888r\" (UID: \"54cf4b62-4d17-4270-946e-71a2a9f0d365\") " pod="openshift-marketplace/redhat-operators-m888r" Jan 20 19:25:33 crc kubenswrapper[4558]: I0120 19:25:33.665787 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/54cf4b62-4d17-4270-946e-71a2a9f0d365-utilities\") pod \"redhat-operators-m888r\" (UID: \"54cf4b62-4d17-4270-946e-71a2a9f0d365\") " pod="openshift-marketplace/redhat-operators-m888r" Jan 20 19:25:33 crc kubenswrapper[4558]: I0120 19:25:33.665862 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/54cf4b62-4d17-4270-946e-71a2a9f0d365-catalog-content\") pod \"redhat-operators-m888r\" (UID: \"54cf4b62-4d17-4270-946e-71a2a9f0d365\") " pod="openshift-marketplace/redhat-operators-m888r" Jan 20 19:25:33 crc kubenswrapper[4558]: I0120 19:25:33.666500 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/54cf4b62-4d17-4270-946e-71a2a9f0d365-catalog-content\") pod \"redhat-operators-m888r\" (UID: \"54cf4b62-4d17-4270-946e-71a2a9f0d365\") " pod="openshift-marketplace/redhat-operators-m888r" Jan 20 19:25:33 crc kubenswrapper[4558]: I0120 19:25:33.666627 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/54cf4b62-4d17-4270-946e-71a2a9f0d365-utilities\") pod \"redhat-operators-m888r\" (UID: \"54cf4b62-4d17-4270-946e-71a2a9f0d365\") " pod="openshift-marketplace/redhat-operators-m888r" Jan 20 19:25:33 crc kubenswrapper[4558]: I0120 19:25:33.689956 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-crh2q\" (UniqueName: \"kubernetes.io/projected/54cf4b62-4d17-4270-946e-71a2a9f0d365-kube-api-access-crh2q\") pod \"redhat-operators-m888r\" (UID: \"54cf4b62-4d17-4270-946e-71a2a9f0d365\") " pod="openshift-marketplace/redhat-operators-m888r" Jan 20 19:25:33 crc kubenswrapper[4558]: I0120 19:25:33.845713 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-m888r" Jan 20 19:25:34 crc kubenswrapper[4558]: I0120 19:25:34.240653 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-m888r"] Jan 20 19:25:34 crc kubenswrapper[4558]: I0120 19:25:34.914931 4558 generic.go:334] "Generic (PLEG): container finished" podID="54cf4b62-4d17-4270-946e-71a2a9f0d365" containerID="45549398c410ece63f1eac88f8838d918370e0a66f2768685edde3a73497c3d2" exitCode=0 Jan 20 19:25:34 crc kubenswrapper[4558]: I0120 19:25:34.915020 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m888r" event={"ID":"54cf4b62-4d17-4270-946e-71a2a9f0d365","Type":"ContainerDied","Data":"45549398c410ece63f1eac88f8838d918370e0a66f2768685edde3a73497c3d2"} Jan 20 19:25:34 crc kubenswrapper[4558]: I0120 19:25:34.915430 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m888r" event={"ID":"54cf4b62-4d17-4270-946e-71a2a9f0d365","Type":"ContainerStarted","Data":"84d95fb061e34b70ac37cd943fb4510d593a8a5dde88a51a02f0cb1161dbd35b"} Jan 20 19:25:34 crc kubenswrapper[4558]: I0120 19:25:34.917473 4558 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 20 19:25:36 crc kubenswrapper[4558]: I0120 19:25:36.944187 4558 generic.go:334] "Generic (PLEG): container finished" podID="54cf4b62-4d17-4270-946e-71a2a9f0d365" containerID="294e02c99439bfef09ac0b0d017466b1627b50d9e0976077a1489dbbbdef2e8b" exitCode=0 Jan 20 19:25:36 crc kubenswrapper[4558]: I0120 19:25:36.944587 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m888r" event={"ID":"54cf4b62-4d17-4270-946e-71a2a9f0d365","Type":"ContainerDied","Data":"294e02c99439bfef09ac0b0d017466b1627b50d9e0976077a1489dbbbdef2e8b"} Jan 20 19:25:37 crc kubenswrapper[4558]: I0120 19:25:37.952833 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m888r" event={"ID":"54cf4b62-4d17-4270-946e-71a2a9f0d365","Type":"ContainerStarted","Data":"82cbff07872ca565b7c39b26f9fbf0136d47aacf0f3ee9114ee9c76ef529ad35"} Jan 20 19:25:37 crc kubenswrapper[4558]: I0120 19:25:37.972710 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-m888r" podStartSLOduration=2.44810027 podStartE2EDuration="4.972690675s" podCreationTimestamp="2026-01-20 19:25:33 +0000 UTC" firstStartedPulling="2026-01-20 19:25:34.917226103 +0000 UTC m=+9828.677564070" lastFinishedPulling="2026-01-20 19:25:37.441816507 +0000 UTC m=+9831.202154475" observedRunningTime="2026-01-20 19:25:37.968723075 +0000 UTC m=+9831.729061042" watchObservedRunningTime="2026-01-20 19:25:37.972690675 +0000 UTC m=+9831.733028642" Jan 20 19:25:43 crc kubenswrapper[4558]: I0120 19:25:43.846413 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-m888r" Jan 20 19:25:43 crc kubenswrapper[4558]: I0120 19:25:43.846691 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-marketplace/redhat-operators-m888r" Jan 20 19:25:43 crc kubenswrapper[4558]: I0120 19:25:43.883106 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-m888r" Jan 20 19:25:44 crc kubenswrapper[4558]: I0120 19:25:44.026332 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-m888r" Jan 20 19:25:44 crc kubenswrapper[4558]: I0120 19:25:44.113237 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-m888r"] Jan 20 19:25:46 crc kubenswrapper[4558]: I0120 19:25:46.003847 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-m888r" podUID="54cf4b62-4d17-4270-946e-71a2a9f0d365" containerName="registry-server" containerID="cri-o://82cbff07872ca565b7c39b26f9fbf0136d47aacf0f3ee9114ee9c76ef529ad35" gracePeriod=2 Jan 20 19:25:46 crc kubenswrapper[4558]: I0120 19:25:46.348955 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-m888r" Jan 20 19:25:46 crc kubenswrapper[4558]: I0120 19:25:46.529687 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/54cf4b62-4d17-4270-946e-71a2a9f0d365-utilities\") pod \"54cf4b62-4d17-4270-946e-71a2a9f0d365\" (UID: \"54cf4b62-4d17-4270-946e-71a2a9f0d365\") " Jan 20 19:25:46 crc kubenswrapper[4558]: I0120 19:25:46.529782 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/54cf4b62-4d17-4270-946e-71a2a9f0d365-catalog-content\") pod \"54cf4b62-4d17-4270-946e-71a2a9f0d365\" (UID: \"54cf4b62-4d17-4270-946e-71a2a9f0d365\") " Jan 20 19:25:46 crc kubenswrapper[4558]: I0120 19:25:46.529849 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-crh2q\" (UniqueName: \"kubernetes.io/projected/54cf4b62-4d17-4270-946e-71a2a9f0d365-kube-api-access-crh2q\") pod \"54cf4b62-4d17-4270-946e-71a2a9f0d365\" (UID: \"54cf4b62-4d17-4270-946e-71a2a9f0d365\") " Jan 20 19:25:46 crc kubenswrapper[4558]: I0120 19:25:46.530694 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/54cf4b62-4d17-4270-946e-71a2a9f0d365-utilities" (OuterVolumeSpecName: "utilities") pod "54cf4b62-4d17-4270-946e-71a2a9f0d365" (UID: "54cf4b62-4d17-4270-946e-71a2a9f0d365"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 19:25:46 crc kubenswrapper[4558]: I0120 19:25:46.536298 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/54cf4b62-4d17-4270-946e-71a2a9f0d365-kube-api-access-crh2q" (OuterVolumeSpecName: "kube-api-access-crh2q") pod "54cf4b62-4d17-4270-946e-71a2a9f0d365" (UID: "54cf4b62-4d17-4270-946e-71a2a9f0d365"). InnerVolumeSpecName "kube-api-access-crh2q". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 19:25:46 crc kubenswrapper[4558]: I0120 19:25:46.632135 4558 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/54cf4b62-4d17-4270-946e-71a2a9f0d365-utilities\") on node \"crc\" DevicePath \"\"" Jan 20 19:25:46 crc kubenswrapper[4558]: I0120 19:25:46.632587 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-crh2q\" (UniqueName: \"kubernetes.io/projected/54cf4b62-4d17-4270-946e-71a2a9f0d365-kube-api-access-crh2q\") on node \"crc\" DevicePath \"\"" Jan 20 19:25:46 crc kubenswrapper[4558]: I0120 19:25:46.648727 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/54cf4b62-4d17-4270-946e-71a2a9f0d365-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "54cf4b62-4d17-4270-946e-71a2a9f0d365" (UID: "54cf4b62-4d17-4270-946e-71a2a9f0d365"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 19:25:46 crc kubenswrapper[4558]: I0120 19:25:46.733489 4558 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/54cf4b62-4d17-4270-946e-71a2a9f0d365-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 20 19:25:47 crc kubenswrapper[4558]: I0120 19:25:47.011273 4558 generic.go:334] "Generic (PLEG): container finished" podID="54cf4b62-4d17-4270-946e-71a2a9f0d365" containerID="82cbff07872ca565b7c39b26f9fbf0136d47aacf0f3ee9114ee9c76ef529ad35" exitCode=0 Jan 20 19:25:47 crc kubenswrapper[4558]: I0120 19:25:47.011335 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-m888r" Jan 20 19:25:47 crc kubenswrapper[4558]: I0120 19:25:47.011327 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m888r" event={"ID":"54cf4b62-4d17-4270-946e-71a2a9f0d365","Type":"ContainerDied","Data":"82cbff07872ca565b7c39b26f9fbf0136d47aacf0f3ee9114ee9c76ef529ad35"} Jan 20 19:25:47 crc kubenswrapper[4558]: I0120 19:25:47.011467 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m888r" event={"ID":"54cf4b62-4d17-4270-946e-71a2a9f0d365","Type":"ContainerDied","Data":"84d95fb061e34b70ac37cd943fb4510d593a8a5dde88a51a02f0cb1161dbd35b"} Jan 20 19:25:47 crc kubenswrapper[4558]: I0120 19:25:47.011488 4558 scope.go:117] "RemoveContainer" containerID="82cbff07872ca565b7c39b26f9fbf0136d47aacf0f3ee9114ee9c76ef529ad35" Jan 20 19:25:47 crc kubenswrapper[4558]: I0120 19:25:47.035903 4558 scope.go:117] "RemoveContainer" containerID="294e02c99439bfef09ac0b0d017466b1627b50d9e0976077a1489dbbbdef2e8b" Jan 20 19:25:47 crc kubenswrapper[4558]: I0120 19:25:47.080130 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-m888r"] Jan 20 19:25:47 crc kubenswrapper[4558]: I0120 19:25:47.081221 4558 scope.go:117] "RemoveContainer" containerID="45549398c410ece63f1eac88f8838d918370e0a66f2768685edde3a73497c3d2" Jan 20 19:25:47 crc kubenswrapper[4558]: I0120 19:25:47.087064 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-m888r"] Jan 20 19:25:47 crc kubenswrapper[4558]: I0120 19:25:47.102058 4558 scope.go:117] "RemoveContainer" containerID="82cbff07872ca565b7c39b26f9fbf0136d47aacf0f3ee9114ee9c76ef529ad35" Jan 20 19:25:47 crc kubenswrapper[4558]: E0120 19:25:47.103143 4558 log.go:32] "ContainerStatus 
from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"82cbff07872ca565b7c39b26f9fbf0136d47aacf0f3ee9114ee9c76ef529ad35\": container with ID starting with 82cbff07872ca565b7c39b26f9fbf0136d47aacf0f3ee9114ee9c76ef529ad35 not found: ID does not exist" containerID="82cbff07872ca565b7c39b26f9fbf0136d47aacf0f3ee9114ee9c76ef529ad35" Jan 20 19:25:47 crc kubenswrapper[4558]: I0120 19:25:47.103231 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"82cbff07872ca565b7c39b26f9fbf0136d47aacf0f3ee9114ee9c76ef529ad35"} err="failed to get container status \"82cbff07872ca565b7c39b26f9fbf0136d47aacf0f3ee9114ee9c76ef529ad35\": rpc error: code = NotFound desc = could not find container \"82cbff07872ca565b7c39b26f9fbf0136d47aacf0f3ee9114ee9c76ef529ad35\": container with ID starting with 82cbff07872ca565b7c39b26f9fbf0136d47aacf0f3ee9114ee9c76ef529ad35 not found: ID does not exist" Jan 20 19:25:47 crc kubenswrapper[4558]: I0120 19:25:47.103265 4558 scope.go:117] "RemoveContainer" containerID="294e02c99439bfef09ac0b0d017466b1627b50d9e0976077a1489dbbbdef2e8b" Jan 20 19:25:47 crc kubenswrapper[4558]: E0120 19:25:47.103784 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"294e02c99439bfef09ac0b0d017466b1627b50d9e0976077a1489dbbbdef2e8b\": container with ID starting with 294e02c99439bfef09ac0b0d017466b1627b50d9e0976077a1489dbbbdef2e8b not found: ID does not exist" containerID="294e02c99439bfef09ac0b0d017466b1627b50d9e0976077a1489dbbbdef2e8b" Jan 20 19:25:47 crc kubenswrapper[4558]: I0120 19:25:47.103862 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"294e02c99439bfef09ac0b0d017466b1627b50d9e0976077a1489dbbbdef2e8b"} err="failed to get container status \"294e02c99439bfef09ac0b0d017466b1627b50d9e0976077a1489dbbbdef2e8b\": rpc error: code = NotFound desc = could not find container \"294e02c99439bfef09ac0b0d017466b1627b50d9e0976077a1489dbbbdef2e8b\": container with ID starting with 294e02c99439bfef09ac0b0d017466b1627b50d9e0976077a1489dbbbdef2e8b not found: ID does not exist" Jan 20 19:25:47 crc kubenswrapper[4558]: I0120 19:25:47.103893 4558 scope.go:117] "RemoveContainer" containerID="45549398c410ece63f1eac88f8838d918370e0a66f2768685edde3a73497c3d2" Jan 20 19:25:47 crc kubenswrapper[4558]: E0120 19:25:47.104365 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"45549398c410ece63f1eac88f8838d918370e0a66f2768685edde3a73497c3d2\": container with ID starting with 45549398c410ece63f1eac88f8838d918370e0a66f2768685edde3a73497c3d2 not found: ID does not exist" containerID="45549398c410ece63f1eac88f8838d918370e0a66f2768685edde3a73497c3d2" Jan 20 19:25:47 crc kubenswrapper[4558]: I0120 19:25:47.104391 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"45549398c410ece63f1eac88f8838d918370e0a66f2768685edde3a73497c3d2"} err="failed to get container status \"45549398c410ece63f1eac88f8838d918370e0a66f2768685edde3a73497c3d2\": rpc error: code = NotFound desc = could not find container \"45549398c410ece63f1eac88f8838d918370e0a66f2768685edde3a73497c3d2\": container with ID starting with 45549398c410ece63f1eac88f8838d918370e0a66f2768685edde3a73497c3d2 not found: ID does not exist" Jan 20 19:25:47 crc kubenswrapper[4558]: E0120 19:25:47.149460 4558 cadvisor_stats_provider.go:516] "Partial failure 
issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod54cf4b62_4d17_4270_946e_71a2a9f0d365.slice/crio-84d95fb061e34b70ac37cd943fb4510d593a8a5dde88a51a02f0cb1161dbd35b\": RecentStats: unable to find data in memory cache]" Jan 20 19:25:47 crc kubenswrapper[4558]: I0120 19:25:47.565681 4558 scope.go:117] "RemoveContainer" containerID="54aafd1463e4e36a97be0dd83a2647d2c23b657acbdf41034d7bf93d4b3d68fd" Jan 20 19:25:47 crc kubenswrapper[4558]: E0120 19:25:47.566002 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:25:48 crc kubenswrapper[4558]: I0120 19:25:48.574106 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="54cf4b62-4d17-4270-946e-71a2a9f0d365" path="/var/lib/kubelet/pods/54cf4b62-4d17-4270-946e-71a2a9f0d365/volumes" Jan 20 19:26:00 crc kubenswrapper[4558]: I0120 19:26:00.568488 4558 scope.go:117] "RemoveContainer" containerID="54aafd1463e4e36a97be0dd83a2647d2c23b657acbdf41034d7bf93d4b3d68fd" Jan 20 19:26:00 crc kubenswrapper[4558]: E0120 19:26:00.569283 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:26:11 crc kubenswrapper[4558]: I0120 19:26:11.565727 4558 scope.go:117] "RemoveContainer" containerID="54aafd1463e4e36a97be0dd83a2647d2c23b657acbdf41034d7bf93d4b3d68fd" Jan 20 19:26:11 crc kubenswrapper[4558]: E0120 19:26:11.566697 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:26:24 crc kubenswrapper[4558]: I0120 19:26:24.565877 4558 scope.go:117] "RemoveContainer" containerID="54aafd1463e4e36a97be0dd83a2647d2c23b657acbdf41034d7bf93d4b3d68fd" Jan 20 19:26:24 crc kubenswrapper[4558]: E0120 19:26:24.566762 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:26:36 crc kubenswrapper[4558]: I0120 19:26:36.569957 4558 scope.go:117] "RemoveContainer" containerID="54aafd1463e4e36a97be0dd83a2647d2c23b657acbdf41034d7bf93d4b3d68fd" Jan 20 19:26:36 crc kubenswrapper[4558]: E0120 19:26:36.570662 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to 
\"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:26:47 crc kubenswrapper[4558]: I0120 19:26:47.566280 4558 scope.go:117] "RemoveContainer" containerID="54aafd1463e4e36a97be0dd83a2647d2c23b657acbdf41034d7bf93d4b3d68fd" Jan 20 19:26:47 crc kubenswrapper[4558]: E0120 19:26:47.567138 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:26:58 crc kubenswrapper[4558]: I0120 19:26:58.566560 4558 scope.go:117] "RemoveContainer" containerID="54aafd1463e4e36a97be0dd83a2647d2c23b657acbdf41034d7bf93d4b3d68fd" Jan 20 19:26:58 crc kubenswrapper[4558]: E0120 19:26:58.567558 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:27:11 crc kubenswrapper[4558]: I0120 19:27:11.566033 4558 scope.go:117] "RemoveContainer" containerID="54aafd1463e4e36a97be0dd83a2647d2c23b657acbdf41034d7bf93d4b3d68fd" Jan 20 19:27:11 crc kubenswrapper[4558]: E0120 19:27:11.566913 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:27:26 crc kubenswrapper[4558]: I0120 19:27:26.572112 4558 scope.go:117] "RemoveContainer" containerID="54aafd1463e4e36a97be0dd83a2647d2c23b657acbdf41034d7bf93d4b3d68fd" Jan 20 19:27:26 crc kubenswrapper[4558]: E0120 19:27:26.574776 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:27:38 crc kubenswrapper[4558]: I0120 19:27:38.566802 4558 scope.go:117] "RemoveContainer" containerID="54aafd1463e4e36a97be0dd83a2647d2c23b657acbdf41034d7bf93d4b3d68fd" Jan 20 19:27:38 crc kubenswrapper[4558]: E0120 19:27:38.567921 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:27:53 crc kubenswrapper[4558]: I0120 19:27:53.567218 4558 scope.go:117] "RemoveContainer" containerID="54aafd1463e4e36a97be0dd83a2647d2c23b657acbdf41034d7bf93d4b3d68fd" Jan 20 19:27:53 crc kubenswrapper[4558]: E0120 19:27:53.568212 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:28:01 crc kubenswrapper[4558]: I0120 19:28:01.667883 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-7m72g"] Jan 20 19:28:01 crc kubenswrapper[4558]: E0120 19:28:01.668979 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54cf4b62-4d17-4270-946e-71a2a9f0d365" containerName="extract-utilities" Jan 20 19:28:01 crc kubenswrapper[4558]: I0120 19:28:01.668999 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="54cf4b62-4d17-4270-946e-71a2a9f0d365" containerName="extract-utilities" Jan 20 19:28:01 crc kubenswrapper[4558]: E0120 19:28:01.669034 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54cf4b62-4d17-4270-946e-71a2a9f0d365" containerName="registry-server" Jan 20 19:28:01 crc kubenswrapper[4558]: I0120 19:28:01.669047 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="54cf4b62-4d17-4270-946e-71a2a9f0d365" containerName="registry-server" Jan 20 19:28:01 crc kubenswrapper[4558]: E0120 19:28:01.669058 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54cf4b62-4d17-4270-946e-71a2a9f0d365" containerName="extract-content" Jan 20 19:28:01 crc kubenswrapper[4558]: I0120 19:28:01.669065 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="54cf4b62-4d17-4270-946e-71a2a9f0d365" containerName="extract-content" Jan 20 19:28:01 crc kubenswrapper[4558]: I0120 19:28:01.669242 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="54cf4b62-4d17-4270-946e-71a2a9f0d365" containerName="registry-server" Jan 20 19:28:01 crc kubenswrapper[4558]: I0120 19:28:01.670362 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7m72g" Jan 20 19:28:01 crc kubenswrapper[4558]: I0120 19:28:01.677353 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7m72g"] Jan 20 19:28:01 crc kubenswrapper[4558]: I0120 19:28:01.683991 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9bsmg\" (UniqueName: \"kubernetes.io/projected/8156f1d3-9f22-4afa-a956-5eb65824eed2-kube-api-access-9bsmg\") pod \"redhat-marketplace-7m72g\" (UID: \"8156f1d3-9f22-4afa-a956-5eb65824eed2\") " pod="openshift-marketplace/redhat-marketplace-7m72g" Jan 20 19:28:01 crc kubenswrapper[4558]: I0120 19:28:01.684063 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8156f1d3-9f22-4afa-a956-5eb65824eed2-catalog-content\") pod \"redhat-marketplace-7m72g\" (UID: \"8156f1d3-9f22-4afa-a956-5eb65824eed2\") " pod="openshift-marketplace/redhat-marketplace-7m72g" Jan 20 19:28:01 crc kubenswrapper[4558]: I0120 19:28:01.684096 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8156f1d3-9f22-4afa-a956-5eb65824eed2-utilities\") pod \"redhat-marketplace-7m72g\" (UID: \"8156f1d3-9f22-4afa-a956-5eb65824eed2\") " pod="openshift-marketplace/redhat-marketplace-7m72g" Jan 20 19:28:01 crc kubenswrapper[4558]: I0120 19:28:01.785718 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8156f1d3-9f22-4afa-a956-5eb65824eed2-catalog-content\") pod \"redhat-marketplace-7m72g\" (UID: \"8156f1d3-9f22-4afa-a956-5eb65824eed2\") " pod="openshift-marketplace/redhat-marketplace-7m72g" Jan 20 19:28:01 crc kubenswrapper[4558]: I0120 19:28:01.785793 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8156f1d3-9f22-4afa-a956-5eb65824eed2-utilities\") pod \"redhat-marketplace-7m72g\" (UID: \"8156f1d3-9f22-4afa-a956-5eb65824eed2\") " pod="openshift-marketplace/redhat-marketplace-7m72g" Jan 20 19:28:01 crc kubenswrapper[4558]: I0120 19:28:01.785863 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9bsmg\" (UniqueName: \"kubernetes.io/projected/8156f1d3-9f22-4afa-a956-5eb65824eed2-kube-api-access-9bsmg\") pod \"redhat-marketplace-7m72g\" (UID: \"8156f1d3-9f22-4afa-a956-5eb65824eed2\") " pod="openshift-marketplace/redhat-marketplace-7m72g" Jan 20 19:28:01 crc kubenswrapper[4558]: I0120 19:28:01.786560 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8156f1d3-9f22-4afa-a956-5eb65824eed2-utilities\") pod \"redhat-marketplace-7m72g\" (UID: \"8156f1d3-9f22-4afa-a956-5eb65824eed2\") " pod="openshift-marketplace/redhat-marketplace-7m72g" Jan 20 19:28:01 crc kubenswrapper[4558]: I0120 19:28:01.786560 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8156f1d3-9f22-4afa-a956-5eb65824eed2-catalog-content\") pod \"redhat-marketplace-7m72g\" (UID: \"8156f1d3-9f22-4afa-a956-5eb65824eed2\") " pod="openshift-marketplace/redhat-marketplace-7m72g" Jan 20 19:28:01 crc kubenswrapper[4558]: I0120 19:28:01.814069 4558 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-9bsmg\" (UniqueName: \"kubernetes.io/projected/8156f1d3-9f22-4afa-a956-5eb65824eed2-kube-api-access-9bsmg\") pod \"redhat-marketplace-7m72g\" (UID: \"8156f1d3-9f22-4afa-a956-5eb65824eed2\") " pod="openshift-marketplace/redhat-marketplace-7m72g" Jan 20 19:28:01 crc kubenswrapper[4558]: I0120 19:28:01.987386 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7m72g" Jan 20 19:28:02 crc kubenswrapper[4558]: I0120 19:28:02.380659 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7m72g"] Jan 20 19:28:03 crc kubenswrapper[4558]: I0120 19:28:03.018750 4558 generic.go:334] "Generic (PLEG): container finished" podID="8156f1d3-9f22-4afa-a956-5eb65824eed2" containerID="4ddc1f551362ba571af1c4383c22ac94fa91c0291bab280c7a26c0b81a31964c" exitCode=0 Jan 20 19:28:03 crc kubenswrapper[4558]: I0120 19:28:03.018909 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7m72g" event={"ID":"8156f1d3-9f22-4afa-a956-5eb65824eed2","Type":"ContainerDied","Data":"4ddc1f551362ba571af1c4383c22ac94fa91c0291bab280c7a26c0b81a31964c"} Jan 20 19:28:03 crc kubenswrapper[4558]: I0120 19:28:03.031136 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7m72g" event={"ID":"8156f1d3-9f22-4afa-a956-5eb65824eed2","Type":"ContainerStarted","Data":"d801fc5fe50a686999ada58dcec7c62de15f0c773ceacb434eab55332bda7e83"} Jan 20 19:28:04 crc kubenswrapper[4558]: I0120 19:28:04.033760 4558 generic.go:334] "Generic (PLEG): container finished" podID="8156f1d3-9f22-4afa-a956-5eb65824eed2" containerID="a87a191c03f9742b8e532289f704b0fbe2724d194f5e3a70005e83caf4a6c471" exitCode=0 Jan 20 19:28:04 crc kubenswrapper[4558]: I0120 19:28:04.033824 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7m72g" event={"ID":"8156f1d3-9f22-4afa-a956-5eb65824eed2","Type":"ContainerDied","Data":"a87a191c03f9742b8e532289f704b0fbe2724d194f5e3a70005e83caf4a6c471"} Jan 20 19:28:05 crc kubenswrapper[4558]: I0120 19:28:05.042395 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7m72g" event={"ID":"8156f1d3-9f22-4afa-a956-5eb65824eed2","Type":"ContainerStarted","Data":"b3af52b1de510ea22850eaf38bd8400803e57041501d5bbc88fc044abe4f3551"} Jan 20 19:28:05 crc kubenswrapper[4558]: I0120 19:28:05.061310 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-7m72g" podStartSLOduration=2.476549931 podStartE2EDuration="4.061291985s" podCreationTimestamp="2026-01-20 19:28:01 +0000 UTC" firstStartedPulling="2026-01-20 19:28:03.020963118 +0000 UTC m=+9976.781301085" lastFinishedPulling="2026-01-20 19:28:04.605705162 +0000 UTC m=+9978.366043139" observedRunningTime="2026-01-20 19:28:05.058489657 +0000 UTC m=+9978.818827624" watchObservedRunningTime="2026-01-20 19:28:05.061291985 +0000 UTC m=+9978.821629953" Jan 20 19:28:06 crc kubenswrapper[4558]: I0120 19:28:06.568556 4558 scope.go:117] "RemoveContainer" containerID="54aafd1463e4e36a97be0dd83a2647d2c23b657acbdf41034d7bf93d4b3d68fd" Jan 20 19:28:06 crc kubenswrapper[4558]: E0120 19:28:06.569193 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:28:11 crc kubenswrapper[4558]: I0120 19:28:11.988339 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-7m72g" Jan 20 19:28:11 crc kubenswrapper[4558]: I0120 19:28:11.989679 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-7m72g" Jan 20 19:28:12 crc kubenswrapper[4558]: I0120 19:28:12.027612 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-7m72g" Jan 20 19:28:12 crc kubenswrapper[4558]: I0120 19:28:12.124744 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-7m72g" Jan 20 19:28:12 crc kubenswrapper[4558]: I0120 19:28:12.258311 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-7m72g"] Jan 20 19:28:14 crc kubenswrapper[4558]: I0120 19:28:14.106485 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-7m72g" podUID="8156f1d3-9f22-4afa-a956-5eb65824eed2" containerName="registry-server" containerID="cri-o://b3af52b1de510ea22850eaf38bd8400803e57041501d5bbc88fc044abe4f3551" gracePeriod=2 Jan 20 19:28:14 crc kubenswrapper[4558]: I0120 19:28:14.451429 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7m72g" Jan 20 19:28:14 crc kubenswrapper[4558]: I0120 19:28:14.459105 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8156f1d3-9f22-4afa-a956-5eb65824eed2-utilities\") pod \"8156f1d3-9f22-4afa-a956-5eb65824eed2\" (UID: \"8156f1d3-9f22-4afa-a956-5eb65824eed2\") " Jan 20 19:28:14 crc kubenswrapper[4558]: I0120 19:28:14.459185 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9bsmg\" (UniqueName: \"kubernetes.io/projected/8156f1d3-9f22-4afa-a956-5eb65824eed2-kube-api-access-9bsmg\") pod \"8156f1d3-9f22-4afa-a956-5eb65824eed2\" (UID: \"8156f1d3-9f22-4afa-a956-5eb65824eed2\") " Jan 20 19:28:14 crc kubenswrapper[4558]: I0120 19:28:14.459373 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8156f1d3-9f22-4afa-a956-5eb65824eed2-catalog-content\") pod \"8156f1d3-9f22-4afa-a956-5eb65824eed2\" (UID: \"8156f1d3-9f22-4afa-a956-5eb65824eed2\") " Jan 20 19:28:14 crc kubenswrapper[4558]: I0120 19:28:14.463150 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8156f1d3-9f22-4afa-a956-5eb65824eed2-utilities" (OuterVolumeSpecName: "utilities") pod "8156f1d3-9f22-4afa-a956-5eb65824eed2" (UID: "8156f1d3-9f22-4afa-a956-5eb65824eed2"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 19:28:14 crc kubenswrapper[4558]: I0120 19:28:14.471929 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8156f1d3-9f22-4afa-a956-5eb65824eed2-kube-api-access-9bsmg" (OuterVolumeSpecName: "kube-api-access-9bsmg") pod "8156f1d3-9f22-4afa-a956-5eb65824eed2" (UID: "8156f1d3-9f22-4afa-a956-5eb65824eed2"). InnerVolumeSpecName "kube-api-access-9bsmg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 19:28:14 crc kubenswrapper[4558]: I0120 19:28:14.490154 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8156f1d3-9f22-4afa-a956-5eb65824eed2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8156f1d3-9f22-4afa-a956-5eb65824eed2" (UID: "8156f1d3-9f22-4afa-a956-5eb65824eed2"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 19:28:14 crc kubenswrapper[4558]: I0120 19:28:14.561050 4558 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8156f1d3-9f22-4afa-a956-5eb65824eed2-utilities\") on node \"crc\" DevicePath \"\"" Jan 20 19:28:14 crc kubenswrapper[4558]: I0120 19:28:14.561082 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9bsmg\" (UniqueName: \"kubernetes.io/projected/8156f1d3-9f22-4afa-a956-5eb65824eed2-kube-api-access-9bsmg\") on node \"crc\" DevicePath \"\"" Jan 20 19:28:14 crc kubenswrapper[4558]: I0120 19:28:14.561095 4558 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8156f1d3-9f22-4afa-a956-5eb65824eed2-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 20 19:28:15 crc kubenswrapper[4558]: I0120 19:28:15.113174 4558 generic.go:334] "Generic (PLEG): container finished" podID="8156f1d3-9f22-4afa-a956-5eb65824eed2" containerID="b3af52b1de510ea22850eaf38bd8400803e57041501d5bbc88fc044abe4f3551" exitCode=0 Jan 20 19:28:15 crc kubenswrapper[4558]: I0120 19:28:15.113224 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7m72g" event={"ID":"8156f1d3-9f22-4afa-a956-5eb65824eed2","Type":"ContainerDied","Data":"b3af52b1de510ea22850eaf38bd8400803e57041501d5bbc88fc044abe4f3551"} Jan 20 19:28:15 crc kubenswrapper[4558]: I0120 19:28:15.113255 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7m72g" event={"ID":"8156f1d3-9f22-4afa-a956-5eb65824eed2","Type":"ContainerDied","Data":"d801fc5fe50a686999ada58dcec7c62de15f0c773ceacb434eab55332bda7e83"} Jan 20 19:28:15 crc kubenswrapper[4558]: I0120 19:28:15.113278 4558 scope.go:117] "RemoveContainer" containerID="b3af52b1de510ea22850eaf38bd8400803e57041501d5bbc88fc044abe4f3551" Jan 20 19:28:15 crc kubenswrapper[4558]: I0120 19:28:15.113395 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7m72g" Jan 20 19:28:15 crc kubenswrapper[4558]: I0120 19:28:15.133123 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-7m72g"] Jan 20 19:28:15 crc kubenswrapper[4558]: I0120 19:28:15.133909 4558 scope.go:117] "RemoveContainer" containerID="a87a191c03f9742b8e532289f704b0fbe2724d194f5e3a70005e83caf4a6c471" Jan 20 19:28:15 crc kubenswrapper[4558]: I0120 19:28:15.148158 4558 scope.go:117] "RemoveContainer" containerID="4ddc1f551362ba571af1c4383c22ac94fa91c0291bab280c7a26c0b81a31964c" Jan 20 19:28:15 crc kubenswrapper[4558]: I0120 19:28:15.160324 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-7m72g"] Jan 20 19:28:15 crc kubenswrapper[4558]: I0120 19:28:15.185243 4558 scope.go:117] "RemoveContainer" containerID="b3af52b1de510ea22850eaf38bd8400803e57041501d5bbc88fc044abe4f3551" Jan 20 19:28:15 crc kubenswrapper[4558]: E0120 19:28:15.186071 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b3af52b1de510ea22850eaf38bd8400803e57041501d5bbc88fc044abe4f3551\": container with ID starting with b3af52b1de510ea22850eaf38bd8400803e57041501d5bbc88fc044abe4f3551 not found: ID does not exist" containerID="b3af52b1de510ea22850eaf38bd8400803e57041501d5bbc88fc044abe4f3551" Jan 20 19:28:15 crc kubenswrapper[4558]: I0120 19:28:15.186111 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b3af52b1de510ea22850eaf38bd8400803e57041501d5bbc88fc044abe4f3551"} err="failed to get container status \"b3af52b1de510ea22850eaf38bd8400803e57041501d5bbc88fc044abe4f3551\": rpc error: code = NotFound desc = could not find container \"b3af52b1de510ea22850eaf38bd8400803e57041501d5bbc88fc044abe4f3551\": container with ID starting with b3af52b1de510ea22850eaf38bd8400803e57041501d5bbc88fc044abe4f3551 not found: ID does not exist" Jan 20 19:28:15 crc kubenswrapper[4558]: I0120 19:28:15.186141 4558 scope.go:117] "RemoveContainer" containerID="a87a191c03f9742b8e532289f704b0fbe2724d194f5e3a70005e83caf4a6c471" Jan 20 19:28:15 crc kubenswrapper[4558]: E0120 19:28:15.186432 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a87a191c03f9742b8e532289f704b0fbe2724d194f5e3a70005e83caf4a6c471\": container with ID starting with a87a191c03f9742b8e532289f704b0fbe2724d194f5e3a70005e83caf4a6c471 not found: ID does not exist" containerID="a87a191c03f9742b8e532289f704b0fbe2724d194f5e3a70005e83caf4a6c471" Jan 20 19:28:15 crc kubenswrapper[4558]: I0120 19:28:15.186460 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a87a191c03f9742b8e532289f704b0fbe2724d194f5e3a70005e83caf4a6c471"} err="failed to get container status \"a87a191c03f9742b8e532289f704b0fbe2724d194f5e3a70005e83caf4a6c471\": rpc error: code = NotFound desc = could not find container \"a87a191c03f9742b8e532289f704b0fbe2724d194f5e3a70005e83caf4a6c471\": container with ID starting with a87a191c03f9742b8e532289f704b0fbe2724d194f5e3a70005e83caf4a6c471 not found: ID does not exist" Jan 20 19:28:15 crc kubenswrapper[4558]: I0120 19:28:15.186475 4558 scope.go:117] "RemoveContainer" containerID="4ddc1f551362ba571af1c4383c22ac94fa91c0291bab280c7a26c0b81a31964c" Jan 20 19:28:15 crc kubenswrapper[4558]: E0120 19:28:15.186675 4558 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"4ddc1f551362ba571af1c4383c22ac94fa91c0291bab280c7a26c0b81a31964c\": container with ID starting with 4ddc1f551362ba571af1c4383c22ac94fa91c0291bab280c7a26c0b81a31964c not found: ID does not exist" containerID="4ddc1f551362ba571af1c4383c22ac94fa91c0291bab280c7a26c0b81a31964c" Jan 20 19:28:15 crc kubenswrapper[4558]: I0120 19:28:15.186701 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4ddc1f551362ba571af1c4383c22ac94fa91c0291bab280c7a26c0b81a31964c"} err="failed to get container status \"4ddc1f551362ba571af1c4383c22ac94fa91c0291bab280c7a26c0b81a31964c\": rpc error: code = NotFound desc = could not find container \"4ddc1f551362ba571af1c4383c22ac94fa91c0291bab280c7a26c0b81a31964c\": container with ID starting with 4ddc1f551362ba571af1c4383c22ac94fa91c0291bab280c7a26c0b81a31964c not found: ID does not exist" Jan 20 19:28:16 crc kubenswrapper[4558]: I0120 19:28:16.575253 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8156f1d3-9f22-4afa-a956-5eb65824eed2" path="/var/lib/kubelet/pods/8156f1d3-9f22-4afa-a956-5eb65824eed2/volumes" Jan 20 19:28:19 crc kubenswrapper[4558]: I0120 19:28:19.565404 4558 scope.go:117] "RemoveContainer" containerID="54aafd1463e4e36a97be0dd83a2647d2c23b657acbdf41034d7bf93d4b3d68fd" Jan 20 19:28:19 crc kubenswrapper[4558]: E0120 19:28:19.566011 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:28:34 crc kubenswrapper[4558]: I0120 19:28:34.567027 4558 scope.go:117] "RemoveContainer" containerID="54aafd1463e4e36a97be0dd83a2647d2c23b657acbdf41034d7bf93d4b3d68fd" Jan 20 19:28:35 crc kubenswrapper[4558]: I0120 19:28:35.241143 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerStarted","Data":"fc3bb650c694b75525835a3fc944d51cba4cad094d0a629e128aef87f9339648"} Jan 20 19:29:58 crc kubenswrapper[4558]: I0120 19:29:58.448677 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-8frd8"] Jan 20 19:29:58 crc kubenswrapper[4558]: E0120 19:29:58.449585 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8156f1d3-9f22-4afa-a956-5eb65824eed2" containerName="registry-server" Jan 20 19:29:58 crc kubenswrapper[4558]: I0120 19:29:58.449600 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8156f1d3-9f22-4afa-a956-5eb65824eed2" containerName="registry-server" Jan 20 19:29:58 crc kubenswrapper[4558]: E0120 19:29:58.449624 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8156f1d3-9f22-4afa-a956-5eb65824eed2" containerName="extract-utilities" Jan 20 19:29:58 crc kubenswrapper[4558]: I0120 19:29:58.449632 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8156f1d3-9f22-4afa-a956-5eb65824eed2" containerName="extract-utilities" Jan 20 19:29:58 crc kubenswrapper[4558]: E0120 19:29:58.449643 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8156f1d3-9f22-4afa-a956-5eb65824eed2" containerName="extract-content" 
Jan 20 19:29:58 crc kubenswrapper[4558]: I0120 19:29:58.449648 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="8156f1d3-9f22-4afa-a956-5eb65824eed2" containerName="extract-content" Jan 20 19:29:58 crc kubenswrapper[4558]: I0120 19:29:58.449823 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="8156f1d3-9f22-4afa-a956-5eb65824eed2" containerName="registry-server" Jan 20 19:29:58 crc kubenswrapper[4558]: I0120 19:29:58.450749 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8frd8" Jan 20 19:29:58 crc kubenswrapper[4558]: I0120 19:29:58.459819 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-8frd8"] Jan 20 19:29:58 crc kubenswrapper[4558]: I0120 19:29:58.628242 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1960f9b3-7449-4c05-9045-99dc9514a1fb-catalog-content\") pod \"certified-operators-8frd8\" (UID: \"1960f9b3-7449-4c05-9045-99dc9514a1fb\") " pod="openshift-marketplace/certified-operators-8frd8" Jan 20 19:29:58 crc kubenswrapper[4558]: I0120 19:29:58.628379 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-48d62\" (UniqueName: \"kubernetes.io/projected/1960f9b3-7449-4c05-9045-99dc9514a1fb-kube-api-access-48d62\") pod \"certified-operators-8frd8\" (UID: \"1960f9b3-7449-4c05-9045-99dc9514a1fb\") " pod="openshift-marketplace/certified-operators-8frd8" Jan 20 19:29:58 crc kubenswrapper[4558]: I0120 19:29:58.628425 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1960f9b3-7449-4c05-9045-99dc9514a1fb-utilities\") pod \"certified-operators-8frd8\" (UID: \"1960f9b3-7449-4c05-9045-99dc9514a1fb\") " pod="openshift-marketplace/certified-operators-8frd8" Jan 20 19:29:58 crc kubenswrapper[4558]: I0120 19:29:58.730408 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1960f9b3-7449-4c05-9045-99dc9514a1fb-catalog-content\") pod \"certified-operators-8frd8\" (UID: \"1960f9b3-7449-4c05-9045-99dc9514a1fb\") " pod="openshift-marketplace/certified-operators-8frd8" Jan 20 19:29:58 crc kubenswrapper[4558]: I0120 19:29:58.730489 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-48d62\" (UniqueName: \"kubernetes.io/projected/1960f9b3-7449-4c05-9045-99dc9514a1fb-kube-api-access-48d62\") pod \"certified-operators-8frd8\" (UID: \"1960f9b3-7449-4c05-9045-99dc9514a1fb\") " pod="openshift-marketplace/certified-operators-8frd8" Jan 20 19:29:58 crc kubenswrapper[4558]: I0120 19:29:58.730534 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1960f9b3-7449-4c05-9045-99dc9514a1fb-utilities\") pod \"certified-operators-8frd8\" (UID: \"1960f9b3-7449-4c05-9045-99dc9514a1fb\") " pod="openshift-marketplace/certified-operators-8frd8" Jan 20 19:29:58 crc kubenswrapper[4558]: I0120 19:29:58.730941 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1960f9b3-7449-4c05-9045-99dc9514a1fb-catalog-content\") pod \"certified-operators-8frd8\" (UID: \"1960f9b3-7449-4c05-9045-99dc9514a1fb\") " 
pod="openshift-marketplace/certified-operators-8frd8" Jan 20 19:29:58 crc kubenswrapper[4558]: I0120 19:29:58.731002 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1960f9b3-7449-4c05-9045-99dc9514a1fb-utilities\") pod \"certified-operators-8frd8\" (UID: \"1960f9b3-7449-4c05-9045-99dc9514a1fb\") " pod="openshift-marketplace/certified-operators-8frd8" Jan 20 19:29:58 crc kubenswrapper[4558]: I0120 19:29:58.748380 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-48d62\" (UniqueName: \"kubernetes.io/projected/1960f9b3-7449-4c05-9045-99dc9514a1fb-kube-api-access-48d62\") pod \"certified-operators-8frd8\" (UID: \"1960f9b3-7449-4c05-9045-99dc9514a1fb\") " pod="openshift-marketplace/certified-operators-8frd8" Jan 20 19:29:58 crc kubenswrapper[4558]: I0120 19:29:58.794124 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8frd8" Jan 20 19:29:59 crc kubenswrapper[4558]: I0120 19:29:59.266460 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-8frd8"] Jan 20 19:29:59 crc kubenswrapper[4558]: I0120 19:29:59.901368 4558 generic.go:334] "Generic (PLEG): container finished" podID="1960f9b3-7449-4c05-9045-99dc9514a1fb" containerID="67f1d256cdec052450dba7a79b0c32ac00648fc79060276ea849259e73f86b5f" exitCode=0 Jan 20 19:29:59 crc kubenswrapper[4558]: I0120 19:29:59.901417 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8frd8" event={"ID":"1960f9b3-7449-4c05-9045-99dc9514a1fb","Type":"ContainerDied","Data":"67f1d256cdec052450dba7a79b0c32ac00648fc79060276ea849259e73f86b5f"} Jan 20 19:29:59 crc kubenswrapper[4558]: I0120 19:29:59.901441 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8frd8" event={"ID":"1960f9b3-7449-4c05-9045-99dc9514a1fb","Type":"ContainerStarted","Data":"2fcd7c1cd26cf7f354f8fc2e45bc0a9b8adf3fc5d314ba1986cc7a4536bd0189"} Jan 20 19:30:00 crc kubenswrapper[4558]: I0120 19:30:00.154774 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29482290-jzchh"] Jan 20 19:30:00 crc kubenswrapper[4558]: I0120 19:30:00.155954 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29482290-jzchh" Jan 20 19:30:00 crc kubenswrapper[4558]: I0120 19:30:00.158375 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 20 19:30:00 crc kubenswrapper[4558]: I0120 19:30:00.158448 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 20 19:30:00 crc kubenswrapper[4558]: I0120 19:30:00.159348 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29482290-jzchh"] Jan 20 19:30:00 crc kubenswrapper[4558]: I0120 19:30:00.354513 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/71e22c0f-7832-42a7-b0a2-3f77cdfb9cea-secret-volume\") pod \"collect-profiles-29482290-jzchh\" (UID: \"71e22c0f-7832-42a7-b0a2-3f77cdfb9cea\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482290-jzchh" Jan 20 19:30:00 crc kubenswrapper[4558]: I0120 19:30:00.354590 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4bq2n\" (UniqueName: \"kubernetes.io/projected/71e22c0f-7832-42a7-b0a2-3f77cdfb9cea-kube-api-access-4bq2n\") pod \"collect-profiles-29482290-jzchh\" (UID: \"71e22c0f-7832-42a7-b0a2-3f77cdfb9cea\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482290-jzchh" Jan 20 19:30:00 crc kubenswrapper[4558]: I0120 19:30:00.354652 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/71e22c0f-7832-42a7-b0a2-3f77cdfb9cea-config-volume\") pod \"collect-profiles-29482290-jzchh\" (UID: \"71e22c0f-7832-42a7-b0a2-3f77cdfb9cea\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482290-jzchh" Jan 20 19:30:00 crc kubenswrapper[4558]: I0120 19:30:00.456001 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/71e22c0f-7832-42a7-b0a2-3f77cdfb9cea-secret-volume\") pod \"collect-profiles-29482290-jzchh\" (UID: \"71e22c0f-7832-42a7-b0a2-3f77cdfb9cea\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482290-jzchh" Jan 20 19:30:00 crc kubenswrapper[4558]: I0120 19:30:00.456071 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4bq2n\" (UniqueName: \"kubernetes.io/projected/71e22c0f-7832-42a7-b0a2-3f77cdfb9cea-kube-api-access-4bq2n\") pod \"collect-profiles-29482290-jzchh\" (UID: \"71e22c0f-7832-42a7-b0a2-3f77cdfb9cea\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482290-jzchh" Jan 20 19:30:00 crc kubenswrapper[4558]: I0120 19:30:00.456117 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/71e22c0f-7832-42a7-b0a2-3f77cdfb9cea-config-volume\") pod \"collect-profiles-29482290-jzchh\" (UID: \"71e22c0f-7832-42a7-b0a2-3f77cdfb9cea\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482290-jzchh" Jan 20 19:30:00 crc kubenswrapper[4558]: I0120 19:30:00.456870 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/71e22c0f-7832-42a7-b0a2-3f77cdfb9cea-config-volume\") pod 
\"collect-profiles-29482290-jzchh\" (UID: \"71e22c0f-7832-42a7-b0a2-3f77cdfb9cea\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482290-jzchh" Jan 20 19:30:00 crc kubenswrapper[4558]: I0120 19:30:00.469850 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/71e22c0f-7832-42a7-b0a2-3f77cdfb9cea-secret-volume\") pod \"collect-profiles-29482290-jzchh\" (UID: \"71e22c0f-7832-42a7-b0a2-3f77cdfb9cea\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482290-jzchh" Jan 20 19:30:00 crc kubenswrapper[4558]: I0120 19:30:00.470236 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4bq2n\" (UniqueName: \"kubernetes.io/projected/71e22c0f-7832-42a7-b0a2-3f77cdfb9cea-kube-api-access-4bq2n\") pod \"collect-profiles-29482290-jzchh\" (UID: \"71e22c0f-7832-42a7-b0a2-3f77cdfb9cea\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482290-jzchh" Jan 20 19:30:00 crc kubenswrapper[4558]: I0120 19:30:00.477622 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29482290-jzchh" Jan 20 19:30:00 crc kubenswrapper[4558]: I0120 19:30:00.877422 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29482290-jzchh"] Jan 20 19:30:00 crc kubenswrapper[4558]: I0120 19:30:00.912564 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8frd8" event={"ID":"1960f9b3-7449-4c05-9045-99dc9514a1fb","Type":"ContainerStarted","Data":"b74b43feaf17c605f1c3b331d8779380553ea839b87de3bbe314652f51069d9e"} Jan 20 19:30:00 crc kubenswrapper[4558]: I0120 19:30:00.916388 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29482290-jzchh" event={"ID":"71e22c0f-7832-42a7-b0a2-3f77cdfb9cea","Type":"ContainerStarted","Data":"829c3eb58ad31080135c3143980c4bf8294de4c478dcdef34782194b4e363f53"} Jan 20 19:30:01 crc kubenswrapper[4558]: I0120 19:30:01.930765 4558 generic.go:334] "Generic (PLEG): container finished" podID="1960f9b3-7449-4c05-9045-99dc9514a1fb" containerID="b74b43feaf17c605f1c3b331d8779380553ea839b87de3bbe314652f51069d9e" exitCode=0 Jan 20 19:30:01 crc kubenswrapper[4558]: I0120 19:30:01.930891 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8frd8" event={"ID":"1960f9b3-7449-4c05-9045-99dc9514a1fb","Type":"ContainerDied","Data":"b74b43feaf17c605f1c3b331d8779380553ea839b87de3bbe314652f51069d9e"} Jan 20 19:30:01 crc kubenswrapper[4558]: I0120 19:30:01.933501 4558 generic.go:334] "Generic (PLEG): container finished" podID="71e22c0f-7832-42a7-b0a2-3f77cdfb9cea" containerID="ac6e3c268db708c90adeca67269831cea7c18f9f470c8b6b5cfc0b97b12694bd" exitCode=0 Jan 20 19:30:01 crc kubenswrapper[4558]: I0120 19:30:01.933563 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29482290-jzchh" event={"ID":"71e22c0f-7832-42a7-b0a2-3f77cdfb9cea","Type":"ContainerDied","Data":"ac6e3c268db708c90adeca67269831cea7c18f9f470c8b6b5cfc0b97b12694bd"} Jan 20 19:30:02 crc kubenswrapper[4558]: I0120 19:30:02.942572 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8frd8" 
event={"ID":"1960f9b3-7449-4c05-9045-99dc9514a1fb","Type":"ContainerStarted","Data":"831c3a39f3d540cb00d359455efcc8f03578c12f887d4d8293b43b7d18d659a0"} Jan 20 19:30:03 crc kubenswrapper[4558]: I0120 19:30:03.165464 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29482290-jzchh" Jan 20 19:30:03 crc kubenswrapper[4558]: I0120 19:30:03.184295 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-8frd8" podStartSLOduration=2.619716965 podStartE2EDuration="5.18427125s" podCreationTimestamp="2026-01-20 19:29:58 +0000 UTC" firstStartedPulling="2026-01-20 19:29:59.902627461 +0000 UTC m=+10093.662965419" lastFinishedPulling="2026-01-20 19:30:02.467181727 +0000 UTC m=+10096.227519704" observedRunningTime="2026-01-20 19:30:02.965561889 +0000 UTC m=+10096.725899855" watchObservedRunningTime="2026-01-20 19:30:03.18427125 +0000 UTC m=+10096.944609218" Jan 20 19:30:03 crc kubenswrapper[4558]: I0120 19:30:03.297809 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4bq2n\" (UniqueName: \"kubernetes.io/projected/71e22c0f-7832-42a7-b0a2-3f77cdfb9cea-kube-api-access-4bq2n\") pod \"71e22c0f-7832-42a7-b0a2-3f77cdfb9cea\" (UID: \"71e22c0f-7832-42a7-b0a2-3f77cdfb9cea\") " Jan 20 19:30:03 crc kubenswrapper[4558]: I0120 19:30:03.297870 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/71e22c0f-7832-42a7-b0a2-3f77cdfb9cea-secret-volume\") pod \"71e22c0f-7832-42a7-b0a2-3f77cdfb9cea\" (UID: \"71e22c0f-7832-42a7-b0a2-3f77cdfb9cea\") " Jan 20 19:30:03 crc kubenswrapper[4558]: I0120 19:30:03.298102 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/71e22c0f-7832-42a7-b0a2-3f77cdfb9cea-config-volume\") pod \"71e22c0f-7832-42a7-b0a2-3f77cdfb9cea\" (UID: \"71e22c0f-7832-42a7-b0a2-3f77cdfb9cea\") " Jan 20 19:30:03 crc kubenswrapper[4558]: I0120 19:30:03.298731 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/71e22c0f-7832-42a7-b0a2-3f77cdfb9cea-config-volume" (OuterVolumeSpecName: "config-volume") pod "71e22c0f-7832-42a7-b0a2-3f77cdfb9cea" (UID: "71e22c0f-7832-42a7-b0a2-3f77cdfb9cea"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 19:30:03 crc kubenswrapper[4558]: I0120 19:30:03.304058 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/71e22c0f-7832-42a7-b0a2-3f77cdfb9cea-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "71e22c0f-7832-42a7-b0a2-3f77cdfb9cea" (UID: "71e22c0f-7832-42a7-b0a2-3f77cdfb9cea"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 19:30:03 crc kubenswrapper[4558]: I0120 19:30:03.304534 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/71e22c0f-7832-42a7-b0a2-3f77cdfb9cea-kube-api-access-4bq2n" (OuterVolumeSpecName: "kube-api-access-4bq2n") pod "71e22c0f-7832-42a7-b0a2-3f77cdfb9cea" (UID: "71e22c0f-7832-42a7-b0a2-3f77cdfb9cea"). InnerVolumeSpecName "kube-api-access-4bq2n". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 19:30:03 crc kubenswrapper[4558]: I0120 19:30:03.399538 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4bq2n\" (UniqueName: \"kubernetes.io/projected/71e22c0f-7832-42a7-b0a2-3f77cdfb9cea-kube-api-access-4bq2n\") on node \"crc\" DevicePath \"\"" Jan 20 19:30:03 crc kubenswrapper[4558]: I0120 19:30:03.399574 4558 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/71e22c0f-7832-42a7-b0a2-3f77cdfb9cea-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 20 19:30:03 crc kubenswrapper[4558]: I0120 19:30:03.399589 4558 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/71e22c0f-7832-42a7-b0a2-3f77cdfb9cea-config-volume\") on node \"crc\" DevicePath \"\"" Jan 20 19:30:03 crc kubenswrapper[4558]: I0120 19:30:03.951718 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29482290-jzchh" Jan 20 19:30:03 crc kubenswrapper[4558]: I0120 19:30:03.951937 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29482290-jzchh" event={"ID":"71e22c0f-7832-42a7-b0a2-3f77cdfb9cea","Type":"ContainerDied","Data":"829c3eb58ad31080135c3143980c4bf8294de4c478dcdef34782194b4e363f53"} Jan 20 19:30:03 crc kubenswrapper[4558]: I0120 19:30:03.952007 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="829c3eb58ad31080135c3143980c4bf8294de4c478dcdef34782194b4e363f53" Jan 20 19:30:04 crc kubenswrapper[4558]: I0120 19:30:04.227887 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29482245-j4sm9"] Jan 20 19:30:04 crc kubenswrapper[4558]: I0120 19:30:04.231143 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29482245-j4sm9"] Jan 20 19:30:04 crc kubenswrapper[4558]: I0120 19:30:04.573907 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ab78d9a-8984-4689-b98e-7a25324b9af0" path="/var/lib/kubelet/pods/6ab78d9a-8984-4689-b98e-7a25324b9af0/volumes" Jan 20 19:30:08 crc kubenswrapper[4558]: I0120 19:30:08.794531 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-8frd8" Jan 20 19:30:08 crc kubenswrapper[4558]: I0120 19:30:08.796349 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-8frd8" Jan 20 19:30:08 crc kubenswrapper[4558]: I0120 19:30:08.831640 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-8frd8" Jan 20 19:30:09 crc kubenswrapper[4558]: I0120 19:30:09.017863 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-8frd8" Jan 20 19:30:09 crc kubenswrapper[4558]: I0120 19:30:09.061825 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-8frd8"] Jan 20 19:30:10 crc kubenswrapper[4558]: I0120 19:30:10.997902 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-8frd8" podUID="1960f9b3-7449-4c05-9045-99dc9514a1fb" containerName="registry-server" 
containerID="cri-o://831c3a39f3d540cb00d359455efcc8f03578c12f887d4d8293b43b7d18d659a0" gracePeriod=2 Jan 20 19:30:11 crc kubenswrapper[4558]: E0120 19:30:11.140257 4558 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1960f9b3_7449_4c05_9045_99dc9514a1fb.slice/crio-831c3a39f3d540cb00d359455efcc8f03578c12f887d4d8293b43b7d18d659a0.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1960f9b3_7449_4c05_9045_99dc9514a1fb.slice/crio-conmon-831c3a39f3d540cb00d359455efcc8f03578c12f887d4d8293b43b7d18d659a0.scope\": RecentStats: unable to find data in memory cache]" Jan 20 19:30:11 crc kubenswrapper[4558]: I0120 19:30:11.319081 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8frd8" Jan 20 19:30:11 crc kubenswrapper[4558]: I0120 19:30:11.413322 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1960f9b3-7449-4c05-9045-99dc9514a1fb-catalog-content\") pod \"1960f9b3-7449-4c05-9045-99dc9514a1fb\" (UID: \"1960f9b3-7449-4c05-9045-99dc9514a1fb\") " Jan 20 19:30:11 crc kubenswrapper[4558]: I0120 19:30:11.413380 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-48d62\" (UniqueName: \"kubernetes.io/projected/1960f9b3-7449-4c05-9045-99dc9514a1fb-kube-api-access-48d62\") pod \"1960f9b3-7449-4c05-9045-99dc9514a1fb\" (UID: \"1960f9b3-7449-4c05-9045-99dc9514a1fb\") " Jan 20 19:30:11 crc kubenswrapper[4558]: I0120 19:30:11.413454 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1960f9b3-7449-4c05-9045-99dc9514a1fb-utilities\") pod \"1960f9b3-7449-4c05-9045-99dc9514a1fb\" (UID: \"1960f9b3-7449-4c05-9045-99dc9514a1fb\") " Jan 20 19:30:11 crc kubenswrapper[4558]: I0120 19:30:11.416072 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1960f9b3-7449-4c05-9045-99dc9514a1fb-utilities" (OuterVolumeSpecName: "utilities") pod "1960f9b3-7449-4c05-9045-99dc9514a1fb" (UID: "1960f9b3-7449-4c05-9045-99dc9514a1fb"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 19:30:11 crc kubenswrapper[4558]: I0120 19:30:11.420789 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1960f9b3-7449-4c05-9045-99dc9514a1fb-kube-api-access-48d62" (OuterVolumeSpecName: "kube-api-access-48d62") pod "1960f9b3-7449-4c05-9045-99dc9514a1fb" (UID: "1960f9b3-7449-4c05-9045-99dc9514a1fb"). InnerVolumeSpecName "kube-api-access-48d62". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 19:30:11 crc kubenswrapper[4558]: I0120 19:30:11.516626 4558 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1960f9b3-7449-4c05-9045-99dc9514a1fb-utilities\") on node \"crc\" DevicePath \"\"" Jan 20 19:30:11 crc kubenswrapper[4558]: I0120 19:30:11.516660 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-48d62\" (UniqueName: \"kubernetes.io/projected/1960f9b3-7449-4c05-9045-99dc9514a1fb-kube-api-access-48d62\") on node \"crc\" DevicePath \"\"" Jan 20 19:30:11 crc kubenswrapper[4558]: I0120 19:30:11.644261 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1960f9b3-7449-4c05-9045-99dc9514a1fb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1960f9b3-7449-4c05-9045-99dc9514a1fb" (UID: "1960f9b3-7449-4c05-9045-99dc9514a1fb"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 19:30:11 crc kubenswrapper[4558]: I0120 19:30:11.718531 4558 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1960f9b3-7449-4c05-9045-99dc9514a1fb-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 20 19:30:12 crc kubenswrapper[4558]: I0120 19:30:12.005899 4558 generic.go:334] "Generic (PLEG): container finished" podID="1960f9b3-7449-4c05-9045-99dc9514a1fb" containerID="831c3a39f3d540cb00d359455efcc8f03578c12f887d4d8293b43b7d18d659a0" exitCode=0 Jan 20 19:30:12 crc kubenswrapper[4558]: I0120 19:30:12.006262 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8frd8" event={"ID":"1960f9b3-7449-4c05-9045-99dc9514a1fb","Type":"ContainerDied","Data":"831c3a39f3d540cb00d359455efcc8f03578c12f887d4d8293b43b7d18d659a0"} Jan 20 19:30:12 crc kubenswrapper[4558]: I0120 19:30:12.006303 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8frd8" event={"ID":"1960f9b3-7449-4c05-9045-99dc9514a1fb","Type":"ContainerDied","Data":"2fcd7c1cd26cf7f354f8fc2e45bc0a9b8adf3fc5d314ba1986cc7a4536bd0189"} Jan 20 19:30:12 crc kubenswrapper[4558]: I0120 19:30:12.006327 4558 scope.go:117] "RemoveContainer" containerID="831c3a39f3d540cb00d359455efcc8f03578c12f887d4d8293b43b7d18d659a0" Jan 20 19:30:12 crc kubenswrapper[4558]: I0120 19:30:12.006470 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-8frd8" Jan 20 19:30:12 crc kubenswrapper[4558]: I0120 19:30:12.038882 4558 scope.go:117] "RemoveContainer" containerID="b74b43feaf17c605f1c3b331d8779380553ea839b87de3bbe314652f51069d9e" Jan 20 19:30:12 crc kubenswrapper[4558]: I0120 19:30:12.059860 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-8frd8"] Jan 20 19:30:12 crc kubenswrapper[4558]: I0120 19:30:12.065391 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-8frd8"] Jan 20 19:30:12 crc kubenswrapper[4558]: I0120 19:30:12.082744 4558 scope.go:117] "RemoveContainer" containerID="67f1d256cdec052450dba7a79b0c32ac00648fc79060276ea849259e73f86b5f" Jan 20 19:30:12 crc kubenswrapper[4558]: I0120 19:30:12.096237 4558 scope.go:117] "RemoveContainer" containerID="831c3a39f3d540cb00d359455efcc8f03578c12f887d4d8293b43b7d18d659a0" Jan 20 19:30:12 crc kubenswrapper[4558]: E0120 19:30:12.096557 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"831c3a39f3d540cb00d359455efcc8f03578c12f887d4d8293b43b7d18d659a0\": container with ID starting with 831c3a39f3d540cb00d359455efcc8f03578c12f887d4d8293b43b7d18d659a0 not found: ID does not exist" containerID="831c3a39f3d540cb00d359455efcc8f03578c12f887d4d8293b43b7d18d659a0" Jan 20 19:30:12 crc kubenswrapper[4558]: I0120 19:30:12.096589 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"831c3a39f3d540cb00d359455efcc8f03578c12f887d4d8293b43b7d18d659a0"} err="failed to get container status \"831c3a39f3d540cb00d359455efcc8f03578c12f887d4d8293b43b7d18d659a0\": rpc error: code = NotFound desc = could not find container \"831c3a39f3d540cb00d359455efcc8f03578c12f887d4d8293b43b7d18d659a0\": container with ID starting with 831c3a39f3d540cb00d359455efcc8f03578c12f887d4d8293b43b7d18d659a0 not found: ID does not exist" Jan 20 19:30:12 crc kubenswrapper[4558]: I0120 19:30:12.096611 4558 scope.go:117] "RemoveContainer" containerID="b74b43feaf17c605f1c3b331d8779380553ea839b87de3bbe314652f51069d9e" Jan 20 19:30:12 crc kubenswrapper[4558]: E0120 19:30:12.096818 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b74b43feaf17c605f1c3b331d8779380553ea839b87de3bbe314652f51069d9e\": container with ID starting with b74b43feaf17c605f1c3b331d8779380553ea839b87de3bbe314652f51069d9e not found: ID does not exist" containerID="b74b43feaf17c605f1c3b331d8779380553ea839b87de3bbe314652f51069d9e" Jan 20 19:30:12 crc kubenswrapper[4558]: I0120 19:30:12.096838 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b74b43feaf17c605f1c3b331d8779380553ea839b87de3bbe314652f51069d9e"} err="failed to get container status \"b74b43feaf17c605f1c3b331d8779380553ea839b87de3bbe314652f51069d9e\": rpc error: code = NotFound desc = could not find container \"b74b43feaf17c605f1c3b331d8779380553ea839b87de3bbe314652f51069d9e\": container with ID starting with b74b43feaf17c605f1c3b331d8779380553ea839b87de3bbe314652f51069d9e not found: ID does not exist" Jan 20 19:30:12 crc kubenswrapper[4558]: I0120 19:30:12.096852 4558 scope.go:117] "RemoveContainer" containerID="67f1d256cdec052450dba7a79b0c32ac00648fc79060276ea849259e73f86b5f" Jan 20 19:30:12 crc kubenswrapper[4558]: E0120 19:30:12.097207 4558 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"67f1d256cdec052450dba7a79b0c32ac00648fc79060276ea849259e73f86b5f\": container with ID starting with 67f1d256cdec052450dba7a79b0c32ac00648fc79060276ea849259e73f86b5f not found: ID does not exist" containerID="67f1d256cdec052450dba7a79b0c32ac00648fc79060276ea849259e73f86b5f" Jan 20 19:30:12 crc kubenswrapper[4558]: I0120 19:30:12.097240 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"67f1d256cdec052450dba7a79b0c32ac00648fc79060276ea849259e73f86b5f"} err="failed to get container status \"67f1d256cdec052450dba7a79b0c32ac00648fc79060276ea849259e73f86b5f\": rpc error: code = NotFound desc = could not find container \"67f1d256cdec052450dba7a79b0c32ac00648fc79060276ea849259e73f86b5f\": container with ID starting with 67f1d256cdec052450dba7a79b0c32ac00648fc79060276ea849259e73f86b5f not found: ID does not exist" Jan 20 19:30:12 crc kubenswrapper[4558]: I0120 19:30:12.573798 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1960f9b3-7449-4c05-9045-99dc9514a1fb" path="/var/lib/kubelet/pods/1960f9b3-7449-4c05-9045-99dc9514a1fb/volumes" Jan 20 19:30:41 crc kubenswrapper[4558]: I0120 19:30:41.078753 4558 scope.go:117] "RemoveContainer" containerID="52251df8b66ae23a742d2ac33be4bfc639f190b64911d0aeaf6af12caa58df97" Jan 20 19:30:57 crc kubenswrapper[4558]: I0120 19:30:57.329949 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 19:30:57 crc kubenswrapper[4558]: I0120 19:30:57.330574 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 19:31:27 crc kubenswrapper[4558]: I0120 19:31:27.329775 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 19:31:27 crc kubenswrapper[4558]: I0120 19:31:27.331997 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 19:31:57 crc kubenswrapper[4558]: I0120 19:31:57.330556 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 19:31:57 crc kubenswrapper[4558]: I0120 19:31:57.331115 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 19:31:57 crc kubenswrapper[4558]: I0120 19:31:57.331185 4558 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" Jan 20 19:31:57 crc kubenswrapper[4558]: I0120 19:31:57.331780 4558 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"fc3bb650c694b75525835a3fc944d51cba4cad094d0a629e128aef87f9339648"} pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 20 19:31:57 crc kubenswrapper[4558]: I0120 19:31:57.331832 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" containerID="cri-o://fc3bb650c694b75525835a3fc944d51cba4cad094d0a629e128aef87f9339648" gracePeriod=600 Jan 20 19:31:57 crc kubenswrapper[4558]: I0120 19:31:57.733583 4558 generic.go:334] "Generic (PLEG): container finished" podID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerID="fc3bb650c694b75525835a3fc944d51cba4cad094d0a629e128aef87f9339648" exitCode=0 Jan 20 19:31:57 crc kubenswrapper[4558]: I0120 19:31:57.734015 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerDied","Data":"fc3bb650c694b75525835a3fc944d51cba4cad094d0a629e128aef87f9339648"} Jan 20 19:31:57 crc kubenswrapper[4558]: I0120 19:31:57.734080 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerStarted","Data":"08f0a41ff708380b079ac76e9307c7eeefc97e2c465ca694068ed4284847bc34"} Jan 20 19:31:57 crc kubenswrapper[4558]: I0120 19:31:57.734104 4558 scope.go:117] "RemoveContainer" containerID="54aafd1463e4e36a97be0dd83a2647d2c23b657acbdf41034d7bf93d4b3d68fd" Jan 20 19:33:18 crc kubenswrapper[4558]: I0120 19:33:18.916937 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-d8pjj"] Jan 20 19:33:18 crc kubenswrapper[4558]: E0120 19:33:18.918885 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1960f9b3-7449-4c05-9045-99dc9514a1fb" containerName="extract-content" Jan 20 19:33:18 crc kubenswrapper[4558]: I0120 19:33:18.918901 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1960f9b3-7449-4c05-9045-99dc9514a1fb" containerName="extract-content" Jan 20 19:33:18 crc kubenswrapper[4558]: E0120 19:33:18.918910 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71e22c0f-7832-42a7-b0a2-3f77cdfb9cea" containerName="collect-profiles" Jan 20 19:33:18 crc kubenswrapper[4558]: I0120 19:33:18.918917 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="71e22c0f-7832-42a7-b0a2-3f77cdfb9cea" containerName="collect-profiles" Jan 20 19:33:18 crc kubenswrapper[4558]: E0120 19:33:18.918949 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1960f9b3-7449-4c05-9045-99dc9514a1fb" containerName="extract-utilities" Jan 20 19:33:18 crc kubenswrapper[4558]: I0120 19:33:18.918958 4558 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="1960f9b3-7449-4c05-9045-99dc9514a1fb" containerName="extract-utilities" Jan 20 19:33:18 crc kubenswrapper[4558]: E0120 19:33:18.918968 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1960f9b3-7449-4c05-9045-99dc9514a1fb" containerName="registry-server" Jan 20 19:33:18 crc kubenswrapper[4558]: I0120 19:33:18.918974 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="1960f9b3-7449-4c05-9045-99dc9514a1fb" containerName="registry-server" Jan 20 19:33:18 crc kubenswrapper[4558]: I0120 19:33:18.919913 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="71e22c0f-7832-42a7-b0a2-3f77cdfb9cea" containerName="collect-profiles" Jan 20 19:33:18 crc kubenswrapper[4558]: I0120 19:33:18.919939 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="1960f9b3-7449-4c05-9045-99dc9514a1fb" containerName="registry-server" Jan 20 19:33:18 crc kubenswrapper[4558]: I0120 19:33:18.924416 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-d8pjj" Jan 20 19:33:18 crc kubenswrapper[4558]: I0120 19:33:18.927389 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-d8pjj"] Jan 20 19:33:19 crc kubenswrapper[4558]: I0120 19:33:19.051325 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g89hg\" (UniqueName: \"kubernetes.io/projected/58be65c3-ffcf-4596-858f-f6116cd7c2e8-kube-api-access-g89hg\") pod \"community-operators-d8pjj\" (UID: \"58be65c3-ffcf-4596-858f-f6116cd7c2e8\") " pod="openshift-marketplace/community-operators-d8pjj" Jan 20 19:33:19 crc kubenswrapper[4558]: I0120 19:33:19.051389 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/58be65c3-ffcf-4596-858f-f6116cd7c2e8-utilities\") pod \"community-operators-d8pjj\" (UID: \"58be65c3-ffcf-4596-858f-f6116cd7c2e8\") " pod="openshift-marketplace/community-operators-d8pjj" Jan 20 19:33:19 crc kubenswrapper[4558]: I0120 19:33:19.051450 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/58be65c3-ffcf-4596-858f-f6116cd7c2e8-catalog-content\") pod \"community-operators-d8pjj\" (UID: \"58be65c3-ffcf-4596-858f-f6116cd7c2e8\") " pod="openshift-marketplace/community-operators-d8pjj" Jan 20 19:33:19 crc kubenswrapper[4558]: I0120 19:33:19.152486 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g89hg\" (UniqueName: \"kubernetes.io/projected/58be65c3-ffcf-4596-858f-f6116cd7c2e8-kube-api-access-g89hg\") pod \"community-operators-d8pjj\" (UID: \"58be65c3-ffcf-4596-858f-f6116cd7c2e8\") " pod="openshift-marketplace/community-operators-d8pjj" Jan 20 19:33:19 crc kubenswrapper[4558]: I0120 19:33:19.152551 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/58be65c3-ffcf-4596-858f-f6116cd7c2e8-utilities\") pod \"community-operators-d8pjj\" (UID: \"58be65c3-ffcf-4596-858f-f6116cd7c2e8\") " pod="openshift-marketplace/community-operators-d8pjj" Jan 20 19:33:19 crc kubenswrapper[4558]: I0120 19:33:19.152606 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/58be65c3-ffcf-4596-858f-f6116cd7c2e8-catalog-content\") pod 
\"community-operators-d8pjj\" (UID: \"58be65c3-ffcf-4596-858f-f6116cd7c2e8\") " pod="openshift-marketplace/community-operators-d8pjj" Jan 20 19:33:19 crc kubenswrapper[4558]: I0120 19:33:19.153113 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/58be65c3-ffcf-4596-858f-f6116cd7c2e8-catalog-content\") pod \"community-operators-d8pjj\" (UID: \"58be65c3-ffcf-4596-858f-f6116cd7c2e8\") " pod="openshift-marketplace/community-operators-d8pjj" Jan 20 19:33:19 crc kubenswrapper[4558]: I0120 19:33:19.153193 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/58be65c3-ffcf-4596-858f-f6116cd7c2e8-utilities\") pod \"community-operators-d8pjj\" (UID: \"58be65c3-ffcf-4596-858f-f6116cd7c2e8\") " pod="openshift-marketplace/community-operators-d8pjj" Jan 20 19:33:19 crc kubenswrapper[4558]: I0120 19:33:19.173050 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g89hg\" (UniqueName: \"kubernetes.io/projected/58be65c3-ffcf-4596-858f-f6116cd7c2e8-kube-api-access-g89hg\") pod \"community-operators-d8pjj\" (UID: \"58be65c3-ffcf-4596-858f-f6116cd7c2e8\") " pod="openshift-marketplace/community-operators-d8pjj" Jan 20 19:33:19 crc kubenswrapper[4558]: I0120 19:33:19.245444 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-d8pjj" Jan 20 19:33:19 crc kubenswrapper[4558]: I0120 19:33:19.769899 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-d8pjj"] Jan 20 19:33:19 crc kubenswrapper[4558]: W0120 19:33:19.780822 4558 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod58be65c3_ffcf_4596_858f_f6116cd7c2e8.slice/crio-d08784ce8ade7ed70d480c4c77b88fee4d4faab16febaba2be4128e9b2919c12 WatchSource:0}: Error finding container d08784ce8ade7ed70d480c4c77b88fee4d4faab16febaba2be4128e9b2919c12: Status 404 returned error can't find the container with id d08784ce8ade7ed70d480c4c77b88fee4d4faab16febaba2be4128e9b2919c12 Jan 20 19:33:20 crc kubenswrapper[4558]: I0120 19:33:20.282362 4558 generic.go:334] "Generic (PLEG): container finished" podID="58be65c3-ffcf-4596-858f-f6116cd7c2e8" containerID="a3ce671a7b68721bcfc39cf2553dab41ed6f976f0ff38a8963db07528b30e107" exitCode=0 Jan 20 19:33:20 crc kubenswrapper[4558]: I0120 19:33:20.282409 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d8pjj" event={"ID":"58be65c3-ffcf-4596-858f-f6116cd7c2e8","Type":"ContainerDied","Data":"a3ce671a7b68721bcfc39cf2553dab41ed6f976f0ff38a8963db07528b30e107"} Jan 20 19:33:20 crc kubenswrapper[4558]: I0120 19:33:20.282441 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d8pjj" event={"ID":"58be65c3-ffcf-4596-858f-f6116cd7c2e8","Type":"ContainerStarted","Data":"d08784ce8ade7ed70d480c4c77b88fee4d4faab16febaba2be4128e9b2919c12"} Jan 20 19:33:20 crc kubenswrapper[4558]: I0120 19:33:20.286084 4558 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 20 19:33:21 crc kubenswrapper[4558]: I0120 19:33:21.292441 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d8pjj" 
event={"ID":"58be65c3-ffcf-4596-858f-f6116cd7c2e8","Type":"ContainerStarted","Data":"f252c9f1aafea1d31c03a3411d2454c1e52ac082247fe7d4fc9510a7ff38a7c4"} Jan 20 19:33:22 crc kubenswrapper[4558]: I0120 19:33:22.299953 4558 generic.go:334] "Generic (PLEG): container finished" podID="58be65c3-ffcf-4596-858f-f6116cd7c2e8" containerID="f252c9f1aafea1d31c03a3411d2454c1e52ac082247fe7d4fc9510a7ff38a7c4" exitCode=0 Jan 20 19:33:22 crc kubenswrapper[4558]: I0120 19:33:22.300011 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d8pjj" event={"ID":"58be65c3-ffcf-4596-858f-f6116cd7c2e8","Type":"ContainerDied","Data":"f252c9f1aafea1d31c03a3411d2454c1e52ac082247fe7d4fc9510a7ff38a7c4"} Jan 20 19:33:23 crc kubenswrapper[4558]: I0120 19:33:23.308374 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d8pjj" event={"ID":"58be65c3-ffcf-4596-858f-f6116cd7c2e8","Type":"ContainerStarted","Data":"92e23dde7a6b63ed98e1c863b20be6e2f2566ef4575bbf0d74067cab54b7e3de"} Jan 20 19:33:23 crc kubenswrapper[4558]: I0120 19:33:23.330258 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-d8pjj" podStartSLOduration=2.827728573 podStartE2EDuration="5.330239705s" podCreationTimestamp="2026-01-20 19:33:18 +0000 UTC" firstStartedPulling="2026-01-20 19:33:20.28574064 +0000 UTC m=+10294.046078607" lastFinishedPulling="2026-01-20 19:33:22.788251773 +0000 UTC m=+10296.548589739" observedRunningTime="2026-01-20 19:33:23.326771263 +0000 UTC m=+10297.087109230" watchObservedRunningTime="2026-01-20 19:33:23.330239705 +0000 UTC m=+10297.090577672" Jan 20 19:33:29 crc kubenswrapper[4558]: I0120 19:33:29.245872 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-d8pjj" Jan 20 19:33:29 crc kubenswrapper[4558]: I0120 19:33:29.246536 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-d8pjj" Jan 20 19:33:29 crc kubenswrapper[4558]: I0120 19:33:29.284572 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-d8pjj" Jan 20 19:33:29 crc kubenswrapper[4558]: I0120 19:33:29.384877 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-d8pjj" Jan 20 19:33:29 crc kubenswrapper[4558]: I0120 19:33:29.515251 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-d8pjj"] Jan 20 19:33:31 crc kubenswrapper[4558]: I0120 19:33:31.363714 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-d8pjj" podUID="58be65c3-ffcf-4596-858f-f6116cd7c2e8" containerName="registry-server" containerID="cri-o://92e23dde7a6b63ed98e1c863b20be6e2f2566ef4575bbf0d74067cab54b7e3de" gracePeriod=2 Jan 20 19:33:31 crc kubenswrapper[4558]: I0120 19:33:31.688881 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-d8pjj" Jan 20 19:33:31 crc kubenswrapper[4558]: I0120 19:33:31.832274 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/58be65c3-ffcf-4596-858f-f6116cd7c2e8-utilities\") pod \"58be65c3-ffcf-4596-858f-f6116cd7c2e8\" (UID: \"58be65c3-ffcf-4596-858f-f6116cd7c2e8\") " Jan 20 19:33:31 crc kubenswrapper[4558]: I0120 19:33:31.832482 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/58be65c3-ffcf-4596-858f-f6116cd7c2e8-catalog-content\") pod \"58be65c3-ffcf-4596-858f-f6116cd7c2e8\" (UID: \"58be65c3-ffcf-4596-858f-f6116cd7c2e8\") " Jan 20 19:33:31 crc kubenswrapper[4558]: I0120 19:33:31.832550 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g89hg\" (UniqueName: \"kubernetes.io/projected/58be65c3-ffcf-4596-858f-f6116cd7c2e8-kube-api-access-g89hg\") pod \"58be65c3-ffcf-4596-858f-f6116cd7c2e8\" (UID: \"58be65c3-ffcf-4596-858f-f6116cd7c2e8\") " Jan 20 19:33:31 crc kubenswrapper[4558]: I0120 19:33:31.833196 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/58be65c3-ffcf-4596-858f-f6116cd7c2e8-utilities" (OuterVolumeSpecName: "utilities") pod "58be65c3-ffcf-4596-858f-f6116cd7c2e8" (UID: "58be65c3-ffcf-4596-858f-f6116cd7c2e8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 19:33:31 crc kubenswrapper[4558]: I0120 19:33:31.851490 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/58be65c3-ffcf-4596-858f-f6116cd7c2e8-kube-api-access-g89hg" (OuterVolumeSpecName: "kube-api-access-g89hg") pod "58be65c3-ffcf-4596-858f-f6116cd7c2e8" (UID: "58be65c3-ffcf-4596-858f-f6116cd7c2e8"). InnerVolumeSpecName "kube-api-access-g89hg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 19:33:31 crc kubenswrapper[4558]: I0120 19:33:31.878991 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/58be65c3-ffcf-4596-858f-f6116cd7c2e8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "58be65c3-ffcf-4596-858f-f6116cd7c2e8" (UID: "58be65c3-ffcf-4596-858f-f6116cd7c2e8"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 19:33:31 crc kubenswrapper[4558]: I0120 19:33:31.934030 4558 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/58be65c3-ffcf-4596-858f-f6116cd7c2e8-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 20 19:33:31 crc kubenswrapper[4558]: I0120 19:33:31.934078 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g89hg\" (UniqueName: \"kubernetes.io/projected/58be65c3-ffcf-4596-858f-f6116cd7c2e8-kube-api-access-g89hg\") on node \"crc\" DevicePath \"\"" Jan 20 19:33:31 crc kubenswrapper[4558]: I0120 19:33:31.934091 4558 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/58be65c3-ffcf-4596-858f-f6116cd7c2e8-utilities\") on node \"crc\" DevicePath \"\"" Jan 20 19:33:32 crc kubenswrapper[4558]: I0120 19:33:32.373771 4558 generic.go:334] "Generic (PLEG): container finished" podID="58be65c3-ffcf-4596-858f-f6116cd7c2e8" containerID="92e23dde7a6b63ed98e1c863b20be6e2f2566ef4575bbf0d74067cab54b7e3de" exitCode=0 Jan 20 19:33:32 crc kubenswrapper[4558]: I0120 19:33:32.373830 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d8pjj" event={"ID":"58be65c3-ffcf-4596-858f-f6116cd7c2e8","Type":"ContainerDied","Data":"92e23dde7a6b63ed98e1c863b20be6e2f2566ef4575bbf0d74067cab54b7e3de"} Jan 20 19:33:32 crc kubenswrapper[4558]: I0120 19:33:32.373868 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-d8pjj" event={"ID":"58be65c3-ffcf-4596-858f-f6116cd7c2e8","Type":"ContainerDied","Data":"d08784ce8ade7ed70d480c4c77b88fee4d4faab16febaba2be4128e9b2919c12"} Jan 20 19:33:32 crc kubenswrapper[4558]: I0120 19:33:32.373888 4558 scope.go:117] "RemoveContainer" containerID="92e23dde7a6b63ed98e1c863b20be6e2f2566ef4575bbf0d74067cab54b7e3de" Jan 20 19:33:32 crc kubenswrapper[4558]: I0120 19:33:32.374028 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-d8pjj" Jan 20 19:33:32 crc kubenswrapper[4558]: I0120 19:33:32.409714 4558 scope.go:117] "RemoveContainer" containerID="f252c9f1aafea1d31c03a3411d2454c1e52ac082247fe7d4fc9510a7ff38a7c4" Jan 20 19:33:32 crc kubenswrapper[4558]: I0120 19:33:32.414803 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-d8pjj"] Jan 20 19:33:32 crc kubenswrapper[4558]: I0120 19:33:32.438479 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-d8pjj"] Jan 20 19:33:32 crc kubenswrapper[4558]: I0120 19:33:32.439696 4558 scope.go:117] "RemoveContainer" containerID="a3ce671a7b68721bcfc39cf2553dab41ed6f976f0ff38a8963db07528b30e107" Jan 20 19:33:32 crc kubenswrapper[4558]: I0120 19:33:32.462321 4558 scope.go:117] "RemoveContainer" containerID="92e23dde7a6b63ed98e1c863b20be6e2f2566ef4575bbf0d74067cab54b7e3de" Jan 20 19:33:32 crc kubenswrapper[4558]: E0120 19:33:32.462659 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"92e23dde7a6b63ed98e1c863b20be6e2f2566ef4575bbf0d74067cab54b7e3de\": container with ID starting with 92e23dde7a6b63ed98e1c863b20be6e2f2566ef4575bbf0d74067cab54b7e3de not found: ID does not exist" containerID="92e23dde7a6b63ed98e1c863b20be6e2f2566ef4575bbf0d74067cab54b7e3de" Jan 20 19:33:32 crc kubenswrapper[4558]: I0120 19:33:32.462691 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"92e23dde7a6b63ed98e1c863b20be6e2f2566ef4575bbf0d74067cab54b7e3de"} err="failed to get container status \"92e23dde7a6b63ed98e1c863b20be6e2f2566ef4575bbf0d74067cab54b7e3de\": rpc error: code = NotFound desc = could not find container \"92e23dde7a6b63ed98e1c863b20be6e2f2566ef4575bbf0d74067cab54b7e3de\": container with ID starting with 92e23dde7a6b63ed98e1c863b20be6e2f2566ef4575bbf0d74067cab54b7e3de not found: ID does not exist" Jan 20 19:33:32 crc kubenswrapper[4558]: I0120 19:33:32.462714 4558 scope.go:117] "RemoveContainer" containerID="f252c9f1aafea1d31c03a3411d2454c1e52ac082247fe7d4fc9510a7ff38a7c4" Jan 20 19:33:32 crc kubenswrapper[4558]: E0120 19:33:32.462999 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f252c9f1aafea1d31c03a3411d2454c1e52ac082247fe7d4fc9510a7ff38a7c4\": container with ID starting with f252c9f1aafea1d31c03a3411d2454c1e52ac082247fe7d4fc9510a7ff38a7c4 not found: ID does not exist" containerID="f252c9f1aafea1d31c03a3411d2454c1e52ac082247fe7d4fc9510a7ff38a7c4" Jan 20 19:33:32 crc kubenswrapper[4558]: I0120 19:33:32.463036 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f252c9f1aafea1d31c03a3411d2454c1e52ac082247fe7d4fc9510a7ff38a7c4"} err="failed to get container status \"f252c9f1aafea1d31c03a3411d2454c1e52ac082247fe7d4fc9510a7ff38a7c4\": rpc error: code = NotFound desc = could not find container \"f252c9f1aafea1d31c03a3411d2454c1e52ac082247fe7d4fc9510a7ff38a7c4\": container with ID starting with f252c9f1aafea1d31c03a3411d2454c1e52ac082247fe7d4fc9510a7ff38a7c4 not found: ID does not exist" Jan 20 19:33:32 crc kubenswrapper[4558]: I0120 19:33:32.463074 4558 scope.go:117] "RemoveContainer" containerID="a3ce671a7b68721bcfc39cf2553dab41ed6f976f0ff38a8963db07528b30e107" Jan 20 19:33:32 crc kubenswrapper[4558]: E0120 19:33:32.463691 4558 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"a3ce671a7b68721bcfc39cf2553dab41ed6f976f0ff38a8963db07528b30e107\": container with ID starting with a3ce671a7b68721bcfc39cf2553dab41ed6f976f0ff38a8963db07528b30e107 not found: ID does not exist" containerID="a3ce671a7b68721bcfc39cf2553dab41ed6f976f0ff38a8963db07528b30e107" Jan 20 19:33:32 crc kubenswrapper[4558]: I0120 19:33:32.463721 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a3ce671a7b68721bcfc39cf2553dab41ed6f976f0ff38a8963db07528b30e107"} err="failed to get container status \"a3ce671a7b68721bcfc39cf2553dab41ed6f976f0ff38a8963db07528b30e107\": rpc error: code = NotFound desc = could not find container \"a3ce671a7b68721bcfc39cf2553dab41ed6f976f0ff38a8963db07528b30e107\": container with ID starting with a3ce671a7b68721bcfc39cf2553dab41ed6f976f0ff38a8963db07528b30e107 not found: ID does not exist" Jan 20 19:33:32 crc kubenswrapper[4558]: I0120 19:33:32.573755 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="58be65c3-ffcf-4596-858f-f6116cd7c2e8" path="/var/lib/kubelet/pods/58be65c3-ffcf-4596-858f-f6116cd7c2e8/volumes" Jan 20 19:33:57 crc kubenswrapper[4558]: I0120 19:33:57.330326 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 19:33:57 crc kubenswrapper[4558]: I0120 19:33:57.330851 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 19:34:27 crc kubenswrapper[4558]: I0120 19:34:27.329686 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 19:34:27 crc kubenswrapper[4558]: I0120 19:34:27.330378 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 19:34:57 crc kubenswrapper[4558]: I0120 19:34:57.329634 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 19:34:57 crc kubenswrapper[4558]: I0120 19:34:57.330264 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 19:34:57 crc kubenswrapper[4558]: I0120 19:34:57.330318 4558 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" 
status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" Jan 20 19:34:57 crc kubenswrapper[4558]: I0120 19:34:57.330836 4558 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"08f0a41ff708380b079ac76e9307c7eeefc97e2c465ca694068ed4284847bc34"} pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 20 19:34:57 crc kubenswrapper[4558]: I0120 19:34:57.330888 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" containerID="cri-o://08f0a41ff708380b079ac76e9307c7eeefc97e2c465ca694068ed4284847bc34" gracePeriod=600 Jan 20 19:34:57 crc kubenswrapper[4558]: E0120 19:34:57.458536 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:34:57 crc kubenswrapper[4558]: I0120 19:34:57.935023 4558 generic.go:334] "Generic (PLEG): container finished" podID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerID="08f0a41ff708380b079ac76e9307c7eeefc97e2c465ca694068ed4284847bc34" exitCode=0 Jan 20 19:34:57 crc kubenswrapper[4558]: I0120 19:34:57.935078 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerDied","Data":"08f0a41ff708380b079ac76e9307c7eeefc97e2c465ca694068ed4284847bc34"} Jan 20 19:34:57 crc kubenswrapper[4558]: I0120 19:34:57.935114 4558 scope.go:117] "RemoveContainer" containerID="fc3bb650c694b75525835a3fc944d51cba4cad094d0a629e128aef87f9339648" Jan 20 19:34:57 crc kubenswrapper[4558]: I0120 19:34:57.935423 4558 scope.go:117] "RemoveContainer" containerID="08f0a41ff708380b079ac76e9307c7eeefc97e2c465ca694068ed4284847bc34" Jan 20 19:34:57 crc kubenswrapper[4558]: E0120 19:34:57.935618 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:35:12 crc kubenswrapper[4558]: I0120 19:35:12.565856 4558 scope.go:117] "RemoveContainer" containerID="08f0a41ff708380b079ac76e9307c7eeefc97e2c465ca694068ed4284847bc34" Jan 20 19:35:12 crc kubenswrapper[4558]: E0120 19:35:12.567963 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:35:23 crc 
kubenswrapper[4558]: I0120 19:35:23.566629 4558 scope.go:117] "RemoveContainer" containerID="08f0a41ff708380b079ac76e9307c7eeefc97e2c465ca694068ed4284847bc34" Jan 20 19:35:23 crc kubenswrapper[4558]: E0120 19:35:23.567218 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:35:38 crc kubenswrapper[4558]: I0120 19:35:38.569515 4558 scope.go:117] "RemoveContainer" containerID="08f0a41ff708380b079ac76e9307c7eeefc97e2c465ca694068ed4284847bc34" Jan 20 19:35:38 crc kubenswrapper[4558]: E0120 19:35:38.570058 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:35:51 crc kubenswrapper[4558]: I0120 19:35:51.565446 4558 scope.go:117] "RemoveContainer" containerID="08f0a41ff708380b079ac76e9307c7eeefc97e2c465ca694068ed4284847bc34" Jan 20 19:35:51 crc kubenswrapper[4558]: E0120 19:35:51.565943 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:36:03 crc kubenswrapper[4558]: I0120 19:36:03.566440 4558 scope.go:117] "RemoveContainer" containerID="08f0a41ff708380b079ac76e9307c7eeefc97e2c465ca694068ed4284847bc34" Jan 20 19:36:03 crc kubenswrapper[4558]: E0120 19:36:03.567159 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:36:13 crc kubenswrapper[4558]: I0120 19:36:13.245547 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-44pnj"] Jan 20 19:36:13 crc kubenswrapper[4558]: E0120 19:36:13.246520 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58be65c3-ffcf-4596-858f-f6116cd7c2e8" containerName="registry-server" Jan 20 19:36:13 crc kubenswrapper[4558]: I0120 19:36:13.246535 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="58be65c3-ffcf-4596-858f-f6116cd7c2e8" containerName="registry-server" Jan 20 19:36:13 crc kubenswrapper[4558]: E0120 19:36:13.246560 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58be65c3-ffcf-4596-858f-f6116cd7c2e8" containerName="extract-utilities" Jan 20 19:36:13 crc kubenswrapper[4558]: I0120 19:36:13.246566 4558 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="58be65c3-ffcf-4596-858f-f6116cd7c2e8" containerName="extract-utilities" Jan 20 19:36:13 crc kubenswrapper[4558]: E0120 19:36:13.246580 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58be65c3-ffcf-4596-858f-f6116cd7c2e8" containerName="extract-content" Jan 20 19:36:13 crc kubenswrapper[4558]: I0120 19:36:13.246586 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="58be65c3-ffcf-4596-858f-f6116cd7c2e8" containerName="extract-content" Jan 20 19:36:13 crc kubenswrapper[4558]: I0120 19:36:13.246714 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="58be65c3-ffcf-4596-858f-f6116cd7c2e8" containerName="registry-server" Jan 20 19:36:13 crc kubenswrapper[4558]: I0120 19:36:13.247642 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-44pnj" Jan 20 19:36:13 crc kubenswrapper[4558]: I0120 19:36:13.297394 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-44pnj"] Jan 20 19:36:13 crc kubenswrapper[4558]: I0120 19:36:13.392653 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2aeea0ea-f1ef-41e4-9e88-23af89260f27-catalog-content\") pod \"redhat-operators-44pnj\" (UID: \"2aeea0ea-f1ef-41e4-9e88-23af89260f27\") " pod="openshift-marketplace/redhat-operators-44pnj" Jan 20 19:36:13 crc kubenswrapper[4558]: I0120 19:36:13.392720 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2aeea0ea-f1ef-41e4-9e88-23af89260f27-utilities\") pod \"redhat-operators-44pnj\" (UID: \"2aeea0ea-f1ef-41e4-9e88-23af89260f27\") " pod="openshift-marketplace/redhat-operators-44pnj" Jan 20 19:36:13 crc kubenswrapper[4558]: I0120 19:36:13.392949 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gcgsf\" (UniqueName: \"kubernetes.io/projected/2aeea0ea-f1ef-41e4-9e88-23af89260f27-kube-api-access-gcgsf\") pod \"redhat-operators-44pnj\" (UID: \"2aeea0ea-f1ef-41e4-9e88-23af89260f27\") " pod="openshift-marketplace/redhat-operators-44pnj" Jan 20 19:36:13 crc kubenswrapper[4558]: I0120 19:36:13.494844 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2aeea0ea-f1ef-41e4-9e88-23af89260f27-utilities\") pod \"redhat-operators-44pnj\" (UID: \"2aeea0ea-f1ef-41e4-9e88-23af89260f27\") " pod="openshift-marketplace/redhat-operators-44pnj" Jan 20 19:36:13 crc kubenswrapper[4558]: I0120 19:36:13.494935 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gcgsf\" (UniqueName: \"kubernetes.io/projected/2aeea0ea-f1ef-41e4-9e88-23af89260f27-kube-api-access-gcgsf\") pod \"redhat-operators-44pnj\" (UID: \"2aeea0ea-f1ef-41e4-9e88-23af89260f27\") " pod="openshift-marketplace/redhat-operators-44pnj" Jan 20 19:36:13 crc kubenswrapper[4558]: I0120 19:36:13.495016 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2aeea0ea-f1ef-41e4-9e88-23af89260f27-catalog-content\") pod \"redhat-operators-44pnj\" (UID: \"2aeea0ea-f1ef-41e4-9e88-23af89260f27\") " pod="openshift-marketplace/redhat-operators-44pnj" Jan 20 19:36:13 crc kubenswrapper[4558]: I0120 19:36:13.495597 4558 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2aeea0ea-f1ef-41e4-9e88-23af89260f27-catalog-content\") pod \"redhat-operators-44pnj\" (UID: \"2aeea0ea-f1ef-41e4-9e88-23af89260f27\") " pod="openshift-marketplace/redhat-operators-44pnj" Jan 20 19:36:13 crc kubenswrapper[4558]: I0120 19:36:13.495850 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2aeea0ea-f1ef-41e4-9e88-23af89260f27-utilities\") pod \"redhat-operators-44pnj\" (UID: \"2aeea0ea-f1ef-41e4-9e88-23af89260f27\") " pod="openshift-marketplace/redhat-operators-44pnj" Jan 20 19:36:13 crc kubenswrapper[4558]: I0120 19:36:13.517071 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gcgsf\" (UniqueName: \"kubernetes.io/projected/2aeea0ea-f1ef-41e4-9e88-23af89260f27-kube-api-access-gcgsf\") pod \"redhat-operators-44pnj\" (UID: \"2aeea0ea-f1ef-41e4-9e88-23af89260f27\") " pod="openshift-marketplace/redhat-operators-44pnj" Jan 20 19:36:13 crc kubenswrapper[4558]: I0120 19:36:13.564646 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-44pnj" Jan 20 19:36:13 crc kubenswrapper[4558]: I0120 19:36:13.997197 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-44pnj"] Jan 20 19:36:14 crc kubenswrapper[4558]: I0120 19:36:14.464138 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-44pnj" event={"ID":"2aeea0ea-f1ef-41e4-9e88-23af89260f27","Type":"ContainerDied","Data":"1d975425d3fea41823b558b3583622412eac0c8cc8073a1c2f3c4f800beb7e39"} Jan 20 19:36:14 crc kubenswrapper[4558]: I0120 19:36:14.463978 4558 generic.go:334] "Generic (PLEG): container finished" podID="2aeea0ea-f1ef-41e4-9e88-23af89260f27" containerID="1d975425d3fea41823b558b3583622412eac0c8cc8073a1c2f3c4f800beb7e39" exitCode=0 Jan 20 19:36:14 crc kubenswrapper[4558]: I0120 19:36:14.469076 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-44pnj" event={"ID":"2aeea0ea-f1ef-41e4-9e88-23af89260f27","Type":"ContainerStarted","Data":"5571ee2fda973c78added5a13ad362f07299b20d8448a961920b803eeed9a615"} Jan 20 19:36:15 crc kubenswrapper[4558]: I0120 19:36:15.482028 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-44pnj" event={"ID":"2aeea0ea-f1ef-41e4-9e88-23af89260f27","Type":"ContainerStarted","Data":"45b03983e52fe7e74aeb591cb707fd24ea2a18b3a59322f70405536ef49266f9"} Jan 20 19:36:15 crc kubenswrapper[4558]: I0120 19:36:15.565555 4558 scope.go:117] "RemoveContainer" containerID="08f0a41ff708380b079ac76e9307c7eeefc97e2c465ca694068ed4284847bc34" Jan 20 19:36:15 crc kubenswrapper[4558]: E0120 19:36:15.565884 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:36:16 crc kubenswrapper[4558]: I0120 19:36:16.492147 4558 generic.go:334] "Generic (PLEG): container finished" podID="2aeea0ea-f1ef-41e4-9e88-23af89260f27" containerID="45b03983e52fe7e74aeb591cb707fd24ea2a18b3a59322f70405536ef49266f9" 
exitCode=0 Jan 20 19:36:16 crc kubenswrapper[4558]: I0120 19:36:16.492262 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-44pnj" event={"ID":"2aeea0ea-f1ef-41e4-9e88-23af89260f27","Type":"ContainerDied","Data":"45b03983e52fe7e74aeb591cb707fd24ea2a18b3a59322f70405536ef49266f9"} Jan 20 19:36:17 crc kubenswrapper[4558]: I0120 19:36:17.501573 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-44pnj" event={"ID":"2aeea0ea-f1ef-41e4-9e88-23af89260f27","Type":"ContainerStarted","Data":"936676e2b0e216e700de64bbaa01345a4267dcadbc06c8fc6228f0ce665c4139"} Jan 20 19:36:17 crc kubenswrapper[4558]: I0120 19:36:17.519265 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-44pnj" podStartSLOduration=1.97890769 podStartE2EDuration="4.519252846s" podCreationTimestamp="2026-01-20 19:36:13 +0000 UTC" firstStartedPulling="2026-01-20 19:36:14.471251336 +0000 UTC m=+10468.231589303" lastFinishedPulling="2026-01-20 19:36:17.011596493 +0000 UTC m=+10470.771934459" observedRunningTime="2026-01-20 19:36:17.516095379 +0000 UTC m=+10471.276433346" watchObservedRunningTime="2026-01-20 19:36:17.519252846 +0000 UTC m=+10471.279590813" Jan 20 19:36:23 crc kubenswrapper[4558]: I0120 19:36:23.564968 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-44pnj" Jan 20 19:36:23 crc kubenswrapper[4558]: I0120 19:36:23.565451 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-44pnj" Jan 20 19:36:23 crc kubenswrapper[4558]: I0120 19:36:23.602372 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-44pnj" Jan 20 19:36:24 crc kubenswrapper[4558]: I0120 19:36:24.608325 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-44pnj" Jan 20 19:36:24 crc kubenswrapper[4558]: I0120 19:36:24.658915 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-44pnj"] Jan 20 19:36:26 crc kubenswrapper[4558]: I0120 19:36:26.585898 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-44pnj" podUID="2aeea0ea-f1ef-41e4-9e88-23af89260f27" containerName="registry-server" containerID="cri-o://936676e2b0e216e700de64bbaa01345a4267dcadbc06c8fc6228f0ce665c4139" gracePeriod=2 Jan 20 19:36:27 crc kubenswrapper[4558]: I0120 19:36:27.433978 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-44pnj" Jan 20 19:36:27 crc kubenswrapper[4558]: I0120 19:36:27.591683 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2aeea0ea-f1ef-41e4-9e88-23af89260f27-utilities\") pod \"2aeea0ea-f1ef-41e4-9e88-23af89260f27\" (UID: \"2aeea0ea-f1ef-41e4-9e88-23af89260f27\") " Jan 20 19:36:27 crc kubenswrapper[4558]: I0120 19:36:27.591798 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2aeea0ea-f1ef-41e4-9e88-23af89260f27-catalog-content\") pod \"2aeea0ea-f1ef-41e4-9e88-23af89260f27\" (UID: \"2aeea0ea-f1ef-41e4-9e88-23af89260f27\") " Jan 20 19:36:27 crc kubenswrapper[4558]: I0120 19:36:27.591926 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gcgsf\" (UniqueName: \"kubernetes.io/projected/2aeea0ea-f1ef-41e4-9e88-23af89260f27-kube-api-access-gcgsf\") pod \"2aeea0ea-f1ef-41e4-9e88-23af89260f27\" (UID: \"2aeea0ea-f1ef-41e4-9e88-23af89260f27\") " Jan 20 19:36:27 crc kubenswrapper[4558]: I0120 19:36:27.592992 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2aeea0ea-f1ef-41e4-9e88-23af89260f27-utilities" (OuterVolumeSpecName: "utilities") pod "2aeea0ea-f1ef-41e4-9e88-23af89260f27" (UID: "2aeea0ea-f1ef-41e4-9e88-23af89260f27"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 19:36:27 crc kubenswrapper[4558]: I0120 19:36:27.599097 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2aeea0ea-f1ef-41e4-9e88-23af89260f27-kube-api-access-gcgsf" (OuterVolumeSpecName: "kube-api-access-gcgsf") pod "2aeea0ea-f1ef-41e4-9e88-23af89260f27" (UID: "2aeea0ea-f1ef-41e4-9e88-23af89260f27"). InnerVolumeSpecName "kube-api-access-gcgsf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 19:36:27 crc kubenswrapper[4558]: I0120 19:36:27.599142 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-44pnj" event={"ID":"2aeea0ea-f1ef-41e4-9e88-23af89260f27","Type":"ContainerDied","Data":"936676e2b0e216e700de64bbaa01345a4267dcadbc06c8fc6228f0ce665c4139"} Jan 20 19:36:27 crc kubenswrapper[4558]: I0120 19:36:27.599107 4558 generic.go:334] "Generic (PLEG): container finished" podID="2aeea0ea-f1ef-41e4-9e88-23af89260f27" containerID="936676e2b0e216e700de64bbaa01345a4267dcadbc06c8fc6228f0ce665c4139" exitCode=0 Jan 20 19:36:27 crc kubenswrapper[4558]: I0120 19:36:27.599180 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-44pnj" Jan 20 19:36:27 crc kubenswrapper[4558]: I0120 19:36:27.599201 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-44pnj" event={"ID":"2aeea0ea-f1ef-41e4-9e88-23af89260f27","Type":"ContainerDied","Data":"5571ee2fda973c78added5a13ad362f07299b20d8448a961920b803eeed9a615"} Jan 20 19:36:27 crc kubenswrapper[4558]: I0120 19:36:27.599229 4558 scope.go:117] "RemoveContainer" containerID="936676e2b0e216e700de64bbaa01345a4267dcadbc06c8fc6228f0ce665c4139" Jan 20 19:36:27 crc kubenswrapper[4558]: I0120 19:36:27.627299 4558 scope.go:117] "RemoveContainer" containerID="45b03983e52fe7e74aeb591cb707fd24ea2a18b3a59322f70405536ef49266f9" Jan 20 19:36:27 crc kubenswrapper[4558]: I0120 19:36:27.644569 4558 scope.go:117] "RemoveContainer" containerID="1d975425d3fea41823b558b3583622412eac0c8cc8073a1c2f3c4f800beb7e39" Jan 20 19:36:27 crc kubenswrapper[4558]: I0120 19:36:27.657753 4558 scope.go:117] "RemoveContainer" containerID="936676e2b0e216e700de64bbaa01345a4267dcadbc06c8fc6228f0ce665c4139" Jan 20 19:36:27 crc kubenswrapper[4558]: E0120 19:36:27.658142 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"936676e2b0e216e700de64bbaa01345a4267dcadbc06c8fc6228f0ce665c4139\": container with ID starting with 936676e2b0e216e700de64bbaa01345a4267dcadbc06c8fc6228f0ce665c4139 not found: ID does not exist" containerID="936676e2b0e216e700de64bbaa01345a4267dcadbc06c8fc6228f0ce665c4139" Jan 20 19:36:27 crc kubenswrapper[4558]: I0120 19:36:27.658187 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"936676e2b0e216e700de64bbaa01345a4267dcadbc06c8fc6228f0ce665c4139"} err="failed to get container status \"936676e2b0e216e700de64bbaa01345a4267dcadbc06c8fc6228f0ce665c4139\": rpc error: code = NotFound desc = could not find container \"936676e2b0e216e700de64bbaa01345a4267dcadbc06c8fc6228f0ce665c4139\": container with ID starting with 936676e2b0e216e700de64bbaa01345a4267dcadbc06c8fc6228f0ce665c4139 not found: ID does not exist" Jan 20 19:36:27 crc kubenswrapper[4558]: I0120 19:36:27.658208 4558 scope.go:117] "RemoveContainer" containerID="45b03983e52fe7e74aeb591cb707fd24ea2a18b3a59322f70405536ef49266f9" Jan 20 19:36:27 crc kubenswrapper[4558]: E0120 19:36:27.658559 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"45b03983e52fe7e74aeb591cb707fd24ea2a18b3a59322f70405536ef49266f9\": container with ID starting with 45b03983e52fe7e74aeb591cb707fd24ea2a18b3a59322f70405536ef49266f9 not found: ID does not exist" containerID="45b03983e52fe7e74aeb591cb707fd24ea2a18b3a59322f70405536ef49266f9" Jan 20 19:36:27 crc kubenswrapper[4558]: I0120 19:36:27.658623 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"45b03983e52fe7e74aeb591cb707fd24ea2a18b3a59322f70405536ef49266f9"} err="failed to get container status \"45b03983e52fe7e74aeb591cb707fd24ea2a18b3a59322f70405536ef49266f9\": rpc error: code = NotFound desc = could not find container \"45b03983e52fe7e74aeb591cb707fd24ea2a18b3a59322f70405536ef49266f9\": container with ID starting with 45b03983e52fe7e74aeb591cb707fd24ea2a18b3a59322f70405536ef49266f9 not found: ID does not exist" Jan 20 19:36:27 crc kubenswrapper[4558]: I0120 19:36:27.658662 4558 scope.go:117] "RemoveContainer" 
containerID="1d975425d3fea41823b558b3583622412eac0c8cc8073a1c2f3c4f800beb7e39" Jan 20 19:36:27 crc kubenswrapper[4558]: E0120 19:36:27.659059 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1d975425d3fea41823b558b3583622412eac0c8cc8073a1c2f3c4f800beb7e39\": container with ID starting with 1d975425d3fea41823b558b3583622412eac0c8cc8073a1c2f3c4f800beb7e39 not found: ID does not exist" containerID="1d975425d3fea41823b558b3583622412eac0c8cc8073a1c2f3c4f800beb7e39" Jan 20 19:36:27 crc kubenswrapper[4558]: I0120 19:36:27.659088 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d975425d3fea41823b558b3583622412eac0c8cc8073a1c2f3c4f800beb7e39"} err="failed to get container status \"1d975425d3fea41823b558b3583622412eac0c8cc8073a1c2f3c4f800beb7e39\": rpc error: code = NotFound desc = could not find container \"1d975425d3fea41823b558b3583622412eac0c8cc8073a1c2f3c4f800beb7e39\": container with ID starting with 1d975425d3fea41823b558b3583622412eac0c8cc8073a1c2f3c4f800beb7e39 not found: ID does not exist" Jan 20 19:36:27 crc kubenswrapper[4558]: I0120 19:36:27.694150 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gcgsf\" (UniqueName: \"kubernetes.io/projected/2aeea0ea-f1ef-41e4-9e88-23af89260f27-kube-api-access-gcgsf\") on node \"crc\" DevicePath \"\"" Jan 20 19:36:27 crc kubenswrapper[4558]: I0120 19:36:27.694188 4558 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2aeea0ea-f1ef-41e4-9e88-23af89260f27-utilities\") on node \"crc\" DevicePath \"\"" Jan 20 19:36:27 crc kubenswrapper[4558]: I0120 19:36:27.705425 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2aeea0ea-f1ef-41e4-9e88-23af89260f27-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2aeea0ea-f1ef-41e4-9e88-23af89260f27" (UID: "2aeea0ea-f1ef-41e4-9e88-23af89260f27"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 19:36:27 crc kubenswrapper[4558]: I0120 19:36:27.794775 4558 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2aeea0ea-f1ef-41e4-9e88-23af89260f27-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 20 19:36:27 crc kubenswrapper[4558]: I0120 19:36:27.927888 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-44pnj"] Jan 20 19:36:27 crc kubenswrapper[4558]: I0120 19:36:27.931504 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-44pnj"] Jan 20 19:36:28 crc kubenswrapper[4558]: I0120 19:36:28.573503 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2aeea0ea-f1ef-41e4-9e88-23af89260f27" path="/var/lib/kubelet/pods/2aeea0ea-f1ef-41e4-9e88-23af89260f27/volumes" Jan 20 19:36:30 crc kubenswrapper[4558]: I0120 19:36:30.565820 4558 scope.go:117] "RemoveContainer" containerID="08f0a41ff708380b079ac76e9307c7eeefc97e2c465ca694068ed4284847bc34" Jan 20 19:36:30 crc kubenswrapper[4558]: E0120 19:36:30.566206 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:36:41 crc kubenswrapper[4558]: I0120 19:36:41.566649 4558 scope.go:117] "RemoveContainer" containerID="08f0a41ff708380b079ac76e9307c7eeefc97e2c465ca694068ed4284847bc34" Jan 20 19:36:41 crc kubenswrapper[4558]: E0120 19:36:41.567522 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:36:52 crc kubenswrapper[4558]: I0120 19:36:52.566462 4558 scope.go:117] "RemoveContainer" containerID="08f0a41ff708380b079ac76e9307c7eeefc97e2c465ca694068ed4284847bc34" Jan 20 19:36:52 crc kubenswrapper[4558]: E0120 19:36:52.567498 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:37:05 crc kubenswrapper[4558]: I0120 19:37:05.565911 4558 scope.go:117] "RemoveContainer" containerID="08f0a41ff708380b079ac76e9307c7eeefc97e2c465ca694068ed4284847bc34" Jan 20 19:37:05 crc kubenswrapper[4558]: E0120 19:37:05.566656 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:37:18 crc kubenswrapper[4558]: I0120 19:37:18.566388 4558 scope.go:117] "RemoveContainer" containerID="08f0a41ff708380b079ac76e9307c7eeefc97e2c465ca694068ed4284847bc34" Jan 20 19:37:18 crc kubenswrapper[4558]: E0120 19:37:18.567115 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:37:30 crc kubenswrapper[4558]: I0120 19:37:30.569101 4558 scope.go:117] "RemoveContainer" containerID="08f0a41ff708380b079ac76e9307c7eeefc97e2c465ca694068ed4284847bc34" Jan 20 19:37:30 crc kubenswrapper[4558]: E0120 19:37:30.569939 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:37:41 crc kubenswrapper[4558]: I0120 19:37:41.566966 4558 scope.go:117] "RemoveContainer" containerID="08f0a41ff708380b079ac76e9307c7eeefc97e2c465ca694068ed4284847bc34" Jan 20 19:37:41 crc kubenswrapper[4558]: E0120 19:37:41.567734 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:37:52 crc kubenswrapper[4558]: I0120 19:37:52.566261 4558 scope.go:117] "RemoveContainer" containerID="08f0a41ff708380b079ac76e9307c7eeefc97e2c465ca694068ed4284847bc34" Jan 20 19:37:52 crc kubenswrapper[4558]: E0120 19:37:52.567302 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:38:04 crc kubenswrapper[4558]: I0120 19:38:04.566791 4558 scope.go:117] "RemoveContainer" containerID="08f0a41ff708380b079ac76e9307c7eeefc97e2c465ca694068ed4284847bc34" Jan 20 19:38:04 crc kubenswrapper[4558]: E0120 19:38:04.568721 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:38:07 crc kubenswrapper[4558]: I0120 19:38:07.252030 4558 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-d88ds"] Jan 20 19:38:07 crc kubenswrapper[4558]: E0120 19:38:07.252381 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2aeea0ea-f1ef-41e4-9e88-23af89260f27" containerName="registry-server" Jan 20 19:38:07 crc kubenswrapper[4558]: I0120 19:38:07.252397 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="2aeea0ea-f1ef-41e4-9e88-23af89260f27" containerName="registry-server" Jan 20 19:38:07 crc kubenswrapper[4558]: E0120 19:38:07.252421 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2aeea0ea-f1ef-41e4-9e88-23af89260f27" containerName="extract-utilities" Jan 20 19:38:07 crc kubenswrapper[4558]: I0120 19:38:07.252427 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="2aeea0ea-f1ef-41e4-9e88-23af89260f27" containerName="extract-utilities" Jan 20 19:38:07 crc kubenswrapper[4558]: E0120 19:38:07.252435 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2aeea0ea-f1ef-41e4-9e88-23af89260f27" containerName="extract-content" Jan 20 19:38:07 crc kubenswrapper[4558]: I0120 19:38:07.252440 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="2aeea0ea-f1ef-41e4-9e88-23af89260f27" containerName="extract-content" Jan 20 19:38:07 crc kubenswrapper[4558]: I0120 19:38:07.252562 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="2aeea0ea-f1ef-41e4-9e88-23af89260f27" containerName="registry-server" Jan 20 19:38:07 crc kubenswrapper[4558]: I0120 19:38:07.254584 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-d88ds" Jan 20 19:38:07 crc kubenswrapper[4558]: I0120 19:38:07.264751 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-d88ds"] Jan 20 19:38:07 crc kubenswrapper[4558]: I0120 19:38:07.356939 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-np8l6\" (UniqueName: \"kubernetes.io/projected/03d62f48-19a6-4e6e-83cd-5ddff5311120-kube-api-access-np8l6\") pod \"redhat-marketplace-d88ds\" (UID: \"03d62f48-19a6-4e6e-83cd-5ddff5311120\") " pod="openshift-marketplace/redhat-marketplace-d88ds" Jan 20 19:38:07 crc kubenswrapper[4558]: I0120 19:38:07.357395 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/03d62f48-19a6-4e6e-83cd-5ddff5311120-utilities\") pod \"redhat-marketplace-d88ds\" (UID: \"03d62f48-19a6-4e6e-83cd-5ddff5311120\") " pod="openshift-marketplace/redhat-marketplace-d88ds" Jan 20 19:38:07 crc kubenswrapper[4558]: I0120 19:38:07.357504 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/03d62f48-19a6-4e6e-83cd-5ddff5311120-catalog-content\") pod \"redhat-marketplace-d88ds\" (UID: \"03d62f48-19a6-4e6e-83cd-5ddff5311120\") " pod="openshift-marketplace/redhat-marketplace-d88ds" Jan 20 19:38:07 crc kubenswrapper[4558]: I0120 19:38:07.459307 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/03d62f48-19a6-4e6e-83cd-5ddff5311120-utilities\") pod \"redhat-marketplace-d88ds\" (UID: \"03d62f48-19a6-4e6e-83cd-5ddff5311120\") " pod="openshift-marketplace/redhat-marketplace-d88ds" Jan 20 19:38:07 crc kubenswrapper[4558]: I0120 19:38:07.459385 
4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/03d62f48-19a6-4e6e-83cd-5ddff5311120-catalog-content\") pod \"redhat-marketplace-d88ds\" (UID: \"03d62f48-19a6-4e6e-83cd-5ddff5311120\") " pod="openshift-marketplace/redhat-marketplace-d88ds" Jan 20 19:38:07 crc kubenswrapper[4558]: I0120 19:38:07.459463 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-np8l6\" (UniqueName: \"kubernetes.io/projected/03d62f48-19a6-4e6e-83cd-5ddff5311120-kube-api-access-np8l6\") pod \"redhat-marketplace-d88ds\" (UID: \"03d62f48-19a6-4e6e-83cd-5ddff5311120\") " pod="openshift-marketplace/redhat-marketplace-d88ds" Jan 20 19:38:07 crc kubenswrapper[4558]: I0120 19:38:07.460447 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/03d62f48-19a6-4e6e-83cd-5ddff5311120-utilities\") pod \"redhat-marketplace-d88ds\" (UID: \"03d62f48-19a6-4e6e-83cd-5ddff5311120\") " pod="openshift-marketplace/redhat-marketplace-d88ds" Jan 20 19:38:07 crc kubenswrapper[4558]: I0120 19:38:07.460925 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/03d62f48-19a6-4e6e-83cd-5ddff5311120-catalog-content\") pod \"redhat-marketplace-d88ds\" (UID: \"03d62f48-19a6-4e6e-83cd-5ddff5311120\") " pod="openshift-marketplace/redhat-marketplace-d88ds" Jan 20 19:38:07 crc kubenswrapper[4558]: I0120 19:38:07.489016 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-np8l6\" (UniqueName: \"kubernetes.io/projected/03d62f48-19a6-4e6e-83cd-5ddff5311120-kube-api-access-np8l6\") pod \"redhat-marketplace-d88ds\" (UID: \"03d62f48-19a6-4e6e-83cd-5ddff5311120\") " pod="openshift-marketplace/redhat-marketplace-d88ds" Jan 20 19:38:07 crc kubenswrapper[4558]: I0120 19:38:07.587569 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-d88ds" Jan 20 19:38:07 crc kubenswrapper[4558]: I0120 19:38:07.999061 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-d88ds"] Jan 20 19:38:08 crc kubenswrapper[4558]: I0120 19:38:08.305570 4558 generic.go:334] "Generic (PLEG): container finished" podID="03d62f48-19a6-4e6e-83cd-5ddff5311120" containerID="36cfb9436c0c60efd2e72da195c635eca2592d16c7b2e306772129e7966a4123" exitCode=0 Jan 20 19:38:08 crc kubenswrapper[4558]: I0120 19:38:08.305643 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d88ds" event={"ID":"03d62f48-19a6-4e6e-83cd-5ddff5311120","Type":"ContainerDied","Data":"36cfb9436c0c60efd2e72da195c635eca2592d16c7b2e306772129e7966a4123"} Jan 20 19:38:08 crc kubenswrapper[4558]: I0120 19:38:08.305720 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d88ds" event={"ID":"03d62f48-19a6-4e6e-83cd-5ddff5311120","Type":"ContainerStarted","Data":"69f45e813f204f4aac0d12ff6694a34673bdf608e291a7cf7d987064800d1d3b"} Jan 20 19:38:09 crc kubenswrapper[4558]: I0120 19:38:09.315216 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d88ds" event={"ID":"03d62f48-19a6-4e6e-83cd-5ddff5311120","Type":"ContainerStarted","Data":"99fa465af92391a615d9491bc64057c8352e35b1e281e3b24d1b075e2b600741"} Jan 20 19:38:10 crc kubenswrapper[4558]: I0120 19:38:10.324677 4558 generic.go:334] "Generic (PLEG): container finished" podID="03d62f48-19a6-4e6e-83cd-5ddff5311120" containerID="99fa465af92391a615d9491bc64057c8352e35b1e281e3b24d1b075e2b600741" exitCode=0 Jan 20 19:38:10 crc kubenswrapper[4558]: I0120 19:38:10.324775 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d88ds" event={"ID":"03d62f48-19a6-4e6e-83cd-5ddff5311120","Type":"ContainerDied","Data":"99fa465af92391a615d9491bc64057c8352e35b1e281e3b24d1b075e2b600741"} Jan 20 19:38:11 crc kubenswrapper[4558]: I0120 19:38:11.332139 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d88ds" event={"ID":"03d62f48-19a6-4e6e-83cd-5ddff5311120","Type":"ContainerStarted","Data":"4e22389b3b3949d633aad492bfff1ea2e95971195560085f652ab0d0a5605a7e"} Jan 20 19:38:11 crc kubenswrapper[4558]: I0120 19:38:11.353606 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-d88ds" podStartSLOduration=1.791365291 podStartE2EDuration="4.353585847s" podCreationTimestamp="2026-01-20 19:38:07 +0000 UTC" firstStartedPulling="2026-01-20 19:38:08.30690461 +0000 UTC m=+10582.067242577" lastFinishedPulling="2026-01-20 19:38:10.869125176 +0000 UTC m=+10584.629463133" observedRunningTime="2026-01-20 19:38:11.348697496 +0000 UTC m=+10585.109035463" watchObservedRunningTime="2026-01-20 19:38:11.353585847 +0000 UTC m=+10585.113923815" Jan 20 19:38:16 crc kubenswrapper[4558]: I0120 19:38:16.570088 4558 scope.go:117] "RemoveContainer" containerID="08f0a41ff708380b079ac76e9307c7eeefc97e2c465ca694068ed4284847bc34" Jan 20 19:38:16 crc kubenswrapper[4558]: E0120 19:38:16.570961 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:38:17 crc kubenswrapper[4558]: I0120 19:38:17.588646 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-d88ds" Jan 20 19:38:17 crc kubenswrapper[4558]: I0120 19:38:17.590460 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-d88ds" Jan 20 19:38:17 crc kubenswrapper[4558]: I0120 19:38:17.622908 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-d88ds" Jan 20 19:38:18 crc kubenswrapper[4558]: I0120 19:38:18.425779 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-d88ds" Jan 20 19:38:18 crc kubenswrapper[4558]: I0120 19:38:18.467654 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-d88ds"] Jan 20 19:38:20 crc kubenswrapper[4558]: I0120 19:38:20.398887 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-d88ds" podUID="03d62f48-19a6-4e6e-83cd-5ddff5311120" containerName="registry-server" containerID="cri-o://4e22389b3b3949d633aad492bfff1ea2e95971195560085f652ab0d0a5605a7e" gracePeriod=2 Jan 20 19:38:20 crc kubenswrapper[4558]: I0120 19:38:20.751408 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-d88ds" Jan 20 19:38:20 crc kubenswrapper[4558]: I0120 19:38:20.781361 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/03d62f48-19a6-4e6e-83cd-5ddff5311120-utilities\") pod \"03d62f48-19a6-4e6e-83cd-5ddff5311120\" (UID: \"03d62f48-19a6-4e6e-83cd-5ddff5311120\") " Jan 20 19:38:20 crc kubenswrapper[4558]: I0120 19:38:20.781431 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-np8l6\" (UniqueName: \"kubernetes.io/projected/03d62f48-19a6-4e6e-83cd-5ddff5311120-kube-api-access-np8l6\") pod \"03d62f48-19a6-4e6e-83cd-5ddff5311120\" (UID: \"03d62f48-19a6-4e6e-83cd-5ddff5311120\") " Jan 20 19:38:20 crc kubenswrapper[4558]: I0120 19:38:20.781461 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/03d62f48-19a6-4e6e-83cd-5ddff5311120-catalog-content\") pod \"03d62f48-19a6-4e6e-83cd-5ddff5311120\" (UID: \"03d62f48-19a6-4e6e-83cd-5ddff5311120\") " Jan 20 19:38:20 crc kubenswrapper[4558]: I0120 19:38:20.782420 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/03d62f48-19a6-4e6e-83cd-5ddff5311120-utilities" (OuterVolumeSpecName: "utilities") pod "03d62f48-19a6-4e6e-83cd-5ddff5311120" (UID: "03d62f48-19a6-4e6e-83cd-5ddff5311120"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 19:38:20 crc kubenswrapper[4558]: I0120 19:38:20.788831 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/03d62f48-19a6-4e6e-83cd-5ddff5311120-kube-api-access-np8l6" (OuterVolumeSpecName: "kube-api-access-np8l6") pod "03d62f48-19a6-4e6e-83cd-5ddff5311120" (UID: "03d62f48-19a6-4e6e-83cd-5ddff5311120"). InnerVolumeSpecName "kube-api-access-np8l6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 19:38:20 crc kubenswrapper[4558]: I0120 19:38:20.801753 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/03d62f48-19a6-4e6e-83cd-5ddff5311120-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "03d62f48-19a6-4e6e-83cd-5ddff5311120" (UID: "03d62f48-19a6-4e6e-83cd-5ddff5311120"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 19:38:20 crc kubenswrapper[4558]: I0120 19:38:20.882870 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-np8l6\" (UniqueName: \"kubernetes.io/projected/03d62f48-19a6-4e6e-83cd-5ddff5311120-kube-api-access-np8l6\") on node \"crc\" DevicePath \"\"" Jan 20 19:38:20 crc kubenswrapper[4558]: I0120 19:38:20.882907 4558 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/03d62f48-19a6-4e6e-83cd-5ddff5311120-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 20 19:38:20 crc kubenswrapper[4558]: I0120 19:38:20.883486 4558 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/03d62f48-19a6-4e6e-83cd-5ddff5311120-utilities\") on node \"crc\" DevicePath \"\"" Jan 20 19:38:21 crc kubenswrapper[4558]: I0120 19:38:21.406087 4558 generic.go:334] "Generic (PLEG): container finished" podID="03d62f48-19a6-4e6e-83cd-5ddff5311120" containerID="4e22389b3b3949d633aad492bfff1ea2e95971195560085f652ab0d0a5605a7e" exitCode=0 Jan 20 19:38:21 crc kubenswrapper[4558]: I0120 19:38:21.406129 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d88ds" event={"ID":"03d62f48-19a6-4e6e-83cd-5ddff5311120","Type":"ContainerDied","Data":"4e22389b3b3949d633aad492bfff1ea2e95971195560085f652ab0d0a5605a7e"} Jan 20 19:38:21 crc kubenswrapper[4558]: I0120 19:38:21.406155 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-d88ds" event={"ID":"03d62f48-19a6-4e6e-83cd-5ddff5311120","Type":"ContainerDied","Data":"69f45e813f204f4aac0d12ff6694a34673bdf608e291a7cf7d987064800d1d3b"} Jan 20 19:38:21 crc kubenswrapper[4558]: I0120 19:38:21.406188 4558 scope.go:117] "RemoveContainer" containerID="4e22389b3b3949d633aad492bfff1ea2e95971195560085f652ab0d0a5605a7e" Jan 20 19:38:21 crc kubenswrapper[4558]: I0120 19:38:21.406297 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-d88ds" Jan 20 19:38:21 crc kubenswrapper[4558]: I0120 19:38:21.438919 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-d88ds"] Jan 20 19:38:21 crc kubenswrapper[4558]: I0120 19:38:21.438959 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-d88ds"] Jan 20 19:38:21 crc kubenswrapper[4558]: I0120 19:38:21.444580 4558 scope.go:117] "RemoveContainer" containerID="99fa465af92391a615d9491bc64057c8352e35b1e281e3b24d1b075e2b600741" Jan 20 19:38:21 crc kubenswrapper[4558]: I0120 19:38:21.462481 4558 scope.go:117] "RemoveContainer" containerID="36cfb9436c0c60efd2e72da195c635eca2592d16c7b2e306772129e7966a4123" Jan 20 19:38:21 crc kubenswrapper[4558]: I0120 19:38:21.481232 4558 scope.go:117] "RemoveContainer" containerID="4e22389b3b3949d633aad492bfff1ea2e95971195560085f652ab0d0a5605a7e" Jan 20 19:38:21 crc kubenswrapper[4558]: E0120 19:38:21.481661 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4e22389b3b3949d633aad492bfff1ea2e95971195560085f652ab0d0a5605a7e\": container with ID starting with 4e22389b3b3949d633aad492bfff1ea2e95971195560085f652ab0d0a5605a7e not found: ID does not exist" containerID="4e22389b3b3949d633aad492bfff1ea2e95971195560085f652ab0d0a5605a7e" Jan 20 19:38:21 crc kubenswrapper[4558]: I0120 19:38:21.481695 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4e22389b3b3949d633aad492bfff1ea2e95971195560085f652ab0d0a5605a7e"} err="failed to get container status \"4e22389b3b3949d633aad492bfff1ea2e95971195560085f652ab0d0a5605a7e\": rpc error: code = NotFound desc = could not find container \"4e22389b3b3949d633aad492bfff1ea2e95971195560085f652ab0d0a5605a7e\": container with ID starting with 4e22389b3b3949d633aad492bfff1ea2e95971195560085f652ab0d0a5605a7e not found: ID does not exist" Jan 20 19:38:21 crc kubenswrapper[4558]: I0120 19:38:21.481719 4558 scope.go:117] "RemoveContainer" containerID="99fa465af92391a615d9491bc64057c8352e35b1e281e3b24d1b075e2b600741" Jan 20 19:38:21 crc kubenswrapper[4558]: E0120 19:38:21.481967 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"99fa465af92391a615d9491bc64057c8352e35b1e281e3b24d1b075e2b600741\": container with ID starting with 99fa465af92391a615d9491bc64057c8352e35b1e281e3b24d1b075e2b600741 not found: ID does not exist" containerID="99fa465af92391a615d9491bc64057c8352e35b1e281e3b24d1b075e2b600741" Jan 20 19:38:21 crc kubenswrapper[4558]: I0120 19:38:21.482015 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"99fa465af92391a615d9491bc64057c8352e35b1e281e3b24d1b075e2b600741"} err="failed to get container status \"99fa465af92391a615d9491bc64057c8352e35b1e281e3b24d1b075e2b600741\": rpc error: code = NotFound desc = could not find container \"99fa465af92391a615d9491bc64057c8352e35b1e281e3b24d1b075e2b600741\": container with ID starting with 99fa465af92391a615d9491bc64057c8352e35b1e281e3b24d1b075e2b600741 not found: ID does not exist" Jan 20 19:38:21 crc kubenswrapper[4558]: I0120 19:38:21.482031 4558 scope.go:117] "RemoveContainer" containerID="36cfb9436c0c60efd2e72da195c635eca2592d16c7b2e306772129e7966a4123" Jan 20 19:38:21 crc kubenswrapper[4558]: E0120 19:38:21.482319 4558 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"36cfb9436c0c60efd2e72da195c635eca2592d16c7b2e306772129e7966a4123\": container with ID starting with 36cfb9436c0c60efd2e72da195c635eca2592d16c7b2e306772129e7966a4123 not found: ID does not exist" containerID="36cfb9436c0c60efd2e72da195c635eca2592d16c7b2e306772129e7966a4123" Jan 20 19:38:21 crc kubenswrapper[4558]: I0120 19:38:21.482374 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"36cfb9436c0c60efd2e72da195c635eca2592d16c7b2e306772129e7966a4123"} err="failed to get container status \"36cfb9436c0c60efd2e72da195c635eca2592d16c7b2e306772129e7966a4123\": rpc error: code = NotFound desc = could not find container \"36cfb9436c0c60efd2e72da195c635eca2592d16c7b2e306772129e7966a4123\": container with ID starting with 36cfb9436c0c60efd2e72da195c635eca2592d16c7b2e306772129e7966a4123 not found: ID does not exist" Jan 20 19:38:22 crc kubenswrapper[4558]: I0120 19:38:22.575211 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="03d62f48-19a6-4e6e-83cd-5ddff5311120" path="/var/lib/kubelet/pods/03d62f48-19a6-4e6e-83cd-5ddff5311120/volumes" Jan 20 19:38:31 crc kubenswrapper[4558]: I0120 19:38:31.566325 4558 scope.go:117] "RemoveContainer" containerID="08f0a41ff708380b079ac76e9307c7eeefc97e2c465ca694068ed4284847bc34" Jan 20 19:38:31 crc kubenswrapper[4558]: E0120 19:38:31.567217 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:38:44 crc kubenswrapper[4558]: I0120 19:38:44.566690 4558 scope.go:117] "RemoveContainer" containerID="08f0a41ff708380b079ac76e9307c7eeefc97e2c465ca694068ed4284847bc34" Jan 20 19:38:44 crc kubenswrapper[4558]: E0120 19:38:44.567666 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:38:58 crc kubenswrapper[4558]: I0120 19:38:58.565842 4558 scope.go:117] "RemoveContainer" containerID="08f0a41ff708380b079ac76e9307c7eeefc97e2c465ca694068ed4284847bc34" Jan 20 19:38:58 crc kubenswrapper[4558]: E0120 19:38:58.567435 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:39:12 crc kubenswrapper[4558]: I0120 19:39:12.568280 4558 scope.go:117] "RemoveContainer" containerID="08f0a41ff708380b079ac76e9307c7eeefc97e2c465ca694068ed4284847bc34" Jan 20 19:39:12 crc kubenswrapper[4558]: E0120 19:39:12.568929 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:39:26 crc kubenswrapper[4558]: I0120 19:39:26.571107 4558 scope.go:117] "RemoveContainer" containerID="08f0a41ff708380b079ac76e9307c7eeefc97e2c465ca694068ed4284847bc34" Jan 20 19:39:26 crc kubenswrapper[4558]: E0120 19:39:26.572117 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:39:39 crc kubenswrapper[4558]: I0120 19:39:39.565867 4558 scope.go:117] "RemoveContainer" containerID="08f0a41ff708380b079ac76e9307c7eeefc97e2c465ca694068ed4284847bc34" Jan 20 19:39:39 crc kubenswrapper[4558]: E0120 19:39:39.566566 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:39:51 crc kubenswrapper[4558]: I0120 19:39:51.567516 4558 scope.go:117] "RemoveContainer" containerID="08f0a41ff708380b079ac76e9307c7eeefc97e2c465ca694068ed4284847bc34" Jan 20 19:39:51 crc kubenswrapper[4558]: E0120 19:39:51.569547 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:40:03 crc kubenswrapper[4558]: I0120 19:40:03.565965 4558 scope.go:117] "RemoveContainer" containerID="08f0a41ff708380b079ac76e9307c7eeefc97e2c465ca694068ed4284847bc34" Jan 20 19:40:04 crc kubenswrapper[4558]: I0120 19:40:04.155065 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerStarted","Data":"c9f1ef733c7c7a9f4b483629db0474b41245df77ed9c7ff44058afb88192b813"} Jan 20 19:40:29 crc kubenswrapper[4558]: I0120 19:40:29.599402 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-hwrwk"] Jan 20 19:40:29 crc kubenswrapper[4558]: E0120 19:40:29.600503 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03d62f48-19a6-4e6e-83cd-5ddff5311120" containerName="extract-utilities" Jan 20 19:40:29 crc kubenswrapper[4558]: I0120 19:40:29.600522 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="03d62f48-19a6-4e6e-83cd-5ddff5311120" containerName="extract-utilities" Jan 20 19:40:29 crc kubenswrapper[4558]: E0120 19:40:29.600536 4558 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="03d62f48-19a6-4e6e-83cd-5ddff5311120" containerName="extract-content" Jan 20 19:40:29 crc kubenswrapper[4558]: I0120 19:40:29.600545 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="03d62f48-19a6-4e6e-83cd-5ddff5311120" containerName="extract-content" Jan 20 19:40:29 crc kubenswrapper[4558]: E0120 19:40:29.600558 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03d62f48-19a6-4e6e-83cd-5ddff5311120" containerName="registry-server" Jan 20 19:40:29 crc kubenswrapper[4558]: I0120 19:40:29.600564 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="03d62f48-19a6-4e6e-83cd-5ddff5311120" containerName="registry-server" Jan 20 19:40:29 crc kubenswrapper[4558]: I0120 19:40:29.600750 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="03d62f48-19a6-4e6e-83cd-5ddff5311120" containerName="registry-server" Jan 20 19:40:29 crc kubenswrapper[4558]: I0120 19:40:29.601832 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hwrwk" Jan 20 19:40:29 crc kubenswrapper[4558]: I0120 19:40:29.620448 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hwrwk"] Jan 20 19:40:29 crc kubenswrapper[4558]: I0120 19:40:29.776242 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k7fmz\" (UniqueName: \"kubernetes.io/projected/bb377a56-a1dd-4e87-aa0b-64a1308f290b-kube-api-access-k7fmz\") pod \"certified-operators-hwrwk\" (UID: \"bb377a56-a1dd-4e87-aa0b-64a1308f290b\") " pod="openshift-marketplace/certified-operators-hwrwk" Jan 20 19:40:29 crc kubenswrapper[4558]: I0120 19:40:29.777007 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb377a56-a1dd-4e87-aa0b-64a1308f290b-utilities\") pod \"certified-operators-hwrwk\" (UID: \"bb377a56-a1dd-4e87-aa0b-64a1308f290b\") " pod="openshift-marketplace/certified-operators-hwrwk" Jan 20 19:40:29 crc kubenswrapper[4558]: I0120 19:40:29.777104 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb377a56-a1dd-4e87-aa0b-64a1308f290b-catalog-content\") pod \"certified-operators-hwrwk\" (UID: \"bb377a56-a1dd-4e87-aa0b-64a1308f290b\") " pod="openshift-marketplace/certified-operators-hwrwk" Jan 20 19:40:29 crc kubenswrapper[4558]: I0120 19:40:29.880194 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k7fmz\" (UniqueName: \"kubernetes.io/projected/bb377a56-a1dd-4e87-aa0b-64a1308f290b-kube-api-access-k7fmz\") pod \"certified-operators-hwrwk\" (UID: \"bb377a56-a1dd-4e87-aa0b-64a1308f290b\") " pod="openshift-marketplace/certified-operators-hwrwk" Jan 20 19:40:29 crc kubenswrapper[4558]: I0120 19:40:29.880315 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb377a56-a1dd-4e87-aa0b-64a1308f290b-utilities\") pod \"certified-operators-hwrwk\" (UID: \"bb377a56-a1dd-4e87-aa0b-64a1308f290b\") " pod="openshift-marketplace/certified-operators-hwrwk" Jan 20 19:40:29 crc kubenswrapper[4558]: I0120 19:40:29.880354 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/bb377a56-a1dd-4e87-aa0b-64a1308f290b-catalog-content\") pod \"certified-operators-hwrwk\" (UID: \"bb377a56-a1dd-4e87-aa0b-64a1308f290b\") " pod="openshift-marketplace/certified-operators-hwrwk" Jan 20 19:40:29 crc kubenswrapper[4558]: I0120 19:40:29.880950 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb377a56-a1dd-4e87-aa0b-64a1308f290b-utilities\") pod \"certified-operators-hwrwk\" (UID: \"bb377a56-a1dd-4e87-aa0b-64a1308f290b\") " pod="openshift-marketplace/certified-operators-hwrwk" Jan 20 19:40:29 crc kubenswrapper[4558]: I0120 19:40:29.881088 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb377a56-a1dd-4e87-aa0b-64a1308f290b-catalog-content\") pod \"certified-operators-hwrwk\" (UID: \"bb377a56-a1dd-4e87-aa0b-64a1308f290b\") " pod="openshift-marketplace/certified-operators-hwrwk" Jan 20 19:40:29 crc kubenswrapper[4558]: I0120 19:40:29.907278 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k7fmz\" (UniqueName: \"kubernetes.io/projected/bb377a56-a1dd-4e87-aa0b-64a1308f290b-kube-api-access-k7fmz\") pod \"certified-operators-hwrwk\" (UID: \"bb377a56-a1dd-4e87-aa0b-64a1308f290b\") " pod="openshift-marketplace/certified-operators-hwrwk" Jan 20 19:40:29 crc kubenswrapper[4558]: I0120 19:40:29.923931 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hwrwk" Jan 20 19:40:30 crc kubenswrapper[4558]: I0120 19:40:30.342186 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hwrwk"] Jan 20 19:40:30 crc kubenswrapper[4558]: I0120 19:40:30.357310 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hwrwk" event={"ID":"bb377a56-a1dd-4e87-aa0b-64a1308f290b","Type":"ContainerStarted","Data":"3360a8dd98e6bc00597d127f4920435002dde467f10c5e4ad8265bb952c1edbe"} Jan 20 19:40:31 crc kubenswrapper[4558]: I0120 19:40:31.364890 4558 generic.go:334] "Generic (PLEG): container finished" podID="bb377a56-a1dd-4e87-aa0b-64a1308f290b" containerID="b06f8d62c8a168e55404cbf3e650821c3e5e8bdc33b8cb262db24d43eb21697b" exitCode=0 Jan 20 19:40:31 crc kubenswrapper[4558]: I0120 19:40:31.364956 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hwrwk" event={"ID":"bb377a56-a1dd-4e87-aa0b-64a1308f290b","Type":"ContainerDied","Data":"b06f8d62c8a168e55404cbf3e650821c3e5e8bdc33b8cb262db24d43eb21697b"} Jan 20 19:40:31 crc kubenswrapper[4558]: I0120 19:40:31.366941 4558 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 20 19:40:32 crc kubenswrapper[4558]: I0120 19:40:32.378119 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hwrwk" event={"ID":"bb377a56-a1dd-4e87-aa0b-64a1308f290b","Type":"ContainerStarted","Data":"38056e5ca67e336380dd7b8114f9c8b39a9adc1125d0179f8cd2763643f887fc"} Jan 20 19:40:33 crc kubenswrapper[4558]: I0120 19:40:33.388284 4558 generic.go:334] "Generic (PLEG): container finished" podID="bb377a56-a1dd-4e87-aa0b-64a1308f290b" containerID="38056e5ca67e336380dd7b8114f9c8b39a9adc1125d0179f8cd2763643f887fc" exitCode=0 Jan 20 19:40:33 crc kubenswrapper[4558]: I0120 19:40:33.388357 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/certified-operators-hwrwk" event={"ID":"bb377a56-a1dd-4e87-aa0b-64a1308f290b","Type":"ContainerDied","Data":"38056e5ca67e336380dd7b8114f9c8b39a9adc1125d0179f8cd2763643f887fc"} Jan 20 19:40:34 crc kubenswrapper[4558]: I0120 19:40:34.399615 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hwrwk" event={"ID":"bb377a56-a1dd-4e87-aa0b-64a1308f290b","Type":"ContainerStarted","Data":"89820585a5e5970ed9d1686e456f158bd51c4041906ad88e0119c01aa5f749f9"} Jan 20 19:40:34 crc kubenswrapper[4558]: I0120 19:40:34.421615 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-hwrwk" podStartSLOduration=2.9345547 podStartE2EDuration="5.421594307s" podCreationTimestamp="2026-01-20 19:40:29 +0000 UTC" firstStartedPulling="2026-01-20 19:40:31.366598364 +0000 UTC m=+10725.126936332" lastFinishedPulling="2026-01-20 19:40:33.853637971 +0000 UTC m=+10727.613975939" observedRunningTime="2026-01-20 19:40:34.416307457 +0000 UTC m=+10728.176645424" watchObservedRunningTime="2026-01-20 19:40:34.421594307 +0000 UTC m=+10728.181932274" Jan 20 19:40:39 crc kubenswrapper[4558]: I0120 19:40:39.924282 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-hwrwk" Jan 20 19:40:39 crc kubenswrapper[4558]: I0120 19:40:39.926288 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-hwrwk" Jan 20 19:40:39 crc kubenswrapper[4558]: I0120 19:40:39.960307 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-hwrwk" Jan 20 19:40:40 crc kubenswrapper[4558]: I0120 19:40:40.472756 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-hwrwk" Jan 20 19:40:40 crc kubenswrapper[4558]: I0120 19:40:40.508895 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hwrwk"] Jan 20 19:40:42 crc kubenswrapper[4558]: I0120 19:40:42.469348 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-hwrwk" podUID="bb377a56-a1dd-4e87-aa0b-64a1308f290b" containerName="registry-server" containerID="cri-o://89820585a5e5970ed9d1686e456f158bd51c4041906ad88e0119c01aa5f749f9" gracePeriod=2 Jan 20 19:40:42 crc kubenswrapper[4558]: I0120 19:40:42.785856 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-hwrwk" Jan 20 19:40:42 crc kubenswrapper[4558]: I0120 19:40:42.975283 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb377a56-a1dd-4e87-aa0b-64a1308f290b-catalog-content\") pod \"bb377a56-a1dd-4e87-aa0b-64a1308f290b\" (UID: \"bb377a56-a1dd-4e87-aa0b-64a1308f290b\") " Jan 20 19:40:42 crc kubenswrapper[4558]: I0120 19:40:42.975494 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k7fmz\" (UniqueName: \"kubernetes.io/projected/bb377a56-a1dd-4e87-aa0b-64a1308f290b-kube-api-access-k7fmz\") pod \"bb377a56-a1dd-4e87-aa0b-64a1308f290b\" (UID: \"bb377a56-a1dd-4e87-aa0b-64a1308f290b\") " Jan 20 19:40:42 crc kubenswrapper[4558]: I0120 19:40:42.975610 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb377a56-a1dd-4e87-aa0b-64a1308f290b-utilities\") pod \"bb377a56-a1dd-4e87-aa0b-64a1308f290b\" (UID: \"bb377a56-a1dd-4e87-aa0b-64a1308f290b\") " Jan 20 19:40:42 crc kubenswrapper[4558]: I0120 19:40:42.976587 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bb377a56-a1dd-4e87-aa0b-64a1308f290b-utilities" (OuterVolumeSpecName: "utilities") pod "bb377a56-a1dd-4e87-aa0b-64a1308f290b" (UID: "bb377a56-a1dd-4e87-aa0b-64a1308f290b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 19:40:42 crc kubenswrapper[4558]: I0120 19:40:42.977524 4558 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bb377a56-a1dd-4e87-aa0b-64a1308f290b-utilities\") on node \"crc\" DevicePath \"\"" Jan 20 19:40:42 crc kubenswrapper[4558]: I0120 19:40:42.981818 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bb377a56-a1dd-4e87-aa0b-64a1308f290b-kube-api-access-k7fmz" (OuterVolumeSpecName: "kube-api-access-k7fmz") pod "bb377a56-a1dd-4e87-aa0b-64a1308f290b" (UID: "bb377a56-a1dd-4e87-aa0b-64a1308f290b"). InnerVolumeSpecName "kube-api-access-k7fmz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 19:40:43 crc kubenswrapper[4558]: I0120 19:40:43.022757 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bb377a56-a1dd-4e87-aa0b-64a1308f290b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bb377a56-a1dd-4e87-aa0b-64a1308f290b" (UID: "bb377a56-a1dd-4e87-aa0b-64a1308f290b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 19:40:43 crc kubenswrapper[4558]: I0120 19:40:43.078692 4558 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bb377a56-a1dd-4e87-aa0b-64a1308f290b-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 20 19:40:43 crc kubenswrapper[4558]: I0120 19:40:43.078758 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k7fmz\" (UniqueName: \"kubernetes.io/projected/bb377a56-a1dd-4e87-aa0b-64a1308f290b-kube-api-access-k7fmz\") on node \"crc\" DevicePath \"\"" Jan 20 19:40:43 crc kubenswrapper[4558]: I0120 19:40:43.477053 4558 generic.go:334] "Generic (PLEG): container finished" podID="bb377a56-a1dd-4e87-aa0b-64a1308f290b" containerID="89820585a5e5970ed9d1686e456f158bd51c4041906ad88e0119c01aa5f749f9" exitCode=0 Jan 20 19:40:43 crc kubenswrapper[4558]: I0120 19:40:43.477101 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hwrwk" event={"ID":"bb377a56-a1dd-4e87-aa0b-64a1308f290b","Type":"ContainerDied","Data":"89820585a5e5970ed9d1686e456f158bd51c4041906ad88e0119c01aa5f749f9"} Jan 20 19:40:43 crc kubenswrapper[4558]: I0120 19:40:43.477132 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hwrwk" event={"ID":"bb377a56-a1dd-4e87-aa0b-64a1308f290b","Type":"ContainerDied","Data":"3360a8dd98e6bc00597d127f4920435002dde467f10c5e4ad8265bb952c1edbe"} Jan 20 19:40:43 crc kubenswrapper[4558]: I0120 19:40:43.477150 4558 scope.go:117] "RemoveContainer" containerID="89820585a5e5970ed9d1686e456f158bd51c4041906ad88e0119c01aa5f749f9" Jan 20 19:40:43 crc kubenswrapper[4558]: I0120 19:40:43.477302 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-hwrwk" Jan 20 19:40:43 crc kubenswrapper[4558]: I0120 19:40:43.508885 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hwrwk"] Jan 20 19:40:43 crc kubenswrapper[4558]: I0120 19:40:43.508898 4558 scope.go:117] "RemoveContainer" containerID="38056e5ca67e336380dd7b8114f9c8b39a9adc1125d0179f8cd2763643f887fc" Jan 20 19:40:43 crc kubenswrapper[4558]: I0120 19:40:43.514706 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-hwrwk"] Jan 20 19:40:43 crc kubenswrapper[4558]: I0120 19:40:43.550816 4558 scope.go:117] "RemoveContainer" containerID="b06f8d62c8a168e55404cbf3e650821c3e5e8bdc33b8cb262db24d43eb21697b" Jan 20 19:40:43 crc kubenswrapper[4558]: I0120 19:40:43.582011 4558 scope.go:117] "RemoveContainer" containerID="89820585a5e5970ed9d1686e456f158bd51c4041906ad88e0119c01aa5f749f9" Jan 20 19:40:43 crc kubenswrapper[4558]: E0120 19:40:43.591175 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"89820585a5e5970ed9d1686e456f158bd51c4041906ad88e0119c01aa5f749f9\": container with ID starting with 89820585a5e5970ed9d1686e456f158bd51c4041906ad88e0119c01aa5f749f9 not found: ID does not exist" containerID="89820585a5e5970ed9d1686e456f158bd51c4041906ad88e0119c01aa5f749f9" Jan 20 19:40:43 crc kubenswrapper[4558]: I0120 19:40:43.591211 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"89820585a5e5970ed9d1686e456f158bd51c4041906ad88e0119c01aa5f749f9"} err="failed to get container status \"89820585a5e5970ed9d1686e456f158bd51c4041906ad88e0119c01aa5f749f9\": rpc error: code = NotFound desc = could not find container \"89820585a5e5970ed9d1686e456f158bd51c4041906ad88e0119c01aa5f749f9\": container with ID starting with 89820585a5e5970ed9d1686e456f158bd51c4041906ad88e0119c01aa5f749f9 not found: ID does not exist" Jan 20 19:40:43 crc kubenswrapper[4558]: I0120 19:40:43.591234 4558 scope.go:117] "RemoveContainer" containerID="38056e5ca67e336380dd7b8114f9c8b39a9adc1125d0179f8cd2763643f887fc" Jan 20 19:40:43 crc kubenswrapper[4558]: E0120 19:40:43.591832 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"38056e5ca67e336380dd7b8114f9c8b39a9adc1125d0179f8cd2763643f887fc\": container with ID starting with 38056e5ca67e336380dd7b8114f9c8b39a9adc1125d0179f8cd2763643f887fc not found: ID does not exist" containerID="38056e5ca67e336380dd7b8114f9c8b39a9adc1125d0179f8cd2763643f887fc" Jan 20 19:40:43 crc kubenswrapper[4558]: I0120 19:40:43.591878 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"38056e5ca67e336380dd7b8114f9c8b39a9adc1125d0179f8cd2763643f887fc"} err="failed to get container status \"38056e5ca67e336380dd7b8114f9c8b39a9adc1125d0179f8cd2763643f887fc\": rpc error: code = NotFound desc = could not find container \"38056e5ca67e336380dd7b8114f9c8b39a9adc1125d0179f8cd2763643f887fc\": container with ID starting with 38056e5ca67e336380dd7b8114f9c8b39a9adc1125d0179f8cd2763643f887fc not found: ID does not exist" Jan 20 19:40:43 crc kubenswrapper[4558]: I0120 19:40:43.591909 4558 scope.go:117] "RemoveContainer" containerID="b06f8d62c8a168e55404cbf3e650821c3e5e8bdc33b8cb262db24d43eb21697b" Jan 20 19:40:43 crc kubenswrapper[4558]: E0120 19:40:43.592506 4558 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"b06f8d62c8a168e55404cbf3e650821c3e5e8bdc33b8cb262db24d43eb21697b\": container with ID starting with b06f8d62c8a168e55404cbf3e650821c3e5e8bdc33b8cb262db24d43eb21697b not found: ID does not exist" containerID="b06f8d62c8a168e55404cbf3e650821c3e5e8bdc33b8cb262db24d43eb21697b" Jan 20 19:40:43 crc kubenswrapper[4558]: I0120 19:40:43.592595 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b06f8d62c8a168e55404cbf3e650821c3e5e8bdc33b8cb262db24d43eb21697b"} err="failed to get container status \"b06f8d62c8a168e55404cbf3e650821c3e5e8bdc33b8cb262db24d43eb21697b\": rpc error: code = NotFound desc = could not find container \"b06f8d62c8a168e55404cbf3e650821c3e5e8bdc33b8cb262db24d43eb21697b\": container with ID starting with b06f8d62c8a168e55404cbf3e650821c3e5e8bdc33b8cb262db24d43eb21697b not found: ID does not exist" Jan 20 19:40:44 crc kubenswrapper[4558]: I0120 19:40:44.573787 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bb377a56-a1dd-4e87-aa0b-64a1308f290b" path="/var/lib/kubelet/pods/bb377a56-a1dd-4e87-aa0b-64a1308f290b/volumes" Jan 20 19:42:27 crc kubenswrapper[4558]: I0120 19:42:27.330960 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 19:42:27 crc kubenswrapper[4558]: I0120 19:42:27.331691 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 19:42:57 crc kubenswrapper[4558]: I0120 19:42:57.330371 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 19:42:57 crc kubenswrapper[4558]: I0120 19:42:57.331112 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 19:43:27 crc kubenswrapper[4558]: I0120 19:43:27.330079 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 19:43:27 crc kubenswrapper[4558]: I0120 19:43:27.330534 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 19:43:27 crc kubenswrapper[4558]: I0120 19:43:27.330578 4558 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" 
status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" Jan 20 19:43:27 crc kubenswrapper[4558]: I0120 19:43:27.331117 4558 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c9f1ef733c7c7a9f4b483629db0474b41245df77ed9c7ff44058afb88192b813"} pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 20 19:43:27 crc kubenswrapper[4558]: I0120 19:43:27.331186 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" containerID="cri-o://c9f1ef733c7c7a9f4b483629db0474b41245df77ed9c7ff44058afb88192b813" gracePeriod=600 Jan 20 19:43:27 crc kubenswrapper[4558]: I0120 19:43:27.587790 4558 generic.go:334] "Generic (PLEG): container finished" podID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerID="c9f1ef733c7c7a9f4b483629db0474b41245df77ed9c7ff44058afb88192b813" exitCode=0 Jan 20 19:43:27 crc kubenswrapper[4558]: I0120 19:43:27.587839 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerDied","Data":"c9f1ef733c7c7a9f4b483629db0474b41245df77ed9c7ff44058afb88192b813"} Jan 20 19:43:27 crc kubenswrapper[4558]: I0120 19:43:27.587869 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerStarted","Data":"7787f59ff7454681fc282d06e7c92824f0d53eec7abb7bc051c812fd67aa7fed"} Jan 20 19:43:27 crc kubenswrapper[4558]: I0120 19:43:27.587894 4558 scope.go:117] "RemoveContainer" containerID="08f0a41ff708380b079ac76e9307c7eeefc97e2c465ca694068ed4284847bc34" Jan 20 19:44:35 crc kubenswrapper[4558]: I0120 19:44:35.937082 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-f5l6f"] Jan 20 19:44:35 crc kubenswrapper[4558]: E0120 19:44:35.937825 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb377a56-a1dd-4e87-aa0b-64a1308f290b" containerName="extract-content" Jan 20 19:44:35 crc kubenswrapper[4558]: I0120 19:44:35.937838 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb377a56-a1dd-4e87-aa0b-64a1308f290b" containerName="extract-content" Jan 20 19:44:35 crc kubenswrapper[4558]: E0120 19:44:35.937851 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb377a56-a1dd-4e87-aa0b-64a1308f290b" containerName="registry-server" Jan 20 19:44:35 crc kubenswrapper[4558]: I0120 19:44:35.937857 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb377a56-a1dd-4e87-aa0b-64a1308f290b" containerName="registry-server" Jan 20 19:44:35 crc kubenswrapper[4558]: E0120 19:44:35.937872 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb377a56-a1dd-4e87-aa0b-64a1308f290b" containerName="extract-utilities" Jan 20 19:44:35 crc kubenswrapper[4558]: I0120 19:44:35.937878 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb377a56-a1dd-4e87-aa0b-64a1308f290b" containerName="extract-utilities" Jan 20 19:44:35 crc kubenswrapper[4558]: I0120 19:44:35.937988 4558 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="bb377a56-a1dd-4e87-aa0b-64a1308f290b" containerName="registry-server" Jan 20 19:44:35 crc kubenswrapper[4558]: I0120 19:44:35.938830 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-f5l6f" Jan 20 19:44:35 crc kubenswrapper[4558]: I0120 19:44:35.951135 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-f5l6f"] Jan 20 19:44:35 crc kubenswrapper[4558]: I0120 19:44:35.984340 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0eb199a7-f7a8-4100-96c0-2c10e6dc7ce2-catalog-content\") pod \"community-operators-f5l6f\" (UID: \"0eb199a7-f7a8-4100-96c0-2c10e6dc7ce2\") " pod="openshift-marketplace/community-operators-f5l6f" Jan 20 19:44:35 crc kubenswrapper[4558]: I0120 19:44:35.984395 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0eb199a7-f7a8-4100-96c0-2c10e6dc7ce2-utilities\") pod \"community-operators-f5l6f\" (UID: \"0eb199a7-f7a8-4100-96c0-2c10e6dc7ce2\") " pod="openshift-marketplace/community-operators-f5l6f" Jan 20 19:44:35 crc kubenswrapper[4558]: I0120 19:44:35.984482 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-75q96\" (UniqueName: \"kubernetes.io/projected/0eb199a7-f7a8-4100-96c0-2c10e6dc7ce2-kube-api-access-75q96\") pod \"community-operators-f5l6f\" (UID: \"0eb199a7-f7a8-4100-96c0-2c10e6dc7ce2\") " pod="openshift-marketplace/community-operators-f5l6f" Jan 20 19:44:36 crc kubenswrapper[4558]: I0120 19:44:36.087146 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0eb199a7-f7a8-4100-96c0-2c10e6dc7ce2-catalog-content\") pod \"community-operators-f5l6f\" (UID: \"0eb199a7-f7a8-4100-96c0-2c10e6dc7ce2\") " pod="openshift-marketplace/community-operators-f5l6f" Jan 20 19:44:36 crc kubenswrapper[4558]: I0120 19:44:36.087222 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0eb199a7-f7a8-4100-96c0-2c10e6dc7ce2-utilities\") pod \"community-operators-f5l6f\" (UID: \"0eb199a7-f7a8-4100-96c0-2c10e6dc7ce2\") " pod="openshift-marketplace/community-operators-f5l6f" Jan 20 19:44:36 crc kubenswrapper[4558]: I0120 19:44:36.087378 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-75q96\" (UniqueName: \"kubernetes.io/projected/0eb199a7-f7a8-4100-96c0-2c10e6dc7ce2-kube-api-access-75q96\") pod \"community-operators-f5l6f\" (UID: \"0eb199a7-f7a8-4100-96c0-2c10e6dc7ce2\") " pod="openshift-marketplace/community-operators-f5l6f" Jan 20 19:44:36 crc kubenswrapper[4558]: I0120 19:44:36.087750 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0eb199a7-f7a8-4100-96c0-2c10e6dc7ce2-catalog-content\") pod \"community-operators-f5l6f\" (UID: \"0eb199a7-f7a8-4100-96c0-2c10e6dc7ce2\") " pod="openshift-marketplace/community-operators-f5l6f" Jan 20 19:44:36 crc kubenswrapper[4558]: I0120 19:44:36.087784 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0eb199a7-f7a8-4100-96c0-2c10e6dc7ce2-utilities\") pod \"community-operators-f5l6f\" (UID: 
\"0eb199a7-f7a8-4100-96c0-2c10e6dc7ce2\") " pod="openshift-marketplace/community-operators-f5l6f" Jan 20 19:44:36 crc kubenswrapper[4558]: I0120 19:44:36.110537 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-75q96\" (UniqueName: \"kubernetes.io/projected/0eb199a7-f7a8-4100-96c0-2c10e6dc7ce2-kube-api-access-75q96\") pod \"community-operators-f5l6f\" (UID: \"0eb199a7-f7a8-4100-96c0-2c10e6dc7ce2\") " pod="openshift-marketplace/community-operators-f5l6f" Jan 20 19:44:36 crc kubenswrapper[4558]: I0120 19:44:36.256091 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-f5l6f" Jan 20 19:44:36 crc kubenswrapper[4558]: I0120 19:44:36.718072 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-f5l6f"] Jan 20 19:44:37 crc kubenswrapper[4558]: I0120 19:44:37.063665 4558 generic.go:334] "Generic (PLEG): container finished" podID="0eb199a7-f7a8-4100-96c0-2c10e6dc7ce2" containerID="2241bcbe72fa5275a09a7ad75789d6f1ac2b70ea09ed01c09b76e8e50748987c" exitCode=0 Jan 20 19:44:37 crc kubenswrapper[4558]: I0120 19:44:37.063931 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f5l6f" event={"ID":"0eb199a7-f7a8-4100-96c0-2c10e6dc7ce2","Type":"ContainerDied","Data":"2241bcbe72fa5275a09a7ad75789d6f1ac2b70ea09ed01c09b76e8e50748987c"} Jan 20 19:44:37 crc kubenswrapper[4558]: I0120 19:44:37.063960 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f5l6f" event={"ID":"0eb199a7-f7a8-4100-96c0-2c10e6dc7ce2","Type":"ContainerStarted","Data":"8452568b939e035a4f337a612a419627580cbc4c357590fc83e3181a7608ace1"} Jan 20 19:44:38 crc kubenswrapper[4558]: I0120 19:44:38.073446 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f5l6f" event={"ID":"0eb199a7-f7a8-4100-96c0-2c10e6dc7ce2","Type":"ContainerStarted","Data":"a97d62fa0cd132ea0f85af504f891d0047cb769157d316c39b072726e5df911d"} Jan 20 19:44:39 crc kubenswrapper[4558]: I0120 19:44:39.084539 4558 generic.go:334] "Generic (PLEG): container finished" podID="0eb199a7-f7a8-4100-96c0-2c10e6dc7ce2" containerID="a97d62fa0cd132ea0f85af504f891d0047cb769157d316c39b072726e5df911d" exitCode=0 Jan 20 19:44:39 crc kubenswrapper[4558]: I0120 19:44:39.084600 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f5l6f" event={"ID":"0eb199a7-f7a8-4100-96c0-2c10e6dc7ce2","Type":"ContainerDied","Data":"a97d62fa0cd132ea0f85af504f891d0047cb769157d316c39b072726e5df911d"} Jan 20 19:44:40 crc kubenswrapper[4558]: I0120 19:44:40.101218 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f5l6f" event={"ID":"0eb199a7-f7a8-4100-96c0-2c10e6dc7ce2","Type":"ContainerStarted","Data":"58de78b30d124d69476b0c2db0b9b864dd9fe5849e9628e1bd7b854fda67c973"} Jan 20 19:44:40 crc kubenswrapper[4558]: I0120 19:44:40.126985 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-f5l6f" podStartSLOduration=2.590916034 podStartE2EDuration="5.126964192s" podCreationTimestamp="2026-01-20 19:44:35 +0000 UTC" firstStartedPulling="2026-01-20 19:44:37.066583054 +0000 UTC m=+10970.826921021" lastFinishedPulling="2026-01-20 19:44:39.602631211 +0000 UTC m=+10973.362969179" observedRunningTime="2026-01-20 19:44:40.126071402 +0000 UTC m=+10973.886409370" 
watchObservedRunningTime="2026-01-20 19:44:40.126964192 +0000 UTC m=+10973.887302159" Jan 20 19:44:46 crc kubenswrapper[4558]: I0120 19:44:46.257276 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-f5l6f" Jan 20 19:44:46 crc kubenswrapper[4558]: I0120 19:44:46.258580 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-f5l6f" Jan 20 19:44:46 crc kubenswrapper[4558]: I0120 19:44:46.293362 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-f5l6f" Jan 20 19:44:47 crc kubenswrapper[4558]: I0120 19:44:47.200835 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-f5l6f" Jan 20 19:44:47 crc kubenswrapper[4558]: I0120 19:44:47.255105 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-f5l6f"] Jan 20 19:44:49 crc kubenswrapper[4558]: I0120 19:44:49.177710 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-f5l6f" podUID="0eb199a7-f7a8-4100-96c0-2c10e6dc7ce2" containerName="registry-server" containerID="cri-o://58de78b30d124d69476b0c2db0b9b864dd9fe5849e9628e1bd7b854fda67c973" gracePeriod=2 Jan 20 19:44:49 crc kubenswrapper[4558]: I0120 19:44:49.488892 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-f5l6f" Jan 20 19:44:49 crc kubenswrapper[4558]: I0120 19:44:49.662650 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0eb199a7-f7a8-4100-96c0-2c10e6dc7ce2-catalog-content\") pod \"0eb199a7-f7a8-4100-96c0-2c10e6dc7ce2\" (UID: \"0eb199a7-f7a8-4100-96c0-2c10e6dc7ce2\") " Jan 20 19:44:49 crc kubenswrapper[4558]: I0120 19:44:49.662785 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-75q96\" (UniqueName: \"kubernetes.io/projected/0eb199a7-f7a8-4100-96c0-2c10e6dc7ce2-kube-api-access-75q96\") pod \"0eb199a7-f7a8-4100-96c0-2c10e6dc7ce2\" (UID: \"0eb199a7-f7a8-4100-96c0-2c10e6dc7ce2\") " Jan 20 19:44:49 crc kubenswrapper[4558]: I0120 19:44:49.662946 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0eb199a7-f7a8-4100-96c0-2c10e6dc7ce2-utilities\") pod \"0eb199a7-f7a8-4100-96c0-2c10e6dc7ce2\" (UID: \"0eb199a7-f7a8-4100-96c0-2c10e6dc7ce2\") " Jan 20 19:44:49 crc kubenswrapper[4558]: I0120 19:44:49.663776 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0eb199a7-f7a8-4100-96c0-2c10e6dc7ce2-utilities" (OuterVolumeSpecName: "utilities") pod "0eb199a7-f7a8-4100-96c0-2c10e6dc7ce2" (UID: "0eb199a7-f7a8-4100-96c0-2c10e6dc7ce2"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 19:44:49 crc kubenswrapper[4558]: I0120 19:44:49.664459 4558 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0eb199a7-f7a8-4100-96c0-2c10e6dc7ce2-utilities\") on node \"crc\" DevicePath \"\"" Jan 20 19:44:49 crc kubenswrapper[4558]: I0120 19:44:49.669588 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0eb199a7-f7a8-4100-96c0-2c10e6dc7ce2-kube-api-access-75q96" (OuterVolumeSpecName: "kube-api-access-75q96") pod "0eb199a7-f7a8-4100-96c0-2c10e6dc7ce2" (UID: "0eb199a7-f7a8-4100-96c0-2c10e6dc7ce2"). InnerVolumeSpecName "kube-api-access-75q96". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 19:44:49 crc kubenswrapper[4558]: I0120 19:44:49.708119 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0eb199a7-f7a8-4100-96c0-2c10e6dc7ce2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0eb199a7-f7a8-4100-96c0-2c10e6dc7ce2" (UID: "0eb199a7-f7a8-4100-96c0-2c10e6dc7ce2"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 19:44:49 crc kubenswrapper[4558]: I0120 19:44:49.766754 4558 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0eb199a7-f7a8-4100-96c0-2c10e6dc7ce2-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 20 19:44:49 crc kubenswrapper[4558]: I0120 19:44:49.766805 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-75q96\" (UniqueName: \"kubernetes.io/projected/0eb199a7-f7a8-4100-96c0-2c10e6dc7ce2-kube-api-access-75q96\") on node \"crc\" DevicePath \"\"" Jan 20 19:44:50 crc kubenswrapper[4558]: I0120 19:44:50.187043 4558 generic.go:334] "Generic (PLEG): container finished" podID="0eb199a7-f7a8-4100-96c0-2c10e6dc7ce2" containerID="58de78b30d124d69476b0c2db0b9b864dd9fe5849e9628e1bd7b854fda67c973" exitCode=0 Jan 20 19:44:50 crc kubenswrapper[4558]: I0120 19:44:50.187115 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-f5l6f" Jan 20 19:44:50 crc kubenswrapper[4558]: I0120 19:44:50.187112 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f5l6f" event={"ID":"0eb199a7-f7a8-4100-96c0-2c10e6dc7ce2","Type":"ContainerDied","Data":"58de78b30d124d69476b0c2db0b9b864dd9fe5849e9628e1bd7b854fda67c973"} Jan 20 19:44:50 crc kubenswrapper[4558]: I0120 19:44:50.187209 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f5l6f" event={"ID":"0eb199a7-f7a8-4100-96c0-2c10e6dc7ce2","Type":"ContainerDied","Data":"8452568b939e035a4f337a612a419627580cbc4c357590fc83e3181a7608ace1"} Jan 20 19:44:50 crc kubenswrapper[4558]: I0120 19:44:50.187244 4558 scope.go:117] "RemoveContainer" containerID="58de78b30d124d69476b0c2db0b9b864dd9fe5849e9628e1bd7b854fda67c973" Jan 20 19:44:50 crc kubenswrapper[4558]: I0120 19:44:50.229345 4558 scope.go:117] "RemoveContainer" containerID="a97d62fa0cd132ea0f85af504f891d0047cb769157d316c39b072726e5df911d" Jan 20 19:44:50 crc kubenswrapper[4558]: I0120 19:44:50.229858 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-f5l6f"] Jan 20 19:44:50 crc kubenswrapper[4558]: I0120 19:44:50.234921 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-f5l6f"] Jan 20 19:44:50 crc kubenswrapper[4558]: I0120 19:44:50.245440 4558 scope.go:117] "RemoveContainer" containerID="2241bcbe72fa5275a09a7ad75789d6f1ac2b70ea09ed01c09b76e8e50748987c" Jan 20 19:44:50 crc kubenswrapper[4558]: I0120 19:44:50.258785 4558 scope.go:117] "RemoveContainer" containerID="58de78b30d124d69476b0c2db0b9b864dd9fe5849e9628e1bd7b854fda67c973" Jan 20 19:44:50 crc kubenswrapper[4558]: E0120 19:44:50.259086 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"58de78b30d124d69476b0c2db0b9b864dd9fe5849e9628e1bd7b854fda67c973\": container with ID starting with 58de78b30d124d69476b0c2db0b9b864dd9fe5849e9628e1bd7b854fda67c973 not found: ID does not exist" containerID="58de78b30d124d69476b0c2db0b9b864dd9fe5849e9628e1bd7b854fda67c973" Jan 20 19:44:50 crc kubenswrapper[4558]: I0120 19:44:50.259123 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"58de78b30d124d69476b0c2db0b9b864dd9fe5849e9628e1bd7b854fda67c973"} err="failed to get container status \"58de78b30d124d69476b0c2db0b9b864dd9fe5849e9628e1bd7b854fda67c973\": rpc error: code = NotFound desc = could not find container \"58de78b30d124d69476b0c2db0b9b864dd9fe5849e9628e1bd7b854fda67c973\": container with ID starting with 58de78b30d124d69476b0c2db0b9b864dd9fe5849e9628e1bd7b854fda67c973 not found: ID does not exist" Jan 20 19:44:50 crc kubenswrapper[4558]: I0120 19:44:50.259148 4558 scope.go:117] "RemoveContainer" containerID="a97d62fa0cd132ea0f85af504f891d0047cb769157d316c39b072726e5df911d" Jan 20 19:44:50 crc kubenswrapper[4558]: E0120 19:44:50.259340 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a97d62fa0cd132ea0f85af504f891d0047cb769157d316c39b072726e5df911d\": container with ID starting with a97d62fa0cd132ea0f85af504f891d0047cb769157d316c39b072726e5df911d not found: ID does not exist" containerID="a97d62fa0cd132ea0f85af504f891d0047cb769157d316c39b072726e5df911d" Jan 20 19:44:50 crc kubenswrapper[4558]: I0120 19:44:50.259364 4558 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a97d62fa0cd132ea0f85af504f891d0047cb769157d316c39b072726e5df911d"} err="failed to get container status \"a97d62fa0cd132ea0f85af504f891d0047cb769157d316c39b072726e5df911d\": rpc error: code = NotFound desc = could not find container \"a97d62fa0cd132ea0f85af504f891d0047cb769157d316c39b072726e5df911d\": container with ID starting with a97d62fa0cd132ea0f85af504f891d0047cb769157d316c39b072726e5df911d not found: ID does not exist" Jan 20 19:44:50 crc kubenswrapper[4558]: I0120 19:44:50.259378 4558 scope.go:117] "RemoveContainer" containerID="2241bcbe72fa5275a09a7ad75789d6f1ac2b70ea09ed01c09b76e8e50748987c" Jan 20 19:44:50 crc kubenswrapper[4558]: E0120 19:44:50.259524 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2241bcbe72fa5275a09a7ad75789d6f1ac2b70ea09ed01c09b76e8e50748987c\": container with ID starting with 2241bcbe72fa5275a09a7ad75789d6f1ac2b70ea09ed01c09b76e8e50748987c not found: ID does not exist" containerID="2241bcbe72fa5275a09a7ad75789d6f1ac2b70ea09ed01c09b76e8e50748987c" Jan 20 19:44:50 crc kubenswrapper[4558]: I0120 19:44:50.259545 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2241bcbe72fa5275a09a7ad75789d6f1ac2b70ea09ed01c09b76e8e50748987c"} err="failed to get container status \"2241bcbe72fa5275a09a7ad75789d6f1ac2b70ea09ed01c09b76e8e50748987c\": rpc error: code = NotFound desc = could not find container \"2241bcbe72fa5275a09a7ad75789d6f1ac2b70ea09ed01c09b76e8e50748987c\": container with ID starting with 2241bcbe72fa5275a09a7ad75789d6f1ac2b70ea09ed01c09b76e8e50748987c not found: ID does not exist" Jan 20 19:44:50 crc kubenswrapper[4558]: I0120 19:44:50.574869 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0eb199a7-f7a8-4100-96c0-2c10e6dc7ce2" path="/var/lib/kubelet/pods/0eb199a7-f7a8-4100-96c0-2c10e6dc7ce2/volumes" Jan 20 19:45:00 crc kubenswrapper[4558]: I0120 19:45:00.140473 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29482305-xvdt6"] Jan 20 19:45:00 crc kubenswrapper[4558]: E0120 19:45:00.142466 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0eb199a7-f7a8-4100-96c0-2c10e6dc7ce2" containerName="registry-server" Jan 20 19:45:00 crc kubenswrapper[4558]: I0120 19:45:00.142484 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0eb199a7-f7a8-4100-96c0-2c10e6dc7ce2" containerName="registry-server" Jan 20 19:45:00 crc kubenswrapper[4558]: E0120 19:45:00.142508 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0eb199a7-f7a8-4100-96c0-2c10e6dc7ce2" containerName="extract-content" Jan 20 19:45:00 crc kubenswrapper[4558]: I0120 19:45:00.142513 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0eb199a7-f7a8-4100-96c0-2c10e6dc7ce2" containerName="extract-content" Jan 20 19:45:00 crc kubenswrapper[4558]: E0120 19:45:00.142530 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0eb199a7-f7a8-4100-96c0-2c10e6dc7ce2" containerName="extract-utilities" Jan 20 19:45:00 crc kubenswrapper[4558]: I0120 19:45:00.142536 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="0eb199a7-f7a8-4100-96c0-2c10e6dc7ce2" containerName="extract-utilities" Jan 20 19:45:00 crc kubenswrapper[4558]: I0120 19:45:00.142653 4558 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="0eb199a7-f7a8-4100-96c0-2c10e6dc7ce2" containerName="registry-server" Jan 20 19:45:00 crc kubenswrapper[4558]: I0120 19:45:00.144669 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29482305-xvdt6" Jan 20 19:45:00 crc kubenswrapper[4558]: I0120 19:45:00.147327 4558 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 20 19:45:00 crc kubenswrapper[4558]: I0120 19:45:00.148469 4558 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 20 19:45:00 crc kubenswrapper[4558]: I0120 19:45:00.155563 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29482305-xvdt6"] Jan 20 19:45:00 crc kubenswrapper[4558]: I0120 19:45:00.226989 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ace43de8-9e59-4d8d-8c6a-376177aefded-config-volume\") pod \"collect-profiles-29482305-xvdt6\" (UID: \"ace43de8-9e59-4d8d-8c6a-376177aefded\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482305-xvdt6" Jan 20 19:45:00 crc kubenswrapper[4558]: I0120 19:45:00.227061 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ace43de8-9e59-4d8d-8c6a-376177aefded-secret-volume\") pod \"collect-profiles-29482305-xvdt6\" (UID: \"ace43de8-9e59-4d8d-8c6a-376177aefded\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482305-xvdt6" Jan 20 19:45:00 crc kubenswrapper[4558]: I0120 19:45:00.227100 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-shjbv\" (UniqueName: \"kubernetes.io/projected/ace43de8-9e59-4d8d-8c6a-376177aefded-kube-api-access-shjbv\") pod \"collect-profiles-29482305-xvdt6\" (UID: \"ace43de8-9e59-4d8d-8c6a-376177aefded\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482305-xvdt6" Jan 20 19:45:00 crc kubenswrapper[4558]: I0120 19:45:00.327929 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ace43de8-9e59-4d8d-8c6a-376177aefded-config-volume\") pod \"collect-profiles-29482305-xvdt6\" (UID: \"ace43de8-9e59-4d8d-8c6a-376177aefded\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482305-xvdt6" Jan 20 19:45:00 crc kubenswrapper[4558]: I0120 19:45:00.327975 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ace43de8-9e59-4d8d-8c6a-376177aefded-secret-volume\") pod \"collect-profiles-29482305-xvdt6\" (UID: \"ace43de8-9e59-4d8d-8c6a-376177aefded\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482305-xvdt6" Jan 20 19:45:00 crc kubenswrapper[4558]: I0120 19:45:00.328010 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-shjbv\" (UniqueName: \"kubernetes.io/projected/ace43de8-9e59-4d8d-8c6a-376177aefded-kube-api-access-shjbv\") pod \"collect-profiles-29482305-xvdt6\" (UID: \"ace43de8-9e59-4d8d-8c6a-376177aefded\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482305-xvdt6" Jan 20 19:45:00 crc kubenswrapper[4558]: I0120 19:45:00.329080 
4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ace43de8-9e59-4d8d-8c6a-376177aefded-config-volume\") pod \"collect-profiles-29482305-xvdt6\" (UID: \"ace43de8-9e59-4d8d-8c6a-376177aefded\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482305-xvdt6" Jan 20 19:45:00 crc kubenswrapper[4558]: I0120 19:45:00.335710 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ace43de8-9e59-4d8d-8c6a-376177aefded-secret-volume\") pod \"collect-profiles-29482305-xvdt6\" (UID: \"ace43de8-9e59-4d8d-8c6a-376177aefded\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482305-xvdt6" Jan 20 19:45:00 crc kubenswrapper[4558]: I0120 19:45:00.342396 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-shjbv\" (UniqueName: \"kubernetes.io/projected/ace43de8-9e59-4d8d-8c6a-376177aefded-kube-api-access-shjbv\") pod \"collect-profiles-29482305-xvdt6\" (UID: \"ace43de8-9e59-4d8d-8c6a-376177aefded\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29482305-xvdt6" Jan 20 19:45:00 crc kubenswrapper[4558]: I0120 19:45:00.473427 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29482305-xvdt6" Jan 20 19:45:00 crc kubenswrapper[4558]: I0120 19:45:00.870072 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29482305-xvdt6"] Jan 20 19:45:01 crc kubenswrapper[4558]: I0120 19:45:01.272916 4558 generic.go:334] "Generic (PLEG): container finished" podID="ace43de8-9e59-4d8d-8c6a-376177aefded" containerID="516941e60000880a993feb42e209046e4e1b6748dc8da0ffe022cf71d00ccb69" exitCode=0 Jan 20 19:45:01 crc kubenswrapper[4558]: I0120 19:45:01.272977 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29482305-xvdt6" event={"ID":"ace43de8-9e59-4d8d-8c6a-376177aefded","Type":"ContainerDied","Data":"516941e60000880a993feb42e209046e4e1b6748dc8da0ffe022cf71d00ccb69"} Jan 20 19:45:01 crc kubenswrapper[4558]: I0120 19:45:01.273032 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29482305-xvdt6" event={"ID":"ace43de8-9e59-4d8d-8c6a-376177aefded","Type":"ContainerStarted","Data":"84370948f7e7972e5c0b9b6c9a1f0e0b3ea52cce0520c6d03a61708e0e0fc5ab"} Jan 20 19:45:02 crc kubenswrapper[4558]: I0120 19:45:02.502768 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29482305-xvdt6" Jan 20 19:45:02 crc kubenswrapper[4558]: I0120 19:45:02.577618 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-shjbv\" (UniqueName: \"kubernetes.io/projected/ace43de8-9e59-4d8d-8c6a-376177aefded-kube-api-access-shjbv\") pod \"ace43de8-9e59-4d8d-8c6a-376177aefded\" (UID: \"ace43de8-9e59-4d8d-8c6a-376177aefded\") " Jan 20 19:45:02 crc kubenswrapper[4558]: I0120 19:45:02.577800 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ace43de8-9e59-4d8d-8c6a-376177aefded-config-volume\") pod \"ace43de8-9e59-4d8d-8c6a-376177aefded\" (UID: \"ace43de8-9e59-4d8d-8c6a-376177aefded\") " Jan 20 19:45:02 crc kubenswrapper[4558]: I0120 19:45:02.577962 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ace43de8-9e59-4d8d-8c6a-376177aefded-secret-volume\") pod \"ace43de8-9e59-4d8d-8c6a-376177aefded\" (UID: \"ace43de8-9e59-4d8d-8c6a-376177aefded\") " Jan 20 19:45:02 crc kubenswrapper[4558]: I0120 19:45:02.578550 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ace43de8-9e59-4d8d-8c6a-376177aefded-config-volume" (OuterVolumeSpecName: "config-volume") pod "ace43de8-9e59-4d8d-8c6a-376177aefded" (UID: "ace43de8-9e59-4d8d-8c6a-376177aefded"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 20 19:45:02 crc kubenswrapper[4558]: I0120 19:45:02.584220 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ace43de8-9e59-4d8d-8c6a-376177aefded-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "ace43de8-9e59-4d8d-8c6a-376177aefded" (UID: "ace43de8-9e59-4d8d-8c6a-376177aefded"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 20 19:45:02 crc kubenswrapper[4558]: I0120 19:45:02.584392 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ace43de8-9e59-4d8d-8c6a-376177aefded-kube-api-access-shjbv" (OuterVolumeSpecName: "kube-api-access-shjbv") pod "ace43de8-9e59-4d8d-8c6a-376177aefded" (UID: "ace43de8-9e59-4d8d-8c6a-376177aefded"). InnerVolumeSpecName "kube-api-access-shjbv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 19:45:02 crc kubenswrapper[4558]: I0120 19:45:02.680838 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-shjbv\" (UniqueName: \"kubernetes.io/projected/ace43de8-9e59-4d8d-8c6a-376177aefded-kube-api-access-shjbv\") on node \"crc\" DevicePath \"\"" Jan 20 19:45:02 crc kubenswrapper[4558]: I0120 19:45:02.681056 4558 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ace43de8-9e59-4d8d-8c6a-376177aefded-config-volume\") on node \"crc\" DevicePath \"\"" Jan 20 19:45:02 crc kubenswrapper[4558]: I0120 19:45:02.681145 4558 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ace43de8-9e59-4d8d-8c6a-376177aefded-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 20 19:45:03 crc kubenswrapper[4558]: I0120 19:45:03.303053 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29482305-xvdt6" event={"ID":"ace43de8-9e59-4d8d-8c6a-376177aefded","Type":"ContainerDied","Data":"84370948f7e7972e5c0b9b6c9a1f0e0b3ea52cce0520c6d03a61708e0e0fc5ab"} Jan 20 19:45:03 crc kubenswrapper[4558]: I0120 19:45:03.303119 4558 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="84370948f7e7972e5c0b9b6c9a1f0e0b3ea52cce0520c6d03a61708e0e0fc5ab" Jan 20 19:45:03 crc kubenswrapper[4558]: I0120 19:45:03.303123 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29482305-xvdt6" Jan 20 19:45:03 crc kubenswrapper[4558]: I0120 19:45:03.564113 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29482260-422w6"] Jan 20 19:45:03 crc kubenswrapper[4558]: I0120 19:45:03.568314 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29482260-422w6"] Jan 20 19:45:04 crc kubenswrapper[4558]: I0120 19:45:04.590595 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="55dca4f6-b612-43f2-851d-888a552c942b" path="/var/lib/kubelet/pods/55dca4f6-b612-43f2-851d-888a552c942b/volumes" Jan 20 19:45:27 crc kubenswrapper[4558]: I0120 19:45:27.329921 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 19:45:27 crc kubenswrapper[4558]: I0120 19:45:27.330396 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 19:45:41 crc kubenswrapper[4558]: I0120 19:45:41.339961 4558 scope.go:117] "RemoveContainer" containerID="08cf3cfd4f06cdf84d3de2b0e748f5cbc63bedef105d01ccb1c91389e1b1fc3b" Jan 20 19:45:57 crc kubenswrapper[4558]: I0120 19:45:57.329450 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" 
start-of-body= Jan 20 19:45:57 crc kubenswrapper[4558]: I0120 19:45:57.329861 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 19:46:27 crc kubenswrapper[4558]: I0120 19:46:27.329714 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 19:46:27 crc kubenswrapper[4558]: I0120 19:46:27.330388 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 19:46:27 crc kubenswrapper[4558]: I0120 19:46:27.330461 4558 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" Jan 20 19:46:27 crc kubenswrapper[4558]: I0120 19:46:27.332609 4558 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"7787f59ff7454681fc282d06e7c92824f0d53eec7abb7bc051c812fd67aa7fed"} pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 20 19:46:27 crc kubenswrapper[4558]: I0120 19:46:27.332692 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" containerID="cri-o://7787f59ff7454681fc282d06e7c92824f0d53eec7abb7bc051c812fd67aa7fed" gracePeriod=600 Jan 20 19:46:27 crc kubenswrapper[4558]: E0120 19:46:27.460334 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:46:27 crc kubenswrapper[4558]: I0120 19:46:27.888935 4558 generic.go:334] "Generic (PLEG): container finished" podID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerID="7787f59ff7454681fc282d06e7c92824f0d53eec7abb7bc051c812fd67aa7fed" exitCode=0 Jan 20 19:46:27 crc kubenswrapper[4558]: I0120 19:46:27.888997 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerDied","Data":"7787f59ff7454681fc282d06e7c92824f0d53eec7abb7bc051c812fd67aa7fed"} Jan 20 19:46:27 crc kubenswrapper[4558]: I0120 19:46:27.889070 4558 scope.go:117] "RemoveContainer" containerID="c9f1ef733c7c7a9f4b483629db0474b41245df77ed9c7ff44058afb88192b813" Jan 20 19:46:27 crc kubenswrapper[4558]: I0120 19:46:27.889522 4558 scope.go:117] "RemoveContainer" 
containerID="7787f59ff7454681fc282d06e7c92824f0d53eec7abb7bc051c812fd67aa7fed" Jan 20 19:46:27 crc kubenswrapper[4558]: E0120 19:46:27.889893 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:46:38 crc kubenswrapper[4558]: I0120 19:46:38.565932 4558 scope.go:117] "RemoveContainer" containerID="7787f59ff7454681fc282d06e7c92824f0d53eec7abb7bc051c812fd67aa7fed" Jan 20 19:46:38 crc kubenswrapper[4558]: E0120 19:46:38.566948 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:46:49 crc kubenswrapper[4558]: I0120 19:46:49.566473 4558 scope.go:117] "RemoveContainer" containerID="7787f59ff7454681fc282d06e7c92824f0d53eec7abb7bc051c812fd67aa7fed" Jan 20 19:46:49 crc kubenswrapper[4558]: E0120 19:46:49.568284 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:46:54 crc kubenswrapper[4558]: I0120 19:46:54.695856 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-mpl4p"] Jan 20 19:46:54 crc kubenswrapper[4558]: E0120 19:46:54.696583 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ace43de8-9e59-4d8d-8c6a-376177aefded" containerName="collect-profiles" Jan 20 19:46:54 crc kubenswrapper[4558]: I0120 19:46:54.696598 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ace43de8-9e59-4d8d-8c6a-376177aefded" containerName="collect-profiles" Jan 20 19:46:54 crc kubenswrapper[4558]: I0120 19:46:54.696765 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ace43de8-9e59-4d8d-8c6a-376177aefded" containerName="collect-profiles" Jan 20 19:46:54 crc kubenswrapper[4558]: I0120 19:46:54.697666 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-mpl4p" Jan 20 19:46:54 crc kubenswrapper[4558]: I0120 19:46:54.722349 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mpl4p"] Jan 20 19:46:54 crc kubenswrapper[4558]: I0120 19:46:54.839458 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6wxn5\" (UniqueName: \"kubernetes.io/projected/557148a5-3032-4e4a-83cd-be913116f3f2-kube-api-access-6wxn5\") pod \"redhat-operators-mpl4p\" (UID: \"557148a5-3032-4e4a-83cd-be913116f3f2\") " pod="openshift-marketplace/redhat-operators-mpl4p" Jan 20 19:46:54 crc kubenswrapper[4558]: I0120 19:46:54.839530 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/557148a5-3032-4e4a-83cd-be913116f3f2-catalog-content\") pod \"redhat-operators-mpl4p\" (UID: \"557148a5-3032-4e4a-83cd-be913116f3f2\") " pod="openshift-marketplace/redhat-operators-mpl4p" Jan 20 19:46:54 crc kubenswrapper[4558]: I0120 19:46:54.839573 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/557148a5-3032-4e4a-83cd-be913116f3f2-utilities\") pod \"redhat-operators-mpl4p\" (UID: \"557148a5-3032-4e4a-83cd-be913116f3f2\") " pod="openshift-marketplace/redhat-operators-mpl4p" Jan 20 19:46:54 crc kubenswrapper[4558]: I0120 19:46:54.940621 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6wxn5\" (UniqueName: \"kubernetes.io/projected/557148a5-3032-4e4a-83cd-be913116f3f2-kube-api-access-6wxn5\") pod \"redhat-operators-mpl4p\" (UID: \"557148a5-3032-4e4a-83cd-be913116f3f2\") " pod="openshift-marketplace/redhat-operators-mpl4p" Jan 20 19:46:54 crc kubenswrapper[4558]: I0120 19:46:54.940680 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/557148a5-3032-4e4a-83cd-be913116f3f2-catalog-content\") pod \"redhat-operators-mpl4p\" (UID: \"557148a5-3032-4e4a-83cd-be913116f3f2\") " pod="openshift-marketplace/redhat-operators-mpl4p" Jan 20 19:46:54 crc kubenswrapper[4558]: I0120 19:46:54.940718 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/557148a5-3032-4e4a-83cd-be913116f3f2-utilities\") pod \"redhat-operators-mpl4p\" (UID: \"557148a5-3032-4e4a-83cd-be913116f3f2\") " pod="openshift-marketplace/redhat-operators-mpl4p" Jan 20 19:46:54 crc kubenswrapper[4558]: I0120 19:46:54.941542 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/557148a5-3032-4e4a-83cd-be913116f3f2-catalog-content\") pod \"redhat-operators-mpl4p\" (UID: \"557148a5-3032-4e4a-83cd-be913116f3f2\") " pod="openshift-marketplace/redhat-operators-mpl4p" Jan 20 19:46:54 crc kubenswrapper[4558]: I0120 19:46:54.941565 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/557148a5-3032-4e4a-83cd-be913116f3f2-utilities\") pod \"redhat-operators-mpl4p\" (UID: \"557148a5-3032-4e4a-83cd-be913116f3f2\") " pod="openshift-marketplace/redhat-operators-mpl4p" Jan 20 19:46:54 crc kubenswrapper[4558]: I0120 19:46:54.971556 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-6wxn5\" (UniqueName: \"kubernetes.io/projected/557148a5-3032-4e4a-83cd-be913116f3f2-kube-api-access-6wxn5\") pod \"redhat-operators-mpl4p\" (UID: \"557148a5-3032-4e4a-83cd-be913116f3f2\") " pod="openshift-marketplace/redhat-operators-mpl4p" Jan 20 19:46:55 crc kubenswrapper[4558]: I0120 19:46:55.012882 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mpl4p" Jan 20 19:46:55 crc kubenswrapper[4558]: I0120 19:46:55.447272 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mpl4p"] Jan 20 19:46:56 crc kubenswrapper[4558]: I0120 19:46:56.101882 4558 generic.go:334] "Generic (PLEG): container finished" podID="557148a5-3032-4e4a-83cd-be913116f3f2" containerID="c548ae10861586c5094169518a95ec1627d0787a50f4ebcea863684e3320f6e3" exitCode=0 Jan 20 19:46:56 crc kubenswrapper[4558]: I0120 19:46:56.101987 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mpl4p" event={"ID":"557148a5-3032-4e4a-83cd-be913116f3f2","Type":"ContainerDied","Data":"c548ae10861586c5094169518a95ec1627d0787a50f4ebcea863684e3320f6e3"} Jan 20 19:46:56 crc kubenswrapper[4558]: I0120 19:46:56.102263 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mpl4p" event={"ID":"557148a5-3032-4e4a-83cd-be913116f3f2","Type":"ContainerStarted","Data":"89a8adc8f18d195a8ad44319e0e264813f9ce18bea1907742c0be2bb0d3f87db"} Jan 20 19:46:56 crc kubenswrapper[4558]: I0120 19:46:56.103821 4558 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 20 19:46:57 crc kubenswrapper[4558]: I0120 19:46:57.112121 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mpl4p" event={"ID":"557148a5-3032-4e4a-83cd-be913116f3f2","Type":"ContainerStarted","Data":"80356d3b473059600ba107434eb7fbe79896bc33cf8cfd69d8e0c698f8c90b07"} Jan 20 19:46:58 crc kubenswrapper[4558]: I0120 19:46:58.121963 4558 generic.go:334] "Generic (PLEG): container finished" podID="557148a5-3032-4e4a-83cd-be913116f3f2" containerID="80356d3b473059600ba107434eb7fbe79896bc33cf8cfd69d8e0c698f8c90b07" exitCode=0 Jan 20 19:46:58 crc kubenswrapper[4558]: I0120 19:46:58.122039 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mpl4p" event={"ID":"557148a5-3032-4e4a-83cd-be913116f3f2","Type":"ContainerDied","Data":"80356d3b473059600ba107434eb7fbe79896bc33cf8cfd69d8e0c698f8c90b07"} Jan 20 19:46:59 crc kubenswrapper[4558]: I0120 19:46:59.136293 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mpl4p" event={"ID":"557148a5-3032-4e4a-83cd-be913116f3f2","Type":"ContainerStarted","Data":"cf3ce8ceab63e0c21a937a64a92574f8e01d1aafeb1295f5bd66fc183495ab01"} Jan 20 19:46:59 crc kubenswrapper[4558]: I0120 19:46:59.156486 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-mpl4p" podStartSLOduration=2.6128642810000002 podStartE2EDuration="5.156469277s" podCreationTimestamp="2026-01-20 19:46:54 +0000 UTC" firstStartedPulling="2026-01-20 19:46:56.103546152 +0000 UTC m=+11109.863884118" lastFinishedPulling="2026-01-20 19:46:58.647151147 +0000 UTC m=+11112.407489114" observedRunningTime="2026-01-20 19:46:59.155720218 +0000 UTC m=+11112.916058185" watchObservedRunningTime="2026-01-20 19:46:59.156469277 +0000 UTC m=+11112.916807243" Jan 20 19:47:02 
crc kubenswrapper[4558]: I0120 19:47:02.566347 4558 scope.go:117] "RemoveContainer" containerID="7787f59ff7454681fc282d06e7c92824f0d53eec7abb7bc051c812fd67aa7fed" Jan 20 19:47:02 crc kubenswrapper[4558]: E0120 19:47:02.566847 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:47:05 crc kubenswrapper[4558]: I0120 19:47:05.013829 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-mpl4p" Jan 20 19:47:05 crc kubenswrapper[4558]: I0120 19:47:05.014348 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-mpl4p" Jan 20 19:47:05 crc kubenswrapper[4558]: I0120 19:47:05.043913 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-mpl4p" Jan 20 19:47:05 crc kubenswrapper[4558]: I0120 19:47:05.206843 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-mpl4p" Jan 20 19:47:05 crc kubenswrapper[4558]: I0120 19:47:05.276416 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-mpl4p"] Jan 20 19:47:07 crc kubenswrapper[4558]: I0120 19:47:07.185183 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-mpl4p" podUID="557148a5-3032-4e4a-83cd-be913116f3f2" containerName="registry-server" containerID="cri-o://cf3ce8ceab63e0c21a937a64a92574f8e01d1aafeb1295f5bd66fc183495ab01" gracePeriod=2 Jan 20 19:47:08 crc kubenswrapper[4558]: I0120 19:47:08.659307 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mpl4p" Jan 20 19:47:08 crc kubenswrapper[4558]: I0120 19:47:08.751294 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/557148a5-3032-4e4a-83cd-be913116f3f2-catalog-content\") pod \"557148a5-3032-4e4a-83cd-be913116f3f2\" (UID: \"557148a5-3032-4e4a-83cd-be913116f3f2\") " Jan 20 19:47:08 crc kubenswrapper[4558]: I0120 19:47:08.751472 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/557148a5-3032-4e4a-83cd-be913116f3f2-utilities\") pod \"557148a5-3032-4e4a-83cd-be913116f3f2\" (UID: \"557148a5-3032-4e4a-83cd-be913116f3f2\") " Jan 20 19:47:08 crc kubenswrapper[4558]: I0120 19:47:08.751529 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6wxn5\" (UniqueName: \"kubernetes.io/projected/557148a5-3032-4e4a-83cd-be913116f3f2-kube-api-access-6wxn5\") pod \"557148a5-3032-4e4a-83cd-be913116f3f2\" (UID: \"557148a5-3032-4e4a-83cd-be913116f3f2\") " Jan 20 19:47:08 crc kubenswrapper[4558]: I0120 19:47:08.753283 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/557148a5-3032-4e4a-83cd-be913116f3f2-utilities" (OuterVolumeSpecName: "utilities") pod "557148a5-3032-4e4a-83cd-be913116f3f2" (UID: "557148a5-3032-4e4a-83cd-be913116f3f2"). 
InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 19:47:08 crc kubenswrapper[4558]: I0120 19:47:08.759548 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/557148a5-3032-4e4a-83cd-be913116f3f2-kube-api-access-6wxn5" (OuterVolumeSpecName: "kube-api-access-6wxn5") pod "557148a5-3032-4e4a-83cd-be913116f3f2" (UID: "557148a5-3032-4e4a-83cd-be913116f3f2"). InnerVolumeSpecName "kube-api-access-6wxn5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 19:47:08 crc kubenswrapper[4558]: I0120 19:47:08.853859 4558 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/557148a5-3032-4e4a-83cd-be913116f3f2-utilities\") on node \"crc\" DevicePath \"\"" Jan 20 19:47:08 crc kubenswrapper[4558]: I0120 19:47:08.853898 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6wxn5\" (UniqueName: \"kubernetes.io/projected/557148a5-3032-4e4a-83cd-be913116f3f2-kube-api-access-6wxn5\") on node \"crc\" DevicePath \"\"" Jan 20 19:47:08 crc kubenswrapper[4558]: I0120 19:47:08.864439 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/557148a5-3032-4e4a-83cd-be913116f3f2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "557148a5-3032-4e4a-83cd-be913116f3f2" (UID: "557148a5-3032-4e4a-83cd-be913116f3f2"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 19:47:08 crc kubenswrapper[4558]: I0120 19:47:08.954589 4558 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/557148a5-3032-4e4a-83cd-be913116f3f2-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 20 19:47:09 crc kubenswrapper[4558]: I0120 19:47:09.202752 4558 generic.go:334] "Generic (PLEG): container finished" podID="557148a5-3032-4e4a-83cd-be913116f3f2" containerID="cf3ce8ceab63e0c21a937a64a92574f8e01d1aafeb1295f5bd66fc183495ab01" exitCode=0 Jan 20 19:47:09 crc kubenswrapper[4558]: I0120 19:47:09.202835 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mpl4p" event={"ID":"557148a5-3032-4e4a-83cd-be913116f3f2","Type":"ContainerDied","Data":"cf3ce8ceab63e0c21a937a64a92574f8e01d1aafeb1295f5bd66fc183495ab01"} Jan 20 19:47:09 crc kubenswrapper[4558]: I0120 19:47:09.203101 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mpl4p" event={"ID":"557148a5-3032-4e4a-83cd-be913116f3f2","Type":"ContainerDied","Data":"89a8adc8f18d195a8ad44319e0e264813f9ce18bea1907742c0be2bb0d3f87db"} Jan 20 19:47:09 crc kubenswrapper[4558]: I0120 19:47:09.202861 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-mpl4p" Jan 20 19:47:09 crc kubenswrapper[4558]: I0120 19:47:09.203182 4558 scope.go:117] "RemoveContainer" containerID="cf3ce8ceab63e0c21a937a64a92574f8e01d1aafeb1295f5bd66fc183495ab01" Jan 20 19:47:09 crc kubenswrapper[4558]: I0120 19:47:09.226293 4558 scope.go:117] "RemoveContainer" containerID="80356d3b473059600ba107434eb7fbe79896bc33cf8cfd69d8e0c698f8c90b07" Jan 20 19:47:09 crc kubenswrapper[4558]: I0120 19:47:09.250469 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-mpl4p"] Jan 20 19:47:09 crc kubenswrapper[4558]: I0120 19:47:09.252982 4558 scope.go:117] "RemoveContainer" containerID="c548ae10861586c5094169518a95ec1627d0787a50f4ebcea863684e3320f6e3" Jan 20 19:47:09 crc kubenswrapper[4558]: I0120 19:47:09.256802 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-mpl4p"] Jan 20 19:47:09 crc kubenswrapper[4558]: I0120 19:47:09.268908 4558 scope.go:117] "RemoveContainer" containerID="cf3ce8ceab63e0c21a937a64a92574f8e01d1aafeb1295f5bd66fc183495ab01" Jan 20 19:47:09 crc kubenswrapper[4558]: E0120 19:47:09.269804 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cf3ce8ceab63e0c21a937a64a92574f8e01d1aafeb1295f5bd66fc183495ab01\": container with ID starting with cf3ce8ceab63e0c21a937a64a92574f8e01d1aafeb1295f5bd66fc183495ab01 not found: ID does not exist" containerID="cf3ce8ceab63e0c21a937a64a92574f8e01d1aafeb1295f5bd66fc183495ab01" Jan 20 19:47:09 crc kubenswrapper[4558]: I0120 19:47:09.269912 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cf3ce8ceab63e0c21a937a64a92574f8e01d1aafeb1295f5bd66fc183495ab01"} err="failed to get container status \"cf3ce8ceab63e0c21a937a64a92574f8e01d1aafeb1295f5bd66fc183495ab01\": rpc error: code = NotFound desc = could not find container \"cf3ce8ceab63e0c21a937a64a92574f8e01d1aafeb1295f5bd66fc183495ab01\": container with ID starting with cf3ce8ceab63e0c21a937a64a92574f8e01d1aafeb1295f5bd66fc183495ab01 not found: ID does not exist" Jan 20 19:47:09 crc kubenswrapper[4558]: I0120 19:47:09.270057 4558 scope.go:117] "RemoveContainer" containerID="80356d3b473059600ba107434eb7fbe79896bc33cf8cfd69d8e0c698f8c90b07" Jan 20 19:47:09 crc kubenswrapper[4558]: E0120 19:47:09.270449 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"80356d3b473059600ba107434eb7fbe79896bc33cf8cfd69d8e0c698f8c90b07\": container with ID starting with 80356d3b473059600ba107434eb7fbe79896bc33cf8cfd69d8e0c698f8c90b07 not found: ID does not exist" containerID="80356d3b473059600ba107434eb7fbe79896bc33cf8cfd69d8e0c698f8c90b07" Jan 20 19:47:09 crc kubenswrapper[4558]: I0120 19:47:09.270473 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"80356d3b473059600ba107434eb7fbe79896bc33cf8cfd69d8e0c698f8c90b07"} err="failed to get container status \"80356d3b473059600ba107434eb7fbe79896bc33cf8cfd69d8e0c698f8c90b07\": rpc error: code = NotFound desc = could not find container \"80356d3b473059600ba107434eb7fbe79896bc33cf8cfd69d8e0c698f8c90b07\": container with ID starting with 80356d3b473059600ba107434eb7fbe79896bc33cf8cfd69d8e0c698f8c90b07 not found: ID does not exist" Jan 20 19:47:09 crc kubenswrapper[4558]: I0120 19:47:09.270488 4558 scope.go:117] "RemoveContainer" 
containerID="c548ae10861586c5094169518a95ec1627d0787a50f4ebcea863684e3320f6e3" Jan 20 19:47:09 crc kubenswrapper[4558]: E0120 19:47:09.270950 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c548ae10861586c5094169518a95ec1627d0787a50f4ebcea863684e3320f6e3\": container with ID starting with c548ae10861586c5094169518a95ec1627d0787a50f4ebcea863684e3320f6e3 not found: ID does not exist" containerID="c548ae10861586c5094169518a95ec1627d0787a50f4ebcea863684e3320f6e3" Jan 20 19:47:09 crc kubenswrapper[4558]: I0120 19:47:09.271056 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c548ae10861586c5094169518a95ec1627d0787a50f4ebcea863684e3320f6e3"} err="failed to get container status \"c548ae10861586c5094169518a95ec1627d0787a50f4ebcea863684e3320f6e3\": rpc error: code = NotFound desc = could not find container \"c548ae10861586c5094169518a95ec1627d0787a50f4ebcea863684e3320f6e3\": container with ID starting with c548ae10861586c5094169518a95ec1627d0787a50f4ebcea863684e3320f6e3 not found: ID does not exist" Jan 20 19:47:10 crc kubenswrapper[4558]: I0120 19:47:10.575023 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="557148a5-3032-4e4a-83cd-be913116f3f2" path="/var/lib/kubelet/pods/557148a5-3032-4e4a-83cd-be913116f3f2/volumes" Jan 20 19:47:13 crc kubenswrapper[4558]: I0120 19:47:13.566186 4558 scope.go:117] "RemoveContainer" containerID="7787f59ff7454681fc282d06e7c92824f0d53eec7abb7bc051c812fd67aa7fed" Jan 20 19:47:13 crc kubenswrapper[4558]: E0120 19:47:13.566456 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:47:27 crc kubenswrapper[4558]: I0120 19:47:27.565919 4558 scope.go:117] "RemoveContainer" containerID="7787f59ff7454681fc282d06e7c92824f0d53eec7abb7bc051c812fd67aa7fed" Jan 20 19:47:27 crc kubenswrapper[4558]: E0120 19:47:27.566940 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:47:38 crc kubenswrapper[4558]: I0120 19:47:38.569087 4558 scope.go:117] "RemoveContainer" containerID="7787f59ff7454681fc282d06e7c92824f0d53eec7abb7bc051c812fd67aa7fed" Jan 20 19:47:38 crc kubenswrapper[4558]: E0120 19:47:38.569909 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:47:51 crc kubenswrapper[4558]: I0120 19:47:51.566366 4558 scope.go:117] "RemoveContainer" 
containerID="7787f59ff7454681fc282d06e7c92824f0d53eec7abb7bc051c812fd67aa7fed" Jan 20 19:47:51 crc kubenswrapper[4558]: E0120 19:47:51.567398 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:48:06 crc kubenswrapper[4558]: I0120 19:48:06.574119 4558 scope.go:117] "RemoveContainer" containerID="7787f59ff7454681fc282d06e7c92824f0d53eec7abb7bc051c812fd67aa7fed" Jan 20 19:48:06 crc kubenswrapper[4558]: E0120 19:48:06.574825 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:48:17 crc kubenswrapper[4558]: I0120 19:48:17.566453 4558 scope.go:117] "RemoveContainer" containerID="7787f59ff7454681fc282d06e7c92824f0d53eec7abb7bc051c812fd67aa7fed" Jan 20 19:48:17 crc kubenswrapper[4558]: E0120 19:48:17.567576 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:48:30 crc kubenswrapper[4558]: I0120 19:48:30.448545 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-4tc5w"] Jan 20 19:48:30 crc kubenswrapper[4558]: E0120 19:48:30.450345 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="557148a5-3032-4e4a-83cd-be913116f3f2" containerName="registry-server" Jan 20 19:48:30 crc kubenswrapper[4558]: I0120 19:48:30.450381 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="557148a5-3032-4e4a-83cd-be913116f3f2" containerName="registry-server" Jan 20 19:48:30 crc kubenswrapper[4558]: E0120 19:48:30.450405 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="557148a5-3032-4e4a-83cd-be913116f3f2" containerName="extract-utilities" Jan 20 19:48:30 crc kubenswrapper[4558]: I0120 19:48:30.450413 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="557148a5-3032-4e4a-83cd-be913116f3f2" containerName="extract-utilities" Jan 20 19:48:30 crc kubenswrapper[4558]: E0120 19:48:30.450441 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="557148a5-3032-4e4a-83cd-be913116f3f2" containerName="extract-content" Jan 20 19:48:30 crc kubenswrapper[4558]: I0120 19:48:30.450447 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="557148a5-3032-4e4a-83cd-be913116f3f2" containerName="extract-content" Jan 20 19:48:30 crc kubenswrapper[4558]: I0120 19:48:30.450629 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="557148a5-3032-4e4a-83cd-be913116f3f2" containerName="registry-server" Jan 20 19:48:30 crc kubenswrapper[4558]: I0120 19:48:30.451721 4558 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4tc5w" Jan 20 19:48:30 crc kubenswrapper[4558]: I0120 19:48:30.462646 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4tc5w"] Jan 20 19:48:30 crc kubenswrapper[4558]: I0120 19:48:30.575305 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/22b6450c-dfd4-4a48-be41-ba2c84f41f5a-utilities\") pod \"redhat-marketplace-4tc5w\" (UID: \"22b6450c-dfd4-4a48-be41-ba2c84f41f5a\") " pod="openshift-marketplace/redhat-marketplace-4tc5w" Jan 20 19:48:30 crc kubenswrapper[4558]: I0120 19:48:30.575362 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/22b6450c-dfd4-4a48-be41-ba2c84f41f5a-catalog-content\") pod \"redhat-marketplace-4tc5w\" (UID: \"22b6450c-dfd4-4a48-be41-ba2c84f41f5a\") " pod="openshift-marketplace/redhat-marketplace-4tc5w" Jan 20 19:48:30 crc kubenswrapper[4558]: I0120 19:48:30.575765 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-djmvd\" (UniqueName: \"kubernetes.io/projected/22b6450c-dfd4-4a48-be41-ba2c84f41f5a-kube-api-access-djmvd\") pod \"redhat-marketplace-4tc5w\" (UID: \"22b6450c-dfd4-4a48-be41-ba2c84f41f5a\") " pod="openshift-marketplace/redhat-marketplace-4tc5w" Jan 20 19:48:30 crc kubenswrapper[4558]: I0120 19:48:30.676523 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/22b6450c-dfd4-4a48-be41-ba2c84f41f5a-utilities\") pod \"redhat-marketplace-4tc5w\" (UID: \"22b6450c-dfd4-4a48-be41-ba2c84f41f5a\") " pod="openshift-marketplace/redhat-marketplace-4tc5w" Jan 20 19:48:30 crc kubenswrapper[4558]: I0120 19:48:30.676586 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/22b6450c-dfd4-4a48-be41-ba2c84f41f5a-catalog-content\") pod \"redhat-marketplace-4tc5w\" (UID: \"22b6450c-dfd4-4a48-be41-ba2c84f41f5a\") " pod="openshift-marketplace/redhat-marketplace-4tc5w" Jan 20 19:48:30 crc kubenswrapper[4558]: I0120 19:48:30.676643 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-djmvd\" (UniqueName: \"kubernetes.io/projected/22b6450c-dfd4-4a48-be41-ba2c84f41f5a-kube-api-access-djmvd\") pod \"redhat-marketplace-4tc5w\" (UID: \"22b6450c-dfd4-4a48-be41-ba2c84f41f5a\") " pod="openshift-marketplace/redhat-marketplace-4tc5w" Jan 20 19:48:30 crc kubenswrapper[4558]: I0120 19:48:30.677128 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/22b6450c-dfd4-4a48-be41-ba2c84f41f5a-utilities\") pod \"redhat-marketplace-4tc5w\" (UID: \"22b6450c-dfd4-4a48-be41-ba2c84f41f5a\") " pod="openshift-marketplace/redhat-marketplace-4tc5w" Jan 20 19:48:30 crc kubenswrapper[4558]: I0120 19:48:30.677294 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/22b6450c-dfd4-4a48-be41-ba2c84f41f5a-catalog-content\") pod \"redhat-marketplace-4tc5w\" (UID: \"22b6450c-dfd4-4a48-be41-ba2c84f41f5a\") " pod="openshift-marketplace/redhat-marketplace-4tc5w" Jan 20 19:48:30 crc kubenswrapper[4558]: I0120 19:48:30.696727 4558 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-djmvd\" (UniqueName: \"kubernetes.io/projected/22b6450c-dfd4-4a48-be41-ba2c84f41f5a-kube-api-access-djmvd\") pod \"redhat-marketplace-4tc5w\" (UID: \"22b6450c-dfd4-4a48-be41-ba2c84f41f5a\") " pod="openshift-marketplace/redhat-marketplace-4tc5w" Jan 20 19:48:30 crc kubenswrapper[4558]: I0120 19:48:30.776537 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4tc5w" Jan 20 19:48:31 crc kubenswrapper[4558]: I0120 19:48:31.207063 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4tc5w"] Jan 20 19:48:31 crc kubenswrapper[4558]: I0120 19:48:31.566658 4558 scope.go:117] "RemoveContainer" containerID="7787f59ff7454681fc282d06e7c92824f0d53eec7abb7bc051c812fd67aa7fed" Jan 20 19:48:31 crc kubenswrapper[4558]: E0120 19:48:31.567440 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:48:31 crc kubenswrapper[4558]: I0120 19:48:31.841694 4558 generic.go:334] "Generic (PLEG): container finished" podID="22b6450c-dfd4-4a48-be41-ba2c84f41f5a" containerID="ed9005930cf640bc16f4bbadddd4cf44f5dc69cc7a7f05c338f8b42b09f4694a" exitCode=0 Jan 20 19:48:31 crc kubenswrapper[4558]: I0120 19:48:31.841754 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4tc5w" event={"ID":"22b6450c-dfd4-4a48-be41-ba2c84f41f5a","Type":"ContainerDied","Data":"ed9005930cf640bc16f4bbadddd4cf44f5dc69cc7a7f05c338f8b42b09f4694a"} Jan 20 19:48:31 crc kubenswrapper[4558]: I0120 19:48:31.842261 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4tc5w" event={"ID":"22b6450c-dfd4-4a48-be41-ba2c84f41f5a","Type":"ContainerStarted","Data":"cc3656b76934a28de94a429879a66074b06d93fce024adcb3c536cbcd1e8f3b8"} Jan 20 19:48:33 crc kubenswrapper[4558]: I0120 19:48:33.859872 4558 generic.go:334] "Generic (PLEG): container finished" podID="22b6450c-dfd4-4a48-be41-ba2c84f41f5a" containerID="7dbeae0c57636fe9a139a3e1949349a4e91d3c1c6e55461f58a797bcd3dd8305" exitCode=0 Jan 20 19:48:33 crc kubenswrapper[4558]: I0120 19:48:33.859995 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4tc5w" event={"ID":"22b6450c-dfd4-4a48-be41-ba2c84f41f5a","Type":"ContainerDied","Data":"7dbeae0c57636fe9a139a3e1949349a4e91d3c1c6e55461f58a797bcd3dd8305"} Jan 20 19:48:34 crc kubenswrapper[4558]: I0120 19:48:34.868438 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4tc5w" event={"ID":"22b6450c-dfd4-4a48-be41-ba2c84f41f5a","Type":"ContainerStarted","Data":"39eaab396e3ce28e7bb910049808466da16081949cd6311806ffbbf0ca557cbe"} Jan 20 19:48:34 crc kubenswrapper[4558]: I0120 19:48:34.885470 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-4tc5w" podStartSLOduration=2.356529605 podStartE2EDuration="4.885453079s" podCreationTimestamp="2026-01-20 19:48:30 +0000 UTC" firstStartedPulling="2026-01-20 19:48:31.843499236 +0000 UTC m=+11205.603837203" 
lastFinishedPulling="2026-01-20 19:48:34.372422709 +0000 UTC m=+11208.132760677" observedRunningTime="2026-01-20 19:48:34.885320569 +0000 UTC m=+11208.645658536" watchObservedRunningTime="2026-01-20 19:48:34.885453079 +0000 UTC m=+11208.645791046" Jan 20 19:48:40 crc kubenswrapper[4558]: I0120 19:48:40.777772 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-4tc5w" Jan 20 19:48:40 crc kubenswrapper[4558]: I0120 19:48:40.778577 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-4tc5w" Jan 20 19:48:40 crc kubenswrapper[4558]: I0120 19:48:40.827365 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-4tc5w" Jan 20 19:48:40 crc kubenswrapper[4558]: I0120 19:48:40.953112 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-4tc5w" Jan 20 19:48:41 crc kubenswrapper[4558]: I0120 19:48:41.061357 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-4tc5w"] Jan 20 19:48:42 crc kubenswrapper[4558]: I0120 19:48:42.926426 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-4tc5w" podUID="22b6450c-dfd4-4a48-be41-ba2c84f41f5a" containerName="registry-server" containerID="cri-o://39eaab396e3ce28e7bb910049808466da16081949cd6311806ffbbf0ca557cbe" gracePeriod=2 Jan 20 19:48:43 crc kubenswrapper[4558]: I0120 19:48:43.353026 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4tc5w" Jan 20 19:48:43 crc kubenswrapper[4558]: I0120 19:48:43.459223 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-djmvd\" (UniqueName: \"kubernetes.io/projected/22b6450c-dfd4-4a48-be41-ba2c84f41f5a-kube-api-access-djmvd\") pod \"22b6450c-dfd4-4a48-be41-ba2c84f41f5a\" (UID: \"22b6450c-dfd4-4a48-be41-ba2c84f41f5a\") " Jan 20 19:48:43 crc kubenswrapper[4558]: I0120 19:48:43.459339 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/22b6450c-dfd4-4a48-be41-ba2c84f41f5a-utilities\") pod \"22b6450c-dfd4-4a48-be41-ba2c84f41f5a\" (UID: \"22b6450c-dfd4-4a48-be41-ba2c84f41f5a\") " Jan 20 19:48:43 crc kubenswrapper[4558]: I0120 19:48:43.459517 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/22b6450c-dfd4-4a48-be41-ba2c84f41f5a-catalog-content\") pod \"22b6450c-dfd4-4a48-be41-ba2c84f41f5a\" (UID: \"22b6450c-dfd4-4a48-be41-ba2c84f41f5a\") " Jan 20 19:48:43 crc kubenswrapper[4558]: I0120 19:48:43.460293 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/22b6450c-dfd4-4a48-be41-ba2c84f41f5a-utilities" (OuterVolumeSpecName: "utilities") pod "22b6450c-dfd4-4a48-be41-ba2c84f41f5a" (UID: "22b6450c-dfd4-4a48-be41-ba2c84f41f5a"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 19:48:43 crc kubenswrapper[4558]: I0120 19:48:43.467492 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22b6450c-dfd4-4a48-be41-ba2c84f41f5a-kube-api-access-djmvd" (OuterVolumeSpecName: "kube-api-access-djmvd") pod "22b6450c-dfd4-4a48-be41-ba2c84f41f5a" (UID: "22b6450c-dfd4-4a48-be41-ba2c84f41f5a"). InnerVolumeSpecName "kube-api-access-djmvd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 19:48:43 crc kubenswrapper[4558]: I0120 19:48:43.478955 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/22b6450c-dfd4-4a48-be41-ba2c84f41f5a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "22b6450c-dfd4-4a48-be41-ba2c84f41f5a" (UID: "22b6450c-dfd4-4a48-be41-ba2c84f41f5a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 19:48:43 crc kubenswrapper[4558]: I0120 19:48:43.561661 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-djmvd\" (UniqueName: \"kubernetes.io/projected/22b6450c-dfd4-4a48-be41-ba2c84f41f5a-kube-api-access-djmvd\") on node \"crc\" DevicePath \"\"" Jan 20 19:48:43 crc kubenswrapper[4558]: I0120 19:48:43.561694 4558 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/22b6450c-dfd4-4a48-be41-ba2c84f41f5a-utilities\") on node \"crc\" DevicePath \"\"" Jan 20 19:48:43 crc kubenswrapper[4558]: I0120 19:48:43.561710 4558 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/22b6450c-dfd4-4a48-be41-ba2c84f41f5a-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 20 19:48:43 crc kubenswrapper[4558]: I0120 19:48:43.566123 4558 scope.go:117] "RemoveContainer" containerID="7787f59ff7454681fc282d06e7c92824f0d53eec7abb7bc051c812fd67aa7fed" Jan 20 19:48:43 crc kubenswrapper[4558]: E0120 19:48:43.566394 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:48:43 crc kubenswrapper[4558]: I0120 19:48:43.935861 4558 generic.go:334] "Generic (PLEG): container finished" podID="22b6450c-dfd4-4a48-be41-ba2c84f41f5a" containerID="39eaab396e3ce28e7bb910049808466da16081949cd6311806ffbbf0ca557cbe" exitCode=0 Jan 20 19:48:43 crc kubenswrapper[4558]: I0120 19:48:43.935910 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4tc5w" event={"ID":"22b6450c-dfd4-4a48-be41-ba2c84f41f5a","Type":"ContainerDied","Data":"39eaab396e3ce28e7bb910049808466da16081949cd6311806ffbbf0ca557cbe"} Jan 20 19:48:43 crc kubenswrapper[4558]: I0120 19:48:43.935920 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4tc5w" Jan 20 19:48:43 crc kubenswrapper[4558]: I0120 19:48:43.935944 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4tc5w" event={"ID":"22b6450c-dfd4-4a48-be41-ba2c84f41f5a","Type":"ContainerDied","Data":"cc3656b76934a28de94a429879a66074b06d93fce024adcb3c536cbcd1e8f3b8"} Jan 20 19:48:43 crc kubenswrapper[4558]: I0120 19:48:43.935966 4558 scope.go:117] "RemoveContainer" containerID="39eaab396e3ce28e7bb910049808466da16081949cd6311806ffbbf0ca557cbe" Jan 20 19:48:43 crc kubenswrapper[4558]: I0120 19:48:43.955030 4558 scope.go:117] "RemoveContainer" containerID="7dbeae0c57636fe9a139a3e1949349a4e91d3c1c6e55461f58a797bcd3dd8305" Jan 20 19:48:43 crc kubenswrapper[4558]: I0120 19:48:43.975763 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-4tc5w"] Jan 20 19:48:43 crc kubenswrapper[4558]: I0120 19:48:43.976109 4558 scope.go:117] "RemoveContainer" containerID="ed9005930cf640bc16f4bbadddd4cf44f5dc69cc7a7f05c338f8b42b09f4694a" Jan 20 19:48:44 crc kubenswrapper[4558]: I0120 19:48:44.010155 4558 scope.go:117] "RemoveContainer" containerID="39eaab396e3ce28e7bb910049808466da16081949cd6311806ffbbf0ca557cbe" Jan 20 19:48:44 crc kubenswrapper[4558]: I0120 19:48:44.010352 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-4tc5w"] Jan 20 19:48:44 crc kubenswrapper[4558]: E0120 19:48:44.010891 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"39eaab396e3ce28e7bb910049808466da16081949cd6311806ffbbf0ca557cbe\": container with ID starting with 39eaab396e3ce28e7bb910049808466da16081949cd6311806ffbbf0ca557cbe not found: ID does not exist" containerID="39eaab396e3ce28e7bb910049808466da16081949cd6311806ffbbf0ca557cbe" Jan 20 19:48:44 crc kubenswrapper[4558]: I0120 19:48:44.010956 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"39eaab396e3ce28e7bb910049808466da16081949cd6311806ffbbf0ca557cbe"} err="failed to get container status \"39eaab396e3ce28e7bb910049808466da16081949cd6311806ffbbf0ca557cbe\": rpc error: code = NotFound desc = could not find container \"39eaab396e3ce28e7bb910049808466da16081949cd6311806ffbbf0ca557cbe\": container with ID starting with 39eaab396e3ce28e7bb910049808466da16081949cd6311806ffbbf0ca557cbe not found: ID does not exist" Jan 20 19:48:44 crc kubenswrapper[4558]: I0120 19:48:44.010989 4558 scope.go:117] "RemoveContainer" containerID="7dbeae0c57636fe9a139a3e1949349a4e91d3c1c6e55461f58a797bcd3dd8305" Jan 20 19:48:44 crc kubenswrapper[4558]: E0120 19:48:44.011491 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7dbeae0c57636fe9a139a3e1949349a4e91d3c1c6e55461f58a797bcd3dd8305\": container with ID starting with 7dbeae0c57636fe9a139a3e1949349a4e91d3c1c6e55461f58a797bcd3dd8305 not found: ID does not exist" containerID="7dbeae0c57636fe9a139a3e1949349a4e91d3c1c6e55461f58a797bcd3dd8305" Jan 20 19:48:44 crc kubenswrapper[4558]: I0120 19:48:44.011517 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7dbeae0c57636fe9a139a3e1949349a4e91d3c1c6e55461f58a797bcd3dd8305"} err="failed to get container status \"7dbeae0c57636fe9a139a3e1949349a4e91d3c1c6e55461f58a797bcd3dd8305\": rpc error: code = NotFound desc = could not find 
container \"7dbeae0c57636fe9a139a3e1949349a4e91d3c1c6e55461f58a797bcd3dd8305\": container with ID starting with 7dbeae0c57636fe9a139a3e1949349a4e91d3c1c6e55461f58a797bcd3dd8305 not found: ID does not exist" Jan 20 19:48:44 crc kubenswrapper[4558]: I0120 19:48:44.011533 4558 scope.go:117] "RemoveContainer" containerID="ed9005930cf640bc16f4bbadddd4cf44f5dc69cc7a7f05c338f8b42b09f4694a" Jan 20 19:48:44 crc kubenswrapper[4558]: E0120 19:48:44.011861 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ed9005930cf640bc16f4bbadddd4cf44f5dc69cc7a7f05c338f8b42b09f4694a\": container with ID starting with ed9005930cf640bc16f4bbadddd4cf44f5dc69cc7a7f05c338f8b42b09f4694a not found: ID does not exist" containerID="ed9005930cf640bc16f4bbadddd4cf44f5dc69cc7a7f05c338f8b42b09f4694a" Jan 20 19:48:44 crc kubenswrapper[4558]: I0120 19:48:44.011897 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ed9005930cf640bc16f4bbadddd4cf44f5dc69cc7a7f05c338f8b42b09f4694a"} err="failed to get container status \"ed9005930cf640bc16f4bbadddd4cf44f5dc69cc7a7f05c338f8b42b09f4694a\": rpc error: code = NotFound desc = could not find container \"ed9005930cf640bc16f4bbadddd4cf44f5dc69cc7a7f05c338f8b42b09f4694a\": container with ID starting with ed9005930cf640bc16f4bbadddd4cf44f5dc69cc7a7f05c338f8b42b09f4694a not found: ID does not exist" Jan 20 19:48:44 crc kubenswrapper[4558]: I0120 19:48:44.577655 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22b6450c-dfd4-4a48-be41-ba2c84f41f5a" path="/var/lib/kubelet/pods/22b6450c-dfd4-4a48-be41-ba2c84f41f5a/volumes" Jan 20 19:48:57 crc kubenswrapper[4558]: I0120 19:48:57.566084 4558 scope.go:117] "RemoveContainer" containerID="7787f59ff7454681fc282d06e7c92824f0d53eec7abb7bc051c812fd67aa7fed" Jan 20 19:48:57 crc kubenswrapper[4558]: E0120 19:48:57.566752 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:49:10 crc kubenswrapper[4558]: I0120 19:49:10.567544 4558 scope.go:117] "RemoveContainer" containerID="7787f59ff7454681fc282d06e7c92824f0d53eec7abb7bc051c812fd67aa7fed" Jan 20 19:49:10 crc kubenswrapper[4558]: E0120 19:49:10.568511 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:49:23 crc kubenswrapper[4558]: I0120 19:49:23.565589 4558 scope.go:117] "RemoveContainer" containerID="7787f59ff7454681fc282d06e7c92824f0d53eec7abb7bc051c812fd67aa7fed" Jan 20 19:49:23 crc kubenswrapper[4558]: E0120 19:49:23.566443 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:49:38 crc kubenswrapper[4558]: I0120 19:49:38.565616 4558 scope.go:117] "RemoveContainer" containerID="7787f59ff7454681fc282d06e7c92824f0d53eec7abb7bc051c812fd67aa7fed" Jan 20 19:49:38 crc kubenswrapper[4558]: E0120 19:49:38.566458 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:49:50 crc kubenswrapper[4558]: I0120 19:49:50.566955 4558 scope.go:117] "RemoveContainer" containerID="7787f59ff7454681fc282d06e7c92824f0d53eec7abb7bc051c812fd67aa7fed" Jan 20 19:49:50 crc kubenswrapper[4558]: E0120 19:49:50.568681 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:50:05 crc kubenswrapper[4558]: I0120 19:50:05.566464 4558 scope.go:117] "RemoveContainer" containerID="7787f59ff7454681fc282d06e7c92824f0d53eec7abb7bc051c812fd67aa7fed" Jan 20 19:50:05 crc kubenswrapper[4558]: E0120 19:50:05.567108 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:50:19 crc kubenswrapper[4558]: I0120 19:50:19.566464 4558 scope.go:117] "RemoveContainer" containerID="7787f59ff7454681fc282d06e7c92824f0d53eec7abb7bc051c812fd67aa7fed" Jan 20 19:50:19 crc kubenswrapper[4558]: E0120 19:50:19.567690 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:50:30 crc kubenswrapper[4558]: I0120 19:50:30.566138 4558 scope.go:117] "RemoveContainer" containerID="7787f59ff7454681fc282d06e7c92824f0d53eec7abb7bc051c812fd67aa7fed" Jan 20 19:50:30 crc kubenswrapper[4558]: E0120 19:50:30.567255 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" 
podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:50:42 crc kubenswrapper[4558]: I0120 19:50:42.566497 4558 scope.go:117] "RemoveContainer" containerID="7787f59ff7454681fc282d06e7c92824f0d53eec7abb7bc051c812fd67aa7fed" Jan 20 19:50:42 crc kubenswrapper[4558]: E0120 19:50:42.567390 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:50:47 crc kubenswrapper[4558]: I0120 19:50:47.905243 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-9zrpl"] Jan 20 19:50:47 crc kubenswrapper[4558]: E0120 19:50:47.906002 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22b6450c-dfd4-4a48-be41-ba2c84f41f5a" containerName="extract-utilities" Jan 20 19:50:47 crc kubenswrapper[4558]: I0120 19:50:47.906016 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="22b6450c-dfd4-4a48-be41-ba2c84f41f5a" containerName="extract-utilities" Jan 20 19:50:47 crc kubenswrapper[4558]: E0120 19:50:47.906031 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22b6450c-dfd4-4a48-be41-ba2c84f41f5a" containerName="extract-content" Jan 20 19:50:47 crc kubenswrapper[4558]: I0120 19:50:47.906037 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="22b6450c-dfd4-4a48-be41-ba2c84f41f5a" containerName="extract-content" Jan 20 19:50:47 crc kubenswrapper[4558]: E0120 19:50:47.906045 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22b6450c-dfd4-4a48-be41-ba2c84f41f5a" containerName="registry-server" Jan 20 19:50:47 crc kubenswrapper[4558]: I0120 19:50:47.906051 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="22b6450c-dfd4-4a48-be41-ba2c84f41f5a" containerName="registry-server" Jan 20 19:50:47 crc kubenswrapper[4558]: I0120 19:50:47.906185 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="22b6450c-dfd4-4a48-be41-ba2c84f41f5a" containerName="registry-server" Jan 20 19:50:47 crc kubenswrapper[4558]: I0120 19:50:47.908468 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-9zrpl" Jan 20 19:50:47 crc kubenswrapper[4558]: I0120 19:50:47.915982 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-9zrpl"] Jan 20 19:50:48 crc kubenswrapper[4558]: I0120 19:50:48.094520 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ad6f3fb5-5bf9-4cc9-b6e4-9f70345e25fa-catalog-content\") pod \"certified-operators-9zrpl\" (UID: \"ad6f3fb5-5bf9-4cc9-b6e4-9f70345e25fa\") " pod="openshift-marketplace/certified-operators-9zrpl" Jan 20 19:50:48 crc kubenswrapper[4558]: I0120 19:50:48.094857 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hwhvp\" (UniqueName: \"kubernetes.io/projected/ad6f3fb5-5bf9-4cc9-b6e4-9f70345e25fa-kube-api-access-hwhvp\") pod \"certified-operators-9zrpl\" (UID: \"ad6f3fb5-5bf9-4cc9-b6e4-9f70345e25fa\") " pod="openshift-marketplace/certified-operators-9zrpl" Jan 20 19:50:48 crc kubenswrapper[4558]: I0120 19:50:48.094898 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ad6f3fb5-5bf9-4cc9-b6e4-9f70345e25fa-utilities\") pod \"certified-operators-9zrpl\" (UID: \"ad6f3fb5-5bf9-4cc9-b6e4-9f70345e25fa\") " pod="openshift-marketplace/certified-operators-9zrpl" Jan 20 19:50:48 crc kubenswrapper[4558]: I0120 19:50:48.197031 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ad6f3fb5-5bf9-4cc9-b6e4-9f70345e25fa-catalog-content\") pod \"certified-operators-9zrpl\" (UID: \"ad6f3fb5-5bf9-4cc9-b6e4-9f70345e25fa\") " pod="openshift-marketplace/certified-operators-9zrpl" Jan 20 19:50:48 crc kubenswrapper[4558]: I0120 19:50:48.197184 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hwhvp\" (UniqueName: \"kubernetes.io/projected/ad6f3fb5-5bf9-4cc9-b6e4-9f70345e25fa-kube-api-access-hwhvp\") pod \"certified-operators-9zrpl\" (UID: \"ad6f3fb5-5bf9-4cc9-b6e4-9f70345e25fa\") " pod="openshift-marketplace/certified-operators-9zrpl" Jan 20 19:50:48 crc kubenswrapper[4558]: I0120 19:50:48.197222 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ad6f3fb5-5bf9-4cc9-b6e4-9f70345e25fa-utilities\") pod \"certified-operators-9zrpl\" (UID: \"ad6f3fb5-5bf9-4cc9-b6e4-9f70345e25fa\") " pod="openshift-marketplace/certified-operators-9zrpl" Jan 20 19:50:48 crc kubenswrapper[4558]: I0120 19:50:48.197674 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ad6f3fb5-5bf9-4cc9-b6e4-9f70345e25fa-catalog-content\") pod \"certified-operators-9zrpl\" (UID: \"ad6f3fb5-5bf9-4cc9-b6e4-9f70345e25fa\") " pod="openshift-marketplace/certified-operators-9zrpl" Jan 20 19:50:48 crc kubenswrapper[4558]: I0120 19:50:48.197707 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ad6f3fb5-5bf9-4cc9-b6e4-9f70345e25fa-utilities\") pod \"certified-operators-9zrpl\" (UID: \"ad6f3fb5-5bf9-4cc9-b6e4-9f70345e25fa\") " pod="openshift-marketplace/certified-operators-9zrpl" Jan 20 19:50:48 crc kubenswrapper[4558]: I0120 19:50:48.230242 4558 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-hwhvp\" (UniqueName: \"kubernetes.io/projected/ad6f3fb5-5bf9-4cc9-b6e4-9f70345e25fa-kube-api-access-hwhvp\") pod \"certified-operators-9zrpl\" (UID: \"ad6f3fb5-5bf9-4cc9-b6e4-9f70345e25fa\") " pod="openshift-marketplace/certified-operators-9zrpl" Jan 20 19:50:48 crc kubenswrapper[4558]: I0120 19:50:48.525840 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9zrpl" Jan 20 19:50:48 crc kubenswrapper[4558]: I0120 19:50:48.907924 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-9zrpl"] Jan 20 19:50:49 crc kubenswrapper[4558]: I0120 19:50:49.759074 4558 generic.go:334] "Generic (PLEG): container finished" podID="ad6f3fb5-5bf9-4cc9-b6e4-9f70345e25fa" containerID="b7bd53c5d0f56acf6e51e9b92779d369639c897e1cff4d61ff46f50499ef4b0d" exitCode=0 Jan 20 19:50:49 crc kubenswrapper[4558]: I0120 19:50:49.759200 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9zrpl" event={"ID":"ad6f3fb5-5bf9-4cc9-b6e4-9f70345e25fa","Type":"ContainerDied","Data":"b7bd53c5d0f56acf6e51e9b92779d369639c897e1cff4d61ff46f50499ef4b0d"} Jan 20 19:50:49 crc kubenswrapper[4558]: I0120 19:50:49.759418 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9zrpl" event={"ID":"ad6f3fb5-5bf9-4cc9-b6e4-9f70345e25fa","Type":"ContainerStarted","Data":"325f40963c9f47a3fbff0ed06130cf76b20a9f5cdb5cd0915773279dbe393a5f"} Jan 20 19:50:50 crc kubenswrapper[4558]: I0120 19:50:50.769203 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9zrpl" event={"ID":"ad6f3fb5-5bf9-4cc9-b6e4-9f70345e25fa","Type":"ContainerStarted","Data":"539f798872fdb34a70231e590bd4fdd9c6b7ca7f075bb4e08c32e3522703493b"} Jan 20 19:50:51 crc kubenswrapper[4558]: I0120 19:50:51.780598 4558 generic.go:334] "Generic (PLEG): container finished" podID="ad6f3fb5-5bf9-4cc9-b6e4-9f70345e25fa" containerID="539f798872fdb34a70231e590bd4fdd9c6b7ca7f075bb4e08c32e3522703493b" exitCode=0 Jan 20 19:50:51 crc kubenswrapper[4558]: I0120 19:50:51.780711 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9zrpl" event={"ID":"ad6f3fb5-5bf9-4cc9-b6e4-9f70345e25fa","Type":"ContainerDied","Data":"539f798872fdb34a70231e590bd4fdd9c6b7ca7f075bb4e08c32e3522703493b"} Jan 20 19:50:52 crc kubenswrapper[4558]: I0120 19:50:52.799258 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9zrpl" event={"ID":"ad6f3fb5-5bf9-4cc9-b6e4-9f70345e25fa","Type":"ContainerStarted","Data":"867a4b1681791c211d78284c21b9fc4bcd176aee06dcc056533486fe715df75f"} Jan 20 19:50:52 crc kubenswrapper[4558]: I0120 19:50:52.826575 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-9zrpl" podStartSLOduration=3.163684478 podStartE2EDuration="5.826554494s" podCreationTimestamp="2026-01-20 19:50:47 +0000 UTC" firstStartedPulling="2026-01-20 19:50:49.761198013 +0000 UTC m=+11343.521535980" lastFinishedPulling="2026-01-20 19:50:52.424068029 +0000 UTC m=+11346.184405996" observedRunningTime="2026-01-20 19:50:52.822284026 +0000 UTC m=+11346.582621993" watchObservedRunningTime="2026-01-20 19:50:52.826554494 +0000 UTC m=+11346.586892461" Jan 20 19:50:55 crc kubenswrapper[4558]: I0120 19:50:55.566299 4558 scope.go:117] "RemoveContainer" 
containerID="7787f59ff7454681fc282d06e7c92824f0d53eec7abb7bc051c812fd67aa7fed" Jan 20 19:50:55 crc kubenswrapper[4558]: E0120 19:50:55.566894 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:50:58 crc kubenswrapper[4558]: I0120 19:50:58.526740 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-9zrpl" Jan 20 19:50:58 crc kubenswrapper[4558]: I0120 19:50:58.526843 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-9zrpl" Jan 20 19:50:58 crc kubenswrapper[4558]: I0120 19:50:58.563329 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-9zrpl" Jan 20 19:50:58 crc kubenswrapper[4558]: I0120 19:50:58.881717 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-9zrpl" Jan 20 19:50:58 crc kubenswrapper[4558]: I0120 19:50:58.916492 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-9zrpl"] Jan 20 19:51:00 crc kubenswrapper[4558]: I0120 19:51:00.858378 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-9zrpl" podUID="ad6f3fb5-5bf9-4cc9-b6e4-9f70345e25fa" containerName="registry-server" containerID="cri-o://867a4b1681791c211d78284c21b9fc4bcd176aee06dcc056533486fe715df75f" gracePeriod=2 Jan 20 19:51:01 crc kubenswrapper[4558]: I0120 19:51:01.206805 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9zrpl" Jan 20 19:51:01 crc kubenswrapper[4558]: I0120 19:51:01.289197 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ad6f3fb5-5bf9-4cc9-b6e4-9f70345e25fa-utilities\") pod \"ad6f3fb5-5bf9-4cc9-b6e4-9f70345e25fa\" (UID: \"ad6f3fb5-5bf9-4cc9-b6e4-9f70345e25fa\") " Jan 20 19:51:01 crc kubenswrapper[4558]: I0120 19:51:01.289418 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hwhvp\" (UniqueName: \"kubernetes.io/projected/ad6f3fb5-5bf9-4cc9-b6e4-9f70345e25fa-kube-api-access-hwhvp\") pod \"ad6f3fb5-5bf9-4cc9-b6e4-9f70345e25fa\" (UID: \"ad6f3fb5-5bf9-4cc9-b6e4-9f70345e25fa\") " Jan 20 19:51:01 crc kubenswrapper[4558]: I0120 19:51:01.289537 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ad6f3fb5-5bf9-4cc9-b6e4-9f70345e25fa-catalog-content\") pod \"ad6f3fb5-5bf9-4cc9-b6e4-9f70345e25fa\" (UID: \"ad6f3fb5-5bf9-4cc9-b6e4-9f70345e25fa\") " Jan 20 19:51:01 crc kubenswrapper[4558]: I0120 19:51:01.290099 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ad6f3fb5-5bf9-4cc9-b6e4-9f70345e25fa-utilities" (OuterVolumeSpecName: "utilities") pod "ad6f3fb5-5bf9-4cc9-b6e4-9f70345e25fa" (UID: "ad6f3fb5-5bf9-4cc9-b6e4-9f70345e25fa"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 19:51:01 crc kubenswrapper[4558]: I0120 19:51:01.296249 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ad6f3fb5-5bf9-4cc9-b6e4-9f70345e25fa-kube-api-access-hwhvp" (OuterVolumeSpecName: "kube-api-access-hwhvp") pod "ad6f3fb5-5bf9-4cc9-b6e4-9f70345e25fa" (UID: "ad6f3fb5-5bf9-4cc9-b6e4-9f70345e25fa"). InnerVolumeSpecName "kube-api-access-hwhvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 20 19:51:01 crc kubenswrapper[4558]: I0120 19:51:01.390603 4558 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ad6f3fb5-5bf9-4cc9-b6e4-9f70345e25fa-utilities\") on node \"crc\" DevicePath \"\"" Jan 20 19:51:01 crc kubenswrapper[4558]: I0120 19:51:01.390732 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hwhvp\" (UniqueName: \"kubernetes.io/projected/ad6f3fb5-5bf9-4cc9-b6e4-9f70345e25fa-kube-api-access-hwhvp\") on node \"crc\" DevicePath \"\"" Jan 20 19:51:01 crc kubenswrapper[4558]: I0120 19:51:01.442676 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ad6f3fb5-5bf9-4cc9-b6e4-9f70345e25fa-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ad6f3fb5-5bf9-4cc9-b6e4-9f70345e25fa" (UID: "ad6f3fb5-5bf9-4cc9-b6e4-9f70345e25fa"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 20 19:51:01 crc kubenswrapper[4558]: I0120 19:51:01.492002 4558 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ad6f3fb5-5bf9-4cc9-b6e4-9f70345e25fa-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 20 19:51:01 crc kubenswrapper[4558]: I0120 19:51:01.866255 4558 generic.go:334] "Generic (PLEG): container finished" podID="ad6f3fb5-5bf9-4cc9-b6e4-9f70345e25fa" containerID="867a4b1681791c211d78284c21b9fc4bcd176aee06dcc056533486fe715df75f" exitCode=0 Jan 20 19:51:01 crc kubenswrapper[4558]: I0120 19:51:01.867117 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9zrpl" event={"ID":"ad6f3fb5-5bf9-4cc9-b6e4-9f70345e25fa","Type":"ContainerDied","Data":"867a4b1681791c211d78284c21b9fc4bcd176aee06dcc056533486fe715df75f"} Jan 20 19:51:01 crc kubenswrapper[4558]: I0120 19:51:01.867238 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9zrpl" event={"ID":"ad6f3fb5-5bf9-4cc9-b6e4-9f70345e25fa","Type":"ContainerDied","Data":"325f40963c9f47a3fbff0ed06130cf76b20a9f5cdb5cd0915773279dbe393a5f"} Jan 20 19:51:01 crc kubenswrapper[4558]: I0120 19:51:01.867318 4558 scope.go:117] "RemoveContainer" containerID="867a4b1681791c211d78284c21b9fc4bcd176aee06dcc056533486fe715df75f" Jan 20 19:51:01 crc kubenswrapper[4558]: I0120 19:51:01.867496 4558 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-9zrpl" Jan 20 19:51:01 crc kubenswrapper[4558]: I0120 19:51:01.903776 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-9zrpl"] Jan 20 19:51:01 crc kubenswrapper[4558]: I0120 19:51:01.905919 4558 scope.go:117] "RemoveContainer" containerID="539f798872fdb34a70231e590bd4fdd9c6b7ca7f075bb4e08c32e3522703493b" Jan 20 19:51:01 crc kubenswrapper[4558]: I0120 19:51:01.907754 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-9zrpl"] Jan 20 19:51:01 crc kubenswrapper[4558]: I0120 19:51:01.920974 4558 scope.go:117] "RemoveContainer" containerID="b7bd53c5d0f56acf6e51e9b92779d369639c897e1cff4d61ff46f50499ef4b0d" Jan 20 19:51:01 crc kubenswrapper[4558]: I0120 19:51:01.940379 4558 scope.go:117] "RemoveContainer" containerID="867a4b1681791c211d78284c21b9fc4bcd176aee06dcc056533486fe715df75f" Jan 20 19:51:01 crc kubenswrapper[4558]: E0120 19:51:01.940852 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"867a4b1681791c211d78284c21b9fc4bcd176aee06dcc056533486fe715df75f\": container with ID starting with 867a4b1681791c211d78284c21b9fc4bcd176aee06dcc056533486fe715df75f not found: ID does not exist" containerID="867a4b1681791c211d78284c21b9fc4bcd176aee06dcc056533486fe715df75f" Jan 20 19:51:01 crc kubenswrapper[4558]: I0120 19:51:01.940949 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"867a4b1681791c211d78284c21b9fc4bcd176aee06dcc056533486fe715df75f"} err="failed to get container status \"867a4b1681791c211d78284c21b9fc4bcd176aee06dcc056533486fe715df75f\": rpc error: code = NotFound desc = could not find container \"867a4b1681791c211d78284c21b9fc4bcd176aee06dcc056533486fe715df75f\": container with ID starting with 867a4b1681791c211d78284c21b9fc4bcd176aee06dcc056533486fe715df75f not found: ID does not exist" Jan 20 19:51:01 crc kubenswrapper[4558]: I0120 19:51:01.941039 4558 scope.go:117] "RemoveContainer" containerID="539f798872fdb34a70231e590bd4fdd9c6b7ca7f075bb4e08c32e3522703493b" Jan 20 19:51:01 crc kubenswrapper[4558]: E0120 19:51:01.941394 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"539f798872fdb34a70231e590bd4fdd9c6b7ca7f075bb4e08c32e3522703493b\": container with ID starting with 539f798872fdb34a70231e590bd4fdd9c6b7ca7f075bb4e08c32e3522703493b not found: ID does not exist" containerID="539f798872fdb34a70231e590bd4fdd9c6b7ca7f075bb4e08c32e3522703493b" Jan 20 19:51:01 crc kubenswrapper[4558]: I0120 19:51:01.941427 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"539f798872fdb34a70231e590bd4fdd9c6b7ca7f075bb4e08c32e3522703493b"} err="failed to get container status \"539f798872fdb34a70231e590bd4fdd9c6b7ca7f075bb4e08c32e3522703493b\": rpc error: code = NotFound desc = could not find container \"539f798872fdb34a70231e590bd4fdd9c6b7ca7f075bb4e08c32e3522703493b\": container with ID starting with 539f798872fdb34a70231e590bd4fdd9c6b7ca7f075bb4e08c32e3522703493b not found: ID does not exist" Jan 20 19:51:01 crc kubenswrapper[4558]: I0120 19:51:01.941452 4558 scope.go:117] "RemoveContainer" containerID="b7bd53c5d0f56acf6e51e9b92779d369639c897e1cff4d61ff46f50499ef4b0d" Jan 20 19:51:01 crc kubenswrapper[4558]: E0120 19:51:01.941698 4558 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"b7bd53c5d0f56acf6e51e9b92779d369639c897e1cff4d61ff46f50499ef4b0d\": container with ID starting with b7bd53c5d0f56acf6e51e9b92779d369639c897e1cff4d61ff46f50499ef4b0d not found: ID does not exist" containerID="b7bd53c5d0f56acf6e51e9b92779d369639c897e1cff4d61ff46f50499ef4b0d" Jan 20 19:51:01 crc kubenswrapper[4558]: I0120 19:51:01.941782 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b7bd53c5d0f56acf6e51e9b92779d369639c897e1cff4d61ff46f50499ef4b0d"} err="failed to get container status \"b7bd53c5d0f56acf6e51e9b92779d369639c897e1cff4d61ff46f50499ef4b0d\": rpc error: code = NotFound desc = could not find container \"b7bd53c5d0f56acf6e51e9b92779d369639c897e1cff4d61ff46f50499ef4b0d\": container with ID starting with b7bd53c5d0f56acf6e51e9b92779d369639c897e1cff4d61ff46f50499ef4b0d not found: ID does not exist" Jan 20 19:51:02 crc kubenswrapper[4558]: I0120 19:51:02.572530 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ad6f3fb5-5bf9-4cc9-b6e4-9f70345e25fa" path="/var/lib/kubelet/pods/ad6f3fb5-5bf9-4cc9-b6e4-9f70345e25fa/volumes" Jan 20 19:51:09 crc kubenswrapper[4558]: I0120 19:51:09.566514 4558 scope.go:117] "RemoveContainer" containerID="7787f59ff7454681fc282d06e7c92824f0d53eec7abb7bc051c812fd67aa7fed" Jan 20 19:51:09 crc kubenswrapper[4558]: E0120 19:51:09.567304 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:51:23 crc kubenswrapper[4558]: I0120 19:51:23.566505 4558 scope.go:117] "RemoveContainer" containerID="7787f59ff7454681fc282d06e7c92824f0d53eec7abb7bc051c812fd67aa7fed" Jan 20 19:51:23 crc kubenswrapper[4558]: E0120 19:51:23.567125 4558 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vr4r_openshift-machine-config-operator(68337d27-3fa6-4a29-88b0-82e60c3739eb)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" Jan 20 19:51:38 crc kubenswrapper[4558]: I0120 19:51:38.565811 4558 scope.go:117] "RemoveContainer" containerID="7787f59ff7454681fc282d06e7c92824f0d53eec7abb7bc051c812fd67aa7fed" Jan 20 19:51:39 crc kubenswrapper[4558]: I0120 19:51:39.188025 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerStarted","Data":"6aea7546ef0aeb70372fc561243b65e806afe78e61c0bf9c2b2a89c7272c8119"} Jan 20 19:53:57 crc kubenswrapper[4558]: I0120 19:53:57.329806 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 19:53:57 crc kubenswrapper[4558]: I0120 19:53:57.330640 4558 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 19:54:27 crc kubenswrapper[4558]: I0120 19:54:27.329540 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 19:54:27 crc kubenswrapper[4558]: I0120 19:54:27.330280 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 19:54:57 crc kubenswrapper[4558]: I0120 19:54:57.329787 4558 patch_prober.go:28] interesting pod/machine-config-daemon-2vr4r container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 20 19:54:57 crc kubenswrapper[4558]: I0120 19:54:57.330404 4558 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 20 19:54:57 crc kubenswrapper[4558]: I0120 19:54:57.330463 4558 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" Jan 20 19:54:57 crc kubenswrapper[4558]: I0120 19:54:57.331059 4558 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"6aea7546ef0aeb70372fc561243b65e806afe78e61c0bf9c2b2a89c7272c8119"} pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 20 19:54:57 crc kubenswrapper[4558]: I0120 19:54:57.331124 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" podUID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerName="machine-config-daemon" containerID="cri-o://6aea7546ef0aeb70372fc561243b65e806afe78e61c0bf9c2b2a89c7272c8119" gracePeriod=600 Jan 20 19:54:57 crc kubenswrapper[4558]: I0120 19:54:57.647148 4558 generic.go:334] "Generic (PLEG): container finished" podID="68337d27-3fa6-4a29-88b0-82e60c3739eb" containerID="6aea7546ef0aeb70372fc561243b65e806afe78e61c0bf9c2b2a89c7272c8119" exitCode=0 Jan 20 19:54:57 crc kubenswrapper[4558]: I0120 19:54:57.647281 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerDied","Data":"6aea7546ef0aeb70372fc561243b65e806afe78e61c0bf9c2b2a89c7272c8119"} Jan 20 19:54:57 crc kubenswrapper[4558]: I0120 19:54:57.647495 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-2vr4r" event={"ID":"68337d27-3fa6-4a29-88b0-82e60c3739eb","Type":"ContainerStarted","Data":"ba47d36703ce83fdd2cc6355032ae30e7f011308897924a97763deb374206847"} Jan 20 19:54:57 crc kubenswrapper[4558]: I0120 19:54:57.647528 4558 scope.go:117] "RemoveContainer" containerID="7787f59ff7454681fc282d06e7c92824f0d53eec7abb7bc051c812fd67aa7fed" Jan 20 19:55:41 crc kubenswrapper[4558]: I0120 19:55:41.693664 4558 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-5css7"] Jan 20 19:55:41 crc kubenswrapper[4558]: E0120 19:55:41.694835 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad6f3fb5-5bf9-4cc9-b6e4-9f70345e25fa" containerName="registry-server" Jan 20 19:55:41 crc kubenswrapper[4558]: I0120 19:55:41.694851 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad6f3fb5-5bf9-4cc9-b6e4-9f70345e25fa" containerName="registry-server" Jan 20 19:55:41 crc kubenswrapper[4558]: E0120 19:55:41.694879 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad6f3fb5-5bf9-4cc9-b6e4-9f70345e25fa" containerName="extract-content" Jan 20 19:55:41 crc kubenswrapper[4558]: I0120 19:55:41.694943 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad6f3fb5-5bf9-4cc9-b6e4-9f70345e25fa" containerName="extract-content" Jan 20 19:55:41 crc kubenswrapper[4558]: E0120 19:55:41.694962 4558 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad6f3fb5-5bf9-4cc9-b6e4-9f70345e25fa" containerName="extract-utilities" Jan 20 19:55:41 crc kubenswrapper[4558]: I0120 19:55:41.694982 4558 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad6f3fb5-5bf9-4cc9-b6e4-9f70345e25fa" containerName="extract-utilities" Jan 20 19:55:41 crc kubenswrapper[4558]: I0120 19:55:41.695151 4558 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad6f3fb5-5bf9-4cc9-b6e4-9f70345e25fa" containerName="registry-server" Jan 20 19:55:41 crc kubenswrapper[4558]: I0120 19:55:41.696453 4558 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-5css7" Jan 20 19:55:41 crc kubenswrapper[4558]: I0120 19:55:41.707952 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-5css7"] Jan 20 19:55:41 crc kubenswrapper[4558]: I0120 19:55:41.727842 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce68ae90-f160-4693-8cfe-cfb0243db216-utilities\") pod \"community-operators-5css7\" (UID: \"ce68ae90-f160-4693-8cfe-cfb0243db216\") " pod="openshift-marketplace/community-operators-5css7" Jan 20 19:55:41 crc kubenswrapper[4558]: I0120 19:55:41.727984 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce68ae90-f160-4693-8cfe-cfb0243db216-catalog-content\") pod \"community-operators-5css7\" (UID: \"ce68ae90-f160-4693-8cfe-cfb0243db216\") " pod="openshift-marketplace/community-operators-5css7" Jan 20 19:55:41 crc kubenswrapper[4558]: I0120 19:55:41.728051 4558 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jnhb4\" (UniqueName: \"kubernetes.io/projected/ce68ae90-f160-4693-8cfe-cfb0243db216-kube-api-access-jnhb4\") pod \"community-operators-5css7\" (UID: \"ce68ae90-f160-4693-8cfe-cfb0243db216\") " pod="openshift-marketplace/community-operators-5css7" Jan 20 19:55:41 crc kubenswrapper[4558]: I0120 19:55:41.828880 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce68ae90-f160-4693-8cfe-cfb0243db216-catalog-content\") pod \"community-operators-5css7\" (UID: \"ce68ae90-f160-4693-8cfe-cfb0243db216\") " pod="openshift-marketplace/community-operators-5css7" Jan 20 19:55:41 crc kubenswrapper[4558]: I0120 19:55:41.828938 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jnhb4\" (UniqueName: \"kubernetes.io/projected/ce68ae90-f160-4693-8cfe-cfb0243db216-kube-api-access-jnhb4\") pod \"community-operators-5css7\" (UID: \"ce68ae90-f160-4693-8cfe-cfb0243db216\") " pod="openshift-marketplace/community-operators-5css7" Jan 20 19:55:41 crc kubenswrapper[4558]: I0120 19:55:41.829058 4558 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce68ae90-f160-4693-8cfe-cfb0243db216-utilities\") pod \"community-operators-5css7\" (UID: \"ce68ae90-f160-4693-8cfe-cfb0243db216\") " pod="openshift-marketplace/community-operators-5css7" Jan 20 19:55:41 crc kubenswrapper[4558]: I0120 19:55:41.829374 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce68ae90-f160-4693-8cfe-cfb0243db216-catalog-content\") pod \"community-operators-5css7\" (UID: \"ce68ae90-f160-4693-8cfe-cfb0243db216\") " pod="openshift-marketplace/community-operators-5css7" Jan 20 19:55:41 crc kubenswrapper[4558]: I0120 19:55:41.829405 4558 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce68ae90-f160-4693-8cfe-cfb0243db216-utilities\") pod \"community-operators-5css7\" (UID: \"ce68ae90-f160-4693-8cfe-cfb0243db216\") " pod="openshift-marketplace/community-operators-5css7" Jan 20 19:55:41 crc kubenswrapper[4558]: I0120 19:55:41.853963 4558 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-jnhb4\" (UniqueName: \"kubernetes.io/projected/ce68ae90-f160-4693-8cfe-cfb0243db216-kube-api-access-jnhb4\") pod \"community-operators-5css7\" (UID: \"ce68ae90-f160-4693-8cfe-cfb0243db216\") " pod="openshift-marketplace/community-operators-5css7" Jan 20 19:55:42 crc kubenswrapper[4558]: I0120 19:55:42.015990 4558 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5css7" Jan 20 19:55:42 crc kubenswrapper[4558]: I0120 19:55:42.478517 4558 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-5css7"] Jan 20 19:55:42 crc kubenswrapper[4558]: I0120 19:55:42.984900 4558 generic.go:334] "Generic (PLEG): container finished" podID="ce68ae90-f160-4693-8cfe-cfb0243db216" containerID="c082b78198ff2170b81e21beddb02d92aa9a73c3af80156213eb45acff8bb587" exitCode=0 Jan 20 19:55:42 crc kubenswrapper[4558]: I0120 19:55:42.984953 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5css7" event={"ID":"ce68ae90-f160-4693-8cfe-cfb0243db216","Type":"ContainerDied","Data":"c082b78198ff2170b81e21beddb02d92aa9a73c3af80156213eb45acff8bb587"} Jan 20 19:55:42 crc kubenswrapper[4558]: I0120 19:55:42.984990 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5css7" event={"ID":"ce68ae90-f160-4693-8cfe-cfb0243db216","Type":"ContainerStarted","Data":"52e7d3e283960948cb7999e82c6622b55da37b4e915cc07ea17651cbf9f1c33c"} Jan 20 19:55:42 crc kubenswrapper[4558]: I0120 19:55:42.986887 4558 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 20 19:55:44 crc kubenswrapper[4558]: I0120 19:55:43.994209 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5css7" event={"ID":"ce68ae90-f160-4693-8cfe-cfb0243db216","Type":"ContainerStarted","Data":"da4b46b031e5069145341a03670e8a4e8666ae9be16ccc054634b2d2621e7bf7"} Jan 20 19:55:45 crc kubenswrapper[4558]: I0120 19:55:45.000116 4558 generic.go:334] "Generic (PLEG): container finished" podID="ce68ae90-f160-4693-8cfe-cfb0243db216" containerID="da4b46b031e5069145341a03670e8a4e8666ae9be16ccc054634b2d2621e7bf7" exitCode=0 Jan 20 19:55:45 crc kubenswrapper[4558]: I0120 19:55:45.000185 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5css7" event={"ID":"ce68ae90-f160-4693-8cfe-cfb0243db216","Type":"ContainerDied","Data":"da4b46b031e5069145341a03670e8a4e8666ae9be16ccc054634b2d2621e7bf7"} Jan 20 19:55:46 crc kubenswrapper[4558]: I0120 19:55:46.007694 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5css7" event={"ID":"ce68ae90-f160-4693-8cfe-cfb0243db216","Type":"ContainerStarted","Data":"8123004038dbed52e59339ed1fdd210cc459825a8a61c9c61b0175bb91e9c775"} Jan 20 19:55:46 crc kubenswrapper[4558]: I0120 19:55:46.027759 4558 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-5css7" podStartSLOduration=2.369914127 podStartE2EDuration="5.027736028s" podCreationTimestamp="2026-01-20 19:55:41 +0000 UTC" firstStartedPulling="2026-01-20 19:55:42.986644712 +0000 UTC m=+11636.746982680" lastFinishedPulling="2026-01-20 19:55:45.644466614 +0000 UTC m=+11639.404804581" observedRunningTime="2026-01-20 19:55:46.02664718 +0000 UTC m=+11639.786985147" watchObservedRunningTime="2026-01-20 
19:55:46.027736028 +0000 UTC m=+11639.788073995"
Jan 20 19:55:52 crc kubenswrapper[4558]: I0120 19:55:52.017290 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-5css7"
Jan 20 19:55:52 crc kubenswrapper[4558]: I0120 19:55:52.018891 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-5css7"
Jan 20 19:55:52 crc kubenswrapper[4558]: I0120 19:55:52.055739 4558 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-5css7"
Jan 20 19:55:52 crc kubenswrapper[4558]: I0120 19:55:52.115919 4558 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-5css7"
Jan 20 19:55:52 crc kubenswrapper[4558]: I0120 19:55:52.292003 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-5css7"]
Jan 20 19:55:54 crc kubenswrapper[4558]: I0120 19:55:54.094375 4558 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-5css7" podUID="ce68ae90-f160-4693-8cfe-cfb0243db216" containerName="registry-server" containerID="cri-o://8123004038dbed52e59339ed1fdd210cc459825a8a61c9c61b0175bb91e9c775" gracePeriod=2
Jan 20 19:55:54 crc kubenswrapper[4558]: I0120 19:55:54.992283 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5css7"
Jan 20 19:55:55 crc kubenswrapper[4558]: I0120 19:55:55.015661 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jnhb4\" (UniqueName: \"kubernetes.io/projected/ce68ae90-f160-4693-8cfe-cfb0243db216-kube-api-access-jnhb4\") pod \"ce68ae90-f160-4693-8cfe-cfb0243db216\" (UID: \"ce68ae90-f160-4693-8cfe-cfb0243db216\") "
Jan 20 19:55:55 crc kubenswrapper[4558]: I0120 19:55:55.015709 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce68ae90-f160-4693-8cfe-cfb0243db216-catalog-content\") pod \"ce68ae90-f160-4693-8cfe-cfb0243db216\" (UID: \"ce68ae90-f160-4693-8cfe-cfb0243db216\") "
Jan 20 19:55:55 crc kubenswrapper[4558]: I0120 19:55:55.015748 4558 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce68ae90-f160-4693-8cfe-cfb0243db216-utilities\") pod \"ce68ae90-f160-4693-8cfe-cfb0243db216\" (UID: \"ce68ae90-f160-4693-8cfe-cfb0243db216\") "
Jan 20 19:55:55 crc kubenswrapper[4558]: I0120 19:55:55.016729 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ce68ae90-f160-4693-8cfe-cfb0243db216-utilities" (OuterVolumeSpecName: "utilities") pod "ce68ae90-f160-4693-8cfe-cfb0243db216" (UID: "ce68ae90-f160-4693-8cfe-cfb0243db216"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 20 19:55:55 crc kubenswrapper[4558]: I0120 19:55:55.025307 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ce68ae90-f160-4693-8cfe-cfb0243db216-kube-api-access-jnhb4" (OuterVolumeSpecName: "kube-api-access-jnhb4") pod "ce68ae90-f160-4693-8cfe-cfb0243db216" (UID: "ce68ae90-f160-4693-8cfe-cfb0243db216"). InnerVolumeSpecName "kube-api-access-jnhb4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Jan 20 19:55:55 crc kubenswrapper[4558]: I0120 19:55:55.084484 4558 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ce68ae90-f160-4693-8cfe-cfb0243db216-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ce68ae90-f160-4693-8cfe-cfb0243db216" (UID: "ce68ae90-f160-4693-8cfe-cfb0243db216"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Jan 20 19:55:55 crc kubenswrapper[4558]: I0120 19:55:55.125242 4558 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ce68ae90-f160-4693-8cfe-cfb0243db216-utilities\") on node \"crc\" DevicePath \"\""
Jan 20 19:55:55 crc kubenswrapper[4558]: I0120 19:55:55.126019 4558 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jnhb4\" (UniqueName: \"kubernetes.io/projected/ce68ae90-f160-4693-8cfe-cfb0243db216-kube-api-access-jnhb4\") on node \"crc\" DevicePath \"\""
Jan 20 19:55:55 crc kubenswrapper[4558]: I0120 19:55:55.126054 4558 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ce68ae90-f160-4693-8cfe-cfb0243db216-catalog-content\") on node \"crc\" DevicePath \"\""
Jan 20 19:55:55 crc kubenswrapper[4558]: I0120 19:55:55.131189 4558 generic.go:334] "Generic (PLEG): container finished" podID="ce68ae90-f160-4693-8cfe-cfb0243db216" containerID="8123004038dbed52e59339ed1fdd210cc459825a8a61c9c61b0175bb91e9c775" exitCode=0
Jan 20 19:55:55 crc kubenswrapper[4558]: I0120 19:55:55.131237 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5css7" event={"ID":"ce68ae90-f160-4693-8cfe-cfb0243db216","Type":"ContainerDied","Data":"8123004038dbed52e59339ed1fdd210cc459825a8a61c9c61b0175bb91e9c775"}
Jan 20 19:55:55 crc kubenswrapper[4558]: I0120 19:55:55.131273 4558 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5css7" event={"ID":"ce68ae90-f160-4693-8cfe-cfb0243db216","Type":"ContainerDied","Data":"52e7d3e283960948cb7999e82c6622b55da37b4e915cc07ea17651cbf9f1c33c"}
Jan 20 19:55:55 crc kubenswrapper[4558]: I0120 19:55:55.131295 4558 scope.go:117] "RemoveContainer" containerID="8123004038dbed52e59339ed1fdd210cc459825a8a61c9c61b0175bb91e9c775"
Jan 20 19:55:55 crc kubenswrapper[4558]: I0120 19:55:55.131439 4558 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5css7"
Jan 20 19:55:55 crc kubenswrapper[4558]: I0120 19:55:55.177531 4558 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-5css7"]
Jan 20 19:55:55 crc kubenswrapper[4558]: I0120 19:55:55.181097 4558 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-5css7"]
Jan 20 19:55:55 crc kubenswrapper[4558]: I0120 19:55:55.181243 4558 scope.go:117] "RemoveContainer" containerID="da4b46b031e5069145341a03670e8a4e8666ae9be16ccc054634b2d2621e7bf7"
Jan 20 19:55:55 crc kubenswrapper[4558]: I0120 19:55:55.218140 4558 scope.go:117] "RemoveContainer" containerID="c082b78198ff2170b81e21beddb02d92aa9a73c3af80156213eb45acff8bb587"
Jan 20 19:55:55 crc kubenswrapper[4558]: I0120 19:55:55.234245 4558 scope.go:117] "RemoveContainer" containerID="8123004038dbed52e59339ed1fdd210cc459825a8a61c9c61b0175bb91e9c775"
Jan 20 19:55:55 crc kubenswrapper[4558]: E0120 19:55:55.234753 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8123004038dbed52e59339ed1fdd210cc459825a8a61c9c61b0175bb91e9c775\": container with ID starting with 8123004038dbed52e59339ed1fdd210cc459825a8a61c9c61b0175bb91e9c775 not found: ID does not exist" containerID="8123004038dbed52e59339ed1fdd210cc459825a8a61c9c61b0175bb91e9c775"
Jan 20 19:55:55 crc kubenswrapper[4558]: I0120 19:55:55.234788 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8123004038dbed52e59339ed1fdd210cc459825a8a61c9c61b0175bb91e9c775"} err="failed to get container status \"8123004038dbed52e59339ed1fdd210cc459825a8a61c9c61b0175bb91e9c775\": rpc error: code = NotFound desc = could not find container \"8123004038dbed52e59339ed1fdd210cc459825a8a61c9c61b0175bb91e9c775\": container with ID starting with 8123004038dbed52e59339ed1fdd210cc459825a8a61c9c61b0175bb91e9c775 not found: ID does not exist"
Jan 20 19:55:55 crc kubenswrapper[4558]: I0120 19:55:55.234812 4558 scope.go:117] "RemoveContainer" containerID="da4b46b031e5069145341a03670e8a4e8666ae9be16ccc054634b2d2621e7bf7"
Jan 20 19:55:55 crc kubenswrapper[4558]: E0120 19:55:55.235109 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"da4b46b031e5069145341a03670e8a4e8666ae9be16ccc054634b2d2621e7bf7\": container with ID starting with da4b46b031e5069145341a03670e8a4e8666ae9be16ccc054634b2d2621e7bf7 not found: ID does not exist" containerID="da4b46b031e5069145341a03670e8a4e8666ae9be16ccc054634b2d2621e7bf7"
Jan 20 19:55:55 crc kubenswrapper[4558]: I0120 19:55:55.235146 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"da4b46b031e5069145341a03670e8a4e8666ae9be16ccc054634b2d2621e7bf7"} err="failed to get container status \"da4b46b031e5069145341a03670e8a4e8666ae9be16ccc054634b2d2621e7bf7\": rpc error: code = NotFound desc = could not find container \"da4b46b031e5069145341a03670e8a4e8666ae9be16ccc054634b2d2621e7bf7\": container with ID starting with da4b46b031e5069145341a03670e8a4e8666ae9be16ccc054634b2d2621e7bf7 not found: ID does not exist"
Jan 20 19:55:55 crc kubenswrapper[4558]: I0120 19:55:55.235228 4558 scope.go:117] "RemoveContainer" containerID="c082b78198ff2170b81e21beddb02d92aa9a73c3af80156213eb45acff8bb587"
Jan 20 19:55:55 crc kubenswrapper[4558]: E0120 19:55:55.236196 4558 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c082b78198ff2170b81e21beddb02d92aa9a73c3af80156213eb45acff8bb587\": container with ID starting with c082b78198ff2170b81e21beddb02d92aa9a73c3af80156213eb45acff8bb587 not found: ID does not exist" containerID="c082b78198ff2170b81e21beddb02d92aa9a73c3af80156213eb45acff8bb587"
Jan 20 19:55:55 crc kubenswrapper[4558]: I0120 19:55:55.236242 4558 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c082b78198ff2170b81e21beddb02d92aa9a73c3af80156213eb45acff8bb587"} err="failed to get container status \"c082b78198ff2170b81e21beddb02d92aa9a73c3af80156213eb45acff8bb587\": rpc error: code = NotFound desc = could not find container \"c082b78198ff2170b81e21beddb02d92aa9a73c3af80156213eb45acff8bb587\": container with ID starting with c082b78198ff2170b81e21beddb02d92aa9a73c3af80156213eb45acff8bb587 not found: ID does not exist"
Jan 20 19:55:56 crc kubenswrapper[4558]: I0120 19:55:56.574735 4558 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ce68ae90-f160-4693-8cfe-cfb0243db216" path="/var/lib/kubelet/pods/ce68ae90-f160-4693-8cfe-cfb0243db216/volumes"
var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz
var/home/core/zuul-output/logs/crc-cloud/
var/home/core/zuul-output/artifacts/
var/home/core/zuul-output/docs/
